diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..77712ae250b4ece8f3d95d34a3a43f79a41f6f06 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,9 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/current_animation-checkpoint.gif filter=lfs diff=lfs merge=lfs -text +Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/input_output_animation-checkpoint.gif filter=lfs diff=lfs merge=lfs -text +Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuroshio_animation-checkpoint.gif filter=lfs diff=lfs merge=lfs -text +Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/ocean_currents_animation-checkpoint.gif filter=lfs diff=lfs merge=lfs -text +Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/sample_animation-checkpoint.gif filter=lfs diff=lfs merge=lfs -text +Exp3_Kuroshio_forecasting/plt_triton/nmi_vis.ipynb filter=lfs diff=lfs merge=lfs -text diff --git a/Exp3_Kuroshio_forecasting/.DS_Store b/Exp3_Kuroshio_forecasting/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..30c46081cae21afa6ec5513df1770379be9ae5a7 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/.DS_Store differ diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_ConvLSTM_exp1_20250311_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_ConvLSTM_exp1_20250311_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a26ac33138452d02f0fef4bab5ccbcbc0164201d --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_ConvLSTM_exp1_20250311_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:68e12e494d088a481aa995c63c709dc208890abe9da52ac6f72742981a5658cc +size 11610 diff --git 
a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250221_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250221_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..14fc1c25afe5beb074c11833324588ab0cc76f0a --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250221_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63595e507a023ffaf26d6d1d7d3ec7b4f3dadbdf87e5c7881b8e1c9bc598ee83 +size 75550190 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250222_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250222_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ea6f1beb527b7eaa02011423361b4399b4af4f41 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250222_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f39ba8c5caaef358aba27d54b8ef392a5a51d4836480753dba4d898565a13a94 +size 75550056 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250223_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250223_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..840a181c9805c1b74277b4571e5b3f164728f862 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250223_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e00a1db14d32761d9f1ea1660dbd64c7c33d23f09358be805292b587c1c71dda +size 75550200 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250224_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250224_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..52cdc31a0208989ab6871592346fbe6a94b43343 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp1_20250224_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 
+oid sha256:e15e71e84d50c3bd848dd1bd1a888c7396deade2855f0f334fd7edb19a763b80 +size 75550204 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..47d5228a319b98a1bd573a77332f370cbe592fac --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b6bef4b0385afabcbb6a0c677a568e378de0b76a52a06dfbafd071a5bae24591 +size 75550709 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp2_20250316_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp2_20250316_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..62dedb762bacca61f9d312e1f596c4a192330978 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Dit_exp2_20250316_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f708b01a88610e1517094eb1da50ea24d99d7e41a5cac7638aaecfc5fc0b9cee +size 63095505 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Kno_exp1_20250226_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Kno_exp1_20250226_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..12781a8c78c3a5477e8b2461760131ee0656690b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Kno_exp1_20250226_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fe9e87f65bd7b0e3ea3f25d826332065c787b3ef8c0479b18bf13701a6ede152 +size 529476 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Kno_exp2_20250225_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Kno_exp2_20250225_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4b270eac6610709dd680a319d70a7165580f4b4a --- /dev/null +++ 
b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Kno_exp2_20250225_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b0ea4a92f7be963bfb50bb4c6d8976fb98b3f6a2236c351ffcccbe03239909a +size 99835562 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2e8d2b3d398a822fd8a181ad80599bd34144da09 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:489e579df4993f8b7e24606dc7773f6aae4823cc5fbec0ad31d32be4b304ca5c +size 19040548 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Simvp_exp_128_20250324_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Simvp_exp_128_20250324_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..4f832c5b8d6eebbf272459c3b450d94690016bdf --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Simvp_exp_128_20250324_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:613ba5274130915e187780bb4b29586a0dcb2f991a94e7696ec20468bb07f97d +size 19040464 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_K_uv_20250218_exp1_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_K_uv_20250218_exp1_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..a9442ad2e59c20ac86e08ccdf41cd583b86e1548 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_K_uv_20250218_exp1_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5e3f0a2d53bb67432564819e7e08fa35c15f46a898e0d8056cfbc3fdf78c8703 +size 378552683 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_K_uv_20250218_exp2_best_model.pth 
b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_K_uv_20250218_exp2_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..45ef91ef9d7881f660ad351840141c8e12c84265 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_K_uv_20250218_exp2_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9502f5eab2ce1a8ce6cc8961e9fafb201e62f0eed638ff1f3c6894dc1103cfdf +size 378552813 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_128_20250322_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_128_20250322_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..073bc5247a1dcc504f41faa808b08972986d4882 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_128_20250322_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d00a458f45ea7f8253f71ec5fb54aee5b4db91bc3b206d7c9c1a6a9f6e61f884 +size 378552684 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250221_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250221_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..329018a2147ef28f14947dd84daef085136201ce --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250221_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:671add7c61a1efa5544703e212dd6aa5845107977578ba0333d8772524daa301 +size 397465196 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250222_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250222_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..2cb37bd4eb7930e4cc6b321dceb8711e25e9eba9 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250222_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:c31f88b68ded17d0abee7bb76f0dfbc73b94173839b866c81b592cbd407c208d +size 397465203 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..80ab3759f85d10030ef740aa675b2dad870732a7 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:940b8236f041d3c847412a62cdac18e7f7cccc5f04059dc632d03167ca781760 +size 378552801 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250224_best_model_prediction.h5 b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250224_best_model_prediction.h5 new file mode 100644 index 0000000000000000000000000000000000000000..8986acf0ec5eb1a0905ebc38b16625ffe6749c7b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_20250224_best_model_prediction.h5 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37f30d197d020780343ab8c9054d2d5943d2560bd7b655bb22874250217d398f +size 68163584 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_64_20250323_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_64_20250323_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ac10af548024cda2a0f649ef7202ee8b6d656a83 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp1_64_20250323_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c29fbe97c5e335b97a2cba5c2d63f8e46b26c7ee8df8e95a001e9d6180496b8e +size 378552678 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp2_20241107_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp2_20241107_best_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..c93867b7017353e34d59c567c99ec0ab1ab391b8 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp2_20241107_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7abe16e33f1e63941f77a02f136639d9a928d41c63f935feb40b161e7a468c6b +size 378552823 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp3_20241107_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp3_20241107_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..f40138592befe0ad4e5e7d752543044d872f627e --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp3_20241107_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d8e14d8f13ab5530b1129ef408b64260c99c4acd4974d80f6a24925fbdf9c16 +size 378552675 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp3_20241111_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp3_20241111_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..849903798a80f1d30b7429f82645c85d42df6ed8 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp3_20241111_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a7c63b24f362dd6b929f1fc003949f843a8229040c12f94f54c90b22d0c16fe +size 378556268 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp_20241107_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp_20241107_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..810d533f59b217c62e826164d00506a62703f9e3 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_exp_20241107_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e54888a509a62f9805009527d153209b16c9861ea3226619523e92b3b879672 +size 378552694 diff --git 
a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_multi_finetune_20250227_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_multi_finetune_20250227_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..652f526f2df937d560c54787ed5449da021df011 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Triton_multi_finetune_20250227_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58c4db8010b0f292ac4bc93df237cf107f4f21643317a648c47c33e650b216c9 +size 432755108 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp1_20250225_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp1_20250225_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..ec02fdf95a769584ccf2c9c7c4267595fd7c5e35 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp1_20250225_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d4a08539f0188ea40e21b4b2b189ef3c24dd44ab683bed220525e3c84681927 +size 99835639 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp1_20250226_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp1_20250226_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..632c567dc2e6be23dde6d04b12cf4b6a23a87bf0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp1_20250226_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:928f8e6b4d14093c2f5a007d3a2b7bcec4e34adbca102b646db884ad19e61e10 +size 124189508 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp2_20250226_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp2_20250226_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..feafed3039c3b8f32645661881bc5654ea272197 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_U_net_exp2_20250226_best_model.pth @@ 
-0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64bbfd30c0a223c0a7bf002291b34bdf62eb7de532321e0a1305b420ebbff8e6 +size 99901810 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Unet_exp_128_20250324_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Unet_exp_128_20250324_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..01c9d14cdd06ee5cdccc2013cfca43bda710aefe --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Kuro_Unet_exp_128_20250324_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a6fbbf634b2e46b116f94759b6ee669d508f8c03b8caba95b8f4eb713291a0dc +size 30872161 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Triton_Gulf_uv_20250218_exp1_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Triton_Gulf_uv_20250218_exp1_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7a6fcb7946aa7ffe6a1ff37be6cd6d3f5dbfcc8a --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Triton_Gulf_uv_20250218_exp1_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f94637d268f4bcbb77bda05bd7cf32cce1390ecdf56bbfd75f9f8cc6a2202eee +size 378552693 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/Triton_Kuroshio_uv_20250218_exp1_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/Triton_Kuroshio_uv_20250218_exp1_best_model.pth new file mode 100644 index 0000000000000000000000000000000000000000..7ebff465b0beea161e89efe0ab06f76abf8a8cee --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/Triton_Kuroshio_uv_20250218_exp1_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9b7d665a91e73d33ecd27bf5dcbfcb07c104cfd6eb33c442726c30b96bd2cae +size 378552695 diff --git a/Exp3_Kuroshio_forecasting/checkpoints/dit_kuro_256_20250227_best_model.pth b/Exp3_Kuroshio_forecasting/checkpoints/dit_kuro_256_20250227_best_model.pth new file mode 100644 index 
0000000000000000000000000000000000000000..2dc0e9d412db923626f655b15e5c6ec1b89ddb34 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/checkpoints/dit_kuro_256_20250227_best_model.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:08389ce9e69da332168b798f6790544b3e9ff6c1fa8432c320d83a4d973ae1f7 +size 63092615 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/Untitled-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/Untitled-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/Untitled-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/current_animation-checkpoint.gif b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/current_animation-checkpoint.gif new file mode 100644 index 0000000000000000000000000000000000000000..baeeb057733357eb5b550a0047525063be6391ad --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/current_animation-checkpoint.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b041bd12464292ae0bcd2a8e5126f023a73c5e950d8a1a184ebcd1bd465dc152 +size 3085710 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..7c6e3bb0284bd0a5f32206a2287c1c2a5fbc0a84 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader-checkpoint.ipynb @@ -0,0 +1,397 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "f3b16ba8-ad82-45c1-8119-b6c61e7311b8", + "metadata": {}, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "Input shape: torch.Size([32, 10, 2, 128, 128])\n", + "Target shape: torch.Size([32, 5, 2, 128, 128])\n" + ] + } + ], + "source": [ + "import h5py\n", + "import numpy as np\n", + "import torch\n", + "from torch.utils.data import Dataset, DataLoader\n", + "\n", + "class KuroshioDataset(Dataset):\n", + " def __init__(self, data, input_length, target_length, downsample_factor=1):\n", + " \"\"\"\n", + " Args:\n", + " data: Tensor of shape (num_samples, num_timesteps, C, H, W)\n", + " input_length: Number of input time steps (T_in)\n", + " target_length: Number of prediction time steps (T_out)\n", + " downsample_factor: Spatial downsampling factor\n", + " \"\"\"\n", + " super().__init__()\n", + " self.data = data\n", + " self.input_length = input_length\n", + " self.target_length = target_length\n", + " self.downsample_factor = downsample_factor\n", + "\n", + " # Validate time dimensions\n", + " self.num_samples, self.num_timesteps, self.C, self.H, self.W = data.shape\n", + " self.max_t_start = self.num_timesteps - self.input_length - self.target_length\n", + " assert self.max_t_start >= 0, \"Not enough timesteps for input and output\"\n", + "\n", + " # Generate sample indices (sample_idx, t_start)\n", + " self.sample_indices = []\n", + " for s in range(self.num_samples):\n", + " for t_start in range(self.max_t_start + 1):\n", + " self.sample_indices.append((s, t_start))\n", + "\n", + " def __len__(self):\n", + " return len(self.sample_indices)\n", + "\n", + " def __getitem__(self, idx):\n", + " s, t_start = self.sample_indices[idx]\n", + " \n", + " # Extract sequences\n", + " input_end = t_start + self.input_length\n", + " output_end = input_end + self.target_length\n", + " \n", + " input_seq = self.data[s, t_start:input_end] # (T_in, C, H, W)\n", + " target_seq = self.data[s, input_end:output_end] # (T_out, C, H, W)\n", + "\n", + " # Apply downsampling\n", + " if self.downsample_factor > 1:\n", + " dsf = 
self.downsample_factor\n", + " input_seq = input_seq[..., ::dsf, ::dsf]\n", + " target_seq = target_seq[..., ::dsf, ::dsf]\n", + "\n", + " return input_seq.float(), target_seq.float()\n", + "\n", + "def load_datasets(file_path, args):\n", + " # Load and preprocess data\n", + " with h5py.File(file_path, 'r') as f:\n", + " u_k = np.transpose(f['u_k'][:], (0, 3, 1, 2)) # (2046, 50, 128, 128)\n", + " v_k = np.transpose(f['v_k'][:], (0, 3, 1, 2))\n", + " \n", + " # Combine u and v channels\n", + " combined = np.stack([u_k, v_k], axis=2) # (2046, 50, 2, 128, 128)\n", + " data_tensor = torch.tensor(combined, dtype=torch.float32)\n", + "\n", + " # Split dataset\n", + " total_samples = 2046\n", + " train_size = int(0.8 * total_samples)\n", + " val_size = int(0.1 * total_samples)\n", + " \n", + " train_data = data_tensor[:train_size]\n", + " val_data = data_tensor[train_size:train_size+val_size]\n", + " test_data = data_tensor[train_size+val_size:]\n", + "\n", + " # Create datasets\n", + " train_dataset = KuroshioDataset(train_data, \n", + " args['input_length'],\n", + " args['target_length'],\n", + " args['downsample_factor'])\n", + " \n", + " val_dataset = KuroshioDataset(val_data,\n", + " args['input_length'],\n", + " args['target_length'],\n", + " args['downsample_factor'])\n", + " \n", + " test_dataset = KuroshioDataset(test_data,\n", + " args['input_length'],\n", + " args['target_length'],\n", + " args['downsample_factor'])\n", + "\n", + " return train_dataset, val_dataset, test_dataset\n", + "\n", + "# Example usage\n", + "if __name__ == \"__main__\":\n", + " config = {\n", + " 'input_length': 10, # T_in: 输入时间步数\n", + " 'target_length': 5, # T_out: 预测时间步数\n", + " 'downsample_factor': 1 # 空间下采样因子\n", + " }\n", + "\n", + " # 加载数据集\n", + " train_ds, val_ds, test_ds = load_datasets('./Kuroshio_window_data.h5', config)\n", + "\n", + " # 创建DataLoader\n", + " batch_size = 32\n", + " train_loader = DataLoader(train_ds, batch_size=batch_size, shuffle=True)\n", + " val_loader = 
DataLoader(val_ds, batch_size=batch_size, shuffle=False)\n", + " test_loader = DataLoader(test_ds, batch_size=batch_size, shuffle=False)\n", + "\n", + " # 验证数据形状\n", + " sample_input, sample_target = next(iter(train_loader))\n", + " print(f\"Input shape: {sample_input.shape}\") # 应为 (B, T_in, 2, H, W)\n", + " print(f\"Target shape: {sample_target.shape}\") # 应为 (B, T_out, 2, H, W)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "9c6b1e5c-7874-49f2-9004-c17470a3ae85", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "可视化已保存为 kuroshio_animation.gif\n" + ] + } + ], + "source": [ + "import h5py\n", + "import numpy as np\n", + "import torch\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.animation as animation\n", + "from matplotlib import gridspec\n", + "from torch.utils.data import Dataset, DataLoader\n", + "\n", + "\n", + "# 修正后的可视化函数\n", + "def create_visualization(input_seq, target_seq, sample_idx=0, downsample=4, fps=10):\n", + " # 数据准备\n", + " input_np = input_seq[sample_idx].cpu().numpy()\n", + " target_np = target_seq[sample_idx].cpu().numpy()\n", + " full_seq = np.concatenate([input_np, target_np], axis=0)\n", + " full_seq = np.transpose(full_seq, (0, 2, 3, 1)) # [T, H, W, C]\n", + " \n", + " # 创建网格\n", + " H, W = full_seq.shape[1], full_seq.shape[2]\n", + " x = np.arange(W)\n", + " y = np.arange(H)\n", + " X, Y = np.meshgrid(x, y)\n", + " X_ds, Y_ds = X[::downsample, ::downsample], Y[::downsample, ::downsample]\n", + " \n", + " # 计算速度幅值\n", + " speed = np.sqrt(full_seq[...,0]**2 + full_seq[...,1]**2)\n", + " speed_min, speed_max = speed.min(), speed.max()\n", + " \n", + " # 创建画布\n", + " fig = plt.figure(figsize=(15, 5), facecolor='white')\n", + " gs = gridspec.GridSpec(1, 3, width_ratios=[1, 1, 1])\n", + " ax1 = plt.subplot(gs[0])\n", + " ax2 = plt.subplot(gs[1])\n", + " ax3 = plt.subplot(gs[2])\n", + " \n", + " # 初始化子图\n", + " im1 = ax1.imshow(full_seq[0,...,0], 
origin='lower', cmap='RdBu_r', vmax=1, vmin=-1)\n", + " ax1.set_title(\"U Component\")\n", + " plt.colorbar(im1, ax=ax1)\n", + " \n", + " im2 = ax2.imshow(full_seq[0,...,1], origin='lower', cmap='RdBu_r', vmax=1, vmin=-1)\n", + " ax2.set_title(\"V Component\")\n", + " plt.colorbar(im2, ax=ax2)\n", + " \n", + " # 初始化矢量场\n", + " U = full_seq[0,...,0][::downsample, ::downsample]\n", + " V = full_seq[0,...,1][::downsample, ::downsample]\n", + " speed_initial = np.sqrt(U**2 + V**2)\n", + " quiver = ax3.quiver(X_ds, Y_ds, U, V, speed_initial, \n", + " cmap='RdBu_r', \n", + " scale=50, \n", + " width=0.003,\n", + " clim=[speed_min, speed_max])\n", + " plt.colorbar(quiver, ax=ax3, label='Flow Speed')\n", + " ax3.set_title(\"Vector Field\")\n", + " \n", + " # 统一设置\n", + " for ax in [ax1, ax2, ax3]:\n", + " ax.set_xticks([])\n", + " ax.set_yticks([])\n", + " ax.set_xlabel(f\"Timestep: 0/{full_seq.shape[0]-1}\")\n", + " \n", + " # 动画更新函数\n", + " def update(frame):\n", + " # 更新分量图\n", + " im1.set_data(full_seq[frame,...,0])\n", + " im2.set_data(full_seq[frame,...,1])\n", + " \n", + " # 更新矢量场\n", + " U = full_seq[frame,...,0][::downsample, ::downsample]\n", + " V = full_seq[frame,...,1][::downsample, ::downsample]\n", + " speed = np.sqrt(U**2 + V**2)\n", + " \n", + " quiver.set_UVC(U, V)\n", + " quiver.set_array(speed.flatten())\n", + " \n", + " # 更新时间标签\n", + " for ax in [ax1, ax2, ax3]:\n", + " ax.set_xlabel(f\"Timestep: {frame}/{full_seq.shape[0]-1}\")\n", + " \n", + " return [im1, im2, quiver]\n", + " \n", + " # 生成动画\n", + " ani = animation.FuncAnimation(fig, update, frames=full_seq.shape[0], \n", + " interval=1000//fps, blit=False)\n", + " ani.save('kuroshio_animation.gif', writer='pillow', fps=fps)\n", + " plt.close()\n", + " print(\"可视化已保存为 kuroshio_animation.gif\")\n", + "\n", + "# 完整使用示例\n", + "if __name__ == \"__main__\":\n", + " # 配置参数\n", + " config = {\n", + " 'input_length': 25,\n", + " 'target_length': 25,\n", + " 'downsample_factor': 1\n", + " }\n", + " \n", + " 
# 加载数据\n", + " train_ds, val_ds, test_ds = load_datasets('./Kuroshio_window_data.h5', config)\n", + " train_loader = DataLoader(train_ds, batch_size=10, shuffle=True)\n", + " \n", + " # 获取样本数据\n", + " sample_input, sample_target = next(iter(train_loader))\n", + " \n", + " # 生成可视化(关键参数调整)\n", + " create_visualization(\n", + " sample_input, \n", + " sample_target,\n", + " sample_idx=2, # 选择样本索引\n", + " downsample=1, # 矢量场密度(值越小越密集)\n", + " fps=4 # 动画帧率\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d0454a79-3e01-49dd-b4c5-0aca0bd76bcf", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import torch\n", + "import torch.distributed as dist\n", + "from torch.utils.data import Dataset, DataLoader\n", + "from torch.utils.data.distributed import DistributedSampler\n", + "import h5py\n", + "import numpy as np\n", + "from torch.utils.data import Dataset\n", + "from torch.utils.data import DataLoader\n", + "import torchvision.transforms as transforms\n", + "import torch.utils.data as data\n", + "import h5py\n", + "import torch\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "class WeatherDataset(Dataset):\n", + " def __init__(self, data_path, horizon, transform=None):\n", + " with h5py.File(data_path, 'r') as f:\n", + " self.data_uv_g = f['u_k'][:] \n", + " self.data_uv_g = torch.from_numpy(self.data_uv_g).to(torch.float32)\n", + " self.data_uv_g = self.data_uv_g.permute(0, 3, 1, 2).unsqueeze_(2) \n", + " \n", + " self.data_uv_k = f['v_k'][:] \n", + " self.data_uv_k = torch.from_numpy(self.data_uv_k).to(torch.float32)\n", + " self.data_uv_k = self.data_uv_k.permute(0, 3, 1, 2).unsqueeze_(2) \n", + " self.data_uv_gk = torch.cat([self.data_uv_g, self.data_uv_k], dim=2)\n", + " self.transform = transform\n", + " self.horizon = horizon\n", + " self.mean = 0\n", + " self.std = 1\n", + " \n", + " def __len__(self):\n", + " return len(self.data_uv_gk)\n", + "\n", + " def __getitem__(self, idx):\n", + 
" input_frames = self.data_uv_gk[idx][:self.horizon]\n", + " output_frames = self.data_uv_gk[idx][self.horizon:2*self.horizon]\n", + " input_frames = (input_frames - self.mean) / self.std\n", + " output_frames = (output_frames - self.mean) / self.std\n", + " return input_frames, output_frames\n", + "\n", + "def load_data(data_path, batch_size, val_batch_size, horizon, num_workers):\n", + " dataset = WeatherDataset(data_path=data_path+'/kg_all_20_mask_latmean.h5', horizon=horizon, transform=None)\n", + " \n", + " total_samples = len(dataset)\n", + " train_size = int(0.8 * total_samples)\n", + " val_size = int(0.1 * total_samples)\n", + " \n", + " train_dataset = dataset[:train_size]\n", + " val_dataset = dataset[train_size:train_size+val_size]\n", + " test_dataset = dataset[train_size+val_size:]\n", + " \n", + " train_sampler = DistributedSampler(train_dataset)\n", + " val_sampler = DistributedSampler(val_dataset)\n", + " test_sampler = DistributedSampler(test_dataset)\n", + "\n", + " dataloader_train = DataLoader(train_dataset, batch_size=batch_size, sampler=train_sampler, pin_memory=False,\n", + " num_workers=num_workers, drop_last=True)\n", + " dataloader_validation = DataLoader(val_dataset, batch_size=val_batch_size, sampler=val_sampler, pin_memory=False,\n", + " num_workers=num_workers, drop_last=True)\n", + " dataloader_test = DataLoader(test_dataset, batch_size=val_batch_size, sampler=test_sampler, pin_memory=False,\n", + " num_workers=num_workers, drop_last=True)\n", + " mean, std = 0, 1\n", + "\n", + " return dataloader_train, dataloader_validation, dataloader_test, mean, std" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "6b051bb4-492a-4b4a-828a-6099ce9437b4", + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'data_tensor' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + 
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[3], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;18m__name__\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m__main__\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[0;32m----> 2\u001b[0m train_loader, val_loader, test_loader, mean, std \u001b[38;5;241m=\u001b[39m \u001b[43mload_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata_path\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43m/jizhicfs/easyluwu/ocean_project/kuro/ft_local\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 3\u001b[0m \u001b[43m \u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m8\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 4\u001b[0m \u001b[43m \u001b[49m\u001b[43mval_batch_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m8\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 5\u001b[0m \u001b[43m \u001b[49m\u001b[43mhorizon\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m10\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 6\u001b[0m \u001b[43m \u001b[49m\u001b[43mnum_workers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m8\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m input_frames, output_frames \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28miter\u001b[39m(train_loader):\n\u001b[1;32m 8\u001b[0m \u001b[38;5;28mprint\u001b[39m(input_frames\u001b[38;5;241m.\u001b[39mshape, output_frames\u001b[38;5;241m.\u001b[39mshape) \u001b[38;5;66;03m# [B, T, C, H, W]\u001b[39;00m\n", + "Cell \u001b[0;32mIn[2], line 50\u001b[0m, in \u001b[0;36mload_data\u001b[0;34m(data_path, batch_size, val_batch_size, horizon, num_workers)\u001b[0m\n\u001b[1;32m 47\u001b[0m train_size \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mint\u001b[39m(\u001b[38;5;241m0.8\u001b[39m \u001b[38;5;241m*\u001b[39m total_samples)\n\u001b[1;32m 48\u001b[0m val_size \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mint\u001b[39m(\u001b[38;5;241m0.1\u001b[39m \u001b[38;5;241m*\u001b[39m total_samples)\n\u001b[0;32m---> 50\u001b[0m train_dataset \u001b[38;5;241m=\u001b[39m \u001b[43mdata_tensor\u001b[49m[:train_size]\n\u001b[1;32m 51\u001b[0m val_dataset \u001b[38;5;241m=\u001b[39m data_tensor[train_size:train_size\u001b[38;5;241m+\u001b[39mval_size]\n\u001b[1;32m 52\u001b[0m test_dataset \u001b[38;5;241m=\u001b[39m data_tensor[train_size\u001b[38;5;241m+\u001b[39mval_size:]\n", + "\u001b[0;31mNameError\u001b[0m: name 'data_tensor' is not defined" + ] + } + ], + "source": [ + "if __name__ == '__main__':\n", + " train_loader, val_loader, test_loader, mean, std = load_data(data_path='/jizhicfs/easyluwu/ocean_project/kuro/ft_local',\n", + " batch_size=8, \n", + " val_batch_size=8, \n", + " horizon=10,\n", + " num_workers=8)\n", + " for input_frames, output_frames in iter(train_loader):\n", + " print(input_frames.shape, output_frames.shape) # [B, T, C, H, W]\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "95e177b0-9b93-42b6-b809-350fadc23a9b", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.20" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader-checkpoint.py new file mode 100644 index 
0000000000000000000000000000000000000000..fb847307cf14da80d465e2f77c21a50654daab0b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader-checkpoint.py @@ -0,0 +1,122 @@ +import numpy as np +import netCDF4 as nc +import torch +import torch.utils.data as data + +args = { + 'data_path': '/data/workspace/yancheng/MM/OriSTP/dataset/05res', + 'ocean_lead_time': 10, + 'atmosphere_lead_time': 10, + 'shuffle': True, + 'variables_input': [0, 2, 3, 4], + 'variables_future': [2, 3, 4], + 'variables_output': [0], + 'lon_start': 0, + 'lat_start': 0, + 'lon_end': 720, + 'lat_end': 360, + 'ds_factor': 1, +} + +class train_Dataset(data.Dataset): + def __init__(self, args): + super(train_Dataset, self).__init__() + self.args = args + self.years = range(1993, 2018) + self.dates = range(12, 357, 3) + self.indices = [(m, n) for m in self.years for n in self.dates] + + def __getitem__(self, index): + years, dates = self.indices[index] + train_data = nc.Dataset(f'{self.args["data_path"]}/025res_{years}.nc') + input_now = train_data.variables['mhws_variables'][dates-self.args['atmosphere_lead_time']+1:dates+1, + self.args['variables_input'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input_future = train_data.variables['mhws_variables'][dates+1:dates+self.args['atmosphere_lead_time']+1, + self.args['variables_future'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = np.concatenate([input_now, input_future], 1) + + target = train_data.variables['mhws_variables'][dates+1:dates+self.args['ocean_lead_time']+1, + self.args['variables_output'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = torch.tensor(input, dtype=torch.float32) + target = 
torch.tensor(target, dtype=torch.float32) + input = torch.nan_to_num(input, nan=0.0) + target = torch.nan_to_num(target, nan=0.0) + + return input, target + + def __len__(self): + return len(self.indices) + +class test_Dataset(data.Dataset): + def __init__(self, args): + super(test_Dataset, self).__init__() + self.args = args + self.years = range(2018, 2022) + self.dates = range(12, 357, 3) + self.indices = [(m, n) for m in self.years for n in self.dates] + + def __getitem__(self, index): + years, dates = self.indices[index] + train_data = nc.Dataset(f'{self.args["data_path"]}/025res_{years}.nc') + input_now = train_data.variables['mhws_variables'][dates-self.args['atmosphere_lead_time']+1:dates+1, + self.args['variables_input'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input_future = train_data.variables['mhws_variables'][dates+1:dates+self.args['atmosphere_lead_time']+1, + self.args['variables_future'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = np.concatenate([input_now, input_future], 1) + + target = train_data.variables['mhws_variables'][dates+1:dates+self.args['ocean_lead_time']+1, + self.args['variables_output'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = torch.tensor(input, dtype=torch.float32) + target = torch.tensor(target, dtype=torch.float32) + input = torch.nan_to_num(input, nan=0.0) + target = torch.nan_to_num(target, nan=0.0) + + return input, target + + def __len__(self): + return len(self.indices) + +if __name__ == '__main__': + args = { + 'data_path': '/jizhicfs/easyluwu/dataset/ft_local', + 'ocean_lead_time': 10, + 'atmosphere_lead_time': 10, + 'shuffle': True, + 'variables_input': [1, 2, 3, 4], + 'variables_future': [2, 3, 4], + 
'variables_output': [1], + 'lon_start': 0, + 'lat_start': 0, + 'lon_end': 1440, + 'lat_end': 720, + 'ds_factor': 1, +} + + + train_dataset = train_Dataset(args) + test_dataset = test_Dataset(args) + + train_loader = data.DataLoader(train_dataset, batch_size=2) + test_loader = data.DataLoader(test_dataset, batch_size=2) + + for inputs, targets in iter(train_loader): + print(inputs.shape, targets.shape) + break diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_high_kuro-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_high_kuro-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..692b24836205b35ee279809ff2a75624ebd58ab0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_high_kuro-checkpoint.py @@ -0,0 +1,82 @@ +import numpy as np +import netCDF4 as nc +import torch +import torch.utils.data as data + + + +class train_Dataset(data.Dataset): + def __init__(self, args): + super(train_Dataset, self).__init__() + self.args = args + self.years = range(1993, 2018) + self.dates = range(12, 357, 3) + self.indices = [(m, n) for m in self.years for n in self.dates] + + def __getitem__(self, index): + years, dates = self.indices[index] + train_data = nc.Dataset(f'{self.args["data_path"]}/KURO_{years}_norm.nc') + input_now = train_data.variables['mhw_variables'][dates-self.args['atmosphere_lead_time']+1:dates+1, + self.args['variables_input'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input_future = train_data.variables['mhw_variables'][dates+1:dates+self.args['atmosphere_lead_time']+1, + self.args['variables_future'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = np.concatenate([input_now, input_future], 1) + + target = 
train_data.variables['mhw_variables'][dates+1:dates+self.args['ocean_lead_time']+1, + self.args['variables_output'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = torch.tensor(input, dtype=torch.float32) + target = torch.tensor(target, dtype=torch.float32) + input = torch.nan_to_num(input, nan=0.0) + target = torch.nan_to_num(target, nan=0.0) + + return input, target + + def __len__(self): + return len(self.indices) + +class test_Dataset(data.Dataset): + def __init__(self, args): + super(test_Dataset, self).__init__() + self.args = args + self.years = range(2018, 2021) + self.dates = range(12, 357, 3) + self.indices = [(m, n) for m in self.years for n in self.dates] + + def __getitem__(self, index): + years, dates = self.indices[index] + train_data = nc.Dataset(f'{self.args["data_path"]}/KURO_{years}_norm.nc') + input_now = train_data.variables['mhw_variables'][dates-self.args['atmosphere_lead_time']+1:dates+1, + self.args['variables_input'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input_future = train_data.variables['mhw_variables'][dates+1:dates+self.args['atmosphere_lead_time']+1, + self.args['variables_future'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = np.concatenate([input_now, input_future], 1) + + target = train_data.variables['mhw_variables'][dates+1:dates+self.args['ocean_lead_time']+1, + self.args['variables_output'], + self.args['lat_start']:self.args['lat_end']:self.args['ds_factor'], + self.args['lon_start']:self.args['lon_end']:self.args['ds_factor']] + + input = torch.tensor(input, dtype=torch.float32) + target = torch.tensor(target, dtype=torch.float32) + input = torch.nan_to_num(input, nan=0.0) + target = torch.nan_to_num(target, 
nan=0.0) + + return input, target + + def __len__(self): + return len(self.indices) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..30f2cf7250f8b1bafdda5ff5cb932f22d321e0ca --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio-checkpoint.ipynb @@ -0,0 +1,209 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 4, + "id": "f7a16b9b-07cb-46af-b891-d225ca8a8b2c", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/cuda/__init__.py:129: UserWarning: CUDA initialization: The NVIDIA driver on your system is too old (found version 11000). Please update your GPU driver by downloading and installing a new version from the URL: http://www.nvidia.com/Download/index.aspx Alternatively, go to: https://pytorch.org to install a PyTorch version that has been compiled with your version of the CUDA driver. 
(Triggered internally at /pytorch/c10/cuda/CUDAFunctions.cpp:109.)\n", + " return torch._C._cuda_getDeviceCount() > 0\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256]) torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])\n", + "\n", + " torch.Size([10, 2, 256, 256]) \n", + "torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])\n", + "\n", + "\n", + "\n", + " torch.Size([10, 2, 256, 256])\n", + "torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256]) torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])\n", + "\n", + "torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256]) \n", + "torch.Size([10, 2, 256, 256]) torch.Size([10, 2, 256, 256]) torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])torch.Size([10, 2, 256, 256])\n", + "\n", + "\n", + " torch.Size([10, 2, 256, 256])\n", + "torch.Size([10, 2, 256, 256]) torch.Size([10, 2, 256, 256])\n", + "torch.Size([1, 10, 2, 256, 256]) torch.Size([1, 10, 2, 256, 256])\n", + "输入数据范围: [-1.54, 1.66]\n", + "NaN值存在性: False\n", + "Inf值存在性: False\n" + ] + } + ], + "source": [ + "import torch\n", + "import torch.distributed as dist\n", + "from torch.utils.data import Dataset, DataLoader\n", + "from torch.utils.data.distributed import DistributedSampler\n", + "import netCDF4 as nc\n", + "import numpy as np\n", + "\n", + "class OceanCurrentDataset(Dataset):\n", + " def __init__(self, data_path, input_steps=10, output_steps=10, transform=None):\n", + " \"\"\"\n", + " 海洋流数据集类\n", + " :param data_path: NetCDF文件路径\n", + " :param input_steps: 输入时间步数\n", + " :param output_steps: 预测时间步数\n", + " :param transform: 数据增强变换\n", + " \"\"\"\n", + " 
self.data_path = data_path\n", + " self.input_steps = input_steps\n", + " self.output_steps = output_steps\n", + " self.transform = transform\n", + " self.total_steps = input_steps + output_steps\n", + " \n", + " # 加载并预处理数据\n", + " self.data = self._load_and_process_data()\n", + " self.mean, self.std = 0, 1\n", + "\n", + " def _load_and_process_data(self):\n", + " \"\"\"加载和处理NetCDF数据\"\"\"\n", + " with nc.Dataset(self.data_path, 'r') as ds:\n", + " # 处理缺失值\n", + " def process_var(var):\n", + " arr = var[:]\n", + " if '_FillValue' in var.ncattrs():\n", + " fill_value = var._FillValue\n", + " arr = np.ma.masked_values(arr, fill_value).filled(np.nan)\n", + " return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0)\n", + "\n", + " # 加载并合并UV分量\n", + " ugos = process_var(ds['ugos']) # (time, lat, lon)\n", + " vgos = process_var(ds['vgos'])\n", + " \n", + " # 调整维度顺序 [time, channels, lat, lon]\n", + " return torch.stack([ugos, vgos], dim=1) \n", + "\n", + " def _compute_stats(self):\n", + " \"\"\"计算训练集的统计量\"\"\"\n", + " return torch.mean(self.data[:10000]), torch.std(self.data[:10000])\n", + "\n", + " def __len__(self):\n", + " return len(self.data) - self.total_steps + 1\n", + "\n", + " def __getitem__(self, idx):\n", + " window = self.data[idx:idx+self.total_steps] # [T_total, C, H, W]\n", + " \n", + " window = (window - self.mean) / self.std\n", + " \n", + " # 分割输入输出\n", + " input_seq = window[:self.input_steps]\n", + " target_seq = window[self.input_steps:]\n", + " print(input_seq.shape, target_seq.shape)\n", + " \n", + " if self.transform:\n", + " input_seq = self.transform(input_seq)\n", + " target_seq = self.transform(target_seq)\n", + " \n", + " return input_seq[:,:,::2,::2], target_seq[:,:,::2,::2]\n", + "\n", + "def create_dataloaders(config):\n", + " full_dataset = OceanCurrentDataset(\n", + " data_path=config['data_path'],\n", + " input_steps=config['input_steps'],\n", + " output_steps=config['output_steps']\n", + " )\n", + " \n", + " train_size = 10000 - 
config['input_steps'] - config['output_steps'] + 1\n", + " val_size = 500\n", + " test_size = len(full_dataset) - train_size - val_size\n", + " \n", + " train_dataset, val_dataset, test_dataset = torch.utils.data.random_split(\n", + " full_dataset, [train_size, val_size, test_size],\n", + " generator=torch.Generator().manual_seed(config['seed'])\n", + " )\n", + " \n", + " # train_sampler = DistributedSampler(train_dataset, shuffle=True)\n", + " # val_sampler = DistributedSampler(val_dataset, shuffle=False)\n", + " # test_sampler = DistributedSampler(test_dataset, shuffle=False)\n", + " \n", + " dataloader_train = DataLoader(\n", + " train_dataset,\n", + " batch_size=config['batch_size'],\n", + " num_workers=config['num_workers'],\n", + " pin_memory=True,\n", + " drop_last=True\n", + " )\n", + " \n", + " dataloader_val = DataLoader(\n", + " val_dataset,\n", + " batch_size=config['val_batch_size'],\n", + " num_workers=config['num_workers'],\n", + " pin_memory=True,\n", + " drop_last=True\n", + " )\n", + " \n", + " dataloader_test = DataLoader(\n", + " test_dataset,\n", + " batch_size=config['val_batch_size'],\n", + " num_workers=config['num_workers'],\n", + " pin_memory=True,\n", + " drop_last=True\n", + " )\n", + " \n", + " return dataloader_train, dataloader_val, dataloader_test, full_dataset.mean, full_dataset.std\n", + "\n", + "config = {\n", + " 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc',\n", + " 'input_steps': 10,\n", + " 'output_steps': 10,\n", + " 'batch_size': 1,\n", + " 'val_batch_size': 1,\n", + " 'num_workers': 8,\n", + " 'seed': 42\n", + "}\n", + "# dist.init_process_group(backend='nccl')\n", + "\n", + "train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config)\n", + "\n", + "for sample_input, sample_target in train_loader:\n", + " print(sample_input.shape, sample_target.shape)\n", + " print(f\"输入数据范围: [{sample_input.min():.2f}, {sample_input.max():.2f}]\")\n", + " print(f\"NaN值存在性: 
{torch.isnan(sample_input).any().item()}\")\n", + " print(f\"Inf值存在性: {torch.isinf(sample_input).any().item()}\")\n", + " break" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ad0379fc-13ba-48b3-84ad-5356f0e03968", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..731aa178b81396b5228669fdc7710f31ffe9912a --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio-checkpoint.py @@ -0,0 +1,69 @@ +import os +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import h5py +import numpy as np +from torch.utils.data import Dataset +from torch.utils.data import DataLoader +import torchvision.transforms as transforms +import torch.utils.data as data +import h5py +import torch +import numpy as np +import matplotlib.pyplot as plt + +class WeatherDataset(Dataset): + def __init__(self, data_path, horizon, transform=None): + with h5py.File(data_path, 'r') as f: + self.data_uv_g = f['uv_g'][:] + self.data_uv_g = torch.from_numpy(self.data_uv_g).to(torch.float32) + self.data_uv_g = self.data_uv_g.permute(0, 3, 1, 2).unsqueeze_(2) + + self.data_uv_k = f['uv_k'][:] + self.data_uv_k = 
torch.from_numpy(self.data_uv_k).to(torch.float32) + self.data_uv_k = self.data_uv_k.permute(0, 3, 1, 2).unsqueeze_(2) + self.data_uv_gk = torch.cat([self.data_uv_g, self.data_uv_k], dim=2) + self.transform = transform + self.horizon = horizon + self.mean = 0 + self.std = 1 + + def __len__(self): + return len(self.data_uv_gk) + + def __getitem__(self, idx): + input_frames = self.data_uv_gk[idx][:self.horizon] + output_frames = self.data_uv_gk[idx][self.horizon:2*self.horizon] + input_frames = (input_frames - self.mean) / self.std + output_frames = (output_frames - self.mean) / self.std + return input_frames, output_frames + +def load_data(data_path, batch_size, val_batch_size, horizon, num_workers): + dataset = WeatherDataset(data_path=data_path+'/kg_all_20_mask_latmean.h5', horizon=horizon, transform=None) + dataset_size = len(dataset) + train_size = int(dataset_size * 0.8) + val_size = int(dataset_size * 0.1) + test_size = dataset_size - train_size - val_size + + train_dataset, val_dataset, test_dataset = data.random_split(dataset, [train_size, val_size, test_size]) + + train_sampler = DistributedSampler(train_dataset) + val_sampler = DistributedSampler(val_dataset) + test_sampler = DistributedSampler(test_dataset) + + dataloader_train = DataLoader(train_dataset, batch_size=batch_size, sampler=train_sampler, pin_memory=True, + num_workers=num_workers, drop_last=True) + dataloader_validation = DataLoader(val_dataset, batch_size=val_batch_size, sampler=val_sampler, pin_memory=True, + num_workers=num_workers, drop_last=True) + dataloader_test = DataLoader(test_dataset, batch_size=val_batch_size, sampler=test_sampler, pin_memory=True, + num_workers=num_workers, drop_last=True) + mean, std = 0, 1 + + return dataloader_train, dataloader_validation, dataloader_test, mean, std + + + + + diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_G_uv-checkpoint.py 
b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_G_uv-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..cd8c30b89b89ffe94a738146372f33ae239a35e0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_G_uv-checkpoint.py @@ -0,0 +1,69 @@ +import os +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import h5py +import numpy as np +from torch.utils.data import Dataset +from torch.utils.data import DataLoader +import torchvision.transforms as transforms +import torch.utils.data as data +import h5py +import torch +import numpy as np +import matplotlib.pyplot as plt + +class WeatherDataset(Dataset): + def __init__(self, data_path, horizon, transform=None): + with h5py.File(data_path, 'r') as f: + self.data_uv_g = f['u_g'][:] + self.data_uv_g = torch.from_numpy(self.data_uv_g).to(torch.float32) + self.data_uv_g = self.data_uv_g.permute(0, 3, 1, 2).unsqueeze_(2) + + self.data_uv_k = f['v_g'][:] + self.data_uv_k = torch.from_numpy(self.data_uv_k).to(torch.float32) + self.data_uv_k = self.data_uv_k.permute(0, 3, 1, 2).unsqueeze_(2) + self.data_uv_gk = torch.cat([self.data_uv_g, self.data_uv_k], dim=2) + self.transform = transform + self.horizon = horizon + self.mean = 0 + self.std = 1 + + def __len__(self): + return len(self.data_uv_gk) + + def __getitem__(self, idx): + input_frames = self.data_uv_gk[idx][:self.horizon] + output_frames = self.data_uv_gk[idx][self.horizon:2*self.horizon] + input_frames = (input_frames - self.mean) / self.std + output_frames = (output_frames - self.mean) / self.std + return input_frames, output_frames + +def load_data(data_path, batch_size, val_batch_size, horizon, num_workers): + dataset = WeatherDataset(data_path=data_path+'/kg_all_20_mask_latmean.h5', horizon=horizon, transform=None) + dataset_size = len(dataset) + 
train_size = int(dataset_size * 0.8) + val_size = int(dataset_size * 0.1) + test_size = dataset_size - train_size - val_size + + train_dataset, val_dataset, test_dataset = data.random_split(dataset, [train_size, val_size, test_size]) + + train_sampler = DistributedSampler(train_dataset) + val_sampler = DistributedSampler(val_dataset) + test_sampler = DistributedSampler(test_dataset) + + dataloader_train = DataLoader(train_dataset, batch_size=batch_size, sampler=train_sampler, pin_memory=False, + num_workers=num_workers, drop_last=True) + dataloader_validation = DataLoader(val_dataset, batch_size=val_batch_size, sampler=val_sampler, pin_memory=False, + num_workers=num_workers, drop_last=True) + dataloader_test = DataLoader(test_dataset, batch_size=val_batch_size, sampler=test_sampler, pin_memory=False, + num_workers=num_workers, drop_last=True) + mean, std = 0, 1 + + return dataloader_train, dataloader_validation, dataloader_test, mean, std + + + + + diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_K_uv-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_K_uv-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..ccfcc23535474ea9304c02f8e62d2ae0a53b090c --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_K_uv-checkpoint.py @@ -0,0 +1,69 @@ +import os +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import h5py +import numpy as np +from torch.utils.data import Dataset +from torch.utils.data import DataLoader +import torchvision.transforms as transforms +import torch.utils.data as data +import h5py +import torch +import numpy as np +import matplotlib.pyplot as plt + +class WeatherDataset(Dataset): + def __init__(self, data_path, horizon, transform=None): + with h5py.File(data_path, 'r') as f: + 
self.data_uv_g = f['u_k'][:] + self.data_uv_g = torch.from_numpy(self.data_uv_g).to(torch.float32) + self.data_uv_g = self.data_uv_g.permute(0, 3, 1, 2).unsqueeze_(2) + + self.data_uv_k = f['v_k'][:] + self.data_uv_k = torch.from_numpy(self.data_uv_k).to(torch.float32) + self.data_uv_k = self.data_uv_k.permute(0, 3, 1, 2).unsqueeze_(2) + self.data_uv_gk = torch.cat([self.data_uv_g, self.data_uv_k], dim=2) + self.transform = transform + self.horizon = horizon + self.mean = 0 + self.std = 1 + + def __len__(self): + return len(self.data_uv_gk) + + def __getitem__(self, idx): + input_frames = self.data_uv_gk[idx][:self.horizon] + output_frames = self.data_uv_gk[idx][self.horizon:2*self.horizon] + input_frames = (input_frames - self.mean) / self.std + output_frames = (output_frames - self.mean) / self.std + return input_frames, output_frames + +def load_data(data_path, batch_size, val_batch_size, horizon, num_workers): + dataset = WeatherDataset(data_path=data_path+'/kg_all_20_mask_latmean.h5', horizon=horizon, transform=None) + dataset_size = len(dataset) + train_size = int(dataset_size * 0.8) + val_size = int(dataset_size * 0.1) + test_size = dataset_size - train_size - val_size + + train_dataset, val_dataset, test_dataset = data.random_split(dataset, [train_size, val_size, test_size]) + + train_sampler = DistributedSampler(train_dataset) + val_sampler = DistributedSampler(val_dataset) + test_sampler = DistributedSampler(test_dataset) + + dataloader_train = DataLoader(train_dataset, batch_size=batch_size, sampler=train_sampler, pin_memory=False, + num_workers=num_workers, drop_last=True) + dataloader_validation = DataLoader(val_dataset, batch_size=val_batch_size, sampler=val_sampler, pin_memory=False, + num_workers=num_workers, drop_last=True) + dataloader_test = DataLoader(test_dataset, batch_size=val_batch_size, sampler=test_sampler, pin_memory=False, + num_workers=num_workers, drop_last=True) + mean, std = 0, 1 + + return dataloader_train, dataloader_validation, 
dataloader_test, mean, std + + + + + diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..366fb2f3b6164487d70c32c1192609c9ab2f36ce --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi-checkpoint.py @@ -0,0 +1,134 @@ +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import netCDF4 as nc +import numpy as np + +class OceanCurrentDataset(Dataset): + def __init__(self, data_path, input_steps=10, output_steps=10, transform=None): + """ + 海洋流数据集类 + :param data_path: NetCDF文件路径 + :param input_steps: 输入时间步数 + :param output_steps: 预测时间步数 + :param transform: 数据增强变换 + """ + self.data_path = data_path + self.input_steps = input_steps + self.output_steps = output_steps + self.transform = transform + self.total_steps = input_steps + output_steps + + # 加载并预处理数据 + self.data = self._load_and_process_data() + self.mean, self.std = 0, 1 + + def _load_and_process_data(self): + """加载和处理NetCDF数据""" + with nc.Dataset(self.data_path, 'r') as ds: + # 处理缺失值 + def process_var(var): + arr = var[:] + if '_FillValue' in var.ncattrs(): + fill_value = var._FillValue + arr = np.ma.masked_values(arr, fill_value).filled(np.nan) + return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) + + # 加载并合并UV分量 + ugos = process_var(ds['ugos']) # (time, lat, lon) + vgos = process_var(ds['vgos']) + + # 调整维度顺序 [time, channels, lat, lon] + return torch.stack([ugos, vgos], dim=1) + + def _compute_stats(self): + """计算训练集的统计量""" + return torch.mean(self.data[:10000]), torch.std(self.data[:10000]) + + def __len__(self): + return len(self.data) - self.total_steps + 1 + + def __getitem__(self, idx): + window = 
self.data[idx:idx+self.total_steps] # [T_total, C, H, W] + + window = (window - self.mean) / self.std + + # 分割输入输出 + input_seq = window[:self.input_steps] + target_seq = window[self.input_steps:] + + if self.transform: + input_seq = self.transform(input_seq) + target_seq = self.transform(target_seq) + + return input_seq, target_seq + +def create_dataloaders(config): + full_dataset = OceanCurrentDataset( + data_path=config['data_path'], + input_steps=config['input_steps'], + output_steps=config['output_steps'] + ) + + train_size = 10000 - config['input_steps'] - config['output_steps'] + 1 + val_size = 500 + test_size = len(full_dataset) - train_size - val_size + + train_dataset, val_dataset, test_dataset = torch.utils.data.random_split( + full_dataset, [train_size, val_size, test_size], + generator=torch.Generator().manual_seed(config['seed']) + ) + + train_sampler = DistributedSampler(train_dataset, shuffle=True) + val_sampler = DistributedSampler(val_dataset, shuffle=False) + test_sampler = DistributedSampler(test_dataset, shuffle=False) + + dataloader_train = DataLoader( + train_dataset, + batch_size=config['batch_size'], + sampler=train_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_val = DataLoader( + val_dataset, + batch_size=config['val_batch_size'], + sampler=val_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_test = DataLoader( + test_dataset, + batch_size=config['val_batch_size'], + sampler=test_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + return dataloader_train, dataloader_val, dataloader_test, full_dataset.mean, full_dataset.std + +# config = { +# 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', +# 'input_steps': 10, +# 'output_steps': 10, +# 'batch_size': 1, +# 'val_batch_size': 1, +# 'num_workers': 8, +# 'seed': 42 +# } +# dist.init_process_group(backend='nccl') + +# train_loader, 
val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + +# for sample_input, sample_target in train_loader: +# print(sample_input.shape, sample_target.shape) +# print(f"输入数据范围: [{sample_input.min():.2f}, {sample_input.max():.2f}]") +# print(f"NaN值存在性: {torch.isnan(sample_input).any().item()}") +# print(f"Inf值存在性: {torch.isinf(sample_input).any().item()}") +# break \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_128-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_128-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..1aff21a4d2a5d775220deadfeb1751633d322210 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_128-checkpoint.py @@ -0,0 +1,134 @@ +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import netCDF4 as nc +import numpy as np + +class OceanCurrentDataset(Dataset): + def __init__(self, data_path, input_steps=10, output_steps=10, transform=None): + """ + 海洋流数据集类 + :param data_path: NetCDF文件路径 + :param input_steps: 输入时间步数 + :param output_steps: 预测时间步数 + :param transform: 数据增强变换 + """ + self.data_path = data_path + self.input_steps = input_steps + self.output_steps = output_steps + self.transform = transform + self.total_steps = input_steps + output_steps + + # 加载并预处理数据 + self.data = self._load_and_process_data() + self.mean, self.std = 0, 1 + + def _load_and_process_data(self): + """加载和处理NetCDF数据""" + with nc.Dataset(self.data_path, 'r') as ds: + # 处理缺失值 + def process_var(var): + arr = var[:] + if '_FillValue' in var.ncattrs(): + fill_value = var._FillValue + arr = np.ma.masked_values(arr, fill_value).filled(np.nan) + return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) + + # 加载并合并UV分量 + ugos = 
process_var(ds['ugos']) # (time, lat, lon) + vgos = process_var(ds['vgos']) + + # 调整维度顺序 [time, channels, lat, lon] + return torch.stack([ugos, vgos], dim=1) + + def _compute_stats(self): + """计算训练集的统计量""" + return torch.mean(self.data[:10000]), torch.std(self.data[:10000]) + + def __len__(self): + return len(self.data) - self.total_steps + 1 + + def __getitem__(self, idx): + window = self.data[idx:idx+self.total_steps] # [T_total, C, H, W] + + window = (window - self.mean) / self.std + + # 分割输入输出 + input_seq = window[:self.input_steps] + target_seq = window[self.input_steps:] + + if self.transform: + input_seq = self.transform(input_seq) + target_seq = self.transform(target_seq) + + return input_seq[:,:,::2,::2], target_seq[:,:,::2,::2] + +def create_dataloaders(config): + full_dataset = OceanCurrentDataset( + data_path=config['data_path'], + input_steps=config['input_steps'], + output_steps=config['output_steps'] + ) + + train_size = 10000 - config['input_steps'] - config['output_steps'] + 1 + val_size = 500 + test_size = len(full_dataset) - train_size - val_size + + train_dataset, val_dataset, test_dataset = torch.utils.data.random_split( + full_dataset, [train_size, val_size, test_size], + generator=torch.Generator().manual_seed(config['seed']) + ) + + train_sampler = DistributedSampler(train_dataset, shuffle=True) + val_sampler = DistributedSampler(val_dataset, shuffle=False) + test_sampler = DistributedSampler(test_dataset, shuffle=False) + + dataloader_train = DataLoader( + train_dataset, + batch_size=config['batch_size'], + sampler=train_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_val = DataLoader( + val_dataset, + batch_size=config['val_batch_size'], + sampler=val_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_test = DataLoader( + test_dataset, + batch_size=config['val_batch_size'], + sampler=test_sampler, + num_workers=config['num_workers'], + 
pin_memory=True, + drop_last=True + ) + + return dataloader_train, dataloader_val, dataloader_test, full_dataset.mean, full_dataset.std + +# config = { +# 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', +# 'input_steps': 10, +# 'output_steps': 10, +# 'batch_size': 1, +# 'val_batch_size': 1, +# 'num_workers': 8, +# 'seed': 42 +# } +# dist.init_process_group(backend='nccl') + +# train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + +# for sample_input, sample_target in train_loader: +# print(sample_input.shape, sample_target.shape) +# print(f"输入数据范围: [{sample_input.min():.2f}, {sample_input.max():.2f}]") +# print(f"NaN值存在性: {torch.isnan(sample_input).any().item()}") +# print(f"Inf值存在性: {torch.isinf(sample_input).any().item()}") +# break \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_64-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_64-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..946289ae838ea67ee555e231ebd6c87e66daedb6 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_64-checkpoint.py @@ -0,0 +1,134 @@ +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import netCDF4 as nc +import numpy as np + +class OceanCurrentDataset(Dataset): + def __init__(self, data_path, input_steps=10, output_steps=10, transform=None): + """ + 海洋流数据集类 + :param data_path: NetCDF文件路径 + :param input_steps: 输入时间步数 + :param output_steps: 预测时间步数 + :param transform: 数据增强变换 + """ + self.data_path = data_path + self.input_steps = input_steps + self.output_steps = output_steps + self.transform = transform + self.total_steps = input_steps + output_steps + + # 加载并预处理数据 + self.data = self._load_and_process_data() + 
self.mean, self.std = 0, 1 + + def _load_and_process_data(self): + """加载和处理NetCDF数据""" + with nc.Dataset(self.data_path, 'r') as ds: + # 处理缺失值 + def process_var(var): + arr = var[:] + if '_FillValue' in var.ncattrs(): + fill_value = var._FillValue + arr = np.ma.masked_values(arr, fill_value).filled(np.nan) + return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) + + # 加载并合并UV分量 + ugos = process_var(ds['ugos']) # (time, lat, lon) + vgos = process_var(ds['vgos']) + + # 调整维度顺序 [time, channels, lat, lon] + return torch.stack([ugos, vgos], dim=1) + + def _compute_stats(self): + """计算训练集的统计量""" + return torch.mean(self.data[:10000]), torch.std(self.data[:10000]) + + def __len__(self): + return len(self.data) - self.total_steps + 1 + + def __getitem__(self, idx): + window = self.data[idx:idx+self.total_steps] # [T_total, C, H, W] + + window = (window - self.mean) / self.std + + # 分割输入输出 + input_seq = window[:self.input_steps] + target_seq = window[self.input_steps:] + + if self.transform: + input_seq = self.transform(input_seq) + target_seq = self.transform(target_seq) + + return input_seq[:,:,::4,::4], target_seq[:,:,::4,::4] + +def create_dataloaders(config): + full_dataset = OceanCurrentDataset( + data_path=config['data_path'], + input_steps=config['input_steps'], + output_steps=config['output_steps'] + ) + + train_size = 10000 - config['input_steps'] - config['output_steps'] + 1 + val_size = 500 + test_size = len(full_dataset) - train_size - val_size + + train_dataset, val_dataset, test_dataset = torch.utils.data.random_split( + full_dataset, [train_size, val_size, test_size], + generator=torch.Generator().manual_seed(config['seed']) + ) + + train_sampler = DistributedSampler(train_dataset, shuffle=True) + val_sampler = DistributedSampler(val_dataset, shuffle=False) + test_sampler = DistributedSampler(test_dataset, shuffle=False) + + dataloader_train = DataLoader( + train_dataset, + batch_size=config['batch_size'], + sampler=train_sampler, + 
num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_val = DataLoader( + val_dataset, + batch_size=config['val_batch_size'], + sampler=val_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_test = DataLoader( + test_dataset, + batch_size=config['val_batch_size'], + sampler=test_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + return dataloader_train, dataloader_val, dataloader_test, full_dataset.mean, full_dataset.std + +# config = { +# 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', +# 'input_steps': 10, +# 'output_steps': 10, +# 'batch_size': 1, +# 'val_batch_size': 1, +# 'num_workers': 8, +# 'seed': 42 +# } +# dist.init_process_group(backend='nccl') + +# train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + +# for sample_input, sample_target in train_loader: +# print(sample_input.shape, sample_target.shape) +# print(f"输入数据范围: [{sample_input.min():.2f}, {sample_input.max():.2f}]") +# print(f"NaN值存在性: {torch.isnan(sample_input).any().item()}") +# print(f"Inf值存在性: {torch.isinf(sample_input).any().item()}") +# break \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_single-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_single-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..1cae5cc0cf53e5ea57192eb38774e338d343d117 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_kuroshio_ruiqi_single-checkpoint.py @@ -0,0 +1,96 @@ +import torch +from torch.utils.data import Dataset, DataLoader +import netCDF4 as nc +import numpy as np + +class OceanCurrentDataset(Dataset): + def __init__(self, data_path, input_steps=10, output_steps=10, transform=None): + self.data_path = data_path + self.input_steps = 
input_steps + self.output_steps = output_steps + self.transform = transform + self.total_steps = input_steps + output_steps + + # Load and process data + self.data = self._load_and_process_data() + self.mean, self.std = self._compute_stats() + + def _load_and_process_data(self): + """Load and process NetCDF data""" + with nc.Dataset(self.data_path, 'r') as ds: + def process_var(var): + arr = var[:] + if '_FillValue' in var.ncattrs(): + fill_value = var._FillValue + arr = np.ma.masked_values(arr, fill_value).filled(np.nan) + return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) + + ugos = process_var(ds['ugos']) # (time, lat, lon) + vgos = process_var(ds['vgos']) + + return torch.stack([ugos, vgos], dim=1) # [time, channels, lat, lon] + + def _compute_stats(self): + """Compute dataset statistics""" + return torch.mean(self.data[:10000]), torch.std(self.data[:10000]) + + def __len__(self): + return len(self.data) - self.total_steps + 1 + + def __getitem__(self, idx): + window = self.data[idx:idx+self.total_steps] # [T_total, C, H, W] + window = (window - 0) / 1 + + input_seq = window[:self.input_steps] + target_seq = window[self.input_steps:] + + if self.transform: + input_seq = self.transform(input_seq) + target_seq = self.transform(target_seq) + + return input_seq, target_seq + +def create_dataloaders(config): + full_dataset = OceanCurrentDataset( + data_path=config['data_path'], + input_steps=config['input_steps'], + output_steps=config['output_steps'] + ) + + train_size = 10000 - config['input_steps'] - config['output_steps'] + 1 + val_size = 500 + test_size = len(full_dataset) - train_size - val_size + + train_dataset, val_dataset, test_dataset = torch.utils.data.random_split( + full_dataset, [train_size, val_size, test_size], + generator=torch.Generator().manual_seed(config['seed']) + ) + + dataloader_train = DataLoader( + train_dataset, + batch_size=config['batch_size'], + shuffle=True, # Changed from DistributedSampler + num_workers=config['num_workers'], 
+ pin_memory=True, + drop_last=True + ) + + dataloader_val = DataLoader( + val_dataset, + batch_size=config['val_batch_size'], + shuffle=False, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_test = DataLoader( + test_dataset, + batch_size=config['val_batch_size'], + shuffle=False, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + return dataloader_train, dataloader_val, dataloader_test,0, 1 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_test-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_test-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/dataloader_test-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/input_output_animation-checkpoint.gif b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/input_output_animation-checkpoint.gif new file mode 100644 index 0000000000000000000000000000000000000000..a35c98212fcb0be738b2861c967216404e4bac9f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/input_output_animation-checkpoint.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ad3a31851d266a90b2badc436562f2151145fd07c6b24c04c47bf8276af26bbf +size 170914 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuro_vis-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuro_vis-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..d29c4b9d0a3d69c9557edd68b1ef4b2885918b36 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuro_vis-checkpoint.ipynb @@ -0,0 +1,452 @@ 
+{ + "cells": [ + { + "cell_type": "code", + "execution_count": 3, + "id": "8c9ccebe-017d-40d8-9d0f-bde54956849e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\n", + "root group (NETCDF4 data model, file format HDF5):\n", + " Conventions: CF-1.6\n", + " Metadata_Conventions: Unidata Dataset Discovery v1.0\n", + " cdm_data_type: Grid\n", + " comment: Sea Surface Height measured by Altimetry and derived variables\n", + " contact: servicedesk.cmems@mercator-ocean.eu\n", + " creator_email: servicedesk.cmems@mercator-ocean.eu\n", + " creator_name: CMEMS - Sea Level Thematic Assembly Center\n", + " creator_url: http://marine.copernicus.eu\n", + " date_created: 2024-10-23T12:55:06Z\n", + " geospatial_lat_max: 89.9375\n", + " geospatial_lat_min: -89.9375\n", + " geospatial_lat_resolution: 0.125\n", + " geospatial_lat_units: degrees_north\n", + " geospatial_lon_max: 179.9375\n", + " geospatial_lon_min: -179.9375\n", + " geospatial_lon_resolution: 0.125\n", + " geospatial_lon_units: degrees_east\n", + " geospatial_vertical_max: 0.0\n", + " geospatial_vertical_min: 0.0\n", + " geospatial_vertical_positive: down\n", + " geospatial_vertical_resolution: point\n", + " geospatial_vertical_units: m\n", + " history: 2024-10-23 12:55:06Z: Creation\n", + " institution: CLS, CNES\n", + " keywords: Oceans > Ocean Topography > Sea Surface Height\n", + " keywords_vocabulary: NetCDF COARDS Climate and Forecast Standard Names\n", + " license: http://marine.copernicus.eu/web/27-service-commitments-and-licence.php\n", + " platform: Cryosat-2 New Orbit, SWOT Nadir science, Sentinel-3B, Altika Drifting Phase, Sentinel-6A, Haiyang-2B, Sentinel-3A, Jason-3 Interleaved\n", + " processing_level: L4\n", + " product_version: vNov2024\n", + " project: COPERNICUS MARINE ENVIRONMENT MONITORING SERVICE (CMEMS)\n", + " references: http://marine.copernicus.eu\n", + " software_version: 8.0_MIOST_DT2024_baseline\n", + " source: Altimetry measurements\n", + " ssalto_duacs_comment: 
The reference mission used for the altimeter inter-calibration processing is Topex/Poseidon between 1993-01-01 and 2002-04-23, Jason-1 between 2002-04-24 and 2008-10-18, OSTM/Jason-2 between 2008-10-19 and 2016-06-25, Jason-3 since 2016-06-25.\n", + " standard_name_vocabulary: NetCDF Climate and Forecast (CF) Metadata Convention Standard Name Table v37\n", + " summary: SSALTO/DUACS Delayed-Time Level-4 sea surface height and derived variables measured by multi-satellite altimetry observations over Global Ocean.\n", + " time_coverage_duration: P1D\n", + " time_coverage_end: 2023-12-31T12:00:00Z\n", + " time_coverage_resolution: P1D\n", + " time_coverage_start: 2023-12-30T12:00:00Z\n", + " title: DT merged all satellites Global Ocean Gridded SSALTO/DUACS Sea Surface Height L4 product and derived variables\n", + " copernicusmarine_version: 1.3.3\n", + " dimensions(sizes): time(11322), latitude(256), longitude(256)\n", + " variables(dimensions): int32 vgos(time, latitude, longitude), int32 ugos(time, latitude, longitude), float32 latitude(latitude), float32 longitude(longitude), float32 time(time)\n", + " groups: " + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import numpy as np\n", + "import netCDF4 as nc\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.animation import FuncAnimation\n", + "\n", + "data = nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "data" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "67fb44cc-3e54-45cc-8bec-2166624c38b5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(11322, 256, 256) (11322, 256, 256)\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import netCDF4 as nc\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.animation import FuncAnimation\n", + "\n", + "# variables(dimensions): int32 vgos(time, latitude, longitude), int32 ugos(time, latitude, 
longitude), float32 latitude(latitude), float32 longitude(longitude), float32 time(time)\n", + "\n", + "data = nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "vgos = data['vgos']\n", + "ugos = data['ugos']\n", + "print(vgos.shape, ugos.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "561bed40-5e78-4f11-a66e-ded8032f727c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAa4AAAGiCAYAAAC/NyLhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9XawtS3YWCn4jInOutfbZdU65fly+hatsqwEb6Ma+18aFBbygQpaREMb1gC2EkEFNP2A/UOIBS4DxkyVAwvw/IMTPgyWgJZAQkiXw1RUtVAZUSN2X5qfB7W4bXKfsclWdfc7ee62ZGTH6YYwRMSIycs659inbvXzWkPZe8ydnZmRkxPjG/yBmZjzSIz3SIz3SIz0QCr/aA3ikR3qkR3qkR7oPPQLXIz3SIz3SIz0oegSuR3qkR3qkR3pQ9Ahcj/RIj/RIj/Sg6BG4HumRHumRHulB0SNwPdIjPdIjPdKDokfgeqRHeqRHeqQHRY/A9UiP9EiP9EgPih6B65Ee6ZEe6ZEeFD0C1yM90iM90iM9KPpVA66/8Tf+Br7+678e19fX+MQnPoF/+2//7a/WUB7pkR7pkR7pAdGvCnD9w3/4D/HpT38aP/zDP4x//+//Pb75m78Z3/md34lf+IVf+NUYziM90iM90iM9IKJfjSK7n/jEJ/Dbfttvw1//638dAJBzxsc+9jH84A/+IP70n/7Tv9LDeaRHeqRHeqQHRNOv9AWPxyM++9nP4od+6IfKZyEEfPKTn8RnPvOZ4W/u7u5wd3dX3uec8cUvfhEf/OAHQUS/7GN+pEd6pEd6pK8sMTPefvttfPSjH0UI9zP+/YoD1xe+8AWklPCRj3yk+fwjH/kI/vN//s/D3/zoj/4ofuRHfuRXYniP9EiP9EiP9CtIP/dzP4ev/dqvvddvfsWB61Xoh37oh/DpT3+6vH/rrbfw8Y9/HD/3cz+H119/vXz+f/8fX8YUCL/lf3oDAPBP/5+fw3UMWDLj9/3mr9mc96f+P1/E+64iribCmhnvHBO+8HzB+69nvP8m4ukc8UsvV1xNhEiEF0sGAMRAuJ4IVzFgIuBqCohO8csAvAE2MyNlIDEjM8AAMuQzQCSPzEDW3zAYBELYUSblHPJ7BpAyY8kZnIHEwJJZrsnyd4+CaquRCIEIc7D3AKkAZN/FQCAARPYZEAggIsQABFCZA3Ln9nMg94ZmDtp52h3qkNidU95v58heZ5Z5ysxYEyOxjGnRiya2Y3kzNwDK3MyBQAGYQ2jmJBDKMyOSObCxMbtnr/duv5sDYY4EP13MwJoZz4+5PNfDJNezezAq49SxTEGfBxEOQZ5JDCSf6/MKbnz+tVw819e6COxq/vmwe2/PwX7pn0P/jPrzNJfu9oy/lv+drZ3x78bn7mm0t3rjTXAzc86uU8bo1rSfH79GbW3aegDkeds921ocPIqyHwF51jZuW392b3Yvss7qugzlKNnngaj8vrzWcwcCiDOQV1BKAGdQTgCvMjjOoLwOJjKAKQBh0tcRmGZwmMBhwpLlft969gzf8pu/Ee973/vOzO6WfsWB
60Mf+hBijPj85z/ffP75z38eX/M1W3ABgKurK1xdXW0+f/311xvg+uALWQz22Uc+8BK3a0ZecnNcueb6Fp6HCd/4xhMBkauEZVrwwScHXE0BUyT81g9NeGfJeHaXkY4rsi6m6RBBAaBAQCRhLMokekqZFWA8g9RFrAt9yQZE8r0Hrj1rqJ3PGJ2cVwFLx5n4/E425hxCuzFGjDmGClzGDG3B22ujDVjx9v6ByoSMUdgpgtt8dh77LdDOn5yPi9DQM4fVgfmS7DVw2AEuTw3Au3sOJKDTA7tnIv4+RpS5PkO7xyVlJGa8WBcszEiBEZmAJMcujjtHIlxNATMRrqeA68OEOYoQcogVsOYowpcXPAgdcHlOabQDXv79CLiyY9b+ebX3vp3rU7/ZAwV/7f43e3unN0x5l8Pevgsn4KsHUj++3H3uBSoTZJJbnwCG+9fvU3vfrz0bfw9iwd1jA2IEtxbqe1srAcIHoq2TLKBFyYEX83btUACHKH/jBIQJTAFLBuYs93m35s3cX0q/4sB1OBzwrd/6rfjJn/xJfPd3fzcA8Vn95E/+JH7gB37g3Z07UDMJ3/mN1Rz5f/3ffx6/9OKIL71YcLdmfOOHn+L1qwlP5ogvvVzxO77hg7vn/fwvPMPtmvG5t+9wPQVcTRFhBW6mCALAQTdLt0BK3ItKypBDwPIRMoDAhASWjRQAZiflbTYOth9kApPJ8HKFEVSlThQ9yaBDZbsNo9PXBl4G1LbQYzdeImrAyt+/p+g3lBuLbaJ23I4xEkBu/tKF0nYkqppVIOTMiERIzGUO/Pxk/TwJUuqg9PsEhMDIXJmI/JTL81sHeGBn9xrUkqqWnBj6T8D3bk0F5Jp5i0DOXCYqM4OZGsbPJPNGkPtk5u3iGoGWfU5h+Nx6YaFcH+1zwuC3/hzN5Tqg8p81Gp8DhM3v/Dnda7+Wkv4t08Bc9ldi3cPc/ibT6QX2lQhzs3Uo42hPmFien4EagoJc4rL2bBxEdd6JSfaK7pMMuTcmBkHuV0CMyx5MBm4ERIYDsUlex4NoY5zBunbIrSEWqb5oX4mBpJYOEdL54v06ol8VU+GnP/1p/JE/8kfwbd/2bfj2b/92/NiP/RieP3+O7//+739X5/31Xy1a1c998R187ANPm++ezBHxtSs8PUxYUsbTQ8STOQIAvvRyOXneYxLpYMmMmZVRKBXTUNE8nImPlIEo4yPi4Q6W3xMCM/KFEmL5zMAunF4FU6Bu4+si78fixtSbv2ThViDrQctLdJlR7mdvv/ealQGYaW/yXf2MWT5PzLIZ2e5AwKuZwzObQs4rm6cHL/v+HHgl5iI8ZDBCYNiTqgBWhQZ/vpFknVUbtNe2zkzLEpMzl/MDQMgA4vgeM9d5NMHK/u7SyD7VH4JW0/L3lrgFsdSsuf3L5sED29Ou5PjtOTcg2J0zc3vjgbrf25rTaxHVPdMfu8H9ezBhAm3GdinZuikAFuoaMeEJqMKnLFX5DirEkBP0ipB9BsRiQPm+mpwDiAJGcRVljUDWeNK1a9plGrPCi+lXBbj+4B/8g/jFX/xF/Lk/9+fw5ptv4lu+5VvwEz/xE5uAjVelm2m7Mz/0ZMbNFHFQf8Lbdwm3a8Y7xxU/+9btyfPdrWK6+cD1jBDEDDOHgEMkXE2E6xhwHamo1J5y5W2y+JUh2kMjEq0LQSTl6H53LlOBSI9Xqaj/ridvVunNFs1vOzPlRuMiM5VVk0ID2DCNUoAklfeyKTLzEJgam/voHqgylKLJKUoxi+aZC3duKdoAACCwjCPL+ADHtJgawcRoAzp6rgUiiQciIBMW/WLpFJjs3qeO4ScVhrwZMGd5nRyA+XsJJKa/OQY1X1IxXxoVrSQTKDASy1wREciBWv1B3r7fA6/GLFu1LGNOI1Pw3nI+55c6Zw5sfW88/Ny+NQpUNfRiVrPfDbSw7Exvdn2/PP1r5iq8FWEKVfjrx0Wo69OvMy9I9VS0slSPNeEpUl3rJkD1Gpjt
SbBYBhLX+/eWjrIXswmyVQjvLSQ9jbRvE2x6k/d96Vclj+vd0rNnz/DGG2/grbfeGvquvvDsOT70+mvNZ//7z7+lARgrXiwJT+aIL75c8M4xFWD6P3/71w2v97/99BfwYkmIBFxNEdcx4I3rCU8PAl7X6kvwPhmb1GQAUcw/3DxIO67VhlrfTf28vrGgCKDaqj01Jo4T5+5NLnK+fXu4Nw8StkDTX9fOb5qCLeRy7u4cJime8m8lN59mdmDIa7sv8xuNHOGMVgvy2o831clnVaI1im6ygxNW+oAOT70Pbe/8S+YCYnYfOXPzDAOhCFCRCE/mqH6tgCeHWASKOZh/gjCV1/WzaFq1Adaej4tCK0E381uZ0YZRnfBJGe0YKId0Dqz2fGl71Gv81IGTrUl5vf0O2Pq9RubNkcAIuCAM3vq5Rj6uvUCr1geL+nrH9O/vo8xFdx/b+2yDPOze987XP4ue5wDAs7ef4Vt//cd2+fgpehBRhfelxMD/+NI7+HVfVc2FRPLgb9eMF0vG1RSLYzszI63bRfH/+Pm3cLum8j5QdXTPAcUBfoj9Y6/gVZgwqQhmhgJCMXeZBsLux8TiqyJym7/bWCPnqr9foJrXgNYvVM8tWpsHE//7U9cYkblbemHKJL9e2/KgF9110DFTUsk/GBPVeQw6wSmrFkWsph7VcgE1oTBgUjAgQTUAGISgg12QG00sm+gsk7+hcCrq4itMjTbrQGt2atM9U2HOk/kpOmIHWlWgGIPWKEhB3t9vKOe0Km6+P08Ez1TVGgLeMHBP50DLPsvq37Tzk/kV3Xn29skenYoONjM2IMLXnunOvxAz4fCI8r25cgmmobHej96HBZKd4AmnIk7TfaSWjn5NAtcxMY5nPH9zIHzoyQFLlqCLP/y/fKz5/t/+7BfxZI64niKWxLiOAVdTwJNZwucPUbStOZCEhJpZxW12AhpAokDq76GiMYiERZuNTLJiwKDCDCIJQ7ANM4U2iGFPba8O8nouYTStE9+DnZek/Hm9GaG5BsSn5f0CQDWV2ITEBly76DbOxabWh9myzSkFcRDbLAnKgYkQg6BVpurvApl/zH5h56tSuoFYDFGkXmIERgNgMVLjr+m1LPlsfwP3wR9lVXT+tWiTke0YJ/bD+RdJIhyD+wuI1B42NsByKTke3TOkcNosiFbb2nzvvjul0TdAdg8Px6kADnt5Lvio3xPmIzQAK1ZkMl9R9QdSs2YrYO097sAVvMw06P2vJg9dGpzQa+nNfZyQnYpfNnMN3HDf72GhHV+ntB7YLBtqv9/TuvbokojnPfo1CVw/+9Ydlsz4P3y4zQ94sSTMMeBDTwI+9voV3lkSXi6E/+l921D7G9XIriOQcsSSGW9cT84EI4tmIgA5g1iNBMYAnNmFAIACIoBYTC+kwEWN47Icj7rQWqmybhqvBfXBES1Rw0SqCW2g0iuz3yPb5IHs93KiQCKVWUyjN7FszCz6XaNdZQut1XnT9+UOKAAhNFos6zwQ178xiEYpTmu17+uPWIFBoRHlVqmaEwXE5C6WlDGDiulmHsyHpRDskd+crYRO5ZzJosO4+rsQgUVBvGdY5gs08LJcMDEJtjl2RLZW6rUzZE685O+FLaMesPz68Sbv8ettagJQwc3OP5yzExxvpHnclwF6Zm7raUT+sfYmc/tsOEYAUdeXAJ9aTdTqYj40UkuAySi9Hzaf0UhOgVaJQPRqpY3vzBzuXffUPPd7oNf6GvO6aYevjlu/NoHLzFH/4XNvAZCNsmYuAQVzJBxTxhQI3/qxr9o9jy3MKRCezEEk3QBMlh8TqDLbnKvA6jSH7UklPJTChEihglRAIwH1pr4RoIz8S0V7cccx7BzUScZO+3KbqfUljFeXMTzb+CbdmbYmgRjtPNLovYbUngKthna0Ah/4EpyGlUnNsMo4FBPEhEF+nsVIxKT+JAZmdSIGi7Dp5qPRtjouZsz3pGk1yPkMHA3MEIVJzFy1PPOFmXnS
g5fPJZPoL5dUit7vWEPizfR8inl70DILAUPMPM1r1bJS3s+ls/OdirK8D51j7EBloMbI7bqnnov342ySdzvQGp2lWFlMMGAqofTeAsFUzdg2ZzEQcrJnDbRhxuM52ruX/p7774x6f1r7XOyYfbDbghYBqbVKlOAnt31PCSjn6NcMcP3MF97GMTFeLBlzCCUJc8mMNUs4u0mlcwx4+5jxP3/t+3fPZ88iMfB/+ugb5fOf/sW3MQcNyFBtq1BS85Yl5Smxf7BZtYeYgXgQDQz6cLnahIt5omgKZD8vtAtYuY5DziVaXqSqsYBIQ1JVMoOaCNX/Vnxeg6259Q3oe+bym34vbbQsG18PUJdwI8j4Tq37GKAgVSVfoJN+IZqHBzEPYHadED2Yb+djxDfIocUpYPDf2CV6xt6Hzu8loFqqApGP9txekVn9f8X/IAeNAnruq2WVYCTUyi52T+cSbOu1t5+d86168ozU+3w8I7cxeYFD1qZ7r38riI0Ba29sxXdNTiAMNc+u5FUpeNltN+dT8/el99xrOpt77gDLg9VewNDou9G55foVtIaJ+ywgFsJpv905+jUDXMckm+erbiKAiJRR/FxTIGAK1fSST5dCAlAipw6dJH0ziYR7iASko2SQ51VKoQBoggoUMKjLwmJkATkHKJKoBxSznv+Bl+z9x/q3MblZGRYPALqaWfMuCliGULQwUmZjoeYl04RagOinrZiaXpWcf4UBIAbVvOqYy3H6z3JALEjAk+VfMdy43PhIElmc47wyEu8TM1NiDWy5733Vl97pv4nmGs6dAzNjGEVjUcHGna86y1sNwVfIsHOIeVAm0AQqM+8ac/Jm46pxtRpWD1YeyHyVkj5PzZs9R1L8PnETDNObyULHnPfAy1Pxc1EFLfEfjjWtkb+3Xn97frJcSScQ+sCowGKyLAJT8HtQjjNtv9XM70f9b/aeyaLREn2KhvFC+W0V6HoK5C0CrS92ZkImjXoMY4H4PvRrBrjMlHeIwoRSFpPR88U2H2MONTk0n/GMEoDf/DVvbD6f1UxIVvbEQMtKoShwNQwXndZlgMYu7ZJzEzlXGVUv3bVjtN825rbe1Jbt+FxMlVYNwa4l5vXqQLZ8K7umjcebMItZsEiKW83CTFL2XQN0ZirtggM2T8ZADSZQVE2gz4ljruA1oj5I0Mw4paIA0AST2HGn/AnnqZXiR+WFbPx75DU+zzRGkW7enNWf0j8rY0ajvPgerHzghS81ZkzN9tyS825awas644u1wZlL+0CZ3oQ70gb89z5E3IMWnQEtn3d4ylxo4wJajWsDYNGZVXWveDOrmd9NvJjRmtgu0VpOmQXr5612NUrJkO9PmXVJq3hA5U4CwECGS+62Z3l22Cfp1wxw3UwaIhxEill0op4v9UG3ZoHTnMibBz1J7TcC8iKgxazgtTa+LYpTBS9Po886cxlRKGDhNxfQalb+tyf9Q/beHPAOMP21LgWv5tQO0LxEL6Hp1dziAWssrW3nZRs4In99/ktPHrz2yPxgAErZLQMv+azVcPv8ObjP+lDnEfVARd0mPmeCOlVw1s7ZXK/7ffmt+82e3DaqL9lHCY40LA9YPonaF3q+T1Rcr0HJsVsAs/OOztODVw9kXtOSQtFb0LKKETamkVAwSkMxqjIQnwYw1fyr5aOarS14CFCe4CIEI+hiIBv5svbWwdiEe9oXWQNC9GAFL6l2067Td5u68WsGuOYg0WsETbgjCY+ej6oBsVSAn4JKUK8oQh8CIeYjaD22xSV7TWfkv/F5Mab52Of++LyCwrTVrqxS80irupR6u4lqeoHGjKWUObLDC4h0BxvI6OZOzE5K1XsoDPuCYfpTu0v11/XvRqbB5pymbZjUp2MsUWD2802Nqu0JfU3JU6rSOZAaMUMv1ffX35v/ETsZRox2x2+O6UBKPtv6tfaKFi8pN0A1SuAGHAgBGOXDldSAU9Q4fP3BJmypBm1bzmlajSlVBaxovGGgZZ17
RsNKLwC8j9rmETCNymu2/j0NUgpMYKramP120ujIpAFovW8UOO1T7CkSkMu9sNyEAnvIp83m9hismktPvq7mffyWPf2aAa6XKwOohU3t+VxNBErA3crq4wqYgpj7/svnnwHwmgLwm77m9ZPXibyWyshMBFAEWdADra1248LiCerDciaxYlbU142GxnnLLvdMgXvUazE9WDog3VuMtgl609HebwywgIG2SO37PbqP62NoOh0Qu2ONgdgm782H0TGJXSIM6+ttxoeOwQ2Y4V5C9t48yfy0mp6N3YOU15K9yc+O81UeRsEYxiDtmmYO9P6rUkfRaVe9qWl8D3Xek5OYGt+Un8dADZidMwfely6pUN6bXv2zsu+BgbncghRQgcj7kkEouZxm9kzOF5sYJYioWjEIgEvqZhRrE7AN6NmrwFG0oUCafC/PTcu4lhJoJS1kJ9DDU3Rr16dsWASsVG2hTSTufehBA9fd21/Cy3nGl24TfHFPTxIaUb9Ycsb//LUf2Bz3337h2UkfgxEVXxZL2X7I8iEt278pUjqqPsDZOVlCATAPZEM6B1YjM6TTrtiPxR07up6BFLDv7wDGzNv4UGXa9ZhzfoFzUNCbbfaCQ075uUbnbIBNf3bO5+QdzBeB2HCcdbymBdjntPObqojV9AYL2mFlZqFz5DfS/Uaar/fUgxWrJjACrJGWZUWC7fenTUujHLjt8VJQlotv65QmNkoMt3OcouKLBcoE551reQGjfrYV1Dx53yKINsFQQJuSgqx7DSJImSkRuSYxi1/uhHBwBrSM7B5HABYIu3U8T9X17lsA+bSNEN6dtgU8cODK07Us5EBYVy6h717rMokCkMlcdoy6UySsl6SyN9pWKMDApgX1/irzGfQPKmtUYXIaGtzLPZDK7vjOUHzSp7YDWD4IpGdk9rkHLZ+z05uTtuQWuQ2nk06Ht9g9Bq+xmRmHwZpwPD7RObPY6Pv+eqfG12v2l9AIZL325fPeCNhI+EaemTLqMYytudfutc+1KkET3JqdynkcUAHYgJUPtjCNC3CBGuV3FcR6Ck5r9f4quUd/59yAV50HKr/tw6/l83qcb/vhyVwxlh6QVYwhKPjr9aWUU+vDlQjU9px7FoASqOG+YyKnhVVLAEEWir1OqH7YvQ4I9sx68j5GP79+TKP3MZJqWTpPmSUBP47zvXoysAJQ8wwVtOYQmtqur0IPGriIMyZkvH4IWCbgmDLeWXLZnAvLprpbM966XaXh3o64xiyL8P/1C8+Q8gmTode2XIQeOrAp2tMofGZUXsd/ltbTx9KOFjU6ZhCpl7sFb5/tVfzuQSvl9tjye+wzKaNS7+yCcNgWsOT5MDGiYyK24ftN0N9vf0+XXvsSOlX9vPRwUg2tZ34NY6RqTgqO0W2GMlgTjRCiwOHD1q06SwaaqEAPUgCKRlVeO6DqPwNaADsFWGPNqwoxfcCFUXBaqDFAeR0a7cqYoi8uC7SlnYDz5mrfqyyDi+/Tg5WRBPZwA157FoD2jqtrorzugqK85lxeaweEU/J1rxV76n9XhYTtefrSZNHXxLTk/p2NNEqS98+mxBm8Z4ErHUHHF6A44UAB83xAYo1yIrO/A7drxm3KuJoCbqZx86Jv+NBl7aNptAFPmd9CF6ZuiQxA1Z5G2lUxH9qyddc6A2bnWq576n0b52jInDup3Q7pJUBf/2zc7rISUY1E8o3woGaToD6AEsGI7Ua4r4mzNf2d3lj9eTyAt+d049Fx9+DFDZDV0HzP5Jqh7KQN+OoW2f018PKgteev2gus6CMDe8AqxzURbpcFA/TkzaVmcpL3NDRBeabow9yB1txbzdT7Wn+NGkWJUG3A6gww3Yd85LAFRwVAS6fVz74SdOo8vfnVA//IpGdrN56w2Q41LhcYY9Gcr0oPG7hun4FmBjSxFmHC6zdv4PmSm8rw/+t//UUsKePJHHEzEf7Hl95BZouMkmP6uoa7ZCa+rnsfu9wrT0F9WMwZoM4s
SAZoO/fHLKAHD4gD0Co/qAytNwE2Y+2YycixP5SPe/MEV9CyKgm9iQmo+AxsrJsAxv4H302YtNhggFyLzWzCtaWIaV7t2PYrlvdzYNqc3WfQcksjU7ydq52z7TkBdawbIwak4SUgoc5q8kSur6OlETAjgQrz8iBm1wO2Jl7ThkTrajUvD1pLypuIwL3w9d4MWO/NP+MWsPrvz5FPXLX79H4RoGpZwTn5RwwR2DLFjYnWM+ozYxuBVZPyweNcxQ7zdsnv0UuERzMX8gXAdt/qFKPWPKM9m89U8+i13xFovXc1rryC1iP48AQIEzjEJlTV6I3rSSu7B0yaoHx0TuR7Pdswib/K8rT0nzUO7HN+GFJYl0aBGw7QmjwsJSZqQKt+YcfqaZpJ2fqwTrUWAM4vbh99FxW9rK2Iv/gpCb5ebHx+o+qfYABB60FCawhqGSOVEJlRtBWg05gaBn668KsWeWpCowsTHIBXr12dm1/wtllfFBQW341rypdD5+Oi1j+YBtf0IO3ByQQzA61VHf5LyliSRNn64IqtloXyGtiahvZMrucAy4NT/7nXsipIhY2GNYfQlLrqm5/2PtVLqPFPDX7Wa/eXgld7jvqa3We9md73ODtFFqDBvC3SiyzzVYIsLtjnwNhH2B8zqt25OY/Tfnst691qrQ8auPwqMDABgA+870lzmLQiCZXZQTbXb/zq06HvI+IQBbTCVEDLKjkAbt3AFqM8IQJKqaVC9l5D4YnVlOiYuxnwGhNl59fahNIPqPVHtYvtEtw28CKycHF2wCHmO9s4p2zrI1+GhTn3Ic5Z1ahpsMq9Ccde9gEXphkZ8zbA6gWWUXfYGLgAzblu0v5cu/4zZWQJyhSLgwONBobMJRfOAlCkFxI15tVeOzat0uda+ftOBdhEsEgOqPZC2PuKCXt0qWbVMsD6+V7OT/8bbxb0EryvejEKT78PtZrYq3HXuh73v4f7fuRb9uQtBv74zbg78IoaSCFRgiggtkcjLasKGRWMmt/szJEHLPn9Vw60gIcOXC5n6uqrPrL5+kvvvMCLJeO1OTQb8uMffLo59uJLTgfAQAsGgltFQhyrLXgBO1FHvTbmwcs0M3Lv7ZhmYK0J0ZbnJZuiH1dPZusPVB3FwjS0mSNac4e1EW+u00vr7n0t4XO6aveIRpJtbxpkx7wzO1OWG4P53whatSDLRpPcOx5K4BvNy41ixLTsI9863XpxMXkTotduUU2KXNfM8H65Bena8BElYCmrNpy55l69eomf07Rfckn/dubBV6FRAI//bPd32K7R/vue6p6S7Tly8fTrcfS87LiR1ly/O71fN+M1vFLwsooaJnB6ELuUPGjtBbmcopHZ9ivlH3zQwMVXryHfvCFAMqAv3yZcRSpawpOZcDfodHwviocB09hKwuS0dtbVxlQ3lmhg3f34II89BaqvtuE+N/Dwtfz8BvDBCT3t+XPK6RW8zIQFEBAYnIBMKL2FbAeFpqo1lxXbA9heF+FTIcxGGbzxRRUwcdGPvkRRH5RgtOTtRjUNx/tONmMoUnO500Kj4BQ7yjOAGh0HRAt5JtEERwVee/LP1vvxsn2OWq/Ta1tmyh2Blgesc2HPwGltao9Gv+nNhKNQ903Vig60qjWrnr9fH+bHPEf9nhCTuXxoc+rNhQAwKB2wob1gqNFM++c6HCNKSWzVQOU3Zqmw1Wb95PbaifTP+RKT36U0Cpw6pcWfowcNXPnqKfj6dSQGtq0g5QEdqUqbianUUbsP3b58CaRjAUjh2zWtmYhKTbveZ9naqQkR1RbOep6N/0s/B07YyBmw+n7eNLQHWNuxbGnU2dWH7Y7IO4pZryHaCwE9eAEnOwl723rvdN9z5lpHaA9Ydq/yvtLIVNpkK/iMSjO3aIDIaNpGINXPr6+z5ws7e6bAqtHJsxIBgFTjYq2AKxrF9vm099oGn7CCln9+XtuS8WEIWuX4nfXS+yXPaVGnJG2fjyXv63rY6ztmAtQlDPSU
tmy0F2XKOveXRBNW91I9V29mHgXw7GlX9RyoPb3YxoauFqgKjt1YgApkRhXQdm5AaaRlfSUCK94NYBk9aOBawwEvVpEWX9s5xkdU3aZ8v0AMpWMGYjggIleA4dzUExQQqlze1kDDJNklEKJdFH2R2cyt+a2nQCgBIYD34WzByo9jd3Ps3bw/RpmnBWqw5pVYoAapmUtA1TiZgJcxJ18SysgDVnQM3UCrkeBR3xem0t2VZ97AVrrbq6ph2QqmlXiwGZkw28TOHTC0z7oxBgXJJaPpWhA0TyeQaehVmyhRj9vTb6IbTdvyZsLe5HdKiLmP3ypQC1hee2rv+ZT2pcfQIIyaaFeYAcbA1IP8JUDVkxeKhsJCETa4uZ65CeS4He3Gn8d/viMkGYlBQ4QdAy9mEzC9MNGarm3RNPyku87IunEKtC4RCE7RPlSfpwcNXM+PGekgYb0fHnx/M0uI+t3KuM2Mt26XwiTuQ8nMA7nWKQQFcJQCtREoGtIUA1ZFjkbat3OZ8x3V+T+i3u4NqIOebExjM+AecO2RN7eYGcRv1NPalpoO5R0QzHTX9hTy2sg8YLu949cD1p7j3Y/d3+fI4V3uD2gc2NKZdb/dhv/8FDCdA4RhtYJc7z3rCft8pKT+Nm8m9KYwoF0/nhkWEyG3/MnGegkwnYwYc4DVRwPafY7MiHKfp8WkUWKxVVuw+YhuL/RmQm8iHN1mb3Y7JRyWZ+BM0mzGt1zHUD5Toa0E4ZygkQUAqGZ5oJrmM1CSoS09hBkgA1CudRzr8qTN/RMcYNDlfKKn0RrM7vO983l/8HvWVPh8zciqdvynN58hBuA6BmnyCDXPuBmcQ8Btyvhf/+svanh8xBwJr821BIkBwBRQckWmIN2OyfW9alrNUwBrRCMoIIZJilEWJtGO2zvarUZG/xBHD96c9kYjsDJ7+P452veB25p/o9Byv5F6Evu+BmiIciW11QCVSH3ARbuRNmNxUnTrq6iA1fO8EQNqUxJQusxKN1rZMLU1BDU9jk5V1u4/98d76p93k+Nk9xqqBmrVNWp3WC4+t3P+tpGpzBhTn1vn76PfGz2di/Kb4haw2pD1cRkmuffteff6MzVRhAR3ze2a6Ic8YqKboKETzLMEWjDaZ6TfW9QngBIJGkieqQexUzSyhIwei/Em0v1lGl+tA1r3lm9uUKrZ2Hn02BqUxeW8cEKOdYfmeiI9h5zRmmRapOw5yoMxvRt60MB1TBkxccnjADSnw0sDBPx6F/b+r3/ml5DJQuSp9NfyDnLohjPJPxLg25aU0HROtYvsTki6PSIDGAn5rt/7hXtqEwVdWH4b9JqVlyR3pUj32sxsPXiRG1dtZNeOtz2naQfq64OvF1nnNutmK/fTkYGMvD7NlOye67jss+34/Hk9gPnfkW3SHZA6V6hUjtl+tlcWp/ncBW1kludhkZkjEMvNOthGPJbliQrIIw2rTT/QhyMoj8z7fqpey2pNelIJHGjD10dlmDztCQ72W9M8DLR892JgHIjh3xfNW6MJTwljnjx4+f0CODCgai40rcxu0e+j9rzbi5/TQLy/udy3Mw8auEZ/b+S6ewOiZaHuBzNLj67dR92yrjsDsN48eS6ASX7Fm89ehR40cK0ZeLkmfOB6EqYZtvb/Qxe69zVPD4hBpFFpHie/A9rFJSYB51NxxW0ZFbyINTDBJSN753jKVSMC0ASHeLA5C1rYNz343+8VvfULVc5VU5utFJH/nV2zSI92re68xgDEdLOV/vZoo22d8Fd48holcP6+5dwoC6MuBxqa1gyoV/3SM9BhQjXsuNPjbo91DIFqxe0MlPYdC9T0oyZNea9gfiox1FFff3AEXtEQweyoCl4BFVxHVS3aKhahtK0wsx5gPjuU34y0Q2Oac9BUCx6YtwpgUQEtMxWOotVG5LWv0o+NuGXy3TlsbOW9fm/mQ+i5DKzMf0uMKtD2Wpgjf9k9n3RPvcXFP/bo9x3VfRJ0IOU+FIHM/Cjg
zGUJcDm/22ddYFGfPmL3cypYpget+5onPT1o4EqZcRMCXq6MGBiUCMfAuJlkIx3iNorwZjL1t4Yay95tQatskIIaYg60moPs+24FBa0wwWol+nBj04yYfYhx9cMA24fozWSeKV6SDDs6n4927PPLSimibjP6cZwiO6ZJRuyYij/NtlGifd6N2UtqrAIA1ZuxKKtzoNX3tvLXLaDFEnVKMG2MCnghVDPbXmDHuANvZfpewzqVt9SbFQ3Iyv26Gx1VHNmjPrQ9FLAC1KZV2lmU4MqwPb+ZAOcQNn2W+gKqvX+yJ7+WbT2mXM25Y1N3Cwj9XfdV2D0Zox9pX3UcXlva/t6mxc6RzISLFsRKeHoHYns0smrs3cMp2gNowPui9Fly7e9lUYm9H9jGszGRJy4A1mtiwFj76vfzu6EHDVzHNeNuzVgCcD1F/NaPtpUwvvDsOQLZX9lwBw3H9ovQBz34z6p5SatekFWDH7BIDY9naEIyV+ejB63KJJ3k0S1R0nAlsx83C57bRQicBqz+c3KmAmto580hgGOQMrjdpMV+zur4t4JAwf/Bd3vkpc8ElIhGYz6t72If/Jtx9PdC4iewXkdyvdOVQC6lBsxOBCTsmRP770b+MbvOqUCK8vvuGA9eZu6KkYb5TSP/lYDXtvxSiQylsW9ybPaVHlUI+iw0eEl1QBBod62M7rPZMt3xPXgBVTtpvtsRpEZW3j0/2H18Xn3ll9G1L6U9ADNg9lrkSfCCgJSvulEibsX8UX1hTgPs0wf68V8iEJ+iBw1cX7pbcTcvuI4B3/7xD2y+PybG9SSBF3MAKB1xmA84qjphUjywZaz+AQC6gWhqjiuk/q3MNezYXrNdB23dON9WwsjOaUEYUlyVNqv4Uoeojd8voGqr34JXPaheB6hSpYzNjYNbZuLn7lxXX/vZKcmM9XXSF6ZtMWprh77+YBnbDmiNzEuk0Y/m37FrM9ectIx283q6pNpHu4kHD7QjD1jGKArj6OySeXB9r6mNtEH7zAOVXXN2x7X9rzx4VcAy3xOp2d38X37Od26z3oMKIhHAmlh9J9KKZZpIzfqV2BBNqTGZETVCj5ypZdgbLcsBWD+dIyDzIObBC6iWAK+BlXP5veSvMZyVd+8L6vermRDFROr8X2SgXa0O9kzmGHaTloEayLFHNfBrf2z3pQcNXJ9/+w5TmvDG1Tz8/lrrE84BCMcXoOUlOM64mq8xaQuUvuyKEbOCQ8dkxkw4NGC07YNUtawls/pQpMjpiLwECycJGqVuI40qYftF4jeXN0H21Ac2jPw2XoI235iZW42JEcRnYXMU9TMfkVnO515bcEt0/sKkd4ggG8TMmeXe0Pqo6j1TkSYjBozOrtncm9yfz0ljlZKDmgtHJrltuPd23rpZbN71nWpN8ynz32l9PfD1KROAgFvsEGPUHTgSVaAadPwZ1azzuVR9lJ/5jU148ULgyLTnSeQTwhoYN6iCkPcRe0uJF5T2fD8FkByIb5a1Bz33puwhr1V5v5GjXvAT4bJKYDae5ECyXP5dah/AaaGgXkitR+oLqz6+fQATudl8v216C7BNUh6Nx/s57z3mHXrQwJWYMQF4sYz1j0iQkk/rLSgtQF5BFhV4AGKYgODyrqjVwvrNAvgN04FJrtW5DZx68OpbSniHuS8lk3O+qCleleasurht0nYeToFW7T/kGONwNvW3BejbNuflfDBmYuHQaNIIyJpkDnqQkavDyFqlJKgJto3ErBqsve/zQoJWk5c6ijJRJSJsxLyw41PRv5EIOAFeHqzOBU30VE1M+j6btuS49I7JcitccLmuL1jcj8e0ptGYN8e6dTfyX3kNq6l2ETpwcec+VbXduphHIhwCsLJZMmRv2V7zNFIIvNbl6VJ+6UHOwDDwNiqx9d2oVnxC6Evgdk/6dXtmdP207fmRR+S1TrsXACWdxQI4fGSw8EOtymG/HSDOSKs+V9XkPduPaw6E/8snvn73eyKSRpPrHZCO
oKxMMx0R8gqOMzAdME3XJe9qL6G3nFP/RrcLW/+VAVXtNmv9qkzLGrVB1zMBEPNMzNUsI5+hCYuWMbSO0Uu6C/fh5jL+LWi1zmL9LVWfW+AKBn2FaGNuBbR84rZPJwDgiwMz6esM8SPSdhtzxyTYgVbv8zLwIgKIzZ7PTQ5Kz/BG4fQxULHzN2Wh/DFOI/G/A1qGMjZDiSRr+WSiyXYA5sDLtJARc7SmgF4QkvFt11O/lsoYB2P1gpHXsHrwig6wYqA2ORitSb5OQm7+HqKUQKP1CFpeYo4zpvkaPB1ACVgJxdxf57DSyN832sNyL7S5V39O70oYgaOPaj3ra9YPiUzz0vE4K0YJkrLx7ezlc4FPI+rdIjX2iDoTogimLYDVtcw03gP3Lab7ntW4ztGBsoCWAhYtd/IFBbHPzNcAMzgeAITic1rSuChtk7xovhCVBs1k5avQ2/mSApb0QFKNJZtkxlsmlAbO8FLpAQBYCsizK0TLbiESN0zGa1ojSVeqXLQRjqPAkRJpqB2JkWuSIjebTaVzoGpZnEE5bUyF0GPENOhsVa5lTFIpuzRGVA02oxUYmu3EFbySVEBV/14FHz/nvY/M5s4c1Wbn32MMe3Xd5LPTUY1A1Rp7k0xmafZYIsCYsCSZv0j3C8PflFJyJj+73xHz6ddP77+yCNwCXApqkVwOpE/a703kXqDJ62Z9UFpAOYGnFTfzNdYQkHIua+GS8OoehIHTGoppG3tRpM0e4W2wlecdI9D0misguah2vJ9jKzvVazR2nn49jpZn9dGphrUDYD6xOZLxO2rXqB5wyipzKb0bE+mDBq4/9L987OT3KwLmww2wHmXhh1uXPOwYKNeUvD4SsKfiT+EWrKxKAVAZDtDm0Cy5VuKWa7QMtE9KTWxqAnSVyfeS+wPEiCayx9vS9RSNicaD1p4EWe4TNsZ2HmTDmfMcmqUvWgyzANjeombSyDFfVNhXu7fIzTjBUguaf9mBFO9Xy4bNlAMvtqoUrEAGt3E7qbk/r5lzpkDoL9lrJz1Y7QkN42tT469TbxsCmeCjNxWDW0djzasnY1qjmn8+P6s3J9Pwu9YM6DWsairUuVEgKoA1ElyA0u27ac/Tteox26ZcC0hJ8yR3QxvcT229UuvzHVWK8dSXUOvJ14X0gOUFjz5vDzpvkjqggVga2tf7zsJIkCjvx9qiHdsHomzWNVrzYX+uEl3aaGN+348tFJfSu8Cthw1cp+hnvvA23ncIiPM1iAKunr4B4KMAgOV//Od2Q2Drs9ojH87uAcv7rYCtM91Mgj4gY9Rg0b8PwUl7A/Aa+TBGNGKY9nprKtuCN3ffi+Yiklfi+jcyKoDpSMny3JIDKs+QKEieCwWUrtINaJmG5ed8W0T23PbJJgOQ3ESfHwdsAde6y3pnfz/bvU3faymbIB7aMshesrUAFNbq+tWnIBJMziQAUNRBwO7ezIT13DV0HajmQbuP3j/lwWqrbbVSv08A3tWy1KfcAxaNmJ0XZLwwUyY31OatKkQcE8Pa1+w9E7uHbReBFgj2En77td+TCToetFYHWH3ityWS+w7fzEAMrXDlmZHf4nugNTK79cLRKS1yFDE5ArFNdY4B/7k0EvI+3al7+jULXCkz3j5mCYfvQIrna1gzyMI0IRt3NpMHGxN3Epdb9IkZa27NOb6ygteivIa1Jx2PCo/2folTZIxIXm+Z0Sl78qilfV/nDkADlIEIHODMhKR1F+VYIiATSRWFeAC0wzPn3DKpEGrVEQdYa64mwcW9rsLCjolwc2/OZKjAijLm80SgBgz6uRxpVL0pzUfTjU2NNXIuq6YtIGY5gQoyIaqZMDSmQ1tjp55xNLOzSvpWJs3CzEdFaltzZx2/vyfTsorfy5dGS87s10SPUgEvLovWzMQRCJNo3bYu1NSfaEIEEJExh7GOdGoOvGbW93G7lHxZNd+o0/4tKTe8wF4bedN/DtVsS6rWR5D2tzNBizdpJ18JKh3Nsd0/
wzlUIdf7s/3v/Nz6/XJKIXv0cQ3o13/16/jCs+eYCBub+uGrvx4AcPelzwPToUhxUmuPXaSdBESIb8OFoUM2PYGL4z6oOF/zbbYgFXRx7vWj8tQWJaVyrAeyUbPFUxK/fe5t1iPKA1OHTWFW/5rco3qzgqBDIJkD1qgp1mtNISDEg5yAO+ByUYMpVa1qSa1Wex/Astsd2vvVBOcDTkbUaxzlc3+dgRmtMbthy+iB7bMW0KJyX8GBFWkqhaqKOMRQTIde+7KqF2WcoS96681TTmMCSgHbU0DrJXu7L69llUAczjUIylMvPFL3mQtx5FBNxXeJAcwgAg7QZ06h5LJZnUyvXfSWhFF2gq940dOu2RBtvzPfy0z81xJ4dZuya87Z7ncDIsm9UyEkMGKIsi+DgFdC55Nl0cLt+VhSc80/O009gJwqsLxHvVY2NCva906APTeWV6EHD1x/+9/+f/G+w4Tv/ZZft/nuZgoSTZhWHL/4JpBXHD70tfUArYQhhhp9MCTRNFk/TCxtSBKrqYGsEaQrkWRAou2xkVtV3CePGngZteHJ5H7T3kvP7LyvAoBjOPtN3saLSCVfp215Gpk7UlI/G7GWRZLNtWTlr5DaejnYeyfRo5qCGCqcc5vjZv5AD1g+YXu/s/AWtM5Jdd7fMZqjka+nOX9nQhuBVZPL5M5lc+8FpcTiL0y2AM2EBFlXVo4KCFiQMZv2xW3uVw9afYV1D1pxMN7RHDb3pr8ltAEYlNcqKO40R21Aqvdx6mvTvG9XJ8vPoRTwbZ6RAy3/1+ik6WojXLYJ9z3VosWdq8AFXy0pN/5sLyRlfR5ichPzrxRVzjrvpPlVEgBF4LKJmFRDOgFg56gHrEswxJ/Zjh+ZIYEKSn1FkuZ8O0LhfejBA9f1FPDBJ+ME5JiPCLdvyxuV4toD6mf9frCILWPQKAzBisnqg2HTYnRxARtzDoDSh6oPxBhpU+cSWH2zRQCbJetNhJ78mvWbuUQTOm3Gt8LoJUcLt04sNeoyE2YEYZ66mWYWc0gMUrC2b8liZEWIi3kwt5pW7y/wVHw2GU2wgQ+QOEc9aJ3z85TjRsy8By53nqrB+KvXNcT6N3L1VUlASs3XW2FNO6vp0CJWswWc6EP2EYTyNxR/VP93k2/VjNfdc3c/BbBS58vqyQQV1/rHA5UtRR8dmHUNMCT0fcmM25Xx2hxwNUmNxDlqYd6Mxnx3SvvY2wNGI/+Y/603pVt6y5JYSs/lLFriav7umsfpNXGrrj8HxpypmO2kYLHVfgxuPXMRSEXLlfFYsreMc7u/Rqv/vsrOhrd059rTwi4BsHdDDx64ft37rvFVN2PgIs4FmMyufvzim2Ai8XNN10XbMju6D8P2klIkAmkYNYveIBJP5pqJzmaKCjjEsY8IUIE0dgva5dV4QDr1uG3NXKJteeqj6PxngDHRbfSjkQGYfb9Ajp1DQA6V4QbNzPf+E0+eQfUJ2mZ68XluNl/FDMZUqj3Exsw1Dj8fmSi8JiTvz/t6RtUgzoEVAaqV2M3XAJVY8tiApDX7WJEsRELIgJbyFlOiSuHG0AzA+md1qsqFB62RxuXvpb1Pt1+yRgs6X5YPvPD+41KI2u2v3mTd5xOKmZmKJv5izaqdB1zF2t4lgLAwb6JCR8C0Fy072mc9kwbgBIVqElxyxu0qWtZq69aBllGZ/yD7fA6EkOTvkmWv1OLFudGW2wLG+tx1LfT761SLl1M04h1VRD9NlwZkAF7IuPw3PT1o4Pq//b9/CV/zwffjyRzw07/4dmkkeTNJZXiJaKu5QYxcN0881MXah+gaI3EzaxtfmK2YCQnSUM/bvU0iluu1eTkGrz2T6cHKM0o5bn8OLtEuGlPFaDOfWUCjKCSLerQNtyQg51yczbmESO9pG06q5lZ6NXPLNkFbpNVZfQRzBHImxEGpIr9hi/ZJrS+g91f1m3/PfNZrJD1g6aVawNr766IsiUIxEWcNnJA6cPbcak1F8bcQSP2N
vjFmMw9uzHZ/pGO2Nezv6WTScNOTzr22/Dx/TwAIYRsE44pR1z2jP93RgKSZq5iSj5mRjhnLNA5asr0oc7E9oQesPcFyRD5a2MyAfr0WjSvb+t22wLHUgaC+8BwJM4eimQUCFmLMkbAU60utwL8EXw5OGzo6t4U9N0sQJif5njIl2jSe8/t6ug/meG3rnMZ7KT1o4Hr9KuLJHPG+g0YFkgGW2PpjPIjGZZUbKICnA1gdvwQ1d+QVtB5RotzCJBtLr1OOg0jFxATOVsFADmCuUqLv+dQnJwLSXRkA+iZsfTi10aWSSa899NRoVQOTSuPQLifj6qhygSXeH1ec1CR+rlGFhhFZEIYB1N2axNelPgIAzaYGxOSV1UwESKBCYCCodmJz2PumjHqMa4NZtuDU18LbaFzuGhutBGiFopEZrU+2VfAqYkzQ5x+BYL5Gzfkx4A9aa67vd+SFmmpmagvhBmcy9FpZWfM7YFUShgf1J9tcLNG6LHKUUXPyrIuCX969zxGQjg6ypwKe3SW8c1wAAB96csC1AlifT+XnAWjBCqiaE4ChObwvciznq4JUYgGrzKgal67lY8olad6TtUmKQaJtExMWNfHNmeszSeT6m/WvGaWNTBZAby0NFcRkPalAM6h2Ubc4Ne/79WO/rFxgS3tCsadTGvZ96UED14efTPjgk4hDqCaRiAxwktWoIdYcD0jhgFWdCTlxCXsvoKb29wJaAyZeKjY06r/8FasVbR6eOda9JGhLoHfOXyLpeHoVieXUYmnMa0GSX0sZIudnGm1kAFguXIg+ZUA0K00vcGHEZmrxY7PmdUlBC8iYJTytGb9pFSMhoLnfzqQy1KTOAJb/3o+hmFi6XKQheO2QnKvK1gha/FfDplMWILY8JhOe7LW/9z560O610bqACnAA9nKxNhUu9oIwBq89aPmqJ35PJbQaLnTs1xNwlwh3a8Bbdwveul2xHGITsGH+p/q+krd07BUG6MFJjtffO79V1bYyUgZu14SUGcc1Y9W/nsyicjWJxnQMGYcpFDOgB64YCGsIxR/me57lEDBHavzLPpmZ9aaLEFx4TF0H5XHo9fpefHZ8L0Ts7e4tz9PP9ReNFuw17J3zXUIPGrhev4p4OocSyeTr4ZVcEFQJzzeVDBDmHMMEtllQydBXjQcco82thOj5ob3uw9vlt+RMIxj8St/dE7j6BdJ8do9VYUzLNEBfUBNoQWxJuZTrK5Ucguvi60AJaP1Tfly989qDVX0vx8Zg5kq520ABKXAJeMkSC7N7b8DWVNKYEjtwKnMyADKg1bJ68tJqlVPc1buw8L46xN5jI7IcGplvMyGSJioDIiSVigbuTHtjPVsAeFCaq2hZdi8j0Or/hZqjlxVccwda/X1btJyP2gWAQyA8vYoKOBlv30nrokMMGiTValWe+mo23ofqX/vi2LZGy+90nXpz4DHl0huwlCezdZ8zoou2Wr0ml7ms20aozQxorhoya+i8aNaBrVs5N2ZyA6wMW3u1ug1Qt0fTUFZdG/b9aDmMNGD/XU+XgJYd0yeO34ceNHAdbp8hHpz01yS2JiCEYj6w8FpmZUqMssmD21ScewdvTUDuQ1s9jRonmrnHnq8Hrr4Iak/2lQe63nxYq9hXTU+W/LYleQEhMx0wN7w+U2vi9Nf2PoPg+o5BKzkEZixAya3pNanR5gcwBKqRvyGz+LMAMZchts90Lse1aQS9v8qo0SzLZ/uAVT4v5+00PNj8VNpLNN8+8VD9EANzSyDxd1mEaoRoXAhWYLmarBnQiFeUKNfm2rSdi3sRBY1rNDNgC7p9+S4OsZjeQVoLlC3loQWtPUErUCv0XE0BU2QEmvBLLxa8WBLu1oynV1EYvdI2oEjP4XxVHqj8mm3WLrPT1FA0rN4keLcKeMn7liMnlaxioHIu/xrq96oFnGUhBFedJpAKhxeYZeyw0oXZ8Ye6hhXkmBvBxiqMnDv/iHqDi38/KnIw8kFeSg8auMLzLyDQ
i9LTqdjRDzclP6QktGbW0Gr5rW2aUb8fL6nZpmLeRgj1TM7e+zyZXtqtmtyZxWH36F7Xc9Sx9mNskllRJZ9eIcnu+oFa38+Iscm5nc/OgiR0o0fl1KX0FVfzXx9l1Wuwqbxv/5b8LN3YYRCEsUf1p1uTn3zfmqJG3/fmlT2Trl87MlfK6HDeP7l3jeb87iTVF8fV36oAZu+bMXXMwVq76I7RrsPCKFntQ7mYrCa5EqtNsvdn7fSTK9VQwuQarJqp3TVXHWhal5jLAwg3U8CHnhywZMZbtwu+9HLBHAKuplBSAPrh7fmqajCQi2wdrFm/Xs0sCKDRsGTNtrvN+7Ym99qbCs2q4M2FTcoC1VB5S50ZFUv2fs3RnI7Wm//+ksaf/nMPYqbxesrcgpY3D79nNS66fRsUxFFL8wEIETxdl8x7DhOSApbkCDlGzkBO9SH07UwANOVdbAEDKDXvpLWJqwVH1antKwp0o969n3PmvSKZkiyGSFQWhGdcVukDjrHlzjm7SXB2tm27FxutZ8zMVBbfms2kI/CaWDZt5qSS8ugex5rVHmiVwq2hJtHa+EpdPH8f5Xluv7sPaNV50e+6Db69LzT34wWdrYl4a6ok//pCjYhgGpk8PxHE5Ho9E2m0b63eUEoeKWgRaRkpHUMq961RglRDRM8BTBGcMlyAEm80LT8Hdk/ynrrz1XsDZC3czMCcgHSYsKQKSnaWEFoNy7736Rx7FSSCSp65W8SN8NppULY2+zXcg5WN34DH3htgGYBNltMV5NhZ/Vvlb5PrVcFqz7fb+7fs2JFlYmSVMBo9N2C87spv2At2co71HMM7QQ8auPKzLyItV6D5ALp+gnB1UyW9+QqJplo7LKP4uALJa3lQbZXxvqzLKC8kAYhBNj8DAhAGWrZIjRmNIq56snyewWf9cWybl+TapGq3tCZBY0Yqmx2oLQv6U3agK6+3C7jce66BATKPhU1U5hACUgQy5wYw+3Bjk1w9+SoYVeq0fzK24D4HcEHH4Uqjexsxy9402B9r5Edfa9fVpqKnTGGjwA82LQhb5t3fh2e6pon1DL6nzDLoEjIN9Y2RlemyBHsDftszl6GpZ1y+IWtvJu+Hd27tRaoCmflxDurrlIjUiCXnk+P081UKAJAvVn2+0r6vggO4/b6zjis/2NesbBzRrTOfMH6lmtkc5bWFxAcHWL3A02+JUaFtD1j2fm+dN+cafOYU9yYMvz2Gi0DT7/v70oMGrvTsi2B+Cjx5H8KT94HjjHz9PuSbN3DEhLs143atBVqrCYcAxm5pF18Xz6uz/sFau2urouHV+0iQgJG+42/fRFGJ/YrbASwAMCd3TVgNWsVCpGcDMJGgq7ZllS56t4d/a5sKqKZOoN0AGQAHbWIYhDFHIiQt90QUsYSaVB0JuNXzLMQAMhYxrBcvcezVom4slqg5x1Ck0OsYaoWBEIomdomZqV6jnQPPMO2+R3lZPdlGtPnxoGWJs76CeWv2M+YGQH0LbRWLKmjd49bq2DqLgY/o8gw6mC+EXCkzoPF1ZGew9tKzpz2G6D/rj+/n3z7rnwcga68P8pmI8MZVxM0UcEwZt4mL9mUaUeZtY84YqRQoRkApWByJsZBFHYqfqQRk6F73pm5Pfs2ONKhAKEKXn6+aOlL3jSUi22vTzuSvgFVfIPnUXPfPw44baVaXCik9mdAEqGXG0hLQalujZPn70oMGLgCgwzXCa68DN+8roHXHEbcp45gYx8SbEPC9NvWhfK/f2WTbtdBKN4GctKK/LxWyDbTyus136ZKdSf9i9BdoOgMT1uLLs1b3gcyfJU5807jMfGgS6946GTGM0YawGo7Mgj1BJXLy0UskpYgA4DY5rSgLoEXzHYStqaY3mwAoOS97ZhOr2WjkTYRmIu07NPd0yjRYN7MDDxeUYFJmUE0j6Rgyt6DlTYjFx87awiTV52BmP9OACKYF+fHW173Pttdu7BhL9Lb3I8e4v1c9a3OOUb8pTyUl
Jdg+4fIsRg0o/fz7NVi0ELSaAkhKYvW+QyLJjZxCQAyMY+AyXtFMCSFz6SwNSD872yuNzyuwHKuvZ9ZgjQDMztzoqV+329wr970Dp57auqVtxYxRNf8ClJ2Zf4/2zH+9peFVSKayxguYBSrod1IVRo5tAlNekR40cNE0A9OMcPMa0uE18OEpUjiUXAprjWG00R66ucuFYaNRdf2Gts1nWolsuNqaIBCAtFbQcl1/yee+mPE9VKAq7T26RosEieaygVPQQJSmSoH5O2oEmjcd2T0bMddFegq0+nUcCaUhZDFpbayfjKspIATgjmSjxyQSa0iifWVm/V17hZHppM9l8RKo7y/l741Rb5C5mpf2qOGPe6A1yFsiEpNOo8GQAJhdu4yhCELVJEVMmrekgkaqgoStsd586Ntx9GY5D1re5F1eO/DZ0piZGEhZqSOf+1TuWSfXCxYtw1XLBMbChF+DwIk1SD61pGpWc5BriMaYS41HKz3mAQyAAzGUyFivdaVQc7q8ybI3d5eakIEaDal97bSqE8gwqp4zAqu9gKNLqL/+nkB0X6rBZFUjtcr95Vqda+ZVNTvgoQPX9ROE970f6ckHkJ98FfjqKV4uuRTlTLnrkksVsHrA9w8tAKXHVPUFtBUHfHFS+bxqW42mldcKWGlt82AA9YAHlaol54WDK0Zaht6Vz+lDkd19jMCrv0d0i34PtPrNYUwj6vwgWDBKZbYLMQhSaHcOYpO/TVkrYoRNqHwzfi+dEhqp1dcpNIbgSTQZyV8hkg9ioIZRZhaT3AjIRvdeAGuvbBMFUJjEHKu/tfBlAwKv4ffAUcLX9T9f/cAHShizKn23uH0mdv+m5XnQslSPvsL+uVJHfUWJuzWV/lJ3GlFnj88LGNfqh7maAp7MUSvFkDRPJGoSoAP15tH2OdQqImj3TQhNGkggMTtfR8JVItytYm1ZiIvgkvK2ao3dugUKWA8tmxe597jJRwQwXI89YNs9AJ322K0Lo3M93frEeJnZschRNPsd6r96FSCxNRTJrD6mAbLEAiiPQFYTtLbnkRza9yhwxfd/GHj6AfDVU/DhCY65fYBEWtNLPyxJsZ10LMfW3wDARLKwvDmulsVB8amYT2sKJFU7SqtyB1rJaV5azLcP4ScKYI19J8qbdg9WsLRpsEdhs2CbewMKeMl3noG2oDUiAykzRRJaMNMBinaj85HIEoaFUcxBTB1XOWDJkqS5pLCpUlDG7EwpNq45huazUQV9Y/wCSAxrhW7gJf2NSOv+XdY5ur2APC/qgmtYvzPNywA9kyQIl/JMmV2St3U5dhqT3b8CrwVQVEZemVnirXmonqdtdGjnNdOgB62+4n6bOyevDahKPT4tx+WFD6BGya36YFKE6+BtY61WinOFfTeaVm9mL7Uda1sipCNoucXN4QniPIEog5L4WzIYHGT+fbqCzd2k647BOLikXj9XPfmi2EXIUl4QBve4Jdrs340g6eZH5lC/737XW1NkfMOLbsyB94ht2lzLuyCKdccBFNvrIBaaEGqJslGLmkvpQQMXDjfg+QY8XUlfLc8AVVvyfgebJ++XsM3iyT4XPWdrfy+LE6q+EwpokYFUFq2rBy3qfFykmhOHCcRqEuyrKSho9RW2DViAPSNPf19V+zq1ZKxYsDeRePDq50rUA/mWQvV5JaISyRkDMOUoGlng1twUx5KsvG5t/0C1rtr7rCJ11EEmlqjPlEk3TtW8iKkkZ2fsb+7RHPWgtXe8Xy/e1m9YWjQktGY9m8ZAJv3LGhPJVTs4K3Ms0Zrdg9/4Z7l9PaKeKfvqJyZg+IoSVvViE06eDHi31zAzewleovpsPTPutRICtqDlP1ONt//OrCBzML9LbdTpAaudK1n3kapZ1XpkjeauN+eNrTGXRaT297yX+D7i9bqFyj34C418+O091IP3zt2MmWvhAju3CWE2di7j1i7LBO3eIMIjMqvwPxjQhfSggSs9eT/yzRvg6VB9BmgZBvQ1
dKOZFDS7h9SrrCYxNZKgAy4JEnCbyuodWqXsbP6tFZSW6uMaBWiUYqR6DJwW5X1ecar5abCNJeQX19j8UBfWqNK7J1H16yaW37QmISMPbDHUdhwWuGHal3SRlujDGGrLF0tVAPZ9B6dC3TcMN9VqJKwmN5krWQClm6xqiRGkwStaLaDbuD1QM4UGvHhgqiWdbFsrBuiIKJGqUCYaiIeSsruCe8a2Jl1nXCX/TFomfBn1Id6n1kgoggSXY+0cXusw0/kcQwGRycxq0TWvxPhe7BNhgm6PoBUgTOMtHR806ZmLRC+gZXPT+hr7OdPjiqmVmnnsp8SPfw+4Ronsp2ivKsslJj+2Qbn72isq3f92j8wEWM4J2vAY/3a0asQvDhFoyfkFT0mNZ+hBAxes+jtUcg5UnKneAW5EaDdLX5HAyC8+v8FiIEwEAaVVtKkGrAycei0rtyHxAEpYvGlYRAHsNS1fNseBVnKbzjuoe+qZVg9egHfoW/sV/W3ZCWrecgBmVfXtvf9LeiEr/JrI/GECWgZeFiY+KYjZ/ZyLNBqWg9IbXXRe+6r0kqQpYBHMMRyVSWlYPlEFFAMGK+grGqVK9egYJdCYaxn1eQQScOTC5OV6JnHK8yAYawW25p6iyKI3IVYjUwCKJlZ/265rM1FGjbaz2pMWaQfURqdGpdeaaixpAmZXvf9qChsTr0V9Pplj8W/NQTo22L9ZwcsiEOXe7PluHnklzc8E11ogVPbbETxd4y4xXq5A5lSudYihycf0tUP9Nb25qwW6UZHsOm6bdg9anpf0oNVrlCNtbGwmHcxHdw4jG6cvjHCKRl83YMXby5fjuO5L40mevBui+vrk7827ULkeNHAxxWI+y7qwfHQVUCVgE36ZzfzqNjdXPwJQNTbpA1QrYBRQ6iMGDZxK+wdutSzAvWdwqrE2FGObxwU0i7JvCQG0oHWpVA13fz2AbeaV6/EZKF11zXzYayI2ZyMNTIC2VjUnDZxIWebdGHdmIMSxSaZ81Plj+kr1/m9gUgDOReplHSllSNCN1oeLGqxB0MANgoILuXsNlQG45+M3dco9g6zaflCgEt8XlcRxOYcJBuzW7WgepIFgduvVa8g9qcu0Aa+i5bH5dVo/SyO9A5hD9fGY2S1xLP3SyrWoRn1eTQHXU9RADdlDc2hBqwm6KOeoc+rnt/qhW0bnAUy+F3OU5fUZOFoUbMlzhDdtKdO1a1ilGXbJzvbcCMP16clr7+yuY8JgKOui3n0P4ARszaLl4Fb79AWcbWh7QHUf/cbErb33/TofgVYB7Y32qFav96zGNc2NaYDZTEPtDEYixyQgUmtlY8V8FODDVmXi5zAALObGHNhoWEYdYHnQQtZg6RDl/VQfQwExMxHaa/sel4FWH/rvqQewPRptOg9exU9o59W/Zl4QMJN5NfNhyPraPgOr+aFa1Tb9lEyCDNv8j70eSiGIH8jaPwAS7SValmnoKOBlNfss9JxJgDbxWFoe5QLubWQzvVk194SqyUbHDgqwDJ5L9TMKeNn1PHgZjQopWxFl0/LsYTX7YEDmQO9zuBbNvvXBGZZzZFF1cyBcRes/RaWvlsgTqxtw0HFvg40YvZav+Yu9uTCvmMNUwgbNXOn03QJg5X6wDS7wZi1WoauAF2pQTbk2V2tGM6K6aeTeWNeB20u9ibS826u206XJ9HQRDFzSVsel2HRbcHzKnc89YBlY+X5v1+9V4LoL15g4Yl1qxWYvdQT1RZnKf5u09bdq3nKMOfFrCLKZiQqZ5uRBq3y21bC405iaxWKgZV/HKMDrgKpoWETweV4bNRznna+nPs+DdTPUeDyD5DZwA3AamA86AUrEF0LN+zKtK5I6/4lKkq6vbMKM6iQ3Zq6aXF8/DkDTPwkAUtJisRpFiEmAYUZAJvkuQ1IGLITXjETmw9lqPfX8fp4uqQDgjxCtrgUvA59swSMNoJ039xg1krxz5puvYwuKVVCT39QxnqI8WFUWyOS1qjm4
Emi6X3ZboaBqsN5X5++naFkICCE0wRvEGYdQg5YsbYMgwVPstRPTwjSYwdZA/yz7WRiZDMFShcfmMFHViJfcmuzMOkaQ195fuTER2r3ZHPm//es9ugSoPHVC8uZ0ndnVjhmaUWGaeKdlpyOIM8Lx5f3G5uhBA9cxZUxrjXbyJi4fscSs+S8JsCQ4mVQ5PukDIJZck5RV2iZgIRJJDgCFDM5rK/EF8U2ZGaNsrEGyqhxf3aXFTNj5tex3xAGcM8S2JX2ojImZ32lUMP2S6CD77cbERFsNrZx35/Nd0nlp2qVoiHoNk2Wd95quYGYdi/pj6z8EqNlNTthXPjcyn11SMThGkiCQCIQsOSRcQuZFOjbTIVGbK7U9t96aBy534LkyR56KCQ8+786u46XxqmX1v5dr2vvzILQpPNy998xzVDfQ/XLwqv7GfMJ9U0qg7o0+uKVngr1VIXAXWWkav+6bxMCa6m8CxPxcKtnoWvTWglGwwWhMm88Hvym+RhW2CNAkXKh/uK6xYJrbJQLJiTJwfhS7oHcpXQKE9yBbCxZ5XYR/s1yl5ZXP/cCBixFSbmqGBUKJFrMFkqHSPmkGv6oa3iQlDnnRJhILM12zMA2AEGlCnABaVcNgJ8GRJikA28UyWDyk3d84xApWe4vGAaCA7VaNL+c9MVelDNJmMNuuzeHERt5rSthQZ9oYgRczCmhYaSp7LhY2axJx7UMlkm3Uh2o5IYpmpR9Y77uzgJ2cxXzI0O7BGiyR1PmOUOew72e11xhvcPOtBL2jwUgSdKtNGHjJ954uA60RYI2AxwNTf86ReceCmUbnKPcz+q7TsIbpBDvgNYrm82a2xGgCaDJLEvExsz7bavavJ5egmkuwwpt928/a43qLh7caSCqDABirb7f4UaEWBb2fjZ/baKRhDUDL3p+9t53CBXvPwdOZ2KlCJvzY2inCkGrFJdI6Hy874YAeNHC9fczIS40mK7ZmEmZlFeAtKuiY2uZlXktjWNHMCmaiyYmPZAqE6zgBUXWdnGABE2RaFzTyyUcRur9MBExTU9qpvA5Taxq03zGJhKI9jkyzi/eVjk6YAMwkV49tHfajUPiLyJsOFbwUmoqj3Ax0me1TASmfc8MsgQ9UwE20JtNXrGSPsfFRQrNRYqlFV83BGpxBcqMGOr5UTc9ELcdnL18J6oeyUj2AMxENNJ6iZYGqFnxWW/MamT8X1dfUAVAHSn4MwFZzi9QynUZjGpn8gC1T3Mln8KbxTU7ijuDU+/SSC6ABBMgOeiONlN9bPwZ7x5u+vNvBt0Kqx/IGsJop6ISIS6Dy7BGDKNaRea6aUlEEx+F5TtBG8x0Nh6pplbEVXMy3bc+hNPu1LvW+/N0r0FdWNwTw5//8n9fonvrvm77pm8r3t7e3+BN/4k/ggx/8IJ4+fYpPfepT+PznP/9K13r7uOKdY8LLJeMuMe5WqVJ+TIzbVd4/XzJeLvLeKLMtzu1Dse+sQrzVO7OmcqNE4JJnZWY/D0j9Pw1rt6Rje89EgFXFcFR8ahocQutR88OOJ/+VBOj+n0k9qsHZP8u7CfqvlLBC+9r/C+6zUR2/horGWH0g1ZxU83sm97704LIoMfVZWjWOORKup6hRbEGqxgcqzQTn6KPZauFSH4rdm+BOFWQ20KrdBqovxf7JWmnXT0ZdP/Y7/8+ozH2g5p/fS/ZZPRbV6W0+pmDzbMfbM6wRfpP7V+Yo1CjaWX9TzTvHsv6wHkHrHSgt7T+XFtLMoe4H1h557PZAn5eYzdzm9wDVfyPy4d9XkXBFCQesiPnYjsVF55ZrqvZmXZBrs8gKWuz5hT6w4P7JGP0zq+vWPxPvvjBGb8z9JA0A3oPKHpA1vz11bqfB7Z3Lv/dnDEQdHyF4E2ETvPUugKqnXxaN67f8lt+Cf/kv/2W9iIua+5N/8k/in//zf45//I//Md544w38wA/8AL7ne74H//pf/+t7X+fZXQIOCVdTwJK4RI958s3KLPzSIqP6
hm8qgDf+j8Ri0YogNU9Mkm8VYmOrJ0DAK62Nv6tZkgZo+tpy0ApouePkCzWxIACcpEo8UO0TJxZlEzocRPLqC/OOzAZFUlPqExA3x3o6AVrebGimnZKgq4s85W1UF6HWPPOZ9732JQaZjFlNvVntSn2VjbLRHPNo523MRLymVT7DuEK6fcmaexVg7jxnRvIS6hm+dSr4atQAtNey7H0cMJPhHJQbtIAk1bBGnQ6AZr00NTVHdTcHmn/DFMlp+Nj3t5ZzeMc2II7/9ViuNSqR1vvQrBKIaV2l2SXa/K1eqLm0GaPXcP0z2fcftvNU7gUtYDXzoNep0af30OK68/nn0V/H8l9tTTOPtffYrbNLtN770C8LcE3ThK/5mq/ZfP7WW2/h7/ydv4Mf//Efx+/+3b8bAPB3/+7fxW/6Tb8JP/VTP4Xf/tt/+72u8+xuBV2tWLIU9QyJMIfc5AdYMdBAhCeIhbEszjHmo9QkcVSTVXstW40UMRwQDxOQjqoJBbBpMxQEvDQ8njfBGQ6svPaGznxiZAyCgrjRLg1nLeckSBFEBbNQzQcFyOw3/pruXGc3wCVj6sCrhMyH2hAzhMqokgZwRBBCMd1YDzSUqMPaUVf8f7yzsT2j8CkPfduNDakPMKImDcMFivRdbz3JmDrwar6rx17ammJEI7+U3Gd97xlJY/4bSSXexOt9rB60eqJQfbaWMK+CWmPZ4O29j8gzSKO9VZYBEWYCIQZ1RLtxjbQ635WZgRLAVU3UbaHifmwASqV2e93PP9A+G6B/PoNn4sddpwxw47fX/TWan+/M1anz+/PuCatA92yo/dzG09yTF358PEB4dfj5ZQGu//pf/ys++tGP4vr6Gt/xHd+BH/3RH8XHP/5xfPazn8WyLPjkJz9Zjv2mb/omfPzjH8dnPvOZewPX3ZrFTLjm0jF0jmIyAlAqN1ty5N2adyth14oLDEBsLeZqWM15DyBxVlNLwGG6BsIqDyNHydPiDIRVfF6WlOxpD6hGTliglVQ6qWVT8LWPTqSuJQqgDoJ7SDuXqvevYAYQ04JIkuJnqy0rDMSYAQoVtKQLbvWBBcdcfJfnkf0fQBNsAFTf0x5wlLB8VIC00lZ2HQaXIq313lpp3JuULgmiuC9+jSR6H1zRMJM98BlQKXNFWgSaCcShO4bU5B3A8VDAKunzGm25vTlpj9mP+DM/F9CZ20zLKhMjoOULbHtNq5rexqDVjweoz9QDj5lve/Aa7bQ++tOIy/dhAyY9YEkBcCqfhW791JPu+7j2tKwR2XntuL2SY+Teb0yDuqltHXGYT1/0BH3FgesTn/gE/t7f+3v4xm/8Rnzuc5/Dj/zIj+B3/a7fhf/wH/4D3nzzTRwOB7z//e9vfvORj3wEb7755u457+7ucHd3V94/e/YMAHBcM25XyYuas2X2yzGRIL11LCExSKXrUzk3fdFWY0ZJV/dCNaAjazCAJD0ClDV0HRAQy0mkTp9oCbRAtQdWRk5L8Z/thdrb+tlqG2E/amnv2ufoVezVO78hHUMxIRKQLFBBjR6l8nRoE0O9CdFXo9iGfbdMB7gQMEwz1DprzJZG4JngeG79rF4CWF4q98fdh0agRWoe3Uj1lzjuSwASisrTd9L2GlZCKDl5tUhvOz4ANcmbUHID/fc9jVaOgZefWxlrLbproNWbBz0bMNfBHm0Shcl8WF6jrX7bPWAantvuxZQQqlqhjd9/3/pceTO+Rps9sUcvNQ32dEq7G+4h707hXCVS5yp5FfqKA9d3fdd3lde/9bf+VnziE5/A133d1+Ef/aN/hJubm1c654/+6I/iR37kRzaf36YM0qhCA65rnXlpaCevzVFq4bS+2oJ8j8Z570mixxTQEiOAsQapAsDIWDNhChNinMTfDLhgCB+J5Rackw53iTNAsf6u17JM9fYUzpwTgEUnNtc/N47R6wFRv0jPnLM3VRIFyUtzIMZEsGi/jc8LXP1hqJrR5pZ3/EDyuj7vPb8X
4BkHYEAqDIbc5xjmlo1apts45Gxbjck+36ORKbRcrwMtC+QgoGrubj2NigX3flCxDrRrsI+OtUjelLlqW4NxVo2kltiyShIjyX/0TC3CsD2vG7sCro3Dn8ubk09pGrYGW219m2Ttk2wbjcOP165/4nq9ZljvdVzot6wjrkUBmjWz4wrYgGE3jnNzAmzByn82DNTy/n1bWvPV/oXO0C97OPz73/9+/Mbf+Bvx3/7bf8Pv+T2/B8fjEV/+8pcbrevzn//80Cdm9EM/9EP49Kc/Xd4/e/YMH/vYxzZl8TMDS84ICUhMAAJeIGEOjCvWSDMK6PNXbMG1jG0rNVmoPTLjCK23R7xZxFOYEOIEipVh8Mg8M/Jn6d/CHNJazTN5FclWI/S2cbjbaMYmV8yc5V2U0imijoHtRQ96oCqve2DtfmeaVvkqVkc+hWkTxGG9xSw5uenwrM/Jd9bd02CGmw1bECufd7dgITOANxnKC3ZnbJlPe449P8i70bb8eDcRXcAGtDZkz/iMJaABLCg4aOLvmrlEVVoEZh1XLWgcIYm59toHr3gToWfYG2sJoyb3UjXdAa0fa0SjChlBBZ+g2nWG7G0v+BSgCjUFp2pf3Tz78+t8NePDVpPqAeqS6hRWnkzAvApx5brtlDXnOqXRteOnbp26e/V/hz927hF3vfQugtp/2YHrnXfewU//9E/jD//hP4xv/dZvxTzP+Mmf/El86lOfAgD8l//yX/CzP/uz+I7v+I7dc1xdXeHqaovOV9pl1dqV2MQumTEDSIGbqte+O2mvWXmbtK/1BnRSmgOvBC4PNGZb2BLd6CVLDY1ADNNJKToEO7ICnWhwWfPEUDbFLhA2UjBV30MBr+mks3fE1AUYdoI0RlqWB6xzGpuLbCz5avq5RSDWigQ7m2rw2TkNZlRgeARWe20mmltw35xjRHKMY+Y9qDbjeXX08j6tPdrVigfRpvKDFtCM8WZufYAFxLvHlaFNBCGasxWjtcaeZvobJh/zVnuzuZb9rz/W5yDpCO0v+q4I8hmGQpAE5FBznO1pC8Twfq7iO7T568Pwu3Hb3Nk6Sd19eg1/z71hplarr4lARfsaaUT9vPXv+zU79M8BW4DeW0eDaE5/b3tCxSX0FQeuP/Wn/hR+3+/7ffi6r/s6/PzP/zx++Id/GDFGfN/3fR/eeOMN/LE/9sfw6U9/Gh/4wAfw+uuv4wd/8AfxHd/xHfcOzACAmzlinkJ5sEnNFAjVhAOgtlyINRfoEMOuVFulIsKaZVGU6gqQxbQw1J9ibKsC4jGzSpfcLXhfC09oBJgS7aZdkSkAtFaTTl6rc3wAXD6XrACWRXhhu2n6JWeLyTNuhgcxB2AjH5z/zv3tTZ5+zBLqb5OuHw1MHAEiYZ+ivSAF+6w5Fq0ZatcnBecz6zfqiEGRcbZ2zgHbtOOF1zOZ+8CWnb8F5858M9KK9XUx45x6psCGCTNkHZ1isJuxMkofNNI+YxZ0k4ASNQqMNY4NILJUnpBO3PKZmSv9PFgpsB7ADLys4K4V2/Xk0wu8pmWRqdQx8b2yVjZnvUbYRDSOwMvBdtUALX1EeI9E4krty4RaFsvucW8cco0WtPzrjWEHaEBrJAAVy47Tyj3PYW5Tle5LX3Hg+u///b/j+77v+/BLv/RL+PCHP4zf+Tt/J37qp34KH/7whwEAf/kv/2WEEPCpT30Kd3d3+M7v/E78zb/5N1/pWk/miKtDFBNhysgBpf2CgZX1BrqZYgGtKRKuprbNgpFfUIkZxyQmwcB1M2Sd9DW3wR6+jbe8r4AWUJNtDdCAyljkd3KutgdYAIVDEw1mGsmGTkg4nC8rjlnvxcbHjeZY7fhdlW6vbZUFzS3D3BszxKRRgtUyAMqNlreXc9OOeRsKLufealDlN2eYbdFa7J40Gdzup9xjdz8mQNhcNVrvuaAcPfdF5H/f/2ag+e6ZcUlqaA3Pw/013DMjVFMuEYHVKSSxS+yqrHdDYxTA8pqDXKMe
A7Qg5r7uzmemEC2xhK0JttdqvdWl1fC2xXb7tWQVIZoozT3QGjBw0wgriJ0Ox28FrGrpYeUnmSVPNauvsPd79fv8HGSMhLgiwPWgZWsp2Npv/Z6Vb9YcOaCmKr0KEZ+qMPn/p/Ts2TO88cYb+Cef/W+4ee0pllQL7ZpEJFUTQgGu2TWymwLhtTlIywVkrTRRNy7HAxIC1sy4TXWBHROr81nroqW86QtlQR7lNcn1iCRgpHeae4dvWRyhDa0dJY4CaCQqYKxJ9RLcKdOLJ6+p9DkqcWchNz3JrM3LOcnMM/NS9moSf5dKbatb/FaRor8HPz4/j0CrzW6uP5A0C2NzzvZNgVADrL68F9BpXs5s6z/3m/yXiZr53jPfDn+4M6aOMdm/pAKSZ1Dyfrz2gKoRA+1a66kHq/OC1lY7KtdDq0mPryd/x53ET/h5ujXeFxqwOVidAGlruZ8jAzVvfpU5kFJilW+4yjOhdm6PJ3iGn8tz9+vv+dz9lnt2oLVkOe+SGWveAtfbbz/D//Ebfh3eeustvP766/sPZUAPulbhHAKupgggabFV4DqGkr81qw+slhGqXVqnQIi8gpZb0HpXNzMFYFoRpwNCPAAAFqqSkkS6OY0lS0BIYgxr5Fm5oTkSsuaaNQuBRIKKBdSoVCcX5iuOZ9N8jPw+3zOreK2kNz0ArfmhJ2/qzMmYQfUlMMumYQB9f6QNYPkyQJ6BalsK9r+jeJG2cQ54ba5GzOqUH4oIxdeyF1rcgJZtXpM8fY4cVUZPHbPnUlA4NMe2N3EG1EbBL0b9HF6qwfXH+nG5LeLn3roWNJqXalzI1Xfk51m0CnfJnad5aYCLX9tRm4OSY/A9AwbGDN1MhHkT82/36n6759NqBlmtH778Vw/sbXmpatnxfdBslNbXDQFaPNxptidAuSfvYxsOfc+XsqeRd6CVWKxSolj4UmlyzWO6x5rs6EED19NDwJULLfRgBVSJ2+bftxGfA0CrVrgAWumBpYIxxyOupgMO80Glh1xqmgFW34xLO3OTKDyAzVG0rjkGzCEXjWwOoYlmjNmAjNuNpv4yuY/xIuuBqJHSuoXZL5VeCjYyc4TMo21+DQPmGnqeVbqLumAJ7WYuprXO18UOIQp4sbZygfY1c2apS5e4MadzEnafz1NMQ/aHrE8WqWY5BudN52vXb838dqViCVA2NnWbvbz2vz2nGPmH926AqpwwbN9zquMxsseivjGiIBUrAFglFCuACwhTtUAJv1Z3ef0FzDeAiqBhlgfTVpAJMZq1YmAdyG6NNtf1vGTPhGt/23NsNA+ZDIBq5RB25kEfedlrWfad+YF6d4ToXYJSQTscnAKsnmt43rC3SoJ+PwQv2wvmg3f3ajUejyljzeZuqZYquy8ATf3Y+9KDBq45hlKf0Ex1iRlLFoCwJOJAtUICYCorgTTLn9ajlG0yyUkfDKWlqPri0/Ghrx0gONBqqnMkaDPDhBxUG8xADrV2YiBCDo7hdhqZLb3RVmpt861Z4d6kPyFYDyEtc5ScaSeIH4OVKVkkVwihMuiMurgdFU2FglQZQW7Bq8x/VO0F8hwQSskkMmbV+SFKYU99vWce7E2pe34T7yvYBLxkajWofvsbMysJK8mBcAadq55CoVTFOenD6n2HAyZ6jhrG4wSF/n15XvZ5hmjM5nPNUkHGtC+LAvVPYOS72aO93mKAD05AA0qW3xdDLWrbhKnndTtH3j8zAp8yoP3nsNuuxf3GIi/La67pAr2WZYC2OFdEuXet7CPWDhWsnHWFWECt93PZdYF2/XurTLkG1SAZfw6zQPQh9naf3qS/ZqiJUEDrmKzIdAXi96zGZUxqjmqVULPdkoTJSQJyy7wMtJbMQAiI8VAXWB9Q0JkAzMxn/+YQkHPGAte4EL0dXldsICBnCQcmApCR2KqWaz27ouHY4mvvd2TuKldpFYaTc3aeFLD0hJFqngigfgNrAFnMGVS1Lgo4Gf/nwWtjrzGtqx4nm6WC
iM/jAipT85UMjO4zZ30ElgrvWuUBJcpTQIvrurFSNtD1Y8zdmxdLoWTTpHJh/EVydUKTX5O7c1jmywHXjjbRkBt3uX7/vR8DOo3LqDeNcm2oGgjF98X615vCWm23fU7GMGWWNAjBCSx2jFE1oWt9S7IgKDodTODnKLX3u1G4Ot/VnnBwH5+lv/+MClgetJY+ZjxL92wTys+ZB31llz3fdzk123OrTTAtv64HLws88nvHzKFmiVo1ylu0Rrkni0cApOnnq9KDBi6jOQYsx6SgxQAYIRCWbItazD1HZXesG2gNkj9yiBOm+RqUV7DvM8QZPGntNbPvQzbGIRByBKq2J9L0ArfpismOpclhDCiPWjWxBaxmwrr6at3E0/c9cnH4IBFPfZX0U+QBFIC0mSeZRd/uvoxDTxmiJA6LSpmqWSwEIGVNlOxuqkjvaDWHvFY/QjyIkKBlnaJu9JHPo/dnlMvYX/ZO7zbsGIPjyz2ymo+KHT+DrFMAZxBlIK0w1lC1ra3WWdiHfRUwBpA9GoGVF7rs85659r4IAyfTeD3TLfdJzftCo8VpWiSc6S6bFYSxplarACogWbNPWyG+IDFQQcuG0wQvBXvWdd1GD1pW5DUNAmm6eS334bUBD2g++tJ+1s+bnz+cFiaZzYSIBrSMwS9ucZr/LbBaadgsFWpiZIcsmdVqQmWjnMoRM/wzcdN82lt/fKsNey1usQAq1bJM47pbudSJXTIjZ+HPt+9V4DqmjAMzljUX0Comw8RYACRetfwT4WaKSJw1CZkRMyGSqt6T5R1sawvag7PahVVdzrCeR7drwhJEk7vTjWhmQw8WSYEsM5eAkgUtoJTmh+eSlk7QgpqLEoiQEjTJk88CYtUc7X6paIEM0T4a94r5MkgiOREPwkxJTGoMlACORjAAWqbgzE4CWixmuSzmteJLCaGR9Dz1jnf72hjjaK/4DThy3LNyV7sPjlMxbRIOQF4LCJBGGZb7pLg7zyMqOVWdxqMnLcc0wS4etHyzxx3gsty+WoC5TVBvoiDNPLqnSQx9HFzKP3lzkUneI813o3FBGKRVYPdmQaBq1j6CtG+lEUjmh8zHavM0oEaj7Ndn/95HIJ/wVRpIeLOoaaBGo+AT01gsnyxxBS3TJj2ZIGZ701IMZC65OfdelKcnsbZwU0+SIRcwXmjH2blrD7Pay8z+LTmXADaLBQi8TTm4Dz1o4FqTqNJV05IHa5MEAEtOJWgjZ4APEYkZE1vkoYBNTMBkpsPBpo/uiZljlGOoARNTRKQMrBk52OJuVROTNHxwR41ikg8FZF79gdoc2HVGvZx6C89ecJoHuZwsqlHMhACBskrFajY0x7iA11RMCYQDatuXjul21PtMGgADCoMgZaZW27CQnzozZzizxl4FjoZx2l/Py9BqXRynMtaiGTrQae5zb5J3IhDvS968vVu9wR8PNIEvPtoR5MqE7eWeDU/qyj+ZuUgFt2SO+YwSuGHJwEAVCC3UOwAlytZXxDENawRiASpcmpblTa33Aa3NlwMBovxuMH+oa8VeDwv7uvNkoPFJ+7qqdnSJkjXLiROiodcjkv1XgMZdqNGyXBBXT+a6ZqYhgHlTYneJNmVF/zGg6UIVtGQ8XSzAPelBA9dtYtCSmgmYY0BKNkm175aophkvloQ5Ep4eJlxNJBE5CCWM9hBIq2o4P4VqAJMyzEQuWTioz0uBb44BwIqQSVR+HZdFGvbBG/Z9laIGTPVU4glacCpVARQkt+c6eSoAbX25lJwp0Fq+QO3sSp49zkE2xCEeRHq3qEJr+5Kd9Nsz9xPBBd4hbMzUA1ll/FtNgTS4ZmQSbK7hXvfh9HVuIFofu6hHM9e5/K6mJBcbwo/vbWhqGg6wMsYCliMa+Gc353d/m3JgITbV3g2Q9jRboyaxVn0za1bhMtfQ6F7K9gBVOgcHX5XCNIctWNnn5dbs3l1HBnIRrZtp8mbQV6ViUq3ry+TOUeSqBWYAFUBk7NblQIE4
1GjiYgIlKtYP87OXe2GJLkwQUKljsGvX4+TaW/Jm9kyiFUm5tQpgxfzYUdG42Akpyn+XnJH12du4TjVHvYQeNHD5TeDV50hAVs3LM28DscTScHIKEYnEpCE/r9F9yKuYfaybKgBoFfgYJvHnkETPEOThpizmtCdzLFnhmVOTIC1RQnXcNr4FVWP0FAiNBlb9X7aY7ThqvvN9yHzVe7/obP5itHGdnmNkwqKmpaiaKmB2epSooVi0PUIMB0yqxXKXwMvZXiuTTa6SfvbVKRwAAK120mkKFCf5PB7kvSZ/kubk9dTvn1G1jb51iQCHXLMIAqZx+QCAJoCimopOBk4MB9mZqxwwElklFWqCWvZMhezmqgCUzhXCBJ5E4DDtKefK7JpTdX6O4mdR7YohjLm48cg0K7dPgzDsObSJtKVyzACo6vW30ySmMsfu7f6teWu3fuS7HdAaBXDY7wal1MxMZkC1lzjtpzKAwKTWG/0bSLSrKbQmZu/bA6p2WoYLNHzFX+tcpLFVNpSoelesV6OKPYABKHmm/X15HsfuakHVOPtrn13ib9+jBw1cQU1iyGi6Hs8hyAIYaBzi75JcrxjkYZTQWWPweZX233mVkHgjM8nELMzYLXpbppFlgy5BYhSMTPvzjmlAAg3kXvQ49OOur/vbacGJCojNoTJUL6W1ZwMsWNlybAzAkGuEpGcYibmGoWeAgh1HZQWXmnEQTS0xkINEHAaN4KSsXaIBYcIQJuP9NOQ0M6wrLD+K9S8FKYFK88EVE56AfABCrCZKQMyKnBHUB7NHpyqze+Zt5yCi4jsDzAfnwMmZ7xgYM8NygTNgpr6yPmrRF1/m7F7v+bh60DItazqU92buG5m4bH581N9eWHuAClBB008cRtTcQC29pgLjXkPGc5QBCfSE04jtQufImxUB1zdqYCZ0+XheG/Wg5aOL+6nxgiDZfUJ4hu2lU76nPer9ZmUPujHYIQ13UZufCCAuMEaFtQBoMXF3ArS+YhNa28Cn+r0VYAhMBVxDIKQzlqRT9KCB61pLOEGDLwBpFnk1jSckM2MOkvt1NUlk4BRJzYNyrjkAdDwKYOVVykHpguYwSdRcTkDOEomoCzllVaP1IRtgAM58Yuqye6q52KqrOUD8SXpAGGhhG42KSoWOPrkZqGaW8ZwIADGoFDpdkAWglO/5SMWcoT4u6G/kfeYacWhmIzH1mC9Pxiy9y1RLWo8gnoCkWq2BVjoCyxG8HoF1Aa8LeDkW8PJOOpoPwDSDpgPCzWuixZmEDQWvnIELYySKZNvPk3tdmETHhMRIbOYWAbIQnGnP7nFgHgXGZsR2EK5OpbW6afxqK9gYbufTKSbUYD66QwExnq8FwFxwhWlPPQ9VPlcWVAUwdwxJPpX5QjOqgFbnagxW1oxxePt2LwPGLq5LTZcgZ+rfCXAZfm4BRHCHDMwQ3rQrIIEGtAwsRsEHEg1oxbbtvRxvS3QkKDTXd8DQ5iQKYPkAGJ831QeK1aAraMYOb8Crdmbw13f8azQm1NQZS1cCAmag3GQIAM2vbqJ90MD1vuuIpzeasa8xbzZ5VlDX+ycsEzwGKQ1lDP0qBjmWM2i9kxJQzlRYGEAQbcukXI5TKQvlF2ldwK4ob2K193aZ8NmiobhoRVEXswcbs/dbJQ5fj9HAymoi2rHGTE/V6fMS0hTsb5TKzQEaAdT+zhZ/MQtmiH3ewJtq5QQfvRkDYWapzB913kjD5ouPaL0FjnfIL94G370U0Lp9Dl4WICfwqhqwAle4uhHgOlyDj6+Drp8gXN0gm4/LdVkdhQGPaARa9huTsL3fQp6P19ZqGLF9VwGRAMRqagv+uHrNzRNjBd8++MIVNQYfmu+bOyy+GAWwvmsxA5y3tQabUzhzFavpKOt8lTkLVDpEw7RztMJZvy5H921jbyLy3HOz51B+r9rWomtSzh+wQV6Ecry/Xul6QAGWqCz+0e4xDIJVcrcW6lTUG/NzN2puxM1ottSv3epP
kvxG621mdVTFr1T50GhcDT8JVolEe44z1RxLD1QOHJnr9PYtZAovC4QJhEOs8yB7gxAP94u49fSgget6CriZfHuS+kDMpu6rhAN103jTWYT4V8z/UkAruWKqAHy2rDEA/wCt/NOaxV+0JPFnlWTConG5myiooOa2TsPyTuoQtqBVS0qRO9ak/irdeuwqTBetmp+YNAqv1ptjkPQ2G0Q22Byy/mdSW1KmRWBwAjhUE6KAmmh5URmpmbdEUFjBx1sBrdsX4OMt8svnRfvKRwEuUhMQH29Bh2uEdVE/jZrC5pvWz6RMjN1G9qaTtndapSrNVtDyVb3rfHJZX0Dvn8Hguwp4Xsvr28QbmR6y6dkWuiCQvRDRLkLQggkEoFoNa6QxBKollprT2viMGdbLlZH7eWg0T48qLvAENgZ3nYyx4AC0ZisiOcAEqJ5ILQJ+3OanK+PrTIdDUg0tqDLCqoWWtdCBcd+V2uaj3F+3vfa1GtmnQdU8JvW/Mjfn8KDly9AFNUkWz0pCKYAwq53b7+Pi/+rAKjlwBFqrjDz7avVp/JZuztcdy9gl9KCB6yYGPFF10xZBJCqAVSq/A+2G7k0I3FYzb/wre5FZzsZdI2nMJGh5CwZWrpZh3gY8IPDWHEhwARjVhzXHUML75yiLLZAV80XRKH1Oi5d0vabAqGaKDICyaWFUAU0XLLld1kOYF8yqE5d1/6p4q/aHRJCiwTyoB8cZvB5Vy3ohgHW8RX7+DOvtEZwy8rKCUwbFAIoB07IiXi/IOYMO18A0g6eDJJN3z073+vAeNo+YqJH0PWilMme1fJF/bvUcpmGdBrUe0PaaS3qfmrK+Fsj6m3Dr3AOtaVd7muRIG83MQz+pjasGDbjxdX6+RpAoPw4NqJYedP7aaJ+BH/OGGCV0exRubWOz6DgLRoiBWvA6RR3/IJsDNsBqz+AFydjPjZ+H5nUVCrNff/rCgLn3PcnXvAEtn98ai+/AmIHwoKDaajtt7R7wgOXTkIzMlxWJMaFWrbfOGLVkHiFd4n/coQcNXIeJcDOpmU+1JFrugPW2VH0Py0twUt/IpLcbqmPaiOOs3+lkllIZnXklHsBRmGNCkCTLZJniKFnit2vG3ZprOCijAa3e3tyT9RMzACtgFarGdYihgJRfHN7ZPdIkhBFQp0WIZpS1yrTZ6oEaqFFBD43z139nr2sAA8P6JCHUPJ/Ibq4BEDMoJ+TjLfjlc+SXz5FfPAPf3eL49gssz2+RjyvyUoNlwjwj3RwwPVkxpySa1zSDZwEuYm6CFAgyD0w1+bGXhNtIrWqG6TVrA6x+bjwVAMdWk6tSNzdacdDf+Gcnx421tNG5Zex2x9wJK2NTp5F/V8G1jfIbzVkDWJaz51vA+A4Bxvh9NKi1sulMcHWu9xstbu79EsDSebayRpcarZoE8bxKQA6AGEIRBI2rNACeF1BWy44Jyc2JVcAwXuP8kdFSFQgoJUUMvCAh8J5q3dYWtM5RZq75mp26ar4yS3VYcnV9GIkWF3A1mfkROATC1RRwiDVv1jQxfq/6uIofKx0LUNHxBcLtW2JmevE2lufPKghNM2iaQfMBdP0aEAIoROBwJQV3LcrKuAIFWSw+xFpBC/FQtKyidTV+LRcC7xaPOWdrtF9rygwd8/QSiu/kPDvQqgvCQKw6uX2QhifZ+FQYQ6RWq4gYO41r9JCajuw76tsvtGRRX8UfMBpTcpGDGgCTlxXp9lhAKzmNK5vmFY6IhwnheCvBHMuxgFapMp9XhDCJRopa9XrEfIFqOspq8gw0rrph82K/2UyyUl8ExfIAq1ZVmwNWIKu+sloJQYQGOnFuf+mRX2+P6Y98TedAa3vh9sluGm32WkbHpIv5HYDvrMzdPZyj/rAM/2y7IAU/3sE9bFrxWLCWHWvBMegAMOdSaopyqm6IgbblNc4C6BxhlVp6La88X7fImFuTfima0N2Oj0DeoxJpHGjsJiAgU5vPWa1AKlgHEa6vJ6pxBCUl
JmNaXp4exAl60MBlROtRQGu5Rbh7G/nLX0B+/gz84m3kl8+BnMA5gaYD6HANmmeEJ68LkMUIun4N9ORprYRgIcLm3DeJZ7oqDQ7NOeol2KKp5BqU0UTkBTEV+gaNRoGqz8h/Jn9r7bUQzMQhZjiiQRJncMfTYDMW6Q5aeqWaCw3EfMM3Y9j9cRKAYQxFAkzsu8sf3ljqMvDKKSMnCfXOKSMftaSSgdeygg8T0nHFlBN4PYJykijEeW2ZBgXMISAxFQbjwcrPU9QIuxKIAAEN4q2U602wzT0MjFk18qocJNCoGpWdR56raMaAJMebAmemsD3qtSkPYuW7oglWQOxNZZeA1saP1OWclfqU3tfYaVs+NL8PxR9pV3vLywsdRlm1Klu3I79X/3u5yI5/y4FYWbojoCsmUsbQb26/Mz+vDyZSfkNqBTHhy5LwuVtD3Pm3du+vuB7sfmsk8h6NQUuffQBmB/sSrR1wPUVtHyWa1lUMOFCWFKP1WFKMaH2PAteqABFIasTRegd+/gz57S8hP38bfKsmp2VFOkoB3TBPmK4PCO97jnDzmpiXACAEiVCjgBxnWUTxULWsqImaqMmGpZTNiYUjgCVcwfLL+u8NtNqcLFkcpfWJ+740naTq9PR5aEUTBeqGMfL1nihgchJvAWFUKas3K1oxUAaQiNqkQxfc0UR8deSZN+k4mAghRsnP0n8cIkIMCDEUrYJiAHfhbgZsEja/SHDH8RakzxNRl3lcQXGqUi3Q+l8csSaagwIoaJcx5eyi8WjDTwN+1Gr+FThcThu337UBBYzquzLtj9t2MrmClWlcexUMbG7tuuaP89c3shydwN4sWQWri7QsVC0ukAh+JT8vBElH6BJqixVDw/CLudpZLLxQ6O+N9Hp7UZimZDBXYEbzff2sGFcG97RZFz1Ape13myajfZCXqyVZKs3bPAAgrAJe3j/p/Oml8gW3czMiS/iNTr1qCxW0gWBNoe/Bxo0gBBdtfI3W5XGIQV0WwI12mL+eAqZ8RLh7CZhlTNNawstn+4M/Qw8auMzWOmnyJIVJTIGHa9AqDMyIc5XWVwDz1S1wdQOEKMdfPwHHA/J8Az48AU9XwHRAnq6L9sRp23a7mgCh/qWAOUrLkhwISfuFBRKbcM9sas0xHxXYlrsx7cnnhhWTkpqVTAuzHkRmjiiNHGUS5K8Jv1QBrLGnA5jiVENd2XxfjBxQa86RgTgQGg2MNowkoGU0ZnYMcQLSov5DfXY3r4FuXwPlhAhg0ucW5gnp9g45ZQRVV8M8Ic5TyaezkHk+3oLuXtbcqCSgxWHCpp/QwHxFaSnaQAwTDmEqQG1RaKVCBCu4u2ANSb0w0BfTYOaxdla1IDuOlDOjJIKSluDxy8fX+6tzXfOE7DIGTKc6Xpfbd7+5BLTsXqHfVwAOQDgMTVJVQKqmaTMDek1/zyw4Aux2TJoHSa321QfF1CAYvzAHmtaeqRNotScfjKINRn3QVxGSmhOEIrgVjcsnhkdzX4gFIKP6/UYkQSZ9rUP9zmlXnpf0ILVRootAI4EWfndX0zYwxdpd/joSriKB1lvQ8SVoeQFaFwEvA+/1Fq9KDxq4rIAnJpPcCGE+gK5uQOuiAHZEwC0mAClIJFqcJ9B0KPk/dP0aeLoWU+DhCfjwGng6YKUJx7X6qoLbCD66jLmCiNec5khY1PMbMoChG70CFWCaVRv+3lcAKZvP3hcQoxpgYOaKUcuLOoN6AtccUd8jrGVTTeoYFg0C2sAFABgWkWROY6lnxpuWFJ6q34JU6tZyQ/EAOqygdUG4fk1MhQDm1yQyNMzrRuOKhwnhMJXweAACXstRwAsK0JyBNGnUmonZYQtalqAbJhBL2xIKGXGSigyrJmhaAeMSYcjbKE0BNgGwlEn9iG1PpNEcyZnaSWNGqR+XgQYQ+vk1LSM7kPXH9tdtA0OweT2iUbi2ndiHmyfq76Nq8nvh93vwakDaj7GMw7T87v7735/BPTn2DIA1
gGV/u/Jlw4r9dqrOZLqpD6mFju04r2FlW2/dRNmztaRfq3TjaQRYQ23TgZUcQw1I1SjJavGZo0V0k5gG756D0iKgdXypQnTNi6W0Dq58GT1o4DpmKeTZPPzpSsBoOSLcvAYAYM3zmQDRsKYZdPMa4hsfRHj6fvCT9yNfvQZM18iHGxzpgGPKuEtJTGPgAk5GtoA8ycNlXE8BkQi3SX6wJEbScFOjvghurSXYhr+3wRnVnyX+rcpwvAZGAJqork7qG9GmlTwFRUTVQEqHY6l8Ycxpgeu66irEFxMZWmndM/mUudQxBDN4EoCi1wJCloZlNB1AISLciPkvH2uABoAqiMQAmub6fNYF+eVzEWDWRb4LERTFZLUpsOrym+R+J/A0o5aSkooc03TAFMUXY/fgzakGZD2wWQORYk4V59bQxLwFrXbxed9n88zteHuTjTFJg0Xi2njQfD899YFCm3HYqbHVjMaAU01bgH/+KJ97U+ZmPPBrfOtz82RRl9aWKtK2fNIp02AzagpD8GoqzPdalgctn17TAxaA4jf3vnSnafF0KMesXP3le9pWIKdhB6mEEwaLqwerjdDTAL3nSSipNhbZbKHuAZoziwxKd8C6aiGHW91/t1IdZ10l3UXNr/n2+e78n6MHDVyemEh8UNM16On7Md28hvzydYT1KFpHTgW0MM1iGjw8RZ6vkK/eB756ihUBx8S4XRLWLP2+fGAC0D1YYyD6T0w7hDAHHCJwlQPuYmh6hfkKyaNiuwA25ZxqcAaVYILCvFAXYtE53CbyochlY43s9t58FjppsEiCBGgRVquUD0gk1UoANJTWNJLM2zDjDAMtBT0izNM1KEwSqp6OoOUKIUyYXnsd+e4lggXZqAlwWo4avJHreAHxjc1aTHddwDmJ1vXyORoPtPpbKEbV9jS6VEtHURT/GtKhBOqQmaPXO3CImLQv2GTMBkDiCmRiRq7mxIWqsOOjMmMHXo3WgyrZmmZ0qqaf3aGBCgVXBYGVsSuDix3r9ozMR98V7aXT1PaSgUcgNvKzeRNjDw/VkiDafOCKzK2UP9BK9a/Ma73f/l73Es43Wvge7ZgGm5xQO3QgFJagFNtffWV+V+jYJ4j32rOU1hJLBzmNTJL9T8PzqUAVfwxRFZjnQBraLn8ny5W9k6ALSkct5qCv8yrl24632FS+yaN42MvoQQOXLeDEwDQdRGqfNeosrqDpqiYTA/D5W3m+asyDd1mqyR8T4059Z96HBchD9NnkwljkOwmzBkDQ5FoNlggRSyKkSfIfmqTAvpaSuy8ATdjqqcifk+tvIA0CA8nRBY2Upoh6YanQoGa29QgKWYIX4gGstigJQql+Lg+kJQTetJCMYl4M2sphChNwuAGtYjbMACge5BlOkr7A6xF8pyHvKpBwqV+ofxWUSkh9qbvfzZkW4sU0ayBIkGCOSXxbdLjWYqsqCTMDdBSfQzwAIVWTquYfTZ02iswI0eo5opbG0k7OZjYc+YFOgZalTNjr6NYggJIIa9VPNhoXxv6jU8EeaP9sgib854D7bgBW2b0fEds9a8APyv3Vtd736WI9oJ1X2oDqOWZ+EY00rUF9yEIOuEqvMwdQm+hKqhXnswNjT4Go3meZASqmadZ+g2005s6EX0gBFs3sEqlzjdylpFGD9tppWQW0HFhxvkBA2KEHDVyHIM7AxEBQqZ1ClBYWXhJSKlGCWliUNfz2mBhHBau7NZf+QcwSueidl6KScwNeJcpPP7NabZkJU2DMQZpX5qkm8nkNDNjmCA3blXSb7qzE1DuVs/N1edt8R20uSZbEyai126ziOkSSlI7EskOyqYNZjkjYmjXMV2O+sWMSgE8sPbymK2k6SXGuNSPnG1BaENRGjuOdBGBYEd6U2o1hgGavew2tTKCaImc1I6pJkeaD/FZBLcwH0QTDBE4TKCyN4xz5oJx2AqZDo41mqKaTSTUg2gBYnZuaXwZg2P3Xt6qvNfna3L8iTekziWQFlFHM
kyMz2mb5dO9P5YSNgArluxasfE3RoZ8GArJFkyAxQ/tj6nx0v4dYX2xevcl6U51mcH8iKIRyb2QO3R2fFzUA1u0toNWyiIqQ01Tm98nXvqUMtwnY0Pu1s0dQI5T46vI279Edb6kVXykwCwSQTzdR0LLw/9LVQQtlvxsNq6cHDVw3s+QJyAMGiCbEq6fgw5MtYw5TLSaqwLEukvmdWIBL2kwzbtc04nG4nqKYIUK1+8ZQq1YAKNIOAFhdMQsVl4AOeX+IKI31+l5YTdsT822dkRQZujmB1m/Tp6eObPIjUn9PCdXlLFF5QAWv7CIR+5BxB14BdbNY8d0EqzzNCEmK8R6TJVFHTPNTzFeSW+clOeS1hNSSVpKntJRgDD7eSg7X8RZ8TOBUowyR9b3zj9F8KObjsC7A4Rq8LqDlKC1TzLxsvtEQgdlMiBMQZ2DSivRaAr2E0gdtoxLkCRXQCn0ghz1vJyCdACzTsnzi9IhiUA2E9ZkoAyzJ5n5ZNJrJ6LMWqOz7EVDJ+61m5f0zq/f3dpqQFIuuwg2y7IGkGqyds2ia7j56LSw7YdLGBaAREDYCoJkLO7NhUwHklHlwkDNZXu+B1kDTqmOtQjFQ79sKGFfhoa2G4yNdgXpOH6CTlVd5ADMAFwFDTf8aBi9WEvkFEeEwX9dnMIl2VTSxaZIQ7hBBE8A5NOZ9OmVGOkMPGrgOWDFxjX6zqfeROKBam23RthDGMDxoHVOtKWgmPaNAhJSASBlzDGXT+ERfYyoAygsGIXLr5yC1yhGgSZ/iRM3kSrOMYlkdZf19VsZvfbWKpEUQU18fuYRuM/q/A19XMcHkVbUNKXHD2hGak8w9hQmEUMop2YiCgngqzK39ay0TDMSizo9Vkj/qHM/xGnG6LiDG01VtO7Pcij09KqjYP83rqj2mAU4JeVmRrSt2DtWfAhRHeblvQM8l2pf9DdH6fcl82Tx4gYA5IwTLvUEpMxV185eIP4hfFEATMHEJaL0KWbg84JRjR6f8VXuA5cFK7gPNd6MCrb70WU+sjqkE8W+xCTlMEp0brHIIlecwsj4MPz8xb5tj+/3R+7R85KDbQ6WyhgfA0fkQy/HsjytDpdI2ppdOyGnMVUDoBKLu/chM7YVKuNfVp88F3KDgJfUStaNFDIgKXsgH2TvrEYgHBTOAcK2FAWKrdfW5ffegBw1cdHwJOqqkMh0aRl2ShLkmCPdVvS0Xa83V/9T7oXw15cQMi1srEX+hAli/J8y/EFQrBGShBAaCmo8ABgXC6sVf5/vqqy/7QGlr150VHCmYWYFqMz0Nc2czX/XqerPhGOBUI+5YsvVZo+pINa9ea+O8Nv4uYOzvytjWPbQXRJYMq2G2GeqAZxwtwlJBbIpqTlyP8rzTEVilWUSIUTbychRNSatplPlMuWhcKWWJloxJpcEEyrlIjQyIPy8nBbZcAE4aWG7BapPnA2EyhNqE0yToTbuJHdCi/nXn33lVOqVZAfcDrJH2dgq0PGD5Hm9m1owaJifCGUrggUWtEoSJigVk3AbIBLxLqAEt/wyz3xutptWDVh+F2NQ1BACKjSZXamnCAZv6lsv4B5phFZBNWxKyLBEL5DDQStn5OPUXVoezlj6skcCAAZ1pXy14Lbqhl6zaX1DwMj7ixlTBC7IXsxOMp/docAalO9Ci/bjWuwJa+XCDGA9IqKBlG8fsxoB+xgIFUwDWDITAmBHqQ9XFIdUoQolq8mWWekmtPBqqkpHYoqvpQ84tTD5BN2qujCNv9w40yxdzDGAzr+QqPZPGARORVA03qcdLzrTWvCY/l46REPtsI9QOvhbqq+eqwRpiQpziAaTmqazmKe/vCjDmp/fFzkRRLi/Poxaf9YIBu6imGYerg/iollvQtAhArHfil4JeJ0SEnJF1EicAeXnF/JEQikZXnOxhah3tmixq0vNWn7B7b70LPhy8z9XqIw2NNgpCc379rDHZjdqxnAcrP8aR78qOK78xvxLX3xtojeiU
xUh6TNm+UaYMDcjSJRYZF5lOgQv8wgNq6gy6YIShyb0zE5KCUQmvZwZIAnrE/K7+1CgBT6R+Y0aNOG0Aa3QtXWtWNCCz5M/Z87FCAQ3wU+3lZTWwN3PvNDoPXpQBoOa2in86YJqv657wEcnpCAoustlo2dsd5+lhA5dJ3e4B8nQQBjlnHKbrZtMSWykjUv+S/Cyoqc90g0yM2YIvDLg0/Njah/SA1YcOG3mGIce192DdQr0kar10ekoJYl3QChzkRLNEJoXKvYlJyRhrrpu5+KnQbjJupR9ibjUvQDYaSwkfwqrBGtgGa0DmKdimS3IEA8U2MZL06tzoJwwQTONtg2QmZXYHbUjZmEApINxowEaMyDnJtUIQ6TBW8yEaH9ahBGxYQWaodlXSKKaD+MGClgCbryToJ0y1ALMyqpGD3SwBXojyTD8QSqKxt9DsVaDoaaQFvSpoAdgkLI+oBy1/jaoN2DOumlVPviOvXdtfw0CMXceBaMKgBSrwV04jlWjBzqflQKsEfvWa9gBYSP9aPqCk5wgYSrK7XksjDq1zRSTnF/KMf+CEt3qGtgdZBVsz32eom4Kg66um5ggbIVhJusYAtLEkiaVHhBGx9KQMxDgVRCnCbl5VdZs288LTezQBGSnJIrKeWybl6kIhCpjDoYAVUB8iOSnDpDX5DcQO3Eu7qM0pLUTZnqdJHsV34LaLZxhFmxqwnB60RtGGkYAlyZ6doyw0iyRKCszItVkjAknVi5gr+ISpaGEcppIuIJFYI7Grs9FzluKfAIjWCloQQZJjLoEJllsCACFXaa+vsNGbKWxO9aIoM92gvlyViHCIFiySwLNtiivtABBBWnU+aJPJkkvic/tCBM1SSaUEYsyHAlb2XupWyl9Mh/o6TAJag/wbD1q95m9CS7lvWOt03uRajeene1xDP9N50PLU+748eF0CZP4awAlNq4mgbAFLNCcTHlHuyUpgwUztof4lloPrMQPz2oC80FBvIFdz4EDTatJsBtpWm26iXQ9ITYEGXuwAjHPV2FmqtcignDrq+5n1ASBmgdTvggpOZqYm5VENiKH60Uzrsk7y22hNKs/Gmy+LHw0mYGk1+7yCcCgBXqzz6cfN04JXpQcNXJKdTQJcnWrKAGghTFdax07BawqVIYgUW6Uzz0yG13Oalig7XWdh/et71PSMwzQ9y4L3PjYDLHstY6oRVwukkaQEcghYXU+xtBbIbJqjUGQgB2XsFCQyz+zuFECLg4UdHw30O+hx1pyxgJf+jlhznXRTxumgJg/CQoSkUlkKLjmXq7TuX3upXea1tbETQTmzSYiEOR40Kp2AMCFb0Mh8QJxm5KtrCcu9uy05YIXUBLirYU2T5PyZVjVftxqWryWX981zpQVOEU7k2Kbgrj4Rc5oTahHTgvs7C9SDla0dr0U1Zr/BOTyvsshDf+4IjegjLj7L1k8yHFbTGsNbMEZAVfPW9skSuC3yMOsaIAI4sybqc5kLr4HtjbGZA9NsRuZBX9ZpJyLXQKukaZRcw9wAka0tMbm7ZHft+9cGVu3MRa+56rFe8/aP2vyFFjiFoLlyGnghx1IzT72pugjuVJ8n6x4OQfYdpoMELYUo2pbOazP25dX14YcNXOkIWkmqKmj1A5mKJ3pAAJZbxOmAoGV6rFisN1X0JokKGpX5eBrVO+sZg3zWVhew2ooZWtneQKt0THadko35ZHaFMfV78q0IEma3GbxPoVQPQMAcJkAbt1HSCMwpiwFcTRiU2+i4DXX29WI+tDwvNVXAfALq/zmECRwCspoxU7DySJYiIJuozg81hbc3w1CtJdk8qevgYJvdgjGscG88IByuajKkVtXYVN4wU6EFX8S5hi57k+B0VQBrLZpV2zvK1oSNt9G8HSRktOuvRDnSvj+mn5o9QOrH4bWl+yTi9iH00fkszYPvTb++vY1dZepupgerxkdl1g4zvTsQOjXGoJI/QRZIICpAb9qH3Pv29+UjW+MWMdhrWjsR
uJ4YNRG5rDNfNUKBrESxatWWcHXjQuQPm6TkHsh800kfUn+qNUwPYoEF+BvNC+06tPej+oQW7dqsVw3oIgDIuVa793MWAvjuPRqcUYqprkepfjChLWuU19L7BROaYrH+xntzgmgzXnKmhiGVy3utwEm5PbEyK2FaKK1BCmgl3oBW07m0BGeYUU4+y2o6BLJU6MiM7GygWZmBmejmoPZzzqLGczG6iOZktva9EF51MJc7ZDFsMWyj57pozRGtAEYqicVoPZcUqKia1UhNtCkDIWx7e3lem2E+SxQfR2KImdIerncUhwmYFJzzWrQWTqmtX2gBPj7XZjqoxlVfWzkny8UzxuDXgl8PI8YBbDULn8xuWn2gMdCcAyygXaP9HDbjGHxuv7VkZV+0lrk2EfRMrwevvWv12pW/776je2lqauMamNplfrX3llpECpjpNSzBe2gaRN3/bVJxpyWQdwR01KSaOKbskuN5UaHKCU0WpZrXpZqm51UqtGjgRlMeypKZ1R+2aQ/DrV/Vr83+Xo0kNqyG33s//R5oWUcKK/PWkwDuYBUUsD3szeRZetjApUQHTYKzvADNMwLcQnMMNACl9NGGOGOCglyTGEhYOjNQAYgd0OqZmantZgoz0PJtsA2w2oXmAEzD84VphI3mxSXIhPW10Kyfmz8I61G6PsPMIwIyDMD6KTUA5uanzK1qaGJxDUC2pngrNvUO1XTHIWKKUwkcSSGUjSZaqJgVF8M9nbfho9Lf5QRYlK1I7hPmg3QAoLyKv8vMPNbmxaUA2NkLMwDamnG+6zVDk9Wr2W8k1HgaAZptfs9C+qRjX4/QfuPvXc5dgbG/3oj2mLV8V9+ZllXOTSgloywfrZSQCjU/qHYGGHfe9manYnZ3r40xtoFPrdZqQLZHPhIPqCZWoJpZDcCq2d8Odr6tUfSeyxctn8H9Vv1frDdJOYE1l1CE7K66i53GJ7lf3UjJscM1MB8AvoblENbcsK5EFIXCn/p+ZjYHMjd19D6P0Jt/bZ68hkrNc0PJLfR5hc3ztnnx86OvzZq0njQIn6aHDVxxBm6eOimkU6GtjIpqBkiLxLyzJrIWtdVCYZzElBZQmsq5KE7C9EOVaqJqUHUt0GbBpMxIqAwic/V3ZGV6ObcJz3ubUjQnaG6ZbsLsU2yz+oKCjEW3WHSLLmmOF0+ompfNQ9KNZsDfSJBOcnI+rxIhpOegbsES1CSinzU9scIEirNEI4UJawhImTU/xEyHdY6rgCB3mCBzGNShbPk/gRhrJkSKouVdXcscqN/C15bbmESLr7RWOEiqHae1+iX3zC+XkO8EYBKrPZ89/6kwFZ1+3gLWOd9Nb1UYNVP01/F+NTl/TZjuS0iZn6k8cQL6ckN2brnefg1Gn8hvydKRsKm1uA3nqaTbogRMBdUYyRg1dHwO3Uy4LUEXfl243nRycO0iUC9azYhsRWbzChwAaB5g1bwUtNR0yElfT3Mpj0TLEXS8lUa3N0kCgFgEdAa0EEDWrR6KgHfKNNiTAXbxXwKapIxmURQLgK7PpuwY1edZpgKOD6Bdo3Vc3MQC3JceNHDxdI0831SAss8983ShoSUfw+UjIQhzbhaiaRpZK0UEiJSuETI+3NTnfmcVTUv+hIp0nNlFKp5/WOdyTSwxuoCdmg2zRlcxRKsrycmM0uiRAzWlnGRHV62IdUGy9RDy8zF6rWRmwiZqyMDLS6w5wYrVWlQjJdFqpvkaMQaV9PSmSt5bWzqqmHvQApg1eozGGImr9gIC0UGUwbhl5v0GS5mRVbvyeTCeIYzMVnvGJB+p2jPqvlp5XxljpHW+G9Dyvos6viptG1Wfh6v2wRVUjMeZ1B7JgEN+6PdH6+TflrTa+o3bGzMmW8GrzsOpLTMqrmv3WrQt4wt95JvnC50FYRMYYbld1ssuaTLxlZpNcwJNc43ADaJ5makaOWl0bBAAA4AQkO8CwhWaMVSNzgR23Tdm6ttZEzYPvpt0G7VZTcDlN92z
ok7g6s9T5s5PTQdavRvgvvSggSsfrqUuYaiaFYDtoioLTx+W1yJK3bqpOZbdby0hl5KYwJizmLp6M4GR9WsKhGMJGavt2fuCuj2ZtnEfqhoYYUmiYVIw86QxGZGUKWhljbCCOdYgCwBIawEvqD/La1abe/XUfb5rOiDRfs10iKiFajkD00E0WyYAVlxV7qsvHZXtvu0SHhj0WddN5zdqq+F48qYVewa+J5s/xq7V3CZtwcwDmZnDfKHcNlJ1MBYoEBSmtF+N3Y/Dj29z3+W4dmx2QBEMnM+jAS8M3lvpKqpmxtE8+HkfgZbdCmHL/OpYqWG+9tk5svv2811aACVLkvUXcs0didAUxO33v1XUSIuASZyAxSwRBApREuGtLBmA2sUA8CWQOCcBN7t3ADSlqslMWcaiAnVUsz8zasj7CRbSm4VtTor25X7rn5f5XO3zTYFnbIWIPdDacwFcQg8auGDV3uMMc/awX0xAWYyFXGWDpr22qe1EQJSImHIuzoJ5zOC8td1vugtT0GCRg7Tm0B9YVXBrARKCbPbyl7wt/vwmPEcKt7AeWCU4RAEUGvkj4HRo5oUplEgqC5MfaVWnwueb+fHFfl0QRHFAm89Rr3WYrkEkUYiCx9qOPm8rjfs7bq7bsb6zmqz7+amNvxe6LRJvex0DMt8QdApdnUuq9+1vwSooJLfJ2TEBez8iM431NAKt0bxUKVwsCKLZjqkArmpjsQNqf5x85iT1E4JD/ewSK0V7Tn/dkbZFQGs+NtDyxXLD1IJVPJSmjtz5oSNNCHES409eQessJvd4AK2ztK2fjjW6VQtCW+HnphRbUzYnSSufnMHhFiEn0NUKpIMcN1+LeT+umOZrCYISSQe+/9opKsEvXCORe/ZjAoYUwa5Vg6LTxDb7Xvln1dLRpFC8ar1N4KEDF1S6cJnmw2i4jC2DLb4MVBUfAEFtx7E7j/2s4ZO5uY4vrmmtVeLVJEETDKyBMasGkRgaxm7nl+tZc8nGXDPYuL4Ule/b5b/zZAvTgkMSa3Iy4MALdfy0gkPQaMHQAtjm7G4+gAbcmpwWm8MYITURtRaijcFMs2kFRUmiHK3t3mS3vzG33zTJvm5eSwqEe9x7VfrnKF4127zWPwrG4NmBAlrm7E10wRhsnxfkzEHmE61g3Zkqd5i6+XXM1+PnzqLsRtRrOWWudo73mg/ctag7prn3wTPdCyLYe7aj89s19rQ5eRZU5x1o+cTIYmNJ5hr8YCkb9mxKlKWugUxUU08svQTQJqQS6o5pBc0HhJvXassdn6LRg1lO4LuXkqKhYBeubuSZcJbuBOr/svxJeS6D2qC781nByvsS+3kle426fkegVeaU2tqLBl4R42owl9KDBy7zY1kCqAoZwvQATeRTiaqR+l39MKCb+NguZKNO0yglX5xdnJhF5ecs2sRyi+nwBDlI+5OkrptI0Kg/D15jjcvATEofOWk2GHipNEQ1v6sxvRQp3fLDZBWZydCDV42E0ntUIAGTAJhqY02Axh7xoCKHnRsCYKXSvNP0bP6IwobxA5WZM7etYexRjMgndPscudHn9p2f50giaIRAuI6MOUpnag4oEXaBZIWJz4d3JUphpjSeP2cxMP+kRaNu/Gs7zJ3Q+nUKw+h8PX3/r3dD/VlGYDXSuFDG6v2X+8EFo/Odus4ItMrvyp71Pi3j3ubz1XZI2dJX2jBzA8fi3yOZjWjmOxOQs4S3czrKOk/a4WC+KSka5PIMsUqrHotABCDJ8ymV0PnwGsCsgR52D3mVnnAaSBaZGg19RJbXasKRB7Eyv24+vWm7Aa2RctCBl52L8R7WuDhIFYMVASnVMv1E9WE1CyivavJrJ5iJagg4oEEcCSAFJo0ya6MOV4x68JSuud78BeAwHcDqhI1ZHqI1UZxyxBwqAwU6DSCOaxcCLkIt2IKiEi3ljZpZtSxrC2HglYgQaUKcxOdlgSvIUXx7FvJuGlfSc6t5tIxqNA+mgWpvHh96for2
jjMNwuo7Wh6cL5PlQcmoL6El+XL6ea6VTLJK0/Ic5LeBxKw3h4ApSt+1uylgDgFzJFzp6+qzAsBw1Rs0x6Xbo6y2vKRr1PtWfXCIhTbXyiJbsBoxJQMqMV3Wor2j3KaM6qfoz+v9EN63dkrjawCDxpqPJ6IapZh1ECUEvvcXchdtSW3gQPMZtuZQY6D+cZRgpRJlHEpuVEKQiFLWNkhO2/LzbPlj/lnZ3gqHqQY8eV+alavjXNvz5BW03pWiClLppYbQ43gLtq7d61LC5ulwDV7uSuoGaQJz0PJrTcBaM/nC2/w91VYode6b0Hg/hyNNq7+GAy+gVmWZ99T+C+hhA9fhBokmyatxjtoY6sbqgzQ4qO3VnKnqROW0CotxDLeaBSVRyM7V1C/rNTPTHODMX2kBQsAUJ8xlw8s/680Vg21WYXczLCpRNsOM/YAN3yHZSun0Tn6L9vJ5T1k3PBPAqn1V7UdNpqRtUcwXoHPHANoIrFg1K/veTIv2G86N+lQSJ7vKAJtoLf/MuTJ2SSWooLOkDvQdWPn8uMSMdVBiS4ALzW/Nrj8HxpwJq6q0KUpqAYDirxTTb40elRBjAShmnWcFHyLVeomGYJHRFuPdA63R/JAzy5j2Fan2rrov9RFglzr9e9Dai5gMzKVfmazHWk/PWx48GEbaj0r02hW6e27MlD4SeToUBmv/ShoEb0vB9UDpo0M9Ja7Pi0j6ewTt2sB5BeUrERTDBKy3xdceboB8F6rpcAUsjF6CNmpuGK3671p8apwWcDyCXEqHT1PptUp52fXvC5K72lMBq4EC0M/pBsDst3qePrn7PvSggWtVacir71FtDqyOwzrRzn5tpUzNXAiMwasHJG8is/bUGoXkK6nDa2/GsHNGnFTqJajpiUpvrsQaSkDV6UtBmE2ESaTKKDsA6+u/EaEEDxiJb4MhLQ1Q5yjUih5ZN3ssQLLK4l+PYiakUKRGACDLijfNCqgqIuetb2wwp+zAiv3GMR+Pjt3fMrNpI7zRtE6ZBPdAa8m55GaZD8PTIZo5V/5Omcr7qDWnZgQs+tkU2rYkJjiU8UNBiWpFcyPvy0ruvs8lOdu82N8evExbNQ3hXEftd0OnTj0KCMmo/cq8AFo0MKUepGxOJdWhnr8FlfE4Coj30YH6Xa3sj+JXBKpP71TuXc+ue+3FKAaxdFD0gHmrg8jSr+qQBZxC1vB57S8XIqD+rqKN5SwJzIcr1bi0DmJpwRPa+1XBsb6uxzUVQvaip3saAVUzsa2r5eS5ztCDBq7nxwwsbVfjNTMOUYrpHmJAzEdRzfPaAgyAkrhs2pMHLyNjuHndTPymhxWRk1imlhkrRWVq0pCyljmibFKdxcJxMZvY7woNnncc7NCqadVabWzReaw9eUzjYyBoxCORaA8UDsIcDLC1aGYxE458gJZrbNUD9He1aR6qZAvUTaVRW6U+GwUtWFuB+lXyFXvQyt1JvBZroGX/bE6PDrSAgEBZ+6rY/QckzpiZELUjtcoGwmDd8wCq8GA5Z5sxd4Clv9ol8w8RtQD/buhV5rrXtvrP+u+az6ACHQgUahSs3Y+dpgcrD1S9CbCJ0nSmWCMTbOyzPly7LyZgIBVBG6AaaXh23r5LgJ0z5JpjeJifIE4H0DKDwqTh8xNomsSke7wFVAMTAVGL91pghyYrlyLRh2tYix6aeouGe+1BzVk++kICALaWkAYABxVF3D4fBcGUGIFXoAcNXC9TxpS2BXOJZDFMhAa0AFSw2eulM/rMNAaL9jGKcQyEWteOpwMwXRe7c+nF5Mxdpk2wY85911ijvqeRp5y4+rdYNDf5TRv1xgMAA6AB8+LCMlOrBJAQokZIQhOGeafqhAfyUljTAjyK89j9JriN5OsChsmFHOs94zLw6nPggnecQLlMrkaz6ylgSYygJbQisVayr+eQahztps1Z6kLmLMf7clxBA1+MgxkztAaRIrDodxdqPqeOYrTgVe69Y+SnwKQnHywRsB8K
f4r2ruKj8cq1NNJMWg5RWX+e+maRjZ9lz3zlGacy5CLo4jxYsZvTxkQJNGb5Mo7u2qw/tHJYZe+z+MyCrldmKTM3Xz1FsCjp9RZYpLM7TbP0kwPEnNiXjcpZwA0CcqQ+MgMyX8yXLI9smsS3b5anvg4i0OzRjYZagDADbEUNOn/ajlYlysCrS1kPGrjMTIjQltEJpq4XExVX9O/NgJz3ba3+O85tiKpLFvQRSFI9fK6NBl3ri9rOoy5gX3yXB6DVg9Wm5blSqVmYoW0kTEKUliDla0JTLZvJ2oRoWwjVCCyAIyuXIQREk6rIBWC4QRQNC0CJQvT+LjjzIlAlP1cIt4CYzYmdF2Nzi1FUDuI1qkhtsrcwS1kcAUAQhR1zlNcyz0AopkA7j62tlpMm9T2a6U2eUXuMfWSm2gpeMp7E3IFL+/s9kBkHRWxBYe+3FkVW1sWZ34ngc/q874Yq8GqhVzcWMy9ucoayM0HvhZOGsLF6GJ3yJ8r31SRo1zXwbPLv/DiUSk4kBRCCPut6WwaQ0kfPgSYTDtO1aFkhSmStRdiGKInIGnHI6yJVNoBiKgQgwRzGn0IQIAsRHIKE1DvwshqJIdau3tRpZfKPGnOj37ul0DYAGJvtNS57ZnB8471qKjwmxjEJCFxN4jOagiZ2BgI4tZPjQauPfsvb49oq0QxfFBNA21bAgGqaReOyRoPTtbS+0GK6pnWZ49dMWCkLaKRcAWsUYdgHHhhJqRYAqKHaMViNNgvdNW0MJWTbGHux1ZPgVuaqfQHQjUQgmmTtAqJFhW7eUBem+Lao+ga5ixHzwRiuqLHdv4E7cD8TmAFNCbBwzpJomAz16QWNLAxAzFnmPUjkZzO/VCtcnCOrUmJ9xMyn6DVduSd55SV6HxRyiry/02OKHx9tjq3fFz8YUEL3R+Al2k8dkwVQnKv+0p7jsuPKtc1/5D4jAKUyhZmty3obD4aJWk3ABCRHBlo+atOfzUDeTHpRUyNsDKXAQW99sLUdJ0k5oYBMtfqLnLuuazPbr1k0sUO8xuH6GpgOyGECrVeg6RphmoDjHVjNgxYyzxZCr73mcrpthGyKsZgOm7/zQUDONLJQjyOzKJmvzFlGSM37IshqAW1DLaBVFJqH4kDrvQpcttGmQAW07G9Ra7WhGQ0aPBVgstB2I+fXMvNgvzXkoeqDtBbuYRLToL0OE47ZIpMkEGDNomVZM0Fjzvbag1afm+TDuuU93GupGB+IgARhyLwFMGGO1Jh/RmbEErihB5FerzXXBATrZaXzZuBEzgXkg2E8tT7BKt0ZWN3Ht2WRg36e9kLjjUQ7J2EozAgUsFipqNRe1xhoKdGkf3vKGSil51g03mKuRZ1juX9WAUKBBQM/DSrj9/x5r43JiHyhWdsF3JkrJTiiTZ6Wa16GUH3e1uZ7tOftaWTSbM2AuQGJTSpKF/QjYzEtXk1ZUIMxBViH4KD331gyuB2HvwcDLUrHNiWmDzoyaxnXPnW23hjcXDODhc/L1lWrTMaRIAD25CDh8ocFdLiRzg5pQVhvAW3rxLcvwBZCf9TPS9DGrev9dStaV7gVQNPPy2ugATTfUJUmNWHG2um77F/OQJh3n689J3LPj/hVDNBCDxq4CKJF1FDUak6QA0LJ0eAQW8lsJAno3xKMsRyRfbdcbfWOEIAkr1nPza78lGXam1nQNCp7byaKEU/opeTE0kgyZwwY5fYEtfBuVS36FvBNR2E9TdBNU6Lc1FtOJEpVMSOCGgZsxjHR1vajj3wi8qV+nUC1aO6l1KcMNH3NTlBUx8UMZVwxwOd1WaCGr9UWSLS5kXZkNSEFjKq51ncwVjjXAsLtObyZ6tTwT92ZN1yWdcE2r1XLtjwknzzdjAWtNtIC+vbeG61v8OwueZ4b0OqsH8Pvyo81EMMBGAfIJjIzNznznc1DmZ/2/gxwSusO1RqGoDW8mXBWuABkjq1gtIyfwMjgQJjmJ4hzBoUImhZJZF5mULyThrpqRiwB
HMuCtoyUNq8EgOWoWliWsHorl6fAVXLEQhSz4nwADhmUZ+CA4icEUY2a7k2xexGGXtt6rwZnSH4NimkwusXlwUsezCTRg6d6wHDWauXVhlw65mpQBl1di5ptZBFxrm+TZdpbjylrFOlBq1/qQVAYmUWDZAjgBNW+TIoPbgfkXMHtFJUIOSdxB2p/I/b3akpElnpiFglujulaIcAl2Oq4Lynhsil+PFjgQbU63BO0jPpmnKbRehppSwZePngjuJsyU6FpW1arDdgHMKBqXg14wdYBafsPLgKGXX1U1fyUptVrpt6k5+exhJyzPscilGjqBVrAudRMawAAmDCjc3jZz8s4h6BlfiQHFiWPcuQvoQCyzg/a2LQ8VfV7SU84WbzRnHiBmsAc0vGQCsUlf9PncNq1e7+QdkAoQqzu5czOmoDWJC4TDlhbGMpACkAMMsZpfor5CsKnji9Ay0vQekQIEn1oARyUpUGq8DF3bheViBw37g8KUeZplsobNB2Ag/QD42mWZ3ml/jtvVnXBHJtkZ/8cgVbweEV60MBFoCYMtf0OaELYQwA4oOlMapJTp4nll8/Ffnz3UlRwfbgUokT0XGmo6eHa+bhM5Z4KaElYNUr0IGCSXXVAV/NJ9SVkd3zubs5b4L3/xz71vjG5njP7lP+2ocA6AvlL6v/R3jwcqhkjhvrXStxIC2+X4NrZsfu0AUABzG94k4bVrGLjsAFeoqR5E2HO5lOs1THsmEgEDdav2pOb56DMrAnv1u8NvOYYmjJQkWo9Q0/mS7qEzJSnMkYBr+Gx7rVnev5wb0peep8ozLTHLkpP28e4ee8p7zyPEWCZHCCv6w9Gpl/7fS8ANaDlGJ4HrVEFG1lT2tjU8qR0gqwjAsKEKUwgjfqz/nqp07gme77IpRFpc03PAyiUYCwDrMU1Hl1S1VpPCZwe3I6Ji/85krhDYgi4OryOw9VT0HILnq8R4gHh+jXg+jno6kb4Vynmm2tEtGlfrpyUhdfbiGhd1Ne1gHLSDn/Qah1XbSd081PHWsB8KJT2QP8u6EED15403n9eTAbqTKQMN7HORJiOgD3sl8+Rb59rZWY1EUYDp3HHJVYVunY6rloWoMyChFkA1LR5AFCc9QBK+Pd2bW8ZQI2MkvSirEUyR9vCf9Yn2gIo/jCDyMxSXT7AtK8abGAcjkg3fj8pntk0txBq3pvf/PobODOOv2NSMLVxLu601b9VNa0ayNIGtbSajOOYqADlgcxHrJp/a3a+rvp6UHaLTItoBaw+Edfar9yX+uCVvkTSHm9M4CIYWK8483sxteuyGWc3Rirft7/xLOtcb7G9wJD2R47hed90ZzZsjregAWu46HrMsfsbSXy1WavWNPVAVTAjQAJCen844ATXtoq8BWStuVoBRDhp9/YIyJtSW6h7QSJlGdEiX2PA4fBEBG4WU2IgqfHJ0wyeD+DpAKm4YYV8FbCWo2pfAyDJCRspbPRYHEidBC09tlTneJfg9aCBa89+XqR+oJlAy0UQ8LLchQBYt9Ll2IKWal5yYsmBMEdlEwY+DLU1k0A71kA1gVHejxlWA2K9tKw/qMmpWrw3SiCAj1Ls88FOhdfL53LuOYYS0AENLsjefKh2zRCphHy3N/DupSq404oPpmqEzMJglkYD3WqjPjDD+6y29qsWWHwEoQGUvJb5N43LFze2yvE27h60TANpklWpBa1XMY160GoDOE7/zqbAwvTN32M5fV6w8sAyAquRZjUea7Uq2BjOFvvtAMu0sAJa1tXaH2t+GM6iyAMgclVc7DNUhhoLA66XJqBYbmgUjGGgpfmHHKQEnUUQt5V9tqBdrrNnZgYaU2ICIzZCYgYz4UrBCxRqzdbDFeh4jTw9F9BKSaptKIAR4MCrA2OX7uMDOEo4vOVdduXaStBGc28DoNoBt0vpQQPXHAmH2PY1KqBlDkNz1KIuUptsSgt4vQXhDjjeIr94hvz2l5Hf/rIA2HoE391qKKlG2ABF6+of
mI+IA4y51veSyFrNKbEwr9Nh1qPoNtj9oG6IfrOkTDXXjdtK6j603tMC1iReWci1jYcmJitocBDJPBCAwGpqoVow9lKpqo8q1IVfNAl3qEm/WVUFA43M3IS871HwHHaHDLDkvqn8zgBpBFa+RuQ5sBoBVf/oL0kQ3oRt4zxz9GRaDoASIAKg5JmVoA1qAXtzPzgNtpboCzihqRm3jsH5+wqpVlV8W+ZfMgAxwBr5moCasuHXYqO5kUToeSbaW1N6k7f/vWuEamkvS2LcrVzAyyKKfei7zH9da0QoKQk+JSCzfN7H3lkQh5gh7fCAw9VTYJIgsTwd5N7mBeHqBpQTslmT7m4FyNaj3G/OUpi3D5+fZikhdf2a+PYP16Cbp+D5ifjzZ5f2o6Btz7sXdGKYRAAIQUy1WebvVelBA9d1CLiehIlMgQpj885admDSbIqgzhpbsK4Xjpc+fNY5zYeSsEcWUUjUMl7UzRhDjeiTh1fL1AwbsI0YPYWWs/XmEDsmTtI2hYFjkp5Z5pPNutABNKDVt/KwOcrMrput3JGE8RJ8ZXmKasLI1CT7RpsPdnUh/S0YY3agVZK3UZmvugEVGETzk8LAwrSnQMiSeQzkrEyWXQ8qQoxWzaJM6FAQGJn/RloVYMV0qyBi57kErLzgAoytBueox6URaO1h+CUybr2ncafmkscEoPgzPTnnfND9JxoPNaB60v/XhU4baNG6VLO+qxXa/HR0zj5gYC8CNm19MKNGsQAqaIVJQCtzyS09powli7B4TLkREk3YQUQJwgI5Rm/zb28yt2ZbtBpayQUl6YQhfOzQajV5hXVhZo0axCLWIzMj1gtoUIZGFNLhWnqHRc1Rna8kcnq6As/XxZ9npntfiQRw/m9roZSza+n0avSggWsKqAnHBbTWIQA0S7tn/v3xIQIhuYQ8l89wuK5Je8CuyuuLcRojK8mLdt3kTBw+MsqHl44cnP09AECYEDXZMWgdPVFChGnb/RtoLXlbtw/Q4IBAGsSgHyapLiFdVakMKSqzJJX+ovlH1N5NcQLSik0Mpbs33wa9gFY3JiKIRhfgNoSAaMiSRBy45lVZf07zBwRFDH+7xki8SXCOoYDXHEkDL7Z+q/uAlY3fayz1t1s6pytlY/Y8/j2Ahmn082hmJKMm9NtpgqY9+m63c8A28RbYMnUTRtTHZOB1b3LmQNO0fFRfk2OpfmhpUtruH0mi7XwwPfUAOBIQ7f7cfZb2J0m1K7VweNC6XfMmYGqOhGjh56hCtV830P0bg5TD6vPLvC+uuANsT+W5hq076xNBTX/rETTNWn0jbRu9ltytGThcIU8KUPNVU2DBB6H0gGW9Ba0DhuwLGR+ltYTfvwo9cOBqpUBb1P0CtXDtBqQ8UFAokgVdv4aoUohv4lZAS1VmzPLQTjkwm4gkA1WX+d9EJvmq626j9P6zTXkqXZSl1liYMM9XiPM1jokBSJ3AYxKgX4KkoAH7UU3J6j/68DIFL4YlTkrYMJlmw6Z9aYPKeEDp5ZW6fA3PRBotq2oNGX4Tiy8t2wZA9eNhAmKOiJqfs2Qu9QbPUWsKa8FqDlLlvTf9oYzrvEY1eg+M5Ux7qtVhLy9Gd2HHjL5rGEd3QPVpoWl02Y+/FlruIuqOtwUwkI771f51HSJE8CSSdjTBRCNq7fnauJrOxN70l1fRrtYjaNWcpZzAyxG51xJsTmbRNtib8mxMBXCoMSH6ljy7wqEHwxBLpZeka9/nanrQultzsXC0ZueMQwxaV7SCUNl6EG1slOYgv69V8m0uRbPVZ8Asf60IQxEoV+FhXYh6YwlRQGKX5mMpP940mBilF6LsXwfQJb2DkWCBL6GA13vWx2XLyhYshRqOWfwkXoiyqBZdnBwiCAfwIctGmq4QD9c1IMM7LEMsgMXxgGxqsqnMmnTcY4E1DCwLym8GM3X0IbZu85yyJG0AzjZqeoIwr7iar4EploCLzMKQc87IxKViRE99jtMozBuoTJIh
0qWZ58zAHVx/r3bglQFYv6LezFV+oYzUriWh98IkViIIKwSCnnMe+O/OtfDwJkADq6hmMs9UgfuBVO8LKtfTN96PRwbYtAWmTeQgt3X1Rr6tem7/mWqMqOBVDLkbTUsEw0OAgMdyKzlDeS2VGzzjK7X5XK1OioeyF62mnVgBaJPkbHNk1ghyfizbG5SOtSiARcgpSQmjWCratBXQfS1Mahmmvfbl4fY0SaVNLuKOdnvKV7mX5hBAzToCWm2sny/flbjMnxtnBasJHAKQI5i7Chd+PoIHZzdvTgAwX1Zy+2wDWgNTeHlpfPBVIpGUHjRwAbJZLeLLVOWRY59QHYakD4WCBqZO13L8KjW4aNYE497koJUxzBnL05W8jjXpeFQRw5hMCVxw4FRKx1hkY4mS4raoLwAf6dNHAYVZWoJQmJoE56urp1hy7Qi95uoXSpwL+nvTGYCSm7Q775CCoDLX0p4lZTQ+sFI9WwHMzwdgtnn9jDcGxSZk3EwlzHKJIp0jgEhCjjEF3VDbXLZR25dyHf0r/pyqwUfTuE74p7yGMgpaKFdtTE96PaCAtzF0jMDHMQXuPz9hXOxz/iyVYFPFw3EXCxoys3YBrfWudulNRyAdgbVq0laklbVJIkIW8MixCC8Scu7udXfgPiCial7y11VFN2tI6EClAJYw2yFoFcAaWDDcX+I8LhjbHB/EnK3rIUDXhfOZ1lqZtqadhuVMyjQQhuQYNL/rtdWm6O9IE9a/RLlNEPamU/Pdec3La0eqYZmgVEvWjddgvye+kvSggUv2XO+7sO8qI/STZ99PYRIzhnb65ThrWZMEWl7IQTm3SXWlpJOClT3AzEi59gVrF5tULA+AlG7RxWESJK2LmF3WW2BdSzRjqdbRSZYjKu0KnCMVnMHpiMwZN1evK9PKSFr01phYCtwEYwBVwhtFz3nyi9fPv0jrtZyOz4Oy3/XPyFMPAGVDCgpqfhxhUVNlzEDKhJiyPJNADdP2fqg96k1/5je9VJtqAm26sG3Y5/WBNebS6EKIfWK4MQWvXQEVrHyYdO9f8M9o+1qECwMvYoBIKneYphCJ1Dz4QsDq7h2E5WVpK8/axJBTan0ih2vg6kk1oyvTpCyvzd/VMzL/nPtIQalrJz6o3JvwywMMsEaIxSw4MBH66EFJeg9D8PJmUBuqRSiKWQ2l3FEIodQxZCJMsbavuZrkerbPALevnPmYqOZ19h2Vbb2V4zZg5taeTw0ANvddLFLehOqsVKO1U9Zf6nxYe1rjiT3ixzVKI7qUHjhwiS3ZRxH1m7d2VG0nWaShSXrfxHaDx5uvksk2/xGEoVh4a05AWpIDyZpobBJ4DCaJ18oEpKat4mR2oMUv3gFrwnO+fS6AZXll2n8nH5fmHiiGEulY/G/Xr4GPryO+AeS8IlDAFCaEwxMAQTU/8QPFQFg04mmUg2iCbA9azAZKjCbHq9y/lZBijbzjjcQ4Ir/YGwnSggEg7ylKteopTJhD0NB/xhRqfUifB+e1plPUaE+dJjXsvcSp+iy7itdNJJqP/tS/nLVqviXFq1YagCb82ZiGL8gsz8BpU6dvS49v3yeFdotos/u35zcHgI6qaR1fIByfg188Q9Zirvn2OZrq44drWYc3r8kznNuotiLxq7+rj2YtTG0nSlEAaJUoOKiwZl+r0MZxLj6ZHrQ29TGdj7uY6L3p3o6xGQ6hNkcllw+qR0wltJtxcCZZmqiUcDNLgFkAPK/wAT9AC1ptTmH9vswXYwu+Nm/I4vuG00adyW/J0E4MPASiUdf1EVz5/e338d5xNr5XpXsD17/6V/8Kf/Ev/kV89rOfxec+9zn8k3/yT/Dd3/3d5Xtmxg//8A/jb//tv40vf/nL+B2/43fgb/2tv4Xf8Bt+Qznmi1/8In7wB38Q/+yf/TOEEPCpT30Kf+Wv/BU8ffr03jfg22oD7WKS79tNW6SeDAmxdsf43BhbOEAuYeMWNQSMAdJU/kPUMwSRYjMsqgwt
g7NNYpqWglZ+/kxKTS1HLM9vkW6PSMuKfNQmjkrx+oA4T4jXBxxefw20voaQxXeXD9eycJYDaBbzziEecExaVUPvaY4BIbO2na/UVg9oF6DNMXNlgMRyvyxYpn6qWiS3bkSTOuv57GXfU60wkvXYBbIkRaMJ03RAjBOSAqQEbdBmE+4lerfHjBlGiQT1wTWjkla9mWaHmcAc1AHwpa4CBQlZRq3cDp3nPnBlj4jUnEpqEqR2f/RUfWNmH1CQdoERYh5cGtDil8/1FpMm5CepaacNVEOMQDqC01wAC1y7ZoeOaRVhYDBXUiA7CwjNapb0DV2nyogRD60m0VMXLeiTmE9qyeZ49c9WJDaAsuQ4Bt3rbB3OAdstDKvQX9dhqX3pnt0pou7vrpkTEBOpapS+CG5pHUShRD6O8sxsPCb4nzL1nRJK97St5u8r0L2B6/nz5/jmb/5m/NE/+kfxPd/zPZvv/8Jf+Av4q3/1r+Lv//2/j2/4hm/An/2zfxbf+Z3fif/4H/8jrq/Fl/SH/tAfwuc+9zn8i3/xL7AsC77/+78ff/yP/3H8+I//+L3Gwvov5a1j0ObQaoSl3JpYTG7IjolbpNqqO938IqaVLE5M9AEL1nW3aiYBzIwJsion/by0RGBuzCGS6PyygFb68hexvBDAOj57juOzF1hvjwXAACDEgOnJNabrA6bXrnFze8TV+7Ui9DRrMeAZmI7AegsiwnR9wCEKU88gzFwBN4a4WwLKU39MDBbtx8VXQ5DNG1CTKqsUNjC7kduU5mT2QStpaRkLUAEgLcLYpgPidEBSv9texGTAvobipcShxudMV5sgmt6vsKfCmokkTG31BqCEjjObn6G9hz1zINAyvfJ6cJy3FppAJfde29FHQvW7Lneg9VbMgw60ROPSe5wWIF/L+xDB8wHZch81wILzCiT1d1l/p8EzaLRTYjnWj5uCSJ2Oc5UcQKL9CMLu/JtSUba2UpcbZkyfkwZYWZFajdLLa/HhxTABgcrzIZtXrfnZPzuvmfQ8fzQ37P4OccSbAzmjNNPrfFpWjsqsSL6Jpie/L0+VAWuGcAZ8/XP8FTUVftd3fRe+67u+azwoZvzYj/0Y/syf+TP4/b//9wMA/sE/+Af4yEc+gn/6T/8pvvd7vxf/6T/9J/zET/wE/t2/+3f4tm/7NgDAX/trfw2/9/f+Xvylv/SX8NGPfvRe46m5DVQ0o1JzzR2TwVidjXbJAkRLlnBVe70kxm3KpUirD2W161jo9FUMJVn1egqYQ8DVFDQZmkDZqte7pM00qCyds1Siv7sF377A3Zffwd2X38by/Ba3v/QML7/0EsvzBXfP7pCOWtHiEHH1+hWuXj/g+quelPm4AsRcqGZGun5N814SOB0RacYc1XRhXDyIxEUnxCq/pn25KF8v0DueayUJdmY3dlqMvQfgAkGKtuXrwqVjBTFlOASoNH4rEVOLSNvB/Bx9uO3O655BVDOg9n/KqnlYhJsBal/49RyZ5AuUmpnF5ESSMsAApniQ55AZU1BNFmLKte7AFnnYO+h7H6JowPIcrG18FRDqXBN535YKDetRta1jLYWmVoHStDANfK/HW/A0oySxxqNebAVl8xfn5hnC5tzehwBkgCnXSGH1WQEo81jmtQ8waHw65ECoK9fk/UEesHohpIAXlXHXgI1Y14uuH1vTxfQaRPv1EXhl+E5wMMrM5blL7cgKcrnxcYUKHm5NZ0U2A6Mi8GQgcy4BFiao96XCbEymHcpy5DLORkjCaTL3QW8azoyGd9yXvqI+rp/5mZ/Bm2++iU9+8pPlszfeeAOf+MQn8JnPfAbf+73fi8985jN4//vfX0ALAD75yU8ihIB/82/+Df7AH/gDF1/PVFADI6ucYJrYHmU1u9ytGbcp48WS8GJJWBPjdk14sWQsOePlMeHtW9FwrDVIDISrKeAwBdzMsQDY06sJT2YgBCmzFINoHcUEQ9RunFwlP86pdC/Nt7dYb49Yb484
vv0Cd89ucfulWxyfH7E8X5COGRQJ4XYFRUI8BKy3R+SlNSMCAAYJfsUUBRRNiNSk1K9CH73Wy/+jslFWLgrgjX9sCihh1jFgWPPQrrkp+wOgJKN2CebEk2iZQQUC02bWTtr2jM36D6nJriEDl67UUE183Wpcp/LuPEOxCiKcc5Xc9RocUJz9kUIxO/lIBpMzUpZp8/k8gGc4al3IUt8x6LPzq0OkaTWJhspgYyAgL0XTLJF8pygncA7AumgpoaWUFJI5k3I/bObH0TyNzKtqTiUWkGA1N/bzuhuq3lOvYTlf1kiLHp0fNvbRvwHJ/qoJ6AYqI/Jyo2logBPEqfbCk3NTsXIA40Cdprivf41xiTDhU8pXtYZlhr8GF/AqUdp+blSgEnnYBNxqyfBjWwcWnkvpKwpcb775JgDgIx/5SPP5Rz7ykfLdm2++ia/+6q9uBzFN+MAHPlCO6enu7g53d3fl/bNnzwDUyXDRpmK2y3wSuAIJEBlovXO34p1jwu2a8fy44ssvFrw8rnjndi3ABcimPqhGdXOIuDn8/8j7m1hLkuQ6GDzmHhH3vvfyZWVVdTerW6IA6mfBhX4ACiAICANSrRGbWknkhoAWlASQgIAWIHBBgNqRIsCFtBG1EHcSF9RWCy2GGkEcQhiAIERiOAIGAgbkEPo+Uc3uZnVlZeZ7796IcLdZmJm7uUfcl5nV1ODLaQeq8r77Ez8eEW5mx44dGzANAc+ux0J9PWoDwsxUjqk0H+xaqJQamJwKkzAtK9LpXHJb5xdzMVrz3YI0J4QYEMaA8SYhzRlpzmDX4dm0FRs9xUtzAfHo8o5OTiR5aHqI4zHZKBO9jbmeeyTComSVKaqCvkYNiPKdlNVg6QPhTW7DkjJDsUd8SNZinIqBKi3H9zxy+56977fnIcHXaeT5Y+0XVM47i7Tz3MsiWiEg1nYbCGLqrJZaMiVtdGrwXmO02GRyxXjVBqDm1vni1dpbzKIF6uFQoBgvilHqgUIEUir3HYWIwjJUYhGtSzH6SBGICq+p0Who5pfygTpd7R2BR+/pTbRk//ZGa6fHF4B9R2THUBWVmK4QOaNNXdSOC9iQxC6NzKwGhDYGLEGvOaphAPy1r+mTSlZSckj5Xru/wrrlthM2F2KVW2ud8ZLjkk99p20ARTnDDs7et5za0odhbzHeCVbhL/7iL+Lnfu7nNu9bkST3XjrQ4LY1JG4vWs6MJWW8mhM+vp/x8rTimy9OarjEoK1zKtJIIRDCEDDGgKsp4slxwO1RpvD2MOAQJYI7rLkYKyvSy0zlZveLm1HeDS7My4qcxBClOelrluhqjPLfFDAchwYqnJ7eYLy9RnxyC7q+Rbh5CjpeqzyL1p+FAWznolAdESGCm4XPvy4LniX5A8m5BMay9g0b643o2zBUWj1hDFlg1Sgwx6T7sYhMD06UN8ok5ZoHYmsIimJUALfYKPzWMPj6BcfamPef+dHlPTZQYfm8ffiY8iYa6HsXSY3eWqFC/z3OoJCBsGIIA4YoC+IQqscKVGjQImeUe93udxVLDrVjQHYIgGe0DQp3D4FKgbHVEpahzFXOCZSS5rW0r1PKoJikYeE6g9cRWFVOaD6jiNimWc9vqASbS3CuDYMH95yPnetlrz3JopRZ+2uaO8dkzxGyf33+zNrWh1h0+hIDq+aMvCNXI6N6b18qhi+LfXc/mZEAanmJQG9cIJCeXObXPP+vj3I8uxFQiNNO2cpHDF42A0YCH8aguXGLAp0j1eSW3cv+GFMGTnt54Dccf6KG66OPPgIAfP3rX8cXv/jF8v7Xv/51/JW/8lfKd77xjW80v1vXFd/61rfK7/vxsz/7s/jpn/7p8veLFy/w3d/93Zgi4Rj367gCtRbdCoPtQi7dpM1rxsO84n5OeHVacadGazmvEt5mxjBGDDmCB8Y0hAIdFhJH5pIbW3LGkGNhuaXAtRiyHJTCMLaQDqOQLY4HDMcJ+WbF4emKEAPSnJCWhBAJw3HE
cDXg6v0jDs+eYLy9xtXnn2F6/32E22ditK6fOEFMkWxhCgIR6O6DMh9TdkoG7rkyjykAhaFmkKxpAPo+V2bE5DrIv0bg9+K1S2YcY61xMWciUL25gYBRlRfkuILAgSGA1lBhJye0updzoRjrAmn/+boei0xtH37Y4nfBG2++ugdJ+mT57o+q91+UJ3RCOFMxYJG05YZdH84gK/51C20IIrLKIWAhY1kCSaNYu+4+ShPqu0o7IYOWuS2GB1BEpfUeRU5inKw1xt5QWSZeF4RxkoLlJUgoHtVA98YIqSkgLnPnnQ6o47k5Hzd/ZT5zKaNguP29LifpHZrSvsMJQVstp9HJ2XQK68JcNoWav+qJDXtImeXqdw/LOSj2+02UhdZQ1RZHlY7vjUtO1YClbIaxdutOAGI1/dJFILe5Lw8d9sMb5N6orv9Hibi+53u+Bx999BH+03/6T8VQvXjxAr/1W7+Ff/gP/yEA4Ad+4Afw/Plz/M7v/A6+7/u+DwDw67/+68g54/u///t3t3s4HHA4HDbvh/WEsE4KdwV9eAe3EEq+qU5efYD32nqkzGLAllSM1nJOSAqH5JU1YhkKu86gQ7vZFo3icqwPWbabJ1L7sAJ14dQi4ng8YLw5Yj0JNMopY7wakBODU0acIobjiHiccHj/CabbG0xPr3H43IcI732IcH2LcH2rRutKPETVGTN82wYRpPA4eBWMOiIUk6ZqvEjrvzKLUnumSs+ukVed25xF2WSBNL/LOlf23WsHAZmeImANI6XWLo7aaTrHuoitc0OkkJ1JF9ei8q9tyI0oIHJAVOvzKGiOzMGL/eiiKtbISCaw7d1k7+0arM5hIeg+tclhibxM35FCNWB+8z7Pp9GDX2QxZGCYMIVB5s8ZLhs93X8g2S+pHqDJLBVjraoYNJryeAYNihJAYUKd52ZosTAvStCw/Bbn0pW4j3hFQVznV6Mt+9zLgwFt9GlwVSDZRjW6AYWZ4K9hCPu5u+6Yms6+zmghTg2dfEm8/3zZ4q4Qmr0HtJEyUfvb142CGjmjZZG25UFLPz7sGy1jQvthuTUzXuU8uJUHY5twtFFhe4yyL7tmZkiz+/ezjrc2XK9evcLv/d7vlb//4A/+AL/7u7+LDz74AH/mz/wZ/ON//I/xC7/wC/gLf+EvFDr8l770pVLr9b3f+734yle+gp/8yZ/EL//yL2NZFnz1q1/Fj//4j781ozC8+hiBzuINRelDw+MBQzwiQ/tP6cUrODGr0oVeNGMIWs7q9jjg0/sFM4CUMs6nBWnNyCtjmAKy6nylq1FJGhHTEC56SkK1p4J/B3u4Lf80DKJ28eSZqDUfrnA1jJieXmO5O2G5O9U8QgwYjhPicZJ/n9yWXjnxvQ9B17cCD05PkA83gEZceTg2D5l5hpYbjDAtRR+h6gsiZWgqnJhliZ2kHTISU1FjB6DYdW4p6e4OHQMhJIEMTylo9CVszMS1Rc2chNARA2GKEeN4LfmXNIv0UFQvfkZZDAvJRf+tN0oEhqVChMMk8+69b1v47fXeMIMUp4ufPfr7/iesGcQMAFkW0y7SIB/pecKIjwJd0a38OANxwBQnmRwKkNXG5XdSl9+5pJsJhYOGsdRpYRgRQgAfjqVQ3uaZopQm2GC7DjmBjnIsFAZwMsq6OgxmHOx8Cmw8lNZEyd1TzaPGrEKzJuQKbHRBKaApjrPzghqkDSlE2amDiWlXsVmLtGZVarFoy3fcNhjXoDUv6Oz4NpvxumjLGztDC23f1rByry5LFOYJQUlmZbs7+/CjJYyIhZWO6NScZ3t+hrpA/61EkP71Zx1vbbh++7d/Gz/0Qz9U/jYI7yd+4ifwb/7Nv8HP/MzP4O7uDj/1Uz+F58+f46/9tb+GX/u1Xys1XADwq7/6q/jqV7+KL3/5y6UA+Zd+6Zfe+uDD+SXCgSSEH7S4kSQ/IirEdpPXG2tVGM+GKYIfh4jb44B5HfH8fsGcMtYlYRgjODNYM+ScuWHh
NMejHuwYA8aotG+leo+O8l1zLQOIM8LhSrzXcQJPoj4fru8wrDOu7MHX1iqlodt0BF3dyHcPVyL0OxyRh1EavQ0H8DAhkXRkFWO9rcOqdFooBbf1pgBoHYoYMGhfLCbGiFAf1iDah4ElwsqpQohAZWWek7QiWYkqtDoEnFLAdZaSgjES1lA189ZcVcqnOGE6DMByknlMbsG1kXNdTHXu5F9XLLtqJKHXQxC4tYOuZEg0ZpFhrJ95QdKd3zWMQ5dz2UBVRlCwFvPoIMkdEkEvblteu31RPtXj6NmRj7S93833TAdYXRbnJEbL5nltFV0MQRCSUKjnMJ9F4cIiRYMLrU9TqIK8fpSooSx6W6EAccBQKOklKvU0d3+P2HWzOKK/3k6fr+S0olNE12OwRdhrRta6xdcXvffjsgMs/3p0rc1jbbuel9O0fwkI8bJuoI/t29rLyyLTfvQ5buYKXRqb0B9j6h6Dtxlvbbh+8Ad/8OLCDchJ/vzP/zx+/ud//uJ3Pvjgg7cuNt4dy1mgjWA0aIVXWFQIineCOlkCU7WTPESpw1o06rqaIq7miPUwYNUWo0FpcjEGhBgar8UYhfW/asRIF19/A0t/oEE8S10cwtWNqGpPR/DhCnR9W1tsW/NKa72iPXJYe+QkaztgWorjsXiFa2o9MT/6+9cbsQZaYhJqLPSBCJC8GLE2VcxYMiFmIBMVZqENM5blXzByEPT/BKmTO+qxjYFx5ACOoZQUZJZ6OIERM8IQMIzaliFaol8KXnM+VQjI5zHLe0Ldpp1SgXqBPBRI9d4q77V5l+Z7bhuk92IhAJC9Bh41Xt37vYHpjVaz0Hrj5QxW0cW0NiFeyNkNg1PtXMu/FIBpgBFlwuGqiECbJJns9jJ1nnMCllmjNtRaPAqQEMsdu0HIFADeQmNlm/pvAGoN4COlC40zYfvvS9J1Lk19o4j1qip6zS3VdSSAShdp+9sW94vPmfv7MdisN2T2V2O0uDVa3vf00Zpvz7M3yndRDVZ9jzaRXwmMXbTVBcPyvn/9bUCENt4JVuGlwac75AGymHsYh68aZhyrdT+tSQuNa44FAG6nKLkYksX14emhUN/DEIRZqEXJcQgYRoEH/bCHJgTfyl2EM9nd6KSq9AgJPB7kIc4JPGRgkgcrZJd4J5cUVq8vO+jC98ZZ1LNZEmNdqlSVeT7A1lu6pB1oVFtGCyfCEBeLvIK8eYxQYobAh9kXrATD2Fs4JeWEkUOJvpbEGCPhvAp0aNGwRWDW4RkQZuIwHsHrWbxqVVigEMFYOqOVhab9po3remMU6iJWkvUGb3UipfZQBoLr4GtMPVlEKc2PR2D63iUpqVKQS0GcHcd0g+V3OAvJYpU27bTcA6c7Ya6e7iRS8nNkUZI6RwhBjFMnXLuXC/SMy2D9ssyodYK4rIQO4igRrh4vsYhdy4q5kyO8dKmgaCP1xdOVil8V5i+4+E3OOWyeuaLtR8GxhPV5Ii03cIiFX9Q3au7uM79++5Ieyxf1Pdmye5Yt5WEGwcODvV3wBsuiqM0UUHvsVi5Rf2+vqftdfc3cRoxVqcjVjJX3dg7iLcY7bbgAlIet8YLDIJBgMoafiuNaztdyO46m/d5RID7JqQR8+OSAl6cVr05CjT+vUpAMSAR1exzw3vWIqykiBlJ4MBQ4UERfhawBaKTHhOMwYJgGzcuN8pB3VN72/OqC5HXGvFxLWoE1J/dQYRMVF8FL7RbcQoGtF5i5fXjMIyT9mWYiYC1MZNPy7pJN3ooKUWXJ+rE3Zqj5MOHXAkvxWDNCKQKRH4snLce5Ztl2jGK8pdPyIPfBuqAQXlzbiwJbKUOuRBbFSF2IoCz3Yr2lVIvP671dEik1IedxkOjaaOZyWl0E5oe/BzxcZwt61xyRh0MbFSZVVF9PoPOdSDa9+Bb47qUYrbN2ILAi+BLRj5IzXWbQ4ahRfBWuLW3atbSi7m9tI7vlXKIeSnPbosc7Dw3MScVo9CMQCiwHVxMF
oJBMxgDZnxFMdtRWyj5tf+7v3T5U5qjo673nCpD9Z2aMRi13z5J/rnoX0f+tp7YhajD22YLeCdwzVq8bvaHqIUAzupeixr3BevyxUAyrs4ssOXJDakDtPL3teLcNl9aW5PFKGHSjMOlmDjiljNlwXxfSN+3ddUgOKuB6BIABgQjXY8TT44i7ecSsXUwf5tWxCSOupojrKWIMoVFwBqqhSkbTKT5WFvpxPCIOqLTu3iO0thcuOb1mRkpA4qyUfu5w7goXANXTigGbjrfy+bbnT3mAsO8VGSTiYUORwZBPgz8FjWJDFrktYSpxk2czyDVQ+5D3Qx6ICs+kDORACMU7jpVEYGy2VItmMYwScanxeiujtQMZWf+1BqrpFlT1AIBMmMIAjrkwGi1CKzb/sWhgB7osenwlEgxFOqoYkeWMsDwg371AfvkcfP8S+fwAPt3DM+rYmqSqYaEQpYuwfUfnokDRBkvbIQ6Q2rq8AosYVOlavCj8uYrB6ph9bwS76nxJZFUlr2zaCpzlo9K+YNzet7mkUFmd3Vw3NHhvTBWyBC48F7SF0WDXt/uejW3NVt3+JbbgmtvIak9fFNCiZ92/PTvbY64GS35TDZa97s/t0rBTsXJ309pkAhwtGZZmsP1/1vFOGy46XIOPT5Cv3wdPN+DxiHMmvJoT5sSYNeIyFqH1lRpBiGmHpBEjrseI0xqRmbXt9lgih3PKJaFYC/iAQwwYYu2xY8YxMzAnxhjQsAtnEkis3CQ0Ikaq+X9YctOKfHOBA5bcEk2sRqNvTyLnI323LMKyolPzoHqjZf++ifdWcncMBIviAMQQyzwsmbEEEi1IVW7PmZFD9RbteCRapQoPBnkdqbaAsMH6X2Jg0GjLFlYaVULLcoN2vMMEzyis7S4uLJqh/cxHOQm1fcrF+h3vQgeAUduYWE5OTqZGheXi6QJbdPb2YEsPZakXZoruMObl/Arp5ScSaX36MfL9yyIpxllZjACG44Sg6u40TgLneaNFoYm6OCjhh91zQBExDqK1uJyANImO5BrVkVhbAg2wySE1RcZ6PkbgsOsvc1K3Ifc0JJ/lDVV2+T2HZrQlCo7Gv2u02o7qe8/FXnT1mLHaG367l4yW1YsZStG3IvISawCA3Apki0PbNof1UZbvPdf0oYNDay4dP0sEZSiW3LKy3rFLK1BEaTbL7I71M4x32nClp9+F9PRz4ONTnBNjXhl3S8J55ZIz6b2aouIeUFhx9vDZonk9ym+vR6G7Cr27lTayQmNAblgT2R31zkjMCMoXnVNteBi0Aj2u1hp+e7MA1bOyVirGyFkV9jyvoqfolSsAuRkOQ8BoXhQpIy/UQtPHMOuCq7sHyLOUPHvKjtsYhwbBjiFiiqEwOEWo2OYsN/Pom1YetaxgjITjEEs1v0WNpvRgR8zMWqeW9ZqIKgMNB9Bx7uqvqFmEN8YKqHAb1Wh3r225h2xsHnwdizkERtQxweEmz1KMU6iflYP1kcn22OrF0kU5LTWvNd+DVom00iffQPr0Y1Fzf/Ucy92DaFp6ebBSnlEXcYlMo0amoULVCpEuJdq3+8Lub4HLpulaYLs4AvFcojF2tWHuALDbhkQhVMJajJc13LTIW+4bZyiC63MWAphjq1pi+/P/hu090RhSh3hkA09sd91x9MP2Kx0Y9j/fi7IMBuwdVHvufdF/eX5z7VbRily3yEvp/+X+vmSsHoscm3PUY7Au5VbnFWH5t1pLmEj/zt/BERcfnoCPT/Gg0ZWPsjJv1TGAauUjEWKE6sBZPZd4dsyEMQIpBzVYYWMEpeCw/u3zWzaywoRRQXrfu4pV1cD6dCVZveS8eHvzshoxU7Avhc7st6PqFBptDUHYeGOo0j69gSxz6eDUzFuj5VvCNF4mZOUwo8XunIZAGFmMUPUS44bdWByJEMoD5x8q/yDV3Js+AAiIVjek9UucVlCaJPkP1CJd4DIk1Xvd3vN20FyPzBBQNBaB9uEv50KQKMi1Rrmo
3rD3/h5JwUUvHhaj5R788ArpdI/88rn0dptPyPPS1ANSCCBlyIZp1BY4I2pBfNDC49rJuEQe7KngXO6DBHmelgyMYairS44QFtW8bf9ic35pHgzh03O2Av4+Ajf5LgoDELm+B1zOccVt1F3IKJ3RMgSkhwn7xdcjFq9jzxVncOdZa+SasjmydYONFiK1XcoNofCkDHOgfJPb1nBtOyu/yei/a2ugRcZS/6niwCBJJWS5pYY9/PINxzttuNZ4xP0qkN6seRQjZRjtHdgJoyGGBqgN3+xCS6JV3jMVh8wADy0kN+48a42UiobE8ofuP1j1fGvAvCHoGUJ2w5qh9Aar6Q+mxkparEQMaqiGWAt5rZ7Mh/6WbNa64rIg9Q+SGVMAhZXUM48i6tzFgMLmTCxRmM+/+WjFfu+ZT82D5AyWfd8WzTUDCAPiqF72qoSXVBdJTtv6HdlpNWa7uZauRYqHizR1VXOmqMa1GCvfQ2yvbYZ9tmOspD38nhufm+ittH3R9iP51fPSLyu/fC5G63SqfdxGeeRFqHmQgmGrC7Ryi3GSTsPGnnQQXh91AHavWFGqIhOZivGiHMSZIOlftTHcG4jQbpIk2o8cahSlNV59Py+mAIqTc1BIjWVwua8OW9szWjskKCNl2K97FGdv+S3PTvddz7rzEZeP4D3CYvVipnpRGztWB7ykQGIoUVZJCZDVkFJ5beuB1ZZuIn5/XWx+d87x0rnb9yUyllxbgNw3lnnLzBi/U6FCu6CvliyRVpJQGpD78jCEJmT2w3tKBqf1ycpmkYQYxhRj8YT8NvuIzG6wJrpw0VeGwIZJV0Ofq7LeYL2AbQ8L2rGHQDgMEdejFFIfYgsPmtHyuooFbjNDvZMo9kZU5kGNnKsFKQt16MkpVOfOeZSba9gZMH9t/CXzXmypWUnAmoEYTAXlgHg4ysN4KUH/2APqjJU3WF4uyzPL4o6hIt9HzJc1dPvdaB4+Jjja13hxy+LjuxelCWl++VyaPK4LksqGURDFFRprnq8wCY24Mozyvha1CyVeSzAcnBosAkZdIC0KD9RCzyIO7HJXvUzXHjzaGTSJsFgMmFhB+ZpGQtv7akAch0LYoCT5vCIa3BtMoOSzzDBakbHJwhkBor997UwfI1z0v7OaSpkfboxiub+wdVxtjC5KuWSs+uhqcEZqilSfjfNcc5meNGRIhMs7btr/NCe443jZMeo8x6iRKxGiSsydvlMjrll1lCyXwEF0PK/H2CpVQLxhf4O0cBcaozWEFk7z0QZlFP03G5kBRIEYAblRR2wZP02ylGsVPMPqnLhpZGkRlheu9ZusNRf2X4tr1662CiN281ejKg9BqKHhNp/1WNF5KA/MPgRpHtbe8O9egla88bPNLAWi0muiGooxs3qTA+IwiCZeUuq3VzzvoCMA1Wi5ui0PFe3NAwGI0Oinrx9y/Z7kYLsrYNHA3jn7ibwApVn7kTxrh+LTfaW7p1zgQBon0PFaFFpCUKOk7EFXv2WfWf1WAxP68w1k/Ucl0nI0cA+RBkIbWXbH78+jeb8YlArZElyE1cF3RhLxw0hSUdm7cig7OTabX82fZX690jsA57i1Tm4x5MqoIyUo9OSdi7qCXNeDftQO62ggdZ+/sufQoyxTDLVc4HwqepTm2LXzUHO7RcHfjPoF47VReWkuhKEbAyIFxCDSbgyU8oHPMt5pw7VmlhbxJLkcuagRN2MoXgbgWTrUdPvcS6wS1QveVoO3Ia8pOjA7DT9CKbDLDFhHYbsFFY1rb+BcxS9Lj6u8F3HVzwDoYuF7MxFex17yo38s7IHxn/URxmOjGExUyGz7je3w2zenQt6vD3ovIt0UMYNLjjCyGnBdAMTzDBiGo8J2A6p4bGwWSQAts62Lsvq8nP0qEBSys75TtX6oKSIGpPTBw5T+gTdW4d7i4A1etzCU+qhS9Fs/D9PYyIOF441EV4djJWDo9q04m0alvHuqvR2TGqFIAZn0fmc0NUHk/vW/
2R0eKvXF2Lo/MgfCz5tFBc5oef1Nt3HN97DL4wxih/vDAJBcd3QP1fvrTsCujJO/72V7ss8aRVWGXZOnQoUDM1djtWeEAUfyweMRlvS9k7VhCITIarC0IB2rRFqlEN7mQa83hUEK21XdB8yV9HIp8rpUh5rlmlHOxfhF3c/0nRpxGQmACHgyxeJlAHIjHSLJYhIGrAw8rLnrTVRHU8zHgBEOiGrBoxUaGkfIJ1AX0iLU7rHow3+yfaA1WvWcBPoL1rIddeH0bQByYkDJJYHluISMEkoTR1G50IRo0DquXOu5+k6p9vA0D5M7tiaPY//BIjzCBjoD9m/ofiFSjTqfT7AozRvTrFFgD69UbxdFJoq5vs6ByqLllSU2I9Rcluk6+vlpv6s1RUxF336bv+LdyO7i8AbKR4I+4jGoTL9DU5b+a2qIaBxRlEIORwQVYTYBZigU1g/2+zJSiqqyFEPCWRiCFFR9HhDr5a6xvi5eeN6Zjz6/1xcJI5XrUDpF2/xo3ikpq9eL3HpdPBsmt+ZZdH7sESO8Y9vnWWNgRCUkeZX9shYB1fvSa2RlMNYLa1GMk3VdYN6uBSV3qmkMKttv88CFaEGo5SMGCaYZNM+g5aEUhtMqBsxazpRaPdNCHTW3mSYxYCGW7hLlvnyd8dobdg85OD6sp8vff814pw3X7foKtzTgxXiNqyFIewYbeZWLlhYwKXQ0XRdZJO/dA1XhwhY8QDF69VpqBEEFW5f6MFVxyJLX6T3zGrXoYel7iz4VuQsnSq2T9ybVaG1ZknYDZZxWPcABiDnCx1RBI0KBL2rnVA9h9MbKL9plSglSyBxEyqpAkFSNRzFa/QIF7MII7B+IbtiC4ufydaM4Bcx2wECGwDZEiCR9q5qIwKApVOPpBV0vFRcbcyrYOYS1EASIgtK/bWK2hJDd46fu8+575TAUZhTlkAnhekZ49jlgPsP0AsPhqkRPPBwqyYRca5bXkRZ81JVzZfglJ2IM1AL6xnj5i9d/lrf7buZBnStX6+Vr6EQNZ1vbuCnCz6yLfVWQL/vgNsoCWhTGM/KI5F6KXdlDKXkgoJBxPO2fhPUag7SZKc+YOX6uVZAow9Toyo7DEy3614YMDS6fPRCkLCItwHpCmB8qgef8gDyfwF7VHxAG6TCKPNgwgqZcIUNmgOYKJb6JE7a5nvV+pqx5s2V+/EePjHfacIWH5whjwu0HTyQE1q6yZcFU2Rt78AKAKbriU9RkP1HrWXtVibIQa91MALS9uoa+CVjIjFkbKVSjVWHKpEZrySwUfDVUXcWJGrAKF5renxwfoBQPAPIgR6Fzaa4rKLTBSPrQESspVbfReJoejmQX/YBhAEWQ0wDQRluEmids8hquPbrssIs+MoCuY7AtJn092aX81x46miHnSnDGC2q4uYVcguZLbD4u6b/5iMuo3zaXgUlICMMEY/2xTdVe3ZKda78A7BmtS4tEUIHmmMHDVJmLx7q/ZB17SeTFmm09RljxTDtL1Jff5e0h7UVV/blfgpL2RjGaoRgtUyyx6+IhQqOL900TbVjtn68BBKpzwqhsYcCTHlh+Q+S6xukhorvn98hAdi4AeBCoUqBUyTcHzYMpDlNSCwCaCEvqrvQ4QoUrrcyFIK+nSFWr0WSvlpOujTMwnwtxp3ReL5cnNVR+MiFk1p5rxXmx/ONrYD5/k4RQFWPY9Y/j9fLvXzPeacNFyz3iQwJ94wzEqaoIZPcQWZt3ALyc0Gi8xREhDojDsSQMLaKyUdhivske9EIqpHIMAw7DVDxVq3LfenIkUVoQzysmNRCBFEbIQCeubYunNyzyvm1TjNc9EnJmjCqnkphxHKTRA1GFHH1OrIdJvHxR29NH23bDQxgGD5qRV2IIsDFalNuTYjhjpTexwZSC+ztxYG4Lv23foFZBobxv+2AU45KTPG+l3CFzWQg86aLNSbQRqb8nEnOTx2G9hqP16UorkCOQqhN1cVwwVo3B6C2FT5R740ahcZh6
QonBvCZGWwuYX1Mu0I/cGaQdGHADGfaji7aa1jFOnd0LSa8l0rL2RIZEwL2Xa66Ya4Gup43b8CUm2V3gMRIOLJ0PSqpAoUIzgFYvVRisqkPZsEltLlXuKgR7VsQgDiCQIkARWw1GD83vFQobAa1EWeq803qu4sqrkDHy+UEMlUVaWfvXmcwXAF7m1nhlZZ7u5Bh7s3URKSDtVl7mwhmxvs3MW4x32nCxaqdRmsWjiBPy1XsIy13xgCgtVbNurOKofLyV34QByFmKWMtk2w5yYeEIXvywvSnNo9JGfjwcEQyWibWAEUDBuS1Zu4YKccxJjGFQaSSsQOKtIdvkWhKQcsIRESdtL2JNK3OWkgAGIWYqzDt7KACUKK9v9+3hzXK67nbNjMK+NHzfR1sNi2+HgixzNhQSRP2vKob4vENzHHoYsTNU/nVWqavEstAkhfYk8kQDmZapzFtDXqKx7jiYNUmfRMJKrksQ46VafQy0jRr70RmdxmBZxIELi4LletjykiINtjdvvYcurLOAECZZeGPeHlc/vEGKoom4O/r81d7oDWN2r6lTZnfdD5L1lbMoyxmt2ZWR9OIAQM0d++HZus33HFwXirFAIW4ZHF7U6IsKvTPe7jqV09bfIVRjlINgGrvPm7vPvRxTDBKxDYGU0bo0jrVEWedyXPn8UA0VACPliGrKWN4TIeoaW1oUhrmKI1Pc77BAQHvvOrjZorXaNBWSf/9ONVwSNU3iaSwzaJkRKLT9hkr/n0F6AWEGDQN4PYiqOOeaeAwO/7eFdz3XG2G5B+Zz2b2o0svFoqSwTNKbWB88CoN0iw0DYhgQiTRZK5h7Cij6h3ajHjmUvFkkwrpT/meaX4Dc1CIxBSDIA7mkLDd8lpxXUtjDmHcGRQCVEGH5uHJ+JTqDSwr7/Fb9jo+20P/bQ4T+5gZKTsqzwyzZ3h+T36cfBvXaMOMFQAgqJNsl1WvbazfujZZ3EPaMZ4YYRtb9Msk5RJY8WpFyMujajpPzFiLU+683Wo95sd7QW6PQNVfhZTt8cVK4FL9HtsJ6+SyTCBVvJsLPLQRSLV2FOVdkY3MhwusNVv86OgNtSIYVAzvFkgy7RlaMKxF17QSxrYEEgBCkR5y3z33DxkIuUnbeqHnQaGiCc87E+VM3zpAFYHuf+1Nn0V0cQhB5NEaRQbIpv2S8KlvT1WP2kZ4xBpO+Npbr6hRWQgQPCgMCDTFDJ6p+1kfb1m/N/64fpeuCRFSkJQbNnJRo6w0g40fGO224pD/VCFpP0pvr4Q74+GtSRBlibdV+uAJPT4D7/1lD4+kJwKwMl1MNX0MUz0U9KZof5IaYT8gvPpYamSy9nejqpkjl0DCBxlUM2HpGz8wijQ5jlHoGxEEhQxSY0CIh5oBjZO0R9voLbDd/Zvm9wR/QYmxlomIMQZmX1WMs29B/ZYH3NPsWtojl91Rw/pLw7hLv1D+FqItzE21l50mr0ZJCzS1zUF63qhp+N42R85+731tXl0u1ZXsjaI6yH8IU5VKvY4a4Gq36kPcNDC9FWY/CLrDcozgbNndmtOa0Y7hIH/QAmEQKWyTanZNf0HvtOvH8tVs0BTksW7Otk7A+R27Ky2jOay8K90arv0fKfVJhcoGXRdrtlLK20DFlGdlkUszYQ9w2qqHSc9cSmtEV8BemKtVGsQE17/3Yvd4s1jmDMKvzGtTTCzXasuvKm7aWDWQ4EFAK3V3doNUQFqc9J2EOFuOkMB1GXBqbfnX+76YRqzNcjQFawGb8pgNKvncvggfah/Utx7ttuMYD+PgUOYphGG4eqqioUT111aZhQvzwIyAcJdw9v0Qer0CsU+BzV11/Icwn5PsXsm3tZUTDhDCfpEbmcBRGznSU97XXE1EAr5MYrXACBqGYFpw4RAxxwBQGTIehaC0SqRELFcYIKxCIEZIaKI0m5EEL3b9VpV74Glko8zmru62QGdWi5Ggsi25Uw9Vqm1mOy74TDBy3h/QSbNItSEveRgtV
mw0NQ6wcT3Nc+8Ozx3yEVgwg2tPtHyFbwLVcTvKDLg/hYZyiTKLHRh4qc3OwyUs5FfJGjd6+Uw46bI7RIhB/XuU6QKTEJKdpGo/tyMwXHGcfhbTEBpsxi77YDs2MF8mB2CLOe8ap2VVHYDAD7gVuUY20RVuVoOEk3ozglKuCus8J22hytEG0NKHPjVHKpxiK4bKuD2bAyFAHwuujBk9SgqqY9N9RQ9b0WOsgRtlO2qiylAjLarKs99kytySLnf5zBcrbux7+uS3OaKw1gzZybgyZGT4GENjam7j73MHf5SJ8xvFOG65yAYajwCfDASGOyM+/ATzcgedU2TLrDJ5PIiSatdOrhu9SS+LgHOe5YF3BysShEIEYJfGuw9qR27qNnItYKcUI8AlWS0ZpqR62YzdSGDBOVxiGI4aOBmsMp/OatM0KNQ/kGMRrPMa2Y3BfjGzJ6d1pdBGM/N1+7o1WvGQpZDK218e99udt3WSZzYPmUkPHjojCXOtfAMsx8KauxY7bH54ZXKBCLeWzfg6gDxx5GN5/Xxdjqt+3tjZeBd5abDSLWr9AdEarSEvZZ5c81G4EyGIfA8CZi2KCrydsImVqOwPYuOT3GrQaNGplOP1EZ0zlpeWr9uLSncEZpQjcba9vb2KRVUaNNA1C9pCuN1CNZqh/PzNyeb4k9sik3bpjhQs9qhDQKvwH7Ky3FLBJRus5etp/Iz2mvytqLeMRyJN4qLFbln2+2DfItHXKcqjrCu4NiaZJ2MGuTZTvD9cR2tA5X4wsxg/6rFyCC3W/bb87qufko9BvY7zbhstGGJCHA3AAeLpGzCs4RIkwyneiGK6cJPIZJ+cRtfRVy2XxLLBgoY8O2izEmhU6S8DLDJhygcKRbH2Suq67JVnpk9BpBsUHHIYDwnStGmOSl5piwJwiXs4J5zU1yeemh1VnuPxDbUlnK2Y0ooUvKO5bcwAVhjOvO9Jl+SYPAfleUgCaFvAmXmpes0Fse6zGlNv6tSXXBLokz1WwWICq0rOuMWbkYK8SsdVztfOxd6rCtTs1blcr205RLNCIC07ct4GPOsPUC/h6g+Vnl7rX9lmgWsZRWGfBlhXaEDPsOzYndmit4nhzisVgZYjh9n3mPKjURJKPejaoz5w9b3tNpfx8dMe0lwe6NAoD15WRWN2k1HWJ7uiItqtCdAbMSydZbqs5HVIQmewOdOeY1y16s8xoi361QWdO4FEcXOSpO+m1bk9VL0q3aVVM8R2ty1xabj9Orlt2p4rir4vlxXIUUlsTMaIar85oUXDv2X7d+tao8Ntv5IdvdiF3xjttuOh8B5qlPoWcV8PTE9B7E+LxBvnVc/my6rCZF8JE9aJ0obF0iG0fLIoRdPvMJS/ro0vDKDfefBIo8fzQHujODVXFTSeEYQQOV8KSHA6YDk8wjle4PR4x84iHVUSE3ztEnNI2CgGwqa635dzXffmqe+AxmK0uzHCQYMpCePDMJuoXM4MDONSF33I3TsDU57X26rSYbcFxUlgu99K3QhHotOYriGqE4eHNPT1FT5F+k0dpSytfRNLGFZ4Wo2VGyubB57EuGCyBglHeI6DkjgiyX4l0qJQocJAoq0YhNer219vuRGNQei092245T9R7Afpb0mMrjLHuuE0/EG5fPkcTlfRQdANjaHJ/3Oe/9IrUaBw1Gnff8p3No3ovkVvD3KtS2OsxCmLhuyqYirqpUfj7qfw6DEBewXHQeqdcSjTKemSkiWWuefh1qXnyg0hyhZunyOlK0grD3DiBtC7FsJQIa10FRdJtlWtm0KAZrSBMZx5GIY9prt3nUv2xshkvvecoJzHOer04r+L0K2zIWtdF0Dx/jMo9mNp8pVdr2SMoveV4tw1XWkCrKGMYZR3LDByu5QvDUIwKAM13Ka2TGZs0qEVfRcdN8lbsc2VONsc/cKRKApID08I+/V2pl+jEVovY6XREuLoBHW9AoyRUeT2B5wmH6QbjdC35r4FwWF2h5SOup0FF
LbusGprH4GVPZjBKsEU/1n9rd+jDZsoRZXtOZd3Xaz3mOJMaR68s0jPBLv92Cwn2EFnY+a59z/9u9z1bRDhXj9rebw5kX2nbPmv+3dlf2WeXK7P3G0fBrvfm6OvwJAAEuc5eDBZoo5m9CLUoivSRB9cavN4AW7Rucb44TzU6aZQ8uvlDgY5rkX5/3xequzpWo+G9ETBzHQFh3cLuaWm4Kv9FHIawMVoWTfcQtM1hIEjOOiu8l1ax7Hu5I4Pw1qUaHEB7qMnaEW7UoGdXmuOda4MDNboqUZZnB3rNSUM4imCydrF2aQpouoSwotEjDAHIAAdIrZlKpREEUpRcZqw0eKCkABokye77/tJ+J0OFVhFOAMLyIArZ8wnRIDllDXpM1jwdTkkx2xppEXOp+QJQQu3g3hfv5QAej0JF1d8RABrUG9Gb024uXpZ64/ro6+FOvJTDUXJo66JGTA3XcETmjJBXHIcJh/GIKWyFRRko9HFAF6SSndVT2YED3zAb4Rb/qtXojVv7ZfUUe909qtR3Ywru7oskXjSYSppjMqAak48JCRsJIeh5erjQFt9LBtwbuPK3h7MM3vJF1baY7OX2fP7iUtK9P/e9N53RuiiddWkbHsqmKm6qGRBYkz8zYJcOwubcrlnyf6DegwA2YreWg0uZJRAvGHRriJvFrYtC7VuMCl32EfoYSAiTLq4M5TtBz6NGXEMUw3WMAddjKEzCqetfZ5GW/L7urzFeCoPJv71aKbbX3IyN5QNzAo8TsjqxNOb2N0Zrz0kMnjNYpUbV11eF4Eg/Q80rh9gYLUZ1IIqzqYaXwwDCWsNsO29z2NmtHiUdYPlr2sKD3unaccTedrzThotfPQdCRri6EUM0jOB1xvK//b8L8y9++FH9gRmvWcQdy03ThdolYrIWEMNUwm7EqRIrosJDWb8/TojvfQh+8kxgw3UG370sr3E+gc1T0ryZFf3l47VEXdMR2YRRpyPi1RPweF2KMa+Go3hN4xFpGFTqBkqDrl61p7obM6xv1VJFbdt59bbB5KnM8Bl9NzMjETAFie4KfCWT+KhJtFwUsXriRtEmURVAlvooChZ5xV0RUp+zM8+Yyr/1nPvo8qKauTdM7nVvqBpVEGNP+iS0h0bs4XU1LR4KbAykH+7B3jSidPD2Y4W+xViGQb1xIYJErS20Jn9iBPYdgl7Pz9e59dd4L0dm1yEGKh24N4bY9tXlAT2kvGYzinXbzNCu5fXeGAOkCH/ApstCOS6Nqm6nQSMuEq3TLtryEOfu3MCuZW10Caiauo9OOIMOQAhRTOowAvMJPJ8E0ssZfNY1SdctX1NlYrjF8S2dALqCYi3NCZZ2UFiQCzxYnafN81mIFFmiSM5ipEzkmAnENULb/M5Ui7pcFuCiq97xSm2u7G3GO2240re+gXR+iWQJQfVI0sd/JPIloyQ9S9O8odYwsNPp8n2JoH2J7P0NVmxht12EEMAcAVh4rwxDvfE4dN6WbrcUAhb1AL15lZHG80nELucTwvEGwdpNjNeCV6cFONwgxEkp9Nhgbz6v441XZtVIK8K720UHqIbIG6/ymX4oIvUm3uv2bQu/zZE+3EMYSm4mQAkGQZREAPnXpoTZ1C8k0mKuWhe2r74fkWm5PQ6F8sZ4NfBfr/rR69B51pVtk1zBZc8K68bGaPmFwBurXgPQJ+n3DJkfdjym8xenygiDQFwGNfLOXFUGHxXPplcUueTwmNKDNTGUgl4Ug7+nuuFJKkxbBfjErZanXHPW+STAlSowWkfHcqRybDU3ehhqlHWMoYEH99iXwOYRK/MS9doTVjBHEKY2jxknIM4IwyDEr3URh9YJzXJSBrS1AGl2Yot+jbK82gVZOc7YGis2ZXczrJbX2jsXNbysAgoVRoS8zrkaMKChuG8Qhd6xcpqllv/drX17w/FuG65Xz5HyaSMWubx8hbysAO5xAKrhmo71e6vcMGRN9LQLLHICHa5qYfFYJWeaGwGo6gjDJK3J
AUnWeuXsjuThjaKxD/37In6pzCP1sLJCiBhG0FUGspwHqRcVQ1AV7O2w/IJ5vQBKi3WLdqxe6THjZRCNh98uju5GRcoVkoi5QFYURG8wcYV1AtsiIzVopP9ZUp67xcvXcxlEWCD3HSLGxeO1h8wMgjdQPTusNxRGUuCSWHnt2BxWb7CA1nju5dX88e15wXALVBgEClftTg/zWJTsfx+pah0GVLK3Fz8GBG728HNT1xYIUykRWAu0Xo51L8/nIy2uhcfeUAb9nzVu7ds6ZTZ1kEpOanK96sxNDh6cXL2Wdyy4227/Xu8MNHNOVGBEKYmJYsAGobSHtFQ5JkVhZEddvVQ3GlhwGNW5HoHpoOtUa7SaGjF33H7OAXO+AEDPwwIlu0cUhS2OTm+w9nJ7QItO7H3+GcY7bbj4/iXW0yucn7/C+iAtuikEhEkNS8p49YffBABI+/KDdIRVym6IQd6/OWK8uSrsnvD0AyAExMNTyWcZK2e8blW28yqwAJEsJOdXoOW+QoPK+PEaYb46fVf3y6R0lNTBq+iQ4eFOji9nhKuEzBmkyt9xut5spqGBo+YpymdqvPzrPeNlvyoSSST5hN4gbKIHHyk0JJal5H6iwlYcQqF2pyyCo4k8lFmV9fMOLd3OyTxlf9x++HPYPW5vEBwkSDnV+hljWrncAr8JQYqr8S7z083ZRXiy/O2Opzdaj8GNTNXwmUHLa2s4HiuAdsPkxYxS39Ptfav4Rvh1t4ap7tsgTRPSNcPle23lYnxErb2w581RucgaqsOOtcKCamRV86+lVXqdvXZsWLBl+9J5G7E6G8gVXvPzQOsJdFyANCOsJ2EzWyTWwWhlrXAoDQ1T7aGlUVahvA/HmtcyHdYdGK8et8LZnAFyHQ4surJrZz+37fi6MPvIP/MX1oFvd7zbhisnLA8r0rwiLwvSsiI9zEjLirysyPOK84sTcmLkxRYa8fTjFDFeDQjjgHiccHj2BOPNFY4fPsX04R3CzS347iXC7TNl+03g4aENhW2EQVQ8hgnpyedB1+8LhTWvGKwmrE+q+kZufWt3L4ipfzfilymBBosERJds0s+DwiJmrEzJGnBestW37ERY3njZNjy7LOoDH/36zxngWgtH5d82H2PDPHzSKJaidFqNJoOVuRguwGrD23YxfhQHsKd/v2Yd80aLbHH1BZ4+wjIZndRJ3tiC4r1O/19wr5udb71Tqwl63WCiCuUYGQaoOojuP/ZwtO26YGqa8xXamBg1XwzdjUrWq3qP8n4rAGtGC2l+xGh1xJW+TKIzWuzzgups7eVuexakZwRW2SqZK0pnYNm2IWnUTLp6p71uAv0QUkeQ3FccELpVttyWaS7iuHR+hRAnUXIPEbCaU6AYK4usDCUqvdZ6NMjWIzNGVIW+H0s+l1xdXlFKWpAr01BzuTVCo4I+NWuXhxfDUOHPzunk8GboxN54pw2XFPVmUAwIo+Sv0sNcjNZ6WrCeVqQ5I80JaakLznAcEKeIEAnDcUA6zZhurxViBEatyQLQsP38KDUKrIZlPILHKzFMURu3hUGgAfOOtQCRNTm7kVEBinFDF6mZ12UV7NVzmzGMEnUtqGoHfaQF1KR6D6H44X/byDzBsa04lxKADcvNRQn2d1GC9nkvl8CmkIEBJQeGXBlvMdoi2eu5t+N1/na/wAWS47EWH00uyzTgfJTltd+AJnquO+kiFz8te8fsIxzOMBqyvOd+YYtG1AR5VljS59nqgbWJ8segHP+ey4HwzvEStIZMnZum/k2j+4DuOljCH6EaBQCNYojTI8ywWjCoYkYvGNyppLt71d+jhC5/6SLWpv2Iy1WW2rQ4aYRU58brQwLYpf7bSARlxm6p9H4MYUKcdA6yqF6Ac0lZcBb0xeforUOxhwQR3WsKFSJUg2Vz6cdjyMRmuHt0V1ezgXpz7eGFSc4LAFFdCzbb/Qzj3TZck0gkccrAcRKDpdJNaxDoMIyyuOSUwSdGmhM4M5a7Bda4
LU4R892C6eYeV/cnpHnB9PQVDs9eSY7sdINwvClsRNM+pOsn3YM/gMejPBB6gTlOAF/Jw7HOoEEiJJoOoNnViLkFkdel1AaZuj1FpcoOY3347SFMATHOiHHCEKgpAgWwyworn7k/fOTSe9C2aBVIzVTwPQTQQ2F+wUhdvoYCiBbwoGoB2svKcmAhhqbuaI8Gv5uT64pfoede4aS6sJVFzeu9qTJBr/2Wff4BAII6D8nlKb0RotZw2HnsR4yh0qqBQkMuCz6Awkq0uTab2eWH+vcuNfwzVZPdOjP/e0ukUwvFPeZCMJS0o4sn2EWHdnx6PoXwkV0dGHf3pYMCS+NEl5NqIimggXmLkTKo1EpYfBSoxwMiyVdTkNc5A9F1MMhvzrCMypbqm1f67yUWVZxhuhYIXY8tjFqgnDUPrkIFGAbk4QjrPchjfW3Gq+QIGeDcRoX+Gcgdgak38r0zuutw7ECPRUaPguSLQwTloUKN3tEaLgv+vm6804YrXN9iGG6xnv4n8iwe/fH9W8QvfggAyPOK+29+gjyv4JQxv7zHepqxPqw4v5ixnhbkxODEOL84Y31YJUI7zRhvr5FOElGM7z0Fbt9HPBwFNrx+gnx8D+nJ52vXW2VK0TqDlofy8LAuykxBFuc0y0XMGTjua4M19FGt3wBQvK1yw2jUw5wROIPDCYNCboY9F8gFKMW/m3ns4EDfPsEgn42h8obpEnZtENbqoheLvDz8EBy5IJshMxVtffL9nPSjy9UAuGgoigHuFLYpzaBFu8ZqIXspFDU6sq+ZsaR4iK8/Nr0OWR2KHqJtotmS7F5bwkfvHMRKECpwjYuW/PmWefXRhf3WG1mgcWyKTBL2I3M/z4DWFhJA2UOIUY7fLZKc3Y/gCB/NfMjCXyA/tAhAJFR6vV1LX2O3R6wxGNhaHlktpyo91EhLR6jXbq/BamO8ujst6XmY00WdkWgncWskyr0FIV2YbBNPV1UBYzxIpNUcI29QlbJPjZKlKTiXMhZv8P065I+ngU7V6bCef75RKSAQaQyQ9Y5VbcMbQ9t+rGS5tx3vtOHKp3uYuDvnjJwy4qh5q3EAaxtz++zw/i2WuweBBZ+/wvxqRloURpyTePmJcfr0LBFayuCUcLg74fDsHgAQnjxDyAkhTuDlQS/AgKY/kSUszRtSL444g5PK/Rd2lkVNs+LALhfEDIwVrjLvpkq1cGEzMgDKASIfv5Z2KjEMSOSKenuShv55EQ5cdUH3UBrQ3oR2rHvDzm9vwTAKPWs0QUHU+gt9zHlzfc+jfnijpVh9MXz98fiI0YRK17k1WgrlWj6y6dZrws0DAH3dHFf34Je32WrtWg/dGltagVe0/B9QH/ju/LxjUvIu+XJLjEhDnc4OrmmgY1QD6w3WY3Vb5gxtnYR9JwlAA9luvwOAJJfmh5deMnp9o5heooW1Lr5drtIayxZBAn8wbk58NFFFfi8bLX/72+aYobAqNyQWM8JDIbDM21ywV3Tva7KGA6wXYUJw7YCcGIE7ICHUKAgEbiDeMvr7tctp7cG6mzyfknYSQ/u8KZqgBgwRjbPLY6fJ+BbjnTZc66efYI4RnHKBCLloCwaE4xFXx0NVvUgZy/0Jy90Jw80Rh7sT1tOMdJqLEeOUsZ5W5CVhfRCSx/ziHufnL3GbM4ZndwinOwyQ65CnG/B0LTcTIBfZEqPuQtvwiVrJE0Au5Hzftv3WBaXok/VQm31GQe4cWuviFid5L2bwAEQaVB2hXTB6Y+XhQFprYp2W+zbv43MCwOPyLWUhUcjNFowVAn+Qo2Ovc3FE5DCoGh4z9nvRTYfBN7I1e8fZtYagddFE+amtsfFUZU+g8Xpw2sKlQCE5t0kN3actfkDN4cj7lgthVX5Aa7w4o/CS3YJq3q4XJt7zHWSx5A5W85Bu809jtPocTk/usd/1C/gehKunBSJu8lS8E+3366odbXGogEps8CxQfw24QzP0O1w09hKq8HXNv/X0MiHy1QAB
AABJREFUbobMB2v07rtkW21Yf84BkA7cSsf3jVt74wuDp1Uwt9yjw1DKGDy9nYeDOKVxworQRIJeSccXats9Vq4Zo4mAy8s+2oR7fjpHyXdLt/vD9gdAG9dqKoakWSkBCKZVCYDjaf9GeYPxThuuu//5MWiMuP7oQ0yjq1FIGetpRkwZ8cltk0QPxxOm2xnjzRHpNCPNK5a7B6xqxNb7E+ZXM9aTwYkrhk/uMT2ZsNydcPWFFzg8e4Lp5XPE9z5EuH2G+MF3Id98KHT56QprPFZGlBlUd9y91wYAV8M14uAYWeXLrsup6THu1BSVepEwwKRk7CGIY0CIIkbr9ePa4tulRiE9u87pQBbF/DKhmuu51BEXkIUCKL8rQsfrLL3L5E0xOJ3xqhe1GqxLhrLRSHTRVvNgOvhIDLQ7t9NdKQrldSnlDOjOlwZIzU2MFUIkar1J72BwRqAg17y5D9RDZi5RF4O1S7EYGHvQgWokar8yVZTgln1noyUzsEJt3NX0dZcKthBXY7hH6PHyYj73Y7V23tDZmllahpCUVBjJzOjpTbTvrle57qmLoExw9pJeZEEDnOEfBgBDcZYKK28QVnDJH8W6UFu0lXJrqP3oRfF7EokVZRf6PUEcJWsGqakGNnKIc3zzdIWiM6h5LV+g7aOsHiLcG320y0Chw8s1zc3zk9Roe0ep3+dmHwBCro6Td0oMBZjfqI5kf7zThmu8OWA6HsBZIiUACOMAikEgw+Ox5iJsqIzTYZyQTyekZcVwnDBPI+LpjDgNoHAPijMo1gr/NKcSnaXTXCWdlhlYVxH7DTOQRsQB4CB9fqxGCqg5Ju+xWZi/5npTt4rUA8ZpqHU3FEAk+Ze+WWFJ7lOQXBoA5Fgis6HPFxVGXdaHp+314+GVfH4A1kXUrXsK/16n1FAZkJvW4EbvHaZKDnCDFBfv3/PedPuhGqhOH7Fpo+B+3+RANlHWqfZuy7nNa11qaPbIqAwreVgNss1AuTf2HnxmqQ9MjK3hYi6GJXdGq1m0GGCywnQxijnJQsr6RVtQbPT5pnIeMO/djq9GZ1VqrBosi06AGs0zsaqfyIwYAlC7C0s9FVmEWxwBbu9zf492edY9QdfCFmzyNtUwlFonR3Qozxqj9D0j4qKOnwEE15xtEyW6CNIYl5ZPCv4ZBFDIMUB1BO09y5GbOK4ZLa4Q8aMRLtocdmF+dgdc9qn/ejZiL/vVG8m9HJ8Ybi7HYWQVM+YAbzpcvM14pw1XvDpifHIlhsQaQY4QozWJBEo4XNUfOO8VVzeg4wPCfEI0Y3eaEDVyoxgQ4hlpTqVgOS8r1tMZ4W7AdHtCWJay4OE4g1bVgxsmDHECtCYJaFXNt0ZL/rOHd8oBMVhjQJFBGsOAYTy2XqSPBOz9vMpDmRQyy4Pkbxp4qPVKS4Tl2393jLoSiZzutnJZOre7/YCs9sTwbE9qGC4Iz3Juo6ReLmYTcSXZRk41h0eh2UbJwXnli7xujZY5JOcussQk23OG9+LwDoX+HYLBTvUhNuNV5pIAv5xYdFwXiLqAsPsXaBcZP4yXmNRYBFLnyeWaSpdtqvv1o1flt2LgohT/iNGCvrYGqbJPW0hre5hitByzc0MW8M0YPTToIirSaGGvJKFprUFd12FTmDAmpBvmcBi0WVrA7MFt3bzZNSWy7XTfpQAQl+Lh+uOhHKOXbVq5gyl391uPqSde7aW3/PVm9169vq2R6gkqwPbe8/lzokpW8Xn271jDBQAhBvA4IN4IKYMOR4Trp0Wkli5RLkNEMLXl+YR49wI8n7B8+gLT0xssdycsdw84P38lrMSckU4zzp+8KgzGGwBRKfIRQLh+Al6FyMDjFYbxiEE9JBOnnVPGmgFkXdvVo3tYGUk9koyMkanI7gx6k1AcRCVjUWw4uwfWRSjlNbN4pp6Bpt8vQz1YWs6S70lLUdn3Yp4GnVnEtddMro2yNKIaAMJYIl1rC9MsLi7HUM/LQ4O8XcR6j1W3ZxBQ
M8xLdx58qclS1iAXFuFcW9P46DGnUmxco0YXTV4aWXJUxBkDBcRIGqHQxsgYrRuo0Y2PsvZgu7IoAoXm7Fletnh4Aym/5/qHvb+3qvlTcRAZd0arnoNsszVUFQ70cKGpbAyBWqO1nrdNE83RsOvXC7QOnaix3RO+VKGvbfOqD8qaa094LXBtZAKrIypz3Ob6uqlsRhUAYCSlrBACwnAEDahlDka+Qnvt5WOGOTu229Tt3zsYngRjMKWH6faOeS+/eSm6smtv96X9vp+NSv6q92QMgkSd9xQQ3nC804YrTmKshqfvIdw8lYVxOkqbEKt98BXda9UQ9AstcgK/9yHywx3i7UtMdy+QH+4wv7xHGIeS/0onYf6tpxnzyztQDJhOMyaFlsL1LcLt+0JNX2dwWsCHm0Jx5iBU0SUzYgaWxKUP0hCc5wu94UKVMWrWFHvooiTvN+rLQKWeUwA0QNrL/zQ09WVGtsLoh7u6sDvdxEb9o8v9WDTFgBZNCjRI41Si3+ykaDyBwpMw7HrtJtgtQgRa2SUjZfhtue2RM8LZQZ27LchDBII/t9DqWYZQoUN9vd9zSFXC09rk8QYAjetdILDcQLBkihLYVzmxHJZBbwwoPFNvFjNYvp7oUmuYS3Zrn/RBiDDpp0pCgJ0bWo87ABgilVqsImZbdtJF1HrtKc0l72rOBneRPQ0azWvOSspPqMJtnYNUjJUj8WycOp9bAwqhZYiu/gxeC5Gb/KCPXExOTb/ZXAcPzWaDO7v59tFLP4oDQ1B1kWqw/DV47Lc2vHNkOTTvONmxWTTdQ8R7x27HklwUGMVSY055++U3HO+24bq6Qri6Qbh91kRZjSyKexppdVGJ1m8ULDvNCPMZ+dVz0PEa4XQPOkgN2Fn1De2BycuK9U6iHovGDkDx1AcVw82ASBoNKNInkUJ7NyWBbcYgGn3MXlZJWixYx9+o3uzF0RswhU48Vb2cs33PRyDW5M7VMFn7lRJ9eKPlCqd5gBRMOoWPmssSJ0Kq/KfaPryHc3y+IgBwN7Y3zr3aCKtYcaEPd1Bi8dB3jrsZIdZFEGgjriY/V1mFTf50J9IzPMnXxjSNE72orycRUABGi5LrdvvLTyS08b2GkA1cg+p1b067e6/YU5sGVANpuR0miTwCCxvSC93a0RJ5EeRaPOx7XRXv36IN74DoHBajZcQga+rqIVszWgXy2wrA+uf9UW1G7u67Xu2FaqmFqYgkNxcBtClQTjvXDm7B7yMXX6bSS1v1hcPeePn37Vr2jkpbd1X/9ka3ibhchNUa6C1EDOzXihoZyNwsAII8fcbxThuu4XNfQry90SjrqkZcuoCZLEq5KQ/14SiV5nZzK0uJnnyIYX4Fvn+F8OnHoBAx3Bxxfv4KALBqPm3RKGx+cY/1dEaeV0xPXyHevwByQnz/CwhPs1DlvbcHfXjVDZUbCxhKC3oU79VEQAFlIZ1flWOFK6ws9SmdJBEAiLr8I5GmU4TwsGAxWrbNro7J9wWiGNEzCkmjE7sumI4iVjweSnuYBp5xrUSsfTgBYCWQ7HXJbViKylzcG76J6CXmoxUVl8hxqHk8y9cV6Z3DEUU3zorCO027sl3W5d618ijOhOVqkiMjAC7v0m7Xk3yafZDcT8wWdwGg1lAVBin2I67yvfJ3XdRqYh6FYESscBlaL9szGZuSC+oWX7cP+aHUPZLsVIlFxpYlmOB043AME6ztUBGY7Vp5lGsDNIoPHpazOa0RYihR/qbLgR6rGUbTJGSgGHI7QmYVDFajZOxL+8z2a4u/v65i4FFU7KNCbBHUdGlA99p+Wz9rI7Dm7vcObWO00Bgt67buDZbMHRff0oxd1UHVe7VLewOGIhGW79QcV/zgC4hPnwIhSPM0IljfoVKbYVpeYdguLt1rDgMwHmQxuYZg2znhcLzBePscIQbML+40/3USiSlI9DVPY4m8aPoYCFHyXsMR+eq98mB6BYVRI6ueog64m68sbi5Hkxap+1oe
Cisune7a5pQXBoeMgh06Y9VAZyXSCCWyKJHVuhQ4UKatNrETQyXqIuHmKeh4DTpeg6cntcrf6lBcBX45X6X6g2o7CCLTOqs9vaACsxta/EWvOZZeR0LDX2reyj1ZRiApc9+zUS3SKgZuKHTqTfuIC6QTOZ4Kh5XjNI1CzyQbj+AwCIknt3mGvciJ1Nkpf2O7cO0NMyJGlGi/WptNJu2NZgLI1etujaI3VG/akHFl1LxPGECrROcZAK1DjcIsWg7apmgYSlfy4owaVLgjT9SzIuV1u4DKsQ7iEFjx7N69Zgari0aM6ZlYjNaa2lorO2cbfZNUQPqFAbS5hXonwN57m9HDeV50m91x1qhx32g9Znd8D7SSC7RbXQ/4NF9u3fK68U4brnD9VPQCw4B8abHwHlbpo0W70vtoYIEBNEwIN0/Lon14do+sxc7raRalCkgUFu4eQCEgjCfE63vw/UvkEBGfvFcWdx4mEA2qDOAV1p1grY6Gyu2bBnKuxYppFlacCQKXPNQ2WgBQDdoeTAaX7wkRFJTGPULmJOriPwCcg8CCfpq90bq6Qbi+BY434OGIfLgBrAnneCwGy+dtIgG+n5EA9tv+RlKnZuSTy1hDA81xFmPI7hz8HJW5altGNL3TbF4MjiwevRoti+Kdg+QZjv5fLlw/oGGxRdQ6omFCoqEset5o2fTYlABbh6eJttwp9jNmRsWrpvcLtUFjIQQEliMPxUOveZ7XCd76YQavvHar6RAGxGkApxmBlcWkxBty966po0uPq2nrQOgzb/ObuDVW2+iH63m4uStK793cwZ1Dz/hszhXOuOl7e3CaDYmunANANaruhQP86Cnu5TDd3Prr3+bXqtHKjMIcvWS0HhveAFu+rPydWTQSaD9v+6bjnTZceboGH55WsUYKTXi/27iMQk34G1vJGwa4h3cYJN+lD8vw9D1MqnuYTjOWlIsSvTc78SiLHqcEOhwR32chBIQBONwUA2rFxZQW6eNlxtVgoj2PvTCvTsD5ATyfKgsQaHNRAIzN10YPricY4KKnUCOvkPTvrkbLJI7Ke5pvGCaJsA5XCE+ega+eiq7aeC2Gy9WfsN6xkjupD9xQouQBlh8ioNHj471Iy/+ds/ueg+NYi4JDrOSOFRCJrIyaj5tqBO8ZaQbX9Un+bpHc5FP2BmlHAd900s47TlgVVlrWfFERo5y6i74eheIgi5Adkc09gFJbVBT/XQ1VOQfVvxRmpGkvUmMAgK2xqoawPf+g0Y9XYTCoasmsxboTpqsBNBxA4xl0HrctSLzArK/J0uvRRIadsepJFNWAtfeXdwIq63ObL7x0nQIA1oXaflJq2+xvEEKuahODwoSFiUkVOjQFFO8MPBZ0CYW/M2A9U5DbnNZj8KDN19uMnF1EmVEi08863mnDxcNBICgzBP2ixlptnxMQVvhELoDWYLnfFUjK3hvGAn9N6qmn0xnraQYWIC1C0OCckecVFIO2SbkHrzPy3QuE2/cRP5xBx9vS5K1Cf2KMAgtRg8djYZQ1C6AzdODas4tiBKajqDmEWCOnvvtyIU0I7OVzRKztVmwUkoKjCO9BaH3rcByuJMo63gLDEXm6whomLKlW3PsRSYkFDFAghDDINYzigLDNkzMCZRt7EZe7nqZeQXkFJ0+lnkFxlNzgfG7ygqbCzSpqumGhPUappoBG/PaS4ZJJL/+aQV8zY5lzUx9j836JCdh/xy9q/hcMFKq8n7WmtsicPhNTNlYq1BEIGv2EAUMYlJhQFQmLsXLbarQtO/YeQxbHlEUBxKt1EEmt1BQJY7zCMF4jHm6qzJM3Xo4h6DX19pbFgBp17EUarNGGX6ztjrNoR8gSjDFS4ywU4wYCaT4qEiEFMQQHlxMC9iKhCqt6I9UKErTX9nXGy18b08O0u70hjzgHoir12zb2YcES/e/sd280bXAeozu+wXinDRcu0JA30RZnUFbIyIY3TP37Ci8QdCFTmjIdr0HnB4w3M4abK8QX
96V/V5oTcjqBU0aYBuRFIrODbTdnoYRzBg9n8Hioi5cuzKUQn/N2cbbFWxcO5FWYejk3NPTSx8uTMIzObfmZcqr2m1x7fNno2YG2HQeblUjLKWxbgtyq/RGGktA3ii3gFuJQ5Y5IoZRIQeCf8EgOa284aNBH0uAJiK7ouOS3Vsmn+OsfHMRkTsPrKNW9seqgwd1DRV04lsw1ke9yWXvKB8DW2/W0c2+0Ln6/O5ai5OBYpg18TkGeHwAil1TzjpHa7sAEoKm52yPDaLQlkBSa/I8ZkEDAqvOUGBgZmOKE4TCBfduSchJhO/96boV4oZGHESj2lk4zWl7GyuaWWScvC9u3nE4f4VL9XQ5KdogWoXqIztPp3bZ8fpCqHFbvkLzJ2u+jur1hkRa7yNSf9+uo7vZWn6fb1JipkpC9/nbHO224mu6kexCSvl8+b7w99XHKHaOeJREQBJJgADSaRFDSZpI3QM4Yb+4RryYkNVzLgxkw2W6eV6RZorMDIA/ZMCLmJK1RDteOXafN3y6eaK79k8IgC3oYpD4KEGOmkSAvs2jp2TAD5BUrbKjQaOOV2etxgickNOQEl+cpWm+9CoFJ1FAoizFzxbUL5JA0UgjyxBABmWij7O4fA59TaYZ+vzz82j7daqnK65zK4rdR63eRVUOysDwWgNLioTdk/TE+smKYcfIiuXtGyxYv81aLagO2xi0AjdGy6+kX70sFytsDzM2zgzBU48XttaGd3xWj5Sn+7l8xSFw7Hud2Luw6ZgZiYKyBkNigswHjMNTIrnc8u+PyBtzmIKIayNrVuH6vh8igRqunPdm8G/RX9+dmJToDhQpbmmSXve+Pu4d838Rg+ee4f++x0ee0Kulm32j5bZojakbL+xJ2bpbP8gbLiqI/63inDReA+pDYg1KgHABMAhXq9wCUSIu83+mNVlm0AgCJGCgMIM2NBL0y0+0LHJ7dCjX+5T3ykpCTNKg0XcPpdA3OGWlZpVB5PoHvX4KubxHf+xB0+76QFqarQl4ocEd/moAsviEWxhktB9DoJJqMqGHEg67dt6fBy5RITsvXIpkhNKjQoq6LhsrlFvqePbIYmzemh6SLkV0WIn1AkningUyeaOdSX7gF6gNdH4RKyx70lpjK/v2Ct/Hc++EV5m1RdDBUSbi7RY6ZLybKy7lw69EHAoImUPZgPxuWV0pc8y1+gWvgOlw2Xui+99qhfdIoq91Ka2u8gfKMXTJaVYU9ICVnsDsDbosmab2YjwbGIIYsMRXV8Rj2iBP1mtpnUY+hwGZQoxUY2qSq7BNaceRJGjFUavoQpAmkyVVtokzvMDMXg9cURjtmrSdu+GPeM1J9BP064+TzW76tjieSBNTaPILlQi0PxwC1eUCgjbJ2WYRu7JExvmNzXABayK8kyeGSwow3YaEBijBzQlFHDhUOonFSCaOxRF7jzT3WuwfE44QwnpHTqoxDq9m5B8UACgGcsryejqJbd3VTddW6+pPVeX4N7EMBlIP04LLfpRm0jqB8BMWTdlY+t8bLGSG2jr0abe1KFrmoim2OFUIrtGMn/ml/s8GCjLIwMaqR8qwtv2gXaCjzaxfS/lb3RouoKhSUHlclutDv2e8oAAgIcShutN/3pizB7X/PewbQ5I7giBB7w75rBra+rsaqp6azHk+BVbvIqTdaxZCo4fA1gn6UP21BxXYb5fMO/mxgbf27+T5QDVyo7eSzno9fu4oIsTsqb/8LzJblewgG37XRDtm++2hMtxo1r2jqH0A1WklvRnZRl12jGCTnNgTXxcH1BNuFWXskyPKiaQHiWOZ80zuum2fvKO39XedInKa9Dujy/a1kmD2PMjdA4F4RxGoEAaaqkg+gMVqeVg9s7zNvvLxI8GcZ777hwtYbBswL1tyUu4HKXHUJf7j3pZ8T15yY3WyWK1oX0PEG4/VLLDdXGI4T4hTBWbopp9kiGkYcTwgq0jscJ4w3d6IcYVprLqnMYcCSRc/Qh/0mjzPquZkX
DRrl5h+OAn3NEZQmoe+O025NV4mgur+3c0r12EJNeFsvIIShEmPU2C5pH/YCWkNQIgC3v179xS+H3p/uW3dY0lo8dJT+TkCLs28918tPzV7E0xuQx2jVgMJc5fj7c2tHjZx2OtIC5V71OSWynaAzsn3e1jl28vsLZ+6iInGKuu3pd/bU/P0oheLGUO0gVXNsemeb9MbQFFIZTe4INvdCw08lhORynbyzd/kgc5FwKg08LdKIhJDrPoB6PzRRFjJoPmnUXgWBNwhQiULNUhCK+rytQdb1ey+619flmXf/PuY0+fnt2ZP9KI4x6o3kCIANKzOxGHn/vPpuBa8b3nh950Zce14KdVTkMADrCbScJTrxnUabRLt7wFVhvTQktPeP17rNgJAT4jrjxjXiOT9/hVM4Y7lbigGb7+YSda03V0inM+goqhR7SXyDgWzhJwAjACZ9eMyQaM+eRb2dq0ig8UoESpeDNEdkbZutfYuIqyo4xqmeN7C74BUowwzUToS15BpdzVpztGYuhAMApTDW/rV8jdzEtW7E8hzb+9l5j91nQdlnxsQSSKf9zmPU3fqAtobS1Pnt3726mT2D5bd7CS3st7OJsDyMWTZYO2AThTbX4ouovaEx6Nw24X4vL3aeH2NTWj5wB6Xg/jfNh/U4hX5X92X1aL2KeNRFM0aAeX/S+rlMmg/lbP3FoPApleemqb3qSU5qvBgABdL5VNp4aGLOtiP4co+mZ50jAW0avfr9lvMIQE7gEAviIszUxa1HtM2jhnap7mus5L06r5sIbc9gUcv0s9GSesghJ1KAPicAQQz7kmUbft++ZKAflqNdUltw/bbj3TZcZoT2hjNMsgCvoFQ/k5ssgUm9WVeouGEnBetEG0BHCMS2LuD5KQDgSmu5wjggjK9wivfI6laESKUtSnt4aXtTQxhEEyD9vAq05oqVHRFgMYSUCCsDcboGTdeg9Qq0nGFyQqwU4qbHVe/V9cdnpBFTInA5LYN7rDh2yVyMlVcJKLvSvIzd0Jnqw+HrRtbctsvYvaxurgB5+JIufmYsrE05sP/A2uhrW8rxki2CQGSAwIUkYd53u512G2+SdPZRxKUIoe/cvCem3OdT5HvueKy/IlU4rRJOLhBMei//MdLT5sTC/t8U4POAgPP0AyG8wRq2UWPneq3NgLE5AMWABS3i7Rw0fW1RaHNQhVKnDsRyocGqPcM+ynpkTtr+cCwlGhqheiKQdTXnTLIuRWURU61/86QSQ8T79jOXRoHN3X23p8Jhm7IoiTTvmBkQsXfGcYhYkrTt2ZN32tm7fi+rOshnG++04SL1Sjf9czJQmIN2M4XuAVX9PqGB682HHaMF1L85C017guSqjjdAThieLjicZHucpKllWlZwYsQpgkIoxqtpdriuwKjeWpLziGHAFKw2ppWDEpHdNm8B6E3s6jOm4QiKE0rdVzxXOaX+4dIFrIm+AFghdN8PyBd0ZnhmWGvIGjgvWNKdkMEVDqO6LZPHsQSweHg+0to+kMJG1HwEMTIRAvFudNSPS9ES6cIluROxC1HhJIMhmzYer9lPu+3225fyaJfyG5toqiMA7OV0do/TnoGsgraugNqakQLKBKN6PE304pCI1w7dZn8FbaGN1B9gO+r50250EVD1++R9VxvYRGDtc7NhTuq/TRTlO4275pUNcmPz74+5UOOr4TbNzfIMQ9eoDBBczZw+a6UxLIBWG1GeGSLRRgTEGfTGy5yO+l0/73pIaA3WFpbVVcjgVN3YaDh/kPmeYtg8r1ZIbaN5lokRQmjkyd52vNOGCynJzdQVFrN70FrGoUs8D7WvD8Uo0ciebJTH9cNYvaKjdD22bx7WRSKuSZpS5mVF0mLk8eaI4ThhOE7S4FJrqXidRb5pvkeRMqLwZheFAsJ4XZ53H+WY8kCkEXGaMByeoCle9g+dzZtFVd4T1yi0SOUU2IH138qGSswFtvSJX6DNB60Z8J1R/fBGa1EQ3VNse2ghEjXC
nWN0152225fPqDE8/Wew55NQcidMos8XdLt76gkXjdDO6L+7+/zuRcH2UR81eM+/gxeb0bcDMTQiVUFfHiaY9BawzVE22+4jQbioYuf4zXgYcw2oUJX/drN4uhyiNMBs70U5RrlpvGNR8p0uJ1oVyp0R6w1W3w3clG1yjbqaTgM6r80d1ZeO2MnZ/jrjBdbtGRHMSFskZCxEi74yOFZ1+hACrABcaiBlrkxwuc4776IAnrC0WxpBte7NouKgz1UMAh1GYneN2g148pUZKUuAZAbWw2t62T0y3m3DZTedU94GzGC5cL8YNA29g0uOhrWSMDiXfFDZhcKIIIYVrQIoclAIQR46LTCO44AQA9K8Ip3OcjwxFqNWlC3WBfnlc4R1QXiySv3EMNbjcrJVDYXWiBGUMfBabvBI1YiICoGcg7VIIYqIw7Wktuw8nWo8KDRSOQwU2npP77bX1obeDIUNo8nawxIDlfeWLoM7RiqRk7/te9jDU21tEwukr5l9/7TmuhAGUxmo25aPuHlA6YKpYYYWqTrPndST5XaBDLRV7N6yGPW4Ctxo+8frI5edCIw2FA80izCALTmgz/NQAGgGLKIOrkWNwlLlEAA0lO+0tvsq59nBkP59HZs6J+Ci4fcMSiaqPet2YLHMXKIvmadqwGQ6uFVv0GhSDqc1YtBzLB3B8wqsq3RaKIo0rfGqGw7ou3+XlIRFu33E6o8j6b8G1yukKfJkjsgRKhvRq5jk4p2XjaMfPanD2IP+WpRfk+QAGTJ/ydaaCBx2omDv9DDX/LF8TOWzZfgOjbj8w7h5MP2/5SGN5SamKBX4oFAbKRZD6NuAaKLaw4WAwCrHa1CIQiXNCUHd/4M2m0zTgKTdkkuey5ozzidgGGXBC1H2k3UB4Qzf9dWMLQMCVdo5OaNs5IRaW4EiZZMsAguMNWsdSjyK8vWFvFfjae8mWbuIK8uNLe9xgf4AycXlDCxZtPd8BX1iwhgJQGjghehoh9kemB24sNJrdV/myWfLDcr2xyg5ljdIP5VtWQTGdNmAxVAXzNeFW3Z6G8p6b1Bs7EQtj44L8J1vxil/y7+FQQjUTrw5SslFGLAHq9Fe/zB/3MSySHMAKbXePjPYyiCs5hhR58aPTT4H9e/eeNVOwyjFyxU+ROkpZvChfW5zJ04q6nMQQnsNvDh119vNd2SWKCujbUbqoi8zROUH9br5iFX0NWMxaGaPGJD5te/rdTRHo3ynm1sbDDSF6HbHFNjxwo1cGLbOALXbaI2YzbE0Gm1xjszAYSf3/6bjnTZcDbbs8eYd5qA9iBa9sC7IpBNK+jBySrXLLyAafCqXVNXlQ4l8KE4I00EM2HQsNVPh7gXWcQBeiqI8AFHTOJ0RAeQQxdDlpN0sRtC0imHi3PTXCocrXYm1liqOorIOlAdrCsCqjKyQgTnnQlkVWIQRUq11siLKMYyIribFHqB4YdEUWBDKfhQm4WqMwixU/iVxY6Q8XdYgQIH5CMcocIfciaGJvGIgyGObxZgUnjQ32wawWcAWoBgtIAj7sIvq/F/7KgHsiBpUDJgCPMgkyeq+XqUch+2HduqsmvxIC/3typi5z3cVYuxj89x7h87DiD5Pas9HWvV3U3ldjI79fp2r0fLRl9t3IToFaIIwNPtrGWs6T28As1qNl61+npCwOw+d8bJvZcgCndHR5w1pUEkwYiVkBS28BkAjA4t0QZeNOaPVtwYyQ+XasPClwv5yki7qMkPKGWAuLERheqo+JmdxeINr+aMwIlCmqplXb7B9WUI2ZAeKpLh5t2tk9+9AcEFBf6Gs1Ytvh8OFLeqf0/U7NeJ6LBkNwEVanXejyehyI+i2ikdjQUhO4Bwkl0YkD6b1+lFKeKkRG68Rn9yD71+VFu80vhT1+LsT8izMw7SsWs91xgAUJY48ThK1HRxlXXthcYhVrWIHhqlRj0QFMQCjtp8oOScGmET4c2HGKYsBO0RRA7galGJtC5TNr5s3jmKoU2bM
mXG3ZDwsZriqweoNlzWMk3wVi24ZAYdgD0WFFM2oAqjJYQRhHBIjMLAk+dSObltDUmHKxLpQCWYBChXW65Wuua6LflOOkFEfNElmPyKZVL63Y7QuqSzYb5o/uqhH/71IxvAJfe5qi/w2nEPnOwZXx65FGMp93pEV2l2HQqWnjPqMNfMh51OMPWeZ9J3zAFBYdDZ8lGXG621GT0AAdozX4I7Vjj/VaAaA6yzg8lzldQKgPd8AES2IBh0u1Yh53U94Y1ZhxWY+ARDlktogDmJoWb9rXRXMgehPfu89+OioRqbSyVkPz9+/DRqUW8TG9kESbfvItcKHVZR59oKPbznebcMF1AvVLbL23kbh21GAi6HSaMq8TDpet9p1BtV51YiiCu4gO43AooMUwstXkvPKGevpjLgMyNoaJYwDkJO0PVBPLAO1KabVWmkNVa/UvvfMEgEDWeU7I3l0gyXyYgeRsPtPKvqd0fKLnMImRaIno9ZdAQ002A/zbAGBB2tfIRT9skhVrqh65VV2ZwgEBgEpl8ArBFScw82GhwjttR9qwzbv2bz282nGlKiqg1uXX9L9vQ6CLA+9RT6udUgvCF13HlD47P3new7bBahw8x27b81ouaLy0nyx0wHlftHbM4RvMrrosn+vMO/sbwczVp1L/ddFC/0ISlBoAhrajyCAnWiPghiQMIBI80p51fUgVhgwRHFsVXbKN2ItEc8q79M4gXOdRwbg5db6/V+cU7/mkZNre120jv01Y/f8u+GRAm+0Sn+0YjAZQKp/AxVytWiwd14+w3i3DZd7sPx7vTEpRqvvTqvhdpMv8g+2jl6XT4xJ22+JQwQNR9B6QOAs0UtOGD79GOvdCZwS0sOM9CAFyevpDIoBk+F5wyjQoQnujlOrtm7eLFBvGs17BZI8kiVRI1V6PHUWLuV2ISYYCYMRI5WcXlMa7xa6NUkEteStofLEiBFBIh2Crr01ChpVfcDq0+x3e4s/QSjuWTahzMFc6n5EYk5hCJc7GwOVdhA+onMo4KPDDBbBFQdTbTfha7ouGS0fbdlDD/fQA9jmijaGy72+ZJQuLVa2kPgcrb0fBlFw6QvL41ScOolEFIIyUgCz86QhHsAlI/rYaKLH3JAbCLlBFbzRKgQA3pIB/KgCr7X1SMAbGKwyPwFgXT+iwYaDtMKJEcgSQTHQdFYoj1tnwAgQBnMoIlOtIxpr/r1Zgy443eV7j83vG3zPkANfM7r5jt8mAE8c2+RodX/k9s+ZWghUozHqo7W3GO++4Xr0oQ0NDNJUnxdvQPNWw7F6csbis+910Fzz+zImgR2HA5gCQhgQQ0T69GPEuxMoRqRlLW1QgtZ9racZh2XFpBT5kLNQki3acrJK5QYGCoSxZCncTcyl6Zwlrkd1xqTVuuSgxiC5Lat/smFwHlFAjEcUnV+9AS3KmrXY2IzWGJQyO8aGym6V8UtmjKEalN7LCka6SEDiVGjthNZ4SHQmvznEqPBo1U0zUorfbp/TslEiTLT1JUCr8m1Gyzz1lo1YjdZeXYwtkqa4IAy1jtjQ57l6o+U96D1EwS9q9pESMUz1ouRB9DoWB845RdaVujT6dHNSVD3CIKtFCMBa70GEXD19H8V1kVsDF5YL0S6EzcgoEiiZKxlIrnWFwI211tC/qUbElYFRGYVNnvENBhv0pc1gGZDnE5D3hkkL/DXP1fW2k51KzouGEZuWQJaGcPBtc22jQ3f6zxwcVw/YGS1/Hmhziz1b0M+GQaqBuvm68N9jJRHletvfSSBpWs5vNP974902XED1RC9g5I3R6mRTmjYV+l0ANbJxkCKw88DtHAdTAKUjeJwRbp4i3r6P6f4l1vsThuOEWVU2OGWsd9K1OMSA8fZec1kRPJ+0fcpaBXX7m5FCSXwmpZlnBiJJQeAQSFUfhB4/gqqYrN6M/izECEreiwlIZaEOANcka8r15hd5HS341Yp6BiOQkCGWnEsjPau5MnZgL8y5ZC5MwMRSkW+u
qY94vPHILNtmBnJo67N6mac9FLM3dvLDarxYSSCmGC4LIgCVJrJmh9wbsB1vflf1oh/dYrNpRukXhOKFD82iJWoMVNry+IalDUSovdJYW94nd437GiDWsH4ItU+doRrEXO7NAqurwSo1gHbqzC4KFVX3klvuoq5SFA91NIypmtH869VP7NdBr4HYPak/Yr3fI6DPyeXL0Mx3Y1AiRH0gQFoLCWGi1IOq8aLpiIYqD6BpD+R72O1EV97h3vvsktHaONcXDFiZYrL0gTAM/Tc3Gp0+19nto7+3L62T3gkjCqC8XJ7714x32nA1jJzypgtR/cXtPFMAzYVvw/KaEG4fYrS1H93+aig8AuMRnBaE22egl59gvH1A/OQl8PIegERaRpFfphHLy1cYVcSXlxn5/ABrpcLeu6GxfajNGwWq6Cj0wXQepr3nIZee4WW5qpqslvcLItSt8THIAm6LgVeSls9FDmZEracCgKBEC6RKrMiZtTRAfjsGiSABW4So5JdsZIsQnEcOd6xmSHsjBrSR1iY3l2XSSt6LjHkqzEIk1i64ANTE9XT4WuvWDQoAp817m0PsYR5nxJq6vh3jZiosDbnA78upoZjepe8P1URHrPvTm6I0+VQ4qDDsdorWU2L0EJ/dV6YpGElZiJwB6qI3c84yl4i/NV7b1vIyP2JwkcVZyizsTzmONzNaYqBzrf0EpGTAWgtZiQGgSEuNPIydTBeirsZgFae5EmN2u2zbtXsTNAio69JrcpC98dobe9PVK4Xs5i43v6nHxBSasqO3HW9tuP7zf/7P+Gf/7J/hd37nd/C1r30N/+7f/Tv87b/9t8vnf+/v/T38yq/8SvObH/7hH8av/dqvlb+/9a1v4R/9o3+Ef//v/z1CCPixH/sx/It/8S/w5MmTtz4BK1CUPyrkYg9u9h5Mvxj0WlnOY+g7u9r7/XdteFZbHKQ+Kh8y4gffhWGZQSEinWbkZcX84h5pWbHcnYo8FAVR2zhow0lAYEN6oscYM3isEaH1NIoEDEPAFFBETAmiXg3OiCFg5nqeUaMIH0X2Td2yRUL6pYT6/RjsJm9rM7wRSdnJL6GdR98BOQQuuTDzvV6f83LzrMddjRY1bdcrDbcucGupc1MvPbeGq9SBuXwdAIwhCClDvZecuYm+ovZ0osClXsjawhR2GOk9k1y5BnYWATf6vMdjDSwza2pGa/366fPfM9iIc2uweiMfqDozGVobSwFB+5tZFG+/E/mvvOkQUIkS4jANjkE6hKC5NDTOlKEAaxZjVV+3fbu80wJUw4ggLFRJtLIgnKHOTCPCC2wjhDjU6DJEgCdADRT773uo1/I/nrTgnWl3LS8aqkuRFdBG1/161pNc2kvZ1o6hvW6XjJYd8+ZesnWXwvbzPWPpo7IsTG5a/38Ycd3d3eEv/+W/jH/wD/4BfvRHf3T3O1/5ylfwr//1vy5/Hw6H5vO/+3f/Lr72ta/hP/7H/4hlWfD3//7fx0/91E/h3/7bf/u2h7P70F+EWOy1C62LgKlRe/1n/reX4EEKjSGQbUj7DzCDpyeI738BAHA83SEvIsYLAOv9CetpBqeM08efIi8L8rziWg0XcpL6riu9iQ1PtsJpChVGO78Sg2k6Z+tchEAPvkFlyTnQbiQiC03rffVec1Q40bTjLHpjJiQYHMRVAJW3uSRAjEEmxgjCyD4HUTUIQdWjlsinr8WyKBiF9g/Ig8hcPXQzWJZ7y935GbHDG7ExBsQsx+lxFO+1c/fUNtFKf8LmAYcAmBBi95tHIe8++qJKorB/Vz0PHyE/JrzqjVl5z71uBIstD+qINp4g4aMjm3sP5QEoEFTKUKKLfM/IQs2xsRgr26Y3Wl5abJPfcgbQn6cZOO1tXhbcAonZ9XHrBcehFmz3k+egzfJsKmHBoySX6u7eGgJ8BPZroDyrQVXw1Demjd09szcy0NS6+X2XSB5oWt949aHd43RrrUA4O+mdtxhvbbh+5Ed+BD/yIz/y
6HcOhwM++uij3c/+23/7b/i1X/s1/Jf/8l/wV//qXwUA/Mt/+S/xt/7W38I//+f/HF/60pfe+Fgao3XJo2l+cAEP9pOYZOJLqjKE5gb1w9gxgMsB2zGoIj1PV6CrW2mDcv8SV+dTia7SaQZSxnpagOevCuwy3BwxWW3H1Q3CdIAxmqTgUQxXDFPpvkppabxvWkRYl9IMyllgpeHobkBC6PMjlvgNYtgAXbh08SFqCR0WyVgOzPt4pIYtkNWWbcMn8YPl/XHzPsq8ivI7a5TnFpqd79tvMlqjZfVlfc2XN1hL5vK3bIcxBsnX+Tow6cortS6Pqa01U+UXQ1sgH1EO6Be71xktg818lGOvbU72SCRvMuya97VyNrxSiyn9m35d32fNRJCz5pyyOhzZ7LM7rN4Q2nXthz+VvnSBqP3cVE7MeDVFyEBrvHxe8HXGw5AeYFPuUCbwglPSG6u3Yg76zbmIr3nfulU7yNGf9ybK7nbTwKu2htouiOtc9fBkf7jY3tefdfwvyXH9xm/8Br7whS/g/fffx1//638dv/ALv4APP/wQAPCbv/mbePbsWTFaAPA3/sbfQAgBv/Vbv4W/83f+zmZ75/MZ53NloLx48UJemNJD8QCMfVDD74aQwRlAV1xp1HLmraEDgHVtDOSG0IH2YWsW1TggTzdySHHCGIRVFG6+CYoBnLL08PrkHsvDivW0YLk7IaeMm2XFYZ2lkDlE0BPUm1tvkjjWm9XOndIqPblOLxCWB+DhJXhdhM2kBrAMcnkJY5f5hTFEhMJAUwVog2gcq8u8Yf8AhPI/KjqKALYECuf5EWqE1o9cftDCTPJ2NT5C10cpiq7ElcuN7iwC88XSzZ7XasRyJIyQSDGSAKEU0ShoZDXyKJApQLocmCyYv9MuxkOuULmp0XFGq297b7CaGeIt2w5K6+eG1m/jUmuMPjIzI8X+fkCNtOy7/f6rColGxCT2O2Q0UKH/ve2nidxQ59vyZvXcUFiFvmzBziOxaBhaB+W+tgs781z+dXk1P2QbUY4hDKWxuEHxDQuvH52hsn35ebdtNceJul1Kvti8PbgiwQWNmNRB9cYL1HY97ou0GaiRFqk0lk8DMIEsJbFD4CjnZohRwONG+TXjT9xwfeUrX8GP/uiP4nu+53vw+7//+/gn/+Sf4Ed+5Efwm7/5m4gx4o/+6I/whS98oT2IYcAHH3yAP/qjP9rd5i/+4i/i537u5zbvm7ST1TmVZOdjHkvO8hZnNK0LoJFFiM3E9wlWgkZyBgGkWaKfSxMSJNJhCgicMXwXQMcbXGnUBQhR4/6PH5CXhOWh1jYIbDiBhkmu89OhNqDLaqDMeOcMWh40wkpSLA0A0wF8upP/UhKNRDvfQQwSjRPoeIMwHYHBilA1eT+McvxxBIUBNB6RKGDdPLht0jsgNJ6usdUMhpT+PgLb9O4ClcVG54HlPk9QEod5yNTms2zRtgirIamReddm5Opxle8EKmoepvBh3wlMGnkHAFnryfT9xKXkIGY1qmUh5k2916XH1XdAfmx4B8Kftxkr3xPN55dKaYHm4xAYprfYGy+Z9/Yi94bEDFbpHgBsyDB9NGRAr3/f4DtCH9XVz21+AsuN03Snplpb5wk8jzkHFqWagjwcdOi/Y3PcRn6X84G2X18qIZEfIdJQAI7+EhfHwBC3S0SJQmpBgekaVZMdyK60T1GGsucAFJFef0yd0drQ4YGaZnERluS9uHlPttfe8aYGggvd199k/Ikbrh//8R8vr//iX/yL+Et/6S/hz/25P4ff+I3fwJe//OXPtM2f/dmfxU//9E+Xv1+8eIHv/u7vbr7jKe8l9L4UluZ6cZuLTWGrYrBzASQ6Q5G8eS3oYlHNdAXKq9ROzCccP/gW1ocZ88t74I8fkOaM5WGV9uGaB5ueXiPc3ALDiHi8EW+JGaC1EU8lzuCX30I+3YGXGcMX/rQY4XUFn0/g0x3y3Uvk+xcVAx9GlaeaEK5uhMY7jKCD
9hqbDqB1Qh5XYDwCwxEcB4Q4qUcmc2UdbOsCsyV8RLbFQhenzFpnY57w9kH12ayySDAKNR1AY7gySw2Zb4Uix0fNazFW2wisYZsVo2XGLSMTIXFGYtnGGEKR2DLGZiyLFTqPv85NP5rveFboZkLa3G3fSZjK/G91/fZGn5/ri6mNurAXgV0yWnY+5bgKEaI9FouONsfzyPFaZGDfsagqkrFONT9Kddubtih+f6jQobQEqTuuUGftSG73gjfafjSRG7F20vb3QpvL88t5v1JtIzr9jXohmR1M3cOD3njZ78146c4ooBJMvBFzc9Nchj1Y8rMMH1mGz25+/pfT4f/sn/2z+NznPoff+73fw5e//GV89NFH+MY3vtF8Z11XfOtb37qYFzscDhuCB4A6maHCXaWOBGgawm1+04fUPQnjUrLRkqAZbfjfM2n6nEQYwOM1MoUiCzW8fI6rZcVy/4C7r7/E+cUZy92C9bQiLRnpNCNOI967vpZNHY6IMYLTLHUQaWl6A6Vv/iHSy08k+tIoitcZ+fk3kV5+gvlbz3H6+AXyImye4eYKw3FCPE4Yb64aI8ZPnoEOR4nEbp6ClZZPcSwK9aY4T9okiR2E4TsGAyhkEOuiSqQqHsEioH2yiPy2Qk+A1KUBKLCiec+bKCu0rU1K3ZYG0UYK2JOqMkNURILVwBZVjhwQSbq42n6kcJqLuoaHrwDejaRsAbYFTie2nbsugZ+59fajORFBumcnPXaJDbdwXUCF18pcUVuLJl+2aLfChx6265cuX7AN1HPfM9pKNm9+f4lD4h0P7wx4bcsKH9uiqhsLLXnK5wADVeMFZQHZPWXQcsrSQ86iWeuCYIXvbqoAcBdlSQQelWkqRpbb6Lu7Jy45wYX/SC7n1KNJl6BI/awYL67Q4SXyT42+ugDAk01yu/Y1guf+mPx+vPNlzNTPMP6XG67/8T/+Bz7++GN88YtfBAD8wA/8AJ4/f47f+Z3fwfd93/cBAH79138dOWd8//d//1tt20sw1fdajbXGQPX1M8XwDc33d40XnJfRXzigjdwAWI4IwyRQW4hAPAjkFgX6Gz6Smq7blHH+5BWAT7GeVix3C+5xj1VhQwoBN1+8x5Qz+OEOCFGM1XxCEfdU+C8crpDvXuD5//3/htMnL7G8vId1YA6T6CSuSssP41AM13A8YLiaEMYB0+014vkB4eYpQkqI0xGmWm//+V5NUrS8lY2xxTX4h1SBdZPgsYWVSmTjoK06mRodtQvGmsygcRNltYbELaSRkJmwqIBj1OMJoEYuCqiRmy1YZ2cRvd7iGKWT6542oh2HnxPfzLCF7+S7S6aibjKGCKKhJtAZJccCyPxYYbcZDZt3RsvIs3/9NSOY8+FedytnKot4vW4pAxT3owKfN7MoyJ+/DYHbNKrN9b1LwxvWum3ZV+lskF2uB4DlumO0omGLwgCAG+NVnBdu68UKBZ/FgJ3W1BTP25z68/T3whCocWTkmLmZK2BbAuJhRz+//rOLDNRLOSbUqLx4b5a36mSZLhY1v27sRHvNx4aGkda7fsbx1obr1atX+L3f+73y9x/8wR/gd3/3d/HBBx/ggw8+wM/93M/hx37sx/DRRx/h93//9/EzP/Mz+PN//s/jh3/4hwEA3/u934uvfOUr+Mmf/En88i//MpZlwVe/+lX8+I//+FsxCgEIocCJVBIzkNZKYc2dntYFmmbBZoHOg+g8B6/R1X+3a6XCTrS3Hu9Q6it4ukJ870Pw+QGH+YTrL3wsSvJLAuvTkOaE84sTHr75CThnPAEw3L0odV5muJAT8rwgHI9ACMj393j4+FOcPn6B+cUD4iSNLK3gmVMG62qx2t9Z/otHMV5hPomqwnSSTs2sxabmDFAo8BSjLlo2WrgI7fsa/RAB0XILxA1suOnImrlELt54eU06hEqpH2MotGhC3S4AHIZQqPG+9bkNIxAsUCkcQhMdZEu6gbCkjJyp9Baz0cxFt4B7FREzHAYz
MjNWXZSH0Hrnfg5JX0TUhZsWaXgohxYwDBMwSM3fnLYFxmasfGToDRdDry+jdL+1qLnPTPoIaAjilJjhKt/0RCp1RjIDHIzwYA7PvgELbvGP5EgPaa0tVxoiVQYwAGStSULJlTofqhlGLjHjtSphJ7MYrfO6bdfjj8/a9SSW157MIw5ezc9ZjlGur+ZCNTLzLFBv9D0ceZHVs5cmeSwaIxd9uX5qhV3d17s+tr/HUjQ+6gqhXSTecry14frt3/5t/NAP/VD523JPP/ETP4F/9a/+Ff7rf/2v+JVf+RU8f/4cX/rSl/A3/+bfxD/9p/+0gfp+9Vd/FV/96lfx5S9/uRQg/9Iv/dJbHzyPh6KqXi5Mtr5C9e+NF+CZc+oBMHLNEZTfdhGXtXTYCYU9bGc6ZtxfUEet5XyNfLhF/PCL4HXBzZ/6Y2Q1KGkWijznjPOLGfEbn2K5O2G5e8Dh2XPEaUAYB6HUz6v+u2B6eoPhOBU5qXSakRMjP6zAg8hMUQwIkRCnCE4ZGWsxYjbSOCDf34NCBI8jsC4wJWjKCVhnxFExcWNmdU+RPWx+0TUIxiK08uSRPLYSgSlhoFuwPaSXS56hXTS8PmHsohDbEUEC7JRFy7E23qwyVAa7jSx9vQIzxhg2kGIqSSJGSqztVqDH2HwVY6BiwARe9DkZMWRjCFhD2zPNw24x1IV71JVrHEjKHpaTkHWWk5u0oYjoHh1rtC9A37DUynRJ37rMUmwaWSLktDGkNa80BlTy0LIWxm4PGXGIErWHllAl0B05x8Tvx5ESsmo/epXyPg9DtaeVyVL1+b9y2mz7a4kuxkpdcsZ5zU0dYN/NGxBCT8xA1mts+VBDABAY2RWsW1NS6L2aWFoP9c6Kh3g3xAybV0670B76716I1HxTSuuyLqOVxKsXJKCILJc1lNsAwZyV3mjZ/j7jeGvD9YM/+IOPVln/h//wH167jQ8++OAzFRv3g4crYDgKs4+zPMA5ALTs4r+NEGScihioCIe6dh7GNuwxW69m7G+C7kbgJI3jivHLyuZZW4kTnq4Azhi/9D1AThjfe4rrL7yP4eZ/x+njl5jvFqwPK159/Q7AHcJ/f444CewXx1CMkA2KhDgOmJ5e4/DsCY4fvgcAePWH38Srr73Ap//9U4Qx4Ml33eD6c9fd9ORy7GlZVZJKYMn0yTcQ1gXhyYyQpTaN5wmH4aACrXqj67Z83YfAMFXE174zaMRi72WW97zXvYWXxJsfg0JgKrYLuAeaKh0aqDkZ+70YMip/m3dteQtf8wUERGKMgTY1Xn54+rf1HMssHZ/l2EytXqLAhSpj0ZNY+vYrABpjdz1FDAGYAiGMAYMtecmcCreAcwbWM6Sp40mEYNczOETRHHRFqU3Oot05oEn7CIgh1INu6sLMUK1aT7ie5FjU4dldKLs+YF48NviCef8c566lhiMnbUhUbrH0dVF7a1fPFDQWIStsmDuyj1xz/7r+kZKUQABZuxbIfiWiz4A2Na2sTrlnmSpagMBFassYkj4iLoa7rxcD6uuSJnFOOOfWifAoFAWxjhTkOmv3CTGEWirEYbuuvm4010HPL0sU9+3UdL3TWoUF5utyWhfHY59pmFw8gozqQVz6rTeERNLuwJrBwR7oBKS51i/4m0YLiTlEhPc+xJAzroYJWZtNnj5+gbtvvEJ6lbCeVsx3M9Ksi6FGTcNxQJwChuMohmsSFfo0yzbi1YTDsyegEDDdyHcsr7Wq5BSAUldmlcCcM/KyguYTeD6BH+7EixykEJrjBB5n8Houi47JagWgJHuJAmIYhJEHdhh7B4G46Evmb7uIRxBGSLTki14BgyorXAUY84uKcSLduRk1IqGzq69Y8k5DiBhDG4n1HZ1l+5rjsJwIt8ZtdB5qeAQW8ULDFYKybUikdj0CYyIQAnKwBVYixsG6YdtwjldTm8iuIWF/EL2XTgFIrsV80BocXYikbY8uhkkNVl6lHGM5l9elRlLziqV9h3Uk
D4Ma12rEyAyZKsTs5qy7BboZHk1xDQ2Za06VUY2SGSz5vFXnEMdLBX7dvWv5UbmA22tqOVnp+C3IRGAgZEZfxF6YgvoMSOG+3Rs+f+aujzNGe3PSlMk4Z6bvgu2NuxgqVhKWUNbNqWhYiReIbI+tlf19KNXnlwOg14133nCZ92aCjY1G12cNRcloAzqa5OYOdmwve4+5eKIQnB3u4trnukiE4w3wXgaGEdezFCEDwHw3Y34lTMPzixkv7hbMurhdxYCbMWC6mXB4OmG4GhDHWODG8Wookdd4c4Xjh0+lqaUq1K93JxdpCUxpcGWeV6QQEMYZ+eEOgHh9HCLoeAMaRJWjRK5xKNBMuSYW0RqsCPKMC/hZjkT+o102XPmNRmseLvRwleUujLq8ZACoQqs1UtD8mhINgtZlZRaoMXOl+K85FNaiT8wvKYvOYlav2k4i+6iqTcL3Oa9mW6UOSxmPUSDLrFClTQnrQpsYWBEwTNclQkJcy723N4qiez/8AqREJuvGaxRqcK41PF6Xz1Ra1gW0nmTfyyxOD1DIQ2agKESB1IP2+Sr/ZSDmaiTDAPhmmjsGq0gy9ZCUh+aBGkUxF6O1p/iR3HdS3kZbdt0KfqAXt4/IzXjJ7SDM1KyyZZIrLKnSUqtIhFKmkPUesIL2iAoR+rnYNVq+RtXSG5fuCXUwuX8d1UnVOW2M1+49sxPJ9cM+e12Q8ZrxbhsuQG/O2nOI/YXpJq+pG/Beg803OSVo+2w5tyF3gxnveJIdvktpFuik+37zGwCYDgghgqYjKETcTkccnonosAjxJjx8csKrNeMuZTwkRqSEqzng5rTiC6cV482I8WrAeloRX5wxHCV8Gm+vMd5cYbq9lmjs+cuiTu//C9OAOI2SIzvNMMLGBIDnE/LDHeJ8kmM8HEvdV1ASQI28pFs0WY4lrxjCgOjaXRjzza6ID0jsM+8G7BVBwpLz0MWLAanzCgVqMq09M2bq3NZLxHV/OVBZxAqMyR4+Cp0SByPHsInGHmvKZ4wz+7zkSrIYwJy5wJmBUCDGwxBUfkq2k5hxSowlEx5WaF5vwjQdlOUIhQ7rItbc9/3rvQVIjgLgtE3Y+0WzN1pplvrBWYg9RiCSk1Yy1TCCc5Ii+BGFyESAtLW3BXPj1bd/b84DQFPLqcYrabTliReF3u6MlZUa+NxnvXaAsVD7kV1hVfubeq39v3tD8lhtZ+2gUHJvBqi/XnatFcmhtFQj5qNe127FWq3IOpR17u2J7IqDHR9gc9y9M27/uu82HeX1vtmo57/FeLcNl4tYOKBKjgCt5+XH3vv+IfTDu1obL4/By6wfJQDaY2fPI1lFYzCvS/sAW1M5W/SnATQMQpUHQNMRt0pdpxiQ5oTPn1ZMC2EKslhOupKd5wRy+a6cAnJifPq/v8DDJ6cCKcZxQFByx3t/7k9henoNCgHrvUR5eV6RlwXpXhyAOA6YX9wXeHG6fymGazpKrZgVLQ8j6HBVP5uOUq4QR4nMDBKKUitmkFOZI3dNmkfGHoDsFq7kcjm9o0KhRnoxYwgDKASRGeKtXA+5nFfNdbh5VGNnifqUNcemC+CcMkIIEomF7v7pxh5NXgqZgcQBh6GFGn0n56shNtRqO945c8nbEDkqfSQc4oBxGEADmlzTBjXIdf5VWrVOvy20/nrtDVvYgjBnKbIYoFC7A7sTkwXTMYI32qHhkePcGOEsUZxthzN8S3vAQYHsCBi5GrJV67t8j7Z9A4TNe/JH+33/Hc8gHUKl9Tc1b25TZqzK1CuSsBk7UVaJsNRg2frDvtHlHktwJ1o1J7QxWjvOTsPK7o/P/rWorfnad2rE1XldDI289sYlQ9Z7Lq8LYfXBsMipNIvTxn0+iipeRrlpnLcDgLM82P4GQRhANzVPdLx/gfnFPXLKpUA5fHJCnGXRaqjBADgxcsqgQEhzUkr9GePVgOE44vB0wvH9awy3E66/8AzxyROBbT75BMvLe6Sci2I9AIEMlxXL
3QPiNGK9P2nd14R4PJSiZZqO0vxyGBGubhCePANNKyhP4KyQEAVRzc9J8xtbr26Ta/HXzJwL8yItmrChTRENLrYILYYBMQwFovHPjk9VACiMtqIGocYuZsmtpcCYky56BBCFxkPvR+9h73Vltl/tdWO2Bc66W/vFTuBCr14hBnUqGcNcaNmjlmfAoHTvhMnNVgwDu0j24nPjh0Y1kheR68MhgoZJrsPazbk1SSze/s4z2TFd+0hQ39TjJeM9lEiNOGzgUGOjMhQadNEXo9bu9cPWeR9p7RFpxu7K+to9y59aeYZdRyNd2PYvBWTcQwXNhy102Bg05rr+7Ay/XjWF7pf6fDU/7hzPvWO69DdwEcp+k/FuGy6gfcAoNA/d5mb3DyFX+ntvrJo26mpMSsdXnWyJrvQh7PUM+5vffad4mfpgl2irsBwDOB0QwoA4HcHrgtvzCWEawCkjThGnTx7w5IXkviwXFqeIEEmZhbKPvEjOKo4Rw3HE7Zee4PqjDzDeXGG8OSKMA/gskdb5+SucPhbafV5WTLfXpe5rfnFfar180fJ4c1Wo+WbMwtUN8vEG8XwCXd2AjtegqycSbVEAKZmDQ6xQ4ib5/ojjwFnYagaPmNfnm/2FQT3P6iTYtQyajwPkQe3FSYuh6VrPr1p7lkGYorVHgdZHcSGB9EXXnpLv83TN/UJ+IaP2uw42svdrZAgkIhXT9dJGKCLIkRgxEGayuipthRPqMlxacGgOqxzXozkIMXRCM6c2vwnItQmCIGBda3dgE4MeJzQt621YOYtd635RbiYutP8pREh6TgTpi4cwOLIDq6Ohhh/GHrwM7/nOBt509NGSd0kuXXMfOTW1jiTGNHBpRqJCwFQ0FaHEjmiOAgUA6VFUiZFlrSk7deuPNrXs53FPbJyYt2gScNlo7UX33VpM36mGi5hrVNMxXpoIqvxgP9pqPuNce3HFALa2Jua9WFQQcjt7toD2GDwFgAghatfZnOrNM6qQrSM4GItHHUgpUv7cF/FknBBiwHCcML+8x/nFCecX5xJhBTUyQq6wSC8jJ8ZwHHB8/4jjB0K3Nx3E8/NXWE8z0mnG/Tc+UcM44PDsGd77ni8KhHiacf9HH2O5O2F+eY9F/6MQEI8T4iRQXJgGMWTHCdPtNQ7zSfQPj9cIN08dpDi1+UBzCnbyFADa6NXDIt28V7YUS3Rn3YH7e4YCsIb62l8rzrUQ06BIkoL2IQwYoiStF5Li4JQZU6RNw8RtC7/qTe+RC80wGTNyt+gY7aLnYU9TgfeGE6j7YlZ6g8pyRYJTrQeCLYJdXoIvLDq099wQo5REjABSBLIa/kGumT8ldr8tyX9Pbfc54QsRg91LUIjSjFRxYACFjVeNukk0MzNr925oXdeWjHPJKPXGpr823gG5dL1rNL/9rB/M0Fo6IWgk8ZYR41TvMr/WGArhHW0TtW0mT4zfpimpy2MVoptuQ0TI6+93Rwcl+p6H7Yl9dpgQeMcNl1C5NAKyie8p7G8KeXTvs1vUSAvxpA8WqffRjeaB23qHbFiBTTmFqsDujZbdRHEAZ1Vtf/oBEAKu9eFd7k6YPnmJ4/sn5HlVAyUGK80JOTE4c2EXWu7LaPIAajPLuwespxlXHz4FxYjp9hrXf/qLiJ//U+D5hPjyuURan7wCxYDTxy8ESpyl3gsAQgygEDAf7zHeHJGU+DHe3IGub6WtisuLGasMwL4xI8dw6qLiTTRsC64+pAyUglNSyOPSte4NW1PnZw+cWxCtXm0MAyITkuXNIjVRkF8Ay7a7hdBHZq3hqgrxzZrHGQBX40Io8zRkU9yvUV+/ANvwDLX6nh7Xjudu8KKf5zo3Dl4MEWCqeSabt6T1jDy1z2N3bp78RDlJJ4OcwNYltzdcISrxQ9iJVmDczhfqNmGF227OIRT0ZFEsWmOyFyH3uSYfWXljVbQqN/PvImbaN2J2LMEMFlvzTZYO16Rs2DCAB4CS3veWcghrNWBmtPbm3TmM
l4gXzXxS2JZYlN/L/xp1eHe/FKfwT2i824aLq+Gyi0D9zXsp4uq8/IuS/EDNCRhz0W/LQyOc5WZRRk95IL1n77wcHg41MjA6sH7P2q/j+ATxwy+KbuD1Uzx5+jHy/QukV69wfv6qKGesp7MSK1SLUA2a9PlakeaEu689B6eMw/tPcHh2i5uPPsCrP/wm8rzic//nvw4ACO9/HvzdfxGUZoT7T5C+9ge4mo44fPAcy8tqvOYXDzi/mJHmVKK+8WrAeDPh8PQV1tMZ0+0NxptPpY7scCwCvkbmKAl6ba2CYZRc2eGqzpNdLr8Q9Z+5e6F2WXVOgH843TXbPEhcnR5CfZgpVCfDIF4Kg1D8bSMUALSKDz5X1uzGwXrmrZcCajuPZHm8rSJEf04HNaocxPnxLU9skbRR+lJx2+G2drludyWfBy01cMbLrknTVDDUlhWQiK3pCNzQstv6osZoLTMKkSB3uWTblc0lACSHYvTwoR4npxlTnISkA8IUqgTWpXYll4x/P3o4txdZ9iPDhH5J03h84R6pxqu0XyEgsDAYMwlkGONU59wXJOvcsuse8bZjt3mpJ2ns5cFsP5Z77u8B7xz+H1kd/n/lEAquJnrtoQFaL9weIve7S95FyY91jCQvY1J+6/IkAECrFhmnRejAyvDhNDeyLKVxY1Do0Fqx+AePM4wZx2EAjVegmw8Q3/s84ocfSeuSu5eYXj1HfrgTqvrpJPVZy1pqsjjnRhLK2Inj9RXiOIDGCV/4a9+PcHOL8N6HEhFdPUGe74E4IB9uEb77exHe+xD5k2+CPv4arl7e4/zJK5xfnPH8v3+K9SE1ebY4SV3Z9ede4Pj+EdPtNY4fPi35sOH6iPHmWFqnmEGTKt9ZWrDcv6yR2DCKR+1JLOgMWXMRnaNAoUSzZW79bx7RnjQP0efQ/HUrrEhPvY5KgMi5XbB8V1tHEgFcZGXGap1BaZF7e5b+akgzMJ/9FkuOyO4Ry5HydI0QRyGqxEmp4FvlfSsPyA3BoxbZ+kXbIghRYA8lP1agxFjn1hvu5vxYSkPYmG92XmltnDsm6WyAZDlKpc1jrKxCi9gdI7cIWXtRa4uS7TrO97LgcZY6+zgAhJLLNCksL0jstR39HPZciT5y9g5J81vNo0rkC0FYqY32MrTVCqOUcyRWqTS2a1KJWURBrsugz4Zdhz2o19ZEX4PnnQf/3c7R9/d5ZRuG1kkztCDWOWG3/bJvADx+drjwnTZcSEaJBh5rg35x7EELO6Px1H2E5L/jQnagXrBSzGe/d1BgY7Si355AMGxFvMNRPKd8hTAcQdfvId4+IL28RdBeW3y6x5CSwCdA8VaLp7ouSKcz0ryqZNQg0c3NbYnmoBR2ygkcBzGscUC4egKaTwinu5rXigFpzpjvZpy0z4jVKM2vFqQ5YX614PCe5NCGmyOG44TDs1v9e8a4s8DDFqSUgHUBD6PUB01HyWd81tFH2+yM1p4R1O80GD1nhUO6qNvGXvkEAHAosKXdE76XUhNdpRlkxmu5B84PpYYOFoW4+bKSBPnvAE6LaHiuEzAcSnTIYWgYlZ41uRQqOBqosbLgoEXf8l7f1Zndqe9FeVFrn6bhCIpZztHgXEDuU4MlFe7HMIA4glMldPhSEq/AwXESwe1hEgM+HBpnwaSRTN2jTyEQiVK5SWENmj8SY96WURRD5tEytHnJnjrv59pv41KOq68TqwZNZtvkoDKxKG9otJ7QQ5mh7JdI1pegIEEcIHOQrM7ORfcedeqQqQYxooAlX47sqwEPmkdtodM8faeSM3KCNXVswlr7tzca/nWf/0ivsf7uYiEMSG7JDQQU1lwOAJtsSt7cnUXUt/Pgt+QE2++xeLaUV6TxSgV9Z9DhFmE9gdJSFjb2i5sRQTQXwcuM/Oq5GISUQONYDcV0RJ5uZD/rGRgPzXHSJBHSoOrxcRSdxLsl49WasbAKiRIwzQlP7hccX8yYnp8wv5pxeHrA9GTEepoxPb3G
eHeFvKw4hCDXcVja48lJ6tzMgK1zod1jGNrrDOx7iZyFFp3WKv3Tjz3832/bMPqdB7h4oc6zl3nPze8rfJI3UHYxVlaH46ST+OFVkdvi0728NjUKx1Kl400lwrz3IcLxGjxei6ZkOgJxBMZjSegnRrMY+7Yna+YShTUdhp0xM6ZiOQ2uBq/pFAzrQ8Xa3yxgigHDcKyRFmdhmALVkOncsCEPdv+zyqkZG1ENcmO07LVdL+90mBix1jrVfKG22IjVCJrBl0islkh4WNHGpSgLqBFtb/iyN4J2S7nf+WWjkH10/zangaAQIrvuBuz+X3NqADeOiDFMx+EodXcaDZd7sntWKpnKUIZQ7hevRFIP23f+5iZna5DqQ99G/S3GO224mKhGK2YIbILVs+296CaxuKflot/bfS/URTIGtyjlNvwuGH8YwNOw3Q5QjxUaqcWd71FAgsOR4wCKx624aVrF2LgFsBnWWDOtGE+fSkGiKhoUKA5AfHheiqopzQK5xBH56j2ENJfFMkwDhqsB482I2+MAnFZ8siTMmTEDOJmL+fJcasrykrCeJsmHLSvSwyyFzqe51oU9eVJyXAZ3UMhoWnznJL1YbJo6UkczxzY/pM6E5TF7aKS/1B1Nl72Rcoubb1pargeg98kOY8t9zxZQml+BzncCCWp0xesixISz6kTOJ6SXnyDf32O5P2FR5f9yeY8Txpsjxusjhs99hHD7TKLoZ58TAxZH8OEJeLwSsV31vs3gWOsOK8SVyIwhi4/l31iV6+vCaaNvA2KLJaPCjFJkDSQmHGLANB5r1EurYGZ9hNuNPZr2bg6liZBzhV+XE2hR+HWp80cxyhzFCRTOErGmBVYwH83paYwY4VIHZKCNsnw7mV4f8XVjj6HqafOkzkHPueyZrn7EAFBijNHarwTEeESMuTwzmwa8veOGany9Egk7+NnPSVNsrSbs5dIf9ZuPd9pwIcaykPQyLyAtQHQ9knYNEtBeoNckDAtMZLmrS0ryl7bjseYMlDoMTzIBWpYh2gehem9AoBExjpim6yrx06nQF0POjByHotpNizSyLPA7kbC1TncI4wSa74Tddf0MfLoH1gXxiRA71rsTrt6/x3IntWQPKReDVVUhAM6MtCSkOYLCiuW4Ik5yfGEaQDHWHNlxkf2rqgjlCh36olXPMGNAIjCgZWX6cy/Xjtv3L0Rg3P2+5E7MOTLvvPfq4SJ/PRbLm/gWGJF04QYAXSzt+pdC9WUGr/rfIk6GaUzmeQWnmlsEUJid8XQH1vocno6gGz2eNEskkUUoGXCLTm6jJoaHCytTMmlexuEYSjaocNGsRbzmEx6GoBRqwsBciriLE+idDM4tJb6/ZmWOqfym5FBSS7cv19cgMC9wTUGhSN6vJWO3gJNAw4gDmKWYXZwYlNxhb8D8exlvZ6jK4Ws+a69T9JsMy6MFcsQbNXLGIk1ZDJ+I2MvaGT3E6nPBwD4ZQ4/VDtGTggAUq9n6HPLeti3Mm4932nCxw7ebhajANq/JQfSjhw/3Rs5Fq203sQmURashgZQHMdSHwv2mQEgekqSMEOqNZ96b72wLyM05JMKoLSvi4djAFZYcZ84CLSmBJPiFwo57AHhdkM8P4NM98stPMHzXn0G+fylw3SSEi/XZE1x/7grznfT8ek9UYgG0qgKcuND04xSQl4TlYQWFUGjzgEhL5XmRAuEQxbh6ZY1OImj32hl8tEe+8Quhfrah9XIGEGttoBWEx6kWiJtDYb9x2/cRlkEpdp3MGwUgDSKHgGE8CuwbRyCvCOMEnO4unyNQtSVzrGhyCCLXtag+4HQsOoFBF20uZKEVFA2ytMW1tvHwRktvr3KfXaL0Z1aBYO1b1XQFXgEMkt8wo5UsJ72Bdd1zBRRKfBma22qKYc04aURbrqjfdsc2ZaDJPXvCVA8xCmzGtbwialSorFICkECav6rGCqiRjo98Wojx8nXO3Bovu079kG0z9pQ8ylR026n70M7WsLVE
afZQtqKta+TWNjevJk21bSsKF7W7A+0+n79jDddwBR5cM0lUuGZD5QQAWnejJL/Q7XpfftveYHmqqeLlbA+AUZS9x59WcF5LbRIlR6QIzmCFQTw9Y0Tp7zOqN2P4MiA3QQyMWVlGJhVkD4u0gSdMpuAeJZfEnAFlronahIpxnh+w/I/fw/qtP8arP/xjXH3+/4U4DYjHA2gSVuDVF95XoV6S/NXX73DzYi5RAFuV/xQRpwiKhJwYac4YjlllpE7lHAFZlMcsMGw+BwT7TDs+W96r1H35hcauFYvWHseWZWoGmtNaKe/MDTHGrlV5nMxYDRPycGzEfz0bsJ5ANVw+32P9nWwkJsyJkUPAdLgRwQplmAYiYD6L4RknMULHRVVIXoDnE6b7e3AHc0sR+CjK/VZaECwnpA6ZzU+aEeKxUTiX2i45OwLAVO+tQg7o/t1vwllHYqlFFqklJTkw6ZwAQxzkmuq/hcxE7tn0hisnIHthWNSoSwuc7fr0OW8OA3BwRbvu875Mogzn7Jhifm3tsgLDJBEYgJXhqO7tPAF1Dguj0O1m23dOcoIMbGrLbFtJjUXpoJy3/euAeq/aDn2eyZMlbJsSZVcDFpzCSkNo0W0NRomMcrwBQFBHhy0iRVtbaOvWeX0Nr+CR8U4bLlDo2Hg7n/ew0Q4E4T24pi0KqiEEUHNY3fbZGa7ansEtnMDlCK8k7qXanUKoHuE6ayJVtPZqa/jqWzI7nUIWZhHnmhi1b0N/QctD3ac3yg8vkO9fIL98jvXr/xte/ff/ifnFHZaX97j/xidFnJcUkrIx3V4jjgOG44Dzi3OpGbMRYkCcAsIYXf+wWJQ+klL0eRxKW5Wo0RafT9WoA8Kiwyjt6KOD7vzC4xsTevUNK4CNazU25mzs5ccMIjTSDVCcgUap3v3WPOsq3spN1FXmhM3bhbA3xwNApHU7ARhm0OEKQQtxjXTDp3uBEE93UphbdDJzgVFJa+Ho6gbh5qky7AZ46r/dPWTEC6IS2ZPCXFnp2GbEMtcbibHVVQwBMIZAIxgb6n1ojpfl0uJwBJPocgqxqXbblqLvVRRnCjVeh+qCbgqsLDXgr0t0ZQNOwssg3B7qs2aNgVByyEgtfFmjMWERxzCA9eZgoo0B7w1P3x28j7yIa0G72QW5b1rYkBnIJCciBox3o7jeWHl1lhK1u7nsySXlvS5tYUPWHxF3JtT2QRU2FJfCG+Feef9txztvuHyeoRAktLvmhmm4ZzzsJreRc/3zTaBF+7dEXBGeirv7WwcnbbS8Uq6eIw+VoBBWxDhJ0WuAfWO3zsQWV+si7O9lsm7RnkjCGXy6Q375HPnlc6RXrzC/uMN6d0KaV6z3J6xBIKq8rBiuj6VJ5ag093icMF7dYXlYsdwtzen6rs0lArP23a4PmP2d5wVh0EUsRJArRCVnTHwXXYt2y9xa1AvI9bDrySPgIV5/HfV1X2QpkypwVOy9coWcKuOMi+e9ukS8HwWiyYyFhCZeIjUKoDSC02FTfBxudEE/PxTDVZQlypeCGK5BaOFFTswNdjCZUamZZMGUxURriDIrzEQbAxZVQ6/x8gNv4KgxBBGZdc4VQ6BTIsIYJ+RJF86cwHEFLScwj7Um0piFa41uTSv00WHXUmvaEoL0sssZq4sQ+5qzMZKKGgdMWgu3aQ+jzxDBVCsGlY9Suvpjh+UMljcqtR0OVPKt5qm8sWjm0p553WllHdZ92PatMNr3rav7fzwHl9/gnEpzTV2AklhWbSBLCNQ6cN/OeLcNFyp817Q8sGfdmuDtGI4CR+zBgulycliSuQRQ3EZzthh3kVbTYtt/vyyOss8Cj1ii2WjRFj3EFdN0jUTSpiMF82Zq3ssW0djdqEZf5uEIWu6bfj2UFmQnShuvr/HkT30e6/1JDJhT4rA6MM4ZFAIOz26LAPD1F85YTzOWuxPWu5Oc5iP1dZavkWkS+DBMg8BgZqw02gjjJIXK06HOu8FDmofi8arxqr1T
0j8vBeYzhwe4CBP3El6+mLgvWvUMu77CYgvNAOeUMScghhFxnDAenrTqGe446v2UtO/VWnsa9ccYBvAgTLkyB64w2R6XSFT6yGT13L2Ab4ZnisFR6MVoiSGSkxr93KJfoKsXvigikDhjJmAMI+Jhqj3ElAXIHX2dYp0PibgcvO/+bic9FKN1v2acVsac5D/fysQU3GOQVkFTDBgCcBwCpjhgGAaA5vaa6FpDWBGjwGpBn0cx6jVX5cdesbJdixqVcGO8zLmIrnidUY2vCSvbvBubM0PSBLYWDEEYnv7+B+Qe7/PsNp/JGa09Mko9L3G5g9Q+yJ0QxHiJ06OlEYGkm/nr+ASPjHfecAGAFdABCvupwephv35cpNdeWAxKC2vzyLvcyC5Gvms0AZgXa/sE9o+VnQYfBSDNiGEQ9XiuEkORUDTz7G+DPSIykJeihM5hkOZ966KsPEgH5pyFjn77DMPnF2W0Lbj/g/8PDs+eiOCvFsOupxnjzVXJP1GI8tm8YLk/4fzJK6G7K/U9maKH9dXSCC44w2YU7zyvGFLGePsEpmzNi3reIYCuJDleFufhWHKdpdCWASAAbItEndJS6Q9RTmhKsGxRYCnaLHUvBFgrFS8+GoM1qxRv2y6ILYLlsneX1fJCdQ3gDsYZEGnYhXQCAQPcveVklmg51UJb/azUNikxIbHNh5xcNK8ddjxqvCKQuULSvji51G15Blk3Ngu2nXsGZog19DVJ4mwFTPEKcbzGQADN9+iZgXtaoP7ZayTUlMpu7Mk1V/r/y/PaqIVY77PrMSIjYwIhJCWlBCqFyeSdY1ciIxE5kPWC9XAqsDVaNmxz7O4fH3k1p6o3WdLvbR0lERHOpPJTLAbP71+af67NuinLEtX5VGe8MilD+U5/akFxZVJnJgegSmlYFF87jAPiFHzW8e4bLi/bYw+vbzHdQ4GPjI30T/9w+G0ZTKmv32wHHnpqe0b5+6CHD73gb6H9drBVjNWjrYrWkId+nev5GO5vUUmKQAqg6YDxe/9PAIDl//F/URmmhOEv/01c6T7W/+f/Vfbx8jmG092G5cfrjHA+IT45IYwD1rsT1tMMI+cnrCVS64cIAi/ltUCLJyForItAYpbDUaWFMqd9Ih7bRcMXzPoF1+DU+tm2iDIGUq/VRXJuf+XblxZw/df200Zl22YRAep0hAr7uB6hCESYIoFImIVplTuJCJjGa8TBXXNlQXoNw7odO0B5QeWGdBCUOzf7fft6m9PpR0tSkD/WZNvmzTkvgcqcT+N12805u3otdM9sM+kCFxu7U4gh2w7HS/K5Fs3hDCj91rKyIAPkGSvsRR+Y++eTQqGgG+NuD4LLzOXa9rkfH9347/thf7GemzkT9Qc12knqnJR7yMGdfReNcpsZ5J4JFDJYiV3RGTB/HFBjFliKoRvYUI0Xs4g723mN/qZ+y/FuG66Cu7T5mhIZaSdVALuQ4aZGxOPXPVuwwBJUDdClHFq3H6HPdlBUACjrMXIGZSew2xvNfj85A7Q25xTUqO2ZUN8iW/IesgBTXkHzA7Ce4Nlx4fop8pMPxXP344M/hXz1Hmi+x7CeJV/m2rPzOpei2cN0xPjqOc7PXwIAlhBApxkrUJpU1mnPWpsklO4wCgkkjAPGGJCnBzFgOcliFyLo+snmegmcNJWHyksZ9dX7BqmK8cq7i0WJtrjCSQFiwPxz2S+MZd7ht2WOhSyE1lF5Tlzo5H3H3TEGpRxXqM3GFGu3Y7/tORCmGDHqgm/ntKZW2BcAcueFj83N0xp6QNiQNYfn53C7ANlC6+c5scGp1Xh7J8PYsAZzy/kZ1B0whoNo4Ol37RqY89fQ3lEjL4mu6zUNZPtSUopLvGR1JoaAAoPa7wWC34ee/ayRznEmgHa+WO81idBtjoDWgPXz1w/v+JgjZvPJQfcTdL6z3a8knan9muXnzjllpecXLaA0FJIL2e8tEnPnLkX+NgECG0a9R3rju3ynGi5WppDcrK+p
wu7zBJ2nsWd0amjsoIeedPEao+WPtWeg+ZYD7MklxXBuDStlSH4ur9tjcb8pVPEQhLmm5zBzkAWSgeMwgMIAWg+g4Vh3c/VUjFYXUazv/2nMNOEwXoHmO9nnjUaI6wxazwiqSpCGEXQ44jgdQeFjxGnA+jCDXt4hnSpt3tqsABA5qWlAnEYMqqZBh6PKGd1ItHU4gq6fgKcnyMdbUYMAmnkQXTQBX81ISH5mC2sZZm9er8FyPuJhMBKAkB30QhW2g35/DCiMQS+ka5GdyQUtClcl9fjPa8YpZamFcpZPoKtQ4EM5Xvn8MAQch4hDJFwNocCSq57rrDJLvTQTUJ8UUV1Ag+kVI+AWsKj/loUPba0esAPFOpKKOQ9z0sUcLEpeWeq+2uJsgeuszX0MYrSNRGJED5+7jWR5nKkq7JcTkhU8QDx8dqcbjkMTfQmBQRyCMQBDJN2PqDIGuaX2R5cfJSIEthYgO22QgHI/+AJlf5+aUerVSMp1csM7APZ3+Y06CyEDq4bVYxikGzlQn/UuF1/0NXWdojAAzGDWJrDWp0vP3xw5u18jVXUReb5aZOPwnWq4NuQI91q8gy4qumBUdmFB+8gz1S7kxN7oOG0b+lZlMsn7BEikFSqZg92x+ZYrxYsj8aw22w8dww7mdTFmyzEByAp/8YAm0cOHm93zOj55D2LergB8CAA4vfpUErvnOxhhgQChYq8LaFkwHF+KMr0zUsDlyCsFYS+mZUUwooYVJO/JdFkErBqSa2I1DublV4JBn5PZYPXQ3ADa7xhL0yCYQoun+jsAZVG1RaREGg4a9AXJtQ7KoKtcjmkhIK4ZQRdTP8zgXWu366HAmbIfYiBoztOMce5uXzPQjw4PK4UBlne3NaeBpcv1QDHcqdDDWSJXdkoOmufrPXGxrIwlKFVfDYrAtqaKzmJYWIxWoHqhhs6ZJD3PWCAsAAMQ03ZBJTOCRm4Kek11/w0aY1PUwceB1HCAYJJMPoICfLTvpKDQ3qd2z5zX/eLuoEZe/rbomco599R4c56IgcCSs7M0ha0/hLXN3/UwvCFO7j371+/TDFhtBFvNd9bjfe2998h49w1XMSpSeFqN1rCbg7LwV1iF3eb05us15jZGy9hEYXvTbgxkZ1Q8Iyi7hxgArBNtjEHcI3tA0orSINMb2TBAeMxhU6NickumimCGy7zlIVS2ENFQZIAAYPyuPwsAOH/y9ddeguOT9/Dpq3scjlKPY7AjHW8Q5hOwLlXtwbEVjKhhJA3Jca0lEptf3IGCkDfoeF0h0GEUWvQgOQ9rIMhqtM5qtObEu96rlycql0gvre+hFKjml4BqvIziK79jvW4ouTA/zGjZ8Xj1dZ9jsUXN99HKuRpHM1xSbyUL8HnNCIlwvyQkHsWADWFzDGZIATHGj9X5VGPcwUg2NMq3CEw+xy7aIPMxdEw7qIq5Cb4SQrCEvczBkhmLLe8ORJGoKmCMEoVaXp+DXRcxTqKc7gwp55oL1pMXw6vSUztzYdRxu6ajzjlcns0/8xTQRFRG0jATwuBKWmhyWJeN1ppZo9SMl3PCeU1YEuNk8mhEOAyhROXHGErNnEWlhJYAUtYcXYQyWSnGXM5dkJlcy4nsP6csYqmGPsq0bdj1sn3Wbdc8mMzTZvrfeLzbhqtbxMsitjOxDGio6zwMCo0KRjFC/cXa2595GWbsvAxQH/1ha7BMvgmoBIGyyFCtWLdzKqob3fkTawG07mvlCkV5qMq8Rs9qkgekbu4K7TiNtzi8wWX45Jzw/mHE4eZD0PmVLPzLgxiZEBCubjCozh5FkSfinBvIUJpR1sf/8PSI5e6Ew90tjvOKw+c+lAjh+hbISYVSz6Dh2Gg6lqQwzFBrw74Axd7Z/I7GeEUiHAefU9mqfPu5NAgmM9f1NVWvOrNI2tgitGbgtCbkDJxSRs6VFGCLdiTgWhNNiaGedpb8VGZcjxGRgMMQcXBPrkQtBr3V9+04TM3dDFjvGfsowC6BOVHW
BuMxSL2PQIDq0JkRlOiu3oejRQ4hIlIuJIlUok5uIEQx3hnHIWBMjOspKiyIQrhgfbDL8Tr9zwgt5CdqYFxga8z7z2KzTcsH2ZeS0vvIrRuydkR9jpcssOHi4OKiqO4MFoBKad/hGZtDA6AUIscxyHtRDLvBq30jS8svFuckyT2xBkKkEXGaClPVWMzlWnbojb9nqNwvaHNkHm4kJXWgNWCPyVS9brzThovSDEpV7qkRPrVhC3/zQzFulFZwgEhx9bDjnuZd97Cip0J0cKCN3mi1nxm+bSrPdnld4tmOF9AVM1cDuzOY23/tNXlPFHXh/dzTm93tvPfkevf9fqQsLQqICNN0DU4L8niFcJzBD3dCsR/vJH81qsDrvGJ+JTqHbLCagw5LYbIqa8RpwBAi6PoWPJ8U/tGGgnpd4nQNBPG6iYQVxgBysEiretv7eS1L3pNem3aeAHWAysNaZ7NG0DXCs15FmcVoWXRlRstHWn7kzIW80cwzc4lezMP2ixUgJAzLvRXvuhSFAkD97NLoF3KGi8KaD3RxsufCwbglCtGIJyu8Z5BnJXyI8G4IjGXlcu4eMgXES4+KJli9mDmAPTmkPkHdYprXGn01z7X7MQVwaJsjmqG/2F3dDLQ9/56JFwYEPW6gzfnV8oKWWWqXnQgYY8D1yC7izMV4jZFcFNrmBBuqPddzZKqak0Y6kiiNpVEooh5z+3tDb/wwqLlhK/ZOjIvcjJVoUPNe9P+m4502XKWRJFBvxD3DtfcdzuIZpdy+56HBvs+SCeReGl10VX6m//ZeVf8dM14Mdl6J22cIYI4Ak4j1Avt5Nzscao1XezPuL5qfZaxZIIwYAETR36PzK9BhFo29g3Y8Ps0I44igBcxpyZhfzWCLVBxVfnlYkRYtSh4HaV6p0VuejvLQQKNoQOZ+mIqXG5nAoap3p0DqzYtBAwj9Zbhk2JuI2Qwd1Ui5GDWuhaDeaC0pl1yOMeouzb8RNFYfbVCNKgCFzaIVycpiVYgXDNEcVGiKSRcnzXeZY7S3aPj3+o/L0ZgT2D8HXgCAAkyBpjTO1EjLBKeY7TkQ2PqswZGQNWQObIEOgQBQXfz3nh+7VtwdfUd22pzXTt0S4oRAYeNwxjAAYZV1AAm1BqpNMZAJ+ep7ZvDMfvRtQIC9NaPKNx2HKNDmwDikUO4jn+caYyjwoP3OVi6/3vj7LZEZZEZIYrwMjWkgxp3nxNifA3V236JS/14HpVr09W1wM95tw0U5SYW9Ya8dNl8EdcsPDOsOzQ39GDmj3WHYfd3QbhnoH6vM2DVWBg3aQ9ffMEFx9XIul2rWnMcjMjUE6xdUF8p9L3vnsN563C8ZSxZI7jgwnowjjlfvCUxIAXz3stzMeVmRF6HZLw8rTp884PxixunTc4m8qjhvwHQz4fxilnqwF3e4zRljSuDbZ4jvfQi6uhWoNM0I5ztpSkiabzPpLc13Zibw0F6Pvr7Ls/28w9E7AJb/MJ02i+xiBlKWRWDJRkMOGCOQsi06oYjSLplL/kJe52LYxhBKjuuguYxRady2UFVWYzWqfliSXCKH6hjlzRJezw2o93BvWKl8Txdk86ijyZd1zwUAY3pGIjBB5Inc8c1JWJXnNeF+yZs5QGbEWD17g5jsPAwGk9yjSkk5ynfT1qSLCMy4GMpS14ihCOja0bK+T7miHT7nVVh4kOisapi2c2d5Z8tjiTGrzozMbyWkiEh2TVnYnVl8bjcvnjC0F8W1awA3r3tnxhOPLIr3ijwIrOiDEmfISGE75BUfkeLbH++04eqHKRqURd63o/a5KxuXjFYYhHYOiHdVcOuAzYN5Afu1cdloSTEesxXsKTyon4mPidYAc96ejxyIwJ76m8F5jyt7mKwuyB5B+pMY90vShV+inOH4VI4jZ4T3PgTnhJASDs/EaIVRAB8T5E1zwsMnpzq3mQEMWKNAiqePX2phcsR1yphOd+D5hJgTwvVTYDpK08RhrBJZFISJqNTfWDcu
14AzsORq4OIkM5jFGzWDD8ji4euMKryoi6jCPZGApIH8EKhTnKhRVnbX4z5QMWD94mH3RQ8L2TDDBGBzTBmMqEZVoi1Z9BKbqZXh1Tn2DNpeU8BmeAPm3wNgslimWmH510Wh1DUzTprLWxI39WxBF0sz2GNQw+0fYa7RqDXYMEg1apsemxyTOfP3QLkPzNBYRBmka3Z0+Z3NWlGeM4iV8Cw8zjCPUoyJ5Zdr4bHA3awlErUsoFDzlYBR7zu7BrJdkXzbidY6B8YbLU/M9w6ZGU8/vAyW1bVRrCoctm2L4mvd1+W8oV8Kvx205/8vDBex9MsR3bBcF3hVme4L6wC0ArfdzWhkB8qKCgCA70Rs23FGK7mb462O3Rkw+9uKJHeNllc2L+cvx0y+gaTm+waDOx1Zw27YPynBS0CIBDkDSeleUyQ8PTwB5YTw5BnCOsO0Bw8AKAhcuNydwFnanaQ5Iy0JnKQlStAVmRNjeVgRpxPOn7xEPEpec1IlDT6fEG6egq5X0DqVdu4obVymtpi6d1iCfm/MGIcjgCoQakbBDJfVDwE99FrrmwyarDCjGbcKPYq2JGEMkiRfUi55DBu2gPvaptKk0zwPyCLtE/FyPFsoZu+aE8liJmw8a23SDrvTjA2W4Yp/93JfDpL3BcfeaFlkOysRw8oCQhCKu81tMVpqsK2uqoV0tW0KuXtb52cIAzjmJj/cOLcNOzCU55+CtlxRA9bnzplURcNqnMjBhTYHbo2xSMi3sjc2ZDPXDiVBEIM2cixGq+QI9bqbELIVGF8afl2yOfKRnjlRdgx2r42RMIl+f3HEzEkgrjJnTK2x3M++Xz6mtx3vtOHy5AlRh7A2D7bQSzfgDY2cjBLv8G0bQcL/glfDGa+uUjyx3vtdROU908z17340N1pHCGi2542wp+NSkIfKsw3tAUta90TSsDDGQSICDbeYKtTw7Y4lZ9wvCdcjkBYufXbmxHgyPcX1M0IcrzHevo/1j78GOhxxOL4sqvJXX3iJJ198gVdfe4H1YcV6WvRUAigSpicTxquhKGrkecV6d0KI35LvTS+RXz0XvcVhBI0TaBhh2obWnBKDtkHpGkGakUNeEdYZhzBgGibAa6k1Ubl51XUbUR0NuRD6rzke1MJR1kstsczRKWWsSTTyPFW/94KXLM7Bkhg550LOEEaXLNwFTgu1SNfo3XtkE7Atwgq9cUs68SOrcWSuOdhivPyiDiOGtNDrmivjVYRua5QxhoD/L3v/FivZlp0Fg9+Yc65LROxL7ryfurl8kW9gIf+W+3eBGqFuZKsFTxip1Q9gJPrFKlsC84BAvIAEJVBLvAFvvLmRUBvRMj8PCIER4G6Q27J+jMsGu8pVdapOnpOZO/clIlasteac/TDGmHOuFbEz82SeW1bVkDJ37NgrIlasyxxjfOMb3zBVqZBOE4etTmtO957bHNzgU+QSUYSimwR/eW0Y0jWR1gbrOAv3LmfuCXlxmTmpgbMamf3yhVw16TxI/VWdDjmLykwbstVJ5++WhXczkUveuzgUN5W950NCy+GfN/eJMTlk4azAgZjUTv3k3fM1liBsFBk9lVAzpblcr2JvtONKNmf7aZalQwPL55AXEb3hIgLSXC+92Oa4rPy8aY7PITt0XvZw5OJ5jZLLE8z7O7tFjeEwOAZg7FMkXG5fZhsAAFejtRw7BcuRWo+IT50dPfc7vIxZ4kX3tBX4LygkFrEbI+rmBDAOoV7AGstEjeUVzGKFo7pCc+sIy3tnWD28Skr0fpBR9oV31X6vsdvx+w8j6n5kxY22RajbRKYhnaRsLahuZQBlBWoW7NRk0CJVxbwmt5tOOS6vAWCatReBT6JGz7PhMivWWpuxcDqA0Do4w7qDurgnNQXERPRQBxciYYiirmEABEKgiO2Qzz/3OEW4yOoPEEJE8HkBIUKK3CfXGXJ2VDLbUk1Pv+9sZSyvPq0TBgg0KN9JnZXSwNVpaVZZWUIF2nNYqfH3BiPK
+6OwVqmoQYWYrIoP64iSEnaf12SIjCAEAdHVINTihGQtcC69LobZ+dZaV1H7TvsLqauGKOedA+Mgmo9RlG1KB2DTecvZVv7+lPQBgcOZjpT+eBf1/ESBaIXBOWdyKkQLBAyGkQGSDNFIwF5OZlbCiX5GCVtr03j5XSLiC9fP59m3h+NSKy/C55Atkrp3mdbLzzyBuFicbM60Djmtl+kAf17jZ14Y9r9L3lj2x4jElQHSKJTZeA6oNIs6POMYbjQOzgg11yCpgr+u/cRnz/AbXz+HK6JJhoEAIIAGYFUt4USeyxgHtMw2hDGg5RrV8Qb1yZKbkAf+55N+4YCx6ycU+SEEUMfZl6kdOy/JyNSs/E4Nz6eCMTw9uarSzCpq1KnV/FxV56ysbImYCwPTrMViTgRC4dCQgwkoBCVZXm1rVI6JI94qPTz3nyktnusTuqjlwEBD3pJpRzC5ATQiXWi5iJ+zKiq2K9mRubfs+dd2GZxphljWU8fiOwwhsyy7MeyTEQqGXILLb/gs/bv4Zpbn0tqwZJoko2GSw/J9dljPUZov1vnCOY2cdU021IBW770D9e/iPbNEGKeqJuZ7kNcVmjTNl06EHcF+QzHXyAizPZsesxj3YEm9jkrFlmESaRP/jZCg3BgzVGjAta3E2o1lLXca8FQRIkXD0GIm17y6vdGOa6LldwPRghdviXzmY9bVSomkokM8RU4l2wiSHc17rfTzXrTPxeP0WskI+TvlInfC4ckwR0S2VbYShQBEjhppzMQG8j0vkDKGgR0xAYGHUQJ8cc1lhF7HfuKzZwCA33nnEiFGbIeAypo0+2g7BrTWYXn0FprlGWi3hlmdwy2PEbsNwvoSTgYjxp0I94oq/HB1jd2za4zrLfrLDdfFJBM76Kj0nyhvmKpK2+nPatWiWrawR8fssJqW1T4WDDeadgUYyxN4y+GVRfa1t+gVDa9zBmoiAJBBdA0PerQ1yHCWZ4AsVSTXnHcOfVAVkADjAQJTo3XhH0JMMkwAYH1EI6dbGWAa5c8n35YBV6KpYzoHqnR8cyuhoXLxKrOsIeRovISllDmoaiBaxyop/sp54PfXT5RDilKrsJRoknlTOtdr3AG+l9+HaRnhhhr35GdhFKO0bMwcU1EX3z9IuR0AAEzB+NVvAjkXpa6f9nsBU4c9eWuOvicBgB6l7EAiymGO8xUqMTL1s5K35MzKRMpZuGxP6b01S1S1jywWrbAuo+3icM10EsPrrD5vtOOaWHkRac1HrnwmzRphjWWbTrqVrEoi4T24UEypxaqknRzPjPxxaP/KG33yWh0aqA7ped9xfoNMsqy8D3NRXwoeGDpEMjzLy9wcpb2OVUbxc+BqN6Z+HGMIy8rg/qrCqqrRLFrU7TGMqs33632l+TXXrhA8xnWHIA7IX/WcdW1H+N4jhpgcWam+YSylycuHnq8WDm7Zoj5ZsqjvaoFq1cJUnMFprYzqlskfdct9aVI/46DAIPWtBJ/qaiVUmU6VFPQpjIjjLsOHZRhdBEtULWDrJeqKNQI5owG2Y8D/fLrFde9RGcL3316yRqHsRusIjSW0ziQHpqw0YD/QChFJ1VsXJd1Sd23u6OYsSY221VnFWIgJh6mg7k3ZXFkLSc/lXeGMRX61JqucaPOtSjOR79lp6QDKsi5cBBw6NSF6SVtF9ixl28amwZupbmWKulVR7w6yZ5NgVH9KQDO934oWF93uIGORG6IPokPluUBmqqoj8UGgW4gTkuCEiBVLQgC85X0JgTCA79NwwFMGcUoEFR4HkJAACU5mr9MApDJ5mKVC4wYsFP2q9kY7Lp1GTJAFnfLPFCiq8yLkbdRmI99TQXUmcTK3gxdn+fmlzRzZJMmZQ5taLD7wPefPU+koSwgR2MsM0nsHAGMP2MACmwB2V+f8nHFp2GC7mIs/vbzV1oAoSiMka+nt5IIefMRmCPABqG1E6xya5T3UyxFxe8GZoky/NcMGIQRQ3yF6j7HrsXt2hfU7
T7F+tEG/7jGsh0SnDz6KbFRADMpKZHIHOyr53eTHtjZwbYVqVbETayvYtoatmQjCEGSD+niJ+mQJu1yybmKzyDUzgSAVRiKrRBBxbjMnphoJ5Pt0/mh27hlWrBHqHtEPINfgqF5Ca6zW8BC+IbASxVFtkmMLkRmdOvYkyRXp4p0uoFzn1foqX5vaJDy1vbpWzMojeZHMUFFJtS4t0/vzc0rzT5JklB1W+blVAZnpIqjK8LWlDA0OuyIg5ECoHO1TOonofQo4eAhtPjYH14byb8itDWo+6vpgMnHlQC/mpLVlznwOASW1fl5v1YBb+8bSiBFj4CVDoshn0kuWRVGDkJiKXpUl+EgIgfLjCFhb9GyRfq8IIw5Ssy/SDH0C4WaoV8fSlNdjOYF5F6fQ+vuxN9pxpRN7yHmV26S7IH/d6Wwsl+EgqUPMs6MJJPg8iGH++wznnrxOL9i54vkse5s4rZucY+lkCwet5JRE8R93aE7vFC/OTqp/+g7fFK/huD535wiPLtbofeRIy7LzAvhGuOw8KhuSUKolHstx2t4GEaE2AO2uYa8fg9oNsL5E9AH91Rrb957h6f84x9OvPMPT3uNyDJjj9hr9p7EXRDyK3RAqAhbW8GNxXqYyycGxI3Pc/FxZ2NqiWlVoTxs0t47gVi2q5YKdmIxdMYtVro1JdkbGIroqObFoDBNEUKeoXh1V2F0j9l2aY4bg2Rm6CvbsPtAsEV2L0LAslzEOx8sz3GqrVBc6qvk7AZzZ6IKeMnltp5C6Z2KjyoKnzguARNP8y80QVXZaeThjQeMGMK0Ba6O3QaCIqsj5psrvh2tq6sxKlqQzxbwu8HSClGF5GaLpC03BQ2UEdVqHviNRdh5F9lOStYaQj0WZbfDctoIgovdsCHk6tUKZpfOak3sOOK5oeKRIcqrayiGkIiuN02OUfQAwEngkT1RCRYCJhGhlxBEMp2VOmdIxiTnPGaZegkKvMGRhpeRU6awax/efJTBsu+PzY3bXB4/9y9ib77jmC3kJl+k2B36mOhawxyJT5iDwAhzWHD58JfPQh5guYocAGkQ9PfhpLU33S2HN+ffR7yTRe0Qj8N+GI8wwck2LRA6qdF7DDqA+kUxu/Dqbc35w9uB53/qFdlQZmBoIkeG06z5M2GRal1hUhN5HXO4CfufxGo2zuLOocNy0eHDn87D1Am5xhHDxBLb6VmpS/vp2xNvbAedDeOG+PM+0hmIA1IawsJQc25EzaOW5I2fQHjdwC4t6VaM9a1GvKtRHNdrbJ2jOjlGtWrhlyxCjNey0hLkIV2UHV7egZgEEj+g9Yt8hXD1D7Nbw18yqBMBQ5vER98CtjmHP7oPaFVC3oGGDHzj7NB5vLb52scNujDAOaCzB+S2ScnIZ1Wvgouc/ePYG4sT0ukhj2sHXvo/TxXkCT8kvZVGeP0OaVyMvlsnJzWqqyVkWNbeSiMD7g0mNjiEnvZe2wDCCxh1o6Pi7Sj3rhXUrMoBzoDiFcgGkWXcRAFGf+r7SelK8j363yXEJQkCQRd7K6xNsqE6rYDmSHxCHHlGYjPkgccau7FcqiENK8GG0RWSmQkAURMUKa9GAG+rL9oVR+r+ILIIDWpdp+Br4ycfzeaBpKwIH8jQ5hzrBwBlgURm0kgWb3RoYuz02p7m+xKvam+24brLyQi3T6/I5U6T9BWU1FBeh2h40WH4GkGFJMYVtCJnpZAgc+YZM0adoEnS057w0EiwGRs5Ff6MB4Fp+buzz9wqMjZaZGsUI9/AHnn/Yxm7aZP2KtlpyxtZt1gCAo9oktlnvuZhc0vA/DeBbVzsMPqD3Ab0ndD5i2RwjDB3svU/j+HOPMHY9jr95jbPHG7y3e53SLlsofnYhogtx4gw1QzutDI42AxaWndlZY9GcNGhOaizvrtGcPEW1cDkLE5jRVg6mquAWdXJqpm0l+0KaGL17do1h3aG/WqO/3KR6XX2yRHvnFM2tIzQPnsHeeQhzegdka5juEqfN
CR6sKgwhwgWGCE3ZaF3IHCUr0AIqrv0kV6QkITJZ8kgWYc1NFBY85MQS11JqaiW5Yp7BTR3XtNcnT0tQxiFSdoUwsKOSKdw0DkxOkoAw7LaTxV9rkmTttK5NM1JFQnBMvj/V2WijcZxLzaavC2CqYpJYhDpTLk4dUox2ymAUanscCiEBYMpmnGWAk1p8aSUpRIIIL/UoYArJ+siSUrGaw36HvufUWZUyU4pkOAn4aHfN56jf8M8wcmARI6L3oO13quMylDOum+CzYhIwgD1nVWLy5dsCSA4nOa0b6leHIEQCshJ1ggmKvjKAbzTjpn1kofism0gfs+8TbQWydYYeLR+TWEIlL2O+h/v+//Xlt3+RidN1cYQjg/bo+MZNzxYVNoOHIabc9j5iUS8R2xPYe59CffkEpz6ge3KJy29c4XwIeLR7wdTr17QhAsMYcCkZTCU352llcXs9YPUu4e43rlCvatjawNZWoEaunbmFg2tdIoFUq1acGw+LYeUQZkr2VxvsLnvsLnbwAq3WqxrLuwss75/i1jBiIZG3WZ7AbC/QALi9OMZ6KBbTkjk7G2VfWrk9w4MBiMVId8OLtrX1QYWV8t4pbU6ucKCJgyttep9lIkjprBLcFgPI71INlMYuIQ0kU7fj2DMrVeqiqXYlkC23QHCDehrAamblA2DqCAKrYkSFWoEJmmEAaczOX7qENcuAdw/yF7g2KXBooCETD8qap65b+efMAScWTbHmyLG1ZNIx5udYCoz/VlLwKe1/OibFLu81FxeOy1KGBx0B1F2Chi1MvwUNm0y8GgcEySrD9RVe1d5sx1WaOrDyRErB8tA8GW3u1FQfyBEIKN8s2puTalFzB1hClVqAFemlsscKwBRaLBwuwSAWPUAJq5e/60XN/VsuLTLc2e/S/tAIZlTq4qV1jJe0eH3xfo74C42GDrE9AXUSWa1udlx3lw6VqSY3TR+Auj0Gzr4H7kdPcPJ9fxTt574X9fG/x8n/7+tY/dYj/H8LfcMP24bIBJO1H/HN7vmF5dYQTiuDhTU4cgYnzmAhTL96VYMsIfoIP3isNwO2PuJyDLguBqTdrS3eOqpx8pln8P2I2z5gGTzqO28Bb38ZdnmMo3vfi1VzzJ5kF0D9ZrojZeBTkpEUEiyz+EPXSmSBXM0eDimzA3hutlSSLV5k2oNl1FkNW2jzMI1dorTT2AG7LeI4IEjmCmmn4DYKYadq9mIs9++1DNmaxYqh17Jv79Ax0FoZxNkTjyyBAyrjktTXpOeq+Dm5j4VVOm9KntSmY9hvkC7OT2qQV6db9hraYn0p4WFIEE0GzhpEK9OpJZGba1Hq91BTBmlZ11QrZdBsHEEDtyCkbEvOU+jWfF4GDi4QPML1+uUuigP2RjuumDIP6dEiOy1Az5xWWbuaz8BJSGLptGY0dQAJ+0gyN2VjaqneDkwvRABpWKVedEVkN2c9ZUxcxmkDfOFKljYhY5Svp8wwTBf3DVH33vEchxdv9H4t9ac9P/P77O0MHV6ut8xiIgDgjDIsz1h14q3vxVs/A5z9yFdw78d+Hw9/9cv4+nmH373aYf1Bii++pnUhotsVMmSvYD5GLCyhudyhv9rg6muPMKw7HF08gVmdcO1rcwXz8Pu5aK+B0pxi/xzjLCJMF+yZqn45iuVFKj1KnCjhvXIRzx98g7Ms77sw7sGBNO4Qhx6hW7OzEieVHJb3kx7A0HWIISD4wEzS5VLaG45hliepf4/VVdzefqVsyA9cU4qB22piAKoWduaEXnSsb6q9lcxhbnY+APPSDBqcZ17pcw7c7wXjmEgGa6YXQNapAtKc72cJTZb0/DDw63w+V5D+OfJMPokAyFhEI9kkhpQZv6q90Y4LkBNhAECmG+v9OoPWInAjZKFmMIUGaVaTmpoXR5LhyuYGUsPwzv+UDw4gW1zo5T7OIB51WhzxFXi9dSkLo/nr9YJTp3YoK3yOVT/xZ164zctY/+5X2WG6ZvK5/dN3AAD1
7YfPfX1NAZ5MdlyuZtkdY2FiQHV0C/bOW6jPzuDaGp/6yrv4wT94ht/6H0/xlfWA7jU00D5J9rT3uNc49OsB43bE9r1nGNYdds+ucPaDn+WG7V2Hql3B6MJrRZqICvKRKRfIA/WQ0g45LenLmgRKkKh8BvfZmdPimlTOIgBMkJESwUgLrrLtfD+pZcXtNYIyMGU6QByG7KhUomngMThhGDGsO4R+hMoyaR2yPl7CntySzGsJszqZkGlI2xuqGqB+EgCmmrbcy2TctFb1IrvReelzIuB7yIHoz5dFUuazsZBRo/mssr3RLyXRTR35TIyhDM4TdDurZSVLn+slO57V8t6HvfGOCyidl8knyuzfpOXFpars+gd1WImyqU6rZGXpCS2MgBcvBpOdzdHtROtOMzOFJg46SyBJOdGYxjDo+2pmF107hYgO1ec+RLObcwx3vw8gabat2kxM8S/O6sz2AiYGxGaF6Fr0cOh9QDRL4HiJE38Na2tUxuAz/4f/Cz63u8L4ztfwv/yP/x1XX3uE67cf4+n/eIL1u2usn2zx++sB7+1GbH1EH6aZ9ifZtj7iegwYOh6qOWxHABylklDszeoE49u/z4vv8RnMnU8l2EgbZyeBy+w6nTbh7yMUGgMQAY7y8M3DOpw0qXsoMWECfYUZxJqCK8rXvDYQjzvQsGFllW6DcHWOsL5iJub6kp3STB4shgDfD/x8P2LYdEn70vcBtub2B7ds0ZwdpfaG5tYRZ191O+3TExaowosBkADZ8DWt9zIypEZEyblZ47g+lg54gcAA6XjT7FxEe0Ow+bx1plj78kkxe87roMMSpmNiopb10bkDmzmutG4NPAEiKvpTkkpcBQqWA4tuzT/H7+Q+LrkpUy1HJYBuOMHJOR0woynz8xZ5/VtBfHgRKFMVbL7+va+l/Z4IAAsckorNxf5Pmh4VAopCrS96OlITchnRlvv8IZr/3f/EH9UeIRzdS/W8FL0bh3a5eqn3YkJNJYvdBnXVAtYkVYZLe4TjeyewzQp+eQY/9qDTT6H9oZ/CsrvCw/4a/tHX4c/fQ7h8gj/+zhP0l2vsnl1j8+4FnvyPp1i/u8Hjqx6/ddF9oiDG0u41FmeVQdU6+N6jOanTgjtsOpB9wnCLq2BbhsCiaxHqBeBaxKrZlw8D9q+HEgaS3zMrFgDRZKHghmVKjydvVb7/bGxIJBkXUgZSqc6LHLn3W74HdhuEyycI60uE9RX8+bvorzboLzfYPbtCf7mB73qMHTeiBx8RBv7pex6PM3asrqLjcgCALMG1Ds1Jg/qoRr2q0NxasQzYqkV9soJt6yQJZlYnoHaJeHwGc+RBzZIdWDFoUqXU5ro3ibGp3z/9ISTCVlJKKcoIe2oq5c+5lXX3+WuAwwVGWefi2E+Qm3kvHM1YjjR5CyFZaMY79BPnybqfrDaSYNy+Q1hfIWwuMV6cH/4+L2FvtuNSu+mEzU70TdJ8qb9Bfk8ZXBSNwGhypFhGr0UmBgD+q7+Znrff+xOHP6xMtYtRJTQIPqw9XgV9P41lUGenFkPKwFSD6qCq+UdhMsgxNCuExSm8Cr2+wn6UkOvu2XtAGFHZGier5XTDxfcBALrtFjEGbAOhXt2DjSPMyVuouivQsEF98Rjh4gnfMNfP8PCdx7j62iOc//67qP/TN/DVzYCnvcenFw4nLjfzng9MltBm54/SWkP4/LLC3dahXlXYXe6YsVj1IKMq+T0qaycjXJLSva1yxgVkNlyJHMzRCWDvXO0xawEghIxWhOcEevNIn8y0T3HuvERPkMYO6DuEzSXC1TP4q3PEzRW27z3D9skF+ssNNu9eojvvMHYDdpc9hu04UU7Rx6pA7yPQi64jwOzQ1bKCk+Pbnq0nTqw+WaJaLdDe6dDc6kDL43SsDMQZqT6oOBtlEYeY1xSe+GAQrRMlH6n5hBGpYK51bK2nKWHrQP1qLvZ8kAlZBr3l6YhTJifXy2U+WThQaxxHRCW8lJlUIXGW
yBYFIUb3c6IqU+iQhksOJPuL7/QGZHm8V8Oa0TeBwjkVbzORvFFHIVkLRaaVgygXGssO91nqr9mO/+pvchbkmgzBuHZSRE8sKd/zzTqOXGgW9mJqPATfKJMaQ8kaIoNAJkk2RdewAgggFHzednj0B0lFofr0D7/OkYf/g/8KkEkO2q/uICzPMJoa2zGAENA6HmQ5RsC9IkDX3Lr3wm3axQKX6y22g8dO6gP14m5yYnS7gzv/GgAgNMdonnwdd3/s/4zvBfC/zN5r8//6fwAAdo+f4OrrjzCuO4xdj/5yi349oL/usX2yRX89oF/3ON95nA8eF0PAO9342jDkWWXwsHX4VOvwmT9yL7EPH//uE+wud1jd50V1lKwAxrKyvbHiRIrZUMQTsDOEZWDJSLfECNCBzEsfYr8VJJOMZlDTc2oxc9o2pHaTPlqv4cJpxc0lw4NX55xlnZ9j9+waV197hM27l9g83uD6W9e4fLzFxeBTgNEHhoLVDiEr2lhbG8JiM7ADk169ZeNQrSocPViiPVtwn969W1jcP0Nza41GyUvjwK63ZAkbA9galsxEMZ3k+xtb5+2DBYIDqM86ijwnhKF1YE9XtXRYe86KMls6okisZ6xpHRCpLE9rHFxlJLtSyJCdEwsK+0yikIb5XEuUemLfsfPqu+zgvM/SZ7rv44DQDyLddo3+ao2Lx6/OYn6zHRewf9IO9IrwqPJ9B6ab5lHq+QY3ZKBNlOSQsiOMPeBNen1yZlq4NTNmUgwA1AH2GRIZe9Cwlehyx3TRWdNkNAFRJIDI2skCMS98mhgQfYtoRe4mVKnWsRexfQDJQ2iOJ0xIGjrQ0KEyI48v8SNi4M+2xoF2G+AlocJXMaVpq6K5DxFMDCdYWqI/+SFukrSEs3p54/u4/+OfB4xDvb3A6e4KYbfN7CeJGP3FE/jzd7F97xmuvvYI2yeX2F32WD9aY/N4i+5qh0c7j69JJveiw31WGXx6UeF7VhXu/tAd6d06QXN2lLKLy29wz8vYjdg9u4apHHzXo153aO/wwmG6NRwAe3wbYegQYoBrjjCCWwuCXF9MnrAgcnvz3+YEjAmqcEhnT7eZm1z7hKLJPtG1BYrUlylxQ4r6se8Qt2vEbgN/fYXds2t0Ty7kOFxi/WiDrz/e4OtbPr5D3P/4l7WyN+9oO+LouseDZx1WdxZY3l0keDH6AFu71DwOV8FUda77hAawmDgtpo3HdMyJCFW1zH1pQ8dZjzoNPW407T89SNUv170iOFFHVTaIK7crCFNXWxUAwFojtHoVOhBiiCI+SqQYCvFrdVAiUxb7Dr7b5ZqjDxNCzLzmqEHg+bNZ68b7sDfbcRWpeVkcLU2jHnVaiREkVo5lSNFJ6msAgjbYycWjqTViANmQ3m/yqXNorIimEvli3IG668SIUgwYAHf4B2lpNIYvEv0c2SYCmOis9Z3AP7XsglzwGrmZ3PtBwwbD219GrFqYfgsUNTX3uR+78XD7r/4mQrXgBcjViDEwPBq51yeOS0Q3Pb6atdK4w/iN3wH117Df95M3fsar2tnREmcA3j6/TirlgAYkIY02/+ztEwA3O9D6/udxud6iqRYI/k6qQQIA+R5mHGDXT+GffAv27D1Ux0uOIC/XuCX1l7Eb8KnzDj/4aI3dZY/uaoeLIaCTjGDrQ9JPPKssbssiefKZE9z5kc+guXWM+oTfN/qA5qTHvT9yF2SIdRTbOqltDOstYghohhEOQFgew7qKmW5+kMzKJS3BGAEinrNkDf9eUtYnkOD88dxK2Lx87jmWam7KyJu/PEZeIFUSK4Q0h833HuN2RL/ucSkQ7us4LSD35m39iCPHmVdFgDnvQJY48+p6jN0Ovh/hVJLppnohclxYBtLa3hGjDro0cPUyB8TeTt+krDvOIMASWZp/fQOkhmh1Xqk3Gbm/zop0FilEGzz/jDH9DAIPcjN3mLYdDLnZ23c7jF2PYc2P/TDCb3t4Jc50DOWO3YhxO2J3
ucPYjbi83r3yeXujHVeOZ6YRB6A3KD82MaaiaZl56UUVIiZabGXWZikW9F7HCUzgOz4CHK1ouj+nmZYWAhDlAvE9sJWRHbq/pcyLnV7EOTW3+ff5e2MABQ9TecQieouuRWiPM4RYL2HCCNNvEeoVgnGgsYPZnCNWS4xf/224z/6Rg8fbL8/S+6Z9k9ej70D6OaI035YNx8sVO7mhx/iHvwX3PX/s4Ge8rn367AhffXKF7RjgDLPgLnYjjmuHH3pw8lLv8bQbcdYs4A1LV4UYs6Br6GFdBVvV3Eu1WKFZX2bFhqFP9adx02FYdxi7XUEk6NFfDzCWRMS3QXvnBM2tYxx9+i6qz3w/U7ONRfjN/4xK6Ny3fvCzKYodux2e/Pe3EQaP5mQti/qIJQCzOgY1C5i6AfwxL0LiJJLWYASrMUDEiMsv7/uba1Zltl4KrN7krEqo0BZNvsikhUghkwNu+FydoRZDRAxRHE14badVWgBwKXCjBhX2skd/3cMLtT6WDqv4jgffL4kN58cerBloDUCi7G+Ng3UOZHOAdPC9X+C0lEQDcH/WVEuyeBvKqh4W3LBOXssV/BNh5PVoHMRJ+1y/GgeW1JLrfVhvM5x+td6D1jXQ2F3u4PuAYT1gsxvRh4gL/x3KKtQiKCdJ+eyk/l91TKS/8Ikts675ta/CuOm9iEdPB83c9OazmQmlBdXE1HFuMgNoogAtTXlhHABXpZ4UXazSlF4tuKcdy0VS/lgPMnMHZ5iSaiwX5m2N6CqkplTrYLYXiLaCv/WppMkGAKE9hdldPTfCjtWCB/NpcV/HoVRLxOY4R+BhBFDvvf69Wz+Iu5d/wKzDD9EqQ7izyJf2UW2wfR+CvKOPWA8Bp40VSIX4Bu/lBu+3AACqW9g7D2HvPGToqFnwPLeyyC4tAGF9mVXgVVlFJjCbo1tAy6QWkGGiTn+NxQ/8CJoHzxiqNBbjs6fYPrnE9dvvYftkg2E7YnveIfiYsq9TANWug91tWa6JDFwdsKyW6E2e1cRU7ax6kBruhRKdTKErhZzVxyTI+vBxTXUYgc79bKG1ht+bRgA0cBYvwtNUt6zA0LSwyyWqroc/WWFx1mJ3scPRdsSD3qMPEe/tXgzFvh/rQsTFEHDkAurNgNXlDv26R931CH2hbuMqlFMlyhqUAa8bPrKQrX5xDoojK1Fp4BDVmRgYUx8mw8zq2cBstpeuPWWfHJAdX0noGHNbwkSJxPdZ6LcIwtRBxbHnnjmBBkOnzooDslK6rL8e0J136J516C52uL7ucT4EbD3XIrfiSTc3KPO/jL3RjkszrpQ5YRrhAEjadwCPmjYA85pmhVu92CKYpTefKnrw84mmSu4obk69yQOAKBCgMgal34Ebki1Q8/h4jeb2uvhjSCrWcQRQOi1tlEwabBaxWiacPBpxosMGZuxgvvV7wK0HCMcPsppB5Gwx2hoURviv/Aaia/cyr0MN1sM3fw9AHrBX3/3MjccrxIjxHrcGdJv1S9Pj368RgFVl4KS+eT0E1PaGrOCA/cD9nJl1W65DUr/OCg46NkMX2apmdY9qyVR0WyeCjLL36KyYxFu0UkQiBFcjupYnI4+ixwfAPfgc4tl9hmPO34Ude9T9iGrZojlpQLIgGUsJktk+uYSpBNZerGBjQGg6VO0AWy8AV8uMJiRZJfisEJPqlkIyiuV9UhAB1ObQe/l8qhuHfWm1QITaOMSyfmwZ6qa6BY09zLhCaJeoj9lptLdPcNx7kCV81kcszre41zAp5mVqie/HehGFDgNDlF56wUp4vlSRSBqCgt6UGddNFiT9pQgJCPbXpUk/1bxPa15zLOTlyknq+nsqUxSOKxHCxoGbu5WAcQgWlLKGQoO+6zGut4Iq9Elvs7/u0T3rsHm8xWY34nHv8bT3yWkpgeZ1ztgb7bhSxhVvbirVC0gdWKAMHepFYoidmZXbMFBMopkJbiSaFLEndgAuSDe0gQh0grMzX9BKAV78FiuQTtQF
sn6a3AwUtNmYhMZqEjSpQwrhuOE02hqxLuZpaQTvuaGz+4PfRvV5D2pWCbrRxVWzBeqvEY1lxQ9ih3aTQ6o+9YM3HPl9U0X4p1cbNNZgd3Weh/4B4nRbwNYs0umH5zrCm8wQwQVuYnXWwVCN+qbmvRdYBGQ4YVZwSJCOcwDkuEv/VGyOEesltp6n/ypkQ66FrWTfjM4KAybq/wCsH9Lv4eQ+1xx8D7NjlYh6HNDeOcXq/jVc28EPAba2IGvSsE0AWAwj99GMA8zRLUQ/AOOKj62tii9YMAPl9+kBoAxTCWNOSUyllTp3h0gCeyLWBhgjwRmul6Y6bvBAVcO0K4QQYBYrYBxQn4xo75zyYW8r2NqieVTj7mWPT13u8LT36KR+qAvk68KJPmZ6fRi0wbm4d2fsvvn3zN+XA2Eqfp8bK5PwumT5Js/IRjFPDcBkbM2E8VkKF8RZ07MiISpG3HcIKpkV/ITOPpHN0r95nxq9FfL2AoErLK4i0f26x/a8w1U3plrk095j66fr9OsIzL3Rjov7M+JellVeO3rylAqqzssWf08OiWR8OeWbrlQDcLrQlFRgYJqa6z9xNpQWpsgZDQBjrYwFzwsIGQtTZEyxajJDsaAMYxEmnxcLqj1GJmgoDZ+Ch9leYPef/98Yrq7h2hrN/+n/xvJJpRiv7kOM/J7NifSSGcRm+fwC/SsYT1AlwI9pZDx8D7u+Svvkl2cvLPTfZMvKIJoa18ElUsarTgknALFq2aECSNI/yvw6YAxds8K9/tsMAZvBI8SIxhksK4vK8BDNZQW0DnCh5/O/ugN/wtmt2a1Bu2uYs3ugpkU8vYOzOw9x/Ln7iW2Xot+ux/rtx9i++wyXlUN751tY3j9De+eUR6Kc3El1uejy9TWZoFA4KWgfmGFSj4/AEGIaUTP/1vPa8Tzb0PvJS6YRI9d8atdywjFaBJI+KeNgpP+HatYTPFkusXrrNsaux63LDfqrNfy2x6C1xC0rjIzbEWM3oL8esD3vElT1Tjfivd14ozOrCDhyRhq/LU5qnr/mWh5ZQ9bswfNlHU8dejm3TBN9W9TY1ZJae6Knl0r4/VRtRDOv0onNHddsXVKF/JRNDT1LZXWbpPeoNHVWHBmTg/bqqEWNJHrPbEFhB3olXax3GLYjhvWA7fkWw3pgxqAc8+sxBxLlKmIANET7tZqXtDfacSnt+dB3PxTdlCrVExXnmBsqrTGIEPXkSJPt5th/pJAW+wyt6MVoJth0NBaEmp1KDEC1yBR6idaiYOaxXqbaQKx4BAZCYKHP8vuXhW8yQLMCYuDBbUCuNxmD9jPfg+oHfxy+aqcSPIeyRSKAbIYkPmDHde+EIcJdf13MJlNnIFlgGBGNY6URAPW9z730+5+sOONcLICLayHFv6To7NxanQa9Osbw6A8QZwrrqU9P2JNRWJzOEBrLGTIRMARCGw26kZtidxQQrQFRgDUGhiKcq5Mz8ZHVtqOxIG3mVCp2t+bxJjLvK6554e7ON9ied4nCXR89xfLuu6iOl1g9/BZWb92GPTqCPb0Dc3QLtFjBtCtQs+D9lsblico4kDL/EOSem913qV78gmNcLtQAOzIivtfIOG47iRGxGtP7miPPckHtCmaxgh17VMOAdiakqww2JcaEfsTY7TBcbbB7do3teYf1ozWuH62xXQ94LNCVNifrINHWEG7XFotbLZqTGkdvHWFx5wjNrSPUx8tCkLcIOqV3johV9JWtWdr80JjJ33Q0CDJ8P4f0yt9DhnVTS0wJYerPstdK2jrC1bMkUDysO4ybLjmssdsxAUiO4YTROWiDt6iSBP45rDNjsL8eMHYj1gM7Kh5Iqe0GJgU7eqzJROAVBeLfaMd1EwxROq3ygjHIF0nptOYNlLlRs7i85ou3MUAxCDJ1uqeIPO4VR3k3bXq/udJyWgwFsoNhVmD6/EMjVQrH4wWfJDcmOCtaHgHvHnwO4+3PF+SKmGEqAFmiKUNDmDnfD9peprn4
Zax/96tJ4ii4FotFhkoVIvwgdHerB9+XHu+evYfoanhT5/4/mXZroEIm7JCMj/BOZxaZRNWP4B4bHySTicQqC/ohsQiMHPcQZfUCpoqHYRAq8g67yx7X37pmFQmZvrs443pY9+QK3ZMLNGfHWNx7hub+fZjVMeLRLdY4lCxMs/hJph+nwxMNZc37uerM3DLUPnseAr+nJ7ROJPeQk89uA2xVI9RbxKbNdR4lKRVZRWqKVXhr6BE2VxiurjFcbbB59xybd55ie97h/uMNuvMOw3ZEEOKOqXimWnNSy6DQBqsHx1jev8UtCrdYTV5FeGNxf1AYYZ1L3+5QOH1T8DRvRSinJc+l4EoCRZiJ1KayQWFMqpDeuG6D0K3hr2Vw6eUGw2YrTr7HuO6SpqPv/Z58FmdlrLSvI3nGrWZiIbEF+xAxxJiIP0cCd+jvOpDVfOc6rtxcN3FQRXd46aj4b7K91HUSG6dIs2+cljyb65U64cvFPUjmYA3Xr3xJcy+aCWNArLhYHm0NjylGXpnCqer26tiK72+QL3gr2+7cEjV4hMq4vAP3J74Ho0Zq/TrBDXMrZ/2krGzs9yLwT5q9Y+/CEXB39wxm7DnVEjP9hr9rc/Scd3j/FpojhgElg9KaJquFEGoD1BhRWyNwoEmwNgCsh1CMT+fr0iKAumuYGPlcFpBRdC1H9LJgD+stuieXWL/zFNvH19g83mL97hqP392kuV59iFg83mBlDe79zmPc+txTLO8ucfo9Zzj+7CUW92+hPjuD7TvE1QloeQxangCh5hEejrNHFYmtXQtLhDHwd9W+MCB/B3VIJbqhC/Zh54VUf9GsG1aQCTsyey8EUOvz35Gzu3Rf70mhZYfSChPu9OIJ/MUThM0ldo+fontyiUHIBb7rE+3etTXcqp0I8JrVCczxLdjTOyz/1Mg1FoKo2HPd2VoHa+uE2pT36vMsITp633kmAdGwy4MY+w5+fZnJE8VYEHJ1FgZWjUDthdtcIWzXiN0aw8Ulds+uMa632D275n7A7YhhzW0afvDcL9dlBzZ2IwYhVTBpRYgrN30XsGSZJUqtBe3CwVYW9VHFmptthc4B+N9+9/kH5gb7ZK9IL7B5Oq6TOC1hOpAuvaDow0gFyxllvSxqJqaQ5V6YGJLjCBHpJlqtFrjZ9rOK/uk7zGCsWq4dwEykagyA3gPgIQTynEUtA+BKG4KqRhAWhmtrHjFpo1kgjV6IAMi1vCiUbCUgO+bydw3ejMPu/NGNY1s+bltUBB+Ap+6M61vbbV4EhNlHYUR3fYH26PQD+UwC1yQoRFhZkdMoD4HuA7kEq5UtFj4CrSOESPAhYlEZVIbgwb2C8BJhD126LvVckathT++gWr3NgrDHSwzrLk1bXlhCHwheMk0fmeL9tPdw31pjlAGYZAzCMCD6gLbmTIa8hzUW1PB9EHlDCYwi4FkVxZJBI1JkpYI8MEc8clCl994eQ+5ARp+hapsa6CeTEIAJLJlkruL+e1IMQMOBmjm9DztsgH6H+voZVlfPeEEXp5DgNlellpQ0r8tVDK0uT5iUUyjkUBhZFUevOVknrNQFy1u2JLWUQfWkrqXj7v3A6vjrS8Ruzfu6yWoucdchSuRO1vC+lq002kDcbeA3G4aTn1yk5nWFlseOa1Sl4+oKCS0mvWTHVa5VtVzvDP9xJrWwlOqD9VGF9laL5rRBvapQnyzQ3DqCa2tsQMD/9u/wKvZGOy6NVoH9cd8kz2F+QQPTmyfe8K+oWzF/N0yA6dcRFD80j2oFZtsBJQxTEE6kqE3YvxESe8vqkDjKsIPUSwyJ86oaIFR5YboBBizFfDUT69ZX8LIYl0zOs6ObJZQ+Cls6g52P2ErNIur3F9g2fRMy6NZXGMnhaPm8YOOwddcXeYggsYhwZQhGPkCZgqVKSyyyLABCTmCGWSUbVnLzhwjul5KggvwwvSYBkLUwR7dgjm6hvT0IbCO1Us8NuvXFDqebIWVdAC8swQdpAi2a
ozcdgtTM4CqWjqpqgEYgOO7VszK6PhjADzJckVXFjcn1vmSz++sgM+4mCFqDRYGsozFTiFy3KT4rAlM6fwGBa7Cpj6leAO0IWp7C3X4A9LvsuICJeg3rQFYJGtS2h4msW/mdg0xK1nEnkRVldK2YMzGBnHXCzzItmfiM3Zad1pr1G/3VOcImM/n0/JPlcS1c93QgaxKxQjPKQbIsbQ7uzrdJoHhYD9hJb1wJ9/UhTmjsmnHx9U6pXpVhQMKRM1ictWgFpj56sEJ7+wRu1aI9O0Zz6wi2bWBfvf/4zXZcOvNHo98JMwdI0c9elKf2vNpN4ezUWVCYSrAcuhBfx24fZwfw6ILB39bmrMuHiNqyA+t1RINCTSAMgceJN2OX8vhoHKI4GwIA20Jawib1iUTNluMVyvqZPPaRpZQCMGFdXq63iRDxcdhquYC9vsCyrbM+GwxWr+Ccnmc7VOj6gFVlcCRw5Ot8wnuXa9RC4AAYKgSZSWbrv/Ibk2AqugaxPYX7zPfDHN+Cu3uF1UPWTbz+xns4eXaNsRvSYtSve2HZjSBLsBWTPYbtCLfu0F+u0Vxfg1zNTmixQnAVTIN8/oMHqIC8NQtTWNnWmZmosPIsoyr7jOiQ05qxGQnCmLU6nt7sqUUQpEQMZOeFAjnROrKgCVHrUuJUSmEA3X4e1EZIRof82ZNSgu6Lflb52bMscW5lS0SaPzZ2qQE9bq55pMv1M3ZcG65Tam1K1VjUQeVDaSaOy/deankeu8tdIlH06wHrzSAyZJxZKQzoY0y9bFufyRYlI3OIqrbPa+7KGpzUFouzFiefOcHqAcOsq4d3MjHo+IyJQU2Luv+IGpC/9KUv4Vd+5Vfw5S9/GYvFAn/8j/9x/P2///fxQz/0Q2mbruvw1/7aX8M/+2f/DLvdDj/zMz+Df/SP/hEePMg349e+9jX8/M//PP7dv/t3ODo6ws/93M/hS1/6Epx7f35Ue2LKWlZSARB46KD80iHTC6zMtvR5MqKWHAATRISXe78+aOelphkkP849Z0EcULngJUjG96BdD+rX6WaFrWCbowQplmyw0nGp/AwVN1M6HulzDAcG8jtpz8lcqubjMFGhL/v6Puj25ushCJnC44NIMkPMlHnN3gIs2nIjXeRNMRgyBtDtT8Gd3kXcXsPeeQj38BlWn3uCGHyKyJUu319tsH18jX49ZaXqhODds+vUzEyuBg094oLZc1rwn9MKyNo8QkWmHkyGV5b7D+QJyKXDmLwh0/JJGbIzjU2l4petL2WrSm0laJGsh0oHooQp03BrA+UhmXr/lnW4MpAjEdZOE5l1TZkcyCDCj4WjCgEg/nxLeVDk5J4NIyuECCRIvYhud2v4iyecaV09g794kuaQbd89l0bfDt15l3T/hu2Y2H460iXv3tQZqUPqwn4WBeRyi473qY3FkcujYfJ2YHFiZ3DiTNLcXN5d4uRzt7F6eAfN2RHa+/e4HWN1wioxi2O+ZtZbvKq9L0/xa7/2a/jiF7+In/zJn8Q4jvibf/Nv4qd/+qfx3//7f8dqxcvEX/2rfxX/6l/9K/zzf/7PcXp6il/4hV/An/tzfw7/6T/xoEHvPf7Mn/kzePjwIf7zf/7P+Na3voW/+Bf/Iqqqwt/7e3/vfe28PUDAmGDq+rj8CTw3CppAhMW2E6pvzDOFiChRroEMU75utH/35OWX3W6zzlFb8IUckwGwlCjSwIeI+VipAD6OAQCJI4tApvPPokpHecggBAKiGLA7f7SnGP5+KOyvax+WCkdpnz47wu+/d/WBKTSU/VA6xWBu9vt+EuMf/lbKGCAEjegaJves7sAumOIe7jxMit1Vt0b95B24tk5NycA1fFKC8Bi7HlbEUU3l0FjDtZyxB/Vdnqc0N2MZSpNaUOoL8w6w4/5EAq1vCfSZeo7UpL2Aokl1NXaKjHBo0FU2dAMAkQjXSpDnFMaVZub0GZIRRsckqNHzQNIxZGZydihMZ+fF26J2MuBx7ItM
UZ1iQeRSZEYccyTDExLAEL2dHw8hPikkSP2Ws6xuw5T1yycIW3Zc3ZNLdlbnV+ieXmJ73mF3scPmCStVdNsRF4OfOKLSwUyuuag/szNTqwsJsKTXWDA/E/IjjEEDpHEw2jpw9GCF5tYKR5++h+X9W3C3bsOe3YM9uw+zOkGoVzzZ3NYIvsKrGsXXSBnee+893L9/H7/2a7+GP/kn/yQuLi5w7949/PIv/zL+/J//8wCAL3/5y/iRH/kR/Pqv/zp+6qd+Cv/6X/9r/Nk/+2fxzW9+M2Vh/+Sf/BP89b/+1/Hee++hrvc17uZ2eXmJ09NTfP2b7+DW6clEOLektydsHbgRmtC/zRv6JtvpRFLR+yujwLlyx8dR7+muL9JNoNCH2a0Z6qgX8McPMJLDdsaA05tUM649Qkt5HMpjWtqhUReyzUfpuN50+/33rtKC+dnbUwbk8OgPMlQojM9oHGK9wtqucNQ/S1mv6a5S1D5+66sYn3wLu8dPeZbVO0/RXewwrIc0Bbg9bbC4dwv1yQr1sTDo6lzXmdOr+UNM0lmkuk0Tgk2zEKmrOjmfiZKDTtUtZjalWpZjolKsGoR6xWxb16IPSJOv9x1XJmTVlhKbU/ue0vUros+eHHbitDTTnavuaLlBSTatM1g4YvWSfssMP6lFlahMCZXqVIbUHlMM6kyEsDAmJZYEDV5fJGjQXzyBv76Wqd3nMvWZ+9A40+pxedGlfrSnPc+Ee5WgyoAd1ZEzWFgmWZxVBgvLv1ctMwLVSIg/tjaoVzWqVcW1rLdO0d45weLOKRYP78Ke3eN67Okd4Og2YrVArJf809W4vN7gwcOHuLi4wMnJyfva59eqcV1cXAAAbt++DQD4jd/4DQzDgD/9p/902uaHf/iH8bnPfS45rl//9V/Hj/3Yj02gw5/5mZ/Bz//8z+O3f/u38eM//uN7n7Pb7bDbZQn8y8tLAIfptZNmYABk3X5qD9xcGJ6bOq2y50rwdnVaefbOx2OJfaZY/dCl/UQIMN0VKhnnHmJIcEuZRHINjKEYzRq5DwlZZUObIMXmPV/RgMdUaFT9XXtpYxIHYTxwHcVqOT3WfkhK/0MfEJZnoN01qF9jvP05mO0FjLuAewtQWTAvTblkrhFDTHWvbYgI/il2z65h2xq2cjD1/rLAqhFKAKjgFjVsWzN1/OQ0O6/jM1C75ExMphyohFBUCSF9z7pNjdVR6PcckR8x09ZH7MYoPW6RYdoZpFWqovDoFoI1bfK3QbIK7yN6z+QDHxii9TEmUW295omASu7typBsI/C4woaaJZX3gjiuaByToAT2TMFGUYpI91JBwFBnFdaX8M+eYvPeM/SXa3Fcl9hd7tCds/7f+nKHi8Hj0c5/INO5A3KT8GllE+ynPYDLuwtUqwamcjyTTGpopnZwbYNq1cItW9S3b8EsT2BWx9wbeHRrMhkdtmZZNMu9j7vXYLi9suMKIeCv/JW/gj/xJ/4E/ugf/aMAgHfeeQd1XePWrVuTbR88eIB33nknbVM6Lf27/u2QfelLX8Lf/tt/+4X7lFL+kjYLANZMqO8HX5v6QqbNl9NtzJ5zCzjcCP1RWlnMH97+8gSyoxiAgaFM1xi0tppQmA/ppqklnL+gZZfHhW/GQn2jzFhjyFqHZFDf//wH+ZW/7awU9p1bUk/R3y3XlcYIPDgViHSxQGq9WB1jeNSD2hXs2X0AwLJnhQR1StvzLYJQn3eXPXaX02ZWAImtRoZgLAlzLc8Eq5Yt7KLG4s41quNzVMdHMOsrHq2ifUXWHlR3KDM5Hc5YNtZzLVHJAhGjjxnhUN6oqN0TP0TwgI08ighAFvoVSFYzNy9ZHGdwciMEDtiQ7gtMRoCQ12Zg0avcbeHL4a+qG5qo9Kqm7/bFbiXA5IbgDeJui3D5FP7qHP76Gtsnl6mW1Z1vsHm8we5yl7KsiyHgcgy4GF7faQHAichcfap1uLWo0J41OPnMCZZ3mbq+vH8mTMAa1WqRviOM
gWnkd828a87C0SxFO7XiuqJruC9QnNZh2bCXt1d2XF/84hfx3/7bf8N//I//8dU//SXtb/yNv4Ff+qVfSr9fXl7is5/97GQbJRpMek8Klk+UXyEZwU11rqgUerITSvhkG/0nVOeI3Aj9cVv16R9Oj/1XfmPyPQ0ZNC33jfmYiSW661ovBGZOa9y9mIBxIKJMz7sa/Xtf+y50+IoWtXajVhBRbjJV+fBf+Q1YYxH7Dqt+gK1dYpv1132iRZfNpsHnAr+xBmQpOSwdZlmtKlQLh2pVY3d+jfpkifp4hfbOJStMyAJG1ayOYWxe4HW8uwwuTKw9yL2FrCxSkjN0qwh2LLoCGlAa2KjHRjO1GGOqlUVlx8aY6jVqVDgsncEGcVgYe9DIChTa1KuqHSQjiZRCr03A8/FEQYVsk5rFOtWyUn/Vkwt0Ty6xu+ywPe+wfbLlUSEykPRynI4HeR27W1vcri0eNBb37i4TueL407ewvH8L7Z1TLN56AHN6h+XBVsc5Uy762ZRQwxMpTCbpKJvTsaRY6bReh9j2So7rF37hF/Crv/qr+A//4T/gM5/J6t0PHz5E3/d49uzZJOt69OgRHj58mLb5L//lv0ze79GjR+lvh6xpGjRNc/BvQKap6nkscwijrEAAPA22aDxOG5kDUVGAjtXmmyok6ZsyivMxprrRJ83IDwxZlOMOiOBczROd54X3MIJ0WF4otNF8n7PVQ7XB4vdDj0nmLH3XXtHKWo08VdZ1n2f+6B6MrWFHntNmj56ksSfAM4zdiN0lq3mvN6zhpz07WsAHlLqdlRBOnMGRM2gXDss7vNg1JzWW90/QnB0l1Ylq1cK2zaSpNxoLqiqE4EFDD2MsM3XTFASDpmoBEAbDNajUCwW+90KMMCHXaG1gB1QqdgAoWjfU1cn3MQQLQiXH0hl+r0o0JltnUFtC5TtQdyVix5cIzx5zHerqGcLmciJSC2QqOjuuYnpD2iF2dHHoU39VOYRxWHfYXTJbcFj36dwM3YjrMU/R9q+x6LcCC356UeHTRzXaswbHbx3j5DMnaG4doTnjoabu1m3Y0zuw9z4NOrqVYNzgmlzLex7RTY3yPDadLhDi662Z78txxRjxi7/4i/gX/+Jf4N//+3+P7/3e7538/Sd+4idQVRX+7b/9t/jZn/1ZAMDv/u7v4mtf+xq+8IUvAAC+8IUv4O/+3b+Ld999F/fvM4zxb/7Nv8HJyQl+9Ed/9H3tfOqDQI6wNBsqfwfk4taG1BgwmRk+gwD178nZ6Tb6M0Jupph6S/Tzvv70GoaYgfZxm/mBn4L/3f/Eka3vgcGADA/sgxaTgT2oL+kYltCgMUhLwiG1kZIpNr+YpSF0/Ppvp1rA+xmH8p1uk0nShV1cb3C53qKO/Y2KINE1iM0K9vgMccfqEG3fpejeVDsEH9LCeFEoenc3pHTKJjutLI66EQ82A47OOyzOWvTrAcurDapVy4vgrWO4thaYqc0LuqtBwwDqO8RxgOk7rpGNHTD2iM0R2oYp+TsfUJkIg4Ah8L3M9S++3nyMqKQJtrImETYU7kstHsiz+eZqH85whlVbwsKJQPLYgbYXMN0VTM8Udf/kWwjrK4S1SCeJMr/vc6uB1gHJmtQYrBZ8QOjHpDGpOoG+69GvexGuzf13w5bFbUuB2iNnUiBxE4uwDDSUcLGwHHDcWrDs0ur+CqsHK7RnS7R3TrC8d4b6ZMn9Vmf3YZbHDP8d3UJojmVKghAsMJusjJlKUXkNIq+RH1T70PtyXF/84hfxy7/8y/iX//Jf4vj4ONWkTk9PsVgscHp6ir/8l/8yfumXfgm3b9/GyckJfvEXfxFf+MIX8FM/9VMAgJ/+6Z/Gj/7oj+Iv/IW/gH/wD/4B3nnnHfytv/W38MUvfvG5WdUhS/0a2ldVXJR6gHzM8BeAKS01vZFJZA5AajdhzIMAZZt44LUfIyfjpYyn7dZ895YTbot6wtxx7zmtve+dKe/JYYURGEVOSNli
hSl0S1ToNX7XPhCjsUf/9J2Diiza/xWqBU/ZDh5hu0Z9vER/uYatr9OmQ4Q0o97stAA+++dDwPkQcOJYruxsDLi9yZJBzcmQlMY16wrDmAr8pnKgbs2ZmOjoaZ3EnnmEcYfoezTLM1jXwhIHidzPQei9oB0hYvABdiQYQ6hMwLKyIldGk5YZPlj8o+zfUsdVW/7XmAjarUHDllma20v462cIonWobL/tkwv4bZ8cz+SwlyQGIbYASONCWI29nwjaqpMau1G0AvlYAkgUdR95/ErZWzVXsjBAclaVpSS95FqH9qzF4qxFtWrQ3jlBe+cU9fES9S3usTLSv2eOb0mtasHkIKlPReuS0/IhpuNXHNq9dWN+KalgwksABjfa+1pB/vE//scAgD/1p/7U5Pl/+k//Kf7SX/pLAIB/+A//IYwx+Nmf/dlJA7KatRa/+qu/ip//+Z/HF77wBaxWK/zcz/0c/s7f+Tvvf++LoX7aK1FGASmnirEYp83wIZUZFjDJnDhSc5JkFIeIpiPIlYXEEksfL0HjJivnB5Hv+WJS2Z6CLaljLSaOTOuD8/oVvzEvilEVB2JijKlqdfnZOmIByMX479rrmarERFcDQ3dwG5LBlLE9hgkj073HAe3VM3hZPJeP1vB9wMmTLa6dSUoJL1O2vRwDLq973K0tHvceD4aAu5c9w4Z3llite9SrnHHpgs61M1bxYHaabHN8BHv6Di+iZ/dgT7ag9hiuXsE1S2wtibSXjogJ2InUV2VZ87GyBGssDEWZ8iALZdmKGfPvliBOy6CmALO94Blo/Rrx4j2M5+8iXD3D+PQ9rN95iv5qje7JZWrqVoZm2fRra8uEFqM1QlnERVl9rrQeQ0yPebuYHpuKHVAFbqon8RTGGtjapNaGelWn+uPibJGOeXPrmJl/qxbV6QnMklXuzfKYh9jWLbcyyFQK1S2NukaoGkoM3HYTA8g4nk8ISI/egWBXmsmtvFdqwhbY1rwM1n2DvVYf18dl2sf17je+ipOTk4msi48FHi49Guqi5iNN1OZwn9o89dWsTnF2/ZwQtdg4pcV/6hMAF6r5P/iv/MBkbDrVnYqL7EVZWKp3ARNYkXT8+iHW5sgjz9WxkauBqkasjxAWp9JM2wpphM+bMwQbJUOUWiPCCOo3MN0F4vYacXMFf/4u2p/+v7/0cRj/8LcQm9WkoAwyh7OVN8C69VU6RjeJIPfvfhWm36bzF6+ewr/3Nvz5u9h+6xGe/s4f4urtZ1g/WuPyG1d4d81DAN/eDli/DwLAyhLuNQ53a4vTyuDktEV71qBaOJjKolrkILBkK/Ji28C2NeqTJVYP7/Dwy7P7cG99HnR8htCeIqzuYKyW2IwBl7uA9RBwtRtx3Y88v4yAyhgcNRats3BGmmoN5Si/uKFLubhWsi2j8OD2AtRdw7/3NsYn34J/9hTrd55i884Tpqg/3uD60ToNThy6cU981hgS2JDvo9J5AUCU7aOPqTeK5DX6e0mKIWV0Vha2tnALh3pVwbUV3KrlmuJykWBas1zCtCvOnmSeGR2dSuZUp9lr8dD9PhduKNbYyboxLy+kg5sb5rX3NYk5gNfRy8tLfOqtj6GP6xNlE/ZcQfeebaYZVemk5rdmZttRej99bfo4IlioYyQAkaeci+L3J87Ki0waWCnqhSSd/DYX/+eKIVMljeycIhH3rRQX/qRJOQae0RIDTIxMjXbKRKIcpYUR1jiuRRCA6CfZXUSY9MIgBFaJ2HVY//O/n2SO1BIcVdVY/V//Rj4MkkHOWxt254/SZ31Qc8I+Crup/lWatiIM3/w9RMMivRac+S5chZOu5+nYJ89ga4Pq0Qan5x0sAW9vx5emXK99hO09LLGOXX++xcm6TwtvWesBkNiKrnWojypUqxqLsxahZymqVT+A6jbNOItVC2sdFq6WBmLCsrIIQpACMInilQgAgbQixayQIZMXtEZOKscU+P6gGKT3jNl/Oqp+WHfoLnaJnr67
3KGTmuAQs6oEkBt7td6kPw+ZJSRHBwBV7ZK2pCr/29qmY2Urx47qeIlqtRBW55Kdkzgrhf3o+IwllmyFUC8T+y85rZlNmqS1gVzXg3Jatn7LENLoG7W9oaJ6n0vmZcmkY/8q9kY7rqj1kmIRUqekWoKT7WOeuKrbHrJDA9/mWRqredOEKcP+jmVgQoz45vk1i/9Snvr7cVk0LmdFZPJwQDIsaDp3Xrqd0lkNO68IyUILunskAKYCwDVKfv2MwIFZgFA6uuBZogp9it4ShbZiJRUairETQMrgNIszdYW6bbOat6tBFf/c/Yf/JxebFysuLquyga2mzMoQ0Jze+cCO+SfNYsUQra+XoMUpzK2HaB9+Dvb4DEeisHH0aVbY2Dze4PY3r/H5t6/wtOdm13e68bm1L4Chwz5ELGxmIKrNJYZK8sCJE7LHWYuz8w7Hb/GsrNt1hTj2sOMA41om9jQrLKtGmoQBZyr0nuFCBQMGH2CIcp8Xce+X7o4lGUcj6v4xpit734KI1Qq0qvWosRuT01rP1NPVQWltSh1XJT/1++vj2hBqQCa5sIPijKrm+VULlzKparVIKif2hB2UObo1rUuJo4quRqiWiBUrmowi/aZsS+jxQV73jAFPlVHEKZaBqtlDqIzF4Wkck4tvlsEBXOp5RXujHZcOnAMOFABxOFMqkdF5RnUoApicvFkDs9VxCcbASKalskl6cl+nAPmBWsqGIhA9IoUpUYJsbgsQSACA4Nrj5H0mYyyU2FLCB0Ci0scEJRymymu2pfg3wFFdrHhumKraW+u4XmMY3qN2BTMKk6tQZOAekwrULCZODM0SwdaINcsSTaR4NOq0+9Hnt5PFepW+s+pZhvYY7vsa2AefRXXxBO2n3sb49HGSGtq++wy7S1ZsuPrWNbbPOjztfZIa2noee1EuVV2IL3Rwh+ysMvh0N+L7ZSR8DAHt2TGW0uDqbt0F9Xzu2kWL0REAgyDZ+OAjBgT5GdP0ArXKEipjEvW9SWiCNB8XUHckYT7WXAtqgucxMGuuJWoNylYW7bpHLW0EOgqkdNJeAtoy2yq1AC0BrlJnZdP05XpVoTlp0d45SZJc7R0eaMkklvtJnSLUq5xN2WpPmm4IEX6MGEOYCCbk0gmjRnuJUlEO4RKJT2Ld6vzU8WWpLK4zqgC6Hu+5pN6efNz7sDfacZWkCjWaPY6YQoPloTKSgZVWZlWTaEOJCWG66KqcizUOZHIGFmPuISnHlXxsZhzg+/0UXp2O9mXMMtgyxdfty8eJ4DGHHfzIDZiScc0H/ZUK/km9myxUD1KdFoA8JRfImZzl5k6zWLHunToumStl2hXPTzKW8Xypo0E08TTT0v04BJl8u1m0+dzGGEAOQIwI7TGIDFyzAIyFWZ3A3b7E4t4tdA8vU1PsyXvP0J13uP14i4eP1ri+7nExBDzuPS4Gn+CyV7XzIaA2I+5e9ajPOywuGJqLO6bMJ9iqMCOLptaXB6HJc6tKLLYjVMGgtQCcSUiITpTghvswkXGCc6CG2ZgxeCzu7BB6YUZWl7zJwqFeV7DnHY66EVvPrEydEFwy/pTtpw6rqS1MxUQVt2DHVS2cyCwt4ZYtmrOjCU3d3XmLM6vlMXB0G6FZcXbVrBBcO2nwDSP3n/qQ6/DagD1HnYiilDzYAnKztzopldwafEzHuvx+lRBcSoYmtxiw8+LpEibfc6/BLn6jHVfZ1KYnYJ7lqPM6ZAHsvEIBIehrJqmtLrSzYXgxENddyACWxxfwwgDAGLSLj29Gldru/BHL1Bh2CntTj9Xp2DoLg5YsQpXAOuSstPB6oLibZhP5UcgbxU7pPsjAvXSOXIPoOIseZBNLyEocYZwtLDLgD8Bkeq2xQLNAKIRbS+hk8v30fQHAOOyuztEcn30Qh/4TY7tn77HTngckIfC5FAjOuwb2QQV7dh+x7xDWl6g+s2ZpovUl+vPzNCrl6mvvYfN4i+35Fve+dolHO4/3diO+vn09jcpH
O4+Hrcfx+U5Gduwwdj2cZtdy/an8k6pqjCGiGwM2g8dm8EmjMARmr1kiNI7Hx5gANLBZ0skQM+WE5MIjSfhYUbuECZ6DoWYB2zbYPZP+tNUTDFcbdBc7tLc69OseR92I/npAGALGwc/GgAjjUQgW9VEFU5midlULZZ0zq/p4xb1w9++z/t/RLdg7byG2R+yo2lPE5gh9AHY+oNt6HlSt/aWFMk7Zv6atvznjUuQp72t+Dz62GgzsxoDOM5tzCCzFpeYsobUGjTM4bR2WFU9tb60REWTOyirz+m0xb7TjAg5DhGoTkobUvMxMDbp0XgkqPOS00jTXmJ8nA1Bk2K2YuprsE+C4YiUZhq0Rq5ZrRQBDRvq8QHOHptjGMq0vdNnmTE5BQ3IjojouY3Kaq1mrL1Q5wpiGEQaAF46qRU0mbztsWXpKpsMCDCfS8qQgkeT9CUT8fRQ2cQXVX7/bLHJnOv8ImBdPJ3jTLLqaF2Ug1x1Vqb0kq1QtgquBBZ9rc2tAUk4ZelSbSyzXVwjXz3D70dtYv/MU23fPcfR77+DoDy9w99EGlghf3QzP36HnWCV1ICUj2MqhWi1A7TIJC8d6iW4M2I4B2yFiM3g83Q7YDB7bgZ1XmXE11qTGZB+twHPyz8gk6lCovRuTqOFkHGh5AiN9iu6tz6ParrHq1jg9f48d+tUG3ROuyQ3rDsPVJs3H0n6sWCzwyg7U71gtHIvUnixT/Wpx/wy0PGblijtvgVYniPUR/OKUsyxb43pgZ9X7iJ0PIkgcEAL2Ms6bbDLzb1ruxRA4c1Qnpa0HOx/gA3+mZl0qVFwZpu4f1w5HtcWysjhq+Kc2eKvy/qb/DoUK1eYki/L3OYMQYGhhfsheuaVAsxIAiK8+0fPDMm9qGFvLKPieZziJxVL0MoJ7SeRvBpbbB0zhjIBE2gCAsmkbmG1XOPzJyBgdCSFOi/uMCPA9S/6ISngik8QAGodUfygp7Npjkgg6pmArynughByhDng8KLqsenndZv2RzPf6qIw8j9AAgNTSUIz9oALqLWV8okK5fgRVA0zdAu0KtOBjc1xXcG3NhAUfYazBp752ia0PeLR7//eCAfC5ZYW3jmqsHix55PudU5AoOKSs2Tj0PS/YQ4i46j2ue5+yrc3gE7PXTliGU9mxSQ1cr58Y+dqDkr+mr6HVbRhx5u7B5xA2V6i3aywuniBsLhE2m6RKMna7JOkUpPE4ChWerEnN2LatUR+vJr1W9vSOEC7OgJO77KyqJcLiFLtA6AduB1D1/M3gsZOxRepw5na4hl86ruJYCclEHdYQOHvl37Nafy9OTI93bwJqZxhKDJKdBXZ4jTNYOM7CLPFk5Ve1N95x7UF8mP5e1rg063ph65pmC/JYr2tEoWXPSfavUWT8sE1HBxDtj3sYQ+TBgmHK9gJywZZIZ3UpC6rMyqafVTq3cuJtImCEfacVdtvcmOy2MJXAlaWwbIqEGdJUhtSkEC0OWHdJoZLgMwYPsKBC2Sy5J6QsU66vN1v4EHH6McxX+8AtBh6foYGEjsBRUw1EyUxjIQeWBiOGETRUIFvD1A1Ds67CwlXorzYIA7/fvYsdOtEN/Gb38rDh3driXmPx+WWFs+875dHvn77L/VzHnH1Ex0HNEFhAo/cRm97jWnq5NoPHuve46sa0kC5rvuANUYIO9VoPMV8nDJdr/bTOdVeggMWrCapC4w40djDDDnZ3ibi5RuzWqK+eIawvEfsO/voKw6ZL0k7swIQJW1Wp+bpatbBHR6nvypzegVmeILbS61ivEOslNp7QCXNzO7Aj6UaP652fQHihiNbVISWa/myhTM8fcFxBMi6FXgef64dlRsdDarn/sh8DfG3hA9CNAaNnGLd1BrsqCIRosB2+Qx2XXnhlo/C8aVgfW8r1r0h5/LxuM3ldsVDq71F+pzAiSo8DgIMK8nsEiI/RNCpNGLcUXXWYnl6YGkUpZKDsK2cIrcvpfWUpNW0qhdYSuIt+PrdL
HFYJEQIFIcJ7xL5jVuDA85qgChuuSirbKgXEwxMXCMszBNei03lNfYSP40ShP0+x5X2tRQLIGAO4Oo1kh7YI6H55Bxp3qOoFrPs2UfgQBuGkPljUu5Sckxbnsim1yJS1HgoysGcRqvR+8nmeoFwfL1GvKiz/8ALfc77D4/PthH2oDoMp8IablJ3B7ZMGp589xuLOEkdvneL4cw+4xnPnLtyDz8Kc3gGtbjGt27UYx4jeMyR41Y+47j2udh7rfsSzzYDrbkiO69oZLGqH4zYvdUMIabEeA2EMBB8NnKlRNTWPLymDLhlEGY1D53mB9gGAbUHuFNWStQ1t6EFDB9OvYXdrVpJfX6JZXyJ2a1aD33VTFRm9xtslqF2JooU4LNewqG17DG9q7HyULIs1G7WmNwTObkqnNUGZ9ppaaRKoDpg6On2oDkqPpT6vx65xBCOtP7Uzk8xLjzNgcNWPGILBbjRpnxtn0W9fHaF6ox0XsA8L7jmfwghZq/AgE/HAayIwZdRpJBbGNJp7b+THJ4ih9vAWwzpfeXyV2Eaa7l/33IcyFMVXAFLIZvmc1hmcthVL6Rigifx8hKhbYHoslWUZSTJT4zK8mBqcObInMswI67nuFseiNhI8oh5WVyESseOSrGsIwG7kG3kMGbogYYq5mJ0VQsRI4H4zECoJSMjx50DraWMPE3ccePgepuqxG7s3nqzRnD0Azh5g+Obv8RNztYQDE77LeqAqtqf7owqIvodZHgPBw57dxxEA19YwtUNz0mJ7vsXxkw0enncYtx5+EM29ysLWBs1Jg1YGFR69dQuLe2dozo7Q3L4Fe3YvKT2YkzvcylCxJNGo128A11vGgG5kp3XVjdj2I7Y9w2Y+8ILajwH9mBfJTCio0TgDH3gFcIbgLaEVKDqxiKX4E5SxF/IgSjWu3TjU1THa9gSm33BG1p7CHq+Z0dttELsNX9uSdeloF6pbZsgam1iCjC7w4MXR54GaAZlgYYkwgGn/VnjpgaaO6aam59LBaTaV/hana6s1hKqAFUsrHVz5frotszcpH8Oibvaq9sY7LiBnTMkONbvN/sZrmDm8XeHc9prtmH4Gq/NmtGP8hvf4pNjcaeV6QL75ywupdQaNMIR8BI5ri0VlYNM2rM9oOO2afpgxU1hVaydyrBLjUBiFxto04gLBsjCwzm0ylvuwNGiQ3hS9kXuJgLUBlW8UAjmCJwCG82wT5HHgfVfnBWMT0zIRRuRxjAExeOwuAqJ1NyqwvylWfeoH0b/71aycoI6qcGLpWk/Xu0mi0wlSjAHRtaA2cP/OjgOPRcNahNVqgcX9LXbPrtFfbllE1kdWyVgwNNbcOmKB15Mlmrs8PsMsT0Cr42krg2tTK0N0NTPZ5F8nDLedD9j0PjkthQp9iOK0+J81TIPn/q4sDwVYEEUJdDgxqdNxGbNzl7VAh1sqYhHBKh6WmFm3swYLt0BdL2Fk38n3oPoIZtllggxQ6IU67jM0luuzBQtWqfVqBnz3WcO3WsUKAAAy7T3MnELpyMo/lU5nCFP4rzJMaFENSBYxplQPs5Tl9UpoUSHZ8nMMYU+b8HWmxr/RjsvSPiFg7rQmTW7hgCMzBSsNmLDUNDrQ3gc9+Vr/ccYxJGXcnobfJ63uZQ1NLuZ51MSQYb5wBx8xOGYOAXph2snrrBHZHAgMWzh8bROIM8ceS/g1BpCt+CauFnCrkwwZAkjzjJoF6xrWHHXPAzVdTAYfEQioLGRhAWwUxakImY6rr2LnRa7mgnwY0/5zc26XIDZULcg44A13XAAQlmdMfSeXJwInJxUmM614kZf7y3ANiABEB4QYWCbJOFhjQatjxO0a9uw+2k9dJlislOFybc1QWLviGs7xLZh2hbhkvUol2kwAJHUarkFPNbY9MwfXQsi43o247EZcbHqBCUdcdyP6InuorcG2tthJ5nXVVhiOeWHdDBanrUNoK/SeWW/eEfpAMuqkZucko0UGH9N05iFkqN3HmOD1
nY3oPPdrtW6BZrWERchDWQuC0GTdKTVEy6wYcj5INBBBsCbCeMAZmyY6zxmF6sBKhiFLY3EwJwWQ9F3K+7+ynCm1zuJEqO2tNVjWNvW/KY0+Ykqd1x4vJYmUdUUgtwXE12j4f6MdlzYPHnJYwIzCfciZkOFGEDKcPOliWzitsYwkyoInccSgFFBna24Qkff9pLHSaqsU4IjaRrTOYFVbbEc/ycDKwq5GWMvKonHmoLPTn3oeErwyP97lDVqeK6MyUkyHJ9dMgw0yiZABW2elaiDRa/kmstAyhu6lj5F3IwJOWflGRt1EAIFQW6G/EyGQAfmKWXRSo6MYeT6UA/rH30B9Nw9OfSPNuCRmnP7JiWS4MMKCEIiPkRKbWENSQnw4wLV8vwihwzRL4KSH6TaTumVqLjA2aeehbhDrI8SqgZfm8L1WjJIcVbXw5LDpAzZDwHaMuBIyxmbgbOu642zruhux3Y3MkA3cw+VNSI7MGla4aZyiCTEFPcvKorKErcDkWh8FMoljSI4B2PR+4iQqwxl/lNTNG3ADcBQnWC3h6mWu+85aTNLXnwXRjgAy3G9mjdbYCM5kiJx/muREdL1SZ6ZOpDLZoRgCMAZYssDoJ/f3srKJzn62qLCsTKp3p+CmYGVqYO8D19FHcfSagWm2qkYA3PDqw2XfaMelRf4bs6ww/VsiUiTM3hdEihzdKDwYE3tmqgYPaPZFCWsmw13hwB7Z7hNhtWXozBkggtIFtXAkEKJLURLXu/h1loDGGbTOFioDMufoAHQeJQgoe7f2ZnNNrmADGK0lNpgPOEuUd+0306yIkBiPaofgEP4ZeRGOxDc58ZmDATwZJmEU9R7YERg6hgtfkmgzfPP30rVWffqHX+o1H4cxTPuS26qej1qiy6tTISDIyIqxAvkaZGvQYtaor/Uz1yIo+aNaTqSJ8odKI7Bm70KKUJLRIBTsOaNNf/oQk9OKMcL7CJIa1rb3qB0vwBuhz5fW+ZDqupUxrC52qJ4jjqBUjgCAEDiTGDyrRACAjNPi685EYegyUnNICxDl77q2kUniBiRQnTcRNgA+8OMQSRwE5ensBqiixeAjKvm7Ph58TPfyEOKEeW0kWD2uLY5qh+PaYFFxE3Fjzd69r+tlUuyIEaPNDl4dWtkQbQCR7Ho1e6MdV2KtiU0c1my7SS+RvpZEYHZCQzQcPejJl2iCHRlSeh1jhp9Kts1NC/rHbY01jERgf/9KSJRprUhMRP2+OhKiEt2xkrGn72G0/wXI50UjSwAo2Jjp+aLpE+QQSnUHNaVoF6QBS0wWiZYZhD6Q7HNM+obT6BO8Z0YVwzno0LXZkjDKlBlpa0Rp0tV9gHGsIk/7CvJRmYohYPjm731iJzwvFgtst1sA4JEZBARLk3hhLrrKUCzXu5LSSTFNnHwDuAHR9xky1+BQG92Nm/Toab2srB0TkK8PaXJXp6W1TA0UDVGavbWsLRYCBW57j37gVbjMugCgd1zr2vQeF5se1hBDiI3DbgypnnOIzKD3zE3U8oqHfgEeGEyEoSytFCJgTYQNhNHIY7l/eNSS2e+BLOXlCsfmLM8dhHHwJo8B0vu3DLLVYdRWHAf0Z87GFB4sA9XKMBrTOFa9WFaEhTM8ZmjYTISz+TXcO1klco8BqixMUDo0PecxArH6Ts24tJG0tPnJnmda5U/guSQKTYcDOEtgJIWxYSJZ1AOTFHpILaVYzD9JVpPg6qUCxQGKdGreleJwynYEPr3JIoBpWwZfWoqAaD0S2vwaLBDr/X4iLVan7DnuyVBZo3U0+afORxyYTZI2WQ2lhDV8AIJM1PWk5zliDFy3rJxjxYTB5kK61B6iO6ysEZZn3Og7bJO6xyfVKAZYOb5OT9Csmbw81+q4xuJJpt1Yzh6qGtaNoNAAwy6f0wS9a8aljeP719Ik8yj2KRQLamNZYq02rInXOsO1F8fbL2oncBqJA/PwY0jOJghZA0Darh/ZafWtQ20NDM3WEzHOkISc
YKhwnKa4ttghKXwYhXaugRJMhI0ARV4ziFjJB0QTx132QCYnocdmlIkNRHCuTk6MjyntObIMI/J6lTMfQpj1oyoDsNQarI3Q/HfFuuFz8KnniUwus+h5N7KflTKKTd5XAIj1d2iNi8OZ56TZwL7TAqb1Fn3ZzIERERBjioQYMsozt/RtiXI2pvAhReB6s2X6vWBmH1fNa7vdZrWIoUvySWbYAtsrLqKrQK1h4VpVU0/NvtWS5zgVi0/JQMtR1L5nS6wjuSmsTlrWbNnbyQ0w+Vk6LzmO5GWW2N6ktamRwJrledJXBPBO8xRcjeAhqv4Mt9TWcb+XtjwAwIxZqJkL/634XmTQv/tVPt4xTrJM9z1/7Ln7/VGYXou7q/P0nE6ozRntlKQEYC9qBiBThvl4WcctDuTLYGS6mB0KFG+CnHk/eAOuJUfUIcI7wNkAa6pUl/IhYlHbFDRue5/+ldRsNZ+c2CgOrHBowkx9npVKExlpoT3m3OR7ziQSNKslFNmm9MyVfZBUXD8gk4lEChcYB0gmZo0ThykBmuFmbS9QYDQc0JXHng9xRlKcIdQGgO9Bg6iu+H6/R1P3CUjnNZGyJNikIuNOf5NxQjZ8p441OUS4uMlhzRdHeTyJ5vVp+WmANFvLmuyw0gJXvMbHmKIqXgtJHN78cv1oLUVxYw/yA8xuDeqvEa8vML73NuJ2zcrbrmJquMywouUxzGIFuzpBaI6zWkUMSfsvwEyclmLXpQWI81BGJkFYUbLQ2cyw4hfMMmU5x0kX0iDDUJjCuNlJSSSbDkB+HlDYEPCzYo+hyLVAebqxNWd2umDMrpOFaFFeXG/SQNG6OWIkoN/wPvqeGdXCUvR/8F85a/z+//Wlz+GHZmUjPbB3b+gIGUPmYASvZk3EEDgDcaaGlRlqzwMe9hRvinpOOisxAL6HExJDBaA1BtEYHFU1NhVrFS4qrkk97Qa0zqB2BtcdMwyvupGVHMK0OdYH7l1kp+Xhg016e7qIp94o2d+SFq7ZSSU1H66J7Q+LNBIwGVJUIEPs+nieaU20JCeN+4w+pHMlLQqgAeSz4gkZzsqcBA0DEaKJ8JEmtUFTwP2WkBuvx0LhZu5IdZ8KfsFchCFSIT6u903JmFRCT7+5+QJ5gX3bOK7JwZuxktLPuaNSDL48sJjWrkkuRGWm5Y+hA9HndPdSJPVxmjRKU/B8YQ4bxM01wtUzhIsnCNt1kushY3ikurVJH85s17BnHmiPgCoUF2Ceg3bIaamT0B5g3Ub9ewgRgQiWDIw4xASxhemNsr+gukkmlaBCEUpmdZDpkS/3U5UPFH/XKN0J1FjLYsRqISZBl0modmZDyNeBj4AlC9ecwFaspEB2x5H1uJMa2idD0zJaB+PLpu+ZOK4ed3FeAJJ+nTbxBgDG8+Kn90qWCMsKK9bQYYUaXfwK+GmSbfSbDFFJozpZi1AtULfHOKlXuH1yguPaYjM0uNiN+NbRjqny/YjzzZB6vK6lv0shz0acnPZ31S7Dj8wUnvYt6WMAyUkB2cExhZzS/CnNXrQunB7rtaXDW6XtIjFZCy3JicOIRR8kMIENYZ00/ksdMkGIhEZnz5Uji2IAoudremBHRWM3EcAGsD9HT/ZJVW/iKDVNeY6sNsPZHAyLOkg0lvfXD4i24s97RXujHRdFfzizmmdhwNRplQ6rjAyQi6lz2CtBTcV6aIkyeS5On9deB8TwscGE3fWF7JzCchIplSNArAV5nu4KH4ABMNbAukpqOjyBlnzPhIUZuQXAxCEB2YFlGAKAMBnnYOIes6rMtIDsqLS3RTQJy9qHsQSrl4GeP+QisEKDAax4MBcj1Yi5cQa2EnZWeaJ1AQGPiYFxk0nJrcjdaAN3gDgwydgmx0schf/Kb+TvB8B+/sdfcDY/eKM545PsJACMROkeCSEf8yjHUgv+AEBB4dYIL60XqrBiiXEq7QtLLSw3RfRafx13oI71//x2jditeQdkZpg9vQOqVyA/4O7q
DtaOmFDgLK52Iy52I45qlwRor3cjU9hDVjO3hlVijhon7FmDhSiZa4Y1d1y5bqvZGCXHnAhMKZPJ4zx07pfKp7HgdDGu5wbJNJqtaVTcG1HRCKnDogzMy/EhpI9t8T4xQ4CCyKQp6YfWUAAYRwR1WOOQJayk9zLq57oaUdYX1CydRlWdpPNuXKdf0t5oxzVnFU4zsFlz3/ylsyzrUK2mPKyKR2tWVcbzeqtPFlPCx55taRS7V+A1PO3YNAuEEGCwhhf1aoC/tx0HRPmHcZi+PmVCxbESzM4Q9wBVcpOmOWbQ7GsaKExgorL/K7HS3GQ0SWZ68ueWdHjd9yjUXx2gNwbCGFkaavCsJr6T/jUfoyxQAYBD4wgh5sbb+T4xrOPRP/5G+kzXHnPtbmZ7zkuGeSYIUReJ5xCEPlQr5k6pxXIxKc5TjFOWnEKGymLTXlJDQFWgEy5ynUX51tZQgl8nTqusoQxb1vnrNvDn77Hq+pbngvGHGG5c3q5Bq2NYz5qTx/UKi8URauOxqngm1HFt5XyHLHGmQYs6L8q9io0SPqzZc1aVZGYAkpPS70xSGigd1iEWbnJaioT4IUGiyiRMGU8smLeHFnl1WnKu9soSRfZV1pwmNeRyxJDvUzaVgtukyi1KH+PAuotDzxlXt9nbnqxFdAP37EnWNfn3Adib77iKNPqlzRxwZHix00oO6XnRgl4U4TnbfFQWxkwOiIEzlvoIZBysintePUOoW1R4wq8pdNOobkHLY1C7mlDR9YZQJzIU9QNmJYmivO6DkhSAXMcgVl1INsfLlT5tuf9njICXpkbNfitDLO5bRqZAaoztA8OC2zEgeC5SX/cjLrpBhEo5GwgRgDSjqul5prHP88DKTFCOQzSWM0zj0ErRGQAiDECOLyzjEKXxNPqGe51GbtKlYfOxXiexaiew77w/x0ck1YjJ67Av0Ayw4kLrLF8TllBFghPnVltBKAzBkcmL9NhltuuwQbh8iiBzv/z5u+gv1xg3HcZtD7IGtpb5VdfPeGbVxRMeY786gW2OcXd1B2G5Qo8aZ61FNzKdvlNtQx+wG/1EGqmyeZZU2cNVWZOcVOmgFBbVx0YkmEqiA6Eke0wDuTRq5lCWpXUkYL/ksXcCtTY8prqu6iACDKtO6mJzgkwJ/w09gjqgRHqTTKrIsEK35kyr79iJlbqLQu4yixU7LfA9QXULVHWW8HI1YvXqof2b7biA/Zu+LKTfwGJCCMxv1eyMTCqSRlnIyrhAs6dSpYPKTO9A9/vHLcy6O3+0B7lFnWNlK5BrYeqGxUy7NXxVQQVFqW6TyCm1K564qkP8qgVi1WKEiJf6TL3lXi+m+foQYUVJftJDJ+dHWVElVJWcFhXUc+PYaQWuT6nTsuq0tKCsThpyHu2IumphrMEQRMlePkcXJCCkqLoVerXWIWpD4rC2idCCceTIUqNKYwFb801vLKiQ6SF1vGRSk20axVK1oHHBC3VfgcYB49d/G+6zf+SjuDSmJgHITaxQIAdxunjrAk4qGRQzTMrKK+wIYjSIlt+7MopWRBgAzuaoX0fc0Nil+mvcXMFfPMH23XMezrjZIvSCIFjDc8A2HWx7xWNErp5xTfb0DuzZBqY+gmmPUS9O0dcOvQ/YecPXa8i6nfP+NSA7pPSd09/3nVapIpGcFhXbolg35tBgmWX5GapRnJ8bbQava+YTixpqHAEyRd1JalB639EBZ8fbmrSuTWDBvkuErth3CH2ui1JVZ2Zy3fK/xQpmeTJlKetk8tdIvt58xzW3A+yvQw6MYpjUW4jMpHt8si0UNppJtZTQGQCmvH28tjt/dPgPCvsYB9jAlFpjYaxER8YiDn2KmszyJEdI9SJHSvUyZTLcFJobl3MvFRDsLACYQx6lA9P9AxKkkaBBmOS0gANOS4vaM1yeIjth59qCxZWlfqoY4Z0RZRCGhpidlqe00m4nLQScBcQdZ0lwFahpmX1Zt4D0fUWl/GpNTqYwA2BVe3ES
IM4aKAa+0fs1KIysvmEMqoc/8AFeEc8xhY0K4sqhhVwf60KcHhPSWItSEy+EiCqwOgNqm5wikWHkYr4fPk9Zjt2andbVOfpnPF1YJwv7QvfQVA795QZ2UaNaLtCcbVAfL2GuzuG2a3ZgqxPQ2KGtlmhcg1W7TJJN2mg/J1jtHaLyGIC/r37/ua4jFa8pIXB2Vjmz0VE6e9PAZ05rbx0DkIgVKEoRKesqnE/RJhRV4T54IGQnBuQyh76WjJ04Ph0zFIceMXjEkX9C/pEEIGTFYcloFmqX7LTaFVC3sn5UuT/U1YjuO3WsSeGQJid5Bmnx32nPgaW5UPIcR0ezTC0tuJjq8MnfpmzGl5cI+rAsVm1yrDEGsCxIsVSk799w9uSPGNtuT/mnfLcgmVWsFoj1Ep4c+hCx2wUcknIJAIz0QKn+3QjA2BrRjgDRXsc973BIN2OpWL5PwijgFsmyyA8oR9CnOpkxufYZAhbVEhFIkjWnjcvva5DqGLWV4r4lmN01qLuE2V0hnL8L/97bXGdRZptElAqJKHMKEm3aZoFQrxDrBVAtWeUbJqmbO8NiqXArNM2RQIcfT+OyJYCk9lRSxsuabmkRLCcUtH4FvhYUhrOUVSiGELLWpWGZIU9AtJT7C4PnoaLdmlmu60v462v0Vxvszq/Eee2wu9zB9wFBarHGGtjawLUO7dkS1apFc+sIy4fvoL59C/b4DObWPc7EFitE16BSBfZycjYVQyLnI19o2pgNFE4pDDmI9Zhe2yXJqCxphJkTO1DqeC5aBOT7RfeFZBJDVTOzz9ipEyvrSgkKvNlp0NypGUEZDN+jplmwUkzdYtJC0y5BzUJKDEfipIosS8kh1h12yu/D3nzHVTR9Tp4X26O/A3s1rjnlUw/qTWMsNKuhGFHffjj5W//0nVf5Jh+IbbdbBFNPI74bxtQnEwgxkgHFJl2sSUuuXmLndfJpLsgDQGZYZaVpLQ+PQeogAOpqKdTffg/Ln+8LpLu+jIBThpVqToXC9kyVn2t6HECQFL0rP+CkahBrB7RuSgoJIyjKYL8wgnY+1eVMvwZ2W442FYYZeIqtGXpQz9AJ1W2B7/NNHBcrmCWzMVXtvnINXNWiL65PHyN2PsLaFs61gO/RXV98NCNUgpACkIkzLs1MY1O/FcFrc262jRMCUhC4cDeyjiBDZoHHvfuI09Yloec9pZ8Y8lQAIQT5fkxTg9VpbR5v0a/7yXwvMoRq4dJsr8XZBXbPrtHceoLm1jHaO99KkHcZZPCCW0MnEKj4b5qyrTCvZP6Hg9kZ4qLHrHxenVQJ29/wugmZbB50p/eO+68hkxGkm1b0ohZWwooHTRyXOjByQo4KHrQ6TvdbggQrmUTu2gTxB5X1Uqm20ryMDuq3eFV7sx0Xbj7BAF7osABMUur0MoQJ3XluzdmDG/82d2QfhXWbNfpoUuYDKKHEwJo6sZgSbKFQQ1FTSrOWNMqUhmOdBcTMsgKzBybqAiXVXbOx4IUybVlayNoW1uTJwxN1nVnmrO+lWZZCKzTsplnWDQuDsv+IBqbzd8jnPxTRbtGvEmUB4G1Gdlp9x+9TVYgjQ4QGXe5Xkc9RZxjl/QEgGB6USZFHgEAIGnVzlGpL+k/72mrpaevWV7wNDFbLxcteCu/PymOoT1lt9j4ArxMBMaYaD9cz59dBTKMzDPF8KmMIzciZV7BxotygDkDhJ+4L8oghpH/BR/g+YOxG9NcD+s2QJipbItTrAb4PGNYDxu2I4COGdYf+coP+aoNq1cK2NaplC1Pn3iJl15LjcSspi16dwCiZQOuS0iuVv+jU+exNQi+dU9pmf62J5XVfHnNlAsrjvL28J1lQUZpIbNBDmUyZ8QFcJrAZEiyVc9K5FlIHmwPVDT+vnyHQeBCiBdTZazZVOisyk/Un9avN+wbfh73Rjov7TGYQ4CHnlLYXSDAtTvuRz1w89ZNu15sthkBpgrH2LpU4PEe6lueHAalAPKkL6cVIlIVQ
IfRmaSINBiKTxC8pFQKiNqZK3SAN2YusTqCkh4WzaOolO5+xnxz7CYwrPw1BHFaXGWgl3FKyFcuel5j/HtQBFawoHbvBH2JzxlS3fANrD4pE5ubkDszRLc4KitHrk+utdAACKfpdJ+PYe6Bes0L6sGVSh2M5rd7r2AfuH0vZIABrjJzjiLOj5SteJVPrtltQd5mPd0FsSezTpH+XgwglbxipZdrA2VfreCx7ZaI4MWUZEg/8NCFN7/WBEq3ezJzjHkEgPc/woO89whDQh4jrMUiQxmSP/lmH1bpnx7buUa9q1EeXaE7O4dqKB1wuedClqRyMNNqbyoGsQbVq+e/LJezxWVKOoeUxQ2NF5rC3jqQdDS9+PHNOk+fIYE/J5+B6VgR3E6d2GF4stQ8xIjkvGBnaOnnrgnhkshMq+yjVScWqxS5QIrr4EAHVSE4kFlZWqY2DpSA6pUK9/47NuPRElZjp/OQduIAmkfUbbushYDfqBOCc7eg6ZA0Pn3MimLmqKtR1nVQq9ijeADcjQv4OoZ7rcTUmr2Q+L95RJJy6MWAIQDfmycQA1zucARbOYOEMaluhqWuGEEvYRc5dej6EJFc1ESAt9z3VDsZpV3/fMQQlxf7YbTBcXWO42mDc9ly0Bhf6lWJdrVpxNCc86FAXr8VRijLnkFHqxxrHrCYwDhle3K4RxiFF9HbsGFapGlQxwDVHiW5O5XeMATAOzjoEOizu+6oW2pN0fsuCf5Tv6CPQj9OxHVT8dIawcLlPqbYG29GjcQabwafrUHuiWhGj1ZpZlOtGAybS4EGCBtfWqWZFxsDWFsYa7C53WHYjji52CMO03kUCBQzrgR3cusf2vINrHWxt4VqHauFAlmArB1M7kGGGYn2yxLBaoFp1aEKA8dxUawAEJeRIH5Kxe/SSic3p6ACmAbaSpEpR6Xlt7UU2K40cys4S21DhxLGflFZ43/J1pVlWVAasOCyePt0ArkZwLTpR6++7mNoK8pDdXItW5ZEa4mqNgXV1uq5j9R3uuF4k4glgsjjf5LTetGwLyIMuSy05HWOvPIXWWVSB9WwNSd3J1LC1pPAHGJJ7TZCYYfDlT2Eq2tpNIu7eBx5O6Xm0RGsNfI00yM9Hne/DfTPzWuO8ITP9XjpbfT4GYOgR1GmMA9N2+w5hfYlw9QzDusPu2TV2z64Q+jE5LtvWMJVD7RmaqnzgmgfAC1WzQJAJzAkSEVOCCIURNA6gRjJCZchphgaBZMYB1HcJjqNxxwKpspBxrS1nizFwPcBWNdab7QcCG7aisZiEoLVJWuDSIWTWaDn1u5L2BmuYGRgNN3o7Y1Bbnu22cDycVGdWaW9UbVnVXZl4ej2l41DVGaprWWmmkf1VJ1afbDCsO4zbEbvLHr7nWlf0EcEHRM+jTNTCIM/5iLEbJWPzMJWFrQPsMMJWgizUDqaqEKoxQZZxrDgrV7KDqwHjpxTzmZWMPDI8p2ACK1NWspgQFUqiyDzruinIflEpRAhaqrUJ6/i76fghMhPiRhqkW/ZQ2jq1wHhy2I3cFznI9ZHRAtkViK6rnOTUsB7BGq7kYOslYhgRm6PD3+sl7M12XIZy1KYHfbYJFdHkjfO63kD75vk1AKGex/wvgsdzdz4kivLOBTTOYOGsQHpGxhZYVNaBrGQ4ZTd/sRijYBvuMa6kGEsoajlguLIbQ5pSC/DpOh4cjhqerNp7g4WLqKxCiSY1aU6yjtI5zaFA7fbXTn5xFMoAjLsOYaPMtA791RrDukMYuPhP1iD4wBG4NbCVQ2yL4nO7RKhXCItThGaF0dQCgzIs5ipR0lY4c9wlGISqGpT2S3UYlUqcIRMM0qJgZzPK1GJIUwY+SDtaLnC53hbNxSZp+Q3S6qCkG0ssq6UMzMpoBkaIFRLNnBczO2GcKjux1OpLX80wLBVtlVmawXPW265gj46wHAf4boex6+G7Hr4fJ78zgYMdWr8eMKx7+J6hxRgifO9BlmAsYQBg5QsbS4g2
pADmkMVxyPCZ9jkai2hmr5kTHYxu7wHUgHOTILvsU5ywGkspurQTzzn3s8xr8hoKMk3B5vqrC0zrPCS7NN83hQTrJfdtShO3oilD0Homm+pVEqTpGpmVygo2jCQSEWKssE6hyfu3N9pxKayRnwhTyZOSavpJULL4AK2Wu7+yQB2Ih8VFFj91xqIZTRoUp9Z7Lrz7GLAbOZVXlhg7shp1XQP9Jgtujh2w26BsbCSp+3BUVqdInXyP1rUYHWHhCBEOIUaBD6MMrRsxBB7Bflxb9JVFbXkkeCOReSXBCDvDmGtxRbY1gQa1x0QL/ArVqJMwBqZycG2NsFyAjEmLFUNFDUzt0Nw6QnV6kmjU9t6nEOsjdlrLM+x8TE3XbExrH43UEOsj2HrJzqheiXSRqPKrgHAMGSbSaFeds54oXUBKlMD302v9A7Syhys5HPlbOUC0FujHxRHUbSYjWzRuX5WZhHWAEHzKXilDMo5egyFXI/oWtDCwVc1KLcEnNmYyzVwFxlMFh7C5RP/0GdPnn12he3I5cWT8mghbG8m2LGyd61p2UaO5dZx+T/WtumUSx2QfxJWXdc7imitFZtXJwVhesasDaFB5vA4M2ORtMsR6U7PNTQE7tw0aRM81rXS/HmI2lgFpMZ1andYQsoKKEnSsTDEu5a6yXmPueYvIdbBeRJovtiNe1d5ox7UXYRzKrCbP59O7hwmTwe7iyXPZhJ8kW7iiKAskkdfRspBsYyN8NElTDtDGSP7ePvJ/Xgrt/HfJIkRpOpksIilrkCZF0yBf7L4Hhh2IDBauxlBFEEUY4htfIUOFNZUm7Z2qGHBE7mOEjTRtXD7wWGGmaMC0ZnVWxnKTcPBZ3DN4VKuBs6rKIYguI9ns0Ezbwp4yAYNWxzCndxGWZ4jVEqE95paAMB3nUVLBAQJChCdhT7qWSS5lBjvb/4Pw9gTuMXlx9CMoBOwungBhfC6z9f3YyWofenx0sYYFgYxkW5IlVYbgEHJmWQ48nN9bWrdxSnapp/VBbcoFcsYhRis7DULnx0kcve13DAVfPYNp30W9uUJ9vIRrm9S03Kw7FpDWIyrn3Epdy7VNqqXZ5TKxCtVpUbM4CMHxT79HK4+irAJRSo8ODA87gd+NwnemcCA2f0/sM3SBEoqbNkVPdqsIDCKQm+EVKkzH7jnEkTLjU23QIIMxy/ePUI5IlsQzuV3CmmI4pux/FMhQIcbN8OqJxJvtuAAo1XI+Hv5g34RuD4AgWHJ5UgFeGIz52CWbnme7q3M+ccXAvso6NI77scroSOVtcjRdRteMSUfPkTVr9wFu3rcC5AgzeIZJROYIOgNoNIimg4kBVR1wVLewJiQR3MYZblAd87nIIq2MXvgQEaURdg+/18fRp+eiYfUJVQMh51iWaeylH0yK6WBIisaB2X0F5JOkreoW5vQO6OgWYrWEb48R2mNEW6ditB5DYCr1A7Dz4oVDgwSCMy2sFTp/6bhKNqtef/qd9uqIuYWB/KvTh9+PGQJAPCYm1bSocFp+yCzPAzBuWleNS0oJ0dVAtZheV3qfkvZLcfax5wTtFGbjP+R9casTUNMirC9B7RPY2iU6vEpFRalfkjEwtRNCxioRcszxLZjUPHuce7zKidelnJJk+vw4zGpbfH2lfkFjEYPh4MqIqPKM+ao1xtJ5AUhMTtboZGfg482ZV/laPpcvV/eXHeen5T2U/akZc9pMM6uY2YOq1aj3hQ7WlA6KpKriI8OMuzFg+x3ruOQCSFBgMR6gJBWo3RjlFsXN1+3o/iiMtheZaZeezJCDNQ6LQ2m/rRnukmynL7TaNAEN4OMUXQPEgBADjMhDQZh6ajF40NCnuoyRwXZxd422atFUC8S2xXFTYTtYVt4Y40Qjzpr8L8kHaZ8PGUQKIIFPiAKP/5a/ATOIJIbU4Gw8j8WYUOGL6FhVLszyJEnShPYYsV6lpus+RPiizSBpzxma3Jg+xFRb1CAhICSYzRIHFk4cQHJmBeGk
HFef/pVogc6kmgVaH4blVoQcPTsCk0pS/TMripd0+rSYA4AZ94ZEppoO0bSP0ErPVDU7pyVZQTKCNADTOGDsWLXl5E4SeCVjYY/WaG516K82EyJO6bjcyWnOsJbHExmvso9p0rDbdxOoMMkfFaZZF5mQMzDZxlgLkEzVDhVg5Lt4m8+50t2Lvkx5QlpdMjz3PAcmoMpzxMOnr2YnU5Bb9Kdc/+UQypssSG9dlMcyUUiQn1yT12vrVe3Ndlw39UrcZHOnNYNoymi3W1+hXR1/cPv6AVj/5JugsUtOKy1kAGc9NyxoUWVuXI22OUZTLzE6i+0Y0ugPIEMAIYIjTVlwYvCs5WgtQgGbKIWZb0KdrdQzhDfuEPsN4Gosm2MsmiVG8Gey0CkmahvZgWlBVxapmGGL+YKWFjV9SrLsqH0iYwvUK64vFYoBkz4VoaVH1yI0K3hTYyyce1lvUKelBAMuMrNQsGa4Y4iFtmJMUlUF/4H733QxT7CTOK85s1J77sQpfxR12gxTMbGCIhBJaiWB0lwnzXYJEHkxlh0CMlyodRvNmkpHVDbTTmx+f84CSjKQCGsKezFpQhiK4igqHxLZBuDWB6pYnsisTmDalQjBHqdhh5NrTeA83tXnaOsdIHjE4FlFZej5uIyDZJa5tkkKFxaPjWRrWk8qZbhY6Z171zS3fR50qM5P32NeawQUfuSbf67hUPoqQzRpj9DPMAACRSBIhhUm0+wKxxURY0xEndq9egD2Zjuul7Q9HcNywTvE5PmkZl2FKCfr9Q0HmzYnU0nl5lElAPgRcdyhqlhDT+s2CdaAzJEyjum/Qsnm49bJQiY3Ac1gvaRCEUHY8SIwOGDsEYctSx7VS4zOpZpceRswE2lmaZFUKK2IvgHOJPV7A4V2XI3opmKmum9hVoNRKMubOrGl9JhkeaucabkCv/egyd0dItKQRd5f1ecjricqvBieM4pnQiqS860ogiyew9tfBqz7UAR5752s8OhinQIZjwgTeQwNw2Cejx24jpKOuy7CBZQLywMFy76gSeY0v9d0LMweZFpkoGVLxCEWZkGKMG07fb5u09iNRMIQIViVKdqD8sKISCFBz8/T+UsEJv197EEicBuDyaoRcSryfeP7zWA6H6MEEs93YKXTGkOemzZ3XOq0SCW8JBuam0KUfE0QAsXJvRqjOK+INH1cv6EGw7o/BBa8rl4j5XqzHVd50aYLPCaq540Rui5a5U1RPH6ZC+pjsZkDjkS5wdH71PiaxGCVUFGMGrBn94HFMUK9QrUY4eolYuWY4i1vHaOQDFyLSCxbFG3P7zEOKUJMViyyiCHvCzgKtuMOsV8zC7RawFQ81qBVuRiaRdQAwzBkQAjivGbRdwmFyutSYGrBY4AUSin3d369FCwujXA168s3eM6wdIptCdlFAg/PNJn4Eg0yvEg8o8wZpPE5CSIsB0kGgGIxwr4cdRH1OMh3lHrmh2W62Bni6Hm0hNpY1PUSZB3XTuzI2e0oi/msZqfNq4lWLec6Hf/n1KDnUHBa4lKdcEyTAcj3ueF8HDKEZyzIITH8TLNgIVgZt2GPzwRGrxkaT2r54hy17CD7w5l6QYd3FSgY6ClDYLX/PQshid/G4FmM+jnHnpADoBAzISgjIwRov5QcJYWt02ESaC+jAEjqJWXSNJ/UzLPEYlLFUfYglyIjKLLDjKBUBwPYSamDGmPWNQXyfcDfjdEVA2YRv6q92Y5rbnpjR5rqeM0w8oO6YLLtnMXzSbBufcULnYwDKBUHUoOh1BNiyXhKhApx4saKKGwDGh03v5IBXA1n6rRwR7mx0gTfKiQqbbQH1NhjkDtFshuBFUthWnZGDCdGP3AW5erDkbhawTrbi8SBtIhp1M+QhLAW4zy65NcRDN/kEDZUyDeV3sTle8svxYIZkq4hyEjjMI9u9yGi1mhzssjkybdK1Nhr+i6i/EQzL0lHZDL9E/jQqPFqnz7j5tD/+e4liIjH11i+JmpbwzUm
QdaRDMqWBb0+o3FJcSHaGiNMvr8C5/hcrzF7cFeJvOW5VgcctWShYeynNUwRhQWqTLQoiDjUslp8nGvsQZ3BvjONFLL8k6tS1khV/Ryiht1nJep+3+DA4uzx/Fr2ArcFX9RZ5bl0/GK+F0oUQSnpqQ9Rxv04Q6ihdSdmlOq1a+SgWMqN57pPWmrwkTAW5IsxTCHOSgWWDausWAPEPbXll7c333EpVGEwgR5KHc8JrPS8SB3Ti+Z6w5IkRx+WyOlz7HLNn826cLzfFIIIfsp38FYWC883mwugUCU8HUAReWbJmmQKPfkxjZ6PYOhBo6QIcO+HLqzGcu+UZlhGFnJRfFbIMcZQ1L/8VAYncj9ZDB6wI9c/CpjmoKL/3GaLvk5T1ii1L4gSpXEEGVNW5eSutCRN2Drsb55VzrJdMkhMMCvXkpUIsvw/76fP8OC8p3D+HdUhF9tMgq2PgKCh9gP3T/C7jy7TPK20y9YlzVmCOFK9JlKQ6FLGNUakxu1yEQZiEY1Pzcyifs5eX/C9VWfSGCBkEV24KsGC5CoW0E3q5S9JyqI8PiS1XwgkD1flxwfge5azen/n7BAXIibnxcclgqFDAJN6bzl6SEfp+AB0omqiAsXBEirGVDjLMhEmMqMUQKK6z6ntIZIgM9zKgsBjjDTrGkS9BwBPlCYkabDGcZ04uO9mXHzh6VCzm9iEJSz04de4X8muN9vJDU5QPTgDVy9TlBeVyRUErhmsDOoTHN2woyuHQzJEcpTqOmnxE1JFkv4pjLMwA9JxKTqEUhl8fsjFa9nf9DPIHK5os+MaR5BVfGEEeY5008RgDS6UUAIkVe6kMTnrz+NRFAHW1vDgXk+NNPXm5W+B1G9SgbMgAMlpKb06seVKOLDMDOc1UTkeSXUk1aamIsbldThx0OV5ENSAwiyrKrNSgD+HuPcQwIfaf+iF6Uk+JggKMKitg6sdolL99bwosmEcRnKiwhETKadsy0hfD0hsOYWSSOqDDGVFGDLcqqGU8pTZ2QltfdI0LM3yCpUfGl1SBgETkWYxhqyRggYO5iRA1MypmD5M6sBQ1LyKUSGTKRbvg2zDbNVplK31JM26yu1UucQHXu/GEJMUF5DPo48erbOwrMmUBLUVyZvA4+X+Gp5eYAnoCejDPrmDx9iwtFvrCAtnsHQE+B5+3Lz0d5/bt4fjmkVLB8kYmFJDX+ZyKW+mj8o6Yd1NC7KACZzOV7bltR3gBWPsQaGo6RmexkvLYsEslBqCyswcguckE7KU6z0llRZgtXIr3fcps0pMhOKmTy0J+7N/EpRiLMh6vuvIAKHmf8bmBmPCxEnwraVFcx1NIoMIa6CxNRNIEDAGSosuUEBORUNtbSkJ26rTKusaSR1bp7bqOIeIFAQZYugDAplRECmqccgQmnECt7p0nkB8/CdIgCwMEWDHp8dMHV46hoXqgP1gBXhvMh+l8AJKOoYMgdb8/S1SbVBh224MCarSVojBRxHizUoMgLQNiLahijKrQDTXDwNax1Ot51krrQATPOJyKnAbiYpA4UDQIEzOuWjBRMB5DumKaWARAaASJEG3k39Gr381p/eew54m4cyklJXaRKwBKNIkE0uOLMH88nyxzmm2Q0RwxiLCTuDCzOQ9tA80RSLK1gciOFfD2hpO1pDKRFQmoraU1s/aElYVy8y50MNs1oDvYXYXN373F9mb7bhm0dLe35AXF72RMgHhBW9dnMjL9fagwsAHbe9drnPDXxFdhcg3tRZDFTqpFMIbeyDUyZFMHBKZnC3oTWxucPRFzUgzp7mTp3ig3qBZgNzsdMBpHaISp+zMWoZglJU2H+7+vPNcEAJo4B6b2jgYZ+R8UzpmQJF1EfLIlIJ1WNYOS5KB1mm0wTsgs7TUGVbGcVY8dJLFji8BbeXF1CfMW/7k3OQ4lZC2Qrt7JKQPwX704Sl+551L/riYlU5C1GszTmA9PS5aU0mOy0dseo9OxJfTKJ7AIsxcAyEsK8tRujNY
VhY1+Lq3xA4TlhdMlM6bDDvzMsMFkrOfs4j3rBxGOp9MPCMiTbLnAtKO5Ta6b1qbLvdz4kCfc22Dz702+AatYxVrAdOXikwrObB8VRABjmji1CrDFIv8ObSXLaUG+5jbMkrCCtetGe531mHpWtQmYrA5IGE5OYOaAkugDdtCTu47VfIJOIj5l86qjDzyicsLzk1mkEHdm6KRD9oSjBJzmg+IJiYBNgCjMH6siYiReMZWpfUnqTNUKPqU6kmGACDJxhwUdD1gqU90dhwShFJGpPpvHBFLgsih90VelOH2L8X5KAjeCcNitOWOAVCtP5K+qso41jwsD+7kzcP0Z/o8i4kEj2ZEUqfx2picIC89NjHNG6ubIxhVe5g1GZfj4nXhiuK0SmIJkAvvACbUY9I2BP1L/PCDq/Ie0KK7J8AEXeDyHik8G2N2XGMArnYjrvsRmyHguh9Zw9KXkT9nXEe1TU4rBGBZW3gDEIV0JMiyyjjCyAK9rmBgHgiqDl7nM8dUDhedz3Wbv1+q62n2ZmfXb6jktdNsuMz0Uo/bDc7LyPYRHC2W2RYhwiM7r3Iyeem0JjUqmjs6miAqut1BS/XwPiMB2tQtxJbKbuGMRavBsRDDaJiJdpdO/RXtzXdcBbQCTB1WSf081PVdRomTt5QI/UDs/5GYASFSTNOHoVEuIoxHqtMMhrKyen2UqNb5+wsVtli0Vb0BkKzpwAKuf+MEgCbHQD8j3fTFAgxRoUYMTBSJrNn23BHhqpqtA+pclfp/GKIrshHdByuQnXWg0U7UUkpppb2Bk3qtaEZaCJumBUhuujnlXmHmgHxt6QgZPXI9EYbAWUPjGrSrVm7WQuUk1bFsem/NXLSJWS9Va2LqqwmTskgeNxJiFlz+MK1x04g9M8am9xVnokCUGgtnWwG7YlLAZvC42nl0o0c/8t+cMDMXNSvLL4PlzMAamBGAs7CeEYUh6HGW4ahNO63BxAMkmNk1cPDnvDapryksIuw7GSoCEvk9b4+JY5rUt8rtC4KIBpYBBIsoje7srKJkW4qJqPNSWHGyW0XNMJFb0ifw5qxiQ1NRX0gQRez4nOUG8hgDKGhdtxg31K9nx34Kj+7NJDNFK9Ir2pvtuGaRyjzL0kLy3GVJtn3z2xI+sGmz78eImM0TKDKrxzA0N1HvBpK+ICCUdYoYbVZjLr93CS/uRVOHnFZZN0ABCZY1gPJ3LY6jmOk0WpCtOdtwN0Su8to0nNG4idNSEoSe37KvihshhdDhwDAfUb6RFKabR9DpO4qjigHkMK053WBZ/YKJAoKhwKGc6Iuky9Z7vumtKQgMhbZkGXGXQsjAfiBVmr5/qerxEfgt1IYm15M6ct6nDCfrd1FmWTcG6IQAzrD4XxCChtoYYh17Rl8AAC5OSURBVGopYImhKN8zYCfQuNZR8udmdhz32fFsN2sgUG/BEC1tfj3OZ7wdsAnUWEB8k+t0kklTDo5miMFNwWK5lmmA6EEwUTIu4uNjSAPa6cvLe2Rv/yXo0mtFz6UezywJxde1BU82NjG3xSjKwUhJn1txIvccpv7RUsdRyTEqWlzl6erPg0hfZG+042oXizQUb7vN0zQVh9+n3uYFqLzXYwRuH3/0jmpuekFW0idBkaDjzk3k6CpEABEcefmYajVjQOp+L00XVMuUhsln3Vgcmd1YKXpNLzbTx6ZGDAGwlTiDNkFkMQaGB26CXZQkomPBy99nF3Z5Y/JPJopEMtII2k8L6fMIWj8WEIFek5upA9ejmOYv60vx2QTAkcgUwciCyQtqCdOo+RDRE2AjEIzUI22W0dLFTxs11W+V548j7/3ag2bTmvVVZn+bD9pqO3VcQ8iOI8jiGsCZVum0mIzBPzWzVFNHBWSGJyC1Mc9F/iFEWB/RUeDM0+bF18cIGziQ4CGXMUlyOXUWxkxh4nlWdqj1QY3MNMOaoAtaMy6yraKOrCNB9Bwn5xABIpcJE+U+FftIlHvbIkl7isDgc9mlMtvS
TI0D95ytBWL1E82D5szD9OlyLfriTYmA2rAgAZBRHQIAYdHGoUdYX+ZG8F2XWgBUDxJ1KyhLPpavam+04ypNhYZ1EVDGUnmOifJCzjdhLip/Eqwub96oTX6Uotgh8ByblH0FIJJeXPmL6uWgmLX2Z2iBNoIbBi0V+obzG/dQ5gJIpiJsuKJ+5ktfRpqtyfsUSuLz7Gdy0xekkglhxPcp0LC6rb4eQBLhNSFHhEJaIDL7sEQZFYfA4r2A4PBl5Ex78A8BaHT/rOFGMLKJtaoCwrq4+Bi5LjZBS3V/wp5zTKrzaYts+r5ePsOABXs/ikv4SBYbVQrXRlYfScgYfJ324r5InHuIESoFnZT1BeLWb2n3oADeths9zA4YnMEQDELkydmVMYgguMgNrVEWWWsIRrMxICmwJMivvP702p5nWgpbF79PYL5Sqb6oT02CLa2HBh3jE2fIRyzaXOQ+BPZgSyLDMD0ZhgR1ARPXod9bkYAIvqIMNHPKQTywf52UDmtex9YJ6goLK6LjTIu6rWUA6i4dIwJATctlgXEAJnPJ5J+rDtayX8W+bRzXyxamH12sce9k9SHvzavZaQFPrjdbqKyKNxwZWSIMNCVvzC88QOBE0idpwkgsZ0eFohirKoHpd+M4ayLD76g3bzH0zpOT3pxpkMCsxxnTzveAtyA/k9QqYZXCwSRY7VCWRgakka7aIdjFOs4ED63s+llKh5b3KGXAaOa0ys+h8n3kOxjj4KoWHgZ9yFI75cI1tzTDyCApGZTruEVBMNLM2QC6YNmPKPCifgOQgROn7WwNb6jI/CK8kXYJGatjjWV6uwnoJOvZUYDxEMiLMyWd01aaTgkYQmQ4AXx8WH4rSgMsvw8jvC84DmWWJQjCvH41YSBqtlbAgpOsqszogAnpJjutHMikGnsEoJJiwD5Lt7zei2DRag8qSWYJDkaVsKH1L4ssxSSh280MaqIba/ilAwOIB3GSfA9rULmWs8KCDGOWJyLzVSNs17LrllX3a5nLVn7P72hyxvu0B6efTKc1t9VygW6zRjQGJlJiEpKXZkJI93yxqAHqnJRNlJ2bFTqtjznRJ/DNVWZowjbmG0mzn2jzTSSQnofh0R+y6OgNqswlZ7jJN0aArEkYeQQOOiIAEybSpDg+oznnRaPQ+SvfVxYbQfWACSfvBithUf0cIDc9l9vN63wAM6uMA8IIW7WoRLBXtebmOnGaVTkQ+1hob9kMxgakr06DDf49jdyQjT/s65rGIromg2hHWBmh48nAE8NTg2TbzhDGEEHgXi8zAiHw4tfJPiuMaCiPgC9JVFrjsmQFZsy1rxsXY91ffVASLG6o5U6Cp0Ku6pCjmvTcyWuBXDNSZ1pCwGWd/ZB7PfRVJqgEkKBsdnBKZaeDDksdjGq0HwqaynjnUHlBtw/Q4821d0Z2A2LkiekqAYcqgJolT5BwFUyh0EOuyvWtyRe/uab4IvuOc1xvlAnFW/H6aEyqqYwhYvB6w1Bxk2DWMEyTuTsxckY21zYD+HYwxCrmXDdxsI6HNKqNMBhCRO8DdiP/1JuUoNAkywPZCBHuxGFK/Bym9PvO6tCUXYVsqFxgStMaUvkZB2zSDnCAdJIyMWDakFqyzoLPYzqEkBKDh2sNnLEyvoWbcMvMSzMtJ+fIGRYd1R6X5EilXuJsDRhCCLnu8FEa7a4nCzs8N7pHY7kJVdiYjTXwLjNaF46Dm2XFNHduQg7YDAG70admZHViythUFlxlTWpA5uyN/1lFcomvb1NQvm+qG+UvY3IjeEISZrBfUafSa2leq5I33jtWAdkBayCXPlpq0vNMWWHttK9lX5nup+HMi7MvcVSFcwLKe58OojElDH1YyonfY5AWBp2hVe5nTN8RMMI2RAwI1QJkHMhWrOk4YxYmgovcQ+SL2W3v077ruD7B1h6d7j23AHBxvUnFbMWgDZUQQTaddaWFfiBfyMOMUVCON8jZE6CXdgAwhiASPkwFH0KOLK2JQtXn
bvnaEmpDoiLRT5oXkx1yOkAmVSjttliIuHbBE5jJZqLDIZryjRYDJn1o5ZiNMtvTv5dOK4zcpzb2SKLG5bTcEGAALJZnwiw0sDSFw5QJ1zqDhZNj1HWc2cRCyNc4IAjbEjm6TxHxi1KPD8DGr/3vEiSALwKD4hgZ1qksjoMDZ+y1daitSb1c3UhYuIghGCyriMG7RJPvfEAIcXJNGjk+lTGpr6sUa1VV8+TEDu186cDKemeRuR/MqASa81EAgJhry3M71GtnTYbwyh0rW3DK7bX2PvkOxXUYKTBpiMYEle81t5M5fBCSZ5oSUg7W96QU0FRtbqcpoM4kzEui7VlCrmX9mQwHvPL+e5lWDPmaeQX7ruN6A62shb13uQbMFOemQmF4fjPoBRix35ObZvIACAZF/StfeDrl1xf1Ne3m1x6xOkXIBBvH/THvhe3NXdJ9uQlGiNm5UDTsOEyxjze8300WgZudlz4/z7TGMdN+x2IKdR1ARfGZjEVTL+GcgzP7MKE1BIcA2q3zZOGhKxYC4mkAANcbTc0QMMrFkiHgRxfrDw8uTDCqsioxzZYjFQGH/BkAjINzNZx1GEVvUJ1YJQoLjWOFjEECInVcyhxUMkZl8wBPdViavXBQpsfjOTZzWhMyUJlxAXtqO5M6ldihBnGSulHkG3JPMq7cDsV+37jLmrlpLyRFAJ7Lzs9/abYJsjDu9zfqZkQJ8ia3A9kK1tWSnWbhbZoHcfreRlpMSqi+vJfm+/Ia9l3H9YZbSTR5dLE+uE0ZnZc06rGIbolyk6LCkcDN+DcJnFiOsK8E8qotYaGw19iDxp2MeM+DECcfrNBNkTWp2jhR4FlcfgRgc5SokTIwzbjmjusmB1bcQDE5wsAYXuToVojeeXsgj9CQ2WdlvwpkjDvVO1DN2VO0Fax1qGZjSOZjYSaOPdUT63TyaOhgawNjHXTyMoDU8Hv3QyIc+a/8xpSZeUD941BmKjstY0+EpONqwDn4yIKsSl7ROmkJc+vCrlR/vc7S8Tuwak8YwsX5j4adLmIhh6Y1SfkOqcE9TiGzMsuaZ0tzqG16r0whvEO2D9XP7EDwNCGYAHsQ9149b/4ahbdLUWCpRxmdAE2GhbgLIhZSfW22f2VtWPYlihBBLPZhrtJDMeK5E6VfYN91XN9GNr+5AXFWyAy3zockwdONPmHttYiaWgNAyAKTdQAKE2iUSynyNbKwsO7fwArwfVazoPn8KTGtVSX1ilJPEeCFBphAhXFOpz8A/Uzoy/q3uUXCxBHGML3RSG5CuSHLrJOcyFjNx8QYAxgrwww9qN+JM5PFQm9UkX+iNIKjeJ9SgDg5txEmBsRhywuI7CNvJwLA+JCyrSIoKNU+9uqTNy2spYOT/WeVd1kUKwc0bpLlTD6+eFw6lBK+Ki0gC0TrtIM0bVhltxSC1Toopk4kNXVT7l1DsZ1meNPMY+pA9LszNygTfebHdmJ67GTAaLo2Z/Xe8vnyGplog8o1qJJrsRwtVDoMVa2302tRxyDx8Mx83kux5zS9YXYt7LE1D3zniBuGbr6kfddxfRvZ3ZMVLq55VIBG5UQEr4ytJMPDTmvwzFJLw4VJay95Giq/V/67woGWwDDg2GMivqk3n1oBI0BQpfS7kiu0h0v6wtJiV9yU6WYWmv4e3FAuAgon6vM3kDeAIqsqPw/FIqNZH5AhkCrwoMyh596UyW6IvE3waSJvHPo8kVqzMhkISq7m/pdi4GC6oV0FciPIuQyxFosUwItHtDVw9uDgNfG6tpfJ6j7cAP/sLeJUUBOKxvV0LAsnYoHDiuklnGdcYlhqVqYIQNqdGOGlr6kUI07c7+L9D2U5Nl0b+9skaam5Wnp5TG66/uaB1E313bL+NHOOU3WYHhjH5KjCyAFiHPp8renPmaUBl8ZCJzPDe+6/Mpab+Y1NyiDZoRX6ikGdmJ9CgrOfBzVRS8TkFey7juvbzOa9YCoJpZpmaRZS4IhVHRX30mZSxVyNQaGR
ykhm1Xegfp1mWE3mV5UOqWzadFOIiYvhVWJveUiDa4jSSCnMRh1aqJi6t3uEjXmk+1yb/T3KYpRgQb0JdQGLQUgS/NlQ51ktDquHxwAax+y0ujXC+irDi8WMJqrb5LiYNlwzNFpGuxqx79j5hc0l4sC1NaoqmOUJ8D1/7GUuj5cy/9XfFFitqPtQjnBIMt90OOew5wSuKhbN8YaMDJjAxJN+Kg1mjGNlFlfDEkOmc4k3QNhuALK+ZyYYJRHZONXcnGSKui9g55oyXNmWnVWYjMApa0YTK+DVvUz1UEBVfM5kn5RMEbygFxwoht02B0QSHMVx4Gtkt2UnJs5sEhRpNlXVMslZn5PpzWkOXm4ijiVC4GpRxbD73zmddxz+2+Q8fzfj+q4dMBt61m5zTmbk8JycsilWadmt42FvtQHXpgrGT2JaAaBxBA07nqfTb0H9NTBIzQdIN0RMivXVFAoscXPjpObGzkqVFpQwEaXZUmNebcKkENKwxb26ijoc40AQyR7escMHqXBSk5pWub0urrYGXMgyUcC0JlXCOJIpGVchSgNmKYcTx4FZid0a1Hcp4yJXg6oqRcLpHPRdWqj89TV8P4KsgWtr2LOCIPIB2ERLTp3WHA7y4w2vxmS76QJcZAh9h70R98BkYYzGIvqWs2xXAaFOAzS14TtnYvsq+/PG73IwJYQYw0NDixlsyA4rZYhlHVLV0cuhoTc4Y2DqkEnh1tk2ACbZ6IQKP2sNIc9OKoyDXEtbvpbSdZWz+9APiCEg+ABjmYlo6kokz0IKlib7IAETZpmW/j0hCt5ndu8Mgtw7rxqElexNDW5f0b7ruL6NrT06xe7iCaxl+KOpHBYiKqsacTpUkXzPWZTcnMmM4ynFinPPHQUZwDmZuiwXqK2yk9J5U0X2NXdaAEfJZc9LWQBXPcYJ7OgDO7abIMLSysj3kJUJU/k5h6wYe0Ix5pH1MRRah/IYAC0dqF0iiuPCKGKk3RpxGBI7Uf8leKrCXv1MFwFTOZjKgSxnbGbxwda3os6Pet5xM/uOnV8cEANjwmk+m8K90kIAXWiLyQERyItm3cpU4yqxExkmHtLCTwVpoJye7Wyd1CVI20MK1QpjsyhyyoSMAXP9Z6YOSxwU+Z7JRoeINIcyfjJ5ggKQm9nJYJKJksHeBIUSNizYe3Hoc9AjjyH1LK2n7p0qO0MYCkLGbMPktEgzsRImtNPXTM6ffvasfhYDC1mjeO1kEvgr2ncd17e5HRrp3j/+Rv6lLPqW40HkworGIroapFmTvAYQKMfW7DwavhiDOilZSCYzi3ScgWROZQk3NWQWFGegeHzIYR6IcMtFY5IpFAvtXpHcZggMwOFCe/n+Ct2VkXgMSRUkMSGNFQXtCFRLGIV5NtdZkHRzxbDOOMuaSuKG7n+htK3bUNPCnN5B+J//H4T2FP70IRDCwfP+Ihve+Z/8QCn4NFMNEdPnDxJjUg2OYaqENsvvCdradaneV0JZcDVH89YiuorhMVdzTVHH5szhaFcBroXOwKpcCwSSempu/yCwwyJE0eykXGuzbrrox+Lc6iypYTeB6iaTjcV0KGp6j+I6TZkcDlyDB+wQE1CdFgqHNZ95x7WpAFOzc4k+gOxzPk+c08RpzTLfyT7rPlmujc2JIOk9gWnwNSsTvBDSf45913F9B5p79o0cLZXSLMWo85xJ8Gh7XZQnjqFqeaGR189nWB0qPlMMPOKDjCiCFH97Xq1gHvWXqvS6bwdes9eY/JwbJjnS5MDGfWX8+T5o5heRFqn0mbZYpIoiO9VHsKsTYOgRrp/BbNe5FqHyOM0C1C5zPWG+r0pbNg7RNfCuRhT17udmjM+xaGtE10yPT1lLLKDDSevCTAaJho5Vw8mk9gemPx8+lkwi4Cw/KmwKhZnM9Hc57tQsuD4o/2I1IFYNZxNkUNsaQBaqVshQ2YMBLMxtyEwJHCmQk5rS2CVonNs7GOoMswwpLfLPY7KW2z+vDlRmW8IUVAeh
18lElcJYntQDILVlCARIkgUdhGVT4MAOqKRKJEZhqcu4t5/KgOWMcQorFlmbKwLYomQQX0Nw93298ktf+hJ+5Vd+BV/+/7d3rcFVVWf7WWvvc04ukEQIyUlQaECLpQRqrWZSp9QZMgHKOFb7wyptsdOBmoZOvdRhcFTUfi2WzvRHO079J51RaeuMlKlTnaKQOGikSmEoUDMmpcZLAhUlCYEk5+z9fj/WZa+9zz5JuCU9sJ6ZwDn7utZ79l7Pei/rfd99F8XFxfjqV7+KX/7yl1iwYIE+5uabb0Z7e3vovB/+8Id4+umn9feenh60tLRg9+7dmDZtGtasWYPNmzfDvUCZgy3Ghpi5SXOgfOBYIim0JCZTFxmLM5XJQ5OXAguvnwqFS0ePVfB9Mb7L/WbBPxYdJNU9OMRIE0dE403aTK0AyAlvjocoKcG4TGfD/FjNQ1eAHS/81xzk9QvsgLkZ8EQSmDaqBxcm1zrp30HJWMnCNLea/QME0Y6ekabeWeN1MgeUKAa5SU3AjIQ/T0VdRv2TQCBPz1Chk8kSIDsszufDIvpMBncwNwHKQhC0z0FZQAcFqAHZH5bizR1sVUABD2UsCcy1ZrtFba7I4nzAyCBDOou6quelgky09SE7KkjL9M8ZwTV6oHbdsAYa9xzEwbQcRAgLQGi9Veg0qe0wNyn+93lAWr6ns1Kod50hEZjyjICf0O/vibFAa8JK8/Vzc4KaxSIVcZoyMSfBmrDMceI8cVZM0d7ejtbWVtxwww3IZrN46KGH0NzcjCNHjqC0NLCzr127Fk888YT+XlISRLp5nodVq1YhnU7jzTffRG9vL773ve8hkUjgF7/4xXl3yGJ8MMcBfGNmJP1Suh6WKuiojjcd0HpjZD2MGtRMk5E6N2aWra9lpp8xcgCKtinTpGHyM4lSXksRkZnpw0T0OIo5JwqxlscwL5ptBvKbE82BKLpd/k+qb34W5KbAksGg5JumQfP+Zr+lD1HdX2fdyIrITp0QNwajJz4OfU/OrA3ukSgKXVdVB9CmLyPBsiqvA0CWNZHdg9BiXLdI+IMcVw6WrhhkEZnxQ/4+irjMyEvDZ0KAILtsRmhY0kyqM53kIQ0OiCUfMXkmfEDWEWMyQ7vaIaMFvVEdMavW4Zn+IZZIhjWSGHIKmQQjgz+AwOcVeWbMgAfTBMe4Y/hBpVnV54HW5HNAkpRoKxfvtyZArq+TE9WnCEtmkBHa3Rj+LSCkDZsaGnE3xwITCn/P52+eIM6KuF555ZXQ961bt6Kqqgr79u3D0qVL9faSkhKk0+nYa/ztb3/DkSNH8Oqrr6K6uhpf+tKX8LOf/QwbNmzAY489hmQyGXuexYWDVzIj0G4Yh6/Cjs1CjkAw6zSzXshzRHoYPxiE4eSGNCuzWb5nlHztWwhKqESgrmloW8R4bCi0PiWS0UDfLuZ4j5CTlgcQhOaZ5CX7kqN5qYFdBm3kZC7QPg3jHm5KbuNBVJyhWSmfHvNGw75H43pafl4GbHgAfHQIdHoAdHoQ3pkhuPnC48kX5sDotQBkmQvfl9UBgHD1YGMwynqEYVnORgT6yEOYGvx9JB2OZGqaKGjoJMEyCTC3KPARuQnhG1GDZzYD4pKkVHi358PLZIOIOBUsIINSWFEpeHEpfLcIfrIYkKZORa4eBXE3cfkETejacSrTi7HMQ/uzuCOyS5iDMw+bSSe6NklrVCE/VmR9oqlpqXRkys/HHZAfRPlBmvq0C0Atmo9qYDkmTiOCVV/LD9ZxQfxGZK7B45FJr5SH70ht3cyonzcgyp+I6SMvzss219/fDwCYMWNGaPtzzz2HZ599Ful0GrfccgseeeQRrXV1dHSgvr4e1dXBgsnly5ejpaUFhw8fxnXXXZdzn5GREYyMBLPIgYGB82n2ZQ+/uDzQcLQJy5HRf9JZ7WVlqhxDszBNGYjU1SKxxsUMnQ+90OreUU2HfID7ALnCLGOYSQCEAyyMgV0Ri+6T/J8jMAHFkZeq
EBtFtPJwdFvQHkle+oUMv5i6SWMlEDVJmLsiUbFP8Hxfp89ypJ+GAaES6QDAPCELFVTDRwZBZ07B7z8B77P/Cj9RHiQrrwx9H/3kQ20idLir89EF03rZP7XWjpATcq4TMktbHEFkauEOh5sqBcsmBKkok5uXBecueHYEvvSPUDYjcjC6GcCVa9QcDzyZ0P4SRVSsqAR8+hVgpWXwEyWgZLHoQ6IoVMTRB3LyQ+bkDjSCf0K590zCdqSvhsngIzU4K5MbizFhm7IzycgwZUJFX/qR4yGtIo4jFgUr6Ag++R74puaaCGmD6tizWuIbE/Ye3DswzZr3CQjKWO7CeGwF89B6v6hczhLnTFy+7+Pee+/FTTfdhEWLFuntd911F+bOnYva2locPHgQGzZsQGdnJ1588UUAQF9fX4i0AOjvfX19sffavHkzHn/88XNtqkUElCwJBkMzDU7Id8TDWXgZB1geQjGgfWGGOc9M5aOIyyM1aHBwLn1orpjl61lo1FQWvZfxWbnBfISphJD/5c2XKy7W5GiYc2LNP9H/I4NVfPCIa2TglmuOZBg3OMQsl3EdjaZTZ8lrqskH8z34KkvH8BBodBinnn0cfHoF3Jk14NMrwEvL4J8Zglu/LNwOM50QE+mYdBSnbqcgWEVYOlGzsUZKdxXyseGy6rPjwk3KoqQZNTkalaVBfDDXE9GDrhx4VTSh8tco0kokwUqmS+IqBSstAyVKREBGoiREWqo2HIBzq3IuB16o5Q7GdkFiQZBBjhkszmqgzYKiz+r5ZkxMPNSaRJ2YFhB+XfJ1xWBGpEPKtWYFhM1/ESIDECKjWERD2E3kOTeH1EziNkkrxjRqfh/TNzwOzpm4WltbcejQIezZsye0fd26dfpzfX09ampqsGzZMnR3d2P+/PnndK+NGzfi/vvv198HBgZw1VVXnVvDLZBlrmHWQexDpdLxCN9VMnfg1iY8Ny+JKXiRzAbmdjUL5kxU1dUJb80FrjyXDBjjmmBIlnThpqZmaF3R1yOfJgZEtkd8ejmIM1tq84dajyb6pkrL5ASfQJrYoBZbqxpqkbZwN1j0Kq+jZrDEGHhRKRLXrdSHD237P9DwaWT/+xFY/wlBYrNmw+t8A/7pATjTr4BfVA6UXCECMgDAz4pIT3N2LQdoRQghf1aMHEX3Sfj2WVBby2EOkqlpIqoyOwqfcfnbc3Dfg29oqFxFxOlMDSLKkpdMB1LFIGkaJLdI+GaTJfCYi6yXm8PQzOAeRWjiIrVf5kIQFhPBMczPGgmGw1rFeNYFfWn5e5GOWJRane8CXKwHIy9rrAOMZCUxr6meSScR8pOFgjkA7ZMyAyQABESsJ65BxYOcwBMtqPhgCmEmjPiweISs1Puco3XS5Gtc69evx0svvYTXX38dV1555ZjHNjQ0AAC6urowf/58pNNp/P3vfw8dc+zYMQDI6xdLpVJIpVLn0lSLc4F6yB0XTA1eQJCE1jgmbwJRA8o8R0RaM4qDcNzLoAXHDafgMWdwxr04gzZvqXtos6FM+wMEJqIoacWZCMVNwz4q/dk0ISGIqDOJWS165XIhLJG4kQgACL/YLuNgshGh0hE5bfCkVuFqv5f6PShZChQhXIadO+DTKsTpjgMaHoL32XEdZk9OQmQ/GToBdiaIICW3CEG4cjIwDxpaDIMkVwIcCptsiYAMqWNJV+5W9eNcHqTv8smXhSABzh1QUYk2cyrS4qlinY/Rd1PaF0tuSgeTmFqWqmoQx1W+aqDRVgWPMXDmgjkuHLcomGAo055JXvI38SITFh/xFY8FeTI4senLBGHAyUInkDbMaSr6FkC8hqKegQgxmFYPqAmoYdbU5kqV7SUhCDQ2r2AULHjno5UCQodF3AuhlGATuc8YOCviIiL8+Mc/xvbt29HW1oa6urpxzzlw4AAAoKamBgDQ2NiIn//85zh+/DiqqqoAADt37kRZWRkWLlx4ls23OBcoH1HeB0c9hOoFiE648p2XZ7u6
n/It8TwmAj1osxgyNEyP492DS/IyW8M1scT7vvT3ib606nC12Sg1YmYAiYKAIOxa3s8xr2tqfurinIN8BuYWySCWUZCatTOuCcZE6R0b9eeR17eFzUF+MCCLUG+Es367RSEtQWiMTGeiAACuB2gG7geDtUeKuKXWy0RvGAgeF7WqwF2xyFlqNr6UCfOzYMXTtJyJu/CdZBDlqjQsGd2Y8QEy6sMBwTNkmohNs6GJsIEsMCnq0HhwtRQvWP+lYm9ITVoCwlRrxaJtUZXFFYE7msSC9wyeA1BC2l/98B8QEI2JfKY4079q+uBM06Y+VhKm0vomQirGBFJbW2J8WtoffB4ElQ9nRVytra14/vnnsWPHDkyfPl37pMrLy1FcXIzu7m48//zz+MY3voGZM2fi4MGDuO+++7B06VIsXrwYANDc3IyFCxfiu9/9LrZs2YK+vj48/PDDaG1ttVrVJEGXZDCLOpqzM6gX1Pxsns+D0g5A/IOuXjhp0tNOf7Exp1RE6FQWGfZjfGVAQFbq/OjM34SvospkJMFYvi/zvvp6+Q5BuF/aDciAaG0p1QdNzjGz+rg2ERM5DH01U84mwhGebiq/DwOAO2s2/GFRq800J3r/fjsw5ajBzHMEMarPUg6uNg2L+whtg4kgDRaUePdZ8CMJV5cgFkZiMbDnA4wzUXKEcTBPBm6YQRGmGVom1lWalQ4KyfjifjHajY+AvMyq07pCeMzEiYx/A19nuBaYE/lxzNp24f8pFCzEJGk5vqrcTLICAwfnSUGUUgsL+Z4B8dsY2nXIzKwnO0HDcqJaVf+UeVOt09M75LMnEwdrUyX58aRjTiqjf8Y11Z9+zy4weTGKxhKPdXAeY/EzzzyDu+++Gx988AG+853v4NChQxgaGsJVV12F2267DQ8//DDKysr08e+//z5aWlrQ1taG0tJSrFmzBk8++eSEFyAPDAygvLwc/f39oetaTAzDQ4NBlgozMEOahQj5K7+ql5nDyHMIBLZyhVgizE8A+jR9n2BbNPQ9avaLOzYaVaavb0SXRc/P0bryEFfOWzCG5qrOy0e6yucTjWTTPhQZgu5ROCJTyx0BYZeWFMc2I7P/ZdkIH4nrVwXbj/1b+HWMCUx0Zh7bP2YEKTAOOEkdbTjq+dq0mFXdYaIgZNKRz4uxADgffBgkJf/P+kGxyZA2DfEsKk2XsdxnQsiJtKY0EYgJhir1I82jhual+ugRYVRWb/YJyBhBTbrUPYckq+Bzwvisrh83IdS/T0STCgVA5CMPEyxIRKz6YL5PjvJ7S/LSyyFi3utQtpSY+5q+vJySR5KYBwYGUPmFG85pHD8r4vpfgSWu88fI4Gdh4pIDpPLZqJljVOMKBnuzjLpBXgoGaXl5nrBcTU6eGjnOHPxz2xE+3tTMzJlv6D6IJ664e5v3D+03Z8CGOScU2BByXIeDONS9QzXNlINdpd2SJT0EIVDIf+Mov5hsHGPAFUZJm4lg5LNj0Als8y1BiPNJmM54ZSYygjii2dkB6FpuaohTxCR+J7nN0J5Ms5u6XkZeTIXfq+uGBn6DvHR/CIb/KZf48kG1Ne66qm0ZWcl5JEvI+D4yksAUHAYkOEfCYUg4PERWiRChBUE5IQKLmAwBBD63PJND3X7jfTLlqt7HuAmAei+cPPdWkxpzggsE0bw55xvZ96NJBgYGBzHr6sXnNI4XZI4lxbV2Pde5Y2RQyC6ftpU1BpWoaUXNPGPJS8IkkLOZ4Y6FOOLSM0QTUcc5hU03AEKzzLj7R/sS2hYx8UnvfDDIwxj8VeSV1EqUfFXWewe+yIXnZ8FU7jcIZzoli+GBY9QnjGaDkHmTrJUpxuEM/f0DcDlDWWm85qUwMHQGSbV4iBlrlyIQPjDP6FfMcE8+wFS5GifIRQeEyEb0SsAH4BvaVNYgOfNZC4hLaFsZQ+NwmCACBkGIauCP8y9KTy08XzwLBBh+r/GfOwA5EZSEoKr4qOfjTMZHRrYxK5mBywun
HA7OGIochqTLtdYliIvBNUiMx2lfUfIwJxDcHfNdyzuxo3iLhOqrk2dip5dEIPgNgWAyoc5xOQvGBUVgkedp8HRWnnv2ulNBEtfg4CAA2JB4CwsLiwLH4OAgysvLz+qcgjQV+r6Pzs5OLFy4EB988IE1F8ZArXWz8omHlc/YsPIZH1ZGY2M8+RARBgcHUVtbC36WuQsLUuPinGP27NkAgLKyMvvQjAErn7Fh5TM2rHzGh5XR2BhLPmeraSmcHc1ZWFhYWFhMMSxxWVhYWFgUFAqWuFKpFDZt2mQXLeeBlc/YsPIZG1Y+48PKaGxcTPkUZHCGhYWFhcXli4LVuCwsLCwsLk9Y4rKwsLCwKChY4rKwsLCwKChY4rKwsLCwKCgUJHE99dRT+NznPoeioiI0NDTkFKa8XPDYY4+BySJ16u/aa6/V+4eHh9Ha2oqZM2di2rRp+Na3vqWLdl6qeP3113HLLbegtrYWjDH8+c9/Du0nIjz66KOoqalBcXExmpqa8N5774WO+fTTT7F69WqUlZWhoqICP/jBD3Dq1KlJ7MXFw3jyufvuu3OeqRUrVoSOuVTls3nzZtxwww2YPn06qqqq8M1vfhOdnZ2hYybyTvX09GDVqlUoKSlBVVUVHnzwQWSzWVwKmIiMbr755pxn6J577gkdc74yKjji+uMf/4j7778fmzZtwj/+8Q8sWbIEy5cvx/Hjx6e6aVOCL37xi+jt7dV/e/bs0fvuu+8+/OUvf8ELL7yA9vZ2fPzxx7j99tunsLUXH0NDQ1iyZAmeeuqp2P1btmzBb37zGzz99NPYu3cvSktLsXz5cgwPD+tjVq9ejcOHD2Pnzp260ve6desmqwsXFePJBwBWrFgReqa2bdsW2n+pyqe9vR2tra146623sHPnTmQyGTQ3N2NoaEgfM9475XkeVq1ahdHRUbz55pv4/e9/j61bt+LRRx+dii5dcExERgCwdu3a0DO0ZcsWve+CyIgKDDfeeCO1trbq757nUW1tLW3evHkKWzU12LRpEy1ZsiR238mTJymRSNALL7ygt/3rX/8iANTR0TFJLZxaAKDt27fr777vUzqdpl/96ld628mTJymVStG2bduIiOjIkSMEgN5++219zMsvv0yMMfroo48mre2Tgah8iIjWrFlDt956a95zLif5HD9+nABQe3s7EU3snfrrX/9KnHPq6+vTx/zud7+jsrIyGhkZmdwOTAKiMiIi+vrXv04/+clP8p5zIWRUUBrX6Ogo9u3bh6amJr2Nc46mpiZ0dHRMYcumDu+99x5qa2sxb948rF69Gj09PQCAffv2IZPJhGR17bXXYs6cOZetrI4ePYq+vr6QTMrLy9HQ0KBl0tHRgYqKCnzlK1/RxzQ1NYFzjr179056m6cCbW1tqKqqwoIFC9DS0oITJ07ofZeTfPr7+wEAM2bMADCxd6qjowP19fWorq7WxyxfvhwDAwM4fPjwJLZ+chCVkcJzzz2HyspKLFq0CBs3bsTp06f1vgsho4JKsvvJJ5/A87xQhwGguroa77777hS1aurQ0NCArVu3YsGCBejt7cXjjz+Or33tazh06BD6+vqQTCZRUVEROqe6uhp9fX1T0+Aphup33POj9vX19aGqqiq033VdzJgx47KQ24oVK3D77bejrq4O3d3deOihh7By5Up0dHTAcZzLRj6+7+Pee+/FTTfdhEWLFgHAhN6pvr6+2OdL7buUECcjALjrrrswd+5c1NbW4uDBg9iwYQM6Ozvx4osvArgwMioo4rIIY+XKlfrz4sWL0dDQgLlz5+JPf/oTiovHLiZoYRGHb3/72/pzfX09Fi9ejPnz56OtrQ3Lli2bwpZNLlpbW3Ho0KGQz9gijHwyMv2d9fX1qKmpwbJly9Dd3Y358+dfkHsXlKmwsrISjuPkRPEcO3YM6XR6ilr1v4OKigp8/vOfR1dXF9LpNEZHR3Hy5MnQMZezrFS/x3p+0ul0TqBPNpvFp59+elnK
bd68eaisrERXVxeAy0M+69evx0svvYTdu3fjyiuv1Nsn8k6l0+nY50vtu1SQT0ZxaGhoAIDQM3S+Mioo4komk7j++uvx2muv6W2+7+O1115DY2PjFLbsfwOnTp1Cd3c3ampqcP311yORSIRk1dnZiZ6enstWVnV1dUin0yGZDAwMYO/evVomjY2NOHnyJPbt26eP2bVrF3zf1y/g5YQPP/wQJ06cQE1NDYBLWz5EhPXr12P79u3YtWsX6urqQvsn8k41Njbin//8Z4jcd+7cibKyMixcuHByOnIRMZ6M4nDgwAEACD1D5y2jcwwmmTL84Q9/oFQqRVu3bqUjR47QunXrqKKiIhShcrnggQceoLa2Njp69Ci98cYb1NTURJWVlXT8+HEiIrrnnntozpw5tGvXLnrnnXeosbGRGhsbp7jVFxeDg4O0f/9+2r9/PwGgX//617R//356//33iYjoySefpIqKCtqxYwcdPHiQbr31Vqqrq6MzZ87oa6xYsYKuu+462rt3L+3Zs4euueYauvPOO6eqSxcUY8lncHCQfvrTn1JHRwcdPXqUXn31Vfryl79M11xzDQ0PD+trXKryaWlpofLycmpra6Pe3l79d/r0aX3MeO9UNpulRYsWUXNzMx04cIBeeeUVmjVrFm3cuHEqunTBMZ6Murq66IknnqB33nmHjh49Sjt27KB58+bR0qVL9TUuhIwKjriIiH7729/SnDlzKJlM0o033khvvfXWVDdpSnDHHXdQTU0NJZNJmj17Nt1xxx3U1dWl9585c4Z+9KMf0RVXXEElJSV02223UW9v7xS2+OJj9+7dBCDnb82aNUQkQuIfeeQRqq6uplQqRcuWLaPOzs7QNU6cOEF33nknTZs2jcrKyuj73/8+DQ4OTkFvLjzGks/p06epubmZZs2aRYlEgubOnUtr167NmRReqvKJkwsAeuaZZ/QxE3mn/vOf/9DKlSupuLiYKisr6YEHHqBMJjPJvbk4GE9GPT09tHTpUpoxYwalUim6+uqr6cEHH6T+/v7Qdc5XRrasiYWFhYVFQaGgfFwWFhYWFhaWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgr/DyFEGMryrtmOAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.imshow(ugos[30],cmap='RdBu_r')\n", + "plt.clim(-1,1)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "fbb2ad80-31bd-4d96-bf32-c0766477edb7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAGdCAYAAADuR1K7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACpSklEQVR4nO29eZwcdZ3//6q+p+c+MjMZMrkxByQcQUIAFZcsCbAKu6wrbhTMIvxAsoJxAVEOBRUUl0UUZUVQ+ArisguIVyAGAyIhkUCAQBISch8zk8zVM93Td/3+6Pp86lPVVd3V13R1z/v5eMwjk56a7uqeOl6f9/v1fr8lWZZlEARBEARBVAmOcu8AQRAEQRBEMSFxQxAEQRBEVUHihiAIgiCIqoLEDUEQBEEQVQWJG4IgCIIgqgoSNwRBEARBVBUkbgiCIAiCqCpI3BAEQRAEUVW4yr0D5SCZTOLw4cOor6+HJEnl3h2CIAiCICwgyzJGRkbQ1dUFh8M8PjMhxc3hw4fR3d1d7t0gCIIgCCIPDhw4gClTppj+fEKKm/r6egCpD6ehoaHMe0MQBEEQhBUCgQC6u7v5fdyMCSluWCqqoaGBxA1BEARBVBjZLCVkKCYIgiAIoqogcUMQBEEQRFVB4oYgCIIgiKqCxA1BEARBEFUFiRuCIAiCIKoKEjcEQRAEQVQVJG4IgiAIgqgqSNwQBEEQBFFVkLghCIIgCKKqIHFDEARBEERVQeKGIAiCIIiqgsQNQRAEQRBVBYkbIm9e292P3719uNy7QRAEQRAaJuRUcKJwZFnGpT99DQAwpdmPk7ubyrtDBEEQBKFAkRsiL4bHYvz7Vz84VsY9IQiCIAgtJG6IvDg8FObf7zsWKuOeEARBEIQWEjdEXvQExvj3gXAsw5YEQRAEMb6QuCHyojcQ4d+TuCEIgiDsBIkbIi9GBEETGIuXcU8IgiAIQguJGyIvRsOqoKHIDUEQBGEnSNwQeTESUcWNWDlFEARBEOWGxA2RF0FB3ATGYpBluYx7QxAEQRAqJRU3L7/8Mj7xiU+gq6sLkiTh2Wefzfo769evx6mnngqv14vZs2fjF7/4Rdo2DzzwAKZPnw6fz4fFixdj06ZNxd95IiOjgrhJykAwmijj3hAEQRCESknFTTAYxEknnYQHHnjA0vZ79uzBhRdeiI9//OPYsmULrr/+enzhC1/A888/z7f59a9/jdWrV+P222/HG2+8gZNOOgnLli1DX19fqd4GYcBIWGsiDkXIVEwQBEHYA0kep3yCJEl45plncPHFF5tuc9NNN+H3v/89tm7dyh+79NJLMTQ0hDVr1gAAFi9ejA9/+MP40Y9+BABIJpPo7u7Gv//7v+OrX/2qpX0JBAJobGzE8PAwGhoa8n9TE5h/+vFf8cb+If7/P//HOZjRVgsAeO9wAL94dQ9uWDYXk+q9ZdpDgiAIotqwev+2ledmw4YNWLp0qeaxZcuWYcOGDQCAaDSKzZs3a7ZxOBxYunQp38aISCSCQCCg+SIKY1QXqWEeHFmWccH9f8H/vH4QP16/qxy
7RhBlYTQSRyRO6VmCsAO2Ejc9PT3o6OjQPNbR0YFAIICxsTEcO3YMiUTCcJuenh7T573rrrvQ2NjIv7q7u0uy/xOJkM5jw8TN0VG1ud/hoTEQxETgzzv6cModL+D8+/6CeCJZ7t0hiAmPrcRNqbj55psxPDzMvw4cOFDuXap4IvHUBdzlkACoYufYSJRvo4/uEES1suadHsQSMnYfC+K9IxQZJohy4yr3Doh0dnait7dX81hvby8aGhpQU1MDp9MJp9NpuE1nZ6fp83q9Xni95P0oJpFYSsw013pwdCSCYDQlZPqDauTmwABFboiJwfbeEf795n2DWDilqXw7QxCEvSI3S5Yswbp16zSPrV27FkuWLAEAeDweLFq0SLNNMpnEunXr+DbE+BBWIjettR4AQCiSEjv9o2rkZjAYTf9FgqgykkkZ7/eo4ubgIIl6gig3JRU3o6Oj2LJlC7Zs2QIgVeq9ZcsW7N+/H0AqXXTZZZfx7a+++mrs3r0bN954I7Zv344f//jH+J//+R98+ctf5tusXr0aDz30EB599FFs27YN11xzDYLBIFauXFnKt0IIyLKMqCJuWhRxw1JQxwTPzWg0jmSSmvsR1c3+gRDGYqoHrTcQLuPeEAQBlDgt9frrr+PjH/84///q1asBAJdffjl+8Ytf4MiRI1zoAMCMGTPw+9//Hl/+8pfxgx/8AFOmTMHPfvYzLFu2jG/z6U9/GkePHsVtt92Gnp4enHzyyVizZk2ayZgoHcxvA6TSUgAQUtJSgyE1WiPLQDAaR73PPb47SBDjRDSexF92HtU81heImGxNEMR4UVJxc84552Rsy2/Ufficc87Bm2++mfF5V61ahVWrVhW6e0SeRGKquGFpKdahWN/cbzRC4oaoXlY98QZeeC/lATy+vQ47+0bRO0KRG4IoN7by3BCVAevl4ZCAxpqUcGGl4KM6caMXOwRRLbzfO8KFDQBcduZ0AMCxEYrcEES5IXFD5AxLS3ldTtR6U8G/oGIo1pd/k7ghqpV3Dw/z7797yQKcf2KqYjMYTSBBXjOCKCskboicCSvmSa/bgVqPE4DquWEl4QzqdUNUKzt7RwEAnz1jKj794amo96lZfjruCaK8kLghcoZFbnxC5IZdzEcj2s7FI+HY+O4cQYwTu48GAQCzJtUBSEUyPa7UJZWOe4IoLyRuiJxhnhuv2wG/JyVuWIfiUeWiXuNORXTGotZm7YzT/FaCKBqHh1P9bI5rquGPNSjRG0rHEkR5IXFD5Ew4xjw3DtR6UyKGGYqZ94b1vwnHs8/Z+dKv3sSSu17EWweGSrC3BFEajgynqqK6BHFTp4tkEgRRHkjcEDnDIjc+t2AoZp4b5aLeVpcSN5FY5sjNwcEQnnvrMHoCYXzr9++VapcJoqhE40nesLKz0ccfZ20PKC1FEOWFxA2RMxExcsPSUpEEZFnmIodFbiJZIjev7urn37+5f4iLI4KwM30jYcgy4HE6eK8nANxUTGkpgigvJG6InAkzz43LCb9SLRWMxjEWS4BVwLbUpgaVZhM324WZPPGkjD3HgiXYY4IoLmyGWmudB5Ik8ceZuAmQuCGIskLihsgZFrnxuR3cYxCOJREYS13QJQlo9ruVbTOnpXb0BjT/39tP4oawPwPKUNgWIWoDqGkpfTNLgiDGFxI3RM6ITfz8iqEYAI4qnVlrPS74lGqpbJGbAwOpipOpLX4AwF6K3BAVQL+puGFpKfLcEEQ5IXFD5Axv4udywON0wOVIheX7lJk6tV4nvEq/D2Y+NkKWZfQoE5QXTWsGAP5/grAzA8GUkG/VixsveW4Iwg6QuCFyhkdu3E5IksR9N2LkxutWxE3MPHIzFIohqjzXvMn1ANRwP0HYGTVy49U8TtVSBGEPSNwQOcOb+CnRGea76WPixqumpcIZIjcsStNS60FHQ6qclsQNUQkMjDJxo514T9VSBGEPSNxUKC+824Mf/GlnWTr78iZ+SnTGz8VNSqz4PUJ
aKkPkhkV62uu9aFVWwCRuiEpgIFvkhloaEERZcWXfhLAbsizjqv+3GQCweGYLzpjZOq6vz5v4uVLRGTY8sy+QEit1Xhe8ruyG4sFQ6gbR7PdwY+ZAkML5hP3JbigmcUMQ5YQiNxUIi3gAwGAZIh0RXeSm1iAtZcVQPCjcINhNYjAUpTlTE5A/vdeLW5/dmvF4sRNMmLfWacUNG0cSipK4IYhyQpGbCuT93lH+faAMxkWxFBwAH57JDcVep+q5yZCWGgil9r251s1XvImkjLFYgj8nMTH4wmOvAwCOa67B1R+bVea9yY7qudGKmxp36ri1OjCWIIjSQJGbCkQslxajOOMFKwX38ciNkpZipeAea5GbISEt5fc4wRq90tDBicVwSBXo7xwaLuOeWCMaT3JPTYtfJ26UFC2JG4IoLyRuKhBx/lI5xI1Z5CaWSKWT/F6hFDyD54aZMpv9qRb24pwqYuKwo1cdwdFXAX2OxGhpQ422Woq1RRjL0pmbIIjSQuKmAgkK+fyBUDnSUvpScKfm53Vep2oozpCWGh5L7XujcoNgESCK3EwsWEM8ABgsw/GcKwHluK33uuB0SJqfsXRsPCnzHk4EQYw/JG4qEDFyEy7DCjHMZ0tpIzcMv8fFU1aZ0lJsuCAXN8rz0GTwicWQIGjKYZDPFXbc6qM2AFDjVoU+RW8IonyQuKlAgkLaphziRk1LaT03DLEUPJOheIStgBUzMau6ClKlyYSCRfCAVBVSMmnvarmA7rgV8bjUcSTkuyGI8kHipgIR0zaZ0j6lQp+WYqKEoS8FNyvtZt6FhrS0FN0UJhJDgrhJylqxY0f0x60eFr2hyA1BlA8SNxWI2EMj03iDUhHRpaVqdWmpWo/quUnKKf+BEfrwPhvjQGmpiYVezPTbPDUVGFOOW5+JuPFQrxuCKDckbiqQ0bKnpZTIDRu/4NGmpWqFaqnU9unRpXAswQ2XaWkpEjcTimGdidjuhnI1cmPci4mJm3KcmwRBpCBxU4FoDcVlSEvFtKXgdbq0VGudh6elAOOLPLtBSBJQp0R+eDifvAoTCn0jypDdxY0SaTKN3PDjmKqlCKJckLipQMSbf1mqpeLaJn7Nui6tbXVeSJIEj8u81w0L7dd7XXAoBkwrk8SJ6kN/DAdtLm6zem4oLUUQZYfETQUillePt7hJJGXerI9FbiY3+vjPPS4HFynqZPD0fRwJs4oT9QbBUlnliEYR5SOkEzN2T0uyoZgNBtVSABmKCcIOkLipQMRISHicG4WJjcmYeGkUVrBiGS8TOYaRG4NeITVu8ipMRJgIaPKnjgW7twLgaSmTyI2fRjAQRNkhcVOBiGIhGk8iUYS+IM+9dRi/2XIo63ai8GDiRpLULq0+oYkZ+7mh54b7FtTVr5Vhm0T1wURAW50XgP3Hb3BhbuK58VHkhigBdu//ZDdI3FQg+jRPpi7AVjgyPIYv/epNXPfkFhwcDGV+bUVYuRwSXE718Pn8mdNR73XhoctO44+p4iZdrLDQvpiW8rHtyXMzoWAioK0u5d2qnMiNcVrKzz03dBwTxWHdtl6c9M0X8OBLH5R7VyoGEjcViD7NU2ik48/bjwrf92V5bW0DP8Y3PnkCNt/691gyq5U/xjw50YRRWir9BsEjN3RTmFCEdJEbu3tu+LGbpVqK0qtEsbji0dcxEonj7j9uL/euVAzjIm4eeOABTJ8+HT6fD4sXL8amTZtMtz3nnHMgSVLa14UXXsi3+fznP5/28+XLl4/HWyk7siyniZtCB/TtGwjy73f2jWbcVj9XSsSjEzzs/0b7Z1ROS9VSE4+EMGCSixubi1tW6ddoWi2lTLe3+fsgKgO9SO4bCZdpTyoL47hqEfn1r3+N1atX48EHH8TixYtx3333YdmyZdixYwfa29vTtn/66acRjaodSvv7+3HSSSfhU5/6lGa75cuX4+c//zn/v9frLd2bsBFGUZCYwWO5IDZROzCQLS1lHLkxQhzBoCcQTp/
PQ56biYfoS2FpKTv3uYnGk3yfs/a5ocgNUQT29gc1/z8wEEJ7vc9ka4JR8sjNvffeiyuvvBIrV67E/Pnz8eCDD8Lv9+ORRx4x3L6lpQWdnZ38a+3atfD7/Wnixuv1arZrbm4u9VuxBUbVSoWKG3Eq88HBsYzb8qGZBpEbPWwbo/lXwwarX5/b3IBMVCfMTCxJar8kO0duRoSGg3UmpeBULUUUk3392gXn4SGK3FihpOImGo1i8+bNWLp0qfqCDgeWLl2KDRs2WHqOhx9+GJdeeilqa2s1j69fvx7t7e2YM2cOrrnmGvT395s+RyQSQSAQ0HxVKmJKio0rYH1n8mVoTI2UHRwcMx10CajCw0rkxqMYjo2iTWyekFbckFdhosEEQI3byTtd27n5HauUqve64HRIhtv4SNwQRWRQN2utZ5jEjRVKKm6OHTuGRCKBjo4OzeMdHR3o6enJ+vubNm3C1q1b8YUvfEHz+PLly/HYY49h3bp1+O53v4uXXnoJ559/PhIJ44vJXXfdhcbGRv7V3d2d/5sqM0zceFwOuJ2pi2sxIzdjsQQGMgwu5KMXLEVuzJv4ZRY3lJaaKDB/VY3byQew2nkqfLYeN4CalgqRSCeKwKBu9trh4czRdSJFyT03hfDwww9jwYIFOP300zWPX3rppfz7BQsWYOHChZg1axbWr1+Pc889N+15br75ZqxevZr/PxAIVKzAiQiRE3eGyEgu6KcyHxwcQ2udsYeJp6WseG4y7F/AUNyYe3SI6oRF6XxuJ/xeRRTY2HNj5BXTw9JSVPVHFIPBkHaxOaQTO4QxJY3ctLW1wel0ore3V/N4b28vOjs7M/5uMBjEk08+iSuuuCLr68ycORNtbW3YtWuX4c+9Xi8aGho0X5WKKi6cPO0TK7BaigkNVq2SyXcj3oyyoUZuMqSl/Kq44WJtnLsuE+VDFMu1FVBlxCqlrEVu7CvSiMqBpaW6W2oAqNdrIjMlFTcejweLFi3CunXr+GPJZBLr1q3DkiVLMv7uU089hUgkgs9+9rNZX+fgwYPo7+/H5MmTC95nuyPeDJgYiBfQuVKWZT7C4fj2OgDAgQyN/HKJ3Jh5bmRZNkxLcbFWoIeIqBy4h8vtRK0SubFzE79sPW4AdXAmeW6IYsDSUtNbU77TQJjEjRVKXi21evVqPPTQQ3j00Uexbds2XHPNNQgGg1i5ciUA4LLLLsPNN9+c9nsPP/wwLr74YrS2tmoeHx0dxQ033IDXXnsNe/fuxbp163DRRRdh9uzZWLZsWanfTtnRpKVcKc9NIWmpWELm4xuO70iJm0xdinMqBTeZLRWMJvhrasSNqzhpNqJy4B4ulwN+JXJj5yZ+2boTA2ITPzqOicIZHmORG7/yfxI3Vii55+bTn/40jh49ittuuw09PT04+eSTsWbNGm4y3r9/PxwO7Y1yx44deOWVV/DCCy+kPZ/T6cTbb7+NRx99FENDQ+jq6sJ5552HO++8c0L0umFRDY/LAZej8LSU2DBvdjsTN5nSUuZN/PSYTQVnJ6fbKfEbQer/qe0TyZTgMqtGIaqHsCCWxeq/aDyZ1hTSDliJ3KjjF+wr0ojKgRnsj2tiaSk6rqwwLobiVatWYdWqVYY/W79+fdpjc+bMMS1HrqmpwfPPP1/M3asoWGWUx+UoShqHpQUkCZjZll3c5BK5MUtLsaaBjTVuzdBN8WYWSyThdGQXUERlExHEMhMFQCp643F5yrVbpljx3NDgTKKYMJE8uTHVuI8iN9aw39KIyAgTCm6nmpYqpBSc3Vxq3E50KidPpj4KoqE5G2aG4mGTclpW2g5QamqioPeQMUFsV2GgRm4sVEvFkjTJmSgYlqad3JiK3IzFElR0YQESNxVGjIsbiadxChE3Y0L1ExM3o5E4Rk18D2q1lPXITUQfuTEwEwOAW0hPFloBRlQG+uo7dlzZtWLKUp8bIQJFc9KIQmHX4o4G1XZBZvXskLipMGJi5KaIaSmfy4E6r4t
3ie0NGEdvcovcGI9fYDeIJt0NwuGQhMaEtOKdCOir75ip2K4Xb9ahOJPnxiecG3YVaURlkEjK3OfYWOOGS/EhUpuB7JC4qTBiccVQ7CxOh2K9QZitDnpNUlNqh+ICPDcmkRuAet1MNNRScCZu7O1XsVIt5XBIPAJlV5FGVAZiW4Rar4vaDOQAiZsKI2oYuSlOWgoAOhoU341J5IaF2X2WSsGNq6XYLKsmf7phtFhdl4nKgEVuWLSDHYd2rTSyUi0F0GRwojiElEopp0OC1+VQG0SSuMkKiZsKg0U03EUav6D30HQq4qY3EDHcPqfZUi7jPjd9ynNPqk8v3fcUadI5URmYRm5sevEOGEyzN8JfAd2WCfvD/Da1HickSRLM6nRcZYPETYVhZCiOF8NzwyI3jUzcmHlucigFdxmnmI6OKuLGYH6Vh9JSEwp95KbGxmmpaDzJ9ytb5Ka2AuZkEfaHRTBZDygfRW4sQ+KmwuB9bpwOeIriudGKGxa5MSsHj+TTxE9XMcIjNw3p4qZYk86JyoCLZSVyY+ew+4jQ9r4uQyk4oEZugjZ8H0TlwCKY7LywuyfNTpC4qTCiSpTG7XTAVYS0VFRXrcINxSNFjNzo9q9vJEPkhkYwTCjU8Qu6i7cNRQGrlKr3urJ2z+aRG5t6h4jKgM39YzYAMhRbh8RNhWFYCh7PPy3F0gLsuSbVpyI3fWaem1xKwfn4BVWohGMJ9AdTz806bopQtdTEgoll5vmqYaXgNlyZWulxw2CRG7N+UQRhBb0nssZt3/PDbpC4qTBi3FAsFSUtJc6qAoB2xeR7dCRiOAIjlyZ+XoMozMHBEGQZqPO60FJrXi1FfW4mBmGTyI0d01KsUqo+S0oKSBlAAbXahSDyQe1Dpo3c2PH8sBskbioMJmS8QuQmniw8LcXEDatgiiaShjNMcovcpDfx29efmjg+tcWvmSvFoGqpiUVa5IaVUNswnTPK0lJWxI2XeW7s9z6IyoGPx1FEjd9N1VJWIXFTYYieGzevRso/yhFNpE4SVqXkczt5mSvzxohwcZND5EY0FO8fUMWNEVQtNbHQR27sXC3Fy3K91sUNrbCJQginpW3Jy2UVEjcVBvfcuIrTxE8fuQFUU7GR70YfJs0Ee86kDMSVfdx7LAgAmN5Wa/g7rFqKDMUTA71B3c5pqWAO4oa9jyB5bogCMEtLjUXp+pgNEjcVhtZQXETPjVM9FNqZqdigYiq3yI0qgNjv7VXSUtNbTSI3lJaaUET01SBu+1aDsLLuOo8Vzw1FbojCYSIm7fyg2VJZIXFTYah9boozFTxiELlhpmJ9WiqeSCKRTImhXErBATVCtK8/FbmZ1moWuaG01ESCdyh2acPudkxLjYRziNx4KXJDFI4+LWXnVgl2g8RNhcH8NcWaCm6UlmLN9fRdisOC4LDSxM/pkPgU20g8iVgiiYODYwCAGSZpKU8RBBtROfAOxbxJmX0jHkyo1HmzH/u1HjIUE4Wjb7JKHYqtQ+Kmwih2WioqdDxmqGkpbeRGHIApbp8JcQTD4aExxJMyfG4Hjw5l2p6oftIiN3ZOS+XlubHf+yAqB2a49+mbXNowsmk3SNxUGKKhuBhRjpgwiJPRVpfqP9M/qhU37ITyuBxwZOnQymA3rXA8wf02U1v8pr+vDgOlPjfVjizLaR4uO6elWLVUttELQKqPE0BVLURhRNKa+NlX/NsNEjcVhui5cRVBCESFvjmMNmUsQv9oVLMtO6HY6sEK4snYp6S5JjfWmG5PhuKJQzSRBOsT6dPNzrFj2J2lmOoseW6UtBRFbogCYJ4bJvrtLP7tRvazlLAVUWFcAptvEysghWPkuWHi5pgucsNuOLUWqkUYYkfN/mBKLLXWpXcmZpCheOIQEf7G6Wkp+0U8RiPWj/9a6kdCFAF9WooiN9ahyE2FITbx85SgQzGgio/BUIz3pwHUlWtNDpEbP58VFMcxxcPTZjAwk1GMkRJEZcC6r0qS6uESPQV
G4z/KSU6eGy9NBScKh4kYL6+WotlSViFxU2FoDMWuIlRLCc/HaPZ7wCwxA0E1NZVXWkpoOsUjNwYzpRiUlpo4iGZiNoqDHS9JWRvZsQNqtZT1yE1UqRIkiHxQS8FZWip1fbRj2tZukLipMLjnxiUVJYVjFLlxOiQ+1PKY4LsJFeC5CUXjPM3VmiFyw96T3W5sRPHRl4ED6vEC2C/0Psr73GQ//v1C6oqGZxL5wtNSulJwcaQNYQyJmwojJnhuWA+ZYpSCs7JyRmutYioOqr4b5h/w5+C5EdMMzKDcZsFzQ1PBqx99GTgAuIR0q51C77Is52Qo9rjUVg2j5Lsh8oRXSynnCB9GHE/aLm1rN0jcVBgaz00RUjhxgz43ANBWzyI3orjROvetoDUUW/DcsPdEkZuqx2zCfI0NK6bGYgkozbktlYIDwmRw6lJM5Im+iR/z3sgyLQCzQeKmwlDTUmqH4ngBBzk7QVw6ccMjNwZpqdqcDMXKjSoS58+VqVqKTwUnn0LVo+/hwWCpqbCNIjesx41D0qbOMsGqqkZJ3BB5wrrCM8EvRjkpNZUZEjcVRkyItBRjgjartHLpmuoxAXJ0tLC0FLsR9ATCiCtL35YMhmK3i6qlJgpmkRs79roJCmXgzPycjTqK3BAFop8KLkbYy+1LfHF7L77//A4+b9BuUJ+bCkOslmLX2ILEjZDmEpncmBrBcHhInS/FLvC5VUulDrEDA6mZUvU+V9rNTMTjVKtMiOomEjeJ3NiwR0wuZeCMWhqeSRSALMtCWip1jkiSBK/LgUg8WVZxc2w0gn/7xesAgFOnNeHv5naUbV/MoMhNBSHLMk8juYWp4LKMvNUzE0sunaG4u9kPADgwEOKPBcZiAIDGGrfl52cprP3K80zK4LcBUJRoFFEZsEqQNM+NjdNSViqlGEwIjVK1FJEH0USS+7y8QiqUj7Qp4/nx+t4B/v32npGy7UcmSNxUEKKBzO1yaARJvmkclipyO7SHQndLStwcHFTFzVAe4qZB2fbQUCpyk8lvA6gzrihyU/2wyI3oIwDsaSjOpccNg9JSRCEw8Q9oo5tM6ERi5btG9gZUu8IOEjdEoYgCxuN0aARJPM/ITZwbio0jN8dGozw9MJyHuNFvy4zKZniKYJImKgN9Dw+GHT03LHKTi99MjdyQuCFyhxnuxQ7egLoYKKehuG9EtSv0DIczbFk+xkXcPPDAA5g+fTp8Ph8WL16MTZs2mW77i1/8ApIkab58Pp9mG1mWcdttt2Hy5MmoqanB0qVLsXPnzlK/jbIjihu3Uxu5iecZuYkljdNSjX436pWS14ODqagLFzf+HCI3Pp24yRa5YX1uChgpQVQGppEbG6aluKGYIjfEOCHOlRJN7Kq4Kd81sk+I3AyGohm2LB8lFze//vWvsXr1atx+++144403cNJJJ2HZsmXo6+sz/Z2GhgYcOXKEf+3bt0/z8+9973u4//778eCDD2Ljxo2ora3FsmXLEA7bU0EWC+ZDcUipLsJihVM+PQ8SSZlPZdanpQA1esNSU0OhfNJS2ptBpu7EgOq5oWqp6od7btz6PjepY8ZOkZsQb+CXi+eGDMVE/oRNDPdiI79y0TeiipuBYKxs+5GJkoube++9F1deeSVWrlyJ+fPn48EHH4Tf78cjjzxi+juSJKGzs5N/dXSoTmxZlnHffffhlltuwUUXXYSFCxfisccew+HDh/Hss8+W+u2UlZiuskmSVIGTz/BMUUA4nenlrd0tNQCA/f0hyLKcl6FYH7nJ1J0YECI3cUpLVTtmkRs7p6Vyq5YiQzGRPyxyqe+rxBr5RcoY2WRRfAAYCkVt2S25pOImGo1i8+bNWLp0qfqCDgeWLl2KDRs2mP7e6Ogopk2bhu7ublx00UV49913+c/27NmDnp4ezXM2NjZi8eLFGZ+zGuBzoIT8K0sn5eNRESusjCI3M9rqAAAfHA1iJBLnkaNMfWr0NOiEUGeDz2RLZT/4+AWK3FQ
7Y1Fjz40901JkKCbGFzNPmh3SUqKPLJ6UMWLDY7yk4ubYsWNIJBKayAsAdHR0oKenx/B35syZg0ceeQS/+c1v8Mtf/hLJZBJnnnkmDh48CAD893J5zkgkgkAgoPmqRHiPG2Gly0RJPmJAFER6zw0AzOlMiZsdPSPoVUxjDT5XTqbKBp8LYn/AyY01GbentNTEwSzsbsc+N6N5eG5Yh+Kgjd4HUTnw2Wtp4qb8aSk2RJYxMGo/343tqqWWLFmCyy67DCeffDI+9rGP4emnn8akSZPw3//933k/51133YXGxkb+1d3dXcQ9Hj+ifGimqhZ45CaPainRtKvvUAwAczsbAABvHRzCJqWvQWdj5siLHpfTgfZ69Xc6GrN5bmhw5kRBPzeHYce0VJBXS+XT54bEDZE7YybjSexQLaU/pu14jJdU3LS1tcHpdKK3t1fzeG9vLzo7Oy09h9vtximnnIJdu3YBAP+9XJ7z5ptvxvDwMP86cOBArm/FFojdiRmuAtI4vAzcIRm2lJ/bWY8TuhoQiSfx9We2AgA6sqSVjAgLJ2FbllJwSktNHCK8GqQSqqUoLUWML/rRCwx3mYcLJ5MyFzMNPvsK+JKKG4/Hg0WLFmHdunX8sWQyiXXr1mHJkiWWniORSOCdd97B5MmTAQAzZsxAZ2en5jkDgQA2btxo+pxerxcNDQ2ar0qERTNEz43bkb/nxqw7MUOSJDzwr6diUr0qSI5rypxWMuLs2W0AgOmtfjgMIkQibiESZUeTGlE8zCI3dmzil5+hmFVL2ed9EJUDF/+6yI2nzNFtMc3KbAb6NJUdKPlsqdWrV+Pyyy/HaaedhtNPPx333XcfgsEgVq5cCQC47LLLcNxxx+Guu+4CANxxxx0444wzMHv2bAwNDeGee+7Bvn378IUvfAFA6oZ7/fXX41vf+haOP/54zJgxA7feeiu6urpw8cUXl/rtlJVMkZt8qqXMuhOLTG+rxWcXT8N//el9AMCCKY05v87XL5yHqS1+rDxrRtZtxenksYQMj8vakEKi8lA9N/q0lP1KwYPR/CM3dlzVEvbH7Pwo94gadjy7HFKqb1mvPX1lJRc3n/70p3H06FHcdttt6Onpwcknn4w1a9ZwQ/D+/fvhEG6ug4ODuPLKK9HT04Pm5mYsWrQIr776KubPn8+3ufHGGxEMBnHVVVdhaGgIZ599NtasWZPW7K/aYAezRzQUswM9j9LpeJbIDePS07u5uFk8oyXn15ncWIMbl8+1tK1HI26SmvdKVBdhk5WpHdNSoXwMxUJaSpZly9PECQIwLwVn18Rype55itbn4gJ+ZCJGbgBg1apVWLVqleHP1q9fr/n/f/3Xf+G//uu/Mj6fJEm44447cMcddxRrFyuCmIGh2F1A5CbGRy9kFhAdDT784UsfQe9IGLPb63N+nVxwa7ouU1qqmjGrBrF3Wip3Q3E8KSMST6atwAkiE2ZNLsvtS+SVgx6XrX1l4yJuiOKgb+IHFNbnhgkidxYfDADM72rAfJTeq+QU9oUmg1c3ZitTO14w2b7U5jJbSqisCkbiJG6InAibVEux6Ha5hguPKYuOGo8TdRPVUEwUl5hBWspVQJ8bJpaMuhOXC0mSBMMciZtqxqxJGRvZEQjHbGEqTyZlBKO5p6VcTge/MZGpmMiVMRPDfbnbZYiLEjv7ykjcVBBRA0Oxu4A+N8xzk8lQXA7cBUSjiMohYtLEj43siCVkLoDKSUjw/uRiKBa3t+PFn7A32Tw35Ypss3Rxjdup8ZXZDXvd1YiMqNVSQhO/AiI3bPxCNkPxeMM8QJSWqm7Eqccifo+TpycD4fIP5WMXboeULsSywS/+NqwmIewNS//oG0eq8/fKlJaKqWkpO4yCMIPETQWhGoqL47mJMXFju8gNpaUmAmZ9biRJ4s3BAmPlFzdij5tcK56YR4ciN0SumKelylsKPiZElJjZOWKDCKsee93ViIwYNvErpM+NQSTIDngoLVX1xBJJnko
1ioawgavDNhA3+XQnZtjRHE1UBmOKYLBbKXg4mh65CZdxFIQZJG4qCCPPDZsJlY+5zGop+HjjLnNOmSg9Yg8boyoi5ruxR1rKOD1gBbVLMYkbIjfGosbzzNy8Wqo4iz9ZlrFuWy9e2XnM0vYhQdz4KHJDFAM+ONNl0Ocmn9lSSrTHaGhmOVEFm/1OGKI4iEZhr0GjxkYlchMYK78oKChyo4g01htk99HRvM5VYuLB01I6cVPsatLn3+3FFY++js89shEHBkKW96vG7bTFEE8zSNxUEMbjFwqplkrvm2MHyHNT/fAGfi6HoY9FLAcvN8wMnEsZOKNe8A49sXE//u4/X8J/v7y7qPtHVCdjUeNqKXeR01Kv7x0AAMgy8H7vSNbtxSouH+8mbr9rtb3uakRGeJ8bTVoq/54H2QZnlguWUybPTfUSMZmbw+BpKRt4bvIZmsloFLxDX3vmHQDAPc/vKN7OEVWLWbWUh4/cKY6gePdwgH+/t99C5MbAc0ORG6IgjDoUs8GS+aWl7FktxdJS5LmpXszmSjGqxVDcpLyPY6ORou4TUf2MmfS5KXZke7+QitrXH7S8X16XgyI3RHEwNhQrB3ohTfxsFrmhtFT1Y1YGzlBLwcvvueGzdHKYK8VgkZsdPdpwvx1EG2FvsnUojhYhsi3LMvpGwvz/Q6Hsx6UYdaU+N0RRMDIUq31uChi/YDND8XikpX68fheuffwNSwY6oviYNfBjsMiNLTw3RUhL7ewb1Tw+FIoWvmNE1ZJMqt25a/RpqSJ6bgZDMY2lwUpVH7sPeYTITSRGaSmiAPhBZdDnppAOxXYzFJc6LTUaieN7a3bg9+8cwaOv7i3JaxCZMRsKyGi0kbgZUfaB+YBygb0PPYPCCjmeSOLKx17HP/74rzgyPJbfThJVhdg3xrRDcRGuj72BsOb/IxbEDYvSeF0OitwQxYEdzF7N4MwC+tzYtBS81GkpMa/cH6QVdDlgF29vFkOxlTB5qRktxHPj9xg+LkZu3j0cwNr3evHm/iGs2dqT304SVQUz7QLp0c1iTgU/OqL1glmJ3KjiRq2WiiaSfLFsF0jcVBBiOJDhKqhDsb2b+JUqLbVPqAjQn9zE+BA26b7KaKvzArCHEXcknLrgs7LuXDiuqcbwcVG0HRxUozX7KU1KQGvadegWn8yWUJy0VEpkM8FkZUxI1CByIz5uF+x1VyMywtI0orhxO/IfVWBbQ3GJm/gdHCRxU26ypaXa6lMRj/7RKJJlXhEGuLjJIy3ldxuKIjFyc2hIPR7JA0YA5mXgQHEjN4NK5HpKc0qEW4vcqMJLFDdhm/luSNxUEBHuuVEPeBdP4VTf4MxSeW5Ev8NRG0QGJiLcpOsxjoa01qYiN/GkjKEyVxaNKp6bfNJSANDZ4OPfnzSlEYD2GDwkRG7EKA4xcTErAwfEtH3hop+dW8cp4oZFKTPBIzduB1xOB7c12M13Y6+7GpERo7SUm3corp7BmaVOS4kpgYFg+SMDE5FsFUgelwNNfnv0iCkkLQUAJ3Q18O8/PL0FABCKqjeRAeF4HKQqKgJq5EY/egFQr//FWPyxa+GUZj+AlEDJVnmrX2TbtZFffmcrURZ4KbggRlwFpKXUwZn2EjfFnp2iZ3hMewMJxRJ5r8qJ/FB7x5h/7u31XgyFYugNhPGhjvrx2rU0ChU33/zkiXA6HPj43EnY2ZsqCQ8JhtERoSLMDgZqovyELERuEkkZiaRcUCsPlh5laSkACEYTaKwxj3tEhMgNkOp3E4wmbNfIjyI3FYSR58ZVgBBQB2fa6zAodSm4/gZCE5vHH7Xrr3ljPGbGHe9UzWMb9uIXf90DIBXdZCmCfDw3QMp385//chL+YWEX91CMacSNevxF4knbeReI8SecwXMjLm4LXQCy9OikOi/YiLdsERh9SxKK3BAFY1QK7i7K4Ex7RW7GMy0FpCoEOkrySoQZoxaGUXa3pELl41lB1DcSxm2/eRcAcFJ3E2a01fK
fFSO6x25WYuRGPz9reCxm2rmZmBiYdScGtIvbWCJZ0LHCezjVuOFxOhCJJ7MalbmhWIjcAPYbwWCvJTuRkaiBobiQnjBqh2J7HQal7nOjb31PkZvxx0rX3+7m8Rc37x5Shwj+aVsvj6p4XQ7NTSVfahQDdShmHLkBaDQDoYpfw7SUQxQ3hS0Ag0p6uM7rstSQL5mU+Wt6lf47HptGbux1VyMykrHPTT6l4EmbGopLXArOut6y9JeV3g5EcbEyjHJOZ8pns2X/0HjsEgDgvSPChORjIcFvk19KSo+allKPObZ6ZmkB8t0QLDVplJZyOCQ1dV9ghRJvUOlzwaOIlUzPKVoF2H2IIjdEwRhWSzkKqJbipeA2EzesGiBe/LSULMv8xtqhlOiy1QsxflgxFC+a1gynQ8KhoTFL04qLgdj3aG9/UBi9UJwMfo0uLZVIyggq37NIFUVuCObJ0s+VYhQruj0itDmwErmJCAKGbW9Xzw2JmwoiktFQnH8TP9t1KC6g63I2xmIJMHtSe0OqlwqlpcYfK4biWq8Li2ekSqd//86Rcdkvsbnevv6QZmVbDPxuraFYjBp2NvqUx0jcTHQyeW4ANdpeSNGFLMv8+Kv3qeImU+Qmkkjtl0NSF8UUuSEKQpZl41Lwgvrc2NRQ7CxdWoqdzJKktvintNT4o4qbzOmeT57UBQB4bsvhku8ToO05MxqJ4/BwarBgvmXgevzMc6OIG9bvxuN0oEWZQ2WlkRpR3YQyVEsB4CmkQq6R4kKvzuuy5J1hkRuPywFJyaNS5IYoCDEy4xUNxYq5LJZHCsf2HYpLkJZiKahajwu1BmW5xPgwyg3FmSs9lp/YCbdTwvaeEezsHSn5fg3pmujtUl6zPosIs4qalkq9f3Y81nicXECRuCHCGfrcAICHLQALuEaKCz2/x8mH2EYyRGDEoZkMK79XDux1VyNMiRkYuQA1chMroEOx3Zr4lTItFRRuqiycarcVR7UTSyT5RTJbeXWT34OPHj8JAPDcW6WP3ug7BG87khI3jTVFNhQrNy9xhhAzLQfClJaa6GRNS/Euxflfu0bDqqlfkiR4LYy9MfJ9+pTvwza7jpK4qRDEPKjh+IW8PDcsLWWvw2A80lK1Fg10diKWSOL/bdiLQ0OVPX9I9DhlMhQzPnlyKjX1u7dL77sZCqaExVSlx87Ww8MA1EGehcLETSwhI5ZIIqhEcPxC5GaUIjcTnpBFQ3Eh0e1RXcUi61uTMS0lDM1ksN8jzw2RF0xNOx2Spt02SyllmwdiRIx3KLZn5KaQkKsZYgmyaoSz14rDjNufexe3/uZdfOlXb5Z7VwqCXVQ9LoclYf0RJXKz51iwpH8rWZZ5c8Hj2+sAqDcZNsizUMSbVSiaECI3LkpLEZxMpeBAcaqlxMgNYG3auJqWUs9b1netVK078oXETYWgb3nNUNNS1RS5UU7cEqSlRoVp1JUWuXli434AwOZ9g2Xek8IQG4dZodnvhk9ZHfYoBt9SEI4lISun0SxF3DDa6osjbjxOB1+cjEUTmshNg5KWGgnHMBSK4oV3e5Cgoa4TkrEMTfwANXpfiKAY0VUCqpEbK2kpdb88FqqsyoG97mqEKRGDXCcg+FPy6lBsT8+Nq4RpqaDQX8VbYZGbasGqmZghSRK6GlNzpg4Ply4lFxQa64ljFwCgrbY4aSlJkng5eCga11TF1AmRm3ue34Gr/t9m3PLsO0V5XaKyYMeFmeeGGYoLERTFi9yUtulqvoyLuHnggQcwffp0+Hw+LF68GJs2bTLd9qGHHsJHPvIRNDc3o7m5GUuXLk3b/vOf/zwkSdJ8LV++vNRvo6yoZeC6yE0BU8FZE79CpsqWAs+4pKWcFRW5kWXtZ5GPmLULQSF6ZpUuZYjmkaHSRW5CEVVosNdjtCsNH4uB2MgvFGGRGzUtNRqJ43ElSverTQeK9rpE5aCmpYzPEbcF8282mJhnxx2rgLISudGIm4kaufn1r3+N1atX4/b
bb8cbb7yBk046CcuWLUNfX5/h9uvXr8dnPvMZ/PnPf8aGDRvQ3d2N8847D4cOHdJst3z5chw5coR//epXvyr1WykrUYOhmUBhKRwW8p6QaSkhcmO3EkYjxnTRpf5g1GRL+2Nl9IKe1rpU5ERfzVRMgsIwz5m6yM20Vn/RXkesmArF0qulyHNDjGUpBXcX0LyVMaKP3FjpUKwYirVFLYULrVJQ8rvavffeiyuvvBIrV67E/Pnz8eCDD8Lv9+ORRx4x3P7xxx/HF7/4RZx88smYO3cufvaznyGZTGLdunWa7bxeLzo7O/lXc3Nzqd9KWYkZdCcGxLRUHn1uWFrKZpGbUlZLaQzFNi1hNELfkr+Sb4DsveRSXs1Wl4ESvm81ouTElGZt5KaYC4AaoZGfGC1i71FfDZck382Egx2LZtVSxfDcjOoaaVppxmfU52ZCRm6i0Sg2b96MpUuXqi/ocGDp0qXYsGGDpecIhUKIxWJoaWnRPL5+/Xq0t7djzpw5uOaaa9Df32/6HJFIBIFAQPNVaWQzFMeTclrqIhu2NxSXIi0VrczIjV7chKITTdyoZttSERQqlyRJwsVKCfqX/m52UV9HHJ7JDcVel2kX5GAF/62J3JFldd6YWXTTU9RqqdTxyAzFGQdnUloqxbFjx5BIJNDR0aF5vKOjAz09PZae46abbkJXV5dGIC1fvhyPPfYY1q1bh+9+97t46aWXcP755yNh0tDorrvuQmNjI//q7u7O/02VCaPmSYDaoRhQPTRWYU3y7GYoLtZQOCNYxEPsc1MJkRv9pOhQBXdVZuKmIQdx0zAOKZuQzuh89yULselr52L1eXOK+jp+jeeGdcxWq6X00GDXiUU4luSWAbOZZu5iGIp11VKspNtKWsprkJaym6G4OANTSsTdd9+NJ598EuvXr4fPpxr6Lr30Uv79ggULsHDhQsyaNQvr16/Hueeem/Y8N998M1avXs3/HwgEKk7gmFVLicIknpBhkqI1hOVr7Td+YTzSUkKH4gqI3ASqKHLDOvDmIm7UHjDjE7kBUpUqZtUqhVDjVsWNWgqeEtsuh5S2SEkN0iyeoZmwN5qxCFk8N4X4XNLSUqwUPNP4BeVnbFtAFToTynPT1tYGp9OJ3t5ezeO9vb3o7OzM+Lvf//73cffdd+OFF17AwoULM247c+ZMtLW1YdeuXYY/93q9aGho0HxVGuzAMUtLAbkbcFnFjf0GZxZuljNDUwpu04FvRugjNZW8mh8eS11U8/HclDJyM6YIDTMTZ7FgkZtwTG3iV+t1QpIkQ8FXyf4qInfEXlwOEz8kG79Q0GypsD5yY2H8gsF9qJQ2gkIoqbjxeDxYtGiRxgzMzMFLliwx/b3vfe97uPPOO7FmzRqcdtppWV/n4MGD6O/vx+TJk4uy33aEl4JnSkvlKAb44Ey7em5KPH5B7VBsrxWHEfpqqUoe9pmP56ZhHOYu8QoVExNnsRBLwcXIDQBMbkyP0NDU+omFlWrConhulNep149fyND3yyiDwPYlMpEiNwCwevVqPPTQQ3j00Uexbds2XHPNNQgGg1i5ciUA4LLLLsPNN9/Mt//ud7+LW2+9FY888gimT5+Onp4e9PT0YHR0FAAwOjqKG264Aa+99hr27t2LdevW4aKLLsLs2bOxbNmyUr+dsmFmKHY4JDBxn2vvEx65sWm1VD4m6Wzw3g4VNlsqLXJTyWkp5rkx8RMYMR5zl5jILUUqSqTGLVRLCZEbAJjeWpu2Pc2amliovkDz47Co1VK5RG4qqFqq5J6bT3/60zh69Chuu+029PT04OSTT8aaNWu4yXj//v1wCNGHn/zkJ4hGo/jnf/5nzfPcfvvt+MY3vgGn04m3334bjz76KIaGhtDV1YXzzjsPd955J7ze4rRItyMxkz43QCryEo0nc8p5JpMyWGrfbpEbcX9iCRkeV/HElxiK5U2rKqBDsb6LciUbigN5RG6Y4NBHsIpJmE9iLu35IKalghFt5Eb
sp9Pkd2MoFKsI8U0UDx65MTGYA+oCsJBjQ9/nxkr1qFHkZkIbiletWoVVq1YZ/mz9+vWa/+/duzfjc9XU1OD5558v0p5VDmbVUkAq8hJFbmkp0Z9j1w7FQOqEMXrP+TIihHwdUuEXiPFCn4aqZEMxT0v5cxc3pUwhjnFxM15pKXX8AuvWfN4Jnfh/r+3DpDovuppq8MquY7ZbEROlZVQoejCjGIJCn/7KJXLjqYBScFtXSxEqZoZigEU6Ery02wqiELKfoVhbAVYsovEkPwHrvW7+eUUTSSSTsql5zw5Ui6FYluW8PDc1QrSjVDDhVGpDsaZaikVulBvZyd1NePPWv4fTIeHqX24GYD8vA1FaRix4bgoVN/FEkov5Or3nJkOBhdF9qBj+n1Jgr3wEYYpZKTgglk5bFwJiuandSsHFSFIxywuDgjGz1qst87V79IZdiFg3abuVXVplLJbgx55ZXxcjfII/qlQdeyPjnJYaEz03wgwhl9MBSZL45OWYzY9NorgEhaIHM7zcc5PfuSAujtjreC1EYKIG4xfsGrmx112NMCXThTefEQyi+dhukRtJkkqyGmDhXp/bAZfTofEv2b0cnEUsWLSjEnrzGMGiNi6HxG/yVhgPITreaanhsRgXen6DFARv1FahQpbID+YLrLcQucn32GBVhz63g4sTKwUWRmkpux6nJG4qhHCGCy/rdZNLnxtxIrgk2UvcAELFVBHTUqqBLiUQXE4Hj4TYvRyceWyYT8VuFxKriCmpXI478bgvVWoq0zlWTJh5+NhoRH3M4DX56txmK2KitIxaiNxwcZPnscEbaQrRU7XAInufmwk/foEoHpnKVFmvm5wMxTYdmslw866XxbuR8TJwoQS5Uhr5jSl/fxa5idp8f81gYyRy8dsAKRHOonmlqpgav1Lw1PP3j6YmnHtdDsOKRSsGT6L60JdoG1FoF/eA0khTbBrpsdBpmPdbcxqIm0Sy6K07CoHETYWQKWTOh2fmcKDbdWgmgzeGKuJqYDScbtSrlEZ+4ag2LWW3VZJVBoKpG3pLrSfn32WGx1JFbvg5VsTqPCNYCmokywq90NU5UZlYauJXYJ8bNXJjsNDLcH4Z9Vtj38sy+EwsO2DPOxuRRqYeHMwQHMvhwLLr0ExGKUKdI5H05liVErlhwz1ZGLlSV/P9BYibUgvR8Dh1KNY3LzTzHllZSRPVx2guHYrzHHkQMBhea2XsjWGHYuF7Ox2rJG4qhLByUPlc5sbDXCI3dh2aySiFuBnVeW4AoXFVEV/nyU378dX/exuHhsaK9py8hF25MVbqan5QETetdbmLG5bOKdUUd3YMlDotVa+rEhMrpUQocjMxycVzk2+bgIByLRQ9N6JJ2Sy9xEvBDZr4AfaaL0V9biqEzIbi3MsC1bSUPSM33NxWxAs7C/caeW6KleoYCcfw1affAZC6ANxx0YlFeV5V3EzctBSLWoZL1J2ZNUo0WkAUk3p95MakWVsxWuwTlYelaqkCzeYsciMei+K9IJGUDaP6RtVSLocESUqlpSKJBIDc/HSlwp7LdiINNWRulJZis5hyiNwk041hdqKUaSkx3Gul5XguvH1wmH//fu9IUZ4TUEUeuxjZvS+PGSwt1ewvIC1VosgNe16jc6yY1LidGiO/WeTGQ5GbCYmVPjcFG4rD5mmp1POaRG4MPDeSJNkyymjPOxuRBo/cGKwq8xECMe56t2nkpgSVIuJcKQYzjxbrhvnuYVXc7OwdLcpzAurnwISZnXLbuTAQTJU/55OWYsd+qT033hJHbiRJ0qyYzTw+5LmZmIxYqJbyFmooHjNPSwHmbUWMSsEB9Xqdb1PBUkDipkJgF3SvYT+M3FM4MbtXS5XA6DsaSa1WjCI3xVpxHBpUfTb9wWjaTKh8iehapdtphZQLrPy5pTb3Ibc+obNvsZFledxKwQGt76bJpCxeXbTY54ZBlBZZli1VS1kx/2ZCjdwYp6XM0l1mMw7t2OvGnnc2Ig2
eljK48PryKJGNGRjD7ISVVuC5wlqOixeNYpecHx4Oa/7fH4yYbJkbPHJT4YZi5rlpzcdzU+Qom4j49y91tRSgvam01RsLvUK70BKVx1gsAVb0akXc5Hvd4tVSgsiWJEmwOGRJS+nuG/pZV3/ZeRQ//+sevH1wKK/9Kwb2vLMRaYxlKAXPJ3LDLph2j9wU8ybOVivayE323g65cFhXIcVu5oWiDvysXM+NLMsYDNmzFFxcGJS6zw0AtNWpgmZSnbG4Uc8Be7cpIIoHS51LknmLAKDwwZm8WkoXNWQmYqPrbjIpc9GjH+CsRtpTv/fbtw7jm799D6/sOpbX/hUDe97ZCA2yLPNQvN/AfKjeoHNJS9nbc6M/WYxY+14v7vzde5ZLrnlKRPB7WJmnkgu9gVSkhk0W6C+CuIknkupqroIjNyOROA+jF1QtVYImfuJgUqNuwcWmu9nPvzeL3HjyGIhLVDa8x43HlXE8SaGz99TIjfZ+kkk0iRHE9MiN1uDMjlm9CBpPqBS8AojEk1wx1xqUjXKjZQ4rvJjNIzfZREc8kcSXfvUmxmIJ7OsP4meXfzjrcxqlRFjUqxhiQZZlDI+lXmP2pDrs7BvlgqoQxItKfQU38RtQPotajzMvXwvvc1MCccOiQUZp31JwXHMN/77NxFxtxwqUiUwgHEO9N7PoEEkkZTz/bg+8LgfOnddh6XesjF4AitihWBe54UOYDdJSmcSNR3cdtUNmwJ53NkIDO+AB47LRvCI38fIr60xkS0tt7xnhq+2Nuweytv2WZdmwx0oxIzehaIKvWGZOqgWgVgcVgvh3ZSm1RFK2VatzK/DuxHlUSgFiWqoU4kaplBoncTO3s55/v3BKk+E2LIJUaX/nauQvO49i4TdewI/Xf2D5d276v7fxxcffwBWPvo6X3z9q6XesNPADxEiJnPM8p2RS5q+j77nkzpCWEh8zS0uxbdRqXBI3RAZ43wOPEw6DQZf5RG6Ysrbt+AWnshIwWZm8c0gtuR6JxDX/NyIYTfDnahUqdQoZv3BgIIRP/PAVrPjZa4jGk9xP4nE50NngA6CWXBYC22+HLg9faSt6Li7z6HEDqMKjFJ6bTJ62UvCxD03Cf336JLx8w8dNjaP59K8iSsM9z+/g/1oRE9uOBPC/mw/y/z+xcb+l1zGaf2eEW4ic5BrFDcUSkE1MyxkjN0KPG330ypOWliq/7YHETQUwEs6s5gvz3NjzEOCeG5P3tPdYUPP/9Tv6Mj4fS4nUuJ2aaphCIjd/3HoE7xwaxl939WN7T4BPvG6qcfMBlyz8WwhihYJmjkvFiZtUFCsfvw2gpoxKMRU8UzViKZAkCf94yhRMbfWbbpOtcoUYP8RFxdHR7NHY3799BIBqHN+4p9+SKApGrYkbj4WGe6avoSyWHVL68Z7Rc2NSKSU+FtV7bspYjWvPOxuhIZglD5tP9IGXgttU3PBS8ITxe9qjiJuFUxoBAE+/cSjj8x0ZTpmOOxq05s1COhTv7Q9pvh9WTHpNfjfPZbPHCiEirJhYq3OAtTqvHAaCqc8inx43QGkNxZFx7HFjFRZVpbRU+ekLqIJmMJj9nP6zsti6cdkceFwODIZi2D8QyvJbOURuNPOccrt28dSXgWk5U+djo7lS+v0hzw2RE9mmxOZTImv3Jn7ZolEHlGZ5V310JgBg/0AIQyGteffI8Bhu/81WvPrBMRxUtheNnIAoonIXN/v61ejR3mNBIXLj4eImUARxwy4YXrcTkiRVbFv+QroTA6UtBR8b58iNFZzKUNs4VUuVnWNCtEZ/ndEzGIzivSMBAMA5cydhZlvKf7erL3vH8lGlF1c2z43TIcHpyG8EQ6bxDmyQslE0yGj0AsPj1EduSNwQFsgmbvKJ3LAD1e2yp+cmWwqiL5BqljezrY5HY/boUlXX/WoLHt2wD198/A1s70ldbI5rMhY3+XhuDgrdiA8NjnHPTZPfzZtjsX4ShRDVRdns2A3UCoOK+MtnrhR
Q4lJwNjRzHBr4WYU8N/YgmZT5SAQAGMqyYEmloIDj2+vQXu/D8R0p8/hOS+ImfaClGSzKkmtKXTUtpx/rmQZyRjKkpfS/R54bwhLZHPS+PFIrdlDWmchU9htLJHnlTXuDF9NbUyujfUKa6OhIBJv2DgAAhkIxPPSXPQCAKc1aj0M2b08mBoQy78FQVJOWaixmWorPPHJo/q20cvAhQfzlQylLwdXIjX3OB0pL2YPRaByiXSZb5Oa13anrzpmzWgGkRA5gbdZckEdusovsfBv5GXVq58+ZQVBn8tzoZwHaoRrXPmcyYYrawM/4gK9Gzw0z/RpFbliI2OWQ0OL3YJpiyhTFzaY9A/x7p1Bh9vE57Zrnyqe7M5A60TWruVBMuHl7eHv9oqSldLnuSk1LDfHITX7ixlvCqeDjbSi2AhmK7YH+HGbHsRksJXVSdxMAVdzs6hvJ+loj3HOT/RxRG/nlZyg2WiyrIz/M+9wYpqX0peBs8VxGQzE18asAsl14vXlMS7a754anpQyGJDJzX1udFw6HxKMxh4ZUcfOWMtNkxeKp+JfTunHfn97HhzrqsUAxIDPyTUvpV28DoSi/6DXWiGmp4lZLif9W2ggGlrZrrMkzLVXCqeDsOBuPuVJWIc+NPdC3cxjMIm6Yt+ZDSjrq+A4lctM3ClmWMzYBVIdmZj8O801PZ8oEMDESz7FaSi+K7GAoJnFTAag9OIwPeOZFyMlzY4ODLxPsJhMyEDe9it+mXfHaMB+NOIbhrQNDAFKrp5O6m/Dzlacbvg4zLud6gRjQiZuhUJRf9Jr8bjQq0YlwLIlIPMEFaD7ojXyV6rnhkZva/CI37DgvxVTwbOdYOSDPjT3QL1BCUXMf3UAwyvs5zZqUEjXTWmvhdkoIRRM4PBxO8/2JjCiem2wdigF1YZZrJDPT1HF3BpOypVJw8twQuTAWVVrDm6alck+txCrEUGzkr+gbSUVu2utTjfKmKBVQzOCbSMq8qd/JSmjYjHzTUsxvw4x/g0JaqtnvUWbDpLYttJEfE6JMiFWiuJFlmRsxm/KN3DBvWQnSUnasliLPjT3Qp6WMFlwMVu7d0eDl12u304EZSsXUzt7MqSl2rRCndZuRb8fuYCZDcYZUF2vL4c0QubHTbCkSNxVAtgtvPlUkZtNd7YIvQ7UUFzcscqOIm8NDY0gmZXxwdBShaAJ+j5OvnszIN8XDVnPMzJxIytinXNia/G44HBKf4F1oaoqZnXnkxll5aamRSJzfpPM1FI/HVHA7iRsneW5sgTj+BsgcOTygXAO6dYULx7enUlTZysHZtaKxJhdxk2tayrzc3JWpz02mUnAav0Dkg1XPTS43O7unpZh52uhCcnRESUsp05Q7G3xwOiTEEjL6RiLYoqSkFhzXqDETG8E9Nzmuftjqrcnv5vt6VBFdk5SupCw1VWjFVERnKObDPiuoWmpYSUn53I68Uz9G0Twjb0A+2NFz41Y8N7JM0ZtyEtRdgzKlpVjkZmqLVtzMtlgxxad1WxA3+XbsDgqTx/VkmjaesVpKJ26iNjAU2/PORmjI1oNDjNxYHaLGlLVdZ0uxm4zRqoQZillayuV0YHJj6vuDgyG8rZiJT8qSkgLyT0uJ0TR935ZWRdxwU3GB4ka9qDiVf8cvLfXB0dGilF4PCim7fGHHeTwpI55IYn9/CIu+9Sd89f/eLnj/7Oi5cQrnJvluyseYImbYeZcpLXVwMCVupujEjWoqNk9LybLM+2JZS0vl1/dpNJqhiZ8wkFNPxj43NFuKyIdsaSl2g07K1kPYldLnJppIpq3OVc+N2safhYH39ofw1oGU3+Ykk2nLIvoVh1XE8nwxzeKQUrOlAPUCVWjkplyG4mffPIRz//MlfPJHr2RcrVphMGQ93G6GKDzC8SS+9fv3MDwWw5N/O1DQvgHAmCKi7ZSWcglRR4rclA/WF6ZNmYmWKVKSNXKjVEwZEYom+N+ZtZLIRKGeG0NDcabITYZ7Bk+VJ5J
IJGWww5U8N0RGsoobofEYU9eHhsbwsXv+jPvX7TT8HTsYvjIhpgdCupO3b0RbLQUAcyenctpv7h/k3YgX6sq+jeBjHnIUCiGexnBpBkG21Hr55HZ1eGaBhmKTUvBoCYy1Ij9/dS8A4P3eUctTjc0YKkLkRjQyjkUTODIc5v8vVHyFbZiWYq3wAfLdlBN2/WUR2UyRmwMDqaKGbt2Yl+mttZCkVB+bY6PGTQCZ38blkCyJbKM0bTIpZ43eW+lzY9R+IGMpuNChWBRG5LkhMsI9Nx7jP5d40Wfb/mrjfuzrD+Hete9r5qIw7O658bqc/CQS0zqJpMwvDiwtBQAndqWEzFObDyKWkNFa6+FVVJkQZ6Ikc7iBiIKzSbhhs/QYgKI18tNXKei7gZaCaDyJd5WKMwD407begp5vSCiTzxdJktTy11hCE7liN5V84X9PW4kbIS1FvW7KBhPObcpMNDNDcTyR5O0o9NPefW4njy7vPmrsu+GVUjXujL1wGF6doTieSOKyRzbh7O/+OWO0eDRDF+SMgzMtzpaKTiRx88ADD2D69Onw+XxYvHgxNm3alHH7p556CnPnzoXP58OCBQvwhz/8QfNzWZZx2223YfLkyaipqcHSpUuxc6dxhKIa4J4bEzUvSVJa1c8OoeSQ9XwRsUNONBtGIwwGglEkkjIkSb3YAMCJx6XEDTsBT+puyukCAeQmFsS0VJcgaE7oauDfF8tzw6qlvGmRm9KJmz3HgppowRv7hgp6vUGhe3MhiOXg/UFVtB8eKpK4sVFayiFMgCfPTfkIRfSRG+Mo4ZHhMBJJGR6nAx3Cwosxa1KqsvKDo9oZeOxanEulFJBuKH7hvV68susYDg2NYb0yldwIa2kp88iNUSk4uybFEknNXKqq9tz8+te/xurVq3H77bfjjTfewEknnYRly5ahr8/4w3/11Vfxmc98BldccQXefPNNXHzxxbj44ouxdetWvs33vvc93H///XjwwQexceNG1NbWYtmyZQiHw4bPWelYufD6hBUtkJpSzdAPlATs0R47G0bihqWkWms9cAmrglmTajUn3Yent1h6DfF3cpkvxS5wNR4nTp+hvpbYV0dNSxUauTFLS5XuhseMj6dMbUKd14VoIon9A+nHkVUGlcZmLXk28GOwcyAYSfBmaUDhviZeLWUjcQOoFVPkuSkfLA3FptmbpaVYGfiU5hqemhaZqbSl+ECJ3CSSMr7w6Os46ZsvYPfRUbVSykIDPyDdULxFWMS+trvf9PcyTgW34LkxbOInjIRRu99LlhaYpaLkd7Z7770XV155JVauXIn58+fjwQcfhN/vxyOPPGK4/Q9+8AMsX74cN9xwA+bNm4c777wTp556Kn70ox8BSEVt7rvvPtxyyy246KKLsHDhQjz22GM4fPgwnn322VK/nbLAwo6ZQuYsAsFu0Mx0C6SvFAB7DDbLBhcHGnGjlFvrVkYup0MjaP5+foel13A5JLDrUCRh3cMiGlDPnNWGeZMb8NEPTcJFJx/Ht2ng+18kz43yt8o0/6VYsEjI1BY/X3FaGfxnxkCBE8EZ7IJ+ZDgM8X6fbZhhNsI2TEsBQq8bSkuVjaCykGEtHiLxpKHYPGBSKcWYpRM3r+w6hj9t60UomsAft/bwRZCVMnAg3VD83uEA/9neYyHD3wHUvj1GkRtPEfrcsO3KbXko6atHo1Fs3rwZS5cuVV/Q4cDSpUuxYcMGw9/ZsGGDZnsAWLZsGd9+z5496Onp0WzT2NiIxYsXmz5nJBJBIBDQfFUS7OD1ZWjhL85ICscSmpWsUcje7tVSgCpujo5G8cN1O/H63gEcDaRXSjFu+Yd5mDmpFjctn8urE7KhSenlELlh5aF+jxM1Hif+eN1H8Ni/na65OTLPTdGqpcYxcnN4KBUh62z0YbbFBmSZUCM3xUlLHdId00OFRm5smJYCaHimHRjTRW4A44opds4c15SekgKAOZ2pa9K7hwOQZRmb96rDfV/b3c9
7QVkpAwdEQ3HqOrC3X13EMqGlJ5ZIcuuCkbhxWUhLZZstZRc/Z0lnSx07dgyJRAIdHdpVdEdHB7Zv3274Oz09PYbb9/T08J+zx8y20XPXXXfhm9/8Zl7vodzIsmzJ7Ch29GV9YBiZDcX299z8csO+lIdoLfCpRVMAGIubuZ0NePEr5+T8Ol6XU5kBlUtaKvvfpFhpKX0TP9W8V7pqqSPDKfHQ1VjDxyXsLEDcsBRSc4HihkUoj45oj+lsk5ozIZ5jdupzA6i9bhLkuSkbasNODyQp1VQxFI2niQM2866jwVjcnNDVCJdDwtGRCA4NjWHz/kH+szf3D+HUqc0ArJWBA9pZa7Isa6L1R4bDiCWSaQIjKHRbzlQtZRS50V+HRMQKTrssnO27bC8iN998M4aHh/nXgQOF98UYL2IJmYdAM114a4WOvsyXwtDfCFLPa48DMBMshSGao5/afBCAtgy8UPKZDG5lpV/sJn7ecWzi16OUWU9u9PEoWEGRGyVt1Fpo5EZ57/pjvJDoWCSeBKuetVtaipWDU+SmfLDBlDVup2rizTDQt9NE3PjcTsxXCg5e3zuIN/cP8Z+NRuJ4U/HMWI3c8LRUPIHAWFxzPUgkZRwZSvegspSUx+Uw8c6YD2u1ErmJJWR+b/GUeeFc0jtbW1sbnE4nenu1ZaS9vb3o7Ow0/J3Ozs6M27N/c3lOr9eLhoYGzVelIIY/M91I/Uor7WA0gV4lcsNKofuD0bQyZ97nxsaG4qkt5qXcM9qspZ2skE+vG7VaynyV1WBgiM4HfWdQT4bQcbE4rIibrqYaHN+uegXyMbbKsqxGbopULZUeucnfcyP2CfHZ7Hxwkeem7LB0tc/t5KNWjEzFPcp1t6PRWNwAwClKwcETm/YjFE2g3uviBQkvv38UQH6eGyb2G3wuzFQ8ckapKSZu6g2iNoCalorGM6SlDBbEYjNUuxSrlPTVPR4PFi1ahHXr1vHHkskk1q1bhyVLlhj+zpIlSzTbA8DatWv59jNmzEBnZ6dmm0AggI0bN5o+ZyXDLrxOh5QxhcR6FgQjcb6CmD85JeISSZmvnBl2GGyWjenKJF0jFhyXvUGfVfisprzSUuafH4/chOOWx2IYwZr1jZfnJhpP8lTm5EYfulv88LgciMSTODSYe8n1WCzBBVqhnpsanbhhxRiFCEi2gPA4HZoKPDtAwzPLD/c8uh08smckbrJFbgDg1Gmp1NOmPSm/zSnTmrFIeYzRZeLZ0aNaEZLCMGEf76dzYCCE4bEY3j44xBe3mSqlAKGJn0HkJmYlLZVIcmFU7ntLyV999erVeOihh/Doo49i27ZtuOaaaxAMBrFy5UoAwGWXXYabb76Zb3/ddddhzZo1+M///E9s374d3/jGN/D6669j1apVAFIG0Ouvvx7f+ta38Nxzz+Gdd97BZZddhq6uLlx88cWlfjvjjliimqmsjkduInF+oHc11aBeKSvUGy6Z58aVZbBkOTm+o55/f0JXA645ZxYA4LimGl7BUwzymbKtpqXMIzfMc5NIyhm7mmbDbPxCqaaC9wbCkOXU67TUeuB0SJipCM1dR81n45jBojYel4OvfPOF+QyYuGEXcnZ8R+IJfOt37+GO375nOcqk9pGyl7ABVE8ceW7Kh1jQ4VfOd31aKhJXWxNkEjdnzmrT/P/06c34yGztY/rRDWbU8ApZNXLTXu9FtxLxPjAYwuce3ohP/uiv+NbvtwFIdUgGMomb7NVSRn1ueFpKjNxUs6EYAD796U/j6NGjuO2229DT04OTTz4Za9as4Ybg/fv3wyG0GT/zzDPxxBNP4JZbbsHXvvY1HH/88Xj22Wdx4okn8m1uvPFGBINBXHXVVRgaGsLZZ5+NNWvWwOezpngrCatGRxa5CUUT6BOMbQ0+N0bC8TTfRyYVbheOa6rBqo/PxlObD+C2f5iP02e04IITJ2N6m7+oK2yelsphRovYxM8Mn9sBtzM1rXx4LGZ6QckGE6LsopL
J9FcMWHXd5EYfF9SzJtVhe88I9mQoMTVjMJg69lr8noL7XrDzoF+5kXS31GD/QIhXmqzb1oefvbIHAHDOnEn46IcmZX1OO3YnZrDITSlTkERm2CLC53YKkRttewdWxOFxOTJ24Z5U78WZs1rx6gf98Lkd+JcPd6O11ouWWg8XR90WxQ03FAtFJO31Xi74n37jEB9R8od3juC2T8znc7LM0lKqSDFIS1mI3EQSSdt4bkoubgBg1apVPPKiZ/369WmPfepTn8KnPvUp0+eTJAl33HEH7rjjjmLtom1RL7yZb+a13HMTRw8XN1401rhxaGhMM99IHGxWbnWdjf9YNgdfOe9D/Ka4wMK8qFzx5hgJkWVZ08TPDEmS0OBzoz8YRSAcQxeyj4MwYrxLwdkFsatR3V/WTn5ff+6N/AZCxamUAtJFfupC3o+hsRhkWcZbykR4IFXdZUXcsAt+Jv9UuWDiJpfRIETxiCeSPCXoc6uRR30peI+Qksom4O/9l5Pxs7/sxoULJ/MRMv+8aAp++vJuzO2s5/10sqH13KhpKRb5EWev9QTC6A2EhbSU8XWLRfJjmQzFzsxjG+zS58Z+ZzOhIWyxc6pfUeKhSEKT+zWab6QdbGbftBSj1F0umefGqriJJpJcHGZb7TfWpMTNcAGlymaG4lLNljqslIFPFnL/01qYuMk9cjOgjEloLmCuFMOrSx2xVW4iKWM0Etc0MvvAZIaPntFI6m9Tb7Ez7HjiUI79RAGeLSJ/wsI1IZOhmFUXZkpJMTobfbjlH+ZrHrvu3OMxp6MeH5/bbvl65xP63HBxU+81jfwcGAhhJJvnxmUeFc5ULeVVBI8sq8Kv3OLG3st2wnJzMVYKnjIUq6591dRqJm7oEPDmGAkR8+3Z/i4sRK03dOeCPtftcZnnxYsBS0sVK3JzJIcLfzb0jSwn1Xv55zIUimma+1k1PzMfglFTs3Lj4p4bY3Hzq0378ff3vlTw1HbCGLGSzutyoEaJ7unFDe9xk6FSKhO1XhcuWTQlJ8O9Ni2Vev1J9V5eLQWkqqdYNdbhYTVyYybk2cLJqDpPv8gScbtUQRZUPpuqrpYiCseq54ZFbvpGIrzcLxW5SS9HFvP3JG5Eg641z426MpGyfn7sYjUQzD9yk5aWcuZe3ZULrD9GV5Mqbqa3pi6YBwfHEM9RVHGx1JRfWk5EHylr8Lm4gBwei/EVNGC9PJyJGztGbpySubiJJ5L4zxd2YGffKL72zDv4665j4717VQ8TNx6XA5Ikwc/73Gg9N+y46zBoLloq2MIqGk9ycdVe74Pf48LKs6YDAL57yUI+2PfI0Bi/N9SapGBZWsooKsw9NxmmggNqRVa5PTd0ZysBw2MxbNzdX1D5L2PMQidcQB229r7S8K7e60Kt1yU0klNPRnZTdDokntOfyOSalgrlMGRRFTfpjRStYlYtVSpxw3rciGmpzgYfPC4H4klZk8u3AougHNdcuLjR96Gp97l5B+X9AyHNinrQYipQnbVTeNqs2LABjEbi5t3DARwbVQXco6/uHa/dmjCw0QbsuKvJ5rnJM3KTD+KC94ByjrHmpl+/YB5euenjOH/BZExWFhWHBXFTZyLk801LuZwOPqOPiZtyL5xJ3BSZoyMRfPJHr+DTP30NP17/QcHPF7aYlmLN0VjulYVHuedGSEuxCIVRSd9ERK2Wyi0tZaW6pqU2dbHpD+afltK3PWc+qVKVgoujFxgOh4RuRZzszTE1xS68xxUhclOn697a4HOjUYncbD+inRlnNRU4Erav54atpJMGC6V3FX8R22aXRY8RYZ2wLnLOxU1Ue+5lG71QCkRxw8QvG0vjcjowRamaYhFTMS1lloJlU+iN0lKZSsEBVcwwgz6Jmyrjjt+9x02XD67/wLBNdy5Y9dzoc7XM32A0AiAsdNwkch+/wP4mVqprWnnkJj9xI8uyabVUMTw37/eO4OX3j/JqnFA0zuc06ZuJTVNSU/sHrJuKo/Ek9h5
LiSGrw0wzoRcg9T4X7ye0rScVtZysCPuRcNxSCm3Uxp4bZig2auL37uFhAMD5CyYDSBlG8+kgTZjDrglc3PDGedq0FPM5jmfkxumQNL2ZfG6H4THM01LDY/xYNzMUu5zmrQcylYKLj1PkpgrpH43g928f5v8ficTxl51HC3pOtkLwZYkS6MtsWXhSHd6onoz8hKXIDYDcOxTnkpZqq1ciaoH80lJi7ptVJHADdIHipmc4jE/88BVc9sgmPPLXVG+Yg0qUpd7rQr0uSsJKTPfnUDG1+9go4kkZ9V4XFx2FoO/P0VDjRhMTN0rkZk5nPe9cbGVaODcU2zBy48ySlgKApfPaeT8lFnUjioO6EEydc35hhh9DlmVNKfh4wlKyQMpvY1RpNVmJwB4ZCgsp2MxN/PQdihNJdcahkedGfDwYZfOryHNTNbz6QT+SMjC3sx7/ungqAOBvwlj7fLAcufGbRG5qzCM3XorcAMi94++YhR43DBYaNprzYgVRcLH0WbEMxb/Zcoi/5wdf2o14IondSmpjpkEH6Kl5lIOzNvPzuxqKUtKvF1x1XtVQfFBIfzERZGUsAxNAjRZn+ownTpO0VCIpY3tPStyceFwj75diNCSXyB99WkodeaCKm6FQjJ+LxRzoawWxYWC7iZmZpYP7g1EcVcaqmHpuhE7DIuK1hiI3ExBWrXDW7DZ8eHpqXsjf9g5m+pWsWPXc1HicmhAlm8vEjMbkuTEn/7RUdnHDBMHhobG80kiai4pysXAXqRScCQ8AODYawcY9A/jgaCqFNMsghTRNKQe3mpZKJmX8nzLF/eNz2wvaV4aYlmqscSsdYbXCfnKjj6cMraSFmR+q0LlXpcBpMjjz2GgE4VgSDilVycZuclYiVYR1eHdiJbpr1OeGRW1aaj08CjxeaMSNibBqqHHxSM1u5fzWL4YZrPN7TBcptCJumJjhk8dJ3FQPV39sFu646ARcdHIXTpuW6i2w9dBwWqvuXMjFvDqtRV1tz5qUujmpkRt1HyhyoyXXDsW5pKUm1aX6sCTl3LwqDHEGGKucEaeC59u5NtXNN+XZYHO6Xnr/KD7oG1UeyyxurFQC3ven9/HWwWH4PU780ynH5bWfekRxw/xM+hXrlGa/aVWLEaySrdWO4kYyjtwwX1STPzX7q9EgQksUDltcsqhpjdAVmMGGzFrtLFxMmv3atJQRkiRhiq5S0UzIu7mY1l4LI4mE8lzm8wiZ6GH3mnKPMyFxU0Smt9XisiXTsXBKE6Y012Byow/xpIwtB4byfk6rfW4AbQOp2XpxYxC5Ic9NCibySlEt5XBIWKiMjLj4R3/FA3/elVOLAKPyS/H7fH03w2MxflG+6qMzAQB/2XmMV9wYiRuWYhuNxLMapAPhGH7yUqpa8FsXn4j2InkRxLQUOyf0VVhTW/38JmRlYOnAqP0jN/o/M+vhw/xGPHJTQCdsIh198YXPQDQPlDHyJ0ZuJmXoscPOXUZrXebITVLW+rzEdhRm6WW26GL3mnIXrNDdrURIksTH27+5fyjv57HquQGAS05NrY4vWzKNl8eytFQ0nuQ3ZYrcaMnVoGtlaKYIS8mMROK45/kdePqNQ5b3TRzaxxBz2fmmplgUqb3ei3PnpYbYbjsSwNtKNGd2e7rnxud2ci9XtijU+z0jiCVkdDb48E+nTslrH40QhR1LB+ibA05rESI3WcRNOJbgHVVby7DyzoYqbrR/Z+4TUs7zRsVYSuKmuPDIjXLc+Q1E82AZxY0YLZrTUW+6nRi5Sc3IylwtBWivLZl63DDcPHJD4qbqWTQ1JW7+uPUIzrnnz/j7e1/iFR1WCVscnAkAnzypCy/f8HF885Mn8MfqvC4eRmTmSorcaMnVcxPKIZoGAP921gzccuE8TFfSOv+r+FCswKJJoj9KzGXnaypm4mRqix9tdV7Mn9zAf+Z1OTC1JV3cAOoYhmziZqeS3vpQp/kFN1+WKmLss2dMA5BefttS6xEGHGZOCbP
oldsp8YWAnTBr4sdmlaVFbsby76dEpBPWl4Irx1VYEDcDyt+iuXb8DelnzGrl32caKiyKm9ZacxEvXlvE9gOZuhMzatysWop9ZuS5qVoWKZGbrYcC2Nsfws6+UXz9mXdyeg6rhmIgFS2a2urXhA0lSUq78FHkRgsXNzmmpaxGbnxuJ77wkZn42eWnAQDeOjhk2SvDBJe4YnI4JF6ymW9aShQ3APCR49v4z86Y2Wq6QrM6QHOXIm6OL0JvGz0/XnEqfrLiVJ5O87md+FBH6nWWzuuAJElqVUs08+fDytq7m/0lH9CaD2xhom87ws5lZqZmIqeQAa3F5M39g7j7j9v5cVCp6EvBawyqpZhny8ykW0pOn96CCxZ04tOndWdsICimpTJ1Chf9NPEcIzf6aJB+Dtx4Y7+lShUxv6sBfo9TE8J888AQBoJRyyHMMW5oy/9Aaaxx49hoFINBitwYkev4BVXc5Hb6TGuthcshIRRN4EggbKljb8SkK6jb6UAskcg/csNu6opYuWTRFDy2YR/C8QQuP3Oa6e/xXjcWIzfFaNynx+Ny8MZ1jJ+vPB2PvLKHCx61qiVz5Gav8jkws7TdUGdL6dJSIW35ujhfyw585am3sPtoEK9+cAzPrTq73LuTNxEWpXVpIzfatFTqMy9HWsrldODHKxZl3a67Rb3WzM0QTRXH8YiN/KyIG70HkQzFVYzb6cCVH0ldbP9ubjtmTaqFLKdW7lYZy6Eyxwy2uhvWRW7KnRO1C6VOSzHcTgcv0f/A4opWLdvXvlahXYqZOGE39Q911GPdVz6GP173Efzd3A7T3+NpqSyRmw9KGLkx4rimGtz6D/P56pWvsLN4bvYcS+0n675sNxwmhmJ2c631pt4n99zYQNwkkzIvOWYerkolbfyCW10IsegrMxTrG6naibmdDVzUnDW7zXQ7SVKjwoaemwxpKb/uekhpqSrn+qXHY+2XP4qHLjsNx7enDi7Wjt4KTIgUJG5qtJUUEZ1JbqLDyjwtdyjmk3Vz/5uIfW+sYOS5AdSLTL7zpZi4YZEbIGXMndvZYPYrAFQRsG/A/BgORuI4pLy/UkRurGC1FJx1+Z03ufjeoGJgNluKRaRY9LCRn+PZPTeb9w3ikz96Bd9ds72Yu8rpHdEOVh0Jl19w5Ut6h2I1WsuOLTbDzI7VdgynQ8LTXzwTT151Bs6bb754AdSCBbG3kn6+nRH6NH2501J0dysxkiTh+I56OB0SZij9RPYYiJs39g9i66H0VQ6vliogxNeoa/DF2mP7bThLpxywjr9WhQL7/Mzms2SiQ2m0xSp9ssHTUrpVUCGTwWVZ5oP+9JVG2WCem95ARNPrQ+QDpZy8rc6b1mBvvDBqtqZHlmV+zp14nLkZs5w4TJr4BXW+L6tpqWg8iS/96k28fXAYP1n/AfpGcpvwbgV9VC+XjtZ2Qz9bSlxksGszj9yU6Vi3it/jwhkzW7N6y5igjiVz89zU6D03lJaaOLAbwwGdX2E4FMOlP30N//DDV7Bxd7/mZ0VJSykha7bCYE2W7FgdUg74VHADocC6bYqwqbcsJZALLG3Sa1HcqJN4dWkpoZFfrgyGYvz3cm081uR389EG+uOYsbOX+W3Kl+oxaramZ/9ACIFwHB6ng0dV7Qb33OgiN3pTu9jnJlMfpWfePMijagDwl/ePFXV/AbXjM4NVpFUi+uILhzCsciyagCzLFRG5yQWjyI2ltBRFbiYurMnSsVHtyb+tJ8APnhe39/HHZVnOqYmfGc1+bSUFCxM3+Ma/dNGOqNVS2hvh/20+iBNvfx7f/v17msd55CZHQzGgzvzqGbYauTFOIRYSuWGr9Wa/O+NKzAhWkQeYm4pZI8ByCga2iswUudl6KJWSmju5PufPYbww63OjT0uxBUw8KfOojhEv68TM3n7rKXLGvv4glt/3Mu770/uGPx8M6cVN5Zan8+uvcHzw0R6xBEYicb5QqBZx48rkucklLUWem4kDEzf64Xb
v947w73cLKSsxklBIWkrfvZRNQa6nyA0A82opNin7ob/s0XTkZYPh8klLsfkvRy2uZs2qpbi4SVgzQYuwCeVm7dqzwUzIZukGNXJTHr8NYFyyq2dnX+q8m5fFZ1ROzDoUh3SRG5/bwY2gmTwurFv6x+dMAqBWi+XCH7f2YHvPCO77006eghTRNxKs5MiNOkdOPddFszpr4Of3OKumQIMPzxTFTcL4OiRC1VITmDYlBdAfjGhCx2KOWvTjiJUehZRtN/pZJYWSllIufvoJyxMVdsLGkzJvljYWTeA9oeHiW8IIjVAktz43IsyDwspHsxExSUuxC1A0nntaqk8R1/lOMJ7ZlhItbCq1HnbDK6e48VvoUMxSg5ObijMaohSYTQUP6doRSJLExXbQIJWa+h3V6M1K6fflEbnZLpwX24+MpP18UJ+WquBJ5eqoFfX6y9NSsUTF+G1ygaeljMYv5BC5YSb3ckHiZhxh8zxiCVlj/BPLN3uFdAVbNXicDj7zIx/01VIsctNQQ5EbQDerSTmJd/WNQryfMOOpLMs8LVWXR+SGXQStVLUAQmWb3lDMxE0epeAsLZVpFk0mTu5uAgC8sX8IsixreskkkzIODpa/d4yPt8k373PTq0SwOos096oUmE0FZxV74g2FHY/s/NbDKvTqvS7ekfqIxfSoyPu9arTGqEnfYEjb96UaIjc1bvVcFyfOV5vfBhAMxcK1JWahQ3GdVxUzHqejIJ9oMSBxM454XU6eChJPeDGMOxKJc5+F6rcp7M+kT0ux2R8UuUkhhlrZZ89SFgwWUQvHkmALmnyqzVgX02DUWgO+rGmpfDw3BaalTp3WDJdDwq6+Ucy4+Q9YcteL/CbXNxJBLCHD6ZDKKhrU8Qvmnw/zPWXq7FpuzKaCh2Lp0UMmboxM8ABwcDAlbo5rruHv+dhoJOdeSYeHVUOyUUsA1k+LDe+taM+NwZBcMeXZP2r/Hje54jIoVohYiNyIAq+hxl32jt8kbsYZdgAMCoJmWDcPhomQXKZPW3nN/mAEkbg6KLDcYUO74HI6+AqZncRslctuGHuU8H1QiATom1ZZod7nAmsCaiV6kz0tlbu4YZ6v9jwjNy21HnxBaU4JpMqPH3p5NwDg0FAqatPZ4Cso2lgoalrKPHJTaHpuPDCbLRUyuDawhdOoSeSGpaSOa6pBa60HbqcEWU73AGYiHEtoFmNG0+GZuGKRu0qO3KjGbUHcCClPHrnxV8+11ONk0cLcDMVtwqTxRhtkBUjcjDOq50K9KAzqDHhsNZDLXKlMsBV6LCFjm5Ij97gcvIqKSJ8vxVabp01PzQdj5tmgkA5wOHJfmTgcknoMWJgDZFYt5S2gQzFLSxVyU79h2RzcuHwOJitDK1/ZlarC4dGBHPvnFBtfFkOxLMtcXNo5pcBSBKL/QZZlfrMRrw3ZIjfsujKp3guHQ+LXBas9l4D0Kr9+g6gMW5SxhpWVLG7GDK7B7PtQLIEBPnrBvgI5V4wiN+rgTPN7kXge6Rdj5YDEzTjDFL5YLqmvLmA/K0YZOJASMszM/Ob+QQDA5EZf2cOGdoJ9xmwKMKtmOmlKE4DUCjUcS/AeN7nOlRJpMjgGzOAdiovYxI9HLPJMSwEpL8gXz5mNdV/5GFwOCYeGxnBwMKRGBzIM5xsPsjXxG4sluGCwcwSTG4qT6SkCQDtzrjaLuGHHGxPXbKU9kEPaSO/R6TcQLuwzZy0DBoLRtMhTJZBMymqHeIPITTia4EMzq2mhaOS5sRK5ET2Idvh7k7gZZ5p1q3ZZlnlaiq10WBOsYqWlAKCzkYmbIQDgK24iBV+NKZ85q/CYOamW/6xnOCyYiQvpO5QevTMjYtI8K19DsSzLguem8NWm3+Pi3X037h7AIZtEbmqyVEsxQ7/LIZXd+JgJh0ETP1HciMdFtrQU63PFbsQNNda6GouwNFSXcv04FoymNQ1k59CU5hpIEpCUjdNXxeToSAS3PrsVr+maoBZCWJg1Z5S
WCkXVaqmWOvtG/3JFrZbKTdxIksQHdC6e2VLCPbQGiZtxRk1JpE6KUDTBw39M3LA+FUYh0Xxh5s7N+1KRm1zb7lc7+pshE5iT6ry8VPjw8JiQlso/ctPMIzdW0lLaDqkMtyt108s1cjMaifPjKt9qKT3sQrZpz4B9IjdKdUs8KRum7tgNvdEGxsdMuAw8NyxVKUngvW2A7Gkpds1h4roxD3HDnmOWUuYfjSfTXo8dX3VeNzfQlyI19bu3D+MXf90DWZZx3ZNv4v+9tg/XPflmxq7UuRDStOIwNhQzcdNq49RmrqhN/NJLwbPNI7zrHxfipuVzccuF80u3gxYpv+tngtFSq9zYlJOClYF7nA6+kmYrr2J5bgC1IoTdfGbYdApyuVAvWKnPnl2M2+q9mNzow+6jQRwZCvP0VT5l4Ay9wM2EaYdiJfeda+SGpaRqPc68mhAacdq0Fvw3duON/YM8wjClzOLGJ/QlCUUTaKzRfn4simHnlBRgbCgWh6mKwoyV4o6YihvlPSviOh9xw7btbPCh1uNEMJqqGBIrL0UTbludF/3BaNHFTc9wGNc9uQWJpIxQLIFXP0hFbHoDEWzcM4CPfWhSwa/BFjo+t0PjrxNHe/DITRV5bgzHL1goBQeAs49vw9nHm08dH08ocjPO6M2kzNTY6HfzsDLrU8FPrmKkpXTlrjMnla/Bmh1RIzdJxBJJ7oNqq/NicmPqRn1keEwYOlr4OIxCqqXy9dzwSqkilj+fOrUJALCzbxS7j6aqymaV+fjyOB28Kk0/VgMAArzXk73FDQvMGKWl9MdEnS9zE7+hYkRuWNO6Wg9ahaakDL1Ppa1eqdQscjn4H945wgXf99bs0Pzs1Q+KMy/LqDsxIKal4jzCa2dTeq648xy/YDcqZ0+rBF4KrpwUbAXZVOPmqx8W5mU9OoqSlmrUixuK3IgY9a5wOiQ01bi5v+DIcJg3T8tnrhSj2aAdgBmmfW4MLkBWYJGbYqWkAKC1zosZberxVON2lr0xniRJGUcwqL2e7B28dior5YSFFAHzgZl5bgZ1nhsmbgI5iBsWaW7yu3lTUrGPjfhZ+z1OtNayeXrFjdy8dXAo7TE2CPjVXcXx3fBye931l/0/MBbnC9GqSks50isxrfS5sRuVs6dVgr5Shl0smv0evvIKlMBzI3psPC5HWVvj2xGxLwq7ELfUeuBwSOjkkZsw7xGUz0RwRi5dik07FOcZuelTyn6LYSYWWTStmX8/p7M+rzL5YsOjcQbihkXgbC9uDA3FxsdEprRUIinz60pTAZEbdsw21Xi4cBGjMmIfKJ/LyUV0X5FHMLyjdAwXuf0TJwAAth4ezjhfyypmBR3sWsGaGTodku3Tm7nAPDea8QsW01J2omR7OjAwgBUrVqChoQFNTU244oorMDqa3qpb3P7f//3fMWfOHNTU1GDq1Kn40pe+hOFh7UEsSVLa15NPPlmqt1F01BtbTPOvmJZK89wUIS21cEoj/z7VwKtyDtLxQFzlc7+NEnbnhuKh8TcUR3NMS+3sHcGzbx4yLcU8WoQycCM+cVIX//7v53cU9bnzhX1mYYMuxaNFiMCNB+w0NSoF199o6jJUSwXGYnycCLsR81lUGRod6hGjP6yUXCwHH+Mzr1J9oNiiinn9ikEskeR9p/7r0yehq9GHCxZ04p9OPQ5djT7IMrC9J33mVa4w/11a5Ea5HrPKwGa/2xZivlh4uOdGTEspo4AqKHJTsjN7xYoVOHLkCNauXYtYLIaVK1fiqquuwhNPPGG4/eHDh3H48GF8//vfx/z587Fv3z5cffXVOHz4MP73f/9Xs+3Pf/5zLF++nP+/qampVG+j6KgdiqNIJmU+zLKpxp02G0Y1tBUubup9biyd14EXt/fi6xfOK/j5qg2fUN7Jwuzs4t2lRG56AmH+tymKoTiHUnB9CoKJ04hwARoMRvHPD27A8FgMf911DN/754VplUCl6sr70ePbcP3S49E3EsHnlkw
r6nPnS6Zy8EImu48nTkf6EENTz02GaikWKa7zuvgNyspwUT1DQq8c5lcKCFES/bRy1hKANXcsBgcHx5BIyvC5Hbj45OPwj6dM4T+bO7kBh4fD2H4kgA9PL6wc2agLNKBej6vRbwOokZuoQSp0woubbdu2Yc2aNfjb3/6G0047DQDwwx/+EBdccAG+//3vo6urK+13TjzxRPzf//0f//+sWbPw7W9/G5/97GcRj8fhcqm72tTUhM7OzlLseslhaamknBIx3HPjd6NB57kpZrUUAPzoX0/BYCjKDbKEit8gcjNJidwwv9JQKIYjSii6kFkyzflUS5mkpWJC5Gbd9j6eYnhq80GcM6cdFy6crPk93p24yGkpSZJw/dIPFfU5C0WsatHDmjEWkl4cD3jkRkxLmaQq6zMYigeF6wyD3bSDOYkb9XlY1Gs0ov6+XhCwqrlDRRQ3e5U5b9Nba9PE+4c66vHi9j7DgZ65MqYTagx9KrPqxI3DIHKTqDxxU5I93bBhA5qamriwAYClS5fC4XBg48aNlp9neHgYDQ0NGmEDANdeey3a2tpw+umn45FHHklrIqUnEokgEAhovsqF1+VErXKyDIaimq6hdbxaSuu5KXRwJsPndpKwMUFc5bMGfm2KAGjwufjfjI2vKKQjKfvd4bGYJt1ghFr2q0tLGTTxe0PpPs24f93OtHOjEiZhF4vM4qYyIje8iZ9h5EbvuVGuHwbiRl8pBagpOauRG1mWNR5BJgxFMcUFgdJnqFsYwRAogg8GUMdFGPXqYk3kipEGM/M8snQ1o9rEjdvIc8OOuQqyM5RkT3t6etDe3q55zOVyoaWlBT09PZae49ixY7jzzjtx1VVXaR6/44478D//8z9Yu3YtLrnkEnzxi1/ED3/4w4zPddddd6GxsZF/dXd35/aGigxLSwyEoqrnpkbw3JQockOYw1vXh1VDMauAkCQJk3XeAfEmkSvs75+UkfWCn8tU8G1HUqL9O/+4ALUeJ3b0jmD9+0c1v9c7zOZKVb+4YZGNTIbiQtKL44HLIC1l5sNix3A0nuQRP8aQQeTGL5Q0W2EkEuciq8mvptFFccOeiy0WGmvcXEi/XwQfDKB2D59Ulx59nNKcElO5psESSRl3/XEbHn5lD3/MLC2lr4wy2o9Kxs1nS6WXgrurNXLz1a9+1dDQK35t37694J0KBAK48MILMX/+fHzjG9/Q/OzWW2/FWWedhVNOOQU33XQTbrzxRtxzzz0Zn+/mm2/G8PAw/zpw4EDB+1gIzbVqnxOxtJJdLFgPjrEiGoqJzIiVI6rnRr1ozWzTls43FRC58bgcQvTOXNzIsswjM+ml4OkXoAMDKZPlwimN+NfFUwEA//3SB/znwUicr+o7bDwJu1hkKgUvxoyw8SCTodgscgOo74+hnysF5J6WGlKGRPrcDvjcTsNZVmpvGPWaNaezHkBxTL6A2GAzfYHB0mC5ips/bj2C/35pN+783XvYfTSV0hozKQVv8nsg+odZdKpacGUQN1VbLfWVr3wF27Zty/g1c+ZMdHZ2oq+vT/O78XgcAwMDWb0yIyMjWL58Oerr6/HMM8/A7c58E1m8eDEOHjyISMS81NDr9aKhoUHzVU7Yqn8gGBP63Hh4nxu28iqmoZjIjFbcaNNSQCqXL1JI5Eb/emZoZghlidyEonEuyrpb/Fh51gy4HBJe2z2Atw4MAdB2JxY7ylYrVgzFhcwIGw+MZ0sZV644HRIXFfqKqSFdjxtATUtF40lLgw5ZlJEdu3UG1VZ6QzEAzJ2cOnd2FE3cpC8+GKwT+2gkbjkiBQB/3q5GOF/cnrp3GQk1IPU5i6moqVUmbtxsEr1Rh+IKitzktGyZNGkSJk3K3tZ6yZIlGBoawubNm7Fo0SIAwIsvvohkMonFixeb/l4gEMCyZcvg9Xrx3HPPwefLHjrfsmULmpub4fVWzkpU7HPCq6WEyA2QujgVs4kfkRlRbAzy7sTqBez4Dm1foNYCB+U
11LhxeDicUdyIXhH9McAuMkwAMcNmg8+Fxho3Gmvc+OTJXXj6jUP46V9244F/PRW9ilehYwKkpAD1M4sY9AIarRDPDatcMfbcpF8X6rwuhKKJtIqpTJEbICWOswleVinItuOl5EKUiInGGiEiNpdHborjdWTtDIzETa3HCa/LgUg8if7RKPwt1v6+HxxVDch7+1OGZbXPTfpzNPs9XGSx6efVghq5MWg/UEHipiR7Om/ePCxfvhxXXnklNm3ahL/+9a9YtWoVLr30Ul4pdejQIcydOxebNm0CkBI25513HoLBIB5++GEEAgH09PSgp6cHiUTqIPvtb3+Ln/3sZ9i6dSt27dqFn/zkJ/jOd76Df//3fy/F2ygZYjm4mAvXrLwi8aL2uSEyw8TNYCiGgWD6xVNsethW5y048mElcsNWjm6nxC84DG4oVi46R0fTxyr821kzAADrtvUiHEtwcVPsMnC7wiKeRpEbNcJgb3FjaChmJnODQoM6X3qqCDCO3Hhd6oiKkIXU1GhE61NihuJRA0NxrZiW6khFyrf3jGQt/rCCvg+ViCRJ/HGrXZFlWdaIG9ZDh5vODa6/pygjRwDg+Pb6tJ9XMqqhuLLTUiU7sx9//HGsWrUK5557LhwOBy655BLcf//9/OexWAw7duxAKJQ6kN544w1eSTV79mzNc+3ZswfTp0+H2+3GAw88gC9/+cuQZRmzZ8/GvffeiyuvvLJUb6MkML/GkaEwV8RsRVXvS628RsJx05wvUXyY2BAviGLoWZyVVIwyakviJkNa0qeLShjdvE7oakBHgxe9gQhe3zvIxc1EqJQCBHGToVqqUgzFRlPBjSY0q71utMeVGCFmSJKEWo8LI5G4RXGjHVlRr3REFlNgIYMF2az2WjgdEkbCcRwZDhtWOeUCE/KTDDw3QCriemhozPI8q5GIOkYBAPYr3rWALlIl8h/L5mAgGMWlH54KZxU18APSDcWZvH92pmRndktLi2nDPgCYPn26RsWfc845WVX98uXLNc37KhWWltqjhD9dDomvDuq8LvQikhI3MfLcjBeNOoNws9+t6eLsczuxZGYrNuzux9IidOC1Mtcn0/gNn67MWa26Uy/4kiRh8YxWPPfWYby5f5Cb1ydaWspI3KhpKXufW4q2ySktBUBzswaAwaB29AKjxuNUxE12f8po2DhyMxZLIJGU4XRIhr1hvC4nZrbVYmffKHb0jBQkbsKxBH9vRpEbAIYDPTMxoBNBR4bDkGWZt+SoMxjR0V7vw88u/7Dl/a4kXHxuXeqYiydl3t26ktJS9l62VCmsAdwepRlVk9/Nm1GxVcJIOEbVUuNIvdeFep+LXziNBMCj/3Y63u8dSfPf5IOVyE2mtCRbQbFtBnkfE61IO7m7Cc+9dRhbDgxxsaYfolqt1Hi0nxEjnkhygWD78QsGhmKzwZmAeZdioz43gFgObiVywxofujT/AilTcYPPzUWSPt03QxE3BwdDWV8nE6wrsNtpPs+JlWofsxi5YSKos8GHnkAY0XgSg6EY/wztPn+s2Lh1TfyiGQob7Ezl7GkVwW5AYo8bBjuRhsdi/KCitFTpkSQJM4XUk9FgUY/LgROPazRcMecKFzcZSsHZTCSfwevxyA1PSyk3L10PjgXKTLH3jgS4UXK6rqy9WjFr4ieWPleKoThplJYy8NzwSkzdjV0/EZzBREguaSkmoLwuB1xKSoal+cwmaU9WBPVhpc9SvrAeN6213rTuxIzWHD03TARNbvJxYdQzrI5aaZhg4kY/OFMjbirIc1M5e1pF6FdPRuLmqHBikrgZH2ZNUm/6pTYJNvpz8NwYRG5Y1+pEUkY8kTRsrw+olSpHhsO8z8iM1okhbswMxexG7HE6bL8SZYZio9lSRjcaFpU7ElBFRDiW4FHgphrjyM1YDmkpdo2SJEmomNLOw9OXT3ey+WyFipsMPW4Y6kBPi5EbZbvWWg///HoCYzwtNRHaJoi4dcUKzG/jkJBW2GB
nKmdPqwj96nqSYFBlqyJW7ghUlomrkrngRHUO08fmZG95UAi5VEvVGKzQRR9WOJ40TTvU+9y8JT2QuiEe1zwxRnCYGYp56sTmfhsA3KyaNKyWSt9/FiE5IowfYMeYQ0pPsfBGfpHskZsRAxO2mgZL/b5ZV18euSlwLEKmSilGW46em/5RNRrEzPZHhMjNhEtLcc+NNi1l94WAnon1V7MJ+tCw6O9gqwTWcM3ndsBRZW58u3LuvHbccuE8TKr34uTuppK+VkNO4sbccwOkVuZmaQcAmNfZgAMDqZvKSd2NGqN0NaOmpbR9brh3xOZ+G0AVN0ZN/IwWPTxyI0RIxB43+msJ+wxCBqZrPdxQLNzs9fOlQrzxnfazZQu4gaC1aIoZmRr4MVgPqmMjVj03Uf57LCWz91iQR8vsXlFXbPhQXsVQzBv4Vdh1Y2L91WxCjVttNAVoxQ07kdgMoIl2YpUTSZLwhY/MHJfXKtRQLEkSPC4HovGkIm6iyvOmh+vnTW7AC+/1AgDOnNVW8L5XCuxzS/PcVEilFCCIGwvjFwB1FMDe/iDiiSRcTgcXFEYjQ3JJSxnN49KPYAiZ9IaxcrxbIVMDP0bOkRsubrz882ApXJdDmnDXYOYpZCJajdzY/3wRqSwpViVIkqRJH4hpKRYCZZNv7d5kjMgPS6XgWcZv+IQuxcyYzOaWiVxy6hS01nowqd6LFWdMLWi/KwmztJRaBWN/L4XToImf2eBMIOWnqvU4EY4lsUtpTHc0w6DJXNJS+lJw8Xv2M7O0FDvehwoUN2paytxzwyI3A8GopbES/cJzsoXme4dT3ZSbaz2mxuVqxasb7ZKpOs/OVNbeVhHiKqqrUfVAMHHTy8VNZallwhrsYi9OWtbDq6XMxI1gmGU3DaOZV1Nb/Xjta+fixa98DO31E6MMHFBN13pD8YjBTdquZIzcGHixHA4JC6c0AQC++dx7CIRjXNwYTYLnkRsLaSkjzw1La7GozphJWooZ6FmkMV+Y+XdShkaaLco5kJTVKkIrz9la68Vk5VrMozm15iKqWtGPdqnEuVIAiZuyIR4o8yarlTlsNcnynZVwASZyR6yQM4veZPLcAKq4OTYa4Tc/s2nlbqejIiIVxcSsid9ohuZsdsNY3CieGxMPxOrzPgSP04ENu/tx67Nb1Y6+BpEbtRQ8hyZ+vvS0VJAbilmfG+0xW+918fdSSGrKiqHY5XRw75mVXjcsfdVS60Fno/Z5WyaguGERQX3kptI8N5W1t1XECV2p/iNtdV7elwFIFzN+EjdVidvp4DcAs4t9OIu4YWFiVl7r9ziL0oOnWqgTypTF7uc8LVUB55bxVHDzyA0AfHh6C375hdSA4t+9fYRPhTeKdvAmfhbSUkH+uakiuU4wFCeTMo826tNSkiTxfjFDGXo7ZcOKuBF/3p+l100yKXNPUludh5esM/SVrRMBfeQmzDvlV5ZcsP/ZXaXc/on5aKvzYNG0Zs3j+rJDo6FtRHXQWONGKJowFTdjJv4FBnucldc2mXRsnaiwSFVSTjXu46MJKmSuFCA28VMfU1fS5teG02e04MxZrXj1g368tnsAgFqOLVJjMS2VSMq8+aFoxGaLr2A0rnkOo3R6k9+DwVAs78hNTOjnlMlzA6R8Nzv7tP3CjBgai4EFxZprPakIp9fFj5G2CShu9J6bcIbWA3amsqRYFeFzO/GV8+bgnDntmsfTxE0FXICJ/MhWQZJtthjzOxxk4sbAbzOR8bnVDrqsIVvq+8ozFIsTmln/Ebcrs9GVTYVnzBXS34xM87dEgkLaSkxLidExto0kGUcbrbQ/yASLsDgdkqG3TKS11lrpOYvsNAmz5KYoFWcANF3LJwoschNNJJFMyhlbD9iZytrbCUCdV3vBpchN9ZLtYp8tHMyE78HBlLgxqpSayEiSxBcL4iBJI++IXWF9aZIyeGqN+fGy9Sv6u7ntmgnwM9vSb9Q8cpNl/AL7zNxOSZP6rBWqrUJC/yC
jCiNeMWXB5GvEIUXEt9d7s/b+EiumMnFsNN04fJIysgRAUebIVRqiiIkmklkLG+wKiRuboY/ckOemerEauTHz3DC/w6FBityYwQSkGLmpJM+NS7iJs/RJzGJTNYdDwh0XnYBprX585e8/ZFjtYjZ/S49Z+byYlmLbmFV4Nhkc77Is45Zn38E1v9ysmWFkxP7+1NDNqUJkxQxmBO43ETeHhsZwdCTCzcSi7/Gs2aleUDVuJ/dGTiTE4yQiVLdVWuTG/mf3BMPvccIhqRcyo46zRHVgdLEXyWYoZpEbtqKlYyUdtlgIVHjkBkilppwOJxc3zI+TifNO6MR5J3Sa/rzG4lRwdc6S9jMT01Ih7skx/lyNejv9dVc/fvnafgDA8q1HcNHJx5nuwz5F3ExrzS5uWCTGyFC891gQF9z/FyRlGcuVz6ZdMFv/w8LJaK3zYFprrenk8WpGFM3ReJIbiylyQxSEJGk7YrbUZq4KICoXy54bk5Ww3hCrH4pIqJU9YloqwErBKyBy4xTSO8lkKtJhNS1lBaueG7PeQGqH4gT33Jh1fmZtCsRGfpv29PPv1+84mnEf9hxLNSWcZmHwK4vEGKWlHt+4D6FoAuFYEs9uOaw8pyqYJEnCmbPacFzTxJjBpod1PwdSbQcqtVqqsvZ2giA225qITaQmClzcmJTG8mopkxWTvlGaWY+biUxDTeozEgWkmmKpAHEjRG4SgrABiiRuTEZU6Bk1qTCr5ZGfOC8VN+uqbiTmPzgW5N+/e3g44z7s6E2Jmzkd6cZoPZnSUm8dTH+d7ubs0aCJhFgxFcnQEdvOkLixIWLJ5kRsIjVRYF1bzdNSmcPB+hVytgqSiQiLfA4IzdwqVtwkZE3VVDGaqjHhnD0tZey5qRXTUoqh2Cwi1sANxerxvueoKm529Y3yyhw9sUQSH/Qp4qYzu7hR01JacSPLMh+tIGIlGjSR8Aq9bihyQxQNsZMoiZvqJVtaKpvnRn8ToWqpdFg/FGYclWVZmJFk/89LTEslZBmxuBi5KXzmkdjnhlVjMQHAvD2A6lNK78OldigOmnQnZjT40s3dh4fH+PdJWa380/Pe4QCiiSQaa9yW0kUsLTU8FtO8j6OjEYxG4pAk4GMfmsQf1/cbm+iIXYrVUnCK3BAFMqlBFTftDeS5qVaylYJnq5Zq0JkdqVoqHf0KPhxLIq649SvFUMz0TSIp8zk/gDaqky/s2JJltSPtt36/DRfc/xf8+M8f8O3MGh+y6OFYLMGjO7UmaSmWImTbReIJHsVh0er9A6G03+sbCeOvHxwDAJw2rTlrGTiQMuuzzQaF1NSBgZR46mqswfc/dRKWzuvA3f+0oOLmJpUasUtxhEeQK+szsv/ZPQG59MNT8X7PCFaeNaPi1DJhnUyRG1mWBUOx8UVF306f0lLpsBU8a9s/Ekl91pIE+Cuk+sMpSYjLMhJJGUkluuJxOooyrVoUzmPRBDxOBx5+ZQ8A4Ccv7cJ1S48HYF5hJlZG9Y2E0x4TYZEbZuhmgtPtlLDguEYcGQ7zcm/Gmq09uPaJN/hsrY8c32bpfTkcElpqPTg2GkV/MMp9jAcU8dTdUoNJ9V787PLTLD3fREP03ITjmZuJ2hUSNzZkRlstfr7y9HLvBlFijEpjGZF4EmyckFnkRj9fp4XETRqsmRubjD0qVP1YiQDYAYfSGyIhy0jwSqni7LvL6YDH6UA0kcRYLKFJ4YRjSQwEo2ip9ZiWgntdDjgdEhJJGX2B1GdsVi3Fxc1Y6m/A/iaT6ry8WmmfIG5kWcY9z2/nwqbG7cQFCyZbfm9M3IgVU6xtwnFNZCDOhFgtxSI3ldbnprL2liCqCCZuRiJxzdRnQFu9YrZiEiM3LofEDcqECjOK7h8IIZZI8htdJVWWsUZ+SSEt5S7ijYalG8ZiCX7zZ2zvSZlvzRofSpLEK6b6Rpi4yZyWYiKKi5t6L6YKfyfGB0eD+EAxHF9x9gw8edUZmkrSbDC/4jGh1w3
73miIKKFSDZEbEjcEUSbEBmH66A2rlHI5JNOSX3E0h2y4BdHV6EON24l4Usbf3/sSdikVN+311m+S5UadLyWrDfwcxbt0iyMYDg+FNT/bfmQEgFAKbuBTYmKGiRWzkTGiX2ckHOdDLSfVe3nX4f0DavXU1kOpku3TpjXj1n+Yj5O6m3J6X0a9bkRBRZjD7BCpaimK3BAEkQNup4NXluh9N9nMxEBq1cwuOOI8HEJFkiTMUwZG7u0P4bbfvAtAW5Fod1j6LCGIG0+R0lKA2pdmLJbgvhkGq14KZKgwY+KG/a5ZnxuX08GFT2AsxtNYorg5MDDGq7a2KVGjeZMb8npfzExO4iZ3PJo+N0q1FEVuCIKwipmpmDXwM+tOzFhz/Uex/IROfP3CeaXZwSrg2/+4gH/OLK1TSTc3npaSZWEieDHTUmrkZkjXULInkBI3oyaeG0AVN6zBoFlaClAr/ALhGI6OpsTQpDovr5YaiyUwqOzDNiVqZDTN3ApqWkoVNywtxVoEEMZ4NR2KlWqpCituIXFDEGXEVNxYiNwAKfP5g59bhEXTWkqzg1XAvMkN2PT1czVpkfYKEjcschNPFHf0AqNG8Nyw43DmpJQHpmc4JUDMOhQDQENaBZX5MSuaisUois/t5IKTDYLdfiQVuZnbmWfkhqelVM8Ne81K+vuXA49BEz9vhZWCV9beEkSVYdbrJlsDPyI3vC4nPj63nf9/YY7+jXLCPDeayE0xxY1HjNykohzzFEHBxY1JEz8gvb9S5sgN63UT40KDVf2x5nwHB0MYCEa5QXmuhY7ERqT3OErw9Jq+0pDQInYo5oMzKXJDEIRVzCI3ldry3M78fx+diRq3Ex0NXpwxs3IiXU4Dz02xSsEBoMatem7YUEsmKHpHIojEEwgqaVL9+AUAaKwx7lpsRL1PTEspURSlUemU5pS4OTQ0hh09qZRUd0tNRrGUCb3nhs2ZcjulCTntOxdEz02lRm6ozw1BlBF9YzMGb+BHkZuiceJxjXjphnPgdEgV1RyTiZt4UkY0XoK0lCfdc3N8Rx3vX7NTGVgpSTAUBfpp9JnTUuogU2YoZpVrxzWzyM0Yv7l+qD2/qA2g9jhiokaMFBWjAWI1I1ZL8chNhV2LSNwQRBlRG/nFNY9zQ3GFXVDsTi59UuyCUzAUs8GZxY3cqJ4b1pKg2e/BpDovegJhbFO8L001bsORD/qeQa215ikfloY9PBTmN02WIprSpEZu2Ps83sIEcDPY0FQ2X4oqpazDxGU4lkCUp6UqK3JTWXtLEFUG8yDo01LBDAZOYmJhnJYq3qWblW6HonGelmrye9CpVDCxqiWz8R5iNKfW4+SRICOYZ+eDo6loUL3XxbcXIzfvK9GiD3XU5femkD5fSq2UInGTDea5YXPAgMorBacrJ0GUEbO0FPM4ZArxExMDZihOJNWp4MUUN+wYGwnHuaG4ye9GZwMTN6nITXOtsbhpFcqq27JERVg0hQkmcUjwjLaUkNl9dJRHLD9UQORGP1/q2AiVgVuFRW7E6xJFbhQGBgawYsUKNDQ0oKmpCVdccQVGR0cz/s4555wDSZI0X1dffbVmm/379+PCCy+E3+9He3s7brjhBsTjcZNnJAh7YzZfipXe5mumJKoHsYlftASGYtaYr2c4DDYFpLHGzY2+7/dmjtywEReAauI1gwkmPgZBiKJ0N9fA43IgEk9ieCwGr8uB2e35R24AtddN/yhFbnKBeW7YdcnpkOAqoqAeD0p25VyxYgWOHDmCtWvXIhaLYeXKlbjqqqvwxBNPZPy9K6+8EnfccQf/v9+vDjhLJBK48MIL0dnZiVdffRVHjhzBZZddBrfbje985zuleisEUTIaTMQNpaUIBmvil5BlxEuQlmIjFVg3Yp/bAZ/byXvBMEOumXBh3YUBZDXqdjRohYVmPprTgdmT6vCeEik6a3ZbwZ6zlP9nFP3BCG/mR+ImOywtxdLllRa1AUoUudm2bRvWrFmDn/3
sZ1i8eDHOPvts/PCHP8STTz6Jw4cPZ/xdv9+Pzs5O/tXQoDZweuGFF/Dee+/hl7/8JU4++WScf/75uPPOO/HAAw8gGo1meFaCsCc8chPWRh8pckMweORGaOLnKaK4YcMwDw6mhlay6ie98VYvTBhupwMnK32DvnjOrIyv1aEzdOtf48KF6tTvz5w+NcueZ6elTo3csNLzbKkzQk1VMkGYyUdlV0oibjZs2ICmpiacdtpp/LGlS5fC4XBg48aNGX/38ccfR1tbG0488UTcfPPNCIXUKbEbNmzAggUL0NHRwR9btmwZAoEA3n33XdPnjEQiCAQCmi+CsAPMUGwWuSFxQ7AMVEJW01KuoqallGNQEdhMcOuHi2aqNPvp5xbhd/9+Ns6d12G6DZASN2LFlf41Vp41HVd9dCZu/8R8LJ3Xrv/1nGkTet0YpcIIY5jJnH1mlRhBLske9/T0oL1de2C6XC60tLSgp6fH9Pf+9V//FdOmTUNXVxfefvtt3HTTTdixYweefvpp/ryisAHA/5/pee+66y5885vfzPftEETJYIbi4bEYZFnmYf1gJGUoriND8YSHTQBPlqhaSj/pm5V2dyml2Qx91EWkvcFnqcze43JgRlstn87OKqQYfo8LX7ugeHPSmIFZNBRPqidDcTb0jRiNpsHbnZzOkK9+9atphl/91/bt2/PemauuugrLli3DggULsGLFCjz22GN45pln8MEHH+T9nABw8803Y3h4mH8dOHCgoOcjiGLBVsnxpMwb9wFCWipDt1diYqBoG8RLJW50q3KWthG9NIB5WipXjhdMwid05Tc3yiosLdUzPEajF3LAr1tU1RtMg7c7OV05v/KVr+Dzn/98xm1mzpyJzs5O9PX1aR6Px+MYGBhAZ2en5ddbvHgxAGDXrl2YNWsWOjs7sWnTJs02vb29AJDxeb1eL7xeOqAJ++H3OHkn2MBYnIeDyVBMMDRN/JjnpogGT/28KJbKqfE4IUmArFRQFVq5xLhw4WT8cWsPmv1uTBcqrUoBey+sbw6NXrBGNURuctrjSZMmYdKkSVm3W7JkCYaGhrB582YsWrQIAPDiiy8imUxywWKFLVu2AAAmT57Mn/fb3/42+vr6eNpr7dq1aGhowPz583N5KwRhCyRJQoPPhcFQDMNjMd44jTw3BMOphG5KVQqu71/TKkQ2ZrbV4oOjQQCqD6NQ/mFhF2q9LrTWegw7HhcTVgp+aChVCdZaS6MXrOD36CM3lXcdKomheN68eVi+fDmuvPJKbNq0CX/961+xatUqXHrppejq6gIAHDp0CHPnzuWRmA8++AB33nknNm/ejL179+K5557DZZddho9+9KNYuHAhAOC8887D/Pnz8bnPfQ5vvfUWnn/+edxyyy249tprKTJDVCxqxZRqKqZqKYLBdIyYlmI+nGJQ73VpBrSKaZuHL/8wPjy9Gd/754VFez0A+Picdiyc0lTU5zSiVdewr6Ox8sZvlAP9dafqIze58Pjjj2PVqlU499xz4XA4cMkll+D+++/nP4/FYtixYwevhvJ4PPjTn/6E++67D8FgEN3d3bjkkktwyy238N9xOp343e9+h2uuuQZLlixBbW0tLr/8ck1fHIKoNPS9bmRZ5h2KKS1F8LSU0KG4mGkpSZLQXu/D/oHUtVgsz57eVounrj6zaK813ujnXHXrDMyEMfrITSVeh0q2xy0tLRkb9k2fPh0yS+YC6O7uxksvvZT1eadNm4Y//OEPRdlHgrADLHLDGmZF4kkklFaxNH6BcApN/GIlGJwJpEYSMHEzc1JpfTDjSZPfjQafi5uJpzT7s/wGAaSa+DEvIFCZkZvKaztIEFUGny+liBuWkgKoWorQD84s/mwpAHAIPpRpLdUjACRJwrzJakWWvgKMMEaStMbrSqwwI3FDEGVGnQyeEjXMTOz3OHl3WmLi4tAMzmRN/Ip76b75grnwuhw4c1Zrxc0QygbrngwAHzm+rXw7UmGIA0b1naQ
rAVoWEkSZadAZislMTIi4NJGblLjxFDkttWhaC/5y08d5FLGa+OLHZ+Ptg8OY3V6HborcWKatzstL6CuxqzNdPQmizOjTUmp3Yjo9CbOp4MWPruhHIVQLjTVu/OqqM8q9GxWH2CKgEiM31RV/JIgKRG8oVnvckJmYAJySOBW8NJ4bgtAjFvyYTYS3M3SGEESZ0aelBkOpSbzN/sq7oBDFhw3JLNVsKYIw4jOnT0WDz4XvXrKgIn1YFPcmiDLT4GOTwVMRm4FgStw0kbghoBqKtbOlyGhOlJaPHD8Jb91+XsV2dK48OUYQVYY+LTUUSv3b7K8+cyeRO2ITv2gJZksRhBmVKmwAEjcEUXYoLUVkgombUk0FJ4hqhM4QgigzrFpqNBJHMikL4oYiN4TWUEzihiCsQWcIQZQZ1sRPloGRcByDQSUtVYEVCkTx0c6WYn1u6NJNEJmgM4QgyozX5eRTmQPhGI/ckKGYALRpKea5cbsq1wtBEOMBiRuCsAHMVDwUipGhmNCgidxQWoogLEFnCEHYAGYe7g9GMECGYkLAYeC5obQUQWSGzhCCsAFs6u6hoTFEFV8FeW4IwHi2FEVuCCIzdIYQhA1oUYTMrr7UoDq3U0Kth8YvEOoE8GhcRoyPXyDPDUFkgsQNQdgAvbhp9nsquoEWUTxY5CYcT/DH3NTEjyAyQmcIQdgANpju3cMBAEBnY3VOaCZyh82WGouq4oY8NwSRGTpDCMIGdDSkxAybK9XZQOKGSMHSUqFonD9GnhuCyAydIQRhA7qaajT/n0yRG0LB7dBGbhySWh5OEIQxJG4IwgZ0NWnFTGdjjcmWxERDjdykxA1FbQgiO3SWEIQN0EduZk2qLdOeEHaDGYrHYilxQ34bgsgOnSUEYQN8bic6Grz8/3M668u4N4Sd0BuKqVKKILJDZwlB2IQzZrby77ub/WXcE8JOuBypy3RQMRRTjxuCyI6r3DtAEESKqz82C6FoAld/bCYcZBglFJiYCceoOzFBWIXEDUHYhHmTG/DQZaeVezcIm6GvjCLPDUFkh84SgiAIG6OP1FDkhiCyQ2cJQRCEjXHpIjduF6UsCSIbJG4IgiBsjIsiNwSRM3SWEARB2Ji0yA2JG4LICp0lBEEQNsblJEMxQeQKnSUEQRA2Jt1QTJ4bgsgGiRuCIAgbo09LeahDMUFkpWRnycDAAFasWIGGhgY0NTXhiiuuwOjoqOn2e/fuhSRJhl9PPfUU387o508++WSp3gZBEERZoVJwgsidkjXxW7FiBY4cOYK1a9ciFoth5cqVuOqqq/DEE08Ybt/d3Y0jR45oHvvpT3+Ke+65B+eff77m8Z///OdYvnw5/39TU1PR958gCMIOUBM/gsidkoibbdu2Yc2aNfjb3/6G005LdVz94Q9/iAsuuADf//730dXVlfY7TqcTnZ2dmseeeeYZ/Mu//Avq6uo0jzc1NaVtSxAEUY3oDcUUuSGI7JTkLNmwYQOampq4sAGApUuXwuFwYOPGjZaeY/PmzdiyZQuuuOKKtJ9de+21aGtrw+mnn45HHnkEsixnfK5IJIJAIKD5IgiCqATcDl1aipr4EURWShK56enpQXt7u/aFXC60tLSgp6fH0nM8/PDDmDdvHs4880zN43fccQf+7u/+Dn6/Hy+88AK++MUvYnR0FF/60pdMn+uuu+7CN7/5zdzfCEEQRJmhyA1B5E5OZ8lXv/pVU9Mv+9q+fXvBOzU2NoYnnnjCMGpz66234qyzzsIpp5yCm266CTfeeCPuueeejM938803Y3h4mH8dOHCg4H0kCIIYD/Rihjw3BJGdnCI3X/nKV/D5z38+4zYzZ85EZ2cn+vr6NI/H43EMDAxY8sr87//+L0KhEC677LKs2y5evBh33nknIpEIvF6v4TZer9f0ZwRBEHZGL2YockMQ2clJ3EyaNAmTJk3Kut2SJUswNDSEzZs3Y9GiRQCAF198EclkEosXL876+w8//DA++clPWnq
tLVu2oLm5mcQLQRBVicMhwetyIBJPAiBxQxBWKInnZt68eVi+fDmuvPJKPPjgg4jFYli1ahUuvfRSXil16NAhnHvuuXjsscdw+umn89/dtWsXXn75ZfzhD39Ie97f/va36O3txRlnnAGfz4e1a9fiO9/5Dv7jP/6jFG+DIAjCFvjcTlXckKGYILJSsj43jz/+OFatWoVzzz0XDocDl1xyCe6//37+81gshh07diAUCml+75FHHsGUKVNw3nnnpT2n2+3GAw88gC9/+cuQZRmzZ8/GvffeiyuvvLJUb4MgCKLs+NwODI+lvifPDUFkR5Kz1VFXIYFAAI2NjRgeHkZDQ0O5d4cgCCIjH7vnz9jXn1oIfvOTJ+DyM6eXd4cIokxYvX/TEoAgCMLm+FxO/j15bggiO3SWEARB2ByfW71U13qdGbYkCAIgcUMQBGF7vG5V0NR5S2aVJIiqgcQNQRCEzfEJ4sbvIXFDENkgcUMQBGFzfC71Uk2RG4LIDokbgiAImyNGbshzQxDZIXFDEARhc0RDMUVuCCI7JG4IgiBsjuiz8ZO4IYiskLghCIKwOVNb/Px7v5vSUgSRDRI3BEEQNmfxzBb+vcNBs6UIIhsU3yQIgrA5J3Q14scrTkVLrafcu0IQFQGJG4IgiArgggWTy70LBFExUFqKIAiCIIiqgsQNQRAEQRBVBYkbgiAIgiCqChI3BEEQBEFUFSRuCIIgCIKoKkjcEARBEARRVZC4IQiCIAiiqiBxQxAEQRBEVUHihiAIgiCIqoLEDUEQBEEQVQWJG4IgCIIgqgoSNwRBEARBVBUkbgiCIAiCqCom5FRwWZYBAIFAoMx7QhAEQRCEVdh9m93HzZiQ4mZkZAQA0N3dXeY9IQiCIAgiV0ZGRtDY2Gj6c0nOJn+qkGQyicOHD6O+vh6SJBX1uQOBALq7u3HgwAE0NDQU9bkJFfqcxwf6nMcH+pzHD/qsx4dSfc6yLGNkZARdXV1wOMydNRMycuNwODBlypSSvkZDQwOdOOMAfc7jA33O4wN9zuMHfdbjQyk+50wRGwYZigmCIAiCqCpI3BAEQRAEUVWQuCkyXq8Xt99+O7xeb7l3paqhz3l8oM95fKDPefygz3p8KPfnPCENxQRBEARBVC8UuSEIgiAIoqogcUMQBEEQRFVB4oYgCIIgiKqCxA1BEARBEFUFiZsi8sADD2D69Onw+XxYvHgxNm3aVO5dqijuuusufPjDH0Z9fT3a29tx8cUXY8eOHZptwuEwrr32WrS2tqKurg6XXHIJent7Ndvs378fF154Ifx+P9rb23HDDTcgHo+P51upKO6++25IkoTrr7+eP0afc3E4dOgQPvvZz6K1tRU1NTVYsGABXn/9df5zWZZx2223YfLkyaipqcHSpUuxc+dOzXMMDAxgxYoVaGhoQFNTE6644gqMjo6O91uxLYlEArfeeitmzJiBmpoazJo1C3feeadm9hB9zvnx8ssv4xOf+AS6urogSRKeffZZzc+L9bm+/fbb+MhHPgKfz4fu7m5873vfK3znZaIoPPnkk7LH45EfeeQR+d1335WvvPJKuampSe7t7S33rlUMy5Ytk3/+85/LW7dulbds2SJfcMEF8tSpU+XR0VG+zdVXXy13d3fL69atk19//XX5jDPOkM8880z+83g8Lp944ony0qVL5TfffFP+wx/+ILe1tck333xzOd6S7dm0aZM8ffp0eeHChfJ1113HH6fPuXAGBgbkadOmyZ///OfljRs3yrt375aff/55edeuXXybu+++W25sbJSfffZZ+a233pI/+clPyjNmzJDHxsb4NsuXL5dPOukk+bXXXpP/8pe/yLNnz5Y/85nPlOMt2ZJvf/vbcmtrq/y73/1O3rNnj/zUU0/JdXV18g9+8AO+DX3O+fGHP/xB/vrXvy4//fTTMgD5mWee0fy8GJ/r8PCw3NHRIa9YsULeunWr/Ktf/UquqamR//u//7ugfSdxUyROP/1
0+dprr+X/TyQScldXl3zXXXeVca8qm76+PhmA/NJLL8myLMtDQ0Oy2+2Wn3rqKb7Ntm3bZADyhg0bZFlOnYwOh0Pu6enh2/zkJz+RGxoa5EgkMr5vwOaMjIzIxx9/vLx27Vr5Yx/7GBc39DkXh5tuukk+++yzTX+eTCblzs5O+Z577uGPDQ0NyV6vV/7Vr34ly7Isv/feezIA+W9/+xvf5o9//KMsSZJ86NCh0u18BXHhhRfK//Zv/6Z57J/+6Z/kFStWyLJMn3Ox0IubYn2uP/7xj+Xm5mbNdeOmm26S58yZU9D+UlqqCESjUWzevBlLly7ljzkcDixduhQbNmwo455VNsPDwwCAlpYWAMDmzZsRi8U0n/PcuXMxdepU/jlv2LABCxYsQEdHB99m2bJlCAQCePfdd8dx7+3PtddeiwsvvFDzeQL0OReL5557Dqeddho+9alPob29Haeccgoeeugh/vM9e/agp6dH8zk3NjZi8eLFms+5qakJp512Gt9m6dKlcDgc2Lhx4/i9GRtz5plnYt26dXj//fcBAG+99RZeeeUVnH/++QDocy4VxfpcN2zYgI9+9KPweDx8m2XLlmHHjh0YHBzMe/8m5ODMYnPs2DEkEgnNhR4AOjo6sH379jLtVWWTTCZx/fXX46yzzsKJJ54IAOjp6YHH40FTU5Nm246ODvT09PBtjP4O7GdEiieffBJvvPEG/va3v6X9jD7n4rB792785Cc/werVq/G1r30Nf/vb3/ClL30JHo8Hl19+Of+cjD5H8XNub2/X/NzlcqGlpYU+Z4WvfvWrCAQCmDt3LpxOJxKJBL797W9jxYoVAECfc4ko1ufa09ODGTNmpD0H+1lzc3Ne+0fihrAl1157LbZu3YpXXnml3LtSdRw4cADXXXcd1q5dC5/PV+7dqVqSySROO+00fOc73wEAnHLKKdi6dSsefPBBXH755WXeu+rhf/7nf/D444/jiSeewAknnIAtW7bg+uuvR1dXF33OExhKSxWBtrY2OJ3OtGqS3t5edHZ2lmmvKpdVq1bhd7/7Hf785z9jypQp/PHOzk5Eo1EMDQ1pthc/587OTsO/A/sZkUo79fX14dRTT4XL5YLL5cJLL72E+++/Hy6XCx0dHfQ5F4HJkydj/vz5msfmzZuH/fv3A1A/p0zXjc7OTvT19Wl+Ho/HMTAwQJ+zwg033ICvfvWruPTSS7FgwQJ87nOfw5e//GXcddddAOhzLhXF+lxLdS0hcVMEPB4PFi1ahHXr1vHHkskk1q1bhyVLlpRxzyoLWZaxatUqPPPMM3jxxRfTQpWLFi2C2+3WfM47duzA/v37+ee8ZMkSvPPOO5oTau3atWhoaEi70UxUzj33XLzzzjvYsmUL/zrttNOwYsUK/j19zoVz1llnpbUyeP/99zFt2jQAwIwZM9DZ2an5nAOBADZu3Kj5nIeGhrB582a+zYsvvohkMonFixePw7uwP6FQCA6H9lbmdDqRTCYB0OdcKor1uS5ZsgQvv/wyYrEY32bt2rWYM2dO3ikpAFQKXiyefPJJ2ev1yr/4xS/k9957T77qqqvkpqYmTTUJkZlrrrlGbmxslNevXy8fOXKEf4VCIb7N1VdfLU+dOlV+8cUX5ddff11esmSJvGTJEv5zVqJ83nnnyVu2bJHXrFkjT5o0iUqUsyBWS8kyfc7FYNOmTbLL5ZK//e1vyzt37pQff/xx2e/3y7/85S/5Nnfffbfc1NQk/+Y3v5Hffvtt+aKLLjIspT3llFPkjRs3yq+88op8/PHHT/gSZZHLL79cPu6443gp+NNPPy23tbXJN954I9+GPuf8GBkZkd988035zTfflAHI9957r/zmm2/K+/btk2W5OJ/r0NCQ3NHRIX/uc5+Tt27dKj/55JOy3++nUnA78cMf/lCeOnWq7PF45NNPP11+7bXXyr1LFQUAw6+f//znfJuxsTH5i1/8otzc3Cz7/X75H//xH+UjR45onmfv3r3
y+eefL9fU1MhtbW3yV77yFTkWi43zu6ks9OKGPufi8Nvf/lY+8cQTZa/XK8+dO1f+6U9/qvl5MpmUb731Vrmjo0P2er3yueeeK+/YsUOzTX9/v/yZz3xGrqurkxsaGuSVK1fKIyMj4/k2bE0gEJCvu+46eerUqbLP55Nnzpwpf/3rX9eUFtPnnB9//vOfDa/Jl19+uSzLxftc33rrLfnss8+WvV6vfNxxx8l33313wfsuybLQxpEgCIIgCKLCIc8NQRAEQRBVBYkbgiAIgiCqChI3BEEQBEFUFSRuCIIgCIKoKkjcEARBEARRVZC4IQiCIAiiqiBxQxAEQRBEVUHihiAIgiCIqoLEDUEQBEEQVQWJG4IgCIIgqgoSNwRBEARBVBUkbgiCIAiCqCr+fyNSdcR53dCfAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(ugos[0:1000,213,200])" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "58fe1617-790d-4260-92de-417258ee37a8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(11322, 256, 256) (11322, 256, 256)\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAyQAAAKqCAYAAADLx5oYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOy9ebwlRXk+/lRVn3PvnRkG2YYBZBMVwhYEERQVXACJoqKIawBRNAaJikti1AAaJd/407igRoOICxiDu0ZFENyCqBhQgaiIIIoKCrLO3HvO6arfH1Vv1VvV1X363Lkzc+/Qz+czc8/p00t1dXXV+7yrMMYYdOjQoUOHDh06dOjQocNGgNzYDejQoUOHDh06dOjQocP9Fx0h6dChQ4cOHTp06NChw0ZDR0g6dOjQoUOHDh06dOiw0dARkg4dOnTo0KFDhw4dOmw0dISkQ4cOHTp06NChQ4cOGw0dIenQoUOHDh06dOjQocNGQ0dIOnTo0KFDhw4dOnTosNHQEZIOHTp06NChQ4cOHTpsNHSEpEOHDh06dOjQoUOHDhsNHSHpsGjxta99Dfvttx+mp6chhMCdd965sZvUYZ4477zzIITAlVdeOXbfww47DIcddtj6b9Q8QPdx0003rZfzn3HGGRBCrJdz5/Bf//Vf2HLLLXHvvfdusGuuC26//XYsX74cX/nKVzZaG9b3GOjQoUOH+yM6QtKhNX7605/i2GOPxc4774zp6WnssMMOOPzww/He9753wa91++2347jjjsPMzAze97734eMf/ziWL1++4NdZV2it8bGPfQyHH344tt56a/R6PaxatQpHHHEEPvShD2Fubm5jNxFve9vb8PnPf35jN2OD4rbbbkNRFHjBC15Qu88999yDmZkZPOMZz9iALZsc6+v5lWWJ008/HaeeeipWrFix4OdfH9hqq63w4he/GG9605sW9LyHHXYYhBBj/51xxhkLet2Fwr333ovTTz8de++9N5YvX46tttoK++23H17xilfgd7/7nd/vK1/5yka/h1122SXbt3/zN39T2ffOO+/ES17yEmyzzTZYvnw5Hve4x+F///d/s+f94he/iP333x/T09PYaaedcPrpp2M0Gq3v2+nQocMCQRhjzMZuRIfFj8svvxyPe9zjsNNOO+GEE07A6tWr8Zvf/AZXXHEFbrjhBvzyl79c0Ot97Wtfw1FHHYWLL74YT3ziExf03AuFtWvX4phjjsFFF12ERz3qUTj66KOx7bbb4o477sC3vvUtfOUrX8EJJ5yAD3/4wxu1nStWrMCxxx6L8847b6O14bzzzsMLX/hC/PCHP8TDH/7wxn0HgwEAoN/vr9M1jzrqKHz3u9/FrbfeimXLllV+/+hHP4oTTzwRn/nMZ1qTErqPG2+8Ebvssss6tS+H0WiE0WiE6elpv219Pb/Pf/7zeMYznoHf/OY32GGHHRb03OsT//d//4c999wT3/jGN/D4xz9+Qc558cUX49Zbb/Xff/jDH+I973kP/vEf/xF/8Rd/4bfvu+++2GuvvTAcDjE1NbVBrVl1GA6HOOigg/Czn/0MJ5xwAvbbbz/ce++9uPbaa/GlL
30JF154obc4vvzlL8f73vc+bMxlf5dddsEWW2yBV7/61dH2hz70oXjEIx7hv2ut8ZjHPAY//vGP8drXvhZbb7013v/+9+M3v/kNfvSjH+EhD3mI3/erX/0qnvzkJ+Owww7Dc5/7XPz0pz/F+973PrzkJS/BBz7wgQ12bx06dJg/io3dgA5LA29961ux+eab44c//CEe8IAHRL/ddtttC3ad++67D8uXL/fnTK+1mPCqV70KF110Ed71rnfhFa94RfTbq1/9alx//fW4+OKLN1Lr5gfq/42JdSUihOc///n42te+hi9+8Yt4znOeU/n9ggsuwOabb44nP/nJC3K9hUBRFCiKDTMtf+QjH8EhhxyypMgIAPzFX/wF9t57b5x33nkLRkgOP/zw6Pv09DTe85734PDDD8+6DyqlFuS6C4HPf/7zuOqqq3D++efjec97XvTb7OysJ/iLCTvssEOj9RIAPv3pT+Pyyy/HhRdeiGOPPRYAcNxxx+GhD30oTj/9dFxwwQV+39e85jXYd9998fWvf92/PytXrsTb3vY2vOIVr8Aee+yx/m6mQ4cOC4LOZatDK9xwww3Ya6+9sgRh1apV/vNNN90EIURWm5u6PJC//HXXXYfnPe952GKLLfDoRz8ahx12GE444QQAwIEHHgghBE488UQAwHe+8x0861nPwk477YSpqSnsuOOOeNWrXoW1a9dWrvezn/0Mxx13HLbZZhvMzMxg9913xxve8IZon1tuuQUnnXQStt12W0xNTWGvvfbCueeeO7Y/fvOb3+Ccc87Bk570pAoZITzkIQ/B3/7t30bbtNZ417vehb322gvT09PYdttt8dKXvhR//vOfK8e///3vx1577YWpqSlsv/32OOWUUypxNNdffz2e+cxnYvXq1ZiensYDH/hAPOc5z8Fdd90FwPb5fffdh49+9KPeNYL6sq7/Aaupf8tb3oLddtsNU1NT2GWXXfCP//iPFRe0XXbZBU95ylPw9a9/3cf77LnnnvjsZz+b7ZO5uTmcdtpp3gXjmGOOwR//+Mdon1wMyezsLM444ww89KEPxfT0NLbbbjs84xnPwA033JC9DgAcc8wxWL58eSS4EG677TZ84xvfwLHHHoupqSkAwPe//3086UlPwuabb45ly5bh0EMPxf/8z//Unp+jzbOia/zVX/0VtthiCyxfvhz77rsv3v3ud/vf0xiSuud32WWXQQiBz33uc5VrXHDBBRBC4Hvf+15te2dnZ/G1r32tYn2c5P0FgG9+85t4+MMfjunpaey222744Ac/mI2DaTuerrzyShx55JHYeuutMTMzg1133RUnnXRSpS2HH344vvSlL20UTX8uhoTeA+qPmZkZ7LPPPvjmN78JAPjsZz+LffbZB9PT0zjggANw1VVXVc77s5/9DMceeyy23HJLTE9P4+EPfzi++MUvjm0PvQOHHHJI5bfp6WmsXLkSAHDiiSfife97HwBErlKEtnPTpO98HQaDAe67777a3z/96U9j2223jayX22yzDY477jh84Qtf8GPnuuuuw3XXXYeXvOQlEZn/27/9Wxhj8OlPf3qidnXo0GHjoCMkHVph5513xo9+9CNcc801C37uZz3rWVizZg3e9ra34eSTT8Yb3vAGvOQlLwEAvPnNb8bHP/5xvPSlLwUAXHjhhVizZg1e9rKX4b3vfS+OPPJIvPe978Xxxx8fnfMnP/kJDjroIFx66aU4+eST8e53vxtPf/rT8aUvfcnvc+utt+Lggw/GJZdcgpe//OV497vfjQc/+MF40YtehHe9612Nbf7qV7+KsizHavlSvPSlL8VrX/taHHLIIXj3u9+NF77whTj//PNx5JFHYjgc+v3OOOMMnHLKKdh+++3xjne8A8985jPxwQ9+EEcccYTfbzAY4Mgjj8QVV1yBU0891bso/OpXv/LC8Mc//nFMTU3hMY95DD7+8Y9HfVnX/wDw4he/GP/0T/+E/fffH//2b/+GQw89F
GeddVbW0nD99dfj2c9+No466iicddZZKIoCz3rWs7LWoVNPPRU//vGPcfrpp+NlL3sZvvSlL+HlL395Y5+VZYmnPOUpOPPMM3HAAQfgHe94B17xilfgrrvuahyPy5cvx9Oe9jRcdNFFuOOOO6LfPvWpT6EsSzz/+c8HAFx66aV47GMfi7vvvhunn3463va2t+HOO+/E4x//ePzgBz9obF+bZwVYt6DHPvaxuO666/CKV7wC73jHO/C4xz0OX/7yl2vPXff8DjvsMOy44444//zzK8ecf/752G233fDIRz6y9rw/+tGPMBgMsP/++zfeWxOuuuoqPOlJT8Ltt9+OM888Ey960Yvw5je/ORvv0mY83XbbbTjiiCNw00034R/+4R/w3ve+F89//vNxxRVXVM53wAEH4M4778S111477/YvNH75y1/iec97Ho4++micddZZ+POf/4yjjz4a559/Pl71qlfhBS94Ac4880zccMMNOO6446C19sdee+21OPjgg/F///d/+Id/+Ae84x3vwPLly/H0pz89Szo5dt55ZwDAxz72sUaC9tKXvtRbgmgsffzjH49+bzM3AZO98zlceumlWLZsGVasWIFddtklIuWEq666Cvvvvz+kjMWURzziEVizZg1+8Ytf+P0AVFxBt99+ezzwgQ/Mkr8OHTosQpgOHVrg61//ulFKGaWUeeQjH2le97rXmYsuusgMBoNovxtvvNEAMB/5yEcq5wBgTj/9dP/99NNPNwDMc5/73Mq+H/nIRwwA88Mf/jDavmbNmsq+Z511lhFCmF//+td+22Mf+1iz2WabRduMMUZr7T+/6EUvMtttt53505/+FO3znOc8x2y++ebZaxFe9apXGQDm6quvjrbPzc2ZP/7xj/4fP/d3vvMdA8Ccf/750TFf+9rXou233Xab6ff75ogjjjBlWfr9zj77bAPAnHvuucYYY6666ioDwFx44YW17TTGmOXLl5sTTjihsr2u/6+++moDwLz4xS+Otr/mNa8xAMyll17qt+28884GgPnMZz7jt911111mu+22Mw972MP8NnqeT3ziE6Nn8KpXvcoopcydd97ptx166KHm0EMP9d/PPfdcA8C8853vrNwDP1cO//3f/20AmA9+8IPR9oMPPtjssMMOpixLo7U2D3nIQ8yRRx4ZnW/NmjVm1113NYcffnjlPm688UZjTPtnNRqNzK677mp23nln8+c//7n2HuiZcNQ9v9e//vVmamoq6rvbbrvNFEURvWc5nHPOOQaA+elPfxptn+T9Pfroo82yZcvMLbfc4rddf/31piiK6B7ajqfPfe5z2Xc+h8svv9wAMJ/61KfG7jsfXHjhhQaAueyyyyq/pWPAmPAeXH755X7bRRddZACYmZmZaB764Ac/WDn3E57wBLPPPvuY2dlZv01rbR71qEeZhzzkIY1tXbNmjdl9990NALPzzjubE0880Xz4wx82t956a2XfU045pTK+jGk/N/F7HffO1+Hoo482/+///T/z+c9/3nz4wx82j3nMYwwA87rXvS7ab/ny5eakk06qHE/v9Ne+9jVjjDFvf/vbDQBz8803V/Y98MADzcEHHzy2TR06dNj46CwkHVrh8MMPx/e+9z089alPxY9//GP867/+K4488kjssMMOrdwKmpDLrlKHmZkZ//m+++7Dn/70JzzqUY+CMcZrwv74xz/i29/+Nk466STstNNO0fHkomCMwWc+8xkcffTRMMbgT3/6k/935JFH4q677qrN5gIAd999NwBUshN95StfwTbbbOP/kfYSsNadzTffHIcffnh0vQMOOAArVqzAZZddBgC45JJLMBgM8MpXvjLSDp588slYuXIl/vu//xsAsPnmmwMALrroIqxZs6Z1H6ZI+59Sqp522mnRdgpCpesTtt9+exxzzDH++8qVK3H88cfjqquuwh/+8Ido35e85CWRm8hjHvMYlGWJX//617Xt+8xnPoOtt94ap556auW3c
UHFRxxxBLbZZpvIbevGG2/EFVdcgec+97mQUuLqq6/G9ddfj+c973m4/fbb/XO577778IQnPAHf/va3I202R9tnddVVV+HGG2/EK1/5yorb43wDo48//njMzc1FLimf+tSnMBqNxlrubr/9dgDAFltsMa9rl2WJSy65BE9/+tOx/fbb++0PfvCDcdRRR0X7th1P1C9f/vKXKxr5FNTuP/3pT/Nq//rAnnvuGVmlDjroIADA4x//+Ggeou2/+tWvAAB33HEHLr30Uhx33HG45557/Pi7/fbbceSRR+L666/HLbfcUnvdmZkZfP/738drX/taANal7EUvehG22247nHrqqa0y/bWdmwiTvPMpvvjFL+J1r3sdnva0p+Gkk07Ct771LRx55JF45zvfid/+9rd+v7Vr13p3Sg5K+EBuuvS3bt+cO2+HDh0WHzpC0qE1DjzwQHz2s5/Fn//8Z/zgBz/A61//etxzzz049thjcd111837vLvuumvrfW+++WaceOKJ2HLLLbFixQpss802OPTQQwHAx03QQr/33nvXnuePf/wj7rzzTnzoQx+KCMQ222yDF77whQCag/U322wzAKjUbzjkkENw8cUX4+KLL8YRRxwR/Xb99dfjrrvuwqpVqyrXvPfee/31SDjffffdo+P7/T4e9KAH+d933XVXnHbaaTjnnHOw9dZb48gjj8T73vc+3w9tkfb/r3/9a0gp8eAHPzjavnr1ajzgAQ+okIcHP/jBFaH6oQ99KABUajWkBJEEy1wMDeGGG27A7rvvPq9g76Io8OxnPxvf+c53vFBH5ITcta6//noAwAknnFB5Lueccw7m5uZq+7TtsyI//6YxOSn22GMPHHjggZHb1vnnn4+DDz648uzqYOYZg3Hbbbdh7dq12euk29qOp0MPPRTPfOYzceaZZ2LrrbfG0572NHzkIx/JCtTU7iYyV5Yl/vCHP0T/1meAdzq2SWGw4447ZrfTmP/lL38JYwze9KY3Vcbf6aefDmB84pDNN98c//qv/4qbbroJN910Ez784Q9j9913x9lnn423vOUtY9vedm4iTPLOj4MQAq961aswGo18zA1giVbu2c/Ozvrf+d+6fbkSq0OHDosXXZatDhOj3+/jwAMPxIEHHoiHPvSheOELX4gLL7wQp59+eq2AUJZl7fnaLhhlWeLwww/HHXfcgb//+7/HHnvsgeXLl+OWW27BiSeeWKvFzoH2fcELXuAD6FPsu+++tcdT1pZrrrkGf/mXf+m3b7PNNj5Q+BOf+ETlmqtWrcr6/dOxk+Id73gHTjzxRHzhC1/A17/+dfzd3/0dzjrrLFxxxRV44AMf2Oocdf2/PlKa1mUnmq9g3AYveMELcPbZZ+OTn/wkXvOa1+CTn/wk9txzT+y3334Awlh4+9vf7relWKx1Oo4//ni84hWvwG9/+1vMzc3hiiuuwNlnnz32uK222gqAFYr5OJnP+9sW48aTEAKf/vSnccUVV+BLX/oSLrroIpx00kl4xzvegSuuuCJ6BiTMb7311rXn+81vflMh25dddtl6K7pZN7bHjXkaf695zWtw5JFHZvdtSzABG1Ny0kkn4ZhjjsGDHvQgnH/++fjnf/7nxmPWx9w0CYi08Viv7bbbDr///e8r+9I2ssxtt912fntK/n7/+99HqYQ7dOiweNERkg7rBAokpEWCNN5phqEml5y2+OlPf4pf/OIX+OhHPxoFsaeBlA960IMAoDHgeZtttsFmm22GsiznVefkqKOOglIK559/vte0j8Nuu+2GSy65BIccckgjCSM3r5///Of+XgAbxH7jjTdW2rvPPvtgn332wRvf+EZcfvnlOOSQQ/Dv//7vXgiZlFjsvPPO0Frj+uuvj2ow3HrrrbjzzjsjNzQgaHj5dSjgdCFqdey22274/ve/j+FwiF6vN/HxBx10EHbbbTdccMEFOPzww3HttdfirW99a3R+wLqdTDoW2j4rusY111wz8TWant9zn
vMcnHbaafjkJz+JtWvXotfr4dnPfvbYcxKhvvHGG7HPPvv47W3f31WrVmF6ejpbfyjdNul4Ovjgg3HwwQfjrW99Ky644AI8//nPx3/+53/ixS9+sd/nxhtvBIDofClWr15dmRu48mCxgMZNr9db0JpLW2yxBXbbbbdoHqwbS23nJsJCv/Nk1ebEZ7/99sN3vvMdaK0jd8jvf//7WLZsmbfIkBLhyiuvjMjH7373O/z2t7/1CVI6dOiwuNG5bHVohcsuuyyrxSb/cHJZWblyJbbeemt8+9vfjvZ7//vfv85tIE0jb4cxppKhZZtttsFjH/tYnHvuubj55puj3+hYpRSe+cxn4jOf+UyWuKSpaFPstNNOOOmkk/DVr361ViOd9tdxxx2HsiyzLhSj0cgLgU984hPR7/fxnve8JzrHhz/8Ydx1112+bsbdd99dqUS8zz77QEoZuS8sX748m4K2Dn/1V38FAJVMY+985zsBoFK343e/+12UCejuu+/Gxz72Mey3335YvXp16+vW4ZnPfCb+9Kc/Zfu5rWXl+c9/Pq666ipvxeP1Gg444ADstttu+P/+v/+v4oIHNI+Fts9q//33x6677op3vetdlWcx7h6ant/WW2+No446Cp/4xCdw/vnn40lPelKj1YBwwAEHoN/v48orr4y2t31/lVJ44hOfiM9//vNRJfBf/vKX+OpXvxrt23Y8/fnPf670BQmbqTvOj370I2y++ebYa6+9au9xenoaT3ziE6N/842ZWZ9YtWoVDjvsMHzwgx/MWgTGzUU//vGPs7E0v/71r3HddddF7oRUYygdT23nJsJ83/k77rijYm0bDof4l3/5F/T7fTzucY/z24899ljceuutUTrhP/3pT7jwwgtx9NFH+5iRvfbaC3vssQc+9KEPRef+wAc+ACGEr2HSoUOHxY3OQtKhFU499VSsWbMGxxxzDPbYYw8MBgNcfvnl+NSnPoVddtnFx10ANsXnv/zLv+DFL34xHv7wh+Pb3/62156tC/bYYw/stttueM1rXoNbbrkFK1euxGc+85ls/MF73vMePPrRj8b++++Pl7zkJdh1111x00034b//+79x9dVXAwD+5V/+BZdddhkOOuggnHzyydhzzz1xxx134H//939xySWXVFLFpnjXu96FG2+8Eaeeeir+8z//E0cffTRWrVqFP/3pT/if//kffOlLX4qEgUMPPRQvfelLcdZZZ+Hqq6/GEUccgV6vh+uvvx4XXngh3v3ud+PYY4/FNttsg9e//vU488wz8aQnPQlPfepT8fOf/xzvf//7ceCBB/qA5UsvvRQvf/nL8axnPQsPfehDMRqN8PGPf9yTLcIBBxyASy65BO985zux/fbbY9ddd/WBtTn85V/+JU444QR86EMfwp133olDDz0UP/jBD/DRj34UT3/60yOhAbC+4y960Yvwwx/+ENtuuy3OPfdc3HrrrfjIRz4y9pm2wfHHH4+PfexjOO200/CDH/wAj3nMY3Dffffhkksuwd/+7d/iaU972thzvOAFL8Cb3/xmfOELX8AhhxwSaXGllDjnnHNw1FFHYa+99sILX/hC7LDDDrjllltw2WWXYeXKlVG6aI62z0pKiQ984AM4+uijsd9+++GFL3whtttuO/zsZz/Dtddei4suuqi27eOe3/HHH++FrjbxAoAV1o844ghccsklePOb3xz91vb9PeOMM/D1r38dhxxyCF72spehLEucffbZ2Hvvvf07BrQfTx/96Efx/ve/H8cccwx222033HPPPfiP//gPrFy50pMawsUXX4yjjz56UVRKXwi8733vw6Mf/Wjss88+OPnkk/GgBz0It956K773ve/ht7/9LX784x/XHnvxxRfj9NNPx1Of+lQcfPDBWLFiBX71q1/h3HPPxdzcXFQ75oADDgAA/N3f/R2OPPJIKKXwnOc8p/XcRJjvO//FL34R//zP/4xjjz0Wu+66K+644w5ccMEFuOaaa/C2t
70tIjPHHnssDj74YLzwhS/Edddd5yu1l2WJM888Mzrv29/+djz1qU/FEUccgec85zm45pprcPbZZ+PFL35xoxWtQ4cOiwgbNqlXh6WKr371q+akk04ye+yxh1mxYoXp9/vmwQ9+sDn11FMr6SXXrFljXvSiF5nNN9/cbLbZZua4444zt912W23a3z/+8Y+V69Wl/b3uuuvME5/4RLNixQqz9dZbm5NPPtn8+Mc/zqYqveaaa8wxxxxjHvCAB5jp6Wmz++67mze96U3RPrfeeqs55ZRTzI477mh6vZ5ZvXq1ecITnmA+9KEPteqX0WhkPvKRj5jHP/7xZssttzRFUZitt97aPOEJTzD//u//btauXVs55kMf+pA54IADzMzMjNlss83MPvvsY173uteZ3/3ud9F+Z599ttljjz1Mr9cz2267rXnZy14WpYz91a9+ZU466SSz2267menpabPllluaxz3uceaSSy6JzvOzn/3MPPaxjzUzMzMGgE8h29T/w+HQnHnmmWbXXXc1vV7P7Ljjjub1r399lJbUGJsC9MlPfrK56KKLzL777mumpqbMHnvsUUlFXPc8L7vsskoK1DTtrzF2TL3hDW/w7Vm9erU59thjzQ033FBpex0OPPBAA8C8//3vz/5+1VVXmWc84xlmq622MlNTU2bnnXc2xx13nPnGN75RuQ+e8tWY8c+K8N3vftccfvjhZrPNNjPLly83++67r3nve9/rf8+l/a17foS5uTmzxRZbmM033zw73urw2c9+1gghKulS276/xhjzjW98wzzsYQ8z/X7f7Lbbbuacc84xr371q8309HS0X5vx9L//+7/muc99rtlpp53M1NSUWbVqlXnKU55irrzyyuhc//d//2cAVMb5QmI+aX+f/OQnV/YFYE455ZRoG6VWfvvb3x5tv+GGG8zxxx9vVq9ebXq9ntlhhx3MU57yFPPpT3+6sa2/+tWvzD/90z+Zgw8+2KxatcoURWG22WYb8+QnPzlK0W2Mna9OPfVUs8022xghRGWstZmb2r7zOVx55ZXm6KOPNjvssIPp9/tmxYoV5tGPfrT5r//6r+z+d9xxh3nRi15kttpqK7Ns2TJz6KGH1qaF/tznPmf2228/MzU1ZR74wAeaN77xjZW09B06dFi8EMZshFK3HTp02CSwyy67YO+9924s7tdh/WI0GmH77bfH0UcfjQ9/+MOtjyvLEnvuuSeOO+641paVNnj605+Oa6+91mcvW2i88pWvxLe//W386Ec/2mQsJEsJ3TvfoUOH9YEuhqRDhw4dljA+//nP449//GOU6KENlFJ485vfjPe9733Z2Jk2SGs8XH/99fjKV76y3jJZ3X777TjnnHPwz//8zx0Z6dChQ4dNCF0MSYcOHTosQXz/+9/HT37yE7zlLW/Bwx72MF+PZxI8+9nPbpWVqw4PetCDcOKJJ/qaKx/4wAfQ7/fxute9bt7nbMJWW201b/LUoUOHDh0WLzpC0qFDhw5LEB/4wAfwiU98Avvttx/OO++8jdKGJz3pSfjkJz+JP/zhD5iamsIjH/lIvO1tb8NDHvKQjdKeDh06dOiwNNHFkHTo0KFDhw4dOnTo0GGjoYsh6dChQ4cOHTp06NChw0ZDR0g6dOjQoUOHDh06dOiw0bAkY0i01vjd736HzTbbrMu00qFDhw4dOnTosAhgjME999yD7bffHlIuPp337OwsBoPBRrl2v9/H9PT0Rrn2UsCSJCS/+93vsOOOO27sZnTo0KFDhw4dOnRI8Jvf/AYPfOADN3YzIszOzmKrmRVYg3KjXH/16tW48cYbO1JSgyVJSDbbbDMAdsCvXLlyI7emw2LC2Zf/Ckpaq5kUAtJZ0JQEJASUEHA/+/3IylaX36HU8XY6jh+rmKVOCoAfUhoDYwxKbaCN/a5h3LkBbYxvJ7UVCO2lc7ZB0tToOH6/6XmlrG5TGetj6frI34e7p/Q+c21JUfcc6q6dQ9mQk4M/z7RN6
TOwv1XPlduWQkbPXkBJoCckpLD31ncPtKdkbd/y+6jr2/SeCMPSVPpBCYG/PmDhlDaX33g7htpeR5vQLu23AUPXkbkxKN2N92T1fWkDPu6i7zoej+n4S8dY5bzJO0nvIz33UpvK+0/InVNJO+f0lICEQE8KFO75TxUSSthtUtp5SLJ3sXqv8T3rMS+UlMk7Hb3nAoptB+C/16FMLuf7xITPQx2eOY0F3mZ+P/aa8UXHzWvR+1rzrOueHz23wSg8S8LIfS5oPEr73tq/wq8dNBf3hPTtzc3/KZrmzqZ96vpj3FyaO386V6TjOD1nNAeBz5259uTXLAAY3Hcf/v6pj/Ry2mLCYDDAGpR4PnZAfwNHLAygcf4fbsFgMOgISQ2WJCGhSWDlypUdIekQYflmK+NFmIR7kQhEbOaXEwpHKfkAgjDAf0+FpqEOgs6wNNE+ufPnyMQ41AlP6TmCUBKuNY6UpGSE7s0LBBkyQvvVCSKcPOb24/s2LqANxIH3SSTgeKEzf46mZxO3L2zrqTDOvHDqBNAmcpkKcKnw1QQuPPt2SrGgc+OWW4wwLI0dwzqMXW2MFVC1/S1tP4ETEt8PbVk2kFwTlevQtYelriXlfDsJ0qUjc6U2GIw0oJJ3SNlASxUJru5fMs/QswYs+VRCoFDC33NPEVmRUIyMSDlegNW6/rcU0fzUUubi5296r4ik5MZAur0JcRur956ehxPe9FmHuQeuLfZ5Srcf/c0REkLhnikA9ArbafSM+fPNKbRypDJF27mKzjcJmpQbTdfMrRWTzrFpW5UQmHODbjG70/chNzgh6TAeS5KQdOjQBG4Voe/jBMkmNBGQJiGTzq+FQGkMegC0oR3t4ioz2ifb9nkSpmSO5YsOfVZSZBYXZ9ZxwhFpl1NCwResJjLSJJCMW9Dr5FRueUq1yOm9toEUuX6Iz89RJVXxOOP7EBnJjbtcM2Phs6plzrWF+iOMwXpN9LpiulCAc3PQ7tQ9CAxLA2mM32bvQ/hxqLWpvCv5+6lq5DmkFNDauHMYf99ck1v3LHMkdHakPQmZG9mXcOD+8veE/k5xITUlGBnyyUlIT0q/DbCklQRd0tAL0c4KysfEpAn7affwnlbJQ+69Cr/Vn5ueX/ycqogVNw2N9SwytInOnWt7SkZ8mzNkJAdOUGgc9AsJSLqGRAkDKAAQ0bNqmrvSy+aUBylKVH9LFVJ8TUjn6BxB4ftTG9JzltpkxmBV0ZbDfJRnGxMK4y2EC37NCd/X+yM6QtJhyeOd37nBCwkqYxXJabXqULuQZohIblHiC27QEtsFgYiJ1gY9Ke1CoeqFYn4PbVDRZtaY6LWTLKSwi1/ok0BK/HePqmaak5H0GusC7mqTa/9CkBHlnoUUApDUMe36OkdEZINQ4ttoqoLaOCLC7zUdzzL1DcT4NuRw8+33gpolBbDDFisq+wgAPSlhibRgGnUNaUhQ1IykuKaxPvJkhQQudw4pRVbY5QIDCbuAtWpUbr0BvA+94Krr/mkoZlaILCJSoF9I9KSEksCUklDSEo7CkQwiLtNKOqsYHCGh/rPaeCIgwgm3InC4Rs0yd8PRflt1v4i4IIwrY8IcqGsYBnfNqvzGhf2KC1L8nJrAn21OWG4Leo+j80g7H/C5XknRao7gJLQN2rz3hBwZaTOGpcjMb8kaB+TnF6BqsU/XlDqCAljBfVy/pcqz4SK2jHRY3OgISYclD/LRT03rdpvdJ3UN4mij+eEuN6TpJIGj7rx0cOR7rQ1KyYR5bYW0VNieBE3m+Nx5+f5SCAydRpP8o6W296pFncCSCnn110qRrnl1hKvJitB2Yc9brfi3IOBbcsj6poGctCG8qXWELAU5dybe/iZ3C20skeXudUqFa7dx78qhNAYCgITAsOZ4Iaxmv5DKbzMASi29ZnpY2s+zQkMbExETr8nm7j307jhBqkl7zr9rEd4rrjXP3RftB4RYm1IbrB2UmBtpDEYag5G1/ATLCAmzEv1Co
nBEREmB6UJiWU9BCuH+Ast6Cj0l0ZPCWz/6SnrSETTX1JeBhPB5SsCOG04OgUAz+V0SueD3ZxDGkTE2DsAYa5W1MRc8/iK2EEbvs2kmHin4edpqneveezoXt5ipjCVTChFZErxywA0qpew9c2E5J1hPqsiou4dUYTUu3meS+Z73Q+UEiVKkjpRUjm+xndBr4dnE59VebuJdZFBivJvdgl8TAhnjVweGjpB02CTA40VyZGRSIsKP54JSTFCaYx+qcKp/7T471xatg8aXa/w4JtEcpots3qJgF/EgSDhSVGMhyWmkJyUjHOtq1m8bON8ELshzi4k2BkrmgzlzZKQtmogI0Owu09T+qH3zXGQle9Y33X6PF3ZLbTBk/RCd3mnbFQRQ6uDKpSSGpUbPe3kFEtIE7t4VtS0RSImESCa00n6QALSpHRdkCUljCDisNcQSEIot6CvpXLKkt4jYIHVg2llNeioQESKpFdIhbF8HC0m4pp+v3P68r8k2RbDkwto+hHMDNcYSAm0MjLDbSxhI2t91T2mS58j7x6TfN6wE1Wbs07uq/KQUCKqSqFcmzDNkYF0F18glTpvGuTJVFqXn4a6zZDlpmkvHuZBNgqYpO1ojlwAh6bA40RGSDkse5MvNtfxAPKFPsqjw+TRnGYmu5124Gs5ngtaM/O3JzUWaEGMSVgtREU4n1eTl3JtSATv+rp2GjRY7A52Y4nMuUmmzUkJV1++0kLYRQNpcty543rejYZEktwQf82MyFhNUBQQuaDZZ4HKocz+bJKMTtSGnlZ1EKJgpgqQ21FZ4LWGD1I2JA8mBIEQLAa/xsxnErPueKg160lrepLOWKCfpDuchEWWDaaVAWToBTcLFZ9GPiARVOkdwtQuxG2T5IFC8iJICM33lycaynkKhBKaVdBYR4S0jhbRuWBJA4bJrKRlbPDj5AMI4YV5GLGMf7RMfw2F1B8JbRYwx0ACzgtjP3nXOdoKde5iihliOFiTwBlIyKUHm9zXuuMYkFJl327Y5EE47Hwk/5npgLlmqfn7IXb8pyx7dk8qQmXh/9w4mjY+TZwTLeP4cTefPgWlUgIhwjXM9rkPTvNE2K9h0rrMWGazlcgNfE+gsJGPQEZIOSx4pGZmP9jqci31OLCOSCZ0kCDa6bHnQghEEgJy1RKO6aE7qb5xDurBl3R80AGm82d/HkzikgZZNbZlPv49bfOv6oY0Gt06Yj1xNKD5BM+Gn7nwt/Mbn0wd1ZKvOEsKvoUS8Y91ie9e9ayzRQLhD4iMGVhgkUsrJCA/05sdSEwQAIQWs85f2YTmA9PElpbHuWQsRZ2TvkRhREFR9y/z3WMDlsQRF0rFEVGb6Cn0lA+mQEjM9iZ4jI8t6lqRMFeSeFWLXCmfpUNz1CnFcSO55CvabAACjAa3tXwDCaBgRBD0hJJSQgJAwgki0PVHpWIoRgC6DZUYIAWkMBASMe1KTojT1Y2vS7FB15yfkLIpEQDV7T4PLZT0ZiggLbH/Z6wXFg4LIWkXbtTs/qFMykvttEtAx3D2LrCVt0FZR0SY9c2Uft9Ows5B0mCcmorJnnXUWDjzwQGy22WZYtWoVnv70p+PnP/95tM9hhx0GIUT072/+5m+ifW6++WY8+clPxrJly7Bq1Sq89rWvxWg0Wve76XC/BPlu95xbBRfWpIj/tUUavJ76gU+COs36JOdaN1N7SF1JMTb0r6fC9rR92lSvm9vWrg3V/q/z6c79q7s2zyyTPnMgn52G16dJvwdLR/D/T//xe1oIEPGk+ytdTAb98wrQOuuPACjDU09aIXpFP69r0u48/EwGwEjDp/UdlNoLWAJhX20M1gxKzI5KrB3ZGIxhGdrtDRTOtWm6kN6iMFUoTCu7bcr9o/bmEkIQMcr9A5p99EOq1pARy9eHkRJ9JbFiusBMX2Gz6QIPWNbDViv62GbFFLZe0ceWMz1sMdPDVsv62GpZH1vMFNh6WR9bL+thy5keNusXWN6XWNaz/2YKi
ZmewEwhMa0EppTElBKYdv+mlEBfwv8rRPinkn/COBJiYjJCf+kf34e4aKNLDcsMZUmPJUy+5oaM5yVuGebPJPds/NgyJvrX5lnWnsuTyRqrhbQKocLFUNFYon/2Gdh/5GY3XbhYH0c2e4ofIyvzIf9n+40TnjBXpfdN/4alzeRG70h4n02WjOTIUKnDPw5fa8VbseL5dL6udrnYinRcRXOtew45t+YOHSbFRBaSb33rWzjllFNw4IEHYjQa4R//8R9xxBFH4LrrrsPy5cv9fieffDLe/OY3++/Lli3zn8uyxJOf/GSsXr0al19+OX7/+9/j+OOPR6/Xw9ve9rYFuKUO9zf03IrRlII3hzYBgCSU8piRJusIPx9fFCRpcyX5v1vtPLmeAME3vupegOw5c9eke/feCzkzu6psyp4rd31qQ1vz/Ti0yYU/DnUkYVzq5PRZ8Sw9aWMWSqufIhbcTLTNCyEKmBJ53ZGUwX0w1LkQmCnqH7JwY5pbQMg9a25kMCiNczli41oKzA001gxDhWNKZcvT2NK+9pNAX1mi0iuFD3ynWiZDEUtZ47TjKdJ3gbKOUUwQoacAqeFdfNJUplKABaUHsjRdSKzoF9aNS9n3vVACfdfnfWWF+p6bCwQAlAPAAEI7CwdgiQN/fi7blhESIvdcEyJSKUICezp+90JIT0yMAYyIY6LCftY+IsGOl9b9CdJ499KxtW9YvM+4+KA2zzJF2zorANArmieebJwWvdcybAuFPZv7IGx3ipuGDhjnntXsRlv/G1cg8fm41AaQwYJCbWwzN7chIv43r7CrnmchLGXrGxstqL1DIyYiJF/72tei7+eddx5WrVqFH/3oR3jsYx/rty9btgyrV6/OnuPrX/86rrvuOlxyySXYdtttsd9+++Etb3kL/v7v/x5nnHEG+v3+PG6jw/0d4yaXOqG+TbwDoT4jVBBmmwIVx6XRrFu4nZzl29imzXwxaSInk6JWY1lDBOYb+zIp6q5vf6u/77aaRP4MJkWwcMTfQw0FVvk8ifXRTqhIKyNzEEEmMkJpaX9+691Y0bdZongCBgKPERkxC80+22/eeD+XXv9H2zYXk6BtUFRoD2K3LnvvTngTsNnbpAixVSkBz5DyFE1CZg4UAB0pD6jPnIaXUvVaNy1LSIhs2eD2YI0VcFYNIiNkrUhcraK/RD40ACmtGxYQkxUGI+w+ueqG3H0LQsIgZNnSCH1Kge9tYDPrNb8TbearJsxXc5+7VPTON85tEXVzB/MTJxm5GKltam/TPNw+uUfNuefpOjYOdYkjgKqiaRJXsA4dFgLrFENy1113AQC23HLLaPv555+PT3ziE1i9ejWOPvpovOlNb/JWku9973vYZ599sO222/r9jzzySLzsZS/Dtddei4c97GHr0qQO90Oki0FdQGNdResY1foi3BoSn7d6jagdSUPS1Jp17eVtzaGVfy+PJ6g9Ux519UvGXTsX2N1UZ2XdUm7G1wzb26+gTYGuTddMkbahTjjiAd3cTYvcOKhGRnxNEWKJmFuN9G41IckCBVsrITBdiCjbE7/uyF1nrjSYHRkY2G1AU+RMHqUBylLD1UyEEsCUoTTSVpAXro+Uiy8BJKQxCPElzgVFBxc1nh65aZhUa2FUrZUhKUXoN164cKpQUAI+nS/FhihhU4orCWsNAXwRRAFAQQOmhChHnoQI7TrCZCRKYWCofY6UeDKSEAxC7a27fQzCPMQJJgm03sXN1REymTFuLSvxe6oEoqxePBVwI2FZT5ZEjhwRadLY9xAsOXQPUZ0cR1R9NjmWqS0lJfOJ+wjtzlmf8+ebT1y473s3V8QI84bvg8yEliYH4aQkyvKlKY25a28yxyx2kIvkBr3mhr3cksS8CYnWGq985StxyCGHYO+99/bbn/e852HnnXfG9ttvj5/85Cf4+7//e/z85z/HZz/7WQDAH/7wh4iMAPDf//CHP2SvNTc3h7m5Of/97rvvnm+zO9zPMC6L0
bi0iZNiXP7+qIgg0wRzX+AcUu16HXL3MmlxRR7g3RZNWaZC9ipTOWY+pCT3uNbVTWAhFtG6yuy5a5VMuIsK9tVoXbVTf6c1AXx8EGnxXawEkZEUW69cXtm2LkjHuxbOLUrZtuV6VbrUrEPELosAIjJCn+328c9HMuGxjjiTe2dP2ZS9FNtCKXspMJ0IC7llURYtJQUKARd0PrIWjHLkrCOmahkhOAIhjA02z6LGUkJIe4DidohoGEOxBcEyQt02yfBWokGJMGaeWmg0Zc+rIyP1c0GQrqm4Jk+9npKP1Co6aUasNnNSXZHCNmgqithmXWuyltQhR0rs9g0v4HfY9DBvQnLKKafgmmuuwXe/+91o+0te8hL/eZ999sF2222HJzzhCbjhhhuw2267zetaZ511Fs4888z5NrXD/Qw5EpK6yXDQbz2vwmDVfw2cNlOAiwRl4n+VumWNK9bGr92k5a1ousa5pSRom/rY32+DMFI9d3zdpmtFMRoOnPywjZXrpC5PuWu2ubf02v73ltJVrh2ENm5hTVYRKtjH29Kn9FdaZIuTKRF8oXsqpJeVsEHMpC23mZbi8bsuuGtuVKslV0JgqrTCPtXmAGJSSprqoYuNmB1pG8dSat83wZ+/mZTEz98Kmb7adsWaVHXPmnYxI4UM5MNnzRI2A5kUwmfK8k+ZMlwBEKoAtIYxGsQ2hGFWDhobdIwq/Gc6hzYATLX4IdBcnZ2IBwU1+88mFELkKYEB7ipov1t3PZ5EwfgikvQZCBa+Ni5yQF4DP19wYhIu67JMMW091WEZRwbGuWMB7clIDnWJTMZhXd1qq9NpyJyYy8hVlyo839/22Og3XR2bozZFhzp0yGBehOTlL385vvzlL+Pb3/42HvjABzbue9BBBwEAfvnLX2K33XbD6tWr8YMf/CDa59ZbbwWA2riT17/+9TjttNP897vvvhs77rjjfJreYRMEabLGEZFKsDAS03jJSIlLl6lJkPOkBCCxIQ3C5RmRJvWV9mZ0dy8+lWW6QLU05TeRgxwH8EGRWaGvyhPqyMi4JAH8Gvnq9uFjyfokOpespnbOxczk0KamyThM4ltdR0Zs5p2YiOSsd9QfPEA1hfSZ00J613AOu22+vvs53DU7rPXpV1JgbiSjQPGcyyORktIYzBEh0ZoJv/YlJcHY3ktC3BnxIOLTk9K7rHANeghej2NtbMYvm0uMx4mQpUkxywjBPwkiFHoEKBniRwBLTgjkXqXccisLT0S8AsME4lFHOsJ+8XbaRpXZS238OWiuozMEkuPcvXRMOogwesLsP7d3o/NIdloIglKxBJj6xCQ58Dorba8XfW+I7ci5Wa2PrHyTIkdKcu3JkZPUYsuPHR/0v3jRBbUvTkxESIwxOPXUU/G5z30O3/zmN7HrrruOPebqq68GAGy33XYAgEc+8pF461vfittuuw2rVq0CAFx88cVYuXIl9txzz+w5pqamMDU1NUlTO9wPUaf1qiMj/jsLyJUatuaGkgAEhqW22a8EnN87kZQ0g0q8YNM2jqxmPWvVoE9jTO4tXAhybix1wsR8F5ImMpJzNatbCCoEbD3W11qX+JUc+KKf+83+DWORk+ZGQgJAqVgAq1iaNKCFQamtBt+Idbu3/7z6logcpDjpwJ2i7+//3k3+sxXo7XE9pV061eAqxe+D3pU5lx6VSEmd1ShHSJQzCUlNxDROMFGf9GG8cECCfklfYMketUK4ASpV3/a70XYCAWKXLXLFkoVrFy9gaM9mv7vPiIlD7XbkrSDWlcv49tOxBOpHIn1au343IdMUZUIrjfGWEXKjs/dQP8Zy8QcLbS3h2v7SmMDiEqVRfJz9m3OVbQslJws4rxuDC+kmnJvLxmUYHIdcnEmd1aRy7OLnIx0WKSYiJKeccgouuOACfOELX8Bmm23mYz4233xzzMzM4IYbbsAFF1yAv/qrv8JWW22Fn/zkJ3jVq16Fxz72sdh3330BAEcccQT23HNP/PVf/
zX+9V//FX/4wx/wxje+EaecckpHOjrMG6kWP1cVt64ir3dpkTbQ107gGkM4DakOGtboOH4974JTJSR1rkZKxI1OF+1exqpQQDAtZRC86sjJJK4GdXE2TfdRh/TnNutvmue/zmrTVutYF4w6X8tI3TXHVZ1P40WGpa31AQBrBzYIepQQEiUFFB0nw/O1ZEa4sQ1Aawy1QOnGgoFN/zpk9zgp+VozLMe6uXHcPTtEqW2mLkLhyAJVQu+5TFFUB4efj6wgpTYYuOh6ajPvlxwhoarqZWHQVxLauPdCx4XygJo4hORhkkAvXE96NyW/W7UPBOw9CSEgXeiqEPHSaozx6WEj1yodkw0iGrwtRDbsse6vMZ5sjHQYG5bohX1oO0cqmPMkGykhAbUzmWN4oUwOromvQ5M1NodcrEM4NowlKYIrrc5cn8e/pG5n/B7HvS91pIS2KRkrhKrHN5OFtooduqe6gpDzdRvjyLrlNSiKtA7ulosZXVD74sREhOQDH/gAAFv8kOMjH/kITjzxRPT7fVxyySV417vehfvuuw877rgjnvnMZ+KNb3yj31cphS9/+ct42ctehkc+8pFYvnw5TjjhhKhuSYcOk4Dqd6wLtDGAFn4xIWGf3D/q/JJTIT6vQQxuRTzYkNxwKi5JDYtUaUz2ftPYj5zA0ORb3URcci5b/LrrA/yabfy9FyuayE8qdEdjo2E8e19/pyK2MpjT+pY6eiZDrXNlLBqRWkDGgcgIkQkAGCAkLSDCYEmbcM82kLTUSkQCdkpEcn3CSVBpDKSOa42UxqBosDSW2jjlQNhmjA0FKY3xbhZNbj7kEidFUnFdxMHkabwHnZdbO4A8CSGXqzTuA4C3ZEWWYEY6ovtNSDp3nQNQsYa0SQoCtNf4p1nQ6t7rtN2TxDrUuRPVvVKp4mpdUerYfautW1kbRUt1e7P1NHfuOmS5R+awdV1rO3Sow8QuW03Ycccd8a1vfWvseXbeeWd85StfmeTSHTq0AmWIUV6iZbEY5Jql8xolbUIAcOmkOCUBlBiLJqWQlkFbpoSALo2PVUn93O1+1Qwy/lyk1eSLeRkWo1TDVxEgEASISTTnkwSw8ybP11XDFo/kW+qvER2XEMKKdpgLNikRnEdbm4TltC1ccOQoKBbC/eXflYx9nSmuZFQazEJD6RDUu2ZYJlamEJy8Pv2liYysHYxCG13717p76RfK30+R9PMcs4qE/gwvVL1Liqz0vxLBrWhYaigpMCqNrX0C6cajdUPSwkBqQEnl1zb77hkI5gIGcFcp9521hWRPweJ47Hb2LBjhoBiPnHuVod+ZtYNbMXJWDU4o8gqRvBDZRDjaTA0UM2Q/uzGbWJJDBqx6QblNW8f9rjOWq8ZzsN2bkiZUU8rT2K7O+ZGbrHchqyqmoONkHmnyhXTbuPvwMXnraJjIKZ7mwz06vtJhvlinOiQdOiwGKCHAc8fnfucCO2mvSl21LAAJSUnIyMQ+x0IEAiSDNs9rjhOtFpGR2rz6UgDabddBM8szg4X7yLeJLDWTIiewt6mJMh/Q/fNg/3FoIiN1AdHRNVtpEavnaarbkrq61B1fSIGR++uDtdlnDqvVN0AJGztiBJQOAgzvhxEjJOsrqDa1cKTWHm4pUVJW7snuH7tqNX0Ox2uUOrYoldJaSey7JnxfkSsRWZXo77A0UEI7lytynRKeiBCIONBnIFg/PAERxle3t8QknIPIBxDcrSYlHzYzGbOGsPmuDQHn9+J/bxDGU4zL2FeZCyawmkyS8SqH9WFBbbIMp0jfcR/HpGMXrnGFbNsopYAwL6pIzxasgul+0bUWwKOgCUvBmt0FtS9OdISkwyYHTjLCXB5rnkpjKhYT7kZCaLI21CESxCKfDQGpgjWj1MYVDoTVmrnPSuT9pm374QUqIiJExqiiNwmAdQuqk8Va3wfQ3iISbV9HCXiSRXPcrk0FFNu4StD5s/7vSWawtoIMHycA0EdsFZFC+JgLIhT8Pkonq
CoDjHhKahN84YdO4KXg8vUBajOlKeauW/x3lak4HvaRKLWOCB53X+P3TdfpFwp9l7I3Z02ica6NwVADPQBDYXxyCqUBTbEtQng3TdvvMdHhcwN3gePWJxkVrswIkYx4pFnEqiQkJh1pfEPeulHt1yaiUTfP5bX3QfINMUD1fvHjLCN17WhKClJ3TNg/f0Bbt9KgsArPnt7lVKk1DuNSD3uCniEddUqpyPrpPpbGWuJsHRVuKbe/U0KMtL/49RaanGxoQb/DpoOOkHRY8kizWwG0gIbv6cIpNZgAYnwMSVP2o9S33Z+budpwbXC4GM88Q0JE7EtPFg5a+KpCfzieSIn1G7FEhFccBoJr1iRavjqhvU5gXxd3rIVA7jRN2rm6VME54Yl+S69BY40v+qE9prKv/8xTTQsRMmep0LYc+WgT88MDkek3EnTXDkqU2qBfSCzvr5/pvpACRV8FYlVUA4PbuMNxUkIIlhVOSIL7FwW156xJRNgA+z5QMgD+TvRkqIfSyxCmitBv4vGbvqv8naBHXA0iD+OInhmvwZKLDUnnIt6/oxbv06TKlIjkuX92zrLEpEnTPwkZ4agjI7VxJg0WUA7uphraWdd+Ns8mip2cpZ3e6zpLO7lveau8sfN1NI8y6whPkx25wdVYSXp0Tmmv34OIxqh352Ik0vcL/cas0XXzXltIsXBW8vUJAazPJI611+zQjI6QdFgS+M+rb6kIkCkmmUQVk0pK4/x+G/yQ0yxIaQCyci43/twtkGoYrRsJYKcug1ztE2rvOKRavnH7EcZZDhYyfSch99zGXabNsyYhIXdPOcEpd02+jYQSv5L5RvA4Jf4bS7ksqy6CXuCoISJ1blZ02ZymVhuDlx60S2O/LDTe8ISH+s9v/cYvaok7EI/LHJRUiba+ejyPRYmEZpEncERCyH3Lvj82nTe5PVphs2piSAU7bpkgDPm8UeYFca40IasVzwRovwcrSp1CJEdI5uN+mUNWmcK3J+mnuespbQMY2+MKoaRP6uJGJiUi80Fd9il+PW4tmcTqOV/Mx6pArlc5l7em39YHlgIJ6bD40RGSDksCXLM0LkDTfo8XlxysuxRbCJUTXrSAMz9kF/umNKR8n9S9xl6ItrlsSKW10ORSDEsZxwaMg5QCPSTavDG5DdsGpLdx02pjsUgFj7rFkpOupsW6abHlfsJ11hCuncyBaw+964R/lEGQoYBSbZwAwNzowskcOUEQnHtOmO5JiULF1cRtu/i9IhJqScidg14woXRdscWyfvS9yQ2yLmNT7nvqskXfeTphAgmTPFUybR+6oLC61KqtxnCTG1RNoDORDLovnlEsJh/hWaYpkNtYRdKEAXWI3LKYdYm7w5Vsu5ICA2hvKempYAGwTbFCu5/HROhMJUTkUliHSeJZouMaxn7OKlKngEitCHwOtRb0+H1uYyXhaHLjqks8kW5LrW6ceARY0j0JKal6GIwnYLm1QglRqTm0GNHFkCxOdISkw6LGeVfePMZ1JbetWUglRFptbZCzPNRlo8ppEicFd9dyGxAqc9vYAC1sLElp2mvRuIuBPVdCnBpiJupISCtClDH3Ny2GbQSQproGbTV/k5KRbLHKyFRCH0xkNdFMAOMxTF54YBYbIp09JaGEQKEEppXNAtWrcXmxMRBM4tWALwroCpSvq/Z4XdFTovKOAXGcQ88F+ZYsA1GFwNdo6gGg7wQecnOjzxw54ZAXehwiFiY5mlzv6tAUjJ9aOPi2waiM9qvbP3fuFJY0VLc17Q/E7lmc/KTWrPj5uA81WaRKhOPHxVMQ2rpgtUVTRsCcMJ0m0OCKHTqfHmOezlllx4FN+xW0yZCXC1KftCL9pMgNq7aZwTp0qENHSDosamgDv8BBST/JpsL2fDRTlcWGTiENAAklDAbQViPLNJopSUkXdr6NX4uDZ1/xvsbGar6jTCuSrCj12r0oIJIWYXatujoMk2bIGrcwlqa6oNNxsfUK7HNeWx4a1dymp
nbWafDq2u2/lya7b0pU7O+MVLJMb0R0y0RDmbpmkTWkJwV6yhJvS1Lylr2ei3saylBFm85v4yE2rjSwrKdq3XP4s04tZLEwmj93jnzUjck4DTb8NcpkvKXWCv7bJEitGCnRsNtsQ0bJtcd9bos6Apf7zL83WVVKbfzvPHsaHT/UuXmEX6tm+3xclBoOacqal7OSZq0RaQC4Zu82aB0JRKXOZZL/zWXbUmRV0i5bHmAtrBKABqSxad0ppEkLA+ne++aCj2SNc8fpOB20/5yMqXXhfjklTxfU3mG+6AhJh0WNodbeLYPqCgAZLWZmVm0VSBtZAaxJ3m7TgBboK4lSslSm7m9uEc+la21qAxe8ePzK0AdhAlQDJc3zz+MMoE2reIjcPft7bzigbRB6ShKbzP48eJcLqNWLN1dBJzRpJOvIyLjbIgGEjvUa1MiNi+JHAE5KUu1kam0it4ZARqTNhOW+KyeQ1ArchQ1ctxm0bEXxodSYMhvXXWKqkL79ORLKYylISKpoxllgFR2XDs+696pkQmTqwlmaULxxMNK+fgqRh8FINxKBunc6JRJ03tTqkbN0+L+JsGjYvvRZZO6Zb8tZN9M2p4HqFPtWR17S9tZ9T7fVkaMos5RvY9ivqbBfesncEGiyhqS1nvg+/h64GxqLB6M4w1TpMB+LJLd68CQn2tXLkVLAZq52vyXkpP68bh82pureu7Q9bdBENHh/ro84w4VGV6l9caIjJB0WDd76jV94/3Dyq1eST5hBwh0XHDsfUKB70HCbEPPRUs7LLex1qUB5PZQcou2pa4S2vs1+AWWkZBJXpIXG+gigbOMe18ZNwlvDWvhU0352LCRCBOvruvORBrXJzWMSP+bsvSlAa4GhIJ8P6TNubSwcu8/2lW1fvu4P4JYk5cat1RCLEHcDVN4zhYVxj+TIx24EEhFVf0+uXWTe79z5OCHxsSAZDXWOeES/s3ElmPWLxp/RxpMSzX4LWv4x5EBmkgOwWjHjyMm4uJa06Ge/kJE7l21jICaTujvVgSeE4AqJ3NzIXZ5SBcMkc0aTxYRn29KZ9czHFLptGmGOSckJv6foWvy5jCEj85mn27iPdeiwLugISYdFgzRrVaiczoR3MpeXQQs/CcZNqiH7FrnWGB/oroTV3qZuH3X30VoTKOPzcBcTgvaWEkBLWrRDkKlffEWspapz/alDpK1mzZp0Aau45CRacX/eBkLJn20uxoCDxguPOSEyUXFRYNaoJj5bjd8Jwgm3loSgUjtuIAFdBpehttr9OvC6GHSkgU1gwDXscyO9wbV+bdBTAj2Q9cxu0zrEkNS5k9DHHq/zMm4cOjc5SyKdIIjYgsEJw9rBCGsHZdaqkUOdgB6d1xiUIw2jDcpSo3SExDQNNgfTcH+CvcdChnecCIlw773h250bUM4yQjVc+oX0iiCeUjklE/x+iXzwwPtccctARJT7G+rG+AB65dX/WVJSl4CAXyNXKT61iKhEmOfvSnDXgq/rkaZUpzpPbZG1hJRhTGo270tdbT+PgancFyennNslc3aOiMw3ziw3t/LrLgHDiIcS+XG1Xq+5Qa+2NNERkg4bFf900c8iLR2h1Fbr692ANAAZC5zAwk6EUWEsHTRMFWsJ3GIjqwHv44TNHDmQyaI1CUptIoEtagsjI23jP8J3+3cSf2O69YUwWs33mdaRkup+4XNd0L82iNzhKm1saGROIE6vkR5urQb24NIYSOa7nmq6Az2CdefQwqexXcygYFveVvKrL42J3OFy46kuUcN8wDNa2e/19T5SZUndMcPSEZGRhi61LWzoNAxtXR/T7MPCv99sLGo3VKSIrSTaQCLv3kXIBbNzy0g6H9dZo2NXNZ0lctRfuf6ss3xNSkZyyLln0XZ73vh6oZJ6GJ88lkz7uk6BoDS9315pxSwsBD/PJ5ahcCy8K669aGZ7NOdnSHLm/Wh6Z9qsY7m5lc+P2mx4N6gOmxY6QtJhoyJHRghlOvFq4cz97
PiM0JlL9cj3bwsy24cN8UJit+WPtQJnbBlJNV11vuiaFdHKkRQfO5Kcg7sARdq/ujiExMfebot9kO1v7QTA1DOpzk0gF8g+qRUhFZK4oFPNkpQ/R7qfZMJuqk311xXVlLzWBSloVi13FZDSOOtJrBuTApF7XZqVihc4rATmZsYbxZIsdMXl9YU0A1DoYhb835KYABlCScIuEUunTEiDua0AXmVwOUKSCtJ87A4c8SAiYozBaFDCGINyZLfTWONWkibSYPtFuGPA3LJcO5QlIkRK/DFSQEgBVdjsbcHyEawgVExyxhWz5BXveUrl3PxE951L7lFH4kJ/x2Sn79pIyRx4fFwbpMSFv1f+XJn3NT2Wf6fCs9wqqj2BdhYTAJTVzp6YlAb182wTQYgTAwTi49cNlgpe07zBt6FeacIxzu2sTb/XkZIQb4ON7jbaYemiIyQdNije+o1fRN/ryAihQkoAcqr1X+s04W2zuqRZkOoKYqVF7TQTXHOasLQdaUB6uj9lXPHXYNXk+UKSIyPZVLUN99w2MwvtO6mwm7NAjM2o5dBEKPl5cojOPSbuJxWc0sw4beQimQgSkAISwi3KLF6C2u40iirz/EOFbu20soGAUDArZduhY4m0loYEH+ArP7sVWhs8Zc/V429gA+DI3bcFAFz081uj7fl0pZNbQCaJCfKfM9bNNojICo1n556ltcFoWHpXLaPh/ppsfAiQf2+JfJQIlo/AmwSEtK5dInk3uAuXEolblCMkBSMiM/3CExQeu8eVKQB739j7VEhRibVpg8gq48hIT8poXmyaq+usmZFCKiEjTUSkmohDeFLiY3KY1SS4Cgs/X4eA95DRrQnZhACJlYVccen65Kbr1zn/LEJtrnQs1c3ZuWD8SdxIa0kJEBO1RYwuqH1xoiMkHTYomia+1BUidZMIEyBpPN2+CxBsN1Z7lPk9NvXnrx/HjeTiS4IgUXjfZVp4TERO4vOKRsHZrptxe3PxITlrCH3P+fO3Qdby0nCunHWrydUsehY8vqDGBaTSvhq3ErpuLvhVipAZi9cJ4UKA1sbGdRgDJWSljwGglPFYzQanMstYWab3Dt9Ovj9ZtoaLXBrglpGFtOiEYOSq2yUJi7pk758T1ksto+x5udS7UfszloE2EFK0iiHhVhPB3nEhLRGRUkAI4WNIhBRQlLGtsFna+ioQDCIfRDr6hcRM3y77y/oqWFCUhHQEgfozum8ivq6aPPUBxZnMjWTWQkL/ppilpielIyLuN3qvlKxVAuQUHBxprAgnIvNNN0xuhZGLsGBZuJSM4/xMICl+HtXxOlHn6ptzDUy9AyADAVAIro6N7UewMmoTvyfA/NyE6TgZrYnufKXpLCQd5o2OkHTYoJhEE5OSEgDZSbopLmOhA9faasLqNHGcRJCmPBJ+VUwOeLBvnSDPhVqeXSdk3UmEYpOpA5G4V0XnaoG6NMzpdfLIW45y90dtJaGgjpzk0CREpmQmdamK0/EKH58Tja/o2Y0nf/5+EisVLej0/HPPntoWzkHXWryEpC0ZqauXkILun3ZLSUnYx+7YUwAQp/EutdVbcmFaSRGlB+a/831KdkNam2xAel2MARCP65SIAPBEgxMR5Swe0hXRbHLP6hcSy7xblvSuWbRtulBQEp4gAPAp1uvGliUlBkMtI4JSmiqB49YWKpg5VYRCoPQ+TRWBkNjnGM6Rt9zmi9jyY9N3s631IEWIdTJRG0tjnJJIQMv4vS+1YdYNNpc7cjLUOhtXk37296SCEk4KF2/FlHRj3ZJl+EMKr/kgneNjMhY+L3alCBC7N2+wa3aV2seiIyQd1hve/T+/8p9TH+zFjNRtKweq8pzTNOWyakXHMs06Cbzp7xZs8fCZXtqBa/W4xiolObSNHxf2nfw5NRGRXB/lkPMHj5AssFlywuCtWGPGXm5h5wSEExPfThn2jQLRdXAF8YIzPUojInKSc6HQxj43T0oSS1MdsS+1wYkP36n2HjcFp
LdO2l8gHgPN7689ibWSGAChxlGwnhj2ewyKUeExLy7JlLVeGBsIzeM/gCpn9hmy+D7OAkLB7ErJyCJCxIS29VUgImQV4RYJcs+KtjGSQG5TAFCoOBic3zq5IWoJKHezwzKM41xMFr3LtthnICK27k64vt836SDq01IG10Ruiaw81zHWx/S3NN0vtZvHksTJIoyfa3idEm81ASCV8MQkF4NIZMUXTs2M0zSOh8eU0HZOuuqKPVbujT1Ufx8O45QZWTe+DJoyxXXo0ISOkHRYb6gTDiaxkkTnS7VcxkBFqRiDVn0++exzAq2KVuVUEgr7KojIalJHRuqQ+jrH6SfhA6Xtb/kJn2IICENU3a6a4jn4mjifRzSOhFAeftqeez6cjHBfcKBGy8kcc+Prhm2xhaGq8c5dPwhSduGfLqhwYdAiF1IgvQX+aNIrWM2pcW2y24ZaozQk7Nlih0Nt2zxXagxLV5G9DMfm3qPUArdU0WQdaRqTqbUkdd3iVhKpBVRPePcZiodQ7rmuHYyizFm5miWU4hcABsLGDAn+Lkn77hpl40mAavYsDiIfdS5Zwn3uJeSDB6vnrCE+gL2norEsRSAGgTAwC0kyH9lnEz7XWfsIsaAcyIknHzK2DEuByrsEhPeJW3WDOxQj6jlFRKa/KxbMyBQUt58rEyDteErTV9vrcFKWXMfNPdy61JOxC1zUHgau2OL9B8DH3dSlNq6ci/VdpSI9gmJnHHLEP11LhnXmq0UE21cb+Job9nJLEh0h6TAx0lS9XJujZKypSUkJ0D4AciEwaXxJzsc21WDV+eHmSEhuki+NQTHGfKvYQggAZRn7AdN5UuTS9OYWjvGuWVxj29jU7LnCteJ9OSnh5yeNXyqk1GXJAcLCSuhBVFw8pBDQQngrkX8esn4cSslJSXA7ocrpoT4DIJLnaES1/wlCChgISG0rMpfGuCB1A82sJF6I0cYSFg3MjspIGI76gb2H8yX7GwJ/tce2/rMtlrjuqAuMb9SesxTeXrhnfUtWESIdSgpb40UGVy46NvyVvq6K0QbGCCoBAuVEESOrySF8ILoIlpJcbEiOiNDfOGuWjCwkMz3lXaa4ZYK7IPrA8oQ85DK6tUFKjpW0b4kQ9l0X5KJJ77rvA6eEYVTeExLmgkpzij1MwJhA/iMFiw865+wh1vLzjG85q0l6X8GKwub9JMseEAjAEFYQldpm05LKzkVUENTHCWYydOWyM45z+a0rmBhb3QlhHRmXpcvfFyP+fB1M3Uw7dJgPOkLSYWIQGalDSgJSAXo+QlO6OHBBf5xmZz6kJAWZ4QG47FtERsabvfnvZBnw7lLCCqRUfyIFCdZZ60ONdYPvNxkJyaOJPzZZQ3IgbR/ASAdzP6BFloSmWECqEpP02afpi0kTOZROmHAC5VBjrMpKcoHNCW2WhFuhSsncKdy4cN+oS7QJZEWAEROvxs2QS39sEJbnRtV6D5RJiXzxlwpyiRSAvCth3ZTRNvaJk3k77ijgXWGYqNLnRtIXSewXOoovImsKJy48FfDAjUeqRwIgFEZseOeoqCFZQgBEJCQlItwaQtmy/DYlfawGWSTqrCFETLjmnQgECcREIBiX9wSKnguRCfqcPjP++HygfrKfSPY2CM/VGPse0DZjYF3iTCwEl5qOtXEcKVkhRQBc2tzIpZIRjDr0MoqkJssRdzOjebwsZKVQbKOFpMZqXFf8tqKwIqI1xsLTCJXO9TyOBgBcgL+ebK3t0IGjIyQdJkZdBV+A+cTS5J6ZKCdF1rVHxr9xTVEOufbE16i/djpfS8WsJ3T+OreutB3awKZG5IsGIyWZNkdWjjECWGo+zxEZvh+/T8K455QL3LbXrD8m1fYBqHU/CJ/htLupO0mshU3HYeoaNdQas0IHC4QBAG1JCbsPxcaS/Qcm0En32Y5/Caf59UIaEZFYu2uFJ0ojagUkSoqgpKgtahgELPt5MNK+mviAWUusMKy8ULyYLSQc6XjkWd1y8
lGOnIx7Bwg8hXdAICZKSjt2+2H/KUc25jwxscX/+oWMKrqPMhYVIKlg3kuE1IyVxI89Zumqt4LEMSEzfQUKDk+tINNKRtYQHgMVYkikt2AEgdeRJMHme1IOSDtjSfeO0H4C8H8BNFV4tD9jPDQjFPQuadh3iwR8Y0IgdencoOzv8Nu0qVooAWcsc83MKqIyHL9JAeVjXhJCAvBkFYGkxPsm185YPnIZ/uqKPHLylBKQHrtOKds8iVjZQ+cZeouwdmva0siw1QW1L050hCSD8668GQA2+eDQScFriNQJPmlaW2Dd/NrrYjHmm8oxPkfyvcZCQZtTcpLN8DRGqKJzkdVGO4ICFwvDm1BHROoIAVAlInStFG0KRjYJfjmXrLqg9SarCF0nF+RP5IVrAImMULYbEqQ4rIbZarKt4CFRKqelVFYwkFJAZZ7ROHleQEQaYiCQEf45VHYOCzgRlDYIAqornOYaNgCcAB3nAyYyUkiBf7nseigp8NpDH9zuYhsB6ZjicT7j4hKy1sIxFr/KfJUoEKTLwOXjTthxwVXLOmFxAkKuXmQ14celaYT936QgQcXlNSEkvE5IxRpSSB8TQnFOdRmsAilBlogAQE+GNilhCXfPEe9grQxuWAoaMCWgNUQ5cn07yj8EIXmwjFXDJCQlJS2KvgsJq8MJbpmWrNOzp8nKzhGlM6VoAMLYNpew7EY58u/T5jYI5ONisyoWWkZ0AEBrEaUCB5j1Vo4fv9XA9fi6is2LAe49YXNzlASAWUzsb+3Wz9QdVgvuMind+UPGtg4dJkVHSDJoKzTc35Au6mkmEE5G0oA8f8wY8rAuCt4obWuDwO0JRpY85RuUGxMUC2KFKVq4jLec5MiBdgultZJY9y1lgFFGX5imkkzPNamvLg9+9NvGdHiUktLEtQjq2qCE8PcWrCDCk5DUBYHalGoC6XsaEMvdpsiVhEgCd4spjSWQI2UwpSWGWkMOBbSSWDMsMaQ4Dh2Txbp+S0FuKkJUyZs0AlqY4LIF0vba/htpqq4eWwekJGuJ8T7jPSlRFiGegZ7bUo0lAaqpje3nfO0Pf0xmW93u6RivCHAs1oeuSwqGnhI+mQCRD7KYpAHuAFBqHW3jv438PuMJUyGrhARAtoAhkY+esnU8yBpCroXespdYQey14PuCvzvcGtJz7xjFTEkBFNCAHkKMBhDlEDAaYjRryYguLRExGiIlJEIG4iEL+10qQFnxw8jC75cSEv7deOHb7l+oAlAFjJAYams9KRxJGWmgFDYeZQRLTKAFjIitJQAgdRh/dmzE6xn1G3/P7fb4HTMsKyJ/3DQH0KY4kUj9uEjbkbZlEuQuE1PyFudA3vpMSTmG2mAoNQa9xV8CsCuMuDixyRGS8668OVqA6tCmYNu5P7y58RwnHbjpWlDe/q1fVrbxoPRaC0mDZhxoEPpaCFDjUngSmp59HRnh5vHKdU3q7hH80X1AYKJxbVpoPCkBGuMYckSkSSs8LpWu3cd99xaJ+mNIW6zLUAOAW0TKzDuUe45KxGSkYMXXcv1et1CQe5YQ1cBYEqiilVobCGEXUit8SPSkwRDWXYXc5TSqz5djPkGa3L+dXEwMeKagQOjqxoqSllBpaSJB1b5/Eiqj2V3sRITAx3NdnFMb1BPiZuUG97OXJJXrIFD2FIASte9nbAGxlhO+zWdC44R+zPxVtZCoDBGxtUOmXHyJJR/W5ZHHjfCMcJTRKn3XeFA5kWsp4ONHyDpJ/8RoAJQjS0bKAYQeQQznLAkpB4AeQRgDU5YAt+BJBVE4UUP1YYSEMAWMc+kSSgfS0kBIBH03BkYIcnSFkAWUkDDCWk8MnPVZ2KQR1ghiYIR1lZXCukzR8b6ZmbkwJSOchFTHV5U4GJhgvDF0SyJ7TOVs7Cc6JCVB/FRN706abKP2mpn2GIQ1SCoBYwQMSKkgIU14BwDZxZB0mDc2OUICBD/97HbMX8t8f3rRSDuXLdSUuDQAwSpC2jdeYCuno
aTfUqSZToDwvChdYluMs4qkC3RdphGZTNI2a4n7zTXHa2+ZUJNmcuJt9+ZvA6Te1HVuUU1mfcC6JORia+x3RKSAttUVDOOP3WqT4zSPPP1sDjzFKACvxeWWD9u+2lNE8IKrq+tgNZ2JD7uIn5VQlghI2Cdim2KDmMn9RBuDWSYwcPJpNX8GUsIFPksY514nnfWDhlhqITHkJ84sADx1b/gcXCB8jQUpfIpnKQx6JvQlCb2DkY6qY3PB1fbr4p6r1gyDwBqTErutbc2aNH20knxbfVxZ2M6lPqvFtBYz+4yHpU3TSoHvdW5Y3BKS/t6EnJWZ3KyIhFhSAT8GeJB6T4ZigxEhcRaROksi9ZW97RAXQuSFLCNKAMqMrP+Ps4yIcmCJiB5ZC4nRwGAOZjBryf9omCEkNmpB9KchpAJ6fQhprRueYNgKkFVS4sCtLAJwLF8DSkMV0zCg+cSREbEwEQ3cMkL9aPuNfm9610RjMoO63BqiZp2c5LWOL5shW8mmdJ3jIAUL4BQHAKaUwlDb7YNSozTSWhanNkmxssMGwCY3cvwakCwG4zTMtBBWU5KGl/T+lM7Oa9FUPoVmui+REe6Sk7roAC3dpNjGyKVDx9mt2oJfO3URCtv5/uFLlgREain3hwRSr22vFrvLna8uM1XqEtWkXY2yiLmGSGYfzj2LNGWkvy5ZfQCgbDfmU4uZFPxawWWEpxv17RfjxxcJmGUifHJw9yl/blhNnr0ZAw0BrQDFOmdYGh9fEhWiBPPz1pRbP4gPQti+ksJZYUws/JQ6WES0gU/hmyMi+Qw79hlNSYFRaYDCjYG+bQvvb0pJS8LrUgClMyYQUSOMU4Tw/fh27cgbpPGWktRVQorc55iYeMWConeRiF7s+58jJ6m7Voo0KUjFQpLMp9zdMRes7kkKywRHQnTOLSt9V/hc6K0m/njY2BCtrWVEj4DRwFtIMBzAjAaWjMzNWiKiNQwjJEIqGCktMdElRNG3Ll79KQgh7VsnJCAL0sUjF00unEWF96qg73IEIaT95+ZgeqKWmNjcdq0i6WsgEPqSP7fIkpEV6uNUxikiJUp2bLZ3qwKabzE9D59Pm8iOMXY9MwC0W3NIIWP/ShgD9KTB7BLI9te5bC1OLGlC8sHv34TlK1YCYNoeb4qP9x0XhBs2pmqDek3df/zg17XneulBu4xr/qJGVIeDuTdwpAF/KRmhfVJC0NYyATgSklghKNC8DSlJiVDl9zEuLjxrCYHnrufX0QbZughAXgsM5MdgjoS0KSpZqeZbQwybqvrScTwrmfadLaz7EETkzpImMogrQFMcSCB+6XPn+fvrYOMtBIaeFAgI55IBtzgqCcDkF1bhVk7qvp6U0BIAtI01EbD3hEAElRC2SKEwKIcGPS2hhK64wwDVhd4g+FtToTDyt+ZEhAeK1j0H625jJaGekZAiJmY+1sAVv1sKmGX1PHIWhbaEhPb1v/nkEgIUJyIraujxfaTYmLcnoQcl4rlRhQKL6TvK3bVygfJAPIdyhQ4QrIz8vU2rm1t3LUqVHcajFcgD6RCiqnzgINIfU3L3NghpjRbkUuX+GVlAFM5aQRHUWsMMBxDoBSuJVBBKOUtJH5ASomddtyALRHElREQyVhJvIaE2yCLEojiXL6ODq6RBnOmOu03Sk8kVVwzKHffcnLLBWkXtXEGcNSK3SdpjjroMS+kzCSSqnihE+rCWBCv37Ntcy/afAYR1eyXlihSuELAMbdBu7u0vAULSYXFiSRMSzYQ8XdoXq0SscRyndeMgbTPAJyU+47TRFjfHDywV9KjuRqZgE0cby0gufzrPnMRdeAg+JaI0PqUjoK3Gk6w2GfeiuG2Tbaf7adoWUsRaUlJHQOpAwmlKOtKxOZG7R9Lmts8iPZaEX4qP8RljnDBsNTxe4rOFCJPq9NydhCwjUsBrcUmY4m0lwc62wZ2dni1f9bV2hQS114STlo4sO1yDSUIYD9cppPCuB0oqDEt7nqE2k
EL7KulDElSi51A2jmGgSnCJfABAahnx+5j8mCRLm1IC0hXVIyF7KDWmCxkRXQpm3pCFR+eLe2dD4HNqSWgbKwbkrQ1B4LeuTkOetCBy6XLnZc8wssI5qdOOUUcSmNuodyV170FP5ud+nuCDvqeKm/QdtdeK50ee+pqTFXqffOYr5qZFt0Eg68g42OxUzo3LWTEgrVhtir4LXi8B1QeEhFQKes5ZD4teZCEBrJXEu25J5chHAUOkQkhQgHo2uN2exP81QjgyY4mRERKlgftHcTzWYk3afJ8SGCGpAgVn02cgLHlSGj/PFNJl93IKGS1ijXea3IL3cDY2pYZk5I5rsly0Ff1Tt7L89arHWfImPKnTAErhFC6CspwFhVxp4N10FzOUaFbKrZdrdml/x2JJE5IUucw/bQS9pmBtnnkjV2W6rvbC2Zff6D+nPs5LyXqiZLjH1Fow33S+XInLF9sIXgiFK2ZFgkMIfCUy4HdtKYfFMq4NSrfnXz9kMhcQbr+30wwT6rKaAeOJIZC3BjW5S1H/hnSrsYWhNCZyDQupaoOWtg35y1mh6ixXZCkh5cDIaTENkRER2iGY3zOBfxesDUpYC5ytFxLAM2BFbSuZBYr3b1l1BUhJFtVFiO+L90t1G8E/CycBcGGZW1IWO9I0uRxtCZWSokJmYvctp1DxiqSYnebmL24djPqSKT9SQcYmf6AvmXM2EBEea5cqCyi1dVocNFdNPVaa0FwWxrM01mVRAiw1beziqI2BMOH6xsV2ILJMmJg8GO1+H0BOzdjjpAJqCAmKglk3nHWELBzOUuKvxf86+PYICaPCuUgw1iZYQDRCTRJynTRwbnXeDbgasxc6xI4dSmdLSSYcr7XWFmTWLoZxLlF8mhN+W5WI1L3STfErTVaRumtzGASiIR35kIBLHmDnW+F+o1fF1ChXOnRogyVNSCRbPHJCXVstc46IRH75CJNVMs82CnTZjE1Lo25QFrn4B+6+YEEzGCrf+eRM7gah+F2yyFM6VAHAZfIYAk5aNOBB1vXtrX7x9SGkCLKDIzXkEhbFt4yZXeusI02FCIMGr36cNqVUBgIBAaoJBPzvft9q20hITsdo2m4prRBT+Nz2sXaYjkljgki4aqOF4vvJyHKQKBPc17LUFcE/trbZbbmq05VrSzv2tBa+RglgPBGZK7XP/hQCm9NzhOuHonOytv/5mMqNn0b3Qrf/lPImqwyqxTwXG+6dzSXxjaGSOIJcnQ/67MHiafpFqI0Aafx8pUQ8J6Xjl8c45S0jbg7x4yD0dy8ZZ94SKUU0RpqsxrnYtnh71V2WYNx/pZvnqDahQIh3ChZELkwKd7zNR8fJjBYSUkgUUytg9AhCT8GMBjajFk/1a7SduCl7Fnt/DRCTG1UEAiKLiGhAytg9K/lrEJMLXRqXRpuIrn1WQ/ZZG/tu83S1QNVizcGfi3bx9uTqKQV8DBlZablyguab1BrHx4R9LuH5+m30zNkxtUHuY84bIS1UqTPCCOtngSReR4QAfeGuZ5wySYkQr7cUMv0pbIQYkkU+Jy8GLGlCQsgWn0p+y6GOjPhjW2rLl1KF5A0JTlLI5za2joSFtiK4SuO1jbaatdPsSbtK0iTYFCwYt4WdWlhBUMN44S9YYGJLAKnL64om2mNZOtcJJcG2ZGRSIsKb20SqJiXUwbUqHFcky1+dIB6uGcYBF7IAZDXMFSLsyWW4h5REciFCCYEeQhG4yj0JYcdbGY6nz0RGZkfaBWJXiSSPCeg7n/6esyzlLFW6bE4zXDeGfI0S3m4GeibH7rN9/ckXCQYshiSbKloKlDmBCZao8L7n7xC5cPFK9mQpofeG3LIoG6NsGK/ReyQSosclxob3OEdWUyLi32VGTIDq+5EjImn/Gf9feEd8qlsjXIFBQMFtEzZrndV42zFrDGAEfB8ZAwgpbA0QIg5GA6Wyf03f/Q2ExKQCMFCNQxHCWkmAYPFgLlvUq0RAYNg67ywiBpZYGMBnf
SIyQjEjZBVpSipRiTt1MWo2411wF3WDBtJl+rPTh7WWCGFsTByzFORICZAnDbn5KUdGRPI9Ol/a7+O++xPI+LeabGd1ECIQlQ4d5oNNgpCki1Kb/dO/df74HPPNspUWXFoqCL7UptayE8WZMC2k1xg6LS4FlkoTV4YljXa2bxwpISJiFwTjAgydklNSYcJw2LjnFC08JNgIgRGrR+HTDzvNV1maxrHhmyxjNycK+lNuUQt9FcZtSmhzqZTHuXvwNtOx1ft2gjzrLO/B0kBguOaPMOWunQrPdX1fplKSFM4NwPggeABQPDuYiZV4/NxtNJxkgUsLxQHx/XA/ciUEhoj7h4SWwcgSkjUDy1bSZ0cpW0k778kJC/IP43zyuYCITG1yCNPebXFjY1ycSFMSh5ioBKGJu3BFFhQXD0c+3Ny1kGKegNCvQF6ooiZn36Ma2Y2/mz5YXVYtIjQuU1esOksIxzAzOVetlnTPYeQRQRPuswRcWmurMFDG/eaE65IsARBQsm/3K8I5vDCb/s0hDVCHPT/1aFkGpYNhxAMIFkpSBBkTPlOciP+ubXY7sopQwUtOSvi5ODhpLB2LmzYSQ2F8RjMlnSuTsH1m3ZrsnEB9JYSzPLnz2xTHMbnwSQVMICWUi8G6o9Y//woRyfV722fh/nIrlO3jYFXn/c6fy3rwdu5wP8OSJyTk85suZLX7c/KRISLpxD8upoBrS3NuX/zcm0KwOxALf0ow/38nbJcw0NJphUqrnQMkeoCvr8ChRKaytFftmUBE3LWkFH7BqmsXkLhJZYRwDu5aAQBSW8FAC1MhJ+MQXGtII2qv33OmHp+lClZYojSiuYw7TRXOU4E0hzQAN+0D7/LILETQ4Z5zLiXRNVU4jzbGBviaKlGxFqlAUImbpW58UVyMNk5LaYkJxRLxmh2zpfaElNeKSF1kcrUa2ioItDEYlNrV/tDe3YgIChDGzUxfQUmJmb7y5GSmp9y1bIYsbvFKMY5MS+EILyNxnkB6d8bFjzrLYDqPjrdwa+/aRfsW7Dyltml60/5O0+jm3Key7aapThtAxc+raV4B8q5ZREQocxt9pjbWIdTUqsZDjKv+TYSHKzy8UO0E4hIGirT/mlySjI+7Ej7hhT2viK5BL7mqj4Mw4S9Zuf0ywoVfZg0xJsSF2G08o1acYpuy25HrFllDKEEKkTj6nq+/5OZaLTAUYa2TQkCNBKYKE80lhSTCFyz3lD5Z0NgSjny4eZKISUpKAJesA/WkhI6znafbEcEUCTH0pI/6NSEi9pmEtTUQlvaXXAzgFusNec0OzVjShIQLR174H6NhSwkJHV83WHjALb8OP19bvPxRu7bed2Pj5EfsDCBUqy9hfF/Qi1y6xTAlXFFazKymfv2adSuxB8likxf6gjDux5BxpIIJ6Lqm7cRBfKaehJSQzxoFg1uSQseGE6bWEApsTQUnIAg4/ljWLs/X/Npk21In8OaeU6zdre7HM4zVke6UlNOzLx15CdfKEFVpq8Sn0CYIGJyMkHsVCR5BALAWO6oQTxpPHjQMxCSKuwNyokTzRy5ZRtiuUWpZFa6Z60aqwa7DOOtqas3a0IvsJHjZhVdXtjUlapgP6twdoxgjRgh4XRwe00b7pvAxTlJEBJzQFBOUxoME0s/f4eb750QEgHdDom25LG7xuY0Tson4GF9I0RgA0gS3Lm2COxdsLQohaBzbc1Jr4xiI+Nr8kTbdnZ+yTD0RyWnq6RggHyfCyUhw04KfP/w1K31m5x9SSoXsW859eBTmEiUEtCLXUONmWwFISkluM4AaQwTQuAxm7dy7GjEJAYlur0pGUquIRrCC5KwifLi3dZ/u0KEOS5qQ8GBSAJEKJhUa6vzx6Tz8O8AmfD2elGzKsN0m4AuE6argmbUE6TDxpstQNQuRgUw16sb4hYUW2ly/VzTxGasIX2y0ybufDV2Vc+kXCC5MOnLBA/WBCjHxAjz1AxM+tTbQMm1HaB+30KSEhBOTN
ONODlob9Nx1SXCSRmBYakghXHIA6qeq1o27l6TvSoUAEVHTQXAOVhfuBkGSQ7CMBDcuJszEeyfnI+EikA5Kzzs7CkUHB8kDVjK4V0wpa7WYUjKymkT7O61lT0lMFQbD0tYemekrDEYag0JhMCorcwy5cFkiEsdI2H8S2jiCS/UrUs09u18+llNBxbYvkDwAlcQQixm1SRtqxnS9i1fsskV/yW0OCClyeTrqqSK8VyGxhozeLwC5+nyVNLFA3vqd4dK2fQkxqbtO7p75vEh1bYalrpCRVPPPC9JSF1Ox0pzl0M5DZBWhOcA0VCnPEDFORPxxIvqeg2HzNcWE0BxuDFwAe+iHNA4krfPDSUjFMuLX+Vhpld5DLkaP5mTbd2EM2XFm9+sraa0jmtziguVJCQGjHdkTwY3LKyEokYhIrEvrgoyrHCciVIW9QggTUgLEBGSpiUQbpTDi0pmeNxqWNCGxvqyBNADw7ltNfvlAcIuhz5Pg/kRKTnz4Tv7zf/zg1z57C8C13kEA3RAa2jaB47kFhpORuvTFcZrnIDimlhI6d05+8lYS/5vwFhIufEvv2x63IyUjNui/SkRqJzjmwhPIInytDhvE3a4f666TbqtzoyPQpUJRyXbXjoTyxCqiSTBxf0lIyQWdlzLMB5KK24207WNtIs24bafVJithC0MqyYmFJRdKjncVrdTYMMHSOOm8o904rFpFgCN333aic20M5KzVub/2M7lhtdP+0rFFpW/COxWsJYjeK+suxS0lVaUVR0iby7YlyRji979+vsm1NTc2uEWEkxH/DnhiEv7S/u4E/hqkdAA0tHYkhGL1ICGl/QtJMQ1Un0j415YC40XDe0zTQZwtyk1PpkpSuOsWvU9ETux9EUnRERGpxoLE27ibVrCo1BMRvraTKytZaqUw0E5ppI1mSigiKNqNBekULzRHWKVQCZcqF5SaOEzX1hKUuGJNijbWEhaszsmIdtc3BlmLiP8+hoi0XFY6dKhgSRMSwE3uSRVdXmitsi/yFpHcvpRfPrWSAFVh/P4AJYSPGbBmdfuZ9w3vYx4wSoI0X6RLY10gyhLQieCR+vtyn2BubucLb7rIZO+h5nkSsq5DLQTH1JWJFx+zFgsrFKeCAicFqRaz6mMepwGtA2nZlLKJAEiQ7yn47zZfQJ5Yp9YRIhBSWCuLdWcKiKw+TCOZa5c9L5Gr2L1PiiDecD9wrwl2QalDbTA30t5dhf4OXIpeyuCUkoFCCpR9Bcq8NKUket6yE4o3Wk0nXOwSMGVccLrUGBbaKzzWDEofR5IKzlxADgQJgDQY+vioap/H/VUVrrn1imcrWwrgFZyJ1NFnvp2j1KRcqp6Pu99SvI63jihZsYzUxRJR/AZPE02Cs8hYQ/mwIueVdMqh4U9aZ7sPF3Sr7wePBeGEnb9fJJRT7FRp7LugTdVtkVv5c7VQyFpUKIFhaRwp065fdFSE0f6136l/OGoMWxXQbt7lS8T3CsQWkpGGD0wnCygnJrZ/YkJi+ypeIzxRqbGaUz9xNK0PaapvssIWjsxOldZqMlUqbzGZLpRPqWzn9xD8TvGJCnxudXOTK8RImbwAp2By5xE8O1ZTZqxM0Dr1ny96qI0bs3kikvDbDh0WFEuakHAtVtviVLQ/P56jiWBw4SltQ9PxmwppkSK4NAGAVMHqkIuDsJ/Dgp72ldbGP6p4sY7N7mEBMpEgn1tUovOP6fZxmsoowNprFcecM2O9IHJMAq4nKMZkV/Km4NfJg/HsNSTIZQu+noviXJ71Ve4266waqauE/VwV0toILJpUhODCRkw6htq6UJHwZf/CW0ZKbTwR4allAaDkjZgufBspPqfHiFdPShcvwIRoITCkxA3ajg8iJHMswB2IheX4/mynk+tWrkBfquwI81xMVu0+cd2MxYx+EVKo5WL5gGDhGCV9mSoZ+LEFc8/qO6Lhn6EApgvlBXAinfQ3ZEuKLSfOiOhdknJdbJsUajMAfvhGglshS
aizJzGATdiQWD0IFUGZzYcUB0HvgTbA7KhkhCS4dfEU1bmYGkpN3dPCF4idVtLHRQyF8Z95u6KYHElrIip1K3IglQMf8jmyZxVe+QxZ3B2NK6homz/eBIVWuq3OWs7vs658QDpuyR1UCu3JL1mi6f3saXK5FQCkk0fsHCCETTLgkiX7bHBkhaIgeCIlwiluoiHJiUhqJUmyaHmSQX8dGeGWEU5GqD+stcRdYonLNGridXRhrtmhGUuakADcPYbFeTQoCRYi00Gddp23hz4vpUD2ceDuW4T/+MGvY98FVAlf2uc+HkQC5Sg+li/CuUwofILk+/tzc4uD4K5CfAVs0HyRwCdF9bgMSEtFC1Ba+T2HSmrgzO+EhZjEyE3Ku4QIaz0pDRAHvefB7wmoEuyUiAB5MhisZPWLgfaCBY8TIQsJkZGYpJTGMEtFfcFJLpwNRto9a3uNnrsPTx6lgE9xA9jK6IWEdNnRiCT3lcSgDFYTLkin7kN+XkBwA1TsnnPxJN4lpIHYL3acefHPUWrjSUPqXpV12aoRBjliFzomaCdplgvFrSTknhWsIoV0LnlCRNmQLCmxbfKuRgxBOHPvFXM54oKbhoiyQRGZpXpK/j5N7G7E4yhyhCSyIOpgIal7B6h/g6VKQZN7FjR6RmIWGkpbN8Wet7Da99andBdwbl3OjQs1rm01a2SsvIpru1B7eVrv1FJaR0SCxj+2lucsRpwMptfm39uQEnIHJYuSdoTAz7eSksEwqwqkT2cvbKox/25rcpEr7f5ChLTBRgjAnQuwhNla8gBZU92eg4gIvx9ez4WsIfwzUE9GMsbeJU9WOmw8LHlCAky2QLfdd1yK3jof30nbs9QxXUgvFBK4Rpf3EwlhnmiwqvfcxA7AL7h2P5OcI74OIafF5NuChav+uFzwIhCThLQNQavn/MZDNTK3D6K/AFiBwWqbc25ZnOBMEqsTzhFISM8tH9IECxW33tA1osU6m+2qzfXt3zR4mNqWWgNIU0wuKUOtMetcUtYMS+QC2MlNa+QECLKMDEZhgIUsWDZNLCcL1sJgtZrSkAXCPqMpQW4r0mmkpW/PnCuWOCxtm4BYKOTw1zcG0llJAPhq35INDj7mUosId9tbKi5by/pxSflclkNCzgqdEuBcUhJuSeRuWtZCIv24sxaAYDWxsSVWoCqU8HGJwp2f1+qoU2aFFKhhLvAxEBBWAPZZooBS2PlNSIEBcz8iJQwpYMgtccRqZ9A4o99ovBMZTglJ2u9KCsy5c5R949zbDHpaQAod+koKyNL1qWxjsa1eb1LPgJy1tY6E1FnIiYDQ9VNLSGr9yBE3+jxiv+V+T5MoTBXSWuikwFDbfhxq6V256H0daoWe1JEbVyHJ/SpkBwxWOnIfZG6vflwaT1oAVILfycJXZ8Wry6CV1nqhc6QWQN8nCFavpUBHuqD2xYlNgpC0wbqQBIo7aEJuIb0/4K/33zH6/h8/+HXj/j5APPoeWz9CoHKs2WpC7vnW1TOo83BJyUgbpG419jLEUOILLsSERFrKSRMIhJTNtj3cQtOmf5vIR53AWPfdbmPn1rAuC0n2opK0xZVxEQewp8IYAB/TkQ1w15a8KCbkaPpLJq9MeylrGglgUlDqVI3SCHcO4aWCOlJiExmE/a0lLx5HKbgrYPQsFynOvvzG8G5wy0fG+scFf/+bmyNSd1B7Dvjj6DxRrJUIGaSCNUR4X34pmHuWr8cR/yUhMMRTheunQh7FVBkTXJJswgokqXRdNW83diqVwZklmBQCRMq5RZATce4umBv/oc9kNB65G1wpA0kelga8bhQVsy1L4zqepH0ak6EYborUDXcc0sB0Oi6XOSwlI7nU7mmWulzCizoLSI6M8L5N+5N+H4w0+oX08WKltv0ptYuFFAI9aTCEcwF1mQ+tFQQQxj4HY5BNuexjlhLrCYxdVUvE70uadIC6hIgH9VlKTgBu8XPXTM7B+1sKREUdO3SYD5Y0IUnlhjYCW
s5fNb9fEsTegpTQcfd3BAFVMP9dm8mFkCMddf7PHN7twC2OsTATu1tFaVJrKnT782aeWzRWNKtFovlCZ/+OfM0KdjwJkVr42I1WJMKfhC322nkQyaCNnE+mJt42nqoXqBbA0sl3IL53Ok/TbeTiYXI1TdKsOXOj0v3VFTctHjMSrCEhuDznvgUEIYxctmjbsLC+WXasuqxbLsjXWytEHOhO7S+NQcGSaQxlLDhyjWxU36akfteguCzK4ENCNQ8+pvSs6bNbrKDkAADQi9Kxh9+B2C0tjtmqH1eRBVYGLT7AUvyyPguF6+w/KeD/KiGcdtr+te0V3hXGu8bkis5RE6NgYRH559N4NbAW4dLYQG13MkgtPPGmAHVrCbSxIZTAgQjImkHpxzVZAGksN82XlBGu1DaWh/YlSyG5HZWuuN9QGx+sPefeW17bJ2exa/IaaKrVknP1bIoJyWXIygWop25ZTe5s3NUyR1o4wbPfSzZeAzmppAIvgJ7rS9J1kDJpTsVkmY9VRa5ciBMs0CXJemI/p9uF/wzUK+B4n5u0300cW+K31SVxoLYYUbGeLEZ0MSSLE0uakLTFuBeyLZTXfC7M+TZ1RNYQbXzK4FSLRRmSgLj6dS5LEpnHB9Cgmh2lBmTsFeLR5NZVXUCr+0S/J+3hC2G4Z3Zdf0IgrbDRZnIiE39kwWB5Iiu/oR0hppgSipNocq1IBYMUuSxhCE2Mir8ROcm1s+o/H4qckfse7UNktupaERORuSSofaSNrznC97Na+KRYWmY8EXGDkjYxgfOxB7TzExcodQlAWN98CoA3cQpy33fG7eNjmkTlskTgOLF7zIO2zj2KRYFzf3gzhlpXLB7VlMqxOx9tz717ueKP6bEk0OWEu0KG2BByz7J/qbq262Mp3G/2uwAAPQIACPcXWueDhqWEERJCSCgqNCcsgZVwmn6X+rt0wqRgYeA6GuPWVWmu1H5uXDsoMdIGawdlRLqBvLa/rp4LJ+V+f6fVdzforXawzfVEGS7uISoSaIy3HKa1VPjckEt0kZtbcr/b7VXCkLOGpIHqqVUk7asmIpJ+rv+NCGHo88FIO9etEIimtFU+ULpxf6wGtCMmQ83nShrrRCoZB2Zkw9MiT0qMT8jAiQoQyAqBxz3Zfqu6ZhERodvNLRe0/2K22nZY/NjkCcm6kJG64PVc0DwnKU1B9Zs6KgtDJoCcfuP+9jwGoC6jDv9MQayp/zgQu1xN8vzb7FvR4JnqIgbALeCuLcZaUEijC6CSCI5fuyyZOxazloSTVw/y1cbHqKdSTSUnAnRf0UKfLP4pUuLB0US6UncKahuleOa1RqrXjDNYFVJgwO8xERp4EHWa3Sly6zMGhQs4H2rtyIbMLrJSEhlxBMWRmWkoe6yQGDqSI7WoWEuInCsnhPSkhJYGpRFA4SwM0qZnttYUg7IMwt9ixrjU2dwiwmO1eErjVAtvzxGfP9IoOwGOp+61AlwgIcITktgqQkSErCbCaKAc+b8AILSLRzI1hAQ2/aohzYgqAFmgkAWEFBDGWTRdLIGRVhhUjsiSciBYVUxERmyK6dIRkthNMQfKPkaf09/4PpVaOm79GpZ2/iYLnrcKlPAB3NH7X6IW6TySzp/R9VE/33DkrCLR93mq6nl/BALHCERl/+YF3yrdACUMAAMUEtpoDAXQ09YaShnN8kUzq/M87WO3xYSQng1ljov2YcUtUxD5APIExKDBCkf7CNPq2XXokMOSJiTjBv5CWEaaMmpx3J9JCAc3lQPAMMmixQkHkZDBSHv3A/57yAqjvPBIi0Sa2pNnOeGCPyENnubICb11Y2tcEKW/b02kwvj2ETkB4IurAYgyWFFbKUDek5MGYpIe34RUaxmyWgWBoS5zDW9fDnUkMBXoU8tPXVpTHt/Bz18iFHeMxoSUzkJRT0p8m5jAlvaPrc8CAI4YIXZN8ffjLCXQNt0ncW8iI6RtBpWnZ/1Y0W7LMK7pXgMpDX2w2GPUbJeKz
DaLXNII7naVuvelsTP2c9Ae95SMyIcUwZ2FB83TK0Sfe9xqItwz1SNAa4hyBOgRhDF2myMhgshIWqyRJEEhAemWVN0HxAAo+lCqDyUljLGxJj3phD9hBcSUwJGL4tqhrXNzz+wIawcjDEY6ctnKuSTSX+t+JbxFsI6UEFKrSenI0lAHLb2S8K6GQyoOKfLnS9HGqtGEpvW1jog0WTry14iJWe6eVI0pPpcpzj8f1/bZkbMcOmNbTwkfF1OXQKWxvQnJp+88niquYxXeGSBffJF6KTyvKrnTft1x301wM1sqZIS7lG7Ia3ZoxpImJOPAXw5ugs5hvtqUOrz0oF0W9HxLBX/7yF3853f/z6/855zZfJyGj/7mspgEX/GQVYcXFmyLnBtWDjn3gWjBW4fx42M5aEF0PsbWXcKdWzPrShI7krUi0L6Z36JgUFPdTsflyAhvnz2HbaN2C2+u78vkPmwDQxwEb0+dRYSTEu4OZQNIM/eoTVSHhMbTTF9BSTuO6B+vzM3vH1JA0rOgG0WipXSWkhBfop0Vw7pq2D4WgDTQZb0fO43xfiH9vXpLC+BjWpYC6pqZCnhcexs+x2SkjoiQACRFLIwpwVKhCmsNSckIJy10fEo6hDHBGmJ0s8tWqZ2VxJatM0JaQiMLmHLkiYqStiCJoOtTu5L7zCGdP7nL1rh+5tuVDAUkuXVkHKHw7oWAHct+DnLzQGlqn3u9VRnx9xqlB7nlhvtAdHzlPoXwbpJ1sR2Euro3kyC11o47D82fnKzY7e64zGMdt75EbozM6tJz8W2lApRzB7NFMJvXyTTrYfb62r6PpZl/LGOHDhybNCHhqBOu8vuuGznpXs6AnPA1xwKR+XaAE5CwcC7rK/QLielCYdoJkFOugBcV8koXdB6PAIRaJv73BveAdIw01TqZRCMUnyejnWeLugYJoEHQIqE4FzuSIp+mlxMot41ZItJ0mqk7RVZjyIiTPa9h9xYIyJDaQJYhXW2DbWPVt5wWzqnCEpOeUt4yNeUE+BDwXk2Dyl22ZlyldtIeUnpYJYSvsuyfgbbZcLQIiQUA+GfgLSaF9PcylBTcLnzdFNtWgWEZrDdzSQyAj40a6aiaPP2WktbFil4LU3HqisItI2mQb0pABFAlGKhaQ+xn+1eKGgsJ7eOsIGI0cOSjdJaQkSUiWlddtqLgdmYh0aVtoCrsd9W3+8oCRX8ZhLQFOOnVspY+g56UKBVcOljr0hM09qH4Jo1tnriBEOJA4s9ATEa41aSJkHBrCT/PUMfPkfq4DmkK56bYF36d9K9XgpT566WKCx67lc5fbawhufa0RV2/UvYtaqfMFHEmpAH6ub7LXS8UahQ+McGUXyerlkggfpZtyA8AV0zWnlMLO9eLRT4/EYQSG7ytouEd6WCxSRGS+WqpF8oN4v5qFanDKw55EADg7d/6Ze0i1KTl41aRvnICpMtMMuUqMtNfLqj5lLZae028FIE8LBQZWR/gwn10XWaVyAa7T4A2ZMTvW+f2wDMnOVICTQUUqymJ0+KKtI3uraJFTRdcIWJNnAQUAKnJZSAkSODt5cI+YOtPKBncGWS0QMeLMu/vXDKCKIsbPQ9y44INyJd0bR1rEXMWw+pv8K4xQ0Fjev2Ov3VF+tzTtNgEbh0J24KgxF3XUjJCpFGw/VJrCD+nt4ogWEZSiNTqQd+5e1ZCRugYw3+XhStg5wJXnLXFuOOkkJGFRAibmUiSO5RrfJOsFLlsZd5VPu5T5CzP48DPFc0HiDPH1Wncm0jIRPOpe3DBpXUMiRCsSLEU2ftIXazSbf5c62A1CdZkE1l7lMzP4WndFN5XpMRLE74AwTpDXgR9JaGVzW5J926Jr/CZH2ntHE5QPYQraIhP9bIOYB06TIYlTUgoV3sbNAlvXUD6+kXTZB4WiHiBJKtIT0os69kJdkW/wFRh03ku66koqJWDLDCzpXC534FyVM3930RG8gtFdRvdWpRFyAsVVe1T2hW59dhvi
0z6AtziMC5AKhe34c+fEIGcZaQuNiZtoxQup750z8GRwTQbDwCbkYedKhc0n14rSvEqxrvl8XPSef0zksECkpLYtG8q36Xrf1jBl9zQSItvIWz6YqrrAOtqNSotQQHyPualy8RDQgN3pdFGYigEeq5fe+mDXWSYSjTzdfMzfw48Va/yz7lKRkLKYPc3OaeGLWzJM/5oYyAhfHI68jxyRbJt7QchYWThsmppV1CBpN8CMBrGaIjkipU7E9KSEvqr4RjzyL69zo1LCQklDAyRYRhMFwra1aqgPuwrCfSBNQOFfqEr5NrfN5H9GhLi+3wCoTpVRIwjDk3nTt10c+dPz8WtmpVzy3pSkvvurSYqJijptXNWjSqRzswZDbJFsxte9RycgBD54HVmcrGWufaTWyqNJW4Z5tYT2j+3VoV7cH/Z/NlTlHFNWTJaGvSULQRqJiA3HTpwLGlCAkyuJR4XQJf7vQ1J6dy06pELoEwDikkQ49odIh8rpgr0pMBmREiUwEyhIm0pYIVPAwMlFdMe2voSXFuWoskq0uROkKKS9YRN5DmklpvafbxwClSKLubaIeuD3OvIiC86xqwMTYGnPHsar6MBAFDWhUmZ4KJFbWqqK1C5j6QfqfI2F8x9IoOWBC1XK8G7WxF5NbEbmd0RnpTQM8ml5tTCWG3hSEO7oolEpmx9B7j+DGSkdBYVKrY2N9JVodNId43FrTGZKmRE6niSPU4OOSHk9Wl8hqCMZQSgVLnVAmzG2G1ESoQIhIGTEgH72cC4iA9AuXS9xmjHJQSELAA9sql8AVdjpEpKKvEkDsK4K7hzwNhgeQNAyT6UtG2g5AxSGGcBtnOelnaMl4bNiYmr1YC9lBTDBZ23PuTcleqQO55/zmnnAVTm9Elqe3CQa5k7afJbvu2NazTTilTqizUQfJ5ul3+vol3im5xskcbPcFc87to8GJWRhWSuxt0ZiLMJ0thZMwiJYabS8eTftcy9s/WVu7ZSrJwUbm11GQGlNo3r2WKBVHmF1Hq9ZicjjsWSJiR6AibeNBlwTDJoaN+TH7Fz62Puj1BCZLVT0T6JKwH37/d/mcsWT+kJWIEktYDUtQUyJgOpq44XLk2zxrGptkKcqrF6DnLnaSIlJMB5YQNAWs8kd3+cxEwCXvE4JSPpu0Iu9VIIlDA+PagS1ioV+o2xRcQfc5nKCFI433VlKzbw25EypHpN076G51AlvBz2mnERQx/nooMrm+8LAV9zgVLwKgEbyM4S78QJA0x2PJIwYAWPqqBIAkPaZp4wYmPiKz+7tZbg9Sz7Cj7yrpCkdaOM34tQzFBG52rKomq1ryJkqaL+hmUgNG4EbHVrAL6atS1ALsLzFM6K4oSuQvWta5WQQDkCpIIopX3T5MjHlXg3Lgp+d5+9VQUI6X9dTRKqVcL34fCk1RNuax0uZVXApBoXfSWjwGgl6t2wuBvXOGtHjpDkLBt1x407HxGROmUPWQzTtqZuV1yIbly3GSsudV4p4XeNLN3Ib88ot/iakUOd/JEr2shjzMgqkouNqyOeJVtL/VgpdCCyhR2b/UJmU6HTX05U7P1pDAFMQ7k6TLaIrBaGxcLoJUFIOixOLGlCMl80aTP8QlozweWE0A7N8PVBtIAS+UmU/k0XVhM8XUjM9JS3jPSUddPqK0tGCil8bQHAChyliYuNNYGTgRjV55uzrNQRkDhjUJ6IRNeRsZtQ9VqxQE/B7qmgG64VW1BSYjKuynfstjU+mBJgz0+EzGdkMdGOPMSucvZLkyWGL4JESsK9hurlNsmBtPUkhKjEGKQwxt7jyJEgK2iV0Fq4uiGubTq4sQGppSf4yyshMNTB/YHuj0gOPwelqeazbpPLSCRELlCc20KAWzJ47R8OntIZiMd4rq5ICJSNLU4EY7hR0J5LGOFLX0iWRlv4V8vtx8aD1LzytSPR0jiXMOFqlPQhVd/OJKWtcCPKUXDdoqB3+p6t4m4LJRIZMULY7wmEcLEtrjZKl
KrVWUhm+gpzowJK2pokXFDnWvOUjFRJuEapRdZykaLJtSrexjPZJe56SXXzuvPx9gbCYUkJIZeiG6h3i+Wn5vM4z9rrXbky8Q9N67xtStK3TG6oxh3G507ds1LLB5EQnsBglOyXSwiTIlXy8c9kKQFCzIn9HMgKgCizZV9Jbw2nlMU+Y5ixCoaelhjMNRSkWSxQEmJM/ZgFh1g8c/hixf2SkMwXTRrxDvXgwf7v/95N/nOTtq6NpcomLDH+szaOmOhqfY3sNaSo7JM+z9SCErcxnCcdDwtFVisEhhrF2xEJrIGYzMdawq1MuYD+OiECsO4P2lkW+P03BaBy8pPCnqvZHYLOK5BW444DnL21A1ZTT8QkbVZJhIxZiuCuQCeyaX1dzRRh0IP01hV+X3SuFEoEd4nWWX42svLjy9f9IcrMw92tgGr7ghbZszH2G7LHtpUPAsHjRN7+FZGlxJEP9yyEEDAiWE/IcgLtMnNpG9cBbWwRRQEolyXLj4KSuWA5M6GhlMC5zFuyCBYSt71JLJGeiAeLIxci+4X0ldVp7OQzatW8b4zkjnTVdXY+RKRuW12yhtxvtG2SOJfUTSuyoibvPVBVMNVZNcaTkfaoixOxv8WWjkA64loz3CIy7rnE1zYJyYsJSfp7ui9HdLwGAImeCtZ7ykgI6EpGyw4d2uJ+SUjaFjsEYv/RitDZsZGJQf2Zmyg5SLs8BDBbalvLAfA51HuJ9MJTv4bietpv8wJp5rmPS1VaiTHRQVvOMY6M1KW2TduWi2FJBV6CcpIXjU/S5NvUoc4ClIuLEmRJEoBEyH0vucDX5IIQFkN6dgNYtwCphRMWBfJlxPKLKHdhoGJbpCmVwqbSlVLYAEpp3W60MV7DSXUleMC0cOQjZCUTLuWqvY4x1gNnqLUbL3ZBpUV16CQK3v/KbaMxmKYLtu2nqvP23Hb/eD8eTJr2A2ALqQ3H+SBuIJQuDoPHe+Rc5TikAbRz5ejx7czFKL0G/wsAmt3/2KxKksa0Bd/bHhosomkRRariTu3y9yfhrCf2DlTR94RK0YulXVX3tEaJc9MiS4mRhRsPJGTaezWGXNF4HwWXxOlCYrPpgmnMy8Ttr6ok4DF5Kca5b9W5WOV+S7fT9et+S7fXWXTI2sIFaP89saCl7x6/r3EKpqYaGrkxnd5KmgiliYDQPcdxNMYXB07JCC8cTPvzvymp4Nfgfyv3lSEnEeEFojgTayGRmOkrFFJgQPWbpMSssjEkc5TtUgDD2brVavFASAGxgROEiIwlrkOM+yUhASYjJWl6T+7DCgD/efUtAIDn7LfDArdy0wMFqrbxY9YucyYRE1UalEZDsXSohJSQAEEgBPJEBKhaunJCEpmpw020Pz5F3W3Hbkumdv+0eBjVLrFxHEFgjDX79Sk5/S05N4RcAcJKYUSm0Su4hWQdCHrFGkPfHbnxz0AbQLnn4khqqU3kMmXdcYJrDoSLx0F8L8aJgUEQtvtpHWcaqyY9sH+HZWnnA83iWJhmtsw8TymCuxkQAvX5eclCQ5a+jZlO/AvX/j67PSUjtWRBxn1XlzZZe9ePqhBHGOe2RlXFc23115dkUbPnCu5bwZ1L0TZhQnpU7+plmGugS7JAtUYUQlpgXpvEWUVofJF1zltx3W3R3fksdWSNErHAyAVKLtQS2hY8TAlBXqjVY4XdOnIy7tq5NuTci9LfQrILUXneuftO3z8g9Pl8LY/jMjLWVY1PCQkPTqe+TmNF+HFNJCN9JnUp1IcU/+c6w7pBx+tySnSUrD7TUhqfbKM0xtc8GY4yGrAOHVrgfkNIctVd60hJffxIvE/ntrXuSCe+YUlB3lYYlCNgzmX3uFfE7lFAvaWhzgwfpZGV+ZoZHJXYDADZooYkPzcITePaSn0QWVCS2IrcfdHY7hnp4zZKVrCK0u/WjVMlhI1NcQJQT9m2SCGcz7VNU1rKeFFUieBB2kvlhdTQRroOT2ygy9D39prxfdn9XPt8n1jfZ
Yw0NC2CTmAzCBOaENbqwwUQ47TTxhgMtMFIA7Oj0vptG1fE0I07vzCz50DtS3P2p1ZUKfLEk2rmFCoU9KT28+sMS+MW+/zz2pDQJi5cGLYHwpiSjjDOMyckwSYz91oiFp9jUkxST4HawS02aZphsqRQBXg+tgMxofgTQIjCGh35uPNjiayAJDDaMW/Sd1oKSGN9/KUO/TUsgqvWYFREgc9Angg0ud9wNFlM+D5V68n8nlOujbngaiCOHym1gfIKiuqcnKsYv6Heo7rK83Ukri5RgO+HkUYh28X88GfDM3WVToFDGdlyLsz8nBQEz5VOtI3aRtsLKVAWdtwMdbDozQ1GY/uqQ4ccNilC0pQ9g6BkXvDJ+bmrjCacNDS0KI2ri9ChCsp0Vac1CpOfdv1aVhag1A+3cg0yQ4sQaM2zMpHaPM2GVRdvIcGDwavEgZCb8PmmKtHIkxAeUJ7ro8p9ShuU3XOFsGh89pT06XebxioFu3vC5lI78nvwSQmchSLVbvIc92RJTIkgvzYRHxImpKoKD6kWHSA3KumsV6U/51RhsxKVkly3TEh6YAw0QjD73EhblyhtMFtqn/p4VBrvYsW12GmAf92z6DvSwQsC8kxgU642gK2jE0gKCeOlsVpTasPGVnaUxmCZkpW5EYhJCUVZ1KVy9sdrSwC4IoATmDhLmTtkHhJlToM9PgOf/ctTSQdLFs0fLPW0DCSFYpZ49WshQkX2tB3GkLuW8fFvAnReASWsy2XPhG3aKCzr6chyx4lybn4Eci5YCRF0wmWd4JvT0udS/6bzQRqjkhKhVMi185jM7svPz62ntjhjuPc0gUZbtF2/2xLlcZaRaqasap0icp/qUxsz/Zc7/xwLhidy4tvCkqiEGkuwWQO1iGKTmkjhYBRqnQCh/pCSYkm4bEkl3Bq3Aa/ZuWyNxSZBSBYiv/NCnIO7Nzxtr+3W+XybIihF8rv/51fRdq6ha9LW5Sb19Bw0kfIMIYB0qWmld7lal2eeNq1t9fdxblmpHzLXeqXHVSAtabDpcoE4Pz5T248JHiYB2fsoe5LCzP0iZEyjfgy1QZimc0wfK29OyO+Xs0Jya0nIiqUhXbClED4Pk6044W67NNYqUmprhRhqaxXxaX41E2oQPreJo+HbeAXr0FdOgeGEVm/Z4e5OzkVOSoEjH7JtY79tKBy7z/YAgIt/cVvcD76quKkI/DkCQVYILYKrnU4eeWQtNMgkFqhHtpApaw+P36oDPbIhy6jmCYm2RRt7pbG1VqSPwIIywc3LCNsn3l0waqP9awwqKeulsPS/p6QbiwJTBXy65KETxAvXaU2xZ5MoOegvzZ1zLGMXF0ybNPN13ylwHoB37Wzr1plzJ4vawUitLsMcVIJfv/kafP4fZyn358xY56P3F4gHdk0b6F5oDaPsVk3ze5MbniUJdnLuM0JC5GTtIA6Ur1Pi0d/UXW6kDVRlXbZZ2wD42kkjbTAazYMRdqjg29/+Nt7+9rfjRz/6EX7/+9/jc5/7HJ7+9KfX7v/Nb34Tj3vc4yrbf//732P16tXrsaULhyVPSJqEyjphIrWS5FA3mZXabHBmvSkimth18KWl7/wvTbS5olF8MQXsZEoBeTN9hZl+gX4hscxVqkXfujb1lA2chjYuOxRAldDHZaXigkBTob860kHHht+CFh4IQdD8flONGt0r/SW/cTKhU1D4UGdStOp6d0PiByQs92R8X9NFVegIxwYSUteFdO7omiq8p70GLRJ/10elzXC1xrfZYHakfXwGj2/gGvih1igNMDcqvdBL6Ss5lHQB+SW1V3tBY8DqKKRuFzQWiQjzeik9JTFdSEwVyqe3LiQF3luXszLTlsUAShTRJLhR7FalL9l4oGrsQNWNcpxVI+tuklooM+8mL/rJrQtNCK6ERLKtZWTKpZm21i3laiRRbaRAOtOgeYJgfUeuWhJWYDfGtr/oK2gDLDfKi9clu4c6cCsbWZhsHF0g4KWxVkByUaS+o
DTVVBG+cOSk1AJrB6NKvEpOM1/pwxoBehJSQn+VFBgN7DpQ1Jx33LW4koSUDrk6Jrnm5a7F5ypu6dXkHl5ZDxB/z1ixcu0kq3OaECM9dqiNf9aD0lpH1jgisnYw8haTudSlC1UCmCMlOfKZs6aM5ha/y5aQGz7tr5hwXr/vvvvwl3/5lzjppJPwjGc8o/VxP//5z7Fy5Ur/fdWqVRNdd2NiyROS+aINKRkHbeqVzWQtaWsp+dw1v68skqSdzGGpB9K//FG7AgDe+o1fZIU6CvAjTQ+REL4t7BsTkrKv2IRqFzEyKZca6I2Zh5pS5eYEmaomMq+NbQqErCxWrC+aCmHRPY8yGkR7QitMSyGcBQHJoE3IAdPs0qlyBMJfu2YhT/svWB/yWupJrFVVdx/hs6/BZeUawqBnhI8pSAU0+hzOEUDnpixl2sRWk2wAaYP21/ap8H9DBidLRvbebvPW976xwItF+uxtqBKAnLBPcTPuCJc2GY4wNls36Np0jahNmXGUawcXvHPFPuuePxF6AOjJMBaGOiRRGApbe6EnjXfJA2yBcbKScHJC5rp0CpIAIODJqUrek0KOz9JDmbpKDU9uh1pDa5upkDLIzQnt3yEiJqE/pY8Vi+bUHBlMyEgqnKZWjSYikv7WZCUncOtL2gbe7sp1W1rIm9qbHsuVKsq1gxJxUD9bl1SbFTAt0EhrUi4ejbJAhliuoEzi7x8nmkMtMSziPuwXJiKXqVIvvfe6+68bC7Q/KRA7rBuOOuooHHXUURMft2rVKjzgAQ9Y+AZtAGzyhKTJ5WISUkKCj2QLqBIi8sOsczvpUA++eAF5awhpeQY1hITOk54rNVH7342BZMSkjbmeu33QMVHhPEYk+HxdFaKax2Iw/8eTPF+g2y4U5DpkrSXMz5rFmAwRx3rwquhSxMSiyBSNlKJKWGLiBX8/qYZ7IeKufBV7DVtJXdugdyWAuahNbP8EPr5FxeMDEN59Z1jqIFi4OUVJEY1DbhmheJqetPEiUtrCntMuhkTJpZUEkrTrOjO+iYTHloiwD9fw8ngaGl85a13eaoLqtoT859pD2v9BWW9hzYHPKWRh7avSx/1MFdJZSKQXFtNkBZSinFtPlBCOfHAriogyftn+sJ8Eex+F285fHTf8AYTYFG1ImWHdwwZlSDW8dlRCa2DNsMRQ21iquZElbbSNYF1x+BXg+y33l38eN1/RPnSM3Z9bytN9mzVJddYRHt9AMRKKKVVy1odcEhSOVOHSNAcCzXFQ/B3gCSR6Mh5L/t1hcwe3oKXkc26kMVtqjEoTPWty3wrrauzOReer69foPpPnPo5EdgDuvvvu6PvU1BSmpqYW7Pz77bcf5ubmsPfee+OMM87AIYccsmDnXt/YpAhJbhEDxpMSQuy7H37jAe/aIDGNOGEv2rbuaBPIuRhdO+aLnMaZ52Gn756UMJcMADarlI7TY4ZJtp510mMjwjkuRa7XtOqqD3cTGRlX3M8KuG5fp2DiCzr/PE7j2ERM7PVgU/rCLnq6NOixgiE+rWbkOgD/mX7jAbxxHxkWA9AuW1TuVpqqyltNPfy7RxXsh7DBmSl4kHl6PRISuU940OIb58evWJxK0CancU/ThYoEcApiD+5ackkmwCAN7JC5q3GrQ0nucIlgA8TuHzzxAcUa5ZKEZB4hc70LJCR1iwl1h4yv4bJ2WHoSkqv3QPfD/6btvpfVYqDYNHrWU4rcuGw9BingXfIsWQnWk0IKmwlYWE06ZeqyfUGukuH6Ai7Y3hVfFKOBlUL1qOICYjxzKWBUAfQKjOCycmnbHyMNTBVWQz9VSAy1xpqhtgKryzLXY+m0AVIOZZ5HjQCbA5+/yJrLj2sjyA5GeVegOtetlMBMERnJ7C9FOI9PaiBQSQSSI81xYpTGW0iuyYiICslXiMCSVSyXPIEuydegUisYYzDUwQ3PEhHjn+/aocbsVNhm3bnKqEJ803Nt87znk4RiQ2NjBrXvuOOO0fbTT
z8dZ5xxxjqff7vttsO///u/4+EPfzjm5uZwzjnn4LDDDsP3v/997L///ut8/g2BJU9IOFloEtDrqrJyjAuC4wJLmZASANmUg//141tCGxo0DTnwYwlL4F2fCP/wuIf4z2/46v9l9+GuAyHtpY60XJyIzJA20wkO/UKxAk9u0Z/wWQD1LiJROxkp4QS5yT2A4jXoXD0Fn9Eq5x5UOZ71T6V9jMQQMbECWyg8yN25pIun4QQk+uwXbVHZBvDFyJ6kbEh5w0kCHVaniWyzyJGrXVuinhKD+DudQzgVtHFxBAaAdv0XZzkii4gU8AJq4Qrb9aT9R37hAsA+22/eqp2LDTkyQlaJHCHhpBoFvMUOsO9UIMP2/WkzT/LhwN0kORkhiwj5zVuFRmxtbXKH5BYSIiGDUYmZfgElBTabdu9TAfR0cIGi8SwFMG0MtJRW0DQCI6/ookx0dvUoJBxhEygEbC0TPYQoR5Z86NJWhy8H9reSERJX98QTkqIPIwtAFugVUyhUgaLoYyQtMRFCundERkHySlhL2NxIe018mFskgNL30Ti3nUl+a0LddQjp/JizkvD50RPjJDaIyDIQiEjOCpwjH6k1Jd2P75vuT+3oK+mytAVi2pNJWmnWDrqUjTsyMM4NTBv7jEst0JMhe5oSwLCUUKJEoQRGZSDySgooZy0h0HuS9ntTTEndtg4xfvOb30QxHgtlHdl9992x++67+++PetSjcMMNN+Df/u3f8PGPf3xBrrG+saQJSep3n1ZvjfbN7JNDHWHh1xi6xQdOyNPsJc2d3/ukJ4SlbuFt0grze9nUEFIO6tiHlWm2aDFMKxCTq4ySAsv6yhGTAjN9hWV9heX9wmmfglbWCw+JEFQa44Pbc/7rTcKxksL7EBPi1LWJlo19oSDJUhtAxRrgYREWkDbPnwtY5FZUaacU6CsJwABOcz/0pkHh+TaRDl6d2wfIMy1faezNUQA0NMULsBSuGa022HXS/vF9mFhLHPerkMScUiC19PD7qQNlghqW2ltMSmNQIGReSq/NBZmeFN4aYl22guvO0rOPWJDWFSBrSBiPbRMw8ArQVFQNCPVzelJ6s+U4S1LqnpUSkVxAb+r+GdVsSKyuQBg7M72g5CAryV1sniHCMtNTkEI4CwmwrKe8hcxrwE0wRxZuHE4XEgU0RDkHMVhrSchwFkKXEKNZoBwAoxHMYBbQJcxoCD0c2JOQSVUqCKUg+tOQ/WmIXh+6NwPIAnJqOXrFNKZ7U1g2PY2RNuhLjYE26DutvK3HAz8P9KSNJ2mKIaG/dZrzHAkpMttS99T0Grlz8238OikBoYQfM33lLXRTKrhW8veWzw8AIitwSko4ISH3PNov1FEK7ztZNkIKdPe7sAoeAaCvQrr6ngQEkVCtAa0tMWUwQtjim6qA6RWAkBhB2RTm2rrqlRpY3pcoNTA7stawuZHGTE9iqA3WDkvM9hSGpUG/GLl3J6y1aUzIOKvWUiAlQm28Su0rV66MCMn6xCMe8Qh897vf3SDXWggsaUKyPjCugnvqviVF/ILyY1PLTaoJnsS0uQTe8XXG6Ydbdv/aL10TERJu4gcQERPaToRkyltF7D8SgKiOQNY/uKLNqtfK16UPTS1wdZokb21oILEUIKmAEIjuoI21ntTBx7IkaUXrtIqljMnB2EBPUd/uJvBA4/T9oncq1S77QPiMANP0PuSsLSp55m3bzOFjSNyJU4MqF2hICKUsTKRx3neJWkVi98j483wEECLd4XwCqvGIyc+f/otdtsJfH9Q9qhaPo2e9Fu75u9/SGLZSKx9k7V1/lMBcaTO/UTY3JQRyy4t1y7KWD5QDK4iO5iD0CGI0Bz23FhgNodfeZwnJYBbQGkaXlpBIBSEVjJQQwwHEaAhR9CCXA0YqSCGh4dIUqz6kkhhom9ltJKwmfShJAG+OG1gfGHe9tmOMW3S4RYSEfCV5TE8IGs8pKqLixzUkxP7mri2qMR5EQnhiA5onOBHpK
SIkNoObErBEVGuIcugsZiP7l0EICcjCjgGlASlRFNMQTGkiXIIPCYNSCfS0VQLMlrbg7cgXYo0TGVC/d3EhSxdXX301tttu6ZSgWPKEhLtLxMWRAgngf2vP00BCeDwJEGt2SfOrhK08ba/vfkO8QMXuZdU20XG5d39cLYtNCf1CYTAqQwBig5mYu2r1C+VJCdeIcd/9QolEqM6b2rllxAvSOpCRuvHSk1W/Px48D6RCfdivrnBhqrlNLWV8PJCmGK5wINdekuaa+pOnzxwyy4JyJEXahFWQJvi/cnAiwdtBCQBKE/rP30ciwHpSj9Slg1mVJqx0Rqeh7DQkdAB5H+804J2TzjRugZOjUDRPek1qiB2IU8EudVhrVPU5KCGsNYMrCWq05UBsyaSUpjxgN9VW0zVCMgF4Fzpeh4ZcxyjdKXfRWjsosWZQYsACetcOS5QjjXKkoV0tGqNdoUJtIrlPSEC58VP0Fe5RErKQuGuN8hYTsprM9BX6hcJm0zbd+PK+XWJ7zm1La1vHpNCx4GsJyABiNAdJFpK1d8MMZlHedzfM7H3WKnLf3ZaIDGZhnIXEuEVQ9nuWmPSnIWeWA0UPcu19EL0+1ObW0qJ7M5BGw6geZvoroIR9P4daY6ZQuFeWGIpmF9MceIwI38Y/133PuVkB7d28OOngawC57nqXSSUw5ZIOVJMPxBZgIBANIK+IobY1kQ+JYAXhCgsiI2QlKaCtS95o1hIQPQqERJc+hqgyMGVhrSSqAFTfbitHUEUfM6pv4yOlAKAxEgIaGoDCUIY6TDzt+VAZn2WNaouApU1oI3NsaDI7H1gLyQZO+4uqp0IT7r33Xvzyl7/032+88UZcffXV2HLLLbHTTjvh9a9/PW655RZ87GMfAwC8613vwq677oq99toLs7OzOOecc3DppZfi61//+oLex/rEkiYkETGoCUKfpOBRFNSaLL5p1pgmKwoXpuosKDnSNI6I0L1xnPvDmyvHnHTgTg1tW/x461F/4T+f9oWf1k5wFLhIhIS7g6RZcIiQpAQEQMV9J0o5mvlMgnXORbBpLs4t8rksQ+MIEm8PD7Dn8Q1pIH/OjQYAVEJStAGLj6q/GUuo8+ScW0Hocrl+rHz3182jzXtMfSgZEeCEgbuXxcH39UQkaq82/pmRIENjjCqv96R0biL2EAGBv1i9YUz0GwP+uTBiAtTHO3FtNZEQOo+3PrH3VEqBsJZbIgJZT1S5qwkJVmQN8bEjg9I+21Jj5Kp9jgZO6Co1DGu7kAIjaV33ylJDKQlVSOiRxiyLaZvpK+dOas9DAnFPCZTSYFgqABpDrTEFgej9MjpowcsBRDmwJGQwC7PmHuj77vaERA9HGM0OYEpt/2kNISVkv4CQEr3lA5jRAKI/ba0mRQ+i6EFML7eiZdEHjEbRX2bjW9xYLY17bzxRCP3Z+PwzZCR93oRC1hMT/vz4edPP/Fj6S4UF+y5xwExf+aQDy5wbHV8PpimVrlMcUM0ioEo4gPxM6Ie9CO44OSsIfS5kWH88ERHw7nkgq1g5sDFCzjIi9MgTEoob8q5aLlbIu+0Jaf/BvimF7EMYqqdjLSWlBowkhYpET2r0ZKhdxZ9VWdPv6XOJlIaZvuowOa688sqo0OFpp50GADjhhBNw3nnn4fe//z1uvjnIf4PBAK9+9atxyy23YNmyZdh3331xySWXZIslLlYsaUKSIrWW1JGSdN6k942TEo4wMVevE86RJxqpMBMagcr+OVSJUVVTvimjXygvXFPWFL6IcRLSZ37apHX1QcYZTRiQt0ZIYdPhRlYSpkXSyV/ubsTRJji3LnAyDZhMBy0J00Cos2D/CkDbjF26zGurUn9tv8BI6w7mU90KA+XHmHZxFNr2nxEYJudNq2yXLN4lZx2JEgLQu+L9rZL7Z/uPIyaexJng+qWcGxzPxhTSwvI6DHm3stAW4bOy8cJoKeHh1zdY+i8qZdDiUBK+poKSPAaq/jy8zgLFc3E/f
v6eAsGipZSwAjNcnBOErfouRMU9kSMNsuexIsYYayFxFhEiIvRbSkqEEJBDAVlIKCVRjjSEFNAjjaKvsHZQYuAsI5akKP9+TRcSa4YlpoyN1ZguFJSw7bJyYRGESS5UliWMLr1rlimtRceUGnoYsk4ZpoAwpZ53nFLkmhdlnVzHol0M4ywfOfdc+hz/Fqxt3CpOyiiyjMz0mILKuVFOF0RIrO2XWzfagprC+LhP5ew9M2RsEeFuWgrWKsbdsjwZ0aUlIo6keiJSlhBqjHMjG0MGtgAn1abRxs5H9nNY17wiah5In+WkyXs65HHYYYf54qk5nHfeedH3173udXjd6163nlu1frHkCUk2ZsMXgIu12HXviRRVUhIHItdp4nIny++bxo/4BbLiqsLPXz1X6q6zqcMu7kFjRUGKFNROQaQ8YD0n4ERZVWTI887BM/RoYfzAIC15raucDKQkTZkLNAvQOSKS+i6PS687lDaQnMaxFMZ/j+4vEjbyRDe4cACAdgkbpA/8p5S6uSHLT5laGHJVsnMk3usiE2JC+6UYt/bZ0xigjK1NsyXVXjBRqtgcQlpaAygJqavPN+wTC02bitJgdqRr3V+lClpgAFC9WNOcIrUIch/+XLIEfx33jChRgpREvoVXy9YF1aeB60RGylJDjwxKZikpR3YbUJ37pRCQhSVMRU9Z68mwhFISc32F2WGJnpKRxURJm2ijJ20QMQnKoq9gQ1EMRgVQqAKCERPDA5i1hhkNUQ5H0PQvWYBkuiDNk0TYdzajQKixUKRxBpO47HByUldnRMm8+xetA5bwxSmYpwpb/0cJayHpSUq/HbtN8XGbG6788de9y/x4Sssbip+6uB23FhERAbnoEREZkYVk6GOIULqkBaNRRLlNWQJFUXXfEhKGsq1J+10ba1wkl8bS/R2W2illgsuWn6fdTY8mmLz48zFLgJBszLS/Heqx5AnJJCCNZQ45UhJ+C3UixhVSbBPTkrZjnGYi/fn+QkYI3DRPJv+cHzp9pzgRKXgcwXgBnxC7iMALPjlXvnTxzY2xcZp9bl0pSYCu2T/KaDUh2gQn2qKAApDGW1sA7euVhEDkeosiEDStTZdL+7Piuiargu0417g0W9qx+2xf2efTP/0d5lwwsyVfoc5CxRXPvcd1zy5d07SxEoCyphEAeUFnqYEE1KYsggDCO9niOaXWQSnjhBMVTSulpnYu7UNQEoTxlhI/hwgB7fYj9yTNjjM6WEZ0YiUBAC0BjAAjjbWaaAEhhZcNhRAwhcHagZ13pgqJtQNLLFZOa8jSkrtlPY2RtgRFCPteFqoPI2dhpAJMAdGfttpz53IFXUL17JJtSg3BMl4IJSF7hf3b70EUfXs8HVv0gKKAkYVPCWyEhHHCqDGhqGIT2gipTW5WPvA/u4+ukJI6IgIgpHJnVvHpQtlU284tq6eEd9kipRVZKYBATIB4rFKTQsts6ty0e4J1JK4PQsUrhfts8xa637UOLnr0D6gSSCEtSVGqYhkxjIBQymdDLlyemLBaXCaQkRGzXpM1O3Z/b86a1uSeBywNQtJhcWJJExLpNJLN8RwxxpGSgLqd6EXl58xYMkrubtOgCa7BuNiR+wuW9cMkrNhClEvfOJUtShb3cy5gkVAyhaR0WtieFJAGTisb6k8A9c+Eux5V0gmn1zQIQhVZDoRNIamFQc+aXwBUY1asoAzMJbEP3sqTaDh5H9Ln1Keb7ssqZ12wrHPjkgLZooMpctbFOiLEi+Lx9qUact7EcS4B47LX3XbvwGsCyf+fXzuKc0DcX3R9LkCnz5jqvPj72AQW6LtnR/69A2LXK/s3kH+qjF3nfpUipE3NuCoyDGEFP21KQNq6HlqH94I06eT7zt06bWyHdM9aQiuJEhpCWEIhjHXJSmOYvAtXIiwKKTGcKyGkQNGzMSVSSfSnSshCoBxp3OvIyNxI+3mM/mrjrE5yCoPSjv/SSMxMr4QwBqYcQOkRxPQyO1lJZTNrF
T0oF9QObkFxweyQEnJmuY0f6U9DLl9pt89sBlNMQ/dnYPrLYYo+ZkuDuZHB7Mimh107Kn11b7Iapi5vKXKpf/lvqQCbbuPWbvs3uOfmXHN5jCC55PaUwExPoSct+egpiWU9iZlCQQhgWtlnGxWfZJmxUtCc5V2cTJWgpCDiQacLlm7uxgVPQEQ5qhISKWGMhpCF/evObYwGVM8SDgQyAiFhVA9Qjmj2Zuy2/jKMDDAYGcy6VM5rXZHhuZHBvYMRhqXBvYMSpQnFE3nK7FHyPJXTAtQlLuDu1HOjhcyXt35A7/0GvWaLtfP+jiVNSDgmzQoy/nz2b86Vq621hEA+1rm4lsniR/L7pckiPvj9m/znlx60S7tGLlJQ4cS3f+uXsaAog7BHVhESimiRIq0YEJRPucwpjQU1nbVEm+AiEmWASp+RqaauHSdAV0mqs7NLYUmKRlQEkh/DM0SlcRpN91UXW2Jv2o135vpI12oi9P48DW6IlXZktOt8+yREhOO4v9yh9rfZUVzgrdZqxOIiPGFiBCoVsA/eZcvW7VtqGLiMO4riNnR13gGY77yor1eToqnmA6F05Ls0QVlAMUx0rTRtaU67ntPaCxc7Fb4Lbykhcsvdp4wuXYpdBeEmFKMBWdh3XRqJ4dwIxiisZRYS+rumrzFd2EDi2VEJFLZuhCpt+l2p7KgzxbSNb3DkQkgFjIbWZUdKYDS0/nK2E61VxNUhEdPLIXp9iOnl1jKi+jBFz1pGij4gC4yG2llHrACqdRwv1/hutESOpBA59M8m0canc0dOUWBjkELmLMqaRe5w04XyiSXIYq5ESK+rZKizJNhYM8b478bYlLlGAMIIX4QwBY1ZspBwckJkRACBjJgxggMRD1n4opfRb8wqQmQEqm8/C2mtITpYQEpDBAPOMmwc6dTOTQuRAqvpmectI3FsZ5vkIx065LBJEJKUjEyaza0xwM79LWsE0TjLUOx+knNxyFWWz01yFXfghokwzQC2KYJiRQBE8SKUxpECFX02FR/YbrWjWuSsE2Fbai33gqcx0AK+MJ/N106uTfCa8LQ2g/3rzpWZ37PJE5gQTvEa2rmVlCaOK7F1DWyj1wzt4jJXaszSgsMqUqduFjTeU/c3/lsTWskoDS5blbTLksdgwN9jk9sPnTdNy0vPpgl9JeN3NSPwpJWcydpG2bvq4ik2VYx0XGBTSYGhtv0zhLHvmyOuRWL9JWslUCVxOXe8JihB1krm1qWt9a4nJYbCCrtKGwwKG8Mx0zcRGSFheFg6C8mwhJYaQIGSUuhKA+EzrRoYqXxwuQ8ydyilgiz6EFJCjwyEhA107ymfWrjfCxaS0r27a4Z2+bUWpSnMKWup2GxqBr3eMvSkslmXejNQy1ZClEPo+x5grz0a+rS/RErElCUtoj8N9KcAWUD3l1tBtbcMppiC6U1jTgsMhxr3DTVGpcFapzWfdemS58hC4jXl4bkXjNCl1pE0cUD2+Q2S2iAyxIFQhizrimWPn+kHEaXPxoySYBYSGRUhpXfVFhkEpl0VdEqzKwS57cZts0u88PO2oYxu7hsYMeHgLl+cjHjCA2/kroIFn8NoCGGtJLZByaIkmZsWuWaRhUT1MYJd62aHds4fOotIaaw1ZM4VRaT1giwjcyPtLSNpfR2Au2nJaFv6l1zpZH/xW0g6LE5sEoSEI3UlAKoCUNiXbx+zGEp4KcgLmjJYzdMJOGjKqxrFcQQkp3VPQedO21+nuVzq8MXlRLCEEOGQAlGueSImhXQLgxKBODD/cEKVjICZyGxf9wBfPGpY2gxLQ8GrO4tKYLS/VpM1AsHqQBrfnjWn+bSM0MYTE3uw/UMarrmRTSVKVXhpYcmb3qtkJP1tUiE7L3zUW/9SopE+VyD4eHM3nrheSLAqpel6x7k39pSwfZikqeVZ2uIaI81xSfbam75LZakN1g7KeNwU9nkOSzjriRu3zLoEBCJSV+ehDvRu0jHBUmmfY+myvfWUtNXFl
UBpgouWFZRlcBlyhKDUBn0tcZ+7jilscIgYuf2cYGY0oGVsKTG6hE5IiZAKUiro0Yy1nmig6Jc2aL7UGPYU/uiEbXo/1y7rAQgubpv1CwynFDQM+lJgRX8F+lMrIByREHoE0V8OSWlgszUppLeGGFkAxbQlU/1lGIEqc2uMNLB2aOeH+wYl1gxLS0hYsHMb60iFlBhycaP+is8xgstYJoVXOnBC0i9kVC9JyRJKyihBQEgXbd9LSuVrSYgjNcoREmGrn0eEBHD9xxompFU+MmtECQEFYwtKkvFYIOttTZs4GSHrCPKHODJi2OdATAAEbSi1SUgbXwQARd8SE9XHyFlEBi4Wbra0RHPgnvWwNLhnMMJQW/estcPSKa3sukFrRbpe5NYN/pmTEE4wi3LxCyBSWRfLDXpNs/j7ZWNjSRMS6zM8Zp8Jhavc/j57knQ+sHxxJFeBxP2qmtO8/po5MpKNN+C+uDWBtpsiGQFioTWY7UNALFDv0pN2ZUpGuKUqLZKXa4eUzhVLBCGZI5Aed66Gwn7cmlIdV+F3aYDSnYfG2tC5lMyOSm9259rKUc2CUmcZqRuzTUHK2lTfGZ7udz7gVpJxqHtPcviPH/zanV+gp4KbDrkh9VSV8HprlYgtI4THP2SbSW5tyaJkQuJIGz+GyJUrWJ1C+uZSxqmRQ6yI/d40N+cUPHUIKVYFpBYuzkX72BGgiKw7qUZ/rbQxH0YbSGWcbCigHTnxRRO1gNG2IroEUDJSYnQJFH2bOUkqjIbOaiFtBi6tDdasHWLQV5iZVSFLYF9hqK2Gn+YlJQRGLkPkSAnMTK2AVAVMOYJQfZsOtqxW7YYirbl1ySIiAiEx50jGwFlEaP6YHQUyQnEEREZS9x0e+8HnjdRFtOLulj670gnr0tW5YGsZv2aYp0K8yWCk0VcSPeVi3bQJgvu6gggB4NPlaoT1g36jLmkxPXlEthYhLcdw9UDg4kayxwkRExJ6xrKANsDIpS8vDTBwcT9zI+Of5RpHPsgaspaedY0VvSmYPf3M4w9pPCspYNpoGjp0yGBJE5Ic6iwducmjLltEdX+2qKqQB99uMIAOpCTXlrYkgafcWxff3YWOp1kM6LHJrycl8wtmVbFFfO/G/ZfrSavJCxp1XiCvDkQElHAxHSWgyYhRhudG1dLbxhgRNKWpLgFKMUvWkLRtVGVXG0SarjWDEB/h2y3j4PXIZUKETGVA7MNP7wCP7YjaawwUqgQs3YejrmZL+N39lblsaO46/tyhb0Kfm2w7APjaA6A2M0GGEiMQGfH3LgTGBWXfH7B2EGpeUMAxEArRlc7FpqcEho4UWEKdxgGBjbnwA3m05OKfmlxb7TgyvrCbtdgYKB3e177TAM+NFGb62rlxWY07VXAfjDTunR3Z/eZGNg3woMRwrrTjvBj5uiO2hl2JcmRdpvRwaK0mwyFKNetctwYYFn2MhjMYDUoUfWs1URTwPjvCiukCawcjzPQL3LeyxGZTBTbrj7DFTIGpQmGrmR4KKTBdCEypHpTsoz+z3MdDkItQ6Cf7d+QEVG0MBnMGpSm9sDpKtOYUxH7v3AjawFpYTSykUkAzWTEw0q76d9XFU2ubTpnSKntLSeJaSe+UEAKqkBBSYG5OQrpaIbbiva3ZMlWEbFFkRSmNc9eS8IoZLRyRgHEkouGFdRmsImEf8GOWyAiNR/otutdJiQnFf+iRC0wP8SSeCLH2+FgSEd6l0hiUpcFoaFOWD7TBqAyEZKRD0Lp1zyq9ZYRIyMC/D7pCOglNLlmpyx0QUuj3lMBgsPgJia3UvmEndGHuxwtIS2wyhGRcOsp1O3c8GSkpoMvgkqUknGTavp0L5eLx8kftuiDnWew48eGh+vx/Xn2L/1ynRY/Sr6LqqsXJyLCsCkHjkBo9uNtQ1IaW4ONCZT6Hc1aPbboOJyMVbRYjI7lg8rqMV0CwGjZdO5t9jlwYM/FVdH/8kaZxWN6C1WDJKo3BuT+8GVLE4+av9
9+xsu/5V/0WACpuWUoIPGXP1bX3dn/BaV/4KYBY8LQxBbIijNrf3N+GcUFkJD1agGnhRd7lr43igK4hidAWNiifXKPIjSun/R+MNEqjgAGAvisqNzLQjrHrkUFZ9KhwdgTuwqV1CYwGKEd9yEICgxKjXglAYTQssZZZSIggAXbcTRUSw9Jg2sVGGAhoI6GkQalJkVAlAxR0TUHNBlZI1UxYHemq1nzOWUd45rkUloQSQanPHJe6aNl2cZc3+GcgpCWoRPQIQwBqFOaquZF2rm4apY4tMvyfNPY3KYS3bNjM5cZlZwvxHIIL/nBkmCwhjIxwq0hd7Agdn/aIMQYQwpMZTyCZW5gRifCeI0jajUNjyZdxz1PD+OdaamDtqIysIrMuTkQb+DgRrrQK8SKx5qxN6uU0gN0nmWGKrA4dJsWSJiQ9EdJQ1qHp5WibCSicJ+zfU3bCoutLFjTN64/Un2+ydMW8vfeXQNo20G6ml25BAizBkJIIhyMkrq99NhFDMRi8MF69IJXrc7JQlCZYK6imBbd2NZ1LJhO63RasEhX3pcQCYDX61lUFRexWYc8rxhIP2i9qT+7aDDagn/ZzcRusbXEsDbt3GX4b+tof2t8zHT90QmnORYwHsvNq67xf6ghPihVMEIyLHY499H6BOheOfsGeqwsi5+CZyJRw1icJTBeqUhmbxD5t1dte4QNULZj57+G6wR0vjPGesXFl/L0sdc8TkDlnIVk7GHmBba37d9/cCHqkMbdWYjTUEMJGepWFhNEroEcDjABoFwBGpESPBtbff/ZeACugCwUhhxgNSxhtMBqUmHNkpF9IrBmU2Gy6wJ3L+rhnRR89KXHvoETh6mgsc2ltp50FQbL+I1A/UlVurwU3PLWrxtoh20auXEkcQfysgzuOba9G3/VZNDYGlsdZdy3XJh9T4rbzObEMz0xIAV1qlEpClbY+yuwwJCGgZ9UvyqhNdv6O486mlcSyvoIx2mZm05ZMKSFARlI7N5BdLnbXJSKSIyEEGr9AHMhunEneK5W0ibJ52WOk+x7IkO+ShGxpYy1e9HfoyAc9S+5ux61cZDW/d3ZkrWKD0hG6ODkFIc5Cp6GkCjEhMiQfAEKsHV9PyIPBKgC6oPYO88OSJiRNWB8sPZyTJDUwlXV8wXGkoY2GOTpfTijsiEkWpTHBYiXzFhHra6ujWAwSdnPJAoB8rAS5CZFgHAs9iZBG1prE8sFd+hRzZWl2a7IZuEgjaO9TQgnjXZH4osHjb+ruK3atqdYBiW7Hjf3gtpAvTJe6rdUndyAh1Oca9pnRUkKkdZX4pGTy5EfsXGlLDpQAgXD4Q1e1Om5Txmlf+GktESHUbQ+uf/TXZSaT1q1DuH0ERHU+dQHE6TBKycgwIivxc/djWYW51dYPFNAyiRfTwEwv1F64Z9YFw88OMeVcgwCg7Nn5QUhLJnSpIUbAqOi785cwWtt0vIBz3wKEsmNZDNbCaGsp0SOeVtbgXueyBMC7jpEQro3NXra2p3Cv4tmkXPY3KbLEOadsISuIFWLtb2tdAoyc605d1j0rsHJrmT3eC7RaIGM88mSEF570v0kBlIDRErIIvwshbHwP23+kDfrFCKVWkRtqb6ShpcCcS+ktR4AUCqUAhDAuRi/vZqWNiVyyqFAkkMx5CMcKAMYEZVJpDBQEq6drLFGBQAlHYBjpSUEWHWpLqcP45u52I1dl/Z5B6QkJPdt7ByOUGrgvItajYPlzN8MJJ/Uhzz7XFG+YKrd48g8gWJoXOzqXrcWJJU1Iemr9mQdT15EUXlgjYZK95NVUv+2v6+MU0oXAC43doAacUAorhNuCghLalKAUo0NXWZdWCBJqhlr7BXtuxK0aIQ4BqArSOetCKtz4zya4NmTdHwSrt5EhDLmsU344MCuCtB4gngzl4jXqUum2ATPKVKwPbUECKu/PCkmBgfa1TmwbS6dppkBlIE8I7fkmszRy1NW9uD+Dp8ht+l1JK7RPFdbnf6qwm
Y54kbplPQUlbAVtcoVTElH9QRoOwgSNNQeREVIccCLC0z379iXPtEdjkI1bXrBzqK3wT5r4vqsdMjMY+cD9UhsMmFZejzSMBkZDFTJs6RLaxZUQhMuKZHQJPdJAIaFLjdHQxqYIIaBLibvddTghGbiCijN9helCQcmQhprPEylyihKe2nXNgGfiC2QkdutU2eBlOpfVmBu3bxxTBMDFWcIqTUYagIbQwhajlCJLTGw/AUba+BPpjh3AunDRNda6uB+eOY0X17RB28pmQ1QCZWEJAY09gC3bdF03xxkEZQef5vxyLwKZVk5hooy1jpQI8x09F0FKlZrphV+LkxGNamwIpe0daktIRixOZFBq3DM7iix9ZP0DULGK8Hd8ij0/eq//f/b+Pla2bbsLA39zzLlWVe29z7n3vmu/94w/wEHE4JiOCUnaBhHRBLA6iRQBrQgJEZCiqGXZqBPnj3yqExIRdxIpUUsNkdUKICEj1ImCLBk6CSFAAhhDjNMKdiCAcJ5N/Pxx37vnY+9dtdaaY/YfY445x5y1qvbe536dc+8e0jlVu2rV+l5zjt8Yv/EbFoTo9bV1Iv38ZAVA4uNY+mivaG80IHGu6tsD65z1+zorltsOINeHyHf9YKLRYi2qjimBDNo+2g9Oa2/zujS6jRLV18xJE8WmFox8FmRGT9nMbGouuFwPdXiE7lOXt7zzfeRC9dEMib7qIA7giLYAHEvlrmWt1iKKzd8GiADrDsaaxKwWeDdUqEA4ZWtN5jTToOfiSKa4v23LOaiTdN/3oyy7AoZOd7Kv21ebzXxpwfcaLcCuv4nquvW6g3PWU7UeTZxROa+8eo/rMp6cOMyDgJEnY0DwDk9Gj4FIAMkoFC0rvVpjORXYcwJAQu2xrpMCfLa0SHV20zE4PgXA9bP1LKesUwMSl6PHfmEBJaMUnntyuJ0ingXJTCxzFo+IHvPeYwoDEjPidFtkgWUfKn0lLrE+ewvDB6EmeU+IC2MePW4PC17uF4yB8P7NXACfOoXqMAJ3U44FaFSH1BYz1+9beufFWIGI9gCx25bPjXRyBk5jWLDJ9R667pcZUJBnuExV01ry5BIiWieZk/R2igsAMBK7kpW6Ntuz0fzbibAbQ94HwsyMgQi3A+NliCCHAorXhDLqWNSOi71ZOXJ1yMfc38QzCkBxnBshulTu794/t6vnXIDPqdaJ6D2p89LLQ8x9a7gUrL+/nzEtAkJemvcx07N0HptM9Eez7/pvl6/10twDcq0VpIyBGjn0u1QIAazK3L9u9ij7+3raGw1ILCAA1gvGz0X67mv3yZZ8EJnTNVuTXrXb+6xbT/eRHgQS6ZRmhoZWYGg9CkbmmOlaMTVNoU5N1gAQqeXUqrUFv/frUr7WL8YWVJdoconMVVUxtWFlvb0i1GrhuH6XXDMJC7CrE/TRPp8AI832HaC9QSwo8XScGSnrPULp3c5SBeenzq8NEjzEPiuyvQ+x2r/DrZ5v67DsBl96/2yCgOtNkAyJ/q33sXOuRKqtOcglj0lrIlYCSysxnrOF88ZJsoD8qDFjfgbIiXoUJ4eN6WQqBcABF2Pttr7kJodhqNF+Th6JCYmFxuVMpsTRaU59YnHMKTq4xcE5hylwOe/TwpgMfawfe6z145B9tbSsvvbDOqkbE+Tor71mdjml8oxWoQBf1mcBy+QcZnJIyfQmoYS4AB7UjOOnsv9ah2GliPU45BxFHJZa3wAw9ouu0+z7yvW3Mu9rwiT6+yF/uPWUVeRqYARe79hcEZXkXk8uCRjpblNLB0sWEBkwonSsmHJNSM7oa32IZkOe3UxFKU5V4m7n2PSCAbKMdWYUlGPn1Mxnp/5pYKin/QJ1vunP86M92qvYGw1Igq9KKnaC6qOzp0CJHZCBu7IO5x+yu4DROSvFzN28dZ/C48+izdFOYklUWcjhEPn0pJavhXKqrd7+7RRL4d+UtdnXooc1he2bCdxG7NROcu9hJnmWgnDb5AtA6QiuD
b/0M0AHfNT3rjooDsfUAjVbOMxJ9s/y8bXru9LZCp8//+ZcVFpNnzMFJWvnv5wHk91Y4697kpqU0VMBJZxaiqb2Biq/W92rR3uoXXSdlnvwrUpQ3gkta5PBiGZGLkehF4V8D5fMCEzxb3HKEgC51oRS51xM5WuBKu9qqZVqVhBCi74litvWW6yNnX1t2c2sDQSjdH9nzjSuBRejx/s3AbdTxIvBI0bGfFgwHYSSNW28gIy4FYoWKoBXRy1xQnKuNF9UhSleEpY5Yj4scORwc0OlX4dVodJ1qPXrX7O1niBay+IDYRxqb5SxZGRqJsbnzEAtBhebg5y7acuYlgGHhXG1nyVDsl+KYIA6y/tZzlOMXGSB1ySBnWuPObH0tVEQYushdAz25HAzxZJRGgOZMbI9Hz3Fdk3+1tZN6LFvA2Fj6nmK0EBuyjt4rZOqEKW5bibb3I/DtdZHmtyq+MCz/YKZGe/fzHh2M2FaGO+9nMp5neaIuHARTLCqZnpPhMELeB68XG+0J6QHpZampZkR25zYzlVAC/imN8Et+QRqSPBYQ3KnvdGABJAHjjsnaM1hOpcp6YEJcKyUda53wn3tvoma+wKR4B3+i//5fy+/+7/86l/ygfbvTbK+oSDH3KhwtetIPfeqgmWVZWr08HwUsRS5mgupE+FdmZH7Zk7swG4zJIUPX5wCGF34ymuudJjeXCnsrNkLB1tErsou9T5vKQwWjJws/DegpL/fz2VJrNlzbtWyzp0+XeZ3/ZpvuHsDj3bWLLjuFeG0jkEByW6gnB3JSlC5U7YsJ9FXzVI4Z65hvj9Is2lQKpd1tGHenx67fecU6XsFIAruFczbGi5O0rxR7jNxDjkl+KavkMNtBmmaRdXsxRxrr42Yvd4YGZgiyHl5TlZUjaSHjyvRf82UcHKlPUYp7u4cc6AFFv25uW+E2jr9M9UM5Kpz7qozCvTPYg2M6LXQ8XNckZmdSLJBagpMvJdMSg9G9LhFURFHgYymqJ5kDLcdx9dqoqyi2Mlj7hx0FRqInErGZM67Gb3Uv8m5cUgFiqQGgAOVDjwzF9VHC0i007rWi1xPC26miGc3E96/mXNmZBYZ3/0i9UiRsUwaNEqZHifXxZGDc5LN8ys0pZ6uZa+5zYz0NYk2Y2+zT491ro/2qvZGAxL7MBwNyu7hsrqrgMU8v/eR823lTo9TwKtg6Z4ApIkOf4bTorb2wdb9nGmI3hSrH9UedBHg/jP9+9RnzT+9Zr4Fx2vUBx3szwKZZOhXmhnJ0WYFJAPlBmOEk7xl3ZeUVMFF7ksH7U7MpZRplj/Lc+U1E8FVJaavc2qeCd1PICscAVwma4mIWyGAkp3psySuNr3UiLeeP3suH+3DtSe5dsCOR31908UgvPKLwRcp2iGDj1GbllIFIn0w0j6qKQmXPqZUmtppBnOOnN9rkW+tJVHjmDCoshzXZ0/32YKRITtdDsiOoiv7410FDjOlcv/PnMAXQ+HcKxjR12eeMM0RyxzhQ/7NQaLWImWrmZHT5zxGKfomciWzok65ghA22YSmE3o3b1ln3s4T+rlmXSgrzBWQExKmpQKvaYnFkZ/AhZ7U0EYNAFR55d3gEVPC7RRMhkSyJko1mnKNTuSEaVZ1srQKrlz+pzS8NYqsjh3NPndAsA866TIWlKhZMOLJ4Wo7wJPDk23IIgPUjH8+97nRzwbfzt3aiBdAI6yiWT/tF6IgZL9IRiRywldeHnAzRbzcL3h2M2OOjOl2zlmRmhmJXbSnNJ1MDj64JlvTz1uaESm1I7luRKXndTwmhwLw7f113MT20R7t4fZGAxKgUqWoCckec9iBV6snuQuknPvNXUAEaIswLRDpQUjbI8EMyB9y7cqbYPaargsZrC8LtOfrOCJGiBxPgg991X+nJBGtnRqkB99e3/s61uRckU5Vh2/IEWkHew+1v+MkzhenhCU/M3L82geCQJzAzGBX6VpErpGrXKMm9vvfLONrJBqARKPzdlWGV
c67AKuyXKEEUMkK9ZNgc3wPfK4f7bQ93bbTgjr0Cg6H3BuDnMMmaG1TptBCej0oNUsdFb09WqpWfU0JBSxLv4UKRLTmq1fDs/cfSa9G9FTBUkuSwUigro7F4mhHZf8UkACVrjgQCygj12RJPLnSt+TgCTGqQlQqHd+1c7maRrCBWiORkMARDdBIXCPegGYTGIlj+SfLVrKb1qw48t17ahz8ceObwt6UPG5zNuOwMHacMC0R0+JLdoBTG0RZG9/0fGnvl5lFLWzhhN04FzCial+3VvXrRJ1aD0Z6UFIBiQoxcPO9frdwFS+J3AqZ6H7rdR99zYzcTBGBHKZlwG4MuBg94rbWAQ7kSqCMnMPAVYwEqEXzVi1OMyFSsB4RGXi+n4tc77ObGQsnfOXlJPfWYcF0WMCRcbhdCuC1mRF7TSjkcxVI7rEM2vXeVxCi9DwrXmCL2PVvBSM2M9JnRfo6x9fVNHP6cW/z0c7bGw9IPmy7l19zIroOrAOYc1kRu55zTelOPeRvwsP/YZsFmmuKKVZqtv++nC+qxbshgwv5baVfHUfM6gBeP3OFa2u3fU4RzQJOe93L8pxylTZjAOWplXPjR/W8xAmMSJLZYAElxeHDaYqTc1pvok0NHTRmTVITKhOPZjeck0L4nCXRmq32mPq/+40rMGllWEu2yreUMEtRs81P14D5b/u2r1s/0Ed7kP2J/+nvlwyImmamtBajZqzkmXGZauXMbxLqs6bvbUpEPqsN32au3bZbMCL0KI0ka71TD2baZ609Js73llqTyFvL2jhgkBsdMWl2JatHBaE1Xm0D/BQxLQGe4jHFkEU9S2gyed0sVKRS3H3i4dRiZAtEEifEJWbpYCmWj8t0BEYSa2YlN94zgMSHEY48mDzCOJZtOarZ5gJ6OkrUwgk+XyPSurc8NpDJBGhM0AdfpOvnmGQc2YZmvWNYikKXzWjcRZ9ai+qHlTFZrV9Pf2yRazG7gkTm1IwzulzI+1kDjkK7K9cuaZbeZHoM6OzV4paYsF+kYeV+qV3UX+5nqVHK2aSXhwXLFHN9UW6sqVmRhY+uH7yDoxxMyuDT+wpG+6Canre18/dQ+ywGSB/tw7NPHSB51UDpuWzGcT2JfK61C6fsrodTU6FW1hSoUQhd5j77/Fmyhruaz49G7gBz3qmb1OBK9kQi7nJtSzFnp7CltuTJCGgnxr5j7Vq265ytARGhrjj4LB85Zx37gXNRZSIwAewFIRTwhYQAZ+hSrtynQk2p2xRaTFWBEXgj+8wpO5wpAcHniZULfSVqo8m8vh5A2GtiP7ccY/vItBLE7TOjjsEa6H5oZunR7jYt0u0LwBWQbHLxrjgyUrekdEFylS4I1E7hybUZMgUjMQkFiyF9FjiJopVSVm5mASKHTNlSIQpdl9Y6qWw6J/GGh5x1o248t/tGDqXeyia7vZPGdg6SLRm8ALA5CgifWQqot4Ewj6EENN6/mXG1XXA7xfLZM0+Yc3akdGefuTQIVFqW0mxKgTfLZ9LNfSogRAFIPOyb7IhKDNsMCYAGiACAH3egYQSFAYmv4MjD+x0ARhiSUXuqVKZS82Ga6sWQwImAXEOk581KlffjsKXbvb0datF2zoyIXC2vShMDXZ3QWma6/KugROlZmsXq60VKhiTLwCfzz+nvXVsbaKXgFcgQS3BsZoAoN9iVuxT2pTarRFO4fjOLQpbUhkQ8u5nxYi/30rPrCYkTDrdzoWbNhwVs7iWbdSsgyus9ILQ87wk+iMztOEhfG5WTFmDX/i3nsD4ra6bPX204aZQQ3wBQ4jzBfcyyv44fZX/vsk8dIHkVOwVG+sj2Qwrd10DNWgZErXXcToORdnsykX/MmcdPxP7o//ilVTlGIDv/DTo7PiF99qR+UZdVZ+IoO8JtQboCmB6MNPuDh6doa2IkSfFmygJTDjk7ItQpyg6XOH35aHPBOAGlOFi3Tmipi2tA1oIWa0LZyhkUcoYJcRydLse/AkLsOVK54
phknSLVLN9ZNS9/R+PTf/YfesyMfBj2R//HL+Gt7XByTFJwovQNh77Tuhgj5XutfmbV3YBMyyrgE43iXfkXU5MVUSEK23QUMGMsu5J1exWzndMJKoYj97x3DvDIFBzGQA5LTiNugy90JB0vtGZiN3pgAjCiZEWS1hEsAHIPEseuLVDPy4qTXMEILzNSlL8TxwJQUjymbVmaVjnG8hkhcTwrRay2cMKIlayCyarYrLOCkQJotZO5S5hdvXYU6/2kfTJU5VAzTlV6+piNcJyhbvuk6HdVtlrWEcghUgsyPIscrq27cd161EJ3w2udlL4/Z8w14KKgb44V+Ak1Trury79CycpgJC6qTAbwokDK8JOb6+1yoECyJaUG58S//rjOHYfWaKmtgZJHe7RXsTcakOjDvdat9762BkbawvSV7SIdO7Z3WF+gbp1WzYzYrtz3ifwqd/tNiEh8UPPkCvOir7Gx6XUd+IG2+H2NSscdfUg+0+XX9mF9+6dECPT7te1ai8bRKtkDlol+gShWzflv7T5MzuHGc4lcC8efmsyBRoFPAdzIqe1GDL0HGQMchuxADVQBzTnhgKPz1YERVTiyxw2ImlKlM1DznZyK421/3IqNn3abc91DZisBJLVD9npJhqKV1pZnomoKAekYqKQKTGyNiDR+y4W8uc/CQbuJz1wASF+crNZLbffG6XTACFDwboCVLss5c5gcaKCSfY1JsiM3IWKOCRsv8sDbQLjahpwh8ZiWiIvRV+dy8JhytoQz3YZzZF77mACSJaF8fhIzeJly9/cZ8SANFxdtvDhPhb6lrxaQ6D8FHylnSuz3FBzC4OE9IQxST7JWNK6F4r0whwoJxJSwcToGuSOZZbmOsqxkCYSipLUUmgVTMGqv+ali86P3K9fZZnsip9yvo61bsf1Z7DYANE0pdzkj9mQrr9tApY5KXlGEHez+FBBGKLV4WovDqfa+0maGpRbpsGA+LEVWOpZsGxvAWp8LR1Rqg8Io2ZAw+vp+8CU7ouIMtmbkVH8b1pQmVWESHStEjT2VzCKRK7TLR3u0V7E3GpAA68Ws93XQz4GRc4XR8qF6W6cjxaeMjhy11mG0n93L+NMPStZASC9BCCBH7zJNg3t1p2rC/61mi60BYC272tOx7gNAVq8jp7W3JzNyADAkwoKE6J1QulztSD9HpTbFUm+hoEUmGnEYC6DS9Xf74HIiZPBUefFeb3En4OT4aE6aBSCqz1+2g5qRWTKNJaZKudHjtx2UKSX8ln/w8w/Yg0e7j4lzJG84O5ADAGQHQzNkVqxAMUIyQGTN9D6amcGMpj5kvwiFRRSGxEnV7tS2nkAdnJ42GU201gYG7iP6cW649uTgMoiSug9IRiMp0Jdnz66fzLN3yEpVtZ+H7OtLc/6YXIl2R3BRvuJG9jgW8KHZkub9ou/n7ghmKWDPdSMAmgJ4W+AualvuKIKu164vGNfzo3SlyKnUkRC5LHRQx6AeKGjA4WI4vg+K6lSqMrhrc+99xl2dAzhJL6OYtKu8x2FhjMEXmpjNANnjl+snzvuTbYAnh8vca2fw0hTUO+3JU/vdyL7lAMqJudnWTClo0uzaNEcBIHPu15LBSIyVoqXCBvZaek9SO5IpWhZoap+ZXk1rLbsk5wKNbxM5lTmUnCtZfFVMJIfKUnsDAAl5V+7bj22b/PFu7020NxqQ2Gi42jnH3EZB7sqM1OXuC27uTlc2xep9zYGlcz0AjOjD/2nn0iu3HajKUkBVYyrLIWcR4ApNgA3QWFOIUrMD1LmJUPehfn68v+fECLyvjslakfippm/1ewBIAMscEMWryVmDKFm3jD62nhpwAqDpB2GpNAlWIc5EDPXe9MYZuOc9akEIua5ZY57gxOlzR9tXqcxZ5Vj7rnmP9qFYzdC5milhAEt+DwY7h5i4CB309+Rq5ssASpV2tpFxlTrd58LcKfIR37+sn1xTy6WflWZtZJSAXAUOp8bFlFAivIpOLFAmJx8oM
BFqIeAcZUDCAEKhGPryWZWxnhbGGJaiygXI/f0SwDJFyY6klGlUKb9HBiYEIo94gl7VqmpREy2nMOQMyCj/hhFhewW/2WLYXmHYboqzqv+8J6k36JxTVaXSz6cFNbruaqS/n3dljG4llgFgE3SsEZqbBR+qptY2bDVsBaXdrmR8++tsa9KWvK79Estn5Xqk9QJ6oIo4VNDhSp3V4B02wRsgsr4P2mfE19Q3Dnn+0n4+Fgh5coVu5ag6sY4ciOu8wqACROSaEyhkUFnACBVwcg6M9GZ7PwF1jqgNSmv9LLuUMyM1RvtpD44+2kdnbzQgmTlhOBNVtrYGRs5RtO4CIgpAIlcqzylQsqaopWYj/RaMWD/7FE3mTYhEfFimUTc9R1tfo3DWYgIoO7CkE5xzxXFYqy+xY3LtabOy3D2d8H7CbMBLXr8W3bJJ7UtGp13XUdYkR4XVEQCkCLanLWonXQtI2s7Vx/u25lTIcdTagZ4zvebvrT06ulybeJJUv8FmGMjlbsaufLZf8m/dZ+d+/zhNlX9mZJENljEpehFXmLux6eR6uqyBRr05aUPSSs+ZIjcSszUa39KzbKFyXzugal/q/CptptYx1Nfzx3/aufUZm3mdMwqFkbANct62gbAfJMI9kETjt4Fy4XbAblwKjUui84TbIeLW9N+wPUpiTMDClXrl7673sMpaBYiEEcPuChRGhN1lBiM7bHYBYfDYXg4SPd8EbDahofHIsdbMQXWaUykUB2RcpvaSFeqWNshUqWS5dsf7rkOz9klS0KDXZi1I0jdjPV5nzbq2lNCasZNt9vt+LOZATvqrACIFrMEVOwYq0xFAOYaEhIVz5odSWX+h33KtlbH1i2T+JRWNIIdECZRrj/SYtfmh0u+IXAaZStmSZqVayH6RX9eUtvS8ibhJQezmpOq5T2DLEuCWXmwFCV5Xc59Ap3b3mCG5095oQJJOpHR7O5UZOV7uw9ir+9laYXtvj0CkNXUweiUgSyUBUo7m2mhaMkDjtJVo0wOBx+q6zITVr49I+jXI/ug+yaxGwFEmYE0G2poFI+U8RNkHVekSf0WczugBnyWR18ZI6pwGG+W0HeF1Mdedh3SCvnO0XJIi4sJ+1ONJKkdcj//XfuM7q+t8tA/H1CmPhYoBAchIGHxu+HaPsbZxJrn2DNEsiIIR25Cu1gusD8A9z10V7rQbvGRHlDJU6YpljDANSPvHx9aQWCUue6t6ZPlipCKg4ZyMz0oHUtMo/8Z43urkFdUqU7C9eBLKFrWFyImcgAvyYKICNFyuF6Ewgpep1ImU/e4yI6quFcYd/LgtGREKtbZgLTNiTTNTa5K5Ug8hY1mRKzfnUGWhdcwotW1m/akE9Fxtksk1UJNMcMLauWGaycEKKtgMcJ/RYW7HvMGAYJtxAwRQ6D3juvuGUWvy5hLUSlC4MmTt9IG4NJxdkzEePSES5yxJgg+Us2eSJYFvZe81M6LNLn2oWZbBt+DD1t/cZRaUaANir9l8vRhHAPNYEv7RHu2+9kYDkjW98lN2F0Wrz4zc1TukUldw9NkpI3d/qksPRqwzvbZvn/ZBQAsG+wyJOhsl48AOyD07OEVYF4fovEwzcLoAHFg/x/1yawpTspxrvpc/juktADD3IUccgxvVtS/H1e2cRgAPkY/ogUrnWgN2RNJxWfeviQI6ud8JGaQ4mYyPb/vak0DN3rJsQRhqRBFOQIpzyA5JyhKsj5Glj9LU2RC1HH1GEma9DSKv378nzPaXsYBEi4lPARFvvELrQFV50p42Qw2VRgQeKo3GUhUbZ9i1z7nvxvLT91wu0CaX60mEZhUoYBcEaG0DYY4JL6YFMyfczrF8tgmUMyQTXuyXkvGZsswvkdAtU0qgxSHxDnEZS6aEl7kAEC14t6a9RiiM8JstKIwYL96CD5INUTCyvRzgyGGzG5rmf/a8l2tpQAiAXIcRcVhkeRU6mJkK7Upll3W8GMhh4xVIZrU2l1kAidsW9i6Lk
DsqEtHW/00rc9+atZnX+hv7az1KsmMcDOBAHd/LGUnd2Gz2U1XjhHIo76eYnwPyhuoHDL6uZyAq9SMKXvV6xIVBNCMMvtSS2POg2RMKmaJFDsMmCGWrK2BvnqMTtSNADUoI+BBQokI+hRligJcG/ZDP12F5/bm1j7K/r6e90YCE091O4n0c9fvStD4OKwW8K07mKW6mLvZf/M//O5aY8Du//es/0n38JOyf+pVfKO//0t97rwKRjnKkjf2AHD2iTIGAfnb3PWGdFF2vfn7KesCw1sHWfl+2pdE1thHo44Grzy576Yoov3X5J1xBjQXqEanScZwD50k1egEfM6SrshQzU5YaPj5YB1cm7N6pk+/7Y67PVDLnXeVA1dilcsQxB+gjgG/7ureO9uHRPnz7F//xXwoA+H//1f+tk9WVVwl8Z+C88hD094rSI1W2V9WEtDbkXEakl3a1TpTUJMi9qsXTCky2ufGb8vtVdc5BnGLN7NXnUl6diXTb+9d1y6m5JMEPzjU3ucQYgINz1Agz2LoRcqk4h9NSmwQqbW3yBGJpVOg12JLPQeIRXqV9s8yvIw9ayYw47yUTMu5AYcRmN4KCOKnDRgDJsAlwlIuyqVVbusssQFkUfOReHKtzMWp2SoCivDpe4OYJSAzH8QiUJOdKP5hcWIPk7uHQ6TL6O/njeDndngFELi7NZy6ldrmVbSTn4CiAiOD9iESESE6ASD4fWQNNKJDk831BmAPhINXjuBhbIYTIkhkBAPJF+g4p5PE0n2xV1tLGh+RJQEqoimnnJH7PWd/WQOnpa+Irtkfbpz04+mgfnb3RgOSUnaVlfUSg41WiuJr61JoAG7+JsX/o6/vVDMlnaBTw7jQvXPjCKniQ7nVeeg752vas9aBiLRNif9dnIep6ZH8BmIaDCUM6vp69tLFsS46xz5pwAmZyjdJMPdZjKVRbK1JlO4FtkEkyZPDnXJsZWTMbbQTqclIoaQokDb1BKBXSUEyDA4/SkZ+crQVmNHsC5bkboH2cJcxN+ZjhXUuPGYOVWW2nn17OVQMO9yks7qWvyYkqVxPxNhk9C6IVgPSfAcc1UimDkWSygASprZB715f7ehsYQ64T0cLqg6+qTkrjmhbGpGICEGdTawWid3C0AYUBgRlxvM3LSENEMt3YVVUrjAFhyMXMu0HO4UbqCcLgsevqRe5yVldldu9wbHWMFqodMHoBi4EnuGWCWw5wyx6IC1ycMwAw2ZLs9CcLMNZegdJ8U4GLvl8DMGUbzC0YSgzHGZDwImAEAgCb33sv26OAREG2EzaAD0h+RAobkA9wfouYHACGA7A4EYWIKeFi8Ng7ySrN7DFQwn6Ua6L3hha7F/Cee5L4UMd6wNCMc/G6c+6obqQtZG+VtcId1x6o44HWzPaf56tQltl/nNz3R/tU2RsNSGz/BuA8neBU9/VXsQ9CISmN94xjWPft7v1fc7A/a74b0fo1KMWL2TF/SM+M9e3cDTzW7rm1LIntw6FF4mp90aYWYq7ZWjNBIN8X3hRvkit89nklA6ibV5pWpXVV5SLr2FnhBQBHEeTeidPPi5RAqnBbwYjet30xa0qPgOSTMqFatZ9FvXIeAMQLH0ItIu99Gb2O5KiAZq3zGrw7Wv8a7dX2ZtJ7rwEkmcKp2ZIhN9Hs71Xn2kLlHnjYbVrKTt2f42MDhFYVFZC4TNdxDi4DMOc85kil8F7VogaqDq4CEqXr3LoKSJSSwwvDh1glgsd2ytaiZkcQh5RcVc7SQudM3xm89tSo0fgeYKz14+jllvvPe6sKZ7XAfSQHDwYdruGWPdx0CzffwHEEXz8XWeJlRpqnfKI1zW1vjtzc0Xt5bwr59W99X4CDtWUBVP54mQXUHfby2TKXz5CbUMp+xHa7YYALA1wY4XaXABHo4ikwjEjDBdK4Q6KAsAU8BcAHOHC+R+Tej/mG5+QRE3BwjIuB8rOh1LilUTi7naRXTxi89CAxY7mlbRG5hoZnQafKG
Gv28dT1s2b9lFN09p42/yZQtuQ2eXU/7pW2+YjT7rQ3GpCci5S/imkTJ133ndt/ICXQFo5SkX41YeM71neuAaQdJH7wx3+mvP9dv+YbHraTb6iVJpn9YHmCSnDOOGW6EacGXDRR4Q509Nb0TVkBIg4o3dF1YtCeDsQip9gfh+ycO5oQZBv5t6VYnkuRL2nBKaqTpw5by8GXV1XH0QizRqutMwesw2f72Ggxqew/TMFqzYgoEBFlLZNBeQ3ok58182b8az7PAQClR3nnsBtqI7xjWWor9yvP5cZXGVa7HLAeXNBnJ/gKMBR8bDN9a/CuqB6Nvr1XvcnqkWszIUA71CoAOQYqx+fIIQPrnC1yToqnXXJwkHuasuMJL4Xa20DAwgjeYQuPQxSlrYVTcQ6VpjNHAQ9xYfgs7RWA0kDRLd3+kCvUnjD4Qt8Jg4AT8rXfiAUdNhp/dO7NgduGedWx9eW9XoeickZWYUuyI4EcfFpyZmQPN0t2JN2+RFpmASQKCJYJYF7NTMhFIQEcCkj0fZAOSW4YG2BiTdcNjkiHvYCgKQOSea7vta+LrdFRwBNGASSbLRxHuDAghVHeU0BaPFwAUlwARyZ4ljI9UEArGdBGDlnZyoI+qsqSVGtKZgCOU6Fs6T3gqEq992CkzY6sZ7t6YHrO/7EiMg01mI8/e7RHe4i90YDkvnYXTUuBReT7Zz8eAkba7bfRhuazeNzluDcbVb6rV8an1exgqeeAuTpCM7OhbdWsiSzfXo9mXbmYD0Apkico95mKxPAp2Vxrtl+Hgg9xjKrzY39qMwahNJyq3yXIQD+DUaWxqkN3XCyfJ7CcJbFm5X8HLxOZ0rICiePpsnNhHTpra09UPw+lJNtPqQJDbQZmwYcq3wA2E7h6Wh/tIzZyDrYhmoIOANgNHttcSH4x+OKI9pFWvVd72VW1RkzE3DNWqEGtSveioRKq8xsy6C7F0h2QdjDF00jHRclA/Sxv2q0tk81pbYKTXg+cqoOWyMFnOppzIsygNLLNUiWO9dwqCNH6GnUin5EEFXxYClUnLplCdOSISjDCOdfUHFB2UvWzthbAyviezn7YyDqAEl3f5CyLzxkqoc7JPXExeOwGuSd2gbAJDjvvQDcv4JY96OarwLQHv3wf/Ow9pGUCv3gfaZkRb26w7CekyOBZkBebsYt87r2Rs0RhO+a/B7hxK+dk3ApIGYYjQKKZj7RMGXww4s0N4rSA56XZdoqxbJs8wXlpQDhcbkFjwHCxhb96AjdukZYZbtzCQ6aPlBJckP0JmxHRZVAfHZJLBYDUjDRlZS/GNnjEUa7xtNRsRqH23dFZ3lLxNgaU7MZK09sYEGz3Q9ZVT5cN0Mp9I68249nfU7Kvrz8gURD3cW/z0c7bGw1IqIvYflBg7ul+0r8fhjhDo/et63XVCV27dy0YWbPf/Y984wffsTfA7gu67lMvZKUMyWRFbF2JSoj2Dtq5/VBnSN7LqxbWAseZFXY1aqs9OLzLmQOIcy/3S+3m3G+/ketttHTbG9YW/mqEWakuQ45qijqR4dcDjWKZcunL3zi+Z7/49mXz9//688+FssCyvAKlUxS1v/T33svnI+E3/ANfc7zAo32oRq4CBKvMptkRBSPb7HzaPhPWOAmoVhlXoALYVoL9GIT2Coi1J0RttKfgWYuk6/2LHLXPQETv2LhUkHEfpHsKkDjphJ3yMo4CvCmcjrkPA8hJ3xJK8EnOqYD/LJ+sVCZqnUOl6IyeEF0Cm4nGZUWvymTKmdYTQMQ6Pz0Y6W2tfmCttuSo5qS7PwZfGwnaLBXiJP+WCS7O4GmPdHuNNO3Bh1vEly8Q5wXL9d6AgrkAgsRc+qyQJ9AwwHkCTwucJ9AQELazKCdxhAsjUpZEBvmS6WioWjc3SMyYr/fgeUGcFszXt0jM4GlBMo6AKjKpKlPgDIS2OZOyzEAYBJhojUqvHtbfSie/QVPbAci9HQ0wXFMXtcDRXqP+s6OMV8ncy3p6K
nIDPGrzrJMZ7MfsyKN9EHujAclRMVYz2Z3/bY/+6zpfbV/O0Q9OmYKS8nfD19T9vN96PkvW852VrqVZkbXsyF2ZJTHTk6A4Da7hqwNVNvccZelUNqS4L919ovR8oEo6ChCRtaR03N2jL1andEwTG+BwsUIV0+NTJ+7IoXNoC4LRZkUsl75tCJZyhPz43OwXydZoT4o1W6v7+SwJNnzSpvfPNpCoMDmHq1x/8GT0haq1C1Qczn7MtJHU43u2vlcKX18/ZCl8anq/Kj1Ln015L5+5xHDxAEyLAIao1BuJtp/LfJxVcOqUm1xWVwIFgAieAjwFJDjMLsv25gxJFoWFdw4zyziiSkTkgHkbMBa517k4jJETXubXOTI4Z1bss1BASffa177pGDjH/F1mImnReS/7q9ZTtDYl2u4LbU4yZZodIeyCxybItdkFQuAJdLgG7SVDEr/68+Dba/Cz9xCfvYdlP2H/3nPwvGB6fl0ASdxPAg5MlkKOkeC3I5wnhO0Ivx3hxwHD5RaOCOFyyqAlCH3LGkfJfJhsiAKSZX9AvJVtxmlBMsDVkWzLeQIPM6In+C1noDPU9VPNnsm9EbIUsDwTKgkcWaSeZ+aSwdbvVC5bzr+EgfTanKur0+sDtJkSvXYXRv5X1eq034rSLk9J3vdzZzRzq6roRaq9aebHTMCjvaK90YBEm0gBOY1oHFVy9wMlr2qnHKpT3WTvsy7bfGjN7nNMnzXT6Loqatmi9n45AM2AX6zxRVyRqiQDRpTaBJwHiQpEehDSAxC7fNlyASTZwUhSUEyo9SbHx2//yhOZmVxGT6YotT1cBSJknDznqnKS7toaINHvknNSsJ5SzvK4k6DvZorYRy5FjwPdD/1/VMp4j7ZuGr3XjMhb25CdTypAZJf7fuj9Y42TETA4ovFZsC2fxVxIz0n6N+hnlbIotRrSAyeDEALGDEhcYrjpVtSR4gwXRU5WMiPpfLQ6gw23ptpkvtf0oyorOUdASkjs4IiRPMM5wkAhZzEhNSUJGCFdrLfBwzvGzVydx0MUsDKN2otFtm0lg6eFMfmaTeyv1SmzVFWlejUZKnJZCeo4S7JWL6KO7kDU0D2H7Axvg8eYC9gH7xDAcPNeitfnG6T9jdSL7G/ANy9weP8llv2Ew/svEPcTpuc3WPYz4hSx7JWylZBigvMO5F0GIhPIOwyXGwEkQ0CKUQrZmUFDAA0BfsxANF+7OEvmI7EAnjgtiPsDlv0EnjJI0QyJyco4T4ieQEzgyKDITQbF1rKkDFYFsGqPkurIxyTAYo7ZmS/01dO1jk3gNbRj5prggPYZsb1HdqMv2f1S72MEI/q5rRfcaYJ7JLRoKoEHCMj9EJgjH5cRUQG5H9s24xt0gj4he6MBibVTGY+PWub3FDDpwUgj96oP/QMjCavF7J9BhNI7x70ROYDXr82589UrSa391lOlodjF9FJaWpY1dap6s5mwu27Vu+4WbcwlBfHCt0+QaHPJ1qCTQTXAg1MCQYp0Y5Llnl7u7tjq/e1mjl0mq07qSpXzDviub/nCmbU82pr9oR/5qexsiFOy8YR/7h++Xz+iP/zXviRUG3LYZmflYvB4axuwCYQnY8BAwG4gbPI2Nt5lALAA0VRaOwKp8970gtDv0fZxgKgPqcNTs571OdM+B9JPREC2d5BC6cNepFuXvfS2WGbJiCQWYALUnhLGGqnYM5KyFZDkqZK4RMITBMikfFj6uXfi6IEAn+QnIgyQTM0ASx8gEwWXhpEeY+AyN6jK0poK1ikrNSJ5H5hTKYqXQ5J12SJo69D2UrDn5ilyyM+trZXLARGe5drERRSullmK2A+34H2mSu2nCg7mBfPtgsQJ863cUymmAg6cd/CjFO4n7+DHTNnyhDgv8JCaEwWIJbtyYt9drg2hIdekQOhhPASkyIWi5YcAvxOa1vj0EsPlFuOTC7iLJ6DNDnT5FG57CWwvkYYd0rBFGi+QKGBapDnixAkzCxg5LIx9l
H83c8TM0jh0jtJIU5XXIrf1In32Sl/bf7RaQ1LU6KgKUygo7hvklm0YulYvVBGSwxITfN5HVdObY8IYHjMkj/Zq9qkBJB+W3Udd66Egp9fq7wf4+2RS1ihHLR/7Qbv0RpvNFijvfI3qQ64rmjXRHruMvNZIUZ1k6++YnTj6XOtDUqqg5Bw9S+1cl2FegRunlj+SsDbUNemcC1wMACWHgVJVcwGaSPM6/7+4V3cjpAfai2lp7+O5pdJppPXRHm72umnN00NMncin21DoNwpErkaPkLMiIe6BZYG7vZW+DXGB49hmFXyeVtZ6Qaw4+55C7e2gv82ZiGKGl+8mASGq2ARe5D1H8OG21AmoWtKqYhNV1SYrGyu7Vb8v8rEdFUeKSdh8FpHIy7jgCIMfQRlURRIVLpUy3nppigcAh0CgKIIBQI14R+ZC3TostXdJXz/Qv+8BS4ymT5Fe65NR9fV+FXa/9G8ywMO7TBElFcaQ5d2cVbWWA9L+WqhaOUMy3+wxX+8xX98W2tTh+YTldukyJGwyJASeY/nbdtqmDCJoqBQqzXAwDOVLMwFDgMshfkeENA6IQ75fuH6uNSvD5RbOEzZvX8FttqCLp/BvvSvvP/cFpLAFby7BmydIwxaH5BGXhOtZwMXMEpCZY8Kzw4z9wridFZAwrqcKRG6niMhcepConRIh6LMhYyCMvirS6bxm6cfaRNQq5ak0PbCSIeHqe8wxFeGYwyJ0s0Om4c6UsIydmMBraHL/fLzA6ePe3ptoj4DkjJ3iUvYKE6dsjZP50KzIXfZ7/9Fv+lDX97ra//Ll5wAk2n/K7rom5FztqYAWjPTWR4tiSqWmbw2U2CyEfvYQoSgy+95nUr7lC08fsKbX1/7Zf+jrPuld+NTZf/w//N1SnPoQ+8N/7UvlvaokeecKGLkYfGlmN6qMa1pKYbKb901juWbr+V5OzjVUKPQUPW1+x4ssR7HUe5SMRFmnaWIXp9zpOzfXSywSssxI0156WVj5VgC2n4QFHyAPDCOAGS5LxyZi2RcA4AxOguyPS4ykmb0s7argxCXK0fx8eKgZAx2bSJ1350pmgalKglenk+Czw7wJVChcAApQ6d/bv/XfaMIi0aXGoW1rDajJjPQZE/2NvGLVXB9UMRnQxLHK6HIU6pQBirxSdLb2mdp9e0hoBkV+UwGK41qkTp5yZoWOfus8wQ8B4XILPwTQ7hJuewm6fAJ3mVW2whYpbOR12AJ+xDwzlpwRUZrWfuGaDeGUa0gkM6LqWdNKZqS3UxmRooaWxQVsT6nSOJSc+a6qLZ7qkTUgg95M05K7mkHkpbaJHCgJ/Y9zI8jhXD3Woz3aGXujAUnquOq1wdyrre9UUZeafs7pVGS5dWItNetBxe6NY92Cn9UaiM+AedKiaZfTw/K5Rm20M3ulA7Wc3NqYjSoFxFxvdQ4sZat0eqd6DWZIc7eZ2wFcvk5HDQNlPfX9XXQv+fuD3ceP9tmxvtGq0nCWtZRhZ9rx/GLw+JqLEYN3eLIRILL1UjPiHRDmG7h5zrUAOSOR6zRcLhQufSKAklFwHQUqOVotLD/qxt18aTp45w7afLgFMvjgw6001LP9JJZM1bKAZCUjIpkS6S1RXgFpfpezKEmXDwMc+dznIgMRPwpw4qWGOZwDELIzbiL4OXPqkqtghKXPCkAYPIOzVPfF6LFw7RUyLbFRWLKv/fu7GoquyflqrYi+Xwch95u/NHNizSVuw0jkC13KOv1+WJAMz16K2vO86h1ImzzuAvxAGHYBfjuW4nbyIgkctmOuIQlFFli3C2QBEVXeisdN/Mq9rA0QwwC6fAoQgZ68A7e9gNtdIY1XSGEAb99CCiMWGrFfGMvCuJ4lc7BfUqmde7afMXPCy8OCmRNu5oiXe2mAaLMik6kf6s99BR9VaEABZd+nhxyO+kyVGpJc1C6+SVv7CLTzk4iXAJwICQm74ItKomZLVKhkvzDiG5EhoSbD9nFt89HO2xsNSNROU
ZceQq06BUb6gfhUwXxfw9LWjLijz+67f6f0++XvO3/+qbJMiT6iZx3/XSlMVpXGd9dBL60t6LNND8u6WACL0qHmmNeRUybSPyZHlgpQavdJ/+wnGZeXlUhqWv3toz3aObNqfd45/M5vv1/tSOmREQiXoy+9IwSQOIyQGhE3XbcZCV6AnIVgjgIOLN0JqA4/DEDRDXegpHd1XUpNp+yk29CmeRmEIAORFE2DuyztCpymaqUMPhIgWRHOdSHlNWdTMjBJJBmgpBSfYUQiFJCEKJmgkjlJ2msERZACKY9dUKpTnRfKuONshgSI+XUMHpFbJ/UUIBnNZ2uKWRpVl/XSKvjo3zfreGAm7pz0bW9VXjdleguVbIxStpx38ENejuo/7RXih1DWo7UhTRf3YrmRYr8TZlk3jNLXxAAS/+QdpLABDzukzRWSH5A2V1gShIa1SJ+lQ2REFiGPm1nFPCQzcshKg7dTxM0UETnhdlqazBYg4DKcuCYWVKpyVm1OSQg5u2lBChlA0gMRPc/nlLZSpoVGlxkDjkAuYUhK75JlH2m3j/aq9kYDEn14e8f8vkDkbDfSExGh8nnZqBkkuuzIuajSOaCxuvxntF7knLUN1oTbKp+f/o29JBZ49GCkB5GlsSLEZ5mR4Pl4OZv1P0ft014jtoFiv9+PjNNHu8saBzYl7JfjiO+aXQwe7+wCrsaAp5uAt3Jm5GokUUearisQmbReZJLi5DU6VLZknXpuazGsHQEOoNR+KOgAIIBjmUv2o2w7v6Y5y8QaBSVZf9dHgqjQdyRqPkN7VCSOLZAKEkBwC0rBemJqXjVzUx7ZFSUvTq2aWG9FDY+Qef7ihMfQOqUyz+VrfI/B394TR03wSiY415KtCm1YsGPXK68bT10tAjVzX6lncoREAYmCOPbMoN0lmBnDxRY8LaX2g6cFNO4R95M0gpz46Br6Ua6fzw0Rh8ttkf0dn1zA+dy4MEv+lmaJYWgyYgDqfWlrh7oMmRu38m8YwcMOoIBlcwmELdKwweK3iAm4OWSgsTD2UfyRZ/sZc5QsyM0ccdAC9igZEq0VUSByM8VyfXswqde0Xtu2XkTBhmZBdkO9PueAyJGqojsGJLoXCXXOEnluma8GkuMdPGGOjIEcpvGNdisf7RO0N/rOEbm8V8uEWLPP/n0Kzq3E8H3RQdOcyihXAOcnmVVlrc+wDKpGGys9z9K1UnM/NJr9SqUykeS17yw1ee2aAVIoCgBzvx5yReN/zXTd5FztW0i5MaNJi+i2dAJ5tEc7ZaOhAWhPgPvY1ejxzm7AW5uAXSB8bucRHEC3zwo1y00vpT5jL03kGvBQO/Tlv41Tr3SYXIvhyMvvTJM6ALXoXLMbh70AEI7SOC9nQHi/R2LGshcqlsqyspFf1c7eAI4oZBop96PIwTpPoHnJzu3muL4EEKWmILUGCeK4JgCOfZWY1dqZlDIQy9mdezyylAcyebxdCUgw1bllDFQ6v1sqVu+wrkq/+uqI9oXLwR/3nGgosGx6OHUUYaUf25oElUe3Q1VKqRT8Jz/Ab3ZgZrhxK0Q2jhiZS9d1nhfQGMq1jZNRbsvXTkGl0rPC5U7qOnLndOcJbrMtNCu3yV3bc22QUvMqOPEFrChlz3kvWQ8KgB/BYUDyI9KwA3wAh21Rzbq9FdXA20XqRTQbMjPjxRSxZEAiNSOM929m6S+zn3NWJBbBgnMF7G2WpK0Z0czIJjcrJYfSG6YHJBaEUN5GA0hggI8N3BkKs9wjSuFKWFga9Q4RiN5j9ITD+PpnSDSz9nFv89HO24MAyfd///fjv/wv/0v8zb/5N7Hb7fDrft2vw3/wH/wH+JZv+ZayzH6/x7/yr/wr+BN/4k/gcDjgu77ru/CH/tAfwhe+UKU8v/SlL+G7v/u78ef+3J/D1dUVfs/v+T34/u//foTwMHykDfHuY3cBkePvTn+51vsEOJ4o7LJHykj3VMo69bv/6//xl53e+U+R/d1feCGNxnAcb
VR1rSIl+wCcttbvRetFdDA+de5P2hl2ApE0RFurxbTb+ce/6XPnt/Fonwn7gR/9qSY6fde9972/7psftP6rjahpbYLDJuSeEcsk8rlKzZonpGXKgGHl5u5pWkB17PP3rnP0YUGL0qQgICIZylVapBaE9/vSH0Kj50n/cS2MtgXQiRnONNRjiDOgkrAASj+7FGNxWO9tHAGb8UkMwNeMyQnKln5WshT5VWvaVBZYsyZr84nWlqxReVaLmrNjqzQeBRFyeY4HozkyQO5kYAdAkx2x9QreUM+cc7CNApNzAhJypsSNW/jtXhSuIoPnRa5R7tAeuvvNEcGPgzQpzBK8kh3JIHOzFZpVzmhI/cfQgA6p/6k1QQBAm12hFCY/Sh8RPwA+IPkR8KMAlPECCcCtyvjGhH2mZd3OQsHSbEhfJzItjJsp4naKmJZYgIitF5k60YK7zAoMlICaq1k3nzNug5eMnlKzLACpGRLka1bnp6KmZrepIDaDkZQF+FOuMUGmmG3D619D8mivpz0IAfyFv/AX8D3f8z34x/6xfwzLsuDf+Df+DfzW3/pb8ZM/+ZO4vLwEAPzL//K/jD/1p/4U/vP//D/HW2+9he/93u/Fb//tvx1/6S/9JQBAjBH/9D/9T+OLX/wi/vJf/sv42Z/9Wfzz//w/j2EY8O//+//+g3aezygu9c7mqWf8rozIWVpXR8m5j/KHBSI2KmW/k+9x9Nln1YrqlJHtlcwISnbENkMs9Kq1U2c/LKOtSUwz8s3S/liL5Y+tymrO3W9sRkTrUGQXch2Jc42f9xhAeTS12WTMyK2LYtj+HA+1b3gy4mIgXObeIvTyF+DmPejwAthfS2biOivb5UxGobeQLw7gkVwu0IISHNO1gPx0GdUlfU2HvQCRa+3efSON63I3baX3KDWrV2KyHb2dJ6nZyNF1n39DLPUFDIAG5NqRhzlRKZqsipEkBi9wFEQ5zI+5548EPzw5INcgDl6aEg5eHPs5ahE8g70DsQNCW3PW1Ar1WZEcBQdQIuZazCzbo0bmFTgddKuR8Px33gcdv+x6Ri91HmPOygRSBzevzJE492ELRwH05B2kzU6Aw2YLmmf4q+sCRHk6lt4FIIXpmskYt1VgQMUGlJ5lsyJGiEBlpYsUtZfPohdKmWZy4EiUsihgScAUZV7Z52zIIQOS2yXi5UGyIc/2FXwoAHmxXzAtjNtp6eR8q6JW5ISpu3/1PvEkzQyBCkKb653PtzSolOttm1QOXoBBIAEiA1XJ9/OAxBVwCaDQExtzhAQBrQslMICQO7VPnHD7BvQhIaO+9nFu89HO24MAyX/1X/1Xzd9/9I/+UXz+85/Hj/3Yj+Gf+Cf+CTx79gz/2X/2n+GP//E/jt/0m34TAOCP/JE/gl/1q34V/spf+Sv4ju/4Dvw3/81/g5/8yZ/Ef/vf/rf4whe+gG//9m/Hv/fv/Xv4V//VfxX/zr/z72Acx7VNnzSdlK3dp/BuLQrRNwU6/l63eXpb5zI2p8BI/Xxl2c8QIPlfvvy8Ka5bA5FSXFezYzZL8mGaZjNOFcyrCag43rZmRAC5b2KSpoPy9zEV7Nd/87sf4t4/2mfBalH0/R2AH/vpr+LtbcDohXs+epf7eezh4gxMmTZlajcASF1HWKEd2D4e2VYByMo+CpUrg4EYaxaGJQPCOQvC84I4zQJETL3ImsXc0E738l7KNmuRgA8jOpAYlIv5ndN/NROiWQwNavgEkVFlB1ASzn7ejVN1BbbQXLMrStOytQK1IR5yM8ZzhyiBkpI3qgJl8qerkffRiyMbfHZ2SbIjdk+TIwFpQM1GcZReMmHOMrxSE+THTPfL950FvVrjUcBwBhwIg4ATQICJKUpPzgngyPUs0J43YSzABDkrkoK8X0CIUXo6CSDRfhvAIVbVrBdTBSKHKL1EXuylJuTZzVRqQyaTDSmAJAfRkpk7nKbUVFX6mf608QABAABJREFUgXRwzVDpP4eqnKXNcAmWn
gVz32TA4lC+b4B2c3uIgp53hEQOLgHeCbtgwMPGo0d7NGsfqIbk2bNnAIDPfU5oJj/2Yz+GeZ7xm3/zby7L/Mpf+SvxTd/0TfiRH/kRfMd3fAd+5Ed+BL/6V//qhsL1Xd/1Xfju7/5u/MRP/AR+za/5NQ/ah3Mg4lRtyLkH5j7UrlOOcv/7tcJ1BSOVn9uCj3ONr/Q4Pq11BdLrw+XGh22zQ4YWeAKTKpbEVJRLYkqrUqdH126lhqRsQ/gWiDHXdazs40NUZkr6u9Az6r3XK3492qNZuxj8KvhVyo/amqjHOXtr43E5SCT7Iji4/XO45QCargFekHJ2JB32nWxudfKg1JgwSn8OG3lGzn5o48BsrlOgwkDSV8TWlYxboY0BoO0WzktmxPaISCyAwxawW9PlXJaAVaqPysBqQXTYjhJ1N9KuDaWH/LE8sO1fYk3ljwGAcpf4WSLyYdhiGyiP45RVGgneJcy5kGzOKn1SFO2KYuPecZkj5k5ly1o/n2mT1yJlvhZ86/pOlIRGcUhrobPQztrCZ3VaB+9K7cGQ1+PTIuBymVD6ymgDye2lCCWEQbqbcwSpSEE0vUp6MQQFH5aC5T0SeUMLy9sAwPo5UQEkoFCWV/CRKEhAi0UURXqGxHzOU8mQlIL0aSlNDV9MC+aY8NWbCYeF8XI/4+V+ye+XAj6myJLJX+Q1pRaIyOkRIOcDgcmVe0Wv+ZofUK6zk6yVyPm2mSvNjAh1TzMw+drpNi0IYbl2Li7l3l4DJHpugzYKBZXxavyU+ieP9tHbKwMSZsa/9C/9S/j1v/7X49u+7dsAAF/+8pcxjiPefvvtZtkvfOEL+PKXv1yWsWBEv9fv1uxwOOBwOJS/nz9/fnbf1oDIevO7D+/Bsdu04KQphj4BRiwQ0X+n9OTDpxiQpFwHQgDYBIv0FHISUDLHVHTPZ07SoMlkSdakC88Vs9tTzWVbqXz/Qe4T6sDIIwB5tPvY4B1oBRFvg8+9K8S0cPa+thsIm5Cdx8NL0HSda0YOoliVwUhapjZKDVS6VhgK9z6FbXEC0wrtyeVnMnXRVscLkmNxKsMgbPQMDoAaSadhQtiOYE9tzci0FGBStmW6cWtvC+d97VExSI8KRwTabkvU3Y1bcXxtxF2j8r4FJ7JTff2IOVbOPUo4ZwTihBC2cOQwAoiukkG9A1Ly2Ab9u4IHzboKdU/oXJxSS+qHCWwYKs6arQfaKhjxVOVfCZLxoPy31quIA9sVQbt8DpJkuFxiaRiZWDJuqkZGHkgO2rvFUSj9ZWxTWCtIoLUdDeDQTAcy6FCgqo007fIKSny+P3M2RBsVcgKWWepAYp5LkqFpLQzslwpE5DViv0TczIzn+xmHhfGVl4dC03qZqVr7OSJxwpJfmVOhFzZiK+qfeKpzVCBEWlfbaq5pzkhZ4DmQ1vBU+pdzQMiKakF/A7mu5HKwgDlfs6U2IQXOApLETsQfPGPwIygXuw9vgn/yCfQheSVu7WfMXhmQfM/3fA/+xt/4G/iLf/Evfpj7s2rf//3fj9//+3//2WX6wfgUEPmoHULr3NrakBLtMPSsHoxoYZt25e3VTdbev6n23/3tX1ht0pQgVIVSfwERrlJFtdLdlmvdyF1Kazpgl7/PZLxyXXux+94vNgqp+67UiL606Dt+2WPh+mfN/sCf/V+Pntt/7f/0K04uv/EEXpm/tCZA78tI8gz84I//DN7ZDXgy+kZxR4eQrXe4GISiNZKDh+l8DkOpCgMQo2QaOmlUp9z/MBS+vY06q+NnrelqDi9Oey7+dvn70hNks5UI+TKAmJHmCQMAP+wRc9GzKjD1Re1qzvuqxqQZku1YAEoBIit1CChqTFYmttbN2CJ9530BFuvd2yc4phxlYQQi+LBFJAdPCYEk6OScRMwdAqbA2CwinTpzAs0RG59wiC5H7620ea7ruGcNkaWM9lYi564CkdFX53VUx5UXkX9eluqw8nEkvcmGaS2ILm/Bhd575eJlc
KGgAgaUUAtKmn8wYFizIh2QkTlC5tSUUMCHfVWQMuf5+JAL1Wdm3MyMOUv37hfG9bTkOpFY6kVe7hfcThFzZCxTREqpvAo9S0+LAWDlEDg7+K4AmD5A2WdJTjWyV2lfpQkKTatS6vRaezCQBDQXQKJjggUiJwCJy+fWxQWgAO8oU7/W9+vRHu0ueyVA8r3f+7344R/+Yfz3//1/j2/4hm8on3/xi1/ENE14//33myzJz/3cz+GLX/xiWeav/tW/2qzv537u58p3a/av/+v/Or7v+76v/P38+XN84zd+YxlgLRhZAyLnnNHe1hITvZN6H2trQ44zIzPzKhCpg1AvA5gVVAIdDUxvou1zEWdMjCE54aCyKoFk6hY5GcxT7bFwMzMOS5ZLzDSt+4CR+4KMc7oEa9SH/neaBbESwms1I4/22TLbFfs+pkXJpz7X+8v23vncdsDTrW8i3VpoPGQgEsghxL3QaZZ9iWYDkN4Nhqbl1JlUB33cAoNEuFPYVi6+cvP7YvYMPuTAs+PKeXvRAywdzZ0j2XYYkcIo2ZkwIC0z3O4SdNjDLxOGt0TtK82VytXXkmjUUzMi2k+iULOGeiy2SF9+u1Kkr9+F2u3bed/WxCQWRx0AODtrcc7O8A2cz5Q2P4IoIPgBu3ze9lEc4tuFMTNhigmHJWDhhGeHBXOmp97MEZwkYm/nEqCOL0rn62laqh6or1oboo6rdwJApIdFfg0OLk5wy9Q2xoxCw3I9xSqfGwBC4wNaKp/NWgCA3xwDDK3vMJSgIwAC7YmB8h7275RKVp05IQG1qaSyEiDzBgMlO1LOLcvcpOd9HxlLTHgxLeCU8HwvQOR2WvDsRjIkz25mKWA/LFjmiLhwzoxITVPKtSJ9rSGRzHEAEAZfjiaFBOKEKX+n/sEmz/36bzjKlsmrQ6bcofpGCi4HcggOcg2XqclkNfexpWz1ZqhwLrFcZ5LrFygU0YTX2TRo8XFv89HO24MASUoJv+/3/T78yT/5J/Hn//yfxzd/8zc33//aX/trMQwD/uyf/bP4Hb/jdwAA/tbf+lv40pe+hO/8zu8EAHznd34n/sAf+AP4+Z//eXz+858HAPyZP/Nn8PTpU3zrt37r6nY3mw02m83qd3bS/qBg5JzdBUrWMiOr33XP93EDLAEjx6CDCygBgP/Hn/vbGAPh+37DL3/IYbxWZqN2es4ShNMgxet5ckHSQGPOilS9fDsxA+vUvLgifACcBxhrheprqfbeHilZr4f9R3/h72AMhP/br/8HPvZt/8CP/hTm2Cro9OpId9ng2+yaiixYaobca4xtBjubQELDAApVQyPdQwYj3gGIS6FSFXOEBG4lcLXGo0ilKvDI4INqobB1GNWSk/9cSrJMYomoAqK8BMAl6XDuEgOB4XhA7ZpeMzRpGZByPxPpMRHhMzhpDkNrDmytgaVkjaYgeg2A5N/K6wow6cFIOVgWIJIdu2SiyEhJZG/zMuBRHMIwYhe2WbI8AwkkpMTwBFwwYaY6Ps6cMKRM3co4oJ//1ua+c+McUCPpmikJ5DIA2ef6olvZ3/kGmA5Iywye9vUe0XN2QlLX5Xsm75UBJ6EFIhSQfGgASDTAQ2N0SZkHeuobym0qgEU/t1n0lLMkKhO/ZNAyRy59fOYoGfiZBbjMnOnBMTWF6TaAOBngcXxrtJ+V/edcn5OzIiUAx0lKqzghOsOuKL6BK8d6VFtirr+qUzpk6jBMnYgqwmnQgJdy75ZsyYngWQLn4OEi9zkvAA+AY8Elq796tEe72x4ESL7ne74Hf/yP/3H80A/9EJ48eVJqPt566y3sdju89dZb+Bf+hX8B3/d934fPfe5zePr0KX7f7/t9+M7v/E58x3d8BwDgt/7W34pv/dZvxe/+3b8b/+F/+B/iy1/+Mv6tf+vfwvd8z/ecBB132dp4u1ZHcJfdlXjQ79fqRdTuyowoTctK//WZEVvE1hawqjTkm29zZHhHRu5S6kaG7AvEJCluT
ijZkH3MdC0tEizXo73IFRAKZ4XIrV47tbVMBpvlevnLmGrhe7Hs7K32N7kDMD3ah29rhaAfl2mx6dhF4FRV6T5S3p/bDi3lMwMSpThWpSOPdy+GHNWW+hDvgG3I9Akb+Zw6br8pOE5+lGW3oVJu9DsKpWA4kUfp0aA9G2zxcP4NgErJUcDBnJ2h7AT5Rb7jQYrVeQsMso+0u5JllgVpEUpJygXvWOYandc+JtxRtywFaxiPe1IosNB9dXeMq7ZgvzdLcVFHzmQPEsdaNN7J0KbhQq7l5kqUnrYXmDBg4YSXg5cO4JHxcmprGbR2Tu4PlCJ2oJV1PW72ClPQLkB19A5bL3S+C5/g4gHu9hlo/wJu2YOf/SLSYQ9++X7TsLLPjqjULu0uAfKgiydC89tewO2uUKR1swywfa91HSqvHpPcN/UZaIHHKdNWAFqPCLQ06QSZh7VGU+eWWouYciPDLJiSwcl1luy9yd3VG8WsO8YZBRxHt00S6XrdUZcckCsnF8j1nDhhyo0QPbmSLYmcEHOXdM7vY6rKaFrs7kvWLNefppwZ4QVuOdRxQIMUhoJ3JERhnxUjIFD+zr8d35QMyWNjxNfOHgRI/tP/9D8FAPzG3/gbm8//yB/5I/i9v/f3AgD+k//kPwER4Xf8jt/RNEZU897jh3/4h/Hd3/3d+M7v/E5cXl7i9/ye34N/99/9dz/YkWSzhcNHtQK0LtEK3A1G7lp2rfu6LqdgxC6nZrmhFoxoQVvkhP/nb/s/3H/nPkL7Y3/9pxuVKD2/w5nUZ19orkZOHf1c8Nddq5SAJVn1E5kwVoS07jTmBH9ikFxTvtL3ffalv3Y9+Igmi/Ybf/nXPHxHX3P7gR/9KeyX9h5VfvO//Vu+5e4VfAz2H/2Fv4Np4U+8zsoT4OGajKg2quPkju4ltb/yU1/B4KWXgAYvmB0o32ul43YuRg5UaTbbINHtUWlZ0ToXqUZGrbOh1vH69TPZ8RzB1p4NNqrtQ6XVaHG7+b1ehcQLQJDO53GRzIl+5wigDFaiy93Oc3bCL0K5yvvsUipgJLGVCu4K+23mQztwZxAAR2Cb0VkDI2tUlZXvmvOoACpL2CK/rmZxsgPvtnuQ9+A4I4UNUpyx2Vxi9CN4cFh0YByBJQe0ZpLgzJooh60vsmDklHqk1hgEknvJzbdwcQZNt3DzDdLNS/Cz95CmPeKz98A3N4jzguV6nw9Zjp+GUEQD0rSXcy5hfoAoZ0vac56C0P+mTJtSwZIEqeMAMojoMiLNMXR/91kTTsh1iRkHI88pzOBMnS7zS6ZVaWakiqakBnys/Vszua0dIiT7seY4pJTgXAUsyQlIcS5nSvL+Tgs3dO2z223iZFWZjxxKUMBxFCCSWGiaPWVLX4EjQOI0wwWISoMjgBZ5nuNyolLp0R7tbnswZesu2263+IN/8A/iD/7BP3hymV/6S38p/vSf/tMP2fQr2V00K7vch2l9AXvdTjuY9E2v9P3a6+titu/LKSqATopy/K70C1GT7sSSJeFMOwEk8rtfYi3YzRGkfc6IaNRK1yWbP70Pd0UMe9Wr/ngI2rBMI9QKUlD2rzRDjPm4yL0RHNpXMdtF+nW2JYOkT+rZ0boOKSKtn7eBkhP3rVm+AGUPDN3ygUReU4uO9XX0mft/uG4dDEPFAFDpGH3xahcFVdBRKDa5oRyyGlbJlMBkErtspANANpqa6VsFMLEUu6dM20pA4xAdRWoBw9/htlalPw4I4GFbHJ33I50CJOfkiu055JgzPEvZD9Zszv66qpZNtfbFZnJcGEtPDXrytnQvv3wK3jxBGjZ4snsHadxiEzxuZ4mWDzTmBn1clAb7DC6wPjZbpT/pSeIyRStnSggCSJY93OE5+PlXwC/eR/zqLyBNe+x/4SuYr/eI+wPmDEjKurcj/DjAb0ds3l5EzSwMQhPabOuClDuiDxskP2JmqZ+RwAaKhLvNZgBtZsoe2V0JZwUhgAYJW3GAes+2zXXXr
GlKGAhjzlyUOtBRsiFx4RL8ZGK4xQAOljnN+lIpz5OJE5gSHDskSiWDcg4EtfNqe1608aHL58xlFa2mcD1a+haXoEVRx4srCn4hVHoiUDOkWg+2HI5/82iPdg/7QH1IXldbo80AJ+oCVihXp2xtnecyI/bzsvwZ9OOJELmN7vrXMM3HKRkOe7UiLKCR3OSKYpi1kkJP0t08JgcEgJLDDVrePOdoljanOizcgMzmvT1vBoQAaLoTr1EZTh6rOuHMJfqm+79WUL8JhNfeY/8Apl3DvUtFmnJaucafpOn+hE8KkDDne8blXgC50NqZQMMd6+CUxGnM7pdDje4CmhnRbtxyj46O4ZaDFB8fXorjoY4GcEy9MJbsZ7YAOTuQhZ5lG8pBlYuQ31e6DNBFaoHcy0LqKohQ1gFVqTqVlbhPJsPSwuznp9alcrEr63QGfCQDPuSzmmVCLgrWwnxAuo2nZQZfP5du97fXSDcvwNOM+WYvDR9zQb4W3tMQMD69hBu3oCdvg56+C9pdwr/9BaSwweXubex2b2FJwEuf6asLVenafE/oqzrd+RCaMWrwtamh0HmEYrPRBpmHl3DLAfz8K4jvfRl8/Rz7L38ZcT/h5ue/ivl6j+V6j+m6zfqMlyP8dpQMSWSE7Yid6VmTYgT8UAUQwha3iwSZBJCI3K4GEzTwZAGXWp/VtlPk2Tna3Ke63jUQ0vsJqk4l7x3GIE/vxehxWBi70WMMJIXtLiIGwjLJ/bLMEc6xgI4JYErAwsf1MErb4lSyJHFh+CC0ZgU+Ot5aZkWTIaMcjIOCkkrZQlxyvcdSwEgZIxSIZLGC1GcgjUmdl6/1WiVDmtXz5tcfkIg0+F2j8Ie9zfvLs39W7VMHSNY6t59etv37LgWkhyokrXVgl7+P11MHvI/3ITllf+yv/3Tztzqi58xSBbQD8UAO1AC09d/puplTk5qPKxOHzXyd6y+y1pDQFgufk4QuWRiSbZNOfCz1JWvZr9/9j3zj2fPzJtp/8T//77jNUpdCYTieoDy5QpUC0PTQsRQDa3/g//yrPsK9rqbiD9vgRXlqpejXglU1fXZrJPX0s7/WU6hgUpbO3GIOHFMBJj/44z+Dq9Fj8CLTu/XUOFdK23Tl1xVkafGx1JUYh2OZgDiVKKg6GgBOAxJLrVLalpFatf9KLwigSKlaZSN7TqzMLDtdQD7Td+RQi79X9jGV9bXv1ajQSPLfAbXLtH3tj9tmUMz5LX8nFhqZlfNNDDhXnDgZHLp1saltYQaWGTzNWPYT4n5CnJbSeV4AyQDnCTwv8NtbDFp3Mu0Rxi3cZgd2JM3twoiLYSvZP5dMQ9i8i8kVipLOOQsfi344VCdbe1WA5d5xcYaLEzg3xkzTHnE/YbnN+7+fEOcFcWqdqzjmvjBjqMpnpY8LZcoclToSFSaJrEqJsq9TLi7XIFQPGCRD63LQMW87mrGbTj+nvXknl8/nui4i+YAyvcpm0HV+3mQBCQ0c9spXSqvikEqhuiOHtMI17rMkWoTOSbIkcj+1rIqFE8Y7jkuUKs18KCtql1krWk9VRrupyeqaVZaaKO/bgACjAp5He7RXsE8FIOF+pmrm27vByVpH9fuadbQsled4H42zkk3Tv+cByidH/1mrFwEqCLCZKHIofUV0GeZjaULL1VXTvgkAStGm3Y5mWXRy6ms9BmozHnYf+oL1Cprq76mbdKKZxH2UTMDMCZwimuYOnwH76q1IW06RS/MvTw5vXwzi5LsE+HwvmPN3CogsH3FGZQyEi7GC+sgJt1PE7bQenVL6RZFLXQEltSi2j5zW49Vj0s9up4iFEzaBcLUJIOdwyJP34B0u8uSu1MVDjmZSctIUtAdPhNI92ztgFxwuBylcp+mmFKmKVOsMt+zFWZinWgzeg0nTlVylWh3QFKfbYvYURsCPJSuSNFigmRGzanWMiyVZd0QNGK3HN1wuSOayTgV7CoJSqsXL5VBQ10lOwY4rQOd4GK0F4WUdT
vdA//bwTq5TGMbaRC5OlW5GXCR+ZYVcqFp8e4348gX2X3mOOC3Yv/cMPC+Yr/fgWX6vmZJwsYUfAzZvX2H77i8K9enZe3C7S/h3Po/hrXeRwga0fSIiA8MOaZSmlBFU6i6U+qS0JwUtck5robPLIgiBpEcN3V4Dyx40XYNvr8Ev30d89h6m959j/9UXiPsJh/dfYno5YdkvmG+r00meQN6BPJXmf3L7ZDWzzQ487KRGZtghjRe4nRm3s2ZIxNG+mUXOXes6gKqQZa+fBpVslr4EFdJKgKpNiknmMtNvhyKo5jOFWO6mSCK9K/dIffXkMC6MTSCMIWYQspQaj9tc/A7IuJDKvyzxGzWL2EoBE7nyvPhAkiWJ0puEF0Y0GRL7b00wRQvaNfDhnANSXAchwHE9mRpzrdVC60klAJinLNDAcHESBb3EcNPt+vpeI9PmqR/3Nh/tvH0qAMmaaabkVFH1B7E1AHHKR9XJtLe1uhH73drnH4b90E/8bHnPyaazU/NZLw6wlolQK/UZrtW/X6NFUXLgFOG9K6l4pXgJvUAmUUk7VxECW7uh+6mDrqxjnY61VrDegxEbhQYkCr2UIF/Cd3zTu2fO6KfbVL7WylwC9d5sskwr461d/lwjwA/Lepnf3/9n/lazH6dqtQAUYGLNPuvHjclSUb45B7x2YxvltfUgVpJaRR7u6pNTuOzqINsi1WgoGYlNtD5iTYkqEQMUQZ2cbXmvdSRGhjWhpaquZS7W/tZTG1PKWYzj42syIkkDOfK+B4bc/bLSwuq5AipIucsqEFFwI2Ib5CQI4VwOuFDIBbwBpTN7fxyl8J5LVmTZT+V1uVVAIscyRoYfMijM3eWHy/cFXKqk7rgFQep2UmKkOMu+DBskRwh+RCQn9x5DGmsuQg9UAOmc9qipjfJcjmq7Zap1L4d97fcya1YnIs4Mjgk8y71EgwdHLsch+68IQDrci7JZyIpao4iVpNqMULMiFozMZtyX1zqGi0KjwwyUMUeBCFDn/VPPkapKt9+nXM+IJhMuTSrrsppR0kCGBCPqe6Vu6fyuGRJrpyTlde7UWpK7frNmjezvqYU6AKJA4k6luXOWWIIZbJpmPtqjPdDeaEByLK/avbnn89UoKXUP/n1pXce1Iu3nBRg19SHH79uI/f32/yFmaVXMCZQjZ5Vickx5s80F+/4IFgSs1pV0x8Asg/zaPul6++9FjrFdj4IkBTMDUVUgMrzi+wLRSiaxTlFao9B+pux6Wsoz8fbFgF/y1rbJmFVQ22YWeiD+SWX6Lkbf0B1sIEBfbZ2TLVwFqnRvnwHSVzteLJzwYr80annqqHhyeLINGD2ZOpz6D8Bq4zoH5MJUcSCDl+dsUBBtC1AdSRO+lKCF427I3zHBlfr2494RCjpKXxEj7VvqRiiUPg4WLAAVPJwycRLXv7O/03dl/Mz0I0sL0+BESm3Bsp4zNT13sv2TuwYNR/RARrNmzuWaOZJ6t5ECfAhFUah0qj8z1hQqTGSkmBBnaeoYp5wJ4gRHc6ZDLU0txubt9+GfvSdF4k/elmL43SVo3IqzP+Ymg2GLQAHjsMEubCWrtb0AKBTFQgVsDoDn2viQbp/BLXvEr/480v4GfPMc04sbyeZMsk+AFG2TdxguhTgUtgHkHbbvXGB8coHx6QUuPv82/MUF/LtfhH/rXbin7yLu3kIKGxySx36Rzue3i4zrN7NIGktXdKWhrTjtTp12BwGgrgQerUN/ypp5RjMjeTOcqZWcUsmKkwbGOJWMis43oyd4iqauIzZjjN4/JUOSUm2SaLIjSsmyoMVR9QEU0JAZl+y/gUjGhCxSIFlBBeEnzocqZJGH9Ani2uRwQAEXcmK65pdA05enBC601gqQ5pmP9mivYG80IDllR8AEuBc46elXp6hcfU3I8fbNsueK5LusQ+NQu3aZD8tsTQU7QBpsGYdy5TzZAnEFHXY9CgB6ysv6DuDIsSspcZhajWzqfMzMR6pYABowo
j0aBrOCU9u6r33WO6tbetU2EL7uSnoFaQfjmRmIaO7XyAnwwCEypuiKQ/5JWA/6ezBiJbYPC4ts7gqFq9Q4pVSekR6QRE54uZ8b+ta0cImYbrSWJd/HwbuTjrLe5xrdl4xh1+CwV8gCTDF6qOo5wyjULZJMievr1IpDbZS1jLSvSv3G7NQy2gL2NcpWb+1Y2n7X1J6g0ov0d/q3yrdy3g+Vbj1X/AyccczMedbxC0At+qZUGk0mT3AMjF6uQyKHwQdRFrK1Nqbze29aW8ExIU4RKSYs+1mOc4pw3oHniDgx/EhIzKAhYHpxg/HJS9AYMD5577j7/GabJYQvpefK9kL6uFAAj5eADyA/iihBNpeSNDqMC2i6Bm5fgPdC1RKq2cuiqBXnpcr7egfAw3mJ/o9XA/wQMD65wOadJ9i8fYXw9uekKP+td+GevAPePAFvnyD5EbcH6X6+X1ILRGIqY0ovt16vocupcQConec1q7gm794Hpo6CbU5AjfcCTpgdAM4ZFwZA8C5hDxZlau9y3RIjppoVASp1M57IiFT6VgtG7HL21rF1IJqZCRmIbLLKl87L3slc6PP9qhRGQjlFsg+GxlhqoEgyfQkokr7qGbrkpUjHmBaDHzUILVnZ17+GhIhAH7Ng0Me9vTfR3mhAcpePY7mnfe2GtXN1ILaHSF3mvJN61K9iJd26lhGxHFBLifnBH/+ZMqCugaRSsJ0Hqr5Dr1WtskbOYciSouf6e9iJvc+Q9Ot7FcdT5X1Lmp3MOc7b20HqTC4Gj5s54mWuCYgJoJQaGowVNjinaqZyxJzac6tZke/4ZZ978LF82uzzTwSAcEq4GHxD8fud3/71R8v/sb/+08XxH4gw+oTv/Q3f/LHus7XL0TfZGu2evkaz6rMj9lnT+i/7r+nUvNTGaRaAVXqHz0CnpRqqKd2xPMOacSQUKpJ20i6/tWpY2dFISp3Iijnwo9AohizTy3VSbOpHcvPD0vhQ+4zYvhErlKxzlK01Wxs6bT1I/z1BqDOE4/ELqGBkNo7sQ3oVrY1tA3EJcLDXa8Wll8OcfxMowAUgLSMcZfpUGAFmkbrlCL/dY7jcwnnC+FSKwhWYKAixZnnmy34CRZZi8RjhvMd8vW/UuZwn+PxKu8vSmNBts2rX5dOaRfFtObRbDkgxgm+eI12/kLqXF+8jTfuSGbE1IX70ZX/JS2O58ekFaAjYvvsWNm9fYXyagcj2ooCRtH2KhcYCOqaYTOPBlAUz0ICRU3Tmc3YqQ3IKjJTfudqslEjpX5nCRcAMwOebkMyrzlWB3BEIscEPIgeONQviMmgA1cyOfucDwXsCBQcfCOQJ4+CLzLAqeo2BciCOpIZSsyNogQyQn8tcD1YAh+wIQCYcmbiqzuWMSQIAf0zxWrVzvXse7dHuYW80INEJogcRrR0PQnxixloDIZZydc65bdZzZrkmYltqGEymwaEMMkA7yGraWNPZfY2HRl7b37RAp2y3AyunZBStVGKvQmWBmTaRlwFR13l8/Eq10HMRWcDIjXKSyWHI9I45MgZyeGsTMHiHr70YcDUSvrqP+DtfuTWditsutWvWU+esHKYWG2uHXntePuv2rV97dcTtjkm6R6/ZQCR1EV4m+Sv3yarGvbMdmvFB6Vkz89EzrmZrFSJaqc1pYdxmGtvtFAsosR2UlV+ODCx2YyhKXxsvToQ+r2reSbdsaYro4CANDws1C/KMBuX9O5dpFwEYcp8HXqSo2g9wOeqJmVCUb8i3ClQF0GSaltKz7N/agwQZgAA1Y6F0F0PbAnBUcG7tXCzn1LCpoMQlpdXk5bl22r6ZYxEIEAEKLfptr2lvJfub6S/kUII6ktESx+9y9PCUwEkpfIAfAkIIQJik8SMA2l2CiUAXTwTcAdgC4KyqFacF4XKL8PxG6jL2EzhTt1IGHwAylUt6fcwvbs52edYC3bAd4YjgdyOGi13OqFzAjwF+u5GsClA62GOR7IwoaR0QpwXLzR5xW
hD3BywGPCkAAuS4aQyZTvYEfjti/Nzb8G+9C7p6G/4L3yjiB1dfC94+weRGvDhETDHh5SQCGS+miJeHBTEl3M65W7qh5up8o6p0yGC8JEjusH4e64VLjpaHK0XsAB9lSphELZIpNzalBPYOiG0gwwY0AANC8lzPBHjpmloyIgpIHDmEQQCJD4RhE+AD4WobsBs9nmwDrrYDLkaPy9FjIMImULlPgwYwHAptiyE1RCWDp1m9RPUpXVPiw5mMp+UxW1lszZB8kFqUj8kei9pfT3ujAUlvtRlftfsUtd+lsnWfjMfqeh8Y5TmKmnZg5JytfW8pVf16++/WVKfki5xx6A+Z+3Wqo9Qei+sAIbl01E13bZ97qceBqHShHr3DxeBxOHNKTjmZa8vZ5lgF5NxFPfsE7L/+Wz8HwGTEOrUZC6hPRYz/qV/5hQdt89d/86sX9FOOIH6c9if/xs829S2Uo5Eq3cy+Zm+0LNp2VLf9hPRVueHnMiRrxfJKqxiz0zB4h23wCN5lid/6HEp/CNuwTu57lfm1lDiCfb46UxoGciRTMydWWrcsW7usax1E6abdyfyq9bUiyX5+woVZy6zc15xzRR5Vgk8O7sR2VMFPHVuVqD4lLCLrzE4ci5Op12/wBCy5nwwYCxMAl3vvyD5wApITmouAuAWJfKZR7SRTskxwyyWI9ggXE2hYKnVrWjCTULP8sCBm5S2ViNXXGCMyt7YE05IpJKccPdEMRtiOmLY38EPAcn0L5z2Gy23JqCiwKPuhheuRcwYnFjCSsgoYgAKK/G5E2G4EiDy9gN9uQBdP5d/lU6SwrYpafsQ016zIwpIRmSOX66XjVE+TXguCfZjiNOdMMyW6TSsJzDHfJ7GyAgQMEPrMqDP/KLhGzleX1ZoRR66AEcqvo69jiG3GWMYMqtRS547nWzWlY+k4kMCGuhVKv53VLMfRuJFBiKGMJgAu5eDHGwBIHu31tDcakDilCJX6gHVQApyvBVirD1nLjKyp7Jyy+2ZJ2v1A40yV6BDQOHYhT0DKTR08leVs5qN25a0pa40caZ2FOjwWSPSDmnK3bRGpHmMZVIFGmrSsDzWFrB1pY4mwpuK4KSCaY56cOCutUM3GaCTz6ejxKz63xRQTfuFmxsuD0rcSwO7IMVq79lZ6WNd/M/NqI8fXyRpZ2pzu12uqvGFmhznGB9NXPgzTWh+14ePm6TqriKN0xNQCTK81YlQj6SQZsphSqfs4BiFcvrPPt1IpAjnsRg9PrkQyd6PH53YDBiK8swu4GgO2GaDIPmYH2AD6rRdq18bTkWx2OQptbNZ1YAekPqA4BT5UZ0TlfHVZpWVlihZyw8NC28pF7UuqNXVK0eqBSW+vAkLsuNEfdUpalK1jkMNMMt5FdXZj7WA+MxcJVgscrdmItr12mhXZBMLMUtAek1wXiTczhuTgF8ZCDhebq1JzwwDcGEHDCFxNoOnd0rXdv/sCWGbsbq+R9telWSJPKgUsRe3L9b6AA55FYnfZC5CZXs5IzJhvF/DMIg3bPeR+9OIEe4dhFyRrMnr4kTLFi8pyAOCyZC8goEOjxzQEcYy3I5wnDBc7hN2IcLEtjRz9O58Xatg7Xwt3+TZ42IEv30UKI24x4HYfcciZkYUTvno7Yx9lnJWsVqod57ml2wIAk/lb/eecjadUxzwAqzUk5bt0LNbyEPMOADl4dlJc7zSjVmtIdBywTQw9Odx6yRovU5Take7hcBnwOHINCLnaSmb17YsBY5AMyVu7AZtAmTVQacyDrwwL5+RZSnnOjkig5OAdwfmx9hqyjUR7s41DTzQkdabTu+OYFbYW8PCmNEZ8zJC8bvZGA5JXtVMT6VEDw5PLrQCee4CTc5kSNtGf0gPjxADquwF6bVmrflUj6gpQqFCrFICUiKtZdc1kCERJrgITQMBMD2TKgAhNU5udyil3B2lqpXKe4hT4TN/Kk1MGJV0iBg7AxjuM3hcKwM3Mpk6mvQ6twID53ETm9
Ls5MvbL6yFZ+AM/+lNNsf7TbThSH7PWXP+V5mAPzYzcx37wx39GFJ+8dBM+xOyoGwriR2k/+OM/U67fbvCN4EJjDKgbq1mlmJIUqWrxqnl+j7MfjD47oqYO7YUBItqn5XIUitbT7Dy8sxuwCwJctsHl58Q1vTN8zgB6p53Xq2JN4W47ylKtXJyBxvpmgD5IUXJyBayU+hAfauf13I09+VpTwskUsnObTUxoqVtl8/Y5S/bzY0dM7Sg73AVF2GWFqLwO547HPK0JmzNwvJliBpCxodTpdQOq1LMn6XweyAHbgGgyxtE7DHlcGKIsa9e1JIcQcn2G7YTtRykyD4M0k9vskOYJ7uIJ0v4aNM+gm+fAMiNc3yLuJ8zXeziikrVwkbIaV0ScIqbrCTwLMImzfKbZUDkHKNk1IAuleAINEnV33sGP1HzmRwINHn70CNsAP3oMO3ENyI+lTmW43EpW5O2noKu34TYZiIxb4OpzUrg+XIA3V5hZevEcYsJ+SbhdauH6IUuIl0xWtONw22vKZyUtTg5r5E8FIffNntwlsFEEVk756S4zDErwsDaX3ATCtBDGILS/hVMBKNPCmHKw1Er46lilan62RuQiv7/aCgi52oYCPiTjmimFObO6Bui1Qalu0jsInTOxZEV6H9nWjCE/3yvnoYxZPgObeS/1aJGAj7kD+qN9euxTCUjWsiQPVVey69GIx9pA1k9wp4DJfWlbnFAG3kZOlY9lDW0NCVCpWPqd/m2zIiol6qldR49xnHMYTLZG1G5y7iR/npCOwIwFIupwWXMuFbrXULIfx3VAmtkh5yRTsjBeTtxkkQDgYiBswojDkvD8kLX9zeRczqt1OLt7QYsq7aV7CK3ko7A5JpBL2DiZaCw4q30rEsDAHox9RM4qZccr1QzKR2U3c8wTJEpdBFCLUz9qhTIr6iDnxwFIiDE1Y0CJ7nc1BZod0XqY/RIbwFHpF35VolsVb2wn+KsxwDuHq7FGLq/GgEBSGzJ6oR3uBgJBwHVDc9Du6pGl0eGybw/adA7vgUjyA7CWkVLvKmdLbMPDQtdSaV9tfJhQatX0GY1cC9itLG9fS1L2x1Jhj/eqgAv5/rTi2BoVrAU91cljk+Hqa31kv4/Ha7mGeZwnBz9FYASwyFgzpFrTpwAcSJg4wSfAO8Y2BPgxiAJXYjg/Ajli7MYrIDHocpLrG2fw4Va6uF8/R1pmUC4m3+6vSwNF5wnLtVz/Ss+iJiPCLPsBABMfAxNp1tFmDccM3DejBw2EYRcQtgFhV92BYSc1I347YnxyibAdsX33KWh3CXryNujpuwJI3v48kh/Bu7eQxkukYYvbJRetZzBiC9iVqlUa5BZaaWquj9aMRK5z1dF9kY8x5jkhplQK0mUBHTA7XyDWbMmaIqZVblvr/+GdZEkGLxnWTaKmRuWwMDwRosnS2SDH0fpMlq5SsyS7qo1Vh0xT1sDLk7F+NuaMqja8tHNuQqUlC4FAfYRWZUp3q4AQTqZeLJXvgJqlFSVCAjlCGC9kbFompOFi/YI92qPdYW80IOkjbpbudIq6tWZr9bl9elejMECroFHXcfo7yyvtjTPNyFPlyGoxbbu8K6CkX00vwwvgiJ9OmY/eZ0Uszb+PWIa8rZkTUpRdaE9L3lZ3nDZ62e+rSy7T3MWRcU4aeekp08GTyGFDXpzsyJgj8Is3VeY35AH8ay4kCv2V/YLryeXJDg0dqz/fMbVdfg9LLYRV+6Tlfm/nWDJaZX9izXJV3nXCPKdSsK3OtU5c4NrF+MO2F4cF3smkOJDDVZa0fXZY8HL66KUf55iaY9NrL8WvqQEhej21RkhqDeT8TbEWpi/ds6t0DI1WDiRRSXIQRyFTfC4GD3IOTzYB5CDAQzOS+bnTpoZb70D753Bxlv4PcSqF5FbZys23oulvMx7TAYmjKCId9iLBGQY48vCXT4W7b8GGro+opWU5J9vL2RDt9j0tte5Iz0XkCgp68HEOdNz1CEWYQ
EhKAkp0Xa798al12Yauc26wp9mR25wh0UyJHEsdpzVyrb1i9HNdTqLbsu6BckG1i6Xg3bsE7xIAQkySpd0OFxLw2VyVTElDq8uARMEKTbdwvCBdvy/9P158FbvtLyBNuaB9Kyp3Wl/ivCtKVynKdVIgMpv3U85m6TNhbcxzw9WBMDiHt14SLi8GyY7k1uXunZ0U7WcgErYjwtd8HdzFk9JbJPmx9BdJmysc2GFaEq7niMjAbQYhN3MsgiGHhcu1UpqWVbDTa2AzrUDNaFrJ3zInc8UenB1pIhRqVYx5TEAqeL2pG+X2MzZziAV4apolGZAzrAEYSMaiOeQxZXs8pqxlV/XVBjdUYEHHGQlsCIVQx9qLweffodacUe1BUo5NwRonkfxFnl/tM4T6fFVg2AIZWS6DYuh+i7/i8njnXcAwBvDmCq+7OUdnhSI+qm0+2nl7owEJcJyReJXI9loERkGKjaRY5/BUxuNUJuUUKNGiyo/ahGLhmpYj1o7oVdBC1fo90Ec1T9cs10FsbZ3ppIPRF7QrRxxAUcBiZswEbIPHfkkAtGiSm2LuU+adSc0nAXm/6x8+lrC9r/3pv/lzDRiUbcj73/QrvvZB6/qBH/0pAMDTbcAmp+E18/DPfOsXX3kfPwrTXhCHhRG1TilHCK3C0UdlNQCRpJty3t6CFoz2heqFvpDlm/SZ3OUmitZGo7ik0fECwDYBW69KNznqnF9HX4GIKmWVzuHTjYANXuCWvTiobhIqFlABSZyRZkPZykAEzEiHW+morZ2whxGcgYkWr8pKcr61z4jkGpGU+4vMLM/krLS0pFnRWu8F3A90POS6S0Y1jyUZlOjn/Tq1OaLaQ4IGa4Gi+1rJEJlo/syMmByQa5QiAyCHKaZ6zV1oE1ban2GZMrVLmyUuoN0iMswcBZgQIVyIKtZ8fSuUqV1C2MqUHXbSIX0DBR0OiAwih7k4l8DEmkVQsL5+jI6EyiW1Jh40Bilev9xhuNzBjdvc30QkhVPYCr1v2En3dRAmHX+53jtzpuDabIOez3PXqJ8nlRZ1n2tV52kApnO7XAN3tLyaBSJ1Py1wMWBCI3mce2d5B4BLsEilhC3YPSf/rw1TK63ayfhPNeCjtWdK5fWulQJXcKD0z3pcKgiRgJTn2Ob7ln5paw/tGFDOGVLO2gMgwCVgcSLwAEig49Ee7VXsjb5z1vqH3O93p9PAJcNwRIOswGQwqjw+R/JEfeU0UOmbINpu7FZyt1++7tf5Y+onGm0aRS7TfwjScTZH+vrldUADaiSEoSlw4/zl85BSWnUa5EupN9H3/fc6CdiBzuVjthmAKsHryjEduGYDiBye7SWt/fIQ8SxTtnqFLt33Gtl01ZFOH1y5pdI4PrjtF8Yve3uHt7YDDkvEzazSxh/K6j9UmxbG8/2S6WVVvlozD1og/lGZJ9fw59c6xAPHoN/2/SEPbIMMg1b8YSDJOqoUrxaPjp6wC7luZKBCwfI8wcUZbr4VOtV+qlKY6ogq0OAFiBOwLNID4rAXcDG19Cw3bpsmhhWQxPIbkK/9J2IEMcvvNpoRGSr4CBvAB3DYirMac3YoP2uRJStiaZSRj8E9nRinXuUpUgdKfi+yvs6hRHGtIh8noYkunMFwoQbWfVHVI6W+AChyzZoR6ZWQwonBNbICjVQi9PuFMRBLTYNzmbYDLCQ1LgpC1Sm050ojpN5t4YcMXHdvAbwg3V7Abfegbb6Wt9cAM4br5+X38/W+NFUM24DxckacIjb7BRxTLnxPWOaI23xubqOAOEvn0oj6pSeM5HB5MWD3zhbj1YCLr9lhuNzg8oufw+7z74is79d+PWh7Cf+1Xw9sLsDjJXj3FhBGHGgrylkHqReJnLCPcv/cTBEzZ5GBmE4CEb0W/fzpzZiiH1sGQHOtynrzfJxpm2w+09/1NKyjwIViF7NcD35JA1DkMOhXpk9T//s1s9LfOt7oXOJdKz/dgw+rvtffZ9Zq5qP5tDneBozkY
KHWjaVkz089dkCeV73vFxY69kAOt/PrLQwDPBa1v672RgMSnTTvXu54oTVQshbhPmnKS7URF00Hr0R57PrOgZG1ubHfF4143Mf4jMNtgYmmevtTdUrK08pxqunfzrmj62IjrJb2oQpeappaX7NCu2EpjkSumyByma5RnfeYZHAUlS5kpRz5vUo4xpTwz/5DX7e6LWt/+K99qYlY9f1idII/3l/gz//dXyz0BJ2U79rmNhCeZAWc/cL32sdPwhZOgAEcaw1HPyxA8od+5KfK86IqSBL5bBV6rCpeu1/ZGTS1U7q7CiZ14ieHTJdwpQ5kF3yhYV0M4jBcDgQXJ9D+tnS9LpmPnN1IHAsnxIVBIh0ckZZZXm+vkZYZ6eYF+PZadohjBRraOwIZkBwyIJkqIEkhtwXN64TtCQBUCldW05rzva9SrPpeAcnCtdkgcOyM2R4PXAIWp4MpwHF0uCyTKuiQHiMuR3HRRXHrWKEgqY+8r21fAUfsHN5Tr71xSk1hNaBywoQYkmQlHOVMmxwMOVs/V/dNT5F3ADEQKAGeMPgRadjJ6YgzaHuZ1bq2II7w2xuE7YgUWWo7ckd3AFk5yyHF3PsnJri9A+Vid4ALuJuTmWcg1K2RXKkfCdsBw+UGw+VWMiMXW7jtJejiicgYj1vJjIwXSMMWyJK+MQ8DCmqVklXos6kFI3fVc3rjqFu7r0rWufXH1jM/W294VwbO1uWo2Qa9q1X4qGDKZtUVhGj2o8mWGAU+zbaqzC9w7AvYx8Eewppcdw9ENBNZlDBRe7Klbp1aD0pO/R0A3EwJj/ZoD7I3GpCcsj4N29spIGKdFFnu2MEipxmG7OAaYFLXk7oJen1b8vfxtpr+IDrodBmWHpT0ESJZKK+ATYOnpIocuSBdf5UkFdsbQQBCD1bWKBypG7GagRH2mtTFEqqDEVNqFFfqsVUgososWnD7bJHos3a45gS8zMWsAMEnkUm29SKaGbmvFUfVnHBOMnHNcpKwcdQV4ut27r0ZAMAvf+cC7+wGjJ5wMSScb/f4ydq0MG5ZVKgAYAwyA2vncm0K+GGYZimso8Ldc95fU0uDaAAGVZUay8fWzuhKuSInReda9+HiJIXmh2u4ZQIdXgDTHnzzvHS45mtRTUpLpVtpwztVXEIYQJtds6/xxVcxP3suvR/y+dy8/QRuewkQSW8L76VmxHsBNJmu5YZR6kg2W1E8GjfiOJIX59EPSMMWkaRg/Tbz2vdLBiIpNxZkESooBccr0Z6+h5EV0ljra3TKZm6zxZ5cHjZqhsRaSmiAUiyv7Xg/eFcyI5GHAkJ2SkVbo+QVyp7cq5uivOWOnGAJdgBgxmFxjUO5sPDqgZrtsVYBiYCXITkkMBZ2uBgvAEfgxPBxgh9GpHeuwTdbbDjCEWG52YPGAJ4WbN7eS88QFgUuzZwAAM8RcZLPl730F+GZy/dagzJejvAjYfvODhfvXiBcbnH19V+LsB2x+yWfL40Ow+e/AckP4It3sorWJQ7JIy4Jt0sFtJPJjGgWW2m0GiQ6N+72tLr7AhCtIwHylJev51H94Mr93Ef/134HrMzXZj7onwf9zB6DBfH2vtdMh0rm9+BDehHp+o1gTN4vu6eFGpYMDQvHWY41ANIvF1nWrcBSzl97TqxQzsyp3Nf7E01zH+3R7rI3GpD0A9xDCtmBdXCgk9PaYKjdpz0sONHlqyPt0a6jzY7kfe0AyKnleiBylylPWKOHlBzYRjSd7KulKWhGI3HKvV3ytgufu4KSHu8ALZ0CWI/QrA2cVupRo7FzN2mwoYzMWTLS8rmVDvDWNuAqy89KbUMqzkFAnShu5oglptKc7j6mPSN6i0kVnSoYUcUTLIQDNGNz/34g3/T2LtMBpEbmdbZpkcLhm+zoXOSsjhYR698fhm1NQec+cgGkvdnspILDi0FofVtPeGsbSuft0ef+E0GWK/UfBFG3igvcIWc8plv5bNqDX76PNE+Yn78Hvn4Bvn6O/XvPE
ecFh/dfgCeRbI3TUtL02s+BhoDx6SU2b19Jx+uLCzjymJ89x+H9l/m3uYN2ZGzejgI4RkYiEiBDJFKyyyQZlzAITWvcwg2j0LN8pmqFDVIYEWnEIWdEbhcuRcfat+PltBRZVlVJs4CvOFwOhVoC1MjuEUDpnZeV8auIIFFCn2npVQttxmYfuSjKWeW00neJCH5oe0FYCWdrKuGrNC99X//VfdL5Rumwsj86NgktVs6RDJR9QMtBBT+k3qnSfoFAhHHYAryAhz0cBdCTd0pGbQQwXIokcGLGfL1HnKQniTYw1EaHcZaeJTwvucFhKqDF2ng1wA8B49MLbN6+Qrjc4errRcbXv/tF0Fvvgi6egrdvIQ2b/LrFlEjuoQw6evqfvT4VPKqSodYo1ch7ez3ac3YKlMg10LMK2Bmmz4IApwGHBSlrbIuyO2UKsBTgTO/sGtT2fb4qha+CD/l9FZaxVCwtUC+1Z2AgSdbTLa1YSPJZqMJJh3dOUtMR03p9rT3vWh+i2SsbIJRny9ZNteeKcgpzyHOsBnwCOezfgBTJI2Xr9bQ3GpD09ipgpP59DEYsCCh0H5dBSRfNaeo/VtYNPByEnPr+LsqWLV4sToVRIpmZ4dmDnAAW2/mYksj7ApIxOZcZWQMiPQBp63wqAAFqxEUnLn3fp88VpOikdkQhyc/5HPmo7sQ74Qyr7Cdzwu/89ocVsG9zYys9RqCLlJlrtLCIB+hxHA3iDPzQT/zs0eR3MUifCuWlf9vXvfWgffw47f/+X//N7OgRPHFt8pdPyiZQkbAEgH/tT/0kdqPHv/1bvuWVt3k1BlyNojZzweloolTqEVBpfNoNXeo/RJp3m0HHLtTCc5XeddMNHEcpNJ9uAV5A8y3SPIEzpSpNe8QXXwWWGdNXv4q4nzA9v8H+vWeI84L5xU0TsSbvpMmcdxgupbu1ZkD8EDAyg4aAFFkAiieoy8HzgunFDWiYMFwKjSuRF4BCHnTxFG4YhFIzjLnYeJPrRUaAgrz6EUusnbLlvTjRewNKqgpSakQJxPdI4JwxUPEA74QqqQEPdjUAotZHq1eLqtlhTZrV/q6XYlU60Go0O4sV2IJibXKJFRmOXsa5qB2tgShOiJmaNTuGypLXYweQ1ZzmAjgqeNMsENiBctd3rVGJjhDCKHQoR6CLK5Fm1eL3wx4775FixPBEmiZaIKL3FUcGT0vOngho4al2iFcLl1uQpwqQtxv4d79YGx4+eQcpbMHbJ3IvjReYGDjkcXbhVOh/C9fMlV6fAkbYZEjMfQUcMxgsrU67tysrYY6co/H5nHLKylnubMDnHOi422eok66dl21/L6tmqUBE69HWQIctPtf3uo4CSByAOJXmg0Xiu++krpLhWS3PF0ELB2E/GvpV+ad9hSol6xQQ6Wv06nbzdeEc7FukoJ+9e6RsPdor26cKkKzZubRvGSAMEDmVibAKW7ZzbOkZwlrYez/QAbTAwy7X0MZW9mdtUu/T0TZ1DajzJsfHyYFZUq7b4Jsu68Lsquvqa0VKwV+qnG5d3FKv6n7B/LYd2OyEpWBD6VTWKdK/pW4gnwON0JIczxwZzw82oovabTlykaF9lc7hV5tQwI7lF9saktJpnhmHBbkBWDyaKAfvQHl/LbgSeUyRDn1TVNyVkjWGdgbajaEA9mmJiByx+4DZkq9/usHV6M0E3j4rWoQJVJdTqQ+ihJULRuMeWCap9bjZwy2HBnSkwy14f1P6Q8Tn7xegsdxOWPaHDDoELEwvZ8zXMw7PD4UiE6fqGGqTOUcO23e2GHYBm6cbbN+9gR8Ctu++hbAb4bcjwuUWKQpA4Vm6d9++9yxHsS9BQ8D2C9K/hC6fwmUgQldvAyEgDRcCSIat1CT4gEhjodPcLpL9uF0kmv1iiriZpUeH0hyluaWcP9vnxRNAbOqnUq23IZIO1hakA/o5yjoAFLAiTmQq2Vx1cOaONmo5/raZaUxonkn9nLJjJ1oAHr3S0Snr6/30m
NfGCwEg+lfOxuqU0GfdDaVtyFmUgfLEketOmCXDID/cImwYacgyv+MV/O4KdPtu6VsCZgzLVAbX1DfGBHItESNFqSmytUx5x4QGOAxSJ7KTzuvuyTsABfD2CTjfT8twIVS/3G19ikL3S0g4LFVpT8dzDf5okGCJtT+SHc/XwEh51Sw9uwyGK12anFBlyzW7o5ftXeI3a8ImJfhXgIcAINsJXjOCA1GZSxVkDIRG5MA7Gftd3m9CzYaIFLRcTzeLJHTzWVcTJjsorlsKQ5EMR9gAjhCGLaIjgHJNkTk0pRwKiGxByFpg0AYET1HZyLWZ0un2o5d8/6BGXpqCftzbfLTz9kYDEr8CIO6bJenrRT7wvpwBIudAiF3OLtt8Zn5zl3KHNY0U9/UllFypK/E5yifcVKFO2FNKKfXsiwaMJJMaLpNOp+suvzkGNZarXl/5CIhUCeYEmzIv+2ic0t/7j37Tvc/Pfe03/vKvOfrsh3/yy8XRWJP2/e/+9i8cZUgAyCSxQkuLJMf/Gx8oE/xx2n/0F/5OoWdZVRxPtXYEEF6+dirW5+Ji9Ph//eW/h6+5GPHFJ5vVc9rbT733AksU1Z6r3PVcnERXeuRUyN1mDu3Ev/EObroB5gV0eAG3zHDzjTSou73Gkus++OY50jwj7a8RX77IFCyhUU0vrhH3E5bcTTtOjMPzA5bbBdP1hMNzocYoZ1+Nc5icvMOyX0CeJIOS+0rwPCN6Ag0BtKWjArcUGRgE3NAAUd1SmpbWjAw5G+KHNjNCoWQGNDMi9JrapK4UH+szt6JSJtcWAFXHrS7j8onXsy5XQzOTyE4Xpyq/qt/p2HSUze23bcCI/fuUKXVXMyUAmj5SusyarQmO2PFGDzUWEAXJPuMYZEkPjJQzSLnLPEtTvZQckktZXMWVcTQ6Bx9GuAWSKSES53QHcUw7UYRTlmL+bsn0PxU7KAda649E0vcSGEak8Uqc3PEKaZBapCmrZ01RsyK1SF0DSHNMxYnV7wq9Ta+jvYbdpHIKMNYMf82iyT3jcoPD9eO/KxPS32t6varVeeacMAyAQtVzaIOczqHUpHnKfb1Q+3u5xHAZWLo4C+CIU1bhWwoQcRmgWEv5bwcAIav5c0ByAlyIqMyz5TiQJDvSZagUjNRrZ7JdJmhm6ZFlvaTzec2Uztx7DI/2aPezNxqQrNl9ai2aDERH0zpla0Bn7Td3FaOf2o9+mVMdttc+X5MntFK5ZUDJ3GYdfH2mn5FzGDhzrwlZwm+dFlZSv6gZkTW61V2NCRWEaJRTMwYCSOqy/bZlnfb9/QHah2lrhe7W9pFzobBGnVOVJTWRZeXrS8r7jlDfJ2xjIEwLH8mk9jQX7dmxDR5Xo2+kNt/ZDfjc7n6F+l/cCTd6yU4rTTdCrZpvQdNtjSKqtO4ivR3StF9XpGLGfHtdPuNpxrKfsNzswZExPb+W2o8MOlJkHJ6L1Op8PWPZzyKversgccL+2QE8cyOzWgLdLqsYcUKIoUlrckxCtSEWoJGL3v12A5CHzykyGgKGy23uln0Bt8nc/sunkhXZPUGiAB53EiUNW3Ei/YgDS0O467kWsO9zkfHLg8ix3sxcC495vXePlUw+N0Yqp99mkuv7E1StbKfoNv0Y0jes4w6YSH+FdvwTrCzj3MY0Jjs1T/TR8haM5P1Zum2sBJwsPUczRUyUvxc5oiFX6cWU4BzDs8PCjBg8fLjAOF7k+31fHdani9B4eupOZ05OoPyRGM3R6nnIqmtac5RyRk1B67QkxJxB0zqRhSUDonUi+qrBJaAKjpzKdPfn++gacNskUW2tN8kpO7XeU2aVvci5LiMDDOUZ0AxIpWhpZkQDJZ5y80myzVEhohiJ4fYHua5xkmxtSlKflptmqjqfyoAnZqjyHogkKJF7DmGzA+II50dRZsv1JBi2GCggJpfZG8hUbH2WasG6Zkaamh9um8r2vZzK9YhyvhYzp83z6z2PAdJ352NvjPhhRsA/pfZGA
xI64zSvLdvbGhh5VQf3LiBy6l5cBSoPuHH/qV/5hXst9yf/xs+2RaqZ8w2QUcRhzNwW5PWma9DBTKMr8lk6ch56s86EVWFRIGIpXyXi7lU1Bw3X/F/8x3/pvY79o7Df8g9+/uz3sykO1sJ7jbp6J3UN9kblhI9UneSHfuJnyyRzM8d7Z5L+6P/4peKs+kxJ7OVTFYhIt3KhLgTv8NYm4AtXGzjUqPzVRuo4fu7ZNd5ZnsHtn8N95e9j+dm/JxSTeYLbbDH8il8DuvpawBFGRwAvoP0LmchvnklheYxIe+nXwLfXSMsk4OMgYGK+2RsuvRb6SsQ4zkvh1c83eflroV3FmTG9rBkPS8XiyJIFYenxMLH8Y7TBgME8OmNkuNg+SykykhdFrcQsk6M6GUEA23A55detULTGLejJO6CLJ8D2EpxpWWm4EEUtlWJluZciq9KR0Ge0cP1mjqVepKogHfdwuStIo/eEzUzJOTDRZa7c++ModF3mlK2NJ3cpNdXnyjiz3Th8Spr41DHrPd8ru6mV33E+J1lha/AkWWYnqRTJSgMKSsgREoAlAknH4QXwpBTbAD9eFWqP9rRxfcT8RBfofrmybJaCTkCpA4mcMM1CMy2ZEK7qhlpnpOIiQB2zzjmvNsq+FmyyTm4PGqxs+H0aI1p7SCNMDaoAMt9wBAZ/en5WWh+R9gHJtKwMSKpkr0NwAOJUgygqER7nAkTS/kbGsWkv6nx9Foy5jA+JSGSYtREqcoZkyQ1PNatCAQSUDu1qWvOp87dmRmJCAyotoARav0rncECYC5zP3ZCo3BuP9mgPtTcakJyyc+DjY9n+SlbETtrW1virOkGfo2o9BLSsbVOpBDFxid7NLJPl7GSfZkITNZLjSGYgO50hmTvn2mY0dPKyERlpMiWa/5RpFkM3x1oaxetuT8aAwdfeKDNVDvWp++AuLvQnZZUayaUWRCfk2j04AxJyeDJmVavgcZGbCGpTuK0naSjoGO76JdxyQNpfC1Vq2mN5/yuiJDTPoCdvAxCaUuKIJS/HN88RX77EkgvKeV4wPb/Gsp8KrcoqCyXz7PRRqsSSrdAi9BQlKmzfy7oEjCROmX8M7BAxZpqOZgH0edmMHjRQqRlRedXxUno9jE8u4HcjNm8/QdiOGC6zSlYYhYpFHnT5FIGjAJEMSNyTdySiPeyQslRsCpvMGxmxpFpkLM4kcoYkGlpNW+eldRf6/BX1py4zsuacrY1fgD7vr9Z0dI3q2WdGdLmefmLtXCZ8PWNtwM+KMzuvRViKteezFmajRI61hg8Aoq9AwDuH6Ak+U+JU3tU2WdTfOffBp2w9TTPHorIkdCpxSFPOkOyzwEH72oqH9L2frFVgsvJZB1pOvS7ddXgIyLjPsjawEgwo8STzkNKEFWg3v3Vtt3RVZROlvtx0My1wszZNNb2K4gTMk4x9yyz9iDQrsswSbFmmZnta8wPyIGakMIDyXjkA8GOmbkXJqiSGc9SAEbVknyFu+/kcX0esfl7/tkGAem882qM91N5oQGKjGtZe1Wm9q2HTXdu5CyPUiEL7eU+zKsuvULF0ORt5vK9ZDjYAzEjwrBM1myK9DAhMoVo9hjUHQSenlneq+99HVmxBty2cLQolJj2+NtHZ/Xhd7WrjccFeFM1claqdmQ24a3/zUXJvvTsuGL6PcUKRz6UMmFRK1zuHq9GX5oFvbQfJkBj5Y6UsXAQn0cBlgrs55IaCz4DDLaKqVz1/Hy++9HMZWPztIptLngQM5B4d8/Uey+2CZb/g5c9dg2fGzS/e4sV+wcQJ17FVavMOGLKjd6W0sgwYgONiQwUep6z0cRiG5vfOO/hR9jdsA8I2wI8e23e28CNhfHKBkIvXx6eXRXLVbzdwF08EdIQB9OQd4fcrrz93WU8+gH1XL7IS6dYidpW/3i8RNzMbOdZaCG7vD629oBX9gbvpMcYpoS4Yk73yc7Stdl3H4wxQo7c2qLGmALQmHav7E7qi5AaYs
Xkm8y0RV7ZzilKq25J9kPezSwWgLFmUY+aEgRhEVSLVjrWq0GQL6ntVx7vGv5NyuQaMambM1n9oTYiNkM9NtLxdh56jV7FzIMR+Vt/zUS+Zh4COc9+NwRd/omSARwBWuUZ/kwEmmYy3SDcLqBkyVcslFuW+5SC0uwxE0u1LyYTcXoNvXkj9mmZ4D/tSXxazEIuOgTQECVaEEeAIF8Zcu5QbDYRFsiW8AOwzre80LcnWetYsSSsSEe0zd+Kes5lPTsDyBvQheZT9fT3tjQYkzh03rnqd7Hz0sFpLd1g3TsBv+7ZX79jdKl+Z91Bgk+tJUgIgk6M6ElqoKeup6+gBzto2+uiKSjlaW2sECVj6Rf1Q1/M6X3dAnAl2CZQ778Z0XMz+Udmf/ps/d7StRjwgAT/woz+FOfcOAOrE/rWXI77mYsScqV3aoX7whBhEclezIAORAK8MTnaDXMdtkN94B9HQ5xl0/UI41NMt3PQSmA6Iz96TAvKv/gKm934R84sbHN5/WQrHl06tRaVgtZ5jvl2w/+oBy37B+7czni+MiRNedpQB7UitWZ6RHOIhYZwzv3/0JXNSwEX3N1CBCOVX5135XsGNKGpR7nwdMOwCxqe73H/kAmG7KTUhfgzwV1dwYQTtLkHbS6FhbC/gwoi0vZKop9KxFHw4hxQEqHCSCHTtrlw5+5yv6ymcqzVkp2hOa/aQGEifJYn577LtPvp9FIE9riPpC6XP72ubDWnGlu67Oq4Aeg7uG/RonHMd87M6FHINi5wMDUawZEvYFaqLdeoGzjLajrvt1Pfnuolbha/e7Lhtx4S2hi+Z9xWAnAIQR9t/wE3Sr8NmRGzvmBaYnN6PU1S8u/ZxWmJp7LqY9Q6kc1Z7H/dSwlVlTV4dIBmKlIpCVilOX2bJgmg2ZM5U03kqvWMAFJlmx4QU8z3hJayU5twYVcUK1pS4Et/1OMs6c7DqXNDqWMzi0R7tw7c3GpCsqWytWVuYWN+fUoE6vZ71bXkTHSCHI4f/7DrptIb6Wobhg5hdXxNZjBUUWN9Esya6n6fWB7T0iX6CBmoPkf57lfLV5lKN1GguBtRJUuzYuXgdbRMcAlGJpt7MjGf7GYfI5Rg1QqvOyIcFsux1sZkZ2+HaZmM8OXxuN2AbPN7ZBbyzGzDHhK/eSr3F1SbgYiDsgscuU7CuRuk1svMO7vBS+nfsn8vr9BLp9qVMsre5xmMvxeTLz/99PP+pLyNOsyhYZXnb6eWMZb/g8PyAFBOm6wnLbUScI/YZmOikP2V6ic2GPMuF5VrTYe+xnacCSm5zxuYqELb5syf53gvbAPIOwXuQpwJKJPPh4Qdf3pN3CNsgxegjIWxHOC+vNASR8d1uQGOo4OPiojQwpJ10YKfLp+JYbC/gtheAH8HjJTiMSOOlABFrmfp2YIfrfcziEjmwAKH4LJwpWxrlNE68SvSq6hQ5aaamWcmHjolWPcvWjrRF7RkdAfnBTY0D3m+vByJ6O6/Rg85mKbqsSK+s2GZIXAkmK/DVInlyHRgw4OiUc64O78xtsbSlnd5VX1i3VyPUNjtzFoyYTE1vPcgQeW5ZsfL/+0aSdzn1tsFk/739bK0uUW3hZLbbAhHdR7vMqf1a27/1fa6d0lUaWr4PiF1NyUwCKAM7eKeqiBDFNCQwXOnLVZJwuebHca4XiTP4cCuS4tNemqreiMLf4f0XSJEL7RSoSmk0DGVsCZHhh7lkSVIYkDY7GUMSF/Wtc7Z2yryTR9Q7eQ580nvJpg3P36ivQs/8pOwxQ/J62hsNSM7ZqXHPOtx9yv5V7IhmlR4YRTwzoNqJ9yH2gz/+M83k29SimMkNMAM62Sgdt1kTh9L9to8o3seOGiLm18FMYv157MGRptJfdyCi9q1ffKv5+y/9vfeOjrE/h945/In/6e+DHPDP/cMPa954ar1WSlkjni3AQ85qeOwG6Ya+DR7eMS4Gj5gSnozy3cYTrsZcL+KTUBBur5ti87TMiM/eK
xK6fCN9E9I8IUXGi5/+eXz1f/1pLLcLDs+nUiSuNRsim5tweH7AHBNuI+N5dpK8c7kUuNIMbjMgeZmzIxMnzN2t6VkyfBNrnYeAGYJkTwDJiJCv2RKfa2U0K6LZDpfpWAJIBtAYihqW84ThYievWR2rdGPPWRDtqi6AxJdsCMZtVTvaXElWZLzAkr1khzquaPR6YuH712agsozKq/bPus+OdRN0aLIjuqVju29klFNLElkbD7XeZlVeuAsYrQUxzpl19tcyImvvY9Jx5fh9T5Vq9tU4xecc45i7tsvxCCBb67lirRf3sP96p7wcS1cPYT9b2+/DwkfAY+oAySnrwYbPYiNCe2odLyuCoWaBic1I1N/0wKge9137qNtaU+qy+xI5AdrUtcvM9PujfxeJ9nx96NQpssBA+8XMuXcMs9SNLBOwzIj7A3haELPCX5xUqrmuw7EXefBM5/IckZjy+rriQ922ETrQ3eTub2tFmhoiyiDPrgCUci5P+ExvEhh5tNfX3mhAQu4M8DgT0V8DJdqBvV3/+iR2l50ax++aVNf1/18dMJWfcl8IuqKcwVVFhpNovC9IR6CGz5yGAji47yOiDRTXopnt3zZqqM6SbUTZc7rfFLvJUogbT8WhtLbWp+VV7flhWVW4ATRiShiyEsw2kFCvRqkD0QL1jff43C5k9SyHXSCEuAddvwe3zKD9M+E9768RX7yPtEzg6xdIy4T48iWm5zdlgk0xYtlP4GnB/ivPcfOLt4hTxHw9Z1WriDhxU0RuqSVbdXqaZ1DeX2bA8NZAhgfdng/NjhCAnZf3FxuhVIWdx+6dLWiQBoY0+Ka7utKyJONRsx9+HET9qmRDxtwt/aIWpoehvAf5XLQuvR+Sl4ZmKWyQHBWFrOQH8Jgb0S2MOda+L0B9XvZLKs309PLe5AiyFZ6wsqw282CpSbZHB6E+v9b5WBsf1dZ6jOjKVIWruEBNpqRbz0pWZK0WrS3IX4mCr3xW9i3p/uq+t8usrofaZYdSUyBfTOAj4HKfCL793jrlLdBpswTWOe/X33ec1896MLBGeTq1/VNWay+qIqMv/9i8p6Pf6PGey5b0dt/z2F8H+7cFIv379jOpvVgM+NE6upkTiBN8FJU6ThLMARJmlxByb5rkCE4bFvoAxFmee45Q6V5HXorQzTlak6J13sMR5axtbuiXf48gRe6g3KFdt5mV1FIOWgAtW8NBdDDA2iMnZy6dvA6exC9igJP2KMtjwR1JmI9TQOjRPl32ZgMSOt0H4tTya6CkfH9mcrtPWr23c/SHfoDtf9//9iF0HksJAtCoXliAcLT9ik4AZMqCAjcd1NwKqCB3xKntwcgagCCH0ptjLZtj08enFFzeFFMqxCaQcLZZikp1opPi0Q+nGPBF7jOhk6wqYildRIuWn4wBT7ehFNMKRU6K0XeB8DU7L8Xnt8/gXtyADi8Qv/y/gac9pq/+goCR/QHz9b5Qr1KMmJ7fFCldVbyaMvjQRoI2GyKNAuXYlwzclJYF1CxG5f3X91oAXJ3Z4/MxdJkP8g6bpxuEbcB4NWD7zlYK0S+2pYDUEeXeIGMGJBuhXW1HDBdb0ChF527cwm22pf5DsyC02SGRF6dAHQQKMuF76f0AR0h+kLqQYYuYnaD9lCV7YzLZj5aCoz0hAEBEPROup4hnB4muWlU8PS9HSn3Ovnf18SeUIMapiOiRg0iApW7ljZt0icvUpw6Y2HWWDOpxcMN+f87uKr5fGxPXTOcHC0o8UBy0mBIGL/SnMTtvwHpUHTjOApyiJa1lQU7967fXgxALRs5lStbWdeq9XT9QAckYqHy+Me891eapaxmLu6zfj+Pzd3pe7cFI/96CkmC+l1dGZCoZmdFnQQLjjcvYQ5gjw4EQs1QzZSnd4AjJOcARXH7vdPDtgIfzBGJqHot1cCJjk1TUZ1CSAU4y24OXMYczPZJTDVz05h0yEMmrhctzkStjgYwPKnrRji1H+/gGZEuco4+/D
8kJae5Hq/ZGAxJrd2UwSqSP7ldcbNd3n/FzLeKmdkpz/S4Q0q/jB370p05mbXT7TXr6jBN/n2Oy9RynCvSB41qXaJzrFpy0nGZO6AVMiq1RuLQAl18xhfDn/+4vwjvgN/wDd3cJ/7Dtn/nWL5b3f/Jv/Gw7+Zlz8mF0mp+Nk2PvB1sn5MkheIetl6J7UcoiXA5Cy9p6B9o/h1sOoNtnoOka8cVXpf+HdjPfHyTzMS+5z0dEnJeGaqCmlCitwUga3WeZ4HhMSDEhbGVI0soJ7XpOnpqi8kKpWrmRVSFLv/OjF/WrnAVxnjBeDgjbQQDG5RY0BoTtpgKRUYrTKb+G7QjaSoZD6j4IdPG0gBC3vYTzvmQ82I+Nc2B7PwhIyWBFO6oncZIXzgpZScDIVO71fL5SpVvoPTRzBDPw7DDjRjvD63jgXBnz2vFlvbbulNO/1hjNrqO/j0tmhG0fEpR9W1fOq/twLityztrl8nZLTQjurOlbs2YsIt0pBSqppo07f+MUGJkWPgtENCpvX5vl8vnpleCKEAOpHO06IFmjoZ3L7hyd+4iyfpthUFACSMZEwYpdl/1NL+lrzRMhMjfLPzTyftRIsTsHp5az+7xwpdsBKBk2zgETSlKTRy4hsCs0rpgfjiFnLFIY4OYAjJAeIsySRV0uQbSHn5dSY9DXGuhYFC63GC53JeDRBEM2OwluhJp9jUnpmzXrmFI7hhydM1eDOiWLSnWOt5Tt111Y5tHePHujAUlf1H5qvLKRrtg55U0UzFgTPbzHQHhKMWatSVS/H+f02uv+5P1HbbC3tq9H+3UCDAE1O3HXcjb6eVyI2h5rnxHpj63vcRC5Rlx0Ir3LyLWSmPe1XsL4kzK972ZmOY7u3vigdjvFxgHwvurjk0MBH1ejx1vbgE0gvLP12HiHqwChZc17+JuvgA+3iO99GfOL95H211ieP0OcMtd5lsaCKscr2RAuIAIQIAEvTqFSoRRMaN1IYq7fkwCPnj4FAH7Mr0MoQGGtULAqX8nwpgXmWtuhBaLaGd1l2kN5HYRqBaL6ursU+tW4lXqPTLdCGMH6XgGGgo/c9E6vq75PSRoppgTpB5QdzsXI9nIC9guXrtiH3Efk2WHBHBlXm4DPbQfsI+P5fmkajAL1+bgYfCN129r5AANwmuLZK+wBAqjkHpNGafKsSQDBZlb78bTPrtr1y2f3fzDs2DWfWa4H6sBxgKfdzbpM36+Es/AGacFzSo2jrvtVAUYsIEPrOCzo0PdFAW8RGWrmVJ4tC0hSSiUq7TJroIBxoxpnAUt/vEB3zvP6dbu6neaMOAevNRiBstS3w2705bUHJv25XwNK9TMqGZZpYWxy4XmfPTpn50DJ2t/N+TDbmBYBR5oVIZcbizoUelMghynKc72nhCEBYbzIJ4vAuRkiXb2NFIaS5cAyw4VB+pDkpq4ASsNUGodC+aTdZW6Q+jYQBvgn74CHnQRCtk+l9mxzhSVJFvWQxxKRca5BD061l9hafK+vrQJVOmc+oPZcvWGMBec9yJ8Kh35023y08/ZGAxJrDwme3DdLosv29tDC7n4yt7Y22Z76rO8H0qt7nVrHGhDp7RxHXKOfSus4da6PaWjHQGvNONM8OG/nocpp97XXAYycMgviHmL/5v/3f2mih2u8bFXwUnrY4Ck3LhQnQmpGpJ7EHZ4X7XytESn/pj3itEhGJIORNSMvMpWaEk+R4UdCii5TDmrmQtdhAYsjh+1bG9HdN/SpwqHOheSyjmF1H/Q3FXxk6tXFhQCPHGWUk1Y7pBcKxDAWJwBEQsHSuo9hJ5ztsDVRSQEiMRngwamhYKQcXa3ApP6tHbFTUoc2Nw5laa55M+trxGFhDJ4yUOHceb2qqHnnEJmz3HQ64nzrmHakhHVPO6Vw1VC2uoxJ48AcjRN3r/vkvpwYx+8c3u0CXWZnzWwG3NLYNGOrjSW5yx4c7+96ZsQ6vgpcLCiIsYKSlO+Tdg7LWaDkkNgdZQ49p
J7Akav35EpMp4CPDvDoZ9a043fJwLkus6JBEQMctD5Dz11PnertGKQ83M6pbdni/1PbWDhJSxLIc6r1FNHMW+xyXQw7EFJtOJkcvA8AL3A+yEMfgmQ3lhluyfK9MUqPJvLSoV3NBEVos6s1auMWCIOMP36UMYg8EMYCPBphC9QxR+2uZ765zxWg5GutQbVHe7QP0z41gOSUnXpozj1Y9x37mqLPbn3euXs1ujsHBHrrQYkdUEqzwy61aqlcpxRoZN31fewSuv1Arcu2lI8KuloVr+PjWOu4XjjdZkc+DBW0Zrsf8vpe1WbW2pHqEZBJlT/Ent3Ubr5jIOzGAE8OT7YBYxDVrCdjQPC1d8g7uwGbIIXqKt97EW/hbjM9Kxes87P3RLr3sJdiTAidwHuRuLWAJEWG345NxkQ+l9/xyo0gNCxfgAuNAT6DEC0YF2BAtYATuWOx9zXC2BvHyq8GirKV816aDeokH2rxJ4Cm1iNlYML5sxg28nmu/+CU61xYwENcEhJiud/ZUKzKbmmmJFWgElkcO0752clR85mlU7b2iNCGdYBw1w8L472bCXNMeO9mwpSzKZETLkaPq20AQOCgyk443hGgOKWnAjRlHPF5nGJxwuLK863jBmfpITqngPEB7BRoeGidGfUBnUZpsB0/62/0nc2mpGb5vknjKWrSyToRAwAUjGiWJCqAX043qiPnckZEnjvnHGJuxOjIlaxJomPgYrfLGfj0QMiaW1jWGQmcgwkKIKaFMQbCFAhjoFKnoUXv2vdjDQxUqpmHrbUBjutwrJ0CNafsnExxr84FoNRjzDFhIMn87aP0lRnycSWImIRnBwfGNowImwAG4MIM5xzcMIM2O9DFE6RlBuU+JNJTxDxUGhzJAhm0u8x9iS6QyIPHC6RhhxRGRBIwclgYU6yUz5SzJQkind00ukxoGyHeYw6yvo6ck4cHaR/t0dbsUwNI1rME9/utBSUfIBDzsVlxAk5qDrYTqxb+KhBZpaAZR+Oc1OWr7Oepovw1QNNw1E9cjA+6fyrBO3iHX/uN77zyel7VmFPuv9LSYQBpWLjvaBz/5j/5D55c102mZ0VOeLINZaL3JAXq2yDZkE0gvLUZMHiHJ6P8vQsOVwMB2jV9meCml1Unf38tjbuWuUyStVFgAExyIkUGjaHSDToAYvnRCkBqncZQshNu3IpaVY4AqiqVbFIzGLWQswEkGTQljqZ41Avw0NqNnMlg8lL4CdT6DqVZkZflqS6/pMrHnqeWYqUgvPjrqNHj5roXQFI53FprZZ2E/SK9auZYm9YVkYJSlM3YL/L6Yr/kyHq9F3ajF/oQS3O3tdoKcqeBSPk+3Q3kj+rdYv39fZZf3/ZaVLv+fu37Y+GQ09vTHiOV8ScHe8557UVRSo1KmUCcgDF2q1H/PjvS7mv7mQIAmyURcJAzbyk1WQxriRxcciB2SAQ4SvCQgmkCECEgxSZIFJisra/PylhFW0cpZ2USUki1d8+KMx+Noz8GwrTEfJ6Oe5j0f9ui+L43ynom6o7o/5msibxfpwRLxlFoe0q5lWdYMpZDlONZYkIiYGEndWCeEIatjCuJgTgBFOAoSJ+ScQtwlMAPM7To3ZEXda48JnHYFklwEcPYIIUtliQUzwRR4FOwK/QsFJrWKTByzuw4UM4BrQsEvSnyv499SF5P+9QAEuCjSSHaCdtSHU4tp4ufUpXqH9ymLkMBhokstoXoOqAf72cfgO6XIQNG/IksS/v7dmAhs7+WKma/P86saIrbNZ/Z+g8LLvrr1xevtwBmdbfvtNro8dV+/2EYWQfGZJZ6mt19aQoaYRwD4Wo7YBMITzYBAzlcjQFXo3RTv9p46SEyZKCiTQ3jDDcfZJKcp5oNCYPIUmbaEgCEsWvUp46/AQPWGmCgcpea9Ri3pXajRAHHrfxms8uZigwmAJmI5QTK33ZDGlXM3ZHtd5rt6BWvStG5Zki8kcvMDQMXTmDoBI9SbK5Rx5oRrNftJ
PWx4ei3mUTrKNT39Teijpb7AjEws2QkKs1HOk2PgXAxSi+ZwQs9j1ylW5bLdo9bqx/LtD5MvkvNa+zu3aNjv8cDe67Q2DtngE4da/pxrh8H79vcUaxSzaqUsd1+e6zcnZems/lK5mPtWCdYBz3322EHkAALApBSzmSwAACgplMduXVQwlLbInQ9hwgudC4fKANmqV+jlXWcoogpIKqWsx5Bal0cOQE75LBHO46dKoIfg1w3T4S17MTxOKgAJu+rySjY2rn+s3N2LI1cqWV2HRIUSCWbCTA2SUDJTA5zJACMmTwYCYETdPrzWcACiSES35PQR5nhhrn2D0ncjkMq6+tH+R15kQmngImBaZZeTDUbwoW2Nef7RDOsa0DEZkb6+jH5fv2c3aVa+miP9lD7VAES4NVThvpA9Q9V4wSvgJO7CjLXttP+7Y636etk5+GaSfY+YKTuT6uMpWCEnI16ttHP+0Q4TilueeNo1432oKJmbGyh/iHaSWV9u2tdjh9qZGpUPqmakoEcOJ//mSvNzTozaneBksipyGzuxoC3LwYMRHhrI8Xqb22H/N7havAYc4bE8wTa3zbd1R1H8DIV5164y7nYuzdLierRHflawKdAxPCgNdqX8uQKX4EHa3F47sWR8jFqETiAMsETpLCWXG0c6ACApXkY+q7FFnxkwJGg3Oo6Ucs2JUuxsNCvFIxEFpCSUOkPWr8BtCBjzSxF00rxFs63cfp1fSLHrPeBAxOAXOQeOeHlfsa0ML749oDPP9kU1TRyWYJVn/VuoDnXq8mOZWtiFUWkwjjlvfO98HomoLfeCe37aPTL2fdHdLQVW+svUq5RUcgy1yR/RaYQvy+QPz4/eTlus5tr58AGYWyHcAUlnhw8HPZ5GZd7RTAASg4MidIzodSFWJBg3zPJQdlsiRbBcwYjHI/V6uLCZV1rNLGiZJc0s1JpYrpPPhKWXPhuj3E0xemVxiXABKA7r3nZx3Kc1GWi9J7k5ncfJKve9GkhOZ+zq85+8NKj5GaO2CTKhe4AQehcC4lsrieHcbySbu68wC2ZcqtjlppK9wJNlrYEQ+YETjHTRuU+1GzIHGtgQ2njGuQAWgACrIOQ8t0dp8z6QX0m5XW2xwzJ62mfOkByX1ujKqyhfE7r0cT+9/cBI6esrv84ayJSt/3yPapZO5ZKy1hTxtL48ofB+ewpHUpn0OM411Ct7s/dxe/6Gy2e/cN/7Usg9+FI5X7U9kM/8bOtE8M1ytZEVlPr1Hly+I//h7+bm2/VZV7uF/zs+7e4GH2Z5J9sQ2l0qEXrWy/cbSm0rc68W3HWk2MBGZk+APJwmvmINfMh9RstGHGWOtVRqpz3lXJAQQoxtRlgqdeo9RkxAdNisgTcZjwEfKjDms+Tc3BOQUoQcGJxcfdsR67FutFel4RSkMrdq078CpJ0wldZTeDu/hbFKeD6/JXtdE6/tbWJv+k1Qw4Xoy+NLYUSWH+vPX3usvsUrZ7qqWQdvrVu4h8GDRToIu2pXv/eTo1tbZY3jyfi7XfL3aFEZcDZ2rMr3x2fG3sci3nO9VWPr4AQckgpZ0vYgQhIOfthQQnQvl+zlAG2NMWT/XIrF3stK6I0Mf3ems3GJEqAZlLy/s25zkKPzZ6nEZay1ha9P9R6MGI/W1uut5YO3O6LFrdrlkQy3HlJrl3clWapoMRxpsrlc0GQ7JenAAwkFyV1rphmcSHZ0MQpK2ShBEo0W6t1aAvX+URBybkaETsG9bZ2eh7rQx7to7ZPHSCxzvF9lbSs6Rhof1ojZu2y/erXFGLODXxHn+XlWzWtnjp1fh3nbOm6r8v66/uT9K17RjzIOj3NcVSKF7l6TtT5qoPn+e1z1G28eRro0tW3OpGHKCBknyVA+2WtBXJHy4yB8K2/5K2SNfI5Gn4x+FwvEjB4h6vRY+Ol2aEAE/l9ogB4BsYLJF6k0DIlYFhKZsE2cjo627bJU8k6FG6C1GnYvhsKQsJYNPKnfN2nmLAsQ
JwTpjgXqpTlPQM1ETMQGXWcvMkGkLgCSAj5c7dyDMYS0IARvUfnPNnLpN8Whc7ZCVFQAtSakLXnao0qyd2zsKZcpRlNcjVSPzPjvZcTIidcbQd4cvj81QbvXgxH29F9uU+TV06ondtZna71/j9r45zNCtR/fOSkn46CS4R8QuukA60iUv9q6aL3iYhL/UgNAAFyre+yUwCrBx76DPfnwAYaIkuGIJCIFBxRmxZ5nciBmHLPHkZK+XUNNBhQZWtMIlLJZqBlVTbBL/tbfV2T/e17/ajsMJFDjCQgLzK8J6Qg3c2JHCbnjs5Df10tEDhndj2n6kpeBRC34LC9V232DhCBif2ConpH5MApYshg2cHXZxtOmg7mddYxiUAuS6EbkBtTLGNhqV9jS8sS6V4df9aAxxroODfnn60pewWA+Lqao0+gMeInyRN/Q+xTB0jWrH/GXvW5Ojee3Td6cBeIOPq+26j9/qFOuUYG74rivopV3fLqwBxZt901MHIXXUmUflxTy/zH/vpPF6ccQKmVeTJ6XI2h7N8nYf+f/9/fL4dtpVm1cP12iiVKGlYcKY3MWaWeQKLx/2TjSz0OOZSsyEDS7HAbfMmMFEGDkiIh8fC1piIxUmKAfeUwE7XAY8VSD0y6+ozSlyODkIUT5rkWX+rkOueIn3KdD0vr+AMofWSGLFUM1E7tQ3ZkHfqsiUQkfXGcBKToe/vYssp15slfHYmUKhiJqU76DQ+b2wl/9fMzhe6tMl0bNdfrq8ek2RoFqbucGVFAKuuVY4gGRNxn3DuXGbmvrYGR+1K3eu7+XbYmC3tfx/M++/SQ5RtaT/OeV39ngdPacej7EYQJXLIlzBAalPAJQaT3lisSz5zOU7ns3z0O6wHIUTaki9YROSRkSWEGHCcBQOQR89r1OwG67Tmy1K167OvPgT1/FozEE+d+7XevYnptmiaXPmfHcs0Rc24I7CkHUoCBcg0JOyzQepLa46Nm6jJFDi2FN6VUxkZOEsCyIGSO3GRCgNPgo/8OOM30sFaCtPnLTxMwebTXy95oQHIuom6VIGzWwz5LZx+sM4PXuYe8j3CeVW05Q2e66/cPqaVoCk5XorCn1r32+3PbvHNfOj+DEwCfrwvr8RzvW1/MT5lT30sZy3cohbwDUVPEXpy5tF4D81GYpu85VUAyRS5ARLnmNiq8yRQsAOXV1r2oMMDGUzlW7cg9EIHI5axChEOW1nQJKRGcS5hZMiWeNhi2W3HiHTJ1gI9rL86ZAhJTn2F7ccQk0duYz4FE0MWBmqJMujMn7HNNxMtpgUpXKo2pNtyTTSkgkeN15TMFLHqutO9KoAxMigOQ4J2J6q5kRdXIZYct5fuKE+QTV1XSEkBOHAPpYi2LrBWFrt17tnBd3qD9e8U81XtD7xdLQ1IHRa9B30R2zep+AMjN0MCiCBdTwuAB7dINMDycRNtLcfHxCTyVRVgDEDUyrQ66ZEz65e+yU8veld1YTnx/bj3nvj8Vte+BiFrIDm+fRYmcMLKchzjI33NkhKHtURIXLhkTx64UtvfZjh5oNE0WzyiTrRk5qWmRx9+VOVUyJSz9TygV+lZcGMg1JEfnxZxXVeDSe6Ce2z7zlPLy8vkaKLyv2cxI388pZlptWcYEI3R7AkyQgYnQ4uYMsBMBOmZoDY+sqh0jNABi69V0LJw54bDEAkT0d+cYIed8nLX613I8Jqur390XmDxSux7tVe2NBiTWehrSmlTluTntaLJeCdKtKUqcAyPn97c6mK8sg3lPCka/f/fKQpzZ7l0O/WDWabffa5UPPvOgU+1+PHd0AtnHui/qmA6+d9TzMtlJJcqNAEt2wDhAH+OAqU3thJpUgYh0U28dFgAYg8eTbcBu8MWhLkIEVGsDiBy2+Rw0y1F1Sg9LdjLghSqXGM6JFOUanYmcyzQtOqI4nXSOk7mvEq82/5Pv2lqMBNk/TlIIqj03nu+XJlNiC4aVnjaQywDElS7kA5kMGQng0uzR4
BWoodTiBKqF8H0j8zU4lnFMpmyIBKl25RbwpwOGOAxkaBN99sPKWa/LYd8dbCgF6+SKWhFQwYg6LArqvK/npjcd11bHzDLYKciTY1CQoqvrFfbkWFvn0b4/lSHR11Pg5D4Z5vuACaWWre1Tn9Gwvzu33XPLnzteu992/y1w6f81nd3zoOkoFmASM2hGzJmLhQsY0YJ1LVJv+o5o/yA+HohtnZjLgY+kmcck2ZmUs5OpoRoTnOPSAyUujOhMnxJDV9Nu7Fr4LnacYdJ/a71c7Pk+dd3WMlENZS7fgxuzH/balG2Rzic12EUKEhh5bOBMva7qYP1d/P9n7/9irWu2umDwV1Vzrb2f9xz49GjDgYgJ6sVHJ8qFJob0DQjxcLQvEG5IvACJGi4wn3JBgjGttHbsBI1BTcuFiWCCF95wYTAYIk0Tu4kRIjExfumGjy/+yQH6k+ac857n2XutOav6YtSoGjVqVM259vO+73n2YY9k77XW/Fuz5pxV4zd+44+sRcRMLI2BwOvrVgwz0rhlXR9fQxlDFJiwRBtMpLS/lQHnRsD3LslLUPu7Kc8akLDLBKP4opS6Ckpm7P9swresmFZmLSkWuPigYh2s3Pvy2uv5ePvxsSz3gNl5pVjpf/W5AaH4WFabYsmtVmZ2NbCUIqvIo3bPktt67/Cn/sev3L22j0JiVtAvG1VgboNYS8BNsQJKZUyKtaz005bdNhzVBACAWPonAdga1y7kM/PEmBIBk4QEVw+ZAYo6lxJeHJUCnhIDkpohiwFJTDSRvr5uBZBcNyowxjE2VdEXz2B0gKdCf7zMx+zGkoCSCDjShfr8jDJAQKzPPY0XdCz5GMvhgsaRCpaLWpTy/WBrsqO+10XFrHGiUxgOjg/aAHLyLhc/ZHc1ZNDZxqVImYEReey3lRkAsdgIAE22pZnyPnLjkefeY0hGYMQyEByxsu+BoKPt1aBExploxTt4V0GJYDyiI2DRZOPi9QJ8MGvJQCSuVySug4E5IHE5w17ytahpWHwObM9eCQAQchxLZgUQUdbJBADyvjyusYmZs5i0ERA5Ckgs0De6T3vzpJyH9lhIhvXNWMXrUhujxoVRyfUrFqByjW3WOyne1faOavUAKJ4jt8jouiyd6Dlk2XqRd1OeNyDJk29QJk4aGFBiGmZy1I1BZsCaSVNzY3Js6zi7rgDoB8enAqBbA+LlQNcdi+MTeGKVfRDqgD2Soqgag6wUC/BYlvF3KeD9YY14c90KGJEKRVVEQgmWHAESFrbGIaYS4PwoLLo6tfPDFksfcQrY987EmISYWQfxXI2UcykcawFU0EFt60EHxwcxuJCB4K+vMl4kZh9pdkVo44oKA7BRC4jFSQA8QspgrDwL2ToJWn6CLw9PzIoR/yYXinrt3JMcX0KbuWzgoHUp1yFw3iHBYUtbY8SVzzF/WuOCBtNdP4uXQQfLU52RyhJ6R6wh96McX7x30/cP6N3LRuMcJ66IW45HE4pVqyjGXB+F2i2DvOX2gK0g3i3W01eBuz4Wy4hhsNvYApGLauNIqR0xG9b60X5721rXKNv8KBiS84Ws+O+jPjMrAOcS4krAgt2ytjVivRIQWS9UcyiuF2xr/n6lUWUESFwIOSCYQElYzgRO4hlhCZkxITct5x1cdsV1q6MCjVuk9+0C4Nxf32wMtECuZIyAOo9YtVnoGuRc0Z7rLNgZFu26BaBpIzPTHKTO4y4LxZaQIcVLpV08pzJxx8MWS1zIQ06E8rhWQ00FJLpNaowx2i3HmltqiDSlA9igNXhub1QrvmjivPvoGZLn0jlfRHnWgMSaNMMAEMziIEYvV4xsYZ2nwbTEYg9uNEqYsqlBTUuxHE9cQazt3+bcshkjN7I58CMlu2T3QW9d1veIjyfdlHQMwRdT/m+/8L8CQLFmWWlQgeqvLCc57WIG9Ja6mFAeKFZAg6OHLCLhlAMrT8iZmXy919ct4hx8tlw6KsUQya8Zqd47dsPhplggRLMfv
J7dDnT6SRmMWXyjheXPKrT3VJF2ivJe5MMGODA2caBrTqkFZAxKgP79ZTbiqTJ7P7rA08F52DVSFzsdHbcu4/Pst7MPer39ovcUfMtafSsTomtajM6jmZlZG/X+Ovj6KNPcW+FvCdrvXcjk9TXZuTJY9J7GRN3+JhtXTA0YiesVact1iDABJJGAiM+liSSjEqMvzAgtS3DKWCjZkxgTuTwZz4XuV7lOAkcJRAA09VNmEgGyuBR30P17yH+LGKeBbFhwbfwMz0tegZPiYRDbsU4myqg1REQWv8guXQRG9Dy/RTSJXj4oscCIJU9hXV7kRbQ8a0AiRVomWhce+jxNkn6OFHVSkFFdM1jpy8GeZDW1FH907QDybuWlzQOJmOCPUP+83QclMzbBZEOKZbkFBNIqz0yFPsesVkCXKcSorVLaoBgRPse7AERYPvdwLffz9WUb3ldpgWusbr69ppjIKu1dZUb0Ib1D2R+gFJQ+oWSVqtvVY4asdJMRj0EGrZdpPiXwYEkiIDMllDS91dWgAhAGH6uoSN6wIbFtG/dB+e7sOBoZ1A60oFQDVQBU8zFbK4MnAJay6xaDEkDUaxFgjEEJ9wFfL2UPYhdS0c8FxbTvemG55E0U6YvLc+zF+6DeRY6Tku8dnzPGVMY7qw/kHZRj3CgDD4NNWbSzKFBZmdLsiKXQA7e5Q+nMS7lVQC6epw0kDDaOgI76vbZVr7eOw6CEz8mffRxCBR1PYUbadlclltsrz8dxGPy55XXSIsvuWjElbCu5Zq2XNxmUXLE9vikABQCiAUjCcgYA+PzpQizLyd0rIMJnF1zXnDfl9yBFcuHkYovJO7xBdtWNT2BIBCMiUxTzuYE2JTHXafHq3nEclvxekkV4KjobvKt1n4IvjPP9ErAEGovuF0p53Mb1MYtpz38x1vmPE59cYx0rNTNiz8vCSCeYmz1GozvOyDVrcBwZF/oCSl7kbeVLBpCwjJRhLR2DIa3PsTIjbDK2ioVZrId8cc2XO89TOqd/adcElNwyic18SIHb3cnMSVUpQxKMNGl4hVKkpcQ8xASfEmK2PGsluj933l8on+EtrdZvK//X//v/B68vW1EO7hYvMmm1cSL6s1VqeoBVlVn6LYuy8fYlhgMJwVH6Mo4lGRenrOxALVpd2Q+gAhEOvJSH4q9btuTFWDOLXWObxrcBKYOJVYIxAE02NamEt+xA/xzwscxrjuTKhZw1h/qggpIjkthamapS1LhJ8XOcBwn9SmtXUESa0G/JADcyAkgX1mOxKqQksrubbCuDESlsqS3XMgAfvO7o8hHA6GMroH4fM+S0Cm0fcC9/N+ldhYzAhQYi+h2f7a9lEdfO+9R2kFmfj7N4u2YLFzt0/CBmSZkZ4b+4XhGvF8TssrUxQ7IZ2UUAeB8Q10sNcF9OiHErBVSlRAGcOE6QY0k43qUY6QRbMWKdmvunXLMKM5Laay1dBnrvdbyhHHtl1jr+TYCEPhdeFnxOlJEZk5DHpOBLlsMjYITbtAljHBdZbVgTgxkZiZdjoZNzbz+XHBF5G3TfSSmeJP74GPrFlJc6JO+mPGtAUmlR+t0xEoPJeM9a8BSkP3px5TpmVKQLmMmwhLkSeaw9TwMjo/WaIrZc0p6iHFI/kFbs4QowkeecSVHMt6p8f7HSDrILwZvLisvqcMkZnd5c1jzJRjH5UaYmCH9lnoivGz0f0j9/lvlMZx/jeBH+fZ8nULbokcsB4JCV/3wcp4/LfcvrKQS+uDIl9HEi0u0gCsWBreucZUu3n4XrikhGpH2e2vfdet5ujSFi1y2fA9MdXOO6Rv2ft0WtSyKtm1p4e5l6d/48C+uGmreoHf2zzUzLHhupDQJFPxX3gedKWbdEMyPyN/u8X7Y4BAP8rI9ctGYgQmZeQv7O2+uUsEfcplrQEbs2zEAVXwvQFme0XLEsY4MWKy7B2mbE9MzauMV5pXa9TsaDMMiIcctxIVtZ5sU2fjnDn85w3pcYEr9QD
InL76vzQFg8FVwty1z5/bYSHKcbJtajBleLa1OZ5Zync59CTav+6hwK+LgrbEgoYzQDEV7GNZ6IDaFYPM76Vw0mehxrn4Uj4HlPZCp8Hv/02CmNopYO8lSZuYe+pPx9kbeRZw1I2DrYxo0cm5RnwnOypiM3w/xuARGdeaOKcPPKbl+0nQUAnm7VeIrYc0RvBZHbF5/Zch9s5ZAtRSOJ0SG7JGPzbcyBlE0BFZnVKbh++UctjxmMfP5hzUrHCqBW2mU3AGnZZZ9kli2mJoWqVpykosMpbHkyOnlfgAi7Dtwv1bWA1/G9qrnwa8FAWbwvpRzMrUBIVGDEqlwufaBjYgYlmcBKMiImEMlttBiBkj1sAkZ6g4R4nzJLkpKMIUldjEgBJEm7alWrpgRhMkmDTNGpGda2YnhuK48LvrqW1B3rtbBl2ad8HcE2BHTXz+8RX2NmjJrrjckMpL0aAeDSXYvuJ5m7JVBYB0zGTDkr7IdS/ltGon0vZseyfmtXLW6rPJcWzWiel2C6b2ngcZTh5nbouiS07liNIOdETIMTQGBLTbYsgFyvfHa78ssZHjU2RAIVv5xLUPtyfkXXdH6Vg9sD/OLpGVx8SfHLn36h9rC7lHSb0td+tJ/YcOjUs9H0gzjPOVS3LA1Ezosvblly2TlUFyyZGGQJlHad3Ua5DhKnFz8JpqzVAOZeEMA4Zix4FBrbqsnF52TGptc/qAVSB9lLJLMneniSrrLvssjn+qM85y3y8z//8/jhH/5h/NIv/RI+85nP4Cd/8ifxbd/2bdN9fu7nfg7f//3fj//4H/8jvuZrvgZ/7a/9NXz3d3/30xv9EcvzBiS+t5YC1V1pz/1BB5zdem6pMI9Ykf6cwhBarDqp+2296CxHwMksuP0WtxCWUXBb2y6AnXj2XK6kNGCF/YaEiVrXSbDSK2sQ8i//598o+/0f//efPNyWtxGp3OwVWaOJ0UZplp+9dmehP5TJ8uR98WOWBRJP3pcq5nWSyhOlq/eJb2Mo8SOoLhWgFLrMlMhnWAdkSgVd1h6RXTBPvDDoXMjYi0JpFOWar2PPVZG7fPT8C5zSLS/EgnapGMzofO187/h3FbofGwOLYmGk5VFYfnWR15Fw/QPptsXnlBZMCfr5nsk26vsl3UVmrMJR6++RGBMd1PxBWJaPsDOWzJiRPXlqzJ/FkBAjZAe583c5Fznf1jklpoJBfKCkDn6rqXyzK5dO8QsA/nRq2BRiRk40D2cw4ryDz9qyBCXeuwYkMRur2ZJZX3Xr8nO+pX6ukMbBChy1K5aICclsCC8L3hXXLDbsSLfRlg2pRVnZRWt2LdazJ9lkS+Scp2Pr2u2G3de2YXAuOVYA1aV3JHvrX+Tp8oUvfAFf//Vfj+/5nu/Bt3/7t+9u/2u/9mv403/6T+N7v/d78RM/8RP41//6X+PP//k/j6/6qq/Cpz71qY+gxW8vzxqQ3OdK1SyV2TAABg/KZaCyQYhUbC03C9q9+ktqkRbdmTSsTuPzjeJ6JNsBSMbANcul6PaOYwe04nbQKiUAYG1DqyQCOXe6c1TdGQCi21UGASAESkDA7jBbqnElbc2EfNhmmb72j9Z9a6ScMQvSToihYUxal5TYZeXiyY0n0JP3eO8U4J3Dx8+huAxIZoSK57nCiCyB3OIWz1ZTul3aghczWyDT+KbkciyEK8HsALqATA5gl7Ej0p1rJvq56BgvAUwBFJc0ALhbUlEKdIA7i1QWaP/2fLJ1q6qB0jyPSpmXLBH3lwzer0p+6652RSpuFjSx8wDmsSVyBdGgxJLIhgBRj2ZLNQUy73dFG+vSZj9LTbtl5jPtnqWBdx9s3LtEtev3wbreJnhXUgdb7lPyc09G76kFAGYB7KNz3mLlH7VpJhTT0O6zxlpM8LJGhKUyh1iAsPkcXxHpe3TA/cebeBIrsxaADoSQqxazHwRCwuIbRoSWVWaGWREGJew+Jd2KWuZr3sfz/mmPx393xvhbAQkbboC7z
KSwkUfGrMlg9Vngum6zvLs122ES40k7PppV150DfJ23NZvMp7R0EJkB8UiKbwlMGHTwWGSBlqb/bzR2flHEB3woacn2znmDfPrTn8anP/3pw9v/6I/+KL72a78Wf/fv/l0AwNd93dfh3/ybf4O/9/f+3gsg+ShkNEFrYGKzBL37llRyRxlnZnL0RZQWZv6t15fzJsEM+NtiW/aC45s0hOJ69zJtWFKUIgFKAAIXJTuZeczJQX0CIu/fW62laKtuWfYBWFWPysidQqb15SBJPVnWY/R1EljK9poVMeJEzsHDOTQxIydP7gOn4ERldjp2qTOeAIeE5ABER9+TQ3Qpu3gRKCHqvwIGDsiMMYmUv3EawN71n/GudkxYrJXOOZsYfUcBwIVRC/XZ7i2JtiKpz6uBCABRUyUqYNI+czJuZhSU2j6ersZUATgSZG+9V5KppAO173cPqGq7rUDaklBBPY8WGLFkxKRYoEN+l2D8aND6Lcrr3jFnsR5PYUpuaYfNjtT+sdohPyUrwc+CBgP8nDIzArSuWiz8nWNENBBxrrppeeewnDMYyi5NGojQMVsm5wgYGfX32J2vGoIAdECEXbdkXEjwDnfCFYtdkGQ2P+2KbAERbeTR900bBeRy7ZosxXRHb4wutjFTizUvWu60PHZqUCLXvcht8rnPfa75fXd3h7u7u7c+7i/8wi/gW77lW5pln/rUp/CX//Jffutjf1TyrAHJ4tvMSjpF5hXorIQh1pdNMxFy317ZqgqvmYVq8GKOBgQ9to4DxSQLAVgphzc1kFRqt5/A9O8NxsDk7cFuE+eMm3ArK/1L5z6VCyTrKqWfTQ1LMhJLEeRz622kBDHxfoQYpBHuV13UrU0pGcr3Rd2fERhhIMOTK0+e7KJFPsytexaDkHPexgIi1q1gNy7Zx6Jr6zLQ5LylDadcy2TLn3fJU1pdflY8TGBSFO8txyF54BqzVW4WCMQ3OG8PAFgpzfE1OngXSUnY5nFNADEKV4UjGVdaTAJQ4ygYmGil3mLmvHPmu1YYk5Lm2nEZh/reiwfaUjhYYQBSiXOL1k1rjoumzVe+NlFYUScfqG1OnUsiMyNAH18itzsqI7Cit5HxWFr2AIM2BGg2RH63GJJZ1ryjovtkD4jw+S8rvS8c4A+gSf+7RUoPzkCB3bZat2BRxT2SKqBdqHj8dx4m0+EzE+sXyYzkfhGuW02qXWf315FPDTZGbIg0+tw142+Np+sMO75nPpjBrLFqkqGt5x/dcQlGZgYNGW8n4zqOvDNsWIyJ5mUuEmtuG1s9RrthFjHnaZ7w03S7l8D2uXzN13xN8/uv//W/jr/xN/7GWx/313/91/GVX/mVzbKv/MqvxOc+9zm8efMGr169eutzfNjyrAGJVOLlpCpfCM5UdOL0sq4qxdGNJux6TH28vZftVqPZoQxgEahDGwUee6Gn6OwWVoxFOdSg+Q1FmzvAuxawBF+Py2lmJUApA2FEASUERFBDQ3LV2pF7DrdFkg2bGkC7Wg9ASdUsj/W2AXu3ykW4lMjJU4IQCUSAVqlilxQ+jhSeVO8Xj7vgcb+EwoqcROClBCLBA2dfAzI5kJ3Fq/klqWeGwYn1zLO+e/IeMd+sU/AE3gMAxPJc0v2gyurXjRX83h0tbiCGQzxj1L72Nz93XJeFtnHksqTcGSxLpwy8DCOlWwLg2FotR0AEQBmDZM0OKfxudn0aHXzILFEUSqgwRrCFEmjHiRaU0O+rAUbK/ShtlK5kdjawocKC1gVrlLkKOG7t31O+9gCIFF0kcU9Grlj9ZwtCRtsfyaSlZS+rljxnrSoeOsMFu20BHmvwcL6mx5UFA7elLyIoAYms38HrmoxZHk2MCDMiXriLyRiOGdsxYzn0uGkZeKyUvcxc8PgngYbMQsjB6BJ8SOYj+DYwXdsO9TgKtKnRGYxotlW6bMlkGEekeedjUsBgbNBpjCxRvudqwwnoGBkmy65fLIvgLeI9dq2jH8Y5AfyX/
/Jf8OVf/uVl8QfBjnypyLMGJCz6ZbZeCMmWbFt+mWEDAA1EZsyIlFvmICsD2PC4Wcnn88sAVwlKZBulMiddHuQ+1jXpoFt5bXLik4kD5PljTDXQsLhv0SDmXVa4BkquBCJSMWpcYHbGuvn1fLhiW/Rqikk5qc4UJal0yb9ztkZ2k7mrk6dzlLbWZzDhHU2mzrVZtaSUCTVb5xNy1fZIrltbooxbyaXCrLSgEORa5+gmh0jAv3XVG9+HLaaabUq4sXdt9RIMV4BOjc/vbD5OTImsmtlSzHUOriBDhM8xFtYYYGV3szK7aSZkNA8zEDiF3CdwpXBc2UbFV21KyZBuE9xGDUpk+622RNdbR/mdboFUy1S2wKNn8aSr4hGLruWONXJHeqpYldtHbbF+HwEio+9HY1u0a9otqX4r6Ivm8cu5F49tjSWmI0ZgQUBMNWUuhGt7A0h8ZQL4t8uV4HVsCAMTTqvLbRgBPautTbsVINGuVwxGJHNc1gnmA6hxHzIgvc2KJQ0VfYas4ta68zjquTWhZUZ0DFr/vb67R+Yt6TIlxwvLTV3XDaJl7bFaEfursVWfQ84F0jDyIrZ8+Zd/eQNIPij55Cc/id/4jd9olv3Gb/wGvvzLv/xZsCPAMwckm3iZubIpL++C0aNw03DVesNvjx5M5DlYtKIiKWgerA7HkTxhsq1uSa4qK7pNBYio8wXBmiiFpVpBq2KkxUqBytZpCVAqW0NtvcZUFECAJoGrUDpZL7PcYzQjMrO8aJe6QoN/xIBEsyEAOteCpt1KAdNAhC1+Hzsv4DgQrlQOqMmgWCSrO2PwtNwDJWZq9OjVmKo80XieXCKuSHA5DXAC1XxJLiHBETvhMwAFB5bn9gUHH4FrBjbBA9e1t6RrRdaykMZU318WZgLkc8vK/uapL310xQ2R2CS+3rYjZu+kTp4glQYNALTLZABK5fRy3QGd4qHbcy1MZRs3U7Yz3geL9dDSWklrm2WsiP6UQEQu01XZ9Xcp2vVIftdsmbXfTGbsiXXsfSW4WuFH7bDS+o4ADdAajXR7ud/2QMmMUZIMQr0/AdviixErpVSLCObfLJI9lS5WJe7DtTEoQMuCjP50H+2JZkP4U8Z/AFRDhNPy3kuGxNWaTJ6NNSYQQXFxdahjCBt06Jp3mwsxreYYu7pOMiPS1ZN1Fp7nmhTpylgwk03Mj9cD25uJfAyW+Np4RsiN7fg7OXa9udrJEd4lcSHAhY847e+HfL5v+IZvwL/8l/+yWfYzP/Mz+IZv+IYP9bwfpDxrQAL0ysDwZRYWyJhg0pxSZkBEigVAdDrfPekKlh3ax870VVw6MsDS7lx7IkFJn2GsTqjSYquVAGZJtMRi4u0t3XbMTg9GNCPTtV+4pND1fLSAxHIxsBQVKSOrMAMZsgi2Psu6ro73yPEh9Fh7h8KOeAj3C3c0n1o9Hn0OXLfEd58B83VwvGC4EkpldyRFYfX9McozKSdVz8+UK+85gxmewGXMUZEDJR4k8L1VykTOYxHQMEKlGZ3lUhhOgmveP7t9rQtWf/xqgNDuc7TcBiZyvbXuKe+aVsr3tr1FmCU5csz2HfXNuaRhYdYe81gj5Y0LURoGiacURZTn7yq3RyAuHlgj5BubjHNI8AFgGIguQYi8ZguM3Hrf5rEhgjEWaXmru1XN3CVT83L2Peme5R2N0c7xOEdARL2mU4lAqdF0RKyaRcyM0PrbjWiHEoZMxrbZ/t65bt/NMAKNEuS8yNPl/fffx6/8yq+U37/2a7+GX/7lX8YnPvEJ/P7f//vxgz/4g/hv/+2/4Z/+038KAPje7/1e/MN/+A/xAz/wA/ie7/ke/OzP/iz++T//5/ipn/qpL9Yl3CzPGpA09QDivv9l++K51v9xMPoM84KLQZatMLyclcSa5aff/0iBxmY7Pp+IJ+lpWTSZhfiSfLGCslIqQVtlNW7JSieVIl0hllPQ6utkN646vrUgQwMQ7TIn5aoyWmm/eKAG6H5Uw
orVnXDR6iZnY9AuSpmw5PKflYrybqH+vVso7fV7J7IUngOl+aXUvjUlJLtqlXuRYlucQApFryIhFwAEpwIWQBeUbUs/1/L+bWk+MVkuQFp00D81QB2ncQvjhtTnvKQujrm6N7tqZXZPph0FUvMeAz0zokUHiWqR72gJ5pXumk48+8pCWs4h2Mdr1g6u6F1JZ4oFW2UBG0RYoINlFdu0+0ZzXy2WIm8xBMG63zjuViW/L8ay2b7MigDVJYiPM1Ksu2M4mW2JtrGL3iZi7VIqDIDuWwugrdFyP8u+iKjjhtxfxpSMQOSof0bAQi6z+vkoELGem/78NSZEx4ewW5Z3ukCqqhPCLliu3o+9GkhaHXBqe2aWau+3Iq9MFlCl8aJ105rNc6VflEGodcFqt5XulnWbD3cilPf6MkM/L3JYfvEXfxHf9E3fVH5///d/PwDgu77ru/BjP/Zj+MxnPoP//J//c1n/tV/7tfipn/op/JW/8lfwIz/yI/h9v+/34R//43/8bFL+As8ekABah7aCtbwTL6Thl102zHLUnUrn+5ZgpN2uX2anu60swUg4nsRujxyEjNE1tut8qArOyG5+i7XDu9p3LWCr3y0LkMxgJAfnkQLY/DaUwSiOoYHLhynaNaSZXIWy0ghby4UCwm4IhRXJ1kjvKLMW9zMDv1Mg0MKxJMFVkMxsiQMqCNkDJKC+i8gFEtMsAkRcv5hwC0CMxxiFkcuOjjWQUhRY5fZUs85YBghyf+J+jRsNIk9JX7kHViwwQsv7ba6xWh51vRLasI4NMlmArJ4OVAXzpAI2t9Qqpc06Q/G11rduWXG6zUg0+NDL9/bV3/XnCIhYv2fHkqm6h+dWyi2/r7zOOl25RE+xRLXaaLudBdA0c1LbRGpxDXT3pT8Z7EvgMxJ9vaM+sPqCvh8LEh49O/pYDD5aprgaaSTTUcZEMf80AES8816MuZIdGYkGI/Y26AbJrTGqtYVjG0bk4BhpeTtIl0s+Hp/b+nwb2Xt2+PP6HADJM6hD8o3f+I1dohkpP/ZjP2bu8+///b+/tWXvjDxrQMJKV3Qgd47EE0FVevUjMLOMWEBkBE6CUjYkGJnW1niiyPTGU8BUxoJWGaMdeZuaJlDyrlZGHdvCV/ssNAO9ayZiXZiuOUc+xZEEAlLxk+v3/ORZYfuH/69fw5YS/qf/wx/oN8zyf/pX/3P5/n/+1P843G4mMvsLIKx8BhixLHWnfCs4K4x3svCfL5Y/tgzeB/KD5vdAB63HRMHpOdYcyfkWmABwKSI5dp4mdmTLAITASEJKPVzlPk/gSdXuEwLQWbmK1dIPrxREQ2Gyqt3vKZXy+zn3nXdALAXFCIRwjEtwuR6Dn7+3I6A8mqP1sSSTI/fRbN6WBvVKtvrOXVGz9ljxN3mPRoGRjJQl60Rx2XPL4ufcig0p24jjP8UlaQZGZkHke99lsDr/SUXcYjdHLEgz9hnP6RaZkefnoHVz0e0zFXXjPhHo8B1gPKqUWsyHrpUEtGyR3nfvuWmfL39Yub1TgewcI2fOS5Fi27yr7sFbSoXGkAktgIiYyJhDr0gq99Mlh+xMkIHG/FmtRhuai1Kq196mrh+7avF6S/LQORQrAQXQs26jd27PrfHWd/WjdJN+kS8teeaAJAergQyIxfUoAnXQt/ZTv+WAfxBM8C6jqtC3WFxlZizKTHTMzUunRQWQM1yBakmUi0/N9vC9/7kM0OWAW15uuRnJa1yE/+6o3oNkRnTaVOvYpS2wQYisIj1ql1bGZrLnXnBEzktoct5rFw5WNJn9AGofWQBvlhufqrCzxbCyIloST5T5fXAAAgOQDEwkKCGAQS4JMaUhM5JAivMW23oxLN67UhODr3PLLlGnkLNMLfXehKjdgeq9uxzoe0tJvfjYKpbOib73hS25y+mKm9odTdrpFoxY7oFWXIuWLl5KHUdmuNqzdMq4G14mXQXPiy+MC/fxo8GQSMX3FmtqVeTF9Q1Qqb6vW0w4g
9JbS3BisSf6vRyBj/Fna7UfARupbEuXrZO6Tl2Yzrt6nCbhgCHFUBYT2FW2Pmiexn5ZzPPgmDRzt9tzi2o+xfshxyV5ffLauZ1A65lgZWyzlOVZ+0ZGHd02Fj7MdaN6RJRdnsabKwjEU70kYkh4HKY6WfnaI48ddCwxXdLvAYuSUN/jlKqLa3Enz/OU5aql579mLhdgr17rOEOlrv+jk1CMhMfXEYi2RL+nm7hf6/oMAIn3XwSG5CNOM/wM5VkDEnZPyY7t6DJQAUPTgmYZuow7xmCn140mn1mgu5RSUdqgYmdiWdubgSwDGs6sYgGKvjl5QMlBsz63r6kCKyy8vL/0wddg5BaxJjnZVr4HrNhqq5AUXYBuBkj+L//6//2kAFIpP/z/+BW8vmydRVFbUuUy/n4ytp+lp5QZYjggk/5ckxFG3qMImpwjZ49xokEMRvJfZDACnljta3ZwcCUOA0B0uHKBzKxjSfaSWRLOjx840NwYo49mGaLj9Bba4HsLfMjWUg7SpNgNhyCusQSQU8c0+0vwoX93bIYwMFjLZXtvefQ0GNHXLD8tEDB6D2wgOHa7GLnmWMtlWlrtficVGglKtByJU5gBkT2WZOiaNGFC6Hdt44h537u/xQiV3yEAaNNlj4/Pwsxqeb529CyL4eHzcTwgj00AOuZbSmMsirlgr8gmxVmnKMtlwubrM8zPwOw5ZjAyE6u/6rucgJjnQu6fko6ef+faXrk+FpeBCp4gyJYYnBg1N/IhGIiU9yezxhKEVJfW1rAhr0OLdj+3rl3OgRqM3OpKOZLZuGG9zy/yIk+RZw1IKLA3EAXqByli1eBsKf57A54ZA1Imrfx7cAy9XbvSlbYWdkexJG2Ob/rUecYbRUeDJ+8a4DNqn5RR31kir8+yoDXb5muTQMnqlxNbgbwrg7cH4COKm5lUCLdI1iFWaqQCcwRs3Al3oaeAE/bP5u/aqlquK7QTv7T0yUmfY0OCqy4LnNbyHDycQ3bfIiAS2MLnakYslphnyuQcHPKk6By883DOwy80BHDw9xqJfSruWuU4NOm6jCGCB3yicy4xVGt9Sojek6+Cz4UxQcpNTORO6bPfPAd0V+sh3Ue2nmsGoFWc2RpY+zd4306Mi8caE+5yBWtaHnNwOLlxbcnh6kjRCCUYXiitpqJT+3auMPTAeibeOaJ6o6tGBt8CCiuGoGHlvGu2Z1bkskZc1v10nEfceghk+E55tI9XwQixhLFRRJ/iime7ZvnpNkeYFemSpeNBaLndrtHY34PY+dxTDbbO3KYYwiaMRQOSvN1+y8XYYrZlcUB9zUBmR5kFiLXy+OO6YUvVDfG6xcKaSNdEXfPGkhl44jawsYH7riSr8Xlcc9kgkxlQ710piCqPp+eyuhzNsyFFGsZ0nRFKPd9WY7f6gK9jJppJoX3abfQ7z657cp0l1rt3BFwcAf3vqjjv4T5ixuKjPt9zlGcNSLxgSDwomIqsUmmY4cofTnq6L3uZsmagZeaaNRIKaK3794G783ZYYgbhH00BxsdXDIkWCbh0m7jAW5f6OA/utBl3IIDYZ3diRa1Q1h/wgPg//eR/KErdefH4qt/1Cq/OoSgxwdXCXBxwWa5TKbfeIbtZ+Q6IcP/dBy8YEnbT8iVXPgO5JdBz7xwGYKQujymVZ39DjgtxrqjMWyRWhK1tCRN2JIOS6Mjf2jmKC7jmhVbiBY4lAVwGlIIhyC6CPrpiQR3JzHJf11WAKa13Unkp6SxFsHi93moJjZNnyWI55kxnqzjK/aRBoqQtHuAHqcR38Q755jQub2uEFVPQuFxMFKKZK9UMlFj9L4832k+fW283Y0G0G9bsGJoJsVwp6XvfHpZRrJB+DmZ1JcYGnPacVqyKZKg14KDvvdLNY23NTFWBR+2z6qLkXV+PIyU6eDUKAWv0uMZY5rWrd7jGhJN3eBDaM2UZQzFQFGXbeLVHY6kWng9l9kfEmhafzlEZEi6IuqVUM+7lwJEGuDhaH
kt/q/uqCvlaaetl4eZZrRH9eJQxaGeKP8J+zgDJUXbjyDaLd8O07y/yInvyrAFJtcrL2gJoJoQPUje1lP+jrkkjNysGVJbSbokGJbzsVtEgaer7rDrxaNYwHpAlS8P3i9utM5M1aY4FKOGx1HuHkPi+VjCwxYRtYnWdWWAfM5C5W3zDdrB84uN32GLCm8sKrrzOmbBkymNu9khpkWyIjrfhjDHcDgYhPoMNCloXTIiDSK3cghJ9P/Rzm7LVECmVeM3CLKpnqXo5pBq8yRNtIv9pdlfQ10zNo/gMn9AwXjEmRF8nZnbvQK4ijXNr8QOQWY7QWP5m0kzAa7v9BXGoTOvYH74eub45z44VvH/3s0KstjvBZaszbx+LqwtfTxDXpF0EuS3EhsQC0iUrYV1vcy3q3WldmSorwu/JXkCsFr5/zJbIc473ke/xcSZkyIaI+0vL6XOPcbDaqt1DAXQK2ezyLLAzYmYs40XLaNTftS5HLfxXx4i+3oYT44o0cgB9QotiM8rvckr5XU2UVvt+oXt8jRHXjZiBuy0ippSZ0pYl4KxM8UhK70GfUXuMCuUxobqFOrEdSowb3y/NlGA7Nk82cTS5c0p67lhds9rYmv2K7Na8ruMwL1tlQ9hNS77zFzF2slieBJbL52yckEzpi7zIByXPGpAAcoBIJaAb6JmQI0b/mWI/isWwj2MPpLK9WzN4pgaUAK1iI31NyarTt20kFmMxourLPk73nZigvT0oS2H6OjhXTUgZYLSyj6Sq5RjggL6i9Efqw3Pw2BZSXC1XEGkttYTTod4tHh8796/E/+7jd7jGiPcfdNV0UZjL2+wQXwN3gWZBpALBysOSrZPBV2WBlSbpmlUVLDoGMxdSItp3oTxX/JGBMd8J+WhXsJIKEOFUwHVZneD4VgfHVkaA8w5L8MyJE67ZcrgWX/OE4FLjutinueTffghKeKJsrIO5r/Zc8iyresM+oHcjoXPU751i5OtvfnZHCo6sS+KdL6DN6g/dnquoml7dNmIHMui7NwFBf92+W6azLUklZiaaaWF3riP76rbJT6tWyBEGpF23e/pGdHP3QKkU7XbEz4Nsi2Y+LAAiiwJaLKo0YEijhY6NcejHD52xT197ASJ5HCCFmAwX60bxamt0mTnJz2NKOHn6vEZfXbq8a47LfXh6ojeDnn+ZhW/cnVJdp2WU5GbWH3xePqZcZiVzof0qu6RFu/vK7XTSi/4vljmNDROW26sMZrcAvTZijGJG+PtzcNUq4r4IaX/dR3y+ZyjPHpA0fpjGPDBTtLWQjjuYTOTLZgwglqIwEw1uJFNSt6ksAjAHHmaTO8uvWKetQegn6Xqc2uaRPzGglAqfchjBMbcyC2TJ7Ea0TZ+zPXhybQme4i1kcKylsGg5Lx7vnWmgeHUOJVZDynsnj2us+e/fO4WhyxW3cxSvY2XN8hnccJC6VCYsIOLL+nxcoUxwznydv1zHg9R+T3n7ug0DDTnp8SNfJ0U0zAjFBqHc81PI91QEuNOGsXnOKfA9odTGcVV5jwnAhsKA6UKJUqmWoq3ovI+cVEfC263qfBYbgWg/U9r6LsFI005zuXxJUdzcYq5bwQX1pHJNbbSvh/oimn2i2QoNTM5LKN+1C9TM71wrLlYmrb4tx+uQHGVDLAB5NC6ERbfJendGReqs5AZ8z3iM6EFTO6bwuhkYsQwZJ98DEnLDsscLHltkn+je4Kt0/N8BiIDLBjXyVHBIHnAJlDrX53d7IXeumFxJu8vsKeCpL2MyztrKkbmkbluvZzR3WoujmIxWI5B973xWbIgGW9Zz0h7MPqlOjKHZEf03C263xseRHHHrss7xIi9yizxrQPKwtS4MWnSgtVSstcigOC0xAie5rTdAkKEQRDkiKjYgqImRJBlMQj2PBhhHmJFRquBREKNskwQffBi2slkpEBfPiiopINEllMJfggWS0vrcyuVtetVmuaC8Wfn1LnQKEQ/EM+vNx84BX3a3gAPI7
wxA8nvfOzd99WXnpSjfsl3U1vE94exYQAV3EoQwI8KxIdLSyQpD8K6xYrI0ejJb/7gPUs9qRFQgIvtMPno8acr8+gl1e9M9yTMjIy2U9b55H8gy6mq/nQIpJD4CIVuRQlaQGaCUukLq3vacUK9Yts/DsaJdR+tTtBZUZQAw0qfqRAZAPx5tKWHhtN05vuYa22dbXmfNZuSKC1GroPum/bpfuI6F7kMr+5RWoLm98nh71tRb3bx0u0afIwBiuVPuGaf6zGn0ObJ47ylj3HcxceKK9rmxjBv8fFjp1O+X6jLK4OOcx43F1wQai+fjUXpvpAgX15xCzyiQyr9d/1455wFPyTCCJ9UhBI+UXQ03T+/pGusn9801OtyB3LmuW8R1800QPPenTrEN6HnONfdiJBYbIsUCDNa+t4p+bnThQnn8W1wV22O2oIPZ0DeXFQDw5rIpltSOMbHe671z921p2Wjg9rolXxR5BoURfyfKswYkM+WvjUtwppX/iGzZ8lvOGbUC0QdZv43INMZvMzCWY2HHBaswJTYQAVD8j+W+3tVl3EwOUixuc64HYlI0GLEmID35jIohcm0JmdDgktVrHnB/5P/5vxSWg6/x5H2J2VgCuVL93K/+b/i9753gHfC5hw3vnULDbnzsHOBcm+pRZ1uRMmKYuF8bJcSwagLCGiwspwrKmpJQ2Q/JfHD/STcs3afMqWyxBSJ8H1iKtXf0XuXMUXzUKJ65Ll13FFblDHBj3l9bxqVYVvd2/bF4hbr9VpR5LY01X7w30lWrDcblzzkY8Y6utyzjY8SEk8+gLbjmPLQf9Z98tnQf3S19ViwJ2C1lZc/9ia9Zt9dy59i7P0dlCEiadwtNG+l7D0ZGz+to3JWZoeQyywI9vF7fj8fdNUpGRe9enqEKNopBw+V4tJDBpMuZ+ABguxAgiRGIpLi6lHpAUq+sgJIk27tFuLAgxRVwHsF7Ghv44YVD8ImKCyK/955j0AgwU+2uiC3R/MDP9imAAshjAiWWyOCD0xqXsaG+J9a9ekra7T226xYx0/ca556NRdY7I7eXxgTrmHK59XxqQ8GeHGFH+Ji3enK8yIuwPGtAQkFzWeksSk6fzrBZ73prAFli7ReuW+6rfyspny6nFKwKthauGkvSZgORbatC28viht6ywIvrKNsNmJGmD8Q2vQ+7a5RlPo3MuAKg8TcWumZZ17VVSUxtxdoS2JjawVtashrQIiadAi4W1ygN90to3Fs41iN4V4CId+SCFZzDe6eA984BiwfuckD53cc8Ulq6OA6gVe7ZpzqmqshL0f1Xlud+ZAuvc9kimhXdxsIJkAKhlYhcQyQhM2kOuKa+wDAzI8yaSMaktUGq/fKnBUaAmq3HElZYI5J4/ohF4boEOr6E2JKU6+BQmmA24FPNBleCNB9VMTDZzi3G4YSsvwO9CxF9yuKKpNSzO9eMLQHU+KMtkK59HupYYLC1mTnz5ba38Uqc3OAUXHE5vBppkbXI+ziLi5llOpJdyIXlShB+SqYypWVP2bH6te5rLNN9OwEivI3VPGn4kP3J33XAsK4NI6+vYZsWAPCInuKEqGAop5oDEHO7cr0M2T1sXAu+MqvMkPC4cfYO55DH8PUBiBFuu9axQ4whsihqI1ybCJkZcR7FcXNb4cJCNYzy+uA8nHdwKbtrJW4vcBVunvzp4Avz+eAo4P26pTKX7gGTgn/AxzQUfvSGq1H8hjQoNcf4AJVrfazRs8LrRqBe728dR8fC6ePL75ygInjXFFfVoN86Bp2/HWser2vXnhd5kSPyrAHJmjN4sPJt++2L7wYYAeYTorWGKWo6aGv9lW5JNT7ElcA6oFpSdaxBby1NXeX11ipUFTkroNqcpBVgo2XVwijByAiIACj+xymlAkySI5ZkZK+vLlgScLRKvZUeUyoFcgLxzsEHwSh4Bx9pAvQh9x9aN5n7JZQK6O+dAk7B48vOodT+OAfhgw1S8hxYucznzu0ixd5lX908e
SSyEuo5QvafZCPYwsmuFmT5rADl5LOrxbaWT6RYXSqcRwoL4DycXzDq/QSUTFlbytlfMhDU8U9WnNAIjIzeKbkPgMbFrbyvwWVgT8/GCcDVRQpmXWNmUrKveaK+pkP6ZoLWLnqbUhi3mBqlUbdNiryWvnq3UBLE5K33065CrRWf3kENRKiP2sQW2ms/NG4+wkLMCrd3uAv8LlHbZr7s5bjONQXxeL+ZMsarZNwUv3MQ9VyKq90kq4jl6nZkO3n+uo08TntNcvuOuFAKrpRNsCIycYAGIiO3GHZnkvWOgqN+YqB9jZGAPYMS0HgTIyno+jod6tjBhgweNxbv4OJKf9dHGi/ybzBLoo5FnSDGFKAdZ0AgIjnXHsNlNzC/IDifgVRmR7wr4yHQJskoy1C38S7VrFQx0jMjXX4FQB8VFZYgcsZ6WABkxHIdcUG6xQ3xbVyaRm0cyUy30YBDJ4cYAZKjbXuX5aUOybspzxqQaLpWB2hLBoInJYumHAV6SWnmg5RfWDga4bRLCh9rQClb0jyrUdQk4BMCNYvYjdmqNDMCCOp/AEZGkhKtj/xDNjvVNLCNC1Oqblyb+r4nUoHiIlqSFZHKlLxe2X8yCJQVuVPwuM9FDEuaXREMWtwhcl85J2I2TKU8VykuSt7kolxdX4FfBT/UhgxGshLh1kv2/96qouA8kosEQsICxBXeL9XQKtCJ1Ry5jPULrlhMfd+7N1rufHwd3fG9Q8pbnNCDmuqiB8GUZKUtsNLhEbM19+QdgU3XPzua+RixItZvvc4aA2buEuWcSDgHn91UhMLEFk5WxGLKxSmHzWhEW8iB+i5zBjVOb8rucJ37Y2jdU6J6PxgoSdEJJEaZpCTDHBM9tzohRX9N/Ts7uuZb5YgCFVN7jiM61IjxtYCIrInDoESCYU47HcpYTtSIdw4hISd6yAr+BgARJ3iq0xPdzoj/EUgegwrDIsCLd0AEWV54PvGuTlseZLxyeXyS82SpK5baIrosMraw/BbgWc4XIxaE96N1NhDZM1xIYdYUuD0N9i0yY1T4e8OqiGuzQNAoKYSOKRml9tVusPx5XTsF5UVe5JA8a0DC1hS2vs0KPkkZWXtpIX2wcg7UAbWIsKadgoePCVfEahkU1LKcaEeuW1qhroHB6GJWTqjKG9PatwCfUSVezYzQZdqDK59KjnEy8HmNtXovg5FrzkNvxY3IjFpSWtaEj0XbMcC4W3zNbR9pko9b2++cica7WuPjvVPAe6eAc/B4lY9Rig76DEhEW2QaTHJ9kusIiOiilXJ92U+4mgFkDWbQwyBocaggZL2Qr/fGgGSlh8J7JL+QO0VIQFyB5Ux+3c4jZZNvAZiqe0mpr7+bSbxkalYZwXbYM0vkM3JSG8oYlTXmeiQ+u2z5XOiUJ7vyfLBrnwMXAKz597diud4Gk/HMijcCIpaLAis5m+8ZIhm8HD35yMeNfORJpOVYpPrW1nXBosoAZ7m+9I1gX7trEItGaU5lTRs+puVK1yh8PI4qfeWW2DfLfYrbpMWKLbv1fJKV1l1hWtSt/pw8WzppAv2mDrqsEVtWXjmmp4BVn5ldV+/FfT50YVZSJAXdURIC71xxw2RDiAe7ew2C1fdEum8JFna6iwAlzvmGvfEgtoSXBU9JT3jeSi7hBJ/f84jrBsg4s9G9becGzbDzNu3YMWJUgT7mguVtXA0tcPJUdmQv9oNBBK1jJi5O28/70afN9spz6rbI73LMfX0ZVHR9l+QlqP2dlGcNSGYyAiMserBq9s37sMog30+XB1hJ8bMCEm+0W7HimvXL4XU0+whlkRW3WRCZ5arVHH9GhzRt7ZexQsnrUsquQRmMMBtSXdiQP8d9D/QDoL6eI+mb31bY84Sb6Jj5QR2ER73O7Rs1kxkXOm51B2OmhF0ssK2FEWncLVIEIuCyu0TK7lopRtRAA19A1J7CFpzLMVC9QqxFgpECXCf9PWOKUnbB4krvawSxM9HhmhX16GoNmuAco
qfqz8XlMTfiMj7NBypaMdDLqytnXhHJFYWZEg6CZ0bDct2UIrMr8W9rm5k0TMBgDJCgh5ovxpXGfab69IdcqdujBQtN2uad9gAVfOxdBxueaopY+rjFIFP2P3C+aVsmY1R3PlFAbhQPwC5c8LUwphwnfRJuuzmxQUpt+0ml5z7xeWyguA52w3KAeDZ5MmknmSTiR0wwsgNQjoiH8MY6KJp1k8yIvv+6Vof81N9HbMitrkfa3dMSvXzErFixHvK3HIfG4ov7owTKmvE44sY+e941sHsOLlsv8m7KlwQgkZlHgApGpFhZkKQVkI9DE3ZqrMJMbLBFuFqGa6Vr71xRpqIrC+nc0rIX23UlDmQz3BbUuB+ca9zDmvNMDGAzVy3pejOan2WQtgYfALKFqk4A1626aVnMSKlYaygVBWyUjEu5or2v7lDyOmJKuMZ2cgq+xo0soQVs10g+2Y/rRgGXW4JzwMVR/Mg5AJeNrJF8bQBKJiqy+qEEuXNV46Y4oepHZo5ou8oqONRYkeCAkKp7losb+X0zM7LmesLF0rkhcWAqgJRiVjY8kvMI4WzeSzlXOFeVR8pww0drgd/iuYZBTU0s6xdo0c+RLlIaBTOSMtNXY28cQgDFi2Ttid23tuDgizJElueLn1t+RxbJvUlYKwj2JEtMCae9HAW8xxzPcQoOp+SxxY0yDcWE6F22ete+tJJPMOvL49LIfehUxiah0GiGeGQIEONnucIIbCnH8Cwy+UQ1MnB64iNypDaSFp1iPUZOhlCvxRer+Ni1bHRcS8pYwuBK1IGRs+YZKMXnAOCyumZ/65jMlHC8BwBsS01PvjVxj/L+RWJPNtp+jTQurhuQPHDd6vt99gv8khlUjkGLeczgP+ueaSDifM2ypdflsaYsy6CHjFK5xZM+lixt8K4Ev3f9lpk5HZupmRGgT8U8Yjz2WNNb3KNm9XBmssUEe5Te39+K7zur3/r7LTLK3sUsoGZGZIrhh+tzYEj8F4EheYkh2ZMvCUAipXUB6F9qttwDKIpxFQFOeKBk16lkW3udsKJzNqGZwlDbIYGJbZmWzIkZsJ/PU1IFy+PyNkKpsYLYgRaMyHFQZ4tixRqoLjZABXlNat7EPui9m5Zsp2xud40ZlPB1WkoWW515cvIODRg5eYfoXFWkYkJEwtU7bAt18nWjitjsTrClhMtW28tuaNwGUjgTluwvH+BqvGX9OmFQ6jbsYuEd4DZyxyoZcTJLojPjFP/tCFIC5PoYwWaxGcgs/ZgtX+XZkH2cXbR05WegnCJfQ3sSec6WXcysI1t2fSruGy5Rn3LhPwab3mc/e+8QcgpRH5kt2U9bKYvyzfrgVmkn+uqWw+tkADOfg7JQUfDydYuFWZWBzLop/N7qmkHDazFAhd5eVsCesWKcevkaY8kmyNdZsqE5m3XRMgtIt9yzpMhMg5Hd0yiIDUcCnaUccdWztmkC9Ms70Cufm6EoHmrHWrO5YZHX4HCNEd55RNEG9gSLibrCZQNRSllpdBRnsvilGCsSACQHl/y+C5dO+WuAEb3tSPZ04n4GFPuqnUdAZ5aKmdbX7xYYmYnFVkgwuRcILt+pJoHI4Fm5dTzS7MWtQeij36ti86zttfsbucw+DQS9yIs8a0AyYkFK/IXBinAxJituIbq2XknMJPgp0BG3A1VbfYkmBpBrlEi3qi4Yd1N1GVCZD+9cYU60O5hmd+r5xxZIvq5yvYmsfjRR8XFtJVYCEYCVlN7XHGj7dlT4kPtk5L5BFk8HDsadWXW9Q9nOuxonIq3Jmyp4eB887rNW/ViC8ALWuGGNwG+9uVLmpyAnE+rP906hWEr5djtHrnxxa12yuNkxpcKmlGt1FLzuIALMC7CoWXFKvQCZqlNeTIxwLhZg4rYV8Au885RSOANWH5FBQO7bfBRXboJrgCkzgZIVkffLq+35qFKsWgDeVcWCGSfO0LZ4ZI9z0rh8SoiePrH4RkE5ZWBz3RLwHvD64kUMSSzWu
xBvdyOQrhSjibtVRNjCXVMDy222mHBi9wkBrmWgO4MSjiuRAGT2HkuxDBh9Fqv6+4Qj4sl1LglDgG+TVQDKbWtnHJoxOCzFwOGgknm0oKTYdvKDpZVWGfBsiazMPWuHVwz1ORf0PC/eVOZm8Uqz5ZcMTso54UGFLyO8o7jFh5wd7z55AL669PmEU3JIFFaOc/DwziMsC1yoYwtdeLSBiQQYAng06YELMCEVIiEbrPJ1sWsrg6SyPlUQxZdf9pExhxwwnWo2QJ4H2Lh0FUYxOddrZmTGiox+m8BCMCJmILgw6sxSZcv2WnJL/ZMjWey64/M+Ymyq556Bjtgs4z+ZZY7H3Bd5kafIswYkLPxSnrJ7T2RfXPFi8wB33VLzXQpbY2v2GQAi+8niXaewy3GF/cFp3xaUjNrcfFfBxMRkCKClwMRMLFcIa+K3fFGtQ/N2EohwUgHLfxfowcgoK5pOxRmcm7qASHAj9R72g6e6Iq01WQescnatmBI+f9lw3SIetojggM9fNvzm+4+ICfj4ObTpXdnFyXsEHzKDkLDG6pKVQDNwfQJqv5DlX98ftPEhKZYiZk0MCVBBCYSCIBgSFzcklwGNXxC8p8cqknuHSw6bo5iYwJBDtUW6pTUMh+rrsj1aEGaB9nK/wGxXjiFByqAs5XostC0XTyOmMhe83MhVhytaF9AbCDi+OgVctog3lw1rnhx1oDswDyydTaZ6P+2qEbzDeREKhUzz6l0xAMhz+eA6UNJlNhPKqcWAsIxAiM6IBhhudeK37gICrK4ojrSNqKpdAvz12Mft79u4Fwdm1Xq6gp4fUkhbUFITBbQi3Xr4uHVdu62VbtjKMtekFI9UTwcBJaZJg+CR+4tept24cM73buVzowHz98E3c0MCsYibp2tzjlxP2e2LWM4FISxtvJr8lDKJH6ljWr6ORO6X/D2mlJ8X+i7BSEopg5R8b2J1+a1zBjPsApTEWM5pJTuxLPZW/+vvWkYghH/rTFTsTinTfZe5zPfv5CxbG+3U9i31ab+PHk+OygmuefaDS+XZlanSddu0m5YGI5aL3LsqLgS48NG6bH3U53uO8iUBSFhKhfCUsG39OlaOrexCHGjKAYOcEcunVNy3OIWp5pelwimlo5UNNsEWAVCEy9UMhIwMJKOJnwNpNRiyxHLNusbU+JMfqVZvu52163Tw/Z51yUofyhXVmxo0NWda2Y4zcrFct4jHRP6wjxvFB0iGhH39H9YILMB1o5iGFKsCn1wq2WV004OrsST6+s2Ch9YyvZ6O3G7vArEoOesNMRDUrw4ETEqtAO4VBhNo40OatnIMlWsZkVHszEwcHydRn6WUmabkSmxWmcC5bZ4BzYaTd/AJWD1bj4HiOnVuJ9TLujWKAYsFTCyr4a3CLIk+5uhZtpZTIHPO1BVTZwG1EmG0GaSqoqqNDg79cyiFmy5ZLADgdOe0Urgx5QFSu+4BNgA55FIiH3sRm0csZWtkGIm+hzIDE/9u1uc5o7kffv85qK6zVIlcu+LM3AblvdHtZaYknFypU8Jswck7XB0971dPqbqXGGpcIUhBXmMe8QLAKVkIvLE7YHbvnLhd6ZZLZqN8T6nE2dUYjwpMUkKug5QKO1JBSipzYzFglblafGdgocCI7Ef+fgQM7olmRHhZ+V5c92h7CUb0Oy/nPtMwmMg4Qf3Lc7Jsq2u2lcfeM07a19YD8iNM3h6z9CIv8jbyrAEJD1ic/aVmrXLddqNYBha22tbUHxE+ObB1VrMfliuKPue43dW9wbL2s+sYuUm4skyLdIuYKfGdaxsHgDZTTW2PJVZgumY/Rik8tcuJjmuRmYR00D3A1nISPQAuvlf8yxWJiVO2i62MrPQGB1wBvH/Z8P5lxevrhv/t/QvWmPCwLrhfQk4dXCejxzXiYY2lyjun8eTjcwA4X793GRT4BJdair1YKjPD4TjDVrmAiRUzu3Ql4bblIpC2ldib4OGDL8xeSq5Yb7fUWpWlkjoDH
7PlUvidSKh9zmCNlifaOTq4kCdIn8BuS1R402XAQQwJ9SdKeuBTIDeuhyVizXE/j2ssrjTEkgS8vjBTshWF5YzjVtM90ekz2dJ9XrypgJdTZVak9GQkJoANJAWM8b2SLqXG49Bn+svKiyeXOIBdSlx5MfaASRFjjJGZnvSZ5e4We6bTisusfexGWLwpfQVoVwCUZZ2UW2lYkdZz7mMuqKozL+3db0v5bIFVVex4+Tm/9JtvLcbBO+EaKt1e2CItz9umBAaALXlcsyWb05xTjFzMMSYOj0sszC8nouD05Ut08KipzYnN5XviGoPETLiZVWdOjTsWtVUAkpydsLpx0X2Wqb4f1th8XnPBY36XOWEJx3yyZ8NVuRDJ709J40t9b6fV5XXSPYtdszQQaZhMN/YEaPpV9V2MqaTTbmIzo6z5Y1/DEYAycguTQK5/RntXLWv5LOvnOyPe2zTzh33OF5nKswYkAL+UXIuDvmur4wiM9Mq364+VmRL6XrSDsocMdhwq87GNqWDpXZ34WxLsiF3VfSR7YETLkYw0Mjak0ObNBJGXIQmQ1AfoSiACyHoxLavBg32lyesxuvSIro/XkNcWU7UIAxUseFdZHf68bgQyrlsqCiy7a8XkKYYh8fVS7vzHNVaXGk/XRCApEVkByr4WQctcasFZASNABSBPESvwPUbArXDhTAwbW3AdhO8HK5ZoNEj92DDokIolb6LTHMvLmF2Ry+td/kJuXPToy3sUsmuOz26Y3rmcFcwBiDnGIN+DCMRA8TIn78sIt4rJlV0TWPGYWan3ZBTw2q2bvIebAiUe9OxyFXseh7xvk2WMLK2miGKN9BslaQH3OWArS1Y9Ip8t8bHZrr33FhMoj6XPFZNIqZ6Pxfeexz/L7XOmAG3Ckt5Y2Q8AEt4ueNdo6iX+x1DqmuD3G5gSeS5ZuwQQMSWeruMagSC2CVuicZMDbVYAi8+Mfsp9Tswo7+YSGUicI4DAxEoSN03eP9nt0tBQ5tLYLquMSGZDgMZNK+XjyIyM/CmZkXrOeu7W7c6OF9Hf+3XtzdNpmY+INDjRb5Hpk8dJ34KTcXrvvH3urxBqIhZ2R+QkE9xOHsJvSXutn1kJ0u3t+359kRf5sOTZA5Lid+pQ/IyB9sXXjMgwA0vRygQgiMSUROewJYoxYHcFeR6t2PcsiFgvAApb9HSbpUiGpglKj6lxcbIy8Fig3MqIxccb9Y8F6vSkIdvEwuxGBRw9+OCkAc5VNiE4odg5HQxev+sgfWp/dR0gJgC4bGTJe1wpOLRkWtsSPvtwxTUmfPZxxecf1+JHCwC//fparGJ3i8d58XhYF5wCxapwYcbHxWdFmX5zMcaQA5UDW3jZopYtl8GB2JD1AnCtESuzFkAZtXSNAE7LKeNJgMyarCXWxOWCicE5rKCJz7uswKeEZD96dCz0blnW5im1ygrdk/Hyso9QXLpzOxWn5dG4Y0oWLbiEKyvKuTuuke4DsyV3iy/xJQxQZr7PIwVSAmPp5sXK5EVZw7XfubSwAsApZdcz78BaxwMiTmLswVZZRUvmKVad6KtqxdXFUfkQFqMhpSQ8cJXZyCum2+/pelznybouc1yKvWtPLO99KgqsDnaW9/roPe4CmQ8YiKxjtgC4rtOKsJWFiVnEa4ny93hARIgEzE7R4eRTYU3eOwVicRIQomR06N7xO13f8doXhbGXBgYBDCzw0bQ1v9O6aG5lQSjBDKeGl8wIs50czK5ZrhEQsVy0ZkBEL5fARN4Dfd91VkJrTvK+Mie8XBvlyvnLwEjrmP3dysDYg46OVRHMVN1mpuv07BKzeDWWJHZ9PJPg23pGL/Iit8izBiRR0IPsqsAvwxVVyZc0qBTrxZGghDNgSfepUUasun+v2FtKPy23zi18Q2PvOz5iSqSScgsz2IKMOpFIsdzLJBiR16G7lBWoXlHv3ZsoqLmmlz356lpguRXIoGua8MgtgCY9cvfYnMNli0WxfX3dmnTP1y3i/
cuGxzXi9XXDb78mSMvKpbSkU9BywOU+4rx4XLeE9050LdcY6PoiMRAn7/HeKTPDpwBEV7IssaGLrL8rZcXizFolqD0JcBHbfP+c2YY7xPL/ThEOPrtwrVSbxC8EYDwVS6NHl1gbCRq0yL7XT572Y2YlBeiBCCslQOtadJQY4ndZJowoLjuOEBZljaKJ/JQ8gk8ILiFkxuS80P2sge+uxJiMAjpvEdq3ghLpqmP5orOCs8WIU3C570iZ9JHck9jYAqC4c5XzNYaQXmnX1lnvXWGTTsHjlKgIJZ27AhMkek5jIjZEFsLsxXVjmRa5r6XHpyRIu7eULb//HGdggZEjgc6buFe6voy0pGsmzAJOI/aNatjY1npLYkrgYpvYgC1u9Jw7hy24DMAT7vIybtPd4pEyMDn5fG95/HXIMW8166EDusKF8t3ma2R9WcaQaBCSUFn061bnGv7O7r8cl1gBSGXhR2BkBET0d/o9f78lOzVy2ZKyB0aafV3rlqylzGMuz78RAKe19iigRLomAq2xU1eo5/VSrP6R78QIfBxhFmfubu+cvFRqfyflWQMSlpjECx1bYHILGBkdk44DNA4osflopLNYpPb8FXhUyrW0y7WWFCsobtR2Hci9J00QoQBNZb3RZimlHarPWzbEw3uH++AJjATPyYeKfzMrZRJ8NP7P2qok2QNull+QctBnShS8vaJVfLmPoqsWuutGk+CWUqOQ7lnSSv/lWXptTFyUpnZLqcRLaGbBqUlp5q5lptvUv723gYnals/CmW6OSsoKKu/CrWdLKX2v244ACmCDET3H0btRA9y5kjutJKWMrI88IecUwU5s4l05ht+INQmxTtyXNRYlU0+4kuEYKTtaUQVQUsCeFwDg7zv3xbeuPj671XCNktYVshpbAJhGAfmual/2EF0pxgjk+iL52NFlt7jUukzCsYJpg5IjIIK38W4MQC1FtjAdQtmSyUnYSMIZmfidlK5aUkYKmSVyPd9ffb878e2+UqTC1roKhm47ACUujQwhbSpzGbMgrfC1L9s56JoVbcoUSUYITgEefAb0CZAmB82QyPf3KBAhcJiauA+eDyVDAtR7yaCD7ydfx63A0mJGZso0fbZgZFRxndtE723tq02+w0BOkgNoN+yRjLwQpLGhMlAtGLGAyAxY8KcdC9LH58xEP9sv8iJPkWcPSPhd0e/MKLc8T8wzqRmoAGZKQua2i6sP++J3+4o2iIFjJJLOBdBZUY5mn7JYE22t0xXq64TeupjJfhi1V05cp6W2ldvJQOS9E02k51ADLZn5YOAiwQf7tQdEIF6JPbjmAG+uWs5uTdQxpGiHM9JyBx8WINxjjdlNSylC3iMHCEd8/rJh3RIec+yITGdoDcRSCQ2RrXtkzca6kbIXCBQhpxSWCRZcvq8NOFH1RTQ7MgIZjZvWERSawUhMWUlAVd5m7AhQJ96UbADO+0tgYoEP3rY/Pq9LbXKlDEpCjn3gIHegTulF4fHV2uoduYDw8x8TsOUaJtdI9+UUHOKZFKU3V4oXen1xGYxU2/Cby4YlsxwjBVZOwjUOICH4WKyurOhcxPZNPYMMYJABQUa69T0X52SXJGkl5eVa5DjCCuzJe3iHwlpKN0pWeCm7XOEi87Ob2/YWYuk2MpidL8Gqd1Qt6VWpZUWNa1LsuWrxsWcWX4uxaNysFDOima+6X13WuO0ZyrPeTwdPn0IfOA3ABCOc6IXjSurwEBGTwwm+OMY55GK+qFkCAeSUwfn+qDl2lFFLAhAGkZIN4ftJbRSsSbmv9R7W7xWMXIQ70ciaf8Q9S0rb775ZZmXW0lLfufzOFldRujfXLWZWsjKdMkGNjoGSXgi63op852tAvzR89mCj7xM74J9ByAzYta6GqY/nzMvj8u4DEucD3EfMWHzU53uO8uwBydvKXkAYWz+2rI1JMDNLdXsk00Qf72HTvPQ9b3OD3/IeGOFK6rP9yv5CsZef8jokGCmKjWcrH/kvcz58B
iEcK3JiIOKAUhRwW6kOBxcI3C7gWh0u5+hMKZRYChcXYhMCTa7BOyy53S4CzmWg6WgQX9mPma1ZjaU7NFYiaUGbWkiFeMcxI9Itgv21lXvbXuVkJS6puA8dX2IsjwmCIbnpdNV6bwQJWJl2jgCRo1LjDwRgEddC/ZxyEVFya+K+164PIe908g5X9qXvFBBy9bIsg5bVXE7UMh5BKqLSFWR03OBScccpY4tO7auAyCiVbQm85jTPuScRkzg+tcmnhOByNkEfgJzAg91RE1Ek2cCbmnsyEx0LMhPZJRXYVkVrNE5Zwc/SVQtoFbJ5UPl+fMFRdz6dLEH/ltvp/SQQ4SxOp7wdx+TNsjgBykMgkjLM9blOaG30SSrL/K6J9RKIACjZspp1GLMhpR2oc2YUz7Fkt2bMCLBv8a/L5+Np3+++W350nK/X58REyWv0slqHCQA2pUQwwObv9nnydwVGjoAJC8hZHgF77KEUPQ6+yIs8VZ41IDn6/MsxxRq8LZ/sUaD5LSntZkBnBDYsH1QNRmbBlHJcsVzH+BqsCWI2EFpt12l7pZX1FLIVNm+3hLr+HEgxPzPrE1e4a2Y/tmuNp4grEPMygACJzCLlPBDOFFPB1ctzITAA+B/OHtgueMQJ/9/XK9W8CB6nAHz28ZrjSWLxV/743YLf/R5FIUh/XPksjFI9stX543cLTp6CSb/sjgqQ3WVL9DnQX3AOC4jlcdcHur4MwMxgdhYNNrgPJBgJZ2JPljMt8wTSaKJrWQh9m/dABdDpx00wK3D8nWzOa7wnOl6Iju1KEDXjDEvBl0YG712Z9Nldj6tfc3wJHZs+rUrDVvGv5nyKHdFuJBxLssVNgR5XiuFtMeGS/TzYxctShvaswrpNVjDuKW9fY54crvy5pfwux5woADhxFifnikLskHYD36lfx+v4tvMmfB2WhV0HP2tXLVnJ26pPcdT1hMUGjRtkILpUYvk+StG/y30tYILvTTu2c+0jzkK4hDZTkzyGFj2ndHNbzFZ75+By3EgCqGAqcryQYEcAgyFBBcEcpC7vl8y+WFzGNgkue1ekETNy2SorwvfkUcV63cKK7AERiw3Rv8s8udF925jh43o84n2i3+1xpoWKJRAr70NvfNAprGefeyDk1jFFGmP0emJInkF6W/dFSPu751L9Is8bkIxkz6ihB4q9eUqyJHoQ2cuQNTq/rsVhLZdyBIzoNllAhNcXC5RYRvv0x7LAm3TLqtY7BhzEIniHBnzU9ZkNSTmge70UIMIsCFcoL1XKM0jBupaKp8nFAkZoQQRE7Vr3+D7c5Qu4+9jvwRKoCKIsCPUgctwD5LP93qnSqtJipycVeV+CcwVwnbzD/eJxv1CsDAfnEyPEDBHa68zgi1mfXWEgokVk3EocwO6oBkkUYCQJC2Q5JH+mHmRQX9DnZoAUC1DcKiM1Qj7ufMUcF8LXwhKcTotLzz49s/k+8vvFRktH7lvXrXWVYZmBEemeBbSMiFyvt5WMCa+TCq08x4hRWQ3lQT+boyB6CfNiisU9a0XCFhxl9opsyXXAgpzQA2A3suBdeWhKaM/OeKofkxEQAdCBEWZzZdyI/KzXM1eqjshsv3ZdufJuOyvwXQIRadQA0IAQZj1qxsHxvDCSGZNeAtIN5ismWmbdSys+hF2HZPpeXTBXx0QAMMEIMyNWmmbAfuafeo9l4gDL3U6zI937m1I33nAAG7OSmq0sREnu6yvG0jIlfK22e9YMiOjl9bvtgnqUEbHct9bcN7d4cbzIi0j5kgEkeyBkVpxIDr5WGl9ex6DkqMysWCNWZG/CGWXZkuupvXZmr4byNS7Fu32A5vOgw2DkvqS8pQl0EWzJ4mnQOheGhJRxt60EQrYLAY71QrET7J4lQYZkCxay+KflnhTu0x0p3+GMFE6kqKeYCxN7YLkHUsSXn09kvcvpfxf/Cu+dAq5bwucvK64x4T5QUTHJALV9005A3H8VnAGvlpCvPwelOuA+f56DI
+CxrXCXNxWASWaEr1mLyLJV4kfCUlgQZkrglxJXgwxGttgCkYTKhPBvq4iZxXxIf3+gKidAm4FLW8+PpH19m3mMYrwoEx7XKIkxlUD3+ju79bgEwOXAd7JuNr77uaF3i8cj14HAgJFRir+UEYCxGBPe/6KObR3L+q33seJVAGBb+B76bM0FTp7cSE7JY80Ae0uuKF2cKa9m6JJuRbTMGhqtFkoAAqABIfxbJwTZxHjGIhNyWF0xcpOS7WiXF8jbtPOoWJbjkQsWgw2ZFp1BiIzrAeaAxIpZlKLdgJklgU/AVsEl3ztO+7ulfi4oRgkFRIAaB6JBZIm55HuugEh7jD61L5/Peue0QYCWV6BxJHuZxSRaKb3N91AcbsYolN+KGaNtut3EMenzaIzIUSCil43aPxPL6ML34u45MCQv8k7KswckDbDYQyXGPo17h6GM7wGA3XN1gxK3wZ5snmpdmGX2siyIenKfxdLIPiiZtIIvmbPuFrLqnYXyc87gpCxDhLu+ITZgfawgRDIjXHU8F5pgJoSlWP+Xe6S7jyH5Ben0CgiL2MYTqKHGIvk7uBTxsfiG1p/JxevLzgGfeBWwxoTPPm5YY43rICBVrZPylkgGgVkHgAZjJ66XXbQkAMN1ba+XY2SOAJH8PfnQAZHEwXIZjKTMjMTElrYKUE1QIlww2OVFumUQYBkHp+rMdtKVr1h+i6vPGKxIy6xzvd15xB8F76qxOgAxUvrfq4sNMIF3RXFCTg0MeGxxw8lTJezy/C6hpAPmCVZm3mrOjbHCy58WU9LuV4991G/9iJBC7MV3CtBfvMPFx+JGdA6puHMF5+BXru1S3+9TTJUNjb4B4oCtcAE9G2cBELmdVcPJzD5kjNXW9QNjNxNbWmDC+89Eg0157sIqi3gQ7lt2x+KU6Az8uG8BFHc587yyL0Pfl1IYWG6J4q1kVjUADbik7dtrH7nSjYBI2T/1maLMsUSBkZGLpJZRDMMsfbIVxzOL7bFE1xqy2jA79y3nGxkjLAbpKBCZAZKRS5a1TDO6eAaA5CWo/d2UZw9I3laOMB5t1i3bymsBlxEYqb8/WDBSf9vbzTIpzaT3X87tNWIpKjtSfwcHOAYfca3K+HatcSFXsgunuCHFDc4HJHZhyi+yC4GiB5L6Y83G+6ZmR8r1Oqgmh1D644qTX5CCw9UB75081i2VuibM7LBFU9+VTdTt4DrIXKvjXBQMVDbk+tjGxUggwrVHlHSpfpkZ8SJexC+VLck+seyqFTNzwYoCgwpqfwtEdNacWcYcmemIj1U7pgUjXPOCwO5mWtSdOx4oXV4P8R7yI92AEvblzmlqr0BZx/VKvLC4B0/ufNw2/iNQQjsueaI9YgkFanrY0jXKxWHknqWXjWS2Td8eyvJl7VPiWzxbsz2iTzh5LyzYsaQJBkhZjkwZ58KxzFCZbdVs8wSI6P1uGbOkC00J6k99vx8HJ00Ek33OQWam7kj87DvJmEAwJRWM6MKxGiiwbJHTziaEDEY8HGKcG5ikkOegcD8yLrNJLnAQjPD2dI6eFaljEf/uwciemMrwjaLv2yzN70ikgm+lIdbn4rHhNpA8BhAWEGk/e/esp7hpHV33UhjxRZ4qzxqQsBXmyPMvwcJoH2nRAVoKXNYCqH7pVbwTk59aNwpKH4GRWbCb/t6233bTOio8iUmmSPsx3weKG7lbPO6XgMVnJdwD94Eqk58lELm8rqzI9QEubnBXWobLI9J6RVpz9iwRQ5F8KEFnbFlIy4ksG0BmA5gJWUg5j46+n85NmlvvUWJNCiDABfcA7r3Hx9+75zxMNW5lzUBpi12NkOQ5sxfo4LECI3e9FuDh1msLOlTAenfcQh0sBKyEK1ZaTiW9cQEiHLguAtsZiFwjsThXtjKmWjBSsyAJKLVYYqzZcdgX/HGNBYzoCspSGJSevG8svzLhgUyGIP3mF89Vovs0plyzJYDAk/co2YBCflYTOFjdiWc3Z0faEjav6pXES
sFs2c1rS8QmsNIoFdjLujVpoYF+ItcZnLSiEHybhctSfmZMi3UOS0rdHi9dyaIAW75bzyzQudS+iLjPy+4CLVu9L8G6J5/MxBajbIA9SyLWxX5smyXiYKExl5X9CkR8yPtHBgH5WAIwyXt4hq2gtffPN+uk6HumY3fYsFHAR34/pLurdHuVhWP5Oi2wfvI1eS83KyU0xgdpUJD9K78zW5KP0J2Ht5X3RKakBQz2ymBENnNZC3iOKMoWs/D0WJJ6LPneWJ+6XRqI2PEcFqvav39WO7RYMTR2e8bMzS1MTm3vMSanGJqeAUNC1tSPujDiM+iXL7I8a0DyNiJBiXw3LTBS9snLyPKbDoEclrcBI7x8BkSeKjoPem1ve+6S0lek82WgUtL5+joZh5xFCjlYm9yWmCG5Il0vSJcHpPUKxA3pKjznfQC2jVy2fECKkdiPFUgL4FPKFcgBxLXGMPgFKQd8J2TXj6Kc5sEgVlDg4kbK/LbC5diTUgdE1jyRDIYAAFJK4L1gP4obWtzIDU0FrTe97kMN1ldB68VVrWFKlpYpQXXPSnliZ8WEGRGODaHJswUiOihVZjOSQKSAk623Xp+Cg49kbefq0Vg8EFMpxscV06WbT3SVaUK+jlEsFdUiSdWpJuMKmYGL+pBcjraY4HMhRdnrnH2L01AD+6mAzwsyOGmVDJ7cObBTL7d+a3DC+1uKwp4iIo/Jy7US0bqK9cHYJU4mf54XTxm3nMPVsfsNMSQUf0PAjpzgGTXkLcr4KK5dXdas6OqtrMiWBPNRjCqCsYnGw6R0A21tlyLv62y7UfxBOaVjgILy3HnxyWCEwHkFIvIwPD3ULnLlfUlIJcus8w7llgGAT1PmRC6ztrGMXTIxigYj8pgzMCLPv6csc99aoPxt3RyPghFL7GD7WRxSff8+CJakXWZnznpqvMgeGOniZLyfusq9yIvM5EsakOzFflwPvJjWJuTe0bImJK3So+uWSBct+r17+iJH4kCkSKDRpS7OygLvP9q2tWb7zqJ38pSSkpkRSm+bXZUurwmIrA+kpK+PcCsxJPHNFwiEXB4IiMRY3LOcDwQafK4qzsAk96pbWS0iRd1tl1x75EwsQjgDgRiT4MXjrYLGXdwIdADwwvVLulIBAmhIYbAgYz/Wla4pbsB6JdezlfKocExM2T0DrdzJdM0+AMuJPnkdx4fIrFk+lFgRjiFZUwUerBgwG7LGVNiSSw7of1i3DCyAx3UTIKRNn8rfORvZZavFI2UqSRZpZefK0pwkgJUt71CWlc9E8Rwn75F8VrqiK3VWHIMI9OJpA6oinwiYeEeK2RZ5wvS45mIMp+DgkwMQEXLQPzOCj87hlBKunjJPxZRK5fU3lw1rBiPB0/28iPSjM7bkqAsKy0hBORLHIPffYg0wtdxbZKavsv/iG2aAjhVySlOKt9FxBqxgM+PFLly2k9PTAIinATdLAteVQUwlMN/ne8ZVsmtsBCvB9XgncB+N29GwCRPFbhSPwH1S46l8+eSYEc2QcPFYBiLOtc89s4Ul7EoAP2YNU6qsIcF8AlTRZZYpugYYaLESd7Rp4QcxPbk/CmuyA0R0sLaWkXvdBxljNTrvLXIUsGrhgqnWe6vbsRdDMmNFjsoIfIy/95nKAOA8i9R/kReZyJcsINkLAgRaxkMvs1iT9jjZurvVIkeyGFJhX2JCMJDHLWBEirRU8fFHPpu8XDMgxe9YgRItsq6IVCK9Q6mpcfIOd0tO5+uyq9N6gbu+gUuRsknFlYDJ5RExAxHEDenxgRT4uBXH8pSVPbecRWeFwi6k7NoV8/Yuu3HhdAbiPRBWpOUObgEp7Mhsj2RGUsptygzG+khMiEi9y4wNMzgsJa4lt4OZDwYfDEbkNYkbArecSb3mdi8nur4CUDa4ZWmyaiEsNYOWiBVhFy3tlrVFdtlKuMYKUl5fqO7KwxpxjcR2VMYjA5IYCzPC4IOrlF/WWKqYW0q2jL1gK/tddgF6dQoFl
FxjDY724uWJMQJ5+8p1UI2EvEn5cOr99ikVYLJli7hzCVyrJCaymJ/Ysg9fgt01MLl6V+oheEe1OYJ3uORgcKl0WJO+Vlrln7WN7L+3kVEsi7W+MiYtW8IxJY/iWnnfzSMDNQClijzFl0gS4hSqsrnnurV7TXn3LYlnJQLILnoxW/NPYGYtp4ROCSFfJ4139pxwkpmSJkDJAi56e3l8ncaXXBmr+2IB6pkt5LpNDO4YiPBzPnw0JLte2sdxYQRH2K2rEHu+BSV0Lf2hLaaDt525aB1xz5LHtcYRSym35CnvzFFF/anvIxtBCHD0AGV0bRp8zcaWGSsy+26d125/+30PiDQuis/GZesjbucLc7Qrzx6Q8HtiVqsV751+t5v4EOPFZ+Xkbc6vhYMO6XudbOXkreNQZhloZu1/G5GsSKm8nl0JOGBdAhIucOguDyjxIuuluGpRDEVC1Eq693nCFKL9OpkxyMq8YxZhyY+uVNwz8+Gub4DtCqwXnDh2JBdXLGmFc/tKoLlyrSrsBse3AA0AoXXXAjzSmgHM9Vq3layPvDYf4KLPasKp9AVfI/xCjIhfIDNqJefod17HLmmFHRFsiGRILhuBjNfXDVtKuSAkuWE9rFthQa4xZtARCxtgxU9odkQGhLbuQaF850kaiDglsmhfIymSm8/F1Dw953FLOAXfBb2nDEy4qjTQAxOgfXeZOSmKMbtveZR0wOQPnxAdPY3eu1KnJDgHhOwSlkFWtYbS9Ujh6x3FehwBMkdcI6x1jXVe3ZNZbIoFTMztXMJ1A6JP8M7n+5U721cPKWx1PDwSWP0UCXqszGwJiSvrmTU54i3uO0qnxmYwcKFz5ufQuLYme2IBIjm1LxeLDYI9ye6uDtVFawZGdkh/2h7ElviUQLeH4rMW76j2yBbre4CeMdfudKN4nhkYqX3Vu2fJdSM5Cgae4u40igk5ekzN3GwxDeI++mxttP9YoZ+d32KLaFl7Ht22W/poxkZZ1exl+7kP0ofMYL3Il648a0Aic7cDfdxDKzUNocWCzN4hy6fWAh56GceZ0EBO+1+3WLfTg0uZYC33ATkRjAfzWTzKLA5F10VhSx65FYRS3O9Vrqnx6uQJiCDCXb5Ayv36ULJpMRDB9UKMBgevA5VxyGDD4VSXGUGA7nxPQOTVlyGFc0l9K7d325VYmLgirA/D/mncrfgvu1tFjmdh4MSxLexSJsFHjBQHo5Y1bEp2N0sCUFF5X5Vy0IcMts5UZ4WD9JcTBbE7TzVWPAW1b/DFBSumhFVkwLpslSG5bsSGPGaw8foacd0iHjNAeVgjHjLYeH2hzzeXtbgnvblshRmxrPxAVX7lHwdG3y0er86hTOB3i8fH75eabcg5bIIqjMLMzu8Ou7FQP5ELikfRpQowsaRR5LIy5gXw4WBfVsa8I9e0sCVcXeuiwgpkqTgdmTWpQe/cH0AbDyI/dT+aqWqdPeHL/raWaeXA2la2x5aYXd1qJfmy35KaY2Kj+iUcOxTz8hNVhTuUbGR03RZjW+6TYEpCfTQyoKx9WgHGfkOs85lxhGLR7N7x4RiESPctcnellOnn4OHy+uArENkDIV5ck16fkOcQAC5n0fPJFdBwCh4+puLGSNeVmmvWQKR12arXvm7t9pIZGVUWB46DgD236yOZKfk6QujnvyMK+956Tp0dxHttJUTYO/Zehi+5flVjCgkDhsqa3Mry6PiPGVtiJ89wWLZ3nwlwIXRlBT6Kc77IXJ41IBnJkbohR9kP3m6mzEtQxPtpkYxISSOsGiGzeHXtjakfTMXPW9iWPZFsj7Te0e9q4QsQbk7ZBYq/MxNRFXwRR8FpfMU5Ry2jeAvKiJFETEUBFjz28eE5PgR27EZJkytqnkgw0YCRbWvASHHLipH2E/EiEPEiSZkDy3XGDWBGpO1w+lyqa1ZzneymlQPbU2ZDSjrfPPGXv+yCdd0IpDxsETG2AenMiFTwsWZAsuF1BiLvP1AczWXlDFyGV
T85hNgXFDzngoI8WUtmhRV86kqZAQvFTSpG8mQDSsRAjlegugmkuKHEE43iFfR9sFwofGZGWKH1noGOfK/re6Un6TZg/LhYCTQ4IBzIY85AYbIACv+WYGRkedUuHTOrrJWmlB/xk8/HYrYENU2wlY1Qij7lpp4L7o+6Xu3v2zGUx+tq8Knbzlxbh4lEgjx3KocsbRvsJ8dPHje9Z/csLipL+xMD2PYFfz/ChlhSWEE4JMeuW5QIIoHWSTdGNUx2rlyNN4FgReT2lpvWU8GIdT/epvp3rZSeurlwBtT3WMmWYbyNiZAyS4Sghc8pK6Nb22hgMcv21S5zw98aPI2MJNtb3KsX+Z0tzxqQyJztQDt5jShoFgk2muXiGDH2KXDl/hKInMR+o9iNTQzWV1Q2pJ6Qj21Y60TsiGm5U+eesS3cFksYdPgcL8JFDl8tFLj+sRMtC/ECd3mozMQmCh7GtQ3yjrFlBfRAGAWIMNyc/N2roqC7lIDUZ6ySgefx8U3LWohzSmYi8fkkAAEqEyJZEQE6uuPnfTsgwsF9nMI4u5u504liSJYT3N09/P3H4E5nxOWewMj5VVt93udikACuMceJpJQZEhRFn92grlvE5y8brlvE6+uWGZLsqpXdtN5caPn7D1dc1ojPP1RA8v7jSrVGHlekSMpHEs+c86RgOe/Impu/n0+tBYjjLuhzK7/vlwBfGAi23NIdKe+IR3b9qRbGJSu5zmWv+FxnRBdMvBEX7DCOAMeEceYmy0J4WVs25FAmoAwe5XghFWedyc9iooA+Za8819DyrADUTBGS2/H9LDPHShZZHykbl3f5WFvrviRFn6oZc127zba1RpWRaOBIxzC0fLX9Xq2oWWyFJbqiOru+ApQWHUAZV52r80ZwlRl5qj7XXAqDM+TxPBsxHIPvhMKUABHXDbScrzGNA9cl6LCACLOIVqyN9cy18yo/t+iWPUWadzu259HvvRXrOZNm+zC+Tut3d6wDYEaDnhDHY05/vmMsDYtma0bgo+g/nNDCOSzLM2ACZPKYj/KcLzKVZw1IpPUJ6CcTSfFvIw0cY3cttsDxen7HLWuWBYZGwESKVGHrMVoLlTzGLsuhDSE7A10UF6XdtkKeNBdPhQJPIVtfEammyPpI6Xxz4b9S5HBdhXuWAQj0ixk3pJjX4wTOssXbFraARafi5WVcpZ3Bwnpt0wkDtao5MmMi0/FyYH1slxWGZKsABQrIaDAiz8HX4ji1L4MzdtU6ZeCR3dEKGPEBKQe0bwmVDRGMSETKQe00IT2sBDoey2d10Xp9Jeajgo8V7z+sWDMbclkj3lw3XB5WxC1ivW6kgKwtGAGAsBAYCYtHSg4uOlwzUOHsUzLeAqiTZMwKjmZKwO+bemaZNSlHSpkZcbXKdHOPDb6NrcYjqRnKxhsFpcToWBHLRWsmwbsCSqRMrfkKlCwCmLTrxgrIUXcO7Yuu1wFAcAmIDhtSji2RBWTz/ZbGmuLCWhXRUWKQuk/bRzNwsuvio8ZpbtNo/xOMMXen3yTYkXVa2GWLwYgMXuexFmjdsVjikENumyT3DAKsO/DzlgrrnVIqTImuQ3IkcB1AA0bku/EUFy2u18Lf6Rra58CS0WtW0j+XA8j29EBnFhN0RBomRikj26Bo6NuIZmetmJZRLNvRGkgjl62SsjrfL46T8g5IT83Y8yK/4+VZA5IZGLGErZ011oxeVttfucwOtKliSyQ7Uj/l/r117SiY4Fz+ZfGOYtOuVorD1leQlyL7j10KStClp4rjMoA9pLWNE+FaIyKDVcIgkFu6TOl2MEPCbZUF/2T63lJ08LFxyeJju5QKy8IuV21n1d/FBUv2nqoOT03xQMxAKseAIG7EeMScbFMzNnwMDsYPAe58L5iRV3DLCf7Vx5ACxYqk5dQwIynk76iuWGvk+iA1k9YaKY6E0/lWl6yExxxHct1SCVh/c9lwySwJx468/7DiukWsl42YkZRwedyQDHaEQImHy5NsW
DxccLTdwJImP/UkT1mSXPmOSNXVoyPfd7Y+1jSuLitSNSXwTHgLdhfj83DVaatGRlWy2s+mdoLxXh4BJVJxYFBCF6a2c318jmRHOINZC0j8LtAIPry1xbZcX67wHpxrgAkLpeSVY1AFH1d1Hu+qwrwWF7m8TljTq1uqaO+O+1TdTpwL7Rwi10s5wXXPyEis45eMW8FnYFDT+uqMcUfBSERqthWvXiM8Z3lQvR/nqvtWJkuKy6IWfu5lKnDJiPA2dP4+na/ZP01ft+NB8D0Q4c2750Scx7I7c3Y17hs+ZpmDFUCwAIrdfnu5ZFyCVcQSrgvsn4nVh1YMTG1X70LG30eyB0D0b3m/+F7J5A38O75kk3qRJ8qzBiQWPc7SWGu0dYYHvQMopmRrEWyJVVtk71A8uPN3S6S7gnZVGO6jjmm6GEgjEQ/w2hKYwch98NmlBrhfyJp3v2SXLUT4hy+A0/i66yN9l8yIVug5G5ZfEL3IjKUYj8SMRwEhInAdIMU/MzK4PGD9//1mAwLcq4/B33+M0gqrbF6zNLztbwlCQreMz+fOyqXLEh8oEB85hbH3cKdzSffr7j9GIGq5I3es5YR0eg9wHvH8Clz48JqDUq+RgJ4MWqe6IglvrhmEZBet6xbx/mVDTMSKPGQg8vmHFVtMJXD99WUrzMjj44r1umG9bgWIrNetuGxRd1U3LZzJTQeIGaBEhIVS6YZTfbZqPIOHBCk+W9hYGIiUZ5UoEAApu5OgseLKwopHpcZstIoULauFIXlbthB3xSPjvK6IrtAuZepvHtvt+JNBh/y841ovvqZZ1jEj3fUfdNHaE+3CpY83iwnQLjk+SgW0BRoNgBVAhcfgNkalHS81yLAK01rgobSz9yqdity+VdxqwU8JRIK6RmJKxvdED//O9aBk2LaMeX2imJJEpFZZHjzFtMhYEgbl1y2WFNgWG7JXU8RSjEdARIIQbbywEtdAAbL2mlu2httSQYqaVw8yGCM2rRg7ZxLmbI4UvR33cwfGQr2OWTxM04zBu9+7Vsp3WoF4BUL4nQzOIZ2eASB5cdl6J+VZAxIto8xUUm6dh6Oc9IxJB8Cw1givOwJG9Dm5nTowXQpR0uNB7ojwZC2zatGEyZmQslLpQGwIFw3M3512neKAdS5mKAOzB1XOrarkbYcIFy3niW04nVtAosBE1yUzjYLbLGNc1HUAqDM2AxNR0FGL47gRn9taUvrmz9OZAtSlm5YK2o8pV11P9CynVJmSlF0iUkIOXo85kL0qENeNXbuqEs0pfNfyO+KyRWw58J3jRTQzMuw6L+JJvGAsDat+d0vY0h1RU+4mYa0VoKTfOU/MxrqZMjmrqaCzCcntRrIHTiwZuVqMrJO6H5kZsf5GmXpGAbDyOo6IDnC3Avo14NrQnzvmGBNaJ54FV4EGu3Jt+R5HzyBW1H8SLl7e1bgbdntrWWsbjNTl7XZSeBgYZym0QFcuS+i4hRV80Od8Mpq5aaUkQEmq7l68hz6y1WyuUtKdl91CNWhvWBI064C3AyMs8jHRIKUsN8G8vg5jG07dLIE5GxxdH1NyVBqmbkfB4No4vewoJhowCSujdH0FenDSnH/EJFrP/MB9bgZGXuRF3ka+JACJ5XJR19nsiJ6sRgHjwdVjaArYi3VdZC2Uy0dMzb4szbghJkR5jrJaDzJF0UqQA9qev7Vkde6WgOCA905BVGOnweW9E6WlPLsI9/gFctO6vga2lb5zJi0GC6I2SNLuVlxHgwv9IYMVXhdOJfsVACqseH0oRQzLlfoF6eThP/HJvCA2n25dkdiNKoTOLcsKmjfBw/m+xnmwq9lAnDGRNcAq90EBHc5TVXnun5xVK53uKV7ELSUmRFZZ59S+XPzwzbohRuDzl7UErb/JcSKvr7WuSHXLutK6C7lrvblseJMD19fLhm2LWK+xsCLOkxsWAw7qUvYVdvA5jsSHzK6dyFL/8fuluBS9Ogecl9C4GBFYAoJL2ZLpEVOs7
iVOPKOxnxCB+i4crcHTWyBbQALUCtPSRYWtwpEVMcGQzALXLYV9xiJYx9AgBKC4HGJGfBPUrkGftpaKMqNl2ez3yPdcrptto2UUJDv6Xj4Fk2a6icTsJuKSUIpSBxA0k1183n09R2Erdq5ldNVyv2LDchWIeIcSM2KBEQ1A5CNrzRu8nl0btzQPiJfHzzkjOoOWTO0b8/jDhVKZIZTb3QrGpQQFALWiy8t0jS/z1eHxwLgWX8BTW5umtl+B5QPAhJ9Buo4WZMm2WnKkGCe5eMl1rrm2k5f7um7fkYzc0uzMZnmdwVbxulOZD4Q3xzOo1O68J1fsj/icLzKXZw1I2KJryYyJ8MYAUqYZMdrxsbX1wRoQ2TLXWldVe5PVrraqO6f9nAdu8oWQJZAnYekruyel+KHngEuPxdMkwYwIx5AwAGFmhDJdxRaMlMsRsR/Nd9cCEcGUpHAqynhhXbZrex59bAY6JXYlpyAOAcn7yp7I7yPhgPtTLbzo715Rm7PrVGm3IcMnrQMkGfhw+2WK35ALIea0vkn4aydAKMKoYCUrCFx1fd2E8hBldfW2jkgteEjgY9siUkqlm+U75cTDzkpUjSNpM25pBZqU51As91oJr1m26sRaYjo4l++AIZklqRgpFHKXPm2pHbhbgYl9riOxGLO0wDOfbQtwSDAi+9WK2dFtOCI6reho/d7+uh16uTy2lfZUgzIS3wXPc1X4k/ccLAFOeW65M/GYL9kQ7Vq1N3zyan7MrO25yRqMHJUZGJHLytyR3besNj3R8F+AeH0X0gcCRKTM4jY0GNHzrj3PVUAlPRtqcpo+DoNB2R5bcijGZAaaTJlsqKcb0efN9R2UMTtibCsWWgxJ1aHaY1Na65ua9SIvUuRZAxIpMyAAKFpVvUT8nY4hJs7gOiWGt9MuWBxs2bRDsSKjLD4y64x0RdhPdQk5IzVsjjwvX79M0XeXix2+dwolte85Z9F6tWSl0ieK27g+AtsFLgetl4xWElRIVyx2PcrfTSXcOWAhtyWEM9aU9QlPQ/RyiogMNnLmrFK1XJ4rB9Sn9QK3PgJuhb8D0nImt6vlBCeybTmZ9YuzXnFcx/meMl45Ryl4ZVstZmcAUIro7RiY5WOVa/FLBh10mZzW97rVoofMkHDwOmfN4jgRrrou64tUNmQt3xmIvMlpgWVKX+cBHx2Wc+/r6h27ZdGks5wCXP48n4gBYWbkvXMozMircyiZoLQVv7wLW1Us5SxMISpJpNRWVv2dd360LdACEuv95NSl1/y8aHZEumptGfxJuSVglD+tYoYViNA9OXMqbum2pdxgaN98TaJeSONe02QcmtdRsWJirKB9q9bBnsxqIci+WJq+8AVIbImvP5KBBeT+1473lR2h75Uh4YxXQM9mjKRcct7GzOpmKJqV0UAp6Mljv8WYJP3Miu/ca8SK0L5JxE3oo5Vz82d3tuqmK5MGWG5Ts8DqkcxctaQlXjJg7X7q9+hckbdTDAJ3WGZEynwvlmtQsnctUqx+0rGmWgg0q+OIG6MzCLasCG9Tf8+YkZEcZUz0Nc10kw8Iq77I70B59oBEUszytxTL1YPjJqR41EJb9TitVt9ZVhl0QA8UVht7f3PLvxqcRN7Tdw2m2kJPtW25IeWc+rA8KZ+8x33wGZjQQHq3OJw9V2anauzu8oaU/Fz0UDMiSQWpNwyAVNYbZiMUIJLCmeIjtoQLK8XI9+b8HjyfL270yeDAL5SBCoBbL0gpwuUaJRUsODjcU3uWc8mIRX2Uq6VzXZDlREAkx3Ik4UKWlnPLZIhChUAFJzqWxgQtAkjV4GkgbZT/P2WL5Brr52WLiEAOYG8zaXF9kcdcjV1XXucAdpldi5ZtuGzEjrDS47KihgWdZdl7cs2SbAgDkvsTgw+PL8uA5NU54NV5QfAO752rIk3Pb312s2aWO4IfYVIssZErSm5B0x7LBXIvNquLLzNmTQuYbOJ90kBkdByWUQC7VrKBtoYIg4+Ry
xaDEJn7H7CVBO3aUeN2xBgyqGBtiQQiDEBmGcX2Lekte6ndtyo4aV3UzsX9z+fr9/AuIbhQmLfmuI7H/Kx4+xrfUZKcMKEp9ivKfuMG2yr0SSiOo8uNqQ7RKdFVtwS9vePosY5qXw8FbjSDcqOSuGflPgI+2uOpPhYKPB9PxyTothzJiNYYEYVLc0lH7dEjStjL93QJfW2yrTKmYtRVbRKe2n4WMx242ueDiNuY3esZqHrWTIj7IgS1u5eg9j159oBEypGAceAY9Sq33fMpnQ32R9rESkvJnMFKWyRlgQYePh7QW4rnxy8Tbp6UT4H/cpFDVwPaF++wOKCptt64TKX6HSJWwgsGRAeuS2YgAxHkDFIcmM2Scp89bEA4fTxbLeeDn+OK8MsKxFBdv/yFYkJShJOFGoE2PoSrwIezyd7IrF9dkL5otxT5u3k+Eq3jZSlVMGKmmc3MUQJq9XUOYi9MShTBpvbfGntluulDT1qWnBDZLWtUBDF4l8FHKJZry01LWvGBXjkBqvsWYspuOVVBeVxjc/+5b+h7b+nn75ZYhoK6rgUhvEyCERbtzjQCH9L1SRZStAPW22B17Y4VvCuW/eDb3P98LEuqD72wAGdGKoh0pCE4SuMrGJM9kNEyRceA2ky0a1ftp1SWrTEBa8R58YXpYav2lhIWA1ADlQHwrgUj3tXYDqCCkKY7XevDzwxHzMfhR8m7OShp5G0syQpwSIDjxXOdo+kaQMXjTVJuvmxUa2JuUmVK+Bp8nM+JQ6u7QSh7Nwc3FhjZy2gp64CZGTaVC1c9Wf60AMuoXUbbR2BkDh6s/lSgWhkOPGC6rB999fYAhTbYaldyCbJLu6Ldphd5kSPyrAEJv/gzpb8EQLp2MNODgy5maBUss2SkHAH9wOzFrDcsNOZdY+FrYlNKvYJ+JBkNAqMA9rslB7Dn2JFXJ4+zd1T08OHz5Jq1XWrgepaUU9k24EMwF5o50G5LnMp2i9kNKVt9Fo9SbyOmhM8+bHizVmcIBxSXsuCp6nHwwF044xQA5xe4cCLwsV1rbAkHxG/0yS5n3L7oHLliMaujs12FpQSec33Aa0YIHOchZfbEWOM/gxMJSNZYP5kZeX3ZcI2x1Be5bjEzJLXw4ZvrJtyytsKMSFctViD5eXHOwQUHrinixMTvs0Wa37Maw1DBR40V8YUVaVLSBg9ZQMuSaLw/XpiAR+4Oeh9aMT6PBTRo+ZwNkN9lMHsFIBzwkturQNErAT74U7pgAegzZ7lx4TEr8BeYuLEAJTYGMBjbUAP6OYNSTKmAEyuIn93ULBe2WbD7URZF9hUDufMSMwhemjiUkI0p15hd/7IhpxYlpP47BXoeJRCxKqRLhpCXpcTMSB2RUiL7TEqJsHTejkEJj17V+NBe41G1zbqr3C42RnlXD9i4nYmTSuA0ukU+GyZOobrD8fUEdk1U6WuPGgJZuudWMHzMjmh36pGMihs283mT/AUAXFHuWaknw2Mb9D7OiFWfz1EAPv+2XMxGtW4A27go5/aTjoHxrrsXXejJEwDCaCzpmFR16OjqWPJOCxtIP+pzvshUnjUgkeJd/3J82GnoWj/0Yy8hMy5WwGezTFgjtE/rbICxaeb6yfUfQp6og0dRgDi1L2e20mCkccfi7FACiDQuXPITreJNCndWYhKt3URe+JiAS0z43GPNrgUAX3a3kAU0OQTHPr8E3k5+AZYzxZMwQxI3alN296KGxBoDw21TzE5zTZkh2VKrlHIvW8rGDJgWpYdxb6quWlx5vbIjOaA9irS0mRG5xhq4zqmA7fiGudJds1XV3xy0zhlU2liGCkgkGNGWfP47wkZaAeOcDtaSlhXZPXy3nwYiRwCJ/n0kQF0vs1gQyxVLFx4DWiACtMob/+bt2mvO5xbWVR+FQun7beFT9Qny9Vo53a8WC4jcEvy+tz0Dv1DuVwTgy3PexyX1hiDv2/ewASCOGRJaYD2ylX1wbbYqRyyEjveo7aFPXs1b6
ettwLjRgO7Z0jg8yethINReL40zvHsqY09zHlcuCpwsxWdEcvLkFhflgcBzmm7PQJFXz+mMHZFisSO3eDrwudmVuWELBZOiA9vn7I29Tgfgz5gd7YGht2mMpIBIYsPrUzOXSJ1Bnv9WUCJrT83Wy3Po877Ii9wqNwOSn//5n8cP//AP45d+6Zfwmc98Bj/5kz+Jb/u2byvrv/u7vxs//uM/3uzzqU99Cj/90z9dfv/Wb/0W/tJf+kv4F//iX8B7j+/4ju/Aj/zIj+DjH//4TW0pE7KodCxpRGAcBNcwDkAX5FdcYNiixJOhcKfRMsowE9RECCOFX2mT2p4zb8WsjI7GRz3eSKspFz3k1L53OXaEgtopQPSMFe7xgdL5rg8QKZeIJVgyc7Dc1dgKUbyP218tcAnSnign5gI6cqC2B1kpqQAgTZafe9jwWQVIrjHhPpAivMYEB4fHXIl+8cApkLPGabkrVk8e4Eu3SYClgAmcr8Apf24x1YxX+RoYkMgJPSJ11sd6r2iBVHqae5f33SK5bzFDwnEiW6LAdY4XYUDysEYCKHGcSYtdtaSwVV8GNvMzJ8GFBhocTN1uVwGJZeVnsYDDrIYBgPJc1WPM37l6bce2n7lgzaRhM1lZNvyRddA6g48g+vHkPXQ6WwaCHMxvuYDIcc0qDGsJP8MyzXHJLObFmBcTro5SM3vnqlvUpD/b2JLjwGTvuNJtqz0XgZJ6TlCts8zw+Wx8OQWfXVLJPXXxdVwI3pV30rnat1YvlnEMyLVygOhSp9DzpfDzzAYYWsf9PXA5ZPCoGrBXuNF2hyQwMXsi2NjBNYv0OU+BrjU4YsvktY1iuGRtId0nbyM1A2Xtn5j2gs/LxYgD1XXS0BeFIs8uf6NU/VLkPEvHbT0y9gpwWr+lnNCOjWwnKMwYJNio7S3Jdxqj6fg6RhIH/uDdeC0A3eN6e3KLF3kR4AmA5Atf+AK+/uu/Ht/zPd+Db//2bze3+dZv/Vb8k3/yT8rvu7u7Zv2f/bN/Fp/5zGfwMz/zM7her/hzf+7P4S/+xb+If/bP/tlNbWE6HsICIAcDwEb4PJBZAKBxZzDAiPycyRYTIKxyeuAq7qoTS1OxfORjBcMSpdstj8WsRy166OEdWTpPrGDmAdM9PpQAdqyUkapR1jMjIgPSrxFIsQaks6sHgCGDEFFTqDIgofZWJfwaIz5/WfH6Wv1vZFwNxU94nLxHyhlTFu9wiZT6cslpQakycu2PfFHCNSM0lsYWVFTQwe2vlvW6raWAaKnns62vHEeyZTBWa5CQS9aWUNy0rhu5aLEiIQsf1jiRGmysGZNGiRbqigQaEmDwslmsg9yOz7HHTr4NW7EXSL63nQYfT4130OeTv3W8SBMP4WpAOlmd0cSHLMGZik05x0DB0bWVmmsUYxkpl9UyvHlKHe1jBoHZsBNF3IZ1reXYBvC9JcD9aP/rbF8WQwLUWBEe/07BVfDhcuxI/i2t2TSVDCzCZZyqrlkjYf60YYMFEJHAsM0UR+OWrJwkWYorRBITUZ/n2lSpT/Ce7iOzbSPp3sHGUl/nLco9UcffE1wBKJp1qzFKroDbkRHtKDsyk2M1Q+hTsw2tFZLvb51jpVsXYHsm6KBvzYzYBTj7625jRJWY+j3fs2MMiFz9tizGrM91ptF3VnSc60d1zheZys2A5NOf/jQ+/elPT7e5u7vDJz/5SXPdf/pP/wk//dM/jX/37/4d/tgf+2MAgH/wD/4B/tSf+lP4O3/n7+Crv/qrD7elDrgJ8K5YC/QLb03SUU0EUprJO9lV1mW9gnIedV4LjHTWEaNxoxdaB5U1xzGsLsyOcEYtrityvzi8ypm0QlrhrpQy122X6rIFIDlUVywu3rfcY0sEJrhA32VLBWiw6wKPoZI1kD7VKVEGKZ6oi+K9kdX/YY0mbUzbsaWsAhOOLXHI7i6oSgfQMxMzi6jsfQtQNdekgIi8znIudkfrjs7n4+J7pGRdYyx9UliQ4
qaVLdix1hxpgUdsAAiLVIgZOMh12hWrXRZMK79WVKXCXftPv1u90jqyrs/Szcq278mtVvrRMfeWL6pPrHiQrrCfYkYkQ1JjIGwDiwVMRrW3ilXeu7YAZFY2Y4o5gUbMx5xXr34KiGMwfMv2ViyJXFf/KhDx2Qjjc/9yEDsDFOcqM+JdC0T08DoFHmVsqIkp9LvM7MOWkiq82bJVs4xOXCAUop1yPUux0ksv24FVnkUzG7JN7TES/6DPnGbes1FGZakqxj7Fbh0RaYDjNrZGQ3tO3xO9j8zIJeeHxkgom+1tgyB3yR4rcsTtrHSv1CsC94NmRAByd66eIjMwMEoE8jYS1WMBYOi++CIvsicfSgzJz/3cz+ErvuIr8Lt/9+/Gn/gTfwJ/62/9Lfye3/N7AAC/8Au/gN/1u35XASMA8C3f8i3w3uPf/tt/iz/zZ/5Md7zHx0c8Pj6W35/73OcAKKuDoxfNUvC1jKqy1/VVcbbo6SNiKRIzq6cUCYT6IDJdhbjfny2wJ0/K5XvnkAPXq5vWq+CA7QL/8AUgrlQVfbvqI9Wg9dMdUjjjccuF+LYalP6wJdMa2PZlPwHzssc1FjbkKnx7WGG7yxd53SIQPK5b3Z5cMqi/74UbDPWvtPQpwObbXPe6H+XmI7uGBF3ymenH4yT+536Jsl8qKKPrrAoMV0tm1y0OZL9uiQBdJPcsaaG2gpCBCkS0O0zwlJ6XQYgMTm9ct4TFVcd3aJesWUXnUXyLzAQ2A1dSjlpZrTit0fHktQTvxfK+2noNuBZskgIg7HolAchSGJKa0Uh+12BEAw39XHO7RleXRB8w4A2OnjWfErB4GneCAzYgDpQvKda9uVXB2Qsatr5bLB0zTcwunXybOZBi5XpmJDjXxXWxWE3j9zsK40pN2V3Xa8MCjXNbGQMbdznD4AW0LMUsVmgITNwcwLDsJYXhzyZegQ+1CWDCXgcxQT6JMhZj7z2MqV4bJ3fxDo2LtXbdaq59h5nV13VSyxm0t23Sc/DomH07LCAyAoneo8ybel2NfSEdxyfDVavsl5pnt+gxaiwePXdHZBYnqd3/3kVJujTBR3TOF5nLBw5IvvVbvxXf/u3fjq/92q/Fr/7qr+Kv/tW/ik9/+tP4hV/4BYQQ8Ou//uv4iq/4irYRy4JPfOIT+PVf/3XzmH/7b/9t/NAP/dDuudmiJ+t0aBm5YWnRYGBvntXnHsmhrBvZ2kEDjX280YTEx+WsMvcLZdGSblrBERhx24omk5auiO7ppS31OZxvXYgyQ8IVw9mab1n+9HKWLVVXJDlwysrL65YQXcIpgxF5b2KKRbEoLg85qHPmvyuLoyXfB5szq+EdSuIm69ZKty0JRqzHRYMQoFqupR8397FUXCqAqy5vRxVCrQycFcvBln2ZsrdhSESmrOHz3RTaszeZARF5PRqMHFVyZxb4vWMcUZg0E2Sm8c1g5CTdrhQTwuADaAG1BCblWHkO65SXARAZDj+p7R/viRnxOUHYVTyxviiytysWGrhZjJbFRllgjz9HfyUtsuoLnebXuZp5Sl7jMdW1iu4NrfSlzJ4mtIC8umdBLKvGBsjtxfNX3cQcAo8xYmwr51YXUlISK8Ysutvup3RrfhuRTNutLBnQghQptzAlRxLceO8MBqKCsSrj9ltulPTbBiLa0GAxnDH27edgfAZp0gVzJhKM6Ht7NDGPJfJZfRYuWy/yTsoHDki+8zu/s3z/w3/4D+OP/JE/gj/4B/8gfu7nfg7f/M3f/KRj/uAP/iC+//u/v/z+3Oc+h6/5mq8p8RFtMUAbjFhuWFUpHIMSPq485kgp61gRZdniQXXPtzTGBISWRbBEH0NaVimAnZSbVxmUfPwccPKAv7yGu74B4gp/oU8NRtg9C35BOr+HCxasa8LrK/XfmzXizZVcit6/rLhuCQ9bxLoJ5VkNfgwYpItKcK6krt3YmhgTgo/C0rw1++nBVCp+8v5Ia6IOFD6FqiTK78yqSIXPQ
WXkEaqMTu0pZdPKRgM6kngWk3BXq6xcTLU/WHFhpkQq6xxEuOd2JBU5oI0NsVgRBiJaubZki9IqFwFRq0DHsoxiAej7fjpZ7bpjfT9quR8pwXVZ23cAupgZCdjYUn8KlFZ7BD7086jBhwYcLPKXZdnXincS1lMGJZx9iWMSyLJKBwqxzSIlZQZ2R6LXWzEfJabJuBcyUQI/r6/OS8nwdr8EBI8mjTnHy/F7Tc9tBSjUT3QfnLOV3RZstEks2G1Tx37FVJ9RyXKyixa/3+x+Kd25rBo41A/0aSqzTm87AK0Gq1K2UQvltnJO0SLnpFmiF6Cfi3U7ZUxniZPkG9DETvIkXts+AiV7bo5lnTWXT55pKwmBJVaiCc12WiDJSmMcAp8vZcCEQtsXl8uYgEhul2RBI2NmRMoMkzymZkzs66vXuWPMUc/k80j7+xJD8i7Kh5729w/8gT+A3/t7fy9+5Vd+Bd/8zd+MT37yk/jN3/zNZpt1XfFbv/Vbw7iTu7u7LjD+FhnFhBxxxbL8LveACa3rafZ+mwokaDtaXnxbi9/wnCKVE8dddhlhMHIOLtfvoGxebrvUTFoxoqT3zZJK2tsKTOAJjBRWJCU8rpT56WGNeH2twdZlIt56ZZwVD84sRI33pdjflmp2qCbmIXgANVBdBs+XfvSArEGh06OeYq7nUvxxa9/L76fkaJAvFv/KJjjQs+RcG6i5N/7yM8RMkoyVsYBI62sugKmR3teKs9DKnRRdbI+UPG8sq25H2tJfziOtYdmHnKyZDpsu3inaa8eOjAPxj6STtSyvVjzKnsjjWH3YBasrRoSVYJlEQoKPnhEZu18BvRVfDiW8mQQhDNjr/q6LaSopTZlVFEadD0KsfrP61FqmY0Z6ZkQDlJypLL+/pWYO97F3HUg7YCyfSnHPEssYrND3OvbFKILYU1Xea9rz+m5rN0c6Rgsmou/vJQuPG3W+oXHqiraPm3krJjSApehMDkC+SKXQazAiRbqg3SIz74JZ7KQUm4Howcg8DmfQPuFKVa95fqEWK2+dd65HWIApFVqs6CI+u3GJ9/kIa7IJhgWwwceey+wLI/IiH5R86IDkv/7X/4r//t//O77qq74KAPAN3/AN+O3f/m380i/9Ev7oH/2jAICf/dmfRYwRf/yP//Gbjj1yDdLreL22Pltgw7JQlGMopc/aT1t5vHeHBlTt7+sdykQwAiPaoh+yNdY7lLS+51AD2P3j+wWMuOtjLRyYYq0hwpm0/IJ0flXiRh7WiDUCjxt9vn9Z8fnLhsc14rMPV8QEvL5uuGyUglZa7aXiyYrE4qnK9zn4so/uY+5nmQXmYbXX6xSxpX+yUs20f4i0LLLyEv47nN8AAQAASURBVOv3E0XxI7qU0106XEFMDaKY1FNvHeOfVmxIz4JIy2iNm7GASOPGFdt0vjXVb9/X3DfBO5xVf1WFLpR7YblnsUsbK9ncp3tyJGhSsyJW+1kYBKyGdZ2Bxh7gGAXDrzE1jAuva9ywfO/aJoE1u0d656pBwAtwIgAJv7O14nq12B+RlKpCXYzIwhWnGF9Su08SfaBjG65bdRFsFGZhRdUyc2mbyZF4oMYYIQCzHDeCJ3fUu1AzwnmH0u+nvA8Pox5jw3ftx/Z3SjY7Qt+TcM9sXbXYJatcszEXMaPIYOS68w48pb8tpVfGflkMCh+TmT6gAmqrj7SXgWZKbnUDaubPfECeQ2U8yWgcssDILHZjzyWyXId4xxgTRu0rN5Aj5zjy7jQIODfiBA+f39PIDGCMgHfwSQa8t88f0IIRK9bP+q1FGxU2JKzxA7RuvMjvKLkZkLz//vv4lV/5lfL7137t1/DLv/zL+MQnPoFPfOIT+KEf+iF8x3d8Bz75yU/iV3/1V/EDP/AD+EN/6A/hU5/6FADg677u6/Ct3/qt+At/4S/gR3/0R3G9XvF93/d9+M7v/M6bMmxJ0cCClTjJhrDS11qc6yAKcLyAbR2dvagMTixrhpX1i
84lfJ+ZGRGWlBmrwrL46nLAvtLnbKm9X2pa32V7ALYV/vHzwLYSKNkubU0OADKTFn9eInCJsQSzs5vWZx9WvH/Z8LBueP9CQORNBiiXNeLNZTVjAKSy9+q8mIHWQHXV0IBGsyeW24/lf77lSYEt2TE4+JjvweKz37VHTOQedr94xJRwghczEJrjy+vSFqZRbIgOWNfPo/wts2hZQOSybt216+uWy+izTeurlWsrC1TzbHaPpRPvn16Hrp/07z0wwm3fBHC4pQDfSPaCa+W5tbVeurIV96AMamtGOy8ASVXyLBdALfqStE4nfzs33l6CZMttkBVltuTLBB5a9pRki6HS2+xJCwL71NLyWeW+v19CASK+3AtPBC/22RAL4EmmI4nttKtWTHU/BnxshGADRAtMkgFMBFtijJdA+7xr8LwnM5fE8unaZBUch8djwDWzKHuA7m2ACO8jXbfKcjGHsmGJg9xLGv2O6ZjHDza/DWaSxYHurUyR7llvcO29tGTPLWvGhnbHKmhQHiwVtuQU6J2mFM0EUKICcKzfSBZPgpHNGFuPjLHymXrqmPyRinMffeX0j/p8z1BuBiS/+Iu/iG/6pm8qvzm247u+67vwj/7RP8J/+A//AT/+4z+O3/7t38ZXf/VX40/+yT+Jv/k3/2bjcvUTP/ET+L7v+z588zd/cymM+Pf//t//AC6nWggtRgSolmagn8h5sAPGL6Fe7F21/MsBlWQ8iJf91YBqxTBI4V/O1cGu+Ed74D54OAfcBV9dtK61voiLWxszUg6c3bPCUty0eEK9bjxRcp0Q/iNfaMmKXNaIy7o1CrTsT1buWC5rq4RY18xiWVBH28yOY8ktY6gVG1KOY9QakNmz2OVNx9pIyyJbTflc2t1pjxWxP32j3EnlGoBQmmvf6ix2Vp9pK3qjZAnQb7lrWddgTYL6XVy8K8+VBQ6PiH42etegPpOTdmWTYKS6Z0mF2TdsCBsQdH9ablUsUs+xtrD0oBFQjrFdJutgSOONvG9W/9BvX+7fkXdxvsw36+V59LOqgSD1ue9iHSxFMCYy/qREKc2Lu4tSBXl5ggAdou6QTPELzFMDWxJFX8/kSNrrPZH9Ld0N9WfdzmNDQvSc9pkYuOhdp/RLeZuA6Jk0c/KAGaE5fz+hjJQ9ptcZ3/kKWemW8aq3uJSVNhhgZHqYDHyDdyYooXaAgEhKHTMykyPMyIiFXhQIeRaA5EXeSbkZkHzjN37jNM/0v/pX/2r3GJ/4xCduLoJ4RKyUitINBkCn+EkhMDG2BluWoDKU8OSB1FiWY6puuTwharHAiLSkelf9xL3jit+U4pIKAVbL/6vFwaUId3m/1BYp7llcgT1FuJQoViRwocMTuWot90jn9wC/4HGl1L6PG6X2vW4Jn7+seMyB7J9/JIbkt19fscWE9x+uBYi8vlTrvfyU36Xy8d6Z3Ic+fn8yLfwsvPwklJiYEiAC/7p9rEksUvGueg/67eUEIt0wRimNaV0yAEkFITJIXQKP5lpQlULuT3aBo992BqrW1SV0fSyDsVm5u1907Yuaklb3XQu82vsZE4P9Cvq5/UAOxh+AqZFlTosEH7daioHeuqwVs5q6NzTPJQC8OofCIhU3oeALIGFXrSaGK5Cqyx4vThgouFtrl1Y3FS4SKpmOPRDMMsqco59PXldZkgpKGuup5QpZnqlgsl2WYUH3NWAryM134XIp0ycXVkQxU5xAwBvZ4CJQXFicAxArONRgkC9JsiJ0P2oQO2+XUpvufI99mulqluGBl+s+lvtY33Vfa8OE7vdRIU+LPWnOsaOIW6/pzFCkjXpWkDvPucGNPRCG7dFZrQzD3+hoEpjIsUj2ySzWUwrvr4HI7FJi3j4lgy3xqRSvZHBGSX7oLCtSB4Qto5H1aQFjKXI+X2PCuk4e8ndFvB8HDH2Y53yRqXzoMSRfTGknV1o284u2sn2U937wjo3YEMtqMhtsZAYYC4xIRaa41AjXmuBoUnE5S
L0UOlwvwHaBSzmAfQQmHRVA1OwIKykp96UsziddDS7r1kyogG35lt+ZWRq5bR2VmWVsFnB3xKqlpRSYQ6vssYLOfvkAmqw63JaivIug/1Eg4VMpdK3gSZcjAI2lWT4/QH1Grb7p01+OLaOy7ZZlbeSecot7gNxegg0rG5e1L/8eKc5aOS4ubb7GhjArUhNLVIWY32ELiLD0wKQVuViniy59YBhYrFpLo+e2vOcNWLHbw+wUW4mli4YGIvpzZqHnT2kMGLkQepeDeB0OMyOsx/J3jhMD0MXzczdaYEQzI9xNmoHSVdifKvWd72Ps9kC8HE/rvdoK+3fk3BKYIDrh0kzbjBK88H2Qc+gHLZ1r10Gm4sOUIyzNkfnNyuTIhIdzKGxJc1wGId5hm6QA+yDYLAv88vNyNIHIi7yIlmcNSGTaXyvbB0sZ1CdgBLAHE/1ujSzwRZHz1aIklTsGEOwaU7I6+bqM92NFhr67hhXhycE5jhchBfPsItz2CPf4BWJFLm8IkMQVuF6AuOV+2eCWM1wIBYRQ4cN7pOUO6URxI1sidoT/3qwUH1L+tog31624aGnrXtdHvs9exEDk1TlQgPvi8eocGuVZH4PlGmtaYFrH/aeSDURbseGgTVZs9D1mP1wA2LZ2vbQslwxaDEhUak+gpuqVcSGXdb/on1XsUPeDdqORfWtmKXI12DoI5ZoPyUo1MA9gvYp2sXVd+sPzOovZuYhlfJ2l7wdWXnYL2AMUAJogfn2c2XJmldg1Swb7k4WeA6c9luCaAGoZL+JdLcYn+5HfYSmdMoxqAEjg/k0qLi41IFiOa016Tw32DOMMb8fL5P3T/SSBgzY8jJiPDhwrwMHfgXYclYxdBR60klkRma1MJl7g60hwlIACHJju4F2q1uj8fjs17rMHAAMQ7jN9bygZQJu4wqolJN3htJhpVjtLdTTHgZElW8qouj0Q1b1paxPx90X9tj4BdPe1jalsxxPrPR6JNg5aVdw5ExhlACtoUiAhwbpwILrKVha3lumw2qffi6co9t7Z41gZI1TGPDqPXFbfc8ftZCBsOHWWsVm1dVQr6ojMwC/Qjzsv8iJH5VkDkiNyFIwcOtaOO5AeoGt8SP1d3bNoAJXMiD6Gd3VQ8qCJ0zlSvh04aw9w9oC7PBAr8vg+XNzgrq+B6wVpvQCrqMDuAxA3pGWpVdgzK8J/HN/ATMgaU0nNS4p2rhSewYl0LZrJSJm8E9XBR2BEy1bMnsgKtTOtYz67crW1IGodlNIWy60rAZbnfqto1OxZAIEPngQYiHC6X66szor6iCU4MhnydlqR4H2le9bI/YUV7dA8k63yoEUrr9Qf7A5gGAHypwStFnidgVi2umlQYjEbo/4byWi/BswJNukUfKkGzu+z9zVepLhbuqpcOPEOHxHuxsJCphb4Wm6BADrFd2akAdBt24BJdpnxpBBtaaxEWnVh9PNYs4q1xgHeVi/T9ZvkeNrWbqnAhPYT18TPSsoKnMu25kT3ZkN+zo1xnftG1hkagUQNPqj/WjAiP/cUunZ5a7jQ362aPs3+4n7wO2SxgMycbLFlui5qu704FLrOPM/5scfBTHSCGGt/mXWrbgeMYkk674dIz0rrGkbfGTSM2ibbdVQqY5Q6UKKbq/uoApMab+bc7bFLW2yBydsyeMNn9gNgYD5seanU/m7KswYkNIkhm06IhoendHcc0HUKXrhtHVdU5CDAYmfMaCdUua9mRmrtAdekAC3XMhitnSMw4h0FYDJDsuTJ2K0ERsA1RuIKFzfE9QLEiJTZEcfauQ9NAUSEBSmckJYzNnhsiQAHxTyQ5Y2D2GNqXQH4j9kOrNX0wgqltuzzPjw5MjvCx7EUTn0PgmCVgre3kfdIpl4trjXO3p6UDWJgLEOSBiLMiADIBR65IFoLRC5rLEp4TXPcWj+PKtYWEBkxIjP3F2mBbvvAPq+2pMdUYw10tpbqC9+Cr1FK6Pb6WpcgC5TI/tLuKXv9d6Q/b
9mfra/RkcXWeXJv8Q7YslV2NPxI6zvQK71sHAD6+jVAZed0jMJRxcAy1GgFip6VhM3392v03M6ePaCCFH7+mvSzynBDy+tx9bpyLUrRlJbv4GucjhOGBj2sy+7grxIcsmuWTFahmdIOjAgQCWhW67gCNwIjQ0BivCszoW1LYMKTjhNCVu5jBSXWdcyOMwMliG3WLa6XUpfxIMWXwcdJZb13DtsmnnPxPHhv3w+dzXbvvjXuZEh9CEGkOTxjq5vlVlDCz39MqRgaAByylFhumreMjy/yIkfkWQMSFgYlMt0dPOgTETE5nPyxgT+mdmDPuKYRadmTrj9NfnNh1avgQ1Zt1u4L2VqCGjuiz8nsyOLp3GcPAiGXN3DX13DrlZiRdUW8PCBdHvJFbQRCMiBxIVC8SDgjne6Ly9bqluKidY0Jl5iwRqov8vq6FXaE+6O6tYROIZTuODx4yToOQK0xcF58yabz6hTgXet2Ja1Ytb9QvvM63Wfcp8wISCAiLa5SZJa26yDzUY0XqdWXH9aNgIlI7fsmB/a/vlR2gFP1XpRSbqX21O4T1jJmmCSYk66BIxBC61pL9Eika48EHdXiWwEK3/vKnG0Ng7bG1F37ByUWSJmDOu7HNvahsfi7/exCMabOE2TNdWtStswD9Gzp5hQQoqzvklmb1a+p1vcKEOlc+/2qxytrPV1/ZQlO4L41tlc1LiwXLH4P+btcJ5kOCTpqe/pz6nuzpVRcLOX7O3u+rXCKWYyOxZDSPi0I4WXWPXoKW6/BiGZYLVBiKZGbAtuWkkm/K2tCy+p7IpkXoGUUL8IdjEFJlf3Mkyw6PkXGjLDrFh8TSNg2mvu5bkl0DDxSY1yqdU3oGemeK+PZtkIyRq5J/Mzy81cANQOAULdNqMBqr19YF3ElGfFxHFPH/Ar0uvNxOmTfM0TWMmDMkrzz8lKp/Z2ULwlAoqVWHiZLigcNHl3mldm7lJ8d+cLJF5iVuwJODIVYumXJ77w9HbMFI1J0hWEPLvRF10TZsihrFjYjlW/ZsTIjbjlRjRHhqlUC2VkBiih/HAthZYjRluSZdX+v6nJ7HIDTGMq+qv1lKC6DkVmyUhKM7MmwHoNwiyl9k2rWLB0jIhWHPbelco6BIq2XzazSzbULMFL75RgYkf3BzYypVbJkthbLjYRlFBNjSadExZ4t0X1xZBK1FDFr/RGGJMaEEEi5OeXYpSgVMAlK+JzGJY+s8G0WLPpuuQBZNZbqNcprq9+teg9S2kuvzwkrcj70+/A28hhBPWPScFM/a5s0EDkCQlhmSv40dnBAXY2SBFhZymR8DjMi8jfQug/T72GThjJ27eoTYOjnXz/7o7gsfVxr/dtYyFn5PnoMzZbw/pxhS7tvtXVLjKxdqg8lUGmWT+aAmTAAKlKOyyA1P9uh9kFKHPaSCnMxNF5MKsQH5zLIchmQAQB931LLEiKgMlnq+rRRh58jK6kFb/ciL/K28qwBCVvUpOIaHLlsRUfWvPvBvjoLjbZscRwFfGPQANAqenoQ077O2s9Zp/YF7NR/0v/c87Xl9L7FVev6BogrBbHn+iIupeqitZzyAQPgPfzdq1r4MFdhb6qxlwB2in34wmXDwxYLQxJTHXjIBa2mRyVXHN8p35c14gwIC75dDwOAGNxqPzd9O3E7smKEJHt1n7UxvhdW8K8EG9LCWbYT6xh8cKrbB3G90i1ri7EJYNexI1qJkP0ggZpkQXJrsMVWaS794eu1j1yzZkBOzi2sXGkrr+wDdknjVM9vLhtkAPsRJmQGJiwXrPKsDADFKK5BHuspClVhRJDgnQe2bGFfY34OI4J6fo8APq3kAmP3LE4qUNwFSwYm3t+2lF9jHXsoPTm69mngOmr/rV1nsZMMNorrlljWBBgbLAiLldZYbmOBij3hNujt+2PX9kgAUpYZbCK3yarVc4vsjSFHjildIIHjLo5aSR2xLTXfvetctyQoYTniwiVjdIJz2SUPgM/MFT8/KXtKA
GDDJIAh600HVec0+vCoe10DrLeW/dsC8hxU3bbbWkUpt7rKrFZR8C67rOW5OTlcQVXcsVRm9pTHkVO+L1eRuU2O7yfkWCKXgIDyjJb7bTxj0kAUtQ/1uygvDMk7Kc8akIxEB4pbckIeXHwuIpTIcuITDZZUhbVaWQDbPWik2LVsSFWkdbscxjnIfQYjHMzu8vZ0bBBDEjdRWyQisb+C9EPwHs6HyoiEU82uleuQlNoRZZJk96PYuIbUtrElOA96YpB6XGOm9yPOC4GU8+KbLEbafQsYT0jSPUuDkVPZx1aaam2C1uIqQYm2bgIwLZz8HOjK61wYUgMOXUmdC0cCaJIAWAwJ9xPLKKPRqN9GYMSKc5J9wOv0/ZZti6mNG9HKkVUn5dZUkNZ1jlwGjlhwtUWv3e5YmyLNz9VCmwD2Xy9GxpgtnZ4ZiGPuOUfrhKwiXTQvYwB/jeNnqohydZQiwYhmfFvXqadZxiUYkW2Q7+YRECeZGil9nEa7vNs+2cDq2i9q9qFj9+NEs1wADXkrJBgBbgMQLLM02tYxtRVbvgMMSj4KsRhvKXuMySjFcEyonazcuOpG++0bPSN722hpAuRTdRc7ZcsmGTTq7xJ7BpTMcLLJ3rWsyOhRke+F5cZOc58vAPrkfTG6InIhzHwMnxBQ3fWOZuR4Sfn7Im8jzx6QNClem0kzL5tMcDIokVkVspLUoHh+SelY9TxWAGZd156nydaULarcrgp7qshUvxzMHpyy6kr3LM6WBcCdzkCK8LlfknN1/XJPDMnpHun0Cmk5Y4WnAPYIXGIuhLiS5f9hJXbkqiZ62Z/kukFV4aXSR+0MZaK00tByPQwtwffuDLO0vcNkA4qdkiJrBRTrUBwX0qwKRGvZPFK4ULImI6VBfrdo8AJSuiu9Xfb0D9kfAEQRR9k/1TVNX59ludXyVHcPyxVF9pv+roHI3dLOrLSeAXTrygKgBHGX020AEPPzFOG9Q3QQioBrrKI3X58JTOr9sFgq2qbvjyNSjCY74FUCCX1dfcxXfw3yXFpuLW43cpnqf9ftNXDYk1Fz5P4mU2MAFDl21Hbb7MbI3fFtXGKscWUE0KVUZrE3Glluuvr3rcD16Jig2RISxyvpQ7AiQT2fUpp7qPp4dK+5DVosVk++S4Xd6RgSYi+9c6SLCC8KoPaL1TvcCmZJ6Ae5hREYont39fTMnYx35LpFRMGanOCK0Y2LtPpISS0AdIkt9PO0fkQA90W+9ORZA5Jq4W4nyRo3IAeG3kXjunE101RcIYCYrQqxUL7a11qCkN5la9ZWnqlrNh7dpr7uCODBygIt88r4k5wHnIPLjActjHVdOCM5Vyqwp+UO6fwetgQ8rJSyUtYcYRDy+kq1R9g9RF4/9yldc/7McSfn4HFe2jTAjWuWqzUbZL9q0KN9h72raXtL6lVDWbJ80VlqkUK0A3KqFub63VYedMD+iBngPysAFRhbR0dMAH9K0HdkEtdsyExY+eXv9NkG42pmpLqotYzQ7HqAfQVk1C+j4+2BEYDSMlsV2+neuRKfwvsW15ONLJr0fPri600TtUpXm0nKY/EZrVhK0F5g9EjB1cL9IGOMuD38zljvlXynZgCf2iyUHYdafDC3S76PfSYloKkZofrQCi7XrmxbqqB5FUwmUNm9PbHuj3ZLaq+9fu/aKEAIr9fvMn/vjRZvUSxiIEfASOtGa4MR7RqpxyL9rOnvbytjgFGfmchsojsGNDSonG07ko6B9v0YEZMC+ZtK8LBBvXOtHsPH7c7tHQIcfB6zpPsmJ/WRoL0YZL0r7413Pl8zrfMR4Po93BeeY2AEQJHP1Xl5BoDkxWXrnZRnDUhYZmDEeoGlPl+DwFpWlwaAVI5Zl9lMjCUMZGrwHeoxuQE5RSg3jNJ/ulJZWIo5HPKLFRakGOHS0mxb0vpmIFIyaiWeBFEGI1LAa/aoq1B89ETuXfudBllHZU4SgBx8oxULK00v95UUDmzneAhaVgf2tq5IC
0JO4qZI622x3HYKd1UU6nfaTtYOkUoDALG8gi+ZQUqyJrxM7levdZz3Xm6zp0BwnJHl0uCdeB5TvX/thNxak2Usgu6XERuigYh1Xdb1WqzQqF/2lO5bAZBUzrTCVo6VQQklsqhxHhsSohfjjDi1VoQAiHFgXyy3weZY+dku93sy3/HzQd/zMsE4NqBKgRF+/2TxQQ222ALsXXZ5KoMp9ZX3NA5qYCJTtOoxsusPwxWrvMsMgtIYwFlxZpZYlu64c9NsxVa32wbpkhkZrf9iiM4+N2NFuri3Zu415lDX7m+JZt6Oip5zZqBixmzpbWi78Xm5ICS3PTiaGOUYEZzDdavMKrt0sZtgTBtdd9Zhosvu5K71wub3awRMeJvShz7BwxXmhDAFuWxl72rA87vbu3M1ykhJ2kHHDmw9fZEXeUt59oBETspaIZ1lsJL7MqXrffW5hHfwkBPT3P3LCqLkDEScehBAZl1q8ajgHGKZ6F1O+asGACERND4EBiEAHM40hQcgLZkhYTSeY0YQFsTlHtdICvJl2wozsiVy03qzbnh9jfjswxUxAW+uW5NjvxVVU0C7bQR7ItGTg9VvLMygsCJ0txA7whWyiWXxRWnS950nGY71AFId+FmBKYpMy4zIIHWdwtZSwPlz5rp0qwuTxYLUZbWauHR/G2cgyjq1ACVSumQOUNbnVAPYAXRFMfn69qz01vVqEKKXyd9HWJKnuoNd1g3Be3K7y/2q3QwB4Jotnlel4AO2MlvX1e+WBXk2rgAVCLbHdMXf20dXAla7uLdJHJZkRmQVdCr2OE5VrkVaZJkdoWcodSCM/eibFK2ZTbk2faGeA+kClXqGpL7DrXuhxVCMxHI7utW6r0HIaMywChvugREdiG4B6iPXdmS9Bht8fguk7AERyRoAfZ9az78V48ai59rZeqBnPmZslpUYwvpd2qnHaGWAkyxJyKyIlYJd9pNMlc0Ggu5dFMCEW1DsE8Uw50q9kgQ2MCU8uFrMd/P5HVkj4B2xJYIdKf2SwQkZtWzW5Lq8++gkOfdFKIz4tHnpd5I8e0ACKFpYTrwTMLJ/vKR+2wzJKDXgSFghtNiSEtB2oMFlM+eRXITjl0t+5sKHaTkDfilFDtdI405EtvhnJuBxjU2a300o67ovpGgXEEAOhiTVEtpaopp1xjH5OycEYIXJi98StMiBme9/TK7JrrIlW9GzGAAJRi4G4zFTKCxmhOVWpdlSjveq2m8xNfdBpsnUE33vP92m+ZWWQVu5ak2HH6RVdwZGLKZlxsrwp3bbshS56TV4Ae62+rxuXHvAo3tvWClnyylvf+RZsN6Rykpkw4qIvRoBEfreghFmRJrLc04xwU4d1+4rZpx7H3+xrXj+TPZ4sq/MQtakolZgJKb63FoubaNnhNeV6+P07ymZfaTbpq9zdC7rGW6XxenzZzF6vHx2bXvHq799p2QDMAPgJRipy46DkSOupHbyjWMMimY/+Dg6LpCX792b0TmaPmzYDFcm+QaYN/owZQyLLhVgwsV5ryCm5ARfkmfI9OIjsCybIwhLcLzJKXDFevod5Xvp0LAjhXXKzxcdW15v6saeF3mRW+VZAxKZyQpoKU3Kr93vIwceWXG3URbE5ygVZTmWoEelcjCTLSVsG1nr2QpyFZOxc65k30qJBqOYAJdcKaAGUHyI8wCSJxaEhdmTnEnrEmnAfHOtrMhlI2X6C5cNW0p4/7Li9ZVAyesrLZOWxeoWkjqlRNPwVj73a0QXh7Kl3jplpfZlEMLMCLMh3jncL74DIk1fC1C1pdYdjdkAqcCwmxYHasvUvW8um1jWAhBLaS7flQsB953lhtVZHH2b9leyI811qmePEzG0z2JVhHkyHKUr5X7jeyP7BdA1RVrgJfveCtKdWR2n/WgdS/WtVgxGrljjTEVb6WPeXxaSkz7z+hz6u15muS16R9ZIet57cDUSL8akmOqxT75dL49juc3odLwF2Ls67mk/9plIq3OTMlu8C
/yulWsprAofP485huFHB6vLOBF+j5nlZB95mXp6FDQu5ci9Hr23e8+2ZFL1shG41+fRho0jrKAeV/i7ZjuetJ1z3TPdsAFiGX3P26n3YiQNCCksBsq+Y/cqe2wDaqZECVQttlsfaySjZ6YZx3M/lTk19n2i2RPvSEfwDri6hFPMabsDECKzmORxwboEUmVJSr9VNaWAkpgI1DRpgkG6kOf5JAOjNrlP7scMVupJiKl9fAEkL/JEedaAxHnlV6mkzfySB6dYf+tATCk6mJOWoTkGSe+O0Bxnx4pGSjq5HXEbU3JILiElV1xtEjI4STQIkBLiiSkJS22U9yWQfUvkgnOJZL143GjQfdhSZkMSPvt4xZaA9x/XEsz+mOuQSKsbS4DwG83XJycYaQ2j6wVCcLjGLdO8bf/MsmlxADsPyBKM0EBM2zB408L9FWN172BLqs5ixG3h666uV7FkkpIuSvKT9pVAN99H9UxFL4Lv0efxtz45O9m51CEJzSTXHJ/QK1nKmYnLDxA/h3wPRsye9Lnn39LCzNe6B8SOMCQjIDIFIyNGR2TVkW4MlmuLdnHR2zYHgW+3HyikI8ux/t4rcvlsEujzw3ygD4E2ZopllPEHaBVBXsbKjAxe92rcG4m+N1L0GCwvqW7fg2fvUNxcaZ04hlA0ZaZEaUTZUmoYzVGWO0v4Hl3Q3zdL4dw7nqXojkC2Bvd77TwCRnR7Z6CZvreB7LqmTwNIBBiRYzdvq5fNUo9bUg0tedtciNRDjgFObZu6YwBjxswCq0eZEdkvQD8+3C11/DgvOeVuqrWAomI5pHGC405iimU+pD4k16icB5KAhE/wB5+Fpt2uxtDCt2mCSRJOIceTqP6uDIpYhsrKvtPyEtT+TsqzBiQj6QLXohqUWNkyBhsrW9PehMwyG1ilewW3Y2+f0VhIYATlAU+A8M1YAOdL0Po1EhuyRRkvQiwIpfUlN633L/Q7JmJSuvZny6mcYNrMO7brB8XLoAMqeyIVNj4uu5F4w2Irbzl/pRoqqdZTiW1BOckExETbSWsqAxEGIxqIXDJwA9CAEAYiOoA3xYTkHZx3uCB2jElz7XkCa5kRG4gAKMCDTuyqHyCzIso9QINGLd5hCrRvEdslZQ5kmuUGCNFgz/maPEKeYzRB8/FZ2ToCTOQxNZi0wPsqzmG2ZeDWIa6qbmpchj7eERCi1zXPoDoHx7+RW0c7ruogb2acOUEGA35d60d/t37Tsv56dZC/BM8yKx7HOUlFs1U+bUav9IOhnI8UTqsAZ3Md4hz78SLzdunzrJPne7SPHD84BfbZYF/5z4qhsliRUtxSKM5l/FYgRLs+zxLD6CxtWzbI0cq8kVCKe6aYN2nBiAar/B0Yg8NZv26RnhX9nsusfnx8yZhELhtWQJx47xIaYIIcee6L7sCdIDKDcmyqJ28KPR1LD4van71wEopync4BPrtYOupr8OkBIIo23+wk/yIvQvIlAUhGDLdF0Ur3LH7hLDeFWj24V7Kjikeg7YTyIF7I7qU+IAkVdKQ8qhQQIrZJztfYkbyM2B7gskWsEYLxQOOS9fnLisc14rOPK65bwhcua2NJBNBVUudr4EmHXam4CrrMwlPYp9WymqLZphzbowE7JdCWA9fzOZkhke3iIxEIq8F6VwHCpLsWf/J2PEkx+HhzWQvoeBTAZIuJKpGnhG2NDfhIzHCpCYwnHecdXKQ+S94hegeplUogEjy5ab06h44ZseIfihTF0VM8QwAkKKn++YbytONqOHIX0e2wXGNGAER+b5YZTJPuVxbnHVJMBZTAeGdH4MTKbmQBjuDb6+77xqvfAhgJRU6nSZVKnqx3JBWTfJVlnRbLV1+7w1jW6CMFCct7vNG6CNVfytBj1QSh9Wp7/Y5MHr0++Fhn0WoVTX6ORrWB+Bjl+MbJ6/3bJgym75b1+4/duLTie7Rde6zIiAGxnr23ASKjBAkShDAA4Wft5NVz2cwt+brFZRcg4kUVcTF/U
0KEyprIausczxXF88NZExkcyEK21jNyDLTy96iAH/Upv/8MAHWfys9r5L5JtZ+za1cM1b2TwAqwBYDrmchEOaMso1uU72sydacSQ+sdECG8N1xhqCoAyX0fsucEgPtnENSei7x99Od8kal8SQASLRYbUvzhUz8ZckDlLT7Se8obixVPAbQDspXXn8UphaIchwGLWrZFonI5cJutlFtmAK4x4oED2GMqDAIr3kDrWtBdjxMTk2+z8kj3NpY6uLYKrO6/6n9cj19+D/qHJ+YEVlJY2YG67lSKO3LGqJiq1UzS96MsWc2fdBthZXkARiyJhkKhJ38dsH7UEsp9Uq1sdQKjKuNp+KyVe+ePWWmPujOMQAl/10AEyABvB4hYMiqUeYsrg6XwNW5bCpTRsiiW+Y5xkVZti5UBUNzsaF0LSmR2NOtSbKDRMmEzJfCIjFyyOgXxAAPSHmN+Lis1qwVGZoC4ObZSNPU2x5/tnCtV7Ts77+ice6JBsgTORyqtW/VCNLAagZHQPE8VjAA4DEas+lCj2EzvhLGKr025SfMinl8qOzJ4nmL/HLZjfezHoskzZIl+12lMqM/H4SrmI+Z0q9XdOXUwP4OUYpzcmOHVfkDpPyuGVr6vMiaH3bhiTKWf+RIkg3LU0PoiL7InzxqQpNjGhLDo2BDtRiDHBe8qo7ElSj3JFgErs4d2/2LRtVD0+rJd0INvHaSLW1IeyIMnpcTDld9y8N4S2U23RApxBLllpQQ8bLHEj7y51niRh5Xcs96/EDPy2TdXwQCQeYmtOzzZbVmbpfYQO3G3tGyFTEko70PMfUppBMlv9mFtzTJca8S7tvAhZ9DSEiNwBaf7rPekBK5v5HpGYEvWVEnFLU2msJXB6pza982FPl9favpftr6ysrytsQMis6xrPv9z6prkxH9ePN4rrIgvzAhvZykfUjkpn8ViVZXZZrJMxwC4jBGCb91FuE06A5dk2WY1WDTI4/47CkS4fg9/siKk3VNuAXP1msYK5cj6XRWRTXxvXTm00mIFC/Nv6WveMia2+yMrg0CrJOwBETOFr0A+7fingGPjijXapm9rOZ4yGMljSgCizzmLBRgxI/rZe5sA93qfYvM8SCX/iOgAdoud4zbq82iQPGJG7HYD5yXkT2+CkJIoQaZTF4wIUD0Jbi2oKV2hR+OQjPssaWlR53UOrub00t65UkfDcvnj+z9K5T7LnjjqV2D2bFRmVd9XfV9mCRTOzKw4V4LaT/kYq/d4XGP2HqDjS48Pee+0yP6kfq7XWo1ZaEBhEDgnNrpIwpL1qOtzCCJ5iSF5J+VZAxKWkQWuqTEhwIjMzGNb59iHPgGxDpxy7qgxIO2+Ol2t3NbaRg7MMlUxASUCI8wuOhg+oahgpCp3FMx+zel9HzIb8pAH4sc14iEr2K8vW0lp205qrXVHpvb1jn2FuRZISxXLvjp5D8SY3eFyCsOsSFS/475AWxATmWZe+L6U7+grNceYCiijYNcaN8JuXL0CU8GHVKylGwjQxolIpfnWFNBaLMVUT1wj6UBJASGATA+5xdSlZB62R1jLniIzECK/77ln6dgQYAxGStsNIDLqQ8lacLvmbIhhaBiAGFoeG9bksChLKc/zxeVuoISW3d3TwEg5pWrvzOij19M27fGOPEsjNkSuG2VI0mBYxgBodmLkUqhFv1f8rIwApriS7lgapIyyac2ePbl8BkZGDCy3Y48RsdgQ7Z4FVBAsx21Wgqvbs2tctWZFi+U1FqU40BAWI49HqbgTkVS3LUuatL6WMUSBVSvpwAcjbTyaJXosAghYB1YINoEPEh2TA98hXKW4qCKfSz56M9c4KRKUcHHT5jIa/4za5tNzACQv8k7Kswck1iRJy+syy1oiXYJirBZlmfsbyC+1yq+9VzdDMytmsKmwDvH6xRP4OGVrEk0WbGVqB+6Ur6sAkUSfjxuBrkukYoccM3KNxJQ8rBS8/uayNbESckDWE8R58bVIoaeYjvvFN
xmv+JosJVZa2U/B425p17NVu4AdL7IPxVTysCOiFKNjkYoRpxV+XGMJdJUxJAAVe+RrrewHMUPMgjBbwssaJVpcW5LP3mTi4oHceUrp7D0FtnvvVEBpr0RLhYhlltHpCBswc9uSx4tbdhPyFQBa7dub5C3r9IgVAXpmhPuLRYIQoL5TGsBZ361rOOLywu0eHcNSEtv1W2GTOpcu0c4ZY8JuGB9knv++Vkr/HB9hQWi7+blGALdlPnpWRNeLkO5Z2qqtFUkNRPTv0bZHZcRMSKnPBAc1++ZzLHPAckR0xiz5POmgdgA5Nk+5VykQctQ1S/5mkFKBTp+mnR/rlOo7mUQ2Qq42fs1K+VV0EVcbv8K+h7ZbbsuevU2Wre584v3WoFWyJvTZ3qOL+M6fGjTGlOM2oyveBfwe8RztHaUK9t4hCB0m7o394j2tY40Y+5h1SfZxrjc8ny/yIlKeNSCRoMNarrO88NiiAz3lPuQnScovU8A9iKj7WWBjJJJBkEHZDjQY8/40+FRrk0NlD1KqwdNcx4OD17eUSiatN1eZSYsACf/+wmXF5x9q0PZoopbWMh7k7hafP0NTlFD2xzBzh6Orfe8UGuWDJ7s6iSnQETkupld+pFJbKzO3gITrZzAbwiBEZsu6JXYkRSOGxJi4tBLtnENYPFzuS+5fCUq0pd7yU+dnRFppuR/LfTMpejv+QAtbxnhb70gRjn3ytaat1qRuFZOU/chiZc3S361MUTOXlL1lbyNaURkDESvFcA9s2Gp9QXXRuJPBoUYGNcl4yQx+M5mRY6NxtKyXIDLJ7faVttHzaJ07KlZEAhGgB74Amvg3S9mUooHLUeXzCPjQol11Ztv1y8JN55odU44tFhvSsRquHZd52aiaeHB1354hqd8ZiDhnj0UOlPKeJaactAIOPmZvh4LPaUDbLIsjdKxRHZ8sVz6eF4DezfRW0c+Jdf/bcSgay3i7Oi5oMBm8w2VzOAd6909RAJKYiuszNlntnZmTue6ijQd6bAnO4QRbLs8gqD05/0Wo1P7u98sXW541IOEYBSmWP+S45oJ0zcrL8rpa0VruwCvddGJn0S/5aGDOh4R3daDmjF8OmWHQbQddfwIDL5q4r5FiSIgZyC5KsRYBlECgDcRtFVxplWGLFme8KgyOAFi1T6vltGTyaAY2oEsZaPQVNXEOQHrFpaYABchVjZXitoZImzXrEBiJvRIthTM9yd9Ate5rZkRbK5+iINtKzPHjjOJItHIIkHV6eJwBoNW/JRiRYvWb/G4xISxHQMeonzUz8rYgZSSaWWnYkcgMWK5H1CxT+x0EHYCt7M3GrFGKZ3mvngpGjm7H7285twAjchsLYHyYcgSIyHtmfY6s4Vre5hm09p3FN5xF7RnpggX0qXvlMp6faD3KMYD6rsoaNvK9lXMei+ZKHBzSgOngY05qRwJAM/doICu/741derk0Lujv1jKd+puPcQTsyHHBas8WE86LpxTy3oHGD4o9u26xZHH0juI9KEsZxOBQxxNrLtgzsGo32qP7vciLjORZAxLOEmWJjB2x1tXv+Vhot7Vqa5S0thHdS91muann0hQ1u1c6EZjqHA05wUuXLQ5qJzDicltiDlJOieJDmBWRNUauWyppfR+2iDfXDddcDPHNdWusQVq5Oy8er84Lgnf4+P2Ck/d47+Tx6kSMyH2oblq6IBtn7pBxHNJlrvqg1pSBpc9cexx5byT4YODBQeqsKEtXDiulo44PadmScbpHbdFndiQl+i6zjUhlWroZhUCsSFh8UQIkKyJdJkofNM9tVValmAp3USx4XbMLKXwx5UDpvoiYPL/sc74vchKXSQC4P3lf2adbrAkERkHrs5iQGXibgRD5OasXcUQJPGI9PyIaZLSfXEAt0HbC0niLdb43hIz3sww4IzesLkZkcM2z81nnksaEGTNyUSwI0GZKquvm2up+7Zmx7G2nFVMLjFjP8hFwvHf/R+tHY4N0o7IYEs2GjIPT2
2Uj9yzJikgQopsdU13vuQBgytehsmVJpprH6SsbkPIzJNlxix2xDFIsI+OKZVjQogGKdU9YRgU3mUFpn6fq8vmY40u4eC6QXayz693JE9PNjMkWHE487+aCh3zvRgYq/T6PsqOx3D2HGJK9qtof1jlfZCrPGpBoeUoArs6TX1ekbJhwYEs9VzGl9fsTr2UZ0kAEQAEc7LZVt+0liQm7pq9lZREljW/NM17djUb9oxU8TeXX+h++KUyoY2pkGkFZGE0rLqzEezd3I9KF0NgVi77TumvUrho6y04qjIjcbsSK8DZl21SV56JI3/CcOZcZkgIy7YlHyp7yOYp50ArHSLjPrfM0k7Hy3ef11qRtgTl9vFlhw9G1NM/jEwAJMM9gc1Tk9keV19n2lhVdbv9BsDWsLNL3g209wIK8TfIG/U4P2xH1737bD5oZuQWYvM055DNqAeUjgPkp4FQaKTpXLMWQWAY5maqXj90rq2MwwlLBht1mnheOSmVD6n4x9eMWizWG6W2s33qdxZZY20mxAIpkUvj3jHEbufEigxO5jXcJQDU8+UhsiXcwk4XMxAIiFoC5JZX4i7yIlGcNSB63iCBmrj0rn3T3MZcrn8mQgDXX0OCc39G5kgscETW9qqKovXMlSJ0HdhknIkEJZ9IqAYH5GHJ7bq8MYmeGhJRrKobIcRMl5W1MeNxisRRdGnbE45yfAPZV5bSz58XjvVPAXa4Wfr/4ElxPwf9AZDCQGBzkz40BUXtPZLC/rgSr76FUWDhYnVkRmbJXul3VeIWtWTeyqM6U61HAdYwJzlEsD7tpWRmgpItWWHwBIyOlWFL8PUD0TVyBpaDPmBFJy5fz5a9caKsuby3UtE3PKjHrZLm9yX7k7yzcl87oM6AqQiMgMvo+sjDq79bvkTxFOf2wFVot0vptKZEza6ZVL2Q0Vh6VWxSckchCdjKAHdh3D7RkBDS0QjmzaEs5es76vVqugfoO83YzhpO2qcd9SkKDjjErBnI7RoT34edoVECzD2Kn487iRfbACFDn5ZTrXqSEptZUYeGz8W0TRrky/+WaW8zW6rTulmHqVrFAyew54+1GooHJ/NycWrymFN/UmLj5BCDkQH8GJxT9773jr/lmKOB04Dl7cc16kQ9anjUgibFa4oEeYIz24W2t7C5FVN5/xJSzU1Decx5jtkQDtaQ7S8yFpwFYViDnbQo74mqcCG2vFUw015YSnTMVViRlt60KRKTL1DWDER6c9YAnJ8u7hRTfj50XBI8KRoIvtUZKPzauPLTMAiJtYBxdR5vFw7aGWa4c1636BOsgdVaQgRaIzCqGz6xmVhpaqZxJUKJFghHJjFjKdHPNE0ub3K9jA5wzgQj1b3s8y0WrU0wHbnCyj+TEPkuhemSi3wMjM2CyTNbJPpoJvwOzmhDyem5hS0brR+zIU2TPzUJXYZ9luhoZbbrjG022wMiR1NFHlcGnKI3atcY6jgVGjtwTbeXe4rhIIT+Xeyl2AaX0u/YYlsyyP1rbleMNgAgvk8/VETCiY7wYjLAcASMsOoaEwQgz/fU7zTuFPc/z4XWr49ajYMr1OP9UMCLbNWI95DYjsVgQnVp69imBiTwXu3Bdc8p9egddnreVPgNX9JijMnJRfzbyUofknZRnDUhY8ebvZi7tPDDcaunjl74O8uKljpSKFrloIdct0cLuWR41YH0ERADhWsKMijpeygNxC0ZIgdyMwZov2SzQqJQ4LsZ3vwS8dyILHhc/lH0YgYbdaGMMbCByi1ggsVkmwIi0brG1HkAXQwLYSvKIKTnadl3gEKjPGzMjh655oAAxOyKVGbkO2LdSWfdeu8OMKmvrvrFYEHkNepkls+B/eV1HZFhROvcby2H3lpzVqL2GcTyCBU5mv/vzDazkxveO5XHtMu/az7q8f072QMKtYGTktqN/j5hRDfy4GGTxa/faZeXtZfGVjdTHfsp55u9wD0bOmTWVtT6K0YrfCcFe0O+9Vtgb8KXsg
cjZWEJGp6elnD76SnM7GYxEwY5QG4gdKUavxHGKkiHpjVZ6fGf5oFz+rHv/QRgabjmnTIoht9liKsZVgFy22I0LyF4XMeUkPgDkeze41918sbXv+ChBxou8yJ48a0ByjQkhW0bod1UeykSsiY9m4q6WpZE1lDbml9CXYkM+VfozOFdSEXpUv1myMlXGg92yAKbLUbNpObY4tdNKHYxJNUqoKX6vJag9Ni5azBxRLIkAL2LC5QmSA6o/dg64C5TS971TgHcQblp1sOGUyMzExFRdqoC5ctJY+8hXrYklacBN1IHsKJMMF3QEUOqosCUMsF21RvdWAxErjS9/1+BWXqsMyrYqhlvnLn2htrOYAa3McB+OmBEtkvXga9X9oNvXp1K1WaiZlVG+V6X/xHqzsvhACZfrFtEXdfvene1ojRGgVVBqv2iAUZmUt1GQ7Xs+KVgn7rOs6yNr90j/fe3rP5M9ADKSkTJrdXlMFQh5fuHlSOdRlhEIoWeW97FYjKeKvm8zhmQ4JxjHBGphO+uZ5Gf1nBODyIrbXEtC3keeH4DbXOFGrsvmtpPVZd8D8ZIsPB452OzIrA0aiAD1nWRXLFnYtsx5W3XP4m16o5Vdc2Qmt7zfmoWTy6zjfliix+I6jgHwxI7UAqPkuuUTIDOIavetdl0+bmozkLaZGZ8BInlhSN5JedaABOiDIyWzcTxFphuCEnlMeT7vyNJXgEgJeG/PGQHobPKlMjmDFF8nIAcAqapszvlmeChWo0QDOLMlxVoUW3bEmpCkvz1b6rguyin/6QrpOvMVn0OeqzSQr9NXS6ycZPfG+F4JqIyJnEyGDMcEjOhle6yIFXxtxoz4qjho5dr6PpJbFGiguhVSwb1+vWRDdLYsWm9/WrUarMrXR62MjZveALTOQIi1rGUSemV+tK8lWxy72/D6eqzq4jVTYo+wI/VaekZH78/pWY8mLxhei+m2N/9d25A/DTAy6+q6bt+FK5TxGOABdjPGZ50t61ZhlqQ7v3fdPTzKnizGM6zBpbyP9XcdO7iquQwiP3Kv+f3mwnVbIgMZ0HsISJfZ9hiDY8daPBiYewYA/UyYkAooOXq7eDOOG5Ftb923qgFLXpscw8q+Nz4rtxodjrz7H6Y85bzU5CTmepRYUcmWaEPciAl5YUhe5KnyrAHJ4xrhhJLE40bcclE3oaC1g3yL9H0EYsoTZd7HBCbixSwuW4lfWBqdY5SgqBr+nEsIoMnHuTxxZUtmYBPEtrYnFCg+CYW8Mh406NYg9lgsRg8rFUN8VIHsgAAinooUBudKAPspL9PXXTNptTEqMVVmigavul9IrX9y5N9COQm5qFupCl7cs+p52frF1eXpO/XVkVoi8n5y//G1SLHS0VoySlFrud5Y0iujA5cOoWQ394Pbp1P6GoULLSZEfp/H2PQARFsZZyyUvMYtjl0ARgyRtSx4V4KCz4vHeQnlu3bhkvvviQatZ7Xc7ivbGrqXdnbkTmaBKanENrFovI2rLpVs5ABapUKLdrGU8SPDVL4KiFgg5FiQazbmSICax1/vatzYyfs8HicI+rmREWjYEwtMW6yJdQ4p+tnUqVc5GcWdYEa8c7hfaur0JVQQUlgTUWzQcrnrrsewTut0zptvtzllQDFysW27kjV9vv+iMCw3SxTsRKQ+EQQLvOvjQrTEhCaAHRBZFJs5h1220BjgZNyhfh81oJQuezPZonaNOv6M7RlX3ga0aGa1frdjlXTbkOuVxC1li2k24rKrVgEdqR9HdmrAPG6TCrrvirwwJO+kPGtAwko60DIlpNjmhSI9La9r30/XuAsU8WNQUoMI1YAf24GfsoRwdXWiP/hd57iS4FCAiGNA4vsqonIMiKm6b3FwuQzwi3mwZmpbWxV5kDp5X1wGliAmxWyZ01mytjLooytMyINcL67ch9BNrnXy9Io5aVy1DAXYUozLvoZVTDIho+rqOpuWFBkbopfPskKNxFK0n6xM50nEDFY2gUZvQ
RwBEb2PZqlmYrEH1m+57V6fmUHBBpgDpNLcH1Mr6gyM+TzWu3/cWmpPPEcVlO7ZEHFmzb5iLLMU1hEokcqqVOaGV7PzGB51K4qxPqe0aZvcQhZMZQMFgGE8CbDP0M3GhSNigR0LRMrlrCTqZ5QBh3coLlpcc4oBiveuqaoNaHc4nntqe8SUg5gRgmejC3dpsrIapmxEs/ulGOBSfQZi/uHLmEPH0c8f36OCV9L4OeLT60dQppO3UsmXuagwJe18p4GJlqMMmb73bwtKbpGjx5DP4RSMpL4YYlTz85ZSUThqbAkKW2cxIPIZWl8okhd5ojxrQCLFS2tSsRjWQMFT8MWCqHPzX7OVQAbJx5gQmS2xrEesgEdWplsLVBAZRZwMWMztXLLSj7hWIJJi/vNwYUECg46aXYsybKUyeBcgEtmXtrIKOnsY9wsXTuIaI0EFT3Lw+rS/vWuUBq9SdMRG0Whlpvww0yV9hXsQYrfuFoXjKBsC9GDEihM5D9LxatHKjHaf4+/BSF2iwcPI2mYpYdoqbLNHcyAyOrYlWnEc9YFeZgE1nS5VMiOcjKH0vxFXY7u79EUhTyDXNwA45bgxNjDI6w9Gn47A16hvtGhWpHx31cWHrgWFMdGHiVk5pB/NR5GaIEL9lmA8f5XHn2VhOlLrZEsCsJRGZSMQP+oxZeOEy4G3GVQN4kmAsVLZnNt4Zq14IUtm7zLfM/3uB+/x6hwKY1LG2wxC7pdQDEAcp8e1nhicALUyut2trvTgSVxHzP1WyAoBUGI2MrUPBVuD6m/ujhkr07lupQREh4jUpsX3xJQ40Ln1IeUrmFDvRa2jVee4cu6YpnOIHntCLhLI8W9adIKDmVjvt97nFmbEer5mRVx5WXttY2YkqLGwgI6Y4IsRxlV9pjxt+f0Tc/xoLJESU03//yIvcqs8a0CilW2dLlHm5SfFW4AU8aKfWOkQgEQW4fMlILid0CXdWa38VVmRVW9dnsCLxdMB2C4ERuIKxy93ikgewLYqUELKIRsvYsoTUIQIYq+ghGt1yD7iifPkua4ISsFDmU2rB2B25gzuQskYyX3KduI+jIT7lj+5rsoWax75meVrz22onEesH4ERuY0VLyLBSOuiMQckI6VTLuMJRu/PChQHzeprHU2mFgCR+94CQuygb6GoC0XgFuug7oO2P1oXreAdXp2XbhlXJtZ1FeaiQTQBEQBloq7vcwUoHTAcPIv6+o72RekD17pq8frmGoXSwddAn6pNE+BhiQYjmo2xQMiMiZLWVQlMyCjDByDlOiYgejWmDFy32P1GPndvK7P3Vn/q95ZdB80AduGmdR+YDaHPe7Ed16/yrk2Vqy9NBoEDbDBxVHWCFXsBUGJ0A2CSqosOuztDj+e0XfusVbb5ms/lnQO5duVttgROgw/0LAiLBCIASuFbGR8pXYeBnsUGUJKlaGMPAJyXABnczueVz86t4EQe35I98DF61kbHavdt55w7PRc1hozxu0lCBsbyvIhxZRSk3ukJQnd61yU513mhfBTnfJG5PGtAAthsiFXcqdLgfaCgpLWDS5nK9IUavrr6oskXuwE8nn2BfWVAPM2d5fxZsWiYkRQJjKT5S8yvfsyB7OQOVgdtbfm0+qiAIS8srWoAZApe9g9LCWpzVbFolHdXl+mUlbJt1oTXbtdOPC0YafvJClyk7caTyR4rMirayCKVQ/4cgZGZMiP3pe/2AKmVe20RZoVMyiw2hD57IDL71Mfr+mQARm5hDGRfSFakKHeZFWnYE/a3V6lT5XGPyBaTAjBqX6EQS/euD0K0gtKMacLl1Dt7nBuJBiF866bWZWFc4HPolMFaZgHXnDCk7t+7fFjKNu9Hl5ifJeG6VY7ne5cawA9Z1FvFen/1uy6fy7uG5WwD2HU8SCj9Swp8qWxugBH5CVB/1fX5/vLKVNsbB+4zwdH20n2WXPxyXyf9Ptj3XqeHLfdaxpQ0jRtLTC3zUd2w+nP211OvA2jfo625E
K4GOJcjKaHle7tnhNhjTTQQmYEWBsD1e2VGLNHunvxuS9HueezGBfT3fdsBhS8EyYs8VZ41IFlCpcHZ9xZAAzx4MrjjvO++VpJlkcoTUN2FgJpukNgT2k9mm2Ag8t4p4BRIQToHmlTuA1lt7zI9v3iHkDIQWS9wcatuWkAJenLJIzkgOV/iQYg5IAsT/24C2WPCKtIh8nVJ2lammpSuWgQwUqFni8GSrVyxp8i5r2VwKvfF5lOx0FmTmHSL49/MAD2s5Gp22SLe5NS+0l1L3rM9lqRhBFT7k2iDpdiNMml5qQwLVoS/y4nhCPiYTTzyukYxDfo6x30Ru2319yMg5Ih7y96y0TZycj3vuGcxI8LvPyt/5Zg3WqNO0upewH0O/o0UC3byVUmS7l06lfLbiFRcT0G4enrXgZGRggigifHSqbR5uzY2IV+LbxkYuV4zI3sB1zU+pHeP01KAh4jno+PG8huBjRWuC9PRymHwHrVoXAuMpSVc78tiJRYYsSEWQOaxll2wvJiDOFavxPBlQxYzI3yPuVvlpfLYHFxlG2IygEki1bwBi5JlQh3fS7yAg8oamVeL+a70cUKJT6JgaJcL7dFxo0vZndnhKpiVURE9xo/y3ePzSHakxD+xQSwzMFxb4xQcfCTDIgvPFdV45cWcYifusNwyyzpj3D3CkjwFgPRAuA9g1yDYclvVEhP/I6AffM1MysCUwWx0qd9XiB73PkhjzYcmL0Ht76Q8a0Ai3bHKb98yIOyqJcFI8G1W9DLwB84MUjO+sEWFBmgavKI4PvsA3+XJiWNDaHBAce3h5W5byR0rbkAUsSPiYZXUXkIbMxJTTfcLVGujRWnXPuIJq7oajVyomsFE5J/X7AYfWyZ4LApLbOa98fHFcbWyJCeCIwzIyGVL/7YC10eghMWqxs4iJwararj+1EBkNpEddRnYWzdiQ6xlTwUjUm5hJuT2enKVmYokSOH4J+lvP1Kib5UorMT0Oysg5dpbBW+LVeE+AoLm1t3KivDyERiZHTuKd4mBPoMTXsafVWGdu1S27Z2zIkf2txhWFq5TQtbaClJ4rCvnFolHNMD4IFy47HfZF/auq4XDbIhi67oCh/l+FobdS6ZfucvpvsmfETRvzXQ/J/qinhvgJEiSrdJJMeS68fOGxmWvDZLmFaqBg4lBZwrTWcL4fDPhvmbgJZ+DyV581r5N0Xblkuv4+0hmYFevH7EqNkNS5xy5rN0uX+HgXZVzerkGDfQza6LFcutmSc8BkLzIOynPGpC8Onl8/G5pmBCgDqAyW8n9QpF2POA7MQFb7+s50YuYTgkxhSYdIb/4DjS4eAecs4K05MklODq/cw5nD7j1AYgRbrsSAIlry4w4D/iFwMhyRvILkl9wWQloXHIRqMuWsEZyH3t93XCNVBCQWJPUDMh0vf0ECVQfeS6OpMWKo7BcNyooaUUfUg5q2nq7xdQE47O1qi3M11Zdt/50pfayvfB1BvpChzORgeyn0MYtnIU70d3Sx5AArSLT/t4HIrcqU5abyhEQIr9/kEBkbzKW2/G22j3rbqEA4eBdU6hTWqAB4dbyFBQipKgnMaen1dbnJqassiayyOpMToMQZR3/BvRABBgzIyz63aoFTFPH6NB58zIvwElRSggYhNCe72jVbqnkjlgSNmDIzFulDTE1vytYrBaP8+LLux+8K2MG78TBzFIxt4CKdIO0rNeSGdEsHoNkADUgneeg4CuwLJ9zBk/q8xE2QwJUthxgF966nK9mizX7FFCZCCkMDJkx1zVKRlna5Pf/P3tvG2vbkpUFP1VzrX3OvW9LS0NoREGJEuWzQZAWNBEVw4emAZUAEum0sTua0MR0ogEVEDASaEIaGmIbE4ImYMAfCgbTgijRCEHFGEGFhASVvHIbebH70n3v2XutWfX+GPVUjRpz1Jxz7XPu6bvpPZJz9lpzzY+aNWtWjWc846Oen9b0yo70a8Ta++kVdLRMyaLtZcy4MUeIOEaVO
tisF4Au/hrUvLiMMwHQsSYcI3tSB28BkPHf5ZrBz26Kc2V41KINDz57oQwwJYZoZnzSxr1d6i59L/eyJncckEyljobvPqAZEQ1EbMaJGMbZP5i2F+BiEOoCwYUpBnHLopWsVlzPCcgzwvWj5p4138hZiqtWju0R5BCAECsYOaVSlb0o7Ofy74WbubiQJTw6p65SLWrbUKxESzBCqYG7c1uArCV1TrkGvdsUyvVaBghSyWjX6D97YIQVdqlcXJ+Tu0Dof5u1R5TFeE82rZFYV63qnqFrDCjlRPp/XGvCbpN79JmKPb7we5iSNVctYBuI2Gts3dMeK6C3EF8Vw4F2z3p4mED3LO12CPhgeY+srbMpQxWXK++GAiEanPS/+e0YuTB4LlMEIfw9hiUI0W3fAvoajJwMUzbFVvSMPv8pl0kgNutoA1y5Ftp70kJFmHMQ0JgSKkzHSdoeQ0CcgBOTJimGwYIJz0Ku75/fvfHav+89GOHYBACbUCEGn9UiKGn3vOyHlJfbvRnAghG9nYYzOZ9iHpwiqZQOKAymHJ2gwGMw7KHV5Ve7/FT3sOXNe4B1UU8lm0K8UMwIZIzYujaEdFNoSSn4j4xsX1NJnifXHcu4bYFbK7cFIfa3kTuwPv/IVeuS6uk6lkjHIDWAun78nciyFRyl72lc8zeZ/PIv/zJCCPgdv+N3AAD+/b//9/j+7/9+fNzHfRze9KY3XXy+Ow1IjjEWJiIu3LFCQLcATEHS7/KlEk+AUD8Dy4U+ZyloaANcI+jr2/y8hSEpexJ0nMtfsiI5CRChKGYEIQozUgCJZkTmLMq3fE81ruXRudUcoVuGtoQCkM81i1i7N7FwNguILmqofc6ftHRWsLRcXDyXqz1gxIqXVYgSYrgYoFRXNwM+lv/WY0MuYUZmAzK3jtv6bQ2IeNcfnX+0wHrV0UcLb7+td9MCFFMyNWWvKu1mYt8LRkZdr49fBvNycihfE5kTdd5SwZrZTm1/rTEJVpmwbIhtnwdGRvVn9Hxg3+mujfXemi++3KcCXCUlKN8nVu3WlZy3RLMk1iVoT40Sus+x6UknGhgA/SkG3NR6s6luG80b+rgt2XIH2nLjm3OfspdrF+tXqeRX5rzyV2fZ8lgRuUZLncs28drdOS+YD9cKKnb7mZhELaNYklFb1pK2AEuXM44Z+cIB07/LMjb61vXb9gXAb4GSfYDEGrSW8+mQfR8wI3ulZ0x7Q4yWveVF0kYBzHvZL9/93d+Nt771rXjuuefwmte8Bm9/+9vx6Z/+6e6+3/u934s3vOEN3bYHDx7g0aNHL1n7/tyf+3N405vehD//5/88nnvuOfyJP/En8PEf//H4vu/7Pjz33HP4uq/7uovOd8cBSSjVxcVy+nASFylxleoBiX1X7UQvC0ETTvK1gmkI9RziClYmjQAgnRFuFAuSzg18EIDQPMWZOJTih/GAPB3l++Eh5gxcn5uL1ovnhDkBjwpr8Ki4aqUkLk2n+jdVxaP3BRbr54xlfAlTUdb9itQCcbSS5GY5TaEFwV9qldbnT9m4lgxodc2O6PS/q/+Ui9YIeOi4kFFBxBj7QPYrw4aMMkABY7bAis2Y5bmn6b/DPr0QuFzKiAD7gIjtg0stfxWQTLGOTx0rZuMo9inC6h6ML/9iX5sKuPTDkQqyUoh70Nu296mDfRm5mlkmc5ThRrfBSw7R0pf3tXyAfkxRmbo6lMroiMp9qylmUwYwAay7pKt2r4GSySg4urDfKBVwN/8oINiSkcj7IQkNaNRoGu6cMq6AhbJJtxyyjtrirfultt0ogCNh3SkCkJSlvWSgYmhxFXPOdSxFjqkUahA4YlCGsKUzbAUjFZT0bMgWALkEeHiig8zZnjXjD2Xxrj2BYt42e1wdfwvjAd9deU426B0o74I7F/djw0sO4n23YufOPdXVtwDHKGB9r3jZeecnACbuQNbfOxHU/gM/8AN4y1vegne84x147Wtfi7e97W34nM/5HPzCL
/wCPuzDPsw95oM+6IPwC7/wC+2SLzEr83M/93MVIP3gD/4gPuETPgH/7t/9O/zoj/4o/tJf+ksfWICEvrh0m5oKEGEwOVkMYN16wJ96/SFXA0vIJetJOU8NXA8A5huEnFqQejo3IJIUIAFM4HpsL0WIyNMBLHzINL6sx0H3LGFGUq2RUhUQZQVdy3jhKUinWfqLoMsLirdS1tbdoKQCj7RkRIA2gT1pRuZx3LT0AqrZka3FA/DdR0ZuI2uyZcW9LUjxCsltgREt+n6tG8ESlOx3Qaj/QgsOBlDfbfceDXhebfcKGLFuMjaglxZ8EfbPUgHqZdwmD4jo7ZcEyOvLatbRdYFxwEjnghJy577F9sw5d0zJFKCClx3waizWlwhjDlJWrlyxGERymctNjEntNwUuPFcbkd7ybV24Fu3ZAUrmJPOoNyeym9q8Wb6nrLJacedyvVy/VtHdyY+aDbG1O+RzOU9arg2XiJd+dwRGvOljlH4YWHef3G5X+7wvnfUSdFpQumZAUi97u25aZ8r0teTvPjDiZcva6+r2pGQ7O54d60vD573cTr79278db3zjGyvr8Y53vAM/8iM/gu/5nu/BV3/1V7vHhBDw4R/+4U+tjafTCQ8ePAAA/Mt/+S/xute9DgDw+37f78Ov/MqvXHy+Ow1Iqg9/kODSK+U+RZAiIKIsWyablSd1XcjLF38K5RzphHAu4KOwImG+KTEiZRuvBzTQUUIUczwA0xVynJCPD5GnK8wZlQ25memaBbxwkniR917PeDQLS0JG5IXTDPqGLwK51UztWQDXXBw4GbLibkyorhxt0m9MSRVjGaG/rwYjnn8727Z0x0qD7eM0uJVyj2YxvwCckBkB0AWy72VGRrVHrPjuact4GW9fylaV6jUZndNrr70/D3ww/sMutHZb/W4sf7rmhlbQbXtSRg2+rNuQFyCSx3ffKwBox3qL/KILYgPrTInqsSdsnxarKLmxIxvMjVdLhO8U2UUdwE7llMVF7TtjXfXquwMJDk86q2C9NWFKJCagxE+UvWIkS1Lau6GUePqpdQOiEq8t3xHLGIGYGnCZYpR7n+TeeX96DpFt0WGMfNOuLlaq+6z2p3J5a8iCCASLWJweiLQ5VbblSqKvxSPpNLkEC32cxRKQrMlWMgiPFdEGJi/D45phw/40uvxo7txKJb3uQgV02b6i/q1357OFEtvfqR6z15C2dNPq45HaulvWHpXSfHTfswLle8GZ7Js7hkWzGt35F9falrtRGDG+Hwoj7r/ezc0NfuZnfgZf8zVfU7fFGPHZn/3Z+Kmf+qnhce9973vxO3/n70RKCb//9/9+/J2/83fw8R//8Y/V7jX5+I//eLzjHe/An/yTfxI/9mM/hm/6pm8CAPzv//2/8SEf8iEXn+9OA5JDeYGn2BScKcr2YyyvaToL2ZHOi+NzrfuR6ne++pOl9HIqVdXTGIjk3Fy29HkZuB6mes4cJ4kdKcHrc2ZGHFlIb1IJRC21Rh7NjR15dJ5FEVHV2O2CC2xNyksrPquAc3KMYUKceiUoOqmAgSVjMufc/dXWNNuuJ2FRsZZRpqfhQkyAYYHJGlAJntUqLmNI9PXXqrGzfbrNI9cpq0SOgMcloGJ03F53g6ElL4oLAt2t7H5Ac8UaWf0sgLD3oH2bm7UT6nd06ZvX2DuPNb3E/VAU5awC38XVq8ZXGKC+GkNigJJu36ggnAX4c7Jgf/yu6XG0CmYjxC0rtLiS9p6q+I5qwak3Oz7nLYSns8BEsyP2uwS/i9vfqtU8Wt9/7faVMCpUynlyFIdjQUkM/Viccy6g1q90zrS8NkOuvhebGteCkLUgcC3sX8Z7jESfx46/2h4Dmr12c1+/LZ71f9lWYL9r0RL4hKqMM2U329gxhWYc78mmtSXeWqHnTJs90GYSlPYD2j20Be63m+3elSL2GWjXSS22ptIl68Sl+32gyvPPP999f/DgQWUZKL/2a7+GeZ7x6le/utv+6le/G
j//8z/vnvf3/t7fi+/5nu/BJ33SJ+E973kPvu3bvg2f+Zmfif/6X/9rDTp/0vIt3/It+KIv+iK89a1vxetf/3q85jWvAQD88A//8DDWZU3uNCC5mgIeHoQRCWiuWnXe0IHks0qzWyQQcGTmz/IlVIfd1P3T561gRF2jIuIQBYBMB+TpqgWwT1c4IwqoyMA50V1LJstzyng0J6kWT3ZkTjUr1c0szMic+hS5lDUquVoFnd+vnP2bS5dYp1PoLdK1mJKxrnjMCL/XzFq5B1SjCU2DJ6vY64XEghJgv+/0xAWiMG8agDCjFq/jWU49sW4x9h62LLRrfbJ1zZGsLb56H8AvEscaIXrbokBX7N2vRqltgcsCMhcxUlBuMSkPrb22RpE9x5aVmOYK7b7VMkP16XG9wnLD81rru9rdKpa0Ptp4ER28PufcMaY9MzBuCxVt/Yx1XAlQYjZiwJzl2bK/a/E7ZUnXsqbs3kbP0xm4mAp4grAnp4SahUsUuj67EoAuhavtG2sBB5agfE3mcn1Kl41NJQco/JP8VgsLtvF7Ukq3ZY/WwIeOK9oDRgD0z9Dej3HL0kCE41GDla59OyzvVnoFXL2nBh/eJoh7BIYawzaeCzVrAjRAfwlQGRXc9Aq96qLPI4AhGf5YYb0HrFpY4FULgcucczE85g4Fj9iRbVfh7X74QJaP/MiP7L5//dd/Pf7W3/pbj33ez/iMz8BnfMZn1O+f+ZmfiY/92I/F3/t7f68yF09KXnjhBTz77LP4rM/6LPzar/0ann/+eXzwB39w/f1Nb3oTnn322YvPe6cBycNDxLPHiIdFEeB8H9BctEKJ5eiqomvXrRB7IEFwor9rRbH+nrvv+q9mXOo1mM53OgLxgHx4iJskbMj1LMrEozkhZ5RaIyVYfW6B69dzqsHtrGR+TqJ83JxbVXNPtiZNWgNpsaHoTDHaPzSGIIGuUFbMspjahcsDIgC6miPWpWTN5Uxb3keuW80nvvdlX8uwpauxXxVXrWdU+llPGef11uJFrFhXrNF366a2Ry5ZHNf23apUrSvTd5mxDAA5qvOMskh54ilUVtlZSnBByRSCe601Ny5g7K+9B3hEbc3Wlt2duHKkZHpAxLpnAS2g+2zG1kg0gCUwqXIQFylhDYCY5LmeUJ6rU+zOS6ChxXNb81zu9O+L34oLV0ot4D2GjKmAD777KbPNvQXYvnO6j0bs0VohOh24r9kkum6lXFKsl+QAXf9EBUCcgO8lIGmfz17syAYgWRgEmK1skyVp59dt8OrcaCv7mkLrzfX2u94+DceJPXZ4K/V3t7BptPv5BRZtEPwemcz44XypC71y3mTBZ5viHFDjIfbMVfutGWooNtmGBsp1v8LuJTUGl4azdaPG9fkOIJL3Y1D7L//yL+ODPuiD6mbLjgDAh37oh2KaJrzrXe/qtr/rXe/aHSNyPB7xKZ/yKfjFX/zFx2i0Lx/6oR+KP/bH/hhe97rX4Qu+4AsWTM7v+l2/61bnvdOABGgTawihpt2MAQ1k2MByYOGK5abknVsV9QX40JKXYKUCkeKqlQ/HGjfCooc3hQGReBGtfAIvnmecZnmxXziJe9b1nDpG4VLZsph7LgrMHlMDWrV0uwZ90MKv2LqU8NyXxEoA6BSmS0UDJQ1KdLatmlHLCWK3LlqXyN5jvHsfbfPOOVq0L5FRqskGxlqciFccbqseA9Bb/Gwz28LYNlzyuMVSeJs772VkfdXbdaao8qNqiPxhliW7/wiY2OJwGoykjO598tyzRozI1vvlMXgoxQZZdHCKAQx014xpLXpHw4RzCW29lfPoYm2FXdVxQSqOYvT8vYD37hraDSnmTvnU93zpu3Lbd4tuhKkAFG0s8VymehetfpsXv6FByBp4Z4p33ecz8uK5aFkDOovU0g4Q2Qv47HZvPurYcKiA9Gz60ABlC1C0Jd9N6BK3WeRLZc951sA602XrrHTDJBvqY2csse+UcXnsxgBBzAYIuZf98
kEf9EEdIPHk6uoKn/qpn4of//Efxxd+4RcCAFJK+PEf/3F85Vd+5a7rzPOMn/3Zn8Xnf/7nP26TF/LzP//z+KEf+iH84A/+IL7qq74Kr3nNa/C6170Or3vd6/CJn/iJtz7vnQYkVhc4FCASzje9K1U9IPaMiM2IBbSAdJ2618afWGStA6S8iuuHB8jHh8B0hRfnjHnOpcq6gJFHJUj9vddzCVYvVdhnYUUYuO4pFYcYAMVqaKZk5NbkCYOz9fnnmCF+5C2DC4UWHfa9nmj1dTQQ0ZZcaWtzK7GTmpxvnAd+jn2Q4Zp0igtkctZARGc6ouKlmRFd/FDa1ueMX7bbt6DuaauWtWxYWpEaAZGRa8mWK9cSjLT7t0GYTLkNlHTYcelqEEPPUowy31k3lDlLmunmgqLSqCqFa8t1Y84ZULFP7n2bNlnl0AsnSEbhOUIpglUPDrAuWwQmXtE3oBUW0+6O1lVLp/Ple9QXE7V/+/doOTfwc+y2279rCRu2xhWgWKmgs6j1ABYAjll+ZFG9UZrnUcD7nHOXlKMvaNkYgcqaTL0ibfvIu5fRfVE0S5JmaQ/H65SBM2zNm14WIGQFdFhAsMews3gW8z43qBEb4hmXLMPrjccRy91/j+54BMZxe/Y+F0E5Rtyiv171yvrj6uk2Rb8DNZ25Ym17A047puuaGLp500qde1Rb3eQEsc0zTGgxIQhb1wW/92vuXQUjOQTR0Z7yNS+Rt7zlLXj961+PT/u0T8Onf/qn421vexve97731axbX/EVX4Hf/tt/O775m78ZAPCN3/iN+IN/8A/i9/ye34N3v/vdeOtb34r/+T//J/7iX/yLT/xePuqjPgpvfvOb8eY3vxnvec978M//+T/HD/3QD+Gtb30rXvWqV1Vw8kf+yB/BNO1/Ue40IFkIwYOT6SoXp96Q8xJQpNTAC9mSAkjAlL76MsFx+dLxIiGgxovEQ82kdUqobAizaJ1SAycvnCST1ounuWbQejQAGIBvwZliXCgfPP5SS/1cLHmenGaoegWhM5npZupAW35fXGOHXKrMrx0zct9ZY0VGQMNVTjb6ee+9LAJnzTnWtlkwskdZ5Gd+Jxti3bOmGGqdEB23peuEHKfYARG9oFpXqRYMTotxoTmK9Y8WQVq85x0ghLLFmOix4IGRtQJu9reU+mMrI6KsmQtApcBIZ3VOPavYgEljSS2gv43it9yu547Y/T7FgBuMFUIra+8G2RYAnaJer1wZld4Nby1ZQTtWjgOUW0rZVscOi8Ualx0d3LznPQbW3Zy01H4escvcr2NG2vP0QEj9nH2WebRmaHZBxpg2Lq3dg2nXChuyrBmV3LZtARJp89ytcdyu7+ecWuX1Tmqms33zxt65ZdRPe2MoNCDXQ01/ti5pHotWQUxYMmIWUPUJOOTYE1AZkiQnqe9EdN6JS9fhe7mdfMmXfAn+z//5P/i6r/s6PPfcc/jkT/5kvPOd76zuUf/rf/0vRLUI/d//+3/xxje+Ec899xw++IM/GJ/6qZ+Kn/zJn8THfdzHvaTtfOUrX4kv+7Ivw5d92ZfhdDrhJ37iJ/DDP/zDeMMb3oDf+I3fwNvf/nZ8+Zd/+a5zhZxv4f/zfpbnn38er3zlK/Hf/8f/iw/54FeWrFoBh3wWIHG+6ZkRFEBSwEPNkHV6YQlGyJqkxrDkeW5OlXFCmCYBHbqOyOFhY0UODyWl74NXYEbE9dxcs148J8w54/lHTOM744WTxIq890YYkucfnfHuF06L+96jCNBKenOe3cVIi7WS2iBlWyei39cPWAaWk6KtkWIDboFlob5+gVvGmrT7XKY19c7hib23ZZzIvhS/q4BF9YvnWqOVSbmXlsL5Rlm81yxRa/ErlwCR1v5lsHrtF1Ow8Dg1RkSDEJ26EmhWPi+YvFOyym2KpU6+s+6Ol92nux/jBmYZGq8dlrnpAdPy3FvSXGrad5f5ydl8b/fXihqmBRDRWfWu1TgBOK7GAGSvInGJk
WBtm1UguU3PLwAWbn8AuoKYU+yzDcUwZru8wGuPWehTp5pz7FwS1wKu3X0cVxw5trwHjlGnj9vYZiSAbau1N1+M/o5kba697uavcRrzvdb1LTdSYJlyfFTXA1Dv/pqxwWNLBs9vJPb2tuYsPc71fCqGHtn/qN6lrSQcXVs0EDTzLeegU8o1HomZPEfJZ4AxoHz0vvfib3/Bp+I973nPpmvS0xbqjs+9611PvW3PP/88PvzVr35Z9svjyKNHj/Bf/st/wa/+6q8iGWP4R37kR+J8PuMP/IE/sOtcvykYEr7nIZ2bJqB/J3iIEsWWQ0A1wFkwMt+Uc83I84yc5F+VNANpQjheSV2ReJCc1syidXwW+fgAebrCC3PAKSW8eE64PssLT7esX3/xtKi0/n9fkG3veeEG737htMh4Q2URKMrw1dRN0rRg8POcltGRmkGx1o65VHunQqozcGkQpNvELDxraRiXedOfrHgLuv1tdFy/qPW1Rvpty1S/y6xbvcV0aGkjTR9DV01azjHVe7k6tFoJkzP5j5iarfv2jvWKdXV94AARHXypF07ZL3aKPwGKp9izjkdKAZGMQN0v1+xDQFj4OwOO77gSGZ88HqU6tpx3jyV0LWh2VZj6dZC1Rm/jwn8ublhrQMRzz/ISWmy9Z97vI8X2Ehcmfh+5dum/awxcyo2BO+aW3csyslYpY6xYVIwIr1ffpfIcWyyJ7oPxc94DOtaky1pUPjKOw2dF2ndPGbRMBLd310zt/ik3WL7/9nnZz+79DBiRZlCZazv1uLXtt+fzrr1mCNJrH8eUPZZs/+SAkT0JF3jtvanCvee5iMM0bfQkFcCAYkjhXHdJIg59IynnMt/KeY8ATiEhZslG96gwpMccwRo/bAdmYVyYsW7EmmyNm5eD5GwLYT+da/5mk3e+8534iq/4Cvzar/3a4rcQAubZydCxIncakBzKxHSMQeJHvAB2iJtWTmcgx5ZtS4OX7B8HQBgRlcMxTJOwH8EUOLx6Fnk6Ih8fYo5XkrL3nHBOwPU512KH4p5VUvemPki9ZSua8OxVWkzCQGMLrCLMAk6aHvdkvF0v3A2YtG19Fp4Hh1iDXYG024K8Nlndxh+1UzTMBGknSn19z9qmFaN+Wx9Dssf6Juc2jU1tnynIcyfo8DK2WDeHPWzPXrHWa90fNn2vBiM2TuQ4RRyjLJDH0g/8DPRApFkmBwAy5tZHKnyIn9dSdmop63fn5qNdf+SkDaTwOtOkAp5DkOkh+mDH6+859SBH4paAPXOyZk48MGKzZm25wWzJ0hjhK5fcdukY9Czt1qjRzinv2NUhtgxfJff4lVSxq4G4MWRgijUo2wKTvem9x/fgb99TSwbwQSdFZyi0+3uMjeeKpVlVva0d61/fU/71s9FuT0DvLro2t3jjRbavF7VdA1CegquNbdxPu2c1I5zfZs7RvVugNSD1+6/tM8oIB8gzPUCPx+20vLzWJfWQ7H2M2uNLrjfDmjiIAccsbT3GVhw18V1DqmNzlIlMPl98C/dyR+XNb34zvviLvxhf93Vft8i0dRu504Dk4SHimSkgnB+BBQsBlDgOdPEfgWCFgepObIjOsiXuV4cuPVyOB1FhpqvmnnV8trpnnbPEiLx4LfEfLxYG5L3Xc40PeeE04zznjhKlXE2xSzer5Ua5Yr33kdznFM+dIq1lZAEdWdKbtVUDFmu9b5Z0nssWd+r+jiZK7QrjgAbdTgs21sRbOEf3a5kRKuLc/szVofttBEKocO/yTe5qEzA3PL9nPHO1bgXV/TOSNcv3qB/4196jB0QelNXmQUnxW122DAjh4k/Lq9c9sfRdzkBGQEylvo0mF+YS2ByFPel9npeBvwC6zEw1c1MsLlOlHa3CONp4nKlo9JZ0y8CMFLRuu3LZqrEJAUAMmGetmDSWpKXzbYks+N6/cCNzlXWDAS5nHbeMAtaqPbrWnuuOLN3WEMB5hO/b9flQ30mdQGGKCnDG5uLSZejS1
zNsCLAOGva4WK2JzWDkAQ6bIMRr32ge8BgGGpkedxysJTQYHVPb7IBkjiGmpp+T72K71vbRfM1jOHa4j/SFGHmsS7N2L7ZJFOxz18BDfx+xE4sK6px/+IzyKD1v79rlAZ6RXOpiOuesaikFlZo8ivGmeDtIu2KJTynsZPnM4PfVOjPHx4z2fwqiSxg8zWv+ZpN3vetdeMtb3vJEwAhwxwHJFFCD2IcMB7BM7VvT84YWa1IyZYV4cILUY82eBUgaX4SIPF0JKzIdcM6oWbPoi8l6Io/mVOJF5B+toKfO37tvsq2g3vZb+t56VqE1dmBL1hc2Ln7qWgNr1p4AVE/2gI/RMZbN8fbjX08Z1/7s9jcLRqK2+t/iPgEsXHs8Shzo+9jrG1uk6xJlcXSPbvascu9cmGNoQeuyDQWoyPtTMeige9jMUNqRjLIWY2hVrUMbU2nFRbC66VRlVF1IpVtlCtaklA17nugEHWvhr1u9PQ0Ulu56qbAjqYGTpWJ6ezDiKZd7jrfXuuTa3hykgUj3rlbGFZV97Sz1QRAlK8cjZcRJPcu8DzjodLta3KQGjtV8635j6JXMNfGAklXU7ZxvwchtQeKShaArb3TP01vBx/OMN05G//R9e6LTIds5UZ/jauXe9XPTAeB6mxef5AERLzlHvY4eNwQdAV0geXMhRTWEsJZOPTb6409fY+GqtSPebTTu+31QjT+AzLOdITHJ9nquqM6nPADu5QND/uyf/bP4iZ/4Cfzu3/27n8j57jYgSTcIp4gwmwDw4Lx0DD4HGngh44HGjugjOyDCOBEA+XBVY0celaxZL1y3NL6SsjfjPdcnnMp3ZtK6LkHtOmDZTqSa/rb+uPo4LtZ6UbfpEdcWq1EqUK9dVpEX69TUAQAvwBvYjqWwcjbXvbTy65pVTytENqhWLLRT97tlDAA0lyZjZbOyBgiPXnYds/+apdWzTHmZdtaev32mmvVhfAiByIGxI7G5ZDFORIAKigU7SGHSIItpwFIB0Pc3BSCHYj3MbawQJGwtoL7Li3zmrNApJKlZPJcVkFOtmE0j32kuC3y38Lbzeq1zlTTnPqqFtCoqzfqoreS0MPPcW9blLau2Z7jg3wcFJFwrcOApgdbKC2BYcFSLrvVD0MHra5etZ65am5h6ez6wVkZj7lJOMraqn0hwx5pk1kIHeEeuLlUJjb2Lz56YgVjc/ibwubRjrIVUu+t6Sjb/rrnp3QaYjsCGatlijMjf3oVX/7ZcW0Ztb/+Yyprjxhs/IQbMUOMm9WNS3787nynjkXU5BYBD51K6ZEAqm1LASJ8MY9zHNqkFYzb0NvnM/f3np6+r22aZkVGs28gqL26wPXPbHCIyMBXWJESckrigMT0w78E7/5wygpfp7GUmZbl56tf8zSbf9V3fhS/+4i/Gv/23/xaf+ImfiOPx2P3+VV/1VRed704DknA+IcyTsCQ2DS+Fn3WqX35Wv3WDhal7AQEe06HVF4FMInMS14qbWSYbume9cJpr4Pp7Hp3rNgKRF2/muuBvLSJ68SEQ2aqCqidifue57Lm9FMF6gudiMsXQVW+mknBznosiMclxxqd3JDbNpj5mjwVu7d7bvXkLausXL2GABiPafUsvajrNrVVYlje6vO/aJue4ozOPW0W2Zgyalu5KW6lAvWfCbQRbBCNkOaYQOjBSwUqJHdEAprlOLIGIvd1crNne4xXWJWOeRSk41b6jJTx3hS5rX1HBN+e0BfdSLpW9UwZiwxrVl7p+ljZKHZOwiCfx+rS1wQMlS8WjD341+6t3UL7f3ho++s5t1vqsDQ8jSSkPlck1BStHSS86517BrPdl5jCv/VdTxGkGpL5Cn9J1rVtGY07/rg0pXnyBL2XuyS1l6lQuJsCbbRsDaKtg2zgRvR48DiDxxJt3l/3fAgi8+XkElpb3IGDEjh13zCRVP0ouveiv0VrDMaTnbX7XBgkCEe1+a0GIVf5tPJzty1o/JwOAxKNJVfVc55bmJ
qUNMKr9VEG6dYN/l2DEW4uiGXcj0e6kTPUbCVLKVMlaPwJUeN7+mscInO6DSD5g5B/9o3+EH/3RH8XDhw/xEz/xEwhqDIYQPsAASToBeFhdqTogUb5X8BFN8UKgY0rq8ZCXj69vykBmoOmJgaWoafKkrggqK/IbN2dJ36vS+L7vZsZNcdd6sfqCz53V04q2AGmlRCvLexRpfX7vfDe15mOfLnTLPcFTjFxlyclm4lXEtdfVLkij2iprC5H9/DhABLCK+ngB6GIN3AVCfY7r+1oZ+R63Wi+9BcsrYKbPoxcy7VutGRKfDQkLMDLFUEGHBiP6tlScOkJYzzqiH7fnaqArVevq5Talq5bev18CpafcrsFsNoAoTFMIfS21Akr6hpiviungfdg2pZw7VkQfZ6UPLo6Y4roSRtliRbz9aWiYU15kdePcUeesuYBCiEIZYuhAiQ0y3+MTP1K0JRV2P0/cIOFqis0TryhOwJLp6Gp9RMEwXsajNSCy1v6W1ctcq1yPjZwzmRoBuBaUeMYg/dsaM2LXEC9ByKhejBa9zoxAKoObJ0MR2PYN2RF13znlOka8CgShvJe8UgLAIn8LJqSsFZzHr1Scm453O0yhS8ChMwMCqLFw2gW1znG1XWPWN04BuSjqmX0am5GosidxyZQAqMk4tIzYmTUw4s35o3TkvK6+Xq0In8r1+XM0ld6NzHcghuRenoz8jb/xN/AN3/AN+Oqv/uquJspt5W4DkvMjAL+l1RhR4AOQIHSyGqdUgAUAZCpDJcXqmb7bM+byG33U9RzJl/BcJtVzAl64mTHnjPfenPHonPBiqbL+6Jzw/CNJ43tzTtXlSjMkut6EFUtJc7F8xcMDfstDCfh85urQVc+eYsAzx6lTMKXdcg5dKd1eU4DJMh0wFzGbArQ/di7HKKVH76cKUy3uM7TCS/rep9SzSFTGKGuLqz6XreKr/xGIdHU2DhG9BUxZzEJjRUYAYgQ49P3q/Zqli4sNz+OfX9fqsAGTXp0Oq6yvtYf3ZrNmrbEiXKS5QAflMlO8ZPr+wUKHv0hGjFHKLSaL49wGujPV7xQh1vkKLiPOyMAhStrhEGqxPiB2ge7zXHCJAn5elqWRy8aWC5oWrRRaYH3JOexnq0i23wEdIG2VUmtAqYoWfFACLN+BYL5rFk+fW7uMSdtKOxb90GrfkPGy76YHFuhaJb83RU/vv3TpG4juznovYfG79w567IgNXLeK/hqTsiZ7WRQy4gBMwHju5k+mldfrhR0nuq1Am6Ny+ZdSrkDEdflj36X1sa/nb4IRJkGgQaX/2+Y07XZqwYee14DeyOK9RZzb+JhTbo4DKYe6vQIV+MCB9wy0uQZYByD1OMcowu8jEOK5YHWgBHDj6bxC9tmj+l9mkvL+OK8nec3fbHJzc4Mv+ZIveSJgBLjjgKSL8YglDS/QxYvMuV8IcpaXLoPsR6/QzalZaqzixHf1RAVolvohp1lSdEoqX9m2J6OCnuSBJW0+2r52TLV+68U3NJcGuhLQEvqg+G3L8aIuancNu4iNFgULnhafHUXB6w99T3Yx3NMOfZzeb2RJ6wFKVAuSHF8L+w3ACC+V8tKausaQWCAi25bKkScxlIWjXhzQVXcjxAJcu8yxCuv2Auisg17gunz322WZEb1NGa5bUzfkNtlImi/++u9ygVAzns0pIwbtPliASw5g3n+5iQZogF5Z8JQIXlMDSP1ds13eQlWDV/lO1LnidpBui03UwF/XKQKWoF4f6zElWxLM+7EXbLGNnK/mlKu1nM9uwhIkUuT01vK8fJe991c3zT4vzeDpOiij5+r53VNG6Xy9fdcAxqUuXLrv9xzrrUF72JE18QAtICxJVYqd+dxNOhKbUY6up32K8p7p3YqBq1XVVZs8sS5YQpARRORWQifLe5Uha7XrJeEQDVsB41sMrQYibvzdyiPaqnlSvQmekHJ6Ly9/ef3rX48f+IEfwF//63/9iZzvTgMSTFdShPDwE
HMGblKuAGMuoIAqBt1shAlpcR85C8DIWawWJ0ejsUzDo5IpiwHrc8548SRA5NFZAApf9kOx6tU4C/TWR6+o1driwOPItAASlP1iyeP/bCmWOOelcj1Fmfx0utmrQ2NprEtXs3SN/YL7v6J+3pxT9QO3VjV9f5NSvm0siRZaRaUPlz7UV4sn1p9HX9tzz9LUvmZDANTFbKQrcYE8aqbBWmhHyiAXrY6KD7tYEmC8uCwDKJfuXl77bGCndWsYWRF531py7kGJ3i7tBDJy+1zev4WylvsF1d4T2RG6p3lF5ChTDEAFEBEpEmAkpALIU6C7ROqYEga7x4jqHw6g+oNf+mzWdDM+dxoNrMI6p5b2VP/t+m1Qb2gk/TsXK1tS56SBIjydGzDYo8j6jM1SmezvpZ9fyDA/OETczGRS5HkCUWUdDQuQoVWlB6FXnOwY9kCNxIjIZ327I1BCALRQFNU45T16wev8bdS33vph9916Lp7BR/8G+MUUvWvotuo1hWuJjjuihBCqEVAD2g6IHKLMxVFS4vPvg0PEM1cTXvHwiCkG/JaHB1xNwpI8PEjyg2ePE2KQMgHHGFWqcuDhYaoAhPMfmd0QerYX8FkCLR5D0gycYfEbRbMoFGuX8Z6QZlk8EKKZWW9tqO3O4/HvB/Tzs+NKdvPyByQ5Z9dF8KW+5m82mecZ3/qt34p/8S/+BT7pkz5pEdT+7d/+7Red724DEggrcs5tQk9ZaoFot6uINtHMSdSgc8p1v5s5Vd9OnYrXWqv5Ap9SLuyIsCIST5JqITPrhmAZAwCdrzYpesuYjERP+FOUbDVXB3G7YgaoOWVMyqSxUG6bQ6hc+xhqu5cZwJZWWa+dsq8oDXsWOW3J1K5brrXI6UO7fXQdMiLtc++mRWrfc82iRU230frFa8CiJ+vajpXH2QdMBnf74r5KO8QKKysJ00rKsajpJrX/b8roxkTfjr79LY3vcjFiP2pLoicalHQEhflMMDKrhXItFmRLLGj22MSUc40JSVnMlkwn29z+cxfcDkDFmUi/egCq3ptqh+e2pWsTUKYQkGKL/anbN+YEe//aLUuDk5G7lr2W7ivNlFiXqUvaNVJoPTAyZmIT6BqqlV/tJWIZ2S3Go7Zjh3uWF8/0uDIyTNnPetteFmPv9T2xc6v3LnnnGTEjNnZEiwYl/F4/lznYstmsXWOZkhoLV4q3xsACrlG5aqng9iBMyBR7ICLAJJT2LPvHA6Y1YJ1B7WhAxLInU9DgpM0p1VCjrpVzz+3p3uvi0wwYoYH1ROBSwYnsr59JLTZa7m3INBqjmY61Ae4Zkg8k+dmf/Vl8yqd8CgDg537u57rfRizimtxpQJIevgI3OJRYA+DRLH/pekWZYq4+41S6T0l8dbW/uXav4DatdPC305y6wNRkFoipmFg4eZ+TVt6bck8wwVSber+D+avFMigNyBjKv1iAY50oxos9Lc1zzjgdCLySy5xoC95oMbMLma6qq5WdkU+1d78C4sYL4ug4reh08SKq8B9BhQ1sPE6xjwsJoauHwd943Ba7UdvmsCijY+38nlLPLumFTmdwOQI1cPKIsCtwktf3ssywHWxf0dlb7p3Q3pPKnSysfLToYeFPbYM+2c5LZQ28jiTl9p63AOlQg9tPUEpGVTrUe2jG8TpTgg7YurE9UZQUXZcGaIygVV71e78E6UsFYU4JXr0JDV48BdkaVPh3JLbP177r99Wb96wR5Jxa8L24yQYAqWZQi0Hm8yMUQ6KAN9T2LVl7Vr3LnnnGuVfIyazPOZfYwmW8hS10OGLQ57Qs/tevB+tp37dkbf8RC7P4a5hZAhG6Z/X9H+pvQc2/nK/JjLQYyqnGUZIZefY4VQbkwUFctB4UduUYQ2VNBJCguqIG7UmglPI9o0NAhEidCxUwaQH5BphgGY+SytyZ4bv98audLz1G5JRyB0BGMYaUjukIhekL8v4gLr0E+rWzMebz4XJF9F7upvzrf/2vn+j57jQgu
ckR+ZxqAcIXz6kGK53mVF+ScwLmWIBJfXlR3K36c1oFSFspNEOiwYhnbWCKQaBlymkLRayTH9CC2r1AwhF48EQAgpzYujHplIdTCF3+dbad93IqQOOUZALnAqrZEwGBoS6gvfXMRg6gulistd8TH9Ttt9COgto1K8Lc9PwMLIFJLxmWvtaZWbTsVXgscaHTOlpZbEuqLcYKZwEKUMapB3QXFuV1poZSrXz91tV99R5riyuwL6YkhoBZXXM0Ltz3qYBLWtbJmBCUEHUlSN9YQ0U7T/us5xEPiOg5QyutvBedxhZozNYcl4qfnlsOMXQxFp5Vuym8S/DR3Y/Z5oEJz1q/BgJHz8Uq1mvvt6dkzymX5y8GH7riCRvWnqet56DZv1GqZk/snL+a+Y6GHmX84jy6p8bISOwz19vW+mxNto7dCzj2iI034vcawxZ1JsRYA9afuZpwdYh45uqAZ8vn/+dKkrloEKJdtqbym7AoMt4iCtMblLsW2njQ054HDgDL/Cp3Ph6XQ8eWyG89MAFszIkPSmi4AfoEHmtAhJ9PqR+PwBJcsw9OKEAwA5iKM0WNo2vvDIEfgArwAnxjwstNqCc+7Wvey7rcaUByThmxTOqnVALS4U+gGZJpi8rPac7uYmPTVdbzKBcSbXVoljAs9h9ZL3W7RrKmUFmgQpck7YrEuAhmGaESTsvHws0oo1r+gYhjBKbihhaD9Ncx9m5lzIIlKTlXb6f2wxKcLS18nmhFa6S8rPVj7bdqzWnUsnXPWosZYX8BveLuyV4w4gmVZM0WeEyJPWbZVlGqdN2O1YB502aOeeu2qIsDVtmYcPXPfM7at55gRL6vn0suK/UskORdniLQ5ekdjEmdRc2TGtxuQQlQ73stQkMDkO67Ulo9C6XeF5D7mVMDKGy7dm0cjX0L4PcoqXut3lZGoMOyvd6+Ixm5dFlZMEM5Iw5c8ZByYbUEYNa5fgWILLMQ6X3ac108awNGUm6g23Np0szIWp94zJ/HlKzJJQahIfhT975W2NBKFyMS+3kUaIYz7ZrFv8/W2BFhSAhGHh6m6o5FQKJdtGjFJxipCTsKEKHbqf5c26jabrs45WX6cjVTtONKxXNPvJgTQJjkZLaNDDcEuqeUqpsWx925GBP1Nq+4LoAaU6cTL8SE+s5YI0l9ZqpjCOru5V5uI3cakDw6J8znjOviqvXiuaQhLAqHVnZ+7YVz9aOUfVQGpRg6pdIqQ1pJorWBL/sp2WJlGC46wHKxFteJfuEeLb6cmJ+5OtTvB7WvBiK0XtQAvtDcj5jqEOjdg6qlZZL7e5BjtejSwnKaY+173jtrq1wvYk/W2Jxljnv7u75v+3dkQV2m/OR+fowI0GeWagwJFfBeKdHB7971ALvAure/Krp4mn4+87z/HP3CUfp40BivGNdyn1wKFcriGiMXwX2+9zyHFp2OEujdnKwi2LcxVFNiDAFxEmt4ygHH2BZdnbzByghAkyXhNaesXLViwIl9ueauZazl7j3u2IftJCih1Gx5K6yJriNia1LoPtFz0x73nvNgHgP6+Yt/vXd3JJe4fi32P6vsgDVFMF3foN7ZJO8nPWeNUcGyXB649NgQ+d76kmuNjivUad6v1Wf5O2/2gdcP3RxqnuMo4N0zinnuYPbYm7kPTNdpe0dV1oE+ON2yILym/UcgQgMb3bO0y9b/c9WAyDOFBXn2GDtwQmYkhlKDqsz/h+gDEmlvuwcbPZmzcu0s+yWw0Ku8lwGNUbGApZ0nm3OWzwWI5Oy7slq3P82OaL3EghAvvtWusdpgB8QC5GOZ6/11lm5aDeDtNzq8v2U/LL+XpyV3GpDMWWjPOcnkWw1MXKBzWxQYiE5JIYDpPu0598haxWVgaeEbWTN1Gk+9n1002oIeXSBSPwcG66kMMwElfSutQ8sg5XpszjUWQSrJ0kLSfLTnlGv9BiC1AH1nodsra9be1lf+/VIWaQhVzIP2CyYY6bOINL/ZvbJXEd8rHHsaF
Fjr7VZsym1kCzRpYOQxN4tigd45HGDQZ3lZAo89wcOaMYCyVLcU1+vH6/tiViaCkubKUxSM1ICJFzfAfVfvb2PfvYH8e1gTXktbz+kyaq3xawYEr8geZZRim3/7z1s06nbiDO8cQ+YmZCDJPK/jglJaMiWeu511tdO/jcBIs2orZT/3Rqk+te+621x/7/vffY81GRmB9szdFUCvgBG7JkaMGRHt0uwBkiVDMvWB7KEFpx9jq48Uqwst65A0V6KIBjxkG7NryfvEqdWOsOZqVb6HZepmD3gQlIyEQIayB4yc1NjUgesptwB2DUbo2uVlH/Se9VTq81SWeEWe9Pp3L/dypwHJo3NCOmfczOJa9GhOwzSbj+aE85xxmKiIqkUnLV0wvCBVPeHGMttEWmtRJp8JOE6TKEMhDCcBWi5lwj26jIDen9tIXettFoxUWjq2fOti2Y/K/7MFYVsl11qqTylhPsTyWViT63PCKSakHPFCmKtCdHNOnV+0vac1tw+9MFpAN7pPD1jxUO6vmRGbcUdbxvzA8lxPGBHc/rLSK66lLeYQD/hWZWf17MsD10DUXoZmC4jr9+Fk23ABc+PJCNx7IKW9u7Rys75OqEH1e1iIYVty+zAjl3TATWLotYy1mhJWaR0dI8etNsuVPawJLfDM5sd3lMYEq6RSbgNClgpm3MWQtGv3AfJrCrrXbm5j++aDfE854lSUV+5DkBljwJT8l2SLDeE19W9e+mkvXkQzInuByAiceeCyc68ya48tpDhSUBfjogwb1pyJUerQ+AHqQOQac4g1U9ZV2aazY+l/nctxbMV/GTtyjIwNkZS9OmZEGJKppvPl3H6MLWj9GItFPxqGBCbNL2wmK2BCAQ0obliKKbGGkDXR7IgeOxaIkAWxMSFAD0IImHVGLX2856mh3+/OYBH9zFojIahD+RsUsLuXe7lU7jQguZkzUMCIpN1N3YuorQZ8SWOI1fpJy++2ktJbu6wwoDYqZZVsQkwyIfIcpL2pKOiARIqeIOxvOksUpSnXTUlnW7RFSO9PRbW5remrlHtIMkHFJNmGEANwTmIQLwvAKSU8yLGfhM++9dUKY0pGMgIiQHOp6mJhFADRv2n3gLrvKggx7VCb9blH44bn1pmshsGxA2W8/j64Ru/KtVxEvOte4j42ym61h7V4XNkXxA5UUCIHlXda7yR/JugxaKzGw3e7PMO5KR0AusD51l7/fN28Yf21d93j8oGtMj6OYq2zYFVFvRoHHKUT+8DIHjZEK5oeiLHt6goeOmK3b7mD0o2rXVM6Tw4rSU9y75IH+IyX55a1BUT4T7tnkRXZAwb8v7H7Lp8nc460YEIeh7X2wB9BiE3Xy98oMYaWtjf47IcHRlgvinEiVyVIndmz6IpFQKKzZ1XX5FgC1mMLXCcYaQk7VJpfttm8RgSv0rPKiwA9+Bi5Z3mi2ZGMHoycUqolCBoD4rMhOli9fW6JafQ/C0QpOq7ztvIYh75fJOV19uqluua9rMudBiQvnmecT/PCkkA2hAtFqzTdMiIdS8rXkTXAKl4jhdK6SwCo7iMSu1GCyIqycDUJw6AVhVFhP++3SVmZKDpOomW9iPV+CUo4CbcUmC2Vqxfoe0JShaBiU7CT3E4KuetDnRkF59QFonuy5nPebXcAV815PqnfDOjQrlgeAOn6Tiv46tl7Y2SulLbvUtUpuQ5YsNeg9MDX/LaYzXqL5Am5vy9VYKtef2XV8LJC2TZ57Vpv41I8Nmcct+Jv5ztXQQngApMONOq8mvAXh2TuuwN9a/c9AB5bhoyRVEBbLbB+By3SQ0/tupOKn2HqYC1rbkM2FqG2y3s3B0BE/3Yw77V3ProYaVBigcsay2pjJOaUa3FWHispsGN5R/t4MgASDG9kxLbZ4GDP4GRZEQ1GVt1m3Hkxur95x/V1WpbFFWt/qu8WDOo22xTPc8jSlykjpOKyZaqKM3Uv5+Arw3zUdQJYsCJXh0kFsLcMWVy3NSDRA
ewxtjjJnvlo7DaZEbpoaTDCLtU9m2EZEB+U9PvItt42ElxjRnXTwhKMWL1Gu51bFkQHt9/MPRgB+ngib8wsvA/summG2yLBSpZ5PBem517u5TZypwHJ89czrqZ5keLuhdOMa6UZxBDwygeHmou8q8g9UtJMwK0WTeUD8tJWf+5Y/FhrZfhcM1URmMTQWJMutSeDaS8MxNVKtb43uqc1YMLvreq2zjRle4KWtykk4JxKsb1Q/opCPOUSMJqAVOJKtPIzDSbCdo1eefHYEB0L0n5rfW3dsXQKXltLA1gqxYuAVpXtSScA4D7zrJQRc2su0AjeGFr2xaiC7ui8dJXyFg09tm39DH2Mx8jYDEN6P/soRwHdVmqfz8v26noei/3NtmWmsKa0p1yU7yy5/9OANbHjcZR1Zsb+4ow1+F732YrivKpQYgniF+DDAdKynewsugD/mEItfkrFkwBgThFTbKmAb+NGpIEIFU0b57a4Tz2Hxt4S7zHHS4ZkHH+hlXj+9uAQcRNTU36pfDnzSz2vA845TnTSFM8afSkQ6fszuH2rt6+xyw1wtj5o1/ZBykiBvTEB+HRDw6SAYO7fS+uKpQHJwWzjs3rmaqoxIlcO6NBAZArAg8NU1/Njja1sRQ4rQxJ6MKI9BDQYqb2Z27gSm4dcL6OxJUNQgsZ88LzcZhkUWulz7t20+Jcu6F4SnS23rJFbnh5ndkxdFZZJjH6tv3R8jme4vItyX6n95Sl3GpAwroPuWZQpBhxz096prLa/25biNYsms/yIwsNJKiyj4UC3KVHyZB89I/VxEO2g3ClHdsH02hND20dbN7SiPgJgFoxcQj0vLCdqQRpZNq2y5bmgbd2rZU/075YN8cCItcrL9cpz4ipyS7GxRmvgQ8tWatGh6KxQSlGv7Exs2xbtcBiQUarTPYHcQ9E71fekb5d+ll7hRmDpVqOBFd/FNXe6PeKngR2fbwRC1pT6NX99yxCMZGi97L5zPpA5UlgAGdyayWxtkvo8tjjf4tpKWeZ33d6RsrwFTPaK174lq0JFWq6layHdqKxcEn/U5uatGB8NRHitLTCi27oX7MnnuFAeAQxZp63z8hmXO3HHnW2fVwSToJP7T+ifqwdIdLt7pmSqTBq3s05US1lf4iDV5ym0+Z5rWCiJKciMAP370K9zTv9lZ+LPCQjNV4BAY85yHRtCR0ZmrzEDaK5+QDF6Kd2mz5zV/sq+Oi6p1TPzxpo3pyy9FOiJsFyDrf5gf88ZNVv6vdzLbeSOAxJ0NGaM8sI8Eye84kop5UoxBbBQTgHU4O1HDAI1Su1UdFQdVKutsTE1yyTQU/qdLhZovQzAtFTOxVLSAnQpniXUYwhGLkrR9EOMy3NS9Dw6p7xLuSMIpOIDRLGg5eXk6AESfY97mRHdF1EtVMsK436ciHf/bJoX56qVdfvdYzWWX8aK7aIgpwEBvVvQUpmVttt+LGPcYSUW17fXS35tBXt9DzzVdplnxLbG1D9PjltdWGwKYQhKAIdBqXpErqCkd9NaghTt+8/7s0zHGsuxtm3Nuu8p7iMA4m0bpa/ecj9tmaDiatwD26Ot5Wv3bi32Hui4RHnWSrK+7paFX2/r29DAx815GdviuUhtib02+6ivvL6PFbHXbkzT1G0bZVa05/BAsbdtlBbYa/eIPdlyAzqovtbAhCBkiqEmaXnmKMUNj5OKCYlh4dnQxYvEVuyQzAgLHkYUtyzQMNhctSgEFwFoYMQDJby3ECtTQkATkDuWBEA1QGrmxMszm3Nx18q5uGplXJ+lYDN1m/6vvLd0y/KSyNj3RJ5FVJ97UHt1iLia2N+Mv5G/rHI/Ba0LjV3dH8OO91Ql4em39a70zftT7jQgoaTcK1qcxHStEaB3+QCWSuCctYIlH+LCkamcy7HGctuW0KLjBkizVKtzXX2PGozQNcsCETn30pKhJ5NLrWyeeJlFumsa5mixkHaK6zoYscdphbZt78HI4twr7Et7lrKP55o1s
nxZBb5uXxkU3k9r6WK9/VvgNTMIMVkDMGW6DeTK1qzFtKxVEffubzVwW7GBlaVK2YyHftsi45sCJXsyldFgcKmVzoKRkcLlKWBbrkVWvN/XAktn5drZ/hrF33k/KDqmhq6jR4TqSorymW6k1ohwhaViy3teK3q4V7G/jVgleQ8zJVZ+GadkHnScyShgf9TmdcU+XXxvlhnhNqvYj9rYYsX6c3JcjcadjR+Z4rRgdvQ/va+XAMHW0+r/LV23+I+AQ1iQFgsZA1o9Lf1bOZeADs2MMD4ELggBBuzImhSWREtUU5d23cq4zMsAQKsxknI1dNlMWik3l621GCUt7XmlBSiZFmOqd9WqiQjqetqMmTyHSzLduybdyy3lTgOS05yAYoHQFnKiekCUmP4F733kGXtynnOZ+HhcU2ZPRvPRQbVc6FP1Zede7U09TsKgAEvwBPhWcC3c31rC1wr6eWyQB044YRECZbW9MjzVitrHGPRtLPn+Q0vBqmNmpC2j+9MLcb9dK1oWxPG5s49srIzHjIzAyJpw/MwGnGh3wTVmARi5gaxP3PpnnymRv12fJfPcleLqBe3a9s2pry7N63huKluima8YSuB9RHWnPE5y3mOU6nUx6niP7fSTHUAp16oplwPgsSLePT8uQ7CmAHft3VDQCUrsvs0vX38Oi/fDyybnSQc4Uz9+yTbrMbDGIN1G1kCe7fOR65MHSPRz8tKa9kp3n4FrFDS+9sy8sbIngxbFYzu87GQWiEzGADOKL6Ica1r6/h322uj157Kv19359P1ZYKXvybpnPZjad7rWMV6ExQ41u18V5/JPQMgYmLMPIgIySoYwtDU5hLjKjlAClMtW8A0ja8KYkrmAl9Ocqh7CbKHyucSJpFwziRKIXJ9tFi1boLl3XxyxrxxLrZAyCyvHur7q2JFJffaEjM+93Mtt5G4DkpRxRAMjpHIflnSANBbEnPHCICVwysBv3JxxfU549jjhg59pFhmgFVXUCm9KrcYIxbIvVIjahvJX7TOqYaDn+Wax6JU7oM8osideoinqvWKuQYkGIqSR59ysNp6wL+qklwTe0CLLe50GbJMnXfYuBcSswjXpfomNUrb3rffdK7qOTVXOVD9oMKIVu1GNAntu756939b81wGplt7ujwp5U2IxL5UU7155LS91Ke8HWE8LCyyt5vpfyrTWAUAszzFVUAIzNr0x7LWbhgf2wwiIzKnvvzmhU7L5by0o1Acl24oMLfMUbb0G1tNg61izqN5zbcm0cwFl9Ow940zKkzueNVCtx5rgbsBXdvs+GP8+6nffrWgZszE8rxmPwNKKD8xDgGCPXd7HfhCiz2MBkbVYP6juTSqoXTFlW7GFS2n7bWWG81xtPZC+5z4BLNgQDUToHsTaIXTRijHg4RTreNaJWKYYlLtVy6hFsZ4NOWPhdJBzxpI6KWfR7lt2m2FK1oSAJ+dWkV3/7cBILWFQAtiLfnJ9ln1uzgkv3EjEik4hrd0EPfGYS/08YuA/ZegLdM3Sc0rPjlghsLsLwufxtK95L+typwGJ53LFwGSdrUIrkwC6hRVAtRCItUNmrtNMF5dct03Bjxnw2qL32Wsd3zI6WoV6rrOsWH2axWZsXZ4LawHjTtPAULOCVhq5UslNIb/UQOq5de07rh2/iI0w+3RKmAIx8tv2TOkxGhaM6P1GYMRT3oB+4ef2LrXsrqDzsSKgszNxDKQCRrxzd21zlPJ1lxT1/phViAqgttDRd5w1gKSsTXmv0MZyLJ8rOEt5wfppsSDOGgb2ypry7LnjvBTipsE21kgaKPgZgAtGOgOGAXWUDlzHHnDPURSjAwLOxZ1LgmnJoDQWVKcYbuderynS7TtQckdWeQ846uNG5/dEg0AfkOS6bZT98BIwQlkDI6NxAIxdWoHxHKdfTzatZ1jy4nddP4tZE/X9aTe+0T33Lmh9304FUFVFOIYKRrRBcC3u0d5uAmz24XJPOhV425fe0TEolqSePC7dt
ELcBCPsBQEduX2uVdjb2pDBd6p5b9giiFyPbwwjogtrjvrfY0Z8pmR9fbQGDv++MwJCBVr3ci+3kbsNSDIVljZJsHjQaRbLwgsneXE50VkwEgPw7HHCXNxIWMSKFhoqm5I6tZ2jXc8AE6W0ruWw9++nKFVKmbCWsW7/lItZKFRFoTnjA0i94t5iCzLmWbua+UqvnhyprPA6VMRH98N+8uIy7P32xylFyrGKa4swgM5Vzbpq3YYZ0TEjHjPChQJATckIeIWqfH98yh53npFFdu37yMVkbN3tQYd1AdijbPkW36Zo8fzMojNHLpSxxJlEHIsmEcuYTcW4sBbvA1jDQmNJ1iRl9M/IKMC2L+S3pUvESNaeJX25Ry5B3nZde6dz1TLMiH4HqNDRqqyBjQaK0h/NCEFDji3IVtOPFmB5njlHSL+kmmLYAO+aynzZdx4Q8cCffRY2m9BtAIHtb37W/b8n4H3rvfSOte+H556lFXcNQrw5rQJT07a12EErIwZ3tHZ5RpaR8URfu7obT72rUAyoaX37eJGe1WcGqK0ZPSEj5iDAIzRXrRmiPE/l84QsKXzVuj4RdFgXrhLQzv7g/nTBSpmgQxt42ntxTu3vOYkL1gsnSe17fU4loD3h0Xmun+ec8eLNXF20BJg01y39jKyMXLYOZnzp/bWxby0jafWqyA0YsvLKBa/f+03083ua17yXdbnTgGTOfhYebfWlwhhjxppPORVnHntKwKR2bUCmV4TWmBEA1Vru/TZ0zdHsBaspK9ajWrFCD0r43WYP44o1hR70bAGjkatSMv3in8PfruVS1mSga49rybj79t9Hxu52rz0Y0b/XBSn3n/dkLrLiAQq7kOxRfi6xElvmwwKRNVcNio176Ns712w6dZ80CNxOudXuSRnTikluD8CcLhxba33rueTsOdcIHO6VThk196xPvfhNKRIjRS6EVlE7IyDW+0pISWosoBRDRUpADOIey4V8Qq3zskjEETOQbt//HIft87LvLwEjlwAUz6p8SdsvPad2IbMuW72yqI6xbqsrQMQDLp5Es7bUOMkpFBasvKN1P7ahzZN0yeU65imzvI/mGmTcDiPjRKz7bRvTm2AkNyZELdlmn8KaQFy6CUoCmnkuAC4jkrJmQlohwIyeHUh5yYzkorbXmiOJngftX8q+2+zIULTHuLUm+13+fBEGqI2vPGDj7+VetuROAxIGWNFKXq3U5fdTyvU3oAcR2uKj3WxOZSH0UqhOqbcYjBZcThDVkr7istMdp8AQr30y1rFRsDIn+OMkvvgpKqagVCXuLz+eNHyGRH7zg16XQGUrUH+PEEtpyl3PtXvm3RZP4/8e4xKUWNcsfq73nNrnmgklpQ6IaIp9y+WpttW1jq9YNS9UvpZKd1osbGssgXcOa1W2ll5uZwpTMpBTDJiOAZL0QLQGAXbLoNRZKRgj2QK3a8Ht+t6Wiu7+1K28r9E2L3DaU0jp3sY4AS92BDAsYlHkmF2QVmWmRQ1hqchSUgboUXdIUw20PWamIg0LxuQkPneIiQlB6M7VgxLA73v2NT/343A8LoFx1WnvGmvf7bYlgxE698NRJrQttsxjYbz6IpcwIx4YGab9Vtv0sXY7MGb89W+eYcrzCLD76DbrGCjNjHAMc922blre+5WRkQtAmiFp+OcUkEMW632S8Z+RETKQg/MduVj7Qw9IFtdq96XXuIyeDZkLUDmlXBmTc5K+ETZESgxoZoTbGMD+4s2MOWW8UP7enGeTVjpvxvON+kxLssbOstCmJIUfadSMubzTNJgmde7c1s0L8P+93EsndxqQyAKNzrUK6CdVaz3X+xF86DgTfrbBg1SIR4o8L7MEOutUdt3mKDx6sTolbemU35mtKIYMTLFMCEmUkwxltshVqZc2rilu7bMGHx4QYXttvz8OCGnt6N11Ut7OurRHumD+PUpqxsI1TfdBAybyG/PDc+HxrFur7VtRXrt2bShbXpyDB4r0Z6uM28Vvv
b0KhBwa+rPX1G4Euj+nEIoFVsCJjiNZk7UAdiuayeJ37VK3bPPlbmt7LewaiPD7yDoO9AYIwMRQGWuy9rfXYKTGHaBPe5qLhVhIEemjqymKiwldVRVjoj+nAIj5HEhiPQBrMDBuaQrjGCb2Gf+u1cAYJRrYAotrgPpJiZfVy4p+tzV452++G83Y9bQapAwjokFqZ8Cqxhm9f9/GY/UA6LenlNtvsb23FO3Cm2Lb5vaDYUU0M6IBtb5nMnxWiq6smB1U1gMhVIZEW/HpvrX4HgDkBkzYb/Z6TGvL22uKuHLZSrKfBimPzjNSAh7Nkknr+pxqNfZH57mss/JZrx835fvau1D7NobV5BgA+jTPUT3zlGtGtpHMpa94v7Ry8tkE4E7EkNxXan95yp0GJEyNB4iV4b3XZwDNAjMSa8nRYOTFU1931VMS9lKc2jdbi7dAbi2qvbW5VMmNesGSNJYph1qXQowZGclhCfoCf/41q+JtQIdlRbaCt/cIlaWlS1lbbaisokyklWAvK0sMbcJkggNgn4uPvjbvU39OBpgsEybogMO+WJUOQNxSaue6WDDrT1zsr12gahud/PP289o2P25i39i04l9Hwkh5frsgegAklWeonyfHhY0heVwQbJms0T3dVqG1cSO+1bxXSC0rqsW6tc0ZON6qZUX5ymJNDgxWL+2SOLRcJpPCPicIUKHVFAEpNPctzglTKdCQ8nqA+9pvl2zXoFCf14LFJw1KrJV6SyG8VHTsn5cdkb972ekqKxGWyQ3WAuKPMOtCbAzksb6PpX0FrFig4q0rFiDpeL9agM+sV2spZgOXgyBB1bxkCkCL1ZexbWNKkJdsCdAYE15DC2dZsh78TOZwViBEgxECkDkD1+dZgEfZlnLG9SwM+6PCgnAd0WuKBiNr78XIJRZImFNYvANMMKLX3Dr3ZjIjlfBEMklxuo7BdhbGe7mXkdxpQHLmCx0D3nsz41ffe405ZbzymWNNmbgmtK4ykPPF01zpUY+p4Dkv8dEc7Wst0/y8Zc0jIGGA8BQDrqaINEkFbAYMz7lPCwqIy1k9t6HfR7IGRPR2ae/liiEXmwWIqYtbW2xPszzrGMSf3YKSNmEWhWRSSmsSqzEXdbtY1iBEBTyYXazdJ+p2Wvhr5VwnPzyBiM2QAuwrKCafm4sT/y4yGu0EuN61PUu0/j4615rcnFMFzVN3rCyIy0KCQJxQfaeBEjulYko4hrVytiZrv1NZWD3eWOrbPayLBzo89xw+6ys1p1hXnT1CoMyMgKNCrlqs9bfiewCIQM6MNcklziQKAJnld3nPhDVJYlYWxbJ0TzUcAOJCQ2VaKUK05K7J2hjWYhUsj63SYMQDJSOw6MltFK4R02lltkAzCTBdzqvFAOMooJoZqck/VKY6zZRYhmQtRXTXTquPqnlydIy9jm2Hl5beaw/PSmu8BiU1tgWo7lsJoYCPXI4JXSB2HdoFnIhxpF3Du3bKPRAhQJF1oblq3czFDSu1CuwvnGYJZC+/pZzx6CyB6jfFjUs+01VrWQTREz0Ozuo9s2NkLoCZ52E2NfYdY1kA5bZVWNEYA5ASYtmXa25UAOU0Csx8Gcl9pfaXp9xpQELLQ8oBp5Sqf7pWkhdpQQcuOy0rhu83P8XQ+b+PxNZgWLNkjApPybY+t3i7Zm9lBoRCj8VqkXIwGkZjGGzFdM8X2G2r6k+7vbV3zDB4MpnnAphnY4r+WQu66LdLK/EUNOgAaFUHILG50SyayW+rToyg+4eLDpXnkdtPu4/ldg8QaKp9ZOEdnc/+tgYkPKp/tMhZBe5JW5frfYZc3ba638uiyHTbFaA4bMqTlCdtQbdsiLdtzVVn2M7suzEmMouAKniaqyKWsxSFu+Q2xQ0sY57N9hgwz9KOc8mKpl23ptgbGzRAYJXwkVwy3rx3ZI0Z8ebwteekf+McP2q7Tdu7V2o7zZxIUGJFlPDt8+oU6FtMiWx3+ibYdrENzfgzZzR3nrTuYqvb0
Keo3r4fKx5TQqH7Fse7kHa5KdTqfsiaMNi9a2/o10mCkczPJXg9oYGVjJa1Tgoz5+pxcEpcR2T7EnT0RiwPjCx1gyZbDN05SepmoLhDKzcsqyNJDCoNe6hu4Jq10xhkR23Je7kXV+40IHnP9QkvxJtqmabPO/9qf9oqMXcToLb8W0DgAQVgffET1mJZ5Erv31uf14Nml+eXyYrXqBPPAdBF53Sge/U1XqnUvSYjZqT9ptqb/fuwomtm2GMBBVjmfvHjfTEAkkvKcYrVPS1GtehMUc5LYDMvF1ebSUt/T8XyJYtIn9r3NOcuZuScclewStPt7BOPbvfAr/6st+1h3Ow5R/7GawraCIxYtxivHVr503+5nduu7LE5I5YCARpYV+PbrP3fzb3s0FtlvFBpzkjz5iGdrCm1lzAjdptlRryMSltiA1BPc4nvQMJxijU+BAR+xUXLiihaLVNOCP19d0kiEnCqqZmbVVm7bskGZX2lG2U551JxkmJ5ZBcvVer1MR6wX5uX/Gcofz0Fb03p28u4VFBzTl27pxi6ObJltbLgpL0nXtyRB0ZsoVyguUetjbcHJePUFgu+N6aLMrqmBUp2LeRVApprVSq/ZIj7IQDkgIULVqh90L43dtF/N7QQhOQs7UoQ4M1g9hdVvIh20TqXGBFWX2e8SAtcX2dGLjGW7F0zMBWWOrT59zxL/NhximL0Netq4jsfA05mLn50aen694MQSD7ta97LutxpQHJzzjjdzF1ObkBZfJWVsVpyNlwaZvPyb00EXFCoaMwp45krecGZWWjRLviAZwRK+numO0zCnJpbzJwyZmRcTcWnIrZA9mpJUwvZWoD7mmV2LyVv71uLp9SugZN2wVC1005xrfvLJKljSioLpBbhRcyLAlk65XF3ae6TWrICHTfiWUvtmPSybenfLeXuKTx2P32uNTbGs6x54rm7eNey7jd7RM7ptFsxfF5umzp+od2T/GuMXBBrGlNj6bxURpb1pUV9WWvETe3qMCOAURbNJTnWaxHJ3JQ6bcWcc/HtT4w1kON5Os8YvdY3kqnPvMtBWZljX5QyhvZuMQvaiLWwRp4RAN5iVbq2XQhmPFbEMt5aRqzi3ut7TA6/L4wBig2bxey/VNgHjJmXoU67R8k+ofttdE/A9hq6J5bpcVhIOzfpWcOyJYCsERzrdR3gPM8js7h0AQWscNyae00K2PAWWIckg3V4miuvrb4+51bw8JRs0pN1ZuRx+mxN5qonLOdf/ubFZtabN3IXXLbu5eUpdxqQXB0CookVOajJfU4ZNyXY+2qK7gRegUpsKTc5MdT91OIwsjYDovw9OMRqEfeEx90mfR/baRVQ7UrG+5X0qU2Bn7Oh+Deo8c79Ki2zE8l2891hR7x+unhi1cH4HWMCMPh7zqEVswyo/u3Hqb/hZR2S3AEUW3OE7Ih21RrJ1n3psTlSVtYssmvXsWNiVMF6S4nytulze4zHllwyBlKu/+EYY7X+zhltHKjXa1Fzx96Ho0TXYwfAe6rofX1h9RmScbyIBiP1d+Ur71XfXhMdW0PXCioLp5QxBXEZiTG3+QBAZt23MHb5YVeu2RzEn7z9TaXbav0mw5I0bbDMVYUV0HFGTQGTfb0MQ3rcaYPQSM4r75sWD3ysHbf3N+9cHpM4xT5DkmVZyJhMIXTGGbIkozo+c+6f89r4ssUzy9kXcqnHpDUExKkFoHPzJevCiC3RX5LaUHuqMnVsVztS31NbJoxhilsVICErwmKHKaEWNWRa39OcKgCpxUfTMgGKBSNb/eIxcW6BzSippu0xVjj/6gQyJ6DGcLJ8ANdRYUjb8XO+D2q/l9vLnQYkDw4TDlfT4gXTL/X1OUme84dtQe6V7Xa+KQY8qIBkKhPG3IGRLUsFJ5kpBrx4M3eLjt5n7/n0pKHz4Ot7vDpEXJ+TOxGlHGoVbBE/GFL3jc1ClvLYRWs1hfCOCXWP6MWHbT7NMhkeJzJARWNNuQMmLZ4kycSZl
gsygQig3bYaGEmZ/UJWpLmx7b23SxiFSzP0bI3H0X57rbnWYnsJCLFxMWIB7DO9zCljCqKwdpnUkFoCA6AujCk39yAPM1h2LVZtqMU3EKyn2QeBcr9jUKIBSP99HYjU7yZo99Iq3FpoaGhFUMu4ZQel3iLe3H9am3j2Guw7eMSRmbNCBtP7Arn20hRCSRWsn1M1SzdQghbQr4EGwcn1uTcIWcuxZVDW5BJQsfX9NjI6h7cuePOEdfOzTPhUWHAvTnJX+zp3wX48AEv3Ji1ezfRRl2nFNSlGImUZb5Yp2iPenMYt1T0L2qVTPvCeGoOHXT41Gqhz99OcugQndM9KqblpscYI19ebOXVgxGZi3LN2esDC0wFkn36e2opzaut6qEYh/U7PiqI+mWNPWxlDXgai9Zynec17WZc7DUgeThGHQ8Qxxh5kxP6ltm4RuogaU1PKD/1LXhfJgcJC0S/1luuOFc8SbbePFzSJJdHWNbtQz4y76HLDh6HFTAOTkQVcB6p6Belcl4OVCXavpZ3Hk/GSdkpbj1F8XQnC0pyBCeiycAE1G9FkFiRgCUZ4/jWfaKtA2Gd4qfLE47zP3jW975deQ4sHhs7JV5RGLMmee10co/ob6Mdk3YYGMKl787jO7WnDPUvGCKpLo7ewkymlrVTGPJnI2J3L+0vFwJ7XgpE1VuRxdGH2C4N3m9k4oEvZqeoIBCyBSMZ+BdH2N2uRyHNCBYGwsWxxOYZpGEI1EIVqHFpjoLXcBqw8CQDSn287Srubr81cwnl9cUxszzMGW7OpzfOaRWu/LwOXgfK8yjWLWeCJSQlbesllLwOswQrQrwNrwt2ELWeQOqorFouH1rUkLZOeXBJjY+da7/cRcNXHW9DSnWNhoCtLp95WQAnnXsCfn2Re3XVr93IvC7nTgOS3PDzg2WevqhX7hVJD5FQzVAHPHFUQ+yCGYoriFxtTAA69xe7m3CaEEWCgWAueJwczMXjuBGtWjNF5ycqQKanuEDFIFqMZOE694jMnCVqr51Z+6J3Pd3KWp06p6X8bTbjWzWIvY6Cluj0dyjmyqcmSAk5BlKAp9RWBYww19bG9JB+tTYc8uhdbqKu2q27rretr1r9LLLg8hwWye0DJaEHT7V8DQIcYOss1r70m9RmrZ24VrXqO8vwQM06p+YEz1TP9LFJocSRtMVXvZnnmLX1nkHOXi8w5F9YwVmbtqJRispyAHatLIOL13fDvChAZVtKOevtyX7rnSDM18BaL9PV5lne5sCMnZEyFMUkBNV2ndukaGkCK8uUJs23xc8dIsa2Z1vgefEqcS0s/ymtZF5bWn5zft1zqxsDRbn/S4r0Xa9s8UA+oeSO2QooEZTTMnGYgRRmfLJLLGDsWyD0BbUoqRhlE1CQfNTbPASV9XMb6fdvfta77UoGSLctzmvNFIGBxfBlmNvaQxXCXBZaZ/VN+43btqmWZEC1r87QVa/jwmBG9zWNQdPFnoH+G2oBJUNJ+8/vrLjAkGU8/yPwep23LnQYkD6eIYwxADDiljGOSyfRYFqKoXAcoUSkr/N5+LNlginJCsCAsxNxNCtp32eb8BsZshxV7vN6+dqynkOq2db+rYEcvHbItqtWUOQtKAAtMCOhmKjm8VvQX4K0sU7ulLMg3NTuNXoFLO7sJtP/eK7Pc1rdDN6t/nr2rn2zzWIv158T91sQDMmvg5hIZtcVuH7Fd+riRsuWdi0USgTJWFwALiMVER2tdZTQ5bh3L7+q9GoCi3YmSUso5Pmy79Xu6sDAOLJIeENmSLTByiTANdp3/dI5TxZhQb681XpxneUlKby2aQeU0oueXtmNrT9vWn+vBQarHW/ZqJCNgCPjxIlsymtO9/tpKDey/L/2kotnvkbvloh+LpCz/1WxIeh0scT+XMCV7QYn9/aVmRbbS0Q+P26EzewlcdJ2qavgs206KFdGZGBmrupYiXp51b8Ca1Xyjx5Edux4g4TlHYIT7Xypr8+5WTbN7uZctu
dOA5OoQ8exxQsoZDyEAhWJdb87z0uK9jJUQxTamgPlAhqQFjE+xpeObzOLUUbKDhWvkxvHMysSwZi3RfqGjxVZbnzE3t4lTKoqSUtJ1Vh9m52rbeutQrAHt7XfmM6/KugIlc1rGzDTldJ/o+51VP9YMZ4U1oTvXsRSL1OmP+R1A8XXfuGZoRd06pqhYJKeQMcfcMVK81z3A4dJFwQJe+tZz28jqNlJoRtZ9u+/IFY3nXrsP6z6pt9s+mgKtj2L1JTMSA0qIkALQBnDaJrTicIr143iegVRq0hyz3E+s1851DO1NY23vzcta5DG0XiatNSBi9++C+jOwiOitv6v2q1oCgFjJFz4aRrRrinzfr4AwDbBqpDKI9Pd1jG3uJnNCRe7mnHCFfjxtFRi1fy8FiZf6ffe1mey7upwL7b7duRTT34ETez8H4JQAHDhvpy4teiwGGy91a5ciHViwZrqP2Hcpa7PUMkWuDaL3RN+u7eKRgWN5DmPY0mu7LR67MDbZY9VvfE6GEdFj3osz1CUE+u9tDFvp59KIq4PDjqVWM2TMwi6z+nFt5GcLWI51PW2G2rpOqvnJm2us6LngHpfcy23lTgOSh1PsXI4eHKh8yHfWjpgzkKaeThXRAdGy/RhjUa5j89NFs+zXWIyB4qdF/+5ZNfZYKtZ8kNcmILctOvehoeijkxtSn0oDlDnL/gJAeJ+9tTMVVwBvceF3BvLpbWsyxTZRW3BiLfic4GNoii2fdXUBUm3eWkCrUmVBSbHoNqst27rMBKTlUgut5+4m9zkNgR3drEYxIJcKWUN+fhyGhiyJfY/mblxiyXgBC9ZrtFDq7bH40xOcHEt9mpjorpQxldgzzaBSodDuRHtEK7penMhW4PpasTqrIHjpXqvy47Ei3F62xaAKm/Edcyzn9VBj1FkDJy7bqn/H8t2Xfblf6txpAQNwB0Cbn6c4jtnZW+PlUncfnYUw5QxMvQuae42Uq5Xc30dZeSBB/1bhPM2F9ZuwqOdDl625VKtsLlutdlONqzNuXLUF85KNmWLoAAV/vbQUhd3dY1bXpLnaLrdRbJp32T8v9vfZEAVEUm/stCCkbWvX9ZiRKXou283V1xp++rG9NEZ6RqYFe+K8C82jZGlUueVS8bKXlJ8+cLoHattypwHJM8eIV1xNy4mnTASnkBDjJMAkJLEKpYwYYhczEIu/OkoRoBgC0tQvgtoHXv6m7jvFW3SWE8OSSgXGqVrXZMSM8ByLiV3R9EIjF/9Q4xIDOBaR1Cpq97nLqST1rnDrGbj6tMe6zVv3Kv0m+10dps5yOMWWKY379pnGgrGsY6iYtNtXfTKFNkaKIpsyEFOoSuuIlvfuZa0NI2urzzqFwt61wN810LCW4tRTBniuvRnAPNA9WlA98d1J9i2QetxqnK2rSaeUC8kl7p4HBJznrCz0VDJC3WbdP/e4SnsVtru2boARr9jd8h4fT2vQ735KQIz92NM+9F5q7Haey667Z45rQKZZ7HUQ/OI9WihUvX/80YzxPa5wF7Mk5b6SGktHCGDAhFpMdUq5S9neQElvqOnf4wRb44p1qKbI+bcl/IDKVGfTt8r38q4pxgQo7HFxw23FLwEphqf7xjNkLbfd1ghij7+tEcSCkduwILJfPz/obTzPKB2+J3atoivi6D7XwMfq39AXXrV1jvT7MGJF1tYo7if61fB27+VeVuVOA5JnjxOeVTltmW18TswOE2te8FMSZYRZMIA2+cyxTTxdFe9JjmMWr6tDAl22bHCvdUWxv3HiYKpL/tUTBUUXTPLOd6lYy33KuVr2Y2gpb7FhyRWFjpOz8gNXrlk1MN4J4WouRcl8bwvzHkCiXd/0ZE6wp/vt6kAqO1broQYl2nUkBp+W5vpLN7UpyJjTdWxoIeP24T1suOhoWVvwmDoSQFfR14IQ3ccjdyndBq8t6wzeKBNb/6z6f2MtvT7PSY3TzlLbA0qx7re6C9pNywsCZwrhObb+PJbTn8oD1
kqIfqYLF8+4/G0k3jtlwcgWgwLcHogwJXDLLpcVSwL07IkFgmWzASNrdXu2ZGtOi+Uxd8+uTPUxEVwsj7NucdYNpUtw4byLWm477Y4U2FPMhZ0LNbZAvz/2Pb1x5kT+3irZx+4YMcwIwy8F7eQ+k3FftQk+WsX2NkbchAsxgMklLKm+yuot3IKXc8/aaGYP3IaZ9cBIN5+aMUw2hLEgdb/ksyCXxld1z6o8O7oi7jnOngNY9usoaYaec7zfYnSe+eo0YwfB6i28fCQ//Urt91Ht23KnAckhFleK8j3n4lZUAAaKdRyxFQ2jD3m17MdQTlAsoKyDMEWw9kQqrjl8+a8Oy7eOytZIOavHTqRHm+Wuq7Isv9bYBE+RHMmeiZquW7cJkgX2+Y2PlLM2ifYBfJ54oI59MXIbku+tzkV3ng0feQtGXGXP+MEDKkFCRHU/2bIQrlnAtRyjXfzIaFFRFteL6i52TpUlknv3+9hzA/AWO72/t31NRr7LI8ZECy29j2v5B3pFq5OuazhGYhnfMgdMhQ0T0aiA52iAZYsRtLLWlWvuWd4+e865R2pQ+4qrS1IKGrA9H1gLNLBvnlrbRYMOPp5RfM4eMDIaZ16F8z0iy4Wa0+kChYSYJN7DgtG+aKrMjxaseEYY/d1zfazjs+WRAKDTost3HXtk06NrtnKeVYIU9OOSz5r91rGcrUR666cL3m/Vwu74x63tYF3L2F/aGOHFhQDowMglQKRe2zEKrRl39Oe1GLUt48cIpFgwYueTpZ7S9tHNviTZyL3ci5Y7DUieOUQ8KOAgZ+3nKZNFDBkBkbwJUpJFoS4QVNgUKDmqYNaUxIXjVNxhUo5+YNroBVaWz7VgVAoD75kqsLZDTYCAPwkuqf114WJRFYvia4yILuDdVrkeWYzX2AEqn9olbVR0bsQK2fgQ/tVsiXVT6oBLbVc/kVslpU7cI0UWvt/xmthzWQvkSGi9A1rc0yk1y32t/Jskm8szx1zdQfqiW32a1O1FzzdzaYCz5mbnW/J6N0VgmTe/+/wYYGRSz3DY5zFUV5rOchpz1+/HMnZ61qSBUBn4vQtjX2y1nDuvA9WtOWLLmr/sg37HvUqCn7Wo/GaYkfZ7bzmW8yzPrRk/HneJaFdTytGgBgvmRmCEQMSOj9E1vWtsBflaFunRLPtfnxNOMRVWtsXW3JSU7S/czJji1BXL038pMge2TJAUugHJvBfFPcsYwGKAYkguH3uj8aX7sQOGUZiXWBgbDUwmY2X3Xv2c28ytgZjO7Djn5k6WkjeH9ONWg+o5NzbkrDJj2dS+HgjZw+qzH7rfpqV3RH9s/30Py6r32wIc3fEXzBexPIneGJHrtZ+EIemlloSlbvM0rnkv63KnAckhhuoTPGepFpxzoeIiMKeAEDKQ5YVLJYAzRvo69hYWeZGU9btYSFuguygwWva8/KOXXkvvLtaux6BbCSSXyZCZrLr2D9Ls7hX6yCNJwHr9Xu9zCUYuVTC01XwNQI3uwwMl/n4tYHpPzAOflbaWrikqk9rI8cCFbuTGsKocD0QrM8eiQE9zczHEubitnGdROnjfDExO4gpHRu/mXNoYl+yJBSG+69akrLNtu8fEXFLBfHS9x5F+8fbdT9r7VmkOFV9SAHfZJoAwdCBkj3VWMiTd/j72KINbsjaWmcbTjt9R1qEuoL0q3evX35or9vRj7JSoMUiQ38s2a/HdACOrsTuxH09r+885IwVUsHuaUyEIxKAltVlieb9DAXPLd0EbbdYMNCIJN2csDD9zyl1l9xm5S/Ih95FrvZjGaCgmpCR96TK6dcADtQZNUn1NhqU0rzyY3E4QlyxoMOdlG0PYdrHR7tayxq/vD6BzreN3vb410FF+z9kFIl48k21buy81bnaADtmuzvUSgo+9rOCc23FJjZVLjQz3ci9a7jQg0WM/QF60DDSlIQI5NTt8LIWgKKMAzQX9WN1wArYs6Xqx8vxytYwsj7SejTJ6nGYyN6lLs
ztNobp5AdsARceS0GI1ZyDlhBhKMa0iCxcoLJULj8oeAQjPWg6gy5i1Znmn+K5BYo1/cFBW+dBbSo+sYYMW6HqcYlVe9wATYAwqptACRLV/r/WXHlkDgVYlm4ugxEK1QlsPDuJidJhC8XeOrSDXQYJfdTyS7tNRfYS9Qet7xD6vVTexjQV7zfLWgX+9WNvrbyiyGpg08k7eyWasCK6lyxv7bDdBiceSjNJr6u+6nbd1h7hE0dDigREvq1bLcrQdS+XNF4v2GkXNAy3LpAfF8FR1dFpt2SBAMl5RH2bNJJ6nWXn1+bp2qflBv9/2uWor+qkYEx6FVK35j+bU+ja1azEurM3hEi9Co8K6sQaNES01W/rfZf5JxX25c/WZ4SrDniK8tl91h0sS3LxILawyfBGUxNLOQwy13gmF5025gRK+iVwntGFOK8Z7QYkdu9pFS67dxusIjHjyOPEd7f7XAcrIzWpt/VqL9fHEc+HUhosYQzcfPGHb0r18AMmdBiRAm7ximaaklEiooCRAfO71IkMAwMW3LZDL808hVIv4MYrCc4yhU17t/lyojspKbN9RLhYMuu/o/TJpE3xoK6RkeKJyEBVzgg6Y3MzLLEsdMLBW22I9A1ALxWmJaTnLWCXBupRZqaBGWQCvDuJqxTiIB2YhHZ1nFOA3xdASBpTPUww1Lz/BSHMjCPXzMfafgfY8xwpz+2xBRgwo469tjwCCs9hr4e3nnJEgizTHQMoTTnPCnIX9mDPwYE7FbSvhlGIFJzJ+2liy+fAvCaKs225Jx2+xQSMWYanAo3PD0du9Y5YA07nIYmznBSjRek1VuJWxwLOg6vZ4TIlWWGJoyoMee9LmpUFD3+NeP/q9sUvA2C3RGnD0EPLiSfoUwesuqP2BzthbASk1xqE7l2w7zamNZRqkNoDJcl5v1znW9SDWuV2/87x2yqJAH4sbIBXnR0Fqg9ANM8ViPwsqVsMYDnSKdP17v62lBtYB8SNWkt+7v848V/tdgZaREq3rPB0mydB17DJ4SYav+niTxG3G0IowrskaKAGUISwoF640jouSz73boY0XGc2be1y1emNU69cuhimO4zqAMejotqk5YpiMAOuGtZG7JlljLXZfgpK74K4FKE+ap3zNe1mXOw1IUs71IWu/Zk4qc5lUcobKtqUqpzouByPLGBWGKRjr+uRNPPLZX6go3BCL9QhIKYirSEBNUQy0dMWSoUVo/FbEEdWqM5XJlz7JI3eo7jeVKaj1q7330GXN8oJK5bilEjOKV7BsiSdrQX52gb2qbEisv3vsSPveFEGClSk04KKfK5+pbqX3XLmHB0Bi4PfQHa/3b0CkfC+fUwZCmUDnrOOi6CsNTHOuf+didRS3RJXC1gTJy/Nxu970d/99y2XmSdL2o5iAum2nOY4ukUxrS9EpbYEla8rvncLC+UMpLaPg1upCMliorQuZ/rzXxc/bvlAYRpbWjf7zgtJH+7w/xbvfLm6suC7VwayKbdL9TgMT7W4kym17DnYMaSyjZ4kpADPEbTSDGQoD5glIeQamKHELSTJvoRQDvZpiz3THUNObw4m725KRu5f+bAPlddydLpBojVYNCEgf6D6uxXNDgQ9JE/LNUFPXXBQPh9yqwss1Lr7l/n6HTF3PjljR8WB7wMiW25YVzmeULberLfZjr3vwCDRMIRg9gHWbPBbk8RMK3Mu9WLnTgGTOwA19VyGTR85ShZwKxAs3s7Im52pJBvr0fp2bVl2o0CmvtKBLQUZhSerkEZo13FrC+XsHfKIAJVmscrWmZfRuOqeZlu4WeMdzsdo0YwdQemKCLG4VbChXndvEmXi+/t7Ee0k8CEEE62d4qYC3LFD639VhwtUh4hADnrmS9DEPDxOmKJbMCjRie24PD7GAD2G79H6HGKqiMXquPfBoQKMpJzJxt/O0haExe01atrjSd7mNi96CJ+P8GA/IyJjThAyxBOtxMkrPql1qlpavZV971jpgP9Vv/bT1b
+262+PSc2+a1Da/DQAt3hqULNqoDBosqAqg608GuzJF9aNzqu/pmoshUxjr4F0CK94Hx2Rj7to9XwIg2Ne3tVTuLRS3V7TiIk3XgynXWDh9nZF44Hl8TK88AaInn1T/WIBLI5N2mTtOsbPyHyc15oqRgs+N84OWqTCcKQPhIO9pPKMqegBqHM8xZpxSqOmBqSBenZvbpU6PvjY/2uxc/H6j2+aMJb1tVOPCbmtGm3I/vK8yl6acxK20KtqAzHZijDsiFjfI1Ax66tZomKHou+5diPuxm4yxTM+B/KzZEb7zep7V77e9npd9cAuUdEkFVsae7LsOPrzfPDbYuoytiTaspMosNWQ4ev9tXZeXuxQ7wFO/5r2sy50GJDKFyVOm0qaVt0fnuVZq10panYRSr5wBaD7Iqa8AXRepKNRzDG0x8hRWftcyBRUfkAEEUtByrlzSuYYYcK4Ldeqyg6WQq8VJx7fUAn0rlgs9mY7cokZK1aXWH88qp8/nV6gFtAVwjSHh36vDVEGJZUUIRqKaqKn4yV+6bDVXDIKRSU3mo+caEYx7Vg843G3qPBR5mqhZOAWEZIRQ7j8ASEVJrYXrAHElkrEzZ7GskhFpwe9qITYZpFLKYBVpXnck9tGvxTRwcTqyUGYM3XvGhBL6mnuCIkcWxNrGHePTO78XqC37Kle5Dtw1VkSDEcv67X1nrCV0S+kY31vbbw9weNz0nFuXsBZXd5/YgMZeq6vvWrJjtS9uqZUkyc1Vaoqhyy5YmROCaT6TLCx2hN932ggl++fqQUyiQIxbEuBwjKKoz5WVZmHEMh+V4rw3qoDi4t43HsQlRiiPPTknYXlG41n6lIViZS0C0KUa5udj6dMUUGvjkJmqzzC1+wwYu7qsgRH92WbTondE/d2ADH2Md701xuk24rGkFoxssSAjINLAeLuGbWl3NyoGju5ugN4WRBW5I8DjXu6W3GlAcj0nHM69lWNOGS+c5sqEaGaEhY6q7+6KNfg4RUxFS4whVOUNKG4e5aUnGJmKxYPWcq2o8jo5o2YsCSEg59xi+8rZprL/FPm3UPVnAJDK88Bc6qokqbabAZS6KfSRjmFJM7eq3tlYevx0rjrbi2YjdKrd2wqtb7ZK/ZzicOLX7eL1Gbh+dYhFIZeFnO5XBCSWFZlCCXoPvZtWY8TkGU2xPc/qwkFrVuhZEGDJoBC0AmUhyAlSlan0eVZmvBDb3yhuWYy3oWX3kESRyrlPR5tzSU2tLLKaeQPawkMGDugtiXvcc/ZQ/yPWQbM13NbvvzyHJws3rq5Nbb+qaEQdpMzz9/tZJon9XeePEqeTcsajc1+Qck65jl+dFGAUg6MBMg0djE+bFr/xFsb93tzCyv3kVijSBqk/DWntUEpVLXTZZ3iSH/m+N1Cy10XsEubXtfAr675nveZzYYHB0mAwOF3e/WKcQivOyzkghOaGO4WAHDKuShbFYw71fvn3NPdJKhgHtpak4uz0wVh51nP9Vn9F9Tks/up1QK8PmidKWQpaMq38KQjDH3MAUirrWKxB7ykAMebOTczKCHzsdb/sCh6mxo4wxS9dtdjXgF+kUveNNrh5zMkesdkevZiyNRDCzyxdELDuWkzhmKUxNCOIy2/OKEV16p5r88k9SLmXx5U7DUjOKeNmlgmZigL/npLQ28w69KgEBVKp0NlwNKXPl/w0J6QYEENcmochx2bjglGt6cFYxssrnyATsaQfRlEeM41xlRsQJoXTRkaIUmkXoFUtyiLXkEwFItreMYXm88vsK3SPsgvWGhU9xVRiNET5ZyasSzMyrbl7aascMGZwgL5+BQPY6W6lgcZBAQzPLYv7X00RIaC45PVAZIpYPFcbjAw04FHvLKcGNspKGXJqq6YFJKEt4gQkCBGHeABCLAuFPO4JZASLe0EqbGEMKv4k10KhtUmZwdkNtAANuADNbZAysv6tPfmM9hx1hrCF+1jU+7Cryju5c21bY2qai5ZSWpx9OA/oBBJ63
tAKy5wzXryRl+36nFYVYdtv2lpp45g0GKFCsSsgH1B1AbxftfI2bOqm7DnW+plTaoBxUPNVytCj6JIYlbU565JEDQtlWoOTxMQWxTJ0iKIspzKTTxGnOSPGjClOhZns3bZojCIoQZR5XQZ3qOuHZuRiCXiXuiWpxoHRPXdKWLgSaWXZmzdb39gMXlt9NTZOzSmvrAOpeRpA2JEYcmHw5N5PNctD8VQAumxczb1r3MaR6yX7kmOx1hDJrZ4TmZGmD/Qpfus2Y8BbW5fs+87kMbd1n9QxTAAWBgvZpkF1AzT868U1WqkFpYOwXYEH1BwJAkr2xozssA283yXn+6D2l6PcaUByfc7AecZp7jNSkSE5z7lTKDjR2NSKnj/sceKEKvsep4hTsejIRCHB5TEUS3ZhTQhBgGZ5AJRfLXKnDLYrbEuMqBXn5btMGqfOX1quf8L66PcAiZ50LUUvRQczrg5+yfND7PPf20XKUwTsZ92eKyyfj/7Mivc2NkQDEW11PsbYZdJiDFAobSUrQpbrGEMFHxqIdK5bUCAkF1SggEjQgMQAkGBnpyxKbuaKkQSX5HQGQsQUD5KFJkqAbUYuLFtvka6ApIKNJrQ6ZjMWW0agvPDXTub5ULzNGuAcIs8Z3PTF1p3siKIkcDFViRXW3I9clzFlaFiz2lnlhVZTzYaQgTrNregkFT/PTcv7zLZodoQghKBEKxpURJilz2Z5G3cGH2xTdPuUqKj3uyV7rJ1UuLW7jQYlHlMi38u8kpY1TGxygLW4NA+QbGXo00yWjq/QFv6kAq45O0+B2aAAzv2t0J9M/jGI4pYDMClrsnbjCgEIOWAqtYSOUxSQc0CXoGKOufblKTF1MPDoDGDqDWt6jua8acGJ3GOEVzNorZ+Xxgi5V91vts/tMSmjsUBFSa/ucDEIUxJkLdPABOjnAU/0Owz0rKs2NLRt8vfcsSEt3W/dZgCJl0LZk8d137LsSKzAo80R8l32t0lXmuGsARHN3nui7Zq8y5zLc9zhnmXLJtzLvdxW7jQg+f9ePOGZeFoE8l6X4F5r1WRQ4DD4NJraFTHg4UEsYccpYc5TLVD34ND8fqnYJjBLi5w/a6VKxbpQ+ZPt2xLURJRCUcJzQMqSKubI68x97RCKN6HaydYqVzeDfuF3Wsr43Spmay4s2uWqd7/xJ3IvXaIGIvI9dmmZuU3HhzCGJJRzkvnQ1mpun2IPQnQAayDoyKmxHmuAw3PPys6TD8XWHYQdySkg8POUSnsOmGJERlAAQH2u191YyNmMsrt2m8qZDCDqti2xzAyPa4plS97A8/XWyl6J0BbO2VhJ91vq1Ji3im+nuDSLKY0XnDfosnVzTrg5J2Flz5phTK5rCz9PcT3ltGbuevDs+4B7T5W3xnoOKUjbJV1nDxiYOcfrk60+XBMLSsxJ1JdQ950LU2yrvuu5aBSLRrHzmrePJwzuti5IAGrGvlMKJbVv25fj9xSUkp0zjrNkyiLbmmi0CBB2BIo5icKOA+VdiwEPwHNP9b0g48+1bM7Ag7OcRYNmpvY+xliVaYITjtnWpwKmLJgb9ZkFbfK3VYfncdYANecs8S/MxlUMdnTdKs0AUi7zdYvZnFRMkZaRqyLgMyP63QZQQUhlalOfKY/AhYYH9p3XRyPgsReMpAz4pr0mHhiRuWNZUkADkTru4KeYD8HUcMuFJQcwlXHIgtK1LeV5TaHXWfYkRXk5yn2l9pen3GlAck5002pWj1lN4Nqqea38vW1OdyoUV4eI63PCITZXIJmkZDY9sgL2nBGLVWyaM+Y8Ix6nYh3LqAxJyIuX3wMjW2v+6HdrAY6BVg0J3Ewz769Y0AZ9aMWz+i4X+95SZvcdTcwWjCxy3at7q8eoSZW+3doVi2CiuWLRjaufwDl5C9ho7hWee1YobdBgpMYneGDEApER+PCAiCc5AWGqn0NOyEKZyHVCy/AmFuiya3UT3Dg92rHS7rK9vEtTB3LacSMFlZg4FEYkI
de4qKzcVcqbIfUZpqbQIOZSEb21cJpKBXHjmqHHxjCBw+Cd8eJW7Gcb2GqBuyd6vNvkCnafagWl5ZLvQFE6iG80GNHPU+tlObdnzXeRx825FYfTblOaMXncGBOyJHK+dt1uH8OajJQWKonANriw89ZeALNHJj3/FdbjRIt9kHii4xTBxBJzLqBjlt9ljhIXrj5GUCmIHHOhPAcmrShV1TMIksXUBYibLkoxVGr0Ov07UPp2Rq3rpMdlAxVLI9Kovzxjk92fBqg9yriMjQZKbDA7/ywqvWP7Xbf1cepnAzzYbr1tVkyABz72M0jrwtijvWL31WzpiBXRboOA7661ACV2w477WG7bffi93IsrdxqQ/PoLNzjkq87CQesQQQj9a1+8OS/8Qa1oZoTZm64OEc9eTcViJtTyw0PEMUYcJkkBHGPAs8dU3IYCHh6mqvgCyyAywHfnsu84v+r2xggcuTghYgp9/QkA4mscJX3kcZL70dZdb1Hx+sXrJx4v/5qlzPaj/WzjPYC+hgsPIeDQ23UaTh3sq1P2auuRR2O3SRwVZDSGBNV3fMmaKJes+QwACPNZgRIFQCzY2As+PAkGoaUkLlw8b2h5fibu6wCfLm7Fk/IscjzU77mcLxUjWc++yFUtmLaMivgl021FlAuyg7VeUAIOxWqay7WOXVxJc+/iNsnc5VnmCDR0l/Xgg23T23SmPQaxiqW6V1Io2los3/ssb7Su6wQLRzIlZc7Q7oSdC2FhRvh+MPU00CscnYRlUCqALjB1YiY1U2xQ1xkYCQGMrUegwYeeTgQcO+0sQ3St3oNmR2x62y3Fz3Pf6n8fvQOxHmOv8aBT6qfqwpemUOtZSbxhrs+P24RRlud2jEGKxOtnae9nIjMZquukTuvNOEnL3J1UX3kJGDheb4qh7fqcak2TyTCPe5RubXwSdnDLzi+SOJHExixNGTjT/dgbMhco+zq1eBfUrt7vHpDItlPqx9nIa8DrGzsX7BWOEcaYAMoDIqMDYQC69W3NRcsyI/xO0ZnhEnL7bQeQaAkDFHDr+rV9vgvuWznfx5C8HOVOA5IXzjMOp3kR3GfZEP7m+YNaoXWzWZOmuh2Q2IWUM44x45hCzSEPyMTx4DDVwlnNKp/hrdFAnwFpzZpnLRL0aUYU9y1SqsdS5X3OYqXjRHZ1kOq9rUr6toVstI/TuvI3DidnzYrolLoWiOwBIZoNWQMh3kRt2xHqtXswwkWhghGHEQk59+5YTxKAUAr4ANBAiWJK6j4Yx6y4rI29bjy375NMC5MGKfKjchMrl8oCVKYQavxJQEYOwpY0s1mpxByUYhyzm/I6A3VITRNK0dCuU1SAwhI07AUi3G7jFWwWp5GLp/7esSKxT5rA75oZ4Xjm2I5qHI/GMGXJuhare8iNTMrNB7xaZWN2semlTIlmPCwrYsFJu0bo9qlWbQVorVKolcG9it/SqLL+Tva/N3ACyDrygEx5amwE40XqHKxSlkdWHk9yvhDlnZE5SM9FYbkmlOfK90jeuoA5iCElZTIvBQilhGlu43meQr0ukHBKZQ0KrS9tzMye+f1x4iIktrL0lbZuxLamCdPvHGzG2PAa3Xveu2JxG9vixSjZceZlo7TjSBvhHqd/tKTUsuN5wnIDAHaBke5YY8yICE/EhegugI97uTtyMSD5N//m3+Ctb30rfuZnfga/8iu/gn/yT/4JvvALv7D+nnPG13/91+Pv//2/j3e/+934Q3/oD+Hv/t2/i4/5mI+p+/z6r/863vzmN+Of/bN/hhgj/syf+TP4ju/4DrziFa+4qC0v3MyI4VwnEQISghD9T4OUtUlYT9T8Sz/x68IOPDjEmmaWGZ2uz2RIUtnW+4Vrq/9IvJfbS+tXXToY4M7q7U3PA0q7piR+yDE1hmJO05Apcdtltp9Trgv1nHI9Lxdr9qPtU4IGF1QYENLHfyxjQfYE8mk3CW6rTEk5X8eQKKVBzocGNLaYBmAMKrzfnX1rQLu3H
5kM5zgPiAT66+n2jwBTiAiJ1zkA6dw+A8jToca2VDYmsD1NIZ5Ff5Kg+wJM4tQUEYIU8aEvwCQATFs8hfYOlCcrmcVSLtl39CMoygbaM11zI7BARP9Gqe99pSX6qtnWfdHGRHFMA8tinJoNscxIY1Lo5IaOpQOcwFQDUIDmEkd/8AACPKV4dmk82Q8+AzIrhdHGnmjrtWZMdF97zImnBOn0s2ssre13T5YuSlpx3Gcw8J41rdkxsc5GY8z1vBdDKvEjGYAwCBI71dpF91BguR6kwkJmoGbNmwuDxLGekXGcgNMsLDifmc7OJX0hbRDlPC76cqIVa0c/7O03fRyZEGlOKv3Dd1Uf+eQUY7Jt9bPjBmjbCsCNFRl7Uow9AkaSCkPZauDIc9a1p9ZO47l+rl/PvH9OH5Ox1slM2I9krBNZ6twSkOhkJLxWxzjvMmLey70s5WJA8r73vQ+vec1r8Bf+wl/An/7Tf3rx+7d+67fiO7/zO/EP/sE/wEd/9Efja7/2a/E5n/M5+G//7b/h4cOHAIAv//Ivx6/8yq/gx37sx3A6nfCGN7wBb3rTm/D93//9F7XlNx7NiJmuWKljQbSlA0ANagfWqekab6HqW2jrJyD+uawMXsFJceMSl63YKSLa0g+gY1TWRMdDPESslqYqE5BSqO5bMhEkHBGAs6RexBl4MEn+d23B5b0czMJr+8TrI4pfc6HR+NaXXitodGEhEFkqaLFuI/hhwUJrEQJ6q1BT3kL9ToBBEKKfhwYiss8SjIQtJsSyFg6oWAUcQEOaRfIagLFtsMxNOvdtt8H3tu1RAY367yRgZT5J27m9tDWHiKBASmRWqMwMW6K8TqHFDnDRmxBakK9aGJk5TNzTNGsSOmDCWBM2ezYmVutGsFUtXoMZGUcFWEylwGPCAvDo+CfrhmjHtvw2DmD3Ypys1bO1zTy+0qwMxqw15y6m8dTFVhn0jtTHlay7ZTXQt5i3bKPMEOvi3BS7YkUrh/uYWV8584CLBSeybV0Zt9/nlMuKGZGizLmxMiSFOeEYTZxbJBOj7rOg5iKytACBn3zWZMKcgFyAvbhuBUwh4xAjribgOJfneGrARANK3stJvSNtLQyLmMrWX+MFyoI2z62rC2Rg5reiGHtZIPcmUFi0ZYXtAHqwYe9h/ftyfHAM6TUUwEJHGLa1MEY1zqsA3TVDpZfspS4lWLIhWkZdyvkWaIxcym2OposgXbBYTFIDE5sQhOe9C3hkq9bRS3XNe1mXiwHJ533e5+HzPu/z3N9yznjb296Gv/k3/ya+4Au+AADwD//hP8SrX/1q/NN/+k/xpV/6pfjv//2/453vfCf+w3/4D/i0T/s0AMDb3/52fP7nfz6+7du+DR/xER+xuy0pZeSUXUbEyxwF+C4Y9jdNZVs3J63AzykCOLRrHTJSjtUy9PAQIYGOPQuA2bgrmQmnWR9zXWCYNYf+p21fYJ4LuInMvkWKPHfX8ACJljUKX/eL3u8wWJT2ChdP7bpCMEI2hL707DOPmqYbBD/X89eJuwcj3CWEpuztEhWvkUNQarWzDxwQssZ4WNCxxbiUduzeZ5ByODPfJgCmrrajICACeUZWZFiNaeE+oVn4Y0CpIp/LNbmtKdBBPxtuSLk+nOqiIo2sLkjVB1sp0Y8rVSmPDAyXltVg8IktbWKTLQA9ELEuh5oh0QHsPE8AlVUFpGv72nX1cM3sV6UDMog6md/0e1uzASY/NfIIOGhg0vpB/vI5XFItXouel2Z1w97cspclGR3vAZTHFSpm2u3GKpOcfzgXaTCi5zCguD7Kj3VMiltpS9eNmCWmMIkb8QnAlBqoZu0XYXb6Pm5/b8+U6PXAplKWG2t9M1K67fjbC0a9/T1jpMeMjM/1GG63o3Ou3Pse8QLhcy6um4V94+MRvCOGnbVbbTWpFFNSko2QGWEMHxmR2VHi19xi7+VeLpUnGkPyS7/0S3juuefw2Z/92XXbK1/5Srz2ta/FT/3UT
+FLv/RL8VM/9VP4rb/1t1YwAgCf/dmfjRgjfvqnfxpf9EVftDjv9fU1rq+v6/fnn38eAPDi6YwQ+mB1LxhtTjZV5/YE6Lk0aYaEEzsD3p+5YhD8VF2YaNnXIIBpJKm4cBuApsiUxYPU/zxnTCEKG+KsoW2yysDUmBKRiGOxdjwsblxzypivDpIa8pCAArrsYjJiSDSbNJvFjUKL0ZWqhq6rpk9B6oVoFsT+JSNyiC3bFeM8NONB8eZ8DUiAtvBLzxhQg17x64FFBMIyVqP2UE4AJpfxyJpZYBrfsou2iO6R2kYGteeEnM5y+oQFQOncuYA+7qXuw/+ArAPmS3FG3f6QD/U+dFpixFiPncoxfD4po7hpNQUaQM0M1BgSIJTg3pC5aAo7oWNMUCs7kynJSOodoGvLlkXKLvL8ftzQU2v7Y+hBSWj1A6ybppe607oetvTWS9DdgWwdnBpyvR4LnLXK4GIBZ8EzZNYL0jeTjQtXefcdpoRiM53pfun6vDu+rzkyhSA1bpIEhJ/mVltIzzNb8X57ZCuToJ3ntMJ+SfFX+/z12OA/oAFPDWQrC9ZdrgW451iSRER5zqdAdyRhS+YIMPCe46zV4SguXCFjSn08D/v65tzckxegwvRL32/NXe36nFb3H4m+zih72p7j29/9a711LR7/1rsie14Gq23U74VKh4yScYxugHOWeU26NXTHR5RnmuSaxetLTpmb8SIFDKkRbiYAAZrLZIaMFZ2GnaUU+BloRWMB320r3wFAMqfeVfRpXfNe1uWJApLnnnsOAPDqV7+62/7qV7+6/vbcc8/hwz7sw/pGHA541ateVfex8s3f/M34hm/4hsX2OWUEA0bs75YRGTIA9iVK48mLEzaBif5drjd119JBr/OhTPYRy+KLMdaZRVsidRpPpICpOJ7qhWWUclP/VgNrFxayBpi2LUiPH8DnubA1pa0oa7G5Z+n96eLQAQvTHK91NRjQgJFdQnes8pesRk9geDEhKi6jHMDetUG9W5O4dtmZC4kwFWDC84YIIEfkWuVA2hAuYFECIqrtrOgaok5i4ZZm962MSQUjykqPUBXlBUOi2BOp0pyLE4wc0+JU2vuUngQtMhBveDdFswcg/K1lUOpBiHxuv60lYQAaOzJs247gVJ3Cs38GqAXP9HseC62ylynxrL023mTteE+m0oZL55ZtF5m8UBh1itrRfLZ326XC50eDimzrDSOdhFDrRIQQkBMBN0CGZM7AhIBDBM4lkH2eRHGkAehUlN6orsP76VP2ypunx8clwGKKy0KJa3JJHZm1tUlXoV8ClMvmiq37fanGxpakhLr2E4vIu01QIM92bSnRQIRGoAzpoy6zoWJD9Gf9Lvup0/e7W97LvVi5E1m2vuZrvgZvectb6vfnn38eH/mRHymLzc5zaOXbAyhU7ivid2zWzW9dFAsWSxMLU6og5RDPC/co+RcriCGzcjVFPMilWFqxkmjVtoERoNrRjf+8/L7tEylWyVxjX84p45mrqSYDYH94rmt7hQUTdexIrP+k/6uFmSAjNKVOF5Hjb7VYoTq+urlcsA7ofvXASEZT9usxitmwR+SVIHGeD+AiUEI4aPHMMnnrfYB+Meldz9o45fYK1KIEned0ljYW5oaxJBlACKmwKQksGrKoFg/lYkZJqYKSCsYUEqugJCcAh6Yeq/gSLp65KFdkS2LpAwa+6/iSOaG4pohGHQLAOj9zUoC9joVU34vjJM+R1d9FcTMdrcSmF/XcKS3oGLEh/TFYbgtN6dZARBTUJRtiZZQZRz/KPRZKur8BWBg1bDrgPS5YNt5kb0rhmg64uOdJsb3mMgJcrlBascdfmW3e+a3RRqcsn4rBSMfEVTfTyLTOEk94iI3V5b+oxkLHgtV+YZ+o7+XdkXo+ZXuxpB9j892XUzP9MAslNrehKSUAtHBPuJnb+tW8CxpbovvH9lkzwM3G/Uv6zat7NXou9lprz8Y/ftnGEdvixUFusSNrz
IgdK6NratetRfYxlHcnobC9ucY5xDiBq0msyn/rZ46HEPLwfeM+1SCWRpXsbYzIeswIWRMNRO7xyL3cVp4oIPnwD/9wAMC73vUu/Lbf9tvq9ne961345E/+5LrPr/7qr3bHnc9n/Pqv/3o93sqDBw/w4MGDxfZkGJDDwDqj3Yy0eEp3Xnmb+FuO4m4w53asduPi4qVdt/jbM1eNPXlwiJgPcvxxCogzgCmWqrZLBQcwLhGpKQG6T6zEYk2LoSlTV8WcdnXo88jPKddsYnt9b33w1dKdturnoS3GihVhhitx6WqLOAHIsViRjxOrpau0vI5l8dIJ0duf5/Qm+HY537dH0+Jyjly3NyWrARIPjNRrdQorMBelRmKGSvsiXX4OBQyUQopICMyUlRNCOgu6KwHuQjrs5JHpypWXKjEhdE5n2Wc+o9YzKWDJAybV6kuwXUCHxHKUz0H2mUC3LfGhP8SSPSrPNfNWLEo2leGWgYtXhvvINOvB7xZsjNyu6v4GRXjKCbf0QLOxIvoQ28ymdCzbb8fbmijy5GJZqwuh0wBrUDI8VwErFZQkHsPUti2ZwLGCFqNg7ryRrZiS0dzWKaGhzWU1TigwEUdsmdSYuGCSzxWEQNgRAk7bkwG9SxfQmDXOEZzHM4RpZFbtHOSdmDIEtEfg4SFKfZSYEXOuiRUAyRpXCyqGqdbimNTcL0aqFjDmrZVevxGcbLmnrIGJS2I+vID1vW5aI71gy1ULGAOZkdT+U0wi0ICi9YRgKuQplHcryjx2KvGnJ5SkNam12Xsd9L1rkE8WpG1voANQQMMAEW6rwEQZDex693KW+6D2l6c8UUDy0R/90fjwD/9w/PiP/3gFIM8//zx++qd/Gn/5L/9lAMBnfMZn4N3vfjd+5md+Bp/6qZ8KAPhX/+pfIaWE1772tRdfc+Rq1Kw3/uQ0+m0OvutCSrkWs/IWZn0uy5qMaOlzypjKCx9TQIoctGI1mYIaxAWkdNt0+9Q2ZsLwRLuKsDK9BiVsH9tsJ21t8dHWoscRrdBNUcWIBO2W1ayKIfQuTFb0Nrue7fVv3eMRpFkWe16yINKGBjzIjOiiUmsKZVVSAkrtAlG2cxDFpCkvEj5+iAdhMg4SP5Lnc89uMPNW6APdpQFGi9Am/popTG3zsorRtY01UwpIkXuIQzeuqfhHC9jimENlRWj9jSgKGuQDg7NrCuwYymfVfO4srXD7l324F4jo98iedcRw6GtZ1yz9nNlkj9SxQEX/ZrPm2N+By8DIk0oY4IltV51HFDABtDuKMGdA88XWIPCSxX6a+jl+NIf1rJYGIMt0zi2FeTOshHIOxr+5QGQnxcv3pn1vwC8EGf+R9X8CGRzgmAIwo2RrLK7BJfEK3bOACBwMICh1q8rVoEFJ3UfJXhZri9Ww68/ea6y5Z1n3s9F3C1L3rG22T7z+sb/XsadiSWJ5DKmgzBTE1VGeQCqurPJMpUBsREJGrEVj/bZqAMLvuoisZkLsNjlvD0T0No8VuQtg5F5evnIxIHnve9+LX/zFX6zff+mXfgn/+T//Z7zqVa/CR33UR+Gv/JW/gr/9t/82PuZjPqam/f2Ij/iIWqvkYz/2Y/G5n/u5eOMb34h3vOMdOJ1O+Mqv/Ep86Zd+6UUZtgABBq22RpuQrsrvWxPYda1b0gfB7fVltROpnYQY6A5ASlJDg5WWalHuIeGYy+IRUYPbovKdZzIknYWrXl9NCDpFn5aprGqSmnTqrBvX51j7YZS1TN+rthRplzTGy+wVWgGp6NFSSFAS0VweyJqMXBwolXFAv5BrhY3SEU7qy75p1QOG/XkXYKRkMpmTdt1itpTlFbTVHsiVNQoh1+QHhyyK2xRCYVAijlN5C+JZ2BFdaT6V5Js6hXFOEBrCN20GFTuz7IYWfyJfCwiZE8J06Fy9dJrgGQCKJZBsifZTEZ0pAynUgoq58SxSzXqSDENA6lJgl96X/wMqSOGiunTTagquB0KqwhnHtW/K7
TwxERyVOxBSY4/Mvv04VtvVuNL7aJeNNdkLSrxYEVurYHnuZWdFlaFKFK9ewUu5MSb9/LadxEDLcQENlsJnD/jpnB9OjQEnS1JTvteimA2M1MxqoRlW5Dr6LrB4B5m9LgbGaJVkASWGRFgSYMqhxmcBEvw+51z7awroCinGkBFnIEUA5z6ZAyu7s77XFKe6Zm2zJOtgxQMde+I+tkDP1nptwcecdA2t/trNEIdu2+j8I6Dm/T7FoKYoPkPGxSUcYyyJCFrbYkRJ1y3nOYWsMnYO+oNzRQcw8mC7DzzkPO1Ye496nxpn8gTqyrzUotmip3nNe1mXiwHJf/yP/xF/9I/+0fqdsR2vf/3r8b3f+734a3/tr+F973sf3vSmN+Hd7343/vAf/sN45zvfWWuQAMD3fd/34Su/8ivxx//4H6+FEb/zO7/z4sZrC71nyV+zcOhJ8eoQ3QlxNJFe4s60dm07ATK935yzeOPXlx3olN9quAoXWzHlOpA8+iqI9BDHGbO0MABSU9aPy5KMXEH2KHd7L23BiAYMdR+M3V72ziXe+TQQke+FIldAJGcfBNXTmawsoTADUnMiS+xHFGU9QCymAeLGhZwElADiqhVRmIwJIfcB+4t6KkADFICN5B+LYkp4WHd/A7YkG1efqvznwpSU+wvFtSdnjulQwQszT6VuADWgo8XGhQBwGREvG5ZXD+dS2eMw5+3jjUcLRPQ27p9hFMqXaJG8iLEYsNJd7ZOo0zGjMMhlrjduMI8rXVr1wLi1llzjOOnUziq+TbFmsRpUGru5JnvnMZqnIsCQqeoeF/T1AprLYhQj1JxDia8Sa3vKCZhE+ZUEK7lbTyktA2PEnrS4W8AAWLpa7XHXug0L422/DRNi5Zz6ZAkjt7DRteeUO1BCgxLA8dxiTiSZDarbVhWde0Rdzw88XwKRDkysMB1bQETvI/vhXu7lVnIxIPmsz/qsVbeXEAK+8Ru/Ed/4jd843OdVr3rVxUUQPXnm6oBnrg6uVX4UkK2tMj0z0gfV6d/0dgBd1XfS2hrA2EBIts+d7NV5jiUffCx+oimoBbi6iqD5mxrGhFVUKdrqAbQF+zgBMUSkKMrVnICrQ8JVua/rc6xtYgpF/mNAaO9n298PXdGkDblOuLEaffwFoAZ/qsD+NR2DCr9+9PaREwBwjuQz0uCDblTZnGM0zNcsQNpVxis81VmkUsYpJZdOp7Rn1iy0h8KQHMsCdYhtQTuUolvnmIvyHIqr1wHTdACmooQX0JF18UQ2RDEnNfB9K9bEAyqdO5gBJTl1Qe9yDgW2kriflByW9TOZEoIxWojJlKSQEWOUWPxQgkJzs94fnab7blq+ghkQKiDRsSd7dWHLZEzYw9yp7Wrs2bHujVe9adbvJHx3Dk88C+yaEcSL9bD+5/p6kwKD/vXZz639dDIVkHK5QukpoZohk+9tbNjilqyXRBctxoxwnNhU5ZoVqSAb+4GIJyEUV8eQC5Nc5rUAgPP61N6vGEKNDdCMOoPdO4Y/BNzMyVn3+vTAlCcBQvawJGvbH1c0yGgsSYmvidPFAMkbY1YPaAY9KbY5Z3EBZIxqHTtRygEAljUfy4j98MCEdb2y59DymwF8pPz0GYuXaNj+ppI7kWVrJMcp1mxVgGFMHKubnUAfKGYE6CsEUxnXx1mG5PqcFud12zJQ3q1QeU+xWUlOqeXx9+JIptCq89Z7y/5E0k1gKpBUir45fsQAdCXfkb/t2n3pSTHlgMndqw9XWBNJTpI7DdB70Qk4qOxpCzHqb75Lld5/dP56HdPPSwZmyYJoIKLzu7tudtVKXP5GIBX3Id0NIRRFpMSZCFBrcSYxgMm1kMsCGADEKH9zTuLIrKvSp9Qyia1k5totjCvh98LGhJLBrCpOQGVKaF2unwtTUl6VypTI6VSNDVVfI9a+WSrSPkPSKq+zKGcMDTB3mZGU9XsVPPO6obFlZIgksJ+pjpfxH5bt6M4HDOGxVZhsnBmwDUZGY
plZAj6bYOO2DIydTjQj0u946XnNvGXYEIDMRxs71oXvODHjVnPnO06xxYyUMaLBSAUhYQeT47GU+meU8ZczZjQ2pLIiEcgzUJNeZBq4ZFuKUWK3YqjE65yl0cccS2yKgJk5tvWgNzwZa78jW2DkcQHJmguVJ14cpBUa00axk975Rte9hH2ZlKGF7luMF4khYMooxpbQmI6NU4+Yj7VA9DUlfQQ+7DH3rkn3clu504Dktzyc8IoHhxp0qAsMWvGQ/8KSp9JNalZAu3eRObg5z3gmZbx4My+KMXoZOSxA0akEdZv49zQnWTBCVKyCFElMgcG3SsEYmCynIIlYPVcH8ddOzYWrqES2/VeHqYt56c7vdPZisYnofMFTXOoROse6FoIGKqURoatlsfasvRS7GojMZjKe09i1S5930caB1VoDEJvjvS8y1bKc6PYA7VkdEy21GcfEivai8KRimRUlMasg2hZnEtAWx5gIUrSCHRFD7IpOdvElObUMW9ptY2+WLiU1HoVuXeiVc0DaG5lRCOg+B95fhnJbA3JJm50RGlvCxZvgZAD4dMV0ZnoDRLmja1YXv1SAiI0DsMOxga82NlhrBah2gaFoMKvP17tP7FMALDjwwMhlrlZjpmR23httnV0T753W21hsU9pQ5pWVdo9Ah/2d4IO/67giC0IsaK01kyLU+9cyaun6H56kAlBlBGPBOI7uLpS5MNbrSOzIVAbWFATAXyEihmIIyZL4gewhpNJeLah7hBRbPKWEmMLCUNV7GcxVeX8cRuNSULL3vJeIByDaNXSA//7re65as9IF6n6R4L14SYSAmt4/CHgEIM+jyBpDMor76NgSte5RLgUTWwzKvdzLJXKnAcmzVxNecTXVyt+sjL418QODF0ktmKdCSfMFTjnjNMuk8eLNjOuzULhXh/MCuFA8S8yIWagTcGb+cdFgTiWdI60jli1pbg+8r3Lesl/KvUtXu3zZFslaxGJpXjIlzMQ1lwXLy1Si71lnFmvnoctFawvpEn5mxinGCuRy37mAkaxAyQxRvGelxAKem1QPROg7y89s4yjA3CqC+p60rFmfmVZzzksAYhkSb42LAYgzAUlqVtkYS5aftLTUxlwUpKws+rlTpGPQwCQrpYZBuMXlMEi2gQ6oaLBSxFaFdyUlIMYelKBl4OLtkwGZyJ0EiMsWCDrQ3LcCx06QWPostU1yDg0QFgDgBTN3Gd7QB6wTfEwOENH9VfvAnF5PMwzJF5avvPfFWCDjvQe2HrM2cre6jdwWiOw5323FAwzufuVvTVKwopxdUtRynGmtd+GTdwz1vAT2NUugGiM6gJ2t1BnUtLFIvwMjsb/Xui/RJIKYZO9U3qEQAiahw+u4SSnXYPe5ZmwCRCmOOJU14ViC82/OCVPKixT7nhsXt3N9GGXOWmNN9H5WnkSGR0/WQZBvmNsSGiFHwmQ3YiCU50VDK9ADEWC/uxawBB52DnkpgN3LXe4rtb885U4Dkgcx4uFhwmFiMapldWRPrKuBVtipsDL7iE5vR590oE3OrMrOzFTddVLuJmTKKBC8+a32ynvN61/YhhraqAe4ugcsNpcUkQaYyALIHOjSLxNTD8bmotX/bfcw7F/nnpnOWN/PXCx0nl+/HNM/Ry7YmikBequ3ZjY8VqR+rspez5RUay63DSzRI2uz/s0rPNUzI41W9yxXFFL29VpFAceh3PBhKgp3S+15VRLkSq9rMKIU7qo49WBkiqIBM4VoLqCvPcFYXb00MKlNnLarww8zdjlSRnvti8qaFPctoAGYWI7gPsjlfcO4yCfH6giM2HgmD4yM9AP2nxa+a6PfvWZ27qYGjNylVJucd4ClUuVNKVuK15bLlldjBlgyINYtq21bApERYAV8wErRn3UOJc5rQX32xI6L3m1LMYi5JYKQv83tN2dhdpix6QTgiIg5S0DicYqdpb5dW4xkV106YACd23N/jJY59fEZFM+NSq85er8tVy0ro/0uScRi2
7LlpjVqA415sm0Zj9LdrxrTaZZnW/IiVoCyx4DgMR8eCHkSAOKlAof38oEldxqQfMizV3jlMwdlKd4uWEbxXmjrUpOS0NZkRrjtepZtp5TwyKQOJqVN1y4A0JlJdKElm63Ka9+pzCpSMKz5kgLG7cAJeJvqwormusK1pn7hfqkuePUcsRVQ5MLTmJL+vhZ9yQm8ah99n6fcs1Vz0SjFElyUvShVg6lEI+VSBKyo2ln2szarrK6h3bM0Q0I2Rn8+Kwu0rV4rn/vreNlM5Lr9/l6VW828aSZOH6+lWmrPALP+XJ8TYgx4dE5VodJ+7UBJUxr7d4FxEUBTpKbunemVLCppsq3sRxcChMK+9O5eGeiZFH7XQMTJ3MVjqWylouDJxdihzCwmMSQ5A2GyiQPCgvFiStmOteCpCTYMEPFctCyTpI+3knIPOtr9OUBEfWabG0Bu43LhcrGiT6wZZvaKN4e2GDa5PotS+tmyAKHY6sHLfRwWg7IGSlzGS8+L9fksgQe3j4AH0N4LD6BWIGvGRrnbDqx646PGwwGFxejB6l6pRoNyPFFfLv2eAIQkQGUK3L/MD4X5Z9rmR3OSdS9kHHLAec6lujtwirLmHWOsrO9y3WsARacJrkDDrJEjRmUESjy5ZLt1R94ThzKKJdmjyHvXc487p/r7S6HcbzFOHnsFYJjC3wNmWzFFLye5L4z48pQ7DUieOU54cJi6TCe2WJlVGNq70r9Q2i0CkIwimIBHc0AM4maDcyrpEgFS2oAo0NNRLP435wSUHO6o52sK157JhkXAqusWxLupDWjZlmblgkVqVy3eujZAq6Asx1fGJDVWgL+d0GeuoQvWaKHYI3NuNQWY3hhArUJbUx4rS1rifZc0tuLFVtgR3qO30JemaZbEsiIanGilT4PSUc72dh0NSNR2AraqOO4DIid1En1uqWZdMkaV8Uf3OjInKVC5iTgh1wwtc06YUq94EdwCkBTAoO+6MA5UunJsygtZlAoIUy5xGwUc5J5FEeVtPwsyGk06valW5mtMUWguUJJtSAWFl/tg9e/at/r8avx4Sid/1/Eidsj15+jvh4xACHBvkm21yRf059mMJwDd2AQcZpTzzwWv6SXgZasS+xSWtvI6B42UHAM8JjOXrYnX9kXWLMWA2NihNQYEwAKgtm096Ajm2iMwMpI1hkTvY4XjS19/lAiC78yRKRZpLIoBx9yY5zoBg4HuudRhkXkm5ZYyn8a3Qww12Yt28/XEgo/R9rW1Zm9ylT1u06P28fNon7Xr+fe23J8Zvp5GzMzeopPa9VqLbeddAiL38vKWOw1IPujBhA9+5rhYRBhYqK2bnlTFRSkFtFQ9PERkZDybJpzmhDlLLMWcgevzvGBNTsW6/ugs7EnKGS+eWoFBCl/wkeWBQeCYuagx/qDFj2gGA1i69DAvPtAv5BEEKE2x1dm2+JkTJrOs3JxTzUg2d+3uZ9a1DGN9YOng3qmUQSzEOZdYEQYBp8aOUL/MA+tthmJIsGRDJHGBXIvPN6USO5Q1q9Gs0GNGZHkPQNvfghAALW6ktEMXalrzl27KE2oGNs2aHKOMl2MNdB9XHwd0OlN+j8pHvoFZDg/rIz8FVouWz2xre//oRAXEgQ+1vl175wlLhqGCEBDICmiJuWiE2hIfeotwyiPltZxfgwulVNr9bPu5PZvtgAEZ6Mel3U8zOjSQMH4N6OORvCQIFJvkYsv1SWfvkf1Xd5djgur/3F8nFeCh5xwe44kGDfr6nWFkBaBM3TOyQKQdbxmQS8AH0ANTzYbYa691N40qAJryX54h4+Ioeyq501Gq9VVjDytTUmJK9FA5p4wYYnknBCxel2cZQ6rsyam816cY6jrHd11iLctaUeY3ghTLmghwEfMCYyY06LD/RmCFssaAeL97v3nbbPpfT0aMwqg9PbhJXTzJKG7GbrtU4X9cpmUPO+Uf91iXvZcPYLnTgOTZ44QHU5+KEyiZUCQOd/VFaoCEeZJ0M
UJRiE8p4xgnzDnjYbH4XE+xKq4MUn50FpenY0x4dJ6LkhKqolknWTPBDK0ztNZngg6UTFht4aFSy7zlMYRa6yPmZr3rlIuFkiCdVwFNRA2gs8DHfvZE/+65o20pRpL6uLjDoCzQqRUCjDmUQHa6NywnaQ+IMOOWgBABAXTRsuBSsyOjarZAD1A8UDLKcEIQwn30+ND7j/qWgE9XkY5Jvp9K9x5TAxPVyh9bGtJxEcDUuT6K+1esoF/YFghjwrYEuniVdmYqeSqzF3oFbyTaOGClWn3Rg5JyRGHQIOwiWGxUjLz0r2cbPeN+Uzr3KYJeu62QpZMW+mCEY5Pn4D4EI9WlET4Q2eMKUGsXOcLU4UDPrFzCmEzlofRB5qVdZc4B+ndmy7VK77MGUDTg6M5RjuXYLU25FfNB8cAH9/NEH8s7D2jPzBZMtKlcbVa4kUR1rGYPEVugeyhJHhBRjVE0zgS0eiUpKVe8kBDrM4Vkr0tS/X3O2Ym1bHPazTnhCgI+6KKlGQ+9Jlo3LrsPZSuWZA1oeN9H65k2HO5lFNZkC1x55x6VHbgtY7Q/9mZpOBrpKpZF2WJ5Xi6i59Snec17WZc7DUimWApQBRirLS3GbZGyC0YPRkKnUOQsCj23RVAR5qIeccwBpzmribsVnQJU8FhReKcgWUysBajuPwAqLZgtVFebGY3FqExHymgB7+Xe0tI9YmnNFFCSVBD9MfbV3K+miBuVDphttJm0+LtX/CkG/mtKcgy+K4Y38eUCEGv8jAImbZ9y36XPNBjhPWvrs1b2Uh6DEQ1SbV53bq99nvvnqPe1iza3eVawrUmdlkj50gBrq26dyrampE0ZOBeGrSp6ST2TGDAlIJUFiS6QpznXz3TxYprThFwVIbItGc0lj3VQ6No1c5huiJ92uSn2W1IzsQX9HrfzknW5EHdUNoRKpVYoNZOj25zVsSOgnDNTEDT3Ue1CyHvXYGRPDEkvPshrmfqWwGRLrOsWY0n6fVr7J4Ny9gCQPcCDZ12yF0vwIZ9D3W8P++GBuT1jR7/GPovWP5NNgLJ9yXKt3qVxQou70gVGmfaCBUZpdIsJZd2KkoFLUA2mIIzJnEuh29IiMcyJa2TKYtSiG/MUQ3XpYlFh2Tb366EDTIAehPD7SC4FJFtiwcgWi7E/WF5YkmVsRwMhHii7NG7F68Ot/vBiXvvfe1c2ffxe5uhe7sWTOw1IHk4RDw+huZSExorQ6jWp7V2AbakQnYrVqAck4oOfyl+ZIIWRyDlXhX0+ZKRcWJOzuP2cjrm6dD06pEUQ/GnOwIRaBXfvZFOt4uXfKYl7zYyMFMW3F6mlCQaguXw5R5k3piCKwZzld9keu0VGFrNYrV/HKSyoedtuL4BPXIBKcHUQV6KaFW2KzRqvFA95BpD0vyzIJxpgVSRy9dnq+4lKHsqtWzeYlHs3GMuMjLJg8dxr7IeW5X7ov6vjvb70Jnbr7uf1u3WRa0ob+u8EhxqwlG265gLQgAmD5XUNFHENK22OxSCQWlYvXQclqveUcgke6BW55e8BjTnRYENeg3KfGjzuvHjOKDVNCtgr22OgdblvoAYgPF6O84EIxxWBiB2ft3HZ8uSEpdtTAyOGRVHzxpa7lXYVAhroYH95Yl2s1kCHZTos2LBAw4IMwAMqy/bbfawEjMGwHY/eI/GAY+j6oX+Wsmb1bfTGbDR/uc8UhB1hfaKpGN4Y6TUHcWWk+yoZ5EMEDmnCscStneKOebLUMUm5jVE9fh8eJkjMSVv3rs/SYgbGW4BijV6XKOP688g9ek3WgMjeOA8PnPQAIS22sy8AdIWXL2XQ+df2gedOvcY49daFlpBnDejcBVCiGemnec17WZc7DUho2eUiVRcxNDAyBbRsPwaQhBDFnSP0dqkcQlN+k2R2QqQCEdRIliUqxCDVs3Nb1I8TcCpRtbEoaMcYMac0dLG45EUWxqQpEHMeu2QMz1E0uFafJFT3LWbeknst/ToBp5kKRGN8P
IuLzt5EZoT1YrQbEdBbY+090IInSiEWWWm6fRUQke/GJ1+BEU3ZNsDRf9eyF4y42bY2wEg91gEj3gLnBUAugj9JnmRhK3T6SEntrOKIALD4lo5b0vbYGFFTC1NpFMUnVQAzZ3ELkWKFqG52OfexP9kogHJ97JbRa/Ik15ec5f1PubUtoWdE+Jt3XQ1GPMbOgmULRjRztyjgOQAja9MH76EDL+UxM37k0oxcjDto19B9k1cDzXm8XN98d0AI72HN3Uqz5PqePfDB4/V+dfvKPXfrxGAfb3pfJKkw+4bQPz89rgLa+6r3H4nLBKrz0o2rujGWeV8XGpWMdqHwm0knuKvFFFPOwCRAJGYBvLG2AHJcQjEqpVJTaByXKEHwBCeFkT+P1TirmO8BI2sB7I8ja+7Xe65n2RDrvuaBkUuvOXKnGrEd93IvT1vuNCCZgsSK6KDbBRCZz1ITYT5Dqk0X5YAzegEmEQBirPURDvEAhIhzeYnnHHBIojicUh8YLYG3sVijQnXf0orDFEKJOynsSm6TANCqwOtt9T6VIsrij8DS/cHONfJdlCidX19LDLLIHBGam1JZfR7kWBkCntsGZ+vt+rlwXmvB1qHGMPAeHk4tXfPR0V6o8FWlGWgpfp1VvyqBnX9+U/QAsd7ZLFpW0dNuWhacePeu77932eqPs/3E/rEUuPZb1tu1JUqDFh2Eqc9rZRTUqVMCk80CgONUAuQLu2WLMMbCkExhRgwBDw88ruxfKsUDGaxyDgC6QCNlDZysLZG6Ny0jYT9rWbNWsUZEA7iiHcYgLpRVuUMfCC77NvDBNmU1vriti0tSIGSUWGErngnwgbQVDf51HZ8pSOY6iRuS8XpsqfsWDMqk/vq5CpbAQ07Vxp3ecwt06GO0G65OQ12ZcHWdPTYar/l2fHQ4zvZ77l2AgXU2rwdxvACWRrEiNLhZNs6yJvo+untS7x7vpWVyZByJ/HYqNz6ngLkYFg5RAt/TQbN3cTE267idZJzq9eOUhHWfp8KiHFJ1XaW3gI41uanp9OeFsm7XRxvDQFmbA701djbnGRkIF8Yf+HOrlpH7mb2363PvObEFSvT1PGaEbr1TDG6FeHuOxwEldV18THD3NGQv4/akr3kv63K3AUlUND6aL3EFI+mMQCAynzqGpHvlQpR/czlZiMAklaQP0xVCDDWrkyw4oVmDgZrfPYdcikzJzscYShE7shChW8hnLIP89F/An8DWJowaLJmWgIXX9bYxJXCzgMnEcoQsTBZ4WJDSXdu4BQF91ieCIwZU6z6x9yaghOdftj2rpd8qpBaISLuxWEh127Vo5W97Hx+IjI4bCZ/32oLoLUh23zW2bUTfM06p+jEzdXCaReFLJXlCDPU3Kql6XM0512xdjPlJoaXebcYD2T8KZqnfJc5kqVCOhr2+1aoYXsCX2McTQs8RabaE24ACToIXN8LzNiYE6McT99EZ3jz3rDUgMkqqsCZeqvDOGam4atHNR3YbMygajCwLHS6VNAs+ZFuo37WbnXW/2lcXZ1/yhJGwH6fyvYLSwO9YxM20Y/t4Icra668LZAI9k2WBb++u1rMmHLM2RoYyqXshQMm5uXOxnk8LeC/ZDbOsbzTAyb0zVkTG6hHAKSTEOFWDVvst4BQyYsy1vgndgU+Ba0gEJtRYE65xep5jdi5vvVyLj9jatsoyr8jWvjbecgSE1tyzbiw4cd57oBk3WYuMWc7Yn979WwOWbeM9U3Iv7w+504CE1H7LHFSARjojpLOwI2mu4EQYkqVtNKsCbYHgJM0yy89nTNMBUzxgmiIyAm7mYllP9FWXuI25FGwDqHhrK2Ko2YdqULkzqbXJtg96W5sgtP+/TutqAYBNAdudwyj7KWVgctyUsvodS6W9bxf6+y/t0DEKOpuTlaZc1i3DffQvtOKxfVT2eB9LJW+5zd5HGigiDcy135jevx2vfiv3yWDPrcnfMiW8Py2jhXG43bHmMRGB/leLcRa3wyO3G
WBynFj7pFjasyghXYHGqfEhVKyqwhXadoBKVYsXsr+vSW/J5jaluDv7rZ4jtI3NEUU1pFM88+IaKfcugwTJ2s+e449FUPtsbxzD/XvmZ5fbp0xVgwHfr5ArewnkCkpQ4n685BgAuvfWy2wFXBZk3m9vgJRjZVJzV41HQmPFeZ09qpTuKf2asA1reukirXj2ExfUa+14LGyzB8aZGAK5d3fUrAld1Sy7p4GJ/szMc9zO+wiVuZe25IzqrsVtKbOfxDV5ThnHEotChg9AjT1h+mBMKJ4DGedZtusYlRhCjVHUwATo10pdcJF/b6tA2zVYf6fSfk4+oBiBjpHYtdxzz9Ig5EaDFDUHZDM461JDN13nfuxa460r3n3dy708TbnTgEQKIipWhKBjPhdQMrdt6SzmceW2Rak6B9mRGBtrEiJCPCDHCYfDFXI8IEwHnBMwByqzZUGMolgcY8QJSZSxGSV7UbMW00VmFIzaMm20fOWjCa8FK2v3mwaIxM2mz5rU+q99fjBI32KLsbuMgAEpnoyqI7Mdy9gR9dk5bXObMsCILgcrQGr02x5hvZY5tQxOe0GJvmYFq9N4HMh5+gVTF6i0fbH83D+8EbidCpvB36YYOoBydZDYn5MGGOW3c4xd+ukpBBxTC4KPcwGirERsnjv7xg9czgsFFvBdbKysu9zsOEE9UFmty6awAowrCFIghIu/dhfsg4J1AHsDIB0bwrF6C1bESmUco+r/3IBJjdWaIixzwr6Idd5p46Z/dttB517AeQOpfaISDUB4rbq/ihGsBic99hWNQ+NTNTwBmAIhcDk0966i9VgFPiwYqXFiJnEBu25LdP8wOxxZQ4KUEMhSoIvFmiJZjmL8CP07olmVTsrx7b5DS48eUO+T9ZrmEOr90VU5ZxlHfVKG4p51yN141y6IerwzMP5U4kck1b7cwaPissV4EwsORqlx94gHELzvqzXDili3KW9f/kagod20LRDRrEhKuYIQC0a4LcRWLPgGqRm+DGjbMnD6br3jGdfTS+5Cetucn36ldi+V/b30cqcBSQhtYaqgAxAwknO/SGkw4rAkAEou9iSz7XRAyAk5HuR8APIsf6fDATk2N6xcFpBZKS9b9Ta2pE0gaXVC4LUYqxEDKjvSgQA90ai2WeUQ6JXnaUInR1CBDnXNr4HmG7fcuXI519X37YkFIUAPRPaAJcuKUDwwNRUNyFa7B8aUvQUl0u7+93rN3J/Xs7h57guj63tgZB+bop09lkLa/2qKOM1yX6kkd4gho6YdpoV9Rg2Cl6KW7Xl32arq2NT1dmQsV++S0KzPtN5qMs97zey8791ydkCFJwE68N8/v74Gz0slTn7rx6jNVmQzF51VdiI5V+7u4fEX/NA3Wrlu6VGQimY+Mpi2xBV+hfM9QMQDIfK5ByK8Xtve2HAAau5fJi9pE5UZ4zlVULIla2CE3ZiQ67tOxZ7Hbgm9hcnEiYtWqIA4BdH2I0IlsKpbV2rgjamoNShpjEbpBvM89fhOCDKfl3cwoH22tUzk1S/JC1IoWRGBECUYPqZc3vdynSBMvMynSX2mmzMz9hX32pxrcV7NGOCg0+U23tLt1xWAssaOeN+949dkb3avkUHppRIPPC2NVPv1lzWXtHu5l0vkTgOSKQATVOD6+UYAR2FFNDNSXbdUti3PfasuUPOpuHCdkOMETIfi+iV20sN0BUSZLEOWlLgpiLJ0iJIcJNIdKaEBhBiQ5qaIXkqPnlMrrhgPVOIa68BYDQk2FrZGV+H2qhYDvm+3qBZ++7ZiN4DbT04j9gPwGZAR6OjP0V/DAyAjRS925lK9sovrw5z6jFWL7EMK1GmwNKlzRYXmmMEM6C1clinp2l73S92+exc77arF71cHafjVIeIQA25iErYkBsxRgIlYM5mKOjfXQcXWsU/qvfIadbzptK99Agbt0jcKiH4pJQTVf0ax092pxz5dBr3YEAIP655lt8n5+5o10obbvVN+YDnquGWWN
SBWpoTpmnTBPZ5Lzx+xKMM6xsMCEIJzDTK05V4HodMNS9pqAAjn79Qbmrr4wNJHOYS2PURA4Y/MbUqSeoZkOMgCtMx4S1aEln4ve1p33pVHp4G1ZTTYj/IMspNVLPf9H1s2u4mzsUHu9VtOlSGSPpNfpf1ieND3zr6prEkZHHMxKnR9EENLt140jRYzNXXFhQHUdyDlqbImLDJ8Sqlz5wL6gou1HWoelL7wQYXHFLxUsRNsg52/R/O0JyGGyoQAjSkJTnsnM0/aeR1YxhHa3z0j6Frf3DUgIslsnv4172Vd7jQgqcxIXaDykr63FjMoIOIAEr5ymb/Hg8zS81k+pzOQjkAU5oIMSbX2lX/rMR/FyjwPd7lYdLFB+d5b462rVlUAeN9sP3qL6JjoKRNjRsnAI5v0IhynsAApI7ET2porlgYZGlhsuWKNXMr2KHmxMmIA0LtoccHRuEXHinTnMR3qsSk6lbNHszPofcl+JBeA7F30uCA314IZtnhXbUvISDNwnERpSXPGsQIv+V4VWwAntGKJjVEjCOHvYkE9odVCIaPC4yPZGac/XxLfZ9VlQeNS9AqmBuEWjDTQ0YLUU1FyyYboujcMaud5eF4rlyhQTKDRiZiseXeN8TJMyZrU+UOxHlph7lmNHojYorWrQAQYsyHWDbcwHy4wSVgyJUaW4KLNaZoVSWquq65OXFpM3JAWz9WnMoIgO6Jc63JQTAl3DiXrYMmWVa4aIEwi2ZIQZL+UmwFAg5F/3MH9AAEAAElEQVTuL38L0YCi9oA81kQaKcAoZ0kAwzUxZ44M2e84xcqcABFHAK1YrzC1mjU55lj6X9BxTAEo2b6uiuXvEEMNCmf/TiXFvu5jr98v3XYJE2AZCB4zNiYtz6lZeoISwAcigDL2eEDFASN75D6m5F6eltxpQFJFW8k0GNGLlZp0c4hLdsSClrKfsCxAjrIQVheuJD7IhxgRMnCkS0qd+HOdTE4GGMwb6rlYJ5L6/GRE+317bhY1HgVNqQCM9U7ZpL0Up0BYuCtsWQsz+glbK2J7AtKBHnx4IOW2ooGFBSWdkHlCv7Bo5mTr/ICwKUzJGVMBPFPAjXFJ2Apkrz7IqS8uNmyHWqz4WVJGJswp1jiTWgPlUMBSbmwJM29VhTMGMPDCArHeYq9ZkxIDVRQMzZisVfP2zmtlz6u0limq7uOAQY5DggkLRPibDurVQKQWTUVfNBXon9tWraKRm8jIWpqK8ipppZq7YUzNBU9c7HxgzflDLPQyb0yxsSIEIpxbNAhZzDNYsgMd++EZmVZYbp3avW6bypIXD/W3VueFf/PCJWtO6rNmDDgn5SUI2cP0emPSK2pqDUczmgGsFkIs/Z7L2hOCNJhMyVyMTWT6QlnfFv2YU42xYWFhYdFamuCo+oxrXu2/BNBok2zfJvEgSDngapJ+Os2y7ymGyiQSvJNtPsaMU5L37npuTJV1a+3T51vwsB3NY0H+VpzJJcI2MoOi5yZF91j7/s8FnM4xL85bjw963ibLHTsW5IH53v61Y7x2a7lrbMi93B25+4Akr4CR5FjO+JHFEG3BRGMtqvbDHJGrC9gBeT4DIeI4XSHmUCaKYjkvdDaD2gGZLNrC0bZbaS//7YHImlJWFQI0C6Zct/+s99VWyl5BawoMF2tArGLLqtT0SS7gJDRfbN3PySza4zoMfdpeQAOX3V01FM12sC8ISvqLLPs6Tn02LgtS/Os5wCXmqm2MrHcNeCSMwMjI7UtLPf/AejbF7O4zRbFYnhKTOeTqCnQy1xqNSw1gbMrolv2puQXtBSQ2M9Q00Ef08ZrJkXMs7yEp0EGhgulVqdauWto9i0BkTi2gV/vKW7cOYBtU3jjb7OcHRmGR9gIpFqv1pF11VuaSSPYjdMyITqxBBdrWiurnFPlb52PzdxGo3rndLvujAyL8yxpTahvnrPq3shzLWBFmnNJxIgQuzWWrAUbLknHbQpwxaWu9eMk/DrGBExKGOaNm2
krlN8QAFpcNZU4WIFreHw1KdL+rzyFETKW/QunfUPpLZ+hiXRPWrNF1TiY0Nl36sRmrYqnhpSvDn1JLNT1n4DokxBlINPxFeWdimLp3h4yJD9y9NdUyK8H9/jhAxB6vQYk9p07Zy3vSBhDPSLF0t1oCDrrd6ne+AZe4OIcWL7bGu7e7JEl5WjzNa97Lutx9QGIWGYQCHIBCMZdJlZMRAxm1b/FaLIknJqNLZ70qPr2hpEyNMVclKqoByQB0YKkMeq4Ylm4FygBPASmiKg8yN/RWTX5OxUXDWn1jaFN1DI0F6S2XzbLZtSuoAOSy9CVeUzSMGijJQMuUuSfcOg6Uaok0v9uA9BEQ2cOOjJRkb44dsyUAR0FlOoIBJRsL2WzORZcwxKYAeceMtmkwssd1Sy98bOt1Wdw9hVgv+C3XfSoKuwYZ7RqjCbklUQBQEkTUDFApdODkBCm2GINUhmaq2t41rJzXprK24z72z4zH6QKA89yPEa++yYjB04U3U26uWik3ly1aeteAiPWLH4keJ2vZ+fhc67kLgJyTML0yFjMOBowwyN0KN9Fib38LaNttjRANTDwwMpQyb2fv/dXrARQrojIncv7IUP2be1bEgg2CEe2eVWMkyvk0EJFz9i6mwNh1tPaJSgABoEsC0QCyKJGyBDTWNiBXVzxxlxKGBLEAhICSjcswJVtSWJPGlqD6MKayZsh7I/O/1C+Sd1EKtcuD1m5dNiBeVhCZR4Aoa0bk34CRG1f72xR6ijevcTuAhWvXS6VcW9dbgpK9+9p7sO0cMaBkSDQY0ayIFz9CGYEme/2Xst/u5QNL7jQgSRlAPCAfYCxqB+SSGQvTAUhJsmeFCSE3AJIBARUWoFjhtpRk7ssSJJ/nMxDPmOJBFoeUqxUwB77EEcepVGhPfQG5KUIC9dRE46V1lX2D+xmg33mS2hCKeWF2o6msQJEAJiwtn53ft7LINZcuEQ1mLJjQ1ka2nwW39AJejgbQZw+aolif9YLeMhQ1Gh9ogZA8vrdWXzA5qse9ZsG3bIk9ljvY4Hbbtj3tYEBOy+jVLNZ2bGh2RP/ThbZ0Pnv91xO9QF0dIm7QKsH3i+PUWeB0mmCe5xLpxreyBJNtadvbvgtLMh9L7Lfr30YMiq7Ls7RQr9+LTqagXbfW0pqy/shalWrLeum/WzLFpD5rZYVJCcyzOjQWkMxnKmCFQOSIYvl23hNhPkQx1SCExgzLjKwCEfV5NfGIN1eTCeHvNFKhzU8o85OOERFgIodpEKJjeLi//j3DL26p2VsPlFDsXBVj6NhzywK2mk6pujEynTtZkylAal7RGAT5PpX5/Vj3C5X1iCH2K8KK1wDZEuni0AE4fY9TaOsB76OCstyY9LmsI3OZZw8x1vom1WVrirg+z50bV1SsyZSwYBo1WzJSmhlvIp/TcL+9Svdo3rOAaAtgaIOPZxDa0g/snNyASFzM2/r4hSF00A677a6BkjVvgZfymveyLncakOTyL9gFSi1CAdAGFDVFo1jYYgMllNHnRQP0QtqUIklHnBdKPNPxclJtAc19MaYtP3GKZgaYhjVFrUDnUuVXEEEKTcniwrwMhZAN3rSqU3DKvs1/mcfSSpbrdUo8SbGqJeU6IAH0fTVrT+yivaxUm93tdX+12a4XuujbPFC2eJw9vc6mRcbE29dm3dojtHheQvPaCW/hh3yBUuu5CejPYlVcvhsji+RaOxcWwRKArdmWGvSfm4WW+5HRqO8Vg8JDezb2nYsBmOf2vBM08LBa43pfeRnfupoimWyIZkhy3eY/Iz+O5NJFban4JExxWriCtDarPsm3dxz1Ym8eW2Jsblt2exHrlrXmmqXdsYAlO5JqTIgPRmps1gCMrGUCpNh5YVYZGIE2LmVuUvuoGjGxtGuKEpSRCwuRqOhDGPug1oMSUw4g1PlK1q649CKwUtiSCkrQ5nb5XtplWBMAxY2rbYu5sCOhTZqsBB9iQCpuz8ICiRGLc+OxGPxOc
6pscspiEbRGvpE8SUV6zQizZWRc298yFaO51XpRaPesERjx3L22RLuceWzOvdzLbeVOAxKxNorFJsYopp86mR6h0/TqDC15PgO5JbTNawAkxFYwcTrIvvFQ/JIP5XdZHAKKBalo6lOJK2E6XiBJlpFCux9zwCkuszVNA/Tu0aYSIB+LotEWlmOxLp1SxsMpFmUrYs6N4j5OsbgcSCA6glisprqklC5Ar2Cwh0IIxTeYCqBy4dLZVwrymZFLFWFUxJOKO5n8X+6ztGNLqOgB/UJvu65f9J0Jl0AiKlZCu+mUPo2h273bT8eXaFcu7nsRW2LuTbviUEnVbj0jZoT/PIZktDhysdNsB3/XFd5lv7Sg//W5LgEklIO5Jv8uzm1YFGDMmOjzr7EqU+q3dfutLNS9JVy29VmzSlxJ2mZGLNul+2vNimrFs3y6z6MoLC2YFqguM6XGzBzFoDHHrOoQDfqCWZ8ccM85BlAKMNCU35qTt33u5uac5GE7c3TdH5o1lQ0WgGQs2RB+zrkHLTZeJGOdEWEM0ZIhaeepfbWhvNUxN/fjlfM6kz0cS2AFsdpxaunfJWufeq+DsCUBhUUJ8pwm9a4ElCxbI9aExSXRz4lkRDjHSqat8pl9HwSUzKkAl9BY9DBBEsjkUr8oAygxTY/OzQ2W8WkxALGsczjPZS1pmbg47+k55TYK8+MwI2v7rIGKPW2wblP6M1kfApIHHTAxbPaK8UAH0XNOsm5k1oj6uDXY7uUDV+40IJFFJuOci3JMYHJArT/SVebNqZEC/Jxzt8AtfJJV5fZMIEKQEpeLI90Tcsgl2DPjOAWckrhQHWOQWOUY6gQ6l8l3zpe9yFVRiXQPKRNykL/0uz2FjJhbWtUpADFK+kWJZRHeKJRFhcqC3I/qitCDEZjPukAiyzfQjSODVjsGQrYJjKwRgc8eVkDPz1zY9baR0qS324lTsyVrMgImT4ItGS1+W0Bm6ca1Zn1vilI9Zu4VyDnlupjpc18XxZWgue0z14XQs76vtVkvpjfAAnysKddTDMUoQeupxJnIb3LOqFwlmUr1pM7HSuXnYn3VLl+VUVlTwpWyqccildUGTJbMSA8s11mRUbrQNaG7nXecvtZU2527+ji8F+1yI2NaQIo+t8Ibu6XOxw4YaTulDnRUcUAIP+tMf3MaFzQkE5Kzfi/IqCyzZp1SWgUiOsuaBSGXzFOAAAaKZvtOyDiqgqMpzwpoAzQ4xRAkO2RBDilJvIfATXlfGF8ic75MVqFz5SpdrZ+RfgTlL+e5DqSgGay0sUqMbw2ISDsAHfsi7WjP81gyT6YUME9ydiAi5QRMqHElc+b99HPEyIrfM77Ld+5JgRFvvx5ERHef24ieH10gUufC3qBjhSno63oc7e9LUDLFsGsNfX/LfVD7y1PuNCCRBUKUEE6hGcAUD8Je6AxaBaAgTcAcF7QzF70KYBhnovyQmSoyT0fxVY4Hyb6CtmhpiWVCjYEB7mK9ovWHVPOxgJO5xHvMcWnVtvethftOUdKwTkGo7phCtXBwjjhOEdMsQaupuG/kTNe2DKSyyJXe1DntbQXgNdETWSgAJYRG0dN1K0JYEgl2VMdHYL5lnZbOCulMArrOh0izPhKUWKZka10aWT7XlI+u4J05pio8KuiZ+3lKLH9bAyL1+xZYS21foFnP6QKgXQn0b3KMPLRLFlZaoW0gdgPc2wDF+9vqleTF4ksr61QAOQEKlbIphApQKN5Ca59156aTSgxJSm1blnf7xmGy7LPj78DYp3wkWgFbL6IZ+m05I6ZipMhqW1EkqdQy6QKVUL7jEc2gkbPE0qVCiYqdpBX2oxGi9m+xyi/E2ZYyhP0w92SZkC0QosELv2s2RJ6hX1fGc9EagVNuk+/7nmVS462OX7U7EzwcYw9M5gn9OlCAwBSBGcCxzMXkyiW+ROZgZuei8WmUEW1NBOr0xhjlNa2YjgaOpGaKrA80WsXKshFUS9FOMdwVgGIMfHMUr
4nj1Ax8ei5aYxUf1z3Sky0Q4hlZLq0T4p9bndewyZYl9mTOuUtBX+foScc2LgPuCU7u5V5uI3cakJzmjHNCrVSLKItc02MDQpBbnKaDeFilcwMn86mdrAISeZm6qYgLYsnWkgvgkQWrLGhmseG7zvoerJbOrCE4xI5ybhmlSmG0w9LqbT97k+sim4ZSvEQ5aovpcYqIZy5GsVqoYrV0FaUwOqAk544lkXsW5oOxJWRGAGGeGE/C5SRnVICTIYvTVFYpHc8RGVjriLVI62fg9R1jDiixYyxo2isgzgEmVhYgwwEjI4BkQQj3rYogwUj2lVbr3qNT/Nrz1/2MtdaVMtxb9qwmN7oAWVwPjLx8Qe2fU/scu+03GC/0I5cvb5u2FE5RmBSA7mDLdoxSdQNKITbPUT9Dccnqn5XnpmXBpz6/vd6WWIVBg0i5/twpRnJeUROnIIaLY5RilTRkxByQEnCCGDRQMjvF2NJ706AhBfMAooccWgwZ0DOta3ekb7e6iEIHUrf+vxSA2ExZay5ZBCTcD2hzKq/fv8Pr88Ga8DqN/WvMXlW2Q8Bc4iyOar7n52NqlnIqpvMkgHMqxQyn4l7casc09y4LTsxMOZQOlNDgU5gSYBlf0hgbYU56pkS5VyN0a0Pt/9LvxyxAhWnI6Qqtn8WSJRnX/fHes8dxz/JAyCHaWI8liBjJKFZr5LY6SpFu1yjmxGAyFQEnyng09Uy/nmPS4eUPSO4rtb885U4DEqbUDGqhA4zbVbG8dOwJIM6qdZ8EoJSZtta4qHyZ+RvBiF7cYBfN/jSsQxLLDMx6BymIqwjAySMgqeA8YDlBbhW60wFwV4coriIlt3t14SqKxykGYKJVs7hV5aWlM6FNbhVIVPBVQJxSFDwJUO5ZT2nOWlPkCE6W1dN7tgTYViL2sCIeGBnta8EIZSv7iv3sfb9ERgvy6JzWWmZ/23OtpYsFGcB4cXt4Xvfa6lX3gud1gPyWDuJZwUfPkG3yACM/A3hsMKL35/3rQFS9XV97UmOeLEhEU7RTyiWuRIMJld4bZEKbEYNklTZqyIG5PoatBZvzSiXycsvZ9zhAZIsNARgTtGRDyISMgMiWUeIyUQYTvUyVsXuCPJIj+DkDkPjB05xr2mCy4oi5Kv9MvRuyvkYpsoiAGZDiirmxJqpFjy2BCMZuc/aZipvZVNygddrjKchLnHKSujppGX+2N2nMbWQPI6Lbs3CpUkCERkwvW+DeNnhsiDsVBlMHi+MrZXRPufRpjRFTOsqTcDe7lw9sudOA5HpOeHTOxYqTVVBqP+HQ2h+C+NhOcULEhOl4pX6XfTtQoc7BBQ0A0pwXlrmmfKBmZ7HXj5G+v0InnyBKfQycEMQiFEPAcaLFVlmkjYLipQflpPBimfyuDrFWZ50TMMUZz6YJpzniOIkF6RgjUhIr2qEExlMxCwHF/xfV3xhAA4EBS/Tlb6oSIUHrpOU9w/OewHZtFd2ySrrC/jTZmkTk75ZCas+/5pbhgQ/NhvC7TjVqGZBmZd/PjnQK/kYMC6DAVV2UzD2mPvC9KrBqofWYiksBir6WfG7uYLY2i2ZRRudfi02xigJgF/F9i612c2L7vefEd/fmPLv78FjdD2uy9vvYbz4XtqT1cWXADgAHwJQkzg0Hcfecc5I4qZDx6CyWaOlLiZsDsnIJEiu8jhELYfxONUt4L2y+BSN6+xb4kHuW/XUBQ8Z9eEUtPQByNvvPybhqOe907fsdYMSysewnFh0FlmzeFEO37VCU2WNMNVUwWZPrc3EVnlrClViU+kMsaYJjwJRQ1sxcWUSulZo1se2k9AY6/755PsE/jTUBmptZKOCJoOkIiX/EQdZ63rfOcAegFVI8yz0Aas2soHycQm+LHdn6bONCNCNivRiYuvlYfmNK51iMmJbxqNe6wLKnXU5HBYXnLPWHCLK162ZMwpjEUjZAmBSOOXW+47S7Te8vuY8heXnKnQYkfIFysdrkIAuhtrTVNLahp
aOdy6R3qPug7KMsfjkvLP17LHM5j5XxKbSAW/kulqxWZE9m+ZT7wasVCq2s6EA8C1aoZPC70MIJqWb2ygAS5kMEUsIphULTRkwRmFNALIp6RvMFD9VVoFk7H5ftoOWLC9OTEg8AUOjCBhTWJjVQYvez5/LkcYEIt20psvJbGiqrj8OGbIkFvXvYCl1Y0QKEvdv3XJdB9f02tmkZm6L/6t+639GzBfUezYD3lEz9Pup3dx48R48x8T57sud3fV8MdPcYknNqWf6OkUxPBuuRMOMWa5OkJJZ4cZ1tCTJizpiLC6hMeYwbyaUwX6jz9Gj+0N3K92Ux7xqgscaC8P4AFdej3LI0MNnDhMzqO9toQcjszAdbosfdQqrPkz6XMHkxqOPm/pCYM1ISxZfxJRKTAYAZF7P08BSIEJTrbhZAQPc77boLyLN/KaYeAp+cuQaF6lKWinFvlBKYBj5mvdwjt5k/t8CIBiR0z+L2Y5T19hjFhfs4xQpEjgV9HsvxAkzadR8ntXaXrj+L/jGV1zVlNBeJjh1p28ie6myaT6Jd9/KBLXcakJyKokarHNAsCHwnGLbeaGYWLwwVpPD1udQKahdHAK57BlDcgxyfBIKUGEMNWBS/7VisE6FTUvnX+6f3aVZf8VG31po55xIM2awvcwaOMQOYiiVOrn2IzdqZUVL0FnYkF99wr+cyxoxT7UtooNe2z7lZKakYaCXBY0du6yah3XPael7OtZOG9lgZH6T07fEUFs9aboOg7WfKyGWqU3AYzapEW8y84G1PebWuVXo/q/zac9xGLlXMLbOit9m4lDX2xMree7DvZA8ylswnAAM8/ft9EqDT1hBgXZk5tdTRVPpkHgJIlUXOG+UW6K+PUj2cVu6pGIcigDk0yzqgjUB+++zrSvYD0KxIDz6AdTcsAMO4kEvZEBsjBCzn/q3nOJK1d5efmbAhhsac9Ikayv7JWNvnoGJNyJTEyqQImy7ZoKeMWvyWTDmNelNs835LKrOYVtTz4zMr39UaaruHd6vduFjbS9gbOYap9AHJrNkYEnXdSPYlI+VmmGhuzbF7F705TYudJ/jZY0P4VxconGLA1RQ7hioG4OFhqiCE6zKBiIAU1OcIVE/yoVjix1sLdW2cOcp4jmVcnUq+6FNq8SSnBMSJzAjPxXHYzn+zJ+vNvdyLI3cakFyfE67Pjcam6AlNAIea2YroF0gq2GoLizpXZyBQlKc535arQLtWqH85MbTKs6X1UywBk2EBkpqymtwg2XodpViJIigzCIPngANOIWCKLWXknIGHtNBEIBymev91USr9MEWxdM5o7nCe2AUo5aZcZL2Ni0kiCFH3bFwekp5Iu4m2398DAvU7+nFDd61WZbgs/Dsj0dwJ37Afej9fSW3beiasPVu6+NhjPfEW1AoKLP23cxHxWIU1puRxQMmefdf6YN3lIjnKQ1Mqbhb7e+cQ2coqY+N+PECpn7lu+5NivLxnZT8TuF0dYkvvXJUisagDCccccR1SnSuZ2YlSXUTLO8YCqECzeAPLpBhWbEyaBiScVy34kG19HIhWwDQAAZp7FtCCyM/aZSv1v41ig0Z/96RqXhtPVkG2yjL/ttTXqO5VQHPtOqr9z7TGJ/5tQfAMGKf7rp4jWeE9heZiRaNUZb3Ufei1g6Ln5LWRHXkNlBgRtNjFUEByNqBEksVMsnaWpDGnlPGgpASmzNEa8nQOsHXx5gzrkgVgAULomtW7yoXKjDDpQN0WAx6SISnuXFMMNaMd+8gTPcR429zE9RVQKaqjV9Cz+eum+p4yW2DLwjeVc1KHSBk4rhWTfplIShmriV1eomteKt/93d+Nt771rXjuuefwmte8Bm9/+9vx6Z/+6cP9//E//sf42q/9WvyP//E/8DEf8zH4lm/5Fnz+53/+4zT7qcqdBiQSdJjQ0ns24QtMGtqufZ0Vpbga0BqkpXPjMrEp/YvfFk/rLgCohUothkCxNuYMlLiKWKyKI0ZlJGsKz
FR/kzSfc5LCbFeTBLxLikR5QU+huG/lgGMsrh2lfQG5pmUkfS8LRy4uV8vnYC1hObf+YV/lzvrZGJA6SVbrpurDpICJVgxM/1og0veZ2aDcILxg95GMrrfFgui/+rMFI+33yxZNDzx4MqdxhXp7zsdhDOy+a8e9lEzKWGgF7928bHvWLKgWnGwF0FrXsZcy4HYkHGeWAetSOs9Ao9YS4lyYtHNSSqEYd5qveQDzLdWYtNwC3FvCkaUs2BADQGTbGIS0bb3hYsSEAOjYEAtEqLxptsUrOArsSzyhZe3d9OrIDI9dJGlooJCARdaUVqsqxoCURYFPJR5D/vKEoSUjIagFjTelCCZ/z9kUUmxxIWZzayefZfZ/txKhXLdCW3cig61DLihKapYgAmdlBGyZq+x8tg+U7HHJ0tuuVHYzsliMEznGiIOK45mCMI0EJQQihxg646pmGj3p3p3Q6yEhBrSnVO63oshcFnQU1/GsjKOKOS8nrvGIKrA9hjFQupfL5Ad+4Afwlre8Be94xzvw2te+Fm9729vwOZ/zOfiFX/gFfNiHfdhi/5/8yZ/El33Zl+Gbv/mb8af+1J/C93//9+MLv/AL8Z/+03/CJ3zCJ7wf7uByudOA5MVTRjz5k8iI4vR0uhjbZNVcNfhrmx6Dmm7tS6cXzpT7bdptwFNSa1pHXjtTIS709EY9Ds/NQy9U1+dUJ8pugTxwYSnWjZRr1V+d6UMmx0LXZ1o52/npAucBvzo5OgGnc5L+mpPcA/24md2GmW2ab3duwCk1C6ZWKjw/btvfI9FWxtL6zWNG519z3bAuOtv7p8U+QG8x1S5G1qJqFc01MOR9t8rS2G1hudh7rk+j810qozbvZU3W2kFGcfz7UjEBsKiZMmor9xm1deR69yRlxNyMXFDmHDEX5fZU3BsfHiZcF1ByjKlmdKK/O+dgZgyyCiFFZo9eNNgA1Ds+AB3cx2NEZN+lMWPOuSpc9XfDguj+GbGYts23MSRIv/SuhEAbU3obMHYr9MdpU8qPlTVpcQunuT07iTMR993jFPDwIKUyaZya1DpQ65agPVs9hY7m3S3WRA/5CAFBNA7SdQtg3GjAFCdRiFO/xicDOtkmy25dn5c1eeRzcp+Jnu9GxQetW5aOCaFrFkHHw0Nz42KSCD5fYaZK8gLVx2sMSVbjWwLQ+3UXAGIgA9N0lNOcF4wJ41YmMocEIkABrqG6xQHAaS1P8ctEZEw8/WteIt/+7d+ON77xjXjDG94AAHjHO96BH/mRH8H3fM/34Ku/+qsX+3/Hd3wHPvdzPxd/9a/+VQDAN33TN+HHfuzH8F3f9V14xzve8djtfxpypwFJUouJFsnQJECjVril0u1MkEdEYSdiBv2g5+RZIcq5ggxmDVA8Cx5gwAgNErlNLBaYeBM42R49Ce4Ra8m1lrjqHqUAFGM1ALpOidUplaq6IaLWGUEqmbKAsmDULqqZtGr/GDBCZiRlsic9M5KSYTpy7pQJ7QJG0ft7wE/3yVBuOZeuKfSXApH+GB+MUPSz1dVyvfZZgGJ/G32319KfR+Nxz/bHYUGsrD3XUb0eC05up/hz/oluH46BSP+dx2jXtqchNsC9XbdX0Or2Q3PZACQO4RiLQhsi5pnBsVJJ/DjxXSyGjpi7dMpAU2S9ubmfN3sQAixdsJbgRAOZNteNYkKAvtr6HiCy9t7a39dE+p/xO36Ka73vrdg0mayFVQjsRzGJHwFJWjADMSRMIeI0CzCZYqkjlUNXBDPzaMWQROx/r/e6cNHVj7es1xrKFANon5wCqquRvFINGNT0wHFptKG0tXM9NsRuu1JuVtKOlgVNA5MYindEaOBlCg2MCJtSspyRIal1YnpvBOvZEUpwjzxa7kcEKQ+uODX8/+z9fcx1S3YXBv6q9nme996bpu2xY9wYtc3AALYQCMmOnWaSkMRGBiJrgBbiSwQyCCMNbRE3ICAh2EaKnEj5IIlABAnBRNhjJQpDBqHxyG4+F
EUNCpYYDyPTYywhG+I2AWI37r73fc7ZVfNH1apatfZaVbXPOc9z39M+S3rfZ5+9a9euXbs+1m99pisxv0NI89PD4bjmezwlco4lZUG6M9fr63zbE/HrTn16enrCD/zAD+CP/JE/Us557/EN3/AN+OQnP6ne88lPfhIf//jHm3Pf+I3fiL/0l/7Sczb1qnTTgOSnn04Ij331QdWQKAxWPvXg82RbyakbxY9CrcsRMKjKT5rc3EZT20zVNoiJHrJTOxCSDbCvEYAoSgeZXmmMMF9gpRpZYwjXQOrWJGE6rslu9D0XStZfsi0mxoLbhrd+NkSZCaBfAvSU45DM7o7rVkPCE5FZWhHur6E5l/aAwgz1zJxmz8+EcdXusySr2iZavvdO8DMyKbE0ChY4oWMZUrd3/yzNfDvtvbV7JVjvMXfSZEZ736XMUd99t/rt0kx5ZAlQ07zug6RrABUJTvn44OsE/X46+VKuJFWMyWkkaU+TQ/TrNZkCvT6FJsxs0pasQmPNxkI+1CSW0tyKl9O0H3S+XWfagBfauiHXDPp3CfgYzS2N6tjZ+jilY785txrjk5MW4voYajb3EIGwbL/Nw+Lz9yTH65jSdvkM/lxlnHlES6LRNLe6hWvW+W+gGld5V8ExXX/wHkcEPMBjjQEI6R1CiFgXh7ew4BhC8StZXB3rfD+V64EGPqQ2hEyyuEaQa6FIw1FC/C4eby2+9K1D2lcXl7SLh8UVELK4mraA+2xyFqXutQlARAAhg49k+lyFniFmSwyQdpJCOy+5D5LJm2fChMQXoETlIu0LT5IYQiz+SnfS6TOf+Uzz+9WrV3j16lVz7p/8k3+CdV3xpV/6pc35L/3SL8Xf+3t/T63305/+tFr+05/+9BVa/TJ004DkGGpEGE60lpAUAgCwtgjeMwnKGgOWkO1dA1uU17pZpntog43FvIdLKraakG2sa77p1vYkScSazSC8p4hPdQOnhfPx4Mti+XhYcqbl5IQ4Yh57Uu0QSb1N4RsJINSQTBQikmzDKb8AVefgzGTW0pStURE3oCOF3EzmWqkPpZNpiFzC2QciIzMeS3pONGIkeozstQAIsGVOLa1X73oPpGhaEl6PVb8cZ6Os7XtByYwWowcCret7GHtrXtG19lwbwmykaerZrp+Ub6d9yxmy2t0D1pXpYu0QQo31kKIXkeknMbg+OJxczoexMqfqDFDk2rp5PmsW13gAW/CxOcfWCbpPAyD0nhoz2ls/etqPGaBvac3oWn+MVY2cRb11gjRirYCKOYa7iCWiSZ5LGq/FhWJ2lIMwYUUbdh9gUSvhVJB5DcqKnkJcS0A5rEjYt2T08pAZ6Qfvs1mSK+/NHd2BKtgh0tYxbpYFQM0hYgGRxl+ENCYuO8NncPPgXQYp3Pyxmm9p0yeiWmlUf9YUrj/mOVKPa46XGDMsibT/JZ+iFLUm8waCX6lCVIBrTDTh75tG72cekg9/+MPN+W/7tm/Dt3/7t79oW95UumlA8t5xhVtDd4H3Iv44TzBEIW99jDgCjWSjmHgxwEHmB5423mLmVUnaNxPV6E3pmHKP1HbVZ1FsdYrdD1C76sLJ33Px62ZTba8TmFkKeHnM/3xZKLdO6cV0K8RN+PtTqH1cX9P2IYmoGyVPQEaghM69d7KBiBXpBkDz7vLviDmVdKkUeo/2JJ3f56wuz1kMiLynKacwTjMRgbS2XAJCZEJD2Rc9plmS/N4zEurReY05tBJAriFmLcfKGD2o9XBa/BaULN6VfCD02wKPvXfpPVerG2gdqTkwqce+acuTD3g8eFBY1Vex2spT2FkKn+3zOaAvPeevooEPOh9i+623wAT1WNGC0HEPhFhmkyOgawlDRqZWUoOlEwdF23m0h1afnNl5ErxiHuvqPrDGDFI8ydRRsrg7FpiEfD1kEBiL+J7R4xF5t8kVs3cf5SihnF8IsSQFXmNNQry4CCxVsLVZqxoLCTTAg4AID9VLgITABAci3teEos5l8
OFQwMiS6yatyOJrwsqMs0xA4pBCMyc8EUu4/oD0vZCPaYWKGXe4LGRstFEeWFc6dqzjKyihCKEh3s22ZujHfuzH8MEPfrD8ltoRAPgX/8V/Ecuy4Cd+4iea8z/xEz+BD33oQ2q9H/rQh3aVfxPppgHJT70+4elw2pyvjpPGebGIcJBSnbqrNK+aFNTF5kgSmNAy8hoIkcRBCV9ZaU/xMRZHQ1o4SXpFauY11k1rDUvRFGmbJIX6pXCcPPLHW9whr9jYZo1J3nx8XrF8lpRUTRG9j74INaFwWQhOsvPmCcgIbLw+hVKuSVQ24WQK6JL/0p6LAcG+TX+PM+slz5z1IdxrWjICAltwYQMQzTF0277YlKv29LbpUg98au8xw1SOrvP3fMp/Xx38pkwyx9Lffdt3Sx7P22SJBEy4j5D2TjMaJ+3dNGBC78Xr1UKZPmUhB+8DSvxG+RY05g3YCkA0koAjndsKfSTwkO+njRN5XgMeFqjQ+tE6pwHkTf1iznDfHjrX+6vl2knn6ZxvvzP7XmuIWFzEcQUWl6IupoiLqbx3oUSiDCEUqX6MyEkJkXOS0Het76l94pnlqoAaBWnQKia7U/tCXNBH0eDS7xydM5tMBq8LEkubDd4BQJPPq5dDpIb79Y15FgGRqiFBDrBDf8mhHXCRzC8q3+NiQMwO6s55LM4DziO6bF4VU6c5RKyooCTmiJnOOSxkGpf7d2G8SZXpVhCS+BAwTUn9ArcASla2TrzkMwHggx/8YANINHp8fMRXf/VX4xOf+AR+3a/7dQCAEAI+8YlP4GMf+5h6z0c+8hF84hOfwL/77/675dz3fd/34SMf+chV2v8SdNOAJBiDqi4s7cbHc08UsMCYbMCVaCMl7ktW93LzghCSz0kFHHMDm9chQQlFLylJ+uI2WSLKgp/f6dButoBu+0obVMkQy9THcnEl4rwNgaAQIpaFhXHMK5XMycLJDrcZGzBCEjkORrSIN3uBiGVq0bRxwJQCxKxdDjD2kAZG9jKdLXHGWGf45TGRpnmz2tYCkjktgXz2OdTL47FXun3usxelDWQWovXtVqM1Dj8qQcleM6692iKtHPd3Ia1JOWbzc/EpuWLRLrua0ZsY15G2hw5VzfMAiNBfCUTkeQlGekBkDziZ1dZZ2ioATdABXn5mrtaxUZ3ltbatvoY55wx6ytxd+73kzKL2xLRnOVe/ZxOGP241YQSdenyrF/s6nxH8EjcFpjb2GM0momVI70zJ/ix7Yy6w5GBai55FAkwuzOQaEQAFjCSNBEqyR49qqeDYX7qnvnTqjQJOwEAJOweWDyQ9J2lKUkCeLeWugXfV0Z2CT/C8afT+9Ftae1y0Rd2p0Mc//nH8jt/xO/A1X/M1+Nqv/Vr8iT/xJ/DZz362RN36t//tfxs/9+f+XHznd34nAOD3/b7fh1/5K38l/tP/9D/Fv/Vv/Vv4nu/5Hvztv/238Wf+zJ95P19jF900IHn3KeC0VElBjxlvbD9JelE20dYZzQeSHrWghAOPlak/yNwL0J01AUsaWHbmHNkiJRs7AnhANY86oGpx1hhx9BSm0m/8VKpErH12k72XaYboHM/YvtlAIoCc1Xc9xXZRF8QXJisEJ0k4yfmU4v7X47SxPOWX4fH+NeDRM624NuPJyWKwZxhv/XwfgFgaifNp2fTFXg3CqD29cyPGjrIo9xjMPZqxPZqSXtu0b0LO3jx3Bx0nDWU9Pwoj2ka3Cuw4/XsUfXCOH9NMX/Q0YPwfrbNFM3JIDu/0+1D6gN3LzF9GpIXvtoCDpZnYlusDkHPnwSWgWIKRMh5yn3FTOmnSJQGLRlWoIkJVhwo4yFE5gQ7kEK8AQEKikH0xQgmXD3CrhHaftFqkak6Uc06pgUe0BBLzXL51bHPRtD6bqBJ9n449vXuIeCjCSvFdhJk3natakMpPSBAiNSJ0L9eK8GMy2XK5vc6RxiQDkPzPRRHfGBV/UOud88W8K2mzks8IAQ/nkgYll
mhrlG2sBR7eV7/YhSlEyBROJhO+BbqFxIi/6Tf9Jvyv/+v/ij/2x/4YPv3pT+OX//Jfju/93u8tjus/+qM/Cs/2kl/xK34Fvvu7vxt/9I/+Ufx7/96/h1/4C38h/tJf+ks3k4MEuHFA8voU4JTNhog7YGpRRmhqpUgdABakuNoUys7XhZbMlxJ1BlbYOnVZmy5JGbhmhDQlAVkqQbcuNKBdWdlCjCUOeKGlfQbXfkjAwRdXft5aWEjCWRZ7wyZtFIKTR8Eh53XuV0KaEAIi0hxtFoQ8JxixSJNwSkZhhnmQ9/WAiZY4rVc/bx/dWyT8ihT2Eurd39PAnEPngJFzwKvFCHLNwSMz4SJgUqnVmPSAJo96Yz1fhnse9WWvf6z+4G2lv/Rv4+jO2kLXpTM1Xdu8OxO8zLaZH480pTOakN76IckCg+esQb3+1sjSbNJvXqc21jb3eqYNj8npm9brYrobksAM2bcguOxnQPflPcHa8+j5/PNSycIUN7dGFahwMBLLuf73Iga6MTPyuS4WU1hrO98rCThw8AGwjOrMAmHxrtGIpHvbvCLkJ0Lv7kDakbovlxZJMMI1InRIyCemUNza8OH7+CwRv8KtOlKfpEbew/9enz72sY+ZJlp//a//9c253/gbfyN+42/8jc/cquejmwYkITOttLA+nVqd68aUgEn05Lm0UBKjn9TPZC5FoQ2LTSU00MGeSwy/8LXQ38EVp7Ajklo1hFj8Reg5ISS1OrUP2G7a6dm9dtkL7YjWzjOpffw6tVFPOtbXhpAmhEuFAZjS8tLGASNxaRZsyfjLDb7HFPQZ863/gTzm56wwtNbv7fO21x/Z8UhzcU3iQGiWkda0A3vHxay2ZJZ5lO/ANSVcU1B/x836JOtryYPM7Pj7S8ZVApNev0rNwQwjbmlJuEaInvt0CtVM1PumLKBrsEd9zX/3tBHtuTmhRe+7z2qh5tpznTlkZXGXQGUvkXakhIF3sYR/DS7msK+JIQ8uln3RBCGWQM4Iqy/rst5hq9nZ+iryutdimh1zqNq63z7Alf011anvnTLhp5YA1AIi1Tyr5hVxjplgox7zLqO2OCD5hoBZY3ElCXWoq39nh5pzCXBGlwS0zlWkl3K2RKxrC2Q2ZlwXCq/udKebBiRPa0A4rQ0TALSSH8q4XE0GthsjOXun3vAIPsKvyJFhPFLkDQZMULUDJYxlJkp2BMjkSIbWwUWIQF0l1jdQzZzowZYUSGpBqC0WSfVhLzKYNMNq6hFAxcoB0AKSCkKIESItCAESznjNSETl9ebdLmAALInizH2WJL1lPnUfixEQmdWezLZ3RJeCFItpI+k+kcVAW2DkEiDSu9YcyzEv6jiuYKFstww71UWR7dYQmXkTwE1orDFQ1ziowEQS9zORfSr7spljsZoyRFG385Qwra4tj0u7nsr35WZrtcxWSDRLU99rIirWDFAYBUjonZ8Zl9p9Gpjc0z+a1sTSjmhrW4ryTuHfY040SH6V5BCeMrjLgC4zkvFmT1rpPnbdqI+DF02L1gu1b7WPmkJWCA8iTLHWLg2EkGmWBCJUngMR3g4yzQJq5vVuDzKfkLg9VRzbrbJqlfRMo9Dik9aLrDXIgoM0IvV7VQB4C8BkBWAYeDzrM+/Up9sGJKe1mGxpmzP/Xc0mctzxEDcb6NMpNJmI02mKzw5QMijL14ISGtEx/W3ta9v2NUBlqQurd1l9vrgCTnrEtTIjonoBNilL+L5W6mFpPppzoV7jWhAAaqheTRtiARH5Xa2INdcCI9o3ktf4hq+FSJX37wUEezQce8DIJap0nvxKe8ao/zXmp1c+HbdMtzYOtHtnv/nMmFmVuUAUm7ZGOO9KgIc1pvC2ElQ8nUJj4pSe1Tfh4qRp4Xq/95AEIxKQ0O/0nomeEMp7Skd3yXgTEGvsZbCdI6OxMgIi/TJz64ZGo7bw3z3AMxr/sxouPl54VK4ZoYQ1xkJ2NiDtSDJpckBIYXOLUK4A03pvi
TzZI0WbEpp1qVoF8PpoD6Rw+5yaOar4jkjqrYNat0jtCLVfAyN0P/m+EBgh8yt6NG9CMQuPdR9Ppmh5LjpXtSQ5ghb5khRHdglGsiaFbe+Nedse4jyB1Iq0/fTyTP6dPr/opgHJT33uiHf8Nuwvp9enoJhoheb308kXCV6VFC5JBRtcyuTuttE0gBaAkHaEa0boOUArDYmRS7zTX5rLfAMaakRE3RbxulUVt0PJwCqfS/4fgG2CtT3Xgg9ixoAc2reAjdBcezqFTiSceYnliEZmAFpZjbm2QMkMENkTgUozzejXbUgaxWY8GzIYSBJEojWIa153PpZ9J/vHBgU2c2lpRvYwnnsZU01rEIPtGOm9wwmJeX/vmNaGx8U3WpLFO7zzuGRmvu0vbtakU2Xse5Jv690k4Kd5TUAkhNiEXI0ZcKWbK4PlffIRcXztFN9YY5jTv/rOPbNF7R1kxLu9oGPvujEDcCwQMtM2SxN7jkBj7738e5HZEmnCjuwc7X88GArRXqn4jGlx9dugMm5z78xzrUAzRA/G7qlpa7gjP/WHA9dwVPMsus+7to6eNiQirZ3Nrk6CAMcBTwId2uuHjEQi6liKEdkvqB7TeclieFfD/3IE433iF1RfHJD/kQ7q3jR6PxMj3smmmwYka6imPjMMpuZYmShkU4jKBFSNSsr/AVTJAEmMlqE4qC5qHIzwv5wcJFABorJsybrkq8tIGSG2dUe4wlGmjLZtCD/LWV1mSu6ZY8kQvfxbVfBREzpqDuzWpk7nziHJIPfKyGdpY0xKKJ+LRpqYGWrtsu1rc5XJ78ElfRGaWUV6bt+MiB/PMnOXgo1zSIIRmS9hXVN8f27qvbo6zjWgRhGQ5jVkKQqZZqYzS2uowgkORvh7asf0YiGUw8K49rQE1rvIsLR9LYIOVmefea3xMKMF6ZWx6rLWGXn93HVAo0YzwzTlPEzzcU1jrYbJr0x2mBSNU3O5loOH0C8h7x3KPhSysK8EfGHP70XIlmBlHPWytqGtpz3PTbOALRgB6pwAKzNDITJAQP1P7YgRgYX+B6CG8I2sLiB1UYiRnd8//mec4KV1xZ3utJduGpD8b5894i0cy0L6eNiKe+Xiz+2WX4lQlElymcJUvf24JIneIUn5HxePh5x1GAda0FIUC4S0GfsMWpLDXwtGOHhwYlHZzQuyY2fcHNliRMc1ZGaS7qyhZppPKvHWEZCH6SUJ6jHYeUK4TwhJskfgg77PkwArlmZkD5DQzo02cY2h64HdS6SYiZncmq1ozCWBHqDmJRi1u5wXm4R3DilhX/1dyp9r1sUBCuWnYcAEfvsdNWZy9M21hHbaX+1eTpbWa/MteTSekLQERVtQpI+teRPlKYqICCvgc0fHQ8RyaLViXDiSvuG6a0yl0Mir2n7t3Syi9nPNSNGUKPeRtsR7hxgdXP7mzjusrn6fR5Z8bw0RUNZo3h+zuX7OBaEz6weVkf5N/NrMGFTH8gzDZnTBDNgcrW0a8Xcs6y8FXWhMjoFiqqWsE+esHY1mhP3QIkNqka6ArSkVlaF5qF3TfFK099KsG6g92j7uS7l6TesWAh7pODaaiMhucCU5Zf4N+s3LtHWXvb8BIVUzElGFivVaNBNOWiSja8l23elOe+mmAcl7xxXxKa2Q2iauMTXcuZLKL94VJo/XQUzgq2IbveSFMGezzVlQacGjxIHpGMkfvllktypdbl+60XQoc9vDXoiAFojQ79oPqb0UQSNJu8aSD2myRYvZcY0qEHl9CioQ0TQlEpDwcvRsol4cb67Z4RvMPGO3Zdo0W+4es3iplsRiIEfARF7X2jxD7cY8Ks36mEksQ0zJR0ljMiMx22pCtqAU2PqNnAtGOFnmTtq9MgGYJG7WRfcnxj0UILOeQpMFnTSxXCs7C4j59dFY0Ug+RzNHo3ObjNk84iDSf6RhOSLPR8G0y3fT2j1rYmS9w55z55Kl9diO43YNm8lDQJqAdMP2ek/wwJ85O+d7Y
15+D8sMtEczDGrR1q46EKFn+9ACEsqd5WMsfijJpMpuI/f7oPZJ0EGkWSBwEML38HqPLmz0iqUDgRL5NfmeR0nvySG6CDldLaN9avqUHJwAKGBEM9Xi91lD1edwzzIxIg8DfAv0fmZqv5NNNw1ITscV7rgmqVzUNzS5QPNwnPwfdzStUktfGGVu6rUEwDsy5ULRFABJ0+C5ZBg18VGqu4IQWtRosWod3dp3tZZ1zggCAPm/u+hyMqRkOsIZCu+gOp9R3hAKmUhAhGdQ52DkyPw/CGhULchargHSb0T/R9+rF+nHombvFip2Daju1ZzM0CwYaZmA2nLNXGVGKjoNhKRwOqQkXDy/TDHBiPpGdyn1QEa6No6QNHMemGfKqB7JfElmbc0Ch9Wn8bkcfGbcUYQPMXccjV/vXfW9UNpNzziFWLK8V/OlpfuOIyAKtFJ+WV6OLa71eU66pmnjjPZnTzkqO1NO05Y05w0gokUvIyKp/ktSD8DJOXSOBqZHi3c5khet13necK0MAB9qMs2SiDDmMMTetaH5faqHtCQLj3bJnNDp+Q763suTMkqAAWy1IbycLBsQyz7P+dI9TuDOtUxtj0fg1XJwQqZySVtCbajn0r31dyx1TMwb5xrh1J3utJduGpA8vT4BOVO78ymRkMUAOJ/D9YnIN4CuNUnXKftyzT5c1dgRQMCR6l2z1oRxcrTYVRVzPpdBiMvnKO54XQhbmouclf6SBGQNKaa4y5lYQSpstIsgZbYNGQSsGYzw8L0hoklguAbgPQY4CGw8ndYCTsg5/UkBIo02JApJbKib+EYqqxB3rqXvXlh8zoDTpteRJJ4LSq4hjUznqravnqNn6GCF/sr3OSjntoAbxbwKfgtKelRuj3UjksSDJ2hMm55rRgKU7X3y3CztZaZ6knAsKAEbQogIeUwHl8atrM8yrdRAuaYp6be9+pJQndb7SCCyZOlmIDAlyBnnY4ybd5phprWxaL1T23YO3Pvgone914+aIEAz2+JlN4IVAUJMH5xMsn+dr6ZGNBclyLSsAbR3m3lfXvdI+zSqs0fWfVZeJbleFUCS1ywKNrN4lzKju2SK7GME8t9NJnnXhufV/D96mo9U1/YdZnhwDgY215Rze/a/Xtt43VQnt6CovENswIv1dGu9p2u3QOHMPeTSZ96pTzcNSID6kclsgDtZEhVGNUSw4DQAdGmYxvAQAwWAaQtyOD4qE4EH+Wy3/euRJNMOLRih37w8kWV5XU3cXWmbdxQqMJa65PoRIW2bWZ0FmFSQUvuhSks0ppL+aecbcCK0IJYNu0X0TYkxIrDFr++VNlrmCfLciJnqkSV51Mrw50rb+sSEjhc4adbVMGvCh6RxFkVfS8IfLXPYyAhc8t005if9tp2VeWQti/aYrYy+3Qw4fczhw+EBZG0Jxd2Rmw8BZiks6Y+DNhywVr5+V880KzpD3jOZ4u0kjQ//yFLLqgEsmmtkSjNL23m1Xe0qIG+BiaYhkvW+NONBJIUs16TeuOlpoLS+7fVPT3uyp03afdZ8lRHZOJDmplsUbAYhluPikB9ik18EQOM3Qi3lYEQCkWuAEGAOgPC5JZfPHi5xPDgAbI2LVScHI9rz1vDy0aju9DOXbh6QSEoTOzP1yioSQkzqU598DkhjsnjXJPIiP4eSNBF5A/QOb4fkyO1dZtbXlM32AWiyvSaAkdTO3BGP/hIoWbxrzbiU9+Kv0kg86Ler7x5iBGm/vRNgI1YNCjmnk3bkGALWmEM9xohTdlw/rgGv16QZITOtd5/Wohl59+lUzpFWhLQg72Yfn6dTaLQhMrQoByT8t0bctMR7V3aQECpT1IQpNcgCICNgspc0Jon6Z0S2+UhQ2ylDCT+hZfhkdmz6FyJt8kCgQAfONXl3Nu/FN1EGRmougG2kNA7s2/NbUy2tvHzfpj2MsZmRFu8BLD1tiQRZFuDi/a19A/m8CkTrGmRJ/QmUaI4Hi3cAG28UdpgyqZf2+uTj4
pHmEjngy7kJ1HkmNdMugxFaT2sCyJqtnmukax9sM9ZrtPhFzKcWpMg+lHX2mG97run1bsaBIWSZJR4oAAAz89Uz2mtjR18T7DLbd9oKBZ4L0PXmo5Y8k9auJ0/RvjxqmP2A4B288zgCgIcI1JKCzSzJ0KEKCEUW9Sa0Lmue6gOi6BA2rlZF47AFHFI7wcvzumZ7n7fwHLDEn0PfnK/lAEqaAKISucxXoe8taEnO1bJf+sw79enzDpDMUtlcPbCgbvaa2QT95cfkT8EnrMy4zqkseo5ASAUjNYmibbYFoCRAKsmTUMtGACEH/PVMUsQXj7LA5b8lSlZEY6q1sncqfiMUSYslM0xMTXVat7QltFmvp9AAkUYrEng7BxM3r+hFS8LARwiXJSKc2dy1ezWppCZl37so7S+/smeOIhoxUxDm6MkjcAFyA9qq6zXNSKtV60fU4ue041naC0LktZ5kmV+TyTnlusGv8WMNhFjMJt3HgcgIQPU0JRpTvtGYOIdATDFQkrQWTRp7Hs03CUa88m49hjr99WYfSNLeZfTOe7QmI1DSG7NEEoxoQE6SJTyxxvRorOvX9fXAeifr76VkfYO2/Vy4kLWEvo2OyXOmLC4JqULW7IUOU0zP8Q4qGOk5onOi6xow2QNEatlxLjLNVGqb0+U8gdromZMB8O50p7PppgHJ4WHB4WFRr8kFXrPlpWReT8DGt6T5pzFTMcKHysxZxG/12U2OFgzvkmakgBSHBDqg/K0vVv4613kwUMAELYAhpgUwImk6krQDWTsSi5/IaaWFvvqNyGhaSQuyFm0InavX8vEaCuBYyc6e2d7LKD4jO8sUZjQ76ocUqpBv8tqGLxNU9pkjnYE9V2uyZxOfKTtiLFugQSFkuclLLBt7m28HWFFDfYa1avc4aep7AiIjzQjQml5xqeweMCLfd6av+PFh8vvPfGdLWg5sgYusd5uwdTs+Nc3BuF0eluaAM4O8zRvhi+bbxZbaAkgIhLhW0yyPX23OefGeNrDnJLVr9X1aJtd6fwuMyeuXEBeUAPqaxPtPlqG+pHZax/KcHNftPjbWQPF5qGn65Li+Bs35j8TyDvJdS1jpbB3A/5I/J3dsp/fwEznEehQUE6e01+Z9TLvGtCBc+0ECTnoPGrYbU9hOt1s5VnhCx3NITqF7pKg7PSfdNiB5XHB4TLvkjHp8Y3rAJQK+jdTFN1Vu5tJs4i4x/DokSouNtew5JEmMczVOOkJ20A8nWxzhPaLzCaj49Pm888XeX1u0aKEkacwaYlG9JtARixP7cQ04hm3OEQIj0oG9dWoXgMTQihSzLQZCJO5S+8zPR6EhRomot5Fr1+X50TVOPWbZYnhmN3le7iDGpMao03sSOLFsyJv+YEXC2m5wkjRJmmRmSHsmr6XfOhOk0TlABNAZNeuvZNy0cUH1SeZY/6tHDZtpt+5LMX7vCi6W3KdtYAkKY35iAGQ6D5AIerAR3uS1E0jM4it2nK4tTVlpQijfZwPmRJ/LCGLtPTwtJTbXZf2z30hqXKz7rIAAGhjRhCazQETrRwly5b38fbV3kN9f+nBdS1vyxI7l+2zfhQFQ9l6PB4/jmjZiWo8OZCVQfCu3/p2S+PCztCMSiEgQovlmlOPY+m5S0A9uBkUWC9xSQQMkPIofJyurvcx6z8vsJSmU2hMp7E2h0V7zXM+8U59uGpA8eI/DUtW3mv8B3xS0DYL7HAQghfU0JGcpks1SN3dPmpIaXStkMye+aZdFqgNRylnJmdNvpg1xMSRQwovBitSx/V0WQrbQFSf2WN+Xkh5qWdc1pnM0yTUnz0sjT2jaEa4d45JbYF47cg4QkTSzAO3JEyGfzfORUHkLBFUmpI5hIi6lbJiUPI75BiTDTJf7GBjhdVl9QPOp964aSWZKnuc0A0TqsS2xtxjmR+O96G/6PktzTpYbvad1ri/ploy37lvCiXxJ6C/db40p3g7rn6YV4b4kPYZbvqfFPFuAoNX6tBnte
yBihmQ9pc0lKUtmToPux8bPSTByDiCxNCPpWAfY/JiPExnpb1aLdGl/ztTBv5vcex5y+HLSfnDtCCUtJrNq73JIfDDLBLRBPAh4aGZZjclVjBsg0jPD4pEHORA5EkgJdW8mRp/2yK2pLFJEMX5OaEQCJVbMPoKBARTVF2Qn3SIYudObSzcNSP53H3jAw9uPZVGSZkJEWmQbNZRliDgiwPtqysIlfqQZ4Iv6g/cIMaZwgyFiWSOOPgIIWIPPIKVukGfPXwImyUapns/aESBrQWJdmNeYFtI1n19DltDEZKLFTbWOITmxH0PAe1lKStnYuSYk9XEo57TQvk+nUDK7S+f1cwAIYa8COJzbSBmXxatOoVJSeA44kXRJHgUruZ9G15KoaFJhSyrKmYPFu2LGVcqoJltbJoH/lWZae99L00jwNs2YfdBfCUDoX5NniP6VEKEybGi/vQXUc6lnx9mf/sp+scDqaPxRqNqWgePHoXken7uaf4xGsm/V/vOtRkQ6tLfJ6VCOJTVauE4/PrJ3oTFX+6D6mewlTRNp9UkBcTlfjVqu46w+AiU9kN0eb4HIWFjRD0su75s5N0OyXxuQd/AlOTHtyWkP9nh9CkXbF2IEgkPwZKrs8jysQVuCA0JwKTQ3HBCB6CLW4BAdsGQHeJ7ccBXO5jyjOZmqEgDhGhDLBEsCDfscWz9C3KwpGslkkr6ZY6JM2SPr/ZqpF6eeZQLf12/BrGsNL6+x6EWfvFOimwYkD0vd3Lgt/BLcxq+DJgxpSUYRmDTiGxwlMgsxFqd4ksYkSYxrrjVtyXau9Xf1IzmXaG6FGBvQI9eGNYgFUkhmpDp5y6iEzaYv6y/P7kz4mRjrGjWhGBlAKdcVMAL0N3DtOqdrJXLTTHw4zSyQGlM0kmJb9cze09O8jGhkirZnU5A5ISwgwo/l9+5F7yHBw+JTXgMgJWErkfEI8DqnbtjNe3ENZEjOtnTuAa4w1sS0SoAwQ9b3W8MoUWbSmGjahiW3g4Majfr+CtXen/qUa0sIjFCCO60/6bEhogmk4BeUIBtA1mgr2rmD3wJu0hSdy4hozLqch3RuDXa0IdmnIxBCx5q2zgLb2v1WW9p269e1/uR0iWBF9qu2tm33maqOWkNsmGDSmPAkfWued3Tsc71pfKX7SgyH7KPIHynNskKsPiHUZycFiEhTLGA/EJGmW7JPqb9qgltkP8BYNSR5zqUySBFGnRM+mH0Afc+jcafnppsGJD/rrQOWtw6qZGezgDn2Wzh9SEQvN5KnUyiOwHLTobC/r08BYUl+C0n9GnBc0wbonU8q0xBzaMI09WNe1GJe/BZ/AGJAjAFwaZVzwoQrOl98R2LWjpDJVczHkZ2jzTsiAYhjSD4i5DtCIX65UzuFAm60TkqCQ+pneY7IMpFLqvJ0rSh7SujeuoCWeig3i2cRfUjKmBke5x0elmqvTt+I/+Pn6BjoM7Z7Sdt8R1qRPQyS1jb5HiPJK//dk5rOSkz5tR5TrZ3TmI3edY3RtoCIJjHWGOTHg2+0IQ/e12zQPOGayxqSAnrTM9vcLcSQ5N+C0WgTjib/LKAmHKU6uHDA6rten9E9/LjVfrSJNrn2ZPS8tr+3pm6atsQCeBKMyM8rJcO0LiEDk2MIeIArPgQ88MarLF0nsiTx2nn5+yDGmLYGav057j97fTp0rmngQ/urBXDgxMHnGiIeD0v+W9+L7pX9Kfuqp03bM34ls937bs2Y9nk8eI9jCCUE8LLmfeYUsC7AQ3QAAkJMe0YKksIjXDoQc+6d7Q9Cz64BYlozaOkHIteEdF4IBuN2rPOu08AJ5WUpfca0uEWIwjLd07WyjoVU/giuQaG66rNmfU7ueUvudC7dNCB563GB43H0lY2BjntMplaWL37aZtNoCEgaE1KEqqNP0ptjluI85M2TEhQG5N9IkhgXUUBKiZ6VgUjUomw5n02vkMsQGImNjwj5f0RUk5ESWSsvkJVJahklqR1Jx1tTjxniy
dYSCEm2vSmfSFt2Ecas3DmdA5FNqFG3ZYK0zby3UZ8DRLZS2MvDhs4Sb2+P8eBSa6uMRXvnDv9rRZmaoR6jyMvMHGvmWdx06MH7Boh45/CQx9WDr9J8Yp65ZB/QzRs4c9KAjlij2q0x4iFrVB+ia8pxLQqARpNybh8uyvfYMphz4XgsEFz+CXM3DYQclrb/tGHGAUli2lyjbeKO62sAHhePJ7Go9DQAsp/4u2kSeyLOxPP79wASDcjx3zoQsbUfEjRZbe+9O517OgG0OM+sFyPhRe9ar597wkbaiw5N/wPeZb/OHAJ49WnTTSZbMeUpAXLixFAytxMf7Zjwsu6x+W9+rhQmANXEGWid01M9W7MraYrVaFQFMKF3I2qZfpHQluWRovq9S+9N8w8AFgJIrgaKIWxDWe57GpSReeWbTJoA9SWeeac+3TQgAXSTAUBnlOQxr0MuiqNFmAY0JRejxWkJAT4FyyoakuPq4JBAxNHFEv88OpfUvj6DhCYxYpLIwtV4H9TyENtoHY02JDJ71rj1HanZ1/OCKCQw3OluL/DgfbZmJ7pIEhokJz63pLC9i8/ak04/81wHwBaEAOgCEblpj5yc95K1Ce+RFo4AjGybBkL4eW0+9OzJrT7Rnj3zPvyc5sswqkfORY1RHNVDfzUw8oppzx6XmlhtcdkJOzPKEog8LB4Pue4q5df7itpMc6mYjwTmZJvn3jEnVz2tKds0Z7K9qxrO8hxmojQiC0w239j4Lr1+t+ZPD4SQhuSwVEkt9admz17aQUxT7jda43wAQpZ0J/KJ6Vq3kvT2d5uzpAd6rb7Txude8D0CdPWcDkBG4GNmbbPm7SnEnCQYANrkmedSD3RImgVA9JfMpwEUwVqILmtJXJprpcqQHbw9gotYYx6nLA9Tj9HmplgyIpZlgkX30fnNucZU2gYiMklh0x+xmghSZMQVsQEnlKtF+gUCybzLp26p57J5V51ibHy52wEfd7odumlA8nhYcDgsoAgqtKCSatmSRluMdk+yA7SbztOpRiOpi0mVLqZTJzwsHmsE3lpSuxwiczBLUgp67OLq4lGTM22p2rHGDEyqRObEpCukISHb1tcnysQeipSWyh6zQzt39pLaETrXk1QTPSJvYpkBDCHC5fuKGZcVLxnbPAepf/qaDzpHZjlWKEyr3TzMas8BdmSmIBlr6ZS9h9HWAIjJEHYYGc4AjRiXPQBNY3rl+/N6+TUr+pFkKOnYIpthbvuAm8C8dVhAWhECIq8O1TzrrcXDe5f/ImtRHEtqKpOoJcqGlk3UHdJc0jwscy2imk6uW61JMdnwLZMiTTQsxkBqG3vzTSPJmPGcNJqT/+JdA0RIc+kdiqlbSv7qyu9Uh/48af5GgTIoIMcaIw4Z5AEBvkxZX9ZSAsbizbBRzZZ33C+c2Kv1tOafNCuUQhS57gFAa+7G651pd/orNeKP4ImBPQsJvZb34/+Adm/UwPleYKQd8/KpbUnt3mjtswk1csgzn8fB4hzWxeGYzZMeQmzGIoAyPtU2boBG3IzLBkwYwIO/L3/tVkOCtjwDLDPAkI+RY6hz9IhYhAXHEME1k2Xu0nsy3xOpNSm9a3zTC7Hri9BdQ/Jm0k0DEo2shU8yPNrCOcP8tHXlSFr53BOpf13aIF9FnyJY0UYcgENIEsSVfPKCA1LMjyKBiIjZjMthRTX1ItrYqefFMWZwEzMYiWhNuIBWhSzrk+9okcU4SkZ98Y4LMMuhFZ+frhFpQITqtyWKVgx7ybT3d2w9X4cuWe29f09Tkursg2BgThNSj3VtiBW+Vj03YStM46f3jhrNgBLAHlN7nsP/Sr8GIM2vwiw7FDDCzbVIcnrwrtiZL1lgQNmdU13puWTtEZHDiDsAwSG6iGykicIsxYgQiBmiEJ4pClBiGpDXiHS+OOkW8y2U95A0kl5a9uAaI6uZpkkQQtdngQj93tRbfMlQnNk9SWMXn6MlxWzcn5dPn+3ofeqq4GOxPRkBj
Jk5OiI5l2fu00Az/aXjV8IXrinHNHQaUJwiigJGfbW4Yu42WiNmyNLQ0TVZv6TZ50kTLhoPPs+54gNR/OATiKWAlTTfECyYKrQZBhCx/EBkOyUQ4eUkSOT3ae8sSRN2NRqVUNcPoM496rdKaf6GfL6Uo3XfOTMn2N35/U7n0k0DkjVEOMYk8vC8JOnZqHZ3MIva8wAk57hcRw1FeEhq7kPA8XHB4pJN+MPiSnjCtw4erx8SaHnnYclMUmKWfH5uYnSI+Ylq5K1Y2pP/xlYrQj4jxYk9O6m/dwrlbw37m/1CYhuadNsnOuMItDkMqJ/kJq1JEbV+HjPc9XiUDEwzd+B1j4i3ObV3YcdbgDJimrXre5y05d+eOYfaL4pJgmRAZ5Mgko8DgCqN9q1N/eLdJmqTTq1+gWgWlMyMJfpHZlpvHaq/CGlG3jr4ojF561DNuYixXlxOZOr5HE3PoqdT60KkXAcRq0slTtkEyzshuY05qIRPzNN7JJHMczR47leSQpquMSIPR3XOatH9APHtBZjiQM0qw4UEdF5KmOn+h8UXiTOZbBHAI7t93ncaRSC/M5mbAsfginaJ+ghA0ZSQZDxEjzWmdWkJSUNM78nDIGsgrAfer0FT5pZsvnIzuHRu29ZznI6lBu5hSfP6cfFFMk8aktenqo2gvuNaE16nPCfJuibXCK2f2jrS935d9uG8R/n0/VPUqTwec79KLV6pv9N/UqCngY4e4ODl2ndof0uzLMuqw+o/DejJcTXSmngHBDamltj6mfDcVDIPinznN5XC+6AhuQO1Md00IJEkNw85eaVNu2R2eFmtTn79lJmsyuysm/uKlDMTVc0jzrx1WIqt5gEkdUUxB+EZY7mmJBTtR9aIxK15CIERAh9kGhJiq1JObdtuKL1+luZIVhkNwBRmVdwrv58m5beuSQZdluttbJz4u/eZaIAYac28ywIpe6Sn/LgHRPg7bkGZU6Wpo5jzaruckLYJCWs6ZwMJqz9rX7W5Iqx5uldTsjV1Q9aIZC1IZqrJqT398zlpWmIG6a9H+uvQmlZKqiaVLhsIpXnpXOqv6BOz/bA4YE0bPiUteyiaABQNSfleTGMCoGpNZvtDmGkAW01FD3zQb83cqviGiHC+EoRQazmgAyi6Ue4/lv8haU3T2EmmJD4zQh4hroB3JdQ7ScbXuGXQrXwicn3Sjq2gEZeClM38FnNVap9mQOOIQmRjZ8m+AnnaHQPgF+BI25mvAifK+VHNeFNULgCggCf0LpbplnxveW12H9ZImuFQCH4gBa04BmSBQKwMN4s+dWJO296NNBkt+NgLPLpld4CRUVj1bTvyd+EgXGhNAPby3j6XgEk+dT2sfqefwXTTgGQNAU5BulKFa+V/mDGXseoFUEIBEyOYpEkOTzkZGE3+45oW8wfvcFwXPCwe74QkjSXtyeKS9JY0IyV5ltu2kda2AkhAmpEkNVwL8IgbO/Xi2F40KhWYcDWx7BNiGJe8cNEmxRlGrZ9nwI087mVx5toPTTPA6xlJNzXmdqsV6TPTiapNulantclq7dqCsnmTjlmp6l6JKhHl0KB3IcamOGMHkj7q48iilnlpNXFa2XOIvzMxxsRYkyT/YcmgJIf69Q548EkrkqSrObOzwoxzCjEiOsBlLQl8TMc+m3T5xG6HzDQ/LOm9yeGWmGwy4YIHHsAYJM9/MMZO9LmV20NKiYFqZkXlNLv6DXAR11RAkgFIwVOuAg96tj4M00m+xjnnEGMCHBTZKPhs+uYdfEySXRqXNCb5WoVTaOayJhwZhc+2+3c/V2b55aTf9VvReQkWZfkRqUx2qFqShbScWfjAk3pyPxO5t3LTZf63PEsR2ozWXo2s/ZkLColIs4MDmFYgr4uh7esezUe7su+fFfrsWTdHea14OYpExvkSPqa51iT4WOZzoGAbfBxGspjkJlv1/c5cou90p1sHJLEAEi4d4ZuNxQReKoHhk3rNGxyFEyW18btPJyze4+3HB
e88LikaV17sX5+WAlK4U20TYlRs5kS0ZkVUcwYZWpRACI8CUjUlKKZax7BdZOn9tD7raaEejXst0qX/FXBsz42dPAE0THl91vb5a0gMJ1GyU7clVDxs6lbTxseUDk4sUFSP+xoeDZxpkY3Su7eMJ50DtpLvEUlzBQBN6FWKeMSBCYDGJn3qOUXCuo2EtFczIqmOl8qYUDStElEra0gO+ZxzKSqUh8PBU39C9SEhirGGyyQtCYKDQ9rF1/w7urSFJ02JAxCyaUlMgIg0ACHCx5QjgGtGqpWbkGwy4sNNY2x7vh4ASl4fDXR4JiW1NB8ceDjRbMeYa05cC0yvl7JjZxPBAqoWLI5s+PM4zNm5ixQ8/+UCg+IwPmB4LYBPx/Tusp+t+nq010SOfveik1kBAgA0DtoAmuhlHJzIoAoSnABb4R8/1wpwlqGAoT0e58Npy6aPw3OGaRoxTSO1qU+AgllzKU6jtd4qcw7ZIJD1p9i3ATvoS1rLW3MumktFuBbBAEsLTm7BZEvmeXqpZ96pTzcNSIC6EHJVMp3nf+U9nDiz05PATC1MpyB8KgKeToyRzoDjGBxwAnDwOSxhBE5pA1ojlanmXVLKzjcFrg2RUUAomhZQzbTkZAylfH+SpjbYviRtubHavV0M9zPj1J8AYxI6Dp7S5Ghjgx9qXSHGVq2tkAQl7Mns2Irm49lxu1mMNgx6Fw2IbMxlnM3U8HMaUV/RhtSEnJXMcfM7Md0h9qNtjcIC7wEh1likeuhdJTO5FLOiZFpEZlgEOKqvSA3JzcGIfGQgJjxGkNVLKp/9PpAc3GsdlQGv30J/Z+8TA1Ai3Yj+1V5fasMsMLI1s6rgxAIhVP9eIMLbqg0/fs7nPvVZ6xRivh5r2yhDNVBDqpNTbspbkrV5g/nczDUBRKyoYrxvm7p2aEv4d7M0H1wrla7x+23mmtdV177YPjikc945HJG7KQ9ev7jkq+OIUc1Ar2gYYtasbE2kOeij+U6kRRxs5zvtB7p2pUdzICa32ZhrvB4LNPX8XWbWo16Za5Pex9DPKWa9K2lrQ9oXfR5PZLbl3d1H4k6X000DkuMp4IS1ySYOzJtl9dTxnDQpkLyuaUvo/NMp4Om0Fm3JwTt8wdsPyUTkqIcd1UISErXJlvQETFrUD575lYMZuk97N93pusYP3Sstkn3djYjFmG5g3vxohskmamyCF+6s6IpE8MFnAOjbcfCIdkzouQh0Lkh7f94P5bgDNLgGhPqDpNrExPRMbkoLlbEl+0uOs6Oj8RMKo0KMS4oOFU1NiWUKZwFZ7dyMhtOSiJKjeommVZIh+gQCmz4mM62qGaFrDlsQQD9XOCxkZhRi2cvXXIYC7Hn2bXzuM+8dUgQp2umrm04oJpyCaxd9Wy51gAhfZ2jckLaI52WgawTGgAo8JLCQWiMNeHihydGma5URpPCsMaMR51LHUS6FNC881iULY/LSdAwRb2FJIHBFyQVDc1ojbZ3R5h2AxsSNv+csELFCpqY6WDmpjWkAS1umrSPPJWu943uGryFnH0ioxbQmlC+mCLbYXkHTUAtby59HxNuj7dO6lmXZ7L3WvqPWaawNFo2AiARImnBJ2+ekyVSvTbMCmp6QVPdtbNv6hLpOcgEYJY4lbUm6AcVXlWtLEtl7yJtIFi/33M+8U59uGpBw7Qj/R9fOIblQWGBEmpHIxWIN1QmwmnHFAlrePa4AFgABfk22z37NUtAY8iYQy4YoBe29LLB7M8DO2MFqC+mMFEj7PQtCLJvqdKwzBIC92YdQHYCpf4rksDGB4ZUVzoidG7//iEZ26hYYk0AEQJPxWgKRhiFVmBlJPHfFGhPzSOPMO5QsxxRdJTHPGcDRGCncaUTIktbeXLHOnUvW2PMKQ0mvm5zWa+Qs7xKj7PJ9xIx7EDPOmO38nJjPkcSQTGMcqs7DZ+Y6CfTr+y6+BsBYcuU+Vql2qde77oavMjqCmaUxQn0i+2HkhC6ByIz2YwaEaNd9d
Ajc2RgJ1KV+zOc8sOSQrYsDgku5E4Kr45LPa79s1zxLA7LH14bImluSej4nzdo2ACFbAQM7hoOmYPAuLWlrhNhbKgjmWhOg+jGl/iDzzAqQa7/UfYWivckQ1SHGstZwDYv82/RJPsfX3RlgMkM2ox/MMvX8yuadrYYbgZIZK43Zd7C0StUU1pv92PQ923+qJpI9j71u1dx2m3mnO5l004Dkc08r1qBoSJh2ANiqweXEL9oNUyLSl9iSFsRaaE4ZnKTzp3L9c09r8S3RfEiovdr6xJs0GwFkFOvcAnO9SFq9cpYpEv2t5mg6wy3f32IG6FqPQtaAtNFQ0l8O4rgktYZZRTFVAJAjs7T18b57hE2axD69jw0+eF9510q5pRakhlftOSN3uyr3VwUdKQiLKzHpQ6wRoYLL9sMEfjwQVjb3nGuYvxngkcpsz89oO+t530j9lty/DZPpq2bE+woynKtmVjySFkn/+ZPkqBttxiGiMNgTn6GAk3VV3t3V3CQ9Bhto5wu9e9WG1D7R/Nc4COF9AbRmaxp/LYEIkZSBOLc9B6CJJph+i/rZ/GmCA3hXQdyafJ6A7VrI6yh901lvrHl1DiCRzx5Ru94Z50V9ifFnz2N9HEIyhQsuh/HOQq4HVK1J8hUgTYOeeTw9R5yXmpBiHtu+kxadioefl0kBpUZ6RkPA1xLSEGwByDbk+OhYF86xxFus3EgAY2lHLKDUc2gfgaj07FBAyZCKsC6buC1AWfkEcLnTnS6hmwYk7x1XRIQCSEqiolO7uETvSsK9xvmK2WNz8yr6zf8SaYuQtmjwuh4PKZqWJiF5OgW885ijcsWUv8SfdA2BRVbIQUt1rqnNtfeV79YDHPK3xnjLcJYaACHzAMl013Op/tm+Ke/pBSgjBickp2F4V7RH5KSdntM6btdzuR3Mlpo/RyPN30UDXtWZ2HY+5n1la0Ss59p9VsCsi1U5xBiXYlvuSeM0ltoXe2O/FQZcS5NJ59I/AUby+CoRtkpfthoBR4w3UHxHUvv3MY7AeVLCxbXaAEne1XFLzDYHJVp9nLmm+2is8HGjRcaSGem5OZYEIRbwmCUORngf9MCIfNf0nsADRFAA501Guhetis+1UYb5PWBfPk+S5fzaAx6mpppHYQsCvPqYNSgsAaVrNaNl3iMJJQACLaRZds1YtwJgWO+mJg70LbApjvS+BSQ42Yy3BTDW0Ibn18pov/evUy0wkcLKkTao164ezQQDqNdqG3lb1GiSQlNSqa4xMofKm0x3k603k24akKwBiCQ1ibFk/w4hIrJJ4bJddgUliZIDeQTClvnWzLP0NlQGa6Q+JjMuAEWjQiS1NY+LR1WZjwdyL5659k7yd2+y9BjYHggBuOlH+j3rA0H2/akOuelXYEJ1atRIQdlmmn7HYjZQpP+IAFxx3k7n8jOzc2cix0y98vfPUYCstkhn+54ZmgQhpR8Z6LCZpVq+1Mm+x4htpHFM7zHaXELkUlK9rNTc0fGeDd+SIMp5o/7Lkn/yDTks1STOs75PwGMbOYtTjBGRaUl4q0Osv1NkqMRQR3aNuoiuN/0x0dfAGAACW+0hB7QtcG1BLNeM0FvOYLGAuAElFria4VcC66dUVxsCmPc1J9IokQ9OO7+ZhkADtGx9obo0LUjP2T9VfRk40zRe/FrTZmNtdtj2j5zbtdLYmqQyp3cCJgWw5DWTgxZaizkwDuwZTSCRTSJADrLqOkLlpCP94iowoYSXa4iNn0jar/0Uc87vkaQJUDTqaWnbNrX5u2bAiGyf9FNsLRtsyD5jDqaBpoaYpqTsey4DUlZ/vAFAcqc3k24akDydAqJLeTfWU8CaJSZhDQmU5EnifAUkS8sZlkNLI1KjCumTWd4nJSBEmjbh4JOG5PHQZpldvCvARaPe4qjRNZH5SAsCbM2PACbNFyCEmCW6TpLaZEYj7u1I/TWSEpsQADKzInOEdD42JgpActbmWpO08S5tfVFIWZatZJx/Kgk86
PoIfKR+qSFYNfBhRT2SbbD7Kh/E2k6K6ESM5RpbCSjPaUNEjDWXbHKaHYtW6N+2jAQjSdL3ePAlBPfic6LDXKaE+s3JD1PYX5/7TU90GCLgsoN6CSgGnSEOsYKNNcQcmjubucXEpK9BMNhlHGYTFQby+LGkxCDqknbqKk27Jk3WFlcBGoERAmfAvHmWBCDlvSI/VwGGpEZLwu9hSRIJ4AGpf0vGaIoSQP3iAM+CbwB2P0qyfDY4ABnNuVIX+z3z+AJYBbjrCcWs6a2db+rgpkx5XSSNCYDGZJNMusqt9DK5i1XTq+nytZzUsJAj/XENm/DDFLp+xoxpdN6imb1W3xP7qrKTaLNmFi01FZoAR4KRGVPzLeDYb74VVtqb0nPL/HgfNA/n0ClEM/T3cz7zTn26aUBCxDeamJlHAiMxa0DgU7Sb4GIyPchSRtKSzJj+7AUCnLQNRYY85fHUXwsNirzP8uuwys/SbL1W2zhAkI7nFhihY36PFXZUMgJdYo6UZEtNeSAW54qWhCSqiWgT5jJwBysZnbSdbVmgbb9oQIR+azbq9jldC0IMJdXJySksShxo3zSb8BoMoS1H/jZaHeeaP2hmDRYDQMCkyVhfNHRb5pvMloCx5gjIAMIlRs05p76rphlJGdvzucjqirzctrKZYBMESuQ5op6f1R5fhz0kgYgEIfK1NPAhr0kgkq7FVtgAacLW1kWSfYtkf2x8S4QwhM85YDzvvLMfTu/pXHpX2fpL9p62LbVerjEJsSZDTOaYGtBlLZLtUYXzs+X5m7YoeKmNzRpqCieetCcUtlcXBPaTrJ5Dlqlo+1sP6U5k8QIW9YBIOp4DI73na88zzdM9aUZiaxIY8155pzudSZ8XgMR7V9Y3bm5B4MR7VO1JiFgOHi44LAefsIqv0WzkZkMTjhgcfg2wNST8HkulK+OWp39rkfZq9xA9da7x9syStqBbPiMjzYhmkmQBEe5gC2xDHnvf5mIZbf6cfDMW0nFElawmrXOKQsM1JMnuvJXQkRYFQNGaHDDwnWgkrbU/6LcVCetcqax3W9Ch9U9vaPD+oZDQ72WN4xojXp+SRpKSbB5D9d06hq1mRDqlWiSZhjS2io0AO7fd8DkQeTx4NUjEq8XjYaEQ28BbB98kQixaAdFnIUuNV+Rs4RGIxDVu+jUyprn+ThqR1C8xjzHSLEUk6W/VNsXS1z3iZlvyG/Nxx8fbIsZePR5oGpH6hRIUAlDVQxrwqNqk2IALXgUHGESNBH0TYXD78EbwUbSK12GOpO+GBkSK5ojlYNlSe5JPh2quxLQsop8sktett+7Vw0FJOlFLU6Suph/kN/DVzOyBImuxIjwiJJUHUIKH8PLFrw8MYIbUEgApSEEEaGSuPjYm0C1zX0FJj3Sg4Ydl9tAeoYyu3dB9Znqm2BrpAG3rjE+mcNyag+4v/EJstfvB17xnbzLdfUjeTLp5QLL4GoHGsq0m/wCK7x8zygfSMYESXqdmHiLBRk/S0AMjNMEltZLgNkKIpgLWnj9ayKy6eNt6NGOmRTQLRrikmoMRYiiqqVOtlzPeG+Fb2czS34jYKEAO3iHCpYSU2Q/Ew+G4pgLL4lCy2uWoUtyemnx7gNZWWpImqbY0IPLdU9l5LUifEdLbJYdKlWy3zAMxyMdQTbQoKhmZItF9vUzO/K9FUhNC9uCyDP2lY4pi95iBBuUNSeZaCYx4x/JsMEacGErtU5L2wsORorW8m+AbW/OrDDxirOUJjIRYyxZmPNR7St/FeTMjIpkEk465BrKUFS88w3DFrB3iv4E5EEJgF9C1G2ZwDqMLNLyx16xzL40EAWU+Dh5ZFLP53RwcImJz7lzaczufb9x3hY8f0ppw0sIJ0z1tqOpcH/M7Aeq4Lr54vOVirSUNNYUdpjDO5PdF78H/Anx/pMhafWDSS1arHWu0x1TMLmtH/Jr5PSJLA5Jo6+g+JD42Qj8wx53u1KObBiSPBw+fGZGnU8ARgKMNL0t1n
VCTLwcP513ZVMm/hBw6aZI+HrYaCkvTAbSLgqZJ6CXAk79H9qfyOVob+qH/tgDFcrDjzndU1wiMnUtk056O8zlhUsMfaW3+BYgU5jCBgIhYEtSRr8TCgcmCEnHmIdtekUOsdIIn0hhGyzRGOubTtRmHdI0JSufngUhps8IwhthqRlotSCjnXp8CQqz22/V4rBWZBSM9BoBr7xbxj4DIg0/g463DkkAI04yQBo6ASgLMW81SQMw5MBhjFZFzYIiyBDhY366xhqut4Cz1+RqqFPcYAgLZwxegV4Eg1aV1nRXqWpoBlvIKKO4RPZO5ZyRiYIGbZ2m+HjSmkh9W9QFI74XynpvQ5J2hos2nB24rGVyzfsg5pJF8nByre9e6GOeCAdTn177T6P2UrkpwQqRtU7qpF7L2hZ3wFZhopmFrzJYLa8xCAKNteXBqe9cWnOj7qqX5PxjrUH/fTcfS+Zy3T3NCt9bJS8GHRnwvl+dbYKJrS8hkXPIE9ffFTbzTz1C6aUDy9qOHf0xZXB8PHu8+rVhDxBOqtiSI2UFghJzcJRghIEIOsZq2RFuoeoBEO9de65tnzVDPptRa1GrbKbHTVkLE/VWeA4QAiqRWSDYd2nwI6do2J8KGHEqytLSPuuwEDEQWuhccmORwt9yxk/JwAKhx+oXpgdZ+oAUe/J00p3SNcbIAiPZoVcLPPj0HIvSbzpHZEGeOX59WrDFdO4aI0xoLEOHmWcfOeJvZQPl84sfSV0oCEAAFhKR8IhWYeIfGZIvMsyjnCGVld66Gti0GYnmMECgpCQ5zU6rGI5dH/Z2Y8epHQow6MeknwaBzIMKDLbRM+rALBfCo5/j441rIet1iIJMZkctzhgmvNz4fMwCEzP/ofQH+zrVfm3Cx7Dl8rFOABwKb6X0oCEbcAHpaP2Q9/Dl0mr4pjTvZ9dbqF1H76twlkmuT+LOlSRsHBTO+j9ek0fMWkeuJm3u1pnbK+pnHC/n/pBwozMcz+/H5fFzX74gQ07mexYK2R/NjLWEvv269u5bLawm2UIbvz5YGmd9zrjO0ZoUhNdDyWc17Zb5gDa3QkguOtD47daJ9vSkUwsubbN1CBvv3m24akNCGQxqSx0ONFHFcA1yIGzMuAiLOu7KxSfBBwIQmnCWllcc96a7UfmgL515mXy6wdaGpEYpG958DMBqNCbM7TgtXLRciUi76shElbmZZKjAASHrWSuE82sywMeIsf7mUJq0FL4kBqeEkW8EdFYzFJIFvkEUqbXTvbFZlaY5FREcaA6U5pXPquR1oWhFuMsQZZNKGEGNMYGRljGVjntXZTHvU2xBVhoBvfhnEUTjfypzWsL5FG5LBCJUH0PqNdIBc4BrWaJQp/bkFIun90nUORqgfLTBCxLtmLj/F9hz3U9pLxGjz9nDHc3q3CFRfonX7PgRa67XWJJCDkvSc7bvWd0h1PWRH52Q2GeBj8gukwBXwaWyNAnVwkOVcfx5pyxCBEAlKevXQfanOrX/NtuwWjGi/Jb00YOnRFpRUasf8uMxZz+/suQex3jTrj3Nz86dj2NDTdGjAZCZimEWaMJHXJdddqengx/VvG4mL8xuHzjp+pzvtoZsGJF/wzgMObz2UCU2ZWylRIj8mkqADaM2zaHI9Mpv0x0MbO0nTmmh2mUSW6ldSTxWs/ZZqYa3MHqJFh9dDKlo6Vk27mHnEGoAmHfCamaSyWNdNyUeAbGJ8jEi5A1KpEByWBdkxOLFEXGoL2GYRroCYyJtXgEmVhGdQkn8n3xKkQAZrwLLUVwnBDTUjRBx0bM65uuml3qA217IjEDLCkFKCTee45JWbZwFVck3aEPobQsTrtYKU45rue1pDs4n2xl0P0MvfKogvGiRm3ubQaEOKj0gO5UuakRa0tA7sMvu42ocMbAD7fCWIsargY6sB2YSfFlqRPQynpRmRvkoj5ormVf2kcXOdzkjNGr0vmfw1AQ9CNfHj/VI1bDYzSu3mY+BhCVUbFj0z3/KAT
/UuLvmLwVgreN/JIWy1pAwNAjFIwDLV35oY9YQIvXDGRFzbBIzHAxH1xaw9f89KeC+o6QEPTnzcp9+tOR/XoPF5QUIQHgK49zxNKwDYIESuN7IPikxKAdE0hp8UiZW2n5Oj+GuWkHmPpllbL3uCxoPSFz2/l3bPbzUm8nnE5xxPY2Ho+00UROSln3mnPt00IOGLyBpqXGn6rU1oTRsiFyZe7vGwFMBCpGlN6BkjzccejYSmZh4tHvxdtfKXmF31/EeqI2MFJcXxO6IJh0vhllPTqjYESKZRPNEWbQSaZLLUB1045VybpIlrSyozUTUlQA2LyZm2fijMLUlfEOD5gciWmYrqNc4s88hOkokMEUUrwjf/ozA30CR6RLOmfhZA4YwBUKOtEdBotCG+RmvjmhHSRD0sfmP6R2BEjh2Z2EuGq5VARDKTkoksfSaACD+XjveBEU6zmpEmoagyaWieaWBfAlpqo8yfIsEtAKZhC4XBnAmCUNqd19xjSN81+AjAI0WnqmtJDUndWzG2NAIjlhaPyvI+o6KkNdnzXAlGmmsd7VF6ntuU7REfK1yhvifb/B6aHc98TvD+4cBEvW+HUMQ0BVWEHwCbX7ye8jzW7zngSY/Zl2unJtix+JcZ0gSHs/yKpTXRNCbSxPuk1HenO+2hmwYkH3j1gFfvPJqahKfTupnYUiKSjn0zkThQoTCi5RqTdPfNCirJ6FMj4m4vWijMOcn0NtzhOWCE7iH1r6ohqY+sICLkGPGERNak3Qg+L/gR+VrOgL5kUwtUE4wH+CzhC9lu2OVNP2YmwDXJY3MTzEyxPbmNdyjmNg5sAQ/1Wy9KUi8iCVQaMy0BQgAbiPRI03yY1xnzCGwl9tK2n0L7ckDy3mktEm26h2sfNQ0df1/52/y70R61kskmWSbThrRakBou+oGZbGkgBNCBSOlH1oc98EHSf8thWwMfgO0f0oa6NZhOvk5JZqlo5lqndRlOmpftAezYjLfcRgG4uA9M9T1KWpMTG2fvnapWhO4n6TD9PYUtE8bHCtdePx48Vr7eoJ1Hno2pNcYSwKAIIlhZ6z0Bnfmv306f89TmEMeQSJufMuSxNYYkzQIJbiqrtX9dBVjBNspWrctt2szbTqQFLZCaEf5uR34t8OAZVWOyZyyV9lpAxAAj3qVyuqakvlN6lgOyVl/6tIz8QEYmXPJ4Rhsys99bgKUn1KznEyiRgOXu1H6nc+mmAcnbjwveetyaUxGtIRZVqCTpjCWJQAjZnwNbta3Kk7s5LYg2yenUg+cLPG1uKV77GoAlL4SrZ5IWpvKtz+/7ksyCk12gJD220HHlgMwhrDWCFdZko5+YrGReUYBFdEBIQKRYgIXc5kiWXrEwBR5ZOrltAt06RaQ9cUjMAr1nL1yqJN6vl4CQnnR1I9FVAEgqxzf2rdkQl2aHqJhs5Y2fTLQ0EwP57nK88D5pBAHMNltKJCUIIe0HMdfnmmXxrONt/+X+QgtEOHCQzLhlgpXum9N8qGHKxalGKBsirOhaQAtYZGLS3jlO2qiTDDMxg5pmreSqEWNIAtrXgolcQzDXlDRufBlXT6eAtx8XhJjW5/dOGajmtfBh8VjWWDRkR4RsoukKMKE5Lt8T0H025FLKzaG8hwpURmuFJnCS5llaoAONKPy9JPm9yxqpaLmtUMqWBuUcIJLKRPP9uCaEg1oCIzyq39Na16GekGSGCIxITSz1gZx2SVBFP5AnrhAS5PXw4F3JHcb3Tk2YqmlOJEnAoP3VyEqqPAtMNK2LJRB+k6kvzH2+Z96pTzcNSBZvhOfNjEmIEW8/6hIGfo9uWpN+E0NEZZtY7UqbtA1fI5m1N2U9re3k5k7tjbGoA9ZoqVO5JIUMlWw6R8IyusazueYwVelZrmZ7TZSQXdKKxBzNKkfb8XRc7ydQ4tD6lPAqOTCRtMMKZgNKZsgCI5KoOou37IUBle8gQUi6r83zwJllTUMiEx2GWG2ii9kW2/Dl8zSSpo1cE
pnefWsaYQGRdE13Uh8BEQuESOJjhvqfNCJrBm89IGJFx+LO2/X7dJvStiva40QjaaolE3Cmc9v7rDndSGkzGOM5UyiZIzGS5VzExufo6RQaXz8gabLpObY0NuLxkLTH1MaSDG9NQpzjmoQai3P5uJqA+piEHQ8LENaoAoUeCGnOKZqFdaW+r3kYOEixQufyZ8jrPW3bHuLBQZrnrErI3fL9O88J48Go9ZemFeHnORjh44nACAe3AMqY4mtTj4kfMesWSTAik5KuMY+BgXmeZOb1/XrL5Gv7j2YGrv3l5UbBHXrt1ii1/6wq73QnlW4akDwePN46LI0Gw7sabQdoHdD44sYnmRYFqV4DcxB1RTMimQxZZoYshiX49nwqSxIi8nmIAHwCMsYD60LWZrwGdOaDO7RbNqTEkB6URVLWuSLfG6ndQMi+JQQmyIQLCEVTwqPmtMeVoaAIaTEmEy7aGFx2YAV0Jk7uUyMbb96Xe2i29KY9GgBhxxJ0AHOSVTovGWcyjUj5RdqN/z0mweYbv2yHJM1ki4MRDjqA1idEakMoVC9Qw/hS2N5kSrEFIHLuWziENCGkHYlsPqacIdVpu5qHkDYpQkaMmo0WNSJtPRqBEu60Pj7X1s1JY5KsML5cM8IjsZGG5PUpFKbxcxSS/RSKJmTGzIbGEl9vEjhZNpJanpm9SKi5hNs7rHErhNr0gZg/wFaTNTLb9BkoNT475VtygGc2o2vStJ/afao8QwjGSnhzA7wubn5MW5na5bpkmWcBW+EJF45wcMvHhmYVMMpBIkmOH0BoHxkoKaGJiTmASz5OwQgfnIWEGqiYpREg4fOmd++lxAPhpN/njM2Xp7uG5M2kmwYkJGHl0lXKR8AzggNomYYSFUqpU9vAN4tTWnQApKzepT2ynF1vzUKLUhdPhBDi1j6XFrxZiUyPzlmUOOCw8pOM1MZrAHyOVb9JMBdaTUkCGVVrwmPS8/pjJBOuNkzpaP5bjt/PRXt4U1n0HCCSrvVNI6rzKP2tmhF6rqYd0UiTxjWbJDPR8gw0cDBCQJP7hJCEn4MR0oYUhtOT2Z7bABEv4GFjauME40QgBbVPuUaAO2pzMHISkbSo7yT1gEX1PSDpK8vBYJB37Xqjm9yYt6tkadrq761mBODMJguaEOS/LbjlZjZdoMuupXpa4LSGJMxJ7d06uvMofj3qmd3x69p9ZU0L9Q+t/WQSZQELvkdo81beM+skDjBgy+5PAI2u6++WzLWiWV4jCbK0xJfyHaRWZG3m2FZTa2lEZLLBui9Vf4fnoGINsOceBrIPTNhH14h04WEfkFCdWvlzSRc0+c2508VPutPPVLp5QNKYb2Qg8tbiG2dXIm1zkTRjbmVtTrti/Xu32fiKVZKrYRNDqOCnmIqx68BW/WurZrkkoy7cs5nhJc2AEvlobvIWYpYoeRS768JMhIgjskSOuLiw1ZRwm3DqEuf4N7HbP9JOWKNkJOlYvBtCRZ1ZtaWwPdMR3V9Bvyal+VvTrWqf3WMQNVtivhFuNklXHUa5czqAEhmLzkmzrLcoPPfiCwiR2hCa5h7OTIIX8hjxqGY1IYPZGGv/hFg1I0naX4EImbURCJH27bOaEVUrK/qTiIOSGU0sN4HTsppb7ZAUxPhbGaAtAI31RfVDymZap1Ck2KQZIW2blG5LxlKuJfwa//v6FPDq4PG0JonzEgCcsnBqre/S05DIXFXXAALN9xKaEk6cwfexZWp5uGgibTrO7GdqABZer+iP0gd8LWW+MUdRv9UWy3eEz5VTs/Zs1yUCItx/TQO5wHZdbZn5bTxGDnIXF4HgQJHbECIzrXZ65xvvDaQ1KcRWW1H3y3Z/fDoFPKIF5T1/mJ7wRytzLvWBTzWXl+ZhbzrdNSRvJt00IEn253WzrUxNBSqvRMjensPdLKDQFtmeo6laR4gMWKRkgT7SxshAiIhrPyIOSqwwfGmRPg+ESGnN3kzuyVemBR+08HONkM+M4xpRcpTAc
60Jr7SCAAc0kXRm1gBtPz8XjMyWmQEf/Lxm5qGBED22f61LSrR5pKjU9ioVnwFeM9eTaVX1F/GOa0xq7hBuqtVkWc/RshafTLO4VjRpRloQorUq5msEPjQiwLIS0xQqoOO+NeWvwkBZEfJS21jLvNaIFpjAt/dk7K6S7idif5+R4EUGQ6BzxFA2oVkFk8nHTwUdbW4oAilAyxxoJlucQWtNtxKD2UrKs39ejuq3OJeApAMeUBnp4Ko2g5zBR3OMz5sR1eAcMfuVDMg7sQbE6Wd2Tbi6yzwfA+zduW+J18uUZ3fapZlsaeZZGqjnEbSqqV9sxg1F0QT2MJhtZCgJgLX34/6cmpBjRHwdlHukZPI1mjG7usT8a0R6e/UEzwfvEG8ImNzpzaKbBiRElI+gMjW+gpJOZvRZxlGaxhCVKkXdM4u0zPhdQJF3ZYOsjCY2C3YQGz+9z4x0ZUS9Ba3nJDezEErJMDGnTT3MvGBj6pY3iPItmLO7bEeMc74ce8HHSPo9Ylg0wCHr1TZzXl4Dxe31Wl7G9Oe5ROhcYPVa7R85RcrIddxvhDuvE0DxLo13+tvkEFlaIYPP2hDSipBGhPuL1OnImfj2XehnjARAKoiIMfuN5DDHIUbmL9JqSFofktgAOYsa86tQAZXldCzJ1IgoF2S0rXp++JimvdqxLCOnibYmSU1IKdsBI0R7pK8hbse0J8F4qABBAoBy/455xMsBLchrNAtMq6WRdxUE8Oce1yGMGTLEIS8QWvelcL4kBJLvkPuBgZO90VylZkf2oQZEaC4dg9SCRKEZCZuxMyNASWWSuZ80i1rzXnJcgeBTqGiEiBXV13GNWwuF9D7t88kSoMw9z8Yxi/xJSZdTFvRt5MKRdqR3bpbmn9HyUvwvX/vDebLOO93ptgGJR84XksEISVX5Xxmyl0w9YsRGj66tZ2uMlVHJZhxkk646hcYcFUqh5DexZSZTuxyAWBY8btO/ikWbq/K1zX6UwV0uxJzOVftaUh9pstO+L7t/xnSEHEU4hMgRuFpmz27fXoCmgQ9Lm6Fdt+ug+22wMS6ntUe/xjfMhslSNCPp93Zc9MwB1W/uq0N61WJus6u/dfA5iWEyt/QeeOuwFK0IByIcmGgghA+jGFsTwdJHJadImm+UY+QUYgEj3DGb/kogIqW4st80aSgP9LA4x0XprIXzgTG0eWOdmwEjvfFexiLTlPD7pP+MBTSkI7JWRqNeqPbyLB/haU3IzCTgsUSUEOPBIWtl5bu372sFKdDNLVumn7fPE/hke1C55qtfRnCt5uAoFpdZ8zqtvKZhX5zbAJYaIYyDK/OxKo3WIwuESI0IYJtn7QUjVE5qRl7nSG2vDr5EbXtcfH7p0PYhmXl2hJtaM6iOx4PHGmLKoyPanfbsxZwTM/uwNSfOOafVp+3xmq/gEW8+rTGglxLhuZ55pz7dNCABtos0LaR886V5RGAEaMOB0jBZXGvOQZvMyFxKMj1y46dxzyUrWp0rW7RXtumT1I9v+NzGfw8Y6dEeNbDmMGeVt/JNkHbEKxs1p2Es/4ASYlM6FxazjIk+sDQfG4BhhOm06hlpO+Q9MyCkOdeR3HIGUWeulOd3NqgegJXH3KxKj+PfMs+LQ8kbAaQ56rI5lkc7f9t6tmBEoxArwxVjRKBzkTQm3IGdO2i3WkpNQ2kx3xawI1Cyy+9sQLN1zfjJEQUxPrZzYczMzDJT0nxGAx5tvX2EJUFCiGkc1XVie4+cU3ROBirQ7m3OsXfZrituUw5A49fEBQP0zq350PnrWVkT83XZF96J77pT4m2tR1IoYoERC2jMahBmc23wvm2OiwAsAVueb8S7nuZ8e27xwKpo7y3izu1a2T2he6nsKBhJj3rzTwMj2j13utMs3TQgkRtrikFffUgSs5uveWzyEZRr+XyIWVoKkq62zOTGdItJNVWntqKurfcFseFVxmebnK4njeX/evHXR9KPGfVvTxoiy3ONCIDGd4BLCItfA
TPdAbDJOcH7UjNNKcyF8k3o+l6aBRxWWMu95VLZ7fOsvBVme5Tyans0EGJoRoAxc7kZDwyAkoaSf3P63vRNuQM73esd6j+kOZuASasd4Uy4/NQhpjhqBXAgMT+kGSGwQXPrvVNNCsk1IyFus9YD2zwIgL75cyaCMz4Ns8qYH/kOmjZRfgYrutZMYj41R8UZ82bzfIMxmXHClaDk4B0eD2lNT399c03VmsQIIOUkKeHFs6O7DHdLZGlEevNJI+rTY+CZ5FP5o/Lu2twtv5kWQ0ZW3MtrqnUo36kZEzseogk2OMDSNLK9/UwmzewBlj2MN4WrbbQQub7Hgy+azJAjPNI+BswDfxJULh5Y4Iqm7CGbafJEZqP5tkeIsCrfwNLgSuJ9uMd3hZ/XxtObRkGMnZd65p36dNOAhIiiMElbXZoWjkmkOBiRczxJz/Jfdj6CLa7caZVJ5vcQ14QQGGkd/MbSWLl4n6MRGdGMNESW5UAknW81IlRWghGeL2DTDuUcZ6S4uda1tLA9rceMudVeLYcsqzFAm3YEeW1bb1tee5fzx40FRGVyUfIbKdcZGNmATlaWtCMUypeH7x1teQRGym9k00uhGSlzjeZZ2GpGCnDJEcg4eOPz0KKexHYxwi+Fll9RtYezc2VEM9L2S4mvG1xSzc1otLI9wMFt12eofU/7Pk0jQud5PV0Xj17QglSJaFv7DE41Upb9uFmGVq1DnNS1On2yANUIiNC91n7GtSISkHC6RAvA32GjBZYJfoHm247ASZNImWuahB1cr569yQcJ+ISY1pc12omTOa3KOrVHYEnHt2Cydac3k24akMi5QsBkxhQiRGaylecnSU4BtAxJIGZlu/Cvq84EaGZbKwMftOnx5GokldU0JDw5HS2+bZSaOU5cS36Yzm+PpVmWphmRmpBybGhD6JzMEyMZ1KZ8YWZZ/4o+P1eia5tp5esToIOXo2/ZlN8JNIA+2NDul3VY0Z4241fZnEZaEnlOA6H0u4BOPh6EhN97t/medD0LuRFiToCZmTpq4cZpvZyvWpEY6XfEMSQt6Bpan5FjiHh9Woum0tKM0FwE+gn9ZD/1zDA08mweXIukaaNFUhMstbpNO331Q2iECl6uGTUZ3OPBF2m0ZmLKgQbVQfcByRGYn2/KFRNNlHxHTchWX99J00aVPmDS/aZPZsCIvO7be8k5mtNRseElYc7s2tYDEb3ACeS3WMrOhnQs9/P1TReASA2sBua32g89nK9m3ifJ0uLXcaNz+RIsA9j4fYWVmb2hzVau9bF6bml/6yG59XfqtRtAk1g5xIgHOKyhambIeV8KMWfXpi4gcdu1/E53mqWbBiR8AwLywhjyRur16EkxxqIlKZsO2izN6VrryM7ByMZJlm00AKoJkSIl4xmy6XjE/GjSI7lg8/ehvpF9lcqEzWKsaT00B9LmWEjCNRACgDk1tyCEm2Bx05I9IGREPWdytXwHfPB7R+DDYmKALaMjnzdqf6qj8w4ScAw0IDNaERO4DoBIOhbA1dfvvw1Tqz+fz9mQk2b2+EENiKwkXIjp/NNa51gVBLSZx0NsI2pJE62eqaTWX0TSXv0crcYMpfFqm2RZQTmACkZ6Pm+cKCgHHdN9rWbVYxFrIjF+lk9aC0gSF/d48ButidbPqkaEz3G0TNhek0eN5F6gluemvvnaka3ltU1eNePjz7FIa6Nknq369jjPa4IPDXxof+X80bQi2l+gBQz0u3fcjqk2f4ZGpjZB6z8RMQ9o+1DOsx7QsO4bYoWlHbMhxhRdLicIJXPBEMGCLLBxKE2dBwKWzTmnv8ObSmuI8FfQqu195p36dNOARG4gx6z2SAx/QiXJHj0i5oR6AAr6KAwLcyaMsZpoFdON0KrxgXaDllluSRq1MrsvMs3ipiEETHrMjx5txI7fX4GHLkHqhe4D+loRTSMy0oYcWJQzDkQkCLkGAOF9zclyKq/XDQmfAkBSmXqfFUUG0MHHjA36SPo6Y07RA
yLXWBj3gBGi0aZKggQAbBNN4MPHNIcBpGhJUa+MgAjAGJky76rGM4GPnPSw/K1ghPy5yOlWmkvOgBGLNFB3LeoBDRKUeCZB1UhLwjlLi3M4IhYzvbIedYBDc39Hom2tUTNU36Udj9q3G31O6ajMaWbNWkNbBw9Qwk1ranvIsXpi3u/R3DDiQGXPuqQFO9CABAchVpn0VwcjqY39vU2e246l/t63fd+t1qB8IzbPyn4fxuZVvDoNiGggZDrHWRBj3GfG27FzAFgG5tL+olmMdt9y2oKsNx+E3OnNp5sGJACKeRaZOoXgss1nioe9+CUxKD7CRdeAD2JOOPCgOi3wQdSY4aAuGvV8ZAxsZYy05GrvnVKug/dYFuPXpxaQPJ0SutFASM/+WltUNY3HrEakFyWL8sBwkxzKxk3ZtwGogCSd3/azRaPM5akM/4btt+DleFkNdPDrJGWnujTTjhnn8WtSjyE+RyvSM4PoARGgNcfiY4NryHoUspbTedKCpESaK9I3qIEptvdG+i6omdfT+yZGiOYYgX4CIq0DO7vG8iHwxGwaozXbhz3au6mvDSMBWNqQwNbIdGKCaebjOLZmpmvUx7F3aR4sPj3DWks482mZB2pZoGmNItMtuT5t3pfWCBCT1TebGpHpD6DQLDDQfAPLuv3goIeFnn/erC9IWMdl9jpLS1NGvWzYXO+RptVoQYg3zuvgY+QzITUljdaE+cgFObd8HWdEfF2kazIUNAcfWphouX42+x/N8UAAlrLO873KlWiffslghBIVh/OBBZ8PD2esfS9NpwC4F9ZYnAZrwp1uHJDI8RTyprzGJEH1jGF0zKSAtCCUeIonOAN0hnbYlg1Du62LpLT8b4meFbABIyNtiMVs2gylvlhr5/aAEemcrmlCbM0IyvEM7Q8gYAORmdweM0CkPQ/lefb3ei461zyLyvU2fUmadO8SLUBySHdAwiY11wjQZMBu7sl/SeuZ6km/CYxIhpoEBKTp4oEkivOt0IpQ//C/16Aeg6sHdehrnCipaPIPqEz6iDY+RnHfOiiJCzxObFz1pdweGpCxzHI00kyQNL8Ofn2GZs2n6HvuMa8k6q3he+gawQosTWtvL9J8Qrb3tx2zF9iPNP/8WDNFnnmGRqsYw1Jbci2SfnZW/TyVQCnn8/zPmpFG6xES71FAiQGyJ3JyqmvWXVNyp0vopgFJcQwPwBIBLCgSz2OIePB1sj0wZpwm6JGYSiH9k0m/iKzQs0TSt4AAD2daCwgK1Tzr3eOKNcRGK0IaEe5DAmyjicjNS5P4WGYQAPBKkzYWYFGl4NR/FMaVkttRhu10zW8ACF0j8LGNpJRIrmO8++lwDWwBZWpn/u3oOzQgIeIqZlcSeHBzhZ6kUB5Lmt10z6G9wERutj3SNyRdAmgRmS8egRyKtWYHonFCmg/nBkx4rMIGoIIN8g1JWdZbrcipaEMiXq+tVoTm4jbyD/3d7tq93BgWyJN245d+9mqetQUlgG7nrpk5akIWHhEQsMFK0ka4KkVmGtsqOa8BNqhMvb8CmRE44e+srdu6f5YmuQY7J75VZxz3fHE0fxG1jh1gS7+/X/+MIMdaF0ZaEPp9DQ2IJE37MTI3tsbIXuDRu2aBEg347wUszwFugKwNITUmANp9tXkjne5nnyGP73SnPXTTgATITGpmkmlzPLokJuXxsI9K3HkNhHCQwoky/JJkduNMj8q0yky7pIFJZWJzLoGPxPS8+7RCxlvnPiS9PgD6zKQ0gdB8RfZqRCjTNpllPfgtEHlYXANCHCr4oCY4JQRnZE54DonRLO/UsXG3wMgIiEgQooOTamJB5SxJoQYceX9z0sLCNqYB4vyIrq0d6ZFmqsXPd5+VGeVkZoBs85OzI6+A9zGbHRAw2eYaIeKglb5Rzd1DJpJkstXm+uFmXAQuSRBA86791rMR7baCgpHd+qXEzbMkKCntcrUsJztqXCzXLWENJ88YszUzQZqpjBV5zDIf1cpyZ
moUwKF5VyjjXDgn6xGTutWmauKWKUvMILWtLU9S7fL7mbUj55p4zoAQLXnh6Jk9bb2m1ZemxiMt2ohB3mgFlTXwJbXb1yDv3ZaHca6AkrVoUPTxuvt5Tj9+U2kNd6f2N5FuHpCEmKWsMRYpPmkh1uhwZBOvVwcHEtKHxLskVdBsOnkd6ZnV9IPXJZlZrvmompGtVkTLM0KkSYjkor34ueg0XCsindUpaR3Q+onQsdSQPHhftCEEVrxrAUg5Nr5NjC4vlDVZJZj0W6MKMCroq9/EBiFSCyJNr6QGhB9rYUuHzJDY8JbMnFkJ9Hi5Ec2AkdnkfZokUJLFuGnnU1huIENMICQc8pClcesagcWnb5Ad14NLwOQYwObfltkD6Pu2GlDSipQEo/nc6xw0Ijmwp2/Oo2i1ZpN9ZgvofxvLTr0XIpPA/wxZ5lkclBBJcMLr4CRBPZWRzD/dZjHACwMlc1o3haHsgDjNHHJmLvL2bMBJqCHM5Zov26BpmZbmuTow0ez1eU6aRms26VvHtSTcFGdkdsWpByRmgUiv3tnvr0XC2qPVT+fS35GZHYXFLfe5bb/12qqR5uAe4pZ3qJoVksdEBk55Yb0tvKs1M+Ne2+mbXANAXKrhu9OdgM8DQLLGiJrFcKwe7i8iW8m4Zh+/eT4UaaNIAMVzhshMtKtwnLUks9o79CRF6V/NaPzq4Nn5rTYEGPuIcPBBGhECLFwbcvA8ozb5laRnOLHJc6JXjYgFrKwh+Z6sGZEs3m1i5XNH29YJF8U0B6haqx5glBuu/GazTKokvtn2mDNiYPdqK0YMxih5n/ZszkisebPkuR6K1DdGIH9nuredE0wc5/M3IpC6AkBiqkNcEzPlXDm3MCbLIhk4AuCAhIJW9IEIN8+SGaKt/uV905P00rEmtZUCgF3ffGCetSnf8eSeCfhQAAhnbEeMm9AAlPOd95xlcCwNpaWpJJJamDXE4iyv5cShNmhmiFXSzAEFA3SBTE2ZRkc4qT+4bQdp39CaA9wPwDLdsvpIXgfaftPumwnNO1oTNYEL36/onNzb6Ds9FNDS7luy7h4gKcKoJndHvo8FfxiBFIs/2AgHIuomt/nkqY6wJiFqoDDahhChPEP0swZSiFqfkcuBg9a3rrNOvyl015C8mXTzgASomzKXdFvEGVk5H4lBTcf1r0xO1Wzcg1CImkRdMrRSEttjHLXFmp9vAUld3PmiTtKk1AdVIwKgC0YWzjQ5FPMsKteYZmVmk5eXQERbt6oHQQpKUCx5jC6R2izySZBaESlJJXMdoGpSiDEFtknveiBkduOtTP42F0zvPnk8S7NghFM3q/hEGzQp4KZMZp6pfJH+5Y9dNCTOFS2K98w5cyNlrPXy8UC/CZDSfAyxalX5HLXmJr3/JWR9SylFlX13jikFgRJA9xch0hjeUeS5rdP7fL80WokOuJslnVFu19DZ7zajwdGiIrXX+S/2g2XgLbwos+uf8bUa0WxYYIu0vccGe7qw7JK5wvcrfm6zb3m+z+hARPants/X75jWAdJqUXQqDhpJmxXi2Bdkj1M3F9xwHqMAF97gAckiut/U/u9yd1K/00vSTQOSgFiYyaOp0rwGKm035tmFVwMiluS1V6fUeljhLyUIkVqRx8WbAITq0XKIcN8Qclrn5lmkIeFakcWlY6o/CUlTfbTGVcli29MBKbwrgRItIiWZ53AfAU0izoMHACjhXEkyDmAT0rX+C+L39hv1zJ8skxPNDEqCS/5XHm/6IlSJ16iN5zLWTd2IeCROOjlWidKuSPko5Cr5c5VxF4FTSaSXAMcREUtoGb4SSGLdAhFOWjQ7rinhYbbJcZ37ici5yTWa5Rmsn3m/aH0lBQH8PO8HOj7HB0cSAQhNomoBB8tUC2jNUGX5AvbZmjgztGYYf0k9ST4Hk/w3Lyd9frg50Mq+U1lPXQSCg1/qOJTCmVRuC05kn+nJ6biZ6DZ8OpXxYYoX3ZKPGyZa27u0NaKv8dgCd
Ks+jfi372nyNW0+18RzH0a5V/Eoj6U7SPjB30UB2dJkm8b0g68aYO5POEuar1ZxNF9pDaxljnhZs6dz1horgMSt0F1D8mbSTQMSLcRqc92wndUm+8wmKZmUHnjg9V5D8qoxQdqiTptqA0SYKYLmqE7XADSbrhbCt6jJXfUV0bQiZKXBwYgEIvRGEph4uBRxyega2WcaU0WbCw/lKh2X6Z/0F5AMqcb0aG3RgIb2/faev2Rz6oERa05Q+0lbojHiPKoMj70vzRPoFi4N3FIFMNJ+OsRYAkkEYgBFf0hJfglMwZgLOtbC+fLvTm3tzU/5fWfWjuZ7CjCyl2TUHvlb+ozIc7IuTryYBUTktWY+CI3lucTnkkU6+NDX2ZaShlIb1722AHVdBFrQXIkYzCzlZtoRrjVZGRjXpPpUxrv2m2hN1V5xxnSL6JxkhT3gIoU0lglq3b+2AVc0MMIjPVJ/caFZulb3JyLPNp6iMYx1XviI8q08kIUfrmhPVvHteICC9F7Y0DBPDdOGpDrb62U9vDLt3Uv2RJe7ZYBypzeDbhqQnELAe6dWfs4XPWsz23Oen+ORriwwoknMpZRJnpcbo2bSINXWAPB4WFRp7EiyxKVxfHPlkj8JROh+HvZXaka4NkRKqgisaMubQ9rGvauSbm1tixmjhFidl7mzMknByTyHfq+x+uRwf4FyLvvx9Hx4pARQ+z78O60hAkyDJcnSjPDva93LaWa8y2ujcS6Jg5KG5KYbyEGY1ZtvW5xj0sDKkAFCexLajZqPy2LSPWCutSh3o0SHIyZr86wQN9+cjumv9l2lzxa9qyUcoD4AtpL5BvAJBqf0bdCjYslX08CHVlaatFZJ/3UZEgs887/SFy+d1zXPRFVKT9+PARO6z28DMnANFv19ULhOzxnXEJPDdIw1aR2S1mRxbVzVg1D/URnLR7GhIEBQqSQJC/i9s3ujBkZ62hFgbBYq1zZ+bqPNP/jWP9G7ohHhIESL8Aig7FltN3HgnOeG30baJOl/0lC1wIRMulIdsfbzBGnf0PquSbAzVa1JKpgI27FgBSaZMSO8g5A7XZNuGpA8nQKiYloBzDFyIxrFWJfPksCiJ5G2qIINvzmvqbYlCCFTGguEaEyOJvGzkhpKKVV6PrUx8aiONDFwDRDpOWQSKOEUsw9JiMnJPeY+TAAk5ZMofxuTreq4TFoR6bhMuV44c6prS2xzLflt2u+0NZ+ytCbym5fzgw2BRy/qllPa3rtnpk6qY/FuCpjwTU9utPVRLBSlACmLc42Jl0ZUjxbpzkp0yLUjPQkw7xvtOP3Wk7Np39MKl1zuM8CI9s780iXgo1cuXd+OH97nvMwek5YeyTFL343/5t+vxzjzcf14SEDk8QCsE5nrCRgvYj1M14xvQ9eZND75JrhNX5PwpvQjY4oBgDvMN/0TU9j78m1CBCXD885lLQs1xN6jdCGFbbZ4Dhjhx9Z+9phByYP3WDzwaqm/DzloCo/uSN+Ch5kHaF7pgITMfbHU/YODk+OagOTRRRWY1Chq9n7W9mO3a1RaLfMAgzSNjAQaltbaKjtso7E+RGUNetMoKMKKl3jmnfo0GVBwnr79278dzrnm31d+5VeW6++99x5+7+/9vfjiL/5ifOADH8BHP/pR/MRP/MRZz6Ls5nxRpH8WU6lJzTjJOp4Ucx76V9uxlZ7vGexSq0GL8uPBs39L/pd+v8r/6PfbDwsel6S1IPV2kiC5nC+kgpPDkhfxLHHyrm6ID7mOXoZ1bqZFxHOJcAZpgAsLRVTmJiCbW4G0IpmxzAAjaUdog68S8WIDHKq5Fs/3QoCDwMhTOU7aktfst/zHx8LTGpoQsaNxtd2M22ADmzFQvpX9D6ibxl41fDtWw2ZMS6ZOZ+AV5l1hSukfnZf/6D76l75brZfKJcahLSP/UV01IAWa9oxo1nRHl/S2YMQKry21I0UIwK+fYcYlSfYLUPs5BPZP6b/az+0//u3lPemZLRhRneYn1
kVtPlm/NTBizckRIJ9Zt+UQ4XNR/qO5mgKAoFlzH3z7r66zlemm9dj7pBWgdZq3gQcnAUjwxNeGtr3WGN+7hkjSAmEcvJ7vqh4bQVe4Vt9TP1bwnva3ulfVfY72PZ/6WeyrtA8WU+O8j/FQ9p5pWcozPX9+28903jLD5GBEW//2/rOIr7XaMwE+T9ncNcZ7T/NB6wMnTVhxpzvtpWfRkPySX/JL8P3f//31IYf6mG/91m/FX/krfwX/3X/33+ELvuAL8LGPfQy/4Tf8BvxP/9P/tPs5a4hwgjkCWklYb6GVi6gVUcmctGFlC6xv7qFzklK5oLaL8oVYGhD+PiQ1AnjywVYTwqV6gG7zrKm4S3kGRNLCXBflGQd2eoz1BYpEBVkLElEYTpJqR9BflHwS1TwrFs2IlXGbQMTnnkgbsjbnAOSElC0AJYf3ECJi/q5RjIM1953zrjHfIDDJJVCaRJAcacuGycJYlu9jmR2E1mGVj6cRs9XPnszFeX15BX8/OqacDnzDSuZaVi2tZC4w8wfvUh4hHhEnldlu/tqGKCPjke/QDFXTHlurZR3zc5zJkj5csk4t3Cxga0fqu23PbRiGEDdlNXMgrX9G9WtaEckEWWPSInltVjMiBVQW8XHba0dhiPk6ulkX6xrKKZlrpeNkvkWVGu8cY4M2HnLflcS63pW1sbimhBpSuJBH67OSTbfoGXLOAvNR+Hpk5ttR54Qva98rJoAjzchDLvcqA7JqsgW8OixFW8819UA1DQa2CXcjW2uWnIp8caGYaR1dyNqsAHhXNO4+Jkdz7l+SvkeuOAP0nqO3nA/APOPerO1G/RtTzVWsJ8KsjGtLaBzIiHwjcyxLY3oLeIR4x5d+5p369CyA5HA44EMf+tDm/E/91E/hz/7ZP4vv/u7vxr/5b/6bAIA/9+f+HL7qq74Kf/Nv/k38y//yv7zrOacQ1ReQjBIgJnWoTrvANilcPQ7qhqWBCQrlOpPF2QIlfOGm5E8ykhaADXNDi7cWIauJosU21PJM2rO4CYkAKQRGqFxloNKiPyvQ1SQ8HIyQZiTGWHxIQkQx1aKoWkmym7UjsZpCkORWmudoZlhbzVd7rjBwpwpMJHF2ffXjkJBEG4kgY3Z6ybzofHGqDP1Qnydl7I6oBfU0ln0D8ok03xJNGKBtbI0TNr8usmW31OY52dyLdmOs5/T2te32WDNq4mBEWz80jRc/v2HMDGk1CRDScSsMOJcsIALYGg1gu1nKLhoxWqNzTV2DMamBEX4sNXQzdVrP2asZWFw11WpApVHN4pI2dwgqQ7t+kMkVkCT5YAk/OeOrZeQetZ/Mgay5sIfs+dQD8rZmZGHadw4Eqd89+ysDqgB1T6JjTmR2BVQfxSLQKGCl+vssropnil+J0xlu63x67vlgxCor+1cKgOi51j6iBR7h7e0F27DWDbm+3OlOe+lZAMkP//AP48u+7Mvw1ltv4SMf+Qi+8zu/E1/+5V+OH/iBH8DxeMQ3fMM3lLJf+ZVfiS//8i/HJz/5yd2ABOioHAfn+fUn1IWVS92ItE3LNtnQpcrb8svmGgchXGpuhenlDn08KlYpJyR3Pcc1CVIafxOmGVl8Wu6dazOuW7J0bcGj8wV0ZC0IkBYz0pSsITHVxxAQAvDeGopGhBzWX58C05wkzch7zDeEm2itIeLdp5X5kGxNsVbySSIgwrhZHlYVSNnknXNJIBlisdG2pORkckfSwMVvQzFLbZbss3o8L83SQLZ2Td5Tx2zqVzm2NYZO05qo9UOfX2Wj5BI+Jt0rITP3MGEZvNFzObgHuD/BAi2iUFsX14ZUIMKj21GdEmxYfiOWEzuwZWQ1wKL6jWjAbGNaZZfh5TTq5V+S9cyYSunPqOc1cM2FPxZg5pJ/GYKZ/mrarFGUJM/WzZEvCX1CLfxrKSO+MznH07pyBDJ3rEvj0+3bc2vM7WIO7iMLgjpvq+CsN5dH5/mcISuAx4Mv36MkOmQO7LQWtmZap
C1JJlcP3qsJeC1eOimO0kXn2PsHwPuItaw5NYKgz9dJG03JCul6j2ainO0BJj0tuLaGyraQkEcbp/L7Ujkzgmlvfdnp/3KnOxFdHZB83dd9Hf78n//z+MW/+Bfjx3/8x/Ed3/Ed+Ff/1X8Vf/fv/l18+tOfxuPjI77wC7+wuedLv/RL8elPf9qs8/Xr13j9+nX5/ZnPfAbAVmottSKcRpI0AiKyrCUZPZc0yVEvOhY3ywJaplXmCZHlAFviumV02DWhFdHASMM0lfvaZ8SYympdThqRGAmcJAd2bqZFYIQyanMwwh3YCYyM/EW4tmQDSGLEegobIMJNtojWEOF8AiLwQAiAC6n9ljkG35z595aaLl5WPpOkcOWb+mhuetr9PVMtDYDLc+l+L849w+ajOMmn8/xZNnAzq3VbhtR+uMxdwdeBrdZS5pApZQVzOxtRCxhL1Xu5RfYAEQuEzIwt1U9kEozMmGztNSWSY1fzbZBlzes7tFV7NVwhS+AnKsa65rL5uCeNJyogwjAj6u2VLWDZghLNAoHXKX9LrQitgUXwxgD74riJIzIY8UxbUgVkpBmRYMQUjrlqzSb7cHEOwWXNlJ/TOvUYdkma6eK52mtA0ZAIXoWHZS9Cq7DV0vbqGPmAjdaPN5li3O7rL/HMO/Xp6oDk1/yaX1OOf9kv+2X4uq/7OnzFV3wF/tv/9r/F22+/fVad3/md34nv+I7v2JzfYxPe3KdEDyFqJaCW5GdfLABLwzKThVYCEe8qo6IlLOyZZQEkYdbbqW2qMxsssam00MeIlEcEKBFmatl0vgdEInLUrAwwXmfAwMHI61PyIXnvRL4kNZJWCzqqtsQKdkAJ9GKIZaGiDYnOcXLOpcWM9bHz+TsIqSD/xlwqyCWxD8LkR5W0Ktme0z1Q/TMsRk6aII4k1lvQUc24+HUpvZMMS4/525h4EeOjSPpmyWeJcT1BTPhWQo5TKNJ06p/FL/lvW68UUPBvLOvlTrB8/tY22mDEop4mTGOiOBixgMgsCBlKhA3zOXms/baucS3Hdqwl7XH12Rv7DlZJPWmy2tDpJCQg6pmuXINqWOzt86z+DpF9O7YenGsSRGTlC7FAOtC3HtCAu2YFIDWHnu1zo/mfBFvtntaKTayW76emf2PrZM67OygMOvf1u4aZ3Hn36ef5VtZf+2t5QAci0qn+TnfaQ88e9vcLv/AL8Yt+0S/C3//7fx+/6lf9Kjw9PeEnf/InGy3JT/zET6g+J0R/5I/8EXz84x8vvz/zmc/gwx/+MJ5OK9xphRZuU1LVelSTFYpdTyTNMXqLR1/V3T5T1t8L1QvkhINZikrH3CSLm2ql+7zqC7JpX+wnW+ImWs1voR3RKADwMSKQ5JqeSZm6WVcQCCmgRAARAiEaIDkJbQhdI43HKcTspN5GyCKQ8u5TBSjFcf0UkiYka0mAVkMiyfsESsx+FN+ZoqTRMX1XAiP0raXJljr0mMPqjK2+ZYY4a3tvm2po275Oo01YAy8clNRHJgDHN3mBC5v76/la4GFJG+bj4rF68TwG1nqSeakNkUIMLlQAWjByDc2IpBk/kVVhXHvO6JJm7cJngMdehoz6+xS2GmsOHlPdthZQru0AtkICRSigraVkwsN/j8ALlaf8ODL8NR8TG3AXt5ovCUZmcsNIE09tbktTt9J+A6S3ZbaAneocCd9ofvCIY3soROSw8+3qRCOC9hsqy6mJLBdiA/h4VKnNfeY3GLd3LzjZA0TO6TugXYfWoGtaU3l77bgFLQlFGXzpZ96pT88OSH76p38aP/IjP4Lf/tt/O776q78aDw8P+MQnPoGPfvSjAIBPfepT+NEf/VF85CMfMet49eoVXr16tTnPNyEe5YpoDqCc4dh4poSiVxeXpHIGVYKR4iArjjUg4rM5UXkWuyY3qpkFLLHBgzLFjtQZ56vvCEXUijExGzxM7xrrAk/lKOHhGiLb1LWwtK3DOpd8A0LtzI6taFqcvHcNGHF58
yzOl5I59dXpud2U2TdTwAj93buGzQAM2Q8zoFubJ2TCNZJMj+aYpWG5ZG7yNnlNW8JyT5Bfibxv5hn0txwbYITTtRzYAR2MSNKlnC1DsfVv2L95zjBXM2Usoc7I/EqCE41x7mkwudYSsDVWXMscMiKm39zxXL83ljosGpUpoZsVSTXRzLoh5622f0pt52g+SkBiJX3VtId0PEMpP0gs9UfE4sxeTIWNeyPT0q/BDqfb6/9r0WiNPaceTr3+1DTQ5hoyCUbudKdL6OqA5A/8gT+Ab/qmb8JXfMVX4H/5X/4XfNu3fRuWZcFv+S2/BV/wBV+A3/W7fhc+/vGP44u+6IvwwQ9+EN/yLd+Cj3zkI2c7tD+dyL513UxK7ohrmWnpKmevlp1t0wxJaZw03ZGakeq4XhfwRqMitBrlOdmnoVlQwvnMEFUTAHiXgIPLZjCNdZaizo6oAASoIGTNQESaYB2zdOokNSUsxC85qdNYWEPAuzmcLzm112u6AztpRzQie2KSWjvninmWzyEp/cHjcdFyxwgH9lyu5idg8falOQ89dwco4QCjPd5GFKNrPU3AKW/4EiC0c6avKeFmIIC9cV6yGRPJ+5vf5T2prxO8TpLtJRchRrD2iWXWWY4FIyUDUKRrc5qRWbIyqfd8RVrTLTTl+L38vnPo2uYo1ph4vPC5Pa2WBiLJudxH0OIHhJC+Z/5NkZmA2reaCRYPBVzIO1BqWLpOpqS07oWIEsyjZ4qX6pBjQbw/MyFK13XtEweB1lohc41YfzkI4fOk5zsHZGm2q+DvWJBCQIqcRcAnVv/GqIf9lRqSIvCiyIxMEEbakaB8L97vgCLg86nPuZ9J9f1TX/Py9c8QfEjiY3NfaN/tec1M83gDNlspkufLgqi7D8mYrg5I/uE//If4Lb/lt+Cf/tN/ii/5ki/Bv/Kv/Cv4m3/zb+JLvuRLAAD/+X/+n8N7j49+9KN4/fo1vvEbvxF/6k/9qbOeRQvEHrMqq7yUZu+lSzdiIu/ahUSzOyeVtmVWoG2CbajIVnMyQxrDmPeHIp3SuiAyky0ORACU6FlkjpWc1Otf2nxr5vVtfhEORshfhP6Nsq4Duo+IJA5G+G8g+Y4QcSmglLxyPwOgtZdutVrbMUsS/r2SuZ4mZMa0ZvFuA0p42RltCN1vXb8W8TpHPjiJWPklz5NQQUOZN4vty9LMP0UjQk2ywMhe0lT+8lQvgtbIAZ3fNzp3TdKFQudpxvaW15hkaV7HKWTtSAgRS5MhPRco9VJ50b8aGBHX6HrLIMu/9TtrQER7do962hD6/pqGSoIY9a/oY2C77nEtMX8Mzb01JpNghDRNvXNpD8tml5Hmc+SfYgvci3ae1sQYyx5UzeKUecbK7yXp/N6zVNhLs2vJKGqcpHOAyJ3udCldHZB8z/d8T/f6W2+9hT/5J/8k/uSf/JMXP8uLxZNPivp7qxmxFs5UjoeS1Gfx2CwmNPeqzB5vj0vMEJfatvbJdYMDdKaVM02jjYiDEc10q6afkNxOLuNdUouDb6L6M+nsGpj5lQAhEpCQVkRLdEjg43XWbpC/CNd+0LUeEOEMgXOugBLvHQK20jW65ryrZlqHmhH47cfqI/Lq4PH241L8RnhEGdKISIaHtGEacYkoMSD1dzVbo+ut8/42chzXjIzMtuaInIpriDELeIzOWccWzfKhNYRwrju3MTELyFpIPh/qXHrQxoLURBogRJadChLR+RbykmRAJRDpaUa0oCDvN/jQtE+A0t87GSxLWtw+o22DpilZM1PsHYAcTMJ7bIJ4HJX6VbMXAihHxiDncyRp1jQi2vdt2jkIy9z0ge+bR86c5+ADgApA2ut2/WnbjnmuZnVHSH0aXMQamXY5OhxRfTxa8KMLTNIz2r1I+o5IIFjaBb0fSeDn2R7cjNGg9+ElQRNm/JZG91skx8+ekN53cHKnc+nZfUiemySjRRLpkdaEOzbyulqnyDBVTm+XdJj3mwlLjOrq2wXPB
xRwsifSUK9sddJrf1vlUmFi4upmuTiHsI4XQs708EWfpFA9QFJNtrZAhAAHBSTgWhEZSUtLfriyTYeTc64yq94VPxKuAeFABEjBBMg0iwDJO4+LarJFWYh5aEtNIjjTp+VbiQSQEoykMtv3P4nr/O+5ZAkEuJZFu8f6bTE48ljSiPmnc2uM8MsWwD9Arh3jjyPfwwIrbZlhtblt+nndpltnVDUgQuX5X3l8Kc0yuPJbA2MTOK2ua0fE0sy2vHdVml6Wx5RML1FsBAtHQ1BDoPNIfymUeV77iKx1+hp2/FxrPmsqx+8FbC1hW8aut/fNSm6n3NfUz94jgxDHhHBbEGkZO/A9UO5J/DeVSfV3u2NDHJik9953/wwtcCrw3JQbPHs2sp5cN+TxLVEM70PY3xvtq5ekmwckgD1BpNNePV9DDm7rqmWuRb26CvPo0UhiaSE4ZFDywCS2tDFyCUkIaQHuqYA1MKJtYjISSyPdEeCkeRde78Siz+2hyU+Em2nJvCKtGdZWCyLD+s5QySfC2gwATsRGLtoR70qUMwk82n++Nd3K9ul7JeRSKkf9FeLWtGAr7e4n+RuBEWsuaeAigY/+e/XoXDOuEBPDQ+CnibvfAeiNCaMwcdzbFo2pusRhvTd0LSDC77PAyB4gsofRmO2vkSakZ9YzMn/jTZiJUjZj/ta7j4BJIhp36de6Vmm9pPb7kcCGtMBkqmo3hPqGhBFavZt7OmNRi+g1U8+sBmQPAOFUvkWR0gGln/O8Jo2Jd26jrfLOYV237d741oh9CeC/UX7PkNaX1wDJPaCprbkSYJzj0nFJkIsbcCG50xtKNw1IgsF8ShDSONZ5v8mqTHRSNmgNuMxu1hKIyHoojCXVd1xFWF6fIkt5h6RFCSg27dyR0sr5oVG7iell5OJzBEviNVhspI00D3FpaUPW0GpGyJREmmCR0zpdo3NSK/BkmGwRLaQN8clxnLQgHkmKwbUiNdu9Y7kLWs3HIZtsAcDbj4ditsWzsXNTLUDfqKzs2txMo/7Gpp9OrF+kIztQx7fsn/IsAUCa7xr6jHoax1vTLVlG1r3HXIc2XwIdRWOngBK6rxf1aPacpDOxk0p7wUe51hkr6Xq9zwIiFiDdk4zw4HWhDyfNpwAYAxCuRaRzPBysd6yuUjeaujWynMuliQ6/JrsksMXzxLQge8YGCV7WEPG5Yyi+cbSmAW1kPkCaF/cYVVYO27nb+G/Mahz99jzX8PZ8pbR+kc1f2Vit52rhTRRCZSxQ4BerHfxZqdoWmNA1DhZHRO0J8TJBhEVcqytJNR9joHWGuqZbhnnWzO83me5hf99MumlAIolvpFp0IE69zZPfZxFlsO1dn3mexeiFmP578PX34tLC6R2qLbNPDn+Lm1fZS+fJEdEzRrRVc0vNSNWGlOsNMNGZ7MpY20x2ebeB1J+o2PzKvlc2bqn94BoRNb6++Cc3xlmJciO1I+Yp95OM8iLfXfbTiAnVfmsOrvRbMqKy/Mx77vUXmSFrPo1MH68JMi4lyXgEgymgww1zFTijNQ9GNCAix4UmVNES6pl/GRjRpOtUPWkhtXMciPDEsU39nQ9K65HPa2YS8iALWyLITG/KrHVzbcyUav4gIdZcSiSAAWqfSuEZXdNIswwYCRSITF+bjibkHK0VRRAEbFBu72f1xlBM5Kr2JGRgkuZ8racX8YyIa0ZGJHmENJ7s8nvXGFmXpYFRg1VckOAUGGtEbgl83Ol26PMKkPCETlZ0oJQwLy8mQvvRY+CAuvA+Hnyud+lOzBEIkiFRV08SIg8sQI2slJnnvGkeUQGIdKLcS1MgZqdWhC/qJGXi0n0ySZA+IgREpOnV06mG8eXmWcAc8w3oDO+i+AfoWrUKRICkWeMaEtK6EeNQQItLYORhsSP30Petx7kvGbNCfZXeqe0v0iDJvpL9omWpt/pK6xPZlxojOsP0DLUkgiHiTEQK4137h1PNeO2K9JpH2ZHmXPJZIY7Nfmakn
6oUU+lbeWpGG8Lv00BI8zu284PXo4H5WWmn/MY0FgBs1ttmDgkgskcbsgUp9X4eCj1dc2YI9KbfFL+2owt4gCtaWsABXhe0WH1Ep2Y1AvJ7cW0I/14EUniSSEl2pnVFaCAA+ihogKUR0c5zrbJFRdMPJGARK8NtR4qjed2+F9baPgohLLUnM7SZWyFuzo1o5lkzwHANrbUEtaOnLan3tr9He/w1wMcdoNzpGnTTgGQNEdI4xMoy22o9trkTdH+SmnDRAhecWdMYYOs+aYdPjANdp0hb3JckVR+LuRYwt1AuyvrX03jMqhalPTSd42YOmslRC0gqEAGgMtfcPAuA6bhNx9rf0hcaoykYLGkiQf8asOGr+R+XYMpM7DOO6xajQ/2VzmHTZ9Y/3icjMNLbSOR4bucQzGvVZKAfaasHRABdWlt9vND0mU7Kjs4kvZY5l++0y/qOkmEA6rejnDIazWhD+K17gAj9tYAI/47aONDOWd/foo30XJkTUhsiE8HS9yBmkwMTyt3ENSWL12LkJaKWp/UolvUoucn5rDVF0ZbIdVJGMut9vxHJb0d9y/cwOedmGL+RdYAkyyxrc00BIz1zOrqneWem8W39uMQ4V/pY03QcAwEr0njl5wcl0awyr3sCBK2r9wYOAM7T/kpQOtK+EI2CHMwCq9l94VYphvTvpZ95pz7dNCAZMf0a1cmUQEmPaSJbeOs5vKy1eVukSRWpDh4G2GfsdFojsGQg4VEcJ2coaIxdZ2GaWvg6dtZcykXgwwIiXMoPtH4impQf0JkseUw0wyz1tCLcJKv6jqRxQf4jDUhx2wRrfBMvfaaq2fV+LCZaUe+zeq7vG7KHAbX6TZsvezQjszRyBrVMElZmzx9Y2Ooa9YZ+a23Ov4Niyjds79y84W1oz10fiNDfGSCijwc7UqDULPfM7xYDjGzNuHQwQgKVUk+jIQGbcwmI0NDpaim8SxrSNdRoWRmIBBeTf56IdmVpLVMfbJ+1fbbOIJJARgoPat39gSXNsqyodqW8oRnpaUXS7/o8rzD7Pd+eQqWfqlaEa6GaogKMUB/zOb54FM3nGtv5HRgYrtEhqRlz+98eZ25JvTVvlLSQ1zELAHpgpK9RuRxg7Gnnne5k0U0DkoeDx+Hgd0zYdpFfPG2kFZho4EMu7ro2ZZvbYdQGs4wnZj79TrcEUJhDvnibuSuusDhoVWgSQYuRBlpAIjUdbeLCrX/IDBCZlRxqoLUHQFoHdt+ce3UQUbWyWRawNUXRnt30Z0cSaAERXevROvtT3bP5RuR5yWTOgI4KrBOKngUhPdMRYG5z1aT2R9KmZAkq1U+MMUlXi2lX4cgYuvBbhswiC5RoUdN65j/aPOplVz9XG9KeC5vrktawsm9audeeoMYC6FzC3tOKcPBBc+zBexOIeJdyCHlXgQmnGMk8LzHESdsSSsql4IA11oAJPGwvgRECInxt6/WbRXz+fu5pbeYxaWBH1AiyDMGadgy0Wit5DtCBCJXZBhPQv92GKiIAz0zvvWsCBVBX9vyiAIg5LjRxoW2bfIdZmtWKnANArHKzz9TKzeQM2Usz99b99OzHvBjdM7W/mXTTgKRHs4g9lQnq4n8oE6y/0NNzLHOx0fNJqkVtXkNk+332H2mszJikfYemZLpNxsQZRfqR0iwe4Uf6f2jMtAVGzgEhPSaJ/tIxAQ3KC8OvczCyASuLZ5JCW5qo0ajvyvEAjKS6QvNb9o0GNrR+mdGQvCT1gMjonTiAklGGwkp+C3RGYwIqE8OjeQHnO8DLby3bboOVtp5ZR3ULjO4FIrKtBDppzeTrn0Vcor7RjFhz1c0zcrzYDHCk4CAx1mfInCGaX480oaT5We7Zsf7L+Uw5leja4wFToGQPWRHOzqGXWiL4+NfWgMUroYtT4hJ2goBQFTQQzWortu16cxhMTUO3B4z0BE5v0nve6fObbhqQzJlmzRGX6nJ/gRFjSxOZGO5DlrJbDqOSKdCAzdMplGcXS
Z2QMnLqbQyXriV9G/f2nOZIq/l9aCBEY6ZmgcgMMLHMsjgI0XxDeJZ1SxsyowmR7QxRB3DUj1q/bMMb6w7sRCfxm7fRAiWjvpvWeohyMxoWwAYhs8y3RVK4wIEn+SGQBpLMPbxzWIrknJtxddadgSkjf0cNiFzDSd3uKx18zDLUcs0js1c6f46pUJk7TMJe2hKB5Bvgyu8UaTAWhtO7BKSddwWUhDjHLIeYZPQE/IuDe6yBOGp+JOZvkstb4bNnicJ0ryHipz731ETXOpRxqoMSTfNB6xUdb7TABhg5N49OiNITM58PlEDS0JKcQaPw1VL4kL5nHWNHxLJ/UtuJePCLS2h2jRuRFVWvnBuYb/IyWh3WWL0W8HgThFkjuof9fTPppgEJp2tLd+WCvrmeHfJIsiyBBY+GwjUf1uJAWhKNVkQ8Lh4IDivixjaeK0k0Z71zHPE2bTAid1iMkgQWEojwcrK8Vl9tx34wop0j0EnH9O/xsGwAKY+YZdnBz+Q9SMdo+k3TJln9oo0h69yspk7TJI40glaZa5K2ocpxM2uOBgCrYNoI9CcKCfSvye+kMMCIGzMunhDPctgFxpvPLBg5xzfkGkBEGxPaOrd4XbhCpM0LCUb2EGk2iHjktBjRCMI9dLOtvRTYXJVaSwDqWJyZH3JN5OaWOHDtU9VGjYQGdLz5zcCIZaIFbMf06DUIfJR3iv3w2ntoj9+DOvclWtpoTdBoPoGX0/rMkBRgAVswot53BhjZQ6OxfW7Ezzvd6aYByeI9nN86hY+Y1BlJRg/gVOfKCkr4fVySyLUlmjZE/ubAhOpavMMTi71rtQfQbVB7C3svNObm/ICZkRtzu2nbZlnAGIjM/Ab6GzbXirQgpA3dy7UiZJa1+BrRh4CIpq2Sfcil4SHaQETrF6kl0vpS9v/MhqP1kcVIWRomAJt+rGVsOxD+nDJvOz4hMyA3XQ/DdyftF4DmW/NxEKLLviDE6KZ5JzUm3s05xs5EUNPM9Kx3782X9p7Q/Nb6VJ7Xro1IM91a2BooQy3zc4GBixr+Oj+fljuSdrsa4jyE7OweaE5GhJx4NMLBIcK5XJPyOiEm8BIBHNfQaD/o73trwGmNeO+0IkTgvVNAiBFPa2h84Oi9z1mb6lzw+MBbhzImqSyNV00bwn9L7Z+mGZn1F9H6qnyj0CYeTeSYqoTqiFjXbOZIz6LxQGMw1nnAzzfv5tKzkptPK2jraXqJ+PpSIlaWCAbW88zqVJJshLWOahHCtOuynnQtn1PXyfZ+C4y8BBC5050upZsGJI+LR8iLr8bcAq1dswVGNMmfRpLxp9C8AIqqWJM4y+dIACXLcOLRuOgd1LZ1FgvzfYx7eu2X7eLnWwZqmy2d/6Y6NCZpBCz3AhEAjUM6/da0IhKIyBwimtO6pBArWKX2EgPai5o1Yrhl/1l/NWmtBMozGg9L+gpswYhVj/Xc3hyQ70THMhIbHVv9Yr3L4h2eTpXho2/OBQDp+0vmuY3cAwy0JDtNs2a1jbJ/0t+gnrfOyd8jjZpljpXq0IMYrCElHSy/VXCCRvNEiQULA0eMq6v5RQicBOewxpDNuPSoW0QM6uT5WMHHMSTzjffWUP6e1ohjCCVp65FpMdpx2Pa/JA7Oe+PyA289NL9H5YHtnnAOEAHG5lnUDO/6oIQnKUz31ahWWlTI3pDzhHNc+yyKygXYa4i6znBQAnSBiaRe//BH8+HP28S/2R6/FF73HjOtHhCZBSazGvEZbfqbSDFExCuAtL3PvFOfbhuQHDzCoSYn5BOBJ0gEtkm7rAWjyyixhFvnhOeTYGQGRFAZnujRKn8OwLCkdpKsjOj8WJNWa4CkZ2qzV5Izw0hzJlqeJwmk/Ndmkq5ghIgea+WY4Exns7EwZ1h6X/o30i7RNf531DcSlFh9Zt0vj6WNulW+RyMwQmXo73a86P2hCSSIZCJHAqhAwBpqW0g71iQpLUwLCx3K8qBoGaB52zgQoTKaZHP0TnvBi
DWvRkDEWhtPyvk6prbP5VqSylyicT5Y+LlMlbFFA1A4OEEu8xAdjkjRso6IeIgu85qxgJPN+8VYEiMSGDmGkP/Gksm9AJfQzslR/7f9wzXbuuaQhCFy/nCfEk7aOib/Wkz0nuhSksrrse9bx7sr1/j36/lbAfQt9HUgVcGAQ3DNfTP76B6ytBiWxYDsYw7cmvsn1l25FvbAiHr/AIzs3Ve19t0C0LjTbdNNA5IvfOcB4eEBjwePp1PAT793NCWlfFE4KMxUogDKPSLNpujexVcJjSRNEk1Eplt7wIhs97mLykjS1itnndfs0meYxBFjPcOkjqQ1msRwpBlZMiPa04zwLMCcuDlCCz4qs0mMzTG00lViuFuGpy/1H/WZZSY4u6H0wlz3mKD0dzs59oxbbbzwfqFjaTbDNSXaM+WYeH0KLGpayPMrjQdyNl68w3qIqrSZhxTV7IJ0TUn9PTLLskwYZ+ed1Z9a31jn6Lw6bhofHFoPfPOsNcQSwtw39vuugBKTycohz3sO197xcMHphofFF0fqnhaLhyVfM2B6fQpZa5I0I8e1akbefVobJ3QNFMt+A+ScawHG42Ep0fu+4O2HEiwj3Q989unUPMOah+W4E0FrxjxrOmkegcSVGPiII2uTd+n7Ls7hpGRX71FpW5EXRAAs2ESMWODY/GPjerL9qVoKiKBc7GhNeD9qz+v5pMysgz0gouUZsXzMesc96glsZ0HJJaD3xSi8vIZktz3gz0C6aUDy4B3iQU/WNaKZe7hWYisRtJniEYg4B1xcIuHQmLNZpsQyRdAWO+vvXPjReTCkUe87auZu3Da7d2/RluxYZKV2JGw2lnlndTrm57Xjc8mKimQBmT440R1vNerNkREY0aXU276dpVbSnYQb3Kl4DVyjAtU5VtIoGt0seNfAyLWAyGw/yW+lMSjpeHtfWwbwCzFRbmqDXhWw1yS8i8AJEYeFtDGB5cNI5Y+IBaSQ5RBJ5Y8hFil9yS8SajQtLWGh5cMlSdsLLI1uEnj4yuT6uLm/95doFEHrHDAiQaNn5qgNA04HYhycEzKbm2p5V81gqT1JI9f6TO51ptf8m4BUrxUOmfeFeq9ox+x7a0CEP2/GV+Qa+8SIv5kFJXe60zl004Dk7YcFeFiwOIfFr3g6HepGYTgcJqldXcx52Fe6rhGBE7mxaBuDZLLot5RczywUmqRCu2YBB+u9WunmPjDQY2wuBSG9PtmjGaG/muM1/0fl6B+ZavFwvpy0nAk8vGvVhiTzj/ROKA6xNB7bpJBbiavF7MxuLjPJPEcmVzMmWVIjYoEXmjPSRKgXHtsCItR/ADbJNi1Aon3rxbuyFtQ2hrIePGErlKB/x2BrR0p9HaaB/+3NFQ2E9OaUlZx1zzgaMSWcFrEGFZNEnzSPpb1lp0l9GwaArpd1WssuroXg7mkF6Nsc11CYwWNuv/QXoWMLFKf2btdXybwtzTiqQRXoHY/lW7f1yHqbv0pAkz1gRAMiWwdrcQ8b8xROl7eB1r0mczvXnnTaQ+fJ38gvtX8ApHxcSADnwXMtgmijERVy9K7l/NqX9C+Gdo+DNf4+o2W7B0KacxMaEW09UJ/ZmefSxNXaP/asFXe604huGpC887AgMDDx9Jg2kEemNZGmHBrJTdW0ybLuH0jYexI0Tucw6zMhPTXaAwZmz8/4hpzLYFvUAyncNK9lSKt2pAl64GpYXzo90o7IXBMERkKsvgJHYWo0m5MFOL9/emCiZ8YltUiz9c/Q7JiUjLgEHVswp2tNZN1Ssr8BpWLNWHzSlqx+G51ttg80kAXoc6UeXw5CLgH+s2Xk2ibbdgqxmHbVciGb+xjPnHDM1dqhmS1JKbfm68Pz/zytrTklN9GipIXat9ibDFcThqwxAkqfmIBEmGela/q78vKcRmCkBwqbe1i7eXMJdHrHNSSVgeUaCtk+0wdmycFk6HfRHLjmnfzSfuO9GdCtsiUkuAA81PeaBmXWlGw2xxf/a50bm
WhysgQ40v+WztP6qa2pt0IhRrgdY+Faz7xTn24akCTJS3JADUsN1UoMxbtPawNOZpjitOGseDwsTRkNWPDJqP3WnjVLMxqUnj35Oc/eC0iAvoTbatc5bRuR3ODtay0YkYypRlwNrzlghthqRuh3iabFGJmalZmYnzY7s9VfGtOjmVuNJKr2Odvc6rk2GmtMWNqiXmjo3vi35un2ueN1gEsOe8BOa8dsvpAZINKLcieP956TpL2btj617+Gb608MmMjxN1o3LMd73q6Ro7c81p6pA14d/FqaqBH12qOZp2n3npPnY8QU97QisyHk6XppH4tkVcEHXSOEUrUK2ntxwRDdwnNtEUCpKgh2zdWwwaqvyMT7EC0KoGn8SRqtFpqyI5+KEQg8F4xce8/lmmK5BvBzd8b7TufSTQOSh8VjObSM1HElk47MeL53yr9bkw4+mV7nzVL6i1C9i/fqRJRSeE495hKwbfct6cO2nA5Ges+U5S0aRd4Z1XkJg0Q0YoJHDLZlqsXvkc/QNo7i/Eh27/IdmOkHgBImVJp+0Bikb0RZ1tvQoTazOdsfvX6pf7f+M5rGqPdc7dvtdZyWjDg/J02wtlolW5ukPcdiRukdJeNLDPTTad302xP6YEQ+Q2/jnD+IBWZG79v73Wur9k5Wf0mqfRaw+Dpun1DDbvcyuY/y7sy8Tw9sy/OyPi1UuQRG1piz2kHatVOIJbCG1k6+DtB5TciiaUZSGbUZU9STzANbhlgeS5Jzg5s3yuSyYY14YJhfhlTnJrIPwhzqAIpw6BB8FQoBSXMSYjX/gk9rMvdN6SUQlERh/UckHeUt066m7gH4m10b+Dl5rP2+BkkNyXM84zkoxvch7O8dqA3ppgEJkXdkZuPwsKTNL0W02oZSBLabbU/al6ikd2rqWEPcSA2eg3p1jwDIbJvOASFamVkwci3aK70faUOIQhw7ImqhfZs8I0GXqGpJ/CRDbPXr7PuqzAzTglhAxNKwyLZoDCvVcQp9MG2dt5i9c6XRs8Tfx/LxsjZea27agGQ+RK/1/nv71KKZa721UtM8rc23r/lJZteSCkjXzbnR+8v21jG+/Y7au2rP499Aa0uvPVpfyXsk4Om9T4+kI7blsM2va3Vszqnl7L3SnBssBPMaUIJCLG4cdZJrTvjxGmMTdr11eq9RwDTaA0ZkuTK+Y/tcejagm3bx76P19YwmqjcH9oIR6Xcqc+Zoax6B65F/yZ3udA7dPCDxLklRHgC8lc2z/oXHQwmnCLRMANkF02/5d1U22crABfa7FUlZicNmaC/D1WNqetITva7rABF57tx6zyXOZGumWNaiqZ2niC0hosmZoGXTpVcih1gK6fp0ChsTEG6HzscglwzTOaud2gagg49tpmc6p+USsfpJMgDy/bW/RDzMtX5fP9mj1Tc9BnB0fts/Tu1b+cxUrs/Yau84c6zN1T33W7+tc3tIAx8W2KRQyqQp4X0mQXH7jL6pmhwHvT7oCaDkNR5kRHvn0V/t+dozyWyY5h3N/e07hzJfKSiArFNjVC2/BisS3IgZBtDVisyOQd4HdQ7RN2jz+/B1tmhJfD8JbfEL4ZqTFcwpPrc35rJZS2K1d2auqOBajSqm1BUqYNJolGXdWi/m14dR4Js2Z44maOCgRLumPfdNpHtixDeTbh6QACjZW1Ose+R47ikDc9oMUq6BwgQKqZ/GfJAZlzxfiTbR/SBkBrhYm/4eCeseoGOVOefevYu8Vee5khebYa87h/YNki1yMingNs3WO4QINfu6DBXKI/RIMKyZgWg06g/JdPfASA+I8HwbkjaOoR0zEWLAbFCyDTahMaS9+i0aaUVntGRyA9bGuCX1ts715ig/t2c+zYCSGZLAQ7turUn8Hvk+sq9Xw5tdfnttfuwBBFrb5PGTcc8MMzjzbF6OM3H8XbgmKFH1v5Fz01oDzIR+ff6zubfcMxlOVnt37gRtlQHS+9BaC1+FP4tauoIRyzE/XXflmB5b5
q8S+WpTzyTI1QDXBrSzZ2naE+0dRu2wwMjMO/SAiPY+dA/tl729R66Td7rTJXTTgOT1KWAhpnCziCTTrXcelxJ5a/HV4Z02OppMGihJ9bjG/llKVevz7LwW8ncv67n2u54f253PgJHeRnotYPKSUhLevyOwt2fRXGMsScDkeQAbIEL9T5G0eIQewE6GCMwxVtair0v9/Wasvjr4bTkjmZoFSigxGY9cs7hobvgzYSetfrB8Cvg7z5ClIaG/1ry1wIhsf++cfJ9RudEc2jufX4Is5nSPWYfGcGlApAcQejRak632WO3TrlkCLHkP155yJp7G4uOBxl2ejEzjVOpXfBosx3BZxrx25r5khfKW37+ZT+TMHlLkrXMc9QE0YGNxDmEQGIDT7FiyAIpcj1SwMuk3MAIi/HhPQAsNjMwD6QpK6D6pJek5ut/p5eif/bN/hm/5lm/BX/7Lfxnee3z0ox/Ff/Ff/Bf4wAc+YN7zr//r/zr+xt/4G8253/N7fg/+9J/+08/dXJNuH5Aw9XdgCxOAlGjqIEHHKZURG55keIhI3U5qdJIyA1IVXTclK2JRT6I6WgxH5i3874wJUO/aOYBlb5kZ2iN10fq6ZTZ1x9ZmMV1ayR2RFYFGApE1bB3XCaBoQORcMCIBs3xfCUZUB39Xc64AbXjjXpdTeE0yh+CZkyUwWRWmdDS2tDGsMQSSCdTOyXvkNe2cBLS8zpnvNZuI8NxrbwrNMCF8nI6c2fk99NeaJ+q5CcbPzHsxkOiXtmlhckMsjter8FlYgmuiPBK9zoCE1gYAePsxZW1/daiCrRKZzNecLqPxPorU1bzP6H0n9oaewEGGjZVt5n4ua4wphO9AIAJUR3cuhPRk9ZUP+Keo5mdtclr6O7tXUrt769G5NLMe7I2sJ8HIzPee1YTIsvz8jGbu/aYQAPfC6+rAYu4i+m2/7bfhx3/8x/F93/d9OB6P+Hf+nX8H3/zN34zv/u7v7t73u3/378Yf/+N/vPx+5513nq+RE3TTgGSNUUirt5uTnDzEkI0AwEgKcuguQtUJfmbTNt9PWaQs4ETU075cAibmpSrPP8ktifa5PjxEJGEsoARVYwC0QITKa6DW+jeS+lrvuQeYyXtHYITnAaBHdJ9V2tqaIiw+5SMg80lO547/0ftpjIH1e+a4N36sdu7Rdo6AxvsJRPaOL2AMSvYwahrosMqNyhCjKpOYmqAlzAEaLeS3PL95Jmsv5bR5EoCkFZjpxMeZZOzPHSd773uu8Wg54IdAQiKR30RpB50Kofr1yWcQjRzGe6SCwMlzs88c7dlz7bwBVHCnq9AP/dAP4Xu/93vxP//P/zO+5mu+BgDwX/1X/xV+7a/9tfhP/pP/BF/2ZV9m3vvOO+/gQx/60Es1dUg3DUg+ezwBx2qTLMMXFo2J51mDF6wh4vHgSyhguajzzWErac+bh2CKpaMwaU1IYq1JFGaoF4pS1nVudnT72e8vCJlhks4po4FObkrAiZu8a2EY6Z/MXcC1IlYiP16X1mYLiPS0I5Q3Q9OMPC5+E3aTgxArSRknrhkBKLxmMpt4WCpoo36kdxiZbvF+sMpKIYEFTGV5eXxp8Anehtlj7feec3voHHBxaZ3We8xqH2Q92l8TpJBQSlyn3yNnUiNH49S9AOBY35ThL9b6DTCh94qx0aScmnJro/EEULT0RLNjeRTc5Nw5YQUGGBFfaxckIVCIpObYJjpMAhADEEY2BgiYZGFRiHWtkhptaofcP2fokiA2GvXWir3rzjmk7zP9cNnUtpHP0JtIMcYXD8P7XM/75Cc/iS/8wi8sYAQAvuEbvgHee/ytv/W38Ot//a837/2u7/ou/IW/8BfwoQ99CN/0Td+E/+A/+A/eVy3JTQOSNaS8I73ER4AuOV1DVLUccpJrzKwlFZT11esBPGpFratmhNbfb5ujwmrrTGjQay8Ye+rTFrT3E8jQ8y0tRE+KJjez1rxom9TPApO995fjbOZduamgB
pC98Bfh90swoj0yxK2T5uIzE0EaEx+BkBOdMW0k37zTO3lYDs4jmu8Pm3Hby2Dvs9vurykzz3uTaC/AGQERS0Aw0449/dQDE9eMeBNDbECJJDlentbQaHGkmSMH3LR/kLad1zMC+lY7JFn1zDLc54KS2q5WEw0wbXWoJnEhbtcl/ljqUz1UsfZcHYz0+omTBCUWv2Ddf8la0F9vdGbImj8jMDIirR/uZNNnPvOZ5verV6/w6tWrs+v79Kc/jZ/9s392c+5wOOCLvuiL8OlPf9q877f+1t+Kr/iKr8CXfdmX4Qd/8Afxh/7QH8KnPvUp/MW/+BfPbsuldNOA5BQC3juNmZpzJsiI6dAmt7URk7aEjrf1rk1Z7fmWpHA2rO9IWrtXkkn3WPXM3m89c1TPuVLgDfDofEN5nkgGRliD7rhuaUZ6oJG/Ww+M8IScUoOn/mb+IlwzoplsSbMTIg8uhXbiQgtKSGsyGleXmnJZAASYz63Se1YPiGhrwywwkXTO++65NnP9HLpkbZ0RUkjhDwe55S+qDxhnRokt2ws+LNMsTnyOSDDCw9WuIZagK4H5Ozrv4LLWUs7ZAlyaSI8hM+5V6LCn72fnn1wbrzlm1DYQ3xtc1r7WZIkhRjx4j7BGBBczeGtvp29FVR8zM35coxoBUa7DM1nNrTVMAjnLjHt2b+0JQXr3b7+h74KSHkkgYq2v0jLkTvP04Q9/uPn9bd/2bfj2b//2Tbk//If/MP7j//g/7tb1Qz/0Q2e345u/+ZvL8S/9pb8UP+fn/Bx8/dd/PX7kR34Ev+AX/IKz672EbhqQPJ0CYmb6ekyHZOKtcrw8kCUtoT1vAQ7JiCzeAcwkjG+k2vN6i5bGxM74ivQYKOudL6FzN6+ZzfWSjTH17/YcsHW47H0j2fcVfHBH9jbkL53jdcx8j9l3rmOrMjYb7Uj2GfFM+1EAi5EZ2QyxSQUzAFmZWWRYqw9OcVZFG/q118d7yJrvGgixNKTy+mh+PCcgmSWLSbTGynOAkBnqvete4YelpVYpr9c8aR4BBgImEqDMAJAZct6pfivrKSCGiKfXJ8QQcXhc4ILDITuy8/lR7lP2kzRu1/y3lpVM5B7/AapHC/HaAyvXIgKSyWSrnvflFUJeS+xEh9KPNEScBUas8TgS3tFvCU6s+2foEo1TahN9z7mx0AMi8vf7taZck2JI/176mQDwYz/2Y/jgBz9Yzlvakd//+38/fufv/J3dOn/+z//5+NCHPoR//I//cXP+dDrhn/2zf7bLP+Trvu7rAAB//+///TsgOYd++r0Vr5YkmZbmGZa0wQInfPEtzOkgFKEFZmaAhzw/I43SwMiexW6WEexpEHplr0HXqE/rSyuuOg9dOKpT28A4GOHfRwMfe8DIpcSZnMZEix1LR9IeGKHzXAKdipM5Rc0B4J0r0X40pkbOs3PeTfs9C0b435nnzwB5C4icIwTYOwf2AJS95a4hhZfPsxi8mXunSfgyETCR5lUxRDVqk0YSbFA9HIjQHDmuoSRfWzPX770DvINzCYw8LDoYsZhRKTip5/tWAjPjjIBOflO1nGTAZ8kKAV3ep/ijuRLdkHxIqk+J4fgu/PqOgYOT9Pdp3QqEemBkND5lP2j9cqmPycgMrreutuXmTa+oHuu35LGs+z8fAMtz0gc/+MEGkFj0JV/yJfiSL/mSYbmPfOQj+Mmf/En8wA/8AL76q78aAPBX/+pfRQihgIwZ+jt/5+8AAH7Oz/k50/dcm24akPxvn32Nt91jyROiOWFttRdBLUN/RyprLq2SZDGbvLzMvgvYkhWNRskORwzvNZidXv3n0gxjpH0bDVzY7WNxJTt1WM+nv/W45hPoaUakhsRq36xGZBs5q9WO8PFG4GLx/LjVlgBjMFKeT1mQs/SZiECId0i5Wzw2viRA7V8u5dXAwV7w3AMje8e8BJZ0TtOQPIdmpCeVHr3LJeBkpj5L0DIq06tfY+h6bZYMLX+m+ZeydhPDq
7Sxx7pxEEPHNGfI7Kq8dwCenk44HdcCSLx3eHh1wLJ4HB4XvP2wiDmsr1tarodR/2j1WKTPlyh+t2HsZ4Rnsm4ZAlinUIFIRpTeuQIyLK0twIFJqxGh9mpAZFY4ZM1HDZg8JyixNFbnAkVJo7VDe5dbBR8hxPch7O/zPO+rvuqr8Kt/9a/G7/7dvxt/+k//aRyPR3zsYx/Db/7Nv7lE2PpH/+gf4eu//uvx3/w3/w2+9mu/Fj/yIz+C7/7u78av/bW/Fl/8xV+MH/zBH8S3fuu34l/71/41/LJf9suepZ0zdNOA5OkU8Bgis7XlOjipxu5LG+hvb9HtbQqzUlEJPkZJpbRrzy1dn6E9EuYezW5u17g3tbUGGOBgxFrUbcn31nmdH5+jCbmkLzSq47qe49VfIxmZz1oSAh7nSsmvTZf24yiyzUgg0CszS3vBx7XBiPXMS8DXiImzntmrQ3uGnNvlr9DykdaEjnskgYicP/ydYkzaER5Zx7nkN5L8ufQEuxpJLe7s/BmVkeue1W+8vj3fTStjAaxCWXBSo/X134EDEfqtBQnYA0ZaoeWWj3gOJtzaT0egxDp3zrN75/YGBbnTy9J3fdd34WMf+xi+/uu/viRG/C//y/+yXD8ej/jUpz6Fz33ucwCAx8dHfP/3fz/+xJ/4E/jsZz+LD3/4w/joRz+KP/pH/+j79QoAbhyQ/NS7R5wORyw+hfV9+3Fhk7P6b0iSG9o7jzUUMC1W7z6tzWI2O+G1xU5qa2gj6tVpAaKZcvxZ/Pkzbd/L4M+0UbvvEtI2SiC955ztbWvPoUlatfb2QjBLMy2tzJ5xJGlWIinLaZG1enRptmeAMWrMuZ2oZaw8krPueBPeS6NvKsvyv1pSRm1eW4zNNcDWCGxY2gSrry5lIKRktjdPrN+9+zUGmM7LY0tQRMfad+LXFriiMQGq1sQiPh/4HNPasYaWIXbOYVkcloPH228/4PHgyz95L+8H7b3o+swaNysYIJIaRr5HcY0Jd6zfu79I0jQI/J0X73AkLapz0ML+ylDsdMz7cAREen4WWiTMS0HJrPaPiProJPpbzslrtWUP+Dhn/38TiMwpX/qZz0Vf9EVf1E2C+PN+3s9rhCMf/vCHIbO0vwl004Dk3acA/7SWxZ3+lo3H2OS0ibx4h0d27xpaZ2TtXn6/pFnm09KyzEg9tI2rV79276jsSAq5t93PuWBZ39r+/nWz6be5HQfXYFKb5J07NRWadHVGWq5RiP3M7D2SZlv0XOl8ShJhwGay+Hx9SdLAiHZt9H3lPc05BcD1vvm54GMGmIyetQdEWDS7blnS91EbZ++bpsmQ8RKIaGu3fHfyL/FLAiFvM0d2Kq+tE+esrbKOmXLn9V2rQegxxtq1EbjS9u5RBnptHR7l5drn/F99EGfpEq2C9q1ntCUz9Wo0Y441KxS7053OpZsGJBI8PJ22C4zUSgBQpVNyYpO2hdc5u9HSc2fKWHWey5hpjN8Mk37Oc85po7zvGpLbPQytDlDWqe8lmVJt0xvdpz6jo5U4h8mkZ43K18z0KHEzeV6Ens020NrElgz3grw4bzEh2jfcM8akKQgwN4d4mVnNSK8OjWYB5zlAZIZp2MsISdK+x14QuReEzLbNOtcDVrw9VhusftXK8lDfx2w7dHhYcHhYsBx88TH5wFuHRpjQG+cjQVqPXhbUt3su16DMEr2r5mcyOz7kPObndAHRZSGWrHHDz1v5OfYIC4ZriyHQnHnOrE/IHWTc6aXopgHJca0gZPEO//y9UwM8Fu+KE3kNr7hVl6cNJTQbLN37uacUwaS3uEkaMoLGwnFpndeQmM/SJRumlH6d+3xe10x5CdZm67C+fY+B1UDuiM7pD6kpseoJEnx43YyLJyLrmWapTsEl5C9A0beSo3urIWnNQ6rZlhwXpx0ML5Xv5QLQzo+lqPOar3MBeu/cJcej5/TIkmzvBSU9oG7ddw4jpdXR+5YzwEjrV40BJGHYGlOuE
ecdPpDNs95+XPBOFm7x++TYtkDJHtLKj/pXE+hIhrpXRgea28iX6XwLVnpAV2vrnnefBSF79lxLSyL7QPYhsA0DfC4okeZbvfZK6kXGOvfcy4Lf69Hnm8nW5wvdNCD5f/yej5Twad/0X38Si3f4v/2Of2nq3j/2//p76uInF93Fu+JbwrUlezb+3ma9Z8Htld3Tnp5UZXTvDBO/V7JzLbI2zp5T6F4Jbw+MzNwP9Bl9TueODf47xGQzr1FgmpAQq+kWByWX0uITI1J/7wMMe8mKbCPrnkl4uOf7AvvfYQQkRwzxnvv3Uk9IMvqG58wvTnsZNt5OLVCIxUzLOrRzVn9r+wQCEA4ePu8ZZJ719uNS7tGEUZf0FW9P77d2TQOasj1DJ3Sl3drYqee2DuPP8f7ynAQj11hreuuuvHaOtuSazP4l2pAR33GroORObx7dNCDh9NnXp93MwMHXJHJPJ7rSSmvfyRKup1PA54Sj+yzDoElZOc22e0aSx3/PSBJnwxL2HOreTxqBvdL37PvMhaBsaca3gJ+bZWYvNdVK77llKtoyGRgw07Bm8+TaEyRgIjUgHKD0whd655rM7SGm9oXoSuC78tzMyKW5lxgGAv+99x3RXqffHgCZucZpDwiw5u1eYDLz+xyy1huLEesxts8BSkZzhEuT9/THTD6bNUR87mlthFZU5vHg8YXvcAf2FDTl3brJmL4Ae/pqBoRcsk5rgh1ga1KlCfX4b60dqdw2f8r4m/fNwfZqQfp1jQFV79171A9PP3e/ts6N9vM9gOM5hIZvCoUY4SYFg9d85p369HkDSMIpYJXpuAckNxxaHP6z/9Mv3ZT92H///8ZrxUdF1qP9BvZJ/ai8VVfvORoYOXdh4YulpirubUbPITnZI1V6jrbMSNJHDOtL0hoilkUwUjEBBepCSjg2iq51bgx16UNCRIzcrKSdXwOuw2Tw3z1mbhZcztI5EvmXBCKyLjmPrHk/AiWyrmu0babds0KXmb633lsCk6QZqT4j2v3y2XvWq5mxOFOHpiXR3o+3Tcv3ZGlFNKZea9vMvE71zfl+PMfeswWlbQSunsZEUi+8/yztvfcORu70ptPnDyAJEU/HfsZaThTWl1PaPBa1/OPB49XBN+FdJfGNhy8W55j3nMO0AHr4RlnH7GLdaBg6G6ls97WYDo2svpdtksDAYq7Oeb48HjGum3F2Zv4PrS2y/0l6Wb4FgYGcBZn8RkJM/y3ebUAJNXf283mX6uM5HpZ8co0JlJS6/BY0Px48nk5Ji/N0Wss561vtlXhT31i/ZzQjWjvOHd8jxvecY6JLGBxJmlZ05p115m1uTdDmqnaf9oxzwNkYhGyTAqa/NfIe+Sku3pVgKG8/Hsp+Id9Fe35v7TwHfJyrZZHXNKCktXPGV2IGjFnXR219LpoDUXb+s5egWZ+qmXNEo/naE+Dc6U7n0ucNIIkxYjU0GBqtIeDdp+SkxqVYFtFms4bkSyI3Gslkyc2NylkJ17Tf8vlavfyZWrkZECOJM2Na2ZGz8fu5gVjP1SR5exjK0QJ8qTTdApBaW3ubhZReUhnyJQkighaV0TQlIZ4HSuh5/DnBxwSKjHftvb/si17SUOuec48l7QWyI0bgkuM9Tqoj0t5JClVGc2dWQvwcAotzpb8SeLTn3Ka/q0CqBf4A8PbjAT8rR9Ki+59OQdWsWyY3M5oGSXvG415QYIFLayzI3zOagJ5wT7v+nNJ7C0hr5fRr8lvbAMWqY+/79TLCX9pX5wDbWwAmd6f2N5M+bwDJ//gH/41d5Z9OIUthAxa/4I9/41d2yyfgUhfbJbSS+h4osDZma1Ma0TlAZHZh6kk3rXZzyRkvPwt89lBvc57V4pxLPVDyfi3IiSkKoI1Plbiy7MfcqnEP4CCSzu6WKRf3JQmRfidQoo3J6uNDDWzDbVtjjEgyz5w0Ke8lNCPB18pb5y4FItdg0GbmPLAVRsjvolGPgZXPHklk9/Tl6LfUfsi/B
7/N9UPrfhpjh3reu1KeAxk6j4PHu0/t+OPr6DXGZY9Gde8BQHtpOWOf6zHZ16DROBvdY2tLpOBLz3Ul+9iq+xqA4jkB3K0BkTu92fR5A0j20k9+7ogv/sDj9OLLnRYXb2dxH22YPHkjgJKMkc7Nah8OYtOzgchW8qdRXUhbFXSPCdSYjFk1/ag9vftl/4wAykwde2hWXT1b/4iJ4vWNJNAWo7iSBiQgAQPMaUk4aZG3mnPlfV15HndwTzdEAD47u5NTeyhmL0RkwkXhuPm79cYk16Bcw3HYeg6/Zv2W52YZ8b3g45oMR4/xt7RTFvExqGkoZVl5vPe99qyBG8CAuo426+qEeWUKaw2sATjmtfPBeyxZEEBjfDTurgWctTnR+63dr63hvF+0dtpM+Xnv8YTzx/tonJ6zL43mRq/PZK4rLQSytj7MaofOAXCXAgh5P623Wj64N41ifB80JHen9iH9jAIkv+O7fqAAiMdi9+vNSf4ffuL/B6BKBesGm3InzFKPKZEbNknNtMWcgxGrPn5OSgBHbRwx7rxNlnTP2pT4++6hczfmPRLBS+lade1hwLZMedL0yevXkpDtDQMs/UnCyiJ8sdwkGpht/+p5SnrjTZNAD9s7wSzy5/BzVtmZ3721YXaeX5NmAJgEfdY462m2Zvtutr0jEGOV0wQ75bdzeJBBIZoQ1umvdwm4eBdTNDmzrVXLfsma8RKSaO07jb7nHtC5tx29erVnjOb9c5iQ9dqq989+p/jR/LHe51prhdX3I1+pO91phn5GARIAeCfHhl+8w+NhaTYgSRQ/nojATFpcKDdJmziRji0mpC4+2wzwBSyJc3o9llTPq2V6tF1EqtlMb/PZG1Zz78LYAzUaE2u9z0sukucArtl6JbPNz8tnawnNSANS6hQRt0qbcplR0wrAyMCjgJbSjlrBw0Ltc8BCPiYei4tYPQulHbhJDM0Jx46rMzH/u0e63JNm9pgAKqMdW+cuBR/XYtxHNMs8yv7ha4EUUMwASEn2ujnn59f+rQIZfq2nXU7AImk8vHN42EyOWj/NJZoHR7amhxixhjQ3aGxrEaI00GytZVqfjda6a0rBZ4RWM3XtEVL1gIBWbtSOPaCGU88XpgfGtd/bayu71mpORu3n9fBxJNt56TiwAJ42PmfzbL2fFEM8O3LkJc+8U59+RgGS/+tv++py/Af/8t9tNqLv+L5P4VWOG08b02MWgVHo0ieEcs/rUyvd0DQm7cYt7ZUXNpnbhIuziyUHI1T/42GsFdGcVUdMGq9TLq6a/b5Vl6RzNhmtj6x+mz33ftClTOV2k6vX1CzLi2uc27VM7RpJ7Yg0Y9GASQhtxnYy0dqYbwU6ZvV5B5ghtmne2BpKS5LLtY9WmZ6006IegOj93gNCrg1AZuofSYqpjDw3ElDMCCbO7Q9LMEPH3M+DXyMgAgAPC/1O1/k8ofOApjX0WH3ymeKhrmfXmx6YmAEaz72ujcD/7NzZ086XXKtnwLKV+2O0H/UEJttrrd/JOXPp0rDC2nuOxpwlRL3TnWbpZxQg4fSBtx6acI30961D1Yo0aU1CAihrZqxeiQRuJMVNxzoDrv1OC8pSGMrkZM8lwjZTYG2+wPxCZElSUj2+aY9GM9KxnunM6P5ZabYlkdXq25zbIdG5VsjeazKY9Z0DG0u6dC79FmN7QDxze+/9uZkWByUhVmmy9CnxC3BcyewlgX6ghv2lb7l1VK9zJr3THECxQEmqcxx2dcQI946B64CQc8fOXkbh3OeMBAZ7pdn8WGo36K8m/JGkOqn71k9kEcCYzLEAdIAIynUC3d45rKgMWhpzvkjDaXyPgPKs1s+ia4KE0X52bdrTpr2gr9cH1lycfYYEKL17e+tPXdP08dwT2BGdEzRHq1M71/I/QT2+05320M9YQPKz3jrgcfEsL0PEw+LwIDazwqxmaa53DjigOClKshYAyfhr5g35Qem5E9IGSwLIr82Q3BTpHIGSRK0Wh7dth
tnobRyzEZJkm3tgRNvUmwXUACG9DOW9+y4FKtb3sqT2dI8FPlTG2nBWDxHQs+/sJwuUcPMt+PQ8HzI2WWpY4jL+fZu7h0e1a825YqmUA/oeILXAuiZt1M71fvPjnkP6uWDkEnpuBtKaj3ROBhvorQkWkNPAiHYPXxMX70zNCBE30yItSHPMADndtmm2B3xwgAeOGSy/d4pNHqqZb3AOKNkD7qzn7aHZ8nvqPkdzcg6Qkvdpc16e3/P9OGnaE6uNI2DCI3VZoJ+31XqOpFnNR/29NTfnv2/Gqf2FTcvuTu1jumlA8jv+wt/Gw9sfwBoi/vvf9XW77i3SrxwdxTJf4QyWvH/ErPQWtr3+F733mDk3qqPH7GqLtwYIiCzGbtQ2ySxaTvMWGBm936icZlPKz+117O61p/d7lnrgQ5aZrQtAkyBR3jrK6L6XOFjZmJH5vkmCDh50rZ42RiX1NnmLrOs9h/QeGHlu0HBtsiTw1vrQAyYa7c+xNK/608ZWaPLw1HlQs4UCDbimumKySQ+RA+kKmmdAl8Vs9tbnXplZmhGG7L13dK1XfvQee99z1PfW+jm7j/X2v5HpVA90budRAHeAt95DexetzMx1Wwtil7vTnc6lmwYkjwd/FSaRIqlozrx8nlV1fnXGnSVL4sAd2HsMk5QOA+OQoHs3KaklaevaytDrxquHCeZlzpGSLUwKZuVBaNs470fCgeascxuV08bcDKM+Ax73buJbDUrKSWJ9A5m5Hajjfg0xSXlF1nbv2nfnAJ2/swbcWy0JIDUlqS0RQI1URKd8SGZcgK4xoWM+JlIZ26/rHBpt9JZWZCTFn6nvnDZei/a0QzKTPWFFuWdHm7U+3GpEtv5zU4wnnxPpIPsNAmtYN+148B7eAQ95QziuAccMRo5rxBojPndc8XQKeDoFMX76ERppHPMkjBr1hEXXonPXpnPWMOv+a4APre4RwOudk/drQiH5mwOT3r1jbVg7diwn+L2ClRktyAiE0O/jLWhIwj0x4ptINw1IOH30z/4tPJ0C/vLv+cj0PY298GRkISJLo8LPc8fGZdlqWuSiJTfSS6S4Wp0zi7vm7zGWvDfONkaZMfW0LnROA018c+5JuWa0JM9Bz7GRS23STJm932OqHWeooQkMUrCIxbtNDhMgz8/gqtS6I/zWgbRHNXewGY5Rgjqrz+T5PWF6rwVE+L1vgpTSkhTvFUyM6t/+1ZMcjqisCWRmKHL1rBGKljzgwfsGtBMYOYaANaCAESscKl+vekyoVk67fgldOu4upd4eNft+s32wVxtyiYBIe57MF9LTJGpzRwOgls+GzHMy134JeLbj1zre+6w73UnSTQMS79rJK8P09ogyVwPJsTbVl6Rejdo9xgIsVjHvm+grfnuOm6IA2b4YKFJfLH2GTlvYRkz1pdKp2brsRXRhx/w5Y6nJ6H2tzXm0aXNq6uGO1pl62hJLG2dpRmYZWe3cOYwVlU/JBJONvJYckKJtcS1J0VyEuGH8iSkDgLAS4zbVrIZ412qgZA3V36QwgT75ljRzb4dDfvaiV6/wsaCZUO6ZG3vD9o7q20PXZgDscbur49HzO7POzbZJApH2XH/uaJLdxScndLon5RNhjFfeE8jvcPFr8TF57xTSnsFC/H7uqU2ea73TSAgzyvMi3/PS9XzvtT3lZ9dm65ylhdCeuVdDcE0avYc045rRkIx+67RfU6GBjT1g5BS2AUjudKdZumlAwmnxroS8nSFihBLVEI9L3mTWsBbJlwQiMkIRj8SSfrfXKVlWSpxFzrsJ4FialiCACmlYNOb7HIakd8+eUIHzCRLnwSKBGe6gPPo7V2d/0wJa0MGjS1mkgZE90u9rSMpnpGz8tzQva8ca01YwkL0Fzts2bsf9XPt1UJKeYQKTwVRvx+E4WhzdI8fzDM2AkR6T/JLM0oj09o3zIgAaE6jdZ4dHtzQAvfZYQMUia83orSUEKki6Lb/lGmIJA8/X5Ho8xxhqa5qMw
KW9a48xHz2vd99MXXufZ63VI83CLDA5p11W+WvPS/mOmraEnns5GDlfw8P/Wud6eUluQUMSEnP38s+8U5duGpC8OnhEyhviHd7ZoyHJ5iCLBzPX2r8AyXu0+c8ZrPYCn/R2vRKccLI21lGZS0guiBoTp0nvZqhtv+0LIEEJYAcIsBjzQqx6znj3wMhzABF5/lKpeXcjK11LWqLIgAfTlgCN5Lj7zKj3y7ZcX4qrAsUCWmqbeSZ44Hzpu6Q9Gd5nSRuv8tre+s4tdw1Ga3as90CKZNRH7ZIApzdXNE2DlEjL8hYYAbABHFY5DYRY33dUV496wp/Rfb1z5zLmPSGW1NJysgVYfe1Br+wsXUNIdNn+tgUlWpkRWBs9r9fm3pjnxxYYuQXwcafboZsGJP/CWwe8eucRAGlI5gGJdw4Phzb5FYV0lHNsAwjCNkQkEZ/sPKQwgBLqtNZbGSu/tM/hAKUwY0JSvXdDm5FwEM04zD8H6RKxdsNJJkmSAdg6OvM6ueSmK2HaoeWe3bjGjJbdt/K4J2knqhomYSYgQlVLExW6FDJQ5qD4GKTTPgNu7PTReKdadnt+ow30W02JLNvkNYG+0bftqOBWjgNtg94DSiSza2mo+DNGQoNzaC9ImZOAb7Uc50rht9cWs81758zMfb3vaWVLp79PpzX/1RzVq3be6tsRKNEYP62cZMxntSOz/cV/j/aAWeqNdTnPZqX/l9I5fXXJNU7WWgVszbioXk07dq1+6gks6VjzhbKe3xOgvikUw4ooAla8xDPv1KebBiQchBCz9h3f9ym887jgD/7K/0P/XgEketSad2XtSrnWtmd0P2dWCPxIe3p6htSa9Ki3EZ5DmuSmR5rWZO/zZH3AVrKc/rZ+AdoGbanCtfI6CNrHKO0FIns393OZAcv0QWOQKOxuNYvioEMz20INi8rIO00y3oLzS0ibG9r46419Oaa0es7VlMyCEnrepfRcDNylYGTuGfvrO1eKPyKNMZMM2Bq25lmvWJLDXntn1herLRb16tuz/uwBItdci/jzemHetfv2ALIRnQuAz33uOVpReX+PRgKrmWt7wMgMULnTnUZ004DkX3j1gA+8lV6BS49nfEneeVhUBovm0pLRgneu2K8TUfJE79pFsrc4brQlRRxN92yZw5qFWn8HTUqyRwsyop6Zg8UUzJTTyNpY7SzGJF2tfib8usZkyo1MMqTa5jfb7tnzM3X3NCS9+9v36DhbnFrfHOs5RzbuLFMsTePRAnYa9/I+tboNbe9zjZZk1vyrJ1m+BmmOqhoI5G2ja28CbZlO2zxqtg7gemY055aTNAsEpGaE1gvtftI+8raNgKj1PKuNI4C7l6HeAzrOBYC9vUADZ5YwoDdfz51H1wYY1yItb8lesLn3nhkeYnac9oD5ne40opsGJK8OHo+HmnOh/HUOf+qT/yCfS4zLWwePB+9xWBwevMOrg8frU8Bv/uU/d1Pv9/ydf1SOl8wAEWPFNSty0d5MVq/5mGyByUhbsoeubftuaUrO3Rh5mZlFc1We3/b1dXNOaDQjzbq0L0bXRmBkRNYmMQvKVujfSstJQppp7yhykZL0kAHwS/Z/acaojS2Lodl7foZmzbeuJdnV6uN0jbpfWnr8XPXskQwToOeMmMVsceEGaeZ6oKTXnksk3z2aTdQpf1+ijboW8B8lDz5H23DJmN5LloanRzMJFS2aBR69MpqfyCwouQW6m2y9mXTTgITs5GlBesyi2Yel9QlZvCtghOcbOSzGxCXmyQMIsdGIpOf2F6sQq9kKMXIzC5wFSng+hm0fzEcuOZd6oITa0KNztAjaIt7Pbt9POHYJnQssriHt1ZiDUb6LraZET5y1sP60bJf3tl1rb40yR+dbgDJyhueAI4hjTUMyA0auQdb41/pTa8dLaEnOERzMhPd9SSnyczyrBwYsMDJD9O01rdioHVbZWdPBnpnsSOs6Lrc35DPdl/5qAQxG2stlsh/3zvHeeNpra
szJ+kYjADiqc6/p9DXpVgHHnW6TbhyQpMn3+hTw6uA3QORh8XjwDt4nrUi5j7QdBiAhZill5I1bR1psJbMSLFDmXrqP5jUBJIs4KNHqtRLEzZgCXLJY9SQ2GtNzCYM+ItqAeDbjdI42ptaMS25wMowm/6tpaHrtOOfazHWtjGVm0QMliVrzLY2RftrRrl47D6JN2t9jwMaHS3Ok5yQDPljaw0s2UI2x0ZjBnokDB3l7n3UNssbCPBNqh9S1zj0nPYdmpPdbBsyQa0jvu1I/W6CkB0BHY2EWbPSuzR3r3//S70CBSepvbpIcmr6xjq1zwHlA4tpaEKsN1zCdJjp3HMhy54zBPXW96RRDeB80JG9+Bvv3m24akEjiGXYB0iiIzXcmLGlI5lY1IWJaBLdmJ1sKEbrUluV+mNVcWPbxBIYsydJL03OAEfk+vffb9oP+bCpj+6Vc7gOz99psuXMi3mzfibKWt8CE13npGLKYsg0JPxPNkV6vf397LqG9zERPawLoUXSIrsV876nnpQHGS9Ae06z2WhsoQ2pGrKzrsn5r/F+LCQT2Czw0cDECIpdI9oksMNaCDlqPtsCkpy3h7Znpx2us0dazemvnc4QR156/57wkbZzvefYtgpI7vXl004Dk6RTx8FA3ic8+nQAkp/bFObyKOatu3Epke3QMET/9tGKNyVxr8SnLeqrDdlYMETiGUJgmkgAdc3brIyIeFtJ8tLbzcj5rYESCIQlKOI0WyBHtVRXz5/K/l5DccKw6ZXbturmOuVcNnHA6h3GceffnUsP3QFzd0K8vGdKYG007oiWWW9g5Mu/qkRb6WmMezzG56T7XqGNP3RZgAeYYgdG4sRi2a9BeoPMSTMrMMywQoplmyb+kUbXKcNKYaQIns2R933OY6ZFGRM7T9ppT27N3LZL3S9+Ets94GGgSntjaEn58DaFPjzQtaK8NmqYVuExboo2Nc7VY1vy0coxo72/1SbgBIUdcV8T1hTUkL/y8W6SbBiSceJ6Fp/z3Yan5OzRaY8T/8P/9cfz004rjGop2I8SIpzWr7g9A8k6PJcSplu2Ea0aKOUlw7W9I4GG3S94nSYswtHfz69E1VeCz2o1R3TMSqJ400pK+adfluV67ejTbj88hqT5Xkri3Tjpf+28tmqrFe9UMw+wXYY7I564FROTf55ZGyjbMXJsBulZZTiPzjZ7Jy0tpQyRTe21gMltf//vMgxFZXw+Y9ubFDO0RBM1KxWfASA+IaM85V1ui+QFKAJeoak3oebp25WXH9KVj+Zrakmu9+7n7710zcqdr000DksXbEoI1RBzXiPdcwJrNQA6LQ3BbzcTiXEnoRiDgrcNSjkOMCCuplAEg+ZZwIiBzXBOY0dqzeAdkkPywbCMPVRMxbM5Z1JPEaAvWtZJd7V2IRhK+51rYegsp/eV9MoroQnSpLe9LbaTAeZvo6J6R5qgyOJWJ4EzQyrQimtaklFOie2mSu0u0F881/kbmOr37qF0WacKH2eSM1wQqe8f63n6+Rvm9WpGZv9qzrHE0+z1nrs2CAwuUnANErgFIrHGmrbcaMElkO8Zfk26Bye6NCevbXPpePSEep1vQkNzpzaSbBiSPBw9vSAkBFGBwdInhWaMrTu5AK31dvENY62R7WBwW53EMAe9RIqwYU26GQ81FQlJbMstaY8TTaQtIqF18oaUcJ/V6+mtFD7LeUyNNEjOSes1c45urfP4lzM1LS1ukRI6ePSOdPLfvZq5bdB2pcF9rZNGsRK/HzKS/IQOTyhwROKEys2OUiLe7FzN/luQ4tCSa2tgfte9aJNtI/cT9VOQ8lRLmlwAiM/ddq3+uBUTouPfNZzQkVntmmUft+l5g0pafN8+yhAN72t0jSxg0Ji3S1/mOwpdq7V+SrqFp3/teswAEuA0QxynG9yHsb7ybbI3opgGJJ6DBJsMf/jd+4dS93/N3/lGxvSLm37vqp7E4h//zv/TlV23vf/Y//sjmnKYNWaNu8
y7f9dqLwDWk9pr6XWOetPt6dfZImlXM9NG1NDx7z+8p0+u3a0matfOjSEKSegxxn7Yhic8xcRm17xwtyTWYktn7Z+aEpmWQ84t/B8nA9Pp1ps9fUqN3DdLAyN77Lnnu6Jv2hVXYlNkjBNGACD+v1SvByGz955AGkrXr2jEg54VcP+a/9Uij9SZRz5RvRuM5K0CRNLsONuvTROCgO91Jo5sHJOfSP3864YFJa4F2Uj33mrRmLYhmIy+Z6nJ9BxjZY6d66QZjMXJycZxZ3GaBCR2Twyl/5iiPwMzifKlphEZ7+9nSQvG6zunP0djYCw77DMOW4RlpTejanlDEsxJyq6xGXIorQ0XLcT6qe6ZPZzWT/NzsmJJM4B4tybUZUq1dl9Be7eHo70jTduk6tvcbzGhJRiCEH8vnaWDkEkELLztar3gZyxdwz/wf5UvZA1i0+mfP9+hcPxJL4GOtSxr11qrZPCojugXhxT0x4ptJNw1IgPMHf4gxRfRRmOZc4BrNa0iVXAjTrHMlGS9NowVak96ONpeZRd9iGqzj5+zPa2/gvfutDWcvQ3cuGJl5hgZE+Lk5zUmbM+Uc2n7z9zf++16Gmagn6dQYtr3apWvTNSIJ7aVzJdyXrgPXAlJ7gKF8vnY8U57TJYkA99JMf0lmXetn69zsM95vukZeEv7d5BoAPJ9pJDAeM3cfkjudSzcNSB68w8F74HH/hDuuEQ8lMVO6lxIpAsDrZ8j6/eA93jqkh753ChtfkXM30nOYkJ5Epbe4z2pmrEWzt/mOwAMHIVvpZlDO9SWdPZIL+14pZfp9PlNd2zEOf7lHQvZSNKNJsbUmwMJ8q3TNlN8FNM5lXC+hc8HdczFXl2hJZNt6NCsBvuT9XgpQzH7Dmbos4LhHy9W/fllSw9HacgntqUMDJbIOi+EemSvJdeNSwLOnf/aawl5Clka1RzMhxafOHy7f9+70M5NuHpB4n3IXLMs2DG6PtLKLc3jIyUNe783ANkH/l4/8vOb3n/rkPzD9RSSN1LDnSkZnFi7+e4905xxJUO9ZEoj0TLMsEDJq07n5QSyTCXltlqQJghb+Uj5jhom4ZtjJa26iGqMM6GYfqfx11d8vAeLO1ZQQWeZamhZSAxwvqUF5zmRw1wIj7wezfW3aY+6lUU/abv3eU/+5Gmpt/PSA+p5r54ISiy4ZR6N9XT5Hu7aIvpLf8xIaAVvtnBZo6E2ju8nWm0k3DUhCBA4s4eEeQAKkJIZA9UUJx7Xx43huohDCPFKIZpIxQzOgoccgWE5z1j3nSGBm2sHr0I45CBlpQqwkTxbxhX2PU7AFRnqb9ej7bk2z6iaqMZraPdY5GebYehciiynZ8+1nALWV3fq5meg9c/0S5uVSBthaGzRpsRwjfH6/RJ++pNkWp3P6twdQLhUAWM/bQz1w2Z4LTIAxw5hvTezmA1Lsf++938YCtbMAqfctL9GUnDPGrhF8Y6S10b5nj2ZD12tWAlq53rk73WmGbhqQEFECxOMOpcbiXAnny8997rjm6F1XbqRCrxaPo4sAfDHfGklBgfM3+94i2Fto92pnzq2nHgflnA0+Zv1JJFmM3KicdU0Lrbnnftne7Vio2hKNYbe+Re8Ze23IrTHJx8/sxso3unKvsvntiQB0Ke3RLI0Ylj0MzajPeoBQY05l3XsB3iX9Ors+XUtD0SOL6Rxpk07GtdFaMrvWyHv49R4wH48tuQnWjUwTLMysF+cIaa5FI7+k3t4znjO8b8KwPK93z7jtgZFz9srZNo2+jSaElOBjBEw0s+R40FJHv1l015C8mXTTgETTiPxn/+OPgEvPAT0U8Md+xf8eAPAf/bUfbiba4sn0y+O//lv/AMc1AYXf93/8+Vdv/+JT5vfgY8nq3pCfAwwazTL/M+XpurXAnVNfW3arIRqBkR4Ikc8eSTjluVmmXl7jEspLaM+3HpXtSXg59eqQ0W/2jMmRpHaGyeoxI3sYwBFdizk+R3r+Esz5c9P7p
RU5l3p93pPOn7smj0gCop62cARWap2kVW01JxZTa52jNhEdlLKc9uwVMzRrAthb73oM/IzG5Bya0YxIOmcv7gkGZ9fCkeBsBES0cne60x66aUByDBGHNWLxwBrOY4z/8WfeK2XefjzgZ711wOPB557xTaLDa5N3Dg8L4J1H8CjZ3lO7Uxm5SVhmSCOmU4I0qYUYLSJ7FrdZTYV2TXu/ETix6pfnLM2FJWmelSSPJI/nSBK1DbTW1fqV6GUuk4hb8+ak1D0z77TvZL0jP2760fh+nCxGaW//y/utyD/y+2vvpl3TnjOSsp9Lsl+1fp6591p06fvtA+zj4AeWJkTTHvZAAdU1Sxao0EDJ7P28Lbw93OcqPUf2gZ8ai6PzRJZPwyU0U4+V+R3QNM62BpqXH81v7bdGM2viLPXWkllgMk5k264XveSanJYbyEMSQ3gfNCTvb7THW6CbBiSceQ/C94MWiFOI+I7v+xSeTgH/4a/5qk0d7z6tpRwAvP24AKeAx0M2o8qM/H/01354wzR/26/6xRe1nyauzyZnWOt7LR5A2CZ+lGQtcjajuDWH0n5rz5llBEZg4VzwMVN3711GzHmPuZTx8Ud1XgOUzFJvc7SY35k6+X2SSZN5OaxnzI7dvRLJWW3KSJJ7Ls20d8TEzNZ97rjh950TrMEC8G8CXVs7MVvvrAT+HJJrLI1nzUn5XPBqzZtWO+DVMtrzZgCKpdl4jpDDmomX9c6AJUDYF8Hv/SRtX5ndk4j6IEQHI7214Rac2u/0ZtJNAxKgzW4O6AwYgQqNPve04ulUQ8Y+HjweD0v+Wyfhc2x+3iUtCVXtXQTgmvfSSDLlLXOu53HQwuLK473gp3ddgg7r2SPQot3bXNsZyGBhpnHWQkwLtyYhO4WIV4fWcb3tl2oicSnzsIcsSe25jK02Li6ZB7OAt3deZx76EkJLOshphjHq+ZD06rak2L3yvfZajEDvrwyvrI/72wrVOTsOewkxZ4Qbs8/VhT9zzDu/btWjaSDOYehnGNg1rEoZTI+R3jrRAyqzTtY9oufKwB09rYG2ZmpBRHrl5TX+Tlqyx169z0U9AYUGNPi6YK0vWn0vmdvmTp9fdPOAhDOk2mZNTK8l8Xj3acW7DJS8Ong8HgLeebQds64V0vJhSSZhCZG4lFAoRKw7qq6bqgyBu1VHjgCatkHPUE/Tof3tA6ot2Aj5XFTapZ3TyOWxwZM2rTEvrkEHHxKYaG3UiEscNWkjldlLWwZna7o1K123GBPtPg5G9mycexk+i3nXGQb9Wu+cJR3U5rK2qVrSV+sZPQDSY2pGJJkGfiwZhoNyjh/PakHOGa8aXYPpuiYYuUYb9oxvor1Cit58XcS47GlUenWNx+52D70EqOwd67PPqG3TwQkv1xN49IKIjAR5vLxcN6w19FxQMiPosNY++VcTUljryqi+N5lCWIEXNtkKd6f2Id00IDmuAYuyGdCEIJBB/37f//0Hy/HrU8C7T9sB8vqUFp/PPa3497/+Fz1r+/dK9816DDOset0GALIMp1nQpW3IMwCkuSb6QoIQ+huUNkWjHx2zZdW2zMAvMp+dEZMrbbv70rfWFEKWuZQ0oD2zsT3nprFHiqyV6QER+t1jCGbu157HqSeBluc1c74eg7cHuFjUAxdSK0J/LSnnyDb8TWAwRmNnz3y6ZHzKcrNgpFfHaLzSuR7Ty3/3nM+tunvPG4FmGcyjty72BAfnan0s0t7rHOf4PSHXe2uVZu46Wtv2vOcs9cAEHffAyKXPv9OdLLppQPLP31sRH9bNJKYF592nFT/93nGzaZwyI/x48A1TDKAx33oJ0hhsIEmegpI00bKJt8DGCIycE5KwV24P+AghbgAHnQcq0NDAyIxmxLGFkuxa6Zxzrtq6HjwCItboNtoSGiOzEsU+Qz0nIXk/FvhrSIx7QHcP42YxLkRy85xhpGa1JZwsBknTKljzlOqZacvMNxiBhpcCIntyFhFdE1RY5SVAn
wUP2rVT2ApTtHvOBSV7aCQkod+jtePaTH8lzQKhrznR1kmp5em9k3Ze63/5HAkMtOdsn8vfpR9KunfMaa+lRe/dZkkz1dLWClm3phm5ZSByD/v7ZtJNA5IQYjOh5QbydFpLhCyNidCOexuQpP/or/1wKct9hqWEQwAAM2hJREFUJdYQVQf6c+ncTc4CB9oi2AMVozZNAxLFBEuCDa7t2FwLUdWGhBB1R7qwPfRIoCTGiBASUIkhNuBlJM0GdGk4/8vLjL6fpWGxrl+TZpnEl2S+ZN09MNiT4lrHvWf2ru+5JqWywJYBku3e851HYMQGIXNgpEfXZmivAUSuUe/7WWcPgAPzWr7efCAaZWefqXMkoEnnrhMGfQ/11lwJDKxw5qPfWjQuq49mAYrWt8+5D8zWxct9voCRO725dNOA5PHgm82eTLCeTqspsbUkh29nn5G90kpaQE7G83p0ys4ifG4v3uHYWYjkopau+Y30vdFIhH3JAy1A0pMSmnXsACExxK2pVuTn6nOlhiSg/Z0ARoTL+6HP/iIxODiftSMeWNeIJUu/AgB4YHH25jEaH3sl3hr1wHPvnNWeS8rJb6oFKdgDZtUxNDJdVISvIyZplsFv51Gl3uY7W69sb4/pHAkJrHplO3tAhDOH8rpVv3zGOTRaN2bv2ZbR/QJ761avLfzarLbnWgC9B8D59VmQMPqtaSLk8/cIViwmu5cHpfcOHDRZwoRz+tvSlsy807ZfFtZuapNt2qWBlT17xbXB8Mw6JoEVgE0o6js4udO16KYBiVxwKZIUhfIFdBX148Fj8a6JokWaFH7fHrIA0AzRbSniVsTiHI7Q6+ILtrYJaO16fdKdO0eM5Cwwae6Z8AU5B4gUECPMuDRy3oEiA/gcVSt6wHnAZ3+RgKotCSEWhXwBJRhrSLS+7+UNAPrjqmfvfc1Fv9e+HsM2Mu+T42U4Nid9qKxyHDg25ztM1qZuwRzIvp45nqE9ElgizaRjxplUBybzYTt7z9pLM0KNmWvpuh2KdQ8A3nP9mjQr8Z4Z0z2J+h6Qru2PI/Czp0y9vgUmPWZda5sFTM4hbY3QfMP2P4uD/u2YPWct3wMMeTnr96X0nN/lpehusvVm0k0DkrcfPR4elzIhnk6J2yS7fwB4zGW1TfrxwCUcWynAHlq8wx//xq/cdU8JVawkEpJ5SCypmWSm6Dz9s8LuzoCNGSmgBUA0GkXJ4iZZyb+kLROi7nNikfMJ4HlH2hEAPgOhAMBnc61l/0I6knifc76X+E/6Bly6Cexp1yiK2rlM3eJcF5TwsaSZ5K0ZvPO2aZJXea03p6z+1Bj55wAl1NY9zxiBph4YuRYQmTUDta5rdE0Acs7zOWljxxpvVr3nztU99+0BnID9rWe0NNq58W89LHoPlMh2PgcwkXXLZxLtBymNqKt5jqUxWdk7a4LHkSByRqCl9X17jkR2W1oMQcmsIOpOd7LopgHJBx4OODwuSKEHV3zuaQXg8fajbZpAf+UCNyu94nQJgAFSlLCHZeD451xZF+RC9Org2YLVtonAyCr+0TWLWbAAijS9IpoNuyspxtZ0i8CIpRXhpl68HdynpETVCrHxDQke8IHuS3+XxTW+J0EBGKMxMGJyRtLh0ZjTmcstwzHjeG21d9ZUaDReRkDXIglKLECrnfe+vZc0Jr2NvgdGNm1jfT1i5mds5Xn40Hpuf/9rbdSOZ3MIANf1C5kRcrTXrwM8ZsbcXiCzZdT6Y6h3fi/NAOYZgCzPX9MEzwLR1m8OTKQZ1yz1kp2eA1IsIYZVlogDB8D2EUtjYsl/6zrQAwVUhwVKZkjWT6T5D2nPbz0vsbnHOneNlAjPTuuK6F9YY7HeNSQjumlA8tu/+sP44Ac/2Jz79/+fP6Qu0lLCDNgLKzEgH/8f/j8lSaIGZOi+SyQCvQSIi0+hsjVJ8p7nTgGOnRu8BCLSMfwS4r4iXCtSzhntijEWUGI6uk9Sb2OSTC8vPwtIrP6+VOo3k
5n8HBAif4/exaJL58sMWdLWWZpx3twLRrRyFkDpSaDnnmODEUnXACI9BqQ/H14GiFx6XSNrHFtakj3fby9gnj1+Dhoxp/J3e9yacc0+R9OanEN7+mnmW/eid7V//QaUyHrPGZPWWiG/Bw9b3xPY1L8tMNEAJ39+z0riTnfq0U0DEo2+8J2HAigW54q5TnU6S3+PIeAnP3dsonBxSSiZc/UYzUsX+9drgA8OD95tJjm1mx5R/EsWh9XHdmFXFiCuHaFz1rF8L0lrVHw8lLL8nAQnjTZkwncghqiCNU0zspcIqHDAop0D7G88A+x6WoPRhtOTyPck3ueMyWsxfyMpKZXZgDkGuHn/9za2Edg8B4iM+nHb7+dHqiKp6fYc/90ClvkxYzN4vL5rgxFLuDECIqPyvfOz43EPSYClSbPpWJ7j95zbhhnNiHa+pz0B9n3v2bE2U3ZMW0n8uYKEWSA4ownR1q9eXZpwSmo4pMZETzbZd3afEWyNiJ7RAyf8XdK5ddMXWuLfa+VXe06K8eUTI8Z415CM6PMOkDwePB68x+ITE78Ixj74zGBH1zDmXGqRfrd/9/qHzNAaAPgIwJaIcCBFoKRe000DLpGuzJCVpPAcjcSsNOVc0zBOmgaHn6PjxbXffoYs8CG/x4x5kvcOa3bKX0LL9PT+nkt7GMCZaz0ymTtFC8jN6bTzL0VcUGGBkT1M5Pz34gzaVpuit9UGSdeWlmtgRFIPjMwAkUvO7aE9ZiYas2gx6DN9bq/9Yyb43OuXUk9jwK9rzLp2rZXEt4xuj+HXJP+ynbN9MQJ0FmkCSr42a0Cwtsujpyk8h3oCU60/NFBC94/nVdt2DaDc6U6z9HkHSJ5OAcuDK1GSiGiOrDHiuKacGJVp1LNpp2vPP7leZ7UNAShLwiA1Br1NQZPy0T3Whrr3XXuMoQYgZsFHCtXrqt8H+yT10HUZVeddARjeufw7lXHltysJEntgZEbaaWqiFO2SPAZ0B27nHUimUt7Fu00bexqTvXTN8T7HQLMNUAHma5wzvePO7edoLPb02V4n8UueVWlr7iXbo9X/XIzpDAPfSnLnEhZqv2fLXNN2fWYeXAJCrHr20kiDKxl2Ts+dT0bTMljAZFtu6/w+c99MlvpLqAe6eiS1JXSOQInm9M7r1rQs2jOsOmbXdRluucc36OByxdPprgm403n0eQdIKjMI+IVpRmIFIyFGFua3hvt9LkmiRcnBLZmPAcBbh/1JpFoNz3YhsBYU+Y4jNTEn591ujYWVfb2p1zl4n8oSKAGyQ/qStEP82dZ3kkAEAJZDPrf4AkA4GCm5SVi9o3HQ04YQEJF5V3rvT0SfsdHekKbPu2KiV9q8boHUS4zhPVoBWWYa+OwUHs5oBp6jb6jOS5i82czNaa5u14qX+Oaj0M8aWWCkBzJmGK5ZX6iXoJfaM65Jz+l8rOWqmAV5bTlda9K7TwrlLAabl+21Z++1kVbGAiWtkKFt596s7rI9vF4+/yyQofXhjNZLPu9NpRjCy5tsXVkT9vlIn5eA5N3jisU7hOgRREjXECOe1iAY+ctjhUv6ju/7FADg237VL+60NbWnSj2QTbj0cgAazY4k3mZNOqSBknMXDwuUjMCKcylLOpXzvtV0ECgBeLJDBkzQakw0Iv8bqRUB0AUji7MTy1mkMVcSjHAwZpm7AUpggMC0A9QnoZWVl5GrR5dsyFLh7ylvXdsDTmbnlyw3GqvnMBeLdxfN9Us1IvKdZhiPc54pf18rtK9FmvaZn7fKj+qz2rL3fo0szfE59NwAZXbdvsZedk69o1wVM9L37bX9uUz2tvsapO2rFqPfAqmqKRkx/5q2ZGY/t9bUnjBSap5G73p3ar/TufR5B0j++XsnAGnxevvxgHcel4a5fDoFvD4FsekknxO6L51rHU730hoiXg00Hp99OjXllwcHhO1iGWIsZlwkhad/p2ZB8Xg8pPOkAeIMDmee6TrP2VLaoSwyxZ6fndN8MiRTT
Qz4/7+9s4+1o6r7/Xdm9tnnhdIea2lPi1BbRLgVqAr2pNeIJjS0DTEg5gaRe4PEQMA2QYtEayKIyRMI5BKuhCt/KeZeg0AekYhKUguFBz1UqXARKn1oU61iTyslfTntOWfvPWvdP2av2WvWrLeZvc/Z55z+PsnO3ntmzZo1s2bW+n3Xb73I4wFkUQI0bWnpGFGgiSxorU8SZPbrSLs7SaIk0yWrmdFBGCASC2MG2QkNVCNVdz/UexYzbhQirNklT+wDkFlvJRtx9lpYnL8WFjfFFmuJLNbcz5rprseaAfqaZ0uHrRXR5YHwESo+LeOu1kwfIWVLq8ko6WQ3ljLCQXTtULe5rt10Lt32otfoMv5d/9Vtvp6Rsh6RTogTW9gi4tt1r8s+4zZ88qMTuN5BV/cpkyFtM6yTfea1TFzIYyVEenyEjDYtzXJZ18Va1wCopkN3fnGs9nyOa/RtwDG9MyahknueHM8/a8x8TwBnXRjUTgsjOplzgkSs1p68YMlA0ErYmsVKiBHdC9fpwWUAcN8L76TnufOzH8nsqzXyg+hlTGNGirb26eYyb8c7YkMY/sLwVj0mwiMiRIl8jDhONaQZgABSV60o0M6yJRvu4lziv7pPHjOiihEZebt6/33uX3ahR64VItzw2DE011NpTmfMGc/ORyN5TaS/6fWlwi60VJyaRTldFaO8zyVQbMeKc9kMAlOlXshw6KDImOpz2lpX2427DEXESH6ffgahMnH5ekRcBpuNTt5j37WBbPtsxqU8xazuuKkSIb6o77LOY+LjFdGFlT0m6lomvvnWrijRlZu5MAahZaqHRaOi7r2Rw/kumFj0OdA9/762hu3dJQhf5pwgqVaidFBVrdEaYCVekmolzKzQnnyrHhNzi68vaiu77iU9VYvTcEI0pUadvAI1z0/TK3tHTOcU20xeDxGPrfAxzXxkIxkD4l/5Zrp4aRxTucUYDYP+5W5ZQFZ8hNJ9jdR7bTGuBaaKSr1n8pgRVYyo12MSIq245G5rTXESJ+Ikbt7jMAzAkfyOY57xmohrF2lU80S3sKDp+nX3qsj906EzBNSKOl9hu1vFXd4CX+FkSmvZ7UVbv2XvZlEDoR3aESHJtvxMWlMpRNoRIbbjbPmn+69bo8r2vwhF8l+3mng7RmLZZ059b3VjOuT9ZiPdT5So59bF6zto2yVQyk5taxIW2XMlXbd8GifENvl4+Z3zLTNt+1zPkrpf9AggiKLMQUESNj0gMWoNhlqDpWtyAMCZfRWpVaXVX7NoC4sLXbcLlfFaI114EWh5TMR/XaEnp9WErnVJLgTl9OgKuLRAUxdj9GgVApBpqneJE0BvKGdmnZL2WcenhFljXB7o7WNQq78B+32yoZ1ZS1mJ3nVNiXdJ/Ja6sknjRXIz+Ctek8w4FCVunQdFzWP1PvkIkjLdnmzC2Xbvfd/bTomR5Hx5I8jVsmoyTnXXpTNaTPF0ClMfcZNYcHlDfI6zCROf7lllvCRlsd13mxDRHVc2D13PmFrGq8/NVAg33fGuxhzbYHP1v9/vrKfEJSZ00wPrrscVj4ui3gu5DBSeElvjiq2hUU1DEUGrHutq7FHPUZ8VXbaYVElO4zkJK3NOkOgGkW/+9/+XLoLYWzG7RIHWy1VrxIjCENVKiIFqhAf/Yx+qUYjN/3VFqXRFYYB7tu3B2ERdqZQZYpY9d8x4aliq0wCrBdBUVL46vMWIgs4Y9nJ3R4YwlqE9OkOg3RZ9XyESs8SYF2M4QjTLu1Ca1JEhN2CfM/1YHBPq/RRjdsTK9Km4aZ4nCPL7RDc69by2mbrU37r76rPCOWA2nNRK1tTyVwY13rJGRiu+JHNNhoGur7q4xqK0M8OOjuJT9pp++y9w6CtKynpEfN9RHUXzRA5veuZ9yxYfdM+Oy5sg0qYTsy4RPBX4iKmpEts2fIVH0ffXVzybF04UnhIAYMbyS/7WxeU6v08afcO00uKMhiC0zDlBomNeXw8OH5/E2ESj2
WUrTA2p1idErREr3hKGM/sqGOzvQT3mmUHoLmRXrFzgjU00EDOO/qrsGUnEj1yI9FdblncUBEBobq3XtWSkx2oqGt+KRyccfAo5k+DwEQLtVN4uQQIUN5qBlpEk32f12luTA4SIw6YnpJGMfomb7nd5oHsQNAVJc1a1dmYmSWfukrYZPSbNTbIoEYJFxuUZkf9XNPmq+18GU+tpmZY/HzFiWiRMj16URGF+BWQ5DWUo4nUqI15sRr5JhLgaRsqIkCJio6wwMR1TRETonvkigqSIYdspQ93V/a9TosRkvBfxlHTCQ1EkvabzCmzb1Lh8tsnYRQmQlDHCE2Qfl+KTh74NBz5plxHnnh0eEhrUPhM5LQSJakQBwP/6wiWZMN/+1e7Mf/EiNliybomYnveebXtQaw6Mv++qVW2lK/uyM7hm9prqVizXOXQGp7e40Ri3pjht/4tsl42HThnLXi1lTc8Ib4YR4zqEt0QE4aw1uF/nSerECuVCdIiB8e3gyjs1bCcwVcCdwCRQTGLCNx6TiJouinhUigkAPzHiu6+oN8TVOjtVmBo1XAKkE2WOTTyI7b6ivNOetjJ0yxOi4hpk7/vedyKMDt962CRC2s1r33KhE40tBCE4LQRJtRJi6WBfOtWtiSgM0pbuU7W4+bJPpvvFCzdea2C8Zle78rS7Ymav8VriHWkwjnFpQLs4t9wPVi4UQ2l6QbUVVhxra0W0GQG644tUGL4FpywKTS3v6nE2b4ap0jfFld1vXvU6+Z81glIRwrJTLYt9ug8A1ES+VcLslL8suzaJ2AZkRYjwovgid8NSx4eIcwlRouuypRtDUhZdpd5O63U7YUxpMbUq6jwc5nPkRvHk0C0q5qLIO6hLm84g8TEyXN6QIuJD3VZ0UUWf1tqiIsVWXsh5JPLdJx/UssgsSIovfNs6Nl9OFSWWjHB14LvP8+l7P2cLRWf+0uHrJbFtB2wD3GW0K1AVbjgxpUV9x3Pvn25Ma9y8T1K90SAPifmchJXTQpAASbetmPHmzFt+L0wSnmFsog4A6excYi2T//5/XkWtwfDkTWtyx/7bxv+SfG//zzQetXIWBaKpUIvCIBUjMpFi4GR/m69HHtzfydYMncFmEyM2QVKx7JN/Z7fpK3qXWGndtyizTa381XsMw/MjwolPOjkB44gDnnblStYQyYqToDmVcRQG2i5YtsH8MjqhIW9LBta3hIoufKeYiS1mPi2hsuBXRYnqAbEZA2rDQRGvSxF8Wshdxr1LiNh+u/a1P2uX20vi+6zpyimfPDF5R3zEiFo+Fc3/VtkTStvEWhz6menEcaa6osj4krLp9sU081UZXI1RLtTzu4Ra2efOhalO9RWFunzO1WOadOnsA1WIZCacEfGUnHWMIGROC0EyUI1SEZJ4QfxfHrnF+9ipGoBkut6asp6Jida6J8n3pEUM1RoxqpUoaSlpipioR//CR2GQ61eeFKbCCI6dhY6Kq3IuWlnpxIdOmFQM4Vr/w8w5dJV+2QXt1P7rrULbvPS5upikLETk3yJO8a39rSymCACs6RkJQsl7EmY9H+l2B/KAdll4MMYzY0kYst4U3xZCGZ/Za3THyGgrxA6LaJ3RYTLmbKIki9lTMtWtyEXvi/4eM802PwGi/i86Q1aRuHX/Tdt0uDxlRXB5RJJtYS68LU0ug1MVJ/JMTCYx4iOey0wvXQbd+X3H9ZnjLO950qETIZ28H2XEiTiulabsgHfbMerMmoXSKtkdunGOJE6ITnJaCBIxiF20WrsMa1VoiP9HT7VmyPI1kmLG08HyOhEThQFqQDrYPmYMtUZLbNRi89R/pmuxtYyZ0qirvFRc8ajpVIWF8BiIb7GSvRyumm4Lpd+BVrTozmlDZwRVkc1LeXBhcj+i5v6sMEnvl/IcqAPgocSv/pa31ZQV3VXvCWBy2tvRzcCVzr4FZLt5NacEU58DV96r1yqOse13HW8zWotiauk0CQ15m++sVEncLWGia5HMh
p06XNP4tr6ZZpu5bNNt68RYkCKCxRXehK3V2fQtwqjH2M7R+h1qtvl5YnxI0lmmRMiLF92Md3LYdrBdk6kBw3XPpvr9AaZOlPliK4Nbz2k6h2MGXV1u8/zYrjW3DpjyX+0mPBtgLEZAXbZmHKeFIBGoK6XLqF10BqpRKmBU41eusF0Fo+i6BSTTD7sKBfHRGba6dJgNLGuyMpgqajWdcljfSln8V9NuGxgahWHmGHnRyFyrpFgMUdO1TV3pXtfNXzU+zC7upOC3Ga4yuj78qtiV/8eMo4qmwAzz7e1M2iAqBHW9FhvqTFrqeBIgO/2wTZCqz4vpeSjVItcB47aT2N8xuwGrMwrKevJ8rtPlmSgrRnxEiClcu/nZyfzvlBBsJ55OilH1PUxmasx25dJ5TNRj1fIPyK9hIqdfTYO63ff6XAtJtot6r3X3voxXdyrwHYDu29CoK49c69KYxEmReoYg2uG0ECQx46hGdrduIj6i1Oj94LxeRGEyIF14NsRLfGSslhuU7oOYyjdmPO3KJaOOM5HjFt4CGa0RmDHqkXpnhJfBhs47pJ5HLdRdv2UhIbbZPCNyHvRKHpUoDBAFrTE1UdgSIaaB2Bl3M+foQUuoRQHPTBIgX59p8UhVlPiIM5NBKCoH2Wum/TS7dAVMeEyQekx4U2S4WrAAZSHE5n8xnkRsE2ljYbICfBwng9zla1S/BdPRYinohAixCQ35HDpPSSXUGwLZYxnEVN5yPDpM906Ed02LqzvGtl0VIiYxYjuvTx74CJQi8RcVLgJbI5C83fQ+y8fIXfjENp1IT/7nnwH5XLZ0mq7JVha3yijAtqheWXQrwOvSZUNXB9nKEtNvEy4Rosv/IrNRFS17OpEHpjKptS/fvVh33aYB/PI29XxiHTS5npmqMYfTCY8ZEEyzh4QWaHFyWggSQN+CrhIzhmqlkhqb6arvBiPs3786XCgNiUiwP5QtYzSZO1YUltVMPOZWKV3Lra6y1B1vKryz12CvkFyGq0+FJIdLxUxTjAhdGQaB5B3RJglA8zo5b+V/yBGz7MxlRTG1huuuxehNEWOJxDPGuHayhThuVgbN/8mCi+Y0m8aVqGNIXIPfhUdFzLylM7oy6XQYArMFU7rl7aZBuKZnQIfvQF5bF0AfVCEib/MRI0UM/6nYpwtT1CicymdxKuM2tVjL+03luixM5LCmsstm9Ap03hNffLpmTRWuxgef49s5t6Dda3V5R3QeMt2xpuNt5wPIU0JMPaeFIInCIN99R0FM4zs2kSx+KAaf91bC3NiHM/sqWo+Fi2olxHitdb5ag2WEDyohKmEgDcBvFZqVMMicU205EgVHvuBPWvV13hz5OPVb3a9i8oao33L6Tfta8WS7aunCAHoxoi3s07RnxUcyi1b+elxGt0ijrvVR3+KZNQqS34o4YdkuegC0Y41qYIooQdow5hIpAnkhRHEcR7ItjptduFira5dYRyVG0ipWlxqUfNdE8Z1G2KfFuAiuY8vErWspl/flDUgGeSpvUwu86Ty6WfF0AqNI2tW4fISI7tymuMukxwdbI4BrmxqPus/ViOKDmr+t7+wzYLoWNe2+2BqXkt/lPSY+RmsZfLwsvl4THa576/usqGGK7NPFmxcQxZ8znYiU4zeNKdFhm5wlg3bFXTPkByDKcloIEh9ixtPuWclUvw3UY4YPnFHFgoEe9Eqru/dXo8xK6r7I3opag2G8FmcGbuuMALGKu5h5y9ZqJuLQGUpqAeaqbHy9IzrxYBMi5njMU2NGmm5aQCJG5HBiH+NKhcQ4GG/FI4vTKAgQw9zqBPhWPJb0h62uesLDk+SrNHhe0xItG4IRC3JjS8Qc8Dq4IsB164/oBrujGa8QJwJVgOgqHZ0r3+YUz4xZ4YoBEuhbf224PDQ2o6OIcSDCm/pkZ9ORX83dFKeMECN5QWKfDcs3/bpvVYzYxJAr/e3gG5e+ldicj0VFb1GBYhMlSTxZcSp++6bHhs7DkU1Xe
2tYFAmvS4MN3/fUfD4G3UxbpnS0KwbLhvVJhy+qMMnmi3mGSPX8elFjKSc9ehZ0ai2rqYTzLqxDwmlQu4s5K0ge/I99AJIX9/ZPr3SGb0giIWYctXoMzlozY01K0wb3VyNtAehioBphbKIlakR88kddl0R4DXwRYeWB2brj5YJHZ1iphZyt4mynJbuVnmylkjG4mtPfhiwAmr+jIEjsZTldSpaoaY45z3nK5IJVZwCqaWwHk9EqSLvwaFrBojBIV4AXcYhV3n3IhGPan1nvSFOkCdFgEz8pMZyrwWcG1rPWOWKWPV+uJU7j0cqdvqBx5YNL2KhTlhY1iGVME1n4eCbKGkk2MWJ6F6ai1VzGdM2ulm0f497HQ6LbVlQQu7AZy+08wyaRlr1neq+Jem7Xf/Va5PPZ0udKf5H/Zel0OeFD0Tw3dXMz1ddyXD7ekt5KmOt+55qFNMW23pn7aILQMmcFicC38hRi5Ph4HZxx1CcbiBsMYSVsCpAg7WLVX40wONBTOC1nVCvor1YQhQyDAxwD1ShTIKizSVUlr4y8D7BX2qqXRC6Y1OPUsCKM2KduMwkRV8VtLjSzrckxyx8v7jvAwLgY2J5cj9x9i1mM5qzw0KdPoK7E3i7qdSfkW7FEZSBXFOLaxbG1BgMTcbDEk8GhH9wuyG+XPB+KZ0I0GsnCwiZ6XAJE5zWRRUkQBhkho3YXE2FYM83pXPcxz7XEmSpnH2zHuQwG9b3MfudbxeVjBaJBRITVxasTCLpn1PeZ1Q1a13tlynkNi2IqW3wN3iLGblFBAujXy7CJHpH21nOV9ZSI/bpjilyX7fxqGrKoXt1so4taD5jSaGr5L5NW2/bpFBG2hgATtvC6e2crY2wT0Kj7dDNDtr5b09aLbfI7LrY1NNvkeOR02xoqASAo0Z19uuEsnv5B7TTtr5M5L0hM3P70G+lLN9lg+L//4zJr+Ot/8sf0t6hQ/m37f6aGxLFTNZyqxRhoducS62hEYYDBgR4M9ETZrkKSsSGv2J79hLlt4lgb+lay1gxRprD5ljS/Qrkdo08tnJ3nS+140ZKuEWUat7K490KMMM7T/XJBax7M63J/540NX2z3RK1gZEwiRBYQrrEluv1hGBSIQ398ujdueXPS+OTziLhDca7M33ThRvFbnr5YDLgXmIRuJ9A9u6YZlwQ+gl0uA0xkryn7Hnu/NwqqGPE9fqq9IzrKGIfq8ab/voavS4wUed5MXYymClfjVXZbftybrYFK3a7uK5PWItuTfeZ7WUbY+Db6FaETni9TvEB2wgG90PHrxqWLPy9y7J5+GkNClKVrguSRRx7BAw88gNHRUaxevRoPP/ww1qxZ09FzRGFg7M8oG/g+DA70YF5fBVHY8pjIrQ1i7ImgWuE4sy+ZsevIWA3/qDW0Badc+Il4xdS3vU3vDAD9tMWiz7+jcPQx/NVWFld8LkxpMs37rg7klxcVEx/X4oiuvqu27llyK1F2n7nPvv4+iPDm1nETqjGtnicVKDyZdUs21NXZtbIruotvv0qUKSLDNSGEijqjXeIlEWkFxMASISyCIEg9KamHpOm5CViQG78SM57r2pWKUEMFWhSXV0CNV80jMRlGFPKmVzRIx6EB+mm81XjFMfK+5B2NMs9orRGn+22eDtMChq5v3f0wGWpTRZm4fYxRm7gwrSDu89uW5k6KEdd9sZVV9ndDV1eJ4/KLxOrOWZSyosM3rnJip5zg16Grg13iV02f6xlTF/oF1MkxAmO5EUnhzGPYzF5TAXlILOckrHRFkDzxxBPYsmULHn30UQwPD+Ohhx7C+vXrsWfPHixevLituMXYEUEYBPjfI39NW8SPjNVw7FQtNfwBaKdbVfnhf/s4gMQroqvEhKckCgOpi1ESZmyijiNjNQxUo1TUyMhGt/CsiFm1qlGYWXtDbuFX4yjqKi/T+uNTAdpaz3yJmRj0nVRGURgglkSIKk50abOlQy1s1QI42c5QxChThZ/sNfG9D
2oculYpGbFeiAmdGHEJDJtwcYmaIGxNEiD+o5m+MAjAmbSyb9oNC2mzmrogJEKz10T2mKjekqnAp1JWZ8KzfXTI+SwLmDQ+5ZlV0ycbGaZnRvdM267JR4zI2zslHjp1vMs41ZUfusk4TMe5wmW3T58Y0YUxeTlc++T9pmuQp5ztFJ0QIu2E82WqPIe+z5vpWLkcyI8XiXKeMNUWKmMnsJmvR4gZSlcEyYMPPoibb74ZN910EwDg0Ucfxa9+9Sv86Ec/wre//e224s4YdNK6FREC9CDpPiVeZvHyFZnCV4SNWdLVa7zWyCycKMainGpOIxyFAQ4encD7YzXM66uk22oNpu23KdJ2ZjOsWHvDto6GzWjNxivSblfqRQtrl/dFF17nDVC3tb5lAz2/qKKuRdN0HXLadGNFkt/mFazV36ro0bV+ifttS4+tNUolCoJkxfYw7xmRUcWILERs65UITF21TGNKdDNzZQauN6cYRswz4iTxeDS7YumEiaY7l06UtIOP0I5Z1hMqFq2U6WkWOAuaY8xi1vKQiPNEYd5z21+NMmWLHM60hpLIz1qzlSIphypp44gQKHILKNCa0tzkRbK9v+00VnTquHbEh/pfJ0JsYYqKEJNBXaa13mc/kL0O1Qi1Cw27p8NVntrEQ96r4lfntiMcfJ4Fgc5rr1utXq2v5f2dFiU6oWxqfNMdp9oEqjBJygXR0yMpLzJlULPckHtPyHWTKQ2zwUNCzEymXZDUajXs2rULW7duTbeFYYh169ZhZGREe8zk5CQmJyfT/8ePH7eeQ7woYRCgJwwz08P2hCHOqEaoxxxHTtZSg8GXaiVMDRIxPfCkJEjGJhoYm2wgCpLfURjgyLEJTI7XMdnfk3g9pDhSYyEMMC61nvZXI1SReEV6lMJbboEWK6nqCkhA36fUF1cF5LPP1roivoXnQ14JW5eO5DvOtTC39uvXaXGlt8iCcbbrM+WBT5pU70yZxcd0cNaaWUwWG9YuXprwunA6ZGHA4pY3JGiO+VEHrQfNSQo4A3gIBGJGNR4gCIRYaXbbkoSJ/EakM4F5ekmKtjQCSIVILWZo1GIwxsFiBsY4OOPgPJlgYKKZ1pgn+Tmvr4KF0rlUkSEaTHqQTYe61k4UBpltAjFzXMw46oyjHif/x+txs9EkaqY9TkVKrzSTjklEp/EbGjps99SXIsf4GvC+gsG3W0yR+JNt5m65OsruE9gGP7v227romgxOH5GiQ0x9XhSXSPJJh7rPdc904dR71Y74sBnzNjKNcGnZYLnuoPmOh9l6rSr9bvUMaAkTOWyaXkODnLae0nUvn2FQl62ZybQLkvfeew9xHGPJkiWZ7UuWLMHbb7+tPebee+/FPffck9uuEyYTJ0+kvxthCB61BEkQBGjEDHXGMRkzNMab3g3OnSJHjj+p2IHJyQbGxybQYBz1ZitlfSJGY7IBHgSoxxHqABoTk6iP1xDwHtQHklmSssYvUAdQrQRgYYDJsIqJoI6wHqI37kFDMkzqTcMDaLWQMq4zpFvdOoQRKYwk0WrKWLabkmlAN+P6wkjGtwIDkM4SJb7j1DhLvmuwDyINlf/532FumynNyXdekDDlXugKYdeq8+r5Ta34unOJiiI1NhviOzGCY84Rxwyctb5l4zj5VgUJmt9uMeIKZyJWrjMVJGIdFPE//W4e0xxHIupXJh0nwkbNlrcgaI0rCcJkv6iYeRDknq/8t3RNnoZBvcZQbzA0YobaZAOcc8QNlt4jcd9EesOgijqqqLEIk30MLAoR1kOwStIFUxUkMiFaRodcdoltQGtBUMaBRvP5nWgwMCSipB4n4mOiljwvjZhhspZMyFlvChPZQ5vEpX/mRVkib1PxvY9lj9F1A4nCIDeAVo4zVow1+X1kynsaG8Kp72zHBYnFoPTx+vka1zZ0RqXtXbetQdEpxH3RTSGrO3+R+1j2nsn3Sa5T03SVaOyT0y3SmV9EOPldl7aFHoJErrdFPSLSqdoH4
jp049FsdSGQf37Y5CkA9tkZu05c10zHMvXnJOzMilm2tm7dii1btqT/3333XaxatQrnnHNOx87xPzsWE0EQBEEQxOnLiRMnsGDBgm4nI0O1WsXQ0BBGdz/ZlfMPDQ2hWq125dyzgWkXJIsWLUIURTh06FBm+6FDhzA0NKQ9pre3F729ven/efPmYffu3Vi1ahX+/ve/Y/78+VOaZqI7HD9+HOeccw7l8RyF8nduQ/k796E8ntuUyV/OOU6cOIFly5ZNceqK09fXh/3796NWq3Xl/NVqFX19fV0592xg2gVJtVrFpZdeiu3bt+Oaa64BADDGsH37dmzevNkrjjAMcfbZZwMA5s+fTwXhHIfyeG5D+Tu3ofyd+1Aez22K5u9M84zI9PX1kSiYoXSly9aWLVtw44034rLLLsOaNWvw0EMP4eTJk+msWwRBEARBEARBnB50RZBcd911+Ne//oW77roLo6Oj+PjHP47nnnsuN9CdIAiCIAiCIIi5TdcGtW/evNm7i5aO3t5e3H333ZmxJcTcgvJ4bkP5O7eh/J37UB7PbSh/iekk4DN6bjaCIAiCIAiCIOYyM38FG4IgCIIgCIIg5iwkSAiCIAiCIAiC6BokSAiCIAiCIAiC6BokSAiCIAiCIAiC6BqzVpA88sgj+PCHP4y+vj4MDw/jD3/4Q7eTRJTge9/7HoIgyHwuvPDCdP/ExAQ2bdqED37wg5g3bx6++MUv4tChQ11MMWHjpZdewuc//3ksW7YMQRDgF7/4RWY/5xx33XUXli5div7+fqxbtw7vvPNOJsz777+PG264AfPnz8fg4CC++tWvYmxsbBqvgrDhyuOvfOUruXd6w4YNmTCUxzOTe++9F5/61Kdw5plnYvHixbjmmmuwZ8+eTBifMvnAgQO46qqrMDAwgMWLF+POO+9Eo9GYzkshDPjk8ec+97ncO3zrrbdmwlAeE51mVgqSJ554Alu2bMHdd9+NP/3pT1i9ejXWr1+Pw4cPdztpRAk+9rGP4eDBg+nn5ZdfTvd94xvfwC9/+Us89dRTePHFF/HPf/4T1157bRdTS9g4efIkVq9ejUceeUS7//7778cPfvADPProo9i5cyfOOOMMrF+/HhMTE2mYG264AW+99Ra2bduGZ599Fi+99BJuueWW6boEwoErjwFgw4YNmXf68ccfz+ynPJ6ZvPjii9i0aRNeeeUVbNu2DfV6HVdeeSVOnjyZhnGVyXEc46qrrkKtVsPvf/97/OQnP8Fjjz2Gu+66qxuXRCj45DEA3HzzzZl3+P7770/3UR4TUwKfhaxZs4Zv2rQp/R/HMV+2bBm/9957u5gqogx33303X716tXbf0aNHeU9PD3/qqafSbX/5y184AD4yMjJNKSTKAoA//fTT6X/GGB8aGuIPPPBAuu3o0aO8t7eXP/7445xzznfv3s0B8D/+8Y9pmN/85jc8CAL+7rvvTlvaCT/UPOac8xtvvJFfffXVxmMoj2cPhw8f5gD4iy++yDn3K5N//etf8zAM+ejoaBrmhz/8IZ8/fz6fnJyc3gsgnKh5zDnnn/3sZ/ntt99uPIbymJgKZp2HpFarYdeuXVi3bl26LQxDrFu3DiMjI11MGVGWd955B8uWLcPKlStxww034MCBAwCAXbt2oV6vZ/L6wgsvxLnnnkt5PQvZv38/RkdHM/m5YMECDA8Pp/k5MjKCwcFBXHbZZWmYdevWIQxD7Ny5c9rTTJRjx44dWLx4MS644ALcdtttOHLkSLqP8nj2cOzYMQDAwoULAfiVySMjI7j44ouxZMmSNMz69etx/PhxvPXWW9OYesIHNY8FP/3pT7Fo0SJcdNFF2Lp1K06dOpXuozwmpoKurdRelvfeew9xHGdeBABYsmQJ3n777S6liijL8PAwHnvsMVxwwQU4ePAg7rnnHnzmM5/Bm2++idHRUVSrVQwODmaOWbJkCUZHR7uTYKI0Is90767YNzo6isWLF2f2VyoVLFy4kPJ8l
rBhwwZce+21WLFiBfbt24fvfOc72LhxI0ZGRhBFEeXxLIExhq9//ev49Kc/jYsuuggAvMrk0dFR7Tsu9hEzB10eA8CXv/xlLF++HMuWLcMbb7yBb33rW9izZw9+/vOfA6A8JqaGWSdIiLnFxo0b09+XXHIJhoeHsXz5cjz55JPo7+/vYsoIgijDl770pfT3xRdfjEsuuQTnnXceduzYgSuuuKKLKSOKsGnTJrz55puZMX3E3MKUx/J4rosvvhhLly7FFVdcgX379uG8886b7mQSpwmzrsvWokWLEEVRblaPQ4cOYWhoqEupIjrF4OAgPvrRj2Lv3r0YGhpCrVbD0aNHM2Eor2cnIs9s7+7Q0FBucopGo4H333+f8nyWsnLlSixatAh79+4FQHk8G9i8eTOeffZZvPDCC/jQhz6Ubvcpk4eGhrTvuNhHzAxMeaxjeHgYADLvMOUx0WlmnSCpVqu49NJLsX379nQbYwzbt2/H2rVru5gyohOMjY1h3759WLp0KS699FL09PRk8nrPnj04cOAA5fUsZMWKFRgaGsrk5/Hjx7Fz5840P9euXYujR49i165daZjnn38ejLG0UiRmF//4xz9w5MgRLF26FADl8UyGc47Nmzfj6aefxvPPP48VK1Zk9vuUyWvXrsWf//znjOjctm0b5s+fj1WrVk3PhRBGXHms4/XXXweAzDtMeUx0nG6Pqi/Dz372M97b28sfe+wxvnv3bn7LLbfwwcHBzIwPxOzgjjvu4Dt27OD79+/nv/vd7/i6dev4okWL+OHDhznnnN9666383HPP5c8//zx/9dVX+dq1a/natWu7nGrCxIkTJ/hrr73GX3vtNQ6AP/jgg/y1117jf/vb3zjnnN933318cHCQP/PMM/yNN97gV199NV+xYgUfHx9P49iwYQP/xCc+wXfu3Mlffvllfv755/Prr7++W5dEKNjy+MSJE/yb3/wmHxkZ4fv37+e//e1v+Sc/+Ul+/vnn84mJiTQOyuOZyW233cYXLFjAd+zYwQ8ePJh+Tp06lYZxlcmNRoNfdNFF/Morr+Svv/46f+655/hZZ53Ft27d2o1LIhRcebx3717+/e9/n7/66qt8//79/JlnnuErV67kl19+eRoH5TExFcxKQcI55w8//DA/99xzebVa5WvWrOGvvPJKt5NElOC6667jS5cu5dVqlZ999tn8uuuu43v37k33j4+P86997Wv8Ax/4AB8YGOBf+MIX+MGDB7uYYsLGCy+8wAHkPjfeeCPnPJn697vf/S5fsmQJ7+3t5VdccQXfs2dPJo4jR47w66+/ns+bN4/Pnz+f33TTTfzEiRNduBpChy2PT506xa+88kp+1lln8Z6eHr58+XJ+88035xqLKI9nJrp8BcB//OMfp2F8yuS//vWvfOPGjby/v58vWrSI33HHHbxer0/z1RA6XHl84MABfvnll/OFCxfy3t5e/pGPfITfeeed/NixY5l4KI+JThNwzvn0+WMIgiAIgiAIgiBazLoxJARBEARBEARBzB1IkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TX+P3fBbAqOxeoCAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import netCDF4 as nc\n", + "from matplotlib.animation import FuncAnimation\n", + "\n", + "data = nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "vgos = data['vgos']\n", + "ugos = data['ugos']\n", + "print(vgos.shape, ugos.shape)\n", + "\n", + "time_step = 500 \n", + "\n", + "data_slice = ugos[time_step, :, :]\n", + "\n", + "plt.figure(figsize=(10, 8))\n", + "\n", + "im = plt.imshow(data_slice, origin='lower', cmap='RdBu_r') \n", + "plt.colorbar(im, label='m/s')\n", + "plt.title(f\"Surface Geostrophic Velocity (ugos) - Time Step {time_step}\")\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "7983ba78-f67e-4f10-8f71-101cb3d90d0e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ugos的全局最大值:2.4449\n", + "ugos的全局最小值:-1.9527\n", + "ugos的数值范围:4.3976\n" + ] + } + ], + "source": [ + "max_val = np.nanmax(ugos)\n", + "min_val = np.nanmin(ugos)\n", + "range_val = max_val - min_val\n", + "\n", + "print(f\"ugos的全局最大值:{max_val:.4f}\")\n", + "print(f\"ugos的全局最小值:{min_val:.4f}\")\n", + "print(f\"ugos的数值范围:{range_val:.4f}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "fd3ece8d-eddd-4a48-85d9-c78617ba9e62", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "数据安全检查结果:\n", + "输入数据范围: [-1.79, 2.11]\n", + "NaN值存在性: False\n", + "Inf值存在性: False\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import torch\n", + "from torch.utils.data import Dataset, DataLoader\n", + "\n", + "# 加载原始数据\n", + "data = nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "\n", + "# 读取数据并处理缺失值\n", + "def load_var(var):\n", + " \"\"\"加载变量并处理填充值\"\"\"\n", + " arr = var[:]\n", + " if '_FillValue' in var.ncattrs():\n", + " arr = np.ma.masked_values(arr, 
var._FillValue).filled(np.nan)\n", + " return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) # 转换为Tensor并替换NaN\n", + "\n", + "ugos = load_var(data['ugos']) # (11322, 256, 256)\n", + "vgos = load_var(data['vgos']) # (11322, 256, 256)\n", + "\n", + "# 合并UV通道并调整维度\n", + "combined_data = torch.stack([ugos, vgos], dim=1) # (11322, 2, 256, 256)\n", + "\n", + "# 数据集划分\n", + "train_data = combined_data[:10000] # 前10000时间步作为训练\n", + "test_data = combined_data[10000:] # 剩余1322时间步作为测试\n", + "\n", + "class SafeSpatioTemporalDataset(Dataset):\n", + " def __init__(self, data, input_steps=10, output_steps=10):\n", + " \"\"\"\n", + " 增强型数据集类,包含数据安全检查\n", + " data : 预处理后的Tensor [总时间步, 2, 256, 256]\n", + " \"\"\"\n", + " self.data = data\n", + " self.seq_len = input_steps + output_steps\n", + " self.num_samples = len(data) - self.seq_len + 1\n", + "\n", + " # 数据完整性检查\n", + " assert not torch.isnan(self.data).any(), \"数据包含NaN值\"\n", + " assert self.num_samples > 0, \"有效样本数必须大于0\"\n", + "\n", + " def __len__(self):\n", + " return self.num_samples\n", + "\n", + " def __getitem__(self, index):\n", + " # 生成带安全检查的滑动窗口\n", + " if index >= self.num_samples:\n", + " raise IndexError(f\"索引越界,最大允许索引为{self.num_samples-1}\")\n", + " \n", + " window = self.data[index:index+self.seq_len]\n", + " window = torch.nan_to_num(window, nan=0.0) # 二次保险\n", + " \n", + " return window[:INPUT_STEPS], window[INPUT_STEPS:]\n", + "\n", + "# 参数配置\n", + "INPUT_STEPS = 10\n", + "OUTPUT_STEPS = 10\n", + "BATCH_SIZE = 32\n", + "\n", + "# 创建数据集\n", + "train_dataset = SafeSpatioTemporalDataset(train_data, INPUT_STEPS, OUTPUT_STEPS)\n", + "test_dataset = SafeSpatioTemporalDataset(test_data, INPUT_STEPS, OUTPUT_STEPS)\n", + "\n", + "# 数据加载器配置\n", + "train_loader = DataLoader(\n", + " train_dataset,\n", + " batch_size=BATCH_SIZE,\n", + " shuffle=True,\n", + " num_workers=4,\n", + " pin_memory=True,\n", + " persistent_workers=True # 保持worker进程存活\n", + ")\n", + "\n", + "test_loader = DataLoader(\n", + " 
test_dataset,\n", + " batch_size=BATCH_SIZE,\n", + " shuffle=False,\n", + " num_workers=2,\n", + " pin_memory=True\n", + ")\n", + "\n", + "# 验证数据完整性\n", + "sample_input, sample_target = next(iter(train_loader))\n", + "print(\"数据安全检查结果:\")\n", + "print(f\"输入数据范围: [{sample_input.min():.2f}, {sample_input.max():.2f}]\")\n", + "print(f\"NaN值存在性: {torch.isnan(sample_input).any().item()}\")\n", + "print(f\"Inf值存在性: {torch.isinf(sample_input).any().item()}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "979c7023-048a-4561-83a5-237f1ec10e1c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABv0AAASlCAYAAACLEhU2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd5wV1fn/P+ecuWV32V16UwTFhsZesBFRjGI09sTYUaNRMUVNjBqDoIkkxiSaoqaYaIz4xYYaNSoaQWOJP00MVlRULIgCUrfdO3PO749T5szce5eFvcuy8Lxfr33t7r1zz5wpd+aZ5/MUppRSIAiCIAiCIAiCIAiCIAiCIAiCIAiix8K7ewIEQRAEQRAEQRAEQRAEQRAEQRAEQXQOEv0IgiAIgiAIgiAIgiAIgiAIgiAIoodDoh9BEARBEARBEARBEARBEARBEARB9HBI9CMIgiAIgiAIgiAIgiAIgiAIgiCIHg6JfgRBEARBEARBEARBEARBEARBEATRwyHRjyAIgiAIgiAIgiAIgiAIgiAIgiB6OCT6EQRBEARBEARBEARBEARBEARBEEQPh0Q/giAIgiAIgiAIgiAIgiAIgiAIgujhkOhHEARBEARBEARBEARBEARBEARBED0cEv0IgiAIgiAIgiAIgiAIgiAIoosZMWIEJkyYsFafZYxh8uTJq11u8uTJYIx1eNwvf/nLOOuss9ZqTtXmpptuwmabbYa2trbungpB9FhI9CMIgiAIgiAIgiAIgiAIgiCqzhFHHIHa2lqsXLmy4jInnXQSstkslixZUtV1X3311bjvvvuqOmZ7vP/++2CMlf3Za6+91tk81oRnnnkGjz32GH7wgx+4115//XVMnjwZ77//ftXWM3bs2LL7Zfz48YnlJkyYgEKhgN///vdVWzdBbGwE3T0BgiAIgiAIgiAIgiAIgiAIYsPjpJNOwt///nfMmDEDp556asn7zc3NuP/++zF+/Hj069evquu++uqrcdxxx+Goo46q6rir44QTTsCXv/zlxGsDBgwAAMydOxecrz95OD//+c8xbtw4bLnllu61119/HVOmTMHYsWMxYsSIqq1r0003xdSpUxOvDR06NPF/Pp/Haaedhl/+8pf41re+tUYZiwRBaEj0IwiCIAiCIAiCIAiCIAiCIKrOEUccgfr6ekybNq2s6Hf//fejqakJJ510UjfMbs1pampCXV1du8vsuuuuOPnkk8u+l8vlumJaa8Vnn32Ghx56CDfddNM6WV9jY2PF/eLzta99Dddccw2efPJJHHjggetgZgSxYbH+hBUQBEEQBEEQBEEQBEEQBEEQGww1NTU45phj8MQTT+Czzz4reX/atGmor6/HEUcc
AQBYtmwZvvvd72LYsGHI5XLYcsst8bOf/QxSysTnpJS4/vrrscMOOyCfz2PAgAEYP348XnzxRQC6/11TUxNuvfVWV0rS76X33//+F4ceeigaGhrQq1cvjBs3Ds8//3xiHbfccgsYY5g9ezbOO+88DBw4EJtuummn9ke5nn4d3eZy/Otf/8Iee+yBfD6PkSNHrlFZzIceeghhGOKggw5yr91yyy346le/CgA44IAD3L6bNWtWh8dtjzAMsWrVqnaX2W233dC3b1/cf//9VVknQWxsUKYfQRAEQRAEQRAEQRAEQRAE0SWcdNJJuPXWW3HnnXfi/PPPd69//vnnePTRR3HCCSegpqYGzc3N2H///fHxxx/jm9/8JjbbbDM8++yzuPTSS/HJJ5/guuuuc58988wzccstt+DQQw/FN77xDYRhiKeffhrPP/88dt99d9x22234xje+gT333BNnn302AGDkyJEAgNdeew1jxoxBQ0MDLr74YmQyGfz+97/H2LFjMXv2bIwePTox//POOw8DBgzApEmT0NTUtNrtbW5uxuLFixOvNTY2IpPJlF22o9uc5pVXXsHBBx+MAQMGYPLkyQjDEFdccQUGDRq02jkCwLPPPot+/fph+PDh7rUvfvGL+Pa3v41f//rXuOyyyzBq1CgAcL9XrVqF1tbW1Y6dyWTQ2NiYeO2tt95CXV0dCoUCBg0ahLPOOguTJk0qu1923XVXPPPMMx3aDoIgkpDoRxAEQRAEQRAEQRAEQRAEQXQJBx54IIYMGYJp06YlRL+77roLxWLRlfb85S9/iXnz5uG///0vttpqKwDAN7/5TQwdOhQ///nPcdFFF2HYsGF48sknccstt+Db3/42rr/+ejfeRRddBKUUAODkk0/GOeecgy222KKkpOTll1+OYrGIf/3rX9hiiy0AAKeeeiq22WYbXHzxxZg9e3Zi+b59++KJJ56AEKJD23vFFVfgiiuuSLz25JNPYuzYsSXLdnSbyzFp0iQopfD0009js802AwAce+yx2GGHHTo0zzfffLOkZ98WW2yBMWPG4Ne//jW+9KUvlcz5/PPPx6233rrasffff/9EduDIkSNxwAEHYIcddkBTUxPuvvtu/PjHP8Zbb72F6dOnl3x+iy22wG233dah7SAIIgmJfgRBEARBEARBEARBEARBEESXIITA17/+dfzqV7/C+++/74SmadOmYdCgQRg3bhwALQKOGTMGffr0SWTKHXTQQfjpT3+Kp556CieddBLuueceMMZKhDVAl/VsjyiK8Nhjj+Goo45ygh8ADBkyBCeeeCL++Mc/YsWKFWhoaHDvnXXWWR0W/ADg7LPPdiUyLTvttFPZZTu6zeW249FHH8VRRx3lBD9AZ+QdcsghePjhh1c7zyVLlmCTTTbp6GYBAC6++OIO9eXr06dP4v+bb7458f8pp5yCs88+G3/84x9xwQUXYK+99ir5fEtLC5qbm1FbW7tGcySIjR0S/QiCIAiCIAiCIAiCIAiCIIgu46STTsKvfvUrTJs2DZdddhk++ugjPP300/j2t7/tBLW3334bc+bMwYABA8qOYXsCzps3D0OHDkXfvn3XeB6LFi1Cc3Mzttlmm5L3Ro0aBSklPvzwQ2y//fbu9c0333yN1rHVVlsl+uS1R0e3Oc2iRYvQ0tLisgN9ttlmmw6JfgBcZmRH2W677bDddtut0WcqcdFFF+GPf/wjHn/88RLRz85rdSIuQRClkOhHEARBEARBEARBEARBEARBdBm77bYbtt12W9xxxx247LLLcMcdd0Aplchik1LiS1/6Ei6++OKyY2y99dbraroJampqumzs7tzmfv36YenSpWv0meXLl6OlpWW1y2Wz2dWKsrZs6eeff17y3tKlS1FbW9ul+54gNlRI9CMIgiAIgiAIgiAIgiAIgiC6lJNOOgk/+tGPMGfOHEybNg1bbbUV9thjD/f+yJEjsWrVqtVmyY0cORKPPvooPv/883aFpXJZ
YgMGDEBtbS3mzp1b8t6bb74JznnFHnpdQUe3Oc2AAQNQU1ODt99+u+S9cttWjm233Rb33HNPyevtZdd95zvfWauefuV49913AaBsluN7772HUaNGrXY9BEGUwrt7AgRBEARBEARBEARBEARBEMSGjc3qmzRpEl5++eWSXnVf+9rX8Nxzz+HRRx8t+eyyZcsQhiEA4Nhjj4VSClOmTClZzi9XWVdXh2XLliXeF0Lg4IMPxv3334/333/fvf7pp59i2rRp2G+//RL9/Lqajm5zGiEEDjnkENx333344IMP3OtvvPFG2bHKsffee2Pp0qVOfLPU1dW59ae5+OKLMXPmzNX+/OIXv3CfWbFiBdra2hLjKKXw4x//GABwyCGHlKznP//5D/bZZ58ObQdBEEko048gCIIgCIIgCIIgCIIgCILoUjbffHPss88+uP/++wGgRPT7/ve/jwceeACHH344JkyYgN122w1NTU145ZVXcPfdd+P9999H//79ccABB+CUU07Br3/9a7z99tsYP348pJR4+umnccABB+D8888HoEuKPv744/jlL3+JoUOHYvPNN8fo0aPx4x//GDNnzsR+++2H8847D0EQ4Pe//z3a2tpwzTXXrNN90tFtLseUKVPwyCOPYMyYMTjvvPMQhiF+85vfYPvtt8ecOXNWu+7DDjsMQRDg8ccfx9lnn+1e33nnnSGEwM9+9jMsX74cuVwOBx54IAYOHLhWPf3+85//4IQTTsAJJ5yALbfcEi0tLZgxYwaeeeYZnH322dh1110Ty7/00kv4/PPPceSRR67RegiC0JDoRxAEQRAEQRAEQRAEQRAEQXQ5J510Ep599lnsueee2HLLLRPv1dbWYvbs2bj66qtx11134a9//SsaGhqw9dZbY8qUKWhsbHTL/uUvf8GOO+6Im2++Gd///vfR2NiI3XffPZEd9stf/hJnn302Lr/8crS0tOC0007D6NGjsf322+Ppp5/GpZdeiqlTp0JKidGjR+Nvf/sbRo8evc72xZpuc5odd9wRjz76KC688EJMmjQJm266KaZMmYJPPvmkQ6LfoEGD8OUvfxl33nlnQvQbPHgwbrrpJkydOhVnnnkmoijCk08+iYEDB67VNg4fPhxjxozBjBkzsHDhQnDOMWrUKNx0002J9VruuusubLbZZjjwwAPXan0EsbHDlJ/zTBAEQRAEQRAEQRAEQRAEQRDEBs/TTz+NsWPH4s0338RWW23V3dNBW1sbRowYgUsuuQTf+c53uns6BNEjoZ5+BEEQBEEQBEEQBEEQBEEQBLGRMWbMGBx88MHrvKxpJf7yl78gk8ngnHPO6e6pEESPhTL9CIIgCIIgCIIgCIIgCIIgCIIgCKKHQ5l+BEEQBEEQBEEQBEEQBEEQBEEQBNHDIdGPIAiCIIgu56mnnsJXvvIVDB06FIwx3Hfffav9zKxZs7Drrrsil8thyy23xC233FKyzO9+9zuMGDEC+Xweo0ePxgsvvFD9yRMEQRAEQaxnkG1FEARBEARBlINEP4IgCIIgupympibstNNO+N3vfteh5d977z0cdthhOOCAA/Dyyy/ju9/9Lr7xjW/g0UcfdctMnz4dF154Ia644gr85z//wU477YRDDjkEn332WVdtBkEQBEEQxHoB2VYEQRAEQRBEOainH0EQBEFsQLS2tqJQKHT5epRSYIwlXsvlcsjlcqv9LGMMM2bMwFFHHVVxmR/84Ad46KGH8Oqrr7rXvv71r2PZsmV45JFHAACjR4/GHnvsgd/+9rcAACklhg0bhm9961u45JJL1mKrCIIgCIIgkpBtRbbV2iClxIIFC1BfX19yXAmCIAiCINYGpRRWrlyJoUOHgvPK+XzBOpwTQRAEQRBdSGtrK/rV9EIzoi5fV69evbBq1arEa1dccQUmT55clfGfe+45HHTQQYnXDjnkEHz3u98FABQKBbz00ku49NJL3fuccxx00EF47rnnqjIHgiAIgiA2bsi2
IttqbVmwYAGGDRvW3dMgCIIgCGID5MMPP8Smm25a8X0S/QiCIAhiA6FQKKAZEU7CJsh2YQXvAiRuX/UxPvzwQzQ0NLjXOxKJ3lEWLlyIQYMGJV4bNGgQVqxYgZaWFixduhRRFJVd5s0336zaPAiCIAiC2Hgh24psq7Wlvr4eAEqOKUEQBEEQxNqyYsUKDBs2zNkZlSDRjyAIgiA2MGrAkWVd55gSpjB4Q0MDOTEIgiAIgtjgIduKWFNsSU86pgRBEARBVJvVlQ4n0Y8gCIIgiPWOwYMH49NPP0289umnn6KhoQE1NTUQQkAIUXaZwYMHr8upEgRBEARBrPeQbUUQBEEQBLFx0HWhagRBEARBdAuCsS7/6Wr23ntvPPHEE4nXZs6cib333hsAkM1msdtuuyWWkVLiiSeecMsQBEEQBEFUA7KtyLYiCIIgCILoKZDoRxAEQRBEl7Nq1Sq8/PLLePnllwEA7733Hl5++WV88MEHAIBLL70Up556qlv+nHPOwbvvvouLL74Yb775Jm644QbceeeduOCCC9wyF154If74xz/i1ltvxRtvvIFzzz0XTU1NOP3009fpthEEQRAEQaxryLYiCIIgCIIgykHlPQmCIAiC6HJefPFFHHDAAe7/Cy+8EABw2mmn4ZZbbsEnn3zinFQAsPnmm+Ohhx7CBRdcgOuvvx6bbrop/vSnP+GQQw5xyxx//PFYtGgRJk2ahIULF2LnnXfGI488gkGDBq27DSMIgiAIgugGyLYiCIIgCIIgysGUUqq7J0EQBEEQROdZsWIFGhsbcS7fDDnWdcn8bUriRvkBli9fjoaGhi5bD0EQBEEQRHdCthWxtthzh44pQRAEQRDVoqP2BZX3JAiCIAiCIAiCIAiCIAiCIAiCIIgeDpX3JAiCIIgNDMEYBGNdNz66bmyCIAiCIIj1DbKtCIIgCIIgiJ4CZfoRBEEQBEEQBEEQBEEQBEEQBEEQRA+HMv0IgiAIYgNDMP3TZeN33dAEQRAEQRDrHWRbEQRBEARBED0FyvQjCIIgCIIgCIIgCIIgCIIgCIIgiB4OZfoRBEEQxAYG9Z0hCIIgCIKoHmRbEQRBEARBED0FyvQjCIIgCIIgCIIgCIIgCIIgCIIgiB4OZfoRBEEQxAYG9Z0hCIIgCIKoHmRbEQRBEARBED0FyvQjCIIgCIIgCIIgCIIgCIIgCIIgiB4OZfoRBEEQxAYG9Z0hCIIgCIKoHmRbEQRBEARRDRYuXIhBgwaBdaFdQRCU6UcQBEEQBEEQBEEQBEEQBEEQBNGFvPbaa/jZz37W3dMgNnAo048gCIIgNjAYujaqh+LRCIIgCILYmCDbiiAIgiCIarDZZpvhoIMOwjbbbIOjjz66u6dDbKBQph9BEARBEARBEARBEARBEARBEEQXsummmwIATj75ZPz3v//t5tmsW5YuXQopZXdPY6OARD+CIAiC2MCwfWe68ocgCIIgCGJjgWyrnsPUqVOxxx57oL6+HgMHDsRRRx2FuXPnJpYZO3YsGGOJn3POOSexzAcffIDDDjsMtbW1GDhwIL7//e8jDMN1uSkEQRDrBCkl/ve//+HPf/4zCoXCGn32mWeeQUtLSxfNbMOkpqYGAwYMQHNzM77yla9gwYIF3T2ldUJTUxPOOusscE5y1LqAynsSBEEQBEEQBEEQBEEQPZ7Zs2dj4sSJ2GOPPRCGIS677DIcfPDBeP3111FXV+eWO+uss3DllVe6/2tra93fURThsMMOw+DBg/Hss8/ik08+wamnnopMJoOrr756nW4PQRAEACilMG/ePHz00UeJn8MOOwyHHnroGo/30UcfYebMmZg5cyaeeOIJLF++HDNnzkQ2m12jcaSU2GeffXDPPfdgiy22WON59CSUUli0aBEGDhzY6bE222wzLFq0CI2Njbj//vtx7rnnVmGG6y+FQgHHHHMMZfmtQ0haJQiCIIgNDMG6/ocgCIIgCGJjgWyrnsMjjzyCCRMmYPvt
t8dOO+2EW265BR988AFeeumlxHK1tbUYPHiw+2loaHDvPfbYY3j99dfxt7/9DTvvvDMOPfRQXHXVVfjd735XMQumra0NK1asSPx0JVJKtLa2Vm28pqamqo1Fc1t7qpkxVSgUoJSqylhRFFV1vzU3N1dtLKC6+219nRtjDPfddx8OOOAAnHLKKbj88ssxatQojB8/fq3GW7JkCS666CLccccdWLRoEW6//XaMGTNmjcfZd999sWjRIuy111545ZVX1mouPQXGGL797W/j7bff7vRYX/ziFzFx4kQsW7YM3/jGN6owu+qyaNEivPXWW1UZSymFiy66CI899hi22267qoxZLBYxffr0qlzjqn1fqPY1ZG0h0Y8gCIIgCIIgCIIgCILY4Fi+fDkAoG/fvonXb7/9dvTv3x9f+MIXcOmllyacdM899xx22GEHDBo0yL12yCGHYMWKFXjttdfKrmfq1KlobGx0P8OGDeuCrYmZMWMGTjnllKqNN3r0aLz++utVGevee+/FqaeeWpWxAGDPPffEG2+8UZWx7rnnHpx22mlVGQsA9thjD7z55ptVGWv69OmYMGFCVcYCgKeffhqjR4/G//t//6/TY9199904/fTTqzArza677lq1Y/p///d/OOOMM6oyFgDssssuJSWB15Zp06ZVdW6bbLIJ6urq0Lt3b/zjH//AxIkTwdayPPSoUaOw8847AwCuu+46HHvssWs1DuccZ5xxBi688ELssMMOazWGz7333os777wTt956KxYsWFAVUWfGjBn49NNPOz0OAHzlK1/BypUrOz3ONddcgx/96Ec44YQTqiIMr1y5EkcccQRefPHFTo2jlMLf/vY37L777rj55ps7PS9Ai6WTJ0/GZZddhj322KMqYz788MM48cQTsc8+++D555/v1Fi33XYbvvnNb1ZlXgCwww474N13363aeGsLU9UK+yAIgiAIoltZsWIFGhsbcWXtFsgz0WXraVURJjW/i+XLlyeiogmCIAiCIDYkyLbq2UgpccQRR2DZsmX417/+5V7/wx/+gOHDh2Po0KGYM2cOfvCDH2DPPffEvffeCwA4++yzMX/+fDz66KPuM83Nzairq8PDDz9ctpReW1sb2tra3P8rVqzAsGHDuuyYKqVQLBbXuBRfJdra2pDL5aoyVrXn1trainw+X5WxpJQIw3C9nBugs/OqNbfm5mZks1kEQec7O0kpEUURMplMFWZW3fOt2uNVe27VPKZKKUyZMgUnnngitt5666qNN3ny5E6NE4ZhVc4zAPje976HX/ziF8jn8xg3bhwefPDBTo/5zDPPoLGxEV/4wheqMMP1j2XLluHQQw/FggUL8M9//hMjR47s9JhRFGHRokUYPHhwFWYYo5Raa6Ha55lnnsHtt9+OPn36oG/fvjjxxBMxZMiQtR5vfb6GpLG26ersC+rpRxAEQRAEQRAEQRAEQWxQTJw4Ea+++mpC8AO0qGfZYYcdMGTIEIwbNw7z5s1ba2dpLpfrUidfGsZY1YQEAFWde7XnVk1RjXO+3s4NQFXn5vep7Cycc3BevWJx1f6uVHO8as+tmseUMYYrrriiKqKJP15nqZbgBwA77bQTAC2on3feeVUZc999961aqdv1ieeeew5bbbUVDj74YKxYsQJPPfUUhg8fXpWxhRBVF/wAVO3c3XfffbHvvvtWZSxg/b6GrC0k+hEEQRDEBkZX94bpujh3giAIgiCI9Q+yrXoe559/Ph588EE89dRT2HTTTdtddvTo0QCAd955ByNHjsTgwYPxwgsvJJaxpeG6wglKEATRUaolmnTVeJ3Fin4jR45c636F5VjftrOzvPjiizjhhBNQX1+PMAzx1FNPYejQod09LWI9gnr6EQRBEARBEARBEARBED0epRTOP/98zJgxA//85z+x+eabr/YzL7/8MgC40mB77703XnnlFXz22WdumZkzZ6KhoQHbbbddl8ybIAiCALbddltkMhlMnDixqtml
GxJKKVx00UWYP38+5s6dixtuuIEEP6IEyvQjCIIgiA0MwRhEF0ayCWxYUXIEQRAEQRDtQbZVz2HixImYNm0a7r//ftTX12PhwoUAgMbGRtTU1GDevHmYNm0avvzlL6Nfv36YM2cOLrjgAnzxi1/EjjvuCAA4+OCDsd122+GUU07BNddcg4ULF+Lyyy/HxIkT15uyXQRBEBsi2WwWe+65J04//fTunsp6y3333YennnoKANC/f3+88cYbGDNmTFXLrBI9H5LMCYLoNOeddx6+9KUvdfc0AACPPPIIevXqhUWLFnX3VAiCIAiCINaYF154AdlsFvPnz+/uqaBYLGLYsGG44YYbunsqBEEQHeLGG2/E8uXLMXbsWAwZMsT9TJ8+HYB2KD/++OM4+OCDse222+Kiiy7Csccei7///e9uDCEEHnzwQQghsPfee+Pkk0/GqaeeiiuvvLK7NosgCGKj4ZprrkHv3r27exrrJYVCARdffDH69++PX/ziF5g3bx7OO+88EvyIEkj0I4hOcMstt4AxhhdffLG7pwIAaG5uxuTJkzFr1qx1ts733nsPf/rTn3DZZZe51xYsWIDJkye7MinVYMKECWCMlfxsu+22ieXGjx+PLbfcElOnTq3augmip8FZ3HumK344BaMTBNFFkG0F/PCHP8QJJ5yA4cOHu9duuOEG3HLLLVVbx6xZs8raVYwxPP/88265TCaDCy+8ED/5yU/Q2tpatfUTRE+DbKueg1Kq7M+ECRMAAMOGDcPs2bOxZMkStLa24u2338Y111yDhoaGxDjDhw/Hww8/jObmZixatAjXXnstOVUJgiDWAfvss093T2G95Y477sDpp5+O9957DxdeeCFqamq6e0rEegpZLASxAdHc3IwpU6YAAMaOHbtO1nn99ddj8803xwEHHOBeW7BgAaZMmYIRI0Zg5513rtq6crkc/vSnPyVea2xsLFnum9/8Jr73ve9hypQpqK+vr9r6CYIgCILYuFjXttXLL7+Mxx9/HM8++2zi9RtuuAH9+/d3Tutq8e1vfxt77LFH4rUtt9wy8f/pp5+OSy65BNOmTcMZZ5xR1fUTBEEQBEEQBNExTjrpJApAIToEnSUEQaw1xWIRt99+O84555x1sr4gCHDyySevdrljjz0W3/rWt3DXXXeRc4ogCIIgiB7DX/7yF2y22WbYa6+91sn6xowZg+OOO67dZXr37o2DDz4Yt9xyC9lVBEEQBEEQBNFNkOBHdBQq70kQVWbChAno1asXPv74Yxx11FHo1asXBgwYgO9973uIosgt9/7774MxhmuvvRa/+tWvMHz4cNTU1GD//ffHq6++mhhz7NixZaPLJ0yYgBEjRrjxBgwYAACYMmWKK9E0efLkinOdPHkyWJmG9La01vvvv9/utv7rX//C4sWLcdBBB7nXZs2a5SLGTz/9dDePapWkiqIIK1asaHeZgQMHYscdd8T9999flXUSRE9DMNblPwRBEOuKnmJbvfjii2CM4dZbby1579FHHwVjDA8++GC723rffffhwAMPTNhnI0aMwGuvvYbZs2e7OVQz63DlypUIw7DdZb70pS/hX//6Fz7//POqrZcgehJkWxEEQRAEQRA9BZKHCaILiKIIhxxyCEaPHo1rr70Wjz/+OH7xi19g5MiROPfccxPL/vWvf8XKlSsxceJEtLa24vrrr8eBBx6IV155BYMGDerwOgcMGIAbb7wR5557Lo4++mgcc8wxAIAdd9yxqtvm8+yzz4Ixhl122cW9NmrUKFx55ZWYNGkSzj77bIwZMwZAXJO7ubkZzc3Nqx1bCIE+ffokXmtubkZDQwOam5vRp08fnHDCCfjZz36GXr16lXx+t912w3333deJrSMIgiAIYn2hJ9hWu+++O7bYYgvceeedOO200xLvTZ8+HX369MEhhxxScX0ff/wxPvjgA+y6666J16+77jp861vfQq9evfDDH/4QANx2SCk7LMQ1NjYik8kkXjv9
9NOxatUqCCEwZswY/PznP8fuu+9e8tnddtsNSik8++yzOPzwwzu0PoIgCIIgCIIgCGLdQ6IfQXQBra2tOP744/GjH/0IAHDOOedg1113xc0331zimHrnnXfw9ttvY5NNNgEAjB8/HqNHj8bPfvYz/PKXv+zwOuvq6nDcccfh3HPPxY477tihMpid5c0330Tfvn0TTc8HDRqEQw89FJMmTcLee+9dMo9rrrnG9cZpj+HDhycyDYcMGYKLL74Yu+66K6SUeOSRR3DDDTfgf//7H2bNmlWS4r7FFltg8eLF+OyzzzBw4MDObShB9DAE0z9dNn7XDU0QBFGWnmJbHX/88bj22muxdOlSF7xUKBQwY8YMHHPMMSWim8+bb74JANh8880Trx911FG4/PLL0b9//5I5fPDBByXLV+LJJ590GYLZbBbHHnssvvzlL6N///54/fXXce2112LMmDF49tlnEwFdgLarAOD1118n0Y/YKCHbiiAIgiAIgugpkOhHEF1Eus/dmDFjcNttt5Usd9RRRzmnFADsueeeGD16NB5++OE1ckx1B0uWLCnJxlsdp556Kvbbb7/VLldTU5P4f+rUqYn/v/71r2PrrbfGD3/4Q9x99934+te/nnjfzmvx4sUk+hEEQRDEBkBPsK2OP/54TJ06Fffeey/OPPNMAMBjjz2GZcuW4fjjj2/3s0uWLAGANbKtBg8ejJkzZ3Zo2Z122sn9vc8++7gqDABwxBFH4LjjjsOOO+6ISy+9FI888kjis75dRRAEQRAEQRAEQay/kOhHEF1APp93PWAsffr0wdKlS0uW3WqrrUpe23rrrXHnnXd22fyqiVJqjZbfYostXLR4Z7ngggvwox/9CI8//niJ6GfnVa5nIUFs6FA0OkEQGxo9xbbaaaedsO2222L69OlO9Js+fTr69++PAw88sENjrIltlc/nE72VO8OWW26JI488Evfeey+iKIIQ8dWe7CpiY4dsK4IgCIIgCKKnQKIfQXQBvpOkGjDGyjqAoijq9Ljl6Oi4/fr1K+tsa49Vq1Zh1apVq11OCFHi3EtTU1ODfv36le1lY+fVv3//NZofQRAEQRDrHz3FtgJ0tt9PfvITLF68GPX19XjggQdwwgknlJQiT9OvXz8AWCPbKooiLFq0qEPL9u3bF9lstt1lhg0bhkKhgKampkT5drKrCIIgCIIgCIIgegYk+hFEN/P222+XvPbWW29hxIgR7v8+ffrg3XffLVlu/vz5if/XNPralmpatmwZevfuXXHcSmy77ba4/fbbsXz5cjQ2NnZoHtdee+1a9fQrx8qVK7F48eKy4uB7772H/v37r1Y4JIgNEcEYRBdmYwhQpgdBEOsv3WlbAVr0mzJlCu655x4MGjQIK1asKKlIUI5tt90WgLZh0lSax4cffrhWPf0q8e677yKfz6NXr16J1+2cRo0a1aF1EcSGBtlWBEEQBEEQRE+BRD+C6Gbuu+8+fPzxx673zAsvvIB///vf+O53v+uWGTlyJB5++GEsWrTIiVj/+9//8Mwzz2DYsGFuudraWgBaxOsII0eOBAA89dRTOOKIIwAATU1NuPXWWzv0+b333htKKbz00kuJklV1dXUV57E2Pf1aW1tRLBZRX1+fWOaqq66CUgrjx48v+fxLL72Evffeu0PbQRAEQRDEhkN32laAFsZ22GEHTJ8+HYMGDcKQIUPwxS9+cbWf22STTTBs2DC8+OKLJe/V1dWVncPa9vTzt9vyv//9Dw888AAOPfRQcM4T77300ktgjJFtRRAEQRDEBo1SisqZEx1i6dKlePPNN5HJZLD77rt393TWa5YsWeKqmhDrBhL9CKKb2XLLLbHffvvh3HPPRVtbG6677jr069cPF198sVvmjDPOwC9/+UsccsghOPPMM/HZZ5/hpptuwvbbb48VK1a45WpqarDddtth+vTp2HrrrdG3b1984QtfwBe+8IWy6z744IOx2Wab4cwzz8T3v/99CCHw
5z//GQMGDMAHH3yw2rnvt99+6NevHx5//PGE6Ddy5Ej07t0bN910E+rr61FXV4fRo0dj8803X6uefgsXLsQuu+yCE044wUXBP/roo3j44Ycxfvx4HHnkkYnlP/vsM8yZMwcTJ05co/UQxIaCQBf3nVmzVp4EQRDrlO60rSzHH388Jk2ahHw+jzPPPLNERKvEkUceiRkzZpQ4nHbbbTfceOON+PGPf4wtt9wSAwcOxIEHHrjWPf2OP/541NTUYJ999sHAgQPx+uuv4w9/+ANqa2vx05/+tGT5mTNnYt9996WHdWKjhWwrgiB6GlJKSClXW16cSHLPPffg6KOPrlpp+ZaWlkRQ+9rQ1taGm266CSeeeOJGU81q7ty5ePvtt3H44Yd391QSzJs3D9/85jfxyiuv4LPPPsPYsWNx3333dWrM5uZmF2i4PqGUwpw5cwAkgwfXhkmTJuFHP/oRBg8eXI2pVZ25c+dim222qcpYb7/9Np5//nn897//xVlnndVtlVI69vRJEESXceqpp+Jb3/oWfvvb3+InP/kJtt9+e/zzn//EkCFD3DKjRo3CX//6VyxfvhwXXnghHnjgAdx2223YddddS8b705/+hE022QQXXHABTjjhBNx9990V153JZDBjxgyMHDkSP/rRj/DrX/8a3/jGN3D++ed3aO7ZbBYnnXQS7rrrrpJxb731VgghcM455+CEE07A7NmzO7hHSunduzcOP/xwzJw5E5deeikuvvhizJ8/H1dffTUeeOCBEkfavffei1wuh6997WtrvU6CIAiCIHom3WlbWY4//nhIKdHc3Izjjz++w3M/44wz8PHHH+OZZ55JvD5p0iR8+ctfxjXXXIMTTjgBV155ZYfHLMdRRx2FxYsX45e//CXOO+88TJ8+HccccwxefPHFkgfT5cuX47HHHsOECRM6tU6CIAiiesyaNQtXXHFF1cY7/fTTy5aXXhtmzZqFyZMnV2UsAJgwYULV5vbUU09VdW6nnXZah9ujrI4nn3yyQ61QOsrcuXOx77774r///W9Vxrv11lvXqO9we5x++ukdCjTvCLNmzeq0XeTz3HPPYeHChVUbb9KkSZg6dWqnxsjlcujVqxe+/vWvV6X/NAA8/vjj+PGPf1yVsQDg5JNPxocffliVsVasWIHDDz8cu+yyS1XGO+2006o2txEjRuCdd97BZ599hmOOOQb/+Mc/Eu2O1pQ33ngDxx57bFXmJ6XE448/3ulxLL/97W9x1llnYauttur0WI2NjXjggQc6Pc5///tffO1rX6vqd37VqlX4wx/+ULXxHn74YZx++unYc889u7U1AlPlOtgTBNHlvP/++9h8883x85//HN/73ve6ezprzbvvvottt90W//jHPzBu3Ljung4AYJdddsHYsWPxq1/9qrunQhDrlBUrVqCxsRF/7Ls1anl1IhPL0SwjnPX5W1i+fDkaGhq6bD0EQRBrwoZiW40bNw5Dhw7Fbbfd1t1TAQBcd911uOaaazBv3rxOR6oTRE+DbCtibbHnTlcd04ULF+L999/HXnvtVZXxHnnkEey///5Vuc53xdzGjh2LfD7f6bE++eQTvPvuu9h3332rMDNdgWjMmDFVydL55JNP8P7771etlPZDDz2EyZMnI5fL4Qc/+AG+8pWvdGq8Z599FgMHDsSWW27Z6bn94x//wP7771+V/bZgwQLMnz9/vSxBvnjxYmy99dZYunQpJk2ahMmTJ6916VApJVauXNkpgclnwYIF+Oijj7DnnntWZbyHH34Y48aNQy6Xq8p4TU1NrnVQZ3nooYcwbty4qlxDAP29nzFjBn73u991KiM0iiKcccYZ+Otf/4q99toLs2fPRjabXevx3nnnHey222549dVXE+0K1halFD777DMMGjSoKmNVq2zu/PnzsWTJkrLBmmtLNedXLBYxe/bstarG0hE6al+Q6EcQ3cSG4pgCgHPPPRfvvPNOh3vKdCWPPPIIjjvuOLz77rsY
OHBgd0+HINYp5JgiCGJjZkOxrf79739jzJgxePvttzF8+PBunUuxWMTIkSNxySWX4LzzzuvWuRBEd0C2FbG2dLXoRxBEz2DlypWYM2cOXn75Zey3336dLpNIdD9tbW3IZrNVEYnmzZuH6dOn44477sCBBx6I66+/vlPjzZkzBwsWLMD48eM7PTdi/aSj9gUVdiYIotPceOON3T0Fx/jx47Fq1arungZBdCuCdXHfGeprThAE0WWMHj0ahUKhu6cBQJdsr1b5LYLoyZBtRRAEQawN9fX12HfffauWXUp0P9XKZgSAkSNH4rLLLsNll12G1157rdMZjjvuuCN22GGHqs2P6LmQ6EcQBEEQBEEQBEEQBEEQBEEQBNENbL/99lUZp1plKomeDYl+BNFNjBgxAlRdlyCIrkAwBtGFhl5Xjk0QBLG2kG1FEERXQbYVQRAEQRAE0VPg3T0BgiAIgiAIgiAIgiAIgiAIgiAIgiA6B2X6EQRBEMQGBvWdIQiCIAiCqB5kWxEEQRAEQRA9hR4p+kkpsWDBAtTX11OdWoIgCGK9RimFlStXYujQoeCcEuyJ9ROyrQiCIIieAtlWBEEQBEEQBFGZHin6LViwAMOGDevuaRAEQRBEh/nwww+x6aabrpN1Ud8ZYk0h24ogCILoaZBtRRAEQRAEQRCl9EjRr76+HgDwzttvu7+JrkEBkAoIpUKkAKkUIql/K7MMA8AZA2MAr/CsIlX8t12GAWDmc0D5BpMy9T83n+HMrlf/hjJLyrD8BBjXP/Zvs212HrBj2B//cwDAA7cvIvNBpeKN8rMi3JxQeX/Yl73dAqlK/0/jj12pBIz9mB0vve9Zmc/626aUcsc7lCoxL7vP/flxBgScQTCAc4as+RsAWKEFvG25WYkEpILK5KAydeBNi8GLLVCMg8kQkBFQbIMKi5DNK6EKrZCrlkOu+ByFlc1o+fRzhG0FFFe2AIKDc46a/r2R7V2H/JAhCPoOAu/dH6pxMFSuDrKmNwqKoxhJhKlzVu8/fe5lBDMP8aX7QykFCUCZcz99XFSZYyR4fG4LFv/mDGD++WV/s9SZb/837zMZAlEEJiNAFsCiAlixAMgw3m+R+e3DBSACgDGoIA8lMkCQgczU6fM5W4NQAZE51iq1f/xtEOaYMhkCSoJFkV6/UoAK4/naOfvbKJX+jFLm/zDeDzLSr9v9yRjcBcH/vvrfyfR+YxyKcYAH5m8GCKEXNa/F33/zvv3fnMyqzPUhvU7mbUvJXOw+c9si422VIVauWInNd9+f7lfEeg3ZVusOd4/xXrP3V3uPUYivz6FUifsQZyxxr2FM20fCXNPK2Qe+TWDtF3sFs3cha1/Z+QCe3SFDsLBoruFRbG8xDsUz7r7ibAfE98i0nWfXI7z7Iiu2xfcKmPtBkIHigb6Wm7nbeymLCvG90dsSxQVUkHH3hEihZJvKHQ8f5t1nFICijO0Bux3u/lhmUGXXpSRYoQVMFsHCViAs6vkqqfcXF1CZnP6bieR9iDOEIu+OgT9/VmgBK6z0bNQsZK4WzVJAKX3OFCIFYWycmoBDqBC8bZU+flEBdoeqbA2UyCHM1qEllGgNFZqKEgra/stwfaxqAo6AM+QD7s6JSAGRUmgtSrRJhWKk0GaM5JzQy9YEDL0yHIgKYEq649kSaluzJZKIpD7X7fHNCG1L2jEE7H6MtM3hnXvut7nHmw1L2hthUZ9XkbahkLYzm1dCtTYjammBjCRUJCGyAXhGgPfqDZ6vheg9ALyhr7Yv6/pCZWvRzHJoKUo0hxKLm0KsKIT4dGUblrcV8XlTEUtXtaG5EKG1GNtn+YxAbVZgYGMN+tZl0K8mi6H1OdRlBXrXCOQ5Rz7Q225tCOU9i9hzuuSchWdnyhAsisCiIqBCsLAVLAzNd7hVfzF9u5ELMC6gOHf2osrX6/MiU4NQKvNj1mXO+4TNb45v4jtpbUbPVlI8AHhWr8PYoW2httOLUiGCvdaVfq+4MRE5
GAIGNK9ahV2/sA3drwiCIAiCIAiiDD1S9LMCS319PRoaGrp5Nhs29sEyMk9fVvirJCZxT8RL40sb9hj64h1Q6pAp91Cr30iKJ+6BUqZkQs7jh2XP+V92XP/h1B/HjmE/y3iJsy49v446mHznnv8/kBQVE+vw9l176/DH9efnC4f+ctI71kWpj3Exio+1XV/Ak+u3r2V8sc+8Lj5fCvnB/6DCAlSxCFVoRXbrnREO2QuZcAnkyqUAF1DFAlShFaqgnWKypQmq0IqouRltK5qgmloRtRbAWwtAcwEyVl6hmlrApEI+aoUorEKQFRBBiEgKqHwDZLYWBZZFpBTaQivcKSf2OcGSxfugaBysURmR298X/n61CJ4+t5Pin3MQpgSjeDDvm+KcN6F23kjrtCnoc9U6D73vQOJc5dqJqDJ5QGShMjnIII+iBNoiGX/Blf5tTzntjDWiKNfOZZEWAO38UuuH9MQ/Oz8rUCqpncbWYWteQ2gceDKCMk4oxpn529tPUgKCg3GhnVRCaDGTceNICozoF7jvaizwmfd4EH+nUw60Ss5w+51z4i1Qer1w+0HF78lQO9Yis03rMIKbMwbehevryrGJ7oFsq3VHqOL7DAB3j0jfn62oYgNwrC3mB1qJlOgnWPv3+PR1Lh1MZUn87wI+zD3IXtsBfa0VGUAEkEFez7mcxz6Fv07BoMWISI+t7PUlyOprtci67XBiWrE1eR+0c+ECKsjqex6SAUpu3e1NzA9aMQFfRamPg2+WCS/gKSFCGRvTCZRRLVixFSwq6vuekvq+YAPQMvl4H9r7lZLxvduzWZ1ttXQ55If/A8vmwTJZiD4DEWUV6vsOc3MtRAqcQYtnLAIrNIMrBhZKMJhjKAApMlC5HKL6XmAFICxEaFpZQFsYoTWUyAccGc4BkUEuYAgyHFnOkBEMGXNMgkghJxXCSAuO1nawol1twGJ7gQcIFcBDiWKkICLlxG27z7KCISv05+syHCwqgBdYLFiqjHcwk/fzROCOszsKenujgj7PCm2Iik1QkUKEIhSKiCBRlGbygQAXAiIIEOSz4HU1EA31EI0NkLl6RL36QuV6ISoAxUIEVpAotLUilBHaAobWgkCRc6isgOASmUCfU4Iz1OcD1GQF+vapRe+aDPrUZtG3IYdeWYHGnHD7LFDed67Mc0jJ98nbZhbZz1p7K6dFdRmCRVm9X8LQ2VwsyDp7SgV5bSvm6qEyeYQ8i0KknwvS1x/BgKzg2jaEt24bFGZtvHQwJQ/0ujJ5JwDbczaSgIQqCaqzYp9db8acI/o9sq0IgiAIgiAIIk2PFP02JpyDo5uxD1QBM1lPSGY52WcUW5aknCCVcCCls51kKkMIWrxob9v9ZeOVJB+Eywl+lnLiTfLD2ukCKbUPxq4XgPCcMJWyGCsO633Od0ilxQa3fNp3phQYizMOgWT0fHvzaC/z0IpRQjAIziAVkOVxhqfgDBzayZXO9qvYeN7ubyldRHH4yfsIwiKilcu00CcjLfS1tQBhESosakeEEUqC2jxEPotMXR5RIUSxqQVK6khsngkgshmITABZDMFamyCXLoJqa4UYYESjqIBsTSOUcRpGUkGBJfaNApzDSanYARVJVfY4233mi9yVHIo6M4OBmW+zcp/liWVV4jN2TA7GA+PQ4kAktLNLZAEl9WfSzhTnFGLGsRIAIovQRP4X2rRjqxApJ2raiH5l/F2+IznkJhOSq8Tv2AETAExPj8H43cy8lHMACSDi2gHEJZg032/r3LRiX1h0op/yzpkEXACBzloEsvr/cpl65faH2Y+Kx6Kgn+FaOahB/yOco4tBsEBfbpQEmMkmiEIoSDAeACY6n/HkdYcgCMIX8DTxPcIPhlLmvQhawFGeQ9guZwU/xlipjQUAKVsqLfiVI2Er+GNxDigOsHjuLriEB+66ma5+4G+3Xb+95grOzO8ADIAqZ9tZe8z+XSFoxs/k9oVOu247l7SIVrKdvo1q1ssBSFbO
JvPmY5ZNHlojSCmlt01JT0yVLpOLMQ7FkbifsagAxQN9T0mtU4VFff/jAspkdfHWFcjmG1CQDFkBZ7OxghUdizoYpViADAt6/WafsWIrBNeZhc3FCKvaQqwqRNoe4AwrCyFqMwL9a7PIBSbDy+xQ/7ja4CBtK+r3QwUw6AoN1raqWI3C/sDYWOX2s0weH8aNTW6Fv3ZgSkGGRW1vmGAzWSgibC1ARd45YHVF+1zBjaDMBRBkEYKjEEVojRRWFfS+WlkIsbI1REshRCGUKITSfc+F2V81WYGabICajEBtRiAXcOQD4cpG2kAxSOlVRQAYpLbnlNS2k79fgEQAUknAVeSJbyY4SXn2FRMCijGdhSky2l42wnnRE/xcvB0UBBgU0wIzwMA5BxNZU72DQ0lZEhymV5b8HjNzjkICAdejM8VKgxaZDbyKA9GCdh/iCIIgCIIgCGLjZqMU/dYXIa0jrC/zTDhtwCCgH/osq83c8x9G05l0FZw37rMdcZhXEvvs573/006Gkn1cbm7G76O8160TpqLgVaZ8YznBLy32+Q4lWeJdSg1k/zX7n0FpxxRY+4JmuXlaxwGAwIojYGauupSOjhhuc/uI2/0upROXXIkfACzSIo4qFrSDCoBcuQyF5UvA8nXa0WDeU4VWqGIhMTWeCSDyOT29KIKKJKJiCFkMoUwJJia4/uFaoFWFVj33VcvBGAcHoAI9RoZnwQEXiV6yK4wTMjICWPpcsfiCn5/R5+MfO6kUuHHcamGNVbwO+auLlBaYmRWZGdflsdzA0p37CYHbZCdY51qhqCPqI6XLKEUmy8Q6nqVSxnGjEYwhI3QJL6UAwRSEMpl+ymal2G1PZZ5AO7cYOISZh3b8WCWzTClSsy1O+IsivYx5jXFhPsu1kxPQr6XGcQ5xPxLeF/xT2X9W6LUZHFIpd+zt8fOPk+R625W5yCnY70qgBU7nzIqS6+0GxxQTDKwL17suI+sJYkNDlbm+uNe975b9izOWuEZXolwmf9WwQRrQ11oG/55TGtxQKfhLgiVKZTIrxpnrc0LQMNndeqUpsSM9t9RcfDur0qUwvj9VyLxHbNNGZXY/98UZGMEyFVwmrD0lWTx/b18iCrVoxQMAImnPSgkwz0bzs+jt/c/9DsHCNigZ6qAUI5gJY7slsq68IBtWaAMTOvuLmVttMZJoiyRWFnQGo2AMkVRoM/Ud67MBarMCAdchVHHZRXP/t/aB2e+RVGCMuQCbdOAgEC+ri3SyRFBVIsgvUeVDASqCMueG8sWlxIHS7yW+a0b0RFhEZG1KG4CYeqaw5S/BOCB0eVJbFaMtlGguRu6npaB/2ozg54t+2YC73xmhK2Tkja1l7apEpq5Mbo/N+ktso1dpwJ0bVlBzVQei+PnLD670bShTLQHGllciGwe/eXYxoJ9XGBQgTbACB7ixERkP9L7mgFICUMydwvGGJEVLznS2oGT22qK0nYXk99cX/Gx29LqGbCuCIIj1m0KhgFdffRW77rprd0+FIAii21kj0W/q1Km499578eabb6Kmpgb77LMPfvazn2GbbbZxy4wdOxazZ89OfO6b3/wmbrrpJvf/Bx98gHPPPRdPPvkkevXqhdNOOw1Tp05FEGyUGmSPIHY8+WJc6YMJ9x/Q3QNomCx/55diqrjC2DGiSp4WkXxgNNl4ZUt5ppa3PoaEM6pcicI0SgKRcRjYcbl2hpQIjBXm6Ued+9l9aeeHLxZVEp1Kpme2TIBBMkB471V8NvWdAlLG5XhSTgHhL6skEBbccsoKMDKCrGmErOsH9sL9aHl3LjL1vVAsFlBsatXLCI5MfS+wbB68vrcW/YJMUuxxpR11NhfjAiyX1w6XbN5sEHfLqpYmLSYaQRGA/lyQcfNSUQGsqHvBBDkgMI6M5P7T+1mm9n9awPZLGvllO8tlMjDGYgeJ0sKa628E5UTC9NfI/wzgi4nCHo1YkGJ68jZLz4pWkQqhlM7sk0bgK0Za+GuNJKRU
KBrnVnpf2G3TfXsYajNClzDjSkfuR0AgmCuz5Av9tseU3i4T+c0D8/3xttFG7xunnbQOzLCohV8ptUMO0H9zc14EJrMv7ZDrKNYx7WeBeIKfAhJ/A8nScJHxPikoMLPPGWdxRibgRFkm4cRXlc7SIAgPsq02PrRjV8UBBSYwJH3/BmLxRN/fk8EZNvO4o2KfvZ7ZO3w628+KYNbRrgAvmEEmRDkn25UpN+jfF5g3dnr9OrhGQbp0cZ4UxCpd6624J5G0EY0goxh3fcG0naXv12l7arW+e2P7BDzQJacRZ2g653w668qIGIDZdlkh0E36GVncZfRJbzxmI84ABIveAVZ9rgOkwiJ4bYO2dzK6NKMMcpA1jQAPdOanXZcpmQpAlxAFwHJS21lhIQ6kiYrIZHTJxNqMcAKfzVgrRBKrClrYshlqtRmBjOCozXDUBNZWMFUAoI9vUSpELGn3+HaWFYIF4ioDAYcZB8nzzmSUMoT6JFLGNjBZlIn+dz5+4I8JKlLG3rABZXoxc9xMMBnPBLr0pakuoIwYFrIAraEVRiOsNPtlVTtZftmAozYr0CufQX0+QK9sgFqT7afLmXJTTp0hUSZTRnAGFOMJW8qJuebvODMw+bzFTGl4KAmEobafbUAVFy67T2VrILN1UJk8CoojdFUhkuVt9bHVdiEkc88gkdnWQGSdAM4ik8nqCYw2I5eZihCByDohzZbaLyeyxyXm9fc5Q0UUCIIgiBRXXXUVttlmGxL9CIIgsIai3+zZszFx4kTsscceCMMQl112GQ4++GC8/vrrqKurc8udddZZuPLKK93/tbW17u8oinDYYYdh8ODBePbZZ/HJJ5/g1FNPRSaTwdVXX12FTVo9FEPXcezDl+8Yac9JUvKW9/AZR+WmSr2UDJIs08eUSpS0cuP6JWLayexLC36JeXZE8LPLpf9WZn5OBPSj4D3HVarMVDq7Lx3tzBkrdYgoJP4vhxNmvf9LSqraOdlt9p0KXt+1tIPKF2rjZcK4PGOhDaKwCrx5KVoXvIeWz5Yh9MQ+ngnABQfL14LnarQTJZsHy+XB83XeqjzRz/Zss6XDRNY4fASYUmBKgoetuoxiazNcCVEjCrEgqx1cnuBiHQzCvOYLOzarze5wxQCm4p1dqUdfuf5+iukx/FJgaRFJGYevX3jNzy6zy/rZHb4g5YQ+KOfctOU6rXOmJYwgpS7VVZQmwy+SJeNaMpxBMgbOAREpZASMOMh1lL7xEEcSUMw6rJnZZbokk6nSVOK4VowlszRc5t1qAgAs5cQ+7q1FSTDFtSPaRPQr87pGmOAAuO+CdTYyqMQFIp1VszZB3YoxXQEvfT1aR3DBwLswGp36zlSPDcW2IroODu1QT5SRtplV3n2oYowPvPsK4vtS2Yx3tCP8pSnzekmQmLUdbJlMFjv0y5XHrjSutaWs3aFLwCftP2t7SZW0tcptI2Az8eyLpdUZ/KwybsZNl1FPTFHJZEaZl22VHi9tC7vP2m2zdlrR2FJRAbLQCtXSZHYaB8vlwbJ5yEyNLsdYZrtYertMdjoCM88gcIEpzJSYzAccOdPLL+JG0JEKEVe6HzC0mAcAGalMv2J9j5cm40vbKUbMBpyd4O9/vU6TQa+8/pQmMIjZWp/wbAh3T5Wlzwb+Pq4AE6K0dKQwJSlN9QgA2mYV2g7Vwl/W9Q62GXyh1DZVGCkUIokwpSrb8qe23KngDLmAoyYjdM9Ds5+Fy2BLBiO6Zyb7WhmBuUTo8z8jTelx14NYlbWzpe2JLLIm2y+LyJaAV341hDioDUbws791xp+2e/S1ylSpEIER/tp/xuKMG8HTjC+T5woz+5FDn0tdmtXcDmRbEQRBrL88//zzuPrqqzFt2rTungrRw/joo4+wcOFC7L777t09FYKoKmsk+j3yyCOJ/2+55RYMHDgQL730Er74xS+612trazF48OCyYzz22GN4/fXX8fjjj2PQoEHYeeedcdVVV+EHP/gBJk+e
jGw2W/ZzRPfR0WebskKa10ciLfglysz4ZQptZpAVayo5m9IP9hJxGSg3KV76cO9/1j4Q2zmkx3XOmNIMuMTcTPSti1615ZzKZfpVKG3k4z/0SaWc86a9h0G/r4/wH4i9KGC9LTLO7IuKYFFBR4JHxaTwZykWoFqbTA816OhoLytLhUXI5UsQfr4Iqz5eDFksQkYSsliEyOcQ5LPI9M6D19QhGLSZHjMsgtc1gNX2QtQwBCrIQWXy8dxMqU4fJYyzBbHjkJn5smKb3k47f0CXabIls4xgCCnBEMYRyObYCSswm5M9sk4mrwRZxX2b8F7xhGOQKy382WWsUAd4mQ6pMzTOAo0z96y4F0l4vee0I8aW6CxKncnXFkr3d6txRrWGkbe8L2bp7bTR5SHnCASDkHo7I2V78kgIrov6WiePLVOq/9YOK9PaJc7eUEz3v3RR+kpH6UuZiLxnXEBxCdufCIB2sBkRNy7vaRxwvgDoO05lqDPuolCPKwLXH4rJSLeikqGeA+MJh5EV+piZf4njGsnvn9tWWAdrnEHolvKEf4KoBNlWGycdde5a8UOkioHaDD9XvtHHu+6kg0j8XoIiYWvE9p5dwolzME58wGXYld+m5LzdtTkl+GnJxiyX3g9+2XC/6oM/Rz+7Ogrjz9nrcBn7yn+ppOSnv/+84ChnH0YFgAcQTPelk95+iQe12VUpG8avomCx2Xd+dloUAsLYQMaWZWEbWKHZVStgQRYKWvRjmazO9svVIqrrB3AtrkhTzUDBlJ/277ciMMEwgTmWgevjBlNiMSsYGvMZRApY0RZqOySIhb/WUNsXnEkUI4l8IMyx1WXBrd1kg6YiY8PaLDAfBu8c9GwrwVPLGqHSfgPcN8Ee+8SBLg0E1PE3NtCIObtCcQGWyUJEyROacR2shiCjq0wEGSCThcrkoIIcClKhILVtVTT2lxUC/ew+SzbgqDFZfr3yOsOvVy4wGZO6zKcVBQX0s5MLxosK8Xalzje/lGfCxrfnr83wNKXzpd/Hzw+sE/G2qUweBakrRYTS6/3sPbswKF3F3AR5SW1MQ9mynLb8uXmeS1y5/O2wPdON7RYw7jJq7RH0t9q/5jkxkyAIgiAANDc349RTT4WUEplMZvUfIDZaVq5ciRdffBH//ve/8e9//xsvvPACOOclVXW6mubmZuRyOQiRDpcnpJT47ne/i08++QSDBg3C4MGDMWjQIAwaNAi77747hg4d2t1T7DF0qubT8uXLAQB9+/ZNvH777bfjb3/7GwYPHoyvfOUr+NGPfuQi0p977jnssMMOGDRokFv+kEMOwbnnnovXXnsNu+yyS8l62tra0NbW5v5fsWJFZ6ZNrAEJn0hHPlApc45xU8ApznpRQHIZ4yixUc7uc0ZIs/8n1uW/xpMP+XYd6Sy5pNtsNdsClIiVifWmnAsMPC55ZUVI3/nmPTT75RutYAeUcYAhGVW+urknBSlzHKI4yteJIzLUjqewTQtnYVvce8+U/gEQ91izmLJICIuQTSugCq0oLF2K4spmhK1tUFKCcY5sQx1yvXsh6NsforGfzuwTAryhH2TjIEgTUayyNVAiizbJUFTaYVAIVRzVa7YnigAVApGKnHime8dlIERWtyHh2nmiHUnecawQuZ8Wcu3tVvjnVJnIar+Hj3vLCodlshn8YyO9v8sdTzvvCNrBoWQs/FnBr2h6rBRMyc6ilNoRJxWai5ET+toiiUgCrWGUcPTC7Fu9v+Lts6WS7NwEY+7Hf70SNttPmO202SWMB4CQ7m/l33kYB8tKJ+SxIONEZQa4c48JUZrtJyMwJbTDyIyle8kEWmCEucZYp5/NpOASSkgIoYUQZbRMWzlPfx/jjfWzUbQ/K+41tNr9QoIfsYaQbbVxIFWy3GH8d3xN8ast6BKIKnEP8e/1ZTHXH793qX9fssk6trRicn7eciwObnDlFT2hpCTDzZLOcDPCoQ2KSWyHWcbZfyJpL9qSf3b+Gc7BRFYv
Xy5Lz87NK3+atmN5JcE0sSOMLYg4YCRhX/hZVlExnkcZG8PZZCYIJhmMFWlxkXE3DgtbXRCczNSAiYy2FW01g5oGqGwNbBUEZbL9BAMKUkEyhmwmn7AFmYySwV2mj5st/WmzqDLGPsgFyX3abvAZYhvWb0/pxDrvAEjv+Ftspp+/hkhB9wcWcQY/U9Kch2WOl32tQrYoeKDt0Wxe74tC3tjfreBWbPWyKFlWB62pIK9LYGbyKLYphJHS9ldUWsYzjRb9dB9EW9LT/gTclDO1Ar6pwOF+fPvd3z4n7IWJ84wp5UqYOpEvFUQX94Jm7jlLWVGYcU/ARKJ6hDQBXUwxXQ3DBIIpYzxJBUj7uGbEP+EL9Eoa49C7Lkj7ddHBWrYXtD0xrF2WKBlc7rtFEARBbNRcfPHFePvttwFgoxH9Fi1ahPfffx977LFHd0+lR/HJJ5/gvPPOw5tvvgkAGDp0KJ588klsscUW63QeQggcd9xx+MlPfoLttttuna57faW1tRX/+9//8OKLL2Lu3Ll47LHH3HubbLIJpkyZgoEDB3bjDHseay36WeV13333xRe+8AX3+oknnojhw4dj6NChmDNnDn7wgx9g7ty5uPfeewEACxcuTDilALj/Fy5cWHZdU6dOxZQpU9Z2ql1Ch4WjDQEvK8qPGLeUREunBb80voMmjRfhXPIZIFXeL7mcK6lZQfCz816TqizuIbuC8wZpZ5OSsfBnyjOlzxVf8Eusy+sN5y9f8trqHnIVksfB9uCzmXCe4wdRQTuVwhCytcn1OLE98pTtteZj+q2ptlbIVctQbGpB27JViFoLkMXQ7AqOXO9eyPQfhGDwZuC9eoNlsohWLoWqaUDYuEliyEKk0BLqzLRCpNAWKterKMNh+uNpZ6fvgLDoskjaWZQ1/eaywu+/F48nrMMunflo961/PE3Gpr/tZY8B44nztpzgZ78/InX+VXagKXfuMGaSMwFXtsxF3EuFtjBCc1FH3etSnjrjr6UYoRBK15sn3l+6rJTgDAiAjBCJ74qOptYljDiPS211FFfSS9lIfi/bz2bwCs9JbDIxuY06t2VagZLzj3nZgIAWBZkQzmHEbHavvWYpCYhAn//edYxBXzMED6BcSVdmMhRRVtTzBT+b+bne3gcEB+NlrqXVgpVew4jOs7HbVhsLrsR3Oyn/9lqVFv649xnOEAeh2PsTj20g3x5SqvTeqSsJeAEO9nOpeUYqKZ7BGzteePUOePtZuy32NV5OkUutRyEOnNKTZ6bHV1Bin9kxJZI9dyuuppJo6N/zpRH+/OXSWVXlsq7KrafSHKIQDK2u4gWzthgX+t7GA7C6BiPyBVC5Olf63PYztKXkQ1OmnAuOjAhMkExsC9rtVVzo9XmVMQRjyAiGQOhAKpkyXFKJcV55z8qbB1Swff2/vX9sxQB9X0ZSOOL6nq5SGfsw75ft6we4EqFM6H7ROnsyA8gIPA+XEWdLevJcTVxCVWSgRAYROEIZmYoL8fb4JTzTwl9NVrifuHSqWZ4xF4TFjN1ujxOTYfK8AlK2a2h6V8e2uu6RbIMPvQA+u3+8ygneizoIj3EXZBYpU2HCZvoZ28hMwhwL5lV+MEfTCqDGTnLCn6uEUOY7YR+nzP9WWE+U34WX1dydgh/ZVgRB9DDuv/9+HHnkkVUbTymFefPm4Y033sChhx66XvQO/+CDD7Bq1Sr3/4Yu+s2fPx+/+MUvcOedd+I///lPd0+nS5k5cya23nprbLbZZmWSI9acjz76CFOnTsXcuXMBAIMHD8Y///lPbLnllp0ee03J5XLYeeedsfPOO+Pyyy/HJZdcUrXqPP/85z/x+9//HqNGjcJ2222HUaNGYeutt0Yul6vK+NWgUCjglVdewYsvvuh+Xn31VYRhiKFDh7q+nH369MGll16K888/HzU1NWu8njAMUSwWO/zZKIo2qOzLtb5CT5w4Ea+++ir+9a9/JV4/++yz3d877LADhgwZgnHjxmHe
vHkYOXLkWq3r0ksvxYUXXuj+X7FiBYYNG7Z2EyfWDFs+xTgE4n5kSVy0rUo5n4DYOWKycKCYWz5+gPdW6cSWwOvnFrjXyq3fFyyUt0A5cVI7z2yZqrj8H7x+E3YblN0m61RKbLQXKVsOGQKKg0kJJQIEIgvX76JEYPH6r3j7skSYSouOqbmU7fdhy3aGBZfVh6gAFNp0ac7WJp3hV2h1WX6QMpndl9gunemnCq0oNrUgai1ARRI8EyDXux4iG6BmQG9kx58JFeQgRYDICKCsTwtktgZFCVc2qBDpn6aiFqdaQy1cJfYPZ5Bm+TYjdBUj6XrKCBY7p+qzATJCi1r5QCDgQJbr9/OC6/8FR9Y4jco67fz9WylTy5YRYzYaWcY95LxsBoFUEmoqa66S6RIpLXQywDlhIOFFVOtSnW1h5PZZm/ndUozQXIjQUogQSZnI8ssGwjmnsqaXTIbrslw1GW4cUgK1Gf2e7TfjSipZxy/T2XCMJTPgfJRSiGymqs3stP1dREZnacgQLMzE+z+T7EHDvaj1EozTygl/xpnEjMCnANP/SQEsdI5R8ADgZjweIgjyYOa7KRTiElZpgdY//IxVDiBwTmqb3dz9D0NEz4Bsq40DCSCU8TXG9jvj0BdTawfY3y77BUmxDEAykAeAkiwOgGAcEtyUgI77kLmKiqZ3FtdRDG7ceF0xXClEiiGw4ossxPfJSCb7GxvKiS/CCAvC2/bEpdQL3vIzFaW5NkdSCw4RlOuxKziH4DwhKup1ecMiDgZzZbpXJx64UqN2h+gS4b4YaMswQpn+e1a0SdloiaoVnn1h+/exqKDHCbnrW8iKzfGxDPJadOq9ZTLIjXGESgdPNbVECGUszui+cQyNmSwQGkFLhlAtq8CCLBDEtrXK5I1tpVzZcd3fTySqAvhlwmsyAnnB0acmg9qMQF7ogKtKNoEvktm/4mPGEu9JU0NbSoUIQMA5hMjqz4WF5HNEqsyjDepx//v7mwdAALDaenATZKRam+NKF1yXHGc1deD5OvBevYGaeqhcL6hcLxTMd8nuK8Bk+AkOZIFQZlymnC3xWZuNy3n2ygbolRWozwXImv2VMeIfZNFl+tlqHIgKpZlt5hxTUQQZFlwwnu1tXcl+1+U8pbN/eMbsTx5/36QRM5WCy/RTtrw8kMhqZC44jUFCue+VMNeYSAAZBW1vWpvbHeTU9y6V8ccYL+kNbcu52uPZboApQWwkfPjhh5g7dy4OOuigqox3zz334Etf+hIaGho6PdaHH36Id955BwcccEAVZqbnNn78+ES/67Xlww8/xLx58zB27NhOj/XKK69g1KhRVRO/qr3fLr/8cuyzzz4YMGBAp8dauXIldthhByxevBgPP/zweiH4AcBmm22Go446CtOmTcP9999fUi1lbVi8eDF+/vOf49JLL0Xv3r07Pd7KlStxzz33YMKECZ0aJwxDXHTRRbjnnntw/fXXd7rM4fz58zF//vxES4m1RUoJXuXAmBtuuAH33XcfBg0ahNGjR2O//fbDOeecg/r6+jUe629/+xvOPvtsDB8+HPfffz9+9atf4Xe/+x222WabNR7rtddew3bbbddpIfKcc87B1Vdfjd/85jc46qijsOOOO3ZqPMsTTzyBGTNm4M4773SvDR8+HNdffz2OOOKINZr3u+++i5deegl77703Nt10UwDAnDlzsHTpUuy///5rPLfvfOc7ePbZZzFnzhwUCgUMHDgQu+++O4444ghceeWV2G233TB06FC88cYb2H777fGDH/wAffr0KTvWq6++ikcffRS9e/dGnz59Sn43NDRACIFx48Zh2223xWmnnYa999674va//fbbuO+++3D00Ud3Wgi+8847cfjhh7vKTN3FWn0jzz//fDz44IN48skn3UGvxOjRowEA77zzDgCtpH/66aeJZez/lXrV5HI5NDQ0JH66m/U2u6PaWBGvIw9WfoZfe5GY6Yy8lPOixOHj92lBXN6pKOMf
+5rtdWZ/LBUFlgrrVimhUTFmoqFTPyJjBITACaN27kypuCdHWACiAgJIZLgtm2Qf9I2YAiTEDrs/WRTG5X6Mg6nST2K/W8eTHVeZMkAyhGpt1mJfaxNkS5MW/Aqtpk+fyfazWX6pH+X/D4AJrvv21eWR79eAfN8GiD4DIfPGSRLkdb+QIA+ZMyWSjNPIHifGgJqAJ0qd2Wy1lYUQy1uL+Ly1iKUtRSxtLWJxcwGLm+1v/bOiNcSKthDL20Isbw2xqi3CyrYQLUWp+69EekybLWCj9ZV/vnnnsvvtl1mS8XHwj4ct78W842aPq3ZIMie06eMPBEz/iPZ+zOc404mEvsgUmVKfttynFkElCpFEIbT9ZZKCnx0vG3D9I0zEufltBb+80IKpjUbPcJ1BaYXVwJ67PD53XQacN0dp9rX9Xir7fQqyQJDV358gD5XJx/+LrC6hJbLmb91LB7kaIJtL/gSB+1FcJL6DlSL93XF1x1b/FkwfF1si1kXgez/M+1ltxrAnClcMDOhCGGdgogt/1iRl2uN3v/sdRowYgXw+j9GjR+OFF16ouOzYsWMT+9z+HHbYYW6ZCRMmlLw/fvz4tZpbd0O21caFVHEPMJudBcTlFZG2B7zPunuYT3vBK4izd/S9wzr19Tg2i1whnofN8muv/3BiHfa3F/SVcMp7tondxnYFv9UQb4vejsjbh6zMj329ZL1p29Wbi0rfS4zAagOqWFTUokxUACu2Jl5L2Au+EFguYAuIM7tMBQbXy80T/FSQdTYVvOw8GzyV/rH99BTj+h5rs/p8TCCMYtwEYyElGnLkjI1gf2oyAjnBXYnKmkAYAUtnaLn+dBzuR9sH8dH2jwsQC01xGcn4NQnv3CwTcKUHSAUclgvU4zagMNBZfNk8WL4OLF+rRb6aOv26EfxYTR2Qq4kFV3AXsOZ/LzKcI2O2vzYrUJsVqM8H7nevXJDYfxnBXSCVC6LyRWhnU6p4W1K2vQuGsoKfLc1vq3Wkf+x7fiag2/ntP+PZ46GvVcpdu5R7T7lM4sj2AUz9nciQBZLlStOkttdeC20mpDvW6XIu6wCyrYj1jY8//hgvvvhi1cabNWsWli1bVpWx5s+fj3//+99VGQvQTuwlS5ZUZaz3338fzz//fFXGmjhxInbfffcSO3xtmT9/Pv7f//t/VRkL0H2/+/fvX5Wx6uvrceaZZ+Lhhx+uikh04okn4uOPP67CzIDDDz8cL774Ig499FDsueeenR7v6aefxjXXXIOtttoKt912m7OL14bPP/8cw4cPx3e+8x0sXbq0U/MKggDXXnstvve97+G8887r1FgA8N5777V7z1gTPv74Y+yyyy5VO6ZAHDDf0NCAfffdF6eccspaCX4AsPPOO+NXv/oVXnnlFXzlK1/BnXfeiVGjRq3xOM899xx22WUX/OMf/1irefgMGjQIU6dOxcyZM6sm+AHaD2DF75133hm33XYb3nrrLRx55JFrLFQ+//zzOOWUUzBs2DBsuumm+OpXv4o77rgDY8eOxZFHHumyJjuKlBKHHHIIpk+fjg8//BALFy7EQw89hClTpuArX/mKE7JHjRqFn/70pxUFPwCYN28e/vSnP+Hyyy/HiSeeiHHjxmHXXXfFFltsgT59+iAIAvTt2xcvvfQS/vCHP2DffffFNttsgx//+MeYP39+YqwlS5Zgl112wZVXXokTTzwRxWJxjbYrzcyZM9eL9ilrFJqhlMK3vvUtzJgxA7NmzcLmm2++2s+8/PLLAIAhQ4YAAPbee2/85Cc/wWeffeZqsc6cORMNDQ1Ux3Z9R0lwL9sPSDpNAKBi+UOg1JFjM/dshpSLEjeODNO/ywkFgHPu2GhvAInymenSe2mxz3+Wsp/z+wemt8m95zch8+dZsv2hFul8UchmfoVtOruJ6V4syay85D4ryexLO4zKOY5MtLh1hqSXZ0oBUUELfiuXJYW+CplU6bJAehvNerlAkM8C+SxYJguWrwWva9D9
T+p7I0odbwWgyLJQkXKZjtbpURNw1GWME4Qz15uuNdIlKouR/r8QSaxsDVEIJVoK+rcVsXrXZlGTFShGCrmAoygVaiVHUQpkBAdcmSKGiJk+MTaCXgRerxiePHbpc9vr82fPW2ZSw5RkYDajjHPdGzAV2c/S4/m4PivmszyAZHHvFGacQ5FUkBJoC3V2X1so0Wb6+9lynoUyJT2zgXZGZQOOumzgykw15nR2ZGMuY5xSDFmuH/KtuMfMGPa4AeWDHxLR+gqQzOwbc8yZ2TYeBHGmX5TRv2Wko7yNQK2kd+66QVP7LZ2R6Z13Tmwrk4ESO8fN94Tpkk0sPY53DO15u1qT388O8TKUN3amT5+OCy+8EDfddBNGjx6N6667Docccgjmzp1btjb7vffei0Kh4P5fsmQJdtppJ3z1q19NLDd+/Hj85S9/cf+vT2UrOgLZVhsfTqRy1xSFwGTO6LwZk71kroEqgMl+SV5LpDLReyWigMkMFLHtVDQO+KJUiO9g+t7IYP405TATdl4qW65scJEyWYbMpOvYy7Tp06zf4/F90A3op8GXEfvM8q7PLvT90O9PGEEh4PZyy0oz+Px7MEuVg7Qlz6WMF/PvMbbKA+funu1E1aigKyjYUunmtZLtMSKTfs0rTW3tNJvVJyOoqBDbYlwAuVrIbB1kTaMWeso8pEtzbFsj6fr4KgDFSCIrAtRlOIoSAM8jn+ul92euRo9l5qZEJiEW2uNfmxEms4+XZO0BQGM+QE0g0CvLkTNZa6KMaKFMdmVk+v6WK3MfZ/fpf5yox1RcqYOZjDEeJtNQIy+DTMI8BJQ5n+yzBWO6RyIX4JxD5fJQba2Jfc/rGsBr6hDl6qGytXGWXxRnvXFTJaEoFWozAkWukPHSS7kJGMoI5oTT+mwcUBUIP8AIsdCX+H55Yr7dV8ZmV2HRZfhpQc/L+IO24eNyntYukUCQ0c9XUT4u82kF+XQMHOIAgWKJWKi/c5ECpHechCq9bkhT5rMs5bIy07beaoTJjRmyrTZu9tprL+y1115VG+83v/lN1cbab7/9sN9++1VtvBtuuKFqY40ZMwZjxozp9Dj/+9//8PTTT+OEE06oWjm4au83+6xQLS6//PKqlFkE9HHobKaaJQiCqoomX/ziF51w0tmMxr/97W9obm5GW1sbfv3rX+OKK67o1HgjRozANddcU5XjMHbs2KpkvAI6C3HHHXfEJptssvqFO8jJJ5+MCy64APvtt1+nt/cLX/hCon3G2ojhzc3NuPfee11WdDW44IILqvadstx4441obW3FGWecgQMOOKBT45944ok47rjj8PLLL+P555/Hc889h7/+9a8AgAceeAAPP/wwzjnnHFxxxRUd2qfVvM8ceeSRifLFLS0tWLZsGZYuXZr4fckll6CpqQm5XA4DBw7EqlWr8Oqrr2LTTTd11+6PPvoII0eOxJw5c/D//t//w6RJkzB16tS1ntsf//jHTm9fNWBqDUIWzjvvPJcy7afANjY2oqamBvPmzcO0adPw5S9/Gf369cOcOXNwwQUXYNNNN8Xs2bMB6PqoO++8M4YOHYprrrkGCxcuxCmnnIJvfOMbuPrqqzs0jxUrVqCxsRGfLlxIkeldjYk8TpTcROwQctHS1lEfxVHNjnYcOSwtKvklkExDeX+9vuhnz1ypVInY56JnvXWVxRdv0m/5nzXOIjdHoCSTkAEm4juMo79teR672Vbs84XN9D6q1EPOOtdSmYCAJ1ymsg1ddGxUAAuLYIVVQFsL5KplUG2tpqRnIfkwvbqUfFv60/RdY0LoaOnaevD63uC5Gqggh+LArZMCDJDIZnClYpUy4hKwtFX3pltZ0GUql7eFWNEaojXUf7cUIixrLjrBz5ZPqskK9OuVQ698gMZcgHwg0CsrXBR6r5xAhjPUZmwfFV3ik5tsuzhDQjtZbUlU91oFEVtVclims1XtPrBOPqDUeZdaXpno+xDaodRSlGgzDrnlbSHaQonl
raEu6xlJrGyLUJTSlPRUOhrdK8OUM9l9dVmBDNcR+jbq3DqhajPCZbllhf4eZQQr6WVX8dQo81os9unfviDPYMqvlbl+uGtDJZGvHJUcSmnnb3t/IyUWsvLlhUuugWknHeC+VytWrMTAzTbH8uXLu/yeZe+PM7baCXVdWIu8KYpw9Nv/W6NtGj16NPbYYw/89re/BaCjvIYNG4ZvfetbuOSSS1b7+euuuw6TJk3CJ5984kr7TJgwAcuWLcN999231tvS3ZBttfGxqiixqijd9VEwe19iqAk4BCRYodnd95XIQIkAEc+6DDcrFARRqy53aMt3A1pkElnIbA0KLItCpNcXSZ0VZp3z+jqvr/eByXQudxW1dpW9XjMZxuUi/ZKN1n6zWVUpEkFW3n0ybYOViIv2MzxAQcKU+tYZR8rsP8F1KW/BWXxfT6xcJm0je62295309d6/j3vlFl3J9EKz3udRAWhr0UFSYVGLKJxrW4iLuOeeqRqR2E6vdxvaml01BVt6U+YboWp6o5hvLOlN6DYLMFl9WvzzbeOsYKjLcKwq6qCgvjkG3roSvElnSiguIHsNgMzWYkVBorkonfAXmqyuKPWY6Mc/1Wd1icpe2bhHnV9KP7H7lUqUZ5Uuq69M1qrZRmt3AHCVMbKCGZu2AFZsceeis61swKC1h+3xTux3qT8jI338ogIQhjoQzpT3RK4GSmQhaxqhMjWQ+QY0FSVaQoVVBV1efnlr0fVWbi5GkKbsp93/Ga7tTJvdlxccjflAZwRmOLKCIR/o73zAoM+pqAiErTpz1M7PZbqZbUyLfmFR2/RhIS7Tnz5YXOjtCjJgQcZlMyJXo8+zTB6yphEhz5qS8abihhGSw5ToZ3s/6zKfSZHPfR9NdYicYMjbbQxb46BGLyDLXhtKAuMq2XyMk21lINtq/caeO+viPCXWPZdffjl22203HH300d09lR7JhtY7qz3ee+89fO9738M///lPvP/++2hsbOzuKVWdJUuWQAhRlXKoxPpLW1sbDj30ULzxxhvo16+f+xk5ciQuueQS9OvXr7unmGDBggW4/fbbse+++2K33XarGMS0cuVK3HXXXfjzn/+MZ555BowxPP744zjwwAPX8Yw7RkftizUKWbjxxhsBoCQS4C9/+QsmTJiAbDaLxx9/HNdddx2ampowbNgwHHvssbj88svdskIIPPjggzj33HOx9957o66uDqeddhquvPLKNZkK0QVUFL7KvJbI9ClXQqfc62WWUbz0NessKld2UT9YMnCldAYRzIOneV8Yp4CfKZfu+ZDO7LOR3+ntV0Bp1g/KZ/voPoHe8nb7XTZRSjwUOrqaSesI8h5+JWJnmRddnsaPhrXHKRGhnnKuKWZ6KXIRR/ZajENALy7aF/6kV2bMOA94XQN4XQNktg6RyADpTEZ4Ao//gveHgnYQKJh+P5zpHnWCQUjmyqDZkpVW2LIlKy0ZwXUZSu9HO1TjXnR6X+mOaxGgezwaMRdMxvvUCN5lz3Nvv8MsW4I9z60zwzunS84tFSUcgkyZnpKp/SiVPiWs41cqoBjFPXbsvvB/C6778mQEc2JfrenF48Q+r7dMQvTjLCHSlcOVVFNwpS/SZ61zRHqZf4zpvBbBTaalzapw5ZzauYas5hzVK/U+7zIty2QN2mNXLgvFzolxL9uGl14D08EOCWdbdSO31ifSJQtyuVxZQ6pQKOCll17CpZde6l7jnOOggw7Cc88916F13Xzzzfj6179e0stj1qxZGDhwIPr06YMDDzwQP/7xj9c7Y7M9yLbasClnWynAZc8AOgFMmTw/bjN+/JLdXIApXRbT2SAm88vdo9zgpddNtz6l+7XZMSKTFRUpgCnmjBkbnGGxgVROoJKl19NEzzqeuvd5me5xBQWZCKKy1Rfc5dIG3QBamGMc4CEyQR6QDIzpD0kFveckg+TmPmTtsdUFoPnZ5CqCYhKMW1uAx/c2E+TDvLmzqAgWtup+cK3Nrnwi
y9WABRkT5BUHkJQV/PxjZsunA6b3WgCVrYXM1aE5tBmPDDVBahwAOWdclcmyg7YRWkKFtgxHLlcHhK3xvLK1Tuiz2aCALtHpV8ywIp09TowB+UDbCnkRi61un6XmAKb75nJuz2HThVfF41s4s8E1yvUOlkbolgqlvd7selIBO/FOSv5tjy+UhAoFGA/ARAiWycbLiLgEOYKsKx/rApmgy3oiABBK1GaEE/3sPrS9+nzRLyu4KX8al0cvCzMnc2r7KpXFVLZsp9+X2wTnIYrAhITiERjngBSdypwTZaLF032QE6e3/5txfTjMs076WU8ByUzdSs+YANlWINuKILqbyy67rNv7NfVkNhbBDwA233xz3HPPPXj88cfx5JNP4qijjuruKVUdukdsHGQyGTzxxBNVz07sKoYOHYrvf//7q12uvr4eZ5xxBs444wy89dZbuOWWW3DJJZfgH//4R48+t9e4vGd7DBs2zEWdt8fw4cPx8MMPr8mqNyjsA/P6ts6iBAqRdJHe+qE0SJRkss7xSg4mJQLj3PHKyFhSTvqSiG/7d3pcG+EK/aCvSyam1m/FvYKZny+0WXzhy4tEtw+crJxDKLV9sOu3YyTmaDKVKmWG2Xkx5R5qbcknZh7uncPCF/68/aIgwfySqFZ4smWr/KzMtPjKdF8Yxjh4bT1UNh9HCEcRmDW6Koh+cYkgEUdCZ3Mmm6AOYbYWMl9fIrKuCRmXfsCRjRQ4yyIjGFYWtCOsORs5MdCW9swFHL3yAeqzAWozAn1qAuQCgcZc4MpUulJKPBk5DsA5chi0+ORKqUWhdlJZB1FaGEof+3RPRXvcmBFU7fnmR5/bc8g5Rq3jkRlxNYQIdIlPwRmYVBBcZ98VpY201v/nFHeR5cJsZ8YIfjmvZ5/O5tMZf7bHYOBle1ihz4mkLuOxHfHcCcuAVEbIQ9zrBSgVASUAbvsxwTqYuf5h5oDYU9i799hxKt2OGIs1bV+stL+Vn9UZhV5PxrC0fynjUDIEs5ka9lhxz8meysYtQXEgKlMmt4thXJcr7bLxzQEYNmxY4vUrrrgCkydPLll+8eLFiKIIgwYNSrw+aNAgvPnmm6td3wsvvIBXX30VN998c+L18ePH45hjjsHmm2+OefPm4bLLLsOhhx6K5557rsc8SJJttXFge6UVTClmn5JTwL+XmOAhe831hT9HGfvFinDK658LxEFKgCmJqTxBx7s3Jq6d7j4Xxj3tygl+wmRDp8qfK8aTwp8357h/W1Lwg8nCs+Uz7XZyKZENsoiEQAFAFCkXBMMiQDAFpWwGEi/NkLP71Nhrvr3IGIcKC2BBNhF0oxgHMxUnbEYliwpQzasgm1ZArlrmsqtYoRUsXweRzQMw/fKsDZDYJyyek7UBzM1LZmqgcnUo1A1AU1FiVUELvgFnyAmRDKDqAAWTsbW8TdtDDfWDXLDbioJEW6jQVJQukMiW9c4L7vrNRRKQUE7c4WCoycRBQu4cMdtSUspRSW0Dgbm+3JHUdq+u2JE8Tn6fYPt/vIA51yJzTISEsvdiWyXE26/ub8bjY8E5XAlx23/bZdVJLdqa0qeKB+77I7ju/wsw1GYFIsmRC6QLvrLf7UgpZEyPwwzXNpjdV3FfZJ0tV3I47fyUdIFH9txVkFoUBgBevgS/KhbMLtfCH+Ni9c+BlZ6BPDKeTZHO8IsDoaygmeyJ7K9HZyIjKayb7fOfq9ICZ0LU9bID1yVkW/Uc24og1gUk+BFrykEHHdTdUyCITsG70A5aX9h6661x9dVX48orr6xan9vughoNdQPdoYe3t06FOGvIRrFKFr/uHtwrObXdSpKR3fq1ymVZ3AM4/FKP8Xy55yQqEVqA8qUvy2XceHNjCmadVsQ0b9no83IPkGUyvEo2J5FZaDfCczCo1BHg5dfFlIrnFq1GPHNCYDIqtmzWIjMPqdZplZPgmSxUMaNLSZURZ5mfDWizAIWInXs8cJHQKtcLKtACoFrdedLeJpmS
QEICAQfygqExnwFnWuizDodipFx5M5u91isbaAHQiH/5gLvMNStmWUHbj0op6fNoj5vX58/uV72QdKJrWYEz7dgEwJQ958qcX2mhm3F9HsgQgBbRM5wj5AwR171wsiLeD4DuK2MdYrbcrWC6X0yGM9O7T2f21WYFOEMiq886+PzeMgLG4RqVF7GdozIlnAvvO82NR8iPzC8n4Jk3/F8uc9EubZ3S/ntp7D6IHYXlSv9yCDNne3xtL8yK5609phI6K9fXBe11x1suflOL+SVljDcgPvzww0Q5ga7q+XLzzTdjhx12KGnM/vWvf939vcMOO2DHHXfEyJEjMWvWLIwbN65L5kIQHaGSlBtnj63BWL5AlCZtS6QCqfxSxJFKOucrwRhLBEuUy75WaRGG8fJii/vAWtgFaZvOBmhIDs5ESaKPFS6tmOn3J3TLenNL2I3uNSMGMVnWnvLnpsKCLpOe6gfHwqK2rfz1+T3VfGwWoWdnySAHFeTdfVRn3HsZl2tIljOEXPdLZhHQEuoewUoBbaFCIZIIZVxm065T9+jT50DI0hmZcT+6EsEmHXTmCzkms59Dn0OM6Ww+/75eTvBjDK46gNtnfpCb8J5BKtjzTgg35dOtYaJs0E8xcOeYK5dv7XUFF48kONMCqJmb4AKCadGUm+w83xazgp/N7rP71K8+oTecQ/eG9rahzHdKlS2m7o8jUK5PdyKgz1b4sNto1pEuy8qgj5HgLFEy3gp+/vXEbo8t7+m21V5H7HZ4zy+JUumszHckfU7Zlys9s20gkG1FEARBEATRvQRBsFa9H9cnSPTrIrojm29tiRTQVPQfIJWrKCPMA3bgPcAz3/Hjl2YxD+G+UwDeQ3t6nQo6QlsCrq8HYB6gjUBj15uILk9lVCWyc/ySekDshLLRsgYGDlvKSXuHUpl+aedMucjT1TmweGCiWUvHSDzEp5wjet+X9jps9387VysaAXHEvbcuFWQAZKBERm9HLrUfPVQZZ4OyIqYpT6qCnN7OrBfltpYP4gw6ejoDLWxphwGH4Ao5kUE+4GgNJfoVMyhKBSkVMkKLXbHIx10UdSxqxSUqOxwd7zswlRbebES4fw4qmzHGof9G0pkRj8PcPrOCacV95GfT2uhzHqA2W4uAC2QjCcaAMGLICo6WMEJbKNFqyn+lHU0ZoUt7BkbU83s42R45JVl9YRj3j0llOLr9420f0n2cjPjnOztj8Y7FoqDX4wcw1wVzLbB/28wD2w8IQNksP1/Ys44nbpxV3DihGFPu2iIYRzbIA9zsb3u87f5XXhk6GTpRkJn3nAPcfW/LZL1YZ2HUWulM6zK4YOBrmg6yJuObq3pDQ0OHepT0798fQgh8+umnidc//fRTDB48uN3PNjU14f/+7/86VKpyiy22QP/+/fHOO++QY4rodqQyZa2VhDD35oDrQIiMWcZmTllBwZG+znoO+3TwigvESf1vopr0S+Y66MeeWDHJXjMZ4gxyl+GXthHK2SM8fU9ILVMp209JcMYTYqQb1tpbtqSo26kRVBRCZPMQCuBMuRipolSxaMEBIFmesmyZam/7XDlS+xYPwMz8RPreraSulFDQ/ZF90U8FGZ31ZvvVpe1L39xiSr+fyTq7Q+V6QWVqkOU6u6pXBmsNA9AnL9CQE1iwqoiWUAdNKZPh2RLqLLXmYqRLonOObJab/pJx1n9RJu/DHLHtkChvXkbw84+fgq6qoIwiK5mJi5PpoJ3SQC17LF3mpcjE9oddh/+9SdstjEMFWSgeoOA9fzCW0cFQNXl93tnMUsDZbdzMJ2O3gTEUmXL7JOD6+UeaRw1fILM2iF9ZwQpnpQcstj+ZCIydac5Fbz8yIXSZTi6guNTZfDxylQiUyfDTO06AZTJw5f2DDJjpyZ0QSY246ZtYttSqUvp4+8cIsLaVF1hlbW7EwWV2H1gR3NqANmPZivMC8TNdQpC3x5LZ7aks7HY1ZFuRbUUQBEEQBNFTINGvi1jfBb+C
hOs5oVLR0BalgMjIdxHgnPgmWU4v45xRgc4WTDkFgNiB5O8T+8BnH7ijlBdfItWzw+8fUymbD0hk0JVE+5aJlo0FvKi0r16a1WUT2rGtMFbB8VVSssYJlrG4khAyGS8rwPnjJbZHaceTMvuDwYp1nrMqQNz/zzq7UlH85bIR/eOtRCYWsqqEy7xigACDCBg4U8gKhYAzREohkhkoKM+BF5cSsuWv7G8n8PBU1oLdd0DSMZQWk7xziSEEwHXpJABMcefIhAy1aKSkdsKk9qHb974I6K3XnVdWdPMzN83nWVRELpNDNsgiwwUKUiFbVKjNcISmj4zfbweA2y9+Nl9WWGeMLjslrMPOK0/rxL5ygrAvWtt9k3YapUVAuy89MTACEEH351SMOSekPsZwfXEic62y2wZoZ5rtWeiSNxKOQuuA0n8rxSCN+OcudhwIlS5hjExeb28kwGTgBF23vdZp6Qce2MyAdC9Hsz+U/Z6YrI+NnWw2i9122w1PPPGE62EgpcQTTzyB888/v93P3nXXXWhra8PJJ5+82vV89NFHWLJkCYYMGVKNaRPEGtESmV6zXvCSzSC3lx/r/AeM3WUuYnG2thVFBAChr0GmZKFdvGyfVc9OUFxAiVgEtJlbHEnbyr9vumz4cgIkrNBi/yt1tpcrtWe3Xf/jBWdx7uwOnSEe26DMXza9XWVERZtxZG1ZWy6RI3nPL8mQ87Oq4N3+7TU/KsKW5OaenaMY87LABBJVESymt5ruqxaae6JnK3FvObuNNlDMrsvaAKWjrzEJ+1sBoSmJKhXQZkS/ojlvwWOB2J4XkCEyPHBBO3ZMK2i3myWffs/uX3B3HkulbKIegFhIsudnGm3/ccCryFBy3vi2uhUFPaHcVjgpRsp997S4yxCIfLxtLA5a4oD7bjOuwBhzAUocMEFMgEI8pt0ewW3AEXMZm5yxxDcpbU/Z74/iMJUG4tLiikkn4rEAUEHGBItJI/hx910DABZktdiXzYPl8mDZvL5O2DYOZb5TLhhAMQieNJBtZp9g8fXIbqc9bxJ2pv+swpJld933NPLKrpexrWCOuf2edEd5z/UNsq0IgiAIgiCISpDot5FhH9lCqVCIVKIXgy/8aVFOl9yJzMOrX+oz8ehnoqAjpceVSiXf5zCZW/ETns3csVk9vuDIoJL9H1LO98Sqy0SelxX7OvBg6EpruojSCp8pF/meXq+JJi5Zt/1s5Asq8evMii82YtzOw+uXk3g4t3OQsuQhmcEIe948dFR08vNQVsRKRdWm9qvd94oL/bBt+p10BdxsgDCOAwmGrLAZX6XlHf0+fX45KOuMSAh+7Ym2QOkxc05U01eFA0AcfQ0gWdZUlBnHF/oqCMFpcSkhDLMCIApAVADjAXK5OuQyWWS5QkFqp1Uo43477mMsdlBlTHS5E0XTYp+XwWgFyERGm51ral8xIBY3zfYlnFaSxa95mcHCZKVIGG8tTIafEflCI/qFEiiaHljp4ADBGLgRNjOCm4h9BUgGxfS1REVJ8Y9Bi4wKSjvpGEPWd3r5303/e2idwCbrhEW6F6YKC4kMY8YFWCZrPGYBILL6c+sYJhhYF0ajt1casBIXXnghTjvtNOy+++7Yc889cd1116GpqQmnn346AODUU0/FJptsgqlTpyY+d/PNN+Ooo44qaaC8atUqTJkyBcceeywGDx6MefPm4eKLL8aWW26JQw45ZO03jiDWEHtlKkQKzUWJnNClp3Pm/mO/iu5eZBzhGR6XNWQsFv7s/d6Nn8oOj+9zcNcfxRiYvSeb5X3bSjAGxhWU993VQRJegIx3v0zcJ5kX5LKG+8R93omWMhYj7LXVFwHS9x0bOGPfZiwWMsw+8wqjx6v0ssMSV6tE5lDKHrK/bXZ3JMw9LkhsRyKoxf7ISIst5rf92x/X9XH21qd7TfNYyEj3/6sS/j6QiANsWkOJolQoRtL1VeYumAqJTMtEBr/dBhs4toZzthlkNivfVimAe630
2LkqAcwTx2GeBZh3xqXtdLPvmVIuoMfaG9p2km4OgjNkYW0nneHnBGmmbQjBtRDGFcCNeGqFZ0AZW0OLf35gmhXSyvbx87eTxWI0ONdKov8dMucuEwLKfhdsnz8ZgUkOSBFn+3Eei365PFiQBctkkwFpZY6PPQ62X3P6PbtP0oKfrSKRCCyzmO+e/523AVTM77OcehZxFTNs4JiXKbguIduKbCuCIAiCIIieAol+GxmRAlYVbAZR6QN1+jnGCn9FqR3kimvnvd8WQmf46ahhm21kH94ZMyVuONM922Ej3fUDJDMiTtnIc/ugWE6g8R5+/f8BxOJKJfzsKu//EqEn0aBUR8wqJQHGnECm/HkxXiKI2TKmbttMdLt2KiF+uJUhWFiIM4j87TbCiuIBEHplSK3TygpFQKK8kY2gV4Cum2kjyYUWIvxobQCxaFVBDEsInCILxTgK5onbZo9Vi6x3IgYsiveHnTBDQkCCd8ytE8DfPnee+5l0VujyyzIaZ5sSnoM1dV7Ema5e6Vgfz1FTtr+RPf8iW0ZSJc6BkkwyJROlxsA4WOsKIMiiJsgjLzJQmSyUyEIa4R2A6yAYO2S8fSC9Mr2+uJXO7rP7yc6zXKlPu30sPjeVzXwQnkPJy4zQojgQ8EBn/LHYXydNFqcV/FpN+dJIafHPogU/hoxipiqYdD2GFFOQavXlXCUArhQixXQPUd+BGIUA9PeThQWwqAhWbAaTEWRLE6Qp56aKcRkwmHJZLJsHggx4riberwSOP/54LFq0CJMmTcLChQux884745FHHsGgQYMAAB988EFJY+i5c+fiX//6Fx577LGS8YQQmDNnDm699VYsW7YMQ4cOxcEHH4yrrrqqy/rfEEQ5CpHC8jZZEpiglA0yiO+1WgTU3nQFBQHmyoAyI4REChBBPh4HRnhQqfuZF7QBxPaPtUHsdKzgp0MhYtIBMq6sJ1BqR5UTx4Ck4JPK5jFTdv8DMAKZCVbigDICW0lGIBD3RLb3FG/bbBCQqwzAktntWQ4nJOh9kxQ3XPlEs86SoCAlwSJjQ3Hd400BOpBKSahMLXhtQ1zis60VCItw/dTCIlRYNPfayGR/IynceDaIEhkgMPfyfH1SIKwCnAGD6+I6oUtaQqwsSB14ZwL23A+Uy+pjZfZdnLUZuvulPWeU+Z0UbEoDqey22SAvlsp6S5SvhWfbKQVpy7aWs/WtnZcOoOKh3p4o40RcqWyVBL3NQurqEqGMsxytEMoYcz2JbTCVVPH3N5SmFTeHs2Hs0Yt73DH3t9/KoCSbMbX/IAKoSJd6jwP1zO8gq8+3bF4LfkKYDNO4vCfLZMGCjLNLFBeQIhtX7bCimpKmrD7TnQnsFKTd72ZXIin2MbOf7DbaPtrMv0al4TzuJy7D5POQf02xQrgI3LNH0QSHFbpD9VsPIduKIAiCIAiCKAeJfhsBoYqjVCOls2VsJp19zvTL0KSzqKzwJxkAqcvZKPeefhCMymRfWWz5GhftbpwFAgDnPFEuCEiKE/aBMSHO+BHa9n+kxL5Kop8f4d0BfEeFFXqc+KcYEnKlfTA3AocV/Pwm9Ym5pRxnFedkHWvpfiX++9ITZgC4noBWbDHOAfcRM6+kMMadwFhu/WmB04+yjZR3fDtJie8jLICFbaWCpJdR5hwCJoMsIQjbMZ0Tr0xWmx1PAopJMO45rkom6Dnsyi3ji5GVtjHlWEwIfFYEtPhO1KgQn7/OQaIzyVQQgomgZPvdtrvvkXWypETedr4TiTn5Y3jfJz/T1WZAaoHUZDDI0JyLbI2+g2uCKPGceY5sMC8y3XPAswonre/4NfuayQiyrQUqLGixT0ZQXulOFmC96eeqo9Grny3ixi9bA3n1nH/++RVLTs2aNavktW222cY5WtPU1NTg0UcfXat5EERn8W2roimx7LJjKl1XDNxkBTHv//RnfH+2fz2313RnH3n3f7+0uG9/6HWUn5OfJe+vLyH8reZ6be8JLhDGu//Z
ObhKEYzH1w8r/PmD+fcYOwdfMLT/K7jgNbsjXVlVEzTmyjLDiJCizGOPP2dPyAS8YChvW4TNyAsyWkzJ6lKJbjiRKvfZgXtdIovJlPksyliArZZtlfW1NxaLvpwD/iVdGUHVba/NfExtl29PWdHI2U+Jla/GNgdKBL/0+ajnpUrL/3vzSQTC+VljjAOKgclQi8xKgjNh1hH39otMtYSiWbdiDDDVTvwnlbicrM1QhCuJaYVMlfjuaRvElcu0y5qMP2enpfeZsa1iQTXuHa2zJDkQBEAIncUXAioAEiVnTZYfM+erEpk4q1Rk4u+UN1ctTCpA6vYO4CZ7MbVNzPwI9zv+/rnrlFc9IYEE2i1tDyTsSsUDROCuGkRkhOp1DdlWBEEQBEEQRE+BRL+NgKaiREsondCX9ov7pREBuCbi9mFKR53HChErfZyPRSQncOmITwavV4wyEddRnC2ky7t4zhwgIUYksvHKOU5sdKrvUOBlHsZkOw9RadHQjJHuo+e2upy3wY7h7QcriAKxsJAWYNIiloJ02UbOceELD4CO4AVcKSnbO8au30bE62wjm2kVCy02OljBZGGmNyXlGORlnDXSPHTbzM5I6Z57NV1Q8oYVW8DaViXLZJn5OAenifxnXkRwIpo8tc99wY/JMBZ3RVaX2YrCOAMgjefcLPGBVXJsWWeGP4eoaH4XSjPuvPGYdUS670NcxpWFrVA2WjswjhyRLf1edGhHJ5f1nb4KMuHsSQiF1kHjfV9cmU/AlEGVOjvD731oPs+ZLp8VIQ4QUEyZfjEKGc6BAChGCkm3oC0dpR2WtlyWX0bLRdSDIeDxdSnxPvOCEWxWiCnfyaICEJqyqlFBC35eRocKk1l+QOzwZVzoDB+/RxNBEBsMLaFES1Elyir71x19nbH2VNL2kp6tJE0gli2lV9pfObYjMjZ7zWQfO4e5u/4K58R36wAgWfrqmcS/7yeFQvt+qfiXCCCxr9nFjNDiBxe5EvHWTpPx533hLyH4GZEu4fxPbVvgSlLGPX2ZDMGKrXHZcycUyoSd6ARIZu9CSJa19kpC2gxMxTiCbC1UVISMQl2GMixCtbVA8pVu6iqKSoOLzD50ZdkTlQYCIMhDZmvQanpDSqUrH3SFbeWfp7rnLTMZ9VpQKUo9B8UZGPOzRj1b1totMowrEgDaFhOBy/B3Anb63EY7gTeGOLjM21c2xdPsy0SVjLCg92tUiI8l44DSoiqTERAWIESNybiD629ov2uRAiIOCK6QVbbvpV63fWZyYrlXmlSZ0p6SqUQQkv1+WwFXmBKYTtC1opeXtQsgIeDrHn3Q8Yam4oizEXkALgRUkE3akab6gK1CIr3MPl9k9r8fdk4B10Kn/g4zRNJUaYFyQVQ8db2zz3uuXKetYJIKdEt8x3zb1x4rxqEyOqNMMQYV6FKkBfO9KEa6jURrSJl+BEEQBEEQBFEJ8kRugCgArVHshNIN5mPBz0aYur4SKWeQZLF452f0lcM+07pIbqudeOVeApiHv2JrXCKwXERr2Y0pk4XkL2uEQ6tIKCYBVVl4SfybdjSklrfiWMXMQsTbbveX7zSQSEUu+/PgXM+TKScI2J5xKhHBHzsylHXuAXGGkRX+fGxJHxmBKWEespUbOzEXM89EsGeFyE9/d9kSrvY3U+07bTqDytVpx9uqRSXHkCmuE8fstinjEYnC2EkJmEy6ZLaaL951SBwrkw1Qsoh1YqTWkRYZoSRYWCx1glRar90XVpxyL0h3bmiBLkw6ONN9iIBSMdCIenr+qfXBczRxxBl8POhQP023Lh7Px4mypo+ezRjVgrmKHWMc4Oa8ElwgwxWKZt/adlC6FDBz5bFcaWCe7DHDoHsa6nG1sGj7WDEgkVWccKQbodVuK+NCC8xSxvvGwoXulcMFWL4OMCVXVZCDEqnv6DqACwbehX1nyuQEE8QGjYIu4+mygnyzhMVBCPa6kjYx/J7JNmOpo5kqNvOJmWs+i4pA2JoQ
NXSAgYjX4a3HH8dtT0lVB/27vDiZtBvcfUjK5D1JoiTwyh/DZielg2ZU6j7CrJ2E1H0llZGkxY9URly6RLq3j3T2UGwnMsZj+8B+plwpQitgmsoW4AyZTA6IaqGUAq9tgeQcXEpnkzEh4rKkqbFcUI3Zl8yUC7fiTBQqd351VTJTXYZDMGBFa+SqdUipEHFl7sta+JPQ57VSymWmCSAW21Kipt83D0CiKoQdo0xSvsPvL14JOw8nHHlZnXFmvl8WvtTGc/35mO5dHiEW5115TskQMejefEb8s9VPLLaPuXS2fzIj0P6OS3mmqp+kqprAbpeddyKo0a40Fv8SNikXJc9Lyl4bRDZRFcOJfUg9D3nBWMrYUEoB4Cb4wOwHhmQgld/Dj5lex749VVboK2P/+v074fpcasHPiX4m4LCwuhOlCyDbiiAIgiAIgugpkOjXg0g4CVASMOuQCmgL47InCrEYZx/SnEPcPZDGI3OldCnPhJBl/47XaqOC/dJQdnyX3QcJVmw1EZ9t8YN42lGUzvYrhxdprtIP8M6hwwEVlxmqKOb4UevlljGqQiXhz0Z8x/sn6VizQcjWWWL75JRmqgGAfUg3GULWYSKNgBMJQAY6ahmAruNjp5/M9PMdfpAREEWA8IUlBZXKerTZfunzKf0snXbSSOVlg3bhc7cK8lAiC75qkT53WKo/TDmxzBfcrNiXjra3+A6VShly/jKp80CvwxP7fIEx3VvG79XniUoAdFaAEMmM03TmYFp0tM4U/z0ZwWV3moxH5UdyM3O+WceKP2aZ74LiARjTjlCtU2snaUk/w0r7kafWL7QDxwl+rpdQPBQ3/ZmUaWBlHXGBd37b09GPNl+T7D7rQPePT7ljlthGLgAe6d44fqYK4PrlsCALZPN6G4Oc7tGUWfeiH0EQa0d7t7OCyb6KpDICVhyc4Dv2bXCBvW/6Y0oVB9tUWlda+Ej08bPZxzaLBoDiwtwHsol7fIVQEjNfPTmlVEIAlJ6l6Qd26XJ+3nW/vQoKSAqPdlwtdHjZS0qWDbxZnUnh7x93PQfi7K4oTN773caFLlDMlaNMiURun6ZUW2v3SVtiMsiDZ0JIJSHydeBcQEoJmNLPCEz/vPaCijxB0i4bQQfixPZ214gAtQFDVgh9KytqoS8DvX2RUggjIApgDr7+DPcy7BIVK1LBSLCBYIybUqGxWM5MZmy5DD97vpUTBXlqOc7gMvNLMsnKBReWWZcOfLRCpHIZf5zpjL0I+jWlTHlPHgt/bnMRn4v+d9bPBLTriktisviZIL0v7bgmm8+voOB2i7DfQ1uFRLh9nxDQvAAwVw3Cs8fcOe5/B5UOphTmSyaZOZzSZiV7AaOeTeV6E/plh62N6rdqqGAH27kweMIf0yV5i1KXUC5E9txULiuVIAiCIIgNhyiK8MEHH2DzzTfv7qlsEIRhiNdeew077bRTd0+lx/LOO+9g0KBBqK+v7+6prBUk+vUg0s/ARZkUm3zhRXAdjemXnEpEZnoPnelxIzBw8wBmH2wZg+tfYf8HSsd0pZWiAljoRaPLMM5ssg97QHnRAEiWSSzzcMpSPSjSy7teb0g6U0p3ahnx0M4pku7hN7G8WXdgyxbBPpzb46AAFjs0EvvYPryLbIWmJHClPXU2mHbqqagAFmWAsBhHpgN6LmERSsZzZFxHl7MgCwSm8b3IxL+DrBNb/L6DNmsvMQ9/V3l/c7dt+sG/LrMOYlN98YoH+nwIsvrYlytpaQU161jwxtGTD8r3gfRLu65OdIMn+Fmxzz/fo4J2dhRadVZmoRUqLELJSPeEA+K0NQAskzXHzghIXOjXUvglXvWcBYCCLu9ktom5zA+T/WHENqZSPQ+9bXQZf6ZnjD0Pbak2l1EpU9Hkqf3nhFnfuSQyUEKfj34JMYXYcRN5AQZ+FodSCpkynU44vOsYM2JhQuBLZva5yHrnFE4du7QD0zqHzX5kPNBlc5UsPd8Z198xpstSuch6kYEq
tOP07SIYY2DtpVN0dnxJ0ejEhom1rcplxeky6do+ijz7yGWamYAne22y+MFBLlMeSeEKSNplLpnEBmjZignFFvBCS3x/sfdEAAhkYp1AfE2VzIg2ZdDyQXJb9euxHeOEP9ejNRn4sMaw5L02IXGVsxHN/3a3JEwohVjwqDCnRClvsx4/+8hlJdn7HfNKUTPusr+KkQKX2oaqy/UCFwFkWADjAbhXhcEFgTjhJWVXeNvkV8Cw5wVnDL2yXW9bFSOdRS+lQpFJRIohlHr/FCKFDNdlK4VJw9fCjjfndFZa6phKFYulWuiFFs+875h/q0ramvH3QCrPprb38GKb/h54pVxLBD9jCymRgcrknS3iB65ZG0KYEp0w8xbMZPRyZUpcqhIx2G5XJFFS6tfvJWy3zWUD2+chT7xPBJUxpp8tnOIaJHtrW0RWf04EcRCgd0xcv8i0/VzG9nUonX1qS5RGCmBmH9jt4CwOpOIMCOw1ymYiy1CXSPd7K7rAT3OMwlDbxu6ACxeQptzcsygUJQqRQlsotf1oMv2K3ZDpR7YVQRAEQXQdQgicddZZ+OMf/7jRCH/p585qUSwWcdJJJ+GMM84g0W8t+PDDD3HVVVfh008/xf3339/d01lrNlzRz38IrfRepffXcxIR4/AcOp6jRj+QxVHevjjnR5l25NJinT2+0BevI/nQl3BMySgZjZ5u1F7JqQPE/cE8p5AWMYybR0rjVatwfNfmuKbmE/cT9LN0IijJ9PyEjlAX5kFamf2UrumV2M9lzkt71BJR/dyUoXLb441pBD+WyQLFAlQAMCn8D2vhLzD93Xig+6l5PVa0cwIuw8p3QrrVpLIMku8pT/hD4ndXobI1rldL2onhevv54pxPWrj1PqNEUHm5cq+VEf6cwGgdHlbwC1uBQhtka5MWZ9tadR84Kcv3gzNOQhVkwDJZKM71WJUyYJ1zs6inYY+96eMSf8+yLjvOOj2ZKW+WyFE12xVHXJfJvmBcZ/5V2MdO7ANi55I9B73sPic6p4RnQDvO4kADAExn+9mzPO4zhURvmYTT3QtuSPTt88U+64QqlxFit5kx7eT29p3+J+notOekFf9gnYwigAoKJeMSRE/Evw+w1OtlM9N6EPF1SF8VeUL8imHeb98e8q89fgnD9gQ/mzgFxKKbHd8fW/cVNb1GZRg71O39yAvSsNcm26vMpz2Jzn/PFwAZi7OtrRBibTPGzQdXk1XVEZLnTwfELv8aXGn97n4n49+JzD8v0y+ygTghbElBF+gCcwwVXOlLqYAMV8gFeX1/UxIsm9P32RA6aEcIyHIBRO1sk71fdWHV9ASNeX1/W9xcACJtbhQjXeLRnq8C8bMDU7r3XKJkejpAz/ztl7xPbKY1SFL4535C/IUn8kovE98I3/4zRlkhkgeurLgTvioIRjqnTyXmqVwGoD72dp7+99gGAaT7d7px06/5z0P+tpZ7hql03ng2r378MNVJ/JMnFRTZoXHhP/vpPeJ/I5kX2GAzbctmLfo/5bL7bFatbUuQnpsRjhXiMrFF09MP0OcpQRAEQawL/B7bRNcyYMAAHHDAAZg1axZGjBjR3dPpcm699VZMmDChqmMWCgUcf/zxuO+++3DTTTdVdewNnc8++wxTp07FjTfeiGKxiDlz5nT3lDrFBiv6sagAhAWoXK+ShxrWtsoJOjJbo4WRHkak4tI09qGaMZYQABOR4vDKsbQzbvoRmJtsv4x5gvU/byNAy0WssmJzopxh2Ydb+9AXRSjXm85lOZlIT+2A0eJaIpuojKCTdjzojTG//Shwbz5O5LNZYd58/cxEm/mnHV6x6MQYB7fR3Hb4Sg/YXgS07yxNGxOCZ+PIel9MCYtmm4R2wPgZelbMCvJxZLMtM2iyrEKpnNOqXNZBOdIOXT0//bs1lMhwhlxX9blgHFHDELCwFbxpicu8UiITC0pmOedYiEIoSJe15s4H289EZCtucyUBk9l1+OeFPc9thl+hRYt9
bS2QzSuhigWo5pVQhVbIthaTmRnFxxBIiH42O4BlMvr4ZvPxe96yFXeV7SPEuR4nyGhHpMhCRQEgwoRgyrzzKpEZC+M3UgyASDj4ykab++e2L6ia8a3Dxp57kYwdp5GKo+OB8k4zW7ITQInI55f3LJvVJytk9QEl4m28DYG72LmppR235a41XjlTZZyeIVv3t1ouOLjgq19wbcdXXTc2sf5iy99lyhz+UMYhBBnOdOZHDyJS+l5mA1qkZ+9YbSsdHJMMfkJJWU9f8AtlfM8FksEzFu5d2xK2U1jQgl+hGSxs0zaWyZLRdlIIyMD1tmVIaip+H7LV9RHkjMEmm3DEdkmkYtuEG/tHyVDnYJcR3uwcrK3oRNK0KAM4x77f9xDQpUBLgqbS4oifHcUDgGlRSjHpKjC45VP9xJyA6mffA4AV77x5Rkr3E2sLZSIzMlIcddk6fc2PdDUGG3il0veI9LZ4/7OwAPAAnHHUZWtRlEBTUSIrGGq6yLYSDBjZO4vFLQIfLm8FAASCobkYgTOGolTu+HHEgX3ux+ybRDCVH5CVFp2deGwyB5EMEEiIfZ4tVxKMZwRvG2DoBPAyGX7aHvaqXWTyiKCPpw08StuBDHEFAYu2U8x3yBMFK5f4ZwBXuvxv4lkMcWlSK56Wq0Zhx/KDsBL2mXdOGdsXMgK4l+1oK0lUEvd8gVFK/XUyrzMe6FK8SD5rAijNSLbHK11u1ROH/YBP9+wHlDz7ue0xP7YyRGjKexYi6US/trD9a1lXQLYVsaEipQRvr80KUZauytAh1h+WLl2K2267DQsWLMDUqVO7ezo9gk8//RSDBg1a68/vtttu+L//+z8ccMABmD17NjbbbLMqzm79484770R9fT2OPfbYqozX2tqK4447Dg899BC23npr9O3btyrjbgy8+OKLOPzww/Hpp58CAM4++2xsv/323TyrzrHhiH6VnNDegxRLPzQCYGGhtHeI19x8fcDPBHORl977fhk8HZEaC4B+Xz9pUmoixKVsbKSmW5frvRI/7ArGEhGddp2uH1YU92+wJV0Sgp8d2xPg7LFQMjRzF7r/nP/wx+NShQnHiXWqez0fEr8RPySblejfUdJhVOL4dw+jKRFAxsv45w7sAzdPZja5uRhnVWIuVlwR8QN1ZCLnlRnTdz5YJ5gwUcoK0E1WAriHdT8SPRYrAldiUAU5qEweEFkUjdBiS+P4DsyOil9+hLZ1eOqpd73Bq7ztAw/i8p7cu5SZEkjaaRqXpnRlO21Z1pTgCiQdtenXEo5H64zyslid4B3p3zIsasGv0Kp/igUt+JnSX7JQhO29xMxDFs8aMTAoQhWMcNfWqnv9ca5FwNWIfm65IANkijprUEZgQaidw0rq75mQ8XdJTwIMvET4SzhzUw7WBPb7aZfzSplZh26liH/f6Vbeweytxneue2JfuV59kDLhNHTOtXKCXyXSUfIpkc9lirLk9gLxNtGjINETac+dWpJBppKVByKpIFP3BJdxu56gANMjCi6z2NpKTvjzl/fuGYnKCiwOwkrbVP5nfcEPSImFLM4UTPQa9WwrFhXi316WTHJFNoMmBOcBBNN9yThYyh5EiSBjtwXQGyo4K8n+U8qWfjeZV16mkfstAZhscG4CH/T2xoKOswHNGP4eU0olrv9WLGQV7jv2twJcz0EFsx+Zit9L2XY6W6ygs/HDosu+Z/Y+643tMoykcn0dOQMCqcAjhdpMXtsCthS0iOJ+0HaVHcjWSohpALKCOXGsK/BHbg31+SQkQ2sokQskIsUB24vRnp9WyLFz9wJ9EoKfGZ/ZLxfic8AXu50YjNJz3z1TpO1z+4xhS9yafeYqQfj3bZO1abP9FOMu6EgpmJKcquz3AbCCnv5u20KUpdm7gB8OIEx2HCSDYiput5BQ4lNZib6IDQBMpM6f9MHz7FoztO37pwC9X9qN8kzZQYwDkQls5Nw8n+lqJjbQFOnvJFA5sLODOJvWZsYmysIHieBI
u4ejSgeLIIi1ZuXKlZg6dSqmTJmCXC7X3dPpMbzwwgt4/fXXMWHCBBL/NiCUUnjmmWfwhz/8AXfddRfq6urwyiuv0DHuAE8//TTmzJmDiRMnrvUYu+66KwDg/fffd8LfpptuWq0prncopTBx4kQccMABnRboWlpacPTRR+PRRx8FAIwePboaU+wwn332GV599VW89tprePXVV/HWW2/hz3/+81qXal25cuU67aU3atQojBo1Cp9++il69eqFKVOmdMl65s6di6233nqdXFN6tOhnHckl5Xq8BxgWFkojRD2YjIBia3JcI5CsT6Qzwqz4B8TPkNaZZvvLme4ZztHjO+mUdeqYh/FK51q6fEuy/4l50LO9Gmy5Kfug7uMJfnr98etMyYT4Z6M/mRDJ3mq+Q8ET/UoiqIGKD54lfbt8J4IXBV72M3YZez7Z7QLgyv+ZDCu7bfYB3M1fRgkHj7KRtCzO/LMCoFR+OSEt/DHAZahB6IdzlYq2VkFG76MgD8WFzoITWYTKz7JSJY6/dPnWjmBLKK7p5zqFFTW5cM4l/9y2kcmu9Bi8jFDjvFBKO6WB2KnLEQtNvtCXyDrzzxlP2LbOWOuclW0tUIVWLeCFRahIZ7LaXn4qkoiKIVQkoaR0oh8rhjqKOBO4jD3FW2FLdrrSrW5fiBKhXH/OZJ9m83r5sAhktVOSZaXeh8r85hIqgNs/ZYU/2OuKKi/4ASjJdrPONsTn8upIO3L8nj9A7Fz3y+gFPFW+U3rOcudATPWTscfSDRyfM+nXSsQ+HpTN5nMz9+MCVr/JXQYTDKwLFRa2rmrPEesca1sBlYNBKmW3WCKVesF8sGzZu85Nt1MUTPaIyxR2gR8qkVkcZ+dVEMvMcuVKp6fd31ZUtIKfLQfKEItiNkvGXr9s9QptY4X6PmKu/f5nnD1l/hYm20oaJcBej30bwKJtPLjjlLYNbHabK/GZJi1aICXo+NvmAl4QB+YgeU5xK7Z49m6J+OfZTvF22CoMJtsvNcdEr99U+W177+RBBiyIH5EU9D4LI4VQ6r8FZyhEChwKUTYAy+Ri8atcqUL/33QgDeBVxog3JlvhO1NtlALaTGAclwxtoURbKJO9whmSQTN2/jaYKh2Q52FLcZfLMk1k9zkx1hP17HOFdx+P7TAjgJssTWVsIN+GccGDxmb0y7PGwt/q9o9y/S2tKO1/3t8qBgBcC33g+jksDrKLRbN48DLCX2LnlbG5yvSeVozrvpVmfCf8IWXbKC1hOpvOvW4y75gEFI/FP+jnD/s9K/d99kumJ4MjOZItEsyqGDPHyVtUiLhKhB9Y5aqS2LLwarXZyl0J2VbE+oZSCk1NTejVq1enxmlsbMS7776L/fbbD3//+98xePDgKs2weixfvhyNjY1VGSsMQ7S1taGurq5T4+y55574xje+gdtuuw133XUX+vXrV5X5VZNHH30UEyZMwMCBAzFw4EBstdVWmDRp0hod4/nz52Pu3LlYtmwZxo0bV7XtrOYxBVC1zMulS5fi5z//OR544AEAwLRp0zBkyJC1Hk9Kiebm5k5/Ty3r43cBAKIowre//W189atf7dQ4VvQTQuDoo4+G7EwP8R6AUgqffvopfvjDH+LGG2/s1Fj5fB6//e1vsf322yOfz2Ovvfaq0izbJ4oi3HTTTZgyZQoWLVrkXu/Xrx8mT56M4cOHY8SIERg+fDj23ntv1NbWVhxr4cKFuOeeezB9+nSMHz8el112GQD9PWptbUVLS4v7XVtbi6FDh1ZlG1auXInDDjsM7777Lm688UYsXry4U/fCpUuX4pNPPkEmkyn5ueaaazBr1iwcc8wxOPbYY7Hnnnt2WbZ9jxb9LK7nm/+akjoM2XeEAKWiTploRFtyJVGmUWQrOrvXJXGUYyrSMi188gAKDEUJ7aBQycwuF2nL9YhiNb0aXNStEVET5WhSJXgSwoCNAPac5GUFOj9K1L+om2OQaEDvokLLnL7Ke+gsJ+55UcIA
kmJA6lxQ3jYkxgeSQqHZB/qBO+XYcX/Y7dAPtMxEGzMrYDHmyoO6koiMpx7UzZy4ADM92vTcM96+DkwpVKHFax4gVLo0TmScmwo6Yh1IOi9tzxY7b+uUtFgnoXV2SKWQFdYpuo4eUhnXpUptiU7jiEj4LQBXstIXl/25p53VZVdlfpdEM0dxRisLW7VzymRfWGesO4c51+U6c3kdwQyAcQEVFsFELP5ZuOBOAFRRFPeNkRLgkdu+xHckne1hs/ys6CcEWCGvy4QGGfCaOj2nfK2JfM86R5Ptu8cqfU/L7igbZW7EQiUBBMbXUwAzkeLaUcTMMWCIuCnxyc01SurLUYkTmsU9Q53AxxkyflaMLSvVke9/+h7AOACpBVXf+Vx2W02fRC98vpwoIlLnTBC1li5EEOspUZnvYVkYSy7Iktl+6fKVkVKIUper9a0MqM2qSt/SrO1kS3TaAAWbV8NFXNbTff8NSsUVFiT8rOS472iGx9cMZ2dZEUiGcVa5vXZ5Wd8uGMEKSOY+pQAIkYXiJhCMwQX9WOHDb4kVl1DWnn0Gr3evWcaKmpyhfGAF05lWdv3gQfkMPbdjJcDMNZwFiawwCS38+cE4CdHBu69H3nmog0J0kJgWYPR+VCZwSkkJFgFQUmfhtzRBtTbrTD9z31SFVjBzP1RcoBAptEUKrZHCyrYwFm4RAJAoSA4hsrpvXBS48pP6AMXH0tl05j3dCzbjgmUQheDNS6G4QDaTN3u+6589JBSaCpHbf83FCLkgztIUnJneeUaMAzz7PkiU8gaS908rIEsj/LkqIqYMdyK7z/RttuXSE8FVftnI0IhZhVYXUOW+D6ZEeqIGsfcMISVc4JcTwK1taGbOvG+w+4qoeDkrGhZNqUk/YEkwhoxgyAhzfprXGWMQdj3mGlEuGNXu25IgKvt66rcdgbGUECgFIIN4n/nnnpLJnom+UO8JuUoysEA/U/vlPhMCbZlgSXct4lx/Nh0ECWh7MYgr69hKHkpkdBUP87zkAlxNkBfAYR+1CuvRvYMguot7770Xd9xxB+6+++5Oj3XqqafizTffrJrg19bWhquuugpXXXVVVXwF++yzD+6++26MGjWq02PNmDEDd999N6ZPn96pcRhj+N73vodPP/20akLYO++8g5deegnHH398VcZrbm7G559/js8//xzjxo3DD3/4wzWe68CBA3HbbbfhN7/5DV5++eWqzAsA9tprL9x3333YZpttOj2WUgrHH388pkyZ0ulzpG/fvjj11FPx6quvYv/998fRRx/dqfGmT5+Ov//975g2bVqnxrHsueeeePDBB7HVVlt1eqw777wTDz/8MP72t791eqy2tjZcfPHFGDZsWKfG6d27N372s58hm81il112qUp5zzvuuAMPPvggbrvttqqIKz/96U9x0EEHYffdd+/0WF/72tfwpS99Ceeff36nx1q0aBHy+TzOO+88TJgwoSrbesMNN6CpqQnf//73KwrrQghMnDgRZ555JqZNm4brrrsOr7zyCoYMGYKmpib84x//wPz587Fo0SK8++67JZl/ixcvxt13340777wTs2fPdkLva6+9hp///OdoaWlBW1tbyXpPOeUU/PWvf+30NgL6GjJo0CD85S9/wfDhw1EoFFb/oXa4//77cfrpp7e7zLXXXotrr70Wm2yyCU455RRceumlaGho6NR60zBVrt7aes6KFSvQ2NiIBZ8srLhDykYW+4JNJfGvXKZHO9Gr6wIXmZ12htvf6QwWxCJZBG6yu+BKOlqSUebe2EgJfkomM/jSzvT2Hvr8fWfErbgUJodfOjM+PqWnZKJsoHdcHL7zyY/i9veLTL2fFgN8UTCR3VNeAEmLhGVJP2zb8ax4KYLEa4l95AulQEJc9XsluvEZj3vcicD17yvK2MlnHZahzXJLRM/H4op1fPptK6xz0Do7AKBXlsfLsnVQus2ci24fprOskBLD7dy9DIdKgl/aqVkiqnv9SpzoV2zV
gp8tExaG2nFoynhqATDSjimb9WcyNFShFZAy2UcIOtpZT8jP6LPngBWZo7gfSpTcGia4yxJk2TzAOXiupkT04/k6sIzpcSOy8Tnp98v0x12dKO5HnPvCfNphZX5sjz/bJ8nv8ednmNjzUXAt+nHE4l+Jg9wT4WGOW0kwgReJXlKO1NuestvnfT99J2dJBLx/bYxCrFi+HANHbIXly5dX/Saext4fZ47ZB3VB192zmsIQX3r62XWyTcS6wZ47Hxvbyg98SGPvAYkKAmWqCpR+Ln7Til6igqqYFs+qjQKwqihdpp/gyd6guky6nm/R3EMLxsEfRsrdK7JCO6Pz5rfw7jC6d1gstvlBV3bbGZJZVO6aZuwt5ol+LGyNr3eIBT9X1jtbox3nmRodHCOyiMC9UqbK9MeK5+T2N9fbnuXMZVPbHoV+qWV3XKJCssy1b4v4vd182zltNwLO1olYkAhKstj1u17Cnn1v7+3WvrXHTnCGQBmRqtiWsJf83rty+WLIlcv0PdqU0mbZPHhjP/BevSF7D4HMN2IFy2N5a4TmUOHz/8/evwfbslX14fhnjDm7V6+99znn3gsIvjUVvFImYBmNAb9YgAoCvjUK0UjxuFT5IiJqSqXwgQIGSlMJ8VGWRTQx8iuMWhgDYlEocBERfBKjZZQEVAQu9zz23mv16p6P3x9jjjln917ncLlnn9dljapz1trr0at79mvM8Rmfz2c9IoQIZsK5hUVnGQ/ZM9hvGObwA+DNsXgu6v3ItMVPLvkPT/JmU8aH3Aj4AWH/QaXB6TrMP95/7PD/+7N/kP3OhE840+H2pcUnn+twbmFw0DAWGKXpyW3KfdI0J5r66r2n+Zmv/D41s7CJxahgIuqGKj+UhqrsT+eyF1x0MifRR/GyK97I1HZAu0Bs9kTifnEgj+0eNknSt3cBLgCDD6UhDDEDfnNPv1iBfQr49S4gpHNKozEETsBfZ02+RrSG0BnGXsNoGODNUWYzTqLyCoc2AgLT+U/VnHSSrTvLV+s5Tx3bGiLTsidzkvk+rr9bNVsB1fwtN8kNU+B2Pteqtivn9c1CzpfuDKLtsPYRq3SdPh5DHnsAODq8hMd91qftcqtd3NShx8613KfjOKJpmqtezrXwp/ut3/otPOIRjziVor33HsaYj/zB+xin5WN42n6IH/jAB/AzP/Mz+MEf/MFTWV6MEd/1Xd+Ff/Nv/g0+7dM+7aqWdc899+DBD37wqawXcLr7NMaIe+65B/fcc8+pAMMA8OY3vxmf/dmffdXnrsiIx1M7Tk573E5z3U4rTuN69J73vAef/umfjp//+Z/HXXfdhR/+4R/GC1/4wqte7mq1wld/9VfjZ3/2Z6/6nDrtePnLX46nPOUpp+pD9/M///N47nOfize84Q34vd/7PbzwhS/8iN+JMeJNb3oTfvu3f3vihXl8fIzlcnnieBuGAb//+7+PN77xjXjjG9+It7/97XDO4Ru/8RvxpV/6pVgul1v/3XHHHad6TTrNODo6wj333INhGDCO4+Tfy1/+cvz6r/86HvWoR+FJT3oSvvRLvxSPecxjPip57fuaX9zSTD8tGm8rNGWpQkjXoiGWWjaAmHwOapBsAlrVYF/dNXm5SdN1kAIVNlOZNIsPHCECsLpuyfsFAIiSF4NtYYzI2diqsFaDGkAprMXqORRg0olj7dE3B7246kYmnjLzTCOT1gRC1QWAyKkISCd/n6vimxbG5iAMgCnDpy6SzcCxE+y8CiSMW/wEVWqGiAsoUt0gInB5ADE46URWcAdAluJiI6CLbaXLVSf4iU2a5TmVCUg8+V1ACn1UFYHqApvKF41eimBaDJsDKgqAUfLn4bRNAamoN7sXKgBIsS6+XWd5T2JE252QJq3BS+38PwH6VeePFgc1asAPmAHq2/z7xn5agPVjAe9CSGw7KT7F4EGLZQHq3CjHgj4GP2Xu6ToqJUaLu9V35hKhkyEynBmDnAphIQTQOICaFsGNgG2S
d6B0xAsgaEuH9+VYfhUglju2c8GmAGhUAWj5s9U+lH3FaFSmNZ0LkcxWEFelO8lvpPlgKD4/pSC4pWNdD5DLAPQqcXWlIGVn6LFADNAI8gUAJObCxVAPrspviPtLV/yNXeziZorsr4mToNuVgEDlJlN1BtesvxOfjxFjAKI7KdfGRDho+ZqzAIXpkwgnwASIq9nJ2jQzeGlSGEMEkwAEpHmJSfdDL/eMSML+kvwtLzDnNLV/X/YfnckY5usbkHMrXfakKSmBBCealKIUokK651HU3Cpmv7XL9f6pR+GJ/KtivU8aLqomJEI5fvK6pm2g/HzaZJWZfLpvcJLjlgHHNCZMLF7FKlcaCihotDHDq3y85GuisjCme1WSyFZPv5AaX/S+yxbRthgG8fI73DisRp9Bv84wGlOOEVOBXxmYTd5o+VCuG940XwZAYy/3DNeDNkcyzqYF7AKxvbbTtRCBjQsCWjNhDGECZAElH9J9HIlOMPwmnweKDzkRtDQ1AW+rfCpLeSZgsf49lfAMmi+pXPqmn+ZQ1b6TXHuWu+TtLdtGyX+P81qfzH81h/ZBrgMbFzAGAf3GEDMIJduVmH7MCEFAQEMGngDPMZ1vl7mw5RyLSjPSrGmqbAMm2wRo7puuO2zL/tL50Lwh0legYL0ORMK2iyENhoX6dALApHm2aqSab0fJEWNe9235YG5aYCNzobT+ulRK49oaElUIEtni1syvELvYxcdmnAbgB+DUAT8AeNKTnnRqyzpNwA/AqYEcpw2WPPShD8Xznve8U1seEeEnf/InT2VZp11cP819SkR4yEMegoc85CGntswv/MIvPJXlUJLyP6047XG7Gb0Kr3adQgh42tOehsc97nF4+ctfjle84hX4ru/6rlNZt2EY8D/+x/84tWvvacbb3vY2fOADH8ArXvGKU1vms571LLz1rW/FU57yFBhj8LznPe8jAuFEhCc84Ql4whOeMHn9cjKybdvisY99LB772Mfih37oh3B0dIS3vOUt+Pu//3t80zd906lty/WMg4ODrZK+MUZ80zd9E376p3/6ukhp39Kgn/gqxO1FpVTYCJTkgVIZipAm5VH81PIEhgEtL2xjpcgbYfocuG7svxClAKKFDVnPNBdjkm0yJ+VayA0AO1iknb2NEQfkCTwRT7qOt3o05N+fMWGAIu1TMc4U7HM+5m3IvmpzIAq1j06sfLtiKpIlGaz0mzWQNwHg6gnutk7War0jQvGWmEsl6v6vWFCx3uf178UocoapszYMPaIbENfH0pE8A/3IpMfku0btQsbP2ynYpyDKrMA3AVsqucu53KB6AuYCDFTKMyJW3hGUjqd5MbcGYQHA8kk26PUKBfQAbN3O+WcLG0WeTC4RM+BPl7GNdVEDOFqQyiBQzboLvjDG1KcPJRmQgmk6DlPRKkf1OoAMREc35se46QEn4CKFAKBHNAHBhwn4F3wQEDdJhMb0nRi8XOVcKp4lKVBqUuNCzS6cnwvV+yofCmaR0QKK/9GWbvRtMS9WT1ivs6I1zc6zrUD7laJm7229bk1ByhNfrxtE9HMVAHjivlAzF4KX4+U6Byfw95otP+6KbQ/kUOZqrJ7XDUEK+qsHbX4LgH4yxohA0+vu9HOYyI7rN5vEmLset5YYJe8oxf6KeecHgBiWLcZ0f1R/qdJoIt9lErBk3hCVC/Qo9xy9d2ZQA1URfb5+ChYZixiCSHtX4NqEXc2FnVPnBPV6KOBnEqCpnmMAwEhsx8onNYOTQLnnzQr+J1da36PUBCPrOmEkqUdxFbWMH4AJSDSXTFbgk0xb+TAiS6kixCSbyoBt5fPeCc6Y8zmb7oHtzDtXcjNYm8GHMTi4gAzwhChzizEExGiy7GPOpVE17hBNm0fmjTpzlpQ+z9+5PgUFZX0q85a5NEGJ7KquXOVvWwFRuj/n+1X3j34mX0v8IMzBKr8iN5b757wxLy+QTzRL1f6WmjvV16s810vbohK1hmOSkaXcCDeP
AORzXq6HMYOiGxdS80LMQHCg8s0xEJgZ8STcdTLqfKhWSahyljngty00T1Zvy5iu1/JYzZ3CTBklj696O19mHbec8/PmKarm06QSn/nDswateS5WNZGV66xet9I1KxAiRdwIzG+XW+1iF7u4HnH77bff6FXYxS5u6fhv/+2/4R3veAfe8Y534Ad/8AdPDfADRH70ZowYI37v934Pb3vb2/DSl7701EDJP/qjP8LrX//6LHf5mte8Bs9+9rNPZdmXi4ODAzz5yU++pr9xo4KI8LVf+7XX7fduadBPO6+B7ROgiJh8TAhi6065iJW7btMEZlLo3dJNKTIvM/DvCgXt045iYF4BZBRLsYMIrWmzkbt61on8XenurCfVkwlXJTeZ/QtnhfcTnaBbHrMcjRVmnAdnsC974QSRlgookn7b/DSyhxdHtFG8MFojk/OJ7JcCfnPPj2qb8zbM91kF/AGQYs8M7Ig1sKYFNf2eFiVokIJS+mocB8T+WDqTV4cC2oxj+dmmKYBL24GaNssvkm0FhFQQFoDKdwnrsCnrklimdSd9DXbl3yPpkvVBin0eMo73tapaew9l2Vcd06oQdK2iPhdV+kt/7XJdQLWUZ11YrkHMOfCnBakJaOM20250lZ6KQeQ8FbyrOs21eKj78XLnCyEVReZFEJWxGofiWbM+FhaoG+VodSO4VcDRTSRLtSChQCABEF9AFn1sZtCmz+s4GWsFG/U82LZd9fFrW3mvBg4V8OTZsrcsE0AFHDZlGXlHVoDqDBgFsL0wpYDkbD9kr8caMK8LavMmj7wOaRyr35V9RzhxndTPVUwdcjtPv13cOqEyL1qmncg217mAHu/pHOD6ugbI92dF9Pov2vIaIPf95TXXipYIKf9Q8o0CTMK4Ss0StoWhNgF8kqf4EEHpRpgblQBk1nG6tmWwoAa8KgDrBNi3taCuwJ/mpWaaz2ihvGbL6LUOVeEcQNDtYwJHwMTqFo5yr1eZR0u6TSfBvgmTuj4uUl5NUYr9Zbt4Ilc/V5qox0cjg0TVPbk06+m13GYFiQiAonzJRwEHWtNKkwYS8BTKmAlQyhnsIyNeiaS5LJvctCb+baGSH6VKNj0xuOa5c/CAA9Ck3Ly+95zY0QJmUozCDvRyz4pbjonTigK+F6lXfzlEqT5OaxbaPPerjgMd8xOWC5U0d83wy+Cfl3w5ej+9ZrCZNUsV5p9uDwXOr0lTUKw8sIOAYZTUK8RoMJ+juoUTFYnq5PVBpDxHn4C/BALnYyIIMOXlJIO3s2HbNq51DpKOu20+6Ntye13mHAzUZj9DnI8hYpvPg7yf5vMkXZY2xerxOFvfqKt9mUMzzbjLcnQ75rluvczMbjzp/652FGWfSdPi9WkN2cUudrGLXexiF7dSrFYrfN/3fV/++81vfjPe//734+M//uNv4Fpd+3jPe96DD3zgAwCA173udfiKr/iKU1nu53zO5+B3fud38LSnPQ1//Md/jFe96lXXHPTbxenFLQ36hcpbbO5Vd+KzkK7cgEq26XIgxZZuynoilQHD6xgByBJRIRbwTyZCEUyEgYGGDYyxsCZkoEIfeXMI8iPC8aXsN0aNFOy52we1C0TbAeojMgf/5pP7GdBXT1Ij8Qk/OS3I1N5dflYQVLBPQSr12NFuc0bq1M0DUxWbthVGtBA2e23yqBNbU01GFUAzYiYfmw7F28IWZpAWoIIDsYA3FLyw/Da9ADbrYxlv9R4BpoBE24Ga5EGSgJQa9Mj+bLZJnmwHsm/sArEJmRUIYsAEkXslRsxFR5oczzEdP2E+ka9Cffpqb6Ps2ac+Pt4VNsF1kLgdfJywJQKKB+H8fJRzZQr41e/Vn59sHxKbTL36/JD9gGhcZc++mKU5q2Mu79NUQLQ2HT+lEFsDx9E0sp9qPx8tqm7xDWTXA5s1Qn8M7vZlPfoV4jiA6+NrDlr7BAx6n0DAFWIo7MAwOvnnA3w/IHoPPzqEwaXvh+QTyODGgg3D7i9h
GgtuLWzXghtbgX5cwL5t4J0b899zX8KYxjPMmBAKYhLzxLMQmAGF6bf1ukZtJ4CibUALec7LfYCF2Ssyc0GKxnoO1deHOcAxkwq+rK/njIFMWxg8u9jFzRoBJWeaNFXMGbb6soJ96V6kBed53JeiNV+mieNahYJFg6/uDMkPSxm6FDzazgKG0ZqYpDGFZdIwYdkQFoZhwpAlLyPLtX/jhAXkQkxKE0ky06QmhK3F71QwnxX8AWTm3okmLAX55nLqk9xYJBaZC4tuzvZnVD6Dej/U+32smEGzdc6P9fUyBgHYghOpPuKcD869HTXnm7DtY5jIPWagCLIR0XZADDCLg3xPH4OoRWgzmeUIgGHIwhgGhSDjm8aLF0vE5b4w47XpatGBDs6JH1zTJS9E8YDbuACnB7cBQgL8fAKMRHq9uo/oPbACWJCAmPK6TjTSPTflBjAWfv9BuJaKImMA3vq+S/jg0QZ7raxraxkNc2quq85HZiAyIqPIyRNP/Gy3NgWk5zVonL3sYpg2VAWxE6iZfbkxKT0S0rxDx7ZWWACkiW6WR2dpbj/AmhbGMEYiRI7wkXI+HCJN5lk+RkQPxNk1zVfvb7umGSJwmreIvGfFoq1YzCrVnwGvOkfUVZ8tf9q8Vm1ixAnmdRmUj9CUt6XxdQJQc7nGyEqEBA7OwH5dRHCJmVytr9fcr3gz1o1xykaU5aV55WxjWNnJgeARJ9u/i13sYhe72MUudgEAr3jFK/C3f/u3ePSjH40Xv/jFeMITnnBTSpiedvze7/1efv6qV73q1EA/ALjzzjvx9re/Hd/3fd+Hn/zJn8Rf/dVf4eEPf/ipLX8X1y5ucdDvRBN58fL7aM7pulBRTVq2+drUkleoXr/WkcGuNIPKklRRijZEMUvPmAiQ4eItUhUbwmaNsD4u/iXJ1yuydjc7ILaJFVR1km+ZMOZJ6mUAP5+85BSs1IJfqGZx2ihKBDTaCZwmxwwSKclZ5zlrMWjebT6PK4C6W8HLeac6kA3ss2SWSpbqV41NfjFledH7zP4qLLDKh0SLGQpcBI/oGtA4ygTYNhOGVEwgUmYEsgG1oRQIAFCwqVCIUvxI62TqbWWLiCRjhKmc0YQplvZD3Z2d/VdSd7Z2/5KxlwUPTzO0UKhFYUY5z+eFD6T1R4ypkClFnZrltw3QJJcKjAnwgxN/HXgB2OS8qf37fAGfmjZdK5rkB1kVb5pFZmZG0yDaNnsvqr+lbgfZAkCSH+Q7fgAPIvtqmhaBDeLQI7ABpXM5boQRiASqKShJUOBPPABD8gF0/YAwOLh+A98PCKPDeNwjjA5+8PDDFDw0rYFpDbixaPY7cGNhuha2W4BbC9PYDA4CAtDVkQG9wWXQUZ9H7+XvyqNQwUYAebnyGwbEDG4FgOT0uwpIEjO46zLox4ulgOXhDNCMCMzyGrkkPWWTb83268UJT0LZmMs3GqS47HXpeoUh0LVkS8UHfuL8sRo18+3km+EEOy3jFzOGz+RrV/g9JgFpBBTDCR+3axmGCNZQLuLnAnsteZfAp8Z2sAzowDQsOcrCMFoGaDNMcghtfBLQSMCoiAjPwhCzV2w8CwUYmr2X169+bVac3zbe+T5JBLpMEjuR0lRPZwVpLqf4oM/vAyMt4GTjmqpyMAs7tGHkMc9NN8OqAEU1IGGaE80XmhubCPgonoUTj7SaWJnkPbEIhenX7ed7NdjKvSkDQTE3q9UCpTHdy8u+YLAxU1nK+fjUQCAwlZHOjVzXtqEqxogPHm1w73pEa1POyCp/WR6BdC/U3Hgbw6/eJt2MrF5QgX3ZE1IalSaAX/3dBPLVfsH5vaYt+XOYXW8S4EfGINTnS3CAFzCJ2IrnXhQgPKTjxOf9TOliQPAE0OyMMkQYIay+QNOLpTWERgE/ZjSGU45ZNalVOUdEyM2E2myp/ueX3W84ceoW/8TL7ZPLzZfm14t0LZlIsGtTAXQenHKzK+RNoJkUry4+AX7FQ1PmnpG4NHJu
2baYJD6vy4TjSrHLrXaxi13sYhe7uGnj7/7u7/A7v/M7eP3rX48nPvGJHxNgn8b73/9+fOVXfiXe+c534jGPeQw2mw0Wi8WpLX+xWOAnfuIn8CVf8iV43etetwP9bpG4pUG/bey+GqSoC/vyGbo8GLhl4lL7hNQx6Wq9DkEA2uRxs3ahyBZFYc25LLFFGFRaJhJaJiyaLjOGToQyY5RJZkyRsqxkezKT78SKJbBvNjGs/W7yR6tWfoZMkoW5x9lHznKR9NRu84mXTEwgxkxiqi4yzP1vEAlU+SNMigd8maIFqoKGApkJ/MvynmmdslROxdCae42QbfMEPQZhIsVRGFVxGEEmebRV7KWJPKLKKC73M6uJ3AjaT+tuF5m1BBpLd/VlmI8ghqn3q7FXKOBUXx0HIDjwcJwBXtkXXjru70Ncro58X0KZATUjYOLzk8KkbfGQa0DIP1p+WQsvTDJONZtAC40Z7FsdVYzNIXvsyYKSDJjulwTKRiIB/exCAL5midh0iKZF7yMGF7FxPjNAaiaxbl9rCA23WC4WsEywvgcNa8AP4MUZsB9g3AZhs87rhxDkuRsQxxFx6GX/rI/hRwffDxgureD6DTYXjjAerjAcDVif7+F6h82lDfwQ4HqH6KN4AxoGGYJpDNqDBtwwFmcXMK2B7ay8VoFvNdhHhicsvsws9CH/jh883NrBj+UxVi31ZCgtn9EsLbgx+dG0+lz+mdZmMNK0DZr9DqZr0ex1MGdvAy/3wZsecbkP6vZAy4NyPnMoXfbzYJaL7ZzJggLuaXH3hFwq0XYpt13s4iaNua9VXaie3H/Ta4icgD438VmLmLJjtjVqaSgT5qC5fpAfATi3YBxExgeOXQLoAprGgowtfqFBivIcAg7aPQF+QkSbwD5eX4TKPscEFjkwehdwNAS4IGy/ps5pkBoZiOXWW0seB2wtxJ+IbcBfJT+u2whU+7B6Y74r8vsZlHFQ39oaAN3KCtLnW/IyGb/09Vik+fV4UEUHJsDqPXnsQeMatDkSpvsgcunRjUkK3WbwJjYdDDfgAADF59sFARTHtGHG1AwraaaCaUF7B7LM4KV5xy4QujOI7QGGIMtRUHgePorUo09AUbQt4pik2InK/SBGAXJj1fYWw0nvOvXKu05BRHjwngCLR6nRh4mwsAJWTW5dKqdayTVOcn3dDj/dpglLvvK63ebdlxdlbZGZpGnuHYlkXhMDyFpwkkKXdSxy3vW5QG4AkpoH+TEDWzxvXDQWPhbZVpXtdKE0QwRDaAJBD+pG52SQ43hhGZ1lLCzjoDWwLK+1RuZl0kAXUp6vDYZizzCG0jCpofiSFq22+m6nc22afSCBqtWY13MlU/ZnzmOSlYECzrXSiaua1K7U0yQ5LMNwC5Ou51SDnAnwi0NfmuYAUAvZLwkcliaSK98PruRtuItd7GIXu9jFLj72Ym9vD2984xs/psA+jRe84AV48IMfjLe97W34nu/5nmv2O09+8pPxRV/0Rdds+bs43bilQT9gCvbNWTxAYfLoZy4bs+JJhVFd/rc/+tW936EARWsYTieEac441l3LqQnSB8ATEI0AD4gBsT0AmRbGtlDPruwfZxcI9QRPJ3w1m6+OqphTK7AQcZ6gRiqdskgdsYSISICpPHCYpFteQD7p8pbC4ijFg/r3Jh3E8WRxqY5UPIuXM53QTalAS1lw3TW/pfs2bax6ZZz4roKpthinUtMgjqNMZN2Y1i2ATC1xKBKMQNXhbCvgTzvS1YOm2hfZK0bHKMwKLjWDMQF+lIscbS5QXZbZWXVo56IPIGCove+d6PfnnNGO55rhl6VG6+KkRgKiTd430+2QR5wsavohy0zRuAKSL2NYHwuQ1idQTYtLQCoQQjxkJhuqXfrK9CuA33oMcAE4HkMuGCqDwBChMQwmKSA6lsKS5YjOdrCdBbkBgcQfiMYNyC5ASwd0+8JCTF6ScegRjQDM5EagHxLDT5h94+EK/cUNxuMRfQb9
BmwGjyHEqogV0DKhSQVB06ZC19LCDx5h9ImFV1h5dSjo50cB/fwQBOjrXQb9huMRfvDoXUi/jUkRreXUxNAacMNoDxrYzmbgsX40LWfJ0fbsHpr9JXw/oAOks5wZFDw4eKgUqPgqCuOPTCid7ajAc2IIHcVOwOvCbqLthXpiwAy43sFJXuxaLn8XD/w4IdsXLnPPrQu0W5izVwL8agY3cP1zKwJw0DJiVIAHUL+4Avw5kGe0TUg5AUmDSN+DhqMkgxmT3HWTvYtdYvoNPiBJF1Ty2jRtJNDQ+3vVsJTHc8agnktMy/dCycVq4G2y4cnraz4gGYCZ5RD6nj7SZfb1NkBw4m+YJN+DMPIklyWAy1WV/CANOK4Hj2uQ28Afni/y1cELSNckn+oYkhJEBHtpKrvcsZbZamwBExDjIq2q5BAhN+t0iKZJ8o0xNxkZoix/bqpjVv2q0RT5bvK2ABhz/1egHFvjIDk5ALSiChAW+5K7X8O4sAkZ6GsMo0mgmEh7CltN1zjGOD32LtMwJx+umKGao6lCROWLrB65OQcGpj7ICjKmucmkQS2xBDNIVI1VBIpPe32cqlRtur+Tvp8b/AbAtjBswWwByFwyxsKiE5CJ4WNEwyLd6aPIvKqcp4B+JrOAG9YmLlEr0fWq5x4ODO8FZCxsujzY0hxZyS3npjWgjPG2eVAtyztv/iROJnl1HlOptiT/8Brw0/XzoTTi1adamdNFGCZ0xgK2lXOAzYlcOc+H3ACyNgPBsfYCrD8f9VGVZK4/6rfLrXaxi13sYhe7uHnj9ttvv9GrcMOCiHDHHXfg3nvvRZzbdJxytO21t3faxenELQ36zQG/bcw+fR+YdjxPYlvxqgL+tkkH3oggQLrKKUkVEkmRAYQAKaIApSN5DIDxwKLdA5iledwPQLPMXZ0hAVS5i7ZmgG0rMoUyMZysm3YvJ/DJaDEpzZQpUf0o+WUA2g1awL6W5fu06ScSQACmk7/6t0+wbmbrx+m/y3hq1Yy+yXZXY6JshRAKEMFp2yxbIBWLMjvPJPZXkh8iNgKwtgL4UfAixZhkGQnI8ouyKQGUnrN2wZrEJlPPGdvmrmeKMUknhsnkXr1RqJYUTcsSPxSb2WhZwnQGfE5iJr2EGAQgvkbyU3qqKQCk57bNndLJs1ILSXkDOQMwup4AprJGtdyRryQ9nRx74fiSgGabHqEXOdywPk5SrGPZ18GDvIBHNfuvHFPqsdRi8BEbF7F2UgRejT57BJXCooB+DRP2GoPGCFNROskDWmPQNHuwts2gH1wvz20nj20n6z70AnczA/1x9sVTGc/haMDm0gabSxusz/cYViMujgK6jRWroWVCQ8AyMHA8wIwmLSeAG4brHdgQ5nKeMswy9sFH+EEYfArwud7hOP3epQT2HWXQr+yrGvQ7sIyWCfuXNlgaRmMI7X4L0zKa/UZAv8xINOhWPexeh/bsHgDA9gNaADyOWZqV2k78nNSvM7EtSGV8q+tgBIAobJHSIX/y2jJhzhLfZybsLnZxswSR5FAKim071jM7RBtrtAloVqyt/dv0ueZrE8WGa7Eh9yEMAedaxsZHXBoCBh9hbMqJ9J7qBwElNkc5P6RxLX7JwzGABCjFRc4ZVNpz7TxGvaBazqwwSd8SABHdyXuuXoMyK2h2bQHkpljnP7qP/FBem+c/mt8AlwdvtqgpTHwc9YmydyagLU9e13WOCTCQ8VGmXwQYoFjl60lWm4Y1sD6E748RDi+k9fIp/zJl3byDMcoWTXkm0jG8bdv0/gyUa7kCUKnxLbP0Q9lWLfZbQwizyUBMIG7UpqrEGqfgEKliOW1pqFJGvtyPFrJu3Zlrdt/QNb9n7fB/z68TgEVoWPw4G8OJ6Ud5HhUgxzQTX/E8VV9r5PzT5fxK8y0KXhQKag/ipGgRrQJObcnLkxx6tIuSm/pW9n06R2J9nG/J+SRH9EBI921geuwmcCvvB9uiNS08MaKR+ZSSkEWRwaQmgbI/
1avPsjQvWmX21aBfDcSrz2VElgAefYGx8ryWRRr4cnLL+dz07uS2z87hMiB8EhjV5+kYjmme4WLxZx/S+o0JuFfvbA1O49Aaggny98K0iDwkO4RkYzCECYgPNoBzIHaI3gkrM4bp9FPvNxFJJnjH9NvFLnaxi13sYhe7qOOOO+6A9x6Hh4c4e/bsjV6dXdwEcUuDftp5O/fnmrL+5HEyV5oBF5i/V3Ve02xSdDOYhhsClpZzN6gL08li7engI7DxEcZ0MPtSQFAws/YsnC8fmBaQJl4j867vgFwEqwEzIoYlBrEUYDh9dC5bo55ql2XsVV3nJ9ZBY1u3ex18eeBv0umbWH0qU+q8TGhrKSpAiklNBAITWtsBVgps0QpLkvfPIrYdTnisuTHJevbZVzGOlb9iXl8BY3m5L6DE/lnQ3hnwYonQLDOYtBWQTYUWchvEcUAY+pOSlLaR5TbiFxdZ/OIyw7Hqzs/L1qLjNfaYyT8ZizTcRM5Tu8bdkKVr8/GZxiCfpvn49eXv+ngKDnAuS2KG9bGAsQnwyxJEieGnwKx4zJnMcoyJNaZyuVr4A0uX8xiAIUT0PmDjAzYu4nDwGH0N+gm4N4aILgF0XZQinAuEAIILCSA3BoYMbNfB0DnUnkfUrUDjBuwHAQD74zym1gfYrkX0AXa5QtNbhNGjWcqt4OB4PAG6LZMfTdPZxKZjmMZk2U1AQD1kxqrIgkYfEUM8IeWpQN+xDzhyAWsf02PAeIUCTkPIoN+BZRxYRseEA5eYiMcjTGNglwaub2E7ixgiFgqgNxatDzBtddtjI7K1QDonhBWtBWE5D1AK5Jn2UK6BJ5h9+pHq3hFGXPeo/RWvyfLn7NZdPGCCUyOO5FcoDR8VEDQpqKssYQwpHwgFLASyZ1+IpaAdVJYgxdKKhPmNDMuEcwuTJTgzyKCNYMEBPh33qemE3KYAbKbN140Qi6cxMGUzMiqpeWWc1/nUPIepZdRV4rB6W+9nUZ/X+6mS9MsF/VhJtlORaBQwg6vlzoAtZT6iavTSbUDJHSKh5IKJqSXAQcCY2ExjJWFIKbciSvKe4xq0OQYdn4f/8D8g9MciX63SjbZNPq0CEFDwMAhoDWcw1YeY5waGC3hVmp3aPPbkR0Q9xhUQ1MaqkLwmSRpxVO5RmV2N3gMh7KfBRyyaDtGPAsL4CiSrmF31GGeQxzaXB2FPMXofcXHjwQR8/JkF7l2P4k2X2GWNEXlKBQFPRDVPyt5u+nqWuU/nhx/TeTOAYhSwL3kjT3NSAGgy4BebpTD7bAJgjUU0bfYupyYdrzXLTRUu3CavgzJwkRihJwAwZbymHDi2ITf/wDsY22JpW9hAMBTgQvL/DJj5k1O2KlCpYkOUbQuKrK/8pjYGeB8noJ82bwJCwgvJXFWbMHR+q3Om7D2dGhMm4Pwc8Fe/T23wY3tSzaXyZ/eQ9RzSOausZW1mCBEYfcgNDZy2uTGMhRHgT68ni3YP0W2AJiCOKwF43VDA9uBlO5p261xQr1EBZS6mefP1jl1utYtd7GIXu9jFLm7WuOOOOwAA99577w702wWAWxz0m7P7qAIG9P3LgUhbn1/u/XqCfh0m5B8p8oQPQEygp04U62IHoMw0gvcxd9Vr4W0OYuXJaiqS2BlTai7rNfHfqHs9U9FPg1NRyTBJcaeaKJ/wWcxdppjI1uT1CNv33cSPwk/32RUBv1loYU0BJ5XhUikbn4oiSI8UAR+V8SfyNYgiGwUi8RpJcqoEFKafMcK+G3qQHRFDV8BB70FNYvR1e6DFErx3Buj2RYLVLk54j1FwchJU2x69SLhGN4okpXoNsnS1x+BBbgQv9/N+VbAWFEtBJo11LhRyNb7X4HzIxyaQvaUmjJN5Iaf+uy4WzME9AHAubzvcKOOzqQDYflWkMdO4RR8QQ0CoqjEGVkC/NEbExUklMxuq8YnpGNJ/YwgIqfCphQsfoyicBcCzFMhHH9EaABzhvJyvhuRYNEwwIT2S
RdudRQwOxAZkWmGg7DsBItfH4E2Pxo2w+0uEBP41+wHBRzRrB0oXlcVYtlW99FQ20yQfPZX4JOYJmy+PQQL8/OARfITrHcbeYQgRax/QJ1afgn6X3Ec+P8cIrFOBrOWIdfo9k15DL/uYDME3HmQoy4mG0ck+HMRP0LgKaGdTZD+dm7B7cvFa96Ven/KGVsX0OuaSVObayrTtYhenGQy5v53w8tsC+Gnk82XLsvzJl0/4u6qH740MU+WP8kIC/0O7ZQwEUJuw36iAc/MRYqKsaqAMNAFUSz5V8qjtzQP1cutit9mifjCXV6yXofu0MNiukjETt+QENXiQ8lLNqfQ+qLmgEaRUtjo4UGL6hdUhwupSZtwTG0TbgPbOiGS1jpvuE2JYTgoYXAASPZ7n40lIDR3MgDdFbpKnzmic8mLLhM4wQojwnNad5OvCPkp7UOU9TWKOzxhWk3HLP2KKSsA1Cs2tfIhwXuTElw2jGwWkXtjC9GtYxlLApmr1Z+ue50RzVmhwE8APziFoE5XmpLrMpgWQ5PBVYcO2IutpGmFeEidgrADoohZiE9g0nTOodC2FAISxgIOqfFEDjiqnDwjwZEKR/3QDQIyGLWKUY8pHIQzqaOixVWRfC+svNzSGKjfXOUZQVZYC+KmqBVBsEOrI1w09t2t/RJU8ra/R1bkfK2akrAeVZseK+ZvXL+WlCvgp2OdjxMYF9C4IgF/lxp1lND6CFzKmrRFPzdZwBtzBVvyvhx6Rw4m5xuSxivkl6kq+grvYxS52sYtd7GIXH4tRg36f9mmfdmNX5haIS5cu4S1veQue+tSn3uhVuWZxa4N+edI366QMtcTJfQN7rggK1bJK14nl9NGEIcCkrsoxFOYfUCb4QJlYujxxlu/r2LVGvcMYpBW54Irv2TZvlxTqbbet6JcnqdCCGp1gGUYADtJxy62VLnIt/gSL6PriMaPFrLqjtfqtuUdfjKmAUxWjrihjiQI4FYA0yVKliXDGFhHhicQnEQDaPSA4RNdIcWE+VlosTF3PcRwEgFKZm+wH2AJWupzBFqFZ5OfK8Mtd9yEASToJLF2/AEB2KOugspTpt+LQg2yLaBt5zTYCMM5A7ryvY5AuaGUZpILMtYq1KwVJ9YJkUsmuUmQSRuMoXeTa2a3A3tAjKLi36QXg648Rk7RjDeyF0cEPDmF0+XkdZDgBYBbc2FyfygWjxPCrwb95t7IWcvSaJdsGjNDzIxWQWAF7gKN4QTWQopwJAFPMhWnD2vSgvpiMtjmDdnEWJjrEpgN159AslvBnbkO4+GGcaTv4o0O0Z/cxXDrGeLjCmY9fwfUjhuMR0YcJgGdaA2ISCU/D4KoqrrKdABBGn/82LcMPadsHD6q2aYzCehQAUB7va9RF+TlAwEx5PVXmkxuDZin7zDQWrCy/tJ+i+kMl8A9s5FpXFdcnnkK2nV5f9BozW89Jw0kMiP31p/qxocm+OvXlhxuM0OzimoWy7ykVlrfmAJMvbAEqKhbQPOrcA8CW0vbNEdF2gAXi4kBkPd1muv3KEEIC/OwiF9FJmWsMLK1BZ5HZLyr1Zwkpx5GchsACG6XmnAngV0uNVxccBSFMpVAhK1/Alww6KNMnJnnDStI8f78GN6vtzIu90oDN2H26vsoO2ji5n42hyAVmcIRFBp6CAw/HiIfn4T/8fvgP/wN8v4EfHExrYfb25N7dduUem3JFY4GGLUxilubVopJz+hATKy+xk4iwMJ1gIcNqMneIfoDlNgNiQIBhk3zcYh7SRucIUfJvxwyrbL8KZJ2Ao5V6BtlG9sViT1iIM9DxNONoDIgROLMwWLsA55G2DVle3BCw3xosbJGk1IbAibICMAE09VgWT8ZKLn2zTo1VpQEtujGrI+R7b5u8FJsFYruP2HTwZLHxET4G9G47G44hzLrWECwbLBcHkhNuAoChjPewEeULzbmBnG9T2wn4qGAhDcmPkXP+u7CtXKzqc23CMkR+7cR1sgLXArgC
+5ABtZjObYbmvakZ4oTfeVK7SIoeOt45D57/tl6nTCtNWgZZhUKVTfS64kPMzXY61gr21bL0R4PDahSlijGNJROJn6FhjMFirzEpT2MYjli2SzkXm16A+P2zWTZfrRC2NRTGqkFVnsfJ+Xy9Y5db7WIXu9jFLnaxi5s11NPw3nvvvcFrcmvEwcEBnvnMZ+Jd73oXPvmTP/lGr841iVsc9CsdvJkFNGF/zZhh9yW2FbQSe4oQiizKTRLzqYECeMqU0shSUxHZC6F2jsgAKoscTcsADatU8Bu3T2KBy4JmW1kxs/UEynrO522cJHdk2b5MdFNB4YRED5RpQJXMqC3dugpIVrJZ27xVFGy0bCdABYysY2aRpnHOHh2hFHQiSRFha3e3dtcrM20x244MnlaeGjXoMGP4CQCRvhscCG3Zq6YFWuSOeACICaBBCInpJ8AHBVM65nU9ZuMbdUwXB0W68hqcC7nDWGW0uDpeiNCwFb8c7eYmAqfjdMLSSzJS6senTL7Q9/Cjg+8F7HPrATEE+GFEDEGYfT5k+R5uLEzbAF0L4iAKVIahMqm5aFVFlj6LAfAOtu3QRDleHBNi8szRyX2FsU0LbCmUuRBTF7jX4zCoJFuECQSmCBcIzhBaNlgsz4GsSFZy8qkBG/D+IQ66Dn61wnBpheFwBT+MeUxqVqNKeM49+5T9GEYnQGH6nkp5Rh8wHI3wo4dpR7Bh2N4BvUNTbdtaql4YwtQbJv8+gKWZevqptOfSMPYTE9F2Fs1SJEgXZxewS4vu3ALNmT00+x3s/hLNfodmfykFPtuKpKcWHPPO0yYPyj4/0TRFagzl+AyIJ7rNSyMKl0aUm+iesYtdfKTIDD+VxAtbmNTAR31c8wyMUTnBWyL0npMAuhic3ANQNRrp9YJYgAiKaJny9VpBFAEnqDSozQEhjSqP8ilXqovcJ8zZNf/AlFWUm6O8+Abq/SnLyBEjVgygyykq5HWqH/PvUv69iXxhZjHF7DftQkTvfAbNbGsQEotK5SHj0COsjzEer+H7Iq1tOs1htvBHY4Cq7ulhFoAKxCjqDS7ItTpCnotHMycKoty3yVgY1vs2EI0Af2g4MRXT3KNiwylwYhNLLY+hd6A4Y8Uh3WeSj5/fu73krnxtpmdMyPdZQ3Jsdtak9wiNEW+/hZ0C1BPAz1Usveo4ULl1aJNA8ALyzeU8UYBOMkVuHkYa0aJpEW0LT1YahPyUZaZy+7o9lEA/HxitAZqWYU0L4k1iccr8INQqD95nhl/2lcuAb2GPAkjn/LTpMV8PFQC9XIPpZB5kaxJvbiScg/h6PHFuaD3ZhEE+gXx+AI0K/G2mLEZAchtrC6sVJp+vMV2rtFl0rgKjKhQK+LlQQL/DwWOdnqu8p2Fg9AxnTWKNiiSuTfuua1u59jSdnJMxyH7X9U3nAYzN46ZjQwRQTA1zLKxaP8u9drGLXexiF7vQuHDhAm677bYbvRq72MV1j8Vigf39/R3odx+DmbHZbPD0pz8db3rTm9A0zUf+0i0WtzTox0CW/ctyJ3WncgiXn4hdKbZ9Z1tBpn4bN0e3uhQgdAJcZkQRUy+EmJ6bDPYhF6NaCqChBw3HArApQKXLUg8IIzJ4EyA0dXrXhSqNuqv/pExLXfBXFtRUBikDfhVLTt5ST4hZ566R7tEIyEQShfE3YevkiTzJtkK6lw1bsFFpKkwACUaRIyNlQugxon5g+ltcMQ1rKSTdrm1jW3fM19/bxsLTzmRm8bfJ8jzSnUx76bOJ3aZjRg6ILCynCCCOjUy662XPzoXIBmHvdlzrGHzMclmcinMNCxsgRsLCdrKfxnWRKhuH7P0T3Sisvo0AgP7SBfjRYTxcYTzu4foN/HrI4F/0AX6csvuUGdbsdQijAzEXAMxMO9QzcMQFPKXgha3qGQYBNhXSQvJnOQOL0QeYxNQIyjxg9dJSQFAlQBMTcFLoLceOSnENvhSVD9oG7aKFJUZsOnB7gGa5j7A+Bp+9
A7ZfoR16hONLiG5E6PsEgFad/LPGiRrgixn083BpHMPo4PoBYXBo9ge43mHoBtjOwvUOdL7HsnfiTciEtU+F8VhYfyLhKePQsnxuaRTok+eL1oAbRnvQwDQG7UED2zWwS4vF2Ra2a9Ge2UN7dg92f4nFbQewXSvScLYFLbrEqG0m+7K+PmSgOzFbh1C68n2IqQhfHTMsV6/WSHc+VCb5BoB+ZChLtl6T5e+60R+wkQv8vkjibQX8rpQXzV5XRto8bhXgLxIL2524jIexiFzuzVGbc4hhADSG0Fb+TAr6dYYSS72w/ORHAkDmxL0/y2LGygePckZzcvxYfPsoYLIfBKhIoIB3OYcT7zo33WeXA3i3MXHqnLAq1mf5Ql9kDAW8US9boIkiTx9ZUToHjMlft19hPFzBrQvIFLoWpgY1qpxKwU2TJgU+xIyxCPg3ZS/p2BkiGN3Haf9GBMANMKYFWFQwpEGOQT4icgEdtNHDEGU1iCEAbdPl84SIEWtWZ91gxdJQEs48FNc6VDlBT0Wu2KgNxwT6EZaW0RrCwrAwzNLxmhsB05hPclpVX/AjyA/ShFUBftEXSU0AIvFoG1C3D7Rd8u8TDz/PragBJA9g5yP65CPXO4/Rx+R3J0zLxhB8I3L7bVIlIDYZlFTwMUuLpsa33PBjG5Hjz+zRAKLUvJWAPfLI21jLak6iPj80/1cmKABEC1ABokOcXhTLsYTsC5htK3x1zXB9ksHdgFwv50x/nEHNWjWEQgMsqnNbJVTZZsnUwZ8E/QJi9qkcQ8Rq8DgcPFajx/n1iI0LONq4PNdsLWPZGIQILAb5vTMLC+OBhgX4W9gOZDdiXYoEGmvzg4Ld83lPOk4jEQxHIBACierFjfCB3eVWu9jFLnZxc8drX/tabDYb/Mt/+S9v9KrsYhc3JO64444d6PdRxLlz53D33XfjRS96EV760pfe6NU59bilQb8TfnBA6W4GoC2/H1Hi8yP4xAEQuRnTXrbAdbOk6Nm3JMofMbFnDNIkKbOmRO6oYQEIl5axtAR2PfjwPOAG8HAs7Kk0USdjkrSkTcwzzv4MOrmNqehTe0JoqB/eHLJQbFIlD1XChsYe5DYC8qVO1tobJO+3yghe5RWjGyA0N5LO0lzAmhW1qudZ2jL41FbKICMed/WJMimEBpU9065tLhNq08h4mDYX7RTcrOf5NYbD+TUFPtOY1TJW9W8Bpbigv50YCDLoworkpfwupSIMOVyGoVb5Ms3BRtNO/ECuZagMbdpSAMCQwOyRCQCjtR0MH+eiWlQZz34lLIHjS4hDj/F4nQuHmwuHcP0A3w/YXOoRvbDSVJZSQT27tDCNg11aEM/2v2GRQ2078HJfWGOLLrP+lPGgxaEIgIYV2qYT1iABA0c0HOGDeOooa8CHeKIIVIN8IU7fV4kxABhTgSgYho9SDGECBibstwew7R7isEJol6C9AXz2wblAt01+SxesoHqWhq065dUXT70RFTxVxuB43MMPI9xxj3HVw/cD+osbuN7hQZc2cGsnfn9rhzCGEwxDMgTb2ewnKPtFfAVl3xDagwZsGKYToM90LZr9JUwC/WzXgvf2wN2+7Ke2gH1adIQVD000Se5KfYTsArHp4LjF6EWerva30SIyoRTmTLoIa48QceXxuItd3AJBWthWht/l2CxXOq7T/ZQSyIBY2Bo107+zAizcaD+/jximlXxHC/5I9+AZeKP3I8s2KSeU63VnJB+RvKZqFFKmHFvJGRK4KM0gkjeMoRTkmZLfV8qntPEFVW5AyY+Qok2yf17AAQVn9Hdtl6WLNXfBbHkZGFQQQ5u7qpjLHmemXQISFGwLCQQcE2vLaNOSsplUenvoEfpemktCkKab1oLbJt1zlwVkTeygy7Hj5jMAVWoAkL3hCFWelT/I6TOSK3MGYgCAtua0SNsafISxLYxKSrtNWn6cHjMKFtvr01WqYCdQAE/LIg++hMgzNkw4aBkWATQcTfwhFfCaN6wB
KJ9xPeBcVl3I3sdNm4EoMIMXy6Roke617R5CdwbRtAL2+YiNLwy/1Shg32r0CTSOeR7TGMK5RQPXmgQEMUzTIY7rSWNWBh6BJHlrThzLMjgVWK6gVO2fp9tYzUVq9mI+T0xbrgvEgHdg06YcX2XrBVCOKI1DqrpiSGSA4YcC8vkBNKwzuy83bR1fKrmbjvOiA9oO3CT1ES4qIiqz6xKwp8oFoZqrKEB+uHFYjR6Xenn88PGA1eCxHgrot2wtfCfP9xoDJmA1eDAZGIqwLMtcdmdAg0FgCxpMYU+rmontZK5JDMRk3UFyjMYoDWLWy/MbAfrtYhe72MX1ij/5kz/Bbbfdhk/91E+90atyy8T73vc+PPOZz8Qv/dIv3ehV2cUubljsQL+PLs6dO4f3ve99eNnLXoYv/MIvxJOf/OQbvUqnGrc06KeRc/6Z3BAAmUjMu5dnkd+ujNansoZUCho3eSjrkYjAMea/k3qRfCB1EdrE7rMsXjPsetDmWCaTrkdcXcpdumQbRDbg5T4AnCiuTBhqCfAT6ZppUSBgCtQW6RYqMq0qmRNcnuhqJyjFWECI+bZXABbpxLtm9W2L+fGQilvZWycV9/Q9ijEDaeSGSeEjEomsZ3oeUxFtCMiSRFL0ihOwc+5rxKmbVRhDst5Gf18LhUgdsswVk1H3hRT84JMcmXrEJJCPLBIzsHptLnEog5iXJ0XI+w5e3Ffmaz0OGj53GaMUICBgZEzs1DHEVBCp1idJNEVXOsx9v4Hvh8zuc/0Ad9zD9SNc7+AHD7d2Saoyim8dC9hETIgzaoqy/UilMhNghBo4qoufqVscwQNugG0YrWEwCffWM8HUABIDIdIEFJ6sgeLcs/PKx9T9nDi9BJG11QLf4CNgGHZxIOtlR0Tblo71dl/A4UpuLh/XvoB+uWNfH9M459fcCKsSq8EjrFYZZNV9sLhtBT84DIcruLWDHwPc2iWp0HJuc2MEzGsZtrMgw2gSCMvt1KePGwvbLcCtvGb3OwH7alBWJT0X3aQLvvbQzEXvBNYjMfzGJG2lHe816JdGKTP8ECI8kzBUk+fnDbCdEaaDuXb3LAo3//1wF/czlOlyQtJuSz51H+8J9f1N74GGkOXEb4momfzBgdieyBUByWFMktg0CV0iQICCEApTqvpODfh5CGim/nPKHpP7oSBNVN9la4nQbaGqF7XkovrRAgmYSLnSNsZfva7Eublrki9XDUJ6uavXRhUmfIgYQ5h67tWPMZR7uYIp1TU/X8fbrgL7zNY8j4hO5KCaE8cE9jEh2wTMt3O+LIOY1UWAaZ4zYUiln3QhAswwthVgY8u5kqXcT1nOM69P9VqojqG6maiwySiD8I3vATeAxnXVdJZygyT5f6KhUj+XwLCJpKfec9tO8qemFaCPLWKbAFwrzZVFTrLIw7qA1HgjLNHeB4R0HzakYKUHM3AQWO7Pek8n8VyOSM2LeZ0qYI/NdB4z367aXsAPiKujiXoGAESVC29TjmFt8QeOTcmtUMDiSSMEFcDP1MelSnrm33d5PcJmLfnWRoDyGvQr+rkzQFkZxKEwcBVArRsT9bXe+Tzuq1GYfqvB46gfsR5KzmaYsHGMZVuWN4aAEE3OnwBhZtp2rxrb0gCh50MNuBumzGoOUcYLAGyQ+ev1jl1utYtd7OJ6xPvf/3582Zd9Gf73//7fN3pVrhjvec978M53vhNf93Vfd1J2/jqHcw5Pf/rTce+99+Ls2bM3dF12sYv7EufPn8e5c+fA2xrQriJ2oN9HF7fddhvuuOMOPOpRj8JisfjIX7jF4pYG/XTSdLKgWvnIpYlDvgXRSZAls8CShI1WKmIq2sTFwTXagmsXTQIPdPJWd9FL16SMw8Kw+Pf1l4TZtzkGDu9BGHqEwwt5AqmF85g6WQGUAljqtK4LQVr40JJATF3+iNs8sApLhgDQ0CMb1I+9THTTPlL/k8umFDXwuK0zXY+HOVuu/n4dCrAFf7LTVlmQwcs6
sZXtTcCBJ5HOqdlBWsQTZlecSJsVSR/pXhUWphQ0sE0KjJKElwKb1fZL53yRFyK2oG4PHEIqpqXOazapeCYFCpXWyQU0tuKxogWp+1iYuq8pX4jA4RAyiKWFqhCl4OFRxklLbD5GxFTyzPtIu8qTD1AceviVSnkO8MOIMIjP3FyuUkMBP9Oa/M92jbDHli3ssoXd72AODsD7Z2U8z9wmjL/FUthhM+YrjM2FVC1otrZFaywWxmTJNvWoUWB4XoABUrGOKY2HAIMuRASKqMgkk2Kel8talm4yTGiaPZgWoHimAvjCZRnPE/ZzJfNHiXVKvmIDVQyh0uk/lIJU7a+ofw8j/Fj2jXoHkhGAlRtbGAJZRjV9JhUPC3AnRTcF9uTY5vI5VEV2WUAaNJv3XWiXgGkRFvvwZLFxEWsXMIZyLo9eCteVojAa5kQup8xMaCLBhYDj8SOwzXexi5spahBpBiZtBf4AKMOtfp2Is7KAMMgjPCB+dwzsWZ6ALbdEKKPPtnIvUda/RsohedzI31VRm5QxqeyuxCgGWzikIrwHQgwFOEtAjTYZAHJPVI9XMaYr1/ET+0blFl2PeHwJIV2PyRiRNNw7C2orxmF9j69zDc3zUjOTegxSel/z8W39DXpPF9ZQmChANMwC/BoCuV7Wc+gRRwFGbdcCibVtzt4Gc+5B4NsfAto7C7/YB2wnY5gUFTC5F0oLTG5nonJ/rJuubAJW8v6tgE0FYIGUm0FyBWCa57i0jzauSD8PJGDtnm0RjQNCO5GQr/1i72tudV8jRODe3meZRqDk4QwCUTme1LdvaQld3IBWh+DVeclzVUmjvm8qqHfiR1NerQoAFbicffuUoWnarKASm6Uw6sE4HgJckvXMkrDpfqtgX+8D1on1N4YAJkLvZP18jDjTWhiKGExEZxdAcIhGfj8OfZnD6LpV0t5ZtWTmWZ2ZskOP2B9ndh0qz2xqO0QWedhaVpOAfK0owB8Xlh9TdYzJcdVkph9Ao8w34HrQKOon2FSKFqvDnPtqfi/g+ChFJDaIez6f39G28GC4ELJXn15ftMkOQAJaC9h3lKQ9L65HXFwNWA8C/gEi7dm6gCH9G33AyITeBfDGAQsLIMAF2dbWENr2ALbpZG7jx3KM2TazqAmpUFHLqJoWjkUWuL2G4NsudrGLXdyoOD4+xpd/+ZdjtVrh4ODmq0P+9V//NV7zmtfgV37lV/C//tf/wt13333DAT8A+KEf+iHcfffdAIAzZ87c4LXZxS4+cjjn8MVf/MX4gR/4AXzRF33RqS13B/p9dPEjP/Ij+Ku/+it8z/d8Dx796Eef2nKdc7D2xkNuN34NTilOdrOeLD3ozUjf0W5LEBcAkFjmeQGlYH+LRX3LNWlSqUy/oJN+KiBTZvXMi3vZD88k3ystuNtSHNICWOXjV498JlHS9O/8G1WCkLu86zDqj5ckcrYxC2YMz4m/DLCd6TeXcKplqUzqitWOi5AkmdL3FNQIm7W8p7KiTQW8sXoBTplBCu6o303dcW1ZJvwWCSANgA2AJ4JhC5hUulH9zQzMzYp8dQd2JR9FAGj/rDC6YiwT7DlguA3wyx4gMzbg/YwIYMzgloytMkAMSUFAZXzUT5EhRTf1eLMIWbYs+8VsAfQ4gUextTCjRQwtiEXS0zQC8sUkKynsMoJdWtiuRbPfwe53aPaXaM7sodlfikzk3hnwch/c7QPtAsFK0TESlYJFfUzVLNQYEIODYQvDDB/VCyhiJCAaYPTK7KAtkkuU/yaSz0zOuXRuN1zkew3L+ObzMQJMXPxutuwfVJ/V5QLVearH2jbfL5WhTe+xdqgnlsCEIageNMBESjj7/tSNBrPO/LSDE6uXqvGeMlEmteBasi4Pms0gLbQYzxYugXc+TBkcQJIjTqA8J4k05YDEKNKtch+KWPuTx+W1DrlcX7sJGO9wzAdsXE4S/bL5UL7fmPz9UrTl4kmVYCHmwrK68SWC+xH5GpO2u2b95+ui
vJY9kfXaqDmGXqNMCxeBPnnc1V7Mek8EkMGbGAGascEnq6b7IjE1s0yrHxGGHjExg2BbwHug2z+Zd9UALk3zmnmOV7PdtkWEehEmhYO0WEMEZmFotUYYZjRuQOMGIfk1c2PR7Hfy+bO3gc/cBj73INDeWcRmTwA/02TAb9uQFBnPaj1jmK60d6XxhevttZPrvjamFeWJArISt5P8VvM8Q1NwVNUZKBbALzZ7p55buUp+WtdHPxApFuYtKDFugQVH0PEheH0RuHSPyOi7MTfTwDY5541hlm9V+fJcIhwJYKamRUiMPthWtts0iO0ehgBsfMDGiefjkBqgXIhZynMMAiaF1ECn92awNEL5JIOrPsg+AtHYpHZhQdZKPgEgjkPJM2plkKyAsWV6HENuLsvSpW5ElsoPPh0fia2KjyzZmlmuVX5lmIp9RZ1r1eC+Gwq4mtUthvLbOn/b1uSWzuErKRAUT84ixeuj7BttYHRXWIAe/+rjuRo8qDUpbw3Jo1pUSUzDhZk4G+8TTWnMiMGByE5keq9n7HKrXexiF9cyQgj45m/+ZrzrXe/C53zO51z18o6Ojk4VOHzNa16Db/7mb0bfC9P9P//n/3xV6+m9x3/4D/8Bz372s6+KnXf33XfjF3/xF/PfO6bfrR+XLl3C3t7eqYMmm83mpmFzPeQhD8HjH/94fPEXfzGe9KQn4ZWvfCX+8T/+x1e93DvuuAMf/OAHT2ENp9H3PbquO/XlXk38xV/8Bd773vfCGIOmafDoRz8aTfPR2SY8/vGPx2d/9mfjec97Hl772tfiG77hG+73+jjn8Ku/+qt45StfiV/5lV/Bx33cx93vZZ1W3NKgX91VXAN9ym476Y2VvpdkU4hKsdwo8KcsKQREe3Md0B9tZH8/kk5nH2IB/lDAv+wtk6WUkveFbUBp0iqeZcnzKoFBheVXiuz1RJLTj4SI3Gm9DYzNxZi5PBUneUpi6YqWlZuy2uaPNeg373i/DLg5AfuYs3ejFGaCzMBCJfPpB2DYIParsog5A49tliSqO2nHxLjqnXxWpInkayJPxAhpsBoGXCAwRRjDMKYCPuvQ7dzG2CJOnflp+7YUcydgTT32yWslMye7MyfB0/sRegRsvHQZC/hOCfCkifybD+W5FkM6Q1KcGlbCBPUzGalQHcuGQZ5hukWWMuPGIowO3Fphl1WAjMr2qDec7Vq0Z/dh9zo0586Cu30pPJ65TQC/5ZlJx3o+J7YBstW6UZKOBQDLLDKlxGgbYVC0XDycdAwUBAQKE7BIvqXdRsVXzpCwJwyLhJ6CpnlVMmukBhTLtVPZGH5LYUeAak5MHgsQioxddX0tLNYtYGFVnN56jKSxmwMNl/3els/n86EGJf0wlStL31VwO9rFxO9mTF43wt6QsRC5Kbk6ynEJUGIvAekeFHQfRRwPuyrOLm6x0AabbY02l/kcxSSLHUIGxEyWoJbkg0iaNm45ht88KlZcbnAI41QRIEmUw7lciIe1QHsg7DRi8SwLEcdjyNdyTmoMdV25Zn8TASYKi+2yZXdlGLoB5IQFFI4vCUgw9EBbwJuJ8sE8r9LnVUOQykbWSgUZvEB5zytzPQgQNfrkwxYE+O0Mo7OMzhKWlkGrI9C4yjKFZAxMyj3Ngx6WQL8HIyzPIdous7KdNlFheu/R8dP7QO1HN9+X9aPeC/ys0YOAIjmv+WTKN82ihU8+bRlfy/MQKoCpaUHkJp7YYbF/Kky/OrfqXcwMv7lqgI9lnMARTdoPvD4Pc/xhxIsfgvvQ32XvXvUszqoQlwP8dD1q9puy8m1qpkk+ubBdZtPX/n1rF+ADsHYeoWoME+CpAEkhjW1I0uZMyQfaxAlgBdsici8StlFUS+og22T7gqhysZprA9MmoSRdHtbH4h09DlAvcVEcMIg25dQV4BcrwLxWQ6lDzyNtzjKkx23xEZRrjCopjFlBoX4EgOgDOMn2x+BzY1zaIXIOBxmjeTPTPHQfSC4U0jk9
/YLmlYZpkmMKYBiwAhDSdxpDAAy8gbajwrJ4dOdrlu6b4HL+pudbjJpf38JNI7vYxSnH7/7u7+K3fuu38JKXvORUlveN3/iNePGLX4x/9I/+0VUv69KlSxiGAQ9+8INPYc1Od93e+MY34k1vehN+9Ed/9BTWDPhX/+pf4cd+7Mfw6Z/+6Ve1nGEY8O3f/u34v//3/+LOO++8qmU55/C1X/u1eN7znoenPvWpV7Usjac+9ak4c+YM+r7Ht33bt+EZz3jGVS3v7W9/O37kR34EL37xi/H85z8fz3ve83Du3LmPejlf8AVfgF/6pV/Ct37rt+JTPuVTTgX0e9rTnoaXvexl+LRP+7SrXtYb3vAG3H333fjhH/7hq14WIOv24z/+46fi9xhCwLvf/W488pGPPIU1k3PhZS97GT7lUz7lqpbDzHjKU56C//gf/+NVnwsav/mbv4mHP/zh+IzP+IyrWk6MES972cvwLd/yLbjtttuualnPf/7z8VM/9VPw3p/KtQ0AHvGIR8A5h0uXLp3KufCud70Lb3vb29D3PR71qEfhiU984v1e1jiOeN3rXoev+IqvuOr1AoA//uM/xtOf/nR85md+Jn7u537uowb8NG6//XZ8/dd/Pd773vfer+/fe++9+Lmf+zm88pWvxN/+7d9isVjgZ37mZ/CiF73ofi3vNOOWBv2AGfCXHnWiPZ/HaMcrRQHCSLumlfE2X/awkmLwLQz+1ZJLTJTHIALZJ8aYFrFNRR/bgtkCwcGckW5rJBkiAZBE4jOatpL1FGaTylBp6JBmAhyAOJuiTdh9OtljBpDk9kwj+zHL8PEUBJgBftrtXc9JdQwMQTpE64kkUBidps1d0nrwNFxYoCBORR7p6tWOYbAB2oWMTbuXWUJ+DLnDvfZSGX2ZQNfgyugjGhOwsAzApGMzwEfKbDBDBkQ2b1Oe9PpBGFfBZS+2sn1tLk5G08xAwgiMK+l0d5vJvslA4Tam5FWESjZJFzxV3pIkPkixAlAZ5ffV12SzAY0rkBtBw5EUYtyQ11klHQ0zTCfykaYfEENAGFyRkkxg31xSUkE/8YlrwXt7oLaDOXO7MPz2z4IObpPjfrFfvGhU9gwnu6jzMWhRChh6HCoImNbBEuf1SWsIigWtFzBaGWUV0Ka/odKwiTGpxwcAUAVw1mzEyIwQKcmFQtgAFfhXs0zkWC7L2QZn6SG4rZgUcyFSC1+FObINYAREllQbFQAWlh1MLo7Lq0hjQVXR3GS246JhAUSDnCvkxzwu9TVO/26MLJUpgn2E5wib1k+3j2jqhyTbFYtsq9eC5JbNusZBLL6U13L5u3gABwmARykfOBEzKU99pFgxm5TVRNLYQEyJ+YXs6XfLBjH65D12tu3A6AG3yYAfD8eI44DQHxfwpGlBnTDUAGEhDSGid0kWMpb7Iif2lV7LCuC3LVkt1y0AUP+xDBgkOcaaFYSqqSuz0WuGerXsSJTlLusrtNH7TtVckXPFWPzCtPGpd5LzMBP2GsbSGhy0jDOtQTMcgdcXEY8vZZ806vbB+2dA3T7sQz4Rce8c/PIc4uIMYtNhiAzvCwhRmoMAJAYbxQAa+zIW2vSRYirJDbmhkYAPZg7ETdhW0kCW/Z+BqslF8prWSN6WQVU2AC0QsZg2Bp1SfjX4iKOxrEvDnOdCYwJffZLY9wAMImKU3MtGB+ovIR7eC3/+g/DnP5gZ+NR2IgW7fxa0jZFYg34KCuZoMtgd2RbAr11igMXgAo4U9HMRlzZukh9r+Ahskmxk/ikiNCY9JtZoYzRXSDkSIEw/K01vtFjKNigrTll6Wc3C5mMeAAicwbY49IjrY5H3TFKaABAtAC8svywVWjEboXKmpkG0BaT29dwA2JonbGVdX4aJTcbIdcaUhrMiiW5OHGecQGqmtA6pYStESr7TZdcaEkCvtYyFZSxbA8OEwYX8+kHX4ExnsWxM3hbJ9dV/MWBhTdqfjBgpeyEbKs1vW8E+IgFmmw6R
xTpBjuetQ3FNY5db7eJmi0c84hFYLpentrznPOc5eNjDHnYqy/rTP/1TtG17aqDfs571LHziJ37iqSzrn/7Tf3q/wKXLxbOf/Wx8wid8wlUvp+s6PP7xj8e73vUufOhDH7qqZb30pS/FG97wBvzlX/4lHvWoR+GTPumTrnr99vb28O53vxt33XUXfuInfuKqlwcAFy5cAAC87GUvw+tf/3r8xE/8BD7/8z//o17OYx/7WPzpn/4pNpvN/S781/Gc5zznVPYpADzykY88tfMAkHX7+I//+FNZ1p//+Z/jrrvuwu///u+fyvKe+cxnngq76eDgAL/+67+Ovb29U1griac85SmnIkX7mte8Bt///d+PT/qkT8K//tf/+qqWdXBwgP/+3/87Pv/zP//UvP2e//zn4z/9p/+Et771rXjKU55y1cv78z//c3zv934vQghYLBZ4y1vegkc96lH3a1l/9Ed/hK/8yq/Em970JjzucY+76nX78i//crz4xS/G93zP91w1g/MXfuEX7tfxcffdd+Pf/tt/i3e+853YbKSmvtls8Mmf/MlXtT6nFbc06Kd5cV0nrgG/bfXj8p0IVtd0FBmeSQQHRAZMyMWOKx4CczmkGxx5XZMvAhFlA3QgFafTTJNMC1hh1UnNwyH6RQaQshRk3QVbFSwK66gwYYCqIJTG5sT46evzSWxm7VQyfVXBabLP098xaCdwnBWmREpMiv+pwBOqbu+0LS4BGgqM8nyNT6yjsByJjXQR67iY4nczD5XICaHuXq4/KSBDsOmzisCgeDEaEklQkWQqhS0Zx1jYVBUgqtJR6h+kIBoSE4FqVgdQ2JJJunWrROpVhLJOQRXgF8W3MBfn5uOdAFty4k8EPwjbsmL2gQ2oaUDGJH9FA2aDxjCiD/CNg5n5+s095IgZ3Dal0LXcF9++M7eDuj3Q8kBkqZoFQisd+rHp4CGgs0v79rLHYGIVFynhirmW2DHMLOBVjBncjYkBqX8DCvZRZoUow6/hVOx0GykA+9R5r/JyRMXzMclfGWLExMaR92MB/xLgV3tSAgLg6XOVetLjGhD/JkCuDUGlokI88VzPu5PsbMoFcGaaAGycjv38mervzopfzsIWf0xvpYi+tG0u5lI6rnLU0qTEaNnm8Y2RMuhX1q/sW5VoBShJ+RJGijAhYrPzndnFLRT19T4iZBng8oEZ4FeDTqk5Jmr+pCzAdI3RPCEkUOYj5VV6f78ZWR3Zs9cCLdsCCCX57wwS1Ex0ZnB1j1YgRhthfIiSdnJqUEuNabVKAidG/Dwi8clcKobMzDohyYgECmwB+bTBaaKckHK9vB41GykBf5E455EqL1l8a0su01mTvfxaBmhci1/a0CcwxoCsAn9nEffOSZNNu4/Q7mEMwOBDxVRPq0/yAifgL1/T1ZPNu4lPHalsOluodDrF9Noc+LuMH/BkPKsGHG2+yd8jLrLzpp2M+WmEj0DvYm50sWkMXBBAByyekUCZ8+i9i5KPduwTi23TI4wOwQfYxPaLthWpzi0g3wQMrGVSgQJs1jLxpsWQwL4a8FuNPoN+J7evMPzyotP93yT2sN6vJ6Esy+RvDStzougGYekZM20uTJGB3SSTG5KkZhzHIqXJDFJ5Dt3mNC/Qc2eaT9sE+JU8CijAHyEmW4b0OvHJ694VjhdVtCCe7ac6onJiZf/r/JBjnAB/hgUszjlYGuvWMtoE3im7r7UGe61Bm4BXnQOGENMC9cc9GqYM1PpYmkDyvWCbVyqVY0jla/2W5t5d7OJjMT7u4z7uVOXDHv/4x5/ash796EfDmNORrwZwqp5Xpz1up7luGg95yEPu93ff8Y534Md+7MfwZV/2ZbjrrrtODcgFZOxe/epXo23bj/zh+7CsP/iDP8Cnfuqn4sEPfvBVAzJEdGryg1/8xV98KssBgIc97GGnug9Oc93+yT/5J/iBH/iBU1vel3zJl5zask4T8ANwKoDfZrPBm9/85lOT4AWAxzzm
MaeynDp++Zd/GY95zGNOBfT7yq/8SvzGb/wGXvOa12AYBjz1qU/F29/+9vvVSPDP//k/x9d8zdfge7/3e/H7v//7V71P9vf38cIXvvCqlqFxf9flC77gC/DWt74Vzjn89V//Nf7sz/4M7373u0+1seRq4pYG/XSXqGTlfYlajghI3ZVJokc70qVbmYEAKRSsziMu9nN39vYFO5jDDyC2BwjLm2Pn5kgFmdZ08EST4suok08ikN0DN4Chcyd8S1QSSTsuAQgAmCSQXIjTjstQsXP8UICHSt7vRORua5OXrcsXk/mToICGFn7qjm8NgoBknmMFkNh8AMUQM6gBlCKIIYBdD7gBPByL34zrQcHLsdJ2IneqjK8kVxRNC+9jllG1RoE7QmcNDAWMIYIjzda0ljKSDnJHpTjAFCvgSAo6mdUUQgHx5jKdzSIXcTfRpKJPRGssrGFQYjiK3KQUABUcjKZ0wddjfX8vzbq1TISllUITANCY5NDSeOuxkv2IYsyd1XHoEXwlVwTkTmZqEwMzBGFUhJCLNABgVc5MO58BUNMU5maSRNIubbINqNuX/axAn+1kf7OFIysAj4sYvc8Mr7nkpoKzziSAkxhNkpsCV+dDGmstjOeCSAVGI42/ScucgKbBgYZNYkUOk/NWdkAB/bQAmVmubNDYFjYB10OgXKz16flIiS2A0qXeO4/Rx0mxbuPkGJfHgNFHrEePwQUc9g6D81gPHhsXMKR/PkQMrhy7pZjEaC3DpE7++nUAaK3J7+ln9xqDzjIOWouD1uBMa3Gus2iNFJk6Q1g0HVS2V8cqg+bVGBkW8G8r41WPUR1bfbn2wTSEA7O+fyfMVQSzgNjXbPl+B2Q+YCNd+zOQk16m+f23lr7TR2VrUEKtxj695hCNAH8mefX6dI+8EuMvRGDlAlomLG4yauDaBVzaBCwsgRuLVtUAlBW06RFWhzghh6ggWSq+6/1i9HKtHBlJnpnl2p69Q5GbGJQhn0GlWjK5DlImE4uMIaTRhdpO/jUtQu3LnCLnefoeJ3/ldAgYkuYwUglRPTaSbKXes5xXdQNZt4YFFFBJz7Mtg1fnwavzCEcXENfHstpJUtI86GGg5QH83u2I3RmMpsN6DEkyNOamFA0GJbAr3XdT8wv5ATSsi9cilW0iCHudQhoDtyly/0ntIFYe0WRbaRzKyykNaV0CPCwBtDmanBNghjNd9m5s0udOK0JSlkByl12mgfARyYuNEGPIeXKWAWeA+jV4XGNcHyNs1gijgx+cKCQYBqNPku8Npn66ymIU+VpKx4DmUtS0Kd9KkvGmEZZmANZOcoQLvcPRxuNwcFiPkjP4NIcACthU50AZhGJCw8I+O9Na7DUGrVG1gzK46msdE5hN7EC27NPszRhDOhaUKeskLx16xONDAUT74yylSU1V8NTcse2K+kdSgYjNMrNT+ySTWaU7GSyOJD7sFnKeGtMiGlcdo2X+QckPmdxQ2Hy6PM1h0zmegU0AiAGGOIPCaSIMgMS/GDLX8FHP14DOcr5Gaa6lUp9tsibYaxiNkX2h1ytt6PIxCis3Eph8YsMSWiMWB7nxNjcRyHU0X5OMSOivK5A4YN40eX1il1vtYhf3PU4T8NvFfY8YI97znvfg//yf/3Mq7L5tcVrs0oc//OGnspxdXF2cltTix0I0TYNXvvKVN3o1rhjvfe97cffdd2fW2dWGtRbPfOYz8Qmf8Al4/etfj7/8y7/EU5/6VLzlLW+5X/KhL3nJS/BZn/VZeM1rXoOv//qvP5V1vBnCWos777wTd955J77u677uRq9Ojlsa9ANkknRf5D3qeYHKqCjbL1ABjVReRAFA6UeHFDWArVKfNKyEUeMTa2pzhNjuTbrfyQ9FltJcfVfMRxVsEYODQQCzSupRBtBCKt4DcdIla8jCqLwUJzDJF1ZMzQagiVwqimRq8qGQyWl/AkCU5UxBRDKtyAc2VQEqThl8NXibvclmx0EBheVZRNnnrtp2
fU8/36R1NwjJD2eTAT/4AVFbpW0q7FfSPTCtdCIjrR+LgBYjMacAEBh7TcTI00JGYxgNC0tJJ811jZMoLUefK+A33996nGmRRTuMiQEfE2tp/iXkolT93W0dxVdTo6L0z7J2lg/ym1vAYGUQYNgguFH8iBLol0NButStPSl6hE6kjoIvHjMVUEjJ61C9qrzDGQABAABJREFUWKQo2pbHtH9jsyfjuDjIBStH0q09+DBhauhzAPlc0EeAwEFAaDBgIoG1OJ6iBq8zIB/U2w/VuZk2PxW9DAE09OU8c0PxgZkVg3PnOFsZD6JSmFTfKbZomw6RGWMCzLVAxUiFszBln4TEYN24kAHA1Sjg3mrwOOpHDC7gwkoej3qHfvQILsD7IEzdGvSzIgdoTHq0jNZwBvy0IKjPtct82Vrcttdg2ZqJXOheK5JTvqJ1U/CTYjD5sfj0aBd/dXxFOnn0U4zlmpDZPDXrwcMeHZ/43i52cdOGAiLBAYazDHdEyAyweeMORWF5TZjmFfgXKZyQ/CQUUGJ+L4tIkoRJkWBMd39toAAAF4VtB8hlrFM54+sUnWHEtlyvGyPeYCeYkRpBWHciMyly1YatMK3SR6Q4nqT2sjQDwRhtSBIWYAaMVNq7jnrfEAPWgmw78WSjthP2Vi3reZmIs3tUHuMECp8Ag1HnlQIuhUjZt7gx6udHYNenZipXwGUFJBddxaxfIpo232eFTV18DnWLTbrX1jX5rBCg13jvgaZNwGsUiW+46XFds1ezPLpKPzJQD1e6p5qU89no5H6cJNMlNxSVDJfY8vU+P61okmyq5gRGZUeZkxR4lNyLAAoFQNaGsXwfQwKcWovgRf5cGWSTJgCURi7K98qUP8kPA7YBbJNlM1VC0zkBb4YQsRolZ1inR82hxPNapCbDTPLQVGwxBZoaQ8IyI9n/ertW9qk04zhQtMWzu47gAG8q4M/leV0c+iyNG92Y5eHJ+PRYgZy2UkMxOkcQ6XeXgDIXCnMRQJHxrZpYGUBkgk3jSTEg2uQv3i4K6Bc8wFVeHERKGLZJAGyRK9XrM7OyImM+Z2KEsEGDsP5KkwGj4YiFBfYak/dLPu6MfGaZQL8acB1DkMbNGXOzZulFoIDVdQPJFvWc+lIj8yrsYhe72MUuZkFE+IZv+IYbvRq72MUDMk5LgvNaxqtf/WoAwB/+4R/i4sWLV80429vbw5Of/GQ8+clPBgD8v//3//Bbv/VbePWrX4277rrro2bI3Xnnnbjrrrvw/d///fiqr/qqU2EN7+LycWuDfpVkZO1JMNf7ONGcXoEyCgL5ULxSDFtZtgmpgzoK82hcI+7bXADQ4PV50LiRicmwAg0rBGMzQEhjDxqORVaGGGF5nQ/qxOQhJx33lltQhEj+xAIoyPM4kdLS4kFrBADMY1N5t5TLHmW/ruylEop3Fo19LrIXMCIWCRzTgtgiNk5YlQlQ01BARPfnNrB3m+RV/ZKwC6YeexoZPCHxlqOxLwy/zSXxxRn6AgRQAvuahRSkbJuZWTIGAoSyIfhUqGNS8MYgWsA1wuzj1PVqGLkDVosAKu6jncC6nrn4OS/wqSxbYkyqV2FdSMjKtvV3lfWl3e3m5LF+GmFS4ZJcYiYGFIZVHen4CEOPsD5GXB1m1l6WMOr2CuBnGyls1l6LKuM1Z1qkwhQZk4uetbdP0CIV21RstPAsBbvBT1l9CvRph/RJL790UTLSTK3+fA7ITE0Nlc90FYCobAbZEC380mQs4YcJm0E71GtPFA0CJpJtRIzox1QU60EKvrsNIhu0ifXpmWFI1gfgDGIbolyQH1PxezUG9M7jw0cD1oPDhdWIi6sR68Hj6HhA8AGbtYMbPbwLcKNHDHFGUhXJTjacAUDbMJhJmgy4vM9MsK3BsjFYtgYPOmhx0FkMZ7vcaX6mtaB22u1OrpfxOrwHoV8hHJ6XTv5xmILLejxVIOC2yN6SbkR0I0Lfw/UDLl66/qAfGQJdQ2bUtVz2
Lm5sSF8QF7ADKKw/ZWHUTU3bZLoVBNTGILYCsEST5Y1N8kBV0G9+S1+7wkraeGEGnVsUv6jeBVzchCyz2+1d347y2xaMcwvGh1YeGxexZ9vSvMRmsj0xeX4hBMRRwCdyG9jFEpajgFR+Ko/sY0RntTGEwEhymIaFnTWsCisG1XVdIzXvkGnBy32RM1TgsdsX5l+SJY/1/kQtSVga4VS9YBKXU26ogkmamlogrT9hrxGmH62O5Ro89KnphJOM9j547yzC4ox4Jbd7cGC4EFJDTLk36v0XAAyLT51R2W5tNPMDMPQISZaRjckgNYXCosrbBMj2NyEBViQ5UQL9Qpwerzb9lgXAm2PQuBLfOGJgcYBADAfOjMeuOX3Qb2EIrTE5ByEvUqkEoG33QERwIcBEIDKyvKrk5i7nS2RbcNcBwYO9P9FQpblUDAFkMX1PZSUrAIys+EpHK+CXSLNKLrUeA442DkeDMP02LmTQT/OcxhA4VPLeCYhiQgL7hN2/15gELKfPohyvMbEQRcWCQY6r65rLkq4yXwEIm5xHxaOLCOtjhONDhP4Yoe/hR3eS7ZUAtuxfaJri5WdaYb0GATp9QAatgdLEaEgYjj4CwQAmRHTWwra2NOVRD4S93FzAiYEZk8pFWRcjHoa2LdfhGKRJgC1sXbQKUdRtA2XgT0DsmHzGkSXURx/h27KPyj4QUN9y8m6t1B56Ryl3nIbItU5fm9gU1MdcdZjpvK65AajfLrfaxS52sYtd7GIXN3P88i//MgAghIA3v/nN+PIv//JTXf6nfuqn4rnPfe5VLeMHf/AH8V/+y3/Bz/7sz+I7vuM7TmnNdrEtbm3QDwXwmxbahT2lAAuwhdEVkXyrkICqwlSLTCAwjBX5N/kh8cDi/jAtsMgokrKI8koxqD8E8bp0DisQcwPbEtVDxEQHZgskrw+VlPSz4gmT4J6GAR9DlhI0bDMApcFUAxwoIJ/bFLDP9TJu45D9MGLFjBEPNXuCUWibDoEZJiY5p9n+3sZaq31VJmOAwqSqgTAgq9vIdinLzBd5SQGOfJ5MxySJmDuYUcakgI8kxSEWzmhrZJK/V/1+mewXbzb9vgKC8j4ygzJ76XhXCq3EiKbISClopQVV7UIHkEDNdQZkgdTNbzuEdvkRO/9PJXJxwgqzkxZSZLYBcD2ib0BuA94HwAYheMA1APelo1yli7r9idzqpGBNtJ2JkJYxkWJUaVnTIhJjDMDGB3gHDN5PCo0KJgFyDGqRAyielkDZb9tCJTy1tKHHpXg9Ts/HGKUxgTEtNhJmxXbddxOpNpTrVYxyPLuhgKfYTH3+6vGwMqZkGpimA9oWaxNz0c4wsGw4eQl6MJWubh0Hle4cvAB8bvQYNw7DxiG4iGEjMmKxZmKyEbaB5cT2O8n+0+fGEuxo4BfCwFy2IvnpQ5F/YkbyOZJCJ6+PweuLiMcX4D/wPviLH4a7cC/WH74E3w8Yj9cISd5Mgxsr3o+NhWmbxICQfwAQfUD0AeNqDb8eMByu0J9f4cLhDEDcxS5u4tDmFa4AO7lGJy+s4ISlp7lNXaCd+6Vq7pMahTKLhhjRSiMRMyc/3qkn6vw+HSNwOJRrhMp++wiQqSTirnOcW8g2Ou7QNHvSKJTk99gJ2KbASExsdZEUHtAyZdnnxjDYBczhz1p6vDWMlgJokxQmFLBQVqXKpKfrebQtYvJ05bN3FIBQJRftojT4XOG+X6UPszc4P0YikTdP90MFaLR1y5DIeraGcGABGo5AmyMZC0Du4wmMxGIpDL/FAaJdCDhcJWwxojTG+CL93oCnx0CSlqfE1sps9ryg1BgDFKlmlRUkzsxNBcI1L1B2IUPu7zHlmzSsQOMKtDku4+N6uX8uWnT2GudVSGB5BM4ZBkWXm+0a06I1BM2MW0OwmguOG2G420Z8kZM8OlXjpQ1VdRNVDAnM5i43xah6gjI20bQCSCXv62EM6L2w+g4Hj6PBYzWK3Pd6LOe3SnlzAqKa
9PeikpBcWM7qGAvLAmJSAYUiUsNBArbzvgRKYxSQpb41H0YMwLBBHHr4ix8Wpl+S9fSjQ/RB9r+C+fPIkq7F51vZqYMvbFVf2S2U5j9h0rmgaiAhNV92aJctyC9BbEHjAswWvFiKh2i/EjUMBWrTPsuNcLpdSf69sV1i98kWKNOPImV80USArDAo96LB6M0ElFVmX8NISgzyul7D+8ToHEPEaigszsbobwpIr/ccUjamrmu9zmOPZbuHNhIaFtWSWhFmF7vYxS52sYtd7OJjPS5evIjv/u7vxrd/+7fjGc94Btbr628zc1/iYQ97GF7wghfgR37kR/CMZzzjfsmE7uK+xa0N+s3lpUgkSTwwkZ2pAT/MXmdQKrhHUAICUbH+mC1gAXJp8ug2Uw+TiWxemfppQScX0K81gHJfoirOUfJ08ATZbkyLKEAq/CHCRMrFtAiZBJpUzCFURbZKopHGXgotCvyF1Gmd2HIK+k3k8IIXtpaOlR8QTZMnpz7KvvIhbvVxVICsBvzmzD9fHQsK2vBsyq6TzW1AUfYp0S7xWZFMx3W+wJjAv4YVzJstlso41mvjI+X38yRY/YDC9PjPYF9VbMjFDQAILjM1aVgJ4FcXPVJn8vWXn2Uk/iGir9dHCnAxuCT1tSzFJ31cLEG2ERaDdlXbhYyDgn9a5JlLBaUiZe1d6CAd/IML8DEUZl+ImX3n0s6rWaK1r4ghQvByLLKh3FSQNzc9rfeesk7Vv0/PR11s3HYs1t/XYhZxblCIuq/nHdNRikJI/oY10KZjK76GNrEAOxBvpJM8eEQzYLk4SF33UvYcfMR+W8ZiPQaEKF3461SoM0wIIWZGn/cBwUW40Yt8qxu2gH4GwRl424CZ4NO4OvZZ8jMYhk8eKGwYgw3Za6beJ/qvSdKycD2wWSGuDuEvfhjDh+/B+oMXsP7QeYzHPTaXevjBI4weITFvTGtgGoZpDWzXgAzDdC3YMIhZ/I98wHi4wnA0YHNpwPEHj3Hh6HT01D+aqMHIa7X8XTwwYw62ZRni4OR6HRmIPucUkXh6T9LrjQ/5vqRgoUohgiLgXSnKg4v300z6TtQZpNlidAVwqeMjXCKvWRCEYaVFa6u+Z0l+L276cvWvgZIkb62s/YYJAyU5wy3omuYIDUPkmxOQpQ1SArykex1bQUFr5j5Q7oX5MyxNHVUuc6XGtMu+QxUzkIpHIYDsQ6yPCyMS5jQclcajxDJTX120nfihNQu5r9ukVoCTuZPeO5UdqVKQuaGqlg6v7i9RZa1n7NSTMuOxMF+V4ZeOP59+hGNEiMU/UHyJq/y/Gr/T9PC7XAw+yXErwFh5SCq7iwkiQzoWAFqPT7IteBFynpDzLmMyYzUCJ3PQmuWnYJPKpKcmIpfOE+cjehewcV58gUNIcuknT+Qa12EFmmZy+J01heFX5VghCkEsGgvyDpEBCrJfNGei2m5AcyM3Jl/OdQb74tAjJMCvbgbCLIfaFiFttw/IeaR6eNb5o8qTtkaONcMRgIBpEdKF2dgO3Lh0PEoDheZLUeU+NfScmsxTVNIWOS9jkvGidN4EyPkagOzvFyPQsBHPPxRlks4KWKl+liZ9PkTAcsDGRfQ+gGBku0NAw9OZV0jnUGlakO3KzWrqZegHWJbmK0PAeAO6PHa51S52sYtd7GIXu7hZ49y5c/jGb/xGPPvZz8ZjH/tYfO3Xfu2NXqXLxnd/93fjZ37mZ/Dyl78cL37xi2/06jxg49YG/WbgikxUCCYV2TPDg2hSoNdiQIQUligCPn2OKMInKqAyrxpO0iraARpsNlU/OenVwkkBFjIAM5deukYxhRhmkaQ+EYN4/IGg00Mpqp0suFHyASn+DgRbsXuABEoRg0Zh82VZTCegn8rlxeRBEcfC9FMAJ6rxfJLhUelBxAhiC8sWQfQuc2FApZ10XZXhqZ4eNVMug79bioNzCdAMgCUvjQCA7ELAArYItQQmUIoqlDrFq8KRgsOmBkWDS9Kx
1Wf09ytZLd4mr1kfW3pczQp3ug4ESCHKy+/ReDF3zOrvxiTxFfZuv/7gNBVwzkeIBCZJBzYFkXkNtgXaAdwm5q2r/IvaDpEtvF2IbJNpEdtlKjDZXHhQ0Gwb0OYC4MeYPPrG3JE9+uJP56M8B+QYa9KkPBeZKp8pKUpRPu4Icu4wqDpfqs5qFJldXafshZSuSTFS9TcmhS0fhS0jYKeRMai8/KIyQtXjD+JPGtyQZSjlfKy8D4HCpkzeMNR24G5PfCvbfXRNh0WzxJnlAYYAHA0eBy3jrBOZraPBgQk4t2ywbC0sE5atwT+EiHHjENNGEgMhSFFagb+grFqIdFVIspls2wkD0I0B7cKAQ4RtOO9nk35r2RqcaS0OWoOD1mCvYexZAh9fhFmdh//w++HPfwjrv/t7rD54Hqt/uBdH77+EzaUNVvesMa4dXO9yQdkQYBsD21m0Bw1sZ9HsNwkELOfqeDzC9SM2lwYcfeAYl4YZG3wXu7iJQwENbX7SnCLfj4hFujBSaSoglm/WDTMxABQTS5ALMKV/K6OeLWzTCTMw/WLwU7CvAIACuNSvAUAFZ92QULWDaFuExRnxkDMtDBuE/hjY9IAbkSk0SU6SXC8MHkPwgRBbA07pAbPIYhoWVlZnGbxJYNmwkmt63cwSalblUl5P8t6aq0zyhMRkO8Hwq9UD9PVQ3Xf194AENDaZNT9UjWOGSNKUlI8tlKU4HIFX54XJPxwXL7mF+PbFxX5u4NH7elLa3842rMKkXNUo8y6NEcWY2I0qz87lGNyy3coMm6g5EKf8uOSWMYoioolO9snmKDMwo+2EpXjmodc1t9r4iN5FYK8pv+sFJGpT/k9+AG2OQa4H94cgt4FPjFRR3Wi2ziOyR/JGmvdOyF0nwA+2AdpFHoPYLBHbPfQuYO0Ceh9xNDgcDh6bBH4BU5WE+m8mQmdFvnOvYSwSyHdmYcEk54eCy7kPMQFtIJESrj1HdY4BpDxYvfo0L0ogXxx6hONLiEOP8XgtDL+xnAthdKBmRNisQUMvYzFsCrswHVPCaENubBh8wOHgBfhLACCg7DmRL1Wfws4a2KxSEJJ34z7a9gB2sS/70Q+gZp337dxbtQafCwgujYAhM1W3nWA0mTNlm4HESmwY4ORrT8dHOc9sEsjbdWcRun1sosWlQRrpelcBkEQIiHCBgMSdbdu9pArTl7w1zQnUJ5NTEwofHW45Snexi13sYhe72MUuPnbDe4/NZoP9/f0bvSpXjDNnzuBFL3oRvvd7vxff+q3fio//+I+/0av0gIxbG/T7CEGQCco2kKf2UZgz/3xUEEgAQQaEVaKTOCADZ0Rl8ncC6KsKKcooygWfaxj3qe+x6qDMrLegsl4COugkVHxlBIgrXc4E46N04KpkZOqqpuCzLCaCK5PpJAmUCzwplN1DgTP7iIMXCbAYMoCB9FO1l58CJD4W3zMiKcJIlyylrtSynRqy2jVoORsfFoBU92uMzcSDUL17pHAUpQu3BvEUeEkyoTp5pTQuiGECYKkcT12Em3Tg6+v6uyjHVczdsVyOAS1uuCHJ+YwiseqTz6Jul7LjrjPgF43NsmTSAS1FZh8FaE9oEBDFU066kCNgh7LuSZos2i4zAjyLvNgwhtydrKAaMJWKC4hwlZTmxkkX8moMGH3AxokMVQgx+5EYEm8SQwRYRqCIBgxUwJ+GFoKB6fGl15ptIYBg8lcBxAMwXccyi7UubAGy720LRCvnXmwKsyE46Wh3w7TAGUIG/OLQJwBwmJyj4uWT/A/bDrQUGVXeGxDHDrERIH/RLIC2Q+ODnH/RojGE0Uc0Ro5xHyJaO2BwARfttOgTA+Atww0C8NGMgajPQyoySqNAI0y/EE9Y7C0swyQWgE3Fcm1YINdnX1FlHYfR5Q5+P8g/Bfx6F8SDJ8o1rg0RXbo2Bh+y/wobyh3aZAjEDG4Y7X6LPbr+gAQxVyDKtVn+Lh6YIddGgHOzBGV2ffb11dxH71Hb
wJP0nMDI3LwAEASAkHt88vozVqQ+cdLrbA74TV5LV3YfCSsX8nc7y9eFVaWRf0rZc81C/ly4wrwxU6a6SpkbqwV9hogGmswo12Yr9cKjzaYoKGgOp+NPDAQDMEquAoAU8JjnEpD7cF7vtE6T5VavA8j3bFSfIUbJQWoPWqhkofqwATSkdVfJzbEwvOfy0pmNqENLyEwkaXwReekGCXBGYUSyMvlSSK5UebLWOVbarhN5V5JmzPlgtR7ZMy5JrtMo963sd6ngZbN33XOrMQiLzkWgqdc7BkSf8oDERiQnEu/aiAcg3fObyTJzfjpq82NpzAFQ3ZeRgD+R8hTJ9jbvTzcG8bZL/1yV0KsiQB0FBCPY9NgwozMCiFkuTH7dLxohzQsmDZcTQJuyskiW769yojj0Zf7ifQb8oi/HlR8cuBkE9E6gXxx6ULvIcyB4B9N22QdZwT8F/DZO8k1tSGVPaDig98Jm3NiQQUCXWHUA4A1haTvYBRCdqKRQcCKNrw0BtZLHNoAfJafkiUaONsKqDgeyB3X2bB/Xci73l+R8Xl/KoClsA7NYSgOXH7BYnsNB22DjhLOrc8nSvCayonLdIxiyMLaV+QpQgL8MWMpck+fey9chdrnVLnaxi13sYhe7uJnj+FhsBm520A8Anvvc5+Lf//t/jx/6oR/Cz/7sz97o1XlAxq0N+hGfAHLyW2kCk6AYkfGsCv8aJ6SCkCtJeRKpH4rMsE1iG+WO0XZa4KpljtLfWizInZZzmcEbFAK2FNBM/41egI/CbBKPsTz5NgybxsSnwjobqgC/IRV1pAiSWTu1nKcWF2p5T0CkgxLoIJNXYQFkxhLKmPlUiHfqi4EKGEGENQQOonDEs0KA/qXFIfXSqwsOkS3QLgUEsYsi83SZmEyyE9g28TLcrBH6YykgpC7p6MZcKKG2K8yquiNdvVEWe8lHMB17qaASbVtYbQFZCtQkwJFcYlyOG9C4mhYBTIuwPHf9JT2BzEAYg3rXRWjBIUaCYYZt94osU9oHE78d0yAmD74xSSb1Gy+gny9ScRFTqTigeEeNXqSHxuQxM4aI9ejzeVDLLzXMmbFQs/2YCoinBSjLBMLUX+ZKQRDPOSABfZiub/5cXdSEXJ4cAJB4bYLbfAxEPwjo6zYJCE4FSXg5/rS4tTpEHAdhpFQFrhiCgFetSERRtwdeLEF7Z8DLfXncP0a0Hbq929G2Syy6Fi177DeMhhnnBoe9Rjr0Dw9aGCac6xt8qDE4bAyGjcPaDuLzZxhuNPDOZ9ZfZv4BQPDguriIZsLwoySL1VrGsrVYGMZeI8zD1hAWlkDDBjRukrfoKAWqLRFDRAgRQ4hYV4VJAYgDuBfQwzTyyIZhl+I1aDsrUqDpn18b4O8/8jGwi13cDKGslKDs5SQ9nGXPU/MJ+epaTLM2hrn8uko4I4Ccl4I7kJtYKDHWrekE9opFSnku5ykSyOWcDKmx40OrdE4S4SF7BHs9UT+N1IQT2n2QaRMwJPdyBQXINjmHpOAR/YCGG8AChhmMAB8Jg4/C8DOMPcuwXllZSUGhAvU0H9ZGozw6xIgMgJo81pFtBZqmj6Ei+cxz1FqdIDWRADjBHozECIgZbNEmFVGHIHBabxpWkhclYCXLQlZgW6xei2ndYnpiZUPlHpue6/ElKg80vecmwIlsm6TZqYB7CcSeAKe1v6/thO1Huvxy7yUkf2Q/gIZjGXciAVnZwh885IbkVhsXcXHjMIYW1rQgWmeAObMfXQ8a1iDXixecG4GQvOCMEaZfzeJL8xhyo/ghA7lBSAFBQMBBlfWMpk0SrS1i02GTfICHELEaveRdaS7QGEIDwlyZA5DjpzEC9AnTz2CvFfbbwnAGxmu8UMC+IlkZqYDKmgNl4Nv70gSlufnQI2zWeVw0H4o+ZE8/MgxOrD/rgzREhYDQdpKn7DNoFK9qo2AZ5aETn7vkZbga/cTbr2b8KdjZpW1fWIYLBgsj0urLZoF20cE0HchL
zh+1wbDyR8/n/xbVGUZirFZgdsPIPoeUmhRVjjc3Hrge4eI9COtjhKMLafzGNGfpYB70MJizd4DGHntnHoJu0YlcaBB2X9CmjQAAMg8oEqdy/GJY5UYEGjeT5kna7Jh+u9jFLnaxi11c79hsNnjjG9+If/Ev/gXuuOOOG706u5jFrQT6NU2Dl7zkJXj605+O5z//+fjMz/zMG71KD7i4pUG/GvCrn6u3XyDAgEAQnz8F/rbJA20DDoEycfT6A8ww2r2agCiaFbcmjD991Mnl7DezV9sNijaxVI5C8ZooQEdazxAxAhgRMQag8TGbsDumDJrZuQ+BdtJaBbFMlgOKgBQYbFUQSV55YAYZg3AZUDR7nkFZXCpHlgqBESDIZNIkNh9RzMUalX8SkGXarc2pgzVjv2wT1bMr+1UjTzxTIQxVgUwB0AS6hc0acXWIsD5OPiF9llNUHxtSTxsgAX9peRVoHJsFwuKMFKWarrDkavqjSj+GUBiXySuvlviJ3Zm8rOsdEcjsPpXKqjeBoxx3YJLzLTFrM6stfX9MgO8wepHpjDGDfcraC+mY9mmZAnTH/HfvU+d5SKCfF9DPhyLpCQh7zNuI1vDEY0ZD/JakKGW5SB8RFZ+Zy4F/KkFM+e/ynq5BVszL1yQFMiPK/1q8leN5kYBV8mMBpIPLxaw49Ij9sRyfQw9/dAQ/OPh+yIUtDTKMZr8DNxbN/hLU7YP3z4DPPgi83IdxPajdR9edRbs8h01qFNhrSqHqaOPQGsbF9YgzncUHL1kc9Q4XG4Nx47BZOIwbBzd4jJsC/tXMP2UUEJut3ld5f3Dxl+FKDjUSIbIBWQveOwMEj+6Ow8TME6+YzaUetrMYjgfsH48Y17UcKcG0Bu1+A7u0WJxdVHKfrRQCG5uLhGFwWPQb4A8vu6rXJNgw+Bp6w1zLZe/ixoakO6WRYQL+iQgcrKoeaA607X6tr0880tJ38mciQC5f143tMouLpsSTE5Kewhap/ga2+v3eiIhJoi4g5aSoGH7ACWadIYAMJXDASANWiOgMY2kJdjgSYGkUP96wWRdQxsryCMiylDL2lToAcWmOcVMfMaA0uSlIKeumIJfksBSVtej1bXmdIbkwMRhyjTUcy71IWfC+UjmY5c06FnqPIhKPVJADsYUhaajQtVb5Tp/YQdr4Zar7bcyNQcJ8J+uKckOSFRfgNDVeKTsyNVLVrEhlydWezeqtOPFGNhYhSVreiNwKkP0yepFS9Kl5TGTmfRnfpAAx2Q9Vs5k0oZkCkmo4JwCRNvIl6W2opGeS6Y9JIl+VJIbIGLJvn1xfZD8JsMWk0qzTBj2gNFnVgF+T8quGRTpWjzWNea6lvuTy5pTtRsZkpp82480VSQAg+gDXizKA6zdpUQy7HmCXLToAcW9PxiF4OZeM5PLk99GaFo0Xdiqz5J8hFBn5wRVPw5y/JGajNptpLnWua3CmNRgagyFEtExYNg2MbUUe0w8CAGrul7Z7YgdQgf9EhNxKlcaThlWewygrVNRCBsTVEWJ/DH98Cf78hxCHHpt7LyAMDjEEmK6F7Vo062OYBx3DPEj8YKk9wEF3DkPyy3YBufFUe0F9BJgifCS0TFi0e/meEakvzL9xAMbr75e8y612sYtd7GIXH4tx8eJFvO51r8Ov/dqv4X/+z/+J5z3veXjKU55yo1drF1viVgL9AODrvu7r8IpXvALf933fh1/7tV+70avzgItbGvQLcQqizUM7F+8L428e9XwxRKQyl1STAhEMMZgrMZQZ208jVsuYR5w9Xm/wjwBYQvIwLOsy8adPK65Sn4EIPkoXujCdpDBlAxBrbcMK8CSTQD4WkC+6IXcTS4dtJSWYuotrX7srhfoQFm+0aksCARzBUcBfJU/lrmCU7uCJZIx2fQNZXkoAKkwYRYYsjGEpSgHSWZt3aigd98qm2gjAEsdRHitpJLAHNY1Ic4UgxZaq8IKmTfKVXfa4cQnsmzIcy77N2zTzqdHn
sdlDXBxccXyvZShgp3h6XYP0McKApJCXARtOMrQxg4UK8Alop5JJhak6Z+uNqciSiwzV50cfcbRx2LiQiy8+BBjmzAA1TPAsK1p3omvxilmOKcPC2qPkZbQN3K/9JoEC/AGYiSydDMXiavniyfqwHP5WZYk1VFI2BJHpcqMcj+n4dP0A3w9w6wGu3+TOdg13vAY3Ddx+D7u/RjP0sqz+GIYNeN8hxAAiKdTsNQyTzmEfGzQs0qhNda1YtgY+RKy3SH4SExyQGX/qAVoXz4kpM5DK9k/HdSIZlorgMC1ouQ8OAebsbeiMynwFmG4FNoTheMRwNMD1DtFHBB/AhmFaRrPfwrQGi7NS3NIiF7cWpilj7kcHvxmusDd3sYubM2rfMqJK8lPvz/N/zOlGOZP6vJL8pzLHggOCRQwOTDYzz3zElOU3uzKmzGzruvtIW6+91yUUfGsEZInBl3vyPEIAWSm6GyOAZ4hy/e4MYcERNByDh3UG/OBGRJu2zaGAM5P7voA4Kpc5eLkHaoOMNp4BqnRQGH/lkppA3ryulSQ5MSIFeZcUrLMw0GVTBudOAH2aG9ZAaJ03JalSYpbcKgF/8lWCT8DwGKSRb6SSB2nNPESATJLoDH4CwkXTnMgvc1NRLS2quWAIIEyP4cx6qv0OicXD7gbmVj6pGPgwyyMU8AsuSZGWvBdAUZdIfnwTqVMNYvnuIIyuicSnMTl/j2pnYFqRXY8xWwi4UJq8DBEWtlJNSD7JCkqHIB7KyvYTv2SkHCvlWjRlds7VEaIeGPPg6bZtA/pq7fDoQwL+NnDHRVYyhpCBrgYAd8fCDm1a8P5ZYLQgN8C0bVaAUBnSEKUpbT34nHvOo7UCeLaWsfEBi3SAjz6ItQEMHGszBsEbQsstjGmLnGu1j2M1r6kbZmV/IHutktsAfpDrjoJ9Rxezz2E4PkRYXcJw7wW4VY/+/GGWP232O5hugYPG5qZPs9gTYL5dojOtNLDFkrtqU4dHzNciQPLB1rZFJlkGHXMf6l3sYhe72MUudnH6EWPEK17xCvzAD/wAxlHuu1/2ZV+GF7/4xTd4zXZxubjVQD8iwr/7d/8Oj3vc4/DWt74VH/rQh/DVX/3VN3q1HjBxS4N+ftb1XQcRVQANsg8ICKBIk4kGUDpAs2zPbH6onl8CkMVJVyklNtlJqK/EVJiyej0tYxtz6HqGeo8xicwOK5CQim7a8d+kCbnU9iIIIkElBaOIZbtMBfsk++UHIKbJcS3vqZKWlXSQMAJZig2mzV3C2VePeAJIEpJ/h5F6U4wRHMtE2rB2rcs/9fSyTGi1QLWpjOJrNoL6NzaLJIdjswSl8qqKjKNMrhFc8kkRxgKlyegU3DMgE4C2E/DCmCLr2e2DFp1IBC2WiHYBd/unpEKqE882ttgEQvBT3xyRDUosASQZJ5XBiUE6/VO3PwApqs2N0K5zbJx0+irWU581wQuzVMAaeUfHvhQuC5tvTK+pR4o+KoOvdyEDfD7GCtSTLmsfIgbnsR58fh2QolKbii2t5fy6jxEcpOjCSSZXjwctQhlKHnI07UKvC62+YvdpzEHQXBBBxWzW9xBzoUSvdybJnzVM2YcuS3upj11/jNivMvM0rFZw/YDxuIfvB4zHa4zHvXjdpc7t4ANMYsI1+x3sfodmb4nlxx2h2V8iDj344DbwuQfB+BGxXeLM/oOwXHSZ+XjQGnSWcTh4HLQW55YNLq5H7LUGh73DhxYWR8cDxo2DsQzvAtxoEFyE92HCtqPECjCWwalYqMy/DAwnL8beeQyeYSiiWxzkwi6bFnxbD779IYirQzTHl7B/eAGhP8Z48VIaix6+3yAk8JONFKu5sTCthelacGOF5ZcYuhNZtODRrq6/78wudnEaUQN/ALT3CR7i7xctCnOvBgBrD7gty82+cfpCKjQjhtQoISoNXDkChnxNnOZ+CvxlL0KKuNB7HJmAhyztCa/V6xWRGL7Zg40hAaYhy9Jp4V2D830C
aFXLMgbw5hg0HIFX54HNGv7oQm6WosUSMeVOURlaKvtdrwNZOB+xdjF7iGn+rACf5nZSbI+V7Lnchy1bYWTOpD61mYicsOLadk+kXyt2db6NswXYi0pB8FnWlWKUbYoilYmBgeQFixjTtbQ0TlgAlhitbRGtgJm9r/MzwZ6FAdWC2v3MQiQn12J/9qGybsO6AJJzlQ6gAA2xYkxV4GTe1zVIeIOl+xeWcNDa7HkNtkBifFHKUyU3T+do28nxBCSWnslMyBPbE5JEqBsrJ8W0DNvIox6DVR4/DCE3aQGSlywsV6Cz3L/1OMygX4zCBGTCmVZYbupvqTLqWc5cV3HLmETIXEa82R1ioOIhqXM5bVBMzxG0YdEghg1cP2Bz4RCbC0fYXOrhBzk22oMGzX4HP4xo9pfYB2CSjzm1HXjfI7ZLWGIs7R46G+Aj53xl9BGrweOoH3HUu5yTzll/C8s46CyWrcXHnV1grzE4t7C4fdnI886iYZEx17lOw42cxzOp41jJ6StDNvv1uUHmMpsjYfkdn0c4uoBwfAn+4ocR+xWGC5cwHK4wXDpG/2HJlYbjITeJtQct7F6H6AP2R5euVx347CBzqnYfy8VBym9J2MdBmvcAgAPgWT3cA5qmRWwCaLB5HjmZW+1iF7vYxS52sYtrFp/4iZ8ITs1yn/mZn4n/+l//a/57Fzdf3GqgHwD8f//f/4cnPvGJ+Iqv+Ap8xmd8xg70O8W4pUE/YXgV5tU8sswnZNKkQB9rx7pKV80YN1QXKjAtLvkadEq0Lv3clepKTARfLRsQQCrGuHXdr2cYJixMASRaw8lLJyCENIZpjNRrgdNkm6hs/4nLvspcxojgBunKVEkgjblvnTGpUNBKd3b2dilFLAVxDQs4yRG5+GCqoqCAfgSbOoLb5IthCaChL7I1lfxllndK3hcgAnGAaa2wHCtvOC01hhCTxJAFTCoGxbLeZFvZ5kUn+9wNBeizLWiRZJS6Pfls2yE0e8kLxaZtLsUXImT5WpUmzZN17dBVH4/at0a9PACZNJsbe/pvO+wV7NL3Yijj7UIpCiiQsxpDlugcfZCuaX2sGH4bJ7JSgwsVyBfgQ/28AICAHDMuSMGzBpF8EPAQkPXgBCyNIYCZJzJSAcJYFMCuyOX5WeP5RM5z3v08e12XW4N9dZGLquvVhOCmfojBi2dfLWcVKr+aYcwd7WFwcP0oILMP8EMQ/04gg2D2sEX0AYu9YzmOmxbc7clvNmuYBWNpW/gQwSSyVEyUGJdyXdm4kMcZQGb9uTGAmeBtgA0CAubtUVaAYdjGCBjHU1afHg8KCtskKbVoZf0CMcgvQbYD750Fn30QwrlLiJse5vYLgKtYNZW8aPblYgNqmtywkOWK6yQ4BNjjFa57GAZdS5monQTVx0Tw7EKtbL9IWf8A2SeUGMSs6o85agn0mKQSLxcKEtY/S4nSrMy/bVEz/ihxsuaMnxsWiVmsecwEIAUE6KzzyAR+8rAWwK8/RDy+JA0a6+MJuwo183neyJN+Q5rWSsPMkO6rmRmnOqpJHsNwuV/FhD9OgZ/kRQjI/k7qCBQ4gT0BTeVll3clCzBIURqqyCUfwuBk3RWECuLljMT4oxnYVo8bbAvLLZq0r4d8gAizzEGkaGFaRB4Q2325PmcvP5KWvXr75sdn7dVcrwsguarmqWnM4w3OrRaGcXZhE1gfU+NYenO+bcSAtdDpoHo+KtMx54wJyFf/SLINom2KJL0xWco/1mAhF4UGhf9VsrLhAJOOTwX7hMlXVA8A5LlHazg3UxmqJPl1U1DypcsxfGPaFqLZ/as6h4gDInt5DgGXgg9J1nPAeLxBf76AfmH0khs1FjEIy21hZIzC+liYk0uRgm3apMCQtkNjSCy/w97l5zXrTxvQNi5gYSW/P9PZPE4qRb/XGITI8EYaMV36LaK4dUz0+h5RJaXp/CY/gvyAsDoUZt/hBfgL92Jc9Vh/8DyGSyuMqx79+RVc7+DW
DvMYV9JAZg6OEdfHiG0H6npRWogh/X4BHxW291FyWWlOIGlQSPtuEjcC+NvlVrvYxS7uY1y4cAG/8Au/gGc/+9k4OLhxCgC72MX9jQ996EP41m/9Vvzqr/4qvuu7vguvetWr8NrXvhbnzp270av2UcWf/umf4p577sFDH/pQPPShD8Udd9zxgAYtb0XQ7+d//ufxhje8AQDwR3/0RxiGAW17/b3RH4hxS4N+AaWjkyvwbOqHVXwKImJiACL7/AEF/JuDfboY9YiJsUgizYtJNfhVf1e+X024YnlNQUCOEdunp9cnDAG3dwaXhoAQQ5a/tEGKeioVBAggqEUi7RC3nMBAJtCYPF+yl9wgbCI3iI9dCvHuM1lKKINdyf8DxoqUpV1khpqCPwxIsR9FDiYaAKAJQKsTXCkUCAOO/EY6WBPYNzGF16JOYgERW8QYEE0LMhaNaeEjIYaYvA8jxtQR71nAoZZbmNYCwwpIAAC1HdgYBGZQ22WJwrzdTSoa2Q6BjXjC7N2OaDtQfykNWLkpGUI+WDOLq5ImhZ9KTcG0iO0S0Xb55ZuhFmqoeEqG6rzyybSxBrRilPEOEVgNHqvRo/figxJCxMYXKU/1iwGQ2ZkqnbQeSkGl7qauu6o1WssTAMkndqFPAF+IZd11e3wEOit+TBQBjgIUI4N9dSFLHglSjytMwAL21cw+LZjNwb7a50rZzBq5gFwd33Hoi9xsJesZBifSTEGKW/rcDx5+LDKfZBjBRzRJxomY0QwOtrsXcGM+tnnPgUwDDg6L7gzQdmgSIGvZ5s7+ZSNX4jOdRWsZe63BhdWIey3DDR7DxiD4AJ/YfoCAwcr0Y1uAv7Yxeb/pflK/nI2zAOT6NgSCNUu0zZ5cJ/QaUHk0mbEvQGmcyf5WRc2J31EVpPs6BvDh0dbP7GIXN3PMAT+NAMllCNuZMxM5zxnIUPv/ZXBgLqVY/X5ursIUS6wL/Pr35LvY3lhyvYMpscDsQkASB8QYpkCRMrC0sUevRf0l8OYQ8dKH4c9/sPgBp6DlvoAvqhiApvywevmxxTAKy2rthAEv9025HzWGkhdt1b0VpKErVjLpEQRinvoVh9JcRBPGlLBy5uy3zB7TJiTrQK5HTN5jpOyzGISJRlR5P4YMRijwF9qUb7aAoQaeyr3ShZKnxqaT7zQdYndGcqGJzxkqZmqcHrM1q8+7fF3PzWHN8oZKeW6Lg5azygCQjj92J8H2fA+r7mu50e4k6xExIHongI2OfWriIwUAbeWFmNh+LjEwZe4lDN7GUAKoylncMGfQrw6VTG9TU4+qdpxUUZg2Yk79ActcztTbTem6xZznJBnwM4WlHEaH8XiNzYUjHH3gGEfvP4IfRAGhO7dAsy/nnu+HLO/dGQNaSrHF7J1FsC24WaA1LTpL6CzDphx4cB6rwePiakQ/eowbB+9Cbq6ipGpw8diibQxWg8eZzuLwYIH+bMBeYzCGiIVlnGkNDlorqiaGTszZ8nOgyM8Tpfmpzifk3ER/jHB4Hv78h+Au3Iujv7sHw6VjrD90Af3FDTaXNhiPx5QnevG7S9I1wUc0Z1aw3QKma2HOXQDYwOzfhmBb0LiAMUuRjo6octt06AGIjOLjfoMZtLvYxS52cV/jH/7hH/CTP/mT+Omf/mm88IUv3AF+u7gl47WvfS3uuusunD17Fm95y1vwmMc8Bk972tPw8Ic//Eav2kcd3ns8+clPxjCIcoi1Fp/+6Z+OH//xH8dXfdVX3XASzmnH8fExmqa5pUCz5z73uWBm3HXXXRiGAX/2Z3+Gf/bP/tmNXq0HRNzSoJ/6AZAy97aw5rYBgB7KuEk9hTSV3LvSKT8BJjBlAda/mQFESNHKkBQJFBwMMU4AhRsZuhaGkDtPY2LgSIFeOi2ZkLtrrSnyMfsNw0YH7g9B/SWQH8GbQ4TNGkGLVDXDjxkwUlSgthM5IbtAaJZiMN/sibyQFWlP9dPT0HGzJ7ag6u6tCzXDkIDIoXiY
aPGmAkPqIiUFJ8eGl0IcpUKHTjnFL0gnpwJGKcuwZYZpOumEJxYZKyMAiEoj5S7qmoGn7ELTJvChKjAhgNyAaKQzOnech5lPDlAKqozSdc43z6muni6AFBx03zIBo4J1QQAtnwqTMQJr50UGaRTQT2XKRh8y6Df4qSySAnUi2xmydFIt7znvpq7/LbK0p8m+KlMmmbD+mAI6w2CS9SEI0OyVKbgFZo1x2mzgE5OlLFs/V39nes2Rzuci76qAfZa/QpDjKAF/WUYNyMxbZfUJ089PPPw0iAlRVc58yB3uxAPccY8YAmzXok1+lGAWtpy1iH5EiAGLRRDpNwCDp+zlczQ4GCIcDQbLxuDD3YBzeyMOOovBBVxcjRjSfnbpdyfemlYYfl1jsGzln+4/BedXo8eljcNeMIgR+RqmvjpSBGsBamHaVEhcUvbJ1BGpJVfngIPK4dXsW92dY3vpxJhe6yC+tt3o9ADujttFifsyB4rKnJkX0rf5+M38wRRkyPdCtiead5ThJ2relBolYpb0rNe1FLcpN3TdqMi/nu7DMbHfKAYBRSpGmNzfE0jmBtC4BvcXEQ/Pw5//oMjqDT2gHlZs5Pq/6BC7/ZRXJKUE20jDlO2wdhEbLwznId0jV2N1z9OLFopaQ4BIpYvs32yjsprAMGHdaaMU/JDyalFJmOQf2oTEVvK7GEBuIY0WrhdZT5UkT95x0cnknPyYgLchL0uvzWSkgSpEYTJqQ4wNUcBLy4BtZRnDGjSsEbozst62zWCYNoFl2VLd3mo/InrJ12yL0J2T7bhJYuMjjsYwyZd9BKKxIGMz2CzNQCUriWwyKy9uA+BrBmYFEBIbQFn9thXALzEfwScbYoiUxCTXB2X0FclPyh68et7XEp6toQRU0XZLhjidv9X34+mKJJCvVtBgg8g1+JfYfuOAMIwiX3lphf58j/WH17h0zxprLzK5B8cjlvsNTGPgB49mX1RLyDC6vUsyTv0xyHYg26FdLDB4yX8aLiDn4ALWG4dx47BZy6PKmlNSNWgWFrZhDKPHwbLBavAYnMdB1yDEiL3GYOMsVqPHwhrxVabEkmQBXQ3HIkWf8iFCwnkTczfLEA89Yr+Cv3QB6w9fwuqD5zEernD490fYXEqg39pl6XXTGNiljINd2pRfemkiG0e5fs2bE5Ga2GIB+GTfVrsMmPiUU4xZoeJ6xy632sUudnG5+Ju/+Ru8/OUvx6te9SpsNhs84hGPwHd+53fe6NXaxS4+qrhw4QK+8zu/E7/wC7+A7/iO78BLX/rSzBi7ViDMe9/7XnzCJ3wCrL36muW9996Ld73rXXjnO9+JP/iDP8A73/lOvO9978vvW2tx11134UUvehEe9rCHXfXv3YxxfHx8S7H8NJ7znOeAmfGc5zwH73jHO3ag3ynFzYME3I+IKjsJOlGY2lbq0QlEIdbRpDP0RKf4rHs0PyJmpo38XS0X04lmJPXskhkpQ36IqXgA3iydBSLZqewh6fqU4omsHwO5y9YQsGxkwtqMK9C4Bm2OwMMxyI/wh+dFEm/TT3wXaCKhI1KA0TTC6mv3ANMitEuALTwYPpRxRvp9KbBX4B5QCkUq01n59JGT4hT5LYbvtVzTrEhJMSAqw6diAdYssrxqgRAh4K4DwMYimkb2euMAR9Pla4G06qiuJZWkm32cgnW6LQr6acd7Wl4kKgUZmb1LAdZc/6LUNu5qrB5H9WaDni80YcHVgJ9KNG6cePNtElthk5/L68rgczPQD5AuamX31dJJc4bfspXjU9li+qgAYAYE0znrQ/Hj9Ol65BMgFFKRmmIppgIVmEdlDAAdg/vOTqmvG1oco1QYa1InPMKYipjpOK4Av3mEGdhHhkFeihuMgBg4A4IxiNyn60dw8qsbj+WRGwuyDTgEhEv3gvdcIo9IQXdvcRaGimyr5Qajj7n41RjCcWfRWoP14LDXmlTYCtlzsQ7dJ61lLJPnjwKzIUm7NkzCbokRgHTAk4+z
otL2gY+VpK8+1uug1yT1cixyeUVi+HC9857Zxa0VCmIDNRh0haBpEX3Cy5uDfxkc5AJ+1VLeFdsjL55EalxY0eV+Uq/TxFOVC4vlhseczagAS7pfSwFbG4JYlAjGHjFJ6uV/o4MfHMiw+Kumpqkc1mY/tmhbDAEV2CeAX58aKDQaljyYcLLQXDfCCVBUNRoFbUrySbIziJdzaEDkgNhO8ycFBwGEJKcpaYpNrD6WZixiRF/8+zRvIyf+y2F9nFaOQWwFrGkqb73UkBWiMv0F+LLp+KJxLYBGd6asU91Ipetc7zskUBOuAsqs5Kw3QWi+MYSI4zHk4z9GBdVIxooKm3Ti6WzbqSTnZUC/GEMBJIjl2PNeZPmJUgMbFxA/LydmD25DAFgYpJHlHd52P03b0FRzjsZo/n9yzof0epnTnfxA3dyp25DBP2X5AYXtlwAln8473w8YjkYMRyPuHTz6JJU7hIhzMaK70IMMiZxl18J0LRb9CnEhnsfkxUtR8zNhNvJEzVF8jH0C/kZ4HxFcgPoYt6OHbUz+rDaWDS5gYRl9G5IEvcHCBozeoDGMzsTMohSbBqCJqTmQk//zfMCcQ0wS564f4I7XGA9XWJ/vsT6/xubSgM2lDfrUOGeI0AEgQ1kuXv2gow+50WzOAp80ts1WgVITLRMm1x/1Nd3FLq4UL33pS/Grv/qr+Iu/+Assl0s85jGPwY//+I/jzjvvzJ/p+x4veMEL8OpXvxqbzQZPetKT8FM/9VN46EMfmj/z3ve+F9/yLd+CN73pTTg4OMAznvEMvPSlLz2V4vQuHljhvcfb3/52bDYbAMBP/dRP3VJMm13cGuG9x7vf/W486lGPOvVl//Zv/zae9axngZnxxje+EU94whNO/Te2xW/8xm/gBS94AT7rsz4Lj3rUo/DIRz4Sj3zkI/HZn/3ZuOOOOy77vUuXLuEP//AP8c53vjODfH/zN38DIsKdd96Jz/u8z8N3f/d343M/93Pxkpe8BPv7+/jRH/3RW5Kt+NHErQr6AcCznvUsEBHuvvvuG70qD5i4pbMVSl2h2wC/K7H2jBEGm4Ew2OaeWHXMC0/137W0Xl0YBgoopQUosMAgTMBeZV5/M8XSEhbG4HAIqVsa0BFUX7wuMYjY9aDVRdDYSzd6v5JuUJXydAVgo7YTgG/RiQSVbUDLAwH22n3plLZdBvscOBds5kBq7ZeW5aWSJB/5ATSsSxf6mHwEhyKHBdsI8LhYZukh2Uwt/uiBk/5uOulMT0WRmMZF2YcZkEySWE6QSADAot2TQh4bUOMR/VBkRIMyEauJKweA3aQQFZbncuGE1xelGz4PbCkkhsX+TcXmu1yMacyaVGSoAfW5vI+eU31i+PUJ7BtDhPMFYPMBGbxzlT/f4Hx+fZXAIgWN5oBfze5TiUlhjVnstcLy229tKkKV81e9aRaW0VnGwhoBfqptro/j+jpTjm35dA0CXomhkq9thNzwoF3w2hXfMgA/gIe1MFxr4N0YYT00LciN4HYENxYWmEh4EjN8Kqb60YGMMPwy4OcD/BgQfMR43MOPDrZrMR73WNy+QrPXwa6PwftnwGduh7ntwSJZu7eBbZZYLg+wbhi9C1jYBQYfcbTxOBwcVqPHQ/ZbjD7icHCVPKvPPoz1vps+cmZlDml7fGKANExY2DFLiwJACDGDyz7KseVjFC/AJG3cVz6QQGFw6vGismQNExqTuveZMutodXT95T2J+Zp2jO+60R/4MWfJ1EXhbcXzmDywkFhFNAf79LPpURp+2iJBzRYuIl/n9fpeh97/LfME4GkY+Lj95j4pNtyQUFZjTPd5jSSRmdlmMci9frOC+9DfwR+eh79wL4bDFcIon+UkH2gSM4maFtTti0rC4gCxWWIdDdaj+N6unQB+h4PPDTSswEPySmu4sJ9L/loUHWhYCRjpR5H9GzYIKlW+WJYGoyTHfrnwaf8q258ANLzAYpk8j/0AGqhIiCZGYVwdiXLE+jiz
sYxtBczyY/HgRsnJYxSJ+sFHwDCsKjDMAdha6aEOtiUHu4nDR+ADxw4xxklDkk/gWu8CltbCqMxpmLH5TJtBuxw1KJ09r0MB7fWzpklAX7VMZawCQHCwbNFZFg9kAnwQSfwxyHoyFYaXNoIZlpymMyI9mwHDFAp01jLnwJT5p9evOWOs/MF5W9iYAugGj+hGxHFAHHq44x7D4THW59dYfXiF8xd6fGDjsPYRax9w0TIuOYP2/UcY1w5NYrgBQHtmDy0z+OA2mG4fbDvY6NAag4VN22eqRiUf4IaA/nhEf7yGH9ZwwzqtrkHTHYBti2Hj0C4shn7EeuNwsGywHjyWrcEdBwuc6Sw6a3BmYdAwY9lIjqr5amMIS2tgGWg4whDDERATsCrHiUccB8AJ09H1A/qLG/Tn11jds8b6eMTF0WNMjW1LQzC9g0mgJBkGNzZJflq5TtlGjjMjbN98nU/nrO4jImQZ19aQzO/8mBm/4kXtC/P5OsYut7p14nd/93fxbd/2bfi8z/s8OOfw/d///XjiE5+IP//zP8/F0Oc///n4zd/8TbzmNa/BuXPn8O3f/u34mq/5mlxs9N7jqU99Kh72sIfhbW97G97//vfjm7/5m9E0DV7ykpfcyM3bxU0WMUb84i/+Iv7kT/4Ed955Jz73cz8Xj3vc4270agHA/5+9N4+X5Krr/t9nqerq7rvMZLISAuQBBKJAgCBJUGSJBBBBAg+LL2TVvIxJBHHhgR/KKgj4GJFdHgFRIsiuENkimxBAohDWBCSEbDNJJjN36dvVVXXO+f1xzqmq7tt3MsvNzCT05/W603f6dldX13Lq1Pfz/Xw+WGv5wQ9+wNe//nW2bt3K4x//+EO9SjMcAG6++Wb++I//uM4/2wysrq7yJ3/yJ7z1rW/luc99Ln/1V3/FwsLCpizbOcdNN93E1VdfzdVXX80111xT/x5/rr32Wsqy5L/+67/4r//6L4477jjOOeccfv7nf75ezmAw4Jvf/GZN8H3jG9/g8ssvxznHXe96V0455RR+7/d+j1NOOYX73e9+69b/ggsuOKzJPuccH/jABzjrrLMOuKlkM0m/HTt2MDc3d1BJxGc/+9nc6173Omifd3vH4c8S3ALanejtm7v6xm7KzbwQEiVkfYPp6iyD8JZJBd8mrq9108PUDwfUCrpAIsTvL0OHrZaCjhKI0aq3nhouIaocu/sm3Gjoib+q8IRWtMjRSd2NLnvzkHa8mi/phjyUXl34c8HKMxKO7awPicCKiU7UVsaLMEXI6su9Jc3aql+XqqzVTUL5gqTTre3fVtu11Ad1MUAlREsq42JBafoRERWgxgUbWRfyjpQOx5onGD3xVzVqxLgqtunAdkkGOhsvzETLnbje8VElhx3ht9Hx7Y8rT7RH0ioW/+oItA2270ZQsiHttBQUE3+fVP3594yvYSRv4k83VbWlZzdVpCoUTEL+kRS0iqae5Oklqi6gTipM2kWper2gzvQUosmj9Pzx3m+DxnY3dG9LGuXrxPgXszSJxWKd4HSCzlKM8ko+oSQiD5ZqoQVd5AVS+Qw/C5giSt78uWUKAaxi+1ld7DJ5QVdJT7qHjnnZm0dK7ZUcztLLFuqxOJGuHmv8tpSMQvf6WhKJ34bInVT8TSLm+hnpLVjXMCghGBm7TgVQhqzBqBpt50B6Vaip1aHt5TfHTWP/Go+hxqoMhoPRXu/PGWY4HFDPrTYYzNcpuaPFZ1vhtwFZ0mT5yXX2nsb4czs2gNiJhqraOlc0jgQm2KUrMcWO8jCCV8S1LD2DCtv/3lLAjNawa8s+Dzlf85mrZYU1FpVoZKK90k+nfn6VZhBdE1SCSzKK0ufaldbbaZfGhXFu+rgpWteQaK/obQEFWlCTfX6uVTWNXdEaMtqKhty+qOBsz6kaooax7N34+Z2kE0w4cupJubNQjEKe4dBfT8L80lVFbUMY931ssjM2qr2DHbsNaj/dGbfvXLchGoeE2vbyMCf9oGku
Erhas2lCHqNwopmPComTfj4KNPsqWNDG59qPjkD8RXIvKDvH5szRpn5yWzkbmh+pBxOBXyfraFR/LfV/M58RvuFQCBLJmCuH2cPlfyNbz3oV9mQJaW2IIwjEX+Ezj82woMorqqFhaCxD41itLLn12lWApVKhB14J2FnwRJnJfZ65qzyBSNc3K8ZcY98w1sw3RWvAtVVBVQyporoVT0hKnSLkVmwVSDLhs8ZTLZnLdNj3jl7q1yvTrm58ylRstBPBmUD67GnnrT69GrJRP47tymA1ao3DlpbCOsqgMFUCUteseySuPOGX+KxznXh3l2DjjNTe6dPG87S1/0TTNKuEQFTj2dQzzLA3+OQnPzn2/3e/+90cffTRXHrppTzkIQ9haWmJv/u7v+PCCy+s1Szvete7uNe97sVXv/pVTj31VD796U/zve99j89+9rMcc8wxnHzyybzyla/khS98IS972cumqrhGo1Gt9AKvRLk1cdVVV3H55ZfzyEc+clOW9/73v59HP/rRm1Lwv+qqq/jBD37AmWeeuQlrBu973/t47GMfuykZeT/5yU/44Q9/yK/+6q9uwprBq171Kl73utfxvve9j23bto2RFfuD73znO9zznvfcFEXpK1/5Sl72spdxxhln8C//8i8HtKyf/OQn/OhHP+KMM8444PUC+Kd/+ice97jHbQqhcOWVV/LjH/+YRzziEZuwZofvut1000184Qtf4D//8z954AMfeMDLW1lZ4X73ux+DwYB//dd/5bGPfewBLe+rX/0qb3nLW2py75prriHPvXBBKcXxxx/PCSecwB3veEce9KAH8aQnPYlut8vZZ5/N6aefzvnnn89ZZ51Vj6/nnnsuX/rSl/jud7+LtZY73elOnHLKKTzjGc/glFNO4QEPeMAe1YARd7/73bnyyiv55je/ycMf/nAWFxcP6HsC3HDDDRx99NEHvByAl770pVx99dU8+clPPuBlbSbpl6Yp//t//2/+9V//FaXW+ULsF/7rv/6Lk08+GRmaeKc5H5566ql7tawrr7ySyy67jEc84hGbcm2w1iI3oQFr586dOOc48sgjD3hZB4rDiynYR0zLTxqzfWxZ9Yzd5LXsaVSr+FQXJGIBvlWQmFYnCeIufI6EQLYK9SKo+mLY/KHOltkXTG5LJQUdJUklyLVdiHzZZ/btugET7Kdc4W9qo32nyPrIru8+V4vboNOt809c0q0VdJVr5beVts7Ja1v01NYuExDBYgpTIMoRospxa8t+XQYrDQEJfp/rtLbwicULl3TqHL2xwkcojpjQfWyD1WRlfS/qpEorwoSDqIpKMCFIdeY7+wMR4wBph754NlprKf0UaI1QKVVvKy6bmGxH+1Ihw4Ev/frr7ID298GCALTwJWGDt2qzgjofcbK4G9+jhKDEd7Bb4Ykh7StIGOmwTmAkoP2xWtWkUGN9BA1JE39vI9p4RoXfXJbUv8+n2ndFJ4pEipaij5qcigXSWIwGxrLgolqlrQiWAoQTOOGLYPE9ZoNjSwrG3utf03RFi1CsFZEcroqG/AtwQvhOa0BUJUiFlAqhU2RVoLMUW1a+yJUXmKKiHAwxeUE5yH0X+jCo/wqDLQ1lqEeNlhVJN0dnmmJlDRVUf0l/N9kRu1Eru5D9BdS2AbK3gOiNsNWITHdIswUK4xhWkjyxFMax0NEhf8rUtq65sdigAoJYzPW/R1UeQGmiSsir84oNtNzxvW1yL+Y+ruZVTfTlZVA4WocN75HRbitRY/ai8XfdOt6KtUOg9FPSk7y32vJn1lq3d7TttPeItnovWHQKUQFmPQnQshd0yhNVaF8Ijk0/JhB5kcgYy/CUfj7VtsQrrX/ucHRQGENUQIK3sowNS7ZqnAmswey6EZsPsMs7KQdDqrW8tl+WqUZnKWphC2pxG6I/j+gv4NI5r/LrzFE4SWEMRVArr5WmtsWOyuYkFNMT2TSpRAvFNCrfBcgq9w1V+TKiGsForZnvhTmVi04N
OmvmVBNkWZxfx/1aN3cFfY9xliT1aj9RarAmZDJ7ws8Owtxu5IsGIklw+Rqy00WUnoxMVUqq/PETjThNsLeUDgwSmfY2nI17Utavt+30D4k1+v4i0u1FsMoWAgpjUYGIidtBhQxDF5vzoN5fbYVmey6mQg4j8dgNjWz1uRzJvri8qBC0BozfE53QOKcEGCnqa3jcVxZRq/5EuHeKVuVpbGSyVU0ySunXoWzdM8T7hah2nKbw27hZIfw/kn2jHFfklIMhxcqAYmXNW1mujBgYVyvcAAbGMTSGI1IDa9DbnaO7Gp0lFCtryFSjBivI/gKynyPKEWk3o6N9g1M3kcxnmqVUoZQn/qxzmKrAjHKKtSWft2wNcm0JpVNMkVNmfapyEWMso9zn/+3qaFbzlJW8Yj7TrPUMvVQx19E+7y8oLjta1uNsVytS5ersVN06FtpziHaOnTW2tjaNpF8iXDN/VQKVeIWf7qaorOMbFLJ+yBvtUOHHKW+J36j8xq1dIZU0bi7BucJV5Z7J21sRs7nVbRdLS0sAdXH40ksvpSzLMfLinve8J3e605245JJLOPXUU7nkkku4973vPWb3eeaZZ3LOOefw3e9+l/vd737rPuc1r3kNL3/5y2/lb9Pguuuu47LLLts00u+rX/0qp59++qaQftdddx3f+c53No30++pXv8qDH/zgTSnsxu22WaTfs5/9bB7ykIfwK7/yK5uyvC996Uvkec4pp5xywMt61rOexVVXXcWb3vQmut3uAS3r2muv5dvf/vamkX5f/epXechDHrIpBMW1115bF/83A5dccsmmrtu3v/3tTVm3E044gWuuuYajjjrqgJcFMD8/z4tf/GIe//jHs23btgNeXlEU5HnOySefzK//+q9zwgkn1CTfscceO5U4+u53v8s3vvGNqRlu3W6XJz3pSfzFX/wFD3jAA8bG433Fl7/8ZZ7znOdgreXUU0/lkY98JGeeeSannHLKPhNaP/7xj/mFX/gFPvShD/HoRz96v9cp4nnPex67d+8+4OXEZT3nOc/ZlGVt3bqVt771rZtG+C0vL/Pwhz+cn//5n+cd73gHH/7whznnnHP2+9j74he/WGcBPuQhD+HRj340j3nMY7jHPe6xzzFqH/3oR/nDP/xDfvjDHx4w8fdHf/RH/OAHP+CLX/wiSZIc0LIOFMLtq6zmMMDy8jKLi4tcfd12tiz6CckY4WercbJvo67A9k1q66Y32kVV1tWEhAlkj7GhSOEaaxloqeLCowo33UmweGs6FqFzGLeiO2DQIt/id+hpgajyQPqtINaWMDu3+270ld3eZsVaf1OnE+TiNl/gX9zmyb6kEx6zUJCy67ZrtNiLqBVM0m8zJUMxCttYeZoCORogyjUYDbGru2tLnvqGUCdecdjpIrIestPFpn3fER9yBJ1U3mYsZJLEdYP1loyxWBUf21l0TaYXddEsWmcJW9UWWTJf8d+hGnn7nPpLK0hSzJYTvK1UC6LKPekHDVEduupvM4gZHq1tXFjfhR9tflx4PhYkKuuVWGul8UVh0xQv2ySQda62AbXOMSybHL9hUY2pw6ZZQ6bak3xaCuYyTSK9uq+X+Jy4XuLJvl4i6WoVjktZn9u1vVhUMtSWpeMZQ234MWt9gQoa8rB+7ZTNKVpjSxYssDqqdY6UI18sK9bqokldtM0H/lwJ3exY02RwViV2OMCWFcXyGuUgp1wbku9cphzk5LtyymFFOShry08hBUIJkq4m6afoTNM7sk/Sz8i2LZJtWyCd76GPPh7ZW0AubkPMH4FNurjuFlzSoVIZo5A/lRtLZfzxEY+BuG+Na9tyNv+Px9Ao5FaNjKUMWVbR0rUhhRtyOJJ7bbJvdVRhraMqTF1wt60BKpJ+QghU6NBPVCv3sbVDy+GAi553BktLS5tmlbER4vXxW+c9mfnOrVe0XhkV3PdN/3xQvtMMBwfx2Pnx1dexbesWT/qI9YXyMbSUFyI2GkQLSON/b2fn
OiG92ioq/XUHp1MqoevrgSft4xyMugNQCm/51n6Epqki2gCmh+sl0VbeEn2ws95GLpBZdrAcFEYWO1jG5gOKm32Ony0rb5OXaNItC8i5Lcj5LahtxyK6c9jOPC7t4bJ5SpVRGMfSyJBXnvBr23qCH7t6oYlla1eTSkE3kXSCUr0mWcrcW3pWOTJfAVPg1lZrclJmfcj62O5ivQ6V0Ouad2LDSmzEqCxjpF+cG8+lilRYv32KNWQ5xA18U5nddaN/bGX66W3HIRe3wcKR2LmjsJ0+QxIK4xhVDhtytFMlgo1hkw+3DpPK+Ehc3gZQObh2pWzZ8nvE+XtHC+YS6fetaH3HSQVmeN+0/acEYc5aNLmIpmVTG7PxYFz9F5vppsyvp839oFGYxh9Na9+E5caGwUgeOpr5d1IrCtdDijAXtxWiGHr1ajH0jYyDZcyuG/xxtrKbanmJYnmN5Z9cz9oNS+z49g1sv26Vq4cVP1lbbyt5h0xzVEfxv47qs3DHeebvMMfiXY8j2zpP/3/dBb3tONRRx2O2Ho/rLrLLpCyNDDcMCn5w04DrlnK+8eOb2XnzGrtvHLBy405GK7sY7tqOrQpsaKwUUpH0F9Fpl3R+K525I0iyhP5Ch6Sj6M116HUT5jLNtrmUbqpY7KVjlp8dJZnraOZTRS/xz2VKstiRZG6EyFdQy9djl2+m/OkVjLZvZ237TnZ+/xrWblpj1493s7pasCso/gD6SnBEquhuydhy50V6R3ZZvOtx9I7eSve4Y0ju9HPIhW2YI07AZfPkosPSyPqs0cAWOhet6v140NGCrjDeWaZYQ+ZLUBZ+/moNy8urHPnQJ8/mVjPcIqy1PO5xj2P37t38x3/8BwAXXnghz372s8dUeQC/+Iu/yMMe9jBe+9rXcvbZZ3PVVVfxqU99qv772toa/X6fiy66aGqhd5rS74QTTpjt0xkOKTZLvTLDDLd1rK6u8vnPf55PfepTfPrTn+aKK65g69atnHHGGZx55pk88pGP5IQTTgDgE5/4BA972MPo9abneP/+7/8+73nPe/j617/Oz/3czx3Mr3GbxhVXXMHZZ5/NV77yFRYXF7nXve7FZz7zGTqdzn4tb2lpiYsvvpiLLrqIiy66iOuvv54TTzyxJgDjPlxdXeVrX/vahuT7FVdcwT3ucQ/++7//m3vc4x773STxjW98gw9+8IOce+659bF0ayDOTW9pfnGbVvq1i1GTCr8xwq/OSmkp94QIf4vWQ+E1otHkWTeu6mrs94LiCG9VCN52srbGDF3oAkJgur+BqYsNk9k2hxmkEF6RRLRLIijqYn6eJ9VckePKsib8/Jt9vozs9pFzW7CdeWw2j0u6lEnPF/MrQ2WbAlC0PmwTIlEpmSq89Y/ypIcSobAYrDyjwo/REJsPxm10ws2xCJYyIs2QnW5tgUVQ9DVdyr7IE3PnzETxJD4moQsXhF9nKxoiGOetcgCsw4moABWk07rfhWgsdG4hlN7p7KCcsX5tb40F2ya/SGqvspUCYV1NlEbNm7dy9So+KRyJknQCqQcSJbxtY6JETQTa8NjRktJYEiVqwqejZU32RERSRrdImmjLGK0621koc6kOKj9JpgQ65M3UuZ0BZfycIAOetKGazAWFpsN6LA1wggCfdnQIXG2NVRifvSkEpFKiVOqJeKgVJjifqyKV8p+tE0RV4nQC1uLSzD9WBegEGSzcYod3NciDBagnqq2xmNJgCoszDqEE5aAkibk21pH2R5iiwhmDyQt6StVqDQnI3hxWSDAZOrWotBf2CZTSoW3IcNS+I75tA2Vsk+1YGl8ALI1lTRpKK1GlIQ9je7TmbPIcGyJwrWjsO1fzisJYqsJgjMVUNmR5ju9IZz3pJ6WoSQkrrScCJxjcKj8EuTNKjnXm3xrLn+H2idgVN9ZIVf9xg/0+JbOvfb1z7eeVDhZv0WpYe1c9mnN7srHKr1fbzrjJ71LKz9cq60J3xGHaWNXaHr4BY4hd
2YXL17ySrSrBmrrhwkSbZSlRWYrO0obwW9yG6G/B6cwr/LRX/cdx0JM4oTGipYKWMtpAe9W6lv5aFvOzEhkajCrflNQQf6FBKRB+QipIvarP6QyXZFRCU5jWZwUSxrTG65jlG+dLEPLcRLColM1xUx8z1noFVln4axMgpMLmA0QnQ/bmEOUaUkiyri/GV/U1OM7lxYbZ3ZP75raCWj1pm30Ofh5knCMJxT1tRWjkc42VPU2DErTPu/XNbvF80lKDsmD0Lc8T472YBe/BG6wZpUZL7dVsNHa8yvnjJI45WrbuP+IQEIlEGoeQNlGpRDDBCM+1G7Ha/x9bx0lY6+9rqpIq5NiVg5xiUAZrT2/rOQ2rlWVOS28DmlfBDcHb8lKVIe/cE6euKkh1xxOyqWY+1WzpeaJupaPRiQo2ngpnDbYqMK18cmcNNpvz54U1mGIOKRYxlQ5fw9WZxm3Lz/nMoaT/PVrTJ1LW56OlpX4UMtxDpahUI5PEN3V1NSpVpFKQCDBxH7bmwUL5JjCp/NjVtiGOeerGNJ9Zv6/lWlHPrduEfMgaPJSYza1umzj33HP5zne+UxN+tyY6nc5+Fy5nmOHWwozwm2EGj7m5OR772MfWNqY/+clP+PSnP82nPvUp/vAP/5ClpSXuda978chHPpIf/vCHnH/++bz1rW+dqlj+v//3/3LZZZfxG7/xG3z1q1+dNXbsJX7u536uJlT/8z//ky996Us897nP5R/+4R/2WZ0HsLi4yFlnncVZZ52Fc47LLruMiy66iH/7t3/j7W9/O1prHvrQh/LoRz+aF7/4xTz1qU/lL//yL9m6dWu9DGMMRxxxBCeeeCIve9nL+Lmf+zle97rX7df3e8ADHrApKu3Nwm2a9BtT97XtPOPNZXheTBRroSEA3cStrQs/JhB+NhYo4vvizQ0N49i2wWwXpERdmCLka5nwAXa/Cwy3GiHTgpYC5Zr1V1jEyCvUfIi6wVoTCL7Ed3tL5X+f24Lo9pFbj8Vm89hskTKdI68sa0NvNxWVO75QERU0jetqphVSxIwe36Ed7UXFaBVRDJDFEDFahmKEXd3ts29it3yA7HQhVQ3h1w3qvqiOi8U357zllilwQnpybnJi1DqeYtGk3WlcWTF2nMSu6ahCwvoCmNYpOIvt9BG2U1t81R+TdjH9bXVh41CULm+Nz4xKs5ilE5UeWmq6LZuhWBRU1ufRCBGVtqBTNVYQhqbgE48la6nVf2ulobRB9RWsPtcpSaVoCsfCFz+95ZmoM/syLUmkpJeq2nIq075ImqnGBq3dKS9FsPK0oYBeF0GpSe5pDQUCNzVLywaLrniEGUtd+IqIDQbtDvlMddFJD510AmFf+n1hDSRdZFRFW+OzL0MBiaD+c6Mc2d+NHiyT9JcA0P0htqwQUuBCtotTjiKvoARnHOWwQqWKYrUknUvoLI+o1nKS+R6mqEgXVujkA1Q+QPbmUdsqSDNsJ8eVQzoqIe3M4RJFYZpCcdv+d+xcmzh+ikD2rhSGtdL/rKaKkbEMAsG3VhjAq/q0FJg2+YvEav8JceLR9huXUUkwQfDJdVVFX3xzk9KJGWY4jKFlK0Or3UjFBHm3EWTL/k/KcZvtOhcsNN2E17qW0qc9jloa22Q/H2hIK/DnZzw3jXVTFdGHA+rtobS/FuYDzK4bqG68Fpevkd94c8jMsrWyGPBWnllKetTRyP4C+pg7IfoLvqEqkn1pD4P0ymgT1Mwhx640DQHX0V6NnCnJXEeRKsFC6h+7WngrzzwouSLpV+VQFtg8KuyUn/Np7S1F0y6u06cQKcPSWzMDzdy33ccyMXbHFrvYJGesoxQCqTv1cac63TGyw5MyhV+GbqxSpJBetS4VWWeOwoSmG+szx6QQzCWHKRl8ALhhrWJQehVnbJzzv/uc3B6KRPpjohDj17NJ6qptSd622XdxP0qJ0plvjqyKoEytxseIYAHqhBy/DyM2yliQFUpquspb+urQOBMxdu8U
lxUb8kzjAtEmLcGvrxBi3dx5nc3nZOTD2EYI1o3WYIO9+Wg5J981ZHkp56bCcFMxnXRarizd0jAqTE382cL/REcFl68hiwFISS/thc5GOH6hg1aC/3X0HMY6RsOS4WqPqugjdYophmOfVeUDqnxAsXozxWCJpDtHmR9Jks2RLSyQ9UrSjqZYqEgzb/m5pVexmmnMQlZbkcd71rmO8uSwdaCb8RmdILt9VK9HupDT2TKHNZbekV2kEshdeXCHcXSVJO0lpP0EnXkbYt3vkvS7iG4/WHtmON3BICmtDfbN4/tKQm1dLwU+z2/KvbwnJG/TZYwZDhLOO+88Pv7xj/PFL36RO97xjvXzxx57LEVRsHv3brZs2VI/v2PHDo499tj6NV//+tfHlrdjx476bzPMMMMMt1UURYG1liy7bUQF3Vq4y13uwtlnn83ZZ59NVVV8/etfr0nAr33tazjneNSjHsXTnvY0LrjggjF70SRJ+MAHPlBnDH74wx+eEex7iWuuuYYzzzwTay2XXnop733ve7nb3e7Gy172sgNarhCC+973vtz3vvflRS96Ebt27eIzn/kM//Zv/8ZrXvMa1tbWeOc738knPvEJ3vSmN/HEJz7R33soVav7r7zySs4///wDWofDCbf52fI6wq/dubkB4QdBYQXj3ehiXEUyiXX2e+GxfeMUC1NjZF/LYjT+36n92/S36uETbsx9d3DQHEU7n9jxbQpPDoAn+rTvqhY69TeH/QVkfwGT9r11V9ojr2xj12e97VIsUNjW/kmQNdcWb/yUjGRGWBdT+mKDKTzhlw884RdUfgRlEjoJCgLpf9eJt/CctGyK+8XiiT8hgZbdZus1EAjCoAzUSYZWGi1DjpwT6yxKgTpzzX8x7bMFncPZCmRTMHFSg87qHJnDa6g4ADjri5ymqDu9sRWxgVup1O9v17RlGxpVBwikbMo4k6Ua58BKQSdkwnSsLyYkSpBXjfKvba/ZtuNUQrTUD/7YiCo/Kf1zWkaLMFFbhCWyIfxiccK6sK/d+P6L+aOevPPEXjz0J61Go62urbvex4+ESC62iU7wuVaJlOE49D8gSRygMpTUPudPSJytEKUvbOGsPxYTmjxAZyH3542wxp+L1tDZWiCUpJhf8+tSGkxpEFJgCl+sdsaTXKYwmFRSDgUyURSrBUJKihVfPNah+xvwjQLWIIXEOgtJeF5qOkmGE9JnN7bUCJPFSRf2v3G+8F1ab1XXSyRrpSdsS+voqIqRsXRTxWouGVWWVFcMAxkY94mSApNEIrE5qWM3+2Q+ZNvOs03GGusQ+tbLf9kIUspbddI5m9DefrGhcwL+8lkX9CeJPynBghO2Voo4qRGiRRi2bf9kM++ahia3eXxe5RVkol5XGzJiXev5wwkmqJh8ExU+myy6JeRrlCurFMtrOGtxxtR5USpLEUp6hV/f23rWlsjZgifcVEqFrBXm7eaWuN1k6DaJ17VeqsiClWdHe9t0GWwOMQWyGPprdCD8oroOqNU6TqX+83XmVTt2cqz0jhHCNYpMO2U+PkYKEshfnfo5g9IhhzYonuLrIilalb5BZeTdHoTUPifR2VYzHmh889jhd2TsP6rQ3BItXOM8IKre/X7wcyHjwrVRgJHNPnCt46SNMcKPkJdHIISkqBV/Dpp7rsl7L8bMCsJz0jsOWBAhdVGJeJ0SrTnPOOo5T2js8fOfhkCOr3eIWiHctnDdm/1eu27U6mMVVtv6uUxh96jyixgGS3JTGKxx/py2Xp3mytKrZWMOebFGN1ugMJItWUJpHNvmUm6eS7mhm5B0FEk2h0zSOi99ErYqKdeWautPW5bhtX2cc+hU4pxjSTSW43OZQUnvhBHnxZ58Gz8/XVD6IaWPbUh0rThO+2m9XezKCBDoRNUqwKTbvBad+HGjJhMlGwz56xD33di9fdxPh0jxN5tb3XbgnOP888/nIx/5CJ///Oc58cQTx/7+gAc8gCRJuPjii3niE58IwOWX
X85Pf/pTTjvtNABOO+00/vzP/5wbbriBo48+GoDPfOYzLCwscNJJJx3cLzTDDDPMsAlYWVnhHe94B5dccgnvf//7D/XqHFbQWnP66adz+umnc//7358nPOEJSClJkoRPfOITfOMb3+CCCy7g137t1+r3HHXUUXzkIx/hwQ9+MK94xSt42ctexurq6qZkj96ecY973INXvvKVvPKVr2THjh186lOf4pOf/CQXXXQRj3nMYzbtc7Zu3cqTn/xkzjrrLJ70pCfxsY99DPANPE9+8pN50pOexBvf+EaOOeYY3vSmN3HxxRezvLx8yHP4NhO3adJP+tbR9YWpdi5HgGvf1bY61Z3SE5YxzY1l8zmCtl0RNISELyo0Fp7RClPErIv42C6QOYvYh5ueg4aY4RIKesKacdLPFBAKVTG/D50g5DyiE6xb5o7AJF1sbyuuM8da5VgrQ2ZD5a09fd6WGyP9EimRwhcWfHC7oKObjBllfSFKlGuhCz3Hrq3gAuEXlX7gCUjZ6dZ2MjLrexsqnTV2nhDIKF/MckJ61ZMN33sy32XCsswJATrFSY2UGq0S/3xNUMRiybhCyAkJoUg2loPkLK5zaC4Mt6qi0FbBjjX3j+G49wVl57831HafNlR5Vdh4UolQwBT1+RYtw+r1b6nAorrBOd8VHknYaHfVRiRvEiVrYjaSgbHYHLM5o2VvVJ5Gwk9vsOHieioBSOctOKOVGeP2lBGRaKr54YllRwvQ+NrSWtZKGwhNVxeVO9pns/RSr5JLpKCrJVpKEtWlk/aa/WKD0rSdg+qsV6DqDGkrRJrhun1/PklF0lsBYLR7JeRMKcpBiVDCd7cPmwy8amhwLX9TU1pkqrGFHxczY0nyNd9Vn/WR81uQvQVIs5D3leAKbwelhES3mzXGNrgn+J32Y7lBYRyMjKUwfjsM+wmVhaW8DOo/y1rPj0drpWFYGtYKE8g/07IBDbaBAXovyL74GC1lczXRSDDDDIcx6nlMuCaOzadsi/jbCEJ65/TwX9ee/wjZ5HzFJpwpjTJKgnB+0K8dFGTI8mO8qF+/7zBldUYha3YuVWhnEdUIM/B5yMXuZYqVNfKdS7XKL6r7kn5G0stQi9tQ245FLh6JmTsK1+lTqswTfZWjio4JzrsoRNtMKTzRJ6XEWuhoSaK8Qr2dlyVGa8jRiiciYtawMZ6UrIraikGkWT0227SL68zjdErhfPNEEayYAc9uItZdx/yz02Gdt8fuJClOV2ArnM4Qqb8OiTjHU9KrIkP2LFL6xhGpfHORrVAiAeU/q5/c/orow8py05pXsZfG5yKW1ufexuxG6xxKQFerYPXot4NqnZvxnqa9hWLjUrwn8s85rAhNV1KidQaioLbwhHEFXfiM+tRuN9gJC04i4nFF/ZYxVbAL92bGyZDV22T4TmYXShHWVTpvFdqaM7Y/H8YVzP4J/znREcRa44+3aGueVxSDguXKcvMGKr+I1cqyWlm2FpEs9NbmtvANiy4f4NZW/RxxmCCcY0t3kcIohEi540JGUVl2rhYMlkfYypFkc5jRkCqqbSdgqxJblVT5Kjqbo8pXKfMjKXqLOOtIOt5q3ThHUVlSLTHW0U1UnVldE+pxe4RHkaSedE8zkn5GOt/HGUtv4PPKpJKoVGKNn693Fjp0FjqkC10/fvX9PZjsRpVfgpN6TKXZNPj5JjchBJLGhQMCAdlet3ZT5wwzbIBzzz2XCy+8kI997GPMz8+zfft2wNuAdbtdFhcXee5zn8sLXvACjjjiCBYWFjj//PM57bTTOPXUUwF45CMfyUknncRv/dZv8brXvY7t27fzkpe8hHPPPXdm4TnDDDPcpnDjjTfyN3/zN7z5zW9maWmJSy+9dNZosgc86lGPoigKlLrl5u373//+/L//9/94+tOfzlFHHcU3v/lN3vGOdxyEtbx94JhjjuEZz3gGz3jGM9Y1oW0WhBBceOGFKKXqn8lm4Tvc4Q689rWv5ZxzzkHfjtwk
bh/fJN64TbFrcZNF4mg3BfUNJTFYvtXxCKzrlob2jUlQ+IVHgbfBxFT+hrJF9tVqQ9eycdyEr73ZEGXubWdi0d+UvgBkSuxo6DtWYwEIEElaF4JE1sPpDNtdBJ1hO3PkodBV2ibXzJMqwu+CduOmjLaKMhT3AokalVTtfRuzJnQCncwrkaSCTteTkDpBZH3fGd/tY9N+oyyIi7CmVjlFskOYwluXjob+O0arQ2iKXlJ5RWPIs4n2ZPE4conPuFEqDVai4W9xj7vWdwBPeulbLxB+b3Cr1knj+RZznKLSY8x+144XhQnnmfRd2zo8HTOcavtLmkOosbsUWOlVX2mL9HNT2Lk4yCs5TurFfMH4WTK8tm03RSSIna2L3yIoFlV4k8OFznVf+nIuyPdq7Z4vkkwWr+rvP21zImrr27J1SkQSvcSPYzYUeAQa4ytpXpEqo+JEkaY9v/6RAHQ2MKhBgSokWIPoNUpeAJH16UMo6HRR2S6qQY7uaqqhL475LJuoDPK5LuAVGiYvKKVEphqhJLas6EiJHOX+nBvlyG4fkeXeukml6xW6UwiHJptTIlVKIjUd3cF1MgyKtdBs0E9kUAI6VkYVo8qyUlS1FWwsouaVwVhXq/8i8bcn0g/Gib8qvH+U7VkdcGtgljszw/5CTBJ+03KIp9h8uvjfOLyrONZP3Cy1rwvtp8MY60KjlarHwyYnOY7HtyWU1jGsHAsdoCqCO8EaNh9g8gJbVJiyyU0USiKVRPcy1Ny8b4ZYPBKbLWK7i1RCMwwOCpMWh223AYFvmgK/bVPlFeodJegl3jJdrq34fOTRAFkO/Vyv8plmriy86jBaeqadhvBLut5W1FHbF8sJfZcUrSxrvBIrrq8QoQnGtaaCQepVOdBKI+LcKkl9Y1mRNerwhKbwX2835ckbZ+moqHaP2+L2BQEksiHtSmubXFvbKP1yYRlWxufoCZ/nhvXNS1KIsfsbAOmarLzYBAkgHI2dAf66ruqc6tYBGMcKY72VbbQF9m/yc/fQFAAEv/MJtaCQeNbQE3/xWlpaV1t5t3P6BPj5VWiwMqLJhB7LUW/fK7aIP68QlqA98SeMQXbXSPoZOuv4eVpQ+d3SldwCuXW1vaezFlP681vVFp+evJMqxUqJEIK5dB6L48heymphOG5LxvadKVVpSOe3YqsiKPo2zgd21lKuLTc5f6Mcqe9IWtgxG/K5TJNqybA04bix9bpPRchqF0mK7qbYMiPpd+gEAlSE7SOUIO2ndBZSkp4nCFWv5609U+9i4sJ8ztSNn83HxCz3SVvg+vsJP3f29qv+mrKRAvLWxGxuddvBW9/6VgAe+tCHjj3/rne9i2c961kAXHDBBUgpeeITn8hoNOLMM8/kLW95S/1apRQf//jHOeecczjttNPo9/s885nP5BWveMXB+hozzDDDDAeMf/3Xf+VpT3sag4Gfgzz/+c/n5JNPPrQrdZgjTfetRvu0pz2Nj3/845x33nl0Oh1e+9rXcsQRR9xKa3f7xa11n6+Uotfr3eLrzj77bN773vfOSL/DBV4t1LLNHPvjhP3UJNknmhtKGwi/mCnTzhyJaJN9bTKgbe/nibJ2voVbpzissVGexCGEsAZRDL36zVYwGvqcvCL3ljQRssnJ88RbN1h5dnFpH5dk5MZ35VaWdTd2SgiscNGXKzznO9JjgUgJUWeUSUGzvdr7Uyf+BtCYOuxepBkiSZG9ecj6WJ359arz+6KSqQyKTE/0uZhZEzvHrfE2rDXpZ/z3DqSf0Ami0w22N6nfDmG7OJX6zLokQwTyL653rSxtZzruR7bjbQ5SryP2arsna0HY2iLTuUCqC3AtdZ+W453bsf7kLaJ8JoyVDQFoJcA4qdZGvJ405PK0DM6gfjPjxe86EzQWi5T25JnU9XdQQmBlKGqKUPSSgPX2ntKFAqjYYAVvAbFQF9Wy8RwrjfWFrgqGyviMTENt42SdJ9UtXmGrYsZlbFYQAiEDQRsVgEL6
Lqeo8LUGmXmln5CScm2IUJIqL9DLmmK18NafRdheoeprjbe9ErKgGuSoROOMRSaapCpx1iBDYUwWeTjX0oZwn0TrOalUi/gLWWEqwSUZUqXMd/qQpKxVLmT+OTrKW37OlYpR1agkypAJaVtWaW1rOq/iHG8EaaPZ1v79g5nSb4bbEqJix3oiqra1nhzD200c8RpH/FX6ZbQaXOr3RBXPBogqv2h13HZVEGK6euxwRmUdI2PDfKVsrD1HuVf3lWVtWRkLyjJJvAVy1kfOb/X5eZ05CnQYqzwRMs0aNXZISsFYM0uqvJVnN5GkImQ1l2te4ZevYnPvmOBMyHWtCghzqmZuk3i3giSjClaK7dlsbbtKc02N+Ys2bAvjaPNHjdU1eHtv69ChUctJ5W07sz5ilPu5Fqy39mt3KluLUhZ1O+5elsJnEEfEpsXSeBKnjERssHGsrAvbXBBT7+r7mNb5ZPATGBvuhcZykF1rHwmx4fat50emRWQDtboXEE42meoTUQzttME6s7d2YWmISBm+txACJxwifDfrphQMNroXi38OZLFIfSOhSLPgZNDcJhd7mc07NJZRYXzzU8z0C8pUVwY7WqmQ1QhReHK7my1gnGQx02ztJhy9kDHXTynykrS3SDUcoLM5itVdt/j5VT6gCM2Eo9U+zi6iU4lOfDPBsHY0sI3FZ2hIq7dH7U4iYJ3FZ4ekn2HLCttycvCkX0I6l6L7GbqbIrIeMvOkn5UKVFp/XnRC2aeGbiG9UjTO/Q4B6TfDbQd7oxbIsow3v/nNvPnNb97wNXe+85256KKLNnPVZphhhhkOKu53v/tx/PHHc8UVV3CHO9xh1rhwK+BrX/saX/nKVwAYjUb8/d//PX/wB39wiNdqhn2FlJK//du/5SMf+cihXpVNw22a9NsQE0Sfm0KyRIu9mP8QFX6R8Bu70d0D4SdoiKS9JvwOU4jRKizfgM3XcKMhdnW3v0ENWRFI5RU4vXlPevUXanWf0x1c2qNAU1mv8KssdWcu+O2YKkmiwFg5ZpnazkzrJxItIdMSHdSTkXzw9pheYcecRtgK3V/wRSprPAGpUmwnZAqqBJT2ZIUpcVWOMJ7swxS4tVVPagaC04V8mKhorJWNgfQDGoIvPqoWEZhmiGAtKntznnzQmVcZqvC7SvzzStf5fQcb47f3tyKE9Laq1t+k1+dF25YXaptPLTVKyVpNBRPqOsfYeaVan+NPRumrnEx8xw1yZwB/3pZVY2frLFTFunVtNxZMZlM5nSLCfo5Zj5XzxTQLKBvHmyb30SscvAIwFj8srOtkj8oJM+XeNZEC25LNGufGuvZL4/BlOkVp/d+MAikclW1I9lRpXxwOeYuR/MN68k/YDqLKkLrjM5/mt2CHA/RRK3SO3onLBwyP2kWxskaxPGC0e5UqLyhWyzrfTyrhlX3GYcqKcuCt2lRW4Iyl7K6hewN0djMyDRkwMV8nYrJZokUGRuUxUtVWpKKTIaPNb28RVMpcNu/Hq26PkdVe4Wf89olkoHWQV6ZRTgQL1eaYFGNZkPE5aKxmbbBgNQ5WdcXBxqwbfYb9hahKn6kGzTiJbBWCg0sC43Zw0RrcxbFStt4/4cggWq+TwqtdZGv8Eq1g1Pa8q7F5bpNG7hYVOIcSOlosl2vIfAWzsgs7HFDlPudUZR3ShZ4/Z6Wku23RF82PPNbbem49GtPbis3mWR0ZijBWTatntrmO2ECVqtacSgpSYZH5irfdHg28Xfrqbm+XHiw0XXBD8E0XCtuZx6VdbGfez6mEbBreEHS0V+VH90MV5nRJywbbOL8PS3wnjMB54pDwXZwnGQrr0FL67Dideov03lxtQRkbtFxZ1PaDMuv7ZitT+OvG7byZKpKpiRIkNlx7bFT9NdcsG06SRAmEUH47i+YaVscSMD7HcKGhqm2LLh0oxPjkcSJeQUS3AMYbM4mZm9oTbIQmqbYDCkSSyTftGKEpWvcT8Ziv1YfExhvfSOWPfZ/9
K13zfaSQtzzfjfmjna538qhKZH+BpL9aX+uGZu9GmaXSslQaFgYlxaBAZTnVWo5KNUIn2E7m+790Uh/TLu3TS+dY7CiO7qfsWsi485E9jHUMlkfAHbBVga2KDW0+2yhWd2GKoc/5qwqUPhapJFIKVvOKbqoCAb8HUiQqM8M5hk5QWYozFt33iluZaKo539QU1clJP6O7bZHOljnU/FZEfx6687i0h0s6jIxvCK2sH7fH1H6i1eghWk1V7YxY8NULIUEf/Iaq2dxqhhlmmGGG2xK++93v8qhHPYpjjjmG5z73uTzqUY9ifn7+UK/W7Q6nnXYal112GX/wB3/Au971Lt72trfx/Oc//zbnUDMD3Ote96pzfG8PuN2Qfj5wfP1z67LYaNRBsdAObEj4Qe04NBU+V5AxMkHs6SaqXjm7vmv+EENY4y2nBste4TdY8aRXVeJiEDt9/2KdeAKrZfNUOElhbFD3xUKOq4t14LtxrQOpBNSZG966KwuFqUj+aULmnqmanD2gzoVzulELKN/97WpbzfAYul2FJLQoBwRi1lYFrixqOyui7ec0hOddBdCQoS4QgE4qXyytSkRZeBsynSAyiJmRKJ8Z6HR6SPf9wbr01GeC8jfqzlYI5JhqDlv59akKv30IxeO4ru2sTtgzmd5+3zob0XZFq7UsZ72VXbC0jQpQnIWqavZ767gQOkUoVRO4whpfwA4krtOpt9MKx2qk5Yx1vmhGzMoRrYwarzpzNV05XcUREYt2iQ8OBCxJyEGJhBT4TVcaS6Ik4ChMzYtinS8cSuFwwhdthEr9eFpF4jStx1InBEKliHmJyvrYTubPx7xPZgwqSxGBpFN5AazijKNsDaDOWJwR2LKiyosm/y8fkRQVVapDh70vbMX8Jr8P1u97mWhfzEp0Yzkc1Lcxa1SkGdIaZKfrSYm0i7OGTqeP1hopHNo6lPA/xjmkUDhNsBWzY3yjlI1VsZ5ycfAKHBdyiCwuvd1cag8Yb37zm3n961/P9u3bue9978sb3/hGfvEXf3Hqa9/97nfz7Gc/e+y5TqdDnuf1/51zvPSlL+Ud73gHu3fv5sEPfjBvfetbufvd736rfo/bNdzEeDvFMQFYN19SsfFi2iIJY3mbxA/Lrkk8ogrJK8Xa90gb2XpOGyKjkuxwucVSQWUX84jrrDz8+KazFDffRyiJSjXpQg/V6yH788jQXOVUgkFSWePVThtcBv12araXz0H0pE+qPAHX5LkGG9dwnXNVUa8X1vpmKvBjvApzviRrSF4Ya3rwhK1fMSVbhF8ggZTUmDo3143ZgTZ2+v6q4+LnClmrz4VOvFVgbMIKYz2hAasmo+XPxnjrLfEFmVZYa+prPoTtaR2lCASgbRqNqMmy8Xubdm6wEDBpQmBd3DOiyVSH9XOyNtEf7T2jrafx8z0n9fR7oNisKTUmEEPRMcS7OIw3REX1oqQhKv26eoLQWwV74o/4zaWk8Red/GwN2taNfN7NYN9GkqGxlA7Kobf4tGWFKfyPqspaRVtb6OoOwpQoLFoJMi2ZSxWLvZTFXsHOLKHsGvJuH53NBfJvY5vPCFPkVMUQNepSFQXWJJjK1VnFZoMJpiPMn+vmNlm7LqhEY1ONzjpj6mTwc7Fo+677GbLXq+dftfOCSv06tJrforeOjI0dgc5dt4dke53COf4zcq7vDWZzqxlmmGGGGSbxpS99icc97nE86EEP4oMf/CCrq6scc8wxh3q1brdYXFzkne98J49//OM5++yz+dznPsfDH/7wQ71aM+wHtm3bdqhXYdNw258tt7PRJgi++Pukqq/dFT5p5zlpNxRhiZ3loVAx2ekaH6fdBE8rgjmLKPOxAsohhymwK7uwK7ux+YByabm+qUtCVyd4RY3IerWarkrnGIUMrMK6dcWo4LIYikN+w3mbLk84+E50XzxPJN5ys2oImJqUgQniL/FkS4uQQPu8CKc7vpg4Le+xvW+iiq9N6HSyOkPQlYkne8IN
tgu2n1iDzfOahJCJRiivMEInyE4XV+SINEOBtwJVaa1scLrZngcbB03lF2AdPuNQSERpcdiGkKsJt2J8nSLRFzFpI2Wb82yPJPu0c5OW6sR5CzvhHM6YuhjrLV5tXZgZI4IDySt06guQIUNSdHqebE46OJWCShEqgahYCCrE9lgTxyZf0PKEnHGCwrPmCJjaiS0FPlMnVbVqtv06NaU4HnNbIlFVSRceffaRDpafvpCofWBQVP25xB83SSA3TR9RFYjeFpL5rdjhAJH1UIMVkv4NJP2MMth3mrJCD3KqvMQa53P9gvWVKgxCCapBHiztdJ1nBWCN9SShDY8t8k9I322tshQZHxONSn03ukq0V8tkKbLbR67uRmR91Najkb05XNKDKkeqFJXN47SmtDAyFhNUBXG/WGR9TWgX1OsMRxqFTVupGQuVHXPwL7VCyJqAvbWWv694//vfzwte8ALe9ra38aAHPYi//uu/5swzz+Tyyy/fsJtqYWGByy+/vPW548f26173Ov7mb/6Gv//7v+fEE0/kT//0TznzzDP53ve+R5YdunH2tgzhzPiY2c7tiwpnmjEsoq2sqZcVf7dVQ/zVn9Oo/VRN/okYfTqGtuMCNHO1SXtJE5oo0pYK91Aj05JMg9y5hBvsxg5W6jlFOu8Vfp0tc7XCWc5vqccqMX8EJpvHJRlFyDVrK3TafIRoWTZqJWpSqKMDAWeL4HQQ8pqDorO2cLf+uuvKYl2Ti+3MYTt9CrQf/wAV5me6Nf9OdbjWOQu2bJp2AKfx15bY+OIcwlIfRPG7RPs/LbVvQsHPvUSGV2FJ6YkTY4IKKYHwuba39fCZU9+K8ISuo6slSliMlRjnSJT/vxQEi09LaSWltbSzNUX7fJqW2RkgEY0Ss628hUAct8aKtuqvNbcifkZUBzvrFX/xM2sVl3dNQGqvureOYenIjW3Iv9BQI0P2d5MPKTDWIVQgxKXwEX/W1eRRvHakKh3LCR+zFg1KMpn1ER0/f1CpQigx1W1hGgbGsVQajhyUjJZG6ExTDoaoVAfL3iFSSu8wEqMCTIEoczKV0UsUW7sJx23JMNZy41KOkAJjjsOWJUIphjuv26t1qYarKJ1iiiFV0cUYW48dkZhPpBjLJvaNmdpb+dq0Ju5ExxN5CWCCStlmKUmYl6k08TmIvYx025G+aWF+C6K34O8X0x4VksIYCtOMY7Gx1qv8Qr52WKf6WGq7a0TreSFx6hC4KMzmVrO51QwzzDDDbQAf/OAHefrTn85Tn/pU3vGOd5AkCXNzc4d6tX4m8PjHP57TTz+dz372s4d6VWaY4TZO+k0rSk0j++Jjyz4vWrVNkn2wnvCLz7WJPyEEwsVO5/HPdthGzTRl8u7q6rAn/pDykJJAEXbuKNTdHoD7zpdgbaUusAOe0NLNzR8hjN1J5VV9gTit7+/DNorkHvjigQoujDGvL1UhE9FWiDJvLBZrsmEDq9QWaVQ/hfTFBFtBOaWIEbZ3JBPrnEKpagsr/5b1GRHR6gprPTFUFpAPGtJQJ3Xen4z2njG8PvPkqCdJu94a6xDiYNdBx+x5gtVq3UkcCD9ahchYSIoqA6H8/nCtG9ImM2YPykxa6rwJhZgDr2xokbhR8UnIlGsUoEXz/nCj7/MsU0Q+8Pu6yBFZHgojvbqrWehAdkbSLxS2IBRV4nNKY4Lirs7iEb7AFbP/1iF06VvZdO1PwoYu6phzGDu7q7qqHmyyRKNodsKfx1pKpEqbTupQoPcZh4kfA0xUqyTIqvDnEZCBt4Gyliof1etjyopqaLHGYUtDlQcbsA3sjDxBaAP55zBls69V4gtxSVcjE0XS1ejME346S5GpJp3vo7spupeTWoMc5QipkNYg5kBKjUssokxApyQq9RasLbszI1pFTyLpJ2rrqUmC1Tqv5jTWW4xJ3FQ14O0Fy8vLY//vdDp0Op2pr/2rv/orfud3fqfuMH/b297GJz7xCd75znfyf/7P/5n6HiEE
xx577NS/Oef467/+a17ykpfw+Mc/HoD3vOc9HHPMMXz0ox/lqU996v5+rZ9tTFxbgSYvM/wYt94VYZq6ruYK2pl/02yWhVxH7N3iarZ/nyDlXevvh/rsi+SjKFYxSztrC02dpb4oHpTjddNQfwGR9RD9LdhsHpd0MUiccyjZKBkjRHRNaDVSaTle1NeCmvCjNb+q0Zr3uEBIejItCUqd6ZaZ0Q5bxmvytIytqBYaI4MhRjs7J0C6sQxAJQV1QKTwSkP/XX0Qe61CDLD9bbh07meC8AN/THWUZFBWlHY8l1grQYaCymf6+mNA1tcuKUQTbUBzDxMVtz6T2P/HuDAHEXFe0lz7arStPVuEH1XVzMGkAh3t3IPzRRL2ldRjebwGSWFtsNu2IR/c5+xGJEpgnfAOBg7vIhIynOO9iGlL/wBBIAuVROkUUer19xYbwO0t4xewWllWK0sxKEgHCdUgp8o6VHmBCtl+481rfn4V3U7mU80R3YRiIeOoxQxnHeWoolg7EoBi5WZMke9hDcZhW/NkFe7NOlqipHeFiMq6eM/s57ApqAqZ9RBFjiiCawKQlBUy0dgyzOGkbOZevR5yfguyN4+YW/RRCzrzdq3BMt20CD8IYxhNBqmadi0Jql+E8qpRIepx4faI2dxqhhlmmGGG/cUb3/hGnve85/GiF72IV73qVTOLyUOAo446iqc97WmHejVmmOF2QPq1sQcLz0j2QaOcmVZI36NoqEX8RUuS+kY12lbtIfOhJi3GVE4GZwUcDqRfbyu2u+itkiaJlGB1Ex8ncxLjto5oq2BigSDmhyUh60U4iyiHwU6xRFQ5MXuvVoBNs0GdRqRGUsVawN9A18RQmywyZW1nFRVcQqlQmEw8wRRto0KOYbOBjCeBhgNPBElV25+i/Y1nJPzqn0j4RRvUJPuZsqMZm16EPA6EV2lFmydP8hpEuYYbrnrCbRgyS2w4r2J2YgtTCb8Wuef2QAb61wYSt03oRqIvkH7Rlskvr1GX6Sz3RdrME7uuyJExX6jnyS+RpLgqRQRyvC58xmO1CnZXgXQWKgUlG5JcQClA2GYMi2h3rtfZJxN2eLHoZSyUeKJPSVEr/qpgLyrAk1zSURqwktjdEJYv66K9lKEOq6wnayvllTpCoua3YgAZbOLSZA1bVsiBHlPpVcMq5PytJ/OccbXdpy39303RvKZdeJOJVwSmcwkykaT9FN3V6EyTLXZ8Qaqo0P2MpKhQacj2DJakUiq/zaF+RMiQa+jQEoTzdoNuovwk8EXWSZLDQU34Cen8+8PYd7BxsHJnTjjhhLHnX/rSl/Kyl71s3euLouDSSy/lRS96Uf2clJIzzjiDSy65ZMPPWV1d5c53vjPWWu5///vz6le/mp//+Z8H4Morr2T79u2cccYZ9esXFxd50IMexCWXXDIrTG0W4jVXTsyxJiZMNpBPcS4QD/uN1OWufS2P+a4hK26qypCWG8NerPaerNkPJqKayq0uYVd3Y/Pcq/o6mSf4QpNQJNhE1gOVBsKvh0uyurlK+vamsNym4UMJPybV8ywVyb+WxWacU9mG8JskzwDGMoyVqlV0fu7qX1KrtMA3a0VXhvbcMDowtBrj4j6pbSTxhX7houIoZhFCzPgTUvtM5LAMp6p6XijC97KdeVy2sLk77jCGEs29SK36lJ6US8JBn6HQyhNjHS3r+TiM21+2IWjmFS7YZ5qJ87l+7WRzwJjaz+HajVXS+OMkbTUB2MoTS+H/TulgYesoQ75uYR3DkK87qsZnQT7C0FuXjyl/HRByI9sNnpHUVAJUyKV0cX7TxmTDw15m+bUxNI6BsVR5FWw+C6p8hC09ESpatvG12tLZmqjvpYq5VLOlazl6oUNRWUbDkrW5Ps4akt4izpq9svmMhF88DaOKTgUyOGk1Z7qoso2Z47ryrhXRJr3TxQJJv6ydHGTIIpVZVo9lcm4LstsPlsAdnO4Eq3Q/JzVu/HY5CueahqpgITtp6R4b
B0V4k5rSZHArYza3ms2tZphhhhkOV1hredGLXsRf/uVf8qY3vYnf+73fO9SrNMMMMxxi3KbZh5oUahcYiHZ5/jXxZrit7tvjMlsdrJOkoGvdVAoatZ8T+MK+swilwVSN2m/dB7Q6nfcm++8QIJJ70dJTKOWtbrr9kJmlGuLSVqFZ15sAGSfGtrWSUc0nSKm8JWDuc20wFaIaeZWfKccIF/A3wzWxE0mfaO0ENSkn0swTLCpFON0iVKNSsFU8jFl+RT5e2GovV6c16SeSkNEmhC9ilAUuzXyeX5qtWy/ZW/AFMpXidIaN2YcxX3BPeUe3wr48nOAAK3T9RZWyOFshqxxhK9zKLszO7djBsi+MFmVdUGhjmq1O+wZ8T/lv02BbhJQtK68uKytsUeGsxRRlvSwhZbCS7Hirpt4AlWpU1sFGlUa336hiO+G5zJ87Tipf5IrKhVhYcQ6sRScZSkkkYKQgdb745QKRVH/flsqsVs+GgUsIUZOEhfEd1SMTyD/rrfe8JRbrMn1ih3xpHVY0lksR0dZSCYnS3prYOVfbLckFUStlXT4gswaZ6LH9wnKOM5YqLylWS6q8ohiUmMIX9obGYhzkoRO8sP7HtIqbSojavm9hp6SrJPOZJuknJF1NtiVDdzXdrSPShS4mL1CpJjHWK2h0iut0vbJU6qAu9WO4CqSnBbAu5I+2tn3cFlM65tpW0T8ruPrqq1lYaIrtG3Wi33TTTRhj1uUIHHPMMfzgBz+Y+p573OMevPOd7+Q+97kPS0tL/OVf/iWnn3463/3ud7njHe/I9u3b62VMLjP+bYb9wKRFuhBejROs1cxEM1XdqOBcTfwJJki3CfJuHdrWgJN/Cuvj1YB+OVFpGC14IRDx4fFwvJ6apZ2YpZ2UazmdLEP2FlDbjkX2FnDZXK10ssEi2nX6PgMLSWWasUVJQUpDzkSiTwaFnwxKGRUJP1OMqfxEtF101s9XKumbn2RsTAmNT53QuBSb0pwlGbv+BtLE+fxlUY58g4vS/j2xKSwQLCbMzeM+q7+PEKTKK9eUAIUNB5b0jVLGW/mh/ZyhTQjXebzVCDFc8o1rP0NQIUtxFIgxJbxKDKCjJYkS9BJFV0vSkOtY3+NAcw8TlhcFlm0IAdKJ5vexP04oOMN57IypG+vG1H7WIDv+2HMyKLbCnAipqcL13kcGOIalZXVkKK0lr6IVsFcgKiHoOG9rmmlFZUM2cZi3GNY3eSrpavvETCVeYScVwrTGH1uFxq8y5A477D4q/YbGK/1GywU601RrOaafUeUFbpTjdBrIv9CgaH0TaBrseHtGcGQvQQq485F9lBQUlaUcGXQiKfMTSLpzrO28do+KP5V20eFHaYlOJN1U0UsV3aQhheMxYZyjtH5sSdOuVwaYKoyrfv+JIsfqFGmNLyJIhUiSuglOdDLkwhH+Hqi3FZf2qZIewyKqN/3cblKJrSZI/7ExPFrDAkjCftM4dcuk520Vs7nVDDPMMMMM+4KiKHjOc57Dhz70IT74wQ/yhCc84VCv0gyHKZxz/OhHP+IrX/kKZ511FvPz84d6lWa4FXGbJv0m0Vb4QdOF3ubWNrptW9fBOuU5u8FrmzwC2dgWOlsTRcB4tzOEQlrz+2GFaHcZVFaR/BLa55MJpRobTasRtiKRGucEikiwNoWnjhJgCuRogKhyRDkKqr4KRkPfrVrkuJEn4lyR192dru5QVeFmL6i+pPJZGEkg6LRfDzdlWzaZIp5EbBcgRPsMmFD4TbOVJKyD0Cl0QWZ96HTDB0nflS+EL3oFa8Sa3NnDfj4cC5R7QhWIoUnFmZ74IpMK0CK8RwiBlL7D2pO+BWY4wA6WMUs7yXcuU+UFpmUN2UbcN1F5N0n6tYm/qUqGDWAn8uMiARgRO5pVWaESjTUWlWhUUaHSETLRPssxkH7R4tVa67P/0gyR2lrtF8cLF4gnbIWQGi1lrbSDptEgblM/PvkCiQ6Fbt1SnhknagKv3alvQ/e7
E27MEq49ptlQTHM0ypL6b8IX9l34e7RYEtHOtJX/gjWINENnJVWqkalG5M2+MkXTBT9YKymsYxBIv8I6hqYh/IpAAMbvoYTP7EqlYJgousoyNJbFwpDlGiEFzlpUqlCpoko0pqhQZeXtW4Odq3CuzhQDasJBEAqbITNo7NiLBEfcJvG4m2wSuaWD7VbGwepGX1hYGCtMbSZOO+00TjvttPr/p59+Ove61714+9vfzitf+cpb5TNngProbqu2gsqvVtTQHPNt4m8a1v1p2rWwLrhPqIeEbAvM6nnWntAmMA4nuDjPCRBJ4huFOr3a+tsJCToL+WZZTZTZYIUnRFTHibrpI479tbNC/XeCk0JL5TcFQipP1kVngzDHiWN5TcoQxsi4/ybzkZ1FOOmbZKZY70eyeNqYGufcY+qelsLUOQVuPZk7ptTaS6vGwx7TvsfEOWPc+DWmNFHtFyxdlWAuVejQeJcpWVsnRrSXGOcVU63E4yps8Puev4pp5vOAsMGaVarx66+UoaEgNih5S8+8spTWUprG+twKoLLIYE0ZtghdrUD69wshMLj1zQnWZ/8Z6efpmHJyhX1jYLB2t2WFDU4E+5IRapyf75rCYEsbHqtmfjqRJV4r/YRv4NKBrM0ry0KmWeylLPYSlvsJxlg6c0fgrCUthhtafao0Q3e6qLSL1CH7WElS7X8SFdS1Y/M/v+6VdSTBrt1VI09QdipEtw9S+WMnnqfBAUaE+a7Ierh0ziv8gsNJVc/hnHfJCYrCmCU47fpRN0MKCZgxgnnSbeZgYja3mmGGGWaY4XDD8vIyT3ziE7n00kv57Gc/y4Mf/OBDvUozHEZwznHFFVfw+c9/ni984Qt8/vOfZ/v27bzvfe+bEX4/A7htk37W+W6/KfZPk5aeEaL1urY1W1RtTL3xaBWao4JNiik6vlZXc7RrcWyQ66dTr/A5DBHJijFSLev7mzqd4oxBqMqr3QA58qqlTiS2ZFDUVYW30yxWEVWJGC17W80ix4acNDcaenVfPvDdrxtkqI2tXyhOyW4fkfU8iSQVpDTWiUGVUFtZmQKXr419x3V2kbHgFfdjsF+N6+yguUmXCtlbwC4eg1k8vrYmdWmv+cy6YHXbPs2mYVBaduemLjBqJegnkvlkfH+VFgpj6xv7tdLWXf2pDBavxRC3uhuzczvDa64m37nMyk93UOUFo+UCU5gx20epJEIJpBLIRCGVQKXNvrTGBbLOF1p8sWU98eff65fllxvGACUDabS++qUSf7Of9n1mXNLLEEoiU43OOl4JmGpPBGYp6ZZg3Ta/xT/2F7yVqU5Dt3nqi1628v8POUtKalSwMzNS1IqWqKKVreKubhV343mjomJECrCgYngSsXDVKvy1isWRTIyIxB8Tz4MfZ+PnOIAk8/bH4C0+k9QT+FKRlBXVsMAWFVIJrPHFsGJQMlgr2TEyrFaWmwv/eEulW+t8kXNgHLtCfufWRHJEqlgsDMcVhrQfrDulQCYak48wqfZFyKhAMAahmzydNoTwTQxxU22YVxavN8I/YVsFeueaJoifdRx55JEopdixY8fY8zt27NgwV2YSSZJwv/vdjx/96EcA9ft27NjBcccdN7bMk08+eXNW/GcQtbIvNCtFlboLKmFjG+Imjkduilx97Jy5JSIj/r2tQgvFXmdFbRvpdGisChl3Y01dLpzvhxnZB4Cz2MEyxcoazoQmkKyP6y7gOn1s2vdzhZDxbJxXascxvz2ExNy+mIlcF87D36UI5Jyp6nlYVPgB6y3zpPLrIxXoFNn1cyS19WhcNuftyWMWccvCEUBURUMoblSAjwpRu/77RHIyqjPj3HnSwQOdIkw1Zitak4zxucOtgW5/EFSTANHu3tsjjkcAVNYxKC2FsT63N5wImZIsZIqOkmzNFKkSaFeBC/tepTia2Ot1Sn/Gr2ER0XKxPt7aBH2L4B0jYetM5Wb+5a3xJTI0D3qrRoGTOswXHbnxirBhZRhVXuE3
qqx3IGjNR5JgV54oSS9RWBuy/rREinGFX/yuTkBlvVLQKQ0q8Q4t9bpW2NHQ35vka775rPBzycn83j3B4km/vLJ0wzzUFFWw97RM5kmLjvEqVlOQqpQsfCeAXcMEswDDomI19+4XVWlIsg5SJyTZHFUxpBqu1k2NKu2iO13S+SNIewt0uimdbkLS0XRTRTfV3tpTyXpuZ/HzzMLE/GeJlilpdxGkRkjl7d5NiY0W/FIiO926SSG6mti0DzqlUllQbVrykM1YWb/8mH8c57Kidb2oR6hg+xr3TWyqbY63g2/vebhhNreaYYbNw/e+9z36/T53vvOdD/WqzDDDXuP666/nMY95DDfffDNf/vKXude97nWoV2mGwwzGGP75n/+ZP/uzP6ufe+tb38qTn/zkQ7hWMxws3LbZCCnGuv2mFVbHrek2vmGbVG+M/7H5VdXkYKOqGSswtYm/jbqO7R6KI4cQcvVG1GAnVZHX1k51MUgntTUPIb9FhOwupA5qrVZRKKi3MBWyHHrSbXXJk3qlz8FzVVFb6MTfY64aeMVW7HisCbqg+IsWgvGm2b+oZUcWildOSESZI5TPViMWvUJxov0+F8m5YL3okqwpbE1mlwBWalzaxwGVyrASjImVz6Ruw07cFKuaCUyplx6WqBwsjwxFONmioiBTcqwg4oDcuDqbRVrqbLNECjpKIPJlZDGApRswN15LecN1rF57I8MbdrN01W6KQcFoeUQ1NFhjsWWz/YUSCOnJPqlETdwBdQZclVeMChNUY94ysgwqNoCkpRZLQ8e4zrTvhk4jsegJwPi7SiUyUVR5hUokZq5AJhqZaKo09yrAborOOqiyCvl/hT9/wBPp8VyKx5S1IFvFMwuCqlbSqbqIFtLlwvpHxWSzUcbVOdFGrSmSRXJvfKBsMlT2/jiw4ZhuSH5P/qE0zqY+98VaT66XJTrzJKlMdU3YAriY2ROssJarDcbMvcCu0jI0XiGYScFWCsphh05p1mfytHIibRwLWmPyVIszJhWR4+esa6vMw/9NTf4dfNZPKrnOGnezl78vSNOUBzzgAVx88cX8xm/8BuBzBy6++GLOO++8vVqGMYZvf/vbPOYxjwHgxBNP5Nhjj+Xiiy+uC1HLy8t87Wtf45xzztmn9Zuhhckc0thEsL8Edrx23pISK1rdTVxvfYpdaxm3MH863K6laulaxM6rGe6+GWestzyurwWhQUmn/lFqRsZ/WzOxsWVQAUXFVkeNq3TqeUQkjqKlpymmzmHWEWUhz1mmmVftzG/Fdvpehdi+vtjKLy82dpW5329x7lXnpYn6fbW9osAr2WnGxTifrsfXdqPUhKrQWyGW9d+cVJB427tpTg+3KcScxTCnBnx2rlT13Vok5aKSrAgkihJe3ZcqyVyi6CWCfrWCyEeIYq0+l106h1MJadpbd42L53dU1bcbVqTw2YvRMnaMTJ5Q5iI1aE8cU5Vjn1PP3SNa44sJar7K+HljGQhAT/bBqLLeftI4lPTEX2YkHSWx1lFqSaZlbf3ZblRSUbUvHRaHsQKSMGeRGoRvYvRZhLa2JrVF4/SwL0o/oFbp+vxiVztPWGNRUf0Y3UeCe4owFUqlaAmpElRWMtfRjIxlSy/liLkUYx3FQgelJHA0Ou0G0m8QDiOD6nSROqEzdwRJlpD1EzpdTbebMJ9peqny9q9BFRo1tMb6beScIK98w55TCZ3uIiK6lpgKkfab7xmej6Qf2scbVA6GlVdpFkG5aVpzISnEVFedZv7kG8tEaCyLWeC1Tc4hIvpnc6vZ3GqG2xe2b9/OP/3TP/EP//APHHHEEXzqU586oOVVVcVnPvMZfvjDH3L++eeP36vPMMMm4/LLL+dRj3oUi4uLXHLJJdzhDnc41Ks0w2EG5xwXXXQR7373u+vnXvWqV/G7v/u7h2R9qqpC69s2DXVbw216a7t4cxk6gdsqP/BF8XaJY0/X3LZN4dS/x3uMPT0XOxDb9iPrVtr6O+fD
EGqwk/JH3/T/CRaatEiK2u7SGm/xZyv/3WNXckup1M7qs6MhVCV2sOxvpCPhV4b8PmtqMtAFe8Va1WX9tqr3D/giQa2488X7Omcw/oSbTqdCUc1W3qbUlOO2U7GTfMJ+1XYXcdne26oMK1tnrrW72AUgEwkxW2cD3Famg9bB0ijsm9DxHe2jdOvO3ToYlrYuHIEv0nS1pKME2uTI4RIyX6K68VqqG69l9dqbGFx7EyvXrbL7qiXKQcnuYcnQOMqQ7RYRibtUNgRetH6MWXCrlSeShsaRb1CplkBXCbpK0lWCOS1JpSALRGBXeQskqSQykahUkXQ1VV6hM40pLSo8r1KFTDSqrDDDAt3PkEr6Im/XK96cTnCdbrC2muiSD8pSJyygPfEXCh6qHlNErSyLhdIxtAi/WDjx+8p3tgsBauJom1Q87ysB6IQMWaZlKFpX3uoz9XlQoioRnQydpZisg0o0MlE1UWscgaw7cGu23DpuHFUckSqUgLnSehI4jCliwr53nXXvBoiFqcnxf/Kwsq1CqaUh/GZCP48XvOAFPPOZz+SUU07hF3/xF/nrv/5rBoMBz372swF4xjOewfHHH89rXvMaAF7xildw6qmncre73Y3du3fz+te/nquuuorf/u3fBvz1+vnPfz6vetWruPvd786JJ57In/7pn3KHO9yhLn7NsB8I1nf1HCYUXdeTBK5+bBfYxwq5e0v4RbRfN2a9G/4JqlyLxE757MMRYufVDL/xeYoV7zYgVLDPDDmvKJ9J7IT0aifbKIXjeBy/oZYta0/rs/piZl6tgokqv0j4RRV4nJ9K6Z0Mwg71eX5hjqcTRKeL7M0Hi76eb4CC8SJ7UKEJE8ifqAyNx82ELSzx+0hBWk+1Nthv7blzm/SrG2Ma1VqdH3h7QNg/0aVCuNA8F9R59cvwxFxlPOGXV6Ym/DpaMJdKetKgdm1HVDludckTcGkHay3E/RlzpttuKTFv0THWMCRo8gMxBVRFc2zF90arb2cRQiKS6Vc+IVUz5xYSlK4tJQvjM+VGUd1nrCcBQ6ZfaS1Fqzmon2pyZWuysgz5v94ms/leiRJhfiPqhpyaVJq0igxuADbkOcf7kX1R+gG1naW3jfcOFO0MaZ/pZ5p9H2x4hfNkWxLm1r1EsZYoFrsJ2+a8g8FwVNX3qzqRlKM5qt5irfSTOkVpRdrV6ESR9RPSbsJcppnL/KPPfJQkssn08/vfW6MaB8KG/2tJJ51DR9Kvaqz36/usJGvGsDD/L2LzX/iJSvH6WJhC+EFUmoqw3WVzXMG4UlnNlH4wm1vNMMP+4N///d953etex2c+8xmstRxzzDFcdNFFqP0cV771rW/xnve8h/e+972MRiP++7//e0b4zXCr4pJLLuGxj30sJ598Mh/+8IdZXFw8oOUtLS3xxS9+kUc84hH0er0DXtaBrs8MB44f/vCHPO95z+OTn/wkz33uc3n5y1/OpZdeyotf/OJDtk5/8Rd/wf/3//1/s/HxIOI2TfqhUgyyZTM1rqio7dn2gHan8UZoK/rGuqXXSUEmChUbkX6HazeyrXDDQUPwpaopkHcyT5qlXv3nyQfrybSiZfVkCv975e0/XeWJPWdMQ/gVuSf4gmqwzu1LUoTyWR+qVZwHxohHvw7eclQfeRzlkXfFxkLWhNoJaApWbdUet9AVvo+WnD29cc7QvnYHH+6IOXBKCHqJ7xSeduO+FpR5Qgi2ZoquFqjBTuRwCXZvp7r2fyh23cDO7/6YtetvZtePd7F8zQqD5RHX5VVt9zg0lvJWYk0sMDCOwZTcv0x6EnAx8UTggpY1OZjNd9DdqPhT6K4m7SeotFmOyAuqYYGQEjscIAGXJP4cg5Dx1y5e+BxIITyp7aTyvytvYayCRZrwGxVoFYM3ymoSXsmnpLdR2kil025gGCveT+xXOfH6qEZ04M9PUl+Ut8Yvq9v353xvgaS/ii0rdD8jyUuSrkaGbbuZ50jp4ObCkEpBOay8
StTYYOUqm4aGiUYBJ0Q9JkwqmmIu4piyj0YV4SYe4++upZg42BBSIKbYI2/m8vcVT3nKU7jxxhv5sz/7M7Zv387JJ5/MJz/5SY455hgAfvrTnyJb67xr1y5+53d+h+3bt7N161Ye8IAH8JWvfIWTTjqpfs2f/MmfMBgMOPvss9m9eze/9Eu/xCc/+Umy7HZCBBwCODlO3MRjfRJSiJoYAD+eylbAX32ETCPy2jZttOZWE81TG0GKoK5tWbOnh6m1p8h6qC1HoEKmVzI/5+dUaYZTXlVpkIHc8Eqm2jMgNNa0ld1KBPeAUeHtok3pt0PMYnZ23NKzTfhBa9xzNaErdAJZv8nx62+l2nKHurFuDMqrEl3I/KK/rfVl5dRHKXy+837tHyGbz4T1jVu3FzjrFW8OkjIfI1baiOdiqiRKOvqJz+tLpaCbSPr5zajVG6l+/G3sYBm7toKc24Lsz6OPO9FbtoInT+N+FBICke7Pd1fHIMTsyI4SKFf5fO5yDVE1TXRRGewEIDrgnFcs6s74fVMcU6SubUudzhhVjmHpGBnLsLSsFBVrpWFYWtZKQ2kty3nJWmEYFlVN/M1lCb1UsZYZ5lNNogSjVNcKNhmIyo7zDVxKKkrrlYCVdUiV+ialtsUnnpBrZ0JLJesms71FVPq5kO0XmxrX79CQke5STwKaglRlpEpgnGA+VZRGM6osx23p0k39/clSP2WpXzAallSloSojmehQSiK1IO140m++n3LUQoctvYTjFjMWM818qvwcfkIxXOfv2UY9nCqLloJMJyiRotNePVeMjgZm5DDO1O81Lqj7wnLKVmOXz6SO1wkxll9tgjNIZX2jmhUCJaSfGwjpnTHifjoE8QmzudVsbnV7x0033cSRRx65Kcsqy5K1tbVNK/7/+7//O3/7t3/LjTfeyB//8R/zqEc9ar+Xdf/7358f/ehH3plGCN773vfutSXuJFZXV3nlK1/Jhz70IQA+9KEPcZe73GW/1w3g+9//Pne/+903RRGzN/XOvcVm79PNPN6KomA4HB62ZNM3v/lNjj/+eI466qgDXta//Mu/8JSnPIUnPOEJvPvd7yZN9z82aseOHTz96U/nc5/7HG9/+9sPmPD76Ec/yurqKk9/+tMPaDkAo9GIJEnGrmGHE2Ij/mZgdXWVqqrYsmXLAS8rz3Ne/vKX81d/9VecfPLJfO1rX+OBD3wgy8vL/OZv/uY+r/NmnqcXX3wx8/PzPO95z9uU5R3OyPOcJEn2u5lks3CbJv2imiXeAE8SfhHT5s/xPXt7wI8V151dlyW4rlAVCafWMkTr+cMKtkIOl3CD5ZqIw4aOb3wXeFT9eZuhiSJQ2B7NtnE+s68qPelnWhladoJciQX4WrkXCL2WGgfw6xKVh2nmCUKdYJMurjO35+83sc03u/7uO6A3eaGHEL6LfH2R1xMfrlb0eYux5rtPnmcW6tyhnhbI4RJq+Xrsrhsor/kRgx//hLUbdrHz+9cx2DFg+ZoVbhgULFeWG0PG20YKvYOB3DrywhcwusG+1DgLSHTuu9tVUuGMQyiBCbmCKvW2ni7ROGsxpc9xEUnpcyvTDEbSW31C02Uexq9o8SYsIFxQ+1F3OauJc28yUwdAChksOL0laCy/t3NJJ7GnXFPYgxq6pQaKhT6vvg0Wdp0MkSSorIPKCnSWorMEnSUkXU13raSrJKm0DMzm7O9IvNXrrmTLIlg2P5Md/lArmtwYmbF+o8TcWGPdGAE47ZCdZfo1OO+88za0nPr85z8/9v8LLriACy64YI/LE0Lwile8gle84hWbtYoziOnnxthLxDiZbVvWyRtiAxWfX8AtKAGnqb8mMC1385DCeoKEYoSQPkMWJWtiTaQZtuVWEQmXOJ5EW3DJeG5fzHAVwWod8EoYE1RXY6q48WtDOyc5ZjcKZ73qsJVZHIm2DbGPhNumzJMmCcXbIiZtVltEd8yWrRPDhfQK0AlLTBkIbhJqNVRUhSUmRw52
4pZupLrxWszaGuUgJwM/B986QGiNKPNA0kmErXBSI6X05HL4nPioZMgOxmd1YwpP+AXbWBfUck4Qjq/QCGREmBeMn9vRAtYTfh0KS60EG1WuzvDzeX6mVvmNKlvn2o0C6Weso6jC3CuSe9JiAmGWKIkVDmnjHMjSUapW+lkHst1QFWHNRMOQd39IBHvdhDY5D9kQbbVfOGelbprrUuVtSztaspD52/bVPKnHhFUtMZWlKk2dRS2VRGlJN2T4Lfa8SnBLL2UuVXQTRS9RZFqFxj2/LBcJPAuDMP+1zgUrUMnINHmi0FwHYtNt3Kax8akK2X1la24XhyDF9CYA6xwS706h8KrDZg4r6/PFr8BteCzYZMzmVj+7cM7xZ3/2Z7z4xS+m2+0e8PIe9rCH8f73v3+MBN5ffOxjH+MDH/gA73//+w94WQBKKb785S/znve8h4c97GEHtKx+v8997nMfAH7zN3+TRzziEfu9rLm5OZ75zGfyzW9+kzPPPJOzzjrrgNYN4Pd+7/fYvXs3b37zmzn99NMPaFmf+MQnuPTSS3npS196wOv1kY98hI985CP80z/90wEvC+CXf/mX+ehHP8o97nGPA17WRz7yET72sY9x4YUXHvCy3v/+93PZZZfx67/+69z//vc/IFIN/Hl6zjnncP311/Oxj32M+973vge0PGst559/Pn/xF39xwITY0UcfTZZlvPjFL+a5z33uAS0LoNvtbsr48dnPfpbnPe95fO5zn+Poo48+4OUBfPCDH+SJT3ziphB1S0tLnHHGGbz0pS/lsY997Kas27Of/WzufOc7c5/73If73ve+3Pe+9+U+97kPd73rXfeJONJa841vfIO3ve1tPPOZz6yPkYWFvXeya+O0007j05/+NCeeeOJ+vb+Ns88+m7vd7W4HvBxrLZ/97Gf53Oc+xxlnnME973lPjj/++ANa5oUXXkie5zznOc854PUD+MxnPsNLX/pS3vjGN/LgBz94U5a5P9gn0u81r3kNH/7wh/nBD35At9vl9NNP57Wvfe3YIJ3nOX/4h3/I+973PkajEWeeeSZvectb6m4z8B1n55xzDp/73Ofqi+RrXvOafe5kMS2bEF+K96gzr1rqlUkoMU7+3BJxOA3tHIv6XVOIv3Wv2WCdDhXk2i6q//hQQ8pZi5MSFYpAAmqizamkLhRMZq00qoDWDX4kEcGTeJpAHCaNrU38m1Se8NNpc1fYtpzSKbLbr9cBwCYHPrmdYT2WRobSujoH0xIzUqCfjB/XFsimVPMyJekmgnllSXb8ALtrO2uXfZXln1zPDd+6ip2X72T5piH/MyhqVd+tpeg7EOwqLUuhg3pO+0548opu6Y9f3W0uwElXo7saGTq6TVEipKTKgwVuOK5FJ/Pd3Z3M21+mBSQp2MwXb6Wqu+8F4KxASFurWeuxxzbFXeFcbZklQle0kromt5oGCdGyUBof8/YGnuwStfot2mO5GIkJoH0mocx6voDVW8AOB6TGkC70ccaSDQqKgd8uR4btW9hy04i/CJ/LKOvsRaHT+scrDmJ2ma5VFjEzCSJJun6d4jPGNqoIY/250s5COlQQUdl4Ky5/hs3B4Ta3mkb4RTtbKQIJvodxI441NQk3TdUHDSE1SYRMKeRGRd+YGnffvtVBh8xXcN/6LMVwgKtKVK+H0AlqcRtqcRt053G6g0syb0sYVDF1/p0UtSJG+wtBUPFVtRVgbXXprFdaR0xuQynHVDFCSrA08ynhCR83RfU+w+ZBjFZrslZYvx9d22ZTV6jUW6q6JFtHvAogMTmJqeiakG3oLKLKEeUIOVqh/OkVVDuvZ+mHP6XKR9jCK0w7W3Lk4jYUIHRWz0dqSkXq2jY8Du9KeEVhKkHkK4hqhMxXEFUOIX9b6KAMjmo/Fb5P0rpyxkbIaOsZSL/CeleIYWVZKy1LI6/wi49rpWFYGNYKw9JawUruSb9h4Y/Tbqropqp+zXymKY0j017B1lESKYVXT4ab
wo6WCEFQ/IHSKaLUNbla3we1lHlSNfnP5V7OUWIThNub17eta4PFZyIFRvpGu7lUk1cWax0dJVFSMCwM2+YKhoWhCKRoREf718xlmlQrtvQSFjJNL1Fs66XMp4otWUJHewJPBdvTaMc5rAw352VQWwa1oxBBQSlbeYDrx2obBrDxTGn/GVIKEidAQRJe31iLhqZD8M1qVmCF30eEvjgnY/qg/1zDwZ+HzOZWMxxuEELwZ3/2Z4xG09Xh+4pvf/vbm7IcgCc96Uk86UlP2rTl/cqv/Apf//rXOe644w54WUmS8OEPf5gPfvCDm2Jb++u//uts2bKFBz7wgQe8LIB//Md/5A53uMOmkBNnnnkmxx57LMvLy/td9I948pOfzJOf/OQDXqeI73//+5u2rKc85Sk85SlP2ZRlXXTRRbznPe/hc5/7HF/5ylcOeHmrq6vc+973Jssynv3sZ/PqV7/6gJSqv/Ebv7FpdstCCP7+7/+erVu3bsryzjzzzE1ZzhlnnMF3vvMdb02/SbjrXe+6acq8fr/PL/3SL/G4xz2OV7ziFbz4xS+mLEvSNN2vz3jCE57AXe5yF771rW/xrW99i4suuojXv/71jEYjer0e9773vcfIwHvf+957VLV+5jOfOZCvN4Yrrrhi07bb0572tE1ZjpSSV7/61XzhC1/gDW94AzfddNMBL/Of//mfOeKIIzaN9HvYwx7GQx/6UPr9/i2/+FbEPlWCvvCFL3DuuefywAc+kKqqePGLX8wjH/lIvve979Vf5A/+4A/4xCc+wQc+8AEWFxc577zzOOuss/jyl78M+LDoX/u1X+PYY4/lK1/5Ctdffz3PeMYzSJKEV7/61fu08nsqVk8SfntS+7X/3lZ07Ik0nMSYZeQUa6pYADtcINd2eZtFU8DqzbiozDPGk2ztjpGguos361GJVNtqtTOrbMj060pfENAJoipDdoX1xKIxvkt4bIUkIknDZzWfXSv+0gy0xknt8/ZChotTCfuLpnd0c153IDjQz9iX98fXLhWW1cJyVE+3MnbWQwhIwg17O18o5gpNnieiWCOxFccoiRyuIAc7Ka/4b/JrruL6L3+bXT/exbXf38kVqwXb8+qwJPomYYGhsb4AJkWdLxgVf1JVdUadziqkkpiiQpUVVklsUVEBQoZMJ+ttbEVVIIxBhmyXWHirC30hH0lEot3YJrcpLqetsoXxgr2swqP/v2r9zbQK+HEs3dsplQwd3fHiHxWITunaokwoCyr1DQOdrLb6TOd7OGPI1nKqvEIqwbblAhWUpNvz6oCJvzkt6SvZkLCpRiiJSnUzvsUieL2t/KO1sdvdtYjS9YiZR/E1zk0n/PY1I3GGnz0cbnMrYGwOI6C2+otzGedrsmN8eHxOuJaqa4LwA6aSfWJvmg9qK15ZK1qscySysYo71BCjVUQ59Fl3owFG+zmKq4pa3SfntyDmFrFpH5dkGCTG2mAHOL4dRCRO49wq5n5F4qg997TVekVkVDSPLTTO4/AqLwCnsb0+KH9bcLvJyTsUCITetG0onGsIv/o63uTixWxfm3bX28zbClEVyHwJUQwRa7vrZj23toId5VRry1Q3XEu+c5liedDKjwsEVni9dOH/bUWos/46LoBg266EIBUWUeT+uC7zmvBzRe7n7gC6RdzH+wOdNg2C4bu0HVqK0lJZn4ntf0xN9MWf1bxiJa8YFhW710qGhWFpraytIk0gywFSLetH6xyJFRjr6rEhC44NpfFNbMY6jBTrzxFrQwRBk78H0dliYtDbA+LrxT5KXIVzIRdRhWw/MCHbr7QOGb7PqEV4VtaNZR2mQRnYTRWZlvSSRt13RLBE7WqvIgybjUj3O6A0jtVAvN48LOtlqzDW9lNFIuWYherkEBzXM5Kf0XJVJQob9o3ew7gdFX9OjF9b2m+5Ddw+zDDDQUGSJCTJ/tdEbkvYDMKvjc0kJX/5l39505Z1oIqVNpIk4ZRTTtm05f0sICrxfv/3
f39Tljc/P8/f/u3fbsqybg0cccQRh3oVpkIIsanWiPe73/02bVlaay644ALud7/7cfbZZ/Pf//3fnHXWWTjn9svadHFxkYc+9KE89KEPrZ+rqoorrriCb33rW1x22WV861vf4hOf+ATXXXcdAHe5y11qNWBbFfja176Wsix5/vOfvyl2oYdr/t5jHvMYvvCFL/DIRz7ygG1pAS6//HKe9axnHfiKBczN3YIj4UHCPpF+n/zkJ8f+/+53v5ujjz6aSy+9lIc85CEsLS3xd3/3d1x44YU8/OEPB+Bd73oX97rXvfjqV7/Kqaeeyqc//Wm+973v8dnPfpZjjjmGk08+mVe+8pW88IUv5GUve9l+S6f31BfXvkEQU563bmNyMNz7judmifH8NjFZ0GphsmB8uJwucrATe83l2LUV3CgPXa0GW1be3idm6Unls150IPoi4RfseWDipsuomvQUUiO0hqoKRStTk3+uCsonaxs7z2jhObaiwU5UheKB1Nhs8ZYtPfcCe7svDsY+O9DP2Nf3O7ya78pdQxY7cyRRgTa5XBEKF63cNSl8xlAiW69vqziKATJfgd3bsUs7Ge28nl3f/C7LP9nOlZ+7ih/vyvnO8uZ0Ix5MFNZRWMfQWFLprYfS0G0ulKhzOExpKYcVMqmwRYVRXumngUoWvjPaWq9O7nT979YT4RIg7dTb1cUGgkD8ATjhVX3tzvCxzJyIWGirH4U/d5V/TsV8JMaJv72BJ/zwdp6B/JPSn/MoC0bjnPWZPkna2NllfdIFf0Gu8gJbVAgpKFZL9HIBu4YoIbi5MNxU7J/ipK8Ei4liTkuSfoJKvMpPJdqrW3Ti10mqMKY0eX7R1rPJNWrIvYi2ShIasq/+fapqfL++ygw/Izic51YR7VL3+DxqPNcvIp4/SjBOINpxC8qa7Lslu7YWeRBVfvGttRLuMIAoBqjBTtzaMlSlH/eU8spunSC7feTCNlxnAdeZw6mUyrjauSLmWcmJZoGoChOmbG07O2YP2by4ZQ/ZJo7a2zY8H5tKcA7b27opc6ufdQhbIcpRo3hrI+4z0yL6TNVc000FqoC0u+69IuT8ybVd2N03UF73k7phzw4HVHmByQtGu1cpB0OKlTWE9M0uNpJXtiH4xlwCwu/eIUDU5IoWeMKvGiGLoW8ULAtc7tWrSD+fEWkG1s8pasJPZ3WuXVU1OXGxWaa0jsqEeVVpWR0Foi+QTZHwW1orGFWWpUD6jUYVxlicddjK1sTeal5hrBsjvIz1qsVoXy6lYFT5fLrKBhvKlqrZmSZr3JmG9IvE3b5a1O63pa2zKKGRwqGVIHHQS1TT5CAEpbF0lMRmCaW1tPhJEuVVeZlWdML2mE/973OpIpWCjpZB5RfG9tBsZaxjVFlWC8NKUXH97mGtJgQ/3nqFpSbVkl7qyclUyXo7x22uRJOt2NHSW65KiRTT55sWh3TCz4+FP1aEEwgCAUjjNDGZvTzDDDPMMMMMm4X73Oc+HHfccTzxiU881Ksyw2GOZzzjGZx00kk84QlP4F/+5V/o9/v8yq/8CieccMIBL1trzUknncRJJ500ppC76aabahLwW9/6Fh//+Md57WtfS1EU9Pt9tm3bxk9/+lPe8IY38Ed/9Ef8/u//PvPz8we8Pocbfu3Xfo0XvvCFPP7xjz/gZVVVxf/8z/9sitXw4YYDyvRbWloCms6ASy+9lLIsOeOMM+rX3POe9+ROd7oTl1xyCaeeeiqXXHIJ9773vccsqc4880zOOeccvvvd705l30ej0ZhlwfLy8obr1Gahxwg7aIoikZSCoH4ZL+SuIwnb76v/0NyMu/b/68K8HLOIg0Ov9pCDnQhTQFUgqhw5v9V36lalJ/bw5Kns9n03+tyWuiPd6awh+6I1z7TiXBIUeLEgZaoxKyPRLlZtgFgErG2AYmd/p4+ZO2p99/MM+4TcOK5ZLimMZbGTcMNaRVdLju0327WXyLqgWnd709hRYT1ZIpd3IEcr
mOuurNWaxU++T77jJlZ+uoPB9pvZ9ePdXHP5zVyXV1yxWhyib33giMUr4zwBiBQMjYXWV3LWkashSd8X2J2xqNz/bvIRut/FlhUqS0mswRQ5Mut7lV9Qv4oiR2R9RFKNK/6gGbf8hwVVQFUX8cbOHUCIqla0Cam91a6zzfkbx6tIdI1ZWjaFsmmoXyui8k94JWHI9BNOeyWus8j+QigQKnRVoLIVhPJkXLqwhkoVo+URnR0pR+zKGQ5KbioMQ+OtVduEq5mw9UuC+nJOSxYTxYKWHDeXkm3t0Fno0FnI0FmKTDUyneiCDQoH4Zw/rqVGSYlw4AS+xCRAODFG/k0Sfm5inLeRCBWgYI/d7LcWZhZUt10c6rnVWB213dDRmi+pcKBbAZJx4q9tATxVhd4m/CbmZdOcEmqXgTAOmkDGb5ShebBRz60iKSdkTcTY1d3Y0dC/bn4LcnEbdv4o71iQLZAHO70qEH7GORr9dJiTCRqipmUL2SZKRbRxjvOj2PABYwRPmzx1QnhVme7g+ttmc6vNQCD8RLGKrHJQKba3dewlwllENarnwjbt47SGqsClPVwaLGBM4Ym2SMgNdiLyVcqrvk91w7Us/2Q7o10rNdkHzbgfLa11lpIu9OlsmUPNzXlyTsrm/LMVzobGvKoAZdFSA+HaWBWNcrXy5J/NB7gix5WFb8yzFmlKf+wFMtMJSWGhMJbCOEbGK/JKCw5/rFfWUVpvS+mVfZbl3BN+O1cLVvOS1bxR+K0MCqrSUIyq+tA3lcW02K5RZT0BpRVrhfHElpYYC5mWWOtqm8+uFigpMdrPdermxDIQqaW3RbXG4oI68BZzSwMSQcgq9so2uSf2L95XTT4tCA12ItytS5RM6BlLpiWl8dsv2o22VcKZ9gRcL1EkStDVqm7Wi9ackfATQtRjqQhNHKW19X645uYhq3nFYFRhK4uQwucFJoo0kKuR/EuDrWg31Sgp6ucSKZnvKCopSZRFSkFpLdoqkA7lfPOZBKxw8ca6VpW7w6SpA2ZzqxlmmGGGnwXc5z734Xd/93d/ZhS0MxwYtmzZwp3udCeuueYalpaWePazn82nP/3pA85b3AhHHnkkD3/4w+tmYICyLLn88su57LLLeNnLXgbA7t27eclLXsIFF1zAC1/4Qp73vOcdcCPw4YSTTjqJE088cVMyFa+88krKspyRfm1Ya3n+85/Pgx/8YH7hF34BgO3bt5Om6ToJ6THHHMP27dvr17SLUvHv8W/T8JrXvIaXv/zl654XwoePR0VFO8tvoyLT2P9bBaa2fZWYfF/7cRrahF8rOyuqRiKsW2+FeFARSbjYKS5lbafpQr5ercrp9v1PGjJF2uq+SfuoNuL/pVf6ILXvDLYKrG4sqjbars42mYBCjpGNTq/PN7m9oQhWR+CPxblEbsqxUrkmv21U+bwOJfwNubG+47qNVIq6WBvPCZGvIUyJMEWt+FTD3VQ3XU/xk+/XHdE7v3tlTfatXLfK1btyLl8ZbXpW28FGu2Zjgr2lcd7qU5UGWfhjX2caIb1tZSz0VIMclzXHbtxWOjP+vJLKF5ukgo4nzkX4Iagw1mVpMq4ybtvj1eQfFiwIYXHWk1q1lZf0VqHTjq89kX31Z0TlZ/z/2B9l3RwQv5dIM6Q1uLyPtZZ03hfVZFBCquAxq1JFOijo7BpRlYbVypN+xkFZb/dY5KK2XE2EJ/76iaR3ZJekn5D2E1SWorLUF8VjdugkWuORiqrXFpnhbVAn/j9h4dnGZH1vbwuFM8xwOMytNkS7YYow57qFBoGxbL9py2s/3hJCM5Vf7vj1ZJoK8WChbqYyZWjCqDwpUuS44aBxNMj6yN4CJu3h0h6FZSzHz+IadeRGqJs9TIv0CxbrkqmkaZxz1dePVpapi+P07XxuVTk/t4qbtr9Jc6sx1w9oKfAtwhYb2g/W9p7tY1+noWlGNk1z5Vozb1++CbO0k+qGa1m99iZWr72RfNcaVV5hCoNUAp1p
0jl/zUv6GUk/I53vkfS7yKw/7qjRsvWsCeX4PNSf2zR7VThj6vsFgJjZ7YxBaNs0QQpZz5Mq67PiSusojbfObhNWpbXkIZOutNYThZX/WSt8pt+wNFT1j1f5eYtf7d0CSsOwaLLu/HklMVbVRCB4BdyosiTKUlrQwR40iar/+H2s8WTfRI7M3ir3Yv5ftPfcK6JoUtmJP6WVEFgBSnjCUsQ8O+0YVQKjmxw98GrGaKWZaYUUkAblnwq5fFo2Lh7BtGGdutg4bxk6LAyDUUUxLKlivp+SFIlCaUkayD+fH+hVhUXY5sZ6K1FSyCtPXlrrKI3FquZ+2eKbxyLxV19a9sSVTqzvDDPMMMMMM2wWjj766E2z9pzh9o+73e1uvOENb+Dtb387F154IRdffDFvectbOO+88w7aOiRJwi/8wi+wdetWHv3oR/Pbv/3b3PGOd+SEE07ghBNO4A53uMPtivADX4v48z//c4466qgDXtbll1+OUoq73vWum7Bmhxf2m/Q799xz+c53vsN//Md/bOb6TMWLXvQiXvCCF9T/X15e5oQTTmgItA2KTWO2nNPs79qYVNC03zfxuvpxshudhuyLBMsYz2EdQvhOy0MBpzs10SmM9tadaeYLU6PcE346QS5uQ/YXkFuOrIk2p5P1VlER00jAWqEnaqLB1cWqqs4SaWeSiZhH01qGmT8aly3c+hsn4BbuMW/1Zd64VvGpH+1ESZhLNY+9+xF09tsfqMFq4a16ynA4b8m0LyaE3JakdecsAG3yMZUmziLzFU/4DXajOl2cSsi/fQn59u0sX3k9q9fu5KYf7OTaq5a4Lq/4n0E5dV32Bn0lQlc1NckzNG6vM+c2G1FNlra2U634i1gr0WEDq1RS5SnlsCLpaopB4fPlehlJP0emmjTvo7MU3S/Q2QDRyXBliUgSXxwuck/Azy02CoxAvEeVrYvjlgzFx2kkYCweKh0UuI2VF84iRWPnFdM529t52nDVHnPjOCsF4zaj7eKySpHdPlZKZBmyrXRCL01I5nuorEM5GNLdtkY5yLFlRTmMXfaxuNeQpdY4nHF1B36EShVCCToLHZKuprNljmS+R7rQJ+n7z6ybHeK2aRc8bSBupS8k+rFc1EpIYx0OMWH7uf78nCRAykOh9Ivqn1tx+TNsPg6HudU6TGmYUvWcSWBaateYvRQLuGOXrxbJMJbhN+GUMPbRE7bE/jOauZUU/rxEiv230jtAOJUiTOXJvyLH5QPMzu3YwTJmdQXZ6yF7C+gjj8N0t2D72xgaR2FsGFOCzWCYNzonxorh1oFQ2o+vQgaLxqq2SnXS/8055fNf2+Na2wY0qP+cSnBJ9jOl7Lt5aPj6dV7J2ksUD77j/KbMraKjhxKgsIgyr9X0Yxae9Rsqr6hr5fiJauT3d2+r31/lyO9fU3gr/qWbqHZux9x4LfnOJXZdcTVrNyyzev0Ko+UCE6ywk35C78genYWMpJfRP26bv/5t3epdO7J+7eQR10UIGRrzQlOQNE1DZLguRkLb/3+K7bY1TWZli4w2QcFaGMvIOPLKsDoyGBcJQIt1UBrLyHjibzmvWCsMS2sFK3nFal6xOiwpRxVrqwW2shSjCluFxh9jMaY5jtv5fh0tUbKqs/56aWyo88q2+VQhhaJIHB2d+vMI/PlVld6evajCXGPvm9YkPle4q0KGXSoRUtQKzA3Jv7H5m/VzKiHQMswztESFJoFUKYxz9BLVjL2EZrHgzqGkJ/skgeSTolYPxvl1nL8ZEVV/zfzFE6/ebjUfFBTDilFe4qzz30d4xZ9OFEoLdnd0+F2y2E1ItWRLz9BNFXNZAnO+0JQbr/TLjSUxgkRJKusQCJRsLOORjbWnileb4KAQG3UPBWZzqxlmmGGGnw1sRhbaDD87OOWUUzjllFN4/etfzz/+4z/ynve8h1/91V896Mqx448/nje84Q0H9TMPJZ761KduynIuv/xyTjzxxNsdMQr7Sfqdd955fPzj
H+eLX/wid7zjHevnjz32WIqiYPfu3WOD5I4dOzj22GPr13z9618fW96OHTvqv01Dp9Oh0+nsz6pujI2KTFMIv7pDuv3eWHyiZfUW/hwJv3bhXIimk/FAaaVoMdhWDW7YSd/+Tq0ub2/vpCBYCspu6ADWaVMUaCn8XDsb7JYwSYZKhTe5oyEcYpdzzKmx5rAIZI/bsAjWQ8atX6uulvuUH7Q3L60cbF8tuWmtQklIpC9YbBa0hFRJ0sB1KNkUBdrKAlHlvuBU5etsI0WxisvXsCu7MTu344qc5St+zOq1N3Ljt69n+ZoVrrppje8tj8j303Mtk4JuyGRTolFIRdIvWjzeWgSg76qm7tBWwaKp/Vwk/+JrI4wDSgMDMIXEFJYqrxglkrRfolJF0h+Rzq2hsxQzLFDdlGTQJelnqCwnrUrQCbIs6zwoGch5pEaQ4uT6hoNJiCnH7VgR2AKm8kUiqVHBts+GYvPexiXHr78u9zR+pNQI5QuFTiXIDrhuH6RCWouzhkQqnLGeAM06JPMjnG2ydGC93ZEtPCHorMUUofAdOvJjhpFMtFc5LPTRvQzRycJ2VGPEn3DO25sJiTNV6Hq3qFggF17RqfDHh6Uh/Fzr7G62xfozPjlUbMQMtykcTnOreqbSzoubsPqUYtzCvLYFRtTEXy2DvQVb77pJaMrztAqs8ePimKNC8XlTEOckk58fH4UcVxRG23ITSJE82Hmu7Ka4/mrKQU45yJn/Xz3U4jZMdwuuu4VRy9JzzCp4fydBca5q/dglbFU3gdSEn7V+InCIC8qjcB1XQtTkxFjjyCZ/XuXgutWS3fkUAm4TEB09lBSNIs9W666FmMJbZZrC23kHG0lnDaKLb8BZiwv19p+iyjE7rsbsupHyhutYvfYmiuUBg+t3M9yV14SfNY60n5D2U/+40CNd6JHM90jm53wTX28B0cnqTNv6ULMVovLzdBGb81q2sN6Wv3VgSoXQ6Xgz20YKeppj2liHtdSEn7Xjj6XxP8Z6dZmxrd/Dj63sOqWfkJ4IKpVEackQgr2npAjWnqNadeZXJl6P10pDqiSFcZD4+x0R5wftfWz3bbYZ54ipFKhUIffTrjGeE1J4Msw63wQhlD/XlRsffyOaHG5P9skw1/fLW3+GRZcb8GO3kj53L1p1RhgT94MLjgcCqQVVaVBKYiqH0hU6UVjryBJVv19JQVFpEimpjKOUXunnrUl985kUvuGhvnZEi+NovTqxzjPMMMMMM8wwwwyHG7Zs2cJ5553Hueeey0033XSoV+d2j82qA1x++eXc85733JRlHW7YJ9LPOcf555/PRz7yET7/+c9z4oknjv39AQ94AEmScPHFF9ehp5dffjk//elPOe200wA47bTT+PM//3NuuOEGjj76aAA+85nPsLCwwEknnbTfX8Q5d8s7fJoV5bQCSLs7Or50kvhjurIPxlUy8cboQOEmfi+Mv4n1NijhpkowXc3YIv2cVF7xEy1NrQ+tF1m/LorHbmCrUt8RHi2HWpZQ65bNBkQD1ERhzOOJtkVegVSCDJ3E7eJiXPZBLlDFb1AYy7ByFMaNWbQCpD2BuoUi/r7ucWMdP7hpjdXC0FGSXqKYDwzdLdHEe3Pzm0hBN2lItKju08IXxeI6i2KIXNuFqPLx7e8sbnXJZxQt78Ts3M7KT3ew6wdXsXT1Mj/41g6uyyuuHu5fYS2TgsVEhu5oOab0g0ZVVzrHauVJv5j3tlE5RuKLL10l6myVNKhB2suehridJo+++B7VPucCfCETilGFKgRqrWS07Is+o8zbV+pMk/ZTdFfT3TpCZ75AV65l6KyDyQtUlpJWJa4qkJ2uJwGt8YU6IcEIT1CtU9ja8QI9gdBq55zGoh4ExW81pgBRYn1xvY3JovS0zNSoRnHC24d6K9gmn1BmfZxUWGtQgNUpqZRQlejBsCbzYke8SkIWX7uoaA22KHHWUuWFVwSWFbZFFEolUVmKzlJU1vFjWyer80ub
Lxqs7yLxB/VY5IRECImWIVOxpYgcUzsy3oE+tnh8t/3BhlAKqfaWvt2/5c+wOTic51bNSo7bcseGgZi51Nb/WOemW9q2VX7ta30k/OKcbLJ5oNWsFOvv00ijTfmOVTHeKAVeXR3GyDgGKoEnRMpRUEIV2LUVzK4bMLtuZOWnOygHOVVeMH+XOyAXt2F6W7HZAnlhQ55ZM6DGX/fqxkVOzMHa28x4x4S6CSPaNx5isi9+09w4VkY+a00Jnz3mrZT9OLnZbgvGOr55/QqldeEzm2v/5Gfd0lxqo/Wy0FxP25mLbSvuqkAUA5/daytcWfhsvFHurx86ReSrCKVwxmBXd2MGy1Q7fsropptZ+ekO1rbfTLFasHL9KuWgpFgNOcBKoLuadC4h29oj27ZIZ8ucV/j1F5BzWxD9ed9E1OnW55oI5zExb9tKEJVXKUJz7ERIDRoE2dj2EtGifINjzKtVXciNa4i+yvjnSuNtPa1zjALhV7XIP2usV/cG0qkqjG8MsgYhO0CFlCKcBo6BFBSVJ60icaVbpF+qJdY5lvKKREp6ic8f7ChPiMa5hv8MWzsLTGsCnIY410yE8Io46a3e91od1p7D4efrwoUcVT/7CHNTMbZOojU/bYi/QJpNHLwuzA3bOfaiVvp5i1DfGCjrsdxZR1V68tVUIRJCerJVSEFaGnSi0ElwZKhsvf3TYPmZalvnB5bWYpxEOmr3BGRQ9U3r/zhMeqdmc6sZZphhhhlmmOGWIITYFNvJGW5dGGO46qqruPzyy3ngAx94qFfnVsE+kX7nnnsuF154IR/72MeYn5+vc2IWFxfpdrssLi7y3Oc+lxe84AUcccQRLCwscP7553Paaadx6qmnAvDIRz6Sk046id/6rd/ida97Hdu3b+clL3kJ55577j6r+cpwY1jDuToHQIh2JtnG3c3x7ZNFIxGLJVAXTIRsbnJdKP6YCVXfmGNVLGjs07daD0/yeTsrry7x610FtiZVYELRREn/ecr5goNBImM+iJTgpLeiUikkKXJ+Cy5Y/dRKmPkjsVLjdNIo/MKPL9Y1lpz19qm/9Lg9Z63sa2fJCIkLR57Aq8sohljdWd9hewhyZtqdsz7La3wPLhcWJXy38LR9m0ixz7ZRQgiOm++wUngrnSN7KXMdVR9XosprolRYUxOw0aJrGDpvowJUhPWIRAR4S59IehFUfMJZdCj+iNEAMVr1FmXhc+xoFaoSV5W+K31theGV/8Pwxt0Mrt/JynWrDG4YsGNk2L6PnfSR6Ds20/SVJ/w6qUImMnRHN9vQGocpDM468mFVq/0Gxgb1X1OQiYq8OS1r5eBCqlCpQmcalUpU4m0gp8FNtE4729hIjtlK2ub3MqgX4k87f45gc+qLQN72KdM+d05nmu7WLp0FTwR2tsyhs5TOlnk6W1dIehnaGFx/HgXe6tNlDTElpVcXOFcrVERLqRJJK4TEOVUXg2sbKyFxViBENaaoqW1DJxTRSjQZLDA+fhI+ywnp8wKt/13YYEda5kGtCCLtIHWCS1JE1vMKBGuQc3mz4aXyVqc6DSpkT/zFoqi3FCvpBILUjfLQxNCyIAtd+1HBjE4Qna4nAJVqirxxPDMFwoX8Uid9g0T8+wbXkDbhp6YpHsW+KYNn+NnD4Ta3sm59Ft/YnAiCrMzW5WeYaE4KE7Bo+xszzMay6GCc8IMwBoXf4+dNquyEoBNffsATrJYC2rQUWpFIi3aabZ4N7waQSg2iQJoShiuYnddTXn81azfuphzkCCXpH7sNfdTxsOVYn+NnfK6uca4mIWI2dST8Nixut8eg2IjQXlcI8wPXNHfgbSNdkq2bnx1MlBZ2DitK6+r8MDWx82oCYtKaFMDapgFtP2CdJ5mOnE85optQWoeWAm3yOqN4tbQUxjEoba28zLQgVYJe4u0a67kY4HSKlhqlpG9gs1VNGgvncEHhJfJln4038Z3cKMcu78Strfj/W4MbDrBrK4xu3k2xvMbwhl3k
Ny+zumNAviunyqua7LPG0t2akc4lLNxxgWzbPHPHH0X/2CNQC1tQi9sQWQ85v7W55kVyLux/MaFs9Yr3OEEfbyxywf1DKItIUp/RWxb+Opt4e8zJ5Usx3SJcCtBKgIGSqEDz+yRCSa86k0oiKutz3KVASkFV+WzBqqjwuX7+e1SlVwPKoPyLdpPGOlItWckrhkXFXJYghajJ4H4iSbMFXCT+Alw997O1y8qeIIFuq2lNKlln+gGN6k/KhixtbeMagXCVIpBugdBECpwDpfwxrVt3IZHci4SfDNuw7T4T7YQFLVta2TS5JSEPsJf45sNu6n/WtEQG9xFrHbbyKlVnDVVQelZFhk4Vacd/p6RjWQ6kX0dL1grTKC9DtuCoslgnQvNGsAxtqw5FcCaRzZWmUUc3Y+gMM8wwwwwzzDDDDDPsK5RSPOIRj+DGG29EKcU73vEOfud3fudQr9amYp/unt/61rcC8NCHPnTs+Xe9610861nPAuCCCy5ASskTn/hERqMRZ555Jm95y1vq1yql+PjHP84555zDaaedRr/f55nPfCaveMUr9nnl7YT9omDcVsoiavupfenmXac2i4+xobpl7QlMJfwilGCvSb96eVOWYxxjXeFNnlR4fbR9cS31kbMN4dfu+G5BZn1fSAFP+KUZNu02ZB+Md5FPkH2Tdl1usqBUkwyMFdOiQlIASqWgKlySNWTDIYIN27kI9luxQBULEf548hY04NZ10MZi1r5CCp8zI8MN71xH0dWyLjxNLc8Fu9XS+nU2tiH9GvVDk7U0tozWvquVl20bp7ivqrLOfHRVgR0OGN64m3znEqPdA0bLI4rVsia69hZ9JTi+m7CgJcd0tVfAzSWoRNW5bGOr2yL9dFbRGZTMlYZuJSisI299frTerDNVMu1JtUz7XL0s8RkryncnT2IyP84UFmsstjQ1+WhL/1w1NJjSQGHqfL8yqBKHgZCMz8ciUFQzHrljjayrqfKKKs/QWYIzLjyGDvOiQvaW/X7K+kidIDKaopqhKY5GS7EJCz6C2s6TeA4nZWP/VpOCDidsOFFbhGJb4RyLhO1tFf6/jvyjIRa9ulf7ccZWYFNvDdhxWGu9khFwVekJvohA1nmiLvMFMp3UNmKR4HNF7gnASPpVZa1g9svxBSmR9XxuaZp5e7LJ4mfru3qFJH7MVM1r2sWzeKqsszFcf0BNf/5WhlB7yA7apOXPsDk43OZW64byafaeQTUsN1CveNWIaEhw0xqfWo1AYwo/GFMw74mX2hd1X2xUmBzu6/+2CL94DXQE4obJuc/6lRKmwBU5drBClReYvMCWFRJvMSyyHk4lIDXWOCyuzuuTMEb4tVU3Y+s7ScAIgWgTYNNU3iEDFp3WSutDBeMaMg3AWJoMsQ3eIybnrfs5lkopkNbPiTIlSQKZpAQ+Xy8EbZfGkVeOQRGUZxZcprBO0lFhch0tPIVsLLKF9JmOQcFaz6Pi9aRt+Rl+XGtuZYcD//WKHLO6ymj3Kms37KIc5Ayu381ouWBww4ByWNXzIBHIMN3VJP2UbNs83W0LZNsWUFuO8Db9vfla3T5G+EXUhGqL+AtNYLFBj8mfqA4UQRUYr3uBOG037kUir/m92R/GxOcbYsrbUIpamRdVYjKo5ZSWWGORWiKqZh5gKolSkkoaZJxvqaY5y1SSVeGXF0kiYx3zmSaRgtUiITeKynlVf938ZEzIFbb1vBAam+9paDtJKCEQyu8nGYg/rxKbOOLllO08BYI4xwr/d23CL/69UfmJ1vMGUU9amkiKMO7YceeCtoK6E9SSIpBycT9Gws+GuZasycsMKQRVYhBS+AzGytYKzqLySr8619H6UbCUjkQR7q2a9YjWnrG5KmKas87BwmxuNcMMM8wwwwwzzHD7wdFHH81PfvITvvSlL/Gud73rUK/OpmOf7T1vCVmW8eY3v5k3v/nNG77mzne+MxdddNG+fPRUjIxDV82NY7zBabIL9tAxTVNoiIWp
dUWkiQJ6+13tTkP/2JBwzSv3jf2xzufIxYJym9A0dtwqNN6UOQdVtLuSjlQIlGvshaQ1Xr1iK0/k4QtUoljFDVdh4Whv+WkNNungVIpL+82NexXyR4Ldo6jK8cIdjBWbZGt7xYKB0x2vmDFZbReqVNrYogoJnTm/qH3aYpuPyjpuGFSMjGOtMCyNShIpOW6+g78PG7eGss4h8bkZvvs/ZvTs2zcRwGJHMpdKtnaVt94MFb/COirRQSRekRkLgcY6itKrzEbG1YSECPkfIOvjPy5vrfK6xX6SUjm/jI4MyWQ68+qAQCC5qsBVJXY4wK2tYJZ2Mrp5N0v/cy1rN62xumPA4IY1BmslXSU4IlXcXJgNb8L7SnCnXsLWRHHkfEr/6B5pP6V3ZBehJEnXH5/RojEq6/zvFlP6nDdTWEzhybaFQek7sIumGzt2ViddjUz8cjsLHWSiyBY7teWjTDRCSWTiPzcWYurPD6RbtI2MhVxTGIpBSTWsPOk5KChWS1RVUFiC/ai3Hl2uNi5JHJkq5lYlRyyNWLh+QD+RdLdmJP2E3pEr9I7cTTrfo8oLOlvm6BQ5am0bojeP7C/4hQRbTBdUb1EBFxVxMadTpBnoUJSzQS2hxwt7olVwGiMAlW7Iv1YzQH3+Tox7vjjkl6d0tPbUUCmvhgCfnSckQiX+WEszT+BVRaPuCWOQ0KkvWibebnisGIkvDDeNDb7wakdDv4yWMqm9jKjsiEXxsSaHNmSjMDJxfA4KzrquG9VM1rW662VToGMvGk1m+JnG4Ta3ik05fuVahJhtTDz9ObjnkqsU1OonYcr1c6p6jpDWTSy1Wj1Y7tavY//nB7FpKhaOVSTU2nMY67OFIzkjwvMijnumasZKPE8k8jVkMcCt7va2nsu7PdmXaEa7V9G9jM6WOa8cCt/dq1cETsSWCY92nXdy7jh2dITxTwiJ08E6XUxv7nJpD9vftp9bbXPhnM9QWx0Z1krDnbZ06WnBYiepXTPGEJtV/n/2/jzalqQsE8afeCNy2MOZ7lTzXICtOIBAFzhg2yiooKgsbaURKBRpRLGXCmjXQtuhFFEaW1tUUEQQxLa1BVuwtRV+jbT6tfL5IQVVJTVy647nnnHvnTszI+L3xxsRGZlnnzudU3WpYr9rnXXO2UNmZGZkZMT7vM/zaCc17vvIRUQ/kcgl+yQPM4leItCvRxCTCYSpec4tCHByiRvTGtNac18kQKR+IDfM2KvGri19iHrK73V8/KwQzXPFS34aw0oKxQh69Rj0xirqM6eh3TxjdHw1zDm2j62jWCuwfv8GqlGFraIOa4KeJPQGCdJhgsGRAfqHBli+6SrkBxehLr8WNFxm0C8fBLAvAHWhgdE8vq4Dc94z0ESSuoVGGo7DemlsD/45P24A/CxV7rPSS+G6wrIghy4aIFASpLDQZEEkkJBlZQ2rgv9eWlSBhTZSBGsspCQIckzHmr3jAJZVrysHDkpWZlGpZKafJGb/kcCGJKhUIk8kJqXG+mKORBKW8wS5NMhUxkVBWkNXNeqiQl3wvE9b68C82cWZicAOKXkG+xrwT5D/kcFfUbjz5gsgrBAd8JTvH9sB5sL78PMuHlGC1YM17lQzK7jmf4NCji/YkoLHM4NmbZlIQq4apl+aK5RFBVICUhNqAMZo6LLgeScAmeUODOyBnLxBXSlMKo2yNihrjWnNTL/WeXN9AGBfP+XWK57l50F6H0FpAphZbDuPecxjHvOYxzzmMY95nG887nGPw9/93d/hhS98Ia677rpL3Zx9j4vTyfksCe2AMYLgimnBIIywIvx9tmyrdSwtirwNJNCumI4lnqIIDEKvJ3qWNnbDV5x78Cj2BYw/3wCLbUDRJ5m9pw3Aiz5FAtKUnGBzySuhS65MNhoC05AYhzUQKoXxnn3WwCZ9mLTXVISHZF/VbMN7vXVBP982nwDQGiQlL1KtgbUZbL4EK1VLnmnP0lz7HNoC44qrUitjcGSQ8Xkl/74F
WQEjbKiy5QU0fyaTLFlzoUEC6CUUrrUHFUptW5Ku3bAWMKGSmqVIJTnGq+v/JAQmtQmgMsCAOft1AIqI2Za6dpXaklOORsNOJ8xgmIxQrm9iurbtqs0ZZJOJRD+zuExbLCUWB1KJiZNBkoJZd0NFWEkkFnKF4RWDAPRlizlkniIZOHlZSU5KyfuoNH/vfJ0BwbqoAxDoX/fRAH8S6TCBylOkC33IXgqVM/jHfnEK3j8unNdov94zrhpNGPQrSqjNMeqiij5vkY6rwOrzY9PZ4nSpsVFpbNeG/QwrwkplMEgI1ahCNSqRLRawxiBbHmJQlMiW10H9Pqi/yKAeEWzlgLS6bI7dyUZ5SUvqDZjhluYQOZzkrgCkY+OdTfLNGHRld4E26833Rf+HZ29YEhCC5c9a+yEnJ+ol0XLHTKkSTjwavWN/VjOY6cGBmCVkgcCwsKaGUNlOgIEUjPtuYDw4f9NulX2TfCN4ZrKJ7k0vkcUNayrs2W2nYfD4RNylUKCaV6PP42LDg5CBpecBP1PPHiM6wYlnTtqKqnQMX8+k6/gjR2zkIJkOlhIOTLaOuoK1Fto9f/3T1ie8/WcaNou7X9sYW3SwXOQSWFq6bIAzlUZjo2C7s1RF39MMBhYjZipLQrY8RDLIsX30FFSehvvEb1+SQq4EtBHB2yomCu+mGBHa5FlYnhXtCqus30d8aDLZsa1LFdpaHNtigCyRhEyy5B/QzAND4V1X2tP1j4sB/SQJ3Hygx5KIjgGVkGieDdE+WMUBAUiKg4SA0FMuoqsrB8yophDGrRUath+cpDUFdqDQJcz6SdjJCHrtFGwxgi5rbB89hXpUYHxqnfclKRQUVaMK4ylLmgMekGGv4GwxRW+lh/zgIrLlIWhhGeSKgnyhTwze71jLhGembrHjbSi8abPhW9fCOs9eDzD7QhqZwMo03MfdwqBEEoyxrMYiALICUlhUwoZnZSJFkONMXR9JPein6qDQUJcadcUsPGNs8JmrAUglobWFrg0zBF3RJCkBOSWUinCMBGpjccVSjs1pjaXMMWOpmYvqUod5JY9pDOhVncUdoQ34Je5zDPbNGHg8M46Y/WfdfKSZ27QVBoCzA32te8eYADKDeH4kSSEiXjbqDNb5sUYMSD939etM6STzSVJgXQqS3GeA8NvUFbRTVrAmZRlQf20cy6+sDSaVBgmBqWCZTy0Fkloz+OjGdWstF4+hXZjrQUoNAXJzzUc65nOrecxjHvOYxzzmMY/HTtx8880QQuC1r33tpW7KwxKPctDPLdBFA24AHgjh8MmEmYAJLISrnhS2kabzrBYRA34d6SmBZqHFbZjN7OtKcvqQAhAkwoLcSzNyJWl0HLbxX4hDgEG+hOAkkGqIsgwyQj5xFf63pqlIhqsYTVJOHElObJl8AVMjQvG+FATlExkuYSHqKXu7RUn5IMlnNIxL2FujA/Ag+nxeg6dMdAyfbWFclbdP+qz0JFISGFfGeSrytXOkP2jnEyMEy98sphcnTyrAPiAAYCEwqgymTmZUG4vKMGDnmUYkRFMNS96/D0giIDj0LSerVdSN/4WxFguZxCAhKLJIiaCkY4GpGiic/FQxhp2MYMsCxeomitWNUPVsjYXqSciUcDjhhIzVlquaSSAdJkgHKbLFDP1DPSSDDL0jy0j6PSSDvAW2xXKWuqphyhrWGOiygqmcf45LevnPAwgsvBik85832kA6Nl8yYIAxXRg0fy86j7c0byrbo7BVGSS4YDSq0QS6KFGNCsg8RTUqoEu+D3SpuaI8SgSVswadTlS2Af96krCRGgwVYXGqcXCzRDJIUE1qZItbmK5vI13oIxnkSBcHrXMHcKJOOHBOumRzsjCESHPYwQIDhb0BJ4vSrAHLouQxgLa/Vjc57/pVSKwDoU+1jtYBYR78tURQKmewD2h8/ogALQMbAoKTSYF1YLx3HyAU2klvko3Hk29bV+o0lobz+4sTal1Qwd880WchCNrASVLBsbA7
gLuDG6Q7bhkVdkhxbh+geczjsylCf/WAWMSAA9AujEK7+EkAQToxzE3c/MEzbwVRUB7w95kH0/2YARKQHZAnLjQwjrkHNPKAswup7NnZINE4IXQJUU2D3KEFeB4ovQyi5sIlf/zu2My0AOoq+CJLkugdPMHHKmWzH2sgCeg5mUFtRZAQ53ZbVmz244t1yX1gp98gqQDC2nwxtHe/It5Wd05zrv3MmgNV2uLBjQKH+ikWUoV+wj5q/vPS1vBKGgGw8PNvisbqCwwpgGsWGvBz5OZysB0wC35st53vN36tqB1rtS6aohFXRCJiELHjryusAaZjmO116NVjMKMtTE+eDHOezfuOY3x6gunm1Emdp6hagB9LmCdeujGVSAcJssUM2fIQ+cElyOUDkEsH2b+vP2zOVwBQ/XO6IwGrHeDnvJtB8dweDVDvwb3ouemLbQC0mPMahNrY1nMzvh4gEdZLxji2n7FInJTntDYgITDMDPqphCSBfiqhjcW2amTZK1kDqKC1hSk1TF3yj5ObJJWiTnP29KuNY5MhgFfGAVFXLPdw/XIPh/qsAuClKlnakwvM/JrAF7P59Zq2CKy+nhTMwpQigH+e3RdYfhFgJDxAJgTPZzzbz40xs4r94jGvxerzY5j/2xUyWCMgyAAJQQqCju5e39+F5WsSz20qbcK+s0hmlSSBIoa31XzeuStoCCIYYsBVWcvrghbox6w/6foZCSCxLLlLQiAhgk2aVW+QK3XH6X2lLVyR46VA/eYxj3nMYx6PeKytreGTn/wkPvnJT+JpT3savvALv/BSN2ke85jHYyRuvvlmfPM3fzP+xb/4F5e6KQ9LPKpBP1912WW+xUSr3dhyACd4JbWZVWwS32G/+JjB9jPWVVu6qlUALX8SIURIUnn2i6/O9N+37jUlwOCj/98vUEXkDRixVwLrLgbmqjFEXTVynLqR/LMAV5cmKWw6ZFafT3bLBGK6jZ7RoON3grIeyiu/sH3c1gQZl9Z5jBhGcPugfBDAP71wGWy2cFFV2ueMKMECRImxuEI8kg47VwwSwpMvH6DUhgE3y6y4SW2xkBGWMokzE41S2wDoEoCeEsjV/hxfqOqFk/A0FtulZvahtkgkL4yhyDFVReNT4nwyjAW2pjWmtUGhDcaVRh15qGSKcKifoswVSm2RSoGeytBPCVQXQDmF2VpD9dC9qEYFyq0xitUNlFtjAAhV5qHN2rKMERHylRxJT2Fw+QqylQWuQj+wDJH3uQKdmKEGY7jC3PkFev9AU1bMrnMJMaABtLwcZ7cSdhYzz2oTEizJIAclikGwvA/Keux3oxKIrAehEr43nOxS8IurS1iX0JXjTfbfKUbIzqyjHheQTsZNptwetVmiPDNxSSGLU9Pd5U7jqCxQ1SwHSmDfv+G4Qm9NYOXECD0psJjK4EuYDBLIVEImMshGyZRAiWx8C/OU5b5yB7YuDEHDZciVwwz+LQKwOY8LqZeJE02Rg/e1aTHgGtabdcn0pnDB999YhthCCwbBtOBqc5UNmY1XJ4B2Pn+6ZKkwnUJIZgZxjocCUGCNbieTZcMs6LIRbEi2+geAlzLtAJodBmOL8egBv+ix4RneXGnO954Be3Qxuxb+JAAkIJxnzvnIN+53eBmxh3P783jshgDcvRmx9CKZ8C4LxX+H3LxMCgRZT+HYUQB4HoEEQpgm/SyI89Tw8zQR3UOddvn5FhpG7azoAn4mKqjy8qFBetSDmtoVTPnw3m1Gc7tlChEzqj2DEYD3ILXjLZSjCdSgxzLSw2XYqgSN16EEwSYZZNIPAEmd5NAWKLWBtoIT5PG81e+MCF5y2c97bLbQMP72ObwygJfZi+dZpZUotCtKsvzbKwcc6imoGXn4RApcs8Ss/nGlGzZROWYgVaYQxSaE0UFyHqaG6S1dNOAH8Hh930bJTD8CMklIpUCdDyGTHKJMgp+0JI2UBJZy5drIzDQh3HWQ7E1r/PMkHTpmm+J+bmqIchsop1ws
BH5umRHLo5vtdUyOn0Y1KlCNJ6hHBcrNMc7cvYbp5hTTzRKDI/0gde7bDwC5U03oZwoLVwzRP9TDwtXL6B1ZQe/gYsPuI2oY7x7Uq0u+1xzzK/jiut+CJKyCe+a6Ob4vtonZfppVIKxMQ+GNf45aXzxonSS9K1qrXR/x6y8Gdtx96LqzJxb6z1XaotCsuDGtDRbzBKOyRi+VWB9X2BhXWJUFyqmEIIFqWsMai7rQ0HUJPS0gpISgCVRdQqsUus7DOfVsNf/3sfUJNi5fwFRbIM+BNAtzTJICMiWoRGJQagBcCOh9rL13X0qCPaQFX6dBQkgHKZJBAtVTO+et5MFsyf3bs/y81KeTOt5NQj0UVei6te4R1jYFGo6N6YHcNO0DIGjL/noazfhd1zyPKbXFuOI1Q1FrHpecdLnwwJ8SAbD0fdyHNYZ9/rr3oe8TtcHYF8sZC0AhMc3YnEjCwEjoHZUGDdOZi14VrAh2nI9ozOdW85jHPObxyMTb3vY2/N7v/R7uuOMOHDt2DADw+te/Hrfeeuslbtk85jGPx1LcfPPN+NEf/dFLsm9jDCaTCba3tzEajTAajSCEwBOf+MR928ejHPSL/fsaCUa/VgjF6tF6KV46xVKgnq3nJWkEEMC/7vfitQiJpuKQfKV7bFgvdr7n/WQ8U9AfC1eqi/Ba7NEiXOKtJd8Uy/VEVeoxI6/FvFMpV+6iw+axLhFQa4hqjPropyFUClq6sr2vcFCydQ68D0jzPgFKQVgJVCVs0oNN+9iviK+RFNRUWEfnRPhknbDwzM1u2n1WfaiwBlldIE1ylFJiu9TQ7hoTOFnB0mKRjKxwnnl7LTidISPro9INEzTptJyB56Zqd1rzgv30uMK0Ntgqa1S6YQgmUmDBclIrV5wAEwLILNOUhKlhihHMeAvF6mZIUJVb4wCohfNF7J9HCQOBMpXoHxogXehj4drLkB9chFw5Arl0kFkQ+aA5XMegM45JaKcFA8aqROIYbDppEo2etcceKA1IxyfBhH4O521nqrqprM5y7tODhSBzSYNFlr7Mepy8IhX6ib/3hKk5gedYHHZaQIw3kQJQ+Rj1pGS2nTGoJtzWJSdhpq1kwLY2qC4A8zEANiMA8JTSoWrcV5APnIyU/18lDAiqngyJpnSYshSpk081VY3MaIgkARkD5IPGr8o4vyrIdpfsyF76IoVY1tP3QZ/Us2jYEp5JDWoS+b6wIjx9YnYdEJI6IkkBl7Dkt2X4XOxtEwBJqaKXHGBp4nGrA2LG+4wjYjTGxws4xo3gscA6zgTZ2Tc+e/DMfGse83j0RFfOO47AsG0S+qHwKgYMu8U4M7Zlo/HDWAbLvQKDv438fCsUaTkwRtiG5Tcr4kKEeHsiPrb4xys6mMbHD1rweOnmFl66nDdK4Xnknz0qTyHzNMj4Wc8Qi5hR0AoyI5BMA+AXH8YOEkt0/oQ1MB259L2G7fxtotc8a82SCgzvSW2cAgFAghP3JsfMyZUUAlcsZJjWzLQLmgiRbyp5ZQpDzfMXuOCCseC/Cv59elzBGItMEVZ6CgBhqi0yqSCTHHDSsp6lmkhC5sAnAQaWreVjF9LJ4fuCE4rm0bpmdYTxlpvPEPsib6yiPnMKxZlNjI6tonb+wNWoQLldYbJWYLo5xagyyAqvVNBmlgECg36CdJAgX8mRr+RIF/pIF/uQeRaUNQCE4jwbzYn8/0Kl/Fx0vrPwihwALJk28Bc6g/fNbWStgwS2K6rTYEa8tQzuVA7s88CVnxNIIUAU+/shIPfey89Yi34A/rgtuSLHEOPvlLXBNgnUFatO1KVpgVBebUST3AFCGQdY1ZVCOa0xcUV1Ra0DkCmkZDUKySw9Sigw/KSgoOTgAT/v5ycF0M9UmI+pXEGmMhSg8Y+X9pQtRYGgQuD6Ywz4xSNmLOXZzFV1M345dZdQ7ARAaILVJRKZonSLT61tUCywtgHm+Fww088XFAK8fvVgadM12hKfcWGo3UXtQrt9
SBKoDJduVMYgs4RKm1Bs4AtA+HnSMBmtU4YQ3fXfPOYxj3nM4zET1locOHAAH/7wh1HXPDf6pV/6JfzAD/zAJW7ZPOYxj8daPPWpT4WU8twf3Of4wAc+gOc///koy6ao+MlPfjLe85737Ot+HtWgXy8h9BMKzDgfPiHNayUbEhTd5Yf1rwv2aCPRJGl3Uwzx1YgeuAM8+MRf6oKDJMBgENMI+Y2okpwiKRbpKzTjBZuZsaADWkkQL2XHMlMT2LqCnowCcyp8Ls0hegPIJAVNt4ByBNNbChW6LAFaQa+dwvTMOsTdn8TgC54Euv4Lm+RLEkkB+sUmNTeIrUtXNZzz+56Fs48xrk1gp6UkkMVJL0EMkOoqnCurctTWFTlHIOvMEBQAykQAK7l0EpuctDg9qVFo60gIvCA9W7LxrOFYBj5pInTpqs0VUkkADKZaQFpgMWu8WajV9/iPBvCz2ChqbJU1To6mGJUak1IHL79MEXqpREICxvJ56ylCTwksiBK0vQoaraJ86D5UJx/C5n3HUG6OUY5K1EWNelJjujkNEp8ylaCEkPQ4saFyhWx5iGx5Ab3LD0GuHIY8eAVo+RDLyEZ+kVQXQFUCKmG2nxrBSsm+bo5tR1FlukhzBvwcaAeVMJhHHaDKM0p8oodkAAlFmnMfJgWrcliSqD0TtMMWC8mTwACpQLqELAvIzVWYyQhLw6OoNzcwObKCbPkUys0x+of6OLA5xWWnJ7hyo8BGZXC6ZO++M+X5Mf9CFwEDgOcTAyctdTiTWFSEA6nE4hVDZIspFle30D+yhPzgEhbKCnJxmU/NwjJo4NokayDtO/ZcWwIzyFm6JJR2IB+Poe50+TbvYMZZVunkvHlg5yhS7I9lDY+JuuY+ophNI7xvkB8vBQXfIC+rFqQ9feI1yHSGLGLzXbRl/2xEU2yf4Z2JKl9rQd4URwBx6cMOCS7RJJC9J84jHXPfmXnsJUgAwnn5huRyeFO1wJpWQhpOmSD4ADOTzt/H4d7sJGs9g8/fKdppWzJDq2H8+ftKO+lrCyBRIgCCvu0eptdoWCTeG6o1rnsPNl9U5aWGXcJcAM18xye2SSH4DZICLawAAMzWOqg3QJblAWgx2+tQR64Chgdgkh4/+8ZrPO+SCQNKqgbJHm8bzbDl5xalAfLpiOcI0TN0v4PBmfYcutTM1u4VW/xs6K+AnOLAaGQYoElkOO+7RU8JfMGhXvifBFAZ4HSZIJMCuQCULiGmI1as8GBHvtQc83nGVmlwdKvEVqlRaRNUGQr3sPKqDD0lsJilUAKQ1rjXDQoSgJKQwkC4YsBRZSBSiby/wnMXoPG2swaiHIOmW6iOfhpmtAVbFszyrEuM73sAm/cdw8b9q5isjmGcH7Jw3nzba0WYIyxslgHwE94fLldIBwmGVwzRW8mxeO2hRkXh0AFW1iBmpNpiDIuxu6CaQZi4MC9zc6o0Z9BJNXK1XlobTumAQcuyedKR4oe4MoBgP3AtVMRU5T5TRUy/ygGAwSaQgIQoSNMLNPKpwaPOF7E5afulrMa40sgUYaOfYrlfou9Yf0cBqITvtbrqMbPSFX75v7muqSkeskaDFHvOWcMAYuFYhTbJYZM+RH8B6cIA2WKOfDmHIIEDAExlUFcNsKUSngPLlFUWBAmonoJMZFDESIcpkn7OhQCJ4rmrSl0BW+rGAh4P4PwQPWjdFQkIoJtfD7q5aRjT/GtuXOOTLgFV81iTGOQqD9YBpeX1Tan5mo1KjY1pha1SY1xpjN36IQb/vExqDPgZo3n7DnCNAT8v79n8GGgjAvCnySJXhGltkCkHAEKEAhBjBaQ1gdEYxmSA5Zgf4ZjPreYxj3nM4+ELYwze97734ad/+qfxD//wD/jSL/1SfOxjH8M73vEOvPCFL7zUzZvHPGCtxcmTJ3HZZZdd6qbMY5/ikQb8iqLABz7wAbz73e9uvf5DP/RDuP3225Gm+6vk86gG/VISyDuIi/HMEuPkNqNkrU9S
ny2CBGiTDwYQFS6Tq/oVwqtP7Zro8Mkl+MWKX3RH3iwhoeaS3rF3TgD+gGaBV06bhTrQlu9xIJ+tSv5dV1xt7NtjNG+fCJQPeMHpElNUbIWknDp8FUTeB+oKZnsd9p7/F+qyawNYAiCAhLAGSIglmaqSvxdXrALsJRiBaQBYxkk7cCZi3Vghgj9NN/xCuHSyUspV3VrZZmRamUKk/fB5XTdXMkhVnUeEJGMAegUyxWw/XiSb1vsXGwFQ1SxpCGmQqBzWisDCE4LBFRtSmWJHvxMQUMTAnrYSS1mCTBLGSjoJW4uECLki9BOJhAgkBHIlMFSAPPUAsHka9dpJVCcfwujYGWzcv4pqVDnpqSmqSY2NrTJUcK9kzCyz2kIQIR0kDRMwYj8A2MmwIgUkDEYDaEA6D9D510iyVFPe50SJYwwKlQBZrwGDutctSor6fXv/HUjFiUT/f1fq0fsJ2YTboBiYsnUCIVNIKSH6E95PPoDMM5AklJsjUKpQbk7Yb+f0GEujCktnJtiuDQ6kEmdccq84D8+/C4mRthhpjYnm/RTGgk6MoEuNZJBytTkR0oU+MgDUa1iXgiRgcz4vAHc4Gfn2oAHKuqlmn6Dzw62XLe6Gl6rzhRYwFpYEBAhSNt58wl9PqQIQGLbh2TT+2nXvvTgRHo8rQItxEMs9n+sqxHugCMDbLS3jx4MA/AlAXRDUO495XNoQot3vgQisc/eddcxoM6Nr89jdYc/tFu5z7DfFQMBuari+kMr793WfvWLGZ+NXQ7GWMTvb1zkQTpr75H5TdGVrCSHb0qTUYzlzWlhugEE39xKZK1AxtRtbiYuiTA2qJtDZECBy0qbd9vN8Q9kaVmUNq8+aZlvR5+OxOi5KO1fY6HdXSpAVNUTYtyjHkEmffc6sZ3PZUDx0tvlwd+5lLfsNV4bncqnKIaop7HibAQWSFwVuGmuZqWQaWckY/KuMRD+RkMTPIylFw+KO5PdJiFDswgwo/n4q8+b8evDYaGA6gdlaR7m2hun6NlR+BnVRYnxsFVufOYPtY9tYv38Dxlik/QT5UoZkkGCjYl/fM6XG9a6YKh2kkIlEvlkiHSbIl3MsXDlEtjzE8OrDSBb6SJYW2avXKRkAgK3Kpg8GX1wTzcca/2IhZZCSFJFqCLl1hd+OkDL49wm/jooY8X4+6EF4DxJ1n7Xcn/gZymMM3+9eGlZG0p/+OtYGsJDIFGFc6QDge4nI7SLFBoC60kinCsAQ1rDMp2f7Cefvx83m/6WS7MOsBFIHAmvH5rRJBhosIl3ss2fiygjCdV5daVgPykoRmHwe5PNArkwYGMoWWVpeDXKoQY/9llUKkSQ8h/Wgq5+junPqAb/43FE83sVrxAgAnFUkKsAFmTzHEpAqd2MhjxpevaQ2FpVhGwEvy6o7gB8AB5Z6ULWEdoCqQQP6XUjMKoo66/Q4fqZEssPzmMc8Hvtx/PhxXH755fuyLbapsKC5pO7DFu9973vx53/+53j605+OZzzjGXjCE56w6/nWWuO//bf/hp/+6Z/GHXfcge/8zu/EO9/5Tqyvr+PMmTP4hm/4hgvad1VVeMUrXoHNzU0cOnQIBw8eDL+f/exn48iRI+e1HWMMTp06tS8Az9raGj7zmc/guuuuw+Li7HznpYrxeIx+f/+U2fYSk8kEv/Zrv4bnPve5eNzjHnepm7MjhBD44R/+Ybzyla/E05/+9EvdnEddWKfo87kWdV3jr/7qr/Dud78bf/iHf4jxeIznPOc5eNGLXoT3v//9+J3f+R08+9nPflj2/agG/aQpQXXR8mGS3ncqqhqnwC4R6Pr6+Wh5p6BJOsU+FEKAE9WR7BQQJVVmykw65p4HuOLPefkiXUPUTpbTgWe8cDIB9PCLb7O1zpKEvUGzsHJVrairBvybjBqpQ79wr0uIKUsqoq4g+gtA0uMK9+koJN2Tax8POZ2wN809n8DG//0YDn/dc0ErR9r+XvHxVmPej8qbxJQH/coxywdG
oJ8cnYGYbjafc4kHKxNOgs1gB/pq60llUBkgk8zOLHXDBCi9Ybzz4Ztqg37CJvEe8NuNdWOxe4LMWotUEoYJQdoaIIVTE//eHoA/a4C6XZVrrYEghVQq9CxBGQsp/IK86ZsdZST3v8BCppBIZhjFfn4M+jnpKsU/kgQf0+Yx1Hf+P9AbqyjPrGPrwROYnFzH+r3rKDamKMcVHipqbFQGRydVkJu8VidYqQwOuESHrpo+ERh4xoTjs0AD1Dj5XC+vaQBOSBkNGwGBQqVcmZ71WJazN4BVGVdGJ5kDiNIAGgdWhm+Hta19ghoJpZgBFgc5fxtYw/3c3ctCV7CGE7Aiq6BUCrOwDLOwjEF/Af1ijGx5AeXWGIO1LRRnNlFulxieHGG6WeKytQKnt0qMtMHRSY2NSl+Q9Of5xEhbjCY1JtoiEQIH9AQqb4Z6NchhjQHleaiGlyRBPQ0jCEia5J/36OmyP4CmkGLW+Lpb4tda9sKzYLasv0pSEEgQZKoa5nJVBG8iuGvRSo55sNZFV+YpTop7mSoPOjbMRTvz+vtj8Il3LyPtfVmlG0t8H5mZmHYggtD1palGJ/HwVqNfAvbiPB6ZIKDdr2MAnSSg0iBBFyf2OZkvWono8GzzzLkIsAphagb+gSC1HtqBGMBr/pYkghjxrJ7oASav3uCBwh2sxbgAKx5r4AAUcCJbqBRCasdAtg3oQRJWMUggPcDivGpBxCxAIqAsgHQASAWjBpDjNdjROjA8DCuIARP/bEeT4FcCoGIEkw242KocA1YEaWbvK+aHMO8Bp20HaIuk/nx0z5sfG6Ub54xwhVIw7PtsNMR0Cyrth7ZW2qIiiwO5wIH8wqokDYCtqUYqCaW2WOoPIKox7NYaf4BkAzKdI2zn7+C5LEQA/LanNWptMa40FlIJSRKVYdUIBp4VhLDs2er6mAe1GGNi30UtRWBxp5GChylGqDc3MDp6CttHT7OEp1NHGJ0YYevYNj6xMcVEGxye1LhyVGHxUA+nS43jRY2RZjlwlbM8pC41ylGJbDHDwpVDLF5/OXoHl9C/+gqIfADqDYKPX1gnROuAIOupWdY7qB4ExqoM8ycLIPaCgyghrGzkGneRwm4xuNx4EP42DgD08wJyaynsBPoUse9jSiLM5/32hABSMjjUT5FIPqbKGKSR5Kc1FnVlQCRgzCJkXcE45REAIJVCEEEqgpQEUgJpppBkCqki9nd3awepctBwGdmBZeSbYwy2xkHNwmoLow1IsrR96vyVhRSNBL2kIC2v8hSUKqSLAySDnP2l/XVTyY7Cs0bWs5FEBZp71XpdY6CZW0csPz+O7SwaVayoIAjC1FDEfZ3gfIktn9Oi5nulMgZlbXYAftZY98NrVOvYpB5gtal/3TI4uFsFx4yIpxS7Ti/8eB0Y2tUuH3z4Yj63msc8Ll18//d/P37/939/XxLGDzzwAO6++24861nP2oeWfe6FtRbj8RhnzpzZ9eehhx7Cu971Lvzmb/4mAGB5eRlf9mVfhttuuw233HILAE6Ev+c978Htt9+Of/7nf8aLX/xi/NEf/RFuuummi2pXXdf4+Mc/jr/927/Fpz/9aXz4wx8O733BF3wBfuEXfgGHDx8+7+0REV7xilfgjjvuwFd+5Vfimc98Jr7yK78S11577QW3bTAY4IUvfCE+/vGPY3l5Gddddx2uu+46PP3pT8cP/uAPIs/zc2/kYYj19XW89a1vxY/8yI9ckv13o9fr4YYbbsDjH/94PP3pT8cP//AP41u+5VsudbNaccstt+Crvuqr8Gu/9mt46Utfeqmbs++xtraGe++9FydOnMCJEyewvLyM5z//+fuy7be85S14+ctfDqUe1VDUeYW1Fn/zN3+D97znPXjve9+LU6dO4ZnPfCbe+MY34lu/9Vtx8OBB/J//83/wMz/zMw8rc/RRfaZFXTFg5pMvaicNMk5MQwCiY7I0a27t1zgxQGgsL45mZpdaCa52BXlYgMXViL4a
08t5WgOqJuzBVxbM0nNVtp69J1KWMbQ1V/Gauuo0wS3MjZPoyXIIY1gqz58vGbGuprwPMqbxR4tuPJH3gYVDyBcPIn38JsTh62ESBlpaYKXREOU2oyTDA+7LBL3gqmeIGFA0NeTmsVBRL+qGgdjs1IGEU96eTfJWoqHUButTjcN9hWFCGFUsJ1RoixwIcp+WBFIJ9BILa2VbNmjG5Qu73+V1EkA/IShylfAu2TlMyVU0n13WqhUxi7MuGURSGaxycoURQ9JYPiYp2DOwihIrcdvitgeAwxIO9hKXgEFgCErnKZlKQqYEeoqgztwPsX4MxQP3Yv2uB3HmruNYu2cN080So3EVWGnHizr401VRI6QAkp5iZttiinSxj2Tg2A3uZwcjSzr/PMvMOSQ1KOsx4BdVmPNBMtPPKgb4tMr4XpepA/9UkETy4BTQyEgCzfVvXQpXUh2z17qfF6CmIo0U30+mbhKgSQbRX4ZaOgS5dBBmMoJcOgUzGUFvrqM4w56Ig5NrqLbGGJ8eo//QNqabJfLVMdYqieNFfd4SnhcS/rpNtEFd1KhGJcrtBNUWS3+pfg4/Yoo0Z4DVyfFaKoFaQiiA3Lig4ZLsrfPY/KOd/8r5sug8I4C3HRdXSBAkVJbyNegmrzoREvhdpg4B1tQQpECCHBjRJJA9i8D72cT+OQD3AemY3co2IIP3ZRW6DEDwDkZTSEzxazTePMuZmMc8PrsiJFMcyCf830IATo6ujpLTuxVTzbpvW4xvL6EMOFlLV5SBNlst9rDy8m4qYsbs2KdPert2A1xYwA/wqOgqKsYKx+m961wiu/GLNfxMk4ljPTnpcumYyQpBQYELwxq2lDWsxiDWj7GcX38ZJh3A9ldg+isoLcGCpSS5oIzHnkwyA07nS5BwTMOEEwIepFGCUBoK58kn6Q3acvQAomtmW6oHvrAjdYw3/znvRe2VGqw1QMoSncOUcN1SGoql0otIgkshsJwr5G4uIvQEVqaQl18HO9lmb7zz2A6DfPw82ZhqjCpm+fnQlgGIylhIY0CCUNQGvYTBxoQsFClYYwOgLAzgBfv980Ebi8r1RWmBhIBEOWl8XcJMC1SjAttHT+Pkx0/ggbvPYKKb61Eai/vG3Ocm2rIv76TGqanGyH3uYD/B8s2XQyYKuqohU4l8pY/hVYexdNNVoMWDLBfrJCJDGB28kT3gZ6aT0H9hWB49FKM4H2O44imOjH0JXeEZTA10WK18QqI1jlDwM0zrxwMgSGn78D5+0sl6sqSnk/aMAL9MCsA9WxXxvUWpROHuhUxRAMT7iYQ2Fr1UIlUEUoRyUiHtJahLHfz+AEAqgiDhAD+CSgj9YYaFQYqDwxSZ67+VscizAdTKEdjxFhaMhtUG1XiCbHmbAS1tQA7g86AeA36sphBOl+RCJkoU0oU+qN8H5QPQcBmiN4DI+7ymko497dey1jpZy3aRn42kimfFTM9397cAYHUNIUqIuoRMlVsbzVYtoHCt2vLkXhJV1yX0dIJqso262ObvqBR1OQGpFLrWMLVktq2/Bm5bqeLr1U8legmzOPvR756SkA4cphnH2yokuQgm8F8mSWYAAQAASURBVDzm8ViLv/iLv8Cf//mf4w1veMO+bO+Tn/wk7rvvPjznOc/ZM7j253/+5/iLv/iLfWmbMQZbW1s4fvw4rrjiij1v79ixY7jzzjv3BfT79Kc/jY9+9KN40YtetOdtAcDJkydBRDh06NC+bO/bvu3b8IY3vAE33HDDvmzvlltuwcc+9rGWDxUAZFmGgwcP4sCBAzhw4ABWVlZARDDG4Oabb8ZLXvISfNd3fReuueaa8J1f/dVfxWte8xq87GUvw5/92Z9dFJgWxxOe8ATcc889OHLkCJ70pCcBAC677DL81E/9FF760pdeENDwh3/4h/ie7/kebGxsQGuNu+66C29729uQJAm+7/u+Dz/xEz+BpaWl897eF33RF+HOO+8EwEBbr9fDrbfeiu/5nu+5
YMDvzjvvxObmJp761Kde0Pdmxac+9SmMx+M9bwcAtre38eEPf/iCmZndeP7zn49XvepVeMtb3rLnPuHjE5/4BO6+++59Aa++8Ru/Ef/xP/5HADw27ZUxbIzBvffei5WVFRw4cGBP26rrGj/+4z+OH/qhH7robaVpig984AN4wxvegK2tLXzf933fvoF+a2tr+Iu/+As85znP2ZftfeITn8Af/dEf4bbbbtvztrTW+PVf/3V893d/975IbP7yL/8yXv3qV+MpT3kKXvva1+Lbv/3bcdVVV7U+80iwRR/VoN/MSnR0JJJEm4HSlReKFxQxS8UArRVrYPpF0WL4eVZft7o9rpwFGPCKpFd8daZPMNm6Yqafk+VpSXRGzCmLyKOjdUAOOFOpq7yk5nto2FfWaAhDTga0ZFDRsQqtcomAtMeSUsODMEnuPNkY7PDyntaaRo7PS3+SbEt5Vo7FWE3bC7XIFzC+jn774fS2zrlAXxHkZAMyXQQMJ+69V0+4vqJ1hS4q4v0mUjgAuXlNutWygD1/X7+Y/ekX6JL95QJoZWwAnaTgqk/pjs1I0fbW6CT1fMSMrJitKn3lvilB5QSiGEOcOQp96igmJ9exce9JnP7UKh44MUJh+LhKY1s/8fHnJJArQraYIhkkSBf6UIMe1CBnEE+l3P8cmGm9bKQgWKVCH4KW3L9iUNSfMi/LKZMgy2llwslnd96YyYgAcvrjDueHbEgg7FCEjK6phhsznFyVcD6d/F0nPel8nqxLnrG/kwQJghosQksJMRlBpDn6aYJqNIEgQjlgmScAkOkEB7aY/TXRhIk2+87487Jr2gK6NDDaQlcGuqwhqxq6KGGKAiJ3csAJA5pN1biFNSYwa2YxRGIgVVoRAWqiBabGfbH5TPPdJrHVjMHaFbR7FqAAD28h+e9+W2Anc0cQJzddIi8cg20D5H7fngUY9wUBCxgBKyykAzN92wQQ7mPoMhRBdKvQ/RhP5Wj3C/UwhSBqJSAfju3P4zEeRIAlWH+pqSlOMQZnZ3HEgF8s+d3xTRWWVQOENbCO8efHXB8x4IfOvS66IGI8FwNCUViYL3VlPf33nSfa7EPRTiJvJ6OYd+KeC2kW5BJhDSANUBbuecGgDIwBelyQZbMFaFDrPEoSLUBEWwYietE1CKFLNwYmfGjurPmxTIOfgdadB2MaJlb8bPPPTmmZ1edhgFayr3PtlACGyd7GABLsq5c7aVBMHRtR5aDMe7OVfJzn8PXTlkG9Sc3qBp4l5cGTWWxuD0xpC0g0zHOW+UQjXR2dBuvmaTB+3ifgC/xsWaAuSkw3C2yfGOGu7RJnSh2KreLw86naFVP5SAYJ+odXQKmCKWtM17aRH1xE7/Ay5MoR0OIB0OLBIAvpC6V8QSCcvKetypbSB4wGPCML4Lm+A/X9Pc0H76X7Ld+vccGin8MB0f3TyESGc2qbQrNuQUCYoovmt6vJZHApLqbRAKhGlg0BAJkSMJawkCpM3Tk7OEzZF84VxW24Z35daZRTHZhp5MBCUsLJehL6vQRL/QS9VCGRBPKWASoFMvb1o+Ey8oMs2y6IWqCfTBPIPAVJCiBg61ipAf2oN2jYmYGhyeumeL1zrjDgwoAwZp1LSTNaawlrg3IFrHFSttatn2aP5R7wawF/1sLUJUztVWkaSU9TleE9Y5MwXMfrFumUV1JFSCQzPTPF1gMJeQYoggzsWVl/lwD0m8+t5vHZFk960pNw8ODBfdve1VdfjSuvvHJf2HRPetKTLohVdbYgInzwgx/cl20BDFx5ttle4syZM3jta1+Lpz3tafsmW/ehD30In/d5n7dvoN+rXvWqfQFKfbzuda+DtTaAe/6n1+u1PvcP//APOHDgAF760pfiy7/8y2eem5e+9KV4wQtegCuvvHJf2vb2t78d11xzDa6//np87GMfw7/8l/8SP/IjP4LhcHjB2/qSL/kSvPnNb8YHP/hBvPvd78ZXfMVX4N/+23+LF7zgBRcF
pvzKr/wK/vRP/xTvfe978brXve6iwD4AKMsSL3/5y7G0tIT3ve99F/z9buzXvQAAv/RLv4TRaISv+7qv2zMQ9sY3vhHf+I3fiKc85Sn70rbrrrtuz4Caj2uuuQZ33HHHvt2jQgh827d9G970pjfhmc985p629du//du4/fbbcd111+HlL3/5RW1jMBjgP/yH/4CXv/zluP322/F1X/d1e2pTHD/2Yz+2r/Kef/mXf4nbb78dN910E77jO75jT9s6evQoXv/61+MjH/kI3vWud+25D3/Hd3wHvu7rvu6Sy9Q+qkG/AJr4xY9PSoQEMkt8wiVQYq8lAC0PFKCdqLZ250LDL0rJbcOzUDhRbtqMj668CtCYq3c9+6xhFp6RgEpgpxNetBdccSGyvAEAdeOxAaIgfQjnUxESW75qfbdzJ2Uj9QNwMirNQUkKkw5hZQKrctikB5vkzNIzNUx22B2HYaalNTD9lSBRqpeuDJXozYl1wKbbFwBYMQB0DSo2OGlRjICFPmySM9joFsJVlFgcJITFZAMY8fZ6aQ9QF7ZgvtDwapU9xTJGtbFciQxmHuaK0EMFMXUyYNn5TShEPQVkCtNbwtRKlJVB6X1CAOSKoJyXj3KMwJlsJwfQ+GvhfRljpqmZjFwF+ARm8wzMZIRi9TSqrTGKtS1UW2OUm2NsHdvCQ/9wHP/PWoHTzi8lEUBPEnpS4EAqcSCVODXVOJxJXJkrXH3jMvqH+li5+TB6BxfRv/wgsiNHQIMFyINXsA9ffxE26cMq7lOxNKONpTg9WN45xiB95NiBIMWJUjDjUDvg1wN+OlRINxPvxCUPEiZmtADA83nmeOYJwOAfVO76tGP+pQPYbMjgdjqAqgvYyTbMxipUMUaydAzVxiay5QXIPEW2uAWjDfK1KbDK9/mZsqn234/oSWpVhVttoUsNXZTQqUJdlFBVDSoLTg5OJyw1K2tYD2gJl4QjxcyauA/6McyBn+STSz4oSvK7v40VqGfIbIYErWkDgMEXL0qEczLIg4AUvLIEuQ10JD+9DLEAy9lql5AUYJYf0Pb7C80XgHXAuxYCgiysZ4r7MV5XLENqNLOXDbOfPcPC1sz+0Ftb+3RV5zGPRyZiCWRhTXQvq8A2Z0ZPe8wiRriaFyKpYP/9MDZEcyVfQOST1DIGmTQDD/F409qG35V/39ToyopygxswkJ8paes1KwTg/c0UeG7i1RPi+ZXfD6Ez5qmmHf5Z7P3TBLEagkxh+iswaQ9WpswkdrKofq6TkQVgMdU8j01JMHuwM9fxx5ulaQCvYuliAoMhFRrwyj8vJ7WGtYDyhwTA1/j2lHWKAPu3IJsVSoAlQU0N6EgZQ5dcYLZ0GHTy05AqRXXNl7TP9Yywlp8RxgBrk4bF6cf2XBGGqUI/IZZBjyb5Xl7VhOePK+wSnpXmZZ0bmXhJgp/59RR6YxVmYxWTk2vY/MwmTm1McWLK86hqBjh+wyBBTgKbpW5ZC2SLKfrXXg0aLMBWFawxyJaHSK+4BuqqmyAGizzndiGqKZ+zuoAgCWMMRDF2322YqiDnyxf79MmUwWeVRcB5I/8JV5Dl52VBPh1NYUvi6P+Kb3wYAB7D1J0iMT/nkkI4mWwuygyMMsHFNKKe8rzDMdSMNciSHItpgpQMJLGE/VapoaTAgV6Cg4MUhxdLbBcVTm5OMSk1NsYVKm1gas9uFJCK0EuYaXZwmGKpn+DIYuZ8rnl+UloJ2VuCPHwlRJIiJ4msGEFvb4ViL3LMvsC4JGok7aMQCb8XvBfTHDRY5DVW0m/JlAtrHNjsAf/Gk5JlOCNANcjUSwhyXsxkACsaf8ZuuPGCx7UaUiSOfWkhCciVxFQ5eU9tkCuJUhlkqtmWtRamNtDTAnU5QV1sQ5dcmOoVaISUqIptZv5VzLj0fUE6gG+YK/QTiYVUIVOEpUxhmEnH9COkUiCX3K+EqZtiybhoVpfBl34e8/hcjoMHD+4r6LewsLBv2zp06NC+JcU/
W2NpaQl/8Ad/sK/b/JZv+ZZ9lb77yq/8yn3bFoDzZvw8+clPxm/91m+d9TMLCwv72ufiY33yk5+MJz/5yRe9rRtvvBE33ngjer0efuZnfgbXX3/9ntr2rGc9C0tLS7j99tv3JOWZpil+93d/Fy9+8Yv31J6HI773e7933+75PM/xNV/zNfuyLQAYDocXBf7uFvs5tgkh8JrXvAZPeMIT9rSdyWSCD37wg/jQhz60Z/AQAA4fPoz/9J/+0wXJtZ8r9tvP7/u///vx0pe+FH/1V3+FqqqQJMm5v7RLXHvttfiTP/kTfPVXfzV+5Ed+BL/4i7+4p7YdPnx43wpf9hKPatDvbBHW8o4BFr/WYuh5ORW45K5lvz6LBjCIK3199bWvTO0mwUMSy1fHRpJzYrebRRCsMLw4BJxsYAoTefnByIa557/mF5ox8Cc7yamZu3MLVZJAmrnTRLAyhXEm9q3KX2sYrAlJfma3eBDPCmLWVZJzhWw3MeUWZlYlDdADV83oq1xVxkkwlYXkYDwc+KQ8kdxRaf9wpKX8lQqggwCUdJXxLhkxlIpBtrqEqCa8MN06CdNbghm0J99iug0ar4XzI8oxIEuQqZEnPWRSQTupJhJuH1UJmm4xU7IuWn3MS4dZJwerR5uRtFPJgENZwFYlyq0xqlGBejTBZHUT9ajA6OQ2JmsFqlGFcruCrjRG4wr3jStsVE3pcGWBqjbYrIGUBIaKcMMgwWWZwtJlfSxevYj+oQEGlx9EfnAR2aGDkCuHuaJ5uAykuQOPs4atFwN4u5z3Vh+KZNxCZb7xySUbEnraIHiTBJaZtSD4Sn7BoI4RngC2o4qfgB3FAQB2jCHWtcsz/wL7IvotBIGMgU1SSJ8UqWqkW31YbdA/xK8dKWpULllZTqp9YfwxWMtMzJ4UkCm5Hxkq1uOq9FD97xIqviI8eDGaOiTvAU7M8QHVM4HacN48M0AQhFQstSlZHrcGQO7aGY0gDwY4MM4C1na8VMlxUwz7h7FPVzOGB+APaCfoBTGD2zrGrHVJWwvUzjNoNx05IXy1uQishJjNLYzmBL8uYYsxbF3BTEYsr1aMYKcF6s3Ni7iKewvhC0Eexu3P47EZ1hVOSMdYC/69HTlNAgMBTWJ6xrZisCoaD9ofcow/mCDZbqVqxhsPsrXGmDa4v+P9eE42q2AG2KnE4NrYgJM1hAJsjfOLiM0Y5oAxM8/7d/nd285cwwFK1j1xhAObdvVIlSkgm210P+ZZbh6EkcQ+a6W2oeDFs/fjGVfsffywwH5+Hgk05yqM2wp6cBA0HYGmWzDjLViV8Pwp6bVUJHwIayAFQRJ7so0rjXE0j0lcAUxCAv2Egb9MCqQeVHDn3qs28GcBitYB3n9OCgTP5JQExLSAKMcwo02Y7XVM17dRjSqM9M7nYRwDSVhKCP1MAWCD6JwEFq5cgLrieogsh50W6B0+CVpYBi0egBgswqZDno/Hx18zI08k1q0FiNcCs8KrfcxicRKxvCfxdYAD3Wy0jmmtLXQNId25F7xNVkuwzkrBAT3RvsI6TDR9vBXxPR/mGwzsZFkKYwV6lpysvoAxFmNJSIhZY0U/QaoktosKvVRiUurAAgS4X3s50KV+gmGeNAwzp+ZhrXXz1j6ovwBaWIbNnAefl0d1tggiaUC/+Pw2p9attzzgl/WCVL0lycoXPiJlBX9eSOz0ojfw8x0FiLoZYySfMz8/bckcd0JYAykd4Gf42DUsEiJIofmeoQbc7nqhey8/U7el3bSX95wW0OkEuu7DaNMqqEoVX69cNf7iXtYzlyLcm6l0gJ8rrIJXovDroLpqSTQ/UjGfW81jHvOIQ+72vN1DfC54XT2a4gUveMG+bWs/5DgBZuT+xm/8xr5IS+5nPNZB/oczXvCCF+z5Wgoh8F//63/dd2Btv7e33zEcDvG85z1vX7Z1yy234Pd///fx/Oc/H1deeSV+6Id+aF+2eynjsfFEIWoBCDuS9QGMazNT/Hf5
N58KRQpWONbfjF2FBVlnuzsAv1CZHm2lk3jyMoF+/1ypKSGTFFZriHwAW5cs7+nkPnd4nfkFpZROLkbuWnHqvwOlgkyXVVmQTIRMeRHaYVyJuoRNHF3fVeGKqoDNFzlhU43ZnyZfnL3PuoSoC7fAdck0ByJYt0+b5LDZQuPjF+RnKEhP1QaQaR+oHAB2DrmnOM6Go8RDmP+cT8ZJz+YsC15cepamqSGqaWCvUbEFKkcoPvG3yD7vKTtAPxqtwtz5d0iufTxM0mt7GjqAVSaNJIIwmqvdN9gfzmyvu+tvWP7VSUnp7W1UowKj46uoRwXKzTHqooIuNcpRhXpSo1gvUGxMUY4rPOSkpI4X9QWzyjYqg6VE4qbLhxhe1sfwigUs3XAE2fICFq69DDR08lMrh9mrxDNGvTQsyQYUjlgcMZjnC8N9gqC5fRwTLPKK8x47DAIyUKQNWmxdISI5JHAiSkS/gabqPPZ9PB+PxqaInY9FZkOwh5yEkCmESliWMs1ZaosImTEwZc1+PUUJlfO4c9UxYOCYCdu1CUzLiwkCcCCVWEoklhLCoJ8gGSRQeQKVO6mpVIF282AKLNGax9VaNBJ6XVZmlDQX8XjotyOcDJIgJ4PKY46QClKmKA1fG38thatu99ddN+lvLsSwIhRksPcVWgCCbf/bOia+rsz2MwCCf5NLdlnRHicEGmkpn4TzLG8/5gc5VA/4jbf4ft1ahy0LmNEmyq0xJpuPvLznPOZxsWGAwObzssbdYBl0HkdpFjQUj/PCtl5rFXy0xhQAwnIRVBfAA8+ndvjDAq05WKsIoZP0DvuNmX9Ay//PEgCr+N5WCsKeI5nTBUS9N69MmcEYgX5eojoGGSv3IElJQNoaoixQJkNmpsndgTcLoHK6q7u1UFsujCk1g3+5Ym/gaW3RTxio2S55HEzdRoRAy+/vXLEb4Ljr59x1oZKBLptkwR8aAKzKUKUHkdlj0KcfgBltAiShNo7CDA9Dp/32OO0YP4nKURuBzWmNjWmN7egZOkwBqZhFtJQlGGYNmyh1AJ6Xf4bzhJYC0NT2gk6c52Eq+dyxTPoIVI5QrZ1EsboRCqq2z+HTu5QQFpdy9A/1sHJiG2uVwbX9BMs3HAJd9XguVKkLqM1VnlsdvAImW4DNhrDZgDdimr4X5qZeUn1G0iCsC2Lgr8PGF1I5FqsOAJIQM47FMqsMpmbmHfGzWVn+qoYDjg2C1HoAkj2gBRGerztaa02jjuLmHjIpMUhSkOA5QkKAohSTWmNYGQxTiUIb9BOJcaWxPq4wKTXKWqOMrkeqeB6w3E8xzBWGKTMHibgPVMYiVxls2oMwNeTKEdiyAOWDIGPZgH4Ry28W0897picpQArGr7O8zH3EnLTu3pCk+PwJp3Aa3V3G2gBOSl8cEZ9Ux5i2vniiI88azq0xENID2TZI/ydSIFMSlWYA0J8rf+2s8+izRjt5z3bfsMagLtjnj5IE1XQJupdA1wbaFWulTsozk4RhKpmBm0mkspH5TKUA1UXwPhdOyjj0icD0m+EPP495zGMe85jH50DcdNNNl7oJ89jH2I8Cgr0wSOfRxHOf+1z8xm/8Br77u78bl19+OV74whde6ibtKR7loJ/XkKkhpApslJiB16oAjxNCLqxxLBRjeNHrFkysWNNJELW/uHv1uIsgLxV/plNRHlrjASy/SLPM/BPWAOWUpT1r9ugI4QE+xQCDSNJGKqabWPPHQYqBKu8lETGvmtNqmnZ1AFUIYknDfCEks4RnBu4SZngYxhreR81SSL59ZuFIOL910sdUWxQ1L6pJCCji86UNUMJCSIU0G7Z9Rs4jRpXB/Rsl+75olsxZzhPcvLITOPRsoEYKxwOdY2bdldtAOYXZXg+STiLNYY3G1t33YPXvP47p+i/jzJ0nMd2cQuUKy9cv4cAX3IDhiQcg0hx6ewt1UaIeFzCuItxq9gqpiynqUQFd1ai2xlwlW/J7AFAXNapJjenmFMXaFOWo
xLEJg3mnpjUm2qKYZWCzx/gXCykuX85x6PMOYnBkiN6RZQyvOozswDLUFddDLh2EWDzITEeVN4CyZ43uwuzzyeXYV63x44mBv3YY25aH7B5yk1iK9mc5T6UNI0Y+sekBP+VzK57Z1tpgA1LGbQZ4JOLEoYLKhtzXSbHUFinQQsH3qzHoGQ2ZpzDaIF0cQ6YS6SDFYHOKxWPb2Cw1TpcaZ0qN7dqcNzibOybm4UxiJZFYySSGlw2QLaYYHBkgX+kjXegjW1lAMsiRLQ8hh0NQPmgKBsKJ8smVaJzrJuo90Of8s7wErfcpbYX3ufJMYufLmDpAWApmYZbaAbeI5VS9PPNOiTUG4Pz+65njbFxYwQzHFFISKqYQQnvwwjTYBNz1lMTSsJ4RkkkBaOeHWY0h6gpUjpjlscXjgS1GmJ5eRT0uUKxtoVjdxMalAP12SULu6/bn8ZgM46Ssbbjf+HWvcOBZsx4oMV3EJwb8iOc5woDnJzRjXuULCtxvD/7Nmj8JD6q5pDZn6mfMwZx8p3XMfAAQHVbKruGKsATAiXghIonDnc8yZu+1ZahFJcLDK4Ar7j0rk7APz4LiAgbFcuqizY7aLRJqrgPAfwshIGGRSsKoMticGjROqsx4Vj6BjwZ01NZiOZMYJHReRS87wtR8fn1B1wyp88oAKVEA/oQueT4VMeaFNUjHqzzX6nMhmVAp9MJlgMoh6gK1zFE59lZCAqnz9gXQYg9NaxO8/nIwaDdImU3US0Rg//k+LSWDKcmME+DPswAYnNU1xHQEUWzBbq/DbDHLr9wuWUb7HI/t7drgmusWcdUzbsZXnZ5gbb3A0kKKlc+7Dnrxcj7uegp1xQ0Q/SFMvgTbW+bnp1PegKghtGqkcV0IxZ7HQqWA0bCGuDgwSR0INXs+FtYk0rPE3Jwnet7H6wFhLTMDweCbcedJ8QSafXlJwKJRXBG7wMPhDm4VCxBgKl43SAOajmBVjX6SQ2aE0gikFQPYC6nFUqYwrjT6icS0Nu5/g8oYTBwI7IEnSQKDVEX9RSKhZo5nVQqb9LmCfujmNf1IfswpqwTmbrxe9GMZEOa/LaDPr5dmMJ79eZZEfE7cOBx2K5p7V1uwB6ogwKrAzoTRgKy5CM2zaP0aLAL+pJtTJVJAGb5mqSTk0qJy56WoG+CPSEBE94Y1s4vTTF2hmnh5T4O6Ym9Ff94T4m0PM4WFVKGfMgifK4GeIvSlZfWU6RaEWzOGeaVTlwhKJ1uPvIrCfG41j3nMYx7zmMc85vHYjltvvRXHjh3DS17yEhw+fBhf+7Vfe6mbdNHxKAf9fOU3GJwhNMkfX30Ye7sA7eSQIAgQYDUsZ6RCEqnlFdNNTl1s+O35RWArkSRCggxGthNggkBJClslsI7xZ41uy3sqx9zzifXuQhTg4yIFqDx417R8t8IxRov77vG6xWqczzgb4AegJcckAMDLsZDixJBbyLGEp0XpvFWEsMhBwXPGuKp1BgPP3XV1xB6b1Baf2SwwrY2rdDe4fCHDVQssUcp+b6Ll8xj88bTzS3GAn1k/DTsZQa+dRHX6BLYeOIF0kSuvR0dP4fQdx/GZ//cE/ubMBJuuuvgZB3p4cqmx9cAJqDwNkpvFWgFdGhhtMN2cwlQG5XaFuqhRVzpIb8XHNNEGhbE444Ch/fSB2y0OpRJHBimGlw0wODJEfnARvYNLyJYXgvyRWFiByRZgsgWWe1V5C7zxJ9YnzuJWG4sA+HlJMv+3/2qXkRk+E0l6AkA3/WvATAjj+pSwoi3T6aLF3u2CXR1Wot9f8LZy45Bw2UOlch6DEk7Oyd6A79uygJhOkJJErygh0wRGG5AUSIfcF9PNEulGgYEkjLTBmZL7wUTzveGPj8AJm5QEek4qbKgIh1KJ4TBFvpRhcGSAZJCgd2iIdKGPdHHAYF+eNoBfloeigZBssM73BYAQDLA3FenRWGraPkzhe5XzuDQ6VMALV+kevhttJ1E5yIrgpdoFe70Po/dU
9YBfkHnywGPsRdQFAoyG9V6nMnWSZAJw8lagNnrRyLkx4JeQaBLbTrJY1AUz+yYjmPEmzHgTensbxeomyq0RitVNjE+PsLkxwTzm8WgJHucaRm2X/RwDf9472d9txqIN7AkCYNyco83yDp8JO97J7ttxL4fXPdIVNVxQkyC3AjuUFmYVfu0Grnngz4NYmDEfjI8xYjSHwqv4O1K1P482kBTv93xSvv47svMs8wxM6TDHws1BJAkoJ48sqVFP8EVWCRFkTwTP4rOFn1vFbZE+IV9P+dzFhVl+3mhtAwA5RQxfBAan+ACAiyocg8cXtYV5q65hiMGbqfZ+vvzsNdYikYRcMlvIWMtUKRcJNZKFwp0735f981+63921QgBUwOCx0Dz+i7qAKUZcxFVMYTUzoc6HKdk/1MfK46/Bgcfdi/TYNpJBguzQQdQJz5mlSiEXDvBcKhvCpD1Apgz2hOK+jtdjUC+h8LcAEDy/zyVN6Nc+7hrGPputO6fF0DWBrScFM9N8kY4VOwuyfBjH3jdhHgUQEaxpQ4MM8tQAleH1LO2762hQG4GEmLGauBM/ldymXBkUtUauWBI0SBELEQC/JCps4PmkaNZRKoXVOfftmLUb1Fl2AfDC52Sz9hIzxr/4XILnNH6+S9zQljeiVxwAHDgJhHaFdatmP+ZGfr5hTndZ1kKQk7UV0AKQZEHUSOLy/cKgXwz4nSt0OYEuJ6jLGro2MEFelWVYEzenyhSv8XxxVSoFA37VhNnAns1X1wje9v53VcJsb5x3m+Yxj3nMYx7zmMc85jGP840f+7Efw0MPPYRv/dZvxYc+9CF86Zd+6aVu0kXFoxv0CwlfB5Dp6PXo/d2YePHiS5Bi8I8AaLNTzqrLeOtux4N0LunDRDEHN1j2q4Fh74V65er2os/UkKNVbrIgeMMx61gzVmeczE7qkAghawOrz8rU/U4aZpVjDsb+OUFa0ctrusW6T5zbaEFqd1uc7jGsynf4kcDUoOkIGRXISIHSXmDkeQVCRb6y1S3Iz2PtuTHV2CoNNqY1prWBFCxt5f1etlZrPLgxQSYJuZL4l1cvBsmrYUJcXez8umj7FMz6adSrx1CfPIrp+ja2j55yXnkFNj+ziXJUohpVQdopjRbIHz0zwUff/Yn9O5GPQBCAL1zKcHU/weLVC1i+bgn5Sh8L117mAKQ+RH8BojeAXFiBSYew+SJsNoQGoYzASAICm66bAGoBdtYGbzf/dwwAmXbaaYecJ8uENixBfwk0GlaKdWCOAH+A4NlkaNhtUfj73ldWe/DZuKSpbzfAgLQmxz5I+6zLLVP2uUl6UPkANFiEnYxAw2X0xpvoX34AxeomqtEEC1evoxoVGJ+eYLo5RTWqUGwwGFxXOkjd+pBCIEslKCHkSxlUrpCv5MgWM2SLOfKDi0gGPWTLQySDHLLPHjUiyyEcw08kKUTe5+KBNG8KB4RoEkiIQFGgKaTwzD5TA1UJW5cwJcsRW+1ksJzPqEgduJjVgE4hZAVralhdgXQNkgpK5dBOWrmd6BLhGjFLpGoDfR2vlyC/GZ0rnxy1Sc7yqzIBpX1URsBYA23YZ9A4uUIP+qWSvZskmGmAuuCk9HQTdrwNvXoMZrQFvXYSo+NnUG6OMDp6GpO1CSarY2yfGGNj6xJIUEVJ34dt+/N4TAb7pDIObuEQFaA1nxKCWF4ODfPZF1gwW6iR8RZe0jJWIOj+9hHN23Z4IzvWIIQr0HIFW17202bD1vasqUFeFg7tcSu85j/s5kaWJKyfeGixY77kx0f/HSsIiH3m/LaJ2oCkrtuex9YEZtF+hX/exYCTsRYnRiWMsbjpQB9EFtYKnNiucKao0E8YAKp0jSOD81sSnCk0tkqNlASUFBgmhEGSMvtNbwFVAVk4Bo4gmP4KoFJkggDLc1aKFBRQO3nPNOPzU5WspLB2kn3QegPI3jGYfAGmtwKA+9xaocPcoJ94FiOwlCtUJkVRmzDf8Ow/DyfVhvu2
AVhG1dQQ5dix18rmevt+IZNwPKKeMktxOoLd3oDZWkc9KmDKGkIKpP0EK1ONRaVD4Vc3rr9uCZc95XHIv/jLcfldD2L7wZMw2kAevgrrlcG0ZtD98KEbed0AYjUEbdve4l5mOzChTGAixUw/ZvmlXHyjklB8CMDdC22gPhRaWdsCPFsRFwdRjdQB26lX8heAsSJIdvtrwVisBVmW1tYGqD1oKFNeP9VTpyQAvjYA74sKVnapJkhkApX2YROFUlukkuf4CfHvYSWZ6acNxpWGsUClmz7BICEDxLH8qLVAbQGV9rn/BgZyBJydDbyLmH6hyLMLtPr5SfRZYS2vG0UNKVOARIsNHEcMlDahQOTOoWUvZlG78c/5NYdr7NrAXpgs8ZmQgLYCuZIwxt0zjumXKoIgwXOx8wD/qjHf/+V4A/VChtp5bEoS6CcSw0xhKVfoJ4RMCQxTQk8JkPM/F9UE2D4DW4xhJiO2ugg2FyYw/ertS6GiMJ9bzWMe85jHPOYxj3k81kMIgf/8n/8zjh8/jq//+q/HRz/60UelrO5jAvTb8Vrnvd2S+OFzMavPzljMCApMQp9wmvkZ0SyOGbxrtm9FA/qFBXTUvh1yL65N1iW3bF0GhgrgKjw7sjEhGeWqf1vyWh7s624/XszH78fn8mGOWOoJAHoLfRQaqHRTkQs0Fa4poTmXZwlykoAJEapIt89YGwCFSlsMU8JKT7lFp1t8F5uQozOgYgN6aw3TB+7C9PhxjI+vYuuBk5isTbB+7zqqSY1qVGF9UmGzNrh3VO2a5Hm0xeW5wnXLOQZH+ugf6iNbzJEu9NG//ACShSGDR70B+5yQDD8QtKskp7+7Yjkyz0yA831hhglXi4dkERqftyZx1JYDjcFBv/smueOhQIc6EuA4X61EtfDJLmvayWlBLYafByR5H83f2nLlOgwn7FIv26v7gekhpYRJc4AI1GPgTeUpqlEBmSaoRhMkg22UmxOUowr5xhTWGFSTGrbD6hRSIOkpUCKRDlOoXIXrpAY58pUFqH6OdHEQQD4aLHDyz+1bqATIei6xmbbHD598iHxbdiTOXbIxyA+7hIz3Hw3JQ5989MUMggDtroJw0soApBvHvNSa7y+BHeKZfaZuAL8APnqwQLdBA1fR7sc7m7iCB5UiIYU6yN256+faLInHHQn2jwmAX7nNyV4n62nGW5iub2O6voVyc4zJGoO2k7UCk/UC62UkyzyPeXyWRzx+t55ms5LaO77LhTlBIj0GELrRBf467KEgSxc1RohmbPaKBcy4cfMdn+gmnrf5+ReP6cxcDNuPth3mQpQ03wEXb/mCBxvmc2LnvCoGKn3bfbuic8HHUQNWnU/t0nnHrG0JgcBkmhqLjaLGMJNYdKCmcf5cAAMLpbYY1xY9tZsII4e27AsI5cBhWEiy6CU5bJlCoISoqqYQgyj4RvugYguYbMFsr4fnhegNIEgyk2cy4ucJSWaO+3MaMQgHCaHyUrTWOo9JgUQS+olEpiSMsagMA6wePDG+eMZE3oUehNElhPeNBhhg8udXEAD2WxYVA3+mLHh+LgmUKmSLGfqHeriiMqisxUbVsPU9U38lISzfsIzB5QcghsvoH1mBLkpYbUALy6iNxaQ2EAIYJwlMbaEdmw5wXdvfY3Hxnr/uvtBGSgfISQf08TNdkHR9dxfwKl47WAlhMHut5cEwgwDwKyIoahh82jK4VxkLQgf4c5KPvrhLWzQyr5H3t+9H/j5uJoOuIEmlyFTO/s0a0FZAagCQXOynKUiIV4ZQ6eZYGEAVrhCsKThjxrLitZXNGs+9eOzpsuZiP1H/srdc8ExfP6a5ooXW5yNw0Tpvv+7YGc9t4zkwAOeJDBghGMyTKX/fSbAK69eVTREHkZcyZ09G0jzfIgfaeklOSXxfeYnPs7JFXdTFNurJCHXFaib+eickkEtCT0lkiv/OnIefcB6ZmI6dXHoD+sGx/Hgc0DBlhXLulzyPecxjHvOYxzzm
MY+HKaSU+N3f/V08+9nPxrOf/Wx89KMfxZEjRy51sy4oHt2gH9BOQrnfLfm5szHVZoBawlLDcAGaZM7ZgD8icNoD8My+OJllpUumt/ZjQmJB+Epa//loMSmMbpJPum7LdnblpDw7J16we9YO3GLbRn51QEtuzwpq+ZCofWb5nTWsARVbnDBTOXq9JUwqV8nswD5Fgr1USk4AzfKMiUMR0FMMQEkSOD0ug7QPwK9linDtUo4rFzIs2gJiPIFVGeTWSdhTD2D0T3+H7aOncfJjn8b6/RvY/MwW7tkucabUODGd7WfxWIjr+wmu7yc4/PmHkA4SpMMU6SL7weVXXwcaLoMWluHlGzmhp0KCtxueqdWVMpMiAoXAVd8e7IvDJ4i6jD4P9HlmGPv82RijCkkNEgKJRNClM8Rykn57LPflEzo7q94toiQLmsRhDDYKAeeFJQAYQBKkzKFyuMRgCltmENkCZH8IlFPQ0kGYrTWoYox0eRV6PGZvoE2WDKtGBUxVw5S1Yw/7phEnGhMFmSqoQQ6VZ0gGOVSfgUQ5XABUAhosMsDnWBNQCSjrwcokMIND8UDnvuekLVrA344Ed82AGydk/I/ZWbFsTEieCVMDmgI4yIwa7VgKnGwLwIHb90x2nwP7WgCgZ/d4SSjXT4WTGbW6glUZSBBskiGVeehnwnFFfSIwIQGhpxDlBDQdBcBPrx6HmYxQnT7BQN/qBiYn1zHdLDA6OcJ0c4rtk2OcmmqcqR/5sUJITvo+nNufx2M7jLUBKImZvvB/d8YKG2eoO4DezDoQP9/x/4IaUCcC78Nn0ZnvORnp1iarAlRsNPOiqFBKeNayH7eqstWPY4n08B3h+OVWAGjmX+Fzbt4lDFiJQdeBeRiOPZqjAWgkMOVOT+FZ5+piwcGE2CNrIVWoTIX71ie4ajHDoZ5CIpmxUxkDEsy+GVWcmL98mDT+tjNCG2BaGxjLz9WpL9DKCHnS4zZPR6BqAjMZgaZjlqBP+mGOro/fBztmhjR/WYIGC46h5qTsAX6muL+F0RD1FDrhIqDDfYWiNtiuTGD9W7DM41KeBJBs7FlGgkd3bdlfEORBKF+F5HwGq0aO2XdpXwwEAKKc8DNmOmFA0hhQopAuDDC4bIHlugcpspMjlKMKxaRGaXhuUhqLI4MUh554DZIrb4DpLSG/7FDwbKbFAyi1xXapm8I31xblZBGlEG0ZUv88jgqQ+AsJhJf4VImTSk2ZURn39VjmP167kIIwhtm0HoTz+433LwionWSqTJFKCnM4bRqFBD9na7HTjEXlwUzBgJ3y0pqkIDCNZLzLpn06hXASpFbngK7Ry4ZhvKrJQoiG9Zc5udfCSfwbdz0AODCraZKFk7+1Tqo9ntd2CxXi89FlJgPtc2t5Thj8h+OfsC3BYwgw02PeugI0Ly/f9bMmwSxK732cSJZVF7pkiVQr2tcZjdKFFczclQbIlUQiKfjvZY7tR4ogJZ0X6GeNQTXeQFVUqMvmGDPlQXken3qKoMptBvwmG7BbZ2C21niOVYygt7ehyxqmqtn73DCIaMoak/Ejr6Iwn1vNYx7zmMc85jGPeXzuRJ7n+OM//mN8xVd8Bb7+678eH/rQhzAcnh2L+GyKxw7oB+yo3G597HzAP19liXYiqhvec6ElhQnsYNJ11rUtTxwIlpmbxVZstdV5M3DyyrS+H7bTAfrCNmYs7GK2TNif205lGLDg9vLvVBIz62adh+l2SMpZlbW8+y4krEyB3hJ0yr54NhtCW67i9q3MvPxSVQTfrrjq9lxBArh6MQ+LfCkY8FvJJYYJIUMFdfIemLWTqO75J6zf/xC2HjiBz/z1fdg4uoV/3Jji1LRGtQuD7bEUh1KJLzzQw+LVC1i5+TCDSHka/OBEn4EkABCHroVNe062NYNNck5EBF9GBPlMgRmJY4A9K4UH1QAjfPKH3/eSnrNYfXUE9FXa/91sWzrsLSFCIhupKUA4dgB/TrvkthQCQqgWONll
GPJ2G3nQWIoUaL6nrQcAAcic5ZpUCqsy9gNKMoisYl+9vA9bjBmUm4wg++vIlscB9LPauIRHI5kpiJkFMlEQkpAMclCiWLozzSFU2gB8jtUHlUDkTrJK5Y2cnZed61b9e3aLNRCCfwfmi6/CFwQoBWEck0ClvC8fztNGqJSZB54N6pNgQJBVtdZAWOU6QrR9n2z1IIAH+6wJSXbosnltOmG2yAypUZE20qaGGGRQObF/kks6AlxkkBKPc1RsQExHwPoJ6K116I1V6LWTKLfGmJxcQzUqMF3fxujkNsrtEsVagXJUYaItCmNRzn4szWMen7URA3gUFwr5mPHs3TFtmsUimvV6AAZN4yE2K0TDrvP+bsKMW6BHa9zoFG/wjw2Any1GsA4Mgco6n3NtCoVkFAoZdgJ+vgghluxzXl5Jf0cxF0jCytnTbwsGpLzktSRxVgDubJFKgQM9iWFKuLxO8Jf3rGGr1DDW4sggwYGewv0bUyQksNJTYIe0c4e2LJNeFIbl0FOFXAmktUAuFVAaiPGGY8FVMKPNUAhiihHsZITpiWPQ7vmm8hRqkCMpRg6okuG3UAkXbmyuQiwYCJLoJyP0VAoqJkiSDHkvx+ZUozR87tjz1brnOeDBWhIIkqQpCSRSIJUMoniGOQQ1fs6AKzrRADX+4KIuIHQF44pURN5HduQI0sUBkkGOcnMcPF3rokS5XcFUGkZb6FKjf6iPpRuuAC0s83W68YlIrhiBlg5CL16BfkLYKglFXWO9qEPR0sBNxjPLzohhzeNApJhhz8/cBDB87EKlYW4AUrBJxsU+vvAnLg4EQL7YUHq7grp9b0YFli0Gnq6Rpn1YIhCASsD5Llpo4+FTDwDy9fGFV9oAlbEQ5Hy7RRXmC8L5s9uar02YgwANyCYVUpnCSoHKzeFqI5Bai0qyjGgqCaU2qDQzQH20WKDRWggkeO7mIjAVo/vBz2GDigvtPlb6OUqrSMk29grMKLUB8GvGjKjAEzwfZtWSnfNP6QrbrHCKJ2DWH/x1ipsWfceC/Y0T4nl0LglTReilEr1UIlUElRBUSpBZDlKzixbiKDZOIV+5DOW0H5h+LO8psZhKLKXEc6zRKqgcwW6uMtg32sT0xDHUo4JtFApWvLDaBIDcVDWK6VxFYR7zmMc8LlX84z/+I774i7/4UjdjHvOYxzwe9lheXsYHPvABPOMZz8C3fuu34v3vfz/S9Nxz4c+GePSDfkCb2deVVEIEgIUXTPt7/jMXACK1G9CwgYxjHRmXtBFuIem9zDzw52U74QHE7vYAwNTNgrvbrnjxLSMT96g98bYC2OIZNp1t+orRuGqUGXJdcZmoCboKHoM7/CouJARxtb5qEl4kGOgrXbIAumS/GLM7Y6Z7HpmpY9kHTArkUobq11QK9JRArzgDMRmDyhHq+z6J6qF7cfT/94/YuH8dm5/ZxMfu38DRSY2ia0T3GI1FRbjeefgNrxiif2QlMMfS5UVQfyEASCIfQPdXYLJBSJZ5wE/POF07bEBieSif7NglYv+ScwF+plv2DIDYRAYkOFkGcskSwYlqYwFDDeDowUoA4R4Om4T7nOOEcTpYBKDcy9F6gBJOUtQSQaq8JVtlqWQWJBD8d6ASBqdUAtmvoPIxVzVXjaxZYPk5pp9I0sgzL3dJbAb7fFKSX1N8rwmCVVmTSIplPaPwclAe/BOmBsglwmPpKWuAlM+zB9gABtnCNr0Mn6DZY7IBs2q8ZJdL0HvPLu4AkZSnB/y8lGcY3zSM836x04J/e9YIAKE1n28AQqacqHd+TalMQ1+TtoYoxhDVhKvPx5vQq8eht9ag189gfGod9WiCyeom6lGB6WaBcpt9PXVpoJ2k2zzm8WgLIRoA72wzovPq3eeaU3XBv1lAoR+bYjlC0YxNQlcR8Gfa29rRaD9GsEeUIMljRDz+ub9FPI+yZsf+A5Bnm/03hVzeczmZeTxnOy86kiAXokOPv4BQgtlhPclznkxRYNf3
FKf7c0kMfrlnZfd5B/DnY4/eUlsU2mB7WjspTRuKXwAGNkwx4vG3rmAKlurUW2vQ29uBGe0Z7OliPyT0ZapC8QrS3M35EpjJCDLNIdIhRDVhX72a1TJURkilgnZzVuvYe8odU+Ivo5crFAz4KRIOIATgiv0E0J5nOilq/5wTpobQUSFJws9tGizApjl6aY5smVn62fICdDFt2PoOsMhWhlAHDkHkAxhSkAevBKyB6S3B9JYADYxLje2yRqYkpIBjXIm2v5uuAxg2M5y3H3eGxP2oljWAlW3Az2+J1yvE8xVrABsB5kCryFK4uRyzIRV7IpKCJOWUESxIC1hhd/g6B+UGXzxlBbQFlL/X4rVcBGqyCoorABIE6BK2TiAApCp3/dhAEoN40vktl5qvNQkL1G2PZG6PdYCaCAoT8eQ19qCOQwgvFUsg2mU+G69V3e8wb3H/+zkSz68iJqUBIEyrGI2vGc97Q5GdA1FZZZ7nuZIE92FSoYgLbj+Ak/OEk/kUNhwLkfdSZnnP1LH8yDH9zk/ic+QkPjW0sVAO9MulQKYEA37TLdB0C9bJpeutNdjxFqqtMapREVQvPMuv6Q8G1WR6zjbMYx7zmMc89j/uu+8+/MRP/AT+6I/+6FI3ZR7zmMc8HpG4+uqr8cEPfhBf/uVfjpe97GV4xzveAXoUeDE/ukE/nzTxYF9cwWgwW7Imil3Btvj/+LWYRRd9xsut1GaW3IoNrKHEm6D7vBJvFEDDarLez0y4ymMwEy5ejAs4YKK7eAvV8tH2W9I/0Xlyi0wL8KIeFFXbtsGLXUPQDtmq/YiE+NhEWYCSPi9mNSdaoEvYfGGHpJePIpJDkoLZgoOEkEoBOV6DKDZZcmr1IZiNVYzvuRPrdz2IjXtP4qG/P46NrRKfHpU4XnxusPp8EIDPX8xwTU9h6aoFHHjcCgaXH8DCNUeQXHYV5OGrgCseB6tSiMkGTH8FengY25VBVTHwlpCBoqYvcx/lP7ylohRoJ1KBkGzS1stAMYhnASfZxR+P8yz+z1jK0wN+MZvTA4AVACMsdMXV/0ScRFMkMNUIjAAvnaXIgcbk29wGAVluCZAei3JtjdvmxwDtkjLCeMklBSkVZC9lRoHKYVQKpCUoSUFVCTPeBE1GsGUBWYxhteYK96iowUtVBkZE3g/gHzx7JUmDjJzx8p0e4PPJaEEN66TL8gNa44aN2DTWfaaVaPdfPUtCuzVumfb34iSY30aLIO0r+3XDugi+Uc7jz1Zl4y9YV7AuuR88BssCtiwgJiNQXUJmPU7ikgR85bo1ENMRRLENs7mK8vgDMNvr2HrgBMqtEaZr2yi3xqgnNcpRBV1q1EWNelJDVxrGdYaUBAZSQF9MMcleI074Plzbn8djMqRwP27OEu7BeJ4hGp9THwQ0cxwLeE8wWNEUEEQRg2zhNZKNz7FCi8UTM11a2/JjReT3FsY2cp5kEUjoJRmbhjs2lPftckw9/12WOYy2HcAOV/bhxyBBsAQAEjYbMovKs6gB9hrsJv5nBLlzUzmvuotG/aLIpcBzH3cAhbYYVyawexZzd+xCYCljqb1ugY62wF9/ZgvjSiOXhEIb1Nqil7BM30ovwUJKWMwkaP0h0GQN1YkHoTdWMT2zjnJzDF1MMVndhC5KmKqGLjWEFFA5j7mevW6NgShrKG34PKiUpT+dT10sKWlVHgrOEhKwipldvk/ynJt/8zG6Zzyx9LsSgKiLILPqnym2KllW1F2LsF8TMerAhTqiL4H+AmjpYHPCnPdYb2MVtixgttabZxBJUH8B6c1fBLNyJczgIEb9I5BCoLfxICZI8MnTY7zvn45hu6hx67+8DgBQGQNtJDS5eQQc2O38bbl9EVhJEoIcUE0yFGsFVQaV8/NOpqjtTjBLCPbak25+z8y6yIPOzQuC8oYQENLNESoCqIZK+2Hd1PX5s7a5Jj48iAUIx+iVDdstHiOMBqrSvV86oEw4gNI672+FNFFhXlkbi8pY
KLKQwqAyntlng4yqdX1dOKYfFz428zyE9u1cPwonqRmPgf7QurL2fg4lIpYmv0HN2OcKqwDGW+P5sj9P8bowDmP5XLNfNkLFqfTFpp2xTAgK7ED23ONr5Jl+/URimCdY7tfIegmqqUaSD6Gy3oy974zp9hnUlYEkgV4qsdJTWMol+tUW1MZDwGQL9amjsMUIZmsd5doa6lGB8ck16KJEuTlGXVQw2jYsP21BUmAaFcM9YjGfW81jHvOYB/79v//3OHXq1KVuxjzmMY95PKLx+Z//+Xj/+9+PZz3rWbjyyivxhje84VI36Zzx6Ab9LjZEJ/nUYca15DKBnWBfh1HnF16BLWeahSF/z0kMGRtYRMDsCvq4yroBDZpqVF/NyV5TDVsw7GvW8fpEuvfD8tuKjrObez/v8OAB7YEpGW8u/oe4Kl2Q0wfyiYa6bNiPHcAx3nsqBYSpQZMNliI9/QDqU0dRrp3E5t0PYHJqDat3nsDaPes4dXqMu7ZLbFTmc4bV5+PKXOFwJnH9gR6Glw0wvGKIweUHkB9cglxcZv++4TJ0wskiA8D0V7BZGoydB5B0STXp5C4Bh7sLXynt+i6JHQkQH9baIFPUAIF29m/H8vMRWHYhy7EzOK/jGydQgT2JEhCMq0B3b7n7kBMnwvkMQuxM5sAlxoTlW4DBPw+a+7Yh/B+fC5LEDDtjAOvYd9YCsoSUEkalDFilOWxdsVRlN6kXpOkahp9I8xbY58GsIOHlvHKsjLxAsbOtAIKvDKgB9ywAmMRdND7ZvjrdXYSdJz8qNkAs0Sma5GH4HP8B0d2Of8+PYTF4uEtYz/KrK070u/MG5xNljIZNUoj+BOQTOe47ZmMVZpulPMcPnUSxtoXR0dMoRyWmm1NUoxK6NI69YWGNDX+TJMhUomcspCDYWdTXeczjszSEkxOUUZESv0GB0ePnPV2gYAej233PgqXTdw2i5h6PE/2yU7w1q4hr1liw23wk/hy1mUSeyWVJNf7OEcjZKm4wtQNDIhDSmsbbViY87npACQCo5ofEOcYtH9pYlO58elbaxYaAk0kHUGoRntme4WetRakZxNSGPZEHCTOWjAUqbTCpNGptmT2YEA71UyxmCiu5xFImoaabkOMzqE8fg147icnJNUzXtxj0q2pUW2PoUoeEfTc824+cZLUg6cBAGRjwvhAlfuYIa6AkM8S4gMdGc2VmPfm9cSGPO5embvzTPBiiIxDBOIAv9vn259Oz2IWTTfT9xAE2ZGoIlXKBiXuGx6CfWTwC01vB2LCXYkICPSJslwZ3r47w/z24AQDopwwAbBQmSKATnywEmUhrgLrm5108R3DnzLP8ROLmAb6Pumd/7JfsQ7o7VoCBv8B27DL04zAsA87AOZ8zSQTjmJXQFpIAGO+cyxHPN0KxYXzfnYMZy33ABlYmagFBBlYaSFKQRAAaKVHNxuaoScByDw/7DX55RgBknaxDPNd0h955pPs1nhUinDs/VzQWs+/dUDRlIy95auQ+/Rh2tiIqd/78fNuD+bvNs2eG5bGZx2/byPK74jcSaHn6qZQgU5b39ED92cJUJayxSBVhkEpkktBXBNregB2t81xra51Bv8kIuigDs087L2svjWtNA/xZQ9DVxS5e5zGPeczjcyuMMfibv/kbPOMZz9jztv70T/8U//2//3c86UlP2oeWzWMe85jHoyu+7Mu+DL/3e7+Hb/mWb8EVV1yBJzzhCXjGM56BpaWlS920mfHoBv3iBBChodp0JGF2fCdOBnV/x0HRIn7WZ1zyyy+aPeDnf4ePOUDBRsmI5j008imiXRHabBeu4rv5jhRcKct+L+cBtPmKXF8RTAqgBCAVsRMd8BH5lHH96i4gDUkGPZKcmXm6DCyiPYer4ha6hIoTGwBENQHqKYRLrNl8MXzNJ7KMBajYZFm+Bz6B+tRRnPmH/w9rdx3F2j3ruPNTq3ioqPFQsb9VolxhLBxTon3WpADKiA2q7a741MMel2US
hzOFQ6nE4SsGyBYzLF69iN7BIfKDi1i49jIkS4uQBy8HrRyG7S2Ge00vXIbN0mC90NguNbS1WM4VM+tcv/FAWAx4W9Fm/HUlhgDf5/nHwIb+75m0Fpx89Aw/32/PFSZCs7S1DPJZgQpc5Z1IgUQSAO+D4lkoFlL6inQvdYS2N6EgWMFecMxsRLh/YqwnYMnuXLD3EEEmOY81OoFx0p826UGkA5Cpgf6Ywb+OPx0AZvb5hF7eb4A+mTSsvi7QJ9OmSMFYrp6ekcDyV0U6ma1WRb5sjskn/YMHY8dfhoSAlE3S2nqpXl2xlKepIVAHYLCR8oxYBXF0mDKBgeMYGNZ5RwUpMM/+K4vwnh/bhWf2+eeFMbDFCLqYYnxyHdP1LUxOrmPr2Bamm1OMToxRFzWmrt/7cVslEpQw0AcAMiXHYlHItQVVFXAGj2w4hsfDuv15PCZDUUf6MB7rgDAmeyk8Hy3vVriEvVQBzLPxttz2WvM1IsAyWy4AMUCrsMezA8M4EbN9dY0gwRlUDkT4nDC6GS+Igvcpe5Iye1nUDti0hn3+/P793MkXUYnGt9mzoYW1MKRgkx5s2oclhZFjuCRurnZ+rnk8jpaGfwohsJTJPYF+AD9fJQkn7ckxTCUqYzGtDTam/GzdKGoc6CUYLPEx+jkqCX5+LiYKS5nCVYspBglhUZSgzeOg8RrKe/4JevU4znzyPkzXt1mib1JzAr8yECQgUwkhBWSiIPMUlPL1NdpAkGH5QC9ZnebMYk/z4O/mryczoyysriGlQSopgErGSbt7AARoABEpuCBM1CWz5YzmfqvLmRLyLQYoGsDPqpyfp94XlxTLZrvnmEgHEPUUqr8Qnj1CJUBvAdWB67BRGqyPa5TaoqcEDsgU64XGn3/iBI7+8yryQYqFlFAZizERP68d+zZmHgpTN+x207TRFwb5+YElFTz8uFhPoTKNwkJ7ri/C7J9cgaGfw3WZ/eEceTlKU7d8f6VgVQW/bc34X5h3SKeqQMGD0W+0WeNZQSAP+gJ8bDEI7O7/8HW/3nHfT5McNjyzCCUstDWuoDKeg1poyyxbw12J2X/RPQS0wb8gqylEsEQQ8ExB0V4/meYcxYoFvnjAGsPIdHz8QBjXPLuaBAOnfjz0ihRAc78CDfu6fbGiYlF3H3lfbQ8iCuFlcAm5ks7XT2GQKVSZgkolZNoDqRS6LHb0hzis0RAksJArDFOFxYyQFBugrVOoTzwYJNN1VQfAjz2sPauP5dJNpaEjg2Qh7TkBx4cl5nOrecxjHo/C+J3f+R088MADewb9iqLAD/zADwAARqPRfjRtHvOYxzwedfFN3/RNeMtb3oJXvOIVuOaaa/CKV7wCP/qjP3qpmzUzHt2gH5rFoACCtx2A3YGnGZJQu7H8dlSXYyeTLpa3EuBFl7BoLVybak/b2oAFIF0KSJFfZDWLyzpKos8KAw+sNCCEiBJRFuAKWe+9pavm2FQGm+SoXeLfgyu2s89S83s95wXTOpUqDYv/XYHRi4lQwVwHdqL10j3+fQCiHIN0BbHxUHhNDw4yADkdwTz0z6iO3YeT/+dj2HzgND7zN0dx95kJ/nm7vGDpzoHzvPFV8T7Zn7oEUk8KJO63f63rFeJf86BfaSyqCACcaBOSfPHPhYKDBE4ADBUhJYGhIvQkISeBA6lElkrkKxnSQYpsMUP/UA/pYg+9w8vIVxaQH1iEOnQ5aLAAuXKEk0XghJgHiae1xXapMUwllEsieo+cWnQk32KwyFpol0ryiRbvf1mZBjRnwK/Nno09/OJz6pMbjnsGTQ0QGINusnVPNtfRVzMDgJgBb59vV+l+M9yHHZCfWTIeEGQZK6lyCOXuKV3DJjV7FpkaQuWALkEuIRkYawAnHpI0gOSWJGzSJCKhUh7HYqBP28BI9CBrPMaEc+ET/h3mq4nGCc+8tDb+27rvN34wihD6R+6AyNY5s6Yp3IhZOzOTi9Frtvm8sLYB9YwH/AxsVQWmpCkr
VOMCpqxDgslqjbooQ7KpWBujGpXYPjHCdLPEZL3AqalGYSy2axPuV3+vLypCT1sMNIN+MiWoXHHi2ne6ugbun9Vr5jGPz74I8p5dkA6xAgH/4ZPeOyIGZzwN2r8eF145GXbRmZt5mcwdTJfu+BBL1VGzT2Fq50NKzXxCEKxMITKzY3/hOwDgpAp3yAoKgojk+AR4LPZMHUuS2dsqhRYKteaxgvw80c/Fai58sAB/vhNekjAk7vcI9gEIvnz+qJtx3jo5eRGKy3JFqI3F0a0KlWmkEYepxNWLOZYyiWEqsSJLiMkG5CqrKFRrJzG+7wFM17dQOClPXWrIlEBufk4Je/bJVEEQQfZSqDzj1xIF8p5+nr2e9yF6A1DWa+aZrQMzLMFqaiiZMqBHDNx4sNUXsQSvMoGdzxq3rS4TNDD6vLedO5fWg7tOItOqLBTVAIC1Tp60LkAeNAZYajsbYLsy2HZqCTetZMjKTajVh3Dg4AHceGQIfPEVOLKYYSmT2C75uVMZg8TJNoq6kbWGqYOkqJ8feF9gqITPnWTWqVUpoHJYqXj+7+Ze4ZDP1tcEAdDt+7V1H0lXVOPOowPhpEyREAVojQuOmjmSLw5Srn4zyAP79QtJCOuASiEg7AzAxTb79PetcAWOVpgwf01JhaIlBvf4isbTnCBBqhmgjBVa+Np2TgvAcqVkuVZJoiVtH85pUDuoZwKnVggu1vLrNA9yR695cE6Kpj+HdkTFWrtKjNpOv++s2cgVu5EAEkmQQkNJgVwReqnEMFcYTRWynoLqDaDy4TlBP9UbIuspHBhmONRPsJAQaHMNepUZwXp7C9W42YZMeP5ktQ4FAcAY9UQAqJ3Mp2MLz4l+85jHPOZxzlhbW8NrXvMavPSlL93ztt761rfCuIKL8Xi85+3NYx7zmMejMaxlG46VlRU88MADePOb34wf/MEfRK93fvL3j2Q8ykE/Fx6g200yaZfFTev/s4FVEZB2LuXHmCF0to/Oeq+1KNulPd3kGksinkUyERHwR9IhhMSAHygkeTzg1wUYtWMlabdwjT3tgyk8/3OWo909fGVpvPjnN0yL+WNl4vzE3P4MM4VEuQ196qHAClJHWMavPnUUk7v+Cet3PYj7P3Q31h/YxN+cmWDtAqVgBlKgJwmHM4mUGNjzLL6e9JWwzOiRqUTSUxAkIKQIkn98epqrY40NEldcvcoSgUVtoC0C2GeAFvgHNAAh0IBYcVu4jQxGLqYSMpVIBwmSQeL+TqF6CipXSHpcZZ8u9JEMcuQHl5AtDyGXDkKuHIboDSD6Q3T9GiuXACxqg4N9hZ5iz0SfVGMQeifby/ddZoeI6G/ue3HPs55FYne/53YthjUN6Mp9dieg5c8bUZNY9e/MTGDPis6YEzfTRGCY/5+Ekz51SRtrAe2qwY0HyGTqJD9ZOg664gSbSYBKwZoawjEJfBuCbGeScSJSZS05z5Bgs7PBvhjs5+N3kGxnTGGwkv9mT54oOWbbAKDfjnSV74kV4fsM1BKkSl2FOydsu0mwlocWsPv47t/WHWkzYxwIyD+6mKJ2/jC6mKIaFSi3xtBFiclagenGFOWoxPj0BOW4wompxnZtsFFpbFQm3JMAJ9YYTBfueCx6XtZVkhsHGhDQXIJqdEG0UyJ1n7c/j8dmkOgk/3eZk/g5A6HN0PHPdQANQNDqL20grRVdWTuiIJkIgEE3ADDV2ccE2wH2YunQTgFR/JuBFO0Aueg7UTt3yAsLggDBKp5nWWq8xPw5YkJRG7wUum75FDbzTP58W/Zw90M9W8TPJc/o8vug6NnnizyEAFLJoN+ZQmNcGUxrHlcXUoWDfYWlTKKHCnLtIYjxOqoH70J98iiK1U1sHz2FuihRjwtm7jnfPiGpBeyxfCdB5RmEJAYB3WteklKoFCLrBZaf9YV5/pr5fhKdS8/0A2xgP8kIACHBhSPBW3ZWH+lGXATo/mdWnwP8nHejdcw5ACBBSJIMEKLFErQyhU0H
mNQWU20wrjSyahty6yTMdMJFVKnE5125iGtWesikwKQhojPIHi6uccdiYXzBSxwkWWLU9cnA+He/fcFf/Pw32KWvnU0VxX/EslJCYN75ebxgQNbPg8n17VgRpWH6dUqvPOjlihpBPA9qRVz4gwjg9954IFgPQCrF7HxiRjPg5yzNSbBgJRUNBMbe2YLnvZbnnsTHJcnPa5uqCNHtb9Ex+oLTlm1BtwDVfxyOQRgPtMAOkA9o95Vdi6k647uIfidEwU7CS3zmiYRKJFTagzwPXz+V9pBkCsv9BMNUsd1Cuc3y6Q7wM2XN97/7sdpADXjbpqxhnHefrgwIpqkhkbv3x4cr5nOrecxjHo+2uO2223Dq1Kl9Aele9apX4Y477sAnP/lJPPOZz9zz9uq6hlKPjZT0POYxj8+dEELgm77pm/CRj3wE73rXu3Dy5Em8/e1vxytf+cpL3bQd8egfYf1iKE4SATOTOa3vdLfRDRv5V3UYdMBO0E4INj838GDMzlVzA3rsfI3gJAz9fxHYIUnElhKtquUWi8odR9fcnRPxAlADGLiq7amFhZ7JyIp9vfzmxxFYlimBXDJDyXQ8emjmke8es9hRwjOTZMpJM/eeFACVY4h6yt+tC9hiDFsWXC26fgb3/va7MT49wsb9Gzj+wCbuG1e4b1xdQIuAQ6nEgVRiJSEcSHlxy6AZQSYySFN5oE+mEtJJ+wkpmOFzjkWbrmro0qCO5EVtlAHxlay61NCVDn973zCAwQUvKRi3xQN6qp9DpgzsCUmQiXL+OM3CmiRB5ilUnkItHwAtLIOGy5ArR4A0h036TfscsHRqXMNa4PAgwWV9BSrHsOAK99qyl6KxO3tB3Fdjabgu2CzAyRjhEnY+caqo7QMTbyu00fWX2POvy7j0ERiCklpJT79NIZgtQAYwDgiTLmvYHArfqx7QYi8Y24B6IcHalAEYzckt7yPo0k4BGOV72ydHAaF7LIeZVqGqPzBs/b3iGX6kUINcMg/Q2gQgrst+jIG7+Pr4Y/Of85+Jr1fMvqyMgTFA5RKOzFDhY8wVIZEETbyNhACA/YtSqaDSPqDLBvhD4eS6KIy9AIKcXit2S/i3gD8GAm1doRoVqEYFxsdXMV3fxvj0GJuf2cJkrcBDG1OcLjXOlBqb9bkBOg/IN1K+QJIrpIME6TBBvtKDyhWyxRwyT5F8jvmEzuPRHcKB8TsSxWdJ+PtxIozzMUMOO1/rsves2294j2arLTCQsFvDo8946bxuO0kCIm+AAg8gxL+9eoQunQdaJFtO5BiEpr1NlcKkA9i0j1oolK6Ihwsr3BzOg02O9dYtVqssodQmjL38LMU5QYfzDVePFq6Hfz6RK2bKFJA6aUNpAFMJ5JKQS8KVC+zbd7nYBp05BWydRv3g3dAbq9j89GdQrG6g3BqHeUw8/0gXBqCUn2mhLZ1kvZAElafM8ssHoN4Aor8A6i8AWY+lNP010DWE7x8+TA2hS6SUQbsCNUltF8lWYV1dtmTjW0ESlDjGnpfDVFnolwDPh5gx1wB+sf+2toCQOaTKoZ0EPm8vRQ1CWdah8GaL+ugduhnbSzfiw/et40OfOIGv+oLLsJQ1RTuVYVny2kRrEVcYF0t+A4BIUmYpqgRQkeS3coxExW0IMvMd9YTZnceBb+CCv2Y86Mihxux7XblJmXHSsgoggoSTcZ/BUmskhcH3mhEAKVZCsJavs6A2wxcOaDQ1FyMawxtrjV/CsQBLpDKFEALamjBexcoSpnNOWl0jzJNE+D8Agx5wdl6AgmxzPsNcJl7Msdcpn/jGYxlehlWq9tjnxqXAwPa//bZngda7Adm+H7vttq6DYCsKIVihIVMM+A0dcFfWBmv9BL2FIaZLhzE+9eDsfYABrMHha7C40sMVwwwHegmoOAk72oQZbUE7GU+Zpzxe9PvcbwHYskA1miBdHKNY20I9KkByk9dFpYaQhLQLAM9jHvOYx3nGxsbGZ63/0n7G3//93+Mt
b3kLgP1h5gkh8Jd/+Zd40YtehNtuu23P2/vVX/1VfP/3f/9OKep5zONhiE996lP4vM/7vEvdjM/qqKoK1lqk6U41mouJ7e1tTCYTHD58eF+299kUl19+Od75znfi1ltvxStf+Uq88Y1vxMtf/vLPukKGz67WXEzMAvy6ca6kVfR68KXoyknBgVHnCAJmol7Gv+fejxkjrc911plCCFCrtBfhodgFUUJz0STpAUSJJ15/e3afiQAU7zQTV/jHi38TJYe6+/QswL1G9xgAB97AJQO8t1pdsHznmeMwW2uoTzyI7QePY3JyDcf+/iGMTo5x7+YU945KjPTshfusIAAHUonr+wmWEsKgnyBbTCETCdVTAfBTjs2X9FTwpQkVqt4rzAFqMcDmX7fawGoDU9XQ5ewFq/epMM7E3i9yjbYwlQ7bkgH0S4IslnIeOUEyy8lo+Up73zaAF+SU5xAqAQ0WQC7ZhjR3UlBJ0yhSnPyyNlTPhwRaBDh737uzhRQI/jAWliulLQCyEJYzdiQ4Yeel0ayjyfl7x+dOpIhANVc9zgChgDYNm2FWH/Z+Kx7wM9blimChXfKGLCBsBOYb2wLcu+w3Bh2b5JFva7jnRcOO0Tbet3sNCF46FoCSaeRtJSFMm2HLknKcmPb+n16u1wOQ7XbY5u/o9vDXrpvr8vdgzOYztvFWrLQN40vsnQiwFCsZC0X++1yA4EFDKX2Si6vwLSmXx2qDDjvYQK7PxUweISVgJCwZgCS6viv+nqtGBYqNKcanJ9g8to0zpcZ94wqny51eTmeLlJhhmxOzgf34QAmza1WeIBnkSBb60JcC9BMSD6vvjHgYtz2PSx+7sFLIFWJEhKqIycws7pCY9tEF+3aZS7VHeMz8HMu5RyytczCA2989v4lKYCrteGNG2z0Y4pL1LAPZzGHIs8wEXNGGaQDNWW2M/t4PwM8fscckrBAotQ3PHRL8IZ5X8nNYGYvaNICFEEBPEZYyCVpbA7ZOQ68eh147ien6FsqtEbRj4vgio2SQIxn0gpqAn4dYw8oGHhy0xjTzJM/yI2KWn5QQSfT8i4DXmQCyMRBKwDu/zZpfSzSssFZxHzeGN+clPT1bTKYBlImBaJb95L9jVQJ/zXzRmnTy2v61UrMcjH9tqzSY1BZbpcZUGxxeynF4mGGYKZTaMOBnLBJFwbc6hGOmhlMhpTslbvyP20sNsOSBrVhlQaCZq8T3r7E77+dgq7CLykqL7WdF+F8SuXmPv07Rd2bdn57dZw0ghPMWZI/BmcWdbn8Ate5jYSmwEIFIvpgEg68OvEN0DXkuFwGizu+c7x0LARHmjEbAWTvwHFaK9r0cWI9xEAEgx4Z0fSswMTvFDvHxub+9l+iuDL5ZEe4Vv3/3MtpLV4Jw4wNL5CbE4F8vVeilNZJMIcklsuEKVD5AXcz2dcpXLkdvYYiFQYphppBKAZS1q36jUAig8hQiy0H5IIDWdlogUQlIEoxmr09d1aCihEwZ+C/LS5Akns+t5jGPx0T8yq/8Cr7ru74L11xzzaVuysMW1lr89m//Nq688kocPXp0Xzz4HnroIdx111346q/+6n1p35vf/GY861nPwud//ufvaVvGGNBFMqWttTh+/Djuvffe8HPPPffgxhtv3DOw+Zu/+Zu455578LznPQ9Pe9rTLrqN89ifeN3rXod3vvOdWFhY2NN2rLX4+Mc/ji/6oi/ac5smk8m+SkIaY/B//+//xdOe9rSL+j4R4XWvex3e+9734sYbb8SNN96Im2++Ga9+9asxGAwueHv9fh+vec1r8L73vQ9f8iVfgic96Ul49rOfjS//8i+/qPbF8clPfhKDwQDXXnvtnrZz4sQJvO1tb8N3fMd34MYbb7zg7/+rf/Wv8I//+I9405vehD/5kz/B85///D21Z7/jUQ/68cLcTZBbiyPRSDC1vmB2frbzegD+tKso9fJSAKRj/HXZdMBsMM4XrIoY0LK2Bah5cM52sjthcd+V
u9kl2sl9uxMAiJg98b6Ekzn07fWMI79oNRbhYCU1QKUHSfy++PSJcwI+52q/926LExHSV4VbAxqtQh+/H9X9n8L4+CrW7noQq586hc3PbOFjD23heFGjuIAk+2WZxOFM4cpcIe8p9FZyqJwlMNNhAnLJfPbqSrkiNVGQvbTFzgMYWPAAHwNuCdKFfgtw85+zxgQAMAYFY5agN7Pv+o/5z3r2XjLIGcBzwB+F/TmZLL9A9X+TDMkhltFKuLp+sAjqDWBCBXveHJvKuOJaVMF3xQJBStL3sYQiMDCOGKAPrwHWJU74+nPiuPJ9jxgk4v7QsB4M2n1ZOEDOWrcb59FCrjNyd286ZmDYtu5VDw76/9nTBWgSIEJYLuQWzTaCfJv7bgy0tcD1cG/6RKpj+lluSCxFyuCfA9KEAAkH3saJNH9OXbKodP5R/Nu2pDfj4wrXc0Y22aA9LnXBPg/wVZoZfjHQpzvjWmu7DvATbkwCBCoH9kqhWOrTyW9Z4zyzwu+dCS0Rxswo8S9qHitJwtYlRJJyvyZOLeqqRl1MMd0sMDo5wtaxbXx6VOHUtL5g2d9EAEsJYSmRWEokegMuEMgWMySDBOliD0k/R7aygGx5CNFN1M5jHo+G6Cb0neoBF0pwhQAJ4eSMbVSkdBa2kCB0a3HIAwjhBdoJ9Jytmbsku7mwJJrrnAVBC2NKl0YYATwsX0hNAZhL0NuY8YX2WOsLZKQQELX3A5xxbPzgasW0tpB0FvbV+YZjbioiaG1RasvJd8EFMtIpVfi9eK9nAyBxczoqNkFbW8DaQ9CnjkKvnWQpz9EE1ahg1YCFPtLFPmSeIV3sI+nnoDxnth7AcsvOX1UXUyf/2RRIUdp40MH50QWfbVLB8ywGSLrn0RcUxRfBPwdVPA77Z4x77oCo6QNBBtP5yElmy8X78z5/HkiLu1aYD3Q6ejzH9W+REPjM5hTaAhtFBRIC//rzj+D65R5WeokDBLlPJrKt7rFruIIXBi8pANNektQKCusCL0Xe3a4vQrJwfpUAz/9i4IqozawHnLdum2knLLlCnpo9jAW5+c7Og4kVRsK95hQNmNVYt+eXrXmBcTYA1G5DiyXM112SQuIWMYpcUZYRjrdoWyoGnr3p5fzJ/QZ4zhMXi9kgZW4h3ZHsOEoPwPq2E5/buIgrFBH4aDEoveKDbfpx51x09xd++30ADt3dOb56lY3Gj5mZfrkmx/TTGA5SlIsZ6nKIweFrMVk7jnJ7rbNbwtJVj8fykT6uO9THgTxBrgRE4QoXVQo5XAAZDeovMujXX3Cgn4SdTkDFGCJdQ58I1YDXI7qqUY8KBv2mF6bkMo95PNbCGINPfepTuP7669Hv98/9hfOI0Wh0UYndWXHvvffiE5/4BJ773Ofuy/Z+53d+B9/4jd+I5eXlPW9rdXUVP/uzP4tf/dVf3XvDAEynU2RZti/bAoC77roLGxsbeOpTn3rR2xBC4Jd/+ZfxP//n/8Rtt92GG264Yc/t+vCHP4zBYLCndvn41Kc+hVOnTuFDH/rQnkG/X/zFX8RznvMcPPGJT7xg1uC9996Ln/qpn8I73/lO6Eg94Ud/9Ef31CYAePGLX4znPve5uP322/G7v/u7+M7v/M49b3MymSDP831hR546dQo//uM/jte85jW4/vrr97y9P/iDP8DXfM3X7BuL9u6778bdd9+Nr//6r9/ztjY3N/GRj3wE//t//+89be+v//qv8drXvhZEhD/7sz/bE2B3+vRpfPu3fzve8Y534Pjx43jKU55y0dsqyxLvec978MY3vhH9fh9/9Vd/dVFjuZQSb3zjG3Hdddfh1a9+NT784Q/jBS94wUU/F4gI/+W//BdkWYY3v/nN+B//43/gG77hGy5qWz4+9alP4ad+6qfwe7/3e3j/+9+/Z9DvsssuQ7/fx0033YT3ve99eN7znnfB20jTFK973etQlmV47bd+67fwbd/2bRgOh3tq317jUQ36hWTAuWTfZlWi
zpL73G07BmGxyqBHBMp0ADT+eLMwbyTymkU+J/YbBhIQgRDh/+i9aNskdibvcR7vM4OHJQWlaIAH6QC+hNiLwoN+cQjYAJ50Hy0k2omUi8lt1w6ocDnEsMAWYG+ZhACabkNUE9B4DWb1GMzmGUxOrmGyuoHRiS2MV8cYbU4xceyj8wkCcFVP4UAqsagIvUGCpKecZCZBSOESUsK9zlJVySAHJQrJoNdizYXtJg3jTkhi5hxJBiCcZBQnS5wsk0+axJWj7jVbVyxR6H9rHbwt/L5i/xsQBYDPV8uCpAP6aOd+4CrCVQKRMeOP5awkJ4ecpJWVjXznYuq2L4DKAEQpqii5FarCd7vnAkhD4TpL97clCokgA+8pCQjLrD1O6PH/FgycWesYg44RGwOAvj0MLvp+1e6ks+6ZuHpbG8AKy8cqWMyqzbL1yeI2MyNm1LW27T3zrAgV9b66vpXwcrnz2ANRgMc7fyk9mGYN+yx64NUzDWOWXvcYW4BnvNPo88x45GOrtEHlAMUq2hi5DSWh6IETYpwgI8f8bHt+eRC3dtcXlDIbwtQst2ecVJeT8+pWt1v3N4Ag42dJcSKMak7AlQVEVXK/LgsGyL2klZPLLY3F5ALYwAD7fB7OFC7LJIaKMFzJkQ6SBvAbJiyXO8iRDHKkC32UF4Yp7k/Qw1yN/nBuex6XNvy8ahcGiS9+AgTqaICzlsdMXyc0M9mNBtvyYANLAnOC33Z99DrtCACfT5qbuikQ6rRzV5BvBkMwsABnMMgavy2wfKBC8wzrss7QzItiNnqrGMoYTrZ35q9SAFk0n8iUDVLuF1NM5Y9ee8nnaOxWxPOrFDVEXUJUEx4/TQ2K/MS8BKootiCKbdSrx6A3VlFvboSip2SQu4KoDNnykOdIg0WINA/zChgDWxYOXNGgRIOSplDKF2l4sE/MGl9i8M0Db/78SQXrwCTPJLPR86x5sEagiakhjObt6KhPUAS6xACM3yfQzIviQsBOfwtS2qZR2jDumVoZi0oblvcsNaY1+/sBwFKWIFMS0rEyrQWDLkoyUOVjVrW4n++hvUaKPf2CCoBFkCJv7gZ+kaQIQL6wbp7m97kbW3c3NZXuZxBtb7fPR6xOayWEcSAgEWCo8Qrt7tsDY27eIgTPJSCc72YMnrn+QsKzYJv+UhnjiptsKGwiEtDWzW+IIT3rGH+7RTchZx1bcedxqravX/d8dgE/o8PcyM+ZZq593TmMWao8RbawhDbTFQgFfb6wTRKvD3NFqIzEMFPQwwyHF0sUlYYxFtPJjUgGi9h66NPQ5QSmLpEODyBfOoylyw5gaaWPK5Z76Kfcp61MIAeLkEsHYXucQBL9BZb0zQeAk0SyxRg264V+TulWUGyo+5xgq6tLIO85n1vN47Msjh8/jquuumpftnXnnXfi1ltvxYc//OF9kSc7deoU7r333n1oGccdd9yBf/2v//W+gH6vf/3r9w2kO3bsGG677Ta87W1v2xcg5vjx43jqU5+Kxz3ucfjbv/1bSLm3ceFv//ZvYYzBgQMH9ty2b//2b8ctt9yCJEnO/eFzxOMf/3g8+OCDWFxc3NN2xuMxfv7nfx6ve93r8Nu//dt40YtedEHfv/HGG/H2t78dr3/96/FzP/dzePvb3466rnH11VfvqV0AoJTCe9/7XvzkT/4kvvmbv3nP2wOAl7zkJfjrv/5r/PAP/zBe9apX7elePXz4MF75ylfi8ssv33O7PvOZz+BlL3sZHv/4x+MjH/nIvtxfq6ur+zaGDIdD3HPPPRcNXt1xxx34sR/7MfzxH/9x2N6pU6cuGnC6//778exnPxt33XUXbrzxRjznOc/B+973vgvejrUW73jHO3Dbbbfh6NGjAICVlRWsr6/vqYDjVa96Fa677jrceuuteNnLXnbR2wF4PvymN70JWZbhzjvvxJd8yZdc1HbuvPNO/ORP/iTe8573hHXXX/7lX+4LKPzqV78aWZbha7/2a/e0nVgW9Z/+
6Z/wvOc9bw767Sn8wpM6C0CfH4JpJGC6CayzLExbElAx8EcIUjHUqVb3Sy1CO0kPNIBfzAbixIRtTNfRJMVnTRZa6/wIpIgXa8L93yQ9HNgnROPd4vcYKjk5Sa9op/+JPybppBc9uCE6zMO9MvsqY1HUxlXOcu5FkpeVAYQuQdMtiOkI2DwNvbEKM9pEuTVGuTlGNapQTzQm2sv2CFTnAP5WEsJQEQ5nCouKMEgISa/xxRPkq89FYOlRwp407DuTBBlNcvI0cbW6SHP+SdLApANJfk26BaMD8fhkd6QFXTW1rUsHgDhwMAYKPVvPSWHFcoYhYRZVfO84/9rLhDrQL82BNHNyVipUh1uZuqQh9+VMtft25RJIwZPOA36dCuydDYjSTB4AJMUJZRKOAee3DYCafkjCgWQOyCY0LDXy116gBap1U9AxUy+wVqM7ylgRqreNA6jYd8bfadHl2sH7bbbd+pxtgH3fnLP1VJ9ER8TY47Y3CU0/ttSGf8eAX2UQWI/t7zf7FRA7GhF/p+vb15LxBCfC/DgSV8JLx16UTt7OJ5P8MRuLIGMKY/maCwWpmmQ+rAG0RCzH1mI6usSdtQ0z2wqCSLg3iKwHURbcv2cA9OxpdP6g36Jin0/v+5k6CeB0kCIdctFAYAOnCY8baQJh5kmceTx6IoB2ccI5/o2mUEPsHA7dve0+s4u6QqwS4OcUnGhus1KaRu0E6XwRRNzuIHk388DMzt9dADBOusft6LZLOc+3XeaSUjQetLKrgBCdk1bCvQsOnpfGw+7hL412RSEe4PGJ/FQYUDEC6gJUTiCqMbynITdIhfNjR5vQo02YrXWY0SbqonSAnQwynskgR3pgGSLNIRdWWnMf768qtHZsKwMYHeZDHvATKg1FSjHw12L8xbKVvpAjuka+L+kWmBWd+w5Q0hUCjWUWbQSAtoDh6Ge3QjwbPZv9M1m7ghrPxAcQAL9xpdFPJHoJISF+jmq3bkgkz9NbbMXzjBj8a8t+tyXI+bP8uyX3vXOK0JwH/7sLwIF2nrfW+52/dwESg4xovL/4/y4TGc06DVbsfA1Nn/LAlv+JD9JEwKwHyqUBoPj7CVlIEjulTxEVUXWDCA5pax9PDGB3gdzuPLpTDCU8W9Wz/lqhedtuzSucJyPi82NMWNtSdH4F4DzaeU6XECEh4yQ+JZb7CbaLFKY2GC5XkOoIrNHQ04JBv4UVZMMDGC7lOLyYYaWfNgoZkhm0YrAA4UA/6i/w+iPy7hTCjQFGh4KBtKxY4tMpp0wn011O9jzm8bkRRLQvMos+Tpw4gT/+4z/eNz+ipz3taRctMTcrfu7nfm7ftrUfwKGPNE3x1re+dd986cqyxItf/GK89a1vxW/8xm/g3/27f7en7e3nsRLRvjAGAWYU7Ufb3v72t+P06dMAgDe84Q34qq/6qouSbb3hhhvw67/+67jtttvw8z//8/t2by0tLeEXfuEX9q1/jMdjHD16FCdPntyXe/WJT3zinrdhrcX3fu/34sCBA/g3/+bfYDKZ7Avod8stt+CWW27Z83YA7rsXCzBbazGZTHDrrbfiuc99Lk6cOIETJ060mKEXEh//+MfxnOc8Bw899BAA4KabbsK73vWui9qWEAIveclL8NznPhd33nkn7rzzTtx99937Asw/73nPw//6X/9rX2RMhRD42Z/9WRw7dgx5np/7CzNiYWEB3/d934fnPe95eOCBB3D//ffvm0QzEeGVr3zlvmzLx5ve9KZ93d7FxqMb9JOyZXTuFzMCNa/0Z4F73aRPvCg9W/UqwKtAwX8blySIGXyAS9CjAR5mpZ7OtmY3re/ulPLpJhcCaCjQkrxRks9HJQQsMWjR9fcCYtBPhIRTDCiG6mXBN+p+elNpCxzfrgI7qZ8QUnfAEgKpFKByDFFNmirztZPsI3P6DMrNEXRRQqaEfCXDYcneWtu1wWZtUBqL0i/WhffgIiQCGCreV76QQfVk8O0jKUD/f/bePNqW
pCoT/3ZEZp7h3nffWAMlqCUo0CKli0mw8aegFtDYYJUKNDRIKzQiigsFkaZBREUFbZwnhCVrOdANIt3SlLY0pbSiyGBLu1poQIQqql5VvXrTHc7JIeL3x44dsSNPnvte1b1Vr15x9lvnnXPPkBkZGRm5Y3/7+3ZpQ/C+QLVeMZAntfEs15wxVYEiZLfrOhQ0njIAV5R5MMsocA7IQDgJiJD3CQQU693IyXv4JlGGhzJCZdt6uwCDfL4Nv5WaacbAjCYs31SMo5yVL8Yc9AqAXx1kYgEJYKaxaYGYfW6JQVryDmjrxUCEDtCqABrJZ57rt3EdOwb1GNBLAbTOC4PCLwB/vL100Ujv6Cx8aU1f9lKArti1JAG6flRbAV4RqE9gnsjyxkCS2l+sG6hAf2HgyvYI+Rwx7B/62B9ynYqEmQ4stuGan7U8riQWboyAcYnh2z+6fs0+YfnFviWKgF9pDAeGFLNP5hOpfyiAqRyj98yalH5vNHAMgjUVQnwogn0LGe3e8Ya8AwphB7rA0KizUO54+ywAYHL0FJqtGvVWjct2OEv8czu7Z4tfNmIZz/tNiljrc3J4DFNajDZGsFWYM6Y8J0Tgb1wx0O/OwX64C4xMLhV8V2x/ZfdOa5yw7/hazWpHSWC6GMfAsFcSnyJNbIhy4HCJsasWGN3Ecr9APldSlAlUgJ4p+HpXwNSCBPAQuKBeLySj9MCcbLuSYGbLmFyw8LvQRksm+FIUjyXzOU3yWbX/yEDprt11/uZawBRoXLhnG0ITpAxLY1A226CdLZjtkzDNDrqTt6DbOsMMaVENqMKCzDm42Rb8zha60yfQbM3g6hamSvW4yrUx7HQKe+AwaDyFWdtICgRBrYBaluIja+FE7aCteY4sKmb6CDuwGjPrx1ZcX7gch9dlZPVl8oehbxcY7QDOuRx3vflf1gTCalMsLPlcgzMide4QM4wim6/zLKfaOk7IqQNjfrvpIvNSZNHHBYN9Um+vcQ6ltzAETEuLkTWoLCUlhWXgNpCzuwLI4stxSOLh+3nrEKXSY1e4kIQYZD3JhM/7jggJ2EqLjNo+QK6SMwclKHeZI0hf13oeWmbBTxCw0Ot2hLkiyqQPgYwBqmemn8O8TaCfAMlja4L/A0wKGxiCxMkLFMB7s8j/E7lSbwDALiZEkIkgnPjCsQ+CLKkw/CLYpxl/A4l0IJFI9Yl9bV3on1Br0nPfEJmQgOFj4haPTYNR4dF5i4OjIIt6bA2TqsDR9QqfrizmOw02D43RtVyzs5qUGI0L3O+KDXzZpes4MilTAmkV2H3q+nXlJDF5wzk3dQEyBch1MF3Hc4KxsEEmGMainqn10N1kK99qZfdm+/qv//oL3YSL0o4ePbqv2/viL/5i/OIv/iJe+cpX4q1vfeu+1/y6N1nbtnjDG96Aoijw0pe+FK985Sv3LHV7v/vdD7/0S7+0Ty1k2y/AD+Dxdumll+LlL3/5vm1zr/aJT3wCz3/+8/HkJz95z8zUe6IRER72sIfhYQ972J639f73vx9PecpTcPDgQVx99dV40IMehAc96EE4fvz4nlivx44dw7Fjx/B1X/d1e26jtv0A/MSICFdcccWd/v0VV1yBK664Ao95zGP2rU1fCHZRg35eLygVo48lYAAgBIWwxIEeyt5WtWTSAmoguIOQras3dx5zef8rIqXZl/c8H4uAg7yh6zqEhV4Z5BI7zxv3gV2j92uQMswFcJBMZKnrJcZBKo82FKrfy+2LA/0MTLTOozAC0BFshrGERXxbR0YcGRMzy8eHedFXBtBustNifasJLB6EbTLoV5Q2yHdKjb7A7istyBLXpakMTGljfT1TMeAn8p6xpl9ZBEmqKgtSZXX0lKynzk53/bEH5Awmeb8HRHsAqAYACr3YH/rMO8BYmAGw0dsqBIaqKOsZs+mDLQucRXaG7Mc5lgpTgYilx0Q2Pz690PWpdpQNrL8OIaDck5PTz7vZgtwXEruvn+0O9OSulph8
Qp4vJDmCdD1T2Fa+jRiY9OCAkeffdD4xCeM2Bi4y3d5+fUOnjktYHgCy2jRcmFBeq/qhwUQKzgl7cJf+NSH4ZRRbpw/4aXnP/uHIsUQgNICBgSvADEAYLnxNBjAKAADgJQgorD/XAkUAtasxA/FNDRqvoVybo9qYYnx4G5Mzcxw5vg0H4HTDiQJ9GxvCkcriinGBg6XB4UumKIOUZ7VewZYG1XqVSfraCcvcCfiHorwgoN/KVnZnzQdWkhUESl1j/Dc4KKuDxMiBP7mui10C+pLMAQR1ghCWjmoFQz+K2sbB34PLaiYv3HOWAAQRTOgH3jXIN/T7ELQn9Z0+k0n+igCebGOAlabnP0DdUwZbff5Gbc0ypCiSXxdkQq2rQfUWzHwLtH0a3eYpdCdvgdvZYtAvgHAC0sF18PMZs22aludcJHWDWLNYsfW05DgrEwTWTlEy46+oGPArej7UKPhR1RioxiEZiWUpU5LeEt9o4DXQ883VNrI+PgfjrO+v6d+JryLjlhNxfJLaDo+6c5gFEGnWOTiXJ9KUxsAYiiw/SZ4R+EgYmjrBLzXPpuNWEp99ILsLgH6nfAfxEYhCkhVJDeXwDM2PW+wXTitU/aIkbzOwdMnvB02uQ+cis21QMeIcSQULX/eq5uCAuSxxyvfUAAjOeTTk0DhCCRMUIETtQABT8YNU4qasUY1ZzAbtH0O4vijK1gT/RvvVnfKvxR/SSQ8AFljIMmcJ+9GFOsom7cOQCclXPtX2M0mufVTwNuedw9F1TmKctw6nRwWICK5z6FqHclSgGhe4dGOEQ9My/s55D1+O2RfX65bA8PPGJlUHUwOm4MTE2TYzgydroR+4L425+0G/la1sZSu7u+zSSy/FD//wD1/oZtyj7T//5/+ML/mSL8G73/3uPdcFvFjs6NGjeM1rXrNnWdT9tAc+8IF44AMfeKGbcY837z0uv/xy3HjjjSsgf2V3m13UoB/6hc4jeFCELE6XMj1dC/KLWaYCxPiiWgBZ4gK6L/2EQC6Jcpdht+dqbj8wpP7s/5ZokeXH21DfkbapLNg8E54DESKZ2JHUHqEMMOyzBKXORatqhIk5j5i1PCkMioE23hHz8Kg7ljaypkTrfGD7GYytRyQluxZoG84WLypUG2uYNi3ajTWMDq2zzOf2DDu3baKdtag3G3jn4DqfAXr8TAHQo1irUJvU8bOVjfKdNrJ3RrGmnxlzkErqUMhrzeoja+GLEVxg0KFIANsgC3Ug83fXoIlPsoeDAQAXGFJaGlG2T7mkUKxbI2w/mxgXFBhhPO75vIEIFgnoi1nIy5h+ZOCLMu0HdgFojxYkxqwKzgkzzBBi5rUGvM451nrIYOeThKV8shuT1fnFz0orwGTMoY5MNS0fKu8nkDKB6YtY0+K8slAXxvssYK6bLZJy8h5LUBEcPGyQC+tEN1QBf/LdzvnBYzWiqRpeC0tY6vcVoS6oDQEwGTPSF7peocyFejd9YLELMq7Wp+QEnptMvr0IRLQhEFYG0JqZfjTl39rZJYAxWLt8O+2jdli7bQclAVudx+kmQdsHS671eXhksX7ZGsq1EmuXrqGcFCjXRpH9KzJT/DdLelYbU5RTng/MZA3UnMcA3W9b1Z1Z2Z00kUV0PgT7JeDsHUAengLrxBQwZJQMMf9eJA0tge+z4VrVgJZIB3ZQPoZHYPyFazYiYQrkl6QsgxQ8p4Hrqw/+KVsA/OIHi8F3IsegFxlwZgcH7WMSDxDoyGq//XbIyz4zTR27zInCbtyTeQeanwX8GnyxBmMI5B0qAqjehpmfhdk+Cb99Bu3xz6E7exLt7bdFyc7qwJR9pJGSXmkbuLpBszWD7xy8czngV1ZR1jwCfqEPoxVlBrSRtfBdl9QS1g7wNibrnIxUjOCrafRJQKm+MCegYbDPhth+8R4atkGmYL3GrlnoOzYbgIUBvywcV6z1TQYm1MoTf6TzQNuxD906HwG/7aZD4zw25+1COwtLGAdgZVwY
VIFRJvfTyrDEJ7paXY9cD9G7DhQ8ZjI2JJpJHUIF+nkXk3pECjzeh72P1x6z7Xf3rVKNPSB6NBqYFaAWAwCU7s8+0Cp+bNeCugbo6gh0xePubyM7d+lv8iZKlvaTAbRCA4DoDzrvAyAbEiCVvym/mJFDF0plF8b2VCIY+COVSMn5DEGRxko5inw8ZW2MbOTcv88Yfn1/f2ic6uOFAXzHbTBIjEgHoGsjE1H8KwPx83wC7aoCpXUwhjAtLTbGBQ5NS5ydtbj90Bjz1qFuHaaVxaSyuO+RKS5ZqzAt+fetA2qqUE0qUDmKxx1Ve4Doy5EteO41FjSeBpDPpuTPsoItt3G328q3WtnKVnYvttlshre//e141rOedaGbcl72wAc+EO973/v2lUl3T7fHPvaxePKTn3yhm7GyO2FEhC//8i+/0M1Y2ReYXdSgnw8LqLiek9eK+eej9JPUiRpgHpm0ONWLdjGTLSpzqUAgLds0WyZmb/eYckAv61i+r14bSFA/gXP9bPChTF/pk7SglDddaB/3l9XbcC13nM5sV8dLlMfTWHqI3+hCpGUvjD/dz03Hi1JbcKZzYQhUN6BmBtRzeJGGqsaAMRgDHIRaG8OUJeyEg0zdrEa5xmw4Ae+EsbfQX53LAlgAgoSnicwdUxWoDqyxfNW4QnlgnQG+A4cS6KcCXWRtkIYq4EwIUBUsmemLEXzBclWdz89r7MdzBSP7oDTy35ELmfiuxWAdNNmGBv7Ue94qMD3si8dMOtFmt3MuGc0Ugi0+3z6GgkEDxyevJYAmAHQTAmkCQJ6PaWlPub414CdAVwZwD8eLM2nMFNihGKTWgF/eBgkGyvUT3u/Jisb95DQEblOvzdqirGhvOxLgO5fJ760C92A8TJA37XwA/vq/k74I/UCUkhY04KetDyoOtS4y/oye9xZZypKkQKaCFWmorgZaCxeYSIYMbEgYmNSzrBbn9NgE1Q0l6s0G9VbKGpc6faONCpOjU5STAqNDa7AVJwGYsgiJBDYCf0V4vzq0wXPD2gGY9UMwFwL0W9nK7qT1EyTSB8JAMfG+Yo2BI4LxPsj1svE1ToDzmdTdMkArTWceNtxrCEjyomLq/kdY9HPOfXB5gswQaEBD98y+xXtZrg7hZTv9wDvlyS1cG3Ux4WtfGH9k4NZYamrkG1AbAJN2zqDfzkm4249zbb5bb0S3vY35qU0AgAs3JlMWICWdJ34S1/Ez6WFSrdRYe7ht4OtZZPd5XbNY6gxXY4jMOAN9a7AHDkd5cX6u2H8iyiQ9xR9oewkqch8CNOueYr8aQmCOLQGNBt6TsUCuY7A7A/7A9Dv5MwBGnfNZQk4b5DSbKK3poi/ANYopgHssmT0uDEprYA2i9H5kXAUAN0umIoIZpWxhT8TroyWsRfYfJLHPR18kSlOG5CC4ADKey4S9JkzWCDampLLYr+e6TgcAP+q4trUGuvg4F6+x7LXIsg5sP3sr+pM+qpnEQyP2e3QdYzlvzrEaQut8GAbKN8mYmovt01KncTxGEM/n7RSWowCf6nMaUtQ4l3kXiqcXEBngyPQzwa9CPh68AVAYlvu0vEYbW4NJkJ09ulbh6HqFumVJ1FFhUBUGh8Yl1qsCo8LCg8fbvHPoPKGy47jUyIB775KEMhEnl4wmocamAsSKEgZ7r1ezspWtbGUrY/vwhz+MZz/72Xjxi198oZty3rYfcosXmz3lKU/5ggI5V7ayle3NLmrQT7J8+8BfNAViMVMoZe1mWeNhcSHgQl/SUvaTZYTq3ahJVwe29qv83XkHf/SCUt4Kx+oBXjjKd2SxqWqZeLUN50Pg3vNKTNbANiwAHUIfaAmwO2He5wtsBiUDYwguZjRLMImMBUZjBtaMBdUzkDXwIWrh6hZtWcCO2xiUt+MqBqfifkLwytVtYASmM2okc90alGuTnLkzncJMNzgzff1QAvyqUZbhrLOcfTlKWeujdbQemDWOA1dqkAhjiqUQi6Wyjj78ps+M
knpI1ha8a6/qoAEZipVl1eoxIccQ9qUvqD7LNGYvxzdNRIg9Gc7S9S4PBg0wDBfaoYK62TUpzDwgylreEYafBvy8argG/KSLloFklgjOAaKSSqTGqxHACwtgvW6LbkOfaahN/7bfRgk6iQnjDmCJsHw7ebBKjiOv65cPNmsIrkvfDa0P20i1QO0eHM4EDoS/F8ZzeN/5HkPQZ30sQKCcC0MGRahJKebIwKzXMMbCzrYwNpwI4JoW1YFtmNKinbVotmq4zsNYQjEuUUwKVGslqo0J1606MIWtyiyJIJsvpmOYquSaVpM1mOkGzNoGTH3nijzvyUJw/S7d/srutZYU7Zac51DjWO4FWW1jL++x9J0Af/KZGCn6cwRJwNd0VDuQZC3TAzG8S2DDblXbNADXY9Es/f75flcY8uq30beMB6lZVumrnfNRwpiI5VD3q56fB1B7i5ElmJ3TCSCYb8LMz8Kduo3lPLfOYn77KXSzGs3WLPpILQDTtNittpTMeQvmHPtrTR38NDlPqR+px5ShyRpoPGUfyVbRZxLJ8T5bTRh0jUu1bUWSMIIsJglODvarBhX6fsxCh4b+E6aU9vm9i+sH6XuP4J9Il3gfa8Q55zPAj5Pn0n1WpD2N8i0keUb8b0HpYi09Umw2fQxDwJsckpIfbdVFWRgCjIcLEv67JlepBMsFX64Pig/5m0P9LQCWa5HVrgsSnwv7X2Z6f+dxr5Lz1Tk/mPDVT3jqPCd1ik8lXRhVCAaSnZa1WScwZIBfOF5dyzh+rss5DNiCBK5KxiPP9Rpj1kEPfIxJW+TDGBSPmQHpLlxvpTUYdQ5ja9A4z58FYFsAz/VRgWlpYt3vzkmdaB5XfUULHuNqbWAL+K4ATAsqqzhvyBoQ3QXwQ1a+1cpWtrJ7mTVNg9e97nV47Wtfi8lkgmc84xkXukkr28VWgN/KVrayO2IXNegnpqe9ZcAdEBYSMOjHKTyArksLvgh4DJgEFCTIbNBj36k2WQI6MHDmaJeFM/LPHKU3dZBIFkWpHXwgxphFQFCyP6U2hFeLZVkEahBMMbB0HTxIpqoOgClgaC/mvcepWYOm8zChYHylv0AmsORqmPEUsR7MfMZSU7Mtzhh3XWT26SCUlt4DEDPUedOSme7iZ14Df1Wq0UXWRtkqKiuuERZq0EDq9VmWUvK2TMXoBfQLGeveFJgFadS6S9JKMta2GxUU0+c5MKbEpD6cHjOWUlDIZK8tiMLitLdGXYwV2LAvn7Uh9oka51mdEjLwpYmBCAob9xKUsOq7knndDwaFgKhHiG0o+UrpI7k2O5dkPbkfUkfoNkfgLfSdXIcGgPMJPOoz25Yx/ASULa0JgTmgEqDW5PNCapKX8lexBqgAfp2TjPsE4MUAkgL2mNnYf523TQKEUg9IXg9ZDIA6Qr/OssZxoxyx8XFSct4vBFAFNJQgTz9QqU3mVgFtARXsHzim2K74WgGdKqse8FntQEuANRblaAPFyIG6Gr4cgyYzlNN1uDO3w15yCqNjR9FtnsX2rRL43olzgal47rBlATseLcwnmhlsyyKAfWuRBWwmazDrh4DJAfjtVd2ZlV08JvK90SjUsQMWAQ/vQpCWIhvbew0CMmjvAGjmS7yFEOeKxOlGJRTIvSQmj9jke5CtwDWp2oj5nU/NL/0d8j5CMwKgcDCcpUsHfZxuINg+EMgXMEjApX6SDxDuRQDgPUpDcX7cs7wnGBArDHEAf7YJanaAU8fRnT2F9sRNaG+/DfWZbWzfejL6PpIo1c7mGasPAMrpJKgeTGGrAmRM9J+iOcd1+hBm6qJkH61kz45sqDtnLNc+ltfjKWg8hSsnXO+rnLCvUCiPMJwPYfk1zmPeCnCVfJYySGBW4HEsDD8Ccr84ALTk/SIgJRbBPsB3dfRVKPrAfcaYz1QEhswYXhNYIvQZ73IPSwlgSS7bxnu7Og6EBCtT5Ky36OMXzOYKn/MY5AfX02aFjVnQGLdEYGofwZt4ZSwc
gyfiPlBjvL/fyM5Uqg5Zv/bBPwG4WpbypHbOYFdb5ww/DWYFUJOWdPYCy6/3W60gIf2iFR8sITA5h51CqWEaWc3qHMr8Fecwlcym2xPHpTHcAPGjo1x5YDjKdvrswP7xDRxn9rlia3M9VJdYh1KrM6w7LBFcAP4Q5n9JHPVFOpamc8zga8vMXxVgUCRrEXqydh7WB4Zk5rNxy6Ksv5TqKEaIZTuiigjX/3PdvSKMsbKVrWxlF8z+3//7f3jmM5+Jv/3bvwUAPOtZz8KBAwcucKtWtrKVrWxl+2X3Om+ZWUA5gAdIsAq9gHyyVLRdB6GTlJyYg7D4/ALLJUsWDs+MJyTg77yOQe1fzBChA5jVEtor2Zi5FOnAEn1ZxrpDrNsWASr1cT9Q0mdH0kB9mjtq202HzgOlJzSGUBrOEi06oC2A0pYB+BvzgjMsfj3AIKC1IMPyeuXaOAvIS2CqbxQk+WL3dB1859A1bQxyFeMq1agpStBozDVqijKBf9WYJWdsGUA/C1+OU3Z6APp8AFKbTsC+BPhJtr9mPPL5lIx1kXhNZ2YY8PGxhpoNQS6pf7csGWgosEheAjoYuFD4B5ZSYFKzGaIcjwTUVLAhBl9MkcC+nqTusr7wvWuzX8dvKTgZrnW5Rqh38e/Gxo3yVkgZ+AL4mRCciHXsQkAuq1sl++xt3wfATyS/AKBxLgKNjXNB9s3H11oKrO0SOCjzg8xRpaEQrE/gmw7ex5pRgeXHEwlQWgCOj6HfHUPAn+4jY5BJ9y0bazoIqsG+xPJbPBGdPr9Dczbyc0lhzEfA2wGd4WB6YccoKgfYCs47GFPEmlNm7SzWqxLdbI5mawbXtHCdY+lOVbMK4EQCMZlbTBUC20rqV+p8YjThucHuNUXijhtZm81zd8X2V3bvNA7EJnlxbwtQ17LEW996Upb9KSDeK2TuQw9Q7Bkh3cN2k63zQHaPWdxQD1TofxaD5/m1ySwYRFZNwi41WBTmMGHKqO3Fr6uguwBBwuzTrEfTYwoBS/y4O2AZm9J1oHYO0+ygPXsKbvMUulO3Y35qE83WDO3WLHQJKyY4a+DqNh6fzIF+HPpLzYv6O4uN6OBbgIwLIG1g6JQayGP5TyqqDKDSvpM+JknoE9+pUfKUJvgsnKTGALRFAJQhSWwmKR/0x4Ww4uRPOccd1xcmOwACk42M191OmBFGPRGa7L7FYEp/ndFfWxgF/OlrR8A3H9qvjwVAqjupEvWc9+xDwUfpb73WcN7D58K5S+/rC2Ce7DckdkUgUn9P5os+gCXXlutyhl8f8Avb0zU1M5ZjH4w3Ju8Doqh04Z1SkPA+U4CQfu8G1npDJj4Ihf4yeg7rWb9cwzJ2aTx2AUT72xpKPND9fkdt4DqW4/Be+pm4n0h/JyQGGLOgmmEMv18YgniKom7hIGtaghf3nPJjQai96YUtbPQYKOBtrybn3WAr32plK1vZvcmuvPJKfMM3fEME/f79v//3F7hFK1vZyla2sv20exXoJ4EVnbmZA2f8rDPKtbSc8/0lX5Cnir9PgZqhoJUOtNhsQcTA35AbP8RIlHboQDmRBPhzQLEvjSJrJqvZfF0eqILvUl2EsKhqPSD1CrP9dDWoy2tGkEiu7BH0cwDO1h2cU0XiPWene89BkoOjKYwxzBprKi4ybyyDQ7MtziQvSpbQA1AMBp/S6pNMyi4X8y3XCzGzrfgejdc4cD89EEC/CUj2NZqELOYKLrD7fDXlTNRqEoNVs86j6zzmdRdBGzkFAnCJtJJHypZtFL1Tgg1aHUtLT0pWOINRDHBQlOQB+miJsKOEmaZlHUVqx8PHn8l4sAQOTiGwV0kYa+l4GHwzKKspg1+uzQMRans+9oHPGHxdDMDIGEntz9iwA8AngIwVqbEiIh9lqoTt15dHjdsKfa373hKhshy4sAYYh3o7I0sxICfgo5xnAkso+cAo1H0qtX+4zo+L2dHz1sWaP/rZqXGxm/SoCee9NAxOltYwOGkT
GFhaA+sAZwxKS+g8jyEXrjkg1Q4KJwEmZHwbv7i/LMilzpWjNEA0m0/LrfY/WzgXCnzWny/O1AgBJa/YsXwcAsyOiwmKYoqqmsDPt0CTgyjWDwH1DPbo5XA7W6hmW/DzGcvTtYvBJF2bKs4lUstzNIYZr/HzxlGgGsOXU7hqAt/eySDcylZ2Aaw0/IgmTCKV/BMtMKFkHkjzXJrsucZtSEzRm43B5PTDMsw9BQF92rVIlaf7UmCE9A+ADOC7QeAv/35IIurUdW1ZPcCb/HfkPTMLJRAPPmByKcieSeqF94TBLvMeEaGAsFWYde7BzDWA+0PYXXs1SwC1M5h6C93JW9DeeiPaU7dj88bbMDtxGm2Q9QQQVA2CH1YnZQhTBcUDE1jPgdUMY+G7WdqZMewjAVGOHQHiijWPlR+VowJVqONXxZp+rU8KHNIvkvxTh/tlG4A/59P4yxmjCDelcF+Ttoo/ey6Z1zC2I7s0AEjJ902j2QQfwFC6BwnQbY3IbxvMVBIP/45iwk5pTWToC0COcFyVNTxuup5qRx9YE/BS/HRJQjMF42qxL5MPIr5Q5z1K72EcryS0q0HSJ3LehoAlaU9I7hpKJhSGIKDAPnVdUSdMvyYDvPT2vbGpxmN/e9LO2BepTdIPMAU6mDCGwnoxrh9z36I0FPOdtM9oSfwsPm8EUX1ILE3qjzGdCBe2J+s1LwxSkeTvlLzpuWRNswS7RZCT++YciUcD51OzsIkIBFF6oLjm1MmDrkhrWp3UJd/Rc5oD14vmOTDUcYWAr1zHz1ueZ7RUexzbAdD19TmOa2UrW9nKVrarfehDH8Iv/MIv4JWvfCU+8pGP4KqrrrrQTVrZyla2spXto130oJ9kkWuwTzI3+yCeLOaEfdMhh0T6C5Q+2Afkn/d/N2R2YLEjv+FMx/OX/5RmRKDF+3gsPizIsrbKgrqfDSuLJcVCExAKQJCc4sWYSFxRL9DAjK42CzrkfS2L2d2P6cysRec9DvgCzgPz1qE0BkQWRedQtYSRHaMsp/BkYESSyXUspdc2IGNVkAksMdWpv0P9GLI2ZYnqejKjMX93ssaBK5HyHE1YdspYrtkXJDw7W6aASgggCNjXUoHOedSNixKeXHcmBzpY2jHVU3HeY9a68wd3jASK8s9CeRhmQQ1QSz0FAMoxONS5VA9NglRd79qxgWUKF+qAhGssAmoITDfPQd26k+Bm0v+ROoQpGJCA7VbVuOncMBCuD1MHGhLonYJj2fVJwqYIKLbh67+DB3kO0HkfQC4QTDgXOtGWAzqIgF9pKDL9+oCftFXmlpRFjij9JaxGAfxmCujbaVx6L3xety6wQ31WewehTWIC0glgV1mD0roYVBRZUpkzOwt0PrAPHKUglmHpLzm+eFy9v/t1AHnu5T7NpqEeqCdMR23L5r5+gr3+3uJPwrhTv7MOsIZBcA/Hz0WJ0fQwqB7BmwJUzWDKCma+AxdAP7RNTAbozy2ZGZMYwCH5AEXJgJ+t4IsyBD4vwK3WmMHM/X3d/srulUYiaRgeUdYQiLLNuh4wvItsD20MHSyaTB2JzYtUt0zYTDpg3tu2oQSmDboYJgANUoctHpiw09qkXCC+QvAJfNexryD77lrAtvH4dQCeTMFsP4sEdij2ulgCKcP9opkxe9IU0Hc3YbnPO/adKkvxPOy0gdnmuHbWtDhPVNA7uJ0tuK0zqE/chp1bTmHzxltRn9lBs9OinfHxFOMGxlKskUzWwFapdilvygXZT66vzKoJJvRFL6FKJUjE/gq+FYoCqOf8Xqmk0YM/1Xr2m8Q3kL4R0K91iaWmsGXAs29l4NERoaPwZgD+jEnjeJnpmmlR+hBgINg7ZiMusSTtyO4XEfsZcm9y3sOFA4pJRYTI0He9G6ORz3dtcA76iaxmBCcDgzKtj3ZTOAjPJvlTkkBzPvuPDD+pGd1L9tL7gAb8uiYCXH2wT8vkZoCfrXLQM0gNx23HTizy30XAL60Xh/wPw9ma
sX4dkJ8HTqgCxoVWf0gPUmMIQASKhZ2s8iOSKTCavE+A3xJG38LPz8Xw2w3kVpYnWCXTWxdwU5QVVANjEh+QJ+6l7ajxhcU1NifWGpb3Df2RsUb1urO4ANLpK99qZStb2b3EbrrpJlxzzTV46lOfih//8R/H6dOnL3STVrayla1sZftsFz3oByQAzftUSybKh5wjCbC/2CMBCWK2cFqcDP3WEaI8U9pGWiDGn/UWW1Zlvy4D/oYWosvYMCI3GlmIA9JF/OVFaUUOnqTM/Aggeq6tEIE/WaTrBYmS3IlrefA2+6AA1OcC8pwNGeWlYRmd0hBmnUPZEipLodaIga0mod58C+pqUNEyWBeAPNLBeeeAtoZvALguFH83Uc4PAAfme0ahPp/U5UJRMNBHJkiMlrxwD8EGHUQQ8HTWOrSOM9HrzkU2Hx93OnkC/rQuMb0E+JHAlnOptoiMSUvErEiHOEiFbcWMiQC8gIGt/tJRQqCOePBSCIrp+msCvgGIn9sAAmsGrQb9Km4IjNdjKR+sct77oF+spyKgnwKFEgAvzEVVIlAFivVQ6486SwjZxAjZxCFUbDy852dpV1bfD0m2U0t6xodJGdCRFOfTz9O8lCclxHOuzvu8daG2TofGeWw3HeqWAb/tugugn1PtMrF92bGGv0eFQW0cqsLAGsK4CBJzITrDbAIH5wKA7AkNGAi0gdkHiyhtNQT26T37eNw+Bl+lD+RzGQ39ulb8vV6wk4aPS/+2Xwcx+35oszPc75b4/lAEtkXngVE55RugreDIgAqW68V8B75t4OsZ4DoGBQLrLwMAQ3CbqnGq+zleA4oiMVYk4Hge8mArW9k9xpT/oBndnOBRxCAshSScc7hZmemkKgbo2fERcEHeH5S0cz2ZvyGLvg4Ah8CXGmDGyPdcB+863qdOBvIe3juQ5wSnBP60cRuxxpTartQJ08w+eW0EEOjCfKLaYQA0cn8l9gcrmySXZ63DPEiEA8C0WNSOGAQRnIOvZ/CzbcxPbWJ24jR2btvE/EyNdtagqx3IEpqtGsW4BFmK4B9QoAiynlK/z3UOpuR+y2TozGJ7JAGLqg4wAfArWcqTyuRv+gBMMRPLxASXussTRAQUZfBTkqfy4/fw6DxF9hZCQotfDhFH6wN+kdkamX4B5DWLY1DGrdQm63xKAApdkVRIwngQBn7eZT4ojFC23YV2DphmQYmf741lHxX5miKxr3LJ0QhG9rZtCIssXw00KvaVvBZQX88f+cG2Sc5Tavd19TBDrudzx7qBYT4CbATLshp6AkQW4rcXPX+zD0oys68ZmNWS+oNKksp8wpQIBud6yZLpeAyZyE6V/lnoG81Gbdt4rYn0cDq+OwkQeQdQfs16tS71fnG8SFkJ6YvoE5scvHPew9vg98b1z+L6dTe/Xa43yP1FAcjax2zvyM1nZStb2cpWFm0+n+Paa6/FJZdcgje/+c0gIhw6dOhCN2tlX8D2N3/zN3jUox51oZuxspXd6+yiBv0kkD5Uww+QRUj+fSAtZPqhKsk+FLDOmvS3tn6wW+pI8TZCsCcAMUmepZe1HhbGha0CmEJcTw0+BIkAUlp6y9Y1krUrMlmdLB+pCHW7irQ4hGIYenA9i3AMjVqNSUBi3voIqFSmhLWEQkbMQn1AXrTLcVW2wm52w9kGN23WmJYWpSF+DsXebQCX5q1HaTwMeUxHFbxteaEf6vsZCdYpGT4fQD8/30k7E5ZfUcZsdPkegCg3RZP1uH2nAwWmgC8nnL0sMmcQRlwAchqHzgGzzkX2nshOzdouBHx8BPQ0uCc12+atW6ivEg8hgH2OgBEQpKAo1cogGbdpdEZQGgmY0vGlLiAxtVpB69pqUiPQe5Z2lABOP2tbmGUSepDj0ibXJrMeF+v3aUlPHQBxEAAHUa4xY4VI4MpQFkzumzSH90OhjRRrBOYMQ8qYtbpGng5yWKJ4vTtQ/H3rEogpNRzr8F7dJWbfvO3QdB6zzkXQb94x
ECiAnzAadD+mnuGAilUgvIDt89ZlEp1c88jDOKAhD+sRWQUiJ8bH2WVMUpGPNeHZEoOiuv6ftGkIhNPEuD44NzTOtS2rc5SAcNmf/k76rdQcLI2BM4hyWt47NI7ZjZNiirKawhQjUDsHtTNQNQdcC9POYsBNgIFlTD+yNkqHRdaKMEK8A/lF1stdboHhfJduf2X3WpPrpa8CAENJPnyJ9VUWSOZPLXuXtpgZ5yMohgwZdmFEZty1IFOAkEDH2Oa+5LhhqTyvwcJQb5YBN2L/YOj4ieL+0Kq6pT4w3gOIIKwqmYdal/xRIJdJ5RwaocC0oMahiOwjG/sOXjoCmHUe243DrA0S0J3HZBfCjjACJ4UBdTNQ10Sm3+zEaWzfegpnbjiLnZMzNFsN2qZjEKOyqNZKlGslykmBYlzClqHOX+fQzuYs8Tmu4KyBDT7VUgvzj6nGMGsbnBBRjdM5KkbxnPlilIAcwwoBziPePwGtEpAktNvevYcToDiRp3EhiY/SM8vG5kyhPsiX15rzSQo/fkYMdEvNR+9CvctikO0KIMh08z0eCNdCkeS2txuWuW86vh9PnUVpgElpMCnF3/ToyETQnQ9YGK2UrStiskkAyWAKlkrV/h2Y/Q84dF77AOIrEgzS6wVQRt+fBZQThp8oiMQ1l48+qAdf/yT1+roa1PA4pa4O5yIBwnENo/1xW8a6j9l5DABRNidQ6hPngTbImQr4Gn2/XdyRPthniALDjzAqOEGxsuwzFYbnDeox9KhrY9IlmSImfsqc0C+joPt2ocabzF9Ddr5svyUSoc7n64DsK2odYQ2idHolta5J/G9SSX0IdTi175b8fVm7iOIIkLO4JXXRubQd3gYzMOv+YuPusJVvtbKVrewiN+89XvSiF+HjH/84PvShD2Ftbe1CN2llK8Mf/dEf4S1veQv+03/6T5hMJhe6OStb2b3GLmrQj5l9CTzou/6GKFvkigzokPUBvyE5F3nZD0Yj7F/ezuocaAmbuJB1gDcgWzBzzRQRoNSsP50B7NVCJ77Xa5/zzNgS4I//N4P1tIAURNmtb4Q52SAstqzIGZp8ceodqGvgHddi0LU8NJtQgn/MIAKOTSsYYomcMtQYGxUG48KiCAtoa5AtVCVzl6oRqG3zBVLbMBNHASEk8p5FGQFA39Sh2R3X6DMWvlrnZ2EE6kxpBfgJ2CeynEluKg8ouADoCZvLBYaXZvGl930MCOnzYCiBz8552AG9VA2SpbGQgGBho/b5DjIehuKHu4UN9DVSWYrnFAjBOEhwTgPJARztUi1DYfTJ8Wpwy5rFIFNk0Cp2nwQKdJuWsXIBhDp7YGlPL8HpwATwtFSKSABGqZknx+vUDgTAFMBPpNh4/AcgWDE65XXb+Qj8ym+1JTafiXscYvppec8iAH7pIWBlYhZId7sQNGnhk9SYIzhDARBkyU/nfZD/ZDapthjMcfnfYn2AT8dpFiTNTBpL8dhoEUzWvxNGoou/D0FghHuAsBcDwEuEwMh18J4wKseJoQAw5VEAAhtq6wCDATkJwGkG9TkDbytb2UVgwlyOl2JvbtWBZwnkDvlhALJ5um+yFRqYvHUAn7wHuhYegLVV8K16CVXyrBla5FItNNL8q0XTAXaRFsyYVUEy1INBiYwdI8+qHxwW76cRMPHcLkJgtci8pXzILvgHPtxbms7Bhf1vNnwf0QlqW42D98CkMBEo9W0N3zboZjXanRb1Vo2dUzNstg6147lz0iomkjUg08V7AwD4TqQ9BXCySdJTW5BKJZMSragas/+lxouutSbAlMzBzOJL98+YsCcJOqFfnGLNxe2ClIw2AMO/MZZyH9ensROlGF1e49ED0bGnCCgJmz2BgHq8ncsspftTn+XH74ETbIIPJHf9znl4Q/k+yAgSH/9erOk2DKxrn6lUfqVWN6D4nV6/aRtg+PlwhMvAI70tkevXspZRYlftQ9fji4BfYDDKd2KiwABLUMaRg1asWEz+jP1A
KelMvxcTikjqBef9xe5HvjaKJnkHcqwasATSPKPrJp6nJGdm++CD9H22yMSm4BMjrU+sHL+woMOxWTKwxsSkVopgn89uBEP3BFnDxPVB8LGjIk187wKAfitb2cpWdpHbb/zGb+DNb34z/vRP/xRXXnnlhW7OylYGAHjOc56DBz/4wfjLv/xL/MEf/AG+8iu/8kI3aWUru1fYRQ36dT4wh8LfmSRgWEVIDrcEASTo7zy4zhcSWCdsnqHglASuFmrz6e9gEfDLggmysCMDUBIb8t7B2ir9TbxA2p2lmIAVHXCSLPFOLWT7SyICEjMrLGz1/mLzKYEQTQtY8iAycEQojVrg2YqleZptbn/I5Jas3DPzDjutx1bjMLKEaWkwLQ0mGyMcm1ax7zWzyhqgMoS10jCwVG8DSlYL0l/WqUxtHwFAAlieU4A7lf3siIBJWmB3RclyfNVaylT2SprH5AtoCcLVCuDLsoddLtu53XQJ6FEgnyxepYZf41yUwrFGAn8y7hiAMQH8aDqHbQCl8SgtYVzYIE/pw/HngHEHHvvtOeIHkZFheGFvdXwJCegrDKEyALUzwAPejtF6BlKcTwCNMOmkf4TpV3eB1TjQHgaYQha6BzzxlREZtD0ATo/nmIksxx6eh64f/lvmhyVRbeRBa0BDb/k12HT8LOyEJoyDVuQ7w3iYdYHV17rIBJXxIO2whlia0/lYt49ZDSkQJu/3a/lZk4JSws4TVmh/Tons0x7wlmSqCPNQs4ZrArq4Pd03ug/7idf9OTNmarvFgC0AdAOZ2/PetvtAIaCy8D0BIfPcOcT6jAH7Q+N4TMquWwe4glDaMQpbgWyJWGdIZ+y7XiBPB9UkyWBIcmsoe/9uMFoWlN/H7a/sXmokdenyQCsRLYbIBfQO3+tfmjJfC8svu48jJTvtGrztFGtGs7K6BNQkkEOBMEg+VgTtVZ1jYdFkWJD8XgMmy4L44XtyzBT6CKofuhC9juCBAF0R7GBvrSr4ftT55ItmXRDm66ZLc9fHjm/js6d3cHhSYlwYjAqL7abD2BpcMl3n2sddDV/P4GYztLMa8zM1tm/bwed2Wtxed8zUMYSJNTjsPDbqBIh659E1LUzTwncOXd2EQwggj/hbRQkvygvOMehXjUGTNa6LPJ7ycYckNy09KfXZvC3hC/Yj6i5ImTpVExcJ6Gs6v6CKYIhCGWOH0ho0LtUv7giwPcAv1obuak5Ya2o4pRrBGzXJl5RzL/4hwEl7ZDKQSZKaOtdbH5Biznuu3yf3LBsQFVG9mJQmJu40MXGIVS9gCZUen5rt1huX+n3xjSwxU9WSjyoRAhoK4KdZuTEBSxRL9D7ieE7+dR9gA/j82SC0m9XrdC1f21LPT+S01T58Nk4Cw68Yx2QqLaNriM+XUUl/CQDOfXRh+nmPhcQ3E5w8YyhTRDDKlxLfSvzhQqldyLnJkiMjSGzCNWKiRLIkhsY1Y69/PS36D7re5MI5kX4fyoAbMr0vDPSHrKNJ5N5TTT89r1Mzi8Bt3L8pUBhmqPpQW7KRLlHXpG6qMEMXWb4p8UsSbS8E0W/lW61sZSu7mO39738/vv/7vx9veMMb8PjHP/5CN2dlK4v2oAc9CI985CPxwQ9+EI94xCPwi7/4i/ju7/7uwaTQla1sZedvFzXo12d+aNBKGD9DDB/vE3Mk/TavDSbbAHSGYw4oiPXhAkNIQakhVCP+MAStYjZ6WBhBy6SEr4Y/RIZHpECZNZf6Q4McfbaVPk44v7AglL8ERPAeIONjn0k/lAagroaZb/G+phUHT+oduNEaB9LmZwG/BozWcWRsY1Z6ZQgjhQIMLdik/6irQfWc5X+6GnAu1v9YWBwHEBAFFrL+F6SO5H3pW5NnmUNAS9kmeJHqA0hRd+lZS00Ja03A16YnpzRksqjtPEvVyFpPAn4Cvogxe9AHjIE5BLKP0hggsuAW2ViZdKdmtiowS+QzZSGvmXPy98gSLBRzQvVlBOQswTi5
RjnoRoFJ1nkGkDj9fhH4cyE7Hx0H75xHrJUjbdaMO2nv0LUZx7S86I15v/DNcxsnDDBjUeYgrkGEbHyInKcAfcL0dE4FhAXko3Q++AWfiyEwLv8+RYBYB2Pk78Iq0JhS+wEEZmk6+X0Z0Q4ezhDQcu0h5x1QGK4HCZMx6rI2qq4cYkXH33C0P+uHPjsUWGT0DVmejU9prGjsLfznyaNzqe3y3c5wTdGinDLzM9QW8p3MN34YvBvKqF8ABuaD7V7Zyu6RFoAsGcVR+QC9mVKBXkNKAexHJeZa/G24PowxidEBlTBFvYA4EK8/zgGRens+AxGz2mIScDcFpCYf6QB8v63WLv6+vy3Zj7FZwF37jeK/1RKczu5b4T1bJBZh17BfU29jVI5hjcnk1j1YQUBsu+nQOfZRRBVhp3Gx/8fWYFpa9kXkeIJj0c24jt9W47DZ8qMy7ENaYpCt6Ty0oI7vPEt8Ni3suAp9ZYKCgjwPBMGNARUlKymI3Lv4bkCSXYyAX4U6JMrUXZJHdz5n9WnlBH3/ZDl7dh2M8zCWAljokY1aBRhHSchO5JvDM0Lg3XXp13ocOMeZWb25X7sZ4i97Yn+HQUoCYGC8R4N07ykNoQQDftPSxoSeznm0SAk61rFChhdJWgV0azbqMrCHou/kYUXZwFHMJUqAHx9eSiDC0K1+0M71tf4ckP2WKNWu6zP7bBWfWwXipe36sE7JExOiosH5NR+GKPrjgI8XbUpcSIlVkhQXfSvw2iytJQzge4kD8ty5BAr6fG1zvolCGfCnQV6ZC/fBCDwWnOckWhkjfWajJEdlrE0yfA0ZCzIOKIDCFEDwbd0uTVyWBCLsPzm8FdFvZStb2T3VbrvtNvzd3/1dfHz1V381fuiHfuiCAhg33HADvv3bvx1Pf/rT8YM/+IMXrB0rW9kye85znoMPfvCD2NnZwUc+8hE86UlPwhVXXHGhm7WylV3UdlGDfpzlya9jEjVy1g8oZQ1qph/AAKBmviTQLpcI1IBCDGj3g7ra9KJt2cItSkS5GASR+hx9yReo97JaOtJI5wP8QzEIorMidR95+LAoJfje8lyATtkDGe4zkUHkfuF6JsLs450d5CzPdgaMWBPczLfgyMCXY5jtk6CuxsboAKjp0u+cg5XMadPrw5DBT119ziz7rK7HkByOLICLakH6CAjBBZXVz6CdyixHXqdN2GryrKWnJKNYAL+83hihQQ5uyG/7Wf198CZKfYW0ZOMZeAFcZF42YRQUJsgcUtp5PyBryCfgD4j1+4Tdl8C0nOVqDcH6UKeyazn7uhf0tSpI0nkAnY9So0AYtgH4M86jgdsV+KusiWy/eErV9UgK5JG+61v21rmCKbvUPJFWdDyxRClTYTFGyVYfGHxKzlMzFLRZQ1FGjUvsKADLKranSeeiDAdZWpPkpVSATpih+vwBaU5gIM1E4M95lohr+ii884jgMvF5dJw1EBl1A4qzGUN1yGINJpVJLzbEBOyDpHrbQ+d7mfE1DRCFYCdS4ElqEFnHfVGaiufEArGG07nGTgQVZECL3NiFyNwemlf3e/sru1faUByVwlyb3lABZs9+lvxOz9aENF9nPlHweySIu3Ad91kyvecMPOgFvgVUA0wO+XiHfvhfgIY+4Od7fkJszwCLKpprQVSEfmIfgCRRS3/PFDyfkItKENRyYkARFBIiWBHmpZisESTCAUQ59NMzZqgZAqYTi2llAzM+tdE7h67u0Oy0EfDbYf1LSB3lzgO1BhyD9IDU9fPOMeBnFdAnNZP7Fth+3pZR/SGqQ0BYQCYBO6ZA2yXAT/z7RqkCyP1V10QGmIFUwgQgjWsQe7/8xhATOGJyXge0DXxWHzqAzcbCtzWMtQC1y2upyWHLs1wv3gdfhRlinWcKIN9P5T7O98xpaTEqDMrge7c+ScUDSKw0MgFLsnGe95oNK21U87/4YEQMTHE/Acb4TPUkJn8hAdUL1+5+W3bdSYNTfdwIDNsKHUyS
vNWbCCC3gH/aNFNMlCi0GbAahvjdznPfOO9VshkDy1w3UgH9CnwSkLnzFBM4yQ30m57HZCyq+u9LE4z6vx8aixrwG2IqL7ABB8ZK6A8eD7TQDzHRDmpsqKTM/Do3IF9w54Q1rg3Anwxsp/a7mwkgG7d9oQC/lW+1spWtbBf753/+Z3zrt34rPvaxj8X3nv/85+MlL3nJBQX8ZrMZrrnmGtz3vvfFb/7mb67YU3fAjh8/jv/6X/8rvud7vmfVb3exPf3pT8dP//RPY2NjAx//+Mdx2WWXXegmrWxlF71d3KBfkPcRSRqRV9ELVjZSDKxcRop2CRwnEFAtbLo2ZZ8vW5T1glXUX5kMBa5cFxa9dZ5RrLepamYUKuhkQ62SzvPCVzN2ZNcOvLCXIEnrFMiAJOnovEdhdFF2UpmoLWi2w8y7toaZnwVcC7t5G++r62D9rUDIXve3fAY4eQuwfggoK9DOFtz2Gbizp1j6qay45ktRgkIdPcmyhuvg2ybV4ZPn0RhUVPz3SOWjy0I3MPm8DsqJPJCAfiZn+0lAzYeAQNe4rA5hAvi8Ch6E/g1hB5H01DKOWn7KhgE5hkFnfAQ7SrW2K2wO8PUtghwKzCkV6CIZyEOm1tdhPAvIl7P7Yqa3WQTQZUwIoOGLCrBVBEukDWX4Lb9PoLCkbwNY5JBkY8kQBFDq9xmDSjJWCc7w+I5yXxTgKJ+zVju/5NqVbOQeIJOB671txWsxyFeRsEC9ALw8NkTGs5Ex0PmFcdB5PnfcHTwGSiBk/zsYx4xSYyUInoCzJK/JYN7YMtOuFKAvvI7fV9e1rreX15F06AoG/gonYOIAKNkHFaPMlclr3JxHnEK6PtbXC32iJT0FBBS2Cwd8e0E6Nc4jK7YHcMb9mTgJ8vdCQNXLOSGPulu8HspwH5H7C8HAGjtYf2whecTm77nRXRQsvQjtV37lV/D6178eN998M6666ir80i/9Eh75yEcOfve3fuu38Na3vhX/5//8HwDAwx72MPzUT/1U9v3v+q7vwu/8zu9kv7v66qtx3XXX3XUH8QVgPH45OUirHER5PgBDUn78K9lG7xpxiqEPRHlES6kuXg7QDVhv3ibvEpNMBWMlmE22SMCfJBH11QAovc52ZSwWArziXyg/Qs86hgLoqe6ppQnseJfvl9vvkzReV4PaCsX4AFoqcHLWYbthqcuz8xbz1mHWOmzWHW7b4YSPg6MC86DbPW8dSmNQGAbvRuU49o3rHFzn4RqHJkue4HZWhoLMJ6EYF/FhKwtTFbBlAepN8uKPwRgGzKSeX1GCygpUVnABuMl/aHqyngzmMKDpA4DsVR3cdC+V+4K+JzgAnXewThLzDMqAQ0pSUKo71gOevYdzSlKyLzEJhPs/A5x9vzyCxfIWUZLQND7IrgMVTEzE6oyHcSnhZVwYVlEoeDuN82jgo5S4+GRyj/JFFcehdEOBWVqX2OCnqNraco2auDiSZMhUy1jXSxb5yug/9U2/58AANrEQ5AJ+r97ga0MB57bgPu8z1UTK01bw5QS+qNDCRH9L/MB8+7zQG2K5aSUUqfduhRVo+LedZ8C4tNLz+XgXHzs7Hp9qRzZO5WyZAPxZt7hm7IF90TftUikI0qw/6UfvOVF0ICFNsz4XQSnDILZPjNBs3OrxC8QkQFaEQUxcCIcVEznEN4rHph9BCYTIADYkTREx46/kNSyZUHt+F/DO+3AdCGiNfD2zsmQr32pl+2nb29u44YYb8BVf8RUXuil3uXnvceONN6KqKlx66aV73t6nPvWpTGb6B37gB/DGN77xgoJF3nu84AUvwGc+8xl86EMfwmQyOfePdrEbbrgBhw4dwvr6+j618J5pt912G372Z38Wv/zLv4y3v/3tK8DvbrAjR47gj//4j1GWJR7xiEfgta99LX7sx37sQjfrC9qEQHVvtdlshpe+9KX4yq/8Slx77bW45JJL9rS9W2+9FT/1Uz+Fpz3taXjUox51j+i7e0U6
mSxCJGArC3wKEpCkvhPZSkaKsOOcD6nLx3WedA2KjqWZ9EPecx0/dEYxkD/HLOM2bV9+p7aha1+QXgwqqSQBZAwWA999c/DpoRbNeYAbsY4HNTOY+RZovgVqtmHqHVCzA9Qz+Nk2urMn0Z09CT/bgt/ZhN8+A7d1Bu70CXQnbubHrTeiufFTqG/8Z2zfcBNmn78R9U2fQ3fyFnQnbkZ74ia0t97Ir49/Nr7uTt4Kd/oE3PbZsP0t+LZmQFDJyOR1JEJAzlbwxQi+GHHwILzXwaB2wLzzmIXM8jrU22tV/ZTOi9SUC4AOIrDTeR/Zf60LMo4uB/wQzoU8ShPqgQTwZlQYjAqDScmPsTVBris9SqsfUk9Eaook0GUZ4ELqnwB68lrAPMnmjQDZwnhBBMoZfzML2e4y/kqDKG0rQbZ0vSVgk1lreTCIty9ADzKJVGFoeZ9AtChhi3C+tBxYBPkRZB3lWkvXXLzW1DWXvb9LtrWDsOZEylPahXj+pX7MMhMATcaJrpEiQFqsxxfGxqhQ48TyGGFJMBOlwaaVxaTgR2UNJoXFuLBpbMVt2SgFNy359SRsR/Y1KgwKNWYFcNZjr7Q8/qxq89BjN1usX7Uc8BsyDQLGc6T6Po0ZH8eKjA8BoOUab5yPjJPayfygHmEOaF0O9jZhuzKHNC5/rAx429vehpe85CV49atfjY985CO46qqrcPXVV+OWW24Z/P7111+PZzzjGXjf+96HD3zgA7jf/e6Hb/mWb8GNN96Yfe8JT3gCbrrppvj4/d///bvjcO7VFv0K5RMYAfw04w0pCUJbBvj1N+56fhCQgwXnKW1H2rdaYl4Hwwc3ZBYeGcvPSF2x8OgBfv39y17i/U+Ak750n7Rf1TOjtmbgz7WRKd+4VPe18wJIsC8i94XCUgyES21gJnErpl/nInNP2idAXwb4lRa24gfLePLDVAVspcA7YyLgl7H+jE3vI28DQt95Y5OsuvR5+IqAU8LIEp+K/Su+H8g9oVP3YCe+28I4TH4Y6XYMmQB+rv9wnJB2rjHnXdyf+FUUfK4Ikpg8gcuYVCOuCNeasMay8gXaOzNFAEkR7zc+1EwT9QVdY0+S2oD8urYEFCbIixpRedD38uFjJO/jIxvLqm/Ex1xq6lpLQBX76FHSU5h+gf0q99jo+4V7usw9HuL3+YWHHhYUxwSiFKuAw4bSmBHfhs/VkiHjEddSXvxOJB80JQmYxXGnEhd4fecTU+6OMCuXAX69eUy+E+fCoTb1Nw3dJ/J3YvmRtD/rFDkuj4U1mvpb31fksbB/NW9HAuP59coXlK18qy9s897j+uuvxzvf+U60bXvuH5zD3va2t+Gqq67aN0Dnhhtu2L1u8x2w48eP79u2Tp8+jac+9ak4duwYfvAHfxCHDh3a0/Y+/OEP43GPexwe//jH4wEPeAAe+MAH4kd+5EfuNOC3X8cJAL/8y7+M3/3d38Xb3/52fPEXf/GetnX77bfj2muvxXQ63XO7brzxRvz93//9nrdzV9jm5iae9rSn4fWvfz0e+9jH4olPfOKFblJmN91004VuwoLdfvvtALDneeihD30oHvzgB+PXf/3X8eM//uP4sz/7s/1oHgAsvS/eWfvMZz6zb9fq1tYWjh8/vi/b2k/b2trCr/zKr+zb9jY3N/etz7z32Nzc3NM2xuMxXvSiF+FlL3sZ7nOf++AP/uAP9rS9Sy65BF/zNV+DRz/60fjmb/5mnD17dk/b2w+7qJl+AlLJ4lRAvBhciV/kguo2vAawCMItqb2XybV5twA0xcWubKsPiOjXAxnq+ndAyk7XAbUo94kuZEtyIXhvi/hbIgNDJsoJxd2HP3XAwCBl9ko/lqbXh14kPOdAy6Bfkp9pgXrOQNx8J5NE4r7q4GfbcPMdwHXY/qdPo9maYeeWk/COZaLKtTFMWaDaWIMJQSwXglLdLNW/qjbWUEzHqA5tgMZT2AOH+YORgwtBJwp9yBJSZZ4lHAJLEQAK
gF6t5DqTxGY6hAT48efCjGTwyUdASqS2mhBMiyy1HhNqsbZfksNaYCcp4AtIGe0CbAyxqwQoknMubL4cWKMsCJPYe5Qt5omS3JocNyHlPRtbpCCma/lIfI/BQQawVZS1mhTMsK1Cn847Gab94F06XocQu2g5+58Km+TjPFCEc5nX4/TxOATgz2zJNbxgIQBFMHzNIQUmdQ2/CBA5j3nrI/ArY6J/LodMGDX8Oo0fafsosPrGAvwaBuaMAcaFjTVX+udbctWlv5xh1s4ozPrO57WSnEvg2G5jD+D6kRK8lCB0fzkj1wrv647f2DVgqnE/fUqXsWK5RlUYV10Ispo01nibak4UtqBhEJ8IsE6Aaa8kcL1iQ/jsmtHt0VWFPDgp4O42Gqqztc/bv6P28z//83je856H5z73uQCAX//1X8e73/1uvPnNb8bLX/7yhe//7u/+bvb3m970JrzjHe/Ae9/7Xjz72c+O749GI1x++eV3uD0rGzZyLTPT+qb9JDUf9r+Z34uWXKTOsaABgFiXT8wrdkzcl1O+UPpefBZ1BEWESXXjVA29Jf5ZxjSSzwLgByvMKkps8AU2TeoTAlCFedEQWBK7rfOvihReO0uJZHI8zTywypmpfXrWxrn24NhERti4IBgqcHKnQUPMMj9bM0vtwMigRcGqEK4LvhfPS5YIkyANvV4YrBcGG4XB2rTEaKNCuVaimBQoJwXsuEIxrlCuTVCujVFOx6zUUASlhlDbzwMgZ6KKA1XjXNaYDLxVfVrw514kwp0CuxyCRDYzG0XSU4NlANB1Xt0beCA656N7ZUkYSVA+gjq/S8E/5cs0NatStA3QtkvB3uiLh/26sF8HwHpA5KRLY9CAWYlio4Jl2WMtvwjQEUoBj8NuHYA6JECl2o8epigWjlH7vJJIxdsN4F64ocuYBdS9UAGDkaErwI1cr95F1p5sw5si1uoE+n4I1zUUBhxcGWua62QCueYE8GupQNclhp8k7cixOO/DuubcFvuHkGq8e6Cj5KeJwocHAZ2DqNf262sbgOU8HfsXxjNGTOHAjSeuYecds5qlMzTjTzP8eoDfgkqMtl5ywhDgFz+P60SV/NYH/JwDyMHKnAxGbb0aNwnMViw/nUyn1gHSdvIe3jkQWu4DAGhrnpNNkbG8XfDb9FE7H9YqPpSxGFxP3b228q1Wdk8zIsLb3vY2vOAFL0BR7D2096Vf+qV40YtetG91tJ773Ofii77oi/AzP/Mze5bqu/rqq/H7v//7ePCDH7zndm1sbMB7j3/1r/4V3vzmN++572666SZMJhN86EMfwsMe9jBcd911uPrqq+8U4Hf8+HF8x3d8B37jN35jX471yiuvxK/92q/h67/+6/e8rbIs8bznPS9jM95R67oOv/Vbv4XXvva1+I7v+A688Y1v3HO7AOC9730vjh8/jn/zb/7Nnre1vr6OH/qhH8LJkyfxcz/3c3tm61x33XX4ki/5kn05nwDwzd/8zXjnO9+JL//yL9/Tdj7xiU/gn/7pn/C3f/u3uPLKK/HMZz7zTm3nHe94B/7qr/4Ks9kMn/vc5/Cud71rz332rGc9C5/+9Kf3zLzS9rjHPQ5/+Id/uG8s5qc//em48sor8aY3vQlra2t72tb73/9+fOd3fide8YpX4Ad/8AcxHo/v9LZOnz6NX/iFX8CrXvWqPbUJ4Gvh2LFje2b8ee/xrne9Cz/8wz+MD3zgA3s+r03T4MUvfjGqqtrzHPLABz4Qb3nLW/Ca17wGj370o/e0LQD4t//23+KP//iP8XVf93U4cODAnre3V7uoQT8bslMlOzbKJ6mFxyAA12fboQfu9a3P1OsDfnrRQ2ZBAmlh/3q7vdekgleDAKA3ubyLZE5D5BQXF0P9a5P7irOeRcJnZBNgSg1LYFGzA6q3YZqdbJGPtu1JOSH2nW9r7ouiBHUdUI0xOtShXBvDVmm42XHFGeTWcAa6c3BbMwb9miALYwy6ugWZGm42gzEG
vp4BIvVZjbkNGWvSASY/h/0FZN98yNTV9Q+13KeEFwTEaJyLAImYAB/LALlzWQZwBH2gUe/3EnCQwJAO8Kgc9vQeJTafNv35kHkV+ND78SGqI/KZNgQzsgCtjOMQcLBkolMoIIsGzvi9BAzpOm78JsfKmg6w5FCGMIsNv2epJZ/Vd+N6hFyrzQFwRKikrdTCE9fTXJpFvSwYqDLm5Vpj4DixEpbkDqTD6Q1AKycpSJjGyJ76XKQ7hRFgFFtAA36lSUBu/9ymLiVVH8jDgTK5Wl0TVH7SDVw0IqEMDART5Vg9onxTKtrioWsWAn4pIKqlSftyTk4ibgjjx0FoSOELCTQWoDKCf+F92bYNQSRDFMcUPNCFK588sfxfOO9Gyx2Sko0DYgZ9v/8vAOZ3t9mZM2eyv0ejEUaj0cL36rrGhz/8Yfzoj/5ofM8Yg2/6pm/CBz7wgfPa1/b2NpqmwZEjR7L3r7/+elx66aU4fPgwHve4x+EnfuIncPTo0TtxNCsDAGobrqcLLLC6NTNr6H5qSF3uy2590d8KU632d4DIfInf68vQ6feH5nEJag+0Wwe7fdhXZO/p7wIpachY/twEmbxdjJCzYhjYqzmoH9uXQD5qmwWf0ofAv/cc/NX3AWZls9/LyTl8EprOYdZ2GLXM5vY+7d/PZ+hmNVzTwXUOBsB6YdB5j43CYGINDowLjDYqVGsVqvWKpT0D4FesjVEdmKJYG8NMpwzsjcYR8GN/zAKuA40moKKEGU1yNh+QAD/pU8uvndxPPStQMJCSWFrSB7wJiixIsc6n+ngCCJ7T69J+fWDyRZYfAJGaJyC+L+eQeswxbXxfkEQq9k0MPLql3w+qI4bv5Xw/1iy0lFQigJeAMfFQwABploCCXtKP+kG/brl8X5u+x4tvHQE/DVCDrxO5+xIAqCTEqBIBAcnC92QcBKA0An9kkhy/FdlXYehjgbWnActdMTJ1vPLSEv/hQTCO3RMfJrbCUGTWig8kiVjeA57YfyVijFn8KAGt5G9PyIFRA8AbwC8ZEQrwE5lZqZmp646eN+AHIJvv+EDC9oZ9XZnDfHDX8qTRc9hQMp0n/uXQPGeKmIjRB6z1ew4M/C27ju6NtvKtVnZH7Fd/9Vf3TULsUY96FB71qEfty7YA4I//+I8Hx+6dsb/7u7/bl+0AfH/9zd/8TVxyySV7ArDEnvzkJ+PJT35y/PsJT3jCnd7W4cOHMRqN8KQnPQl//dd/vWewVLdrr3bgwAF8z/d8z562Ya3FC17wAjz/+c/Hrbfeuk8tY1bpfrBdxZ70pCfh4Q9/+L7Ivt73vvfFgx70oH1oFZvIM+/Vbr/99jhWn/Oc59wp0O9jH/sYnvOc52BrawuXXXYZ3vSmN+3bfLQfoJW2j33sY/vWtrqu8eIXvxg33XQT3vWud+EZz3jGnrb90Ic+FH/1V3+FI0eOwA7VS78DtrGxgUsvvRTz+Xxf5t+nPe1pe94GEeGpT30qnvKUp+wL0++Tn/wknvKUp+DKK6/c87YA4Nprr8W//Jf/cl/qSBIRfvu3f/seAfgBFzvoR/kjq7cH5EGfXSRMst8MAHH8Hb8c8JPaBd4DPrHxAIBct1hUfTfHwjnIUp2WZGwm9lFoJhKI0l/ca5PFkyWKweoqsNGsD2Cfa6PMoal3QPUm/NaZWFOPM8a7BPgBQFnx+03KZKciyUDZ8Rqs62CnU4gklHzu6xl8PUM3m3NQCogSVGQMXNOiBWBnNUpr4Kod2GrMwJ/r4J3hmm6yiFzWrV4W4n4hG9ghsf406Md9G7JVZQHcY0SJSW210lLG0or9gTzo0A/YlEisK2sT0ysFD2kQzOsHVYcYUcvuPWYgLCbghxybBwPEDiF7HQB53nbnFV9RMWE9Eb9uwdm8YexaMvCGUACoLNf7c2CQpS9/GEEYz7UoAWZWSuiog+cgmZf+SECMZPeXgdnmIxBrgMBSjBnH/aDxErBPnysHFdDxSdZJTLMVzzcr2QRU
q8+K0XX7Uu1GxG1TGBscMEy1Gfnzc+1VA4AI7N8cBNTAILA8yA+kcaaDX12osWMDEmCI0MDtCo5K3UOpuQPD/bnb8XR+APgD4nud97lUmcvBv6XAnweIpPYfb5qvRR+vQy8sEjATEMBCjZoLkpkegvF36fYB3O9+98vefvWrXz2ovX/bbbeh67oFR+qyyy7DP/7jP57XLn/kR34EV1xxBb7pm74pvveEJzwB11xzDa688kp86lOfwite8Qo88YlPxAc+8IE9O8xfsOZaoOX6wnGe7NXK0/dUMbnEUjJKAi4GLTDz+LsKqIvJW+E+REBk+w1dSjp4rfyv8zE5Ht+/VlRQPaoGkAEo+HvdkqCCd1F9ICom9KXIuzqT88v8SdmGd5A7rABdzJo3UZlBfANRLJh3Dk2QGpe5mhwnaXV1G5l+laI7HaksygD4jTZGsZZfOSlgygJ2UqEYj1CsjVGuTRLgV5TshyGBEggMPyrKKOGppQQjCCi15gS08Om+IdKkwpjXJv0A8P2lpzFxfqbGFnmn5DwdMinP7Dfq736iH4bnGGG5ppqYOdsOQJSQtCZJavL3w+dA/AzgPuoAeOcT0AUFBqKvfBCa75H5/7Ltc9U0FyZXSm5U9ed0omPwvSNrTwE5WTeC7428bb6uyLW53y4M22IcFTeEqZhY+unr4VadsdGG7rfC0JfvLPjjISHJBnzKISBe8Cl5CYtgMidIsSPqA8gXwb/AbDQC/CEBb54oJsdl2+wBfgCDzvEa0/OSfshnQAYI9s0PJUrEpFKuvWfIxGvPoJenhQBES19oVuIy9mtg+8HkCZpkkNayvX7QjL/dFN5XvtXKt1oZ2z2hZtAy2y/A766w/Qju3hVWVRXe/va347GPfSye/OQn4/rrr98zi+ieaMaYfT0HX/d1X7cwd+7V9gPwA4CHPOQh+7Kd/bZHPepR+Kqv+ip87GMfwyMe8Yg7/Pvbb78dT33qU7G1tQUAuM997oMHPvCB+93MfbP9nCurqsIznvGMfdveFVdcsW8MayLCC17wgn3Z1n4bJzTu/Tw8+MEP3jfmrNh+zkf3FMAPuMhBP2Moyc8odl/4MKu9ooMHdll2uGbzBYvyKv3sRSVnw4XoXc7Ay4I3cWMcAIlp8EsCU/LbTi2GbJFlwsYMT1OgccI6SnWqAGGGpQMXEKkQhiQBNN/k+oHtLAKYXE+wBbqa5YzEXAffNpHNZ6YHgGoEX63H/o/1D+c7wGgCX63DVRPAVjDVJARWWph6C9TMQVu3w22eQnfyFhQbJ+DrGZqtHbimhatbmKoAGYNyjSWjuAs6llrSJgEkBBBXsoEDO0sCBiIPFGWCFNAni1vvE7OIEICjGFRJJoy+kWG5Qw3S9a0vvZiCOD6CSJ2Rb4aad1A18KhfWynfiQ6G7Ab8evWd+F4ENwTMSwElqb8nbUgZ7OE4O65zKUBxDHgCgGG5IsnYzsdmAEkIMRtejkPkJeWYGteFAKeLbTIx6LkItgrbrzTcf6WhIClFGAeZNnLh+vV+OfAnGdSxoxhw52BZkr9yARouY269QWdymUwAmeyYPmdD1j9/fRaDc4AjD+8pi0JJn0pQS7/mz3d3djQoLuNIj5n+34CAhek3iJ8zYCtsWROCYtZYxZgNsk0E/q7zMOECNCIN6nzoz8U+64R+yntnuTcSJmT4Tifg4XLwT0t/WqIYHF9mws4gSjKgljys17Wc0lxwAcJSd5t97nOfw8bGRvz7rlrc//RP/zT+4A/+ANdff30md/H0pz89vv6qr/oqPPShD8X9739/XH/99Xj84x9/l7Tl3m7UzUFtpfwMy+i2TWCN1FiV+UInHOjL1ECx3oYSrDRIJxtzSe6OQQCbz9E9llE2Vw8xV/rMk74CBOXHFt8XiUElE07CGjfqu3Lv65Ts4ZAUfDPj2n3N9kJQntokaw5wcL4wQGVZ2nnWcd95L/VFge3WY7vuokS3IcL6qMDhSYmDI4Nydhr+7O2oT9yG
2YnTaLZqdDX3w8SGOn6HxignDPqND09QrZWoNiYwZYFybYzqwBrLeh7cgBmvwRw4lKQ9y1S7T+r48WcF3/NFZr13TjwRhHHZeqSaqN5HEJnv4QQUFs55NJT77BGUYFR64Zx75Ak6cRyIb9txkpvUiPZNnaTqJalNMRnzAdUbY8IeJZMBFESISVLanEOs41cFmVUBcUEEi5RIUpjc3/Ohv+Q9XfPYBYakGPuRFNQAgi9gRCGh58v1LF2vPfnJtk71j4UlSwawLVC0QDHmuoIADBXxPMWlFDy6IIBtBQBWWqc+9GHbSW3ctLYZYsxLro94AucCgfShij8kwCyFrYg/LJKfnQP63E7hQeqkBu1aOe/DtgUEHEhK0HOP69J8KxOhMEvLapHlJ4C6Yi9nYF8PCIz77s+DS5h+BAUK68QOUuucoURYpwDAHttPQEUKiZqA6hPdRn2uQsIgb4J9RH0qzlXD/mK2lW+1spV9YdvBgwfx7ne/G1/7tV+LZzzjGXjnO9+J22677R4LVN4T7AEPeMCFbsJFZ0SE5z3vefiBH/iBOwz6tW2Lpz/96fj0pz+NRz7ykbjmmmvwbd/2bXuWHF3Zyla2v3Zxg37IF3A+ZJzGYExYBOk1gsBg1A/09wC/rJZCf/EilgF/ve055Is7IAcQ+vUUhrY/ZP1gFTjw5vN1kArykwKaGKSoDDGzr56B6i1exLfzmMFLXQPfdUmqMzDq4BzLawIxKBJlmmBBngPufHgF3PgA3NpRuGKMuvPYalh+yFpgfWMDpp3BEMEEMA8A/M4WyqIE2gbdbA6RAOXM8oofvXoKejHMf/cCTPLsU0a0gBY6IKRNM/1k/EjtM+5LOU5+LQCAZuVpgEtYQX02FoRZhVAXBIiglbDV+qCb7a1xuW0UZSc5CCQBod53fap1NvS5zggHQkAjPIvcUyZbFmoRDUo+kWE52i6X8NHbHjJhymmAR0O8fZmvPvA3LgwDTAXFoJ8NAHhDhDLIuJHhAAQHI4YyolNAJTFt+TNDkqnNIJEnBuEqa2Aor5MXj8ucP/SzrBZg/9wDEozKM9c1KCnjDkB+/obmHGHzQMYGBdlbDkNx8KsPAqaxpuA/OBBsYC14kXEl3ga3x8AR18FpugC0K/APJvRD1AuUgFzaS5L55LYKaK1Zf8IYZSJRCpTmHZ46bzlfWHeTZPATYOW4+aK3hsJxUWrGhTBjlieW7Nf2wfIROjC1zI4dOwZr7UKB6uPHj5+zZswb3vAG/PRP/zT+7M/+DA996EN3/e6XfdmX4dixY/jkJz+5CkzdSaNQW8rLXOkNYglMSjLH2vRIc9lcJMj3cGA3/i0+WWCGZH7Xbr8NbcqezRLfSoOOveSOxOTTSVV20VfTQJ1uX0hoim3v6vx7AgS6FmhqoAi1AsPvfVOzT1UU8MWIP2u6BclCOZrOIbL6+JD53negslivLNZKA9rehDt7Eu3WDO2sRrPTogs1/4RZZisT2H0lqrUS5doIpixQjCtm+lUs8xkZfuM1Zh0ZmxQd5HVoP1RfZkCE8onlfe2LaZP7uguoGSeQKECLEP1NiIxmb7Ltg898LnxMUKOQyAZRrwAGAT+y8rcdBEn64+JcJsOztCbWG+wDRzrZSx9PBPl6iV6S+Cdy4wBQBvDKIflA4nMO6Z9mb2kgJ45fBv+oqyMgSH3f2zBICDLpuPQ9Gwj12ZI/Kb8XpqfUKxQFE5He79+bjfI9zvc+m6+TKD5LspEPDD9HQJQjN5xgNaSkEZMaov9/DhOQzqF3jVDqS5lXBsDmtLY9B+CngLL48744p94WFtdOeh3TP+64id0SaO+oDc33cb88VxjwMXksqrbcbbbyrVa+1cpWdjfZ/e53P7z73e/GYx/7WLzwhS/E3/3d3+Ev//Iv96V248pWJvasZz0L//E//kdcddVVd+h373nPe/Cv//W/xpvf/Gbc9773vYtat7KVrWyvdlHfMSSLMoJ4Oks7ZGWLbKAUnhxcGKrM
8wWJkl0WIejvU77nHWBczGrMIv+mt0BW2Z/kXQIP9W9skbEWdZa0sNhckHcSeUOd/SiA08gS0NUwsx1Qsw2qd2DmZ+GbGm62BbQNXFOnfetFjUh71rMY4PH1DMZazvoN7XdrRwFboZscwmx0EDdvNTBo0Hngtu0mgjMHRhZjW+LI4S9B2WzDjA+iPHQMfnsT7vQJ+LaGnW2n/Rclg30h8IQgLcXZ5YXKNk71YvR58YFBJqy2uvMZS8Go56Dog9Z51J3LwBthAgnYJ+yyPuBHBJSUsmIzxpxhtqVkGccu1oFS9XcE2Fwb0rkV+1LqDMkzGTifgBoA2THqgKFXr/t1UoQdJjUzLUnNzAD0SQCzq0O72vwaoVCnxTuQD+O2ACwVShpTjc/Ql2KzznF2f5A8FaBWzoGYMBy4vqdBEVgRpSWsVwWmpUVpOVjDcp8OnWfgr6iKGLjyWAxe9GtqprlGJKA8Kk/oiC/bwgnwOgy4AovZ4kOmJTc9OLArxz10aWpLfcnPZQBqLVLQLsoRCxMFWMz+BmBCwKlQYwsFX2sSWOwzjCX4SJ7itWbBgTsKTD/GTkVGllACKE0CSgGegkTeTVgexlPoiyBnF8aEsPIc+RT0lcE+IPfpkMA/Yf41S/Sj+jUY5Xc2MDWM8QAM19k0IZKqgD977hDgF4xVVYWHPexheO9734unPvWpAADnHN773vfiRS960dLf/ezP/ix+8id/En/yJ3+Chz/84efczw033IATJ07gPve5z341/QvP2ibJpYuP4wk+zIOdTywq8S+0f2WRJ71wveBcwjL6WtEfWmxGlA0cUlxQqgcLNa1628hqPXunWIE2Mvxgq7x2X58x0zNPJr8XSuKLyHkGlqK3ZfQtKbDLGFgaw9sqvufrGWj9IHy1jma0gc25w07LrL5Z51AGUE+mNg8f6uAhfn5grcJl6xUOjy2K0zeCTnwO9Y2fwuaNt2LrplPYvm0HO1sNaucxsYRRZWFLi0Lq+W1MUE7HKNbGsFUJO65QHpiiXJvAHjgMGk9h1jZiP0cgrMgB0+ivmvwemtXys1WqjeuTXDqRyCQTUBigDSCRW0zYkMSfEhRBwsjwdh7OUPSLiQhwTVCwYKafsPsiy0+YVRrwEzZjUSZAk3feG2jp2oiJeF4n0CDel8VvLAxLrUYfEomhr1ULxD+LQJj3yo9KCUKsZsE+Q+c9xoWFIY/KsvKBD/4ngnrF0swrfa0Jy6+rWZ2jq4H5NicFCjPSWJjJGpz8xlZA18IEhQfvEWux+YijMeMvMQElsSgca5cfK3r9yGBmfo9dgmWmz8O8RfAwQcUDSH64QUi4M+yvdA7wBugcxTIA0lYtjSpJeuy3p2Q0PULiOhUICQaSfBA4g0XFfRrqI6JI8x6v+Yq8RqYG/Ez/+qPFeSvMe4QeICi/sQqoVz/jnKt0rKSeF+byoUSNXvKr3u/Q2rlfHzz+tqhCiQATBgL7dvdiot9528q3WtnK7t121VVX4cd//Mfxkpe8BADwjne8Y1/qa61sZWKHDx/GT/zET9xhRvm3fuu33kUtWtnKVrafdlGDfj4EnCEMJFksIV9o9y0uVkSOKS42BhYu2Q7V4kVniUs2psrQJBdq+3kXqrwvBtX72ePx+2TytHmdNR0CJj6AmonZ1TvGEIizQcYT3oFmZ0FdAzM7y/JSsy10m6fg51xXz7c1fNOk2hHGMLtP6klIoGe8xoEe+VukQQFQvQ1fAb5aQxtqDDY+yTY679E4riXovcNWQ1gvpyjHDbzrQGRgArjoiwq6tgoZy4GXkIVNIQP7XMG5fr/YANJ4YRQhBTABaWuCClgakP9i+URZAFOP/ZaAPR0IiGw0BfZVlnIwzw0slqXNvXFJAfTTx01B6odMAbJFrF8nx6CvBR+OKQZTDIM0WhZKQMxYMxOqDpFIOymwj+Q8ScBhaOHvXRaRkX0IaNc354XJwPX8hNHQuSRhZUPAjKWyOPDpSouRNzDEbZrCwpIP
gRqD0jDrrjScWW1NtWuWuAa0+hKTIqErgR4f6hQubCO+NXyci99Psq9FuM5iwEltJY438NiUYBgh1DZEwL1cXocn1pICFue7cB1JwB/ew7uQhW75vcIU8MbAeIpB6M5x/URBzR2kHmSokcd6ueGYucaR9I3U/Esdm2o56rel/9tOAp2683gfxkhGPgf2Oq+kPgNo7+R7Lt/+bpKrdkl0SY/de1IAiqxNc/ldtP07ai95yUvwnOc8Bw9/+MPxyEc+Em984xuxtbWF5z73uQCAZz/72fiiL/oivO51rwMA/MzP/Axe9apX4fd+7/fwpV/6pbj55psBAOvr61hfX8fm5iZe85rX4Nprr8Xll1+OT33qU3jZy16GBzzgAbj66qv372C/EM0LQo7I6InKCV7NwUQxEaKfuAL0AsROBYmHTAWjSYLZuj3q/pe9H37bZ6poJt5gzamMaRNeF0rWFMj9Rf06sNzJdewDiTS6AHvqeOJxBV+OxunYvOXEJarm8OUUbnIwSl12wbezBMxbh+2mw+07KXnq9KzFrHPYaTrYkOQyKQymhYHZPon21hux8/lbsHNrAvw2W8fztQdc50CWQJZgKwtTFjBVgWI8Cs8VCmH5VWPQiGv6AQi+l4CtKvEqngs9GIb9136ikbxDCCx6Exj8PtwzYuKH/IafNdO/tCEBKKgkZECFSqYj7+GkFrXU8wOib8tgX2Iw6gQzL4w/NX6Sn+AzMFOYa2KFJI4Ef5BLFJDyuXK/UUx8Uu8TG06Sb3wApOrOYda6KC3uHFBagvcG3jKGah1fX+wXJPBK+jMb4wrQZlnPAJbOtthHD+xUspbBNAFzuprBLO8iu6xH+IuMP/23JArpZ/G7lpkkHpzLpGYvID4dX91E6MlXIjAIPUxIGCNwMhcT9JIHpmW8CQmojfWWew3TwB9vwDCqaAp414KCzGeEtDTwpcebAOfAcsBvWbICetvslcKI41j9Ricg9sdIXLssY/fp5Mvd1mga8AvzJ98nuE44OW6ntRUQfLfB+q53g618q5VvtbKV3Z12/PhxXH/99fHv17/+9fjO7/zOe3Qdx5VdfHZPrf+2spWtbO92UYN+nU9SgAJwyaKkv2jR9SsApAWGll0C8kD4uay/ELNFDDJ5AUP0KncZW1B/1gMCAWQBhpiNaYqUUdzbpM54tr4FNVyDg+pNUDOHmZ9Fd/Yk/M4WXAb6SdaugchnSha0ZDub6Qao6pLcEQC/sxlfU8F1adz0cGTgCIgmGb3Oeey0HTwsdlrH7RytMQsRgFnr4ITJp4MxQGT8SQZ2ZPT1g0z9bg7n34PrKloCOngFWCBm/QoLLquNkgVGJGiT788QApCUSzMlCcoA+hFAXZ1q4QWppAXwrz8eeoFL0mNDjQvqipgZTGFBH+uJhL7qwnEIg07kKvWxWFI1M7sWCLVcYoBT1yxSbSUo4E+A8HC9DYFrXMXQR7mxBj6w/Bjwm7cOdeewU3fMvmwT+Ceg36gwmFQWFaexhxoqFB+FCdVaOg7ElYENaYI85xBbRbYDJBCOX3PAiIM8AdjKLkKK39vN+hKTPvssyb6KrFRn/EIATGo/ClNA5j9DqmaPHluud95kHswa4lJwJpxDZlJzHZYI/pkChSlAoW/jMQdJLILnceYD682kAJpBAv5cOHgB/pwAc+E8GLlIpW9czgzJjSd/O6SD2u9/JSG7TE4VSMHXzvusZqKwfQEBWleLr3PZ0572NNx666141atehZtvvhlf/dVfjeuuuy7Wp/jsZz8Loxg0v/Zrv4a6rvHt3/7t2XZe/epX48d+7MdgrcXf//3f43d+53dw6tQpXHHFFfiWb/kWvPa1r73L6t98QZl3fENU4JskG/WDwjLXiNmFe9dwUotm7EX2lOf5JvOPAit7yI8i8YP6/hSwPCgtiTHafwhKAVFO0EvSCxbnS+8iyEf1TgBGZkDLdeIo+ihDsqFF3IYPtY5pNIEfrcFNDqLtfJQ6BJhVfHpW
Y7vpcMtWjVFhcKAqsN10aBzLpq9X4PtgQSiabbiTt6A7cTM2b7wVZz+/ia1btnC66TBTbLF4/qwBWYKxhqU9JyztacoCVAbQb7IGGk8Bqa0sfSjHE1h85LoceNCsIpvYShGwSrkgzL4iH2u0ec8SmOgcypCEBmsyBnZk+4UaeaU18d5skBQZSI2F6JeLfH2rFS5sfJBWlJBzuQsTVK4NAfukzp4eeVxfjwFJraQgQFECKSkDynTSUQQTA9jXBj+pcX3Qz6N0IolteTxzhlC417EE95DvTIHlx+oOdWT7+dk23A6DfsL08wVXNKai5MQz1wKugPcOxpgFiU8B6nS/iEqAfnYir9+71wuUqJsd/Vhg6Z3YhXt42m/4u/cD8eF98FdYXjYkDfW2rs+VnDvxJ/N9q/bFa8KEZArHiXsAYNpUhkLWk3F+LKLk8JCcZ5aAqpmobiBhFUBUCdEJHQP9NpjAodfQaj+7JVcs7F+2Jcfal0iWz8iEObUITEgTZT7PV9b13m4r32plK7v32uWXX44/+qM/wjvf+U58//d/Pz784Q/jL/7iL/D//X//34Vu2sruRbaSjF3Zyu69dlFf3T4sfm1YtIXSHwDSc6EXBLIQEdmqtk6MF73A6C1Yds1OBBIYJ4s2zQjsL356AajdMt41oNMPMMiviFgiSExYZoVnGUaqg5RnMwdtn4KfbaE5fQJ+tsUL99k2fFOjndVwTQvfOVAATnzn4JoWXd3CVhwAGh9NEkhm/RBLV20c4CxV18KXE/hyDJBBZQ0OjYHN2qHuPA5UNrLjpgWFzy0qtDCzszDzLVA743a1NfzOVuozkVqKAZkcHPXliBevxTiXtvEO1oS8XQIAAowEEnhp7VWmtIB9w4BCnnktoBOQSEzxPQEGKK+rZgnQLAEJqKBrQe08LHwHAGMoUE6NBerC+OyKwIxYlCujgoNVIsvYKdkkDiwm4FgCGBG0JEAkPbP2BBAovpbzoPt+AMCWMWtDYI/HK0v0ZDGK0K5567A5b/l51qBuHerWLTD9JpXFvC0wCvX8hmq8iMynJcKcfARgddDNGh/7oG9DY0JAH7kG5RD62XfnE5jQwS3JehcWb15DL5kOOAngXIbjIYAluSR4p8eUPme6EUPnTge5YQAfMtGdg7cONowtRyKXRZE9isD+I08RZI/yXhgG/iwYjDYBPHQkGfo8ZowhWJ/YqnIe4meKaSvYXwTqqVcTsAf2STA5ynhSuh6ModC3iVWSMvuHz7HIie3GVLjLTObKu3L7d8Je9KIXLZWc0pmsAPCZz3xm121NJhP8yZ/8yZ1qx8p2scA00MlQXvykMM5ljmVgJSQXqDp2qQ5qmD/E9yGeQ6LJfUMC2QAo+jwBVA9+WpYYY3ssQNcCPr23wPIL+4q/oxTwBgCRz2s9yyj6APhJAgu1dZY0QUHuEF0N2j4N3zZwwn5yHbPiihK0TvDFOANO4bjOrbclKAAnbnoYbnoY285GWXFrgFnrsd10+MSJLdxyZo7bN+eYVBbr4zL6G0fXKxwcFRgXXMvPnDyB5oZP4uw/fQ4nP3kcp/75NG4/sYPj89Tva5bQhImPLKEYV7DjCsV4FJ4r2PV1mPEaA37VGGY0gSvG+VgRwKEoATJwiimZ+tlmbMDsHhYQjv4tl4Ex9lH4vs7JOo1oRiOXWxd2nzD8LFGQteRnVitoeAx5VrDIEsqA6IREZQth+ImsZzhnws70xrLvbyvUwa9qXKqtF+WvwwFrWfcyyHuWlrLkkb5FAC+CfR6tWwT7tpsOTZCClfvY3BqUISmqKT3GhQFg0RqPzhkUJrApw1hH5juk+n1U77BvfuZ2+PkM7swJ+KbJwG0HAEUJW1Ysn0qGr5lSks4oSNKGy1Kdc/FtzsfOh9mw25YcO/ycyAP2/clrH0HvLPgWYYPRT+jtoA/U6gQyvbkhvzSy/YRtV1TwXWBWZyxmyteDWd1Myua2hTWrNQv1tiOICEmeTf6nli4l9Zyt
XfVcLNs2JiWGxT5csn6WdZCA+ToBN66BUgIagaVi4Vr2OQ2D/0O++l1uK99qZStb2d1sRIRrrrkGj3/84/Ef/sN/wM///M+vQL+VrWxlK1vZedlFDfoR5dyKmGWpgbZeJmJe3yUBfplk1BDrbuhv/ZFXon69zN/MbKgDo/ejf5dlvfcyivVXoeX7KHsP3oGaGS/YQ+0+arbhts/A72zBb5+Nmbrd9jZc59BuzeCdg+scfEgvZ8CvgXeO5Z7KAtWBKQeplEySNzaAaiZmyiO0pbIccAkHH7Ob1yuDyhBG7Tao2QHNN0H1JtAE6aCmjvUDuWvC8bsOQJn32TLQSfVV3+S8SEBeL+KzmjHnuaAcZLDJ+UEO+EWwTxa3WhbMu8gU4AWuGiOBgUllBU9pnMSxRwHclLqROgs/AH4SzJTnoVp+NsRaraGMDdu/lmJ9EPW3bitCm5aZfHuoixM7lINcAvbVrcO8B/pFiSzjYA2hcQ7WAc5blgZ1Do3jM2HIwxlENnDAt7kuH0lQRmoK5WOnD7bF1wNBHi2HxD/uzUkLnWHCD1NAlLOuKYKBDAQudpbsX5jOAtTGoIpX811/n/oZPWA5zoN28bfCAPRJ7MpQCorJ354oXk+cUR9qq3ofa2eSrs0SMs2ZFUiBNZjAVatS6PsBnz7gJ8CdBvzkeTcZz2WAn43bRQT8UkAsPy/OB1CTRKZ0ZSu7iKwP+nctM1B2U0BQvhUADvwuKT7qiXZPeIpNCHVXNUi4SxsyZgyQM+dDAFmuxr4cpTzrBAsiyll+UttMksbaGdBxTWQ/Z+aTdzmYZAJrJR7tQJCcE5emcMUYs1mHncZF0IjvX8x035w1OLXdYLvusFN3sIZQFQaHpiVMAJFMOwM1O3BnT2F+6izmZ+aYn5ljs3XYCb5dpec4a2AsgQLLz4QELzsexRrKVI2DL9rzr5TPFQFUk0BV/T1dz/p8jQKz2sMHtj5B+GGJec0JGAL2lUb8rQSsRbUCGaO77TP6nEHmM0jIe12PsPdaalxK3WgPRGYekO7RQGqbAH4CBPat38q4TYhvgDg2Zi0/RBlB6iCXjmXMNaO9tOH6Ir5z+5BMAxDIK9BLfNU21G1ualYFCWPd1zMe68bCuw62GvP4b2rQiIFxH7ZhQ22/rgf06ef8vCdjAYBz30N3S6zSYJv4K5r1x/5g7k9ocNFquXLvw5hcbO+QL+t733G6j7UFn8uTAYkfqH2HcA31GX5ZXb5d2HQZEKjWBJJUljF/w9wXjytsY4ExvWwu7u0/WxsEXzTyXzuXvd9nEcbh6B3ImziuiMwC23NlK1vZyu7tdvDgQfzyL/8yPvjBD2Jrawtra2sXukkrW9nKVraye7hd1KBfIcwLCXbDRVbSgmwnsBjI8iIlEoJaYipQNQS4xd/LMxHghU1mFoMdfePCEOH3vUxjvZAbAvyy7HWHQgfaVJYk1dsMJDXb8NubcLMtdKdPcJbu5im42QztrEY3q9E1LVzdwjVNBAC7pkVzdjvudnRoHcXaGKND67CHDKgaw9ezCGr5Ygw/WoMvp8z+A6Lc5cERB1EsAdbVMLMzMKdOxrb5ega/s4VmtgW0Ddx8J3XVaJIkPYHIQOBzQwt9ExeTGgzzDiZ8zwVZpMYl+aW+QKpe+AuTbwj8ISBJeRqEWn0m1moR5pUwyeDaCMainXGNlHbOgcOmjhJTUWrKdZyNLvuzFr4IzABjQEUV6+pIvSVvLAffbAVXTTgD3QFty1niTeczUEnOE4VjFZbYyBKkjksGUMoYtWHxPRRAM8sDEAKSCAuSAltQgJoha10C/vSznJ8ugn6hhl/FY6QMf3chU95Sh7IxkaU1DqzAUSHMRo/CcgCuUJnfItuUH0MKmko9Kw3qUv+61HJFuwVKQl95YzkQGBMAlgRLnZqHNHtYz0/9fYTnTIKtP9do0C/Olb1tKWaOBI6WWVDuZADMsGQpecAYH993HvDkY9Bd+rg0AawNrA6R9uyz87hJ
Obsvk+U1qYanbMdQCuIBgA1jQ76jpeOETSL3GxkXC4AfOGgWqxn1AoR3m91Ds9FXds83b4qcXQfwnNHWsMV4gJWFwPxQ0mxk4B1l85pm4EvSSASNFhJ4wk1VADa3C2AUAsJeGEb6nqPaM6jGIBbmWC0laOTYJDGnnUV2n2l2gloC+1ZwHXzTZL8FADffYf9Fzf/U1WHuLOFH6/C2RDvawKlZh5OzDjeemWHWOpTWxLq26+MCB6cV/vm27QAAtpHl/mWXruNAZbFeWpjtE8CZ27B18+3Yvul2bN2yheM7LW6rO5xuWE79YGkZeCotbGVgSotiXKFcm6BcG6M8sA4aT2EPHAaNp6DJGsxooqQAZcIsor8RGX2KbRR9ccrBCkKeKCKAVjxn1AdGKCblFMag69/WKDHctQ82tmF90NWghgFaYa/pGn4U6kcL0JfkPYOEvK3gixGrWIhvZQp0MBGcFfadSG86AdMCcKxr9dl4X0nrFwCZb2Yk0QcJQBTWoPNc71jqPM5ahzPzlpl+Ud7Th/sVMLIG89ZhVHB7x9ZgreKkPO5nE3wZwHqCDWsJ6thHxc5ZuO2zcGdOwO1swZ09Bde0cJ3jmo+jcazx5+sZ929jQDZJwVpbgQyhDffgvqSmPpcmsDzJUwQo++ODW02ZfzbUjwj9FxOSon+QA396vC3IvIPPoQeDYVEdtpcgmLMXfQZW9hPCIrDVZ8sFgJmox8wLn6VkPiXlqdl/8l7YR/xbzXki5dmGmsZ9tQ+WM+WDMeLXajCu79tqI4M+kzu+lDVZqBGbFeUTkFmScKO/6cKaOdyHOlbAsQbwhpauG+5SW/lWK1vZyi6wPfKRj7zQTVjZyla2spVdJHZRg36VCeCELD5aqY2malZpBt8AgJYxmcTkZSDfLA3YCtvFdQD5VKthN7Cuv4nIrFFOvs7aXJYZLSweF+rCBYAmgn7NNlDPOQN96yx8PYPbOsPPPcDPdwm88Y5f+86BjAFZfkTZp6pAfestqG+9BZOveAjMgUO8z40xXLUGFLzIp/lmBATlfJjZWQb6zpxgAHK2BSdtm81iW8gaGGtgxyM450BFyQvcouTFkOtAmu0HAM6ByMGHxSQvkrn/OpgQjBFwghf6MB6lB7og8+nAJ1vX6hvK4LUxwJBkpBj0C1nlJoFApBbNpMCz+FqAGmsBZ3kMFVViNCrQOtYylH4oipB9XkQpTwlKeVsCtkIXFvYSiNImh6mDURTarMcyUGR1UnTQlGgAxBoCvcM56ULgKtal8SmI1XnPLKuOg2FNAAJHhUHnPCaVjfX8BAjMmX6UnTthSQAGc+J6gp3l811aA7SOpbWcZUYZCMSITRwTAgBJprgOL0hfWQk6CeAnUpoaKBUgUINnut/6TAgdvDlX0kH/vfM4F3rbsU6oBh2BFDiXQI3RiQ75ed0d8EtMAy3lZsDBNGH9GSCT+xRmhwB+HMRExloAhuvwifUz6nWtP2HguV7jheknUp5Su0/YJHIcy+Q8+0E/ol0auLKV3VMtBl9DgNYVDAQAGTNJgAmQiay2lIAAePlynNsMv7kba7CffEBtCoY7NY8a9V3PMLtf4Ejp7QwwYXrtsIEGTqRq2rqWa+9qdt/OVvSp/HzWqwtnuEaysUDbRIaglu72poAvxnDjA/C2wulZhzNzh9u2G5zcaTFrO6yPkot+oCrQrXscnJbYnLXYqTscnJY4MC5wZFLi8KTEemVAW9wu8e/anY4D+yG5oTKEiSWsFwblWoliXKKcFLDjCrYqGMQJUp7M9Jsw+BVkLX1Rxj6MIKAtU/KIkkz1YMDU20X2n7Dt4zhS94ihQL4PbHDviU9n8MwJSqbbBCDLIPoFBSHKk2t/3+uEQMPsKhjL9aqLMtTzK1jK01bwRXguR9G3EsBPs/rkGKSWLZDkp8VXlCQrYVPJvUQnYnW9vklAE/tQTecjyDcPTL9567DTdDEpqg7qB12RfmuphSvDeqOyLIkaqOmdU42JiYRt
GOM7keHXzuq4bnDWwNoGvuu4BnjbpHmjq7NrjmUZKQM1+ZwiJuBIgpCwDkUevH8X1eDdbrBP7LewhXjfR/IPImh4B/AjGa+aOQjwOqCTbXmfwH9QBvjxm0uSwHTSXO/9vq82lDCRfX/gtdRYd9zEmLRlSIHMdyZZidir66uALNjQMStJzwUfmd34DHj0Yb7fjeG5spWtbGUrW9nKVraylX2h20UN+lG9A6oLtThVYJ8sHvoMPglU6KC5y7MVhSkmn9NQVnj8sovyTOSD5I83DH7J73Zj9ix73yxZxMVMcQX2zTejlKcwxtz2WWb1aSme2RZc3aDZnsHVbWT4iTmR9QzPUeapLGIGOBmD7Rtvw9nPHsf9jh7j5hUlzPoR+GrKbXUt7PwsM9K6mhf+8x20xz8Ld/YU2ttvRX12G+2sxvzUWa4dGNpB1qBcG6MI+6sOdCwvVVa8sC96YJ/ul66FZPNHORtToO10fRX+ujUcSIAFyHGAoZb6NvpUZIEFir81kKBNqh1mgmST1PWIjCABn1Udvyg1Fc6vN4jHqGvNMJYUXovsVFkxWGwrDsIJ2GcKzkYvuN5MqwE2h6X14HQQykCBGTGo5xKL1btM6keCvYOyb3r8hvEcA2VOPXyexW2JgZfSGpTWoQoF8xjkY0mzLoB+CO2uCgMbWFhiLjAbAQe0KcjoTJCcLISBGjhZhmu7wDHQ40MmeD8Aqeu2COAn5zhel/K31MHqFANmiO23kMldp37T1guGDEnkRQafmkO8KQa/EwG/wIAR2bCsaSF4YwbYhsLKAwTk6v02BPQksMmxxRB0oxAQI2b9SVC3c3w9dUHMjZgukdquti9jQEBB3UXnMiMN70WN5HxrsO9c7D7pC0MpYCa9xZJsdz/wRxLIvgu3v7J7qYXgLbPs2nhfT3OXuh77Q1vud2HuIQpAnD0Pd7OfUBCeyTCzhVyY1/oZLPJbIAF/ff+q/3d/DnYOMFwDGIaZPQUhscPamuf2ruZayLOt8AivXQcEoI+KkLDjOnjXcZ0/IqCe864NM/R9MUJNFXZqh1PzDrdsNbj57BzHt+ZxbitDXbZpAGoOTUvUrcPprRqTaoyD0wpHp1zTb2o9TL2Fbvssmq0dtDstuqbLap8y6GcwqiyqtRLFpEAxLlGMKxTTMex0ChNq+CGAX5x4JTXtqtSfpgjHker4abCChFk50PdEiwF7Acs8qSQR+YnP7xMyBg0AIoqsNXkWwC8bt4pNBKeAP/GtbEguK0qgGrEvKUBnMeba0cUYtdNgn1dsvF4CCXJ2nwB/wkwk5IkpIhvpPKIMdjZEwccfZT0V22+7YcnX7bqL/pH4RnXh4HyBxvkAKAaAygDjwqIM3VBZSudL/JYu1KqcbcPNtuC2t9FuzcJpdPE+Q20d5T3R1HzdtwwGU5g/PABrChCADun4hPVnkIA/GIp9KuCfBlY12NeX4Zfj0/de6coO7NN5j0w9Myk7JJn2c921fe9Z9k3hTUMUj22Bu6Xmn4V62UMWry/KvtdP2jpXmwXw82r86s+EASl+kPO9tp+vg9U3WbtioF/1mn0ooQ1IUvLew4e1nrXVeQi/7r+tfKuVrWxlK1vZyla2spVdLHaRg35nQXP5I4F0AKI8iJb3jOCc3khfWgUhyO27VET9XO2QDGfvQlBKZTUrCbwIjOhF09ACz6hj0aDhUOAkyExRO0d3+kRi8509lbH6XNPG7FzXtNlmyIRaLiW3uzqwBlsVGB/ZgBmHei4SvJrPMDq0DjIGp/7hEyDzSZA1mF76v1EeuwzFfe/P0p/bZ9FtneG6MsdvwvzUJs5+9jjmpzYxOzlDvcm1ArtamG6Eaq1CMSmwduk6qgNTjI8eBBmDEoAf11HmUx79LFZfVByYIUrnwLUoTBHAHARgJ8jwBCaXAS9qDUL2q1qRenhV88+HoD4v5Pk3KcAUAztAZIAREAInidknILIPYJuAaSCRwiLoS5PiWGIJH28D6Fcw6MeBuJIX
/yED3SPV7eMxmsCW+HdoXxkyziNjTcaqApgXQHVpkwQelgRYRe7LF2NmHXYejfOoO4+6cxEAbJ1PJAlimVEUBkCB0hg0zmFS2UzeU2Si+ky/ypogH8oH0zkPE+rXdN7D+JCF73wMyCwLlMg2dLBOzq/RgJ/qpwzwG+g7Ggpq6H6UIBk6oC9ptyQgorcRg96SHd07NxkoiPxzCTgCyGTuzscka1yCoLo2lt5u1uSY5Y8I/lnL+y4HwjlSZye1jZLMp0/XawQi79AR9Pal2pjkw3YPMUWmBgLwF9h+d7ArV7ayC29xLuspJ7gW1lQwGeBOKAIARJIEEuYorlHF4E/0hdQ+WOIzBHV7c1GU5AvBXR9/g9yPGphLBw9J5r6w32w+9UEtogCKME+LlCc1M5h6C9TOk4+1dYaBkPkOJ1V1HXznYCrAO5O1z7sO1LaJoV9O4aspfDmO4JElQtM5nJ43+MfPn8HZWYuqMJhWFpPK4r5HprBEWB+X4e8J7ntkiiOTEl98cIxDYwszOwu3dYZZiHUL13nY0mIjJM6sFwYTa3Cssli7dIrpsSnWLl3H6NA6xkcPojq0ATM9wJKe1RhmsgaztsH+RTHiZLaQaDTEEu8njhAMjICE+rwrM5CkED7DnIuVAAdJFvEAiDwnhchv1X05gn3KH8tUFTLJQDVcRDJewNqiZClTSaoqR+E1Mx1bH/wVJTEdj0XLRWswTwF+WhKcx3GuRiJgqB7/cRwFX1QYfttNh9PzFvPW4cRmjZ2aGaAA+z2cDEWYVOxP1p3hcaZuxs4BhbEgopAcFtoujNSmjrKdfh5UQoIqBwCYskULwNQzuJ0tGGPgZ1sMlAc/XDYJ0wLlGJYMjDFBUQEM5obsoHSPF9+BMpnO7NxloF26/5Iag30WJv/OwwyMxizprWeS1CRSmMvapNsm7Nq4cUgpCr3hc/t08nqpJHvv+ltm5+OGSM1B7xGBY10LlaU2d1kbDyRlkPfc9sDO1nN5+o7cY9K9I/5OPpO1kud64961w/URV7ayla1sZStb2cpWtrKVAbjYQT8JrIeFwKAcCrCYAX4uMC8y984vUuvBlCUCEosw7sMsBLPukOnMd98LcgmooKWmts/C7WyhO3MqSngK6NfVDLSJkTGwFTPnRFITALP7xhWKI5cwy66Seh0NvLEYhe9s3ngrugAkNls7qE6cwYG2gZmscSBs+yx2bjkZwb5T/3wK8zNzzE7O0TUdfIhomNLAVhZd3aGqK1RrJUxZQKRGU3tZeinWU9CyNjZkoQsY2MuUNMSLcPKL0kmSXU4E2CghxZ91amFPIaigs7M1E7C/PzlPkek3xPKKgJ/BYLBSFvOSZS9gH5kk4ymApykik05n8S4GPPJgWayJqdrcB/x2Y6Yt1FbU3wlyX1oKq3MeLoiw9ZlllgBHQX4TDigkSEUR/BsXNmN2RWaWyKwSRTktIJdrXWY6LiNjRUzA0T7gl8X1dgvcDFn/e3HO6XHCdrv+Y+P3mBWs9yFvKdbebtOgvlaARYCvz3xYtg2xFMDJz9ligC4EheFh5Vp1CWCLAblzhLl2C5ABi2Df0FDabRsOHOA7137uEqO7uO4M3YXbXtk9x9QYIgHD1Mfxeo/M4pBodb5z4S4Wr2PDSS9k9L2yNx/qeVKPzR57cKn6gviJUsfWe1C9GdlO1M4TwDefsZRhE9hNAfDLTOQipf9cx+yxUB9O6uCZ4FtYQrjvISa36Dq2R9c7VIXBpLIoTIGqMLhkrcLhcYlJYTC2BIR2iVKAsQRbMdDXeWBsPAN/ayVGGyOUayUrKmyswQZZTyorrhdcVkCQVmcJ8ZB0pFniIu+pz5U6d+JryXjRAfo+QNFPTAIAEXUUyUepCSuWyWaSsPAp3Zs1iND3Y4wBGQsvstXqfMkxx2M3BXw4fucQAT/fa7+0qf++9iG0/7Cbb3Wue4bznpOoOo+dpouAX926mBCVgL8O85a32zgH0wGNM4ExSHDexlq5Ar4h
tM+7INsZxrjI/8t4952Dt45ZrvLdesZ9KfKewYcVkJNr1qWzzeeX4LUcphoPMg6EmSdjq299yVhtkigkNfkc+Qguy7lZZuIPaYnV9Hp4h9J2v2y7d8Rv6wN+wK6An27RncHEeFvM9ktrnrRfnuZ3n9890a5raL0uHlQKCdtY+I0P9VudA4bKC9wdtvKtVrayla1sZStb2cpWdpHYxQ36tQH0UhJ1YpwVnHT/s8xBVXeF3zSIBcV18H43OSj1eSzZ5D1gHGATE8orACoucAyGs9R7YFVcFPWzIbsW1M5B9TaoncNvnubs7tNcK68+u436zDa62RzN1iwAfm1k+JE1UbYTAGxVwtgg0aM/M5aDP+EBY0CjL4af76CsZ6gOfQ71qTM4+7njaLdmmJ04g+2bToCsQTur0ZzdxvZt2zj7+U3Mz9S47eQOaucxcylDujKEsSEcLA2MJRhr4Lp0jsgYDkKNxqmmnzEga+GC9JKrJszwMzaTmUpBwHC6wYvIGNxAvni3IRNXMlwdAAo1xiTAIwEcgxRw0tJCIu1HPkiDecd1gFQtv12lHqOET2i7yHcGUE9YfVntHDKxfdJu75PEUdy0AiglSBYlsGRc9YJkWTulTQKu2iD1FfYvXd0PMgjDT+Q2W5fHCwjcjtISAIPSAKUldN7CBXlSCbLJ37uZBD7635PzJXJppWUgsbRG1WakrG/67E0J1mW1WRbOn8+DXF3LrM5Qx8r3fjOYFLAM4NP7ifuT+YXyOUSPo8GOcjLol5qWfxrs9QzwG/5NPyM+1tbpMU+BYcmunGnX3z0NMgqTrBdlgbr82NIb5xs26kEN8b3h7Qso6u8wa3JlK7vgJj5VgRxQ61pY62DjvZTHeOcJhQbV5D5ihMk0StuNjA3xt5aHpmXb1hRAaUBdE5kn2fzW99m0XzVwbJwikI6L2dh5fWQzOx1ZTu3OFgN8Ndfvy+TTww2NrGFFgmoMGq8lmcyQsNSNDzLDb3wg+naFKTAJc92xrsTJtREeer9D2AwMLgb/kmzj5RsjTEuLSWlxn/URDo4LHBwZjKgLgCWrMpiqQLVeYXpsArKE9drBWEK5VmJyeIIDV6yjPDDF9PKjGB1aR3X4MMzaAZjpBtdqrsbAeA2+GEeWm/jYAvi1MElxemCOi0zNcO9MyVYmJmc4JJaf1FOVZJIkRQgAjD6wT5b8hiEGnSEg1teV+nK9+zT7tDb6lACY4WdsZPZlyWQxeSn5gyxFmRJCluSsAEh+hG6fZv4LiOOxWCvQI5epbFxi+p2ZNTi13eDEZp0BxQAwqWwEAG04xlmoi1y2DmNr0Bgf6gSmOm/ZNSMn2HXRL4+An3PowrrC1Q0w2+J7ZDUGdR1sofj60WclkGGg0AYftvNSvy8k83hOlpGkHUuU3XeX3U5FGlT8fNmWTmIycmMeMNK+PFTyEpJPfT4KBkCoUU2MvxsiBi7DGsOY4P3E+qZdPicOqTMA0b+7s4Af9T8PwGofuMwBex/qMgfgL+wv1XzusGC9dWsG3kkt1oH2QfrEYTmYp5MSJaFyZStb2cpWtrKVrWxlK1vZoF3UoB+ALIs0CyaZAt61IMuMMcpWjLKIVW9JgFkVH1/IMlQBoqVZmhrI0RmZGpB0bfq5c/kCybkc+NM1Cb1LdfzqbZh6C6hn6M6egts+A7d5CvXZbTRbM8xPnYWr28jyi4CfAHvWwFQFivEIpmJmn3xmywKmKjmwVVuWpqq4nh+Np6DxFHAOvp5hVJRot2eoz2zBOxf3Nzu5jfmZGjsnd7B9YgeznRZbnUft+AGE4EeQxyxKi2JcoBgXKCcFinHFbMNxFWrKVJwxL9nZQcIzykzpLNgAdkiwRAIguq6fLJL7wFCSw+R6I1xHhGBDdjmFGi08fLxo+AGO3/cI2cOkALABIMh3bTbWMqZqGC+R1afBPmH19Rb78izBCb2IX6ibo147z/FW0ov5IQhEM/sUCNnCoAuSnWl/FM+vbCnWvVFt8h4x
COLhYcigsvIdznTVNdv4HCqgZklMYAgklN+bwCK0BIwKg9KYHAQVCU8F6A4BfrsFKzw5lrVyLQenbcHAH0epGfwD4jWfjcABxmAWFJRzEcc5KTC2lwUu39XPACc+DNW8Ok8bCnL12X4Lx6VsGdgHSP9Sxo7IwL+h9gB5cNSnv3U7Mtlez3vr1/s5H9PMPe+9qke0aJIEcEEgv2UM3P3c/srunUYG3vA9ltR7IpnuvQORjcz5pZsJSSPnGv+SjEUGWQ04AZK6EIW2ZOBtuSAPRyHwmyTZF5kSg4x07a8JMNTWnFTVztCdPhFBP7QN+0RNYjQxyy8BfqKMYEaTCPhRmeQt4729GMe36o6lquctH+vBUYHuwAjzSYntpsO8ddict7E+23pVYFparFcWG6MCY8ty2KSC31RUXIf5wBSTo3OQMeiaDra0qNZLTI9NMT6ygWJtjGpjyvWax2v8mPAzyioBfpJ0JL5suP84lwCVIelF3dt9dpbc2+V3JtyD+bcUAB9+ZsZ08iv6agX6Hq0ltwctSM1SUYB8kvbMxoi+x0qCUzzexU0S5feuwd2qNhLQA0VC8hZMrHMsMpKxf0Ag8tn2O+8DMNxFwE/kPa0h1AHgGxUGnXPoenLhnfdxX+W5FBGMRazfZw18x/X84raaFqZtItOPjI3PRAZoQwJB10TfWPwUE57lHi6MPQ3CWShgOfrgyZbdw4VF2K+POGQ8lhOIK+8N/VT7GmJ6pjNBetwa8X9DHUF4XlNon5pcSh7t24APN/i3vL1kM7FPkfdJ1/tcm/M+gIKIkrPU269IOssahnpg3/kq5iweiDn3WvtC2cq3umjsL/7iL/D6178eH/7wh3HTTTfhne98J5761KfGz7/ru74Lv/M7v5P95uqrr8Z1110X/7799tvx/d///fhv/+2/wRiDa6+9Fr/wC7+A9fX1u+swztu2t7cxmUyyufPebN77e/2x/u///b9x1VVXXehmrGxlK1vZF5y1bYuiuPjhMuAiB/0Y+FHShyK3QZZBPlnco021YLyPi5As8G0LBrJ28R04aN+rBZN9IdSncY4Zf0AMxOuMdDJFqP+3JEPR9QL/wrhyLaidgdoG1GzDb56Cn21xLb/tM5ifOov5yU002zuB6VejnTVwIb3aViYGYaSOXzFhcK2cjmGqxKQDALQNXAAhrbGcwT5Zh9SWs20NqsaY1jOYskDXtKjP3I75mRnO3rSJZqvB7PQcZ7Ya7HQOO6GGW+18BFgsMduvGBcoJgWq9RLFdIxibYxiXKWagsakmn5lFaWXJBM7s5A5rFlvWo5JgD8HnzHOFsAxoij3qZfS0ofeA10A/hyF2h2GM3oFVEiBo7SFyFLog9ES6JQgU2AyemMT+LcE7NOBNwHY+ib7FxDFgbOenWTOa/Aoo+KFBb0ArKGuT+2Aeecie0/MGp9l4QN5cEQCZNZwDTdjUqCl38f9AMNulmqucGBFwN2mcwz8qUvSBvDPENc0jMw+I23L2ZALYN9ucp7CIhFWhJLuHAyAO32dm3NuOwP8Ass5Zn7Ld4Zs4P1zSQ9TLzV8qI5NP0tc/9WXb+vbucA+osTQlO8NGV8H1JMlpQUAcoF12AMGd7M+aChMwk4C35Q+W9nKLm4zsbDTQlJC8F2sKQCTwBv+mYF3EgRus98sZRbr+c4hfkezbOIbhjiJAgAkoCxsj9Aub4qFQPNSCWp9TIHdR+0M2DkLN9tCd/KWIOVZ9xhPHeAcXJBMN2XBigShDp4w/ahiBQJyHXzXhfu6RauaN2+5xu1m08F74PCkxPrIouk8Ts9bbM5bjAoD5z1KY7BeWRyoCqyPLCYFYVIaFIaAOkhxGwsqS1QHpujqBt2sjvLp5aRAuTbC+OgGqgNrKNfGGB06AJoeYJbf2gYndVXjVM9O+9eBaZTUBUJyTW+cEAKDaCBRo8+eCqeSWXDC9APQgf2vWNcP6d6g2feAvmckwC89h3uAJOB4zz6jrcK9Wd1VdPKg
UlOQJLIkgbl434v3L9Wu/HPksqNICT0Cpkp948b5BIpKn0ofKTac8z4D++Q1IP4MoVPb2s2WSmGHRDuyloFta2CrAk5JfALgeuF1A1PUiREbZD6NtfCmjiUQSBLMnAnnpYp9FOviDt3s1bEDORAocqDnkhSneJ7SDgRclTq8RJT51bzPnOWnbSh5qJO1Q0gINOxwp5qFAPuF/SS7c/i8fV9vt6PVfUgIU7r6uwOvVxig9Fn/S22/VLtSygCodYJm5GmQziB+1k+e3dVi0q5JCbGDnZAkPpdJg65sZQCwtbWFq666Cv/u3/07XHPNNYPfecITnoC3vOUt8e/RaJR9/sxnPhM33XQT/sf/+B9omgbPfe5z8fznPx+/93u/d5e2/Y7arbfeile96lX4tV/7tX3ZXtu2+OhHP4pHPOIR+7K9/bTPf/7z+PVf/3X82I/92L6Aft57fOQjH8H9739/HDp0aO8N3Cf7zd/8Tdx2220r0G9lF6X99V//NZqmwdGjR3Hs2DEcOXJkzwDKr/zKr+CDH/wgNjY2ssejH/1ofPVXf/X+NHxlF43deuutOHbs2L4nfzjn8Nu//dswxuC7v/u793XbF8ouatAP1uYMvyg34vJFkSlArk6fxSA8AJIC471th2DBApPGITJ2xHSASYBIzTiDKbLaZZZMXPSCRKJkcf9Rcsq1oK5msK/eBOY7cJun0J28Be7sKcxuuTWy+wTsa7bmLJPZOdjKgiyhOjCNQR7J7rbr68yiq8Yx6GM2jgJFANMEqNg8De86uNO38WLMdVxDsG1A0wMYdZwv6jsHU22iaxzm5Ryuc1ib5cXWO89AX2UIG2slRhsVpkenWLtsDaONMaaXHsLo0AGMjx6EWT/E7TpwmANp03W4YswBm3KcGHABJPOhr0UWyvkESGlpHjG9KNbsHW394JK2PlMpgkRKjsfbItVqI+IAIACilmsVOacWywnEXpDyFHmpJdmLMZ38AAEAAElEQVT1OhChm6mZUn1z4CATy2IZGBm7cu67HjAdaiv1WZPSt3J+AVa5ZdAmtUMCD5YI/dbmQM9iEK0fwOib7Ld1Pp53bqON7dNMQdmuNRwAShKfuVyY7GsB8NMmQSwJdoCnCP2bheCMnq+EzScSsE7k5pxiryAH/CKgrOZAvd3d2tv/3LtYY0dngXOgVQJteVBI3tOA2rKsd22xTs857s99poi0TV4v/gDw6hPn828OgZExYLikDf34oT7eznGAsaMAMEoSAeXBwgtmekzcVdtf2b3SPFFioxGB2jm/dB28a4G2hh2NARAnURhaDNKH5IecZeyS5LnvzVcKCPFYZIV5IjjnQaaCtRVEthEtgwkiG0eBYR23rcdpZAZ24f5WJ7Cv2QZ1DddHDolU3Ymb4bsuKiXwJk0APywnSgEM8BUly2KO15jxt7bBkpi2ivN6nKsRGGvesex1YPoVhjAuDMYw8PCYlhbbowIH6zZKIB4clVirLCrLgF9lVd8TqzGY6QYmlx6Oqg6jQyzzzslUI4wOrbP/Nx4FH2sD5sBhmPVDwGgKV06SfLetFmoJtyGpSnysfu3jZaSx/rw7dA+Qt+Q+VJo0Py+w5dC7J3jHdeRcAILFgj/jAV71RAC3ze8lOoGmN4Zk3pfnIflpqPbpY5F2Utgnt7NNCXamQOtTElXtErssqQ/wdebgMQ4S5dIGAfa6oGxgiSLQJ+CfNSbKl0vdY6tOAA3dVYkBv5h0Nxpz9zUtqG7hrYvXQzTnuNalMfBzBv2cKHQAoDY8o2LFC8uqLP3kvd3cA/EpPZASxpYoDEQ/IlMMoJh8Bsi49CAf1nA+r+29LJFOW1Qb0H44ETyxV2I8QD6UD4jnVs2FQ1S//jxmFu+5FI5ZfLNBP1krR4Tt6frJnRx7/5jUNnxYD8t7XgBLUm0XPxgA5LNl/qeal7PEM0mQ6yWILJ3TL4StfKuLxp74xCfiiU984q7fGY1GuPzyywc/+7//9//iuuuuw9/+
7d/i4Q9/OADgl37pl/CkJz0Jb3jDG3DFFVfse5vvjP3TP/0Trr76ajzlKU/Zl+391V/9Fb73e78XP//zP78v29svc87hN37jN/Dyl78cP/dzP8cxiz3YDTfcgDe+8Y14xzvegSc+8Yn41V/91X1q6d7t/e9/P77v+74Pb3rTmy50UzI7deoU3ve+9+Hbvu3bLnRTLirz3uPjH/84vuzLvgxVVZ37B/cC+/znP49rr702e++qq67Cr/7qr+Ixj3nMHdrWzs4OPvjBD+LTn/403vrWt8b3jxw5gte+9rV4yEMesi9tXtldb2fPnsV1112Hb//2b98zWPf5z38e11xzDV760pfiyU9+8p7vCQDw4Q9/GN/7vd+LT37yk/jMZz6z5+3dU+zi9ix3Ybb0i54vLJZ2k/+R7OAAusUAkVsMvsu+5LHQJvmO2nwEADNgcHFBl9rQghqWm/I7m3DbZ+HOnuL6fSdPYn5yMwJ+7daMJTbDqpKzci2KcYVqY4pqYw3joxsYHz2I4tAR2INH48McPAqzcRR+7Qjc2lG46WG48UH40QZngFdjzuCd7/BiPvSHSFlVB6aoDoTntRLVeolqrUK1VmE6KrBemPg4WBocGBcYbVQYbYwwOjjCaGPMv91YQyFyU0EuSx4IdWWkxgpMznSSvvVeSTx6vwD4CSBBlAeeJFgvZ5qzgkPdGJO+Lw+dLWxCMMVALbz7gI0+71EuyzBjS9h9KrPeq/c6ryRKPYNbUZ7JJ/lMeWRj6RxzqveqHpqAfqaALypugw2vA8tzMLAiYEcIfrjAePRIAbIUgCIUhlBZfowsBzsnBWFSGEwKg2lBmBiPERqM3Axls82PehPF/Ez+qDcxarcxcjNMTYf10mC9MlgvDaYlYVrm2x5bg8pywLQ0hEIBfqT6y/Ses/OozqW89vp8UpBC7T9smT+KUQB3BeitAtBr0/b7c5wAxANtyeYj3UYNGJ6nGSyOnaGArq6j5+BjPaL+Y+l+7uxNX8vzBXaHPCzCI4DIhXpUBigNUNkwDsOj7D1kfBZhvJbhe0UYM2Vv/JTCFlVBVYPdA5grW9k9zpYlFCj2lKGU5GIpH+NxHhwA3DJf6g4kKgjDqvNA6xGTfLK2de3CnKDZXmn/oc6ba9m/62qgnnFt5LMn4TZPwZ09hWZrB/XZbbSzGu2sRle38C4wnKQenNTwC7KYZrIGM16LNfB8wfM8A2kVYFkyVepRacCUiOel0nBi1KggHBgVODwpcXBU4OCIWYCjgj+TuSmyXQzXqqMR+0/iT40OHcDo0Hp8rg5MOeFreoDbLf5VYPhpXyRKqQsbrQf4RTDMp8QjYaFJt9+ZuqaS5JHq7FIca1IXT8adAH4INZNZqjWpeiTQRBQUivxv7Yvre+SS4Pv53K40KJlJevpFdpL4Xsz04xrIjTD+VD8KgMXLhRy00zZU91h8LgH8DKVt2KGbujZjme0XGH+mLGCqghmu1sRHPJ7AhOXnDmibuJbRLMxsLvAuS+xZaALlD+njPHEvTyXLAD4KqgGgpcC0hzD6GECV18tGbn8bOonIIfnDCRDssQT7Y27ZWvY8TfeLHm/Qr9V7sT+RkvIG+2VpB+wSQjBL5vb+tgeP2Sw8+n7wHWIQrmxlu9j111+PSy+9FA984APxvd/7vThx4kT87AMf+AAOHToUAT8A+KZv+iYYY/A3f/M3S7c5n89x5syZ7HFX2s7ODtbW1vC4xz1uz9vy3uP666/HkSNH9mV7f/qnf4of+qEf2vN2xE6fPo1LL70Uz3rWs/a8rSuuuALvete7cMUVV+CNb3zjnrf31Kc+FZ/61Kf2vB2AmZaPfvSj9w1Y/vM//3P85E/+JLpuoA7redp73vMePOQhD8HLXvYyHD9+fF/aBexvv73nPe/BS1/60n3ZFgB8/OMf31Of3X777XjZy16GBzzgAXjYwx6G22+/fV/add111+Fnf/Zn92VbAPAP//AP+MxnPrOnY9X2lre8JQP8
Dhw4gNe97nX4wAc+cIcBP+89vuIrvgLf8A3fgPe85z04duwYjDH4vu/7PnziE5/AC1/4wvNmEB4/fhz/63/9rzu0/yETJtjDHvYw3HTTTXventhHP/rRfdlO27b40Ic+hM985jOs2rcP9qQnPQmf/exn73R7/uRP/gTPfOYzcdlll+GFL3whbrnllj236aqrrsLDH/5w/PZv/zbadn9qPF955ZW47bbb8OIXvxgbGxt73t5f/uVf4qUvfSm2t7f3oXV33i5qpl9cCMTsbXmf4jMNLBa0rCYzrgy8TYGjyK4LC1RQYObpIIhITAluqrUDs0byAssGQKrffpEv9JGB6FIdvxDEoGbOcp47m+hO3MzBqFs+j+1bTmF+6ixmJ87ANS26Ok3UtuLafHZcYXR4HeV0gvUvOgYzncIevhQmBHvMdINrt4SMbleUMcOf2hk3w1i48UGgWoMJdTp824Ama/x5U3N2+doGDliD0eF1kDWoTm3GOn3trMV4q4bvPFznQ/sspsemKNdKTI+tRcmp0aF12PV1ZvhND4BGE9B0ndtXTnKGn2FACraK0qHeJ+aZBKfkPSAP2JhQ14skXbZnRgcLetnbOgu2tLxgLlRQCjrA4dTiG8iDTD2JJ11DRo5LgkAC9vVZVRLE4EV7YssBIcjg5XhSzRk9UkXOCEhZyIDhWiv9tXUIYBiAwY6QI+4BOENZgEoHhaVmDxHFoB1B9ZMK1umgLDr1ngocDfanyHYRwZQMEo+N5TFTcp92XpigITPc98cEFjJPFtWxTOjPtH89LuBd0J5aAvoiDTcJzvggMQfX8va6Gt4YUNssbuM8rV9Tpc9cHgqmSODHgTJGhkhnydbktQb8hrLd43YDAOZCVrjxFCXbmEUY8sNDtrqwe0Qul1SWeRw7cv0skUTNgqs9pycGZUNfLPbdQF+r/uoMt4dlhEnNNQRndN8QqiFZw7vYYrDsLtz+yu6lFhIWyBfwBbiWcPSz2ihPHqXeujbd5wBEaXVyEVRJtQBVwpQkNmgTxh7CPKwYPAKCAHxPHtkChU1uLHnP7CF9bxUGkU9yoFEmvZ2B2jncfAfdyVujpGdz+gzarRmaLfaBRNLQlAU/VyX7PKMJqyQcPBqVCBKwVDHgV07hx2VIoKnivYSaGajewsHpYUyKIpM1lNcjSxhZYK1M95uRJZQqWcESgK4J/VkAoynMegd79HIGIKdTuO1teOdgp1NmJEoSV1GyHOl4jZmJ5ZQlPYsxjwFbsn8lyVVgYKpRfhWQgIyh5A3xJ4gQpRNF2huAuscs/pYQ/JgFH8Qt/i0JH65L72VgioUw2qO/7dTfcadmYbv8O2QJHDoxKAFMuS1Igvf9QQd4W0R/pO486s5h1vncLzEA+QRYlYYTl0pLmFYWm4WJyVToEIG8qjCoCoNJZePzuDBczziwBcV3tSYBQFothUoGkdE2XOvRWJTAIAOWSwQEdiAAuI5Zf0UJalugQJC6DT5OKHWQqbNgmLUm46H/HUBA5QSepvHIPxIfld+j7PyJnUsB9VyYdV5Tm9/rgFAPnOIahDyFOYzZlzHxz5sFFZk+w2/BX5P1aWykGscDPlFWHzr0P597ZlGLj7dM0WTxoM8RSBqS6VS+1tLa0+fyLfo+7N1sK9/q3mNPeMITcM011+DKK6/Epz71KbziFa/AE5/4RHzgAx+AtRY333wzLr300uw3RVHgyJEjuPnmm5du93Wvex1e85rX3NXNj/Yv/sW/wIc+9CE0TXPuL5/DiAiveMUr8MIXvnBfJNMe+chH4rLLLtvzdgDAGIOXv/zleM5znoPxeHzuH5zH9v77f//vWF9f3xf21Y/+6I/ivve97563AwDf+I3fiG/8xm/ct0D2Yx7zGHz913/9nTqnp0+fxkte8hK8+c1vBgCUZYnbb799387ry172MnzxF3/xvmzrkY98JL7kS75kX7YFAO973/vwFV/xFXf690eOHMGrXvUqPOpRj8L//J//EwcP
HtyXdj3hCU/AN37jN+7LtgDgxS9+MT73uc/hH/7hH/Zle0984hPxN3/zN3jBC16AxzzmMXj1q1+NSy655E5ti4jwX/7Lf8H9739/HDt2DM9+9rPx0pe+FA996EPv8Lb+/u//Ht/1Xd+FD3/4w0sZ3udjIvv4iEc8Yt+ug5MnT+If//Ef8TVf8zV73pa1Fo9//OMxmUzw0Y9+FPe5z332vM1XvvKVdzoJgYhw+eWX4zGPeQycc/iHf/iHfZnDAeA1r3kNqqraNwbtkSNH8NGPfnTfAPCDBw/ivve9L/78z//8nMoDd6Vd1KAfgHxhE4I7NERgVAuffrFx/blm10UAAl0IgIUqTqaA1yCfXvgo2SAAHMSnFrAF18DpsaRi4H/IyY+gR80Mv7NJ0nP7llOYnTgds9B9ZPYRjDVcFy+w+0TOszhyCcs4HTwaA1ZudIADO9U0LSZD7UCzfTK1s5zyi9EEpqy4Pk0Irvi2gbcWvqhg5jsYWYtmi+v8ifRPO2tRbpZwTcegX2lhK4vx4TGzAQNLsFwbc2CqGqdM+qEFbwDIcrnJABSAA1NDko4J8EoLfxOk+bSsIX+Wan9oQE2b/Bnrv50P4Afki9p+pq9N2eeJ3YdUd6QHYurgCAd7WKpH3lzIhPZ8HJ1qfz+isSzA0Q9slYaBmwj6qUBglKSSvhFGiIDZgZkhIHsc787FIHO8DkPwyLcNXK++Eh+kTSyHagwqCqDZ4bpEtoRv5xEoNjZl+8c2Dx8u98Uun3XqwzwgtRi80PXksjJYlCR/YVqw1JOJ81QfuNOW1eSTAJAEydXvFlh/WAwgyZDJgs9K5jPbb7gu9LV1PkQOAYulOT4EygxJQCyMzRDw9D3JLQGNM/BPH7/qF97hQBBs6G+HhUDTwrKoF2iyoe+sNUF6jyIrQ4Blyey3Cxtb2couAlsWfOyxa5fd47K5ybXhWu0WWcxDoHvv75jEI38bSbIxKYHKdwmskLlRJ4VJmwPgh/k2uu0z8DsM9rnZFurbT6E+yzLpvuuCjCcDfqYsYMcjZvStbcAENQKzfggoCgbLhKWvk5PCXE71NgwY6AEAX4zYj3FyL+X5Q+cISMKRBOfHNjDlxc9wijkFBv5oNIFZ2+C5tetARQW4ju+NZQUaT9Pr6QH2BQWkFD81KA1I4hGiD+KVLzIM9onPoVlNBgkBEzDPQ8kz9qyfFCRjSif6actA52X+FkktOcDbalFivf/9sC3yzBj3wd/xLvleS9u9m6m6yXKv0vfFuOt4D0mJNcJ+NIZQGhOBvargNnfOZ4oKowAIyrOw0xnwMyiNCRLnCmiXGnVBMYCM5QSkkuVshb1nrY3XiPbBot8uPvydsN0AP227gXXaZ9eJcWIpgSn3X7Jxex4BUu8T8Mj+ZFhDhL2JgoiNf/N3PCgmKAyuAcPcuACOZTvfxa/x+Rhb+J53MCEhlROyUnJX//glULzQGz324HlZf/0jz/3EONX3C76ssI9XANnK9mBPf/rT4+uv+qqvwkMf+lDc//73x/XXX4/HP/7xd3q7P/qjP4qXvOQl8e8zZ87gfve7357aei6z1sLaOzffDtl+1bY7dOjQvtfJ248AttiXf/mX79u2HvWoR+3btsT2WgNNrJT7952wG2+8Ec985jPxrGc9C8YYEBGm0+m+tAvAHWZ+7WZHjx7F0aNH9217z3ve8/YMfq+vr+Paa69dkLrcq/Xrj+7FHvzgB+MpT3nKvo23yy+/HJdddhne9ra37cs19rVf+7UA2J9661vfeqfPyU033YQ//MM/3BPgp+3OAI/L7PDhw3jGM56xL9siInzpl34pHvvYx+7bfLmX69Rai6uuugpXXXUVvu/7vu9OKcAss/1g4/Vtv8B5AHjIQx5yj5CfvfhBPyAy9zywmCm4ZCHSB/5iUF0BDajncAFUoKLiha0EDYDFWoB6e5qNZApQ18T6bLBV3E4e/M8zNkXW0zQ7aM+egjtzAvWtt2B24gy2bz6B+alN
NDvM8DOWYCsb2X3joxso1yaYXnoIxaEjMGsHYA9fyhnd03X4YgxnK7jJQfhiBFdNI+PIbN4KqnfQ3XIDH4u1MJfcjwGUcpqYTDGzugVVY/jZFqzr4CdrWAdQH5iiGI9Qro3RbM3Qbs/gOoeudjCWQJYwOrQewMm1WGcwZtAXJciypJAOuEtAKta6IxNBPi091alFOPdveO7J+zgw8NfpKnMif6Rqf8QacypwBWj5KcAiZI+HYNxQtm0mS0Mq+CmL+7CYdT6w+pDq50UpTy8MoxQUYgDEo/Qi++QjgKJNwBQe91gwAU2BxeBHzt7jPioQgmEh8BH7VPVjQWFM1zN+7poE8oWaSlK7Et4BbQvfMsjn24YBv3rGmeM1S8tq4BnGckDK2AD6lVG2zBQVMJqyHFwxirWKvC25jl1RZQHo/nWZB4bOfYOigT7X/dLfBBHgIjBahEBn12uTG05mAJYGOJcCfmqc6e+LCfAXQUAB/lIsMO2qdxx3BPjrFMPW+xSYEyAxZ66meoJR1i2Af1YDf7o/+gFgndjR/y4A4DwyesggouXSn7ZAIdctKCQa5ABgcS75tLvC7uqA2CrYdu+2ANAtG0dRkUCSo7zj+7L8VvtgnmutSfCaUHFJP2HqSw1Z+b5m/YDnMJGwlnuTh0frPCr5vWnT/UDaGBK1+r4ZNdvAbAvd6RPw22fhts5gfvwmtKE2MidSOZiqgITQTFmgGFcB8DsAe/BoAsyqNWQsfVPAS108MtxXXQszP8v9VE5Z1nm0zlKl3qMwYd6Aj36HJURWX/QxXA1qWr6HIs3vwnD7/9n7/2jLkrI8HH/eqr33Pffe7p7u6ZlhGNBvAI2Ii6CiIiJ+BAwMuFAcRFiSLDWIrESiUQQUwZ/BxCULE5AwLoPg8lcIEjVG1IBIEoILEEXj7zUIKsgwQ8/0r3vvufvsXfX9433fqrfq7HO7Z/oO3T2cd61zz7nn7LNP7dq1a7/1Pu/zvNFzYMcdOc73QVVnCCP/33RZ0rNpgdm2AH4buYafMvx8hz4s3780mWqphpjxj5ZB25hlJWVuT7700uhCrtGnvhRwz5g9Oh7JJZCVwVHHeTVAAoWJjLqGft3cR+M4AG6Adw0CESKVyWRA9ne0O2y/ROR7VFp72DFO2VdS0E33rskwNpTriEHixpMw+JpJ0G8rMfz489bJQ1iCyvRL0qkqNVsDoE3Lihs91+hDGIGmRao9rCbjjVoZZyIHWuzrXtjU3dOy/AIgUpw8Pi1Qp5LttkY0YMdyNPtclvIcY1wJOE61KSKaNUdk5j+Zz2P+/RCRkoe09rdNq1vJhKttZRLrREKq7CaPfZeAZIrsl3migmnqsAyYrgQS741ZX3ciQQ3AMhvXuaVtPmW29q3ut/bQhz4U1113HW677TY86UlPwo033rgkQTYMA+66664Dg8YbGxuHGpBf29oupz3iEY/AIx7xiMvdjMtihwmkX8n2ZV/2ZXj6059+qPskokMF1XWfl2Lf8A3fcGgMsyvdPudzPgcveclLLnczJk3LZ63tU2dXPehnM5wpDGUwG8jAWzB1XQyzJX1us4f1f6lhATDwZcGapXoC9WJHAmKEPi9QpMYLmsALPNdkeayirRzQof0d0GIX4913YLzzYxjPnMLOx+7E/unz2L3jDBZ7A8JihBPWXLu9kcC+Iw+6Dn5rC/7E9XBbx0Cb2/DXnORgTruFsLGN2G0jNh3LdO2cKphWcA38jf8o18prNspsfAW0RHaRwRMOkMVNlgCaHZtj47odjOc5iLZ/+rzIkGb5Cd+1KZjWbs84i77pUp0cNC0/9LyZhWv0HUY49CMH/7LsVMQwZkAsLfRpdT0PIs7C9ZRvKGXdkByEs8GDVO9PF8QaiHANQGEy+3wSjDEApkpQjlLTRQOdC5UulSCcApuABuEgdfUATxFjFLAEWdLTHpceW34/Z/gqaGUDW5qJ3jrikJSD1DGU65Bkh5TZrBq4wziAxiGDfWOfwD4a
5gyyD0MB7CnQF4cFMCwQ9nbyZ5W0FHcjy0sl2TKRXqNuBto+Cmo6+M1tCWh2GQAcTJ1IqfVjWbnaFxpcApaDRaVNv2/BQz0PhMyAsEkLKk0cyYGcYxkwmEDhVC2VqYBL2r5ZAvs0oHRhWau4BFbq8a/6ql5DNlhsdzBGVBBmLL4HaCA0jztljRIxAKmfw2ktmlJilRyKa88mdqTnKTksrAho6Wb6OzI30pATEYgcSFjd0fF1vAiXCfRb29rurWmtO7HoPJLsnAaLBXRSVQMAoDjB4NDrbOjTdaPMlqhMffA1lO8l/F1NOpjKJlAZvxEO1GwUv2ml35Oce8jJJuE0KyaMp27HcPouLHbmOP+xOxH6AcO8B3kH5x26Y9sskX78CNprmNnnT9zA0ujHr8vJU90mYBhycA0z/bR/wgAa9uF2TjEYtbHAeOQ67AfCIjAw6RPNOjP7Wi/1Rj3BDXO+jw77sr+emVUkvoOyLollOant+HYcRpZiDyP7tOpbdcwyDN020EhCl5HzjMS+lfUztN9HuWkk2W7FJHS8YPm+MlVvjMCgx+R9RHxhDD0Db0RA5Pm12GwVQ8okZaR7nnxGMTCQFxm0SozQGlgwY5iGHmiAzjUyHFfBlcuJYcVvi6QlhZ4/CMok5BrDmjSiMu6ZLSaby7PWkJ01Dludx9FZg12R+FcAb7Pz2OwabHUes4alPRvPrL8N+b/zrqhZ60kAfb0ONTGqC5w0OCyYBSqKC1Hkb5jt5/IYk9qS2ZfPMrGq0JEYv8bfqpP0piyaZ/VN6jNRJ6gp296qGQyB5WajytWa3y9Ar4m2WMUNm2hYnCtkmf38PQHLkZPDvFy/0Y7BKaDPrlMnO2aFD1PLcIbAhQ18k9ZvPvlPMt4Qi7UQ6bFoQmsIRcJHATTadc9SxwVx6lYAW/a6K/zcam7XtdKqZLi1re1e2Ec/+lGcOnUqMSMe+9jH4vTp0/jABz6ARz/60QCAd77znQgh3CesrrWtbW1ruxz27Gc/G+6gOr33E/t0AfwAluM8LNnctV39dtWDfsXiQrN3q0AuTS2kqoWTrfOXrGlKgIaqLONVr2tGScy1bCIABJ/DBRMApdYvo7FHnO8i7JxF2D2H/twu+rO7WOzME+AHILH82u0Z2u1NZs8du4blp44cT/VbNJOb68xIbTxy/Fv9bpJUjL7lRf3s6PIx6+EStzWGkDO2yaWaH24cEbs5oiz6/bCA7xqM/cCyWXLOVP5TpbNg2X0Cuk4x/TQoNQZeuC/Gst5dktdDZEmdAwIIGgSwgSubKZwlPFHUpctMJANu6VhQqS3iqoFpX1ZqVANZdgyRk2CmBDOQs+pV3ilnNvPX6uxhICKoZClypnMaZrHsjCS1aDKU82+ZIAhxxrOLAEUq47D19YTysxwIEBatAICJVSuAXwL99ufM9Ovn8nqBON9B6BfMwggBcZRHCGkckXdoZvs8loYFaNGD+jkHPrsZksRZp4GLTsZ8x/WrgHyduobHNMqAUg14XQg0s9+zFmBYESbCVINKPN5Rsor1c9v/NiBkwMO0iQ2W3IN231OzQTVUzbZ9MMmri7GQ/gTy+I2RACNdq9f2GHJwyq+Y3/P+K/btVEZ8zRSsjIAcjNL5kRwouMRYig2PI2WGXJZkJiLcpz+8ztC639oSQO4cAJN0o3O53aZ+vXKf41JwO7P3wKzZC1iQaz/Kd+Abvm+sYD2rjDQNc9C4wOLcaYRzpzGcvgt7p86iP7vDSUn9gBgCmq0ZIH6J+lVu6xjXGT56HDTbRmxmKXEEvmMAo+lS8g43lME5CmMGmMYRaLnvLNDgiOdrFv2T5CIIA0fBr1FkSZMsNjNjyJvjVulISTDj+14+j5xUpW0WcFLZfe0s170NSEDGaHZv732aAGV9JpsMk/LsZJuU1FEBbJPShuo36DYxcuJLKI+1lnIufCo/IQGooJt8J0LAam2TvYdOJeXFwLXYBOhZNQtOvZ+YXdL21G7ZJ4NTkROr
ImA9j6kad8ycpELeExDZW1PTj0FAJHafl+St1rO0p3fKmJ9oNzlOenQO1HayhhklGSkkdi25EvSD8+zPN51IfjZSC40q3ya3u/AbJo53qj/5uaxzrUlC7K+bOtsxADEDzbZmqLU0VRV+9XJ7gvjM1h9XAPKgpsfqtfqdhd2DteZFWwVqJya0/H7B0rVrINj+q9bUU4DfAb+50siV127N6q1A0JQceR/5sQfa2re6auz8+fO47bbb0v8f/vCH8cEPfhDXXnstrr32WvzwD/8wnvnMZ+LGG2/Ehz70IbzkJS/BZ33WZ+EpT3kKAJa8u/nmm/H85z8ft956KxaLBV74whfiOc95zr2ubbS2ta1tbVeafToAfp9udpjSo2u7+u3qB/3UYoBqCibGH8pFSKpxZRYSSYYzveGKoE0076dFiam5lj6z7ZDs8qjSNzFwXaqhz8EZJ3XMtC4dkDMnF7ugfg/x/GmEM6cw3n0n5nfehfmpM9g/fR7DfIEYYmL4zU5soTu6he0HnsTWjSfhjhyHv/5BXGtmtg1sbKaM7th0DPg1HYMLBoDROmux2eDPN6/J3bLYL/opmmPF2AOjLORnDtTN4LtZAnHcbBtx6LnGzP4eAzvDAqFfYBS2lvMcUFBmJRwHGtC0QLfBAbZ2g7PRu03EZoa9gZlw+/KswamIMkiVTlMEM98iFYCLJ0LjShYfsIyzMIBGyxmv1RhJr6cW5JZZaoIetoZfBMr6IAkEjMvAkQl08PeQgkW6GHYhgoQ9AHAf6PHw/zmbl/83wY4UyMgCk2OAhFthpCmbQrLLGpHjIJtvgHG5yDjFiBhEqlNr98nYCft7DHzP51jszhH6AYudOcJigSCAX+5+Zmf42QbIO7Tbu2hmHf+/t8PA9/ZRBv02NpOMbALDnQekDhPiBuADomvgXFMFo2IGY+WDgwDAmlnpTH8TNH5AZZDINXwSXJP705VBUABlUGQyAOTTGEtgORQQr87DxLcBAbhXyHseZDVDsJa+OkgKVIN2JHUDmfEX4aMwbiNfg8FBmH5ZyjbJZQEMuGsihQ3q1oErI8WbtgWWAY2JwByZQGZsOqktpkF3oHUN2jXTb21Xk4W+uL/FVC/ZJEdpXVaYhAJ73dSgib4v116MXHNP73dax5McLQXXlbUTiRAoJlY+wHNw0zBYRQvhqtjrXfwwWuwinrsb47nTGD72IcxPncH5j92JvTtOY//sHPtne5AndNst10TenrE8+rFr4E/eCH/iBvZh2k2ROt9IMtFoOkTfYS9Q6oKjO//Aag3DHGHzBOLsGMYj1+X+9A0akdSLkZn83McMzhBEwpHAiSuLPQYQh/1CPp7IsSznFEhFDjSTuis6/2udQZUhFb9wdB36EDGEgDGU97niFMaciNE4vp81jjKr22yf5/GsCuCWsmgmgAKgkPUswAC77Thxz7N+lfTLYNsEx8ENcnxPDY79iFhJVerPGaCSYkAMA8h1IMMe42MtpUtTW6trICUWUQYsKQyIAEvViudJBIyB73+6Bysx7gTEmzUOIXpcs9Vhrx8T+Ocd4cisxVbnsd012Go9NluPLXkc3WiwIdKxyiZtHaWkrHz8DOS5jU2EEEBNi9hyjcg4jqXko9T2U9DPMvyiKJ0UILOwMEe4yVrHNevPDp3afWBWMJtl+HUOkkRZysTCNfDUACil/TXhLe0XWerS+tsqbzsUaw/DgnW00qcK4qPrtV+oFZjxciF3axIsBwCZWyeTmQqwTmtJ57IV3k2EBTS5NobEAtW17ZI/VdXHtImOSStD1uqTIKC2R+YrfU/3khI6BXRfTM0Ba1ub2B/8wR/gCU94Qvpf6+x90zd9E17/+tfjT/7kT/BzP/dzOH36NG666SY8+clPxo/+6I8W0py/+Iu/iBe+8IV40pOeBOccnvnMZ+I1r3nNp/xY1ra2ta1tbWtb29rujV3VoN+SBJsCf2DmzxLzJYYM/K2wtNCwGak2+4FcsU3NqEm1bQAQ
aT0ZRZea5UWOtlkXTuMCtNjnQNF8B2G+wwynxZAAMvKEZtag3WR23OzkMXRHtzG7Vph9R4/DSc0WbGymxbb9fVrMQYt5anPYPpmPQ6UAm1le1LmBj6MKKlGMKWtaF40RADUNKLQM5GzMuA9DgHMsfxX7OZzzIN/LMUmgQKSAODPY5wxhI8kYmxn6APQjg339GLAwYJ8uvHXRHaVlDKRx0FCBPyv54x0lyU61uo4KQYNWZqFr+qNmRS6N0QMW6YgBTph+RAQPBlpiCn6Q1EHLqJwGOgrpRX2WLPRRwE7uBQIkg1rrjLjI+0xsg4nAi+2TAJYORYgYpH2Rym3s97XmWspmNiAUB4uJs8LtI/0YZ5LHEJL0WlgsUs2lmukXvEMYA3zHkrBxDAhjQAsAYeRYQ8hsgRAC3Kb0vwZOfTCgEUo2rjZLgjURZSAoVGEalXCqZaIsO0JwqxwM1TlFJPVsAHGJySLvT5oNHl2APTMVnNL3bF2/KBKwDMJlOdilJlXvW8BP+22cQkjB16GC12ncRq6jBLmuI2TcRQ5OO6ml5ynPQ9yQKug1FaCSzwp20wTop9+ZZD5D6i6GAdE1aV6EzI1ryG9tV5MlBnadxEKZfVFIq6MMUOt1kN8Q+Vu9rIzvpEwZoJxTa2MZSVsvK19VPG3m2oCRkH6Lf2QAFj1Lep45hb077sbeqTPYu+M0du44j/78AuNiRDNrQMc2WG58a5MVE645yZKeR65BEAnMBJgp4NfMOGEn5mSYKeO5IScacKmQEnSwDL9GQZjI8pI0LlgaW2X1ACTOtGEdF/OekQ1XScUEvDiP2G1hhMP+GEXOk/2oGviYPh/5/gXjQ4QYC/Ar+VbT3ZLHkW13BR5Mbb9kKxKpQnFutBasy6x+YjAjqdNO7Z+E2eaZEcX+zKoDQnEPSb9T76+6PynjzjuCjwBcRByXfSuVum6dw4Z3WIwRWx37TV6cDO9YxrNruH7fhndJDnTWODSOGYGdz4BfQ2AZ9jDmc5ESHgXwCzoaWizV0HQl6FcwSuW6KaTUfSe+wITPYGRuL2QqTa/+lfpVlp2G0SbpNSAMcA0nQWndYD1JrM9Rtqf28bSmdg34cTuq82X3U+0zEEDpurmIgy2Om5/JzM8Hsv9MYhPL5WYQjiDJslOqN+ozTUl6avJHBfhNrbNrdRNa4U9ZVQogM6IB0+9B1V0O7qO1fXrbV37lVx5Yi/13fud3LriPa6+9Fr/0S790mM1a29rWtra1rW1ta/uU2T3m8v7v//2/8fSnPx033XQTiAi/9mu/Vnz+zd/8zak4oz5uvvnmYpu77roLz33uc3Hs2DEcP34cz3ve83D+/PlLOpBkdvExVouXgxYX9nNyLIHZbfGjlUd3hIEwn2uARedz4KcVNlqzATQzAak6QB7RlQzBtHAKKj3FYB8Nc6Cfs6znzlksdvaY3dQPcB3LTc1ObGHz+uPYuvFaHHnQ9TjyoOvQPuBBnI1+zUm47WPAbBux3crBHbOQcv0O/Lk74HbvBmLAmeYoPklHcEfcxmnMcC526EPun6jHl45zlo4XzYz7pRG2lPQPtbk2X2JWbR1j2dHtY3BbR9Nrmm2DZtsJrKSNWcoSjq7h3202ELst7I0Ru4uAnUXA3hAKxl8/BizGwLV+Qs5UtxI8tf+fABhwoKR1SA+VBdJHqjlkTcdaqlsnD7udZYra1/J9XTRzbZcc+GklW7n1KlEk2eTITCjLihhDLB6DPJiVJgGKmP+PUUHD6Sz9LJOUAwxax2QQ4HU+BMzHiP0xYH8M6OUxSBsWActAesF0bFhKt2kzw9NlsD2OI0YB/Mb5PhY7c4zzHoudOfpzu+k5yd+e20V/dkce+f/h7BmE3bMYz92NcP50ur7C3g7Qz5P0GwYJqo6LFMRwBtSMyGCT7dOF1JUcRq4paWtMjhOLziSNVrP8AM6A1wCZb9MDvsu1QVN9nKbsz9qq+a04xyjBvamHAt1EWg+Tcr098zpdDik4
pv2UAXnNjF+MAYvAj/0hPxYhYD6MWMj4WYw8zrQPtdblWPS9sFKM7Fxx+BXLbylApYCfzMX1tmS/ExnYIwl0JXBEA2Gh2lae67o+nxLTa+i+fKztUOyK86vGcVmqzQB+API1ZMEZvSbGoVRRUB+iaXMdr2q+Yom+LM9twT8N4DdO7oUuy0py4J19lTRvegEWAPatxp7n/rvvwPDJ23H+Y3di52OfxOm/PY2zHz2Hcx8/j/2z+xjmA8ulb3bojm3Bn7gB/sQNwPEHIGyeQNi8hn1C9QMF8NsfI3pl6sl9Os3Nvpu+VmKAR0j3c3uLSPd+hCQPqseB+Q6z3/d2mAm/YDAQY5/8Rxr7DNIKyyo2G+yfdluI7SbXdZ4dw34g8aPkIX4V+1N5ng2ICfBgUFJVEijVNNYkn1RHzfgw3hkQx557nWOViSW+VJGwZx8GVKgfnKCWAc4hZolSHSdag20RgCEyEAvxMWHuuxe6r6rPmB5k7qfmHpDuBTA+VH09yX1DwV6V4NR+m7LWC5jXOGy1zOY7vtXi6KzB0RnX8dvsfAL5dLut1nNNP+8waxT043qCkFrLxX1Qz4FrgG6DayXP2E93s+38UN9+U9RFOl4XRCsdK5K4+nqIXPN2SDXx8iPGXCsvJD+i6n+5dtQX8VbOU/pQWX7pGgqishKWE3Lqnq7BblXb4GtCnkPp00z5empWNjQkvyWzBpPSx0U+RtMvsb5O9L3KFIxL1432h65bhj77OEOfHkleOPWf+j1yzSoT0F6LU9dB8rVWAOvmtR6j+n29zLO9JCgMISaG9KfU1r7V2ta2trWtbW1rW9varhK7x0y/nZ0dPOpRj8K/+Bf/ArfccsvkNjfffDPe+MY3pv+tTAIAPPe5z8XHP/5xvP3tb8discC3fMu34Nu+7dsuLZOKXCHNExHKBYTWnyBhZBi8M9rsyATmmTot3nRTkS26bJFcLg7nPCg0uX6NZfDYNsfAgRph+MVzdyHsnMV45hTCudNYnNtlGR3v0B3dYuBvaxPdsS2uN3PdA0Cb2/DXnARtHWUZnnYzZ9RatmIMqXZfYv+Bg2kA4DR2B2B/DBgCL6KJuskadjEGwIdUS5F8QBxFthSA2+CgnMp2xkWfamJRN0vnhpoOJPX/3NYxoOVgWhApz7B1AgMcdnoOSDHYxAvt+RAKxpEu/lsPzmB1hEjM8huV7yYSYVlyihIz0C7zlTlUmGU9yL+ZqmDG3YpF7dLY1I9UlpZckiiEyH+RBj81BiNMBg5g5Sxl72iJQWVlhmz8yL4OQGL86f5TwKPYrsrGjvlFtN+VbH+VhhwjsWyQyMpqwIvCACwcBw/IcfCzn4P259x/AgKSN9frGDD2A8JCHiLv47wDOZbqcoYFqBKgYdGwBGgIzPBzjlmE3gOBZarINQA6QM+FuXat5GoO0uTnpViPMOKinJdo+jJlpFfnoQjYxJCppjLnRJtxre9blppurkFnrWkk9XRqwE/NLZ/qyQDbUmY68uvEokVmBuqMXMujjjFKzahYsgUDX4sLRAa4HRAiJQk5OL5+ESLII9WWjKYthJKhW/ZTCfgt1S6z56HqA1All4XljPRino0iG0cH3zPWtrYrza9KfglW3P+sxQBA5nI7Z+pDGX/C+IEbsm8l+3JANRFmdlbQz8EgSBRgyTJqgl73hlHlUjC7R9w7j3DmFBaf/AR2Pn4Xzv7dJ7H7yV2c/eg5DPMBcYxoNzfRzBp0xzaxefIazK6/Fu7E9XDXXIdx8xpObnLlzKJsxkXgtmw2JHPMiPHoA6AMvZTYtZD6suQQxP8in4vKa/KEAhfUzzkZbLErgN4+xvmOdFCA1lCLYUSWRvdSgy2Dq7HdyD6tADA9GgxDxM6CEyzmAwfRI0Qy1XEdOOdywxxybbhGEqQIAMKQfBaSe52y/izYBRlPkzBWDTJbq8AAZnNSKcc5ARjoGNIkHT2GEQAJgBOI4Mhxnb6UICOqGokJpesD/lzbX6hCGL94
1bEUcoyx3KcFaBTEiohZ2jYl0/DYCBQTkAcAW4PDQvx1BZ62Wgb9NluPoyLxeaTz6Dxhs3HYaIThFwfQMLCMvwLGeswCHCfGYgxAdR1YkKlQS2naDKpWrNhBlDryebLHDqmznRUGUp/LszMvSBKL9Jxogl6W9Zy4B0dN6HLpN6My/gC4ODlKk7+niUcMPIV6iJhkKXv+qyYgKxUo2/cAzHDJbCIaIMxGMlClPWYLuNnPyPH6giISa9gSOO32F+tHHXQdxwCQL/dZ31NMgpoF6hX84zmK1yPDPemwta1tbWtb29rWtra1re3TzO4x6PfUpz4VT33qUw/cZmNjAzfeeOPkZ3/xF3+B3/7t38b73/9+fNEXfREA4LWvfS2e9rSn4VWvetVkYeT9/X3s7++n/8+ePXtwI3VREW24CBn4gyzuaqIjsQRVkh4UwC/JVEWpS3aQ7JAufn2Ti6RHAkVdFNerPmaXKOCH/V2EnbOIu+cQ57vMbuoZ2CDvUo2y7tgWNo4fZZDvmpNwm9twR46znKcushWwrAMNKQPcJ+ahZ5Qn1WrLWfZR6mQpEGTr2uVj5Qz/KjAjv0FNhzj0DOC0nQBlBhhtO5AyAtuOa+ZoNrDUm9kLhH4M2F0EBvtCxLn9ASEy00xNgQI4wQe4MSYoCGG1lechyy1i6dyurJuhXUpOgL/lxfSkrWJkpR3mRbAFW/gcqbyZApkMeEQI7rgC+NNj5G+Wx84yihn406ASkPFrQIIwUt9kqckog7B23/pzEQBckwHiKPXqfEjyRNR2iGHkWo5DD3J+KeisQJ5Kd0YpXhgQiho/zjuEfsDoHFzL1/C4GFhS1jnExYIDpos+/143WwkABeTM7GACEUGOUxlt6WsywyhIRhRBUQJ2ck1Z0Gz5lOXOdybYGIEUxFIAkC/VatwZiTMF/GogDzAgfmX2c71+FMRVmU8NlCXJz1hK1VnTYHIN+I3V5mOMuV7XiITgDUGDXAwmZynAmMC/HBl0y+dR7UKAX9EJ5dx5kEzq0txu930ZQL8UgL0P97+2w7HL4VcBB/hWGtB1+ZKaVEVQUwDKggUhZIlzciKtXvpWfJ3meUUtMc/NT2iAW+dNByqC6MXcpn6VsN9CP0ecMxN8//Q5zO+eY373HPtn9xGUoecIvvMs7Xl0i5UIto6J0sNmSlipgSBlowDCnBl7IATM/SYCPNp2C03oub6fDboLu8Y1EPamgA0CQJAmaI09aOCay3HRAwPXxeVEMI/oAt8nNUlG+8K3RtJTAD+p1zxSg16Sp5hhDcyFaZ1OnydskGMZdNPfmhjlkAE/O78547tkeUU9MS6xlIrZ0tbmnUpgmQIGio1Wf6737XyuooBzug7I4F8r/ZdlYsUvmdi1SnxOtiWxEc09Y8U9YNU8SppcI1KTSSIcGRhrnWPGX4hoPafbBE9wot3eOj6mmRemnwB+6aFsUgGjMfZlvUh7LhRQr5Jfop0TjN+faipaSVllXwpDawhItSPTz8h58U4kwAXUqwmPFgTUep/23JSM+1j1fwlaprEM4+/YuSXm56LeNpCuGfZbpoHC2hTog/ru+j+QZPcvymJOsoI5Bl/5Lcvfq4C8g/ylajzfK7DvgsdhvufyeNL+toCfgoCZLXnvfvJSbO1brW1ta1vb2ta2trWt7Wqx+6Sm37ve9S7ccMMNOHHiBJ74xCfi3/7bf4uTJ7lm3O///u/j+PHjKTAFAF/1VV8F5xze+9734uu+7uuW9vfv/t2/ww//8A9P/xg5XrGFkJlGmAhM6WufA1OcyOmXpDSsBGf9W1F3ZYuHx4BE3tJFPuWAO0VhG9UL0LjIDL/d0wi7ZxHuvhPj3XdgPH8O87vOYtjrMcz34byDm21g48QRtFszNNden2Q8/TUnJXt7qwQtU/A/g5b6HNstxFmbjn0jzOUYgeg7jHDYHYJI8kUEJp3xotbU1VoCxMLUAhvQWh8RYNlP50DOg2bbzOprtxDaDZYC6rYR25nI
TkXM9wN2FwvsDxHn+0GCU3GygPuscXCR4MkhBAbJ+jFw1rSTA3Ry+lyEEyCrHAAmmxUTQY1qTABYZvzpfgoAplv+fp0Rbn6rXssScVuDQ2bWyWuF8kbEBPytsogIEiDUvJnq/Gk4JAOhOdhaWwpEmW21Wfx5zkQeI5jtpw8JesJ3iCMDfEQ8LtywYECsn4P6GXy/AHlm/bm2gZNzH0MAPBLbLx2OAIKuRQEQhsWA2DYgqRXIj1AEHYtzIdeRBhyGAAlWZYkwW0uy7mkN4Gr9Oa0ZlKViweMx5j7TlqTM/orFyv3rQdQwE0Mub3t6FLRPzwJOAoY14yr49wB2gg0iRZkDFHiDY9CNxyQkasXgMYGDUtEcj5oCfiEsy58uoAA+A4qtc3AuwitjOGpAMAPRCshOgm+4QBBwCqifZDRN7biW7TVB3kgij7WY/u7a1naRdth+FXCAb5WCuwDTbwVMsr6R+F40jMxeD+a7KF9HIgbNgJIJJJvVQfsASsFeACnzRa9IvZWrFbKRMbBUXb8L1+8gnP4kxlMfR//xv8e5v/sEzn3sLpz7+Dnsn+1xth+x6R1mmw22rtvCkQdsY/tB12PjAQ+Ev/5BiFvHWQZz40i+12vNKyqBpvLYQpIddgAamRtCt5nnIWVPxQBPIq8ISIIP+BgWu6B+D9SfR9w9jzj0iPtzTowJIydVJYYf3zsx61gWXZiJLMPOoOXgZ8Lq4wSqfow434/yf6b3MIDk0KooBQGNgzCuRTrREYNEyqRS/6jp+JxGkexUP0hAoFxHT7YPA/uh4wrfEepjrZijNYlN5+AVpvfC8j2+SdrkKmaKuZxoo8l7tsZlalsGo5Zk32Hen/gsTtxrah9Ca82pj8rgZEyM18YxsKegXgKd5LypnOc1swbHNhpsNoQjnUPnCBs0gvZ3WbJxYGUFZfguH0S5luCxTvmzokazAs0+lTWAa9AHlWgMyZfS+769zJXZqEw/hFiwTfkLuXSDt+1Ln8t9vpaJrY9Jxg6J4od3EQgCBNvdad9HBZH5sRgz4Gctyw8v1wkvmmmOO+3D4qyT3yptRO43Hb9ABfypGR/IrnEmtwOKRMalmscWCKy/vmKddEEz5yoniUFq92XJfGWAA6trQ69tbWtb29rWtra1rW1ta7sPQL+bb74Zt9xyCx7ykIfgQx/6EF72spfhqU99Kn7/938f3nvcfvvtuOGGG8pGNA2uvfZa3H777ZP7/L7v+z5893d/d/r/7Nmz+IzP+IxyI+dS0KmQT9TFtFl0MNaXF3z15wW4EwIHd2qbyqSUBRI5lJKhZjuu+yS1EMLAAZ1hjnCWpTzHM6fQ3303hp05+rM76at+tgHfNSzvuX0M7pqTLOc520botpNkk822TW3T/0W2h4Zegi6EySHgQg54pLRxOUSRbKQINATYehkoMoTNotALW6tpQYGlFNG0HKDqpOZHu8E1E9sNLPwM/RixN3Ddvn6MODMfsC+BqfkYUnY+ADipGQMIeKIsIdUiFMofM4UABIJzLPepte0IEQN4sd5olncdrFkRvCn7zhWySPreVICnAAUvMkNWgY2VP0+Z8Te1mUpuJSmkibCCMiGTbKJh8NnYxRTQp8GT+jf1d0cNsCWkSjLp0UmwVsBAkSiDgMXkHXzbII4BrmvgRb7TyXveAPeuExlP7xgodPys8p+8kUv71hoaZOXQNCjtGxkjOcO4kBpSoAm8TQ3QwXE/pukoanCHO8M7QgxRAFvpf9Nv9blWNoiChqPp9+VziMSis+2Kcr6msud5gxWZ4QL+RT44PhbELO8pjD/vEkmIwTmzG++IazxOWJgY2CMjxUAIaOEwhghylDLxPZSFKH06ZTZIVSclTMzRBzJ0613bQLMzQU/9ShQg9DIw/S7IKD6M/a/tU2L3hV8FHOBbia+SfKkQSto3kFhvKfh/EFhgvpPmWOT7groZNRvZyZxSsNF112bOU7ZPrqu7AC32WDnhzCmMZ05h//R5qQHb
Y+yZIe6JsNF5dNstNo512Dh+BBvHj6Raw6lmce0/6rHJ+61MpEETWwCEaqKLRCDfsV8h36cYESPfvxJgQ8g17oaemYP9PmI/R1z0CfCzTEMKY0rgIu+zT+Uabn/LNQi1Ftb+ELNiQs81VPeHACf15OyxMvyXa/KpfHpxmsMIUESkoMqYRYJFpMAZLwII0UF+T+0713NywTKbUOywbad8D9L7aZL5JMqJMJTvy3ov0TtKAk8m5tMlWexVYMhEUhEJmJ7WClRWkFN/SX8j17ecZtN7YnltTfhyxPX+9JFq9znChie4vR1gmEtSyn5uf9FIc8w1sDqVXKjXtueEwlrKU5OmbE3egkxIWdECIDj1QcH+irf3UllnTK61tP9Xja9qfai/TTLPOFDBDrXG/l6cBJuc4/br2GMwTutwl9eMsvskh+pem45dTiBTWXWttSwy+lj2cwrWnnnf2hTQV/TrioTFpYTJYqcX7zcwwGpYfciAX06Ku+jdHZ6tfau1re2KshjjyuSKta1tbWtb26efpVJOawNwH4B+z3nOc9LrRz7ykfgn/+Sf4GEPexje9a534UlPetK92ufGxsZS/ZrC1EH204GEOhAb7ffqxZ991sX7mLNKo9TIKnxy/TnNBA4AYSgXPIZdQqMURR/2QbtnEHbPYbjjYwjnT2N+513YP30ei509hH5gsGPWoZl16I5twZ+4Ae7ocfiTN4K2j3O9u41tzqxtpI8UeNLFmbSDRoCGnrPf5bPYbpbHLAt5u5CydXMKkEALvS/2+Jgk2MYLySEdNwe7PIAuf7cRCdJ2i4NT7RbC5jXoA3B+f8T+yLVmzu+P2F2MuGu+wDBG7C7GJAvYOscSR96hdcIKChGBstwWt5PlkZyLAByik2MKMdc/CZzb6yMQXK7xQvVitw7eTE0mNXN0RUZ3EfjS8WZe1yxKGxyx61yizKjSIFGoN7KnzWQX22fL0IsC6CCwzJIF/jwtA33e7MvixDpmFDjT9rJkLDPVEKUOpIB9cA2oaUUaloNH1HYM9oWQwL8GwOgdYsXyU4DPzzq4toHvWviukdcNXMeAM3lmSbC8bMfSspqhbp4103iUTGMOVpl6fkFlKmMxPJyLqR4dINNTQGL8RRl7tbxmCn7r8DUnUoNE/J2YzgOEVWjPr57Hom90W81lJ0pSo/yFsBystDWHRJ7VpwAlDwLL+BvluopQ5ij3Q6o7SVQcUy3zqYA+B9A4mz4Bf5EwyL/R/hYhSdwWwl11cKrKUp8MWtmvWzbDlJk6UEV9PxtMDGNZe2pta7uHdl/4VcBF+FZAeX9SmxrnVWLB5H3Q1BYtpqYwgFyT7hWOcsKCzp96/9CkhdZMmt4mIA0slU7z8xjvvgPDqY9j/olPYufjp7Bzx3ns3T3HMGfJ9E1PmJ3YwOaJGbYfeBxbN55Ed/0NcCeuBx05jiD+SUpWsaCDy+5zYukB6AMHoPT4yLKAm0ZYbSrPHFMtXydzalHLb9hn4HL3HOL+HuKwAIZFBv6cBzVgqU9lVvmWWX7tFr/utjDAoa/q9+0u2Lc6vz8kIGYGx3O2nD4S0IKIgU0FZfSepTKqGHsO9pPLNx39LAaQBYSqvivHCC3Je+q+EuOvmpOL0mvWp5LvEGXelmVkJaDE7DOQSlarpL0CaOBjmDAyv5fem2BKZTAwj2NExyUDkK8HBVoJGYjSNijYEcX3KAFyToBro0uvZ1Lzb6v12GwcNlth+M13QfOzPMbGnv127Wodr5r8puUCiJIk79K9jkw5BHKA7wqwb4xAP/IY68cS8LPHzeNMhpBn7G9Julv6U9dlFJV5OwEMW4YamfGjax3Td+w781gJkPqhBiRWwAnQRLasUKAgeOucsGBzXU5l4a2KRx+UxDeVCFUbM/342oziD3lIHW0B/mxSbAHcjcPqJCgLXlc+E8WIOGafhtd3Y6F4MLmmvlgwS+8RMj402S7GknUJoOJkrm1ta/t0s//1v/4XQgh4whOecLmb
sra1rW1ta7sC7P/8n/+Dj33sY0X85NPd7hN5T2sPfehDcd111+G2227Dk570JNx444244447im2GYcBdd921sl7NQTbJoALyAmUio7Mwu2itA7w2o1V/ywZ49Xe8ZjY2eXE0DLnmhC6qxp4zuLV+3945jHffgXD+NIZP3o7+3C5272CWXwyB2Updg+7YNjaOH4E/xmCf2z4GHLmWZacakW7S+l3puMyCzTDPYtMhjoucjTns54CesptcA+8cNhtdeGUpFU+E1kFYirsc0B7mvHgUaSCymdm2vxrTb75j6alui+vMbBzB7sCyU+d6Zved3R9wZp8ZfnuLEYsxYhGCUVYNwgTIdUxCiHAjYT4EtN7BE7DReHgCWjgEYnFLZQoVbMDAmdLMViKMABrHMk8UBiPhaRbIExKj9XhLQwVIYChNjrN8/izrIUsKTS9vbY0R6PeqgBAhBxd0mHNwlQEZgANNo36e08qFtUXwLmcTR4jklAlkTDH8MoCVx5DKtMUE/tU50OCgpra96YBmgWbGoHEzBri2RVgsMC4GRGH9kWH2Oe/g2hbNZgdyDs32DL5t4La2QN0MbmMTtHWU60jOtuE2t3kc+lYkqTxfK+QSqMegUkwBtynAr5B5knHF7zkedxQRBOzzFFcyI/W8AjlWo3CxfqeUUcUFTeVEndcafFRmz9dgn/4/KgtBEgXIgVzDEmgEOGHMkgK7ErnXDP0x8NiJQZg6xExbF7Ufcla9ZfACJdOB/49yzSOze+wxAqnOlGUgTwL2B9kUq0HeV5uqZWS/f9ltnY1+v7X72q9aeW6re3sUoKeQBZzyk+rdyHNI96QmS3cawK9glhv2DWBAOCDJTNJiL8l6jnd+DOPdd2D3o7dj79QZ7N15Gv35HmM/opk16LZb+M5j+4ZtbF23iaOf+QAcedD1aE4+EO7Ytayg0LDkuE2MSCyWBCwBrQB3o+swSL0yZSfGGCWDoWGwbwyI3RZo7LnWcQgADWh8x3PVogf1O1xnbZgzw29/D7FnCcYE+NXmHCfLNDPEdoP9Q9+xRPowYghIoN9OP+Jcz77V+T7va3QRLbgOXCvsQy/JFwqCZWnqkPw+y/5R+VMyIEGUcUGj+Jo+17pLc7MFaGqrx9GF/p+wAiiI5dhKyTeR/RK+T2seISewTMqby/HlHwkl4FeBKRZgITiGl4YMmEYA3neITrhZcj+N0dQxE1BK2WYK2NpkLk2E22o9jm402GwctlrCZuPgdu/ma2T/HNDL+NLxJMoH1HRAi+V7mqwP0lqBiF+7/L6CfQr0afsU7FNp/jFGLMYMmjnHoBkABJtUhojWUa7FbUGrYsyMS9Lek/UhV8xNDkj1lz0p2LR8zqeYZcrwawQYb0R9RME/PS/1EFrFJjzoN6cuEaKcPBYccl1AR2jMPJ10F+SaS/N21af5xwzAF0aZe1TulpMOACBquYa2K2R4ExNzao3tJvwT619J/9hjn5TnvRy29q3WtrbLbjFG3HrrrXjxi1+Mf/iHf7jczVnb2ta2trVdZosx4qd+6qfwohe9CLfddtvlbs4VZfc56PfRj34Up06dwgMf+EAAwGMf+1icPn0aH/jAB/DoRz8aAPDOd74TIQQ85jGPuXc/MgXsKZB1MaAfUGYoAkUWaLyI/fNytMourdglJDXMaOAs7nG+g3D+NMK50+jP7aI/u4thZ56AjFaAimbWwW1twW0fhds+Bto6gtBscHCn2UBsutz2muWozQ4hgUoqvZPaKu2KTcdMHgG4Gj1O59LiN2XUD5IdrMdkg+sVI6BY9ImEov5WbDcR2xn6AMyHgH2Rn9rpR5zvB+wuRuwPAYsxSwKpaV0+/icDWC4QgkgAto7Quphkb1wEXIhoHJK8JxEzlEgYUCMArZmnWdZeatAtAX+poysWhB17+nYM00tUmwEvVgNmF2tTkpAX2t6RZi9n8C8FDbR2G8XE+tNsWxIAyiO31dF0AEOzc9P/JrhijWKc7ieXa/k1
sw4DkGr8acYxM/dcAv9828AJw6/d3mRJWQX8uhk/Nvg5Oi9SaB2i76B1B7U/x5j7VgNuejyTgB8MoAxmEAjiBQ1VMAswLgF2q2SLUu0W89vK+lsV+9B9q8BTqpWzAiS0AVhuTDXOE+lngMq0afA9CiCs147KV0WSADj4eL0jhEgIJP3iCC7KQQhbt7YQIrw0muWeOMJlW8dMAcoB6YnHUlB5KuhkAb+ayVdtt/QaWM1WWdvaDsk+JX7VlNXzwUHBSQuGT2yXk0FyULz4OvE9O21v5jkHE0RX1k8YmLU07AN75xDOs1z6/O5z2L/7PPrzPYLUrWs3G7jWodvusHXdJmYnj2Lz5DG4I8fhjh4XBYIZojCb9HeXpk0JltNin+9RGx1C5JqvnJRAwjQDz5cjq0GMcPCuAbnAIEsMiMavSuyrYcjA4ATgl+r5yWtmq4u8p+8wHzmRam+IqZafyqTvLkYGYMSX0WQKQEAMl2UJnRxHAfjZh1HDSBLaxkciknt6DOBskFD4y0sMozppTz+qb5b1uJpQXqj9JwXOXCz3NSpIKwlNkTiJhUlSWQj9gHLJ2S4A+OX3IL5fTtKLwlDUxKiImJim2n4FMPXcKXjkBSlrHaH1LO/ZOCRpTx/6tP7AfIcBv37OTFGRU6emZT+3Wa5tnmTPrRS6rD9s7WNl92n940Vgvz5EYLdntY5FiIbVT/CBASuu22tYXQL2F5KyFiS2DMsVUq9ANXYmkniIEfokKczv8fivfTsgM5DhorBfMxNW2bAOtAT22f0DywkQRZur96xnXCTxRV7PeJcTr5gliWlwzdaYN2UZllh8YURIda8z6JcPRnxvqdcKGUdJ0nzq/lADfkv+U7kW0rGeCMRr12pta/u0t77v8cIXvhA/8zM/g6c97Wk4duzY5W7S2ta2trWt7TLa3t4eXvCCF+Dnf/7n8ZjHPAaf+ZmfebmbdEXZPQb9zp8/XyCnH/7wh/HBD34Q1157La699lr88A//MJ75zGfixhtvxIc+9CG85CUvwWd91mfhKU95CgDgcz/3c3HzzTfj+c9/Pm699VYsFgu88IUvxHOe8xzcdNNN96wxNdBSy+/UASq7rXm9lK16sVnE+r7KeqqEig2GKBCmdWbGnhl+50+z9NQdH0sMv3HeY5j3AADfNWi3Z9g4fhSzk9fAn7iBa/gdOc5gX7fFgFnTIQo44QCRVzKZmyb4EH0LtBuIG9ssLxpG0P75DNiFkRfzYZDs/SEBds72QQ1ipu8PZbBGAgGx2WDZH5Wbajpm+kEygYeI/XHE3iKgD8zw212MONeP2FuMmA8hBzVcXmwndpDIAzoiLETeJnip1+cpBRTQaIAsoAmsHeRlsTymOi4c8OGlrEoOSlBSpA1jDIDL/boElNixUdfz048ngC2qFruJyaDgowJuxG0MFBPQofKeEJBFbTIreUXQKktgASQBsWhAJpVN9ZIBnwJSkomtwc0UnJLft3VbrExoqnkSg0ggisRnGIBhkYKb5D0DdhsztG0H387Rbs8QhOGX+svU7XNdy99pOpB8l5quAPpotg00eVyi6fIYbWcY4dBrACvkWn4pCIE8BlWiUvsv9T/pe7kOkmaFZ9nPC0cRdYss7al9mH+sjofyfMBBJx0z9ecH/nQdiNXn6NI8E4Xxx/NfHqcQeakoErFwypLkcduq1imcBPYds3RjTIBzlpPlAKZLwTQyYCZyoM2JnFXQWliLJDu8JPM1AcwnoE/l1mzdySrxo8hYrxNFgg1wh3L7T6Wts9GvGrui/CoAWd64HPcKSqRzPzVHAAB8lvMU2b8LjUdb30/vfQoIern3sOSkYUdrbWSR9HR7ZxDP3YXFJ/4Oe3/7YezdeRo7H/sk9s/OsXf3HOQY6POdR7vdYuu6bRx50HWYnbwG3YP+f/AnbgBdcz3G2VHEbjv5KQp2WYCLbPA7DEnusnXMYO88JWWEEIFIDdAdyceDBmibfA9cCBgz9lzreewR5zuI
+3PEoU9sLGpEJl0ZWc7BbW6DZrm2c2w2sLNgsG93EeTBiVQK/O2PQZjqci+WpAp7/1JZQr3vOJlfaegl+cvIQ+p8N5Ekx9iWS3UgEcZyLNRJJgclUV1gHNkkszEo21yTRfQ+hKW6eGn8Ed+y+H9GVJTxZ+Vnl9o7td5YBfjZvoohKT9Eee0l6Ygi5DYZC3KnMvsWo7Lm8mfK8NtoHI52DTZbwpHOYSPMQfNzcHtngPkOxjOnhD0q7C0dTxszGcsx1Z9M17Bez7LuUAA7gtuitY6tlOcgtfx2e67HvT+MDAga303v8a0k8W00TgDzzLqbXM3Zfq9BplXJmtW408QlICe81axi/ozHgjKRveitOsr1LvM1zwAcn8vle7/+Yl7DZDA6Nc36kQnwku2X9gcAEV78zKhy70FByInjNj43hTHVC40W4BsWDACqpLAxUlZo28n1zaAxL5korf+KhM+pJKqJxFtV0pjC94im++ZTamvfam1ru2x2++2345nPfCbe8573AABuueWWy9yiS7e+79F13YU3vJ/Zxz/+cVx//fVomvucg7K2ta3tfmwf+chHcMstt+CP/uiPAABf//Vff5lbdOXZPZ5l/+AP/qDQzf7u7/5uAMA3fdM34fWvfz3+5E/+BD/3cz+H06dP46abbsKTn/xk/OiP/mhRN+YXf/EX8cIXvhBPetKT4JzDM5/5TLzmNa+5VweQ5EOMJabFBPOt/LKRG3FmgSKfLdmqfa0KfNWLfFlghfkOws5ZhJ1zWOzMsdiZIwi7D0CqP9Zsb6LZFvnB2bYwknKtsan6e/p7CYgKQ/mZYnIxAjbD03ZLDUjZbFDbp1oPwjL86j4ykqHKShypwTBybvMQWOKHZYCAIYj8T4gsEaTyibJbb06LmwikexMQXJJNDAwohEAYAsNTC/BCmzOkhUHkMvDnUnAoSzwt1dqDDheHpRpoqVPLIOkSEKjBIXm/rs+mTKoggTcb5Ck+Q2bs8Wf3bkXMIqiUwGRWqmRWBpH+WEzsP85EV0kzAQxXZEdn2SYBswSUYQCZAxAhjDnb2DnOQO9mgBZlDYFrt9lARNNyHyrQ13YC/DHDD85zcLSbcaCi2eBx2W0yu893XF/SNRjgpIZfDmSlAKIEDxUIDBJQrGvS2RM5xpgAQBcJC8QCBJyyLAtlwMGohEENFplBbsYKqnNmmX2TQJ+CeCT1SvUytkFLSGBGr/EAqIwwAGYnyFhUQJiDXnxZeGJwXX9fgT8l63J7qcj+BziIqfJfTiS0nADPCvSlGjoIaQylAH3F8MuAnc/H7nKSQs1QuijZZ9OHWe7Z5WSAdRBnbQfYleZXpZp8ZICaYoMVIIeaq6+h5SCpBfXqOakGV/R5NMwZ0naoesJCGH7n7kY4c0rUE3bQ7/QYF+pbeZAndNstNo7NsHnDccxOXoON40fgto6Bto4itJtAM+OEKtOe9HtTyT06T8SAxjWYIQf7LWvfHoseY6qLZ5OI5L2oyS9lsVi+HzoP2pjxPW62DbSGpe4bjIH9q/0xJrDvXD9iGFnuUxNy9D5jmVRLpYtNn1uggF9XjKGKtQNi+dcIcLJU+nA1SLb0/8XOnyEkJzECSfpc/wdgEpDMz5iEHRJfEALyRJDIpUdAWZtqF1pjTNlKoByg6Li+Mbnk644EluUWAFLbPVaOv72vtp7r+XWe0Dl+0N4eaDFPkp6w9SH1aMOYnuMo8o1yf4wT13MUYFUTogZ5PQT9n9+bDwz4LcYgSXwl6OccYYwENMAiEFwANsDguSpyLJH07RurgL97YGk+Mvutfe3MBuTN9Bp3knikiVQW8JvytwKWjycdSjU+LdC3SgUiuYHisHNtZYCceGHGx8lf0uuWz3WS7hz6BPYpKBwHqfloJD01qS6Oo9Te5vfJK/BXzY3Wx0odusKnssdtWJcw65Ha313b2tb26WFnz57FZ37mZ+I973kPnHP4mq/5msvdpEu2v/qrv8K3fdu34SEPeQi++Iu/
GF/8xV+ML/iCL8D29vblbtp9aru7u3j2s5+NX/7lX/60BD3Xtra1HY6dOHECT3jCExLo98xnPvMyt+jKs3sM+n3lV37lgXKDv/M7v3PBfVx77bX4pV/6pXv608t2QIqfBmqXMI+pBWEdlFoF+E1lIQOpoPzUfm3QnMYe2N9DOHca4dxpDKfvwv7p81js7GE0DL9me4ZmtoGN40fQXnMM7uhxkEh7js1GrjnmSumpop2mxl5uTwQgdfeGeWL7AeAMXl2YSbZx1BozYQANi+UFtTIZJdhDBqgh7xG9BsEbAVRmGOAwH4JkAkuAQAA/Kz21P4SiTklttk5Ges9l9lTrhOFnAlljFEmnEIABCJ75cYm9Js8BvKhUoMQRL6o9ASFJXJk6dJKhzRtPjIEVY2LJTPa37tvL9zUY5aKy+xTsE8affKbtzpdoluy6p/UvauBvhAB7YwZy4CQm6XLg4yD9HQX8vGOARhmwGLNcbFz0BdMPzoNagFoG9SZrsxnWQ3pOoJ8w/LxnJoTnLHUF+KJeU77DKOOhHzh7XqWpNICqErODgNGLMSbALwSt35dtNBEjCZuUMlXI4JY1HmM5GOscRA6Us8qztNNy4I8BWAYIFVBUmSlHJCwaSuBs+vmpsZnAwAnJysiyelGYcd4JU1UkOj0IwclwcACMpFqUa5+EJeApJAAVWbGuYEV6R2gcH1vrgEaz6j0DgZ0Dy+MthC0TSsBvqZ6MvC6APp0HyWXG31RWt3nP9op2ZUzXsiRgeI9PtUWilTXVDmv/azscu6L8KgDwxr/Q+5smExk/qAAD6/lDgr1RGUIrxqICSnYanPo/se3k/yQzPuyD9ndAi12Mp27HcOfHsPvRj2P346fEv+oRZB5ut1u0mw1mJ49i4/gRbD/wJGY3XA939DjcNSdBW8cQNo4gSCKIZR+mmsyjzOSuQXRgFQMNeIcBDQa5b3P/DVUOiAUsvTD8ioQq7deB74VxWHAgXoG+Ru5rbQeabcFtbDJQKQlh0beIrkG/4GSqc/sDzsxZPeHM/pBqIye2uWegyBEuHEC3vuXYZ8UIZejXvrGOH3POlpg+F2vWP68zZWqfoNq31gbTdDYrm65gSgZlBSj0AEDwEQByDdwltt89sRVrCPseAcDQIxJLwHpiqVGqFBxU4cJKezpHmHmXmH6bDWG7dWgWu6D983D75xB2zyLu7SDMdxK4Q94jupHZmOMIKxtb3Bu1NIDvUiKU+kWLMYN+Wstvf2CQbxEiy8mOAXsL9e0ZGPQuS/Jz//PzZsPaBKMk4/GxUvKJJ/tUr6Gp56LfXWYtg/35JE8fS1CJPy/rjFLkOsVqXtYF6pNoPT/1tawFAQ0Dyt9QBupBYzMa0NfeLihduyZpihT8iyBHuXa2+kSaWDCOzCTWmqGLPoHCKv0a+7npDGfYxh6EFktWA36rkj+m1t3yHLC8xFdg1kWpGR3vzUV46bb2rdZ2f7cYI377t38bd911F5773Ode7uYUtrW1hd/6rd/Ct3/7t+NjH/sYrr/++nu9r52dnSsCWHvkIx+JW2+9FU960pPwy7/8ywCAtm3xxje+8Yrr/8O0hz70oXj3u9+NW265Bb/yK7+C2Wx2r/cVY8TP//zP4wlPeAI+4zM+4xBbeWXblcgS/Yd/+Ac88IEPTHGmS7UY46Ht60q0d73rXRiGATfeeCNuvPFGnDx58n59vPeF3X333fiZn/kZfO/3fi/+9m//Fg95yEMud5OuOLv6+dQT8i4r6ymlYIMJ/toAQmKwDJkpB/DnpgbCSpsImlMMDJiNPeLeecT5LoN+u+cwzPvE7nNtw/KEbYPZiaPwsw7dyevgto/CX3MS/ugJyUDvEJsZrJSpJ2l7qCQ36745IMNY5XuKRZnJsuagli++n2IQsm9dPHIf+jIALvsZhdWntftU/mcRWIJqMQap4ZclPZ1kXmd5PwaNWhP4SdnzLsvrOGEB6Xb6vy7CxxjhZKXPi2yVxuTfi1JnLMrv
h4gk88RbLWesXlQgyAKGwBIUV382JXOTADbH9Qi1vaQBAw0qQDJjBQi6t4XvlwIiQW/AMckeRaLMZjPBWcvuSxKMEuykcZFYGjTMgf29FGhg1qQHdRJQ1UCUMh2kBo2+Z0E/DmJ5CYTyIzifaknCgn6uwSJAMtMDxgCpSWMBPiuplWtMal2aVfKeU7aozoFfYlUwA1WDeIEYLAsEOBcL8K8+nemckwC2KK7aZNaXKGr8aYDWmZ2JReTruHhOG2TAWiWm4LgeDtfM5OYqLjwGJFCwkWt0KshDAlh6ue6JGNRvZCzNGoeGkBIZ6vl7ialnM84tsKH1Rq0UIS3X2kyHO5F1nwEMI4EKZKnQta3tajAT6Ae5pBiQhnoN+E19V78v+0jzTAzwJsiuAF9966zi7vyeBRwT4HcetHcW4ewpDHd+DPt33IHdO+/G/unz6HfYH3Ge4I902DzB6gnbN57ExokjmN14I/yJ67lW8vYxhG6bZdMF1PAW8NMEFVVPIAfAs9+lQfTFfpI/zCCOS8wVlSYdIxjwM4lXETlhC2OfAvGxnyMuFqAWfL9r2iznOTuC4DuEbjP1c+y2MFCDIQzoxyiynmOq49cPLOvZNY79KElIsL6TmjKXFHSt6zen49bHUCbAcX0vllCMMmZ0nqUauKvG0GQtbTn3THeX96eSgCasZm1ZQKUAU+Ss8UnS2tGZ7afM+SwHWiXC3QOrr6EoY0sBUk6Iabi+YHWBZJl0vlK8Z594o+HHTNh+G55AezugxR7X8dvbyUy/tDP2qQp/yrNEbyTi+5drEoDPCXtIPpCy/LSGn/r1msg3HzmZr2aZAvwdHnMOTpKsvHzfS7JTiAk2vncWuU5i8gWETalKBXrPDia5LpL4LjIRWWAtrw9g5MXZLyGUTL/aVDqUkwmXZUS5zmkJ9mlfRVkL1CxPrpMMlnEnnnFGYo+fkxsn+kOu3yDMPgyLBPjpnBPnO5x0sOjTV2mDFTdSEh7EN1epT5X2NL4VJ0ZMlM7IOyj+nSxLICUN2DXNJQ3oUsbF2tZ2P7EYI37sx34M//gf/2M861nPuuT9/cAP/ABe//rX46//+q8veV+33XYb/t//+3/4uq/7ukveFwC87GUvwz/6R/8Ir371q7G3t3dJ+3rVq16Fr/mar8EXfMEXXNJ+Qgj4jd/4DWxsbODmm2++V/t41KMehd/93d/FE5/4RNx1111o2/ZQAtd//Md/jM/7vM/D3//93x9aIPyv//qv8ed//ud4xjOecUn7ISJ82Zd9Gd7xjnfgfe97H77iK77ikvb10Ic+FA972MPwq7/6q/jqr/7qS2qbtcMCnf76r/8af/EXf4Gv/dqvPYRWAb/wC7+AL/qiL8LDH/7wS97XX/3VX+Gv/uqvDoU5+zd/8zd42ctehje+8Y2X3G/vec978IlPfALPeMYzDuUchBBw55134gEPeMAl7efUqVN4yUtegg9/+MN4wxvecEnX1sMf/vAkWXzzzTfjbW972yW1DeDj/IVf+AV89Vd/NU6ePHnJ+wOA/f19fPjDHz6U8QYA//f//l9sbW1d8vwLAC95yUvwOZ/zOfiRH/kRjON44S8cYH/xF3+Bv/mbvzmUOeStb30rrrvuOvx//9//d8n7ulS7f0UirRTQxSy+pzKGhb3CQQ1ZaEmRqSSJObUfchwOqCVnVF5u7BH6eVpwh/kcoR8QJWDhuga+beBnG2i2Zmi3Z3DbR5PkFNfF64Sl5BO7xgZcUu3AmuGXZFyqtpPwt+pFmck4Bzgjm3xT7i+AvztlWtNqHAGfF4YRSMECBfx2+hGLwCDKfAwIIQrgx9/RRbZl9rWe6300glRY5pSCYa0EQBToy58vZ90i8KJ5jAyJcR0VXtjbumTeyWJbgIqpm89YZYBfzO1JR8yqbRPwZ0DQIMEIL4vfBESSZkVDpB1zv9wT4G9qIV0Dfy7mGm3aTypDNRZBEqT6clpzTQO2CENmZo0LhGGRJc20LSagAOdZ
SqhpoTVEODhFORtdWCW2llT0nTy3aaxHCV4tRpWjymCfys+qdFWMmd03xgzyWcBvUt7zIqyeqZjdJhnhkQDvOOjjkMalflGDShlgLwG9pfNKy9eA6ejymncoA6oHZTYbwE+xSA3MpDGqDEWnQCMlINXnQ8r9kNrM154DM/1Y4lNkPx2hEcZoYi5PME7yTg8A/DTpQWsVRQAGcJ+Sg6t/RecFDVEmRvDlAP2mGIqHvf+13S/NXgeRaDWwMcUM1u+vGh+RQSDLmJqckaaUBWwSUxi4ntywj3C+VE/Yv/s8hvkCY6/KAw7NjFUUumNb2DhxBBvXHmeG37b6WDNJqupSTSkFKZMEpyobaDJAxdBDlESVaCTSyVX+TAmeZjUIuTfGyPXUJBDPVPoRCFk+D00LdDMGKFt+Vn92pCZLpMvzIjDjaq8fk+wiAHRN7l9NMlFmeWq+Pb32OKsHxYi67leU9kapE52gG8r14grT8y0SybFmalkzUur31hTwG0M+J3yP4gDPGIDoFIDh97we1yX98jLgp+8llvjIdSJd06QEq/rW7YjQepd8jtar78v+cedJkqrmoEHAnGFR3Nc1eUqTqkhAYMvQqhlbVtIzyZ8b30gT+faHIP59EEUPZv1ZyX5zNqQed8TolMXICXlR/NsIpHEeAy0ruVywzxm21WvWXqc6J+k6ZfL7AgCmpCZk/8STAn5ZTWFqdK6qGZh7QZ4NGG3BPltHOn1HEsHIceJiADFzEFkVYuoa0uS6qDX9xP+Oi0UC/0K/wLjgtaPzDt4vMjiMcvzUYyatLe3/B1jaLuakKmsW+Itx5Qr0vre1b7W2K8wUPDmswOJ3fdd34clPfjKuvfbaS97X2bNn8fGPf/wQWsX2H//jf8Rdd92FrusumeH0r//1vz4Upp9zDo95zGPw53/+55e0Hwv8vehFL8Lnf/7nX9L++r7HM57xDDzqUY/Cn/7pn+Iv//IvD6V+3rlz5w7tnH7/938/br311ksGYQDgy7/8y/Grv/qr+Kqv+qpDaBkDJ0996lPxXd/1XfcazLV22NfCLbfccmgsv8Ns2+Me9zg86EEPumSQLsaIl770pXj3u9+NN7/5zfiGb/iGe72vEAL++3//7/ibv/kbvPrVr8Ztt912SczSkydP4jWveQ1e/epX48Ybb7zX+wGAG2+8Eb/3e7+H7/iO78BTnvKUQwE3nXN40pOehKNHj17yvgBgHEd8y7d8C97znvfgtttuu+R55B/+4R/w5Cc/GQ972MPw/ve/vyhXcm/sP/2n/4Rz586hbVu07YQCxT2wu+66C5/4xCcuaR993+OlL30p/sN/+A945jOfuQb9LtkcLWcP2kCE2pSDbgC/VLNGauAV2cvQQIzZZ11fRYLIKTAmmfEUBtBijxfb588gnDmFsHMOYecss/xCgJ91XL9v1sHPOrTbM/hrTsJtH4M/eSOcZJ+HbhOxmSFubKes2xSEGrO8ZuqDtEhvUyA8ZU7HkAN6uq0u4sw2tu5fbGZpQZeknQCkmh8xgHxgYCb1TcMgpe8QfZuAvr0h4Mz+gPP7LDc1Cnhis1pV+kdOcwH0aY2v1ttM52w5eIXMDqxOv62XNpWNGwXQSgEAFzGOEshXbJlyfTJd5PP7S7ubtKlFrf3uqmWf18zitI8MnmjwKsQcuNDgimYQwwQCJjNqL6L9uuAOgRfgI2XAlSimNipDQFlZ1O8yW0IDUeMCtNgFBq51yRnGfbrGqGlBG5v8PNsGmiYxXYMF9SYYWwVrq8pOH8eIQWSmVL5TmX25L7OEJ4AC3Jti92ngrZb4rO2CmGCIuUaMA0IcGegNPOZXsv7A4/CgpHgNXjqUkfYcjHXFe7k+3Yr91XOrBPT5/AM+apa51uXhgJS2I0ojrGRVcQ0oyySxUJCZNzXIp8F5ba9zwAQbt6hNpGPFJD1ECdLXYLtm33N7Y+4j3T0kUCqAvAUthwud87Wt7UoyrR2cgrB6gSpAZRKsHJjFJSM+
MWb1QpZtfQXg6L1hSZ7P+FF6D/NmuqKBE0RofhZu/xzi2VMY7/wYxrvvwPmPfRLzU2fQn9vFYm9ADBGu9eiOtNg4fgRHHnQ9No4fweymB8FtH4O75iT8NScR2k32rdot9o0E8Etgn841ybcSnyeAATlJKIntLM1DNPQ8r/gm3XtZehloBDzQ71EY5Z64D1rscs1nTRDb32Pwz3n+raYFbR5B2LwGYfskQjPDXOofxwj0+yPGgMTy0zpqCsIoUNO4nDTlhIHYOr7HqKICGT8J1kcWRYmp5IoC+FOwUpn50p/RNaCJRIjo0k7SGLF+K7O1zBeWig/m+b5kYLOP5EBSoxmTPpDKTaovBRcxSqINRUWptWX5vlnIvU8lH9ZrkuKgq7WKAtoKkvoAR07OSUyMMgDYgscihIR/MtOPsNnwcxN6kb7dQ5zvZtlGGUf8kz4lUaUakd0s+1XC7uOyAk3yoTRRqpb0XOi4C5zIt69sv2FMn2vylDlD8EQCCgrjb4xoPZLM4wX9pimgxALGhnmLOPL8NHI/Q6TJiRwIzIANISZQMSUzRZN0BYIDCdMvJyM5yomGyuxVS8l4cv0riGg/BxTky4CfgvTab/WwD4hwTjKqJGVvFB9EwdOlfglDZhMrw29YpMTU2M+x2NlDHAOCgH5B1GhW1RktEu80kUpZohYotPcTu27W5lU+dJJDBcT/VQA+FnXe17a2T2d7whOecGj7uvbaa/H4xz/+UPb1hV/4hfjCL/zCQ9kXwLWbTpw4cSj7OgxQU02l+S7VPv/zPx+/+7u/i4c//OHY3Ny8pH397M/+LD7ykY/gIx/5CF72spdhsVgcCuj36Ec/Go9+9KMveT8A8EVf9EWHsh+1w2T4Oefw9Kc//VAASYCP9TCPd2tr69D2pbUkD8OI6FBYpb/xG7+Bd7/73djY2MAf/uEf4hnPeMa9Bjnf//7343nPex7Onz+PV73qVZcE+Kltb2/jFa94xSXvBwC6rsOtt96KYVhRMuxe2IMe9KBD25f3Hi9/+cvxrd/6rXjrW9+KZz/72Ze0PyLC937v9+Knf/qn8SM/8iN45StfeUn7u+6663Dddddd0j7UHve4x+Fxj3vcJe3jzjvvxDd+4zfihS984aHdry7Vrm7Qr17k2QX2xX4HSAy5GujSfaVAcphYuAf+Q3WRdGHc0dgD/b7JsM2Z565t4ASMZsBvE7R1lGv4zbbhto8hNhu5DpnUapk87ipwYEE8lVcqghBV0Ds2eRLVgF4cB4C4ltfS/lyD2HScKeubzO5TUzDUdwizo4i+w95+wJ7U89tdhBQQmAJJHBGcYfIp2MfgX85w5s+Xvg4AwvQrAb7aLFiiUoIKmrDUj0aGKIERKcM3QlgQEOkogTCqYNM9Me0Ksotc6HsVuCkNV2lSy6bKXCOkIIXKf8J8z/bdAVfNZBvt93x6zcwuALmGnAZBgqnfp4FbvT40w1gp2RK8o7ZjwK+bAa0AyKmupdSKJOKglCvrs1nwZhTmngaqQsz1Zyzop2CfBtJWsfjsmK2zwus6kgdZ/bl+l38mioxZjgw5kZz1RAghl4rTYCVwT0BnBuAs+yQBf2Y+0aBrYl5ccMchJUB4ypcDsxdzsDS9XtFHGjCz4AANfa4DaeqJAshBqCn2xwTgl96nctxYwE9NAT8L9k1JkeocwMFlPhfOJDB8So3o4gfDvd3/2u6fNsXMqKUIDXjHc8a4LKVbf2/KN6vfU2BN/k2MOwOIcD2/faCfI+ztIOycxWJnj+sj9wPGXgL8jtBuclJVKyy/7th2YvfRbKtUURBwK9UNC8t1C5NU71Rimc4zYAAh9ZEmA0Cn82VfMvuZA4MypkYyt0V8R5Wt9h1CM8PewIlUKpmo9zIFV6zZecgLU7o1iVQZADzg2hZfWJPjJpNCzFwcw5ilncHAnq2nlvoPDAZEoszis/1UgTf3BRtG76HqA7IQBAN+MQKRMiCzKhO3YMlX99JJu9A2cj9VGUmu
2xwRiBAd4J3PtfEkwUqZ8KRy12HI/pUzAKUqJjRtYpBSNxOp9JwEozXMFHyLUYG4iCDgVDDvLUSxIz0Mw28hY0PHovb5GCNak4lkJS0P1VSVQFl/AN/gRerTOZew3SiMPr0K+X6e/SyblJT9XQuWy/fA419r0iHGBEQf2NQCECwBP/td9QcDKStydeZXwdYNgecVVdhQP1zGShwDYgiIYwBdDLpmfSmTcFffS3L6ofpllNeM1S4TYE81kH9hKf37zNa+1drWtrb70A5D7m5/fz8F0r33OH/+PPb29i4ZSPx0s2/91m89VCBmbRdnwzDg5S9/OZ73vOfhh37oh/DgBz/4kvb3K7/yK7jrrrsAAL/1W7+Fb/zGbzw02cvDtMMA5e8re8QjHoF3v/vdeO9733vJ+3rgAx+IV7ziFfi+7/s+vO1tb8P58+dx5MiRQ2jllWEPetCDDhV0PQy7ckfWRViSoJoC6KxZJpD8D6AA8VIW91QgOWW35xVGFDAMUZaDlawRxp4X2lrHb28HcV+APwC+bdIzeccyU5vbcEeOwx09Abe5zfVlJHs8Nhv87Jq0ICXkBVzdNogcZ5QM5JSZHvk5gX0ts6Z6jdGQAcA87zvavg3KDATgNrPcjs3oRGaaDSGiHyIW/YjzPS/8z8wXOL8/YHcxYj5wv6nMl63VZ1l9G41D613638b1/cTiuWbyrVpCrVpbcZK3ZHkDXOsvlt/xZjFqd2MZf/o5ML3NKqvBPz1ce37MUTDTLFKSXIogeAnO+Nz0fHwxt6gILFRblkEHeS+HSsx2AiCZAyVpP9cCYqAPwzzXYlrsAv0+wnyHQeNFn68j5zPgN9tmwK87gti0iO2WgOES+JSxlmqh6KUrY1eBvAgUslSLwMcyhFynbxHCJJNvldmgg1tRvwXIYKH2oPZnPXb1tSeaBP4WY0AgAhA4OCtfvdB40sCliLAmqTIn5y2NqToxALlDJwG/CwX3YyjG/VR9wbSret4ec3Aq1ZEamQVK42I5cG7Zyz7P9yvbrQ+XA1P12Y4m8MjMRBv0rK6V6roH8nU6hIPH0drWdiVZJD99bVuQx1xfCcQxwV6WexxE3nbFtTgB/FjTyyYBfwAni/S7qY5fOHsK/d13Y37qLPqzuxh254hjgO88nCdsHN9Gd2wLs5PXYOO6a+GPnoC75iRotgXa0jp+M5Z+VgUFbZIkhFGMpf+o4KPOjc1GTjZICVUyB+kcDcNsrOdK/R1Ngtk3SWJLbDbPAGW7iZ1FwO4i4vxiTKCLZagrS139Ie8oAUTqa7VOfCufWX4KYixZkQQ3FABCAii1vcYnToBf/TyVhKdMLAVnqPZKTL/Z79v+n7D6cJRllVhEEQVUwkwrSow/F4Esr51l1RWM8NX9svyxAz5b1eYYBGUMST6ykd8evTDMnNyLXJaY7Gwtv/39pKgQVCZWZdGblpU5mnayHnJia1kVBfA9MUl7RqS6x4MAfVzHb8RijMzyU99KGX5hOQnGU064UuUPQIBBL4CrjO/lEznhs9T9uMLSWHNgxRQBnrgmL59jlazXOnzWv+X1hSpcKGs2N7Fm+el7U2DVlK8NQHyOC/mhsUg24/2JPwxKdb4tU1vr+cV9kX3dk/p9wvxTdh8AkHf8kFqPk30pNSCV1Vcw/PS+ABTzWZJaBRJYGOJyAmIJmpZsv3uaWLm2ta1tbZ8O9oY3vAEf/ehHcfPNN+PVr341PvdzP/dyN+mqtMNghK3tntvf/u3f4s1vfvOhjNsYI37lV34FAPDIRz4Sr3nNa65IwO9qMOccHvvYxx7a/pqmOZQ6kmu7sF3VoB+AHBS2mYu1ubwdf4fyQlEBs7GX4LIsdOpaVjHkLHZ5TxVTCllN3efYAyqZovJMieXHUkeuk6zabpbYff6ak8BsG8F3iN0mL5yaGYNzvsuMowh4qV2mC6Iiu9qwc1TmLmpQWxdmAAMfIsvjzMJVwhlwmhWcgnplkD2Sw1xqoi36iD4MKQiw
EMnOuQQAdhcjy/wIw48zgPNi1i7eGk+YeZfAvq3WiaTndEBHeYqafQsABxV4P6iunYKqLMEj8pUJhKMUKBohclFmVxboS6w6E0DTPr4nOeo2a9gCfvbo8vu8c2X7ReK6f3WGbH28atHIANlAQzTb6fHWcQgdO54yi0D/TxK0AuBo7SK7C/IeEHlYzjZvgW6D5Tx9h7CxzYyMlt8bItAPmYlngRk9rpjaHBMTIkaVp+KgkrJNNTt9itlnAT3LjmyrzqwDL2qjU6CPs9mtHKgznaDgt93fFJg4ZQqyKmPVrRhn+hsx8hjXGkU2sJ7MBMwmGR0HsTzs53XixAqAsNw2z+lFUoYJuCdmiM7RrskMGwcBHwxD0TJBtTZqxQrNEluxYPgpaGzBv6IrdOTFfG2O4Gt+uADZ4z6xmql1X+x/bfdPq0GKqWQq3U6A9mLqSNew+iYQAMct+yfIAd9i10DJ7hD/jIY5aNhHnDPDL+ycw+LcLhY7eylQTd6h3WzgZx02ThxBd3QbmyePpRrJNNtill+7hdgJ208To4a+PMbURgLIZ/l35DkqsdbEx7LzeJImJcqAYgylLLtNNFOZRQC0uQ0aFoiLHrQxS/5ikGSwvo/ox4D9IbPTgWUGOUt3sj+ltt01DA41nEx1weB5cS+ob/5OavdVSXAKMokSRPJVhVFpE9KsTw5imcUivUj73L5vgT/dlMLk3DTl8fH4YtnMYDayXTHKjdIBfLMOBHLs+8Wk0Ti1c8c3DGHB1v2l7Vli0FfXAsWAGAZ4aoQVF9FFwliBRw4MQHVSy89HrUFpGKPOgxpksE9ZfW2XfjfoNe1boOkKIF8BGb1Xqi+lcp/zJOXJvv2+SaZahCwtW7P8+FysHn8riVV27Nj3pmzK59CvBHAiYwBAA8g18OSE3cmS9bYOX/Ljjc+bFC0wrT6iqgejrC9W1aObwvcUdAR4WIVQJoxN+Z1EIj8KXbeUc3kcleHXy0NeL3pEqQmg7D4G+xycdzKGukJRQev5JaBYmaLpYeZ87ef85QS2QvocWAY609xR9MvyeuZTYmvfam1rW9sVbPP5HP/tv/03/OZv/iae9rSnXe7mrG1t99ge9rCHHdq+/vAP/xAf+chH8OxnPxtveMMbDqWO59rWdrXZ/QL0q+U3UyAYSM5zkYku3wOQg8mWyaaf1wtJ+55Z8EQFDhMrZWDAb+gF7JNC6UWmrQM5D5ptgzZmLDc12wZm28JkYklPBTkU8BuDZjfK4pEc4DtpW9XmOqAmwe0xZrlDZUApq0gDGWPkhaIGavxEUGJ/5MX+ziKken266D/XD7zoF3AvRCRZn8WY63nYRXSIDEDY2jIalNpsfAHoTRlJ0AOQeL+MgVo+UENHeXE5vb/0uflNp+0WVtsIrt+VmHnpuzkoECVQ4iVwYdt2EACXfrPILC5/x26TA43L8j51EGIqgFJLCQG0BKAFkSXSoA/3RUy1TZSt6WmZ2ZCuNSCPzTBywABI1wScZzlPqd+X5DzT8wy9AMu9ZI8PIbMU6/NagjcZ+FuMCvYxu8+C0Mr0476jSfBP60Xq+9rHU8ErWwsQQKrrwgGgvP9VUrVlfcpyo8RmNWC3Iw2UKfh3AAAeVwQubWA7HcgK9KoGBCzIVwMHNain+62/Y9lFIQfI4yjSd4GrMyV2AiQw5BreDlgJ/CWgUBIgLOBX1/ArQOCK7WdtXNWNqOsXrW1tV7jVfsQU6GfAc4qcRFTMFTrvy7YZwDH7VhnH+ndj3jYFdoP4Z8KIG7X21HwHw17JTPGdg591aGYduqPM9PNHjsBtbmegQxNIfMtS5aMAhmEsE7zqY45jmlPsnBVVtjBmuWW9Z6d5PVRzn00UkzmCvATVQ+D7oPqLmgjTdqnO2hhHjBEpcWVJzlMYQF6YYJ2AfknW08v94QDHSgGOlePgHpje7w/YYBrIqbYhM4YKYNB8556CAbZuGKD+lNZ25l+ZYsvTVLLMPbAlwK9+Hdi3V0YZHNAC
cKFUcfDCOmscoSGAFn2hWkJOkqq0LmQ3A822oDXYCvY7wOod5IpabLUFxORXLUJI8p0hAoOw+xaBgb8a8LOmPtNUfe1yu4k37ZxxT8GSAmzmneta0blc2y8xzMSPWgX4JZaf9XlswhGyV677WAX+TVmtDGHBvilV88nj1efkSwXEBQN+ECnPtE/vgAr4Y2UEfc/rhsX4sYy/QnZf/DAG/kIC/PTzVb6Vgp627w7C29e2trWt7dPZ9vf38ba3ve1e1z9b29ruT/arv/qr+Imf+Am86EUvWinNv7a13d/tqgb9aALwO9AsU0T/r/cBJGZcLV+VvkOuyEwncojjIOwl/o1UIH1/jjDfScXRUw0F53jhvTGDm23n+jLtFkK3yQGpbhPRdxjgsBijAZZisSji+lkuqxshL44jgH7kOi+eOAikQEkJC+WMVgW5vMtZqQMY6IrRi7QPM/b6MWKnD9hdjDjfDzgzZ9nOu/YW6IeA3X6UfREaR+gaXkh3spBUeRxAszZZynPWOBzpGmy1PtWcqRlyBQNOgA2yC++8ZeoLoFxU2qBFHexP/WIWl7zwh0gcYckS0Ei8QI3RZIVb8NC0sWR2Le9z6Teq/2P1mdYNaaoNa8DwoPhKWXvNAmc0AXxMB0IaBcLGobg2Uw2+GDjoNLAEnMpKwSnY12bQzzcIzQxjZHbfQuWkQq5hBCwDfnwMsZCJ05p9Kjc1Jes5ZQrs2XqSVm62lSDRVLdGZNk1rcti6wVO1QpUq4HFDP6Vv6kMP68kN7Lg3xTAvCwJZS3NeaNhp9RMvVXZ8zVgWCVUFMCegnqAzM9yLhe9bD5yQF2AvBiqMNmwYOBB5crcgOg7UCgDT0nCUzLRo+8wwqXxs4opasd73q5MJlgF4nNCAE3OFfe1RXKYlDY9xP2v7X5sNlGjmL9NEFuCu5lSLSw542fFGADXgFyT74EaaAcY3HLISgQQnyYMaNSpGXvQyHX83GIPmO8gnD+NcO409k+fwzDfR+gHkHfwvkPjXGL4bd94LfyRo3DXnEyKCnF2BLHdZGnPbovbo/epMJQ1kOtxrkwZG+RuNxBdkwC/USaMdD+wvmcw855JimBZ9o79Td+yP9nNcvKYADWxO4LYbmEwfoxzKLOTxFrvsAgRG+l+pX4XxLdiX8smqth7UPat1N+Oy4kf9lw6D7gJGKMCCRLLX0Fh7SMgq1HEAJBP20TDAiySR+rxqPvwFw526bEFAfbqumEA/0/EpD3nGBtSVryDYRvVgOhUP61siJvchmKUpEJm+7WugY8MeQaHQplBFRYaAsvfjot07ybvEbV+n8h4opulWuEwjPdkhqmloGAMsZDaVKZfiJzMNx9CkvWcDyPmAwOB/RjQC93d1h9OihBOpTEpJfy1ntcM6rss+auSSJAA4LReyxseqCxg/yfHIiZUrou8a9IP67EC6mdnVQtCJd1b+T86VxA5ZhCageZIYGQDZDksS4Aq8MXA77Sf2HqXfHC9djUJj3TekXkuhsz0SzLCxrdKIB94/JTJB11m+zUt0Mg4EQYytAa9JCfUxtdned1qQmqdpGmTKrkfBJCXs3SQD3tf2dq3Wtva1nYl2zXXXHO5m7C2tV0x9i3f8i2Hyhxc29quRruqQT8AZTB5yur0x4kFWQ5YWCaIKxbgyQlP2zbFZ+TK7TUrXdl9URh/SeLTeX40rWE4NQUDJYp85jjmgLQjSrJ8uQFZgtJmMOg6aYxgqU0S8EEW6Qpm6WI6xAxOcZBFvo8c+B4D72uMUSSlWLaT6/MFeR1wbj6gHwL6QUE/h83Ow5sFP2eZU1qgzhrPdfy8w0bjixozjcuLWKAE+HTRlz6rwK26P8pgPuX/9ZhDBKjM+lazoylU/98Ts8DbvVmyHoQhWNB35W/FAEFQJvfhyV4LkHqBvDOV0lT50PJ3c+ZzCoTJfkCxCBRHgMGeNi/6o+9YdrGTDHStZUmOwWsZy4OCfYGDBBqE4r4pe0fPrwXeQsgMroNY
WJZZlwE+C/5REWzR72h/p99HROMIY6BpANDFQvqzbIOev8wsbH0OmhEE2MNyHSYFw6fsQDm3ioE3xb6bZALa7+r3zXtLYF9KuBC513HMrGipfxptXasa8AMQm/J9ajuAhjyPRwJsYNAEMRXwG0Is5gXAAt45OUBZCjYxgMxkmd+TMQAgUFyqlbm2tV3xZq7jInhtNyFipkYokx3qJKn8QRUs1c+ND1fIDEsyAI0DgxhhAEaTULW3g3HOLL8YQgpUK8Ov3Z4x4Ld9jJOrZttcL9l3iE2X5QsFSCpkK6eOwc6BtX/oGpb/RZVQYee6aj4s9uPlnii+H8IAtxkRFy3isGDWX9shtBuIzicwiojBvEAxYQFpuqQITwz+KXMNQFEn2d5X9DMC309SEpWiXgck1kUiBgeCn5ztEuDn/Wom5UE2Afil94Hsk+u9BtLuQ2BZi/BBqo27sn2YuB8CuQY4sBpMngD+Uj8J249igCNJ9Imlr6PgTpKKrRKt3MYm4jjmmn0G8IsKkFagn17fysKq+yTEnEgVJIlpETLbTwE/vW9OsfwU7MsJY3ldQGTHI0oWXd1HdT8CDArGWF6/qyxd75T7W35TMXWV7E/sV/28AvyKax0AgRHjKGOUYGoGCtjHa68ymVA/m+qzVYw/+131BxNT2vhh0bRPE6mSDKzuQ+v3JcCYWcjUtvxeq+Af+1Qxvfb8ulpHF/1cgfU6lladIStxao/xUw/5rW1ta1vb2ta2tqvF1oDf2tZ2fwD9JqyQ9jRBpiXmijWRsrFF6y/+B8Pyd8LItREkOKW1EgAJSgOSOemFneKLRZAWQmdQAkuLHftviDHVjiIwADjI4mkMUrssRmbrGbYKoOBeZtuFyEENlg/NgXAF+gYDvOxLVq+CfnuLgP2RM3wV8OuHkDJ5ASSW31brE9Cn8lIz79B6whFTa6YTUKXzJijguN3M/suyOrk/LnzKNKNU+5Yz9AUYIkpB/np/lg1on4EMANbMQyAvTpWBlbYDVgexgKWAaT0uD5IDBSqQzy76DXtrMhCSwDl+9uS4hiQAH0kAEUoMQNsHCfCrTJkMab+jR8QGByCcT8Gn6Fuus+Qa9GPEYogYY0jATKrLF0rZThsAK89ZCfipvKxl2FmzzDog1+1rveMAqnOJZaeZ6DouHbJElY3DMEMSiE5+11FiIMbIUqXc7mWmYSHhRFSwCTOz1WR2m/5XwE/H2gXrNgHTgF+S1xyKsTQZJL/Qfg8C+wYjMyXzpYJ//NVKHs45QANVISAOPUgD5A3P5aQBS51XPQeoFgEckIyZnWMBvpLhmsfYqmuODJtX52HlIRxC3Pmem15n9+X+13b/tCmgX80w3NS/coaxtmp7CyDUv6VmZaMRpe5dYMCPxh602Eec73Itv7On0J/bRX92J0nSNbMOfrNDu7WJ2cljaI8egT9xA2i2BXf0BOjIcYRmhrCxjdiykkIC/Cyr0TL9tC0VowiuyczG5KvFJJle9GUYlhjTkYjBIGG9R9mPfpdiQHAN0LKKRLo3tlvMpgHkfkTYaEjux/letwgBLRw2GvZ3WpeBgtY7YVQ5UYCQwxAGuar6rZSFPmiul4S2lIhh5ut7CvglcMJlXykpb9QJf14+E9YmAyzuQAnA2ne4GN+x8K8MIAVp3xKwW/tzBwB+k31jfDYFhRthzaoigyNZ34wDA+RaJ1LHse+YKelbHlPKyjK11ybBsxWMppQYYxjyC5XzNHX8FPDrJ4raKstPAb7WO0n649c8NoUFmLZFCYKtAk5t96mgf8TSNT1lFB1iGECOn71rMBLxHGdOvvpZqe/1OjdM3rIhLtWw9AKyeqGtRSIEUQfJAGM+WO9oCTCt/UJ+NnW1idKDgMQ0riXSp5JmKcmvGGaoPFOba9GnJIpmxmUomlli+2mdZKS5BUnq2Z4r3c6y/HLZB95UfTAF/pLcarX2+5TZ2rda29rWtra1rW1ta1vbVWJXNeiX2HgOB0rpFGDfFKBi
aw7YAJXdxmybzNRn4aByD0gALAwLlk7Zn2fgT4ujt+DAiBemn8ijFG2wh0J5gZeBqHIBmBZ9MKBS1NqGHKjux4gYYwE6WWBK9ziGmOpMaCA8gX7CsBqMDGLrHWYCEG4FnyQH+yEkKR/vCNsdf7YhgJ4NOun7rSdsNp7rzwhA2Dgw+CdMq9Zxn9O4X2afSzAl9d7UwknOn1fGDzmMAsgMAgoNlIOPNdOHJU5LVo+eowCuSWjfc4QUrHDFIr0C/BQIqSz9hA1WkEtF7ZPUzSqgz/zGSmmxA0C/GChJnUUfWJ5IWXoARhA88mJdTciS3M/kWD4qBO40DSRJ1rMGO+EaqbHUoY/McO3HuAT21YE5lUOCsDa5f6eDeDWg5hxhHKOcmwjv9dooWXWW1eeICrCPAy1IdfVqpp9tB0s9atZ7DmZ4p2Mtf0n72B6n3S9ZQC/1wfL2F4qxFtKUlq1QPVLgug6Q6/bGDpInKvatZmueyv8qORUHlQzk7SMyCMiynT5/x3lonTACQN5w7DShwwboY06MCCilYjkJIAOzGkyvzTsS1rRKg+X6jrmPV3bH2tZ25ZkB5SdBPAMS0LjMLErbAYn5offbSZsAAlXOk0E/BvxosYvxzCmE86fRn91Bf3YXw7xHHJnlp4DfxokjaK85Brd1LEl60vYxlvNsZwz4NSKleVAf2GdpZ1HjrNlI97OY7j2m3ptuVwFBSdHBNwxqxVC2I4YE0vCclu+Nsd3guSxA7kXsK3GSF0SmkuGDQBFj5IQqZZOrPLWVptbmKeCSwALCdP9Yf8T2DQInW3DnlNujxGts0H9p31VfWFkF68fYpBPGdBioYU1AZsWpogUtT93yvWjuz6YZtPzaJtEoO03vmwXgo+2esimA7yKD/GmtIePFJ+YjEqhTXIvOIaItzpdNsCokr6faqSC9MrGQ1x5B4LQI5Np9KrmuyVUTNfyypCfJWgHJt2L/nscm+84ZxHJACfbWAKorxxP7JC6fE4p5+yVAzrwfA0A+sf10HAVQqukNZB9e/XgL7JP6McXcETOTV/xordGYxyAhOggQKD7HRSYM5XUGCml3ZX9SGLl9Wn5iWBh/S/w47/M61efa8wXgN9sGNS3cJgN+sdlIpSi0NMUQ1d3PNeh5neJK4A9Vokfq27KGn4J8AF+DzKSWbr247lnb2ta2trWtbW1rW9vaPi3tqgb9NNgyBZYkm2Kt6EdFRL0C/CrwLfocME77dTkwVi9kEsuvnzN7ZQyFnEqqhaBZ0ZZZZfalmZ9qgbCSNWJjCLxIKjdW8MS7mAM6E8AEkL+mmes5iz2DY3o4GbQDxujRisbU2OYgkyfCVivynY5SHRl9bj0l1l/rGFRpHTP8GPRjcNDHAdT3Kfu/Bh6KAFIN2Nr3pc/Jc60huAaOiOu2kLIrOcgfJetWoL6lRTjXl6j6X5lX+pNm+yIztQb8KvBam65MpSVpHLO/ewz2mYDuknybjj/XcLBCpI44iMSBC0dZ8mksxk4Glm2QgxwQowdiVfdOg06ea/kNMTOwemHlWeaVNe2FVIdShrzW6LkYq6XN+DCn6+fVYJ+CbQr42fOuIC+lNjESqvJg+v4YuL8soFm33QJ8+t26D1bVlavNjsv03YO+sAT+jdMBRtveVdfexVjI14JmoheAYHEwHsAC5DyzAJuOx5zMrUvXi4xFxZ8VzLOAnwbOE/NXWDO2aYDN/+AkhyWU9nLbRALJoe9/bfdLoziW9w8Nitc+ElAmBeg29n6Skkaa8vMJxpMGh9PnY8+17Rb7oGGOuHdemH7n0J/bxSi1/ACIrOcG2u0ZuqNbDPhtH831kpsZM+SaWWY4rbIaHLBznGvza2ENp2QT8L1I7wNJVk/2keqV6v2OHOBdAeZw/8tve2EByr0xyeiB74WaLNJ48V0cg3wEIDpgCAyohEhwct9Rf02TWbThIUZTE+wAFo1zy3NxDcDY9w+w2t+Y+g7X/wuMaJr36nsPAQKUhlwLb8V8
XEuC83vT21qJRAVBCeXrBEStSqJCBfKtOuaJpMJiDKZhOSyNT2X6LSU2OgVVGLyJJvklWoWRe2A8/rL/khJoYqWmIKCfKhk0liknCiBOE6lkbeAUjDZ+liocQOel2pxbGktR+occgADxYScAv3RQefzq+EndCLOmqt7LCRKVf637NEaBr0v1vz01nPRFhJGQavtZWVoiYKgWHZqYtKqeXfZBzRwkvhvXhww5qWpJ0nN5PJDzDPg1HTP82o5rozYbPK/6TmpvM+CX6poC0Br0q/xTPbIl1RDZXsE/y/qzqRoXpV5x2Lb2rda2trWtbW1rW9va1naV2FUP+iWzARWinGlpgKEEbOiiN1IqYJ/BwUFqquTfiE2XpTZHXYiJ5CEFoOlYVk4zTRc94nwHYW8Hcb4j0nUBrm2Y3Tfb4ixJkUah2XZiOtmgGMUgjDRKCyMPpIzQg0ylD7XORozMlrPAyRgALQIzhPwdG/QGlrMwAQhQJ6+dQyvZujPvsAgBx2Z5aOX6Z6U8Yl14vnUcCNgQVl/nCZ2AL9TvFjJfkACHBbCWajTWmeRmoabBjth0IKkj1zYdyyb5BmNkOZ2FxPIXI7MfOdjDGb8jloNGHJ4iEGWZ1LoPlQXnSEC1MJQBirShDXJVVcFiyMCKea8A8WqgD4DWSCpAm6L2iJ5gCdRKIIQYsUMMBHIchCTXwEsGOINWedHO/cPj1pMDuYaDcJKhXYCzEjiFa9AHrbGmTKwyOFdLh9r1fgKqQxRAjQBkecbaNFNY2X0tTI1Jl2vNtEZatgb8gByAUGanAn76v3asl+28BMu8Ak6uZP/ZY9FjTqflgMu+jn0cwGGZ3H5yGw08yThJgF/IjD8bNNIaMJHMNbgqoJvYH4GTHwBEFwBXAnzRsv7GafAvCnM6NiztGUUyOQVkgSQvmGQ70wNFbT+dC/eHwIyFEJdkYJ0j+KABJw5DNdXJ0bHqidK5X9vargoLQ77Ox4H9JKC8ntO9Jqb7SrRJUQos+FYYuc3S9R/NPpOPQwAtetCwD9fvMfA3P49w9hTGM6cw3P536M/tYv/0OYR+wLgY0G7P0Mw6zE4ew8bxo/AnboA7fj37V9vHEZsZQreJuHF0SXpO5+zoPCjfCrNNATkqRU3L0nQO4OQO3dYw46Pef/Ue6Dsoq48MeymKMHC0wGTqyybdcz0B8Pz7zgGLAHitEyYqDt75ogaYJq+wTLom53ALVSZaVRVsvbV0/iE+dp10VyVWTAbGa0Cq2mf6SG+AaYwtnw+qzkuMofCnaBzgfYfoCE4kFImWaUWWyW0tHTLlpK7M8GOxVwZU8v0wJcVUxzjJ6pMkv7p+eN5Ojt1KsQN8H5Yacfp/2S/qLLlcq8+2xSYZ2j49wKI+ZKyPIYN9+4Mw/MaAQSQ+QywZfnpv9I7QNS6x/BR8biXJL732ZizKONd7OdXAZq3UYvs4BrBUJxhtixXAZcfPRDICybXpxG8AKhAYkHWJzpdGAWGqT8mBYp5LfevgvMNCNl0E8XWd+r0EHZlW+l3EDDDGXDuUAeiczJnqIsYAGnpeN408rwZbdmLok7LCEgDoPNfu2zBynkePw21sMmu620RstxA2tgHfYW/MSaJpGJokgkjLPrx1rQSfNb51BjbVj69riU9glGtb29rWtra1rW1ta1vb2sTuP6Af5QVdETCwWehqJps6jpwxS5pRLYGqlJF5TzLuQpAsSqmTYBZR5BwHxJsWbmOTF1CykEIj2dtEJRgSBkCAFRuQqKXjgCyRogFtrVXH4AkzWdK2QJKpGUMGazTwnRbtBnnIkoV58a6B7BYcAHABcBTRRiqYXxn0k+1F/pNQyvd0nqV+Op9ZfiQLahoXSeYLtmaJnls918NyvUandWRsYCDmAGQEQJGz7ikEoAG8a4AkmbWaMZaYCRdhKvmlsqshyrmsmBEFc1VX98gBismfq8E+IAVii6x4u92FLAbkkIP+z0CgBkMs8KjHdqCplpj5ngZscsav
2Rxc68RehTXYphZIAfkMylJ5BMmmMqRrsE/lkhTka5xl99FFgWapzXpsyFJFKlPkyGzhYvWN0i4E5C397gQQeY9taqysCGql+c7WbrIMkCk2yD0xCajGGvgLI4COEy8sK3CijVPAqWV+LEYF+0JiLFg5ZUd6AqdrVzqy7MuLA1fvE1tno6/tXhppUogk1iwlnKDyU4BpgN8m3sgcX8i8CSiR30cCHBEGCVLPEXbPIpw7jXBG6/jtIvQD4hjgvINrG7i2RbvFgWmabbF/NdsWFkqbGH4s6X1POqNKGhJQS/djE6l4Xi8TNVKdOecANGVS0kEAQfXbNVPMzj2qTqBsJPYxAIEa0fl8f7V+V96lShTLf1Q90sFU7ZRzalsWTfJa3WaK9r5PaR+1RYQl4M++nmKZq0+C+jFx11QmlY5s7RuVo6+NiPieX/RNpVhgATfb3qmklxrwq8Eq/W6UBCth+RV9MU71SyzHl/ndJQWTGjQ0+5k6J0HWFFHWE0kNI3JSjL5eZVbWM9VAVhDV0ZJPVp8GRxPnfRIsXZ6DEuguYPWFQM6LNgM+1372VBJiBMrE0jCk5LlRwDFWq9Brc7k/jRACm2M1FbulM+BfUvQYpTboKPXmw4i4WCQlBfXd4hgKth9p7XlZs7qNzSzp2cz4te+wCEjlH7QePCDHQwTvNLGgZP7VVs+f6X1RytDZRn2yy+JerX2rta3t0C3GiNtvvx0PfOADL3dT1ra2ta1tbWu7X9lVDfpF8iaLeigDvXU2sMrhAJk1ZWU5x4U48pSYR4ROFlJdkvHRgEDOPEViwHDNBJH1lBpVXCNhBHnHwahuBto6yhnoW0eBDa6FEH3L2fQms1szWlPQSI8Nyxmq3jUY4BLg1yd5nbwAs7XolKUWEWQxz8HuxRgxHwOCSNopyDdrnEh4lmAIIIEmR9gAYZBmWaBM+0wDJgxCUAb8HBLI54mw2QjYtz/PYJ8wDgqmkS5m+/1UnyI9G/Bhuhh9VwbdLJMBABqgcQ0CEaJmqU5Ie+rxT45PiTRwkET2EVn+i2IO1KW6d8qW0POrAFkt+YkVAQsTZFiSGKoBvwsFPfQ3nY5zrVuZ6wlyEIUZr9puK39kZSQLMxJWqbYfuWKMEgkzzsUkrWrjAFN9rrKZcBFj4OCCMjk0FsU1VFiaNkt5lkCfjtMa7GuqcZ+6qg6Ky3s2A5vbTOYzznr2UPattE/jnPa4pvpwwlaF25YCntX/F5IDPfA3p5h3Tuc8j0KwaSlJIwfDeFsAbgQ5L4w/BvBUujP9pkmqsL/JYJ83UTHzuVwTKrGcs81juj51ntQ5cH8YDeiX+6r1XCtrw3kJPOUOzJn2EtRMbIV70blrW9vlsmEBDH2W0HaekaIJAEaBDVunNb9HiTU+msSkqfuo+lQkv0tDD7fYw3jubox3fgzj3Xdi7/ZPYuf2UxjnPYIB/NrtTXTHtuCPXwt/9ATcNSfhj54ogtOh28RITfKJEjDvuC5tcZ9MwMiyixx9A0iQWyUMrZQzkOdUTcbyquQwcn1Sew9OteBMYlq6f1vwTFQIdP9BQDqK2bciGNBA7qdeAD3bvpqtbtn0nLRACYBoRW1hqb5jnbiT+kdqyElbY+13198xvq2tv83AH6aBPp3P5T6j+2NQg5PldDvvnNzb2X8jdWC0HfJafdYa+FNGn7KnNKmjlnYsZNOnQDzbbxXgF5P/YzaDg3Mi/apsNVPDugAYJ8BYALlWHxl5XWsxFPslSUjL+3OFb6YSnsOY6/YtgibIhOVkwSU5T6nb55npp5L+M+/Se4nZJbvhZDkBbqsyDRHiH3vTl3V323NxD8E+PUd13TldD+q5z+NgQkHD+jkut5GCtKfp4F2DzjUYiTDGMLnWYHB1AlgNnBinSZh6hpM6hVmf0rhAmO/wGnW+k5l+VQmKBPpZwG9zG27rGMLGUcR2Q55Z2nNv4HFg1522np8ngAK4HrvMKQQzBxMVDEAyx5AOU2XxY943wKoL
a1vb2q5Ou/POO/HOd74Tb3/72/Gud70LP/VTP7UG/da2trWtbW1rO2S7qkE/lTvirEkj0zkOWaZpKkilYB+wvDgnYYq4ITHA0O8CrkGzot4GDfugsQctdhHnu4j78xxwblpQ04KcZ6Cvm6Vn0royrmGJUCPfVNSo0N+xC0zA1ITpRHKHF15DABahXHyljHTEApjTRbqy+3gRz/XUQogJn/EjQzousFwUkAMhnkjAGg4OpYz9+nRVYJ8CKZ5YGrRzXMuPxj4F/gqJMQ32aPBpwQvW2M950bqQ4vRAZgRpzUTngHbBdRRDYPafQw50aL9aIFCHiAWbQIgkEp5VeEEzVNPCVQNziMzqAr+erMNRAHYTAYMpm/hsieVavV6S4tKGrzLL1Kh/OwSAOMDljYRZ2i2ZwMgE0Li0T8jiHxkUQ9AxtRwltrU8lDFnA5cx8j40eOKVYgdA4oTFfhTMBvL4VFkiW2NGgWv+3czYW9U2+3++Xswxg6O2ERkELL+7dOjmuKffr7urDkarPJmts1Kcq+LLWpPRAUGCaxpUbV0RhCvacBDF7SKDbwwAjhx8ApBqoIYRMbi0DQCuM9Mpe7rlxwHBVwVrA0qWnwYw50OYYC8QQohJEhbIILAdF3YOICyPh7Wt7Uo2DRAnhYQwMrO7yYFwAEVAe5JZhLx9RJYQL+WL5RZkmcHyCDvM8BvvvhPzO+/C7p13Y//0eYR+gOsaON/BdQ26Y1tcx2/7GCdVzbYR2s1cc0rqTUWTWBJgwBsL2mh7nc+gn01UUX9LAL+IPEdQRELkUl3byA6RJ4AMGJXAgaEvJFJtAlD6fQVt5D5bSOLJvSmaO3AE37um+jp9TxIUNG+BKCZOXOOQpBYV8KtZZCpBmn0E2bmwKW3btU0FoDrhU0RVbyg/EZ8oy1wq4AdJrksHRy6rQOiY1Y8EHFUfDsBKJYD0u3USkwLTyvKz57H2uS7E1jGgn/ooNlmOhL3VSF9Gs26Zqmk4yQpVgNGWDlBbkfyVyySYTYG0xmDQnHFCvVfy+mE52c8CqAr6KbjXeq7jpyCgMv7yb0apzczXUIRIVeq1Qo6VJ7AMKBdSnEtMyypJ6WLO1UFm2pTGwmhUNtJ2Y5bLB1IyJ5eGCPAtnx9lxhHFlBio/mWS95RnRyzb6yLgQkTDE6nxO8BtkSRJXSPFfo64WKS1U+gXGBcm6cDJekEUaqhp2a/qZpnh121hoAb9EDEfeM24P4SiRAT4rKU68jFmOU4vDUwJCDB+k54Xk/DoqZpr5CSvOXFrW9vVZx/60Ifw3Oc+F+9973vTe2984xtx8803X8ZWre3+YHt7e9jc3LzczVjb2ta2tivKrnLQj3gxrECNBEu4NkqcXNACKGtuYCxriSTQRxbJXhGyxWSQAgCo32WAqp/zYkoWVACyNIosmuwDTZMDYj4DgNGyzixYU2WD6zajZM5bwG8wcju2Rt8gWbmaMRoUEAxZxs6yW5wwlhbEIbLWiZSnz1mlXhaa3LeSRR+XwQtmvph6MYbp16mcZ5T6E1XtsIK9JgGg2EtNir2dkukHZNDVZaYf10LhAEZctMz2cyoJRSuDIDwuJGgkO7DssbxNKf+4yqKAr6BKHnAK2D0I2Ct2OgFuV8+rJD3jBJshSYkuBUyW2xuBJFnk66CSPq8Ceap9ZrCOgTQNEFBc3Z9ABt74+5SkPgEI+yqnajvSbXMwSgODCkynAB9lmSTdxrK2aqlO3b9+poBPAt1yoxIHrgYBFeS8GKCoBp7VVnVX3R5976JMgokkgWcb8IxT40++c0FbNTZcZu2RYyYgtWA5Khiwzxsmr4B91HT8fx0o0rqrKXiG9KzzpSZAKAhoA4jeU5rfrFnAmMHUzPK717Kql2p1kPe+2P/a7p9mk2uALEk3DiDfZDa6br5qLJj3tdaTZc4COfirv2vv92FvB3H3HPbvOo390+exf/d5LHbmiGNE6zkRoZltoN2awW0fS3WSoRJ07QzRt1KntwQd
l65JC2hpzV+/DJZk8KOsXQUIGTIym18/iIrSOeL6trqxspaMVHnqthiSXxudzyoQ9f0SGcLwkjATgTSpU+Sb0IFkGImfe+Tge+vVRxOfrBgL9rybHUtfJYBJJOtL89Ogn/hgaUw5l+Qra0lHihFx0cvXTD3ZYQCaJoMF6p/EAAfCCL03rwb5LsYS2093FQ7wby5klCVvVWY/sf4FVuU6aBNrjxUJOsXrmlFoN9X3L9R2crz0iOX1O4SyFEANSAETTD/KNSKV1de6XEtZzaqThAJ9nDjuKPD9BHiXJPSBYkyV35XD1GvO9t9BtsrntgkElTFIyaAgyblJIzJwDUrvxMdABv7KY4rF61V+orPAtKyloiRH8lqpB0JIgF80fRO9kfh0jhOqmk6kkjmJYqAG/cjsvl7AYGU/WwC4cbLmkmYGyPTkCC7nKeTEMzPXLN1jyMFJTVPvKNU2/ZTb2rda29ouyTY3NzGbzdL/P/IjP4Jv/uZvvnwNusLstttuw2d91mdd7mZclfaTP/mT+O7v/u5ifK1tbZ+udvr0aRw/fvxyN+M+tVOnTuHkyZOXuxlXvF3VoF8kh9iYQvVDz/JuAMsoBQ8MyJnDCmTAZAqrBcMmaUwNwHEBuH4aaJHFiVvsIezvYTxzSmRTdnlBBeTafbNtzkIXpl90XhZQG8zY8y1nSDfmJqULnzDkbHCAvysLsEXg7MrFGLE/5oXyogL6LIvP1qlSwM+asgSZ6UdYcCUyjDHAyYJ0Fh3QOAQiBMSlbEsLjlgGnIJ8ieEn7D4PZkwmCU8DOqUlnS5eR2H1zXfkmfub61Pkml8KuKJtM/i36BNYoPvnBrvlhzEn2bcREY2T+jceS2Zrva0KtCUwCeUil8acyW4DXHYsLNnFBJs0eDjFarXPdrer9oMq0DcOOSBVy3biAKBxIojlBcDmQCMA5HqTabNYBj30lQXeiAAXeZwx4wJA4Ox+SgCbBfql+RXgt2rZbeIUSU6NmXqU3lOgx4792iaGjwmGX2wggxKAVezHtneC6aHtUiZjmh8t8AzkGjiuYWCv1aSINoFvBSBv9gOsPv92O52LNQhV1OsTMC+GEYQ2Xd+kjD+AQb62y4kVLc+r0TXC9Om4dpiZM8cIqXfKUmUcwAw8l8ocqYDfKIkPuUYN5blNgphpPMi17VAmNazlPdd2VVl1LZNJqsLYMACok4lvOKDsmixtWM/v5KROZgbLklSynWv1vhEGruV3/jTGM6ewd8fd2Dt1FnunzqeaU65t0GzPsHH8CPyx43BHj4O2j3Idv2aGuHEEsd1E6LYKwA9Q/0TYdzpfKTBClOaKASztZ2+IyvDTfUYwlpDIJx5Jak8tAIlp5pU5HdnXoWHO99E6sYwIsZ2BXINAWQKyNqIcWNffLZhOth3qTsT83RghDKFcf1n9M4+Q6ihPMstUrlCZfUmtwi37G9V4SGbHmN5HDKtPn5P/Oxp2nyZ5AYghgELLbEpyLLU/MNvbuy5JONsEnovB/zTxZ6o/J/2vexiwt4BfWT9aJBAjoSHk6+pifT4DwCb50LTnfN8vfOxqH1q3cjSAnyYV8r0yS3xqooyaN05Ulk9nn7/1Do0nbDQOrSPMGrcEXkVJZowx+zPqZ1AYMpipyUH3ECcpfJMYcvLbBc5fUjKpz4UCfhWIX4CJ2tfiT3FuQAChY0lbcnDwhQ/KPqlc1wqcyfnQBLEQNNHNJJzJGozGBV+/o5Hy3M/qKAr4JWlP5wzDj8G+xPIzcsnzRcC+gH7zgdeMu4vRAMHcVi8Ab+sJm62DDwTX8MWn8w3Lf1aqLnq+1SQRAz6wsoj05dq3Wtvapu38+fPY2tqCc/dwcrwPbRxHvP71r8f3f//340EPehC+5Eu+BJ//+Z+Pl7/85Ze7aVeUveY1r8HTn/50/NN/+k8vd1OuOvvd3/1d3HTTTWsQeW2f9vZ3f/d3eMtb3oIXvehFl7sp96n9m3/zb3Drrbdie3v7cjelsJ/7uZ/D
s5/97CsmAeHK8QQuxdLiT5+5Jl/6Xxe/RuImEqVaIMk0oGyBFgGZONjVpwcNc37Mz2M8dzfCudMMPvXzlIUMIGVIWoYfZ54Ls8836XUUBo3WvpkEY0zWtxZOX4zM8BtlMa7vDwHoRxPEFtlOXbhrYFuzdO0jCFMwhCj1/eR5NAt9CZbrb45mka5dOAX4abDNuxxcQiXhWYAG9eJ8ZEYfyzsJSKAghZV7mjJl/cGAV3bs6GuU8SANEqVgiTJ5zEOPT7PAFdRLABCVLDBbE2aqXs2hAH7pACpA02bjOy8gdLv80M/MY6VMVMgPGoeDAZ8VppKaCYySDHBPLEep48aCLXpO7D7SYcv3LKDnnYIzMOfl4HalU2FjEETFo5Hx3FbP3hzTQQ8NYBz0cBMPlc3Suihk+kv7TI8VWAFE2nFo3wOWxg3LEHeceNAImNbk8QIz10YJzqxkAqnpNTtRH5CBP5ZILtjSm9ug7aPM8Nk6CjpyDSCyfknar50BTZ5zowUfYpYq43m3jAA7l+ctV40TDVRNSvUCRWCZpL8/1RaJUt/fN491tO1+a/X1uuiz3Kc8KIwia1eB/SuY3SHGIpBfk250H2mfY4+4P0eYzzHMe4zzHmM/Ikg02XcNs/y2K8n0JoNPsZ2hV1DC/IzeE7zeg4F0z1PAb4RLAMcQjH8kQF+UeSPELBGc/p9IxGCJQmSWpIJbplYxlvr3YBZZvr/l+6T6V60r39P7ZAIEHNL9ULfP9y4G/KD1Bm07nEuPqL621KVWVmXyt42CRQIFzT2hAAtXzSmW7WNryFqfr6jpnOs9sx8VRNZz+l5/sdNYvdkqoKx4fUiMnXvCTYxEhR8LIAF+CfS1bZx4XfjGyD79KLisXg/BPFtLKgoVg09N31cA0I7fqfg0g184cH65rGaTJOSZjF9PVvozfR6Xvw+khEA7Tv2Eg1ooTiimCMrf1/2aNYWul9KjmoQZ7OMHnOc1sqgnRF2vugaDJk2FrCyzGBkI3h8Cdhcjdhcj9ocR8yFgLhKwmmg1xri8vjJzoT7q+ZDX40OSUr0Y3/2+sLVvtbYrze6880785m/+Jl75ylfiG77hG/A5n/M5eNOb3nRJgN9HP/pR/NAP/RA+/OEPH0ob/+AP/gCPecxj8OIXvxjf+73fiw9+8IP40R/9Ubzuda+7PIzdK9ge9rCH4eu+7uvw/ve//3I35UA7d+7c5W7Ckv3pn/4pfvInf3IyIXptB9snP/lJ/Pmf//nlbsbaDsl+7dd+DW9729sudzPuc/v4xz+O5z//+VfcNX/mzBl88Rd/Mf74j//4cjcFwFXO9ANQLKyTrBByBisAXkjoIqMOTDkBmrScivfJ4U4B8CrrmMKIsL/HIN/eDsLu2Vwfwdby0yD1bAtuk2Wnom85C935MiCdstGhJWHKDEZiWZjYzoC06NLgdc7EXQQOPu30Y8HmA5YZfbUpq2UYs1QPZ5Myuw8BwBAks5RBRGcW6q1zXJNPA+VyIjRw3jiV+AE6z7UrfJRFnEp6VgvjOnuWxgWCSKhiWAg7yEg8GRafLlq5dqIABwrCtl0KTFkgSxdzNgDiAJGG4j9jMDXn9Hd1Wx07FQil4F8COyXYSGNfynxZu1BgowbeDvq8yrpPxwygkIC6kNUMroOkSOsmrVp8KMtCGbnk0ti3fawyTxq8ijEz9xLDD0YWNLA8UozMHORseqkrQrrP5eZwnT7OqnYxYlSWYCT+9UAFM2KaMTdyQ+s+OaCPff15te3U1bskk2e3tew08x1bNyUFvS1wq8eg+yIC01rNXmyWu95kJdAaJciVJdZier8AFi3LLxgAX39CkzK8sHSBxPzTa5lZzzNh9UkyBblUGzXVwSKHXua1+RCSDHJ+b0yJDDlDnZKcpwJ/rcxbGqh0MqeRgsqwgGzJZl7b2q4aE8Ce5L4U5juIUpfY1ptjn2sjy9VpzTrXcF1lkVlXsCsDZwI2ucxicYQE9tFiD3H3PMJ5lvVc7DDw
FxYjmlkD3zlsHD+C2cljaK69juv4iY8VmhlCt4m4cQR9dNhZjJKUYaTFFfAbc4JW6LbSfKky6PsyGRCyhG9QAERl7DR5QDjghCx/rhajyHCGCHJSTzWxlkY+ZtOWlKg2DoAXFQDflQkt6YUy44sTWGw7So1BZZIpw0Zfk+2TyBLrhV9Sz901YCdgn9aXTgDTRay/2D90zNBzTZlZkzowVMocwYB8YwYDmxbU7zM7fegQmwEYPeegGGqfEVVcMpX7Tj8dM4Mq925l5FCcBPWzrCN4SAAg72dcfq/y4aysJ2L2FZSpxpKu5vtRpGQtuEuW6ZcfiyDJf/Ksco5TEp8AEsuPn7mWnwJ+yvSrt7eJWUS05KcQhYsCQ5fP3gF2gXOkPmeI7JsWS7QErBmAr0qiUknVxCqsjyOGYqdEXA9vjOxzAAEuAiHQcpJSkZzE/Yw4CoNuZFWUBTP9UoJecegM9vm2Keoj6xo2epH3dE1id/ZjQD9G7PQjFiHgXD+mxFBdb/I5DljIOd7qPLOuZYIigO8ppqzDUgIagCgS0/Dcx2g4oWDtWa1tbUAIAc9//vPx8Y9/HESEW2+9Fd/2bd92r/YVY8SP/diP4Qd/8AfxPd/zPXjIQx5ySW2bz+d48YtfjNe97nW4+eab8V//63/FQx/6UADAk5/85Hu8v49+9KN48IMffEltui/s3LlzmM/nuP766y95Xw9/+MOxs7ODF7zgBXjHO96Ba6+99pL32fc9dnd3cc0111wyyHru3Dn84A/+IG666SZ8z/d8zyXtaxgGNM3hhKPvuOMO3HHHHbj77rvxrne9C094whMuaX96zz8s6/sep0+fxg033HCv93HXXXfhj/7oj/CBD3wAD3vYw/DMZz7zUNr20Y9+FE9+8pPxjne841D293d/93f4zM/8zEPZ15Vse3t7OH/+/KFc98Dh9tuv/dqv4d3vfjfOnTuHo0ePXvL+Dvt6+Pu//3s8+MEPvuR93nDDDXjzm9+Mpz3tafhn/+yfHVLrLt3++T//53jpS1+Kr/qqr8K73vUufN7nfd5lbc9VzfSjcQRZ1t1inuvB2QVXAnOoYKLYoAWsfEkzS6yQBBzqwmnvPMLOWYQzpxDOnMJ45hTCudMI50/z+3s7Av4FDlRLcJozJaVOjAX8Wpaf0+VbBoiQgmuRHLP7tEYVMosks4Ayu2jq2qkBP0/MZJnKwk2sPAX0bPDIyITOJatzLrJ4+0PAIKAjkBfrunBXwC89k8nuVFknzYoNA5/LYcEMAwmKaT2K3FifannRxmz5YReuG5vCBmhLxlpV90THjO0yZZUps6rOrNdFNgcy83mpWV8Fs3Eqi74Y4G6ZaTfBwIMdx/Y4DDur2JdvhWHaCFtrxpn6EsQdafqhDC+YxT8H+pbrHvHArUFAk808kRm99FqzpZGvCXt96E0iMdioBFsVBNQxrAFfPQeW7TAlg6Y4t2aOKJPD7j+xIuIAN8y5vme/y3ORzkdDz8zHccj/6xivHpalkJ4VaMVy8OpCmc4lO60E/GqmH02ciyVZ43pcKVNU2QX2oTWe6s8OMDLgXqrPZ67hVLNLGH2xO4K4sY2wsc2vu23EjW2W9uu2MLoOfXSYj5EfQ0AvwarEUh5jqlkE5LlRmS8z71iGTAKUXItomsWQxqSMDyfMy8uC+U3NBYf9WNv902oAQWpAxUUP9PvAkAOzdo5IjJZq7soMOJ5HnczNep0AEjRfyBw6zNmfms8xLtgPct6hmTVoNhu02zN0x7bRHWPZdLe5LbKeG4jtJtDMMMLJdW6vbVuPzbBxoElP4PkhxMRkYYn0rKqgSSZ6LNYCstxnkKQr3ndcAkKkY3M7lN1iATbtS1GaINvmMMAjiJ+RH508GuJHYsdDgQFKoIqv7mEJfBwNu6b2Twzgl3wRkVC2yWjDBR5JVUIBqSmgrP7tWsnBAH5xHCURbFFKLF4geWpVYmi6V074yAnPFP+xXmPo64u+79H0+XGYvu9D
f3tq39qmuq3GShXRfAz5fwX0ndTVMz5RVMyorgfO49ySx6ZAwATyix/WegH/vEMnj6ZgqebrdQoMqk1ZjcUx03Ji20H1+wqmI+W6iwDKTOIpn9dctwVrbcVYLNpl9pd8XmTfjZOOJNnIrD9swqX6tWl9Jb/LiVWldDrA8yp5B9818G0DNyvBPtrgRFNVc0g1TSWhNERgPgZh9uW14P4oD5WBDZkpbS3NxaaOe5L5HPr0YD+6B4Y5q++IH71U5/5TYWvfam1XmN1www14/OMfD+cc3vSmN91rwA/g+9FXfMVX4MlPfjJ+6Id+6JLb1nUd7rzzTrzlLW/Bb/7mbybA797aLbfcgj/7sz+75HYBXP/tx3/8x3H77bdf0n7Onj2Lpz71qXjFK15xKO16xCMegZe97GV405vedCiAHwD84R/+If7n//yfhxK0P3LkCDY3N/Ht3/7tl7yvN77xjfiO7/gO7OzsXPK+hmHA29/+drzrXe/CF37hF17y/n77t38b3/md34m77rrrkvcFAL/+67+Ol7zkJff6+zFG/Nmf/Rn+83/+z3j5y1+Od7/73YfSLoDnkBe84AW46aabDmV/X/u1X4vbbrvtkvfz9re/HT/7sz+LV73qVXjnO995CC0DXvva1x4aK+zXf/3X8bKXvexQ9gUAX/M1X3Mo/RZjxCtf+Ur8j//xP9D3/YW/cBH27ne/G295y1sOre+e9axn4Zu+6ZvwiU984pL2c8stt+BNb3oTnvvc517SfvS4XvziF+Mtb3nLJe0LAE6cOIFv//Zvx4/92I9ddsAPuNqZflWtu6WsSpuxaj9CYE6G1qlSq51tE7yiGHP9uP09Bvn2dhDnuxh3dxHGkGrN+LaBn0FYZm1i/aUMz0aBkyw5l+tFoZSxkXYtgVLINTNcALyLAlAAQW7odc0+ZazkQDV/xxNhYRZiTgJEq2yUtHXngEBcc4R/M6CNlGqbARlALNkvB0h6TrH7VFp1GJIsDddglHztBqbeV5sbKnW/eNGapQGT/JdrisBMEaS5gNU+UwmmZDDKASnoTyiZVWSOvQZXanmlok3OFYysqPsEYM/cUpa5BTg1oINS+kknvKmwij0WT46zbqW2CilVVr84xf4j025zSNG+cUBAh2QbzVJ3lIG5qW0d5c+01hEz/wBlR8QYESgHom08Qhl/kPdJ2H7KknBAGscckFhMBkj1uTink9p2KOojRu2PagyQ7TNMsxVrqxkiJSvR1PGbAPwoxuVrw7I29VAM0yHVdJS5l+uoLrerkGvT3STgr2L3eQW6RV5Uwet2KydSuCbLDBsWdIhIrL5hVDliBv8yg0H7invYG7ZOZjQre8+xwh2VsnnA8tywtrVdbbYkL6ZSiqmG2gLOe56HwsCDPk1pAeQbxDDk5Crdb8xztyYC6X2Fxn1O8Bn2EXfPI/bC7uv53kLOwXce7fYM7dYM7dEtrpU8Y8BPWX6x5WQqBe4YiBOggSofyx4zdE7INftUtpMPSxOqYiEHXKj0yfGR/CZV+7fPBbBXM9maJvsFkVlt6ivZernp9RT4wJ0GIhEgJ2YhAvW9HOketuSPTDCWCl/UsKkHAQMWIn+66r6U5snI82cE318i8v2iNooRhXR7kiccE+OPnAAbTYs4zopjqafki+OI3UM7yG+s/bdVm5mG6jlaub/aV6IJWcAazKqzdO1+bN8rkIlSst+yXGtZTwvwWRaarjusZQlaVyQWZj+aBMSmlCRIptbb0vGZ9l7QdOxa3+oiLYDPyxT7s9xwWKobzyoyKI/BVeOCXOHcpcRJAE5811T70OVEBlUbsFK+BGRG3wq/mjz/vk+ynj7XRt7IiZKa5AfKtSHHoEmgQR6xqImcz3uAG9W/ltqPtg1AyYwch3LuSzYCQRe8fO7iKOvDta3t09yICD/90z+NW265Bc9+9rMveX+Pf/zj8SVf8iXY2Ni45H055/Bf/st/ueT9qL3vfe87tH294x3vwAc+8IFLqnm1
t7eH7//+78cjHvEIPOYxj8E4jvB1+aB7aJ/xGZ+BV77ylZe0j9q+9Eu/FF/6pV96KPsiokNr32/8xm/gE5/4BM6dO3fJNbluuummQwOtAOC9730v3ve+9x0aE/FZz3oWnvWsZ93r7xMRHv/4x+Pxj388PvGJTxwaGAkwOP+d3/mdh7a/P/qjPzqU/dxxxx143vOeBwD4l//yX+KJT3ziJe1vHEd85Vd+5aEx1p7znOfgOc95zqHsCwA++MEPHsp+iAiPfexjD2Vfao9//OMPdX+/93u/h83NzUvez9d//dcfQmuAt771rdjc3MSrX/1qvPWtbz2Uff77f//vD23+uFS7MlpxL8315+D2kYEEm60KLC+ulFFWyXVq8ALgYFeSXRrmDGrMdxEE7BvvvhNxvoPh7BksduZY7MxTXQTNlgyzDm6r58BD+n2f5SRV0rOdcVAGkmVdB9+lbVGZVIAE2Byc/O+I4BpgoV0AQuMjhuAxBMeLMOJafnallcG/cjHYIge2ARSZvPo9tZAq2udFvUrMtA5JwsfW8rNMN81CXcpurwG/MDCzQGsuNl1iUQIA6fmvGIC6oKZuxkBCt5EzmH2XM9SVJdcIu1OAA5Xu0sx+XYtPSmxBWT16HkqwrwZZeKMcZLEyljZTO8kATYGRFTOLYkCsJHty/UITpIMcW+DgxWiOL2czS5fGWIKYxOc6Bxga7o8URNFzWLUPSPKlKTAV5fx4ufZwAZPfIAP8pcMkkrbm/YcYs+Sl/iT0swwY5mDWcgvKIBzlYDUxQ4KGOSceCBtVAevi/EnfUwo2ra45GUkCUfJU18JL8eeY/7f9UB+DdWrqwFoNsk8GfFP3ORShGgta6ndCGZRbysifYCSQ9yzfmcA/Ae1FmjcSGYBeZDvJJWaJBfsGOIwjt1KD9vocI1Km+RCisHc0cGnGiiORfmPzqc/4RStvWHZvQUyJLAc7Ep8YJ1Ps4mKQ2cM2Tfu/L/e/tvuvqVzvqBk+geW0YYB5MQpjAv0jUQ6oyz1HAXVloSjzvXMkteMWoPk5uP0d4PxdCFInOSwGxBCY5bc9Q7M9w+bJY8zyu/4G+GtOwh09DjpynGU9t04gthsIzQyjOEW5nl0OhjPAJICkshPB88X+oIkvUoMwcJKAk+NTqV4LoJSvRQ488PEq5EjIAfyo9wPAzMH2vtDk94c+syfJSbv1njwBWli/VvxHL9KgThyNwicZe1Mnqwq018BoAlmE4S2Snr2ZTxfjatAv+0OU+gbgBJxKEbW4D1kJ6MT2sxKfwwLRiX/YdohDZkVGlPdNvfczc1+ZmfV9k4GnpAQwAfCwBL8k8tGFGWiT/RhD8uUjlTKtU4li+p0DGYwmSXDqPCRZ9Hp7gC9O894iZBCP67DFJT9NGX5ABoLGkBld3hFGRDjycN4mARJaT+iEQa8ys+pj6Rwx85SZX6lzXOnTrgDv9Ph5n5L0FkheS2KC6YeViU0XsoLdN+Qk0eKa7vjadygBx5R0KONAfG5l+UViufoYSYA+Tqx0pn+TOovLzN6ytmW5NiBZOyEEkDclKaT+PDOnt9IjNBvsbzWdqSPPSRW5fh+z/RT0c0QILsKRQ5BDTYlVQFLfSMmfieU3FmM8AYJm7inmynG1P32f2dq3WtsVaMePHz8UwE/tMAC/K92+7Mu+DI94xCMuKSi7ubmJ1772tYfYqk8fm8/nOHnyJN785jcfSvD/sK1tW/zO7/wOjh07drmbsmQPeMAD8IAHPOByN+M+tycYidbDmN+893jkIx95yftZ26XblXbNv//978dP/MRP4NixY+j7HovFAm3bXviLB9iVAvgBVznoR4se1O/JPw4xqtThBRZricki2etADqhEZLBpvoM4LBDnO0m2M+yeRdjdNXVm9tNum9kGA3/2t5Rt5oWhonKM3nT9BPMrH+TqY0nBe0cIAGjMtcy8UJQWDmjBATtfR1X4yxhjRGsa3Zr+c8KEcnF6EZJYTy5LzGjmaZKf
cTl4kTPLBdybYmlCAiDm/0lwIGV9rwb9yHkOAhElCcskVWPq0SQw0DUpMKTBizpwYll9+b0sxwTk4IXdPh+MLO7TywrYscEHG7iJOXglEGsOZkSpmUZTafJ5X1pvZwyZGapMKAtuWvCPJBKlQagggQWlwUUAzSqwfQJEIjnwBCLJ+VvCNG1QtOq79C8R190r3uNnb4KwCWeM+hkfN0UNfjH/15fQVgneGgBXAy0YhywlZjPRxwD4pmBjFMdyUKZ5Ot7pa98yM/mYNEi9bE7ZelUQobi2CqBu+nrUz2L1K0WQuGAJrg5KRsoh1EiaMCHXNzJom+TFbO1NPwHaSx2pxM6JMUn6pXpEwTwH7b+Y80UENK5BvpTM4FAkNgDLtZ8APgceynogYIwIDhesp7q2tV1JVjPPyfki2F8Es+31L6yyyF9a2i9fX1ny2kPq2g59lvXUmr2a5OMcyDk0bQPyDt2xbbRHt5KkJ8t6ilS6qCfofQ7I8zVReS/WNpIE3AuWN5S1xNe0ZQIXxzORqhIg6gcxMlh0QPw2gX/1fS1GTgQKQwYppb0x5tpgoGhqUFdzN0VEx3OxfkebUigO2GSPqXl7KnmjSiKytRr13sRJNWX/xJjBPg8SpYnV/aPyseyjm3YI4Lf0nvquE+wmK9MdzCN93SQ4XUg2u+gL3f/FbF77A8jDQ32L+vUS+FEc1AofohpTq24/dlzVYLEmQun5TH5xmJaqtRK2FvSzn9uLQRNnNIlK1wx6naY5wlFxnpP/UF83F2LtyXVOct0sfWfqtfG/bUJV8qfsObHqGxakBiRpYkw+TvE7VKpAsNBAzPUMgVRXvDbvyvHKiWmi95F8Mbt4kWRI58H0u7xezrWS21SWglJSpEulMcKQx4Qy/FS+07L8uE8oAX4X5QLZmtKxZEKn44Fca6Pneqd1LfS1rW1ta7tIe9zjHofrrrvucjfj09aICG94wxvg7kmSzafQXvrSl15RQftPR7vpppvw8Ic/HGfPnsWXf/mXX+7mrO1+bH/5l3+JGCPOnDmDT37yk5cM+F1pdnXPZGc+AYzbWbKxO8KBDv28kKDLQY2iVgog8iIxSwX1HHgK507z6/09xH6OuD/H/NRZLHbm2D99DmExYJj3cC3XQvAtdyc5Z+RRZqBUf2ozydLZWoEAVgN+mqUp24/Ev+ErFo6XrNkxAHARnWO2CZGXhbpLGbuechH4VpqxMItqVwEEQbK3p9h+Wvtq1nAtjo2G/++81k/Li/lWwD+bWV4csyywk8STZvCS47R5oBixk3XhbN9p8GIVaEDE7D6X5amCZKwrYKBAmO2bgDIgVNRfMUGjOmZUBEIN8FdbIbdpwLna7G96OVZf+U3R7i9k5qIF+vR1ZjVygGMV+NVI5rQPQOMiBx4ct4EgQRkHYChrNdbnh2wfCLutOL6JY8nHzoBnBBJYR0QM8NW9Sstj2vYz119SGSsqJd2AFIjiABRyTcBRJD3HvqwlaoI4qzLQl8bsQQGrCrC156dmoepx6fjUa9WrqLHthin2wCpLAZcKiJ3KyAbKYOxE0DiF/3zHQTLTB4mlYFl99TVsZJEHYffxdWtYfgbsU6ZfNKA3kG8RzlROsuCeBtVSYLa47s0hgZIc7IiIOAID+NrwAdgbLgPoNwEmHPr+13b/NMvUaxr2sYZFYvoBYH+JqKzhp8kzwTP6rffgIEFpT2gBdMLioXEf1O+B9s/Dzc8g7p5HOHOKZdT7Ocg7NJsdumNbcG2LZrPD1o0n4Y4chz/5QPhrTiLMruGans0GhnYrXfsAX68NiSSzOTwL4Hlh0w8SzPZO7pUydwyBZeuyz5TrZwWidM91JrskIGKMIvIZcy3jBCYRB9FJmeDIQGpiUUYBRBX4021V1ljmRNLzZZ+B9BnPp4P4B8IgHLX2YhVUXwX4VfduTqJqEhtM2T9j5NfAMntOz4eykuBikt1OFsKSzxBHrie5xPBTYEXHpeO9xXGcxFkDNNkpisKB
3BcMa10TOQiVXCJlpiiQ77ERyGzLg+ZDTTCcSvaJgf23pQZnRRJdHyQfo2LVFyBVqqXrkmIFUIJWhdk2mUSzMWifQZiRcdIPrW0MEf3AbRtCTMzYrnFlbU2Xwf/Ou8K3ckYVxCNk/8r6COLDF0ooK0xZmZpwGQHOj1MlDXP8ufZ6lrOc7Db5HsWYGGr6HIaex6W5pmMDTlqcYnFV9+p8nfAvjcR+LUXiJLxoxmoap8g1xR2BxqpsgBNlhaZjYK9ugryfmH6pzj3Le6oSyhBCqtk5r2q6z4eQAF8v5xXgeVSPR+scK5hZ+I/1//YaUNBe+350XPYhXsaafvfl/te2trXd5/b4xz/+kqU413bv7Upnk64BvyvDnvjEJ6LruvW1urb71P7yL/8SAPADP/AD+Ff/6l9d5tYcvl3Vs1ns9xGHNgWi3GyfWV2SkahBkfwFCSJopqhZ8GlQIcx3ELVW35lTiP0c4+4uhnmPcd6jP7uDYd6zrGcICP0Acg7RB5D3aGYd2m1eNLmto3Bbx4CNLcRmIwN+EsS2AYPolwPuZAMFpDW+oIqaS8EDXQBSVNkU3niMvPAOE2y9lBwOPxmkAYQx08Qijg+w1J0uznnxDnTe5UWoQ66FRUiLuKXaHNUCJ7oGRAFRa9e5qXx62091hrv0qQTPYtMhZaXr2JA6NBoYGcfMElLWkAIsalqTwiEDf7ZHGW+N8joDLDbAeDHsLP68BHPqU+PkPDMjMgqbjfKCGCi+W4N9enwa2FQgSYNE+sslgMTjK0QJjJL8Zoi8eFfgT0EdYjbfVC03e644yDcm1lcaD1Xdu3ytNAk0VKBxBCVWal0/xqXv1+OfGDxzmf2lAGABMEKDUHxdtY5EwqkMkvK5qGpDGkbEhayo+bdi0V+epzxG62CctooQE1C90lWqL+xVpm2aAPwu+L0YUv/oe7H+ns16N0HtxMQVKbkIljMeQwnmWfBapWsXgceyZSmoOWEaACjAPT0NfH2bBIeLYX4ASd4PYIbfiIh+zfRb29Vm5DjYK+AROZ/Z9KpeoNsKIJFADXlPAYrWNcLyksQRQmJH0zDnOn7zXcQ5KyrEBft0zawDAHQAmq0ZmlkHf/JGuK1jcEeOI3TbCN0mYruJ2M4Y+EeeD2sZ6Nos20t9J3uZq4TdfAg5+acBQuSAdqSY2GupKwT7Y6abACar5g57j0hdORaSdeQzCGhBGQUdIrlJFlg6F/Z9TTJTwK9ui/6O/c6KZKrkO6XkiqyOYM+B9SGyh1ICfsk/TGPGvh5zv2htSVsLVplLB1gpgW3Oy8S2af7XdlH2fab2m+TNDzCy14U1PacTCWw1KFvI4BswJE75FgJWXRDwM9vrs/oU6VxGBkovxkvQ7yjw2w8Bo7D1xpCTcIDcp0RA43g9kZRCFPBS4FvNHKtKf9trYupatz6xZfuxzKaUc6CSbVfXCKxZfk7bZgFYeR3HMQN+Ol4BUOD5k+LEesaMfUdOktfkkAUAtH7FlF2oPg0Djy2w6JnBV3/eirynLZ9g/Nf6eh+E3RdSkuTqMVarJKi865JdyJfUbYq5Yl3Tb21rW9u9szWIsLa1Xfn2xCc+EQ9+8IMvdzPWdj+2vu/xoQ99CC94wQvwQz/0Q5e7OfeJXeWg3x7ifpMWWHFvhyVJuhlnK7Ydg20TQXR9z0rbxWGBuHMOYb6DsHMW49nTGOY9ht15kvIcduYY+wHjvIfWSNBn1zZwbQPamMFtHQNtbgvgN0NsZ9wWYZYpsKDLHu/zIiyFRXRRQ1l+qgCaqmNywuBzsmgmz9msPmYQoA5N6PsxAsFRCpDnfQIQFkus2+w4KN4KGKJZuzabu8hYHYcisFNkoU6dnxiWGzx1LidYZEVwSjN3BehDWrxmGaIM+BiGn1maW6BvSrZLA31B+piZWGQ+531dCKypTQONNQgVKctkUUSKCkyBPxZAtMdYM6As+4+3leCMROwCSY0yBy5L54Eh8HleaAsdoXENVC63CGpN
Bb7M/5lFNh3aSFKQALSmC7kmZcnXNXGkOfk7WAZqo45tx8+j4yCuBjDsNaeAH7P8spxnMYcIYJUyxVWi0gaUDgpsrAgeRhgQF+X5hPk/HWvk+SDKeK2B0PRzq9pyqZnGVmbN7C8i8MQEgGLVL0BijySwj1wh4TmMJYvPXq9LoB+y3NyiApA14KQsBJ2nyNRtXA0Wl2YB/MSU1bMReZ+rEiruS4uaKHIf7n9t91OrwagwsLfYtMvbGUYWLEstiqzk0AMN0Lgm1/OV+ZPCyNKew1zqJrOigjJk/GwD5ByDfVtbcFtH4U/cANo6ijg7wuoJ3TZit4URLl3nysrSW+PS4clnltHOGIB+wp/tD8zy0+C2Zfs50jp+IlVJObEiGkcpxAlGm0nYSkkiVsLQMiolpk1Nx4oWRvITlCU7l3wgZ+6tsRLanJC/nLRVfpX4Uuo/WXnPqcQh/W0P8THlvpuAH/2tVazDYEDKCTm/xI5UNtOqwwGWfJxJBQtJaNL7gtZYS/uJmTk/VkDxlCUZ9qIxFTgOcz8+COyrAFui6tyuAKsOtHR/tqoH2e+wu7EyqHpsU2YlPvX1ItT3YU0QVDUQ4oSAGPJ6wSYJ2mtGZCcvxvQ4kizuBeppW5af7cHkEwA8J+p5UuWSYUAceq4zKWzUNB7DiBimqkNOm465g8C01Gzj5+ojHbvMLySlJtB2iMNiqR08v4i0Z9MySGjWT/l65zYtQkAQ4E/B4SlZV7Vc7iH/MvEi5t6ZjpHLAPqtfau1XS02DMOarbS2ta3tqranPOUp2N7evtzNWNv92G677TY84xnPwOte97oLxv2uVruqPYE430PcaDPoN99JhchpYwY0Ldxsm8E/CSAvmcixjLtnEfd2EM6fRtg5i7C3g/7cLhY7c/TndjDszBPbL4wBcYzwnQc5h+7oFtrtGTaOH0F3/Bj8NSdB20fhj57A2G4idluI3TZCt8VSSMNyTQy7iM6ZxoQAL+vdmCQGCYAzx5LqVZl9escBFocctK7rVQElqKTBEA0hOCzXwbGxc92NSrZo7T7df5Kf1IfNXAaEiefzeTkIgBCQx7KmCpCjsnQ8KOvXcT2KsBIwAJYDJUSESFF+hxLwRzEvyhVoATgb12mqP0qAZkpKs7Y6iGLHA8Dndoy5Jg53XRTG20QfxOU2aJ9Y+cMsa5pZnSoHC2RmZ/C8gE91j0jPAu+THMG5BmgACo5BcaKV9d8mJcTSwecM9BQ0ixEx+sQoVNkyjbOWsovmt6ay5yGBHAWIfZODPijPT6qDZAPWlANEMPuxYHMKMk3ZASy7qYX/VBBPr+EMbJbAu0rLkR5DGCb7gn9zxY3OXqMGtEvvAZm5Z89jFXgujkWPse4zw+pTubjFIqTXQZ7tGFaAT8ewyhEvzAnUeapmKGv9IO/4GlO5vimg1M7b+frKkrBj4Hl65Isig39rS/a6170OP/ETP4Hbb78dj3rUo/Da174WX/IlX7Jy+7e85S14xStegY985CP47M/+bPz4j/84nva0p6XPY4z4wR/8QfzMz/wMTp8+jcc97nF4/etfj8/+7M/+VBzO/dJi04CGublXN1wTbjMDgcGwblLgVT6jGBGHLONNiwbUbOT6pwbEcP0O4t55RJVRDyMHpzvPyVsC6Ljto3BHT8BdeyNiu8WSnu0mhu4I+hAxBp4j9P4MZHAP4Bi/Buu9axIDP90fiRKgoSzh/SRfl++sizECCGgjIUZGrGySANLvcW2/SDxHEWLy3wrpOjVVNgiZxRYrgMt5zwCrMs2tWab0hKlMfPqevb8I8zqVBLb3S33L7F+D3lrrUGulDgKihcq30aQoAj97ZBadMuoQQhofFMaUIBZNfyiQAiubmDuHx43zoBqcvoBpfbnynimJaxPbB7CqwxjLpJCp+7NVYEjg3xSjD+BjVz9pCujTZDlzDiMA8o3cf/3Fgbl6HLH0lxTw03vnaECdzJ7V9QRLdDpHcIGSqocay3yOCeTZ7Xm8
LcaQ7ssk91stCdCIlGftlwAGHNfj1rroE4mAq0yBP/bZSqA1+VsmMVDv8WpkWIg0DqBxwTLvYQSNPYP1w4KZynoNQ32zixuTJL9DsuYjvXaI4CjXFs3+3vRx8vVKPN5cA7Rdqn9H48gA35DbRMLu07Uzug3+nmeFlHy9S8JVKAFJR8x+Hl1OjPDK4PQ6VvIcMDlNkYOuYjSpLyW+TsxbFEbQsGb6qa19q7VZe9/73odTp07hqU996uVuytrWtra13Ws7cuTI5W7C2u7nduTIEfziL/7i/Zr9fVWnk8V+znJQQ18ESWIYERc9Z4zrAmwqe1iyaMP+Hm9r6vcN8x7DvEdYDBj3+gT4jf2Isc/7cF3Dj7aF77j2jQKP0XH9BPgOsenQjzFJogwBxUMLoWsNmSGwJNwQIvox1x/RelV5cT4NIOl6iiU2kQL/LKHDcjqdp/TY8IRZQ9hoCDPvsNlwfb5Zw5/pY7PlbTaa/F3d5yTgV2VI84cuM6CaDtE3/Fw/2hkDpu0MoZlhdB16NNgfI/bGiL2BH7uD/D/y690hYmcRsDsE7A0BewNn7M+HgP0xYj5EDuSN3Lf9mGtUDCGaflX2jgn0V4GsBBRWj7Q4jnyuLQih57R+6O8vTKBFM2stGKu/GQyzTwHO/Pu5FksRmNOAHDJYoiCKgiWLkbN4F4ElzRYhpGPR34wJ5EB6PwOo0sbEdhPmlvMlEFZchxzEpOr1JEho/i+Yo/j/s/fn0bZlVX04/plr7X3uufc1VbTVII2gX1EBFbABTRBiaMSGIUOJI8aGKERjVFBEFCV2Q+MIGZoWY2xiF0g0wk9koEJsMBCIJjqUaGJiAypVSFNVr959556z91q/P2az5lpnn/te1b3Fq1fsOcYZ99xz9tl77bX3Xmuu+Zmfz5RgDFAHVnWf48CsknHDr+GovDaXQJtLCOtLoPUhwrACjWvEPNiLa1Gup+VppU6kMtRy7LkGit7fwb0kYJoVDNSXtyvMYCdjIZDVoONgJaSmpmbRc20cDqgOFVPxWLDdX6sJgC4TleuqzMapl9wL7ctq9TV9hrjAOgFrGQennhf9fDUkXNokXFyPONzUr6OhvPi+LoA2SWBXGcoaeNTxcLHj1duYJ8yEUGexF2njEtS8KklDzfW6R1530V7zmtfgxS9+MV7xilfgf/yP/4FP+IRPwDOe8Qy8973vndz+rW99K774i78Yf//v/338z//5P/Gc5zwHz3nOc/AHf/AHts0P/MAP4J//83+OV73qVXj729+OM2fO4BnPeAZWq9Xd7roPe5tKOFAGrj67Mt6194KOyRwI34A2K5PwhIyr/NkatDliSbz1yoAPA272lghnzrNU+rnrGfA7ez3S3jkD/PLiwPykUoOX2zEpH9ee5o5tlKmmbBbvE+h8yr/nZzzs2FdyygkVHDQhrUlOolL9WGO4eVlLN++RjudtfeTQPJ/at/779qWA4eWea3e91R/QOnkK9pmv6pQU1G/Y2h05ILSd03exnDyzL0T2uUPk/xXwc+fRMv79+6nkDn8tNYmmtYTiH2qNwOQ+99+rf7aT8J11bhZQXF4kEriaZNSC5rsSp7S5x2Wrtm0xH1J9PPtf1QXKtgr4qZ+hL18HV22sfNnyPHjZe2N76vlO1Sz0L/WPmnt1F+uw+njLj+lK4pb6KlP7IFeT0/d/Kn4mUgtSp0lm6k7LqRwDhWXanqMx+3YAfvUPfL8JsNcv+KWsPmX4xWjPU/HxAhA7ebazjY3q9wOubnsk9CHwy783lQytf36ZsVmua5UU1ibXSX/dFZD71Gz2re521832obFbb70VX/AFX4Drr7/+ajdlttlmm2222e7V9rCHPexeX2P0pHZNM/2GD9yC9R0duvs/kAuPdwtbDOZhAwqJg0kAaCEXUrNscwKOLmGUGn7pztuQj1ZIF25jKc/DFdZ3XOQ6fhcOsbl4hM2lklEY9lnKs1susDh3Bv2ZJeLZs1z8vOPaCLlbIon0
1BodVmMywEetsOUKG0+/9ay6BQgInDOqNdw0ANWyubwpEGcsQZfJ3NasuqLFE5XaGYXpImCU20yXLFpTJxKkPpDL1myyaX3rEwBkrrWXAcnilwxTx6rZZZaw7kAx3mUBzioAD01ASJlSIM47TVyYPlFh+KnM59g0Y4o9qddJM+GHhCKJ08hL9TGUjHgiRLnuAU0jUdoAFOlQf07t8X1tPy99OArgp+AdACTptBCc3CUBgaS3gkiLJsJIqKJCmYAcpO6eSJIhJ9A4FEYBhRLY4gNWGewIQaRMmyCNBiZlYUzCfCgXL5TglwvKFBaBY+opMNgAW1nqmeSJoHZ1nNCV9jhpSv+cqFWBR8+O07YGuIz/xKeh56gv4tqFCIScC3sXQAW0W02cBuDbYlv68yLHEmnP2UDIHWCAOydtv31m18LfHxrVcsxKYd8MKWMcSrLDJmWT9dxI0HAzJquzpX8VWNfvS9M52LTXAX3mpAYNUi0i/112BcRT4LgKKPkgFJyEXda6gQqA89/gxp1AnO1+X7U77rij+n9vb2+n4/TP/tk/w1d91VfhK77iKwAAr3rVq/BLv/RL+LEf+zF8y7d8y9b2P/RDP4RnPvOZeMlLXgIA+O7v/m786q/+Kv7lv/yXeNWrXoWcM37wB38QL3/5y/H5n//5AICf/MmfxA033IDXvva1+Dt/5++c5ql++FhOyN1yK6jqpdLTYp+TMzZHW0FYckF7ZZrkeLSVrEHjBuniHZWkJ+2xPDstzyDsn+FAdQigg/NI+9dhPPsg5MhJVGtJ5imym4SOSq3O4Nh77cgVxNfqZCxVnwPg53pMXMtvkzI2YzKgI+Vc5kZlhIXtun7+/ZbEp47Fft4KEQgMGFCINcvP1QfzSS5+fCKIjzUVJPfXT62d04j3mdNQWPUTc+O271b8UU0iUt9GTc9dAVJl0fl5ysCeyyWj+P5yfynGCtRIHoiGMgpJZFkhteSIJcqNCV8SNgK2E9b0fIuaOZX6rY3vpcf1dZdjoCIzmYp8pQf7vKSl3SNtEpRcD6tPlxMQWfYV44BMRf0ARJWKBd/nXId5BFVrhpJUmA3gad3sKIytmCCATga6IMlhhJQDukAYpfMU7Lu05nt9NSQcDcmeIdi18cCv+NDe11O1Dzn36q/2NcqaY5dZ/7fPhvydAmgV8IvSRk7+2kgi2IbHO5H2xLDZBvV1P5IopW33z6MycacsgDAK808behzglzJcQpWUjkgL0ILZwgngNnZ9aasC6IslywgLy08TPXzSg0+s6ANhLwb0ARhzsOeAJZCBvS5g2QX0kV+TUvN6L4fAKh66PtBzahP+4O4VX/PxPmazbzXb3bH1eo0v/MIvxF/+5V/ixhtvvNrNmW222WabbbbZrrJd06BfWJ5BXETOrpRaMAiRC5IvliUokEZgfcTfCW0zb9ZIq4tcw+/wDqQLtyGvV1hfOMT6jkOMUr9P2X4AEBcRcREQYkB/7gCL8wdYnDuDMzfeH/HsOcQH3Ihw3QMQzt8faf9+zFLbO4sBwQAqBXMAzYaW9xKIyFJgQoEtyPZjlriPWxPqAn0qGVpBK//eA34GCPi6FE1m9Va9Lc2wlYACL9SAlCWQkV1QQUyD3iy7xb9VliIcCOUBSw9i+vpd/m8WcIC3d8EDt6BsJVSvtNB8lGhggoKm8j5zcCIDVt/PpD21z+AAxszXsW17ysBqGJESJFCibeDjb8YsbCKuGzQmQgjZbpw2EDUVbNJ+BICm4kv13VQfANzvwR1IM7p3EdByFqDPgEeOvJi0rMj0ZICBP0olGAxZwAegApWaoMj2QZNciCJZZrUgdZ+NJFYF+Nn9P5T9aTBo5GAHkUpONgDgDrAvSzAUDhQCcLzEV9PWcpEEv5TPVNLXahe6bHCCkx9NG5Y8kqC6sQhaVoDvX5HqIwjw14CvVbdvBYtrQAw5AVGuiQZvcqpr5+jxXf1DBcWVCV2BfsJ4XY8Jm5GZupvRg36Q/wvzlPuG
uKYpsbQenMSUSrn1gbAQqc/o5cVa+T0JsHJwTIJhIL62OWMDTgiIVFgMBvpdBaofy/Ddc8fVfT/0oQ+tPn/FK14xWQR5vV7jd37nd/Cyl73MPgsh4LM+67Pwtre9bfIYb3vb2/DiF7+4+uwZz3gGXvva1wIA/vRP/xS33HILPuuzPsu+v+666/Cpn/qpeNvb3jYHpu6m0WaNfOYMD+qIJgtXJV10S062GBkkohih8otZnyGAWSQhgnoHBOaMPI5Irv6Vl/Sk/TOg5QFo/2wBGffOIS2vwxF6rDcs0b1xSggBHBwnZJtLlVViPhdQ1VZTgD8TGQihTL5jcAOEQBbA1lcF+vm+pAnWITHkUoEWMQIpInfgpA+/H88Gr+YNBUfcfp0UeqnH1YB9E6BJAdo6djbbpJDm2FN2HNhS/FBy0vFUSXtWbD+AmY3eQlMdsWH8QVhKdt6uzVyjj+XKk/jAWSZSbVugqbSWpgtgU+9Wol17/uojas1HynWijs35TpHAy5y2zMdqXs7sQ5kMYhq4LSmyrDrY79I5yPulAIsoRtS1p5M7p+y2nXKdg/iDmtCyF3mdkyJh0QUMkijn6/qth2QqEqogwfLY3D6Sc8mN/7EF9m1dlFSAbxQfWc99a3OU73fZlsy+vDxAXTFsPUivUr3AFji9dR6WzBasTVPtKvUjGbjWc2sFaM3vDOxfWX/G3u6fsCfPlm+ntJv6xbYaA4VK4hUQ3ykyyHeAWIGB2l5OtmKwbxmDPWN+fTrFcsu+e3Leeparv1elpt/sW82+1b3XXvziF+Mtb3kLAOCGG264yq2ZbbbZZpttttmutl3boN/BOZAESPKapTwRAmixROwkuzEE+Hp/vCgeWfZTa/hduojhjtsxrNY4+uCd2BxewnBxhc3FFfKYMK5HUCTESOjPLNEtF9i7/iz2rj+HvevPonvgjQhnzqG74WHIZ+6Hcf86pP3rgLjApbFI/wAShKYCQClIljOsdpzPygUgwAkzujSTOmcOePlttmqZgSzAUDKdJeggMoU0blBJCWkQQrIubTFaZY8TtO4W+YxiAAnCRHLWyvlkCeQrQOZBP/u/qtEFJ32ZTKZP2TzH1UdszWeZqjxRuS4MrqXMi1lkyPXg7fhaceBIUqi39q9tB1ReswYu9RyOhmQymi3oF4mwl7l+xn4XEQMf9ziV4V1BjJxLljB2bDPdT/X/GuBRCUPaEWwgZIyZhFFJIBciCw40QQ4cAQOgrLgKPN3BLpi0lCyouVXnCKgDZl46TAE/qb9iTBOtDWTZzsKeiEnqm5RaMrlbVGCf1jkp4KcEnwXEVxBUgbrqHKbYDWMytiOFDtH6ZfvcaFhXQB851oSxGtXseRZAUz8LXQH+Jmwy+DYFAmrWOmkAdzu45FmRzJDLLojPAJ+y+/RZOdwwQ0BBv8ICzAb6AaUWZaCMFFhmqmoiKZANkTsmxDyw5OBwVAWTqgx8kwGLlgm/iAuMmcG/wqSVyyqBucV9mOn37ne/G+fPn7f/d2Wiv+9978M4jltBiBtuuAF/9Ed/NPmbW265ZXL7W265xb7Xz3ZtM9tdNxpWzPJQ2xwVsE/9gtgDRMzO0ID7hkG8vLpogA31rMBAlU8moI6TZld5OYSIcHAOWJ5BWpwp48nyPNLeWdy5Gk0tgccMGIM/BgZYtkA38NjLqgMyV8j3qq6wgfxNZTyZMmW3kOybJYLLd1wfsMyzGuSuwaqAnGQGdQAd9Qtgs66YYnYeofYAFDjl/UWRjKZqfx7w835cluQr31HBxv9her6dAgHFjgdPiqdQJ6B55pSr8ziJMCkLMm0DffZ9KPN2A6SwbGKwWn0AiW9G9VRGCg5ii7mpiV5w3ebPO9uYn80H1TuIKv+9+J2TCUnC7iu1/HQeKtebYmGMkYJ9A/F6aCw+uya37FKjMOWArXPYvgTeFDhFAFIIABL6FLAUwGrRMUh0p2xf5D0T1kPi53Ysih1GntT7VZK5ANQ1k1ub8Pc8
qO/VFabAWX8dp8BB/cj8NVFOYKlVBWiHyXaYT6oM1CB+g0ve2j5gfZ4qqVntljjRMlA2Hzy47dX/GDODvjlEUBbWXs7Oh5NfpzKGUIwi1+5k2UlqeOaavRsCKyegi+hDU1Ne/VxZY+11LPNp8p71wAYrAaB97tnlnuFMwgL0oN8xyZzXus2+1Wx31X78x38cP/zDPwwAOHfuHA4ODq5yi2abbbbZZptttqtt1zbod/5+iHs90u3vR1q54NGwMVnPvGbZqHzpoi1q0tElYNggXbqIzcUVxtUaR7ddwLBaY3NxhbQZkNYle3Bxfh/dcoHuzBJ7159Df2aJvftfj3jdAxDOXo9wvweB9s9iPHcD0sH9cJgiL2hFdk7BLF04BgJyZiBGASGgfNdKUQIZNArwN/F9nAAVajCLJXk6BbnGNQe3xw0H91xB+qqWhi36AtclJOIahaGTAMxgwAeFDh0FjJCs+SoYIgG6XNh7nsnH35W6hJ4Vp8F+D5JNMXtaq8A97QuR9ANYBgzgLGUF2vpICJmACGBMksUKBJJsZAJCFuYdoQLSyrmWTNvRnY+215+DApf+HJR5BPDCnsEKvm6cCV3kRaeCGO05l5R0ASuVNZgYYB4Ty4n22JbkqvdHRQIrwGqVxQZsztoH8h9lZiFkCab1ni1n7DbOQK6O7DN7jwOYfEBz4rf8tjOGh/0dB9uWAAbFNmuUWkpcP4d6bhUHQaL9Joco9foW/AyP5Vn2GfVECkDxRdGwrQWQvLzXFOhHgcc1DeZ6y6lmLA5rDhiOawtEkTBvfBY69fIsBwdqdqiCpIQA4+w2WCFfWffBLqAVDQjrgLcqAA0JbqEEqwYJ5o8542hIJgl2HNjXMvzU+hDQSR2+ZRew7CLLAGp9vgbwszHRXRPbmzA8ScBfknqEXegQo8uKF/az3gtH8XL8kdO3nO/ZeJju+/z581Vgarb7gFEArS7Y85v6faBbFBAOcOOPY2atV0iSTIVhw7taHrDvtVhaAFzrrlG3ALoFaHmmAIOLJXJ/gLTYR16csXF2NWYcHSWsx7wzSD+mjDU44WavQ5HrdePB6Hwxk6+kUGrUpvLcMriRkajUMFuKXJ3V+Qw8jvAQTwCo8m2U8eLZhggdKAjzPTIThyR5BALE6Lid26B8e6miC9LHmp3jWX8ACqNaGqejs4KivSW0rAuLrB3fdWx3H3kliZyZ4VUkPZ3PYMzI4lMgu7nPJ780x1UmXzmoA/q0b4jYVxWww/saUWqLZYDlyBEsuQxgn80nz6kfQ8iYSiSbSlhTG3MWuUo+TwaBc3UfGoOvlRx3CgTk7wOg3AspVveDPX/cUaDUscynqBX41lfKEIaylHvBfLgMq9XInzvAhwh9hMhYB4RU989GgJr9RYcYEoCynloPCUdjwuFmxKVhRAxRfHrCsuvQdfWydOt+B+pkp+Y+8fe7geoo/p+X7/T1upPIueo6zM7IK0b4REnvI2jbAjN1PUBvTOduURLJvB/cJLVx/9esOvWOA2Bysgb4NbemyrIOApzGflnWckF8w00Q/3hdA2wUrF5r7nqr21ox+KDrlIBIGb0kWHFbZP3pGuWf/z4AnYyVvtlZEqlI31d+o/MlFeTTbXYlCdzDNvtWs91b7Uu+5EvwW7/1W/iv//W/4uzZs1e7ObPNNttss802273ArmnQjzPH3QJ2HAFZICeAJarSCAwbjHdeAEngVSU7x9Uaw6U10maD9R2HyCkhbQZkiQKERQcKwQC//sw+9q4/i/7MfgH8zl0PWh5IvTqpSzWmCtRSVp7GfbkOXDZmS1k85Er6s15TFGBQ/zNcJ2Cr3pvWI9F6MwbO6MJVF6uatTqugXENDIPL/ByArqvBEgAUEwepsOCoxjhYICs61pYPhFSyQbl5j5Lxm4BKBlOZfb5+1yZxTZ2NgFQqH+Qz6zeu53QBGpLrB7DUDEag12TxrK3m7ceUQYGKzCfAslByrKkwvgYSVK4ou3Mbs75kYZ5K
nT2rbxI4kLJJCSFE6Sey/ebM0YgpRh9QZ6f7RXUMAGVhkoLrBI6J/2rwqguEWqy07Mdn51q2LkqwrzWfGa/BriCZ9ZGchBO5oI1j7Bn4pMGsXZne1shpBpodA+B7Npds4qzyWABAA2c++/t/wiygFGuGmgf8fK1JLy1nn+lz6AE/LxM1cW5bNWgmgoU0FqafSuzllLZYjNmk9OSZTgNHQWNXB7A88Kd9K9fI2G875N6qQFxb36b9/Y5rq8+OMns9wDcoS1bGAWA7Ix6ASORy8J9rzJTaPEWWj6w/q2DeuN6qo6T3DFNoE4+ROZUkCKlRGH3QHUD/ocf87nX2wAc+EDFG3HrrrdXnt9566866IzfeeOOx2+vfW2+9FTfddFO1zSd+4ieeYus/zCwNoGHFoAMR0O/XYx/Atfy0VqpYHjacdHW0snEnKJgXBKBw7CwLiCtgEzoD/NAtkRcHGBFwuGE/YJ3KfD9acolrNmCTc84MvnU+2eAudEEQoK9HwJiTAVV90NpUhfmuQJZK/RKVSVplRqNjvDFTsrNxkCTYTppIhVQAHVezrmUH6Wdt3Vs+sGNTa//k0m+ejZ6JWOoxUxm7aKK3PMPGmGSSkAYGJORfm2dVFaCW9puonbpj/qPggM40FtYUYOxRZShtKVO4djP4QJzfoooZ4BqDo2boHGNTIDPgAaT688r/8ey+yZ00IJaAGVvAr20+mo/NQEkqPkIaSq3gZn6dmq2nZ/DjLarmPWCMv40krI2Rn5ExMMNrFJlPbwqsHw0Jyy5KXUVeJyHUGhLarQrG+T7bkkWnAEIq9743B356Px2offkwcV/62tA771cFn0OsfciuZ/WIGHdKo1f/X8EgtatuYc7gJDvIMw4Ygw5pAGHBbRAJ2SznWflr5BjDQJWs5Y+v64Ue9RqhVQCxZx4M+AXwmGlAuvfdnfqHv6aaFMj3OlVsztlm32q2YoeHh3j1q1+Nf/Wv/hU+8zM/82o3Z7bZZpttttlmuxfYNQ365WGNdOcdSKuLllWeNgPW77/DgL1hdcQSnas1kspBOmCPoshYilEICH2H2HfoziwRFz1Leh6wrGd3/weB9s8gnL0etLfk2oEAKA2g1R0IacD55TnkvSWGDNy5Tri44ZcyqXqtfSFMNq1bNSWp6APZY64lII2l5hA/zSpfRP58ETQoBSk+P5QC9KkEtzGugfVRkUlFyVBFCAh7+7zYSgvkURgunWQSa8B8QvKTcTUCJrKhvalMl7L7tG9WYzKATxk+HvCD26ey5TbNQTQLWQG/GMiYAr0gsX0EA2+BGKMI09ndQMkOTs3i1ku1KpiXs0qU7gYmPPgX5es4ZgAjljEAYGmq4DTDdsWnQi4gU9t+Ap9fzgwssmRpCQBW21YAsn5GFcinIZRyPFTnKOEX7pvEcOKYNQDAwcUtqR+UTGx9v3WOO4BNYCIrnALHLOKCAwfCrstJQTPJrB/Xxng1qS3ZnwL6uV9KBvQecrc06UkD/VLeGa8JRJIhjfK8DOsaXBoLy3Yqe5lr+SmK7msADcAwMLs5jUjDBlkkSxVwU6Ava82WrufrupA+DB2y1ObKLkjMJY9yqfNHgSN25K+ub2Sq/05ZDvJ7GT8ABGHahOZ+yCig+Eaf+1QAv22Gaxkb+xCw1/FrGQPO7kUsu4hFFJafjIt9AGhdJLuqeyInToTQ/WvgXYFrCqBwVAJlWqNQJGFziKDQIQyr3f1xD1lq5Ljuif3fFVssFnjCE56AN7/5zXjOc57D+0gJb37zm/G1X/u1k7950pOehDe/+c34hm/4BvvsV3/1V/GkJz0JAPCRH/mRuPHGG/HmN7/ZAlF33HEH3v72t+Orv/qr7/I5zcaWDy8gDYfsA3Q9sLxOvkg2ptDm0GpbYVyztOeli1wreVgz+DdsGIwBgK5HiEtQxzWXeRzac/WjIjPV+gPkfg85LnCUCKsx4f2XyjOoQWWdXzNYXpPAc9smA+uUhH1CWCjG6ALlCiD4
ZAqVdFQfKgfgoA84GpIkqzAIeLBgpvBew/Yr7CBh+QYvbl2Oz26GjB8dOElDg9ips2QODtiXerOVtHKWGqnafuk/m6ucRJ+XdhxkrtJks8KMLhKlCIRO2YeaxHXceI4i9xidioC/Xn1g/8FqSlsyGiq5RJW6tHkwliQV7o8a7FMpT2MokcjOOsYjz5sJeVyjiwtQIOSgiU8ZY2BW1C7VCLs3UHyqJEy+jfPdvBqsvxcmwUJ5jux85b3N5zkVwE/88bpmXGQfJozG+rN7YYxAVwNTGaFitV3OvP+nCWz+2cmAY3MmhAwsXSG2I7mZzi6LL+GToTiBb8ThZkQgwl4kjJkwJsIQVVGiNEKZqNa3uUjC0rgpfVoBSFQlKXgG4JTpNVP/rAIUlVFmDEx37eR4FCMg60HrR60hT1TuT60HbQlu8v64tqFI9BMVf918bgBB1h+USe5FSSiNARQWiHsdr/NS4ufC+b1w52LjsI4jrl2B2HeiZjjQhAdVKdm1PlHVGfW9rA61sq0jg9aydelH6fucEyedAkDuzE//UNvsW82+1b3VfvqnfxqLxQJf9EVfNEt7zjbbbLPNNttsAK5x0A8pWWb5uDrCsFpjXK2xvuMig37rAcOK2S7jas0/cSvz2HcM+sVg78OCAb/Q91icP0BcLtCfOwtaLBH2zyCcvz9obx/h7PVAz7JTLCcUkfulBbGGDKxHrk81JGX1ybGlDW2AQQHA42wDAaoC0FtYQTLJnYRnF5jd0gVChAQVxhJQqMwyOlV6C1VmcVVLxkkSYSRQkgzM3JWFIwXkyHJVyvzL2WXqEoDEbEdl+XFWaglEqQymXSsCkqSLstRWYcpxUfuMHiRAQAkK8m8dICoZpoGAEMpfAFWNv7hr1brDjguoxEBIY8n6h8hFhcAL9SDRwCmQkRl/wQWkPPDngiK6+KeMkJWdWI7pF5EWwJJ/yEUmdT816DcN7rXWfpwkIhuIDOxT2SsAIDlnfyzPAi37cW0huCDqNouutRqw5OCHz+TOApxlCTxmZcsNa2lQMiAHcYEce+RuYTKUCvRNXX6Cy36mApoyuOjAJQf4GWumySKn7CBmn2U+DFIXK5W/DvBrGQI7LSeAYnnfnksGlIFy7D6AOhi2tSPZR04AOgP+YghIxDLF2m/EYfzqWYxESCq3FiA3B3/fh5JB3geuy6mA357IenahSOhq4Fnvheo83LnYpZXMeGbgSGAQgEoEEoA86r0yQOvUIC5Am/Xufvswshe/+MX4si/7MjzxiU/Ep3zKp+AHf/AHcfHiRXzFV3wFAOBLv/RL8ZCHPATf933fBwD4+q//ejzlKU/BK1/5Sjz72c/Gq1/9avz2b/82/u2//bcA+Jn6hm/4BnzP93wPPvqjPxof+ZEfiW//9m/HzTffbMGv2e66De/9C6TrziOcOc8sleGofKk1TnW8kgB4EkYSiZQdDxnR+RUMINJiCeztc1BZxtQKpBHAb0QwRYQhFflpnc8urMvY9sCDHn2o5wpl1Y8521y3ZT6gjQL4xcDP/bKLsi9IgkAQ30r/L0BWNW6g1A60oHbWo/D+Og10i6wdQqhlPpVNN8HYMilAt48KRBAgUKWSS58UsNT7mklY+H3bN05ysO0vQOfgbPNczjCWpf1E5j4eb4vyhI27x7GnAL5/lJ0u/5NLvlAgxSQT/b3kaqjxiQ58XSgAgRNxNAOMa8sJqNdIWZZzlc9ErQOASf1tSUUC204RxH/K2D7nar7fnUDEBxDGo69x6H+bhqJiYG3ChIKIng9/T4RJgmdreloZdf8oMN4HQor8/0IAwKOBz0/BPJa8L8k8CrrrNpooFqhI68PfNzr+aAJUrmvvcgIT2LeiAJWJrHl9TT+of+Z9Mu/POCZm6Qx5RgK2rkUrO2/1CafYqNKfKnXeAj/6HKF6bpskqVwSpUiUSzaJzziHgK5blrYnrflZFA7sfFy7FPAnBev5YkDvAi9f3IdSs29Xwh6PjboWQ7kh
AzMQp8DPDB4XzWfXkw0d8gQT8cPRZt9qtpwzXvWqV+FLv/RLZ8Bvttlmm+1DaO9///vxgAc84Go3Y7bZdto1DfrlxBJ24+oIm4srrO84xLA6wubCIcb1IEy/NcZ1wrBymeKBEBcRsQcoMOC3OH+A0Pfo9heIywWz+s5fx2DfmfOg5UEN9vUHBgAo6IeOa4mM1GE1JAwJWI/Jak0phpUmlt0pFcZfalZLIZB9xgBVQiQOiKl8XSQG+Bjo47ozxu7btYAFilRK6EAxA12PPDigz7MgczaQBG5xjWENqORn7iwgiJiQ4wKEUNVK0cUsASZ9qey4IRX245hR9YXV3JP+QnSSisoGkojFZkxbIFolKaOfuYCF/tVTjm4B2wJdHnArzD/YebQhG8sQJz0XDbPw30Qktd/qA40SGInEYSSrL0QO1JPtrF2Bgb8R3O4xZweU2RlYCxXo9KCeB9b8d635uIRJhTXbWHBxEhgrH2rAw++3fVZULlPPvwrETcV0Mf0d/4YQIEysKNnKyrLQ+nguk1uf9xw6k6C1II0HVe2YZM+myb9lB763tXx8hrkPLHmpLwAm9+WlOxXsA5hhk8q2vn9pKkDSAF4+DmPfa5DfMunzttScZ2lMgX5+HySyogL8sTSwPKNJGc1F7jgEQkzuvQWKSj64gn0hEJYxoI+EvS4a6NcF2Bjpr8nOgLN/7/tRroNl8Y/r8j/p+4XUxunlun7oQb+M6QDvae7/rtrznvc8/PVf/zW+4zu+A7fccgs+8RM/EW984xtxww03AADe9a53Ibh79MlPfjJ+9md/Fi9/+cvxrd/6rfjoj/5ovPa1r8VjHvMY2+abv/mbcfHiRbzgBS/Abbfdhs/4jM/AG9/4RiyXy63jz3ZltnnPuzDiJlC3YFbesALlxJB/Tnxvjy7YPo6mtoAQWAUhRuRxBEWWvKOe94UFM6UROuSuLzWkFPwTwG8Qn2hIwOF6tCQgnhMT3n+4Mbb+A/Z7JyuZjZk0CliIQFJfTZro5jg1MhZaRm+gVcZ+F015ug8Bi1DqgRpbxQEPWyY/zhQQpAZsyiyFGRWcAtgHA5jBlQmUgwW5zdx8ZH890CWf5ciJZyp/rEBpRi2frtOWuA0mCaiSo1tA4wRY4ROFokb7Xeca4OeZftZv2z7ppPkaaR7wszqIoch7KgDoARbA5lUFYrgOqzIsBfjLeTKBx0sUEjHryoNjCtQILmUA1dZ+jj/L3f0wlbwjwF92gCgpKOL/ovhVxzGItG2ZCBlZQNwtb6AyQpH6VMAvJkmoS/yMeMYWdHuxTco4GkYcDRFAsDbEXAAkNIk/+pxVjHytuQfI8yCyuZlr7urvQLxeYmDMnQeVOnPK8Nuqs+x9hNT4Ch4g98/mVocp4Fw/v3oPTwNlUo/7mAk3Z15bUIbV/R6T+mhFZWMkgChgsTjgMWuzAo2hPO+6v4lnPMp9r2ozJaEOlkTVBdq6x/0drc+PsSr9OVDgQ+54BLJC6TZeJpk3Frs75h6y2beafat7o73tbW/DH/zBH+A1r3nNqe73937v9/CYxzwGMcbLbzzbbFfRLl26hP39/avdjGvGNH5JE/7qbFdu73vf+/Ct3/qtljQz27VtVqbjPmaU74rmy73E7rjjDlx33XV47//vVTgzHGK47QNY33GIo9suYNwMSOsC8Pn6fUHYfP3BEmHRcW0+Afj6M0uEvmOAT1h9dHAOtLdEODgP7B0gd0vOQg8d8uIMcr/EiIAjlegcS2b50aCSlJnZdgEmS+UBIQ28rMdkcpZct4p/q6YsNK4nw9JS+11EFwjLjuzvQtl9WQAFBS+8ucVrBTp4yUDdbsoss1WCTCpbQ1TL2wVecOd+iUzB5DvHxAzIMXP2b0IdiFoNI5L9P317evaaBgG9XOouqSaT+HQZyV4OUAN7XSh1eNqAz5Rl1xa7riiBNW1JK+vavvfnB8BqB+l1ZzlXmqyh0bL0qgW2fbYNkhFKFvvUZ9vnqueY3Xlt
f+YBPAXvFBTVPpsCSndJwOq56vnU18aDgcdb2z7dd3TBSWXMGigEABQKw8+xJdp6PtoOX+epDwr4rWtZzwb0I8d6YObhaAy+qdo+ZvI9gAocLCfI7AiEyIF3leyNLuiuz6+c61a/tcGsdhtttw+SeXPBrcLQkEBtzyDAOgFHMhYebhI2CVgNCYebEZuUcbgZTeoTQAWUKwAQCdjrWIJv2QUsYjCp406k+Hr5uxeJA4frQ9C4YXBj2JRg4tR5uPOlnFnWUEFYCaqEvX2T28vdHu44vIQH/n+Px+23347z589P7++UTOfHv3jPLffose644w58xE03fkjOabYPjem980ff9nzc8IgbsXjQgxGvewDi/R6McOY80vI65K7n4Pml2wtLS2qhpjtvq2UJhw2DgIslwnUPAO2f5YSphTD9+n0DqRTAWY3ZGNSrIeNoSPiLC+tK3nvMGXeuR+x1AQd9xEPPL7GIhX2VMljinAj7XZE4jxKU7gJtzRUZwEYStdQ/GWSsWY/Z5p8zfbDEKk4iaJKrbIcF0GImXsc1CnNhDkYCaFjxnDAc1f6XAhmNH2PJFq10ofpbe2fNL/V1pblfSqKKzss6d/aBpUq7QFjA+Y86j6iP1y0MmFXfdz0Wn6t1u4wBBLkmmpwmPiqtL/Fc6PtApZWB6vzt3MX3NOaUl4d1EqeIXV0f2KlH6D03IlityPVYy9AHgvkCDELxfaOS3mthqg1uTgKKb2K/lXuxC4RFQD3nOJ/AzlskuwGU+R/YmtMBFBn+xdLm8rTY57/Lc8ihs/tZ/RU9N2VoeVBNr+FqSOaj62/Hiakwg/3v1ZBwNCSsRp6vj3TedvV3leW51wX0gXDdssdeF3B/+bvXEfZkvrbknFB8qUUkvmc2K9DmUrl/PDgnCVqgwEmZCqzHnq+5AO/tdS5JWevtWstA8W28KkPLwjUQzz2jzX2sflYOHdct9clk2dc2L8+u9r2un3TN5M3OQfrM+8RA7dOrCswiuPFHpO3tfLQfRdpeEzE2rl26/07XUQG1ckLDNlWrfHS/Hp0CVLduuCK1SsMKd9x+Bx746CfMvtVs92rTe+eevKZf9mVfhj/5kz/BW97yllPZ39vf/nZ853d+Jx772Mfin/yTf3Iq+5zt7tlqtcIf//Ef4zGPecx9Mhh9WvbTP/3TuP/974/P/uzPvtpNuSYs54yXvvSleOxjH4vnPe95WCw+9Ek017oNw4BnPvOZuOmmm/BTP/VTV7s5s52Cve51r8OznvWsU38e3vOe9+BP//RP8eQnP/lU93ul/sW1r4sRuCYfy3RGxL5DXC4QFp3JdnbLBfpzB1icO8De9WexfMB18jqP/Qecx971Z9Gdvw7h3PUI565HvO4B5f25+yEvzyL1+0iLfQH+9pH7JQYJeqzHzMGpkQNUKuupi10jpbgFGNxnJJmTfSyydAry2CtQ+SyoVB0vsjx7RYMMNMjC1bNv1CzbVOo2dFzji2uV7XFQrlvySwIoUxneZIs0ZS2NvCB2TCZfAyPQ5W84ggJyKsMX7KX1uQ76iKX8Vfm+gz5WL9suhurFMn9F9k8lAH3fXg7w0+s4ySzbeX1LVnQvLKRlV86L2xQFrAiyTTAWnoLKGjzJKGAZM84c2JkURIVJy46pBPlyLu3RNpEGDKDgl4JeVL0CNLAgAYQd521gpzumsi6YvZGxliDb0ZixGhIubfilwaIjCSJt5FkaUgnCDgocp4zBmKHZzluDOvrSz4YEk5PSfehrNSasBm7L0Zj42U7AgMABD91vE9U0EDLIywF+VrfInpMSCD62TpKyZzyjb9jIe/eyRgSYjF6/YHZNv6heKq1HXc/ye74OkF04F3RpXsqC1NfWM66fa2AsJTceuCSDKsiT7Xz9GKDgtmaPlzGQLOmhV0ZfIHuG+BmX594BfnpPB1B137bHtoCdl+LyzJGGQdKyHbX/8mZdGAke6f4QmrKd7snXbPdNW1+4iM2FQ+TVIdLFC1zr
V5nEOj4Yuy3beIOuL2OOk/OkvX1JMhCJZKuNurBaqSOCgG481m9UGl2SerSu72pMJhUIFKZ6e1+q5GIrw1yNun4sgqs5FckC473zr7xEJU35ANV4Odbj3OXYbGqmolDGGHsJkIVuYUBBbsamTIUlaSCCzI8e8LtcG/IUkOETN/zm8leBBn2xz0A2pqsEpAEDBmwWH9XmRWNOlfOv+qf1SR17qmpra7k+pvql2j47F9K5qPy0JFaV87Tvmr96f+g+q6lix73g+1yZ5OSkW3WOh1fi0ISe5tzba3S5OmHm11rb1f8tDZ+KNWqtPr3HAJhChz4zfZC5mMgAUM/4MzUUBxx7BQWSdth9o6+xJCrWr+0kqgIqDdW1nWKdbVlzj5YO2U5ismc0dvYM++f4cqaJcHpNSNtHpR+qprlkOf5tNr9b1WPGBFGeyQZUr8eEIYPXeN1i61z8uetzomopVpezWiNAktjWUj9+7WrHD7ZerJQspp4DN37aGOr7WgF9lYfu+u193MM2+1az3dvsAx/4AF7zmtfgH/yDf3DifV28eBHPe97z8Gmf9mn4n//zf+LlL3/5KbTw9OzeylU4OjrCm970plPb3y233IIv//Ivx2Mf+1g8+MEPxnve854Z8LuMPeEJT8Dnf/7n4z/+x/94tZsyaX/+539+qvfvX/3VX53o90SEF77whXjhC1+IRzziEfh3/+7fnbhNOWe8/e1vxzve8Y4T7wsAUrrCtdNVspe97GV485vfbKz6k9q73/1u/Oqv/iouXbp04n1tNhv8xm/8Br7t274N73rXu068v8PDQ7zmNa/BMFydespT9pa3vAWve93r8K53vevUnq23vOUt+LEf+7FT2VfOGT/6oz+Kv/E3/gY+4RM+ATfffPOp7Pfu2DUt74kQEPp94Pz1WC6XWJw7QE4Jw2qNS+/9INaXDgEA/bkDHDz4fojLBdfrOzgoslWRF9S0PFOkPBdLk/AcnWxnWyPEL8QiSQZnAChzTT2ufxBZYknZXKmwUxTY8rbsArxwxhSzy0t5xgDLzu4D1++jYS1AW66yUOugiZ7HHgMMekAXrLIi75rxCtSF06eCNCmU7NbY2f5yiOgWB0DoLCM6ENBFYhAKUosug2txCGij5s+f/y/9liGgFuoAl/a1N2MFBQX1SvChBACUDYgtplxrkWSBTtx2lnXi7bU+jP2USns7qdO315UFvrYfQHUOAAzUQiJswAyoPgRreznNvKPfst03WeR/TKbHAZKeHVi1XffuzjMRN7rdxrMIKmAyM+CWs4J3yWq6KHujvVa+zmKpGaPnrowJqfOGAr5xn/APtR1AyaJu2YV8rlzLKRJhSCzzpiC6MSF93xIg1ZgKEOr6z8A+x6g9Ntixy0zCcyLL39U58lK83rSulsmiKTN3CrzSdo0Ny2KivcQ7Lx/4bdokA4xA4uPo74wp7DLpAwWTs4rSoTkCRBGbkNFHwmYsrFhlzPh7IxJhEYMFnStpuVACmlX7LGjXgUQaDCFKvdJaytTGVQ1m9RlQRmZzfWab7Vq01ftux+q6swh9h73NGuHc9ewXqfxwKs+9Tx4IB+f5ORg2Bk6Effat0t45Vkrolsh7Z7j2MXUc4JQ5YkjM8LV2iGJCytmSQfTZ70NJ+AFgNYHVT9DA6ZCKzF2SQHUObrxzY1WkgGXssCHCGDJiAsZEiE7L0eYE+T9l/h27AaEG+TzrOROQBpb0hJMF1eQsb44htAUCxkU9bhsrh4Pgl4aiPKF7NSa+SYKT/bWkJELlY/EGXZEXVVlCkddrl1aBGHQITT0/BQu8rGeVCLMLTGnP379vAD0dnz2TPEeXrCY+7pR8IPtIUj82AObYoMwTfr4wUBBOplQ6TUd9m3vUb3DACAwAydZmllLNZQ5KA9eIIwLlCCh7tq21HSLQMdBr4G90axVhj/lT9r6d+izFxwIocA3mLrC/GYP6T/IMofZZ1VcdBaRj1Y0C2HH93YyeCujXh8ASuc6/
AxSoZz+tczdi0HYnqYc8bpgZKux8VP2p4GBnz6T9zQkkbkfcBQpz5yKTkzOf8teqe1HaGp1vpesjUoDyeJ/vcmBPkDXFKL7uCJiaCESOdUzEw8zWjooPTAQMSdYCSNjvpNafZzfqKebMspryWRc6pECWPBHAz3ZHKGxdB9QR6vlBZY63gEUKkkiWthiWWn4iR/m9snwB0NAj9/ee4NNss10t+7M/+zM8/vGPx3Of+9wT7+vMmTN42tOehte//vX4/u//fpw7d+5u7+v222/Hr//6r+PjP/7j8VEf9VEnbtsb3vAGvOtd7zoVcFP398u//Mv4oR/6oRPva29vD6973evw1Kc+9VSkUG+44Qa8/e1vx7vf/W68/vWvx2d+3FVVsgABAABJREFU5mfe7X394R/+Id797nfj6U9/+onb9Uu/9Ev4nu/5Hvzcz/0cHvKQh5x4f29605vwF3/xF/jyL//yE+/r0Y9+NB73uMfhwoULJ5boG4YBP/IjP4L1eo2v//qvP9F+Xv/61+NVr3oV3v72t+MP//APceONN97t/Xn7hV/4BXzN13zNic7zUY96FL7zO78T//gf/2N8wid8wt3ezx//8R/jR3/0R/Ga17wGf/Znf4aXvvSl+JRP+ZS7vT+1n/7pn8bh4SGe/exn46EPfeiJ9/fud78bf/EXf4EnPvGJ6PuTJQ2llPDMZz4Tv//7v38qbQOAP/qjP8LTn/50LJdLPP3pT8f3fd/34eM+7uPu1r6GYcDzn/98fOu3fise9rCHnbht/+2//Td86Zd+KV72spfhG7/xG/EVX/EVd7t+7J133onf+q3fwvnz50/EfPvkT/5kPPe5z8X111+Pn/mZn7nb+/H2kIc8BLfffvup7IuIsNls8Nu//dt4/etfj0c84hGnst+7Y9c06EddD8SMsH8GebEELVZciyscYnOGNZ0pRvRnllg+4DrOOg+RAb6+F5m7yMyX/TOgbsH70kx0V2PGpOg0u5qCreFYfkUBjelVmwZhVG4pEYNciMJoiwVIVClQlmgpmZ2FdVYYMIVNpAGFhgHUSs20Cy+3nS1sh3W9aEMBL7hORrN4dYGtqg6MLsJlP0gDYtdxJj1kDRwgQB9AiRFxBf9aQMaDbxZwk2+jBPl0YVyCXQ2QFGogzge5FBBUsA8oAFLYgrb0+LlqU9JgWi5o2PZczItmH+zQZnZB200ViBma26oFi9Xa846BAFmgEyRD7m46B7viERkKotWAowf8GJRVJiIDfiobpdKsvsYKX8+MHsFFIWWDREjEJ0WBgyBIHL0KOSNnDrT5eoDavy3gp59zN2ULqkTA6qKEyFKyvs/tybFrTO4+gmMyDMfW1DQzYD4YKEaR6/UgRAmeNL+rsv9DAQD1M6DUnSMykK+V2Kya4cevKmDdAPx3wfxYQwjIKZUHa8I0iaKTAKw+E4E4iaIPBdhX8wD+1BjaMnL0elqgXsZ3fkZSCdQB3F5/PkilnpOMr9zPTgagK32dtyLpHxrjjP/Tyyic2v9s901bX9xgFGn0PCZg2CBduoi4+CCw2OMgbH9Q/AZlg8hzkFMCwmjsPyz2itytSArn0DHRKzs5u1QSMwAG7FbDWElg85wIjGlEFwl7Q8KQsrGvNHGn+E8F6K+YV8DWuKa+TqQAgAPoCBmdu9V1H0DxOSr2GQDkIolZWEYBSAMyGHDgAPcEE2nXGOuAnSmwb8zAOGY3324n4ED6Q2U9bdeYGKIU5NPmhAKStABFoCKt6t2LmlFFJTChLL8Jq3zMKVCm/azt+ynT+dVtlt1n1aa07e/pfKHXOxD7/EESSZCaxBAFNt1fU4fYMWxafW3AgD9uX2JfAKhbZYk8DIoo0Je1xnjsrH7kFqYMeR5Q+y7+PuBzy4iJkIMASbmcu/maurZJ2Y7XJnBZUg6hAvwUANzlz7Zt9gzjVjEgb9alT/znknTFNeM65HHgeV7+B9USlFZbLgsQ6z+feDZLfUkF4tuxoP4N5Yyc
2MfIxz3vV2iW0JYBoixjWOlPzwTU+0+flDERNimDAqGLXbXW07abzyY+UVSfJ5fn24Oxdo2kDygnGbMSgI6HxPZZ1ePptVW/GagAVLvX9WcdrgrTb/atZru32eMf/3i89a1vPbX9veAFL8DHfMzH4G/+zb95ov1cuHABz3nOc/DIRz4Sv/Zrv3aiwPO73vUufMVXfAWe8Yxn4PnPf/6pSK99xmd8Bj7yIz/yxPtR+xf/4l+c2r6ICD/1Uz+Fo6MjfPqnf/qJ9vWmN70J3/M934N3vOMdePjDH36ifT372c/GYx/7WNx0000n2o/aU57yFHzgAx84lX0REd761rdib2/vxPvqug4veMEL8N73vvdE+xmGATfccAOe/exn42EPe9ip1hz8h//wH57Kfl70ohfhqU99Kp74xCfe7X088pGPxBd90RfhgQ98IN785jefGrD5K7/yK/iZn/kZPPWpT8WP/MiP4FGPetSJ9vfOd74Tz3rWs/ATP/ET+LIv+7IT7SuEgL/1t/4W/tbf+ls4Ojo60b7UHvGIR+ALvuAL8NznPhef8zmfcyI56P39fbz1rW89NRbiOI44d+4cbrrpJvzO7/wOlsslvvzLv/wuJzmklPD1X//1+LEf+zH8wA/8wIlAv+VyiV/4hV/Abbfddrf30dqLXvSiU9sXALzwhS/E4x73uFOX9byrdk2Dfuh6hD12+HMaWYLqaAUKEXubwaQ++zNLxPs9mH8j9WWoW4D2lgwEKtgXIlJccH2rrkfuliWworWnuqVIJtV1vTRLvTW/lK3q9Nnqv4BQJmUnIN4iTktI6YLd6jdooKtZQNpiFKjBPrfQTW7xrtt2i67OtIQEwhNq4M8vDC3LlpmF9htdrOUEjD0oDIjUIRNAgRlnOQMhK2jGa0v+jTvnBvj0ZiBOzogCqEHOrQ9ki97jat9NHcPXv/O/q4JdbnGdKBuQl93nfii03+7AAVImDoIGvscUxBzS7vqG1g/yd3TbpjEjOIZfAi/WcwbyxPGvxHwtvrZFPsjoGX7KxtyMyWRKV0Ndu9IHipKw/IKkFIegQVYOvum2GQxqZmLALhHHaTTYaxnpun0D+Nm9IsclcJCOAWnuOwuW6DY0zYQ0ENdk75LVeLK6SLsAvzxufw4IoCffhXoby/RXwM/JfE0yHwz076ptAAf2eaBP/rZyTJRzqRvo2+MmfZ9oUEA1nW7izkAXQZ7HQIi5PIsxFMZNCrSzdpQH8I3FELal2AC+5swUDEDsSoIDFiUYlxOYvZ3qQLVjkHDfool2dzXz4riA9Gyz3cvs6MKa6yGPCTklZu8dXsCwXiGcOY9wcA7pgQ/i+3tcc42tcSN+EgFDAHULII0m68mSlHtcH4o6k99UwK9IL5fx+dIwSm1PkXFOhfk3JiCGEX0gHA0JMUR0EwlTXqZba095xpVZSiBIYLtjvQWeI6maxG2Yhyb3OD9LQLLsxww1mQ9Ia38mCW4Ls2VrjNHfuIQQBvl6O5aOMcru87V1x1Sz7dV0brT/HRBakllcEooHyWzc2+4PBYx8Io36qjo8KgO+qld4DMi5zRh3bWk+u+z/Lcgnc2FqzkUZi60l1L6csQMNKSs7UmUAz0KvgOYW8FRwlXxjpK/TYPML5XqOtVqOUrNOWX72GeQZc5Nlu36ofJk0gCggULBEukjASDynZqr9tDGxGkXKqtxQapKrlLsCei3Db+lk9UMofVZ1i9078gwLK1YlPEkUTUpd3RFIPAYpsMeN7wqYJFeKHEBH/n4ixwwkBr2m7kVLTWgBP5rYXn/WJlX5hAl5rHzy2y7mX4LzYd3n2XxaB1a6awVo4mNGB6BLGV0CRiLE0PHYNAVWih+Yc0IQlRJTswBAG1aXUfUDq22q/ZITA64AgK4s8vzzqdL3Jh/v1p/u+vh6jJEkKXe22WY7VSOiEzHL1FQG7qu+6qtOzDR5z3veg9///d/Hgx/84BO3S+38+fP36rqZJwFhvP3xH/8x3vCG
N5wY8FM7DdaQWt/3pwZKADgVwE8txnhicHO5XOJJT3oSnvSkJ51Sq07fuq478b0WY8TjH/94PP7xj8c3fdM3nUrySs4Zv/7rvw4AePrTn45HPvKRJ97nR37kR+JhD3sYvviLv/jE+/J2WvfdR3/0R+Pnf/7nT2VfAE712XrKU56C973vfSfeTwgB3/Zt34b//J//84lBdQBYLBanOi+cthHRVQf8gGsd9Esj0DGtlFJAPrzAn4WI5Uc8XJh8blEh7Jewf4YBw+UZy1pPWlBd6stopmymAC38PmZgGMoCWoczDUhtUrYFtGZab8aEPgZ0IWLZRfkuGyOlcxKdXMOO5D1smynQr5JEcgEEC2KFrsrM9sweLcKuQJn9lvzqXyzXwRkPAFAare6Y7cUYRnwdkJMEI2RxnQYsAtB3HcZMUm+CwT+G57zczzYq5fvCA2gqN+MBP0DPjypAj89V9uez3fWziW7wZsCe2yjlIvV47I93nI9urvDkmIAUM4aRz6tLtAVU+TbrPnKGyPbUDdgGPEqfW1awZKuTyaTVP9J7RWWcDDTOWpOlfl+B4kkkPcciBWV9EAgpqeQtH7OWbKTm//JbfZZSLoCRCBJV0RB9Qtogir/mLTNsiwkBDSK7DOeWpZHS1jMD3W7qr/YtkQWXMlACJBK8qqQ9ZYyTzkNrFdivAJ8CbruYfbtYfZZ1zUBfHtac9d1IjfGut0FIdG6K0QB2TnDh5er7yNE/ZJQkgC4Bo7Aux0xyP1EFPlftIFQMBiKUwL8/nAuGd3HBAe4UmAnQShpb7aCmVlA5+RpcNQnVWOpuzTbbNWbjao3NxRXi7e9HODjPMp9St2+r1m/oOFEqJ9ASZczo9qxGMDpOqtKkqTGVunODfBYIWI8M+L3vcCOMcFcvzIFb1k7ZVxeUTVTq8qoMekmoQpXUVFlOkm0zIMgzS1RjMdqGAE4y0Qh4FF8RxKyXKnju5wh/LPnr2eCTFkofe7BvnZjdtxqSKAOUn6jyhH6kY2KSpBjI2Hisq+JZUA4QaZfy5DCvLX9V5m455Faf7DquBzyrz4+zdn+6D/87Jw+YcqnTe7n4hCYvJShjVM4ocNJarF0OSeSj4k8Qtu8BPSdixyPnBFBkUpUwnEiZTgKe+N/5sgO542cyx97epyTgNLFagW8TuTmRmuvB2wUGOjMgqZXMJsssvz2IconJtI95iwGq8y5QGH5LqWnN9atJ6ltHW+vo2seS7xSg1OvZMP0sAUnWQRkQxQe3TkED8gEM6CU3X/trASBrIpAAofa93koJxf/ygJ+2U/+KX0hemlXWh+YnjgMQF+Znes9Kk+tSVhC/1MjWNeauMgZAAWk112CTgH3p7yFlDIkQKGPRdcidtKl9LNW3pVL7NKMAxZq8oCUh7FxHsIJCZMCQsBD/1o0r5jtPgIZN0mpZv8q8QVyncLbZZrt32mazwZOf/GS85CUvOfG+PvVTP/UUWvThaS9/+cvv1UHx2e57dhr1J//f//t/eM973oMf+ZEfwVd+5VeeQquAhz/84XjZy152KkzhDzc7zT575CMfiZ/92Z+919bfvC/atR2JTKmwS2IU+c4AhIhw/v6g/TO2XT66ZEwYWiwZENzbhxUD7/ekKPgeZ6S7wK0Cfhp40ixHBTmmggUcgClfeDW7UkeN0Ade4C4CS9MtXJ0LH1jeChTsKmwawjTYh5ItqQvIyZ9LYKICLRyQUYEAG65hVdUZcedvcSAf3NFM947rQ4xy0CydmnORdryc+SALZ2EzWJAk+5rxPgk2NSBfC/D5tlf1W9z3bZN8G+PdnNumsptzBihkA+5yBgIY8OBgxe6+sbvimO5TIDRnCS4kZrRlaoKZOVfQjAIsLeCXHOCnDD8NTGgQiLcvQdutfggF7FVwz3/eAn6tHBQzFzOSSHu2bddtuGuyHKd8p/KxGgxTZpgGolSaTIN3Ffjdgn2e1bcD5NtpPmikASqV9urkWVOWiHRGJes5sa9drIgr
AvxyqgA/KOtHgD9vOSQgjKBuYc8/5bjNKG2DsMIczhLoDcSZ5DGIhJQLqg6Jg5AD/P2o97TbpQP8AkpwM7ggJAAJivO9r0HBkuHvTy7VjJw0UUdGg7BUmOHG8rsKoN/lagSdxv5nu28ayUOSx5ElPo9WLKEuYH5ux5wggETs+TnRWnBEAnpHA8DVl9JgtgF/ucgtZwBHQ7KXZ9mPieeQPpLNC8nYTGXsVkZfVdNTxm6aen7VJsZqBU20bdyWwmLSpJNoAE5oIvcTYI//3zPf/CZVVo8bz0OHIQPrMZl/qvOvjoktGzoRgyFXvBBvgbd23G5M+yg3n9n7qfO+B83mlOrD4K4fChPubh5Dgb/sVDtSznbe5k/ot8f5Aw17XK3suTkPBWGrmmmuTu9WW6n5v/g3urHvMwWgbDsWehS2H2+vCVy6NlKAaUpWPygIb5Kehf3HICNMjaM6VQeYTiU6cnf6pCgBAomMnUZZ2GZAAQNFtpIMbK19EjtWs4ay69gCfq21vqH+zr6nrXWVTzSoktbcWjOh3LvKpmzrlyfXjWM7CMiuOYmQXJIe2fhlDMfmPtX+hO+HZq1Y1TOlwH2vkqqhY8BzFCBV+7D9HSC1lH13SdtMRpY74WpgfrNvNdtsV2aLxQL//t//+1OpcTfb3bcZ8JvtWrS3ve1t+E//6T/hC77gC05tn8vl8tQAxNlOZs961rNwv/vd72o348PGrmnQzxZ6ItHZn72+MGB6rsNH44alX7pSww97B5UcTg4d0C3rbFkB+5AdGARhUbmAisrXUATOCOMoS0ZtF0SSrlnMKlMoErCIAV0All1AFwiLAC5Mr/URvFQgXAB/KjM1RADbEp5AWTTq+8J4cwwYXcBtVhy4G1ZcmH3ccAanZHPmzRpp2HDwH6gACKjMYAcGYinIvtZ1YEIyPTthNAWUxZsG76pr3Vz7wuTT4Jv8Twz+pYlIiWf6KNg2xerzwZEKoPIgY56uqzcl2bnL2rZ4y7nUkUs5Y3SShibhZduW36kkE7e1AUIE6EsEDGO28+daMgpm83vtq1bEsQX7fPDHAzAJJYibcslG9gGIPgaklLHXsPv4vYJ+7v3EdUkaBQGQoWzRcr6tTUm9emYYM/24jpzW61FJuD6UzOaK/WUBD/es7ghO2Un57yYy/30dE4QExAIwVvKaLfA2jgISln028aOmQ9LW+1bOU9m8HvBTwN+OHxzPVYHJEI116BkJxqDW2lQTx7flIXGnZwoSoIJJKY+J7B7MmayGY84F7CuScr7eV7EEDoRrwMtYFVkCn2kAhjVoOGIpsXFT+kPOGz1nPlXnpIkjwv7JtN51BWab7V5n+/fbQ+zZxxgurXF0253Y3z+DcN0DQGevR+4PQMPKpL5NypYCIL6CWl7sM9OvXyKHDsNYfCdmfkPkqwt4vxpGXFiPuHM9YEwZfQwsHZgSLspnDz67hz4Q9qTop84tkTiJqo9FJl3BPvarJli6njHtAviaWFBYhmWzQBx0V3Y8sM0m5g5INl9syTx7sMADA8agjGU7CjaerhNwaUi4NCSTAdf2Dakkp6lFImP3VU1r/KQEsPy3nscEmDEVkFZJzADCcQlboZ2MdjH5dgGDLTiza5tMxkzS/ZrvIptp7cgx5cqvm9wl2NcgoiLp6s4pgRBR7j81qzFLKAy10THVzHcXVp/IUObE52B1IHcAsDafhs4USrIkmpjMKmoQyLPetxQL9L0cQ5UNRgJiyKCk97r0VwKSyO2mDKnrV74PxH3AMp5Bntdo8p6LGComfiSgi8R1y1VVAd7vGreYfqUxbk5OIyccJWHcpsFkPknqanIfEpCDAVLWx+Z7BXkoGoBvwmebvnEaUMz9jhKAYV1A3djJPdMBKWNE8bNtvEzq+/AYmRKMaQmU9aj1f7MgScSA9JjYWx4D+1ED8biBEBC7hSkeGOrWjlPuXrHx1NfjcwmrXCM5lIWKv5f98z6uC0gKXpdA6pXm2AOSMLKR51XX
QZuZ6TfbbPdaOy05ydlmm+3Dz77gC74AZ86cOfX9dt01DX/cp+zTPu3TrnYTPmzs2r/rQ7QafXl5tiwkdOHhtrMi7/rZRDAacVGkVHTRTGVhP0XrIwGa9GtegzCDRMEocj/VWhom5RlY3rMjBvxoc8TtH1ZoZWGsfkxMtWTcjkWnxlh8rZVKnlBqJ3h5KQUbFfDz8i15s0ZWwM+zfURysMSQXHF13WcoTBgNyMQgQRjXQJ+JvotdZ0EuFPaeyhf579u+mAL7POjW9qK2pQCmGtiBA5eKtdnUl7Ndx00CWkSpe6hIXnYbkwZJCVWmuu4ryYJdQ1qa2QsL6gkTQMEsgcw46LcNRKppFncGttgFKruj1l6/AuAJW0MCti3I1/ZjlAvWZpArkBipdEKG1oGpty3XXIBFxxpUsC9KX/SRTAqrl8CdMrxolOch5yoAZeON1vGbGCsyEba06fy2bUDEMeAAn2ktVyzHqr4exVgDabb/tBv4s42a7PZjGBlWO9C9Z6Z1tCQMik7acgLw89KAW5nyTQCSQsdZ6Pyf3LMM1rEsLaw+KMg9VxOA39Qz6pk6yvbbMgeA5jTWDEsflNdzvAwz5kNhczhstrtj/cEe4v4CcbmHTuoio+s5mD5uAFqJbFvzPEPGhFAKmudYpECT5GiozzQSj+FEQBYQZiMy0JtmnFSWXzsHeFOJ5hjI5Dw7AVzaRA1fdxSAAH8lqcrLVU8M2awuMDVHenaPMYNdvSoH+PhxzzMByxmyXLKvx5pDh1HqG2Y35ypAuUmFRcl9gooFr6BVArP6p3pTE8OonRPa/hY/RYEe/b/63o3H9Y8DkMdqHthKM2rB2VMw76d41QKg9le0zT5Jzu/Df2brA/C+1W9UYK1SBfDWnJ/5zwFAklXE1Hlrcskx80ygu86C2jVvKfPscvsMgWvxJtc5fQgmudsLAN9HQu8AP5VVZ+lR8cNIfXOa7rutg9dMkjzymqRifKZUya4Ym8wDxEC5LvIce/n1LWBwF0OwNe/Duc/Mp1P5zOZey3BSyAkmle8Bv9GYzn6NRBjHDK+aAWwnK5b626ysEIWlTbErMpsT56Hn0rLz2m0pDewDpgE0gOcNuHvN9UPVp07GWH0qlTRN7tltpU0/VDb7VrPNNttss812z9k9AfjNNtuHq13ToB+FCOp6hL195G4P6eB+tmANl27nDFkJiFMnVSmIXABFgiiuBsbgwL5B3midmECwujO6DYEXs2opS620Bh/0zDOVm+KsV85o7ZBA6xUzSpTpNxzVYAI4CJN1UdahgGhT5mVY4AJUmvGeE7NYWvBiWBtQRwL4YbNGTiPyesUMy2FT2D5qCqp2DgijUIOvClbqYlfqZTDLRvLlPWgkSytdZkaiCoALxP2qzLsWnLOmaRCmCf4XUJYqINACZhoMEu6UP0a5Ju44uOvW1hvU46pkFCEjREJI5f7iuF0WRiAQgSJxSAqGal+UeoB6ZtzPIumZWAoza6AFNduvNV10e8BPs+YN/EN5RraknuR8+1iCEMyqK9y8beaj/DaQ7U9ljQAIy0uCHQ7MM+agSsdqTJd8djkVCbhQnmnNctcsfXsu0shsFn02VZptStbTmz6LPlt/ytznKqOnwRSVpkLo7PkpAOCOMISOA5p5veNY+r/d+1NB3hCr35PU8AMA6hZACPy364RNzYzrHGJhv/nPPEDm2wsXBLfAW0AMHUIICFlqJblgLaGWg2pB/ujGAG8J8vzIq73nKWfXlqaOYYzIFCaBTAvSa3B2ttmuEesOluiWe1icP0B/sEQ8fz3C3j7SpYsAAFqeAUmCFPplAfUh82d0ST/K1KBST5jrzha2uYJWYwIONyOOhmTM8xHZWH6bxHKfXeNzjTY/wNh9i0CI4ldtJWkosO/GOz8OcTC5AH6e6afzYiQyBljQhBlQVZ/KfC2VBk4lYYQTaxq2i5jVdHWmLK71mLEeSy1EL42adC6u2FYqTT01urn9A1a3LpP4FYR6
PtrxWwO3UMDHlll9RbKil0k2OdYcE45yFllHAVgUZAVMicBAg7TtLwIFvKzI6JkTsiKmwUDevmHSNeCvMb+mznsK+Atubtf705L/uiIpPTHPKBgZGvYdAdv+3eUSfaD+4fR563OscuhA8bF6qePXh2AvBQJrpl/xwaKAxVFBU+u77f6jEJkdeYxVcpGeNan+xjiwL3Pc/XcFPttkP3qfRgEuHX/GwkYk6RcSlDX55zoXlYNNSliNCSll++uZzoAm0KH+rHkGsyU2SPKFgH8xLnhtpht6NRfrz5IoWoGyHghszjdnrqOIxPKrO/tT/UKZO3JcVOOcjnslyXa22WabbbbZZpttttlmm7JrGvRLq0MGojZrgAjh8IP8hQea+kWdPUgBeXmulqNy32k2qwU8wEGCjSwsNIta63YoIwiQgEhy9T2oXhx3Up9sEbmeRUQCjUfAmBjgG9fMrhMpOZY7yRWTB11XBe8pJpOq4aBCs5hqg+k5VcEv8oEpJ1NogaucgGFgZt+wQR42zHTZOGlPAJW8J8DA4DgC5GQ9dRGYBtAoC8CQDHwt9SRc5rUGD+A+U4lGx8LTfPVs1247KuGDHJxRPBGg8dcSBQDUwImyFIACqrWAYWu6ryut0+C3UxAuCRtCg4sJQMjEcj36O5dp7velAJyPkygbtQTmyBgXAbmqiaZmjEono+jlPD3g5y0GQgQhpGydpsw9DSTZMaQtpe8K6OsBXV/HxAd9+eTKs6s1nzSzPGVwHU0JOKl8p9bRVICf7/8N159UdogP3DZgeZvxPGnumdyqs9f8rQJ4oanPY/UD6+c1N+w4e99kpe+qdWR/HRMFEGBLJTsh93hXivlqQgX1C6svZGBf7Ln9ju1TtnEg2YQxMJ0K00WPpy8SSbWgMp9uPLDr34B/k0cq+237hMG8yCArACz2eL8StM/k2OJENSunnVs+xDbXnZnt7tq5hz4I+/e/DovzZ0oN5BCQVxdLQsnybJnvvTR6Gvh7GUfSQmoni38FAEcT1Dll1em47n2nTcpYDQl3rqZr8S0jAwmL6OoiZ1Er2FyqfR1AxiFWdgBQAszg+3oQUG0z1lJygCTUECppTK4dSDwfCMjnmYXe98EwyHgqIKmypkWa2drTJA+oEsV6k4qEp7T30iaZXGrr+4SQEfU6CWhFMhUnyiKHzslEkbaTIIDiB7XG+yg+CqRvtF907K1GP3deDGhJElsIJali13zagivtPvW3lxlvFSywe645OVUi4HMku+YK8I7IJknfgplbgJ/6B96P0HbuOLesaxgF/pBgUq+ASUFu+RGOOUaSZORZiLZ/1KClyVo7IFxZpK1pjcIYhLGXCEDAGIFIGRvKCKEAUAb6BQH+Iplkulc98ck5yvLT31d1xZs+M/WWMJGQ4/poqj5m3SmpZBc2vtRkQtVx95gDca2dfgzQbTT5IA1AzgZcZgGkOfmg1EtMmWudcvLDKGNUwsbdwBvwGNADQChrE/OJQ/G/C6DIMp+Q+xqBmPEnfVA9ak0yFqWxvjYtmJozkEcAYzn30FkN5Wo88EoJLiF3RMAmKduxJDro8/uhttm3mm222WabbbbZZpvtWrFrGvTL65UBUQRYNmsle+MkMC0jNmrdPlvx2jYa3NCFsl/0jg7w86bZkwrMUC71yXQhCwALkZyKeQBt1nXNvnFtdfxoWPGCaX2EJLW06oB7DU5Viyf5vz0v3lgXaVp7wS3YFOgDHJCRS/2qYVMAVpP0bAA/7UOV2dG6Gk00naRGSQkkdfDSOsz82wbL7DPJQtfrrUEnvQY+WDiVje2zXSeznnMtqeR/3i7zq5qIcEEUbLffMuF3LOj851PBliABOcolq5rbUAN6x5luNqZs7LfyDXdwJf0JB646CmaRw6oBv9bIdUTXXAj/bHigr75+pUpfRmENtoBfcuw/AEgu2zlRZnZYYsaknpNmmPf6XBIQs8jd+gCdB6t9bSZfu2Qqe9/3u0hUFSZCAeN2AnB6
Aha0UhAQpS7NyEzZ3NT+nAT+3PsK+GtBqalgltTJYYBezsFLJhOV8dXJeeZuYeOwgn1bEp/HmPXoxHY6RpPI1BlE3AScFew7jmmyE7SnYNcgB5dw4QKkWy/f/pnlN9s1aHv3O4/+zNIAP5PwdWwaksQDTTpSYI/iAjmuC7ghn/v5QdlV0Q32bWKKt5Q52KvqC1HVFwKzuYMCEE7SU2WYK2lPtdihDVSPucwxFlRGmdc02SVujxK8C6AG95qar1aPVeSBIRLB2bGHMwoIaJ9ZQDw4Zhr/1aD3ZmT2z5h5vlMLRECSGl6hsPeyy4zwCW6ljnBJfguNLzZ1iTQhSffhj0/VfO7Yg1ovTfr/subntAog2zGuKzss1vtO0kb15afuOwX5pvaswF+SeUV9tXZ+qe+HBjxqAax2/pX5mQAD/rZAlV1+Pty2x6S5+Hs7yDUh9/nlgAcDdInQR2aIJXkmlXkWiVjyk1CYfiLrqYy2IuW5zfTTa0CtP3M5a6Q+y3pwQkq86pQdYN+UX7XVIW6/ymyDrMdyAXrLPkeApF5jKu31/W4Sw/LMa23sjSUlMODHdRWd7xzAKgh6I4v5hAB9m7MqhjDwn1DWFiqpbuB/C/jtGrDNz83bNZABUHRM5+D9QVfDVGq/exBax2hlKCrrb7bZZpttttlmm2222Wabtmsc9DvC+MH3ShY6S33S3j7C2euR988jd3vI/dIWfGnvDHJcIB5+0IqG58UZ5MWBLcKjLDBMvlPBA2F2aI01Ax84FbKS0Oy7YJnfvp4MrYXNtz4soIHIBGIYmEE3bJCkXp6Xz6TIAXZ0G2GeBJY1JSoMmuPAgylAwIEVeRy36vRlQIJTqYB9momqNa1CsMC/XgOEaIDAZN2RlEA0cDYyFjDGkl+UU5FCrJbvBoDA1awrjDPP/uMAVt0lV1Rzr1noTge55L6gEqBo+1aBQ39PGaB8GYDPEo7lPPw9WdoAZvu5FioIBzteCWppQNNqf8jK3kDrUIJcNfsuGyjXAqgt+9KbsfZ2dLkBLRPfZ7ffEvjgtmsNEw12tGy/lLLVL+kzIYm0WRR5MyJYHc0+EPY74udzwyzbCsxLJTinAKBJ3gowD0ACuRMBIQ+QARW7Q4OV5J4PrdNJCAaWT9XZUYlcSK0UpAgaNyKf5ILbxwSrPDBtWdb2XSjAPJEEoh27T9sfOiQF73zdPkuw6AvQ5+QvW0C86rI2ecGZ31zBesg9lnN5boAa6NsVwC3HdEFagMchrT2jG6VUJZEADajX1vKrwNsrGHdO2TiYd88FxO7Jfc92da2/6WGIHcHL2VK3AB2cQzhzHmH/DJKXrg3N/a7PAoARwcbzMlYLu0rqVA0ypmdAALyAPmVEStiAGXfK8tjrAhYdgwfLGHBu0eHsImIvBuwJ0w/jmtnYaWS2X3IsExeMzySM49CZjCeDi7A5pjUdZwjMklNpamV2GcjnEkZ0zsjjaD4VgK05gzoZb1QeNbixJHRFclRe65GB0NWYsBmTKVBEIgNd+kjoMyGLSkDmrJ5azQLlb1a/AbTtq8hf/5le9ST78sCCfufVEKzvQwfCwOOsZ6Rbn41bKhdb1oI5E4kX3jzAp34VX89cAX8EIGcFLJ3jBscCzHBzOK8RlAUXCM6/nmBE7ZqXGyAvA8Z2x4R7Uf00iwaEr0/n+uK4pLP2I6/goIwqv00gxVIJB31Eyhl9CJPKC5oAyfX8ioyn+mGaeKVSngoUa19u9VUDkFb+iYxX3j+p7hGv8KI+T3O/bMm07wL6phKl/F8F/5XBKvvKUioBAMKenEMuz3uGyG268UiBvtXA8sarkf/yM1/GVfXle3DiW6QC5GtyRCSa9IXGzEDhmDKrnIqSQWEiChDdJsR56drGptaVANhPDIH94sWejcWUgyWRaCJZpmA+v9U1zIXxdzUwv9m3mm222WabbbbZZpvtWrFrG/Qb1kiXLiKkBHR9AaH2DkqWIFCC9sO6zhj17JepBaUzzQ72
AIpm/JrEk0hGxsA1+pA2oM2qAvhoECaf1slT2UwNBGmgQ8E2nyGpi0d5n4ZNDbRdrsP8osux9bL0ZXX89jfaFl/TKkQJhCugEUo7jQFUstQnzRb0GpGQ9bL/fiKDloE2FxiBBGAUjN2Ree4Xxok/ABGVOiqoQbkrAfxOEs+vpDi3vqvbrp9NWbvw1cBdeV8DfhoQ5N9qFIz/1AxAPSazWLVmy67jAjV770r6ZuqcpgBLDRZrgEPPo2X7AeV90uxxOVd9r9JSXRDAT2V1tZ7mRIDOs/o8SA9lbvCBpRNqsC8H/dyxOzwAGMCBFMCAP0a2IXSCtB1YUqMgbAAFxEMdLNvq8LS9j5wE6KO6fpBlrCcnPUdFfmkK7PNyl1rHT0FAHP9swX0/xb5TEM4HMO09CE1+/9Yz2t6O/rmv5oHk5gMNUFNCDhPjGJV+94CfB0SOZRfMNtu9zGixB4rZGCh0cE6SqpagM+eR+gMG9FWiN2mAWyTh4gIIEuRGARHKWC6JHIDJVLbzgDJ9YmDAYD0S9rqA/UXEsovM6vNygUGkpAmgsanh1wTvNbiszyiDP6V9WsvPhsK7Msc3rKRq3rB5Ytz+XZUQUsZMHn+YJT2OWtNKmH4A1zoUwE9luyu2XSZsxoxAGZ2w/Yi0DjBsANT3ScArljPfnsM9y79qPsp0pTZVr5hbqD5bmPZbbRwu/m9Vk82Psfp/a1OgjDu+nosCfltApiSSCEZqstHVnCN9YMw/SQ6sLNX33tY56vvLzRHVPu76fNJeN3+tvPyn/34nkYuE8RrEf87C9pODbCYAoD6EwtCl9j6B1QJUyU99/nnAqPdnLLG2Xz3Dz/lLXonAP09bawv/3skBG8P/uMTKSb+Mk7KyAG2UM6u3DBveX7cAYn0OKTtfN8GArU3iZ3yTuIafAn7a16PI5/vExjFn7F2B72FsZj8gTJ1n299XwoDUdWWq15XUsZxyULB6ah8UXNvkVpB1gfbL1ZD3nG222WabbbbZZptttmvFrm3Qb71CunAbMGyY7bfk+jPjwf22WG2krDqgqi+lgW2S4JRnaCn447MKx1RYV5rxq3AbZ6wCNK4Rji6ChhVo7erJaLbk6tBkSfPRJQYOZBFYnV8LsIXA24LBBAtAt1I2u6wNNKlkH1CDem6BnaeCU3p8Zfv5Nkj2JgMZsQADEwtsDqToopqBBW5Ls37O7YK/Bj9YDlQyREOR/kwStAlEaIE/yOcjsCVl2RaGv6eyLn1whf8v32l7fXBqEhxzbW+lNi+XAdsulvX/jevuKmtfgbMdSB6hBEcV8NP/S3BJ2zrdJm2zyXTmK2P46f/6G80mt7YLe0Qz1LWmXx8AWhfAj9aX4Bm4CoB5iaLs5G4teDNOAOIAEN3z4Z/XEGE1aFLkengiAUppEClJAdvyhAQoUIFNDBZGTvBXJgs3tmzvExv0uUMNvln1JRfk0no/fjufMZ87ZvNZHb8QC9gndbL4Gslv3cWfqofk73n9vwX+1LwMcHtXbgF9EwGqrTvZM6IV7NTzvJzEmBvjPMM5t2Pfh8gSLksOOfH+Z7tvWlieAeWiNBCve4AlVqW9c8jL8yUhIER+bjYrYHGATAEDAmLgcSmlMjdp4pSyhwhcr29IWjuPn2Zl/IRA6HNAHzKW4q2eEVZfqRMW0AfCQqQ92dfbsKpCKrWKfS1kM3k2Gfwp7VMwSOejVjFgyqoxygONxobZ5sN7/8oSRDSpQscOGX9GaaNn+m3GxDH1XOT+kvQboPNoQh8Im5TQpYgYdgT2fbsyDMTaqgd3jD/kmX1ADfbpWwPNppJYspNBdYwom6c8M2sXy8+DgS5JwwMbCiDkXPylOgGLj0gEUCbz9dVvrOA3x6ZqjabmiQp8dmDSDiYbN6jZT0rc2bt+59rm/VnvIwIMugPsA1dNFOCJn8nSZ7pPxYb8/J0zgMj+954l3G23h1CYfAEFBNRErBgKW/LY5CXAmGi5mY30+mcdn6LzV2Rd0pYl
qCRUPUMXsP6nVm69AWsrENoSqYKtKfM4ltroALOR+4WNDZmCPNuO6SfPuTL9VgNLem7ELx6Tq0mZWML+ON+/VTtJwFayVAKsTMWkHcdWndy8JLVyIyLysJZ1LPu/yK4udQvSu+dU+6WszS97+FO32beabbbZZpttttlmm+1asWsa9Dv6wAVcjIfYf8CAeLAB7Z8Bbg+I3QLoJQDdLQGgqidVZXsCW+y/DBRmFIAhlRoKCiwE8MI/ysI0KoAgrCFIjT4DEDZrrkGYRqRLF2Xhp585KU+VygSqRWnF+EMTuGpZRs4mWXvHgYQO2KB2O9e2HEYBGkZQCMgbXrxWzD/Xp1NZsBao8TJhEsSZbpsCFYm3acAQEvBDa31pZEODF/y+7M6DXgUgKEzAFrDiz2gbTGjP1f8FtgJ9PsDUZlN7cPKuQI0mxZSwxZADapbf5czfRgr1hOCBuBaocWDgjniiB/s8i08/s/No2jvmPFm/T7ObNxLRmgL8tIbMXhew7CK6wJKei8AScIuoLL8C+NGGpXfTpYvT7NfmWavYr77j5B62MUbZuDEy6y+oPK7I4I4jf+eDSDkBFAtA7uTpSoelOsDc2lSGtg/WaECqZau53+fm/4rFNiHlmTutmRosG9vLznrTupw+eKjvKsncY87Py+huWRuYukyAyuYClVMzsC9O/tZA4ZZxosFoq294TU+1s32YWU4js/r2lgz2Lc8AiyVyf8By6D5xQBlY7vkbUxZZ5QLwWaA2ZZNrVp9qSAxYAZo8FYAO2KSIjbCk97qAg1HG8kg4u4g46GM13ncE0GYNDCtJtCrjxmTihAKXFTBSb7LFdEPeWddvR2eW2lY7zEulm08UIidNSG3UQcBRZfmljKqWnzJ/gIxxVPCUsIGANunybfbJbMpe25Who8w3tV2A365kDO2bqTF6SwZV+4g7q9xrjfwpUCdc2PbWZgfs5fJ3a35SUJp1O03qswX/AFjZNAVKL5uCN8FQqxJ7Wh9A66f5pCLKIpOaHEBMxX92YLY/d/YRc+lT+9Kdj0s4U9ZZlRhEQOfQoIBSt1G717Mn2+S2gOJHk6yfNGHMy6Ma4NeuzxqfxZL/XC1MA8vbOsNefaAZDwiDNHK0a9LWrjP/LASrTT7J8FN/CpJE2sofT/iMKkc6DkXSU593lfU8EsBvGHMl7dkCzvqM6edcV1FeodzLyrjcpcih40H1dQuMun6qAW25bxy7WcdAc+2A7ZHUxuyy1k3i/2e5nzzwN6Tp8Wm22WZDVfZkttlmm2222Wb78LRrOhI5bgYMqzXGcweIS8mgXB0i3fF+kLD+CJBFdsdBafm/quWQ0lZGsGZ+lxoCJXgFAIlK4CcQ82N0kUppLHU85G8enJTnelVAv6NVFdxAGIEk9fGmTlqDIA6E2JIW3CHjWVlba2zCMhz7UJlJrm05MfA3CfRN7a91PNtghc/a1k12BenlmhlD8wqYNAqETYFfIzx4JUBEZiAroQQqdp6btnfiMzs2jmffXQ7wo4JjHmt3F/CbKkk3+V0o+4rCorySWokt4KdtLPUxC4Cnx5uS7zQ5o1y24d/Xx9M6Mn0kl0nOQY8YiBkFwxpIA6B/N2uk9Qp5dZGZBiLfuSUvlSbA9CmT+pYctGQ2LfUL+073VQU0nVHOyJLVv8VY8//79lnAafr5qYAqDdTI86djWG6DarpPYzB2FWhv28RSX1TH0ZZdANTB4sluI1Rg32TNGA/873r+p9h9V5CZbvtPqe6HdvdtZrqCvT6hAcc/9/eUaXb8Pbn/2e6jNmyA/T3zo7BYIndLpL0zFjy3mpsaoPU+FBjQH92cq4whAxNQmBqje0B43AaAgGXMUpeKt+0DS3xGIiy7YHXCoo6PoqjQAn4AavBnYgCaup+nguHB+X1AGcssgK3AhO3YBfYn5grvg6m0p7GTdEwNHdKYt9g/YzM3jtlawXMncX0vrtdV+r4wtbbPT83k648x7TMiFCXqHUHGLdbfrkSU
nCqgwGTk22OrNOuOuYq3KXPh5DmiAFLt9Te5zkxIlKV+cgFBrtTvuaxp8s0uRj9gbNXywQTDbyJhR0+8nYP1VL3f2vqfU+oS/n5Q4Ci658DWR9nvgypFCgOSaDfYZP1y3FytfWAJS9vgYO2TEHYqjwDyzDb3pAJ+powgrMIELWrYXJdpedHyddydzOfuVfWLFdTaSKKbAX6pAH5AAf6mVDim1DpaxmXVD/rXo/oTQPVOuxLm3yhrxxB4TBzHy0YjMvh5TShriM2YJqVk72mbfavZrgX7tV/7Nfz2b/82XvKSl1ztpsw222yzzTbbbFfRrmnQD+BF1OL+1yPe78FIF+8AaSbhZg3a20c4OMeBdwm0ZwDoD0pGLEpgQBddoyy0VkOy98r6U/AnygI3Uwm0jBmIQcDFtCjAWE4c+BdGHwMKGwMBgRIMohCROwmK9L2dZ9642mHKPhJAIq11HwlpTMhj4mONChAm+976Tc49LDpQCIiLrvqc+9Zl0HbSlm4B6iTAJUXnkUZQCjXYOCXD5DJeTQqw29uWZ3KWueH8iuGyAftWzkj3YQwDOPCvWX7zteXfayBR171TDL/L1bbbOo/Gdp3J1LYqNUskLNOcJbucMKIEdBSYzijMCc9o9LXtWotxd32Miv3nyFBtlvGuGJhK8KiMmwYsU9oG8VLTmQXUy8du18sF8Qy/gz6ij4RlJHSRsNfx+0UkZvYNR6D1IWg4QthcwnjnbchHl5DuvI2f0fWqMP7UrgAw5+04EE4y/uQQOICuNUjlmaJuAXQlGOWfBQtOpwS7Y44B8ra/mA7WbDEXpb0cdOaAfiXXKcEzY6y1bSRh+AnQVeTTSrs0KLiF/WvWPyQYJQCfBe6dRF8V6JRjk0h1HVc3rxo3Lhck8gzr6KbIXftv5g8bYwCuwQXgqB2UZpvtXmzDB24F3f/+oLPXs5xnv2TWWb9n2+RhXWqfxg65WxrIB5T5zaQnU6lTdTQmjAlYuzmK2XoBXQCS1AjrI9ej68NoCSAs6Um4btmjD4T9nrAXGQTUsdzqsgJVUgNQg0WUswD3weZWojK3JuKTacGv1hdQtpKBfd6foQDkCWl1/b3ME2Fvn1l9cWFjbY49clxgyMB6zCaFqlJ/zPxJJns9jBlHbqzrAyFQwGZMFRPaAy4tk8fkN3Hl9bIoM6CQqAB/aFlbei3kr47vNraPQ0mSm0i02X3wifmyYbwBChrovOQAsOxYac77oszy7wlZanXLNc5keI8xIlGY6SnDak5vAUmXM71Xnbyk1QTXGnMkfncs6wM7fy9jiTIXqX/oQffWfJ1cYALwk5ucUGpn+hp8rT9cy82XBAB/T/H9cReBU00OzCy9rWsEX+OvMOxjXWe46R9tw2QiFbDtd8ABf+PAayDH4rcyBRPsP1ZEWPNaStd7XV9YvZYslapafszwG+15Xw2jSXuWa9OMT1o7MYhEstQ/1ZqKkYTthwLAqulaCYHXtBmiaOP61/zNXesx/fy4hLip37T7c8C1rtuU6aySp8NdOMRss3042J/+6Z/iJS95CX7hF34B73znO692c2abbbbZZptttqts1zTot7juLPYXPdef0QB8GgVMYzm9vF5xgB2Q4HDHQZ5MNcNPFs3K6Ns46alxYoXMUj4ZSIQNMlLghVMfArp+WRbvwxoIHYKye1qTunpbtfGA6aCHgoQO8Bs3A/KYkORvBf7J/1XbRUqQQkDYdAgxIG06UAygEEAxIMR60WryU8C09Gfbdg1C+Doayg5yC3IL1itbCDXoZQE0DQbp5z4r2i2wp4IZnlXmAT8N9JT2y0YgEDIo01ZNi9MA/C5XI1B33W6lwN/lTLOEgd1Bu7scaAnT+4lEHPgJpTZLW8+vZJeXuk3MVCi1+o5r6/YxhbkQCCnlRsII2OsiIsGYIH1gwK+X4Iey/AorZASNawbkRYY3H63473qFtN7YM6XPhT4nXlJq0kTWM6eRwb0Q+a/1azB2h8lA6vPCB6r3
1wZdWrusfGWeZgnr/wODkBQjEBwsPlE3CXDg/i6GAhFy5qBpQh1cmvpFFRxWwE+BhSmwEyK/6eJsmSaSA/wYeLlMdP3aywy3Y00zVhUW8XSw82pIUKV2fLsH9j/bfdRUSnvcgIYj5MVBCaDrGKSsmZwMmPJyfp5VlHNJntoklaFuJbY9I4X/9pFHjgNEA/2Mwa0yzYGwiAEdErdXkwSA6lnfAoT0+5wQQ2CAx+YwAXky7WQok4Ae7ThmSRMh8jhEwVhIWgvZWMQuMUSZSCVBikGKTIGD207eU+XuTNozNXVuU2aVglzX9xql5pf1c8Pc8n1vt0LznE/1RysJujW2GwjAEtWVJGAarpx9fYy11zdfxi+csno7RRuI5WtTNrlPrfMH1ACq2rEMttZ2svsa9uMu5puCW750Acr4f6U1qY/ro+L3kqxzCtjXhwY4I/4ggxzjj5zMb80i9M9UxS47zuw8k0l9VjV3Vcozdjv7Z6cddy96xh+BQWuaUBtp9mGSpKHjtZP4gNQvkJSNGMJWQqL6ytpnKmmpzGk7XeeG+sS+SJAX++kt4GdSn3KFDRDXpFbu4enafhMqCldq7F9eQRkIlAQqD8zr3KHj3YfaZt9qtnuj3Xnnnfj+7/9+/NN/+k9xdHSEv/t3/y4e/ehHX+1m3eM2jiPe/e534xGPeMTVbspss80222z3EbvtttvwgQ98AI985COvdlNOxa5p0G/vxpuxf7/7VQBUTiNwtILK6qVLF5nlt0jMsgE4yABeiJlMi2RZDmPGWl5HQ6rqUOhyJMsiTP7DmAmUMsZEGCKhC4T9vbPAsALGtQTa13UQWa39TAJAWyYBes2Azps10mZAGhOGiyvklDCuBwH+RgMC9cV9UzP9KAbE5QIhBsTlHv/fdwiLDnmswT+K0+mUJP1swauuF1aTYy4puFeBfdtZt5ZhrKecSwAu6KJZwb9jWD16jXgfufmcF9S6YPYBkTHxojijLHpT3l0vY/K4x7Tpri7k2sNkObbwErZAspIJW9fDs3ZLlylIN2WtqKyCh0FkPFtrAT8NKLSWpc+V4Xc0JKmTyQA7UDP3QtDAxWQz7fMob4JId4ZAWEYG+/a6gIUGhyXTeaF1n4Y1B4jHtdXcTKuLzMJdHSKtLiIdHrJ88GpdgX4F8Cvv7TzlxtLPYr/h7faY4UcxMkNG6jdRiIXd4TK+t8YF/+y2Ul+7TJ6ryUz2ljVcgX/JwErLpJf9tew++7xhVShAzewADv5FbAcgW5ZfFQRuGX5T5+yZjBpzbCW6vDX/b9Xk8/ttz1veW/IASiAOqJnELav40njXA2OzzXY1LY8jxgsfBC0uAWceYEAUhHGUe/GnKOBIJOfaumD8np+RtflWhd2neVdWawqcNAIAfWAwi8dxGEN8EQNiAPZ7Huv3OsICA2i9Am0OrZZyZQakNYFllXjuA2IIDOyAg+E5AwSuV+jZb8pO2ukH6LEiJ5gpKKHSfjzMjcb0LrVehd0n84HW9NskL/HH/bca+LUZC8NP59Va7jqxKnsOWwoIJIk5ljSzI5HJmJs6Vvp9qM8m834Gy94n4u+05rUx18ahACc+maOVURRJ7FKHetrf2/ID27nTmJbT/lc5NwET8nSSFhGQiFj6PZGVfJ3yT7buC914R/u3AE8PpoxernbY/r0Ht+Jia266EsBvGnxrz4nkXuH3iyignz4/E1K6BFkzKQAr7dsksmRKfxhOpCxKDsZMbRn+4v9T7Oq1gN9mYr1hQLoczz/TfP/dtUQhSjCJdH6mBzlvYftt/YDbRIs9SwLN3Z7U7ZRnH8LGlPuVZTyzsNqSyVkq8K/XRt/3E3K8gfh+7UKRQu7Nv97ePumFCDxaRWSkTJO1Kj0wfVm/dKJkha4fK9lT50sy2CdAJ3Kp55d47tCkqtlmm43lPF/96lfj6OgIIQS8/OUvv9pNusfsgx/8IH75l38Zb3jDG/Abv/Eb+Lmf+7l7Hej3jne8Aw94wAPw0Ic+
FIvFjsT/2U7d3vSmN+EzP/Mz0XXXdIh7tsYODw9xcHBwtZsx24eJ/dVf/RWe9axn4U1vetPVbsqp2TU9IlLXVwBZPHc/oOsR9s+UguF7S1C34M9kwUjjmoMxOVkQK/dLUFyIdCfXkekiVUEBwIFJgFuYsRzUQBldYoABCFjEJbqej4VxbdJ+NGxA3QZ56Fm2E0Ars7dlumDqFrYtL3UHUAwYNwz4jesNg4HroQB/VgfQAQcCWOQxgWJAGhO65Z59t/VXmX4hMBOo70WWsGcm5d5Sav/s2QLWMmxV9i8uSl2NiaxkXbt5ZmVyGahb4F91M5RAD+MMjNoFIgacfGCjWSQasJs5q5WIM7n5l8TZ8YHbsottt2vduZ0lX+6f1lr2na8ZkzIHI3P1PS+EC5MxW9CgBfzaYwgW4z6fPi8Kpf+j9IW3LpAFDq1Oi2+j39cxAJ4y9/xn2l7P5Gt/57cJASb9FiUw1QdCF2DvIwEY18AgYJ8E1UzCMyUeO5JI5KaaOTvKMxQXHbAp4Dmw/XzpZ3HRIcYNM/7G0UBy6np+XuJCgj+F6Yc24BUggT9I7rX7/jgA3AfCIL+NTUAVqAJceVhzv/YcpKNxw381Qz50JbB1GfBd79kiT0tbgUgNvqUsgT4FK30mvzsHe7+DBTnJhpwKRnnJrmrfWZg5XVGXcu3yWfejMsIdM9yAd5Q5YrX50EemWLbunjvuPbnv2a6uheUBkEaEg/PIy7P2eQaQiPXHx6wJUAzmsUTn9jivYN/hRuTpxoRlDBU2M+aMIOw0hQJS9mM/P0d9iDaWn11wUkeX1giXLnACx+bIgUiXqfdpJ8U+YeyWUEk7Za5lrY/XuhtU5tBW2rAkOnUshZ4zzzk0AESgHAFRWwDAPhIVkAJRJOI7lfVMDjBlZjzLemasxoSUMlbDaCCBtZXJ2kgBIpGqrJjSWF9bzfAP3zW5gAvqQ7WPvYJ9JAM9QWoBuj4xKc9xY33uJSzhwMDKpthA/q++32KgX8F1F9OjpszgqvpPasqSQoSBIRHsDyn4S1AJRCqn3YBgNm97CU9ta2NkcqfZZD0p55rNqCB8t0COC5l/jvcx1by/N+WXqRQ3AEvqIsCUEjqVt/QSvxOgZNlfQI49KHRY9EvkELAhMga8MleDeN0jqLon6525awygQqN8ck4rS+4TmODueb0XRVlgt8zkNrONEJDHAYhdDfw5yc9ynUWKXMBPAMjdkiWTJelrHF3ikPjwrIhR2GUK+FX3aNNTmgCnvnEfOFEiaoKFrgOaDi7uPQN/FCbqc+5KwNI+uQzwx3+DSxZllnMlay/KMHpPj0nrmUryR3L1TK+CHzL7VrPdGy3GiD//8z/HR3zER+ApT3nKfZLld8cdd+DLv/zL8brXvQ5J1q0/93M/h0/+5E8+0X5TSvhH/+gf4dGPfjSe/exnn5jZkXPGW9/6VrzoRS8CEeEhD3kIHvGIR+CjPuqj8M3f/M342I/92BPt/75ib33rW7HZbPAZn/EZiJdTTrpC+5Vf+RXccsst+JIv+ZJT2d9p2qVLl7C/v3/ifbzpTW/Cx3/8x+MRj3gEwo6kuKtt4zie2jW9/fbb8cM//MP45m/+5lPZn9qdd96J173udfjCL/zCEwHzr371q3HTTTed2n188eJFAMCZM2dOtJ+cM/7wD/8Q/+W//Bf8v//3//Cd3/mdOH/+/Inbd1+3//2//zee8Yxn4LrrrsODHvSgq92cU7N750hxV0yAKIQAWh4gnDmPcPZ6hP0zoH7BANViybVRdEGo9aI8mwSwhYyyl3RxpEERq3khh9bsYGUrbRIwjBlDKkFgOACMugUDlQqUKXgWpeagggEx7pYNDA6AayyPYwX4JX2tB4yrtf2v7L+0qYFBrQNYgRciP2hgn7QPIVpdMuvnblGdry7cLMNW5asaVpA3XXQmKLCaLdOzAgd3BHR88M1vQU0A4XLyS7k5nrbFpGaaV3sOd0X+ZRcYVsudbQOC1s6c
XUBAgiipyN8A9e1ikj7k2Hko97sCeIGEBOv6ThmCUWUyDXgrgB8R2Ss056fnESQoUaSIXPtImRSFvafSnSrRuYzB6vYtO/6730Vj8/WSzdwFmBScynrSWBhklp3cMt6aWiQtazaNAgSmVJ4rDxI2Urtb11yfIQ3caWBKgj9bLwsKF6ZdxbYDymdAHfyaCIryV9OMYv2bN2sXQM9VoLayu+BwtsGMNnjiAbYqAOTB0Cq4K+drAeRRAqUuEOkDefq//0w/n2KdpGR1pgAXeHOAH8vu8V8Lzrv3ytCZbbZrxUwVoZeEBAAIwYKwKjW5dgy0UrOvAN9jKrKUR0PC4WbE0TAyg0VYfkmH34qhBgP/gjCMekne0NeeAn5HF0HDCrQ5YuDP1T/bCkBP+Q36nKOANzrXeHaMfwWdo7Y6LlT+TXbjl0k3E1ntVAMm1EdSCVUZ70tSgQb7S8B7I4Bf6f9kktnlVfyA0fkMAEzSr/WFaOKzDPU1JNHIvUbnF7XsLd+/U2OwAn5TtdPMtIbuKQUPpkxZfgqmbMbmlVLFBtxqovo7Ov1eyUF3+LDk5yA3L+VxrBNZ1BcQVY0Cwiv4V7+sndbmyzSvAYTV54sEvnYK+I1rTmrU12ZVXutD/jusuH7ycMT/pwF9wFaSQMqTHLnpvjNG7YKBtG6veqFbIAsg2jL8zFyCwBXJenrgr7pOqfps0lcid71CJ8levbF7xyzrHgfc+mfXy1n6shNaJ1VlfbcA6yA+P8pYRuTHMTkld59kXAGYdpn+Mhl5U4OR+90BftTx+rca/9zYaVKmbp1ja5wZGJttNrNbb70VX/iFX4iv+ZqvwWte85oTs/zW6zXe9a53nbhd73//+/HGN77RALqT2vnz5/E3/sbfsP19//d/P5773OeeaJ/f8R3fgRe+8IX4rd/6LXzd130dHvWoR+HjP/7j8dKXvhRvectbMAy7E1pa+7Vf+zU85SlPwYMe9CC86EUvAsBj1y233IKP+ZiPwctf/vK7BPj9wR/8AX73d3/3rp7Slp1W/5+2fdInfRJe9KIX4WlPe9pd6ufj7NZbb8VP/dRPnVryxHq9PpX9/MIv/AK+5mu+Bhsle9xN29/fx3q9xqMf/Wg8/elPP/G1fd/73ocv+7Ivw5Of/GT83//7f0+0L4CB+a/+6q/Ga1/72hPvS+2Nb3wjfuu3fuvU9ve7v/u7+PZv/3b8h//wH/DSl74UD3/4w/Fd3/VduPXWW+/W/l7/+tfjMz/zM3HjjTfiK7/yK/GGN7wBR0dHd2tfr371q/H3/t7fOzE4DADvfe978amf+ql4xStegRe84AUnBvyGYcD73//+E7fr4sWLGMfTLcicUrrbfd7ab/7mb+LP//zP8bSnPe3E+/qTP/kT/Pf//t9PoVUnt2ua6cf18BLCues5SNUp+6wD7e0be8MyqNVyAhJANFiNGspcRSESkAkgYfmlnDEkQqLsgiW8OOPFUs2/Igdq8JuymKF+wYGlNIKGDQfVB5F4SiMvxj3jJo0WhCcAiJwhzvKAkf8XcMF+M9agQ9rUk2grTRi8TGEIzEpaLkAxoFsuBNDrQYslv9d+XiyNRUmLJdDLwrpb1uw+L1moNf3ugrUsPC8bSMfsi2UwWZpGr1EmZREQRnCdsTGX6wlI0ESlbRL/TmMSGgxr2WxbmbA7zDsg+ns97qQkpvNXmHVQPtcgXJJAgTKM2gxgb5Foi5m3+9gkxwUCZWZ2SGN3SYPusgAFFoEcgBgiIiXEMWMMGX2mLYkeBf4saKEgoErAWTCKLCisIGQvgdpOgh2c6cwBG9qsmG3gWH7k+lafK1osmwTyMAngTQF6ALZqYhqg38uzI89M7pbIi30OAMXeQPFK9ktYEJRGqf1XglRVtymjl1POAYqgzBnquWW/hMgBuyByo5DxxgF4OY2gYQCFociEdrDjct0qPu4u5q4PYBXmm3sO9OZLnFHOxwmcudaH7eCZjo8+4DQBSpKyDvx2xznG
rrarBp0oy9iFjm/inJARZIxQdhOM5aTgH8tw1fVmLq5OZzFzV0wTJ+7J/c9237Vw5ryMTwdIy3PIocN6KM/uRsG9XHyjTWK2TgzKzMhYDQxKHW5GHG5GrGTM7ANLd2pQN4QAjAl9DBaY70RiTiU9IxH2O5EXvHQ7aH0n6Ogis7ZzAoahMOg0KcJYOhFbzGhlUAkYFUMHOGaTjmFjpqpWp09w2QLIRHow5wSrexg7ru9MoRrPKnZL7Nlv6pbI/RIjdcbyW4mkpwKn+v5IxhoF+9bVfMSeTS91/TaJt+t96WWoX0MVu8v7ENoHbR1kHdeJCJlySZBLYDYcTxKln9NYgD4HBFbgFgDKGSltLwbzONbMvx3+X8XyU4nAyS11fmLfZkgZq4HlFFdD6cdILA0OAPtd5EsoE6L6UgoUt+yxSdaT3gO7AGjAWGckNW2z+v1xv5yXk67cJGaE6rVqwcepBDKWrt/dN/r7oOfnGX7iR6lagl7bLQDNnaeuDbA44G37JbrADEWW0HX3XN6WlDTA00AhTiL0ZQKMjSq/SXrxmz6xa6RS4uNQJQodx1Yz30J9hdCx1GdOVoPc1xf0+2Ip1t6SA3K3ZJCyX5aanTIeblzC0DDmyq/QhAq1KPXlfYdFAkJglp8m8AV3v3rTsgEG/E18z0xdB9BbH6ZJ0J5ihJeNN2YfUJJeY+RxL3Rb418OHdYbHv8GTapNRcJYr/Vdrk9+Cjb7VrPd2+yGG27Ar/zKr+BJT3rSqTB/XvnKV+IXf/EX8V//63+94hjHlP3ET/wEvumbvgnf+Z3fiW/5lm85FZnL5z3veXjHO96Bg4ODU2H+fPCDH8T73ve+KmB8xx134MKFCxacvlKpyOuuuw5PeMIT8KVf+qV4xCMegc/7vM/Dl3zJl+BlL3vZXZYf/T//5//gUz7lU/DkJz8Zv/qrv3qi6/CVX/mVeP3rX4/P/dzPxStf+Upcf/31d3tfAPDzP//zePOb34x//a//9Yn2s7+/j5/7uZ/De9/73lOT4/w3/+bfYLFYnKi/1IZhwPOf/3z81E/91In295M/+ZN4/vOfj3Ec8bVf+7V4whOecKJ2Pfe5z8XrXvc6hBBO/Lw/8IEPxA//8A/jh37oh/CQhzzkRPv6xV/8RXz1V381/vIv/xKHh4cnBuTVvuiLvgjPec5zTmVfAHDLLbfgl3/5l/Gud73LgL5XvOIV+N7v/V4873nPw9d93dfhiU984hXv78KFCwAYQP3d3/1d3Hjjjbj55pvxiZ/4iXe5bZ/92Z+NJz/5yacyjt9www34gR/4AXzSJ33SidnFOWf8vb/393C/+93vxM/9D//wD+PHfuzHcPPNN+OFL3zhqdwn3//934/HPe5x+JzP+ZwT7+urvuqr8IxnPAPve9/7TrSf//E//gee/exn42lPexp+5md+5sTtOqld06BfWB6ADs4VIGqKuTIpnZOnay6gXgBzYGcHqJezLYyKVIrUD4tkmenYONkiYtYchYjcteuqfgvwI/QM+qVUAECRB8y4skwRcoOGgn2h76w2WdxfgEJAf2Yf3XKBsOgY7JM6ZMZOdAAf7XF/h+UZXsQtlsZSyp0uaLstdl8bjDnOPBB32WHPBRYUmGuJNQoUkSz+i9wnB/PaY+QMJJH5DAJ4qYxVBGFEiWeFHSCbdxB2ZRz5417JQk8zX32Ni9wGOtyOWtDMM/xa8LI6jp4bgJRF6NQFBct58X6SgqfE/dE6R3qsGHjbPgYEmq49CBQMhgNu/I+1W6WfHHhpGcyAyXtqoCoSKmbvFpvLDirZyMLERRoR+hFhTKAQkDYDKEiw9hgASZ+5sOgQ+g6x7wpwLuA5ut6APg14MGi+0J1Y4EiTASwWk1ORkfLtj8GdT5DfHAP+yYXREY5S5DFGxlEbT3V7CY5BaunYM+yBQmwDfh7sa5+CMSuoLnK6GQb+qdQnAdZukAQVExzAN7r3EwFlOYdjLZU6hWRsygDKwR27floUeC91
Zgrgx2wcf57HH3622e5V1nWskOBqYbVSai2Ar5+N4HlTg9MbkZ/cpOQC2BkKSOmzkRLnMbWmjKMuFP+KGUWHoM0RwuYS8mbtWNpFMSFPrdHduJr1/0w2P0StWyjbROJFNc+Dtfw4wONcSQgSgANgwC8nTh4AQJ10kBu/KwBD67IJkKNjyiDSdtx/yeSFlfXi2T9w7RtDRkhUtmnaTZJYY+x7lHm/9ZPqa9x4z+qDBJYZTJmv/+SIa2y/Zg52Y7cxhHSOVUqou3Zbu50Kxkz4mB601JQZ+1/6UcEWPnQWwI/9lWUHSxwDih/lJTDRJpx4v56IZVB3AX9NMov1h64DxrHO0lJQM9c1ZEcFprVNurlisBlWC+64MFZo/CiMAwM/HuTbAnMlkcpl8RpLMy5E6pbHlOikLtu60UEeUPVxMpHJmnvAL8cFS7OOxdfwVrqrAegVoGxZeq7vq/3s+MwAPopyrzZru5btB5QESFF5QOgwDsWPGO1eLHX8jPWWtkG/MWTpP+y04PrCA2W+z00oAmVs0IS9to+OlfG0g0YgNAC+qsbEup61jX/iEzPIxy9lkQ+pTqaabbbZin36p3/6qe3rW77lW/CYxzwG73znO/GYxzzmbu0jpYRXvepVODg4wIMe9CD0fX8qbbv55pvxile8Ao985CNPBdj5F//iXyDnjOc///n44i/+Ynze530ePvETP/Fu7fvxj388Hv/4xwNgYOEP//AP8bCHPexutevBD34wvvEbvxE/+IM/iDe84Q149rOffbf2AwDL5RJ//dd/jb/6q786MeAHAE984hNPzFhTe+QjH3liOVVvp1n3bbVa4VWvetWJ7rN/9a/+Fb73e78XT33qU/HYxz4Wd95556m07VnPetap7Afg++OlL33pifYxDAMe9ahH4Y1vfOOpSnsCHBfa29s7tf0985nPxDOf+Uy84x3vwNOe9jQ8+tGPxmMf+1g87nGPw+Me97i7/Mw+4xnPwBd+4Rfib//tv40bbrjhRG07f/78qUpw/oN/8A9OZZwkIrzyla/E137t10qt87u/z5tuugnvfOc78c53vhPf9V3fdeK2ATxn/a//9b9OZV8A8LCHPexuj91q1113Hf7v//2/J5ZpPS27tkG/6x+I7gEPdEECYcXlvB3s8fUmtCbVMYuWnEv2IxEQsrDDdL8KoKCADbow7gNhL+QiZzOuLficiRigTMJBczUOvJHKDY4jMGw4SzzFAgAC1aLafhcDaCzMvRyTgRBRQIiwECAiRsTlArFndh+Dfn3FmmSwr2b66XtdnGmgKndFmlDZfcYAAuoAxzHynAnbQNzOwcUv0mW/GoDT/k6p1ArJElgIYPYmg4t1nbEsrEIuRpaRM2/LLFBiFiBKHQ0PAAIFSDtOWqA9H3++ybapwRIN6CjYMAr2UbLVd0j7yOK9s8AUGQjXMhU0aKBAZqIC/O3KCc8ZyMSZ2RY4lPNpQXIGWfk4XRBAsQEtgQLStiAfA4f1s2fya1SYfgQgIgGZg2U0rEuASiTgasCPM69psQRCRBg2dk/3/Qp5I/K4KvOZitSn/vU1MAtrdo+fpeUZlhxengEtD0DLAw52dCxNpXV5VNKIm+SeGxo4SE2Bz4EciGcXwoHfOTEIKJ8zSBkt0JplW9K/Xcefb5yMhQbOdd9pYHZfSkznUPaMXbRQybwlFLk3vUfbYI0Gn2LgH5bguT5Xes9y4F2DbIQBGGuWSHWNNWg/xfibCsJpAK5lJ8MBrZ61gMJ+0Vp+QypScJqRD/B9eVXqzmA7AHra+5/tvmnU9cDevj0Hvm7lFNFbfaS1Azg2wly5cDRwDTplq6SEww3QJ5ZoVhtDRsiADwtpmF/BvmUkhPUh+1VHFxHWFzHeLlIjmrDQ9ZaMxDX0SrJDhUSJ38bAXOCEhhB47BjrZC10CwYDiekyLdstE4OXkUrN4eyAHRpD+T9xnVIDqsRHyt3S2C5DLnLB2o9HQ2H7tUAAgAoMiIHBvkiF
KTQGLwu4nZRDRDbvt55ltvHbM7bd9wQgZSTZD1JGDlSPEQ3IMpmsMQwGcFXXtLV27rkC8/5YkUfnSsg6dq+kVqKyiQBmeY6RWVObMSFQSUIqkugOFPIMv9TMNR7oMwYqtrdxjLlSb1iVP7qyrUju8lxbfEWes2CsVaDBCvXWkzdT84T3t/T8aKylWWlg1QRmy62Bgesj54GDgbpWoRDZ9+kc2B06ZAHZo6y1VHJ2l7/v2bu5XyJTwGpUGWEFy2DXR9dlgXFbAAJe6rUaa+DS7kWvKNBeox3/UwIrH4wNmDvhbyjglyMnfw0ZhdUn52LPuvMrPMtv6ln3FogqiWKW26ctSdVgYwH/XgE/9qmV5cdgbwHt6+SqY/sHsPWtsf26zsmc9gyC9gfcH/3Sxj2WSE/2fB4JE1eTEXS9/aG22bea7b5uRITP/dzPPdE+3vSmN+HGG2/EG9/4RjzqUY86pZaxnXa9QiLCj//4j5/qPm+88cYT/f7666/Hd3/3d+Prvu7r8Iu/+IsnCrR/8id/Mv7Nv/k3eMELXnCiNqk9/OEPx0033XQq+7o329mzZy+/0TE2jiOe97zn4R/+w394Si2691rXdfi4j/u4q92Mu2Qf9VEfhdtuu+3ELNOv+ZqvOaUWnb6dBuCndvPNN+Mnf/InsdlsTsTa/oiP+AgAwLOf/Wx82qd92qm0LYRwtxNU7ik77XnvpHZtg34LCUo1QQFzmHXxrO/th7zYIAAm0aKBbAogYlnISJytrBI4KbvsXpQs2EUky/DlGiUJdHTE/68PWc5zXKOSElSAQReXblFUMtZF0m/YAEeB/w5rFKnABS/KhZkHcEbt2G8QFvWlpRgQ+w6h79HtL5iBJECfgXlan2+xtPYpK4m/Xxirr7CTQqmZobW3QigMAclEBlDJDk2ZwkoK/NnlqrJPSxD+OPaOl/5UIIo0+xkEolwBuVPAnwFUti24hUlr2WW7D0x5iHYzFI8beJVx6K0F/BRA0UxgZfllFAZV9KCkHK+PoTAdBSjzgJ9vVhSgEZI1HHI24A8gAffqBa9lj6MAfyWjnPvRagHKb/w+dl1F7T9tp9X/C3VggiBAHxWZLQvguECFAURVx3PNpbC3X8DWYcNg/7DhAFbXIw8b0LBGHDYstdXU61M5TwoBYcFBZ/IyuB702z/LAd5+rwR6BfDbaKxJ+o3PO4Dke0qDjFeDsffqm0aAyLYz/fOifTBGucl43OJxKdb7cWwU+EC1Z+1qP6IAfsoK0tpCGkDV6+7vGUBiuwLaZjAAGAX1VqZBynL/SLDOAFyVGBs3ErArkmhWp1GDpls3WalXGlzmOWUeP3Mn55oSQKnJhK8ZFTEwyN6rrJ6xQQi5uyxfebbZ7jVGyjjWAC/KOD3FJskOAE/yvGuwmuv4sSylMagys2IPNyP6WNhmQJnvA7FaAtcSk6SONEhdsEPQ5hDp0kXko0vmR9HeEnkAs+q0be1YBfD44Bl/CrLIGGeAFFCAmG7BfiN1wv7P1i+cWAQkl6QQgwvw63FCB6KE7OWENVFK5e3iwthmWq9rbQxiBU7r+l2tpLeCAdzXpe5em3ygLMqA2u9itYR6W2Vuj0nnetfHcoYEDciTHJO2EtoATDL8SMZqL2ufVfbeW5voJfubZHW6tmtf+MQpTZpSFqX275HIe45ZspOQkJxkN1FJTDLAT2Uhp1QAHOBX8SQplv7Q7fx56v5cvwRdQyiA7JY/6iu2fjZLecq1neqgSSC/SKfr+encSg4YN8B2fYQsPhPETzLQVmoYU+rZlxl4rUBDD3RADB1IfNk2ES0rkK4m9Y9HBAxSN1cBck020vOMgdBlOQdobhfBpCoV7NNr50Fpz4CbuOcM0M9FZp2yJgWl+rra9UmVP6UKKJux1D5dj8yMVilkBv4Y8L+0Hsu96Z7xMTGor+b9ZfVPvK/v11T63JB7gEzOVQHfzdqufXWfTzEjLemBhOHJc4nWMS2JVZ2BfAgd+8NxgXUO
WKdkgN8mAUdDtn6oAb+AzT1X6nO22WY7gT34wQ/Gr//6r58q4+fD0R70oAfh+c9//on28Smf8im44YYbTkX6Tu00pFrv6xZjxAMf+MCr3YzZdtj973//q92Ea85OCoQDwEMf+lAAODWW32xXZtc06JelXh6AugZWlXGby+ITkCxDXewEDny77FoFEbJKmgDIEnD2gXiCMInSAFqtqkxRAFWRewtES1sqGVJj+sniX+r22Tlq/T5tf7dgtl8InI0PoFsmjCqBKPKdU7X8lOHXnVkWsG95UDP6OgH5pJaf1UlcLEs2eoi28NZaNKDAn8WFAQAVsJNLkByYjDFUn2/XJHH/NzIz0zsKJvOpGexa00/ZCgka8JOf0DTjj4N5DPxp7ZEAvgn4PPNl60rQRFBTLU0c37PfPMNP2UWace8DbzEQ0uiCO7RdA69kfdeAX/BXhPgYyoL0UrYpk9X4O05KKG+TCZxcrgBzICTk+jduW21XyUrX/isypVsynsBuwK+VJTJ5SpVfWwj4H7mE22YNrAMHq4YN0hEzPjCsjYkbxrFIVzkAqQLQux7hzHnQYomwfwZJJYyUIStST2PWe650SBYUnMCBMY6nJid5WTqZct4KbtcXRZgDMt4RBeRxkAimY6X4cVQz04Nj7PogLhXZ3vZ+sMBqroHiJKBeAFWg2ZgzB/6oBtWJWBKtqkOjQVZlG+RkiRUYBqT1igOlwwbGmAbqeQAy3oYILJKNe6ACgBrImntQTvLMEDTMrfLPJAHvPgRskAT4Y+sjIV0F0E/l6u7J/c92HzWR6TZW8ITpeJ7cnJUkcK81qRTwWwlQZaw0mTM3KSMEJ9FMhY0ebFyX8Y/AY7rWY10fFVaRggutf9WaG9tUZjGPg5M7DpWPoTVQCQveDkDoOicRWca1EdiSKOZ2S3A/EUt+psLarpjFKvOnCT65sPf1lVIB746bf4ECBGjywa5avxUj33wmbGWO+LmpTfwZM4DA0ufeV2EmHTFoo/NHy4hTwG8sCRp5KkEjjaAc7xILRtsBeNBa+7DcsylB7tlkYKveq4P4VHoNuM8YqFawlOuaTTCfpubjKdlRBYnaTXN2QCjvP49juTd991RMRm5X64f5a305sxqFOtf6unfep5Lrl9JYAL/N2q4jBbmeOuc68Mjkxmk72a9qZQh8AUMByQfHBjPQzz0XnLQp/wjrtPhWNWNtEvA7jsGmvlHrS5qPuaNTPUIrvpO/D8fkFQOaWn4Nww+QxIMuIO4YDKK71mUNUNa2bX1H/SyKn1Ml0HnAz62/WmDT90dGAiSDnn0qXTMuyphnfvACA0LFcNY6fpW0scwZAEzJ5ENts28122yXt7tTz2q2e8Ye/ehH4+u+7utOTWJ1ttlmm+3u2s0334znPve5JoU824fGrmnQj2vGORaKZSAOJhOUNOjrZGY0OB/29pHTgJxGYfYcAaGbFg1yi0TKiQP/ypIZnaycbu6CzCbHKd9pGxQs8PVIMlBABADAggNaIjsIgEGErrdsWlquEVOqMmwV9Etjsvp9xkBaHiDs7TMLaXlQgRMM+qlEJ/9NIVqWO+Kiqqfh6/1kqKxfnpTyA8oiU7PLPYjqwbDJ7G4fKAKmM6q1dmAaQIEz8nmxzxEQBk+4SxV4ygCINHB5/AIyCfigdWts/w3w58M6PqA2ZRpom6p/5iWbWM4zW+0Sn3Gvu9b6dwr0AaXune9zTVr3oJpvn0qXciyBjMnBbSvg5C7YtWUS6nF8YMH3zXHWAsD6e38+k3vRQJGODQhlzHBBmxwiP7sdS9ciJ9BmXZ6n9Yqfs/WK/08iuQt+zu15lbqi9iw5UF0ZsikuLKM5d3vG7PDyeVXdLCJjkeRAzDIJsGQHH8S+XJhgu35NBqKrdQgA41AYMBrM9+w+V4vGJDD1pQFUTD/7QtyrAWbAGBd8XQv456XJ9DuMAydUjBvQsAIN/BfjGnl1yNdn2CAfXRJm9Mbk0bZqDAlAi64HbdY2Dob9
M9wneq+olGlKNk6xXC4AZCxAGInHlS4Rlu7pV6ZEnNPRZ7uGjLTmaMe1RqOBcrkKFLOsIyNEyvBLGcJUYSbf7UcDNmMyIKKPwQK1CmTFGNBHsu80kWMh9VnNEjOLaNzY8+3lIKsEDDsZmRMl8QdA7UtoQsM42Bhn4+o4mFwnoP5Zgs7wOlcroxlQJnq24HkOAV1cCJiYLSnKmNOqjiBs7lEBKAl6p4wi82fsv2zXAFCGf30NFTDdZYX9UxhdbXIWH0MSjeDrjYmvpyAYEbJIphNx92Sp65fyBLtM+96xsm2+beQ9tf51Tm7myAlArGuqHWOqiqB+qrLPx8ygCjOqRutnL+8ZeqoAU/VpSO5RUrabl4S9EmuY8sYEpHaGhPVFeZV5O8ql9zXxWsn2OLHDXf6ofmwymCrtKMmLyrT3LL9ShoABv7xeFfBWayXr/hdLTjgKERSPgJwQ9/gZ9FAv++XB+oSdvWCKCGsn+bhJsNqheuv0kbCIwVa5Xkq+gHxlPcefO7Bvit1nJ9Eyh4fSzl1GoUiAus8ynDx4grGiDzejjaN3ikTyekhYD6M9/8r060TiU42l/Vl5oPLBweNTDNQAwFT52Xrtab3iPhqO7B63dW4LgupbvX9dX1i/KOgn8wucP7zOAesx4WjMWA3aH3xNVR56dIzVPrA89BiP6fPZZpttttkQY8SLX/ziq92M2WabbTYsFgu88pWvvNrN+LCzaxr0y8FrOCVj1KnMjGd62GbYWHA+oQAGZLIjEwtxX4NE2SXjxgIVuQ1UtKbSnSKZpcm32wBfc36+Zl+ILFkli2gKkVmAKSEPvQS1l/ZZ8PJIDiw0Vp/Kee6f4c8WywL2KVtPay14sK9blKC/ZKmOiYMqCliULO/tcyLKkj3MYEKEsOlcwKkO7NR/K/mkdjHeLriFzVSOrewilsXh9nHgkjQgQMAUfKKgVZEeuzxY1YJ9U7/wmehTn3vAzyS6MN23egwP4rX18DTAV2qnlD6fOicNFiXJ2Af02sp7KFi6qw9gtfeAmqXXHu1K+lT32Zr2VyCAQldAMAFsKEgtOieLWTHeNJCln4WOQaA0InU9P2f9AnmzLpK7KOdAUkuKYqwBdAH7qrqXyvSTGpgqiefBMu1OZZWO4AxnZhwIewTgOndX0mltf+RkLD8Ohm8z/SyYqlnbPsFCAb8Qtp67AFjtzMLuzHWm/YQd95zYdz5DXwORUkvIANqjFf9NI/LRCpXEp3ZHCDYuEgCTIFNW4DFsIX3GlJ2UiLshuOCqD7TFAIxTUdd72DJwWSD4pPuf7T5sfn5NA/rQYXRJMZ7lBzhgxYLxo9Wf83OclxqMRCzvGYQ5pSC5Y/iRgOxbIEUIUKnzrc932BYrRfxGlRDMkHnAbW+fxcJuUaWAMj+XcVulimNgh4IyiipFGIBMBYBs5NCV7a1MvwwGpZTh4hloNRClA085NQUEAmndOX/tmn5p3usYDje/m/qAA/xqedEACqVmL0tpksnZ59Qk5+g8o0ocChK1Uswpic/cV2OzSeXbvpr7gEqNWbtW0jaVTTW/yhJuYOeliVz+Ho/Op6uk5ickIfNx/oyfM0Pg3/sEmglJ2nKqI8/fqdTnJQrsWzPXtNr+7pQ8s994NqacWyt16dmIfC2Tu5YjtyaNoI79pzyszbfKkpyX0wCEDmFCMaAFj0Z//TLs/9UwYjN6BlZAoIwuEYIkrFW7zXnr3K4I8NPPPbtP/cZj+lSB/vbclHXK8v3ZZGaTgF4pF3afAn4e4FN2YFvTe8p0nLDkP7emqdZdwn6mcWNrXng51F3mfcX2c/UbtSSEU7wYheE3uGdSAXll4XqL4nN1oawtPpQ2+1azzTbbtWbL5fJqN2G22WabDQDXBZ3tQ2vXNujX7QGhM6YdVhcZgFtdLPIyksFpJrW2coigPZ4AqRsQxpLp6YvFGytmGDiQvLqItGFWidUeURaJC1RQL1rXXW+1vbCXjF3SAn8aINiSI7V2c4Cd
hJWU02gSg9X2brE9dd6Q81YJQiz2YKw+AfZyv5SM98WWlOeIIt2pgalNylXGO6DsnSb4gFqWkYhRGhXCqoITWxIyjqXkt7EOV4DWSEclwx8wqaNEzGIjB/ZZew1woUkg7rjM6Jbl5+uS6Ddt9rU3z/LzmfS+JtroAlNtTR2qFu/TQJ9thxp4K8HBci7b8SqqgiYF3K37r4BV9TlH2j6mrtWPkz71+2prHvq+Y5lIsvsv6LWnAJDLdBdJKX8/ZSfnlJPLYJeamuj3JQDC7N68Wdcgkj6XvQDjcSGsvt4YsrnbgzHlQpFx04BxC5h7M8YBAaPKSjaMWbtKOwKNvj9iEEBPwFGMQwmcKwNQ39uONZjjxkYXrLYwGwFJni8D+xyorizZ4yxQcz9CgDYNRklNIRo3PPavj5DXK6SLd/AYPWyQL10s9YVS4vlAT0UlPUUeGSnxexlHj42RZmb5RGKpXwql9qc+6X6Ms9o4y5npN9s1ZA5YYMbHIdAvEanU8MjOi9G5fhCWFDNV6jp+QDNPBsJex2yNZQzY6wScUsDPgX1bwxrxmKsy51nj0JIQhRDEr3KBeR+oh/oUDDYUGU8AbTJZFnaOAhTNfhR8UJ9HawanDIzWbkIfOqBblH5VVQIH+Olc4IGNzVjq+CUJ7k/7JwKkRgf2hcL0V4ayzveE4ht5tYVt30RuiewZSRyM36Tkjs1jYxfIZD2TZFJp326x2HJySRkF8PP14Gx7BQO9b7gLlKFSY7YAKh7Qc+cyCtAySl2/kT+LYXquImFMaX9VSYHVdm39vgl0Qu6BcpuKFGpw8+2U5WSgGXJCpIBIIjHb+IXAXUio0nOSY3hA07+MEZcGAfI25dq565k3G2HGgsE+gBmBmzUnVqU95HENGiIzOgX4s9N0/aYJhmPOUu8tG/v1aEgVGwzgyg0AA0OByK47QHWCZmrupeaeol33mU+KOu572YYA8TWDyZoCJVkiAdXzfiSynmup67eWlwf99FledKECAncZqV+icvjq52pb/b2cE6so5MQS6t43dudV1Ux1fyuFiKCgX6lfquUg1jkIaJswJK5fuh75WumzqQkPmiSiLD8d32abbbbZZpttttlmm222abumQT/EyKBU7nghEiIAZuAo4GcyM06GLwvTL69XzHiLLlu8qfkEgAPGwiJJly7yfleHvNgdE0aR0syjAFQxIPYdwoLBPlosQftnECTQg70kwGMCdQX4q8zX8QOMJahtJMCAhzbYsLUfwIAJDnYXWc6qLp9n9SkryWegowYohlSAPg9WAdMAjYE+xOBbYc0BmXhxDkjtHh8gmQL39P1xZln5nE3LUkwcrOR6Hz6g1TLZyudVd1Ith9mCZLEFLDQbvLFI2AJ4FDjzgF/JaM6VnGduAjtTrD4L7gnQB8CyYj0IV1gXBSA8ru1y1Kr9xlDYAV5tgX+oj+W32T6S9A+V2ppAYY3ZtdrBtPVSoDF020xDAf48kzRrDc6cuC6nSEBRzsBiW8YrS0DD2LCRn53c7/HnrlZTytxheaKvPPgMbGf8kvvGA3+l/3J1zabAwRwIhIAYHONR+mHqmFvmAzwO8PMWiRkHCkLWz5M/i4nd6z4EGNYAlc80p2FjwN+4ulhYfp7pp1JjowTZAZCyrqu6qvJZZMDAwAINVqn8n2ynkr4meai7QbmnO0LJkD9aI16643K9euqmDIJ7cv+z3Tctr49Y0i0uqs8Xy4U9s5ukCTtsyqYaswTjRwZSfFA2ZSCNGb0Eaxns49ciELro6/rxuEXut2RJEwNosSd1ZwEcrQCgJDZ1krAULuPitn5GG8BvAL4WaFIgSQG/nDkxQ2E1rW9IGQiZ0MWFJFw4H8Yk/iTwn4uE5+DYfd6/UlMmms3rbmxVhp/KpvYioUqQsZX4pQCWst53Jd5kJ9uugF9h4GSkxPKeWdQbjGWn2zTzRn0AxwwbPQCoLD8IoBS4xrDO2XpNvMn+jUFlbEwBUxOD0ynzOShYtNnB
lir9yH2o4Kn1XQtAeuaXP9c81iCeJuLsAC6VfYquBw1A7lBA7eaeZBlHvt+OZRj6Lp8EjmGKCb6N240rDMAC8HGCTSW5q0w/oEiiDxv2pfoFg0kUePvRM24b31j+V8nH6u/oXnJPRgIDRaHxr+xEAzC1bHLXYuu67LrfdiUi7ui3KdMxhMeBsgbYJA/0FcCvZfr5/XgGrneJDYMj8fVzAg2uNIUHdRXgGxzTL237vQidgflTChA+qcF/r2vLwV1TW+/oerJ5FrV0QR9UCpqE6fehB/1m32q22WabbbbZZptttmvFrmnQz8vNARy0zZohKxJ8eePYHgCDX5s1M/16qYunMlGALUwh2xp4eLQyFmFab7C+cGiAX1oL6JdYRpBiQLdcICw6LM6NoDRKTa5otb7yAAb8kvJhYOewZU39P8qyzbi9at2qE9gG6Bv5IJNbkQxMzcLMUmDdL8A4E7WWm/Syk3VNuuYUOM3VwIcsQT+SIAUFZQnSbsDJyyBxh7sTD2Wbtk9QghmRCngU5cucaxBQWYAAjAm4dS62Xxdkc99X59AGC6TvPdg1uqCELv5zdtKpAvjx3/rctNcU8Ktr+NWypB6kjKQBLVRSP0VC8Xg5VaKAIJnqUWTKeOHOWdUtgMW/2Qb8tq51A+pq2zUYlVBqCrbA2dQxrS8IiLluA38fjBHor5nVCB17DkpJwJamgh/kWG/dnmUxj+A+WW/SpOxt29Zsn7vPdgYA8ta15fMpILRyL1VOV3deGIFhp0TolcYdJp91ACwdqu/Lc2a/u8x+9T6OCqCJnKcFoIRlgGFTgD4dp6UOY9oMSGM5UgAnZZQPRCrOar3GEpjyNQsb08A5wOOI3cfCfqCBZWAxrpkhurr9Mmc722z3HsvrFUsZd2u+/0f+m8e1ARmRQiX56IF9ZaZtUrbnmL+rmct9JCw7DuJ2kdAHqsaxbUlP8VPEP8ECJekpJaiigQL3lczc5QLyV7qNsyIfmWu/JwMQec8xQZjPAAXi+QawcaXUmdOAviZR+SA+ZNvtUXlallvGzsCsGJNNpSKHR25e3H1+WqM527kyqFskR6URCDkjTPh+OefK59wJuAA14GeNcBKRKfI20TGT/PCsYANKn6ofZWBqVhZRYYapxOcUqOrnIQWitf+Kn6QPwgQrzAHHqm+QVZJCEwFbc/3FjFaRjmy3zYWlRcTzvL8djk/c2jbzsdQ/1mMe91h4wNZJafu65vY3Rbu+lMYiq941bEJVFpBD2PrDPSMmAZmSu7ayfQZCzpV/pdc25fqWaa0C+1rfvQIFr8xDykTbyQPH2BTA74E+TQYAig/SAoGAPJttLqn3FZP6U6Vshd3PyvRTFZ00sIS6M4qxrqep96yr+9z6UTnKGlMSSa1OaSqJDiVZoJxP0MQvoKn9enXkPWebbbbZZpttttlmm+1asWsa9ENccMZgTpwGS2tZIOvCkmvuYdhUtf1sKaHybkBhfshvc+LC9Bo4HtcD0mbAsBJ23+oIaUwcVF4PyClhFPCPYkB/ZoluuUAeE/ozg+2b9pbMNNzbL+dh7BISUE5WN26N5UE8w6FC3AKSLNDVZl4CFhDh/elnnf01Kc/Idfs2Cjg5YGV07D4P+mnAq5Wd3MVIS8gYJSM8SpChB2fJ95K9SQ6QMDkcoysdE4VopWYk47oqq2WgCP+jC00ASLmASi2bTM8DQCVPWZhsDuizOi8uQCXXQa9RFAYYEdlBUtaFMCzTv2Snu4WwSnVxbNFJ98BqiXmQzzMTPdBXgSq+bofP+nXSqiaBq33sGKIUImK/BCJn8Wpfaj/69fkW4DcVYFFArjEFaHdJoXI/aj+V2nIxFCaclzX1bdK+Jeq5ff2ysLd2yYq5Z26Q9myGjPU4Mug35qoffF+08qb181Pfh5Dz9vegrx1ZAM0poFf2Jd+NNB380r61//MEq9C13R9Hr6lap+9zmo4+ttfW92vasISx3pPjGrQ+ZHnPzSGzrVeHJuuZLl6w
BA0F+zz7OsTASRm9SBt3vUgd96XO6WIJq2saS31TS5gABwWjG1ug4PCwtho4YX1JwL8VS5nd/sGJk7+HrQGY74n9z3bftLQ6RFpdRAhRgOwFMA783ItfEffOAiHwmCrPtjJuUpN84dlDkQTg8yy/SFiEIjunv1RmOCCJMRRA/T4QOq7JPGxYEnBvn0FKkVjOTsVgi6nnzTEBVeKz9al0jiugVWfJG+b32N9yzmMiIDAQhkTCgFPZ32AnqckgXtZzk7SmVTa5v2Es85qvOaf9BMCAPgAG9u11AQd9NKaaBct1rtB+uQIFBa3X6GsMcqNQDfy+b5LuM4S6T/218UBfK1EfIigkrrmqtaz7BddizD1fNzmG+r/KqlcGkfbnesxWO2wlkpAmDekm3hjq/utjLSfIPhZqf0DngaYfzdd2f5XNxsCf62A316iULC32ELSecHV/O98+J1ZzEAfC5dpUSgfl+mz7Ff7aaY3zGLidFIRxmZpnaRjqNZbWUh8LCMinFe265s0a6HrkTW9JBTkN7Oc5th9Qnil9LlTScz1mHA3Z5IR1jWLXjxT4rp0OY/zJOutYQLRlb6IG+qqa61NqK1ZLXcaYHQlEwDRwf08Q2AhyH4i/bxLp6ruYv89KF+noErZqbEqiKGdcUAH3iIo6jEp4av0+cmzmDAxjSWYYdK0j44revUFYygAhJQb79LNIxHNGJAxXyGw9VZt9q9lmm2222WabbbbZrhG7pkG/TBFeOoQDCw688+9F9jOPI/KYQLFmt2SVBVXQbxwN4EuboQL+8jgaw88Hl5PIfFIK/L9+PiZEk5er20Xdoqrp541iNFlSCxC0AaxdAZo2s9qDgfo9IFmXVIE3mULF3OP+gQXyOIO6LHqmAL+pBVHOGhvKEhBjEFEz4kPiYEsCHywGsmBhLZN05VmzVV9NdZP0hQ80EgrI6plKU1YDK3UAzWpfjE3h+zGxTJlsGzQAhMIcyNr/uXzmZXvUVM7LS6Uqu88APgf+TbH6SJiwClhYhm+qZX/sfHKyfssUgLEDYs8BKl3gx4QYFyAAI5gFYH3eyL3FE6zZLfgKvb9q1mkADFTNlIUYwP2Tm8zouhnZMSJLVjX/tptMzudM6xJoHCQ4xcFHGFOT3LUC6vM3hl4FummLPBrI4HlmRFx+xzdxpixAec344xKahNH93447U3KjugcPJJpMa2bg3t9/9gzoXw987zLP7NAAm957moU+Diw5mAau47c6RD665Bh+Enx0Yy8AY/YpCxshSK1VBv+oW3ACSAhA5ySPLWhLkwwLOzdl9QnIR8MGtDkEjRuko0vczsM7jz//2Wa7N5myZzsew6kvDJzcLUC5Q04DgozxWsdObYqRpnO51uwrgfkSoFfpRN4+b4EVYwYnlYTAc5UH6ULxk3LrF7Ysdfk/H+dHtH6GS6JSRvlUzTc9UgSMZUYhW13TUbJ0PPsxy74Ku79mp5U+3B0N1r5VdppKpeqrD04NwNh+OkGU60v6ectCP0kgWoEsN6ZOTvupYYf541vN1X6nPwfAfCYDUQXwG0ZNnoJjoiZLHNK+9QCqynn2gUQa1UvPUj0POIUEaQkQAn8u96C/3yi7mn/et7X+IpACKDkBi73CoFK2q9+fu2Y+eWmrHy9zXZPM65lIpDKZocUuxma7ra6mudU69u9DQPa1clW2VeqG7kykKr2IIntfXh7w24y774et8/fviROBkJoxogH8FOzLrbqKu0+ze0+aeKprPXedqrrI0Gcw1z57c91iI2PZXQYRPE4esrpvU0n0M1bfZg2kEUlrIgOVf0Y+aqDPsySMVmoxHa8FRgRbI2qCqCYyJv3boFwETV7QdaqUZwA/i1HGMK2ZPNtsH872nve8B2fPnsW5c+eudlNmm2222WabbbZ7oV3boF+/hxx7BlCEzcX1+QIzOdJoGeBII/JamH6JazyRWyjmlEwGToPF42rtwL2NMfsU4BuF4ZddgNn2N7EItbb1i8Iq6RfH
SlBNyn3al9sgWMXy02C1BrA9q8+QB5Zl9IFuzcZUxo8uuBVcSQ24cpy13ypYGANnWyMAeQRHyFJZrBJgoI1nE0mjSxc0+59ibR0n0VM2LpJbUf5OHmCX5STIkwSARseU89JPep1y4kxuACSBUzUftBoka3s1jEgSBLQafBpP0EuJEtAjoKrf00pUVcw+AftoXNd101RGcRj4+XFsWeu2xZLB68USeViwtGVOQOqljVwTapzoyCzdq3+3zAdK/O8cI1L/6j3pmX4cYLUfuQByroDH3aZgH//X1muc2loDGxqwXQvrxV8/3VcIJdht9Re3jo4t2SZukwSKAAPjsgR0NVN/RLYgrwKAY84V49P37C7ALyFXNSEJHCPTYH1EBsjVZ1TgWKW79BloA3sT7ytpWQ84a0BKQL506SLy6mLF8EuHhxhlfG5r+GmNVXQ9wvJMNQbT3j5oseS5QmsxWm2aOrBaBQL13IYjfl42K4TNJWBcI995O5Jr53jh4sQdM9ts905Lq0PkwwtIaeTnY3nAz0dOyONG/C4e4/vQowsZMfEYoVKJQAH1AEnwELlJZvjFwjwLKCwqTaShMm95xk8mQoxLdMsA2qxAoQNt+jJeKGAy4ZuQ9wHaBCg/BgEV6J8d0FKAJFQ1uKokKQH3cgZPOsL040Qb/syT5LKBVEVuUmtdbUTyzqQNHdhYsf0E8OsDIQQSoCqgD4S9LrCMqvgCZNsDk/6QWDtH7NxuIvDO8zGZ3+gThUj699hZ2AENOY08lg8bBlIGBnyt7i43AoVRVBhhY2ZWGPcpcLgepZbfaGy/FqTuA2HZRXQiP7vsAhYxoA+wOmKRAFJpTwFPODnKM8Bc4ggkeU3lD8UHBGVkjz2rD98tkMeSaMVKFyoh3hvbz3z+zLeaT+Rpmfn19Sn+vT97Er8pQXweYkWKDICGUF83ZWQ6pl8eC8Nv+6D8HSkg6JJ6cujEd+jkniyt9sxNZfsdbkZsUsbRMLrakuzj9iGIJOQxbL5dgH8F5DZg3w6Qr0pagtzvoV67teUoqqZQ8d/5mU3GLh27LKBfQAwJiy6YT9iCgQyIbj+vAbCEikAADQL0JUmkGtfAmhOn8npl13Gr3IWw/OyImmChCjGxt5rWKm2vCiUZRSlG/Uz1140RDE5mi4E/C7H43JpotogliaELwHoG/Wb7MLff//3fx+/93u/hJS95ydVuymyzzTbbbLPNdi+0axr0G8MCeXHAMk+h40UYBYTlGV5AmLxKz++7nhfpmr0IlKzUcWQJIQBZ6oVo4DiPCZQiMNYAn2eQoAfiojM2SX9mibjcQ1h0iH1ncnJFGkXscgwYb7vklwTMy1MLypQKatEGuiSDd0tqSXcLyfpFkaYiydbPWZhT4KC/AQvaVJUSzNuZxNUpSTTI2FqEwoZKUu8PpVZHACpJonb53MoNGktwS6an6fdW2qvd78TnddZ2yXqumEp6zCbYSAgM/OWS46qZ/p7lxwvl3MYUynHtvEuQ1AN9mgkbCdOsPqk/puCF1fXYrAtzSoM6G5f1K/cwDRtmSwHMBqHA9Z8ABk9QgD8PKLXr9Ax3nRp2xdY5C7IViO/LrH8zIVEBRceJYKV1o/S1fa5AF8p9a20cIRn+2foaqO+9KtjrGJoK9q1kzEguIKN1Sqxu0EQMatd194BhFzgjH8omIVgNqeTYf9odJSZYwDy9Cy0QbcBfOedAMpwQWQdVLEWgZuU5mVhfL4Z3rM+hAwMVKEwFKMwjSzRnCSrmo0tWW9XXWVX2tQJ+BvbJmBwWPQPUJum5KIBf1zOgIQFUy1qfCgr68SO5DPlhzc/PcIR0dInBvvWKgZOjS0iHF6Yv5D1oCXkrg/609z/bfdMYTL+D7+/1CkGYH7Q8A1qkAnZTQLdYSPA527gEaICZA9YaiA7CQFsKGBWIQRSt5UcyX6UMY4dvMUQyQCljv1sg7jl2iSau6DmkiXnY/wV2An8AKtlIk68jrVurdbTcfA0FUfj3
6j9xnTUem0dxx3Lm72C/cbXKUmHBpJRNRtMDfQBsX8knAhEZ2KGAn4JWfQyWFMSsddSsbH1vDCQ9D0kwoeJXJHUE3U+VUbjTmvHUwNVdAJGa+huOUeYTQtpjaH2zIRXgdD1mrMdkIN8m8XsFVLX/PVtSa0zudRF7XbB+s9qIBGBY25znmelbp24+drYERajaQ+hAqQbrKraq1rIGCrDpABb1s8qx6mMH+5y/qO/V7UQf+32S2taBWGZfQEZK08DVXTatG5clSWeH760+sQfFV0MB/DYO8VMJSPOtQvFdKtvRfqp8kbwlUzp9Hur7FzlarS9qx2rXWO74gRwDOhVWqf5NmZMkAGBM5f5WU+CPXwXMD4F9SgKByI2vCvSbnyb1kRX0u3QRWsu+uu9CLCy/EIsqgsmhLyrAb514zNaafR7o0wQ92GfOz+R3okoi50ilbEEXNHmEt7kc6/GesNm3mu3eZH/yJ3+CV77ylfjar/1a7O/vX/4Hs80222yzzfZhYDln/M7v/A4uXLiApz71qVe7OVfV7vLK7Td/8zfxuZ/7ubj55ptBRHjta19bfZ9zxnd8x3fgpptuwv7+Pj7rsz4Lf/zHf1xt84EPfAB/9+/+XZw/fx7XX389/v7f//u48867Ln82pIyROuR+idztCfNvAeztIyzPIOyfAS3PMLNjb4mwz5+Fg/P8WedqO/ULhEXPr14CxX2HsOhYClRBjhgM7AvyPi46dMsFujNLLM4fYO/6c+jO7KNbLmQfvS0AKUSR+JT9NUFvv/DlIBNNSn/6RWRbx+/YBbkyAAELINh+sFsWRgE0X9tLF9NEHEBSyRXLyhQppBjK9uQWcmoJNXvQwC+UYEy18E/ZXlYI3gcFki8G7/prykRqiIGwYfs1rBkUm3jRZlW+F9ZcATlcAMjVx9sCAHeY/8aCfA6lCqFci0A+k7cEpjQjvSNwUMqBExhWLEW4WYGGI4TNJa4/tl4hX7oT6fAOrpd26SLyxQvCWJIaait5v14hO4DDAyAGKMr5FnZZCcBMLtV3BEZa06xoBjjLXwsOyD0YUA9yOTuJKAkgbVyAcEjA0cCyUZc2/DoaMgcLx8yvgV+X3Iu3yTiU3xxuRlw4GnAo748Gfq3GhNXIQcfNmLAZ+Virsa4vpC/dvvpdSq52VjY2qJfY9UGynHP1zAxjea3HhLXUixpG/zxlqSdZP1caePdAtQaU7N72bNFx7e67DdeQEXAsbC6xDObmEGF9EbS6E7h0AfniHch33o7x9vcj3fF+pNvfz+8vfBDpztuQLtyGdPGC3IcXMa6OTIo5O5RUx21l+CmbTwE/lvjk73KIRTbN1dw0KS5/iyoTUcfusWTNJ5UbXa+QVxdN3jOtZqbfbLvt3uRXAUA6OkJaXeR6mTIP8PuLwHplzzWNG5OKDiADnQAY8ATUEmz8eanNVMZtmnRIcyMJtxl5nF6njAEBeXHA/l/H/p/V82uSq2gKJAIu6y/ZXx0TcmGumM8CuLGxsPUYFCz+C1ADLB54sf3Je8/602QR/a2XxmtN2ZHK9AtUpFN5TmyVE3DZ5DOfTKW+RzRgoU7+aO04SdJ6wwlJT5P7dHX+dmTC6JhdQFnY3H7JsfpWDvzjOa/sz7MlDTAVxlUXqGL5ad0zaM1j9Xl2vIpPNO3zG6tM5xwHqOR+KescecVeGOkMDO7y21vAb6vPUIAYuxed/5DsHsSxwNXlrmNVMx0w5pz1BWBA2+Quq4Q4WJ1LDwRunfvlQGh/Ds312AL83Dph69We8zEAoZcKz0Tl2gMGyDPgFwy07yNhobVPu1iBfIsuCANw93n6dUEF9Os6RdQ8ikz6RgDAjSRccdJVvdNg92ju9F6MBvgNbn2mfrW+13rl6o+2ajFaZzzKGLYQAHQRSh2/RSR7HvtTwJ9nm+1atj/5kz/Brbfeih/90R+92k2ZbbZ7lR0eHl7tJsw222xXwf70T/8U3/M934OP/diPxfOe9zw8/vGPv9pN
+v+z9+7xthxVnfh3VXX32efc3CQECQgEJOAoyCMICohgRpAEBSUimBEUIhBCgAGUh8AP34pIkACCODiMPISPDgwgPhBGBEaF8HAQFYbwUlBegZDHfeyzu6vq98daq2pV79733txzkpsb9vp87t377Efv6u7q6qr1Xd/v95jHNWb67d+/H3e5y13wsz/7s/jxH//xpfd/+7d/Gy996Uvxmte8Bre5zW3wvOc9D2eddRY+/vGPYzabAQAe8YhH4Etf+hLe9a53oe97nHfeeTj//PPxhje84Rq1ZYgJi5jQug7NrEN0DajdBvUbwMZeUAzwwzx7K1WspRCKjIn1EYkRqedFjhOmUzPrsp+f+vZFk23xLQODftYVGTkL7GXvqLZORMXCRVIZz6Uk83hhbaU6bVLaLS/E00TC6lDAIAHZ244AgMhIFkrSynjT+ApYo1zBCZSK95RqFhRQs6jUY06ThkAB+xCLJ5ld1I/X9yxxJfugC1wFJQGuVCZkP5nJBNc48ZCZe8bH7lBsv0OwgqpYwSYcpywcWGKJgTJC68HWh6gZXo1U96p/TyMLY0+EznH7aTFflvAMfUlEbR9EiuZ66EXix1T7agIkt9d57rMxMmjSdizHugmWrAK4j6Ykn+nYG+ZIK8THQAtK3/QExETynJCE6VAkhITplxJ/LhX2XRAGXJFkK/JDIRUgLSTkRKuV5FwVlomh39XfmIrS/yVJHpa9DcPou/q+k2R6pISQtBqcX+NK/STXMB+wcQrKshmPJFiSThiEqQZRiw8XMvhNoQfCQqSj+sL+EzZE6hfc32z/2j6Y+1uWCZPP5THasLLVSxXAkpwneZfH5JrVN+NxWOU95TmaJidQs6znxBipYF9Oluk+hr7IY2nizDzG+RzDfJQ0uw7CAg3X1vbXsTtxfZpXAcAwXyDs2wc/6xHmB0AH98Nt7uF5UQycPBYGN8IeeGpBlNB5h5lP2Ggc+pjgItALQNV6EllP/rfZeE7gikwbF3LUcnxDLOCZjss6xnlJHHtH6NotHh+39xUWO/FGlVmVZQlXAVwZ1BuBg66eR9mCIgtGKhtRLwttp0uU2X4x8TwnpJT307IGy79yL1LWuEp7KrMpz4kiMypdBFopeFHZVAWuKoDVKcig8p4o85LxmGeLu4Tt7okAD8RYvHoVyM3ygeYkVr5yMo8ayz2vYlKNJRJ5fxmIcUmkPRV4yYAfM/sY3GNW2JXzQZhhRc5TC8psKOC30ThstR6zxmGr8xl86Dxh5gk+sZwztHAq9Fw0lSJLj9p9EM9uMv2LpMhODz//eGGXpynaP1AKUuSzej2M5WWBcj2tCgvGWolP7VcsQ0tA5DkTOULjGz7+zvP+qEe5cyDnkVwAtR37wgGoPdZdLnpcsi1YMSceA+xajBSlWKsPsVZlQGFhWp9QexhWFjZqfxwGnp8YOfslvz4bU3108gfG8wnuv8rOax3PVTcb3p6Cme0QESLQNRHeEbaHmu3npb92DYPTFWDoXCkAVFacnbPInCwu5qzkIYAfYkDqec7CthTGh16Kp6KAzwxEbyA1HQa4DO4tQhkXQyzsUj0Hds2m5w0wRaMK/im7VhnMMn4p2G7Z3ddVrOdW67g+xWc+8xkAPE88//zz0XXdMW7ROtZx7ONDH/oQ/vZv/xZPe9rTjnVT1rGD+Md//Efc+c53PkJbnuMrvvGNb+Dkk0/e1X375Cc/iT179uCWt7zlrm3zeIp/+7d/w6Me9Si8973vBQB0XYe///u/x0knnXSMW3bs4xqDfg984APxwAc+cPK9lBIuvvhi/H//3/+HH/uxHwMAvPa1r8VNb3pTvPWtb8W5556LT3ziE3jHO96BD33oQ7j73e8OAHjZy16GH/7hH8ZFF12Em9/85kvb3d7exvb2dv77qquu4t8DJ0IU2GnaWalAjAPLPTnPLIxmozCPxKMszvdXiWT1LSOtcJSqR+88/CwgLngRZP3/AGH8OQc3mxUZTxPUdGXh
5PzkIjGFUC+ER0nnMYBnJeiSgiwTAKF9XhnIuxEAY2WtAHhNPkAZWYQCJSQBAAT8I0kgEJBAOSlGxJKLooqYwYYi4VIST6vGu6nFjyYr7FuaRHMgSXwkeBAilQSdX7G/S9XWhpmXEwHirWIrkZMmKHW7E8BrtS+HOD8K7hGlItXpOMnZOMIQS/JMDe1rH6RSna4LY4SFMBYFhBkWFfinAIxeByqdCOn/GQCfqq53AYgqmeug3i4pMOMJ5JAUMCUCRUm6jjzSSv9aEaNkkH5Wz2XQ7RuAWfsjGcCPtdRS9vkjSbcpwMxsipJY5mRSDf4dKizA16uU5+hrhSnBcpwKVGYZu8Pc9KN8F5Bkl4DBMRLgEpwkwCHHQeVIl7Yjx2rVXtlWHMk8hPQ7KtGlcp5h4H6XIku+DgPiYADlvsjH5kSTjLnWQ3Jq3B2z+oAC+DnvcuEFdQL6ta08bzPgh6aVhGzNlF66TlM0lQsjhoZ6d8ahsFCiAchX6bOuYx0mjsW8CjjE3EquNVr0IM99OToH383y/YHCAim0oDDA+zaz+pnF5xBTzEVA+d5lEvJ6/1Im2ti7N8IUa5gijfy+JI5DTBgAJEdoZW5EY4lFvb4ngK38/tSx1wFwxfuZtYcC+Nlk/BKhbuK5fo8l75KwtUsBif0ty/oP0VZgLA/UY589LaxaOaQfpiBHbi3M3gyRq5CkwCaz/xzlX7COfZZdtMSoOpSG9ZGMn66eTxSgge/F21nSM2VpT1WDsPsG1PKoPI9ypV8b8IH6hQB9i+KNJvOfNIxAiMhzfr7XRJ55uCbvO9FIzI9Edl+e29f17zHYBwBHe6cZX1ManqRvEjL7FJB5r6wfqGFGLbUdr50A3v/RGqesf6RA8UiBslFkKdbDTMlY4vMINmhVN/RfZvcZWU/rLymPFfi3BAROFwFSYrC3kglHmcP73NdYkvdAT4jC9gPY3w9Alq8FeH3QiedkZvXJGKvM1Mz0IxS/8SjSqsavcGp/dV8VrCXnhbHIXn7MRuXHEFJdwJDUhxRZiUIjJJ6H22I3lfC0qh16XBptv6qp6JosrGZWrmMd73vf+/DCF74QH/nIR/ClL30Jb3nLW/CQhzwkv59Swi/90i/hVa96Fa644grc+973xu/93u/h27/92/NnLr/8cjz5yU/G29/+djjn8NCHPhQveclLcMIJJxyDPVqOz372swCAL3zhC3jd616HxzzmMce4RetYx7GND33oQ/ihH/ohvO51rzvWTbnOIsaIK664Al/72tfwta99Dbe5zW3wrd/6rce6WTuOD3zgA3jHO96BZz3rWce6KbseV199NR7wgAfg+7//+3HOOefg3ve+N/y4KO4axmmnnYY73elOeMITnoCnPOUpaNv28F+6AcW3fuu34qY3vWn++0UvehHudre7HcMWXX9iVz39Pve5z+HLX/4y7n//++fXTjrpJNzjHvfA+9//fpx77rl4//vfj5NPPjknpgDg/ve/P5xzuOSSS3DOOecsbff5z38+fuVXfmXp9ZiAIfJCgqKanc/gZ7MsJ5iBj0ESW9mLqbBQkCKcJm6F5ZQWc6SD+ytpEx9DWdzJaxVYp74HUwtATUC30xVYU4Bf9vRQAE5BOy+yPr7j1xrxTJtipphYyUqrQK9R6oAcPDl41yCBZPFGVVUxJw0K0wpQ+akCuPHnaNKrQJNDpaKcYywLlfLr9nc5WVa2kXKlKLMSS8Wz3TZ/sU4+LfmPqUyhfW0Ejlqpo7HEapUwdOqMA6gHI1CfM8tSbB1lZl9KxZaRf5fy55ThlyVwPKEBe/fR4iD3da1EDyztqUy+aFl9CnT3ffbyA1BkfWwlftPywr8BA+QAAzjOs4xj2wHkuBJe+iZS4vd9rI7TEQF+475pfEYaOYbesF0TOcRERpYSWAgIvAByB+KkQ9lkls2UR5X+jLEknsdR/JVKEtGy/jTZ3ebkC1fOZ18k6K4Q+kP4eOQq6JQy2Md/ExAjWjDzL4bE
Sdkk+zc6wDYxba+vkIovlAWXFUCsvPtIQerSZ2GYfNzPDLPUjqm2v+nf5nnsB4TFwKCDPAKoiiyAUmgBgCWYHcssu7ZhUE8Bvo1ZAf42ZlnmUwsxkm+RZaom/FYJjgX3xkUB4oWZ91c8z3R/shzdMQxlCF2b21/HtR/X1rwKWD23QoiI4pFJ3sH1A3wMiE0Hmm3x9T3fD0cOKfTw7RYIkoD2zJKKMaEPZexi9pTLHmnZy8+OLRAgzzBCVCZOhyvLuNZhIaSENgFN04H6mO/H+R6dQplT6X1kNB9KOm9LBMDztW6Lr0aJ/GTuzzrX0YKPkBK88yuLlihR/m5mCkbk5HjNHK/lQhf6G3oBOggjsB7sNfGvrL+V7DurPiAMNLt/AIo0e2K2X+sdXAU68rnX+4Gz3yGUJL0WSljWt1UTyA30POc4HHJjz6GADn0sUtVXLwIO9AH7FiH7+FlZVAWdAWUqomL6bTQOM8/+fp1n1mSTBpGoZml0lbmt1ERsSKFfcg7UdEDD/ZznQVQXNnnjk4bRHDoBSWVeof2u7mBWfv9w4UgYp3liWkKLo4BSPBWTgJU08KPv+Lx2M17fhFCKuPT8mWPAaicdzw+dY4WIsbKJiSybCx0Hlj/jpfEK1qqfX37fgtCrjolZB2AYloqObL+s1mlHAvSt+j2PDLwhDvDUgG37CBvRAYiIyaMPCa1nNuN8CJkVOGb6zRqP1vPjhjD+dIzNiiBExm/ZrGdskRJQPWew1uXzhaYFug1h97VZajb5Dn1UCU8sycSPC80IOnfkwlAFOxXgU2ayMhQ9IhD7UsSobScHCqVo5bqK9dzq+Inrm4rCbkdKCWeeeSZSSvihH/oh3Ote9zrWTZqMSy+9FKeddtqueQ4uFgt84hOfwMc//nE8+MEP3hEA+7GPfQx3vvOdd6Vd6zj6iDHiG9/4Bm584xvvaDsK+F155ZXVmuj6FPP5PI8vO4kQAi644AK87W1vw9e//nVEyZn84i/+In7xF39xx9u/PsTNbnYzPOEJT8Bd7nIXnH322dfou1deeSWe8Yxn4DOf+QxOOukknHzyyTjppJNw4xvfGE996lOPeeHGrW51K1x00UW43/3uh4svvhg3uclN8JCHPAQXXHDBUctRbm1t4TGPeQye8Yxn4A//8A/xile8Ave97313ueXXz/jMZz6Dc889F5/5zGfwYz/2Y2iaBk984hOPdbOuN7GroN+Xv/xlAKgQVv1b3/vyl7+MU089tW5E0+CUU07JnxnHs5/9bPzcz/1c/vuqq67Caaedlhe92T89ArqsKABKC/ItfLNV+YoRwHI8xn8qAyTDNlxYAFtG9nBccWwXg1PVkXYB6Hxhlyj4J5I/GSACTPJJGXzGCN7VQGDyLZumk2PwD0e+UMiyKoABvWK9+JXIkkSeJYkaZQkaRh/ybxcwkNlWZcFeEhQ0CW3Y9bhNSI1Zfbba3wKIFviD+exKrxkr02eA4ArsM69VVdxLrLdatkirYFVSDK5BSiwbSL5hSSrd7xSRUoQjJ0ClvsMgFUm1tU14KIDJi2Je9Cvo1yDWclPDAq4/WJhW8/0Msqj3mDL7gmFbWQncUaaFvICXDUDRJD0yw0mOKRa838Mig526r6TA9RFIo477ZSW9mo9HDYqT52PvfYeQCIEIQMwJVe0ckYjlPongHMuwaU9TZkmU5IVKgdlrTEFA7aNLsmsoElPykyyP5oThZ9gvMaSVSSlmBqY62Z34Oo4xwXt+36WSEM/JtxXhHZfva9vH0qXKUsmgnnnOnlCSSDO/kwExTeqGBUvHDj3SfH+Ru9RiCpXADBHDfJHlk2OISAI6jEOBPnSNSI65wvBrmyzn6Tb3AE0LN9uTPVst0y85YfhpgtXKetoQ4K8CoLNPU8iFAlb+lmyCt4twAJrFdS9BtY4bRlxb8ypg9dxKq0wyC0urTlQCXe4R+f4IZA+txiF7UbXR5bFrqy1AijLS6wIfASxQA3827D1d5xQJ
gBfJzMw+CbWcLrPyV99vsteWvpdinpONi6mOJLWv+6Z+xla+XNuu+x0iMssvz5UUAIzlHqP/bMJfI0SW9jyUpDRQwCA93jEx/mCPiWWPAcLwS1z+YIdCcuN7RmHuZEk+LcCKfTVuUhzqAol+kftW+eERW8woZWS2kYzhyfnKf7aPDJKwl27EfAiZ6WfvzyyLXe5/rXf5H8vQCuDnRIZW51fDHNQz8Id+wfKIRh0ht9l7JBcBF0BNhzQs4LzPc0DSQjkpEtP7UUgiOz7qK/V8ezkSjOIFcZHQWDaIiKR4CFwUR9z/p7wXE8x1lsDXlrK7mpbP6WwLWGxzIYCet2EBoOXzqUWPsv6Bgn9yz01T913iwqXCZpT2mCZ6HSgisq+kN3MqG6uuWbLrHwOAJSngGfvz2XlViq4UeKqVw4j9R96XVUpmEzL4lgAgelB08E2DVlQcWOaTEAV4do7Qh2jYelQB1wCyhO9GwwzBDZX0dbVH3tJceiqy/3wpYs3y6N0M8B2vOX0n8p6d8VZcLlIIsRSa5XOntOFcGEDiCcuAn4KU2aJgKFKkuTgzSQHhMZD3XMfxE9c3FYXdDiLCi1/8Ypx33nn49Kc/jTvc4Q67tu2DBw+CiHYFnHjve9+L5z73uXjiE5+ICy+8EDe5yU2OelsvfelL8fSnPx1EhD/5kz/ZUeL+y1/+Mp75zGfiHe94x1Fv43iIYRjQNLuX8t3t7f35n/85fvM3fxNvetObdrSdgwcP4uUvfzmuvPJK3OIWt7jeMd327duH5zznOXjkIx+J7/3e793x9v7xH/8Rl112GS677DIAwMknn4zXv/71+JEf+ZGj2l6amDMe67jZzW6GlBKe/vSn4253u9s1GjtOOumkPF68/OUvz68/5jGP2THgF2OEcw593++ITfcDP/ADuPjii/HkJz8Zl112GT7wgQ/gpS996Y7aduGFF+K3fuu38C//8i945StfiTPOOAMnnnjijrap+7ubocfu3//933csR/qWt7wFj3rUo3CnO90JH/3oR3M+42j782WXXYbZbIa9e/fuqF0A8LWvfQ3/9//+X9z//vc/ptfX7p69ayk2NjZw4oknVv+AssgYIv+bh4iDQ8S+RcCBPmLfIuKqRcDV2xFXbgdcuR1wxXbAN+YBV2xH7McGttsT0M9OQtw8CWnzJKTZiYibJyHOTkLacyPQnpPhTjwF7uRvgT/pxvAn3RjuhJPhTrxx+XvvjeD23gi0Zy9ocw8vjtoOtLEJ2twDt7mHq+M7WTQ1kmTOC6f6X05CywI7NfKeJqi9GKfL5xcRWISE7RX/FnH0L6ScIMlhQLDqX+iLjJFIeqmXlUdEQ0BDnHBSCZbW0cSjyn7RUvVp8d6gQxrSHw7UrEDAVQtawCy+daE/lArYoS+sOKngxvZBBirm+5HmB5Dm+xH3X1X+XX0F4j75t/8qpPkBYL5fWKZmsTopJRRz0tRK8uhx0mPYeZf/tfn4yTHVhBQhg9bISSn+l+YHEA+YNh+4mvfl4P6ybwLIxAMHEObb7OvUD/lfisr+KMk5C3ZkidxggdTA7D9zHNgPbeCq3QnJL13QUxgqVoDKlLJU6bz86w/w/sq+Ur+dE3MekfvmKAmpUkeaSJmKDPylJAzAlNkC84HZgNtD+XsRIg72AQcXAYsh8r8QKwZh9maKJombDKswHXkV8ao+Pk4I21CWnoYbJ8cmbkbKLB3fLKpPjs+fFFFkFt/2nAHnxTxfR/HAAfT751hcfYD/XXUAi6v287+r92OYb1f/wqJH7HukGCswOnv4tV3x7+tmcDMei3X8dZt7uErdjJ+wbOqxPO9U31SJ6Dh+fZndQd5nL1fqdr5ovqaR0rX/bx3Hd6yaWymYDhjgD+AiKCtjqwoJEirZOcv+UsUjTRl+s8ZnZoeONwmHYOKsiOz1lYz8JzAt1ZtBlZFnp5E9T67JhQD5nxTtHE76Uq8FZ+YyPNZSzXqbar/KTicGbRLqwpEYjWSe
Af3sP4y+MxVTe6CAUy44O8T3OTFvpAgdqn96n6DRnM4Rio9fKAyjLKE4AvzGTDktqFI55gwemXMHKn5+epz6oP+i+L8lLELM92b1SLTgiVVa0PmpAtidp1JQ1W/zXERZ7GoHIGw//Zf6hezfyB95VFwHcx4U8FOvx17m7ENE3qchln/aByzjNIN0KACuznfsuXRUinv0M+Mo/VSur1woU9Yi6DYyMJRBvaYzz6XwRu/R8ne+tpy5FrVvasFfKqC4hictMiiAXymi0s8s78zKMSZLzRovPy1wGPoMTmsx3FSslPscAYsZbMsKI8L2EzWIxrGCR+sIW53HzDOQt9V6bMjzDe+E2WdAPlceZ40z1ynl+S/PuUdXufXr039NV4DaLInewW1sylrUC/gnIHU1HgnQZ8ezqD6MMRcujEPvB8WqAJUtQbYpkPWnMhaXpJyvg1jPrW4YcTgVBQCHVVFYFc9//vNx0kkn5X+nnXbatbcjAM4//3w86UlP2rXtveQlL8EP/uAP4qMf/eiubO+iiy7CZZddhl/+5V/GrW51K1xwwQW49NJLj2pbZ5xxBm5+85vjT//0TzNYe7Qxm8127bgdPHgQz3nOc/DXf/3XO97WK1/5Svz6r/86Lr74Ynzyk5/c8fZe//rX4/GPf/yugM8pJZx11ln48z//8x1vS+PmN785zj///B2DdJubm7j44ovxile8Aj/90z+9o2199atfxVe/+lWcc845hyyevCaxZ88e3P72t98VwA8AvvjFL2Y28xlnnIGPfOQjRw34hRBw7rnn4vLLL9+Vtr3xjW/EFVdcsePt3OIWt8CLX/xi/PEf//FRFQvMZjP87u/+Lv7kT/4Ee/fuxebm5o793VJKeNrTnoZ/+Zd/wR3veEe85z3v2dH2nvjEJ+K8887DLW5xCzz0oQ/dcaHFySefjAsvvBCPfOQj8eu//us7BvwA4C//8i9x4YUX4sorr9zxtgCWzb7DHe6Al7/85XjgAx+IAwcO7Gh7m5ubeOITn4j3vOc9uNWtboU73vGOR32e+77HAx/4QDz72c/eUZs0PvCBD+ABD3gA7n73ux9y3nBtx64y/W52s5sBAL7yla9UA/dXvvIVnHHGGfkzX/3qV6vvDcOAyy+/PH//SEOTPYOAWCoBNUwsKGyCvPMORKgkETe8Q+s20G3NykJizALU18YzcpUI1c+Z9zXJlESup0h3+iLlM1XpqjFKWsGxh0ZyDQJclnvSBIH+8lTeXxmO3hHaJAtnX+rPbRXlWAKLkjAMI2X2n5VTZNCqlsJKKAwnZQYCZTGvKY+lw6mLfaCSwVJXQQiL0JsUmmUvOk1ISeItL3grcG+bF91hkc8zhYW8VmT6okhd5qp09bsDstE9gCz7w1JHThIgm6CmhdtzYgF5AWb7UZH7pAFIPsK7Bt45hMT9MsSE4GrZVGaLlfOYfUCiyNj2BwUM60GLfUC/QDhwVWZZpfl+lvDUyvShz55pQeTcxqAK759DCsqo0saU5EeKgX37hh5YzEGx5X4SB04OpAS4Ifd5isKA1P6kfc0Ao5nVZ2VXo2FnTkmtyrVFvkEKPeAXoKbDrJmhF8YfRLKKEw8ss9k6llTqI6GFQ09cCT+gJCgyKyNXMUuydZR81dcsmOgpAZHEZ2/kXZXSBPgmj3KO9TWVsVKmjEpa2QTXmKW35OJEnIzJCfeRLBzMd/V3tRrbSrapD1eV0IrFQxKL7czwi/P9LJe8mCPsuxqhHzDsnyP0A+JiwDDfzv1O/+W2eO53snMVs8+1Ddo9m8zq2zoxV6O7PSdyYnG2B2g4MRUlWVkli3OycYIFlOVk1c9T+pzeA3S/gcJAaTvAezjINTH0SJt74N3G0jFexzqOJK7reRUA+K5BM+sy4OfaJss6s/fmQU4IS+gY0cqgsdE4nLDB90RNym+1Hp0XyTlfioEUINPtAHIfR2HzeAEziAoQMMhwG4UyosBZI0VTlBq+dtUbTu4vaQwGeAUe
RgoLQJ7LLX1nfLyc3Jt9GastaMSgWAEm9J5eEuQpSxjmghABpfS2Yll+CvZssCYg+hjhncv3piigka09zawxIdkE+UOHfyJXQCPzPb2XeCf3iphYycHOv4CsQKCSfI3j5w2BmV9hURQIRGkgg2NjiUEggxCZKSZqGdTNgG7G4EM7Q2o2kNoZS3oGBshyQU6I2BZfv0WIOLgIS15ovtVzVnz8Zo1D53VtwOCKG+agxX4G/mRuFWVuZX2QS/uVKSU9tGWmHzUtKBkGmFkPBDgMUpxnvQn5/E0jAVPS9twfUwb3qr5KCvbyuSR5hMw5hTwnfU7IdApAEqF1DVK7kSVKU0q87tkCaLHNRYqLObfJWCJkAKnppPCxyQBStQ4S5ZOIInPLwJ/ZZxLf0MieyIWpqRKfpe9Wyib694R6RPYY1sfR+Uzouf2D3Nsdn8MpsG/JriFvxMwhfIMUB4AINABN14AcIeg+yFFwGx7OAdtDROtJJGqLlLCdD261XiSUS//V96oiJRPkPZIqIJjjREAB/GZboNkWYjNDakXes91A8h0GWXtbpq1l+vUhZs9sJ2xkO991KHPbaswIC77WYsiFpkVJRI5ldMDa028dRxnHREXhWordlPV897vfjec+97nYv38/PvzhD+Oe97znjrd5z3veE1//+tdx+9vfHre//e3xHd/xHfjXf/1X3Pa2t73GHlb3ve998b73vQ+3utWtdtyuk08+GQ960IN2vB2Ak84f/vCHcZe73GXH2/ra176G5z3veSAifOpTn6pYSkcTf/Znf4ZvfOMbCLswXn7sYx/D3/3d3+Fxj3vcjrelcde73hV3vetdd2VbJ598Mp7whCdkqcujjf/23/4b3vWud+FjH/sY+r7H9vY2NjZ2tpYnIjzhCU/Y0TZsPOhBD8KDHvQgvPa1r8XDHvawHcnneu/xB3/wB7j88stxyimn7LhtZ555Jvq+P/wHDxO3vOUt8dSnPnXH23nYwx6Gu971rvj5n/95vOhFL9rRtl73utfhpS99KV796lfj3ve+d16LH20QEX7v934Pr3nNa3D++efvaFsaT3/603HyySfvGiP3ne98J/bt27drEs0nn3wyfvZnfzYXXTzjGc/Y0Th39tlnX2P511Xx8Y9/HN57vPzlL8e5556L7//+79/R9u5617viE5/4BG53u9vtKkP6msau/vJtbnMb3OxmN8Nf//Vf5wvgqquuwiWXXJIHuXvd61644oor8JGPfCQbK7773e9GjBH3uMc9rtHvJSAvBjU50ofix1U8Ufj1kjDnxVNMHkGSNQrAAGDwpWlAXuQI48BJixgAt5DEkSsJ39SWyvc48HdUesSCfBpeJToNq88CHkD93FZimwRVTgrIgjwkWxXLX7Xgn5qkAylXZGd5J3Bii6baYV7LHldAXoVnYszo8wrmeFnEq+QniATQ0++VRkbw4k/br0BfREluxJSWZDuLD5kB/gTwawhLEjEZ4BukejTFIoGpMpcig4kYGSRT2ct+UZ4bST8AoF487XSyEQNi08JtbDK6F5vi6xcHlgAVVpuCwl5ACHIESqWPJwFqiUqSrXXIrDgK4lEZBlkkhwJaqo/a9txI1gbERT8J9o1BlxRikVa0EQMQ/egxIEUHFwOSQ5YyyiAnpFpc9h+UOKlayZ6mKklBFmwx8j7j64SvS5ZQVSAwhQHkIzy5DAgnBeViQpB+EsVbJIITKZAEmHcsv6lxOMBviAnNNaCtWJlP/hvZl8ZTAQjZmwnldUmualgp0THgt9wckvORsGpaTDDb0nahMCTzdi3zTfo9pYSokrhWMiuGwhg1vn2rAL/clgz2+czscy0DE8yeLv59bnNPlvJEq2OskfG0QN+krFg9/lX9zAB+VX/1KunZsuwXwMkoAUlotrPFx9FERFqZKN6t7a/j2o/rel4FIAPrCrZT2xV5xRWRi1EyU8Nho1G/LRm/lBkmDCO9TxMtlSZkECmhABHOFCiM+5+CYt6z
xPMSiDTFrNK37DzNeu6a7+hnxr9LxMVJ3oyJCoAqWGY99abmZlyKUopLAIwAP360gB+wXOA2xaAJMWW/MABLV60CO4e6mh0x4BezPHWZN9p9Vok+yy4qDP8h37uruZQy4OxcaiyP72vWUVSWmcyhAxz6GI28JzPsM0sypaVjBwA+FoAC4Puuc5Tvtaq24AW4VF/kzPCTYipbDKZtRgygRuY80WHKozARmb7XFOAkliLGIiM+fW6STKQ9CBE8Z2bvxeXP0viRWOozEc+0VeZTgb78G4k9/lhGnNjPL0aRel+ABvC56Mq2dR6YQTED+OV78oh1Kx2g3r8V+z6W9xyHBZZUTmfpUwb8KyzmwmZeYp1GV4CxFZ69CvgdslCgUkUI3AfiAO+KzGdMXJjKmGy52hwR+ljLwc8antvOPDP9dN6Yr0udpy01VsZ3lSQX4C+/vTErzD9dr/oyp0rk+BCiMNTGY9bhgsgWfVBuK8mxyeoKq+Zex4AWt55breNwsbGxsWOA4FjFD/7gD+LVr341fuqnfgof+chHdry9lBIuuugivOY1r9mF1nHsBuB3bcSzn/3sHbOIAOD0008HwEDMs571rB1tq+973PrWt8Yb3/jGHckQanz4wx/Ge97znl0Bg6/N2KkU4Xw+x/ve9z4AwOMe9zj8xV/8xW4061qJn/mZn9mV7ezdu3dX5AwBXO+kVQHgdre73Y4lZL/4xS/iKU95CgDgwIEDOPPMM3fFG3BjY2PXAD8A+JZv+ZZd2xYAfM/3fA8e8YhH7Jo8ZQgBn/nMZ/Lfr3jFK/DABz5w14owdhJ3uctd8IEPfAAf+MAH8I53vAP3vOc9dwTW3eIWt9jF1h19XOM92LdvHz796U/nvz/3uc/hox/9KE455RTc6la3wlOf+lT8+q//Or792789myLf/OY3x0Me8hAAwO1vf3ucffbZeNzjHodXvvKV6PseT3rSk3Duueeu1EZfFRHFAwUogN/Vi0HkfFjWR325dCHEfgcOMx+4MrLzSInQRMJC2VM5UePR+Qa+mSGFBShs5Ol4npaPKkYzWw4oCSS7kJXkksp2Wq8xlRci3e4oklRjK7vRGqVrQiNCfRRSvd1I8JSqCm1OyDnAN7yworScCLf7CQP8KcAX+JEsfODqhJr+orcLe+fUgbFOMKAcDwt4AZoQqwccZQrYanNPEBk+liZlgG+7Yvrxcwb61GNsktUXI6KCfyrjFGIFlikg5rsGfraR2UZoWrgg/kezPdxy3/H++0bAz6YGG+Q9Tw7eNQBoyd9IJbOyTGZ/EIgD3OKgeFUuWGp06It8p7L9hh5xPs/tV+803Rcbul+aAHZ+Rd/QBEm/yAmvCE4mMPspFtZfDMz6Mz6VNNXnFHDX/RSGnwK2SBEYamkf5z2SbwVMTXk7IAffdGhMQjcwkgyfwGw/79AGhpj7yEnbkAguM/Q4PLEX4BjwswyC/FmnUqJlTFEg0YJ9CvRNsfq8SYjY18rfBfBTlp8CfiUZi+pGzbrtAIQhs5IZbL6bWX5AZn/kcWr0L0vA5uRun5+HhfS7DDBLslfBZV/6nD4yyNei2eyE3TeD39qS62xvYfephPJsDycVsxTVRJJxor/ZxywbJf0PwGTSiZkJkaWvmo6TeDFIYpA/59v9y7+3jnVIXJ/mVQDgNjb4/qV9WWVzxTNTma16LRHMuBcSNhoC4IR5U7ZrQRSVTMzzHUjyWNtAKtXHDJAQEwJKYVOIppih/DT6CHTNTJLEDd/jjazeOBlf+STbcQKor3VRa0ij+VpKgNKp8vhOhcXYqPgBse9g1B2F7Ecq0oxDBqFq8M8CU7bgRCOIfGXrwPNdErafK5/R40aJ8ry5AD3l+Ff9gJCBICIGlnIBF5JhmRXQt/WG4Zik2Cosisz3YpvnIQqcjTyzM6OqQQb8kBl+G8I22mSWX7eJ2MywPTDDb9vIbhdJ7lTktkXWEyiy3p2cIC0mUwZqI/2z8wQ3zOEW+0GLg6D+
AOK+K5C2DyIeuLouBIOClsXPjVwEnEcKwg7TewN/OM/34BoMQ2lzlu48TP5f79FRAGaoFIYw9+CoAmhtoRCDe1QYf0YFQYE/PlwJlIBB+7t3aLotpLBgVrtf8BzBNSDXwAWWwbRMPzQTxY7qS24tDZRxmuzvlzUAs/wIfZD2m/kR71+ZR+mcxb5PQJGbzZLdiee+WcozFBBX+6ZzSAOfz9QACKGAf3me7Mv4Mlrz5UgRlByvG0gK4sgBwwLJR3S+k3UMz0V5XyIa5zFrPDaGkPu2Rmb6dR6NQ+63xUIBBeDUZhDJOkxYtOrJrQxukfektkNsN5GaDT5f+uga9FHXnVKkIDKsaWI8cWaM9M7IH2uxgDNzSusNLUW3UwofejzXsY6jiWOhonC8xMMf/nDEGPGCF7xgx9sioh15+B1PceaZZ+4Km+42t7kNAOBRj3rUjgFO5xwuuuiiXUvWP+pRjzqmbJXrKtST85RTTsGrX/3qXfczW8exiZ0A3yklnH/++bjiiisyU+0nfuInvimuh0c+8pG7ur22bfGqV70Kj33sY/Hf//t/xxvf+Eb87M/+LP7pn/5piX1/LIKIcK973WtXWfTHOq5xL/3whz+M//yf/3P+W+ULHvWoR+EP//AP8cxnPhP79+/PF8X3f//34x3veEelT/tHf/RHeNKTnoT73e9+cM7hoQ996I5NKwFb4cuLfvbbClkGVBMyMSGDgRqOPNoEBAd4l+AjiRcYv8/J9Q7ed/k7+fapiW5lhcTlpHD5IVcWueSMv14NKmjVNDBK1mdQzzD8oibBjJF6rBdcMQFwCTHxtgXmQeMSEAmdSE8iiNyMLogn9mFpUTuVQDdJnAwGugJsqZyeSoX6pd/QdpO0tP4bqGV7nF0sRvGDU085kYbJrL44AOrDYoE+ST6lxbz402kCQF6L/cCghWEpKXjhvEPsG8QQ0QjLh7oZUjcX9pgHNQ23UxKIpPumcp+amAiDAUVH0qnSx8YStLmSPg4MZKpfjviRqF9J6hcVYAkgs6ssm8+CLuQY8FO2lUpX8Xf5WNEg+2ISXmg6pBjgNnjfSIFiZV4RgXwBO3O/sADMVIVvKh4s/GdJqEGTGXHgLJU8p+jgfYNEEHYfd1MiZZcm3q3ELJWQItpEGISuwcNFRAzLPoBhxO7LTAdTRKBVzAr4qRSegn1joE+TVK1XxmwN9PFrBjx0tT8PsAz46fhHxInAlFjG1GsyD9PfrVl+si1MMQjNuZsK5+E7vu34ik2qSVFnPsrPXdvCdQ1826DZM2MAcGurePaprOfWXvGcaVl+SpOKU7Ke4zErRSDJuDeSmbXsxXGCiRm5vD8pRcC33DfHxyRe94uFa9sb5hgU2N9g4/o2r1IvyhQdnCtgX/btkntbLmqKA8v+JQBe7gMN4AMqGe9acltY+Dquu6ZSXMhtqZL2Kct6psQFTDoGJcg8KgrY1vBcjVRdJg58n2UkhguX7ByGzNws/3a55yYqvnG27+tYaqUVLehXqxLwPmhKyLL+8vOIPDcd+/RNAX75b1+zA1Xtgv9RGeNJAZVU7gOoS6kURKIEMxct8upAkVcnmMIrKrKmDSErEJDxNVZ/tPwoBSF6D88/YQAyLR5KnhlHKusZmxkWIum5CAl9RA34heIhVvzvYt5LW7RjQSNCAUsoLMQneBs0zJEO7MtS1eng/jJH1DmIi4ALADq+n+X9aKWr+dLnnAJejZm7J/N4eK9GSizR6pEQIgGO5RNDTHCeMrgLMgC8FraQy3MSSjrvLoChZfwlcKFUlAKo6Ait75BamUSN7/vOl8JAIK95lgpxMgO/yTYIXPg3vs6Qx47IO4wYi0ICUIAlLYCqFAlgmGTqpa2FANaP2vhLZg9GlOsjAcxsFHlPxACKIxb0KsDPhi2UUplPOV6dFPt1nmQNypLzhARqPGYNMB9KYtkTs1NVHlMVT7wB42m8nrNjHQ1QBfJShOWL9KrvBPCb8aMUq2oxQkx1AW7+
GZTrKp8jKoVpGejT8wNT0LhCjvT6EOu51Q0jjoWKwvEU5557LpxzuyJr+M0SRLQrAMDpp58O59yueEpdU/nUw8U3A8ABFNDvVa961VEVT67jhheve93r8G//9m/4/d//fTziEY/Anj17jnWTjusgItzznvfEPe95T/zO7/wO/uRP/gR/8Ad/gOc85zm7VqSwjhLXeOQ+88wzK0P1cRARfvVXfxW/+qu/uvIzp5xyCt7whjdc059einEzYlSj+wL4bRu5z9a5DOK1SfwgVM5nIERJ7PtUkuBeEyiofQjqhLhUu5sFVrVYGbP8oJKjqa6SlN/ylHISBUDFMgJKgki/o1XvEQXwY/ZfymAnABAciDiR4BInC4aYAAeERPCeq24pchXqYRdbqxaz1Uky2wjleBBr8uRk2nibJM/HYGCVmRrLP8ZStQtr9i6ylxQWLMs0LJDmBwrQJ4w+lb6sqn1DqFh9sR/4X2DQj5vB+xWdgzMShZ0X/6MNTswm8aRBB24PgGSAUN4PIOnuRoAw8PGxyRMrd2kBv2xsX2Q9VYI09YvMvMoyikZSsTqtI7BPGX76PHvt2IihyFjJb5IcG2paw/pLGfxLSSRgRV5Kgc+cubFgi91vBfxUOlI+m8Q0hIYBaBo+JiIjipSQwgDfAEGTeUg5sZcoZfAPHuztlwgxsv8UpKpck0YVSD+BfFnAz1Hx3cusvwzu8fYU2FMgcBWjL79nxiL9fQX8LMvPxjgBpuCfU/mw8efN9zLOn1+jIpllr8FRn4A9n84D6EufEvDPeYewGIC2gZvw8nPewc+62r9vtgekMp579jIgMdtijyDfsd+MZfopmwKl4KCSLZbrKaHIEtuYlJCSz6VV46AtYuimP7KOdQDXr3kVAKBpAefh2o7H1NkeBgI3Nvlv48sFABQHlkQXoKiRAoTWERZBEsJIGRBT6UT1ywRQ3+Og4xlPxLK8J4h9/VYcKmZvM/jT+G7pvlk2PnHNCgATzObLeOdykVUUwEwl2Ky0ZeMU0FR5TyoeamAwj1af5sp7evzalHRnte8jZmBMNWg0ZkyV7U8Xb2jYY2/bsywlXcBclfVUtk5+HAaeK1S+aaHMu5zMC7WQCNwP4QoAwZ5i7OO3COx/p6DfEBPmgb38VOmjD5aVH/PzrgFCpOyhqPvkpNivFVZqZiqGRfYijCPlBL3XFZai+Pe1bQX4aVTFJwo4y7lQeU89d1NgigYfe2UDEggsvxkiss+kznPKj48Y7SgKHMqshZH6tH0HqPtvSsBGMxMwXUHh0TwVyGB6ltCdAvyE5Tf2Jz8kkOSWD0xmksnfDgVYykyypbWDmUuCi8gy4JfPLZjd18KAfa4qcKwbemiwj6Bsv4hkpO4t4w/i8Zh8uc4WQc5r46txigzgbueaU5c2Fys5IErfMwoYyuBOQGFlin9f8i0/kkOQ6yrKeLlKMcLrAAIzV9U5KkoxmbaTAC5qNCf+WEh4ruOGEdc3FYXjLR7+8Icf6yZ8U8app56Kxz3ucbjtbW97rJvyTRvz+RyPfvSj8eM//uPHuinruB5ESglnnHEGPvaxj60BqWsh9u7di8c85jHHuhk36DiuyzWUnRPzIjnlRXLx+CtVs0AUphgnA7ZajwN9yFXVrYvYar3IE0myo1rPpSJT6XShkurkB2ogECgLIU6+pLygjWZxm/07zKpJGTblb9OSpOBfyvKXluHXKxAlSTCAgUDvqPKBmQ8MMgYvi0R4eN+AvJEBGjGsjiQyAJDL/M339HkoSYfVG5pg5Mhjxf5S34dhUYN+cUCaH2BZQanOhkpeGqZfXPQY5ouKwQeAwQigeN2Zyt9oAArnXX4dYMAiLno455G25/zdpkVqWqlclwrbIN5qrllKevJ+mZfycRvK/qvUpZG/VGnF3J44eoQB9pxD8mU/LMgHAK5thNnH7L7s6zSWOVDG4KJUSaehX/YKaVrehiTGtJpYGZ85+TA+Disis/uqtgRQ8sy8igMzEP0C
hA4pLND6DgBLRCFwIjolIBKh85xkaV0C4CRxyj20TY5ZwJTgo1S9Jzu+cFiwL0ud+cLua73LQJ+Cevb5KsnOI5lj2PFC8Hxw8X8t6zv53fGxrcY1ZXIUmU/gMNdu7ise1La5/zkArhM/yZavr2aFtGyW9pyxRx/N9oBaflTvPrfnRE4It1vC8uuAZsaJVWX6jRKruYn22nIMFKdhwfJbOTknfpspAqRg94itURrNmxsxiOJw3TP9lOVzbW5/HTfMcCrlKd6Ybu+NuGBFWdxEzPrITL+YQT5PLGMJYRmRwGMhFlBs1jhm+ZlCIA0GIZAZP7wNeY1YvhMQIErGNWbfEc91IoMfgyM0vhP5aCPVC/B9wTVIdk6lY4SZW+kYOMQiuWklF3UcVKCr8yJr6YTFqL+pY49MEn0CUkxLY7qyGQEGfeJUFh18DKdAwAzSjN5Thp/9XBJpZ/u6nd8S3wKzl3C53ssXxtLqnlB8/Po5s/zCIrP90rComHGF8Vf81PIcQ/5mMJZlIVOzwSzujROwSA77FwGLmHCwTzjQB8yHiH3bA+Yh4kDPMogLKQIcy3uOvXgBvie3zhWJT0csBb84AJrvQ9x3BeL+q1g6fTFH2LcPUQqndN7kWmFKNeLtbIukVCHBMEvV109BFO7LZS2T1w+H6AtcLMXSnEC58ZOw9rjIT64joGZRSbGVJ4dC7WP/RgfKNgEpAX3i13TO4x3reLSuQ9NBzl3D+6ny3jAg5wTYp+fWAn4RyOA6998CsEfweUkguIljoscjFxg4KioFdt9VOj7F3Bcty8/O523fTD1YnaJpWe6z7ZD9C/OFNM30y17vWUIcXPzmmwyYkrYtDOiaDm3TcJFEBAaxaNC1nq79AGQfUW+uQ13HLs2hda7dAIjCyExdVaiaiMr5amZ5bhXgsufkWG0GKEs+R0BWmvV2zCjjZZH51H0Qv3fnkGIBLLlQ8NDH9rqM9dzq+Inrm4rCOtZxJEFEeP7zn3+sm/FNHbe4xS3wtKc97Vg3Yx3XkyAi3PnOdz7WzVjHOo46jmvQL1eCm7LvSiZvlFAJkgV3kTinkIrvSYwsczflGWeJUFptGaIwlogr2AmcGI8EQCqsDWEryyklsz39W9etzNozla220nFFdj0nqGAAv2AAP3m0eenMFIzF12uIZT+4qpsZUQxsOjhnFrRH4KlQrc+iIFdTwN9E1FWdI0DHgo+alFeWmwH7KPSVZ0yuyhYvmTjfDww9+v0Hmbm3GCo2XwqheI6tquQFRnKYvgLMUk4iFMmgnDywjLXVG+dHPYHjdhiW0qTX2Diy10yAi1zOHo2HGn+kMPoOBfaNmX4pFJ+aNLCvH8t99qXtWsHvfTkewhrRRLLKG02xp+iw+zcBEJs+ksiJbKr4JRIhagLWQfyVCmgXidnBwQNARFRZKblCHCXxleFETG4GFTlPTQC3rgB9rbKLXWH86fNWkiOtdxVzD+DKaN4+/23zXYfoRQJoIjP6gALorWIXWTnQsl/Tj0u/R469FU1fSbn/hCIbCGQfP7T1dVb1QfFzoqbLQIQCftRJMkqSiKkxfkG+ycnEBFTeqRohaRLKsQ8kAHIq1cxAfAJApIlCM5hagM9KhtL4fULya2mcdRw/oWAfdTP2e+pmxZPL9O/KH0/YK1q8INgBWk+Zgedd8fSbHMvNa8q41eKnaBL/Crop8JcSj9eUSGAK+X1h76l/rDJ7cuI9LQ9iCvgB5ZFBiFSxAG0oC9wbdl9mMSqw4hr2yLNFB8z3mWrGdR7j+4qTuWFiIVSYGWAlx1f5KANQ5QFVIahlDA2wonKKGmZsrXzvoPeQRsb0FkEYUENkf+shJsxF0r+PCUP28ytAn2X5Le97+TUFIpw5jyQeb+M5ZVgM2ROZokMKMn+amDOSZ/CP5zrTUtPaslwrNyoEHIMBKuPJkpfM+kvi2UhUgF2Vysy/yQe5PArwp2sqLmYs11lKEL82Vj2RciCkmHJxYKMWAXquXUTF
5FWgaYLdp4BfQlkDpfy3mbeAMrORMK2yoPMmgvEhJrNeTKFSj1g1p1TJ8ZUhgC5vZpnNqfu89FKK1RhCcEhhyOomCTrXlU/5iMZ1MieLACgXPlAqcz/ru6zqEtM7VvocF0Qo8BmRi5n0OjT+i0l8J7XAVgsy7L7Y+aSOxFXhqrL6qLTXvp+WqhAm5lUTAOY61rEqrncqCutYxxHGjW50o2PdhG/qeOYzn4nNzc1j3Yx1rGMd69iVOK5Bv1a8CxB44dF6TsrPGpfZfhsNy/uUZXSpANVFkX62hUrxAa1D5atnQ+WQoqx4KvBP15Kjz1s5zpRK0iSvPSXRNP4cg4RJ2m2SLuY39H1lHQHL+JC+z8wxkt/grQTi33RB/HZEXtS7lKV0dGGvLEZtCVdz1lX6vEMCmKUIkEro1O8BqH2yVjzS+HXrxyFJJRLGW5ofQBp6RAP0ZVbf/ADifI7QDxj26+NBBAH9hvkig34Ais+dyAxa5tFYntC1Lb/XNXAt/6v9XCQxECPLbmrycSw5Zv+tMg0eg3z2H1Ax+qSBXI2s7zkH79noyIXAwIp8bgzwkfoirdCEt2zCMeMuqT+P90iO2Y7UjBh/ztWScfLoBAScXNBrvxDfQNK2m/1Ngb39KApLa1gUP0EAvunQeZajs+vBlMcNBxeRpXUVlOtjQpMIreNrrXfLF5pepwr6jUG91rnl1wTksx6iNXN4+TBYpi9yco/HIU0EZUZKIgRYANGAlKPtjgE/Zfnxe8vtyMdOgDMFA7IPExhEAMByZ9tz7odNJ0ni5f5q+yG1Nein8p7oNjghpVXo7YzlPPU5OQwJCKH4oALLSXtNHjoA3jVoOmHdhoUwaJvM+itfsokzX12vaZykIoe0fQiw+lqKEOuClWtj++u4YYY74SS4k07K1x26WfF2AirmDr/ACX5vmdq5OIC90/oIYVDxuLyUvBXPPRsqO6hzMSuDDsh4DQCRWXnOIf9uUjabb4HYAV6KgQ6z71Y9ITdNAb9U5nNAmR9aL7/WQUCvoUzExJ/XuwZRikSSFJYoyELCfCkyznW7rE+sAj1ejqe2QcONbhhTjEJbVKJvEwrLUv9ORBjf/e38jwBktQGrOhAWzPIbiqdfUsn08Zgvxyhv35U5id5HovM81rczbAcG+Q4OEdsh4ertAVduD9geIvYtBswHZvrtm/NrBxdDZvppjME/L/drgnoyEmiYwy0OgvoDiAeu5n/7r0bYtw/DfIF+/zzPE3Xul7qmFHrZfVOZUgv2KRM82RXKdFjAz0q5eiLEkNB6Zqky4w9IQfqnMAA9kH37lm7jcu357GdJDO6ZhYZlwALsi6zdTMUiOtfBbzQZYEfoy2/o3MC3fD0oAIhSkKMMwgRlUy0fBwKvTfiYLL/vSD8j6zwUVhl7NKrf91AKyQwr8VCR5/IAVN4z9Qvur6quMfY7mgCpKuAvFZl73oEhHx8uYGxAbYT3HXzjsIgJTWSZT14f1mA876sB1mwbyBUJczlHth22jflckUNqZ5nhtwhRiktLEet4XNbzYHvaeD7LbVV/v3o9TlIkAerLvDKF0jbTznSoSem1FOu51TrWsY513LBjDfitYx3ruCHFcQ36adikhS4eWkfYaFiez1MChrJoVtaNsmz0OaDSMJL0xnIl4qrlRZpYtB9KoUPfy5IoCUbSBln60wJ5Wt3pR4uckBJirH3GNKpEkFSi199lACCFkgBiEDMJuCnPSRNFSUBL3s5UAp1/q4ZLE1BYf/rZCeCqYnMZX8Glylz72TgAi21OKKlX33y/sPp6xANXZVZfmC8Q+wGLq/Yj9APCwQUGeY1Bv4SwUPnOBOdJ2HsE33m4li8ZjxrwazY7kGPvsSVJwmaFodeU5I4mhNwy2218/Pi4TLP/AAHFojCrzLlRwAVaTW+kpyzQsiRLpbJHRi6UUIC/KpEXIyAsvzRo+z1S35e2CfCXZUCblkEiN68AQP380uE7VBU2uM8kZYJGSWa5RU6C
NN0Wywj5khSNiRNj0QPeeXiK8rqMHUk8gpzIB0+gcdarb4rJx6AfxGNUZKc0ASKJkcw2MIBbVt4CiuQaSjIuILGqV6IM+lkvHq3cJ3BWNzM7Mrh3+LCF2FESiDBgX2bUEEu6UiPV79o3Bjn/Vn7WnjNv2KTaB4RtZBl/aLtlv5lmI8t5aoJqiEby+VCgnxwPnziZ6Z1D08wAL347mhisT3TN8ptibMiT4YiO7jrWcf0Imm3BbZ0ItMKUVX/MtjBW1adLE8OkQDlQ5KoBNK6B964k3bVQRwuD7H3OFGbktoDHplWjfZQiKieDU0K57pjJK2w/38hgOUxes9PbrseMKdk1ZdfomJ+T7NYXOQIEZZmXeZWO/SmV8V3vGWx7TEs73ph7TmOAQEALtOxzmL9LW6fCggOcr3dLwGOOcQFWKIy+7DMsgB/CAogDz8sqIMyDPLP94RwomrG/7bjIo2lZRlGY25BzGULEEFlicD4EHOj537aAffMh4uAi4MAiYDGEDPgNUygRaoUQBou48A+9eBH24vkshWTDfMFzSSMFrxFD5GIqPa5WJcFpUYy5V47OgbI/mamWVkq5aijwZxl/zKzl9yhRYQw6wCdiMN2qb0wU7jkq84h82g0grmuBnngOUYA/B9/OsrpC9RvkMmPMsvtU0jOz++SxSFey97AyYmNSoHv6mHjHY4YTACzLphPKdbl0PzfzYADJFYCPRp+bLIKTvq3zzpVhrzHLOtYChxTBXnsDz2fE/xsxcrGaazAAGXCDAUB1bFmKsfqA/plM/xtLe5Jj5QRymVnLPpPIEsfKAB13z6UiA9Rgn11bW//6yfbqekiY5HaNs7IwcB3rWMc61rGOdaxjHetYB4DjHPQri4cEo1BoZPIcYkwiB1MWJiqz1/hier687bIo0fctK0if2yRQBvKOUI9fAT/L8kuykAbYl8+CeTExA29cq66/F8zLdjHcjlbGRMpWFLZfrv4V35oR+MeHkiS5IK+btttjBtTghAX/MvBnyxitRKdJlI0BwYrtp4tmrRofFsDQIw09kmX1bR9kpt+BAxjmCwwH5hgOLhD7Hv3+krgZ5guERcQwZ3nPsIhZ2tN5BxLArwkJLcDV3D6CXAH8/IxBv0ZAvwx4NG2RyTxUTAF+42ToOGzizYbzYMBNADwFHZVBNQJcCtOvsO0y4FfpwsrvDAuuap76bevjMmb+oSTAEsBgpACMyXk+XoPIgW5Efhx4P5ICktU+Yro6O4byvvaVsABSU9qRIuAbZna5MgymxDkUZfyRgDV9SECjSeSYmR/tStDPJJpGoJ8TFoF3yHJHXqqyOVml700nRLJcF8AJPQBEfOEWsK8812s0JE3GJLlGKSecSQDEI0mfZKZLTlKPgC8FA1Lk/m/Pk/apUWX92PMo+z56L9KCwgxVdp8w+uCb4i3WMBAYwNXwQeTfKpaOGV+BkpxS0I+Q0HoCxcSsD3LwrqsKS/J5sMdDXqhZSHIuwMnp6zrWvjPrONpwsz3Axlbth+k8A+0Aj6uugDAA5JougFoGgmgAPI+zNDB7Nt/bVrHZ5TeI3JJPKaBzpsJsDpGrJSISvFypEVzAkXzD7XINQEPFRl7502Zul8xr1TGqEtco3lQTcxX9BlKEEzAtmjmWMnIIR3ZNNY6yRzOgHtNlLqvzWkdFvcIWj5Dcn5QY6WjUbvu4dHDMfCyaOZp4CmcW1dDn5xgGVL5p+ZCUwh69b+uYn6VlmwI8J+cRUvHq7kPKQJ8+7l8wyHf1fMgMvwOLkKU9LUhajh3ft9WDlz0NY/YkVP/ntJizWsR8gbAYMMy383bIuezvZ/evvC/3tRWAM4NUDDMxQFIkFL2AwKvG3FzwFwFyhER8TbBtcRJPP/b2S+C5gLJoV55nbbd5bouMFIgnAZ56c1o718A3DUBcBJAldXVuAGX2QfwLy5xGvZLttZ7bYgDyle2lAjIpk4yBP3N9VmuLEbRpWKa6//b91YAf+xYnbYQFPJca
Wd7LChS2CELXTSmV+bK013dbcN6J0CfP+6KszbLn8sRPJnJSOGhfs58w+6WgtGuQyGW53JC4kEpZmWP253hvDwX2rZzf2t8/FKCX+9MhOsO1FOu51TrWsY51rGMnMZ/Pcemll6594taxjnVcJ3Fcg35DTCAxEQ+jwk1HhNYDG8mhHSXnW1/7a7XeYeadJOTLAhFAlkDibY4qGAnZ3cLmdKckQTV0ER4T4FWST5I+KlYY2aiBEziOF3UW+NPIvjZRQb/yXtRqcSTESFVuLSf5BfjTbZGuAF3KPmDeASmpS05JUumiLcBUlqaUF+XFh2eUXIiGuReLzE6WhLIAoCaSAAapRmBSGnp+Poh/39AjLebA0LOkpyRo+gOcpGGgbxtxMaDfPxfAr0d/cEDsA/qDA1JIGObFh4Q8wXlCCi3I8fNmCwLwbaDZ7NBszdDuKd5H1AhLTtlyyvZrWNYIyn7KydSSWIVv6r/1uMGApnZlPU7QabVw00KlPCG+aCxHNKuO4RLYYnz89P3C7It5m2no2bPPFenS7NMzSu5ZgHEpeWKlHOU4ZR83PW7aNpEozeyxCgQcJQdiQBrK/lHbAbQAxY4Zoc5z//Idum4TXdNhEQmOoiTLJKkYE7xz2GiAmDz6EBGSy2PNGJAHlKlRJIQtm09lpyybz5MBCSfAPrtneurZt6eAWeohGrlRIGLZJ0fpkLJhKlVsIyUej7KMMRJcopxkcii/7YgEhItANJ42ksiiGYBh4HPVtNwvev4cLbE+allZvY7QCMuIHD+qd58+bxn0G+DYzylGLAKPsVMJqqnwLuUE1SImAV5Tkf40ibRxJf0YHLDHlo8nsBjWSZx1HD8Ruy3E2V5kVhJQA3xAlrVNvimSeaEv9y71dQNAwzYXdqjPm+8YdEsO5Jua7TeR6I0w19nEpaTAX3KlKCHExLcF3yFLDTYdUjA+Y4aZvLxN+e0x2EdlHK6KO+Q9xDjNwBewk1wD7zhpn5Dg2XCVWcZJt1fGGXtfsYCVlftsnROZaELjSea18rcrxW1FTg9Z/m8JDAFqUE/fk8cp1YVKeUHmciQMP6gPnnjiafAcxRYljQo+xL8V3SwXdqSmy2yjPqbM8DvYs6zn/kXAFQd6HFwMhulXy3p2TTnX+fjlNYBD4widJ1DYZpbiYs5FZAf3I833cwGZFI+pFDwARJGBX/KAdlwEpkVYU3M7T+yh7V1Ckwq4BpHnZH9K0/EwrewRExe6QO5fIaGoIupxJ52jqGyiYU5h+T62KrgoisHn3CXAa7HkVWayy/0ts28jKsaggn3c/vrvqZjC6l31vpGPdMWj2fozku3H1YY8yImPbwMgjorLTEFZZnBKZL9uAJQ8qpnXeHyZGh9GLFtuD68FKAakZgCiXCtNh851CETgGRqNvDjNpgFT6OAEpT1MyO8uIo+ji1CKqIr9hFlvTm0CqArZuN9pG2sFi6lICnxOjc9rdt861rGOdazjOI5nP/vZOPPMM9eg3zrWsY7rJI5r0E8XHrpEdA4IgSt2HYjBsMzO4e9Yto2Cfp5YClSTI7xYqRcjzixWltuRKh+UyQVQVS6rFufCrkkkQBuvjCOxn0brHVxM6BGhGnxTrL4pQDA3xLEsDqKAiZGrxC3wp+1LIimUAT+ggJJJQTxJ+pv94sV/Ob4RWPKAAbDM1svSi8Wfj3KlePF+WQKRRiCUAn6oAMCFyElFxMUgFdghe/bFnmWZYkhIMfFjSDlhk5l+cEsaQtnjzzu4tmF23wbLDrrNPaV6fZzEUskqTZZadl+1KJfn+ntTFdkTz/X4qqwnHHMeUiMJCsussjs0Ie+pfwNg2S2AGZXyeXIRyUkCxPgWQc8VCtiXQlxOhAFAP0B9EuE9s/qavrD+2g6p6fPxywkXfXShZv9NRL4i+kVVuY3UAGHBkkHE/IrOd0iJEKSfKwjuIJJGKcER++aoTJi95nJy1o39kmoPE32e/YMMqKTeclUydrQ/ym4huYATKWAv
bfZF9iklkkr1emzIEsamkGFVGkXHAJcIiTQpZzwECSDD+klxyH2PGXgAYYbM8DNSseXgGWlZC/a5pnj/OJ/lO7N3n2fWX0jAQqvRIz8mSQxrO3WcHA+TRIJRgoQVDjgk+CRV6bF4LWoCcyrG469NZB4rpt9UYng3t7+OG2bwtdXVyVWVhrP3LMPGGPvzZoBPv6739hTZrwquSoxnNtDo9w516Shj2coZF5YeCbCGDLalyPec6jcOIfM5xTbS3+VHypKZNDE31ONSsQqVwYgyniQZv7norHzWMvcAYIql5p3xL6MC+CkYqe1z+R9lydTp9poKugoITDXQp/tiwT4t5rLzuChqDDqHy/OFMtfIqgKG6YemhdvYRNSxX+4xWRVDznMfEvoY0YeEhfj3KeB3UBh+CvpZP8TqGFKR+s/qHgMXpGWwUuakOncM/VCz+qZiNJ9SH+J8bM0x9gJyKOgbCPBIwshPmfGXPb6JDjm+52uBkGUz9bglJIQkcrtHAaBkFQGoN7hiSVx91MuSJaKAP5YNNgX2AfV1psx8DZX4XBVWaSSzyszfFbA9xWbV82N9+3Tb+TOHKDQDZH7DntKkvtTj31nV/mrnZe4MPj+JHGgAj1Pik+g3GADUPmGB1byZpP3nCM6zeT+RqzwWVc5T/waW17nafN1lOx6SGXvs38DyHLfaJtHK9xNRdf+5LmM9t1rHOtaxjnUcbfzVX/0VLr74YjzwgQ881k1ZxzrW8U0SxzXop5G9L4gqtCkS0GYpFp8XHCrh00lVryZMgHrBqEwXlbLTRL6GLjUU6NJ5usWIpj4fXWHq5CS+MnccwQt7sY9A44AmemGspCxrBJXijEVmRP8GJK8uiXrA8XNZpA5REktmkZb3VdiGmhgQFwUECICg4KAu0keLuFLxm8rO6yI7lkQQwoKrba0EVFiAUmJZTpHr1ERLJQllAAOtrs2fzQBgFKlOTtCkEJell/QcCYOPRBKWfIL3Hikk+M6BnIPviq+fbxv4WYd2DzP83NYW/N4bcZJqtqeW9LSylJnx12Q/MvgCaKjXSYArC+jcx2Rxuwr4A7LhfaJYKsujYeLl/R0BZZp4k2vFJigppdp7LZpkhJyL5EYSm9b/TwA/PfZ6LqaCvMsgoG8buK4tx8w5uI3NAqiqH+BYPnWVXFx1DgY475HiwJJhcUAatpGaDWy0M6Dp4MVvhpljNWuMq9RpknHCY0ddbW4ljbJvHzH4nhl/kHGjYk8UILdi26gELIDgKLepJ620J/EcpAxO6iG3R35cHT8VlgkYUcD/7CWjrAJiHx+I/0wSpk9m/PiBQe8YkMKsvp6BzOLU5G5m9REx+1UAiNRs5L8D+Bwt+uLvNGTQTxKbUY9JWkrUWKwaKPeAVhA+ZUHnJPDo/Op74+MFLFfBzw+XHF7HOq5HkWZ7kVoxsdf7jrJjlJE+lp+WIp6coNb7+pjpr3J2ruH7VRgKCylFlgVWgBF18n1K6i+/J3Mil1hFQZ+HxL6CybdyLxxGyf4CKmkhlsoL6vZtElaBGQB57lj5+dljkvdLZlNhQPINKyg4KZCKMleDFmOkJalOVqYgIIyYalLEtiEstY3GYdY4bDQes8ahUzULAba4sE1Z5gL6mLaSPUdWsnPM5juUDLse38U24tAX+fVQAL8K5LNS6ML01/MRXYPUbCA1IufsGoQhZR+xXgo95kPEvu0BVxzoceWBHgcWAfvmLO0ZxNfVMiWtzGfrWOp/Jseq84TWATRsg/rtLOupXn6xHxAWfS4gy4VLwGoQ0BXAfHzdMDA6wPtOFEnKHDwI8JWkMDAmnq8Oh7mdJPkvJJa7DTFl7z2iXEPIiiCm3+Z7JQyj6xC/Y20ClPGnRTaOuG9zNx4VxExcV/ZTk+zaND1PsfdgO9/Sfp4tHJLxngSWAD9+9PkfRYfM9NP2TUqSL38mn8WJOXX+vhYhGU/sZEFxIKtaUDcUxrK23TfSZ6RvJUzKAyeol7NIJU8xqdPo
84H7gDL8FqqokxJWdvGJKaST8zGe+1J+34DvhCxzPBnkzIp0HetYxzrWsY7jLy677DI8+tGPBgBsbm4e28asYx3r+KaJ4x70s94XrXecjfbVuskkc4ufFhHQZf+OslC0McW0ye+hXoB6lOrXcfvs9vSzgFSom+pXXlTJAteAf4E4uaQVvwr+cVUpsfTeaPEcc7Y+5edREvW60MrAjW3vIesutd0GCDCyhvq3JpTyPtskkjL6RNLTAn45SbSYLzP4pphiEgr8IUbzfNnrzXnHEkyBGXoA4M0KtgUQQ4LvyiI++/nNGnR72Luv2TNDu2cT7Z4Z/NYW3GwPaLbFCavNPSWBNfYGMcksSBIyGYaUgh1WhlD7oJ6rLHlzqJDFMZoGlAy7DSb5MMXgWD6w7PlBxOetaZEGAC4YNqGAcDGyjBfbJtUJDwvSGvAPKIxKe47IOcSuAc0X8F0D1zYg7xGHXhKDPUunNp1h/RWWgD3mZVeYYUYDP0/OcYIhRWABoDHV32HARrclvjdAT8jPgwL0SEugX2HMLfuBqmynZfK1ToHTITNeq+vEnDPS86TnynPCvCGH5BxiolxNH0iZbcgM5CDtsJXz16SYWJnBSXA+UlZhLgAgOO3LejCi3F6C5+ci5wtvgM3MHhJ2q7I5nC9St4bpl9oZQC5LeYaYsB20SCLlpJQmLC3YF/PrZcdDAhDqQg03xAmJ1lL0USet6uOUQT9znGPi5Nl1HSqVdm1ufx03zEiuZQldCQWDkgHxckECABiwhwwIxHOU0cbJMbgnPnuUfbCEOWjGhcrDT79+iHmZfke/F4XpkhwDlUhxScpSmYtJCj0UbAmxXMX298aAXxnrUe83UEAxcgz8yfFJiZPXVlVC57L1byGz/1xkyXqYqc1YtaJVqU+RrPQiVMCA30iOlEbj16igaAnQU1WGERCY+4IBCrPcugJ+/SLPzyrJbVf8+1TGOU4VfLgGyTfF+y2VMVbH9O0hYjGEDPgdXAQs+oCkhS+O0LU+M5UAYKMRWVRXS6Fm8DIOrEAgUvK5eOwQxUvl5C2rEOgxTfnaKGBpigO8a4rsawJYoUP3VebzELUOYJluhVJENA7L9gtgQJxkHqPz9Vp2szwqyDoVFvgjQr74LLi46j65KqbA/VUM+/G8Cyj9XK9Lp9dmboA8Jy5p5Ke+nrcCuXAu/9a4qMzKkl+DGAN+k7L4uo5RtRBtr3qiyqP3XVaHsZN4ZXlqgaYjBf/quX4F+Mn517FvkDXoENR3ccU618qLAtVauip8pbpowr5PdkwSieRJCdbrQaznVutYxzrWcd3F5Zdfjk996lP49Kc/jbZt8bCHPWxlsfL1OVJKeOxjH4svf/nLANag3zrWsY7rLo570C8nW1AkMREi1AvPJm4V7Gul2rlR0I/KIiQntm1l98R9RRcooxeXPzdqa0446ZvC2ImJK7+1IteCf7rwyh5+iQAVzokRUX53LDcSY4L3IgEUgSDHRGU+rcTReF/srsTEC7nCHIQqh0rbi+RnxXJE2V8ycl/Wwy/7viykonrokQ7uR/bqU1koIEvmTEVmA47AQcseI+8L2CefawA4P8B3DmHhJalTjqPvPJwntHs2DLtPAL/ZBoN9m3tAsz2gjRkDgG1XS6ABULnODGr4lpOQ6ksmzAlNWBYw2PSzMdOv2tERE8wmZM1nDuedBDlfgEnkQVJNzovPSUBy+nfH50h9a/oFkuPEanIRCIETJUHP4Yj5N5L+1PQKA30Ozjv4WQf2UOyZ/dfNkAYB/zZmUL84cgL6Os86v0DdX3K7OfGY5gcYqG1LsjICDDKlCN908L7LzIA+9/fCnLPHV5MZFhjK/ksmAcXHNYAGAb4nGBXVaRufW8uiE9aNJ8dsUCeJa6nuZyZgQnAmkaeHY5QgAgooeKh8RkpAIk4MQY5NkjHYSxKKc2yyH06Spxb8M/2slrflRC9fI111rSSR+rRSnoPxm+lj8e4bM6PVgxDgcXGVJHJd
ec6Sb1WivAICKZ/3fGzkMUtgyfa3D0fNWMdSXH755Xjyk5+Mt7/97XDO4aEPfShe8pKX4IQTTlj5+V/6pV/CO9/5Tnz+85/HTW5yEzzkIQ/Br/3ar+Gkk07Kn5taKL7xjW/Eueeee63ty/EWqd1gZon+nSJAcu+GFJ+INx7bRMn4ZfzycmIbwBIDW8C3pDKQAJIDIHhfymPgkbE6FOZQVYYsY0h8Lw2J0LgG8DLCJQIlZfcwoKRFAjqG6FgPlPlRvhVTKQLQOaSd64wLN8qY7jPY46TwRIE+OwcrY4/ITsbC/GvNLa114k8tIB+z/JwAWAwONZ6Kt5kr3oNjlt/KUMBvJN+ZgbG836wKEGXOZgG/NPT1NrVIp2nhNvdkOVk71me5aFMoFcx5UfnBKPNj9e5TwG97e0AIESkmkCNQJPRWHjMz/SizJL1jdQ0KC5DMVaPIzDPwd2iwT+ebRxQG7IOwXX3TIBLLaLPwhIIwxfObwMobBIDcFLdL2mLn8OBiwyhsWGbCcsEOFxWV+74qkKRU7qdZwaMC1M32BfgLkduub4YxsLyijVOlbLW05/T37ffsvEsLdRiUp2lpT4lExPNBO2+Fua97n8E5Gj3nRlwzwG95R2vAL18rZr7K7ZmBaIHsq9rMeLx1TZadHzMKGeBN4vc8fk/XGOWzeu5D5Oe9WXsWL7+yjXz+BFlcVTSqYyWNvgeUEV7P3ZGkcaeYk+s4sljPq9axjnUcD/FP//RPePzjH4//9//+H77xjW8AAO5zn/vgrW9963EJ+AHAxz72sQroW4N+65iKL3zhCzjttNN2tI0rr7yyukfvJK644gr8+7//O+54xzvuyvbWcWziuAb9BlmE6KKFpIQxJ25dvRy2CRpr7m49tQDAubL00KStTQZXsnjm+UqPAsMgolFijNvl4BwDMg05bLSNLLKKh4IaqOcEt/MYfMIQXfYz6WOqEtpAnbjWpLUmrHUhVto9AWSOIoIBPmX82M87A56Wis2Bk4Mq3xkHYFiwdFIcACOdlFTWc76fk0aW6aeHMv+YX04k2nAe5AM8GqRZl+WXyDmkrmUJyRn7/SkIBSAndBRwUvCpmXXwsw20e2bo9m5lSU/a2gu3uQfuhJOBbgOpmQk4wUwl+dHS/izPaCTFmi4ntKz3GH+hJHq8BX0oVaBPAkBJQL1xEs8Cj1NMv/xbNTshjSW9GmTmYD7+Xvz2tCK6aYHteTlnzrGEK4CAIZ8HGMAvLHr5edPu+TbIyTk4uIDrGsR+EA9FBv9S08LFwB6ACv5ZP0VgKSFGQE6mKDBFQE5guhT5vMQBFHokN0fbdGhcg65tMoPOVsQDNnlVCgKqRHDYRiVvq8lum0y1CSnDdlkCxACovxHIgQbZV0k+Na4BHI8lKsOXq7RTKmwJ2Gp+SdIJqJ9QJ90ArRzXjJ9I/Ub2qyJhICs41vgZN9E3nNgcFswA0f0eJ9/IVdfGFNsvpFJxnv37UvHvs+y+PsaK2TcG/2rgb+lK4ENMq4C/+u+pGDMKD/bXPegXYgGnr63tX5vxiEc8Al/60pfwrne9C33f47zzzsP555+PN7zhDZOf/+IXv4gvfvGLuOiii3CHO9wB//Zv/4YLLrgAX/ziF/GmN72p+uz/+B//A2effXb+++STT742d+W4i+QaDOa68OKJh6YrhSN6/Q5yb+/noNAvg/mJZLIgTD47zus4lyIoAoki+EYTl+5ROl+pXqMyH7PPAR7TKHExEiUgSEECBLBUMDJ53qdF4CKCPvB8y7K5de5Yz3fKHDIDfbEUckzJCFIMDG4K6KnbczJ5JZL9cEyIdo7QJkJ0BDSOxy8zYCnDb6v1aD0/brUejSN0Avh1wmLzRNlHdqkATc5DktNWkAAF90phCoVFLlSx0uvRAhfGX9myl4oMtzD8mlbkO2dITYvUbhmQrzC/QQ5DKswj9fVTGdY+FpafAn6LbfZyjjExcOodnAH9usaja1gWdav1mDWej5UjoF9k
6fnM9BNP6KOKyMcwhQC4ITP3s7Rt6OXYD2hdA4By/wWQfYURuY+wugeQqPQFex/LqiJUvh8Tz0n4cynfw8fFgnpcC+s1ZclPZXpN3TMz8Je0MLGAQatCr9kCHpXXxzEG+Mb7qmAfkWW0Iv8jO99wDohmPi0FbWkAP47lxw2wVxQ8XP2alfMXBmAFTplxMxEts9gy+GeuK/0tZQ4DPA4DSMOcj4hzaJoZSDRQ7bwUUOAPFbALlDnfeB6YQT+kPN/SORa/V0DzlLhNIelaUjwEgdriwYQjMmM2jc4fytg54fuMKAWI9ngegzie51bredU61rGO63uklHDZZZfh61//egb8fuInfgKve93rMJvNjnHrjj7ucpe74O53vzve97734eKLL15ZbLGOb97Y3t7Gk5/8ZLz1rW/d0XZe+cpX4qyzzsIZZ5yx4zbt2bMHP/ETP4FLLrlk14DEdVz3cXyDfkbObUpyxIYCeyqjtJQ8QlkwArJIBi+ax4n9VVFVk47D+EmQLihtcizID1jmju+QQOilGlyZKkGS7Cz7BPjo4SnChyReXuWzPi+mC0tF27oK8Fu1n2l6DbcUTrdvjkcGNZThJ9JJzO5bMOCnTL/eyEKNJG/4B3xma+W/lxrhciUueQeK7BMHMLDXAEghIEx4sWi1NoN+DBAys69DM+vg9pzICSsB/GhrL7CxycnDdrOWJQRGoJ+eEJPUMoCfJrNyW0heGyUPEkXx72OptMrvYrQYzqxDOS6HZPjFuOSbkfRcqmyoMv70PQX8rP+JnDuKUp8eAlxkeVUAcNEheZeBPltBn0IEeSegYGmHfjd4YW46jzT03JahZ2lRTcSInOfKiAEpchopDQvevwYFkFLw1HeyjxHwEd418M4hpOUKez1DWeYtGkk0TYYr0JfEz9K+pucgjZIaIvtK5Pg4kwNcrM5lIu7jqD7jxVTHZSAwJOLq/qRST8zSo6TjXMq+VoXVW/ZUx4AIZv3KGcuMAe9KQpCI0KoPjcpRxYH77aHATef50Re2RymCKIBaZvOkkrQCavaejoUW8Ouln1mmyFQ4R+iR4GPxpGm9yCnLeNojVexmwDCyzXb7G7Be01VXXVX9vbGxgY2NjR1t8xOf+ATe8Y534EMf+hDufve7AwBe9rKX4Yd/+Idx0UUX4eY3v/nSd+54xzvizW9+c/77tre9LX7jN34Dj3zkIzEMA5qmTHdOPvlk3OxmN9tRG2/oUaQ1mSnlx0xxIM9nKJr7uryucr2U5L4Bt5zINZ8FsCSB6YS5rAVdNnSOZudswDKzKAlQke+D4t3HP1hkPcMkqyVxYhrI46IfzZMqCXPrdaf7Zsc4fc2ENncMfChoEbXQQBvtSnGVstRYotKxP50TmUotbDMFbhnwG0l5LgU5II0k0scSniHwPA2o5mg6Z1MPvzGAkudmog4Qs6exYfopyxtlbFep5phy2r+KIHLPOqbHEBGGxPKeDbgwZTQOK9NPJSCV/aeqFBgGkfWcBvyKioRbqUKRw8rO2/vf6J+eI0eEpIw+YnYepN4ryH0npSKrOL4P5Taa1zODFQICpdKXxiCQsvyCAZKm1ll2XaDS/0V69dCHxIMyIKVsNEf1NqfAPnuN69pN39djR1SDStzYci3qeEDkKrl6iARtAurxagzy2TgU4DceM1codtTXzUi1JIZaUYOMRHJsgRThSJjDKSEsbb0u7gJqsA+omXy5gMr0AwX88jakeVkxJmkhKAN/BWCejinAT9eMlUQ0UNY532Sx23Or9bxqHYeLSy+9FFtbW7jlLW95rJuyjutxpJTwF3/xF/jyl7+MK6+8Mv87ePAgnva0p+E7v/M7j2q7IQS8+c1vxm//9m/jIx/5CB784Afjc5/7HC688EL8zu/8DtyRKilcj+OP//iP8bCHPQwPf/jDd22bn/3sZ/HVr34V97znPXdtm7sVMcYbxHm7ruIFL3gB/vEf/3HH27nRjW6EH/3RH8UHP/jBHd+X27bF
V77yFZx33nl485vffNwybb/Z47gG/fYvImihCVxmmVj/phwuoZWn2k9TRjJKaDU3oImcNJlUspFgGD2V7NGoytsCX2PvG4CBAaAsLKkAQs636JoOaDosPIOdvcraJa7GjI0XSTtktosN6/NRZHCE4XiI/ZsKZfsBpXpYpVt0oe0JUJYfBa6Ypn7OicFhXhh+8/1I2/w8HtzPAOBiDmX6xZ6TiMrEc0Y+yXXtSgCQnBcQyDPA1zYI7QAfImI/LMlK8nd0u+wjx9KSGywd2c3gNjYz2EcbM7itE1lqspkhdgX0SyLbaaWoNKa8TVKwPirFvYXPCWVfjkRg4Mc5ILkMKiYARNyXltbYNlE7khutQvuiKwlXW92fJFlCKfLvxgGu7ZD6lsG9pmUALs74fMYA17RS7d8yG6/tQc4h9DXTNYVQiAUjxiUARBcB9BmgJe8wQAYvvV5EKqzyDTJeilWYhE0GJgX4I8yh8p4IDeAWLGFKDiRSrCCHRhh3NgmekxIW3M8An7AjQp8B78r7yEo8mf2iLF3qK9lYEiAws/3yudbENiezkmPvv+RbwDVonLBdHDMnYqJcyc+yXCRjKZYqxvMx0ySRJA0rlmPkPr4ISSSVIY8dfNuVhLNlkep2LZNV/lYpVU08qY+fMp+DjHkJKSelAGSWn4J8lu2njGjLqh0zaAAtFCnJdQBoo6sAQKAkHDWWPAPBTJTrOjRBfm1uH8CSBMUv/dIv4Zd/+Zd3tO33v//9OPnkk3NiCgDuf//7wzmHSy65BOecc84RbefKK6/EiSeeWCWmAOCJT3wiHvvYx+L000/HBRdcgPPOO289iTVhAb+I0o+9jo0G/KH+AGhxsEh1Q8A7IlAckJoZj1eqcECFwUVxqNnp+jxGgKIw3Bn4ggPS6BqdZIyYOU3eHyhYAk6Oy+8re7iPxRt0EWIuwgE4iU0ERC2ySvU8MSeqtajBsrbHbL8UAfK1r58WcYHg5A7uie/7rVNo06GnZRUH9aLbaj02GofNxqMTb7oNzwBW5wuwleem48IS85znE6WwyIbKtKd+Udh8CmTpGDdiAPKBKvMyajugaeE2NlnWs9lA6rb4ceMEPicxZaZlYaGjjP1Ri/7KfSD/G2IG/Abx9HMi5dqY49c1DJJuNMz0U0/EhpBVKbT4jIuE4lJxmD7a56XTFX/p1C8yoOSU7amytpnpGoDA14N3DdsRaB+U+xmz84zMvrlOp2LsKW7XP3p29N4KwBzz8rpl9626nYyBv4pppr8zcX8Nwm7V9Yh3xWPQMgAt2Kcg3xI70LzuUNYheW1m78FmrpFkTKO2A0LIYN+4zmCpyNDO0axvN1GZy+hvVQdLgSyI2sQURKcgoACQvRSmOc8sVADot6UdXNAF38GTQwDLtdprx+7HuEAqF5MakFfHvyHyvKoPscyV8mFMXPgUa8nzlMDFXyD2upe1C8z50fF6DPZVLL8JprSy/aaO53UZx+vcaj2vWsfh4rTTTsOd7nQnPOYxj8HP/dzP7biA7/oc+/btw9/93d/hrLPO2tF2/v3f/x0vf/nLceqpp+L000/Hj/7oj15v+/1f/dVf4QEPeMCO20dE+NrXvobHPvax+bVb3/rW+OM//uOjAvwOHjyIP/zDP8RFF12EL3zhC3jEIx6B1772tbjDHe6Al73sZXjyk598jbZ3+eWX45RTTrnG7bi247Of/Sw+/OEP4+KLL97xtlJKePWrX41XvepV+OIXv4iPfOQjO2+gxFVXXYUTTzxxV7b1K7/yK+j7HhdccAFudatb7Whb733ve/EDP/ADu9Ku62N88pOfxG/8xm/gJje5yY63dctb3hJf+MIXcM455+Bv/uZvdsyQ/ZZv+Ra85S1vwQtf+EI885nP3HH7AKDve7Rte/gPXofxla98BZ/85Cdx3/ved0fb+cAHPoD3ve992Lt3L255y1viwQ9+8C618OjjuAb9DvYRWBQAwVas6kKmdZytCZGT0IMktzUxlBLl
KvCElBcnRGVhtMQKNGy5ySR2NH/byl77GQUGphJEWvWuEpBNx1KDfoGunaFtGvjAybCQCIOTCnWXTFVunRQAUCXFgLIwPpLIFbypPh5W0lO3WUm0GCaT9fArDD8j76kJlqFHXPQIAs7x4YsMGAkLzHnHHn/AMqNLF+CQBFbbgbxUofsI3zXZUy6fU9kmOQc3Y9kpTUyhadmrb2PGoN9sD9B2LEXVijRVu4nUdBioYXnBPuYklZ6CqYrk8TEen6vx+8q24GScMroarvidqijWR/N8yg+DksvnK8t60sj/qHrP8W92G3ApiSSXL4m+YOS8DDDnAJBfmCZKX+8HBv9cRPL1It5JFb1NqDlvEy6myj5XWyubYFmKqfz2CAyMAZQ8kibjwNe2JkGhgKey6YDqYsjsDgX6jNSbBfvQL4AYEBdzLMmgVTvuBCztkFQmVeRLyQfpRIVNmvsFOSDyeaYG5fWUkJLP/cEraJy46j0mScCJ34+NkiTSv5cTHlViThJvQYoC+phYWo64D3NVukn4oCSm9DdUxi2iXEf6GFFLOxefIWF/KyMkFoafVq9bwC8nlOOyZFkMCY4SvKOc2Aq5WoSzpiwFvXwsLMNhFQPjhhJf+MIXqsXBbiQJvvzlL+PUU0+tXmuaBqeccko2QD9cfO1rX8Ov/dqv4fzzz69e/9Vf/VX84A/+ILa2tvDOd74TF154Ifbt24f/+l//647bfUMJ26PzPR3IoBzp3GUsZzmI559I05FnqeQEmc9MsV9GgB+BmcAUhgwO5rkHdB5CS/MQAHn80nlJZs2N908LCqSQYIjI/lV9LCAHoGMYwbkk80VUPmc2Judz1yDUf8zJ7znHzGL18WtlbFaZYetHN2tE1lOlPI2kZ6NzMmWyo4BNFgCx5wFoGLDSohJ7/JSVNKHEoJ5neZ/sfVbmVdR0hd3XdAL+dRhkzrSIBWSywEXQ8zRKuvuJCdMYILbhMxtSJFI9MyRzsZreq2UfVwEz9X5OlM2JrGdmbg09y7kTleKqKbafMLe0z3sHRp0dS37mOWWWz6SVgBx/bvm1MdAHFLAPQFZQ0fMwtXm72ckCISz722r/jVIYpOdOPQHhADdaaIzlSu3coZpTYxlQWroOda7kXC5gg2t4zPEeiMryC8tzRMvok78tq2+qcGnlMUss86nzZnJ+qRCRPxZy3+IiNeP/HAbAi18yOThRprCg7nheNQbSU+I+oPOtiumMAvhpIZWNEOScOC1IcPCOTJ/R+VLdPw4F+FXFsIcbQ49BMdV1Gbs9t1rPq26YccUVV+BZz3oWnvvc5+44qb65uYknPOEJ+Lu/+zvEXbq+/vIv/xJvf/vb8YpXvGJXtnfWWWfhpS99Kb7jO77jqLdx6aWX4pxzzsFiscB73vMe3OIWtzjqbd3ylrfEgx70IJxzzjk466yz8GM/9mNHvS2Nd73rXTjppJPQNA2++7u/e8fbA4C//du/xU/91E/hvve9L37/939/aSy4JhFjxHOe8xwQEVJKeMhDHoJXv/rVuNGNbnRU23v1q1+NX/iFX8DjH/94PPWpT62YptcU8AOAxz3ucdi7dy9e9rKXYe/evUfVJoDviW95y1tw3/veF9/yLd9y1NvR+MpXvoKzzjoL97rXvXa8LSKC9x7/8i//gne+8527AhSdd955uM997oOvf/3rICL8/M///FEDxG95y1vwjGc8A845fOpTn8ILXvACPOQhD8GTn/xk/MAP/MA12m5KCb/wC7+AV77ylfjgBz+4o2t/sVjgf//v/40f/uEfPupt2Pjyl7+Mb3zjG7j97W+/421573HhhRfisssu2/G2TjvtNNz73vfGYx/7WAzDcPgvHCbufe9747u+67vwkz/5kzvelsa5556Ls88+G4997GN3VIhw2WWX4d3vfveO2/av//qv+KEf+iGccMIJO76m7nGPe+CSSy7BU5/6VJx33nlr0G+ncWAIGBa8tFEPB6BmWvjW8wIzJfShVJcq
fuCJE8eRSoKaqLzHj7SUNMpSocrcQ11FbX27lhYy6uFlpf6C+JrZhEnTcKJs6JCaDZBvOAHhW2y0MzQifVVkTiknuO2CHlhekB/q2hon+DUU+NPterPkL5I6VFW+5+Ogvn5xABbbS4BfFHlPDD3CfJu9WAzox8cmZuAnAiy5iLrSlhszkuGJAUALl/0xApwc5wIW+SWgj7oZ3OaeDPqh2wB8h9husrdRu8XJqnaGRXIYYsJ8CCWBqJWzKWWDe+13CoA40BL4AUB80iCyR7Tsj+EcUqQiw6ngjybxeOfzY2aAWZnPfGKll8QBgM9907L70qhvL71HJMy/BR/LGJA8VyhjYEYmDT2ic6CmRes9nHcIAtoH7xAXA8izvFEMsQB73kPlVsmZfwLqkU3E6PO2W07QyHleCnMs8vXXoJb5VKDPDVBZzQK+6pdHgL4w/fi1BUuFWSareFgWGduxDJoXxmpbHtsOyc0ZfHYecENOOpFJOGWG37DgR+kjBDCbwDeZRQNH7HtkwL6pZJ7K3QEFbKvGFfMH9+0EiixN7ImLEziBWeQyNVE3joiSnNJkrx3bkrSlJJZqlp8F+vg1C/TVySsL+C0ltRLvd+v1vuIQiZPxcEAIKcvvfbPGiSeeeMQVgb/wC7+AF7zgBYf8zCc+8Ykdt+mqq67Cj/zIj+AOd7jDUmX88573vPz8rne9K/bv348XvvCF6+SUCXut2ygpXJRiHi1oSOzdWdjKIvnsfCmicA2Sr8fMfE8y423295MCE4IscB1AobCDipQwP6rvngIlhPq2mXTfUBgtC+MTqgyywUx+vAD7LvEPRgWHzDbdkQB89v47Cjt8OAIaR4hJ/OXgeHwW39QWlP0ENxqW9tw0gF+jjDXLXMtAlil00sM+xfZzDQgDF51EN32PtB5+I6DPevqWHeMCFmo6UDczcp6sYpF8h8WQsnd1Bv3MHLaSnUy1XGeW5lwBAI5PEUt7sieiSuU3joSxGU1/Xgb8LMCncvC8i+Y4GcYjZa9Dx/OCAVBFgTzf0r6f2X6O783gvkrEcpgqyQ3w3/kQ2D55CAAQWF4XWAlPZfYx6LcM2gEorK76Z8vvw2xXi3DM8dffd46lIFvvMkMsJWa56kwt1wlgGeyz84fDssds6DyJiMekFPkxDmW8kvVF5a03IeE5ZvUdjuWXhOGsfxegsZ8sTuM28LyYgcHAhXYqlR4HUGwEAOzyWKlrBzunsXOqZSnPcl3puVfArw+1bYQGS3jy2Mjs5AREgAT4I+JzTYkmi58mAT9bEKufO9T4eigN0eM8jnRutZ5XfXPHySefjN/93d9dYl4ebTzhCU/AU57ylF3b3n3ucx/c7na325VtAcBFF12E2972tkf9/be97W34mZ/5GVx11VUgInz2s5/dEegHcEL8Qx/60K4BpX/2Z3+GV7ziFfjWb/1WPOxhD8OLXvSiHW/zU5/6FC6//HK89a1vxd///d/jVa96FX70R3/0qLblnMP/+T//By94wQvwXd/1XXjyk5+8o6T9eeedh5/6qZ86atDQRt/36LoOn/jEJ/CoRz0KL3nJS5ZY00ca+/btw0Mf+lC8973v3THzBwDuda974R3veMeOt6PxqEc9Cne5y11w17vedcfbWiwW2Lt3L570pCdhsVgghID/+I//wIte9KKjkufUfvHa174WAAPF/+t//S+8//3vx0//9E/jec973hF7Gn7qU5/Cy1/+cuzfvx/nnHMOLrnkkqMGcy+99FL81m/9Fs4880xsbW0d1TbG2/unf/qnXQH9bne72+HFL37xSvWOaxLf+Z3fife97327Jq36+7//+7vK/B6GAXe84x3xwQ9+EDe+8Y3x4z/+40e9rRe/+MXYu3fvjqRk//mf/xlnnXUWvvjFL4KI8PGPf3xHrFIiwlOe8hTc5z73ud74IB7XoN/+RcDQBrSO0IPn/n2I6GPKyZ8+cIJ25l2VmPUETpg4J1W/EHmZkjhqJBnQJJhEEi1V
oQIG8BvH1ELGVhJPsH5ySEU0NS1oYxNwDWIYGPwbtkHNBrxv0DXs/Wd94XQBD9SLvNws8x5QV4MmQCQ8y2tHErogZ2lUw/IbFoXl1zPoUTH87GshIiwGpBgRF3VlQiWdJH+rNwyAAtzJ8xyatGkKhVjPnl3AUzdjgG9zT35OmycgCdCXmg4QKU/4DtuBK9L77YSDw4BBJMLmQ8D2EDEfYrVQ9sRyXc4Rtlrue1qNr/3OOwYCvWOfNOcZyPUkDAMY1hkZHz8NZW8pyMc7ySDhqkSE/pka8QmJfPykv+ZqdAGwLAAIw2zgSumWE8EpIXUzlq4UgBdDz8k++Zu6OfzQI87nCP1QZFdH0qvqm+PaBuQdmlnHrEtJHlLLAC01LBvGgB8z4ioJphWhVfxWTgmLkughXfgokKaSmprcGVUlawJcPYEQg/GrVNAvVDK2S4wJw5B0G5vM9Gs6oJvx6zGW15rWJKGYjaiyeaR9ZMQqKGDt9M1Rq8WVUacTkOxXl2oWgP0eYK4v8R61UsKF8UdLCbyqDdcwQWUlPENSoK8G/zQKa+/wEVKC5v8cSZozCtinRRAx5fuL9VHV39o4nN/TtRDKeLw2t39N4+d//ufx6Ec/+pCfOf3003Gzm90MX/3qV6vXh2HA5Zdfflht+quvvhpnn3029u7di7e85S2HlY64xz3ugV/7tV/D9vb2DVrK6JqEXu+6iM8sEQBNiqB+GzTMQUMB/ACUAoahF1ZXJxLFsUgZhoFBEAkGl0zRihaUCAuH71uF7deyxIEUOvA4oMUzWvylAI5DYYEp8KhAhjL8FpEl0rcHBvu2hyJlx4w6YNb4zPINKcHJXEu9q5gNd4hkjwEZKmDAXEIZ0NBx0rvMilKvZgAZ8NN2OUKW8Ox8Af5mntlkNCyq+Wdh+je5gEWlTjW8+B6Si4DjIp+x1KfeLyelPM39NjPuncsy6VwsxXOq1G0iNjPMQ8J8iAaITUvglY79ixDRmwZ7InSNx1bn0XqH2Ho0XQAWQECE9w6+4aK0rmFmZCeAaetdlvb0BGBYcBHeYEBSedQ5qPMOYSwP76fvpVCmYL9A5UNsQBtED6IBiE0B/yD3Rj7pgKiUVECfPR8Tr419+uznMsCn90t7f40pS1JPLWsUrFPw70hTfUv3jEgCAEa5XxJPV6W9tvDSXt86f+D3pE22cG4MJGmQAyiVwqgGSGEAoStzawCUvGwm1HNIC/bpXFvVH4B6/q2/Z9pEIjOb9G8gA42Z8Sf+ggrwLYUCgI3MueMgSh8ExCErOAAin5qqYYY3MTGfUmanFj70MVZgX4zL84ko+2mLoloPJClUTOoVLeAgn1NlM6NaVywp5EwBtnH0t57r6ziub3Or9bxqHbspkbYbiXAbJ5xwAr79279917Z3pzvd6ai/+9nPfhb//M//jBe+8IU49dRTceqpp+LWt771rrRrt7YDADe5yU0wDAO+8IUv7Bqz5id/8ifxkIc8BH3f5387SZKffvrpeP7zn78rMppbW1u71u/atsUb3/jGXdnWN77xDQC4XkqFArxG2w3ADwC6rsNLX/pSnHHGGXjMYx4DALj44ovxpS99Ca95zWuu8Tj+n/7Tf8Jpp52GT3/603jEIx6BO9/5zrjLXe5yVOypq6++GhdccAHe9KY34ROf+ATOO+88/M//+T+PCmh2zuHtb3/7rvW3u9/97rjjHe+4K9vS2A154N2Wzdzt+3jTNPiVX/mVHW8npYRnPetZOwLWLrnkElx44YU444wz8F/+y3/BHVJzhgsAAQAASURBVO5wh10pQACwa0zt3YjjGvTrQ4QPKSdct4fi3QSU6mmnTJZRBfB8iGi9ywAgSyI5NE7kZxIMI1CS0kJ389BFqSRzIOu7FcAfgCz9ZD29okhapsWckycW9AsFCMiJs6EpLCsAyiQEOTTq7SAAIKCLfcpAIFAn0zWBoK85YpaNS1y1mVJdhT4VZBbbmqiojNjtv+xdVhJGdmGbIgM+
luG39HsixQnAML0EJJnycfNF7hPgSvVKDlLAIQaQOtBsD9zmHklMbSE1+riB1HRYoEEICQf7iEXkqvTtgRNRVy8CtoeAA33MyUObrFM5ru3BYaPxmIk0V+sJrYNUOwOUKDMZ9JxY2MBKbiUqjL8K7LO+cxPsvyoUREwRoCJTy0ARlWSFLsrVQzCa97WKOW+HKhZe0r6t58l5YFiwfFwXKknXKdDPC+iXAb+mZcCv6SrAjxQEMwCw7Q9L8mPAqA8aJgPACQjnQV4TzSsquU3CQhOhCnqqhGeSax0xZO/DwvSzfb7Pxy0CvH8aun9tV9gDQO7nFXvGtu0wof1UkzNW9gvgPliS/7Xsl1b0jyMDX64UBGjNhBd2BSfs00q522TaYhkJVoIqKNs5FVlPC/7VbaoLGby8oAmpKYDwcDEF+KkEnydgsfP52w0ibnKTmxzRhP9e97oXrrjiCnzkIx/B3e52NwDAu9/9bsQYcY973GPl96666iqcddZZ2NjYwJ/+6Z8ekYb9Rz/6UdzoRjdaJ6ZGYRcdIXERUASVe3swxQ3g+U1U2UcBOPQ+r7LJuVgimfEWKFLS4KIJ9dlCjIDjpAQzR5jB4p2CbzxPUcDMsohtUcE4ovxuL4BfH5GBpD7GAoA5wMrD6/Wt405KDH54AJl9n3dkmUGXn5u/SdBIK4usY2HrhFmcgBj5Redg5qrF268x/1pHRYXCKi4ALJ86kiNUMLQ+J66Agvo6OTjvkY5AKWZJRcF5lvX0bZb1hGeGX58ZlgrqpUpiNf++jvdmXmX9xLxjUK8PPEd0DQMR5Bjwc858zrvM9PMKIllPxhHgkqXIs7R4/Vj22/wdhe1aPfI929mCqtE/LcphGWwuNFQJbpj+Uf/wxEsCuujcHqilp4tnZ/EDt+wuABgz/PhFwqHqWBRgn4rJeUKeV1AlCcm/WR4t4DfJ6tPHvOYyoK08z3M4yDza10w/LZRSxl9piJHyzCB+Ac0rRY1JqVcwQ3MMoBv1DXLK+KvXLMvbMqCYKnBIQWkC4AS0T4n313oX6+u2oIuvK/57iDqHQgb8elmPrfKxc4bJx0VSPF7qkjnVp7TsW4rLgJ9ef/b9UVFtzQK89sC34yXW86p1rOPI4vTTT8dzn/vcY92Mw4Zezw9/+MPxvd/7vbuyzd0E1jSur2DYbkQIAZdffjkA7BoAcH2PGCNufetb43d/93dx6aWX4tJLL8WHPvQhPOhBD8Kb3vSmawysbG5u4mUve9mO23W3u90Nd7vb3fDCF74QH/7wh/GmN70Jb3rTm/Cwhz3sGm/rDne4w47bY+PauK7WceRBRDtm0n33d3/3rnpiXl/juAb9FiECQ8iJAWVWLQxgtG8RGGwRRpUNraJmwIUrybdaL/4oDid0zYRkEuDl9xyAQARPDa/x4sCASRiQSBhYOcFjgS8B/FTSUtk+Q48smeRcThBk8KGbsdRVHHih1rAXBUKfgQhddDZAzfAy/0LixbVlzPBikHKVbyJk75A4sXxX3xxddDui2s/PJJoyu9EkBZe8TgzQN+WNohJKS+CP8wz2aBVua4CeQ0h+kgEJFSiibgZ07NEXmg2g6RC7PUDTYfAzBvgGYfWFhIMC9B3oA66cD5iHiKvmA+ZDxHwIOLhgxp/KI3lH2Oo8usZh70aDWeOw2Xrs7Tw2Go+TNrS/IQPOBD6oWpVNMOBflYwzr02cc6AkYeI4w5d7M+qKr3EyKkyw/zSRYhbnyUrcNkXe0qWEONsqIPe2+Dpu7gFihOsX8HINjCuc7XmmVgA+YcJWYJ+yY21yBigsxxQrFiO/ZKqqswxXnZTObVjlEWhYelH3IYSK+WL9+5Jh+mXw24Cd2tfhJVE49EDkhTb3CfGdcQ4UR/K2q4DdFWHZeppUTbkQwI4RRi4zFaBPk4PKDJiSgAIKENZ6TtKqp5IyrB2mq5ssc3lKeqyPMfvN1P59y+OW16y6JKWiXDiOSlKslexUMBlnBhMKkOAMuKDA
3hjos4yc2B5dJeVOwoK019b2r624/e1vj7PPPhuPe9zj8MpXvhJ93+NJT3oSzj33XNz85jcHAPzHf/wH7ne/++G1r30tvvd7vxdXXXUVHvCAB+DAgQN4/etfj6uuugpXXXUVAF5Ae+/x9re/HV/5yldwz3veE7PZDO9617vwm7/5m3j6059+Le7N8RkWHF+EJOoH8mZYgIbtmuWnIcUO+hyAsJtaUCdAlE26J2GV2eIoF5GUDTUAaNkHFGCmsIIE7PE5YssL+OfHYIDMfVQSvQ/M7FtELtq5ehGyUgQg0nXCxncR8M5nGXXnIIURyNLJGTBwKB658ru8r8TzNZWHRhlr8zE3jEUu5AAw8mvLAKcZQ2fewTugk/kqxQHUz7PEdHWOhEWpm0wo4zkg4wYRPInvqxug/tJZlhAoAEXVaUyBjZ2XdTOgaRjka2bshdztQWpn2I6EeYjYHhIODnz8c8HUikFmHmLFPGo9Ye+M/ZRP2mpZelIK/WKbpDkeTeexKf82hPG31Xo0XtQ+hgX37Tigktw2gJ96DDsgs/3cKpafhvofqvw5wD7IAowgyvnwWjTEc3siLkoMIHikyfvj1C8vF/hRlshWkFn7b4iYZHcpa95Gm4vtAARmeHkUmU9tXtK5qlasYXkdwfME6XPiV2jBIhtaWMj2C3p/RV34CCwrrkwVO9lrVOUxhfEHx954pGOUnScSVfPq5LsloC8zlu0cTNvgzBwZ/PvK1EPT8Hl3ka+bfGQOEUsFleztR/K7nrjSKsYkxZ31FpMAfpnlGbUQYrkP9DEtFU8BWoDAoCCz/iJakUBPiZCkcJTHSZrep7ElxpilOQY2Lbhrj+91GMfr3Go9r1rHOo6POPXUU9G2LX7zN3/zWDflmzbe/OY34/Wvfz0A4GUvexke+9jH7qpM7fUxnHO43/3uh/vd737V69vb25jP58eoVSWICN/zPd+D7/me7znWTVnHDSh2mxV5fY3jGvTT6GNEiMB84AX6YihTZn2uEk82Qkx11a8nHOhDBv76kLAhAA2zRggdqIB+kpxJshBtncgjQtlDQ06uEAQYiZCFYswJEU4aeZH8KZIuWh0NATN0QZoBjBhZGgvIC1GuIA35N9hfQRhfnmVovGkTTGVtrgZ2mmRKwjCrk05AWYTrAp1Iq/BRLcIO6cUg+w3HEjquaxEXvch2OiQXKzklBULG8o0K8mQQz0gjVuCfZQKabZB6pTUbSK7hx3bGjxsnIMDh4BCxCCVBuAgJ+xcBB3r+d+X2gO0h4urFgIOLgKvnAw4uBixGoN/BzqNrPEJM2Gz5kdlFnMycNezlQomrZ8dJQei51v2JKItfeS0nJvS8wzK0Dh3R9O3CyhAZIflMCuz9Vy3Cpd8zqCaV6ppEiaEkUIhYNksBuqFnia8Y+Ll4Qi2x8ez5HYF9maUpgOA4ObN07BSohLTf+eJFlZmoy5X+VYLT+EPm9w27D0Dt1Sf7ZH9nFeDHh9FcQzpOiDdQimFUgW6SyrYCfbzfE1FJ/mK5kjt7+AnYZpl9CrYp2Kfg3zhRqEdNAbGg/i+e0BbHSpEAXe6h+tsAJ6aAUo0OYAnwW2L25cRjhm/hRwYzts35+yMmA4N5y2BfqzKsE2CfslCGwyWF17EUf/RHf4QnPelJuN/97gfnHB760IfipS99aX6/73t88pOfxIEDBwAA//AP/4BLLrkEAJYWZp/73Ofwbd/2bWjbFi9/+cvxtKc9DSkl3O52t8Pv/M7v4HGPe9x1t2PHQYQRCKSJ26moWBcWCJJipnyPl3GL2hGC5bt6g8oSV2UAgME/18CTQxI0Mpi5SuNQCgkUELAJ5Dz+8WMU8E8lPreHiO0h1KwW4vHIJVoCn6KRrgsxofFyr/HsrWVltyv2vbk/K7MqHwptoYJ+BCAqIy/JZ4pMsjL8WMoUme1HKqke+lJklllOy/fEIuluij+Qah9gA2IUedBR5HmWzLFsQVbTMDCqgF8zQ2pnCNRgEWRu
ZR4P9GFpLLdKHX2IFevNi2znZuexd8bzHp37J9mGaxxO2Giw1Xlsdlx0tdG4XNDnHQF98dken/RceOaKhx8Z7+FVkUJYArF0XmDZfgoCmQ/JXMWJ2oNYC9g2mXNhlziqRJIEKOS1DoAoDFmRflSGl977menKAI8t4tHbZR+ieLctM+D1T4PzAakAf86l3J9Zsr58V9s0DociBame4c5e33KcKrDvSEAgMtL4dj2UIpBS9Xd1TnR+JZKeiYivefPaUtGVaWc+1+B5tPVh1nGSnOe1D+r5Oq2gVuocvMjwR4AGkPj7yWlnlQVh/VkANpo+0Ie45IlslROWIvLJOZwajI1qM2N2ph4f43m4BPiNvxOPgHa8jhzredU61nH9j5vc5Ca44IILduRduI6dxd3udrcsrfrWt74Vz3/+849xi45dbGxsrFnb61jHcR7HNeinC95eEwYGYBliQohJXosVa0NjMcQM+m12Tfb6OGGjwUbjsD3E/HjChsdmw4suTzASdUlYhJrsFS+aFAEYwA8A+ZirwkkZR00rlexdBgAIjDjXwFTHiRO7kEy8mM7SWDa5RS5XfEKkgShFwDdZusnbCvTIyYEx2KcJebuMLnI7JFXnVElSVQu4qXAlAaYm9mg6pBjhOmSgL4UC+lW+MIcD+6aAP/t9Pe56TF2D6Bn0g29YytO3uQq9j1FYffzvQB8wHyKu3B5woA842AdcvR0wHwK+vm+Bgwb40z4IQPoZA8qLIeCEWYuQ+FxvtSz1CSBLzoaYECWJGVWCZ5xwU+AvAz8C9gmTQBN6wDWrTC3wCERiymWvGe1j2f/PnPOcgAAAZ+TfgueLxjUgP4Dajr19+gVSZ2QugwHJDKA2ZmbqeaRuJteRJmEOU21twEvez7qPKuCXxGsTFRA35a+iUqBxCeCrGINjQFG+Owb8omEpayqRvEp4tuLlYrwpbaJ1QnKqSjiPgxwg1d4W7FNpTyuvmeU8sQz4aTW49a+crgon+Aj0Ik3XR6ANSZg0lJmt4y5eEtGFXZgZhyPAr/o9oTSExL/tIFXvVCeQp2Kq/XkfDsPqs3Jzuj/b1yAptlsR5B54bW7/2oxTTjkFb3jDG1a+/23f9m2V2fWZZ555WPPrs88+G2efffautfGGGhlATwXIr4p/RmNKVaQhhR5JZJGzLB76zPizjGm3kUbAHG8/KWAFAMMC8BHJNcI+4+IrvTe2It/YusL2YRn1mH8nJ+pTKnLAUiw2z9KeCX2IzBAjwMUETwnBpSzxyXJ4lI8RSdGOdyIT6BtkaWyzP1UbACMPakBT0jFQAJ58SHgAUTa0shoV9GOpStQMv7AoftK6FZEy1GOibQnRKj7I78UEL0DXGPyzMoRlfmsk00dzrNTMGFxRwK/ZYMAvJswHZvbNpZBqLqDfGGjQ8Tb30VjuUa0n9l3sEk4Q0E+L+nT+paDgCTMG/ja8w8w7AYpFEjWGIltr+jWp77MUpJF3PF+189N8Lej9PVYqAhQdssSnvh8Cn3TDbEqj5yptq8Bfvsyo9Iyp2xlbX4rcrOPjoSxRe6+0kp4K8Oh9HZD7q9yUW9nNzIZPCW3unXZtINsGMvAXEwEj4G8c/BpV29B91LaTfsKywYw8JFAXIiz561kAW9ZR1fwWAILMmxQwN6zdMeCXXJPn3NNzLi2Qc/XayDcFYExR+hCvg6gB0gBMnNYqtMi0gGWhrDm1wFOKJMbbKkzl4uOoEukKuCsArPKvy8EbkVNa9YvqU6MCh+rdEaBXge72fKbxOa4fr8s4nudW63nVOtZx/Y9v+7Zvw/Oe97xj3Yxv6jj99NNx6qmn4qtf/Soe/ehH74rP2jrWsY51HKs4rkG/jcZVKfsQWa5Jn4eYcHBRgJfBTNTD6HnXuCy/eMKswWbX4MAJHWaNw74u4ISFx1brcdKswazxOdnSOmZjFcN5wDsH38yK9EwcOGlFjoEQclzd7hd5geMmAIVkE/m60NTXNGEzxWaqNiILIziW
gIwRTDdE9T1NMkUCSJJpAZxoSabSt3zeLMAJkyy/+gu8GKamY6nCjVkB5TLjq81sphyuJJkKeCevqc/ZUQJ9yancVMsMv3aTX++2MCSWNJsPEUNEJTulvn37tgdsS4Jqv7D6FkPAYog4sAgZhLbMU4D722bncXARsNV5xE6ZB+ME4IoJhoK1Ci57k8wgk8hTAEaAnWsSNrlUAEDetnMFyJ6s1NXFuvNFCtQPWcooRUlOxAFoN0GzPZz0sF54Fiyb6ANFUqmZ9lUxx0nbN66IXpKmsww/Be+UeWilviwTcYIZOJa2A7DEXCyvy/Xp6iSivkbe9G1wcjH7JLYd1Ccp9+nRsZgMBUfNubUAMcs+IctqWvBPQTcF/Gw1eAH/pn/WSfKQpfECWucQUmTwL7Plpj30LMhX/Z1BCTnkWZpPGA00qkJ3ReZZATt+eVn+eVXoEGPBPaAGLEtCVpKl16QUfh3rOMbRB5Za1IgJWcQvuQbkxY8N4MS491wooWH8/NJiXjNWRvfpND/ATO1O5IstoyVqclzY5i4CLRcsNfmaUk821ECXSdRzYYxsElpAUGQ++xCzZGQfE6s5eB5PgiutV8ajRxkzQ0wIcuEr8Jd9cfV7VN+jxyy/fNi4UgtwDCxabg8rE5fiCC9Snp4InZN9H1h2FcL2y/c9TBQMSXuyzKMZu0kYYjFB2EjE4F+UoiInbP1YQDE7L8vqCb7N6gloOi6oajcQmxl7Iks/2x4Y8Ltyu8d8iDjYFxAq+8Iqy8uM4XofcMQqCUCDG5+wga7xGeTTuX7XOGx1HidvdbjRVsesv9aj8yLhr/0nBpmfqLSnzwV6vu0R+4GBv+AOeV+nFWoxKYRSrCN+lxVTS86Zzq8U1IsyDy+wWJmDV/MwOa98zJQ/R0jEAI/tBXq+1cNPPS0VAC+fYxUQJ17TIQF295R5mu97UhjJwCP3q9Y7uJjQW5RIIt+TDZMvb1ektR0MwFnN2+r59djjjaTosYoszy5zaSDL1+d12rAo/tSWMaz/fFlD5LWZb5bXZArgGhlLHTehjL68fQLFFurTnKKr98850/ayfUT22U5SJJEAUBSveWpkLbE84OSCKgH8leWXgT4D+F0Tr+NV72kfnoyJNcSSnGcy4+r4cR3rWMc6biBxy1ve8lg34Zs+iAj3vve98ba3vQ0//dM/faybs451rGMdO4rjGvTzRKIuwqsIC/jZCALW6OJ//E8Zf13jKqDGO8KGmNhEWfh4IvQhYe9G8UQhAuCA3qxl2RMFRUqzQWbY5QUfERBLQquKMYjhC5NpqWJ1VRzp50bBXleprtZd+owuwAvLzwmouLLykpjZlyRZxKBoLEkV58pr+YdKcpA3cWjpzlJtPgL7VPrReQZHvDxvOn4uzD74DovIfpFDBBYxYcgMAAYBt4dgWAFJPFFSBpWPtEpzSnJ27Gcy/khMEDnZmnWnx9cy/ApYM+azjX5z4jULEo6ry0OySSdJ9El7NOEJgL0tTSU1UhQmYhQwXJIs5hogz6yEpYV9JS+27FlofVWW+nvijC+DjaMdtZ91HrDSdBb4A5B64zcofVTfG0t18s8uH/Upv8pV7ynLtQY9Xe7r+h6DoMv9ID8fezyOQls5rsbW17QSXEOrwXm3xz56qN7jJpbkfBRmAGcxI0vnsY4dfzYReqRDAn9AzRIYM/w0prYxBvrU6zUnlo8AnMt7swLgG2+CiyIOu9ldjzGL6NrY/jpumDEkVEl/BzIdn8fb5DwoCcMlGYDEyiCrdy9QWE/6vo531ru4afm+ION5ni9ZtrmR+tTwBH5dwT4LdkHukylm71sthOF7ZWE4RUl+e2JWsEXdLAtJ769J7sdJgIVERaSwujrs/cu8Z+8QTgoV7PGOKLKHWmhV+xdSATvDwJKekSUqs0eWNgEGwBiFHfvLPqGSjs+FW2BgNg2Q86m7JkU5Cvg5X1hQTcfgn2+RfIc+AkP+x0od8xBlfsWeyHovKYAQj89tYs8w
y/pTr1jAoe/KSVOmn4J+m12Dzc5j1ji0TsZ/Z/rPxPEh5wFvCm+Mv9/Y0vAaRQwZ3Mm/GSPPj/S5MglRgL8MfMEAfisAECKWBuW1BzChnp2BH2XQ6718zLKM5nP29QqA1HuiNI4Bcv1MAjk5RwL8jX/D3n+1v+tzlfbMn7D7ehjgh4E/bairQXj9DCBgUyrjBTA5ZxxL32bAbyzxKW3LhQBhED9BBRfF189+R9mzDUBDPY5UPuU2Rn2AEs93QRHO8RyJlVkSxtjfeM4XJs5z/qyZ0wUZD1eBeMxaLmNX+T0LW4++cxjAr9rfYxjrudU61rGOddzw4/u+7/tw8OBB3OIWtzjWTVnHOtaxjh3FcQ36bTQO8A4+MEDXOFoC/hTMCzHhwCII+49ZWNvbA0KICAPLOTnv0HQeVx5gtt/BRUDXOFw977B31uDEWYvtIWKz5cXZVuuBhhe4uWKSWBDTEyvEcFKmgVcmToqgsFFJskxGtQic8OiCWSDptmyMwRKgrkIVMHLV0kKBP2C6MjNLe8oxJkws4zRJp4tY3wE0wG2AE0IxgLrZMlMqhGX/CrvYPSRwUkC/4onIC/Go7ATnGeBT/z5JSM1DQugjtgXQG8TDL0Tg4BAkGcX/+lgv+joBhze7Jve5jcZxQksAZADY6rgC/aQt7VMNTuiYRbq3Y9mpXHkuFe3eFWAbQJZ6Uq9GDQX78qOAfVrFO3UOgaWi6wwC5h51CIBFP8sJGU7MZiagMhGr6t1W/i4L+mT8O5bAvnEoy3EMbK0CtnMybeDGxgEIAyhy+0hYKuylKe0JCuzxYwb7htFjCIj9gBhiJdGZwnLb1Q9In7vR3/VnR8C1ytm2IvMrALZ6G2oCalxhvtp7qWabaMJXJWRV2nO8F5og1yjJQeSEuVaH20RSDOoVKj+fgBAM64/YL1XDMvGuaeh3xmOWZfI5x8xC7wqroHGUpfPGbIWp0P07VBPtMXCrzsU61nE9jP19QFpEAzAhM88TOaCdIfUH+cNpxqATBNhrWmbtiUdrWsyR+uLXCiBLFPPnO1DL36GmA20w649mA9DxfbqaKwlw5s39j2Tb2cfOFsSQz98D+JoOcm8MsTCcptgswPS9U7cTUwIlKv5zTiTetSjHxqggZyqxmwEdGS585mmVOVfrtKBM2GljSc+B2WqII7bjuP0KWJiByu6TMjsL4Fck41W1gT8hcvRavNUqwGcKq1QyvdvCdkhYxJh9kvctAg70Efu2B+xb8DyLPf1SlugEdMwGWlckoZVB3Xief7SOpT7nrccJGw0O9iGrfDQir37yrM1zro3GoVMvxH7BDNE4sOy4Vd+QQhvXtfAxwrcNAoaVPmsAYJn+0++NqIBjUCMzxIYMelf3tRVyh1YdIYHniXr7TQqcjdA/lsmGMPYj99HqBoYMhFs/XO2TCj5nYE4+kkxfd6kAh975SqpbC28IZVvq5+dlnbHyXmu9rWUfx2y/HKM5o11XJTlOSQB0XictgMD9gpLOPWW+ZdRXUtMBrkGAXOPmVHpycFLQmMix2oUAfeqVnYHzOAAt5FoO5foa+tEayFfnX0E+oqGad2ufsOdk6jAm1EDveBwc/x0jg+7W6oElch1aT2JRIPcOkVbXuZbOhRiIPoKCVFu4MAa4r2FB6zrWsY51rGMdRxrf933fh9NOO+1YN2Md61jHOnYcxzXoBxiJNWKgxQvTSllXXeOzp5+V99TQhAcvcCPiQFg4yoAfACyGgO3BYT4E9NGjicQsrxjRJo8IrqBVCSxKvIxTLw1dZjldVLqJw05lkQ4U8Eafa9gK37z2tqbnurlDSEsdig2Wqt9anahWwE8TgZWf33i/tJrVFXYkiOVEU9PVUkqoF6UrK1sBTHqs6XesDKQvVebwkoxSualmxqyGkLA98KJ3oaBfTBgi+51sD1ESg+UAsRRRkgpzn5NTIXoGh7vaT1K9Izcah5M2W2y1Hid0HjfS5xssG7vRiFcPERpXEit5v7EM
OuTziJJksRW7UykQ3Rc3qsa1Z9D2h6nkpwJLhCQ+lzlVmEHAXOnsYBJZZuGuFfS2nVOg3xSgdahFvyRD7N+lMrtUWyciAdhEqswLe8EC0QoCCuAXFz1SjAgLlitNAvzxZyUJNwFOr2LzVUAfUFiqgAGv2+r1w0VV3c4/Xh23hJJ4tqd2Kl+2KofmiZl5RxKaKFLQWl+DVI2PgboQasbfKhBwCuCz7ePvluetdyKPJ0wFSaSTJhoNw2BpH/J4eYj9RCmWmGJPXlfBoOy1u/113DBj3yIAfcBmI2Ohq+WivRQOwQvDWZPLUtBTJOxKQU9azMtz9SWNEcnNGezre5YvHrZAIXCi30ty2zVIYQD7xLbF+wwwBSWHZ39ocQOwXAyTpTX1b6dzm/pxHLodB6qYWBXwd4TJaVtsZV8DUGQ9BfDL7LQ4MMgxZviZpP+YCb6yIAQ6r1mxvwr+iWoD0JX31FfXMwBSzbd8i+QbDDImsUJCyrKS2+KruD1E9DFiEYoyR2Z/NWBfONFqZX859VWlDEa5ICxuYfKp57d3NShhWd5ZMjIdQqnCMuwPESma+Y2VC7cg39R2pvpILPOmiR8qX7UFgPl7Ln/Oreh/STy7C9gDeZR1kc4ztdmmqCYLbpD2dylQQ5mvMrDDUrWI7CmY569sUrjkf6usPlrVB/NOK3vX8aGzmCkiJn39YArGpPBJ26MsQu+77O9HSQo1Y6y3qYoSrmEVFgH8QiprKdPQbAFBAhaSEynOsWKF+L0npyOReL2bJePS/G9K7tIW2mEa6JuKI53PWYld+5pzdvzQNSKyV+lOIxceHGPAbz23Wsc61rGOG37c7W53w3d/93cf62asYx3rWMeO47gG/ax/kwIvWh1sEwaLQcGYGvSzlawpSuV3iMACOAigW/CiomscusZjIcBP66Qa1rsi8QQFWXhxmxSoiZx4T7ZSXjLxloWVEA3rRpJSo/211d6a+GGmnVTyuvK5qTVDAYRk/80C9XBqInZ9V7O7zGJuhURprqBFWfQiDiJ7aqqUR4s49R2rNjmubM7P5ZhZUEM9zpTd1wjTTyrOEznMQ8q+PtuSiFJ230LkpmJK+VEZAYCACN7BO+4TMTnMGocQjVSSAfw8sVxs6wgnbDSYeYcTNhqctNFgoyFsNuxT1HkyIETpNxoWNLGJWKBOOCjYNwVUZBkmWg3+WdknoAYbNTQ5pB6Qjoy/ZfY640pnfg7x+JMNWHAa1zDGi/5RpXzloyKvKSAJqeReYvs5B4xl6LLMZwH8Qj8ghYgojylGxDHDLxSPvjGbz76WvSeBAuoZeVr27pPnTZfBvypxqPu76vhUgGl5HjE6v9CEeDpiwMpLltsZoHkcts/q3/mMx/KgslFjIC/L7WGZzbcK5Kv+NonFKbCvlfecJC6BOkk1LsBYNU6zIiCVa4fSoVkK61jH9TCu2g5AxwUPs0aY+k7vKcTJbr2XuwGgDSDKdJJ4XhJjZJZf4GKJtD3PzGjf8tga+iGznl3HxQ20fRBOfVGdZ9afyuYJSwYxgiDFTtEkuadCCo8YJOCXxmNbAfgACCvFE00mt20k86hM6WBkfi2TZer+WTVTigTseGbv0zpmKeBHKQrDcciAXz4WKlW5m2GPoxZw2eKb8XxL1RR8m9UUGICTYqrI8p6qoDCIn9x8iFmSU5U7GkdZohNwcJRYJdwVFlgrA/dG49CHiD46tGb+pp5zG97loiplQOn8YJIhZgt1HPdV8g4uOkTx4tWIIcKFKI/qDVhk61MMmRlZydNbmUkFtHVOfKjzqCDlGPQmh+zfLdtm3+3D39CLjKd5zTD8nCm0zGoUVO6r1ZqAgKRgOLGPJhw/OmK/xiTv8feKXYAnZOBPPcOXrkRTLFkBfylmxl8FcGv/JZeBG50zhyT4qiNeU3kB/JKMcRFFKjgzBdnjVAG/QdiL9ign2Y/MAtZ2pHId8Tn3RYHCzlW9B6xfqg2d96WY5XtXrf8gx/UQ/NOVoR7J
3PR6vuVsH1jqE/VzBXarIIdEsTAhnXQQ8/7k2vIYS3yuYx3rWMc6btixsbFxrJuwjnWsYx27Esc16Nc1DiRSm30kBF+WOiq/qFW+V8+H/F7YarEYYvbw60NEigkpsucEjRI9DPrxv9YTGl+/rwtaTvhwVat3zPRj8E/hg7rCPIHXNhEMMik7KwqIaBPl6o2QvVz0UcE/KgluBYumIq+JDaAzBS4C01W2Y3Yfmcd6I44rY00yKPvzpAhgg5MggEErVT5nuvU5GTL2fFCwo5L0Mj4bWnlupDwXkmBaJeVZPPsK2AcwyKWJQd8cWSbfJhBnUmF+Qteg9YSZL0DgRqMSOSUJZZMaU+zP2ntPX1PQkf9W4LE6z4l/g4E6kgrsJL9bt9+Cxav6S2b8UUKQ/hBS3RcLeOkKCCLV1lNxWF+LVG9Xq9kzYJQi4CQZJrJdGXwGpIIbUNldTcIl9dSZYOqx9Gct56nhxjKdRr5T5T3JO/i2kddqcG/8nN9ncC+z/ZyrJcViAAYwy8ImfMaAn/4Tj6VEDiGk7NuknqXAcmJ61fkpYB//apSTHYnQS9LGjmGrmHqVdJQeUklss7eoAoFUMw2UAUNUgXzWn88mmTQhqTJkNknZmDF0avwcX1uJGNyriygYrEx2NxIhThkpXQehXmXX5vbXccOM/YuALkS4nhAjcOLMIyaGDHqdJ/mWx5tuC8rmdgAoLIAOoMWcZT4BLpboBwzzBeJiwABkpjSAPC66rkG7xYxAch7Jey7CEHl0pLawCMfTV51zwJm5BMFKH6e4fP9yjuBjAY1CShkUap1JajtaTlgDWQ7TkxQuCNBg2UNAmSvp6xE8abQeyuM0dpljFflEBfxUvlNZfmO5x2vChlHQRX9fx808DzHHuAB+judk5t5ipQ6Tb7OXX6CG51g6rxJAj9UzuOCtjzHn+q0yh3cs2+8F+LM+i04KNtijz0nxHxCSZ4UGz1LsrYv5PtN6l+8jpWivBmiXQucG4L7q2gYpxFy4o3OC/HeW/A6g6GsW3mi76gU5Of+1bM2p8zZm+B0makb/kY3fjupCG/WSHBcgauGMrkc09Bi7xJ8dohSJ5Tll+Z0CahcGpr3mMhAGTBYJgvR4iU+0vV6NvUGSbQUF6lPK82FtDPkOpExNEnQ5lu1k6VrXZN/vIdZzZAID0ypRqsAftBBS52zKJnSoX9fttB1SOARcZ6VdxwVmiZHWzNaUooxEKV/3KUlhlVhTRB2TZFBQn1NU8yxkj+TWK4vWiaSnSO5itEaVQtUcYxUa8zyP5aNxzB7bahvXYaznVutYxzrWsY5rEv/wD/+A7/zO78TW1taxbso61rGOb8I4rkG/VpLoMQEu8OIqvyeT8t5J0rhbZgAyCzDmfwBycll92brGZb/AznNiwSafbUSpFI2UgFiAFAVYKvBE5BBZ4kj8FGKpFB17KOgiuHEMKPqo0o9Ak+rKWMCChcsxZqbYmMrL522OPmeTWSsr7e1ibSxr6kefqRIfdUOy9KNJdmVZ0zCg/jBXpGfPjWZDQL8uJ5/myuozUp7bgwCBwujsY6zAvvFxOhQIAZQFcfa78NyHvAP7yHhCJwkrT0DniRMWoVTpawKIXANn5IiAkrQ5FJCrr2nfG4cy/RzI4i1VjAE/C3QAKP0cEElbkt8swDeAqsrcMsIqzMdKMi03dyn0l6zHJPI5EXnROAiA1/CPGYkyUKrkypLKfCqTzjDwALD83JRn35SUp4B9AOBa7vsV4Ne0hwf73ATwlw9WYRFQ8kXujkyixADqLGPqKqBVwb5xf1kVyz6QAvbpIYpyPrQTjdh5RxrFH6sAf1OhgN8qsE/ZA/w6igdpTl6WhLr1J+J9Lb9DSRgB8n6W9DOJcpb3S1Vyfx3rOB5je+D7oPrH6c1ax/6QgEYT3/C5ACe5OZCYPY2mrQoU1PtUWdIpRAzz7fy+79o8TrrZHPHgfgYNnWcgrSr4cWW8AyrmRzV3IIck8nsWjLeh134r
LGO9X1sGi5UyXBUJMn7KtT/lhazb0PufQ828saAgUAA/EsAvz7sUDBrNuyqm2jgZbhPr9mWpWNNxCyjAwNL3oYVsBqCyBVauyUw/BUQUFAl5vsv/EgpeZedXYwn+cdjP6jlyMr9qHXsKx2iY4PKbLk6PyXoPdHKPXHmK9X4sfXqKvZ9lvlsGArOcbWCJT6okPutit+o16JxXtuHcoZmbR8B6yvPFFTf5ODo4tq9nkEeuC+cKOKdSnAr42bkwf7cA48HIeSOy/7ldXxDpNg3ojInr7hBAT9K3pw6JkfW081oLPIak81hdR3gGuKWRyRZPuSYzBnUdZ+fHRAx2BvAjpGDCj+agea4WIT8OYb0peBmX/CPz2bKsUPuaiUOtB8nAi84cNy220u9XCifVtgWgdWW+5cDzoezpR9O/nX80TLBayQEp1PNZ85jsGLSOdaxjHetYx/U4PvShD+E973kPfu7nfu5YN2Ud61jHN2Ec16DfiRse7azN4MyS2bgwtOYhYt+2w56uwXyrxdXzAYsh4uBiyCDggUVgEGiIGRA8eavFZtfg1BM3sKfz2YNto3GYeYeNpjCWuFo4CbuJkMhk0nN7kNl8ujhUcGl7iAgJ2B4Cs/1i2Z+SgAI2Gi+PDEA2AkhaBoutjh1LNgIrgD15TERLhaK8HcuoQpUgOhTYxyQgQlWere8rOGeZSFQkPasK4HwQtbo9ssdPiuxhM9o2AxwNkjD9QkJm9w0G7FsE9ZSJONCHLC8V9RwZ0EITH2xSj+IPY3xiHBVAcMwo0opYAhj8I8AjgsI2MAyg7X7peJJr8r7AN/DKmgTLiI092cbAnDL+8vsCNuftE8SDJa1MDOjHwyhBl5OTqe5vyoZcmbDJ5zdV26+kE0cXj02iaFj/NUIq1cTSnkhaZazMBEkY6wa8SCglSawADKwBQGukPTdmQN8DTY8UuWK6AbKkZxqBgBnoM+y+KVafgnhLjD7+cgX48XZr378UA2hAZiZS23FySJktALLMrUnC2iSVJqrHQK7dIwW27PucHOIj2XoI2y8Cjv1OPcn45aiW0TsM9mfHPP67JG+DgNO8HU3M1wURFuibGhM1EaXJ9OyTJRXoS5LFEp4k2S1jOGS7/FSTdAClCUAw1f32uoqxnPW1sf113DBjewjYtyjFNHo95eKPBJZvlGCmygCERU7KUtMhNS2obZGGFs479P2AMF9gcdX+zPzTcG2DZsYeceQcOufgNvcATYvUL0C+RfYPTBF5TpGLG1zNetK5hdwz+6jXhLytIAbxvTumhHY0vmw0Thh/rk5oX9PrWWXItYgHOq6y5KInKlLgBnjj76CSPLR+XQq2kgE8MyBnf1/nRHq8TLuYIc8/omN87RtYikmSa0A+1ol2LbCaAPtCYm9WVVEYIs8jQqzHj+zN7XhuFJqU/bkB5Dl5p4w+J758jsG+WeOw1fo8B0sJGKKDo8CqHzGhp1QY6CLpPnhmZ4UEeM9zrBQaltIWgC85D3IxP+d7tgNFB981FVu1HFaeG3gtzFFfP+tdnX2npY8KuDTZdxQUmYpVgJ+RshwP1VO4n3MEhAJ82UKdMu8Vr0Tn0Aijq3V83tQjUfuOjZh4XphkfhAikBwQIuU5qXbPcn+W30YNhE/edRSUk+cpRRTWH6o1RkKR9dR1WIiqKACQ0zWayHy6yH1brzvDaE2uwWKoiwd1nadzCT1+/JznEM4JoEcDz9lEbYKoSHXCNez7ByCrURwO+D3cZ4AKUHXgeYt3QCu6q60if4ELWL1q96MuvtLrTwsatSC2dczyUz9yW1ClcyzbOwr46coYI/ujhR1LJQjV+HPdpzHWc6t1rOPQ8cEPfhCnnHIKbne72x3rpqxjHdeL+PznP48/+IM/wOMf/3js2bPnWDdnHetYxzdZHNegX+MIW51HiC4DCBmQiPx8e4iFjeVClv9RcE8n713jlliAJ2112Oo89m40mDWOvUAU3JlYf7OIijzPAIapYJbkegH8
imfcgT4I6BcrsAkQwClSXoxzRTrQuiQJDy+J67JYLiBMqsA6oAZoLDNQF2JVpblZqNnPLSXGU6yST/xlN/18AuhTACJGVL50+SvQhHxTQEk3VAkw3TYA8RpqMtgXRHpnIeDwfEi5f8yF1TAPUbxg+PhX6y6hcdpF7IbIvc4aj8ZxQrDzRSYwL3oN6Jf9eMR/h4YFJ4Qsc9EytuJQEgIpchLAd5zYAicNnTCQDsfS0uRKtV8JLPMDZK/JKYDCslM1qQEUxkL2qzTb0u0USdRyKHWb2q78G6PrZlX6wkk7ta97IqSoEkRJEiucBGXgyDLeCjBGzgGxJBqo5aRzaiT9G03fijNg6JFQrnNa9LDF+wAKyAcsAX0V6Ne2S+CeZfONQb4qpPo/qf9gDEAILPOZ95PydaBV6XBNlhLWCvdx5f8q2a8C4gqwa4A/IAHeCUOPX3aJffimGC/jAo2piDGJxx9vfxXbL/v1GcBvzOpTsA8wEp5Uy3muAvzs/muiHkC+7jTdaIE+Zf0B5f11rON4CmW5cyFJQjRaeRFAMIlYJwAD+Y7vXwOPgepDSm6OKF5nYdGj3z+vQD/nXWZJtXtmiL2MtVF90UKZWyzJTZoiBz+ac8g4r0n+8aijbKVISZLduj/Fo6r1VEkW6r2uzJ2mxw3LyhtLbiprPwOpwOT4NpkonzgOidzquZf5e4oVo+OakK/yPk6OWpqUpxGYYgtLfFdA1pQy0BpSkjnE8tivY7ECDiG5KrGuc/LWqXyggg0KAroM+HktugOfu/H9LaQEl1hONMGzvH5KUJ9KVKx/lfwWsE7YX9xfBcA1fY588fjj/lzuz2TnEvm80cpzBaCw/SzL6SiC54q1CgQwzXzk66GonuhrWrSmvm0Elb0tkp8VWGx3Sf5P4E7vSP39mAGozchgVF5vjMHD1fvIc8H6+OSPm7VGVjlYvSnTcFeBp8xm9nk+1UeWOw5JvMFjmctSBWaKr2GeQ6vMp2P0U9l+co4TIhP+BKRcuhaPoi/o/Ebn43mOnlJm+zFjL8ElQusBwJm5Xr09HR+t/DGfA5b1zEDfxBxrKZxDilKwMMHgLmohE/u8ZvqtYx3Xu7j1rW+NO9/5zvjzP/9z3P3udz/WzVnHOo55fP7zn8dXv/pV/N7v/R6e/vSnH+vmrGMd6/gmi+Ma9PMO2PDFW42ZFwl9FNZdBBZNxIbIVLWOsOEjPBH6GLHZ+ZxYUHlP3i5XFO/pPDZ8qSDWCtfsa+GY1ZEoySIO2dPFSi7qIlP9I3hxGDPg1EcG/WK0PnJlP22VbS/ARutirq7cHmKWNyoV01birmbPKPslIJkcGYmES1noj2MJ8LPJp3F1aYWKmoTTRMVtCHy8Qiw+h0ANRnihlNUyfA2vx1GKPVWqMMp2VcJTwT9dmKuE54E+ZLBve7DeMvVvt672xGi9w6xx6LwT6VfuX50vSZAmJw7B4F0cgIXx4bFSpUCd1LQSNimCkgB/wloj17B8JSRxFAsAoeBDmEiQWMDO9lHAJjEP/b2p7ZIB7rRvHYoNMW5HYSoC6mdp21b9Fo36IikzQZOWlGWVICB+TgzYJIsyAJ3xT/ERIGIg1sp7Kitv6JH6FmlgJqBrFnXb3ATYhwImVrKdWd7T8fuj7+TfFnZAUpZABiFj9b5Gvs5U1s75LHE3JOtjU645BXUPV2CshRJ+BfDnPWWwrwUQXO19UmTtqCpsWAUCKvC36m9tUxmXaq8+TUoSapmpcSIKmO73U6GfU8AdI6DPAn9AYmmvYwD8rX1n1nG04R1lGUagjNcMGvAYoCNfSpB7nWM2XoogOigM9ZqtHBfC9Lv6AMIiIizK2OW7Ae1mg2G+wDBfIMy34Yeex9poWCxTSd6xdKVhu/VS5NDHushBAQYuBKuz2laSu4B6FviTR7OdvE2b2F7BvuExiAEIolK4Mw6bKLfA4iTAl5kyWJL5THoezHFSj2TnSju0vMTL7y0l4HV7+X4qEuq+LfcYsF9s
b+ZzfVQ5e7nfGznB1jkEz59pI0G9eUMs50qLp1SGtXUueyFzIV493qfEE4DsDedkrAbfQ9h3lufaQ+D+nLwwFsVnTwG+IvVdCnZc2yCGKHK0wxKAlEIEOSc+fwGIAgBWJ9eXY2kA6upY5w0eEURVvmsK67QnpFRAsIiyRqkZlwpAF8DNMrlU1YIZfqg8xqv57qjPe3LSz5jpRgKAWWZ8PixE1TVVHYZD7HalDDJ1HKtjUW/Jgos6H0r6HdfwvFDYxepZ2Udey1nlkBBL36ZEzKJL7PUeIuA8Sf8v26fJQjQA1gvYjnv2+RRgPB4LR1EVdMr82enrjhAT5bEog30yt9b5m67DnKyrHY0k1Wl5+0QTZ3RF+1XNZDQsi3LMBHB+Hcd6brWO4zX6vsff/M3foGkabG5uYjabYXNzE5ubm7jxjW+ME044YVd+56Y3vSluetOb4swzz8Sb3vQmnH322buy3XWs43iNz3/+8wCA3/7t38YTnvCEHbP93va2t+HBD34wF12uYx3rWMdh4vgG/YhBFl1sAizJ0qSEIQCSlh8tXpn5F2PCdiiyoFVyQRa5G97BOcJMHjXBoNsLMaExmeIMXFD524IYKiNjWWUsKakglLDMFFyJyImOmCKDf0nAPwLa6OAoZrlJ60Wjsiu1/B2y7KEjBtAsI0v3X5NLdrGmSaBVYN+h/EZWgX0xWQ/D2t/QHk8AoCx9xW3XJD63uSQIMuBntsVMP8jivIB9fYw40JfjrmDruD9wRX5Co5XNpACfQ+cp/7PgX6NAXxjYc1BlSMfHyVYQ2wXw+HMq86NgIQ2V3CccJ1MCUvEWExBQZWerzZnj2+dkyf/P3puH23ZU1eJjVq21z7n3koQmEkAJUZAkoHmoINIHQmPo9AGf8RPhe7Tm+QOB0EXUZwCBhwgP5BlA5YGE5EVsQAwQATsaUUFESQggvXk0IpCQ5N5z9l6r6vfHnLNqVq21z7n3np3bJGt+d9+9z25q1apVq5o55hhz641mDchZS46EFDFsTpHKa5kAPZT5T9hBWOa0rALjE5asEf/qDI1yjKiED3VaggHRlEdFGH02ejo2YPC1V09HA9rFklxxMUdsGOSLTcvOu24ujuh+WEFbyZrFZ1/Xcp8mv98gd0tvpMG6BTsla0lbp+wEAfqalhkXvmXJMpG4TXlNDeAXYmZx7o95RbXSsaXPycDnYhTnURTZT7YEBOq1TWP2EATkU9JxOzvlnBnrdWwD1PFuGX4lu9ay+uqcfQfiMrLjjJNBMkQWGozCqk5R/qQnOc52nGyyI9Vm3mF91mBdmFQaJDCXMYPvIX696KMEuxB8u57YMOSaPBaGHrEPCAtm+XX7OvSLEvSLcv+HecePPrCsp4611iSooWCaIQf9IOZ5pl5j2FFGmSjORZa3E9OxpfUuBRQ4ysElDuXYYvOBprWSqh8Iq98Wrvm6lIGugTu1LWPGRAsWIK+ryDdA32VZ9cSANG2l7ceNXkgppiqatV4hHQoH+Fmus14HI+e5CFlZIbW7BJpEedaxGnBYayzoRFj0Icmt1jJ3GvyWAb/M8vMjbaVr4tp0DuolQE/lrr2fAX4O+Bmo4esW3QZLPDYz/jv0IO/h2yZJe5MLieGnkt5jpszV4fWkASib1tYwfcSqQNRmApvsmjuYe8G+ri3ltkxy3fl9m1ZAUwp42WewzOdIgFvV5yH93TkHF0UmN2ofKa+RvY4MtW3N8rO/S5LgVIbaRPOsQLv2fbuc0d+EyKqW3jW8RgQSILUISPtHK+3JoJ/un/iVqsfYvYkXkKxk+9nrH5KcOICSyXsQQF9teo66xwuU27z1fHxn9rM2kCu1tdkTc3/Q/eLytdVoYJVlsUYzH1jgryyl/LNaL0822WTLrW1bHHfccXjsYx+Lz3/+8+n9U089FX/zN3+zMtAPAM444wx88pOfxCMe8Qi88Y1vxOMf//iVlT3Zwdn/+3//D7e85S3Rtu32X55spaagX9M0eMMb
3rDj3H4f+tCH8JWvfAVPf/rTV1G9ySYb2Je//GV8/vOfxz3veU+sr69v/4PJBvbNb34T3/M933O4qwHgKAf9siSQkW6LwLxX5z9vuoiA9VhuFJQx0roM+iXQjHK+AnUyK7urdiLoRs5RhhGs89wCG4uepTs5ByEzyxYhopOccn3kPCM2yi9Ijo3gVN4uIDhKr5GOb6RWAhCcS6+VBQi4xJjTdiHdIMNsBCGOMIxEmVdSVVuBfQAGjqYEMogjSKVOdcMcJCJ2mcRnr3UhvtbaD1J7xcxa6oThpxH+e+c5Z5+2PT8zs08B174CNAaJ652VNcqsvtYRZgpK9HNQN+c26k2uPrtxryL0c6OF8rX+HbrkyGBnHwrWH4Ak0aW59ZZeF+P8UTfoGINP29T+Lr1G7jOcy5LfHbAMjcPJlmFBPi4jJlne/F4uxkuXd0QIISSWQOMIcCJjKw6VKD5UUiBG66tRxOowFYqonksEGOwDGFgV5wwz/RyDbk3LjD+bo6d2cAHbA336uplJbh8qo/0BcYYQKHpgMReAMqTjJqDQmd9rbiDJA5lyKyUgPEeyW9B1K0uOsZgupwDi5v6wXiLIeJSYQswC7CMwuLG1xBGPkAXL7H2YgbssQ+coByk4YeV4M34X+f3Mb4D9cyhaszWtmX+WcevFo3Y4MD+91tdn+ZPdMK2VnMWtpwS+s8Na7jHHjnoFAzsdHNomjTsgw1oGEENAv+gQFhnwi6YT9fMe5Aj9ouOcaIsOUaQ9l5rMnX0xnsmYAyQWUYQJyjL9VseIIMwwNQ2UElwfpOslE2QwFiBVrJVCx+uj3sh2kwP6APKcrwuuKRh/QK6/ZcZsO34YZiP5RtYMZt1VBV4VFpllVATqaH2rY6RVmQZwyXW2c4zmNdO1XYzlesMewzsyTEs2R4ALIrHuyjyvjkqwwStWpmUuaShPGgSYTdcbWr8+xJwLmhyc94jCwo+hh7L+yHmW/+77lNuvOK8lgN9S20/AhmLcf1aT6Q8AEksX6e98PySJW5nL+Zk/0/2O5rD2BtzRADj7nQT4jfV5CJDjZ/xdCb7UwLSi+shAupP610wxy9bTz3QvZFl/1uo+qGuYUWBd2pvzGzbp9xpAuDD7C92/dOYAjWOZ2QAG79Jai3LZMOvRoh84V+ZxlKDA0ftymSmguMR0nU4SlcdMZgblE/PRg4O5QsjrNzENLNVgKh4jyeRd3r9qLq37VgC3WFQm5iG2aW012dFsd7/73fGJT3wCz3zmM/HGN74RAPDZz34Wj33sY3HWWWfhv/7X/4pb3OIWOz7OGWecgVe/+tXoug5vfetbcfe73x0nn3zyjsud7ODsS1/6Ep761Kfive9970rK29zcxF/91V/hpJNOwqmnnnrQ5XRdh/l8jt27d6+kXkeixRhx7rnn4g/+4A9w73vfG7/4i7+4sjIf/vCH4/u///tXUMsbn60SkDnvvPPw9a9/HWeccQZOP/30HZV75ZVX4vu+7/tWUq+d2IknnogXv/jFeNjDHoZ73eteeOADH4inPvWpuPnNb35A5ezbtw//+3//b3zta19D13Xo+x5d1+Hss8/Gj/zIj+y4npubm1hbW9txOdeHveENb8Dd7nY3POQhDzncVTm6Qb9GpRQt4wMx5VULjuADy30CTiQZA3Ybdt+iZxBOF9mZIVfuWMaYczZZPe/1YvU3CjnPRa/Skj0WfcSGAn+9Ak/j0pJcIDulFiGDjJpjx25wFZBqxYGlsjyLQFj3/PfMO0SKiJEyPqGOcWei1p2RzTMyVWRz6C2zerNpAL8+muh7AXlUGqePWSJHwTdtDXGJFbm7tO7aDpZVqSDr3kVACDGxKhd9wGbP7czP7Bic9+U5cdu7ggGnDqe1hiPLG8P0ax3Bxw60mDPQF7rs+BNLG1Q/Sw6mQVuFIM6THir/SQb8i6FjJ4GPSKy/ZobGNew01XYgZLaR5k4RJ4R6fLSdFYDjdjSO2G0QoQEgOuKUWpZvUz/r
I4Pw9Xv83bLt+ZjsgFLQXvMqMqOKJXADpL+IB8jJay8Mi0jsrItBQDXJmwjfsYxqYMczhR5o+Rq4nvMvxr5H7OaJ9bf0XrBSnQ1H1ZHz7ERUloRG42u/GAOAVQLWeTg5PjU5Sk/Li80696t2HWjWEX2L0KznvJbmfrPgOLDEKSsOtxDZKafAHwEgl+7G9H0bN2jHsAGo64bXHkACCK0lcK8A7ygBvpqzLzmeqJwTco4hQqs+8AJU5zb3lEED+1HRHvI8FsUeIdJokdLfIZLkRQJmE9VvsqPI9sw89sw81hth+oHn7IgouV9dYm/N+4DoHQDJn2qYX64VdlS3QL8xZwbfopN8Z5GBPxk/XetZ/m7RoZ8vEOYdB1lIgIUT8I4d5TlfaVpPoGQvq1ngrwD8II5qRGa3mPm/zhOagqrIjiv1GIO0NkrAhzL8gQyAkBuV6gZk3DAjy1JmDCDrBpH91vJFsnPwfS1fA0KWAH/274EsaWJVGhBR27/PjL5FLwzQEAugVQO5dF5pHM8ruxqP9QZpba5rZV0XAsN1QJvGdJfmCF7LUAJ9QtQ5xoCGuo7X6+m0frIu0hy4foYYOrg1IIiUd8rvGwKzTwH4PsB5h37eIfqS6ee8MP7GcvKO2SDYp2T8gVwG/sYkHjWfdcXy6yOypOQSANY5ZnM5h8R41fVW64jHgZTiAEnlonE8t6VAt1FViz6t/0mftc87yXVn9kupOaq+X0tyhvJDrjMMkIVyDq8DdQIIXtaMVvnClh907Sh/W5WETdnPzftY7DcA7lscCEGytuBxU1mNHnLOztxTktMugXuW+VaDYOZ1AejbexO5L9hgCD5/kWdHzn8JF5MsLisWEI+rFEflw7KCTL6n6gBMRyYQUe6xYkyr851uYUUwnO37hwH0m2yyI8UWiwWICF/5yldwu9vdDn4/ma83uclN8Pu///s488wz8ZSnPAUvfelL8ZGPfATPfvazcfbZZ+MpT3kKXve61+2obve73/1w0kknAQDuda977Rjwe8973oNzzz0X973vffHa1752R2UBwDe+8Q2sr6/j2GOPHZcfPgh79atfjcc97nErAU0vu+wyXH755TjrrLN2XFYIAc94xjNwwgkn7LgsALjmmmtw8skn4573vCfe9ra3HXQ5X/nKV/BzP/dz+NVf/dWVScB+6Utfwoknnrgy2UtWqIgIIaBpDm6+ISL8wi/8Aj7+8Y/jM5/5zEpYU845eO/x4Q9/eCWg3+WXX47LLrtsJf3t2muvxfr6+kG315htbGysjG32hS98Af/0T/+E97///bjjHe+Ic845Z0djwDOf+Ux89KMfxd/93d/hDW94AwDgtNNOwxlnnIFnPetZuO1tb7vfZX3xi1/EAx/4QDzpSU/Cc57zHMxms4Ou106NiPD6178e3/jGN3DJJZcghIDnP//5B1zOrl278IQnPAHnnHMOLrjgAgDA7t278cu//Ms7ruMVV1yBiy++GI973ONwhzvcYcflAcyIvvWtb72SMeSHf/iH8aQnPQn/+q//esBg6artqF4tz8jKe+pmnzcqjiCMMQBOInljhCMnCcp5k7ZwEYtASRLGMvks4KH58qwl2RZ1bMj7yqLRY6gjRNl8G4bhl6Ul1Sk/lDTKu04y0ejs2dDcO/Y9zYPABChmn9Q5pTRynYu3Ee3ITnSYqPUa8Fu2QdsmYpkdDzE54RLIF1GAEgqWWqcNUDJ8xgKqtb2DAKgKrlpmpW1zBVnrdvfJ4TE8huZz9CRtZxyB1HcZpKkAP9tGKaJcIuVLua0OKe9cajhl+wlLAIBKfkaHdH2cgqsin9WHmBidypBkNhQGqIa2tWJYWwF+eh0UNNJx0f7GYmHLAL2a1ZffG5bn1aFHBCDARQbxFj3gVebMRcZCIw1AG2b+wUiqMTqUmsI4PqNcwxhDYmzCMTuDfAdqGnbALdphrj1rhtWnbL6Ua8eZa699YuCMFUeQXmtiJ6I6kpPzmxw7KpuWn32L
2BhJz20Av0G1kcczHVf5tVxH+WxsiRQjkuxxBOBF4gpgJSYXbVNlBqCV+LRBFzXYV+ctVWeT09dmPFPGcgHU1Y5s40C1LIGx87MsQVuOvmed91w3GshAHyqb8s5MdrCmYIrm8NK1Th+Q2VcRBlSIWVpTHbGuycyk0LNcZxVYE0NAkAmBDNNOgcABy69irPUxgxrqkFeQL5VVnZt+xKzfmOZCDVip+7VdK2UQUOX5kOTPyc75fZeBjxo8U8AvBlDfJWe2Mv7sGKR/23PRP1kiFEAAIgWAPCg6DNZmOrfY9qvAAVu3UTMbn6jX17S/An48xwzBJQUcUimkoI4Kgue1+aInLFwG/UKIKVBEzQbm9QbQdRSTdLyyDfMpEHygpOCha/piWe/MXOwaXss1LRB6fu7mifVPzsPNWoT5An7GOf5SMd4J+KdM//3YOI7kdF4KBKZ2HAH8bJH6bEDQMLgjcpsGyvOxzrsK+DkFup0JrJE5mAAjZ9uPn4usISgGXs+4BgRlmHI+dpvX0p5JzYK1ey1rKRdd+p6uW4bjgAJ/+n2blsE+98hBAzZIUQMVLeCX1jVJmaW0wVgElGD7gVh9ratAsprtmX5mnnXPp2Apv8/jYoAC8wQ3IoVQ5o0fgrTLLMRSwYHfrIC/Qd6+CvCzoN/Btt8ObFpbTXak2Pve9z5cdtll+F//63/h2c9+Np7znOcc0O8f/ehH4yd+4icAAGeffTY2Nzfx3ve+F/P5fJtfbm/HHHMM3vOe92A2m+HEE0/ccXlnnnkmbne72+HLX/7yjssCgE984hP42Z/9WdznPvfBy172Mtz5znfecZn79u1D32+hTnEAFkLAvn37VlKWcw5/+Id/iKuuumol5R1zzDH43d/9XZx++ukH7Rj/0z/9UzzpSU/CVVddhbe85S07Bv3m8zmuu+463Ote98Kzn/3sHctnAsArXvEKvPrVr8atbnUrvOtd78KtbnWrHZX3yle+cmWMxuc973l44QtfiF27dq2kvMVigc3NzZWUdeGFF+LXfu3XcMEFF6yE4fQP//APeOITn4jLL798BbVjadQnP/nJWCw4gO/jH/84fu/3fu+gr433Hscdd1z6+653vSvOOuss/MzP/MwBAX4A8C//8i/4whe+gF/5lV/BBRdcgPPPPx/3v//9D6peH/nIR3DppZfihS984UH9HmA52osvvhiPetSj8IpXvOKgwdHjjz8eb3nLW/DzP//zOPvss3HSSSetJAjh1a9+NX73d38XF1xwAT772c/uGGiOMeJBD3oQ/uf//J945CMfueP6nX766fjUpz6FY489dsdl7dSOatBvvXVY8w6tyxtOkEPbMLNNHQULxUti3jgGAK3z6JuIGH3FJoPkqhlGftcMltCPb+4WISRGkzouxsAnZZopy2zeLXO66EGHwF+PaFiHBvhTJ7rI6MAPiyTZrKWcHEUkNDLYpzk6torKtI6KELKjw0SbhpijjnXDnMHOHCluWZjAUO5kjDhjQSML7nVSzkYXUtlzYfrNu1BI8qitNVz3YAColK/HIUlTeYcU/UyhAzpmg6GfF+y8IiJXHaJeIsqRnXspEjqEJOE5AFq7eQb+IH2274oIaoIC3pQK11winPePfTAqi9Yb4Fnbntt0BHB1hIUcPV2Hke5gWVxbg35I3wkhFs46ld3V4yubtY8ZhG+9g++jOHA5alvllPoQmT0CduyoXJMXkIzzTzWpbZUlUbR5I3mqRLoqikM3xgBq5hULtuxL1tmapIgS6KcynFVktny/kHUNHgiNYf2F/H3XIDrPDD/XIDZriO06erjE8FMpKgX7Bs4yC3QRO+AceLiJxvGsY+iWxLXCSc3Xk8R5xBc6GPAvO934RVmwBfr4Yxo4myzLT8ezJL2n72u1thi/9N7TW6bwBdegobm3CyavXD8ix32MHLwEpkw22dFiKp+oObz2dT0WQcBAvYUMsNIEQnQyjuj8pnn9wOBdmHfFMUIf0M9Deu0l
GisKOBiFVRV7E+Cgjm3JUafBQl0YBjVsZQr06RgSgLS+qwMKgDKgIEl7uxz8Q0bWMIEelqXPjZDXAUAK2KHQca5Zcna6HrCOEwBo5ogYuhS4wtckYLDQWwYMbQX4jYxp6TdyDiGKLHsB+MUUtDUIXkttL/O2VLp1PK/ounzN+8QSVJUMDeICTM42ILGVch5ApCCeRc+/y2uIKAoN3I/XGoeZd3JtKZ+r86BmxkCf5vYDeD3QLTjwqplJ4FXPa6m+BzkTBOIF8JNcvSRBP1bWe9j2S4C+sWtl/x67rpSVNVKQnaz1YnovF+GJ9xWtMHgd5UDHmef8yZqz2uZSbx3Bg0HuLYFuACQLswgkeVsv618Cihx2W1kN+FmARO+EMfBvjKGv/UiPrcCjBg5oGyqD2CqSKMNvo+sL9QqtQKCICGEyEq+7PCgB4tFA+AW7z6791MbYu/K7MnDMgtZbAH+U33QQsJeyzGqIPK71smojs35JwaKkACEqtYV8fQIAguR8Jr4etcOI0joqjq6rBuNVDfoNk/5NNtmNxi688EJcdNFFuPnNb45HP/rRB1XG937v96bXa2treMQjHrGq6uGUU05ZWVkAcKc73Ql3utOdVlLWNddcg6uuugpXXnnlyhgiq2CuqJ122mk47bTTVlbe+vr6jkEraw9/+MMP+rdXXHEFLrvsMvzar/0ajjnmGNzsZjeTtC0HN553XYf//t//OzY2NrC2toanPOUpB103ayeccAK++tWvIoSwkrZbZc7M448/fmVlAcBd7nIX3OUud1lJWTFG7Nq1C/e85z1XUt6P//iP44EPfOBKygKAxz/+8bj66qvxS7/0SwCAiy66CFdccQXe/va343a3u90Bl/fKV74Sv/d7v4d73eteOOuss3Y0npxwwgl44xvfiF27dmF9fR3z+Rz79u07KHD3rne960qYgnv27MHb3/72lQDWD37wg/HJT34S//iP/7hjwPqb3/wm3vKWtwBghuSv//qv4zd+4zd2xNr89Kc/jZe+9KUrAfwAFGDw4bajGvTbRT3WwoYmlGFLCeMbAAQilleLAtgE2WjF5DgoO4Y6HpitosAP0oZZTR0Q/DTcqSp4stGFBKJs9AFdH3mTGDMTbd6HBPYtc5TwuSlzBEkxKERITpTSNNehbtxbrw8nm3XJp+JyPg5vXxNKp5XN0QEU4EYCNpZFKOv3pH1t1LE66dRxFCJyfkMB7ixQZM1KPumGWwFW274qnbrR9Qno2zTtXTP8anC3yPXonMgK5pwmzKQL4vjrc7S/Akd2EVM54PIxtJEqAMlGT5s2jj1LfMbi+8JMI43KFieKQ/IYxiD933H7UWR5s9SmsQT/LECX5DVDdpItkAGZguU3wt4Dyuj7MYCvD0OWXwZ8pM9Kfs5AQBszE4AdW5QcKjFSwsBj5O8r4zH3p9qhYK6JKm4a4DsBfirdGtYKZ1e0Tl69pLVjRp1+VEkz2XqkHwdxROp1btkRniqZgcTYrLGzfbYbXUSKRo8Ro4DfVkNNDfypjYF9Wy3Rg4y1IWYAcOYFXAssHQUA7QhnMIG9LufE28rhxLmG8ntb2pLocPWF1RhdDfala1yxntPPDJuGyLFj9BBbkLHw+ix/shum2RxeQCZiLBCAzqF35aqHGTg8ZkQwOJentBz1TN7BtU2SP1Szr0lkEUM/cs/I+Kk55DSflpWU5PqYnyTgrrypVXrO5rJiZku+j3WM0XKsdHDrCI2sk2ixweNCv8hBUtWaSetPIpcKKNjhQOjGmXfIYF8N/BGQJqnkGF8CDgxe76+NAX5AYtNZSc8xhl9RlAnEUIUEy5xWgCRGYKaBYJGwCCQyslQEhXHbRCx6nqP6WEbXWyBG12oteL3MDDbPKiHOsP0SYN0AXiS5W2bZo1kwkz70oLZNB4kAg3pW31oZ/gr6tTNmCDYtg4HWZK0XQwCRmdtHgL4ip58GDAFl36kBP2jfKYE+XQcxuM1tEiimubgVMNnuF2bS
Xgr4NQRe/2zDbB2cSwiyVehSgExEXl/U/Uf/ZEUABpO0D0TzfQWteJ1HBfhngb9lMTgqfa9rXwv2xViCfrq30P5o2ZHLrGa5FbGY5IDYD+/hJWbXjgXItwX7rQD7BOSzW1gOkIpmTTeClEJ/r/dyvrfrMTZGSH7tEvhLubJtcN3Y+gq8/q4BbXsvxP2Vz12hTWuryY4Eu/baa/GOd7wDAHDVVVfh4osvxvOf//yVyRre0O2aa67Bnj17cPHFFx+x+aBuqHbqqafif/yP/7Gy8j7wgQ/g//yf/wMA+J3f+Z2VyUr+zM/8DJ71rGfhR3/0R1dS3o3Jfu/3fg/HHHPMSsoiIrz0pS9dSVkAs2h/6Id+CBdeeCG+/vWvp8cLXvACvPSlLz0o4G9VQPM97nEP3OMe91hJWW3b4sd+7MdWUtYqc27u2bPnoNmL1s4//3xsbGzgdre7HV73utfhzDPP3HGZp5566o5ylB7JdlSDfjTfB1rkBX90XkILA7w4pTRhvDqcNdISlDdfdo+W5aEgkZ+83+mRUb8QSwnEMSkOBTsUhLKMs0XIbCplnPXivNrOPHHUcogxAVFACUw5Z58p5fVT9gtv1sok7APAz26+jGO7ZjLpe3HZRnck0nRZ1LGVybFMvZoRpu2eyjNAkcp11qCqZfXNO23zsc0117cLlj2pbW9BCMMqAvLmVc3+HYLsjEcW4nGYS2dp/sSx6F/7CIHDqaVM6fqy8eZ7QN/TM41LLps6Ty3bte+jRITnXGVAGWlds/j483GgLwGDo+BfrkvQ/JROqSQsYwonTqDkpMmR2RpZHRPoJPlLMMwrkhxG4iBT06vinQA3TQN4Yfx1c1DgaHU4kXTtO74XamkrIDspDEslo4omInsQBZ2jvhPAaxgFyhyNzgPNLDnDdUyJMcvlKuB3oP4EbYextgGQImpqR1qI6uDRv7NsFKKMOfJhX91vYyBf/iwfL91/5vvZYT9iY87xLZzhqYyxe3Ir1jOfFN/bznH/mGyyo8QMngAgj9cuEkIIWLiSfat3CjvjwePzsrK9K5gj/B7Jg0GxpZXSZ3JGlnsoKRkNoySNXzQic04yJkUOEDPDVSHpSURZNhhmnBGGX8q/q4y/UAXqiEVBLIhCEbCjtVwGF4wBfxHIa4exYCvzemzIHxxr2Tg2AibpfJ2YlchBJRZsUks5twUYUOBUgT+tj5bfBaAnXmcQiVQ/HAeKaMCdWS8AgCpqeaJiLQFgkA+49XktXMv2a+AgfMNsP3JA00AlutG0zGL1AdG5xAqMoUfK4SuBPYnZ17QgAQAhzL+Bja3vxu4jXS+MSXqOAD5bzfmW5coNJm3oTPsI4KcAqaeseoHQpTx+Y+y+VKdl51UHzMj7iYEnn2n9NRgpFYUc1GWqL0zZmMDlMbNvy5awCAjTPUofsqynMoptnvY6KFH3ZltZsIy3JfuCZW06BoANHvo9YeRas2CfMis1t19QUNXsEQbtZj6za7/6lJVhq+PVlu1SM/y2Wp/tNJBhssluIPZnf/Zn2Lt3L25961vj4osvxn3ve9/DXaWjyq699lqcf/75uOMd73i4qzLZDu1P//RPi9dnnXXWSmQv19fX8aQnPemw5lU7Gu2nf/qnV8oqBRgoWpU551YCOk12+GxjYwOvf/3r8axnPQsvetGLVsqivaHaUQ36uY3vgtrATnTXAMSROpozonEN1htxDjkq5Fok2DBt+vWzRZ8dSB55s4dASUZTc9Ask0LUTaCCfQpesaznciCqNmWcqdQkvynnLgCeF2DPE6HxmbnHMkb8rDk5WpGjnJnvsVwPJC8ieLPV57w0Bdi3VRR5/b5zA1mWEJGlPQXY0Tw8HDmrzLxYyKD2kQHTMaDIXgeWC+XvZWBPcimGiLkw/UbzJkJZfhGzZuiQSSBqLftFeeMLYLnjA7lfwjUs0UkOoC57V6W9E1tQ5UJjHIAGBMfyZ9rEAjalY5CDdyIBmrTCiPMxhSj5WwBCTKCv
Wg3Q6Xt6n6hk0hjL70AAPn1vTErUgkAhIZMhMwwpogWDkp6c3G+Q/GncQgm8h8osiaMlgWD5WAPnkamLU2A8geMztGszkVWdc16mYBiAFri1DhgggXQFCLiE/QkAsQaSUweQck1OyB7cDnqv9AGG6ZevzzKrgdCaHu9i+eMa7LOMmgBhWOu1C9zPYhTIjnJUPiCA7mhZpdV3Vh1lPhbBH2NEXwCFyyPhpWrmxwbYqwG/rcZGiLMYPWIgULdYerzry5SJc32WP9kN0zQgaKNjJGUzrU/MuOiymkAfXTF+UujzPCZyic2uGRZ7G5BzcJ7gZw7Nel6C+pkHOYKfNfBtw+w/zYuWvtSktcUiBHQBmPfKaM4qDVzVmPLG1eOCSto5aF4+AIEd32oEZQ8bhp8ynBzBR2b4UeiBboPBv24BlfasZT35dZPAM4LMC0kRIH/PBqLoOKvvlUz18qysFVOHNMpgzLavLRAz4li3UuB9zME5db7YOhewQwYFkrKEQ8rF7Wk49/RRpUMjNjp+dh2wIC5x74L75SI1RNkv1RQ80uM6B6w3HGqiUpUWeEzBOL7NY7rk06WmZaZf3/NrANT3DAxarrqCg85lgE9Bv3YGMoy/CGSFBjh5lrPRvpHYnGa9UOdkHAFmNdelZcTVAKCyXHPVc//Q68P9nfv+uhemn0h6Ut8ldivG+ntdN53YY2DwG6FY99Q5CbVP6lo2IrPx0nmGfH7pVBwVwF+gUvS2COYxxwEM2Bdz3j4rk65ysxsSmVawsnQfBVk3mmNqsGnQF7VVe4c6wHKQH1wZb87nQLK6PyAv/3Xtuz+Wfm2qORbcVbOox4C/LQHQtK6KSAxpG+i4JAAk5dx0jhNFH2Kb1laTHQl20UUX4UEPehDe+ta34pa3vOXhrs5RZw95yEMmwO8GYCEEvP3tbwcAnHPOOXj5y1++MqYfAPzCL/wCPvnJT66svBuDrRrwm2yy2v71X/8V73znO3G3u93tcFflqLGjGvQb2yRF8IaBwDnOGtfAe5JFOg0cQ1aeJEYAnn0vIUZ0kj8v9syI0k2m5gexZuUQLTNNnxWM0vx9lt1nASh9PWCZSZQtv1Y5O2HyCRjFDjjJWSIg37rkLeHcc5XTSjb2jrKcp83dNxapPmj//Yi0zIDKkGkU0mc5clxBUv1bGZJbMcK2AvuU1WcBvxr0q9t7zIJxLujPUzEaeW2jbR1Kz5u2mTiR2EPRA735TD+3bAHbz5dFBaeyRTZp5Hvq3IrSf5c5Dz3lnH3WsjOvjJGuGZhbyXbWAK3+ZuweSCb93cralvUavqdVzY6WzHoEcpPXgQBjOaGIYmo3Bj9ZOtQ7QuNnLM8WWHIVzqd8f4NrZmWJaufdMvNLPrMyR9IGfeCxrTftq+wLYP8Bv/rvZVH2y8wIlEH9TETEjEyyrmkqvldHjuv7gBmvR5zhY1HmgHFyEop8joWje6sTqQC/MSsAvyVMDdJcTZNNdhSZlVIcMKeYSo1AQAg0GDOL/u54nHMtA34q38mPfAcyEOgT048SU0qeSfKiCntFHfM2j9zI1JHY3bWyg4sM/OnYFAUgtOBaCiqAyXUsa6YkadjPhfltgnWWSRzWY8V+jAs1YDc2jttxORTvD75YjMlFWxEKtlX6iXnWwK08b8r61jD8ijLlWRl+hFIeVfMhIoQk4+cBeD+DJ2LGnwNcBDqK6FPgDRXrh8ISXmDBPwb8WudSHRSMVFBE+4+3eXfJgVyDGANcO2NQt1tIvsoAtC0QfMo7OQD79LUAfmja9FlaBxR1l0CuETCvuCZLQLXBNYvlM5BBY51zlYGfioPuCSiB3ZomIAUHCuCHuq8vAaa4zpTf0++SK9bIVIf16PpIZEBpiZh43pvl8w3YJvfwyL2nezVls+qaMCaVhJjyS9b5qi24tRN1vYG6RwzLAT97D9ftrL+nHFCl66HtcieOjQ35M3mvWr8dsNXrK6tYkgB3AM4ESNj1sxYzsf0muxHat7/9
bdz73vfG8573PPjDAHzfEGwC/G4Y9o//+I/49re/jQsuuAA///M/v/Lyb3/72+PEE09cebmTTTbZwduP//iPH+4qHHV2VIN+FGPamOsmgGIE+gWzK4jZUUQODpyjL0XyUs6rog6kEAEfgZ7UscI5RXoCCJyEPUhSsJaznItxSZoLzeaVsyDWwoBXQ1BqfBdmwSgvx80ynRno04j7tcbDE7C79XBEWGvyZl1ZfTOJ1k05OWIAdfOSaQYsd0httcmqpGXS3zDOopgZYRxVm/PyqPSpsiQ3TU7E2gmpbWbbUBkJDP4Ngb6tcibuD/Cnx2uc1l+kpFLf4ihyjdgufBT6ngB7MfVZaWfJATSWGyXltZBNvG7s7fsaKc4R1J3kmmlEjowSU4sEJCFkx5ecAlxk55gPQ1ZVOn/jQbJynkC+rlsBffp7ez22zGUp7eiSo2/5dWKHDTsXObqZ72EX9XyiySGUI8XT62XOY4LkB+V7qI+ADxGtIxA18L6B9xm0rXO9ARiCfDXjzL4eAdTHxqsYchBDCjSQ8+kDBpKetQNZzy1VwfqLjPN7K0dzUBbdSBn2GAEED+S8OxVoN5bnqT733viCivIrGa9or3cUR1jIx9XyRgHAJY75pVLHY9+vGQtVvqlDYSGOS0+vsvzJbpjWR1Em0DWLGeed0zmP5wuEYKQezfisDA5h+rm2gZ+1cN6h3cVLz36e74tmvYGfOcn718LNWFJRH5q7FML6Udk9zes3Nmb30c595ZgXwHLvcJL/NY0P8h3wbzPLLysiKAufmd5zZjuFDtTP03krEAQwGBSJ5X4jUMpAL7E8vgp4OQLs6Vv2XrTzhH7Hjo06vuZgCUpj/NgSqJhv0ppNJTeRZFXHAuoAA5oKeJTywUHWnQqY6vqIHKLbQNPM4P0MaAjzACwCQ0J9iAyqBAzAaLUCgBHAL4FWTqVFyzmD1ybCCm1mCWyJznNQDxEvkoS1pz0qBmb+cdVHGH4+5/azAGC0a+XKijxmQCENLgfKz2aNrdfLArBjo7SyvhT4s/cGM1+pUAbRXH4+CthnrtlSoM+CUsv2DFuw26IEFpCcd3QNnMtBTgBSX0ygrZYDkXM/CDDK9ukkkR5g9iihyHttrZYPtrYtMCbKALkiBvBbFvQ3tqbUj6QP6b1ux8Je34O22xLQ3gRULQP5tto2jeU3VGUGRK5jYvmZ9RWlvisbkbHgt5rteghtWltNdrjtZje7GX75l3/5cFdjsskOu33sYx/Dhz70oZXlLhuztm23/9Jkk0022RFsRzXoF9tZDqmsIk4Jw40zyEmOpQaRKG0myW5SncMiiFNfnOoKioy5bW1ekcTqqwC/ZXhGUwF61mwOjZmw9RjkcylH31rj4ATgUynP/FkG++zm3Up6kgH70gY+dMNNvLRdelan9rII3orx1se8KY8ogb/8kHa04F/I4BGDpfw9y5QEUDAm95fZV7d1Da4OrzMEhOyxaB188Oj6iDmy46RxM/gWSbozao4fu2kXYE8dXAVwUEf12uuw1ca2iLSWugdA8/s5wwbTM7NOAAYFs+RnkLyRrS/ZfVa2c6x99gfsq4Faa3VeNyBfG0+Uos31b2eYraPNIqAsnzuDP9ZDoZHb1mkUY3aY1YwJIpZ/0/yIPZDyhvYEeHJwxLn/Erg7dg1HnHTFG3yF0t8F2Gccq7bu2n6lAyyOqjmNAX5jDinLsquBP9s2CibX4N8y02tsgT4vQQzeqUO6rFBidTg9V6lbXWfzWp2A3NdjYmkTmJGtTn113hZgYwW415H36XX1bO9dC9JPTL/JjjZb9BEbfUhAH1CypwCeC1qRH89S3UArsnNRwA/ynmU712do9qyjn3cgN0e3kWWr/cyzrOdMHm0Dmq2nR3RNCtjqVcI4xgT+2bGAkO99azaowJP+RsYAL+O8Ab+sFKWupUhzunabLO/ZL0DdBq+dug6xm6OQZXYOCJ6Bnq3UEWSeZtAhA3722Y7nOv7z63JO0Pdq
y4AfS5nznMi/9Y6KNYKaBfw0cKtgRGEIuOb2VnZklozVYDOac9upNGpaG5ED+QaxXwBNh9naTUBEWPQCwCSmoMOCIvqxSS6db85drXMKoZ5bZO6XecJFQpOY+CIhKH2P8/cFoF2ktiG0QOiH7D4L9hnQD02T5Tv3B7Aw64YBuFMFL2qdUv+p2kZFSqzqQS3hqCw/lfTUvu8VpA1Z0pOM7HwtLzmQL7dWzJlxOJdqtcgBFNMcrcx5W6JePyAD3LrPoMjz/ajEpA2iM+2W+jayckLK4VcBfgo+2jVovXZRxnBx+sj7oQiUuTm3srHxo9qbjQb/kYOX76UAjpjHTAvaA7yms6D4mHznGNhn31JZfDvmaJBhWh/X8vVVfyA4vgr1OprKAN7JJruxWc3GnWyyG6udffbZK5XznGyyySa7IdpRPUpGP0N0TXa06gbURguOLYx8kzbx6BcSSa75AWayOWLJGwZEzDHVwW4203U+Mwt4qOlrBS3UuhGgQ8HAWeMY9PMuMfws4KfsvvWGAUEr5an5OFL+PocUrdsQt1XawIss1SCqFCgBiv1g+NW6NnlTGZOTKAJJAlBlc2wb5TbLzzVwVMujlmDSOOC3HZvMgn+NeZ2OK5v8RR/hKWDhPINlQXPNsCxVBBj403NXOU8LJBg2ZXKcjAEKNnIX2FbKhmIso5tjEFmk7NCJwnZz4DyVTpwkHBHPSInKZ3miLZ1qwdwP+wP41ddiO4ZrAviS447r2SgAqGBRdZurpBtFBjRJIvj7EJNTpgb8bLSzZSwASBKRKeNOEIdGYJDUEzsNiYAg9SSwc2IglVmBeXVb1mYdwGNAZQ3+WcBvOwJlbXWkekCWgwMwKvFpHUX89xBUtK+tI0lBPi8MGu+oyNOXjiHSzF7OTx321uE3Zgr86fEV/HUCcOs1HQOO87xS3pdFrp0xwK/6fbTfm2yyo8A8EeYxJqnyVm5ay/gLlBlVep+p8zgFVZETSUMHcg6+5Xx9zfoMMQT41sxvnljy0zk4zw/yypgykotAkgXn9YQym8uBoHGUx/Hq/raOeYLMmZzkFiTadzo+6Tyjz5rHjOzayQB+6BaIMr+T80DoERsAvQBDMQzG/jE9wHoOSu9vAfbZuQzV73kcJwFFzBwWkST/dC1QHq+sTzTl5ucySIbbLx9X19FeHlB2ZCWNmiUf+7xWamZo/YzniAB4F9F6hz72DEKHvHZM6wRHAyDGuwxi2IAWzbNGkRK7y3th1YmspPZn8h5RQD2oxCcgfxuwTxh9+j41M2F7GiCMljOVYvEd+Y32kSXsPv3bXq8x0/l8zGlcynqW/R4i55lkbGuWnta7OJgb9u0wEghTSz3qb2PgvZkwiuECnHMSrFavQ4TdN3K/12ZBRMAEH6Ls43ZvEoIFzDLgN2a+6m/LLMhaZtneKrXxMqZf+mL1vpXHJLmPzD7A3uW67t1uhZLWbXUgRfW9BK5T/lwBwEH+c31eFlgFcDRG/RugBDsnm2yyySa7UdoE+E022WSTbW9H9UgZZ7uBZg1Y7GWQxUafatTg2A8Tq68B/Iwj0tt1fk0Ozs/Shs5RzgNoIz43uyAAkL6OKZ+ZNS8UmdY5ONKIaAcIi2omUfIF04wywKdAx3rj4QjC5nNY9y4x+/SzlKvPAHw2f59HAMKizD8jclSjcoTaVsrcG9l4juYnsxJcsWQAJFAomPxpUGnUMkdPnesN2A8ZyLrt5XmMRaaf6fuzxidmpb6vG9hFH+CJsBc9PAGbnUMfI3Y1Hl0g9I1D4yL66NC4GWZrM6DbAHoP6gT4s4BflfunkAOTqHEA2VFUtXsC96SPF1Jhgvol56Jh+5E49yCvvRPGX+TI7hAZVO5jQMsZX+AiYdEHpO29Q2KzAlsDfvt7jerrYdl9fO8M7wFPwFrjE/jdOpdZZFBndESvuTnF4dnHWDDXLEhmzYJdgDBHDPBH+ibUWaa/iwVrTB0eW9ky5y4wBLfGnL5claGjt7ZlOfPG
IrkBlr9TJxqR5ggkA/CZ+sqzMxVI7D9zPMvu0/xYGqSQnMLKOq7N8ZSleb10zACGTEdtK20sZWrz9YjJwUmIIh2bj22ZJ4Xc8TbsvoGRGwZSHCLTMfT6LH+yG6atNQ5Ny3PQwgRKOOK/1XozRsXInzUBWHcNS/L5GWhtHbS+B/4mx2AWQmLBkXcI8y797ddnaNZn8LMWfn0Gv77GLL81WZs1bcrLmWT3ZJ5Z9KHoj56IR2gTrKD1VwAqjUk6X6ZApMwFU+CjdYTWgYG+xSaz0xabzPDr54gbexns6xYM8ulcrmyvwOCfWrlualKuQlVFsAEe6TfyPAb0WVnqsaCV3C4xSZYG0waOeNmg0o7WkkykHDPYY1fzZg4O0fbLAIiuRZUpiW6D16KLffxev0DsewHWPNDPgL7jtUvTYVezG0BAAOf3ReM5vxoFOJPs16dzymvo2mLMgUG9THCq6gFwH2skMDC6BuQDYt8ArmO2X7OQIEEDSBiAT/NQsownFQDfqLw3GfnQSp1kjNlkwT3bN7YyBl8Y+LXgNlDO/zovt6nvI4OzKmPbGRnbdLGbBNqmc67lSAGAAkuEApnVtYWUpQJ0vJZtSknu6tIq8FfbGBAYq9e637A5/fSRyg85+E/3emXuyPIYjoZ1TGVJpXjdodeY22/wE6MYUAQN1KCgfI+EIQn0+Zo0kq8Ssq9NQW55HE3FkoyHQYKvElAMUUzg79l62nWuZffxmo/HFa9jjgWNC1WFWJ4PAIqO5whnWI5GTeZwrEKmtdVkk0022WSTTTbZZEeLHd2gn2vYOaCbJHXWStQ1Qs8R16FyuMqG3LUzlmD0vEmPACiu8Veo3NDEtAHMEZ8qPzlmhaPBQXaO/J740uACO5KAnLLAMpoY2KAEdGQJTwb9Wp/z+XmHAuCzefuSlOdig9tIN+7q2K7ZK3rONhpe5erGIpBrwM84JdSBpZKeOYI2FnkzlrXhon7PAHicW29sR81sxz6ELQE/Lc87V4BNa41LwJ9eE0DAosAAJRCw2ZHkhnHwPTujvDoCiND4mQBDeeNt25P7KzsIo/ZV/TxV8AASdOuGP0YM4+6rFjIMPgcklpqy/XwA4IhzNiEiSHSwvVQWOBstv9qOb5UzcZTZ5zAqZ7veuOJ9J99rnIJJJUtMLSDK+ZQsipp3oXlPAkqHzUCiKYKZkqDUx52Aqj1sxHMs6rOsFeqmLEDJbYA+/d7Y760V9VgC+A3aTh09kTKrMZK8NwT/lpllWShjMzELLOCnY9NoXsQuOXy8a+CdQyd1UZk6YOj0tk0RYr5mmvsnAX5ABeiVTslC8tN+F8jzzAhrp2YBTjbZkWytI6wZ0E+NGX5xZJDI41IEEAXwg5+DZutwu/YgrO+Bn2/Ar8/h9s3h1wOaPesIiw6xDyznKYxASmwqfq5zBAMZcFKHvb39+hihGTjsOKeOaLu+SzKfck7q3NZ2IAFByKwv0zpT15oC+MXFnOf7EPKaUtl+wWU2VM3ysoFUCcCL1biFVD/9TPO29qEE+5QBqb/Ll4uSMx9OwC5ZoxGiyX+IgtWd1nCwARXlc221jHQCWpMSh3kOHeJizuugDsyM0/boWS69nQGdI3SOMHdcl9YTnHOyJsum6+/0bPorgx3KcswS4MoeJfmc+1655kVi+0nuZrODsjn7lPGn+5OtwD7AgMC5sPI7FRhomX3Wll8L/rIC3oDkszSm94QG4njb70eAuZSLDa7YKxTnUJ9LtDfpyJxo2fUOCegafscVa9jadD63YF+MmdVv74ka2OPvlmC3BXn2N9+abRHNazk4XQuaOgfEvJ8a5JMYk/bU8mv2sH6XHKv8S8BCJJagr8h++WdpzGDFhaSYoexnuW+0/bR5XX2vYwj4OTL76rFA02XBUTFgWXLGGvyebLLJJptssskmm2yyyUo76kG/JNXpG6Db5A1C1yHONzi3yuZGcsIUkkvOgdpZyhcDAGgCYrPGDmU/S4wQIG8McyL3MCrx
4gwSkmUESVh+ES0og1w+OyOUzeRcDfppxDKDeyrl2foM6FlJT31PwT+WURJHVQX2DRzaldOBN/PS1mObzW0YfhqBnpl+MTP8oI4pJPnHMRA1MSW9OPZ7lj3tQ0zPCuwBLJcKKJDozGsMyzXPTcXwY9ZYBmEh9VwEYO+iRxu47EUfsbv1iDOPJgBAQO8IIfK1app1aesmM5dMm6ujCyGUTAAXZOMvke+E4TWKQehn5jP7nRD4rercnez4NeqbJKpXGVeAQ3AK6Dgs+sBAdQ+kbb7Lkc4J4Dbgdh9jArIRhshZzWzVemmbM3vPFc8KeNv7oAb61MkAZKejU+cPwA5rPY0RS44gEBRutXKWlPpCLrdHTOWVvq9YAGo2OrrO+6K2JbOjYvLtr7NjDOgDsLRuGKkfO8pUGJb9dZ64+/UBIrs5rFCMsXT+Ijvc9bopm4Bi4HtFWQQKlldR7BrJHkWWuWlmgGvQRR0HuH8HaUR1DFlnfoTIfHpKEfd6jFQPK3tcgZB1kETBxhWHf1Hnw2AhDJnnqy5/shumzRrCrOXAikUfk5oBD+UuAUM2uIkdxMKsbme8lgod3J6bcpDVvusQuzlmfc8g36yB8w7dxhz9xhzO5PMj70BNy+BP0yY2HFCDX6IeEAw7IlBWQjSBDBrUouOO/VzL4/Jj8T1VSbD5+6hbsErCYs5rzfkGz+XC9OODivyj9wAkp1vFVInOMxtKWH59iAXAVgN93M55baVt0AVVTYgpv2G97vGOJF+v+tA5R60GqjjP42dUxk6MwvUfBpQo+JeDT7LpHOyIx3uViHSQ4DMdW9PzHJhvMmg63+Dy3QaDxWu7+NqHDq5dw5pfR4iEPjjMERAaJwDPeFBFPY8pwOmI+ynLukbA8QzQS7BT1LnONSA5vsrVRgH2YnDM4NTzVjlPkaKNqiZSAdZLATLtE3Vg3UhAnV0b1GbnYWawS9CRoxQUs6ydLNNPWa58c/F+QdUpakYWQdlY5nz0WR92Lkyvjcy9Bfz0Wa5LjAEUuQydrXPwENJcn8/HtAf43klSxPL7GjhNAQvDJj0gq/NH2tlfg53UAvjei4S0rqnZnUXAkFGwiQq4ajuNKLEAkBQYDTSfKLnGBEDk7yXWdgSrOqQziIi9BAFQRP5JXl8D+XoQbPAe0nEIyOupEalYm9+8kEav108GQNc95KG2aW012WSTTTbZZJNNNtnRYkc16Bcig1G6UdII1OSE2dzHToQQONdKMBuJpmWngpRFuzogNAyMiayPJ877xGAaA1fOIbGFFDzRXWcgYYPJLijJv4iDRTedrcsbpda7UYAvv1aZQ3bRq3ynJ5OrT76TZDwJLDsVMqvPsmcGOfzE0jZjmXPCbkbVgeVKx0Yfs8MpwEh7Bs0bkSVl4hLng7Y7O6gyihPYQyPAaXaCWXk/tf2RAbXSniolaZmWVt7TOjedY1BLGX8bPaHtmTPk1d9H7EAjR2h8w5H+lJ2lKQdQ6HMOIENVSEcLwpeTSHCKIUt5mlwXKdI3OVeGDEGG89jNQuLoy4CPOMAk+j944pdcIenLWeqTc6Kp7KeWjgT8sbOJ28n57GBpR6Kdx9itjc/An7Ja1wzDL+WdKcC+5Y5cIINm9WeWgaaWosTrD4wV8lrmQNb9qOJDVNVzGfuvduYp2FcDfWMAmx7HmqvOTaU1B5+nv0uJJvuZvooAgjNOd8rOVH1P67gs2bwz7cH5eZCdQQr49fPMMLBsbWU9uAZwesN1DLBLkMBY5H4QBFBZfhqfUUhyWcCvn+cACQEgU/5N+5PCWSeR+1rdEdbfZJMd6bbuHNqG0ATCzDOjPoJlNBcuDnIWA+IeFjBqHoBZy8FUQQATt+eatB6bCbPPeQfXtujkNXmf8vklM2wpNQuIjTFvPGWntlfGCWVQwzJQcpkZyNTxOqkkmBx0MEFUIa01N9JcXtS7Nh23qudIjuN+kNdMwDjYp2OZBlBpm+fAtFAwHwEehhYhM+Lhosh881rK
C/HKuxIgsYCfHl/lD7czqoFWR0A0gRw1syf0KKRRQ0AIAeSZs0nzfWhmwHqzjj4GeMeQJEtxumLOzNe0NMW8+grYQOC5quemSUFrXoA3UlURySFO7YxzNFqAV6XYRRJ0VBVDL8agsXTNnftF8ZsRBY3l7c6gpQPvSZjJHpNMIxnW1LL5X+8fnZdhn2vllDEbC3YZY+3thy3Le6j11vs8LmGDqQUgybPbtVeSxN1PBt8yq2Vk63XW2FIoxcVJXyuBX5MXdez8FXANSIDeWPuSZyYt/IzLbgDvOIXFIC+i7NW84yDLHtEE1cVy3HRDxQS9z2vlBgRl9IYyoEtlPvtuOB4AJWhsgkzTZxM2NtlkR5z98z//M+5yl7ss3f9NNtlkk0022WSH1o5q0G8QBagsv26eAD+OwJY8K1Y+cTHnnzSt5FzpEhCmIJlujBzxJsgHlZxkp8iYtMyy9+0ms2bzKbjBjBeXwSdHEjWZHVcqN6VSno5y/r5GNljUd5zj0OaOM47rtNHSNtM2AbZm9NUOjCURyCrpqTm2UgStgn3GQdWH8dwIynpskcE+BM6NyJ9nx5MCSbVa0Fab+ATmGTZfDfrZXB1j1seYQLBFzw6o1nn0BPQuoo/MFksbeP1hJQk4YPqFHrGR6yGSYBR9BvsS2ygzMWujOAapymfiAI3EeT3yRp4dQkSRwegQESSKeAEkqU94BQH5e31CegzwBxjwb+gQUbPgqoJ9ScZ2BPRjsFtkp/R+QBmpDpSsDe2TLtJo3jvLAOPql56wZWAhv47yHQyAOSArNHkJGvAmWlpZlmNWM/tqsK927Tjkz8fy9m3F6uPvlp+PtWm6rKlOzIZwAsD3MWfFCnEIQNZGyIyQIq+PAmz6nrKS1ULPbAsBwIkcYvSA6+BdgyD3b0REj6GDVGVe9Twicbs51atK9ZBxs18U84PWJab2yY57uIaj73H4GH5qPTBgoq+6/MlumNZ6wsyRAPqEGDVnHs9zLgLBMLjV8c5zurD6XQM/250VFtZ3w63v4SCX+QZaAfbIOZB3iH1ght9A3tPIB9YsFhlx7BrCsvxIHPyW2eKICsm5fBLD81HpyQT0CTuFQsfglAJVNnhHywguA38i/ZjqXa2pFHiwayUgz1/8ugT8sgwoEruP1yJDUFYD1TxFdIHPnVltER4876vsoQ142anPzrLvE4A0AkyovHlu0zzr0nyD10DdBkAOjZ9hJpEaM1l/LEKeJ6NR5yDk9Wc6lnmRwwAJHlFkqzU3MUu/F2CDsvdi4DVdNNdUwOlYgLlU/HaZFdKf1izzT86pOBe5UFvNtXb+Z0A3fzY2x1spxmIePhizvzvQ+bBqs0g0uj9Rmc8D6auq1JACB8z7GpwI5PFl7Hh2f5Bznuq+wowl1bhC1fpqYGNsv20AU2ZemtzDxtI+mRwoNLymcTOTg1q+Z8acIAsH3YdqS1CkIt8fnEjmF+tnDbZAEWzK+049iAH5FPBL5yJ3pQU77X0o53M48b5pbTXZZMvtU5/6FF7ykpfgTW96E4455pjDXZ3JJptssskmu9HbUQ36JTaXOoYl8jpccxXi3msQRE6qjkylpkVsAKc5ONQZk2TceFNPkEhvQCIY2ckAcJ6bRRdE7jPLUwJI0kuF04WYueRcKV2o+cnWGlew+DS60jvesHvH9Wg9O69aI79D+wP06bkt28DbzXQVVVlId6pjQ2VVld0X83nbyPMYua30PY1iH91MC2Oy9ZxLzlPEQhw4TaSifXdqNdPSEwkYiwwsmo17CNmRZjd7Kl3FAKDm/gD6wM6jPlQ3mTpRFJzu5uwsFBA6Wd8DXvLGOAH52hnn5lBWn+a6CMhJIcdMgBFAHBAx5j4NZlVGyB5eNvF9FJYlHMvSxoiF43Na9BG9i+l1C0rXKcu0IuWr1Pe5vayjBOl+UNCVc/QRdrcM8M18vi+U0cqAN5K8p5UQAnL/6MWR2hd9ECmCecwsE07L86CSeUeZeQcowM19OrNY
S2NHq0pnsWPEOwCxdMrlMkvAT/M1pUtaOWUDSuZKySosgb7a0WfPVT+3Ed3eAIFp7CCHKO3SBY6e74KyT3KuqYMOnreAn45h5vjsUG8AF0QCSxytDdC6Ro4roAVK5gzA19AZxDw55izLcLHJEn79HHFjLxJjXH9jcjhROwOczw4232KyyY5W2+167F7zibHfOL6v533E3PG9Pu9DGtf19ulCRIwR1855vJ55wq5dx4Gch7/JdXCGzR43N0DtNQgbG/Abc8QQGOwD4GYtNJ8f2Zx+yEFDgIJ6PHeoqq6y2VTqnFUQkBnihBRQlca12kLHY3M3T6w+6ue8xtIxwbD8OKefrDUTk0vGAJWT9x7BNYi+TWup2MxE+lzmKGTQFMhAH5DH06yikNdbei10ParrFTVd36AL3A7Ow5Hks4sarMXBbCWoJMdeMpDXazGdNzLbJ7d3AlGBwRqUdOwE8jgrgGpwDi70IMeAhQOwa20PZm0DTwF9zHOPysvqfNkHBgE7OdyiigxLoIbTPH8CADpdy7FSQ4yB5xsfCvAyMayAcbDPN0Ommr4eA8RsYJ1rCrCjT9cpg335OpWsen2dIEkJbPHVd8YDgFBIMdr9UyQG5Hn9SRJ4ls+12DvUNgb2EjHjVEAcAnJ0kQKetUSqMLyc/p4ivMhQWrOqBdxGilPpWpXfT8E/wxpLqEBeuzpH6PsSOLWf8XNuRxt4peotGnhAVAOsyAGCrkFsZvLbDHCl3Op1m5rnGqQlA1RrG/s2IEouc95e8BEWfcCG3CwqcwzksdNK6reO2827nB9S76eGBKTvO5Ck3aC6P6W1nAY7sopCMdY7z/eQn5lxk/eewdwPk0022ZFjj3jEI/CkJz0Jd7/73fH2t78dJ5988uGu0iG3vu/h/Yjaw2STTTbZZJMdBjuqQT8A2TmcpGeMRJBGX4tcEKUJuE1AH3kPNJWD1myqtotvZUdLSIAXgASGWHMU4Yl1Dr2w+hjAYBaTAhuNYQEqoKHgH6GSTgFvoJJjykridXOOqrQynradluR/KCttwb/syIhmcx9ROiKUAaTOqpR3BqUklHVmARBJTw6/TYwyYfopABX2I5R3G3Jezh8nX1wmqWq/q1JZFvSzOReccXDZ42+5F03sPs7rp4w/smwA+R4BifE3cJZFzqRk83yUTqV8fR2xo0OdJfJLVacViS+OeIc4O1jWjeBE3pPZHSzzqZKffUS6TgDB+5LtGkJ2HuR2z31bGZaZ9UpYbzwzYYXRqkCfT6+NfJCyMcBOEZVE8q5JbEtAHJdBopYN62/M6n5ko8KLywjrGOXXmlfSOkpb78SpI8cmQNMLOiDlGmS5pAz4jdnY+9sBfhbMq8+vdPhR4bgljLexRp97Yik+Bf6ZAkQJVKsB0uI8pP0oYlsmBP8gDP+OxKwbAAgeiI0A2uwEJGlnkiFFMO9RIzm3YrwUGd7kiDbyfcrGBVqeY5bI+UV1Xh5iC3HnkmXblT/ZDdNovg9N7NBI0E+MDotqAOxCHudTcAIEPDHrn9YRmmaG2O6C37UHceM6XrM4Dwo9HHghGsUZTM6x+kIzS2C6fAA4N1iTsRQdwfscCMLBUyrhLeMaVNqTMstlbEyRZ2X5pfxlyjhWae5ujtj3iaWWrAD+wICf8yXrS/NQC5jTxwz4WflMXTsBWckggxSZGTgG+BXXy/EPAzFDs5c5mSCMOMr5Wndqy1hnJG1TsLbT+tKXwJ9Y7BYgYYaS5vfrNhhEbtcx842sD1lmWtc3vSxKI2VwR4MEB8oSgcEd73StKmChI2HnG/lmJ6zy2OQ+Up3LKOA3xuSz7C3LYDL9pAYMlwEcNbt/yZfyS3keA/3GAKTBNZP+xJLwmZE2Cvgtu8dseQrymLoMchta1p/5ua5f68CpZWZXxfVYMjalaQCm3oc2xzcff+vjKtNYyyJkcFzXZkUJ5GTRIn0A4KAmIKt9FJU218sEdiZFAlFFADkgdIiqZONn6frbK1bucZjR
qACgJwIk97mu9xEiggagWsBYglBJJJGL4K2q3sNGd/kecjnAVMfOet95qG1aW0022XI79thj8ZCHPATvfOc7cbe73Q1vectb8NM//dOHu1qHzLquw0te8hL8+q//+uGuymRHoF1yySV4+MMffrirMdlkk93I7KgH/Ug2OhR6xMWcnS/OQyWXwnyBfsHOat82HEEuzAyarSfnUrHxJEJ0jUQPK3MLhYMFyNHZm31AH8ThEjVi0oBCAtoxYMFyRC04MtQ5BgTUUWUBDWX1DXIkxJA2Uvx6UbL7LNgXuq03V4DZWJdsviRVZBl+0laRXIq0tPn7dCNmX6cI9QQKlofXDXXKkSjykS4y8LdsA1Tn27PMPHXq1Rvy/N38t+ZLZGA15xqzpkyuPgizKQ4j6bkcyoHKdTMb4DWGnp2GCvgJgyg5D50HuQB4j+gCO8QWc2b7hY4jgZPDqAIbLPAXAjjpB4Nh7CBhQCRFRzuO9ncx+XJAAfBRHWmy4XfM/HASrNu7CCcSbwx8Z0ZVkYjeVE/ZlElGNTFfKTE0vDOStcrYcJWMbT8HdczAsIBUYjVKXybfIvoGvplhIYAlRT7vXlgO6syp893V10+dS9bjVDNcuc/HBBRnC9IXCZxPiZ+5vXOfUfCvBBLH+38C8JAjxmuwT/u7Bg3weVDx++IcTVnKck6BBH2WtyQgOeLINyDXgHyDLgALcd31MQOA2r7LLAJl/priEXNUuEo/aRsEIPadOJoaoJsjAmj9DAjMFs6ILSWmZn2ty8qEYuyM3VxykfUFI5cAZuE2OYiEPzAO4q2YD5NNdoSa3/sduOvE2eoa7F4/BrFxmPcR+7oI7wIWIedt7RJLjd2xDAjywxOw3szg1o9F6Bdwx/IaLSaJ9RmouQ5RAHWarXO+5V17mEXrfQYWXMO3pwZMyDinCgzqoG8dUl5YZYh7yoFUdRBUAd6EYMYAUU7o5ybPZ4+grL5ukQLNEAJi3+fgMmX4qbSnsPyisvxcgx4uKSAsegP8LZF2ToCUWYssQlacWPRhlOkHOBmHecZpIzHLjyzMIu1q50AJklBJw9qWBcMos0nXvSm3lw08M+AGeY8o1zqtgbRNNzcQ+x6uaUExwMXAa5R+gdnaHsA16J3j9XokzHtZsxNh3gcAlJj9ORczCiAjeEILDvxCn+X7ucmY4Yee11ykDaVMv3QuAk4JmDua18+2UYwlkKXA1sjcoSCH9oMxs6zVZUE9XIDZD0TzN5bfB/kgsu6MoWDkDc5zC+BvALRrGakdDEBKLuVSVHURe/ZESOtYJe4XWLcJeAKQpNy1BpbJOtaPU6AcEZyL8CZKrs7zrXsOzYc4LCvnCldZ+hxQVd4XkQLIz/hcnQQRaD48rfQY2JeClHoElVV2noMnbLt1a9C89d5FkATDAbKvlXGE3+AnVeRYhJDUaVqPtH8IMng4AkgYftTPQfN9ZfqM2oq+44rrHv0sj5sNv7Z54yebbLIj0x7zmMfgne98J6655hqcf/75+KEf+iHc4Q53ONzVut4thIAnPvGJnBplsskq29zcxNOf/nT85E/+JJrmwFzw1157Lb785S/jlFNOuVGxSGsli8kmO1rtO9/5Dm52s5sdtuMf1aCfd8SOaDXnOTq8aUG79sBJ9LATxp8CfOxMmmXAr2ny5sJlR1foc0662kLIcp4K+C16BoPmAgLaes68w8JFOFq+YWeHuzqyqMjjVzjgU36ZPgOAdSR6HYVsrd5U12w+dVqkNvE5alk23SqTqPKJmntGI9WT5OdI/hkboZ6iZR0rWibgz1HOQxLiUkBP5XQSmEeagyI7O+q5gh0BlF5r9L+CJrks63DLkqVdiAOgJ0XvSllbzk9W4sayBOwiUUCE2PcCaPSIQRqJqHJYOmZJOFQOHWmHGDi61zUctS6tw/n8KEXEIxBIchHCMSONomAucQj+hRjhJX+QF8ZCcjYasNYCr7WMqpVjs0zX1vyt/X/miUH+bp6lbA2zlc81
lnJTYQbqG4QZMPMzAC4x8aKA1VbmsbhMpg/lqGK5VFuAcekSuiHpY39tS0aaAfv478yaBDLAp8EG+lqjy/U3o3WWtwWezP3JOpzUIS5OW20n3zR83cH3uOZ8IQFXbd3VlLESIsEri9iwVtUVpXWJtRMyFSSsHHI8J7gGrXOIkRjMJqTcgzHdAbatbAPncZHIyRyxYLatAfdI8sG6tV3slFIJKhMckSPUS6fbobA+1o7/1Zc/2Q3TaPMa+L1ZVo2IQM0Ma826OMwd9rqAXth+m10Y9IdjwTK7655lQZv1Y4B+DhdYojFuXMdKDM5zmTKGu7VdDPyt7wbN1vM9JeO6six07ua5ho+p8uiWGZ7nFAEc0lhm5uGKgZLGORNMpa9ZRSLkudvM2wmgVNl4fTS6tpQxQQCMPiAHlpk1U1orSZsquw/IgF8EsAgBISABfRbwsz7xRR8A71RwdPTe1TWMPqf3ZS7ScVJVAuqcVjq3KLMpsytpqDShTU3E7REDqJkx8AcwACzrohh6UAcOvIDWwyH6OfcjYf15z1KpRCynPUeEN7LwgFw2w2RyDgUY6sgBwmLqA687ksSk88y4ioHXxCJvmc7FMFKt1Gf9mbbB6LLDAn5Shq496v5Qz/96fVKxdbBfDXAv6ftFdWqAqQ5gsSCfMrFggLuirApQ1N8XYU+mHUzZqi5i2yJ9VdY+aY2BnDzAAn7pFM1vdY1a3w0pKEjHGWETt2lRV5aZpDuXrPXI7C2c1CsB4cBwv6bt4mfc38gBHjz+pDGM8jglZSjgF+cbRQBhDD3cGhBdA+pcWjt7WivWPyrPrwEExRgi9V+LLsn5A06A0ZgkPnmfKvlPuwWo2+BAic19ady0ARHUzhAp5LWSAr0K8irg52dmnMxy94fDtT6trSabbGt75CMfiZvd7GY47rjjcLOb3Qy3v/3tD3eVrneLMeJpT3saLrjgArz85S8/3NUpbFXAyb//+7/jtre97QpqBOzduxd/8Rd/gdNPP/2wOsIPpV144YX40pe+hC9+8Yv4wR/8wQP67U1uchP80R/9EX7rt34LP/qjP4q73e1uuOtd74oHP/jBuMUtbnHQder7Hn/zN3+D29/+9jjppJMOuhxrV155Jb7v+75vJWW94hWvwE/91E/tWCY4xogvfvGL+P7v//4JRDxA+5d/+ReccsopWFtbW0l5NzYgd7FY4DnPeQ5+5md+Bve6170OWz2OavpBipLUzaw4XKidwa3vYXBvfQ/cunmWKHKarTPbr2kz4NW00DwaCvbl3FTlIrxelIfIEdfzPmDe8WPvvMe8C+LUyTnhQlyeBNyBsjNaNlmeZMNcA342elJlk5bJeQKDiNxlgF90TQn4WcYfTF40A/ilPF4xA6XqvNINWpb/WnY9DXBHSI9WIvZbR1j3HGG61jjsbj3WGoddjU/vcY5EwponrDX8WPf82NU47Goc1rzDekPY1fJ7/Dt+vS6frTdchr6eef57Zh7KymTnYs7FuJ3E6DIrZMKSQzE7vnTDrBv9Ij9O6gOGpZAcl0amCRXQCcMkTWxUI6HpMuisEqaNgnHeiRQn56VMjD3v0HqHtcZjrfFoPV+XdW1rcw3XG5/kbRtz7JkfPkgYF5xXaSFsP829tuDcIf2cI4y7TX6/20jfYydH7lcDsGfsmmDoENofqxmlW1kC2kbqoiAywIO1ffBvMlhNlPuhytql12TfL++vuk3qdqFlTkE7xgR2kDvTTyygpvUDslNSxwEGCaWdrdMnjTu1Y9EwKGydUl1kjIyhOvcq15SjUaegZT0zW9RnucGmzfPH+h4G/Jq1DPg1s+Sggp8Jo8dnp+9kkx0FFq75DrDvGtBiL2ixrxhPE6BGPGYz6BSw2eXH3kWPfV2PuTLQQsQ8OsSW7xfI/UNrci+trcOt7eI1m95fa7uA2RqvR5YwZlWqs3X6yAEjdt2QAb8u50CWtVRWRej5MaKUYIMdCgn5bcyyFC0YFIWxknMAZxl0
BQLTugkoGOWqsLDoQ5YdF8ZfAv5idkwn2fm0Di3ji6zZcb+cZ0bOrfpu/T4pECLrdKrbUSdHaRvyHLRXAKZAWgvFTc6hiDn3Q+6Te0HdJtx8H6jbgI+dUcsoA12A3N7MZAr83Ge2ZAZbdV0r61WqrmGStKT04Oizsq+mzywoVue+s48Rhp9eb7VlIEe6dsW8LP1d+nzq97qP6BciwbjIr81jDKjNLKy2mPNiMxMWazO4V0cBP/NeljTNUo5juQLrtrDrIZWXVLB5u2X4UsCvArwJeZ3g3HB/ooDf/lqSFzaKAryvM0EIuifTtUNivHEbpzZJDZjHptgteL/QLSTXKMvjhs19vH7WMS+EtB601bd5ufn+CIn5twgc4LEIARtyD/G+r2L8ShAqQgcs5gj7rkPc2Iu4cZ28lr/nG8z47kzwrlzzqPlPNUBC9s29jIsTLjbZZNefXX755XjRi16E17zmNfj85z9/wL8/7rjj8Gd/9me48MIL8cd//Me48MILd1Sfb3/72/joRz+KGCO+/e1v76gstRgj3ve+962kLAD427/9W1xyySUAgB/+4R9eWbk7tfl8jnPOOQfz+Xz7L49YjBHvfe978bCHPQzPec5zcOWVV+Lv/u7vsLGxccBlXXXVVbjwwgvx6Ec/Gscffzye9axn4R/+4R/wz//8zwdVt0Nhev47ZW+GEPDa174WAPDZz372oMo477zz8JznPAcf/OAH8apXvQqvfe1rccwxxxxUXT74wQ/iaU97Gr73e78Xj3zkI3HZZZdh7969B1Uvta7r8KIXvQhPe9rT8LWvfW1HZand9a53xWmnnYaLL754R+UQET760Y/ixBNPxLnnnot+P/Zwh8M6ux7agX33u99dGeN43759OPHEE3HOOeespH5f+tKX8NSnPhVf/OIXV1C7I9sWiwV+9md/Fn/0R3+Ee9zjHoe1Lkc1048ll0x0bbPGkYPNLMmxuflGzu/nPDsWVNZzfTc7Zv0MYbaLX892IUpEYS0rqbKVgMnrEDKI1YeIuTi9epEGReM47BjAeuOw6LePEEz5z2Aid9Om3cgZqlOqkoYBDDumcpLFyslQ5B/RzbZE1tv3erADou/zhis5oGLF9Ksi1RXgVMBP2XG1PItzYHaQaV99X5PIq6M+Rc0aAMu7HKFrZX0SM8pu5k1bq2NoWV6UEIEoDDiWh9IoewsMI9Vbj5fOzjiLSIGEpuU+KbI7sQPI3o2140uvX+gBYf/pNY7K3lPWleNo8giAxHGE6DJ7ijjfHTMZKTmWyFxLZWc5uY4qd6WAtSd+35ET+S9xLBZOxuzXq5mYrrp+CsbYvH0qx6YR0bTY4D7fL3KeELkfCvkg7d8GENcIez8z0feQa2ckNROTD0Ozty0rPLGslJMeFYnbZ+a5zZVVof1VGaYKgrVO+964wyjLYmLU66q/1bK172X51AwKphxW3ImGBxuJzM+MBCcfZ2kty3Cw4BvFwIw97pAIDsISyY1nHWQhstRmz50J3jXsvQ9dvoauATxHuKfxzp6HrXsQqc9uDlCHppmh8RqsQAOGZtE+er7C6IjtGr/2M2C2ZxBAESyrTx1UClK60ikXZ6tZxB2IBZ2DrsfyJ7thWrjqP9H7CLfnWJba9DPElsfTZtcsASs61m/IukdtQ+SeN9uAtYaSlPJs7SaI3SbLM7om5fZzQJ4snGcQcH0PYrubQUI/A3yDzjj9NSAK1RiowQ4p72s0YEcNPFkbsP1C+k1i/MWQnephZMOoLD9hASc1CaMgoRKfXc/3pyoHLAzQZxnlQBkspc74XgLNWGY+S3xap72uZTgf8vB+1flI11eEPBcDMmfpzxyv2ci+V5cnv8vXw8w5IQzG0ARu6FuzdalwD6hcv/yta/ogef5otg7q5zngot8N8jM0a3vgfSNrVBJJx7w+sRKokJxlfST0Hmh7AuCwCHl90kfAOZaxTuzzGEA0PB/L0CvW2b7J5yxtua3UIZDWW5bZVLe3
rnVzO3cFozUz/Kp+v4wBKFaw9ey1UuUPl8EYrStg195IwWZRy6/7gGmTdIZ6XAVBTcCh3VeMRQk7oaSSYf7VNgb21VOZBlCpWgF/zH0Dktu6lvfX52UAoK77FARH6Hj8A8q1TOojGXaPQGo7CtoPN3K7isVuwXLkmxtIecNlTALAgUvkeP3sGh5Wi7Vjvvc1L2jaB4sSS+sIbXByvsL+aygHssoYq/n8EtA330g5kaNzidnrYO77BCjLmkoCqLpYppHg9tx/oHXVNq2tJjvS7NJLL8Vf/dVf4eUvf/lKGBTXXXcdLrjgArzqVa/CD/zADxxUGfe5z30AAK9//etxt7vd7aDrcuWVV+IhD3kI7nKXu+Dyyy/H8ccfj/e///0HXR4A/Md//Aee8IQn4L3vfS8+/elPr4SJePrpp+OCCy7A+973Ppx22mk7Lq/rOlx00UV4/OMff9BlhBDwhCc8ARdffDE+85nP4N3vfvd+/3bv3r244IIL8JrXvAZXXHFFev9tb3sbAOBf//Vf9xvcDCHgLW95C84991x84xvfSO9/+ctfxplnnomzzjprx6CO2j/+4z/iE5/4BJ761KeupLw3vvGNeNaznoUXv/jFeOYzn3nQ5YQQ8O53vxtXXHHFjlhw5513HgDgt37rt/CIRzwCs9mBKQm95z3vwTOe8Qz827/9W/H+Ix7xCHz6058+aEbd5z//eTzucY/DRz7yEQB8vu985zsPqixr97///fHMZz4T9773vXdc1llnnYWvf/3r6LpuZRKpr3/963H88cfjMY95zI7LuuSSS/Dud78b559//krKet3rXodf+ZVfwU/+5E/uqKyf+ImfwEte8hLs3bv3gGVpx+z7v//7cZ/73OegAOsx+8///E+cd955ePrTn75jRigAvPOd78Rd7nIXnHjiiTsuq21bPPvZz8ZnP/tZuIORfluhHd2gXwxgzZOc9wGkkmwNKEaEpgWpk1jzq8zWkKQ8hY0BcRxoVHkIupEdLr63A877kQ2BSk5ueT5moUbyt27kKQ43y9vJdw4kdsxmugD7TIRtkltyIu8pzgeV6LRSngrgFeBfBfhpDQeRoEusvh90A9pKzh5nnXsG7MsA0hagX9XOVsrQ/m0tCMDVmzx4QZxfDpyvjIjY2WDPQ51t2u4WZHUNXDvLjsO+Z8Av5Hw2JPI3ZWV6fi/0gPc5Uhhg+U4B/6KerH3PgUEyAUAgeQFVwtSRXL8EQhnJT0T0AOdOIf6SFVDSvEBRvuOS98dcR5fbXMEvBmk595JDBvxSBLWAg9bhWsgZjZllf9nX8tsYQ9Gs2lfqvmkv51b7p+QXAkSiEtIeVbUM2KcA8xgz1NbDSw46pL5kgDOVUrIgn0Ny2uZjmMCBMYefttkYgGY+j+kjkTcbc0EOWDgKjg6BzbpNg568Y5lPzWdDznHOPqkjj1XLx8Mo36PQc16cDoiege7GNYjE1ym1I+XfFm1g2M4FmzExPMyY6duBc7JmcvdHN6l+shuZhY3rEOd7EFtmXiXmhpNxmJo0jquCgZWU7PqIjY7zmHZBcqQ5Xj+QFyAsRri1XQjdAlgTp686p2fraZ3GAAPnwKtl/YBhwIOCVsruo24OK4c+OgZWZtdWhaRxjCPQmZhMLBqsQ54Dd8h7hGqNGiKPeVY1wQJ+9jxtsBSApBzBLD9l8JlrF0qAEMjS3FZOvW7HxMyDmTOQx8gYkaQ99VnnM5bVlDle5jpUvx9tb+dSIFJ0DZywqqOVRjUSqlFjJ+R9AkBtSPN9jAHkW6B1KbfaYK6JZc7Dvmd5cn0vgUvIjEkfifcV5KB5CM3UzHVbFlCnTE/uHMM2sDbC8NtORlwBttxHS8CvAPtM36ex+8Dulcx8qHuJJLdpAL8ulEC1gkcRQGPXYNtZDTI6V+xhlu0fivWTyvbHiEAYAH/7BfhB18O8tmUckde+KvPJ0XBlx9pqL1/f0+WHIcvS18Gbtk08X5MIcC5j18i1rvcJId0z
usfg/YW8r2O5Z3CY2bhR1ubji93erOlDJAbOHWHhIhZOFV2kvat1d9rnCCDJ5cg4r3Kwci/b+yXlcgTSeDmx+yabbLldfvnlK5NM+5Ef+RFcdtllK5Fze8pTnnLQv73iiivwkIc8BP/+7/+OT33qU3jCE56Ac889d0f16fsel156Kc4880w8+tGPXokTW+1+97sf7nvf+66krKZp8OAHP3hHZTzvec/DRRddBAD43Oc+l+QNtzMFHD/0oQ9hNpthNpthPp/jtre9Ld7xjnfgpJNOOiBJTucc/tt/+2/4+Z//eXz0ox/FpZdeir/4i7/AlVdeic985jPYs2fPQZ9jbccee+zKGE7z+Rwf/vCHce211+Lcc8/FGWeccdAszqZpcOtb3xq3vvWtd1yv8847D7e//e3xuMc97oB/e+aZZ+IBD3gA/v7v/x5/+Zd/ib/8y7/EP//zP+OTn/zkQYHfMUa8+c1vxi/90i/h2muvTe8/8pGPPOCyxoyIViqX+4xnPGOlLL9PfvKTePrTn77jcmKMuMMd7pDYoDu1j33sY/jQhz6Epz3tafjbv/1bfO/3fu+Oynvyk5+80lylB9N3l9nNb35z/Omf/ile9rKXraS8+973vrjpTW+6krIA4J73vCfuec97rqy8g7WjGvTj/GYs/5TADn3W6FXL0JDNamzanLtP3g9re9Lni8AbWWblqQMAybkFZOmksY1cDfDZv9VBtcyISmcWgx6hZLfUDnsOPmWQJjm0ffkdoHRY28hdAaI0b0Yh4xly7roiV18F/I2BfelvuTK6YWMQsQJFx6J2K2afBfsKSUr7rEBgDXbYtis6ythFyA4aLw4YjfTtwZtjOHF6mfx3qd7GFcSOMgZXEdjZGQFmpRKBVP5PNsUEcB4/jUCpZQGDQD+LuTgW2VlAJv8LxYAYfdpEJ+eUMgHVoWJYf7xXp+qaQvKjEJz0A6ry/fWCSiW/HuLAcaDOYQAJ6LPMzMZl0KoRwKoRwE+lqZJ8be28MmAQKUCj5237uV5a+dw7gueBYsCzU3BNr2PtaFCw14tjyKN26Iycv4CptaNa2yQxOqzzVqLWuZZIOVP0CM783oJ9es8k9tp2OazM+BBN30f1OsbA3ibTjgCGvxEHsItRHK9RwDYMjMeZyCxT4vxLPbR/zLjtfM7lR53kL7UO/IqhY8Fw0ALUZScskeO8TXK+o0YO0TvAt6mORfvIvdbJ+NZJNHzsgdCFQf5CR8yEOtQ2Fnyy6vInu2EaS7HtRRAWHoWO5xsZj71vJNAmpnXBIkRmr8k6yXV8f2x2kQNmHLDZR/jZbs7xSw7UbcDtORahadOxyXlgbRdiuzspMECkA7tOGWy84ls6b+hY0Xcs+xyzVOdybcuh8zzlTVYHtqpG1GXYeVqDy4Tpl8A+BS+bWZL0TNLvAYWCAlABfzGvm3Iev5Dz9wXD8jNlAJVqgg0skzWSQyVX6AiNmcK0pEAEL0CYsv2crInGHI32nUGAmqxLbKASAaIyQSBhBDHDTyTOuwWALFlITcuMv6ZlqdgYEPsFgqwDZms3wTwwA0mX4FYicGFUOxZ9BBCw6B08BTTOcb5K0msCeM2vBuTcfmPzqAH8CobfsiA8YxbsqwPnxgKRchAVch+3jFbt8zXIZxh3VC9wpN2BDPIRYPIU8hw4131SiKnf6rqkcQyQkQTxFIFFW1m9FjEgKDBcQ1hwGuD+G6WBKAI98t5lcIojp62EWN1z6LoqQEG+iNY7uBCTEoPea1ux/NRSNcyYkuX6IX1HvjKyfk1MU3J8rWsw2eQHj908BSrFDoDzfN84D5r1HCgYOnhqC0l2mz89xJhy1nsiBLlPHJHIfQYsggZ+aFCBnoD0MZUY7eaIC76vyQcB7oMAfm2+1oZN2sOx3LHUQ5svBSQAhyWcalpbTXak2RlnnFEwqHZqbdtu/6Xr2f7+7/8eD3vYw5Kcp3MOJ5xwwgHnQqvNe78j9tx2tspcVbe61a0O+rev
etWrcOmll+KXfumX8IAHPAD3u9/99tuR3TQNnvzkJ+PJT34yAJao+9znPodPfvKTuOUtb4mb3/zmB1Wnpmlwj3vcA/e4xz3wwhe+EN/61rcwn89XCvqdfPLJKwHWAGA2m+FNb3oTzj33XLzxjW/EOeecgz//8z/H+vr6Ssrfie0ENFlbW8P97nc/3O9+98OLXvQiXHPNNQVgdyD2mc98Bnv37sWrX/1qrK+vY21tDevr6zj22GOP2Nxtq2L5AcALXvCCHQNqAI8bp5xyygpqxPbRj34UJ510Et7//vevpH4ADjtTbZk55/DKV75yZczBVQJ+R5Id3aCfbiKdYasBRaRhsSlXYKRZy8wMsZ4ayZPCckt9YFVOzrEShem2fQJv73J+MgCYNQ7eEdYbjzXJdZYkDcXxb0Gt/DB5KZZEpquznfjA40005ryXfFnJKTEC9NWsvl4ObYHOrZh9NeBX72G8Y0m/ZZvkMbAvyc8IwKfyiCoFSRrdDwAJKJKoVtN+1skRU5h7bqdo8mWkKFM5vzFTEEhtGFkOli0U9hBjMT2UleraGaJKWamjcXCQocxncpZ5XzL9XMORwHJeCRDX1xQANIrPMAvCAJwEZjZ6Pjl2VEhkM8k3GDyMcCKZ6JwCvSq5VraFAn0KgCkzk8C5l9S5YiU/C4ZV5VyLrgFRMA6eOHDA2RyVOb9ajhwumpey03VMbrO+pjULb/SctWsBWUZ25D0gO4P0ftNjYKSuy8ou5G6l3Qr536VOvp7bKgggXEfZj7AURlnExjlX1pWdQVuRnaOcOwT8Q4zoSfsN901Wu2r43u6NM7MCAMdYgKPnMcbKsOdugPTkpI9A30X0ISRn/ZiTns879/XNZUlcJ5vsCLQkq9gtEBfzHNRBDggBrlo5LnqW91zIQqH1LslQbnaBpUADO4p737DjWdm0KB230beIzTpiu5bHbSO5rmO1jneaC9YTcoBIN095j6nbKAA8OcHBeDC4Q5cxAp0HnJmjnZkxRLZOgT9StorzJYghDuwIpLzHVjUhVUHXWhB5u8Twizm/lVmTWlm4EMfXV97MFUTIuU71QeAxVs5d5wQv42QAuP6CkChQMmi+VA8YltwI0GWD9uQ1A0uuXPcU+Y0lZ1kIiE0LCj1v4mMAdWu81okhAdOqLDBmfYxwEbKWycApXxPKQSmgVL8YA0tc1/3JziVV7j7bRFYK0/6t7VYHz/F38vXU9VT+UblWSn1dpT6rgJ9CMWEZGEcOBDdg9UdyJh9llqftY5TAJl0jRiN76USGfj+Av9qpYPqMqjJYWdNaKaHu8bqmSmMYtmDdoVyvxMhrewfwWjikL2G7U9G6cH25LwUBx5DaY8TqscneMxJ8RQ6pDBCNl5MqIhW1ksRRc2wGeC8sxrTX4jX5IgiI6bZuLz3XGM2YVqynfApuJK97dpcenCt5lsE+3SslSddYAt7Ia16SwNDJJruxW9u2K2VOHG770pe+hDe/+c0455xzcMopp+CUU07BHe5wh5UwD28MFkLAYx/7WJxzzjkrKa9tW5x66qk49dRTV1Ke2i1ucYuVlgfwvHDcccettMyTTz4Zv/mbv4n5fI6NjY0jAvRbpR1zzDEHDZjo/XljtVUBaqu0ruvwne98B3/913+Nk0466XBX55DYz/7szx7uKhzxdnSDftDNhQfgkzM7unGWmzpdFODj/CS8YchR10ign27+AzJ4pTYWOaqbD3WeAAz6tc6h9YTWO3FOkUiq8HsqkaSRpRbsWirHV7TBiDejBvrMJiwaZ0Qkds6NyXcq0KeR/EAJ9PFnGQwrHRTLAT/bXjaiUdtMt3DaBoShlKdXZxWZ3D0EoJ8XuXiWSRtp+5BtIwFDVUJn1EE12HzyebtKMtBuQ1NZLoOr8Ay0RXJAL3mN1KnV5815tABgxS5IwF/oQe2MnTOS0yyKky1CnUAdg18xsCMndECU7/g8DDDAYuotkfweBDhh+gVmblGUz8Q5QjbI10T31Ew3
bR+b2y8Burbf2+h0a8ZJS17lJoeWI9Nz5DCkv1uzTh7bh3Ob2O/aD4Yv6xyS9fvao9Rppab3kbaZrQPnT8xvqOMvjxsG7ANKZ199H+jnlVEaLyKDwtI34Fx5L2wx1iRnZVGu1BlUnMPo9UI5bmjEvSMkANA7x4wLAf+o74R5IfdRKmzIZuD7QtFSOQ+RN6bkaAIiiXSZRJkrm6EPEXMzN/SB66rjpHUsMvtSr0vExmEA/aZo9MkO2lROUcEVNTtvAinnmAJ8yggBMgt8ow9oe8n7JPdQk9YiLd+jPqR5WXO0RZVeb2ZJcl1Nx05lqDQCVBVSnsJKpG5RjonGLBBHwYwP6QtbePadB7lq7lFZbiNPWYB9MgeltaU+MGT2AXmcVMAvBxjkL+vQYll+Y+vToppEsq6iNMYmpQRljAEMqsn6wDsGexzyGpWI1z6az7a21Ho6lwQCwcwl0r4K/CmDm5oZ98FuwX1A1C5ikuRZAJ4VEkhYoqG5Dm7XHlC3QPSdBDO1CeAEyuAcPr1YMJty0JtcEzBIo0FsCbyStVVxLijBDhtwZwFQ20z1tJAD6kqobdn11P3CGHMsyaIDKfBtsA7YL/ZdFRGtoJ/MhYnpF2IRvIjAkr5O2wTge6M3xx9bT5jjjFktx27BPPuZo9y+zA4ellWz8vW1rle0f3N+Px3basS2LLMGoRQ8VhZcjHEIbG5j5ZrKZcWOMSB9LGhQywl9GSQVAsijUEhpnYOjkPaqfKrj97cyZwdm66bgnvOIGixh5Xud48BF/a4ZJ+tjWsDPYbjXOlQ2ra0mOxLtSGVhHIyddNJJeP3rX3+4q3HUmrIiJ1utqdTpZJMdyXbVVVfhkksuudEAfkAVCDnZqB3doJ869H1mZOj7PfjveS/Sa3YzH/sC1AJ4oxcjkkRSduRyqV1gp1YIOeLWE6HxhDZQlowDsN54lkkRNpqCfWsNM/12tx7r3vHfjtlpaw2DLTMvUeuOzCZ+CeBXR6rXrDUrkSP5N9Sx3seIGIA+BgH9SrlOoGSwWGBPzQJ86T2zPV22b7G3ZWM2yCVYYoCMAdhH0lbMmkwOv74DunmWNaqjnJcBSML6JMwE9HCjzEn9NUGcR24I0Oh5FE405Ch2+MBfmO3miN3QIfZrZXS2kSIjceIoEBiNfA9gnAFJ7tMnh0AEmE0h55nqkaLuGeBJV8D0F5VkioTE5iPpGyqh1AfAmz41dpVzNDb/rVKUjjKQayO2+XohXztz3RTgp5ivTX3YaB1Mmq/SOOHUQa39WZ2X6uRx4kWrI/HLs6qAN+OA5ufheRVlxADWgqwiup0r2rI8t+FkluuCzG7V8aIG+yxzYxn4V0eXJ9DUD8cT+72RejlpWErnIvJQ8p0xZqOOPb35TM8PXBx8MGOvmzEA2IuMn7RB1LEgBlC/qByioag7M0YdYrsmLNxZukfmfUyPzS5gEVimk18HbHShCIgAmHUMAOuNk6AOZnnvW+yHc3WyyY4Uk/yxia0G8P3SrCF6XksERESItKQAfnsXfQpy0jGSP5d7yUfMQkzOXWiOWdfwfUwOcW0PA37NGmIzS2uX3ozZAK8DWpGipG4jS3kK+JeAv36e51cgyWer3K9ViiC4tI4aMKJJnNMq36mgEwTw13ZyHtTOGIxqREbeN2k81cCqPmguLHlGzt1nwT5AA6/4tbL8Qsjg3v76iDXgjAPTMsuvkXUoWeA0DdKdXPsZS4E7l9h+ifBkQJUADg7qA0CIYGXBJq9BCua5T+seXrcsUm6/WDH90hpIQOgIcBu3MwEnPdel3QXqW1A3RzuboXOZxaRr8hAIC5SAH7dPfh11PpLz7IWh5V2D2EDkRLvi94N5ksr8fCWwO37RanbfMsvgmrDe+wx2J8CvBvsKxr+Zk0b2GPnI+f63TFWV9LVBMI5iTlEg+B6T0kTms15z1MeNQaLoXAYFq/zTQL5ONjBSA5fS+tSsJxTETb8xbayvx4A/QOTGIWBeYOBOVv68
Ht6CaRYFGqaY6+T1WQONYlDV9KL99XXdb7zZ71F6LaxBBdJqG3sv8LrQ6Z7K8Xp+rXFYSJ5A3Q9bUxDdUVZZSUVKezsNonQN52fV0wnyuml5jJytg3bdBMHPEGe7UsBHGpPNoS3YpwClJ8DvD3t0sskmm2yyySab7EZgxx9/PI4//vjDXY3JjjC7AYB+2TltncfKzpj347nn9Htqup+zDI76O9Y8EZwjtJEQvCtzqXtlMS0D/Zj9t9YQZp43W21ywliJJQNYyflGiCOfXBmVbqMqgcxyEieTOi8CskydlZOybWOlhYByc7wM4LO2FOzbAoTPcoVZdoqZPSULTB19pBs+Bfw649izUc4pn1ksI5xTm6n0T5cl/QQciFUksifJcEcc+Z2lmcoTrjenfF76oQMgOfRCn5xDyYGkIJ84QCh0OY+HlDPG/kufmYbWc6CYGX+10yWx/sgBcrr2bJT5p2eh598HztFEEgVPI33Bst7UMaB55ywolpwGxilV5J9LleE2GbC1TBT5qJwtzP1f9W2up/iZDBvN+nGo+G4G+/R7CmIWfVbPS0FM0+6pX+ppK9MyhiRlCQwBzdpSvSxQDAxz+mjZtv+P5faTNiMdQ6T/U5TxNfrM/qvBPnE61XJlqR2JgWMPaX9tP2lze656DZLjW539kQvUO1YdaY0C2yJTm6ok9zyl8cBGwLOkKQHCthVZKTmXiBzN3Qk7qQvAtfMOiz5io2cpQ50z1DQ6PsSI1jmW8gIw76t2PgTWh+s3YvwwnNJkh8oMCwNO5idda/mZ9K28ZgoGyPLEz86TgCWxWGdEoJSlk2AHChmIr4GykuUnASUwwQ4G8EiAXz9nEKtfIPZ9MW/G0PP5Ncp+l/FGmfBEMj4Ypzo5DpJxHtGFwsGu5wGA8/EqYLVNkITlCW4F+KXvhOF6QyU9wxb3ujIinTPsPiDl89O2TMFGRgqVyCE6s54gl78v5WvQXGrfqAEdlFiMXq8tVbLrlNdmeUwuF4sq6ZmkPfV9xdycQ1zMgaaFCx2i9AetI683YtkOIR9D15dSVHFN9JnzUFKxrqB6HlySi84CNxbsq4fQMbBPX9m2rteVZNZOdaBgDfgtBftqGwvqkcaxwGQJWvMdk9Za1TkulbTUuujxQshrUV0Lk9tSIrxY1wNpTNK+qEFZNrBxzGomoZX5BJDkbC3gvT+zbLq/ozkvx+vuSNV1cCNtr1aDp86ML/a9GuizARy2OP1YxFx5j+WwoIjWESv7u9x2jli+Od1LlWpFOr5vgL7JrN21kO/dpgXN1uHWdiH4mcg4m3GfSpZfVozIgF8CvEMFvB8Cm9ZWk0022WSTTTbZZJMdLXZUg37RS64XI8MG8AZv3jOwNe8jYoxYBN2UZiaZNeusrgE/BczydwmtB5zzCN5hvQnoI3CTkDdUzmWplNaTgHwugVatAxpPmDlmg8wM2Deaj0uNcqL3FJGroEGVg0oZfMriW/RZttNKQeUNehxsiPdnW2NbchmwN2RM0eD9GuhzoMQMs3nfWgVV4jB/j41wrllOY7m+OP+dSw7/GmADIPnE1JGQK5zrPn7SJSgjL4VFSCqbKNc3GkdbNCAmesfAHwyoNybfE0L6DkWPQtYTYMafsqGodExZ1t9YXjcForxziCCOUiaqgOFhG9TykwoKUWqTEYBvO7kpBWZs3jVpV3UyZWdp6WAbLU4AGk9IkqV8PuPfPyCwrxfGWXVeRW6pJU610c/078KhJ6+DbU/j5NO+b4HG+jtVmXocck3Ki5gcgH6Grdh/Kdp+pP7aRUIkw37IIHLtRLPBCOoq7IWNqZKn3hFIJT9jQOzmDFJinttHAL+wuS+3lbSzW4vivG/SfaiBEl0A5iFio4vYtwjYu+hx9UaHRWDm30bXD5yD6jBrPKF1AeuNY5bTxPSb7Cgyalp+zNZBa7syKNPMsNmzXG0fmcGnzNexvL/KpFoIGzDEakyQ+46RAinANVneMwUtaTBGHm8bR/CQ
+V8ZfosNUL8A9XMG+xZzhG4By5KnBGgyKITWjKtjjnYzV6axz2VZSYQ+sQcBsEO7mTGgSAog5iCtaB4ABtKeB2L7qxrsyKxJvQ0wy0w/9HNWSugXsFKoUQI/eB0R4FuX2H5O1tVRpL4BGbMpoo8EpwM6NA9zzomXgp1g1gTa3sssBGjuYz6xLBcYuwVLucp4H2PIyhAO8IHX2m0gZu01LgGlykwdy4HYR8BFaWvZQDQ6X4yx10xfCcVcVgYblcF0Q5BvK7PrqJTHUqVRa+affG9Llr+tO1Dm8zXrxaiPaK41FGCDyEBSFWRVgnc1aJXSMhCV9dLbUdtZ16pVXTNDsEuvAzWDdAXaxJTWIXlMse2qzzYQrJb5ZBYjA8mmi+eq0XCfqeAjS6OC2Y8KgttrYhQq8jppybWqAwpMLkwrPUyiAlIwAc1ak9evgI+ExkWsNfmzRQiFZLOC5kmtxrNUsALRIfK4Hv0MaAOwfhNQO4PbtSfL87bM4E4MP9cgtrv43CV/K1+zmOpXyNib9TVZ+enJJptssskmm2yyySabrLCjGvRLTDYBsVJuiRgL2RkrnWTNRnRaZkqdV6uPmmuLwT6ApT71sxDK6EndS3lill+SenTq+CA0jh0NjURet24bsK8+b8u2MbJRutHuQjQbTAH9pH1sO6mjTr+v5wSURKsxqfic78EkU6821pbBV/xtIjf5b0qKmj5t7jLo56WdlrL7LOAXDMtJHR32YdqRVLoKAHUAZKPM16IHHH+u7KfUDFuBVIZ5plY4Uezf4tQomH4Ay0eFDhCHBcXALAPvgeAzYLi/pgyGwA7WBCDaaiurQV1TfSiZXRKl3bhGABwadUikyOGtAD5zLZKM2AjwxG2lnUUAvionpebh0Xu9lKWV4iqwjl9T8cx1VwdUrsqY1GcN+JGyJxTwq/pdcY76sE6bmNtcz59Q3YAWHE2NUwF5QOr33LYGFLTfq6/JWD9WJ1q/kDGGAMkNSRpg4HwBEnPbjQOXWX7LZRBbJUBdBmsd0UBSSt1XLnKrENipjBDRJ1lVVwLW6lguCjLR5vWgtgXgbOujzCX7tzdMxj5GNAmkBJzMR4faprwzkx2s0WxNpNf2gGbrRX7Uvosmn1fARh9SX9B52tqYcz07qZs8XukYYp4LUB1AMOMt5/DtirlfAT/MNxC6OeeEsyARgBgCaIuVr5WIjjKea/5dcg3Q1Mx7lxk0zjHgp+dhH3reI7cNMyLL9zTnsQam5O/m8dFroAQY1At92dZ1PjsFtthJnxmAnsAOdJMLUdtMA5RSDmLL9iMkCWfL9lOgp2cEkOdVuYBcD82Lh3J8BtIcxWucktk3apITbGAyrzGDieApsupGH9F7AAiJcd46l1iQmmPYWhAgm2S+6iHnwA00BCzl3PSqjSlnLAP6xoZVu2529j3JdZjuoXq+N+1Q2FidgVIpwQb9GOUQ2ya2fnwts/SjtRCR74kllsA/u+7pZQ634J+tv1k/2VzcmmuuBlYdUQLf6vdrs8BflFoRBHyKnNc6RAP8Vb9Nr5ecbwTy+Odc+qYNyFs6xS5pxxTQYKSHyxyjLksOF8FaGrCm0ulA8IS16NBGwsJFM+bwySngl9NZyHnFKOCdBEX0Hfel0AHyFiRfa2xmQLPOr9t11GkotG5F0KAGcvU67m8zPlwPNq2tJptssskmm2yyySY7WuzoBv10gyDO/q7IL6FRnuOAH1BuoPsYC0eIBQQp2o06l+dGNl02stM6WhJgJWALS3pWYF/flZJHY5t0fa4lhGRzqHlvxoA9C4rqM0ughgSYZNBvpK17m0dDIjop+81DHxP4V2+f1RFQt3nazBFvrDU6Vp0u2nYOHNWfNnx9teFLklSG4WcBv8R2yjl9AEhenyriPEXbqjRmlvwbsKKWycpUzr4EjNRArZpxVlBfsfpcSJHxXEd1xHkG8NQswNj3SbYs1df21xByzsJYOYfIsROPFPwKQDTOLQFTLIAzMAWctC+H
4TUYBWH1GNWzzbOUncKNANkQJm/JYgVK55k6DaK5DwEMnHvq0LCOOV93aAwBv8LJlVh2BvAbA55rwM++rq9ZrfczEik/eoyq7KWAXw1OSlux06gxUfQRoA5omIWTWLLkEANlkM3WvQa/KbNHOf+LOImJnWt9lGsVUcgxA8wgkZuF2RdAll1zDdetdrQvs21Ac5UGU+a3zStmLQc/1L8H592i8d9NNtmRatTMUs4lt7YLwTWIst5aiNQtBzwBXZ/XVyr3pqZMqjSXp4G3AsRi2BIUIGGW62sNrlCginPRZYZf6OaI840E8ts5H6iAurH6AGZxA54PKYNfaBpmk8mQU0jpuVx2zZjatt1l4tfRQoE/UzT6fgj2bUUVVDBLc6Em5ptZZyWmVK2UAGT1A9dkBp0DemLAg3OL5Uqrmobm9oM8R2FI6ezr7byCcfWCbdvLtPuYdCEDlA3P+cLMajkhszlMTCxI50pwmvcWhCigls4HUYI83Nh8hxK0KCTxDeC0DOirp4ox9QySx2CuXzKXj1oN9ul7aV9BJetM2P3BnAPALcl57/J6fksTSctlQTZbgn8IKQBqAACqJCg5IykcU3sBZb331xTYVpnPSDmPr4L19hasUxXUFmIeUiJB5HObvObT9pbv276UwVVbwXrc8uUDPO6RMpAps4+tObmPeawGZnKPsLqCrqkzQKqAn0vrYDk/qXPKJ96uQ5mnfKAs4Rx9y2tJM79YhQ4rY2vHqWL/F5fswyabbLLJJptssskmm2yyoxv06yPLrym4pXKVc3FCRTDgVUutZKAJgDhPGt3IuBzpa+Ub9aVuxBIhh+wmz4ABAjJw/jnDUNNNSwxAZ3JvbQOAREIJ+BkpTwU+1CnNEfgZ4NP3OgFGNroeQaL0FyrTFWJy0FkHdXJoy0lqtHjOi6JR0vJ94UqlqEwMGXwW5NPy7Gt1EKaNXtrkBbPRs0CSyd1XSxwZB0jsq3wwIUtDkfd50x06oDNSQhXgl3Kn1aCfAWHThtY5oOEI1hj9YEPPTS4Ams9ASnIuJjCyyefS8rmMshXkXKKpT+GQ0nNUR56em3nfni9Zhxw58e6Me3WWgU8WzC5Av0EBGulMSVJSnQOcbSR/T6OBlaGqTug+DIEiEmeN3pecXwgFaAeUjhsQ31OeaBDFnZra/FZ/X7SMc0DUPJz5/IrccUTLHXSxT+2x1IE3BhjqKYwBfuZ7BRNRHON6T9j8Vxo9Tg07aYkcR5GrI1Acg2TOy/w4dZ8EIuo94BuRt+IwcYqQ0PrI7BCKzOyjfD1j1OspDDuRm0WI8N5xv4lB+gyAvsn3UDNjACBYpzzl+mhdxLITjCU71z2waD0Wgd/rRR7OOuSyg12Z3tnZfqhtikaf7GCNZuuIfQ/XzBDaXQhrexDbXUk5wAYVNZ4AUVhzlOf7EFlWXd/XtVUyx4ECaW1V3X/8u/w6gBk2SeKtmyegShl/1C8QNvcx4NcthnM+ULDCOPDHyhdSAh/VKU4k+Qx7Sgw0Ch0wA6jrAOcxkA5VNo1rBmBmCiYDUh6tYNaRQSsq5h3Bg+Bkrkv5UB3gBFTj32resTyfOZWT96w6sdYY5QnS9S6SRHoC/PosmaeMPDiZw3tme3vivF5wmiM6S3wqvteL9CcJUJYYf8g59FDNQxS6wXXjE/KDNQ81LaidAU2bH1U/8joWO6AXwK/1fDohQLSouT1b53KAme17Udb8Tto3SG5nvY7Vd/USWlnPMcDPphDYzhxRkhFP62LTbjnIbWQ9oWurkffKfQalfYZdf0Xf5jVZmu94fiQieMRiPaVBjrZddE0bFbyz62pguH5RYDZqP+A8vIyYuSKQTtshkqie9JqbPe/L9hfws3u9Gk8nWeN5+YDBu7w3UrN7TBvcmE5V9qfMFCxlS1O/AQoADOAxcCwITccr187ki72AqyEHITQt0IjsprI3fb6hUtClIzTSDiTXe12+xrm88/iS1jqOkgQ7zwsAtesJ1I9A3lsA
zPBzTVbqiEDfD9ftXo6TFDSWKLwcapvWVpNNNtlkk0022WSTHS121IN+FvAb29PpRmuMbaabFxsfbGU4E7nIbPR04x59+Rtbrk00zpvwBSgI8GFBPt2sFKwdE8E64gSz8ju1UyEo6BENy88w/RQgWfQZ5NvoA0KIBWhqbaH5WIKwH4l4L0kRLXhX70TCCS7CmU1hkvpBufFN4B+yA0PBPlL2IzAE+wyDL4Fvy9hUtg8k2tYQ0OB68nvUzgBskW9Ny+74usVuXpbjPJfhfCojRp+cZSAqrq+VxiRUThFy4Dx7Ii0anETad+z4aBpgZK/LTkwDZGzDLij62xgQZ9tCwaJ8wgUYqs8JTLJAnwUAx45lHK/JyShR9PYcUv8XR4Hma1H5rYA4IMUZcgH/TeZ987eCzMkhp87TbZxFA7BvzCygap1rNeA30vfGAcHtWRBb2ha/H8jhQc6vA88YBkxO/daVoGx6tmNbep/zR+pvQY7BPyBF1SeAViuwhQUBZ9MxTd/XKHqKgdk50TNVBnKvSIR5yrdlPHM6VnmKnPOqcVgEB99zNPyij+hdWTmtRyueOZWN68dC/ieb7Ei1pmFwTPLSQRzFMcZ0O+pzISFpmFJ9iHDGQ+1ddpqrpXkbyPeuWgyFBCSPB5RZTn1nwA4uh8EiBj4ScGRyeKbjqtSdOe5g3jTrLA1WiAhbjimAARJrSUQT7FPURaIYBBrL415qB20//sBJUESgiEAKOAj4Z1AKK1dpg6mso56SM71iiA3WUBKAZNZYzrnETMznkv9W4C9QRATJXM1r6MGEWbPO7RpN8pNBgpks8MfrLcfgn/cc2FGVC113OsD1OR9x65zkHoy5jczaVS2Ag0v0nAgxSTyCcn7a+pLVgE11Ofcb8LPrixRklNqpul7LggfrdUX1HO38aQG/aj6tq6qAjALydp2f6zx2Ui5FTi4Naho5jwELMCCrVpjvJUUM5PEoFWu/OlI1+5l+dyvQMIH1JvgnBZXav2WfUwOhyoxVqwE/bYXBSr4eL4tKebM28znPaHVNtQ72nHXdo8FXkUxgk8+AnwZ0OvmNbeZegrDIz/IeJwbuU+AADwXSa1l+QO9RBkZh+7w82/0e1f1ksskmm2yyySabbLLJJkt2lIN+mrtPpDxle6GsMY4wZrMbMXV21OCTjYBOOWNg2GYK0lEGIlKOMqB0mvQajWikkpYBHkC5CReHVEx/N2AdmJJBo5KeAUhtoOAdR+KXDL/NLqCPEZtdSCy/RYhY9CEBfgwcms2XK5kqjSf4wNHjQJBIcyf/8q7PAqyary/l5DNAX5PaXdtyAdqsIjnHAD7T3rZNx/L3qbRXAvtsXi8AUWgKcb4B1BJRFigMAXExT47Egg3VtLzRbmfsgJqtg9Z2gVyDQA7wDPjEEAAqr3+Sw4rgqF/XsLwokCVyXAZJSM+3NWCd3cDXjsvK8ZhPPIgjpeqPW22i7WdL8sTVLEt0nTCshAdQy3ZJJDK5ysms557ugZy3JUKcBVDHgcnfaZwHNOpx0ij13Peg7aqnCZFQYyR2S9xpcATrLPaN9JUOIOlbKo2pY0e/GLb5ChwZUUA5vc7MOIy5D4wcIwF+1ukaekQXAMc5YpgpM+frE0UCNzgkac2xBKDJuZiBXWifIQc0nCvSetASM0T+T3n9MJJzCdwHGtcg+gCElu+XpgV6Hjupn7PTWnLNJMe8awDrEAMSAOwJmHkHIGARCI1ri+CJLCUrdTMVyyxpIDSHHvQL13M0ergey57s8Jpb3wNa35PnktkuhGZd8vllCXVAczs5+D6gNfd+BpiEaebyA1HGmW4js3Sa9Qy+hQA4mdPJFQxtj5ABv5R/Tp/78lEZeWXisRM8Op/HAN/k13WwlZkrI5AZfwqEmXVDmnddk8uvylMwgiiz9OCYFaef5/J0jAHI8XoraCJUBKCXIIOe31Ig0LKN1xvHAQgCfuX8yASEBQrGZOVI5zm7A3pWKkjrMtfwelrGaQX8
LDsqAX/mfV6by0rGsNT02IjV2sp5llEF8nVV2UIFM5oWtC75JyVfmLa5zvGRKI3lykgNkQbsosQsQl4/WGZ5ZIyWGZchA13W7NAYsD2wt8wUPFPQNq1XgpGur4PgBoWU4JDNWZny8FqWvt7zKU9lw3mUBajJudYAiiy9H2UfMlDwGKtL/XqrYLOx9yhL0GvwEJxL7DVeu2XQaqeWJTwZlI+iPsAMRyR2a8H0A8z6ktJ1LNijfAp8avqeAfpSrKJIanJcJbMDfXE9OZpClRdI2H4xOBDaFIwYfcv3hj7L2FRIh+q+17HKgqflfbcO5NTzD3JvJ5a3y0C8nl/f5ZzzlgHL9eBrx2uw8vpRFTTL++tDn9NvWltNdiTaV7/6VdzmNrc53NWYbLLDbiFwYNpkk0022WRsRzXoF5LElPiPZZ2cWR8ZAOANKW9UGpfBJ5U3KnLGVSATgCGwodGzBlwi47ggceRbAKSOZrWskiTXSQ4UWQqSgJzrAb7Y9CrwAci5I59/2iyCIyUjgEUfMqhnHNYK+C1CQB+GbD/ee8TEXAEcUyOlKq1HylsVAhXhqMry8wIIWsCvdRxV3ZC0bTcv2y7kHIeWKbZlZOcIqKrOOVSAXwEAWgsbZZGGLRDF4RQ7kb4yzqfiNQA4D9eqXGfP8mC2r5hjjJGAEttPnTEKAiJf97G2GM0hpI7UdMD9WAiNgXvm/aW54YSRGRfzDJR281zGiGQX65SJhGQQuS7qSonT6jytU1GZrfZ9oHRKqCPKOjwV8E8tY/uNtFvB/NtiHz7K9tMxQmQ+9T11AnOupG40ndGOrY7uF/9w5Jh8AAYUBNh55j0Q/HbEumwK2KU+7YDYD65VqgPArFXKDFQe45D7ERwckPJEOWG/8AhIow4oHftChMRGZOdfYgv1Xa6rBclFgrdw9pMT5zWh9XoEB+8U8OB8TrHNEfDLHWP8HNqRfFOTTXakWtPC7dqDoACKWAK3wWsG/VslcHWd0AdmUjUiK5nyxzkBUuqAnTGwwo7Hdiw2OX2X/hbgOUVl7lTC20luq0qCswDnLPNPjy/jd4wB5ANiCCDMOJijBsns7131uq6izE0OSAy4gBJAsEBgJA2CIORVbX52kZLD2KW1lkvAqyfKazKdH4UtNsrCN2Mlt0PIgTsythJiAYj0ylaM42sbvp5UrhvqdUTRSMIobwBCm9ZaytbU3JM0WwfaWQJwbb0Z5IvwEfAxRfOYNjV124a3r2w/B0p7jGCURA7UX08jwIp2PWWHpeA5XbPI/mRZaoDEnrP9WNtDJB2XBYVZxusgnUACiPh664WOBFAcBlGqJGnRD0jZu9V1tvNzcTLLA6Cs7Lvev1m+ncFo3R/uxOw1YkwqM+C0l+ldyN8p156W5afgnx0NLCtUAT9VseDPc39bXkkFa8FraAXDmiYFOCWZ1oqFbAPlNAiB1985EMG2BVAGXxXjldRX8x4qIAxk2cpapcceO8SY+tJkk022//aKV7wCz3ve83DrW9/6cFdlsskOm1199dX4y7/8SzzqUY863FWZbLLJJjti7KgG/SzopZsHjWRWd7ZuoDQRuzr/W0+YOQEAEUDz61BEHY9sPjOwUYF41mmhEdNdh9jNE0A0yJEFZMADADUzdkr5NoM75DifDAS8GgOHYk5WbyUOowAhRcSxkfZUluSiYq2oRGj6TZKvc7JxjQxSeA6kdZHQjlwbQnZUOJGBsYCfAq/o5yDNIaMR+1XOhhTRrG09ONgIiDUCsg5MAUDdHBsgMIYh2FdIhllLzkUL+jnEfh1J6tM4JtVxphHEo+ejDpegEbyNACQu5RYaA8PqSOqBw8e58e+qjYF92wF9cq0U6IvdgtvM5lSqmJG5CpJrxMtz+kDOe8wJtIWp1JS+ZucKFZHn2i9JvjMGFhd1NIyB/TYFKxXcMrlTErgP5ChtuAM6z62OaZ+1exEwBP6S803qBGasFMCy9gHNC1MfUp3AxXtjdeM3E2PGVXXrGeT1
foYojsRAkkLSOI+1hWqQNUsaExrifIEEAD5m52bKxZnPKTn6FdywTCVxfsFnlkqfWA3ifK/GWD7HYQMsRhPxXL9m2VjXV/mT3TCNmlYA8TLvk73iIeT5S9lRayK1tyDO9bfuXRp7HUyeXnWgy9xIoTdsuigM4pGxBSgAv1GQyJoywgzgp4x8doLPUl7SBGwoS8yyaWJIrOkYQ16XmfdyJeW3vgYRy3lEmVIOzBzyjoMJfDqmNlL+fowEOF7ntb4E/ADHTKAEGFFi+mlOP5X19AZAGkh7ppPOwWwp9695HzHAEefk0nNRRlPNnkuXw4AgxVo6VGoY5vpRO+McxkGvR5tBXGH6UdMC7SwzmCxQicza59fEpPs+pjatgYcalAFKsC8qOCGglgJ/1sbmAa2Prv/q4XnQVsh1SXOeBiRaRYURG5XsrIE+C/KZ71p52/S9qp5pXpY5UcHfuu7L1BaYAq+VHYl82i64r/ibgUnN9eyd5Ah2QAxxfJ19EFZgl6R3HsvFalApYPeaQ7afVQMo8/Wx9cGCffy85fIhXas+g3gUOKgBkKCGks1c5M02oJy9rhpoNQY0pjEs/V31fRiAD3neSOknoECnrqW0PXivaAMgKG5z/ofBprXVZEea7d27F29+85vxoAc9aAL9JrtR21/8xV/gz/7szybQb7LJjmDr+x7eTwHxh9KOatBPwSki2ZwY+Tc1T5yDjpCjTxtxeswcGHTq5qD5dfzcC4vLsmQGUd+Rf5ekJxk4UpAvdguWgVTww+bI0qhzE6XMclMLxKYFNT1LFAEZMEoPynUbA2x20pYG8LNSK6HXDV1Iz0EkpjxxVHkvUjCa+yxtBJFBWGYCMNjXOrCTp+9A831IMi3dHBR6UD+X9/oEGtV5xhJYKgwxQBx7lUUr/WJZfhbwqxhpYyDfqOyhXkMACLkeS61mNyjwox8jR8cnoM+FnJssMceasqwxpsM24F6Sl1xmS6Q7E7Ctf4t0Z5hvZKBvMR9vTy1X66bXTgFTc61cO8uAn0q9LalvkkmL7KBIcp2UZaYap8+1nOwWTuOa5TDWjvqZfdOCb9X3rHykHWMsAw/2s2W27P6vjy3lpLaUe48ZM5Qc7Cpdp33LqaytyX8HsCPROg4L5+BWdVYHMjmAYnKiRwXjnE9SeQRnJNNicipy7r5cZO1oUgdRB6DxM3F0OSC00o/6YT0TC6fJIGAMLJ/lSBzwNJqfqT5bK8dVO+3ixrSwmezoMWraxAyJfgaVExxz5HsCnCOsNx5rDa8L+hix3jjsbj1uMmuw3jgBe6qcVqQ5a/s8Lpl5kXxT3q+a31e+X9/LKt2J0PPaIPQMDKmk55jUnUj7Rt8msK5gzCsAqa/JMcNPx9p+rB6ZPWxBRJVHTMw4YaBRpCwprSwyM6twIBe/7gNE0h0gOLQeWPSEhcuBW2rK8FtvPBrHa9/GUWJcehv0MugECiBU85JdC1DOexor+UMdqx0M8AFeF5Itq1bVSJX3oAaIgXP25WrlACG3tksADAP46fU0Ep9JYj4CQEQfOZgjRsk/RsPxPJ1yBJ/PCPqhMp/brcaVKaa54Szwx18w39XTV3YsqRQrUMjZWuURG1hmr5cF+2pQW/t5xfYbgxtqUDSayveCpkbDkdS6F2Bl3XbkEtuvyOtXBy8Bw75RfRYFfFZwb0a8zuwioQ9Zelbb1665bd22ZqdClyMp17AF/rRtNNDMrj/r1wNAGSjYfTZHPdH4NSnMOc4fHps0RqlFP+Pr3shY7mcsMS8gaR8iaoxJgfLiEHYPvAUINwyC0veHYN9YcIAyNZdhatxXsPI98GSTHc32h3/4h7jqqqvw8Y9/HA996EMPd3WuN4sSbDPZZMvsz//8z/Ge97wHXdehaY5qN/dkk91g7Q/+4A/w2Mc+Fmtra4e7Kjcau0GMhnYTBlinSd5wqrQREZKkp7LKqF/ws2WWARlkEdDFys9YRl/oDMghoEecbwwAJK6Qy2BH0/L74sAg
QECQTpwiM0ST2wZAdj6NtgM729RlphHVOWcDA6A+QLLYbW19jAzsJc+G5JMwn7slG7Mimh0WgEGWJ+rmKAC/fiE5GuZFuxZynNoO0oYxBJD0YnYSls518j4Df+JcLKxg84UBQ60G+wrGZjrZfEySCHR9ViemOl/2e6NqmXkBLNnDFUBiFqjTw2EUwFvWT5Z9TrWjBRhE/hdyXMJm5b6/4PZazPNreU7gqVrfZxAJAAVlnFWc0f1gvuV8K6XTrpaZ8k5kzbQP1ozFrdpmzOGkL7er4DI25cixRx1f25QXq81PkbNv7LcxMJgcAzv9AgcTEDIwWDA7pPzCCS7Pse7PdZ2XOerqttX3ZaxzwgJU55nKe7qR1q73fjGyJ45/r0y+UI6btYMeyOwGUy/Nm6POeGwh/FaCfGScW1z32WHI6TfZZDsyO2+J8XhaTvpOAnpCiCLlCbhIWGt8kvxkOcklx4CMY7KuSpLmkRhQq+7bghFWBbyQ94gdz8McOOUy4CdqCtF5dn4ri9HZR8WGkjpyYETIgQkiux1DNwQm05himH5VOxZ1Rjl/WXApraOiySvLcRPgxRev+ZT15yJYZl2/6pBy+RFUcjEfswDx5DpogNGWQUGmfprvjCgmtp8zshS6BuTv51y6A5amXXe4LDNdMMz1eur6yvnEZEqMTQNwjdW3BieJMlPLmjoWd+pf5CszDvyNfl8OaHOLF9eqkvUcvU42V1/qg00KcImmnXSuQhwHl0ZvW+J9QIw5KKf8PH9v+GNzPyvbbz8Chgbv1Wuhkd94cnlfpHuZEVP5zAO51gxZSqCp2e9oOcV72wB+Wod0Leyp1Mik/YyqNYkFbu34ZXKXRqL82hzL7hMHOfaQr+lWihcHC/gNy+G9Z4zg9Z/kMUyS9HY9OumATjYZzj//fADAxz/+8cNck+vX1FHctmMaU5Pd2K3rOrz73e/Gd77zHXz4wx/G/e53v8NdpRu1/ed//ieOP/74w12NyY5Au/TSS7G+vo6f+7mfO9xVudHYUQ36pY0lkCKJdTMRJB+UOv4bx3KeHgHUb/LGuV8A3UZm+I1soKlwiBhHiEp4zjcE4OsRNzcMCCLPJrdZKrNpJeF6CzQzw4ISVo3zfGUS+CeOpujYSS/J6xlMo5QHAchOGCJm3hEoye54YidHfo0E6vklG2IF/uzfLgCOIhphwIQYOY+M0zJ0k0gS5V3m82NQT0C+xQZH7XcbCexL7TnfGEpvAgyyiVxXZo1ZdsAWrBqVLwzZaZiAPr1mwtpcBjjma5SZAzRb57/X5Hm2npyLY3JhdVR13nDL9QPgyHFUrguFw8dGOCtIFOXvUdvP9yMM8GcjsEXydhTwU2br5r7U7zMAuFgK2JZ+jOHivc4rU7efRpvz/S1OURMZbUG/IoefOs1qqUdrzlXAV+Wm0XbbClStATBnHMPCck3mm1SPaK4tyBegawHw1ceuo/TTsTUXqCkrhtxfLNsCENZKdiiOntd2557qPwICKlsGPJYR6dgWM+gIifQW5yxkDBuTNwb0XjFjVIgI6limhlMqwvS3ZU5ENWkLHetpTP5urH3IcUAEuaL/Rt9gQddH4satrQ/XswTV9Vj2ZIfZBGhJzmEBCZIkJIC1xsE5YJOyk7aV8Xa3J+xuPWbeYd0TGhN0xUyOimFELgF8CVgLMv+hYujqvac5gCXQIQrTz63tYpAlCE+3aUXO0yMk1ssaBz0I2w+uYYYYZfDfzslFvRpZPxCBXMPjVjDjud73llFV5UQDdP2Wgxkss2YMfEhMvwjJvQz0Mg8uAtA4l9Qv7HjnXVa30Oe0FrPznx3bTTsMrpNcAx0fPXE9fEIvy7G6CL6RBwEJvBqAf+TKnGT6tveZlU0OQUErAftGpZqlvrpe8IxIpiA2ErnUWuKzZnXXASdbMcJSfQVM5N/L7+Q3Cmjk76L4XgJGkRl+iCFJ36c5294TlbrDgN3XcI5p+BnLL0ag72NimfF5ZYnOrJhgctEh
r60ADVTiD+o2OyA2Bhm1g/0Am5PFDIAmIBsoWI+6HlRw2jItx2yra5sBRClPpHnT5wbwS3lMBfCz7Zmqr+t3A7hup+xoJemLIDGSaAAbSAGU193k9FOWXx/z/lGLWwb02Wax4GBE/l6d59gqH1hTEHy/zYLXoctj7/4GUq7QprXVZEeSfexjH8PHPvYxADd80O+yyy7Deeedh5e85CUrK/NTn/oU7nSnOx3077/1rW/hgx/8IB760IdiNputrF6THbh95CMfwbe//W0AwCWXXDKBfofRvvKVr+B3fud38PKXv/xwV2WyI9D+6Z/+CV//+tePCtBvsVgcdKDJYrHAfD7Hnj17VlyrA7dDv1q+nkwdKJ4oPxw7G1rj+FdnEfWLLO2pefzAwF6kIeujYPeJtA4DQ71hOFk5QwGPDKCUH/L3Iuc+U6lJZQbWkooDdlAMOWrbRJEmRxKZzboAbs7p5pPgXI761khwRxqRr5t+KgC/2oJJyg5gsAEcy2lCYGd6cX7qvEh5EKWNVHqzWwgAp4++ZI6NGMsQ8iPl81FHUBU5XjyPmEaak5RBzSwzB1p+zX+3CfAjyS9T5NEwzimNetUH52DkjWQQB0AfgR7CVlL5qiY/0KhMWSsAo3ktjxT1rg/rvLN/8wnmvu9cbpPaCWjbeQQQHWs/+0j5eJzjtpRcS9TOcn4eYWnVgGnKkQeIZFe+t1m+LOeMnHmHVuRkPfL9SyonprJsBThXHmdL1tyYVcyJ4rta9xpsFWCoADdTm+d+PHC+ikMZ+lv70PcNk2XQN6Rfwc8Qm3XuX+06YrvGj9kuxGatfGgfa8xDyyQqr9cIY2bbvmLaTscwHd9UHs4+OGfOeIR+cR/ZB0w/H6un1qM3DGR5oNsALfbyo9sAzffmx+a1+bPFPn50m3DzfSxjPNlkR4mRkUaUN4xjl9cJKhu51jisyfN64+Rvx6y/Ol5Cndtj46S+36sCw5wDgXTdpWsFHcPtOkLqqGOmziMW8NP5WCU9E+Cn86lrkkzgIpRjRohIIEpUEFTmJ57XZ8U8m8fXnE8rM10siwYil5ofLIeOlP9YHzq/NfL3zBFmAqjOvM55lD5vzCOth0lzXg8ueL7OI3MNg20j7G45Byf9Qln1Olbr+JxBECpZa3Ud7PGaJj9aBSz0usm81a7JOmiW5jOdg3QOt6vQzEDLAEbNzOLPKZ3bmC2TrBw0q5xz+p15KCDkyb6XAdla0pP6kT4/OKACzlVfFDlH+Bk66d+a07sLOb839/coDwwkTy0QaUHcPFcrWFS2Tur7po4ZmBxZFyxZKwxYjdpXUAHKI/00A1jDuqXXaS8zenguZ8n7th/ZfVjam9n+v8TGjmv3daldx+4fN7Je1PHH5C5Nsp5yfZeBcqnYCvBzck/r62VtYtubqvcdKLWXPmz7Lb1G2l8sAOiO6tjlySbbsV199dV43vOeh2OPPRZnnnlmAj0Ot/3Hf/wH/uRP/gTPf/7zce21166kzF27duFlL3sZPvCBD6ykvO985zt43vOet6MybnGLW+BDH/oQbnOb2+CZz3wmrr766h3Xa2NjA5ubmzsuRy2MBTjv0DY3N/GZz3xmaf7iw2Hvete7cJvb3AbHHnvsyvqI2qrO88orrzyi2uz6sBgjnvrUp+K73/3u4a7KwD71qU9hY2PjcFfjkNqR1t++853v4Atf+AI++MEP4vLLLz/c1RnYt771LbzgBS/AQx/6UDzoQQ/CN7/5zQMu4wtf+AJe8IIX4E53utP1Mv4ejN1gQD/AbEST04SSlGfrYDbPAvgJy2wQpa0bZhjnvzicKPTAYp7zlhWShoY1piBgyuuXQSvLIlMmVCGFKKBWcoAbpon9Wx3f6kDQzZI6WBLgZ95TwK91hNa7BP6po6bezA3bePz9OvH4toGKljmTzkvayeZErAFTK7c5kmdvNK9fDfwBBfiX38vMwRogTLkDFaBqZqBWAD59rO0CzdY5z4yyB3ybHTAGaFAH
iIJ9AUgPTRKvUbiLAHQCVvTUZIeXgjQWCPQNs0dd6ZS0oEwBzCjwVwOCtRMwNfABDhl1nsMB4OeSY7Z8netTSFTZosRZVjtHZ54doq0DGgH7SHJwUt+Ng33bsdq2O+9lzDj9GNnhVYCnxoGRctCNgH9FXQzIZ4MUYpI5Mw8BFPWR+ocFjBOQtwY06wwA6mO2W4DA6pFyKDW533l1cvvxept2rWVJi2tgx7Y0bqlDnIqHOoyAcjKzILq9l8YeyRGp1bNjrM4P/ZxBvsUGaLHJjzmDegngWzC4R/N9AvQxAIhug9nlh9iKc7+eHpPdMC3dnxYEElMHrQJRrXNY9wz4qZyn5pBToKe2AbsLENZXlHx9Pahb5EArExykv03BWgb0S3Nt03AQicnfl/L2NTp/thkMcU0CQ3isMI9q7MhBFDmoog660c8Tww85OKo2R/mh69Y0xlHOw1fMb55z9Sn4V7zvkB4zT8XcmAA5dcSba5AAGHLFGF6AgNaM6oAGZ6S1pkMan33qBwYsC93g2qU62DW4Bqa4Js85vs2BKX5WXNsEbPjhmiEBUshAhjPzB5nrAPsdkypg0I9puIlyVTm5ffIDyPcGVdc7gVdpzyFBSrr/MG1Htv3qNtTrKH08pj4eMe8D5n1EF4BFz8CfPvqAlNvbLuvTtTPHJgk+9Oac6/t9v6aJA11X1qbtAwFKzTobyNe8WMqanzMoeWDzmS2rZoJaMMz2Edu3tmOK2mPkfdkWYLMNAktrsRHgV0C/KNc3ByMMjz0G+C09fHWOdf312thABwX/rLRqfQQdoYr181bj0iGwaW012ZFkZ5xxBh7wgAfg537u5/C6170ON7vZzQ66rG984xu47rrr8IY3vOGgHeP/8R//gSc+8Yk44YQT8JjHPAavfe1r8fWvf/2g62TtxBNPxOMe9zh8z/d8z0rK+/KXv4w73vGOOy7npS99KU466SR861vfwnHHHbfj8t761rfi7LPPxhe/+MUdl7WxsYFnP/vZ+P3f/33M5/MdlwcwiPjABz4QX//611eSY/H000/Hv/3bv+24nGc/+9l4zWteg7PPPhvvec970G9DENgf+9u//Vu8/OUvx2c/+9kdlwUAZ511Fr7v+74Pr3rVq7BY7E+ypeV23/veF5/73OdWUq93vOMd+MVf/MWVlHX11VfjtNNOw3/5L/9lJeW95z3vwRe/+MWVgFdt2+K5z33uCmrF9id/8id4+tOfvrLyPv7xj6+k3wJ87//2b/82/uRP/mQl5d373vfG5z//+R2X87WvfQ0veMEL8Bu/8Rs7vgcAbrN3v/vdKykLAG5+85vj7W9/O7761a/ioosuwm1uc5v9+t1iscAf//Ef48EPfjBuf/vb42Uvexm++MUv4txzz11JvXZqFFcM/5533nl44QtfWLx38skn49Of/jSAPPlcfPHF2NzcxEMe8hCcf/75OOGEE/b7GN/97ndx3HHH4dNf+n+4ybHHAtBNXelYcKRRzkiyktR3oG6TN639PINo1hSMUieTdVB0XZYsFEAvbm7w6/lGZvDV8pD2GAJ4JIBD2WHKdlrfw2yntV0Z4GnaMmKzclaFCN6wx5hfBwaQFvJaN/WbXcBGH7DoAza7gIW818eIRc9/A+UmWDd76jhyRFhrHLw8K4i4u2XH31qTHVVrnn+zq3FoHUD9HG6+D+jncJvX8Wa92wA29zHIt3EdM/kUMDVmwThmhrWFzObSjaABGYu8cyohmsDFKg+dHl/BP29kPVXSU+Q9B9esXRNQZA0qHxZdk66PyirVDMnifE37W3DDSi7x5yideMahliVpl4BcY7KF1qmkEeYq86nsizEpVpVJBbgt6/NxFeCn0qjangL4xUbkUdVBW/V5bSsrwZaWndY5XMmhVpXJz27oMCu+s8y2k6Ia64d13lAbqbzsuplo+AI0PAAW3ZY20kaDqPqx/lMD93a8DKHsW6n+lNp8wEiwjnIDjh+sD2QrybE8T5SO6MSo
EMlnzf1a3AN2TqhZnRVYHX2L7153HY6/44/i6quvxrEyZ11fpvPjL13891jbfZPr7Tibe6/Fb//sTxySc5rs0K6tvv0vf4Njbn5LhLU9gJ/xs2swD8BmH7DoIzb7KAACrzkWfRAWYBl41Dhmma17XhfMHKGZXyu5lDeY2WcBPDP+6TxQsIF0XNH7E8hBInZ81TFWQaHGgH/NWmLr9XCJ4RTjMICpWFtCwCwgy0UDowBWwe6DSnKKOsLIgFYfAzASi0Ax59nytM4hcrn1eJlZdjmfdeNY6h6h40CGGJlVaUDVshGMXKRh4CtDMgVQxFIuUk3z07WO24xU4j30vC4P3bAfVEE32g+KIKoUsFXlYzR1rttL20iBrd4AHpqjLUYbTFLKNNav7XrsQG10/WbXXgpmjSh/jCkGlAwon68PnOwPIPuBfN7BrDwd+F4l5DzoxR5KAyTt9Rnp52Pz7mC+tfeqDbJJ6558rGK9lCpr+oD2Ed+kuTdJ4Mo9MsgrVzHcxtbaRfsmFmBcCpQps1mZtQS+5+r1OrD8HrZlahkK1rcOHLymedANCFzmh86BfCrpqcBvLeupwQgHC/ptBZbWn9jzW/Y7u4/PEqmVRL+uwfoO3736atzypB+c1laTHdGmfedIv6bPfe5zccUVV+BDH/oQvvKVr+yorp/73Ofw1re+FR/4wAdw6aWXrkT6Msa4EpDp+rDPfe5zuN3tbrfjfIMhBNzpTnfCZz7zGTzmMY/Bm970JtzkJgc/3nziE5/Aj/zIjwAAnv/85+OFL3wh1tbWdlRHgJl+qygHYPbVqaeeesRd23e+8534//6//w9XXnkl3vGOd+CnfuqndlzmJZdcgh/8wR/EySefvOOyLr/8ctzpTndaSbtdc801+M53voMTTzxxx2Wt2u55z3vijne8I9785jevpLxVjiPXXXcdvvnNb+Kkk05aSXkf+MAH8GM/9mMrkYN84xvfiCc/+cl41KMetRLg70i9T3/9138dL3rRi/Crv/qrePGLX7ySMj/xiU/gdre73QEF0fz1X/81LrnkEnz0ox/Fxz/+cVx33XVwzuGSSy7BmWeeuZJ6jdn+ri+uF12MO9/5znj/+9+fD9LkwzzrWc/Cu971LvzRH/0RjjvuODztaU/Dox71KHz4wx/e0THrzZszmwSVE6LKmRA1z5arnDcBQOxLp4PdUNUg3v6YcVpRzX7awjSPDIr8Vzm3H7NiXIq2DTFvqkk32eD/+phlPn0A571ynPuv8QT0QJBdXu2Tqhl+yxh/IQCBOPF6jNkRENUhlBKyE0ifuVEYWHM9ct69PucOCX0J+Hlm3GXZTj+UpQJyHkZyQOg4b0g7y2Bi06ZjMYsPiB2kXAeEEWbgbJ1BP83f12hOv2YI0lpWgHFEWMDPOunGNt4AEBGhqUO4zaI4puoIY5PoXvp7yk3nbU7K8jhR7xG41FYghxg6kG8k35KGTfN3FACNnWlHGMeG86hzMY6Btgr6wUiiKgBkpdKsw0eP5XMHGTrLCgdybtgBc1GdhUAG4MyxxhxD+2XLgDjKUnnLmHCI2RWnH42CfcvKENtvrIzqWHUM80R687d1zFcOuwh2TMEBCLlvFfWt2Ix6745JyKqjcH/Njl3LFiY1m6MAWq3T2zjTlkkuF30L8r5rQKFDdI04tg99Tr8uRPjrMWK8m6LRD7kdjrUVgOL+9UQIDqAQkzM2xohWPN5j92uSbtPPRu6jAZihoABQ5m6ygQa1KeAh8xcfXBnvlpXXpDmlD0is4NqZb+dfD+LsgiFvFp0dN2s2M5Dm+2VgWGqfCkAagCMxpHk95w0FnOSiI6mzoyEYYQPhFHxzhOEaVuedsblgK0ZNDHDyWQ9eTwZTwCBIaVmgDLlyjZHOU9Z1vimBR7ves4DfAQbCOMLg2tSAn+3Prjq3A7G6XQsgqL4HajnPLQA/Lnw4n+qDAwBRMVdjWqOndiDOc2hzd9o9VDHv6zFjAC1p6yBtxP1/C2C0
BpqX9ZGtTNpFL2NEBh/Joub7U5QBfa0dKBtwK2Zc+g44a6ne82NnruNmBkuHwVRqA9lyc98Eud56jNRWS05L95MHa2P9HTJG1Xkt02+oui9MXfl3Zm9XB80dIpvWVpPdUO2zn/0s3vWud+GmN70pfuVXfgWvec1r4LZIf7KV3eEOd8B5552XlFdWYUeas9naHe5wh5WU8653vQuf+cxncNppp+E5z3nOjgA/AEk+bzab4YwzzlgZULeqcgDsKKfi9WlnnnkmTjvtNFx55ZW44oorVgL6PfzhD19BzdjufOc7r6ysY445Bsccc8zKylul3frWt8Zv//Zvr6y8VY4je/bsWWm+tvve974rK+sJT3gCvvrVr+LlL385rr322h2PJUfqfXrrW98axxxzDJ7xjGesrMy73OUuB/yb+9///rj//e8PAOj7Hp/+9KfxsY997IgJ9LleVstN0+BWt7rV4P2rr74ab3zjG3HRRRfhAQ94AADgTW96E0499VT8/d//PX7iJ37ioI9ZblApOTeKDbU4pzk62APeRONohGk/Z3ANjKkBZnMRQwYxKknJCADdXIAhw3CyQIetb5KLrBZTY4Cgboh748gy8kzez8TxIYCdFhXkD89Rza3jjf5646WteBHmCUAXODK/BxbyfohZZkQjz1NOwGq8DDEihIjeRbiYI1gpsmQWnDwDcL4BNTNus3Y9nRO1Ac571r4VEE7buQCSnMtMO+c550vFrhmANuI0VBCV1olZmwrmNS1I8giSMNQsy7AAq9bW0/FTvqBmHUvZBH4m0bWl06WONt5qnzdg9aWNcYQjYmDFXCd2ILgM5miktL7W5rFgB5AAHBCxU4FcdriqzBTAG+3QAc7DtTPERcvArMrXAqCK3QogM/yUmam5lqqIZNj3nLmuyPUuHKJGsrNwkvHFS89bOgqNo3aZg6iOfLaOaNTvmWKK728BzgUB8NN4A3F4W1sC/o31o7FI6zEb25CFsuYAAHE5g6jJjuQGPK6qfGrwQJwlp/1S57w66ImgTI0atE9fH6/2qHnz5WWAbXEdrUM1VADfAVjs+yQvrKBDAsL3I/flZJNtZ4dsbWXHtRhA3ZyDLNyM5xpH6BgJQMvef0REI6eXg4dY6lHzyREzVfT+kvzK1BvJIXPfJVb5yLgwxv7hgBjH84fMW4VKgszJ0c+Ycd+bPGahZL4A5TjRy7wbZO6NhAL8KyxmNp8N8DEfD5hF3gBjKf+0ZUJZczy2UGI7KaONEtMqrQO1vcbYMtpGCIUDfXTOSc52O8DKeEkugUN9JNSr2AQEV+Oslh1dA0In47UJtALy3G+ZXQpkjIB89TyYgdyY2jvEmOQFQ4wJbHExn1uWJtT6G7UF87pm+W01Vw3WD1sEK6U5Yxuwr8g/rO1jJD0XASlvXx8i5kGYZSEWAFOM4OC/VNcM/tGy40uAiy6ZLHNNrZe2dAL8FYD2mO0P+FeMA5GDbbYAeRkQr661XHsLEgKQ71AB/Nvz2cp1XYNkIcYUILk/a5h0n6Dscyr9irTGGioNDNayGiwn941l4+p4tL/Yko5XY+ov2q72PWAcNNWfc4BqBGirNVoOhLCgcQTSWBVlfJ9ssslWYyqx+D3f8z34H//jfxw04Getzm072dZ2/vnn4zd/8zfxzGc+c8esQQC47LLL4JzD//2//xcPetCDVlDDG4+1bYu3ve1teOADH5hUVSY79PbqV7/6iAFOjiZzzuHXfu3X8GM/9mP4yEc+coO9/291q1vhuc99Lo4//vjDXZVk3nvc+c53Xikwv1Pb+Ww+Yv/2b/+G29zmNviBH/gBPPaxj8VXvvIVAMA//dM/YbFY4IEPfGD67imnnIITTzwRH/nIR5aWt7m5ie9+97vFQ40dJFRswkOKqs65WKJrcj6qOgfaMgaN3ejGYWRlHJGBZDCjzfKd6TErH+2s/J4FAQ2rLFVLI1atQ8Dk+kAMyammuftsbkP72hFS7h3OC6P5eFiiM8l1
OofW88MR/62/d24r8E9zHiA9FkHyh4SIRUDKR8eOOMnT0qwj+hZu1x6WOF3fzc82b555YLaW5SBNbpdBLjvZ/OoGODHw2hmc5OHLefnWQbv2gNbW4XbtSQ/atQduz7H8WOf6YW0XOxHb3fK8JrnQ1uTBrL8uosyZEm3eoNw+mkOo/G5MDkntywrGZjmlmNiD9v0+ga9Ied+S86x6pBxwdZ4i23Z+JvlyNA+c5Nhp1oC1XXy9Zutwu7mN3O5jzONYuN3HgnbfBLTrJsD6HoR2V2672S65P3dLXjltT3NdazCouj9HAT9zXw8AP3PuWeItt539e8yRWJdvgcO6jE4e85Bf17nlFqF8TtfMyJnVDz2WOtyCqa99L30mEZcxmr5lxsmFOAfnpg/Oe/OQzzQnEP/N+SbTvdyupz4CYb4W+ZlSTsrx/FcF+Iqy/a3Z3EnLHjZHkn0vOTKNhDON5Xq04732H73etl8Bo/lEa8fcobSjPe/Mt7/9bTz2sY/Fsccei5ve9KZ40pOehGuvvXbL35x++umD/Flnn3128Z2vfOUreNjDHobdu3fjlre8JZ773Oei67olJR5ZdsjWViPOfc3fpfnbdO3lRb4TyE7d8h7M+YI9IeXpI8lRVuTtG3uk75bfS+NjZZEo568lyuO8b7Okp86lgJlfDQBo592Q592YxusoaxqT9888FolNled6Oy7DPBfEc2k7aBtpDtpQPbo5A6bdnPPV9nP42KGhnMtv5kRK1anUuskVtw1TrHjUc4/9SQVIKWCZxtjq77F5uQjYqvKRDdZ0NjDIynwaUEPnOtvGy8y2vc01Rqb/WvDFqmjw78sF8DK3ZmLLwbSDbbu+zNuXZC5Nnx9l91XtWKgiSJukud4EmyngFyMGbCsNnCQygKYFayvAFshtbqU07WN/SB7LAm0G5z3aD8d/WwPumltuO/ZaAdDH4XuFTCViIZE6Xoc4eu+n+lMGzPS+0fEy5TTen3VuKlDXjS7dHxE5GKCWNj0YqwHAMcDPVQ9v7i9NE+HNw5nHMowgrXHt2HSI7WheW03rqsmWWd/3+PznP49b3vKWuPTSS1eWN2+y/bfrrrsOr3vd6/Dc5z53JYAfwEy/3//938ejHvWolZR3Y7M9e/bgkksuWQkAPtnB2W1ve9vDXYWj2h760IfijDPOONzVuN7szne+M571rGcd7moc8bZypt/d7353vPnNb8bJJ5+Mr33ta3jhC1+I+9znPrjsssvw9a9/HbPZDDe96U2L35xwwglbJvl92cteNshlo+bN7s1Gj8bIEdkxcERvD8A7B3KzYvMdQwdCx1+orXY+wQB9KvEZBPALIQN1zoN8AKwTeITRZ9ljKTecvJ/Yg1vUh2U+u8wmcQ28sktIztCDPSEgRAcgRASS13AIDsgiPAHOERZ9QCBCHyOaETeGRq6qQ6+2Xjb6XeDNXRf4ejCDkPg4jUPjZ4hNxzKfQOlY8gHUNBzF21esSnW+G+dGIfc00t7MYCNQZAZVBLIU0GwNLkZ2mIxJtwozLbEKU64Mk2tOmWlVrqAYyxwqtYRYQJZYWiZ/QUSIFM2VYEdMH6LIpCLJf45Jf3L0vSnPnpq0V5JnEiak0lyp7+RvAoU+tTfFgNgz049PQBxTfhxwA8pNeiQqnXfWYeXNe1KEjaC29S/ywWzhCBmTCEv5fqr217tBTTiFaXzRumhvTscwdc3XeJi7kcuigpGm38n1zQ43Sr8ZLjitb2AATFb9qY7q5+/GYd3VwT1yDNsO6iT1FOEdSe6aBt4hS8r2JgeQte2YlqZO9vxyW0gxxRf2A1irWARF3sTaobasTKlncc+MhrXLPSSg4mQHZo997GPxta99De973/uwWCzwhCc8AU996lNx0UUXbfm7pzzlKXjRi16U/t69e3d63fc9Hvawh+FWt7oV/u7v/g5f+9rX8PjHPx5t2+KlL33p9XYuq7BDurYa3CcOse9AnqUcdT5xxPnh+hAhM0+6cZWl
kccJGa87Ye8pyC75YcdBc577IzGThxQcWmb1GOlcAvxQsO5jCpJJDL9Qzsd8XH6h0p59jCxnWs27PbLTfmxMBTCQDKxlIi3ItF/BLDpu9uB5mBzgMitSZUBzW6IY1woGobL9dOZbAqaONnkMsHKqOi76YnKsjlsUYJmaVQCerguquXu7uUJt2VwIIEmhlkylqmoYAn6J4WTK0e+ieq8uiytTAqWjc4/eF9gapB1IOZLLfZ4cemWyhgxW576fi0qyr8LE887cu1uwDMsgp2HuPG0HZlay9Ku9CQZ5l+s22srM/Mtr0sjrfF/K02p99LCeCJ3sCxGR9k9bmb2nl+ExQSSA82+qe0BPbYvjJODPgMpp3JQggDr/KUkABP9AQT5K6ytd54aY15mrgpSUFalmwT6tf206ykdh+gHl2lTNjkCUxta8n9E16Fjgx2TLbVpXTbbM/v3f/x1t2+Jd73oXfuAHfuBwV+dGaauWCgSAJz7xifjpn/7plZZ5Y7Nb3OIWeOUrX3m4qzHZZAdtN2TQ+gd/8AcPdxWOCls56GcTFZ522mm4+93vjtvd7nZ429vehl27dh1Umb/8y7+Mc845J/393e9+F7e97W2T88ZGBSZYSXY2EbKnjAARS7useccRlGarEYlAESk5uG6sUlR6jIgLkaAygF9czJPjhBwDdTRjEC/ZiGRnyusn31PJSAX/tsxxpU4WIAFmFAMn6hPgD87x+ahzTn077FFi0MEB3nksesLCRSwCMdsn5fUrpa5q0w2t3fTx9yMWfYRzEbp16wlAF9B7QhsJEQGNI6zPdrNTfNECfpNBzJ7blEKfzlM39gkcBLIzyESzDpg4+lvNR6aPXsAqZQ3EAGrWljs2LDChkpM2/1xiyc0Meysm0C8mh8sQ6KsBH+ssATAA8eAiYiydjvoDC/4hCuBtyq57kwUG+Vh8js6JM0TzXloAJ3Rc4YYdEdE4hDT/ZD6gZUSVrAG9fhbk07r26kkypvf2ltJQldOwAPtcZnlEAH1ftn0R3W3GFBeXJ/ytgT6gjKLvRUKpUwZmZIcaIRYBC7YOem+qdKveZ7WMmLUx52bdk2u/Z+3Iqh12FpSuTR366sz3MhZnpnHDl15l9mpntTynY0lFtgO/sxOxcqzWIF11Hxf3dRj5jjpc9X196PjqOFadx1oJHBBg00qxFs96nsrWOMQWrueI8XA9ln3FFVfg0ksvxUc/+lHc9a53BQC89rWvxUMf+lD81m/9Fm5zm9ss/e3u3btHJTAB4L3vfS8+9alP4f3vfz9OOOEE3OUud8GLX/xiPP/5z8d5552H2Wx2vZzPKuxQrq00kINClwF8+KJfWwe4d8RjBQACFaoDjQQEzDyBuk1mMnVzlvUUZlPo5pwbFnltFEPPctCABEFlEH3LHHNAyXDxLZSpbpnVGpikY7Q+W2nMPko4lII+BJl74wAEqies0fxfCSwlVl4fG88rMKh28g+MHCjkMZXsvCfv7ZeRG+ZfHfmOrac2FDkdw8OBHe//Z+/Po21JqjIB/NsRec69772qVwVFFVUgFIMDQyk/RIaSQWwU1F40Ir0EZAHatNAKKgLd4Ai2SwV/LrEH0NZGaBQaRVEbJ0RoaLBBgQYRkBKQ+oEgFOOb7z0nM/bvj4gdsSMyMs+59573qm5Vfm+dd8/JITIyMjMyYn/721uIRt2HRiehvqOO9McyThh7X5SoEgskTj6DI4oe4SfvviEMhvrU74ZV11iTvgPjqfhXj6dEIUkmG4dq5Wo53jQlOS/vcAkp2fVDzOrrsXTIcmJqMrHjNEbwbUmpXYYctor7L5sDCeEvTn7SBlKGOEGaBhSc7nQ/JSXZwPhJGV3lHirD8UKVI2Mj+R7nn4FYIzAMfNh9cUCF648j9b2bOQSEtpqFcbgPg+xDIevoBD3o+yFco6iMU6Rsb7eB27/qXBXX1XcqCb9BVSVRen8MbDIGua7nOeBAFYd1bDWNqyaM4frrr8drX/vaeG9M
uHlgIvw2g1vd6lY3dhUmTJgwYd8477TvpZdeiq/+6q/GRz/6UVx55ZVYLBb48pe/nG3z2c9+dnAwCfhkscePH88+AFQIu0SixAkOkDxcnQrFFIw8GXlUGimKSXemNtMT364vD4xKvWaeQkY2szyEZDPzZJ+o+4LCDyEMaBaGUNUzHr/0ApdwQF0IgcVOhbWrhfpMYWMMAXPrQ31uNQbbNoT6tMaH/bQS6rP/kTAsMTSLmt117MN8St4IFwwQbeevxVKFDOxgQthVCYsZwrDOttSyrRACNPydbXmFXVDZ6TBQ2UeME2V4z2Yecv2E8JU2lZN9mu0YwjKGnJxt+xx+IUQhYqjYnPCLITzDX6dIlM5xoQJDNDxKbqR4P7MYajhN3DVxGMrQv0Vt2QXCUT6Z0RPKIKTViOqjw0v6tg5tp8M3NtuqnbbB86O+jeZHw7U8Gj5HYju6GM7T79fB+NCXzoeMzENPhrZSxFrPsBgMnTGsmwpNqkOESljNGH5NHUv6Dd2nrAOt7OvKcl3KpyN/JdSttLleHkPH6VxT6vpL+NbyoxOlO6wm/NY6L+T9qg49JISmGNXK8/bnpMKU6hCyZAqjpCo7PBPlB+H8S1tINDQXf0l/JHRaV6iLdP85YnQEkBOVun+W8ynDz2oVArv9XYBDgjI85O7u7oHLfMc73oFLL700Mz58y7d8C4wx+Ou//uvRfV/1qlfhNre5Da655hr82I/9GM6ePZuV+7Vf+7W47W1vG5c94hGPwMmTJ2Oy+8OC8zm2is9DTYUD9a4JtzUVYw3qjTsC8d5JaM8UvtCHR3fxw65DFkkhO3DFeWAAuaNOcipheTdyGp/UnHDiOxXqfR6cdvKQ2/33lX6flKGiszqOngF6jgixT5OQp/Jdch/GsKDLQKp2iSgYCqHaO2hFPbaKZAVi3QbLHnSoKt7b4bsO3SnH1yE8x94XI7QlAMQwj/47YsjBrGrUJ/wGz33VsoLwAwCt8Bsk/EqUxBvQJ3q0skuIPvWsyr2en2tSM8b20OcwQDym8KGIhJ8O6xnrqo6VhQvV7TN0zsU5Zu/g4r0rxF+sthozCJFmTQpPHHnD+lF7dQfqr/JymfyUa5DanrMP0L9XYz3FuW0oj986z7CuR+XcaoScDs0M9NumDOUZDzlQlpRRfspj6WPWsGr9zRGbHltN46oJY3jgAx+YOZdNmDBhwoQJE24e2LjSr8Tp06fxsY99DE984hNxn/vcB7PZDG9605vwmMc8BgBw3XXX4ROf+ASuvfbaPZcthnID73kNIIacrClWEAK5MBOWQSrSmMZPpsgA6DKPUZlk8XIRjVDcKrUf4FV8wTM9hqcKYTtJrRuE2o6sTeq1wpCceW4LZMIX1FgEeNWVcWDrYE0DY3w+vs75s+84hEch75lvOz8ZbIyNRvw2eI27YLwC8jm/oPSOLT1IO2agAzruYInA1sCaYAAzgDWMzvm8go0hzGZHU86faKyXEEf9mXbVm13Ll13FqBDKYvltVTaONQxgPmyOz5URcwXB52HjLhkSdSjPmrov81QPNcgNMel+dkwAI4R8hFf2MQDDMOzVmTAczi0PNeaVfuOmxXxyz9m+yWvXwJDJjGAAUjsOQdot/Ox0GzhZ5jIP6rgrpfoREPJGpZwsMdQb5aGnspCvyPMqRVVHOKAcTs5H1HdagVFT+cX6Ir+moh6Re0AUflHpB4Z1QSlS2LMdtGHKq+csASLi8HnpvOv4kP1xU9xSz4jFnBuNote6J/eN9K1EMQSe72eGPUu04qBmIEyGIR9aj8irLl0IbxvVfoXSLlMlFM99XBYPtqZhOvwl8uUxkBwwwvFZe98Xx631X+cbneq/z1f5QD/W/vOf/3y84AUvOFDZn/nMZ3DFFVdky5qmwa1vfevRcJXf8z3fg6uvvhq3u93t8P73vx/Pfe5zcd111+F1r3tdLFcbpgDE32Pl3hRxPsdWHHLFiVI39vPsYk4oGS9YeIJv1gCdo6iA
sSbklrMGcwPQ7hnQ8lwkqKhbAMuFH1MJ0afgFX8+EoJW9fQU/fodxJTyNAspEJTkLSvyTr2fkwNDHvZQ3svEFIuGMuDLu1C+r4f45vR9ZlACjbIOsqec40DIZJL2AKLS0dfbq/9Kp4RMKRULGeith97xsU4uES9wcRw22PsE5TScqx9Th35W7/GSmK2plmKY1RVtqsN6Wgp1LYm/okx9nbXyXC7haNjpisKvpnLrKfyGiO5S5afUrTpHpXaKFKJJ3p8SxtOT834cnnKsIRFOUg+lutR5gBnAsktzrnQMhg0Ea1QThrJiyEo5/xKVc83mR+o5p24BdMtsHiTv7fS8UhyXOBncOvbPtaPqvaSXrBJfyXMsfx0ntZ9TD3nZV2T9SVFPUVtHRwmXHDvj/WFMFuJTQntKu60aAtRC2ubrV3dOcawef1P9WRg5jq7PTZ3bO6xjq2lcNWEMm8ohN2HChAkTJky4aWHjpN9znvMcPPKRj8TVV1+NT3/603j+858Pay0e//jH45JLLsFTnvIUPOtZz8Ktb31rHD9+HD/0Qz+Ea6+9Fg94wAP2fCwhDiTfmYEnQBzlg3Eib5iSyUTHPveCEyIFSJPOYAyRvCQUyMOe17nk5NPQBF4zi0Rer95BISjrslCeQmQA/RA24RjxvDiFmEM4B0I9z5/MwAI3BEspRBIzYEI+CmYhVzzZpOc1ulWFNJGJchcmtTVyUO/fOV8JYgCO0AJR2caB5GhMCA2oQ0vW1AarjFUGyvCevKs5tFWPDKjVOVP15CEqOyGUo9FQhwhLhFAtnCcwRviV7cYgNQ1m9oQLq3tdDJMy0e445ABcY16qc/UISWjCDeQNWYloEqNEOlYwumT1TXAMsFPh0lxBxsvzVbSBCeSmhEGLxjX2xJJj+PsaCIaN9PzqEGBi4NUhVrXXu5yzHJM5/ZbzLY0XYjCL5yfGYiEUowokqPdcuj9ax1kLCckoXszeQMbeYhnCuAoR20HuBV/EOsaYMegcVDrMmZNrzQRHHLJ9hnw8sc3Ssb3Thdzv3sgn3w0SYScoib7yftGG1FS/Nc61RuCJUVWvL/8O7T8E3eeuqkvbesOjOIvcDPHJT34yKsQArx4bwvOe9zy86EUvGi3v7//+7/ddl6c+9anx+9d+7dfiqquuwsMe9jB87GMfw11wXUroAAEAAElEQVTvetd9l3tTwIUcWwHovXtJ9dWRmK84Zsi7SAiEmQHQqXCe7TISf57wU8onQZl7oKJqWvkcKtILZGKYd00EyPs5nrJ0FXpZfEfn70kTSBWSzm8A2qjOYV/9zrOh79UjRSbjybNyWdF39cYuLBEpujh2oTL8eSTnhJgbIN409tLnUSDzCjKieh4DOSZKwk8r2j0x2393CGJ+vgGSRYdi1O/AYRIiL0ePgWKZA/sOvmvCsjJX4yjhByQytULeapWfRISQ+z2rK8m5p3MxYX7kxyCoq/H0HEURizKu14SfbjtfLnk1IZAUa25F/tzimc8c7rTKD/K8hDLkXlXjOF8XGVe6MHZMhJwf69WJP1TOawgukPl6HCPfXepcRscz2XhT2qkM/yqksTxrVrWVar/YXxfnNTaaKuu27jBTE34HgR5nj2Hda3KYse7YahpXTZgwYcKECRMmTBjCxkm/f/qnf8LjH/94fOELX8Dll1+OBz3oQXjnO9+Jyy+/HADw4he/GMYYPOYxj8Hu7i4e8YhH4KUvfem+juUN+0k1lSaraaIiISwtpRxdfrLAsAwMTn/U5Kn0QAfgST1BoezTir1qkvEm1R9QBg45rlb2DRm/5Ccrk1VQLNby/DXB+GMRiB3H0UO/Y4YzFIkJMW6V4We0oWxmKJB1lIX4ExKqVAh61Z8LE0p/kaRVkwe3r9ss/LWFumyviLvoNjK5gSFrP9l8yJsaQtT580rtVSeyxvL3DRF+2uu3Bo58UMyGAwDogrGhW7F/DXKt5NkB9MQ7V9H6danC2kCwMgdK2U7IQ2hqQ4kY
YawJBL06dwrbedVqem7kWNHLXSk62nAtooqDU34V7+2e3IvFSCUGsSEjBCM3QpZEn4TtZAYWnYthO3dbh6Vz6ILBd2aMD63b+L+GAEkkxeRP2pqUq1Geh0jsjqCmUOhtE4xdloJKVD2fQvxZQiAgc9TK9dc21RFAzC1aEnx6n7g8q3+xDYV+aeCEBlUUQG7ELJ0BynMYMIJHgzV5KtQbGLusTDmuD1voFUzcLqvlnU9IyNTzWT6ALCzkKjz72c/G937v945uc5e73AVXXnklbrjhhmx527b44he/OBqussT9739/AMBHP/pR3PWud8WVV16Jv/mbv8m2+exnPwsAeyr3xsAFHVuFkLcMpOdJ1gXnBk9Lhecb3oGhc4xZCJ8X8/h1C1C78MTfchfU7vixStsC7RK9UJ6FMxVZWw1lWKk0hDort499NUt4ZO699+M4B/0wwrJG3jvEFBT1+fuwhAESMQh4hXfwNBMHiR6Ugxc76ue8k80Kwii2gZB6ZAB0gFOObKLUEtIEAKgSjcIYDHpwrSJaNRkxQvyNllE475SEXzm+yKoeXuXiLDL0uqiRf4NVUoTfvlGq/MKy6nf5nVXCDP/VbRbev3o85tS9K+cTVWUmKf2sJuf08XUuwajy42zco3P5accpgoznQwQF5+rhKstzLedChbqP1ZjP6j4hlJXCsob+iWR80MJan2PMR8+g6JgmRPEQ+bdXiOOTdlwbKl/3BUK6Zu0UVX/FPVO5H/TztWoIoO/98jHY7/2+SuW3ar919pVtb4yQnze1sdU0rpowYcKECRMmTJgwhI2Tfq95zWtG129vb+MlL3kJXvKSlxz4WK3zOeEEhpIiambChNaKQUomgBSNAXpyOxhiRlCGnxLDlDE+f59NOcRc4YUKoB5KacDLOfNQH4A2VmvvbzHoRMO06wDjyUAbwlxZawLJB7Dx5JE3pMiEVxN8/os2ssg6Meh3TsgsimqnRMBwLEcml0L8GPJkjIRSFONM5xjWULieQjypZhtsldAe5eQZ4tme2laafpCE0OfrOBo3S/Kqtiy23QDhF+sJQml6HLM9Zd7DyJUG3rzHqYzKfHTIjiHHlEdJ6/ZIqWbrk+u80KxNsjaT5Xk79NR9iqxnYhD7VhJiyoQ2kxCPOkyqVlt2TvL25Uo7fTxrvIqNo9w39ROWEHNfRgOY7EsmO7Y/Zjpu2yV14bLzhuZlx1g6h53W4fSii8SfIZ9L8+K5hWMLE0JsAT7slA0EoHOhTZCTf0PXZeg2KlUKnkBLy0y8lwmW07UbMm/I8wlgtD7l8wIMG9aGFBrxmOWKdRR6hXG1VMz0jrGqrPKvClcGViGhA6nBi4Pnubs54PLLL48k1RiuvfZafPnLX8Z73vMe3Oc+9wEAvPnNb4ZzLhqc1sH73vc+AMBVV10Vy/25n/s53HDDDTHM1Rvf+EYcP34c97jHPfZ4NhcWF3JsRZITVRvlw/1uDWHGQGfSmAFI5AlRCOtpCJZb0HLHq/zahSf8ds96hV+7BC920kH1mEocqGbzkOtNqXoUKZgFdaiMl2Tc5QIRwqGe5bu6htq6LhJIalwkREql1xA/Cf8O8+8aF95roiyPxAArkkIct0zjIzcoEoyMARz8W1DqqPs0CTGsVVCmAYjj/qzC2hObvtqvJPwG+ktRGsbxbaa0qozU1lEUhr9yjcrw2auunRAsmvgbPeSapMFYzRlyvDVIiJpKM7t+lXdS2W4l4WdM1mZl2xgE5x1D8Z7RzpA6/2bMnSZlqFCrbBq4MLYRZyoZ82tIKHIbyH9rCBYO6FrvAMBKvVaeVzyWjecmz7/MiRwHdWFw3jKmgZsfAbU2c+g50ph4bahbRNKVycCaBhzagwN575V/yRFKmkFHd4hKyeD0kM9N0g+t8BtyyKuRYyS/a2OWoRyr8VnXrFn9jh1S8pVb71exd1DCLyOcy7/qnKRPvBE4v5scpnHVhAkTJkyYMGHChCGc95x+5xOSoF7gjSYp
NJ2N070ckXziMnDiHhEIPzTee5rtPHrCZvnmdH6FIS/1GvR2hQdwFiJJ78JFjrowKWJyaTIVQn5aY9Cx91O3RAPKvhTypjTcyyS2o0T2ESMSL44BMqF8TSQVdXaAzLTF3A92HA0SEm5S9hPqdTCZvFacaZJNbd8VRgq/be38c1XAECFakl2a1CpD65TQBoVVCM3kvxOCIdGTYOlE1iurtr2hYfKwpvSqhUYrCT5ZtyqcKQPJGBrkuszJcOrbORh51DmXCoBll/LpCSHduvy43uBKcISofpWzM+SNVNH4ovO0FV7V4lEv5yfK40R+J8Iv5Xvk6CUPo0jCQBCyMgAT+WfKyKWgdEmEJNfnNIZarxPVjtKOJH1BMoJR5bpn5a4w8vSej8KRIKsP1jPU7htrEH69daXKRhlly3xMYOdDekoO2ED8oaIWP9+4UN7o5wN3v/vd8W3f9m34/u//fvzar/0alsslnvGMZ+Bxj3scbne72wEAPvWpT+FhD3sYXvnKV+J+97sfPvaxj+HVr341vuM7vgOXXXYZ3v/+9+NHf/RH8ZCHPARf93VfBwB4+MMfjnvc4x544hOfiF/8xV/EZz7zGfzkT/4knv70p4+GJb1FQhRj4R7n8OyIYscaSqRV4WgzC0Z+ahcqF5X/y4udGNZTQp37wxW5kE2u8ilD11UJpnXHVvsA977kqmQENZWG45TbyqHfl+viJHQ1gHzsyAZskPotMkFCWHf86hFzAFjyVhN5gTKFN/c+26sMK0q67vsqML+2kbyqbFobgwFl1AFkY8YS5Vmvchs58F2l3zvhd/X7Kug5hKjggCy05+juJI4/6bd/lpGp/HS76eO5MO6X8b8oZ2XcK6G+hfy3QvgRPOHXtTnhV75nS3VfkTs7y4Udxn3e+c3nZ+cmfxasSe9uOXZsc/axMsTZzBNzww04RvytQub0qMoYJYlr5HC1YgOEcAVSb/kei4jr88qcLxWdPqOS8EwbOdUGhWNrud0FxmEdW03jqgkTJkyYMGHChFseDjXpd3bRgRbJaCRzn1n40hGA1vmwU5wmt2kiQ1g6YG7nYLOI+TSiIZeCsaWZe0LC2aCcsxnhBzsHmwZsZ4DNPVJ7oXeIUlglPVkrQ7ZIDWWGVoZzCeQelRNnF6gQTTSGHH9yXFK5ApuiLnm9VWMrr/EY4pKTB7ZWVMlfrb7ShE9q/XxGKYRWB44e8TVCIRl3FDmVrc9/p5/9iZQm84CclJD1w2EIhwkvvywnEWsQtV8e+qd/DtJ2FAghQNpKytnMJLHjMQNXOoY+p4zgVKRevHXRzwGpIddT1J2MYHxwDLKIaj+tjujUgR2nMJ4MZOE1S+UpEIglBG90qBDAhmIeqobgyT4JRRUr659tsnOvykC6Btm9wdKe+fnOjMHFc+DozMKFjYwhbFuDWZC8do49WU6MzhFMUONSCPXpCcqwr1LFDZF/OpSXrM+fp7AvlU8KZeeCYp8haKI7qTpzNUD5XJXllmGxRonAkpTbK8YSkfoN0ldtiJP3hVMhuNo2qZgC2cddd7PO6Xe+8KpXvQrPeMYz8LCHPSyGrfzP//k/x/XL5RLXXXcdzp49CwCYz+f4y7/8S/zKr/wKzpw5gzvc4Q54zGMeg5/8yZ+M+1hr8cd//Mf4gR/4AVx77bU4duwYnvzkJ+M//sf/eMHP79CBnQ/PaRrMbROf7a7SvzYE0M5p0PJcJP68yu8ceOcMeLmIpHjMjby17b83s+RM1Wz7MYwN4dQ1GSZqN00ImpwclBCl6xivDRDCR/v3kFak664q+170YbmTkd+a4Z2evMHd9+WWkpOaFVURJRWWpRSmHWRAXRvGoF34KySe7lRzRaZWx1A2Dg3vWNuAFfnBup8rQiXq70MkxBDx1wtfP4aaWjOQO7nzVd9xROfy09WQd8lYiM79knp7Ufb5HQYUlDVUooJwMZ8oVX5AevfGYpDn2pSXf0/pR0rlJ8cvxv1+TAUsuhRFwSFvd5+X
24+n5lrht9zx90636J+3KPlCtBQOUUmYfO5s59JcAyjG6ERwjuHIYGbnyEJhBlVhJBrlvNoFYB2snftzI0IXnkUghfrUaj9pM3/8NFwyRa7PEuU968tJYWflHmJO12YQod+L80Cj74MmXwalNkTKySzOgqnOaVyIogqrxnl7xdAucUyq+rAyVHuqb7rvs5z2E9bCNK6aMGHChAkTJky4ZeFQk35LxzFEns6H4L1P/aRqCUXOmDTDit6d5aSGUv4TNo2feMwAY23mlY6mSRPVZtsbO5p5SjAfytJlxiT0ptLsBrk3cDAOcDTgUN/YUir+nDL2dMrgAwSP8VCmCheFbpnVM9Jp5exVvP7JwMhkK5xPy96jvTMhd5qB/xsMglKVmrpOTr0WSsZ7D4dt1IS0JI7KAC8d1yffZdlA33jUC0GoQlSi2CYvr7+wRowN7a+hPYqHyD9fTlq5CX2DtNmqKXRP0acIHebUlp3j2L5ljkeBMX6dJYIFqbxt6S8DYNL3QwoqJ+SRbNe5FPYphQbrn6c8/1QYveSvkDiZd3hhkBwziIgRZ2YMlnCYwcAwYwlvJOqYYQzFMFgzSxn5ORYqs0S5rexShsWVbaiyrR07TrluhaHSh8gKtKTjLA+jA1B2xxrrGFHHcvrtGfqGXGE8yhwwKiE9iTlX+AExr9+NgcPqjS649a1vjVe/+tWD6+90pztl/e4d7nAHvPWtb11Z7tVXX40//dM/3Ugdb4kg18JHCwjhhomg86xZQ97oLiE9u6V/RrqFV/ktFynHpYRNNyEWgLGgmBc5GP4lzJ9AO0sJiVUq5DTYRQcNDemPJG9ptrwg/rThX1De/9Yk1xuS/RRJYGQF/LhCoh9oo7+ErpY+mkIYTrZ+LCqKPzZNOIYL4QlNFhYvC7uplZBqPJn93gB6oQWNyVU55bUp6yQEE8bHSEPCp5IzcbzivXZjY6jth/IpFu2Xqfz0ZkrOJQo27zSjtgFCRAOkiAJUiXqiVJdd/KjcmMVTEcOiB+cpi0DAZYrf5ERVy58thF8Hn59QcjOXYfUB5RCElGPcFhFSoqpQiHJ2/oQ7348RQk5AeOJPj1Wk7UpkTnq9tf1tSpROTHINxhDnekL8CdlbksBARgL36iXHHCD71s2rtwlCcKiISPiFe0XPfRlCAjb+dCel354wjasmTJgwYcKECRNuWTjUpJ9MQsUcI+RPx4ppgonhaAA/UTYEgETlg2QUCRMoEg9qEzyhw19q0uRCG6S4SWE9s9A0QPK8lH2UCrCcvKbCnY+3Fyc8fgLkvbIpUyLGCY9TKkUpw7eG306M0HpiXxphlDEtm4wpb19pJxKvXDKY2cafm23QGlIewV6p1BkJAURZ6Msayomv9obttZOcIXN/8lgj5tT3MhynfJd1etIzNKVcFXWnhr3O5ca3T4acmC9kH8FqpYxV56NJPvldqvk0ySd2KzESVY/NgbA3DOMYTbh/bDC6SPYmUUWI+ksgyswUcoqz3H6ahAQS4WWDo4A1PhRdE/7OglGVgsoPykDlKxIeO21IlbIpGW8svCHZ5+czXrUaKrLUqUGJcHRmMbO+PjNrksE3qvhSiDjtqKBJvRrhF4+hykhlI/egHggVnM67sl1Jhiovcxv6Ogoqaxh/fxgKnvlc6n43gIN4fdfU1ZV1WcgpUfcJ4SdkRiD+biyyb8KEjUGrx+QT+kVLPmKAhe9j5B1kibyqp93xSr+ghOWdsyG0Z3pGMqWfIOQTjNETtHpPP4syrlK/a3mQJeS5JzU4hkxmIMvNGsNGh7HhGPFXM8rqZRIeWhxVhsgnVttQoK0sGF38Bd+XsgOsKF/8+I/ZpbHqACL5V0aKCN9JryPbH1fGio6r/EaxSuk3oP6rkX+lWlwv92HBw7owbtRE4CZ9RVahJCDX2kcTt5r4M0X7raGcFLLPh0kHKDhByjqgr/TLVH6h7JzwYyydjK3S+ErKlLCeMY8fwefvC8Q/icJPh8e2jcoXaH2k
FDtP4dkjyZjGenmb+TGcKGVZt71zkWgURX7Mee46/9y4Fmwa78iI8GhRTvQZqhN/QOpDhrDOPbDSySk4ofoCw1+r5mQyFyVKfWCtHxwI3VmSfdlWI+MpUscoW6eMXDKGuG0WOUHNZweUfuxawMzHC58wYcKECRMmTJgw4RaOQ0367XYdaNlFtcw2+0mIIZ+ry5gwGeEwcQ3u1o69UV4MMzLJZrY+9OXM5Eb9oOhgKOLLzjP1HvS6YtIVDUVCKmkbchmKhwgm7GtDCEGfjyWoSro2hbSKOxlkOV5GjPn7tnmIQSKSo+Qn7GHCSYEAFQJw1jRe/SdEjBMv3ZQHSM58aFK4l3wSQ/PKMjSnJvjqYQhLw9L+qIl1yLehsofCidaQDJap1H540+G6rGrjWluUqr4a2aeJviGFpw1e2qLSlXx2hOSl78IDbEHopA4VQ6AOI6vPzRtT028Cxfw1c0spn40p+gNB8ZyxCt/rOBHCQsYxe+KwcwyyhM4BjfHGt84BlhzOLjs4J0o/wnZjMLcmKRARwo0G73troHLliCF4yEubYn18G1Ns69QGyI34QK4WLvqOXt66so10W5nGOwQEY721cxhjIJZ0ZngCVKkP9uPUrO8PbVBnMv6SSTV9YKtU52DIrOVDzc41Fli2RU6AELMP3ymKJU30iXrJWlBTkMcXAB07dCtDlx6s/Ak3UwQijtj58YebpTxw4d63WdfoyT8CYLkNYT2XoKD4c7vnwGdP+bCeQBoEtUufhyvk8qNmBszmPj+ynSNGRghEVN+5x+WkyBDY63ZMUCeyeufE0IfGhxUn9iHGLcGHVyYV+DK8fqyhasjqoRDESYWklilmyJMNUlYI+s2IRIYNY04Y6adbEHwoQ9+fNSmCw9BzmfVltrd6nbx8TJWIE2F56fwxNB7eJGrvjlU5Yf2dsD/UIlJUt1tro6IW2olG7mu1XS+Xn3Ky6UUhAEK+YgpjLIArYwUZY5Qhv/XYXI/hZRxfEn4mhKwV56mGAHQL0HLX/w2hNjPFllLnCuHHdo42zNWWLoVtlzGkvvWE7IPz5ex2DluWMDOEI4tz/rjdIo7XojJfVHCm8WFHQx0sGcD4UKFRORiOWRJ/ef7IvQ1gavemIcqcs3r3gtwDOkqMvh+k/cSxNFy7ITJukOgbGt8NRLuIVdF1Gjimd9yrIxuTBqcSYo6EbVYHSVMBA3ZucCx3PjGNrW5eeMELXoCf+ZmfyZZ9zdd8DT784Q8DAHZ2dvDsZz8br3nNa7C7u4tHPOIReOlLX4rb3va2N0Z1e/j7v/97fM3XfI2fax0CfOpTn8L111+PBz7wgTd2VSZMmDBhwoRbBA416ecYWHYOjgjGALbzhvQ41XHeMNOEWYcDood3DP1H3nPQ5+kKij4dGkkgy4K6Lar7ijAqvfCQGYHDVdWYnjOKsQcAIN6ywdOb2YFkokHcn5j1Ci4M+mpZ9fdYeTKZC3k2vHGJAWrjZJNEEUkt0MxhgwetqLkSMePJHfHeFfLnoChLGFLzCdlXEn0lqZWVra/RGhYdM+J9P4TyuPqYY6WJogBAliNkFfZCqgpqhB+Qh++sEX6uODmzxsFZrgl5hahh6j0/rK+xInEBIc9Yfc+JNHEWsGJwAfrPgCbvreRMaaIxrISo/azxxmOfj8+TgTCMOQy2m0DqE2ErEH6NCjkM5Oq8kvATo1E/N18i/Ey2HPn2NSJLK/miki0QWKI41gYYvX30Pk/KaH9P+r6STANLBmKfGwvzuZKERjo3ZTNPxrExlNuIs8SQ8Ua1E3HRPorwiyEKdTmi1BAVU6lmmjDhJgwyNvUDQMz9lu59l6nHrGn88yih/LqFz93l2pS/L/yNUQdqStiQ06sXMcFXqk/Y6/55BbGkDdypnw2hloPDhndIUO8VUoo//yral8J/CFHBThTf2x0nJxcb3BYAlefPCnnBwcnBAC4n2BiuSs4JJNfh2hjpXwfDelbIKH3MVSMA/64bdhraC4QI
jJEjsB7xt4rk288Yavhg6v4u220PxKm0mRB/Zfh7II0RaoSfQEgjVhETZO5UK8/nZCYV3tcrfCPhp5/38lwo3TN+DImM8NNj+LhLyPEsDoSLjqNiUQg/apfgZtZXr5XKZePbnIAsT7OcmxB/9fYevwlirkkdzrMck41Bq3nL/lD6SKWYHCPegD0QfrXnfWiMpRxky2UyI685eJXnL317T+FXm6eeJ0eCCbdM3POe98Rf/uVfxt9Nk8xjP/qjP4o/+ZM/wWtf+1pccskleMYznoHv+q7vwl/91V/dGFXt4QMf+ABe+cpX4hd+4Rdu7KoMYrFY4PWvfz1+8zd/E+9617vw3ve+d2NlMzPOnTuHo0ePbqzMCRMmTJgw4eaEQ036yaRlGRi8ZeeiIX1mfUi9GRM4eG0yi+LGe1K3IVSNNQZk5n4C0mxnyryS0IvEVSfKNbe2WqoMrzMEISN8PizJ+xUMPo03rEWDt5CBQDL2IBloMmVK2D6SmirfVDTA1QzgwXuMjAWM9fkNg8qRyIC7QPgRgUJeQwq5AtlYGJmQ2nlS+6Ek35InbY1E1ajZshxzFn5Lh54UQqpziXTtHDCWi24V7MhFJwwTQqtQI/pW1UfqIuTfGPGXTf4H6qPrUE7U11Fm7YXwM0LYkzcYlVVyvkCveAiMfc3w2iP8CJj5hwJAypMnRiFLPqSnIYqEm98xeFTbuRdDiAEuGOQ6TsQmcx56ipCe3XRficNBoqgu3trKDHGlMUvWlYYhCeepDSWlsk/MIFlIUFSMS0MGHR1SSYXFiuGWJD9OjRztlt5oHzz2yQkZ6GCbbSCE+vRN4m9QcQJYJ4+MqEK1TkUMkf6cw3ViB0LrW6Nr/fGYQGxi3YXEIGk058/fhz9WBm4h/HrKyILwE4hCSpN8xoDaDTIFa8Kd57wz5XM94eYDN9v2Xzg99wwAxqvJuF14BySJchCeJbM4B7Q7oN0znvhrW3C7TGE9AT9GaBde0b08A2pmoGYONDOgSdEDSgO3PHuryL1sH/0TyenDhH6HCVEpBMM+DCLgI12GQQoFQtCPL8IYhZPab8zoX6q3a/CH8e82Dn28VkGLM4iMBa1pwA28Sol9Xmd0bXJek3dY6SKzISP5UL7nMYVfrafInDZ0cRgeZ5Qk4DrjETlOjfiL5ZR1GLmmtet4IO5PEyaFE2G2vmzXyv0t7WNDmHQjAxO9nXIa0vuVkDG6gyj90rhaz2VSPmQf1aAhgBY7Iafnjr83u2U+ZjANQFZFTvHPfMspNUDrwjGRR3GI4egRyFvyc0BP+gXlblAZUreI+dazsKLkFcziqCjveT/PCmGAOd1fesy5ptgzwlZ26I3pkEICZ1uH/H0+iI0aUww8a10YEJXkbE3hJ/NHyF91fXp54ss6hX1Y37tSL4Eir6VPslQh/uM7RoXULxxMMrVjcZy13wcbxDS2uvmhaRpceeWVveUnTpzAy172Mrz61a/Gv/gX/wIA8PKXvxx3v/vd8c53vhMPeMADLnRVe7jrXe+K7/7u78ad73xnPPWpT72xq9PDb//2b+NHf/RH8fnPfx4A8LrXvQ63v/3t913er/7qr+KP//iPccMNN+CGG24AM+PlL385Hvawh22qyvvCqVOnsFgscNlll+1r/3/+53/Ge9/7Xpw7dy5+HvrQh+KrvuqrNlzTzcE5h7ZtMZ8fLNTyF77wBcxmMxw/fnxDNZuwVzAzPvaxj+Ftb3sb/vmf/xnPec5zDnxdbwlo2zZzEpkwoQQzrx0p5nziULvKXTRrcHRmPcmnZq2O2Q/KOYWlEWWfg59Edhwmio6x0zrsdoydjrHbMc7Jp3U41zqcWTqcXjicXnQ4u/S/z7WMc0vGbsvYCZ9F5z/L4iNhcVxQQMnvLpCOXEykxQjkyTBJYp8mO5I/L3p5ipdsLfeHoPDU1gbrqFIRL/zy0y69p367BLcL77Hfei/e6MnrWk8IdOF38PKnbum9fNuFz/HjWlg4NATMjJ/o
+pxq/q8NIQ9tIIDEmGDUx4djRMwjAgTyqEoacbgnwnlLW4MzAwYDvetU+wj0NSw/QkToT3bsASJzP4RfiZwoomhslA+pj0H9o7eRsgBkYUK1og3IDRulEdQYyj5++6SyE4wRqczpGRbjk/5opHx9/tPYdH/JvdaEe6/MtcTwRgT5dNSghUHL3pN86cQYxRnhl10DQBnBUsiruSFsW4Pjc4uLtwwu3jI4OvPLZ+oj97coEyMhSgMGxwrhp+uyL5QqwILwk7yH5NqUA1G2kT6gCNeU3YPqUPLc9gxeujryNz7HibCP178wBJUGMd1f6n6TJUSzredkHQpTR9Y7QZB8mln+mW/DHDkG2j4K2po8UCccImjiTY0dKDgDxFxZnTfUUheUPSGUH2kjsiLFqVS+hmcHIbznIAong7VCulXUIRTD6Kl+R/VJEu5Z1NUyHinJu/L9pxHDRI9VrXhnyE8ZHzKLY5LKTRvGgimPl+qTbBOjUAx+YuOZahjPKpmn9x0re4+E3yZhBt6LY+g5Da1R/tBxqNg2HWSk1PI9E5bFv7W2139HkBE7RL2Prudexwdxv2J8KONvQ8gdZFxyMiSuhGLU8xcKIdPVWA9AL2x7lQyGH9dJ6HaWfOt2ns99Kk6QWskf2w5pHDt8zsOfobaTMV1J+FHoi7L2izvmz7l3qlLOESH6ijipikJyCFUHMNW/xzFfGf0hG8/l29ciIVT3BfKxXnmuQ981pF8hwlqhnSdMWAMf+chHcLvb3Q53uctd8IQnPAGf+MQnAADvec97sFwu8S3f8i1x27vd7W644x3viHe84x2D5e3u7uLkyZPZ53zhLne5C2azGf7sz/4MZ86c2UiZXuG9mbf3ox/9aCyX3unraU97Gh796EcfqLzb3OY2+NM//VO8+93vxuWXX463ve1tByL8zp07d6D6AMAf//Ef4573vCf+9//+3/su44orrsAnP/lJ/Nt/+2/xxCc+EU9/+tNh7eaixLz73e/GM5/5TLzuda+DO2B44rZt8e3f/u34/u///o0Ysz/wgQ/gkksuwV3velc87nGPw4c+9KF9l/W+970Pr3rVqw5cJwD40Ic+lCmAD4pN1g0AXvSiF0Uy/aD40pe+hPve9774qZ/6KTz+8Y8/EOG3yf4DAN773vfif/7P/7mx8l74whfiC1/4wkbK+sVf/EV85CMf2UhZADbabmfPnj3wsy5wzm30PN/znvfgNa95zcbK2+Q99973vhevfvWrN1IW4ENw/+t//a/xwQ9+cGNl7geHerR86RGLWx+Z4ejMYhas9x0Hss05OJe+C9Ej5ErHnqDbadkTeK3D2aXD6aXDqV2HE7sdTi082Xdqt8OJ3Q4n4nL/Ob3scGbp9zvXukj+7QbycOEYrQNap8LV9D4cP46TRytD6pmMPdHgE0m+YOjRBjpjMuNPLSxQLyRdIPu4XXgSsF3mn7AOYXvJXaUVQZH465Y+l08g+dAtQO2u/yx3Qcsd/2l3QN0CjSIAhZhpTE7S2PA7kjmKEBQScGzMoRWFotaMhB8QSZxIyg58fFnjpKBjrzhddm6QCARyArAk+2qE3xgBCQwbnkqDhRB7tmIMko+EuzQQZYQiDSuKhZrhU/LFCKkm3xOJG45jEqkl9cyvmxD1qUMf+kj9IzFMPm/f3IS/6jMziRjUnuPeGcATfDtdcgrYaZ3/3eXkvr5vxPAlZOPMps/c+tx9R2cGR2mJo8tTONqdw0UNQk6/dN/re10MQ5rwI9XGQ9irEVSgc9pp4k4IP1LPevmJxJ+UURKFoe5iUBPjeb3+652A9JXaKSIj9awyjoshUPeZYjRT/Wc0yCrSo0Yg+rymFjSbxw/mW/6zdQSYb4ObLfDsKHh+bH8X5AAYc0zY1GfCzRMc8uqxncc8UsScCH1513fL7LuE1Es5mVKuSyH3IsFnTCDOTSIB9XilMMhnxKP6q9EzQiskp4Oc+EvjCOV0ZLTziFIzmTrxpz/Sc5VEYQmt
YIpjPiSizyl1VRfHgCG/WXBWqBF9PeJtyBEM6KtkhsilIaIv9K9chhckk4WDHHLQOQhqb4hyWXk8PW4acsDSBix93WjgU247+uaq5VxaRaaWJKoiC8vrJ8SR1K10mpPfpLat3pvVdkl/DSjk8UvEeRzvybNfIXzGztnPeTjOfYB+FAQg1Vc/P9YA2w1hK4yj0GyD50fBs616mwPRMcH3U6mO5TWXZatIvbKOpmh3fz3UGJGQnNMoXTeN6ICmiD7Jd8qmQRcc0pbOz2OW4owGcRaQeUulkr3oBYrQW0Xe6W3Dh4LyOH4KxZ6OHlF+smgSegxaI/5qfd0FxjS2unnh/ve/P17xilfgz//8z/Grv/qr+PjHP44HP/jBOHXqFD7zmc9gPp/j0ksvzfa57W1vi8985jODZf7CL/wCLrnkkvi5wx3ucN7qf+mll+KDH/wgfv/3fx/Hjh18rnHmzBnc8Y53xB/90R9toHbAsWPH8IY3vAHf+I3fiF/+5V8+cHn3v//9cf/73x/f933fh7e//e24+uqr913We97zHtznPvfZ9/6f/exn8bjHPQ6PfOQj8clPfhJ/8id/su+yrLV42tOehuuuuw5PfvKT8fVf//W44oor9l1eiYsvvhh3uctd8NCHPvTA+R+bpsFzn/tc/Pqv/zpms9mB63bVVVfhDne4A5785CfjxS9+Me5xj3vsuyxr7UaIyP/1v/4XHv3oR+N1r3vdgcsSGGM2mnuzaZqNKYhufetb47/8l/+CN77xjbjzne98oLI+/elP44orrsCf/umfbqRumyS/pbxNtdsP//AP4653vetGygKAxz/+8Th79uxGyvq5n/s5/M7v/M5GynrFK16BP/uzP9tIWcBmrwHgz3VTYOaN9GuCD3zgA/jrv/5rPPWpT8Uf/MEfbJTY3QuIb6wjHwAnT57EJZdcgr+//lNojlwUSLsOp3c7LJ3z4W2CQWdmvQpwW+XOmpk0sY+TIkARbjkp5EkcjmULLAEzY4Jh38TJoCEhVVI4wfK+LvNskJr0icHbG5hyj1CZIBKQDO2d8rwXL/tOGeCjMT5NvKr5qAZy7UhYT19xk343TX8CVubjqRiDvMHKJpKyMvmPRnxoj990rQBlGGBO+wRPYZ0HpHOJOBUDmlb3pXCUm30Uauq3kiDTv2pHH5v86bCe2gNZGyWB5Eks67Pjq9+109dtXz4jvn5+fesSKdoxxxCY1Xorss/XKRlLc3JrdZ3l/HR7yLM3M5SMUYZiO5TlCSkMhPCd+v4RIyvq51PLt2fFbop0XDk/Cwda7vjnMITBXLCJiuTynu8drzAs1vL4lXl6esbIwphTVfOVfYteNmTEM8n7nJtZUCTPwXbmc6DaOTr294pDytcD5M9yOleqqE3z/hDIn4NV5woo4xaAmK+wYqTsGaJ0eauMUUJwBOP8ydNncPld7oETJ06c9/Al8n589EvfhNmRi87bcZbnTuMPfvBhF+ScJlwYyL3zuY//A45fdMSH6JTxhCAqYguCiH3fRq4LTj1LcNfBnT0JtEsfIUDGEeJMtNiB2T4G2tqGOX6ZD+85O5pIDRknWBU6RT97uh7IjcRs574vsrNA/vuxiFPOVPJOA1Lfo8cS4iCU8hH3QztDldHvr9I7TKt68vdzfywoKN8rBGBujX+nybvEtd6pSnKnhVCKpcE8G59JGGYyQFBDZUroFUaJ3rVXf1e9w/L20YW6dN2RxoCdel/IchkX6UPosrL3YoXEKbcBhsdJ+T7D59TbfuzdU7xDsvdLr8B0LcprlJEeBQFSa359TYbOMTqdqftd2t6T0GmcImMcS/6+nBsA3QJm94y/L0N/EMke/X60/llnOwPPj3oCy3lHq875SCwyhgcqzxjS+E6ej7k4XHUqrKgay6STF6egfh/RwUSCXcaA+nz3ilVjtDhWK+6VDJW5kTwXUseh48aQxop8zMY5cjxXjGviPRvavxx418i22jIJAT1AzpXPRLxfAH/d
pDrxvg/ODs0c3GzjxMmTuPLKK6ex1YSN4ctf/jKuvvpq/PIv/zKOHDmC7/u+78Pu7m62zf3udz988zd/M170ohdVy9jd3c32OXnyJO5whzscmmv6r/7Vv8J/+2//DVddddXGyvziF7+IW9/61hsp6//9v/+He9/73gc2GjMzfuzHfgwvfOEL97Xvm9/8Ztxwww3oug5t22J7exuPfexjN2LMvv7663GnO93pwOUcBuzs7MBau1FD+0FxUwnJd1hxzTXX4P/+3/97KPq7mxI++MEP4p73vOdGynrDG96Ahz/84beI+/jMmTMbcXo5HzjffYmMTVeNLw51ENrtxmA2M3BwILJYdgzjCLttTloJESEe1IZ9zgYAPUNO67hHXOy2DkvniT8x/HvigjAzztejY8ysSmQf8sPAMIDkWR7rVObxYor5KUQ15Fj9BsERYj53P5HzBmVyDmzShDTm+GMHdvp7679TyP1nbcx3MmQ8qZF9JDn91vG4DHUpp6TkACYHoPHRd/Q6SgSqtIFvs2CQEfKPcqLKUD2XXlYdue7F8oMSflo9LfYyfa/INRVjVU0dF6+fWibbdY4HCcOSKNMGRr9e5XfrEWZpgaOK4UDWgWCY4UK5LuTHlPIkv5Gcr0+aVO/ghgi/IYihKiryVDWT93kyrIghSBN+lnIiDEjtrI2/YmiKOWwCaSzbaUieQaGcdKhQMdSKIbEhJOMTkBmrjdzHqg39c98/V90WhNRXDKF3m42FUiqNTqWX9xDGPK0P6IXN3L9nBQ5yL/rfhvr3iuT5iwZHDnloKBi04nEcenn/yADGG8U4GqYIgE1GWihjmBjLQ7+cqQvnm3UoWAetA+g8eoy3I7fEhMMNT9RveZLItSDs5C85MRbLsxWXcfwtzwUZCzYujiXIWrDr/LPXzJAp/UI5AmI/Vsn6HzEQkwHCuGcwr5MiW/STYCh/pwHpXRjz6QExrxczwRHDIIwZCSAmGModkXIiJSf8elULfZvUTI6v+3w9JiT27xhmTuMvY+AHf33iJxIVsc9TSrGSMCr2jXUcatcB0q8k/AbHleXvsfdLBYYoIzpqZJ5AjyH3AtllL/uuvakiyqvLC2TvmDFIeWSq4+69nouMTTi0t6j8pO1FsW+ocKYq61nen4V6sXdsQnwGjeRFpvw5kW0kWoNErwCQwnoi3f+jp15ch3IucVCyz9c7J/uAFAmjN/6qPJeR8A4OC+KgJsvL+sd5JMKcaczg4Irjy6H1M1aSf+peiygcMmLZ6ier66LXZ85VyLfV7aDnnH5OP3xa5wvT2OrmjUsvvRRf/dVfjY9+9KP41m/9ViwWC3z5y1/O1H6f/exnqzkABVtbW9ja2roAtT0/+Omf/umNEn4ANkb4AcDXf/3Xb6QcIsLzn//8fe97PvMI3lIIPwDY3t6+savQwy2BKDmf+KVf+qWJ8NsHNkX4AcAjHvGIjZV1U8dNlfADbjp9yaEm/Y7SEse2LLYsYdExti1h4RinFwbLTsg7v61zXrFniHrhFDuXiL2d1mHpJCeg9+QeIv0kl6CEF42qP2Ow1Rg0BgA8MSDelkB94u3Ik5FdmJtxSCTPQdUiv2HE6ESwFGTis2B4cy3Q2TCBnMdlLKqVzoYZvEuetzZ5e8ZJXedJUwpy6iz3iGli3rOxsEM618JoonV2gNgO5bdpIjm5ivxzyghRGvVEraVDew5BSKiS/LOVB7XcpgyXLL9rzvJalQTU5+G1rsEUyedqJB8wriQoFQS9ukWjZ1rG8EZJGwyfxIhkHzMFw5LfgQyB4YkrQ1QlEf2x/XGGlIrp2AxNB9YMGxpEOgybhPBECFmaiDLtyS5qs2UIqdMGol+TfyX55uuO4OXu25yNr5B/TjXJGsjAbhHyXi3Bsy2wnaMbiK4cvblLD279lZLCUJwCxixTBAwYdZKXdU/tVmybdqoY9IToiiE003eIykbODYhKW6DftsC44TaeL7zxPToBqAboG5uT0TG7b+w8
O1eueL5zqfQryi9VMqIYAPnQW51jnKXcEWXChJsy3PwI2vlFKVzfcssrMNodFTHAARjoO0RVNjN+P1G+SkQBcSBqZj40bpO8e6OaGEAMv6vqFpXHZADnVUNkK0NZTUo6eAIfgCWv5BHizyI4dKg+2/9kGOudrwiMjgkdvLeeYQ5jMb9ev54z4k8vRxoDSn+m+76uoGi874wfE9ow7usAWCCMDeHJHdX/egcFKz4Nvhm0Ck+HAw3LYv8VK1pRcZbGdyk7/K2RfD1nHQy8okYIv3G1YE78ybJYzRrROlSHrIyc8NvXVE0TIuX5yXOQKfgw3A61ttfXqCTf1XeqjL3HiFi9TJxobPji73shmtKeEllB15GJQKbxCn+lmPc7qIgAes4ANV4Ch9Ch/rlkpPGpoAlkXxOiKTio8LfGoJF3u/QXRR+RtR9sdO4xJvQP8OMLW8wZ18UQ2Zcp+0p1XYnQ962jupQ+hZDGh9KOPWiVX6E8zYi3St0Iar43sE1cru8/uT3LDJq148aDqf1FpSyqTDvH0vmx+4QJm8Tp06fxsY99DE984hNxn/vcB7PZDG9605vwmMc8BgBw3XXX4ROf+ASuvfbaG7mm5w/f8A3fcGNX4YLhyJEjN3YVJky42eHbvu3bbuwqTJgwQeFQk35msQPrFjjSzGGNn/zYwPLttpLXL20vZI32opNtdkIett1A+nXMaAPJJ6SfTLIA+ByCLYDGYKdz6JhCaxoY4jgx9qFChdzgMDntT8L8BFMZs8UoJSo/BCLLeS9vCuYLhjdiUfB6BBk/eRVDvlK3EJmkWDE2TYbD5C+SbtYb4KQ8YAXJZ3LDUM87s/Z3CAdUBglkHjhErmvyx0RDXH+CvCq3WMc8GglL56tbRXL161hfWar2tDGxJPu0ClCOvQp6+9LTWu7fUvVnyBOr4HSe5dxeMER81lQS60LvW+aLM6oNtIEkhg0LhpRI/jEHxa/fbtnlJ2LIk5uGGNIIxNK/UMxDaMtTMAaMmc+TRQbdiK1Hh5WqheQq+wWnpOOimZF9q/dYYfApw1dmITA1tGpOLYs5RokiyVeqgKU9h8JR1Zb1qg1kCkdR3QCJ/AMQnCcq/SzVPPj9uWRx9tkBpkJ+lm1ShCfW4QNjfhYGFu1kmJpweLB0wMJxcKJoYGfbkNDhLOHC4Y35Wt0nyJQ1xoLCSJPbBYDiPaSUfsTsIxJwCD8+Q1IUAvl4RYhFpn6fFCuiSEi1jRAa8jfL41Y4ExnyYx2fE9o7cZXvQI2+mr6OVeEvfd8W+nlFLPI6zFWpJKupqmq/ayEji21rxF5J/qHYZqXCrChfh3cfQ218th+Hyto16hF+6zjArACH8XmP+Bu6f2vHWZUPpkY4amIN67WtEFRC/BF7YlpIsNr7mhGIxkBWEYe/lN6lbPK0AHI+hmT/QPCFaClDwwLt3OUYYBWq3xJFUhxCQGrVb+0eL85dyH2H4XtK6lZbL6VWCT+J+KCdjKRMTdIDcdykx6lDDmmppHCd6tUexGiY2cq2vZCfJcq+O+5ckOE1xWt574pzgvH3lw7BOmHCQfCc5zwHj3zkI3H11Vfj05/+NJ7//OfDWovHP/7xuOSSS/CUpzwFz3rWs3DrW98ax48fxw/90A/h2muvxQMe8IAbu+oTJkyYMGHChAkrcahJP2rPwuycAjVbsM0cdj7HonOwRJgbh4Ujr9xRITs75kyx58LvkvRzzNhpXfxbJtfu2KBTc8VWJj8NYBwF8tGBYEDE0VO1g1B1CZEIZMRwgRnRx7nqr4uGKD/Z7CATy8Z7hAPRG5/ZJ1gHzxLJVyZTd05NiFPdemHrwvdVRqH6xdJezZTvNxBqasyQtAkQ8jCbpSfxEGS7qGgbmHUOkX2lSi/WZ+DwfaNiIvhkvzLkkRxP12PsGEBhPIjhm3xBjgP5F7yHRSHRCZEtoccoGSKaUJEhz/KS7FtF9EVF4RAZqlSPiVDsbysGRQd/DZedV/gtHWPRpT5C1L2i
GLaxvoSZ9SFD59YE5Ws4O8Mw7O8Rp62zdh7bYRH6JG0wku9DBp2U1yZd56T+pegQYIJxWhwLesRjSeoFA3vqC9qk5OG6QT9/lg16eaK04q/xYbY6Vnm0BkJSxes4cJNGdQwS8Qeke19r6YiAmjVoLBSUfs58iGGDUkBp1Bsz3kfxvIAu5H0V45yEiT29vPBKv84xzHn0gh/LNzrhcKN1jN2WYY0fj8yMRWMb2C2T8nRxUPoJCrKBw8NCs9CXyHauAzsTNrVJBQjEKAOuXXhCEfARB7ThWPI9CbFAxjs5ZQ+wIjmYo4I3jnOC4rdKSFFStAMprCFxIiSIGJ0Lir89kEwSItqrCfvPj1YCAskZTN4n4pTk+xsf7SFGVdDOB2Hs2WuPcrxFJhuPDSn8dH9dJf1GzqVEv73Hx441+kG/M/WylccaQZWcAfqOH3EH0182WHhlW03erVOMKd67GjXSsCT/BlR/Y8QmwTsVynaSdoADqSRb+zGgbGOAZg5yYT/TREeBWJ/CgVCO5Y9H8dnTSr+yXjNDaAzBwqElE8dtADC3euPglWUaiANktk7O2zmAXLz+MoOLSuACNTIwH0Okcbo/L2TzrqpjlcnrFvP3cR6JQqekKEO+G3jCU5zPqk4CFZVfj/AbIrnVdRzK95cRgiXZt+oY5bbSTwWVsnaa821y4cch09jq5oV/+qd/wuMf/3h84QtfwOWXX44HPehBeOc734nLL78cAPDiF78Yxhg85jGPwe7uLh7xiEfgpS996Y1c6wkTJkyYMGHChPVwqEm/aKQO7Nt8ZmCbBoYYOy0wd8AiqPAWnZ8cLF1Q7TFUCE8fzlM8KoUIjAb/gvCTHGEA4v6AeGN6ZdDMBDYOPqRoY0JOGILP9YdkkBAiMKqo4MkUb7dORn4h+wA/h+tIkzychdmxwWhNBNgQ5obFoD8Wtq42QQvfMxJQ1g15HddUQoLSeFEYADTZVxJ/MvmW0ocUUX6b3AvUG/mUKk0fTE6znMRXjEZxkm3FC7nvUVuSW35CnufWM+gTeLU6AInY0OtrCr4yj8jQOVQRtvOqgtybW/KrAMnomc6doxFO2leIQl237HwKom9VHb1BKW0k20sbEuXnLNfDh3alrBwJ99o6T8gI4deqz9llF8P8LtW9bIhCjkYfxtfndKLglQ6wcDsmkcMMYLfjSComMji0gSKPstBN6v6VtuzA0bjTyXVyHAlsbSwzQMjFk5N/VcJP1DsqFNeQgscXopwAhOwLeXTYzoDGG2bYNEn5FoxUiSjT15d736WdNDlahowDBvJxjthM1rOncPX5Kg+hlYty3bRHvhjndial34RDBMccVM/+3m8d0BjG3M4x32qA5U4iElwLdK3PhwmkPiOQfmznvl+Ze5KP2wXI2cpBfefJ7dITg+3S/zYmGtO56xDDg84kqkEbOoewkakYlpmiUR9Aj2zQCH4UweDv+1OGGLEZHYIDjOE4PlunT9HEQUkixKqr5WM5AGKe3uK4TAZkTLD2d4rMkxdmRfEnMJW+HTnZNzQO6xUVttlrHgMu/sblvXFcfZwk6zcGFfawum4vSj8hw4t9ovJvrBrrHGeMpBkh/nr7l+VwCovr5wFAF8LamuKe7iT6iFbzda1/Xkech2SdVfcrybu1qKYBYh5o7xRpsAzOW5rfIiCqhGPO87HrGbZn9hFSbBhn+WFu/zmv9GCpjmqeloXzFIKtS0q/XntE4j6Nm2SsmoWed5y1DXPoM4JCklRoT6frO0a0rUPGHQQD5fZCe5bqPjJ+jBmcTqNDFU8E2YSD4zWvec3o+u3tbbzkJS/BS17ykgtUowkTJkyYMGHChM3hcJN+jtMkzXWAa2ENsGUbACbE+jdBuecnb0s15xDCTiYQsixX9PnvmSLM5Eb7znHMueaC63g0XIeivNFM1HtCEioSA4j5qWLOOiDlRYvGlj4JKKVoElCKtgR0FPL/2XloNzXhHApbFys27gE+CLPCW7T8Xij71iH7
/Pqc8Ctz+8XqICmBSEIzqvLGQkv2csdJHYIXsq5nbT+dd0/OfD959/TPdci9OtmmCbD6ZFlH50zhZhMJ4vOd0GC4T4RwZNLGNdTIvrEQSrX8favA3H+mmIWMSUYU/UmOAYxl56IDQGwb459JYyzgXLjNDYjIh+s0QgJpgtHnGl10nvyZmZATUD3/JXkthJ+UoSGhK71fgTdzxXBwvjSvDKHg4F5lrPp5uAbz+2UNXximiCLBF3P4maT2q+WiyRSMA/egXjdG/q1CWXx5vHXNRTViwJcn7Z7uJ2ZEw1wbVOPn2knpN+HwoHO+30zjDQ7PnINtDGwzB3WBlEMwjotqhHxOOTHWxhB/7Lxqz3l1Hzv1TISQ4+y6RPi5zhOExnrVcLZ9B2oa1Xd5ffPQuCRT+wF+H6X2y7ZFIv4kx7Ko/KD6IMMpb23tqL1yK+q+cnSklVM+z+D+GSwh+qJBvTbuku9luw0QfkPjsGxXkrHDMPE3dlZpLNd/9wnK67YO0Te2idSxR9JoDCmW1olyUarudLF7IQ7L49fqWcNIPTVRVpI+EnqSpd6mAZGJ7eRA2T2hx1qWjFf5hX1HlW3qHAzJKDKRf0C6lyzlTkytS2MLX6/kXOdJcF85pkL9CtTbpKIGTnWrtWAfslmV8FtT5Sb3vyf4UtSAzqVxLIAUpphSn826EmOojfE2QPitDPs5BnVNslzJYYwpznESLePGGIVMY6sJEyZMmDBhwoQJhwWHmvTjZpbyQwDe29w5WOtwtJmjZYIhh0bZiqixft7pHJYQgzLDNsl70BIFNSDBGh9GRAg+Ufroyd92Y2HIh5uZhdmon5Axlh1CzimGsz6/m3ipkiJFwuYZIZUIoeys07n0wjyyyuPGcQJMlMhB/9uGMi3I5AdJOcPUETMVTq8aEblyLdBFNDxRzsJF8WqCz+/D2eHTPjlpovcRY5sm3RANE3q7dB6yXzw3tV1qm3qILg2iUt2X8uzJ9YjrMuIxx6oQnXsxG0XFXlFQ2c5WkceSW8UxwxrEcJ+OwnacQqI5IC4bQ+3+rhkIazRumcMwhvZEIr8I3iLAQYao7w8JleTASuWHmL9z2bmo9MuUZJ6194QgEWaBBJTwpnJe4ol9OtwsOy1HQ42ofWvEtSaQ9HPQc4RGKAvwXuxi9KFwXY2E/vRbd0he9L6AQINrQ9SqkJ6a6LMS0rPxoUuF/AtkX8veviekV6eeyfIZXoUeMbhiR70678fCshgqdbif89t5GPU9Ow6Lg0IKxSrnKyGk5fuZxXnynJ8w4TygYwZ1nCIHGARjMgNwmNsG8+2LAdeC2oVX37kWMe9fyN0HwCt/Q2g2BFUeMQNdSfCF7eX3cgEsF2BjvVOXAhmr8v+pnMXysBrkxAwAUfuRgY9oENYPEX8AYl6vmNMvjBcZop6hLHye7ltqYylBqdDJjj3w2qw5+qBGpBQow6lnDlyZgV318bJM1V/eg2MOG5JnNQaF0OMWKBJkAJrwq0HxriuV2GPIxl2V8iJG1G8HIv7OE3r54fZzvDL0IzwBpsuzwaFHiHEZo3TwX4iAJVLEEZk/afKrdlwh4wE1XjFyPwZXsW4BWu6Aty5CC4OT4d1qCJhbn+MvjutJQowCQMjrx9RXlsnxHQDJP2iapHAsN8Ua9xw7v2HFqaoHMv48lbOUd0Arck13eQSBrD5M3imNw/gbaV5UjvV7h6+N+UZ3KBTEG0RWpmkQwxGHEPnSBqldNl6FCRMmTJgwYcKECRNuNjjUpB/CBCkLPQlE9Z8l40PxGcAahg0TBU/MGXTWK/NkQmSYcXRmsUsOpgMkX1sZ2lOIP0ET8nuZyjptGBHVn1h8hPwDkke5pjhECTTInXCfHIyEUiSYuCCYuMj7tnrGNGREB/qGFzl2UuVQNA7UoIm+dUi+/n5pu5I4qWHQoBbX5yRdWp9+WNLlU6/Oet8y114iXvU1GFDuZeUNT67Lth1zEmXmuipBHdsbLCmUxSrcWVD4BdWf
9+ym2Bbi+S0hn9wKe8DY+WXnNBrmLBGp+Xl6A5Q3SoX6cDKCeKNt8pwWInAvMCNu3w6M1lFQfHF8vi2l09H3djR2FiFpNeGXVY+Tdzccx7BOHMLOwYV7jCUkKIVwoCZ6v7Mjb2ArFAxD+aCi8VgZp2CaYNRXuVY4Kd1i/hmXP8dDTV1r0r1clTGST5at7nP0setH144FjJQ3VpN9OoT0coVh/nxg8kafsF9ooscrtryDBzFFg7S1BlZylXaBzIMn1MgWZJQogdnFHF9krc/dB4BdlxnRJbefD0PsABuC05n0l7sOZK0fH0VlkgluJyap+nqqqPXJGnmnEThT+8lYR8JMSl/gczFztmwMY9tEx5ZaWOsV5JkvoDg30+/Tx1BziJD3J9Dvl/2rKIWv1nWu1nUolLw6Zs3hRR8vP87I+1iPw88DUbEWNk32rUvSrCAoaaisCuGplbImvOvlGukoG9GfUSIfBIVedAYsjpGRlVpdCACB0Jd1ErITZLIuxgaHSptNShJZKYo//3u47cSJYDQM6OCa/ByyOg+NAfSYilL+4/gJYwiv9uvne9YV8o4Ifsyzbo7ytVFEeakSfkP3+Ip7lYMjSFZm4aQm42SHcK9VxncXCtPYasKECRMmTJgwYcJhweEm/SgYnIF8guAcQC3INJ70I6/CYzH62+QO3gUmzhJFY+22NUHtw3GCJZD5ZOk5aYx4mFIkA7rgcSkGB1H9GeOJRtcJWZKXJZNWIeTWtVEkUikRiLn6riQJFVGqjrGOsarcrzx2MvRwJLlqWEfFV6uTNuJo0oQH9hGP/X79pW3ydilJuT7ypZX5dyiXKm1E9VCf2X5jR+tDH99SSVwkuIGSLJJhT5N1QvJJnWrEnwWFMJMiLAt55FSZNQwpKetKxoGrUJJ9UPcGp5CuaV0ihdsuGBVjKM1c1WejZVfVw0h4qWDEMuF66uMEYmnXOZ/Lr3PYagzmlrDVmBR6KmyfEUgFIZWRVIVxo2PvGMDk+wruEEOrOdJ9lfz1X2w0vrsUmg8IXvbeE52jgUqM7EEBIk4Wdub/NvPogb10KeySKPtqBqpIEleu6X6psVr5JVlaU05qpZ7so1u5NL7IvRWJZIceyQcgknxdUYcJEw4DGLl6hxgx13DrhOT2Bva5mfu8wa4Fljsg1/n+BT7XH2zI7WdM7GuEHAQQVILKdKtUfdwukrHcGMBY0GzuSULXeTKQfS5SNmo461R4hgBvVA7raA11VgGTqf0Q34EAMucBIf56bar6H9l2FbKxg4wfkMYDpbIrVdbkJEMZph3IneV0Pj+1TQpR3Sd29hL2MBtLDdSZi79DKMdXcXnNYYT7241tvzaGQqWW26xDzO3lPiyJUiHAdNhIXbQmsNY5Tk3lxxK/AUkpW9m1U+/QWBw4zguY/JhvZkxyNirTC2iCUZ+nQFRfs2204nxA/vqWKj8GElkov0vZ/oD6UiIfrCL/SvSugX4GSxJVKxlVCEsddj7mnY7qthAFAr4PlrmiD/Hpe1bW0mNsiOQuCL/qunX2H2nLHuFHBmxTbkgXSFAZo2nib8KECRMmTJgwYcKECXUcatLPbV8Enh+teqTKRNgGQ1D0QCWZIBnMbDLqatWdDtEm5B+QQn8CucrHKeuHUZNOQQo/4ss1TFhCh1wJRvAwL1q6ZKDfy4TNTwA5hA4N9aFUfj+PGu/Z8FFTZ9WUhVJ/ojDxzwxE1CPtgDpxV9sOxfZ6n9LgrzGkJoshzPT5ICfs9PbrQDbNSD/0Cb6M7NP3sa7/0ERZTbapMEJJeB9g3Din10kY1NIuoq+XhHOSZaJ2iLn/iGNoM6+CGG+0IbJviJAe2t8xRokVfX8IkVaqCAz58LzOEdCE8EGdyyoppL6E8tWhfaWepxYhPGjHgewz0SBVDxOb6lZbDvSJqdQ2wQjLvm5C/knIT8Ab7S0DlryRSIxudhbC7bED2kXMezUYfoqMz9lnmyyMZ9elEFRC9HUuVy3WFMJCFGuy
XbbfS7+0LuGnje2aSBVSUooR1R6Qh9AS+11Sjso2SdGnl/l9eFQRej7hHJ9Xj3F3HsuecONi7I7tnH8+Fx3DOICtz1dsqEEz2/Z9SZfCe/p8faIOdinHn2t9vj4s03YaouoDkhLQWE8KmsLQLPn62KuUq9ERQmi/bB8VynIIki+1C+2SRTIAkhKe83WbQiT7SMYT4T2SjRcqZFCvoAHDfWW5C/1hSfjp9WP1rY11MsKvovLbay85Fh1BStfRC/aFVYTZKvKvRnYMhItcWZVaebVrXZC9NeJPk++pEvtzuYlkcEEKA/5RtUbCAvvxiQmh0CmMKUjlGI9hfDNirEvjDslLHpjHxhC2G5M50cm9aWhN4i8eRwpITgajJJ6u52gDjT+LGeEXiMwuzj9zhV/n/Hp/nn4Qa00eYpiIeg5V0cFQ/q4RFrhWV0CRc+sQ32NY1UcVjghpTMej/c+FwDS2mjBhwoQJEyZMmHBYcKhJv11H6GBgggd5zZNS1DqWgSbMITqLOLkSw4EmfJowSZ1bgGGx03ZR0SHbynzEOaAznBlENHS+AU3+Af2QQxLRypIKmWjWH/zXyEId6kaH8hwyqo/ZpynkBpPtSBnYiQhMPqRMCM4TvNIpzq+l7G7AeKQndb4EVLfT28b1RQi/dc6tpuwTsk/W9xV49QYqF2sVnxxbDHeAMoAVHs6ZkWFsUm6SsWfIk1sMmpoYy2nmfJ3UVxN/YsQUko/VMqgyNIGW9snL7p1CQboCw2TfXniTWtifTNlVUdEB/llxITym4WAWU2oRIfNnxmAW8nMKwS1Gp46BU7stlk4UfnNY43PNDKkVRXk4eD4VIlt3JTo0sCj/UthihuhrJC8VEO5PY2DsPN077LwyB+gbKcXrOij9NNknxqgypKcm2jSkncQozKH+LtY1Lds0ShXCEOGnlXxAnejz6zlbp5dPmHDYUXsGHfwDw0ywhrEMz4glwNoGsE4Zl4NKyEvEPfnHNoXoMzaQe8tM4dc/aJcIvzLUp2CfpMU6oOBQIR2FODk55h7xN4ZVNYz29NA3Sq5aUZPrCAS9fH5jqr8NgbE31bKMeeJ4Z6iO6yrRAkrCr9yzRv5tlIwdIk7X3T6gVqUhJ62MpNujAi0VkhN/B0Vynkl/y/p3zo89yHB4ZhBy5RX1KvP96XvCzsBkMqWqlTF16WWp6hbbT42RI/EHjD+QtfF3JQwpgHqOwAHkOeuCsjbk8euF8nQI4eET4ZeT7574ayoDZOkv9O89Yy8qPjmfEmWY55q6Ul/rkvCDur8mad+ECRMmTJgwYcKECXvCoSb9Ti0c5kuHI43BzAAuTPVtMfEgeKP7PEwOj8582JRzS44J0gFvKLBGDPgmGq+3LKEM8ynwk1o/KdlpvdFKh3zzv3PDcI040grCpTYFRCKwPG5aMEQ4jh0nbmNWrM/Iw6Qi9KFLk8XFsM8ERsQhz5sYwZIhvza/Lok+IDcc1FQ85fJVGCOXamSf8Dw9AhBSTtlG6TsVy+IqMWY4Re6VIY0KI05pSMiMBZwmxYSgpMh2Nj0lgzZKiqGyPAdR59WIP30+2ujJellQUjhKRNUokTywLleq5seOTYBUZzlWqS4riaeeAkypzXw/4dtrZnKSRz8H2vgqzwAzsGAf+u7ssoMlwswabDcGR2aEeTiR2r2t1WlDpKQ2rmkPY92flM+oIYZ1/pm0jvxv8oZ6CttLaCxLM5Chnh29bMeu9Ub8ZcehLipvHycVpbSrNryJ0tiH+Up9BiDKkHTMvRC+WsG3KqcWhf66hl4oT0X4rSL7ynUAosLP0upzOB/oHIOmvDMT9oFZUNDIMw2oPj/0sR3YR8pEeIcSYdExGjMHNVvBkNtG4o+0sdcEtd9s7sOBtgvwcpHIPQBkLdD4UJ4ZjAWFbWKeUUGNPBoI4VfbPiNW9GbqPaBV14Le+1SFvM6qjmEiShNYEj3BmkCQmJBPGoHsKEmSGgFUy+En4TtHlGuMOrm3ztNuiDKV
X4/wU85NZajR+HWk7KyqakwG9MdlAFJ+YaT2HRo7Ok6h0X3774/IWwV9+KEw7IKOS6ezcGg5/iriL6hss/yWY8+DRiRc1DOiQlGmsUl6D5ahvDv29653iPFPCTOCA1I+PibXxcgD8f4ORJ+bH8W5lnFup8PFc4vtnS8BroUlg+XRy7L7VbdRNv7Qz7m0Qxjs7yUsZxyXj5HYA20JMinMMBV5/EJY9GVwpmodB5IPGeGXh6Dvk98pxQKlsMDx5u/XuUdYjihWs/5W3ROxqWr7lm2cDciLe7G8Rupeqw035J10oTGNrSZMmDBhwoQJEyYcFhxq0q9ljhMjr0RSIV0AP3lEzEoVDRoIxhPXANQBrZqEeKN4MLQEr1RrgM5RnGzV5kgdM2bG+Amw4aja02HfxCjsw4TmZUTjvVIU6jpFtVyYwZbbyLFy9EPL+bCi6bcp4mBl+dxMCmnqOl8/GxqYyBv2aYMTrhqJAwyTfavmRbXQkZrsk230GURP+2JZzTzSI/ZqywrP5TTxdcPGBA3tmS33H1Fanq139Un3AEpD5booFQ2klkWVnyJf4qmUz80Bbp0xwq8k+0pFn98/X6BzM3l7TKQ3e/X190xaLoSXhAM+OrMx9OfcUkakjxF+Q6gRfj1vbyA+o0AK+eRCuF0h2jowLHvHBivGTXAkDMvnQbezVvFFY5Qi+jRBoCFKRBNj/ErdvFF2LARsjfQdaqp11Ry9+zfsZw2tZWypqfi6YlHZ724kr86ECRcI1ngChNmTV2JcFccOR155YpAIos5xjARgZ9ugbunfTW7pQ26Kihj+HSbh/YTEqz55xoAkP50RZWDYx1b2ie/bivfC0HKo95dsqtaNdQllqHIg9SdD/VFJ/JXrKCrHKR5Dr4vHqYwbBnP8oW+QjwZ8Y/Y0blgXvTEQ53nnsvFKWG4UmTRaNpW/04JVisuxrlgTUfG7ruMB2qms0irCr1y37zeIPDtATrLsg7CKREyoV8xTycPnIGSq7JM9hevcw0KKReIdoOVZf14hX6iM+fR9INcvEn8jzgG67sShvSrEHzH3xu0r23EoRKxSswXBnh9DIY1ZxYFK6leb96Uc8PU7RI5q9IC/kkNxtP5AP+eeOgdW90gPKiJJr21r24/0R2Vty/HqhAkTJkyYMGHChAkTchxu0i/kO1h0HMkpS95gb+F8XpmQqwoA2M4A26AxDZqQZ+tcy1iE3AlACFFV5N5CIP86x9HA1aoZe+d8WEsAcEzoguJPJqpC+Gl1yLI4FwkZuqxMnfUy6+oG5NLwJEZoW3MAV/trQkJUhpEUDMdylAhAwETi7yCzrZy80wTGMCGSCJNhE0mNQNCEX6kg84QV9UjAWr4YKX+I7MuMXPJ3L0TfWOirWAHxFKa6IWEPWIfw0wbK6nrK8/1loZ1AGWG7F95DjKbaU7tmiJXm00pcTUT5/YZVukBuLJmVUbsqddaRoTj0BWcWHc4uOxhDuOLYDFvW4EiTG6Bq97Y+hzFDZfm8lOEn1Za+H3F5qF+tXpW8m4TkCT6kYvXHzpWHDI7q5qFwmGLPskTRWUCuqZDEQ1il8lxFVpf3XVqe38v+/Dka+xnBgBY9LHgwwm6p8suOoxR+/jyop6C+EGBm8Hn0GJ9Cbd18IflNO86t+ZZ88AHDhF3nvDIqkOVMfvvWATSbY2t2BHCtJ/9c63N3mSbLCcWmATUN0MwiiUfWgrsO3HWg2byomAU1c1Az83m+hIQo352mMPCH42V/A8b6k/2Esiz3SQ4xlBSDUN2MLoNSWE+i5ITmQ0hTUvm5No0namq/gmjIjPJDSr+hEJSczq/WHHrMJKqiTOUHpHFQ0aGSQVKglW0G8mrwga5Tk6Dl+6Rs87FxjEPufCLvahnmxroOkER7RY3sW8eBjdV2K8mOipqvR7IO3AMsu/UYlnTv6LqsA71p76iagNL3MRmwnYObrTDO8BFbGm5Bu2fCvr5/4DDHsl5TnI0ZM+IPSM4GcsxwrFXK
vR7h1xvL9/etht7Xxw+EpswvRdXXhTGVjLV0nmGBjK2k3zEk0RuUwg/peaSBeo9ihcJvkOwrz1eIv8rq3qCxaKehPocp5RW/0JjGVhMmTJgwYcKECRMOCw416bdtDU4uOhxpPIF3yZZRodSCQQkLv7FrASKQMeBgfAIZbIWwnzttMhNrA4Y2LDBLONBkVAg8GOA45qeyRNhVEypN+IkiRkNyb3WlXCRbH2Co6u0J1BUopULQ16evNMw3TkbrONF01J+TSb4Z9I1VpSGohA61VPOWF2hCRE+EatNVg763b1qX6uXr3if29ORRlJeRfFLlx5CEBfm3Hx6UidbPB1J63BbGOx0OR2+fzkkdt1J8bQ47ZhYYUjLo5UMEzBBiLktQRvyV0ORZqe6T8I2a7NP1zByWFfHlj5sTX0MGBcfeULgIauOOOSr8toJDgSjHusozX7bbOoR2rw7hPMuyRWVccyDISf5+DtDeMULZHXOW13Qof10kvDKHAd9J1vLOCFZxYvJslWeU3dPFPTF2rNBlJ+JPDiLnEW8IjvJn3UfZQHBI32UpV/uVhN+k9JtwmLBlCUcb33t3TJiZNB4SBwBA9dfiWOCSgwE3c8AdAbcLULcEdYvwjrK5AR3wSr9mFsN2kuTvU8v8gSyomYFm8/i+Y6oEeHNehkgYUaHs0WlmqE8ZUvvFiAjZuycn/mSZLyf1g57w04b7ZNiHS+EPR8cOI4b4aMQv1FvVYtQ51Bw2yvFyzwGqUPhlRItzifiDJ2RM0ZeW/FOt3aQe8RCok61xPfffOTK2Kwk2OcYmzPBDhN8qR7ZeFJN1Ifd4qfYbIqO0Kkvz5gVpHEN7Im8X8ZmJ90s2F8jrnp1HQVgzGSAQfjzbBjHhSEOYk4M5e8ITmHYObmZRPaihydGS+BsicmNVVHOsk9evem+HAiLRmhHwYT4pz1+4/oxA7iGFSR3ilbQzlTgHkPQX4beEbs8IPyCFTl0TQwq/Xr861peWKuOBVT3nhGJ7E+rTsVKfr30mEyZMWIVXvepV+K7v+i4cOXJk32V89rOfRdd1uN3tbrfBmk04KD73uc/hNre5zWh0nQkTJkyYcPPEoSb95pbwxWUXjCHeWJCRWYHYA3fRQMJiZAiTMAs/AZrbJiqENNlnQ5l+0uG9rcXwLeGsxJsyTOXi4Z3yztSEn3OcTUq9FWPsJaymSCuUJzVYop5hPhlVRpgeQ3VyEAVxoj1LRwi/nFQbrG6oQj83m64akBsWZF1J/MWJIeVGhyxXX0YC6Tr2iT8AKk9MbiBKESEryoN1UHof62VyzprwE9TC4agQTMDevLI3gYMQf4AyJnP9Bizz4JX55LSqDlCGHF1H6Hsk3cdp2fgz6fOw+F+WCFuNwSw4EYjzQRvqOPR4jl2Xda5ZjYTLyljzwpfhKPW+QzntamUksi84CQQiwJEPBgwGmBiOh/uWIQwRfpkKcg/3WQrpl4g/5tVhPo2hwXbNQicL4XdjuKIjvH/O44N/PsuecOPCwsG6BbabOTrHmFvqkSo7LUKeLg9CykHnw+k1QDP3IfhcpwigQlVDJir4EPL3SR4/aubh7ywdZzaPpFU0nA+dSAjxXr4fa8b4obCQOlT0WP7kUvGu33u67KpTEuVjFW2wj3nyoIz3bn3DfeYMtILgK8muVRjr2apEbEmQDNRF7iGHpNTL1o8QfvJ76DRYvTfKsSPQJ4eEMFqFVd38OoRfjdyU8WyNtIoox5wDY9DefV+7BsXYUS+TZ43RJyp1m2tHOpkLrIv4TNsZuJmjg4nzMHPuFKjdAdiBjQWbJhGQoZ1qIcM18aeR3T8Z+9RvS++gN1Zx1/899KyZYnxe1G2IBNZOW5JfWkdrMIUTW+bQyOg9f1l+whIlIVzUPVs+tK1GrT1qKQlUHzXU3Kl/2Nu9tSlMY6sJN1ecPXsW9773vfHyl78c11577b7KuNWtboXHPOYx+MAHPoBv/MZvxDd+4zfiQQ96EO51r3sdqG5D
Tt0T1sNLXvIS/Jt/829wxzve8cauyoQJEybcorBcLvH2t78dD3nIQ2BtPdXI+cb6bs43QTADu63D6UWHUwtXN9jMj8JtHYM7cgl4tuUXOgdybTSgkGsxN8B2MNjPjPegnJlgxA2hqahbYG6AufHbzaOBPw8Hqgk4UfnVFH6An+iJCrD2idtwKnvp+h/ZpvbplRcmLC6qkPrtZkzyFJXQo5YITQh9msJOIf2OJGnyQI1hZ0gZrbDaQKLbR4xuYpSXjzb6l01bGtG08cGaUHeTluu6CRip7XwoWf8RtWcr38PvpQPa0J5MxpPOpgHLxzZe/WCbuM6vt8MfIv8JvyHlkgFsKFMmyVKmMszoe4fVp3cPjhgc1hGfrbquQwSavp61T8fB8xnFs+E84bZ06rda58k/xJyfvfrIfWnS/WrVPT43hMYg+1j18QS3v7caQ7hoy+LSbYtLtnweP0OERedDB+90LoYQXqr7qHPsPbzVR9/nAFDmHpRQmqK6c9IfdIxl57DsHHZbH2pUPqcWLU7sps8Xd5b4/NlF/NxwZoHPnNrFZ07t4lMnd/Cpkzv45Ilz+NSpHXzixLmwbBefOrkbt7vhjN/3SztLnNhtcXrR4fRui7NLh7PLDjuhLvH6MI8SaRlBG41445/YD6j7gxnx+kvfUTOg1dS9OiSWD/G8t7CcJeE3tG7ChJs82IHaBWbdDraoq/beMv7RRmZAxgz+PdjabfDsaBp7hdCU1C3imArsQNbCHDkGc+w4aPsYzPYx0PZR0HwbdPQiYPtY/HCz5VVAdp69R+N7UciMSk4ujajQYVc9v9UKrPy3hNrLliEpcMzAR94nekwl76PG+PeRFQcs1+Yqv0K1w4EI1Z+S8NMqPx2ycchYbyD1T32ifOJ4SjnORNJX2n2MnAzXKaqPwrXI1GFEvY+0nYzbdHvG64Oif68dXo0d/Xs3vIf1ewb9Tw3lOGpoeY3wq45vs+WpXnFfICdIxkIsjqEkuMrxpDxXpumRwvrxkPtfCDobr6EOVUvKQbICMuDZEfD8GHjrIoAMLLdolmdBu6eBzkduYTsPDgUp/K+Q4xplu5cfmf90cp3Lcbtt0jOi20fXdx2lm25f9V2PZaSOQE5qU+gPZtbEz9ya2Hc0xj+LjU1j2NiHhO9xrlsJsdurX3ZPUb5O5/BbQfj12ru8t4o5S68fCn/LZ0r6Inm+7RrNPmHCzR2nTp3Cxz/+8QOX84QnPAE33HADvv3bvx0f+MAH9lXGfD7Ha1/7WnzlV34lXv3qV+MZz3gGPvShDx24bp/73OfwPd/zPXjJS15ykw2B65zDq1/9apw7d24j5X384x9HJ+F7DoCu6/Cyl70M73rXuw5c1unTp/F3f/d3OHv27IHL+shHPoIXvOAF+P3f//0DlwV4lel//a//dWP3x1/91V/t+zko0bYt3vKWt6Bt29Ubr8AHPvAB/MRP/AT+4A/+YN9lMDPe/OY34xd/8RfxpCc9CQ9+8IPxuc997sB1c87hne98J97xjnccuCzBiRMncP311x+4HGbGddddh/e9730HLkuwqboJNnF/CE6ePLnRur3//e/Hc57zHLz85S/Hu9/97gM9Z5vuw//u7/5uY2Vuut1+4zd+A1dddRU+8YlP3GiEH3DISb+FY1x2dIaL5w2ONjnx1jKwcMCCDVoKkziZtIgyyrXJOBSMKZqc6k1M1YTGIHliA30D0CrIfmPkV2lwLo0HNYKw/F1iTBEY62ZkwpkTfjGEk3yQiI+0PN+m1o6rCKI835n+nhv9a21SPZ/Y1v0Qn7UqaMNOTp7qTyIy9O9yO21EiIScEIBiULCN/5QT4qFJsb6HK5NlXXd9TntFzTSw3+60psAqb8UasSPbldeeOSd5tLqPgUiqCUaVW4ogFiJYDJz6I4ZOQvKonhnCVkPYrpD/ctyoRnR9Y1750edaO6dYLvdzhZYE4G7rPPnm0u/46VyPGDy1aHE6fE7tdji123kyb5G20fsI
wbfbOuy0XTzOMjgUyF8XSErHKScqS1hWpGdcCDx9zcv7JTeKclQE6/ujRqLKZ+je2wu0F/aQ8uemgJJQPh+fCTdTBFVGj0gIEGK8JLMlzJwY0QEE55W+wiOOv4Dk3EIEshZR+TfzxB4Hko/DOI71OzGrWPG7pojThu9aHq6BJtFYNe7TRKg4vGT2c0ofA0oKHaRxpQ0kRoxgIcTYmNFeozY2KAi/oeub6hnGgkWd9Udvm73/9hPtoNhPE3pAfVxZvnMPIqyWI2fqu8oYU48RR8ur7CfLy+gVQ+87vc2esS7xt6qMgoAZUszVxk16zG1QuT6meI6Nd46Lh4iOAstYH7az0B/YWG5Stw3PmwbHmMV2NXJv7Hc1LOWQ+m1Fvxq/E1L4eRQfIVFVHyGhLiNBjxHyvXw2K33paBqBFdC3xxCB3murobYbwdAc7nxjGltNuKnhbW97G170ohcduJyjR4/i2c9+Nn71V38V11xzzb7L2d7exh/+4R/iQQ96EO5973vjwQ9+8IHrdsUVV+C5z30unHMbU/ydOHECz3nOczZSFgD85m/+Jr7v+74PP/iDP3jg55iZ8Z3f+Z34d//u3x24rOVyid/93d/FAx7wgAOVAwDHjh3D29/+duzs7Oy7jDNnzuDHf/zHca973Qs/8zM/g7e85S0HrtfrX/96fO3Xfi1+5Ed+BP/4j/94oLJOnTqFZzzjGXjwgx+MX/mVXzlw3b785S/jOc95Dp7whCfg85///IHLu+aaa3Cve90LH/3oR/ddBhHhmmuuwe7uLt74xjfiQx/6EBaLxYHqtbOzgxe+8IX4nu/5HrzmNa85UFkab37zm/HLv/zLBy7n3e9+N17wghfgd3/3dzdQK483velNePGLX7yRsq6//nr84R/+4UbKAoA3vvGNG7l/BV/4whfw0pe+FH/zN3+Diy666ED98Etf+lJcd911G6vbD/zAD+ATn/jERsp6wxvegP/0n/7TRsoCgHvd61743u/9Xjz5yU/eWJn7AfEhHF2ePHkSl1xyCf76Hz6Br/2K22Tr5PY7vXTY7bzqR/L9RW9i03ivx93TaaIhRq5mXjcmBSSjNaJqZzeoeHZbr+w513pD+DKofMQALqgRb3tRlGjFnkbMp1UpS0+087xeSdEn62Y2kRxC+M2sN10lpR+yfBJSRqmqq0GM/0AKjyiEmXyPxAensHtpgp7OW3yG5fx8HpxkNCPC2nUTA75cY8TjpuPpFk9Gp3oeQ7mDYvhTdS3i0Ycm4mMGs4qBYV2ibxVBOmSIWtU21eOr7yXpl63L9hmuYL9O6V7wxF/IiaIIcJ2zTsIh+RB0geAD0FjqhVXThiogXS85/86lu1AUGovO38PzYAnvHOP00gWVaCgnI53796CUWnp/y3ktO84Iv2XneuRfItuAZTAQy37yHfDrhEhctC7WWataa7BBedIYwrwxmDcmhTc1hO3GogmK6e3GYGZMDH1qiTC3JraDVc9KL2Rbpfuo3TerjKclaqel8z/K32Xn+qrKLt1bY4Sf7k+NIeycPoVv///cBSdOnMDx48fHK3hAyPvxgT//J2i2j52347Q7Z/BXP/4vL8g5TbgwkHvnhv/fx3Dx5VfVNwrjKDYNlg44u3Sx7zzSeJX5maXzSkBDXqWzPAdz5guxiKjwY86JqPJQzXYYl4XwnprEs038Xea2ywzWJpCEQhKUpFcgEDvV3wJ5/5LGINwzaAPj70m9voR+H+jxijXeaC/EKgFAt/Bj2M6TpSQhU+WayN/ScD6g8CudhjJCqtIG9fqrsWQc/yCFIS1z+rlKPYHB+gyNV0riKOMaB85DX8ch6HeOyZbnB6yNDVZhiPAr7538ODlhLO/JrK1VOwMYVnNplRZQv08G6hzrrb7rNAflthoyJpaxuUQH0fc0LXdj/d32xWDToGOgIYDaHdDinHcSiIUGRaBtsCCv9ls39Ftd+Z/GHrV7mFyb+p5SZVs+g0AKoxrqGu/p0P9w450Z
Ophe1JBWola4fO4jPY/OQy2OZjIva0xS+c1kvIv6Ofh61q9aL41AqfIr16m/vb6jbGv1XTtgZsevlFH2xwhtc/LkSdz5K66axlYTbtKQe+cwXFPnHIzpvw/2g5MnT+LDH/4w7ne/+22kvPOB//E//gee9KQnHZhIXC6XePrTn47f+I3fAAD82q/9Gp72tKftu7wzZ87g9a9/Pf7qr/4K973vffGkJz3pQPW7KWJ3dxfvfve74Zw7EDH81re+Fa985Suxvb2Nra0tfP/3fz/ufve777u8//N//k9UWV522WV41KMete+yNJxzYOaNqX02FfJ2d3cXf/EXf4FHPvKRG6iVr9eJEydw6aWXbqS8TWOTfdwtCadOncJiscBll1124LI++tGP4iu/8is3UKvDgfMZnnrd8cWhzukXc3JV1skkYavxk5+0woEWZ3PPUSAoAR3IJS/yISIlqUn6xh0iYGYMlsSeMHMADGGGNEnWBuF1oElCfSzJLaVzRtnCCFMikiCRIOwv14TfzJgQ6i55WafJe06USD4fOcfy9IaMLRmpxmqSu4Lwk9818qTWtCUp11sP6tVFq5OGzoGICzKRCxIwN9r4fcQrPgW+1C+grHo691F27h5DRN8qgq/EKv5fr11FrOyV8BvKazO4fUGOaWIsD6+bVQSN8blx9hJuUd8zzACBYVUBsk7IwEz96TwhKSFGrSF1PsONKGv0OWnyKYUNTt8llGZU2SmSyhN8/n5eBAZy0boeyec/bpD0s6HfmDcW1lB2XrFfcw6GDJYAZgzAORgnHY1vCxs7JwZxYP3Y527qOBjhBpqnRvIN9Q9Zf1m55kOG+c7l6mmdM7GW41DDkm9n7VhQ2+58gx2D99oJ7LH8CTdPUDsSmsi1oK5FN2tgCDg6C8Ze5mi8bh0nMss2oM6TbtHgLO8yLsIWkTIsB0WPD2Md9jXBeA2ATQMiT4Ix+sTfflAjmiRP2FiAJXk3SG4/ee8jlDc01NPjhpQPsUL4SVjPAZIzVaSuONoL4VfWL57g2PkX51PWiRHIKBnjSOddEglFmWOqvaFVfgTm99VEVVxPw+MXVtfKIY3XxiZrPFIXvQ2Qt2+N8NPVIqT3CI/cQ35jk5FOMCYn/lYZN7SSq1hVkjcyNjfhHOQ9p8fO+t2nFXilEjQjKMN9Ks+3Dc6ZOdnmvMJvdgQ820YHEyumibuqY8/IGBPxfHwdO5Z5jro/rQF1oe/xW6a2K5z0WJbL34GoHLqNDHmyjYhh4cdEFMg+f2pqzIm606UO6xmfH+0okRGTaxB+Us8NGMdqY7GO875jXci9R7TiuThPmMZWE27O2KQx/Pjx4zdpwg/AxtQXs9kMv/7rv46f/MmfxO/93u/hVa96Fe5973vv+/yPHTuGxz3ucXjc4x63kfrdFLG1tYUHPvCBBy7nm77pm/BN3/RNG6iRx0Me8hA85CEP2Vh5gk0TTZsiEba2tjZG+AG+XjdVwg/Y/HW4peDiiy/eWFm3JMIP2NyzehAcatIP8BOJqqEhQHL0idcmsQO1watUDCkIE3fTgJ3LXHzFaFCqkRz8JLdmJJFJlyMx/PpK6rxmNdKhZhh2zquVNHmhf68i/Kqqv4Lwkwm6qP5Kwk8mjzE/hBioIhGYG6yAnOSSaaY2jK0TZhTIjTOa6NHn6VU6/dCKsl5IGV0/qRtQMQDIckX4MSOGEixB8NfOq8UCAchp8qxJwU7qwJy1ERH5MigvV7bQy8t7btNE35DKr7fdgMpv7NKuS/iNEX2yTMg+oE+OCYbe66xMdTxgtXPF9TAAHCmCXG3rDVpetSLkWxvUdf4eksZM+1hDo2FHtWJRE35AUvUJSscAxxyJK60Q7BxHVd9um8i9RdsVxF84dlE/UfnJdwAp55QhWGOw7BiGGDN4tRys8QQlAc75fhEOIEPBaMjonH9+hRuUG2DoGS2JvqF7T1/aVcRbeS/5ds73rRF+WnFtDEWDoWw/lM9ywoSbKqLypkbG
SGhO+GdrTl7V4phwrnVonX82xeEA1itcuJkBXQtaEatQh+4URQzbWVTXsI7MAICMSSEDqiez3uRuP2ZWTTCUZNIqY7Ysln40kn9hDEmyjQ5Dv04EgGLZfgi/vfZYgwb74loBBfkH9JVDQNxHCLysSP2j1h4D+wH5eHQIJfEn+2nirxwb1LCKfNLjq1pd03XJHUi4ZABVG1eJvzHjxoAjmT/u8LJ4CKR3toyLyupV1YmxwMo97RwILSQiSy8CBhnwbBstDLrMgUgdc6Dtgfz6Z+NMAgx7Og/QDnsm1oVt0yf+BvrJqtIWGL0e4kTZAZ74g29Mqs0ziXpzMB3WU7dzRq6OOA30FH699XszlJVzZ0GNAFw1d5FrSkSRdAbycfiECRMm3Ni44x3viGc961l41rOehTNnztzY1ZkwYcKECRMON+n3pXMt/uGLu7jdxTNcMs+H/kcbg6MNYNl7pevwMbQ8myZAkk9iIEyLoAwNFFU8nHKLEQHE3vi/1RjMmDAzZjAUnHiF+/Jy47EcA8ZvY5gCARgMyQWZN0bupW3yY8t+OsRnFt5TKfyaLNxUXeGnFXT6aqwi14CcWNMqv5phv9xv0Kg2srysn66b1EV/l/CjOrQMijIAzryZiTiGjQRSPSmYUTRBasKsviQma+1Zw1jbrjMpHlLXaYNU3i7rEX61XH56+1oddNv7Y+Rkryb7lsHYq8m+rrTkAIDxy1qXjKydAzgQtID3ru7C8YiFnPbhQF0w+GmlpkBycBDS9eocsNMlkrgWTnZZGKoz4s7ly8eUZYB/jktP+67S9kLoibrPq/1cj/ATsk+UfM2IhVMr9zrn72XvEOFVlc4xliS6Cf/XGAZgovKRiDPSW9S7oq6pEb5+eSImNUlaa0dg2N4m/WcKnay+K6I1tmOlvy5RGmsvNJzjlXU8aPkTbp4wOycwu+Ef0F5yO/B2HiqCm23AzmPYOLADmQaWTAxNB6Q+j00DNHNwsw2iBbhdAKYBuRYcO1QxjHsDOxvrw3c224EwnCfiy9kUXs+1fbIIACEZ2SPJBa9uYTivHhwKbanInZLUk2gA+s4vFX3rkvzSNWiVTgpNnhN+OoRjPPessFzZ5897hOwL29UIP409hbEcIiR1vXTYw7Ft9aJVx9DLFAEmpI2MER0oEm4lQevKe7YgruTtVVP8DbXLkELOxd/1MVSJqBwHeWJKTru2sVZVAuPk6or7YKxeQrwA/fElqZtG3z8y/yAgzbdc6+9n5UTAjc/dmZ2TnQF25nN6zraxYBP7GVqDgC2d22oObSaM/wgh4kAYqfv7yHjyTzkdUNf6NorPZZ+0BhSRZpr+cxjqzhxIPHEQDXMO4nQ/xuYo+g09F9PpF6TPqIX1rLZRSfipkJ5+fYWczwoYID9ltYyj1DKDFNlhnb4m9rUqrUON9D3fmMZWEyZMWAfHjp2/MMATJkyYMGHCujjUpJ9jxondFse3rA8z1RgVrjJsxAiGopAPYijfRYkwgZFQL6SsBC4QfqL006oTP/8gwAKOCYDDDGPKNvLOuMyRnxDDvgGpwX+a2erlJbFXwxjZJ+t1SM9UM/EWpeitK+FUagq6tQmq2I4FqRSUdCXBOqTkOd/2dEadiCxJKUOJnCClBiQisI/PA6KkBpS6e/LWH0iTfaIMBBA9fHmfJ0tUJwT1Nap5PtcMUgcl/AYfgQpWEX5dRsAMEH76t3fjBuA9qJkBJk9M+W38hSAAzCGUZ/gNF+5tx0m9GQxD0fMf6X7RjgDrnnKNqCpJp3VDRMbwVCGUqTZsaaVe8pI36FwevE5UiJrsS2q+0G+EzmKMEOyfp+/roBR/1st1/bnGLfsnq4k+qb8O56rbbTDfnhZCqGfKqVOotbsoK8fy+JWQPnXChMMGcs7n21qe8yTc/GixgQG1O9GYzA08kRfeZ4B/fixCOGXT+HJcG0PmZQZiSsZwJorGcU1csezCJqlswn7Vp0wbrYeM0Wr5Qe2sSYlSf9eVXYEO
7UmK8CPkxMhgjrbaeYwRfsV2Y6RO1QA/4hQ3qnQql42pFYdQ7jNUhla8hePL+1CIP1Nx3Ko5Z5TE30HQi85QIQT1ciAdW0YnZXlyv1lCr12ZTCL+gGGitbJ8FQmZ6ldvnDHyJruXSgJb532kgqAKJKCE9FwVir6MzBKXY3gcqp10HJLqTxPHpNurCFWr7ypxD1ulnNOQvtOEMbuFJwHZF6jK9rC9+Rvy/gPY+7M2QOytJPzWRFkbTaZrIq+859VrJW4zYcKECRMmTJgwYcKE1TjUpJ/gH790DpYI33C7i/phM7Xhx7UxXwSAMCEymSd4zBvBYQpCiUhkBpaBpBLlVxl2E+QnMEIszIythqUsJ/1DiiXPVXiST+cFRJzwpRNeFTJT5+3ToT3TcgSlnzeieQWfhPL0fyVxfBlaRjA0JSwn2zo8Y/weyFPZ1nEi/GqnFvkciLoo1IkZzNSbLA+hNi3u5xnkXr0ES6cmoYyoUjIqJk9qIu6pxUploGyvw/OkbfJ6jk1+KdTHb5dvOJSfaBWht3L9GkTfkEFm1fYl4TeUv68kZqIhx1tyYALZ1Tnf5iyKP0Ig8vL21suk7Jnxz8OW9WrARZdy6p1bhvCeSgEnxFhWM7FxcQrhWyOuSrKvDO0J+GfaCHlpABuvO2NmvMLO59VzmDcGi9ZhqzHYDaE+543t5fLTxKoO6WkNxZx+/rtX7PlwwNJH9MPt6lCXHbNwsLEd8nyHxb6Va17Lcbiq7TRqfaC0pbSzrnv6Xi+vDJtcO9aFxArH/o2UP+HmCU+8NbBnvgDeOYX2sjtFw7uAFufimIrdUfBsC5bm6BAUwoZgnHw3oNlWUqy1ZjjvlSh9hCi0KtynjMucywlEoH9DShQHqbciEfWYT8Yg8byKd6WQRvEwI+2WlCj15YB+p6ecXACS0T7L3Ze+V8PyDZF9gFdKAlE9Gdsg7Foq0VYRNFFFWYuEUV5Hva7ceIA4qHaRZWjHsfVStiL+5P7okQZIjh57cUbqjdlWbK+L7juzeawa34LkHuWqE0kkSIpz7xF9axC/eZ2GG6ZsByBvi8H2UQQfdRK2tpiXhVzrUn+2oS8gg0V4548RsmOEX9pGje9VPZ0ao+hwn9H5E0n1F0ug4jmFH0+mhgntbYq5pt6EwvgtOFtZCmNSpfTrRwzJ+5BMIazTV6xQ+Y0p/EbJvpHnWPcpZajV8rbqgDhvAxDbu0b8lZBzvtCYxlYTJkyYMGHChAkTDgsONelnyJNh243BVjNAN0loE8kPAYCCqoWNzYxLGcqQQUie2B1zMtwg5O7T+xLByuHVhL7mZSoEkiEDZm8gcxQM2ynBFWwgF0qf9pQ7qlQG9lEq+/z3pO4TYz35U/AKv0B6GDXhLImoVSgn2wwhz5RSUqnpklEkD+lZntmqwzv48DhelUVgSpN6rezRhBZnv/skltRL1qfzyrcoDYWaDBTCrSQFxQQZPf5JKwNzVaAg5ghU0D9NvD/8sdY1UtU8zzXhV12v2qp2n/eONWCQSXXZgzVuBTplMHPMcB1jZo2qp79XmIAy36K0ioS0kpxt1hB2TMrfJ89364ZrXhof94qx59tGmY3OJ2rCdTPhfjOw5MnIReuios+Tf6Yg/dJVEVWfEH1bgegT0m9mDGY2hQeeGa8klr8lsaa/i4f9UH7DVUSfrFsKMaiWlY4Z5fHl+dBhjuVmNEUdgfyZrqmsSyJRh06eMOHQgCgjEHrgoATUKp2uRUfzmNOPw7PnQpg8P9aygAqV1yPkbJPGZKVRucxXxgYgVoQhpzrLLopMzAixItRlrefRYTuBNGbJmqmyHyM3dOdjJnmfIyrFZb0lZATbSsIvVsLkhnut7tPnr8+lXlKvvjGEKhBzmdWINoLzx1TL5DirjrkWWVRiXaWfithRhvn0rjBYS5l5MH1THzraRelQFY8ZxyupXWRaYLFHrCD8+uO+/tisbIPyWo2qQzXWZTQiWdagU/2JbqayTmP1H3JcK78P
Kf4AZPcRqTklA4BV6spKYuka4Vfel0L8SV9jA/mFXltTtQ+JhN8KsrwXOUTXbUAZvBYqoXVL1J43ua9XHaUccsVznjBhwoQJEyZMmDBhQhWHmvSz5A2vW43B0ZmN3oFAmgiINykTpcmB5IExs9wgFNc7by9Q3sJDXoZiBM4NBNq47WsjoStLL1QbJns+v5g32Ldxlq2Jv1xpVBqR47oRJk4bpP1viuo+WV7mR4i2JMrPKy4vjiHhWuR7OtdEqq0K6wn0PUNrBiOuHF8mjxSIGWZEss/nWwztxNqoUiGzKscr66WPWUKTA1rlV9smkXlyHbinMCP0w4OmUhV5qA4jOelqJOCqcxsi+8rt4jYDhN8QR7WK8NskMiNOUPsBgFFWWe9JrfLKheuhFZEMF+7dYGwgTyQyfLhfv01S7Q4RPULNCeGmc3uOnsce4s9Fgj+eYnhaLLAMJzVvEskndZFcf2GvrExrfM4YHeZzbk3MAToLy2bGBLIrkV4ZqaYgZOzQ+a8i+/q/RS09TvoByTseAGY+cWNUMfs2lO3WaHD0Cb8bG5Jr8nyWP+FmCiHjdD65ApIrKubbY6dCG/vnEo7SuKcg2npknOTZC4QfG5seQoUydGE1RKGUOUT4KYyFt1y3y9VjCenfy3etDoGunQ5SKPo1CT8595qSawXhV3uHR0IJxXiqVhd9fL0dGUB4toGQkeXYVysqdXtvLHRfQQCvIv58vfzfMcKvRkzqOpf3jDizpUgR3GuPcoxFyMcta6NCeGbr9N9eHfzfWu41+b2KlKndP73jA1W1aNxH/ySDjkMob6BK+JX3y14JvxrEOZAoEX++vJFwn6rcTIBcI9CGVHJB8efCOGSMjC4JPz9urfQhq6DrUhJ+q0i/IVKX6jk1y9C65T2uSVZ9XWtkXzzWjSCLm8ZWEyZMmDBhwoQJEw4LDjXpd6ujDS66eI5ZMEaLMT6bILDzyr5uAeoWoHaZG01KQ1TYx1sC0qQ5hhshn2vBGT+RJBMzf2UGC1+PZAhiUKH68yYWZu+1a8DomFKUURDQOcAEZZqTHHDjRgBLhcKsgpoxQf8Wokm3oyfTPHEmZJpDQUQWSjY5lj+f5NUshnpfhif89KRejPfSVnI6pRLImhDCk2Vy79vUAOjA4A5p0h6yY+jrAnDmdVrWVbyKdcjRdVBuJvsNKZnS+fi/nlDqqyo9EZiMZZlndTQEJFJQ5wyUcpPKML/+JenC2br+uQDDRF+5T9omX1iSwnq/oVx+Y9gr0eLSw5jKUP1IeUQh9DpmnF26SDTNrMG2zY0i0Vbt1H2qIPno4m/m+IxndcvKTEpeCQdqSF1gE252CJEIwALGAZZsJMksKcLMyfPIsV66jtImun1EEWwNPMFHfvmWTWSfVvoJ4TcL5cxscjqoYShHX8eJUF06ju0v3yPhxxzat96e+t4X4m/mOH6PhKUQkshVfTUSsFdmRVU9YcKhgQ6351rMPv+PcNsXo7vk9ml514LEecc04Nk22l3fbyydfz+4OD5QJJxpwMaCWBF7ZMB2Hv7O4rirJJCEaPQ/DJicV9zYxof+1O8qIbyCupCtGu4qMmzVe0UImyHHhCy8eUUdLMtJORRIKE9AESBlOD6gT/hJ3eM5yos/hPJUoVDHyL6cGOmPJyJxIO0t42hZJ3/lOmlPL0rXR44dwzNL8ez/02SbEBjZOHpIaboXqDCfJQkBaEInJ/uAnOSS+gH5uKzEkMKpWjVFAmbLw98YmaI4luRAGyVIBwg/fS9k37lyjXRxcSytlql6ZURMDUOkMRmv7pXvojJ2LcjOe+PvUuGWHWLgHMpwnnLONSQCOg/1CaCX589vbxL5F56bveRQLO95hxQpprwzSqKc1N89E36a0KuRfbqfqe2uy6g5AsA/v1XiL8y79jo26t1jNxLpN2HChAkTJkyYMGHCYcGhJv22rMGRxsRQe9nk03mFH3VLxJwRehIkxoT46U8+qJi8ERBzgeicCzJx1wZe
IW18OUFJBA6qM8Scf444m8yC018pa1Wuvpw8CMscV8PPaUiOLSm/TwTVDdyMujGiZhTLjQrJwKEJPz2pFyVVuX+NMBOFkhB/RAghQSleJ8nv14EBRzkZBkQjYa+eqm578bqskTuCkuTRsERwXboOkt+MQgUZiOEnhUJK9w1FQ0wXiEwwwrUtvNapfnyNVWrGVXn71iH8avvvQcjWgzHp8baV5yYLJ+n89qXKTIeYLBWvQCL8nPPP19K58Ly57NmfqYemVk55L+tcfjVkIXxDec5xQfwBodNQYX69sd1aCmSZ1M8TgY4pKgCFAJwZdZ+qGGJRHRxuNE325aEsE+FXU/glo/d6xp5SzRfDolUIv2XISajJP9/evqx0nZGdi1xDF85lBn+Pz6wBEO6R0J/a7FnKz+GmoO7TcC4pHs9X+RNuAWAHandBiwa0ezqOnTQZxXYGtnN03Hpljh/cRFU1C/EHKELKJcLP5GRVpvAbCy1HBuBObae+l+UVJIgQYsDqd0/pgIR0Nv57j3yoOFIh9T3aoWdMUTcY0hNALxeXtJkQcUjETo3sq6E6ZOS8Tr3QgWQC+etGr1XNcYpZkUkFobKS0DqAsV+TVVr1J/UE+oSf3hdYUb8RSBPsYVgZEYmo8n5T9crapkb89cpcTfjJsj294ta5Ppo0qqiAs03l2RkoKj7LveXrE379Mv34Sp97SbhG1V95TnJeteUFchJRHb+40JmjHwrCby8oCL/eMtWHAP32ik6wZbnl+QbCXZ8fBU9L7XAZy1Xlo/heJfzKY14gTGOrCRMmTJgwYcKECYcFh5r0OzYzuGTLW6YNAQ0BklvGLM4B7Q7MzikA3igVIZMb50DU+smL9mwXb3QyyWs5HMMaQkiN5cvl5MleGnkEiewKZERQ1zABcD7/loR14TCZcgiEgcvJixrBF9dplQ5zNvs1huL2ltJkNhIlwcDfOQYZT+ZFRR95csybwJEp/GrEkkZqH/TUfVqJw9FQnxvYood4xQohJI0m/rpAlAlJ1oW2IPKEmL824x7S2kBWM4JQUA3KRDbzVkefqEznz5l6SWMZ9pK8bDIPn6lcahQIvTQJDufBZQ46f45yvslrXoUUHcmEUctjWLZBdb91DSkr1vdyOY6Uqwk+bSd2bj0Spsz1N+R53BXPljGELdPPrGNJK+Eoa2V9GlpdVzufGjJjUPg+Q3qunWPMQv1gKJJlADAz1At/WVPCDalRhbzMQgOTUsWRD5Ep5GeZz64WSriGnvqR8/Cd8n3pGMvOVcm+ZcfZMzd2TnKtHDNmrPMeijOEfx71ddFtEJcVHcq6IUEnTLhJgtmPg1xQ83UdzOIM6Av/mEg7O/fjKtNgOTuKkzsdFp1/Nndb53OmQvV77Ek+dgQ086hwkxx+XvXXJNVJaSQvSTHbAA5eZWaaQJoFpVAkE21Sv4kaLhRXI8QovgvSsjgGUPsl8q7+oNcUY0SUDPWa2JOx51j+vsKYXubwqyn8Oq6fRwnD3Ce1hNxzqj5O5fTTpJIQf6YBuxakVUPSZsjHgqyOpdVUQvxZRfyROsa6JErWbmo/ISCAnGTR5IpRbV9TVsn3rL3kcMXhNRGVOU7J8mxsn+8rxMoqxPtxiOyr/C7v/bKOQ/kFZbty5NMjnvZKxEhIXyLfF4T7mSFtPUx+xfPhfgjQWjjPsp2T81y+Xq63Jv5KxZ9sp1V//doOQ7ZL55nXoR5VIN+3JOUHQ6fWCOB95P4k9NuoB6lTRvxpItET7Tq3up671I4Zyy3OefB8J0yYMGHChAkTJkyYcLhJPx/KkjE3BAsHahc+5FS3BLU7QLtIE4N2F+7IJeBmG7RzEhKuKHorqolDCjXjjTTiha6NKNHgu2JC6jhtkwwvfva4SsE3BG3k97/r5chyUar0K5jW19R+WjXn/bi9AdzzTBSVdZSK6kFPIDOCz+kcJ4noYR4mzUoMhWfkSBxyJLo0UQYopd8IWZkfS59T+pF7sPaJP4G+RlqF
NHROJihPnXNexcaJVBYySeego7A9x/NM5Xsv5dyCxDRwwXr1WWuzKmqqApet7x+nDGdUU1gIdC68Ut1XSQXVQ0mwrRtqyIpso1gmpNbMJsOPJoRr5ZT1LdWKY3VIz2zYd4AIBIDO5AQarA+RCehwmhyteUNKCyH55Lh1cm+c7KupHwHE/Ia6TnFdeGYS+ZdIvqq6ryAxNfFXO/6qa1+ekywDVof7nDDh0IEI1C3BXQcOYR3ZdYCxQNMPk9kQsN0YnF06LAGc2Glx0ZbFRfNED0i+PmoAMIO1YVjKku2GMGLQ9uO01of6DERiFlIUfWN2qeSX7zXDs2w/5jA0VHNxNCpD8ZW5+wAkwq92rhXCL6oYC6N9qeAq30H76qIGlH7pux2uu+xSfK8528Rhc6ysyY81Vr8V68tQn0LEAuLEVR/Pl4RfrflqYz+NsWgYY/tIVI2VziQ1BeQK1VZJCNfquKf8gmvm6wMQcq2b+L1U+RLy66OXA/n8onaPlxgi/OR7T7GLPvGnlalyr6TtChXcOsQ0+vfNWB9TJcDC3xQqeAU5XpJ9SP1uHDsOtGV5//eeVU3GqWg5/jqmukgYU93/ls4Hq9S0E+E3YcKECRMmTJgwYcI4DjfpZ4LSyxJosZNCebY7Pndf8EymkB8Cdg63fTFMtwAtdwC3BLkw2VKTB5mAkfF5YkoMeTfWluuooR17laCEYBziXbyCK1c7laoXWQYMk2I1A7oOVdcFYswXQoihuOBnrdrIkIVfAuCIYThX+dU8VUXZF79z7pGbziERfqtCYpaESOYt3fWNE5KnDcjJML3ZfkM1yb7rkGM6ZxswnnvRK9DC9y5cHiNtRRl54g0Rcu08hAQ05AOe6fM2K469FwwZR/eCA4X1VCo9YJwsG7p/avfLEDzhx9AuymKjEtWYrKlVZVVexyGyUkKSZtuq3qZUNpaKv0QiM7pwH82UElAIvzFHhBrZlSn/qsRYahfdrr1+Eqk/7597SZjnyzThJypqwLf1yvZe4/7V7VqGKy1z+e3XkeN8gB2Dz2OYqPNZ9oQbGWTAy4Un+rQB2XQw1ubR0IMicMs23ummZZxddthqTK4kYsBKBIUyT1wsvwgxN6RgIW+09hE9VWhApXhjIv9bEX4l8VESHsOhA/v3ei8vcmU/nX9XCIyVhN9eDNmF8V6fZ43w06GsayRHVveBfFn6OnChLiNmsHMpdvIG0CMVxrAX9R9y4o+Rk39Af1w4RviV2+2lezxvXWlB+Ak0WRaXlb+liPDXpzA4+Hiv9xxjIIxtuE7rtGWWo5Ilp/H4TtVDqmeiJPDGQn0CWhlYEH9rYu1ta2Et1+0zjCL4BvqN0hGiRKlwzJ4RTfiFQSuxy+8l8iNXG9qoFsJ0tC1uAkTfNLaaMGHChAkTJkyYcFhwqEk/wBuKFw6ws6OwMxfIvBagNoYFYjKAnYPaHZid4PW9fRxutgW0C2946hYp759zILSB8POey8QODRlIENDSYxtAsLTkE1o9ebGmCd6LPjykNnflYaT8gH9o4mNMUu7pEH7xODRszI5GaTmYUT6mQvx1DoYIjfEhIhnI8snpcDh69xpq3uai7ANSvi1N9o3lvhtaFo/npLT+9kMGC03aAIrICLPZIRJnXcNOWV99zUS5VIMj37jGEJZgWCfHTQoqqXe/zknlKHXVoDVNDL39emwNDxpKs3PJd/HLxPCqrtdQWE99m2nySO6ZdcibtQieCuFXhvYE+gRcjdgC0jmUV1iMUuuGIK0dL/tt+wToTF3jpOZLv4dygZb71I5ZqtxqRJ8sB9ALdVo9TyeODSFvon5OxOGBE/E3ppiVY9Z+p2tVz03Y2H5uwpkxVQVjiRiqTrXzhAmHDmHs4059GbxzJiP++FaXw158K7hmCzzbBhuLc2xxaqcL4baB2xydAwDOLjvoYSYDIDLg2bY/hg6r3rU+UkNQ+8SIC1q9wg4SpjOrrp15FaKE8DQWaOZg0/iw4sjHInrMpIkw
Hb4PyN9bfhv/t+zLhkLT6e0tATHXtA6dKcTnWI6qWrhGqhN+HeeOVfoc9TmX78N1SQcmE4mbMu91LIsdmB1MiJDhHcUAcP4uX4WMQFlTNeUruT/Fn6AWvrMaVlFDKZr22uuXY0l9rNq4RZzwEN/5ap+inXpEt163hspPk1dyyDKvXX+nPRAzgfir5fGT6yTjDF07eYb1vR5z/mr1bjy3NaqyB5bOwd/XEhZYX4OSNF5Z7MC9VG3HCtk3qnhTA5V1yL4hxWR07mSuO/uVjgxAnOBlfTo77/wh5Q5Vu7YiUxWbPT9nEyZMmDBhwoQJEybc0nDoST+ZznQMgAxsMwe1c0/2tYtqonE2FhIOiowLpJb3QCcs/HZ6AirezOx1bqzK6oU7Ko5VTmJrkxQ9wUq5PvJtk8E9J/nMmuqS0sijc5mVij/J7yeLHRCUh17l1yEQfsooX5sE1sg+X2Q9lOcqsm/w3IrmrykgO0558zS8Ic634xIcjPmyf06mcTAMUTGx7523rtsoQdlfpw2QdYI2HaHrlPIsFqDOTZGW6TrValkUr9BxThDquzm2Q2VdURV/xMp9XobzrG0bj4d+rcdUeSVK1dde9o37VJ63/Xq/Z3ly1lDYje0PKFKr6PV02EwXFKH+lkk5PaORW+1quCi/R3onEg2ok+frtDGjUPcOPc978H6W66SJvlSnRNzNbCL7JC+hhC3V62bh5D351yc1V/Vdq1QH5wXn2Rv9/MlTJtzoMA2y3lbHH3YO3AWiKqjrWhfybDpv5+2YMbOE7cb6PKeGCmLDwJg8FCEBiuADdD7ltFGF+JGQgE5lozUpxGcZYQBIz2NpeI6vXNVtRScn5ATIWMjDkjTqdYOFwm+U8NsDckIn1XNwe31CFwClAmsvh163tyFgnDSp1ivvznqk7irCT5YV9+c6jmFyX1WVUysg2w9ybyP75WFt19tvCAe+jYp20+owvb4cA+73FbTp17HOB1pdj5H2WUXsDS3fL+EHrCT8qspmpIeAFeEc++2KcjmD9Okqzx+w3v3ea79aGNsLiWlsNWHCWlgsFrDWwtoyE+yECRMmTJgw4ULhUJN+DeWG+9Yx2DRojlwCWsxAdhfoFnEywqaJHuBwDmb3DNjOvNd5M/ee5gIJDyXehDJBQTEeV2GnyhwJWVnwBEonOalcmGxBVCtJAdcVc5l+SEEf0hRdCgOpiUBN4pnCOK3RMUdmxiqLviabbHCzljK1ui8jgyqxSktiR4g+qV2p7Osp4lxfUdU7hsv3S2UlkiDP9ZXvr4mMMicZ4I38QD9M4Z6NLK6uTBoK0eqKczJE6LpA1BqK111CNyYjZVKFLd3e8tYNhV8kdW2TkdTfF3rCXgRqK84n/63vib1gzD4wxr0xA82IdWGsGrFdlNKytk25tGZYGiLBSqIOqIcCLffPr1O1aJiMraOe+nDVc7gKJQFW1rNWrdqxReEnz8my49736vlROoqx6Z4SpaPUaxbqNLOmSvJpVZ/frkL6qXyN+flQdj4wHMnLG4XwmzDhAHDzY4A7DXPxpcCx46BmBnYdeLEDmm/7jcK4Z2m3sdjt0DHj1KLF6d0ON5xZ4B5XXITbHLE40lAIFQkwTMwlaokwNw3gWlAnCjgGuANTyCAs4To5DAaKwZGE+GQ790EZRDkYnLpkzBWjKASDthi6c3D2VfdnQlatQ2zUcsARfN1iNIkY4pPr5J+gULbEZUUuPzmfGrlZnua65AyTidEysvoEdd+Qyq88FsE7hRlmH0yiqEFUEKHuPFYSYavGDFn7r1JMqWgcK/OH6f2GMJB3UJSOBrnTUEmC1hSGQP5uXxUhoNY8NZUf0HfMGtp/DPH6jOQT9AWneRLJ8+ywuj3lrxB/hcJrSJl2UKyj0hO1HyDPnYw5kIX6BAaIvzUUqWPLMsJPT5hGyD7JTw/0VcG6vyiVpzFsqjrn5IfoYjjgnnJZlJwcWqcISZw9p1JcrSnknMtt96IAnjBhwgXFiRMn
8LKXvQzPe97zbuyqTJgwYcKECbdYHGrSD8gnJtHwTgbczAHbgHHMh4zqlp7wk9BQxoDZeqOShBoxTTb5iAYnBYIiimoTXGWEEeiwdD7PFCLx13Z+wiUh62rqt9LYLYYCYwhwYgToE39CCsk+GrVQfXFb9FVmVs1ck2qsTwYJhsI0lueW6qtIroF57BBK9VWN8KvlQgSAJXKl0hKMGZNa7kOd6pyHkQwdQXl+m4ImahFIit5xlExB/KWlDUdDoyplV03h6MsTMAz1lYBDGArhGdevaKpVYrqV3sKV9aVn/6o6CGmYiM/iEJVjDJWpN60ag8LxhtbVjrWuQkCuoZyzHMeCclXgGvfvqnyI5Xlq6GekJPxqOSetMpBqog/G2/tNvDn7ddJqPk/urRe+U4j+xlByeKhAOzTUsKkcmnuBY67nStpg+RNupjCE6IrQzOEuua1f7hw4GG1p9wzQ7qAJz0TngLNLh6VzODIz2G4IR2bK2AyAOJHv0dgb8vyRM+AwViMYsGt9Tj7Ab+Pa3OAr+cokzCUAUmM4TfiVqrfyeSWq9E9rknypjP7WWRj40H4rCT9NHA2QSDVEY738HXg+9TtlLZE6GYC7ROrosPmyvlZHlZONAR8OMhB/JfTeNeVadcwwUF0dWnHl6WkyaaBMKrcf+r0H8iEM3TMVKQaOr/cZgiax96IUNOgTf0PtsCes0xalpF8vNxVSWV2rvTqLDc1HhlAj/NYJD69RkmfVPH9jirW9EH5Ab6KUPZ/yV81NGXXCL+U7RRbVRIg/mS8Joa/DepLr8j4t1tUfn9lBIrlWL0Nx39Sucy+M7Y1A+k1jqwkT1kPTNPjpn/5pfMd3fAe+7uu+bk/7tm2LV73qVei6DsePH88+d77znXHkyJED12+VSnvChAkTDgM+9KEP4R/+4R9w9OhRHDlyJP697LLLcNvb3vbA5U995eHHoSf9gHQjWj3Jt3M4BLWTBSy3cSISlXnNPCf6yIDn/SbJbnGdg0Yr/ML+4kWpJ3wd+zx4OhdV54ClU3mqxN6Dem67eFprhvMEEvFXK2MMQrRlRFyXh7ISMghAFj5SUKvhOsq+sfrsFSXhJ+1fHruXx8d5MsCHQvTh/JKrdthGtUctx1xWh+K8NAG5LmqhxFzmAs+j23ZjMjwEQkWVYYyf3JfnOUYAjtc//72XOe06BFeNeByrk1PPRiQkaT1jUq3YVUqEsebRBlhtaCn3GzufdXM0+nLUvSJfwiIhAPOVwxgjv4cIv1LdB6BK+HWxnyiOSRQl194BgnrnFbcDMqJP1H1C7olyb2bGiT4K+YvKe5FZDGLeeGaDNVcreDuejDgTDhnIgKwFrAUTobv4tshy6bHD7OyHgQXCmIjQOsZu26Fj4OJ5g21rcKQYgNTGI2V+OuqWkFDqQi55RZ/PB5W9UMM+EqbOqu9Lx9VQdd7Y7b9X36tq83WM//qnqX3XYeDjGHSgPygJQE20lZsqJ7NkqOfwN53rgRDUlUwUDfc9aGe3gbElQYguiuOsbL3qq9dBSajofYVYWRX6MhWWE3+6/MF99xBaUJyjBJpU86+y5Myyl+m0g2Qc3zw2Qvz1Cq0QXEOkqR70D9z/ZYjSGvZyDmPTIv3MZ8/7wPZjYVp7xF9vg3GyD6gQfuXmI4SfDulZI/wyJ00ZGpfjbb1A+uSQr3Q85YWwiBXir9LXVeeRnPqT6DgyYcKEmyRmsxmWyyWe9KQn4W/+5m8wn8/X3rdpGjz0oQ/F4x//eLzjHe+Iyx/1qEfh937v9w5Ur9OnT+O3f/u38a53vQsve9nLDlSW4Nd+7dfw1Kc+1YeuvwlhuVzila98Jb71W78Vd7zjHTdSZtd1GwvZevLkSVx00UU3uXabcDBskiS6/vrrcac73WkjZf3RH/0Rrr32WlxxxRUbKW+5XGI2mx24HGbGhz70Iezs7OA+97nPnvf/qq/6Kvz2b/82XvjCF8ax8ZEjR/D2t7/9wKTfH/7hH2KxWOC7
v/u7D1SOxmKx2NP7YAg33HAD3vWud+G+973vxq7piRMncNFFFx2oj2Nm/P3f/z1e97rX4b73vS8e8YhHbKRuB8Hh7mHZ+dBsI5aC3c7hXBtCe9o5YEMYT+f8971OGNiBljswZ78EszjnJ1/BW10mVULcLTrGbuew6Bg7rcNOy9htJfcNxzw4rfosO+8pv3QOXQhr55Sdq0b4mWCkngWlijasibFBPDQljJ3+lNDHXXYptJ7Ua+kcdtrO17VzWFQ+y9pHnZc29uvz652bGSf8yjxi/ntSLw6FNXUufVL90rVxgZBdOr3OZe3hWEIPpnN0anmuPsyvm9SvFhpxCJos1XnOpK76o89lnc9O57CjrtEy3LPLTl33cI5yr0o4MSGtxz7MiSARo8PYR6O85ZMh1v+LCi4CrFEfGv7MTPo0huJnpj7yvMTnJhxDwkHqZXmo21RXov4nq6P6SP31cfSHRv6tAw7/qm2pCC0KH2to5UdvX35QXE+vcu4/H2WIXum/5NmYmZBfr7g+W43JPkdnNn4umlscmRlcNLe4aKvBxXO/7OJ5E9Y34bvBxXOLi7caHJun/bcbg62GsG0JW9arlua2f380hsK9RoMG5r0S/JsCs887c94+E5F5swU32z70uZ35cVJvAwc+exJgh9OdQeeAuSUsO8Zu63B22e05TLDZOZVCrtsZ2NiYl8+xN2SznadBQVAItpyiKSydD/O+6NL4SvdBYtgGcjIwezex7qvSfrWzEf+DcmJL+m9U9lUUfbXvB0St2UtV+56gw9bH0KJN/leFw88r47K2kPF6+UnvuLSdfLLioAiLQPrIR5bJdnqfleP8ov31OyzbZqyNCpR19/eJH+sR8nevjOHz/fvLelVSjmxDGLr2+p6NIVb1MuTtYIjSGCG874D+eY629di6Vfd/JYrKprBq+D3ktFPWWL8Ty7Hu0P25V4wRflr1XD6zmvDrnDw3fcJPz2Hquf3CXwJSyOLWOzLId/XRZCC5LqkBOx/WmSpEoVN9elfMVTo138lCl15ATGOrCTc1vP71r8dTnvKUjZX3kIc8BB/+8IcPXE7TNHjUox6FH/zBH8TZs2f3vP/VV1+Nt771rXjuc58bl+3u7qJp9qdZuO666/AjP/IjuP3tb48f+IEfwO/+7u/iox/96L7KErRti2c84xl4+tOfjp/92Z89UFmCj3/843jSk56Ez3/+8wcuazab4corr8Tznve8jfQtf/u3f4tv/uZvxmc/+9kDlwUAzjn80i/90kbKAoDXve51eOpTn7qx8t7znvds7Fxf97rX4WlPe9pGygKAa6+9Fv/wD/9w4HLe+ta34oEPfCBuuOGGA5f1iU98At/1Xd+FZz7zmQcua7FY4HnPex4e8pCH4FOf+tSBywOA293udnjuc5+7kWfhbW9724HbbbFY4Jd+6Zdw97vfHddccw1e/epX76uc2WyGn//5n8eb3/xmfMVXfAUA4Ny5cwcirs6cOYOnPvWpePSjH40XvehF+Pf//t/vuyyNv/7rv8aDH/zgjdxvV1xxBd73vvdtJIT03/7t3+KRj3wkrr76anzpS1/aVxm7u7t44QtfiLvd7W645z3viZ/6qZ/CG97whgPXbRM49Eo/QiK2tDetGI5lkrUIK4kI89l23J7V9tpQPToHLD1SbT6pAvxk0AVFn6g9ujjJSsQNMwKBkgidIQKsVI+kc6W4XkJ/luE+1w1Noz3fM6NdNSbT/jrMofNbB5rck/oZEy5DEZZwHVWk9sh1jiMZF8NoGu/znFR1QQHnEM+/ppzUhEb/HDYf7m9jxEJFOaikkMVB09dV4U5L1PLV6XJk7VCpQnRpwxmQezGUhtghNZ6FMtZQOi1LuQFVhz8t66G/c1A+uuz5GziRNbEXJZ9GP6PTiuOocz5glfeE8lm14TrIct8n+VymHQd1hOO4rUYi1MNvSnn5dOjOmTExJLQ1yNR8YshPv/ODOHA/v2uBMdJ/woSbNnxOPAmx
R8udGGqTAwlIzdw7U8E/P404ETlxEmKc6xjbdo+9l22S4VpCrwvI1wsIxmtF/ghkrCVGbDm4Hp8NYYgwM1kZfXUgsEYf7waIvX3kiKsWn72r8p6/VjdN4FRRKLO82o/7ipyyfsb08mzJ+7AyulhbzS4YuoZyD7jQj8v1jmWWSrNa267b3mNhGfVmCPMJoqqBoT9OoCrRshdkY9o1iipDfK6qw1p341C419p2q65JBeu00Pl645bRGPrrD+DdPqaEXLVrJfRx+V23iVPLYl+5AilHvHpuXR6qmCoEeq9orjgIqPqkOuXhmQGEgCs+f2K17AkTboF42MMehnvf+94bK+83f/M3cZe73OXA5WxtbeEP/uAPDqT4mc1meOELX4iHPvSheOYzn4nf+q3f2lc5J0+exJvf/Gbs7Ozgbne7G97//vfj6NGjuM1tbrPvun3pS1/CYx/7WLzxjW8E4EmATajg7nznO+NXfuVXcOzYsQOVI/iX//Jf4uEPf/iBlVdf+tKX8KY3vQnf+Z3fuZF6AcCll16K//Af/sPGyvu2b/s23O9+99tYeV/5lV+Jf/qnf9pIiMSHP/zhuP/977+BWnn81m/9Fu5617seuJxv+qZvwutf/3ocP378wGXd8Y53xO/93u8dmND5/Oc/jx/+4R/GBz/4QdzxjnfE6dOnD1w3ALjvfe+L//7f//uBn4VPfvKTeMtb3oJHPepRBypnPp/jmc98Jh74wAfiL/7iL3DNNdccqLyHPvSh+Nu//Vs87WlPwzXXXLPnsMqCd7/73XjCE54QSeWPfvSj+PEf//ED1Q3wjheveMUr8JCHPARt267eYQ38xE/8xL6cSkrc6173wite8Qq85S1vwSWXXLKvMra2tvDsZz8bD3rQg/Dnf/7n+LM/+zPc7W53O3DdNgHiQ+hSdvLkSVxyySX41D9/BpccP64Uc8nYcdHMYG4JJxcOXViXvFn9RGRmKU6KL5qZeuipMXQL0HIHPD8KmCaSiI6BRefQsScbJZxnGeZTE35lTitgODznGGFUy1sHrA7pqRV/Q+0wlhdwv+iYq+VodV+Wf7AXiis3rJdhAp3joC6U79LWrMqQsuXYScUlysgU/o9iTqI8L2Je/35Iz0TY+t+yXZ8YWGX0qRkcyzbshU5d4zHPwrfK5N6kPGd63dD1GarfuvUoQ8gCSjVBucrPb99XW5REINAnbQBk4UwFOckny8SY2Ed5OqWRTUyv2uy9VyLuINgEF1y7ZONGr+K3+j4U4remZC6fFb/divtHEX613Hy5qsSr9Pz2OcFHVBi2ivMRx42oCgq/W+d/77YuEh/LzuHUqZN4zDd8FU6cOLGRQfUY5P14r3//e7Bbm5kw1tDtnsHf/n//9QU5pwkXBnLv3HD9R3Aru1QKjTQw745fBXfkEpjTnwPsHMvtS7DbeXXdh79wDmeXHSwBFwVV7V0unWNrj4Orlr1a3RZ9OpDGJ5169qQ/is+iS71so3bO+3cOZVNvmWxmKfUF0vVEJbKhzDkA6BNYBPhxogqBFw3kQF/th1zJw4Vax5Oe1v8N+aglpHwXjOQSTrjWU0r9RLWVOTfoOiv1Tax7qVTMClaKG2N69V73NVQjBYFcIaVJ3rImMizR5wQM5OFelY+vRgiuA3W+oqwq6yyKKn1OwPi7Nl0jypyc9LgnXd+0f39Mki8XYiWuH7lYQ/cMoO6bIRRtH++rcl15HwGQ0MKswvfWSKHy+RXoNijPrzaWqT3PfnlaUnM0S+v0nCUvq1f20HM10Jal0q8k2EsyXtbrdksOqEnlJ2Mz3Tz6fpMxVRPmBBYO1C2CYm+ZVH9lf0FBmS0OG6IQBkb7Mj9vzvMMSvsRgmKYCKdPncTtr7pyGltNuElD7p3pmh4Mn/70p3HVVVdtJGxg27b4yEc+giuvvBK3utWt9rz/uXPn8LM/+7M4c+YM7nCHO8TPve99743kG5wwYcKE/YKZ8elPfxq3v/3t97zv7/zO7+AX
fuEX0DQNtre3ceTIERw5cgQ//MM/jG/5lm85D7W9eWN3dxdbW1vnrfx1xxeHWumnQzhp7+6OGaeXDrQEthsDZuDkoo2TvSMNoQlWgC6Qb8wuTuJ8SLc1BxTiqcoOgEkhh4BANqY66ZCIEuZuLKdVnrNtPUTyrgiZt0pp0nXJwCZTypIILI1kuTJnnHQaPXZB/NUIpRoZBAAWlKvGDCtVom8/w94fNAr3QFXCZ12UxJUo/6TeY0rGUolpDG1EpVdr77Keq9WFtfOSezBde6+CFFKwf/3WIRhX3R+G/HW1hjLlrX4khKQBCsNXNMYE0nCIxK4o95zKCye3oQQo07mI5BJL2bKq9NwXrCL6NiXUzI65hzLH5k9jRrESjvPtmXNPb2v8dU3PgVws6fRS35KeedXPVPrlmHOS0j0ZQ4PalKuppuazhgaJPilXqx9r97YL51326y44FzheHYJtwoSbGpgIpIy3LMZadqB2Bzw7Am7mlZybXkXrQ+UaOAQHJknktAcsQrLjuU37lcb+jhnSbXNYpqvUd8zgGDpRP89j762MNMA+VdBjRv0K4XeTgjGBrRq4hmLYR0FUyuo9Hq5G/q3ThfpRuNwfiZiK12xNhV4PB7wuhvw4Qog/eeeU95x+18jvEo7rjnL5Nvk4SS8vySedZ9ENHDOeB3KSu4ex9u0pR42/RVaF/hhSlG4IY8q9dZ51uecuFJhMVGD3UMvlh9R/1cYhQ2OT0oEBSI53PrSnIvd0Pr+uzXKWMpx6MBXBqchdqSsDQcEtDrI5Qe7PP5Dejn23NI2tJky4xeB2t7vdxspqmgZ3v/vd973/kSNH8PM///Mbq8+ECRMmbApEtC/CDwAe+9jH4rGPfeyGa3TLxfkk/PaCQ0367bYOC+dzrGkPxrZjnFv6/FVfMbdYdIwbTi+i4usrjm9jiwHTUFR+7XSpXLNt9+SZTuzAahLmEPKYIVeBdA6BpPQTn9pcVxRp/jsDLiffNFapACU03hj5YzODQE7qCbNhDGWqw5qH/F5IvqHwWCVKwq9UfsXtTF5fIf5GQxQdgGwrFZhCeq0btrQ0NpZ1kfMdzCNSaf9yna5rXLfqfJVKKisnsaUAOBJ/muysEcBj98RgW1VCiJbEX28XVX0xpgG5h3wV8X7Wh1fkZbAkl+Sf3y4ZyeRY6z4CY5dhVRmymkaWrXtXZ2WsMDQOtSGBMkKzZ2CkRPxJ3crnVYyYHXMi/FU44XWVwKLkk3Pz3unJQGlJKRQqRF+moCgOGW9XbfxnISA8qSvGKe280TFnffqFhHMAncfjHiRM84RDgDCeiQZm0/gcf64FLXfhti9GB4O2648xZpaw1RCOBKerjgHLbVRzrDw0fL+7G6I3zExSI2knHxn3ldDLaI8OPrU+Nq7j9DrkykupdyQxcjsh9bi/fi8Ksg14t2uVn/+d/lbPOZAMkfjrFZgritYiZ9bJ34bcYWS0uLARUZ+EYfV3sPW0Kkkv2ytJWDl3fQ5EFEITeshYUIfjXOf9O0ZUjUE7JUm9hJAE+uOauJ+qvz6vfaNs11X3zAEIvzGV342OvYb7lU3WDUGr95G/gVDLlxW7hr96jmEoKJtlfVQCc1JudoWSGQDBROKPaMDdMhB+Ub3tkuNs2cczGBYEpqRuvtCYxlYTJkyYMGHChAkTDgsONenXMrDT5gZnCQ3SBpXd9ScWOLvo8MWdJc4tOyw7xswQLp43oKNNnOxkqrh1B9xkfP4Z40OTdF0y9AqZqAk/H/LTk5SlkayWR0PCSdZywK3DSZahQoGK8bkS1lNIuUTscI/420uOwKHl1bw4pk72yZa1wxL8pLUxhFYRf4b7Of2EGHaur74phZWuQu6VJOxYiM9YjqsTYTpMmc+FhGybVblVSvJvL4RfjaQrr7O+Rjrf4RBWep+r52qIEOy6UI4m/4T424OhS8gcYNg4JUeoqfNK00QtpJiQPr4sjttBrS+PNYahPIebRpk3EegTgLqdy5Cqell/
bw95dhie8YunpoxNWvXXq2MlHcJQfyChOrXKU4cFHCL4ZP/8HHOURjFviPL3hzh0LF3KzboM4ZzjX+f7mimn34TDBOp2QeR6hnmebQN27tUWyx3YZo4mhjZnXHFsht22welFG9+1W5Y84bcHiAF40TEcGDOX+leJmFAjqXoONZTK0/uUanC9b68PUM4LjtkrtJDyjkp+Kd0nRsN49eRS6EwN0ioYYJjoUKq6dVEz4gOJxBmsayBnIslgTX/92G9gXOFY/h4oT8b2GjqsYyyKFfGnrks2titzEq7KKTd0HUbImRpZKfWIxF+4p/Q9W8vFt2ock1Vpje1qoT9T2/iUB2PZiIbCiEZoQm8FiVpV+w0oRuUZjn97Y3uO29Uw9grWY56xYe4qh8WxPNL6d6+EsdC5Y1hxr+o2A/I20/2kVp6WdSPVZ0aVJztQJ6E82z7hV/ZtQvxVINdZCL8YuceFlB3gjNRnJjgK8zfr3wcTJkyYMGHChAkTJkyo41CTfovOoXWsQuwROoeQ388r6k7sOpxddmg7b4jdaV3IOUO4qLOYW58XwHo+B20w0O4qC4MDMDOEpjcbSrlLGBLmLeXv61wK+yalOZcmWzX1HFCSSvnfEqtUYRqr1CZlffo5/PJjbgo6LF9vnWqLsUm509u7OqmU5wvrL7PB4OEtRH0SS36na7Ka8NPrhPyT0IXrXLNVKD1x91OmDs+5DrmXHV/lRBvDWM62rDxK4UN1m3aO0QTi72Au5h76yCm3TbEN5+uBZGjUZF+5vy5mHSKv1g5DuS73itqzKnWya1znkvCr7aIPIaeSjJdeCZgMnkn5p4m/oboKSgWfEH1+XT/sWHxWRXyiyl9F9MVzgZyDv0Y54RdU2YxE+DmnVH4pvKeEnb7Q8B795+/AhzAd8IR1IXn8xHhrGp9/KRB+Yrwn18LaJj7TW9aAwFg648dV5J2W4LyRmIG6obpYJs+cGHyXajwmzyAywk726wdSzoj7gYd9KORnLINTPx2JPy7DWo8TBlWUYT03GN5zqCqi8ivz3vU3LMixoW2G9isxlE+v3Gad446gdFoRB5QhonjfWKN+8b0HpaqrEH+xSLXdOhByc538RjWHlpJkrC0rz2ctrLpn1HWOxB+wZ4lTjfjN1m/4FbXfuQ/VvpfP/B4UvyuXV/JortMWfacAhHCelNSeqq5RuawdGTTpN3IfxPdI+LgQBdSPs5LDbFZ3RnToZMrn1hcS09hqwoQJEyZMmDBhwmHBoSb9Ti8ctloXjb8Mh9YBXzy3jNvsth2cYxyZmaC8Y5xbOjhu0THjqy47gtse9c1wrmV85swSX9rp8KWdLuaGWXYOVxyb4fIjuf8rk4Ezc0/4hdCdXtHnQ4zKpIVZJjHJuC153UroZUMTzLHwjnlZiRC03urtfxfETlmPfo4s6hF+Q4RESbatIqJqebhKRU+u2urnE/HGnODlWzmUhNpz7K9lVPsxZ6SMMxQNf0OQdjNEVcLLZEZAZajkkH9iDeJP1pflrQNTXnMoJSFWE79j0PeJPuf9kFN529TJZuctWnE7Dk+6XG/H3hDhKISlUvUIokwAw4YqTdaJsk8TfVrZJ0SfkHyynXgZl/k5S8i6WlsNqcDcSLOuEnroZx8YJv9qxN86Hu+ZEjD8dcVyZjFyyjOTt6dsWtZBful6aIJPzq9U8VnSfUde1qrnqFQJyDWuk33pr/Qpy86TfjWF39JNSr8JhwvULgEsgbYFWYvu2GU+h9/2cVC7A3RBueccrPFhOx2A41sGy44xtzNcsmVwbJZCPlK7C1qe6xmFeX4MPD+aHd/nQEYcO+0iPes7nQMzMLfKiUD1A4Kaokmv004I+br0Xs5ySaltfL8QHBZMyolWk6JnhF7xGSX79kB86bpp1Y4+p7Qud5DoOT8MKe/U95W9GZmMHNDXnNgNkzumUBYCoBqBob9XxpzBbygSYprv5VDmWiTLAcjHVJ8+8efPIRF/VuoclFa19i05az3k
0TkMZTtNQmd3pVwHdV61Z2UsmsjY2KDX1rJCKysLBWBVSaoIIUau8htDrf1WvX7HhgcjvgIAMJjPr2z7rM10H7hfsm8sp58+1GD9itCyvfXJWUpUfpZU3fXHDZxLjfzT19WGKDmc8t4vO9/3y9gJSNePCOAu3CbOB5efhH4TJkyYMGHChAkTJgzjUJN+1lA0zgs5t+w4I7UM+dwy4nU+M4SZNTCEOKHQRmYKU0bmFI7TuVzFI9tLCBIgTZjEO72cQPm6EmAYXTdMDkmdgWGCoDRg6/0lFKhRM7lEMgUyqJjNV8PrKcJvnXqV+0ldh7CSsFSEn0ATfsnYkYiF0sdfKyq14mbpHLoQIixTVAWF39IBMwAwnnMy6LeXVW0xdC45AdcPNXoQjF0DfY8I+dcPDztQ5xFrjibndLutq0pLXrvpuuiyfb2K58Fb7nz7CUkVCD1PIiX1X7h8KRcfUyy7rF4k9ypkn//NPVWf9jqW517n5yzPr4Z1lZimcr9kqhI3TvyV12SVo8Aqw5bft77cYdjwJfuVhhn9/FJlueynw3Zm6pQK2ZcUgOk3KnWTvlmrd7QxUZZ3QvwFso+5n5/VsVePO04hPUuFnwsE4IXGXtKF7bf8CTdzNE3M5wfjh4tMBmQbT/yx82E+ty4KYZnT8zcz1O9TgjpwVU4q/6wHxR8QH04iHc6NYih26fcN06gBPzuGlK/67dXOAfm+NpCGLvRPPbXfmKJt1TaroMIe1rCOwxAVf4eOIcfRirCxMmsEB42RA4IUsmGwfHHGKfvvWES4jpr4QyD+1lJjjhB9q8571XZUrNc5/jRZu6qs3jnG5SvOr3bfUT7C3bNatVLP2veV5F8JpQAuX58yPlsXNXL/fGKI8CNg8Nmn4pngAcJ9nd9yqppsBvz9xsxqbNQPLRuLhBpX6fVjCsVarkaV61POSav8JCVH51IEBRk7aWc7QmC0DUAMECnHiwuMaWw1YcKECRMmTJgw4bDgUJN+BERybqd12G1dUkiF6dZ2Y2CIsN0YzCzh6MyGMGyeJGTuT6olf4gYcbXxXM+LOsdYuDRBF09UTSDEVG2sSCBVyJDi6yCh/TJFlvU52gx8br51yy2VgDXCbyyHXbkt0DeqDan8Vin8MiJwYDIvdRCD+9J59aUQfksnSj/AmUSIOAZmNqiTwgV0ypJiCVXCbyhUouQsixNrM+zkrrHOdaoRdPoa63urJIP3S0J4cq5vHK0Rf0J01pVvw+XrfIJdstyFE0RG/EWjZTDudZyuRYdk3KhZzoaIvnKdqPuE7IvhG6PaKyeX++e0n+culWPU9dxLv1Dbfh3DdomK6b6HdWwU3i9beZYXZGwtlOgQ2WcVuaC/6+11mXI8ANHIKhBjcEn2JcNTTvYxkoOJVvXJe0X6G507dPJGn3AoYRqwnYPtLL3wTQN2rVcKuRbU7gK2gbXb0SFmbiWnZ6Egk08lpGfv0BSePQd0cRlHok+ezaBb6hF/NUcD6SdKjDk9DD26zAgh5gb65RXEHpUqGY0VUm5WhvR1oB0gdFjPLNxlReGnyYPUTw53ZlK2EKJxuZxnpvjLy2E9zmaVVTfURZMX2X6V31nEAHiSYL9k1tBx9rK+DJkpbZ+F+kR6v/ltOW4r5WsCrTxHHeZT38+961uqr8r8a2MhGYvfQ9dEoElIvR3J8ceIv+IePOgrVM8V9jIEqm26zv5Vwk8wQvbp5VXniDEyEGNOAOvdb2n7fDwl9ywYq8MRlySkNJjO02iSilPGWI5TZByHMrwnh3cCoYPv52+s8J4TJkyYMGHChAkTJhwWHGrS78jM4EhDYPjQnTNrIxFTEkeeNCKcXXZA65VcR2cWpxcOn3I+HKhDMvADwMwSLmosrjzWYF7xPHbwxKCQDFoVZMJ/zEE7KKokP10fJEMEQ+EANaEj0KouTc6VdqMyr9o6qqN1yL5SxabXSzjLErU8fkNkhFb7ALlyp2xCMfh0YRK5dC4a4LPwe53OvZUI
Ak8AGgAOM2PiZLM07Ol6y71W1qMGfc33kn+vJPgGQy9lLEe5MpFTY4q+odyNWkEbia5OXV/TP5/aPSeE3xDx6ODr17EKUdoBsIjXS57x2NKO0CFcQyfEv4fOd1MaLIdIvvg9GBVKsk8UfpJrSpN/Ak1sivqwhqydnZx7/xqP5b7U68dCiZYYCvGpIV7WmQFvzVtXX+JSiVvND6jI/Zg709CeyL5auZog0IRe51L4zrhMDE/K27xU9YnTSAwdrJR9QvbJcSWs9IWGcww6j2zjjaFenHDh4LYvAbkuGXCdA0KeP+paULeMRnsxTnunmPy5p93TOdFHxhu0TQO3fXFUEGpYQ5hD+m4f9k36YVkvz6ljAMZ/t4Yi8RePH99na5xz4RSx6g6XPlF8U7IA8LUQklr1NqC68gWPSLll+Qg5U3b9+l3YI/wGDPeR5AOyfrGWNzH2w4Fu6in9RBXqurSsIBkJ8IQyEdC1INv447CLRFSpxmbk9xrCeUXiDyuUn+U1WEOFNtbtrbrHMocXDBMxQJ38KwkwOUcgqRpd2f4ae8mrWMEYISgo26fWXrF+itDtVUPtr78PoeY8WUYZGLsXhtpME6hjqDkaxXWxsJL4jt6JqiIr1I+V7yuJPvUbABwIFmE8TH4cXBsvypjK0kB/oQk8YdcLMpeJANuobRvANmDjVeSdpMJwybnKIYRUV2NwQhhTG4YNCr+ac8eFwDS2mjBhwoQJEyZMmHBYcKhJv21LmFuDheswMwbW+Mlunu8J0dBrDWFmCUvnJzEzY7DoUnA67QnKQAxRdWxWBngM2zDQBhnfUH42IgkBRYDhYCRKJEnX9XbJoNU6RtmDauEiS8JPK9F8WM+g/gKtJJ9qijxdh3J5jYyU7bXyr1wvtq11c9jVpsPBLhiN+Kns/LvkVXScCByfk0dXiDAzotDjXjjVjkPozz1gjNwbUrxprJN/UY6TrVchXmuqvxpqZKK+B7XqUe57Wa/tlPr+KtVwsZwRxPWK2HMuPTdyXqRUfRLmNYa6itXsH0svSiHLODMqyibLLhF8MRekInp8Gdxr0+wcK5agWh5D2W+MlK1hLzkWy+MJ8SeGFWZEY0ry0k/En8ZgyM992Cw0dR4dw6XvK4y35Xet7qvVpST8dG4+B68CLglB6U/EWaAM4Vkj+wBEdR+Qlo2pYyZMuMnBWp9nb3EWxAwSokcTVmL8NU3VeK/DwUWyp3eceXUxAXE8ByCqa+VdYyWsc0DnvOqO5HVO/rmuj9zqkP6vFjJSYzSf2SrSpYRT7Qj0yb8VqJFeQxhU+A2Uq8vnMFZKv8uywzuCxf5P4XqkvIXE3CM90gFdJAsIxgfIdy4L8xmJH7Wbfl9Drd+zqk8rl0baPndi8aBi/RDpo5tMk1E14g9I85GxvNiR3CwUf1l+Y1Su9Vj8wBpZveb9WGufWERlu5oKcKi8/aJUWepl+0XRxGvuVCgq14jhOKj208XuoQo18g9Ayklabq+cqDLCr/RYJROcXJPuulT3cfxL8btjREUfIznO+vE3Z+NwfR9rJfE0spowYcKECRMmTJgwYRiHmvS71bbF8aMWdA6w5DC3PrRUY5L6gxnY6RxO7vrJyMwYXDz3s4VSvefYG5ZERSQKkyEsHWOnc5iD0gQkTAat8Z6UmlAQs7YzgOtypd8Q2VWGwDREKmdZf+6lt9MKNAlCakSBQhz3H8o3t0rdtwol8ROXB6N5DFsoarEihOO6c2qZIOpjabWfNuIDUAb/QERl8kyv4vIqvxCej0KIVPYz1M4w4AjWIu5fU0uV6jBdN/9d2qk+bR0i+4ZyK+rr2AvpuSbxpxVmrrhm6XoFkg8A4AmqqGQbsGPUznUd9VPX+fb35xCe4ZBssVT9+eJUvpI1CClN8gMqR6e6blJPyQGaiJ66arF6Xr3ntK7mWwc1EUgZynMvyjJDlCloAMQGyfMhhn5H1Xkd0vogygigIPiQ7v/MiIhkuOrUragVIcsu/HUphNRS
KfyEVPBhotN1lvCdY+SeP279RPei6N0k2DH4PHqMn8+yJ9y46C6+Eu6i47AnPgXaOQV0i75aJRhw0cy9QmsFIQLX+nWmAYXvQ5DQoH4swzGc7k7ncPHcv3j12KpzqZ+wBFB4Nzvkef50rk+BvBMZ4wZkQr+/DkMCUCAYtdqKKVdGleEsy8EbMWchLkvEPFjA2iTMSoXfCLkjTg8Sdq91ybmlPBVRXVtDMAxYbaJ3gfR1LahbYCykKYf7iuzch5AFIkGgEa8Xp9/pnCkSf6KEY/hrYstzrxEvFeKPB76Xvwk58UfFutr2ZXnRaQnDxF9JLpaqxiHy0VegUJkNrF9FNvWviSqi2HSoPnr8UKuuLicRvGEcgryNsqpVytgv0ZeTlePh0fV1HwzrqYjw6nMgy4wZvgYr8nnW6pQdIuwo/ezYRFefSyrUAByS65lCkatD84a6cnAMYWMh+WGZjJ+LueCEJfMyTmMtRu7IaY1EnAh97I00BJnGVhMmTJgwYcKECRMOC9azHNxEQZCQH5KjzRt5gGCccCG3kpMcTC5OnMuJ28VbBpduWVx2tMGtjlhcuuU/RxqDL+10+GL4tMVY3MBPVtpgSB6t74pJpyXqfYzpE3lGvqv1xvhwpHq7EprQ2CvKuamenGUTtWDPcWoCt3QuEn3LGMLFG98lt56oaWLYTZeIOlFgiYFNqrLKTzbPvZcmrja0p7SRELzSdrV8dc4lss5lE9NEHGoS8aCEX34e/q+u235yPuak0PB2cn3Kj1yreL2cC6FS07Ub2ldyK0o7aHJy6KPhXCLYytC4cr+I0ZbDvRINCcVH7ivH/rltnT+HnbbD0vncoDttOKcQHna3dbEPSSFj03npZ2AV1lXxxbZTbV57zuSTrkF9fUnC63s4ql9d0ZaqPeXj9vDpXH//obLKsKqAN/Bx6NeZk7KBQ9uIUUh+63u1Df1L64BFl/4uOsZOy9jpOOaCPbXb4tSiw4kd//f0botTixanFx3OLv3n3NLh7LLDbthnN94LLuvjetd7H8/qBOCLX/winvCEJ+D48eO49NJL8ZSnPAWnT58e3P7666/3uYcqn9e+9rVxu9r617zmNRfilA4tqFsAQtgAifATBQc7/54KRu1IrgBAMwc3W+DZkfhx82PgZmv4gK6Faf//7N15fBTl/Qfwz8zsbg5CAhFCuO9DQEVBEURFQQMqFKUKii2gRUSwCt7+lMOjVG0VrVC0VlAL1qMegIhFLq0gKi0qCAiIIEe4cx97zPP7Y46dmZ1NNsmGHHzer1deSWZnn3nmmT2eme8836cEHjk8p7A1ba7B7BtA//xTw0Ep463o9vZzjngxN+v4THKurz3H3kewfi65ibhwbw02CdUcSVkmayo/l8BfWd2IcgN+LgEwo01VhAN9IdtvYfbfrHOdGjdZRDaCZRt6END6Yy63rGdrEyMQJazfA0ZdI7cnRGSblPW1ax2R6NymtU2sf7u9ToRj3Vh72maAyHqjnVE3a8DaUR/jOJnbr3jXPmpHwdYm5c3hZq2Do37h4xT9x3ie88e6r2WRXPrI5mNwDyhKjp9qVVaQuaJiDPgDkfto/BjnQ1pmnOjrubaLddSerMBM2SnrKTtdfsx1LHP5af08EfU4x/paVmN+lxHAfhURERHR6aZOj/QzhPQL3l5ZAJAQstz5GVC1i7zWubeSvYotKCZJQHqiBz6Xc6mioMDeXL/5f5JXhqJHYWRJCxj59bL1G9AjLjIZd6aaF6Ggn5zqI8YMxugdZ0DHDPToo9/MuquRF5SNx9zudAXCF0jMdJp6+k1rek63vwH3EUbGcoNzvkDbvGaqfdvmcstoRm3EmKy1DWRAH8GlyPqoLmjta2zS7eJOOK2pMOeikGUJXkW7NzigGie8EhTLXGvGPGHa35H7adRdmy9IaOkmHfPYqSH7PhrBz7KCfdb2CI/sjH4ZojIBP7N+LsG0Cj3f8rd13j3jDuiQvoPOOjpHesZa18gALGC+
oh3tr4bs61vf47a0r8L9GNkfA6wj+qzLbM917IrzPWNUwbkccJ87MZwq1VKw7Hw8XE+3gFJZKYNtczCa5bscD8uBdt5AYP0ciybmV5X18wHaHdwAoIrwPH2q0H9LApKQbCM4jJGIzs8UMwCvX6QWeiDQuEFDCwiqKAqoUFVtBJEW0NVSfRrBbOjbLyuYa9xUABh3oke+bqOlHq1Odf1u9DFjxuDQoUNYuXIlAoEAxo8fj9tuuw2LFy92Xb9169Y4dOiQbdnLL7+MZ555BkOHDrUtX7BgAYYMGWL+36hRo7jXv17QL/BKgVLtf8UXvtBrvfjsCJrY+h+exLK3YQ0k6ovkoB9SoBg+X2P4VfuNLiEh4EX4JgEAKAmG9H6IDFnS+oTWz62yUi5qnzPhUfJGCnS3EYFOxuewou+xEAiP9hMSFFuALlTuRf/yRvs5R/o5gz62+sIl4BdtTjHADN7ayhb6Z6gxGke/McQYRQ3o/TEJALTvYwVS+OvDuj19bj9JCBhzQxr7IknaqCEBQDJGOMme8PNcRpVZA4zOPm60Gy0i5hp0tIGZTtE6J5n1KXAPQDm/rqvyaW98VwghyhzNZqXVS8/kAMfckkBEwElyjrZ0vi4t/0qy47UR5XgY9bD9b6l0tLmVJSkybTgQ28g8o30in2s5b9G3VV5xxuPC8X9VmAF26zK3UX4VTOsb03adrO83x0PmiL9yC7Ycf30ePyEDEBIkIbsHNi1pPaFoI/yMoK52U4F2jIzgnds5nevuxLhedajLfSv2q4iIiIhOL/Ui6FcaUnGkULsoleBRkJbgMQNk/lB4xI5xIlgaVCHLQKJHPzUWxkloGYEWvbzjRUEkeGQ0SVIQCAkUBrQTHONCkyRJlvJgSTFnv0ilXZCSw2kRYQSdIgMVzovuxn8el7Ni41xXOE7CjZFnQPkXsK2PR1xMjxJQcAtqRQuYWMu1BrkUPYgZUgBFBQKSMEcv+iBDMi4Kicg5cyRoacA8sgRhXsCT4RXail6h75tHRlB/PRjtYtt/2TKKUgK8imwGDY3ltn1Qpah3mpYXMHK2nWxte9U98FeZgF/U7bosdwtiRauDkU7RGrQ1y5Giv8bCZUcuK29OwXA9LYE/rTKRT1Rd1kfZAdloqTuNdEP250fZR9t1m/ANAtFEzLEX8Z5zBN+tm4pyjahCaUMtNx4Y7WibxzEUPcBYbtHlBHidQdrwhWShXVvS39dGAFAY17wl+0gI62aMUYFGsM64WG18D5QGw6kCtVSelpGcxghdFa6fEdbPYjPdsizCnw2qiFjH+rs+ysvLs/2fkJCAhIQyRnHFYNu2bVixYgW+/vpr9OnTBwDwl7/8BVdddRX+9Kc/oUWLFhHPURQFmZmZtmXvv/8+brjhBqSkpNiWN2rUKGJdciFUbZSfHqQRsgfwNYBQtJEdAOwpGx0fSEKSo87bZ3ILrngTIRQPFEjwmXMxh0d5a3PuWgJtxntcVaGoijZ6RRijBPXNGGULaKk3Ednjc/u4ivbONVJMAkbfT9sLGWVEfdzmSwNcAym257j8trVXGZ+zZsDP2I5QIYWC4e3qZUqyXqZRvrCM/hZGWmQtJbJ1/ltZ0toT0L6bPPrnuCoQDnoKFZIaNEf1Sdagn75NIXvC6S7l6COjrN8pziBTtO9Z200XzqCnLc++7DqPmjny0SWY5dyOKozUonrbILYAknWeP2udrfP8AeHjWV5cQBXu/QDbiMbyRvBJ2l2BEYE/x3YA9/axlerSZjIi29LY72j7Zw2KGmUYgdEom6mQso5VtD6Q9fVlHB+bskb5OY9HBebzjG1FR/lRUvpKlr/dyjaDoXowXhvBp7+PQkEzxWfUlMGWNJ/mKFW3z1v9M1vVN+qWtcZYr76Ld9+K/SoiIiKi00+dTu9ZGhIoDgmUBrU0bSeLgzhW5Nd+Cv3IKQkgtySIokDIlp4woKrmhSMz5SXC6eKsJCkc8JMkwK9qF4j9IQG/aqSj1NMGwhhV
4h7wc6YCAsLpaLyKHN6O/piRdtLK7WRMksI/1mXlcZtOwva4JSDidvHelhZQhIMmzhSIWtpB1UwHZaRitf5vpoiM+FsLuATVyHSf4TbU91m/O1+CNnejV5HglWUkeBQkKjKSvTISFRlJXhlJXgUJioxkr4JEj/aTrC9L8IR/vPoIwWhBlGhpTo10f8b+W1NARktjaW/PyFRmsXIbZWmvsz3gZ02zFJmuNfLHmPvMehyNkVHhdJfhVIvR6lCR5YC1ztb1wwE86/9GO1jb2JZWVLWv6xbws6butLaNNVWpW5qqaMetIvPsuT3X9XVmaXPrT7Q0q9Z2cauXWwpQt/aM9Sdavd1S/4b0EXgCsAftzMCdMVrPCMqFPxuMzxF/SEVQT+Fp/BjpWbU0naqZrtNYVhwImY+VBlUU+kMo9AdR7A+hoET7bf3xB1X9QnjZn49mcFwqO+hbXSpzvCr6A2h3gqelpZk/s2fPrnLdN2zYgEaNGpkXpgBg8ODBkGUZGzdujKmMTZs2YfPmzbj11lsjHps8eTKaNGmCCy64AK+++mqZQZPTknXePsUXvjAc9GspGZ0Xp0PBcODP8iOFgtEvdjvY+kf6SC9Fkswbf7R1wu83oV80Dgd+wgFBJ8myjpPzven2UnBe7Ld/b7vsS+Qi1xF8bikstQ06uubGyEo9NZ714rx1+9Fa2hbwc46+s6TddNbDKFpraz11qj6HotH+4Qv4wrLM0ShugYeyfqIwHrFuw7b/FXgbR53TrhzOzwrVVq/YKuAcGWcV7fsicuRpbHV0XU21tHO0ILTjMbf2cg0OGcclevXC1YC9/SrCmQrVmQ7VfKwaokNxK7Iir8EYg4G2qjlT+FqXuz1mBMPLKF8YdbGk7BSKR7sRxLgZxPojydoIP8Vj++xykiHZzuWcN2sY58baDQw1G/Grq30r9quIiIiITj91eqTfzpMlKMkLp8ILqCry/Sqy80vNi9UtUxPhdZwdJ3pkeGWhzYOnX1wpCqgIKRIaeCNPSIyTcEkfQVYcFNh1stScDyrZq8CrSJAkGYokmXc9A+GgivXilDWoB8cy56gVtwtY1iCk9QTIOpIwlosfbqOErBeytf/DQQG3oIU9eBQOllgDJdYASbTAhznvnizBa8yvJ0tIVGQEVAmJijZnoVfPvylLerpPY6CX+Vu701qRAa9QIASQDEWf31G1je5yptosL82mM/DnbCe3stxGQJYlvD3jCdrOGdt2G/Xmtj2zji7Hx/oc6zGxHidrmUa5tlFZRvX0EX1GvcMD7ywjxYCY57HTtht9pJqqCj2tKAAIc3SklhJTe02XFVxzBrqtr1njf3tdREzvJaDygZ3yLkqVN1rO+bg1zSpgfy3bg6pRylUd9XEbRen2tHKuX0UbNRoeKat9JgdUa4pe4/NOW0cy3+eRF3yNz1jrfJrGqD7zb9X4rY/2C2pBx+JACCHjho5g9FHAxo9HluDzyPB5ZMhqOC2wdb+MzzNFCn9u1Ue//PILUlNTzf+rOsoPALKzs5GRkWFb5vF4kJ6ejuzs7JjK+Pvf/44zzzwT/fv3ty1/7LHHcPnllyM5ORn//ve/cccdd6CgoAC///3vq1zv+kIuKYDsFQilNIVIbQ6lOBeSvwBy4XGIYCmE4oXa4Ax9JJCesjHKKBUpFIBQvBC+5MgNqUFA9tjm8JIASMESSP5i+BIbAooHXjlk9tW09J4Sgpb3p7OPEhJC/45yDxgq+ieKM7OBcdOQ8TnjFiwMBxnDuxEyynQGEAEtdaWeCs9VWaOsrKnxZMtcWnAESRFZvJHaU5b0bahBSEE/zPnyrBf7rYFExaPFFvUU6QKAivB8fkHVnjlCliQIWf8e0/t+ivmJbaG6BB2t9RWqnslBq5vz+W6BMudoIT3RhnaM3YKsQJlBTr2zYmsT6yg/Z0DL1lfXt6+NzDPSUENvo7L3w2BNDRrO4i+Zo/2M78SQ2fb256vCPtI0YrRflLSebnNK2sYXWtvF
hbV9jJsdjfqUxdgfSR/ZZR3550yJ7daGzpShRg2NLky8Yg5R08U66mddy/ZaM5a5pfashJh2yxm4dUlnC5dRrdpKsjnq1tbHsq6i770sh9OFCqFquWXdPtMsn10C9sC0LEmQJKFNX6G9iRBSJX1aB2e7Rp731Vfx7luxX0VERER0+qnTQb9Ej4LMtPB8MaVBgZPFAew+WYQj+aUo8oeQluBBokeBRz/zVSQJJUEVIQWQAxLSkxT4FG1ePm2+JxVJHgkJljNlI7CmnWBICAqBfH/IEoAQQAjwKoARiDBI4YRFJut/1hSeEuzBQbd5yNxOPoWjfGeQMeLiteVCl9t8fOH/LWWWMXrJLT1iwAiiqJbgX7ln4JKZSlCVAC+AkCwAVfstC0BWBSRZn7ckyshGRT9O4QsPgMcSBATswQHrvpfVHmZbuATyygvylZVC0yBLllSOxuvKyF0kR76OFMcFIKvI0XyW7btUwC3gFxFAdG0Py9UlwHIVJFxf6xx1ZnBQf+9UlRGMtAb+tO24rFvGsZVlrY5a0NgZRDMunmnbse2DMz7mktrRnO8tyoUj6/aM93dZ75WyRkO6zfcXbhv7MbAGV61BXWfq3SpenyozCOuWPtgIBMqy8Zo02rHs9jNT/+mfP6oe6C8r6OcPaUG+Un30njaKT3UN+Fkv8RmplUOqgPU+ESM9sDXgV1Mj/U7VvDOpqam2C1NlefDBB/HUU0+Vuc62bduqXLfi4mIsXrwYjz76aMRj1mXnnnsuCgsL8cwzz/DilIWQFQjFC8geqAKQvQnaBWJPYXjUnxoCjHnqjHSN1gvI1lEdQoVf1T5vbEEISQZCfsihIPxKkhZYkoAESYYU8kNSg/B6PPApMjxy5GcU4EgR7JIa0HojlTVgZ8wN6nyLGJewo71nnTcaaBtxX9fGmaYzFpbAn/G/GWAx6yAiAn7mqCeEgw9GsC1iTj39cWMOPSP4VtbcdMZNFZaK2gJMVfnYcZ3T0Mw9Gt6+FjgovzxnABewBD4sI1rLrZcl4OfcrvH9KQTCczpCKrMNXevq+Nt8eUl64E9/LTtuDYuNUCP3G+4BP1eqqt1NF2N7uQXKDdZgutFu1sCfU0Q/K0oAMNaRRfZXbsWYNx6Z/5/CL/fKzvnnFvAzitSXCUf51oCf9XPTSpa0fmQ4UC2H04S6pPl0BhGtqTxlCRBCgpC0EcOQoQX+pPA5rnEebk3vXhNqW9+K/SqiilNVFV988QUuvvjiuJRXUFCAQ4cOoXPnzlUqZ/PmzTh58iS6d++OjIyMuHzHCCFQUlKCpKSkSj0/EAjg9ddfR0FBgZlmuH///ujatWuV61ZUVITc3Fw0b968ymUBQCgU0vpMZdyoFAu/349vvvkGwWAQl1xySVzqlpeXF5cpMAxCiFPbB6mAeNYt2s1gFaWqKgoLC1FcXBxxA0xlhUIhKErETNqVpqpqlV+7hngeg1AohGPHjqFZs2ZxKS8QCEAIAZ+vnClAylFYWIhnn30WEyZMqFR68eLiYrzzzjs4fvw4cnNzkZeXh9zcXNx///1x+XwD4ntMq6Lma1AFSR4J7dN86KD/nHlGAjqfkYS0BA+K/CHsP1GEE8UB5JYGUBpUzYvexQEVBaVB5JYEIAFI9sooCQrk+UM4UhREYUC13Xlu0FJ9ar9zSwLa/DGSlmauJKSiyB8y7762ptu0nqAaKUWtqUUN1m26BfzKSw8YTrNkL8eZzs9NtJFigD1IVFYKS2sKT1UV+tx5xoV37UK6tl74x2gHIyWoMTIwvI1wGj0tlWr4Yo+R5tM4AVQkbZSg8eNTtJ9Ej4Qkj4wkj5biM9krI9kjoaFPRsMEBSk+bR7Ihj4PUhMVpCQoaOBT0DDBg2SvYo7ktKYZM9rFmsrUSEdqjAA1ggwllnYx5w5z+1EFrOlOVTXcfqpqpDqFeTxt6SodwVf3Y+wMBLqPxLQeL+sxs6XDddTBeO1ZU2Ia6T/taTUt27P8XZ5ogU3ra9aZ
ptL54yyr3LvQ9aCNNXBj/G+M3pJlyfzR0sBGPgdwD/i5Haey3qtu6VKtKT2t61hfJxGvFZf1rdu2tqfzpyIqmx5Uez9Z5t4LqigJhtNyWn8K/Fr65nx/CAWlIeT7gyjwB1FQGkS+P6gt94eQVxpEbqn2WL4/iEJ/CAWlQeQUBZBfEkRBSQAFJQEU62k9/UHV/B3+CZnvgaDL57d2vMOjlM3PI0Wu1yP9KuKee+7Btm3byvzp0KEDMjMzceTIEdtzg8EgTpw4EVOn7t1330VRURF++9vflrtu3759sX//fpSWllZ6v+ob4U2ASNDm61EkQHgSIXxJEIkNIbwJZrDO+JFCfj31Z1D7Meb5A8zf/pAKI4W67aJ7oARySS5CAigJqigOCm2kSdAPKVAKKeS3fJdr7yNVhRnML+vmFNlykdi44Gz0kZxfPRL0z3jZPeBn1FuI8I/bdo3f5sPOQKjld9QAlySbI3CEJIX/jzLyzI1tH/QRXmbAz3KcjOAqjDn3QkFIQrWl1StLyOiT6WPDVPP7JLwvZmrSGAnr8yxzDLodNwDmaHFncxgBYduIR2O0m1uKQ8dIOOOYm/tk3abj+9I87sIIDIZfB2b/yOX1D1iCs2a9w8udr0VJKjs7gOtDUUaTmgE/lxSrEY/BPWhk7It1lJ/1PeDcf2cbWNsNgGUUZWz9DWv7xHJxw1mqcPyUJVrxEccoYqNq3Eb5WRntGVW09KxuHeNoRejbCalCT/Gr/QhhOZe19DuD+m+hf2aZP3B8Nhj1QvgGA0XW+9hG31u2LJPsAT9JkvS/a+cFz1OJ/Sr67rvv8Morr8StvNLSUhQVFeHw4cNVLmvz5s34+9//XuVyVFXF8uXL8dZbb1WpHCEEPvzwQ/Tq1QuTJ09GTk5Opcs6dOgQXnrpJVx99dVo0qQJFi9eXKW6AUC3bt3w/PPPIzMzE2vWrKlyeQcOHMDw4cPx3nvvVboMr9eLIUOG4KOPPsKkSZPwu9/9rsr1KikpwQsvvICOHTti2bJlVSpLCIGvvvoKU6dORY8ePSLmQ41VcXExnnnmGQwaNAiNGjXCRRddhNWrV1epbt9++y3uvvtu9O7dG61atUJ+fn6VygO0/f3jH/+IJUuWVLksANiyZQsmTpyIoqKiKpeVm5uLBx54IC7v+a1bt2LGjBm44IILkJubW6Wy3nvvPbRs2RKpqal44IEHqlw3APj4448xfPjwuLRbIBDAtGnT4nJM/X4/XnvtNQwePBiFhYWVLqewsBB/+tOfMHToUKSnp+OFF16oUr1yc3MxZcoUtGjRAikpKVX6fgkGg/jb3/6Gzp074/HHH8fatWsrVU5SUhJ69+6N5cuXY9asWXjuuefwzjvvIDExsfwnl+HEiRNYuHAhhg0bVu5NWadKnR7pl57kce3uqwJo2SgJjZK9UGQJZyT70LdlKvblluJAXok5cudkcQAHPSXwh3zmCJ/ckiBySyQkexV0aKzdiRFUtXRyspDMwI0iSQiEBPJFCIp+Ep6oyAipAtp1KS35iZC0kSqSfveiIkuuKTuto84iHtOXmyO7VKGNPHEEAYw/yyrLEB61pN/Aa/xvGWmk6HdxGqOgwu1rDy65pfM0/xbWQJH7sdIutElRL2SE01JqwSdA1dPBSJAlASG0M0YjKAPATLennVBGPyk0gojmhUAY84dpxyskG/WWtQs8sAZLwgHOaEE0bd3YLl4owrjrVRtNZox21BaqevosmFcYQvoxMpSXYtS+3+XXx7mO+b/1Acd1PG30mBReR7Yss4z8i8a6P24jOKLXtWIBqWjrW0fnqUJAUbSUmNYLC9b3grecCw5ljWaMlsLUKZYRt87HjFSezpF80dLDRpRlvXBuWd8tBa57nSPr6RxFGdt+WdeP/AwyyrEFmo0bD0R4TlBtjtBwAC88si9kGeGnv29c6mJNxWyk9wyn+9TSRBsXqjxKOEWxV9E+1zxlvA6qixDVfDd6Bd9zANC0aVM0bdq0
3PX69euHnJwcbNq0Cb179wYArF69Gqqqom/fvuU+/+9//zuGDx8e07Y2b96Mxo0bx+3Oy3pBjuwaqp5EBBs2h7ckF1KgCFLID+FJhJqUBrnoJORAoTZvUygIuSRXHwkI+Jv3gB8e+P0hfX5lgTOSPPAhCLkkH0JWoCamoVifK9kjIzxSJFgCSZLg8zREgkdCis9jpukOqCqgap8rRmYBL2COHIqFdTXnKGvhWMf6VrImn7R9N+ifuWYAQ+jZ7iRZS/MJhAN/sgyE7KOmIoKAxpxYsmX+LCNwIoQtzaRzBJK2T7CN8jMDfkG/LSAASbak1/RAqEEoik/7TBNa30SSBCRhHX+msX4PWAOiQgi93tbAn5bKVQLso+wkGUL2QMhKeF4wa3pTQA8oho9NOBAnzN9uAQAZ7iMebWlOzQBrxNNN1rY2+o1OIaGlKJSh9d+gCjPNqlmO0TWyBPZs9bXcLGhd15rmU1vmlpVAcvzvUkm3oF4UkhDugWkHt68Z4VgubO8fe/1iHfFn1gvO925kt7SyoTWjGGOv3c4frKP8nMcxnKG/nBpUdk5J/bft88jyOjHfX4D7iLtooowmtn7WWAO6IctTzTawBORCwn20sID99Sz017Ei65/bsgBU7ZxZFpJej/DOSpY+ek0F/Gpb34r9KvL5fEhJSalyOcFgEM899xyeffZZ8+L4LbfcUqUyExIS0LBhwyrXbceOHfj+++9jev1F89lnn+G+++7DV199BQBITk5Gfn4+GjVqVOkyvV4vEhMT4fP54jKqJjExEe+++y5eeuklDBgwoNLlCCGwcOFCTJ06Fbm5uWjQoAHGjBlT6fJatmyJFStW4MUXX8RHH32ENm3aVLqs1atXY9KkSfjxxx8BAN9//32lywKAnJwcbNu2DSdPngSgBVAqIykpCbfffjsyMzPh8Xjw6aefIj09vUp1O/vsszFmzBioqoqioqIqfyYWFxfjd7/7HRYvXoxBgwZh+PDhVXrdff/993jhhRfw888/4+TJk0hOdpmKoQJlTZ06Fdu3b8eQIUOqFBwOBoM4dOgQSktLkZCQALWKN0xdd911GDRoEB577DHz+6+yTp48iXvuuQdvvPEGQqEQfvnllyqNCjt48CBGjRqF//znP9i1axdGjBhR6bJKS0vx/PPP49NPP8XevXuRk5ODBg0aVKqsBg0a4JZbbkGDBg2Qk5NT5fdCWloa/vKXvyArKwuvvvpqpT7LhRBYunQpHnzwQVsGg02bNmH06NGVqlePHj3w73//G0uWLMG0adPQpk2bKo9ozM3NRXFxMTweT60Y5QcAkqiDMy3n5eUhLS0Nh7OzI1JfHC0O4Yt9ubY5mzqmJ+GSNqnYedKPH44UmCeIuaUBZDRIQJo+oiugqsgtCZoXkC5ukwpFlnAgP2AGlEpDAkX+EI4V+c1AjzaaQ7sglexVkODRLvIaJ54hVTtp0X5rnBeWYxnN55znysk6CstaVjiQYb+o5dye8XnqdnHdPlef27LIgJ/x/LKCfuZIKD3oZ1w0N+7q1EZPaRfVjfn+vIoEryzrF96Nu0BhuztUMsuMPHm3pvtyBv1CIhz0C9/Bqs1jo43IC6cMtKYONIINzkBfRYJRxv4b7eKc39AILrvNNxg+hpHH1jl/oXWeOreRfsYxK+uc1nrcrPUHItNaGqOebM+zdFCM/XAGwZzlOdd3e47b863cjodb8ClagCna88oSa3CvvPJjHRUJuLdpxPFxvH4q2pYVDfoZnJ8f0Z4XOb+ipQzVOso1/Dp2C/apIpzGM6Rqn9/aHerhZdZRrMZcYUZgDwB8Hi2ol+TzQJElJHhkJPkU+BQZCR45PKLP8tv6/i0uLMA153ZAbm5uzKkwK8v4fux42yIobvOoxUnIX4TdL4+ptn0aOnQoDh8+jPnz5yMQCGD8+PHo06ePeSftgQMHMGjQILz++uu44IILzOft2rULXbp0wfLlyzFkyBBbmUuXLsXhw4dx4YUXIjEx
EStXrsS9996Le++9F7NmzYr7PtQ1ZfWtggLwhwQahAohl+QC0EYAqg3OgFxwFHJxLkRCAyDoh5x3BMJfAgAIdLkYBQEVJUGB4qCKoCrQMsWLBCkEuegkhDcJwpuI7BKt/ERFRkayAuXEXsDjg/AkoiSxMfL9IeSWhlAc0Pt2Ia3DokjaqD9jDmCvIsGnyEjy6L+92oVjv+UDxCtL4dRyjsBExEg9hNcxbgyykmHvcxjf0x79b0WCPhoyCClYqgVcjJGQqmoGR4HwqD4oHgjZAyg+LdWqx6f9DW3OU2fAz2C96K7o+ycZQT5V276kBoGgPxzwAvSgm74d2QOR0ABC8cEvZJSGtGBscUAbqekPaSOdjRHkxqh3j94XS/Jofyd5ZSQqEqSQH3JpodYGgRJtf9WgbeSRMAKailfbd0+iFgDUj39IAH7VGGVtz1TgPBaS3uaK+fkL+BR95mv9OEDo7e4IiAhZ0ephtIMk29rbGfBzfiUb/U3ZHIVkHBf370p7/9R1FctI/HAwwJli1JqeW5LCc8oqRvn6yE7rCEfb8QdsrwXrb/M1qQefhWQJQMPIKmBvI+t3Mxx1NRjvP8DeR7cGryVHv9F8rlFlR5nR2sjtPV0et6BfWXWTHH87R5FGjPQray5PINzWxjJLANzYV6eytg8gsg6WCxHCEmg3zolCLgE/6/ZjOX7OdZz1N2+w1N/T1r+N15H1Jgp7gFFCQX4eerRvyb5VjNivqhnGa+dUvE6r6tFHH8UTTzwBQBvJ4jzedZkQAsePH8fBgwdx8OBBHDhwAIMHD0bbtm2rXLbf70d2dnaVgmHxVFxcjJ9++gkAzHSXXbt2jUtgMi8vr8qvYyEETp48iV9++QXFxcW48MILq1wvo1whRFwutGdnZ+PEiRPo3r17HGqmBSMVRalS3b777jscOXIELVu2RMuWLdGwYcNam+Kztopn2ksjwOz1eitdxvr16xEMBnHGGWfgjDPOQLNmzWrlMS0uLq50imCnyh6DQ4cO4dNPP8WRI0dw9OhR83ePHj0we/bsKrdbSUkJvvzySwwcOLBK5ZxKsfYv6vRIPzeKBDRO8qAkqJonDY2TtDei28vASGe4Pz9gzjNmnW8sUdFSiOaUahecAiHVDPKEU1FKCAkJCR6tLI+qaHNrGSf9MiCEBEUx7p4M39VqngBZLkhZb2aI9r0QLWho3S+tPewBv7L+Nq5QOIOCqgpzPhHj9k7VLVjkGOFnraf1ZM3KGuwzAl3hCxaSLRChqlodVFVCwLwXWNLOUyVAEtrcfdaLb4qtPK0cazUEjDtJtTStQh/NGVKBoD6CQBLhiyqyLBAKGW0THu0XtKQas6dmjdxnK1ub6O0fnktNQDVGyOkj/Mx5/oCotzJHm1/QbHPLqDttiKRenGVFRb8aGn20omTbQbcgtEFVLXW2nPpbg0ZlBafM5W6BbhH5fHO7lbifwdiG8w5it4Cb8+u9oukvYw0mum3buS1nW5nPsQT/rBcGjXWiBVxjUdaowbLmm7QyPm+dAerI0arhbTqD69YgtTUVcEgIW1CvNKiac/ZZlwOwBfyMUXnhkXySHvST4fNoNxsYAT+vItkCfQkeLcWr8dsI1gdrYKRfXbdo0SJMmTIFgwYNgizLGDlypC21RCAQwI4dOyLSarz66qto1aoVrrzyyogyvV4v5s6di6lTp0IIgU6dOpm54KlsHgnweCRIJcWQAqVagMYgVEhqECIUhBQKQC3Kh5J2BlRf+O7CAr8KWdKC6ZIkAbIHakpTbfQZtDmZ/SEBRRL6KGVZSzWJEvgkFUkeWfuuVYFASOuLAYCqj2C2zv1r9qsgoApJT03nnp7PNgoLiBoZiBbwi1jP7cnWi/dC1Ub+aQVAOEbcQZbDQTBj5Jt19E2UgJ/bfpkX//XUnZIa1NrUCHhZAj1mQEBRIYLaGHavJxEhffhbQJUgqcZnsgQBCXIZHRyhB4MUfQSfBJ+276EgJEkO77ckh1OYGsE2
I/gne8y0gbb+srDvu/XYWkfKSZIl4Gad11APPBrLzXkqhQwhwRz1WBa3gJaRiEEV2qjDEPR6CRExck2y9cMty61taPnfGB0VntskvH1rwM+6HfP4O4J7UQN+xt/GCLHKziGHyGOkLbP3QdxYj2V5FwWsPdloL8WygvhObl/TzjpHjKSF/ZhFC7iVydrWbu0e43Gwdvsk5zE0PneEag/2OYO8CI/yMwJyERlRjHMLGNsz+kza+ZIsSRFzWxqvZaOtFMk4JhIEhD4no1a2JLS/FQHbdp39VGvgmGLDfhWV55577sHcuXNx8uTJuM2zVltIkoQmTZqgSZMmOPvss+Nats/nqzUBP0AbtdajR49qKTsegWtJkpCenl7l0UNu5cYrYJKZmVmpecKiqUpgyBDv1+3pKJ4BtXgc0/79+8ehJtUvXgE/oPLHoHnz5vjNb34Tt3o4JSYm1qmAX0XUu6AfoJ1wJHpkyJKEZikJaOCVLY8BCfq8MEneBH2+NhnavHRa2jdjhAagB5AkIEGRkOJVzJOqZK+sjfaSrME67WKzVxbaCDRI4ZiKpF0Q0FI/aXcdh6ClAjJO2o0TqbJuAInlpBmIDPZFGxloXR4Ox0jm49rJmzDP7rwKzL+11KCxBTqijQyLFuwzLpZb9wHQ040KAahavYR+wmjUXgiYKZpkSRttaVz8crsAIkkyFOOCmqSVpUra3c2KLBBwnK9bR0MawQfrCEjnCLnygjOxUB0nu2W9BmJOJSoZ6VwBwP63sQ1reeEgrpY61XytGiMPbYE9rXxroNcIMJWXXtIt4FdWikxt3Zh2uUrPVxwruY7yc7xWYhntVlaZ0d5b7vMbRrZrtHaLNcWnWU413m3kTBtsDfgZI66jjUYFEBHsC6lGOfa5U61z8EVL3xnSA37GZ5Q16GcN+Bmj/owRx8ZcjuHfbqNzq60JoxJq7HNmVrb86pSenl7m/Bjt2rVzTYP1hz/8AX/4wx9cnzNkyJB6ddfyKSdULSjjTdBGY3m0NDVmMMUYSeZLhAj4tbl2i3ORnJgGr6x1HCI6+vrF5tJQEEX+EBI81iCN/u0eKIHPmwyfbPQXwt9hEfOGqtp7WbX2CaAF7BTL91tlvouN8qI9VY+HxdxPA2C/oG9NhWmd288yl59tW1GYH/16oMsM8jjmsTMCfWZRlqCYCAUhKarWP9L7ZZAFFO1qvt4XCPeTJYRH+RjlCWP/9LSekD2WQI1l4nsj0CmHU3saI8qEEegUxtzBkTteXlOHg59qZDsYj5n7716YM/hhLItYB5bAnxRONas6irUGRIy+krmKHqyRzLKs+2pP8xktkB33rxxRXtJNTbSbmZzLrW1lXcfMSKJvTVjaKVZlhdnK+tqKdnOik/UzzBqUtT3VLdhX3ig/57pR5gK2jU621svyeFmBvwhRAn/WbVgDfs4mNI6tJKTwa19PcWu8y63tZN0PRT/fkgQs53JCexuqkv6agP65at9uTQX86nLfiv0qKk+jRo3w4IMP4oEHHqh3QT8iIqLTUb0J+hldVOOEMdmrINmroF2aDz7HOY7xWOMkj5m2saFPQVEghAN5pfBK2px+1os2KV4ZDbwwgyNpCV6UhFQUBcKpmQKqClmVUBpU4ZEV7QKJZMw9Fz75Me7WNucr0INXsU5C4Tbflmv6PZeAnrWtFFkyl7vdMRkejajNnacKLV2qEfhTtVs6tSCcHt1UHZELZ7DRLYWlIklmOjzjIjkQPUBmBpwEIOuByZCqLVAAfd6ZMOPCCdSg7UKX2R6yJxz8k/S50GQBNaQ9L+JuZUtqq5DlQpQ1AGGrpy1oKcrcN1tbyY7nqeUHwNzYAg6OYQ3W+qgIB/5Uyz5YAykAbIFQryxDlfXgsHHBRLZevAkfa+vIMmOf3NrBCJiUuy9x4nzvxJK20gxmWl8c+kLjYkC00W7R5rWzPresbVcH6/x/hlguWkc7hm51LXO/LQG/WOcHtQb6wmXYg33WgJ/x3GijUq3B
PgDm6D5jpJ9zhF+CIpsp5MKpPSUzFbGRlhio0oAJohplvFXNOJLiBaQUqIkNbReTpVAAUkifR6NBKkT+SYROHoHiSQBCfiR40s2RIzaSdtPVieIAigIhpCVq3dJwGkgPpEAxPB6f9t6TjfeYCqhav8t4nxlz/nr1kSLC/JwIB/+sH1fRPuGcVXT76DVG+ZX1MSlLiAwwWVLoSVpB5nLhCPhZg4DaBXdEpHY0i5VgSzvoGuhSwwFAyRr80/dZUkN6cM5vpsP0ehIB6DfCqVo2BehxBOv3ofNj1UiVLskSFEWff1sR4fSQ1nSS5rx+ihbo00f8hYR+Y4deP+MYlsfsFzuPgTMACn1Un/V1LGQIVQXkske6Rfs6dgb+zDo5swdI+g1szheQNQAJLXBjDQyao7lgP2WwphI1+7zW1JKqZf/dtheFFngvf9RftNdlxHqOgHhEwBPhwLo1zGgNyEXbTHXMVhFtFKX19W577Vd0rj7r8Xa0sRQtSAf3NrB+VtsCf9G26UgfagT3jHNUI8hnDfi53fBm7H9I36i1PYTlNe4cGRke7aeH+vTgnzGyVzunk8ztG7d4ElH1mjJlCubNm4cmTZrUdFWIiIioiupN0E8VwPHiEIqDKtKTvDDmVLEyTmiOFwVwHAH8nCOQluBBSoIHDX1aILB1WiKaJHuRlqDA5xJhSFC0qEapR6AoEEJRIGS56KudQIUcdwMbo9cMkqSlkZT0E6uQkABZQNJTJllZ5+mzXkg3U0BaTqCdQTxj2852kmA9+XKcjFmCXIpkpHmx1y0QEpAlAQT1ixaQtYCO/jwPJNvIHDPdknFRXQqPpPQq4fSeXsU9gOkW5FQswUNhBE5FOGWqNkpR20MFxh2l4btYnYE/J+td9bGcwstSOOgZHkXnHtDS1rcsk8P7bR35GC677ECYs/zwhTgJzrurnQFAGeH0itqIVxFxO60zmGIVkoUe+AgH/6z7ZNTfeuzNUZ8uIzrdg4Bl73NlR6KF57aLXBZ1W/ruGzdsW9vbfMXIkkt6ysiAVyxpO2MVy7yHZbVTZQJ+hlhHNEYLcJa3z9aRfc60ntbtRIyqdbxffPoI75A+5CLaiD/jtyJL5sg+nyc8ik8b3afPK6po84iF5xuVbXNv1uTcvca8BtVZPp0eAqr2Oe4x5psDXIMAoZNHAFmBnNxQm9NPDY/+kz1Aoh48d/tMP1bkR15JEGkJHjRJVuDzJUEKlGoP6nOSeeUE+BQJqpChCu1GLetNKAFVhSwr5g05IRUIRHy2ab+1foP2t2R5TOjfm0ZQzggUGi/3iBiNI8ijQOvTSEa5ZczdFdGO1rSeUnj0m3XetPICfmYAxZzHLeg+us22E3rgT6iQVO25QpK1uf9kD7yyR5sXT28HWdI/P9Vwf8YZFFH1i/eSABTFFw5eyIot+GSdM07oqUxhSeupIjyi29rmTs6gmmT9bUntaQuEGYE1s1xHJghnM7ls321kmwzJHpCTIlNVm+cFesYNW0YHox5G2lFo/deQEfgz+u2ObRvH33wNOEZ2Rhx/t/+to8MqmOJTew2EM0fEGggEYL7frCMkzcCf0NI/WtvOEO0+KSNIZZRd1rrlsd0oEKVvZFvqDNxWNBDopB8H63llmavDHvgD9OBftGNqTR8MmCmEjc9Boy3dXv9A+BxJkcKfhxK00X6S282n1veeXhcj44p2fqoda8kS7AtBe79Yj6utzFOIfSuq75KTk/Hiiy/GZV40IiIiqln1JugHAMVBFQFVIMEjmyfUISHgd1zk1S4UqcgrCQKpiXq6Ty3NZ5JXRkOf9iMABIX9pMKnSBDQg1WWQIYiwQxg2AN8kj6vSLgMWWhn5JIkEFIlKPq8M4oceWHDSDvn5Bxh4zZqT3I5HdLurNSCeLKlXta7hI1Tu/BdtlpQUhWAV9HvLw7pbaDlYAnP92fMD2cZ5WQNdBmBHq9sT4Gn3bkfUV2tLGdepCiMO1QlfeSkqp8sqnrKmXJT3CDy
hDpaGieDImlpRo2AhnEntjUYFBGQMYJelmCf23rWAFl4mXs9oik7eGOttPa/a9pKXZnpESVh3LoOVWiNICuR+2P92x7YtIwANd9XFd2fSGWmEpUj14mWDtfYZyOQqijh+S6tQd5wil4pSlDPHuyLx+i9igb8nO3sFI90npFzjDoejzHgZyvD5TmhKNfSjM9N63E0bgIJlnHVzJre00jjaQT7ZMk5qk/7njEfq0UBP6J4CqpC69soEiB7IKmqFkwyLiLrd0GIgF/77UuECIUggn6opcWQEkuhJEjwyRJ8ijaa3q+GLy6HVIGC0iDy/UGcLA6gOJCANG8yEAqPzhehIBRfIjyyBK8s4FNkfWRfONuC8fmqquGgk2LcZGEJqmgXuIWZjs642GwVz4vJERf93S6+O0f5Webys16IjyjbEWzTLppDOyZRRva5sgaFVBWSFARUj3mctb6OMdJPT90nhz9LraMMVbPNhZktQpI9EEp41JntU1gOp/M0Rn6GHEFO58V+N5JkP262AI0aJc2pquqVt7Sio42MAIh13wB7fYy/zMCVZSSo9TnG95iQJEiyFsySRHjUky1Q7Aj+yXqaV6Ofqc2DJsx9dY6Ys43ycyyPUNXAVDncbkKLqALcA38AbPP7uX2FG+0QLdDo/hztt+1lYgmUlcd8r5kFqhF/S9bXWZR1Iivm8tkQRbSUl85Apy3459wcwgE/6zmP8b9zTj8AtvlNZaHfEAD9xgeXPqTZTm6B6CjBbWGcVuhj/ML7Uv5riYiq5pprrqnpKhAREVEc1FjQb+7cuXjmmWeQnZ2Nc845B3/5y19wwQUXVKnMJI8Mj6rdgZeWqCBBkfFzrh9Ffi3Il+8PwaNIKClWURIMIacogDOSfQioArklQaQlenBGkhcCQE5pCAV+7cQkwRNOQZmWICPJo803pUg+pCV4ENBP5rx6KjhZ0oKDiiTpv8MX0lUhzLvLFUiWFD3avBXGQCs9oY52t7gsIaifIwUsV88VPV+oLY2nZB9Z6Hb9PnwxQrKcCIYfM05DrYO+jDsuteCGDK8CBEJasEPVR4hZU784Ax72+a3Co/q8ilYbt5GJzpNPN87dC+lnicaYPiFpQUlFlqBKkjbaT/ZFbEdAO6ZCAP6QNkogoL+WzJNcS0BHFtpF/kBINQOhWnBRmOt6HbVzDbxYg8FljIbT/raXEy21otcxD4hb8MwafPJCG7EXUAVUoSIohPk688oyZEn/3wNb2kQgPC9kSBXweWRLG1gCYLagmjP4V3bAL9YAlOtFBpfrJM65LgH3EbIRZSmS5UKSFviXVaGluIU2AjB8Xc0ePHW+L5x3/FdGtItS0drPbY5E6+uiOgJ9gPvch9FSmCrGFTRLAFqWJfOivfZhqV0MDOi/w0MvJcu+6ssUlCsc5LUvM+cXlcPBvmipiMsK9tlHgp5aQhXVOjdMdc/pR5VTHX2rgCoQgPZ+9ikeeLwy5IKjkIJ+bSRfSAv2KY0ztJFcHg+Qexxqfg6EvwSKvwSNGrWANrebhF/yAsgtCeJwYSnySoPILQli/8lihFQVBSVBrX/QJAWZXj/k4lxACEieRCgSkOiRIEsyAqqKBI+MhJBi+1w1RvkZ32HWUXCA/XNXqEYKdtj6QtZRY9pFbC20qECKegMQgPAIP6M8t9FVZQT8hKQFVY0Rb0IP8rgFmmSj3rAHfBR9u86Rbc7UjkKSI4KRklAdaT6D2mg/AB4lUb8pTDVvGDPT8LkwRlKGRHikqKL4rI1v238Bo19iDwa7BR7KYraLtY+lOtvBEggTavgOt7JGQzq4BfyMvyXr45ZfZppCSevja3fLCZgTf8MyB5pRR1k2b1QLp6GXw3MjSuHU6sZXqBE4tKb1tKU2NfbbrHTVA37WdKNm3QDbKFlnsCYcQLKUo7/1zNc5JIT0cpwjG62ce+Ac5edkLcoaXLT1ya39JoQ/Q5zBVWegNWrArzxljax0We72XrAuM25oAPRRt5blznXD
r0891boIp/WM2AZERMBUGC98t6qbG9Ffh2pQL8jSLnI4vbFizgGqp/vU52M1RvzVdLyPfSsiIiIiqitqJOj31ltvYdq0aZg/fz769u2LOXPmICsrCzt27EBGRkaFyzPuClZk46RM0tOAGEEaIEGWzXmr0ps0QEgIlJ6hXTRK9MhI8XnM1FMhVegXKrTne1QJiqKd3BQHhXkXr1cWCCkSFFk7TffI4VF/PkWGIsNMEWqcBEvCftFIlYR5l6QshH5XI7TRUi6nq85AhjnfBGC7cBXxPP237aTVcXHLOHkzLmypkjDTtYRP6Iz5QgQkyGYqTeME2xj95LzgLstGMEsyA6PGaERn4NHKGgQFYkxto18wkvQ2lISknUTJ1jt7pYj1jeNuzBNmpKmx3l0KaG2i6iP8oMh6EEMLfMrGOb5wPxBuQT5DecG+cB2iCwmBkCPSErCWb46stL9YVCGghLQ51AISIlKtqh6BkqB2wcZnpAFF+K51Y0SU9UKbW7pOa3BPkSKXWfc3HoEo53at5RrBcmfAL9p7yHbZRujPt12t0i7iuQW/FMk9AAaUP9otlvkfgfIDftFGUMaznQ1ljRi1joJ0GyVpLFel8Byq5lGS9UCzo86yEr7IpyDyWBtlhpfry1xG2xqPybIl+Kc/bk1HbLSnV7Y8twKpYoniLd59q9KQMOdVM96jqpBgzjUHaBdwhQohSRApZ5ipKaUGJ4Dc4xCBAERpCWR/sTYfoDfR/D5XJAmBkEBucQAFJQH4gyqOF/jRpnES2jVKghQqgewvhOpNgja/nwqPHA5yGO9ft3S/Rl3N0SL697xs+W62pqGzXrC2Xsg29tsYXRULGY7r33rgRZuzSoQDf4A94OeYxw+wjNiBPXhi254l4GOdv68iQSxXeqBIqCoUj1YXI7uB1jeUzBtfnIyRlJJ+A4cQWgXDgQfZsq496OAM+JV3kd+Z2tP8DDf6ltFSXFaybZzf8c7eujX+ofWRw4ES84Y8/fGQ0LoOIejfa87gjh4YkWRrIC08z5/erFEDT8b/EXP5lSfG1J7G6875nW8sV40gZ5TXLmAPuhnvbWvw1Bj1V17SD+cNemXW21Kn2vpVXZH5/NxECwA6g33WZdHeEW6ffeW1m/VxCbC/Ll0CocZNBxAqJCn8rpZhHc9trzvDY0RERERE0dVI0O/ZZ5/FhAkTMH78eADA/Pnz8dFHH+HVV1/Fgw8+WOHyQkJLP+VTtAsKXtlIHyXglSV4fAo8soQkj0BQ9aBj4wT49HOp4yUh5JaGkJag6GmjtNMMoRppibSykzwSkjwyjhQGEBJAik+GIiR4RfgkNckja4E/WUKiIpkjiIwLFyERnhjdeqIVghawNOb5C0HbsPWEOSLAAHvAz8q4rGIGMYzllgvY5QU4ICE8gbq1ED3NlwotaKMKPd2Tvk23E0NrICviznSzjpEVsd5Nql28E66jECOfp6f9k7V5/qDPRhhSrYFR+5NVEb7IFFJhmcNOCzgaa8sy9FF+Wq3NQK0eyAjJZQcn3dq7rFFYsQT5rKOJAqowRz0YrznrdlJ8ChSPjGRtB2yv0ZJgKBzI0Ic+Ge+HkBBI8mplG+WqltEU1lSmzn20Bk+s+yfHsN/RAifO1KnRRj06xTL/pVtaXKNu1tS3ErRRuMJYAACy0C8qG29gbXG0i1DOgJ8RDLMGhMvat2jLywr4lTXXZHmiBSjDQf+YirEHvFWYgT+j3VTVEuzTR/hZA3/GG8y+L5YAX5RgnvFa1B6LDELHEiiNXMe+b7Ul2Kc6hwhVR/lUq8S7b1UYUFEcFHpac/0GGqGlz4biA4wRGwAgexBs3EpbDsBbmg/55FGohXkQJYWQS/IAXzIAwCN7keCR0STZh9zSIIKqQJE/hBMFfhw+UoBW6cno3jQFciAXwSP7oZzRHJIe+PPJWvnWt5ltNLX+t/bdpMIrK9AzUpr9AkDrcxij0aypPh0fTea6ZuDP0j7Oj0NjhJl5U5E1raaT
EdTTbxqD7LGNdoE+yk/bjvt7zehLGQEW24V1S6DHVgdHmnPzOVZChZmSQqjacVaDUGSPftOTtsHwa8JoL3s9hd6/haplXVBVYdbZ+l3hTM1oHdVnpsNE2R9nzpveJL09JMAx2i1KMNRI8+ngbPto8SSjH2WuZxQrgJKQavZZk7ww53000p8Cev9C709I1qCw8TukBUIgy+Y6sktQyPYaUB0jPKMFgZ3bKyPYJwlVm/vQeJ6xXJIsNy5aMz1EBv60drHPe20NwDlTUoaEPqN4GYFDa7lmmVHXCm/LdbljHWtrWAOstmBrtLa1BrcqE2SOcSqC8kRLA+r2GePsr7ptytl24X605PhtvxnBNvrUVkG9HBnh1xdk14ByWft1KrBvRURERER1xSkP+vn9fmzatAkPPfSQuUyWZQwePBgbNmxwfU5paSlKS0vN//Py8myPG3PNCD0QJYX8kKBCyDISGvoQVAVKgioSFe2ihPXCQIpXRoISDvapgL6+ME9Sc0uDCKgKigIq9uVq9WiTlqAFl8zgCoCgiiSPDJ8CJCgSoKe8giRDyB7IQjLvWjYvbsj6NShVv7NVEhAhmGmRDBL00VT6KaZzvio31rtnJUmCImsn44ps3IUruZ70Ctt2tXSkxoUdSQmvEz4vkfRl4SdaRxRq/2vbNi7MOLfvdhe9EOELB9oFH8kMnBptALjfmSqENl9iSL/L3Hri7rw4ZGw3PHoNZoob6zwSgH5BXwFUVUtjaVxgNAJ+1pPl8gIgZc235mQNkkQ7IVSFQGlQRVEgpM1bGRIIqKr5WlEkCRkNfEhL9CAt0QtZCgf1VAHIXgWJHgUNfYoZzDNGaDoZ6dMCqkAgpEYEG816S/YRhm6pPI22KC+YYr12YgQ6bfO7Oa/Gqe7luHEL+DnfXqoIP64de+M5duH20oLU1v+Nq8bOEWxG3d3m2XMboRaxPMooPns5bsuiP8HabkbbW9cPpymtWMAvcjuSdpHQfOEIbe6wkF5nI/WaHvjTAniRG7MG85ypcq3z8jnnEjX21TpS1Vau7D6vqnXfte1GjvKsqdSedPqpjr6VQPi7EJAQVAX8euArKaGB9p1anGsbmWYINmoFuVtDiC2fhctTvBC+ZIiSAAKqisOFpSjwB+EPqsgtCiCvOIBQSEXDRA8aJXogTpxE6ORRqEX58LVToCalQUnUgn5CDwgFQioCekpuRf9yLw1q40JCQkKCRzazLEDW+leKrPUNJL1vYwsWWfbBvDAOe+APCAcyjPWMLA+K9SK3CyFJkKxhBKPdrAE/i8iL9dpGJSl885QZfFDDcyC6pXE0Rxoa23UEb6wBR+jpP62blxDeph76NQOhQoT7eIDRZ9NGUWr9WaGliZckPUG2o11E5HeJceOd0Q5uwUXnDWNGsM/JNtLN0pmQhJ5MP9pcZ9DOCfyhcLpD63E3bhb0yFpAPKCGR8dqWUgEguYNZECi0EaEG/e4WIX0Owg9RnpDSYUUCiKCkT4SiKy3NbWk2/xpsYgW+DPSjerM428G7CIDf8KyjnPEn/P70hjRZwhZ2hpwCQJGq76z2jF8DYf7gOG62EaqGTcqOtbXNliJEbUVWT+GEZcRxZfT94iYZ9l8nsvmYT/Obu85o23Mv/VzLgnu78fKct4UQERERERE0Z3yoN+xY8cQCoXQrFkz2/JmzZph+/btrs+ZPXs2Zs2aFbE8Pz8/cuWQH7K/BFoUTYVQEiB8SSgJhk8Q8gOy7QK4gHY3siq00YGFfhXFQVU/WQeOFwWQ7JHh88jYn1MMRZLQENod69YghywBfp8M1atAlQKQgnpiRUWBUHzm/CwCekpM47ewjDIT+mgt/UIBYL/YYa2zwdgVazDLOqefcaHGCPp5pPAJrVvc0Jb6xXKCZdbF+LuM+2jNeQNhP5m2bteol1FP82Zkx526ZqBUhNsICLe9c/3IdgkHA8zHHPttDWIKfTtCWLdhOW56sEmoCKcDNS5GmRetwjVxBgas3OaY
qwjr9oKqQEFpEIX+EAoCQRSUhBDUL4oC2ug9pVEiQklepMIPjywhYNz0b2kDgfBBVgFomc/sIzMV/QmSCkghFUJPexsMaUFGa2BS1i+IAeEgi7bv4f2QpXC6z7Kubzivk8QaWJUcQR3nSD9n0C9aPN18jVjSzRqvDaOdgyFhe10YrxdjjhSjbdzm9nMGiSoS8JMd7Wldz9q+9iCz+34C7sfB2v6Rr/noZZW3LfsoHe2foMuoVVWNbDNA2/cAYBtVqm1UhpD0C82yPveeLEHVU0Gr+pyiqn7h2u24B2D9bNB/W+vuqIw98K/9XZCXrz//1F2lEmoIQnVLihW/8qn2qI6+VX5JCEVBLaWmIoUzKSgy4PfKkEuLoBQUmSPJAr48c6QfAEBqAKk0BASKIecXQA35ECgF8vIDyC8J4khOMXKK/SgqKEVebh5KivwIFvuhlhYiUJSPnGNHETxxHEINISGpEVTPGVD9QHFIIK8wiKJACIUlQRQHQlAFzPk2AcCvyFpdAx7b57sswUwxDsDcL58iRfRLjL4AENkPsd8MpJXp0W8mUGQJHmgj5LSRPSYd3gABAABJREFULcIWGDACUGZKT0nWRs5JsjbizzrHnd7Hsab3NC6oG99jxjx+5nxZqtC3HdJ+68FAhELhed3c5nYD9Lp4tBGIigLhCUBICoTXr43mhGze5BNU7f0fgchRUM6+Znn9Prc+X1n9Tmdf0zgGHuN4QIUUKIUUCgCqX5t/Uj8ezuMgPF5t3xU/hNcPIXtQHNT6VqUh4ZqiM9GjBf0S9C+4UvNmKK1dgkI7pwjqfUckeeAzg4Tad4RHfz1q7y395sWI+c8sJxpwBGiN42amTtRfd6GQvp8h9+Nt/ZJ3ex1IMgDZ/XVqpqUNn9c4X6vOYwrYz2XcMiBY5z+39lXcgn/GMTC24aYiAT+jPOvrEwiPqHUd5eds14j5/KwnVDGM+nNrZyMIDNjauizW0bNuzOQttufYnxsq431oVhf2805JgtnPst4IYYwYNl+XbvsNrTChfwaGLFNIhPTXk/GZY9TH+K5i34qIiIiIKFKNpPesqIceegjTpk0z/z9w4AC6d++OTp0712CtiIiIYpefn4+0tLSargYRgNrbt/rfH4GKJyMlIqLTEftWRERERESRTnnQr0mTJlAUBYcPH7YtP3z4MDIzM12fk5CQgISEBPP/lJQU/PDDD+jevTt++eUXpKamVmudTwd5eXlo3bo12zMO2JbxxfaML7ZnfMXSnkII5Ofno0WLFqesXrwb/fTCvlXtxM/b+GFbxhfbM77YnvHFvhURERERUdWc8qCfz+dD7969sWrVKowYMQIAoKoqVq1ahSlTpsRUhizLaNmyJQAgNTWVJ1dxxPaMH7ZlfLE944vtGV/ltSfvQqfqxL5V7cb2jB+2ZXyxPeOL7Rlf7FsREREREVVOjaT3nDZtGsaOHYs+ffrgggsuwJw5c1BYWIjx48fXRHWIiIjqFaGq1Xw3erTZgqimsG9FRERUfdi3IiIiIqK6okaCfqNGjcLRo0cxffp0ZGdno1evXlixYgWaNWtWE9UhIiIiqtPYtyIiIiIiotpEVVXIslzT1SAiOu3USNAPAKZMmRJzyik3CQkJmDFjhm0+Gqo8tmf8sC3ji+0ZX2zP+Kqt7SlCIYhQNd6NXo1lU+Wxb1W7sD3jh20ZX2zP+GJ7xldtbU/2rYjKFwwGceTIkVM632Zdl5+fD5/PB5/PB0mS4lq2ECLuZVbU8uXLcccdd6CkpAQpKSlo2LAhUlJS8MADD2D48OE1WrdoQqEQFEWJS1nxPAbBYBDHjh2LOmd7RcV7PwsKCtCwYcO4lBdPQgjk5uaiUaNGcSkvJycnbmUBteN9errJycnBwYMH4fV6zZ+0tDSkpKTUdNXiqs7ebpGQkICZM2fWupOBuortGT9sy/hie8YX2zO+2J5Un/D1HF9sz/hhW8YX2zO+2J7xxfYkik1ubi527txZpTL8fj++
/PJL/PnPf8Y///nPKtfJ4/HgxRdfRL9+/fDNN99UuTwA+OMf/4jS0tK4lFVUVIQ333wTQohKl3H8+HG89tpruPfee5GVlYUnnniiSnX66quv0LFjR8iyjKSkJHz55ZdVKq+oqAjvvvsuRo0ahVdeeaVKZRmKi4uxZMmSSj33mmuuwYYNG9CjRw/s2bMH3333HTZv3owjR47EpW4///wz/vCHP0CtQork/Px8fPzxx3jwwQdx4YUX4s9//nOV61VUVISXX34Z119/PYLBYKXLUVUVn332GSZPnoyWLVti5cqVVa6bEAJvv/027rzzzkq3m1HG0KFD0aNHD6SmpuLRRx+tct2CwSA+/fRT3H///fD7/VUq68SJE5gzZw66d++ONWvWVLlu+/btw6RJk3DzzTdX6ZgacnJycNddd+Grr76qclnFxcVYtGgRxo0bV+W6ff7557jjjjswatQoPPfcc1WuW0lJCT755BM89NBDVT6mVoFAoNLPTUlJwb/+9S90794d7du3R7t27bBnz54q1UcIgb/+9a9QVRXHjh2rUlnxUmeDfkREROROiBCEWo0/gnejExER0emDfSuqbdavX4/nn3++SmX8/PPPeO+99/DnP/8ZGzZsiEu9Hn/8cVx44YVo2bJllcuaPXs2HnroIdx0001VLuvYsWO4/PLLMWbMGGzfvr3S5aSnp6Njx47Yu3cvPv30U+Tn51epXoMGDcL333+PUaNGoVmzZmjTpk2ly/r+++9x7bXXYvTo0Xj77bfx/fffV6lugBYY/vWvf43rrruu0uW1bNkSn376KZ544gkoioLevXtj3759VapXUVERZs6ciTPPPBOPPvpolcrbtm0bli1bhr///e/YuHEjjh49WumySkpK8Pjjj6N169aYOHEili9fjgMHDlSqrJ9//hnXX389LrvsMsybNw9HjhypcoBoz549uOqqqzBq1Cj87W9/w08//VSpciRJwg033IAHH3wQGRkZKCgoqPLot2PHjuH3v/89brjhBrz44os4fvx4pco5fvw4Jk2ahJYtW2Lq1KnYvn07Pv3000rXa8+ePbjtttvQqVMnzJ8/H59++mmVXm+qqmLBggXo0qULXnjhBfz973+vdFlCCPzrX/9Cly5dcPPNN+Odd96pckD94osvxvDhw7F161YUFBRUqaz8/Hw8+eSTuOOOO/CXv/wFJ0+erFJ5hm+//bZKN6p4PB48+uij2LhxI7p37442bdqgefPmlS5PCIH7778fDz/8MPr06YObb7650mXFkySqcosLERER1Rp5eXlIS0tD8+v/AtmbVG3bUQPFOPTOncjNzUVqamq1bYeIiIioJrFvRZVlvHbqwjENBoM4fPhwXAJ18TJ79mw8/PDDSE9PxxVXXIGXX3650u34888/49Zbb4UQAl26dMH48ePRt2/fKtdx3759yMvLQ8+ePatclhAC3333Hc4555wql5WXl4d169YhKSkJgwcPrnQ5oVAIY8aMwfLly9G/f39MnToVWVlZVarb+vXrcfjwYVx77bVVKmf79u3Yvn078vLykJeXhxEjRqBVq1ZVKjMQCGDlypVITU3FgAEDKl1OKBTC7t27sWXLFmzduhVTpkxB48aNK11ebm4uPvvsM6xatQoXXnghRo8eXalyDhw4gGeeeQYnT56E3++H3+/Hk08+iW7dulW6boa1a9eiadOm6NGjR5XLCoVC+N///oezzjqr0qP+g8Egtm3bhk2bNuGbb75Bnz59MG7cuAqXI4TAl19+iZ07d+Lnn3/Gzz//jOzsbLzyyiuVTmP89ddfY/Xq1SgoKEB+fj46depUpSk6gPD+fv311xg1ahQaNGhQpfKMMnNyctCkSZMqlyWEwPbt29G5c2d4PDU205yrkpISfP7557jiiisqXcaMGTPw2GOPAQA6deqETz75BB06dIhXFSPE2r9g0I+IiKieML78M0fOqfYLU9n/urtOXMQgIiIiqiz2raiy6lLQr7Y5
cOAAvvjiC5x//vlo165dlee7UlUVssxEZxW1d+9eHDt2DOecc06tu1BPRFQb/PGPf8RDDz0EAGjQoAH69u2LJ554Av369au2bcbav+CnNhEREREREREREdW4li1b4oYbbohbeQz4VU7btm3Rtm3bmq4GEVGttGzZMmzduhXz5s1Dv3790LNnz1p1g0Sd/OabO3cu2rVrh8TERPTt2zcuk16eDmbOnAlJkmw/1mHcJSUlmDx5Ms444wykpKRg5MiROHz4cA3WuHb57LPPMGzYMLRo0QKSJOGDDz6wPS6EwPTp09G8eXMzjYJzYu0TJ05gzJgxSE1NRaNGjXDrrbdWOUdyXVVee44bNy7i9TpkyBDbOmxPzezZs3H++eejYcOGyMjIwIgRI7Bjxw7bOrG8v/ft24err74aycnJyMjIwH333ReXCYrrmljac+DAgRGvz9tvv922Tk22Z7XOOaP/UP3CvlXlsG9VNexbxRf7VvHDvlV8sW/FvhURERFRfXLNNdfgjTfewKRJk9CrV69aFfAD6mDQ76233sK0adMwY8YM/Pe//8U555yDrKysKk9Uebro0aMHDh06ZP785z//MR+bOnUqli5dinfeeQfr1q3DwYMHcd1119VgbWuXwsJCnHPOOZg7d67r408//TReeOEFzJ8/Hxs3bkSDBg2QlZWFkpISc50xY8Zg69atWLlyJZYtW4bPPvsMt91226nahVqlvPYEgCFDhther2+++abtcbanZt26dZg8eTK+/PJLrFy5EoFAAFdeeSUKCwvNdcp7f4dCIVx99dXw+/1Yv349XnvtNSxcuBDTp0+viV2qUbG0JwBMmDDB9vp8+umnzcfYnlSXsG9VNexbVR77VvHFvlX8sG8VX+xbERERERGdOnVuTr++ffvi/PPPx4svvghAy83dunVr3HnnnXjwwQdruHa128yZM/HBBx9g8+bNEY/l5uaiadOmWLx4MX79618D0CbGPfPMM7FhwwZceOGFp7i2tZskSXj//fcxYsQIANqd6C1atMA999yDe++9F4DWps2aNcPChQsxevRobNu2Dd27d8fXX3+NPn36AABWrFiBq666Cvv376/0JLD1gbM9Ae1u9JycnIi71A1sz+iOHj2KjIwMrFu3DpdccklM7++PP/4Y11xzDQ4ePIhmzZoBAObPn48HHngAR48ehc/nq8ldqlHO9gS0u9F79eqFOXPmuD6nptrTyO2d8aunq33emSMf3s85SuoJ9q0qj32r+GHfKr7Yt4ov9q3ii32rSOxb1T+c04+IiIjiLdb+RZ0a6ef3+7Fp0yYMHjzYXCbLMgYPHowNGzbUYM3qjp07d6JFixbo0KEDxowZg3379gEANm3ahEAgYGvbbt26oU2bNmzbGOzZswfZ2dm29ktLS0Pfvn3N9tuwYQMaNWpkXkQBgMGDB0OWZWzcuPGU17kuWLt2LTIyMtC1a1dMmjQJx48fNx9je0aXm5sLAEhPTwcQ2/t7w4YNOOuss8yLKACQlZWFvLw8bN269RTWvvZxtqdh0aJFaNKkCXr27ImHHnoIRUVF5mNsT6or2LeqOvatqgf7VtWDfavKYd8qvti3IiIiIiKqPrUr2Wg5jh07hlAoZOvoA0CzZs2wffv2GqpV3dG3b18sXLgQXbt2xaFDhzBr1ixcfPHF2LJlC7Kzs+Hz+dCoUSPbc5o1a4bs7OyaqXAdYrSR22vTeCw7OxsZGRm2xz0eD9LT09nGLoYMGYLrrrsO7du3x+7du/Hwww9j6NCh2LBhAxRFYXtGoaoq7r77blx00UXo2bMnAMT0/s7OznZ9/RqPna7c2hMAbrrpJrRt2xYtWrTAd999hwceeAA7duzAe++9B6Dm21OoarXODSNUtdrKplOLfauqYd+q+rBvFX/sW1UO+1bxxb5V9PKJiIiIiOKhTgX9qGqGDh1q/n322Wejb9++aNu2Ld5++20kJVVfqhKiyhg9erT591lnnYWzzz4bHTt2
xNq1azFo0KAarFntNnnyZGzZssU2pxRVXrT2tM5vdNZZZ6F58+YYNGgQdu/ejY4dO57qahJRDWHfiuoS9q0qh32r+GLfioiIiIioetWp9J5NmjSBoig4fPiwbfnhw4eRmZlZQ7Wquxo1aoQuXbpg165dyMzMhN/vR05Ojm0dtm1sjDYq67WZmZmJI0eO2B4PBoM4ceIE2zgGHTp0QJMmTbBr1y4AbE83U6ZMwbJly7BmzRq0atXKXB7L+zszM9P19Ws8djqK1p5u+vbtCwC21yfbk+oC9q3ii32r+GHfqvqxb1U+9q3ii30rIiIiIqLqV6eCfj6fD71798aqVavMZaqqYtWqVejXr18N1qxuKigowO7du9G8eXP07t0bXq/X1rY7duzAvn372LYxaN++PTIzM23tl5eXh40bN5rt169fP+Tk5GDTpk3mOqtXr4aqquZJLUW3f/9+HD9+HM2bNwfA9rQSQmDKlCl4//33sXr1arRv3972eCzv7379+uH777+3XexbuXIlUlNT0b1791OzI7VEee3pZvPmzQBge33WZHuqaqjaf6h+YN8qvti3ih/2raof+1bRsW8VX+xbsW9FRERERKdOnUvvOW3aNIwdOxZ9+vTBBRdcgDlz5qCwsBDjx4+v6arVevfeey+GDRuGtm3b4uDBg5gxYwYURcGNN96ItLQ03HrrrZg2bRrS09ORmpqKO++8E/369cOFF15Y01WvFQoKCsw7TQFgz5492Lx5M9LT09GmTRvcfffdeOKJJ9C5c2e0b98ejz76KFq0aIERI0YAAM4880wMGTIEEyZMwPz58xEIBDBlyhSMHj0aLVq0qKG9qjlltWd6ejpmzZqFkSNHIjMzE7t378b999+PTp06ISsrCwDb02ry5MlYvHgxPvzwQzRs2NCc1yQtLQ1JSUkxvb+vvPJKdO/eHb/5zW/w9NNPIzs7G4888ggmT56MhISEmty9U6689ty9ezcWL16Mq666CmeccQa+++47TJ06FZdccgnOPvtsAGxPqlvYt6o89q2qhn2r+GLfKn7Yt4ov9q2IiIiIiE4dSQgharoSFfXiiy/imWeeQXZ2Nnr16oUXXnjhtLv7tDJGjx6Nzz77DMePH0fTpk0xYMAAPPnkk+YcCSUlJbjnnnvw5ptvorS0FFlZWZg3bx7TpejWrl2Lyy67LGL52LFjsXDhQgghMGPGDLz88svIycnBgAEDMG/ePHTp0sVc98SJE5gyZQqWLl0KWZYxcuRIvPDCC0hJSTmVu1IrlNWef/3rXzFixAj873//Q05ODlq0aIErr7wSjz/+OJo1a2auy/bUSJLkunzBggUYN24cgNje33v37sWkSZOwdu1aNGjQAGPHjsUf//hHeDx17v6QKimvPX/55RfcfPPN2LJlCwoLC9G6dWtce+21eOSRR5CammquXxPtmZeXh7S0NKQPfQyyN7HatqMGSnDi4+nIzc217TPVXexbVQ77VlXDvlV8sW8VP+xbxRf7VuVj36r+MV47PKZEREQUL7H2L+pk0I+IiIgi8cIUERERUfywb0WVxaAfERERxVus/YvT6xZDIiKi04BQQxDVODdMdZZNREREVNuwb0VEREREdYVc0xUgIiIiIiIiIiKqTebOnYt27dohMTERffv2xVdffVXTVSIiIiIqF0f6ERER1TehEIRcjXeMh3g3OhEREZ1G2Lc67bz11luYNm0a5s+fj759+2LOnDnIysrCjh07kJGRUdPVIypTfn4+kpOToShKTVel1lu/fj2OHTuGrl27okOHDvB6vTVdJSIqgxACgUAAPp+vpqtSq3GkHxERERERERERke7ZZ5/FhAkTMH78eHTv3h3z589HcnIyXn311ZquWq2Xk5NT01WIav/+/TVdhaiEEHErKycnBw8++GDcyjtx4gSKi4vjVl4gEIhLOUePHsVHH31UpTLOPfdcvPrqq+jWrRuSk5MxfPhwFBYWVrq8kpISPPXUUwjF+WaO
YDBY5fdWbm4u/v3vf2PmzJm44YYbUFBQUOmyCgsLceedd2LAgAH41a9+hcceewyqqlapfoAWsP7Xv/4Vt/ZTVRUlJSVxKev777/H888/H5eyjh8/jqlTp2LXrl1VKkcIgZ9//hnvvfceZs2ahfz8/EqXdfToUfz973/Hn/70Jzz88MNx+77Lzs7G3/72t7iUtW7dOlx66aU4cuRIXMoDgO3bt8etrP3792Pr1q1xK68qGPQjIiKqZ4QImXPPVMuP4N3oREREdPpg3+r04vf7sWnTJgwePNhcJssyBg8ejA0bNrg+p7S0FHl5ebaf6vThhx9i7NixcSvvwgsvxLZt26pczhdffIFLLrkEe/bsiUOtgPfffx//+Mc/UFpaWqVygsEgnnjiCfTt2xd79+6NS90OHToUt4vi+/btw7XXXoujR4/GpbzmzZsjNze3SsErQLtYf/PNNyMzMxMnT56scr2EEHjsscfw+OOPV7msbdu2oW/fvpg8eXKVjmlSUhLeffdd3HbbbQgGg2jQoAEaNGhQqbL+/e9/46yzzsKf//xnTJgwAe3atYvL6+3w4cO44oor8Oyzz1a6jLVr12Lw4MHIysrCrFmz8OGHH+Lw4cOVLq9BgwaYM2cOBg4ciCVLluCXX36BLFc+zLBlyxZcd911aNq0KcaOHYuDBw9WuiyDEAKTJ0/Gc889V6Vyjhw5gokTJ6JXr154/PHHq3TzQHFxMZ566il07NgRc+bMwUsvvVTpslRVxauvvorLL78cI0eOxOzZs6v0Pm3atCnat2+Pf/zjH5g9eza+/vrrSpcFAP/73/8wduxYtGnTBvfffz8OHDhQ6bI2bNiAwYMHY+DAgfj888/x9NNPV6luALBp0yZcc8016N+/P7KzsytdjhAC77//PgYOHIg2bdpg4cKFVa5bPDC9JxEREREREREREYBjx44hFAqhWbNmtuXNmjWLOiJg9uzZmDVr1qmoHgDgyiuvRN++feNW3j//+U+0bdu2SmV89913uOaaa5CYmIhdu3ahffv2Va7XggULsHTpUqxatQqvvvoqJEmqcBmlpaX4wx/+gE2bNqFfv34IBoNVrhegBdZuueWWuJR16NAhTJ48OW6pJT0eD15++eUql5OZmYl//OMfeOCBB9C8efMql/fmm28iOzsbF154YZXK+fTTT/HrX/8aubm5kCQJ27dvr9Lr1+PxYP78+WjevDkeffTRCj9///79mDp1Kt59911z2Z49ezBlyhQkJydXul4A8NNPP2HatGlITExEcnIyhBCVeh8MHDgQX3/9Nb777jssXLgQ3333XZXf84qi4IknnkC/fv1wzjnnVKmsnj174uWXX8aqVauwcePGKqdRFkLgrrvuwpo1azBy5EioqlqpoOSqVatw77334ueff4aqqiguLq70Z8iWLVswZcoUbNq0yRxlWZljaZBlGbfeeivGjx+PTz75BCtXrkSrVq0qXR4AXH755di0aRNef/11nHfeeZUup6SkBL/88gu6deuG66+/Hnv37kViYmKFyykuLsYTTzyBl156CcePHzeXV/XzqKCgAD/++CMuvvhidO7cuVJ1M0iShF/96lfo3Lkz1q5di44dO1apbvEiiXiOHyciIqIak5eXh7S0NKRd9gAkT0K1bUcES5G75ink5uYiNTW12rZDREREVJPYtzo9HTx4EC1btsT69evRr18/c/n999+PdevWYePGjRHPKS0ttY1Gy8vLQ+vWrU+bYyqEwLJly9C1a1d07ty5Sheyra699lp88skn2LJlCzp06BCXMqlu+/HHHzFv3jy0bNkS7dq1Q9u2bdGhQwc0adKkRuqzc+dOzJw5E0eOHEFRURGKi4tRVFSErKwszJkzJ27vhXgLBALweDy1tn5VVVJSguLiYjRu3DhuZYZCIeTm5sLr9aJhw4ZVKqukpARHjx5FXl4eevToEaca1m/FxcU4cOAA9u/fj5KSEgwZMqSmq1QjjL5pef0LjvQj
IiIiIiIiIiIC0KRJEyiKEpH67vDhw8jMzHR9TkJCAhISqi8wXNtJkoRhw4bFvVxFUfD4448z4EemLl26YM6cOTVdDVPnzp2xaNGimq5GhcVrVGltlZiYWKXRW24URUF6enpcykpMTETr1q3jUtbpIikpCZ06dUKnTp1quip1Auf0IyIiqmeqdc4Z/YeIiIjodMG+1enF5/Ohd+/eWLVqlblMVVWsWrXKNvKPqt+FF16Iu+66q6arQUREVKdwpB8REREREREREZFu2rRpGDt2LPr06YMLLrgAc+bMQWFhIcaPH1/TVTut3H333fB4eOmSiIioIvjNSUREVM8IVQVUtXrLJyIiIjpNsG91+hk1ahSOHj2K6dOnIzs7G7169cKKFSvQrFmzmq7aaYUBPyIioorjtycREREREREREZHFlClTMGXKlJquBhEREVGFMOhHRERUzwg1BFTj3DCcd4aIiIhOJ+xbEREREVFdIdd0BYiIiIiIiIiIiIiIiIioajjSj4iIqJ7h3ehERERE8cO+FRERERHVFRzpR0RERERERERERERERFTHcaQfERFRPaOqIUi8G52IiIgoLti3IiIiIqK6giP9iIiIiIiIiIiIiIiIiOo4jvQjIiKqZ0RIBaRqvBs9pFZb2URERES1DftWRERERFRXcKQfERERERERERERERERUR3HkX5ERET1jBAhoDrnnRGcd4aIiIhOH+xbEREREVFdwZF+RERERERERERERERERHUcg35EREREREREREREREREdRzTexIREdUzQg0BUjWmoKrG9FZEREREtQ37VkRERERUV3CkHxEREREREREREREREVEdx6AfERFRPSPUULX/VKcnn3wS/fv3R3JyMho1ahTbPguB6dOno3nz5khKSsLgwYOxc+dO2zonTpzAmDFjkJqaikaNGuHWW29FQUFBNewBERER1Sd1uW/FfhURERHR6YVBPyIiIqpV/H4/rr/+ekyaNCnm5zz99NN44YUXMH/+fGzcuBENGjRAVlYWSkpKzHXGjBmDrVu3YuXKlVi2bBk+++wz3HbbbdWxC0RERES1AvtVRERERKcXSQgharoSREREVHV5eXlIS0uD56wxkBRftW1HhPwIfr8Iubm5SE1NrbbtLFy4EHfffTdycnLKro8QaNGiBe655x7ce++9AIDc3Fw0a9YMCxcuxOjRo7Ft2zZ0794dX3/9Nfr06QMAWLFiBa666irs378fLVq0qLb9ICIiorqpPvWt2K86tYzXTnX3l4mIiOj0EWv/wnMK60RERESnQiiAar2jJxQAoHU2rBISEpCQkFCdW3a1Z88eZGdnY/DgweaytLQ09O3bFxs2bMDo0aOxYcMGNGrUyLwwBQCDBw+GLMvYuHEjrr322lNebyIiIqojTqO+FftV8WHcX+88pkRERESVZfQryhvHx6AfERFRPeHz+ZCZmYnsH96u9m2lpKSgdevWtmUzZszAzJkzq33bTtnZ2QCAZs2a2ZY3a9bMfCw7OxsZGRm2xz0eD9LT0811iIiIiKxOx74V+1XxkZ+fDwARx5SIiIioqvLz85GWlhb1cQb9iIiI6onExETs2bMHfr+/2rclhIAkSbZlZd2J/uCDD+Kpp54qs8xt27ahW7ducakfERERUVXV1r4V+1W1X4sWLfDLL7+gYcOGEcc1HvLy8tC6dWv88ssvTB9ag3gcah6PQe3A41DzeAxqh+o+DkII5Ofnl5tKnUE/IiKieiQxMRGJiYk1XY0I99xzD8aNG1fmOh06dKhU2ZmZmQCAw4cPo3nz5ubyw4cPo1evXuY6R44csT0vGAzixIkT5vOJiIiInGpj34r9qtpPlmW0atWq2reTmprKi7u1AI9DzeMxqB14HGoej0HtUJ3HoawRfgYG/YiIiKjaNW3aFE2bNq2Wstu3b4/MzEysWrXKvBiVl5eHjRs3YtKkSQCAfv36IScnB5s2bULv3r0BAKtXr4aqqujbt2+11IuI
iIioOrBfRURERETRyDVdASIiIiKrffv2YfPmzdi3bx9CoRA2b96MzZs3o6CgwFynW7dueP/99wEAkiTh7rvvxhNPPIElS5bg+++/x29/+1u0aNECI0aMAACceeaZGDJkCCZMmICvvvoKX3zxBaZMmYLRo0eXmxaBiIiIqK5iv4qIiIjo9MKRfkRERFSrTJ8+Ha+99pr5/7nnngsAWLNmDQYOHAgA2LFjB3Jzc8117r//fhQWFuK2225DTk4OBgwYgBUrVtjScS1atAhTpkzBoEGDIMsyRo4ciRdeeOHU7BQRERFRDWC/qn5KSEjAjBkzypxTm6ofj0PN4zGoHXgcah6PQe1QW46DJIQQNVoDIiIiIiIiIiIiIiIiIqoSpvckIiIiIiIiIiIiIiIiquMY9CMiIiIiIiIiIiIiIiKq4xj0IyIiIiIiIiIiIiIiIqrjGPQjIiIiIiIiIiIiIiIiquMY9CMiIiIiIiIiIqoj5s6di3bt2iExMRF9+/bFV199VdNVqrdmzpwJSZJsP926dTMfLykpweTJk3HGGWcgJSUFI0eOxOHDh2uwxvXDZ599hmHDhqFFixaQJAkffPCB7XEhBKZPn47mzZsjKSkJgwcPxs6dO23rnDhxAmPGjEFqaioaNWqEW2+9FQUFBadwL+q28o7BuHHjIt4bQ4YMsa3DY1A1s2fPxvnnn4+GDRsiIyMDI0aMwI4dO2zrxPIZtG/fPlx99dVITk5GRkYG7rvvPgSDwVO5K3VaLMdh4MCBEe+H22+/3bbOqTwODPoRERERERERERHVAW+99RamTZuGGTNm4L///S/OOeccZGVl4ciRIzVdtXqrR48eOHTokPnzn//8x3xs6tSpWLp0Kd555x2sW7cOBw8exHXXXVeDta0fCgsLcc4552Du3Lmujz/99NN44YUXMH/+fGzcuBENGjRAVlYWSkpKzHXGjBmDrVu3YuXKlVi2bBk+++wz3HbbbadqF+q88o4BAAwZMsT23njzzTdtj/MYVM26deswefJkfPnll1i5ciUCgQCuvPJKFBYWmuuU9xkUCoVw9dVXw+/3Y/369XjttdewcOFCTJ8+vSZ2qU6K5TgAwIQJE2zvh6efftp87FQfB0kIIaqlZCIiIiIiIiIiIoqbvn374vzzz8eLL74IAFBVFa1bt8add96JBx98sIZrV//MnDkTH3zwATZv3hzxWG5uLpo2bYrFixfj17/+NQBg+/btOPPMM7FhwwZceOGFp7i29ZMkSXj//fcxYsQIANoovxYtWuCee+7BvffeC0A7Fs2aNcPChQsxevRobNu2Dd27d8fXX3+NPn36AABWrFiBq666Cvv370eLFi1qanfqJOcxALSRfjk5OREjAA08BvF39OhRZGRkYN26dbjkkkti+gz6+OOPcc011+DgwYNo1qwZAGD+/Pl44IEHcPToUfh8vprcpTrJeRwAbaRfr169MGfOHNfnnOrjwJF+REREREREREREtZzf78emTZswePBgc5ksyxg8eDA2bNhQgzWr33bu3IkWLVqgQ4cOGDNmDPbt2wcA2LRpEwKBgO14dOvWDW3atOHxqEZ79uxBdna2rd3T0tLQt29fs903bNiARo0amcEmABg8eDBkWcbGjRtPeZ3rq7Vr1yIjIwNdu3bFpEmTcPz4cfMxHoP4y83NBQCkp6cDiO0zaMOGDTjrrLPMQBMAZGVlIS8vD1u3bj2Fta8/nMfBsGjRIjRp0gQ9e/bEQw89hKKiIvOxU30cPHEvkYiIiIiIiIiIiOLq2LFjCIVCtouGANCsWTNs3769hmpVv/Xt2xcLFy5E165dcejQIcyaNQsXX3wxtmzZguzsbPh8PjRq1Mj2nGbNmiE7O7tmKnwaMNrW7X1gPJadnY2MjAzb4x6PB+np6Tw2cTJkyBBcd911aN++PXbv3o2HH34YQ4cOxYYNG6AoCo9BnKmqirvvvhsXXXQRevbsCQAxfQZlZ2e7vleMx6hi3I4D
ANx0001o27YtWrRoge+++w4PPPAAduzYgffeew/AqT8ODPoREREREREREREROQwdOtT8++yzz0bfvn3Rtm1bvP3220hKSqrBmhHVrNGjR5t/n3XWWTj77LPRsWNHrF27FoMGDarBmtVPkydPxpYtW2xzitKpF+04WOeqPOuss9C8eXMMGjQIu3fvRseOHU91NZnek4iIiIiIiIiIqLZr0qQJFEXB4cOHbcsPHz6MzMzMGqrV6aVRo0bo0qULdu3ahczMTPj9fuTk5NjW4fGoXkbblvU+yMzMxJEjR2yPB4NBnDhxgsemmnTo0AFNmjTBrl27APAYxNOUKVOwbNkyrFmzBq1atTKXx/IZlJmZ6fpeMR6j2EU7Dm769u0LALb3w6k8Dgz6ERERERERERER1XI+nw+9e/fGqlWrzGWqqmLVqlXo169fDdbs9FFQUIDdu3ejefPm6N27N7xer+147NixA/v27ePxqEbt27dHZmamrd3z8vKwceNGs9379euHnJwcbNq0yVxn9erVUFXVvBhP8bV//34cP34czZs3B8BjEA9CCEyZMgXvv/8+Vq9ejfbt29sej+UzqF+/fvj+++9tAdiVK1ciNTUV3bt3PzU7UseVdxzcbN68GQBs74dTeRyY3pOIiIiIiIiIiKgOmDZtGsaOHYs+ffrgggsuwJw5c1BYWIjx48fXdNXqpXvvvRfDhg1D27ZtcfDgQcyYMQOKouDGG29EWloabr31VkybNg3p6elITU3FnXfeiX79+uHCCy+s6arXaQUFBeYIGQDYs2cPNm/ejPT0dLRp0wZ33303nnjiCXTu3Bnt27fHo48+ihYtWmDEiBEAgDPPPBNDhgzBhAkTMH/+fAQCAUyZMgWjR49GixYtamiv6payjkF6ejpmzZqFkSNHIjMzE7t378b999+PTp06ISsrCwCPQTxMnjwZixcvxocffoiGDRuac7+lpaUhKSkpps+gK6+8Et27d8dvfvMbPP3008jOzsYjjzyCyZMnIyEhoSZ3r84o7zjs3r0bixcvxlVXXYUzzjgD3333HaZOnYpLLrkEZ599NoAaOA6CiIiIiIiIiIiI6oS//OUvok2bNsLn84kLLrhAfPnllzVdpXpr1KhRonnz5sLn84mWLVuKUaNGiV27dpmPFxcXizvuuEM0btxYJCcni2uvvVYcOnSoBmtcP6xZs0YAiPgZO3asEEIIVVXFo48+Kpo1ayYSEhLEoEGDxI4dO2xlHD9+XNx4440iJSVFpKamivHjx4v8/Pwa2Ju6qaxjUFRUJK688krRtGlT4fV6Rdu2bcWECRNEdna2rQweg6pxa38AYsGCBeY6sXwG/fzzz2Lo0KEiKSlJNGnSRNxzzz0iEAic4r2pu8o7Dvv27ROXXHKJSE9PFwkJCaJTp07ivvvuE7m5ubZyTuVxkPSKExEREREREREREREREVEdxTn9iIiIiIiIiIiIiIiIiOo4Bv2IiIiIiIiIiIiIiIiI6jgG/YiIiIiIiIiIiIiIiIjqOAb9iIiIiIiIiIiIiIiIiOo4Bv2IiIiIiIiIiIiIiIiI6jgG/YiIiIiIiIiIiIiIiIjqOAb9iIiIiIiIiIiIiIiIiOo4Bv2IiIiIiIiIiIiIiIiI6jgG/YiIiIiIiIiIiIiIiIjqOAb9iIiIiIiIiIiIiIiIiOo4Bv2IiIiIiIiIiIiIiIiI6jgG/YiIiIiIiIiIiIiIiIjqOAb9iIiIiIiIiIiIiIiIiOo4Bv2IiIiIiIiIiIiIiIiI6jgG/YiIiIiIiIiIiIiIiIjqOAb9iIiIiIiIiIiIiIiIiOo4Bv2IiIiIiIiIiIiIiIiI6jgG/YiIiIiIiIiIiIiI6pmff/4ZkiRh4cKFFX7u2rVrIUkS1q5dW+66AwcOxMCBA2Mqt6CgABkZGVi0aFGF61QdRo8ejRtuuKGmq0EUNwz6EREREREREREREZ0GJEmK6SeWQM+ptH79esycORM5OTllrhcIBNCk
SRMMGDAg6jpCCLRu3RrnnXdeXOt48OBBzJw5E5s3b45ruWVZuHBh1GP44IMPnrJ6VMTzzz+Phg0bYvTo0eay5cuXY+bMmXHdTrR2+eMf/2hb74EHHsC//vUvfPvtt3HdPlFN8dR0BYiIiIiIiIiIiIio+r3xxhu2/19//XWsXLkyYvmZZ555KqtVrvXr12PWrFkYN24cGjVqFHU9r9eL66+/Hi+99BL27t2Ltm3bRqzz2WefYf/+/Zg6dWpc63jw4EHMmjUL7dq1Q69eveJadnkee+wxtG/f3rasZ8+eaNu2LYqLi+H1ek9pfaIJBAJ4/vnnMXXqVCiKYi5fvnw55s6dG/fA3xVXXIHf/va3tmXnnntuxP99+vTBn//8Z7z++utx3T5RTWDQj4iIiIiIiIiIiOg0cPPNN9v+//LLL7Fy5cqI5ZUhhEBJSQmSkpKqXFZVjBkzBvPnz8ebb77pOtpt8eLFkGXZNtKsNissLESDBg3KXGfo0KHo06eP62OJiYnVUa1KWbZsGY4ePXrK0ml26dIlptf2DTfcgBkzZmDevHlISUk5BTUjqj5M70lEREREREREREREAIAFCxbg8ssvR0ZGBhISEtC9e3f89a9/jVivXbt2uOaaa/DJJ5+gT58+SEpKwksvvQQA2Lt3L4YPH44GDRogIyMDU6dOxSeffOKaOnTjxo0YMmQI0tLSkJycjEsvvRRffPGF+fjMmTNx3333AQDat29vpmn8+eefXet/0UUXoV27dli8eHHEY4FAAO+++y4uu+wytGjRAgCwfft2/PrXv0Z6ejoSExPRp08fLFmyJOK5OTk5mDp1Ktq1a4eEhAS0atUKv/3tb3Hs2DGsXbsW559/PgBg/PjxZh2tc+m988476N27N5KSktCkSRPcfPPNOHDggG0b48aNQ0pKCnbv3o2rrroKDRs2xJgxY1z3MxbR5vSLdZ/dvPzyy+jYsSOSkpJwwQUX4PPPP4+5Ph988AHatWuHjh07msvGjRuHuXPnArCn5IyX4uJilJSUlLnOFVdcgcLCQqxcuTJu2yWqKRzpR0REREREREREREQAgL/+9a/o0aMHhg8fDo/Hg6VLl+KOO+6AqqqYPHmybd0dO3bgxhtvxMSJEzFhwgR07doVhYWFuPzyy3Ho0CHcddddyMzMxOLFi7FmzZqIba1evRpDhw5F7969MWPGDMiybAYdP//8c1xwwQW47rrr8OOPP+LNN9/Ec889hyZNmgAAmjZt6lp/SZJw00034Q9/+AO2bt2KHj16mI+tWLECJ06cMANpW7duxUUXXYSWLVviwQcfRIMGDfD2229jxIgR+Ne//oVrr70WAFBQUICLL74Y27Ztwy233ILzzjsPx44dw5IlS7B//36ceeaZeOyxxzB9+nTcdtttuPjiiwEA/fv3B6DNvTd+/Hicf/75mD17Ng4fPoznn38eX3zxBf73v//ZUpYGg0FkZWVhwIAB+NOf/oTk5ORyj1lubi6OHTtmW2a0k1Os++zm73//OyZOnIj+/fvj7rvvxk8//YThw4cjPT0drVu3Lree69evj5hLceLEiTh48KBrmlkAOHnyJEKhULllJycnR7TVwoULMW/ePAghcOaZZ+KRRx7BTTfdFPHc7t27IykpCV988UWZ+09UJwgiIiIiIiIiIiIiOu1MnjxZOC8RFxUVRayXlZUlOnToYFvWtm1bAUCsWLHCtvzPf/6zACA++OADc1lxcbHo1q2bACDWrFkjhBBCVVXRuXNnkZWVJVRVtW2/ffv24oorrjCXPfPMMwKA2LNnT0z7tXXrVgFAPPTQQ7blo0ePFomJiSI3N1cIIcSgQYPEWWedJUpKSsx1VFUV/fv3F507dzaXTZ8+XQAQ7733XsS2jLp//fXXAoBYsGCB7XG/3y8yMjJEz549RXFxsbl82bJlAoCYPn26uWzs2LECgHjwwQdj2s8FCxYIAK4/QgixZ8+eiDrFus9r1qyxHS9jP3r16iVK
S0vN9V5++WUBQFx66aVl1jUQCAhJksQ999wT8Zjb69BgvM7K+5kxY4btef379xdz5swRH374ofjrX/8qevbsKQCIefPmuW6nS5cuYujQoWXuA1FdwJF+RERERERERERERAQAtjn5cnNzEQgEcOmll+KTTz5Bbm4u0tLSzMfbt2+PrKws2/NXrFiBli1bYvjw4eayxMRETJgwAffcc4+5bPPmzdi5cyceeeQRHD9+3FbGoEGD8MYbb0BVVchyxWeo6t69O84991z885//xB/+8AcA2tx4S5YswTXXXIPU1FScOHECq1evxmOPPYb8/Hzk5+ebz8/KysKMGTNw4MABtGzZEv/6179wzjnnuI4CKy8V5TfffIMjR45g5syZtvn1rr76anTr1g0fffQRZs2aZXvOpEmTKrS/c+fORZcuXcpdryL7HG0/HnvsMfh8PnP5uHHjzPSr5W1bCIHGjRvHuFeaRYsWobi4uNz1OnToYPvfmiIWAG655Rb07t0bDz/8MMaNGxcx92Tjxo0jRksS1UUM+hERERERERERERERAC1YMmPGDGzYsAFFRUW2x9yCfk579+5Fx44dI4JhnTp1sv2/c+dOAMDYsWOj1iU3N7fCQSLDmDFjcO+992L9+vXo378/PvjgAxQVFZmpPXft2gUhBB599FE8+uijrmUcOXIELVu2xO7duzFy5MhK1WPv3r0AgK5du0Y81q1bN/znP/+xLfN4PGjVqlWFtnHBBRegT58+5a5XkX12Mvajc+fOtuVerzci4FYWIUTM6wLaHI3x4PP5MGXKFNx+++3YtGkTBgwYEFGveM4lSFRTGPQjIiIiIiIiIiIiIuzevRuDBg1Ct27d8Oyzz6J169bw+XxYvnw5nnvuOaiqalvfOVqqIoyynnnmGfTq1ct1nZSUlEqXf+ONN+L+++/H4sWL0b9/fyxevBiNGzfGVVddZdv+vffeGzFa0eAMVJ4KCQkJlRrdGIua3Of09HRIkoSTJ09W6HlHjx6NaU6/lJSUcl8vxryDJ06ciHjs5MmTEQFNorqIQT8iIiIiIiIiIiIiwtKlS1FaWoolS5agTZs25vI1a9bEXEbbtm3xww8/RIyc2rVrl229jh07AgBSU1MxePDgMsuszAisFi1a4LLLLsM777yDRx99FCtXrsS4cePM1JTG6DSv11vu9jt27IgtW7ZUqo5t27YFAOzYsQOXX3657bEdO3aYj58KFdlnJ6OeO3futO1HIBDAnj17cM4555T5fI/Hg44dO2LPnj0Rj5V1fM8//3xzlGFZZsyYgZkzZ5a5zk8//QQAaNq0qW15MBjEL7/8YktJS1RXVc8tA0RERERERERERERUpyiKAsCegjE3NxcLFiyIuYysrCwcOHAAS5YsMZeVlJTgb3/7m2293r17o2PHjvjTn/6EgoKCiHKOHj1q/t2gQQMAQE5OTsz1ALQUn0eOHMHEiRMRCATM1J4AkJGRgYEDB+Kll17CoUOHytz+yJEj8e233+L999+PWM9oq2h17NOnDzIyMjB//nyUlpaayz/++GNs27YNV199dYX2qSoqss9Offr0QdOmTTF//nz4/X5z+cKFC2M+Lv369cM333wTsbys47to0SKsXLmy3J/f/va3Ze5Hfn4+5syZgyZNmqB37962x3744QeUlJSgf//+Me0HUW3GkX5EREREREREREREhCuvvBI+nw/Dhg3DxIkTUVBQgL/97W/IyMhwDRK5mThxIl588UXceOONuOuuu9C8eXMsWrQIiYmJAMKjumRZxiuvvIKhQ4eiR48eGD9+PFq2bIkDBw5gzZo1SE1NxdKlSwHADNL83//9H0aPHg2v14thw4aZwaJoRo4ciTvuuAMffvghWrdujUsuucT2+Ny5czFgwACcddZZmDBhAjp06IDDhw9jw4YN2L9/P7799lsAwH333Yd3330X119/PW655Rb07t0bJ06cwJIlSzB//nycc8456NixIxo1aoT58+ej
YcOGaNCgAfr27Yv27dvjqaeewvjx43HppZfixhtvxOHDh/H888+jXbt2mDp1auwHKA5i3Wcnr9eLJ554AhMnTsTll1+OUaNGYc+ePViwYEHMc/r96le/whtvvIEff/wRXbp0MZcbx/f3v/89srKyoCgKRo8eDaByc/rNnTsXH3zwAYYNG4Y2bdrg0KFDePXVV7Fv3z688cYb5mhPw8qVK5GcnIwrrriiwtsiqm040o+IiIiIiIiIiIiI0LVrV7z77ruQJAn33nsv5s+fj9tuuw133XVXzGWkpKRg9erVuPzyy/H888/jiSeewMUXX4xHH30UAMzgHwAMHDgQGzZsQJ8+ffDiiy/izjvvxMKFC5GZmWkLhp1//vl4/PHH8e2332LcuHG48cYbyxyVZkhNTcWwYcMAaHP8OdNIdu/eHd988w2uvvpqLFy4EJMnT8b8+fMhyzKmT59u26fPP/8ckyZNwvLly/H73/8e8+bNQ9euXdGqVSsAWlDstddeg6IouP3223HjjTdi3bp1AIBx48bhrbfegt/vxwMPPICXXnoJ1157Lf7zn/+gUaNGMbdtPMS6z25uu+02zJs3DwcPHsR9992Hzz//HEuWLDHnyivPsGHD0KRJE7z99tu25ddddx3uvPNOrFixAr/5zW9w4403Vnr/AC1QmJGRgVdeeQWTJ0/Gc889h65du+LTTz+1jfY0vPPOO7juuuvQsGHDKm2XqDaQhHWsNhERERERERERERFRnM2ZMwdTp07F/v370bJly5quDtWQxx9/HAsWLMDOnTvNdLI1afPmzTjvvPPw3//+F7169arp6hBVGYN+RERERERERERERBQ3xcXFSEpKMv8vKSnBueeei1AohB9//LEGa0Y1raCgAB06dMBzzz3nOuruVBs9ejRUVY0YfUhUVzHoR0RERERERERERERxM3ToULRp0wa9evVCbm4u/vGPf2Dr1q1YtGgRbrrpppquHhFRveWp6QoQERERERERERERUf2RlZWFV155BYsWLUIoFEL37t3xz3/+E6NGjarpqhER1WtyTVeAiIiI6r/PPvsMw4YNQ4sWLSBJEj744INyn7N27Vqcd955SEhIQKdOnbBw4cKIdebOnYt27dohMTERffv2xVdffRX/yhMRERHVMuxbEVFtd/fdd2PLli0oKChAcXExNm3axIAfEdEpwKAfERERVbvCwkKcc845mDt3bkzr79mzB1dffTUuu+wybN68GXfffTd+97vf4ZNPPjHXeeuttzBt2jTMmDED//3vf3HOOecgKysLR44cqa7dICIiIqoV2LciIiIiIjec04+IiIhOKUmS8P7772PEiBFR13nggQfw0UcfYcuWLeay0aNHIycnBytWrAAA9O3bF+effz5efPFFAICqqmjdujXuvPNOPPjgg9W6D0RERES1BftWRERERGTgnH5ERET1SElJCfx+f7VvRwgBSZJsyxISEpCQkBCX8jds2IDBgwfblmVlZeHuu+8GAPj9fmzatAkPPfSQ+bgsyxg8eDA2bNgQlzoQERERsW/FvlVlqKqKgwcPomHDhhHHlYiIiKgyhBDIz89HixYtIMvRk3gy6EdERFRPlJSU4IykFBQhVO3bSklJQUFBgW3ZjBkzMHPmzLiUn52djWbNmtmWNWvWDHl5eSguLsbJkycRCoVc19m+fXtc6kBERESnN/at2LeqrIMHD6J169Y1XQ0iIiKqh3755Re0atUq6uMM+hEREdUTfr8fRQhhDFrCV43T9vqhYlHBAfzyyy9ITU01l8frTnQiIiKi2oB9K6qshg0bAkDEMSUiIiKqrLy8PLRu3drsZ0TDoB8REVE9kwQZPqn6Lkwp+mzAqamp1XYRIzMzE4cPH7YtO3z4MFJTU5GUlARFUaAoius6mZmZ1VInIiIiOj2xb8W+VUUZKT2r85gSERHR6am81OHV12slIiIiqqR+/fph1apVtmUrV65Ev379AAA+nw+9e/e2raOq
KlatWmWuQ0REREQa9q2IiIiITg8c6UdERFTPKJIEpZy7fqpUPiRAVOw5BQUF2LVrl/n/nj17sHnzZqSnp6NNmzZ46KGHcODAAbz++usAgNtvvx0vvvgi7r//ftxyyy1YvXo13n77bXz00UdmGdOmTcPYsWPRp08fXHDBBZgzZw4KCwsxfvz4uOwnEREREcC+FftWRERERHUHg35ERERU7b755htcdtll5v/Tpk0DAIwdOxYLFy7EoUOHsG/fPvPx9u3b46OPPsLUqVPx/PPPo1WrVnjllVeQlZVlrjNq1CgcPXoU06dPR3Z2Nnr16oUVK1agWbNmp27HiIiIiGoA+1ZERERE5EYSQlTwfjIiIiKqjfLy8pCWloZJchskVOO8M6VCxV/VfcjNzeUcJURERFRvsW9FlWW8dnhMiYiIKF5i7V9wTj8iIiIiIiIiIiIiIiKiOo7pPYmIiOqZUzLvDBEREdFpgn0rIiIiIqorONKPiIiIiIiIiIiIiIiIqI7jSD8iIqJ6RpG0n2orv/qKJiIiIqp12LciIiIiorqCI/2IiIiIiIiIiIiIiIiI6jiO9CMiIqpnOO8MERERUfywb0VEREREdQVH+hERERERERERERERERHVcRzpR0REVM9w3hkiIiKi+GHfioiIiIjqCo70IyIiIiIiIiIiIiIiIqrjONKPiIionuG8M0RERETxw74VEREREdUVHOlHREREREREREREREREVMcx6EdERERERERERERERDH79ttvEQwGa7oaROTAoB8REVE9I0H7gq+uHyagIiIiotMJ+1ZERESRtm7dilmzZtV0NYjIgUE/IiIiIiIiIiKq82bPno3zzz8fDRs2REZGBkaMGIEdO3bY1hk4cCAkSbL93H777bZ19u3bh6uvvhrJycnIyMjAfffdx9EsREQOqampePLJJ7Fq1aqargrVEUII/PTTTzVdjXqPQT8iIqJ6RpGkav8hIiIiOl2wb1V3rFu3DpMnT8aXX36JlStXIhAI4Morr0RhYaFtvQkTJuDQoUPmz9NPP20+FgqFcPXVV8Pv92P9+vV47bXXsHDhQkyfPv1U7w4RkUlVVQQCgbiXGwqFsG3bNuzevbvCz23YsCGEELj55ptx+PDhuNeN6g8hBD766CP069cPO3furOnq1HsM+hERERERERERUZ23YsUKjBs3Dj169MA555yDhQsXYt++fdi0aZNtveTkZGRmZpo/qamp5mP//ve/8cMPP+Af//gHevXqhaFDh+Lxxx/H3Llz4ff7T/UuEREBACRJwm9+8xvz86tr1644//zzcfPNN2P//v0VKuvHH3/EnXfeiYsuugipqam4++670aJFiwrXyfjszM7OxtixY6GqaoXLqEsqG3QtKCjA73//e/Ts2RM9e/bEWWedhbPPPhu/+c1vqiWQW9usWLECffr0wTXXXIPMzExkZWVVqTwhBDZv3hyfytVTDPoRERHVM4pU/T9EREREpwv2requ3NxcAEB6erpt+aJFi9CkSRP07NkTDz30EIqKiszHNmzYgLPOOgvNmjUzl2VlZSEvLw9bt2513U5paSny8vJsP9Xp+++/x2uvvRa38p5++mkcOXIkLmV99913eP311+NSFlC76/bUU0/h6NGjcSnr+++/j2vdPv/8c1x//fV44YUXqhxUiHe7/fTTT/j4448RCoXiUt66deviVtZTTz2FY8eOxaWsb7/9Fm+88UZcygKApUuX4osvvkBxcTEOHz6MPXv24LLLLsO8efPQqlWrCpXVsWNHvPvuu1i/fj0GDhyIDz/8EElJSRWuU8OGDdGsWTN4vV6MGzcOJ06cqHAZTqFQCEKIKpdjiOcIxNmzZ+PHH3+s8PNSUlIwZ84cTJkyBXv37sWWLVvw/fffIzExEV6vt8Llffrpp+b3m9/vx2effYbp06fbvssqq6CgAB988AE+/PDDKpdl6NmzJ7Zs2YKEhAQ8++yzVS7vxx9/xNChQzFu3Dgc
PHiwSmVt2rQJb775ZpXrZHjyySdx8uTJuJVXWQz6ERERERERERFRvaKqKu6++25cdNFF6Nmzp7n8pptuwj/+8Q+sWbMGDz30EN544w3cfPPN5uPZ2dm2gB8A8//s7GzXbc2ePRtpaWnmT+vWrathj8JkWYbH44lbeV6vF7Icn0uEiqLU2rrV5naTZRmKosSlLABo1aoVBg0ahNtvv71SQQUrRVHiWreEhAR06dIlLmUKIfDVV1/ho48+ikPNtGMqxSnlsiRJ8Pl8cSkL0AJ1v//97+HxeHDNNddgy5YtePrpp20jlWOlKAqeeeYZDB8+HO+99x4SExMrVaemTZti/fr1OHLkCEaPHo0mTZpUqhzDt99+i+nTp+Oiiy7Cnj17qjxy8Mcff8Tq1aurVIbVtGnT0KlTp0o9V5Zl3H777fj+++8xaNAgpKSkYMSIERUqIzs7G2PGjMGwYcOwYMECXH311UhPT8ell16K999/v8IjPt34fD40aNCgyp8bVq1atcI333yDRx99FB06dKhyeT/++COys7Px2muvoXPnznj88ccrHfD0eDxx+xwHENf3fFVIIp6hcyIiIqoxeXl5SEtLw2PJHZAoxe+kzKlEhDC96Cfk5uZW6gSDiIiIqC5g36pumzRpEj7++GP85z//KXMUzOrVqzFo0CDs2rULHTt2xG233Ya9e/fik08+MdcpKipCgwYNsHz5cgwdOjSijNLSUpSWlpr/5+XloXXr1jymRBRXR44cwX//+18MGTKkymUVFhbC6/XWmiAFANx///34y1/+AkmS0KdPH3z22Wc1XaVqIYTAK6+8grFjx8bU/qqq4qWXXsJDDz1kjvDLzMzEFVdcgSuuuAKDBw9G8+bNq7vaVaaqalwCbF988QV++OEHNG7c2Pxp165dxKj++sjom5bXv4jf7S1EREREREREREQ1bMqUKVi2bBk+++yzctPe9e3bFwDMoF9mZia++uor2zpGerjMzEzXMhISEpCQkBCHmhMRRZeRkRGXgB8ANGjQIC7lxNOVV16JZ555BgBw7bXX1nBtqo8kSZgwYUJM6x46dAh33nkn1q1bB1VV4fV6EQgEMGDAACxYsCCuo3CrW7xG1F100UW46KKL4lJWfcWgHxERUT1T3XPD1J0uJREREVHVsW9VdwghcOedd+L999/H2rVr0b59+3Kfs3nzZgAwR0n069cPTz75JI4cOYKMjAwAwMqVK5Gamoru3btXW92JiE53AwYMQGJiIkpKSjBy5Miark6t0Lx5c7z77ru2ZaqqorS0NG6paKn+YdCPiIiIiIiIiIjqvMmTJ2Px4sX48MMP0bBhQ3MOvrS0NCQlJWH37t1YvHgxrrrqKpxxxhn47rvvMHXqVFxyySU4++yzAWgjTbp3747f/OY3ePrpp5GdnY1HHnkEkydP5mg+IqJqlJiYiEsvvRQnT55EmzZtaro6tZYsy0hKSqrpalAtxqAfERFRPaNIEpRqvONLAe8mIyIiotMH+1Z1x1//+lcAwMCBA23LFyxYgHHjxsHn8+HTTz/FnDlzUFhYiNatW2PkyJF45JFHzHUVRcGyZcswadIk9OvXDw0aNMDYsWPx2GOPncpdISI6LV1xxRVQVbWmq0FUpzHoR0RxVVBQgA4dOuC5557DmDFjaro6GD16NFRVxdtvv13TVSEiIiKqlDvuuAM7d+7EypUra7oqWLFiBX79619jz549aNq0aU1Xh4jIRghR5uOtW7fGunXryi2nbdu2WL58ebyqRUREMbryyitr5XyDRHVJfGZPJKrnJEmK6Wft2rU1XVWb9evXY+bMmcjJyTll23z++efRsGFDjB492ly2fPlyzJw5M67biXYM/vjHP9rWe+CBB/Cvf/0L3377bVy3T1SbyVJ47pnq+JF5MzoRxQH7V7HZs2cPXnnlFTz88MPmsoMHD2LmzJnmPFTxMG7cONf279atm229IUOGoFOnTpg9e3bctk1U27FvRUREdGr0
7NkTHTp0qOlqENVpHOlHFIM33njD9v/rr7+OlStXRiw/88wzT2W1yrV+/XrMmjUL48aNQ6NGjap9e4FAAM8//zymTp0KRQlPR798+XLMnTs37oG/K664Ar/97W9ty84999yI//v06YM///nPeP311+O6fSIiIqo89q9i8/zzz6N9+/a47LLLzGUHDx7ErFmz0K5dO/Tq1Stu20pISMArr7xiW5aWlhax3sSJE3Hvvfdi1qxZaNiwYdy2T0RERESnN6ka02kTnS4Y9COKwc0332z7/8svv8TKlSsjlleGEAIlJSX1YgLWZcuW4ejRo7jhhhtOyfa6dOkS0zG44YYbMGPGDMybNw8pKSmnoGZENYvzzhBRXcD+VfkCgQAWLVqE22+//ZRsz+PxxNT+I0eOxJ133ol33nkHt9xyyymoGVHNYt+KiIiIiOoKpvckipMFCxbg8ssvR0ZGBhISEtC9e3dzEnGrdu3a4ZprrsEnn3yCPn36ICkpCS+99BIAYO/evRg+fDgaNGiAjIwMTJ06FZ988olraquNGzdiyJAhSEtLQ3JyMi699FJ88cUX5uMzZ87EfffdBwBo3769maLp559/jroP7dq1w7hx4yKWDxw4MGIidDcffPAB2rVrh44dO5rLxo0bh7lz5wKwp/GKl+LiYpSUlJS5zhVXXIHCwsJaMQ8OERERxa6u96+mTJmClJQUFBUVRTx24403IjMzE6FQKOr+/+c//8GxY8cwePBgc9natWtx/vnnAwDGjx9v1mHhwoVRy6mIUCiEvLy8MtfJyMjA2WefjQ8//DAu2yQiIiIiIqL44Eg/ojj561//ih49emD48OHweDxYunQp7rjjDqiqismTJ9vW3bFjB2688UZMnDgREyZMQNeuXVFYWIjLL78chw4dwl133YXMzEwsXrwYa9asidjW6tWrMXToUPTu3RszZsyALMvmRbHPP/8cF1xwAa677jr8+OOPePPNN/Hcc8+hSZMmAICmTZtWWxusX78e5513nm3ZxIkTcfDgQdd0XQBw8uTJMi92GZKTk5GcnGxbtnDhQsybNw9CCJx55pl45JFHcNNNN0U8t3v37khKSsIXX3yBa6+9toJ7RVT3GPPDVFv51Vc0EZFNXe9fjRo1CnPnzsVHH32E66+/3lxeVFSEpUuXYty4cbaU6E7r16+HJEm29OVnnnkmHnvsMUyfPh233XYbLr74YgBA//79zbLdgoxOiqKgcePGtmVFRUVITU1FUVERGjdujBtvvBFPPfWUa6aE3r1744MPPih3O0T1AftWRERERFRXMOhHFCfr1q2zpZCaMmUKhgwZgmeffTbiotSuXbuwYsUKZGVlmcueffZZ/PTTT/jggw/wq1/9CoAWMHPOUSeEwO23347LLrsMH3/8sTlqbuLEiejRowceeeQR/Pvf/8bZZ5+N8847D2+++SZGjBiBdu3aVdOea4LBIHbv3m3W3dCvXz906dIlarquc889F3v37i23/BkzZtjmBOzfvz9uuOEGtG/fHgcPHsTcuXMxZswY5ObmYtKkSbbnejwetG7dGj/88EPldo6IiIhqRF3vXw0YMAAtW7bEW2+9ZQv6ffTRRygsLMSoUaPKfP727duRnp6O1NRUc1mzZs0wdOhQTJ8+Hf369YvoXz399NOYNWtWmeUCQNu2bW0jFJs3b477778f5513HlRVxYoVKzBv3jx8++23WLt2LTwe+6ljhw4dcOzYMRw5cgQZGRnlbo+IiIiIiIiqH4N+RHFivSCVm5uLQCCASy+9FJ988glyc3ORlpZmPt6+fXvbBSkAWLFiBVq2bInhw4ebyxITEzFhwgTcc8895rLNmzdj586deOSRR3D8+HFbGYMGDcIbb7wBVVUhy6c2e++JEycghIi4Y7w8ixYtQnFxcbnrdejQwfa/NdUWANxyyy3o3bs3Hn74YYwbNy5iDp/GjRvj2LFjFaobUV3Fu9GJqL6o6/0rSZJw/fXX46WXXkJB
QYE5Yu6tt95Cy5YtMWDAgDKff/z48Qr3rX7729+WWy6AiL7S7Nmzbf+PHj0aXbp0wf/93//h3XffxejRo22PG/U6duwYg35U77FvRURERER1BYN+RHHyxRdfYMaMGdiwYUNESiW3i1JOe/fuRceOHSPmu+vUqZPt/507dwIAxo4dG7Uuubm5Fb5AFC9CiAqtf9FFF8Vluz6fD1OmTMHtt9+OTZs2RVzsEkLEdS5BIiIiqn71oX81atQozJkzB0uWLMFNN92EgoICLF++HBMnToypb1LRvlWHDh0ibpaqrKlTp+LRRx/Fp59+GhH0M+rF/hUREREREVHtwaAfURzs3r0bgwYNQrdu3fDss8+idevW8Pl8WL58OZ577jmoqmpb33lndUUYZT3zzDPo1auX6zpu867EItpFm1AoVOZ8MwCQnp4OSZJw8uTJCm3z6NGjMc3pl5KSUu5+tW7dGoA26tDp5MmT6Ny5c4XqRlRXKZIEpRovwirgBV4iqn71pX914YUXol27dnj77bdx0003YenSpSguLi43tScAnHHGGRXuWxUUFKCgoKDc9RRFKXeu56SkJJxxxhlR+1YAzHkNieoz9q2IiIiIqK5g0I8oDpYuXYrS0lIsWbIEbdq0MZevWbMm5jLatm2LH374IWJE2q5du2zrdezYEQCQmpqKwYMHl1lmRe+8bty4MXJyciKW7927t9w7xj0eDzp27Ig9e/ZUqB7nn39+peb0c/PTTz8BQMQFrGAwiF9++cWW2ouIiIhqt/rSvwKAG264Ac8//zzy8vLw1ltvoV27drjwwgvLfV63bt2waNGiiFGNZdXhT3/6U6Xm9HOTn5+PY8eOuQYH9+zZgyZNmpQbOCQiIiIiqk/y8/Oxe/du7Nq1C7t378bu3btx+eWXR2TGIKopDPoRxYExCs6afik3NxcLFiyIuYysrCysXLkSS5Yswa9+9SsAQElJCf72t7/Z1uvduzc6duyIP/3pT7jpppsi7jo/evSoefGlQYMGAOAayHPTsWNHfP755/D7/fD5fACAZcuW4ZdffokpTVS/fv2wdu3aiOXWejRq1Mj2WGXm9LPuoyE/Px9z5sxBkyZN0Lt3b9tjP/zwA0pKStC/f/9yt0NUHyio5nlnKpZpjoioUupL/wrQUnw+/fTTeO2117BixQrcddddMT2vX79+EEJg06ZNuPzyy83lZdWhMnP6lZSUIBAIoGHDhrZ1Hn/8cQghMGTIkIjnb9q0Cf369YtpP4jqOvatiKg6xZJdiejHH39Ely5daroap70vvvgCV199NXJzc81lTz75ZExZPKpbbfssEULg+PHjtT4zSH2cEopBP6I4uPLKK+Hz+TBs2DBMnDgRBQUF+Nvf/oaMjAwcOnQopjImTpyIF198ETfeeCPuuusuNG/eHIsWLUJiYiKA8B3dsizjlVdewdChQ9GjRw+MHz8eLVu2xIEDB7BmzRqkpqZi6dKlAGAGv/7v//4Po0ePhtfrxbBhw8wLRU6/+93v8O6772LIkCG44YYbsHv3bvzjH/8w734vz69+9Su88cYbER0Rox6///3vkZWVBUVRzLtfKjOn39y5c/HBBx9g2LBhaNOmDQ4dOoRXX30V+/btwxtvvGEGLA0rV65EcnIyrrjiigpvi4iIiGpGfelfAcB5552HTp064f/+7/9QWloa80WBAQMG4IwzzsCnn35qC/p17NgRjRo1wvz58/H/7L15uC1FdTb+rqru3vucc8+9F4HLJOKIBkVABKMoaohCcAzGAZwwEaNBDBqHoMaAYvyME36Jj+IQZ6NxnhIRVFB/EjVRJDhr8ikGkEnuvWfYu7ur6vfHqlVV3bvPucM5d6Tf57l377N37+7q6urqVetd612zs7OYmZnBAx7wANzlLnfZrpp+N9xwA4455hicfvrpuNe97gUAuOSSS/Cv//qvOOWUUwJhKrjxxhtx9dVX4+yzz96m4/To0aNHj9XB5s2bcdNNN61aDddr
rrkGv/d7v7cqzuLVbtt//dd/4YgjjliVts3NzeG3v/3tVvs4toQf/vCHOPzww5Hn+Yr2c/PNN+OlL30pXvGKV6wqqXP11Vfjvve974r3Mz8/jxtuuGHV+m01x9tqX9Orr74a97nPfaCUWpX9ffvb38Zhhx2GAw88cMX7esELXoDPf/7zKx5vAAfS3XrrrZ01sbcHq9lv1lp88YtfxCMf+UgMBoMV72/jxo2w1mL9+vUrJnbKssTHP/5x1HUNAMjzHP/0T/+Epz3tadvdtvF4jA0bNuC1r30tXvKSl0z4NLcFH/jAB/CrX/0Kf/3Xfx3WO9uLa665Bv/2b/+G3//938dDHvKQ7doHEYWgx4suugj3vve9t2s/xhh85CMfwVVXXYVf//rXOOGEE3Duuedu177auOWWW3DNNdfgoQ996KrsbzWfWSvB6sxgPXrcznHPe94Tn/jEJ0BEePGLX4x3vOMdeM5znrPVUdwA14n56le/ij/4gz/AW9/6Vlx44YV4yEMegr/5m78BgMZk/bCHPQxXXnkl7n//++Mf//Efcc455+B973sfDjzwQLzwhS8M2x133HF4zWtegx/84Ac488wzcfrpp+Omm25asg0nn3wy3vSmN+FnP/sZzj33XFx55ZX4whe+gDve8Y5bdQ6PecxjsN9+++Ff/uVfGp+fdtppOOecc/ClL30JT3/603H66advdb904YQTTsCGDRvw7ne/G2effTbe8pa34J73vCcuu+wyPPWpT53Y/uMf/zhOO+20iej1Hj32Vihfd2ZH/VN7WQRUjx49dk/sLfaV4MlPfjI2b96Mu9/97rjf/e63Ve0vigJPfepT8fGPf7zxeZ7neP/73w+tNZ773Ofi9NNPxxVXXLFV++zC+vXr8ehHPxqXXnopzjvvPLz0pS/Fr371K/zd3/0dPve5z004cD71qU9hMBjgSU960nYfs0ePPQm9bdVjd8M3vvENvOENb1i1/b3oRS+akL7eXnz961/HG9/4xlXZF8Bt++Uvf7kq+7riiitWtd9e8IIXbFEqe2twwAEH4I//+I8nlJFWgk996lN44AMfiI985CMr3tdXv/pVvPnNb16FVjH+8i//MpRnWSm+9rWvrWrbVvNeAICzzjoLxx9/PK666qoV7+u8886DMWbljQL320UXXbQq+wJW714AmCianp5eFcIPAH7yk5/gIx/5CMqyXPG+8jzHTTfdhAsuuABr167Fl770pe0m/ADggx/8IA4++GD80R/9Ed7ylrfghBNOWNH4e/3rX48LLrgA973vffHVr351u/cDMKE+Go22K1kjxQtf+ELc7373w4YNG7Z7H1prPO5xj8M+++yDyy67bJsUV7aELMtw6KGHrtr+zj77bFx77bWrtr/tBblUL6dHjx67HS666CK88IUvxG9+8xsccsghu7o5W8RrXvMavPe978XPf/7zXR7VAABXXXUV7ne/++F73/sejj766F3dnB49dig2bdqEdevW4V13OBzTasfdfwvW4Kxbf4aNGzdi7dq1O+w4PXr06LGjsCfZV//93/+Ne93rXvi3f/s3nHTSSbu6OQCAY445Bg972MPwlre8ZVc3pUePHYretuqxvZCx01/THrsKv/zlL/HDH/4Q119/PU499dRVdWr32DrMz8/jXe96F6699lr87ne/wyte8YpVy0jssWsxNzeH//f//h/uc5/7rGg/ZVnikksuwUUXXRRIujVr1uAd73hHZ1LDclhYWMDznvc8/O53v0NRFBgOh/jTP/3ThlrI3oCNGzdi48aNjZrvtydsrX3Rk349euxGWFxcnKivcswxx8AYg5/97Ge7sGVbj7m5Odz1rnfFW97ylm1+QO0IPOUpT4G1diL7sEePvRG9Y6pHjx49JrE32FfPe97z8Itf/AKXXnrprm4KvvSlL+FP/uRP8N///d8ritjt0WNPQG9b9dhe9KRfjx49evTYGiwuLuLMM8/ENddcg7m5ufDv9NNPxz/+
4z9O1BrvcfvG1toXfU2/Hj12I5x22mm4053uhKOPPhobN27Ehz70IfzkJz/Bhz/84V3dtK3GmjVrcOONN+7qZgR89KMf3dVN6NFjp0MT/9uR++/Ro0ePPQV7g3319re/fVc3IeCUU07B3Nzcrm5Gjx47Fb1t1aNHjx49evTYEZiamsLHPvaxic9XQ460x+0XPenXo8duhJNPPhnvfve78eEPfxjGGBxxxBH46Ec/iic/+cm7umk9evTo0aNHjx57JHr7qkePHj169OjRo0ePHnsSiqLY1U3osQejJ/169NiNcO655+Lcc8/d1c3o0aPHHg5NBE07LmR8R+67R48ePVYbvX3Vo0ePlaK3rXr06NGjR48ePXrsKVC7ugE9evTo0aNHjx49evTo0aNHjx49evTo0aNHjx49evRYGfbITD9rLa677jrMzs6C+oi4Hj169OixG8M5h82bN+Pggw+GUjsn1qavO9NjW9HbVj169OjRY09Bb1v16NGjR48ePXr06LE09kjS77rrrsOhhx66q5vRo0ePHj16bDWuvfZa3PGOd9zVzejRoxO9bdWjR48ePfY09LZVjx49evTo0aNHjx6T2CNJv9nZWQDAL37+8/B+T4IDsLMD+bb3mM6/GgcY62AcR1byK2Cda2yvlskOkK8UcVuICESsMasVf9n+tezduvhb2S59D2fDP3K2+1xIAeQjQdP3AlvzV/514nf+n3HNtrXb3WhX2EmrTUu0cQJpG/1717GZ7fiw3XcAR5AqQuwjZ+N+ScE6/l1l3cT11YrrWOSqo8+rcWijK6bCPmm0GXr+lsZ5O53DFVPQm34LtzgHynI4awBr4eoKqCvYhc1wowWYudswvvEWjDfOY/76m1HNjzC6bQzSCqQIaw5ci2L9DGbvdCAGGw6AXr8/9IGHwRVrYKb3gSumYChDaR2sjeOWz4fHa6EJhGZ0rXHyz8H4DjQ2jmFNcfwCTZ3k5TJkVPKV9feSBWCtg/PHsHJ/AbDJ1VYgPi6A3LdZKUKufPvhr4WM3+SmcaQAXcA4hL6oLV9nAwdjfVtc86fSZiLfV4rbMMgUMkUoFED1CDAGZMaArfn41m1xjJNzfM85CzIl4Bx/BsBZA1IajghQGk5lPKZUxp+RanZmcp+49j0exreOfyuK+yTFfd1xX4fdp33BDWyen39PzsZzd5bPx9XYvGkT7nr07+/U51Vfd6bHtmJPsa12hQ212pB5htq2i3VhbnNhzlOobHxeAPzMkWdN2+YI81TrWOm8vtw8l6I951Fd8TzvTJwDSfH8qjK4fBDaC3CbBe0ngpyD8pkzJM8PY2I7s5ztLsrCPC3nm6XPPGOac3LbvmtDa95/8hyok+efHMNY7nNjnX9my/OQbaJCETS8DeQsyCZtV/6ZY+vwLCBT8vOyHodr7lSyFJPnlc4BrWEH6+C0Ds+/sFk9Ao3n4jOMNFwxhUWngz1iHPdroQlqPA+yFahaaNoGKoMbroXNhtg0tpirDEa1w80LFSprURsHpQiZIqwdaExlGjOFwkCx7ZRrFfrKepvJ+v3nWqEgCyoXQfVisDedHsDlA4yRo7IOc5VFbR1K41B5Azvzx8w1YUr710xBuzrsT657sBuCgaaXvu7W8TWyNahahBsvwG66FXbT72AXNqPevAmmrGEWRyCfOZbPTkMPCuj1+0Ot3QdqzXpgdl+4fAp2+g6o8ymMaovbxgZj43DTfIW5ssZcabBpXKO2FnMjg7K2wabUivtuulAY5hrrBhkKrbDvdIGZQmG2yDDMCYW3UZUiKPA9JLa5jMdJO537TytCBravqBoBpgTVVXPsBZso4/GqNZwuAMpgiyGgMtRQ4T6Qe7YgHvNUL4LG8yBT8diWsa8LPx8UPL5VwWO6ZXultrbzNnHX3BTGvbwSn+fc3ByOuOc9etuqR48ePXr06NGjR48O7JGknzg6ZmdnsXbt2l3cmp3rgNrZzq7UWSQOF1m0po5yQRfhJUSgEHzi
5BHijoDo/G8RBQ0HvrwuQ6B1EX4TJEC6v3QfQvqZurlfUoBSzcVx0i9LnntKrC3Tzs5zDBuqiWNtzYJYUffnkWSpJ9uVEiQ6kixyTHFOUjXyv1EgR4AjIMvg8im4YfN+zBevw+ZvfQ7OWDhrYcsaa499AOyxj0G2eCPqG28EDWcAa+DKEVxVAdbALmyGWVjA+LY5lLduAt02B3P9HMx8iermRVjvGKLfjVGv3YR8boRs4zwGB96GIdVQa/eFHgDW1nCDNbBT6+BUhpGJY1d70q5QcTzKmK49QWhajhUlRLXfXntHTEr0tfu+i5AFogM3vZdMclw+ZnSaisNF3hMRE4aelNON+6rZCOMcE3wWKOFgyKGCQwULa4EKFiAguCsTcjNXCoNMsTNJe0dcFh1yhVrLDtt6BPhxRdZMEvHOAjYhw8TB6yxgM6CueRxYw2NLaUBpkNbReUpqgoQHEmdfaH9yDyn/O+9oYsdTxmPcE6HNPp+8YOG6Y4n5C5g8X2v9OUYncC+Z2GN3xp5sW+1KInB7jt2eZdqBNAFbIP3agVATQQnpPImmLRcOkcxjjViK0NhkbjP1xLzG86sGdAaXDcOcOnHOybNMptk08CuDnzPrSB5AZ0xMqSy0XdqmZXvfJoCDSUJgSPocaBOCYtPJcwGxzV2kX52QftLuQnvSz9WgUnlizxNbREBWxGMa32emZHKzGof3jX5M7UwiuGEBlw3giulGX+pN83DX/gyUF6AsB83uA6sUZtce5O10fqblipC5GkpXoLKGElu3HAPQINIwyOHyAYqZdXALNRbmK9yyeR4LlcWmUYUiIxRaYR001rgMBw0GyAcZKCfkmULuiSbul0hEZ2IXDBTI+L4QYjgbYqFmoo9qG+yT2g9MIiD3xOJUppApJKRfBqoLJrPqKl5ipfm6Cjnacf3JWaAuQdaASgWnHcw4hy00bK0xJoIlQq0icTjQGlmmkRUaemoINT0E1q6FG8zArNkXi07DVhaDzKAqDaguQWSglUGmatjaIiMDW1uQdRh4Qm52mGEq15jONfaZyjGdK+w3XWA615jK+bxzT34uNx7FZlFE0IptwULzNVH1CDAZqNYgk/txODVxPzT6ThdwSsPly5B+PuCLqiGo9Putx8kAbe6D95/FY7QCrtKA0qVsMBkX0gYhN9Nx16NHjx49evTo0aNHj4g9kvTbnbCzHU07m/BrO4KEUEgjTlOkmU/p4r/TISUOpOSz4DQB/KKQkvd+Ad+q2+CShX0ngZZ+33GeaZ82MuCWgrP8G1LdtRe2kuybaG/yvotsBZpR812L3NR5N+H8S8g+iXQOUfLO8qI92X/MqFKRbLUJoQPvZBHoYqJ/HSlkwwL1iJ14qshQXf8/yL//RdSbboUdL4LGIyZ66ipk/NnRCNZYkFLIZ6ZAWsEZi3pUYrjPFEzJDsdsmCGfyqDyDM4Y1PMjmI23cDvzAnqdgbWG+zQboMinYYhQWxeGlkkciUKyVS1SOyXVgEgY6tYYb5x7+r51o9jweXpc/o3z700H8ciOHo6G58/jfsUpoonatwgTe9bCOGBcGxgHVMaG8zNppLjfT644unyo+Vi5JhDx9a5at4cCUGRDUAYeV7bma2AqwFk4W4co8BBlb2uQqeAME76whjM9pW/8SQj5B8VEICkNZInTSGXhfkR4TRzSlh2wlHFWB0kkeRJlLv0asniTy0UEOPDF0UQgOJ/tQWGO5GzI5jxDCoBTALnJIIOdAEW0bOb1auy/R48dia45dalRtytH43LHXq7Nk0FTiueRFvEnc4wFQTXm/PhdaIdtBS1JNn9iY6T2BBEFkovQJLyWRRKAJbaC2DOKus/bEYXMeW6Hf+gpAqxDppvtbNgucbNwjLbBQR1kQXgY2uZnKTHUFcAW9knc5/EaxOMr+AxFIfTkupECqQzOtu09AgVij+IzQ65ZO6NdZ0vbos4CdeWz2nL+rC6hxnOgwRrfNRTaB/+8RTmCG83DLs7z
FlpDg5+Yw6l1bNs7h43jGrfMlbjhtkUO+FGE/dcOMTvMUBmL/aoC+0zlqAuHTBOGmok5rWIgldgBcm6cuc8EbkruCJHjiH/PLfdtlzWF2F/h4YpArjaue0dQUKPb/FhypgapjAnTLA/kKduSretmrf8nQUmI411lqCuL0jiMa4exEJjOhQBJGfPSj1oRBpkKhN+agjP9pv3fw4z7k4k7Jtji6RHbKcrbic5B+ztN1meZJ2E5E7Ju2OxhHKIVMCX3RELIyT+71BAkxbaZz+pr9Lcn66GLcN0D0c6d6q+9CvOFBUHDLUngtYOuNHnifyejt6169OjRo0ePHj167CnoSb8VYm82zan1PvHNQCsCOXaGpxlIQHNR1iD6TB0zYNokX+qgSiQnKSw8HQDDi0eXOPeVihI18pm8JtHtwNJZiROR9W04C/gmLZdl19g+fcUk2ddAq+3taPw0ur/R/paDa2KhvFTmkcgqCvEislT1KFwTZwxLsGodyT+/zwYxqPPg/HDiuDIlO5mU4kjsRGqJlEJ5622oNn4f+ewalvYEEz2u5iy/cH5aQQ8LFAD0sAApBTMqUcyOYKsazliQVlB5Bp1n3FZr4cYj2GweNL+JnTlKg6oBAEDnQ9/elHiLZI8Q2W3yJ83uS7O+uuRmgTjW0jGXXsNI9jWljNLfsVMMIfq9spYdSsZhoTIwjh1MS41rlTTI+qhw4xwq79DqcnKKZJkidkpp6yP4awBQsJlk+kaPa0p4akUgyqC1ApQ/Y+vve/KhAinhV5VwdRVJP2MaYwBKs49Pe+Iv887NLAdlBX+eOLVdVyawykBQcOl9rlJpKReIVdPVKU6ctfwH+b81HDvhKTrKU+IvZny0Z8gePW6/WEmg1J5oby1H+LVnG5l+dJv0QnzGGBef9Q0bK92+pZqQBkaFz0QSMWTfRbtN5rOJayVBQE7J7LfsebfJw9D2JGBFAl2sHG8rAiRS241a9lsjyy8hGfjxY5tBY0mA1ZaPST7bP56ftEFsq4Zcp63ZcOg6nxYhFeQpUxJV1Ba67GMAMDWcNbEdxJmGZE04J5ED52AttvlcOeJ/o3nAWjilQMUQKhvwc9mPrcXSYONCiVvmYtZlWVusm85ReGbOOAetBsgtAFgMoRr9k2YxSoBYUJGQZsv2PqBGJyOOiKAQZdTbfehI8dpEiD/Vsv87lADIsS0gNgnlBSgv2LbIciitYLUKgWYAJkjAtA3G22jG22gxUKsZlAXEMgaDTKHIWEEh/TddaM4e1YRMIWZQSj+Sggp9R3CeIEvtVO0z4FLZ4K4M4tAn7TWTZP0lmXhdCPcqKSb9bA0nHoWufaWEX9iJvwaI80Ua2LDc2jL0y3Jrqx49evTo0aNHjx49bufYa0m/lTiW9mTssPNuZbeF47RW42HBn8j5hWy+lHBKySi/f94mccoDDRkYXhwqNCgwiybxFxb5akk5zEZk/DJZhxMwdtIBtjVop10B6Mrwa5N9QMzA6sqobLdQOXbDtbMPU6I1kKxeQotMBSrngHLMdfTGi+wQSjKuyGdZ8UF04zMaTIUMLKUL2DX7w33tg9j405+jmJ2GrWpU8yP+jVYYrF+DbFhAza6HmlkLKoa8L+mXlPABIgHko6zDZ9Y2MsRC25SCGkx5UsgTRFUZag6S0tD5ENrLOjpEuSTnYi3Drsy3lPBLPwOaGQjAJGErEkx8PH8aidNTMvmEfGKSj98vlAaVtdhcGixUBuPaYuOoxqg22DyqUdYWZc31YlLolsc1/VsrNRF1rhVhKtfINSH313uQsVMrTy+Jb6dyBGOBClyfBj4zURPglIJWCsgAqrkDnK0bjlJXlbCjec7wHI/g6rJ5nT1I6Zj1lxU83vICToi/YhgkQGWOSucBfqUQxe50zpkGUEkk/iQBm17fNtJpj9LXtnNtFzqkSBNoq1J2tnP/fTR6j+1AP2om0ZbaDA71ZB4RZ396T2tCeJ4LqG1LAQBF9QSHKGUcvqZoP0gb
jEtsJclAhG+b7Nu1nte+rSQqCK35LyUJ02AsgoP2GYdaiEUbg7/S/XZJyDcP4smGlFwiH/SR9IPItAvBKc/w9jNAE3cQ+Wd0yPSDlxa0vsahKaNaApXcZ9rLo6dykxIEI3/L6RkD6Jwzo7wkYniW+f1kt/w/qPHmECyjZtfzMzDjZ5odzKAargs1ejNF8XlkPSEpB5RnbV3ClaNQ963QU8i9fQAw+TeqDJx1KGuLzaMaxjrMjYfYZzrHQmUwW2TYf6ZAnTlM5wqZ0nHcKACKpT1TaW4FJrVgWYJUAVCtwJtmIJWDdcTjQ2ds9znr31teF4QsSgkCouZ48OOOapaatKbmMbVmPV+OLAeshR5xFqQt62a2prUQCfIlyXzrvCoE21GFdsHOAtgOG2ZsZ60bZFgzyDBbaKwbZl7KlEm/YaZY7tbUcW0CgBTfi9pnzjXgLOAMb5/IyQZFDzs57hry52IfZZy5Z6BCnb1m0BpfC6UyII99u2x5hXY7Q4fVfl2pwrpSCEnyQyG958O84oNIU3nXnYXeturRo0eP3R+Li4uYmpra1c3osQdhYWEB09PTW95wD0RVVfjhD3+Io48+elc3pccuwDalH7zuda/Dcccdh9nZWWzYsAGPf/zj8dOf/rSxzcMe9jCO2Ez+Pfe5z21s8+tf/xqPetSjMD09jQ0bNuAlL3kJ6rrGamJ3Npm3NrJ4e7DUea/omOkCTSRZEB0l6T+CkEySzVMHp1Qn4ecXb7A21AMLkdOyry05zpfSntkCUif9RFQsOPo6/LNm8p+pWC4pyBm2/qXnspVtbMt1hZheav5TiP/S7SbWoV195883SHzWPvp7cR5utMD/FufDPzu/CXbzbf67ebhyBDuaj9vNb2ZJzVuuRXbjzzC67n+xeOPvsHjTbSg3LQAAdJEhGxbI161lwm8wBTU9CzW7Hnr/Q5AdeCfogw5DfvBdGv+yg+6C7IA7ITvozsgPuRvyQw9HftjvIT/sXsjvfC8Ud7038jv/HrJD7obsoDsjO+BO0PseCL3PBiYVhzNAXkTS2LkgdRXGq19gCwnXjs62LQdpoyvRJGnR8XfYBxDIJeNEujMSfhYxGy+QgF4mqjKcoVcbzu6bL2tsHtXYuFDi1rkxbpnjiPwbN41x/W0jXH/bCL+5dTG8l89v3DTGxoUKt86NsXGhxNyowmJpsFAyaVgay1HrzgViUxNnDeaa4n3f7odAVrNzyPrzEmdbQyo2DMOEvBXCry5Zsiz55+rSZ4Haxm9ke1eX/HdrrE/U/gtOrSw4fCfI9I737TqOsbZMlHtV4ohKAwhEStc5TFL0PXpE7Em21WpiR9pC24LdpR1ARwZ2YnO17a7UfpF/wd5o2DKtudHF55A83zg4RZ6BfjvEuTzIYCfzuCNqZVGJzVM35kNpxzb5yTuCpRrn0PVZIlUIaWtHJtNykp7tJkZJbz/noyUzKLaUJ1moHoHqMahaBJWLoGoMqphUaxB+4YCezNU6ElWpzCLANejKBZApmfDzwU4S9ILBFNdUzgaB1A11FVNikVTMqAeaWfWAJ5fBtXp9NloKkaosa4uF0mC+NJgrDTaXNRZrg8oCtY02jEh5yr+JDEdPjnle1f+L89vE9U36q53RCZ0tT/h5qUnoAi4rmNzKh3DZABhMgYYzUDNroaZmQMMZZMMCelhA5SwfP9kYG/pL2q2J7SRFTGjmmkIQVeHlPIeZxjBTmM41BpnCUPN7qV/I28cMv1CvUv6Z5DNTsrJGPfL19fgffEBfWIelhF9jcCcKCVJ3T+yjkMUYA9dknrBAGF8GCk76teMfOq57Vz9KYKlkp8r79O/2nBfmuR49evTo0SPBL3/5S1x88cW7uhk99gA45/C1r30NT3ziE/GlL31pVzdnh+C6667Dwx/+cMzPz+/qpvTYRdimTL8rrrgCZ599No477jjUdY2Xv/zleOQjH4kf/ehHmJmZCdudddZZePWrXx3+ThlzYwwe9ahH4cAD
D8S3vvUtXH/99XjGM56BPM/xd3/3d6twStuPnZUduCsIyeWOuex5t0m3JPqc2tv512UlPLsIP2Cilp+D5ZWlThw2wKQkjZuMBpe/29lXaZsJCAvikAUnDpClsnSW+DxkF6V9JK/k5f1IhazECSnSVtvSqFaWEYzn0eU0IzcpMxkITaDpAGwQrRydLvVd7MgTfUmmH2U5yy9WJdTUTJBVBLwDZnGeSZeFzZi7+nuYv+EWbmfBNfZIKegiw/AOa6HXzCI75G7cprqCWrcvaGoNzPpDgwNmwlGZECciE+T88QGAah/FXI+jQ60uw3hy4vzJihhtL1HPSXRxVy23NoTU0kQNSbQlt0+y/KLzlJ1xQvLJMdPMTt7eheNVXjaqsg6j2mDkHW5zowq3LTBpNzeqsVgZmNrC+Iw/aSMR1+Yj/2+Ya3ZAFRpThcYgM1gzzGAsf87nqKLjSrFjiqP/pZ5hlNySdpJjZ5e034CgFNc2YiemYVmttgNIiDxr4KoqfgZw9qYxIG3hFEfXh1cgyH6GOSxLMitIsfysry3jdB6cqSbpc86qARRYvi0l+yDfJYRfKu3aiDy3iXxxO8jB7nziRWlqSLyu+v77aPRVw+5qW3XZBltrJ23NdtsyglbDPltqH8udY8g+WcHxt+a3ad1X66KMHX9pg23Ttrukblf62SSU36934ifPISKerzUIljjjT+r8dZ2H9gQBAUBdJl/aSTUE+SqxeSSLUNqTtjA49bvQIuokk8wlWYhhOwnySOQJKflejp1eF7Gd0jal5APC9xTeN2wUUzGxN5qPx1Kas9B17omRPMp2SltVBii+flwDLYsEDACqx1AmqhWgGADlKCgbuOEauHwKdnZ/OF2AwNe4tgjP8q6FnrMmKDcgy0N/kQMKT0RNFRmmCs1En38mG+uwUBrohRLGOgwzDeucJ64UcgXUlnug8KRSIztT1gCSDerJMSVskopBVsHGcGBJdv8KlYGo5n5K7fK2bZEQfpK1BgBayCjwviwpKJVBTc2w3VoMebvRPMyI+13lSWZdEpQYyCnF/WZ8X2iKz1++57xNpVUg+9YNYx2/QUYoFP/LFDFZJ/atSbLZ5BzFxlhqbWVjvwT52HT91EX4qQwuGwbJUql13Ji/xG6SXSU3PEux8gfpPdOuUzpBfDvbIGw1NddG6XbBzkqVWXYyetuqR48ePXZfzM3N4fGPfzye97zn7eqm7FQsLi6CiDAcDnd1U/YIbNy4ER/4wAfw9re/HT/+8Y/xpCc9Caeddtqubtaq4/LLL8eTn/xkFEWBBz7wgbu6ObsEt9xyCy666CLkeY6iKDAYDDAYDDA9PY0nPvGJDV/L3optIv3a7Pf73vc+bNiwAf/5n/+JE088MXw+PT2NAw88sHMfX/7yl/GjH/0Il112GQ444AAcffTReM1rXoOXvexlOP/881EUxcRvxuMxxuNx+HvTpk3b0uytxu3VzN7ieafE2lILrA6yD0Cj/tsEOScfp4RfCtVy3qSRvf77sLt2Vg+6pZqADsdSSmy237fPr6sPEiI01BRL+otUlkiSLiN144koIRG17BvwXsfuCPFJj6WQmHUkIEQKKUT+mvBqpZZeO+ob4hhSoeYJiRPLR4q70TxcXaGaX4Staug8gzUWusgxWL+G5Tz3OwhqzXrQYAhkOfTsPjBr9oP1jhdbTAG6wNg4GKdhnUJt2ZmgVRGIltrLXdall2OEg6YBFAFZPh2iowtN7IwxZXR8pHKx0neJQ1ABMjDgEB2e4hRNYayLzhPnwvcxK5PCq/Z7BLheTfAqEmC814RJJ3F0UdjekkOuFYaaG1MZhzpjJ93skM9DJD0l+t56R4RzDs6fhIODcwSy7Kgo/TZFphpSWlpxdL9Epw8yhaGvNSOfSb0YRQi1doQME1jvNFapHJdq3Z+komynSMUCQJ57qa6ItCakyGo5a0DWXz9rWEJUaZDTwRkVrrFE+EuWH6kwMRBF0tyCzynFUoSfVjRR
V0aymkMGbZqFswtIvx57DnZX22pbCbIt/XYlWI39be0+qOP99pKfyx23M9OsJR29HAnWtlWCvdUOoPKQ7BzJxuLfAFAUJDYBfn5QI7iF5z0LmfcSsi0Edvmay3XZtPUAkM4a/aUQSTZg0sntiCXbgw3YsunkVZMPKnE2JlMvlyGYPPO3JrvTIbnOKVElf5vSqz6UoU6tWdgcsukkKIWG06DhNOCGfD7aZ6YpBRgK+wv9ZkoQ/P3qnx0uH7CEtR0mta4V7HAWLhsGYlURk0+5chgZh9I66KxgMrFCqOmHuuK+UhpUDDkzKyvgPL+UK8K0DwwCgEVFWDPMQi06kau0okSQkMnGsiUjT71mLbbm9VHEgUSWXIMQDpeMEOyMiWupwP3UCkxsKAuIHRAyVh0MmIgqiulAwlqlQFUBva6EzQsOQspyIJv3u9VeXtzL3SdZrVrFTD1Ao8pdyPrLFYXjso0abas1RcZkn2ayL9dM+GnYYJ+3s2VDZqiMQcn+kyAjGUutvuK1hWSSShBlYpN5+4jnB76OInefql/EezV+Jh+JbUgktnNCkEs7WoRkPJ/J+70BmeNSRZgePXr06NEjgbUWz3jGM3DNNdfgoIMO2tXN2Sm4/vrr8ba3vQ2/+MUv8M///M+7ujl7BH73u9/hjDPOCOvvDRs24G1ve9subtXqwjmHN77xjTjvvPNgjMGLXvQiqC2oqOyNcM7hhhtuwOWXX45vfvOb4fMHPOABuPjii28XhB+wwpp+GzduBADc4Q53aHz+4Q9/GB/60Idw4IEH4jGPeQz+5m/+JkSkX3nllTjyyCNxwAEHhO1PPvlkPO95z8MPf/hDHHPMMRPHed3rXocLLrhgJU3tsb1wSR2QZaIvt4rwkwWej+rsqv8AoEnuhWy55LN2Qfh2Zh26nWmdSCVv2kRg8tqQGU1/I4t/f/yUGAyLV2f9Ypt/SwrN9ncdA61t5Pv250ssjhsR6AkJIZHDIhXEMommdU4aUCbKhCkF8oRfStBIXRlXlTCjEtZYkNbI8hyqyDA8YD/ofTYg2/8Q0Jp1TN6O5mGH61CvO6TR5MoCpXUovfOo9J4fnUgoVr72Wm2dr3lnY+0U71zJFGGYsWOl0ANo3XQ4AUtkS3qSCoE085Kcftt2fcjgMPUfKuIsCQDQ7WMQE33kX5VmJ5BKj2G90wvsWNVwsMQkZK4JxrGjqPINGWYWGDLpl0puoQSssjC+/6zfXvn/5DLLbwShxp+vQ5Mrhdxn9w0zfi+En8hYCSHWBecQ+gPwTtzQH0Laax5Tvh6ffBb6NUWXkWLj/RjIv/QYpELWRCOyvb1r4gw/If6a38VthPAjSgi/VnR920nXkA3e2dAKtCONO9raSbbHtmJ3sa2WIrbaGVK7K1YjO7C9P/h9rnS/Dmhk93Vv1MyMCVunhJ98lGb9pJ8jIbqcEDMu1JbVip8HSlOYt1XSNos4nyvnoi3W0dYwJSRzgyOKdo+3hZrtE2KHtqhIASCplZsQkCoDYesDK5ZSgJDjyP4bz7eO7CQJqhKbyo4XWS3BS1JTlgNK83NDaQ4A89fI6cLbhuAOFvvYWhDVcaz5YLBQc83Z0INOabh8mrPYhQR0vq4uEeqK5SeRF+EZ7Ixh2eyqBOUFKCugBlMwPhNeCJ5csyTltM/0A+CVAVglQEi/tP6xvDeS+S9BU47vxFCv0m8fYqDS4Bof8OQcv0+vQeN6KAVYeCUNzWoQaSYbRQJJ1gShhq/fl9IKOhvGEUkKNDULpTTXt/Y2r6tLvn55Hm3hZBxoypisI3gblO2lylKwL7j95AO5OJBqmHFmpMh6Zsqfb7vcwVJBh0ndPhL5T6C7L4JkrI6fJ98zAezrXCL2VXs9ZTrmLLl2jghELmRwynwRanV2BVumZLeN5B8Q1zqNOqXJOaavOxW9bdWjR489EDfeeCM2bNiwq5uxQ3HhhRfi05/+NADs9aTf9773PbzlLW/Bxz72MWRZhmuuuaavCbuV
UEohyyIN8s53vhP77bffVv/+xhtvxPT0NNasWbMjmrcq+Pa3v43PfvazMD5R4ylPecqq7l8CRHdH/Pa3v8Vll12GSy+9FJdeeimuu+467L///gCAtWvX4nWvex3+/M//HDpJLEgxGo0wGo2wfv367W7Dtddei4MOOqgxznYltrsV1lqce+65OOGEE3Cf+9wnfH7GGWfgsMMOw8EHH4yrr74aL3vZy/DTn/4Un/rUpwAAN9xwQ8MpBSD8fcMNN3Qe67zzzsOLXvSi8PemTZtw6KGHAlh9p06PDlg7IbPZFWUeEIi91pXxC3OojAknlYGsZdLDJQtUqc0hTg6RZ0yz/jBJxEhT22hIC7Xa6XQWHWpdpKV8lzrTuueHCMnuEeeOX9SS8xKDFtyfaZRrGv0qzXPdUbshYjZ9bR0fQCQggkPAy2A6C6pHsZafiVKKlPvocqUi4ecdH5TngaiRjCuRQcpnhihm2fmsihxq7b7Qv/84uGwAk5C05CxcNsR8ZaMjxkcVl8b5CGOWpqqMjfXsrMPcuMbIWCxWLHFZCTGogOlcY02RYTpXWDfIQyS1yFHmiqAVgmOG/44ZFdp7HlPHIkBBopPHAILUkdTcA//M18DjDLAJ2UcAmfLXSlMjUyF1RDlQqKdnHUFbyZRjwo9loAwq67Cm0BgZiztM5Zgrh5gb17hx0wiLpQmSn1KjD2DyT3lSb6pgGc81wyxE8q+fzjFVaKwbZpjyclPrBlnM9POEqlbsiNMKjQy/rmd+GJmpw0o+IgXKMs4e9ds6Y0BeVtbZjppDQghKpL3POpUxSko36/goHR2lOg/vpe/bDvc08yT+zR+Eek7ptV0mw4/SiHtrtyEKocftHbuLbQVMkiHy2Z5ic612O1dzf1InL+yb4pwpagSUStiRagYMJTZYyPJp1YwLjn6/PQfNxAAa5xwy8HNBWYCU42xnCXZBfO4pL/9JYnSHZ7qLxFfSVjkmhUCLDFCWJZ+JJgJG2n+7NimBlJDjhjkHOEXQEtiR2mnSN7KP1rFSmVA5pVCLdikEwsH62mqVr6U2BsaLcKMF2IVNcGOfTZfIoUMp6LxgQiY5r1DrTmoCetUFgs/ANaW/9lEVoV6/T6OPHSnMVxZlabBYu6CKkCvAOoWBBoatPnCW5depGMLmU3DFNMaWUBqL2nKm2kyhsc7bBsY6rBnmQREg8zaU2AjTucZUpkMNNiCOG878ikRteg3S7k7fS4ZfsN8IyPxzt5E578d347KJzamLkHkodpbIngIOxkmA2BDZMAPVAz62LlhWf2ETaHqWryXAhN/UDGdGyvg3NfKiQK3YTtPWQZH2x9O+lrPYqnz+YksVSbBa4ftTZD3b6hyN7DiAx57U9SvHHIDn2yl9QD6LU+x6DrZUvNYRm9QHVQpBarwdXhkXMoKl/akVJyoYSs6LYg0+R6xYoX2gm1bUkggVIlPWWMaPZ58tLHGU6XhoZzED3eufHj169NhLcMkll2DDhg24xz3usWJy4Stf+Qq+973v4SUvecmqtO3HP/4x7nSnO+1WWTLf/OY38d73vjf8vbuSfr/+9a9xpzvdaUX7+NWvfoVzzjkH3/rWtwAA/+f//B/c9a53XY3m7fZ43/veh0c+8pE4+OCDt+v3P//5z/HYxz4Wi4uL+OQnP4nPfvazeNzjHrdN+5idncVJJ52Ee93rXjjnnHM6A2u3B4uLi5iamlqVfR199NHYvHkzjjrqKMzNzeH+97//quxX8PznPx93utOd8MxnPnNJFaKdhdFohG9+85v48pe/jEsvvRRXXXUVpqen8dCHPhQveclL8IhHPAJzc3N485vfjIsuumiLc4NIoR566KF40pOehMc+9rFYu3btNrXJGINjjz0W55xzDp7xjGd0Ki7tTGw36Xf22WfjmmuuaaRJAsBznvOc8P7II4/EQQcdhJNOOgm//OUvcbe73W27jiW6q13YHifM1hCFuxOZ2NWWnd6+LpIpyd5z8pVF0+nT
go+fjt9rL82ExIERMnOyWMctzfpD02E24bxPJGWAZj9NyMykNUGoFVXfqEvo27fUgrNNgDbIw6SPyEsSSSZgur9WvZA2GnU52sfrakvaTrk4XRkC2tdE81HpUArUzmZEJPigdJBWpGIIKIXM/5YdDDnUmvWww9nocASP2dqxTKXUEwqHIHaAOE94Gcuk3shYLFQG49rid4sVRrXFplGFBU9qASxTuX46R2UcxrWGIkJlFYDMS1L6boUncjTBODRqIUobBFqcgpSSdBzRYmzMCpNzUJQQYLKvxBEarjeENIrR1cYRjJdSq3xtJWN5H5KNYXxbM5WhNBaagGnDclFSI0YrwuZRjSJTWCy5zxZL03A4AQhR+rPDLNTsWTPIOILfE37TucYwU14mjELNGsm81B1kX4MwW2Zy4mhwvr8pK+JcZg2QFyFjL5DRCYLUp4pkn8jOohh4gq8ItfxkDpG6Ps73ZVpvsbH/cC6xZmFwaiGZP7oy+ZbK6lNq6ZTIHQhSBNI77rjL5+X02F7sLrZVG0td7Z1pi6zkWDsi629r9rfUds5n3Ml3Opk0wxwjryHTBTEAq7Gz1nzkA3ZC0JI49J0NWX4hK8sCUA7aieT08pGbYSrrINiWDASDjm1EUwIQACbOqCOojHymWpCO9t3BafMsI02k0Kif2rZFnU1IyFYXIhJHFh2xXWnwitiGIq1oouqBq6oooSkqCnXF2XXGTNpspOBszZlDpor9mvaByuDyQbSJdXPhaB2TuaKWILWBG7KpXuaaBkOgrkBZARrOgAZDuGwAlw1Q+qCrylpkmrP+108XGHtFgWlvO6T/BkmNOq2ATHN2W/uRJ2MqJVjTwC8hXC1cJJQC8cdE2QQBtgTSDD+TZvi5OO6tA9cRVHw/aJ0B2mdg5hY0mOZgpKyAS4LhhEQL6xTEQKA8SJ6C7ydvR8rn8XyY9BPCL1cUM13b2XDtMegDoVLCz47mQ/Zm6O+8gFOW6zUaw/Ukwz3bEbkoa6vk2qT9JjawnF84H8Q5QynAC/9zjVDnGvMagKiKsFRmclhMti/qMmuenYzetuqxu+Fb3/oW7nznO2+3E7yNBz/4wXjnO9+JI444YsX7+uxnP4vPfvaz+Kd/+qdVaBlwwgkn4D3veQ/uda97rXhfn/nMZ/D5z38e73nPe1a8r02bNuETn/gE/vRP/3TF+xK8+tWvxpVXXonHP/7xeP3rX4973OMe272va665BkcdddSqte3Zz342fvzjH+Pv/u7vcNZZZy2ZMbM1+MY3voH3v//9uPDCC1dEHjz4wQ/GZz7zGbzqVa/Cb3/721UhIr72ta/hQx/6EF73utetOEtycXERr3/96/HJT34S3/72txt12bcVhx12GP7kT/4E11xzDe55z3viL//yL1fUtk984hO49NJLcfHFF69oPwDwP//zP/jRj36EBz3oQdhnn31WvD8AeOADH4gsy3D00UfjRz/6EZ73vOfh7LPPxste9rKQwbW1+MxnPoMDDjgAH//4x7HffvvhD//wD7e5PVNTU3jXu96FRzziEfirv/qrbf59G5dccgle+tKX4qSTTsKb3/zmFe8PAK677jpMT0/jE5/4BK699toVZ+V9+MMfxtlnn4399tsP++23H2699Vb8/Oc/xyte8Qo8+tGPxrOf/WyccsopW5XZVtc1/uIv/gJ/9Vd/hXve854rahcAvPe978XZZ5+N+93vfjjllFPw5je/GQ960IMaPo7xeIyPfexjW7W/ww8/HP/zP/+Dq666Cp///OcxGAxw6qmn4vTTT8dpp522VfPdne98ZzzpSU/Cd7/73VV9Lmwvtov0e/7zn48vfOEL+PrXv4473vGOy277gAc8AADwi1/8Ane7291w4IEH4jvf+U5jm9/+9rcAsNNY4q0Z8ruTyd3Vlp3aPpU4J9Bc8EkkN5z1jgugoSPYQWQ5sOMFQKyx5vcvGTqc5afi9yI746LTXki/VDqHiCWoJpz0QJNcTM8n
EI7R6UPwSYX+d65jcRp2I062NNtHztkvbpnoYMkmclkkALWPUk4XsFpkQjn6u5052Sln4/xSu72NkKfy3jvdoDKQduwQGM4ANUs9SS2arvp+IbMKTMiQNXCeCKQsBw2moKZngWIAlw1hW4ty64C50k5cL4nknsoIG8eE+cqisgTjHDaOKtyyUGFzWeM3ty5gblTjlrkykFkSdb5h7QAHrp/CvjMFFHHmn3HA0DukAIXcoZHR4Dyp1yZy5DqH6bxB0AGGCLV1IET5T3ZGRlIs1HGTLDBx7rZIY0eEjBQyncGpDEWehVoqxgJOA5lByAK0cDBWYXagYSywny2wUBosVAa3TlUY1xYbRzVGtQnEH0uA+nNSLNmlpV5PznJdswWTfus8+Sekn0Sji7SnEIfb4+8IZJ+v7wQANGAnGmzNfehcY/xJxl+Q05JsP93O6osEH7KCHXfeUeoU921l47whGZspKBmTE0Rf4kxvSxkv5bwKYyd97YG3ve1teMMb3oAbbrgBRx11FP7hH/4Bxx9/fOe2D3vYw3DFFVdMfH7qqafii1/8IgDgzDPPxPvf//7G9yeffPJEnbw9AXuibbUzbZGVHGu127m1+1tqO4sY1EHCgihCTojzia3D/OKWmkJShYK6jMFBAIiiNHvIeLKxPq7zzzCRUkRSdzYljBryxkAgwNpzXyPLMJnzKJHKZhuE5Q9lFm4QQq3ApKCYAA6WUUqxbKLUjwNgTMy01yprSqV3ESekJoLClkrwE7nMtK/FziNTguoxqB7DzG+CW9gcMv1sVYNUBTW0TAJWZdOuoihbT0pqHo6jTKMEi+XTsIMZ2Kl1S7SQIWRunaQqyttcAS4bwA5nodftx3abUlAza6Fm1qKeWg87XIvFBcMS6z6gaE2hcdDaoVcfaI4JwbphjjWFxpqBxkArFIqQqeaYaUNaaKzzUpIIKg9yT+SeBJNaeWRrUDUKGXBREjxZQyR1wB0pVDbutzI82qROHcDjXicXvtAFXD7FigAASBegQcnXWe4rryDglI5j3FlopZApF7JYreu+84OMKXE/RVl1NBRCGuM2GXdkvES/J5ZdOWpIygqoGLJdnuU+qC/jNUfYdyuAk1RQsDDe/kwl9R2i+oYgV2wj8r3Jn2l/7W0rqE76iW0oF0nz9LutRUfwYo/etro9YzAYbJM03ZbwwQ9+EIcddtiq7OsRj3jEqmaYvP/971+1jKZHPvKROO6441ZlXx/84AeRZRmstatWN+sVr3gF7na3u62KU3ylpFAbb3zjG3Gf+9wHs7OzK97X0UcfjeOOOw7D4XDF+zrqqKPw2c9+FrfeeutWBzUuh9///d/HiSeeuCJSEwDKssTFF18cSip86EMfagRzbg/OPfdcHH744Tj00ENX3L4/+qM/woMe9KAV7UPwq1/9Co9+9KNx8cUXr/gcBeeccw6+973v4aqrrsKVV16J0WiEN73pTXjHO96Bc889Fy9+8Yu3WorxxS9+Mc4991zkeQ4A25zBJbj3ve+Nq666alUkc08++WQ84AEPCOv21cBd73pXfOtb3wIR4ZBDDtnyD7aAE044AW9729tw88034+abb8bHP/5xAJzR9sUvfhHXXnstfvCDH+Cv/uqvtngvz83N4ec//zmOP/54fPjDH8ajH/3oFbXtjDPOwJ/8yZ8sSwBvy3zw9re/Hc9+9rPx61//Gve4xz1w0kkn4aSTTsLDH/7wbbrXXvziF0MptVvUUtwm0s85h3POOQef/vSncfnll+Mud7nLFn9z1VVXAYgp1g984APx2te+tqErfemll2Lt2rWrElHUY+dhYlGXRFBP+LjDYGdCMF0sN7L+gBhBm0g6yfHC/okgYoyaKNTaEnmZILPXWhg2CtfLIlf2ndSwkf3xAl1Bp7X72vsEuA5F+CBxDMl7UhzCan2tFiEvXFL7r91O35ENkgFMmvJv3QSJJARjowYiADgf5asyJmd1zu0rvOMKaCzAg4OjI9uKO8k0
CRkh+vIpJnV0MXFOioCpjNDlBg1kko9+Dj42B1TWYm5UY25U47aFChsXKlTGwlkHnfExJBI9dUhoQqiTooNzRSLIKTgaAxln6yin1q6N4skqTQpaZdBawYEaUmBaItMlGj0h/JbKAiO5RtYAVHL0vcqQ6QxGcUZi5glKJqqoIYlaW4eBJqwZcHbeyFisG7AM6rjmf+Kw42tAgZgcZtpH8xOmc41cK8wWGoVm6a5Cx2h0iUxPiTGBdLlN+j58BwQnHCkbHaua+9u1xnZjTKcO5NBh/r5N60q15YCV5s/83+LsZkd3DBaQbIJ47WJNnsZ1bNeRWSbDpdPpTaoZ3LCToDRB7cBodLUdNMrHPvYxvOhFL8I73vEOPOABD8BFF12Ek08+GT/96U87jeZPfepTKMuYvXDLLbfgqKOOwhOf+MTGdqecckpD0mU1Fno7E71tdfuD88EdynNtXANLiJpJUo2cwhI8AgA0twcA6GhbqCxKHLpI+FkXA1akDlusO+sac71O7SvTyrYKgUlJkEM6/4VgJA6g0srbitR8hsbOsRN2D0kAlM6Q64Iz5E08H0vwz0vyz+kOYiDJoGt0pWQRwst9LsUAKgUYDsBim8nXvK5KuNE87OI87MICTFXDVjWyYcHfmWYQS2xLq75a+jwR8qqYgitmMAY7J4gIOZpWFAc6KQy0w0yekK3EQTsA4IppuKwAbA1VrIEeTsPl0zDFFMzUOsxVFoteNt06FwJ/1gz42WVt7OuxV1lQBKwpNGaLDEPNNoNIVrYlXCf4HwjxLTWaHWqvAkEktZEV22yO61FTkgmZKmg0xpv/W6TTK5vIeoKPETbTADnyGYY8fsivQ5ANef+1BukikmVia0i94OT8tL+HwzkmNkaXdHiskZzaoR2ZbH7chXM3FWxC+IVMPz/OWPbc1yO0BmSXcFC071PfZ3xtYkZwmUjtp3ae5Vg4TGU6ZAsjsYulT7TYUyJb6onLpYIElkTrGu+qYKretuqxu+HYY49d1f1tjQ26tZienl5RRlMbd7/73VdtX6vZtuc973mr7tg99dRTV3V/q4kHPvCBq7av1SAO22jXRN9erJbUYlEUOOKII3DYYYfhV7/6Fd761rfirLPOWlH2FRHh1FNPXZW6ajMzM6sm1XrkkUdi/fr1eOpTn7oq+wOY1DnjjDPwgx/8AI961KNwhzvcAYcccgjueMc7IssyXHbZZTjttNO26h4kokD4rRSrWSNz/fr1K6oh14XVrLl35zvfGXe+850BMGl33XXX4TnPeQ4e8IAH4H73u9823Svr16/Hl7/8ZbzkJS/BYx/7WJx//vl45Stfud1z6Lp1ywdGbiuOOuooXHDBBTjppJMaZU+2FbuTzbRNnsizzz4bH/nIR/DZz34Ws7OzoU7MunXrMDU1hV/+8pf4yEc+glNPPRX77rsvrr76arzwhS/EiSeeiPve974AOKrmiCOOwNOf/nT8/d//PW644Qa88pWvxNlnn71bdcztEV2ZL8DkYiythxI3bpFWyx2oS54qyUgLtfqShWPX8VPZmJRw4dp1ZYNICBHVPmPLJfJIQvilmYPOk4q8aCd/3I6lWJdTqf13+pl3XDUi3zscFunvuSZg4gRrHydxZDUyi4QMSR1Kysup6oLJP1vDmcEkGdVFvrSIDXKWI55JsWMqG8LlU5y92LEYJwCDZRbKDuIkQMiaE0fDuLZB0rOsDExtuYCsoiBfaZJBqRRLTOVecioSVynx1yL8EmJugrAFmJhTGqCapaDE+SOQsefJvk7CbxkimjMcHKBqJhgVk4zOPwCti1KjIt/FDhl2bOWKUFmH6VxhXEd5VNsiQ3OtvBSVgvLSUsNMQRFhkHE/Ceknsqsp2de+slacWv7usL5tUv+Jr2kGpy1IIvGdBVwxMbZcx3hrDqImaQ8gSACHzGDiY1hP9sm9HZzdLskO9vWGxAGnFUHDsmSbOKhsPXntlosy73JM7QLSb2dh06ZNjb+Xk4x8
85vfjLPOOgvPetazAADveMc78MUvfhH/9E//hL/+67+e2L69aPvoRz+K6enpCcfUYDDY5XryK8GeZFvJPL23ID2f5c5tJefdZQ/ZMIejIXUZN7ATz44lWb92IEIyBzkvUS1ZVCnhx21zcJ74cK55iJDd1wqoSp9xcWM1+Zpks7XtGu3n6C4Z9vb5h+doYudoP6eGIBjEfTnP3umujKBlM4SUP+/l2NXWPm3N0p6+plo9KmGrGs5YGKVYtlPOR46/lDxlcr7OPytdPg1bTGN+ZPwz2CEvmk9hAjAVLlDHbgGUFgAyDKbWwWacBeYylvacqywWK7EluG1iE+TJPsWuWFCRvJwtuDZwpiVQiBpZ8l1IbXyHmIFaSbc4BKkFIgJs5aVUmaQQ25PfqxAIl8rfOtm/m5T4FGjX3M46RGUMm8dntynjeGzZGiKjq1SSvaqYNAd1201pCQJCs/5zA61sWbFVnTFM+I0XWUZWpGUDqVx4aVkTpPrJuaUVSyiquMg/Y2VuQKO2tk3sbOMsckfIlQ999CYsvH2tPDkuyhchQzateSxNkEu/jUTeRO34vQi9bdWjx56D3SGTo8fujUc+8pG45ppr8IpXvAL/8A//gMsuuwyPeMQjVrTP1SR1Vgv77rsvzj///B1S7/HII4/Eb37zm1Xfb49tw5o1a1Ysi5znOS666CIce+yxeM5znoPvf//7eP/737/dmZeriQ0bNuDMM8/c1c1YVWyTJ/Ltb387AJaFSPHe974XZ555JoqiwGWXXYaLLroI8/PzOPTQQ/GEJzwBr3zlK8O2Wmt84QtfwPOe9zw88IEPxMzMDJ75zGfi1a9+9crPpseK0cjeaxNLiAuzEDeaOqq6HEDp+yQTsJ1hZx3g6mZGUtvh1CBpvPMJ8BHgIoMlEaRJ1ppTWaghxpJAGUA1oDIoWeyCoL3rQDIHgSTrxztqGvVEuoidds2RhKBwRCGbsQGXbLtUv6dR73JM2+14I5VFKTCJ8vfOPFKF79+kL7ucAO3Mpfax/TYixeqKaZQWWKwttGXyZIhtd5KKtKUi9qVM5xprCg2zpsDcqMJ0oVFkyssPOawZZpgdZth/7RD7rxlg3TDDPlM5pnONdcMsRJ5PZUJ00eR4SiQbA9mXOiUSZyp5nwp1je9W3y2LxPHX6CPF5J8TEjD5p1LnqZBnWYE6Y6fnKOM+mclVo35Nm6AXslyr5P5STIBJNp+Qo8oTgKlMrswPsl9xohlEIg3wzjZxGhFBqSLI5qa/B5rTSDuDIUV0VKeOQ35j5QtmDhsZiOn2QFOmVCtC4ck+MmNQNeb7uB4x+ZcSwB3Xtp09LJmGSOYcp1YnomxbQKnDeUfs33dwOwLqb//2b3H++edPbF+WJf7zP/8T5513XvhMKYU//MM/xJVXXrlVx3zPe96DpzzlKRMLicsvvxwbNmzAPvvsgz/4gz/AhRdeiH333Xcbz2jXYU+yrXa/5eXKQEu8X267bYXYOyItWPnsplC/DIDy5EQmR2oTfhMNis+CdsYOAP5cZUBWTNTQ1SSVuLjmVgiC8fN8fAa01BPSoBYgSCxSks3X2Y4UMpeqjO3IFhnIz1mfJd+VFSRkSwYAGs5xLTsO0OKawEJUOan11z6+f21nGmm/b6FOXPIqEpzQ4Bp+pFhqUdpfV3B1CTMq4ayFKWuoIoNLCb6UyPWS1k7FtqSypE4XcIMZjAbrsDC2mKtskEK3+bZLbJfGojQOY11A5wNkg3VB+vLWRZb13DRmu0cRYd0w87LrKmSrLdYWtXVYrLyCgAXWDjUHCfkAolwRMsh5Jn2eXGftbVIgBjHJPz4+PBHNf0sWa6dsq/N1wp3hetmmDkF9NtmvSFUa54JdwfUshRTkICVDCloXMQDJWjg7iDa3QGUNwknIO0cclRjrHkf7aUmCO73PlQIs4CgJOPP3FxNoDrYumeTz2X52vOgVCUxUEVEd2X1hjDkel9q3V6mwFgvZfolcbEr4Vcl4Vsyu
+88UChfJTKlVqG3Jsqz1mO8XsakkaEvsWoVJWxpoziddgQS7IKCqt632HNuqR48ePXYnrFmzBm9961vxpCc9CZ/+9KdXTPrtrjj77LN3yH57cn3vw9Of/nQcccQR+OM//mP8/u//Pj7zmc/g8MMP39XN2uuwzfKey+HQQw/t1Ihv47DDDsO//uu/bsuhdwj2toj1rUX7vJe/qoiRzVtDZrQXbbKwQ9NhH7KzXLdT3vlC8NLQSNAksp2pY0g+F8ItcTKEujZt+CwuWYwz8RePByQRuK3o9k6ZvwknEqEh39TIRGqNvCX6t5N0S0kIIf6sDc415yzXDSQFshYuV1AUpUwBjqjmc1QT18y6pp9OiBohCV16zuIkc3F+6Kr7srUQYg7gTLSh5vpy1gEb1g6xUBpMFdGZMeWjzPeZLrBumGFNwTKX07kOtWDEGSUEj1zvQHomYMfKZJ/EDTxRlzp/wm+7x377t+3XRlal4d8RUmdIK7tNHB1EgLPIvBwoABhFUBRr45iOOZvJ9MTRi4RkJ0QnMDoI9wSTTkfvaI1x9/HUXTPCHmCSLs26W4r468Jyc1bXbxtZB8k5p9l9ZEqgLgFTeimqqknkA81rF+YXcUT569LOxmlnuexluPbaaxtRWUtFot98880wxuCAAw5ofH7AAQfgJz/5yRaP853vfAfXXHPNRGTZKaecgtNOOw13uctd8Mtf/hIvf/nL8Ud/9Ee48sorV1zjYGdhb7OtVhN7sp2WkkZplrGxk8EYy2KZ+UMCRDj4wD+/5DfyOfi4KbnnZ6+QVS/fKeIseQU0s/vSZ16rPY6Sz8QOaWQp2Yn3TvaXzo8dNlXD3hPJTr+N9dLVUgcODjAEKMuvGqIK0dGlSeCNNENsGxX6rNnPAbrwhJ3ISLdsKN8gZ9pPwq52uMa5Bvh9O3/d0tq6XRl0W7pPlCefSsORMZmiIKtZGofF2gTFgNxLdBaaMJV78gqAcwoVxYx5ICF3FAXSuDNIRq57yJTcumfixNy4VPZm+rknAlVSBiDdjQQ3WsQAxpDpCl6XaCF5lQVZBWfq2L/ORvuscX5eJralQpK+LhdkN3Ge5NueBoGFk+iuu52ORWr9vSVYRHtsK1Z7vJ1l8jdMOyRSwISMEGRZuf5lFYMVxX5SmbcYlyD80n9AU9mBFNwe8ozfHvS2VY8ePXrsnTjhhBNw3HHHeRtvT13lLI0s23sVjnqsPo499lj8x3/8B570pCfhuOOOw0c+8hHc+973xsLCQl+iZJWw19yRsp7blmlzb5hit8chtrxjIN2wvaBdhuxIozCROEsSoimV0RSij+Wm3KTDX/FiXeqPUUK8hboepgzOoJQIi5GhLkSitxeO6XmFpYtKHE/+laR2ja0bWWCNiOO2VJNSTWnNNtHXakeoI9eIQE+9TS2yKD2+qZtZf+BFNEjB5SU7pGwdasU5yfpzDmXS96kTh6+Va9xTqlHTTfvIbw1TOyhymMpiPZftBQGYyRWGGTBXWmQKHG2uCaPaYp+pHMY1a7IMMhVq0g0yhWGmMJVzTbqhVqFNIlGp0X3NHClAe7INCNHSjeshmaVpvb7E8UKpUwKIZF3jQDYQuBPkLRCPIZK0QPPekmurc5BScLVcV41BPgR0hqlM+Tp2LhBrsY3RtdIVgS7XIbTVmZiRuATpr33bdLj/KWb/2ehACg5vF2sSOodQOyYcdgsESCqNFc+pg7RMyD3ZDmjJ1YHPkaoRzyumbMh6UppN6yxQy3VKHG5Khxo6Mh+L8zjUF8x2fqbfzqo7s3bt2p0ixfCe97wHRx55JI4//vjG5095ylPC+yOPPBL3ve99cbe73Q2XX345TjrppB3erh47Fnu6nWZa8y+r/1GU1aTmfOck9a+dMdfK5pPsM9gaTmmQ4zpjklnmRNmAREaToInZjlROVOZPraQeXnzVrm5kwgNoknrp862LmJR5EAnJJnZK+jvvwKe2DdSS3YZFDOLyfSAZ7XI+IWBLAdZR
N9mStkVsRmN9sE0kWHRiz0Y7yYFIIc+GUFnJBGBWAFkOUhqkFZRVMMaCNGcE8fNBhb4jsa/S538SVOaUhssHgC4w8PbL2qKjf7cSBK6nPNCEG+brEAxU++y320aVz+ayyIsMuVKYyhQGGWFNHjMltWKCMFcUVARyqZnsn72K0BwzLbuBdAbnWA6zTUKLPCYld70FPLFaI8jTt4liGTOeKHO2htJFkO7mZz5fRBknxpPBxjoYTw4q8gFxQJBVJ1LQWcZ1iFOZT2CiDaFWZYKGcspSwZNtIlOy/RQAR3xvq7q5jmj8XEd7PStAeQ5kOStHdBF/ztOdaZDiZKuYwPMZv1BABcnui+TgxPZ+rVAogOoRqJyPmX6pPZWsF0n5ecuvMUINZLF3xY7qWGumpRp2FnrbqretevTo0WOlKIqd//zq0WN3xYYNG3DppZfixS9+MR7zmMfgD/7gD3DbbbfhyiuvXLUajLdn7DWk357uGNoerCQC3rj4+9Thn8p7imOFSThebE8s81oLMCb0ouO+Hb/ZXu6Kw6kt6xkycLzzCaYOMp6dWXVKg8PNYxRoIEkSoiTIInY4DDqRkkOyMJeFavs3ckxP9DnNNUFcVsTf+2NtKYMvbc/EcYRw8nKmFAhQB2eYjCCt4aqcHX/ZAMgKlovKBqCsgNaFJ1+inGY76zJ1Cjo4kKNEFilmZ1lfxGM17kHy/7SPbC+0xlSuQp2R9NhAzNxjQpKCrBA7MGN9wDDO2zURW47VcK1S+HEfxp/NAOJsMB4Hrjl+xKFhLcKID6S5aRLoybV3xsDVpZeTMpD6LOy00aAsh8oLHuu24GNorv8HlQXJJKdzKKWQiQxaF/HYQVh3knrt/pK2S3ael5yKGYj8Kscm5ePsrUMViP4oPZrW3JuoNSWvrSYRXIPgkxqNXMonBgsASQYjIsmpk2yEIDclgQQAzxGAn5yyxj1JqpXtK12ayk21HVMyB93Osd9++0Frjd/+9reNz3/7299usWbM/Pw8PvrRj26VVOVd73pX7LfffvjFL37RO6Z2I3TZK3tyFt9SKK2XEEQMUACawQxE4HqnFJ3o8uwLvxF5Tvm9ZLh0kW3JXEPUerYhEitaEZQDVDt5CvxdyIgnyfCbJPxCRjM1CY5GsMhSSJ498RnC7AYl9tEEOUI+kMrPpS4r4EjBWDtRC7EZ8JFIkyZtm3jWJc9Dgo3nCSYkNCnU4Zkl8pCEIh/C5QNQMYQazsCNR8hnhqhHJUgpqDyDKnJ/LTWTM0LcpOeYBq4JTM3Pd2zdPbKlbdLvo/S1J/5cUxVAsva0BNz5f7mXh5f6d0KccVBYIgMuNnJq/7TsEIJ/LiOqO6QzghB28Qcdtozsu32uvr2aFLRyyBygDC8RUsLbeNvVOLZvhaHXRLDWhbWLyMTyuG/dE622EKmJTMyua0POxuzRrnvGE39y7kJ8ksqY2BtMsa2YVbx/YwK5TAXXbKS8SMYdxfVJO6AA8XpMZPnCYZCpIOupiYPKrHVQfr7INflAgRg0AK+cIEFUrMJSx/s+HRu25gsjC0ZnG4Sfy4rwdy22ojcUR20JidshetuqR48ePXr06LGnI89zvPWtb4UxBm9729sAABdeeCEuuOCCXdyyPR+3K0/krnYwrfbxt3Zf6XGTBDwY66AVNev4oSlpJFAS/ew6FvF+ESj7tGgSMxKZGhKXEqIvSOwk2TuSqRMc+kKSiaO97RAKUayquahNo0RJwUk9vdRx0CIPt4iE+Av7UfG74JTSeVioOh9pL+dHAFySRdSoIydR313kVOos89uT4QW1HS/C1RVcXcWmFkNAKajhNKALuGwIWzDB4QoLyoaAokD2AQiEnziAJOkAIJ+VhSiXhKWls1aKTBO0Y+eBc3HsdMladY/XRMrTf5Y6JYPDpX0tk8j+sK/k+gaZJyECrYWDjZJqgI/it42/GxH9wcHoHZF1DStkXzkCrIGryjAGnDgM8yJcUxpOxzFhDYgqOFX6LFMm
BCkZ+w20I+8TAq8LaQ3HCfLP94v0D//zZJlS0NmQa+VIiov/33nCOZUhdekc4riOjHGuEVEuTkCWH1PIJDtFOSjHI1PUNdmB7jMIhBwmJPdO4oRKz42UMIhRytZn1MqYIVvH70JHxX5uSMHRZPT/zgBpAu3AaPRtpfmLosCxxx6Lr3zlK3j84x8PALDW4itf+Qqe//znL/vbj3/84xiPx3ja0562xeP85je/wS233IKDDjpom9rXY8ei0/G901ux4yD3uHGcDSUyjHILWmCCoBL7R2Q1A4Idk3yWZrq00Qq+an8GIDjyHYEJjlY7Qv0+CbSSLLsOwk9smjbpx+SimnhWtG2sNFAm1GOjjmcmEDIAU9tK6hRbF+siAomNieQ8nO1UqOiE5WwqyfoL/akyAMrXO0N4LhU521UuG4KmZqDKEfSQJfhqADrPWDUgyQZPr0mwa00VnkXhOSw24SpDCGDnELLvbYcRFcakM6H/tL/ITnvCz/9MS2DNNrZF6gnrJDrQhX3GgJ00oIg3agVZtWH4OmpVMIGneDzYJMANYDuPwIFvctLKApb4/jXwfeAXSZkEP9q6ybu7mH0qxN9yz3y5H6g1xrs37ggmyjJQlgODIagueVxJkFiW+2y/ImT7oRUwIHX8UvC4YFtKe1tfOwCWfI1wBU0Oo9pyRiQi4ZvKwysv7Uk1rzXIGg6usjVQ1yGYDUrzecg9R4qPkxB/oQ66ymCgQo3BVImk2gWkX29b9bZVjx49evTo0WP18b//+7+Ym5vDcDjEaDTCa1/7WjzqUY+aUCPosW24XZF+u9rBtL3H39oI+aVIxfQz44DF2oYFtVd7atXTY6SfxXVVJNgk0lIcIOywx4STPlMuLASbtcHiO5d80qi35gmWVPqIN1RR1iVdyMr7lOBrkXzpayPjri1bRSoW/JA2Jg4kl8pyyjF8NLrIaDIJ6kLPiSMqlbGaIIQ6JJEahKcnDMlUsONF2HIEtzjPpJ8njUIPi+xUMQQNhtBr7wDoAnY4C5cNkedTyIppGEcorRAwgEucghLxnSsK2VJCDJZmslbbSqEI2G8qTk1hTLQzAMQBCZZkSqVjeT9+jIvkVOr8k4h+AI1ai4gR7M3MDIKmjGWevMxScBCKcy6VLkuvVZqJWZXR6WFtuFaurpjsG4/4+7ri18RBQlkeXmm0wORfVoTPlfbR3FJnSIhvoCk5Kn2HJjnVIPC6kJDvnVHuae1BOb610FkBpTI4R6isi2Rc4nwTwq8yNsiMyatElfMhfGS5Usi1Ra4UZgqWtvJ+KZBjojiQf96JGhzAItGVEn7JPOHSuaRx7o6j12UcmbrhnA4BBj5Dx+mcHVYJ6X97x4te9CI885nPxP3vf38cf/zxuOiiizA/P49nPetZAIBnPOMZOOSQQ/C6172u8bv3vOc9ePzjH49999238fnc3BwuuOACPOEJT8CBBx6IX/7yl3jpS1+Ku9/97jj55JN32nnt7djagKVdHVi1o7A152Ud/DPUTZB4y4UVdQWyROe+z6xqO/2lPRI8lAaTtIMwPAmhVbM9baIstU9Swk0CIGR/xpNfUeoyKgBIm5YMpkoDx4D4PCeFhspp+qwXAlQXIZiqdCpkqAFRGYIIUdZbEQdFSYZ+2idyDGlWO/Aj/c4fNyumUfnzrC2Tc4vGYWowAzgLvX4/UJbDlSPQ4jx0OWK7qxhCTc+ChjOAZOorFaXpfb2ztI1OF3CDNXD51EQXbu895sBjbf/pLDyPblmsMVdyEA1MtJ2MtwUr6+CyrHHdCD7QxkV+Jq39G/oZgNNZJI5TlQ3/PfMmhEJIxOSeIUKoFZjKjMv1aUjshx95W86xNL9WGTJv8xQ6kbVVzq9VeAxLIJIoXKRqI0563BN/mhSPiWScT9Rz7uj7zmsm0uldM4TIwVomv0gyXL2ig5pxsFkOpTRcVQapeSH7gm1YDOJ6SSR/faZsyCT2Gb2aWBqfrw1BwcEoh8oCmeWMyFwrGOuS
zL9YAzJTxAobPmigMS8BrECSXi7HUqwgryhjVQgYE4lP5+//0vDcOjauEZi4uBV1M28P6G2rHj169OjRo8eejkMOOQTve9/78Ja3vAUf+MAHcPHFF+PpT386vv/972N6enpXN2+Pxe2K9NtTsbUR8ks5AmrXrK1VGQdoQIPCWnapoMVUppP/jt+1pSA7a/PJe0JwCiy1WI87tnFRn8pfBi9VkqHl/25k8y0VDd9Cg/CT82s51yazvpKFsuqIwg5kX7POzESHtI6bHpuoo386MsqcYXIIkuEn700ki1xVcYS5EEkZZ4uRSkhRpaB1gVwplvdJriUTbjEDQAg4oDkWjI+YXo3gVwKQpfuRrKy65O9FxlIyynSGQhcwjkAyFhGdUQ0SuZ1BCQAWINReGi0hwYgm6ss5gLdRCnB+LAj5hxbhF7I2a6AcwzbIvPT4JlwzIfqE+AP4OpNmyU8ozZ8LSZ1X0YmYyICSj6IO5Ft7XHtSi4AmEZg6e9NrkjpD2+QmwPsz3nmrkuwI6QOVJc7hZp+mTm/jnP+H8GqtCw4eYxwsEQALpTSUc6gtO54tmKNPpcuYTHTRs92FRoYeRaJUJ8Szs3DWAkQsxUo+k7KVCZNm+4qzelfxfRyNvuV5cLv3vyyV0Y0nP/nJuOmmm/CqV70KN9xwA44++mh86UtfwgEHHAAA+PWvf81zbIKf/vSn+OY3v4kvf/nLE/vTWuPqq6/G+9//ftx22204+OCD8chHPhKvec1rMBgMtu/Eekxga6f1bZ3+9xSScKk2utb7YKJgcspJbaTlypYyCUE8v7QJvLBR8nxp1zSW7RNIgIU805RvZJJgBaIkv6TLfgokYiRL2scIbWufk2Qy+eemg2WSL80KTMm4TjJH85ysMjgTM8ClHmFQiFBRmjDUghabAa3+SYOp0iCd5PgyRp2zUMlIkCx15J6MzKdBwxo0PQulFJzIK/pXyvIJ4jY9bshsaqlFpMQmpC3Y9vtGts8o/iF9lisF4yzgg/Y42CYGgWlSjeNxhipBI173hp2V9DHbtU17Ha19EfF6hFSzlnQqxT0RINcVHJcVE5+lkt5c+5cjjEK2I2LGn3EOynFbHEVbknyPkxObGLGGc/uclnrmd5DNAeln7XtPbE1y4T4gMOmoBt7q9OSgbC+EH+VFIwAs1MZr24W+DVqpUL+PvNS6ZAUrsJpCZR1IcW8CseZfkAUOgY2tXvD3H2kdyhA4kSR1JtjWDgBsHmt3Itpy6dpK1rT1Lsv0622rHj169OjRo0ePHYF99tkHf/mXf4kXvOAF+MY3voHvfOc7eNjDHrarm7XHYo8m/Vzy2l4Ab+uieE9xPm0JXeexaWywUNmkTgd7gKyswX20rSzeY00tFyLCOZuPycOU6AMQMtkkQpYX1hKlCxRa8WLQxvoOS0amp68TJ9daGKfET5KtJYtuSn8XiMNJoi9k+CWO+rDItK5TlouhQ/+FvvBRp22uT/ssuU650jQ7CvCkhv9tKq3oo8Fjtprh7L6qbMhChqyxtCbceBHko4SpGEJ7yR1XV7DOArpElg+R+Vp/Loh6tpw5jt2EXN+EoHxE+FxpUWjCdLZ9d9Fy9x+VC6BqccIZR6RYVstUcLqGzgpoUnCqJacaIuqbNUUaGVpOs1Rnxg4uJftIrsWShHUbco1MCZRjvkaj+UDSuiQTc/K3JmQApkSgq9HIRiWfucc1WxSkbos4fKBUdDgqzURvXoT7hSi5X6w458SrmDj4UmdS0mftOoZCRjuw45DIwjlfqxAAVA1NGZxEz1sXpLsMmvfQ1sJaB0tenswRjGXHFJR3VHoJYd3yvjtSforJokSnJ3JT0r8tywsgyHqSqVniy7mJLJKQbekd1G0ZvNs7nv/85y8pOXX55ZdPfHbPe95zgmAQTE1N4ZJLLlnN5vXYidgbbK40KEiTECkxIx5AmNzEdkqVEQD4urNM9llIUpGKwTmNA9aR7BNZSFP67yw79dGcz1OkdQYT
IYOG/acIk0Sj/y6QMkmGXyBnhDSTtiRzaqiR579zaNk3S2TaBdtIZP5crEmtPIGl4YkHBQy0Qq4Qa4qltfJIN44XnmOGa/RSXTVtLAA2n4LLLajKofNpaOtQ+mbXFhgbh8FwLWw9hlIZsv0N7MJmuMX58BxWa9YDgymfbaVC8IxIH0qWFhN9Q9hiCm6wBqVTKGvLtnQSUbVa9w3b55ypZRyhIvJyiRal4Wd0aRwyRVCe8JXjhyA+6c92/WsgBtAAk89SRJURzTdAwwYQe1nDxrpw7Rq86fNbgnBIeYl1BCILis8RcMi9EgCS+sHW/w3L2W3Wr4mgHKuhaLYljF8XaKnx1yJDw9pIzgHNcw5tbY/zdvYrKc/Ip4GNYLIPCDaH0zVI56DhdKOmtwR8OcnwIyahQ0CSypp1iMOYrFGoDMYR1zFUTK7Vlsk+4xzISB/IORKPId/HWupVJufrVBbGCV8jCu11VRk3zT1p6xU80n6tLAd4lcYmErt8//Vg9LZVjx49evTo0WNvAhHhxBNP3NXN2OOxR5N+1Hrt+m5b97WnQ6KA5ysbFqC1+GDYHx7qXknkufLuJtvqBSG7rHNBtlMWvVJHpZEFlxB+RSL3UhBL6lG52HBYoeVwD46hpaLbl4KPUJZsLZGzShfU1HZKtHexxHGICMqJEyD2B5A48lv9kPaHEH5pfcLgIPEL+VQKqX3eLiUraw2YKvyWCh/BrhSUjZlgZA1n+KUyn54cIl9bxlmTtKUO/eUsSyK1Scv4B7dNCOBmXy3ZvQ10EXzL/dTlQz7f0cZ4DaW+kThIfPS3OBc0RceaSD015BwbB7Ag64Op6zLIM2o5X7+NSCDB2uh08s7CkG1ha1A94tol5Yj/paSsJ2QDlMYEEuKP/7YNElf6EACoHPE+0ky/PJEBLYYcGW4NqK589DdLk5Hvu+CM7SLfrZmo9dfIuEXLuZxmuyUZhVtC6nfQRLDkM/oUYAnIkxEiUsG5pmYdLPiofCeR+zzS5DMn0fkSFKAAiVZfKsM3jTQHAO0zO53OQvYH0nNMyUMhVXufVI8eewW6buWULMsUNT7XhGAzGTQzVtJgKuUZEGV93b3GQZvEAAnpYcpIgrSfDV4WVOZj47qz84AmCSjfSub+UuffJm4C8QM0svoa82C6D/KyiC31gubBJoNN5HkvspBZ8tNGllE4OU+WWEzYkyFgw9agasQym/WIj1FH0i9kdOVTyPwxlNciZZuYUDsgz6c427weQecFbDH0naUSwi+tK90iOsGBZy7L4fIp1FAojUVtI8Gy2pjOObPrtlGNyjqubacQMrq0ifXuZGyrNtnXDoRKnoXt5ygQx1aaIRi+TwlpCZAzZSC3kZJ/ab1DIbesBRQmbBYBS767MH4sRTWOtsyuhQM5JraNf8zDy7pywFGzXEFKbMbj8fdyj4exJPdIsk7pqq0cbDMZLmRBYp9nFrC+Vp8EUeokkE1sMJ3HoEgh/MTubNwPNhCKWuxCFcO5AILy52csghyvgs+uVcl1TQPqiGKQWViL1YDzRHdL+YJyL52qdCBvYxBmrKlpbHef9+jRo0ePHj169OjRI2KPJv1u71hqrWMdsFhzVKT8DcBn7vGilxzFyHKpy+JfZdv0OCEy3UtWSgacQLJp0vpvmSIUCqDxgicdRjHy2rakMYNziEIdrHb9j8a28ta5KNnkiT8gWVQD6JR0DMdcWkIrRCLDE3/JT9OFqG050gIRQZGgEOmbUKMwaYNIkrokKyguZBWU8g4Q6SP5fTaEGvh2+ahxpzyhV3iCCQCsCdlfSOVdEkLRSTacPzdapt+dnH/61Taw5ttMsGsvUTTaGEm+LljbIH4bZG9K+HWNAyH+PPtHqWRjOn589lqoW1T7Gm9JXR43WuCMvnLENfpqIf1sI6qZ988kbBtB/qgl9xllkbyjyHsDSeso8ym1XHJxgjF5h0FCJCodZv/GtU6cUnI/LOWUncgkESK2/U+IwPbvHUJWi0AT
wYJrgOaanY98aZ2/HOS3i/eW3G9RkBahpp/z75taw8k5tj4TBznvL2mr3wWThTy+AvGaOLvDvig623eVT0ppgloNvd2l9r/XhMr02JvQFVSyPftYCoGkax0n3Gqp3UGcFW+TVGYrNpSXqJPgKiJAe0LQOpbalCdDKmcYCD8hqsThHxz1OjjwRV44rYElaM/qW3Kep6RImsHUIH+kC9JMr6QNivz8GSSxMUn8pcFg6bPaEzpMnhA0YjAWEIOr2qQDwAE9E5DneV2C6jErHyzO8XO6HMUgqZzJOjIVB0VRBk1c38yBSTEiIMuHgLOwxQxIZRyEI9dFF96O0ZP2i7NR5lBlXM8vH6I0DrUVm3vHzLVTPjgP8JnzPnu+MjbUNjbWBfJaAYDIwHbUbmwEYgENe1bWFJLBB8RxpKkjE1DIoboM2axiZ5FJZOy9lDlLQyayg0o1+pog6xOCIYDgQqZoOvYtAOX7XOxcAgdHeuuLT0olfZL8Nhwefu1ArfkoDTxaTsYxzfKDnErGk4UQf0rsuzwoK4TfSDCSyHkK2ZfaZS1IIKDIl2cqA0Xd3kTqE4EADKouRKEEQHOnKjKEiOsLR7YR4JYGtVE2AJJyEc6xnK+FKM4grG/bwYc7A71t1aNHjx49evS4vWJ+fh6f/vSnsW7dOuy3337Yf//9sd9++2HdunWNYNIeuw960m8HYzUcUEuhShY+QDMDTRMvTIyPHG8uRmOL2hGscC5I7aU1+9LfGjjvdPGf+YU0SyuxE2GgHMiMQeMxqFpgQqkeRwdVB/HiQoaez0Rqk39LgKAAZ/wiEmhL7kz+QBxSLcJPnEBoyl75gzTIAs6gZIedRJy2NxcnVK4QJSblvFMpQb+Al2h8cc65ZFFtAGhVQOvC14sZgPIhXDUFGlRQMyL9aaLET1tGUuq8eSeUk7oxqSxqK0pbCJDUaSNOSslS0AqYynbCMrWdqbnUuEgdUUndF0cEguqMcu7aBwB/cW0zK0BkwHwUepDx9Jl9dnE+kn5VibQ+XwNKA6jgOpwvDUeIJ/tsVcMaGwi/RldoBaW5np4qKq4nVI5AVclOS2vhJNPPZwdSlsd2JJmC4kATeahGP6fOJGAyQ1fGkTiXUiIwuUnEAacAn4EHwCEh+wBrCbn8QMfDhY/aWX4SDQ4HDYAchYw/45hAJCDIcjUut/+9+PPSdgIdTn5SgC7Cb9P9pG2R9300eo8eOwer8RwyrXu2ndkmCMQ/gJA135K3hC5YqtPbVUIUxZppLmY9icy69uRWncgZikpCXcastKoMz3kmpzSTS0K8kAp2YlpzmeCfh4RGsFc4r46MQFGCmKjhJmgRPim5kdoRwVeuVDBMI6el4/NW7ETZn8/mB4BCZXBgkkFsEzmHVBpVSKhGoFVaJ9ow4aeqRdjFedjNv4v1kbMclOVQWc7PQzsDsjW0zji52yUZm9bB5RmQD/mZrdm2Ctlb/hkbsq2kTRJYozUckd8uA3QBU3Gb1w30DretKuMwMjaMS02EUW2QKR3kVJUDSEgduRYmCd5rKVU0n4XN8RT+kiw3JDa3t/NIyL56zBLvpuK1RDmGTRUTMq8DkErhJjZJ+/mryGdOWjZAXDImxSYJYykYBD5E0rEtrjzpK/trg/wXykUFFVkLNohvuQecbQZRBQI/C/cV39MxAC1kDSbrKdcVTNm2ydqEX4cCCimEwCZN3rb056wIsCpKEwOS/erJQEr26Y/lEJsejiHzUahlLTav6pTRd0hqpSOud9tBlz169OjR4/aNsixRFMWubkaPHluNG2+8Efvvv/8OJ8zm5+dxxRVX4NRTT13RfmZmZnCXu9wFT3ziE3H99deHz4877jhccskl2GeffVba1B6rjJ702wORRj8CTbKP//Z1sxxCOCa1IoVFUkfkPQ3g8/7ijmKEbvwdEddPoSRyM2SyqSjpSWYMqsZMiKQ1OIBJwk/2LdkzjiO/nY/slgwb3mgZoiZ0UEc22BK/Ix/V2m5T
Qw60LV2kM2hS0NrXl0kcALGuIULUa6zfl+yzFcWbgiOE0awViBgsS75GR/i9rUF1zoRfl9xSev6pAyBxQsnf7QwlcdgI0Zc6DlNHStsZCqw+4e2yImY8JOcVSKiUxE2RfOYS8rZr+yWvu/+7kTnoHVKuKmFH83CL87F+X1XB1WVnDT+px9cJOXbiCHHGwBkbCD+3hEythR87ZcVkmrII8qGKSdngXFEKrip9doG/9kqzA80allgCoixTqx870SYI230H5etAManPzjWEiHvj5y1xBHckQG4RQcLYiZPIQWkKUsRynHSs2mS+a5wOOsi+1tzSvhe67o1dBSJKovR3wP7tjjVMe+yekOdCm/RJyR0Aoe7XnoL03m1nJsn5Lnlbt5+5QixZAIod9DJryL4lcCZkPMOTDGmv2phZTtWI34us53gxykXLPF8MYjvkWe8Jv/TacC01tv24lhqTZkvVd9pqpHN/kuG3zb9zFnAEcgmR6J+9hDo8m7QP8HL+GgkC8SeESUKGhOyqxB7lWsicke/qCm48Ag2GSSYSP++dtayQ6AOx+FiimBEDSpzKQDp5VkvwSyp1mhIyNGmzSmDMzsDaQQbrHBYqw+NE8WkHkqWdabiEDd8FUctIx5ZPsOtcEzRsrCSzj9IayXUZ7BiyrSCulg0iR1VEYY2jQHDE91qiFTIBsedFkpdIbAgX7PFUjlPeagmc9KQ6gAb5pz3x55z1BFtrDZLY4s6fExPPkSh0npjjOpU61gdvI7WR24TfUpCABSFlSQV1BcBn101IGVN0Vvljkn8PWC8pj8Zc6VLCbyl0ELe7Gr1t1aNHjx67JxYXF/Hud78b55xzzq5uSo+txObNm/HCF74Q++yzD0488UQ8+MEPvt2RRv/+7/+OW265Bc961rN2yP7H4zHe9a534cILL8QHP/jBVdnnCSecgO9973t48pOfjK9//esAgJtvvhlvf/vbceaZZ+Lggw9eleP0WB3svaSfLCxUxym2SZwdiB1hutuwACVkLWIISDNMHDJf2SqVomw7plPZpy7ZKv6cP4lOPGoscHPFUdeZq7nmTLnIzikflR6zrTgCO8UEUSMLTYsQsU6quQCOzpIkOraNNBtpGUwQPWmkeSphlDjTxIGjVIZMabismFxQO+OjyWtMOkmS6HW/z8x7Qjz1OiGN5MRxpxRIFdDDItb8S+S/uk+yow98W7syDlPZMXEYGusmHL7aD5jF2nKGZ+Ll3Z6xvyRRSAp29gBQuQA1d9PEd416iEics1IMJ4yFLPYp4j2gkrEMRKJWrp+TuodAqClDpoKd3wS7OM9E32gBdrzI8p7WwJac3UdaxZqKWaytGBy1qbRRO+rZZ/gBaBB+abYfaRU+T4k/0l6MSurgKJ/hpyPJFwhIn9VAecFZgnUFKoac9Sd97McIQbGbbIn71jfKnw87tZyz0H68ZYqlha2PoFd+biLlnWmK4EDLyjal0d1WvOaKHYtacRa08RpcDs7Lh7mQfZsSf0vV4QmkRjt63vdHFzG+FFbsTO/RYzdBafh5MJVNztTzlQ32x1RG0HsQ60fgZ19p3VJV5gBMEpuAJ3uAQEw1yAxn2Ycv6cxAkDWW4ATIexfnNnKWs5zqEajyiglePpqJj2a2E5RiWcLGSSlYNJ/faZaffMJNW3qO4kCKSF4waZjYi+3AqQ4CK51PJ7LEgCBzHn7jSbYGoWprwCWqAiqDVlmUZHYOtbeN5VxVanM6G7Or5HnuOAveLc7Dzm8OwTosiT2EK0ecFT/0mf4d/ROeIT6gihwTOhzZkthXQr6E9UkN6CIhR3z/jOcwM1iD0jjMVxaFJkyt8r0k/aMJuPs+BW5a0PjlrQvQijAmwmxhkGuCsQpWSe1Jiplccm06MtPSkRSCZ6QEgPzdqu8dagUnWZioYyamBFeJTHqQRVecKWnF/k5fk+NJdqYiglbMyBExoUktkr9xTR08QRjniBQSvKT8/vnDZmag7EjWUZnPjMuE
+AMaAYFik7dJ81BvmyZlVcMQTDONQyOXmNG6yDaRtLeWCXLFJLwW+08RnKKJEg+S5dcYpX5ckFKJaoYvRVCVk230NqnzgYjxfvHnmZSXkIzUHj169OjRQ/DlL38Zn/jEJ3rSbw/C7OwsXv3qV+MhD3kI3vjGN4KIcOSRR+Kxj30s/vZv/xZZtvfSFYJ169bhKU95Co4//njc+973XrX91nWND33oQzj//PPxq1/9Cqeccgoe8YhHrNr+DzzwQFx22WX467/+a/zzP/8zzjjjDLztbW/D3/zN3+DUU0/Fs5/9bJx66qnI83zLO+uxQ7FX3EVdrgpqO6S70FqcpgTA7rSU6IqWbgQZymI6rPr877wv3DmOZzXgCFeRZeTvXdiWJAvGO4Gi85uSIu3UkMDhCE9/SCH5JDLdO1gc8oZDaEvu766C9vHLJMMtjV4Ftj6CFVgiwthFZ0yaneesl+Z0kZAEglwO6QyuLiaJyHSf6bFIsUMplUlMnGbiAFJIHGG6SVYJOQcolvtU2cSYbY+bNtnbhjNxES+Ei8hXCfEn2Qki5yrt1SqJ8l0BtriHlpykU5plpdrZaG1pIB8tLA4DuS8ax06i97XiyGrlnXZkFeCcd+TVviYJIdTcq6voxPBkHWkVMumoGDKpNhjG+ooCa0PkulM6OBydZXJQgUk+XfA5WmMhupdt8q9xzsYCYOeYsxbOk40uzf7z5CPlOTsrfTuRxwwSKoZ8HkQxU0FebRLR7YlXpzLuL3F0WgJRzRmyKsNQZ6h8xl9pHCorgQwuJMfA8ZiScZiSfBKk0PQX8X5ypSB1SjnDwEJblti1jmtmaXKNeYyvPY+8BuHXngfQDlDwBLNbejzFrXY+lJd83WH77yyU1WNvQtvB7RCDi0oTCazwfUJalbZFkPngoA6ucJfBwQcIuCbRJ8ETS6EtF2hckuWlsxjokxAPQCQHNHGmkfCARPGfkppnEnRUl0z8GX7GtOvDdtWE3RIky8+CbcO2UaZSG4a23B8TSM69EUiWED1Sf8wF+9U2ZKKZRFoioEkykdJDpoEXjimtYJtKmyCBZ+Bnk0iASqZ72JSf6+Sf543jIF5/1xWltITSgpPnp/8bVPEzUxQagCAjHv4mQqER6u6tJqj1CgBjYwHDz9GRsRjU1sspRsKuTeq0sxi71ivpb+TZGqRqJXtV+i2pXRmCqxbnORNzPOL9aa9KoDTUYApO5+F6sl1iwVGDTFbx/ckPfAcHa6LEpyXn78Gl+zisDbu+IwflyCsV8GdJHFJjrBB5KV/L6yatJVNVxR8DoS9t69bkY/iawkldzMa6COB+lOHcoVgBIKzP+CRa91P6uQRuAYCtA/HIwVlNhHmrI3OTnPP3cs3qFf4eC9uJ4kQx4PMXu55Ug5Alcl7WlzwJvfMfKL1t1WNvxubNmzE7O7urm9Gjx3bhk5/8JL71rW9h06ZNWLt27ars86abbsL09DSmp6f7emVbgV/96lc49NBDobbGL+tx8MEH49JLL8WDH/xgXH/99bj66qvx7Gc/G3p7ZJ/2QKxfvx6Li4t44hOfiO9+97uYmZlZ8T6dc/iHf/gHvOpVr8Lc3ByUUnjDG96wCq1tIs9zvOlNb8IjHvEInHLKKTj//PPxpS99Ce95z3vwhCc8Afvvvz+e+cxn4s/+7M9wj3vcY7uP45zDwsICbr75Ztx888246aabMBgM8PCHP3wVz2bHIC1DsauwR5N+Qn4sSYi1o49TJM7bLmm2Jes0IEbI7kzIeS6ViQcAeWtutb4QvbFeusm6UKtGCD/hDNJFlaO4xNfeEaW9kzxXUhcL7JCyMZNNyL7Goi9ZzDZq87XJmdSxnmbedSHJ2uqsyyebta99QviFgvdpW9ttaTkgGp87X1ODPOGmxuzU0ZknhFqR7h0kFHW12x9D+oqC9GYGZAU7MLzrQRyqdesGEAKindWZOmRTyKI5nYvSbFEZeyJFZryGIjlfx8NLNe5AtZtWg6NjsEEA
IrlHWve3sQiZkl21KoHmvcVOIr4fcnEkmRrk4pgL0dlJ/T1XtxyxWQ7KCqipGSbPhtOx9hLAv6tKoM5j3T+lfXaeRqzXhEAmqjwh+3J0yn2GjEBrQcYCqDgj0H8v7RNZV8oKJvtqXw8wy0FVyaRfXbJDxp8LE5l14xoEIhsAUdWQj6X0eyqBfIhCZcFpEmRsLXxtp6YUHRPOTaIvzU6WTD9FBOs3yDU7g6zm62icQwGCdpKV7OW5/EVP61o1MlJac1FwfpEKzv5GO8JYohBI0aPHngaX/GsEjiRjvLQxeEiQSo0zKRi/JADIVWf23668TUpjUVuEwCbJXl+K6IpO/BgYoxVFh7ytI/GXLHhVI5Aq2lVsb/lnaBpwZbyEtLer7HiR5+fxCEF2UuplZUtEUDob6gQ6L0Oe2q/OpZlIyc9EBnGZCazx+OwiuoBm8F3LpkrtKKfAn2t+VnAmNttIPN96O7Pj/Pi4EoDhCU0v5Rgm4FYWIsdL6VC7lnwGPGkNlxyGpbWTTPzkfNqW1BKxVPF3nlx05GuWqYyJG58ZGJUlvMQ2oorGzrg/LBzmSwOtCBU5jGuLsbYwdolAMZGZDFmMqrGWknHeBsl5prUqq1EgPKXut5Dc1tdKdsbXRlYK0JwRRsWQ61Lrgom/ZI1BQoypLGTicgBkDCjimpYi9bl0QFzoo47vmTv06ycnKydfNsE150T44zMhRyFYgDyRxz+O9qzMv+IsEIlhGbeyfdwubZW/BioJPEjWGWEcyrX0aHweTryZ9SfSumngbNi/s5O/D2Obvw/S98Hm9Vl+WQEpN+BkfYfJdcuW5qYePXpsP970pjfhpJNOwkMe8pBd3ZTdHv/93/+Nz33uc5iamsJwOMTatWvxuMc9bpvIjp2F733ve/jZz36GmZkZzMzMYM2aNTj22GP3KlKlLEt8/vOfR13X+NrXvobHPe5xq7Lfb37zmzjttNNQFAX22Wcf3OEOd8CBBx6Iv/3bv8VDH/rQVTnG3oSf/vSneO9734vzzz9/m35317veFZdeeilOPPFE3OMe98C5556LT3/603jrW9+KI488csc0dgWw1q7avb5u3ToAwI9//GOcffbZeN/73rfifRIR7nKXu2A8HiPPczzjGc/Afe5znxXvdymccsopAIAsy/DoRz8aj370o3HDDTfg/e9/P97znvfg9a9/PR760Ifiz/7sz/CEJzwB09PTW73v7373uzjrrLPwgx/8oPH5Bz7wgVW9DjsKZVniUY96FA4++GCceOKJeMhDHoLDDz98pxKBezTpV1n+10a6PKCW26arllP6m3bRebkYmhLpvxW0eXvRXkCnjpsYFd4k23SIlo7hpuJQsuDFrtT8S2t2iDNKEzvPNREK7QlAU7LzJXFGdS3yHCk4X3suFJCXiOBlyK5Qy8M7ACYcSRL5KaRCur+0v1JnfViIuoazqUHiAd1EYyo7lHyWEoeEMWdJ+fY1CD3ZXtq0FAmd7DeehIrR6DqHy4fsoMoGnDWlC+9oIZ+F1yS12Fnjpb08YVdZF+qbCSVI3inYqEVI0QHZBeuEbKRAju2M+8KRgsuG0TGQD+FIofKknvReO2sjJUEbUfoJhMAR2bNIshPXcMyH/ofDIE2mpmdhFWdbUlY0as2ELLliGEi/EMUMxOud1KkRh26QbpMsQABS329CArQlB5rKgFpj4UovD9oa3yTOIB+1TEo1sxOzHEhJQc3n05ApFQIzlSxVCkre534O0Ikzpx4DukCeD6CzIbRBUjvHwTqO5q4N16KqjDjXXXD+NMi2VIdLrr/x85dSyDUh1wrGArlycFpBOy+x5Y9Lfr4LGX62jk7mRmBAdESTypCRChHvbamplETOdhojHkGaQDtQXrFdJ7bH3oOlbCtAnNzwUnnozHJtZrfEe6E0DsZypoZCvO92FxVQCWABEJzsAN/DYZbxzxmRywQA8ufEAQUZExetBQiTOARohPnC6lgvTeysXFHIgoK3tRrB
TFoDWnM2eSIbTSKHJ/DzllYZtAOgHMjRxLVqBwhJW0UGUXs7oDMbOjnO0hnRsT2U2k7JeTHp5VjZQGUAIoGk/fNSai9PEAoeXZ8qbxuTrRtZhGKncbNq0HCGibb5TVBKcQZkVoBylrymrAiqC0H+3EltN7avrUPjmgd7j2KQUMgyDFKOGaA5C18+o7oCjW/y1zpnu1klagY7CMYCty1ULNOvCOuGGXJFqK0E3VDoT/hzgVaALhLliRh41ebHwlixdZD9p3KBX6sFkDUsj+7JIDdeZJsnyQhju4PtEDWzFi4bwOZTQFZEG4PIByQiyqZ6ezlmsHEjnVd9kLtmSy6DZRVXkNgxiIFJEp8lqip+JMAq3tYS25epw0Iy/FIJTRXk/SnI1lrPDIqNn0rwC6QWH9ddZ/tFK74Wbok1SSfxB4Ssv8Z85D+PP04CJZNsTvI1Gu14MWQrBzUJsTdzvo4uH/K40gXb9ojPGHnmSHCG7m2rHj3wX//1X/j3f/93nHXWWSve1/HHH4+nPe1p+MEPfoD169evvHEAvv3tb+P4449fFQfnhRdeiLPOOgsHHHDAivf1/e9/H9/73vfwZ3/2Z9v1+7ve9a64053uhLPOOgu33norTj/9dPzxH//xitslbfv+97+PP/3TP12V/V1//fU4/fTTAQBHH3003va2t20X4fe///u/ePWrX42Xv/zlOOyww1bcLuccfvGLX6woA0jwgx/8AMcffzx+85vf4Be/+MWK9wcAn/vc5/CCF7wAADvuf/vb3+LBD34wXv/61+Nud7vbNu3rP//zP/Ff//VfOPPMM1elbRdccAH+4i/+Avvvv/+K9/Uf//EfuOaaa1albfvvvz/e+c534jnPec4213W7973vjX/7t3+DtRZTU1N4wQtegGOOOQZf/epXceKJJ66oXZ/5zGfwkIc8BL/5zW9w1FFHrWhfAPDa174Wp5xyCo455pgVS5CuW7cOGzZswObNm/HoRz8ac3NzWLNmzYr2ed111+EpT3kKXvSiF2E4HOI5z3nOdu9rbm4OP//5z3HMMcds0+8OPPBAvOxlL8NLX/pSfOMb38C73/1uPOc5z8E555yDK664Yquvw3HHHYfvfve7eNe73oXzzz8fN93E66RnPOMZeMELXoDnPve5eN3rXrfN57UU3vKWt+Dkk0/GEUccseJ9ffnLX8Yb3vAG/PjHP8ZXvvKVUFNxw4YN+PM//3Ocd955mJqaWvFxtoQ9mvSTOmdtNCWloqNCvmubHOkeYt2S8AHL69GklAmw8wnAdg2yBlKHiERy+noh4lwDWJqRfMipS86gLW8q9a80gSW5ZLEuDhvJhEkjsNtZWErzgtw7LUJEqmwvGVOUkpfeGWR0MyPPbz8h++kdK2GXbQJ0OcIvdewvB3EQIYled16eyphQJyfUfEmdXp7EFMImNrsVYy8LZ/ncL4RVzlHMzlQczWy5Dgx0zdl/KoNSCsYJuUvB4SCL5VAHwzKhVUvGlPcaaCVZewiEiCN2+AEIJKD10jryWXCW7KwbQRx2Og9OuLSmmkwH7eyFkCnWiIbuRirNxH+7KCXr5biQFbzhYIqd4ABfr/Taac3OxCwHBlO+3UUcG+IAUxmUqeCqnGtDJqSfMwbko6Gd5ffOGn8Peom3hBzkjIVI/onHyVkbsgNTSVCAST/jX2M3TxKCqshjtp8nBCnPg9OmUScwyyNJmOUNCS44C2R83xEpFLpA5Z2mnDHjJuZaIfyCY7HlfTPti2plDufoH2UdMgVwvHoc14AnutuEX5r92xG1LtnNjhQ7pUn5+bSJRmBGjx57CIy3rZZz0CiiiQyM9LNmBqAnApyfm/0Hys+tbUd13F+3vbbat5N16bOuSW7J+zD3QGpKxXMk4hNQAEhRCLhK5xHOXGOyVBOBlONntj8IO+V9hrltyokH+LmVne86ZviJPGWbbHM2SodKEAs1+zpKKCeHoWafpJ8L4TdxnCTQKmREyx9dSEnCxCZz
jufstgIHpTZYEkgl2yt0Zy2mQVcOCHYoqQwuK+BqJhfUYAq24MAeqqrwbJOacSLdKsSKyOQLISYymBNjU2xaUvyKrGGHBtUAIQNt7es4LgBuyJlVGdiG3gps7/3hnEPpDcMiUxjVloPEkv1OIMky21KWXPiJiQGDYS2RZvR54g/e1kmJIQBB2tPpnPskWV+EY6T3Drlw3bVSMCFjzoWx7BzbBcv5o53rGPdLbNdeTVi4qJ5BfORgl/srlnZfl01rKX5JrTmztmwf1TaOS4HYNlrxgl+Ck/RS9yUQx2VKUIfGJbaRf20ERsrY9lmbIYDT1okEvkVbNjfMX+na0U8k6fmQJ00p/N2jR4+iKFZNkvPhD384rr766pB5slI45/DVr34VGzduxCMf+cgV72/9+vWrVh9KsvNWgtNOOw0PeMADcOaZZ+KMM85YlXYBwNTU1Iqd/inue9/74l73uhfOPvtsPPe5z91ukuKQQw7By172MhxyyCGr0q7/+I//wP/+7/+uCul33HHH4ZJLLlmFVkXc4x73wItf/GK8733vg9Yab37zm7ebfJqamloV2UbBPvvss2r3wtTU1KrJof7e7/0efv3rX2/3GDv++OPD+8svvxxf+MIX8KAHPWhFbbLW4uUvfzkWFhaQ5zl+/OMfr4ioq+sa//iP/4hXvepVuOCCC/CKV7xiRVmza9euxde//nXc8Y53XLUxcvDBB+Pb3/427nvf+8IYs93nOz8/j0c96lG4+eabcfXVV2/XeRIRTjzxRJx44on4v//3/+Jf/uVftplQy/Mcf/EXf4GnPe1peP3rX48vfOEL+Od//md8+9vfxr777rvNbVoKzjl85zvfwde//nWcd955jfG4PVi3bh3ufe97Y2FhAddddx02bNiAM844A09/+tNxzDHH7LRsP3JdrNlujk2bNmHdunX4n99c35igJhy/SJ02zVp0/Fn3QrVN/IVodSTRhUkE+I5GSpS1HTNAQsiltU9ESsgvhk1CtHXJhIZj+IVmOwqf6hHL8dTjeCxPmDVq8ElUcuZld7IChrJGtllbdjJti0SksqwikshwT84tV3geiA6etnyna3/v9ye1+pLvAlKCMSFqxKFAppzYf+gLycjyGVzwNd+CnI1kZ20JQfYmBw2mmAScWeuz3KZDH4f+zocwUKitQ+Udk6XhSO3a108zjqWbKuNQWRucmbkizopSCoOMCd9MSd3GmPWX1nqczlVwKsg1XAm26LDy/e58lqNE3S8n2xl+2vo8KH91bB8k1tAkvzWB689UI45erkexLk0S2d8gvImj4duEdZCMNWUch2nmbMhA9aSxjB9rGnJXoR5gXcJVVUM6yY5GcNaiHpWcAWgsTOUz/0wkAm3F49JUNZyxsGXdyAyULEBVZCClkA0HIK2g/d+kFVSeBZJQ5RlnSAynoQZTnPE4PQsaDKFn94HTOWwxAzdYA5cNUBdrUFqHxcpibLjWX2kcauvHqh+nbbJvKSjVzPQrtEKheTwPM37NFWfWEACNZJ5pO7WS9xPOrcagac0XMmcohU2bNmHDHe+MjRs3rppRvRTk+XjpQx6EmR1Y/Hq+rvGIb3xrp5xTj50DGTu//t9oW4m91DYVTcezXJ7vaeBVSoxJ1kmhKcpbJtm7bRJxKlM7lDB3ADaVFrV1GGgK2XpCBqTZLvK8WaxZ8rA0LmTv5P4ZMcgIhSKeT8TGkOdBkpEes6Li+SqSeQlQ4zlQPYYabebnjSljDbw0eCghQqRmnMun4bIcdjDLmfC6iNlp6bn7/q5t8ppkI2mFME9qQrAFwuVIA6ikhmESnNVQdhBVB2dj4Fg9msiGc7kPjsmHDUKpkVUoWdhyLM3BNKVTwcaUsVUo+P7zzzy/rcjc5wpQ9Qhq4Xeg8Tyw+Wa40QLs/KYQyKL32QA3vQ529gCYqXWYryw2lxbGsj2lFV//NYXCQCsM7AhUjfn8JEtf2ikBIkn/iZw7OQsaz4OqBbiFOdDsHYJs
pRuuhZ3aNgfstpJ/v95c4f9+438AMOl32H4z2G+6wD32nca+UznWFAprC99OU4ZzMZis49e2BxuS2dUIVI9B9QhqtBlUj2E23sJk32iBbZvUVgbYfhgMoaZnoWbWgtasgxushcsHPN4lKEvscxlbQGiny6fCeqT2NqMoX6RBYUsRe1uyK7vW7IGwQxyXQFNJRWzp5eT1074EWsEVACrTtPUluE+ORYSQRZwpUW7xayxgyeAmIJkD28opqdJLajf5bRrrJE/sunIU7VMj0vVFXNf4zM22XTiuHUYmzrsuKU0xt3kTHnSvO/W2VY/dGjJ2+mu698Jai/F4vFMyNrYXN998M/bbb79d3Yw9Ep///OfxqEc9areXEezRjc985jMhC/eEE07ARRddhPvf//7bvb9LLrkkyFkefvjh+NSnPoV73/veq9LW3QkLCwt4zGMeg5/85Ce44oorcPe7331XNyngN7/5DQ466KA9RqL4wgsvxLHHHotHPOIRK84MTbG19sUen+nXrO00uY0s4KxzPlOJZVIAzvpYCltBx6wKtnVhnjrejIOvweejlp2XXmmL1NiaozpbDhpKs87EYaSTiNkgk+Mz4tJFYeLUDkokKpHx82SUUxmMnZSeaSxqk5pyxjm+Rj4rQCsFrRKnESUR5dIeM0m6dRF9od3Ocq0xZ9kRI3X7oCYXvvIb3ZSYAgBn8sbiN9QQke9Tyca6igtekWkEQhQzgFgHLkgQJQ5HpUCjBUAp2MV5doAMZ0A+g8wWM5x9ZmtkWQGtCxCpcH/UVmSFLNcw0ZKfFglT6xys5Ww+wzcKy4IlJLkmatQ+CkRYK9tye7HFe8FLRUr9mKajiXw9n/jZUlKe7c/azua2VKNk+4UMgXzox5EG5dOcqZC2Mb2f2hK0MjYl603zdXNAcBSnNSXJGt5/LsR+DSqHHAUvdVGsgRvz+HJS96ku2Tj1zjNT1nCapT+tJ/yctTBlBVvVsGWNelTy+6qGKQ2scYEYZJIvIfX8a1MalP9lwwFUnqFYO418egg1PQ21OA81NcPnNJyGcpbnWVtDZwWGuoBzCuKalmtnNSGMU6m3ZR1UEoDBwQLE5X6I5Tx5jLKzWhOTfSKhl8nYRYcTeUtIHV1yPeXSk8KEjJwlwHTmoPTosVvCIjwdOtUUAAmEis7nIBsOCvLIbbQ/4t9E4k/mcJH/3FkZ5MYCLlkzCOFHhkkbUU0QWBcJPwBeTYHnHkVoOsD9nCAS3EDT+R+OCR9g1Q5UaslRhnqwCVwabCIESAJ5XlNKxkIyhrzsJ1xIJ1o26HC5LGiwDebgn11q8ppzG22USU6/siY+B71MeshICrXefC043wY5d60KAOQzp5rHA4C23CrAzxgS+WldQg+meAxLdlmWA8MZuGwAlw9CMEplOSCltghqCpVxIFgUOct/k8oAlQTiGZ9B3sj097anzvyzvgbqmrP2TRlUI5w8a7YB23rrOAcslCZIURvrUBnrCaUoC7+lpfWS92w6rn1/wNYTAXJC+IX6yOl4DxLiLOPpWnaVBF+lJBTBE3/+uErBy9G7KBWJZjBnF+S7ibrYhE5iziUfaNCEkorUpHOyT9f6LZYKSuUt0iC32vL6amR43h3VJkxBIm/OgQ0q1H7XXNTY209qksxHN+EXAtxMGYI/JxRZZNuQ8ZfUxWzDGjirOAvQq2VI5m+jpIUnZ2UsStdsXRhYjx49euxYKKV2a8IPQE/4rQCPecxjdnUTemwnnHNB+nHdunV46lOfus0ylW186EMfAgCcfvrpuPjii1ct03p3QV3XqOsaj3/84/GjH/1otyP8AOCOd7zjrm7CNuGVr3zlLj3+nk36IdZLWAoW3cQfEMk/wJNnHRl+gjRDsKsdwPZlOW3pN10LGgvE7EPHElHs1MkmazS4SJJJjYWQtSeLN4AX0Vkes/Q8adHcj4S0euJLnCipo1sXnIXmC7KntSZEDqtdP0aIKXFIOV9Lxjo+VwMgk7ppnlgj
2Ga72gTfcg587wAINWJQ+0418dwEacakON/ku4z72nnZU+csRzAbqcFmAtknr1bqlCQR4OLUcFke66JJVH+aGSjkYLEZNBjC+romXN+kZjLM1nBmCOgSxWANnJe9qgioCQCUzwB0yL0wpfGsrfULauUQnAnsEGEI4VcoCoRfg4BNo9i3EVsiv9PxYqGWzNoT14WMsaWyecO+kmjpsC/4e7+jQSwPrKB8HRvKhuz0C/tDcO7KfWqtCw4elnDL2EGsi+i0lPsU4PEBRAJQMgKtdwY6B2RlJADrmh0nxSjUwnFZzuMNYAlYcLaeZPIBtR/yBraqUc2PYKsa5aZFVIs16lGNerGGqQxckrZDmqA0QeWayT7/N2mubSfvi5kCqsgwWL8G+cwUitlpDNYvQK+ZhbOGI/YtS945a3jeyGoM8mmu6YcYde+gYJzh8eoz/iSTTxGFuqO5VoGYzhVCbSKusxXHbciMrsqQRTlxzwOdzvMYOFHGOTQhap3KvBM5D/MGqQxkK+xsKJ91ucP27/pox70dcld0XWkF5ojSORdAQ0qyUTc5mVOtE5nxCPKBPkLgF0upQmL7bK2lwHJ8vNdg4nhndVAzIG8D+FpzQvRIu0PNLCEL/ZwgdlE7007sydSpTfDEgo22liOKx07JotD4JCtc7LCwQxX2K9ciJTUceRJQ+Wx5xGuQ/k7q+TWy7RKkEqYUbDnniapsSeIPKovBLun5WHC/i90lBERKMIjKAsD1AElBD2JtOekF569dF+E30ZasgMsGwc4npUF5AZtPwxXTsNkQo9KitJ7480oJnE0OlH6w1xmQ5UNuJ+sj+meMJ7dk/8lzx8nf/hkPa4FyDBSYtElXGantUtYWxg/KsrY+sCoqdHT1XWOd1Lox5V5NyUJyriFfK0Qfq2FUDXWMQPLJb31WmFMZB+Il2fWhP1sqJE4lAY++H0NQoSf/tlRCjVp/NezRZQLLUqT33kSFgta+2vtL7VQJmkzlZWsv3T9fGlTWBmlWQBQ8FKZzBZsDteIgqkwBKmj+IgaGShvFtgea6yoh35NSD2HN0zVOXSvIMYU1gNaQ2o3KGl5P2RpwrSALf97ORTlTRRTkiXcmetuqR48ePXr02DNwxRVX4Dvf+Q6e/vSn4w1veMOK64HOz8/jS1/6Ei6++GKcddZZO02ecWfh//v//j/87Gc/wyc+8QlcddVVuOKKK3D44Yfv6mb1WCH2aNJvOcIvlSIUqOSmVGG7mMUkP9KAlEZbMsq9SyJzR4DQIiSI6+AY+OhiF+U4jY+KVqlkizhKTAUaz/FrOQeUYyajAI6uLIagUES9BOmk/liHI6lRXySVM0yyBUVCqdNX0Po7lbfRFD8TyGI3RH0rBdRNSUTIOU8cLJKTjfd+e3YMeaJFMv5Cw5IaE74mWcjeku1NDWQVO6L8vsnZ4LAgpeGUAZTi92nbPMFD7ej9hPgLNd7ga7vVJagccb20umLSzxq+hraGyysmfayBUxqDfIhBzpJWYx+1ParJS4ASpnLVuE6SsSCyVZIVlXkiZaApZkcBUb5yBdiae6hRC9JD+/t2MruPJsk31008t8coDzsHWO4Dx2+DI1sci6kcrpDa4TWJjAc440yBQh8KYUVEUNAAdOgEyiJBqMg7O8XJkrNTJJXzDY6XoScC/b3tyhFoOA/UFWwxBJUjqHIEYI7r+JU17MhLfo5K1KMS401jlHMV6lGNcr6EKS1MaeASxwqTe96hnHiymPDj7/KZHDrXmN5vDsWaAoP1azDcdx3ymU2YPnAEO5yBHi1AjeZB07NQtobLBsiHa6EHM3B5gaKyKI1DYVh6j51biQPQXyuRqhLZzkIThprY+V6NmDSt6yjH25IlFjTkiUlxpgY57xz32ybSxmSqQLw2HNMhKCLj8k3+dz167CkQ52p7HdNe2CifmQ/EDD+A76VOkgDiwI5ZLpLt19jXMg+E1ba3hNQoDdf9LLSQCDZmlBHXc9O6CIEFmZIgDsJU
xhk0qlwIBIPLh4AuMA6ZYfGY/Bzgs9Gp3ZNmEIfjispBYvtIFpNkxci8I78Rws/FfMQue9URoFy0eNp1CjXFjMsuwk8kOEkB8FnakvVD8ry28Flvacad8nXWdHfQheVALCKRu3Yxs6iOUqfB7nQW0BmybAjrA/icD4jTYp/IHA2WxA/lFH1fOsn4IwWa5ueAUxns1DrYwQzmK4uFymK+stg4qrkOrXXINRN/mghGA1pZzOQZsmIKSmo8S4Cd/7tdM1auiVuc82SX9FUGs/6OHES3g1BZ4F9/fiv+3+8WUGR83HoJIsX5fkK7Rvcy6imKRBbd24w+YAamBOqaz7cldU9Kg7lcXx94MGT7Nst5ndKwv6t4bU0ZbSLeUbMxPkBNkwJC5is11nntFUQa8NmtIuHC79pUTZcjqG2Ttu3QrnnTAUExpfIEX22j/PnIWFTGYq40qIzFQmVC+0SyfypXmC0yTOcaZqhRKCYvRfmg8LVIg21vk8DQlDQVpZjE/iFffxpAUCxp1F7PMpCkUtdVIIOdNUz4hstjOZjEq3mQm/XPFT730jjMlybUeM6VQrU1ZRJ69OjRo0ePHrdLfO5zn8PXvvY1POxhD1uV/f3oRz/CZZddhqOOOmpV9rc7wTmH8847D//+7/+OtWvX4vLLL8fv/d7v7epm9VgF7NGk31Jo1x7j9xS+6yL82ksz7Z37IMIuCCTshESCG2Yjw+LQWO+YDnUjJOo8Ibm8LAuZskEKAPDklILSGiCfNQQ0M/0mGtMkAkPR9Y6IakqYy0lJMGrUTwNi3cR0sS1R5xNokX3UckoFOc40Sr7tCAiOKwVYwKFjH4H4y0JGW1i4hoxBlojirLyMpaGsgfOvlBXsdDC+np/v5zbhFxbNAKCkdk9SA1B+W5dwtc8o8lHRTEiUXtKo5Ch4f95aZRjojGv3aATnZUVRMkeukciUBRlEFesdNWo6kuqO5N8BEAcTkEokieMuSi3KuNdKwTjvMLAsZQbLUd3kKMjNBj4HDmoJl3J7VEs7pA6VcQjR1pJJ2SD9HN+7NnkvWRRyPumZEhAk7jKVMfnozw9Gg2wWnb22BowOzlIiFTJGFQA3HkUnmDXQeRYkOwFwDb+q9gSfhakMTGkC4VePuG/Tenqq42aUzD9SBFMaZMMMpAmm8vemUnDGQA8LFLMmzBVkDLTSIJFWcxYuG2CqWINcOShy0MTOrkzFcSq1P0W2cyrztcJMCbUwz31RLsR5r0OOE8m9zRnADlDsQHb+e0qyTbYJkmFNdvt+v1J4EnaHYUvpET32KrQd39bFICQZ3e0gJUVo1PdrQ4KnZLsdmd8QqcYmtCJoy051TTHgi3/UynxBDDaQmlyZIhTkgzJMzOh13kYQwk8yb5R//nBGvWsGrXSoDDCiLedI8Zwi52QxGcyU7suxlGazM/y5kArPGguE52N6ntTKyGwcRzIM/b4msgB9ths/qxDOh+VSWdKSpbF1YsO5JMiiioSDr6MLU4bsRqcytrtsDWdqQNVQlDUkYxvBYum5S9+k6hiplKqoX2QDrhlYmZDhl9aZNY6ALF5fqaecidS9swCVIJscKwSJmGYWVF1x5nsq7e7rG64US41/5xz+3+8WcP1ti0HaE/D2R+sHIVvUX/M0sMrvbPIebhSrs3Et4rManZd+DPKPYhf4dQllnN1HWQ5keSMDP0jq+/ehNnKHnU/ORVUFYlnLcNv5NnbJl6a2Zooo99lNdAJJfyf3iU1sUvi1UGqHtiF2rwUrpnRl9S1UBpV1mBszGT2ubVBYKbTCIFMwnnQzzmGQKVgNaMW2Jq8XYrBDIPzkXjMJ+R6+94Sfcw3izsGvYVr978CBkJSB1y/KB0Z6NRM3XuT+VArIp0A29/YsE95cpxCt+s4W9a5QTu9tqx49evTo0WO3h3MOr3/965Hn+art87jjjlu1fe1uuOSSS/CNb3wj/H3NNdfgPve5zy5sUY/Vwl5B+qVrynSxlRJ9wPJkXxfx
J3DiCEqwI2vNtBfnDXLDWV9Elpi8cTGzCGE757OJCIXIQrWdBhJBK9lomZflTIg+snUzo88fv4GlnBFCvvgzUArsUIInPhK5wyCNpShKXIVjJR3v0Ig2bSxAuzL9JOPOf+66HGPtc1FohPpK1k8gNUnBeHlJcVhqLw0FU/v6bN6xMLBQeQFkeai9FmSMfH0/V0UnYbumHykFynN2jJkhL5Stidcszzn62UdAh995ySxVgmvO6QyuHrPMoM6R6QyDYojaZ6ZVifQqMEmaC/lUaALZGmo8z31D5Gt1EJANu/t0FSEyPkJqk89+a8gsNjIiMr7XSSGXDK48C+dd+vta8lPINZ3alLzKMdN+cbKN7zuJxK496Vfb2GZ2oDlPprpArIb9hTkqkuDa8ue5z7jJVcFzQYbgPCRbw1l/D9TjKAGaVWFsuHLEzrJsHk5ruLpCBqD02XppjT9Tspynsy68AuxgEee98YN/wpnvyT1NwHBUQxcazjrUI6kPaFEtjGCqGvn0FAb7LKCYvQ1qagZ27jbOWJ1dj2x2H7hsADWcRZ4NMRzMwA14vFbJRKxl3rAlqC6h5jeDykVQtQC3+Xcsbzq/KdQ8DI5Umfcyf/9oDRrO8D1UDEC68DLHAyb/0jHlpfYo8059lzej4ZOxFyLdE1mxHj32RLQJv9QJnmbpSUAPtd53QRL+5DkqEpI7ypfatVsCsLZQMDlw82KNymfQ5Bk/68XmChnCVKLQBRNK2me9mxJqfmOQ/EVWhIyxygKLNWeo1Fb6BRhkMk9QUJbQieHRkPUMjU2Cj6RdEujRSiTm54NjIiwNbAgH8L/1dpqTtvgsOclaEpusYe+2FRNkblMKcApAMh/6bQiAIwsgKiXwHJuQh8nzLEjRm5Iz+0wFqpn0c1UZ7CCVF6GertRczAZrYMDBcSEbC0z8UetY8M9OmaOd8ln3SsFlQzido86nseiz/BYqi43jGpvLOkgr5orCc2k61ywt7SVqB/nA1+XzAXZCpgAsre1lLENdXiFPFD+nJ4LYsPQ6ZSnIb5b73YY1AxjrcOMmbqdWhKlcI/eEUXiUOVaH0KSYhPGZZ0JMtddfIvWrCVyL0Rpvs/E1teUo1LyGNd4G5utI2pN+iYw9Daf5ukignChtCCHVFdgTOsICjm1YJ8SffCf3VbLWa6wJRVXBB/KkaKwmQkaca5DuqRqKIgWXD6E1zw+iEJFK60403duXlYUfiwa3LlYYe8JvoWISem5UY1xbLJY1q05Yh6kiQ5EpzA4zrB1ypl9lHaZzDTeVw2QE4xRy5aA1MTkt91u5GAMJnQv1F/m0/GvoQi/FmiWkeWveIV2HbFfuV1/L0RpAZG0BqOE02++mhsqLcK+NatOQLjWOMN5SjY8ePXr06NGjx+0SRLSqhN+Owq233oo73OEOu7QN1lq8/OUvBwAcccQR+Pu//3uceuqpu7RNPVYPezTpR6mzvEHgRbJPNbZf2nG1FGnEv1OdJN+O4v2Wck6lbdLEZCQRwdlY70BaL1HkDsS1L5yFywfBmRTqzoTFqJfrUZq3BxoSOhOZdImD26mMP/cR3UQK0EyyaZVBKRWiYS3QqMEghFKotSXOtfRYgrS+hIvOmoD29vK3SGEBzUzELvJPiL/UqSXEn8/yE8LPef2ziehgkQT18kdqMAWXFeyw8s4Nuzgf/663XO8rXH+lIJKh8FJHlOWg4QyQyTXMgjOOLPia6KS/DMspZTpDpjJkSgWnTThM1z1iyuBgAdiZ43QexsuOgnEN6jepJRilvqgeNcYON1Bx9iYQaq1BF8g98Vl4yVOptZRKK4nDKhBLhMZ4k2xCUgpe5JT7zxKIHMhRIN/F+QLFDlUh9Qwi4SdZrY6cv9b8ewWWtZR6l7KdIsXzUpbFviDF1zZx8pJjwS0S52JW8XjRZahLQsq/auWJOgtrFLIpHTL3nHXIjIM1FjkoZP2lxJ9xLvSX8jX+ADB5aBzqxRrACNmwgDOWj1PW0MMR
Ci9X6xY2w25mIlCt3RfIC7hiDVyWQ+kiBjEk0eZULrJzeHEz7Ggedn4z7NxtsONFuNECO5ZS6aksj9kDU0z2kTF8L9UVaHpNnM/gCd6W1LHTBaD9lZegg9Z808AuIP0UUWdG5mruv8fej5Br1gqMksCHdDue0ziMgokj/77lzJY/eT6cVFPYmSNLAo1mvMx1ZR0y6wOmfN25VN4u15ytowjRMe4zikNwkZ+njA8AkX/keLIX+VSBIoRgIW6U4gdsmmXsCUAnkpgyFwGcnSxkAxDnR4Cd6XrSAd/VDyK1qtGRTb81c1h6DNe010gkPf12jmJ9RILywWwAoeaBJDLKUid5vAjr672xZLrPFFJ18jzI4EzJ9XJ933Bgz5bhWvaC0zmQFV5GkZ91lWUZReuAypMNbL1ZVEZhRDaQKrV1KEgBHZmSgTyxlkmPxXmf4ceKEJQXwNr9YKbWNUmTrTiPNpb7zS0jg41jfjYWmUKRsUKCVoRBxoSfSJcqolDjmTPTJOAQQTVBjsf2k2NbQPn7BUCa5WfHi3DjUQiAC3a6jhl+8Bl+IYApY8lcqefXINlSmW0XZW1hap/JloGcgjN1o85jUAPxNmIjS08IYSEW05p07TVjMkdIln9AUt8XInWrCyYAHZP86ZwpEHvUQuRqHUY1Z/cJ+bdQGcyNmeybG1Uoa4vF0gSJ1rK2KDIV1l3GAgNdw1qHoVasJOGDuKwDS9Kakm2hagGoa7jRPGwrA1XKFXDQaM73Yl401pMhYFL6x8+NYpc6a4CqjO8BvgfSrEzEbPBGn9goB72z0dtWPXr06NGjR4/Vwste9jK8613v2qVt+OQnP4nrrrsO73znO/GsZz0LWbZH00Q9WtgrrmZ0TkySfV21GBpEBhBJpC7SSIg/oOlk2YUQJxUU1y0zcEEeS3w+xofRVxYcmQ7AuhlQNoQzZWuHrchtoNkf7YVsq5+I6riotX5f1vgLwVlw2i+yHSaJ2ECqdEnJSFva0lHJNVvW4d7ej6zZA2GpIxGYkJjhbyEZJDodCeGHptxRoy9l4UuKHXJZsx91MWRnj5AxxkSZm8SxQBlHp1BeAIncUXSKZKGdE069ZNEML7MFU3svTA3yJG8m59shzRog0fctB4cbzKy4nt9ScMlrmuWnCN4ZWAaHIFXjmPHZdS/LNdFFICpdPg2lNLLMEzoq1kICkrkhqXECgB0SPoNQ50MozRmFnjpE7tgd7oBQx8k6B5swZMrXTCQgkVN1yDwJ7gAfde48kQjEunquEUmvSUGrAjrncUqVPw/liV9SgfSjumQnjdacSao1SCuoPIM2FtlUfCQorWByA1MoOMPtd574k7YIEQgAuXdWKa2gchXq/lnTlAkd3zYHlWcwoxLV/CJUniG/bQ7ZsEA28zvkM1Og4QzU7C2gLI9R/p6sC+PDn5NdnOesvs23sXNqcR7l5gXYskY1z5kLzlo+xyKDHhbIhgVUnkHVngT1tTGpKplYLYZBEs8l8wPX9WyNE2djtmVau6lrHPbosQdAnm8xyKBpNwHNjBiRU5RsFdnWpNu0DCfrgwTkWLuyEDoBmM0VxsZhrrKorYPW7NQOWdU+c45UDQ1w5tJ4HlQuQJXzDXLOIZIhxiL808rBOYINROjS5ywBWo0sY//cSSU4U/LDSdBLI4ONs5Wlrt5S5J9co3ZGOzdmy/OYIwWC9TVQk3ZNdDYfv/KZYkx08PUvvBwmyT+fcYRyDLuwOQZwhOCNvBEA4kwJqrlustYc1LOUb17a225bqFGcFXDZEFVlA2lbGefVNVzIPtIOobafUhRqrVXWAXnW2dehDXUJV5Wwo/mQQSVBXXb2ANipdVvs9+2B3Io3zFX40U1zMM6hyBSmigwDT/5N55ozF7W3VXw/8jXjnrNe2cC6Zh1AVu5AyGRt3PohYIsVL1BXQfEiBOVInXFfy0/NrIXTLLUKqTcutq3sU+xr/7cEJZBz4fkMIGaduhjUCaXgfOBUkMKVwEhf
v3dizRHOZZJ4bEvGspxtwXKxmm15AqCzIaA4iEpqRsuIDMob/m/ub4Tsvs1ljYXK4LaFKpB9m0ec4bdYmmAzC5lb1hbGOiwUBlpxcIMQu1qpUGuV62fWoHoEtzDHpOziPNtbAn99kBcs16mGnOEnkrYSPCrrIIGUBRC7VCRtK7ZLASbEnTGhlnWj9EPHM0JUbnr06NGjR48ePfY0OOfwgQ98AGeccQYe/vCH77J2GGPwi1/8AmvWrNllbeix47BHk36pFFSoDYemk0q2A1oODIe4iAOibEsXWZA6eXcgwbGtLi85r6GP4qzQlNwT6c/SEpQqQINm29OlUiASnQ0yVUFiyVnOqGovatN9kQokUiDJVAZQFbKQtCdXMqmXlRw7HKct1dheQKdIM//S/aUkn983AF5Eho38QtyqZJGaOMQSOZ40clVqmExcB2sb4yl12E04PAAgGwbHVloXI9RzSR1APqPP6TwQVyAFKxG0QkqI88ETD2TKCQIstCnJXHJdztZlHFVbm9m3PWO6DYk+TqXfmOQbsUNQarZVC0BVshOtHHEEvUippiRqkiGpiiGgFGdipte569zb48+PGZdz5LkqppDnQ1Qa0DU7jUvjpVMtOwKFvLRSj9P6+pUOcDTZWyKXZR2nQ7QU3CDkn9TeKZSG1hkyXYCqEWD8/V5noCE72agqWU4TQDaz2NibHpXQOctv1os1TMWSnynZZyvuS1Nyf7As6KTThTSFGn8654xBa1jq0xoHpUuMN42gfG0UnWdQRQaVZ0zIFRny6Smogv8mn5FIIkmaZArWoxK2qlGPxrBl7esTetlTIPwuGxbQwwL5zJTPMCyQA6DBECqpq+lyLy+WFyG7Bj7zwxbT/z97bx5t21WViX9rrrX32eec27yXPiEiEgkQDcIoCSCNIj0oUgoUFlqkSi3SUBYIihT8EFskKgpKM0TBZiCoAUEp0MSUoyQSC6VREWyqLKECCUle3rvNOWefvVfz+2Ouufba597X35eXF84c445z7mn22c3ae881v/l9X2R8LHhgGoWC+BojYHQQeTrx8byHQ+luf52S5ftTt+xlnN5IEsoLkptyhQroQKL0fBeZT00APD+XfEUaZwQUlLxNfE5PZxhSWC11Whe+vhuodp7ukyILqVwb2e9tJyt4mPspRRDERAnNghSDKQqscJA8srL7jCLBJdL/SXFA/pciOtAx++S6F9cpROAP3iHoogM5drnXLTbH8QJ2aYZbbMBCzKGjxGcgQGUrn8DKmBfaCBKx75g0voTYwBLZjbZhqebtjSTVnKIaY9ELGcLgilKBAVF6fZF5mO/LmAMGcG4kUqGBdPJk7NiCfMwKIhTk40+GrsHOecwV0DiN1jPDH2rRIS7mhlr3mE1Jfjrlcqf2fjGzAXfPLFwIuGBlgDsmDQMqK+Dt0wr7hwXWBwaVViijn7PO5lYhREJmPJaN6xrhCgpQXqGM3tEEwGiTPC9VCPAiaWrFB5v3VWL2DcegagyUA/hyhUG/cpRyL+VdAogZ3MtycMn5ha0v9yqyCLpkAFyFjoW2GAL4ZdKy8F1und5PAGb0QpdtypclzUplBYQqHmcTve9YXtcrlZq8Otnk7joK2d8+MNs0+vbNGoftusWscZg2DrPGobE+AXwAM/00KTSxA62xGqOSt3ul1Jhbj8pk8xRv4zbP4SabDPpNtzq1hKLI2H3E9gVlxezYYsigLBlAl3zeZ6BfiHMdaINgGxAZtpboecxzMxofNwb/Sk0YaGYGjwqN1gW0ykdriHv+prHMrZaxjGUsYxnLWMZexF133YWmafCa17wGN99882lrgn3BC15wWn53GfdMnNGgn8ThAL9dO5UXHneAfbsWOKgrlMQOxb3uLTzS6b0beCKvpSINxZ7lzGuLJXiYCZheC0jG8XmnvlYqFqSIvU8WGH8C+KkMjMs7WVXqNu8YcdxRSz2WJBd+Ytd52iVMvFEAAQAASURBVJiMRZj/ZppUZ+tw
tNiFDbiD/RWZg8kzJ3ggqK47eBH0WWB/KaWSLJmOY6z3G9l+2bF6eZFBtkc8RnTRYxUAXbHM65In0aQREjPN9LeJZJ+FbB92Hji8SizHQ7pNy1a7rGdiParOF1EA0XQUjsQMxIkDfrJ8H/rskOTjJ8BmZFYp1zDgJz6JswmzKFMXecPFmGy9FekI6mQyUgIIat2xKvPjlb9WDnj8eIdQxI5kAKWp4LTq+QK6+NyFhVZudJ852v7w2fkLsNyTyEBFEV/eR1zphCmqxOgEAGVLLqQJo805FOMhf198VErD7L7GwlQMngmox8BfBAB9SN5/wvJbBP6U7lh/eTDb0cEvKNpKAUUXIjNKPSBwcVniQRi8h6ubyCbk1xaXm5Ylv1EWcLFoE5yH8p4l4yLLIBVh00K4kB+0SZ5fAvoxsKGgFTN4Sm34ripNDIqw44AvYxn38liUTe/lUfGefjTg70hJkkiRi4SxyLSdZsyvUx2QiMV5UNZykTf4ZHLXAHp5DkvSMUsqRFafAH4m/mlSXV7ldrZ19JapO68siUAmAjC+u95QXMcka84evynnid56UIfPU7of2AnuLf6/Q4Zd8oTDvQ4klpz44Ko4gNKQkXt8vK/7eE/nxejDg2IpB1MMBu3WMJath0rAXwSOfPd+rvIh8oIMiHloxWNWK5VYgLxNvD2xT+fo+3chlClTjrbXkedVrQ+oHTNtR4XGwDDTqzIMrhS6Y/oZreI9rg/MJ9UFdGBfkvL1KrFanQc8YXdAfFGZI+ZjqigT4BdMxdYEumSmn4DHor4QzwF4l/LVrvHPASpKn3s5l326jvVikQHruzwzb0ZMjXqSWy8qduRjkzTUgIE+ZQyUa3gOkLMG47Ui90EF+kzpxfAJMN/5J5E/Bxj8mzYuAYCtNKWFABFnpdj4CW9T01zKoWUOobvxqZIffAT5TGyUEnlcaS6QCNmcRREDm4H3ze4bytdGTQMUpGCj7Cwp7oIjUmiWUpjLWMYylrGMZSzjDI0vfvGLAICPfexj+MhHPrL00VvGKYkzGvTjyXgH+GlSPcZaDubtMGDfAf5l7LDdWGU5QwpHL07tBcsp/fRhXsundFoBWiu0SqUOaiACfPGDUmDpCtYd4FcKaUwqXmLeLp2ukTWmXDexXQSSEP3FxD+NgazoJ6goeQgqZxLg1GON7QLOKdsisdbkM2kndMBUemk3cDdnugmYhm5chNghnwpjskzxAPGWu8GDhyYDTcRHQCb/0XcGkeWzCFDm25ozCQ8nx7lDGjBjGiZ2mXxXxm9bR4/AGgpRiqht4G2DUE93st7yzmqROI2yj4sFJ9kSZQrAFKDhSirA9LxTjhBHAq53i8aFBJXk3eWp0CqMKsesKh9lHUP0dBNmQDupo4xkDd+2sHXDYFEEjCSEDSZ/phr05CCpMCw7WXZyUzAFaHUfaLQGDEbwzRShqDAqxwimhDMGTZT6qiMjoHHM/AsLe03H65iERwAFlu+VPZx7SUaojaVyHRfgGs9FvMYplFrBUIWyIijLhTJFBlQOuJN+NmGG28oE5X4uqjJbroFvLFxre/spOBeZfvx6BwjK+33AjX0B+4ChMAfbmUVwAa51sDObQMScUcjL6O+j/D3vw46CmFaAKTSoIJQrBXShMVgboFwpUIyZPWiAzN+v69bOz4H0p4QFbBCKCo5KzK3PGJzxd4n3u9N8HArxj8xlwk6DBBVp1V3PT8XyT5ehzjJOeUg+JU0ticmWFculcScH/oAIBkQJZC8eplkxG4iMFqgkU7kXsZc5Vy/IIAxWOpUDlzU+UQThxEOOTJK1U2BmX6UJjhgMKSO7r9TUMYPbugO5Uk6jusYeUZqIEt69Rhjqg1YAIuPPdvlvbGzIm9ZALh1L7tMyPVAoEtCzZq5dmuWAXUGbsJsSQJbzpMYJx9dw63mM6ABUOsAQ4n29hp9usmxzU8PPZ3ydLriBJeUwC+oMKbxnwDNrXErHSFG6f/hAKMsRxCM4bWMc
80XMh5zhZRtiyfU6MqfE48+FwD5jvmtY2jEm0z1hARgqK367rJJPs99jMEOBQaSNuYMLwErZefWNCg2tFAaa5R4rQzh7WGBgFIaGMDSEUnkoN0/HvCwqaE1onAL3M4VkMeAQAK/QqnhsPVjqNIJBQXdefYv7QY1XQdUYYbSOYAYI5QihYF/soMu0X7UuO6DbznkbmzimM/CN8/noq5ifT8EDKgOm8pxaGHwZc195C4j/oG3hZhOWF4/SrKFte4BfaiqrxpwzEkFV8brpGghTN8RGSx99/bipkJfhVY4e90MYbjo2D8ifvJb7+AFIoGBjPebWp/Gajw+FeE3ylsHMxXke0MuBabwGXwwRqtU0JxDgT65VfXUUDUUKuqqgsQrV1gjNiM93U/Gjd6k5UrkGaGYYDku4QsW5l0HjAmbawXugOQ308GVutYxlLGMZy1jGMvYibr311vT8Na95DZ7+9Kcnn/PjCe89tre3sba2tpert4z7SJzRoJ+EMPwS8y3volwsThwJ7AP6BY48ZHK5W4foLnFPpOy7/UYnxcXv+hB2AITcWR+ipFb0BYvSU0ahk06UR5n8ip9KNhHcuVK06/MeOCR+cogAzm77Mwdtd2PRyfIXCjqHZWzK21JMW1zX4GOXd+iDlVIkC4FbcRcL+VlhIBXusvULrP3XGdtnfhdhsVCVFZvCgq9hksmRIqMw7oRJGSVTmb2ooXyUyLGIcldtktDpMd+EuRY7khPbbbdO8+EYVFbwo/2Zd8exXUIOB1wfLjyQCh8B7CET1C7f8ZY9QJoaoZ4g1FP46SbCvMb80DbaSY12MkO7NYWtGzSbs+Qv55oOmOpLQSoUQwNTGZihQbk6gqlKDPavohhXMKMK5b4tqGrE58K8ZvAPQHAtM1hcC1UMoE0FSwoKHi4wI4w9cFRiBOQRoiSvUipKfuYsv9DbL0BmUYmA4IBASAy0QitoU0KXhos3ilhOSRF0LC6G0QyhqUH1FME5mKbuzu84RnzTJhAvB/1cI8/j5xYAwOA8g6yRjdfOLFzjQI2Hcy4Bf67xaCctbOswcwFNbFpoYyG1iQCfCx3Ql/u46Mi4HWpCNXcYaoX9PqTj553pji0RqCiY+VeYVGDlx/g8/gU5V7PzLWdcSiErxIEpr/mgQFmRe1cm7TKWcS8OyacS8zyXxo5NPIuMdsm/5BwRsC8g7JCqowzQEADwpNf5KO+fNCjYAwXy5h3Vyb7n93PXoNAlSq3SPiliYd4ofl8Jg2hHbtNJdwvg50BJ4k9YVmmboqxmWicyEfhDt2znk+pCCFHVgDSzbyJjStZTgL/+Djw64Jf2Qb7PsueJ4Reya7pntnSnHds19iCyqPx8xs9Ntr2x0UzyKsmzksR8Lsvfk02VfU2picYGxVKgOgN1I2hYRhZSqZnJqhRLDJJSaH1IY54UUGRAgDTpKB2B4Ey2PknFkmZ/wrKCGgyhzrk43W9gTo2VAEVwKQT2EdYRmNYKaEklht8OwK+Z7vB11kWFgeGx2vro5YgABZXYkTKOXAB03BdBlwyG2ZaPpY333cGQm6jKCmEwjiy/IUKU1RYJ0RDEN5DYkzmuD3RUXYjykOIBqbTumKfeg005s6bBRcBYIr/+edvl0PMZe0zathubItEKcF4d/YJTE51tWcY/n8t4DygLpUtoBbjIlIaK8p4q5v6I4CkxSFtolmAdlhpzaxK4Z0hhHuU8F5l/AgSW4uOnFChKtvLxUv1rUDY3wKCCeE2q0SrPFVbW4YsRQjlEKEYM5hYVQgTUXWzcc35RWYbnpAUpmGIEQwS0EUwmA9i62++2BVQN1RQYlisAPBotDXIajfMwe0+KXcYylrGMZSxjGcu4R+LLX/4yvuM7vgN/+Id/iJ/8yZ/EgQMHcO655x73cogI11xzDd7ylrdgfX39FKzpMs7kOONBv8Vm2PTvbmDfLoyvHSBR9rled3Be5LoXhwIX7OKUN3WNSse9itgVxUK1gH0lKZQEnti3
M6i2BrWzvowNsDvYFxliIWOv7eZrk4A/2b8q86w5msSU+N5l3lj8m/1jGha8s/gzokeUTewX2YFSyIzrBem6J8tFCZWVJQWQE++8WBDoFhj3gWFGXgLIdJG6YC26AljwiMVQisxVjaKsmLWKPuNPOtRzpqZC7HoGAF9GIJK7ZRXxSBCvO7dxAKFl5puEjhKKasAdvMqUkfFH6fgCAMUChlu78JiA794hxPEVXGXM8vGIknGB5WcXwVqR//KTLYR6gnZjE3ZSY35oC83mFM3WFNO7JrC1xfSuKdpJi3ZmMZu0aHzAtvVcnIi/p5XCWCsGkYYG4/NGKMclxuePMVirMNi3guF5UxTjCpVtmfVnW5D3oOGY10kXgBsBpkFBBqYcwQb2umkj+6/1SEVcAfNSg2/oS/ICnXyWPJegiOorMJCoFYOkLgAKHgUpDIbrUE0Br1k+TJUrLJ/lLdDMd5eoitdO9tzxCQjcTdKKmXx2h+SmnczYI3BSg4o6+u9F1mCU5LS1xVZtMXMem9Zj2/p0XBrP4N+xxFgrrBcaK4agVYtx41COC5jKsIynJpYLjX+mKlPXevobMJsT5SAVkoVx43133h0u0tWICMGrro59WnxnVJJZPSXLX3aj32cjNQ95v1N6UhFjX7EBahHsW5SlS+8DO8C9nm/pKW6VOtmlJwCJDDPn4mtKGniAHtCk4j4bmjL9OitOtAlY6vkXA1lTmU5NA8Kasa67Tyhww4iKkuzSXBBId8xMRdwR52LzUPBZXhM9s0SO3ZQ9v0aAj026fOySEx9xHy3cp2WZiQnm+X5rI9tPBemrkv0UAGsR5jXCvE6gikLRqRKYIuZVJYIpUp6V2FzSkLSQ20sTnKIyrZMMRKOYqahsE/MozgcHovChCFoxy7vQLIlZWwb/tOr88ABm6LvAvoLc9GX7eSQQgUsGvPTqfjTnfu1x51ZHPB7oj3vxloTnxj8VQbmBIRRBAQUDmsOCMC4i4Bca0HwG2Do1t/EG8tygGq7zMAsKbY4ZoZufJSYwGc6LnYUajEDeIZQVy2sbvh+H4Rr75w5WEYoKlko0tvN/lLyw0Ao6gAGz0nS5eAiseJH5JgYXWaGL45cyoDy+lO+vJBUqeVJTI8wmrCyxdYgB6emUc54mSqlrkSZvEUwBkjw85k5KF9xkljWmBm9BcbwmQDgyJqV5gqKMe6HZU3KgCc4Ao5K3ib38HIY5m89noDqAUakT8Mf+lNFblLK5tFzXZWwCaYyCNGh1H1Q1QhiswZdDBmUHKwiKWMkik7sNcY4jz6X1k8Dyr4YUhqZCOaxAwSfALynLWJ6nKMVe8OOigvWAdhE8BaE4gW74k41lbrWMZSxjGf2Yz+eYzWbYt2/f6V6VZSzjjIrv+Z7vwdOe9jR88IMfxAUXXHBCgJ+Ecw5PfepT8Sd/8ifLc3EZvTijQb/djC5dAAMDuux1pC92J6duS6V3l15bnByaCr4cAsfIbNrL2K0cdrgSWddZq9JEm4txDAhAcweodNqbKOFlXA01nUHZGqre4knXfJYkXnjhmaeZdFlLQVzYMHoXuZzd4nCSTPlrRICn9BtcZMw6qfPF7XIMldY7lrnDrw8d6JekgmTcCItQcMQou6nsnIGPpt6xfwQ8QFl1LD8zAEyJYCo0nkGexsUJ+a6T4cheIqAkBaVMz0slbX/OfFIErSsG/3QBVbTM1tAl+961ncynrRs0W1NehvMwFUsfFs6DvAdKz95mpKEKlrJS4nV3gnEyU1gBw4TZmMDI4LtCTD2Bn26i3dhEfWAT7WSG+sAmmq0pmu0WswNTtDOL6V0zzCYttq3HXY3DzHnc3TDDLD/jCwWsGML6tsZFWw3WC8LaoRrDs4cYnVPDtxaDfSss+5n5BYamhl4HA7zeIRQNnwveMvhXVGiIWNLMepbvDLyRSbrzCKCSDPOeFGj6vHRUc1kzEDB3gPVccC+KEUw5giqHXHRzbSdZ5zs5OGb4ZE0RC93usNzx
Du94/M9nDArayCCN7MdgW/jJJmzdwE5qNFtTtJMZTLWJdjKHjprCpBXKmUXjGaicOQb86iPtiF1i4gJK8ijjiUKamKm5UqJcKVGMK5SrY5SrI5RrY6hqDBqv8jlbjRPojXKQ/DPTPnANSl0iGAXjAetV8kZlUKM7b1PE65YUe5exjDMlGPDrvOZyBntQzHRTQCeDragn85kaNVTH8NvFzhRA13R0GlTajjsSU04bvn6SMOwXtsxbqNZDWUKgrsEm7cdFxQhpKqMupwqaZUNdBMla34lCqwDQbjtMUXwzrpc0I+XXecltvEOIwBmcjSDZwnVqUTFDXlYU0aNdfl++t/C63N8YBAi9e5mAUQUBal4DLuZXIuFpChBVUMMxaLQKvX42MBjCl2OWOzdVlI003T7M1yM2jUlDFLyFLpn5njfVIN6reR9HoLStAWcxKCpoIpSax3LjAgoqOz+/jG1Fio9Z4zygCYNy3Fsf5ZoIXALiY+eL4Z7fJxZHiMiOCnDGHpPA6sCkz48KwtAo7K80qJmCZgeh2jkDMNJ8pxTQzhjAA1AVA6iyQuM8Gtr9vugD56kYjBkEKyqoYsj5aRz73hQI5QpCMUCrKzQuoJ47WN+BRxLWs7S2DwqlJpSDFVZZIMMAUtzHi4zkPHKfaln2blmuCgE+5jq+nsBPNnu5jTQ8KU0gTfBVCeM9qBBQs0gKCj32tHe9uZyW9VQEB2bDUQAsBeigYIiPD2BQO49CexRaYWXAcp1NBPpywE/GpTD9Boawb1hgtTRYLU2UGmbGH3sgRqZwOQCt7osMPAINhgzGlmP4okIYriOYCnMXGJT1vrOO2EWSPuW6/E4aFyEAViuMBmNWSLBN5+fuGh5HADwZkPcYFSNo8oi85R67dhnLWMYy7otx++2344ILLjjdq3HY+MhHPoKf+7mfw0c+8pHTvSrLWMYZF4PBAPe73/1w9tln49Of/jT+zb/5Nye8rCuuuALvfe978ZSnPAU33HAD9u/fv4druowzOc5o0G8xZPLkYvc4RFotyjQCAJQUMCgVuZkXszuTL00KddFJON2LI5/+6AiU6CQ/pOLrXYEtyUzFSb1q51yQaLOiS5z07ZBV0mUs/pR9GcvDAXoL0pw9qU2iToqqB9Kp2MFu+j53ecgxDKEHTC0y/FLnd87azAsCoeve323dBfTwswkDHJOtTgYxFm74sei2kQwDfrqEDdHLxoeuI9YjyTZGsSHowECsliJEpErkEIgPfQlMhACnGPAuyxGCa0DRN4SqEdR0i7t18/2z6MnmIxPAOwBxG2JHvTLlju/vRRwO1pHu5l1DpDhDYGCtbRIjgP37ZlyIqRuW8IwSkr7lP5GMnDmPmQuYLJrDAWgDcLBlUK4kBszLjTmoIBRDA1s30HUDVzcwVRPZbw3QFvDzGWiADLyOY1oHgAiFqQCwzFuI8mDWM2Tn/OH3CdCd33nBNH/O53mAC/yPU7Gj38WuawWUpoIyQJCivu8zcJE9T4VPYdsKCBBlvsjb5PO5CAQG24KGY9BsgmI8AUV/RACgYgrv2MsPAAabDRofUMYmhJOp45QCIhQEXWjogqCrEqYqYYYM/vXYfaZkwM+wxOeOa1bwUJaZzqUuoUklT7+cHUBKZU0X+UE7PYBf7ll4Spbvl0DmfTUUgCQ/t6iSAGLBTs+ARs7cl+8ywLc78AcssNSjPO/xxKnmBe62/PT/Qm6Ss4S6BWSKEbnid6YosCPHkMYppRi8iqx+kcEUYCIE3u07MiEiwPnoX9Y1OcmxTE0bKgNsnYrSnrEBI2NpCoN8Mefa4UUs27vba7KdMbxsAzJmKGQMKG6q8g7KNTH3dDHnKKGKAlSNoYZjYDCM0o8V559RWnAHgLOY68n2588XGInpmCrPyhgh8L3SNTC6hCKC0yqus+RxAW1k5+R+vQJMBVPyPVM3QASLlXaA1p3M6h43FMpa5EBZ7C9K75HqQFdSiMwrhWFBoHoTqp2B6i32TRYZ
S4AZX1HVQLUDIHiUwxJkCKRCT8ZXPNfTOukya+AyCALwiBrGYAUWhEnL/rmSK+cs+3QdgTTaeJAhFKYEQuD5GrLrmPzeSd6Lg20537fsD+3qJqoXRKDYEYJmGXEX5cR5RdkXecc1K3ggKM6pgF5TolaEQAo+ejWznGlAQYRKI/kwAgwO+gC0xic2pDQW5lEaZsYJm3NgKDZ+qnS+B8X+z9AllFguKoIXYH24rwNlbUAdFRlEtUKOS9pEAf4W8ltu0GX1CwAYDUoEapg5bSk1nwXxl4/zmaIcwQVmKVoV+o1W91Asc6tlLGMZ91S84x3vQFmWeNGLXnS6V2VH/Ou//ite+tKX4oMf/CB+6Zd+CYPB4HSv0jKWcUaGUgoPf/jD8alPfeqklnPFFVcAAP76r/8aT37yk3HjjTfirLPO2otVXMYZHmc06Jfn+jnraefnCCyd2E1wu6JW7IQWH49MtjFI9+lw/bgni8fDzjta7PadY1pO8Az8gCfdPf81z+Cnauc8oWqmbFbvGoTZdvKAS79nCgZ9qlEsspQIxYCXqcsOxJPfyGWuFqVW8/fziTjFQpmLRai47BAZdjuACWEmRWAiLBQne1Kei8dPikGKMkBYwEHdk4dSsk7yXJhMTc3gX9tCDaruuJCO+6eKxvbM8Guch40d+1a6cEMnK5kKGnGSrxT7pHA3dpTgkUOLrgDoF8Z+QQqGDMYr54JszV26ipi9Z1uoasLdyBH0S3JElYAgBdRwzI8ld9cLOBJMP6Hbi+LrYiEHYOBmsWgl7BNmUdRQtoafbiFMt+CnW8zq25zCTlhGUrqvAcBUBkorVM6DtgnllL33cinJxSJqPmIWry3s/Ucsi6oj81WYYbHAA4rrCl5vLrJqKO2hIzNmcd+x319Am/0gkQD1DBQq9K99u9U8fEAcP1xoD7FbXAGY2ViIUwSgBCJYKL50vNBuuak4nxWHjByLCIjlUnUqelySd1DtFGgb+OkmzNYhhHqClYN3ot3axuyOQ5jdeRDN5hTD/ZtYOTjD6m0TjCcKm9bjizO763E5XJw/0LigMthfaOy/aAXVvgprX7WK4dlrGOxbweiCs1GMK/ZfHEam33DM17U4vhN7GYCycy462xaBpnxN0CW0IhRapOV2YZb4DCxJB3HZjb6MMyiE3SUMqcN9xlk+B2LoDHSheKFSseCbS40zc50BhhOJkz2bjnbfOuryU66ATk5T9tmC/PdiTqLIsBxl7vGrqOfzG6K/ns1yhNZ1DCqdr2HOxIvXoaDQY+ep2Jzh57P4sW4J3PwhSgpdq41sV/K4E5b94Rq6FhupFvZXQPT3CsyCk/FhSKGK/nE0Owg130aYbaf8U1UjqCgrqFf3M9Mo+r35wRjid2h9SAwjoGtuE/AgeS/nx9B4lNEvrhu3BJgSysZxEgKDD7aBsg0MgJWiYuR1YBIwO3cerQuY2YAQ+E886HRhUA7GXU4R1QGEKQtFzLrc46hd6CtKZOBxvp+GhvPLoSGsFwDNDkFvfBFhts2S8E3dz99NgVDHe6giBDMHgkdhBuyxl0suxrEiKhe85QXIFNBlN45dbIib1QHWW8xszJcdN2rF3sOUCw2MgvYKIXgYUnDBo9IDlopUin2VLbM0U/OSjGFZNZk3xHyMD0q36gKaJ8nz6OHnZ5OUZ7aTWWqco8KANEUf5f58R2ndb1hzFiDbKYx4h+QbrphNzCCzgiM+8z0UQtFJyLbeo7YarYBt0Q8Z8RjzfosAbJa/rZYGo0JjZaAxLBRKrZKcM3QJX/I1AhinvEe8FadOoXEBk9rCxgZGyVvT5sZcNT+2Xli+Pu0SVEajdtzyOHcBg6JCsHOel8a5VmpYaOf8vWaKYTmC8wrOE8wSH1vGMpZxH43f/d3fxYtf/GL81V/91elelV7UdY3rrrsOr3/961HXNc4991z8wA/8wOlerWUs44yORzziEbjllltOehlaazjn8MAHPhD/8i//sgT9TnFsbGzg4x//
OIqi6P0ZY3DJJZdgNBqd7lUEcIaDfhKLJYYdNaroZyeeIVI4IeloDJ7lmqRgw270UarS9IsbxxgnDNTtdWRePHknaSrkpGJV3GlSsImSSkAsEJUVVFHCm9hZbQqW9onduT1JT/5SWl4C6/LduPC51GGNCE7KuvkiAXq94mMmOxik6CUFd4ndvB5yj8co7Zpk/EzGXJRCHMDLd21anqq4eOO9A2wBVbrEhKNqzNKeMnbIxIKLFFu6Cbl0vQt4l4NcLv7nYqe4jRP2RYBHJvi5x1+IRShNAaWuYAargPdczPIeNJuAhuPkzSb7SpGGKrLtMAVovMaAbzlIHh75+XCiY3rxFCUpUsbxl/iEKkM6HRftVDtjgNrOo4xkZNplvm3aR9nSKPFoKgPXepTjEs2kgZ1ZFAdmqGcWK4Ywc52XHBALYVphxRDOKTVGA4OV88cYnTPE6JwRqrPXUK6NWSZytMqMtgiO0nCMEIFfKepKgVc84tLplu0Dkd0STxR+LaAAwSOg0AA8EvB31H0cL3QMAAaIYaBSYcfnFuWY8uPC3+mKqCI9q0AgpaHjdUJHll4CC5zlY+UaqPFZMOvbwHwGWtkHPd1Cuf8AqrPX0GxNYcZfxvzQNgZrA1R3TFAfnGOsG0ycx51zh43WHdbb75ySffzOH2icUxmMzhli7eJVDNYGGF9wFgb7VjHYv4Jy3xofn/FaJ8MbWawqXgNSZzn6YzuxP3L5PSncm6rf8AD0rqm7SQrfE8E49Km769CxorHLOPNi8V56hEgNOfI/doJ/OetPKxX9vlT6/JkSPbAw5SvoZDSzxjHxpUr+ekCS3YTXgI4NRqbsAEDE+7/vGtn8ESSfExsvj6yhCUB3HXKZT6v3UOVgB0An28bLzfKq3Zh8i/tDtn9xXYSxmAGYPvMqK7XCQBNMsFAte0qHpk7+zJJ7JsCvWkUoxwhRQcEFROb1wo4gRNWE/nqLxGpwDatIRD81Af7SehMBIbJaZT9KvuzatG2KNIw20KZCQwEuMFglq+MjAMhKGLpTDAk+5ZnCWNyLyPPIPGXOAb+wcI/XiqUjxwWB6g3QbAN+4y6EyRbcxoFOmlJyRJGGNUVikCYftgXfb5Fd1dSpXcj6STeVeL+1PmDWMog0aRxa71MDFJECHJIUpSadfAkXG8cGkfXJL0aGq1zPcsDaeyT1l2zO0vdz53wrxH3APsZ9hQxpLstDGsNU3hCWS9Njp8egYpSZWabg3IHIsCoEArRndilIYVRqtE6hIEqSuXlI31gf9GNAblRoFFqh0qyKYGjhukYGoaAufy0qOCoxdwHbjUfjPGaW2YS19Ql0BLrjIwBttz5hxy3FeQEz41km10HN8z7oBbZwpjpRkGZfwNMg77nMrZaxjGWc6vjwhz+M7/me7wEAXHbZZad5bfrRtmwhU9fcnPWyl73spArbs9kMg8EAtIcerT/1Uz+FT33qU7j44otx8cUX46u+6qtw8cUX40EPehDOP//8PfudZZza+MxnPoO6rvGN3/iNp3tVTnk8/OEPx9vf/nZ470/4XBiNRnjRi14EYwxuvPFGXH755Xu8lveecM7hN37jN/CEJzwBD3rQg05qWZ/+9Kfxuc99Dg9+8INx6aWXYmVl5Zi/u76+jgMHDuCaa67BwYMH0+vPfvaz8b73ve+k1msv44wG/Sj+SX4sk/7FTtb0eQUosKk5QYrnBCKC8jYBT4EABIUwWDkhwO9eE1KcENlKdMDaIjMnTXRVlKn0rgO5SLOcErE/XZJUKuNrsSu9t9+l0z+KNO528WIvs/TxLGIxRRtAZ8BDtl28gNgl6/ssvx3MhKwgp5TtLSMAqZAfzCBOdiuEWKiS39XBQjUFFBkuWBYliHRXDDElg2PD1SQ7xSy/MnWCyz5aXD0B/KSjOWd5FVFGqtQUgZbdvY96ndsUQI5fK7XCymANhgxCOQKZCuQa6NlW50kYu/9TKzXp5G+mRmupIOnWL0geLicTeVEK6AA/
5ZqOJZEAk25jxYOO5hNQE31V6gkDf7Ho0rHv+FFeD97DOw/f2ij76TA7WMM1DmcdrGFri3ZmWQLUeZAmlocsNYb7KxTjAivnjzHYN0Z19hrGF56Nwb5V6P3nMeg3XgONVqEKZkMGKVzEgm4aW1HmVeQhO4+dblx4v8De9J6lIwOSRK8EKewK0qV9HQQ8VglIltc8OpAvIKR1CECvgJqDjKJoJOMwsQHBRfwOFNRQ0CjLCgVxYZdEPnjtPBg7h5puwBy8A8PpFsYXfAHN1hSrX7wTszsOoT44xdqtW5hvzjE5MMNGyx5/29YnQFRkPPcXGuOCsHL+GMOzhxjur7B68VkoVkcYnbefvReHY9Dqvs6/ryg79nJiaLqusAdwcU/kjfNCZgQKtYC7xSgVxlKTSC4rB5zZ95FlfOWGok7+PEZ+TeYCbBzeIl0pYBM68E/F70m/QXGaT4cTLdfu+J6wHL1jDzHPOReznaP3b7x2KNJAOeB9gUHWPFDBBcBGX1lhZAHogTTyXGfXXP7QLo1OHujMiJFYMyEy3RTpjv22oJKQGnCilLPcj1kRYQFAy5e/AywRmUxCGxlnVmSRQ9c8UmmFEhY0Y7AJ9QR+Non5iGYfv2oMX60zw69ahdVxn1nxCOz2m0hKirw1USbFmoGYyjbM5LIFUFCUrZV9HCXlgb4KSMxRlMt8fEkD2sBU69DlCNYo1LYDtwTbcgHQhj2WJW+We4Ufru+JvOcO/DcCqwHsw5dkMrNPKsV54sAoFO0UNDkAbN4B9+X/Bz/ZhNveSp+lKipBVOPoB6mBlsFTtAZQDUuY5nQuXQJmAIrS3s53AF8Obreec9+NucXcOkxbXoYLIQOSWPWi0AQ0nB+HwGwvzmsIjgBdljADViBRruWx6zKWJ5AdU9vJjcrrh4uYH7jWwjUWvm27HNP5pIqf9m1UgRDgL+Ua8XcEVO9J5gob2FsgcDOqVoSg+LoZNCWQrSANZw7TEYCd+aBkcgUBJgJ+lSGUWnXXggykDEWFQAYzy96KAvrNrMNGbeMx86m5QyuFQjMQWeioTnEY+U0d2YAhnrvOBzhSUDFv7h2nfBvivNVoA0MKxWmQ91zGMu5tsbGxgS9/+cu49NJL92R5f/M3f4OHPvShKMuTb0bZ2NjA7bffjgc/+MF7sGbApz71KVx++eUw5uTvmXu93z75yU/iYQ972Emv22QywfXXXw9rLR70oAdhOBye1PL+9m//FpdccgnG4/FJLSdfv1/91V/Ft3zLt+Af//Efce21157U8j7/+c/jiU98Ir75m78ZT37yk/GUpzwFX/3VX33Cy3v729+OT33qU3j/+9+fXhuNRvgv/+W/4BWveMVxLeuTn/wkvuEbvgF6D+xtDh48iLvuuuukAQqJvVy3vYjt7W28733vwwtf+ELcdddde+JDeemll+IBD3gA3vrWt+I5z3nOya8kAGvtnlw/9joe/vCHY3t7G//yL/+Cr/3arz3h5bz97W/HwYMH8Tu/8zt429vehpe+9KV7sn6TyQRt22Lfvn17sryDBw+elOeg1hqXXnopHvrQh+Lbvu3b8Hu/93snfM962MMehne+85349//+3wMAbrzxRjz5yU8+5u+/4AUvwBOe8AR83/d9H/74j/8YAPChD30IT3nKU/B93/d9qYHjdMa9b8QfR6gs2c+ZM70CeiwCAFwHIRUlhmJ7qI5eCKkbOPOWU82Uixbl8XWvyBTstE9FyMBXq9GTo04+F50ElOLJJQAUXLxQ3rGXQ/Bp/XP2W89DJStWSUe6bLuw0gQM2NERvksIztBnry3AgUoxuABAE5/Y+jCjWAGpKzSxOBclt5T4NXLxzQZED5FuIqsAlNqgrNYAU8KT4WUOVqP0E0+UgyIEU3b7KrK6CJ0vH69X7Gr1CkF1LFQB/HKpHpb3BGrrewBgJ1MbgRYZvujAF4DH+twFhGIEU1QIgzEXrVbmqYClmwkX+AT8A4BywDI/w/UkQRWOoSh1
LMc5/yyQFRhtk4A9YU6ojJmpXMvrXE/gtg9xYTACMzQaoSoL+KbF8Ow1+KzzWqRMAS7M+NYy+NfIYwsfpUClczt1aWuCqUroaoBybYRydQQar0HvP5cZn6v7OslbU8HLOSLjSiQgIyPB+oDGs+9K7fhR5JGsl+PP3e2J7Re7pnlM8DgoNIPpck3jccVPkowx+tdIAMk/chHs80HktXz8HezaPS7hQiclJZ8RkFK6yQGgih4yA0OojEapxxiOVlAZwvA8BbpwG9TOMPzaAxjOp1g5cBvcwTvhtw9h+qU70GxOMDuwiXZrima7ga0tvAsILkBpBV1qlOMCpiow2LeCwf5VlKvMxExAX9UxMMWjErSQIOdd/E3NxfoEitepWA4gsR1UNeLlru4HlRUzfE2V2DsAXzsP60W6jGXcm0NyISCxnQDsBLCD50o+YlMRSX4hTUOGmduxcC3XG6C7dh3PfeNeEZGd5cDNQQNtox/dLPqeNhG4Yo9TEANKJLLlJIAfs2dczDt2u97mr1C8tkrB/KjXFpUBXougw+JHg2epT0VdzrSosBCZUd3KZWCfSBqLTKk0uLiAWfTHbbKGplJHsIkCaLYFmm0gbB9CqCcI9bRrQDIlMBgy4FeOYXWFuQuJ3beY80NYhKxPCoAwiF5vMGWSCUyygVHFQUePapVtV4+alLM4bR1zFgvSOqlFBO8wGKxDwcN61eVpiUJpWC0BgArDjgW+h35+LiD5ag41596tZ/8zxtEC4NUOtuVQK9BkA2r7ANzBO+E2DsBNWTJdcqFS8/ktm5M8niXPFpnKLL+WRxU8lCL4wMww69hb2Xpg2jjUzmPaOmzPbZJKlRgYglYKA0NoFVB4lq0svIqgIPssAp79Fi03BQ2iv7VytgN9s1DeJVYdH4eOAbizedCnP8kV86DSxGaxAlQY6NJwg1ARm4sk78gBPmm0FLb0AviXlh3HkCbx+lZZw1eX78vx7Ma/jIWseU4h+gMygCpS7Un6NkqLggzmXqFpPSatx6xl/767pg3mlo9VG2U75Zo0iFqbpAIKyUepk2SVyGXqddy2AM6PdVHF60jZTe6C75ozVZfnaoXDgorLWMZXUtxyyy340Ic+hF/5lV/Zk+X96I/+KN785jfvCTjxF3/xF/jjP/5jvPnNb96DNQNe/epX401vetO9dt32Yr+Nx2P88i//Mp71rGfhX//1X09qWV/+8pfx7d/+7XjMYx6D9773vSe1LIlzzz0X1157La699lr88z//M9bW1k5qebfeeivuuusu/O7v/i7++Z//GXfddRf+83/+zycsS/hHf/RHICIMBgMopXD11Vfjla985Qkx/F71qlfhbW97Gx74wAee0LrkcfPNN+PP/uzP8MY3vvGklwXwefr2t799T9btAx/4AC6//HJccsklJ/T9AwcO4JnPfCYmkwl+8id/Eg984ANxww03nPR6lWWJX/7lX8ZjHvOYk14WwGDOxz/+cbz61a/ek+V9+MMfxj//8z/jv/7X/3rSy3rwgx+M97///SfNRC2KAueddx7e/e5342EPe9hJr5fE2972Nvz0T/80XvGKV+AHf/AHsbq6esLL2tzcxKWXXornPe95eMMb3nDCy3r84x+Pn/3Zn8V55513Uk0qRIQ3velNuPDCC/Ge97wHj3jEI457GRdddBE+/OEP41d/9Vfxqle9Cr/1W7+FD37wg/i///f/nvB67WWoEA5T1b0Xx+bmJtbX1/H/vnQ71tbWMpP3fgFg0WMA6CYZ0g3c86iSTl7xinMtgzcLjL/DFqhExukeZHXsti6Lr9H0IFQ7RShXOhm6rICRJuxRAieX5wniMaFN8u9jwK+EUyYWcnxkB3W/Kf5zwqoUIKq3XvGfDuzjR/GH2FnMQc+TUY7h4fZ2xzyKvxsn/z3QLwNlFotvsh9JcVHEkEJJYDDY97u9O6mv3f15RN6qjUWqJhatpPDB4I9PoM+ihxwb3lPywVBxH+uFcSyTftlX
WnE3tMn8PFRcHlwD1dag+RbgLKiNoF/w7OGhC4ThvjQGQjnak7GdjisyFqW3nb+kAIDiw5JA2wZo5uzjN5sgNHVXWPWuX6DLWar5b2fMrUUWl8iJ5axBpXVigyVZyNEq+8INhvDFsJO7FTlPAZEzedfcm6lx3fGWDncfmOkpUkncPd2/fhWaUsf7wBCfWwvgb34+5OAv0AfVBWSW8yv3EhQgLwf2+Pvda2183cp4ja8tFq2l43ugCaNCY2AIZw0LBgG1wqggVIYwUi2omYGmB0H1BvxkE/bOLzLT4NDdaLamaCd1zz+HpbMIxXgIXRoUqyMU4yFUNYZeP5uP1XiVWbhl1fPrywYEs1ii1G2wDcJkC8E28PExNPUONqmAfjQYglb2MRtltMqMaB2Bf6W4aOU9Nre2cc5D/g02NjZOenJ0tJD74y3PewpWir33iZLYbls85vdvvEe2aRn3TMjY+fJtX8LayqgP5hwudmO0Zk1UIX8tk3sEOtbakSK/ouxVeXevQEYBn1YoMtBnG+xj2tTw000GCNomMX7kGuFH++EHqwjVGqaWl9G4joVF2drl3qoAOA+JLB0dRG7SdoAFEO9rzDZUzRTUTNjTL94vAYDGawi6gB/t5/v9YJxylpQLLjLaBBCQdZNj7zIFhZhTBV2iifnNrA3Jy1juUXLtH2PO1/3pQfi7vgg/myDUkySZbs69H1CN4dYuhB+MMUex06sOIWv061hHA8PAxkgH9q1uJuwH7B3ERy8Uo6TK0Mk+2l6umFiRzRTKtaD5VmoMkUaSsHou/GAMt3IuWt8x/SQ/01nuCpya5sDEKJS819aJVSg5Z+O6XITAbMvVUmPga+hDX0K46wtwB25H+8V/RTut0U7qCGYZDPatQA0q6NX9naz52tmAMZ0KRA6uxeanUI7gx2ejCYStxvG4cNyQVluH7bnDtOW/7UVZT8VsNMkjiBQqTYlRNojvVUYzC5CAlVLDEDcdGcRzoY0ekZJXpkFM3TwnCyWexLYBbd4BP92Cvf0L8JsHMD+0hfnBbfi2hWtjgxoRdMmAX2oQG45B62eDqvg4HCefb2GISuTMz3SuGc4pLSidP63rxr2ENP2lZstsjiTXWGkA694D590Zo5c/wM0IgQwmrU/svmnrsd1Y3D1rE+gnvoHsOa4wLHQ6NqNCp+Ozmxe15K2l7vwk03XNR6BWlGoy+VWZfwZdYmYD7j60gQfd/6JlbrWMe3XI2Fke02Ucb4QQdjTwHmvUdY0nP/nJmM1m+I7v+A68/OUv3zO2317GHXfcgXe+85147nOfe1IMpzxuu+02/MzP/Axe9apX4aKLLtqTZd6XY3Nz84SvTV/84hfx1Kc+FZ/97GcBAM973vPwi7/4i7jf/e63l6t4r4w//dM/xa233oorr7zydK/KKY/pdIq3ve1t+Nmf/Vl47/EjP/IjuPbaa49LCjOPP/qjP8JVV10FrTV+9Vd/FU9/+tNPaDnio75X8sC33norLr744pNaxv/5P/8HF1xwwT1yvT3W/OKMZvpJEYQAtqsKi+/3b5LyL4MOAVoploTJmGjc6dtNCJVrobbvRBisJsbfrrde18Ac+hL8cB1+tP/kN+4YY7d12fFa8FC2RaC6Y/bl7K3gAQyifBN2lVbMzeVBDGiJTE/u1yLhAwN+UoHRyCaacZ0EpBEWABExCKF4KxY7uSU60FaKAt37MhHmVQ+p+KMUUFAJRWUPCEnrbkMC+2Q7Ulc9dWPHBYAWmJ+L8orJ08xGjxMApAiF4o7zQAY1xQk8BZRJ7kj1GFj5suMw7eQTFcvzCAhkqANBAeyQvXU+wCEWFuNyK2NQFqsYlCOWumo7pl9iqR0ny/VYwkeQaUABud+ReKd0KxH3o2uAtoGfbjL4UrP0V7ANy5CK5+Sg6vwISTPIEyON6TaTeCINJV36mRTZjsiKDQLmeVPBKdXzY5KCWwLVXIC3vlecFFmrsAD25Qw/AX3bDEXXis+HJG0FBnOZ
9cljtCCVQN+801v2OcDroCFySp3sLLNNFUjJenSMWwEh2W/HoXUBW43FrHHYqi02pg0a63Fo2qKxHrPGpe/KupeGMCw1hqXGvlGBlarA2SslzhoWGBUa560MsFoOsD64H0bjr8LwAsLwkhammaGoNzGM3oCYzxIjL0UEZVURvTijjKoUzwKAkHXw94q4rknXPXjH46vhPxlvtm7g6oYZolH6VRHBjCvowkBPNpn1OWR/RzWouMhXdN6g1EyPfFIsYxn3ppD8IDCzK5dt7PniLjLBgjQMxQYKH6/pOfBHxMoKigAcvUnKB2BmPQpSGBwGJTxeEO9ojVLHGjMbMLMew1EJZWx3D4sNJaFtEhtdFVkHYgZ+SjG/iQ0XvDs7cEzHezzivb8gue+DgSkB3JzfuS+lASn6xakyAjPedV6zEsKGIuqDAOjngirL3bghpw+iCHjRuIDaBczjPpImk4IUFKl4/1JQsxlUMwOiVDdsm5afmnYyIImLXyHKZnYSka3vmOsCKHgoOFIotYYpKig7j/s+dNvkLeA1VOu738kb4oAOeMgaxkJkcSoDBIvYRKVB820MtEGZ79vMr7HxXZNiqQnlHvcIukzdRLVzKMwRigEGporecB46ANZ3Xn5laKDmE6hmm5tdZpO42wm6MKDSsPpBUTIYS5lXnXcAWIaylz9lIP9u4QNQW4e59dhqLLYbh+3GYtI4NDFv0hFMAkxkKQJFINTwcEGhVXzOFJ7gPTAq+X5fO48y5jPQBKNLPseDR3AWCKrv0ZivM5ByT5FyDd7xuewdgnM7WH6KCKQJuiqhCwNTlbyvet7BRfJ6DqZIDYFdg4ThMSne4mTgouRsmpsILqcASANfvBYIc08B0AJ0Bg/EnLd3nmYWCb3jRV2zojRACji7MbfYqFtszi03p7l+jqfjI5HCwOjoGxgb1RLbL/u5NFdRSe1EgHKtSuiiZJvLRTZzzNNFXWYZy1jGMu7LcaKAH8Asv/e+970nXcA+1XHeeefhR3/0R/d0mRdeeCF++Zd/eU+XeV+OEwX8/umf/glPfepT8fnPfx4AcL/73Q9EdFIssDMpHvWoR+0ZC/HeHqPRCC9/+cvx4he/GG95y1tw3XXX4Rd+4Rfwyle+EldfffVx+3p++7d/Ox7/+Mfjh3/4h/GMZzwDL3rRi/DGN77xuBm+KqoA7lXsxfXyRBmzpzLOaNBPQCQPBQo8iWepEeRiVDs81HqvR8CI2U+UilLw6BcaLEsChdwgXtZjvs3vuwaqnULVC+zAXDqFaE980Y4nghnAD/oFC9nOXhe+fH7h/xSq8zwRACP3/sondKRUAs0IAqChk4yKneTdF3jCyV4qBPjAQC6pXX9DQFtgJ+Anz1U6tiEBwvlWeWAHIJPLjuVsOq06VpX8Zrcvuh82AnBK0cjbbmItsllkMCxHcKTQkACbasd27MZ2zJmqhjompYkrplWHffsIQgXVX57z3BnfOt5GYwx0kRU7pMM4Hm/g5LvRF4uqXMgJXaEB4OIZAHjT7Ssbz0HjuchUlF0RNRZSmHXFoEvQsXM/Fk+6FfBHZ7HSLkyw3T4rcrfoQDPnukKe9TuPnYytVKTE0QG/Nhv3LQKKoCBwbkHs8SdB6DySDGVyT9n+T0w/z/87pdJ68TrnS2OmoUfH7JPu7tp63LE5x3bd4tC0xd3bDWatQz1pYFsH23g45+Esn98Ui7ym0NBGoRqVMKXGWeMS560NsD4qcb99Q6wONM4elThnxEDgWqkxMKsY71uHgWeGhp13Bbk8dmET9Y49wN8hvvYEKXDvIvnVW2ymk6+IABc9HwG4uomMw0k81x2zA+oCYV73fAMRzc7vyaBYiDxly3f3HKN9Gfds9HIARewvhXj/Jurfu9OXdr6mQlxOBAOD8kCIfr0i2Zb/1sI6SFOOPJ+7EFnr/BnxRwX4qlVqddh7VX4PWrwfnej9rdQKPqhOli7eS44ko9n9aGwmCV0eIfcKWUktSQwBGird
/xlYjKBE7562APylnJYBXFWU8Tjtcr9L18n+aobFiVQuuS1AwkJeCWkKSxLQ/HZPplBYSBF4Cdl2cD6YMxe5QUO1NcqqhI03ODn2wqYP4Hun5EfOKwTK7m2S92bMIZZXDCxhmzWELIINKtvGILKjpU8AWJB94h3va2RyiVElgfMvXpmC+iDIXoVWUbLRNV1TlW0AxeCXiTk1aWkYU1BNzZKlNt5XSYPKAgZgf2TxSi6jp1+WWwXveP6V79N4Xic2m4y1XeZindpBVECwHnPbH4QsO04RaPIgRfAR/PWBpT5b5dE69mkPAQkU8kHBIfMX1SaC2k42YMe1S+wAlGUZfN/UKe88HOCntI6PcV9FOc8kB16U8CL9L/mmAH0LeWjeQOb8zpySj3PH5BOmXWI1LkrlH4axrUIGfhvOaaE7Bpv8notyqkDf7xngc5lUVHUQFiZ1PuTiH7gI+vXWIx8P8XoXAueopAhKUX/uld0XTgfut8ytlrGMZZwJcTK+eMtYxtFiNpvhAx/4AF772tfisssuw0Mf+lCsr6+f7tW6R+MrBdzMY2VlJQF9b37zm/FTP/VT+Lmf+zm86lWvwotf/GL81V/9FS655JJjYtju27cP73jHO/CCF7wA3//934/LLrsMb33rW/Gd3/md98CWfGXFGQ365TIlHgoqyiIJ8Ad0LJbFSMAIkABDFwAtAAQsQtCJlaZsDTXfhls7n70GstDbd7EMjCKoegtqPoHbd3HHDGxr0PQgFxt0ibBy/KDfkQpWu32m9/pgBWGwAtr6cjfpI0oTzLy45rFzEtXtP35D3u8Xc/q/nBd1pBNVJp87ZFuAWIxhlo4xJbQ2UEqhcb5P5UP3e7tN9qTzM39fSkkhW4bznVxhLg8lUkzSQSuymKkbO3YzByAxtlwInZRO+gEL2Jon3xEwBhiAhTYIdg5FGsaUgELn/eU9UtVNoQOZ8gJd1nGevmO7fSlghjIlNBkAKhXE0npEoAcI0BSgFaHIWH2t58NdqN3H1PFGvoxO4orX0wZAKQMyBkqXXDCzDfsL6Xns4CCocg7xYlMAQMSsvnKAUK7AlUOWTSqHSdIqL5DkYFHqSld9+dVFWVXpQAbi2JLxIh6B2XuLY0kKuQwud0BfXEwP8Kstg2z8iCjvmRWAAXhiYNhHtp+A3hTPMZagBUpNvWsjr19WwMmKyy564rSe16tx8dz1Ovr68Xb6CPodnLbYqi1uvXuKjWmLjUmDyWaNdu4w2ZyjrVs00w24poZvG/jY7KBIg0wJKgoMVs6CKUvcPi7wf1cGGAwN9u8bYn1U4OKzRjhvbYB9VYHzxiVGhcb+YYGBUSipgNElX090d83ZTfJXCkxaGg2kEOYdgq3jdf0I1SLi8yd4D10Y9oKEBXn2h+TjbzsPyEkNU01B1VYal6ooU4e/nc0P/1vLWMa9LKRhhFQUmoyKAAEZ8JcXvxdYULtGLN5CdQCB8j6pByQQLPtKbTupaxeBgbWBTp9pXMBm45JM4VlHMLM/XpDvcPlUHiOjMDQaMxtgQSBTsUy1Yd8+pTWDWTkIuBvA2SvyZ/mpCgApqAw006Sg4XuFfVlu2r+SjwmbiCwUIoMol/GLXsQAOuAL2Hkc89wjrXR2zL3tgJ7ExPEZaBuid7ZKbMVSK5YXjJLeAeBxZaIbGGnOU20LKkpmrcV79qBkKZk5FGz0iJtZlzyRC2JJyIFRcF71tyEBU32mVY/5lQMlOfioCIEARQYoCWpBMlqFwB5ysgxFUe6Z5S/bmGOQOjJAfaKRy3rCNl2+4ymBjiWVCDGXKAhRWn0G1c5ZrhSAKgqWyS4dyLvE6mNfXOr+ckZrxsrsydwL0AXOh2SbSSFa5EXQLpMKT38RzNKkAAMUQYE80KoAIj5DBQgECC2FqFbATFnrI9uPFIgiGzF4vu4Ig3mxCTFXArA10DZJAYCBP59JfWu2L9AaVJj0l+79RcFqFGUVlSJKbkaL8p5JUpYMbMwZrQ3pOpA3
i0lOuSjlqfNzKcrPKltD2baTyw/9hjegA/OVeOWFABjPDW05gItuXrUwDYugHvs1G82y8wPDUu5iiVCZvhpJvsywsExh6SZpXIRenpfWPQK6so+WsYxlLGMZy1jGPRvD4RA/8iM/crpXYxmnIZqmwXQ6xWte8xq85CUvwS/90i/hta99La677jo84hGPwOc+9zncdNNNeMADHnBMy3vSk56Ev/u7v8OrX/1qPPe5z8V3fdd34Vd+5VdO2t9wGV2c0aBfAvziREGYe4jAH7L38+j+D1AhTn58p5lNmbcCnM1kPwGabXSTRIDZQ7ZjcUgBhWYHEeZbvJ45sHUS27rb87wwdbQCQhiup27PEItB4u8hElP5JEr2K4CeFIv83/ONoJ1F94J425WbI3nMSGFFpGfyIlLc78q1CLpAWVTQhlIXN69TdzDzw5rAR+ycIEpIjTKxsdBJQ3HJAOyDRrwzSSGxFHNZHCWF0Cy67UbqvpeinHJNB8h5y2OJZjzRVip1y6fP5cxAykC/+Fpe0JOCVGJMpO5hD4DBadaDZ6YAomyYkQ5hzeCoQwB85zuzqKJ2LKDzYhzucwpZUTN04Clk/0NDF6MoX9ZAmQGUnYN0CXgLivsQiuBL9lAL5RihHKHxLAVnbUDrba9QwoVhHcdpd+lzwXU+oAut6DuPdLYdi0WICPYJeC5An2ynfC6gY0Dw73egoRR4eL905wYp1buOacWHXph9pVYYGi6yGFd3DJB4jgmLWTw9gzaAKVNHufgL6lgw0yrAGR5b2+k4eUwbh+2a2X1bkwbT7Tm2D9VoZhaTu2+HnU0w37obtt6Ga9h/r7c/TYFitI5iuIJitI5q/RyUoxEmm3PcOTT40sEZzl+vsC8CgCuVwfrAYFTo5AkoklFF7B4vNB8lE/18OgCUHwsiGDKAskgtALsxlWJHPqoRYAsQgFBWUGUNqmqEtoGN7D7fdoCfqxnYdHUDpaex03+zY0cUJez0nmf6Ka2gjsU07SSWv4z7ZrisSQBAYlswIyveuxMQ1C8mp2aU+LynIiAAS8ZA48/YpKKQNyClRoqsQLw5d/F6yPdx54Gg+P51pJD70bHev47ns6Xme2cYjBEiw4qqMYJtE3iV5BCz/bRD7k7ug/G1RY8uBfafTs1EEWBKDP24T5UALwDEo0tkLFNQdgzyvOJEIjXQaECXaOW4pKabKEkeQYCBVom9LfKkijTUcMxS9CLDTZqllr3j3MBxM5BRBCpHqC3Qeh4Lc9s1z8BoFCFEeVSkXCLlVrovAQ2gL1Ob76O4fUERlIpNMPL5BfYfiDpP4iihKueKUoSSyiMynvYiAgCrK0BXKEPogB/vAGdjIxhvhZoze56aGfv/ecfbUFYgylnu8XmU0k55p3MMLgKgouTzWBGgig7w00XKNQ7XErA4R5MQVpnzAY31KKhrdvKREUYBoMBqL9zA5BnsA4ODae6imAWoiRB0d41S3jJAlgO+3rJ/dJSc9YfuhJ/P4La3WO67tSm3EcBPlybJe6qyYrnvapxUKHwx4ny1GHIjpqLk79xan+Zf4pecz8X6Sh+dXKZIaqroWUjNhMHx6Csampq3wbESQYoobQ9TRPYmnwsKQDDMDtTGpJyqjJ7MrERhkpczRQCfpTwVVkuDUakTqG+yx8TshdxbVJp79RrrsnEc0O0DaXyQ+0BqXDsNmN8yt1rGMpaxjGUsYxlfqfErv/IreMQjHoELLrgA+/btw+te9zr84A/+IF7/+tfj53/+5wEAj3/843HTTTfh0ksvPaZlrqys4E1vehOe//zn4/u+7/tw2WWX4U1vehNe+MIXAgDm8zmq6p5VS7wvxRkN+uWhgATa5Yw/oA/8CZsrDx8i4BdRwwCWgkn+d9HzIYQoMeddv9iyY2UIqpmlwlZQqmfYvtfbfawRTMXb0tZAZEEJ4De3IU02rQ87JuHikUIqcAElTvZy7zCRmlHCjIsT0QQ+9CbVmZRS2hibupFlQq4jW01A
j5AVDqQYKOu3G6tzEfTNO2d9BiYCXDM8oihXyEEYBgQdumMQscJuXCz8MeDpeCVTdzm/HtqGvcSyglealFP2CC6uJPBGZII0d6QHIBbgdt8SAcrzwkHavMh4VdgJ+h0vS+J4PpfLX8pY0gGxSFhCl1yY9CL56W3q0A/FEMGUsFSibj3mLmDWerRSPInHV4vXCWWF1Pj7UnTIx0kHdu+sKCRwWO3WfdwtL2f2pc8sLKvH5BMptAwQ7P2uANGk4rnWsSaMsFKDZX8kKfQ520k4KfaiC7rkorAZwGgDrUuQoSSVpmJtyJCC1wyuyf6Q4ltjPWzr0M75r5luo51soplsoNm+Gz733MvC2xbzzbvQTjdQzLbhbYNmOkbwF2A+NHCWfRA3pgXm1mO1MjhrZYB9o4KZG5pBv4GhtO0CBFbx9UIr+EAoqDtPdQQGe76R+fWbNEAsGYuMmaNMi6A1PBGUYbkz33Lh2IMfXQQA8xBJNEUEKg3mS6bfMs6gCAvXY3mechoA4n3MjSvyRd/Pi3LmOlFfIlHugQA3qXgL6DJdQ60POwC/EIAmhLR+aV2hdr1W53GszVG7fedonzFSzNYlgqmg9BwoB32ZT2H+7dL8JfcjlTXmMKuvu0+xZLqCyHom6b7UwOD5+p6Wn/nhxSYvpTLfNQFe5U9eyyJJD+YMQQFyI6uw2wjqQNsIwEgTjWyf3KsKUlBuzk1RAp5Fycxeg1EEK4IFMN2GGvHvqGIEpU0EsHZpnpF7WZ7P5PKGOeicGKbc0MXb3W/3SaoAMmYNOi9Y+UwE05S3Sdo94biGFS60LnvAx6kKYcgWpJnlt8jKRAccMyus7l4vSm5mzOXRiTqgSLZXgCTbMkDjqh6oHbKxtTjmd8vTAclL+S8H/OS5l3EV8lEiAKCKUp+KATPF1xBpZHM+y9uAbkxLQ6KAfW2DYBuEeoowm8DXE/jJJkJTo53WSdY7bQuxpKfIfFJZJAlU8fRj2fkBM/yKCjZKlc6tAJWhAwDj9rbeR7WFkPYNETA0OoFh7MOMpJ6iWj6eqCfw0y2E+Yy3Q3yQScZviRDBXXgPlCz9G2KODWehtI9KLbGhSjO417qAVgWY0El2DgxFZi3BUJT0pC43TU2gQNagEIE/dKoYKlNoket+Gk/oz+N9vAcsNuotYxnLWMYylrGMZSzj1MSdd96Jn/iJn8C73vWu3uv79u2D9x7r6+vY2NjArbfeiic84Qm48cYbcfnllx/z8h/72Mfi05/+NH7iJ34CV155Jd7znvfgp3/6p3Hdddfh3e9+9576930lxX0C9BNgp/MzUwjgCYTo/u8GCnVd7Dw55OITf6YFA3+aSujSsD+GTGi95Q9lTJodRZNUPFD9ie9JsP32JBT7i9jYTT+zHo3jR+uBaeNQOw/vA+o4sWXPCAJFvwYprFNBkT3WhSb2E1Hz7SQzI13PPUnKvIAkqyb7yc5jd3oRpZFKFLroyYDxpJcnjeJ7sQjwdWBkBGMWAL+jycJI8VEFsM+gdMJm26oyUEf32vYpeWRIMSHYhgtYUiiRx3ndFRnahtkBQI8doIroo0LE3cNFCVWNoKoRSwap6IUoRSzqTm2luAvaK2BYUOx+zwomcf2Le3BoyuRfK2IMNE305RNyjACtNJQZQRejdCxdLBLULqBpAua2Re2423+jtqgd+89Zx0UVATfFW6QgBSJhNsbiRfZ/EStzOShKsSDF/iV8CSjiftwN/JPnvDW7h1YKXoX4m4ALCl6pKF3VfYYUUMRu60oTVgY6dlgThoalzEyzDdXOQPUWn3ex0xve9eTS9GAYpaWq1IVPxQiFLlCVIzSO9yspoLYKK6WJfloeo1JjVmoe+3HbRTY3xLF9OMAvD29bNNt3AwCKZgZFGm29Am8DbOswHWjM5hZloXHWyhzDUmNUaqxUBUpDGJUapaHUgV5oXs9RQVgpDVAZOK1gIjMgkgKgQuiD8jIetQbA0luqrLpzM4LwKkp7qXITZFvo
eoJ2MoNvLVwEAV09h4/sv07+i0+qybw56j7Z61DRX+hULn8Z983gxgUG0ySkzK4XpT6jX1ivAWq3ph4vvn7SnAWIBGC6XwWPwlQI0W920YdWrq3CEOlWLoCCSnngqYo+1LBLKEIoh/CKGHDyDqqs074R2ekc8NTE22Q0N+IInqBiYw6BpfFY4liB6glLh7f1DnZaiEoEQZrWcgm/4ONvd01H+XoD6Jhtzu58fVHOVdhtQQHQnO+QQeP7jWOSExIUBkah0goDClCzGXtg227/iE9vsC3nS87Bzyb83mwCVU+gRqusxhE8yuoslFphZrvjLgoGlWGJwcoQyx7aeb9hSBfd+Eub1eWli6BeDo6GJAvpkBQb4vJVO+83J5kSfrR/xz4/3mHaA0OP8v1pG9A4j/Vx1QP6RF6fPRLblKfDNUATG1NMB1r1ctAI+AXnOK9omP0eHOcXoalBWgOuQdAFS2kubC8rZygIzCpBEVQqvEIZFQZymc+59dCkMCxZBrQgBp4KoEv0wXkhvEfj5Nc4N+uuXcwKNHnTnXc8BuczBNsmgC9Mt+Anm/CzCeoDm3CthZ3M0j0+gXyFgS4iy2/EOTmt7gON16BGqwjDNfZVr9YRyhFmLmC7drBx/jW3AdPWYbuxaB3PuyRvldBKwWjOW9erAqOCUJKCj3misPvUfBNhug2/cQB++xD8fMbb0bRpnZUmlhw1ZWQjjljKNcrqqrZI84iqGIEUg5GakKQ724zpp5WKYB+f3wL0Cfu5EHuHBXl9KOK2BDJxcHQe4szeUz3LCRdCAm/TfSEEHGUad0pimVstYxnLWMYylrGMr8T4//6//w8bGxs4dOhQ73Uiwi/8wi/g53/+5/H//t//w9/+7d/ib/7mb/DWt74VP/ZjP4YLLrjgmH+jqir8zM/8DJ773OfiP/2n/4QrrrgCbdviIQ95CF772tfu8RZ9ZcR9AvQDMlAvPhfJD5GB6nUM5s3Jqg/+ucCSn6QUSx6CCyRalzzJ9mXqAg7O7ihs9aQXgSShlCQayfTlG08TCKiACIqKtGcH+E1bxx4bGYI2iH4aWgHkAUN6R4GNZSPBLCMpJIjEpXikhH4xMDiR28vZNbKfSkBbwFggVEn2M2gDRYYlC9Ed60VWVV6y2W1iqOMxTn3xJB4Z0lUv3w1wQfXkZHfsT2EiKnTMO89jJsTCkAKAtonAYQvxQgk2svwi4BdsBhCIJFjcPyqyBbpiTCwiRoA06KKTb4zgqFY8LjW64yWT7sXi0eEKSUctdp5oiPQkYceOFclUDwAh88wTWbcQUFvukK6tx3ZjUVuPjbnF3PI4bp10hXMwa60rVEgxJQfWcpBb9pd8p9AsJ8nXiU4ONQ9hNbjYZLA49hL7UCl4COBHaJ1HESf73gdQXHBB3foKwDU0lCQ9h4agfQPVTLgQ3E4RZtsMIs9nPM6SLw8hDIZc1IvgcXCxMKhLEIBBUUEpSuu5OjBRPs1gfcgMvPVRgcZ5tHOL+dDA+zFMtRJBvwbtdPOoh16XQ/ZqytgBPgQ466E1wbZ8bdiuba8AWBpCYw2GJct9AkDhVTpnSTmMCh2bOYR1oqB1vPaKVGFexCV0TR2R2Q0bZdpMgaA1g3/eAXOW6jTOwWuCbywcLJ+XC4w/39je4zKWcSaFsC+Aju0X0DXopIan4A6fyyyAJz1CXvT3Uzq7H+kSmih5n+WMsZ3r1z13PmBqfZIZL7VKDDzg+ECTw8WxfCfI/b8cpeuNqBwsssv4/qfgCQxgAPALNxQBsUpSfJ1vpymv6lZMQETa4ceVvy8KAaCd7KscuFW68+dLb8f3E3M8/16W4wa7k+Eunovi8Qwf5RR9lxP25QezdRPfOEgzlAaNayhboIxsIm7kIQwM3ztZ0rljFsLa9HuJ7RgBv9zXD7QAbB4mxEuZge/QsRWz7wQy7C9cjI45zz/SuDyWPE2icR61CwhUQDzb8jHXe5QQcM/wSKTBcGfTIgD2hwQ3sEVViuAc
y5m6mKeGwPMj7RPQSNGPVxi5SkUCYX78dIAzSEy/ufU9pl9jPXShWIKcAnzo7wnvA7RWSbHEkOoaBbCQiwWfGLMQpY0I9oWmhts6CLe9jXZSY35oC661cLOmA/xKA8DARABIl1HWczBMDD9VjeDNAKEcJcBv1vrUbLlRc6661VhsN64nUZuDfuybR0nVAADWB9l4kW1p5knS088mCPUEzeYkyZELWGVaC10N2NYgsjhDU7PEbuFYwtm1MCVfjwaGQVStQmT2AT563ueSox2zLzvXs/2dmN7eJ6Z4aoiQOZIiBmUVQZFKcviI6j0qNhC4EI4ov7+MZSxjGctYxjKOP6bTKUaj0elejWXcC+Nv//Zv8Y53vAMAdoB+Ekop3P/+98f9739/fNu3fdtJ/d7DH/5wvOQlL8GLX/xiAMCP/diP4dJLL8ULXvCCk1ruV2Kc0aDfkRr8hN3kEAG8w7QDHg4kYnaQgDgR+DNRR9aJSX0nWckL8/3J8YKMUvIOaeuuCBN9bO7pELBCZGW25hbT1mHa8sSz9R1YIkbqUoxiH4fIoELsSBe5T1t3XadiJC9eddYmplvwvmO9LfhrKVNw4aCsWGqoNfBFA56BloCPnahFBa0IDsmpqwfwSnFxEfglKCB2yApTkdBJlcoEVqaTnhfMy9yFUpy/xkAP+3uACMEW7AEY2RBKRc9C2wIC/Nn80SUwkFew8wFSQE8yCNWY2VrFAKEYRQmhigtvC8c6Sau6bLxGhoWmo18GThXgh+BR6BI6KLi4HzuZH94Kke0S/0kbx2zrA7bnDFRvZ+N3u3FonMdWbdGkolFkrRIl+ahRZKwNI5DGRUPxi6M0FgT8K4hQeIXCBYxKzR3QsSNZzmCKIJ9XgAoM7+4G/ElQZHIALMXlFwo9RIgFzK57WsC+UhNKWFC9BdgaNNuAsjX85t0Iky32cplNunEVQT8aDAFTgKoxeyiVFWi8xqCxaxDsCINyCJQVSudjsZ8LgXPrUWiF7bqFJoXbwYU2U2gEfxGa4Ri6HKIZrsDNZ9HXz3WgIxjsU6Rhhisw5RC6HEKbDvwLIcBFQDH4gG1wsW/WOMwiw29UOgzjcwECBZgFgGnroMmg9QHGsVdhGX0M4VpAC5Oj3MnWljCdv49qquTPFcoaoY7sk6aGcR6q7oB617Ts9xcBQH5+z7ejK2LJsVO5/GXc9yNk1yQXAIhUskhGeo+gfE/iMzX2HE4CPS8AK+JzMv1IA22qHRKAixLMPUY1+Fp819Ql2eVzRwYm60o6HtDkpJpcyCCU3HijnIXSM86BhBEpXnqR+VLqMvkBS6NL3txiiD21TDuFamdQ80mUbm465YP8Hi7AVvAMAsp9lTo2njArF69KOSja2w9yHBfZfjmAqMUjNvT8caWRKklRK0S/4whcesu54eK6EMsYcm7UdM0r3oFW9nHjl28wMAUqo9CWGmgAaDATXO6XhMTC68l7Rs85l/HORN7TxQ1PMt9yaBViI5eFguVukRDl/m3n3SzHxY3PZiWGY4y9yrOamBs1LmBgSqh21v8dOafz5hdjoFABhtfd6xI9v0cBfb3j/WQbbhoS4M+2/BoA6AYKJY9RICleaDJJMpJc5/VYkAJMzLlIoXVd09Gir5+O8uYFBZAKqfnQR0UHF4CWKX8wXqGNh8R5BUXxwGZMTeUaBJHwrKdwWwcR6inmdx/C/OA22ukM84Pb8K2FrZsk212udQUxKgx0NUjMORqvQa2sI5Qr7N9XDjG1zOybtB6Haotp63HXtMH2nAG/A5OGc5yma3KSXHVYGqxWBiuVSTlOGz0AeQfztogUqZ9swm0eQrM1RbM5hW9buNZCEUGXBYpxBV01KFd55kQAwnAc5101SBGrnDQFSlMCxjC70geUTu3wv5amOZHOF8CPgORFmiL3d8/VFmQ8Il6rFHGjFhloTewTGoDg+z5+p0PlaZlbLWMZy1jGMu6Lceutt+IDH/gAXvKSl5zuVVnGvSxCCHjZ
y16W6t6HA/32MpRSeNzjHofrrrsOH/rQh/DRj34UV155JR7wgAfg0Y9+9Cn//ftSnNGgn18sAmXvkQI8FDRYHkRnwoxpniTz3lhkCWHnBEI8QhC9/oypGOxzTZq8HElfpMf8W5BQyuU+TxmT6iixv9IoW4WN2vYAB/ELEw+xgjqWk/huSPB8XcHkfmKZxFHqDgcSyJeKOG2TgC6JIP4hTc3SM4YZSME7oEQfWD1M9P2IYvEJsbBGquftIYU2AtLr8vmQLS8sdN/mkftQCEhqqIIeVNDlqOefAtuAdAFyLbwpAFOwNFL0EwttVwBNrL6ygt5/Hmi8Bj9YhReAT5cs1RjB415nftoZNvm75UWvRbnZ3cbgKR2Xwk7wNkrpyrkgzJLo9xHbtAXwm1mPWcsd0QdnbQL95o5f247d09t1mwpFubef/M0aQmk0mtKzVKQhtJ4Y3NOeQTfFkkZesZ8lBYUCXIBSsYvcHWEv5czQ3JdGQgo42nSAOsDnEwPPUSaJgJKijCcplIElJ6mZsH+orRG2D3HRZ+sQF37qCfx0iuA9vPOpWBWqitlr85rl0soKZBs+z8YNQjFHaKcYVOsoiwpUaVSW16MgwsbcoCDCobNa3HZohs+PS2xMW4xWSzTzVdST89DMLGzTwDUz+Mj+S/vElCDSIFNClxVfXwYaihRMoWFKzR58UqyOHoc5288uSH8BDEjqaEg4bV08FwuEoOI1nFDqCqZiEF6ZFiFdn3YHJoJjfyalSyjX8HWoKFMDRzAFCgCmakCTGq4wCN7DRkZAcA7eeRizLOIs474RSRmBouKBNpE5ngEJwO6AXy7vLYV3uQ/ZCESZQfSTIigVsGjVt3gPzv9nEKBrwjjR2JN7XmLhlywvSYYlHzPGHGwDUxooIrgIgGh092gbEEGyGlRvJM8uJSyrmA8lsC42FgWA81SgD/xFNp7Iu+/GZVvcd5wTEUwEeAHXKTXs8v0EkMVnKuZZmmKDVD4mhNGjNed3WQTv+HVhIrVtkqkO9QTKGFAzQzUsMdAKjVagAV+XS60wLJgNr2zNoGu8los0Z9Cs2mHdzv3gfD6mVE/KmwGW6HsWFS1yQDGQZmZXMQSOoaEqbS/2Nv9yIoGbmHoZa48AhTKOE9OB0IXvPlMMer58su3SQEaDIXzmQZ37+6VGP/mOAMSGm5us51ys0IRBCCwZ6T2cIVDrIsDE37cxh8ufa1JovQIQvxNYtpMYjdohv6BiI0DyHRcPvCiD7iebLIc52YI9dDds3aA+sIn5wS00W1PUB6dwrYdrHEgrUKGTjx+AxPITwI/Ga/DFiKV+B2NYKjGpHaatx3bjcMd2g+3G4cuTOQ5NW9y9PceBbQH93I58dX1UJH9jUTEQi4KQ72PbMluxnqKd1mgnNdrpDL6x8BH0c7FByTtmLJZFyZ7F1YTnQCU3DqpWJWWasqhQmhI2KFSaxxVbKqjePqbePlf9w5CNodznXe12n4hgs4rzG5gShS5BIXr+UUj26MtYxjKWsYxlLGNv4sd//Mexvr5+uldjGffCsNbid3/3d/Gwhz0MT3/601FV1Sn/TaUUHvKQh+AhD3kIXv7yl+PQoUO44YYb8P73vx+XXXYZ1tbWTvk63FfijAb9Ao7M9hPgTxh/auHzR5owJBkrIBX2Q+pyj6w/b4FgugmL362Egj7Il0cG+J2OUABGRiWJnNzbDFFeULqzhQklzxV4gkdQqVtXtQ1P4myzE/BzrpNvEpDPu47ZFkFAXhHKJC0jMFSUkRHQSQbl7BxS7OOYl41y4C9tH0kBiguDhjrwL8muynosyBpJd+luuJ/4BPJ7Ib2mAJTaoCgNs/QaDaVL7qK1c9A4LrMoWaLTtlClSHlGwK+I7L7V/fDlCny1ygXFYtD50yjiYmxiNyLrZl4A/HqDYBeQMH97l9f2NBalz9B122tFvY7eEAuV1qPH6qstF1NqywWTrdrCeY/G8p8ARBICWI9K3SsmpY5yYm89R4EZ
f9EQTsC6vMgsXnG5gLB4kMj7st78/87BI16BClKY6jrgNSkMNL9faX6ubM1sj/hI7Qx+NoGPHerSqe6mU7STmplmmSSV8R5UGC7QxEKdBxd7iCh5r3CR2mE0XOcCPIAwNBiVXFhdKTX7JhmNjWmDW0uNzVmLZqXEfGZhWwfbrsBZj+BDj2QBANqwfJNSKj0npUAmsivl/UX91Bg7QMAIyreepV0BRECQAHhoxcd1aEqYAbMaEzsjmS8uMllsJ9GnCDQM8HJNEsld76Ai+Kc0sYwWUQJbg/MotMY9HaTZc+hULn8ZX1nhAVBkYjPDhrL7pO9JQKbI2RyLgF8IQHAIKoIJns+n4C2ISpb33GU9Osm+0GveAvg1H1ge0OX393s6IvjmQDBlZAVZ9MAQ5Zk1puN9XAfLShDx+0abrmEoAn5oG26i8J5zBNPloCr69iEo3p8RdFKgxJ4J4Gtn6/mutZsPctoEMFgXAmAoshQdevfrFMGDFEWQLKTjQQo9cCDfP92jyxazoP5AGoo6XlGwLIOumjl7G7ohSm0Y4Is/I4x4gyh7GAE/HmMFWKKCIjOxU4iQOUJOaNTEwLPckwF0sqR5Q1W2XcEMEAYrO7f5CLEX+VeeXWgVAZKF5i4QAR4d8BfzRyUSkdok5mYCC2V9BGTSBY8n6t/XmIkZz2EgzgfkvQGUZ69JTSHlOtJkRaTROo9KE/v1tYRSs0SntAzl93z26FbwPsAfI/ojADR7GjbsPxhBMpHDbCcMljWbEzRbU8w3azSTFnZm4VoHXWjo0sOvd36+ioi9EMuKm/h0/DMVgi7RRBn6eZT03JhbbDcWB7abBPjduTWHbRxstFcAYo6kVMplNSlM1waoDMvB+0Dd+Ru6JsrQNgnoE0lS11goTSBPsDX7N/qqhK9raFMwYBjtBpSLrOQF8NaQgdGsJLIoQ3xMkXlm5ufOIusPZNO1SoVYplCUmKI8tzt9Ep/L3GoZy1jGMpZxX4vPfe5zeOc734nv/u7vPt2rsox7YRRFgc3NTdx22234wR/8QTzsYQ+7x9dh3759eP7zn4/nP//59/hvn+lxRoN+i0y/fA6SZJ+E5aUYoPLRDDz/jLwvy0idytlEkoE/wLl8eRpKmWQ/ohf25mGnIrsVTBY/cqTvn8RndwtSCmsDg0KraNCeM5FkUh67crVKLKSeYXuInbOOfa9EFg/WpgJVAvbE+8O7vqRlBAWlm7sXUrhYkD1RYOBOBSYaSNEmP7YimyUgilYA2Zonm+2sm3hmnebSFS1ymVQOYaIUVONDMpYXUKeNr/Ejd8IKc1CTj/uKUBWrMAMFUwyhLDOq1GAVFAsQqYM++vXZ/ffnTncpxpCBVaZvfxe4hiXbmrxrol+Jip6Cyc/ncJJrJxGHG4NHer0NfCxLyoDzeJxdtl+tD2hdSD41W3OLrcZhe26x1URWX/TxY1Zfn9mXRy6b1Fgf/eHcDs+4YaEBQ9AiA6sUBkajiGw7Wa7Iyi2qN8q46AqsfkdPABESa1aAvoJkjHYMv6EhGHio+RbUdAbV1qD5Fhd/tw/BziaJ4ReaGs2hTdhpDTtrYOt5B/hpAhUF/Lhi8K+1oGLCoPJ8BmVKZlBUY9BwDG1rQJdQ9SaG5QhVMcT+tRFqF7C/0phbj5kNOHDeCrbnFndNG0xbj826xaFpi1ljk8RqJ7OagauLjyGkYle3j1jGVJNKbMwysh2lCz4/tiLrRcqhjYXtSrNs6+rAgBRQafZxLEij1KOuaUF1Ho2pMOgts/0sM5ehSygzYAZyWTFDtxoj2AbU1NDzmuVMm1i4t8za9duTXc6CZSzjzIjFu0UIAS5e2TUZBB19MFUb2UF8T97Bol0A/MTTV2nNcpRe/KQstC6T3x15wKvQQzUWGX9C8gGY8XeodpjqgLOHuuc9fE+GC8Ck9RgXBqaoEvCkvE3ee6rlXEObkj1ZF6QolY2MpPk0+f+mIOJ8q8iTWeJmtChBzx6DLDEZyLAHrov3
1VwiEEjSqJSOLfdYaWLQT5OBAgMG3AyhgBA/HX0ZB7rs7W9R0OAGusD5jGEv2aAIFIFKRbrzPI6AZjDCPNLcoCHLnM8QTAFqZoCpMKzWOadwIfk5Do2Cqjd5f7q225/eJTDDeYVZ9IEEFpoC46OGSvljaoiJsqTKZc1qugMnTodkv6xzAOcMqupAyj7Lz8TkNPo/SsRztfPuM+n19Oj4mCtvEWxsRqsnCI5zekUawTbwc/acVsHz2AueWVsAyqpi9r0mWB1iDlSgdR6tI2jFku0DQ2g952jzLIcAOgYcDNB6buopGF3ueQTKPCX/YwuCGVQzg9+8G37rENzBO9AcPIh2a4rZgQ3YSY3ZgU3MNxvMN+aoD9VwjYNrPMxQw1QGg7USuio5ryoLlvYcjkGjNZbbL5npN7MBk5ZlPe+aNLhr2uDL28zw+5c7tnH3doPtSYPp5hy2dWjmNu7uAFNqBv1WS8znJZwPWB/xdXHaOqyUmq+DuS+l96nRS66vPv4P8DXSNxYWAE1M8vkTpl8YsPw6X89jA2bwgGuZRUwaStHubOo4R0se9vJaxhAVSdWe33vWIAp085/gHYIpgFCl65mmeC2LufeS7LeMZSxjGctYxsnHq1/9anjv8eUvf/l0r8oy7qVx8803Y3V1FZdffjl7Qi/jjIkzGvQLIfT8ZvwxpP+L/m7HEjk7J2RPpOwv8j8CGeUeb7t5ghxLQeB4JjLH8tkjAYNaASslF9LbrIs2ZyDlUoMmTp5NBCiMsPxck/n4Sddoy2CfbdMEtGP8CbMvetaJrCdp7pqNDDcaDOGjD40w2/KJpHjSacW+cGGhGCgyM1oBKhYUOxnSOoGVyWcCGejnWp6ABg8YC12Ooo8gFyBzYMoHoHEhMg3ipqkA7RUcAUU87CUAXURKdCYJpcSLT1HsEi4RTNltbyyadMBzLlHaByp6vhmyPcQ+QwAYENzDwtThxtaRxmaSvsyKZAL2OS9gWuff13r+fOtD7HIW+d0+eCQhfm/OB2i/8zMCGPFrXAC0PgARDGxdgFYBwyJ+Xol3ScfK65oL4mNctpxDDGR1BeqeZx8IQgBjLxvA6I5dK+eaiewP1c5AIuU520aIslShnjDTb7IJWzdotqZwNQN+vukYCKHkY29rgo7nHjkP7TwXcEyLoDXgPbyPTkem4KKsnSOYKZSdY6QLlNUKGk+YtR5Do9CMC+wfFol9ud1YtC5gq+lAv3ks2gnImgOB3Xu+BwzKcZK/0jCAJ+CfvCYgoIQPAdYFTOHQOo/aEVofUJBCW+ieR6IUBsWPpiB5NNDGQMViNrxlRiRpKGugxiy7KwzdMJ8hVC1f62KBWqR6NQ2OcCacoojFvFO5/GV85YXABBpIagvqaPeSHDhYCHUYFmx+71CCZhwlcol2D/R8p05HKGSykLnUY/AJoOGmnABILuItF8RD6IClhn1EQyanqKLfbyDf5VBa94DVXoNPxmwTX1wX76tADvgh+fAh+qCpKLUtzUNJyTUyNQEwaKsIujQgTUkCMCwkAT4IUMx+xyH45Heo8n3jCQoFgxgAN6Xk6hDSXOYaaHiUKVeN4I7vGtFyQCuYAqEYoQkMKi0qQshxk6Y/hc4DG85m7PCYu5myD7oo2tkBeA/HQMv9TCYgWdOcgH8AfM/PcPdliex6N54iM1A+EAGilN8DfA80gLK212imvENwDQyVKDV7VSrFObOKjHxySpRpAWBHPpC/5rzkVN3KawUUkjvFP63icwU+t9qa/Y9nE4TpFtz2FtqtKbP7NqewkxrNdotmu0EzadBst3Ctg2scOIMHvOThwvIzJVT0v0ugFRm0lucH08YlhYqt2mK7brFdW8zmFk3dYl63sI1DO++Yfrb10JoS4297YDBrLGatQx29150P2fGN6iBRelRpndh9we3MB8R72LcWZBsEW6R5GxUlN2CA95k0EDAYmB2PhWu6gILQvpOHjedfYtuKbLrnhqrgHDc0xGXwfvTpmhCi
zKgiYqafUlDJCOA0xDK3WsYylnEMUdc1fud3fgf/8T/+x15tcBnLuLfFLbfcgj/4gz8AgCXodx+LEAK899B7oDr1F3/xF3jMYx6zJ8taxj0bZzToJwWMxNJLrL7uxqoiCEQAoKI8VWT8HS52A8iEPRYCdoCGsRzSecPFCVHOGFQRMHChDwSeiELKicSRfqbUCheMDbZbj+3GY2F+3ev+HhiVGH5agLT5duycnUI120DbwE03O7mYpu6KNMLgywE+IIF8IA1lCn4sK6Cs4MmwP4o2ycMOQJIKC8F3TLe0wZlsZQ6CxQ5tambxcQI/n8E3NcJs0l9PIvbnqMYM/g3G8MGjrNbQeIUmgjqpgBYC5pbBnjajfg0iKGFIASCEAOiCUJQj7pKIE9pUVIueMMgZcGR6jDIB/AxXx6JEjk8ynj15NWEsxm5/AAjZ89MVMv5lmwTkEwlPfg40zqP1QG19KprMrWcPmLh7+iARpTmzAEGNdfHR71pAyj/jSEE37B9ZRPkoZMPWUCcj5kOfybcI7sm2SRHHhZB8/NgjMILAqgP7BIyqNBcuqd7qJD3bKcJ0O0l55t5980PbsHWD+aEt+Ialp0LcQUoTdCzwAFzs8c5DFwZOE3RrQZqgbcvd+7MJ/HQLyhRQ1aEEwOvhGIE0dLmC0hQYFyP40RhBl7hgXEQWiUcTmSSz1qONr82tS8+tC6itw9zx+8LUZE8bm46TjfvNxOMoQB+DfboHAkpnv1ZR2s87wHZj4GDcx8zyY8amsDeFBZiubRmTuSCFshhx4deUUf6K2TfKtkC1AsqYzTmDWaTqzPb0OM+OZSzj9IfkUhR2+p4xoz0CLUAPUOhJNubAU5KhjDmS1j35wJ2/L/KQQAgKQTGj3qusaWS37+Hkff32IkgBQ5Paw3psHFDMyywz9wTwEq845S2z+2JTB+dRcZ+SRgBSY4bkTMFFyeEoWRzi73DTDzc+NI7/aiu5S2wAUd21zxBjeUazJDJiU1MgigBv58mFuN4qO8bKlKhMBUcqNfAAnH9bz2w/M1hhUEEXsfDfgJpY3Be5yRjKFPC2BWyDIH0slveRamuo+TYGgxUGkYDI5ppzTirMyTgufbUOW65go3YJJO6atNDJv2fPNSkGLeK1Hy6yDhUhDFY5XztCHKnp7mRjt2WvRW/DxPTLmsdczEHzxqrdziNpfhEmPEX2lsrk75XmfD3YbB0EpLVx3Yzh4xlBHdVq6AIYmhI+KFjP37SKR7T48vqY/3UAH/9uE+/pqbHKB3jJ92JTVqkp5VHpj8DjoZ2BmgnC9iG4g3fCbx7A9M5DqA9soJ3UqA9soZm0qA/OMN9kwG9zo077ag0NgBIh7jTK/fyGY3hTccOeLiKz1qGJDVDcEOWwMW1wYLvB5qzFfNainvBfW8/RTDeS6gmZEtqU8GEtMf8ObDcYliblwdYjyYmqQcXrUpSg0kBXJXTT9vwHe2PHM+DnGgtjW4Q2egKKz580PGbjTOYpcQE8FHzXXJau/77kk0saNIHkf8nsW25m8E3dawxVWiMIiDqKv2sjiKoIynSSwXJvWMYylrGMe1t8/OMfx5VXXomrrrpqCfgt414fBw8exAtf+ELcdNNNKMt7vkb4pje9CZ/4xCegtYYxBlprFEWB7//+78c3fMM33OPrc1+Kt7zlLbjyyiuxsnJ8tgO7xc0334wXvOAFe7BWy7in44wG/YRJI02HQUC3BfBPgL/F2K1oJF2/AX1WjgB+uaTjYhe5TKmkUCXdwkoFaAWECJ6FsFN28HSGrIkhhaGhw/qOKcUAYUHslZKkqOpNUDsD5lP47UMI8xn8dCt6S7RdoQqxC7TgCR0WGX0RUAiaO6e9LpKkJXTZkygCkJiCChmrbbG7XTztfGYeH3xi9vn5DKGeMkunqblYb1s+skT8vaaG1hpeccErFBUKXWKuOtCHwT8kMEpAP5LjHIE/FceAsA57XnsR8IMp07p3B6mT06HY0c6MStttU8bq
S4VURV0n+r0kFuUwRQ5VWJPOhyShKixK8fFrfScBSYr9JQHCsNSpELQIFPHrmguciUm2O/jXWBdBQ5VkNufGg0hh5AIK4qKlLDsH/No4HhelPgE+Zt6H5Jkp3egCVCVwKXaoV1qBpHDZTpOfU6inydNIHmFbuJZBPh+BvuA7gG8xvPMAmIEWnO986DTBNRa6nIIKE0F4ZrOp6N+jBvxcVSOowRBUVtCjFUCXMOWYx5qp4EdDgAyaQAkUF2Bcis0z67A9Zymvzdqi9R5bc/ZobKzHtOlkVyXkupmDfZXR0AQMogxoLsucioIBcNn5pGPBqDIaRitszG0CAkeFRqEJJh6XJLWqgMoMoIsBzCBKGuesnIxR0gPfg4cabh/lrNj7UHRqu9F3KyIu474RIk0toZTaFfjrPiCSbhFwIkKqxMtHYh4QnINQnNM9Xe7xpuDncWwROD9RQJKI1DFx05FxHVQftJBCsFLqtPg95SG/fkS+YfBRjrtN/6t2CjRzuI0DzPBr6sQaZoYfMfPNdY1USute/hQMgw4wVWr6sa1PwF/rufGCGe0KheY1lvxWVp5Z6B3rrbfecr1b9LX2DuQZ/DPawIaYK8Ux5FxAqwBAQ5sRinLE11IziQoMg04KVRHLfoqsqUjBO8f5pyKQjlKAsi+8Y+BPZD2jrGUwvFztG5DSCcgBuKnN6E5qG0ACvdJ256A2GfhyyPv4GMfBqYh82ZJb5dOLACRpTR9Znl2DVffo0fmjyTyFGe88d9GkeMKYAbwMJmsow8zMXdcvhCTdy40xc35NW4zLUWKa1s53qg2Bm7xq6zBtXFIH0BGU7rZXckHOsdiGIOZQhuczQ0MM+MXmRJpPEKab8FsH4TcPYH5oC/WBDcwPbaPZnGF2sEY7aTA7WGO+2WAybXGw7datJAXjfPQbJpiqBA2GUIMhgpbrl0EwZbefA6sZ1JZl6WcN5zji4WdbB9tYtPU22skmj2XvQKYEmRKKNOe7A4ONaYvVqsF2ZA7WziOMRgjtDHq0BtgWtLIPA+/SPdo3Nkm9p+NClP74sDpm9EXVArnGJMadIpbmzc8BabwUkD4fG5H5m39O2Sadm2ib1NDQU0cAuOFz4LnpLJ5rSf45eChQmkudjiv8MrdaxjKWcbiYz+d43eteh+uuuw7D4RAvetGLTvcqLWMZR41nPvOZeOc734lv/dZvxZve9KZ77HdDCPjc5z6H22+/Hb/927+dXr/00kvxa7/2a1/xgF9d17jllltw0003YX19Ha94xSuOq4ngs5/9LH74h38YV1111Umvy4EDB/DZz34Wj3vc4056Wcu45+PegwScQPjQgXT8P09aveJiUQjhsCdGMgE/Qre4hLztM8Bvt+5Yl5V2NHERQQA/kGIJmxABgNB14d5bgovbu+8PAf0S4OcaqGaS/MXCbBthusW+YrbpJnIZ4MdAn0vPlYAIsUvWFyPuWI0+eiyPo7lgsyCXpPIiU+gAPZFy6pnDi+RlXpwSRk5Ts1yW9wlA8fMZ/wZpeNIsOjSfsZeXLRhoW2DK5eCfCx0ABB/lhjKNqyQ1Kj6C0iUbwcxURAq+B9YpZJ592QR6h2Sa0h0wqs2OdT1dEbJHJ+xbcIHHx/1mo2STdV1hynpEOU+W9XQhMHAWYnFOAz76A+ZSnrnn26LEp/PqCOCfh/OExnpoYp/Lghjca71CERSsHN4o39l6vyvYl4eAfaQ6zxkBlTShB/4p10C1cyACf+LNGLxnn7gkkeuin05WzNEEtSDlRJkcUfCeZSozDS0HC7SAdh6ubrh7nbjIqqMsqBQ6lNadnFUE62EKZsXG/33BoB8NxrGYP0AYVnDQmFlmAs4dYaXUmFv23audx0BbzJ1GbT1WBh6NY2nQdAwzIFfYfZWhBOBJ8c9HoK9jXDL7NpeD1Uph7jxIKcwtewINDKG2HoUmVJq9TEtNsJ6PXRMlQlkujKBpgGJQ7Tw3XcaA8Q7e3TvOwWUs43iiy62OAvyp
hWJwHrkUouuAP6V1Ak+CNLxE+cgQ5Qe79eD8KQQFr/g64D3fQ3xQSV1BJHoJSNfUe0t/FQOUameR2mc+V1JEr6fM5N4+hDBnaU8JYcEAHZAK0p1nb8ybUrOULhjwC0ggj0hkyzURACh0bB6VgaYxfeX9uNBUlRg+vv9aAJLka9AeWpfR0y9j9KfUMMAQUJDGYDCGsgWPgyxvU2AgILgITACcq80mnOPPiXM+HdlACw1VIcqJhiirrpyFivR9goJHYNAr3pvz3DyNnwzoUrKPTXUcI+DocaKMwDy3EgZuPrcRwC+ELq9q8xzLdUsQFQmgkzVVmaQrfyy7CkSAYrf1Tkoe8h1n4zjgZjwiQqlLWK3ggkrNVN5HH+dd5L6BnVLuaVViXlXEXCox/MTHr62h2in8dAt+uoV2MkOzOUU7qdFszjDfnGO+OYedWTTbLSbTFtvWY9v6pNIwcwFjL0zlCJrJn1zHotyrl3POI3kMi5x5Yz0fK8uNbN428LaBa2bw3sG3Dahgpl87qKDLCu3cspd1zcoI09bBugAbgMIMgLJKnsyhqVE4B99a+Cz368mw5uF9JpvrExCnwLlib4+LzO8in1rmPcHzMgQkzBsbYi7rm5rnX03dA/34QMamkLJimdE4XkJUP1n6yCzjWOP1r3893v/+9+Mf/uEfMBwO8U3f9E14wxvegAc/+MHpM3Vd4+Uvfzne+973Yj6f42lPexre+ta34vzzz0+f+cIXvoCrr74af/Znf4aVlRW86EUvwutf/3oYc0aX0Zaxx/GP//iP+K7v+i78/d//PQDge7/3e7G+vn6a12pnfPKTn8Rll12GqtrbHGYZZ3b89V//NV760pfinHPOOaW/s729jf/xP/4HPvzhD+MjH/kIvvCFL+B+97sfzj//fBw4cACvfOUr8ZrXvOYrdnzeeeedeOc734mbbroJH/3oR1HXNR73uMfhhhtuOC7Ar2kavPCFL0TTNHtyr/rYxz4GYwyuuOKKk17WMu75uE9kK7sBf4sz0FQsRjdhZAYgkjznbhPubrIsE2ruiJUJtDCUFsMFgMDFqRALVlLcN6ROWSHqRIsGAHf0FqTQ+j4zUimFghhoU82Eu6ubKajeAOYzuIN3cIFqNtnhOwOAmXxRgkoN4qR0vMZsodEagqm4YzoD/UL0n0lFiwX2JlEnbcmsvdgVLgBJ7JQPNsrVRHAk+QeZord+AWAZT9uA8vfKqvtsnKwq10ZfkgIFAU4rFHE9teWipHhHAh3zS+QDS1LcdWzn/BgCj5GigkLJElY684iRdbF1xypaYAH6wbj7/B569e1l+ADMrE9jVASBWsdyuVIYCYF9hgRYl+KkiyBfwWZDqRvfhejV5ilJQi2yVR11QKAUkRhgpAxU6vbporxkHYs+o8JjVDgURAnEk8gBPYDHqMhNpc9Q55GZs/uE4TfUCnANy8/aGqqtk69TKtaYgsclABqvAaRRFiWUJvjWohhX8FHKcxEMPFzI59qmToUhnzEG05/vl/wVEag0oMLAVAOYYQldlShXR1CDCnp1PwODq/tAK/tQDIYoq3WEcoQwGGM+rDB3HvsqjcYFTFufZKu2Gos2Fshyxp6EMFAGmo+FgHGUFQ5doMQOFSnRWpYXApwNaQyypCdhU1sGZHW3TGFlFlolhmkRGaGGAAUVGz242K2pjE0fPC4mdBRE+BRE3sV/qpa/jPtm5Ge5ANoBgE9QCUcvj1GLRW/b8/oLzvXlKSNIlQAZU3KDSmSmC2uJczC5/wMUOL/TWiU5aA8+T88dmR0+yve6yO/PoSuEY7aV8hW/cQB+NoE7eEeS31OaoAsDGrGUpCo6lQSUFYIuEcohA1GRhQZdovGAtZHZF6WX+b4aevfJgiix3HJf2XR/CjbJWyrX9FUK8vAeChbB1tyc5Vl6XEVA0jkGdGa2k0eX5pdxQTBUYTgaMZO6rVlWsp2DxlH2MWO5+40DwGQzNY/RYJjA4459ajg/MhVCMUi+
0OsZYLHd+rQeDBirdK9J93hl2Bc3+L7E4R7Gkcbs0XJ7YcwZUjtyZ2mskjxLgF95Lp7UnaQpf9epDlAkod0urrPJGlpE0hoM+Kmi7FQngoeSCZMSsNhC6RKjahWq0HCexyFvj4/gWJRdX2j6ESUGTZ16Akt2KwwLwkBzPqXqTWb41VsM+G3eDXfwDriNA5gf2sb80Bbmh7Yjs2+O6V0ztJMWh2YtNq3HRuux0fI6lKSwYih6+3XNVOLnF9K4UxG43nnft7u8FnyI7D4PZxu4+QyumTHDTxokSWNaaky35rhTE+7YnmNUaGyNPUZzh/XBCmi0H4oMNHju4serUIMDgG3RTmZwdYPgPFxr0/pTabrc0DsG4IRVSxooPas+ZFYDDHh3r6mWIgOvjuCuMPv6Es/K1gz4TTb5OidzxrYP+rFscYkwGLLMqC4Q3AAgm6R6tWJwtzgN3bPL3OrMif/5P/8nrr32WjzykY+EtRb/7b/9Nzz1qU/FZz/7WYzHYwDAy172Mvz3//7f8fu///tYX1/HS17yEnznd34n/uIv/gIA4JzDs571LFxwwQX42Mc+httuuw3/4T/8BxRFgZ/5mZ85nZu3jHtZPPjBD8azn/3sBPpde+21p3mNuggh4E/+5E9w3XXX4aEPfSje8pa3nO5VWsZJxJGILScSd955Jz7/+c/jG7/xG/dsmRIhBPzDP/xDAvk++tGPwnuPxz72sbjmmmvwzGc+E1//9V+Pq666Ctdeey0e9rCH7fk6nElxzjnn4J/+6Z9w4403AgC+7uu+Dn/4h3+I4XB4XMv5sR/7MXz605/GaHRkC4JjjZtvvhmPeMQj0r3zKyE++clP4mu/9muxtrZ2ulflpOOMBv120/MXRhqAXsdq/jGZ0BMjfgn8k5BCSOp9DR3Lz4NlBwXwO9bwEVxUofMKORVxMstV6LZZJFFlH6oIrgngp9oZs/tq7pqVbvSQsZBYhipKAZoSNF5lVt9wDL26H0EX8INV9r8ohgwEkOGiFLru3n6xI6Rj7JWCVgRNJk4GY/dplNXzAvjNZ50klBjdZ48BgBpULDlmiq57HgwIwhT8+VymxlkURYmgmWng4hxtYAiwgNMMVgEM9BW6k6PRpFJnv/yWCoD4E3Y/vrOLVoXQMQMB7uIXebQzIAREB7rzalFqigH2kM5JF/pFEh2rUq3zLMflAYCgicdMq7oRk1gMnkHY0lDm4cf7ugMC+2dPzhKUApTEwADaAwOje2AfUVe0yr3/uutQBw6VmqU9uRs9shK9ZZZYLJSmUB3DDgBQtOktArM/iihH6+s6gXM+A/1y0G4REAzeZ0ChY6nQhuVCbd0kj0DX+Fg0ymWiFEgrmKGBLthHplwbwVQlBvtWYcYVivU16PWzQaM16LMvAIar8LbGoFpHOWCN8cYFZhNrZt4RKbTOY65Zki5nJPS8EaX4l0mnAgwKFGBA3ntmhw50QKFVBJH73puyXDjAeYciEKz4xjgFwKGQomME/ij73SLKiwkwLYwZrRSmzWG665exjHtxqIXnR21WImLEcJH5twAOKc33XQFlkD1K4dzvkmCR6gBJUgC8gtJcY2Y/z2NTUDiZ5qgTiV1/S1QKXAO4Br6ecB7V1MmrtZ3wtdw3FroqESI7W5GO0nvcABKETW0qBDNgWXRdovXsiZt75CaftOy+2vnMSjOFNKbw/cnAc94n0nyiUhAjAW1SrM48v1Qgvu8oj7yWLWDT3AY4AjSx8kapmblZagNTVAhR9jS0vK1qUDHo511iQKop55mhKKGqMXscDoYM9JHuAL+MmZeD11olB8oU+VjvHb+TBPtSjn2c3zvWz3fzlq4pMVcmWVQskTEBMMjOeZRKTYvOs1KvCwAWwQjJS7LfTuspwP4i+zd4zncVcYMbAGUbFMWIGZ+6u4+amBd10p4qNtqoHVLeFO+78r2CVPTdnUewuuFGwFk8z+Y1nOQ2jYdrHFzjYGsL2zrMXIh/Ho2oR0SFGJGY
l/yKvXuzeUPofN8p5v0CqA8yaXKtCWQUtKEI7FGPHRm8g7cNbDODaWrYxqGdO8wi22+rsdiYW6wNNErtYcoxKHjQuAF5B7Eo8PMZCjDIJw0Eab8VBqSFrdj/bSXbJexe389Jg1zbSQOBmziUt6ww4izPx/LvB84vpQkT0fM4PUrE+VpoG25qyBnG3gPkoRXBncK59DLuG/HHf/zHvf9/4zd+A+eddx4+8YlP4AlPeAI2Njbw67/+6/id3/kdfOu3fisA4F3vehce+tCH4i//8i/x6Ec/GjfccAM++9nP4k//9E9x/vnn4+EPfzh+8id/Eq985Svxute97rT4Xy3j3hk33ngj3vCGN+Dnfu7ncMstt+Drv/7rT/cqAQBmsxle+MIX4g/+4A9wwQUX4P3vf//pXqVlnGS8+93vxiWXXILHPOYxe7K8v/7rvwYR4RGPeMSeLG8ymfTYfJ///Odx0UUX4RnPeAauueYaPOlJT9rBgn37299+WCDzREDOvQZGjzW8P3FFgltvvRUve9nLcP311+PCCy8EEeEjH/kI9u/ff1zLufnmm/Frv/ZrALBnjMmbb775K0baczKZYDqd4qabbsIVV1yBRz7ykXjSk56EJz3pSXjMYx5zRrJQzwyk4DCh0E/4FwE/+cyuIJsCgviXoAMinEiExoKISMQJyy9kHbSHXS+F5ImT5Kak2fU4CB/3dGGq99uxYKDjOojcoJpvg5roiXHwTvh6kiQ9w7xO31eDqpP/W90HVVbQ62cD5QChGMENxlyoio8WxJ5fWSe6HAsAvX0ozEwpDEhxCABUM+XJqHNxnaJnn0xeTZkmtwzolaDImsonvABSMWNH4cKx5I3RBtqU0IpAin1ygMjgcgptRAIHhqJ8lTCHeBlwkcG14HshHjS7yqRlgB+iv+BeyHfeU2NNZFBzBm3jfAL+kp8QuuKVViy/JTK5BQgUAgO+LgC661wH0AOGEkOMOvBQzu3K6PS/MEbyPwA9H0AAqC1hbj1GhcbAEIM+hhlgIi8pfnFadY0HCfyLoF/uE1dqBSVg36JkayZ/B/HMKgdQpuCiSFMD4zWEtgHZCAT6hfKl93wuRH9NYb7Ctszoi0yS4D1a1PBtC99YNFtTuJq9bezMYr45RzNp4BqP+eYc3rP3ouBmZQS9TGVQrhQoxyVG5wwxWBugOnsVo/P2Y7BvBcOvuhh6dT/02RdA7zsfYTDG2vhs+KrCQCvMXUDjCMOC4Dwwi75TzPpzLJ97GL9CoCv+AUggXXecgXlveT6Cfz5dczqJXtcbL0BXHM1fk8KjMAVzqdFCEwpSqKfdtfGeiiTFesqWvwQyvxJiEehbzH92AIHpWkWdr5/cyyi7p+ZSlJHdJ55YThqrcsUB8D1BmiyEResDs8bzHOFocdoKxdl9XXnHecBsC76ewB24PYF+9tDdsHWD+cHOC1RFVpGJuYsqmUntiyHnUaaCj/lU64F56xN7XsCdxjHQ1u6GqCI2pSigip6mQ0Mw7bRTJRCWn+Qpcm8SpuZi81EP/OskAoVxZj2w3UTWkVJwA8NMNQ8MjEKlNQbVKlRbALbhvLqpgckmy3tONrtGsyh7SuNV0Mo+0PrZwNoIoajgq1W4mGOWCwx9+e2cCC/y+6dCkeNUjT1h47YiewskBl8uh5nkPZPawSL7jG0JKLCELiiC6gr9vDQxe9HlK8ZAhQw4Wsxf8gYAYYB6B68IRhtUpsCwYF/dUaHROI+5NdDk4bxHaTRKAcziMSOlUmOdgNSVVhhQgJqJpOcMYbqZmhRZ2rNGO5nB1k0C+9pJi3pmsW09Jo4Zfvyc96lW0qQW2XnSPOUdgm1BPtoGCMNVlakhaVRozAqPlYHBSmXgfMDdAwPbOrQDB11WMLYBRVYf76IGijTcfIZ2soF5WaGeDqCNwm2HZigNYX1gsFpqhKBRjtdQFhUCGZAuYcZrcEUJmtfwgyHUbALd1HDTaXe0iUCl+KyLDKjjJkrHeaKyFkpb
hFDE45dZCFAnqRsAwBvediACedLYyE2LQQA+23JuGkFAYYkq0gz2AXG/MoCpXMu/aTjP1aVhu4nToN+8zK3O3NjY2AAAnHXWWQCAT3ziE2jbFk9+8pPTZx7ykIfg/ve/P2655RY8+tGPxi233ILLL7+8J/f5tKc9DVdffTX+/u//ftci+Xw+x3w+T/9vbm6eqk0CAHzgAx/A+9//fvzWb/3WnizvUY96FH7jN34DD33oQ096WR/4wAfwvve9r+fXdTLxqEc9Cr/5m7+JhzzkISe9rD/4gz/ABz7wAfzmb/7mHqwZ8JnPfAZXXXUVXvGKV2B7++S83Pdy3YbDIZ797Gfjz//8z/GLv/iL2Ldv30kt733vex8+9KEP4V3vetdJrxsAPPKRj8S73/1uXHrppSe9rOuvvx4f/vCH8c53vnMP1gz4xm/8RrznPe/Bgx70oJNe1vXXX4+PfOQj+PVf//WTXta3fMu34HWvex2uuOIK6D24H62srOCaa67ZMxbXe97zHlx11VV47GMfi6uvvhrPeMYzcPnllx8RhDvSe1/3dV+HQ4cO4aKLLsKFF16YHvPnF110Ec4777wkZfnbv/3b+Lu/+ztcddVVuOSSS9KyPvShD+Hd7343fvqnfxoPfOADT3pbn/jEJ+LSSy/Fd3/3d+Pxj388vvd7vxe/8Au/gAsvvPC4lyVy1H/+53+Oz33uc3jsYx+Lr/qqrzru5XzTN30TbrjhBrz5zW/GZDI5+heOIR71qEfhWc961p4sq2kaXH311XjVq16FSy65BHfffTfOPvvsE17ehz/8YXz84x/Ha1/72j2RgL/sssvwhS98If3/l3/5l/jLv/xLXHfddXjOc56DN7zhDfiar/maoy7nS1/6Ej7+8Y/jq7/6q/cMUD/ROKNBPxJ2jPwfH1OhHRngJxPNrONcKeJuX0UISopZCi4a2rPUYFegWHA16MSBFoBGKfyLB54Up0SO5linKKe7g5GABF7Acje6snOgqXnyLJ3plj3xcq8vGq8mGU9a2QeUFXw5ZpAqynlCl5i7gNYGWM9gnxQiAJFtDQv7sfNFU+g6+5M/XuoG7WTEQBE6jM9BOrIQs+5WY3rMhJB3sQKJdaVcA2oIgRpQ8Ai6wKAcQRUGhti43lBAGaXHfECamArIw+tiEEzJrAXxAtFlx3rYJUJRIZgyrUtiSexB3FNjTc4nAVhCYFmqAC4+yRnm0hjoMxIkhJkwMCp9Lu+5cEHkHTsA2S9Uq/N/c0BQZCQ70DD73YVDI/KdldEM6FJ/rHZyWcIQ4+fM9IvXAtcw4BeLImlfKWKPotitDW86pmn0SFLD3b6ndo6htmHgr6kRZpPICIy+m20DE71VRFbIOw9sTeGdT4Df7GCN7YM1Zs7jrsalLviZ645RGeVrVwxhqAnnDzTGowLj80ZYu/ggRueMsT6pUZ29hsFsAtPUoLWz+fpRjrAyXEepDeaO5VvbKLlm43VhStyQUVuVQDwBAB1rL4NLQj51/efsSwBoC9oBIs6t7wF/i3KiIoU3a13PkwfoF1RLw5Kfg/govoPNbIZlLONMisMxnQQwOSJjKZe5E/ZH7icgsp6kmRUizw+TqItctlxLJcdiULADbGrHUs/mHrihnVCjTNwvwphTdg578A746Rbcgdvh6xq2btBsTeEbC1vPWYKvKKKEcplyKlo/m3OqchgfR7DKoImymbXtwL4ekz6uitxDhbVs4v3LaJYsrgzBuJol3ZspaL7FAM28u5apagSlyyR3mZQHFiN67UkTXfo++NgKMDmzDpVhsMdDMfu+0BgMVhDsnLt2q5pZZM7BbW8xaFM3CRQdnMUNFjRe6zXN2Ah4FruABCKvnTcInkpFjlMRApSHALSh80iWyLdFGhgF8MuPSeCOyJ7veMj+FBECGSjV7t6YJk9F4jKX/QUApXv5inxPOYuyKDHQhPWqwFrVSfXOGtfJwMp9VSS3RYKbOnagIQXl5jG3ahmQ8j76dwvA
xICdby18yyw/77iRSf4E4JPIwWLXuKSEoGcT0GzC0v/UJMWGoiwTELlemZTPNvuGKI1OCgDeB7Rzi5rW0Uw24y5x6dHbBs42sLMJ6kkLrQm3H6phSGHfqGAv4lEJoxWGxmB99XxmtzbbINLAfMYqEaZAmNepaQzRd4+Zw0WnbGKKrgmSqLNPiNYGkrfmc5CgiL0uFUHJcY9NikqAUKW6eZhhIF8VRRpjystv6sRSRDxuygwAbxC85eW4BlqXpwX0W8aZGd57vPSlL8VjH/vYxMC6/fbbUZblDhDk/PPPx+23354+kwN+8r68t1u8/vWvx4//+I/v8RYcPp7+9Kfj0Y9+9J4t7/d///dx8cUX78mynvGMZ+wZGwkAfu/3fg/3v//992RZT3/60/FN3/RNe7IsgKVixY5mZWXlpJb1jGc8Y0/X7corr8Rll12GRz7ykSe9rGc+85l7yvi5/vrr9+yYPutZz8LjH//4PVkWwADnXq7bE57whD1Z1sUXX4x3vOMde8Zke/zjH7+n++0FL3gBnve85+2Zp+Uv/uIv4vOf/zxuu+02fOlLX8Jtt92GT3ziE7jttttw++23w8dmRiLCeeedhwsvvBDD4RAf+9jH8PM///N46lOfimuuuQbPetazsH//fvzDP/wDHvKQh+Dqq6/Ga17zGpx77rkntF7OOTz3uc/Fe97zHjzxiU/EhRdeiM3NTdx4441417vehW/7tm87ruVdc801uPrqq1EUBR73uMed8PEV1ua73vUutG179C8cQ7zxjW/ck+UAwHQ6xf/+3/8bj3rUo/DGN74RH/rQh/D7v//7J7y8yWSCN7zhDfjMZz6D3/zN3zxp8Pr6669HCAH/63/9L7zsZS/Dk5/8ZDz/+c/Hv/23//a4WJfXX389XvrSl+KJT3wibrrpppNap5ONMxv0A3o24ioVMXJWGDogSJ5LZGwaRTzBFEaf9NmJFA7S/zvXI/0uoefjRIqLJxSBKiMdxjmL514a+b5MXnae/RSEQdczX48Amviq0Mo+9u1bWUcYrCGYAqFcQSgG8KZiVp8N0Sut60BnyaduPURuihCBE9V1ZWtSUAKiilRWCH1ZRKDP4Mu6aAHwZFdrBvl0mRWJePuTf17W3a5s3XUsFxVUCCirVWhDCPDQCrBeJeYZxWMvBQsEj0AaCmVP3qwH1uw2NmRyfYLH9HRF3t2fS0uFxPrrAL8c7APQk2BcUGLs+bfx//33BRAScGgR9Mu93/rf67wEfWBvmRwAWqwxCJAvgJ/JitK5rJMw/wpCB/YFn7yR0ljLmJ5J7pV87zoWsvEu+zZ552iTfIvih/nctczS0O2MiyiZnFyIzwvnkpycAIB5F/xG67BpPb4Yu+EX+SKTeJC+POcr6J0DjfWZxUWTFra2qA/WUJrQTmYsKQVAtw10UXIRSlE8lwwAD+N4/7We5VQBnVgqLR9UeNUB7ECAc8wC9Z5ZoFIIlGNRBA1veKzNNaH1PnlCts4nD8BWxWKoA1rEYp1lmddZYzFrOgAQ6DyHTCxK8h8zE9pZg3s6pAh+Kpe/jPt27GiaklDUB0R2kaWW4i8X/E0fdIgsv961KgMIdrvJUXY9VUBqrABYkUDuJwQGroC9Z7DnQOcJLZeIk8uYU/jJJvz2IfjJFuZ3H4KrG9hZg3Y661hEVQkqmNnCnsgjqPEq1Hit80IuR3CKpdEb13nlsTQ1N9X40DHtZVUImRyxkpxaJeBUtdG/uZkgTDbhbcMMHYBzKWH3EXXMTc15SsrDvO8YX/muUB0DHuD77VxxKmdIJ2lKHf1Qq2LIgEMRlQ28g60b2GmNZjPKfGqWSBxUo3g/UYk56oG0DxYPnjnhA3p6Q/Zn3kTlYw4tQC+ANCfJo1M56Zp2tOqkNFliXfU+DyADdTJAH+hykyS3spMVmJhkzkJlszcVPIK3MIoBMusVVkvN9+FMjj338iOR3s5ktaUJUMNHCVqWSkdk4ImMZHAuAX7BeXgXEFxg
ic+ocpJk53eRl09gqPPskxebpvycffhYsreE8halZn/yUaG52chxzqBJYdZYPkedRz1t4X1AOV5D8A52PkuAX/AOvm3gmhnaeo55QdjanuMOQzjrUI1RoeEDsDowsI5lcleH61C6iA2ZJct9AghmwnlbbNZMoF8G+AnrL6mdeMfnc8wlgyLev1pAOGlcNHws5f4Q50kBgAp8fVA68BxMa1Ze6cl69q0h0jjPfp8ZlPz7IHNMcs57Hcvc6syMa6+9Fp/5zGdw8803n/LfetWrXoUf+qEfSv9vbm6eEFvjWKOqKlxwwQV7try9AjkAYDAY7ABNTya++qu/es+WNRwOj9sn62ixVyBMVVV7LiF3xRVX7Mly9nq/3ZuP6b153U6HdOWxxsmC3ovxtKc97bDvOedw5513JjBQHj/60Y+mz9xwww245ZZb8OxnPxvXXXcdPvGJT+A973kPXv3qV+Nd73oXXvnKV+JlL3vZcfvfaa1x7bXX4tprr8UXvvAFvPKVr8R73/teTCYTfPu3fzte8pKX4Lrrrjvm426yBtm9Or5FbK66N8W+fftwww034Ad+4Adw5ZVXAmBW+HOe85wTWt7znvc8POABD8BznvMcPPaxj8UHP/jBkzp3pTnCGIPbb78d55xzzgkt59/9u3+HH/qhH8I3f/M3n/C67FWc0aCfUmpX375eoWoB7FsEhBKbSwrtkOJA2FWKMxX8BRRSHdC3G6tPAMhufdD7rbQeuPfUHBIoETxUU3OnbL3J8ir1hOUyY+FHkQbKKk0aaWUf1HAM2n8eQrkCV63CV6sIusSk9bAOaFuHxgWEEHawvKQgJd38AqJUpmPLlaSg4bkD3TuodgrVxq7e6UYyiJcJvvj2pfUVtt+inKfITwUPFSeiykdAxjEjKklJkWZZ0KJE0CVUsw0ig7VyDBj203HQidWW5B7jpDUUFf8WdR3wMgaONom9N42VY42Z5eIN+9YFNCKnGbpikosd5y6I7GIH2gHRb0ZA3wj4VZoSk0tTn0UbwN/PgcNFWUZhggEda283L7/WBUxbl8AlkW4k4u/n9+V+UZp/R1h9qRgVgT5e0axYpjR6bkPZdSIs/M8/FgvlAKDL1N0vBSsJkRM1CuzN5Foes7aFsjXC5BBCPeVzd3gAVBjYes5ycnfNQBP+jdoHbFuPTdu/jh4uvjx3+PLc4X9vN/iaWYvzv7iN+eYcqxcdwvzQNta/ZhvDc+9A0dSg9bOh953HEsKDFawO1+GMxswyC7jxAYYCrOcO/7n1qJ2CCw7wfGylQAjr4eJuHASCCwGV0bGInAPxGv8/e28eLUtWlnn/9hCRkcM55547VlFU0aXyiSBOtBQltlMzqK3tgK1tu5zAoUvA1sJGRBQEBQRbWLoKsFtFeiFO7YwTio22Ao2CCg2CA1Dzne+ZMjMyIvbe3x97iMg85966w6mqW2W+a52VJ6fIGHe8+33e53kcilE41tYGkDece1ECdFKb1tOxbKgaibENs8ayUzZzsrBd76EI/tlysvcOWsYyrsKQdICtruw0tA0vMfbw7EtFfqVxUdatw0qPDL8I/u3F8ou5nXUOJdt7XmRKd5UV/P3T3xm7k6T436XeM8/3+Su978ZiuGhK3M4m5txJmpN3M9vYYefuU5iySlLLQkp0kaPyzHukHjiIWjuEvvZGXDHC9NdwvRVcVjC1gsa0agmRwVXbed/SdmxSc8BpHA9jntVTwst5llvIchO7dRa7eQbX1D5vil6CTQvCRZlWI/xx7jLlYh4Upb0FJIbOSk9TNoaZCECLscjK39O19BL8jRbo3gBtG8Rs7H2+pMLWDfW4ZLbhZbRkpukdGPm8T+c45SVjo1/sKJMXLdl5teVZ51ufmFt1IwK/xno/E48h+OOcpMyDrHU8P6SMjYrzqhpCtHmpZ/8FgFfnbb4cHoFOI5LdPdcJwL4Qcg4ATsC/sxRa4Jzk6LBHT6skkT0zNoGYXk7ds+i91LpikCt08AOM
jYreBzs2SkX1j4W5l7W4xY6yEEqE+UbIPUdaUkife/pmqJpqe4Le3PJ+k8XA51TKg1nSWUaDdbLgV1kowWpPM+ppdmYNg0xxdLXgrrMT/lFJxjteDlAXI2SWU26ewtZVAv7qcody6zS2WUXninrmm482JxVHVgt2qob1IuO61YLVnmKYFRw4cJ3PqXQP1R/hpjuI3F/LCcCHcE1Jfz1HhZQI/ml/jQvbQD1LjWq+cSNLwL8LDR6+e0C2gJ7sAMNCIophYh5GH3XXkadPv9cr2uMVAb/oA9mxW1jGMu4rnvOc5/DWt76VP//zP59jsF1zzTVUVcXGxsYc2+/EiRMJRLvmmmt4z3veM7e8EydOpPf2il6vR6/X2+etWMYylrGMZSyGUoprrrlmbjx2zvHCF76Qz/qsz0p/n/iJnzgn/fgN3/ANfM3XfA233XYbP/ZjP8Ztt93GS1/6Ur7lW75lDny72Dhy5AiPfOQj+aZv+ibG4zHj8Zj3v//9PPvZz+bVr371FclXPhxje3ubj33sY+n5d33Xd/EFX/AFly0//Nmf/dn89V//NV/5lV/JZ3/2Z/Mbv/EbV8xI/qzP+qwr+v6xY8d46lOfum8M3yuJhzToB/OFhfOy+zrhhEydiCk6Hal7Mfm6MVfcZ4HJI9uJcgQgEoCzCEAuFO8vVFzYa7J/vxQkQhcnkBhIopklGSrRBFZQlMiBBJ6JXuEnvcMV5GAVl4+8v0xvSCNzqsYxbVwqREWvkUW/EaCVmUqSmL4IlYXnsim9LEw9RaTHEqoZdrLt160zmfWdrDJ1jiZpz722H9pJZAT7jMFWZfLtiNvumir5FgrbeLmyuvTFTJUjUzFTp/WInbWx2Nm4QFIMZrMX07V6fxei9vPcivJIdi8EPf5ep+O8bGx6rEMHNLTAqRWCTHVkXuVuJleE/hwOKSTOQSYdi1KhkSWSKbmrACoEoctdUukgC6ZEkIUMEgKd4xUXG8/p7nGMY4YUzBWh2g/Ivf+PXfGL3fKdgpl1raxXXdsE+kVp4nhpdVmIfT3A13Uyf13XmS+i6zwxeLOmJhtuYeuGfJhRjzW60GQ7FfllyClZ4HjZYBys3+sLtLrwcnXWWFaGq6mgLFfBOosUEvI+hc4BixQiMR+tAusExvlxoY47o7NfamODNLH/LESZz7ZhIxY0pYoAIBgrk4RobV06ll5u1GJyxayx5FomkG/RDzIW12OYiwRJ9zOklPuia36h5S/j4Rlz3YWLudSiUsEeeVZ8P0kUWwsd1hAqFPk7LL+918Nfd851xlHYs9krfn7P5ez98nljv++xEfRSkPIKMx1jx9FbrKTamgSpwSb4bGmEkshMo4ogmT5YwRUjXLGCC5KelQ2+uAHsMyGf2FOVAubuk16dgsRQjjlXkkVsZr7JKzDCsRYnjZf/IzRRde9PUidgJuYRi7cL/5pI0u2+uUtSC5dkuD0QZQEZ7mf+PqdlZAspRJbtyYiRmQ5spawFpUTYzks4sA8G4Heh3GvxdRPypphPdwHWCOxZQDrP2HPxO4H1GHOWbtOTCrl39HbssurmVyZKfDYpHxEL4L9wLgD+nsWHVGnu47rXfHccCNupZAD2lKSnZfJfroVfae+PLT3IF5u/RKu6sOf+kypJjEYGmwhzA7HHiZFYgyHPi9LlSoikUmCNw1S1Z/uVE297IBV6OMWGpgdRTehlBUbLsG0OyCiUDPmFzx+2y4ZTUlCOg0B5s46tK5pyhzp43QHe309KZtNVPw4qyV06eE3nirKxQcWih3GOXGn6xWqQ2HQIIZHW+rlcV6o3gH1J2jMwAKMdQryW/DwR37wYjpd/L4z1u3d82KEaTO3vCbplfgupPJMvzNmcMS3TLwCOcduFs+05FdfFPPD+d8vc6qETzjme+9zn8pu/+Zu84x3v2OXB84QnPIEsy3j729/OM57xDAA+8pGPcMcddyRJyptvvpkf+7Ef4+TJkxw9ehSAP/7jP2Z1dZXHPvax
D+wGLWMZy1jGMu4zhBC84hWvuM/P9Xo9br31Vp75zGfyyle+kuc+97n85E/+JK985Sv58i//8kti2/X7fV796ldfyWr/i4rDhw/zx3/8x/ziL/4ir3nNa/jgBz/I93//9/MzP/Mzl73Ma6+9lj/7sz/jO77jO/iiL/oiXv/61/OsZz1rH9f60uNZz3oWN91004O6DvAwAP1i7JKh2kvOE9oJZ/g/PsYJdPTE8JNYD+w5HAgQru0sb4FGkbrQs1DoTWCfsx3ZyRZoTBOWi5D3vL86zvcKUU0QdZgEOouoO2DfZMd3m463vbxnU3ekYDLv39cfIlYP4bIBZngQ1xsxs4KdmWf2TRtLY0ndxdE7KzKuetp3dvd0C/D1lCBXkhzf4SnKqV/PZoasxtjZ1E+0p+PkLQj4dYuTxl5/3qMiRgcITJNZ57z0T1hWAvtq/3zuN+IyoGUOBl8MEaROhc6hN/CAoO7h8oH3vSkKGgdlBwwYZhc+qg9U5/l+/kZlHTuVpa9FkG1yrXQuvhDadBh1k9pQG8dOkFmKoJ/qFHlAkoXdHtkAEShOMrBhS2wExKx/LYJ+3SJyF7CPvotdwCaCaINMJuZEZIXF73uwCJAuyJD6tnipxC7GsPc+AdgD1IvRKYrFsSkWda2zqXjX9WpqZd2gbEzyoYv7KQvXUl8LtBIMs4JeMSDvr+F0gcgnKK0RvT7ojFFdoYuceuw9kqxxHNn2nehD1SQpz4uNsXGMJ76wc6xsaMqGalwzOraBrRrv8zfeQh+6Frl2CLFWI/I+olhD90ZU8dK2bUEYAqDcWKwUEGXNoqRrbZHCpvElsgaSH5BakGKN5w1ejquxjsootivDrDFpX0Ir59mV94wNDIsgoK0f+MLUMpZxuRGZ6e19sb1PuS7Y1x2zFvy8RFfSUTLvRZvk/vSeHqRx+PVefoBgF8gXn6eGEAQ4d9FMrgcyIqNdAdimlfXcOkN5ZovZxjazjXHyFctHHtBSRU6+OiBfX0cduQ65dhgzOoLtDalEzqz2uVQV7g3OzbPnoWXUzatRtPe8OP4lHz8BcjL27KBqjJls4ybb2Om4lQGEALzlcw10jtZHLo6nsZkprpK/t7q0XlHqu9CSOnic1YH1b0PzlxAe2MyzAhfyTZEXLSCa+amM31/eS5pen+if2FfiISGLfimnbmN9I12872spUj5QGUd0bxQILA5Dq5jQ9XaE3XlU8siT7bkR1TfA+/MJ532pRfB7i+GMaWUas9yDNJ1mJSHaprfoAemCr6dBAjblYsNcJSBYq/Zeq4Sgn/n7+SDzLL+Yuy0ClElmOEYCsmT6U7lKf7kUVAGMiw1O8f+1TDHSEl0EqX1jaQJoLzPP9pNNhVHKq56YCkyD0znD3gqDXkHlFH1tWesphrlio6xZ72f0c8W9G37+tbGhUdqDbtVkEyEVJrD9TOVlP3dOK6rBGtW0oZo2nDmr2ZzUHF3tsXF0xFbZsN7XGNtnmEkO9tfJdA8x20ZKjTQVBAlRIAF8yd/cv+i3s3uOBnlPUeMZfEon1l8Cf7ssv7AcL/GrEstTKOvHktBsmRiie4B4CXQMn3OmAeG9BYXZH6+aZTw849nPfjZvectb+O3f/m1WVlaSB9/a2hr9fp+1tTWe9axnceutt3Lw4EFWV1d57nOfy80335w88p72tKfx2Mc+lm/8xm/kVa96FcePH+dFL3oRz372s5dsvmUsYxnLeBjEgQMHeOUrX8mzn/1sXvziFyeZyFe/+tU86UlP4p3vfCc333zzVS2p+lCMoih41rOexTOf+Uz+5E/+hNe+9rX8+Z//+RUx44qi4E1vehOPf/zj+fZv/3Y+8IEP8BM/8ROXxd7cj3jGM55xVZw3D3nQb69O4r38ZXY970xmumyZRVaSEKCIEz/mXo+/GwvGuwC/C7ANL2X7HqgQpkbOxkEap4HZBFeOMdOx7/Ju6lYiB5L3g+gPkYMVKIbY/houG2CLVUrjmDWOMjD8IuAX
2VwtZuBSl3EsTEXAr68lCossx9CUyGqKqHagrjA7G15ys7Nu3XUSuWcfkhfzrARnfaGiu+3O4eoKa03b0V5XybcwMfyCHwjQMv52MQt9t6zoDxE6Q66se9mf/ggbfABdVqBVTr7QqX6h432l58KDIVcVize+2CgwuMSQk7QAYG08u29St+BflOVUEqQVyEwgLUn9UnbOmcSy7RQyYweydQKd/G6EZ0F01jEChYuFri5DTwmBxKGtQwmHtiIBcTG8TK3ACEC6VLAWuMQ009IXSJJX33nGIyAx9aJXTx1BpACUxv/rwJqNQN+ssdS2BdaBBHYVWtLkmp72YKTRYLSgH/1fnEWuh6JNU9PvFTRlhco0KleYypCdnFBax/Gy4XR16WDW3dPaF9WOj7HGMdvyQGJxZpWVsqJflajJFtoa5GgtXHM1eW+I0xnKgnO+OAheujM2DsTtjnJnEfyr8a9JYciUpFCSTFlGufaFTtWeP+l4Su8lqKVCCsEsePPoCBRKwU7pT8ZpZci1pGosVQfIjxKfxi40HDwAsfSdWcblRtu01Mx5s4EfqnYxdSIA6DyTHSdxEoTrAEKLedlezQ4QmFkyyXZ2865urhfHdyn8dQpwtZ6RXibY0RMNoi6x2xvY7Q2qrTFNOfMMvyizqAQq02TDgmJ9hd6BFdT6UeTaYWyxhu2vMbOCaW0pg+Rh6+fW/qbfV/5GGBtidMf7LAuS0/F+nCuBMJVvrgrqDnY69nkQIHv9lnkTcisnVSvbGov3omWRRcAvHkMhhc/7JCjh0Vov++3v8cr3bYT1n5fONo7kvaz6Q9xwlWxtFQBbNcjcM8fVoWuQKwf8Kqkcm3sfjQd/urW/4WU7nWebhRwgejnOmrbhpyvZmaRwnc9dMiXJwIN64XOLOZAS8w2NXc9hz/gLM6To7UYrpemCx51fXz8+zPlYB09Ig0xgZPQj9N8hNezEiPndIFMJ9IvrLzt5mxPCK3zE8SUw1mIzoMw0MvegscokKvd/uRT0laS0/kRVwiWmXxEahiIz0BqHqRvq8RSVa/TWhm8MlApXV8jhKnLUgMp902RW0NM98mKVyvrrsq8Fg8zvo5Wepmosd+UKKQXOOsrc+x2bWUlT7qT920zHwfNvhHWOfOKXsTmtmYU85OhKj0xK1vtZWH6f/qjwbNmmRGSDNManeancoyywOJftjtnWggz3iu77nTlwtElIx0V5MBiVtz9xASsOx0KziROtt6BrAecHKpa51UMnXv/61wPwBV/wBXOvv/GNb0w+Qq95zWuQUvKMZzyD2WzG05/+dF73utelzyqleOtb38ott9zCzTffzHA45Ju/+Zt56Utf+kBtxjKWsYxlLOMBiOuvv56f//mf59Zbb+UFL3gBN998M1/91V/NiRMnePKTn8wrX/nKqwLAebiFEIKnPvWpPPWpT2V7e3tflvdf/+t/5XGPexxf//Vfz4c+9CF+5Vd+hfX19X1Y20tfl6shHvKgX4w5lt+eH2gLTF1pvBb0i0UTt6sjeE7Sc6HoFAsmQogWgNwD7IuyovH//YhFEOeKQR1TIeoJRFP68RauHAcZmNKDanOG7zmiVyBHB6A3wGZ9bPCYSYCfsR1vGZvYR8aBtS7I0PjwgKlIxYdcSZTzRbLo2yeqHdx0x7P7xlsJlEtdpXnh160YIPojnC5woeiTWJfWeAnP6NeX2H0e2HOzjpRnBP3q8F4A/vzy2u/U4ynOWC/NFSS5spVR8L2pkcUQ2dRIqbFCIoyfsCqh55ij92fE33ggwT8ZiomROeeLMi5IaHqJrxjxHCkb0wGGPWCWqQjGz695vOY8cBeKU51iVVy6sSJd28b6zvcuA6/tZm+LX3H9/fvh90LhrDJ+zOiKSXnGiT+3pRMYG9ZBBLk1BAaQC53ncQyaB/xckuDqsvmiX1Ps7o9+g6WxzALoVxrrJdI6FWAlLL0gAeX3teyst0Dlmrw3wpnaQ2lr+OtAZwyvmaQiRDWuUbni
WGDsVdZdtL9fOs7OS33mUqBOTzgwrtGFpp426XeKWYnQObIqkYdk4vX1BusePLYWG2VcnUKJILlqBVJY7+kXQOM4vsfIpKXWMnkCoaWXtGM3YKyEoBYOh/fBqq1N54J1niHRWEeuDUrWTAL41wWDcy2xelnEWcZDJ2I+JaJawZ4fWmAmh8Kx6DAAHRYRGh32+q7rJFdzeVEA/i5FjtFdQEL6/o77uqfGHAgZpD3LMa4c00xKbN14H7/Aao8MP1Xk6EGBGKwgVw743KpYZWoFs8YrJ8T7QXd860pcx9pxvC/6e3GQUO+MdUrgQYDojVzPEMZ7iSWwN3p+hbwPnc0BBMI5nG2QKLqtIF7W2hfmVQSKnAcdMUHesLOuUkagUKQ/SdsEo4JqgiiGyMEqmfV5ly5yVNHzDKtiGBhk2Ryw8HCKdJ8KTYskP2TvPxylqaXwQNWiZzGQQNoI9u0F+rVNUaScGSlxNrJ5pQdspE72AIA/b6zBefFtZPR+DPMjJ0QC/KogR55Y+p3hIlOCTMmQ/8m513zzTudcDus711CQWMVyTrZSKumbmTKN7mt0kaGLBp0pcmfIhMCExSgBfeUBQc8I7ORPdYMpq8D202TBTzLOD6Q1ngFoG1wz84of1tDTPWRvENiVgtr06GnJpDYoKXwD0axBSEFdrlEHGUzbYfzZpsLWvuGxLkZIJWlqwx34YzprLINMURqbclqTCUb9NUTdQ6ipB8/OB+h11Wpio2RnfBfO+TE+AL7d5YguiLdwD/Eyq+exWQjnTvSC3Ws+7cca6+Vl7QMP+i3joRMXkxcURcFtt93Gbbfddt7PPOpRj+L3f//393PVlrGMZSxjGVdpfOqnfipvfetb+bM/+zNuvfVW3ve+9/GXf/mXSCl5+ctfftUAOQ/HWFlZ2bdlfemXfinvfve7+fIv/3Juuukmfud3fofHPOYx+7b8h1I8pEG/88o47eGRlQrqFyiudwG/3R3T80Ud2ZkQR3+Z84I3qcC1e70uJvYqKO312pUOP7KaYk/f7bu7qxK7eQZXlZhyFnwvJHK46uUr8wK5fgTRH2EH657dF/37gpRnZbz0UGQipd8RvqSulEgFiUz6wnuuBP1M0FOS3FXI2RjRlMjpJpgKt7OZ1s8F6c3ok4dUvrN2sIIbrmOi7000hTd+gui9akrvIWGqUMRokErhjEFK5f36mtoDH7FYYA10wD8PNhpMOcOUVSregS/cxUKUDkCiswZdDBFCIqqxZ1X1Rld41C49HsjblBKgVKdDXPlCbh2ulcaKdG7MGg/GxIivZ+E8GeWKQgc5p0zNSVUWSiZ5p7z7ezEC2B+ZcZElF697aGWhbOg2F4K5z0TALwGEDkzsnO9IacV1FwEAJLD/RLhqF6egezUdeBnP9v/o0VOH/1sZ1IayadmRnvHXAqbtcQjyllJQGkuhJLNCMwqsv8o6CiUZDQ6hixXcbBWZDVD1BDk6QHbtGUbXHac4tEZ5ZpPB4QHXnBhz413b3LE949TMcE958cUXC3x8UrNZGw6WDZ/4gVOM1gvKcyXlmS2KQ6usjUvygwfQ07FnuowOgG3oZX1UPiI3wo810mKsSHKcnungWTCT2jAL+6ds/P6ZEZk33ifQOEVPC5TzRzAy/gRgJOTBN9BLqGWs2Yy1yrCZ15SNZZBJZsYyrgzbZRPYfm3ZW0lJky/lPZfxEIrI9rcXYHh0cisRn0efJwkgU/HWMzPC9xZ9v/BF5MQ2WfQPPF+uJGTrJ/Ugx33dU3MpEBrkZBMx2cBunqHaGifpZF3kFOuD1DA0OLpOvjKgd8016GM3IA9dSzM8RJOP2J4aKmNbSU9axnkEdJIfmYw+rn4M6ynRHtsmjNedRijRVPNNUdYmGXc5XPUKBtkCOyf4P2MqRCXoFatJkloJENZLs8djqXSOVDrtt8o6lIPG+iYK6bx/bqFVYKe3jTzGgc772HyIXAG1fsTLyhcDn/sVQdpz7Sj1wRv2Zi09zMLn
MrEZyLJdGTbL2ufdobEusvwzKQNDzjfVeZCPlBvkKkh8RnZmAm58nr2rCSACakqDcziVeyDRGpy1cx7YVmfIXngvfNcJSRPmCYsqBk0HCFQCelqRyfntyYOCQZQmnZsTJhlR76stdA66TkxVVZSoooeqG7JhQW+1wRpLUzbInZo158iDdC4QGIAClanW0682mLKiApzxKiD1uKRXNWTDLWR/iCvH/jcHK8n7XA5W/b7qr5FlfVaHI3IlWO9rlIC1nmZUZHxQS85slgghKCdDyrxPNdnCzKZJ5rMJ4J8cb2Gbw5SjIfXMUJUN926UnvG32mNSG64d9VgrNOuFIlcD+sORv0YXlGnicRamDtd3dV62X7ehNY0ftvHSpl1Fi8DS9QxP3TI+95J4js2YsTHTNn6MCedWfHTO+kaFZSxjGctYxjKWsYx9jkc/+tGYjuz4K1/5SqSU/OiP/ugS+HuIxKd8yqfwnve8h6/92q/lSU96Er/0S7/El3zJlzzYq/WAx8NrRrxHcajLoiH+3ymGdwE/Op+5rJ+/yHWKr8efua8hY6/375dhxjYenJpsY2dTzM42TQCzZKZRufZdtDrz8k7FAJcNEuBnZE4ZfftsKznUbbSTErC+iANtx66WpIJDLESIyheSaEIBqpol9p0H+zqF9CipGYpSNnZ4K506vZ2QCLvg6SiEX47U/jeU9SbzTQPWYPXYg3+1l/gUue/eFU2NtdbLi2UmecrY0KIciwIxkqxQWl+dipZXe1wJMzDJPoXJv1Y5Ikh9gg3SbAKQjHp+/2TKs9IS6CcFUgpWckUmJcPg35KKmUEGVgt84XFWzXcm4zuLhZBIpdEqxwSGlold8gthnUMmAM8Fv8+2M15JcE5gQze9cxfeQ3F5Dv+b3UTBdQC/Ra8+y7xXk2c+0sp4hseZ8X+1cUxrM+cvB774a5wKfnQNjfb7ujYuSUs1YUf0dEZ/sO6PX91DHbG+WKVzVoF8dYCzlnyYoQuN+NgGqzteAu7UrKG+hPHzXO39CUe6wZydonKJUMIvf3Xou7nzwnfmA1IXONuglUbqIrCwvcSnto7GgHH+3OhZOSctFz0ibdjX1nn2Z2TLWEnyehRhnyna1/yl7X0dRdCY7Wm/vAggpg79RiVvKy0FpVnKe8a47bbbePWrX83x48f59E//dH76p3+aJz7xiXt+9hd+4Rf41m/91rnXer0eZVmm5845XvziF/M//sf/YGNjgyc/+cm8/vWv59GPfvRlrd8ymGN4LAJvXWnPeKnPAX/xeZT6NFEO0HVAhAUwz0Uu78I6dB87y08fYXe+Zh2BUb1/edKVsuNVlEesptjJFnY6xta+iC0zjVCSHJC5Rmaa3oERvQMj1Mo6crga8qwieIxaak9mDjlWZwdYLw8af1MHICdXklyCqCYB9Ov4sEVGZyzYhyK7cM6zonoFAGK4ClJju4yuBBI4vzwjcbbxbDw6YEL3t6w/f5T0DO4onS1EaPBx0WMOMtnKLkdAx/sj5zhTI4erWKlQENQdvK+07Q3bvO8Kj939ERfbyHehiOe9CE10jXWhAciknKA2XlXBM+Wgpz3AJ8N3lYx+yb6RKeXetglNcgsA/F7bEsH34PPnm+gUSBO83QzOGERXGr+zDXEO5puywryB+XnbXo2CuZKBkcjeTZcL3qFCKc8wS4w/hco9008G+fKsr5Nf36C2MGtS/qCEz0OjtKczQVq1MgjZ0MgKmfvvNhP/mEE7N7EGUQXQz1pkfxh2q782Rpl/fnTYwziYGcvZHQ9olZMKKQW2GbXSqYH1ByT2X1NNEROFziRZz0uE3rsxxVjLMPcgunWeXVtor0YR/RqV0CDYDfjaBkHeAvzdfZtOjaYdR0zlAb+68o2RMOcF6iLzNnq6qsyzRfeQecYoD+6F3xMdaXzhXPp7oGOZWy1zq2UsYxnLWMbDPx7xiEfwN3/zN/zzP/8z73nPe/irv/or3vGOd/DKV76SF7zgBUvg7yES
Bw8e5A/+4A943vOex5d92Zfxqle9iltvvfVf1PF72IB+3QlztxAUC/pdH684R+gWSy40bYjL6jL+EijQOVfmJu3n8xLsfNa4eb+ay4n9LGgI22AnW5jtc9jJhPLsFtZYnLHoIvcTESn9pHW4iuutYntD7GCdqXFMZ4ZZp2O3y4CC4NEiBFZ4LxFBW2TIlaQXutELJZBNiainvnPU1N5wvipxHeP5xL4L6+SByCFO90Dnu1meQiO1RmhwOkcYz/qLE9Y0aYVU9BTDmWfkGV8Qc3WFLce4WemlO6sSq8fkWY6rK1RZpe3VRe5ZkcXQS3StHfLyoyrH9tfmJ83xGOzTsdzPuKJ1MhWymrYybk2FVholNVmm/TUgPfjUzzIOFDqwF+ZZatHr0TPWQiFTCrQpoamQ07Hv+LUNoilTp3AMF7q9nS5wOkOqHJ31ffFQ54lBZ2z7y5Ept1hUjl5JDlKdei9mcBc0NE6EbQqeM91164B5jZ1/bH1GI2jlvG+d9Y9lAPsmtWErsMwmlQf9umwzgFwrlBQMckWuJcNcU2hJT0vWeppBpljvZxRaUmjBKF+jV6zT668jVseoQ5uoI9eRbZ+jf+3HmZ0+w+T4GVavv4cjp6cc/PAZNrcr7pzW3D1tgifOfcdWY/nIdsWpXHF9Y7lmp2ZyeoI1lv4h7/M3KCeonQ20Najhqi9Q90YMdI9+PsAJNXcMZ4FhXGhJ2Vh2qoazUy9rOmtskgOLMsMR1I3HT0l/fsVConHei6oIMn2VdvS0oLGOQSaZ1JbVWrMza6itC6zLVqasEtlF7YuHe/zKr/wKt956K294wxu46aabeO1rX8vTn/50PvKRj3D06NE9v7O6uspHPvKR9HwxQXvVq17FT/3UT/GmN72JG2+8kR/6oR/i6U9/Oh/60IcoiuJ+3Z6HawgT2BrhoohsPGRbSF8E3Ly83h73NKU9409YRBww5wrILSAunG2/1/V0uoBKgr92Oz6tYczMoyThPsSVLkbbClGXsH3as/y2JzQhV8hXh0glEdLLembDwjOb+0PUseth5TB2sE4jNGXjGX6zxiWp5hhSxhyLJOFZaK+a0LMlYubl0TFNAvfmch6YK/Q7YxIrCq0xoyOgcg/uNF7+U1TTxASK7HpRBdlPpVvGULcBp6kQpkFnBUJqz6JWgszhEQhI7K2o/hAZ9hBAv6yPFRJ54BhqpcJNvdeZKAbUR/+/ednRKzx290fsVyOfo5VznTWGSW0802/WMKkNO2WTfGUP9jNG4T6fBZnX5J8dcm9tfdNUYnsugjzx2hfCjw0RuIHA2vIef9JZpFKpMW5u28I55yDdd5OEuaWdO7gAaodViL69baNgkKoN8p6CqMAi2rEqAMQAToVmxV4fkQX2aqaRWYYucrJhDwBTWZoySJLuSJqySYxJXWikEgjZAn+miiCcX1FTVlhj0dOKZlKix6UHFwcD//s6842VeYEYrqBW1hGzMf3REXrFCuDzsiIAP0dW/T3szGaJzhQ7mWI2HTEFbF3RlDteecQa6vFWAgIB6pnhduDsTsW0MmwdHnJokLNdGVZyxVqRJTnXmF9r6cF3LQVFPgis4A7TLx3nzjVWl15BpQpyodMd7PZGUmPpWkGIXj8Afdp7ggZfxybNy9scWmvPJhap+dP4c8k0gX26lPeMscytlrGMZSxjGfsVZVkux/kQQgg+6ZM+iU/6pE/iP/2n/wRAXdc45/5FgUYP9ciyjJ/6qZ/i8Y9/PN/1Xd/FBz7wAd7whjdw4sQJHvWoRz3Yq3e/x0Ma9POTvPb/OTZf+H8R7LtYoG+vWCxw2XCxy+DXFdcnGtb7FdtdBEsdupf4+4ux3x3MZuUY+jE3Yf/mT3E7OwgpkfhOXD0s0EWO7A/Tn9GeTdc4EtDXBfxiKBkmcioW0WOHbtthnEUQR4rOBK9TnAqduUlaysZCoS+m+8l84bt5u5IvtkFa6wuPUs8fdClxTtE1bXQAneOXfj+Y3WMaxGCK
NBVuvIWrSmSUG7UG3dRpWSL4C8qVA17Opxhh+mu4rL+rcPmwvWVIjc37yGrqJ+dCBqC2AZ2jpWaUSazezdro+un5ib4vFopmhigD2FeNEabGjrfOL/sKHngNEmWyGEJeYPMhLitw+ZBc5ziVU+HP98iSc531iKwEz/jztD+F2MVmBZIUaCxExbChpb0rJUrYbmO77D4X1qOV/mwC0Fcbz1iLgFWX0RelsaomAn+hEGUd4AuAm4F10s81uZYMcsVm4cG+s2XNSq4Y5Zq1QlMowUqvoD8YkPfXUL0hcu0oWX+IOnSG/ODdqKLH8MwmKleMToxZuWubtaxiszYXDf6V1nEqdtVvlBwuG7K+ptqpEeEYFqX3GJTTMfowWFN7f5xmBlJ7WV6dg9YUWlFbRy4thRYJ3IwyqDEGmWc/ytDdnlh+Yt7DSAnp/4I8rQwMw8o4bNYWpIFUjLeuPTZidv91hZ8vhJBp391fy7/U+Mmf/Em+/du/PXWYv+ENb+D3fu/3+Pmf/3le8IIXnOd3BNdcc82e7znneO1rX8uLXvQivuIrvgKA//k//yfHjh3jt37rt/iP//E/XvI6LoNdTRNtsb8t/HfzLN8IFZqY4v1zUaqzG122fYwwXicQqsMyEvj3XOc78X7RZQvFyzAu9appppEadI7dPIM5d4p6XOKMTaw+XYSieDFE9IeotUPel27lMLZYwfaGVMYl9jDM+/h1vZCjL1tsjMlFyIeaEjEbB9nWDqBrjM+bOmOYCOx2oTPIC880zIegdHsOSJ3YoMKEvCcAfHO56cK5kCRdnQ1MM8icwKk4BrcgRJSajHL6EKSzs8Kfg32LrKb+/CvWsPngX4SkJ/h91VOSc9aDfVHKehoeqyBrHSVfbcgXuiFEJw/HevAmArpNnY6Zk8of+yDPKKIkI4DqNLRIDarx6gWy8l7nQaFD6MyjOGIPVhcEn+WYd7lwfc+vbxfYjkCwRKRGnW7txctHKu8pqixOaq/iEfzIiaBf7v9UkeOMJRs2CCWwxiGVpM4VLnSPqlyiC88KFEqkzwnjEMHTG0CWlZc3tRZrrPcOrBpUPkEVvbQ/ZPBLlysHkEKCqTgwPBLyiR6l8Z5808owKjR3RlC8UMAxmnIHmeXYukqe6+CZf9U0x1mHzhRNyHmMdWyMehjnFR7WKxNyINGRfRUUVpKrOK5olKYdy1WeZFhj9PIB1KVvCADvhT7Z9k2Rs2nrA4qfUwld+PMpnDNRdj8CwECrtiAkmS58vhHnhdAqvZzvHnM/xjK3WuZWy1jGMpbxcI1/+Id/4E//9E/5z//5Pz/Yq3LVRpYtm7kfqvHt3/7tfPInfzLPeMYz+MIv/EKuv/56vuZrvoav/dqvfbBX7X6Nh/Ts+L7km7rFIJsmk5cee6mH2PD7zoEVIJ3DCZG6FPfqeI/rtF9g3X4XsuxgHdtfQ/belTz8wHtSqUwH75ShZ7ipDBe6vhPg57qyPHHiFqSmQve5pJWdUkHOKRarcomf0MWu0VScskGaR3ppUUKhqjPpElmO0HlbiIid59EHwnjwyT9ZYA0Ef5luQTNGOnbRV8JaRDVGmAoptQcoByvJu8R1OtqFzn2ho7+C0wU27+P6azj98Omcuc9zWUgvs+X8PmtlenQqBgpAxiJz18OjIzEnnPPHNHgyRoCv2TyDayr/vJxgy5ImFlxCUVUoSTbse4ZqAGBFf4g8cBjXzHDO4uwAl1lyXXTYwfNFp8gacQtgnxK7i1Mx5EIHUNxfi+NRlLeKcp4R8HO4BAhGwM9LU3q/nlj0NrYFKo21CfCbNTZJfZqFgp+SFXkAw0aFpp9rDo3y5Jl4eJAxyBSVzehrSV9r1lavRVYTz9RcWUcUQ1alojh0DmcsvdUN3w1/+ybrOxVKCM5WhtPVfXva1Q5OzQxKeFZd78QEaxxZ3x9DayyjwUloaoxSbde4qX2xSeng4aPJsj5aabKelxzOlUNJ7Zl5tfJjlW39IlVgCEjROWbR
+yqMIYt+VLHvwDhBlBjtnifGuSSZavUDL+/5QMXW1tbc816vR6/X2/W5qqp473vfyw/8wA+k16SUPOUpT+Fd73rXeZe/s7PDox71KKy1fNZnfRYvf/nLedzjHgfAxz72MY4fP85TnvKU9Pm1tTVuuukm3vWudy0LU/sZHZZf9GyL+U4c19L9YC/AT0h8e9QFwMBFOcFu85TFJxHxObvzuTgen6/zsnu/utg8bF/yteBn5SbbmK0NmvHU+4nlmmxlhCgGnvETJCrl6kGfMxQruN4KRubUtU3M4cWIYEgEcSLw55uoAuBXl54F3zSpSSlto1S+cUr5cSrKMIosx8bcJR/sZlwm78dOAT4U5XcBO/GccOEcMA1OSJRQKEGSl4xgTmwMW2RrOvCMw7B8C0jbeG/pi/BI3ut43tcx3u8Gu/2I2HzSWMd2ZSiNZ/6XjfWejwH4i/m2WchRBB4wE7Hhpak84Bfz76CYkADemLOEeyzQsr7isTYNWIV1FtH4c0kIicxCbh5k7S8UbU4TvArPk1ulxqqFeeCir1/09hMq99ujs+BT6dVLVKbTnyty8lGDVAJnnG9UzDzbD0BlCpUrVC53SfgDOGOxNDRlhQoXqzMGm2WYuvHzqLJCF7ME/smmgqb2uYVt0LrHWrGKdY5HrhYUSjKpDbmWvhmsscgdQVMZar3md7vOsJ2GQ9NUNFUF5JSTCmNsYidOKoOSgpVCM2s8qNjTMnlmj3KNy8Aigu2CRHWA9MoSZIZdOg5CSPKsaOdS1njAbzrGlWM/vlSe5SjBN0GaHgTGZ+vluNDMYEFJr5RRRMDZ1F7iM7A7xYMA+j1QscytlrGMZSxjGQ9klGXJ133d1y0Bv2U8rOPzPu/z+Ku/+iu++Iu/mHe/+9386Z/+KZ//+Z/PsWPHHuxVu9/iIQ36icWiUIhYjHJ7TBa7jMDzhVyY3UfgEIKsZ2D7hCkdCoHxbyKESAWC7nK6XfHn+52rpbAgBiuo0QpFHroYZCywe9BEjAJwFSWchEAKRxY2yIhWVlXi5XckIviw+UlkJufZWzStz4wwVZKgEh35FhFketC57yZvas8MiAWrLE+SMwkwFDKBAtJUnQJG6ynkrF9vkTrY5Xw3Ox1AUIajbnvz8japI1q33dHpzQ6YaBoE5b4Bf+c7Zx6oc+mifyNKgcWucCkTMC6qKaKe4MZb2O1zuHKCmY5xTRVYexZXVx7MM5ZmUmKqBlPOqMcltvZFFlNW1NMmyS0BqUCTrwzIhgXFoTXy1QG9Ayuo9aP+XD90DbI/wuUjRLGCkhqd95M8VDfiMY3Xc8TR5uTlmG8UiJ6Ai8tIywngYpcx6AJwbiyB1efl3Izzz+sgXwlRjpJUFFJSomQEAD34N62MLwIa6+UsIwMt+CX2My/5OSo0K4VmVGQcGuWsFppDg5y1nuZgP2OnUvSzHgcP/StkuYUq1siHq+jNM2TDPtOT5xhec4rhsdOU50pWP7rB1mbJiZnh7mnNZm25UImmtI67pzU7jYUTYw6eK2mmDdW4Zrixg60aeutnKWZTXyQfrHggV+eQ5UEySuP0BJRGZgMylTHqFazkWSpWdT0TwUuxxiJziuhLBQusI9KYpxJTAvoB2GtBP6il9/yr9ktn8BLigfKduf766+def/GLX8xLXvKSXZ8/ffo0xphdCdWxY8f48Ic/vOdvfPInfzI///M/z6d92qexubnJT/zET/A5n/M5fPCDH+SRj3wkx48fT8tYXGZ8bxmXH6mJJrJzAivDdPIrL9F8ied3vPdGKb45xl+433cHUWdwEkQAigBs4PNFSV+IbB+RAJHFtRLn+f+Cq3qx23Rf4Sx28wyTUxseFCh6ZCsD9JHr/H1o/ahn1akcU6wkGfDKyTB2+zE/3nO8xxhB8lqmcajQrTSmwrP8RF0i6ylusuPvq7OyBeq6flshn4qvO5Xh8j4uH2KQCVRFBsaflDjj8yzRdOQgIeVSEO55ztKVcnVRfcG6jq9Y
C1xCe47FpUZVDQPIwJ7EWZxtfDNWM8MOD13wMHSPZxzf7+sY7/WdqyUE/nzfCZKekWmea8mhUU6uJUVHwrtQkkKrlmUZ1DVEPUNUE2Q9BVPhykli38peH2cbnBgihMQKgcu8VH1lSTLkSmWoTKB7I7/M2TgpdyRmqc4uCP45Wo9ja9sGqW4D1V7DTVJkcLQNClJ6WU8hcdqfn7LX99KaPa9e4oz1AHxg6TlrUVmDUJJ6LJGZ9/oDQjOkCHmlQmUysf5k537rjMXQeKZfpoESmWuvnqIkMsuQuSYbbqOLs6jRCFeViMEK2lTYYo1Dw0Nko4KVXJIpwaFBzoFBxkqhObk140RPM5s2lCs5s2mDbSxNVc2B+aYxlOOapjIeJJw1bGxpNic1o8I3eY0Kv8z10OS1XmQcHuSsFp4B6DPbsF3OMW18/jSt22vdWEc/kwyzAU6NEVLhjME1FdXWGKEkUk388WhqxGAFofLEEDbOUTYuqFl0G+18XpZL31iVSU2me61XuGmBzgcylrnVMrdaxjKWsYyHYzzvec/jb//2b1ldXX2wV2UZy7hf4yMf+Qj33nsvAGfOnOGWW27h13/91x+2kq0PadAvdYJ35aT2CCF8UTdOHi/lUDqClCe+EL/I3IEWCLS0RQkv/3lpm3O1nGJCZ15aKpjeC6kCy2/gpRGDF0Pc36kzWwTWowTp2v2eB/+QXgT7bINomtZbJgJ0gVUjTE3ymomMr7huUrWSXfHs1ZkvUMUC4mI4i7BAXXpJmSgzGUGnWICUhHMqAFJRTix+Jkp8xvUJHc8urpOQuI6Pzd7FSzvPZrjCON85cznn0qUUs6KkK5AYUoI9PCpj53/HNxEA0zL7xHTLe0meOY45dzL5SUZgL4J99bjEWYspK0xlMJWhGtc442jKmnraeI+Vjnxj7Mwu1qfoQjPY2KF3YETvwA7DcoYajaCpPXA0OuBlW3XPA8QL57nfVtkCll2Pm8D8i/sR2sYDEZsEFpoQumMLrr2OwPnuaScQwo8txrWPAFJ6WeEoayWtSN49sQCvpJhj+VXGYhr/55wH/mL3d1MZpJZMK8O0MsEX0DKpPEjbBIZhpgoslp4SjIpVrDWopkRKiZps0ZfKF7rqhqy/QzWuEEpgzkyprCOXhhOzC7P+agebteFc7fd7/1yJ7muEFOQrmwBkg7N+GwGnFOSFP3ez2EwgQze4TA0AeVaQK02u5Dw7OYCtkTkAsYgpUNGvSLTNJd2jKAP4JwLjwBfeJZl0SOcAibVtQ8TDMe688865ycFeneiXGzfffDM333xzev45n/M5fMqnfAo/8zM/w8te9rJ9+51l7I6u3GMsojv2bprq+jBd0nIXAb+OBOQuAKnrtxuBv/PkWPelAvFghatKTLifCSW9rGcx8B7Jxcgz2LIeLuvjssIDfsbSJJnnFtyM3mb+vusBPyVFug+nwrjt5lkGmtr/dcP6Ar3QeTq+MsuxoYkiSfHFSDmTRtjg86XsHCuajhLDnnlZyr9I/rdKtNvgme8C5xx7Z/bzyxIB/LvQXGDX1y7iM4t50f1+Xrk2dwTfUDbHrqPT2Nj5WmLIOecBVOllrTMpE3s/srranN3n7cIsnCfVzDdcxd+TXgY/yr7GhrjG4fOKkA9KIVDSeal+VaB74JrMS2yGbXKL13x3G2iv6UWGn8+fRNrW7uOeERoVhJA4Eb3oGs86y7wKh8hyVN54z71Me3WI2u9nVTc44/83HYBH5TJcu97fT+UqKaN0gSDP+gNo0nOIgFHl1QuqBtPP0VWD0DmyqXxzJYDSrKwOEMB6P8M4mNaG7QN9lBTslE3wAgWlJE1tqLTENhbTdIG/OI42lFJgGseGkuzMvAf0qGjYCay/lUIjQxNBpgR9HceXdkcb54KndLurm8D+i1596CyxhgFs3eCMRFWll2Bv6nZOAB1fR5f8rMGPB5nz40NjXWgi1amh82Kv84dqLHOrZSxjGctYxgMV/+t//S9e97rX
AZ7ZvYxlPJzj6U9/OnfccQdvetObeN3rXsdv/uZv8su//Mt8/dd//YO9avdLPDxAP5gvzBOL6SIUXv2LInUP7724OT+IBaaOEuAWgLxuYck6hwwdyCJ0qkYw5IGM/ehClsPVJFeZvC/6XtaTYuhZakHGBWtR2ssyQce/r1NQ6Ennu37L6TwAFPxfiEbtoegQpaVidCeP8yuq2i5kqRLLx3+pMyGMhcR6Ck0oAmS9lr0Qf8d1JIswYEC4adjOJi1LOJtYD74TfsDMuMDCct5LJ/Nd9gm0NM180ZILy0w9GN3kl1IAG9eWjdJ4mVYJhZLkSjDQ80sRdYksN1uPRSE9g7Px3jF2OqY+cQfm3El2br+H8d2nmG3ssH2PB4xmWzOqnRpbWyazhso6KuuoQ5Gg6oA38bEbufS+Rqta0leStZWc/npBsV6w8ogR+cqA0XVHKA6tkh9YRa0fQRRDzwLMC+9BoyILQrcgn86Ikm3xNRkZEgH8jdKxLTPGv70I+bpQMI+eWMb5glwtXHpPW4USFmX8WETjfeZiSCHCOCWocrtLzrMKYF9T+45v5xymmWf8CQlTJdnOFVmv7QafVoZDo5xJ7ffDWs9LNFXGsdpfJwdEPiQLjODRYAU9KCjPbCGkZHJ4Qn7nFv27tzlbeUbKZm0Ym/NX7GoHd08bdhpfOLu2bJht+XNosD3BWUu/rMirEg2+cB79ZDJSEVgE2WUhJMy2w3FqwXp0jlOaRugEBIL3lzEuAnk61a+tA9fxLfW+rm3RHS3J/LCU2JjWOlz+wN9qH6hu9NXV1YvqCDx8+DBKKU6cODH3+okTJ87rK7MYWZbxmZ/5mfzTP/0TQPreiRMnuPbaa+eW+Rmf8RkXtcxl7BHd+2eQx0PqOWChGxctXtsF7xaY9cJ0mmS6uR34+66QIJyXhQSQ+Z4/cTXj69XWmGpr7NlFQTJdrh1CDA9g+mugvXS6zQdUxrNpYpE9yj7He0I/k0EmXQTp9OCFFyWLsV6y0fg/6grX1Cm/ctZ4EDDct4RUHviTCnSGE8KDj9qrGnQL8d3j45RGmNafy0tvivncSgKL0F0AkSWk5KMLWkLMuUUCuTyzmjZ/NE0Y49tzdb+BgAc8D6smyNl2u43NDNtfw46OzK1TYx3j2uJwZLKVl86kZ/ZFdlihAuiXK6RovRJzFdU3BGI2mz9PgtdcPD+SV1vWgMt8XoOkbCyT2p+flXHp2ovL7eucolegezYpePgNCDlSJ5xr/yrTSpnXxiGjL6EFKxzWCS91K/29WAoxNzdzxAatwNaX0m+Ls7jGA+1iNvWyutagigpVteCcDfc2qSpUrrDGJaA+SntG6XiZ6fBZL0EelwEe7LIBX3fGptdjyFyjixxV5AwmJXpQ0JuOUWtnUEeuQ5mGlf4abjiiryWFkvSU5JrVHv1csTmpuXejZHtc0dSB1VcbqllDU9sEADaVxauIWnRWU88adKYY90qynkbnisMrPQ4MMrYPDqiNl4bvaYkJSiix4TUyjhvbKlhUxqKkxPW8FLAqBu0+juxHa73XX1DwEEGuH3we1Rifz8cxL57LjRRY/PhvFeR57udgzeyyvO/2I5a51Wdc1DKXsYyHWjjn+KM/+iMe97jH7WLaLmMZD+f46Ec/yi233JKeL0G/ZfxLiLW1Nb77u7+b5z73ubz97W/nl37pl3jKU57CkSNH7vvLD7F4aIN+MMe2WQzpddjaYgVBLijKwFxosd0m9860f5HJJMTeIGJk/S3WtSPwmDqx9zmuZJlycg45OecN4aO/S/S+CACIW5TlcRYBZDJOvBdYX6ZCRrBvttN6s8WikfXSni74WtiuJ55UoeDQkZxKKyvTZyLDz80VKUNBX+oEMoqmAwIaz/br+gKmDmbb+I5V3Skqdth5LhnaBylBCJNg/3I0uFdComSOstX8smKH+x7HQCw8Xm1hHJydGl+UsR7wkK71hZkL56WhkpRpBD6N944x505ix1s0x+9gemqD
7TtOsPmx00xOT9m6a5vJrOFcbdmsDZV1bNaWyjrKS6XQApnAg37jioNnpwyV5MhHNyjWeoyuPcPgcJ/egRHDaw6Rrw4ojhz0HoCB4Sp0jugV6XpQ/aFneAaZ2+QpFwpNomnPQSUkSuW4hS71tJsAkkdgkC+zjsZ6IE+Hgm+uRJB3U/QaQ20cmbRkyhdorHOUwpJrGaQ+ZUfyU3hgT/hHZzzgl9iHxiGtACyiEQhhmGb+WpxWDZNKMcwtk9qSSUNlFToU+nRv6Au3/RVkkN3NSy/pVG37R2cszjqycyW1g74SnJoZtprzj8KldajGcrYyZAIOn57SWx1ja4Mq8nTtysL/vouNCR1p3sQqchYRut8FMw8EKo0zBUJqdG+IUpo6dqxDRz41HqH2eEXPIV+0Frgg9WkD86/Qikx6H0ZrobkfC0QPlcjznCc84Qm8/e1v5yu/8isBsNby9re/nec85zkXtQxjDB/4wAf40i/9UgBuvPFGrrnmGt7+9renQtTW1hb/9//+37nJyzIuMWL1PjK2IrOLVnIxsmMvK+K9uusHJiXnNa27QHQVGOJVdrXdP/WZj2Pv+jDlmS0AVN8X+kVeeLlBlXuwTGW4rKA0LhXXPcOv3e/R904I32zTHVoEndwrMreaqmX5xZAhq+3mVeF9B0jZyrdHRQPZYVepAN4mz1PbAXMiO77TdDUHyqWV9f93pWFjw95eoQS+kco0c1KiTirordznMbjqw1lENfGeZUEuVViDrMa46EcdQlQTdDYA/CVTW+vZ5IHFt1ZkDDLJtSsFmWzVNroy1lpCTwmUayBIcGJ9Lu4W2KDOmtarM+TaTch5K2MTWBMjVzL4CAoq4yi0JFcFQoVjGADfOkht16FJqQ6KBGX0Iw7+xdKJ0AgVpSa9L6QjyNziPSGjLHqahyQlBv9/9PYTOvfemb0+tqnRxQxbNZ7plzcJdBFSIrNmjqUXwT4VHrvgjwzAnlMWa2zylnbGYuqgRFEZbGdiqHIvH2qrBj30uXIv7Pus10dYw8r6ANGTGKepbS+AcXBuUtHPFSe3NDtlw2Zg/MmJpJ41NEHq3lqHbWpMI3HW+dRcN+iZQucNSvnPn+17CXQtfT466mnWet7CQcnQ2GrdeRtnkyWCyhHFENnUZOUEV3t2MzrbNZfz13ybG5sA9Pr1tqClJxG76PkHQmrPHoXd3qH/AmOZWy1jGVcWdV3zK7/yK7zqVa/iEz7hE/it3/qtB3uVlvEvMN785jdz7tw5Dh06xOHDh+f+BoPB/frb1113HW984xv5iq/4Cv7Nv/k3S9BvGf+iQgjBU57yFJ7ylKfsaQ/3cIiHNuh3AcCvWzdIXcOwNwi4R3RLT15+aGH5Xc8RMf/5WAzb6zXpWt+/qyG6jDI53cQd/2fv8dIr5sC+5MERC+mpY18l4E8LPxn3BfbKy3TaBtGUYBrvFRIKUXY29V5tAexzJnSdR3BNyjkZHpcIfGp+0hgKTEn+SEi/fjrHZQMP3oX1iJ56uBYIaM1i5lmBSH1JvntRfkuF88t3PfvJc/8C5+lDLWrrODUJBQkl0+RfdZi03XBSQ+6ZmMLUyOkmwlTY6Ri7vYHZPMPO3aeZnjzH1h1nOffRDcZnpvzz2Pu6nZo11PtwsdQO6say1VjunDZkAo5MatZOT3jE3duMRp4BuHr9WXqrOYMjB8hXB+SrQ++7VOSJ7Sp6BXKw6q+RwYpnmqrMF5Qig0wFhmC88MM14oScY+jtiiijpwR18MrJAuhXmQAAOocUCqshUwZZi8AQ9NdFP1OJ6dcLoF+uvUyc0jIUozz4h2nZfiZAWI13KKWsDUqKJPlZNt4vqNQ2FPt8Ybrfy3G5xeZDpLPIqkSV3tdlcHTiZazqhnraIKTgcNj/8bzZaex5j/HUODZrEzxdGvITE0xlyIY5MhTb8gOrIBVy5UBb2I7jQCxOLzQbCCFxTRgPdO7P3axAyZwGX3zqykzBbsnACP4BQd7TF8acEyCj
1JlKfjcPdEgl0z66v5Z/qXHrrbfyzd/8zfzrf/2veeITn8hrX/taxuMx3/qt3wrAN33TN3Hdddfxile8AoCXvvSlPOlJT+KTPumT2NjY4NWvfjW333473/Zt3wb4ff493/M9/OiP/iiPfvSjufHGG/mhH/ohHvGIR6Ti1zIuMzosvFg439WwEIe30KwQQZtLBt1iPiEk0F7D/kfmGfJ7fv1qQ/kWwt37j5z+k7dRbY0BUJlG5Tr4EAfvPJXhdE6DZNbYxKZpx5igXBFAGylIYE7XD1aK0NzRdCQbnU1eX0Kp+fzTRnaSaY+bVL6hSXkmtHHnOabRe9lUnvUec7Aui6t7v1vwyPV5Y2c/cf5mKN9E1iDqMknAo3KclLj8/i2G7GecV8XBWa9GAYGp7lKuJIq1XZ9TOgfauURPSzIHtRSs9zNWcsWBniJXgt6CAoMkeCi6sD9jvl77fDwBwNbMr2tsrpMaa1u1hVnj2CyblqGlPCvNOP/7jYWe9nlKrmTSJW1CbuOBSw8CTmrf0FRGtpx1SOkZfhHwM04EoEjgpAsM2PMMAkmKViFsZ16RF/7PGFRVoirvweesxVR1+KqcY+kLKb03XwL9vLSnB/dMAPYstmq8hL30Ep5+mZam9J7TzdS/BiCDBL3Pa3qeMVg39CGoThhkb8hosE6TK5pBRk9LausYZDI1ee2UjZf9nDVIKSjDOGCMg6CYYJsaC5jGr7eUDVJLlBLUs4ZZv+bjAfRrrGO10F6eXInAECWpHHTP4zQfhqCA4vNlmho3WPEM46ZK3uup2WCPMT36OJpwuGtr0Vb580T4ZiutOnO9B2FetcytvnLftnUZy7iccM7xtre9jdlsxr//9//+spezs7PDz/7sz/KTP/mT3HnnnQyHQ37v935vH9d0Gcu4uLjnnnu4/fbbedGLXjT3+jOe8Qxe8YpX8OhHP/qKf+ONb3wjv/Zrv8YTn/hEnvjEJ/LZn/3ZidHU6/V485vfzFOf+lR+4zd+wzfdLONhGVtbW7zwhS/kxhtv5LGPfSyPfexjuf7665HyvnOPnZ0drLUPa8/Hpaff1RhSpy6/xWJUalTvvBY/0mVsdEFAiMWrhWVBkupUneVG/odxHkBMTJ2IKe1RILMCcnF1SFDtKj442xZ+pETozLP7dJEkm9B5mvSngnpdthKaCx59oql9sd02UJW4qsRWJa4L+tm2IAV4rw3dmRim13NfsOpM9Fw+wqx5GZK5js8OyOay4r4Lh4sTx0ucSK7kctf51VnxS1rW1R6xC9c4w0BrhICZcYAlDzeMxsHZqQe4c5WzlmtEue2Lg+MtzKm7qe78ZyanNjj34dvZvmeHM/94lrvPlZyuDB+f1BdYg33YBgf3lA33lPD32xWcmpB9HK778BnWMskjBxn99YLB4T7Do0PyUU5xcDUBgcXBVWRRoNYOeSZgMfRFDp0hi0EqfNAFAWG3n80FzkcVrrNCZTitoZfTOJKUVmMdlZEJfNuaNUxqQz9T7OSK1SJjkCtmjWVzUiWvvp3Se7lMZ82cv5//WYHOFEpL1voZo0KzNsg4MMgY5t4PKAsSbHErrAsd9cqPFXKwihuNQSr6R8aJkSekZLo6RSjBaKti7VzJemYYG8upmWFqLFMzP/5aPCgIDVMjMacnrG2WNGVDea5keGwLZy3FoQ361qDWSy+XNzqQGCpxPydZ4boCQCqFsw3O1J4p6BzkoINsYGTsxkKmcZ4p0fX+k4FaI5xAKRKz21iR7iXOCRr90Af89yO+7uu+jlOnTvHDP/zDHD9+nM/4jM/gD//wDzl27BgAd9xxx1zSee7cOb7927+d48ePs76+zhOe8ATe+c538tjHPjZ95vnPfz7j8Zjv+I7vYGNjg8/93M/lD//wDymKi2/aWMZ8uC7g14nQJ0AHmgNaZYPErO2CeIHx7S+GhfHOWj8cRq9bZz0Ll4V7+R4y7untTlNWZMFdbSGKIYNrDgIeXOsdWEGNRohi4O8T
SuN0jhF6Tj4veiW284+oFBEBFC+j2I41fh9kkpSHeS9i5/MpneFsyO3ihH6OAdhtqNLY/ppn+e21UVLj8oHPe3rDsHp7MPouZT/t9TNzHXxevcFd4HzYzzgvQHcFccHldZrORLWDm+7gynGSQYyfEaZCVBNG+SrGwUAL1ooME9j7B/uavpasq8pLq0/KDiCbtevSzBBNiazGXka0HHtpz7pqc/KuGkbMvYXEWJsAu1lj2a4af94ah1aemTeqWi/BQntWaq5suo82oTlp2jjGlWG7atipDHVQkojR0zJIaDsyKxIzzDMIFepiGsMSC83nKaKwvmlLSlxTeZlyJROIZwLzz1nbqgrkOvjotQy/pGRgbPKfbsoZoqywdYMpK6yx1OOKetpQj2tmWzNsYP2BZ/tNVnPyYU5TVvQ3dmjKihXATbbRWY4wFQdWjqGkZpB5r8adqmGtyDg0yNmuGkaFl2U/2dNsZDNmU59HN8Hn2jQGZw2mmoZTyWCtQUrFNO+j85xy7OU/TwY59SOjHpPaMOppVnJFFrcbEms0SsamubXKUEPfjIWULXNUZ8EzfuCb4uIcMgCUUniAFxvYiQKs9cB2K+Ef2X5eXePh0Ey5H7HMrZbxLyGstfzWb/0WL3/5yzl+/Dh///d/f0XLu/fee/md3/kd7rzzTgBe9rKXXZGs5y/+4i/yB3/wB5w7d46NjQ3W1tb46Z/+aT7xEz/xitZzGQ+vmE6nvO997+Pd7353+rvrrrvI85w8z6mqiptuuon/9t/+G09+8pMverllWXLq1ClOnjy55+Pdd9/N2972Nv7gD/4gfeeJT3wiP/3TP81jHvMYfvu3f5uf/dmf3RdW4T/+4z/uC1AZwwXSzNW6PIC/+Zu/4dprr71oWe2LjfF4zHA4vKJlzGYzbr/9dj760Y/ysY99jP/zf/4Pt912W3p/OBzy3d/93bzoRS867/G//fbbeeYzn8kf/dEfXdG6LOPBiYc0GrFX53k3Fi/lxGgL/8RuxeindbEhILFFwAOBsTgTfQQtsQN+9/cvqxP+/gzbIGdjRD0Ba4IHRutPliSbVEfec5HxF9k0i4CfbYJ3g8E2lZcOCt4yBClAvw6GRZAPmJPxFErtko5yOrs4Rl5nYrjfhZzUhX6RcX8Ukq40uuvkpQ3nWbCxVFst6NUaByqwAGTn5mkd7NQGJQTOScjDeWMbXFVits9Rnt1ievIcOyfGjE+O2dmpOFcbNut5T8cHKmoHH594AGizthyeNhw7V1Lt1PRWewynDb3tCb1xiakasmFBYY0H/IYe0BZ5gQzFjcgAjLJSCJnGjFgcF7s6AzoysuFcF1FGVvfIdI6WmizzHnSldOjwB5oigEuZFBTakinBrLEMcsWkMkyrhs1JzayxTCtN1Vjv9xclaaVIneMHBhn9XHFgkDPq6VC88/5AWgq0mgfAXNw+rRNDV/QK9LAiG/fJV30SUY/rtpP5XEne+MaJqRFJwrWyLfhnHFTWsdNYcmmonSM/OQHAGks2PIWparJBEXahQYFnKedF2q/RL9Q1HvRzDcheeLvJgiyrIuvlOOeLWeCZe3YPbnZ7PxGJbxG9hRLGEbris0sZIPYpvE/j/VcQE5eJrjznOc85r+TUO97xjrnnr3nNa3jNa15z4fUQgpe+9KW89KUvvaz1WcYesSCXPfeWEGGyRPLg6s6bovfaHFi3sOy51+O/3bFvYSImFmGnPcE/cUW51f1yX7aNV1Aox97DL9OQafSwSEyj5EUcpMJtkPPssojVwv5Qwo/V3tPPf8jhWTBROl4455nNdPZnVynBegCgC+pgI0AbAR8/RTjvfrlCkO++Ys/f3QfVhIs91vfLqL3YgBYb5ozPm53U7WtSIUYHsLo3v4ymQghJnhWM8oxMCpS06ZxZySXDTCK3ziFqD+w55VmlZEXaf6IufaOeafPyrqf2Lll9/+L85kCQ4YTGeEnr2no/NvDnc5Qlz5SXpZTCdUA/5gA/z/Sb
B/0gXAMxv1GRDeaSz6iXKr/vI5byFCE966ypW5lPYxLQJzM9B/pFKc/4f5eNZTt+faZuULaV9gRwxmGNoykbmtKrHtjaMoug36T2n6kt+ShDKokeFvS3NkFnqPEWQuXIbJt+fx3wubiSWZJIlQKmI3+e+PzOYK0j7wWJzyY0LyV7coNpKi/7CZimwlQ5Qh5A5/643322R9VYBplM/sSDTHnmXwT/RAT/6IDWHlyW/aFn+FmLq6tkGSF7fWynaRfivLg9frKTX3hJURHGxXCcH0QFlWVutYyrMe644w5uuOGGfVnWS17yEr7zO79zzs/xcuNv/uZvePe7370vsrDOOf7yL/+S9773vfzKr/wKKytXJu196NAh/vqv/5rrrruOI0eO8NznPveKltfr9fjFX/xFAL7sy76MN73pTRw8ePCyl/fmN7+ZL/mSL+HQoUNXtF4AL37xi7nlllv2BZD4q7/6K973vvfxnd/5nVe8LIAXvehFPOc5z9mXdXvPe97D3/7t3/Id3/Ed+7BmMJlMkFLuW7PDU57yFP7sz/6Mpmm48cYbuemmm/i+7/s+nvSkJ/Fpn/ZpfN7nfR7Pf/7z+Zqv+Zr7BKXe9ra38eIXvziBetvb23PvHzx4kKNHj3LkyBGOHj3Kv/pX/8o3OjnHU5/6VL7t276Nr/iKryDPc972trdRFMW+Mblf+9rX8prXvIY839t//VLjB3/wB/nMz/xM/sN/+A9XvKzjx4/zMz/zM7zoRS9CqYt2pT9v/PAP/zArKys0TcOP/uiP8tznPpfnP//5l3Xtv+c97+Etb3kLj3/843n84x/P4x73OH7gB34ApRTPf/7zL3lM/sqv/Ere+973cvfddyeg8xGPeAS9ns8XsyzjG77hG7j11lt5/OMff97lOOd4wQtewCMf+Ui0vnL4qKoqhBBkWXbfH76IeOELX8itt97K4cOH92V57373u3nSk560L8va2NigLMt9B4MvNR7SoF8E7eL/e7H79io6xcJ7F+yb+5+O6uOFViBOzIWc/23hi9gXYv1dDZGK1uU2/P1f0ET5J+lL3F0fD6cDa0ll7URLytSdvyuCz4swFdSVB/yqspUO6kp5gi9GaX/h+0JYkOAJvn5JBisbtGCINb5L/jK3+8GKB/v394rFdTpbNgngM0HGKZ7DPS1TwWVSe6mlo4N8riPfOMfHN0oGmeLYMKeyip4QuOkO5txJJh+/g7N/fztbd21y6kOnObdTcee04XjZXJZn336GxYN/H5/UDLcFN45r1jPJsZNDeqs9Boe3GBzeQA8KikOrZEMPaOUrA0SvQK2s+47mKB8VvTHDuYyUyPh/+tFWTss/b7vLhc6TxK4Lvk8y8+zbIu/j8oLKwrSxVMax1lNUwW9ne+a77ye1l+acNdYX1axlXBmq8FrV8dbLtffjWetnZEqw1ssY5YpRT3Ow7wuMo1ySK0FfywDwVy2TDtL2y8Eq8XYuQiFLKEk9nlGcK+mfK1kbV6yfnjKuLedqw07jGX87TQsATk37l0vB2cpwZGZYvXfM9MyU/qHTTI6fZXTdYfpH1ymuuQbZHyJHB/y6hKRurqCpFDYCtEH+zgUZ4F5WgPYSq42Yl7Py8nkt0B9lxqI/YtuM0hap6gcB9FvGMi47FsGc0OTkz3/f3MQC8GcFiJCHOeg0OMh2UQtsP2ENkTM4xxJcCIdNr7so7dZ5PzINr+Tuen9coXJyjuYvfh1XjnHWkq8OkJkmP3QYtX4UuXYIl/WCkoLEGJe2K7GJ8cBeHHek6DD9IksysCq1CoCqaeZXJAIcC/ec7janRqzYjGUXlvEwigdzNBZ12cqumso/j+9ZA9JLTjtdQDagOfSv5q8JZ5HlJpSbqMk5VH+NoS6wRdsFrManEBtj3Jm7sDOfd4vhSvApHiWVFBGa9NzOpmf4zco51Q2gBYr3AACFiE1vfo9GPzbjHCWWSS3YqQQ9JTmn/Dyp0NJLXwtBbS3GwaQ2TGvD9sww
rrwSQfSWU1Iw62kyJRg5TSYFmZWpyWyUB8/LcA9eBMi7+80JEZqwVPCdG3hVE2v8XE9n5JlvKCI0CYHPFWLulrY97A9nDcpasAaTa6yxNOOyZQdONJR1kvUsN2fs7FTsNJZxp4lutbaMtmZYY6l2aqyx6KJHv2qQw1VUUyGFJBcS3RtCoSgbRyZ7DDLFds/Le456mlwrlBRsTjSnAF1KpBRUM0FTS8/wC4CfjfMzaxBSUZdj6vIw0+0+H7KO4ajHTlnzyIMDDg1yDg0yBplirfCNYBLSccLWfhyXOtgrNMg1lZpCY6Oo1YX3qQxsP4lLY1smJbVwsEejVczDjAMtfTOd2wuYXsYy/oWFtZaf//mf3zeg7rrrrtu3Yv1oNOLo0aP7siylFC996Uv3rfh/8OBB/uIv/oKtrS16vd4VF7M/53M+h5tuuolnPOMZPO95z7soubzzhXOOf/iHf+Cf/umf+L7v+z5Go9EVrdt1112XCv1XGqurq/tawL7++uvp9/v3/cGLiNXV1cRw3o/47d/+bdbX1/niL/7ifVne13/91/Pd3/3d3HTTTbvWs65r/uIv/uKij9OxY8f4t//2384Be/H/Q4cO7QJU3v/+93P06FGe+cxncuONN86997SnPY277rrrihllMbossisNYwxve9vb+Lmf+zne8Y538MpXvvKyAX9jDC94wQt405vexNraGt/zPd9zxet3xx138M53vpN//Md/BODHf/zHef3rX8/znvc8vvd7v/eS1nVra4u/+7u/481vfjNnzpxBCMHKygpbW1u84Q1v4JZbbuH5z3/+RV9/X/iFX8jTn/50brzxRj7hEz6BRz3qUfR6PV784hfTNA3Pec5zLuqeIYTgzW9+M2fPnr3obblQ/Oqv/irnzp274kaLGNdff/2+3bM+8pGP8OQnP5n//t//OzfccANf9EVfdEXg8Fd91Vdxww038KY3vWlf1u9yQ7hLcCt8xStewW/8xm/w4Q9/mH6/z+d8zufw4z/+43zyJ39y+kxZljzvec/jl3/5l5nNZjz96U/nda973dzAdscdd3DLLbfwv//3/2Y0GvHN3/zNvOIVr7jom+3W1hZra2vcc+9xVldXd00NYmEqMvLoPi5IQHZBwy5zsLtbYgc5zMt7LkZ3WUnmrQOWxO7s2Im9+N0HshAhyi1kNYamQlZjmtv/vgXhggSLXDuEGKzisgG2N/SFKZX5Ls6uN4uzqaOcxrP6RFP6/5tZy/SbTb3HQ1PPy3uGSZvQmS8wdCbYIi/85LoY+t9UObY3xOmen0SqLPm5XOw+vBqZdg9GbNeWcW053Ne7fHVOTFrQL7IOHG1XdWSGHSgUPSVYL1SSvD09NUxqy91bM0Y9xcFCc91Khto5Bf/8Xuo7/4HT7/t7Tv+/u9i6a5s779hiq7EcLxs2a7MvHn77GYdzxcFccX1fMwyyn8WBAt3XDA730UVOvjIgXx2gih75qvcAVIOBP38D482Dd9n8eQ7+XI+gXwSlIiguZfqs99cMDELd8wBg1sPpwj/PBxgksyAPZwJgFv0AmyAZ50E/N9dR34RCHXiWoJTCd3RLwUquGeT+OPe1REvoZ5JcCpStkNUUTIWcjf1jPcVsn8PNSuzWGf+4s0G1NaaZlEzPbGKmFbONHWZbJdW4ZnJ6Qj2umZ4r2aoMY+M4W5nE8KuCl6EJLKJcCkZaMtKSRxSa1Vxx4FFrDI8NGRwasHLDUfSwT//QKqropUYCL1uctx3nRZBkXTngpfayAba/5vdnsZrkVI118/6nsQC/cK4k0lI4h63z39vc3OLRNzyCzc3N+10HPd4f//nlt7BS7M8Eb6/YLmd84gtf/4Bs08M9rrbc6sS997C60ikwdAC8VHi1raRkjOjtqkQnB7OJWoIwzbwyQAzXAfW6KgOLDUWdBqQqsNGNa2U998qtHuiQ001EtYNoasRsi+rD78PubFBv73i2X1Ggj1yHOnQNYvUQZvVaXN6nVkWS
aq6MS+ziTHqAREsCWNLJI6NPKR3p6JCPiWqKaErEbOybrwLbOcYuKdamwfVXcbrnczxdYIdX3l3+LzG6Kczi+Rg9rkUzS6oYWNtK44MHRkLObQfr6bty5xRyNsad+ChuNsU1NXLlgL+P9frtNTTZwpUTzJl7fZ5tDHK44lUJVg60UvlBxtNNtv3/VQtAJrBPe8aWKIa44UFsb4gdHmJqBdPasjmzlI3l3p1ZAu/KxqYmGGhVIKIXpZJeorM2np0Ym5CmlWG7bIJ0qPPnuZSsFNqrD4Smo0IrVgtNoSSHh3nKTXraS032VGD9RkA8+oxHoLWaIEyNmG55y4Hxlt8HXWnTrvxtbEg8D8DUVS5xdUU9Lqm2JzTjKZOTG1TbE7bu3KbcKJmcnnJyXDEOfsUx1jJFIQWPWOsxONRn9fpVDv5/19A/us7Koz8Bfeha1LWPwg4PYXsr1L1Vpo1lp/J5/M7McO/OjJ2q4fj2jJNbJZuTmttPT5jNGspJRTUz3rtv2lCXNdVkE1OVmNk0SX5KnZOvrNMbHeTA0RHFIOPY0RGPPNjnyGrB9Qf6jHLF4UHOej9jkElWckmhJQNpgkekZ5dirfeKnLsAZFJocbqH7Y0ojWN7ZimNpWwck8o3poEHATPl89FcCVZ7kp6S5K5Clttsb5zh8KM/Y5lbLeOqjnjuLI/pwz/uvPPOK5IIXcbVF/shr7iMqyc+8IEPcNttt/Hrv/7rvPOd79wXGVJrLV/6pV/KH/3RH3Hw4EGuu+46rrvuOj790z+dH/iBH2Btbe2Sluec4/jx47z//e/nW7/1W7n33nvTeysrK/zYj/0Y3/Vd33XZYJQxZl9YjpcbH/jAB7jxxhuvuJnh/opXvOIV/MiP/AjHjh3j9a9/PV/6pV962cv6nd/5Hb7qq76KD37wgzzmMY/Zx7X0cbH5xSW1tPzZn/0Zz372s/nsz/5smqbhhS98IU972tP40Ic+lAbD7/3e7+X3fu/3+LVf+zXW1tZ4znOew1d/9Vfzl3/5l4A/yf7dv/t3XHPNNbzzne/k3nvv5Zu+6ZvIsoyXv/zlV7DJPu6z4LNQRFpk/SXpzz2WJGg/4zqvdd+PcqF7SY8mKabLWe99DjndRG4ex062MXXHV8+a0KEaukOiWXqU84yAX5CiiuvuwE+sARcKe37CHTrXRYMoBDTe2w8Ikp5yN9AnWzAkFids6Ah1UuHyYesl04mL3YcPdlHwwY6433Yqyz3bFau5QgXUL4EaCBIkHoqpxnnxHed8BzbAWk/RD9+Nyz09aTg5nrHe9wwxrQSyKRHVBLNxitnps0yOn2X7nh227t3hdBXZXfaqA/wATgfwSQlYbSxr04b+5oysrynPlehCk4+26K320EVG78AI1c/JBn2yYYEqcrJh4Zmrkf2XBd9KaLvHO9dgLNRB7DIPYFVeYGMRLi8Qvb5nBEiNzftIqdG6F3xrNEZojAsyWqELv7IKY9vX4v+xSCeFCNJxvkgXC2m58h37SoCyFaKqfOEsAPyYyns2Bs+8BFQCwq6QA7rIEVLSlBV6WJCvTqjHfh/W45psmJGfmzHaniEhsT693x/UQc6rNo6xMZyaGTZr68G/fzjLobu2GRzuMz6541mZR9fQwyIdC5lrD8gWPe9vNVhF9Pw6yqZGDp0f22yDFRKtc5TKMVIkz9Yot7GXlGAa9zsMbwvpelnGMvaKqy636gBIwC5VA+/jGaTMRQfkpvX2a/3oWrYgUnrqeIy9mrI6AOAuPz8pU+7R9V9WHebtgx1iuoE4ezd2vOUZVr0CJiHPLArkYCX4jq5heyupgcPYKFcYxmHae7IQAfyTwjfoBDZfAlGJKhPzQGmUMnRSB+lIOhJ8gTEppM/VhMSOjngfv2VcVlwI7NszhMSpHEGFi2BtBHF716SGthhq5xT27HHMuZPY7Q2ajbNtc1FHVcDNSi+lPwlST6GxSDS1B/uCb3by1E6S+wsg+14gVwDRJJ5NlkkwSlAo
iQ0NRLXw53FtHJXxAJ5ZnBCFMKEZaVoZqsZ7DkevPyUFPe39A/u5RgXAD6DX+FykNhYtJTZKcTuH22vvdwDAFFojrG/KSvMRnQUAtqtEcmG2hshyn69VJUiJNhZbN9iqZct6n0DDLDQ17TSWncbSIftRKcnapEZlitnmjGp7gipy3GQb2x8iJzuIbIAUEp0P6GsdfBUd9BRrtZ/SH+i36g0HRzk7Okpx1kghkoezsyPP+gs5m7UGW02RU5+b7uSapjYoLX0TWWPpaUlZZPS09B7PSiRZ4uiZSFRhkdbnoovM8Y7XdVfGWOLvGVKCdGIOOHa0ssfOOVqbiaWn3zKWsYyrJ5aA38MvloDfwyse//jH84Y3vIFXvOIVfPjDH94X0G82m/G6172ORzziEfsiBSuE4Nprr2VnZ4f/8l/+CzfccAOPetSjuOGGG7j22muvGLB7MAE/4IJSog92lGXJ2bNnaZqGO+64g5/92Z+9ItDvy7/8y/nMz/xMXvrSl/KWt7xlH9f00uKSQL8//MM/nHv+C7/wCxw9epT3vve9fN7nfR6bm5v83M/9HG95y1v4oi/6IgDe+MY38imf8ilJG/Vtb3sbH/rQh/iTP/kTjh07xmd8xmfwspe9jO///u/nJS95yZ7UzNlsxmw2S8+3traAMDkQu8G1XbE40VuMjmwVdDxpzhMX+r34VuyCjxNdL1by4FalRLnVdrqa2ncGl2P/pgwlJukL9aI/RAxWsVkf1xu13d8RfOsst8uAcUKCDp9RGULXHgiInc3aIrLcez0Y0/GOWZA6jB5+KsMGkNHmfd/1LB/SqrQPSFyIzVgax8mxl31c6WlOjBv6meDYoN2vg0yQK5WYSt1rorGkjtytylA2gmEmmYbnqz3Fen9AX0tGmSSbnkPd8zGaE3ew+f8+yNbH7uXMP55l8+5tTs0MU2MxztFXkly65OHW9XV7sGOrsTBtOKsEfWXoT2tyKSju3SGXglwKBj2NzCTFWg9daHqrPXqrOfkop3dglMCnKPGWDYsW7I4RJKVs3cz5xEQPGVX0/LVRDBA6T+Bf7MaP12+UA9URoFdZ68mpclymWsneDnsHOkzpyEJownhRN/Pd8s4mmbJ4Lbsk3xvWOzDslM5x/SHKWtTq2BckZyX1uMSUFaONbepxSXlmi+m5KbOtGcN7d6jGNes7FedqXyCL7L8IBlo8KHu6Mnx8UrO6NWN0esKRj20wVJJDg4zeak42zBgcHqALzeDwkGxYkK0M6B9aQxU5vcOHkMNV5OgAcv0IshhAU+GyAnSBzHp+bJMacLvBiBCO1uMVPJtG8eCAftGD6P5c/jL2J6623Oq8YRuE1KjY2LQg85k+5trcTC0CeIsMvgvlZguAnwuAnwlqCvH+9GDnVmrz7tbDuCk9M8h6/1A3HYO1fvxeO4RcPYg89AhsP4B++SAwim2Q0u4sVxKAlda/LzHFTD2/787jr+ek8uN4jAj2RY83wGYFrrfix7tlXHZYBzPj6CmBNr65zQnpZQxDOKn92VpPA/uyweW+09U1JejCM/iF9EzN6WbyyI6AX/nxf2J6coOdu09ha39shZLoIkcPC3TRQ2a+wUXmmmwQ/CNnJXY6RkgP/KUGo6Zt/Ftks83lKBGYtA1aaTLn5d6FcKz3swQCAZQNlI2X6twuG3bKes4/eDEi0DetTAIJVZT3bCyD3L8+yBV14cGiWaPIpMQ4FfzgLLkTKOkHIB18pL1foknMysQyDrK3kQErstw3LYX8JUqddqU8/YHe7TstrMUphat9w6SqG3TdoHKNVBJrHKayoYHJ5zObtZ3LcY2DncYyCN5/pqzC3wwxHePKMXJYgtSIukTnA3IlyJXEYhn1VGju0ljnyAPYt1M2bBSazUnNdNagc0k1M5SZROqMJu9TZXmS+ozbWpf+3qCzGdY6poGZeHDUI1MieSlmUmC0RApHrgq0yv2+dq0bcmIih/2OkCnXb6w7rwWGcQ7ZaSqJjVTd
htQHOpa51TKWsYxlLGMZD+1YX1/n5ptv3pdl9ft9PuETPmFfltWNRz/60Xz/93//vi93GeePoih45jOfyZ/+6Z/yvve9j9/93d/l+PHjly1pHP2Rv+zLvowf/MEf5HGPe9w+r/HFxRVly5ubmwDJqPK9730vdV3zlKc8JX3mMY95DDfccAPvete7eNKTnsS73vUuHv/4x89JUj396U/nlltu4YMf/CCf+Zmfuet3IsVyMVqptXkgbi8GxgWj21lOC/7tFecD/OZkQl0re9X9uHXeveaS12+fQjgXilJV2mYhFSiF63S1il4fkRce4AvAQGL4dfZTlxnZflm2Hf20bMD4PWGNn/zZBpQ9T/HPn5ZOai8BEwALdDFXPLncuJrlPRvnpRij8lahxGWt6+J3YpEU/PJnxiVwxzhHs1DD8KwCT8qIoF8WDnZtHRZBY51nBArhJd2kC3KQXuonMyViZxO1fYL6zn/AnLqb8d2n2Ll3k/JcyU5jE3gTPYrimkcpR+O8j9vlAIBdb879CC8zKZgaF5glHuyLcpP9mSGXguHWjJGW5EEKNB9l9A/NyEcZ+cqA3mSELno0kwKZaV9ACJ3ktm5wxmI6XeIu+PvFop5Qkmw4QWYaOwkd/jrH9oKUaJQRjUw7qZA6a30xdZFA+diFrbqF4sg2MB3foeD9Q9O0MlbB/2kuFpgCyUdKZwhbeP+bvPDF8LpG9se4pkYVOc2kROYamW+R9TXOOLKxL0CpcU0hRXs+VGbPY7vVWLYay2Zt6CvJwcqwulMx0pK101PyUcZsa0Z/vaC3OsHWDfnK0O/TuC0686w/leOcw8WmEan8eAjznesdYCJed3F/LgLmy1jGxcSDnVt5dn6HodfNkWLuQFA1cPPjbPLXEy0DvNsUJC51VF7wBowS0xHwm1tnHoR7u7OeQdV4WTtnWl88V5VeMtEaP9YXA+RwFZsPvedVVgTwksSW8fvT31vjtkS/K7rjctdDFVp2Zme9dkX4jJPaA0uxYK6LXayyh1tExjv4CVgm7/tcuZhcMeZWUpDYmulb1vpbRPcLi8yk5DGnQSp/TgiNwvp7b5TJryeY8RZ2e4PpyQ3Gx8+wffcG1U6FMw6hBFlfU6wX9A6MyIYFHFjx2fSAFqhqapwG0XRBLJv+T8DfHuBf2g5rUbpl2GbSKwJYJ7FOBa9n76/nGWJeunMSfIS7EYG9CAZGwK8L/MXvKFmnz2UhZ5rUBikFhXZkxjNkjXVIQHXz5ziOLfiQOyF8zmTt/FhlDIIsNSECCBtVGXZ3SCelFEA0NSrTNLLN7QCssQnoMm4+p42Nbsb5z8U/Fx7jOs2xo51FCOWbA6yXAc6UJFNeAnOYa0ZFhup9CjBkAAEAAElEQVSsQ64lprGIwPjzrL8hpql8nhnl5BPQ6WhqQ1MbqrJhu2zItWR71jDIFJNahjmLJTeenZcriZL5XCMtzM+TCXPlJkjQW3bn+H7O4rBW7D33XhzzlrGMZSxjGctYxjKWsYwriE/5lE/hXe96Fz/8wz/Mq171Kt70pjddEfj6JV/yJdx00038yI/8CL/6q7+6j2t68XHZoJ+1lu/5nu/hyU9+Mp/6qZ8KwPHjx8nznAMHDsx99tixYxw/fjx9ZtG4ND6Pn1mMH/iBH+DWW29Nz7e2tjx9Phang+TUruj6OSxIHs1FBKQ4f+Fosbi0l/cfdDoRQ0Gqq2Q1w6KESHKID3Q4lbUeO9FnIXZ6T8decjD38lP0VzDFStt1HBh+Zq+J116/JSQEMMNJ7Y+DLnwBPTKIYgErgJFzXf2xE3R0GNfbX73fq7n+vjUzfPj0lJ6WrPQ0n3AgZz9Ol0njfT8q44uKhRZISH6VeQeVEEBPhGKQYl7qzVmQkCuFRHB0oBCzHaihaGYcmO0gt7ZhNqW++58xm2fYOXmKrY/dy/TUBqc/fIbpuZLNs1Om4WQahW7kPHgXSVqQxISCsnG+IAKkR/DAG7Q+b0oIMtEFEX1MjaN2
raTSTmMZX+zJHMLi5SbLPasPu2OoBAeP7zDSkmM9zUrhPQCLAx4ILNYLVKZRRYcFYH2RJwJ/zjiscUglEEqgAkioitx/t+8fZZah+zky08hMo3KNkBKZZy37L0qEZuG16C24Rxe7bWpfHIwMvsjeWwT5wnJE1voVJtAxy/3zRdne+FuRYdDU6O0NXFUy2D5HddZ74AyvOUO1PWF8cofV0xNmWzMOnpgEQE9wNshk7RXjIP95umrXd3WjZC2TPOKubQ6HY7H6yG366wWjjUMUh9YoDm2Rz6bI4SqqqZED74PksoEHTLMiNEuExoSuL1b3UQbZPEAFZtQDHULMFxzvj+UvY//jqsitCIDH+UAkZ1HSSweLDs0vyXwKwC5I4e7l0Qdz144TYr6Q27meopxbLBZHoCX81JxawwMawaPK51UTXDnGVSXm3EnvqzaZoAYD5GAFfewG3HAdu3IEq72HXwL6FprEoPXwS0chARcNoi6J3ogujLER0EurFlhN0dvMS+555rftr191QN/5QLb9aNSaNJYT4wYpPPP6EaPsPnOri/nNndoyqX1uryQMMq+EEWU7d0VXXhWfKzud41TOlrVIAwKLkoJcFWRZgazG2LMnaO79ONPjpzn1/o+xc+825z66yZlJTWUduRSsZZLVa0esPapicGhANuxDaBSic58XDTjqvZlr3fu1zpIvsZPK/3XAfy19g5dvBJNo6VlntXFIATtVkwC9SfDs2ylb1mkE/CIjDUgAX9XY9N2dADRFJuC0aphUhkHumW1rtcFax+FhjgnrBcFSIe7z0PAoTJ1YaGl80RpB2M6Qm4iwX+Yam/bKgWIYD5a6pkLUFTLPkHl7LTrjAbbK+sal6ULu4hvdWuBLBnWHqPIQwbhFX/o4aEgBmZIUytHTklFPkym/tNpaVgrN2qBhWjX0c8VO2bC5PSPrKXrThrKnMcbSdCRJda4RYVC1jQf/zu5UGOvo52Hu6BxSeM+9JpyHWtnUJJh23UJjbvd133gYwD8XdvPC3Nq5ztzatfcJJx54iaplbrWMZSxjGctYxjKW8fCNPM955StfyZd8yZfwspe9jOc///mprnGpEdl+T3va03j/+9/Pp33ap+3z2t53XDbo9+xnP5v/9//+H3/xF3+xn+uzZ/R6PXq9yzDN7shJ7SoNd4AlmC+2XOhwJqkS18rL+ef+sdt93p3kiOiDsw/liwg6xonTXCfreb8UOkVDUS1NKoI0n+gVyReEvJdYfU6IJMviOr8NkSXWkQPcQ74rgn9xvUXotHVRYsdUoVim5kE/8EUs9eBIeTaulWh1C8cxsrv2M4yDjZlhp7Kha/k8QPZlhiAw06JdIsGzTbagGXjpMFHP/HEBLwUZu9Cjf5CQDISk39Ooc/cg6ilisoktx9jtDergN7N9x0nKM5tMTm2wdec2s60Z2/fuMA5SjbVzAayTCejrB8+4fI+NjzWSyALsrrfvcBZIJdGF9gBZ7oslAKYyWOOoxzWzykuKbjWWyrrA4HNzndb7IS06No7x1BdQ7p42rGWSIzsV69k2g56mWPcyoFlfByZfYMYYh7MWa1wA/WzaDpX7YkM+ygLzz39fBvBQZdqz5TKNyrMEAEZGocx0KCC18kRCqeQf6IwNv20T69CZ+T0RlyOU9L83GCCaOsmLAi3glxeJYdgtcgJJJgxnPbBWlZ4JUwzJJ9voIqfampANz9Fb3WK2VaEyRf9cidqc0VeSs9U8sHehiCxA4zx4e/i436/NtEHlKm2rVBIVfEddAADlsMapDGfqlpkhg1/WYhEu/kkVAAH8GLeMZVxEXA251VxfQ+d+3pVrgzYHaL/nn0j2kDNYaL5Ky45vx6aAxYJnANAXczUlCCzzfQT7Ajiw5zouPjqLMJVvXGqCjHm4B9rJFrPjxzFlRT0uWf3EAWr9KG50CNtfo1GF9z7ryL93ZQ8lfrtE2sYFxv9euZ61sOhGbe3eDVVS3qdP2f0VlfWMOyX8JKib
S53vOF7J4TUONmdeFjqTfr+qxf15BeFCrhhZZUoE30VznjE/3DNSvtw5ll2gJJMCRTjHZhPM5hlmZz3Db+febbbvGXPvTsW52m/bSPvcqT9tWlnwcJ9P9+Ms2yXf2ZXjBlqGn5RJ2tPfw4Nfm5g/d1SYABgpPDCjvORnbSWZlORakmu1J8AXX4t/MYx1c5+Lr0XpzygFWjWKXEsPPEmfA45yTU97BQp/nrX5xlwT6CJLVmuEU2C6IF/HgiABftluhQPwgF9kBUaANRyHyNozQaEgAnyLkZrWcoUuQk5X5N63MczRnMp9k4HOccL7HUb2c4wsSAJbKRhkEuvaY1E1Pg+LoJ2UAp0ppJbYxtLUnaYB6ccenSt0LlFapuNUNZZpZdjWhp6qmQWwttAyMP1gr6tMxnENf/1HZZzoLV0HuX/jwIaJrozz7Q4j2jiBUnpp97CMZSxjGctYxjKWsYz7JT7/8z+fT//0T2c8HjMaXT4R6SlPeQqf+7mfy4tf/GJ+8zd/cx/X8OLisrLl5zznObz1rW/lz//8z3nkIx+ZXr/mmmuoqoqNjY25jvQTJ04kHdRrrrmG97znPXPLO3HiRHrvkuO+OuK6QNTi63uBgd1YkG3rgnldRh8L76Wvu7YL8nIjQoSL69kF31SUferKvizErmJbXE7w4JLF0Pv45V5C06ksyHrqtJ+MawtTcfsjUyxJ2C1284fu/O76okBhcc4imswDgKH7Nn3nIvbJfkd3HxvrKI1LclBdkFXl6j6LjBezft3tMA7OTA1lYyiUnzTn94EsXgpPzfsCQd5hXuTBJ6jLmBDVFDk5l46Fy/qtxGvnHBN1iazGuK0zHui7+5+pt3eotyfs3HWK2cYOG7dvUJ4rGZ+ccGZSU1oX/Pv8OkUvvCysS195PxiVK1SmdgF33bALQJRUEpnJtlgSHj1DTgaJJIetDabyxZfZ1gxTG6qdmqZsKIPHSmQETk3LDLxSADACTndOG1a1ZKQlR7ZnZEJ4KdBQsJrbRttKnEZGYxfYVLlM26pySTbMO9svE4swgXP5vJSo95mJ3jUtw7AL/IEvXEWAMC5HFzmq6HlvPGtRRS8UojK/fCm9r1Wv38r0BqBskYmCtYhsgDQVcrCCHa5ip2P6xYCinJCvDugdGDHb2EFlkuJcibprm/7ZKX3lJT8XvXEuFPeUTfDVceT37lBPG3Rfeykt67c1Lyt6gJuVSaJP5AWiGPrioNRpO8QCM8kJkXxNoxyxeBBAP6EU8n40ahYPsgn0wzGuptzKdu4L3Yi5hHN2FyOhzYM8y68r89l+aP5KXWTLOqUvCA62TUceMFJy/wAcnPUMuu7vdtZrLjexjW+QaUp/v7QNdryF2TyD3TrDzt2nMGVFU1asfuIjUetHscND2GKFmfFF/+5trJsjRSAzFsh35Xjgx1izR87XVCkPE9bsyscSm/J+jL1ytHhuVMYybRy5EijhpQChza/2O79z+NxKCOiHZiq1R05+abz/NmwAKIRo86zoIwecF1x1QiAcKdcSeMWFaBGgsYhqgqim2MkWdvMM43vPMD25wfY9Y06cnXLntGYnNLIYR5K0tiHJSg06kY0fGaGwS4rbGTM/pgfwSujMN7yoAPwFH2BEaNYKE4BcibQPB5n3gx5kirKx9HNFLzD1uuD2XkBgjMj4ixKf8f9pkAiNXoH+szlSeKDLOM+2lPh1SizhwIpNKiNxXti9d0cZ77RTLNK51BQVmX9iUd7TGr/tUiHkwn6N/oDGYTuNZfXCCRfVKXIp0IVG970XYzbsew/nYogohljdC7YHObUl5Yh+PHFpWVm8rqSgMZ79V2hJGfbrqDCemaklO2WDzhTWWKpZlJVvd41S0gN/mZoD/bbLJhxDL7NaW/87mfS/H1PaOAdWQiCll2ZVUiBFuxNsmF/WoRnCBgnUKJrum05dYAb6PyU17kHIQ5a51TKWsYxlLGMZy1jGv4xYVFq6nIhsvy/6oi/ive99L094
whOufMUuIS4J9HPO8dznPpff/M3f5B3veAc33njj3PtPeMITyLKMt7/97TzjGc8A4CMf+Qh33HFHMsq8+eab+bEf+zFOnjzJ0aNHAfjjP/5jVldXeexjH3tJK++C3ORek/U4pZdhUrcInCVPgV2fjwtfANFCZ25ktZkoMxKWtZcJuRAwyuYBokstZsT1jh2O3e2YYzt1/XbwoAxBNihF3B6V43QDNkeODuAKL/EkigGoHDNYbxlespU+VcJ3fs4xDBe6Z3exXkJY1/qp+PUWgEKpPjoTKFvt+b3FLs77C/CLaxTBzco6KjPfjRv393ZlfDe3ZE9AV0tBfhE1te43pYCeEkihmDWWnhYUqvUIw1TzTIPwfwRiu2yLbhdtDOc84VIHCcy8U5jSgQ0gZ9uIaoqoJ+l3hG1wdSj0ZD0wDXLnDHayhZmOMWeOYzbPcOpv/pHJqQ2279nh3Ec32Nyu+PikZrM2nKt3wzGrWnIwV4y0ZDVX9JUgH2RkfT0HXMlMeZA1gHd+tfboHg7ve/DLe+XpIkfmOkhitpNoU9U4azFTX5CttidUOzX1uGZ8ckw9bdg8O2Wz9kDd2crM+Q9eaUQA8J5yHgiSQF+1xZHF6PoI9pVIQGkRng96Gpl5FmAEA/NhjlBiDgCVHXDRswkDq9C6Xaw+IICGHlDNhj1U4WVEVXxvMPCywCsHEMXQP/ZHOF1gcw/6Eb05zwf6NZ5dKvIhsr+CambI4SpuOkaODlAcOkO1NUYPC4ZbE3qrPQYnx6ydnjI8NWGrsdwxqdlqLg7626wtShh//p2ZonJJPa4CAFyRDQr6Wx5wzFe8D5fIC89mDDKmRLmtKGGqM2Sv7yXQ7ABU5sdDqRaoU8tYxnxcdbkV84oCacSYYwS10uoxH+qqAERrs/T9BT+tObBvQcbTLeQzi01aQgh6sv2tKwlRTebYfcIan/t012GPvMapHJfZxFh25QS7dZbm9HEmpzawdYPMNaNDa+hj18OBa7C9IZWTlI3xuesemKjo7DOBvy9IgR8nA3CR9pnOPVBBeD+xkuq5vMysHPMNPLKzb+9HhsxehyQy7qJ/l5fDbJuQdsUFgN9Ljdg35JxjkCsKJdCmTOoFLh9ghWa78hLopbFJcjVXInnWRZDZL8v7G5vwmehvJ6IPXzyPWnHW5JUpIoBWl4jaep9t03ifuuDhJ8tt7+N38i6aez/O1j/dzrl/uIetu7a48/SEE7OGEzN/vGPuMFSC/nrB4HCf3oGRv3+tDv39qxh46e0u6NdUYIMk5V47TnqWoFW5z/86cv9dOVoRpE11eKHQEtCsFi0jzgN2DTsBKPLHZZ7hBx5MMtYl5lj8rmf22fTY9QZMnn/Cg6YrgcWmpZei16pVLtnzVOteC4uNDM4idLzGmvYajO8HCXSiJHpTp9dt3WCqxjea1SYBdPGYdc/wtcznxOujnMHhPoPDffpH1xkcPYBaP+qZwoMDuP4BbN5nZgWVtcwaP1dprJuTxMykQAlJ5oDMN5HVmWfSRfBvkCvWBhnTyrAxqdN+7forxuOUB5ZfP1fpubGWSVBaUFKwUxnfmCaCVH+4XuJzrfxjBAa9oolI+a5xfhtqY9O21MYCfswstJ8fOSeRwuKkoJFX7vO+jGUsYxnLWMYylrGMZdyf8YVf+IV8wRd8AS95yUv43d/9XabTKf1+/wH57Uua9T/72c/mLW95C7/927/NyspK8olZW1uj3++ztrbGs571LG699VYOHjzI6uoqz33uc7n55pt50pOeBMDTnvY0HvvYx/KN3/iNvOpVr+L48eO86EUv4tnPfvYlS3jGbr+ur17XQwb2rrXuAv/Yu6Pdf3i+4BMLXK2cJ3PSnv4z3sxcCd/1e7E1qQuVhbsFuPOGkAnIEdaAcK0kY5yoLprZZ7nvACYUsXSY2C8WvWyDENL7Y9iORE7nOYRt7RTwXOiwlUIihEgT3vbRf1RJfVFFnfNt/n6BgdZB2fjCT5xIgwfLwO//
2jpMWBshXHo9ft+fgpe+NlqKcIyj354HSp0UaJXPgb90wL7Yae7SvvS/nsXaTli+B26DnJap/PGL50pgLQhTQdN4MMMZnAjHORZFbeO9iibb2OmY+vQJpme2GB8/x/Y9O2zdtc2d50pOzRrunJ6f3RR9+WJnc1boObCqZbApVCYToCfV3l4aInqfdMC+bNBPoJ8MHnfQstqassJWDdX2mGprQrXtwc7Z1oymbGBSY4GdxtJXYt9Av/OFhY5H3YV/qwgsyZGWCQBcbax/bVKTDzJUrpJ0ZZQ89R4xoWgZfssaOycpGkHBuL90v71NZEP/KJVEBcZfBMNE0RYXXTbw7ICuF17nHI6htG6Lu2HMsYCQGjkyON1KaeVSMSgrVKYxZYWzfl0Plw396GtUGc4GCa372tfGeeZpJmC2VaVzS+UKWzUIJTHlDFNWZGWFLnJcGUG/jjdi8DwSWe7XXeee4ddlDDwIZmPxmrg/l7+M/YmrLbeKcnEXJJunxqiLOA8WJPWEc/P+fV0wqpsHXECy/Lw520Wsy9z/UWlgLlT7fmIJtU1NKR/rLqepsbMppm6wVZMY0vnqIPiC9jzwYH2zmHMe/JIujLeJFRPAPiGStOeem7GwvwSNByjm9MhlACf7D7p/n7GOsnEpXzcapLsPldFFyffLDEELQGRSkCuBmM5aRQNnQZAAv3HVgg4O6aUEZdswE1U8mpTHtBKVKf92ll3nEV2FjnabhA25VmO8xHo1RUy8TKw5c5zZyZNMT55jfGLM+MSEc7W/z3Wjr7yCQG+1R74yIF8dkA08O93fo4LXbpTzlso3ujVVe6/qgPpCeb9fJ4Rv+gpekC54eUZGYppz0Z67mQSjvM9bbSwrVlEOMnpB6jPGIuDnWX0myXt2waf4fwSlus+nlWFSGcrGMqn9Y64kjfXf1WH9hWi8tOqiGslCQ11sSIjXkuswA4Wz7XMIiTog52VB3YKKwoVChuPXV5J8mJEPc/KVAdmwQI1GPq8ajHwTVQBd69pSh8bECFBHxp+UAmxg/KWd7b396rDPMyWwLkNJwaQyQfrTg35NZ793j5VekGNVUia/RiVF8mGc+wvjWBbAcyUFxikyKaito1DSr2/cdQG8jPKeUoJ1Jt2LMqkBi5K+KbK+n/PyvWKZWy1jGctYxjKWsYxlLONS40d+5Ef4/M//fH74h3+Y6667ju/8zu98QH73kkC/17/+9QB8wRd8wdzrb3zjG/mWb/kWAF7zmtcgpeQZz3gGs9mMpz/96bzuda9Ln1VK8da3vpVbbrmFm2++meFwyDd/8zfz0pe+9JJXftZYss5kKoqARJ8XmC9HLU675tLqOMmDFiSLQEcs9gSpyhjz4KFLwNOiAfnFRrdzFtoJVxcsIxQvIvMOaOUZgzSVqKeJQSNj8cFZD+iF7ZHVGMoxdu0anM59p7oucLrni0Ohq17YxnchRw++IGO1S36rCwBC27EvdWIN5vkAIySNdcHLobOvrriosz9RWcc9Ow2TyjCpDZuzhkwKHnWgj5ItaCZEKNgF753oxVIZ5+v7nWrpxQCSAg/W5QjPTg3LOluaNJmPE/sYUfoLWmafkqBs65MTAWhf6JJhgizQkW2hc8R004N9cV2UwtX+uYt+L0HWyE3H1Ld/mNnGNrNzO4yPn2G2scOZfzzL1l3b3D6u+dDW7D5BF+/fJ7zU5XpBPszorfbQ/XlpTl1kCbDTQaqyO+HuSlX6IpWcA/pkFv4vCt+13ul0d1UJTU21NWa2sUO1PUaq0+i+xoSCWrVTsZZ5maLKug4o9+BGaR2ldXPMtvXMy4SuZYrRzDBUgtGGRGctczICf0BHvrPTNBGBQSmSbCiAyiT5yBei9LCgf3Sd3oEV5Ooh1PoRRH+IPHA0sftsPgSdY3Xhpaysw9R2F3tISYFEkOcDP6ZE+TBT+UaBvEAFb0A5XGGgM3oHRuhhn+LQJrONHYr1PrPNGWv37jDdKNmsLacDO3Mz+B1FKS0J
c0xJ8DJb40mNqQz1tKEaV2R9TXFmQj7MyIa+iCpzTTbsJ+9EVeSoXKNGo7B+q77EO1gJO1MGhmyBU1cqELuMh3NcbbmVv9+4lo23lw/WQiyOjN6TTvi8xFSewRQbjwCBxEnpZTODZGCUbpORNSU7TQFBTjTKiV9y2KZlFkc58c72zEnw2qYFA0Tw8LQWRNvcpASeKWUqn0uNt3DlBBmAvp27T6FNTu/Aim+sis1SQnsWn2AO8OuCmDGHXfQr9Hmo8PulA1AI53C2Adn4/Wz9/rLFCnbl2EXvom6uslfeciXNVRaYGcu09sCMcTmjXDIQBqTG0KpK+H/C/jcVOIuR+WUDvbkSSU1BS+Hz2umG36YAhi7m811ZbUm3acqvhHGgrGeTKYGXZJcgqs65pQLkYhuvouE6zXvd8882Xt46SqtXM5oTd2A2z7D5wX9g6+P3cuYjp7j7789wT9nwz+N5kHotk1zf1xy88QCHHnOEA590Hf0jB9BHr/PNOJGhHiU7QyONq0ovw12OW8Zfx5OO4MXrfZ1zXFbQ0EpEOvz2p0OGB5NyKxHCcaifMcgkK7mmn3mW1qyxaT4T839j230eWXtdJt8sgFF5WSdZyejvVzWebbZT1oyrDCVhc9YERpmgZzw4lSvPihWmTnObxCqWum1CUK18aTon4phlGy/X2ZHOjeOGCBKfLubK1mIC+G87OVZXrWFqHH3lm7eO9RSHC83qI1dZe9QBVm44Rv+6R6DWjyAPXoPtr2GLNWayoKotk9rLYEZFkhZMFljhyMKBiZKaANYqTJRdNZZ+ppgWmto4ysYD3ZXZzfRblFgFkq9iPFbGzoOx0JVv9ezAQe49GAutKLSkF/6iHGgMz/SLLD8fk9p6r0jj1984qLWg3ENJZBnLWMYylrGMZSxjGcu4mqKua/7yL/+StbU1Xvayl/Fd3/VdD9hvX7K8531FURTcdttt3Hbbbef9zKMe9Sh+//d//1J+eu/1YTcIAn7yGIURd9utk3z2DCQpxDgRnyt67NURGv8VnkgXmX+wR4HEeeBmrxBC7Oqi34vNZ8OC4q43YQ2lAN0pHAjjwThZjUPRx7RFrghiNmXbFW7qMJHXu7rBk+wUoU/Zefk9YZtdhbvF/dKVySJI27nMejkjSN3WIuxoLzv5wLNgzhfGOjbLmrKx1NZxeJB5ySV8YcLgQrerAytown4woYu1n7VgQoyL2TopYJjJ5GsxDZI9Du/7UxkbvC7872XS+9/lSqKln9T7QmkrQRULh0oIz2wyQf5HgtOFZ/vVZZCjqn1h1jY4Y5LXY3o0JrH7ZhvbgRk3xpQVprIJOIrMvQux4la15IZBxiMKzXC1x+jYAF1kZMNW3lNmAbALgF8C8PLMe9REv5IO+JeAwMDY8lKW4XUpEcGfZo6BpXN0VWOHBc5Y9KAgrwz5KKMpG0Zlw9S0rNRceo+/RT+WqyHO1ZYiFmaco7Je/jg3jqGxyNL7HnbZfkDL7FOi46XopUFVHpgDq33ylQH9o+tkw4LimmuQwwj6HYXeAFus+MaBMI40DqrGF6Rq4+bG68QIwJ+fxgXmg9KhqaAtNMpe37NorEEOV0EqerUfZ3SRI6SktzpB9zX9cyXDrRlrZ6ZMjQdFp8Z6ud5uMSoU3uK1Wnn9Ydia4YyjKb0kVzXMyPozeuOZlzddGXjAr5+TDfrofk4PUCOgP2wPhtS+eBiLpvezh9ZesexGf+jEVZdbOZ97xJFSLEhz3leTjsDnVSrmKHPsp4Xvh8J7bf1+MJFhKAV6D/ZfvIyN64Jk8w1bF1zHPfyPI+s9+X5B8ub0eaD2O6XbAIZnAglnk6SfKAbovECNKvK7TyEz3d5vwu8q6eUPFz397mt/dtUVPECh2v2nNdjM72tdJCZSavS6yBDn+f9Cr11sGOs4Oa5RwoOafS3pxfuQsyEflvPHJ/2wvGyytABGuf++lsFzz9jEMI3gYro3QfIZFCGfiR7L6ft4wLaR3XsKqSku
5d5CeulHqcBZdARrXZD0BJAaOTmHaErc1hnfaNXUmM0zNGdPs33nCTZv32Dj9i0+Pqk5viALPlTCS6UfHTC6dsTwmoP0j3hJSDk6gOj1vVd3lOqXCmelv54jENjULeMvIkTx/8X9ZNs5jx+23BxQDFFONUiiSkUmPZtr1pgALvkcpTExP2hfi/l2P4t5lyPvyHlOq3b7IxgVpSdjRD84Y1uWoJMqePGq1iahC+pHsC/6mHfyxDnGX5xThXHNv28RmfOAaje/hCSpLoNndZRjB0kuXWJoHi40g8N9hscGFIdWKQ6tIlcOINcOecWEbIDLB1S1VyKpwzYmn7twAksJGRIp57394jYbG1RogiRtZNz1AugX2YMw38Aax6oIzkbAdS8Z1kXgr2psYm7mWmJyh3Uq/U4TZFljzBpLbW0CIqGd89TGspJrfPYo5/K6ByqWudUylrGMZSxjGctYxjIuJbIs4+abb2Y2mwHwd3/3dw/Yb99/ph4PQNSmnVzIAGxE4G6Rbbc4LTChsCMRWNEWt4A0KYfdEkrQFrRch4EXGVUxotdb2QH9EqlNePkbuYD6RbmcSN+yMM+GC+trApuRwMITzSzIM5YenDMtM8/VVQvchO5eIRUu+E/5hfqJsMsHTI1LDEghhPftsiaAQy0wNFeU6fphdKREnVQIIT1jILMg1hEEsLJt5+ZKYhe74CI/d77POuDstE4T0BsP9MmVYFLbjmeG7wB2IhQuLPQcoOFgoS57i0ZZe47Nxg11kM2pre+Mn9Q2db5mSjLKFWuFZpApCimDnKxIjMQIaCspkmSpkgJnHVOgD/58qQIzNEh7uqbCVaU/d8oxWIuzBrt5BrPjGX71ZEq1NaEpqyRdpHJFNm0YaUm1h8RiIQU3DDIO54rDKznDowPvzXZ4MOfFl+Q4k9xiNsfgU4Hxtyj1KZSck1Fy1u6WAo0+bFImmSs5GJAFCaZsWOCsJRuW9CqLqSxrtkwAfV95H72dxgNJ97fk56VGaR2m8uy2UTDYyaUfp3JpyWuBDsCekL4QZQGVS1TwT9SFRvc1+TDzQNqhVbKVAcX6Cv0jB5ArB9BHrvOSUyvr2GLNs9kC6FdZqGrrvTFNW3yLwIaIXi/Cl6Gs7BT5A9PPdwP4R6c8009Yg2hqpJTk1qCLnHpcIrOMppzRO7BDvT2h3JwxPFfSlA0Hw2MdwFsPhs57YALptcoa+maG2oZ8UJP1NbrQzLZmyEzRW52R9TXZsIdZqdDhnJWZxtWh8C8VVqrgZZgn79llLOOhEulaDX+tpHcofu/B5e4CcDLKHQqSv9lc3hDBjwAmGEe6v6Zfti54CO8ugEZwENrxBJhjrAO7wXa7G1BK7L/YKBWWIaLUsJCgvKSfsPNMoKSi0FReWaE/DACLYnD0uGdTR7DFGkQAt/paYJzAWDcnTxcbI1znz79hW+A17D+ntG/2IjTxdJaxHxGXc7486UKx13caC3duTjk0yDk2zBlkgmEmwbQsy+it1mVdunCuXG5eJQWsdkyWhakQpmn9rgPQY0x7f9LSA1cSEhMpMvkI54eWGi39MYwNbcI086xWa/19TBce/BPVvB+cEDipkOUmZvMM5tzJsEqK5uxpxveeZeOjZzn30Q0+eq7cxfADWMsUh3PF6iNXWXnkOsPrjqCPXodcPeT9daVKEv4pog8d+LmBztr9G4GrqIjQYZQ6wrVn4zgA2JatGve3jte+kCH3cGQqw7qMSWU80Gcdk9pg46Pz8pOFnpeVBJhp3/TWAnx+P0QZSs8gU6nZDaL6iktKAz2d++tI557RGGU8RfA+j2CfysKjTudH9M3sNgdgas+shRYE3Et6PjagBV/kfOa9hFWwBhhp+f+z9+7RtmRVefg311pVu/be53Fv3+7b3bS8FBBEogaDGOMPVB4iIvgAiUTEGA1CMILDZ0YUiQ4GPlHBkBEQefQQEsIQ0RAVfCARbJEEgQQNykOaft6+955z9tl7V9Va6/fHnHPV
qjr73L59z2n6wZ5jnLHP2WfvVVWrqlatNb/5fR+2nGHA7/QUkytOYHL6JMpTlzNwu3UZfLWJMJpi6SOWAvi1A2AT6LygreU1AkHl/rsxMohMZxsMCmMwcryuUL8/BV8BBk+5L7vjaYTRt/SdlCcz/TrQb5n5AqosKAN/HfjH9gS88i5MrmYD7DcejY/YWTSpPZaGZaafAo3WlFi268nVOtaxjnWsYx3rWMc67v7x2Mc+Fm9961vx1Kc+FX/913/ds0q4M+MeDfrlJvBDaSQAWQKpW7TkwBsARERmnRn5n2Gjd6PJB/QZhbnEIhFgQYCJ4u/SyYrm+6Tf1+K9JBE5PB7iJIACDDaBgtRJa+ni0wdQs+yYfonxVwP1ArFeILQ1V/DqAl8X/64ETbYQiglX4GOBWE5h5ucxDS3MTf8PZjRGfc2XgNpsUZtVufZ8Y3zTSeMAXA1cVClRFSYnEco7z6QyZ1paQrefGpYTHrkKTHFwbQ4AGDuDr7hmSzz9GLiYtwHnFg1OT0ucGlvcOGuxV3thKHHV7OlJibE72g2714R0bYXIC+FJabFoIZI2gHcGlVQMb5QOI0fJbzAgovZA8B3IEoAeiDt2fF2NLcHM56B6n68ZOaehXiDMdhD2ziHOZ2hvu4XljAA0swXaRY12sUT0AbZ0DNTZGpPLJyBLuMZw1flc+k5ljMYnWMJzeuVUpDwrlFsTmILbIGsPMPg0WaLgX5L3HFVJplOrqnsypBk78UAEzxmRLMgIs1COBwAmly+SvKUbW2zXAZfvLNF4Bo322pD8/uae2X93FwZgEyHsxJAYbVoRPbYGY3gYT4nJZ0uLYlrAFhaj7RFc5VBOS5HRrDA5fRKjExs9sA8nrkQsxgijDWb4GYd5G9HUDI43QZJRwlLOEzqaiArE1yRCRKNJe2NgVYJVmRIxgCrAiMReVE+9agYzXaCYVmgXNaqTm2hmC0xmDEj7RY3ljoB+8xbLnSVCE9DMW/ja8/sZEKgx99xvftagXbAnor6GxqOdlgg+wlYljDAOySqYnEmjAQwmJBbLZzfIrPa/PM7213HvDL0bjD5P858BSy+zAetJjyvzaQimAUhev3kbKlHnAxcCQBQBIvregmxXxc9e3ZMQs8+Ebj/JrAD+0k50QFpPGaFeAI4BANPM+fvesVSjdYhtIUCgSI8GBhIw4sIEZcQnVvDJ07zfy13EnZu4EKCYsM+VcRiJukKbihE6IDBEIEQCjEilq8euMMUSWHE755Ky1zsSIQIWAeoVreGLCeoQsVQmd2B1DWuA7ZHFqqmQM8B9t3keuGgD2iDznOWMZeWLCrTc4z60RVc8Nro4v+cLHcP5JXuOlZZQ2VKeK9ME7ugcsrR8DCoHmt7Teble/8JOjaIaAgCmXYCWM2bt+QaxqdnL0TiQXab9yYEmI2wxf/4MF1WduZELXMoKy3N7mN9yFmf+3234h7MLfGyvxqq4YmRx5dYIW5+3jelVp1BecboD/KpJ2tceu9U5UNS5E8+pUJTAcgVoFRXw9iJLy8BaApwQ07pHi3csAcYweBpBCA4YybN24giN3O97S4OFDzCGGVwuHLxwLFFifhkijEuLSWmxUfnE9Bs5g43KYbPkIrhJYTFyRvzkpCGVDk/nP2PtigUBbPf/NpuLg7KxzYHnJmJ70CsILSsG/0dj2MkEhcxP/KLGaGuE0AScrH2Si7dE2C4MxicqnLj/NqanN1jW86rLYU9dBXPicsTRFuJoCm9KLDKWXxN4btVmcxcF+tRyIH81IAQZ2VvPY+jIESaBfRcXt8P0A3itpcDgsg0YWY+lUzlWC2ta1C3fa0PGHyAqJiKdn8DBKorXpkkswnP7Dea1x807i+TZOBZp0KtPjHF6ixnMI2dAdwXTbz23Wsc61rGOiw7vPewg97KOdaxjHZ+r8aQnPQlvetOb8IxnPAOf/OQn8YAHPOBO3+Y9GvQz2UIs9/EDGJxT9l3M3sv/L/lmZgdGKVgN4jc3
2FYH+vX/k8t8KtMw/Q/dPuT72ckv9j/bbUw9IjLgSuWxgG7xrkkq/b/41PSYWvVCPuu5whWyMIUkH3IpJV+D6n201/8dyJUwJz4vLWgPSHrq9/V/vU4RiZzQcmWwLRLwdkdiVZIqAn1fFPQlA7V/UoLKmKx6Pt/P1ekvS8D2yKD2EQtP2KvZf8THbiGtC17eJjNFnUEnVXWRx5UD0nmFf4gR1gCFvF85i8ZHFOL/MyksCis+GEbAb+IFvcruNKFLBuh+O7nuCkMM9tV7MM0cWMySB0mY7XDS8vwZhPkMsxtv4zasESnPBl7YfcGHjOVnUE5LRB9RjB22gMQaYzbfGMV0hNGJDdiqZG+0TLoT6C90V4J/o4qr1suKwWtjEsBCAvp1XoQrmAEKEHpOXsWs2l1BR1uNEHxAuTnp9sUYAX0Moo/wdcDmokXbsG9eHQwWAgQqIDj3dy0LkK9ZSmBWHSKM4b9LQx0QLIw/W1g4YbWV0xJuWmF0cpOlPE9tw2ywzJQ9eRqopvDjbcBVCKMpaji0LXvCpOS1JLA1kZ8XWxiKiJFSFb6R8dZHHkutcXxvqB+SlUKDInASU30cAcSCP1MUCxhr5ByWMIWDX9SwVYl2f4Fm3qIYOzTzFm7WJNDPzhpEH2GFfZAz8nyMrA9dezQqm1oHmMIjZtd+kmIypmP1xMjjn4AdvbF8Heu4J0Viuax4zh8SRFw8leTFhU2XQhUvxTdLn835JoIUDARIIdaKQqkAwEQe2xSA681TQujv5kDW88B7kQtH0PIzIYqPYFJ+AHiepWNUYi6axJAK3iO2NYrpmOUTHY9R0Xv+rs6llNVsDI95xjGzJp+nSt9E6YPU7zkAexFSq/nrxUQcvJJ4oQHdvM+HiIUPIj8eYU2EDYTNcvXGrCFcPimSV1earwvjM5KBkTlsr8jtEgrG9DkE8DVShwgrwLAXRnksqrR9PU5rAIqU1gdAp5qQ9lVe9fGu83maz0HLPcS9c6ngzhZlf44t11OU867z9rCcI8xn8Pv7rK7hCrTzGvXOPuazBucbv3I+URDLpbP3bMVFWKNxVhiVLfH0usmuZypKYf3JdX9osVR3PpR5GyISiAQpeoyJ7SeAU6ZaYgjwgdBQhJU56qS0QK3ebUY+2THW0vm0zPhtXIQNUjgpwFLpDErL3nCTwmJc8O+Vs/3iSmUWS59EfU/Yfeot6mEQIxLw3pu7AIiGQOTYGSGIfKsrgRhhRmOEtkn3vRaT2ark+YfMrzbCEj6y/+Foa4TxyQrVyQqjExtcDLdxAma6JXLpI0RbolV2nxRUpQIJAZ6tFKzm3uPKstPrVIVYDTpmHY+tBoZIgNWYwL5VCgVLOvj+uOABvcwMHus2pHOUzqOAtLlf47xmkLA2LOHahojb9pbYXbS4ZWeJug2oGw/rDMaFTb6A2yOHZRtgL1YjeR3rWMc61nGnxkc+8hH83//7f/Gxj30s/cznc7z61a/GIx7xiEtq8/rrr8eZM2ewu7uLnZ2d9Pqwhz0MX/VVX3XMR7COz5X4bLGr1rGOw+JbvuVb8Ju/+Zv48Ic/vAb9bi8qZzB2B5P7fZCHDlQrps9pJad+K+oCsfv88Ju6rdw/Qit9c6hQ37MEONP3I3Gpeh5dgkNBPU0Qq4yMbncI+GW/c1LZc+KgXiAuF4iLGWLbIC4XHbAx4griOKqAnTMgtwNsXNZVwks1rz9/BvVt50B//zeYfPGXg+738I45J/5UeTIo5tX/8nt0BRCsJLduv2pxCPAdVpVOAM4uvJi6EyprsFF2nhXpOHyT9iW4aqUc1YUq30vLVeEbhcHSs4Tnoo341E6Dm/aWaEJM3jSFNSvbv73jDL3qfvb002hkcT9vOfG5NeqqlkvbJQBzsFBljAIAFjbibRWG/XPGBWHTBpjlDGb3JuDcTWjP3Ah/9mZmhtYNfNMiNC3mN59FM1sk0C+X0mwXDJr4jDoZfGRJyI0CxZQTHJOrLkO5OcX49AmUpy4H
lRXMdDMlR6MmmdqGGalN3WPspRDfGRqxbBqVVfLmI9sBeWgbBi/rxcH2gO46zKRu5eAAV6AQS7ZiWsFVI7SLJVphN4amRSv+hb72CI1H8BHtvIVvfAKRfB16cpLqKXfjov2sMQELYoblEINelRoxlpl+CviNtiqUmxOMTm5ictVlKLa3mN23fQpm+3KEKfvLhPE26gAsfcCi9Zk/j4LYfbAvDxrcdT4Ajch86icsOdhywtX0wqqJtoAxDjRugWqKsJgBbcNjWtuAqh3YDR7/ys0JQtOimS3gF0u0ixrNbJHea+ct2kWLeq+BbzwaAQJ9HeAbnzwqASSPQ341LPk5dpJkHTMDVST90DaI9QJU7gOhhfEtYB3MfH6kc7qOdXy2wwAHffIgoI9K4UFAqQyrMsquUc+rfE4D9ICTIWAlir9QvN2CQb1OmiHAkYEHkvdwGslzYCVGAF4Km7JtxIzVl8+xEuuvTZ5qEejY5MaCihYIZWLdkUpqWgeabLEk42yHvU7HUy688h5htgt7xTUIG6eYzRZamPl5ZhQZhxAD4Eag0VbX9wM/3tpHVC1LrA99+i40jzlqEIQpaUuYnZv4nE9PsXevNZjv89hfXUTBU2UJD9zuir/UD/us3URJhDICztcsN94u0ufiaJP76g7Ebh1ww14DJyynaWEA2V7tIzwBRK7PTLWERhhsbYj9+WjsmFmGDF+XBEwcgdoFaD6H3bkBcecM6r//SLpuzMYJYDTme2E5R1jMZP4ix2McsxzbBnExw/zMDkY+oHQF6t0ZljsL3Fp7nG8OPr0LAu43KXDZ1gjT01MUmxO4aQUIyMw73gf5UpBJAB2IYOw0FUxB5kzJm87XIF8C7QLULODKCaxRYLWTliRisF9vV1YZ6DbpDSUwtgniWR0DqGRvt2Ub0pwa4O8bWTgVLcs9AkCIFk0I2Ch5/ypn4Swlhl9lDbYqi1IYgKX4bSdmn7HdeCIyubAlWgHRmhDE+67PdEv3ZFBmmoN1jguolE3pa5gpYOYzIHiUwWO8v4CVIiRTcCFcNeN72JY2AX4nvuA+qE5to7z6vjAnroA5cTlCtYlQTtHCiKwmUjHV0DePILKqRor8TL/AtHc9W2YQd6oMhMJEkRxG8v4bRogRlWXJ1n3jeTuWEtuydIZBOvH6Y9nPcMDrTxl/zOJre+/t1x637CyxXLaY7SzhfUBbe5Qjh9nIJiDxxKTAfuNRruU917GOz6lo2xbO3aPTl/fa+MxnPoN//s//OVpRCXv4wx+Od7zjHbjvfe97yW1+5CMfwZOf/OTUJgD8yI/8CL7ru77ryPt7d4gYIz7xiU9gb2/vkoHRddyx8N7jd37nd/DN3/zNd/WurONzPP7Fv/gX2Nvb+6xs6x791HQEuGEqOwOkApjCx0ySzoUmT0Tr2i8x/tD9nUDBTMKHqGMG6nfzimANXes6XXDlUnU+q3oX7z20C/FVaxNgRXmyTX+vlwJSuFR5H5ZzxBA4ybSccwKhXiA2TY/ppxFnuyxFBdkv38DUs5QAc1dcA1NNEdsGYe8czCf+GubK+4oETudzwX2ZJdpCmxJZ3AmGeyIl7rvEDdX7HZBIhqvppdobtlztCRNZFoqlfiJG8qmc9RcBkC0RRpyYYzmabHGc+/+s2MbwPfYl4QWpE3moqzZGaEPE+WWbPnOxkX9UvU8c1A+mTv1hrYPnzKkAe5QSInpNaSjAHSCSOuKJEuV4R44lNqnehzl3C0wzRzh7I8tJ7Z7F/qdvQL27j/2bzyb2krKj9m7Y7bbj2S+umTUIPiL6iNFWmWQwTWFhN8oEHE2vOoXq1BbsydOwJ68AjacwE05skrUIyzmz8xYzRJ+BdcH3rlcYC3JFAlZ68p6aPBPwMLY1A916D0gC94Ih7ce2QWEsXMUMMb+oGegThmNoWoS6RQwheRnmQGC7aBF9RD2r4WuPjUWL7T2WiDxZWMx8wG21xy3L1VX7RwkD9htUOU8F/cbWpL+r7H1j
DWjFhatyqqZwCcyiasrjgasQbYloC7SJ0YfkPTVMSyr7WX8HkLxlNEGYh8qAstUNscSyPqGEPRzIsLcoGdii5ESlsYjLObNs5jNEV6K0FqFumKFaMoPTFA6haeGqkoHcukUxrTPAtnuNPsI3HkYZp4XKoJYopiVcVcKU7OUHIF1nYT7rWJTCSI22hJnvH/EM3/HQc3lntr+Oe28cuD+JALIpUa5elQFdcvxCj0KKMbHEVm2LhyP+TD7PworntRFQgad48SA4mdiJAJvGZUVJg7lVYjCqnKRuRItSTADQ8HwpBp7HOCCqDx0ZRFuCRgFm80QnMy0gChUlM4Ck7WgcYjFhMKXZT0w2BlA75QIN9uoFIsrkJ6iApc5xlNGkRUDaR6nfhoDrqghtmrtpX+vclTIZQ6pnfP5RdJKskaXRR+5gsYmGgmu9TUZgrw6pyOqkSs7vn4NK0h8ArC4yglw8+rwBOvafJ1ZnALhv9Xqw5ADxzY69dsCdK99xAmpTW4PqGajeR9xhT776zK0AwNKuZ24Umc0pYvBc7LTYRzQLGCfXQVYsUu/MWGJcWGK2tCiIn+0dD479ka8YsZff+GQlstwjfl4paF0vYKyFwuLp3hvI86pnHRUl72OLTqI6yD0RWsBbYey2sGRBIuvJQF//ohV13t49aY2Ta1JKK6OyJOUaZ23QBPTlAGAwBoYEoIqR2YaWJUSrjOXHKhiEkVUfRupsICJ7auZ9oOy+DvAThYIQD7DZOisIAoJICxsC2ZKvWcPgYQxtAv3NcoFCVCNGJzbEi5pQbjAwq4VWoxMbqE5tozq1DZpsdiw/WU/p/gQpploVOmYadEw/k70qUxFyHSUGI/h+AHi8JDlWJ6om6A4ZAKGwKvHZ7UjrI+BEpYQ62w1m9anU52rwrxb/v2UbsLdosbdoMdtbol62WOzX8G1AyIA9BQZrWQfeJfKe67nVOtZxl8WrXvUqPPe5zz024C+EALOW1D1S7Ozs4D//5/+MX/7lX07g3GMe8xj89m//Nk6cOHFJbX70ox/Ftddei2uvvTa1ecUVV+D1r389vv7rv/6i23n729+OxWKB7e1tbG9vY2trC9vb2zhx4gQmk8ntN3DMsbu7i7/8y7/E+973Przvfe/DX/zFX+D06dP44z/+48/6vqyKzwVQ/U1vehPe8573rEG/ddwtYmNj47OynXv0XU3tAtRkhyCgERkHgukt9lWCE1jNeAFyCU+kBI2+50Wi04IrrwkxefjlUwVNlKUKYrD3ipq/J5nMgUQnNUsGfXwNtG3HRsolsYJHmO0AxsKMpwi5pGHOmGprhPmM/9fWqQ1e0De8kA+bMMbyttsFqF2I5JCDu++DgXqJsJih+fuP4NxffQCXf/1TQNtXdEmClLyzDNq1S66kdSNEVzIDUJNE7ZKZiJMM9FvuwtTMfkleGsaBjEufS2xCea19xPllwMKz74SCrF5AWa0cZwadxWzpUfuIaWFSYsEYrPSbub1oQkDlHCaFwfaIxMcmJC+ii20yPyYCYGMLahaJ5Zn6141A1mFalEn+M/W4JpwGkoFcxawfAiekggfN9kHLGajZR7jtJrTzGct37u/C7+9j5xM3YO+Gc7jxf9/EHm8Fs5pCE7BzQ1d9cL4JmPmAW5bsSVIawn3PFZgWBpPLxxifZOZDuTlBdWobk8+7mgG/U1eBtk4hugqhGHXgd8VSXmayD7Qtg3R6vWYyUyrlSSXLe2I07hinxnXJW2VvSDtRWK96T/TupSw4N8WSn+qBadQPUxiDoWnha2ZBKtjHr8sEBipLkkFCZv/VwiDbPMvecsudGrvC+ru19pj7gPNNuEMgoAJ8CuaVpvMcGr4W8pp/zsn5PdCuvGdKx9KrI5YKM+MpMBrLvc33t5fk22EsamXz6WbysVjvW2tYZgrQZHUERU4QRUkOMuPPIfoa5As+76EF+RF7mhYtbFFy4UNRAq5g1qoxsGUDUxbwiyV83aKYVr3z6OsGVc7i
lPPWzFuEhll/IcnXWhiR4iqmo8QGVYlahICwnMMA8PUCNJ8xUD0aM1i9v2b6reOeE4QsaQ/0gIIkg6es3ozlBwigD/QBtuHY25tDdOODeiQfGjrWAyhSkoZ6wFBvjpUDRjnwN2hPJUiTtKHOpwAATXoPruAkPyAee508IMjAbF/etSkFWrHa4DlnppYQRlOY/RpxfweYnkqJfZU0zaMwgFnuoS032CeXAoMwrkTtuZjN2k5n4gDwl7Msbdk/V1k/UGj5mUomzS0I6OYn1gFtzSzFcopgCy7UCBE1gO0RYbu8Y4kzHyN2lj4VVG1vbcP4GmF/h7d/B0C/VVeNNVJ4B2agNQJSJrl9w9cLeZ4f2NL12hk+lk1kQAQxJsDPzM+D5jtobrke/uzNmN98DuqDbCYT2P1TcFc/UBqwCLMzULl92mAGuwJ+81vOoTq5CTMao9yaotwaY2wNtguLWoAfS8BlpcXJwuLyzRKTyycYbXERChnDxX5LLvYLQMdWle2TtZ0Ht55vma+TY6BVfZEBsDehLYDguCCxrWHtmBlkgT390jnICyqB3higUrmFYWCVZewJNiIBfkAQoIqSMIPOF5oQMXJS6Chy5ZaQ/PsmhU2svpFlELHMGG+9MUjHAplHNj6mIib1yguDKypGkXkNWryg954ReU/2G0aUQilZjxXbMxhr4OsWdlwmD2IAKCYV3LRCdXIT1RWXsazn5gnQZAPBjfjacGWvuGoYhjr54yQ1Syr7L0w/dGzAdDyQgkbq5Dp95L8PmdYhgBmQKvFq8jWYDG8LCkluVEG9ZRvgg0nMPwX6ckbg+f0G82WL5bzBfK9Gs2yxv1tzQWvwALg4YrlsE/C3bMNqOdp1rGMd99p4zWteg0c+8pH4yq/8yiO35b3HG97wBjznOc85+o7dzSPGiN3dXWxtbd3+hy8ybrzxRvzKr/wK/uN//I8wxuD5z38+HvjAB+IP//AP8Zu/+ZsYjUa338igvTe96U144xvfiL/6q7/Cgx70IDz72c/G2bNn8aEPfQhvfOMbcZ/73Oei22vbFp/85Cfxghe8oPf+U57yFPziL/4iHvzgB99uG5/+9KfxqU99Co961KOOBQz70Ic+hGc961m48cYbAQAPe9jD8K53vQuXX375kds+avz+7/8+rLV43OMed5fux2w2w6c+9SkURQHnHJxz2N7exubm5pHbbtsWP/3TP40v/MIvPIY9XcdRIsaIG2644Q7d0xeKj3/847jxxhvx6Ec/ei3duiLu0aAfWgYNuoRUJhsVe+8AhtJC5rBK5AT0ofM80ApHs+I7CvjpYipJqGQJJwVeUuJFkiu6AKXg5XML/ky9FPCj6cC8epEYTbHlxaKvF2khn6QK9VVAQYSA6Kve+/xqmJUyk8SKK8SDouzkEssRaHoS1eZJFJ//cODUfeHLMVe558cXWlA9B8ggjHgwphgQJicz0I/7wOyf7ZJ2zaIvPZQndnwN2DIlONLbkiC6eqPAZmkwF1+IfIGqFe/WEpyxSbM5JXnuwBigQJsBsFk6rraPkERbxGbpks+GuYjBRZNxhvgYqa1BzTwlX2DLJJ0WLScUKQbYDNRKTFBfd8k4BQBDy+CZAFZhtoNYL+Bnuwj7Owj7+1jctsNeMbuzBFTtfOo27N+6j72zC9xWe+y0Ifl9jEXKBwBuq32SrASQmGMng0U59/DTADIEUzq4cQmqpqBqAtrYRiwm7E3iqq7K2vE+kxvxcVQ1THaf9BIzxiXflcQmzRLH6qGG0IJK7hcjcmhhOUdc7PO9JMkwrbrn9jkpRpnJdPL7E+C8J0EqVfQIPgFKQQG/uuX3mhZ+XicgsJkt4OuAZlbjMmH/PWBnCV8H1LMaC/ED1MTeSilOYCWwl6q65eJWdloO7FH2P7IEI5KVzGDj6nNS/8RVFY8ii0e+AchgVE7EL4av/6BgHfos6bT97PbI78H8rjEiDeYhidXACSgeY0uQKVnyUxkWJd8HsR6ByilM
NQEt9hEX+wy0CfOOJjWcALgIHqFuEAMDekMQV6VAow9oZgtOOGXefcqAVN9AAPBNixj2YZqar7MMoKaiBFUT+PlyxRm9c4MM3bnV6HdkMF3HPSrSZDk9m1wnAy7AkRfASYH/EFfPrbgQi9lEsB1QlphjkO8Js4Sy8cNSxgLM50+6b/qq4CL4c1xANZAUHY5r+ixNB20YEGn7c6o8qUxA95yJofMFswIWuUqeV5Zz1FroJc81u3cLz5XKKWIxBi6/P8+VbAkvbJakPEgs5d0EILopLBgABEyah5XEkolz+a6ljsXjSSUWXR/4w+o5bSTDagDB8/6r+oIr+dyX4wTgxqJCCcI1m0VSwnCXMB5YImyNLAM1jlLhmLn8fsyCXM4vzE7UfQcSQ2uvCVi0EaU1AvhQAnDYOq4roSJiBhgXGnEBnLMlKmc68EeOL/dGpuUM1C5hFrug2Vn488zya87v9GTB28US41Pb2DQW9uRplvvM5hWkEvSzHbT7C5jCYXRiA+7qB2BT5udXXP1JbO3UuM+8Tc/50dYI5bTAaGuE6ZUT/k7VqR6ExUye156vaZnfw3iQFY9EmVcldZQYQCU678Za5dfBXosAqB2l53+6mtrumkvKKXJOcqBd7xeKAZZM8psLkaViDUWYoGocHVtMr6pCx5kQ4U1EYQ0sARulQ2EJlWWJSZaz5+8rIxNBii9lX6D7Jt6MDF6rF3I8wLg1BASKMJF9C4P4bKqsvpOCyWgcYEN67pvgYdsaVFbYKBya2QZ7YisoXLBMuN3Y4Otjsgm7eRKhmCAWFf8YB+9DYtUCgzWPgHwEYi9V6t/bukbt+Rvq+dF5oYJ+AYiGlUWyf/dYxAqO5uWu6n8YIgAHWGLvRR9426Uz8CFiv/bwwQBok68fS3x67M8bYfc1WMxqNEuPRsB/AGjLEsYR2sZjXnvsLRrs1i1GdwHot55brePuFiEEXHvttaiqCk9/+tOP3F7btrj11ltx1VVXHbmts2fP4oYbbsAXfdEXHbmtj33sY/jgBz+IP/zDPzwW0O8P/uAP8Ja3vOXYQL+7s0/Yq1/9anzxF3/xsfRb0zR4/vOfj9e97nW46qqr8NM//dP4nu/5HmxsbODWW2/Fc57znDvEnvyf//N/4iUveQne+c534tSpU3jmM5+JV77ylXjUox4FIsJHPvIRPPShD4XN8jQXipe97GX4oz/6I/z5n/95TzrvoQ99KF7+8pfjiU984kXv23K5xOMf/3g45/C1X/u1eMITnoDHP/7x+IIv+II7fK4XiwVe/vKXJ8DvIQ95CN71rnfh9OnTd6idPOq6BhGhKIrb//AF4oMf/CCe9rSn4WMf+9iR2jmOGI/H+KM/+iP88A//MObzOay1+Ku/+it8yZd8yZHbfutb34obbrgB0+n0GPaUo65rlOUdsyE4LP7iL/4Cj3zkI48MMH/kIx/Bm9/8Zrz4xS++ZCbz9ddfj5//+Z/H3/7t3+KTn/wknvzkJ+Pnfu7njrRfGufOncMzn/lMvPSlLz0W0O+Tn/wk3v/+9+NZz3oWHvawh+G5z30unvWsZx1rkcM9Pe7RoB/FKHJMoZ8Ayj8j0jOiynPh9kiBQU5mUV4tLV9PEirE1ZOUVVkmySiV8Mz8Y5KHjPr3ZdXofcCmYxcpQyku9oEycNIqyYIGRDQHE1matA/smUbq7zEE/uS9uJwnGSuEgGgMjDKp3AjeFqBqG6GU92zRPy4FcIztql2NY3BHw7cAxK8wA3SiYdCTYBB925P/BHgBOQTqCkvYLA2K+VnE8ckDMjwpgZhejzb50rZLSyJX2G2sMJ1sDnCQmXihIEk4kq8TEKaJtugyRqRX5pskKEPLDElfs0znfJaYneoLE+YzBvv29tDMFqh39lHvztDMFmh29xMDLfqA4CP2b93HcqdO7LPr503ynrtyZFOl9dxH7LVd4lBBqlo9XRKQxLKHVBTC0uMkaNRkaLpXCSSV1iwtZvk6zJIykSgliqOr
0nWWsyygVeTqISVsMEMG0dQduCfMjeg9SA3kFOwT30D1w0nnMGPL5tK5CgBS1cAKyKoAoF+UDPZNWrSLJWIIKCYVA4EqGVoHLHeWPUnJ0IhfoEinpl3wXZ/ngJ4y8/j3bhzIEwZRkzEr2lDJSlPIjyXY0iX/xryowET23+Hr0CD6Gs4ws8DKGOsNHWBIX2wkqSlNGso4TAICekkockLQweXsPyCxaIiy/W5qGGOS1DEJe9pWoc+kFiCwz9RkZqAXUFCBP2VTuGqUQNLogyRAW5APnAySsZSChwEQ6z4rdx3ruDtH/kwjZVQDCSDwWfL5ou/3jFl0QG6TTHrWq6RnAFLBzgHWoHwHQH8OlDP8cmUFeGSmoSwhqv/L5o6RBOA3VgD/rKAqeMRgevO8lVKJrug86GIA2m7eQ8EjEu9XKCvEYsyA3wBgAHisI3ASqQ4RY/lHgqykzywhFeYog0o/aAwQlXct5y4MzhmzLA0X0kQpPkuJDJMKbHTbGhbsaXcpcyxtxxBQOQZ/2Bcw8vNbJE8psj9zlEKw24sA8eyLMRXlpWsKSD7fvG31npTnWvAMxIUAV04AYSp1zHQBkXzN6hWikBEWM8T9XYT9fbTiG9suarT7XdHI9PwZLgRR71djBYQTeeh6gWbGBUm2GoE2tmFPnsZkbxeTUxOWgVy0MNbAFAbltGQP5WmBcnOCYlolycEYPM/P24ZZe6JiEIFODj27D3VORsGnZzyyZz+Bn6UgArUNomFFksIw498aMEMf8YDsPF+AnepHHnpuDDEjU0vstIgyB78LGJgQUZiIQICJzOizAjart5xKenbsvrYrsszkfGO2n3wvdKCVAn49yVJdG17oUjemKy4wjiV924ZZf8YCIaAcVey5nuYTRSehPtkEjadS3Fak+04Z1bpPKjxrLxIYOpCU1H5I40d27qQ4VtdQQwBRxwwioIhAsMySLAwhBOrWxiSFkCYCMLDEQG0n72nl2qHuPR/QNh5t7dE2AW1dwytQCwZ7Y4jwwgys28DWDdmaZB3r+FwNYwx2dnbwLd/yLcfS3lvf+la88IUvxFvf+lZ8xVd8xZHaev/734/f+73fw8tf/vIj79eVV16JD3zgA0fyiMvj0Y9+NK6++upjaeuWW27Bq1/9avz4j//4sbT3+te/Hn/8x3+M17zmNcciP/rYxz722NhkRVGgKAr8xm/8Bp7xjGf0AKdL2Ubbtrj88svxu7/7u3jc4x53AMB6+MMffofa+8AHPoD73e9+eNaznoWv/uqvxrd927fhO7/zO/H85z//DoNjf/M3fwPvPfb39/G2t70NZ8+ehfce3/md34nt7e071FZVVbjmmmvwR3/0R3jRi16E3/3d373k6+8zn/kMfu7nfg7vec978Ja3vAUPeMADLqkdjS/+4i/G6173umNjXb3xjW/EU5/61Eti5ylr9PGPfzy+67u+C7feeuux7dfTn/50fP7nf/6xSZjWdY3HPe5xuPbaa49lXHrxi1+MV73qVbj//e9/Sd9fLBb42Z/9WbzsZS9DCAH/6l/9K9zvfve7pLauueYa/NAP/RBe9rKX4V3vetcdvt4vFNvb2/ju7/5ufNmXfdmxtPfud78bz3nOcxBCwF//9V/jec97Hn74h38Yz3rWs/C85z3vDgHGr33ta/GBD3wAv/Zrv3bk/brpppvwrd/6rbj22msv+ZweV9yjQb8UeTJJQhl6Nls99jxOBpEnQ3wQGZMVYE4yc09eCUiLygNAH9AlowXsgyQxuOGBbx8gwJ74Qy1m/N2y4gT1UKLQFXwsxnASwdj0+4HjTJXroWM5yWej90BTA6PA27KStDIGsWCZGbN3C+/yaMrHQTGx9cLkZGLfhM0r+oCfHCfvb8W+NRk7i/bPMpAVWviCt40sCVZLtTURsFEYbBe7oJ0F4FuUAKIbIZST3vGuWptf7HvDmLcBPgLTgiu/W/H2A4AaESNHOOG4v2sfUV7A4I/Qyb2a+dkOyHIj
ZgrYsmM/tHyMVM/48/UMWO4jLmbwZ29GXOzDnz+D+twO6t19LM6cRzNbYHluD8udGu28xezmGeq9BjvnF0lC8nzje0CdJcJ2YVJi61P7TY9ldtPSi6eMwYZjf7hba37vstLiypHDyZHF9MoJJpdPUJ2coNyachLKlUiePKuk1rTCnAgRo56fUpKfEtlXGIdaKrHbEJm4EAEgSGLCwJkRbFGhHG0wW9RV7JXZLGDECy4Yy/eRJMQSK8uVgHMdE9F0QyMJS8J4ZqyG5bwD/paLxAQ0ywUKYeYeJgvqe76ASwaM5O8g4FLOLgOQftfq8ND73+psOwOHAb7htlSqMv987sVYbY9gqxJuWsFNx3DTKvWPSgiTXcAsDaKrYXwNGAcyFo66ZFfUxJee4+xVt5yq1YHO2yXq/zrQUA+TE3KxY94SiWxbAetKlEUF8jXIVXzfjGqYagqElgsm8kKKbBzkPpW/1UtS2IHqDakJ3Bz4S9dFSrLy+14APxMMF4QUJbfrytv3lbwT4lDW5jG2v457Z+RMF4JJCYc8KX7RqVYBDYlCxywSSW/eGD8brPrJZVnmpJ7g2/78CkgAHQI6Xz5fd/OxnAUv+6GMN0qynGVvHwAGRtLcQLcF9AqmKPI4TSSAhoAKUedePUlNYe7HAD+aQlURlMVDEMlxSwiRCyhGMo/Ya1gqrxxI8/W6FyzfLF2RBtoof3tGe2S/hREXY2Jml4YEXDXAaIv3p9kXll813NyxhV5fm6XNVDJkPI6BlQGKEezODbBk0Jx+yIGivjws8bUzKVgFYtFGLDwzjrQmhtnyDAyNHYF8zWy9dsGAFhEXFfkGhS3grPZbAOpOZUG/E87finD2FvjzZ7A8x0VWQQo8dHxsZguc+eBHMT51A8anT6J80D+CPXka7dZVSdp//8YzmN1wBu3+AnAFwvQU3NUPAACceuhHmHXuI1xVgKxBMa24sMoaFNMxbFWimFTsdywAU2zr/npAi6r4DwZXi1F3D4Q2KZWkuVgI/OwUIJGMg4kBMA7FaApbVDDEIGvju/WRDxEwQB1EotugN/9joK9jh/pAzEw16Pn+ducM8AYALIKw8ZQR6KSdUgC/0hLL5vv2oK2ChqJ+MYgVhMKih1+rPfYc5ZLqEOA4pLFO7xsz5Qrj2Dbsizz0mBZfahpV7OU3GiO4iq9BNxKWX+zk1NHdNzp3UvAvIooEKHWyyyTg5e2wEQxYBjcvvhqq2Oj6uDBRWLABIQIjZ7DwJrH9NKzKtBKPpiFGFMagCSHtswJ/fCoiYuD5Lfv41d38DEBoG3hv4X1A3Xic22+ws2hQxc9+QdV6brWOu2M8//nPP7a2nv70p+OJT3wizp07d+S2Hv/4x+Pxj3/80XcKwObmJr70S7/0WNoCgJMnT+LkyZPH0tbm5uaxAX4AYK3F9ddff2x+gxcjZXlH4pWvfOWxtfWYxzwGj3nMY46tvTe/+c3p9xAC/sf/+B+44oorLqmtL//yL8fXfd3X4SlPeQqe9rSnHZn9+su//MsAmGV6qfsEAH/7t3+LV7ziFfDe45Zbbjky6GetxTOe8YwjtZHHN3/zNyMcYqtzsfGQhzwEf/Znf4b3vve9R+qrPIgIX/7lX34sbQFAWZb4vd/7PfzDP/zDsbT3jne840jff/e7343LLrsMP/MzP4O2bY98Du573/viFa94BX78x38ci8XiSG3lQUT49m//9mNr7zu/8ztx+vRpPPnJT8bDH/5wPPKRj0w/D3nIQ+5QWx//+Mfxl3/5l8eyX+9973tx3XXXHYnNe1xx7wD9VoQuVPR3Tuqgn9zJKlCJDKwkgsiwP4o+ZvV2UWYfgA7wy5l9oe3LDOq2BlKe+fZ7UjOOE0CQauDeY76tE3svHaMremAfs6rsQfZfYgf6rj/0O65MnzejMTOybFeBDK381QSdb6WKeNG9B3ACYVR2jID8XKhfncpXpip/lZEiBhqLMUtJaZNSoe2BnpyN9pfKPQ6Xs6uW
txf7noYCD1oNTgS4wFXzmqhQCSFLhJICg0s7ZxCrLYRxvxqClnswi/PctzHALHa7bWki1HbVR+yD2CLu3oa4mKHdPYcw20FYzFDfdg7t/gKLs7tYnNnB8twMu5/Zw3JnidnN+z3POH29UNxaMxtpw5mVidwmAo1U044t4WTBHjOXlRanJgVGWyXKaYnRFieenEof6nWYS5wFD1DkhO0QFCJk4FHnG9WGCN9yokGZnbnnigGDQD4ATiqLC1siFh2oHkWWkkrP+6UV78JsjbaQZM2oAxt1bND7tC0RQwuyIpUVPIKbgQQUiyVLfrL0p3i8tQ2sJHqi9ywDKgBSaDhZoe8B6IFLMXtQK+MkDEDB4eeHwKG27WtmEcYsMWOF6WdLi2JzAleVKCZjlq8sXBpfuG0vLJsWpDkWZfpmP4lRkssDyqsmMjThDuNgbS7px7IoPsTMn7PP5AU4uaXDYCRmLjpXdeMcGU50hRYk0naksreDsbAH+ol0q3EFg3+OAXdbNT1J0GH0WZYMBFJRsqdfWYGKAmgvhgq1jnXcfSLGiCAJ+ByoH7L68rnWAZnrnI2d3fcrwRuZC9msSCDNrzKmXJo/QZ8b1C8s6TH9sjnWsPBE3qPBe2Qtz7VMYOZ3CGmuQMb2ilPy/c6PORqXwMbkO5vJVevnNJGuofKNXt43lPlyKTCmm0XnYQwcBGEJ/JzMZSpLkeuufUzS0I2MtzqvMYSe2sCdEYJxAgBa5NcQKx2E8UlQM+d5Us2LTLPY7STC8wgte/ACIOsAOPZm8wEFo00wsevHUqQgTbtgL+16n+dbvmZWq28YLHIlPz+A7noSSXVTz7j4R1QVYqveY1z4YcUXV4E5v6jRzBagM+dRPNAjErG8a/AAGdiqZD/BULK8Y7UNms5hTs5QndpCMakQQ0iy0olpbg1s4fj9UtQKBsV/3Hnynis61YVU3CdsQwAw+kwnYcJ7LvgBzy/IlczCa0fMZgNQ2gr5Y7ErnmTgL0YgGkr3dX4NW+L71xoCyTxAR4bcH50vckJhGdTKCzGtEX9gyiQss3v+gI86mQNsP5I5/mHkOV0DpIJPo39T39JBQmX6YQOomvL6zRjQ0K86U5no5qE2rQm0uCICPU+/fMzwIXKhBCEp1Oj9TpHET5Q/o6zeVFw3OM4h2JdklUOAlb4LMo9zhmBNTKxKHbtsBII8NLjwlju1MISGIqxhqc9JaVG3ntdQh3S8zoHJMIM1tAGhjWgbj7oN2K89iIYj3zrWsY6jBhFhe3v7WJkd9/aoquMtkvrGb/xGnD179ljb/FwMY8yRAKPTp0/j937v945xjziOCmI99rGPxc///M/jRS96EW6++eZj2qvji+OSz3TO4au/+quPpa07KzY3N49Fvvg44glPeAKe8IQnHHu711xzzbG3edzx0Ic+FLu7uxiPx5fcxnK5hDEGIQQsl0s45y5aVnhVvPe978WXfdmXHWmfjivu4aBf6JLLeaSkEPrJn5Ang7KSRiAlp6J1Cfxz1hxIciUgMZeZUsDPN5KYGlSYZvuRvjsMTYgVBuQcTIwI1YST0fUiMW56R6tJdAH7UpL+QqBfDvY5x0CcysjYgoE836bvka85YScMKPINV0U3S4TxNqKxoHbJgN1oY/Vpahegtu6YbLKNSAQSJld0I8TRBjxMWolaAgIRSCpYfQRMMZZK/RaxnFyw8juPC6XcVy031R/GyULZIQAQINdrAmjO/VeMQPPzMItd+L//INz9H3YA9DP7ZxE+9ldwV1wDjMYI529llph4wyGEJC2p5zw2NZY33YB2tsD8zPnkE7M8N0Mzb7E4u8Ds5hlmZ+b4u1mD22p/uwDfYRGA5NV3WOy0AaWx+IJpie3CYGOjxOTyMcopv45ObKDcYqkpM552FeYxIoZWDNpqvo4yoKjnH2TZP6j2HdCnHiKcvIzJP6arvuYEprMEGwhAQIyEkS0ZqQUEtGtYLqwoRWaNAehgS7kPLGJRdQCsYeAy
pvu8kfucWWUILUwpDEUBjBT0i97DiDwvQkjvWZWVPHACDgGjhp+RhJGCecogHIKJ0XsG+4Sl1oqPjIKEmjA0hYMtHDP8qhFGJzdQTCrYjU2uPk/ywKED71WOWM8jcADkS5XK6f8ZMGhcSopxxbSDNQ4+SrKblN3XJbuCjAGAJqYIUc47wJfWqJxwEUBbICSwTzxVy9AvxJCg7HmQ/BoXM050llVi/JFKw6rc1OB8pVB5WPX0GzGLlMoLV9rfGUFaEHIntr+Oe2dEAdv12bvKt8IALLWXS0qiS9r3CgIy4Ivv/UyZYdV8KIaMdRREIjMmph6AJPeYv59YfnmiP5uHDeeLiQWY7XNEkPmUvCeynsP9S23nx0BdwUpUoE+PXRhS+ZwlghUCAJYQVyWAOUbwMaI0mTfZiliIEkFhOplxQ5R86ELkZ2ebJrH8+WUbsVEalJawswwMnFB3npWBeDFxADi4nc8lALVdiN9j2RXQkYGHwW40OFkAdP56BPH1MrMzPOcUxQgNM1BO8GYrebPBhx4rS2VERyaCZrvsYbjcBZqa52IAFwJVEySPNt3fGPj5Vy/h984xE3x/h4ux9vfhleFnDYpqwsUo0zGiD5jffBb17gztYokN2U4sJyxL7kYYndjE6MQG7KKGPXUV9twU0+kp2NBi45orEnvQCJBoi2y/LHtRqnwoFV2xDiDPActyoklS1JYM7Mq8B5BinNACrVyfIm+rygVkLIIxMML448+0cJWBk3uRL+XOe84LuKz+3AompXNHDByVUvzTk7nNP2kYzAoRiBR7zDtLlAq/kn+3rM+G40DWKXJRBhhjktQoSXtEMTHmuG+kuEzBP0DkRcHs4qF/qBYvyhyJJTwnPM/IJYO1761NhWfJ99SYbv6T/URZm+RjbgTBB7m3TISJAPFECl6AVYoH71WVDA3oiloNIXkhUlN3fSdjeVlOALCUamsIreGxpjAGlgJ7Hwp6qFehFoMUlsF4SE3BsnUonUi6GpJNHBxBoijU+NYnCdC9RYt53cLcBaDfem61jnWs486O7e1tfO/3fu9dvRvruBvHD/7gD+K66667W4J+61jHZzuOQz7zp37qp/D2t78dN998M57whCfgT/7kTy6pHQUP3/ve9x6Lj+pxxD0c9EPn6QfwwmsYKq+ZqCGDpG/ONtJFryb7k+dBlvBJJZIZkCh+IDngd2B7eUV5XvWtSfGs+htRPMpsIYnraQceZB4H/HGpFFUQT49jGNmiTYGVXGIrZhXNpO/lVfmSrIvlBBTGvC9SDR6LCrkc4oFTMDnJrEpJfpHvGEJhcjJtZx4ItQ9YCivGGkmEEbF8UACcJvbzPryI8BHYWfok3+ojA3qXj1cvrhwJa4yIz4FWgzcLmHoGapfw588g7J6FP3sLqKzQtjVmf/8JLP7oT7A4s4Mzf3MzljtLFGOH7ftv47KH3h/VqS3Y0mH/5nOod2dYnNlBM1vA1wGLswu0iwbLnRrzsws0ixbnm4A6RDTZAn/uI+Y+YK8NONvcOQteg4PMgYdslLi8tLjysjFGWyVGWyNUJyuUGwU273caoxObmFxxAsWV14Amm7Dbp0AVe5QkMCiTWlMgiFAiGtmqVDerBFkblOHHf8fY+RflSaIg8kzRALXn29QaA1dI5Z1cfyRyo1zpTQmETolaN+qBjz4CsOiqniXBQ8VSkiIscUsjn0AlUn8g7xOomwBB7w8CRXkMafiHAPixXiAGn+RFFVhUfzplpYXMk27IVEvefVaYBoVjsK8UmbBqymw1V3CfeC8ZL3MgsTv8Ow7GVdJkv4L+xgGGmb5ELaIrYcnAGAeAxCuMK9UZ3qMDxRfDivu0PcsJd5JrLGaMn5w92RVhMChNbsRFDsaAWpaCpeARq6bzgwxZwk7Ox8okniReExv7TrpP17GOOyOYRRZhQfAArEx8ksce/yGem7GXoE9XuiSu87lONJbH2cPmKUA3Z4uhK54aSHVGMgzAHNJGzPxiKQZE
CADQdnNEksq9A4VjqroQleVsQOGQJGwIki3vilZgS0Rb8NxS53xS6EAxcnGJK+HBknhp6NI+siVGoOTFdyHrrsp17CllVwUZGwtDmLcRu0vfgSHgBPzIUce0k2erIWBku/fvSDShU784LEIEliKB7hDE2zrAYdF7DlsAJ1wALeaIxQRmi/ul3TzNz47QIhCrABgCF47ZksduX2M6kjl1dCyJGHku6Syhcgz4mcUuzHLGctAC+KVntffA7rl0fWhEY5PXdljM+HU5Zxa8SG5GH1BuThI4V0y5snNy+kQCBf0t1wPBw2xfg3pyCrctLa5+6CNxwhVY3HQrzGVXYq8OGE9OAr5GdZ/7dJKQeVHfYK7QFf1lzx1XgEZjUMmFWCyHP+W5u6s6tYwYeiAVr4V8V3Qo1xfJnMOC789QtDDGIdoWrpgkdpdKxwKs1GENFwgo+KfPayLAglBaBaD6BQTp2KLMCYR9rIqVBh3T78CldyGG74pg1iCleUfEQW87K0CfAn6k65m8wBM8PpErEX3LcxwpTIiyP721oHxe56CHruG0be3XvJ8E3CYCdzTyIgf2XdRCgHy463V1AlHRzXPbJZLaS1IkcShsKYUCvD4rLKFRTz9h+SWgzxAzAOVv7wywbOEdMHIeI2dQOtMrLDGGBFgzUG9V37I/d9tULPEpvn5uzfRbxzrWcS+N0Wh0V+/COu7GQUR49atfjXe/+9139a6sYx33ivi6r/s6vOxlLwPAPperip4vJmazGb7lW74F73//+/H1X//1+NCHPoRHPOIRx7mrdzju2aBfqrSW6uyhTNOwynPAtIualMkWWATDrKTYsVh0kZ17BvaYfods5zCZqZgv7PLqUE2KJVDCCBumBTkHihFhmS34jWEQoxylivKcUXOgr7R9Vx78XLYvaR2Y++/o34Pq54sK20l2Iq+sF88YZVP6EOBDRC3ABAVeKJKRBayy/ezFSU+l2vbILLE6RLSeGQzqv1eX3WKz0K6Q/aTQ8uLdtyw1Vc9B9R7C2ZsR5zO0Z27A8sYbsfsPN6HcnCL6gL3rb8Gt/+cmfPLDt+B9t82xEKTiH5+4BY88u0B1soKxBvu37mN+doHZTftY7C6x1wZ8RkC+S2XrHWdsOJYBmwmqdrIwuHJkcVLYfcW0QLlRotoewU0qlJtTrlTfOsHeJNMtljUqBpKvKVHF4LImQXKtJb4eYq+yOQpjAcik5vKch0h4IUR4QzDCDCRjYF0piWOX7nlNcHX3DQnox5XwbeQq9bzynCvVS65qN4YT0sIog4D9URLUUcE/ZRU27E9Cd1RbW9m8mcwmgM6nbjlnQFHZod6DVMZSWYXyO4AOoEKX9IZhloBKnVJZcSFBWXUs4mHl8TCJNhx7s/5N4C4ZBnelb2L6XJS+5M9ZMvBaVQ/O7EZhHaVuUcAPSFX6/R3gbSkLJyZ2UH9fU7KzpQ6grAC0LYz0N4k0azquoT+q9LGeG8oSs3Qny+RdMFTq7c5sfx33ytCxlyCMMWQMPnQgk0pwB+qPx3kuORIlKc4eELiK6ZcnxPP5lTL8dA6n29D5Wza3IhKAX95XtQJuk8dEMiKVnjNtBhP7SMTjn/cHiq0A9CUCM0nqKPd/DN1xa8GDFqjBuIGsZw6mGlzMnUVAjwHohZFpRCZZAYs6xAQequewJQZnQ4gskx2BxgOl4VH59pY4fnCuGYDo2h5GzD7nA+Cs6cBc8YdF8ICV89vMGZAD0pwzFlWvPS9sVGfZ/4yk6KYUAGLkCK3n4ywMg6ClIVZoUMZ6dl0A6DP2V4zp6X9LLrpB26Q5ja14rDfWwFYjYXexvJHZ8HD6jK4XCLNdOF+j9hHnlh7XnDgNd+pqjJYLRFeh9hF1AEbFGHb7FG83D33mNNl1qYy+DPhLMtOuSHKeieFnC/6deM1DK9YNMXjEpukXKgWPMKpgi1LkUGtArjevxVfo5m6svivPcej1Ffm5DR47rLwX5XcAvSIfhCjgoTLWYma1
oN5xdEHQ+UKhoDirB3T7kv4PpPateGDq3OLAuk8LOcGAdCo2EtA/zT9XsA8jUW+dOQyFUkOMfczX6Pvc/yZSkldVxl/q/3gQIFWATlmSKuWeCi1yUFjmvcqIXJUPsVl7hTXpNQRmIVbOogkxsfysAING+zaBfjYVakUZu4N4/6kv6UUNlscd67nVOtaxjnWs424Q0+kUT3rSk+7q3VjHOu4V8ZjHPAZbW1vY2dnBk5/85EtuZ3t7G+9+97sRY8SLX/xiPP3pTz/Gvby0uFeAfkliSd+T156U2woGDWUJo578HLGWTNQKwoDuffluLhV6gMmnkS3eGDxiicv21AP6i+vQwu7cxL8Lyy5IhbouGMk3DCToYiCGDJTLkk3qRbYCmEuMK2E05v44yZ8r/x6Z1R4qRwnjEMv+vlFo4doaG9YBzsEam7zbdEFppUK1DZ2HxO2F+t3sNyFVxSrgN6s9zoWAW/c5mVJYwoNOVhhZhhZcDKDljP1LfA27ezP82ZvRnrkRi09/Estze9j5+A3srXd+iZ1P72C5U2O5s8RO7XHT0ifADwA+cG6BD/zux46jBz8rsV0YXFZaXDlyGE8LVNsjjLZKuIo9/NQLzk0qFNMKoxMbKLa3YLdPwWyegKmmQFn1q5fzUHm3lCSVxN4F9sl0uQtOSA8SzVzVT0Ab0BpO/BWG4IxB4aapSjsla4C0X+qh5Af+gcoMBaQqndQXaARbVChGWu1dr2al6HujjPm7KgbJn/QqoHuUPusOlu9fI8lLCh5hOe+DfcDtMwfTJjsvoPS7k/s0ZyEftu95AcPw/RXHdYDpEwMQAEINa1hiuXAGQSU/s6Qy0CXpEgMzdswRDKW2honMLEsVIcUhruLEZ2gRW5ZsRbXB9392XL3CD00aC9sj1ovk3TgEBg8kbdexjrtxqH+USu3mSdycfRYyoCkH/kJEp5agYJeyuYbFRmmjA7BPxtE0luqci5iHrooEOv+KxiFs9D0yyNcwZz4h41MHXHT3ozB4gQSSxAyUjDEAxsIoK9CrD7GOdxbKzusVSHllJAqjKUYkD2OR77MA7KXQ6g4Jk5h2/TZDBM4vGoQYMSkqOMNJ/jP7LRY+Yuw4Kb/0EZU7CAisiv02YN4o+5NBtdLyTqTxOItF4MINKwDy0keMY2Sp+HrezaO1oMvXoP3z8GdvBo2nMNUUdvcmxHIDodpMgNJO7bFHhMptYHNjwnKd9T42XInKuQQcGXl2s4/fnMd5lfC2JYy1zNpbzvn5udjnv+vOtD4vfBmO51SUKMdTwBUw4ymz5MtO5pxckQC/9ubrYaabCKNNnF96/N1t+3jolVfAnroK/vwZRDdCGyJ2a49YbGLj/g/j+X9TdwzD+UyY5Saxzcmql5/p2Poly0ujrJjhV1SIyvSzZQJvbe47md+fQQBvKTKitgE1TQIl7CYQa2YixNDCiCZukqGMDDjrXNykOVQ3p2eiLKXrNqqSg/a1nsNesVfGFJT2eozZdKJWzLcoUx1QgC7GxDokAb7zosGcJatSsTlonLP8+tuRccJkBUY6R4HtCkbl8weaAA6Aarw2imhkbucjY+VGFFEKmeMFouR77Qmwg+PQ/jMyxjtjUsEj2pqtHFTJIkbABClmGPGYTMUFPRCV3TdyhkF38T4OISLEFouWVvr5GUOw1sC7ggv2oEvw7P4LEWFoLruOdaxjHetYxzrWsY51XGKUZYknPelJeNvb3oav+ZqvueR2rLU4ceIEzp49i+c///l48IMffIx7eWlxzwb9gANV4gAuDPZpCFPuQGI6iyQBmLdPZnWb1MmBwjp0MnIdEEcKLuoCWz2mdF+AHviYpGBIGURgGaMBYzAloVSmMAPv0uJW9qs7FgY2u8V92zGe8u69QNXpsUZoYTwnUsajbSx9gG+7RZ1KAhWGk+3k29sFIzU5ScRdXPuYAL/9xrN8GRG2K4dJYTGxEVTPEF0Js38WZn4e2LsNcX8X80/8XyxuuhV719+C
8x+/GYuzC5z5f7eh3m+w1wbctPQ43zDYd0+PqSVcVlpcXlqcvM8GXOX4Z8yvk9MsUWoK1/nClU4kZouDrLBhDJkeqz5CnWdb1MyQSKH5wEmXTtgX4vsGAEw7UVkvHyNsILQGIIoiQcbVxF09NpJPoH5PfQQDOoahkX2JiPI7ALCXCSTpS75jsCWGWwyIKj28Ctxb9Z7ps+QOMGMsjx/RF8wkFHnKNK6s6tRVifZBYUJ6PeQzvRiOvasSbDpGEaVkd4/prP/TNgJAaBNYYMnAGoO44ogSeCt92/PVwSGJuF4DegUEgKwwEV3Hwonj/mcHAEWScnYVgwujMcJyzsxAYV0ihJQw/ayHJIDv1PbX8TkRBkjsFk0cD++u4e0WgT576PZeV40n+RwpK+hK966ANgz6yTzI1zD1nJl6wXfAIzGA1wHyA39VYwDvATfqxm6VEzWyXTcoYDH9MVrBvV4/EB3wPOV53fHeP6vGfAKz+wpLWLYRe3XAtDAYV6pwkM+xLl7aU9UTNNrABTelJRgZtyk7f5WregAcxQBa7DAAt38+MdLNmJlx0XuExSwBTbFteJxPfrIlQoyYFibJMvIBmzQ2O5krAlp01yaliSS/CACxQIiBr2+VslzsHzjmxJoHumsoK5ShagoaVTC50kFZpWcejdkP2HoPM91CO9pAvWzY48yWHUgoQEftI+YUMRlvM9jmFjDGItYLkO9YhrGtxcvNCMBYMsNPfkdRMigtKgYK+LVJSSHCWLl+AWHmoju+dOwCABrD224ZiFRpc76mu3OcgD5pyoN98mCEGmwi3KCYgM+hgH9ynUVIQYEUe+mrfs8MUbELRT7vkPlIjh2pd1/vK3KPDKUx81DPvEO3KWu/VEC0qkjqAtEB+hwhSP9mHaHrmSB+hJAiDD0HETzPVZAPkGNdcUy5ek33O/E3Yjww9l9ovy11csMhMvKYH4uXAjsAieVnHMF6A+/4OvbGwroSxpXMCHQMIo5Li4m9+H48tljPrdaxjnWsoxcxRlx33XX4iq/4irt6V9axjnWs45LjqU99KnZ3dzEej2//wxeIU6dOAQD+/b//98exW0eOezboN0g0J7Avl4UCDiZcgMMn1b0k+IrV0IUWapqkB1hyRBeXboRYTvof9TVoOetVqaftZ4lyCl5kYkJKoumCK8lI5UBfStzTQSBQF5oi5RMpcCJApGdgHC+E0z62FwRmji3kXNFiBxQ8RsUIxlVYtJxYUZCmNEjeeggt4rS84P5pAku9ZhZtxPlFg/3GY9GGVIW6PbK4YuLgbvsk4GvE0RR271Y0n/k4Zh/539i7/hbc9IFP4vwnz+Omm2b4xH6DWwZMvntLTC3hipHDfSqH6akxTjzwBACurCVDKMYOG9dcATcuYasSfsHyUrYaMVMiScFeGPhbCWZJJPYWSKR7xNuNmHVnTBSQjiT5HIUNpokiPi+1j50XiyQ0WZqJkx36vzz1qRXqTYgsKRq7BJM1nLiyovuk11eMgDMGVj2kWkkqBo/EFpHE9AVjALT1ZXVNP2kUQiclqgk3rTzPixK0mCAfX/Jk+4Xu7xUFFcPXnjfNqmIIBS+zMapXiLBqmz5jbsvnaNV4HQ7bp+y+vEAVfbeP6mu6ovCCOo9JZSAoy9CBx1KznCWggWwB8g0D0PUCQdgdcTE7fPvrWMfdLFb5aplsvFTQiiXxMm/VyP5+KuPH97wBsLqAId9K8sCLnaSnMvwoA+d6TF03Qqy2+vu5nMHu3NDNxwTEJwBkIxAsSyMLaJG2Lwzn3Ms4SYSGPns4eSJr++rblUuJpnmX7cZwGWvI1x0LfkUMe/8OwBq9KAxhWhjU3iIE4NM7C3zeVoXTYwbo2kDMcjIsh+kMpQKwC4WyjXzg5++yjZiWBuNoAAQUhjAyPIck38DWc1jpM/INqF3AnL8RYe8c2rM3d8cpPrIAOtnqpgbkPfXR9QX/vRX25flSdfPVGIC2hglzULPPz4lcAUNA41Q0pufL
ljCTlsfu/V1QUycArpPKFq9eqQQyJXjeU1ascDCewp48jVCMEYpx8ruO4g+L0MIUE0RbYLcJWHouMorFCKimMBsnGKDzPH8JMeDE1tU8723mMMaBihJGmOUxeJ6/qAypK8Tjm/dJAb/oRr1XVbxQnzf2PcwAbTIH5nDRewEXLWJbsxRqvYBKm5NvYWzJAGym/5qmyRGI1AF+kOItCDCUinjSxWAQCfAy3/IRCAbCGO0+lst8X/A+yYuMBPDz8mzXrRpwIyodGhVIy/6fy/AeWsS16n93AOQDINctBrLKhBBZuaTxAUHmqMr0434xMMQ+fgDf3xDWn4mUgD+Ax2uKHdivgLyOeX3wz6RiVlbJ6e9qED9HjY4dzkUHNo3b3YcU8PPZF601sE6PegLfBpi2hHEFrDNwhYUrLMalxbh0mNh73zpsHetYxzruafErv/Ir2N3dXYN+61jHOu7R8aQnPQmz2dHzdpdddhn+zb/5N7jsssuOYa+OHvds0A/os/qU8dEzVM8AP5MtaLNYmYy6UGJ6EJyczxbI2k6eRF7BhIm2WL2/GUDHhLwgXjWmlwxKAGMG6vXAvgsAK1ypKN5+zgFRGC555asp+xW4g1CQDsBKYPOiwziE0RQk/dHYCj5wFTfAeGRpIObyTarWvtjQw5k4QrVRJp+R0hIuHzsU9R7M3s0In/oI/Jkbsfexv8Pup27C7qdvw6ff9xncsFfjg+cXaO7Fa0sD4JqxwxUjh8tLixP328L45BjTKzfTZ6IPsFUJNy5BxiD6gOn9rukq3KspaDwVT7g+2HzgulRGlb6vHmgxwJIBDMGCQT5LJMkNSmw8ZeG1PiIIbOcDJxFU9igj0CbPF2e6au2eZF0G4AFdhbmG1r5TkndiTzmVmrUQiU+R4u0BUasSQxcA3Hpj1qBvVn2WJJF5gJWb970C+jmgtXLrg/v9dsC+OATe9Pf8OIE+Iyb7HA3H7nA7Mqh3JFaBlkAPUEzAqhRHRONQe76WFm2Aj15Yn7GX+LOGMLIEaxym45NwCIjLPZhlAWqWTEwFQMsFgveI9fLox3MHg6ztfBvvpPbXce+MnD3D/pk8AvbuHSB5/626W0OEAD0D6cALRZ44HzD80riYMYbJN6D9s2lOQCJN1ys06IH4JKCF+KHWC6j8YvJpygG9SJzsln3SudiwoEql0nNwUMe7WFQdOJg69fCpdwSw14TEMB85QmVv32dvVThD2CgMRpawrCze/5llYuhdURHiuMCtc4/SErZK059Py76s2m4bgGUbcWbesE+XIVhTwCCgcpY9uhY7CQSlZgEKHtQuEPd3EHbPYXnDJxAXM9Q7M7iqhKkqULmbQD/2kzWgUAFtAzs/h9COEWOAzjTt3q0C4BWIxSRJhedKH6lwzpgkY9/z8yaDWG4gFoFBHecQds/x95cLYfUVDEAGmW34ALKm874dT2GmWzCTTYRqG2E0ZRnNcoIIoAkd60nlNRdLPg9blcM+jTC57AGgjSsQJiexXQPzNrA8vSeMyg0467jAhwyonHMfNTUfWzDIffwgr8Eyqw/W8XrDOngpatLrgBligLEG0RbMgPRNB2aOxggh9CdU8ncMHia03EfC6jSgxDhTKfZOspPnago0kQBsiRWqbH2d95CBMw5kTGLSqRpDHuo7Gg+7ZgfzrsToz/ZNR6ae3KQAoiZbo6ncZyq0tNoXbeddOgT+hvOZHoMun5vIuk/eN9o/g8Nhz+vYsf1MXrrWHQOtKHTrHYPuTq/xg/va/dDtzs0U/Gt8QGENX2dtgLf8vyYEzJuApQ/Yrz2WbUignyvFY9UQvA9whUfwAb51sM7AWIPJ1giTzRGuPlHhmhMVxuFSSyIuPdZzq3WsYx3r6OJtb3sbXvSiF+F1r3vdXb0r61jHOtZxpDhx4gS+67u+68jtPPrRj8b3f//3H8MeHU/co0G/lcy+4YKElC2zOuEU88Vgeu/iAb8hOyf3Kutv6CDolxgmeZJc9xUQzcHQ
/9xh+5AnqvS4Bgn+fGkUB4tgkIGHSYmmGDnJXRiCO2RNpRXbveT5ijgscXRg/0tOijU+wgAYWUKtq3tfg9qsiv5C52ew0LZk4QywUdrE7ioNYWQi3Jm/h1nuwp8/g+X/+xB2PnEDPvUnf4OdT+/iptvm+Muz83s12AcAl5cWG87gypHFZaXF+ESFyeUTjLZGKDcnnNwCQNbAFizrqdKe9uRpmM0TncyVSEv1Eq5A/94YAoErolPrYvCPvd2igH7sUaLF5D4AHgBJVoiljzgpgiCeJ6mSmRNSxnRAoCa0h5FLVQFdlTfJT2IPEhi0Ep+93OtlZeSFBSveP/RvjUFShuW4+uPbAZBVqvsjIABWPCDHl++OSe+ZlOBS8mXeJysBwsG+r7p9tKpfk1upyj8fJ1eNmb1GLrK/hvuWfyWy35iO2U0AFj6iDRHzJqAJnCBV1jHA5720Bo1jySqACwjGow2pqjeI7ULA7+LwfVrHOu7GQVLwo6+cVw4H7u0o4/IqdiA3NJh7XezcKothIUR6jqj8Z1snduDKe50y+XVAQIrAUollBQRJNlubAXMGCEBEJ8UZtS2da2X70QMqgU7twXSsojwuVFDV+Ig6cL9aY0CX6P1niX0DR5ZQmIjK6ThuWL48BpRuG4UhmHZxoHAk5+bk+9+EiHnrsbds4WPEpLBofIC3NjH1qd5Lz0RqFyDfwJ+9Gf78GYTzZ7C4+Rb4RY12XqOYVii3AmzwScGCgocZjZnF1tQIsx3QBiG2IxjpZ6r3uI89zzkI2dwDmcxqDB0ybTNgRvpCC/BiMQJ8xcCjFqDk0szBI7YAWfbQS1KargBVE2A0RijHiMUYsZxg6WMC2fSZOXIT+BhRew8fIgpD2KsDUI5RbUzQBMCaIGoDnWKBdRWzE30LU5TMhDQWZEQFZODLq3OASJRZAbikZpDfrgEKBMl9YlwCDpU5OJQ05S92z2yKkdUHyIEE8BsoyCbWWR6GkAC/VDBFBjFwQVMEYMXr0RP77gF94C/3HT00Bms9LVpY+dHB/qn2QAIvI+NfKglOaAGZS/SAv+yVhuPDis/0gMLBfujh5f7kQRi3Pfpe+i6JLHMf8MPBj15S5CPFUIjBiwdz44Nsi8ugmsAMxUXLait1BviRIVCgxPKzTrz9fEhKI9YajMYFLpuW2J6U2B4VqEJfTnkd61jHOtbx2Yv3v//9+I7v+A7EGPHABz7wrt6ddaxjHes4chTF0fN3L3nJS1CW5THszfHEPRr0Yym4NlVZH0g6D6s7VwB8w8/3gIgBK2VlDIC+KMkdXUyyQbz4nJhBstvg8Ar4GECh7qrDbSFJKK1gbg9KXh0GOsZwILmkPjlJzlOqXn2WCbiY1NzQQ3BlF11EO3lUlit/yS8RhfWXEgK+RhxtdtXzgzDz89w37SK9t2EcS9UsZzDNHGE+g7/levjzZ3DzR/4Wt374k7jl/9yCD378PK6fN5gNs3P38JhawtgajC1hwxmUhrDlDMbWYFRajLZK2NKimBYopyXc2GF6egPFtML49EmMTmygPHkS7uoHAMaiveETsCevgL3iGtD0RMd28DWfH1t2ldQipTSUoO2k0Qayb0APBLLyCiB9HmTQRgav6hAZJCagltuhCWzN1HiuKvZSdQz0K5wZ/KPk86dyr/rqTM7mY4DHGaCwhJE1KAxg2gUnnIWBqnJ0B8YiqxKSmcxm/n/9+zAZyyS5lDHhhgB4Dv5Ln3viyv664X7wIaLVIQT5vd5VhgN9UC9n/QzzanGQeAM0x+fTfa9eOJrktkbZkoYB05QUi10f+vb2jxX9ce/AGD/s41VV9zFA5ffqACx9wF4dUPuI2+YsBbzfcDW6RmENRs5gs7QYOYON0mJkDcaOsDHaRlltIhoHU0xgxYPJLDoZwc9aaBL4zmx/HffKcAZJDtlS5sFEHcNPJW+9AH4HCovQgWR5rCyw6P2RPQ+s6937cfCcSAVNpAn3fjtajKRFERQjQlsjLhfs
T6YMJt1n7/vbc47ZfW0NiqbPDsvmjD2GnM7TbMlgkswJb7f4aRAqM+0PmXre0Sgt4Ss/bwsA0EagECniaSHj8LIFRI5yGCECN+41aAL79+0sW5xftmgEtJoUFhulxUZpsGFamN2zoDP/gHD+DPzeOYT5DHGxj+Vt59DMFmhnc7QyJpI17A1cCqPLdvPaAMAYA5QV4ngKaluRd1cAKpOjb2th8AmAZywfj8/G3lXXkES0jj2zbQkaTxlQa5tOOrNtEIuyk7U0FmY8BU02+XW8wWzDcoow2sBM5Dt96PwPY4ywhoGOW/db7NYt9mqPj966j8snJb5oO2Lfl/jEuSXe/5nzMER4xsOvgCXicya+vXFxPrH6+LoqEvioYF2Sj7Xu4PMwnVd5WgegBWCLCuQbxBhgJlsMHjY1MynrBWLTJLAz+ZrlfRkDvy1FUR4AjHjMSXRS61JUCGGCahGhSkjKHIZsiVCy/KglA4+YgMQc7/KBiwVhGCykEA6cZ2b69vuCiGBknQasvkdjBrTppIfxS4IlB+scYFq+BkN7wN/v0ALVDBCMh81R0j4Ah7np5fPYwso81QDO8rw1n8cdWpzRNZbdJ9R5jw7n6dJXJn1N2+f5thBZsd94Zv4Sg4BNiDg3b3Buv8H5/ToBf2Vh4V3/GvUy74ohwjiDwhrc//IJrjoxxuefmuDBpyag+i5Yr63nVutYxzrWgU996lN4ylOegv199kFeg37rWMc61sGxvb19V+9CL+7ZoB8w8BzIFvCHVZMfBvjd7obMwYVYSjx3iyRNrOsnYwQ73ETwYhRcTavfPbDAHLJdck8uMoDpmDFRj19emQl4O2yXQT8llp8AlVyPeZGRgAq5jC7CC+Z2mxz87aRSHU3mW+a7yutoHcsXaYRWwCdODrGc1B7CbAfNTZ+CP3cbFrft4NzHrsf+zTu46UO34BM3zfCxvRo77TFl1+7CqAxhLCDfhmOgb2oZ6BtbQjkpUIxdD+ArpwVsafmnKuGqEsXmBMV0jPGpLbitbZjNEyzdaSwDfidPA5uXc1W7cczCBECh7arKc6na7OeivTLza1WkpRTQVglQCpET0wGpulzDS0JaEw1hUNZtDMEGToqI7VSqGLcC+CnYx+wuTaYQCoMkJZcAP2GiJrnelJTm4gSYQRV31PFjcNg6NgwYbzmLcCj1mftQpb91PJJ+aEXSywckJm/WQsbuo57kqVaLHzg9cpjKGhwWOgDKjIypH4k6IWSu8A8C8Dfp3qWWq/1J/bFWjbvKSNDrKohfVJ6wGjKo8+8P2mJmQUxSZD7GJIHWyPterg3101m0fIyF0VHTwFCAd4RJtcnv+Jq9nuaffXnPdazjUoPlGgeAHzp2jDL8mCF0WCq6X2Shv+fg1wVBsCFAo78fytqNB8ZGirF73sTAoN7Q8zSxltizjSU6SynMEj9A6xBDODhY63aBVEgFYxBdiWgsonVos7HxYgl72s9tiJi3AdYAY2cOVV24mCCITHraCI+tLrZdn8Rw6DyOhLbVysFU1mBSWBSGcHLsGPArDOzsZpj9s2ilsCrsMtAXmhb1zj5C0yCoPCYAV41gS1YPSNsqSmbRyQ8z2gTA1aI53V8yUmzDz8cENJMBjO8rZBxSYHfguhGvawXUeEflVa8XY1naU35YQrtAdKWAo6JCIM8TfUY2gfvw/LLBbu0xbzx8ZBYmYLHwAZ84N8dHb9jBRlWgcgzK+sjzXfIC5qksrYYxqZ9uT0VA1QoSeAaZ96eiIfYCpFGAGU/5f8YCZsHnwRXpHK0CgijbhokdpZUVEiDAlKiISEFfevaLVG+Mgb3MyYB8i0gGhjrGrLKLDYjVHIxIakc5Dut4ToFs/ADkHmX2riU9dlrJftP7MMj28qLINJ4JDdoaBv7Ek2H1eis13Af8DoTO48Dj7Krx1cp2Dbo5auUMSssFawr4OdM/NhKZ/AO7FCNCFJBPwT6SY0EGmK7cl37n8XXOEsU5yFh7BvjO7NWY
1y32a5+YftYQyqx9awi+6Pz+2MPP4oqtCqc3Rzi9McLmyILiGiBbxzrWsY5LjRACF1ddQpw9exbf8A3fgDe84Q0gIlx99dXHvHfrWMc61rGO44h7NugXBgurFRIpFNkMPiWAh4ngVUnh/LP5Z4afzT0CjUuAX175zvsgCyZZvORLPE6oC4gAdHJVA5Ajee2REYDCgXyNOGTEhMC7vqovlIWkSR3Tyf3k5LZhjxxaoa775kb8Z7NgX7+LAVEvIqJxoHqfQYF22R3fYqeTnSonCNNT2fFnLKFzN6E9cyP2/s+HMLvxDG754Kdw9u/P4aabZvjLswssDtP2uYuiIJYd1R9LDNRpZXQt19ZcSv/1/8rcKw1hq7RwlYPNGHzltIQtDVzlMNqqYKsSxbQSgG+EYlpxpb1IdhprYKsR4AqYyVaqZgcAGIvi8x6EMNpEmJxEGE15X2ZnOOmWM/nUe2kV4JfAbdNJp9n+cJTuA72WfUi+liTAnzXE0j9ZZbklQiN3XxDQpvWxk/yUMIH71ltCEZl5BiBVqjvDQF8pCfBKElUWAVTPGahqF31m2gpWGkXOAyGAZaCoz0ijIP/P39cIHdinYHaP8adMEzKI2n1aJUxGmH0RtUhWasIxSXzl/a2vAp5aZeihA/GALtmvSUxNaGpbXoBYQBlDDJbyNYuU+eb7uu4q/BWo9zXfx/WSWRa5vJoem0rJWpvYNLAlYrQdO+iwcTsdcAdMa9KzG7/Fm0Z+esm+EBGIsGxD8tWpWoNJyX1RegJGI4ynnBC1McDeMRvS4wlJAt+p7a/jXhlc2KBFSt37OldgX9V4QCJPQS1NvhsBKyj7/hD4WxXJq3TVPZwXRkkhEGWeYr3PAQDZbo4YPGJbd+ytskqsJZVLNL5BtK1oCZZp/kXUdmwdvfYHCXsKHsF1PmqwJRZNyNjO/T69UD/EGLFsI5atx24d8HmbBdxRUD+N0CZmFUILs3+Wj9HYVLyTlBT0mUhAJdtuAmFz5DApIiaFQWkJG6XBZmFg9s+Cbv442jM3YP9vPoLluT3UuzNEHxBl3qKMPleNYAoHV5WwVcnS4WXRB9NG3StcweO9Fn9lx5NeWwJGSHNbxE6aNfWrMrqy5zUFAQfVG9KxsgaVVbYdj2AsoD6QxoKqCc+PqimCSHp6U2LZhlQwEsWDWL3ofIjYq1vcsLvEfhMwq1ucmpTYLC0QCLvLgPf83a343x87g5MnxihEwtQXE37GFeLx7USC09iM+Shjvs63jM0KBrtzGRNgReIPJyoAhuAKPuZYjnl+shFglOk3Z2N7Ksp0jqJcO/m1YogQwV6BnSQ4daCfgFGkSgntAtQshemn/pwlYpCCPlmvIJsj6hpLpVODl00ZIBqZVxr2QSQMis3kXFu1ThjcVlrYoAWcOmfKxzxWLogoLHVWCMbJOm4gOZmz/PLr7pCCJv1RwFG9U4FOlr4wRuFoFJb/Li3fjzpvVXBV74AcvNQ5IPL/5fuga0TtINsV80UyB4pDdexXkHtWe9RtSMCeDxF1271Xi7xnLcWW49JKsV33k9o2hM3KYaMq8IBTE5yelrjvVoUrJg5NexdMrtZzq3WsYx338JjP53jJS16C7/u+77tkht4jHvEIvPe978Vzn/tcfMmXfMklg4frWMc61rGOOzfu2aBfLsMEXvCtlPCMoZ9w1/eHVePCoOPv+j5IMWxTv3KggpZA4OpTBRh0sZuz/wCpgDQkJvEk7JbOkyZSB5roIpS/B1hSL4kO4EtsvwCAVrBcfLc4UoacJrjzRH6vaFMS3AmUzI/dWP5wJnl1LOHrTs5z6NGTeYlQuwQ1c9idG7rP2BLwNcK5W1F/7K+x83efxqff81Gc/ftz+OCnd/B3s+bg9i4xCsIB2cypNeLzRolhBSC9B6D3vh6ZyT5jBfyzItUDAMYOrjNLIEPC0DMZsFfAjR1sYVBulDCFQ7k5gR13AJ+tShSTCqaqxItmypJNZdVViztJvq0wlI+2
QKgY9DPz8wx8LWcCftWcKILcGwJm9Xc+u1aMOfSeZW8Z6R1lIgwmlNxvfH36GFFEll/0McIKE8sSoc2lLEkZWizvWYlcY+UMNkqHwhLGzqAQ0G/khOlncODapHwMMkjsvZWh9yYCM9Ji5yXH/TWQpxskt7tE0QqwWiWrSBJjeRFCyJL0kdkHEcyA9IO2cp9Djw5MBWKPsRPFU0elnHyM6RT5yIkwq16DJsJyNimxhwoDBvzaJfdpPecEdDNHWM6BtkGYzyRJ3/STaMr6kCQwVVO+jssRyEqi0AaWOM1ZwAP2Cleuux7YbCUZGSKwMbIs5epMHyym7rrSaELA7jICcGiDMAIdYWN6CogBYbH2nVnHPSeSVymQVAWAjHkSVd4TvXlOpI4tzXMKSiCfhiavh8ya3p8kDGmy/edFDKA0kQpdYYh8p9eEyDqSzvNCixg8gzUK5uQ+ZQIIhqUBiccbcua6gkgQUCIGlrNOTLOQwA8FW1YphQ+B0lXefgwYISW+j8ODS4+Hcv/BtFPcr9FyX1PTyaNHV4KMS2DCyUq8kcGSoeRrmP0zsLedRdy9DfXffxjtbbdi/8YzaBc1/KJmUE+KjNyYfzeF64A+Ba/EL86MxqBqwsDSeApyJfv75UVD2Wvyc0znOkAvlJVzeJlXK1icfyYS8bNkCCYEz4r81na+gwL4YTRBLCpEN0qS47UUGvHzVp+9neR4yO6dr7rfCVw1dXC3/F9ccfKhePBVm7j6xBj3OzmG3buV51tugjoCpRMJVlvCjCLLbwYpMMoKflYVEEHm8bpOUbZfjIzr+BBhrQNcyf6BWrhkHMxojFBWCfQ2o7H0g0vzMp0POUOgDOzvAD+R9gTL91M9T89/8jVQi+wuAKpk7DG2K/iTw9P1kAJXAV2BAUKEFwAuWp5rxbx4TNdZxnRrp2yM0X7LCxxq359HQfpQt1kYgOuN1C5B5ob5GjMD/Hr34KAoTkHU6Mo0h8unaYWsBXT+YQxQOQtDSPPWkeN7Nc215LsqM9wEHGAu6jGrJGyMAXBVB6orEChFDbFdzUAMka//ee2xX/sk4blsA+YCANYDVZUh2Kc/Tl5LZ3DZxggnJgWu3hjh6s0RLhtbTNoZ9nZuWrEX61jHOtaxjsPij//4j/G93/u9+KIv+qIjSXL+/u//Pj760Y/i7W9/O77gC77gGPfwro9z585he3u7V/S8jnWsYx331Lhng355DMG8/DWP2CXWD+V5ZSwRgiafVrAED7RrUhVtIBzwyVr5tbzU+3ZkXnKprAh0JvIki7K4orp0FRgnizeVnYo5CCVyObrfAYAR2ZeeRAzA4MJQKuuooQvy0P1QDF3yPoFF8tnFHvyZGxgYAGBPnkasF2g/83Hs/N2ncdtHP4nPfOBGfGK/ORbAT/3xSkPYLjqgb8OxP14xLWAsgSwh+ojgY/o7B+6CD4g+wtcDpoCl3ufJ8O8AA3/p/8YkwI+sQTktQJbgNJlWOrhpBVs4uOkYxbTiivoJJ9LMeMqJtiHolyffeEcR2yYlYli6jJNbSx8xbhcwi12u0FZASuW3YsBB3ii6dvL7dAjSZ9e9fmQYw+SVJYInwIicpBFQD0EBqz5YY4TNooDfyBmpmKYE+JUqkUTo2GiZzGbaOWHyrVB/644pf/UMSlHsnPWUxdhLcA+8X7qD1wQV9Sqzo7GIkvDjJJVUxEtizEt1vBf2WggDySoCjOH7vbAGiIAXwK8bI5CYfY0PaHwnh5lHIKDIMGNmADCYQDEALXtAMtuP2X1hOWdGwXKBuJhxIl28t6IXdo61XMnvSn71HlSUfKWVg0Sh9pv6S+b9q+wHKaowso+FYcBi7CwKYzBy4UA/cT/w343ngdnHiEXrueLe8LVvTcSk2kSs5odcGHdekDEMLt+J7a/j3hkU+wUIBPQKVdKYkikahBhh5Vv5vCaftwBIBVqHgX/siYcDhSEkRRNcHGFWzpfS5/IfSEGU7wA+KkoG/KxN70fvhTXF
ACGR6Z5juRfrAcApAx6l4CKNx4NJ5qq+GEqg8vjMQFHvHNzBvMMBFqGCDeFgvx04JmUjyTFHAKUpYA2hWO50UqrK0JrdBn/mBoSd27D4zGdQ7+yj3tlHDAHBBzhrYMsC5dZECo9G3VxjMOfQuYgCfqaaikddxybLI51b2xXFrJzbp+ekFo5BCnVC///644bLI4ckAQskRiIKlsKEqxioCeybmwN+WmgT5JmbS41bQzg1thgvbkPc38HoCsJm6XD1xggPODkGNTcD5TgV5rWRYBObj3g/tDEp1EqtD/zrENlzjx9ZlJ5pIUZQJAG5IICZAN+2A32IKEnmMjjl+j7oug0yAKgTQyH1nAMzc1UWXZ//gQupQr3ogPiWFUkOPZ/ASplKPh6w6oMy/QzQM8eMISlHaD8NvaWjKEOoJ6MPHeNP702ZXsoxAmVksPHAnDZ0c7mVoHseOQgY+sdOkKJSIhQWqUCrEDl6na/m89bc09KqagxJUVdWTBrluCK4GIqMIIM65sm5jlIsqmzB4VH4wP3GIF+L3UWLuTD75rWXeWcUH0IG9PJYxfIrncFm5bBZOmxXjv1DCwOzdx5msbO6H+/EWM+t1rGOddwT4+zZs/jhH/5hvOY1rwEAvPrVrz5Se7/0S7+Epz3tafc6wA8APvzhD+PDH/4wnvvc597Vu7KOdaxjHUeOezboJxXVQ3bNoZ5hPlvcG7PaSyGXbtJq4GwRF5VBR8w+ItMtmljKxsBZWrlIHYKNYYgQGIdYZMeSVZzqAnNV3ie6sjOCzyVH9bjFlyaONhAiy0RGD/gVx8/JPd5DC0qJpjZ0HmClIdjAFe6wg8XtKjYkVu/3gcir0AFOOhRjBJGVodAC9TxV7RMZoChhplsIu+cQ9ndw5vd/B/Obz2Lv+jO49aNncMs/7OB/nplfkpTn1BIuK23yx8uZfUXFXniucuKL10lokjWwpRHQjkE5yn431sDXLWII8Is6LfCGn8sjBwzzNl3FEoe2YlDUikSn/s8UrpNicoVUzQvQV5Q90C+6UT+RAyTpRVrOOSk3mqA59QAAwCh62L1bEGc7iFr57UaH+2kOI91T2bHpL9m12fP1g+YguKq/MA5GPEr0pzWEaA0q2/n4aXJGfdkMccV0YQiTwrI3kRUJT2H3qQSVU0++XMJTdqTHJFsVKYmaScMl4I+PgcD/44T1ioSRJpDJiIRl6L+v7JKCGQBLH9E0AW1AYhvkrANl5jU+JnlKZfXk7LUk0xm7hL/eRTm7T5kLjQ/p+0FAWAInqpThp5X+nOjzKdmH0AJNDbQNA361/DQNvwrw12P8uYIZION9lhlr6iS1RmRE70sYOtn1diDBJ4M4iQzpuABKazFyCpZ2fkyaeNUEYBs4uaXSsSEADQLqQEAbmNVQVgjjExe+TtaxjrtTaNHNCvZLrlSQs9Z0jE0AFrhgKE/grnoM6/zAZttK05chA4dUyj2TNNfQ5wkEtAwe0YucZ/pIxybTggKVR4xNjdjUPDesl8DIsMeYA4/zWfspea/H4IQN6EbMkpKiKmUFaTGVNR0jKS+40r5ctFGkIbnd0lICGe5o9ADDer93PlN/aR/KsZFv+n1qnPitNRiHXZjFLuIN/4/H6LZGkPE67JzB8twu6p19NLM5QtPCiE+fLQtMrrqMi462TzHgKgVHyrDstieymaOKx/HJBhcZ5XOEw0ATPSe+7gO/chy9cR/g525An63mHfsx2nKlVKyy21gK1oCqCWIxQSzHCOUY3pSoG59ktNnXT5+z3TksLGGj5DlH4yP2mwhbXYZ4v6/A/75hhvf9/Rk8+YuvwmbpEJfM6mtEejpGoHRlNxfJgWjwfRKNO8Bsz+UkDQleSEgSnwri+wApKptwYQ4ZBs7Uw1iOI62z5HoHIAzLloEvw8Af5H6lRjyPFewLLahZ8ly+XXLBjz7njenPtXRN0D+iznc4WxtFyH0FgjGG2f9axBAG10V6NezbKW34yEAfy6IDy5bP
37LtzqWqIowcIbKxHwrDkyknEuMUTLc97cPkyxz7hUhkUtFAdCU8TOeLh24e6zKfPqJOKlUZfoUhuNhyf4e2d51EW4DIoCwn8JE5yzr+ADxmNwEojANKh5gBhlrMsMzmlEk23utP4DWmZ8BvXnuc328Y9Gs8fBsQdX1pDYIz6fmgY+W89um9Uv5vjcG0tNiqHE5NCmyPLKpmF+bsZ9De8HGsYx3rWMelhvcedoWq0b0tlsslfvRHfxSvfe1rAQBf+qVfisc85jGX3N6HPvQhvPOd78Sf/dmfHdcu3q3iiiuuwA/8wA/gS7/0S/HoRz/6rt6ddRwxdO6xZm6u43M17tmgnyFJwmRJcCAldAD0Kr0BWahKxfiByFk4QJ+Jo0kR4w5uL/+uLK564MUABNHqUjWRTx8dtjmIA8NU9tkIMAA3/J8kqXKPitzTQSNfNKdXQs8LovdZBRlzgOKQytWLHl5zMEMk9/LkRYwBMBam5upyU88QZjvw588gnD+DsHcO+zecwf4t57D7mT3sn5ljr129TxeKK0cW24XFhjPJK29UWpjCiEdeASusPmbbsY+esSy3mbzxRL7KjktJohQwhRyLD4gD9lIO5g1ZgfqdPKx446hHjgKFCjb2/HFc0Qf/RuxlRK4AnEtgXwKRBIhMCc5SwGNXoYXp2FrGcRvlCNGKJJTGEFjJK5iH184wskRWYm9coPqVqP9jDSfWRoOCAK0wBoDKMrOvcoZlhKRq2lzgio3EslW9ivAhgyx9OGM+6t+SaNT/y04l2a2Y30PZPTxsO21T5SmNQwsD72OSF9PElXqsKNsgl+IEOsDv0GPOftfEzCqWoLIrAbDfjOUktzUi6yXEhB74me2IMhTIcmI+GguyHRMSwSPUHVvXyL7FRvyX2oaTscIciHkyNAYAVvpucD2iAyoMsf9NMFy1z8l4ATJj5+nDPpGAj5xwayWxq5X37IeorKdLSdkfQ1DG2L2z2l/HvTJWFkTp/wiAMIVy2VsAfeUCrF5cxcF3lFPISew+yHhgfpGDP4cpI2iC+yIWdmQsS//1dmgACGRtRylk6B2PFF/0ijPI9FgwnadfBk5IAUEOnibZVHTj7LGEgKCg2MkaA6yAeQCUUKDMdIx29VvdP4/2zI09RnZoWtQ7+yLlueSvCtjHkp4lis0N9r+bbiFJiCd/1pwObnp+cTAZc1KOIxrXAa4kbMyhX1sO7JDhZ+SF5huAAFb9gr8ElMnvUZh1MF68IIVdb1nysPV9lt9Q+jodJrHCAKsNRMyagABllQVcdWKMy8YFxgUh+hGiLfrPkKzYp+dZuGq+pX2Crp+HDFKd80SoIoCwwrRexpvOG1D7dnivynYoBi7K0vb1+lGFBGX6BZ8Y/rHRop6D91zvNf8Xuuc2SZ8OCyOT9x6ZrphgOP8abE/XRRGZf2lU8K8r8DEiv++ChSf05llu1XWWXZerJNp1vMp9hpUdmnZPlBIMeGy1BonZZ4mL1iiGnmen3itR1qXRWMDXfH6NAcJBz2YfeZyzUtSm45OyHtv02l2zybNSJFB7PzEya1FeyRBCiKDQyaX67Pc8Rk7naMJiFHCT9ucIe+cQ9s4f7Os7O9Zzq3Ws414RMUa86lWvwvOf//y7elfu9BiNRvjyL/9yvPrVr8Z0OsWLXvSiIwEgv/zLv4x/8k/+Cb7qq77qyPv2yU9+EkVR4D73uc+R2zquuOKKK9A0Db7t274NH/jAB3D69Om7epfu9hFjxG/91m/hiU98Ik6dOnVX704viAgvfvGL8bVf+7X4//6//++u3p1DI8Z4LMBkCAF/+qd/iq/5mq+55P14wxvegLZt8eAHPxgPechDcPr06bsNaNq2Lf7hH/4BH/vYx7BcLvGN3/iNd/Uu3e3jng36ZeAQ0F98EmkiA73FXVpsRd9LCnXv59W9WsLOgINWs7KspnxG8wGDBHOPnTSoMM4XzCnJhQ6YS/s0ONyVvi6rFsVZm6s8+4aJOt1WSkzJ
e1a8rdTDB+AqU6Df13FwLEeNWFRp0etl5WsJIEkA0XIG7N6K5pbrEWa7aG+9EbMbb8Py3C52PnUr5mcX2L1hD7P9Bk0ErhhZnG8CdlYAgJeXFpeVFlvO4Mqxg6scyo0CtrCdfKZhWU0F+BT0U3YdgB5bzyjgV5WwZYHq1BZM4ZiVN6owlM9Mslbqo1cU3Q4KMJgkDltmQ6VQbxljEfOK+RC69nKQr6xAo6ovmZUlKg8AWK7swGjjEIsRL8zFMyUUY74fijH7gLgqVdqnaz4BiQer7YELgMJ5ok+Bv9sJIwwzGKC0hkEYSc74iExWiJMJCZwS7xMFpw7sRw7yme69XjJxWAQgiVSEAPImHQdM6JgIcq5635Nt9qU7D7LTUiJGwKjahyQtpmCfylEpYLXq3ud+k8T74NjzJEzOVmh8x2ABxAPMUurvkQCpWoFuBdS0ROjJEBvDCJsmocsONKbg+T1XMBMHTUrQKxBO3vO1BSC6jEGi8nwh8Pi96nzmb4F9hgDAJdOwAAwLHMSnqw0duFpnvokaWn1vDd1tJkjrWMdFR4j9IpxB8FyhP2FhRQKWCbTZPGKocAB0zGH+B8szG0005wylYUHR8BkgxQNRZAfTd7T4yloAZZ9NdhERg4eJsVeE0Xs2yvidCmVs0SvA4M9IYULmZ9aBVVrI1OuGxOhhET9mFuk4epRIgAsAuBFiOWH2n1a++rpjXQ0ZbjVLLfvzZxD3d1Hf8A9oZgv5mbNiwbxOzD5XjWBKh2JaoZhUsBubsCdPA65gWXFt2nvknopa7MH+rJ1sJvetXAdaLJJfF9l8W+cJUZlUKoOZA4f6vTzyNvXzCh6HzMMxv6aceMY63sc6AAvPz+HaR8xbv5JNX8ixbJYOTWB57Jtmdfr/yFl89QMvw+dtjbBRGARzEnG0Ab8MXeGMsMHSMzI7/g58pv6xZsU2hgwDR+iYpxpetK7bSCBiqfjE9PNF6tshsJq24Vu+5wefIy9Mv7YDo6hdMsO2XnTzV5nPQhUMdB4k/WYgfTm4JxTwMzmAhQz4M+JTR2IhsOo6QFcY1QiLrfZ8Hy7bgN26TXOewhiEqMVihouALOAJvC6l7ppVVYPE8FOpYZGQTfPvDOT2SUq0O75ujcaqBFZl6GVOS/U+g33tQs6XgK2QdatrOwaocXBFBTImFYQG8JI5hAgDoJW/tVDUh049YtFy/8xbj4V49un1rABeq0znwKobUV6NnMgYogDjlEnOC6vR8HulM/AhoDCUZPjHlmDm59GevRntmZsPnMN1rGMd9944e/Ys/vRP/xSf//mfj3/0j/7Rkdp697vfjTe84Q13a9DvF37hF/BDP/RDR15L/sEf/AGe97zn4Rd+4RdgrcW3f/u3X3JbN954I6699lq87nWvO5Y17ktf+lI861nPOjLod+utt+Lyyy8/8v4AwIkTJ2CtxfXXX4/v+77vw1ve8ha4A7Lv68iDiHDq1Cnc9773xXOe8xy87GUvw+bm5l29Wyme+tSn4pGPfCSe9KQn4bWvfe2RgNwzZ84cK7B5/vx5/Nt/+2/xn/7Tf8JoNDpye8YY/MiP/Ah+9Vd/FV/5lV95h79PRPjGb/xGPOUpT8Gf//mfAwC+5mu+Bm9/+9sxnU5v59urI8aId7zjHTh79izatsXXf/3X48orr7zo73/kIx/BT/7kT+LDH/4wPv7xj6NpOB/+0pe+FF/5lV95pPOxu7uLF77whfj0pz+NBzzgAXjBC16Ahz/84ZfcXh4K5L7vfe/DjTfeiKc97WnH0u4djXv06JXYSSuSUir/l5g1uRyUglS5vIrGAXYIQAFJHkYBEPbf6lee976rcUiyXiMvbBym44cA4Co/mLRdrAZPdG0cQLDgCnI7OGY1pNd3c38ZliwVdlPvuI7v0tGFuRUZHg+TDs4JM4iW+1xpXu/D1DP4upMAbBc1M+d8gBs7jPwIm1dHFGOHrb0Gl+8sMfcM+mnlriVeKG85g2lhUE5LVCdH
GWuvA/rIEIqxk99N8spTBh9/VoA7Yd65qkQxrWAKBzPdYrCtEClNYd11fakV71myI3hOgrQNJwjUVy8ExHrB/eb7iUwCeslNKkpuU/1xihJUTVOlPTciFf76e1S2X5aEKEwCYmEcRuQ5kRED4mgDsZywrJlKOrUW0HtLgPl0n2ZgVTr5B2JYuc7XZE4i0a91oHbXB4UhmAhEyywzZyzaYA7IMjlDSR7JEnoydACS5JWPBCP9kO+X+t9ESYboXnGC18CQ4Qli6BJMpIyBPGFmsnFE+ikay/1GhiU90YF7DOCF5P2kFda150rqJntv0frEyNNklV/Z5wrmybFT/+Tk3j8HzpYAfcZwIswaSUhJZbbKeposaR6tAwKD29E4kLGcnHIVqBLpr/09xHqBUO4gLmaIywXg2OfPKliu17eyQ4oySxauAGHzMZpM/9zkjAQFbGMG0Eqbxjg4W2BUVIBzaGFThbt2j15P6gW5XM5WdfqdG+ZOrka/M9tex10bJis4yN5SSEp9PmGIE7lD8CqTUdbvDkkcuR8gpftGJPm0Lf3/il2MxnXPrRAQ0Xn9JVZLYkRnRTbAAUbR8Fna39Hss2QAZLLBFwCSdN5FyPuiP1dTQEj7bOwMNoruuHPp1OJSgD8BWxg4kz7QQjmVwqx5bmWauUgsNr0CIpVbVjWF+ZkdtLM5mhnPvdLxljxvYs++EuXWFDTZFMnlSTceDeQ8gWyu4srE9usVB2nBjSZcUlGNsqe4oyLRwe9l/sGkxz0c6ymydH/u1ZtFDoRGIqAoE9gbXYVYVKnoRs8n0J3fxgc0UPnsri31Zttvuj6ZFBbblRMJR0IsJ2hjx5g3ALPgpZiKouuYdYcV3g2YfkA37/cCuvP+CggYtJiHrQpsAqOkoCuTycxZawzyDWQ5FUhWaVCfefgt53zvBp/uM/V5RFH277EQ+NI1JNLB3XxE5xZpikHDuRxgZN2WlBeG51WO0StoJfOtJkTsi2TrUgoHfYyyejZoTMTIrRaP6fq++/3AWHPIOePzw2NHoG5M0OvAmY7lR74G+RbUzBlcFSZlDuJ3ihlyrQiwbYWxypYX3TymGYxNymBVf8MmROzXHgvPgN9C/J2DMP1sVvhUWIPo5OlhOPlhDIFkTAsh8pw2RHgt6JJzOK99r/jMEsQLcoEw20Wzu39Yz995sZ5breNuFq9//evx+Mc/HldfffWxtPfOd74TX/EVX3EsCfu3vvWt+G//7b/h2muvPYY9A/78z/8c/+W//Be88Y1vPHJbzjk84QlPOIa94njb296Gxz/+8ZhMJkduy3uPZz3rWXjrW9+K0WiEF7zgBUdq72/+5m/w3Oc+Fy984QsBHHxO3pE4e/Ysvvmbvxnf+q3feqR9ApiR9MAHPhAbGxtHbuv5z38+HvjAB+IZz3gG/vE//sdHassYg2c84xmoqgqvfOUrjywBG2PEr/7qr+KZz3zmHQI6DovXvva1WC6X+Jf/8l+iLG/HauYi4s1vfjP+2T/7Z7jmmmuO1M4Tn/hEfNu3fRtGo9GxAX433XQT/vZv/xZf/dVffaR2vuzLvgzf8R3fgauuuupIgN+rX/1q/K//9b/w6Ec/Gi9/+cvx1re+Ffe///0vub0//dM/xbOf/Wx8+tOfxmWXXYZf+qVfuuS28nja056Gs2fPXvL3L7vsMrzzne/Ed3zHd+Btb3sb7nOf+1wy4AfwmPOIRzwCz3ve8/Cud70Lf/d3f3eHvv/whz8cv/7rv46Xv/zleOUrX5lAvx//8R/Hhz70oSM9YzY3N/GKV7wC3/M934Nrr70W/+7f/btLbmsYv/Vbv4X/8B/+A77kS74Eb37zm/FN3/RN+LVf+zXc7373O7ZtXEzco0G/vIK3V+k9TO4Cq/+fRfIwi3E1eAcw0EGdVB/IyucOZ7gdxs5ZzbfJNpUle1a9P4wL5YN6RbEDeaokj5Mv0iMAMsljJ48E0F0MU+t2Iu+L7nhNUlc1Cvj5mhezzQJm
uYswZwAgNgyExcBJCLIGtnAoNwBjCW7s0MxqlBsFtmqPy+uA4EOSlzHivVdMCxRjh2JaMoOv6KQy1ZvPij+esRnoV43SNnM/PleVMJNJYtNRNeFklla6u4K9YYasukECkWIA6mUH/unrkquiKUuU9Fl+DIho8kS3qUw/AB1IKUmaJHub2GmWL+vBPQZwNXHaTTfi/xVVdwyaWMmvezIDP6j+tXDYdZ5LwVGWRNZI1ckxJn8aoggrldAU+fokgviuaFtIiVhL3LZKg/b2Ayq3Bk4oQyrBPW9X5Y1yMIzlHcXLyhAnzBxYchIAIvPKKD9niUVoO1BMGJhNgIB5XSJKkzFJSqlXiS3V1vKdoBXUgwRZD/xXXxUF9+ggQBiyk6ZsyZxJWVgjTB1mBBvp557yWoyIRpJvyu6T44yRPZmiL7gynky6XqOxzOQD0n2QQD8B/JTlR67orsPDJGGHhR1eZMeC75KSwkgYMkkSc1W8jEpbJpBWP8P3rnzXN6DmLgD91rGOYwoteOrmEsTDmIjfrgL1NMxgmMk97HrfEYaRHXodo4Mreiw0Ad4otPys0uKuwfyNrF0N6mlhzZAFeAEZad1uel3BGk4fk+PNC1LSJpSEhS6pDnASf+KOXjWdQsAtHTvz/acYeW7VLiWBvsNzKykqSk3UC6BtEPbOod6ZoZ3N2btvf4FQt2nuo/LjKunZAX7TrhhjRTDDTwueGPhLDD0gnY9e4VBiybeJ2Zcin6+YQfGH9Il6Pqb5TgYIXYyiQAJ9heUHW8L70BUhAVJsIyynXtFMtz/6DF60Pt0Lk8Ji7KzcUxGeXPL1JpmrpOvOOqh37fD8XmysBNQh8wvxs4Xp5B6ZLdemIhm1O0isPqCbS8ozXdlu+jf5hiU9dV6bS8rKXHXVM5zkh8FIAcSy/V515AcBdPFp1zlq+rJL8qbp+OV8MuDHwB8ghYNeP217IC9/ZNCrGUC6cpjU4xwUWeg5N3rg6NQoVE2AVC5V/LfhuX+H3pd6fSfN1kwlBzbAijS/gnshIilF+BjRZpLxIQL7tRdmX+jNM/1wLmyI2XoKzAcF/6j3t/r9+bZbo1lnULchgX4qn06+5bVRvUC7zHwH17GOz9F49rOffazt/eVf/iUe+9jHHktbT37yk49VTu/JT34ynvCEJ8Dc3lztIuKrvuqrjkWeUuOd73wnvumbvulY2vr4xz+O3//930fTNHjhC1+Ihz3sYXjc4x53ye294AUvODbpwIc97GF405vedOR2AAbXfvRHf/TI7dR1jf/+3/879vb28L73vQ8/+7M/e+Rz+7rXvQ5Frr51hPDe49nPfja2t7ePpb1XvepVuO6663DdddfhNa95zZHO63K5xGQywWWXXXYs+/byl78cJ06cOJa2AAbnr7vuuiODfgDwa7/2azh58uQlfTfGiBe/+MV4yUteAgB4zWteg+c85zmXzABdLpf4yZ/8Sfz8z/98yneeOXPmktpaFccBXI3HY7zlLW/Bj/3Yj+FlL3vZkdu7733vi9/5nd/BX/zFX1xSkcqVV16Jl770pfjRH/1R/Pqv/zpe8YpX4E/+5E+OBEZqVFWFN77xjXjve9+L+973vkduT+ORj3wknvnMZ+L1r389AOB3fud38K53vQsveclL8AM/8AOfNQbxPRz0y2QDkQFpGXMpAiz1KYtTrQQH0DGRgC6xkCeMQrdgSjEEDHNGSQ9kNH0gTT9OppcEy/0jen9nrJwc7Fg1pCeW3iBBTbK9A7EC1KS8ajr305PPK8hXh9v3ALvYCBG4beFTZemkMMnn0BJXsCK0XLUqySnUnJyK+7ucjAoBxYQXq+XmBKMTmwhNi3axhJ/X8E0Lv6gRmhbNvEX0Ab5hViCDdJRkPHPwTv34TOkSAKi+fLYqGVzcmrB/XwbssYxm0WMd5Z56er2GPIF1IZnZciP9bcQPBct5BwCK1GdsuoVvksnKt5tf68BB77icuTBIaiaAQ5OFbd2/fkz3+QiA
hJkGdNe1Jk/0XGsSLFVcD66NYWKkB1wPYphAtsS4GhFLekYDhEhJ4jYPBqa6+4sl1jThxY36HuON/9eEmL1yu9pWYcBV2Lp/RLDkYJxjVTcdf9LBdcUArYCYPkR4z+ep9hEBXdJF/WUaH7BoA4JIUbGvCnpJmDyprsdYWJNkUNWDz14gz5kDfwr0IbCcZw74KbtPWW45sMrtdPtDJPeXjl15ktDLOCZV6zTegmkXwHKOsL/Lyfr8eh+NAWNgqilQjpgZ6SoGCpWhkPkR9aS9lKkgcljUSuIstEBTJzndkEvq6nZdwfvuCuSSvQqoq3xdDB5mZ/fwDr6TggwXIdyZ7a/j3hnRln02vygdWAAgAw+pXQAhGB6vlAnI7CZWFqDYzRcUGNTxNgyfBUTp+TCUGdd96L2mQglO4BOJrCOy55fOhRT4GzLN1Ff3MPnPrBAmqTto5IVPMfL8UgEomeeF2O1/zL6jLHF9JmpBDPfF8UywaLnHzxZXpjGVJSFboN4HLXZg6hni7m0IIt8ZZjv9ggpXpjGw3pmh3V8kwK+ZLRDl4eCqouc5TNbAjMagsmLgT4s3god6blN2LkjmKxiNpahCJFOtSGiS4b+zoiJusJCO6zzkkrSlypYb16lk5HN7nS9nbO/Oa3bAANdzp9ecyI5GVyG6sveM7M3vQ+wkD+U6b1JBTae6oc9NLaaJ4LkFAShMB3gYbd+YTDI9U2rQ/culPfPiK1EPUIUCEjQ/B+1D+lvQS/mnJpQMGZ7jKVjqFVzK1FSUYaaMSjlHOeCXM0pVih6Gr5vkk5hYfnKM7QKWTOdHh25+sqroIMRuLaXB4485sDbyUlylkp61zKMWrccyk7DUaIil4vW8GnQgVz6fuegIAUQtYmhBvoZzVVKtyK8pvVZKK4Bfs2DgXiV6Yyel2mdiyvmhGuQFGLdLxHKMaEuev7oKDfFtoP3gQydh3grb0ceYsfs6ac8k924IpbPwIWKjcqjbAGsIvuikP1M/SiGmAn5t49Nlq6Bf3QaRqReGeQiIbYOwWMAvDs7N7uxYz63WcW+PF77whceWiByNRsciV5fHcQExxx0/8zM/c2yWDtdffz2+/du/HW9729tw44034hnPeAauu+46POhBD7rkNu/NdhPvec97sLe3BwC46qqr8MhHPvLIbR7ndeacu2SwaVU0TYPNzU381E/91JHP62g0wlOe8pRj2jMcG3iocerUKXz/93//sbR1qeegaRr863/9r/Ha1742vfeiF70IP/uzP3vJ/f/BD34QD3/4w/Fbv/VbqKoqjZVN09ytxjhrLX7u537u2MYPIsKjH/3oI7Vx4sQJ/MRP/AR+8Ad/EIvF4tiuOSLCP/2n//RY2tL4wi/8Qjz3uc/Fu971Lmxvb6MsS4xGoyR1+hM/8RMYj8fHus1VcY8G/Tq/jmyCHIIAFQAFdECEMUBAB/zlkfn19RJF6u20AiDJ40KVwckPKgf+tB10i7mc2XdYxTzQVdunXc+T5rovId/WCnAmT3ykhrNjFR9C6H5qIiHbBx+zbV9iRPAiUuViiNi3IUSIvxrgNFmv+xo8J/CEwRaF2WPKAqFuYAXgMzOHUI3g6wZe5D/dglmBoW6h3mBGgT5jkh+fSnR2LD6b3stZf3Yy6QC+sch2VlNO+iuzzhUMRCjQZwWAs1qNPkhQrkoUaCLKuJR8pNACLTOTYvC8nXSeB8kTBcVThf/B5OkBL5hVFfIaQ5A7cOU3homUVUzR2Ele5QnftOsrHigpfyWskl572kXS3jAM0g2CXKZ2mAjSfQNphXcERSQ/Jv3/sAI6eZZEAAYwAgD6SKzaicjJcUPpntF0tiakAjgJEsFeeRF92c58W7XnhMt+01VY62vrO0afz/bLCAinrDwvB3p74P1qGVDpV2nPGHQMv8SW7HxnNEKMQJC+FIDAUweUAiyHCmI2AQGc8G1rkC1BdQFyFUw5AsXIsmAS5EoeE2zB17otEZXtl8uDZdd9SoSp
z1CzYHahFBfEpkZYzBhYb0RWdxUrCLiwFJOwGPxsfvhn1rGOu1uQ6RdkDJ5Jlgw8sXSlMqxzuU6Ax8uIiBD7DG1DSGBD+iwkoQvqjb1W50P6fZ3f5fO44fwsZsynDKhLYDwAoDn8fs5D2Tn6XKY7Ju+j87m8+AXoz/F8QPLyawNh6dnH70jFVcIw1oKd7j1hNDdz/mmX8FJEFWY7iIt9xKZO3sMR6Jh+TYtWCqlC3cLXXfGUqi2wkkI+57S93/WQIiDzTJuUCaD+wkPAT+dMmSdw7pmt0vvp0PMCp8E8+7C+StKSyv7L59OCQORztShzrAsxPZX1pOwofS/kRTDC4lLwrzCdDHZij11g13X9E/nA++/nrxdYoxiZE6wC/kj2IxDPbfg+V9n/rGAyAJHCQRuAkD9zuS+j7zNsU8FMzvIcqEukc+IVfAez9aibZQzXTsMir1RMEHn0GEaStfSi3iD9kBibse/bq5HPe4wWK/SuoQtMpNLOZUVPwSP6FjAtq0QMirfSHCv/XsbqGzL80iYytjNZPpcEAI2s7/wIMC1MJgPbyZzqWi10xWUhHCgsS32SMfwmZdeesvZ0fsrt0wWvcf18r+9jkIKqFXKp61jHOo4cVVXd/ofWcSCOi8UFAI95zGPwmMc8Br/+67+O6667Dr/927+NH/uxH8Nv/MZvYGtr69i2c2+Jd7zjHQCY3fSSl7zkWJigd+do2xa/8iu/ciRZyXtSHIdk7qXG7u4uvvu7vxsf+9jH8K3f+q148IMfjAc96EF48IMfjKZpLlle9VGPehQe9ahHHfPe3jlxdy0YmEwmd+m1cbFx5ZVX4t3vfvddug/3aNAPNvO9Su+ZTnbGdAmrCAHxYpaUUN8yMoCVG3YVGyp/HUo7Ab2F7YEgw8Cb6QN/CYCRdmzG9lEvGI1VLL/eezlYdIjXBh9PloBZ8f/+cXPCvO9PgpTEiBEY2SMmpsALutqzZ0YbLKwBCMLyg4ErjXhOZFX1ZQVjLGJZpaR6bGvEegE7YwZgMd1Hu6g5OdW0yfNPF4nBr15mqj+fgoC2dMzwq0rY0jFIOKqSVKYy+8x0i6U0qykntwSE4Er7CtEVXQIrkyoEMhDOt5zAGsokyWuUxEn0o5QcWLnIl2tK2YQxT7xpMjcHoTOmRQIjh6w/34L0XjMGgAH8INHApbnd/WhcStBoKOBXC0AVYz9RYyj2GGJ8FfBFpgnTHITDoG2gS7DyfcThiBAlWRUQk9RnnuRC9u0QeLuUZbHy/c1lj7odYMk7CgAbznT3c/45BShzUE+TK8og1ARLCOgx+LiiWjxmfMDSB/FXCQcqqDXxUhjDbDxG/+AuEqof3tuddx+hsiZ5+GnS0maJr9SbMQcPO+8gI/1KWdvdfvOrMyPYskJZgSU3fctsX2EMDBmTPf+fvIhDIt1XykTIJDypXTCzr14gLGbMRKgXzKRV4A/gv5MsICeeQi1sW016Z4lvZb/M5suL6vN1rOPuEMpEYzBf3tTnjzwXCuNgIon8HyMDPmrRQkwyoBbdOMjvcGJcpZiRPQN0TLSG5fuIqO9EJgADyABeGB5ZURIZ04FAAzAmkhyDtd2YsYrll+5dD3KBmTehTT6BB6QUFZzQ4iSZZ+qzLwf+lGHFgEIGBEn/zNuAhQcuq9zRQb/lHlBOhO3cginvDPiZ+Xlg51a0sx34Mzci7O+gOb+Txq4SYN+64BGX7N3XzBZo9ufwixr1rEa7aBPoBwBu0sI2OeNOwQTfPXGUDY0e5sufdQzyRWMRywk6z7wB6zT/rpFzlTHEh+odur0DQdwfiYk2ZKhJP/bm/FoUJdLO+dxK1Lx7ctbMwudCHQVIROQ78xcGRs4k79vCUmqnu5f6RxBJJCqDF1buQJb0EJCPYuj3e9Y/B/A6XYsIGB+ok9nl9ZSyCg13UTQ4iPoNnrsxdN7UwSfAL1fCSAy/bH1FqDtgS8+bcYjWseQounmG3l9aVNbbIxnT8vOUf47BLZ6H
1Z5Zfspm04IqPRfqs6lzq+RfLPKTK+fxq0IB0SCaEqZO44wrDKw1IOqUKpJ6gnyPYsz8iLM5uc6HFRTL5i1RLQBcAarkupd+t8UEJs1v+TpVyfgmDNUl+mCcIZ5vwgGblcOy5XNYCmNvKaBfDv7ZllC3Aa3MLY0hrL5KV0cIF+jbdaxjHeu4h4cxBo9+9KMTM0Z9rNbRj3e+8534jd/4DXz3d3/3Xb0rn5X4hm/4BjznOc+5q3fjcyJGoxH+63/9r3db4Gsd67iYuEeDfpEuwEayjhc3wqAiMl21thlUhueJG00umIxdtmrhllfQHlLRGXur//7upa/3mIWZJNQFxpUDgF++TesS2y/myYvBZ3sL8dSwS8mreACs4VetSI6IyevM0iFJlYuItDiXStIIQuXUrwKwCCwnqZJ/RcmymUBPPi/u7yIYC1ougOBhCgcHIMgrf15Bv35iXgFAk8l4GgH7XFXClMLmU5CvFNBvuskA5GjcAXymY/Cpz1d0VUpeeVMK6NUta7VSuCwrlnYSgCMH9tIplGs1JW/yJE4OZgN94G5VRbqCJsMEl14HGr3kyyEL7HQPmd7veYW0DzH5haj/nAJpgCS5olSTGAbmOPHVXYl8zcth5rKoWbV8B1LHJJWlXw4RMJEv2IDIVlBZsqgDFinJeg7ZuCqLZQlIZfvoEkkxRvjQHQMQk7JSQB84zH34VDJJfYCGcp3LtpNRmtUePkTsy+sQ8CudgTWEkTPwLgKwPQ8/Q0hSYkWWHDWGet59+ftAJ++pgJ8zlJKd+bnQY9VIQ+MA5O2i39c58MuJNAdrCrhq0vkwAqvH5pylkRdBKEjua5YMDj5Jecb9PS4cWC4QFzMeP+sFsxLazHuobbhooGnh61akhLm4IDQNWmEVh6ZNACBZg936LlikZZKjd1r767jXhs5DIvhe0/uJTPYekNh4OXtPE+gwyuLvBgi9/UnGDYXclIkT5XcIa6YnywikuRkzVQ5h+xnXB//yZ+iqZ1ieOM4A/ZXJ+lwOcgWjiiIDhVpIpslzfb7kAMOwGATgY96rPQpL2CjMpc2tjEPYvILPV70P9SulZh+0nAHnboI/ezPCbBf+7M1oFzX8ok6qBzxHYm+/Zn/BQN/uDO1skca41J8yzgUprgq1FFk1Dcg1QE+FoAPkKGSA2WiM4CrEYsTAnxslUAeZXHjeV1H6nbRZkXnNx3+V7hyyA3sAn5y3A+d6FfAn21TZ0DTXucCpsERYBpHczkCjTuWgY88Zc/BsM6DVfRYhwon/3NDKoLfvMfSOj4JP/WDJIOiE65BYxY7sgMDVa5R0TxrHYHkOwsfM0+6Aj6bt1l/DuWoMsn7idqLhsqWYzYNDNs/MC6qGahKGujkl6TWE7l7s5mIdy6/nZayFTwRUzqByBiPH8yBn0DF0Q9b/vfl7v9MoxlSMwOcIPB8hA/Isx0rGwZBhbvOBNabB0Kd9ZQwKlWLbAIbPgfFNB6TK9dQDRqPOtXnbOo80hiXsTZqP8zkrTEQTuJ9Kx8detwHzuoU1lKQ+c+CvdAa1E1ZuiCASTz9rMC4txqWVvuZ5LMUIlYa9S9gc67nVOtaxjrso7k6yf3eXOHfuHH7xF38RX/u1X3tX78pnLV784hevQajPUlwqk28d67g7xT0a9NOFoS5sFQBMLDoDAJogMl314wpWlEqFRpuzRWQ7wyRy9t7KNqVdilz8mpJO+UJWJTTz9wWcvKAU0UX0R9RfA3AgkYGDlbfpuzEAZPt9MAhNAgZo5Wmf7afJwIsNXcMaI4vzoAwrBv4QGk5WxdidJ/VqKbLETggg77laOPCrtRbDpVP0vgf4+VqSQiH0JD6NynqWFTP4plsHgb/xBqItEWzZk3tKCSEBz2JRAbZEHSCVrsDCh3TsDGoA3nKVcKWVzjEAbQ1EAkXTna88yTQ8TzlTVReO6tG4IkEJDMAR/b8upPPrfVUCtFcFf2HAz8cusZIDfimnEsFMOURhzUWRl+Ir
qmOJ8TUWYkx+Sb17JiWlqAcA6ufSUQwS1RoB6CXDVuXFtJeVeDIMBf4UOMyrzzUhpZXTy7bz/FEvPq2gDiFiKVXWizZg0XrUbcDuokUtv+eAn7L76pY9VTRKa1KluoYCfgr+pfcPqTjomH78PfXt05TTkOWX918nZZqDsvL/wQnQ9pQNmDMIFSBX8DGBr+D9MQD7BSZQVqrffQtAkvi+7aQ8fYOwnDPAt5yzz5B4hYblvJ8kE1afbxjs84savm5R787g5zXaRY12fwFfe/YPDTGBfns5A2Yd67gnhwJ/MYg3FomfX0yMoRB5HCeRblNpQA3KAAdD1BURDSYQPRk7iZxZxD562b6liQ9WPidzhvBhEb3nAgEU3XMvSLHYYc/B9OXQ6x/dBwX8cjBCn4Uk4GaSZAZLfEagM4a9gxEBtLaCs4FZfQoq1HPQYg/t+TPw588g7u9ieW4vzYmsLZNcJ8Dzo2bGoF87Yx8/XwcEH3vAn/6t7fimhW1rxFAdOh9MkuRGVBFMJ+epTDpVmxgeW9dX0gQAawysMR3Qks1revswmLPnjKwLSfUDSNLnCZzSOc+KOQKDQ3xHJCZriN3kAX1Aqv+9/DOy7QiQYfZrJJFSF/CPYieFqf552le9UB9DWWfEKHVLF0ItD+sLICmVpN/5aDuwfTjnPCTI2v78lPrHk4D64bol358M8POhA9cTmE4ZazF9p/ucqiyo4gIXW4UeMG+J4NTL2BieB5Gw/GQeRQIoH7afyV807bhce6Hla14lTIn9EiM6yXOP7NrI5/pyPfTA7VURMmlVYRbGhv0UeR3lu7mVFMWRjOrGcMGckSE2UOwm1AYMIoO9Dm3mQ+lLlivmYOBvXnvAmTR3rdsAK6BfjBExMOjnSovSGZRO+1rn7NJnxiTZ5nWsYx3rWMfnZpw4ceJzCvAD1hK861jHOu5Y3LNBv1zyRxItgFY/Zj59Fv3F4go5kAR0aQVtFrpc4SQOL6aTN0q2wF4NsImnTA786avucwy9BW9a72fvrap0NpR9flDNfoDVl783YPcB6MC0QWV1J+2lzLNO3ivag5fPHQH8QgSWnhfXlgil5YrZkTOoHMEZAvkgwNkIiAViOe4vdtsappmzTM2oAlUT2LJCHE+zDQkoLAttsgLQqZ/M4HPqLQJjGPQztpP0VI8+MojFpJOfchVLExUVYBzayAmeAGF0iSyjylrm6qIqaVl7TibsG8AZlnwtXAFrCC6rBu6Bc6tCzo8yMXU/hlXPgEnAibGcYE1giW7DdNfMAXBQt2VMj1WYb0ETMb28LImEY6AE+PRkNiMQe4APf96ik/y0pg8CHZZS6o67S0ZrPxjiCnsG7piVx0zAPhdEchoHgKfkeZO3C/bWY7w99gC+ThJJQDzf+fIp0Kd+fJx04r/nDYN7+7XHvG7T75os0VDAbyRJEn3PGTrg9QcoY4+SRBX398Xdwdb0q8v1a3wu+j6IgTh5pEn9oYQpAPHN0WuBX1d5Cup4xGBlxza0RhJy8nupnjLWorQO1gag3ud9NibRi5gtHJJvH9qG//aDqvjgxYtIWHySDG8XSyzO7KCZLbDcqbE4O0czb9HMGkmOB9jSYn5hZ6Y7J3SMuzPbX8e9MvQ5kCeYmWHTdnMVAQ8ssT/nUNlPnz2tjJkIsVeVygllGd9BiTKiRTA2ex71Crs0uQ0wmD/csHicJbBwOC8jw8/x4AFrOQmeKzsAKTFO3RvMKsrYY/n+dJ8R/7+ApIXYhD7LHejmSUSA9+h5zPoYcbJyGB1RO732EbAGhS1gFudB9Rw4ewMDfrdcj/rs2QTkkWVVAzJc9ORrLmpYnttFvcNS6fXuPmIG9pE18LXOqaS4JZ9b67gpLGkyluUcAagEOoAk6RhtgViMe4oI/v9n7+/jJUmqMnH8ORGZWXXv7e7peWEcUUHelRXfUNBVfAFkQEReVVAQYUFFh0WBdb/4Cr6BDLrgCy8rL6LI6roui6g7LLKOuAjCsig/
Z112RVGQGWbomZ7uvvfWzcyI+P1xzok4mZV1u2f6Dj091Pl8uutWVVZkZGRm5InznOc5IU0+25UxaR5/8I7lMmfeZbYVha4AMCMjYXsjtPmzAWhifPV8nn2VwUitl6zPf8tgJyDXu40pofKE0BcgJPd5n+dtiOrXlFrGIRFSTOiIULsKqACKAiwb5ht6ZkPZY2PwkxldCeB6cULbywDbBPpn+6uJfsr2YzZYYW8WFJZZlylFkK/4wS/sW/7e+N4xcB3JuhnWP5f6mUs1Gs2/JKCw1gbv4/C6UGUFO9ZEKc9rWk/TMv36mLi9UJQfvDiZCvDVnnBo5rFReWzUhFlF2KgcfGz5uur3SrKRZVxCD23iuaxzCzmuZWzGEa6CN0md+kqu4pqdAuBR6AQEjpIQcRo0V6Q+XQzcV8/rWk8++5NAQqoIPnph8UXx11z21ToZKPVtQ0qYefZtvQMWwu7jMedtVe5TfdWN6NESZfDPE+GiQw0uPtTg0iNzHJlXuGBeo3GUr2vX1PDzc8B6WftWa1vb2ta2trWtbW1rO0/s/Ab9VpldFOpHsphMQMnYtqbgF4aLd6AE93PbU+DaGAhZZePtx/iNBQDNF7cyGXe434ngV86oVcBvDHpm2Zk+B8U0sfnsQlJsWiujdi4DKXa8cwarb5bBrlgyYl3dgPoONN/iYEJvFoIia5MD/kAG9fh7qXHRFwk+ZfhZsG9ccyQDfr5hWSpXoacKIZQAhLK3FOwIEUgm8xuQsRQZyIAEr+AT8Xc+AdGRsCrc5MCPM8MV6FKWHQdgh6AfA36p1M4jIBGQHOUAQw7oJKltZIgUkPFUZuP43hn/nbHnFReyZXxxPSlhculvqPR7qo6cDoEGpHRsi7waB7UCGJhi4I+/45o1jAwS0RLwp0YKQmo2NA3bhTnGIMGnINKcFuyLkV97AwKGVIC+NsTM4ttpQ2b1aaBkzPBT8472fQ+A5aomPldmwvg3432MbVzDb9XEYLPrObM+5uNVSVPLRtQxmdp97Sxg6VA7wrxyaLyDd0DvOCM/JYfkGURv6jmKjBXPa857vs+FIczBHMeR+HyALgcoVQpYa4OqnF1oI/rdPgN+eydaRDnPVRfR0TkA/da2tltrsc8Ae779bCKVWuKUCIci07nUlMyPOq8OgD/D8rNAGGF/ifMM/JFDopj9u9Oabmfl2cbJP7njE+2N/aiciDPqrDOJN6mAVDr1csJJyv5OUU5Arj17NgoKMSf4EPtvfQfqFwjbJ5B2TiKcOpmlOmPXwaHOtfliiMzsy3X8FsJqjgN235RRruPHPlXqOlBj5FIhcy1RlufUZDt4+ecqkb7mZ+jUcZfnenk4UGJ2UoIBiUaJUmO5y4HEJ5BLBmTAeHhw+dXWP1YWuxonCBkZRKIMFimM54ly3/UaGEt7KjuUUvFv9JSGlOASodJ70oKTKYJUXnOk6gAAWneSoKA7wQuXbpWYLA2e8UXhgxUVXB479Q8TCcBo1hgkY0fOZ3BYJRrRd5p5ZpI5fU4qy+NuAVcZ/5DYt7BMWiujbnrOzSfKYN4Y8NOapGMGpssSEUlq+DnMK4/KlXp+tQNoj9UDFPDL8vynYwgPuhk5WVQZg0TQGoY2vU3Hnv1vD0o8LvnajT0Df1LDlAAkJ/dhnvvc0r6RIsupx1TAzgTAJRAI3nlOJMzJWQm1k9dE6FxJ5nIxoQu8jyzjKQBgTkjTBC2RpA8x5e8PzytcsNngyLzCZu255INJAoHzqGZr2a21rW1ta1vb2ta2trWtbZWd16BfDhqMFu0W8BuyRbT+xzIoZyV01BQIGQQANGtzJBEE7cu4T5JdOw0h2HZLn7JMzohJV+RzJmP2pb0pRtbYMluvHrD7FJzJQTcF/IJhMyYJtim76yxMAx9eFs6VYRDZoFCqZsNjBErGvRwHbR6Cb+ZDdo4F80SCkzTgp9dB33E2+u52DkJkCc9DF0iQgaWncp0+DT74Gqmeo6eK6651UQC/hEXP
oMVOJ4BBLLss0k/I2bRBM6cj18Ug0lovKTPb8umTV21jKmFcwT7NWLYMQ87QThk4UwAwSmwjADk4y/eAg9dMbSqZy3oNhAyslaCura+i7Lqp+0zNAoMMADP7LmUdTrkundZOkUALgCzjKAPtgCEg6ZzINkEy2wFEDrwapbmSAZ44vDLAN8n+Xdh+IA7MBZlsIgrgt+gDusDXwLgu30JkPbsYsSvMPQX02r4w+fYMwDeu36cAnQ2a6N9N5TPrz15vOtYxpQwA6veVM8clVjmanh9peh6K4IDk+NzmcwypUxhSltHiWoZDeVNlQRZgsOxc+1t7BvscETZrPt7aEQ43lTCGExpPaDxhq/Zo6k2uYShB5ggwA0UBP7l++DUg9ZgEBGJgFl9se4SuR7/o0W636LY7tNsddvb6fM9tJKBLqyCR287I+9tU+motq3XHNeo7ZlO4Kkv48RcjcI3cgAWtEnpqCQJ6AZka5FGArhyOdkkSXxjQ91TY3JOJUCbRZJWc3aDmH0o7RA6oKlCskaqag+EZ8Df3jNYA1WSxwDXKsi8E9sk4kczcC+Ib9ZLoo0w/K19aOSChzKshAru9SH9OeIu3NsHKE5h11O0gbZ9AuOkGxFPHsbjxhEh1drmWMQ8Ry3O2J3fQbe9i7/gpdNt7Wa6YD6/0JoUE8gRyBNdUuRYyAPar2gWS90gNyxDRfCurSuQ2FMSp5szcgsMixMyO5HEuzxpNgFsC/QC4jFTHwT/rG6jvTv0igyO5PzS8XnLbKSISZb9P5UcDXAYoi+8itXK9g6eASJSfS/pcs5KTtXc5caWWg4ziq0RJENMa1/oID/lcODg3gurkeuV1S8qf2e8pMGPXE/8+JlpK2kojJ81ekyklGB4sJ6T5Bin2GVBMKTIQJYxgrcXHvrey/MSP7Ftm1jbzkuQHIKtIaK1so0iSfKmPbdl+tlam9SeTJHQlsn7oUHaXVRlirjvtCSxb6YBIwIz4XM3lfDWeMJN/rl+Aul2WDe/2ynWnkqrapxAKy3XsuNvrNvSA6zPjL4FlPlXBQ9e3XhIiEwDUcy4JwBcHS4amyAPQA3CBEx6NT0NST1HrLVLo4X0jxyc+M4CYCMHpuJbrFEBO2FOmZBcjdrqIvZ7347uQ/U/1dcdmk80aqZV42dENXLzV4OLNBhdt1NioHN+3MgfTfBP14c2ltm5rW/tWa1vb2u5IZmM3a1vb2ta2tjuendegHwCT/Wnea/Ytlll7gMlc369ZjAC/DPStyP4d/513phnGq/vPfZqQH5J9ZiBTsnnJ/G6wH8vIU+nRKQaiZfdVTQZtuB8Th6CBsvGxixTNUGZVpJTOABBMiUEODtiXgAYH+4x8oMoG5bFMpz8XtnC9gn1VCSSQCdKlSgL/WmOmquFmGwKINjJO5m9fD4C/PrF8TR+R2VttTNjtCqiTZYKiAHUeElRglM07EnlIBrtC4oBoHzkwoAEICzZNSVueSS5xziCXAAgcA2sehAAgRc2uT5KlLv1TZgUwkA41mBzIZLAr0MfZ5Ay5a0DTS+guotQOSdL/JAG0CAM+ozAkfP6HXKOJQm/uUdl/JJY/ShHwDSpygKMsNcmyk9yPpMCd3AEZ8DSBryhAYEBCCiUQqWCl1uFrAwNVe30UsE8AvgnQL8SE3TZkYG+37fNnfSz1TgAMAiUW4NOM6Y2GJZk2mgpN5bDZeGzUHo4YGNMM9dqXuqFjdsHYlDuhx2llO3VMMmMSCibLtYMhu09BZwX8WO40lsC4zAf6mQKBdnzU9JhtvZe9Pubg6l4fWYKrqQQM5OuycYSteobZ5hzU7oDIgfoZBy0rBvUjwExhK/cHDSo7xEIIHpjzlAPgjVM2AWHWeCS//1y4trXdnoxCl4EB6x8AEuAHlvyB08ULHPHcq36VJ6CXOUXLQ+nnuT4WUEC+/Z73apkBwj7DUs0/MMhEyvbLndsnyJoTvIIAGSn7YmnkXyXiZK3kKk62iUXeOsmxqpQ5J4YY
0ErQrBCB44uAXU+4cO4zyHXzXszz5FbtcLhePac4GcNc87hdIG6fQDx1HO3xE2hPbCNonVFv2HlSk69f7BW2326P2AVEuQicoSCSJzj5N2kxIO7tMtNwtgHUTanbp0Cc+KFa168AByyVrTbFWtdx0yQQycHJ5y37iQbsyz6qAWOS+LKWCWiT+cagc5JEsD7os2vM5OR/s4qfgiEROgz9CWWpzyqXfRxNbskkTXmNwmAsPqI+U8v5zgR+csz0M+MwIJqrH923UEaqVx/eFXWUmDR5S75KJamJ2yl+XpY+l3tLwaPC4oxA6FiVw3sgcuJdUgZoDEgVgL4DNTMGqzAr4H6u+ai1sh26qFKcRVFDpdVVUaMcszAvJclLz6bWnc6nFiWRDh7oAlB7DJKY5nLOaodSyw8R6FsG/PoOFNpynfVaN7wAbQkG2Fkxj2kdb2X8kdP5Rhi5cr5ZxbcCVYDKfAIoNf60HzWPfQqBQVbdTmVVgSJhnKLUyMzwMTOQl3uaRy4lSHIDIST2NfdkbqkdwVHIdbj7qPW4eWwsww8ADs0rbDQV7nRohos2alw0rzGvGFzNkqlVA7exBb91aGWv1ra2ta1tbfvb+9//ftx44424/PLLz3VX1ra2ta1tbbeRndegX0woASExBfry6wSlSAGPKbMygSsBv7Hsk82Czx0ZZtYuvbe/ITfISM8AoJHTzOCf/ta2YfehwQ1dYFoWYpbhobKAloWhDeJrbCUB0/U6dIHYt/z7plxG1Lfc13piTPLxyb7ALLRZJSwBlGBJzuxOAjrauogasLHyOVOmAYWqHoJ6wCDIRyrv5eYshVPNEMesPqk5A8kwTspuk6BPK2y6NnAgb7cTwCcMGUqQYB+gdUJ04CWoZcAuIj4hUcYiEcG7hMqw+5RlMb7O7fmcHBrZSAORKRHIJRnvcm94YRzawKmyFuxxNa7cUwr2WVN1JA3u6jE62f6W8KAK6CmBJQW5R9d8ZmGkwgzxAvzpuFrGH1/3HLzQmoIqu8bHNQJLzRjbjPG9vsh4at2+3e7M2H3jz8YSnpbJZ+v3jcE+BcKYBQfMK58lxFQa050mSq+w+xj4U8skAhkDrQekzeo8Yll+IaYlKVOV9eylvueegIK2lqEyHy3DsakY5NwUsLNrPBY9g5tdTMysEPB1s/aICWg8CWjtMKs3+QHo95BiYAAQ4OtobyGSY8r+CyyNJeZWgHjOO/jGo+pifrjWWzWCX5X1cRuaSpXelu2v7Q5qQ6YUYMEAfUYvn38Frk6TS1BYWxKIH9Q5s4DfPjbwT1YZOX7oTKgxkOcacwmM4ucgvPUNhP2CDAph6INN7U/qjOlzUsGgiIRaEnW8PGuCASYcgF6gilNtRFcx6Kfz66k2YLdn6WdsVhn0K2H5oXFCUQf0LTPuFtsIOzvC4luYLjuQjxkETCEi7DLg1wvgp/VJASB5NwD7SOY8bkd8Q++4LmrfFf8KEACoYYalikco889XA0UCTZ7KxyN/2seWJv/o5/k6mvC/LeBHxl9Q5lVCLEDk+Pzb8yuvISf6FFUF7YcyxFiNwKGjhEgJkUikIlORpzY3C8tcQ0ARDM6sPmMd+OaI4r9xUlj5PQPnAralyLeymx4TVe3Q73KCHxhI0mNKYF9IfT7rkyS5hz0jZkUVQvZDlmGrCYeA3GddOQfOcy27vgf02tbjqASYEqabAsMK9qn/YFl7S0s/rRkqyW2rzKuDyhsjRsqAp1fWpteEq3KfkdbFE3YfSa3CNKEUQOMkg1XzmLl2U4yAi3BOZYNLfcVclsF5EArzj3QM5ZpPAGATKvX82fMi+/TOmTWKJB6aXw4AUrGc2BUTCCnXk2bAjxO+AAXDgRC9vBa/zjvC0c0GG43HBfMKh+RVFRuo17mEa67T/NPP9Fv7Vmtb29ruCPaGN7wBz372s/HRj370XHdlbWtb29rWdhvaeQ36JQCtXcdOaQau+u1o22GdmTMD/JLzWKrlApTi6fa3Csjl
nciiVhlxBpTLAGCQBXEkTDL83DADmUxgK8mikpLLizr4aiB/qhKo1izgx7IuDoBDXUl2vNZAGcmH0t4puL2T/FtyXFC+miE10wuyE23EXp9wp60GjSdsVA6zigM488qVjG1hcVG3C5VwpL7jv/sFL6y1/pZl9wEM8rm5vHqgKmNNoePfxcCZrlWFVG9yBndVl+zuas7SU80mAlwJ8KRSbyxEDk5pIEIzj9VUsjCkxKnDkMx0V6Q1vUgV1s7BOQZpgmQna7atBreUCaHBIl7Yy98jMM4RZ+b7xMBecAkhUq71FxLQdQmO0gBEzMGMXPvOHI9EmBJY+nEc4FVWn30fYsrApQKCWkdPa8oweDlsx5u+VDpeKNcoXAVCL8BewjiwRYo09i1APeArVK5C5R0qR5mtGDVTOZ9TyHclGzzLlMYErcuo5z+mhEUfEVPKYO9CZCsVyOJgyLBWX5Hr5H5rpvNMgDxrlWH2MavPYaMpEp5bjUftXGa7cXCKWQR8XZXrrVyDw3OX5L+Q8rvl71GOOaSEOLrereW6Rq5Mf7ZunzVlp7jIyAHLu03LmurfVt5Ua8Z4Afu8A3a6gLnInF7QseTn3qzCrIrYqBy26g00G5uofc3SXPUGXN0AXYvoPdLeAqlqEJ0H2gVc18KJrKfzHOT2dYVqHlHNa8SQ4GoH3zj+3hGaQzX8Sqr32tZ2OzZl+plpNZO6xolHq5owSRo6h+tvM2PJbJuf/VZ20VX5+a/sugxWWD8rJzY5EPHnKcWhzKccFyc/eQmUi1kJcDVTOxgQNkyuLSbSg67K/liAy9KU0YBG43ppjoDGCxPMAV3UYPxwrjjRRhxfBOxJUsTJNuCCmQEmp8YcwCwuQIuTcO02ws3HEE8eR3tiB/32LvpFO9g+dj1Cq/5mRLezYPnPLiK0EaENCOq71B7VRiVzH8+BJBeF1vzLtf+cA1U13NYR0HxTJDy5VluqZxnISdUMyTd5vo8id77Xl/pgVqK69gp2CAvLJItpMhDFkJUvNDnMyi1mQCZGJK31bAHfusnXivrP6lsncggCMvXi+2kduSzlioRazm8dmf0EcDIKXKlFW3uHPZEB3+sjYu3hHLDlBOgQP4VrQ0viF8pYWABuuI7hsYUzEv2WqTpWAZE1hl0vgBy8SP8DyOxCBaKT+AlELH/JPivk2ccMvyzzqfeZq0B1QpbUNdcwibzuoEyD1NEOcANgT2XrW0kiUllP65PxOOhdJfeXy3+Z+4XnJU32ckQiLY/BGHuRWVV/VOcz6hVMDhlQTl2bywyUk2Kkg70fzp+jRNB8HqaSK8y5jomXoaTgaN/yeR8kSUYgVmWu9P0gESKvIzVhtG+B2qFynOhAogSSEz6g6xZklqf2RdcWe4HPSeUIh2cVNvd61I6wI1KfO11gf6z3eXxnFfu0h2es0HDpVpOBv42Kk9hUb9TNNpAOXwjfmet4bWtb29rWdlpr2xY//MM/jFe+8pV48IMfjEsuueRcd2ltaxtYSgknT57EkSNHznVX1ra2O4Sd16CfMmzU5dc4sk0+vKX5coOl4CoG2eAH46j58DcZ8NMaBCnxYi6CAyKSkTlg743bgANSWK7/oJvbQFfuDp/aHA7Q2hgjVqIk5g+O22INlqWTZb3kODkLWgJwMYDaXVmkEgdVRtmsGhCg2KORrPrD4MDHRsUgTO1QJBtT5NoYsQe1OwIAFumctNgp8kC5W6ZWRdXA1ukZBxJABEq+1AmpZpnJp2OkbD8IGzKlUqcuRJVjQv5uleVghEokkcgleZfPkkucUWyDg5YBqQFRe26EHJjfL4HZ0rpl2ikbA+O2bL9USkr6EyYOTbPsvSPD8iv1+/IxoNSLsUFQDZIN+qtJ4IPjL0fNGedyHCjZ6BQj436nuWVVvoj7z6A3GfBPgUdHDPjBJSByvcMUODidRHZVGXmabb7XBwlUDiUquxgzu60VBp+VdcvjaUC9EBOaFd81lc81TzYE6Ks917SrnbwK
gDyTen61BGVXsfsswLjqKraBNAv26T0QzTGpbGgkZvGEUABAu41LhNojy5/VySG4hOiZzQgAbR/zmJzOFAiMic9bdAy0ogd2XEBICbUnEHkAkRkZiVDNikRUZv1tHs5BOmWFUN/Ch4jYVHB1Ddf0cE0F3/WoNirEEBlkb/h3vvZoDtUDRs+nzbR26W3Z/truuKZ+xX7bnCnwBxMcNoADwTGTXJ+Bp2soxsEkTwpiaHeMf5P9GnKTMp+8w30YG3p9W7DPBMcHQXlNwJLvI8qzbmrass9T7whJkmI0oG4lCFOCqQsoNeHA5+WUSIjXvkheRwgjqW9B3YITnITtF7oe0ch4AkD0DhTKGCRJakijh77W78tj75jlx4w/J+w+bjflxKsGNJuz2kIl4JEdqwzqcEKagn0K4OjzNSaVGtdEFZcBl6XxtUlpFhS2DKxgEsREXhIAsz613nMInChmrgHts+4zKyqA/UFdj2hCkDVN9NIFi6fyTCxMPf6jFpaVAp2dyMRGKrUv9XE6cHmSSKWT1oOeYKSOxsUeF7fLCVRJ/uZ1RbXke1o/z2X2nK1F7QYMP6g8pTAQubZcZJDK1NoegI7CAFU1EssEtX73MGFr6Kfk8UcByaxk/NAof29r/2kdy5yIJveq/svjqn+GIbsvxWAURSZYfmYNZz8byMrmHg5t6RjyeEsN7hRBJMB26DkRtHRsev6WBE/vGgCERMhqGBbwG7CyU+RzDEJwBE1mCIm4hOOswp5I1e+ZLBIr7akKFZu1z/82Ko/GU671qn1OzoPmm6D5Yqn7t7mtfau1rW1t56ldd911eMITnoB3v/vdAIAnPOEJ57hHa1vbshERrrjiCrz+9a9HVZ3XcMXa1na7sPP6LuqFYaULsywZlYZB7NMVp82/w4jlZyzRqN4HUBZLlsZivzPsQFtfj+BESgiAE2BmavE12h/L7oSyQOQkYgaOxsy9qgTspoJOXttNEUSu1GowjMSKGCgMJru3IpczhtEvioRiaOH2TiKFAOc9Yr0BMhI/ANBHgvcEt7eNLQBbAI5WtYB6YTlAE/oM8ikzL7ULxHbBWbStLPZGmbTkPAeaNGvfG2DPLIih4y5yU3F2GCp7mtmMGhBMMdeEC4GvOZUV0qBcBp3BQZvoACCiiyz11BmmZxBgLcYEbwJ2JdBj2G1OaocI680JMBUkQLbKnLDqLPBn665psHW85CciOLAMWUIJpgAw/WKWQiM/7iXYonJXQzAv5QCozYgvgbPlgK8GiLPiUBLJK0EsA5IEHAiOKvhGasnEvtT3GxxUaZ1iX1gbeu2TQ+WdqVUIRAf0EejAdUp6Yff1EdhpAxZBJVy1Tt+Q8dflDPSU2X0K9i1LdvId2FR+8Lm+av0677gelLL5lMVmwb7N2qPxDt4JExNlDlSJTg1O6nlQ5mLKxz8N8AEF6Fb2atmuDLfWrowSpFVWqnMK4vL7TgC62rkMjlaeMA8c5O0qPs87rcsSqEMpKDcCRIcMySA0ohADHCGfo70+4pDIEjeRt583h1DVczATmOcvN99EajhonZqFXEp8jmLbg7xj1l9dwdcLNFs1Qsv1rxT8q7dmp62duLa13Z4skc/y38AwsOzIJDNB/ZJpU58qB4dVmjtvUNh+yz8uz177aveNvtTOGqsYWIk7hB4Z9RsnVWltMaCADyIJPk60mqojrMwv3SfPcWzW91JFYH2WKbDnUJ71ChZ5B1SjOnlR5mGVdQSA/98nd3DtqT3c5YI5LpC6VyGyjPHnYQ/U7XItv+2TLPEZIrP6Fi3a7RYpRMyOzOGaCrQQnzFG9IsOoQ28vQToVdLTNx7VvIJvfGY0+3lJUel3W7i6AtUN3NZhuCMXwx++kOXRs18FYUp6pHoO+AZtSIN/KpE+AG4EOKsd+wKp8Xm8oHzOzG4qEvCZ4de1iO2CVSGEhWUBGvI+A5Rqem6T9Ffr3umVoM/NPpbErim2We0BgJNaVpnW+JtVBeQIqdQKrhwB
LjGLT/owkBYFJKmQn5sD4C4fUCyKHSO2n72HBsxZH+FdhUAuA/TqBwDsP5A6mTEhZDUBC+4KcCh159QLyLL7osaRqrnI6Te5rrZKqRbwewj+qR9uFQgAWcrItaLAko6D9S/tDFZuO5pMKGXJ1sL0WxpH8HU0JeuZAeURqDMA+/Q680WZZCy96UZzrq5Zc91t53NNQO6/Is3CvNTa7xNsz7wGCz28JEAmWS+kVBISMms7tFzWQfpA5ODIoarnSMS+6aKPaEICocF2y+Oy0wWWYK8LqK9+7AXzCrOKX+eea/nlsQYYCK43eV/dOUioWtva1ra289S2t7dx17veFe9+97tBRHjsYx97rru0trVN2t/8zd/g+c9/Pl7+8pef666sbW3nvZ3XoF9Ko9oNJgtRA91TsVb7kQX8SsPLdKElSUu7TVjefqmvRFliaomxN25vHOyy20iW7JhFpwv2KEH7aLKOxwxIByAQ5QX5eAxIg3Ky/8pVSJqBLkAc+gXc4iTgKsRDdwKFFuHmY1xfIXpg+wScbxAP3QnVjf8E6nbgmy1QvwdanBoCdVPjFbkeTGoXiEYmJ/Udf94ZeSph8mmNhwQA+n0MzAyrGqBGASIzYFtluSlbv4Vlu+R7sKKMSgqqfKcGeRS0IgIoMSvMO2VXOcwqoHacjT8ARkiDQQrgsBxj5XRBzTUsFGjT86cBDVNW7rQ2uE8gwItLcIkGgckCCBoAHDYzWuSNHPdLj8cGY3Qs8rnUazIx4Kff58CoippZxkYatuWIr+kAru3jSWqMCJOMgzEOzjU5UDEA6c29RCkOvrO1MnMtGgleeSIEKmOeRHaSpTsDdrqAPiQs+pABJQv2jcE9tcr8PZbxtJ9lQEvYejMv9fuE1TevHGaVx9xzLT9HHPDVwJR3ZU5U46AkFblSBfVNIM07QgxD+U4L7g1YfQIu2xi1lax18qqsBXu8tZMxcwldJHQSEO0ooYsOnhJi4pp9bR+x17uR1NYQDNX9ZeDcLdcuDAlZSq32HMjyxKzlma/QzA+D+pavz65h8M+5nGSQapYLnnuHftGCvENYtOi2d5khE6Mw/hxcXaHemoP2Q+dvIyNJgLgt21/bHdRE4toqAgAjwG8FS8QRDYAa+1tOgCqMOU1C0Lq0mTEODGqNwc7ZofgnSzV9LegnlsiBvDDCQ7f0POCOyHG6AvpwspBKOlZ5n4P9WJNAOXyzNKfrM1wtyEORoEx5YF7pnAWZk3h7BfkcEZzTWqjczmbtcajxWPQRsz6gFknIeUVAj3L+RFkhdj1i26NfdOgXPVKIqDZ4znJ1VWQ5dTilXl9oA1wkkHOoNhjwqzcqaB0/TnqoROpT9lnVoGYOt7HF8p1WaQIotfxchV5AHQX8+lieoxZcipSACESRq64DwZFDiMaPSUOZRQotEFqg7xn4VNCvG0qcAmDJxVtg0TxDVwF+7Kfwuaw92OcaZeJ5ArwnzCpmNzVy/Sig1ctYcF3Dch1Z38rR0B+0jLaBWb9oUJ88gpxhmOk1H3uQsPScMO4c8TpmVPFwmnXmHFLyxfd2fVEYMb5XBlarGVLVlFcSKVVJrEt63Jq8tM+j1ZJ4FfDLjFrxQccJZkvDNWq/SM6PmMkCtiFVfIwiD5uiA0WtF+qMhCkN55ExwGc+y2+TyhULoJo7GYdzJEliKen1rOUexI+VRIn8GzuXC3s6ZbZfheRI1hvqvxfWNtl5WNeO5Pi6qRpsVHMBApOoaXh0sWI1Cu8ym1frJc4qh0NNhXnlpI6f1FCU/ZXr03P5hXriPr6Nbe1brW1tazsTSynhn//5n/G+970P73vf+/C3f/u3+Lmf+zl80Rd90TnrU13X+MM//ENcccUV+Kd/+idcdtll56wva1vbfnaXu9wFr3jFK3Cf+9wHz3rWs851d9a2tvPazmvQT+urjYO6ymKyoIXaEsg3BeLtZ3a7SXBwIpvcOSDEspizDEHZVhfBU/vLQTK7QB7slL8rwXvOiGXG
jtnMADmOgDBe9MJI8khAgMA1dHI2f+iZ1dfugnZPAHXDi7tuD2H7BHxVI7mIcOw6VIcuAEKL9Mm/R3/sWrjNI4jtAnH7hBxayGw6DbLlcYmRmX19myWYMhAYOAOdA0yegT4N1GmGdvRIXSs1PDwHGKqqjHUOMvgiN6XnTbNtwWCfjqdeb31UOSfJJgaPY0QJvGiQzmZ314kGgInWWFMJxizFKFn+CtwwoFWuDxvkmGRIrDArrUnEckcJw8DQGOxzJvCo4Br3iQMoer0VALS0kfcLBaHL97qvqf4vZVcLo9HJbyIY/NNxyQEcShng4oAOSUDViYrqiGWiEmQQRqcyQ8gty87Z40lco7ELKQN+eyFOgn1jSUqVrASGwF7+zMxlzjA7tfbPkNXnsFk7zKsC9mkGugJ+Po9FGVetV9gFlDkiQth+ImVqur2KzWfNzsG2XqXWqPQCbhOGwTXnuXaOowQX+RyGlOBiQhc4UFTGgpaC6Rbsq53LQF/tJhhK8ibGhOCSMAt5uzYfWISrGlSzBjFFljWTJBI32+B5yUg71TUDgbHrUW/NEdo+B845YF7BzxuEfv8kh7Wt7fZkY4Bm4EtNgGaaAGITQeyd6kbvVdZcfZEEk/wx2aFhAkdWTojTrG6bZAVXMQPcOdFnLm2kwc+kvp8G5V0B/QY+V1YLMKDoAEjpQVQxI18Y1AoU6DNS/TLi3BsQAY2AEp5YzlGlK5OMCcsWi9Sj9Hyz8dhsPbOgAn9eO4eZJ8BIdvI4eKQQELqea/TJv3orIvkyEtHMXySM5eQdUkjM7qsd6o2K2XzegRzXNXWN+cw7Bk+bOWi+xawt5wdSrCCH5CsEuOxf9cKGb0PMwLH1mcqZjnCJ0AWH2iNLPOZzof6rSMGj3UOSJLLUtexHqmSzM35nHqvR+dY/0xjmUuUCHvsptlluLuodwDdKPqaY8vNqsxaAwxEnSCWpWReXFTsYbCwy85BErCjgucq0T8o3DhoqErmsfMGMvBR7ToYDkIL8LWsrvUvtukV9gPF6bCDxSS7Lddo6mdkXJ8d1wF2FVDWAbzjhLoObKSeP8aucg3384LGsuJpDAdxV2WIMWkadpAbtCQBPVMZiBNzxHBQZ+Ovawuwzc8tAwnP8b1SvnQdSVhpTyaD6mQVxbZ900/wd5Tk0Tc2hKQKJWBUDLKMPlDk6j5NNho0932958HogBjgA82oOAOgie6KHmkquE67zp6CfKlVs1h6zipMeSxKkOT5yXL8wRS7LsLa1rQ0hBPhbmLiyttvOXve61+EnfuIncO211wIAtra2cNVVV51TwC/GiKc+9am4173uhV/6pV/C9vb2OevL+WA2GXFtn367613vCgB49rOfjXve8574pm/6pnPco89MizHCrSqFcQeyj3/84zhy5Mgdto7keQ36ASUwTihgRO0wKbkCQCIoEtQYs/eAEswBhpKeo+3oNEy1vDuV1fErhjov0kZZthgeQ15cj7NCtd4ckIMmtsj9IPAwWL3y55YJpAyuxjUc5Is9/+tZXhOxh9vbBvULUL+HcOIY0t4C9PG/Q7/YRtzdRrjpeqDv0J+4Gcf/1wewe/1NOfO7PbGDfmeBnU/toN5qUG9UqDbnqOYNqq05fFPnzHGbca6/B4AUIpzI8tVbc7i6QhUCy3kCSH2da/nRbINf51ssG1RvssSU1u1zVXnv+F/UMeyHUkJjVp+9shyGNVkUVPGO4EFoVvjgGmRQaU8G/YSp5YayjEDZd0gFwFMm5371g6zpYj0z91bIDo59HGVO5bV+SugT0EaVWCpBoNpIKClQWlhkyx0sx4AcJJ06Dg0JF2BymKE9ztq2bLfGsRRk5RsOAI/YrPmESr0o59wA4NIAkx5rCUhOD7iCU40AuRoIUxBPx3PqN2q6nQLAs8phLgDXoaZilpsvErCNsNr0PtbritsqYxqEiaOgrYiAyRAw+7OLCbV3cFGvMkKMQK3HLO1PmfoFltmnYJ+eN2uJcvgf
MUbUngNDs0r3D9SeA7yhLuOtY1h7Nxgny6qZur5VRlfrL8akrFXif5HVkTdmR+CbHiS1Pqnv+DxuHgYttuHmW0h9C3/BzjBBwQyE1rQKu5/+bHT7LLvN2l/bHdMs60NeB0wSSdaxgWkF9vwA4Cg2ET+HysEp+IcV2+R9TgB+FPtpcM4ey/i7FIusY1WDUBfQz3v2C8hxQFnvo9G9PWD/WXn3xBKfOg6EhMpBgttDWUQAuTZWlXrMapZT3wvsf9yw22O34zp+LCXNdbX2+oSbFgGVAy7aqLHTBXgi7PUJhxt+dqR6A6lZwG8dYeamHnqICF0U+c6E0Aag8Vni3dcVT/QAQtMjtB6uDizlWXtUAvj5ecMsZu+E5cctVPMG9SYz/GhjC2jmSDUz/RKQ2X0K6vTC7Ouzv1UYfsycjyynHqKZz50kVMmlYV/1OgkdqN9D3N1GWmwz2Kc15OzlBTArsW4Ky7NquJ6fMj2FlZUG/lhJ8DrDlEHzzEyD32RGvMzXnXGAQkROZqkksUevoV78L2Doy+dxSZoUxWAbQs/HZPyfpdIDCvB5I88/kntUEMZex4OEK+2CBbF8XYC+VGor5+2s3Kur0EMYfqnI54+9LQeASxxSlnyfOhmOig9CKPXoKgFK9XU89+RUoGTb0gRJc5xyfDCysFBJWedLMqFZ39kaoGf8LI2RNzVsaWD/tWyep5xZg0ZJ5uzbwrY07D+eVxnsJWnLKwAMGIF2HSj5Xejz9UTUIbmW58PQY6PZRKpdrjtae1aq0GQ5Bf1q77KcZyM+bu0IWZJWrxUdx9k5SKha+1Zrux3aq171KlxxxRXnuhtrAwOwVVXh+PHjAIDNzU388R//Mb72a7/2nPbr5S9/Od773vfigx/8IOq6xtGjR291W58JgNiVV16J5z3veWsw/RzZXe5yFwAc2/vN3/xNPPCBD7zDAjK3Z/vEJz6B3/u938MP/dAP3aHv+SNHjuDrvu7r8KY3vemcJmfcVnbee5bKZtEFXO0ErJI6A/mfAliycKDA9T7GCx1duKgMFYCRBA6G2crj39p/usmqYNTYclArDkFLZSRZaSkgZ5pnWU+VQorIEpQhovxLafBPa2QUucqSaR0SysIqRa7b0C1A/QIILeIug3xx5wTCDf+McNMNiCePo7/xU9j71DFsX3sjTvzDtbj+rz+OGz98LW788LW44W+uxSc/dD0+9X9uxI3/70bc9Pc34eQ/H8fJf74RO9fdiFP/fEP+t33dMWxfdwyLYzdjcexm7B0/ifbkNvrFHsKiRWy5Lk3sesRO6lP0XRlLredngzeuyoAfSwfNkJpNxGqOnip0EfnYFfDTMSnBqCKFqP9CGko7AWVRrAEFlqnhf7OK/81FrnFDJG3KNi4ztjSw4CTDmnJQRSCwEeCn51bPY2+CaAqs5SGStqb+nYmNYyv6M2UnKOAXTGa2/o7HGHnMO+nrXs+yi4s+YNEH7LTl327P//b6iD0BZdvAv2lDwiIk7PYRi7581gYDgAv71QZeTndf5vGVcw0MpwOdezTzmyVZHRrPDDyWbOXgxrxymHl5rfT78m/m3fCfbueHr1q7T2vK5JqPyu5zBfzK1w5KgMoRT/xOATL5HWe7UwZMOYDPgcgs1ekggZnhP+0rMxH52FiqtoCwU/jyMm9iNLbKcnQ6ZuWfjmFtgqHKiMx9c1P/XGYiqi3Pk8hzICcHcN3PVM2B2QbcfAu0dRhu6wjcoaNwh4/CX3Ax/IV3Mv8uza/u6CX7XmefSfZrv/Zr+PzP/3zM53M88IEPxPve976V2/76r/86HvSgB+HCCy/EhRdeiIc+9KFL23/P93yPmRv538Mf/vDb+jA+M0wZHBpwjQXgWmKioDyXxs+SCGXNjBgs4tPoHHWLlhLqJ1lfTWzMKBsnUw2+B5jdV7OvkBlH4i/kpKCqEaCqWmJCDvaFcix5PtZ/Ot8Syj9HqAhcFyu08Ig50M0JRCXJxDnCYUn4SGDfQudc
QAFFUXqoGqRqzgw7rSWmQ7dCkj7LdTbM3KvmnJxVb1Q5UcvXVZbzdHWFaj6Dn8/g5L2rK7jGgGcDBlOVwR1IvWi9Nixzy5r1rcay0jrWyz9i3z51LVLfIu0tmOmXlSNMUpkyt50vCWPeD/tt2VJ6jhWkPs1FG6eHemBZAlsekvZ5lLdBSdxLULl5GRssg2IRltnlTr8OGbNoDcif75fYm9rTyD5IZa/viQEZKmtUgG/Kv2pe/PF6zjX8DPuz+K7DBMbiEw/9nCm59OFYw/g55VjUl1M/QdeU5XvrS9F04EPB1Xy81ei65zXVYP6YZPTtY2Y9SGYOnDJbw1x9GuvXMOuSlvti17G6LtVDXNGtPK8q2GnkdbU2+/JaiGXpmdnHfnNm9rkiz273mfsrSZuJ1sFYtbVv9Zlrn/jEJ/CCF7wAbXtwCYYf/vCHcfLkyQNr7yAtnsmD9Qzt1KlTeP/7348///M/P+u2Ukp4+9vfjvvf//743u/9Xjz+8Y/PgN/Xfd3XHUBvb739zd/8DV7wghfgpS99Kb7gC77grNt71atedQC9un3b//7f/xvPeMYzDvR6uyNbSgnvfve78V3f9V34h3/4h7Nu7y53uQuuuOIK9H2P7//+718DfufIPvdzPxe//uu/juc85zkI4WASrVJaHf87V3bkyBHc8573xAMf+EC86U1vOrB2by/Hel4z/TQTUMERBqT6UrdFLQd9ytLBZrdObxuBdIaLMduOXTSNbfzdODMT0FTl4WdTGYUKXpEbSFByPRSIPFIBeSwjyloIgENCAwe4hBj4Nz4mBi+Ia925dpfrooQWaecU4vYJxJPHkRbbaG+6Cf32At32Au3JbXTbC2xfexy7Ny2we2wXJ649hZu7iE/uFWnFCz/lcahyuOjiDQko1fC1B3mC05owntBs1ag2OPjEAaYGKcYcnCpD6+DmKAviuhFpqU2kWuWCOGiffI00P8JMtZAQUswZ0ZkNFcs4RiAH38bSmhp8yafFFaZRbQAYGgUMNCtbGX223shggauXhemfA6uU2W00E1pf7XkHOGBBpPWCVGosjdhglAMrdr853iPtRWgAqmR86/6SjFUXE9pY5K40QBKhwDKw6MOgXpzKeK2Sk7T95PdlpJQBoJKStVfwjaR+HtdvI+Jallq7j1QyCcsBEgXRmeuWMphp91lJ3UZUWp+ujLsGLDIAZ8C4cf/tMehx6HHOKwdHpeajo+W6fXoetZ6pmu2PvXaJCD7nbEvNISfsg8h/AyxPx/2ggeRnZldq4NNctfrZ6Vinej+lpMBvlBqCNrDLTL6QEqpRuCnXSZId1nKB5DE318TYshTZaGrlRAkSSdmEkAi+YgljkMtMCXIV3GwDFAMzhpT5rTJelTBEhFGM7d39B+M2sNtj3Znf/d3fxXOf+1y8+tWvxgMf+EC8/OUvx+WXX44Pf/jDuPTSS5e2v/rqq/GkJz0J//Jf/kvM53P8wi/8Ah72sIfhmmuuwed8zufk7R7+8IfjDW94Q34/m60lv87Kkkgjhr4AAfrVKEis36gcsjeif4PnpWxvf5P3RaW2MIABu3DJP0th8Nulv0c+VXK+yAuOjIxUb2b3aYCeiO9fEhaP2qiOYK5/pf0Tqx1lT652hAoR3nOClkoxEoZ+K/V7ADnU9QxVNcduReiFNXmoqTIoMa/4ubZVOzQ+5bpYXUg41Ua0IaF2FTbnh+F2b2ZfSCTUVX7TiwRBLTX6vPhYTiQ7eSi5Pmll0Kdq3sBvNPB1hebIVvbFVOqzuegoJ0VsHmbpdU0eU7CvapB8w0yuWGq15eFNEL9A66/GDPzp3D6TZyE/00v2ogMKSB1axMU20u424mJ7MEeTkVuEc+wvVnVm/CVfl3PvPAM5CpTY6wcMQHmSesMuIYGyC69grY0Xqa9jVQI4MchD1b/td/qcsyBTK3Udc6JZ4jq4MQHRFf9B7z9PDlpTj0K3GmiyjL8YxWHU9UoPilzbr3FVfniO2XBL
jF0Luq9KiDSgVN8Xf9AmtampD+rBUuSIzPLTetqsnkp5rC3z0SYgaSKTp2GCXZm9TAKbDoG2g7KtHhu5CnCBf1tFILCEZrLXjKxDlxJHxxYjiIQNOQXUCtN6cGGN5skk12yAG6hxAJIkJuNOjiU+iVxm0yVpj2SupuiyzOd47l56FfY1pD1rzbyCr7iNxiW0niXys2wo+Dpv/DAxIu9Or+PM+HNI9af/Wb/2rdZ2e7M/+7M/w6FDh7C9vY2maU7/g9PYb/zGb+Cqq67C7/zO7xxA74DnPOc5mM/n+P7v/37c7W53O6u23vnOd+LDH/4wfuAHfuCs+7VYLHCf+9wHF110Ef7bf/tvZ93eK1/5SlxxxRX4zu/8TrzlLW/BBRdcgKc//en4+q//+lvV3lvf+lZcfvnlCCFgY2PjrCT2fvEXfxFf//Vfjx/8wR+81W2o/eM//iP+w3/4DwdyDv7gD/4AH/3oR/Gv//W/Puu2AI7d3Hjjjbj44ovPuq0v//Ivxyte8Qr8v//3/3Cf+9znrNrq+x6/+qu/iu/93u/F5ubmWfcNAHZ2dhBCwOHDh8+6rRMnTiClhAsuuOAW//bUqVN485vfjF/7tV/Dhz70oZx4crbn9JGPfCS+7du+Dddccw3e/va342u+5mvOqj0A2Nvbwyc+8YmznoesvfzlL8c3fMM34Eu/9EvPuq1/82/+DZ7//Ofjsz7rs866rXe/+934m7/5G3zf933fWbd1+eWX44/+6I/wnOc8B/e4xz3Oqq0bbrgBv/RLv4QXvvCFB+JT3Hjjjfgv/+W/4OlPf/pZt/WkJz0Jb33rW/Gxj30Mfd+jqm49VJZSwotf/GLc5z73weMf//iz7tvZ2nkN+mm2IC/wua5critggjAacJgM/ExKgEZe9E7kHY8zxKd+W7adAPWWGhwBgePtSRZ23g2yZZMc11jSMzP8EkyghBfTHsMAiUoh8qK3ML9qWQA3DnC726D2VA6koOc6DTDypgrSpRgRFi3Cos2fkyM0mzUuXPR50dk4wpHGo5pXaLZqqRMjgbeQEEIAeYL3HjEkhDbC1xHJR6QYkUJkCSpIZvq4Fov846ztklmeA/CaYZ4MGzKlAcCVM8uBQUBKwRP9jMDAlwYSKgHxlHlE4ICOBfiAko1s8QgNltgrjEbfEYAAA9g4DXoAwQRaE4rsExHXbyQAdVLAhsGiELlmT9lPAf5yH0aA5elczhzUEjA6pDKIyihdCGOvCwmLEA3YV8Z/bFmykZY/A4BSRy6hjhoo495y4DTBU2I5KOfgPdcEGdxzEtTI597sX7PJlbnnQ8r14apIQ7AqA07LcpNj8HJwbOb7cT08bce+kgGJc6AKgEscSNGxGkuV6nZjc6Ait+kSUirXRpL6evpJvibNnMLv+Qub2aL7mpJxDVHr62ntzAL68rikQQDT7ju/X5Hdv2qsdcqonTMMyVJ7S+9VxosttbMqgUqVyKpMEFPl4CRQnAGEcPaL8NurnThxYvB+NputdOJ+6Zd+Cc985jPxtKc9DQDw6le/Gn/0R3+E17/+9fj//r//b2n73/7t3x68f+1rX4vf//3fxzvf+U5893d/92Cf62L0B2fUd0Dfsoy5nSPJAQKOqfShTS4CZE6SYHl+ppmkkCwnZy2ZusHyPsu5TZmVGp0C/gbSd/v4bCrnqQBfZqEVMEKBKkBACh0XKxGdBAg1ffLOgTLowgdPKcJrUNzIUGb/KrMpN4EY0fg5ZhXhUPIsS+ggQB+rBOzJQz4kZcjz+83GY9FHbM6bcjyQ5Cjv4Gv2zQCgmteZ1TeoyadDamv86e/nMwYO500G/bww/NzmEbiNLbiNLUAAtKJOUdhOKt3Iz9shmx7gBKAYE8/tMQkbzhXAzynDXpJYnADPWivaMlPVX1WWqoyFXgPsMzLb0/qKyXlmd9rrQmTgAfWZkyT5cEJVpARyxOBfiPAe6ELxfzS5SQEpZbRzag1l9nsSX01r7YaU2N1bcUv0
MSEQ4JMy/xPfhwkgR1gJI0wB6xPbEBySykGSXPtOJB9F/lE+nG5rDPjL3zEZhn0sYN+UD6iAXE4+Swz4OfAtuKrmsNbu00qfVq5+AC4paCX989pfx/6Hkz6tlDcyDD9uTuaG0Vjo2pL45OQxzuOsfdGxT7IetSov9jWOwEFn5mcztnwofCx87NPefK4jL31MMWLJ8V8F+I2TNUJbWIJdDV81mPkmC8o7UPb9LSg7rg2psrTJlsqYAOLvSLb2rdZ2pvakJz0JT3ziEw+MkfTd3/3deNSjHnUgbQHAV33VV+FLvuRLDiTQ/qAHPQgPetCDDqBXwHw+x+te9zo84AEPwEUXXXTW7X3Hd3wHvuqrvgr3v//982ff+I3feKvauummm/DMZz4T97rXvfCUpzwFKSU861nPutV9+/f//t/j5MmTB1Kb6653vSv+9E//9KzbAYBHPepR+OQnP3kgbQE8bz7/+c8fJCvcWnvyk5+MK6644qzHbG9vDy960Yvw13/913jUox511qCJ2nOe8xw88pGPxGMe85izbuvnfu7n8J73vAfvete7btHv3v3ud+OHf/iH8f73vz9/tlgsDmQuUnD0bW97G7a2ts66PQD4H//jf+CHfuiH8L73vQ8bGxsH0uY3f/M34173uteBtPWFX/iFBwYK3+lOd8Lnfd7nHUhbz3ve83DllVeeFQgGcGzwX/2rf4W3ve1tuMc97oFnPOMZZ923iy66CE9+8pPPuh2Az+Vf/uVf4su+7MvOSgAPWAABAABJREFUSsp0Z2cHT3/60/G7v/u7eNSjHoXHPe5x51wa9bz2litCkfGMAegXHDjRugKQLO9xUfTTSdxI8GZfeEMDPCZ7VYNQS7UAz8RMwCj3najsw25nM2NjGkhR6uJOA+splYA7ZyVzAEAXVwpMWYaayvZQvwB1O6C9bROQCkgaVMldKkGi0LLcJgeOCL7xmF8wQ9iocOmJFuRpAPZVcwHvJDqfAi8wyXu4/FkB+wZDlhmBsn8NbNmaLKNM7eRrlr1CAfZUDoszTqeN406Ux3TQDwuUegfvYDKHS90QTzCBSsNWMMGQVcCG1rLIQVQi1UsDXIIfBYNY4tVkizvKtdu84+ARojL+hvtS4C/l91gCLYEhaKS/0cxoDuCVjHbtCgM7EXt9qVG00wWu2SMHEGIaSDRNgWNTY6RMulnF9dlUctOR1sjhWnWFMklw5AbBZg0apTQ8zw4luFd7h1lKcFQAqjoV6bExY0+PYRUz0TIXFeSztfCYFWquISpynGOsK6YEBxKyHo//1EMm16acCqwJiJfk8tKfp0T5ex6Tkjgw/JwQ5VrL4LhcD1GjdtL+GPDTYKiOUYwpj9sUY89aSPuz+sbjCxTAXgOv3pWAoI5/GRgJ5lFEcg6IXKsnyXe8EwEFfZOlzFLVIPbnQIJKAtm3afvAkkP5Uz/1U3jhC1+4tHnbtvjABz6AF7zgBaYJh4c+9KF4z3vec0a73NnZQdd1S4vzq6++GpdeeikuvPBCPPjBD8bP/uzPHkiW52eqsRxbVYA3ef5kpgWwBPilxMH3iOnkkWjvzxFANvCn5LOBbJ1dcK8CAseg4T5+XiLKNegSUfYNBtKdyvDTGsCmbwkoPhFQgvJAZspYloqOoYJ91HOC2uCzGDhAHvv8bJ1tbGaZR00qOtz43HZIlAElZvmxXxFSwpGZQxuBDV8N5oEM0nnAeYKfN/BNNWD6ubqWQy1j7ecMfHoBBl1dwTdVYfrN5qBmDrd1GDTfYpafbwZyhwqmqqwnUNhUmU1vAB/nCHUiRMPqLjLO7Fvl2r4Qf8DKVKq/CgCR6zjSRPBG5U+z/KQqQ4gUZWYoZsn7Asqp3xyJE688ceJMTMJSjwnOcVJeFCb7oNa2YzBT21LQCyjPYP27i2lQ69k+//U7FShRYAeOhixbGlfVHdmUYomOKTkMdCYy45cG53hVvbqcmJTK8z+iyMDraz52GL9C7yWU9xFlzGD+9o54
3Cd8HythzmCgJFrp/WjWcRD/UOvZhVUjp8cdKSeXqq8wmcgpc1vCstRwGfeJ5AhgOBeu+Ez7rv63jiuPD68XsvKNc5LhNp0AuwRKDhIqVvzGssOpl+uPQH4PSBHNvOHjk9Q6DxpI2dpR1uQ7BuqrQT81+fXTbmvf6hYe0No+HUZEB1Z7zDl3oOf5SU960oG1dRBMRmsHKVl7ySWX4JJLDqacw4UXXoj3vve9+JZv+RY861nPwqFDh/DIRz4y1zm7pVbX9YEAm2pnG/xXI6IDTSy44IIL8OIXv/hA2jqo8ZrNZvj5n//5A2nL2pVXXom//uu/Put2/vN//s+4/vrrccEFF+D9738/vvIrv/KMf/s1X/M1eN/73oeTJ0/iQx/6ED74wQ/igx/84IGe04MC/ABmmb397W8/MMAPAO5973sfWFsHwVZTu/e9731gffvcz/3cA2nnla98JY4fP47Xv/71+LZv+7YDaRM4uOfCxsYGvvzLv/ys2zl27Bh+8id/Eq95zWtw5MiRcw74Aec56EftLlydWHYycVFyDqJY8UPjAI2Zfvp+1cLlFnVmJFmlbazKoB0v3DRTcywbpawZk3WefMmUVsBvDK4oSOWdZKaiBLS5XhwzlmaeC6t7lL5StxCpqQWo3c3BLYoBQeSSUrvgTO6tw/AXX4a6XWBj+ySOfP4JdNsL7N5wHOQcqg2TPe5YMii2PbrtBWLXoT2xg17YgSolZeWlXFPqx2itGa4h06A5vCnBqlkOMtF8Kwef4BvEegOpYXnPONtCoAp9TCx/GpGZRbo+1WVoPk1gtUP9mwMKyGOqQIyCfSq7mMEZk0FMWk9GxjLLpEmgMVI1AP403sMArWbdxkH2sQJ/BAZ7HNJA+lNr/MRUgA4NKnkqkokEloWyjD4AqCwIQtqm9E2AnKxmNQIPI5SBwCyELjITIQordacL6GLK4J/21ZoFaLy9fSyQIwCNiyTZ/3y2YmJAqYoeQYDOXrH4DFQNgbOo4NSoD3AJDVQKiqXTQko4PKsEZJdx1uTmAVOt3O5TgBOP+RDUU1Cej2QqcLX0kbluOMM/6DilNLn96UjLObBoaK3jYKMGz/jYpL+GlafXMwdy5ZqhEthTYIClZrnhWqDlGsjTt70uxtKvyggpHdMvAO9V6rOw+gjDYG0JAEoShDmmHGiqGsl2F47taPAG97KCftDalQk74dw/7G8r+9jHPjbQ2V+Vif6pT30KIYQlyYrP+qzPwv/5P//njPb1b//tv8Wd73xnPPShD82fPfzhD8fjHvc43O1ud8NHPvIR/OiP/ige8YhH4D3vec+6+PqtNOp3QD0NE6iIQJIwA+fQj4L0miygz0yHVcDfCOADpkG6GI08XFX8KfsvFjBypY2ZJ4D8Rr5WVpf4CZmZ5orUpwViKt/wjKwy8ikWNgs5mez6zA6EBKop9pxAFVq43ZtlTMtzHXs7XH+u7+AucIgpYlbN0MwOoXL87CJiBYYEoE/AbhexF0piREgpyxYyOAbuy8AnBnzjsp9Vb80HfpX6WgOGn3eoN+cZ3Msy6lUNZcq52YaAfkdA8y1gviW12lhS3TJydDaMCkjEYf06PQbAZfnmmFKW7lY5TJ6zRWFBwSwDbkEkO1E1XM8vhMz+SwBooLxYajkOpODref48JGSGol7bBPVNmE0XqDzvVJIUcIgxDuvSCXNx7ssTPitOmHMVIkDmmSlX7cBv4PHhZ76CPCovnlS+W/3QlPK9o3/TCEjJ7FU4UOyH1ylQ7j0AcAyM6b2S2cB6/4wS2rJ0OpbBPuuL22tlqk5g/l78EhpJfGrdS33e6zNeFRvyfGWB4lDkKAkxs8qs/HDK3w9/W37o8j2f7HiNjokXFHFS7pPgkELPp0yBLlO7bmV9RpNAqOPaxwKmK0gagWXmp01qGLct10iyx2GSFXKyrdbz63vEnhVfqAqgqgIlLpWAeg64Ck09lxsnoo9ApNGJ110rOJzZmW7wXXcwxKbbpa19
q7Wt7TPbFotFTtI9deoUvu/7vg9//Md/fLsIIN+e7TOFnXz06NFbLR1r7R3veAd+4zd+A845HD16FL/1W791i9s4fPgwvuZrvuZAJDhvS/uO7/iO9f1zjiylhEc/+tEHIjF8e7eDYlgepJ3XoF8uGG4lRdSsDIpZIKXRjZ7lVaYkovYD/sbgoc1Wt59PLcwUARiAgHpMGMqKSiA5t7WqTZRFbBT5R0/ItRIAZaOVIuqVIzQw0qi68B3UmFkAXQvEgNh3GfBLfZvrxEACPw6A29iCv6BlMO7QIfgLLobWUKFmzkGtdoFw8zHEnR0sbmSQsD2xjdj1iEa6E2DQT6WjyPsBkFjNG7imBs1FSqqquU/NHKiqkmWugTxXZaCvC8Mgw6r6Y3omdJtx7TIvgFOpqyaggSs1+rJ0V+iLXJrW0JAFOgFwVTWo3Tcipck5dkv9y8GnDMpMgzxTpsc1Zkjp70utuBLwceRKn6jIHiVimZ5Blq4ExxTwy7WHYsJeYIaXftb2cVAfESgyqJb5lz/LjDigTg61Z6DbBpeAYXa5HrNKQdGIuTk83+WLHHhyCS4xuOpkX5UDQqS8n/Ex2D6vApw0CGVlp3isy3Hb/ug1M2X2elEJMZs1r3/neoUF0+O+iURZmZKGexoDkhYotteNjiMJEE3a8UFblAOjCvz5Cbqekwi9Mv8YBBwHQotpTT9n2H1TzEkb+LPgKgej+QsvIB5cBEV5jlharWU2UJFdtoHMftUEc1uaSh3flu2DCx9/Ooprv+QlL8Hv/M7v4Oqrr8Z8Ps+fP/GJT8x/3+9+98MXf/EX4x73uAeuvvpqPOQhD7nN+3WHtBAK4Kd+gdZSAgedVQJZ53yHMnFZ4O9W2Vj1YOr7VSoKU/7RlOycUU1Q/0CVAOy9PAZhAD5+0n6MQZMUQanK+0jkCpsvtKBuD9TvcbBfgbDYIy22i4qCgiyhQ4o9auch1eIYYPQlo5Hn+wL0zSqHjcoX6fsYhvMVkAE/619ln0r9r5rnucz+29wsfpyAiFoTkerie7FfqCy5Icsvz5P7TIdEyIAfEAdzf+15Tq9FVlLndfXFBsx3V7F/WndI7YL76SeumREgqgAfJNlDEzi0drYCfnoI+uzSZycc14Ud1Mx1/BziYykJQcpYtOdSn88K3CUMP1NT9Qb1J/S5z89E/iAkBsJS4qsnX7Nqp0tuFFnHgXKJBQ4BIAp70AvrXUEqIatZQJ+PJWXZ/CLliSzvOn5UeiqA1Sq2v1OgFQXUknw0IxcJYUmu9p3gHIPkAEBuqb6djOz05avSsUQs63u6RATZx6rzwV4mMtjGbGJj4zUoTJIViv+RAT9VgdDuJkyuRU+bQCGvg/k0WQA5DdRgUl8SIliRpwWqDslX8K4ZSOQu7Q6Qe4tWKldYGflPm619q7Vvtba1fRrsvve9L/74j/8Yv/qrv4rXvva1uOqqq/Bbv/VbAwneta3tbO1BD3oQXv3qVyPGeIe/ttaA37kzIjowxuDabrmd16AfpTCUn5KszCSBBmVc5KzficVMllfJ2a0rAD+7SFvqiAHiNCg22t7WSJhsR4NcVg5Gj8EGpaS+iAbc1HjBRGYdIoESAwAoC23mCT62oHYBt3cS1Hcs4xk6xL1dDpCAAz5RAL/Ud0DfIW6fQIoBttA4aTZ1M4e/0+fAzTZQ7+0CRy5FOHIZqGV50FTN4YQ9WO8cRzx1HPWxaxG3TyKcOI5+0SJ2/UDG04l8lMpIuaZmcE+CT1Q3oPkmS3rO5nCHjgLNDKk5hNRsIPkGqZ4h+QZtBNoQ0YaEvVCkPfO+gLy4LEGJNBhHZUw6CNhHBvBzBbghCHsyRlC3kCzirtSbNIEFyDl1oYcTKbHkhtJpVr5I+zOogYISGPJyKXOmM6HGcrbz4JWG4JLW7SMiNBq0CaW+ErdlggXkEEjqF0WgmwAdtf8MuEbsBQYBu8Csy1YBQWWu7gOSMMjK/2aVQ1M5sEosBwlnE7K8KpPFQE6S
iPRENBdDcFCBtoRyj4EKqBWkYcsQVBlMa0Tl+lJpTgaf9DqCSIDq9sMMfu7PMOBo+2lfgTI3qJxT7tHE8WqgUFkBPESUM6/Ho0kmmK/HoICfBSwBCcJJmCwllj0jQs7IZzDUSY8NsDu6gJRFGZzIf+Y4rZngUORSnZGAq51juV03vH+npEmjgHVJxoKUMQECUQXnhRs7SnLOwbaYEFGy60Pkc9WuQic/g+ySSy6B936pdsMnP/nJ02ZlvuxlL8NLXvIS/Mmf/Am++Iu/eN9t7373u+OSSy7B3/3d360DU7fSqNsFhXoAaCUAiD77Ugpmd0HBCQnOQhZVAoSkNAy8T/pYowSYSX8sjZhJCibYxK4VgN8S288AO+ofpGaD2VyGcZ9kkrWLxCTMpuzbxZ5rIGYfzhVWpCsMRYpBAL8F4u42Jyc1FdAukNoF4snjQFXDbWwhzbaQ6s3MOowJ2KhYlYEWnJRV1XPMK5YV3ekIh5oKm7XH4ZnHRuVwuHZw2zeBup2cqMXS6Qz4qaynqyu4ukY1b1BvzeHnzCZJQYC92RzkPNzho+xnKeinYJn1xUzS1aDWVk7CqzILKanfYk6VPmMb7+AooU5UfLCInMBRueG/xktSh55f8aPcxhY/WboWcH4ARnBtapF4jSGzqlgOvkFsNgDfsK8IDBhT9vGUk3Xys5kEqGI5cUqFpQhEVl1I5vmkNbENWAPwb6KpBVjUEOQQqYC9VkEA4EQkIqDy/OwP+UFn1ilT94VczxnURmAJCP2NrreMtC1IJH9jwwzKqhHmH5YAP123qG+tPpa+V7nPfD2AE6o02SqXxzPXS66jSKxskusQE6BQ50BFYQrys/PPPqCXgqsE4+tY0H/MYl7Bai7jO5zflgDAQDkxQb9PZNa3sKBeAcAiiu8xTmAbg2uJZD4z+xiMS+5LPxwbSShAaIVNzP9S4PViEqYfb8upY5rkkEIL6j0nh5JK4w7ngiiAtaPikwEAjZzY08DWnxG29q3WtrY7rt3tbnfDL/7iL+KFL3wh3vjGN+JVr3oVLr/88iVm79rWdmvta7/2awEAd77znfHgBz/4HPdmbWtb221h5zXoB4CDCFPZ27qIE8meJZtaYI0CTGdkSyxCX6RabPb36P1kOxiBhXoMvmYgU+qJ2OxNoGQaW8bNUjAeDPpRiqC9kxzU6xZwi5uBdg/x1HGkvUUODgHgBXzXLgdKhNmXmX6a9S3vk6+BCy5A2LoIe9Umy7jIwpfCBqiaw5GDm20CzsFtngQ1c/gFswhj2+VhcU2RjyLnC+AnAabM7ptJX5oZZ2b7ugT1RtnlBVwZMoUinwDoi2X2ZbabBngU3CMFblAyz4kBPwptlr+xrNQsqwQAAlzr+KQkvCrfwCvLIJVAiO2vBomA0geAg0+eCMmV340BJ6ISNMqsLQPYZEBEAz2hH9wXOTArQVdfcXZooMLe0jZ1vDssBxy8Y9CrqdwA7Asxjt4n85sC+oWmyhnxdSLUA+CoALA2SMfnVYC/kVmQT7fdL5G4MN74DeWoNZ83K4eZQVcq96iVgs2BKWA4VwiSHxLLh0UJhOaA2QgcPFOzx2iPfbDN6KNxsNPaUjBpxd9TbQ/qHU5kYTkSEFLizLVH/psDcQlaN9E5wtw7OAfMK5/vVXsOpvobwTdUEkAySHDPZ8C/1JqcCgvaQFuIyAHakBLCftSW28jI+1y37LZq/5ZY0zS4//3vj3e+85256HiMEe985ztxxRVXrPzdS1/6Uvzcz/0c3v72t+MrvuIrTrufj3/84zh27Bg++7M/+xb1b23FlpQTgCHA5iqELg7mHIXvC5hOuY6nA6ZC7YNkKG1qAPxNJUeNE7BiBFxc3na8vf1YZQedH/oJogawJPGM8ny0Y5MZ/P0iPyshQfQUA8scWgBTf9fMQd4jxR6oKhDm7AttHQFtHUHMMon86pBK4kB1mOexRFmmXJ+jBIet2mGjcqDFCdDeKaTtE4i72+zfhYAUYmb4
kROZT/0ntaqoqkUNMmS/juZb/Plsg9mDCgp6n30xN9sodfGy36XH7TKAmZLUt5M5lkiSleQQiYZJSyGmPOlmZQX9Z56rsH6KnF/2D6V2R8PSpDlxbYqtY9YNPH8rIFUSbigNE10AZaJpLbllMMI7QkychFIDWXI6y0ZaXzQVufAuRpaJDjHXvQVQEluiM0lD0ljFiS5BcKOQEkIiVGNwfAXTbPL+z6BfQupa+SiI2kfM1z2UHbePjVULVm4HsIqEAnvJJOrIb5VhmcT5SijJBkDxb8ey6do+S44LIBX7wTUxZr1ZFfElP82YVZSZrA1vExzyj4Zjn4QZl8HTERinjL5hwtnQhx37hGPXKoOcUo8Q5EH2kEZgZeljGoDAqmCSWX5qitQDvE6cAJuJqpwQYhm0uYk0Sl4bbRBvqeN7ALb2rda+1drW9um2w4cP44orrsAP/MAP4KabbjrX3VnbHcjucpe74C53uQu+/du/fS3dvLa13UHtPAf9zAJWsymV1eeqzPjLphmZpn6ela/Sbc4Y8NNmlUU4Bu6W6CBmwbTUyBDsGxyDBftMQGosMQQggzm1AiOEnJlLC6nV1+7AtdvA3g7iiWOIu9uIJ4+zvJSy/CSgE7se/aLNEk9+PhPQjcE2OA/0HYNydQPyHtFVCFsXoZ8dwcm9IAxDHovKzdFszjkY0+3yCM1ZmjPtnERqF6DdbXOKXZGQ8gL+VQ2/zrd48SXBKDRcPwYqK2UZkihZsCQLfw1Slvpiy6dFAwxDYGbIatJaMjrezPAzcp4C+OVrb5xZ3Xf5GqLEfQW5gYTXOFsXGC7qszyiA3yB7obSoNpXE6iyZr/X95ZVQXbRngeoQkoeoBbON1k+KUspKbPKBKSc4/p7+l4z4DOQZ4A+lfzsDQDoJbtfAZgQPZrKcTBNalhmxpc5HoCBHZdIsoqXT/iA5YcCAA/GKQN9Jms8B5bsxpS30zGxDMvKyJLlIFs0c1QeY87q9sr6JQCxMGuydNMtAP/GoGaprbc/yDllOvfotbYctNHs87SyfQv0WSlU21++riNcArrAwJ9LlCU/nWFMKoNiPOa8r3IOxyxJrotUxpKI8YQM6qdy309ZkSsrTOIQgX7N9AMAPPe5z8VTn/pUfMVXfAUe8IAH4OUvfzm2t7fxtKc9DQDw3d/93ficz/mcXIT9F37hF/CTP/mTePOb34zP//zPx3XXXQcAOHToEA4dOoRTp07hRS96ER7/+Mfjsssuw0c+8hH8yI/8CO55z3vi8ssvP2fHed6b1NLLLD/1bUyNLpWwHcx4GliHBHFBAy8o3zdTgCLK3OVtIFo3sZKCU5ZiYauM9jHw+3KySpVZ9pbpYxMqAGT5SILIC0bD7NHnYmiBvhcZSWEAbkQgNkvAUiL2lZIGyasZP+9rZqaF2WGZ54mZZzKubc8qBbt9ROMJm7VDG1Ku4+cJqOTzDZfgtk/C7Z1EOHkT0mKbayd3cl4HYJ/neslSyw/OsW8HADEMa/VVrLSAvgMycOaL/+WbnGQ1ltLPxw9he8l7ZZhzsoUkV5CCOPJc8TRg2FWisjDwc21ykl5T5BhUreqy/77LMs0wihUphmVgO5Y5vIvFH/AOIkddnlVen32uAH8qO1leiRNW5LjVB4COSSqAnzL7Fn3MCgnToF8Sn5+PqfYE9AAqJ2CfJArFBO+FhZrXKwagmWKbiVFKwuBitugSiyvWwnB1QPAMAk4oHqSUlsDNwX4KWrfEqJyyMs+wf0DE11FGk027Y8BP95HAgKgX4C/pWOQfD5MPsu88Mc9M2UBlJnG9vsJ8dYNiloOEAiDLeg6AVL2/ZP61yUbW9ys+jVwvqwZTzhtpHULFg825GdccHLA+gymxAQxZwPsYpYSUIt9ucm5sHViVpSda9mnt+mXtWbGtfau1re0zw5xzuPjii891N9Z2B7MHPehBd3hpz7Wt7TPZzmvQjxlwZTGvC3bN2B4AfrEf/VgAv9ivXOjmNm1W7Pg7
s40WfV9leVGUF0nTy5UM+ElAqk9ACCXz3P6MwFI+QGFUeUeoEEFhj2v19QtQ3zLQF1qknVMI2yeQFgz2xcU22huPIyxa9Is2y2umGJFCQAwR1XwG11TYuPgIfFXzQn9jC27zCDDbzGMY5hcgNZtI8yNIiSWp2pDQyrFXjoDKYVY1QGBGH9UNB8GqmgNJVV3YhZEXu65uMuCXGX4zqR8z30Sq5hw8U0kt30jGeTlnuvBPEtRPpr5KTEMQaFzzyzvKi9KptXOC1NCQBb53FSA1TkiCkUmBaR5cc771BFbD6y1FeHII4ECILurHgVa9DizDU68HtXH2r8YZ7AKbJI1WWVFOApylnzTMWtbA3hQjQ8bbOw46zSuXA2RdjLn2UMy1/Ri0W/QBu20QgI8Bvx15b2v+KUC410dsNiVw513FwTFHwiQQWUk5v5ox7hJJfZDRedRACYa1HgtgzJE9RwwO2d8PQMA8BtNgXwaHU2TpMQVWx4Ft4sB3rv/oubZM4yoklRUFB/WstNOUjYNAFvBTTCqYgJv23x7fFCvUtq9jmkx7up/MoBnda9a07p4dO2tOgl3zaiiDpte6rds3ZlFOnWt7fjXoqlbA3QQvAWkL+llA2Y4tzwV8DWnQ+Bwko3NA292GGXu3ou3v+I7vwA033ICf/MmfxHXXXYcv/dIvxVVXXZVlav7pn/4JzszZr3rVq9C2LZ7whCcM2vmpn/opvPCFL4T3Hh/60Ifwxje+EcePH8ed73xnPOxhD8PP/MzPYDabnd3xfYYbjechYXAlXw0Y5xp4LzLFKCwc85rvZZOolcyzY3CPEMtvUoWSKAMMgsuFQRJL7a1oZLPV7PaSQMUSjlL3VyUdMwut3LMqtzwAlfqWpTpFqpz6BeKJG4G+Q9pblENoFwyWxT7vZwAUKLswMIDiLvosxNkRpI2jkqzEx9BFYKeLuPZUh5v3enQh4oJ5hcNNZdjLwKxilt9m2AXtnIS7+ZMIN12P/oZ/RnvTTdg7fgphdwj8KdjntZ7fxlZWToDzcFVdkqyMdKeCQNmqKidcad+TL7URrRwhA2Qs1UhIOYkFqkwQiZ+t8iSo3fLcrfWps7KCo2FClfotGfgrGgBUN4O+k/fsV1bNZD0zQhljBSzHNk6i0b9LG+wLMfPfJJ9IUlQSjCo/myOw10d0MWKnY+nzRR8Y9DOs9ixf7Qm1i/xZJGzWAHpgVslz0dRMm9XzfP/p8RWQX86rSvrb1xD4GldQR2p1YzYHmjmcANllPTV8PsSJZ+RgnGWycDLGg2Qt+dw6HYNEIdJ2Se5byj5c2aaMO2CTnShnLLHvIOs466Njf2PQDMsXx8T1lGyV5Fh8vqWao9qH8T+Y9YZJLBqz5Owts0p+ncfK5cRIm5iYS19M9CP3dVT3lQ/ZI7lY5v6xnyC/T/LqCYhE8GTmArv5BEgcUM7/+Dr6tNjatzq741vb2ta2trXdruwHf/AHcb/73e9cd2Nta1vbbWTnNejHkabhISQbZACmJVjGiysbSJpoZyXop6bZwXabCRAxkVvaz9K2dn+u1JYZ7C4HosriiBlWEgAhCTgFqdXX7rLU5O5JxMU20ra87m4jnjqOsNhDe3JHQL89pBDzP2s60llms2pKsEfGM9VzBmOhfZQsTgEnMpgUitzlIHhkTaREswyT7BdVzUChBGogIF8O4lkwyg7zLaUwmXG1gN8Ucw7QzNQRkrQCEEv2OhmBx/uZAiv6d7kWRv0eMdxsBq3alJyj1vNTmckl1pn2c/z3KBNagZcUIMEpjojMkoOLzMAbZK3nrP6EUKUM6qmtAv3aPqJyhLYPaHtfsuJNQCQH5MDXoQMY+MOQPbDK7HVTgDVaAqTykIyAsbGUZ75He5F/Dd3ynKRjmqIAfgnJgYPOrtSe8WaeU2aFDRcMQC0aBlB0vPWj/RiN+n4cNJoClmFebBDdSuadDgQr+xoG6kj+S0ioHEk7422GdTdze7LdWHxsKI1lv1BwlztM
xIFpZaQkcL0ms3kJbErAMaUC/K2N7YorrlgpOXX11VcP3n/0ox/dt62NjQ28/e1vP6CerS2bnfP28YMy2C7PDUcozHBycAKk5bag9/C0n5S/l22GwfQRK2nww5iZMfselvNAYr+xyHru/wBw0iey7P3QMvgXWqBrszR6BkXkd4gRbrYxBL0smOCqkgzmG8TZFqLUItZx6wM/z3a6gJ0uoAsRtXeYV4lrtyVCXTHra+YJtLcLt7eNuHOC1RMWO+i3FwjC9FO/joysp2uqUi9ZfCtIrWaqm8zmI89yqCpDnsfOgqdmbPU8Lo2pPgOJGWjqI9rv1XQ7a7aGsiot8CAOWZhar4ycR6ogYJWoRwhwxb4lMx6T7W+KcOQKE5GGMvpqynC3STQD8IVKrTlPVp1Bn0uDKxuA1PCLXPNYAT9OjirMTpZRJ6CCvLqMl3SO4ck+Mj+elRIIgcDs1hSBwIoYSX0M8cWT+CWpXTDY17W5preV/leZf/XNUwigqjxdyfpe+bk4fE6On4os44hJG/izNExA4v2JP4eEAGb+8SikyTZ0/zHJsz1qbVLddn9ffKAwYxMLxlOQSU4Y7nyfdaltc7xPsSh9t4BfnLiH7CGrPzp5ZOpLWvAyLUtyDlUp0vCes8dsmMB5/TY+Vqn15+Q+5nMhm0yAfUC5ZtSnWvtWxda+1drWtra1re3W2Fd/9Vef6y6sbW1ruw3tvAb9khdW1wogjchlYGksrTiuSYAgGeJuFNzSrHB9L22rlXbkaw3gKMA3tdCz+9e2rCSSYQ3mgMJIJstKaBX5xRKUovYUqO9A7Smk3VOIix3EkzdxzbyTxxF3t9FtL9Ce3EFse7QntzPTL7Y9QtcjtBG+cfB1BddUwHwGP5/BHT4Kf/FlcBtbHMiKPVKzwWy7eiP3ncABj7kH4BmkqVIPt3sz3M5NoH6BtHOC6wju7crrImcW87CU+n05EFU3oKrm/ftargM/BGkt69OYlWpKabg+tyCqBmMqz+wty6bUTGsLgnjHC2llce3LkBtdQ0PWH+U6PrzwL5tFDSgZYIEzo4tcZQwaEFleCI/XxlYSlgmYKpkFUGhBoWdAatxfMtJdVraIHINxCYAXkFcy3H0EZt5jVrlci3DRB8QIDmRKRnvtmQGodfpC5KAXgFzjrwtRpBwpf84gIPdlU/SzZlUZc0deJCA5z3qcNZ4PbQSM6b3H47c6uGABYQsGKwthWOtxj+9TAfsotDkAUho07Aj5x0FWnWviYDtvz4W95gUgjIkKy9MEJ239xfE4rAK5x+NkA058TyUZL/1syKJTpm2umbcCPR2zbUutMO3NahsDlmOb6tsUA7EEIPmAFSQu/Rq2bxmiSRiUCWum39rOQxsnpAhTThlxQLkXvDw/SP2t0CP5Sp4J1VLw348TR+RZpz5N2acCTD0znlf1NUYA4ucJS9DOg3kedWZ/K5jqCkBpINo7KiBfvwfqFvK6g7RzCnH7BOKJY8V3Adh/6TuQyKW7Q0eBuSu+WkpZkSJWMwZf6k3EjQtwsos4tegRYkIlspYhJmzWHjElfHKbwb/aEeaVw6xymHmHQ41DvXsT/KkbkLaPIxy7DvHmY9i78Tj2jp9Ee4LBP2X3uaZCvbmBat6gmnPNPrexxXLpzZz9rPlmCdzXjbAkq1KLUd4nXw9VFnRcB74ys3s0UQWOcj21JECNfJznfGBaZaHUsNO/uf2xeoaCEVQ3gCaXjXzyLE0qoKVNyPLOGdAxAXGYPFUY7dL0EuAnPoAkGOXL1QLhCnTktpjN14WERYgZ7D2112dfqHJcB7mpHGJKWdrTE4N/ixAREqFug/hBjOR0ohpQ+zmqGYPycBVSDHJvtQzutQuuA6k+udb67ku9bVXc0KdAmm/yNRMDkta2TAVyU2lPC45OPReJCM4Mln3uq39FVK4BC+Rp+1ZmmxLl79TnZ5CWzycRhsitMP6sWxJH544VQwwwSK4kHIwRtcyAg6CeBTxT+X+aUpyZ
miPlb1X80MS2se/lQPk47VpF/9YxXZW4tmQW8JO18pTcfyLie61rGWTP3fcFVLdtSukD76pyn6lUKYa+uPUxrZ0T5fS1b7W2ta1tbWtb29rWtrbzxM5r0G8M9uVAhGXwxZ4X+Ua+Jv/WbLef6W+UpzKW4CPJQudC6JLNrf1IcdDGYH+joFoOlBgJDl2HLjO5kEFFZgwxiEASnHLdLqjf44DU9gmkxQ6/tguEUyfR7Ujm96JFaLuS/S37dt7BbTh4CQg1h7fQHNkEzbcQt08ibv8f0N3/BejQBaDQItEW4vxwzu527Q6DYIBkfUbQ3h5LYu1tw7XbiHu7nIm+t2Dm4d6iZBLb7FDzylJMnJE+APrGY4sC1AY4YdvYoKNk9Lph8F8+GtTvc6PgQp2DAkU+0MqATS3eDdQ0OpElOMkAJgfNuF4SZ2jHlNDHYZ0w7bMc9EDCkPtfAiTD62YEbBqQKssh5o0d1+tLESCzCB1nQFsJVR0DAUoJHHRRkMgTpAYJ4GrPARQHdMHBUZDxjYgpoRHwDyjAUNtH+J7yZ1OAkcp6xqj/JOBDKYPnRAYYGjURATjWHuMagHbARn+O6/VxvwqYpddPRSjBnb5dlhbWhISp7HJzjXOwmMr1FSHhnXIeBkC3q+T8uhys0iCJBd70mFwqoJ5uo9/ba2c/wG8MqA23N4cl14SJd+aAnjVl7E0Bkdam6tksbXMLKsDoeNm/+doQgS65pqb6oEBqTNOBqrWt7XZv6rsAk3eN+iZ8z0pgPsZcVy2zkmX+UiliYBQwH9ng1h5vY9/HCT8qRp6AV/1mCvAbHZOTP1IqSTyaAEOhBXWsnKCAX1psI+5uZ7Akd6+q4QBhSgkrUI+fCKmaI862kOoNwFfo/BzbbcSplhNgEoA5ij8yrxwqV2On4+fkTheybPa8cqhSD9o7BeztIC12WMlBkrtsPT8A8E2Faj5DtdHAzxuu1SzyndTM4Ta2wOw+rtOcJefJMahnZFST8wXwM1KpXK+sH/pC5pryksgSzAXGCUylVh4wnYBSgAwqLEB7PdjkO0D6RHDeI3WtbBZyUhkDflVJuLKXj4BQ/EwHxvKDlpmkSWF6ndvjgNmOa8/J/UOENEK/MpgTmfHXBpb4VLWDxiQ08T4iXOAOdiHKODH4B0hSWuD97fUJwSVsVA2oFsamr4EUeXwAZvwJ4Bd3TvBr1yO0PVKMIOdQzTu4+RxptsH3ebuAm20gSSLlmHWrzLQxG3Lq2WiZnUXCdlQ7WxLt7LZa5xiRpSI5CW7ZNzAwFSs+iC8SqQD++yXqWLamJmStZOflXZmEzzOxwVw1TGgcN2F9LwBZwj6CfXAdO10fWKB8uE4d9j8nLC6xGeMgmTGZMUiIPHfYhmzi7HhOF9awd3reCqM2q5pgOqkMWDP91ra2ta1tbWtb29rWtrb97PwG/YzReNGigfTYD0G6CaBw8HlECRotbWvejIEdINfd4jejNvbL4gRWSkyNM071s8wQCj0HoWIP6hhUQ+yRTt6EaFh9qV0gbp9Av2jRaxBosZdr+KUQEUeyT66uUG/N0RzZwvziI6i3OBjUXvsxnPynT+JOd/oc+GYOuB5py6Glpiwo904OGZQpwrXboNAhbp9A6DsOhu2cROpbDprFCCu5RFUDqllCKGebq7xn1QwXwxNjrJn0feCMWGX3QfqYiDOwuf7WsDYEywMW4M/WyqsdGVCwnA/L1ppkedrzrkCfymFJMM2CfUGyvRNKjTDN+rf95S2SkTVMK0GSXA/GWbCvAJeWZZEAkMrnTh0LMAD8AA3QELxoI3pZuIfIC/dAUmvFIQOxjjxipfKXhEUVEdKwTpuCeyEm7LYh/63Z7tZC5O86HcMYkeCXgjh6rDYwF1PK10ZMQ/Av/25C7tI7yn/r9aEsBGgtKJXyVDaMgn6hBGNtADtNBUjsdvYVAJJJOJBzkzPuyeUgSo5bphJQzSG45RjlgN0HLAN+eh1aMNqCfUvyxLI/BdF4
w+VD1GCfyuv6fI3SUlzfsgd47Mp+B8xY837KyI4FbFZ/QkyUpdhooi17h1jmoDJb17a288omAthjP2WJgaOAXwzMSkYF+AIc6vxAZr7UOW4KyFdwUGUaB+DeFFveJltN+WMrwL4ckBe2uqKZpZZfxwlL3R5ct4u4t8uA385J9q8WO0hdi06YdABQS2KSyn6SqBdkNYd6hjQ/jM7P0cWEU4uAnS7iVBtwsg3MntysUTlCTcBGTQiRcKipMvvroo0a3gEzT3ALSaba4USvuBDAb3sXYdEidgzYuKYCOanlN2/g60oUFOpcz0+Zfpn5RqVGnwJoeVy1fp9KpYrPkM/Z2DQBT1ignhjoU4UCQiFL6bw/liocsK3HPpg1ZRX6hoGo2APNjNcEIWRAM8uRjlmf0GQPfuawqsD+gFBRgzDdsJcaAZTkmDFsK0iSEuTzLiYs+ojdlmsdt30QxQOHEBlIaSoHBIAl0iMABxf57z3dcQug8eJXAY0nOEpw1ZzHuheGXgZQQ2b4hVOnZL3QIrRy7rxD7Hr4rmeQGAC2jiB1LcjXpbZfjDn/bj/Wlj6bLcNTn732GrCAn35m10iqaKBj6R2hlwG2z3++xkj8BElMG4HN1lad7iU5zYnrPV+bgwbj8r/8AwP06T2VmcqujB2Kn2GTrZwAmFO0aE2gKkmjmgyrF90+oKWuCfYDNmX+HbP6cuLAeHNZmzOb27Gvh+IwaWLYYOiy/wmRT197V2tb29rWtra1rW1ta1vbKjuvQb8cNB+blfFUCZJoAusj2ScafYbQM+Ntv4C7zSoedEoynKeCSmO5KVPzJDN4pP/kgBTaYRapbUvAAgodqN2B63a5Zt8pBvnizceWWH3d9gIphFzXxdZ28fMGjWcmnasrVPMGG5ddArd5GLSxZcY2oDp6ES6oK7Qf/VvQx/4v4Dz8BRdj4+LL4C75HCTfsPTVYmfAMOy2T0hdkA5LdS4GYB+Dejn4VNWg2Qa/zreASjPKq0GgKbkKqZrxWGnGtkg06WpYgQqV/AEKMy24sngcAx4hlgATs6MKKGUX7vsuiGGCpq7iBbwcQwbGUgEoQ2SGX0TKwKUy/VJCDmYAwoQCb+MFWPNESJTgJ6R+MlAFZjNY5ugg63e/6xwGmJJj0B4p8OeJUAOIvoB2CZTliRL4WPuQULsKm43Homfpsr0+4nBT4fCswl4/w827ncl2jwNZz43GY7Px2Kg9Ds88Zp5lz2rvUDsnzALKAK7+PWZEJvlrDBiNIyiW3Zezp4UB6hGBFDLQN5iH7FxkA9Vqbhjw2Y+VstKmGIPyGcm5cUmzv1MOQg6Pd6JZk6Fvx8eC0cCQUTuWy1xlNh7E58MAfa4AfhZctZZZeFiuXbnKNJjrEtc7HAdg1fTeh9xf+tszMctu+nQbOZeZ27dV+2u7g5p9tplXSilLNK4KkJdnIYCUCns9B99H94SZrwjg+dGCdEAB/CwgkwF+O3mU55H6BqSPrgxYjXypUV88OVSe+BgkUYm6HVYo6HYRbj6GtLebfa3ULhB2dhDaHmHR5qSpnABU1dx0CEAzZzny2Rbi/AL0Avjps7yLCTcvevzTzbvoYsLhpsKhxmOz9rjscANPhM3aZwnrIzOPjYpQhQUgCV+aPKXsLD50BvnIOWb5bc2zrKc/dJilPTePgDa24OZbRu6ygGFJ/LOBdGpOMKkGz6gCFIrUu14/+uyLkb9LXDfPO5JadQqamVM6Avzy+QdyvUUL8iZht6vqhwUWsuRkYjaS9V+S8+V4Ta3wAipRLtk2CTRQua49aPJZos9QBmYIUYpdk6DimtwUc9IX+zvs9wS0I6nzPnKt46Zy6LLOIW8THfehCwld5dDFiFnlBEzU/jnM/BxV3TLw0lWZlZX6FkmA4/bEDvrFHsKCWZLkHV8/WxsMGscAmm+y3GfdgPoOVvJSe8a+Q5oE/PIwyon2VJLWBnKujkwCkFxbSX8joJg4ZSEUoM8qEJD4xojLCUSns1UY
U1hxzfoJX2wpKXDERtZ7LIPo5vocqnys7h8J209vHGXFEiSRQcpBrPLtc18QQa4qfis5cZZQFEwsIKi/8UUdJCc6rEpkSxFOAE1W5Zj24yKGiWVTSWWfDlv7Vmtb29rWtra1rW1taztf7LwG/RBTlvRUSbsl0GUExoxrvO1r47YGgEhaCojxFw6IEzXPuIFpQHBqnxLDmFzPpFE9iH4P1O8hnDrOzL7tEwg3H0O/aNGd3BF5pz2E3RYpFkafEzYfAPim5sBQw+y+enMOf+GdciAodS0HktoFB1qqGt2nPomwvUCKEfWCmYQ1wBKg7YLlpXZOIJ48jtQu0J7Ylu6zPBB5B+dd/puE0ceSS3WRm8o1/eolwE8BvvxewbRRtrZmkCooMAja698Tsk2D7OSEgVzO2ZougBUs04XsuG5fZiJKsEQBs8GiOBbQSfs6lekLFPBPgaosSWqzfqfAvjGYpNeyOYahfGPJ0Nflv45bSglBAn0ZkIsJLjDzDwD2eq7dV3kGAGtP6EJCK4EwNe+4vs28YoBPAb+5d6gdofbCgDRAnc8Z5EMmm2WCRk0JnxzDAvYNWH0yjpbxMpXNPZin3CjIMgqMrGIBn9ZU6s4AflqLJqJkticBsROQj3dJDksD7FjO0C/Z5kM7E8BvLJ/JXRhJ64LOGGQbtz1dM2h/cFP7M773rY3ryEydIvubdTb62s4rm2RolXnMuYnnIyRYGVGeFeKrcPCfluaI8uM4ZIVNJS44h0Fdv6ltYACnKR9Mk7KmGC3q2wF5Dlcpz8zw291GGtc621tk2cOyG9mPKhRITatYzZF8jdhsIdXzXHsMKJJ7ADJYs+hD/qyPAFxiyVFJhJhXhMYTJ4Dtk3BE3sFFB3iVbJ/BNRX7f8LyywoKzhffysp1+pKkNviHEtTPzy873jDJQGOww5xDR+VZZ/nWU1N/flaMJt5EXLOOgb9+CZiAsv0E/OPfLB/PeF8qsR8kSUTlX1dZVgwYfFbAvVUWBOwLiROh1NfTf7rN1O9qp4Aa+7hdBDQVBsZfKvX/EmqXpB5nVUDe3OGIFAKrgwjg122zfK2yWcl79IsWruF6lmlvgTRvgZnxd+QcZ7DPjom8W1V7V5OrlN2viWoZ8NMNpxKoRmbBIpXo9mSe9WfgY5Rkp/2f51F8lgSZF0ffJxLQKCBfl7n/FlCfuCbPxpNQH3iQkGaOZeBrar8UxDOAvkDTyzuwfqv9fp/7CzEWeuctsOx3rl2rta1tbWtb29rWtrbPaIuRE8jWNm3nN+g3JYmSIkBS527CcmH5QcDClSCAlTmxbDRdHKk0n13Qjha3AHJmerJZ0qsWpDkD2bD9EiM9pEChgJu5jp9mn3d7oMUphFPHEY5dh3DT9WhPbGP3huPoF3ucobu9QOh69LtFmqfe4IBPc3gzB3+8vFbzBm4+lwCQHM9szmDcxZ/Ndff6Dm7rCOL2CXSf+iQDjDs3IO5ucyH3GBAWe+i2F+i2tR5IxyCf9/DzBs47VJtz3m9TD+rJUDMHbR0WsK8B6mYwnpxFXjPgJ4G0DP7ZcwtIrRV+DWbYB3U5AKhKpMozAby9BT0cJmhG9vrTcwdI0Gm4sM7sBBMM9c5NFqNPUAlMzujuo8pWGsCLCI4IlSvMqIHslf0bKCw/MoCfXtOa9WsDASYYkWvJuYqz9KXGD7MPS7+d2UcGwSbuKz1XofIIiZmLynrYqh26mHBhYInTvT5iESK6ENFFzoC3VnsHTxzQYoYfYV75DB41AvzVrrD9KleATz5fw2PQT1bJgmaGQQoY1LIaA32rwL5xgFQ+y685kcHU8RszLPYz++DTa5JclqRVRkWug4NyrWugc8zus8y+VVKeZwL20egmqvWwLJDqSqa/BbMjlpl2tyTwM5aHGlthKbJlhtI+gT43hfrl9tK+v73NjHxmbdxm7a/tDmmTbK4UgdCC+hogB08Vy94CubafG28PMJvEVSCqAJPQAmDIHjFssGT8KPXXOFBe
lYC5JlfZQPO4rhxGwWen7ej+y1fZ/wuSSBVauJ2bsiR5L4oFcfskg33C8FOJdABZNcHXFdzmEbitw3BbR+A2tpCqGeLWxYjNBnZRcw4YGMSrQJh5xmYu3GBmYBdjBmgAYKcNmFUOhxtmItaOsFk7zD2BdkU61DclWWo2R73Vo1+0IO+z/+XnMzRHNtEc3gTNxN+ab2VlBdRNkeus6ulrQX1aSbLK43864Nb61QIqIiErDmhtZAWH9DFv509Hpaayghi53eQA37C/LOykDDrHCCRiADaZmnJEXJNQQc6BD97zM1P6y89FGkjV5utn8AwoyU0RIjWpIJD4lxYATAlZiryL4u/0AQth+VnQr6lcljyvzIMwpiSJaxEhsZ/TEVBHTu7RdkMCNmsncpkA4DBr5nwvuiozUxED++0i7dlKAiHACYMpRMS2h583AIDZ5glQVSPO5nAbh/n6UH9y4vG4qr5uFsMwfkrllOlXzjmNfCs9Z1bK3O5rwA4j/sMlZmWinIrTmm1nZd9FsUOZdV7nrylQDxZIG4HQqmAiZQB0e3vUyhoNoyMYA5ma5KYMP00aLRs4UOJbaMrPTA6gJGtRICd4WEBwVSKGBf2W1sJnEKDRw9Can0DxNc9UTeJAbe1brW1ta1vb2tZ2zkxVG9a2NrXf+I3fwNOf/vRz3Y3brZ3foJ/aeCE1ERxfyfAbMGpcAfSs5ovD9OJmDPjZ19yo6YtK97nTDPu4DQ0oAYbdtwC1u6BuB/HEMcSTxxFO3oS94yexd9MpLI7djH7Roj2xi9AGhK60WeUsXWb2eQX6BPRT9l9qF0gxgNoFB4NmG3Deg/wGMNtA3N3mYE27gNvdlozgFpD6MbHtc+3AGKLUlOEASgpRCAHSr6pm+U5bV6ZqGPCrqsGiMS8cLcg3Oqc5KIMSnAkTAAYwBBE0eK+vCRzNtHKgAIqcoeNi9oP6LZpRrsHF5JAUANO+ah0/6H4laJT7lyZqobHZQGCWlHQMpDBQglwHzdYlHDPbAM1gN9enXsYwwN84c3+0SNduZvBUjpX6tgQXJgI0JPcB+RreV2h8heAZAG1DHACBIXq0gRf4uT6gDJAF7Yi0pp6pA2MlIl0Zi1p+t1SXUQ9K/t5v+Z2ZC7Z2qJHvXMW+mGJDDDcYjRVHWYYB7FW/HfVP57X8WQ5eyjkjykhXlGtGg6xjZsIU4GdBtH37MgL6CltD3gMDdui4HqWaAn9j034oaJ9kH7qtk3s4kgL4nJE/+LH+RkAMe0g2GWBsUdgZq2y/79a2ttudKWteLcUc8IX4IN5XCOBnlt6jg8D2UkIUck1MtaWaygCyvLluM+6b+gAKTlifbz9GycjXG8zNKYq+dsyS6dTvIZ06jrjYZonyxQ7i3m6u0Rfbjhl+BvDTpCmazYeAn0hJst/SoG1jfjbFGDPAFZEwqwiXbBU51C5GxAhhrJORNxSJaiCDn+SbIofezOHnHeqtOZ/SuiqyjPOGFRSE2ZcVFrwfAKdax2/Kx1JfID/LxtuY8z9+DmY2pjYnvg+ZuV+BMTt1Zoa9svXH1wc5JBJmH8DHkYGncq2lpd+MjnF0DeZNJWmGZdQxYEZNhR4SET+7XUmgUb/MPhNUsr0LKSc2Rdnegn2AKhv4AfjnzUMyMJUNWr85JlZJQA8ET6h7rhc5rzzLxruERJ4BdScQmAAaMTDbL+aSAAkxJCSf4OoeqakQ255lZFW6v+e6xRQD1wk0584CNA60EvizgJ+e70Hy2uSvZMwx9EVWyazaWFFMLIN/OsafTXKyv7XHBCDLhSdJ2EqQdYGylV3FjFPnWX7Y3At5fpsKZk1dlCvM/txpMhUwXLuO/dMzSSYb3O/LSWz5Xh/5uBbMXGJj30KzvV4z/da2ts9M+4d/+Afc7W53O5C2+r7HH/7hH+Ixj3nMgbR3kLYGN9a2tmIf/ehH8eu//uv46Z/+aXi/TpBZG9vVV1+NN77xjWvQbx87v0G/VQsWBSai+cxV+y9y
bEBotCDiRTCkvTQN7AFDmZSxtMmYhTYVICG31MaAcZUig319B+oXXMNvsY1w0/WIJ49j71M3YnHsBPZuOondYyfQ7/bYO9HKwj3BNx6+4QxdrfGidTmqrTlnpzci8+Q84u424ER2c2OLF7FEgG+QXAV3+CiS98z8cx4VTqA9uYPY9lw/MEaEtssBA7XoIlyIIump/3wOQuVafpJ1PgBKx4DfqJZM3kfiwIwGU/izlOW0xgv1COQsc+tbaRYwTay0QwJiLACCAhaa2asLefIVA0M2SHaaxXUCpA7JkBlhM7tLEGQI9inDz4JeFvDT41PJqkG2sVnHZ1msKZNjyAyPfNxgNoiwJVgmrV0tJUQOZM4lOY+KHJpKmAbkpd5hyuczTpxDNXt848ARYJiQ2tfMADB1VgbByuV9cCDP3Jf6qvKopm5R6dgEyLdfsHTq1QTUl+aXfSzLmGlb0k6uf5M4kBj1otMu0zCgMpS7XK6pMpVxfSZAH7+nwXcK+OX9gc/ZgHG7j1mALsusyfFocC9SyvvX7H8HZnI4khGWsZmUVDMDMqUWaG0F9nvb2sS8eODtr+0OacnX5Zlr56G+ZV+KCL5iMGkwRdqkEPM7SiwJGiTYngP3RjJ9YBEDaeJB264azGWDfVn/asSqzsFmPUZ93gWZt4OAFd0Crt0G9nbQH7t2UJc4tl2piRyZ6ZRihJ83IMeAmjLn3NYR0OZhViEwyT69JLN44ppubShyjkSEmXfYkKFnZr/Doo+5nmmpb2qeYeQyUEfNDKRKDACaEEDOIXZ9Bv385maplyy1+uB8rhuWGX6+WQrMU0qZrTPwZSbA3iU5z0FDbiDPp/4LoM8SBoksY0v9GJbnNrLPCjhoktVI0nMJ6LPXjO27/S4iK23kxBnZzj471aZqzRYMiRlKbuKYkvg0muC0J+y+Lka04jcrsNdUHODYbAro15gERX3mxVD6FF1CSFyzuk4see6IMOsDZr5i5YQINMokGzGYYog5aS+03HCEk/c9Yic+vgB+qWsHoBKFHs43p5U2BYpPy30vfqv6s+prrlzLDdZxw6+tv2JldIlKnWmSpKbTPdus/2l3Y/0TrRZAqawJBkkRuh5IUjNPj2kCHBs/x6ee6qvGV5mxJGoRuTTE1BgCoGT8y5GPOkh8Bcw6O+W+T+GmAxatGx6TzsnnFXi39q3OG3vXu96FK6+8Eh/4wAdw7bXX4i1vecsA2Pme7/kevPGNbxz85vLLL8dVV12V399444149rOfjbe97W1wzuHxj388XvGKV+DQoUOfrsM4Y/vIRz6Ce9zjHue6G7e5/bt/9+9w6NAhPPOZzzzrtm6++WY88YlPxMMf/vAD6NnB2okTJ/Af/+N/xDOe8Yxz3ZW1re2c2s7ODn7hF34BL33pS/Ga17xmDfitbWAvfOEL8Y//+I/nuhu3azu/QT9AwJTl4I8GABJqZMk9w8BZueCfWARl6RWvUlM0CCaozEyyvxUZKvimBFDG0pPi2E8BCwAKqy/2oI4BFNduI+7tIpw8nhl+i+tvQHtiB4tjN2Pv+Cl023vYvWmB0AbEjgM+vvGYHWnQHGowO3oI9eFNNEe2MDt6SKSoNuEPXwjaPAx/8WWZYUcpIXUtws3HgBgQrv1HGSau78dZ/xFU1XAbW6hDRKg5uBS7Hq6uMtMPQJb3zPX8vGOgMQeeStCBvC+LXiuTqiw5lb3ROn4KAkJBs5Rlk4Ahu8/WcbNMopgSkIbfA6UW3eksByWA4aLWDT8bBCdy39IAROH9E7zIL2r9EbUs4wlh/BmgT/s8BvtsYE1rjnAAysH5Rjpk2GrB3DPa7xhFurQvE4iVtgwtX7N9l9ui2C/ddzl4oPeKvTc08EQOlTnfZXsq/RmN5xIT0XxHNgBpAL7M0rMB7hVIzfhKyDJT45qftg85MLsP+Kf7Na+rGBIlcJSQdP7bZwyUuQAY8M9VS6y/An7REjttv6BdwjIz1ezNBNko74dM4FaBvrwf
2/VRuyzdlYrk0+heLXUkp+/ZUpenMAIDUWYxMsBHOTAbE0v/8j2qQWmgRqmt6b0A0ucC3Fvb2g7avEeqZktSxRTBz4bIDCpPDnAGfNF7dSJQTZDn1HieGCWDIMXMOBqALerbOc+yn5ZlFTr+wz4b9rEBQ1v9rG7BLL9+gXj8eqTtk+hv+GfEnR1OZhKQY1yL2DVVkcrcPCKg3ybckYuBZo44O5wD3ck3hqEONI4Y9ItAGxMaB8wqlxOQguNat945pIQM9tWe0Oi4S0JR8jVQz5H6TdAm4A4dzaAe1SfZVxM/S2XTcy0/qZucfINUzZCqOTITZwymuAkwYiIATqNn4qrtbHLJWG6ZwYpRso6jIeCn7VrAwSbGuHr5Ahg/843/T3BACvxc5QMuLC3ZBrIm8OPjHz2v9fma5Gj0OemdJqXpcbPv2YWYa/ppntyscuhjQlM5qedI2Gh8lnZXzM9Koeqzm5Ok7FhFdNGJRLruR571rmIJyaoCmWCK88vnbNJiQAq8HkDfA5X1ryIn8qD4yGP5TWeYdnretZ6fg/5t/K/xtUSltnQQSe0BSxQlsWdqikgJSCQ5bzEtyb6fCSilm4SUQKmwGwMIzrE8MTyvFQkoryI9S8nccyvAscEhk4CKBLhESDK5qj/lnY7jrauLXA5sxbo4r7dpkCA4GCo7V/hR4utoTay1odUv0/si/y0yrWtb25na9vY2vuRLvgRPf/rT8bjHPW5ym4c//OF4wxvekN/PZrPB99/1Xd+Fa6+9Fu94xzvQdR2e9rSn4Xu/93vx5je/+Tbt+y21t73tbfjd3/1dvOlNbzrrtlJKuOmmm3DRRRcdQM8O1l73utfhuc99Lt7znvecdVsf/ehH8S3f8i245ppr8Cu/8isH0LuDs77v8e3f/u142MMedq678mmxt771rXj0ox99rruxtgO2U6dO4Zd/+Zdx+eWX48u+7Mtucc21lBL+03/6T3je856Hj33sY7jHPe6B7/zO77yNeru2T5fFGEWV7uyTnK6++mr82Z/9Gbz36PseVXX+w1u3hZ3Xo6KL9lyzY/ClBAAcgAiW/NkH2LO/GwAT422dAyBta30DC/6JDUApu8ixwSrZ1yB4NQp+UeyBvmVmX2gRTt6EtLuNeOLGUr/v+pvQbS8y4NdudwhtQAopA36+cZgdmaPammN+8QVoDm+i2pqjvuAIqOLsdHfoKNzho8Chi3IdCYoBVLVw7QJxdxtpscPMvsiyPwA4cOQ81+ObdyBfatzEEOHqGuTL9mNLIZSs/xg5U9g53t7W5pEs0aWxdW5yQWzNAn7AECCYmm5UGgkYspFynydkfgbtTQHL9hVlYZxlSEdtadAjguBSQqTCRlQbs/mWZZFWA35T/WAApgBsA8nbPDgme19lLWNfAOos78SfMUNE2W9SeygGZhgAgPNwEmTKQPh4zMZZz/aasGAamWvBAmSjsZdOLL/aY52aJ1a1MWYyWpD0VtpUzRrSbGoTNCFIgDODqPvvcyz5aTO3ifhaCyjBOa1BNGW2ht9+sRg9CkclCGl7uQrwU7Ntl7aWt7P386o6ewyec/p9SBJodAkktZrIy/5iCaClpDJ6wwSCyhF6e4qcfG/zP5bxj0+PTQAvB97+2u6QljSRBuBgdBjOkfb55jQpaop5Yz6zyQWTbJ0BM9AVxYVROxnQsUxEu81UEoX+VN9PJqlwHT8stpG2TyKeOo5w6tSgLnEKgZORVBJdayFLHWKndYg3tnJtvFTP81wd4HLCgieuyVY7AV1CSYKw8uJeHt46dlnWkwyPWuvZ6bPbs08XY2DlhBhYdlFAPzfbYPCvavg5bKU9/agO9UiKnPc3Iatux9wCfva8pAg784/BH7XBlKruMsq42Hl+illUTrhbfi4qSBp7gERecdx/IIMY2dPTy2x8PZ7GLNAVRW7dJq3sZ2P5TivvqoDfKtnpzE7Pw132GCVZxSabDXwn8c0U8LMgd5azPQM/Y5AwQFW+989U
7lrBv9MGBWS9VxLoJjah4kcDQ/9DVS+yioSAaQrYngngN07I49+lst9EI7ZfkvuskiQ6FD9uBPhNgupTw5CPrdRCHvhJel/a36g/aLbJPiWGv11KjpuyCTDWJhBMSnuSW2ZmpuHfmpQ1trMCM2+trX2r88Ye8YhH4BGPeMS+28xmM1x22WWT3/3t3/4trrrqKrz//e/HV3zFVwAAfuVXfgXf/M3fjJe97GW4853vPPm7vb097O3t5fcnTpy4lUdwZvbe974Xj3nMY/Ca17zmQNp7zWteg3vf+9548IMffNZtHTt2DB/72MfwpV/6pWfd1kc/+lG8+MUvBgD8i3/xL86qrZQSfud3fgfXXHMN7n3ve+Oe97znWfcvxniLQY1V9rrXvQ5vf/vb8X3f930H0t6f//mf44EPfCCaprnVbRw7dgxvfetb8Sd/8id47Wtfi83NzQPp20//9E/j2muvPTDQr23bszpOa3/+53+OBzzgAUvJALfE/uiP/ghXX301PvWpT+HFL37xyvnmlloI4cAZb6dOnToQFvP//b//F+985zuRUsJv/uZv4sd+7Mfw2Z/92fjmb/5mPPKRj8RDH/pQHD58+LTtpJRQ1zU+/vGPAwB+7Md+7KxAnXe84x148IMffGDjdv311+P666/HF33RFx1Ie7dXSynh5ptvxtGjR8+qnWuuuQZvetOb8Bd/8Rd4xzvecSD36Vve8hYcOnQIbdvi2muvxed93ueddZtXXnklLr30Ujz2sY/FkSNHzrq924Od16AfwEEblkeJBfyxAFuKAMVcn2mwwJ8CZXJGO4NtAANxS9mXVVOyfmURlUzQP0lwJddEGbOTdH9eToFtQ7PrpQ+u20U6eSPi9gmEG/4Z8dRx7Fx3DDvX34T2xA72jm8jtAH9oud6GyHxIr0hVPMKGxeyfOehz7kTmiObmF1yUc5Id4ePAlUNt3kE0EzvZoOZQaETUNKBDl8IP99CiAHoa6DvyqLfeVAjGb6O5T4bfxJhsQfyDsG3LAMEBvhSCPI3y4zGrkcdIgNCe7tcQ63vOAtgvgk32xCpMV8kp5ThZ8+fq3Kmr2aMTtkUOMZjL6dCAD+bYT4G22IaCn7q4priiBk32PEQ8LM1B3X/mp2s7XFwj7OxnQEatSVlSekxjcG/vD9lcBlt+Km1cgnAlWBDYTvKcVEPCgLy9XscMG13kaU8uxapb5H6DkmYoFlvSvszBQDbbU5HmRJGKHkJWFaFLepnGwIQVyXIkP8eBhqWbCpwbfdpWQETlqU/991mn2COBR41MG2BRBuU8yJzp8Cf+X4cpM3T2wgQ1WDZ2DRT3NYgSkjMjqOUazBp0FavZQCTUphaezJieC9NXbMDMFyCPcMxQpbkHUiC2rZQAujA8rWuIdcQVaaUsnSs1oyMjkx2+TCYyL9BZm3wIVP+fjwCfbWWoVjbeWTEvguRJG2oT2I3iX1RQcAweJychzJX9DlNKfKzXUETO18hFhZ2BgqoqCjY+cwyv8VXSwoU5qi3K9QnbROQGzdmgI+6vQz6pZ0TiDsnEW4+hnjzMYRTJ7Fz/XGExR76RUlkqrfmQF1x0tThQ6D5JvyFl2ZZTxLmXKzmSM0GUrOF2GxiERJ290IGqWaeUO2dwNFmE6GpcCOFPAdaEMc7Zi2rvzKrGCisCNx3e15chVRzYIIOXQCnqglVk2XYyflcQxnOM0NxvsUMP1+zDyj+VbLMnNHzJRg/Jc+v9vm1iuEXI1dzI2Y+jedmnVvz8RsfzT7VrO+Uk5XG+xqrBACAkz5Kf5PDkox+/hvabpBrurD+dJsBCGj3vcIHUHaWMtiIAO8SXCTU3qGOCbWL2KwdukhLKhMW7Bt855gdyOw9/qfS1N6TgHzEkvTyOEoDZpWucRjwZ5l/x0l7ymr1BHgP8sTy/KrcYf0xe6wpAikhJZb3VV9V2Wn6oCxy2+V4xiw/BU/H42t9aX2eKztM20nKgqMh03is8qEKB1qHMaRyzenV
vB9eqd+pjLE0XGo7AvDK9gNAaACZR+FKwtmg/p2qoExcT5qUBSQksuUIMFjD5LrVdg2rfbZrXzu8wngdHmAc3uNjm7gH7dp5rNCSfFFn4ZIMKUv5q0qL+l1rgt/abiu7+uqrcemll+LCCy/Egx/8YPzsz/4sLr74YgDAe97zHhw9ejQDfgDw0Ic+FM45/OVf/iUe+9jHTrb54he/GC960Ys+Lf0HgK/8yq/Eb//2b+Prvu7rDqS9RzziEWcUkD8T+6u/+itcddVVBwL6ff7nfz7+4A/+AO973/vOun9EhOc///m4733vi7//+78/674BwPve9z4cO3YMj3zkI8+6rac97Wm4+93vfiBgJAD88i//Ml7ykpfcavnXj3zkI3jRi16E//pf/yt2dnbwspe97MBAvyc/+cnY3d09kLZuuOEGfMM3fAPe9a535fv4bOzlL385rrzyStz97ne/1W1cfvnluPbaa/H2t7/9wMCmv//7v8ev/dqv4Rd/8RfPuq2UEt75znfip37qp/DoRz8aP/IjP3LWbV5zzTV4yUteAuccPvWpTwEArr32WvzWb/0WPvGJT+D48eN4ylOecloAzzmHb/3Wb8VrXvMavP71r8eTn/zkW92nj370o3jc4x6HBzzgAfiTP/mTA2GZvf/978d73vMe/OzP/uxZt3XzzTfjyU9+Ml75ylceCHD15je/GSklfNd3fddZtZNSwgte8AJ86lOfwmtf+9pb1cbe3h6uvPJKvOIVr8CnPvUpHDlyBMeOHcNnf/Znn1XfAOAVr3gFnvzkJ+Po0aNnBc5b+/3f/31cc801+IM/+AP8/M//PO5zn/vcqnauueYa/PiP/zh++qd/Gve73/0OpG+31s5v0E8XH5CsXVQlExdYDtjrGmXFImepXZPVqJmZHGTHcHFjVoVJ5w+7sBoHAuwCSM2LVEoEsxITB9UoBlC/h7B9AvHUcfQ33oC946ewc/1NXL/vxALddoto0qV941DNK/jGozmygdnRQ2gOb2HrsotYxvPCS+HmW6CNLfgLLkaqZoj1BgOZ5Mp+d46X/tUs9+kOX8jBI80a1yHrWiAGrvvX14gxwpvvyTn0AJJziF0nwxwROgbJeteC/DZoNkfc3QZJW3COS0ZoXZzM6jT7NiBKyfQdrhA5y3oZ7Btk+46yZFcGG2DqhOnxCfi7VM/N1oPR8w8MAD9lDWrNQXscw/5TPmor4anvyXynx5j7Zw6A90dLIEvu1woWIwOgDpWX66QHQN1wDFLM10fqOyP5NAoeKMgdzXcxlvf6/dRvgXztZZZpVbMkrXNIsw0OaMpnzvvMWiCTacwNrAhS2M/MvbzEeCC5b305x8kGr8dz0BTIN8Uc0ICjzkHms3GCgu3LYF6ZCuzo5wbsG2N0NiNewb6IEqhzSesTAdPcXW23sGU1AIkEJNKsd9kOCV5q6gyl3qazu1f5aATk+1ZBxlWgHwBhglMOFhJxgNR5EvCPA2mZAUgCekrMNCFlFmCW3iUs1Ta0NYQ+nZbILV+vB9z+2u7AJiwU9n+6JeAnSwZbWWOtX6u/tzkWoR3OTSkO/bKpusuRtxkEw/X3Zk4mMmznccKDJmKh+HXUL0DdHqjbAfZ2kdoFws3HuG7fzcfQn7g5s/tUmpyU9VTXqAXwc4eOwm0dhr/gYq5B3MwAVyG6Cmm2hVTNkaomP9fn1bBvmQUIoPElaUBBv4GvIs/rxkkQv1+UsbdAKhh0JVcV1mGMQMUylwpKau1mmhtWYlWS1PLzSoLylhGutfh0vs4JQQoGxNFzaXDQ5nkWe0AYYMEkauXaaERLCVe6/yKbqe8FMsqMvuIbDiS+9Q9z7eX62eNnrbFBrbOIJR+ekklPGSV4WV9vrDjB55VQOaB2hLl3iDX7N53Ufhwz485Ebt6yAwFkiUfnCM48kKZmcRJw2DV1lrBlVqvUv5Sa4L6uMiBYGhz6HQr62Od60tdbAOSseoZaX1oB47H/TAKIRZQ6vpYV
B+hvmPGfG4bxB/exISst5XYnfyrXI/mq1POTdcN4fhvXzjxdghawPF+M+8BtCQg59j3Hf9t7d3xvGDbvqtrX+VjMXGLXwIP1kGlDEyDHiV+3l7p/a9/qjmMPf/jD8bjHPQ53u9vd8JGPfAQ/+qM/ikc84hF4z3veA+89rrvuOlx66aWD31RVhYsuugjXXXfdynZf8IIX4LnPfW5+f+LEiQMJ4q4y7z2e+MQnHlh7d73rXQ+srYc85CF4yEMecmDt3fe+98V973vfA2mrqip867d+64HJBn/VV33VgbQDAE3T4Ju+6ZsOrL3f+73fO6vf3+Me98Bv/uZvIsaI//W//teBAGpqZwOoje1Od7oTXvSiF+EP//AP8dSnPvWs2/v93//9s26jqio84xnPwFOe8pQDAyWOHDmCl73sZQfSFhHhr/7qr/AXf/EXB8ZYe+xjH4vHPvax6LoOX/3VX4373ve+ePSjH42HPexhtxiwd87hmc98Jh796Eejrick+8/QXvrSl+LUqVP44Ac/iD/90z89ECbzIx/5yAMB+QHg8OHDuPnmm/G5n/u5B9LeYrE4kFqli8UCP/RDP3RWDNXZbIYf//Efx4/92I/hwx/+MN797nefNWvQ2ld+5VceWFsA8IY3vAH3ute9zloq9OjRo/iLv/iLA53jbq2d/6Bf7IfB/IgMyo0DFRDgbt9sXBu0iH1+XzIvBT3yBgSQNojMYmgMLuT9u5I9Pdo/13foczY6Qs/1ZQTwizcfw/Z1N2Jx7GZ+vWmBbrfPdfvIE+qNCq72mF+4iVqkPOcXH8HsqNTqmzO7z823gNkGwvwCpGYTceOC3He/fYylRI9dm4Nu/rM+D/AN6NDRDO7kYFrXMljTd4jOi7xjRHQO3jkApxAkEBC6HslIg8a2RwoxAyVViJyJHgOiAn99x3JU5JBCzcwmzeS1AQZzGgsTx4ChNMx+BUogISbAybal7ocBDiaCQXYtzVKWJQs2n9sQeXG/TxAoSD/7WDKUB4AfmF3lR0EKXdhrX/TYpkDNOHp1AvxZU7BvnGFb9sX8qMY71K4CKoBCx9et3gMp8fmNAWlPsrYUzLMmwJ5eNymEAhYadmBsu3xtRMPcyLJTNdefoWZeQL6NLeS6RRrcFEAwg4CafWyDgvKPMGSMJGUJTASU9Xza8z6YB9Roeb4oDZjtzfwzqKc1bi8R8tkdB2emQE1gECBFwmQASYO4FvjLMlck9WnkvWazc5AsLQHIvK8SiNI+ZCktSNu2b1gG+5Zq+oFZAX50HxTZM5TM9lXMWzI1mchl8M9Bjgt8kEn+tuAf950BQG/6VxaNNGAFeDqzIO3a1na7Mjt3aAJPoiEzJJh5CnIfDxKiCkCI2Bd/zFvmlQOEqUww/pQwhBDiMEgs/0Iq9+94TrUMxJJAgTLH7m3DdbsM9O1uIy62EW8+hri7jW57gX5ngdD2RcJc5A2ddwz4bc3hL7gY7shFcFtHQEcuHtRtTs4jNltI9RyBKpnzCA3JfCRAWnCN+BfIMp8xlWc8IEw/ZesQwace1LWljiEgiWIMWmVAwTegOvKzEWC/DMjPQ2rmIO854cs3zBD0DdcGNKwcrT2X/Q+Z+23yUB5nAwQMnkujRBVKiZnszsFV1UDicznJgwZ+ms6zUwCAU2AhCSBtjgFQ38iJVLMT0BrD58MYeNbj0OuXEtKozl1+ZoP95TH4N07usl1nph/3brP2g2eFBf30WTpm0o/HwVvHD5qsxc9GfUbyWE2AUhmQ8ew7Oc/ytSJny757BHnHn8s/8k6Yo/Jb7xGnfCUD5oyBHPtcd+J7ExU/fHA+9E8sA6pjqVP14TlVkCb9Hu2HozK+ScYniKLB6Z7g430rczARsoIH+1AAuQop9vxqWKf2PGS1ipHUZ2G3FqBvvKbRNYCOa85mt/OlroXz/Gjm8Cnmqt3O3ufjcRgnzflRze6Rz53PISxgW+r4lbGVccVwjri9AIFrOz/NAmX3u9/98MVf
/MW4xz3ugauvvvqsgKrZbHZgwf213fZ2EIyfzxRzzg2Yr7dHe8ITnoCwoqTQubSDnBMuueSSA2sLAJ73vOfhIx/5CD7ykY8caLspJbz3ve89kDpr4wSMW2Kf+MQn8IY3vAFPf/rT8eIXv/is2rqtzDmH17zmNQc2Hz3lKU85K5BUbWNjAxsbGwfQI55rv+ALvgBf8AVfcCDt3Vb2hV/4hQfSzmWXXYaf+ImfwNbW1oG0dzZ2/oN+qzITFYiwCxTIQk4zyq1p7TFrsl2WzdN/vipAgNl3qYdhQAJrGriaqodiwaQYgNjDdbvAYptBv5PH0d50E/aOn0R7cgftqQ6hjSzlWXtm9W3VuW7f1mUXo96aY+PSC+EOHc2sPppvgbaOIM4OI80OIdV8E7udmwrgkCJSvQH3uYXKGquZZEaXoE2SgDpVs8wOVHlPB4Dmm0h9B2rmqPoO4dRJ9IsWoa4Y/Asx/wttD3ItUuSAggO4rcj1/ri+X5/lXJUlwLUHOagTYwmshFjAM10EW1BgPJ16kxI7YAelKJGFiSz2qQzZMeDiRuCvXJcaiLDMvrGNpQ3HYJ9d2GsMaixvmH+rgTI9n7qD8jIAHDWAkcGYWPYZUkTvCJWr0MwPg7oFAzjOg4jPXepq3odKe4YRo0+/A4aAX7tAbDuWU4uRQeEYEc21AiBLS/maA05+vgtfV3BNjdQumM2wsVUYgLM54DxQdUjKcKgbJIoMIqMAcomo3OMmO3kpmDE69xkEH4F4K23i+hmAx2qj7G8+0TZjuhrWuzTBlGUJ2eF1YI/kTGpW8nYABBjUi2dVbSa1mBJiEOafIw7QgZmCDNJp8GrYh+V9ayCLzD1Q6lx5Afyy7F3ol8/POADmHEtuuQrek4DwHGjVYFMw9wTAEKdlLtgxTQkZDFSW5LruzNrOKxs9z1I9L88/Bcp1Pg9t+Z3zy0kI0t4ApNJtzDzLz/l+KBs31ZYJFIfEcnmpQu7fEpvQJG+hb7nfxz+J/uRxhJuuRzx1HN32LvaOn8rPGwX7XF3D1cggh583WR7dX3wZ3KGjwHwLcXa4HIvI16XZIWaEJE04Atwp9rNQzRE3LsBeSPl5XTlm3yjDWE3r/lUEBp7anVKLcBScX6pHS6Vun9bRpZrlO1nGs0KcbXGfqxkz/fR5J+OrIEhQSWQDvGXp81zDd5T0ZF/1OlBGnapzpMjgrRPpSb2UDGPJmvVd7JURBNCp1G8HP9OVlWjNr+qT9fOs2cQ/XT9EDJL/GEAc/s4CUprcpc/JYUIaAS5hVhG886g9YVY5dDFhrw8ICehCRBeT1ONDAQKVYg4ALgGRBn6hI75+au/g9W9HqH2pDWlVIRK5opTQzOHnDeqtjZx8pa9ea1oKIJjB5NrI75t/fZ9YvjGpjOPoGX8mQY59/KlV3yg4ltU5DICcm5W/w6A7aTKpbul6HPkGdr82Cc9u79VvA5ZqLJcOmARRI4GpxxrNPZh/YvuZj2PU4eznlrQ/K8c7KdsZR3Ny/t3wmAe+lSR2ZD+aDHNRZT0lecOO4fhetWugZHyyBL2Glrt7m9vat7rD2t3vfndccskl+Lu/+zs85CEPwWWXXYbrr79+sE3f97jxxhsPrC7X2ta2toO3g651d0c3IsKv/MqvDBjKB2EHVVvxbO3qq6/G1VdfjQc+8IHnuiv72kGBTQAOBPBb29mZ9x7PetazznU3AJzvoB8wkNTJmeEaKDKL+OUfDjOPs+nfTmtk+VIrRoNUUw7/fu9z1uZqmRayi74MLvVcF61dMBjS9ZkZBwDkCb7x8I2Dqz2aQw1mRw+h2ppjfvERNEe24I5cDLd1mMG+zcNwsw2uNTM7xPJTvuHaNqFlZmFKSFUN+ApRpZ6sORNUCn4UHOxzVjmDLJz162JA2lvAzTtwbI5/E1CORdl+GQiMYTmzdmlMTSZ6LCy1MXgFwlJtkFVB+AL0xdHi1oB9CnbmNNQSmMzBOgv4ncbGwQLLsFNbBfhNvsfy
Qp+IcoBlKkAxBIYmxhAmqGJW5d47+HpeGH8KyhIJeM1yn8rezMwJAMkNpTuTfJ9i5Gu967kWZMeMi2gYocq6iDXLT6UQkeYNvMrKCgCd25drEc6Bos91lfK+8/kjCU4MWRsWIJo+idEw/uRcpeE9bQ50+NupIKOanRdGwONKmVID+GkQK5hzby2gxNrcRPTklgZUFFwfsxFCYsYCByl1Gw6spfxLrMzDX3UXWUbAIACt9RDHbEndEzkM6jQBWWrLkzO90SvFsP0S8rcAJJNfWASyucqIRTAwuk4iXdt5aTLn6HPR1g1dek6SK/WPVwB/ObDsqgGwpXMuAYABB1fZIGAPZAbhZPBc59fQg0IH6hcIJ48P5NK77QX2jp/krowTShqWMKy25qjmDddD3joM2jwMzLeQ6k0GRV3FLDmbMILCkCOAgbHQ534GmTcg85eygezz2cEkMoS+AH59W8ZzPz+jqkDJyzPX50Qp7SuqOdfyqwQEFEQ1ofhVwPTzgexxTSkdLJ2HtPws1AQ9Odb9crPHiSD2TKt6QSJkEFkBhZCKrCOUbbXUuAH8JgCYvNawa4wReLjKP1CQRnuvjCY+5iKXreAfSW9rAatCAjpH2OsjgkvY6yMAmgSa4FL+W5Pcau9Qi6xn7QsA6NR/HN2LWvuRqlpkPStU89kg8Yo/r5np5xy0VuSYkZt8VQBPAWmSAKE2+SjLxE7YmTDtpkzzkhR0K0k7GEiJA0NwSTdKqYCnIYkMuenTGKDKa9EV/dFjzHOW+GoMGJsN3bQ/l7B8D9j9rVoDnDbpaFUyo7a/yhGcuteNnzzF8Bv80+My56Ww/ZYBv5xYhQIOngvMb213XPv4xz8+qDP01V/91Th+/Dg+8IEP4P73vz8A4L//9/+OGOPtPni8trWtbW23xKqqwktf+tJz3Y3bxL7zO7/zXHdhbZ+hdntJQDjvQb+BpQiKyOwODeTTKBChSwdb0D0v4iWjEuSASgAp3SZnSNJwwW8X+qtYOq4ACRZgUcs1UULPIFy7i7R7CnH7BNJihyU0Q4STbPONC8s+NBg1O3oYG5ceRX3BkUHdPppvMtjXbCFUM6RmM9ebsf3O8lCuAnzDGeBirt0dHFcCAGdYNDHw4rVm+cQIINf8qxpuv5nDAagBdFjwbtt+MA4acOOMdM40pvkWMNtE8g1is8HsxKpBcA1CBNoQlwGqnAU7vdq1DMAc7NHAlQH1FGQZAHz6fSx9L9msNFj0TmbxpgjvhpJWMQ2DQnncTfigABv7g31WCgsoi+UpMM8GQcbjY/erh69tJwmktZGzlpv5YVCYIdQboG6XmZ/NIWasxp7HrO+H9f72BIjrO0CAuBSY1aeAX7e9W8BuYfwBgFO5NZGeUtA4dpV8Hsr15zyz/YDCPBwFVXIGvwZsqzkDQjaAq4FlTFs+U3I9pRQZfAIYiLIBz3FmtX4+WQ9Lj0Pnn2Gf83xk2zVBGCt7tZRsIAfkiHJNy/K78veZSCjZ60RNwcYYbQAn5cNRSapa5VpNJr1+l3FOImYugF8rqW1VO5YtqxA5EC7B/cw+0YDWeF62rOtQ8fVCDqgaeGL2X0Jh/lm2Bh8F5fuIa/qlDADagT5nOds0jubeBu2v7Q5p+mxTiblc289IeWYGnUisKyCifhW/DF3MrCQQexCQE4s48L1cO27J0jA0n6ByvDwfZj8qs/sSP4t6BvvS9nGEk8fRXfsPaG88jp3rjmHn+uPod1gq3TcOzeFNNEc2QXOP2dFDzPDb3GTFhNncKCYcZQnPZhOp3kCq59gJAsQE4MjuMa67ByBuXYwFzTA7/FnyLOA5unEJlUx4fOwEj5QZWABQO4BCyz6YTdAKXZnfLOipfq8yMIVRk4G+quFag8LGSs0m+sRSkqEbju/gDtd+ynzHfWWfFX27zKRWs0lSA0BNJWP5M08OUY7f7rmLmkCx/Hyxu4pg4K+L2s+R3yN+kvo/3iYG2mt5xNYrrSPXYZtiMeo9kj8n
Z4AaaSUV5qT1V0vNZq4t6wmonEdCwkbl0ceELkbs9RFdSPAUeFwC0BEz/lzuNj8PFfSrhNGn7L7N2mNWOcwqh8oRKscQI5l+o5J6kH3LLL+wPCYKBtabc7jNTdBszr9p5lIfskaqWDK2DwltYAn7NsQMxpbG+NyltPxMGQBWI0DKqmbIbbf0uwGWhgL+WQBW5cTHteXItKEy+1nxwnbL9gnDhLyxjKpef454zuM+RJgTKA0tA342uXHKJdN1gNZxtEM3kOFF+XtSTn4/1h8wWTfaJsatSpqzzEWVDVZFBZU2trX8AOTzosedWaIyHt3EtXmb29q3Om/s1KlT+Lu/+7v8/h/+4R/wV3/1V7joootw0UUX4UUvehEe//jH47LLLsNHPvIR/MiP/Ajuec974vLLLwfArIuHP/zheOYzn4lXv/rV6LoOV1xxBZ74xCfizne+87k6rLWtbW1ru01sPp+ffqO1rW1t553dcUC/QeC8B5JbHTzSxb59td9lYG+CFqvBFa1JM178WCbc+DeSRT0ZVJHFF2dxM0iiDD8F/ACgmjcg7/IreYd6cwPVRoP5RUfgj3KNGX/hpQyybWwh+RrRVUjNRgn89Jw1nrMtty4u/VY2oxSYp6ljMdn/8EWmhsfObK8BFMeLQapquCagijHX+VOrtuYs0bh1BG5jizPqN7ZKEKGeMRhT8/s2JIRYauEBy2MbNTs3cTBF65PxdyazWIEBlSnVNsYr6zFza/RKMSA58DncjxlmzBEzijTD32n6/IRNfRyVRQUOnHCttcLss4EvK1OY8u9P28VJkFEtpYTkHNd85A+g0q8IHgguZzUTULhTXQtykX+rmeXeASOihwJ6QQBiZoTytZ/HwDsEAQJVni13MbP9fGb8UdUUqSEF46sayTdQeTYF/PJ9awJVw8DQMKOa4OBcqRuEFAuTzMYnxvfVmClgA7nm/SCLXgG/xGEpbZfIcSAMHEwkcH2ZKeCvnMczjzWoxKcDBOjSMyuBKTM+MRbwz1oIMEBjzNKfQAlQjcFsb149cXBLwb8B4BfazKpZDjbLGEbH87XUYKIUec6TWlOJ3CCoOB7H8jkym0CPUwOEOtjrEM7azkvTIK4ynZLU3bOsKGuWUZvcECCx9av2sUS0mlmCci8t1focN6xgTuhZgnRvB1EYft3NJ7B3/BR2j53A3vFttNv60KmQJcZFttBtbhZ233yL/zVzfjZUXAMv1XMEOEQBJU97v0sSQuVc7rdNAvKu1BMmZS4L4EfdXklokGSSnJxkk9EMG42lPD0zEn3DvqBvkMixH6WgXyzc5rGSgD0lCkwpq9omtChLLWEFs2/KzDNL51llhAEMsIxZbcByUlQEd9QmfxU/R+fuFYyyCbaTHdsl5qJzy5/bNQW5Mv+XxyM3P7F7lch2AECaREM5l8xLEpMjlsl0+QaIiIngxFnNDD5XwD7nCHPvsmzovPJS468AuGPA0tZ+rObLtZVZTp0l1XONyLoR30rZpFW5tgSw6SIGTC0nx2llN8dczLzWmJhvnPhnXPuvPJ91nKcYeWOzgN8AJMwKCQL4pSHTT80eizX7dqy+ocAff+eWri8F+wBbQ7P47+P9ji9pfb9crkDn7jR4P5BBtmM9sf6zLEX7ef57/M+uKYklPRkMLwlp6mMrSD4GYvVvC/jxmuYMFjBr+4y1//k//ye+8Ru/Mb9X6bqnPvWpeNWrXoUPfehDeOMb34jjx4/jzne+Mx72sIfhZ37mZwa1t377t38bV1xxBR7ykIfAOYfHP/7x+OVf/uVP+7GsbW1rW9va1ra2td0au8Wg37ve9S5ceeWV+MAHPoBrr70Wb3nLW/CYxzwmf/893/M9eOMb3zj4zeWXX46rrroqv7/xxhvx7Gc/G29729uyA/WKV7wChw4duvVHApSFeoiS3TzKyJ0KOIyBP3IcxPElU3FqH3lbGCAAyECjH/0syGIFGAIGGtyxwSnq90D9AnFvUYA/CUQ1Rwz7
rmaWU3OEA1Hu8FG4Q0fhto6Ato4g+QahmpVFl2Z3Vw1ocRLU7gBVg9hs4bg7hCAL8w1HqIkwQ8p1cPLxGpZjZrBpIC8lPo4VYCtVTV7Ke5RMYXIM4LiNLaCq4Q9fyOzErSOgQxcgVXNmHcprTxXaPkmwKsEmekZZ9AMSKKGyuOZM4CTgBg0BPzkHS0GqsU0thmMEobAdFDxIFBl8HkvCmvHRQIUHAU4Byn12b76zbMUc5KKy4TjwZbPMcy3BFfvKko8mYGDrxNj32p4nBv54p7VcdxJw7YV5JW2lGLn2SwzMwosRyXmQ95nFt3TsMQ7kYCmWbYJ3cHWd2YDkIqx0aD4ukawiP5I500BVNRMAsCnZyLFkI0+DZimPEY8P5Wxrr/UCgSwPRhHDpIPSOW5tAtyzYLvddmAxljhj7AWwcvnaj4RBQExtzIYdXmMpB86YUWN/t38dv5BSBvzsPu1vOqh8GbMQAAdyHFjLsng5OElSG4tQucLwG7BNlOHXLfhe1rpXU6CfBqFcxcFeX0CJlEp2urJyp8ZxDPyNxyUHsc9F5rY7s6SDs2p/bQditzu/ygJGJsBLrhKWngGWYiwOj302kuOMAP2986DEv8++xNjyvibARG3SfmyC9CkBlUFzSPwR6vfgul2EnROIx29AuPkYdq67EYtjN2Pn+hNY3LRAv+jg6iK/QU5q+B06zElIR++UE5DQMHuJ5TEbpHqOvSg1QGGeja7iJBL1Sy3yI0F27yoALgex1TyhSJJKHULqFszu67imX5I6uVFq9WWT56iVqk71TBKnNpBEJaGNCSHGDPrZGqbMqAYqB7gRQmallEkS1JTNN9guYRqoGQEcVg1Ba+3pPKvMHmAI3BQ1A0myIFryd6xfaPJJJvqZBmDH2AckvvCXrkl7X7AvZiDIVQmH431TSbZRv1TNA/j/s/fv0ZZlVX04/plr7b3PuffWvVXV1fQDwhuBqICCCAaJiBBA1CgoQlREE4xBY4b4iPgClMCIr6Gggvo1igo+SEZUkl8gRmAkIEMFRKLEBy8R7Ka7q6vq3rr3nnv23mv9/phzrjXXPvvcquq63dXVnDnGrXPqPPZZe+2115prfubnM0EiTSo1ZhmsC5zo1eu5jwPgnkTK07nicVKxtGfjKa2hjp0yOS9i32EyBaQmt1vvUQMpqQpAquenILj+oW6kRECTWH5dgLAVI7oQ03j3Tq6by0l5xXkUHT6oqW2SEXKNxizJrevz0EUZY/nx81is40D2p3ksA5FiusZjrER90443TVwqTiW1ZfEQ9jNjah16PxQEt5GmLAX7VCJYEzrl/1kVQRIJTB3uaOcYnV+khncB/tkSFppUp2UijMqNgpiqnJCAPmX7Id/z+jllAurYaUNYEHC4y2zlW1019qQnPWlUElftrW996wWPcc011+CNb3zjUTZrZStb2cpWtrKVrewus0sG/XZ3d/GoRz0K3/zN34xnPetZo595+tOfjl/5lV9J/7cZUwDwdV/3dbjpppvwB3/wB2jbFt/0Td+Eb/mWb7l8p2oQYCByzLoaM93Ekwfgc0Yv5SLjHRxvKsRhZKDIZaDF/jTKDZw+pcH/iyZIEJtCx9JN3QzuYBe0v42wt42wfRphdwf9+R30UsuvWp9yMGo6SZttt3mCHze2QOvHGCST4A4kwxsAB7fBwQpyVQI6AKDxhCi7xMYLIBZj+my0WcCDftQMfmbNVMxYbKZ5Y1TVoK5lacfA9fog9d5Q1QzENFOWIm2mCbCM9RpCs45YTRAmxzDvI2ZdxLzv0ceIrueNsG5+tTaJQ0xgahQGUtRaKZGDGh4RPUi+swQQXjJu0jWWcy0QXgsguorHWAhcCcTW+DNMrCiRQpLA1VgWegJNqBxr+tiNRA8s0DIMMthM2uL0FOyLnOWvAUyb8W9BLQU9UmDB1vbR/tKAL8BjsJbPa0Cha4HQg6oasarh6p5lbENAaOscZOoDomPpT2v6vpX/5C/wuKeq4XFW
Nyloy/JT05L9oM9dhS4CfR9Thnq0QQfte5WpNCCzI8DLkKAQUTsGfGoN/pJjEJ0oySnFQV/p8ySLtKQ2ZGLZWDkyC/wB8MJig+OAWhT5MB0DQxsCfvm5vI+cmd/LsQhxfIIbsT4CwYxV5ygBaiEgRcdSUJk0MMljphapMmX3eQRQO0vB/SR91x1IQGueA1i27yQontlLOSGBZdooB6Pt94i4dhTl667s3AAkVrF3AAl7IUYGKFe2smV2t/OrCn1nk4TgHAABx9N8tcjkSvdVdMzkBgDHzDIKVZIN1e8UgWn9rYIxJe/1AfABjhy6oOt/LOWJJbCNbsZA2ew8+vNn0Z+5Bd1tN2H/1rPYvek0Ds7uYu+2PXSzDrGPqDcaVFNm9zVbG5icOMaA37ET8MdPZbBPEwUMW7ztuQ3rVfYP5/44nwp4PmsAuP2dBJJR6BMYt5AIpMF3Bfo0GayfI+6f5zUv9OxLASyhrt+vZDpOMosTrjso0u5dBGZdSHKLsz4UYAKBUAvwJ6t2we53yLVTkzKCBf2MP6WgAn/RFbUOC0vAKM+qNNCKXpAjNwlI/H4GctQFsAwgrcU2Vs1PQS5ldjNDcbGJY6a+WAb+ZN+x5POOUCo6AKLQQOIDGp8tPTVAFkU03jEQSBHOEVpHScVBfcfaOa7f50rgzxHQCNDXeF5Ha0eoCAt+G1UNMOF9Bb/k4ZpZrstcN6zcISUEaG0Dbm2Da4ZXU8RqgnkA5oET9ObiT7G8JycEOcdtJeIav87l66PJSOwrdom5n31s8d09r9Ne5SSJUVA+Zu7PsYQ3yyY79Dor8KwOOBgATFcogYOUkoOW0X0tw28RkCx/0yY0LJPmP+x3bcJTUedYlRAkUYr7ln2lcLBfzi/qX2vJgqpBdDI+qirP40Cer1U9w3OpiORDj9Ret/drYvtFBQfz9dG/1oyfla1sZStb2cpWtrKVrWxlh9slg37PeMYz8IxnPOPQz0wmE9xwww2j7/2///f/8Ja3vAV/+qd/is/7vM8DALzmNa/Bl37pl+InfuInLlkj3QZlrdRUTEGHJRnlQAng6Ee05gC5xFLRoBJBgktm0zbaphgR0sZ15Gcpv8eSniZA1e4hnD+LsLuNsLuDONtFN5snoCMBfuucgU6N1JhppsCU6/RFXyM267zpqiejgAHLFzbpXGtlHIID6YxG5YzaIlfV1B1LgTlXAa5LmeVUVeWe14u0YjDyXs4lICa131UI082clS5Bqt15QGeCB3ZDaPe5ygiCZBEHiuijBE2C1nSJOXgfKWcJkwPiIjts8QKacXPY50QKrZA4M9/TDX4aDzEChCHHgc/LZLL3CjLEcvMMDIMGi8fJWc1lFnFqh45tDfAZwE9rCWbGlQYVkIJ+Y6wqBo6lHxQU9p6zwQXsQ+gZGHQe8B7kXGKy+r7nepbeceAujAQNB6bSnyTjjjwz/KB1jETCMwHjvmapNmF1WRZpG8qaI4tZozHJTxJxdjjPEQw4e8dBPa9gu2cgOILnJxtoK5i0Zi4aYxxHDeY44GIjIGncIweq0iFHAL9hMoNNdrDEFctu844QDqMALrGklkYceLZ9qmBzkvUUwM8GnpOcp6111XWINjieTtDzWKgdKHSSnU45QBaF0UD8f0euuF8V+ItynS3wpyzABN4TOKh6F1ukcbD4KI+/sqOxu5tflWzAOOC5HGmRiobdNAaS2++BsrxzBhCR5z9l8qiNzXv6ucHrKnGdjhe5vid1B4izXcT9XYRdlvScndnBfGcP8902AX4A4BsP3zj4aYNq2khiFYMZmKyV0s9UAlgpyUJYxxQDZjEnEkwrhyp2Odgeua4hQp1K2w2NYsiAX8uAH/UtwnyW5/sgtWvN/BYD10ROfpmv0vo2F8ZVJ2CM1lqzfhSJjKEFvoYyywrSFowhc33So0p/AgkoHvOHrFmZSz2roZ/TC4uLYvalcyJULNYtfs+Mj6OwJe0/bG+g8s9EJeNe1w1AwD+wn2ZB
VpU89cTJNrXjV0NkdDaECC1G4JzWuhWGn2cgupbXtcafSn9qHcxUfxhyX0sSFqqaWX966kBSaKCq5nsk1fKrzV/Fcp7iS7WBn8+6IH0VUcOhRUDlfPI5LXCValSHkEDJxMQUfz0Cqdaidy7J21uHRsd3Oj7Gk974N8evH3/XgIjWH7jA9/S76usvAxmtJL+2dejfDwG/9DmUCYNpXja1yhP7WX0l9Zu05nbouea2PmqylMwz5KXipmP/iSL/f0Ethyglz2rtviGzb7h/Sf8f7KoYVI8JNLw7yHmufKuVrWxlK1vZyla2spVdLXan1PR7xzvegeuuuw4nT57Ek5/8ZLziFa/AqVNcM+7d7343Tpw4kQJTAPCUpzwFzjn88R//Mb7qq75q4XgHBwc4ODhI/9/e3i7eJxtk0NdSIJcl7hKoZ51pCTotSC+SS5sUIGecAiab1/y+pxwwieANupVj0Y1r7XLWKgelOlC7D+pmcHtnEHe30Z25Bf2ZWxD3djA7fQ6h7dDPuySD6UROx2+eZIbf+iZo4wSz+5o1rnmnjCXZmPQC8FigJk63OPAvGeX1wXaWxouBQZFmI2dnF2wk088S6COAg2IhpExn8izh6CZrhVQMAM6adxVnoNcThHqd2+8b7Esgar8LOJi1aEPEXtujlUxhZQo5ia6o5F+SM/IOlQN81GBVFInAmEADwFxDkWEEdYhuHLgak+Ys/j9kChZBMErXAr4pssOLQxGPG8udGO5vbS0PlWO1gJTNjB3aWA0SQEBQcE0YHuoG1FOAxQIuCLDSQCn4Z8975J60Ej8gzu4m5/lcnef6cN4jVDUaAGHegrxD31RwdY2urhDaDmGeg1Na19IrQKiSU5KFTs0015tppsBkPdWHjPV6Ash7MHNk3oYE9Cno14VFkFmzxr3TbHRhd4HHmTIhmZEW0ZlAW+WnrBwcuhwwsX1k5yAdK8EAs2ApVedyna2yWKA5jv4XOTCqoFUaC/oZEydTIAsoA6own3WRECkmKTYNSCKaOdPJm2a0+QGV1cl9mxl8IufpODiqTL8Fhl9vavgNstap54CVsouHUq/RBcBJ3ceqSiBCBDh5AZCgVkjBapudH0BJfi2agHK+b/M5EgFtvQrirOzy7Kj9KuAQ38qyiMms/crqp8AJDKEr/SixhaClflfWwKGvlhg8EtRPyQ8KDCZUpGTE6YSl7HNAwLLuAG6+i7h7Ft2tn0Q4dxr7N30KOx//FGant7H7qT10sxbtbot6o0Y1rbB+ah2TE8ewccMpTK+/Fv74Kbjjp+DWtxCbYwyA6Dqu5yHnqQkxVoLQ+8zcrwigdp77QBJgSmWKAWoQeI6j+T6o3QMO9hHmM8SDGWAYOKlesgTi3WSN6zg3G1z/uF5LSgn7wvCb9wEzSZ7am/PcyPKRDs1Am54Akfo0gF8/T9KeyhgaWgIEQ8eJYJrIMsZsH1iSkxan2q5Nhe8SGPxL4KB5P4M74qOLkx51fBWN1WvK7RuFFey1P8TGFD880QLL3jIE87kxAKIy77y2sj8RI7uSEZJYE5nB52JGGj1RYs9V4n84QmL0EbGSh77WyHpK830UMq3ECXkO4ASlqkZsZkmVAUBSUNDkQ1rf4jHXrHOyHlXYP+hxIAod6sPvtXkt7j1QR0Lt2Pf0KNfNlBipY61vF3xznod6wHWIVQOv9xgIkdiXszXj1FdeuLxEy/D3UTAufQ+5ZIBzebzpnkv9I2eOM2bLWH3D14ByjCmonZONTL119YtiLBIOSOX2+3kG/Ga7nCB1MCuAPz4Uy3vGqubkubqRBDovm4SGgT5lQbvM8GMZ4cV64mMJimN1/CJiquOX9j+SkHukQP7KVrayla1sZStb2cpWdg+0Iwf9nv70p+NZz3oWHvjAB+LDH/4wvv/7vx/PeMYz8O53vxvee9x888247rrrykZUFa655hrcfPPNo8d81atehZe//OVLf1PrOoy/qa+Pb+eiCbIPs7ZLKTe2sYLp
QBlU1zpOLmYGUKrREqRGiwSq3cEus/vO3YawIxJUt9+GdneGg7Pn8/G9Q1VXoInUzpiuM+C3dow32hUz41A1DGoIM0WzI+WEODu6nSH6ijfLEqSydUxSn1g5poINNwKK6XvC6FJJUNLj+Tp9LMqGkAMJxyRAtY5ZH9G1AfstB6X2u4jdeY9ZH7DX9ghSE0StdpQkjDjYweGpPvaYVp6DJYQEaMWow4DgggIXSDVbfGWCkYM+GQWxhudu318WGDIsBQWJi8xwZZIaNtYwINQHJDknBaYUkBpKUKI89KixzKiyHsc/o5/TrGtbC6QYC4f1j3cFOE4xAA04AAGkQKYDELoWznlUAwZb8A69BCVjz3UuGQyv4ZsKrqngGg5MUN2kLPUk6yk1/KJh9yXAT5ikQ9BP+1RlhYAy3gpI/Rw4I8RFKWO510AdcY2kAGVLVsV8Mgzy6H2ryQMAB5iYAUFyTJflaYdjj3K9oeWhJhQgcyLLpN/LY9Sy2GA+r/NkFMC4F3ldD+LB6iQgFQQYNN+3QUrvMqvUyW+RAM+EHARMmf+XaMr2S7VpBAyk6DNrUjLhI8AyhKYPGPjL9246rrmxdJWxLBPgCsl7XkSA+rKPv7K7xO4Mvwo4xLcaKCgUIJ76SSrZPKznNmR8jHw3mQBkiYUyYoldiDJBy8t8A4eUZJHB/zlwsIewcxbh3GnMz5zBwdnz/Lc9Qz/vEfoIVztU0wqTrQmarXVMTm5icuIYJ1Wtb8Gtb3GiiComaEOcSpTy/xVcsROtAj2pXVrvWPtvyYKc6qtpUk3ogHaO2LUZdBGZ9GEtPzipSV1NGfCrJoj1lIG+EDHrA7P7hHV10AW0Kp9NlCUVKfu6zPbPjLAEbFpfaax+rgIK4geCHGLfZbbfBRh/C8cz3WsBvaGNDaNoHgPUDcySnqCY/w8w+DdcY5xbmFOt37+szYCqJ2T2pAKAMQpoJB+kSAkYSWuukdK2J+eIWA5TkmqcJMZoEo0mGmrCVuU0OY5fSyw/y5bXNRAyPqUGMv+AB+azfG4qmT5dh5ussaynjLmeqiQfq3X88nizncVeRYgRMdLCtUvglVmbi7Fnri2D0bx2e98sXBNlkQ3VGizYPrZMjx1HXx9KagbELPE9uL/5TEeOPziurWG5DOyz/TRk+OWGmr3dgN2Xah2HDpgflHOLlGKIXZtAPwCIjhM3yXnEvhdQv8cwjKDzojL8FPCzwJ/21UKT0/nx/sf2fT+4ENq/V8QNWflWK1vZ3cZOnz6dEuBWtrKVrWxlK1vZoh056Pfc5z43PX/EIx6BRz7ykXjwgx+Md7zjHfiSL/mSO3TMl7zkJXjxi1+c/r+9vY373ve+ix/U4DeQAg2FlNCA5WcBPw1MDTMoIdmfdk9VbAxtxik4CJ8CxWRqngnYx3Va9kHtDK7dR9w9i7C3g+7mjyOcP4v9W87g4OwOutkc/WwOcg5+rUG9MWW5w+kG3MYm3OZJ0MYJrt033UrAmW6u7IZKN1INgTd7812Qz/Vo0nnYfhIJxMTMQt6EEoRlZAq/F9fAV0m60AbKWFJUpBWl5mDnGq7VN+ux33FwYK9lkO/8vMPOvEcn2cEMvsTE6ptULj0qABhCRO1z1nPtGYhh2UUGYjT71jsFECSD2lWctDppcpBOmWwqwzgmY2XH1rIAp2zAVXbWmfE2ZOB5YlBHgT9+PQN+FoxKG2oBppbVvLDgFP++9JHT/2PBinsBArwAZfDAAsBjIHAKcHL/ka+4NkgvP+gq0DpA84McwHOOgaiDfb73lOHqHGJoUn3LGALICdOvqZIsG1SSSth9Cvgh1TeapvpGPRwOinozDPZ1IbP82hAQAtCGMFqTjvsVqPtoACwNeAhrwYGZpoGY8UsxMVTHZDatvGbRpVHnFQGxnc4xZRAiRP7yMFjEx18MttgQg4LNtmYdUAJ/0EBlYOQ8Rhv5zOHZ2js4BS8l4lmb2VTHYeVK6Vgi/b8CfVSMwQQWjI05LA+o
81eF4ec8A4HtPCVEROKxyfHtLgN+UPCBl8xljIDcjvRjfH6xW/7Zla3sAnZn+FXARfhWMQDiGkQARf0m9Q0GjDV9bxgYtXORt4kKUo+zAFNcBUOWHU3OymtWlvylVmrfzfe5JvLZWzG75VZm9910Gnu37aIVWU/yhHqtwtrJKSZbU6zfcArTU1uorr0B7sS94DZPIE6Pia8y1YZlXykxioCG8hoYJcHFJ/+R645S6BGriWEvLs5fCrSRgjAi7xnnM/4T8C8BfxWvjySAXwJfmjVEYfrNo8OsZ4bffstr237XMwAj6x37StwGZVUz4zr3bWL36bhI109Y19YSy6hjoKCODBKHHrHvYOs5jkMhy81KABYumLoUA7aYWojRgGooQLyozFXIOA0d4FCWDBiMv+F1G4JDYz6VgjMh0kDS0JxbZO2FPjAg2EPXvOV9ooDfUMqz8c6w5BcVG6gbsOUN4xbkmAXfTNmfqmrEyRRajE/lPTHdQPANjzmR5D8QGc9ZH3AgyXt7bY82MPCXrOKUo7Z3qP2iz5mSzAxrdHjfqD/Aez1hufmm6H8Ge4UxZvxqZd8lxQYqx0+AKm9Q4TdlKVnjFwn6Z0FAC1ITHQ5SX0j2UtuzcO0jMxqD/WHpE02o0Gub1Fz6Od+LrcwtXYt4sM9zis41VikBSNc8ipxwFLafqxtO8FRwklySyVcpYZtEB2QfdAxwXQagj5m7tKljZStb2T3M3vnOd+Jtb3sbfviHf/hKN2VlK1vZyla2srut3SnyntYe9KAH4dprr8WHPvQhfMmXfAluuOEG3HLLLcVnuq7D7bffvrRezWQywWQyubgfHJOVsnbILuFCjBhgfCNfmICNXp4D4EBJL2Bf6EDC7ovnz6E/fTPC7jZm//BJzHf2MDt9Du3uDKHtQM5xbZm6QrU+RbO1Abd5ggG/Yye49l29jjDdZLZSX9ZMyFnGA2DBAH0JuNGgDXGmeApuBc7M7pEZLrmWmwR8BizBlNXNu28G+kQOK/oasZ7iIHqW7Tzok9TUXsuBqJ05P+r/28DBKXsNgotApzJQUTKcgT3JYK69w9RzTZP12jO44JFACg/Oqu55t86AGSQI4AhEFVxVgVyXM+5tfw2u+Sj4IIET3gwrMy4UwSauJ8h9q6dox6Dd/CvDL0QNYGTArwsM9h50IYGjRVm1PsugAplhFSL3FYOgkm3NrS2klorTJcfgHf+HAdGLMQ3MqLSZAenJVXDOizRtgxACnOMafs450AFnmMc+wNUM+qWu9gwKVtMGbjqFW99idt90HW5tg2vNVFMef4b90MEloG8I+rGUbAn2tfKblumWavAQoY+By/W5MghHkOBQAEt9GglLICY2xWGWApoS3HEgTvAPzADo43jQZAzgW5CRs5coxhyCLUC+AftAs9mdsBMACZLKMaRf+hhROZI2Lf6utpnZuCNSsgJqKhCY5l9hFQOVRNR4LMXe1JaspeWhRwwOFAZQnZHEyx0gwau+A9FcgllVkvgEdReONFm0Xcd3e7Dkw3eirbLR77F2FH4VcIhv5UjWNVpIXklJUlUD6gC4PjG6kpFLST7DcbjgQ4lvAAscReFjBa5ZmtiCAwaiB+CdS4AUzXfhZjugvbNob/kkuttuxvlP3oqDM+exd8s22t0W/bxHNa1QrVVoNmps3HgCkxPHsHm/6+G2TqG64X7wx09JQtVmYoVru1KtuyigCZBAS2WPwzc5ScFIX1LfIkaPMDkmsoUlWJYUIbo594eAf6Gbp6B8kip2LrOtmikggB/7hWvo6nXMxSdQ0G/WheRb6brGxwImJGzrQlIZieVH3TzNj4l9ZVQr0nvaPe2ckypCYMYYJJGC+Hol4BQopD9JfMghsw0Yrm0DgED9J8p18ywjUFlZ6htFUPrdlKaSmI6ynqSxCAP0Ul4DjHKDHdfp6ZIkMWUaMiOKUvODYUdRatS4pWQcaYr6GkkG27GUp3dG3hNGnr03DL9+vujf6n07cYkx
pjXeyLOvBlchiMJImGwiNuuYR4f9rse8jwXgp6Bf1+ckIu4zlxLYlBGv7FKC3Wt0uUakBSZjNKzjKoG2XpL4UvebsWCN0jVHUhdQc5HrgqexFXWfaC/MhTaF42bBPgCjgJ+6ucvq2B2m3sEHDQks1QQCZQ6HjgE/dG1OJujaIrkgKrMz9JxM17WcFDaZIjZTBoL7HqmgpGV5h1zLsY/R7E9NG6PeqwysXqxZF+yKSHyufKuVreyK23vf+14885nPxM///M9f6aasbGUrW9nKVna3tjsd9PvEJz6B06dP48YbbwQAfMEXfAHOnj2L9773vXjMYx4DAHjb296GEAIe97jH3eHf0aDQUpnPkc8OM8etDRlYh9qAYZjkj4CUaakba+paUDdD3DvPcp7nTjPod3ob7d4+Ds6eTzXL/LQRQIOlC2m6IX/rLN9UMYDRwaEX6UubTW+lA51tq2Q0q+wLb/ZZeiqaz1HfcSAGDLwUx7TnOOiLCIB8HlqJMdisIfoGB4Gw30WR7QzoArBz0OH8vMOsC9hvGejTAJUGBBQMUOsls/WgCwwYOIILhI4o1f6bRIfacRZxTDXIOBjUI8JHAIEDI33gWi8hRJHmAiqXa7xQzzX/GOjSKEAor/+YxQCCQ8ov7iVQpGxTAcBstvgQrBkGu6zEUJAgEQfxct3DoYwiA2hGCkgAUBdypvMyS5noQK7ZqFnyNkhkznnYByBfMCZjpQGdyEBiLcHO0HMQEwBVbQJRKjDoZ+U9AeSaflNh9tUi7zlZ40CF1o+sag4uS1C27UV2SrOQ5S9EFICfsiE0OFOAqTb4o5HFECRYxC9mViWDrAFRxh6ZzPJYAHtACdprpnjxy1Ev4WK7hqPxYkMIFhBM9SUlWKp1/obAn/7fg0Byfyn4pxJmnsoxnNpFSEGfLIWsMp+UWH4F4KftIycxPw8KPM8wm5aDkRQDUAWgQ8nsA7IsnvMcwByyciEyxSr1CQAUEYmZT6NBmcGYtwHwMfm7la3sjtpd5VeNmQanvQImfVveD+RSTT7LjlrqUynTCsjr6fAjcSAxqp+VRwWfaL7HcunnzyKcP8tynmfOY76zh7kAfjEEVFOu4zc9yXX81k4dhz95HdyxE3DHTohk+oR9LAUvLcNIQUkBQ2i+J8k8xxClj3j6N8yk5Cd49NJ/KfkFxrdSf1HrK7dzltkTSc9kzicJazQTBieVbVVPk1y1/nUGAJz1IfkItanhl5K6KEtFWrnRoc+YmEOhZ2aQaZ+t/4bQi1/I9f1Sn4QuA22JrXXh1SqBLyNgCM/eSMxyQMAbQOS3M9vPkcvAXwzgJJIgtZ0zsJd+l0ZAcID95aKBYfRxjDnozR5EVidESWZzAznsZSyoJHVISPVwU21cYsCvImS2nCo1BMPs6xeBe/s8AkmmPwqbi+ty54SqDg7zPphxl8fbQReSb+UkUamOQAjZtxq1kPdUCfALhnHqKln/XUpSTIoa2vzlR0/9poDfHWGcDY9VNF+uo7WLAfw0ue+wtug4pxFpfoq539L9JsBu6GROEcAPIaQ6fszwE4nPlpMSYh9Afc+1/Lo5yPv0eZWFtXtHkDNSqqpQUtZT5DmSko+pfuxhSWkrW9nKVgYAf/mXf4mnPe1p2N7exiMe8Ygr3ZyVrWxlK1vZ3cD+/M//HI961KOudDPulnbJoN/58+fxoQ99KP3/ox/9KN7//vfjmmuuwTXXXIOXv/zlePazn40bbrgBH/7wh/G93/u9eMhDHoKnPe1pAIB//I//MZ7+9KfjhS98IV73utehbVt8+7d/O5773Ofi3ve+9yW1RYMiY7YQIAIyyDKSNW7lWoavAeMbQZb/NPUEdVOlGbQaINFMy/k+qJtx/b5zp9GfuRX7n/wHzLf3sH/rGXSzOdpdzqx0nll+9cYUk5PHUJ24Bv74KbjNE6D1LYR6kiQK255BsS6gAA6GbJk+AkQV4trxJDdK3Uyet5wt6ysO1EjGL/Wc2WzrWQ3l
qVhu0PSpBugk+BfrKeAq7PWE2UHAfsd/B13AuVmXavbttz1ayUZvQ8C8C8xmCxFNxcevK2bvDcG/PrKEjDL+asM+q31AH7VeDQNdfeB+4c113jQri6ov5BMdvG84yKGAFVBsplNQfyRoUkgNavAn9IiBSvaQq+CTZBqzo7zWcgks8+QEWMmAEG+evSOESGhFvhSO0BsUKAWOFJjSLXrgTGauQ4JUOy01NyLJrOnhvMpgqUTXsiDd4L5Iz1MAouZgoKs4A1kz/KsGQWqJJJaEsrIAOBNYVKaWreHn1jdBkzW4YycAYfmFyQbgGwQBnm0g1Ep6tj0HoOzjsixrawkQ7COCsChbRLQBqcaOBlH1fiRTc1Ljrjp+c9B1PGiUsvxB6AevWdPvaV1SDahcTEKDZl5rXVM9vAaGQUhyaXrv6MjkYA9hTGZ4GNOx2fVW8nTI+isYxgusIjAjDxmURuh5THkT8LI/rHMUUK4Lan2X5ncNItLY58ZsuC6F+fjn7kxbZaNfNXZ38qsAHD6nm3uGpEZqArgKVpT6AG5BDq9IjjIMI/1/JMdrpHd5jYVJsdDPCnhB7R7QzeH3z6K77SZ0N30M2x/+BPZvPYOdT55Ft99hvjsXhl+DtZNTTE9t4th97oVj97kW/vgpVPd5MGhjC2HjFGK9LrVfs4tMmkoRDJtY2yI+U3SzzBxqZwmkiL5GqLnesvYFrzusaUgETD0VDCz0HdAL48ZK7TmX17zpBtzaBvq1k4jNOuJkA52f4qCP2G1DWt8O+ohW/CwFYNQ8MUtsWrEUpM7FIQXoFZRjf5a6Gdfh1eehR9jf5fNPIJ8wEm1yRYXMPAKy4gGQGH8gBm6c+hgxrzdpnUNcAEE4CUUux0DlgIiTveQs4AIAx/6yd8SJXQDP8W7EZ7Fgr03WMmOxsCHYUrxWNCzP0XJM7xtZw+ScTYIXkF/j52ReV0Y8e+IK9DWO0HjihEORmM2sTAVeDcPPnPdovXNtszOg32QjJfNxol4U/z7i3KxLLL+9thcmY/bTW0dw5IxCSSzOtWiT+bN1/XTcUIc035BIyBI5eOJkPgbFIuJgbLgB4KfzjO1rKwmavmtlyvVY6jMNrg0fI393KN8+BvhpnW4Lbts2KDturJ4fz8cZJE1JBH2LcLCfGX4HszSvlI9c2y+0HcvpJyn9OWoAIYQk+xqrZgFUjZAkyogioa4P5TkTslqE1mYdA/IBrdnKz61PfjH++ZHbyrda2cqumH34wx/GU5/6VJw+fRpVVeHhD3/4lW7Syu6Ave9978MjHvEI1HV94Q+vbGUrW9kF7E1vehPe//73r0C/JXbJoN973vMefPEXf3H6v9aD+cZv/Ea89rWvxQc+8AG8/vWvx9mzZ3Hve98b/+yf/TP86I/+aCEh9YY3vAHf/u3fji/5ki+Bcw7Pfvaz8epXv/rSWx/i8szUsf8f8rqyrMYkeqJ5bj+fPmODP3ZzbzeoPddRQDtH3N9FnO0h7G6j3Z2h3dtHN5tnhl9dwTUV/FqDamMN9XquTcY1FRYlswAUWapDwG94XqRgpGz6I1Gqt5c2+Bqw0d/RLGwNPBnjYIB+vs6Snq7CPLL06G7LwaZ9kfyZmSDUftsz2NdHzLoefYiYdxy0AgAvbDRbt0LPyW6wvWNgpRY5I5dq0uTvqpQnSQaqsvzgDMgi8okmWTxlhVvgD95lFkIMGSGz/VZ0VB6vPEZ6Zg9F4XtFri3jBQCLlAdmDNHIVAlQYqUmicx58jmPbYhDiPBjkYJhU2P5PCKiB5n7QceFCcYaI7gFYDyGLvUZjzuXg7khAKERhlYN9D2oaiRw1qQAYmq5sLRSEFTkhkgkPSFBVpWh0kfOOIYAyhIkDFk+dVQe1diyrlNwro8xR2dcvgYMyFKS/swAoMrxMiAdowGkR37PzkcaJFpmY4ChtbF3lh0vsxIN6Ej5IKQyZRLMAQFx9BcWf1/bqGCfU0k5MjWlhkG/QUA1tV/ZGw7pnloI
zI4kftjnw5qAZAOMK1vZEdvdyq+CSZpKwf78f2WXM8ggsowq2zg8hgH8dN4q7qyxupzAAgCRPktGYUDZSv0c1B6AuhkrJ2yfxvz0bTg4u4P59h66/Q79nCEj33hU0wqTExuYnDiG6akt+OOn4E9eB9rYQmyOMcuvWUdRd87IV5KAfgkUsWt66IDuYFwmUfpiOIfoPMdJPoPzT+c+BNA4ycWtbbAMabOeag9aqWplsrd9MJLV/H8nCRW1c0kS3aRv5XOybVIwMhh/NhhAUphCwzZTVSMGlYmUJIq+YwZlutaUGEP6mwrKjPkxFhw4LESuICARr68+8lqVfECYBD7j62YFg1LVYjRBxF6rMXaVlaRM5+aQ2OOohLUe0hUI0YBDsTxfgiR69TH7EQpaJd9fauVGqSnZHSSW30Kbx0yTYrT2twHxoYCf84jVFF0E5iEkdt9BF5NvP+sDj0Fh9PUBqm+azvOSbNiP0ciwDhLMHDlJImJgPfVbAk4FezWAn9ph7VLAL39/UYkgH+dwH8wCfvb/xWeWN6WoIbj4ZjkOY99nVp+R302Pcu8qeB+kdnaYdyDvQMEhtnO5n/txBYOLKLSnyi1Ds+C9NaLFPlnZylb26Wl//ud/jvvc5z646aab8PCHPxxN01zpJn1a2nw+x+///u/jq7/6q+/Q98+fP48nPOEJ+PVf/3U87GEPO+LWrWxld67t7+9jbW3tSjdjZWIf+tCH8C//5b/ED/3QD13pptxt7ZJBvyc96UlLM/EA4K1vfesFj3HNNdfgjW9846X+9IJxwKcz2cLIm9LigyOBXRusiUEyQnNA5sKQCBbBPrO5ShnNtoD6fIY420U4fxb9zhkcnD2P+c4uut1ZAfhVG1NU0waTE5toNtfhj23CbWyC1jZYXsUJOGc2V7ypjIklk+p3pGA5kAMQIbEO0QvLjCpE32RwhBwz9GLIgQIF/cb6VoODVQOQy/XS2oiDjuv27XUM6J0/6AuwT9l9s67HvAvYkwBdr4CfowQCKvCn9dKAzIgCOGPdSeZ67TK7SvuIgwMM8ETK2bNewLUokkiBUS4uaSQbziw/yMEPBXwjUGaIA0WQqKjxZcdNGjvg4E80dcN8leS/yLEUUpRMZQJSNj4RB7DkzOAcUEOBWc6MtdKUabxEHi9l/8if+VzIGpJZ3tXcHIdtxEvZRqk345scUKMyABaBzPgQeU8I4w+W3VfVHPh0BvDzHqhqUMWBUK4NuJ5qSLJUW5PGZSvMWJZDRWL69TEuXK5LsRy4keuhNf+C9LFQFjKwxSBg7ZhlEZHHIhxLZkYBor0EhpXdyb+z/DpoIMuC2C7mYMuyOS4DcfIboCS9FBRoH/m2BScvJpBnA2GJoYx8n2myQsGo1Xko3Wcm8cPeVxJMXsb4PtSGAyAEAJzokO75S83EvlCA9U6wSHTp7bzE46/saOzu5FcBKJOK3GCNhwApMgd4VwFVg9jNQf3ARyBXSFGnOWLZPWzfU6k+YjntOABfKPRAN2NA42Abce88+ls/ifZTn8TOxz+F3Ztvx+zMDPPdzLKtphWmxydYv+4E1q47icn1N6K64X5wW6cQpscRp1voJluYh4i+DWg8yy+mGs2hYwlqCGijyU3apq5lFhwggEiDWK+VrDYx71gGuZbkhC5i4TMUIwfgnQc5AcS0jt9UAL/JBsLacfRU4aBjX6oLSKDfftcL4KcqClGXF/GVCNPKpTp+o/J61nfsWaIeLdcEiyIVqDKBhQQpWpDziF3LfkuMnPhDUus4BPadYgBFJ4wiBr+c+gSD5KtlNpSmDum5vA/2ZXoCECJ6Yp8qUvldXnscnIz7AlRKPzbC9BsD+wYMusRQo6w6QspyRGVQ30PO03Be1QdmX5+SvKf6Fh4C+LX7cPP9vCeBAS/tvT7cK+kY12RAeUwsWN8wizRE7LdRWKQxSfWfP+hw0JdS/dLLXLLzjgI5ds13piajjqfQAbFKdSmDSSbqA0AyoJwB78Ys
DJiBw89aRQYLHh5mNlFrCPgNu8Oy/BaAQPHnUIxdymPZ9pGCfsLiK2r4KcOvF4WXENL/Yx9YUr/t4IID9Y7rzddtkhzGMMFB2w3LFkY6P93XKfDHPm3MiYwocuYWku+uCLNvYCvfamUruzSLMeJd73oXvvALv/Cyj/WkJz0JL3jBC/CDP/iD2N/fv8PHuemmm3Dy5ElMp9PLbtOno33Hd3wHHvCAB9zh7z/xiU/E+fPn8bmf+7n48R//cbzoRS9ayTuv7KqxD37wg3j1q1+NV7ziFbjvfe97pZtz1Vnf97jllltwww03XPZ9v7+/j6/5mq/Bzs4OTp48eUQtvOfZnV7T70412VRHAfvSa0BmXgFJTohBhcCb1SWZujQSWMYAYIMcv6jbV2z8TVZv33GGbTdD2NtG3N9F2N1B2NtDN5PgkXOpfp+vKzRb66g21rBxwzVwG5yJ7o6fglvfSkAGtC5K6LDmK9lg5WA5Bm1TtqFtH8tzSfau1q4ZMglJZXwkO7gICnTmuSvAvv22xzxECQAwi+/cQYe255p9s65HGzgbWAG9vbk+z0GjpvIF+GfrzCiTL4F78lwBP5VUtIGR4aY8Rt5sSsUZ5lwFKgATBv8Y+SMCQiGRKMeWfimGSQrwGCBQh4k+ofEoj62RoRtfG/xyEtyJPQet+BQJgEOLkEA9lfnUemxqF2L7MdYX0UfuE43jXYwsZAJwSDOruW0KVpG9N4e/SwS4Cm6yxj0UAoN5GvhUcM/Ieua6bPzdoOB1LTWZfM1MCHJojayn1j/UOiNBAD+Vi1VTxuTh2drjgYghCIjAgZlWjuscwQcgOA6+EhzXIkrUYwZ1HXGQRGvh2GOO/TKPBMpqrkWbyteWZ6ib94dMlAuOg/Fgmv3vGKuax0XJirDzcTG3FsABP2ogJrERPMaTPqwNgmNEoXx9hNl86PGWHX9lK7taTMC2FPBPr7k04STgjxy8q0Au5OQM6w/BJbk8T0AhL2h/cnA/R/N9fi3ygqfHF8lCN99FOHsbws4ZzG/6e+zedDv2FPA73yL2Eb5xqKY11k6uYXLiGNZvOIXJtafgT90Ad831iM0xxOkWwmQDe11IrG/v8rxvpfIASH1fBXoETIkhyR4CPp1HHPSdSpF7ynMf940B13TNUOnquhH565qTYiZrCJMNxOYYOqoSu28eIrq+ZPgdiGR6GKxr7Cs5eQQqn+WUvSNRTkDyNVWxQuuClYBBlgjkS9Qz4Od6oJVacO0caCaZ7afsPgVUnRe6TyiYo+rzRcRUm3kg1pyTVDRBa7COJ7WCCAAk8p5xYT3UZV4l5H3y7Qy4i4FZwG8o7Z9AbLOuWN8byHXoRhLrwsgKT7S4lmrCloOCt5A64gdw831QuwfqBZx1HlQxaBdTop+wMqOuoZT3Acrs8zXgHKJv0IMVPGbiTyngt99JUl8XCsBv3uf73VNMEpZ3CMQZJG3a/2udaIjEp3cOAUANTvYjl2XI0+FGfsImKwz7HhgkKBH3u/q8Y8xBPpbxwS8A+F2MLQ2UjCVCxXxfJlauuX+jSumrVLGw/GIfcEEb8YO0ZWn/AhI5TyqYfiqMwT5rTiwDLgMUXtnKPg3t9ttvxzXXXHMkx7rpppvwu7/7u3jmM5+J+93vfpfdrhe+8IX4J//knxwJ6PeTP/mT2NzcxA/8wA+gqu54GHM6neJJT3oSfvVXf/WyJUJ/4zd+Ax//+MfxyEc+Es985jMvK4gdQsC73vUuvPWtb8XLX/5yeH8R2UAXsL/7u7/D/e9//8s+jtrrXvc6/MIv/AJ+9Vd/9Q4fg4jwLd/yLfjO7/xOfM/3fA82Njbwghe84MjaeHe1EELhT95R29vbw9ra2t0SKN3Z2cH6+vqRjN2/+Iu/wGd/9mcfQavY3v/+9+NzPudzLvs4j3nMY7C9vY2HPvSh+Imf+Al827d92+U37iqw3d1dbGxsXPZxvPf4tm/7Nvyv//W/8PCHPzz9Pf7xj8eTn/zkSzrW
j/zIj+Cv/uqvAOAOgX4xRvzRH/0RPvKRj+DjH/84/uEf/gEvf/nLce21117yse7Oduelqt0VtixTHEgyQik7O+TC9dTnAE3OyO1ynbu+FaBO/vqWs7u7udRZ4c/pc4TxoDTFXEsB7ZzrJ8z2EOczdLN52kyRqd/XbK2j2dpAs7kOt3ki/61vcX0yXyc2HQBh4c1B/Rw+zGWTP2PpJRtcC9qWRdmpqBI+Cvg5zl6PkMzMCERXoacKPRx6GIkfExDo4VKttIOeM3/Pz3vcvt/itr05zuy3OLPf4txBi3MHHXYOOpyfddiZddibM8tvLtKew2xQKwXjRc7IgnyTymHqHdZrnx7XKo9J5VA5oHKQrF/eelrGmm6+tX5GMLU0+qh9oNJEJWCUWWMqpaUF6w3woNnRKpHk82sLfxdhiZkHSlJOBM3yVqBTA1Y5eFXUGbkI9E77pA8QVhxyYFH+bB/kuh2mFkk6jo2+GbbGEBAxklJUNaBmCre2AVrj2kU03YCbbsBtbIHWjwHTDWCyzoyHep1BvmpSAn4VB6jaAL52QeU8oznH3P4xAM9r5vzgb3hNrCUZ0cFf7rOAtg/yOtcP7EzfBSAHxsJgHEq2eJ/GbPmnWdZAHtOXas4ErphJkP/0PT/yV7v8V5H8IaBCgI9d+iOZt9KcJfMWS5HN05/O3Srlp9JyHHw2c7EFEhJLoZExIDVQh8/lL92nwqIuJNwuFvBbdv9ewr19pDY2vxz138ruuWbYSPZ6ayA2QJJm5LOJzaa+idyLdg5JvtaFgHDrv9k/8eVSneTuAJjPEHbOoD93Ggdnz+Pg7A5mZ/YwP9+im3UIPdejqtZYRaHZWkdzYkt8q5O8bjRriPUEHVVpXesCFtg1CwB+UN+qS+0rajynvqkKAEr9G1u3LCVr2YQzAKRyns0UNF2H29gCphvc7nodsZ4kH6TVtU3WtLbPa439fTt/1579oQXAT0AkTYRTfxbRyAQq00fr+QnTLzGKFGDoWgEZ2uXMzrH+Rd6kKNA1xqhKcvuD19NlKsasqTOW/LXSf7H+QPYB8/qqry2sqroHMGM0Jf/JGsYSsX0JAl6iuWINZp82yflrshVR3p/0c75P9ncRZrtS021QgzPtB6is3Wdf91UG/EIs/EGuH8kAnwJ9bR8l0Sqk5D7t2zvFRtht6gN7UfFQH1nnpOFwYd9Jnl8C4OedAFcXCMDZ41/IVK519L2LO0QyHpch3aOW0Vcw/ESi94KAn3MXVE1Q5ZnhPTwm7QmUe7G7AaFvua18q5XdDe05z3kOvuiLvggf+MAHLvtYf/EXf4EXvehFuP/9748nP/nJ+Iu/+Is7fKyqqtD3Pb7+67/+stsFANdeey1e8YpXYDqdXhbod/LkSXzxF38xbr311stu03Oe8xx84AMfwFve8pY7DMLs7u7iZS97GR7wgAfgn/7Tf4qf/MmfxEc+8pHLbtvOzg6e/vSn40d/9Ecv+1hqT33qU/GiF73osph+APD85z8fD37wg/E1X/M1eP7zn3/Z7er7Hj/wAz+Am2666bKPBTBA9zu/8zv4u7/7uyM53m/91m8dyXkCwK233oqf+ZmfOZJjAcBv/uZv4hu+4RuO5FhEhF/6pV86kmN98zd/M57//Ofj/e9//2Uf653vfCee+9zn4iUveQnC5ch6if3oj/4o7n//++NJT3rSZR8LAD7xiU/c4bIcY/aGN7zhSIH0//k//yee+cxnYnd390iO9973vhc7Ozv40z/9U7zjHe/A2toaHvvYx17ycV71qlfh5S9/Ob71W7/1DoF+RITrr78eb3vb2/DSl74U/+k//afLYpFb+5M/+RP8wi/8wpEc63KN4mGaUndT297exvHjx3H6/74TW8dPlDUngIJ5p9nSUaUAlblGLFHDH3IZDLMbdTUykqFuOUuwyG6XrGg33wVU0vPcaYTdbXS334Z2d1bU8XNNhWrasJzniWvg1jbg73UfuOkGsLbJdWaqBrHZkLZX
iQFDfVv+vp4bOS6yPlbHYxDUjr5BpPGaMwRgHnJR9mG9wAgJMIWI/Y4DALttwM5Bh3MHHU7vzTHr+P9zCQbsz3v0IWDejU+6CvQ1lUdTOWxOK0wrj2nFAF/tCOu1R+3l0TnUXuvSGICPDt8c21ocCuIsy9JNl9v0i27whUxYHCNniks322MOgxMLDTNBGJgadAYE0scAzuwPQHo9Rga4goBJQzDLSktOKw8C13/RIIYfcVpVMsvOGDYrXgNQGvzQwKFPbEwkcL1gndoAWS9Z8v08MSh0nFqJqRSU8uzw2+C0gjixYsZfF1nqrI8ZqGQmRAbbhv00Vs/PEiPt+8EEsWwfJ0bekgiPAq/KVK09YVI5eCI03qXrUblyjC1el/JaOGSmaroHqLwfFiQ008EWM8OX2gXmwQvWNbrI36Lh54I5nr5sxkN0FWI1yWPAzt96zHSsbpEFHcxcOQZKjz0uOw/5//bODu714M/GuXPnsLW1tfRcj8J0fbztb/8cW5ubd97v7Ozg2s941F1yTiu7a0zHzq0f/Rscu/a6lACk80MCQEKeIz0BjSee29sZvyg+ykIA07KZFNjr5rk23+Cei9U0fVa/n3yrgx3EvW2EM7eivemjmN9+Fmf+6u+wf3obu5/aw3x3nlh+k60JNq47huMPvg/WrjuBtYf8Y1ZPOHkD+o1TiPUa2uZY8l3UNmqHxhOqMGd5RElKGCacUT/nOUbkq/UcYz1Bf+xeiL7BvC8lJ7WWX2LExy5JMdJ8jwGibpaPp6wskaxGNUWYbqKDw27L9Y9n4n/1kVUUlOU367mW30yC+J4I67XDpPK4dr3G1PP/awP8ERFqB7huxjUT989yEtzsPINGAuIdJheoNXdTTeo1TtrBhOsP8hxdZ79cavDGeprm7UJ+2/g9wDgLTvu3D9lfsaz1VCvWlWCNmhv4gYVfKJ/RoF46nowBVfbIoHS7uOaZxCYIiJbOu1lPgNpczjclVIXsg1nT9VzPp3LAWu3QOEI9OweabcPtn0HYvh3xgK8b1Q1ofRO0vsXJUc1aWiOLe5Bkb+BrTgrU+n19TnbbbwPaAOy1PXbmzPLbnnUs5T9naX/1+yvx69ca9tmPTytMKodr1xtsNh5rtcO6tP1Y49E48L3Q7nOSj5UoHSYvKhPRV3ls1ZO0t7E+tGXXWYBPpTc12SolrxlfQ6/92PhRMMv6VBYEtMcfY/kVbYFNPozpdQs8KoCp+51Jxb52hcD91h3AzXaAfg7X7iPs73KJid1txK7lRFS9X5Xp17UI8xZ926EXNZrYB5B3cHWFZouT7/zxU3CbJ3gcbZxArKYIa8c5oapZx74yQLsyISEi378RGcjnPhvZdyBmsD3kpDeAz/n8zjae9FkPWPlWK7tbm46dO/Oa/s3f/A0e9KAHXRYQpnbzzTfjp37qp/DlX/7l+IIv+ILLPmaM8W7JSDrKdoUQcO7cucuSlzs4OMCf/Mmf4O1vfzv+/M//HL/2a792WayaGCO+9mu/Fm9605vwmZ/5mXj729+O66677g4fb+z4l9t/H/rQh/CQhzzkSNpz5swZ3Hbbbbjf/e5X1Dy/HHvyk5+M17zmNfisz/qsyz5W3/fY2dnBiRMnLr9hR2zz+Rz7+/s4fvz4lW5Ksve85z0JBHr2s5+NV77ylXjoQx96h4/3lKc8BX/4h3+Ia665Br/2a7+GZz7zmZfdxjNnzhyZpOQf/MEf4D73uQ8+8zM/80iOd9TX9NWvfjVe+tKX4su//Mvx+te//rLv/Z//+Z/HL/7iL+J7vud78JznPAd1XV92G7uuu6z16kMf+hB+//d/Hy9+8Ysvuy0A8KIXvQi33XYbfuVXfuVIGJJjdrH+xdUt7wmUASWTOZsCzBLApWAy6FyXwIJ0DGXFha4ADfX9JMVjN5ryW0VASp9LHT/qWwStd6I1EAC4uoLvA3zN7ag2pqg31kDrm/DHT/GmavMkYjVhwK9ZTzKFyWKQ4nQjwemB5E16
bh6jYZx1EehHkI7hDa3/C0CSmtTsaMsA25PNvwae2j6mjT9n+4aU7Q6UmZ8M+Dk0lcN6w6DfRuMx8Q5TYe85AoN/Ik81qRy844AHQQMgi+0fM5V36iPLd7qYJRTt18eyfpNQUNB6FDFJK/aIcAB6YRayxA2b1rhL12MZuDAwIu537/g34SIoEqKT6+GitJ8dsRgJjjLgZzfLThmTJmt8DCS1YB+DwplBljbqjrfvRNyezAZjcbaUUW1Yp8U9p68nBGvAJpVM8wTspHvZSs9RAro1CKQBwwSUyl8ARCoMRRBhnOW3eB30tRz4zhKgl8qq03p9vSRie68sgogg19IBiPECGeSxHK96bAeOQNnnhx9oZD4DsEzmsggW6uc0KG5rGlmpM/2c/b1lZucxfUn7WOc0IEsR+iaxlqMwbELIgbscaHfwvkEU+WfOcu9ArirnzWFbxp4vdMpg3l3Zyq4iS4lUkghkE1dClDq7MvkFyGuDRCI+0DgIDqD0SSimBJeULGA/bx9FjYFrJO8hzHbRnz+P+Q5LpvfzgJBUFAjVtEa90SSWX318C25ji+XSqykDLvU0BZRzMkSW36QYuLagJpqYRAHEkNaq6DxYstKsV8jMbWsBgxJuIyy/BIYBBSDGbPZpklNX9jozL8s6ZElCGpb1z3Vkp1r7WP8GCV0pIUIkK/naGGbfmI3WREXBJBoyPRfqY8m40KSUGDM4QBb4iISAWNSYVSCQiBadNul3F1kmNFCElXMHsg+g8tjWp0s1bYeZNoP9wGJtv2681rgmNJmxkt4GTLvYx4qR60DaQJtN1HJy7Zy8ntU+lOU1uGbJtzJtQXYRUpvlM8p2zIzIzJScdUHkZFlKthXmn/r9ACeiqbxjPUiUG7L/eCwT772GfUZL5pf0ZTPmiNd0Au8RENQ3RrrfL5Z5Z5MENblK5wq9FkD5mg6RGJH8Q0cyDnFx/qKD7LkM4EeUa+CReS+d/2F+h/MA2sXXlfmn56ulETzvgX1TMXhf1YCprR0H4x8xcP+gHJ+RxhPqosyPjuIo8Leyla3swnY5wfCh3XDDDfixH/uxIzve3RHwA462Xc65yw7+TyYTPPGJT8QTn/jEI2nTL//yL8M5h//6X/8rnv70px95/cKj6L+jAvwAZm8edU2vF7/4xZct/6rmvb9bAn4A0DQNmqa50s0o7LWvfS0Avrce+tCHXpZE7f/5P/8Hf/iHfwgiwld+5VfiMY95zJG08SjH21Of+tQjOxZw9Nf0O77jO/Cv//W/xlvf+lZ87GMfwwMf+MDLOt5XfdVX4d/8m39zpPPw5SaoPOQhDzkywC+EgCc/+cl49rOffbdYA69u0I/KwMoQ7KOQgxWACWT5BuhbwAujw7nMHrFB6rTBFJhjWYw3GikqPU53AOrnKbMyzmcM/IUeToA+reHnmhq0vpnr9x07AUzW0E82OXhdr3PtFldh1se04Wt8A7IBlpEAWxFAsxt4Afzm0aEPXHtPrZCpSYFy/qvNoNVMWCvxsy8bfAX8VOpn1vUJ8FMJT/4tSo8K9E0qflyrGeCbyvPaEaYVg3zOEQeshN1XORQZ6la+UwNCGgyymd/pXIoXTNDoECs2s2AKhPYbUQ4eeseBEQtC2sCKI3donTtAj0MC7EkQTAG/mIOxfcogphREqYL+Zhwcswwa2fbp8Szgl2vfobh+pGcjgKPGaDQQoYw/qHyusjqU2TesA2WYqgnIGTL8NEDlM+gHVxUyXH0fU2a3sgRs1nwhmRnKoNMQ6BuTbGKwjjsrhLgA/HkBepXRN8b487R47D5GuBAFvNbgJBYG7XB8HqbYGtIYianeUQCV3zEBmxTsBRavD1AEK4c/WzD6hgzOMfBvybHzb7iF19LzAWNCGQmhmqKPwLxbZG5yYCkzUIkqeO8kOOgQh0kfYzZkL8kjYeT1IdvprjKdtO/M46/sHmmxqhFdVU45MYhsoNYhQ5ZiA3LChTy3gAeAImnA1t4kqUlMscos
W/Ob9pHUt+pmCNunWT3ByHrOt/fRzTpm+NUe5AmTrQbTk+tYv+4kptdfC3/yOviT1yE0GwjTTYTJBjqq0ppWGYa6PtdzSutS6Nj3ii0DOtWU16b0ucxcs/e+PlPwKMo8nNhiKgef+lOOWzVpfos1A5XzABxI/cEurW0qx52TewLxmuQc1zUDkBQS1huPxhMaLwwhPXe7XvdtlosPHTOEQs9JbMvGj0gGRoDZfhcjo2P9+L4TnzuDNTyHZ2cnQoFTKkFOyDlDfCGCunRaJhdBwEOu18zrgSbVJJAGkQE2AVesj6evIUSQHSOpA3JiE9k11bLOnUlsclVmxkYLLvH5aj1fZYjBrLpWZUIZX5WjvP8RJmaScHSuAGtYBcUomIQue4raPtkvqC/VmtqR8z7Leu61vdTszgl+B12Z4NdfBMKm1zZEgMQ/TozjCyXehAA49jNAWY3EyzmSI8Pm0/4UBuXI3oABuqGvnIHiJGFpAEC7NA7veQX6NBEwNVvzmCJGGax6nDGmYUreAxb8scUDSe1G5/OvmDp/+WMOGiJwwvKjugFNN0CTaQb/nEldCB0oVEA/R+U5uNwFPvdIUUB73h+NYZ3sp44Df+qzhT5eUFr/TreVb7Wyla3sKrBv+IZvwL/6V//qSjfjqrbLrdG4sjtmZ86cwW/+5m/ioQ99KF7/+tfj8Y9//GUd72Uvexme8IQn4Gd+5meODPD7dLTJZIKv+IqvOJJj3XjjjUdynLurOefw1V/91Ve6GcmubtAPGmA2QemCOdQBXZczkp3nAASQg1LkgN581wR7i41vlFRpZ8BAfQ9ghkgKTEXThj5nTjrPbagb+LqBB5Lskds8wdnnJ65FbI6xLE29nqRSDvqItg3oggJNEfAOdbPOBWGFWViYBu7JcYBBNr09stSN1troY960UsxggAWDciY0B6v0ewddru2hrL4CQHFAI3X3rNli7k3FcobM6qtSUOpYw5Kex+S12rmchU4qr0SFFKKVZOL2CtgHMvKlcRTUGnRgAYyl4UCaeUvC5ItFwGgYLIIGbChniFvAJTHmlmQvC66UPgcwyEgkwS/ZSMcIeDk/PjfOgodfDBQVxx+Ao/l1Oa8Qi8AZgJStbc3ZYwmTMAUhLItB/xTws4Fe7YOqSXJpSarWAnwK+MAEb5LEqdS7CxmoZKnT/KhyYReVZb3E2bPgXr6Yw8/IyzHCDyKDWWKVEvjnFLx2ZeBIg5fDwI/9jWEwylrOpM7HGi2vLAHXhQSIJWzUhSCcnUdjyPJmgySMFBQeMA9IA0jFXE0o6mIOmJ8q48q1+lgqjSXukFgwOk60vwh8r6SaoeTgXZNluS6GnTdkRIaQmOAUkNeNYR+tbGV3dxsCfnaOjgFETsC/LNtGjjiwbj9nGU+ATEKGoR2FXesrRAEV0bOkXFob7LrYd6BOGH672wi72+jP72C+vYtud4YgUXTyhHqtgqs91q49hrV7ncD01Bb85km4Yye4BmyzhjjZwBxVkhl3xJKWmqiSmW7B+Hyx8P8KBpey0HW+ch6IAd6xjDMg67hhank7XxPLKZKsbQBynWWZ3zqqpE5alvK0a11qClFKxPHqtAibbyIqCrVjwK9yFsRAmq+pb/l69ANJxTROPAMFzoNcQHQ94EQZI+hc7jBkiFOMWTUD3M9R1wywooa+5hz7q7q+RcQMsKrqQNkqVjtQH0kAlqFF64sB8nk+ZGb5LfpzmjyjzD/BDkuJ7FCugwUjXa5zSlRyPiXh6bF0XHCCDj9Xn2Yo0279YE1C8xjUznQeqGpuo/OgZgo0EwalrQzvSDu5xp8w/TRpKiKx/FjCkSVktX7kQRcSEGhrdJeKHosy8hnUHJhZ8/lauLxOm3uQIvvDeh10nCUQmRy89DOPAQMAS3+qnCSQExuW+Vh2H6KgFF+7xQHnIWNOfPigyiIDtt+y5MQxZqf6jelR9j9JNWOJH0POIzqR3w0esRIwO+RdAMt5yvO6SXVF3doGj6XJ
lB89z3Hpng0d0HsmRDtC5WSvRXxPxgHYWdw7Y22FqJXIh9UfD5Jgt7KVrWxlKxu3o5K4/HS2FeB3Zew3fuM38MIXvhCvetWrsL6+fuEvHGIf/ehH8S3f8i14znOes7qeK/u0tase9CsywQvGnQSXNbAcAqiSDbVmOg4CWelRN5AocI7Fz0IDAVZyxrJlhtwqcECkkp2U83BrGxn0W99CbI4hTDYA36THgz5i1ueaJkQcQHKSLTmppoAbORdzPhqIsvXNlPmkmaUxctZolF2Yyg4CpbwXoJI+GThsJbu8DaGoH+eJgbo+BHhHCfhLwXbNLhfAr/Fc02NaMeCnNT/Wa5/kizJIhVTLRN+zoN8C2Gcyekm+30ZlfJV1JvT6QwJBGRSllCnuYTPH+VsqDcptjCLzycf3A3koBf4UkMiBBVeOR3nNBq4CmK0FRyJ9lcE/QAMGfCEvDGstWoicQa+BMxdj3nxf4IAaCCENWqUgRA5EFMEwO16FzZcCnb5OoI4F+fI5CgAdc9DTgn5aF0Xr+wTzOZX1VJbfhQBAG8hRgHgZq8+BCmbfWBDIuXxvKQCosqtjLskQ8LOBKP1NC/hdrFvD13qQ9GCvzRgzT20MzNLgfIyZ0SkJGAg9gtaBUsBPWSBOGEQyP8eq5ufO80pFDqQBO51v/WCc+IaliqORIItIcxygrFtmZdZOH4GebILDOEinfeoIGfTUgDUZ8M9JP5AXpvmVYPrdyQzDFZB5j7WobD4gzd/lBwK843sNYNBF10i1AgSxLG4DaDEIIYCfC8IgdEDMks8J0tFEhPkB4myP/+YztHszkfXsELVmXeNRTStUaxUmJ46h2drA5MQmaLrOQWvP9cliNcVckqm847m08ZTu40LuPQEPDHrCScJYOmGXE1VcTi7TQHglTClPWfIbyIAB9zv7k9FnxqQysWI9RSSHuUmy6kKe41R6e7iKab1YJ7Lbqf6xAH4JPJDrkcCUpH5hknNkDi/MeQEM+pTYNny/sAuoU0hHCNAaUpssQy+RKk0iRzq8AWwg/tlQiNQm0SjbEsizdCAGS/lA6qPFBAYq8De2rl/QFEgTH8dKmA9BHg+kBC9Vd1B/Vs+b79HMTvUEBqqN/Cx5Yfa5wOBNVfM4FUAvAWrDdcqAk+pD62MU/0mlPNs+cK3ktL8o/8YSxZZZ+g1tw6V2cRTJ3RigKCBF8QBS0gIQxLeNAvYVigDgpLmStZf/o/1ur8EwQdKODj4XvnYke4NoGIbWDgP8MrirQF++/nqvDOVzAZ5bEgAvbM/E+gR4bOijAfN5zyqgnwJ9zRSU2KJylpog4Spml4YO3lWpjYtJlYtm7+Pyns7A36WMozvNVr7Vyla2spWtbGV3mn3FV3zFZcl5WnvgAx942VKUK1vZ1W5XN+hnA9L6fwO4AUgbmiLw0M4zi2TssDRg84VOgL0IdHMUsjxA2hQvMpYUcOzLQIdspNyEa/i5tQ24rWuyjOdkE7GeYj8QujZgv41JllA3lcpM01pgnjyq5lgOIBmJqHl0LPfU8TH227wpT80lrreAQEk2UoE+olzjrguZQbPf8SZ/PxVql8zemDM5K0+YRCe1RioBGkOR+atBKa3ZtzWtsF57HJ9UWK+ZATitqMikHQY7yGy+zUUwrLeY2sWML8lUNpmnDBSV4I9mzxKUKZXzUrWuigJ/wAD8g14jDjoFl9sdJYVYE/CJqAiyjgJ/cj1VfquPlIBHbfIiR3C52T14BkjZckY5s896AwxDMnedz9fiMGnJ4jcF0Ioi2QSK+T4CklQjjJRZDw4S2xpSCmoqoKMgdkCubZQ/l6Vo9Rq3ISyAfcNaI2O1/PJ7tPB8ofysXyA4jB5DxxhQZo4PAXcrW6vBpzGQ70LZzxq8jDDR0/SmmT8tczoM5tWhDZMvDNAXO65nGhXsa+dpPlTgLzP8OMhN3vN3NOAUREaqRgb+gEIeTeXHlI2grN48PqK5P/gIB8iJA8MMftuPCVg1c0zO
6ndgiV4szr8SaAzNcim8la3s7mYcmDZslgTAZfO+QXSUEod6MFCSgEKgZPkBPCFSrg1WuxxGj1UDCg6xb1PtuKReoHNKDAh7Owg7Z9DvnMH89rM4OHMe7e4M/YwZgr52qKYVpifX0Wyu49h97oXpqePw97oP/KkbQcdOoJ9uIqwdx/k24KDnuaKR+nbUz5ntrGCXJk6JPLzOjdFVPOkHkdxU5pavEX2V+ywEEDrEGJLE4NjawsdzmeUOA16IQkPXM6uqC0jJVspMGk7NtXdw4pzUAnw03sE7To6aepekPYfLgJ6/svw4eWPAzNakDIAZQqpk4Xz2e4H0Gqq8Qsa+B1WSLCFqFOoX2OeJ7UcuJR9FQfMi8kCz564JUAjs0wYQPOICGxDI/pIzPgARFYlcmkhVMPyQ1+UQD/cVFmwkWSXVnjXnkRLtXAb5+kjJtwFM4pv4hCnhpp0JUJvHLk03WD62Ft+qmiapWAWpY+I9In0vynhM8ugDth+re2SWXxtUxj8Y0C8AItOqf2pD33EMz0nJlRdjC74IoCUaEoNWEogUgCdhc5KAyizpiQz82ss38HeHoKudNxeaBgHrolzToOOVlt7H0OOJ36dJjlqGQfdQTseA+mvDviO5R6sGqGVebaawbF2EHtRMS7/MeVBdg6oGqGq4jS0GAKcbQCXj2OyXtX4ldXOgAipX5VqppOUAyv7TcwQyqAnYRDegDznR1esFWtnKVrayla1sZfc4OyrAb2UrWxnb1Q36jcjKDQPT5D3gfVGcHECSl0um7D/vC8AvZYfKbxDAgQph1pGrykx20w77m2UQxKUNlMp72sB1FImoPoSUNRsHO8EABuYIsQgWKDDgNTMXHBya9yyFwpm5memXNpOxBACU7afBPLU+RAbMItfzsAy/ViT1hrXLPFEqPBJMAErfc8QBqnWp4cdgn8d67TCpOCN94mkB4NPMVu7vfiFbFilYhiQLRAIKcNZrhBf5SoDrwJAjiNJXAf7lgP7iVn4o/aPP7XvapzZLHECSiBr2f/7SgIEqWcoaPEgMQQEfLZQ9yrqwh4aeZ26bthMC/CJE9PL9VLsQWicly58qq++iLGWPC5skINfqM7XZ8n2QQVsLTiqrT1ldltGXMtKR6xACQNuHxO5rJTCiYJ+OW2XgKStjaGNA3jDjXwEjP4xoaRfY5wWglAOMFhAegn1ax2cZODUMHBXZ6rQY5D3UlMGsNmT9KShtmIHKCIldm0G/nutAjSVCRGcCxBXPl1QL2yX0Oah8sU0eeU3HhWacZ3ZthA9ShxO2v+MCuGqB1V4CWXH0WlUlGOvu+qVWJdnuzOOv7J5pzo73EcBv9DvL3jDjRIPDC4F9+QzXwasZcMOB8elCAuHiwT7ifIawt4d+Nke7t4/QdoghsCSdJ/jGo9lcR7O1gWZznZOr1jdB68cE7FhDTxW6EApmmCeUgJ8B73ndNaw/ST4gClCZxrI2m1m7+7xuF9LwI5KP9jNJAlPWQU1OCgJiDdc3NQUfqqwbmhQiPDH4x7KeWUYSkPlw4EePJXpYGWY+OreL6kbWdIdS3lMSOazUZ6GIIWCVrwp/GyEkp8aRgnIo6qIF4zoV4J+MX169KAF7amHkuWX+OTnGmGu2jJEknVM+6nNluQp4HBPTKgN+6ucXUo4olTZCLGGT5AcLKE62fq79/aoCwDWSo28QqzqPW3uPytnb+snaNk2c0jHHzzlxKgRh+QXdJ1jAb9GUJafnoHuBMZdpVKrS/D+B4+a9tFcjV3y+YAHCpXGl/Rspq4To9V9miXGaAC25ZoOEvfx57k9vzpvvzZhYq8t+U5OSFBRTwM8BhW8y3lDpA+cBZxi56m91ANUAUEuSgv6oS6A9NcLyq+ok65nAPlMTcngdnDARCbmOYcLszDlLT8i56uvy/8hAfpB6gHdIvuQIbeVbrWxlK1vZyla2spWt7Gqxqxv0U0sZnQNgjygHWzWQFCNiO0+Sn2pUAbBSJXpcmA0c
Sa0Rdcg9b6CT6WZbv5uKo4eUSckBYMmcbKapILrdhGsQoAvKSMuZvRpU1mB0BGfaai0K3ljxeyFGyQhnoA+IppYZJGMSDO4I6Yosk02OgZgL3rdmMz8PMTH3WvmNtg8pAABIRmrFAXJb009BkknF2fW1d9hsvNT+y0DftOIN8sQTX79ungNy/TwDDWTYlyawAl/Bm41pBKEXdkIf+Ew7CeBzn9lNdwZHvAlQDAP89nwsGAOMAzJWytPWhqE4HvCwYzEdC0jBA93/WoCK7HcMSFPUrEyBAC7oESW40ws4GqU9LgAuavBMQDojiQUgBR7SsTVj2b5m2X0qYeYEINdrJFnnmv2utdksqy/EXKPNgn1W4szKQymzD+B7RY+lY3RBdki6yI1cDK25NwT1UmjVBK1YDjW/fpgtBDmKsVYy0OzvLpeRWgwEDwNDBUirQTDngD4sZtbL6/lHRiRah0BfCMzqU6Cv4+zy2M1HOsBn1kjHQeXY94k1QkGy0Qe/aWtBcYDJyXnlkw9Q2Ve+1joG2j5ff66tqFJ4rmBcEpQJuAgI8mdiAl+JYsEU1LniIFxgAKxsZXcjI5g1ZAjykwFu9CUT9NbvA+DPBfNcWSECIoSYGfo9HNcnrqcM7rkZqMu15WI7R5jPmOm3u4ODs+dxcHYH8+09hHmH0Aep41eh3phieuo4JieOobr2Bvjjp+BPXocwPY443UJbr6c6ZHof144KcBGhA0l9QfUvkpQ7wOwpTfCqJqkuW+qjBMIYlm8CDE2N5TRVmRq1MnknJmRUlQX2s7oErqgPGA5ljCtYUDsG+mpPaFxWMUjXcXj+hb9A0NpwC+Ycz9EAqKoX/GsdL0XihrA3tV+jQwb+YgBFZ+r8AYTM9uvB86sCf2oM49jXRUFB6xMjr4u5vvLw+wK+ERU+jJ3T8xUbWPKpXAKc9OPJP9V9g4BvvfFfAL5uHma97vN6SeQYA7Vrs7LwbS1eZdonlqoZm7LXiNWU6yW7Kt/P6jISUn3KKAodyQ9LSVi8D9A63m0an0OGn8r5jydBjUqfL1kuC3l4NZMQx8/LMZaVMgS0F9lYAPBSL1JrXvYh19lTP3IMhLPsTpX1TEw7vYe0jUa+nPuWWYYVOUVxuTak7OFi5AREO7aspKcy/GpHiwmQw7qbOl+pokYl48Pew12LxPYrLoLcz1oP0tT1o7rhOW9kv2VBVtJ7lyQRIYg6DZDkOvV6e+Mv2b0VN4L7iUi6F0Zp4WJlRla2spWtbGUrW9nKVrayT0O7ukE/UwsDAMtyAlLMnYr3dCNUMP4O094z3ylYVprdHXpJAe7SbxSBovkB4nyGuL/Lj22bNlXkPUvWKcuvmUjGbZXarftE74CqAFeM9IkJLgMZTLKgCEvuMFB3YPQLdSOmwIT2RBSwkSjL4wG5PpoGzjXg1BlkQWtkAEDtgTqxlmRzRgTnctH52hOmlWemn8v1ZRT0qxyhId5Au/19Af1mHBgKfQ7IwQBKkrlMGljppA6YAMDkRNbHV+gdy8UkAFA2933IrKDU1xiAJCiBGQvKpGD/CCAzNAsMJCk1O/bscxvcMI8Lx0/BBmGj9l1xjARMp/RaBdwqeFfBO4fOyg+5zCqNkgGv/WPNMtFsV3GAJLc3ujKgHE0bEsNPs9+Rx/MQ8FNgMaTPLQJ+yuwbgn3LavjpWLWAnwWENLCqbysopEw8YDFokV8fHwnKJR6+P3afjzHOFo4nfTSMhaSA/LIBqYFtX4nkGrcrCpuZAMS+SxnpNtkitvOS1ScSngn8G7D7dB62MssRAOmc7Fx6j0ztmQWzbZA2OplfksSngMOdzIN7bV8kKKhpXUXnSBIRKIGAncwPXvufOERvWZhAHAC0EVrPxs6Td5mNgDNHfvyV3TMtKNvN1LQDCsAqJZvI/bJU4lDl0Afjxa6nUf76yHJwuh5Ru89rRDtndt/+LvtUs13Md/aSrGcMAc47+GmDen2KZmsd01NbqI9vwR8/Bbd5Aljb
RJxuIUw2MO+5Jp6COmlOtQw/y3B0MOtXZkbFmoG6cXZfrqmmr5H0YUQNuHzeWb4xpvmKUPodGXCxfcgHsDLkFuxLkoOyNiu7T/2tYa1WClILzoAHkYgVLVwFVIf4Mwk0YM8vDkEE/Q2dx60fAzDIp6BJ+iMGBl3FgDCyHGNyY2BTPLIfkoA/AkjYcQEst87KBlQoaKi/p8CfTc4qjm0ACgW608fsfQJA61Xqa9F5YbM2hZKBVTFI0tvqSw3BI6Cc1/V1SWpMYLOOV/C+JY/ZibD9hOnn8zaQP2IYW6mWX8y+F6JJAgypjreup5blp2YlPVWOUtdaXm+dJNvk/Q3pPTlMLhomIhy2Do29p8lC2pWE5F8mmU0gy3zKB5OfFkfkcDXhwSQjLUgb9+qvZ5/XuwpwlFi7IvuRxmvuPyRmnx17BeA3ZEKqKQDtKk6gqBvEruVEV4jfhRpLlW8Gdf0wWctsUZHjj3Yvbv4iubS/1PuGFVaAGAcJBwPAz/Yx35t842pt8dTUK4H5rXyrla1sZStb2cpWtrKVXSV2VYN+CuRosLd43Ww6xoqaL5jIDy2EZseAPwAxSB0ScuCKbUiZywgdA336183LDZXKo9QNy3xaiR3j7HPGb0bkxvY2w6C/jS3n2hv8tzfv0ceIaeUSm2W4ueLMVkZMYiSp45alh1RKUTPM1bxmXTvigIuRSlSQb1o5OGIwT+tSKNBXOfBzJxnoBGbyzbk2CbV7DPZ1B1xzR2uDaT/UDV9333AfhgZwHW94fcPXKfQMCFIHVA0HFz3X4uqlDyKAnvJ522uxzBwyW0EBIQvSqI1J9tiNrl6P/IWRcTsG/AGLAUcbeAhG7skGIHWs6biLAfBcq8g7hpqjBBc0AhYjg4ARNFp/hCiPyYU+sxJSRAB5CfQpulUltkTKfg8Z5AuDAJ0yuFR2yr4XYWq7hcwUbIWtpv+3pkHrXtiXOm7HGGDeBCaWsT4vFviNS961wN4Y2Dc2JpcGQs33gBGGApDnTICZFyQiZ3ac2EcgB+S7FnE+43vsYJbBPmX1BSN3bGo9xb5fqK9aSMcZeanhb1OMiCawpqw/MmeXWKABRd1Ry04GDNuPWGrYEzCpPDwBdR9Ri7xwdGXg19blBAZALWUJ3u4ilqCVrexuY8mXCSl5BEDyqVTyD+Cx7wmlxF4CjAbzhzwv2FmxfEx1Pc2xojKI5zOE2S7a3X20u/voZnOEVqXWHappA7/WoNnaQL15DG59C7SxCbexhb5eQ2zW0LsG83ko5v9i7bXrZ2pkAJxmYlW53hxVC/Nh2Yf9wrprzbL6AlCsdwDgoiQPQMHBMtit52ABP53HKkdpnlLQQAG/yjGYlq5xatCIr6AsPPEJUGG8tutwHgfKJLsxEFABmGGQO10Dhyg1EeGqLA8IxkeWyfxZ4E/7KoD96RjB9foGPoM+LgI6OZFEz8smajkFKoPxxfTRgBOpXjG5tHdJ/rQkqMgnobWiE5DeDxhccXHMFddtwPaKDgmoSKw/K0WbftmYsriikZU1iVYRkLrI6p8hSXtawM+agn8q7anAtHP5HmTozyTC2fvRPC4w2i7FQmAkzfrByCD6wscHLxYMULPmJ6AvBDOGzRiQevEKOipg7IkY8HaRRWEIoyw4T/keH02yMPOWreWX/nS/XNWIHQAnMuqhL+9fBfpcCfqhqkShQ0Bjw1pN4xpYAMW0lh8D7sK5HPhNQ8DPJsHp/eqAJPOpwN+K6Leyld0zbD6f4y//8i/xuZ/7uVe6KStb2cpWtrKV3aPsqgb9UNUcHEKDGLpSRkdsrM5UqlFgWSY0KC+uG0oj11QEsBLDxATEQgdoRvrOWX7c3c5tqRvAObjpBmhtA259E5isi7RnkzZOiAGeuOaKbrCXAinQTXgpf9gF3qhaGaj1hs+3D4swQwAQRfaz75ZEUszvqXH2rmM5GrPJU5BEWXyegMpTKkSvAalGAlAegZl7HQN7KuVJfZsZfh2DqUEAhWTO
AbPdLEPjPNBMUnZzFCAwOg+SoAd1B4CrUPsaVdUAdYWusjKRcRDkiGl8aDzDBogU5CvAP73u9kNgkGpYs4X7TDv4AgGNJdnLC98NJuhg7wNgUeaTHChKQMAx+OddBeelnoujFMjpk0xW2Ud5TIyAXAoqOgfAZcYXkJkgIv1ma/UVAVAqa624SIjEMqOEMoin7dPafVobU0GeYd3JIbPPE0urOQdMKy8B0yzzaMeyXvvUriVAn5XdTX1ftHfYafq9kd8Y+VwAimAnFcc4/LsphqVsAC+gryZUaJDKB2b7Kbs5RgRh8SUpz45BQH4tM5wPNQX2LKsvBZ3k/1WVxkgOnC6eTYQmJojkcGDG817b46ALBdOvDTGxEzQYWbsA5witgH21i5hEx4kNAv5VTiS4CKBI6ItfRxonUSKXCxKyd4WNsKuO/Pgru0cadS0n2fStzNtVvt9kXOnV95YxNrZWGRBE7wI7N+U1JK+LQ1Nfqj9zC+a3n8V8h+v5KeDn6gp+2mBy4hgmJzYxPSUMv2Mn4LauQWiOIU630Pkp9juucayB/CJR5RC2TEqKkCC3dZM8AUnSTxNtrKynrn+2j0x/KODXD9ZSlVePAgKphLAXChKJj9gnBozxrTwViUhe5q3agROn2lkGJW0bTZt5zte61TFLcS4BMRfmHN1hJPaZBUByYt6oqb9CxBMpzUGuQu0c2kCpo6yPUDRFEzMgvhqUqVb6DsPMJQYFy/rIGVBFZoQrCKPttL4WAK1ZnI4kAEmsJoCv0MMZGUwIM5HBH01cKs5nCMaO9dfQDIsv9Xc1Eb+4WrxeRWKWM9KjVuWD/z/resz6gLbnRBor7Vk0QdZor76+sOhr70Ten/90X6AyoIk9ZwA0+6djKbFDh3PNBdanJA0OUaLQewoCqCP7tsU9aWr9Whab+klJIteOBQVmBQhOiVUA+9nOASD4iJSwZZmm6v8l5q79TXP8dM+K8xEdciIk8rHQIPtWWn5CTV8HmN3nPStwKFAsTFFU08z0G+tr9RGlrd7x+YG5p2lvm352APbpUqB9z0BoBg8h6V31lUD9Vr7VylZ2ZLa9vY1f+qVfwqtf/Wq86U1vutLNWdnKVrayla3sHmdXNeiXwLJuzjuEEUc5KjBHDqgqUMx1+w4Nww43j0Mnf5gJLlndoZsndl/s5kniiKxEyiTLesYhy8/lTHgv+jZJ2gSDjZ5Yr5m4EuTWoLfWZ1O4KoFLjgpwImfuZkYMYOSGaKT2i2mH84Ro5Gg40AQQOMBUqUyeYfhpYKpCAELLAJ+CfRKMon7O9UwE7EvSgX1fsIUA8MbUhQTqIvQpM5Uis9cY1PKgEBCrfCLUAdEHVL6B9ySBDpN5T5C0cg3MKCMy96FlXJZ1/Aa1KfQ302dHNu7LgjqHbKyXfseaYUQVkl6QeyFl1ncMjoYO5LgmIgcSuKUeBgxFBkWBDB5pnZNhWzU4QM6A8yb4l7LGscjsu1TTDHo1PZ7KewKL9WSS5JQw/LSmEgeiMmuidmVAdQjKHWY2EGnbZeszWlsG+BXyeId00Fi7lgXXyy86IHB2OmkkaSGwNnIcDSKFQ8ZkIR9lwD1tn89sP03QiMO5eGxeJifMThQsCgX4hv/PzE/+p0eEJ8/PXYSLXEMqzYUSeNdxzoE5nV+X9+kVCEutbGV32Cj0pcwlOQxrZVkm3qFgxMBsoNcCfvbYQF4fKEZe9+czxNkeur0Zut19BLl3ybGsp68rVBtrqDamcBtboOk6aDIVKcUa0ddSFw8Fk1+DzcN7NEkda1uM/Dr7WPn75RcNuDUEQgdr+Ni8oOCP9o/1FZjlxgkH3hHX3pVP6VSc1inDDtIkJAWpqC+lIBcAP9NebYMyh3KS0AjIq3019JVFIjoOWKMLa4lzi30Uesb8lMlNnEwWI7AsnWTIJB0y/8ZsTM7THkvZfpZ1XwA9Y/3nzPlZRpTI4wboPZB9bpVSD5EBwMI/vJj7zNw/BYvSSjBawG/sGsKw
99KjAn/yXmBfStfVsb61gN+kciIFLHW8XZbRrpwFUlVlYXDOh/kTh9lhAMqSMTxmOm/ZWn7pJ/RxrJ2DfSI/5iSDws/WxDAMEunSvX0I4De0gX/N/pwkPYaO90ahT4l3tvZgSsCqqiTlCVelWpAF+Ddyvy80BUjzDwT8Y+CzHDO2fMUys/6vgokrW9nKrj676aab8OpXvxqvfe1rce7cOXzbt30bPv/zP/9KN2tlK1vZyla2snucXd2gX7WGWK9xJqNunobASc+yQJD6b3aLUXzWZjsr2yUxxUzRcwnoLPxO6BAO9jkwdbDPUlStyfR2PtXwo/VN0HQdsV5HbNZyjQ/nc6ALQJP2UZSzW/uu2JxFcvC+QRcJnQB+WsdPM3OT5F+RNcpBagU/FOwLMWLWhcQctPX3audSJi4JywlYBE6spJRKeBIxq88RWFaqm+dseGX1dQe8GT3Y48z++UxqgzGAkOqCDc15AaUYOIgKElR1qp3oJFtVZY244XLdnQQ3ewa7yDeoPANQHYRBJXVMNHBkpXXG5Be5vy0glANEC2PuMLMb9+Frl2rLAnvIATUEZnHFbs5BXZ/vi0K8S4MIEhQMWjNnEPcp9uMSOAWQaproc5WGUgnZMRbhYQE7axEoss0V/AFKwI/fiybIlGtO2vHOErQMXtcuy9F6E6iygdlhUBuDfkkBbr0fByDg0JJ8FMrPl58RcCq1gwbv52MclkCw1A4ZOwtm6+/1PYa1+DKgZz5nZTwVEDQsvzIxoiprycj/tQ6k1u/Tx7bXv5DmRpWLtbW0tDf0OQc2WYaqlwx16P0vmep5Ts19avs5JztcgcDUKht9ZXfUYsfy2gIKRd+Y97LPU9gFxoNdH3Sqs/M0qWSd1tWT34rtHGF/F2HnLOZntzE7s4Nud4bYhwT4NVvrqKYTrJ3aQrV1XOr4nWQ/q5oiNuuIzTrajucEyzLR9bmQ5dO1SVUCiMR3cMLQYn+JZG7wPrOSFsBSV7rZKVEiBnjnEAW8g+PELX0/fQwmMcQBVdRWQxQdAAp57tE1Sv2uVNuPwNdUGZzm+ur1W0hwswASmXq85hraz+UEHgOIxMi+hQWKYcBBA6ZieExhmVIAM8y1LXDJvxr6BcOpdrgmFz8Ry7EwBP7SukmlfGqqo6YSqUMQVftCf8euUyph3jNrTqUyc5sYzGRfiOspUpCxZOv1Dc9l2H+W7Wb72VfFdR52TV4Tc9JMb/YTluU36wM6XVv7WEh7FjX8HKGpHJrKFSy/acWPlbAALdOPx+tA1vRibMxXtmMyzV8oJD75+pKA6yZZzMjIA0AskNj8HevbL0itDti0BMAy/ngu4P2mV3UNe0q4NKBPc7QQIGo44HuwAtcy0DHUAC7GQoaXE6wkuUHBvnrCz32D0KwBrkIHaWeI0NYyKOkWfEsF5wIAhIjakfj4ZJQ8Fk9ndD+h+y3Zg05WvtXKVnbV2Ww2wytf+Ur87M/+LADgxhtvxH/4D//hCrdqZStb2cpWtrJ7pl3doF+zhjg5liRaKHS89ei7LFvkO2H71Rksk01ZyjoebKKTDI6vc1asfA59mUULciUb7WCWalsBGrj2id1HkzW4yVraTKXNuK85GOCbxVpcdqPfzbOsDWrAZwkezcy1cjQupSNnCynIoZvYDPjpZpdBg5ikjLyw/XIWeZY4rFwGvtRsdnmVNvAjAadunoJk1M2A+QHC3k4hFWgzZ2NYUiNGgb+qlgzVmje6IUgWq5fN4iHSVPlHAMkmVzClh8rLZPkZwAaFdGOuG1LTtGGAKAwCAelgNkCTN+8F4HIp4IueDhES785k7uf3FzeYFCMiAgddBp8fSpQRwIyIJYGk4rsDoA8QNqoAVsoaVBbFEPC7ONhPvhPGgUJPlMA+AInZZ2U9LcCtY1lrIemjI6DSYE8aUyIVJeeq56lxCWYdDEByAwIuu7JDwG8Y59DjOtYxGz3OsjBCAUgn6awcAL+QrBg5ZtlS3eSxZdi2cQDU
kwX65LPptaoW9p8B/7TWqg0o+7K2TKwa9FRhHgTwC0jSxkFkx1juNY8HluO1810J5DrKY8N+RgPANukhsWzkSS1s6lpA48loAZ6VrexuakOGTQwZeDHrU5GAhJGgtFm3Uh0wE0Dn9TQDfp7AiU1pDuoQZruIezuIs110ezP0szl6kfX0TQVXV2g2N9BsrcMfO5ZZftN10NoxhHrCcwQ5RDMzJjk5mfsRspSpBfoUpNH5vOtjYqIXzJQR0IVZXT73xZAREzpUrspqAg6AqfJqkzc0EaZMIODAuR41SSRSXrc8sW9MfZf9R/VF9PqOyCQX5xT1M4sgkfxy/rxht3EyHo8dQMCvYf8o4GdYfrkumSvHUggAdXACQjNhin1VAIu+zaCVESjqAV+M2WvhMOLPKTAVFgGqoZpB8vdHfiMnCLFvEoDMsHcOMVACiFJ/pAOMA132t9N4PoThZ8EmTRhUdp8m0XQBOYmmD7K2ZulPgMedSmYry68RKU8F+vRxrfJofGb+KeuPloCbuW8142bJ+SwDTszrWluP+9It+FW6l1qmpLDgFy+VIpXxbD8r4F+kAETHwK7j8WF9SOtbHrpn0OMOgT+myfIcZhIRVLo9xsC+lrynvlaRjFpNEasJ+1muQR+BeZ/PSfshQOoTIrMT0/QIVdLgb0UQ+mDKGgwsqaoAcAK0BsRCVl+lYle2spVdffapT30KTdNgPp/jp3/6p3H8+PEr3aSVrWxlK1vZyu6RdlWDfvMAzLloEr/gKg50VEDs5wVIFs2GnAIDgegpBbQLIEjBOGWS2OxMNQVPEoDYA10rQJVh+TkPqmtQVYMma6BmmuojRKKcASw1/WydGN4kcZuY4TeoEyPBpIgsB6OynvkYudUq81koacmjBfwUFBmTrCQJcikIohnlnkowQ+V5CGCAT69F6Pn/9rmcV9jfZUnU/V3E0Bfg6YKFUGSnAm0CB8iFdF5UN3xNKsloXRYoAdLGPz0X4C8gZ5QriDrMTj1s21kAKhrQtNcxdXKVAxkBOXhmg4XL2j8WHBF5xrHvFMG0YeBAPxojEE0fHxZsUgldcqkeXBGIMYEIIMtGATm4dLHsvmENweF7Y98b/t8Cfkmuych6FgC3M8C3BlMp13EqQDJn+pccSOakoRRccY/a++wQNuMywK84rjLOLsCKHGWgRlOjVOpSwbyXnheNcgLQ9Qy4A4D3QDsHgkcMPQg1xmwU/EuA32LArmAnWOaEzJ1cu1RYfaYmZG9A5LK/CGHQmVqX08k1V0vyWkMGJTLIn2STkZmgGsSsVtnoK7uazM5pAM8FrkpSiwvrCkbWJzlGCiIjA37qr9ikGTufpuPMD4COfYN2dx/dPtfxiwNpz2ZrHc3mOty6ynquZZafJAfoHEAwv4uc2EFmPUxMYt8gkkMbAEjb2xG0KNhkD7UxMGIMcBB2PX8/ZwxpNwN5vUs+Yep2BVd0Ps91vwrAT5UVLAvRXt8Lmcy5FENRJ27hfGHmafFbiBRYIVDM51+AfYO+WegjIDEGVUrTEbP9PLgW5GE+g/oXiWlqxmFmn3JdRL6Wg3aonycJH4uAuGFKja2RFtyU3zys5xPYhIgoQBBRl493ocSvMcBvCECiBEUtKK9tUNnOgu0XYmLMt0Fr547V8uNOK2Q9vZX3dJhWPkmnV/Knfpb2r60DuRTgOwJzxk8bmvb22K8uJE3Z52a8AnmfYPebDOzGBfDvQjUJ7T227LUIiCRvEKauqdUcnZH1HwGrjX+liVXRN5j3WUEh94EkP4QonZRlSPW89Z7qZc7VeQv6NXPP8WGoAP5iRCkBa/zzu9xWvtXKVnaH7fbbb8c//+f/HH/913+Nd77znfjFX/xFfM3XfM2VbtbKVrayla1sZfdYu6pBv7YP2GtDArp088U1Imo4AprpNG+6NAhlgJckO2SD3Sp/Y6SdEnBIjoMmPSXGWtjfZTnK3e3M9pMggBOgz21swa1vch0/qTEDCVarNF0bsmSU
A1KgmBQkMpJRsZog1lP0VGHWBcz7iIO+lKFRWaMMNBAHNkgDCllei4Pd/MFAEbWBsVTekyVaUASyPZWZ5WnTq1KrlnWpAF/XMqsvBmA+y1KeIouKrkUh5WmZfqaeX7R1w4TNRwCi60HOAQo2CJNI+xmuyoCuZLIqWGUzsbV2SYwxBfNt/TXbP4ANJOrrJhigEqYqZ2oBFTK17oizcqMD0IfMqlgS5FkWGCg28cpwIJ+Dbssyhkd/xGQtA6UkmAQoFOSLAnSlwKkBAhUEi8jBpiyvFYuAk9qQ6aCBBA0CXKzpcWqfAzt6DWvnFmQ9+drpuKYUlLLBVL2mRbAnQAKdkp0v19drDZ8B+De0QWrByHnwI428pucUONIzKi1qP78wPi0oPSJPZdl/xbWuAaoqoOuEAdFndl8CDcbB++UswFwD1QYsNRhvGX4dVWj7iIMuYi6A30Ef0AVgb94zEyFEc91lgg0RtQEXkzyedwn445pDLrGBHBkJMspsvjIIhcRWqB1A/RwH/Wz8gq5sZXdHE0YQ+Sr7R5IcRWEQZCae57RuG4BizSbHQeYAl8ED8UsqCdpWhJQcRH0L6ueg+T7C+bPoz51Gd/Z2tDt7aPe4lh95h2raoNnaQLO5jsk1J+DWN+E2NpOvFSdbCM0awnSTZenUVSBK0uk+SQoSYuDzjQL29XDiU4XU3mWW1jTtrzRHElK9LF3LdQ7tzWxPDt5XvE64rCWgfkgbYlIYKPIOQoT3ea5fqJfc5xrJRYLKEPALARDgsFguxB/ixlhWVL729rNDWU9Igp2uM9H+psqDD4BQ/a30mrYpBiBSYvsROTSuShKBur720QJW5fOcYFT6ECkhToC/MWNWqIyDkJmoyb8dyt2OmbIZBRuJxD55pMV6gyEymBkiWPrRV7yOapIijdTsG7kuaf20AOvAhizcgFzDrwvMbm0Ds7tmXcBB1+OgE2lPAwbqfkL3Lo0Afo1nZl/tCWu1w2ZT4djEJyb81Ds0UtuvcXytFahOY0HHiwBX+fxMMtAgKedQcGYkyU8TAgal5nI/gd/jWnSEhY8pAKz+lPWZtD3OceKp7iXldZa+7fk8lNWp3xmay4y8BRsDhWVfkfqvEuAx+HRfJwUZHS++4qQJ5xEnx9DBYd5x+Yc+5trvRDnJM286je9u2uJJ2JSBck6iAdnTV6ImooqMOiLnQ1g/S/ae7Yrpt7KVXTX2sY99DM94xjPQ9z3e/e5348EPfjA+93M/9+LqzK9sZStb2couyaLsJVa2sqsa9FPJmTbkunVeNtE2W9c73lCl/a6TjaSrZHNmmIAeBqioc7A5hiIQoxJUiCHX8ZvtIbZzBq6CqWXljFydgk6y+dIgdpI4FMmokH5sEJzRIIwGvmMOOFnAT80Gqng/lsM6hCVF0M3PaeH4IsPSyt/J82QWIBjIHpGR8tRgSZjPmN1n+q2U8BwAfjBAwViWutYJK/4cSw8O6n8VtcG0X8klhtB4fy4PCAEZ8BtOsBZ4LvrqYswCm8PfvZhj2ACQXlzySz48PL5SDNxi+03wMn2eHOCDyN9KHxPXXkpZxeQWwmqOclYvMJ4Fr9+y2b8E7nPLpGOgm8e5PoaCvZqBbgAJ8Esyb8jBPRJgh1IbRrK6B32V+jik+F46b23fQnA19cMiSGffO+w1GrymgbyxY6avjQB6SwG/kUCSZpKDHFDnAJoe32bpl8xcAKEXCd5xQJCqRurLmOx+yYC3tZFazTyXP85ER5IeY+afAn/8F5Z0sjMdOowlKQM0tY8y8KfPVeq4HrJsuhFm751skejCbIHLPP7K7qGmyS9BgPxunoLixZhK66ZfWAuSBQAUAFIARoK6egig9Bn6Oag9AHUz9Pu7iPu76GZzZvnNOzihh9QbU9QbUzRbIuWpEurNlJOrqhqopoBv0GnyDjhxiWLpzxAAlbwDgB4OncwlSVbTyPg6syYAeU4vZPnSm4P12zkpYFv6AromWPCUBODwROh13bJ9
6zLDLTGOSev3daXPNcLoWTDTjjGmPgavL0gsFofq+FwE4KBh4y91blJ2kibvAQVLMicScU08fa4MP2XPqV93KUlDY20Zs1GWnzVZY93gdU0qs/9PP6WPxL4tBUClGkflQzGYmymPuwutB6XMuvQdDAiYmH/l/0dl1GWt9KKgkFh+3jL+NIlwUT3B+iKWhZv6YmgF0DkYi3acDr8m/Xipq5nW9i2PZdqrSY/WB0KAahoX99nQl0QpRZzfOATAvIT7qbwfpU/Nb9h69rHiBAj1r7qQpVyJSJxMAUEjRHKXLUQR/rUgPjl4kmuiE+fAgszNCBGReL4GYOZsI118sfuoI7SVb7WylV26ve9978Mzn/lMPOABD8Cb3/xmXHvttQCAqrqqQ5EAgK7r7pTz2NvbQ13XqOtxxZw7Yn/8x3+MG2+8Efe73/2O7JgrW9nVYtvb29ja2rrSzbjL7I1vfCO+7uu+7ko349Pa7i7A61W90mpQRkE/DvbHVG/OE6FznJVpA7jeeTh4+LrhTQPAdfnUUtaosK8U3FPgqndAO+Nszb3zCDtnEGd76HfO8KY+9KmmXDLngCrXiokVF0WPNQel2j4mAA9Aqh1SFkcThp8EuzvJAE8BJwf0gRaAKjvOPAEuamZ0TDG6MkZA6TUF+FL2uLMbLg6CKZuQt31iyhjSrFeVJg0dqJ8D7ZwBv9kuMyM7A5aOgHkJ8OvkOplAGjnPMp7OcdCvasoA4GQNwTeI1QQQiZpYTaQ/m5LZFzLQp61YFhwa3r9DwE8TX3lMjG/gownULM1WVjM1KceOVTbGjb9vP7esjo/+VgyGIRjzdy1ApFnuNujQtywlVLGMLQcxI+A8YsVt8JKVnjb+QCHRNZa8q8FXivq5iBg5GMpZ86U8k3NA3yNJdjoQQihr+elvMdtPM4nz+E/HMkzYwgbBKQCLck2Us7JtgNLiTnaIjYF0hwF+BYCn/9fACkq2r7UCvLT3q56TZaMOx54ZT0WdJvve4LyK7xswsWiHHdvab67KDF0FGYSlOw/AQcdrwL4ynruAvbZHH4GDjh/bPtcdCiHLvFlJ4yCd5UcGn5X11HlR50Zl+lUOaDz3u4+mhlbfgrr9xQuwspXdTS36CWK9BuoOEEMHF/YB9EXykwXhSSkw6QAmWUcTa6xMYDQMWQGo0M1B3QHcwS7cwQ76nTPoz9yC/uztaHdn6GYHCG0Hcg6uqTA5sYnpqS3446fgNk9kRYWNLYTmGGJzDGGygb3OMFIKUJ9SngIAzINDlODyvA/oA7Db8nmw76OMFnYabf3eGKPI1omKwDC5Z+BPpnnW9pWCc6b2VvSBj0kp3aSY97XHdY639fuoPcj+1uCajPoSvazhDgWIlJnWfhFAGoJO5pjUz7kGpHaDXVOAErAatsW20buy9lokBlPsd0UCs3IVAhEzgxJbqGT4KbCl/RYXVyn2LwSkjsiA4bB9F6p3u8wIEKnQyOXWCAvgh/U3Ehjsw0LdQgCL8p2H/vji++onKOCn96my/FTO8aBjpl/bC8tP7hNbzw/IighFHT/PNfzWa4/12mNaOTTC8Gu8S6x4GvFF7HkSXHGP6DktyMVexHkP61k6vZ9lbScymzLoeGC/M/WZXJAisW94DtoEYdyxX5N9sEJuX86NJLmpOE99HkJOKNNzG845Fnwc+m0YAKPD+90ZxnNEUpPpAs+NOk6gKijih/to/XOz/1GgXn5DlS90rzk6ppH3VME8t2AfdZwgsrKVrezubW95y1vw1V/91XjqU5+KN7zhDVhfX7/STTpS297exnd913fh3//7f4+HP/zhR3LMGCNe9KIX4Zd/+ZeP5Hj/+3//b7ziFa9A27Z429vediTHvFz7q7/6K0ynU9z//vc/kqD4u971Ltxwww148IMffAStW9k90d7whjfgH/2jf4Qv//Ivv9JNudPtwx/+MH7kR37kbgX67e9zPGxtbe0Kt2Tczpw5g//xP/4Hnve85x0ZUPfrv/7r
eP7zn38kx7ocu/NS1e4C0wxolZusJShjL1KQzX5v/roQMRfAcNZH7PcRB6hxgBr7qLEfPfZ6wqznz/TIAQ0O4M7g5ruIe9sIO2cQds4i7G6jP38e/d4e2t39LFGZGmICBLq5Euafbm5UrkT/VD4ITjIufZ0lKKU9Cg5q/ajGEyqf2Sdj41VlikgLolNmNimwx5I7JBvy/Mdyd1rDb3wAUdpwmqzXUG5CYzdnVl8/ztqLfZ//2rkExPiz5DzXSGymcGsbcBtboPVNuGMn4DZPwm2egDt2AnTsODDdQKzXkxwq1/iR+hQVs//S2AjRSBtJ8CMsz2S2NuyHIcgSjSQhfMPtEBYCBITkWo8+gxsawBgD8IZ/xY8PgD3zF5N8WSVMU6lbpH/pNakzaSRoFWxh6dtq8fcH7DCWdu0KZiczD5jN4RFQEYysUznW7L2gQLM+H7KslpmCNbXLY3dScfCpdiwrpYCf1gfh+Ft5UDsGikDLQmDUXKakHbaEKXeRtlQKVA9v23MRlsBo/X8CK+PiNRwD/CCZzkkyV/50jqpqxHrC0nrNGsJkI/9NNxEmG4iTDcTpJkKzgdDIPWr/pBZXepR7IzF0vcjExsze6wP3SdvHBPS1EpgM8n+VI2tDwKzr5TPyPAR+r9fH8r5XRpCjPGfqa1yXiIrAe5I2Vqmvu9rG5omj/lvZPdJiPZE1wgA9EsRm+c1WxvYiYy0xdXTuN2bZx0RCDhaQys134Q52QedPoz9zC8KZW9GfvR3znT30szliYMa4X2tQb6xhcuIYr/WbJxLYR8eOI9RriDL39FQlNhL7WJTWAk340r8oANHc1AVlRpiRhkReb0ZrxwFZNcD2CSRIbQABmLWxWCP7diFo76WvhjVlK0J6r0IwzOI5Myb7ubAnu8XEIzPv618yK9EtNae5pleVantB/IWeKnRw6CLQRWZJRk0G8VUhLxlphG02XBvtmmPamZNR+vynsvdGeQNABviQAbsh4HdHbJQgPgBS4vA87fmaNirAVNYMzn9j/rX6XsN1N6l/+GrB50u+35L5Opi+6UNeS5XR1cpzZc2HqDVzY2IDDi3LoOfSAE6kslnKk0G+vK8wYLXML+m+GPoh2qdmjBaA3x1Zn0b8s0QOHFyHUPQZyiTLwRhNCY/2POz5mLmhOIaeoz7av7HrObhHFvddA3A6HYuTp5Jcej1BbNYRm3X0ruH9cRdw0OueOaQyFIV8biiTVsOgT7QtZPYEFDp4hDyPIeQ/ec3uDdJ+ICU28Nw2Wh/9zraVb7Wyu6G9/vWvx5kzZ47kWN/1Xd+Fv//7vz+SY91+++14znOeg2/6pm/Cf/7P//myAb/v/u7vxic+8YkjadvZs2dx/vz5yz7ONddcg4c85CF4znOek4Lal2sf/OAH8Z73vAfeX5wy02EWQsDv/d7v4e1vfzte85rXXHYw+6abbjqS87z22mvxrGc9C1/2ZV+2QFi4I/awhz0M//Sf/lO84x3vuOxjAcBHP/pRfOQjHzmSYwEMSr761a++7ON8/OMfx9d+7dfiB3/wB4+gVdl+7dd+DX/7t397JMc6c+YMPvWpT132cT70oQ/hox/9KPb29o6gVcCjH/1ovOQlL8H29vaRHO9d73oXXvOa1xzJsQDgO77jO3DzzTcfybHe+c53wnt/JPcWALz97W/Hm9/85ss6RggBL3zhC7G+vo4PfvCDR9Kut7zlLfjoRz96h78/m83wX/7Lf8GznvUs3HDDDXjJS15yZGspALz5zW8+knvhcu2qZvqpvj8CgSimLG0gS+VwNmaZrdnLd4NkpkezcbVModopKEYsERpDkp7CbJfBvvNnEfZ2BOybIQp1ztcVnCsX6tj3gBeKvnHuQ+TfcMRSKJl92OXsSK14bjYEmi3Mn49wntAHVlrpQ7kZHQY6dCMbIsFJPxBM0Jpy7YQU3Kac6a7fX+Y4aD2XhYBM6ICOA1Cjcp7WwkDOEwz4wXmQ98ymFPCPJmv5ea0g
gQAE9SQHqRQ49c1C3T7NXl0Ad6Sv9fVlEp9LTTdwzgGoFvPKDUi3tEbf2Mtj7MGBLRzPbijlMQ7eT2cXtXaMkdOKBIoi++ZUamv4oxrIcCnTX+uXcHA113+KJOy3YdsAgJCyqbVGXVCEPEQMR81YQM7J/KDmhfWnz9PnDDvzsMsbgNzWKFniY5J2Cu7bLG3bN5SlPpctxYfVkFpqg+u9TCo0tWXINhkGjOx7y37L5fGU2B/K5NQ5cNCuqIGoSpIAltXA0e86l+9nCfApWB9kvuujMA9iBu5U/jmINFWQwKYG8dU8EUIfZS50qAH0LjIrOmlUQaReSeY+JPYQISduFOdwmYDvylZ2JSzVNO47FPU4o6nvB/ANMJxHlox3fUWl1638LfUtS3rOz6M/d5r9qp2zODh7Hu3uDP2cA7uuqVBNJ6g3pvBbBuxb34SbrCGkBJ91lvXsGcDwac4fzKsmCM7JPlHkgoFgZmYG/9i/qhQDBRIjrDANzEe32A9DyfNBIJ4ZQFWW9wsBhC7JKHv9YdN2u14raJIYfoEBiKwe4C7sNyhApwzNxDTM7yeJVgN66FG9rNUVmbqvSYN0pE8uxYbrFZDYU8lfUbnCqOy1stb1wumClrD94uHOwCUaRZYqTOsCHPv9MVoXpVCMGMrI6hghn7duo+AiMN7P5rPJn4Lxf5HXUl0703oqMtltr2z53GdDll86F+k/b4D22lNKkvFucR4ogD5gIWEm1VgfSnaO1IhMCVmXYMrA5PaXe8LUBrnxw3ACMPMjLNgHHadSUy+qfOeIj7jMDlPmkN9ObRgCfvb9dKIu948A+yDH0saSOKVKOvM+g8HKmuVz4vMn4ltQQcCFOdH6dH0ACcNybL9j61SSq7AQ7u5FRSHV07yIWporW9k93Nq2xX//7/8dv/Vbv4Xf+I3fwKlTpy7reJ/3eZ+H48ePH0nbrrnmGrz3ve/FQx7ykCNhTjz60Y/G5ubmEbQM+Hf/7t/hb//2b/HOd74T7kJz7EUc61u/9VuPjMHyWZ/1WXj/+99/JMdyzuGVr3wlvvIrvxKf/dmffdnH+6mf+il8/dd/PR71qEdd1nGuvfZa/OEf/iFuuummIxkb1157Lf7kT/4E9773vS/7WADwm7/5m/iZn/kZfOADH8D1119/2ce78cYbj4QJer/73Q+/+Iu/iI997GOXfSxrn/M5n4MHPOABR3Ks3/u938PHPvYxvOxlL7us47zgBS/ATTfdhEc/+tH44R/+YTziEY+4rOM9+tGPxp/92Z8dmWTuDTfcgIc+9KFHciwAeOxjH3tk89s3fuM3HinD7AlPeMJl99vGxgZ+/dd/Hc973vPwsIc97Eja9b3f+734u7/7O7zlLW/BF3zBF1zy9//mb/4Gf/3Xf429vT00TYO2bY8k2ULtTW9605Ed63Lsqgb9phVhrXIi+0fC9IiFVIgFaGgQ8ElZijFLhPYhchCXgKlkFjYUQLPzcPNd+J1b0O+cQfepv0d/7jTmZ7cx39lDmHeIEkwh5+CnDWrvEPse1LWZrQbkbFTHQIgnx3JwIZSboxgkE9Nk/8kGUTeinpDAvjZENJ5SFmpvguG6SRsDAZTVpCBf4zOzSoMOFwL6bOCJ5Sy7YtNLoV9ku0j9PaoAOJ/ZfCqRKs/JfJ6qJoN9axss5dlMgVqzVRsEG6xSdp1uapXZFxaBvpylmmNUduzweILUlJMC85r1r00EEmOAKIUOGEzzFeKSOSSP10F/arcOvxADAxEXCqCNAHwKNCVMZyQYxteYpSgXamGSAwUB/kgkvC4QzCOp64PIskWJtantGgZw0qbflcESfhWRMBqos6agnvdl8GkI7A3BNQW/1WJElg4NUdTPWMotxsABuL4rpVDtOS2xwwC5wwA/AtIct+wYh/VMCiTq9RoD9+4I0Gf+lCGUmMmuKu6vYpxrgMywYKxcav4dyiw/qeVn5zY+N2UXOAABIXB9VBvATzKfMcsbt4jwTscJB+Nq
e1xnju1YPtrKEyqzIZ2PaXshg3dX2oVosEdx/JXdI60NMsc4lxhFFLAQkE82BGPsmBcpc/QxyeFOKlYScLtnQN2ME6lu/yT6nbPobv0kwi77VQdnd9DN5oh9gGsq1BtrWLvXCTRbG/An7wW3eRL++CmEmiW8w9pxYaxMMQ98Jyc5XgEdEAOz0QAADtXAJ5wHG9SmtJ6HwOAM9Sb5ybFOvzPf9wp2yWsUI9C3nG2W/CEFBkxf+ir3rfZdDDlIPmQADfo/MYuUAWOlmnVe7rvcJv2qWXuT3LnKyIu8n11PVAI9QBg+w7Y4XjO9V8AvIAZJlTB+yCj7eSzpQ4GRw3ydi5hf9Voe+hn1MGjczwXyNYZz5XldJKBJAk5qgDESFcDGob5g8pdGgD47ZvT/9r4cAfz0MdVH73PNNgV8VC6bWfMxlRQAjDy2owLss//nxEFdO7mmn6qJNI4w8QQ6mDHg18/L5B+1EeAvnY9XqfiBHzLSb6MWclbA2BV3Rkg3RMk5owxuBXsc8WEYcOd7UNVMyMs8Km0lON4zusE1HIBxC1K6h52P+lFBfao4PmdbeX3xqZgVzfKdVsqzj7mMRD8Aex0RopOeczxP+ghEW59S56CRUgC07DzsfmCYNKf+oe4xbWmOu8pWvtXK7mZW1zV+53d+BzFGtO3ls1+f97znHUGrsn3GZ3zGkR3rX/yLf3Fkx3rQgx6EJzzhCZcN+AHAsWPHcOzYsSNoVbajrBM4mUzwxCc+8UiO9cM//MNHBkycPHkSJ0+ePJJjAcB97nOfIzvWe97zHrz5zW8+EsAP4PH2oAc96EiOdfz48csGXYf2yEc+8siO9YIXvAAf//jHL/s4Z8+exUc+8hF85CMfwbOe9azLBv2Osj4mADz4wQ8+UjnZb/iGbziyYwHL9zJ3xJqmOZLjEBGe+cxnHsmx/v7v/x7/9//+X3zTN30THvOYx9yhYzzykY/EIx/5SHz/938/uq7Dn/3Znx3Zud6d7KoG/VQaJkSSzWcs6gkMx7mWavJOABswW8iDOB5DMUnSVA5JbpBmO3AHO6CDXZad2jmL/swtmG/v4uDsefSzAwSJHjvvGKjwjtloAINVnuvOLWzedBPTLwmmqdnNIFAAmwxiZKCC64/lemdqFwIZHFSyLtdNULAPKCVubN0xG5xYYLWEUASYAN38ci0+eC/Z7R4UHIB6AfTTPoTKeiqrb7rBdRKTNKXnmn3CUkjAg2SwqpRnkuqKJatPwb4xuSIN4YV07hn4S+dlAwAAMAAkQn657A8dl0Toh4DIEnPDTfMhWd1FRnc6z5ydPxoHi1yrIwp4yWNCgpKmrozWeIoAmAU4CCaasZ7YgtpeE5TKm/9eviPshChgqXzfGcD7MPMuX4wExkDw80FtpDFbkHQaXMd0D2rmvbkOBdi3JAB1EaeQP77k9eExDmX1wYB99tgpm344KBk0Gz+QCd4uA/x8lWTuolwzy6gt2QwO3svi6sw82JsMbg2yCpBvmSb2/LxjJqcPQCCCc8ykcI7gR+5t7+z3MzBcnq6uCYb5bSTYUn1T4KKCvitb2d3dVNKSSJIuPLO1hjWiFJxfmHqK+SAD/l6AssYR1+/qZqCDXbh2H+2504h7O+i3z6LdmzHDr+0K9QQ/bVBvTJnZt7EFt74pzL41kaCcZOBK6iSrlGhxfshzBys/IMlAjq2HfYziJ0ZZf3jVi2AfIBDgNAtoiY8HIAflh6AGwL4SKVvNfFceczJJmQwxDIIXbMLipAMSPDkAgXIiWpZPTglSpj/Ub+hlLtX3bAKU9ocSpJXNHxGW+zQ2AWU4hw6BkTtg6q8mZt0FPqu2MG6Wsf9MQt6hZq8nSj9af5vGPj/8rQv9f4zVNWwKzJocYwn+hQzuJbnsEBIDUE0BPYDXwf4QJ8Q5Xo85nQwii02y/2lzkuDYGBg/IJ+HHccDYOxSWX53xNIvCsMulTUIUppA3wZQMKNNW5fWx1Qbvj9mg3ljKNeuvl4c
jlWph6nXvgsq58rSrgoGB3m0Vnv+LBHgZO+tn1BGd7qeY+Dj2LnomgMg1cbWt+w52bl0ZStbGQBe4+6Jwco7y77oi74Ij3/84690M646OyrA7+5sfd/j537u53DjjTde6aZctXa/+93vso8xm80AAF//9V9/5MkIK6bpYZ0AAQAASURBVLv67a1vfSte+cpX4vu+7/uOBOCsqgqPfexjj6Bldz+7qkE/v38WdQMGenyDObkBqy1m2T6yUpX6PknwWBly/HmuIQC43dNcv2/vDOLuNsK502hv+ij68+dx/pO3oZsdoNvlyUhrzVBTo1prUE2bVHOOphsgDU5V0yyXFAJIww/DAI1u8oxkjWZcApwtD5RBAwVpykxMfk0FujhMRcX7CnBo8NpJP1l239CGbB1PAAYZ5ofVsSLnESukejq5QR5DWc8k6VnVQDNJtWRCNSkZfVp/RgJWyojUaxz6mDLTs6zRIvCnrxftJWYpUGQwiwiJaZbaP/iO/lfZRFoDQ4+fwT5hEbmYWJX5d8u2FAw15AyOBALa30/3QQZb9DUN3FhW4zCDV3/PUTSMJoeqmjI4EzpBOqsSrLlQwEkDxsMxjzKAQwBfT2EqxNolWcyh2V/UtytHpo8p1avMY57M92PRZ+lY5rf0PnIGbKps4GQ43M17BThmz3fxVBYy/YvzjIv/H8qrDZMe7DhJx7eB52X3qeM5qggOKdinNauGmekCEOgc18EhhLIGzDAwqDLNHAT08FQxcDdYnTQoGXuVHZPMc+lFz5EnrFUengitY7CvdcSMBQpwPY9nK/OZ26EyZA6V1H6cmhpElbP1YzkxRBkMKleY+lMDa0nm9AostWPBw6M+/srukaa1j52A8dE3JeseKOYNnct1LU7rscwFfVS2nQB/s21Quw+3dwbh3G1oz9yK7lMfx3x7F7PT2whti9AHhDkf1zUV/HSCyYlj8Cevg9s8AX/yOq4JOt1ErNeAqkHrp3xPBxRykwoycODdoe2zn9jLSan8b+Uo+ZGOgFbme/YP9ZjsgzkC4BgQBMmcPQRFFcxQ3wgowIgE5qGci1MixlDusFhL+vSZ9BumJuBSkzkqEuUEDZU/F39a/c3hmqiqCFZZQ/1BBbK8CCQq4LcAWFgGmmGaLySfAFDVfiv7lzsprz1pjTXrH18vBmu9+sILfpr6ccYvxgWSryhLl8bQZTC1OPCS/g9BlgdWSPAWpFu2Hg/7zyR0FR+znx8kVtnv5LWTH+dGzlGlHA86lvLca3vMpPatSmWnfiKkEgvK7lsG/On3UqKM1mfT+pMq77nMf7Qg3hhIZl5bSLy6SNOWW985IC5PjoP4VnqPquxkz+ouWsIAACuUALzr1gQ6y/Ec86WsrLm0B3HxuntCBsuKe8sw4YwSTTS/r/eNjgOudR/QhYiDjiVdD7qQ2M5qzhHaANTOQXeJnoCeiOdUe2rDeVA704CPw7qM2q6CDWgBTO1vrQt/V9rKt1rZyu4R9qQnPelKN2Fld1Pz3q8Av7uB7e/v4/73vz9+9md/9ko3ZWV3Q3vGM55xpOzee7Jd1aAfdTNQdwB0ByBymKiMo0rKkYPWAitYGLL58a4S0IzldQgEjwCa74K6A/jztwIHe+hP34Rw/iz6c6ex/w+3oN2d4eDMDvq2Q2g7uLqCdy5lolfrU6k1swm3eYJBv+k6A35VXdaWS0GQQRA9nSSzgtoAqSuTQUs1uwG08qYkUS6tUWFhoSEgoECIvh5Gfif9XowJREiAn826NBml5AW0gcsZ3w6guuEgXNUkRuQwK5e0PwAJIopUZ8WgX6wmoyAfB6QUZMhBqSHAp+fJ5yT/L+r4mH4SgJj/zz+i7R0CQ8NrYWVkbQ0V7fdeQdeYg2bDZIUEAMUc2ImUa49koIfS8fl8SlDPZubb9g37QEeggjHBSfa+NMZLf3OMM4B6DjbEZcDfiEVyJTCsm3kJXBR1AB3McV0a22Nm+84C/USU63SiZLFqPdBhn8jp
LgfiTFsJg0x/W7PEgGT2+Hoaw+MvufU4cGm+z4TGRdmyITtXz0Hv1QV28SD7OrXJl8GHpcw+IAePyWSOKzgnWeIaLAZ0nHEP6Nj3MYPMiyyLMuCsNWaiAT4JXN+19i7VKq0F9PME9JVD24eFmn7ctzxWau/gCVivPQN/lUfjKUmSKRNckyRSnbBhQMr05yqIs7KryVTSso9gFq5vAJFoHgvCZka+vKwse1kjUg2/fg5qOZGKugNOpjpzK/ozt2B2+hza3RnmO7sAAPIOrqlAzqHeWMPkxDHUx7fgj5+C2zyBUK8hTjYQmw3EZh2RHOZdBhaGQfrhcpEkvc1cGonru8XINZ+1JmBOlpF50UWunRwBhCzzWf6gzAdDMA4ZuGOwihnyFI1UpmEGpmOo0aLcXwEqmnloWbJJcSwjgx5FilX9TQVwSh9z0X+I0aogAITIfrWAEUtr35rzHZN0XmrWLyRK/p+Cfgr4+eQjkSTtxMT2U6hgLAmOUsLbiI83bAMpi3EAaF5ozreMLx0Xym63x3DGHzrkuKOA/IgfpvL/CvgxwJ8lPbsQMe+NpGcIac1U0EeTAuGAOupay6UWLP4ZYvbDASwAhgrckNZns/uIBAa5DJKRz32i7x0G+C3pg6Gfpq9Z5uNhzF8gg5dOkn0o9PLXIfY9YtcidpnpR4ETGin67CtrW5YlT+lrct3y/VYqJoSUeGSkhQeJXakOYh+SQgXM8XstcRG11AVSDUcF/Oy1hAB+fQycpOU8f67waHM7KAZQ12Ywcsh41Osc2KekAfiXEyky4Kfz5spWtrKVrWxlK7vnWdu2+O3f/u0jqzG6snuWrQC/i7erGvRD14LaWdospgCA59puGshI4NEwsO0DUqFwCazQfB90cB7U7iOcuZmlPE/fjO7s7Tg4ex57t5xBP5uj3Z0h9oEBD+eAGnB1jXpjimZrgwG/9S3Q+mYB+EUL+GkWPXLw38rgEVGKMHSShe4GG1oLIo3tfRQccUCxD8sMMaTgRs6M1mPHVBdtzBSgGs3gNFnFhbSTvq5ggq8Rh0EssyHX4GFiD9lsdFdl9o8wfjph7wwBgnROg9BfCdKNnKT2L8UkYaNyYH2UyhVxMSimxw6II/UD+X0FJbxcA4GzEqvQ9jP/ZhmAioig1D4FYUumq57C8kDdYr1HBlB4/ClDIyKmgCigQQaRZXQmwDSUFdLXF/o1LP+/bu61+2MAxQwqLq0DAu1X7TcG+By4BpuDBWuyjKNDZoVkVqb2RT4uETB6N+jcMpAgGw1IYXGcWcAvHX9Z30lAR5mdyrAIyLJlw1uWQX+kelYFMDVkV+i56D05Bl7Z4NSw5gxlVvJYrSB7P8gJpnFGpKAfj+MSlOVHBc7t2NXAdGYl88ghpnbyPNYL6BeB4J1huuY5Vee62jPbb1I5eU4J9KvM2Kndkn4dyvZp393FlubVO/H4K7tnGvsiLO8GR3DCoi9YW8UXFueJlNQUQwpCu/k+aH4e7mAHcf88wrnT6M+dxsFtpzE7fQ6d+Fa+ruCaCtW0Kfwqv3mSE6mOHWfQr15HnBzDPGTGElFOWNJ7OwCgaHwWMWWrASwX7x0Tc3StbyXQrUFxQObXwPNAHwBygyA3UMj8sW9pAA1kIIOToXSSk7nD5bWTLINIzVc5OSadSJnAQcPrMbQ0f7vEylTAT+fsWIAM5dfHfKsxS/XvbFuGLL8xn8GsQ2pLAR4FgcxnHZAS3xx7RTwGAAxcwEONCAs+d2ofUDD8VTR0lPVnLQYU+UGmHyxDlOXNAzhLzimeWbQh+UjD49vHwXc0SSYl5MgYbwP7z/sdg32zjpl9y5JkaselFYCQXMBWkFT72ZB+a7EGbwFMJXnPLrU1neNwvAOjgPZFy3kOj2eeJ18w8p4i+86AplQ6KPCpyQzSbrlfYyc13LVUgSiYxOCYmeYPuXGMH6XPFfBTdqb68gpce9ngefVBBwBZYtoZkM+Oj3R8c626YGU9Q0oatMBtHwPq
yGoKngJq5/M+I5p5MV1n3SvOF8E6J2AoORAavua+WpRNt3VJ5Zh3ta18q5WtbGUrW9nK7nz7zu/8TnzhF37hlW7GylZ21dtVDfq5dh/U1qBuBnQdYjeHc1o7j4MBsZqiYKMAi0GCGEDtAW9IZueZ1be7jf70zei3z+L8J2/DwdkdHJw9j3b3AKGPiH2AbzyqKQek6o0pJiePYXJiE+7YCQb8NjZBa8cY8JtsIDQbQNUgVFORuStry2kQxcosatBFAYHGAwAlsMdR3gimzHXk92Ik1iPUOmZDpl8CSEpQyQ2CHYfq5Gp2qdfaPb4AEyI5zn6V80kBDaCQAjw021WCUZ3UGok90M773GeIeWM+ANjG5JvsuedzXDw1Z849RiAQ7477yH05lIUcyi1q5iyknfb6aFCql2vrI+AQFwBZbkcGAynytcnMw3yOMKCiBf/GgD5ta2/bblqoAVJPET4yYJaOTfLnmPnha5c39iYAuZC9Pwjq8Xgos93Li2KCksL27ESmNbO8bJ/mvlNwhojrcypQk4AblVOzv1tVKShiAfiiSZJVPWznQq2UQZa2XJ7ya/KYAL+QM/1HASRXHtun+4WPZhmA0L6FBHx6ZY30i+dd/IZhHYsNZbUsmzpKUMqC7yqjq1nimZVXjjduazTj3I5/GzjM9zWQ7+1l5gWh1WDYpAIAv1S+lftfwD8DFCuTr/b8bi3jyBNyYL3vymtl+pXZuA64UCD408h+7ud+Dj/+4z+Om2++GY961KPwmte8Bp//+Z+/9PNvetOb8EM/9EP42Mc+hs/4jM/Af/yP/xFf+qVfmt6PMeKlL30pfumXfglnz57FE57wBLz2ta/FZ3zGZ9wVp3OPtLaPmHiHxoFluwGeh6sJEESyVtdoIM/fOlcrMGXWA+pbuP1zcAc76G75BOJsjxl+t9yK/VvOYL69l2r4uY0qsfv8tEkMP3/qRuDE9QiTDYTJJuLkGPa6LEmYZMeFgWNfiwR4nwH+SADJeq6+SYhSm6zPIEWIQNsHA2SwWoF3UrMu+VZiQxDHSjYqc0/XP9+UIJEkn8HOLZrUpvNtWhOWrLE2IL0EpLXJcVbSU2t4DZOngHLtOmzuTb8RjYOy9EMjgJ/2oQUmh4DfWNKJtFGTYFh2MvLaGBgei5EQKa9Byky0sq36usN420OEjB2V+ERGisDvLV1b7fn1JchnQVsAObnJVVzXLFofW4wGrbSA35I2hOiY5ReR7ptZx9d8d94zwy8E7LU85jujh1qZ+0fN9UBwgA8BtZN6gH0s9jEhItUJ7EL231BJmo76JKEDui4DZdVgbwDASl4eyvDTfl12CQbKDDoHKBg6VOhI+whSH1Bq+RowK4GWXYuodcmBBPgR6sV26Hi2CY7i91pfNLOv871pzyaSSChrn9iEg7H7K3VS9lGVAWrvb738lu2XLAAtkBidURi+3E+mznFiIjPgh/kBgkqfyt6cqhpUN4DjOu+ktWB1vz5s++B+WdnKt1rZyla2spXds+x7vud7rnQTVraye4Rd1aAf5vugmZMNVY84n/FmYT7LG4lmDlQVyFVmc5g3ECkwNdtF7Fr0O2cQzp9F2N3BwS23YL6zh9npczg4ex4H2zN0M8ky9A6eETi4poKra1TThus21PIogZToa96kmloptp5Gb4IrFpwBMkCjtch0u6VMQN2kaVY2UG7YNGNdnwMlo8iyafQ9AEvCHUtMgZnQAVWTAuARkKBOlSVnAMQYMtBnavEdxhjqOg4iqAwRS8/EpQCfZqCXW/6Y2kAkwJ0J9tvQWCnZKeJNVG60e3P0w8BGBfv6UAZCuJYZiYxoBAKzCL3LQKJKgAaKcJGMhBYzlDIDqrwkYyw//py2abGti4G8CAgDlDQAAiH2KZAcIp8Diexb6HLAt+8WZa8M0M4d7caBPzKyY4PaUPkvjgZAhhJdnjJQw/XY+PdIalAusFQAkM9zRpSAw1Dyc3gew8cxsM8GGIv26rE04GfbZdsX
+Ng5IJhlcwFTRiWiBA5DKf02xiDk4xkZ0mFQH2CwTz8n96cF84bJC5bdlxMctIlcXzRItM0Rjy+VrB2yLZeP09yPQAnua3DSgrSZEQgcxni2NchUHjbV7huyakYAvwRsi4TfXW5LAv5HevxLtN/+7d/Gi1/8Yrzuda/D4x73OPz0T/80nva0p+Gv//qvcd111y18/o/+6I/wvOc9D6961avwZV/2ZXjjG9+Ir/zKr8T73vc+fPZnfzYA4Md+7Mfw6le/Gq9//evxwAc+ED/0Qz+Epz3tafjgBz+I6XR62af56Woqv1ewxpzLsnTDoptAOSaUxSWylizHPkPY3UbY20HY3Ua3fQ7z7T20uzP0bfatXF0xu29zHW46hVvfkr/NzPCrp5hHh3kfMO9zbU+IlGORIMCvGg/A+kOU1stS6QCJSTxkLkX5jYUkAmBpUNqybqJNOBneRyHkCSou+ZweU+d02/8DBtPo3KPrq4AMSY455lqnY4CfnXsPwVP4JyyDcdDudJ72HAftO2x+sWvTMglTJ5/Tc7C9oJL3+jmiwXvGVz7U1H8ZAH9lY8Po/4u6ZmPrsl7z0AkAwusMOZQs/KFPNRxzergEGsdiXHdG0nPWB7QhMMvPMPzyOsqPddEx8vudJuowKwwh930fuRZgEB+YgSXxIbUr9d6Q/RwBoOhTTcfiXIddfBjgtyyZbPB/Bb0ApHvb+irWinIIA2AtyfTahC3nQcPEU2AhiWvI8Mv+e+4zC0YC3IM9I838v7E+knk8gWjDcw8Xvp89lUOcWbyLn1tIptR2CrBNMSbAT4FRch6JBSlAaoys8BFtgdbBMa+YrXyrlW91ldr29ja2traudDNWtrKVreyirKqubqhiZSu7uxjFIY3lKrDt7W0cP34ct/z//j9s+sisuskUsZfMyhDyZqJqAOdAaxvlQUJAPNhnoHA+Q9jfRZzP0J7bRruzh/n2LvZuOYtub4a92/YwP9+im3UgT3CeMNma8N+JDWzceArN5jrWrjsJmrK0pz91Iweotq5DrKdcd2ZyDF0EZl1YkFy0AW4FMmygaeodKscSVE6Czxm0GQELzWtqCi64BIaUoN/YNkP3WmPv2YA5UAZVRgPjfVeCBxJ46iMDeMoOCjGDoQoUtCEDBql/JIDAAQX+XSvbNzQnmbkAM3cUdCMgBfoJub6hPe8xpuOYZKeVxVI5sFYaF8KSOmICLNQieepG2qL1xvT5sN/tUW3gwjIRlwGTOm7sMfRsuf4dgx+1sOZqrWUm7bbtSwwoZSyMyFZxgy6Qka6MkXoK+AZd5Kx0zRRvJVNcGQn5Guf6fdpmT4RJxY8NCdjXzUHdQZKlTYwDILEgFoDHITANLIB8PAbydbCsT6AcT4fV2SsAuqByRqFIWMBYOwb9aq/BqISaBoWrJjFqU9a5rdE3CEINM89tjT0F+bgvyvEHLAbQxsyOSQuW23td42U89pjBaQE7oBwLeYwuzn/2niLzOBq4HwJ8ywBU83x7ewfX3e+BOHfu3J2+4db18VM333yn/tb29jauv+GGSzqnxz3ucXjsYx+bCnKHEHDf+94X//bf/lt83/d938Lnv/Zrvxa7u7v4b//tv6XXHv/4x+NzPudz8LrXvQ4xRtz73vfGd33Xd+G7v/u7AQDnzp3D9ddfj1/91V/Fc5/73CM4008f07Hztx//B1x3zYnE9HPzfZZk00QJAHFyjL8UOlA3Z4B7upUYK0nSspsBfQc330U4ewvCmVvR3fpJtDvnsXfz7Zhv72K+swdXV3DeodqYYv1eJzE9tYXq2htBzRQ0XYc/eR3isVPot25AaNax30XsdwH7bWAZUiDN+Y0vef1O5oCJ1Ou081cG/PgbIUbsC3tw+4BZT0PCde0Ja7XDWuXQeMJa5bjm59j60s1BfctMoO6A/VQAqJskQ5/MJluYdYBCVyoiAGltKGr3qcT9kB0DCJgo/pdKpTuPODmGHg7zxMLK/iP3Rz7Est1C9p+4
b7zMxynBpp3lxBNlRHXzJDu4eMAB88mce3qd3KB+9/LaZ3pedq0aY/Gr5Hfp0+T1wjtOYdIkvGEda6AE2saAzQVWn67T9vNAea1sgpxlgw3Bh7FEFH2r4rGmzNiZgOVdiDg/77HXBpw/6DDrAzqp5ac+bO0dakciee0S6OeJUp23to9JDnSv7YXdx21wRFivPdZrj+s2Gpxcq7DZeFy75uHme3DnbwV1B3DtPmI7Z9BPVVucz+Ua6skCy88CfqP+pX2UsVWMJWG89nDoQsRMlCTmQWp6FgAbj+nKEzZqh4l3mIQZaLYDmu/BHewA7Rxxtit70HzPUdWAmilQVaxAo+cjNVNT2QdXleoJZp6ydZLt0K2czEmVQ4UAmu+B2n0uf9EdZNBPx1E9YQWaeg2hWce8j9htA7qANC7aPuKgC6auH19fy/bT+sfHmgrTyuFY4zGpuB0btUMVO7jZDqjdA7UHoPl5ZvnNdnkP3s65prvzoKoGqhpuspYTZX1TJp/JtbT30Pb2Nk494gtXvtXKt7pbm46du2Kcjtl8PscrXvEKPOxhD8PXfd3X3eW/v7KVrWxlK1vZyo7eLta/uKrhc6pqgPoE3MX5LAdUAJD3iI5ZfzTbA4AsuRICwv4uwu422t0Z5jt76GdzHJzZwXxnD+3uAQ62D9Dud2h3WwBA6AMmGxPUaxXWr13H5MQxTE8dx/oN16DePAZ/6ga49U24zZOgzWt4U9Wsp0A6YoAnh0ZqSvWBazJozRFAM9IlS3kkwBJhQcES5NP3+f/l9yzgNwbWDM0GRJThtdiYmBhz6XfksQNA5ODI5VqBVT52AgvasLCRVQZfHzPYpzUNdfM5rC8xBNOspYC+E8YcMaXIuQjAJeBvaGNApw0SjYFnCfwLMQU82j4DkQuMPGXwEdDHPoF/MYLZf8KT61E+Z2nRsfZp22L5fxMAGzMaIIdJzRHLxkcERc6WzzVzKNWbI2BR+kpYTxTjIlilH9ENvgT6oqtEgqoE/SwonNpsrmJiag3YWrBBWAlgU3dQ1rgzGdckgTbSOqH6mq+zzNQg2KiBMh0fRb9ZlhkymyYBfkHqYvbzop5TypIuLpoyHcoLVNaSKms9FWaDieRA5nwg1y+9j1Lu1MpMaUDV3reW6Tq08bstf14Bw7F7HsjzjQ/C0qF8L1eO7wutk2kBP5bmhMi7Dlh7oc2B0pFA4YUyywv5U/soIHH0fvyL9wDb3t4u/j+ZTDCZTBY+N5/P8d73vhcveclL0mvOOTzlKU/Bu9/97tFjv/vd78aLX/zi4rWnPe1p+N3f/V0AwEc/+lHcfPPNeMpTnpLeP378OB73uMfh3e9+9yowdQdtWrklNZkGwfOhad06M/9Qe8Bz7WwXcZcZfu3OeXS7M3SzA8Se6yJX0wZ+2mB6cpNlPY+fgtu6BjRZY19vbRNhuomuXsd+y4HpeS/B+dQsgkNEO/BtdL3q5Zx0HfcG8AfEp0JOvsqvl/d/bRlK6ThYmK9tMgnA8wTJXHBo3aZl7LgCUDJ9bQEQYWBGAyZSDKp1nAP/mshCWucN4sOMA37LLPsKkkQBJJCMpJ0LSSdjiT7pgAbMGqg/FPPrQPbTNlXXVq11q22iKG3TVClRyyhqKNNyaU+9JdJvDa+hScyhKIDPEDEegn2aEIdyndE6dkTizYWOW0Ux+VYXcXnYXO4jVUjQ+ydEYK8NOOj6UcDPy3rpHCXAT8EeC5S3joFElUxtQxAGWjZdxzV5r48A+YrHbOhlTGZWXPIHxwDOQd8XMrexXMfHGI/8R0h1NM3xdA5Ixzay8URcHzrNL4Pry/tOOQeVr3SOE1ArASyNZH26b/UaGcBPkwhtomO6N8X3vlgrGITJ93NJLcaaU4DbAbVWvebNKvtbcl01qa72eUzYBMDkT5nEs9StwewqQw+MSJ+WjXJ64llB5LB55B5gK99qZUdh73vf+/CCF7wA//AP/4BPfOITV7o5K1vZyla2spWt7C62qxz0
a0DUIc72EA72Efd2EuhHk2li+SFIYXUgZReGtsN8ew8HZ3fQ7s4wO72NbtZi/8wM7e48Mfu4fl9EteZRTSvUaxUmWxNMr9nC9NRxrF13ApPrb4Tb2II/dQNo8yTCZBNhcow3dgoOkIMWUq9dBRcZwIkis2hr9MVICLRY240BBTmNWGZ72rh/sVk1GdjekQFDkCQPARRbRw31dQZQHAsCuQFQtHB9EIs6gZaVaGt9RQP6KYgQAcy6vsgeXgb0jckuZqCP0CJygCISgjDreB/L/exFqks7YUwNawj2ASjYTENWkjKSNDsWwGI9DCC1p0VEbcC/2jnJOue+8KAC+FOwcGhjYJ+2P2FvcbzP7PZZX8uyh+Y3Im/oYwQiSUCVOLjWp+CIgGMxgNAtAH/c0UrV0hpRJihBLB3ZSiZ6K8FdZfcNZT15nJuLaNpuAR4yNZKoa/mxn4NCnzLMNTubNABVS5Z5NeVATdVwyDDmoOlYVjaQbw9tlQOl/1g2WcHwkzbZek5aN2aMVUbalzbgJXVxopVRSl8xmdXNRIKTi7WCFPDrR87NAvUKwmo9KA1OqQ3ZtMMxpmOKQFIXJoPmev+HUAJ/jggtkANOcAikgqHlbyjg13geB42nQt411cyyLIkRK+pkDR4JrhzXytIw/XhX250tK6rHvu9971u8/tKXvhQve9nLFj5/2223oe97XH/99cXr119/Pf7qr/5q9Dduvvnm0c/ffPPN6X19bdlnVnbp1qADEQsgav25hftiLOiu9f9sokI3A+Yzlk3f20aY7aLbnaHdnSHMGezwDct5+umEAb8T18AdP8V/kzWWaWw2EJsN7HUh1SBjphL7CiEAG40HRKIXjmu46byjfgYcUq1eK3XeS6TfShFbs3NPYv6QsoXlWAtz9uJcsgBkHNafY3ORnafGromsn/BN+ny0n9P5yFWIvkp+VzB+zDKwT/04+3/uE8OadhYQKUHgpeNojH3uzNyprKzU/pJxb5uroFMB/PFCy3MiYkquAxYlCodASoiS2GT8JI/sBxXnMACOMvCHBQB4LKmnSDRRSc++ywk5+n7oUNT4OwxALs4FaU1Vv+qgE8Cvy4CfJqpZP7B2lAC/qXdJoQLg+6V24m87AVLF5VC5WLW2Vx+BE4UqSe6CYxlTVCHX/baJR0QL7MaURJY63fSP9n96LUi/DpK6BuPxsPyepCQgeKH6b+nakkOkwL4VAKBmX0uAQAXclGFYju/Sl9TEUN33jSm6qLnhOj+83/U1vbcM+Dd2ugRO6KudQ4sAFwm15z1UK8AukBmg08qjcnlPqbUOC79V2pR8bG1q36dEiIs2ckDsy/O7C23lW618q6vBlN33yle+En3f4/u+7/tWsqwrW9nKVraylX0a2tUN+k3XQdQj7O4gzliSM4aA2Ae4egbyDqHt0M/mmJ3ZSe+1uzNm9W3PMDszw3x3jr3b9tHNOsz2OwYXYsSad6g9Yf3aNayfWsfG9RvYut+1mJzYxNYDb4Q/eR3c1jXwJ68DJmsIG6fQT4+jrddT9rDdFngoo2eOylUg54S1UqYaEyJnyLoceNKs2B4lsDP2OwBSbaokSUQiSYS8MSNgVFqJTIA6g1kl+DdmY0ASTA0uC/ApC8qCN90g0K+BiBCBA0nlb81msQ8xyTHppldlh2rvJOjEgYi+7GL+jisz05XVpgHBMbPsRz67EtwZAn4aKByCFmXH8UMLC+YFhJhZf4G4rkqkmCQLI8UlAKW2pWyzmo4L+d/C9xMTbQA623pnqekxB1Q5AzhKHR3AG8YYPzVZ7zZoZ5hkfWKNhcT6VEmueR8LUMn2pUqjEgzLy7adsBh4VCZK6DlpYD4DujbLBAMcsJEEAlo/BvINYug5aOibLLMFt8CAKwHJ3NMpQKxtEOZhAfYpEzF0LBfVzbltXSsdn9tXWOhT+xXwU7njVFOmqlk6a20D1LVwaxuIbg5CsxAAAzTrvTw3C/jpdTno+P496EJi42V5VWbh
gRLhJN1nXJ80zzVp/MqcE2ResIA/BxzlOJEB54J9I4Gn6v/P3rtH25ZU5eHfrFpr7X3Oufd2Nw1I89YeIiYGn6goRo1vDAziWxkjibwGGQFFUAz4+qEoweEjIEJGIhJGBPEZogZQBAw4ImoGvuIzBjEibRse3ffec87ea62q+v0x56yatfba597ue7pv32bPMfbY++yz99pVtdaqmjW/+X1TApUqjewJcONqk+15qXqHyl5VeTXLipwG7vV/QL62T2T1XOP2N3/zN5WcwFwm+s6uLaM4YjFcZIk4DdqSY4lAyNw1ruS5LwBff1yAPpN4kHqu5ZfWPMeSd2j2OpC/jusjtw26c/vw+/vwNz5AWH43Iu1fj+BbpG4faXGA0S8Rh2iSl1J+HZFywtAYk0g68xwk4WZQIqTIa6lHynMxUJjFOscB7CvFxIlDapxE4DJjOLPahNVYsbRtsJscrxnyOjOtxWZlCi2oNQGVyocLcy81nQnoS13rVEC1DclM1xTZS5OsMrXKd1Lwy/hUNaOeyngos97Kes6BoSaBQtmK1fw6Zf/Z2rPY9D3dpI28VhDXS46UE9JOYjJuUzjwhh1oQfEKXNE+zbDGYYGQydpD5lrRJEFQkuQpZICPu8PXOvkGmWFoQV/H50DfV/nKMULq+AFHQ8B65Bp+/BwyC08BvYV3WDQOy8Zhv/UiI+krWewkkt6L6Iyv7uCGIBKZKfsCADCEiNET+w/ksOgOGPAC+1XJjiE5pKatADId61mpVGVOWnaptcD3Acm1o36caxrxeYtqh36zTiCjrMaS/coMVDd8zhauuqbjBLzO96lK7Ir0ak6iSnXJgzDpAmAk/wGjYIAaRHYOiAYkVbBR5EThGt6rGAahA5BkvwgA3nl4inlfY6VdleG38OxrLTwnV3WeQGEt9z5f4xTHSokHQAH7DMMzERUGpLAk8zUNgIjPcwJKwtu91Ha+1c6uxH7lV34FP/3TP40QApxz+Ff/6l9d7SbtbGc729nOdrazq2DXNOiHGIAkAaXVCnEYEYcR43EPksjycHjMoN8HzyPFiBgi1ud7hD6wfOfhgP7igMPza/Qx4aJsWD0Bex7wnUd30GF5wxLLG7hu396NzOpz526Ev+5GYHnALKBmidR0GGJiGcZUgtoREPaWy7UVTsootYy/ZACEoZL/EUBJ2F9TAEizr31+FvBvAoBkm2yeIpCleKY1Are1XQFJK2Op4KSCflYOUwP9Q1CwLwrQl3IgIiZUwQhrgwTwHKUcoHPEn7PZrxkgMJmogLCQqASKiGqwq+qbnE+VNa3kMOs/t9pGRu4WixGAS3AKbCYgUUJU5kLKAjf8+5dx2G1b47n+2uNp5vtJv6HZ8JEAmgbENLiiQQjUQU9lkoWUKqnIaa04DYLYGomAgLcxgRwDoXNAJv/oFgZXkMxjYQFXzGAFyqJkb3fgwM6EPZFQ3y9pcv1PWaQk7eGae+ZhmWcKSCrgp/LFCvxtdE8CKlPATwItHOASyammBYYW1LRcV6VZlMCyDbhhnvWh4Fw0DwX8VI5TBpCxXmGDUp6rjGwsERCTXDuXeydtNwXu7T2dEx2m8q42IK3B2KmZQGoVbFXZNfu5iRydzplTWd+7w1LaPk+f1vEB4Ny5c5dVo+S+970vvPe49dZbq/dvvfVWPOABD5j9zgMe8IATP6/Pt956K2666abqM5/0SZ90uV3Z2dTGAdQfck0old0jBwjoB4D/l1K5Z6xcYb9msG/s87yaxoHnKOfhlx1c26DZZ5afaxu4g3NwewcM+J25Hljs59pTcXkWyXfoQ0mEmtx9HN9OCVFutsZ5OMe+l0scHQ+C1hAKqDG1S0laOlcSqBRQqqTs9DHL8qsBrCtmi1jWsZ8BycAB/ExTmwB+mtCxbZ6YA750hnZmLdMkG5qOxxYW22wfgHk5R8tOciX4r/PrSaasP2gbE3ICU0xptn/Tvk7Nrucn2rb+Tphn+p5lQFKqr26CQ4qxrDkZ
TKSsTFA+bMbTgKPqZ0UIqBQiq2hEfd6sJc4SjyrryUmQzOqqa18DJXkOcBiigw+s0OFJ2GGulopXtl9widUTYkBqOYmg8kG2JdjY8TRJXIjx5CQeIK/jSBFJ/a7L2A2XJDISf8YeVMB3BYC1rU7ldguAVbFXtX40UJLfhOU3Rj5fVQKZlRqFYdYCNYvWsCHz31k2n9ui93/VDVLZfgYTY5IkypgQqSRXaR1lBRyZBSoy6go+KqsvX/N1nUMrf8qPUq9Ta2hnwM8kQHALttxfd4PtfKtPutyu7Owq2iMf+UjceuuteNjDHoZP+ZRPwUMf+tCr3aSd7WxnO9vZznZ2FeyaBv3C+Q9huPgBDLefR1j1GA5XGA6PsfrgefQXjtBfHLA+v8a44rp8oQ8IfUR/2Be5QMlw7lWCEcCBJ+x5hxseeAaLcx1u+JgbsP+A++DgATfi4GM+Gu7s9fD3exBouZ8LsoMcaFzBrYD9ZsEF2cmhR4M+cpF0gDdsHUdE+PcmYNrWvhp2nLUpAKSbQpVaYXaLAF1JNsRzbB4TSClyfiW4r21MqWYaYtL2bTKYmgmc8nsp938K9h0NUZhDMQcmmD2UKtnAzjsG0TwAEJTz5IwMEWf6u/we1yZBls9sHIcrvCvMo8JEKlv6KBKh3D2pCeQYmCVwnT1CvTl2CYiRWUhwNJtFb8EABSbz5yJhMIw/jlcmAYPNeZ8EoJQ1VWQ7ywdyoI42mXuUP7MZ0drGAEiSDc3Btbn6fmDmqGac50aW4J0F/LRuXy+ynla6bRViZkxqwLb1BJeY8aE9sCzNjaCM+W2OhZqzbOUw+5XpPANt0Tm+RlyDFBZ8vBiBCdmu3BepGk8ngRkrNVqx+uT1VHJUGX4aOIcGz2dkkgDkQPucrGdyHtQtgRhATcfHa9pNqTj9m5yA6PV4aoBPr52URDYscF0fNS8IsBOQz7sSpCLzXfKSze4045wQPQFwCClyXZkY8w/aRIZSR8bIF8tvNDIHto54TMeeaziGHjQOzEhKERQKw3NWCtV7iWh3cs10SM7VYL8JTtm6PCkVid+PZOu6Dp/6qZ+Kt771rXjiE58IAIgx4q1vfSue+cxnzn7nMY95DN761rfi2c9+dn7vLW95Cx7zmMcAAD76oz8aD3jAA/DWt741B6LOnz+P3/7t395lNV+B+aMPwwfwHBQCsGS5crL3Rn9xI6CeVkcM8vUrpPXxBjPZLfaQmg7dcp/ZxoslqOOHOzjHc9PeWUmgapHafaRuDxdjw0lbZh7iRBD2PzpZtHuTPMTsb0KjeRuasCTAn0sS95e2R9TpBiUZqMzfALAnoEfnXZ5bIHPLrKynBqxTrJjCG8ww/U4ytW+tTQCPukaXz0DeHEiWGWT5/y7LelqZdTXLTLcS7cAWH5TKvK61cysG+5ThN71u9OAKgNi+qCziBNTMPqm2o25WNTer/HiSbK0kCSYnBe6n67eV80wpWcTLdGTSP9PfOdbZxjm2wJ/8rcAdRTDjjxxSA8D4MIjgenHGr+J+uwwk9cHUwQxJWH4hJ9exrx1znxnAYSnP/dZjvxWmn6ypFdMvAdEnjEHY/a7NQB8fl9vkCaK8UeoKAkDbdWiWDWhokdqxmmdKZwpIXgHJen0Z1uSJtYyBmmXbRAbrQg/vug01Cx5H5P2VFz+mJC0pG7GUk6jAKgO8Vow7vaalprLKo6tygvrC6k5pgkHneY9i93bK8qNxnVmO+Tcbwy62zELfYcgJdmZoGBFFEia0Jnc1jt8v6jLcjlYYfq0nLLxD5wAaV9yWKP6tJrVV54DnpyI33wFtx7Wzlbnsu2rcuCOxYr5m2d+PYNv5Vjub2gc/+EE8/vGPx2d+5mfi1a9+Nd7znvdc7SbtbGc729nOdrazq2TXNOgXL96O/kO3ob9whPHwGP35IwxHKxx/4CL6wwHj8Yj1+TXCENBfHJAi1+dTibjOyM04DSZ3Ht1Bi/agxfUPuw7LG/Zx7uE34eCm+6C5
z33RPPCj4c5cj7R/PaJkoVeBF5VpAQdVjseI9Vg2ty6XIKeNmMFcBvtJQYmcwFkFuzlYwBtDkj6ibIw16zJnoc7ILEaInF9hWk0BvzmbAn5Tht9oAt8aUFFWkEpL6d9jSBXL8CRTmRlHhMYXwM8+V59xOk412DeVsax+w9T9IwHeOGAorL+oXxDGGQgupsxyAjZZikAdG6wyrJWJKGDGHTUF/oCSnQzUrD0N7Gktnin4x72R91IJ7lzKGLuxMp/IQU8+GF9vGrBTMHcIJdCxlppNR0PINR0tcBKJmZw265gkEOHkHOZgbTUwk4x4clJvhbOMyfnMRpk1+/5lyArlgKl+RRluEgDdyEifBqhyQAQ5cJ4A0GgC1MrmmwuUXa5N+zL5m4jl8HIoVSgUiVgyLQFYNuU7CswuGmYHNDkgXEviJR0cRrGBKLWTXGKgj68mDBThnM9zN4DM2lUGQuNqELBxRV60EZBVZaYQJAglQcMK8LNAaozg4kQdXyeR6w4lDWB7k03vSgZ/MHNfiHWywt1lPH/edb97Z479nOc8B//iX/wLfNqnfRo+/dM/Hf/u3/07HB4e4hu/8RsBAP/8n/9zPOhBD8KLX/xiAMA3f/M343M/93Pxwz/8w/jyL/9yvP71r8f//J//E//hP/wHAHxdPvvZz8aLXvQifOzHfiw++qM/Gt/1Xd+FBz7wgTn4tbM7YedvRVp2mRHihuNNFtbQF6YGkJMjAGTWBrmY51PqPNAtOWFAjkuLvQz6oVsyC6VZVPKOiRz7IVHXUfYlDoeYFQIOOs8BaDiEFHK92wRNkmHZbLuUag27uAEXISc6OAmot6bwGwe6OQiuCQYUJnN3ZrIAuZou+RrUAhigyN8BzykKEBL7GLPrjGX4KYg4MQuSZVb6REo7ovh3+dCogT7rF+T5emLKNHLG19xY2+Lm+lYxHU3CxBzD7yTAD7AMoPL3Seao/sJ0irZJX5flgpn1u0qs28b4c3xeaFtDU7kW9G+t80dxLDX+rD8vfhVgwOyEqv4lg34xJ+kMsfjaet5VwnbRiLSnd1g2PgNNyvRTP5WTF3lfo4xBgsN+m7AmwIeSrKXrc0oMOBFxm6Jz6Lp9FOWDeYC4AlYV4NO1PfQFCExpQ04SQF7HQY7BegDJj6AwwvvO+MUosvUQ3038ysmlw+u+ArS+BuPnGIrJG/nvnGgpvkIqiW86lpD26M1IQPZ3WociaWoBP9TJnLk9juU9M9iPslcDCsCJPP9A1Grqm0D3m7lesiN0DlzXVRMglLUpoHcUH1X97Kq+dCNAv2s44WOaxKDnT+SKE6Tfl+GHn7btfKsnnlpfd3b6NgwDvuqrvgpN0+BnfuZncMMNN+BBD3rQ1W7Wzna2s53dK+0P//AP8ahHPepqN2NnOzvRrnnQb33bRaxvu4DhcIXVBy+gPxywvp3ZfeNqRH9xQAwR/REHo4KAEK0n+M7Dtx6+c2iWDXzn0SwbLK5bYHGuw7mHPwDLG87i4OEP5TozN9wfuO7+iN0e4vK6nC0ZUr3BTjGhl9p0awNkABIrB4N/mhmd+4N5mSUbjLB1VABkCRaHIrPC2aCON4OhZ0aR2XgBQGLK0nxdtRyoBqxc4bb2qVkp0zwW8h0O0tSZ07y5LbW6cs2wLO04lXhMuY6fF/aQZtw6ErBPgL6l1GNUQEBr/GmgQje2Wd7TAH02y92aKmSxDmEN/sGl6n8JRV40pbmjlWtG69PlQ0zUmk6yaUayA2Vglqh+Hyj11PIYTIA//mwxBW9JqxeSYUxMkt31HE9l0xI44zybkTKKEnwcI7Nt9V457AOGGBn0M7X8tM+RCK0vfdG6ld5BJFpPCNSZgCKBA4/MgnNVsLoMni8Zyfr9Gdt2bxRw3lxbU6AvFiaABjISRWaYNS1SlDMoQZIEgKJDkuRpxhENaLUNAHQMclpJpUv1S2NNEaUOVpmIEtrEAd+gzFtjy8ah9Vz/qkhBlestj5njeltax5SImbHM
4IlwzmEIqWDrQGb6LRpl7VJmEjZO5njiAP2shKoJDkbDjtywGPjajx7oFvl7UwA4105KpeZhlqa9Gvqe90D72q/9Wvy///f/8N3f/d34u7/7O3zSJ30S3vzmN+OjPuqjAAD/9//+Xzgz+X3WZ30WXve61+E7v/M78YIXvAAf+7Efize84Q34hE/4hPyZ5z3veTg8PMTTn/503HbbbXjsYx+LN7/5zVgul3d7/+4tNr7v/yDceCNoeQBaLBGNpDB1S56DlA3tClBesWS9zwkKAJhR3HagpmWgr+W6Usl3iL6tATEL+qDUnFKZ5z4kfOCwz3Vyzyw8PBECElrnEFUCWoLZQlKpTP0dmigXqDkIqOFUQrwkUjXCbGkcJxRkGUugZo/nvAVJDlDmigFskq1FpkwhBXvs89QsKGZZgxaYlTphek/lRJtYEq2UScRyl7WfMPUN5sYJQCWhrFLZsP2Ktf/J4zQB/KbSyETsl/qaKaV+wzazPvIUGIT0Zy6onll8epyZY2/9WdtXoAY4t7D8spl+kfmOnnv1F2ydP77GRmFxqm9W+1VALRPZR06mGwJL568CP8ZQJ6Vogpwm0C0F+LNJNG1OnMudyPcSVdcQ35eDSxWLUMeSFR6A1RjRekJIhNY1IN/AN2Vcq+sIAJIBlC3gF7i+aBp6BphsVl0G4i17X8Yw9EhxAcQRnjyC+PdFzr8kBRKVPsD6arbOpMh2JssonDnvOgfpHot9B665OEjSm3Zh0bh8Xet+h+eessergHZzbeU2NR2SzAm2hvY0IVG7Z2t/248QIQO4rWfWMzMNTb1kkU5X1uVGPT8D+JEw/JIvawLf+22eG/IakyIo6jg3m5ugj1Db+VY7U/vmb/5m/P7v/z5++7d/GzfccMPVbs7Odraznd1r7fz58/j2b/92vOlNb7raTdnZKduHP/zhe9UaSmkq6H8N2Pnz53Hdddfhr37wWfAXjrKc59EHjhH6gDgEkDi3YQhIIWFcjXCtg/MOi3ML+M6hPejgOwffNmgOlvBtg8UNZ7G4/gy6s/toP+pBcGdYyjPtX891ZZYsPXU0FpmcISZhxpXNdmYnhZQ3RbYOhmZJap09oGz8gJPZfhak8ibY0mrgmwDqj7K0ykY2sNZ0yJI0hZmoUnS6IUyTdl3Kck2/zBYsgW8NNvH4xEqyUd8LCVnKUyWHtF6YHte7sun1jiVllNG3EJBh6V0GA9qclVxAvszwNKCXju1JllIBL+3YWMnTKOPI43Hp4+mGW4Nv06EmWMBok51o2+1m4cryfW8CMtpvT/WxrBWAr0gAaXtt4G2DNQgZa/O/3GcoC4oz0FdjRC/svqOBpaZuXw9ZKnI0gIkjllBdNHy+z3aNZKNTritSstGL5Fgr/a2DEiuWeByOQGFAXB9zIFukNPkHfQlULw+YjdIskbq98uw7jHC55uCmVFKpAdU4goeATVIvS6Xhci0/zV4HNgKHKQSksed2xlgk9LbU8auuAQEQqVtyf/YO+PVyn2X0fIvU7bMElG95bjAByek5n84XKkul935Emd90XlKgXq8Hew9NmcWjAf117rDBWg3C63XdOuRA5UIyz5eemHUy9qDhuIy7Sk6ZIGHF8rPgnwF9LVCRmgVS0yG1exxMa5bo43xtyttuP49PvvnBuP322y+rRsuVmK6P77vl7+7S3zp//jwefNMD7pY+7ezuMb12/vplz8WND7gf3P65IsHZtHAH5/L9EC/cBvIiF9y0IhfM8yY5X2r6iYSwWx4AC54vU7vHQfF2v7BhgAz8ZOaKb5CaJT60Chhjkb8bYsIHJJHLE3Df/Q6tY2AjRJ47GlnrO095PdJkl9YV/8kmKoSsNlDPR31Ief09aFnWc9GUwHvlb6lNARxNrhIwSxMOIOykPOcboCLLHE6Av0RUavjpe5klI/LyJiHNruMRBfSz/gxkjGxilL+EX6Cm/hWlCBpWZSyM9CKAedDPMPkySKI+qgVOJoDfNpfUNnfq
s2wDLK1lHzxtSu+rL6O+Z0Moa3Ywa3Zm/KXyXv4Bw1AzoFX13mzHjHSnjo+sO8m3SIsz2afS63cQdp9ljt2+GnA0hOxr8ef4N9lfdmg84WznsWg8rls0WDZ8zS8bvodU3lP9UB638rt94HtnFWJeC/sQJakPWXWjFV9Nj1t8NlOrzlxbWTY2BmBc8dreH8sav0ZcHyPXZNYkHknOoaaVsRNmmfNAy/uf2O4hLc4gtXtYN/voQ8TxmCrZW5031L9ceCpAl1FQsfee+kjlWqPKHy7+D5+bMbI6zGpkwO9CH/K52W89zi0aXLfwWDSE6zpmKrr+uAb6pmbmnCSgXx9qn226x7MJkEB9T9nEAGUbdsTnxvXHAkL2oEHlPYtUfSX3rCzyxR6r5BgfNLO9NUnC3kMqoSzXwfnbP4z73fwJO99qZ/do02vnrj6nr3jFK/CsZz0Lb3rTm/DFX/zFd9nv7OzusZTSibU57w12fHyMvb29q92Mne3sTtlP/uRP4l//63+N22+/HV3XXfoLO7sm7E//9E/xute9Dt/3fd93tZtySbtc/+KaZvoNR2vQMCIOI0JfNjyuLQwMDw+0QHvQZibf8roF/LJDu7+E6xq4tkF7sIdm2WF54zm4s9fDHZyDv/EBcPvnkA7uw4Df4gCjX2I0dTH6wIBFTIXNwXXpQmawLZLDVC4pKcvFMxPMsoG43VSYVBW7xYA1biYAYepWVZKeAAfUJMOcksOUgKab9rlgyqXAsNw2PahjFhxHTIiVdJJKXwIxUa6TpzvLFo7r1Dmut4PGcfZ+SkwmSMza0Y3osvEVCORJmEVOM5OV+bMJss5lsNvg1rS7yfxDa8Ewqw25Pox+jkQTJ6WNcm+VKRNS62Yo1c9uwGv24XwA7lJWMczkb9v/aSDCmkowKssrAlmO6qTxUkuTZ/u+sjTGWBgc65HZfauR5TzHHJRIUrONcrtav0U6Mvd5S6tm2ARIkWuKxJjZdNxJB2oY9EPLAZNcPwmo6xTKmCbTW9sCusRNpMzD3MaJbFdKESCC81ybLwdQnDcgVYs0DtxeG/DKzD4O0pP30q+uGo8qK91ki2u7jMqdXBs89/C9WUC6KThnr0F7Xvi8KuOmXJuQuY8S8f2UOAhoUXQFlfO9ba4DZ6+FWKQ8N+qZEgsuw/m6Pp/JLLd1ZypmjUrOucZkzxep2hyAvUrynjvb2Z218eIRwsWLfN0bADxLsmnNUKDIdTZNWWO8kW/rONkAC0mUaBYcYHceaXFGvlDmu8Q/VAE9vAYV9r/eTp5qBgzA84YCftP1MiJl6c850/lJZUETcdA/+w0ksp6aeFN9WRh8ZbDkR8fSF1vTLzlmLFkJx8swC/hlZtxEsl2VG/J3NLFI/BYL+Onn1BXjObX2C6bjaEEL/Ztsn+W1nW836tdtM8tgtH3Y8nrOpn23QN5cgpf+UpT3s/8FqJL1yU22oItlNG6TpgQKuE0uO1WZJToFcSzrD8J2Ska21fQ7AjkBqc+AG4NKK6nhx9KeMY+JVx9cAD9OoPNZMWPqR1tAOLeJeKCYpc+taVO5UpzS9MVP4H4wO3eILJIRAbRyrCS1F3WP4HVsyAEIOTFA1/dc/zgGpLWwkFVa2HtWRZC1PMVQXa+UUmbcFsCRZzfh+XOipZ4Oc+0kctykiFyjb0w1mxYQPwhFyWCbL6/7Aj1HqhIzaBKmzroKNFvFiImiRj4vCka6pqrjOVVysUmDNklwuk8i1Al1CGOpaRqLzKjeF1US2kRNIbfPlMewyajlXuR5nlyTZda5ePk1HcbY2c5Ozd761rfim77pm/CjP/qjVwz4/e3f/i1uv/12DMOAYRgwjiMe/ehHw/t7Xw3NP/zDP8S5c+fw8Ic//Go3ZcNe+MIX4gu+4Avu1aDfu9/9brzvfe/D137t117tplxz9sEPfhA33njjqRzrlltuwU033XQqx/pIste85jVYrVb4vd/7PXzGZ3zG1W7Ozk7BtCbu05/+9Kvd
lFO1a9pbHo6O0YwR5B06Ye2lkBD6gOMPrzAcDmgPWizOLXD2wTegPViiWXZYXH8WftmhWXZwXQs0LfzZG0DdEu66G+HOXA8s9hGXZxEkKz1vRsAbtsZpuIt3e5pRnBIwRt7gqXzlah1x+3rMjDYnjKT91mO/9WgcZ43bGgma9TplYuXATJami8DQ54xxCj1oHITBYrJPJSBFNCC1C5EDKgFs3vRtZn1zwGt+/KfZoNZiImYKesqbSmW99CEJYAOEqLJTKWcBL5uYJR2HWOQ+rdkafbZuX5H6K/IzjSl4r5nsU/DhpP6paRM86ThRDi5ZyUtgXj6qOlb+DuVs4phqedQ5hidQZ1dPbbpxn4ItKttlGXlusqHfOKZcA1pjyDlk2c/LaZ8NmGm/9ToYY8rZ4JzZPGI9RhwPMQcl1DwRnLA5NSC1EGC484Xl2noD/E36ksixPJbUNqGGO6LvU9NkRh3J5kY/m3yXs+tzpr32XdqXJpWFMogv96+zyJIG+zQIHEwAbwacykEcQMC7BahbZjZNWuxxID7UEnuQz2e2msjzaZa7SiqVQPI0Ul4YDFaqTYOQXgN4lrnRCCjqmjzPlHNfGCfT6G2aZJy7DDrzZxtzgdq6nAX01eAkci3TSuJL26dzuY51y2O7AQpOxj/LTjVtYVi0SwS4LE+r9ZNC5GDmGFnm+e42lTa8K4+/s3unHf2/27BeLhD6Ec2yQ3MfFBlgDaS3HTP8lAHtO6DhOUL9JIwjzwW+48QpZXMszmBMyLWOiRI8OaTIaz6rFnDGpE6LISIzwocYs/Rg511OXsn3vbG59VLXxamsNekcHgkkksPsl6WssKDspMxwAnhNUHZilhFU0M8wGX2TE0VIZQG3nQTLFNS3iJjdZQFGmxjiPM/nIjMMyDyA4oMp2DdlpGutMpVPV/CPEyZMO8hxMtNUtjCMxi81B5/Ov3NmAUw99kTu81IMvzxGKOwp7bcFNy3wpz3IkpSQ78m5BthXsYwnJ+eegArkqFiZlwJxdS13DYjkfonI62S+JqbXgJV8tXKX0u8gaw/fIwnHIytlHPVB6iNHHGXfSvsm+w1P2adqvcN1iwatZ7Y8K5UgM/Faq+Jg+tp4vu4cgEHHTJ6jl33ByOPEPn6Q+n4O3iX4SAgOcJTEnxOlFPCPNZb1peBqHIGB1Q9Sv2JGmUoPx8jy6ECWFOb5ixOgpklOFAM8IlrnEDzxXq66Twrwp8kIKrfKznRTMeiGYEF1LenAPoqCfzxG7BjmsUrI+5/1GOEIGBxhCBEpuQ3fe+Pasn6cSuRWYKSymAvgl79OtYKM3Stoeyv2ZRCGXxyZ9RiY5ZvrK44jSr3kGYlPYfimdgk0S8TFAUZqxH8qLSssbQdyHbwm3rn2hMG4a2znW+3snmZ/+7d/i6/+6q/Gk5/8ZDzrWc+64uO9853vxNd//dcDAB72sIfhNa95zZ0G/NbrNRaLxRW3CQDe97734dZbb8WnfuqnnsrxAOBd73oXnvnMZ+IZz3gGvuM7viNL4t4Ze9vb3obP+qzPOhUJ3N/4jd/AC1/4QjzlKU+54mOllHDhwoVTYZn+xE/8BN7xjnfge77ne3DzzTdf8fGuv/56PP7xj8dnf/Zn48EPfvAVH+/v//7v8Td/8zenco2klPDhD38Y97nPfa74WADwl3/5l3jYwx6Gtj2ddeu7v/u78dKXvhRNc+Xh/Be/+MX4+q//ejzmMY+5ouP86Z/+KR75yEfi8PAQZ86cueJ2AcCtt96K973vfad63x8eHuLg4OCKjvGe97wH73nPe3DmzBn8j//xP04F9PvZn/1ZfM/3fA9SSvi2b/s2PPnJT74kaeAkO3/+/Kmxy1NKOD4+xv7+/hUf65ZbbsGtt96KT/qkT7qi48QY8aY3vQkHBwf4vM/7vCtuFwC89KUvxf/5P//nVOaj1Wp1j5FEv6ZBPwAg79AsO8S2gRtGpBDhuxFhiCLl2WFxbom9G8+hOdhjCc/rz8B1
HKzijWALd+Z60GIP/robWeqlWZSgrmXDpJI1DUjgOXJqMAN+mwW+FbTSjE0fCdGwUtA4dImRGQUwrHwMQg8ydVKqelQ2q1I3WnEE+nVpgPPMDvJaHF0AD5PdCjg4cFBJGWw8wHIImXCmdfmAeXAlAkiOKokkH4FBYxdJmXscZIuJ8kZnCITQ8IZ5GpyKMcHlrGRh9TgFAeu6fRnkA+Wg1jaW3+VYnRk+CSKqvKeCPKCtG7eSuaoBAcoMwVwjEKgYY3U7Lq/hl9s/badmtE+BP/1b/xdBcCkBVIhX9vxP21cATmSAU+WmxqgPlnVV8Lf+/RKUynUbBfzTTOMCqlE+vxqkqPpjWRbKhmgiuIiLy4wwNCW4qPJiWW7MBIoocU0mrzWbJNvd2klMytymKcsO2GBNzGVxM6jXFXaiZrDnwZswFoUlmAEs269J/RkN3tu/t85B+n/TvkQOpECpa4CmgzcBqWjGSe/ziBKMZFnPkoletcV2MY/zlvFVlFqCSxmUAIPAtRwbP6fpWGu7G5lDVd7Tcza6ZfSFWNf0G2NhVexsZ9eCDUdrjMc9/LJIlaSxZ0nPpgUt9uDO3cjsZ8+1jTkppPgUBJR5xncM+DULpLbI4GZfKZW1czTZABF13SmV+h6C1LuS73fei/+0OefP1a9Vec+p3+JQ6vghygpNYBDQHANgPzDI2u0dsxyTZZ04GQdTRykDfttAoVyzbAvgNweOkeOkDVVyMHOXgl8pwSQXsWdR4XKowVIFdCqGtBYbnpsjYdYHbfclgC9KaTPBRPtkXyv7E2UNsD7L1E4C/MpnSu3hTUCY/Rtx62sGOMrv5gQekwTDjTyh79M13lgiQq59PF37t33flevASqb3IWIQn2ol6gml5EA0TFnuu8qlLxqXa/i14m81meknctpADfhNAEnvGiSngGlJGnNkGJdyPzO6ymskoPed3scAQHDqnqVJ/+24i7y5SkhWMpLKOJZayHAOFGsG88Zpgu4REkRooAb7DKDuTZtU0lNrZSqoBkh/xInWnNFqnnKEFFNOYtKaxPYzOgfGlJCI/TYG2Qvbt0q+UoafzBsxlrqBUeeDzD6US8rslfR8axKA/n8KeJeayZO6gvba3Qb+awKDJtK5RmRpU1ULOboyFrzvcxsSxzvb2UeqPfCBD8QP//AP40lPetIVBYjVPu7jPg4PeMAD8OVf/uX4kR/5kSsKGn/N13wNvuZrvgZPetKTrqhNb33rW/E93/M9+MRP/MRTDf6//OUvxzAM+LEf+zH85E/+JL7lW74F3/qt34rrrrvuDh/rVa96FR7+8IfjYz7mY664XZ/zOZ+DN7/5zXjgAx94xcd6zWteg9e97nV485vfXNX2vDP2lKc8BefPnz+1WlcPechD8Ou//uunEmAHgD/4gz/AO97xjlO5Rl70ohfh3e9+N37xF3/xiu+rv/iLv8BLX/pSvPCFL8R973vfK24bADz3uc/NNcyv1J75zGfifve736kd50/+5E/w7ne/+1QAyT/8wz/EO9/5zlM5p+9973vxfd/3ffid3/kd/OEf/uEVndeHPvSh+IM/+AN84AMfODXG5cd93MehbVv8x//4H68YRIwx4uu+7uvwjGc8A094whOuuG2vfOUr8Wd/9md42ctedsXH+r3f+z387u/+7hWBfn/7t3+L5zznOXjDG96Apz/96acG+n3Hd3wHvuiLvuiKEkDUfuqnfgo/+7M/i1e/+tV40IMedAqtu/N27wD9OkZQY8+g37jqsQdgWI5Y3rCPxfVnsHf/G+DaBr5t4M+cZbBv7wBueQBaLJnd1y0QTWAqtXs5Q1IDDyEWJhYRgzsaElDAbwpaAKikChXoWwUuXq8SOI6kZoRj4M+tL3JdlNAXmU6T3Uq5FkzItb6i1iQTtg95Uyx9ecCZrq7JQX4FLYicZC+XYIjNgi7AT5kc56Sf7Ma4MNk4CNMLGMeCelZyiaqaiI3jAEYywIuVx1MpooxL5mBcXTMx
g38OmfkF8x3bR6AAWFObdY+o/rw3B7eMQO5ffT2o6ilnOAv4R2DZUEDkQcHyqNq2S2R+2k37SZaBu8SMQI65lKhEMp+Rbmaz72lggba0ayqJpfeErXWmoN961PqNImlkvqgBWucoS00tG4f91ktQSoNR5VxfCtBlMKrJ1z0HghoJWrQinaUgrgmg2KCqBhpiBIjvS69Z+o42goQ5SFa9eQLTw2ay23ZoW/T7jGyCkpEktbKe3khSugZRAyyZ7egyGFYFkvWnpkGbaR0qqY/ISQd9xQZhkGwp9VqWPL5NB98sMxsgQIK04GtYg/v6rGDf3P2vwcRtlvIYuyIFFcfCrlAJTzvO2Dzn1fjYWlPSj17AvjEi1x0bJiCFrUt5dxkH+++6370GywHv7DJtvHCEOAxIAlanGID1CvH4EO7gHFyMiA+4mVlnWTKcfQlb3yrXmGu6XAe1j1zfawrgKMCfwQCU2sJDVNlgZgipDLSytvZbj5QSGs9JPo1ZtHW+sKyVaS1la15D/AL8xZSy6gKRSYACr52xSvApP+zJMSYzBUbUrAykNSM5WINHZb6u1gAinl8NOJYPlUryQQYLIEF/I1uYWy/JMhbQyRKCQMVGp7lgu/ZpG+h1EoBl35v53/R60dfW/9wG+Nnv2HVYz+GlNv0W+Ks+atfpOaBj2reZ//H6pIkpfJws/TlncyoAzpU6fiLruR4TLvQB65Hr9ynYp76YrWutgJ9VH+lMklWuyQujlqF9t7UoHUuOet8BjuBjysw2CjV8qn4xb1UiYiK0cIDjJEAiAC7xbShgGjP0Juy8lBBtXWN5pBCKL+RF6QDg102Xxy5N1Q1ShHMuM97gWLVk6lfma88oMGT/JelzyrcMq2yQ9IevOeXNEErCgQMzi1WmvPUuJ0jFfHzCEIHOM8M4hRG5mACVMUrG36slPUuSlZ2CSO6NDfUEggFYZX4SsC/vR+NmbUpKieVW5wBWYVtWvlXTyfogoJ/1+8y9ngQQ7lyDdBXkBne+1c7uaUZE+MZv/MZTO94nf/In4w1veMMVB53f97734eDgAI9//OOvuE1f8AVfgC/4gi+44uNYG4YB97///fHQhz4UN910E2666SZ88IMfxGtf+1o87WlPu8OMrNe+9rWn1jbvPb7kS77kio+zXq8xDAN++qd/+ooBP4Cvtec85zlXfBy1c+fO4VM+5VNO7Xhf9EVfhC/6oi+64uNcvHgRn//5n49/82/+zakA6Y94xCPw4z/+41d8HGunAS6rPeIRj7jiY7zrXe/C2972NgDA133d1+H2228/FTDstM4pACwWC7z5zW/G+9//ftx+++24/vrr7/SxmqbBfe5zn1NjggLAox71KLzrXe86FTadcw5vfOMbEcL2JLc7Ys94xjPwq7/6q6fCHnzc4x6Hxz3ucVd0jAc96EH4mZ/5GYzjiA996ENXdCxri8UCn/M5n3Mqx/qt3/ot3HTTTfcIieZrGvRLMaLZL5TJcNwDAHzX4OCmB6E9WKI9uw/nHVzbgBZLkPNwB+cY9Ns/y+91S2B5gCTSImiWLIEpMkmDMDeUoaQASw4qJGXy8WYoSoatbv5aT9hv2wL4gd/TTXbrCGc6h84T9hqHNqxAx2tQf7FIdWpAvV9zJmu/QtQaFlK0Pj9bwK9pGfBrOlAM3G+bTUwOcCN8u+QMXRhAzQQxaFsmSZwPclCKcPI7vtvnADsAOGCvoZwNO0Zl+ZDJOi1BfUA3QNsXXLsYZ2CKOJhV7eXNYTLgOP3/xrFrcM9teV1/qbzkDHrKx48pzdaJye+h1HNkYFEDc/PAn8141+tyKvFZPsvvB71+wYgK/zYzr6yEjx43g6vmPf3tacDNvrbBNw06quzRGEq9mfUYMcSYg1HK4FRrHck95NE6h/3OSx1LZGm3KbhrWX5ToA2QzO2mQ4pRQDPDetXgtX5ljtkQxwwYcl2eEck3EjCZyK6p6TFVlrdicUjgzwY9qzYkIAUOTs2w/hJi
ltQECoCXaw9qJjXRZj0oE0TOxwc2UfDJfZ77oBnf/aoE3lRKdBkBbwJsgPzt+NoAivRvFFA4ldfTJApPhCjywMwOSpl5yjW4JPBGAGKC9wz4JW2/sCnyuXZybs1YZsBS5b8MC1Jl82ICQkiF0WeYfdP6fZ5ol4y+s2vOyHukEBGGEW51lKU83fIA7uz1iO2S72WZq5Kdl0wCgNbxG/2yBHMnILhd4QkCXozC/kZZQ4YQ83pRf5+P4ElBik2WSg5cA0AK8qOu+Cal5yI7ysAfmaQsnlYySomAAlYCdfKNsgk755gJngGimNUZtoFEW4GzGDm2r/OTSH3a2ldqdk6ybOmYil9xh2zaHpvgIX/PtnsqXW2M2X4oa5p93u5hVWan222A35TVaP2l2XYZtp8OlfpJ+ppg1kDrK58A1k3ZkZU5xyDVFPicA/nk/Ocaw75DP8Zca/x4YFD84nrESu6ZKGsTg3bsG6k6xn7rsWgczi4a41tpmYGimqH3UrU/sHsAnQIkCco7ZuoxkJyyr6bXn4L2kdFXuJhAYIZdSAAi38OBscCJeLoMj4B7uaZfv0JcH+c9ETlWN3AAUjNw0LNpCwhox3TLBeGsDw+AoAokBA2lsC8jrNpo7jlNKEqaVFf3Qn1/L5uCzhNCIuy1DmdDg5WwM1vR5lffaB0iQiJ0fgk/s5PXS55ry9e+VchtM31EYffpee/kXJLsLStVBMPos4y/CvCzgSY7z4ryBBrxS5tWkqk6BAP4jaEk2Sp4DCAndXgvoObOdrazU7fTkKo7d+4cXvva154KaHJXmPceb3nLW+6x7TsNWywWeNrTnna1m3HN2ZkzZ/DYxz72ajfjmrIXv/jF+XXf9zg8PDw1Btxp2U033YRf/MVfxD/+x/8Yf/d3f3dFoN9dYUR0KoCftdOqxeqcw5d92ZedyrFO05qmwf3vf/+r3YxZe9SjHoVnPetZp5JwcaV2zXvL5H2WT/J7HFwm57B3v+vRXc/gHgDeTCv4tdxn8EsAP7fYQ1QZOqlvlVyTA7xWHkkDupvyjrxRCilJMiohBM5ybV2pPQbw3qd1LmfT6mPhHZo0goY1MK5Aw1pq9K15Eysb2lTVryign8rVJJGzScLwQ9sBXeSNLgC32DOBgxVIg/JGKsUGpmaz0fmH6ufZE+Q4AOgaNK7JIBEHzglONvcuapBmThazHuu55sxlrc/5cJbpNv08/1/eN5t8eyzt6bZb1zqOKSWp+FjM0cn1/hQMVFB57n9zNifLaU3DDLbfCv7NMf8S0UZwJfcRJaBQ525P2oQSfFM5IRt0qEAeBS8F+VQGlzL8tJbfsvFoHCqGn2Yma1BOz80cYFk6oaw8ADCg0EzWfpaA3BjUKHV4EjP2TMCykmA7wTburalsG064/+a+N/k7t8MAflmCzoKTl2rrVvaJ+b8CfjoPAZz9TQ4ptbPfy+wTBYnNdTHEWGTA1DwHDUNMVX0/gOcAv3E/I0uKlrHVm3pyXm3gj1yRdZY6PQru5WtaDhPiplweUALLKk97d1tMl2YIX+nxd3bvNNcW1zCFiDT0IuvJCgm0PEBV15Q8gFIji4AMrCvTT6WcuZZX/XsplYCuJ+KArzCWypqtTKaYQcPpveUkMKzspA3GmgULnWEBTwAW75hdQ2Zd1V/RplswSRkpCiQVGTogSPvylmvCLr5Tlv03CZqn0iY1TazSdtrXl/0z08STidkkkctiu12hTQE+oPhcU3aMBfzmZOnV5wHxd7cF/azPpglFToHKk2SV5sBc/dfcep2i+F1b/I2Ng7mclMJMrljWTlFNWAVmmU/lpbU+sgJ/KuvJYHnN8spyjygAVQVwTvcBEYCLObmQqDzskq3jaqVWdQ/A84cA6mDALNfRJjfPMgXvfVIIvFeKgYG/GAEXkIaWjzsOVbkEy+jP44qiRKBma0HqlaYSv/kzKNdamqy/7Ndz/6z/rH4q+7Ek9U0pS3yGRIhRzoXjMQkRZQ4EzzEnSd2W
hKTib02Bf2X5OXuuJKmr2gvOgdxhrP83MXKez4fs1cl5UaFw1XWcUAOmdp0ISBkEZkUacJLqVcio2vlWO9vZ5dlp1ZK6q+yeEIjd2c7uDfa//tf/wi/90i/h5ptvxstf/nJ86Zd+6dVu0lb7jM/4DPz7f//vccstt+CRj3zk1W7Ozu7F9k3f9E33mKSSaxr0I+fglx2aZQe/7HDu7IM5MNUtM5svy4qMQwH6lgcMhnULwDWIIkFV1Qcglx17lb2kxAy9siHhTRQRoXUJZzovUnUJrXMZ5FCATzdShCJHuNfwxu5M60Chh1sfgtYXQeMadHweqV8hHJ5HWh0yw+/4kDe2IuMZ+wHjihmOKXL9GPIOzjNzyC2X3Nf1MQedxgEByGAnjSsGOH1bNr9qJoN3I+Az3ehPNsxZUtCL9Bcx80c3eCSyqd47hEQILpmN/clmg25Avbne9m3NsHWgAtzRBFAwNn3fylg64uxeew/rqF1KlmWafW7rzdQBrbnvbv8NIr4unbQrIsGJRNLcBlL7ZwOaGjAlSNwGhSWxwZjTds60S8dXwRCV8wyJs9A1MKXADiAgn3doJTKqUmws5+nhiDPPHZjdZ+U8NVgCbDL8qkDINCCR2W56jXMgiIPVpG+be0CDWmMVaMhBKH1P6zsRbdwXth0VgKYB3AYcYIHI8G4Lom5jAWz0zQBZOr9tY/hNbebYOShJWrOvYZAvRZYOBpBG1PPIFkt5HlUJv1KPKCSW8ANK4McTEKUGkNuygMbEASGdtxMEfCSubVPqFHoJ+LVlPAUEUDmsUdoVxlQHzDB/TxVpLIbCowQvifia3dnOrhXrrjuAX3Yg75AiS6Z3B+fQfNRDQWeuR2r3JekhsO+gyQQ6Z60v5nsrdvsYInAsTKRtay6lErgeY8LxGHE88FywbDxWYxBZT2a+nFk0LPXceWiKijJUFi6Bwhow8sOWhaX3OSlwOQkcN+0S5BzWZo3WVtt5QOX8VAEiCnhp1ykgymtnJBHD1rl7NrFi0r5EhNTto08OKaS8/unYWVnBuZp220xlBzVZIlHxmLRm40ZbgdoftOvlSeDVhNWX5+YUAXhk6ey5r5p2KLuPX8/7WLZ/2iNNdiIBtC4F/AHFvyDTZwt2VH2e1mcECihu10ebfKKJPnnsy3fyeJHL9xzXll1mSURm9XG9y6MhZIbfEI3KiPhHS6+1+4pceuekhp8wZgsAaGQ9gaJWYKVf58YNChrK2IGEsRZrRM1YiAnkOIGSiMw5tdcllTFU+cgYs/KJJh/Fnmv7kWdlAYoBrmmRlOk34xMCyoYtpteQvX5CTAgoyQra9pDEL1Y2o47FhOFXWSry8J1X1RNCSHx+PIXsUoUIDACOR1ZK6UUKdAqqlraX/er0NVCDfF6SJVpHaAigfiUJEwOq+p4W8DbzmbL88r3hfQE4BegDZP5yDavpSJ3k5BrZP5c9ne6nAV4fEiWpNcrXoo73zna2s53tbGc7u3r20pe+FN/7vd+Lb/u2b8Nyubz0F66yfeM3fiPOnz9/tZuxs3u53VMAP+AaB/3a/T20B0t0Z/dBiyWaG2/KwB4AwHlgYEAMTQta7InUZQtqu7zRgO84aKUsP2F4WGmRJJuRacAnIoksI8EjCUOLsGiYyQaU+jGaJVzkUyDZtQSnzL5xxYDfuGKgr5fn1RHS0COuDjOrL/YDwjAixVhq74QIig5Aw4EM6T8BfKwYGKAaB4R+xePlPEgA0AwWwIAc0039CUCDBT4gsnpkghUQgJGcQ4ojvO84IJAKmHqpHOfMvpPNoNbD0//lTNuZ7+bsblz6d6ZWWIoigppKMChO7ukpw28K8G0D/GxMTYMG04zh8n/zexlwowz8TY9nj6nGtdUoy33GwH1KkCBRTBk8mU5bJbBW3yfcnzoomuuzmU08IFJPxM+qnO8k0KTSU63IS02DUQqwWKAPpp3V33MZ6caqa90GZW0gY07KVgNzWvvNSJRxNrrKdc1EuLaB
eb7Jx8myo1vavQFInmQx1hGqbbaFhZB/UiQyaynMBqA+A3+5vqAr0pjV55MyT1J1zydIQEresICfGoO9JCCbOf8iT7UNEJztnwX7qJzDTcnOmt1nrQ60cVRUmbpOwPcpK/Husl08bGd3xjSZqj1YZml0t3fA/1T2xzRhQRixXoFziVIPERnIV4CsnbkfdB5QGcKKtRZ5ThgMaJhr9EED16VeH4U1KyaEHjSsNpMstC6na0Cq8iB90T5qXS9mxdeMOZvMkiSZJeNfQFZ9QCQEx2tkgmHPX45vNU2oAnhMFVwlB+jcGcsaPE1QULsj7BGbLGHlOeeAv9m+2NeXYsORL2uTBQKnYJd+ZZJcVN7fDvjV41AYfvl7dJJ6g309z6iyzHqaGYcCjopEdzB9mvthXYsg58ACfmbNTc5v1I8bYi2XHgzgp/5UK/WRW++w9E4kPKlaR7XfVf9Nn+x5LTVx63Ol3yEZO+8YqHJEcFIzE9jMEar9ZONrmrHO4yT3blU3TuucSyIk74ti3jtBX4N9pzS5RhWkDPLaSnhmmXz9X0yZtWgBq5RqcDo3beLr2zmByMkeEfBR/YaEReOqa1nVMVJIaFCSEK3PO7e30Bp+di7QOa6qg04oIF8MsLKdG0xlXQ+2WAX06TnLPmPZF2rfcptIgXnej7CMMqs8kFdglq4a6LfzrXa2s53tbGc7A2KM+K7v+i489KEPvdpNuUN2T2ci72xnp2nXOOi3wOL6s3Bnroc7OIfmQTcDLWe+uv4QcX0Malqo3CVU7rJblCxDzVJvllwbo+ly8GqIWimmOPg5oII6s5uD0lTeS7rJ5e8psKKbKpYoJCw8cWCqP2bAb32Y2x4PmekXDy9kpl9crZBiRDQgnz7U9HUcABcifIhwXeAgSdNyofsjranQ8tgIEJprLWifp8w/azGY4L4rY+w94JkxReiAcQ0AnFUvEqNJ/wYHNRrXIOUN3rzljSqhyspOiQHABOQaeieBf9ukPed/s3zbsqEyqJazUGUY5B9TiGfKEJr+ncEy09qN2NqWYJd9z6WUgb9I9QHsZt+OXxDgWjf/RCKzSOXaVsB6Kl8a83HrsZ6yYVMqsj3T4GPrNSBVasY0JstcQb6qtswWNt/sGZ0B/DZAMlOvJAeHNdBhpYvMMcqPOj7h5HigjFxUlvG6HAki+3k3+a1JZjX3IeU2F5DwEuCfHYtLtWkaXLPHqAKQxBJ5vpO6XlSCoBqg9iVQXV33KHOplU+2kq9qHChErkekTE8FAPV6IarlyDYsz2mbdQ1VdlbbEsx1PJXGs3KE+T156eQ6JwIolho0O9vZtWDd2X20B0s0565jZYTFEm55wGv+0Es9swMOcgt4prUuE4DGsIr7MWIUgExlO7Xe3UaSS1IwrV4nmK0URfKX/9E6V917/B7XoaL1GjQcAWMPNxxvJFiQa5BCA3iRHc5AmkhbC5vbk2NZOQMi6ZygfVFWstZsbuEQpX4ZBC8LjiTBBvPJG0A9v0/naJuU4Fsec/NV9VVjKgz76Xw1He+NnxdgsiTuGLafAeAuCfxdTgKKsaquXz6Wz+uZ+uC61tt1wSYZAaj6fVLyl/ZP2X5cZ64kT6lp8lZeMRQrnCbBsENbA2JTYETX7XxO+Vorzpv4DRGZ7VcGyZV1XhKDdF0NSZQTBBBX+dwhpsp/VSaXEyZX6xjwaz0Vf8uVdbSSR5/cZ7aW4Ukytfy9JGt2MjK+fHNEsiB+/Rt8/YmfmcreKktxbvNfYtkbxRALphpDBvxSDFzjfAKM5/NELgO8IRVQD6iBv4haMcTKZ+ZDVvsWzDsl5nryzqEBS9iP4twGByQw4BXkwu1DyvcDUV2P2/rEl1IIcaDqvHsCy7IrSzqOXFM+ThjKc6zeyX3B/3Qbz7lGstSltOfzpF1ZEgUFrdsc4nbm+M52trOd7WxnO7vrzTl3zQF+O9vZR5pd06AftQ1o/yzXmOmWSP1KNnISmNg7g8zEAXKQOi3Plo2G
vt/tceCKGgEpaqaS1pPRjG4r45RS2aYo80kD0a2AGF7q93kidCSAwrACrYbyOgyg4Qjx+LDU7OtXQKwhJHKcDRpDZClPNEi+Bv9iP4qkzchZrsMIL1I3qWszWEeNvBawj1que8gBCM9AHlAC5RqYqoqyS30GAVVT04KaAGoarudnspYRCETynm7WXMlo9jaj2/RZAwD5fQm0JEb8sgyQfosDNSTfnYBfQFXfbptZAAvYlPwsoBP/jrLlgHrjOgXCNrLRJ2GpKhA62bDHyWesRA8gidMSdJgCdPr9Av5t9gfCVvUSpCmvNSiTNmol2gDbbH8hzA3zOxGQLGbKx7NZxpbV1wiwk+8lzUKO8yDXRr0cMZXv5A+V56oWiZGtygEOfU+zmWfkoGZlMxUEtJ/Lg10H9NLccXQs9fd8eU1JZEhtcDGO8/UHk6k9CIAwlvtRfotQatkVBsIM6zdfgGZOIpYkU2k/sv0RFk3SmoLSb51fqyGRoGRMlGv32eHyRFkyTyWoMvBHNfuzXLMwwcl6XKbsviItlTKzO3dR2mcIIlnuzJv7Xy1EyPrB0sXrqyDvuas7s7M7awcP+ii0970f3NnruS6yrvESLIcJ8lKKGWQYhVWkdYITgHUIGCL7TmPkJJCLfYR3QOf03qF8w+laZVm7mgQwhFQY4J4yYKH1kReeQOOKayL3xxysXh+VusdiLAO/AMWOZf+8rMKu4bkQAFKEE1ZytZ4nw6xSdlXQ9TUBiGi9y4kuwXGNQiKgcQ3ICdvIstqmNgmU53qsvsuJaeprpgSMOVGhHEKTKXh8WU2htNH4EMRsZBCMPLIw2jITx1XZRrqa3+G6hNv6mw9cjpsIfM40AUbgG1IWpfZzJoFqLgkJMH5SStX6wv7SfJN05lZZTyFfbfoB1TmzgzUBRAygy4x5A/456wek+hqx+xUFTHwnNftSBt2HmDIwDsh6CAbJHYGlPB3XSGZ5T8p1MD0VSWrr255o1Tj4SX8ds3GlTh0cJ0TCsSeSQHATcNKaJs4UwF0YsxMwOu9TUMaQvIOLXOaA3/fVsVOQ+n++BjCTzGmOXE7mjJQy4xcowB8SqmRAu1fUpKFo7kHKe4BavWAK+kUiNK7ULQ5Ojim/G0OROFe/h38jTZLgNs+fVZ3R1yqL3DqCR6wBP018s+oXc+D+Nr9Ynqs616bGNP9P1gFI8p8mXwiYqePIzErAuSRgN1XX+t1lO99qZzvb2c52trOd7Wxn14pd06Cf7xoGqqQ+A9d2cHApITWLUqtOTTJkY3dQB+HJcTCFXAb5OGBTAim6iRtjYXwkIEsgWpM85SyZokHppSdQilyzLwZm+I19eR16oF9zTYpBglSSlZqb6l0G9px3GfizZqU+9Zli+R5Wff5Orv3XtRm0U/YfS6DW4F9mTQIGGJQ2Kng4CrAWgxzDDLVsqhHHDBJIsQ1pfAlu6IY+wQZrkJmXKquqQcK8qd5yvaSZ19tCGnPA1cZnUpHFFLxMNqoyPDPZtqU2yMxvXuZGbwr42dc61Ar+Tb9XGFZpow2aQW/ljBjYVhA7VYxVa9sYi/qb0zCfAihACUw4YjCnSHoWRqyXGiOcdTxWzLzKLCthG0t1mqFu6pBUAQ4L9mlAKGzKdLJ8UeD7R9qASVBqgzUXQwHZJGizAVaewMRLOnwSX8zMgLlAjDkOxSDBbcwHadRcDUQmAHOyZXPttWBiEhm9Kbi28VUCKHHt1BzMMjG61ruKAdpKoN9L8F/BPs1293qNzmSgz7VZ2z29VhWgtqxgvf6nv2nvCQ37EhIQqQ7w7Wxn93Dz5+7DdZGXB0UBwAbNJ/MFg+UlmSRkuW1h9wWVnIT4UAkuEnwr64kFcWw75Aa0IA8Dfg6tk3pXMh+oDDTGnudxeaR+xaDfOJTAfwx8Ry+cmRObMl/GWBZT7bI6Htpn6W/CpH0JaFEAC/YnJSlJ2VyGObdhZh63bJmcqOZY/tku71pTsAbDyv8dUDH/53yNyj+y
KJiuZTqpTcHKmb6cWM9vAmJtrSebqPrslOXHfawl06fv8d+b/STzv5xMlpBl2qcr71ZZz9yfiV9gJFG1PxsJSoa5x6oXcdNRsv6DYdjbRxQ2pGXK52YIQ03Z8VwfDlneU2U9lSGvkrnTvur1YMcO2AR9mTlXOpEBJmJfMkYBmUCA4zlB73FgkjyWgMQUSgawcxIfZ5UDfO04qRtHzm946WQAQN2vVKa16HRfMmWXkvoksp6b/YV+Jk7uuW0+r7L8GIxX5qDxK/R3E9f3U/Ar0CbDVRMOAGZFF/lWBQIVRCt7Iz0fMbHPAnNuFPjzBK7TmKU9ZwC/ueS3etCr13MJbVWCmZ0/yGWlhgz42dMl92tIvGUMblfTb2c729nOdrazne1sZzs7ya5p0K85ew7UCqgXI9I4gJoWsd1D3L8BqTtAapc50HwsMlPXL1zOYBz9Er3s0JLJ3I4poY8lU3qQDHVlcAwhAqHeZGmGLDNSeJPbycaaJacuMpvPgn6BQb80cPH5NPZI61UuSJ/NeZCcLfJSr0Kka6IJLqTA9SnyewOq/+mzBQo3AcAGrm3QLDsGNBTMy2CfYQZ6D3TL/De8R3KxbMCHHljsyQ+VjM5pMF43vFVwQzeB2n7opnUT+GNZSxvkKKy2qVXBsC2BHCtPqZm7kOPlDTRSDgb5fB2kLPU5DWZNs8+nbctDM2lfBgwn7d8G3OVM9Jk+WwZTkOCr9i1OgquAsPxcqbHHQAcDdRosmNq0X7aNNhynx2D50DrbnFmxAvIgAnEoALkF6XJwdDOwMBdUrGQ6M5gXK6APKZb7MoTMElHwvQLhnS/1oCYStzT57Wn2c2bEZXkjm/lsgiFbLOm9kREmO9AzTAP5bQYyHTMoVIZU22MTCOZ+3963ps5LDv5NAzu+2ZBvskYowSbnODjZOIKtgGXlxpQtrezP/F0B4bifCcrKzOzOKcCpgLBNMgAfR4O/AuFy4CwHzagKkDnSAHcowDI5NAKaKsunuwqoH7OA7rqI2F157J1dXWse+NHw+6U2cq6D7FuWQvctku8yAAUgr7uEkjgVc9IUg319iBhCwipEqS2mMoJ13WNBqbJqAge2gUXjZC0iHHQei4aw1zjsNcwCpvVFrok8rDmJauhZGl38KzXqlqAY4JwHuQZAxwAhUM1TCuwz1iWMLBSZamXzaQ0/DeSHmNA4ZLCPaxkCwTt430lC1whKjtUQgMym47posqZ4o0ihSWzkKmZliJDnJFJ//PGavVZq/U6ThhQA0O8kA3AElBqNKY6Ad7CM+YST5R1tsk29Fs4DyNkPTBGUuO6z1n8m39UAnfmZaQ3DbUwZZQvpMQDuo0JKWR3CgH9WMWFD3lPAvVznFih1/ebAwLm12K6/c0uuPd70f0CWpI6pBqAUzNO1c7/1aD3hTNeg9YS9xmc/SyXUlb3Oa2Hpd5TBI0eV5GuiyG6HBXzhgMD3kvcdotzUgbNn8j0DYdAh8e9qbTxA7ilHIlcLJPl+iAnJUZ1QBWBaZiBp2QL0eQ9Dsm/ZKCCo7dZrL0aAuN649ohzGPj+r4Bm833dJ2QmcCx/A+I3JwCREJ0B/FLkRLYU+R0f4ZulJDhxreveMPi0rumw5SLPEuN67o3/znNCGWeHwvBrHVjWU1VnQs8JFOIzl5p+6lslzEp4mvtgyoCtPm8Ty2QcimpDEjUd3gBF1v/lsdVdRAT6AKyvAuq38612trOd7WxnO9vZznZ2rdg1DfrRcl8CUh1osQQt90EH1yMc3Ii0OEBqlhiTxHwNG6yPgKcGrmnQh5RrIwC1nGfO5E4q95Yy4KcbL6AAPiw35eAkO5UD2kaOcCLxo9nQDCwMyAXmtX9NhzT2DKYJi5GA/KxWErA3My8t0JdyrYtQgYH594Ra45cdswgHDrj4rgE5B/KuAgERI9cIFMAvxQAEyaCPoc6svQMspg1pGOCSwZxLWdry3W2Mu1xnLNq6dRIcUhYDMSDAGfzIbCVAwbokrzcD
/ia5Octu5TZNMvKnwOZs/6os4Pn+A8iZ4Ar2xXy9ixTRTI2M6vp2TgK1VAHdc+2Ymv1cJUFkgFM9HikQKJnHZEE5ZeCZ4GOWcLKAmQFz6sGY3IMmkzmDWBbwGwe+tvX+UtDPeb6nXJCfZsAbLiCNqJkxueMiq+t9CRRKxj+/TuV9t3kfnGSJiANuU8aF/Yz2XwDDNGEP5MBjBAhjLe9pa7pkFuR80DdncueGlNfOnNvoAESu8RWBTPhFBQLUEq8ZDCZMglB6fsr5rZ6taUxWxp+EScLsHJZDy1K9RJXEW2b5md+vpGMBJN+AXANPLNXlttJEdraze55Rtyz1fp0HFvuIAv7ExQEgNZDzvAEGDrS23JjXGeRkKgYopDafIABDYAk7Ep/JAlBAAala57K3umiY5dd5Tg5pbdBagtWatBHHvvhXBvSD80wltlKlMYIoZsYPUmQ3J/HaFIy0tVPA/zJu6wogTMxSIueA5GqGNjmDeLIlp5lehTUdxC8JsVahyFKC6jOY11oLy5k2VW0Uv8ZXazmvRQpIWSZ3Zv0B5chTBuB0Hs4ndbKeTUCYLF2d+LgpRj7sFuBrbry5/dvPD/uDqfLN7Hv63Qhmoaq050Y/pn3W9+VcJulDBvbyj8X6u5r8Fs34TBKbtpll07mcQMP+opPXCpSr/9Y6l+s1ewX8zNpWjZUZC7mkasCt+vDm/saTK7X8hPUexXe2yXkK/E1/OxGDPnptVh/Z0g5NOqRWgLuJYglp7fHLMB1fSsUfmWOTJrNn5OTQWiY8RAFTs7Sp+MEq0a5JZeSAOPIedXIy1G8fYpI5tAZ6HREGlGS9SAktHOASXCxyoVSdd9R1/GzymybJTXwp9fk2gL8tzD7uVw341WA//05KKqtKiA6gqLXHSx91rJUpuqvpt7Od7WxnO9vZzna2s51tt2sa9HP7Zzlje7EELQ+AM/dB2L8Bx8v7AABiKHI3XrImPYA+FLFGrYWhG8pgsjVLwIrfGyOwGgNiBFYhbgAky8ZhH4CTzYwCI142dlVwPAfTkwAKoWIQwTkOuunf0QMhiGxmkbMhHxD7gYG/AYXBZ9h/ceCgeOgHpBg35D/ta/IOdHgM1zbwXQvXNiDv4OWZvOP/tQ3cMiKNPQeSomSp6mZbu+k9S1FNMj6rZzsmk+BS7oP543K2eAW0rTfnl7M/LKBvCaTZjF1I8C+BQT6uPSObU8cSmBElK3xar0S3sHlDT3X/PCj/VqS0tc1584tNUK/+XHlvyuqLKVWZw3pNaxZxjCkDFsrCs2yLdiZr2t4TFjDkZw4QR9Q1TVRaqvUlO13BcgpFqs3WGAFK8CHLZQKXB5gZBpjWw6N8/MK8RSyAfJb2rO5TA+LByMdJnco5yyxYy5htinTbiWzFzPqYCZzq/WyDhvYZBhgjxwFJ/c2xZ1aesAqS80ByJSBtxqmq8zINCtn73G1nLCrDMxIMNZWQXJk7lHVQ6vdJYCqOwLpkoF+25XlFg9IG2DTAn5qOuUdpU8X0CObaRLkWE7GMM3wEfAcvQay72yKwNUngtI6/s3upNS3cYk/qcDaIe9cVRt/BjRipYbllMQdm4UYw4Kc17hjo47WqJEtJ3bEUsPYOg2NQLzlhwwjoopJ2jgDnCY3zWDY+A3yNJ3SuruVH44pZfuMacX3MNZHzPN7XfXQOSWpA50QPQICYNs+VOid4R0gxbch8escB9kwPm7HMehQacec7BkvHPs/7KY6biRL24Zg1PUaRTI2iQCEswgQOghPViT8M0mjS0MnzUEwM/hW2c2HHkcqf6jEN8IcUNsEvYBMMk3HfWBPsWqXHUQn4OAJoAMetJzipAVmA5WmdZDvuwHbwLyat1VaAs5AK62/rtD0L9KkzFgsLnwywq+M2ZeGD17WNhKU7sraJ8ZrJ66UFj1vvsJB7Z9l4NI7XVa2drLLqc+uUjhG/5vca9bFsE6cgZuA+ed8BjscYLsEn+Vo0DDoDjmV/NSKDVEkS3lTi
c9Y30iRDVVvQ8Z6UI6gkPs3rDXl1AaBiUiye7/8IbN2E5L2C3It9iNkfbp3LtTX16wT1aQzQJm31TSPXe31sBfysvz419c8dEdAALZwkE/J5dlCfW6TzHYGGntsQ+uJjW+B5AvilEDaA0zSds/T8nATW63Uj9Tu9jJOX9iW5r/QaGUJkYF7O8HAVmH4732pnO7tnWEqaGLCzne1sZzvb2c622TUN+lEj0p5NC+qWLDcFYEEByTV14EP3kQLe6WvA1PSQzzpo4XZmWBFStcljhlQyck6SYRkSVsQbkj5wjQjvIIGpDqmNwEhA03GQ3TWAG1k2KwZQlMxzYdFR2/HGSqQ+KUaksQViRBraDEY48AasARD6EclLYCZGZs8EhzgUwC8/jAyoBf2cdwirHqFt4ITl57sWKv3p2waxa9DEmBlWNA6gEDiLHcj9SSEA07qK1Ul0GxKAet40iHOSKYNTN0lTkA/5fXMNTD5h98xzYJ++X39JssAlC5k381KrgxJcqoG/Kdtvyo6zAFhMCbCgovygsv3SBCRM2GTwARXxqLLSpwJoR7mmbUAB4A221gdZCxg3xGTqw4QsqcrHZqBQzTmVQdwEDDvvMqsPkbh2CafWwycTLBBWxNQy4KfBCA1ypVADc3Nmg4wmqFEYfjOAnzBFKnBeKbNTVt8WCSkAGfBT1mxyK2bzOge32JO08jpwsvF3dJcOpth+ah0WlOzszN7Q4GoYAD8yEEm19BKZQF4G/+zxtY1zr01b9DgNOZAjCfAnRHMDOKDUcEwRNByD+kEA4L4EFKe/pVKl29qRWUmuBv4cCrOEHMvZbR1Hc72EwgrNn9Hj7/agO7vGLfkWqd1Hahf87FukdokAqcsp9yfAiRYJEkwGKgBGn4dQ1hmA14qjIcA5QusSlo1DW1XPNIAbFVndLEmYpdMJ1B8VWc/hCPHwPNfyG3qWTB+HakFMkefz1K9kzh0LUBNHmfNHgHqeE1wjAWhe8zkpJbF0IfhvuCRMpvrmt4kEQRCE5LiOGzyzWyhtzlMACpvSOaRmiZCQJVIt6DcKw9ImualEorgTYMLPTFIQttccVcAvA39mbuRLIMqcGyt16aovmcVo5mc7xyrQoYk4DpJ4woBfBm8i1zX0klCTEktzOmJf4STGjwX/pvLuCvJBfA4F/6KiDE79LirjoMclTfFCYdpbgM83BQQUVmkG/lT9Q/sOAVZl3HQd2wCj9POyDum12Dj2w1pPaKODp5R9r9Y7LL0Tlizl0gOdSkg6KtK6wIY0fmFAptp5JYeq6pqugUm4pYHvK+8aBvCiTfaRhEsj88nnpPiTdg+Q3b18QUsTJj4/eS+1yB3vqXSP0nSgVtnLDlaFpKoxNwNKK2iec5Tkvtq231DJ3SEwsxkAogO885n5m8dLpX5F5jeRA4LnpCH9mJlD1T9fj9H4+rXPneVpHdAmvu8X5PIc2ghDOl8DSZK5wlCrFphrUO9RTmpKWa0if27yqPzTOX9Y7vtS491n8JslZCGqOfJxlMQRvVgWjbsq8p4729nOrr796Z/+KW677TY85jGPudpN2dnOdrazne3sHm3XNOgHAf2oWwLdEkn/Hnug5Y2DbkcjbW5i1XSTqRs6xh0K8Kd7isxAUbk2Ax7GxIGfmHiTN8quq5cvExE614BcHQDJARNThD5npgoDEN7IaTrHtQvlsyk6ILKkJoUI3zUI/TjL+FOzgF9m/cXynIKrPus6vkycrzduIwDXNnCjMJxEApEBv0vkKlabw/I6JBswrAE9tW0wxx1VeSlAsP69KeVpj2kDatssTgA8a1YarP7H9PsloKCXmeZt2/2tMiHKd4tkJ3Cy7M2UfaifV5Yj12KS10GB7YToCC5A2H8xBz1t7UMbgAAgQV0OPOnfAMvgNo4QIuA0AJQY6IyQxHlX6sdMQbBcT8hml1s7Sd7RWJEKHRncs7X7FPCzTNwZQDFdCmSsBsQDGEQa1wtLkO/7KM/UdkiqdemaUvvH
jEEFXp1kKVbZ2RsBMmWm2QAtuSq4VzH6JpnfFTP3UiCk1MuBSF/y96liB2eGp2Se0yj1uQIHpfT3pzJSKXGAvJJ6zR2omRN57DTDnE4Yx6lkWbTjUAPPs18/eUTuMtNA4V15/J3dO40igy4Z8GsXSL5Dn5ypNyR3rCYNyP2o/lRK9RoUbJKJrBErRLSJ1wMfEgBm1+gamet6EjIjxdYh05qvFAZmqIwrruM3DqZO8lDm9KBSzOw3YRz4PefL/KZBeJkzEgBl/WjQPJLUHENCipwAwwtZygoPU9NQNY8Ls6VybTTEAvjAJLwI+1p9IwUSdBy1dpgGxIP6oeoYRGEoIiFEqpynS8zU3A5hB24D/rJfyoM6Pw+qvwts1vma+7xKdtO0vp/M0ylmtp+Cdpdr04/qr+vpssw/rl0s36GTfbuN/urvyd+EUYBLbK5FplGW7UmYAfyqRJwkbH3PDD/phxemXxSkt/hfcr+48hlVItF+KWPCpYTL8mi2rXtyjiqpXHJSv5l/LyVZhan4snMSnwCfJ2WdnmS8jxr4WRIoyagxYAL4qRKJZaVVfoVYYR2XmuFzf5dSEMUf11vRiU/dTG+8LQBw/jd0P1L2mnqvx1jXHAfYh46UWHHGHCrXa1SGH5VzT0bifuOc6vhYH3Hm//beviTgN2cZ9C9zLaHGmFUeOtuILHN6d9rOt9rZzq6urddrfP3Xfz1e+cpXXu2m7GxnO9vZznZ2j7drGvRzZ66DX3ZINz4EYXkdaDiuAAAil+VhAEgmNIM5XoJJhYVVPjdovZTEdVy8pMASgP3Wc42ZQDmL08qLBMnGPB6igBwRQ2RgA22DrmtAYeBgQCztxIJl9WbrYw09Z63HABwf5lp/nMkaEWMEnIdzHmnoQc5hBJA8g3vJOzi0Fcg3B/jl35O/FTxUhmD0Dg2Q/9aLJ6zW8ACzlsa+BN3VdCMoNWmSa8BSVR6p6fKGMdeqOSGDtj5nk6RjQq63kcHbyd8lW3gTWNS9o26gdfOupoE8/ck57M++p6+dyR4n8zdQB970LGiw1EOuGxQGIdeWSRsAoIJ4Kttp36vbR/mZgWoIcId8vWrLmMWYNr4fJQoxAJArGaNKhKrs0ARUV9BPM83XjcN+9Gidw37HoJf+spevtg4AhCWr97UyH6SOWsogDG0HpuYCGUAVwKCUSg2/GEstP2H4KQCooHYG/i4FbleDp0EQb4JSnsG/EEqNKamPqfW0yKccRMkyYRJkp2mAakv/K+AyxhIEl/Zo+4oUlsfM5V0Pn0g72dpeOTBkLbMowQkO+h6QgT8LpmVgb1yBRgnmS42uNBSJPq3Lk3wrvy3AQ5S5BsJE1uD9NHsfLgdZSZmDcwEqW5vKSlzZa0oC9joWqekA12BMqOrv7Gxn14KF8x8C7vsAxOV1GNp99CEhjAkhxgwaABBwfoXU7SOa1cwTMJqlQwPWQ4hYjxGrvNgyeNdm9h+vEzHxWuldqiToOk9YKngRe1Dfc1LA+iLPG8cXEI8uIF74cGFpDzMFjGNkQHC9QupXRUZP72lnarA5j9QAjcgUjlqTK4HZc9JDgsNo62bNyENyv1gdIADwruFpK5Way5UssihWhAT0Y8QYgbXI1g8RmeG3GkNmUlrTGm66niNSAbhqDJB/eotMlgJ+Qc9LtQ5JEobKWEqyReUHKihgmGyV2flfWHUIXFO6JG5QZv15UfUIVJhilLDh501VHfLP6dspH5r7D64vyTlNev74fHsUtp+36+3UZtZk7ruAySMK28+szdwAWdPc5Dg22UmSdFKKoLFH2+4jJaAT/y02Lvt3oUmirOC4Li6VOpiW6VcrTQjobKQsi79NNStzOgYz55XCWM6ZI7TqgyPBi2wFybhbtt8lbZtfN63f5wvQR03LjD/PrzPI5+Ta1IdcwxZk5Fq/KcsPk0tZjSOI7K+VneX9oV37nYB2pm/WV9PxVNEISQZQUG81BqzGiNVYmH6anGctOpbdV5BaWdGO
WHWm8YRlw3OoyiJj7AvwSI4Rb014kNINDFqrDC3KZ7cBfnMyvtrnPNfxMUvyA0CRpU0VdOZ9WMosv5X2OwGti+j7nW+1s519pNnzn/98/MEf/AEODg6udlN2trOd7WxnO7vH2zUN+tH+daCzZxCbJeAbYCybbIqy0bRZt8SScmULJscxG8wgQIjKBTloTQGCcyzbGJIUR0dESJTjxDEmUxuQC6eHyHWqOPM9IYGKtJ5sNHNwSUEMaykW+c8RgEp+8g8yyNa0SCMAF6RwfZ8Zf65tsoSn846ZgfI6AkiBA+NT4M8GbGKIIBdB3iGGCD+p40C5VoY+62sH8h5RN4SatS6AH3yT+6+AX4StqTdzzgU8K3JD88AfwMG1JMGbKjsXDObm/sGAgSqmqcGgmd8HFMCj/HpbLitNXmcAMB+vfMJDMusJMPGPku16hdmfGkjx5jmkhNYDjgeK64+FhIE0CFsz97S+nx7DZhjHiiVoQUeWb9TDRFeAx+AZQNezNiIB3pSnI2BMBO8aoOkkuAgOECRlZEjwTMG/YIJRU6DGWgrmZQH18jNggLIJ4KfMWBuw2zrwEnziAazen2ah578z4Oc3ZTynAcdtDAvDRksG8Mu1rXIfV+Urpr2lna60Sf/vHBDlXr+cAN20XYEDRpSKTOi0ZiCNRdIz11mMytTxJ/zAZbYBKIw/DWBtq4tov5Mnh8IOyPObBAwDXJH4iikzvu9Oq2TE7qLj7+xebJJc0SBilZDndm/BrClzS4zXuVStfzkIHus1JRKxtHTgNJ3VGFhNgaiADJB1lpBBChpWLEc3rhj4G1eIRxeQjg8Z8NOkDQOoTC2NnFAF5+C81lptGHBSJh4AhBHJNSC4LOXpk5CEhTEDAQMvC68QYKsKdjuUxASZR0bxi3QO0XqIY6xrhunY2hpfvEbzmDpBxBxpDWtuZATQGp/GPk9NgT8AeQ3ONpfwMfG9597nf26y3lQeEhHM+NN1TFnewhxzQGaKJSMLabAXM+alHxumgKGRZgcESPSiBEKUgcU5m6vZnV+rHLaCdlvum6zsoWNmgNKNur6yviuw4x3QoIBqMRFaFP+xcZQZfo2yZPVeMr6EsjpV0lIlT4ECIlX1kC9lJgHJU6nXFqRdzNhMeR9g2X4n3koWZJrsQchIY1Yg4HSfYs+DvV6xeZ0woCy1MScJhoko7x3yHsEBU7rkNlYq701rkFfbkNU3YvGxRwH4VY3DtrFqs/HXHelcVR6wtZlNW7QdXJvYZZA6q24YwM+ejznAT49Hcz5q5ONvMx2DmDbXjyBZF+twCf/7LrCdb7WznV09e/Ob34wf/dEfBQDs7+9f5dbsbGc729nOdnbPt2sa9AsH98F4/f0ZPFJLETSuJYgy2WT4Bt41WMomo2R2p7zJckmBpMIO6xwJCMX1zCgV0KmVgEBIZYM1tSzNom9k5lvD2b9WRs/U3tKNVUpRan75zADKQGGW/ZRAwdiD2i5LfaYY4dsGASPIewaWQkTsR4EtL99SiEC7+T55l1lCnEkr2bRNxywcZfW5BvD8nmXC6MY25Lo0yGOqluW+JAu5ZGCXAFbeOtLkhUs5G1dVuKp+JT1Hm2DjVJLTAn36f4cC3inouAFEznze1obR3pS6fSWAdTmWZYYkChbk+rTGsrTlNQC0oCwJ2jrNDsdGzUqgZhYC/BknYzcaRDJEYIgR/RjNdx0G4mO2rgCArQDlbUjY7zwAytn5eqklyVhuXMdjJrV+EJkVVsA/DmBUGfIToCbNBQgmDLha1rOAgJkFuCEBWgDCzRPjGYxvuvyW1pTJz23HdUknQakK8JsLEKrNJQqY91KYAH42GK5A5my9wtLWHDRrWs6S75ZILoIagJK/dA07A7IVsDWUtketjydBqBhy/b4p4JfbZsZnI9P8Eu2oxipfL/YCv3QgKf+2spVdgyECKSb0Mcp9wNf8ehfF2dk1ZNQ0SERw/SHfd+11mfnOKRoF+EkNM7GTAAWElNc8
XTNCTFiFiKMhYG3WBU+UwT4XE6LMDSpH6J2Hywx3/u2GwEDfcAwaVnDDMTP8VofM8OtXzOCzc7PUGq6AvxiQRiCtjwEASWSVoWyWsc8MF5YvH+F9J4ldCvSZBJ4E+Tx32k18gKkVNqOAOgZ44SSokjQwCujHDB/+ez1GDDGKukQUJmWZZ1pPCIkAcTNjIpHFBIAavHXi2VhJPevX2DbPMv58IyUeR16fU2QWdR4At/k8TaiYMLdsQkZmAwGZNce1FhmhsjUV1Y+zZpUdtvm8ev0mAatTouwv6uGlUl123PI6OwNo2HUojwnAfopZa8j7nJRTJbPkfUJh4pdjmwTHFNE4h04yDpPk4aifq5KOCvaxPK6AfwRU0pLSVq+gEwq7DTDS21LPFrb/cyw/PYci28g1c7k2pnei6iDS7pQK28/K456UVKfjlFn/zoNcRHKGOanjJz4WtR3Xg69KLBjwkFzes01NgT8PvhaU9ZfrebqEFIpf7ylXfa6HRg8+BzoKaKZyviHy/b0Sppve90NIMyAi3w+tGbDWuVxTu3UF9O0cmCkdQwGlbZuAIq8Lnp/yntnevzNtzz7Z5Zg5vtIcY+K9ab5nE+8p1mPA0cDnVqVN7Xqys53tDHjve9+L//yf/zOe+cxn4oYbbrjazTlV+/u//3v8y3/5L/Pfp8n06/sev/7rv47rrrsOn/3Zn31qx93Zzu4KW6/XWCwWV7sZOzP23ve+Fz/7sz+LZz3rWdjb27vazdnZziq7pkE/GtYs6TmWDUfyLdB0ORNVNyE5KKyBZaCSVEm+kSACF5wvjDI+dIjMGgsCCjnJnI2JpVScya5uPcvpOGKJQt1kta5krW92htuWNLtyak0DSp6DbTFw0L1fgeIS0XnQOCB5j7hmENABSEOPBkBwLtf4U0nP4B1iP/IGOcbCfpzJuiTn4LoG5B1818K1DXzXwC87+K4BLZagbmme97jOYtshGZAvNQsGAJsOSWWroqlPY8C+lFBJM6Vkz4kyMMvmei5AZaokgiSVO8vwyHfmCDjb2HzV/6rP09bs70uZ/fnMhAOyvKk+59o9qdRKsoGtSk5T09+BDZDP1huaa7IeRTOta9AvZflQPU8q5wkAHGdh9quXHw4qlRsT4JCDkj5GNJEARAzByTNJkEiCyhL4DFInqcnPDbxv4BsghZ7vl7EHggk+mIAExdEEVKkGAC3gh8sA8eT9DZnMmc9acI+8AGZNy/eG83y/NBqAKkErDaLEaTDocmxm7iDvgehrNm8MhQ0z9iyBp+9p37X9IoeVtO3a3gZI0eXrqKqzd7ltncqRRlN/UIO8Xn6vDCyPj9YRm2aY62f0Z7YxS6bBqzmbYahUEp7kEKjh4PwQMxunD6XeVkgJhzsJqp1dQ0YH14FSQugOkBZnuIAvNEGHY9xDZNDHuyavRS4DYkm/wqD3yIDf8cBB2/3WM9M7qZx3qUfHsm2ENrocsHYQVpIjlvsd1nDrQ9BwhHR8EfHCbUirQ8TDCyWxQedomYNTU9a8nDQF8JznfJn7dK6JheFig+CN63LiFyXxLUBwCQJxFAUCnT2S/UEI/y4lBPOmU7AhFYn5PiSMkjywDpxIcNiHHPSvQb9pDUX2Q7VWIo+lyI8aP4ZIE5Mo1070Mz5N9k/Muea6Zg5kFCt4zMy8uQUQmpU2tJ9X0ChGcQjkO3EEybnxrsmOekwMJMWg4Aw3JqXiz4RU2m8F5Akqt5oy+KdADrmUWV0s8TgxZbzNyVzm64b7kvcj0kdOODHsdSeJLDMS+BtmfB3vGExrnLL0WPpUz5OCfvwZeSaIdLYB/XQf4ho0rgGrb5prQVj4/DwBi6bKA9U5NOeM5LiECYDO4x1QELdp8lzVd9dktZCcOCW+VVVr0kh7FiUSAQMv01+xP61qLkmuBb62+HrxouziXWKJTQcEX38P4L1NiML09Q0QmuynJN8i+U7mAAb+VmMswJ+596fmjX+vAJ/WctR6jizzilmgHcan
4saIX6zXKqV6jzpJuLoiM8mwCnj2IaGPCUc9rxtHQ8yypnksd/lUO9sZYox4y1vegpe//OX4b//tv+HlL3/5vQ7wAwro98M//MM4c+bMFTP9xnHE2972Nvzsz/4sfvEXfxGPfOQj8Ru/8Run09id7ewutB/8wR/EU5/6VNx0001XdJxxHPGXf/mX+Ku/+it86Zd+6UYs9Z5kFy5cwF//9V/jEz7hE674WH3fo+u6S3/wEhZjxK//+q/j5S9/OX7lV34Fr33ta3eA373EUko4Pj6+1zDKr9BLv7pGYWDpt3HFrBBAgL9uMyvWFXyTEteZgG52pT6YbnA0s1QLnTsJiFS/DcqBEucgQB9xzTKpn9FK/QR9rVmyW6X45v62jB0iyVLtCnggQALkkf/XtKC2ywCdaxv4/LqFb5sM5JFzcN7Btw3IuerhzOf0//l48kxNBydAH7WdtKvUx1BWH1yDJJv0KeCn0mEJBfBLErCxf19qg+eoPEjlwcz5c5cA6Krvg8yxeKOs1wWZh/5/yvKbBdVSYVfO9UVBvYTCEGWwr2Sp56BV/v/mgfSaVBkfZqVyjUuSfnIAoGR/d55rfCw8Ydk4LBuH/dZjr3XYa/n1Qeex3/Jj6flaX3qXa/a1jl9rZnEO1M6YZupq4DdkoBNZxmyIvOlXeTN+RPQxoY8QULmTa6stgSC95zOjdgIQzTboMthdM0zBAgBOgpf5ZNSAHy324PYO4JYHcHsHwGIP6JaI7R5SszSPhQSAJg9Xg4MbmdbbmG+2Zp/01zIAU8/1rdLq0Lw+4ueBA+mV9OmdsS3MCDv/WnZjciyZqed5dnzaBSd6iLxmco1k7m/PNicJNpNKic48dG3Ic7YNXsv1lVwj12LCOvDjeIw4HiNWY5JAVcDxePdLUCWVlL4LHzu7l5rneldoOvSp1FXT9TjoNQBUwd4qLo/CsGKWhtSjCipLVxJLhhgxBq3bFHMtLCvX6EjW3iCgQ+iBoUc6usCP1VGZuyShATFUAGBOzlAmt7zWz6YYDLA0cp3AUEAOCmMlM6q+odY59I4ykKaJSCcSjzXJCci1+4Iw+8aIDPgp+Hc8BqwCj6M+ryePMSSR/+M1VoFBTdbRNVbNofgwDLCq/1M/5kDAfCydr2XtPZGhPkn22JBN1s/MDpgm5MTMECJo4pX6NvPJdZk5BJOQoY+U+P2oiX2p+J3603PtibFe07b15wRAhEG+IuddMc+8qTM3qTk3Hc9cv83VrC597e11KnuRirGX17uY1z+S4xKQ10tl4s+dw+ocTZ9lrNQ3d/lR9lzW395mUcZT5XF1rDQ5Kct3Kptvtr6ffGbLuZlzWe3ewN4fKpFaJDR579A43htmSVTwZ/SyUKnOpOUO9NxKGYQgKgF632ZZyzhz/cbNturv6fzkqPztLOinZnwbHluq79+5Wu2Xssv8jJUUZaZzSZhS5RAFPocQcyLhNNHh7rKdb7Wze5r90i/9Ep74xCfiV37lV/D5n//5eMYznnFFx4tXss8z9uEPf/hUjqP2CZ/wCXjve9+LL/mSL8Gv/dqvXTHT721vexu+8iu/Eq961avQti1+7ud+7oqAgHEc8/07jiN+/ud//orv5ze/+c1X9H3g9M7naR7z4sWLeNrTnoYnPelJeMUrXnEqbXz3u9+Nn/qpn8KFCxeu+FgATnUuft/73oef//mfx9HR0akcb7lc4ou+6IvwgQ984A634/u///vxDd/wDXjUox6Fg4MDfPzHfzze+MY3ou/7K2rT7bffjuc973l4/vOfj9e+9rWnNn5/93d/hyc/+cn4B//gH+C666674uOllPDsZz8bb3zjG6/4WL/wC7+Axz/+8fjlX/5lPPGJT8TXfd3XXXHb/vqv//qK26X2zne+E3/xF39xKse65ZZb8MxnPhNvf/vbMY7jpb9wN1o4Ran3D3/4w/j+7/9+PO5xj8MwzOl1XL7dk/y5a5vpd/ghOD8iLc8wi0wzEFMsmycN/Gum
omSxlhRsWWR8VwVDtCB7AkRGpQSwIkpGu2uABbRmF2+uWgFRlN1XpFSOJIjUz26ebeB7W3CcA3HM+su2iHkDS86Xml0Z/BvglwFx2SHFCL/sEfsRYRgRVj1SCLnu39Sc57p8rmXgr1l28MsOzbJjwKJp4c/ewGDG3gHc/lmgWyC1+xyUbzqgWUpwfolEDkPezJVxzUy/GXBPN7BRJKn4PHGWvaPNLHqgBKmEYJbrvkSA67KAEMkEFMEZzVXfCVXwzrZFv5c/Kw2wLMGc+S7BPCIOOLFEqWRjm36qvGk0gOgoG9vRbHCnYJ/NetVNPcDBCNL+mSCA7TNJQKB8t4yftQxCpiJZOMaE/egxhIjWE4aQsGoimoEwhoTWhRzwDRESeKAMCjpXA7DMAgxoncuBEe8SKJBIvGEjcLVsOMO96/Y5CKwBMTcCoQeil2AW32cazFKpIstOgH2tTDeAn8UZVSmuPC5ToDDLdnJwyS32gKaFWx5kJqw7OMfAXbuPKLUtp8AkgA1wMrMJ5JHsnCH1DPlzruojUgQ6ZpakaZ9iRMKQA+Jp6BGHETFEvvcd103KgKiy/GIQll/R+70slp8N7sxkixMJ8JciQIty3Bx4mgSVrU2DrVuDsWbM7OemTdV2OgGOoQFaTmTok8MoIF8fElZjykH4oyFwrS25R1ere5ZztLOdnWTJNUiLMxj9Eusx4nCImWkOOHQgRCZ+ACaorfNo6wi9AE+rMWA1FvnJ4yFkafTWkQB8CcsmoZUDxQRER6Y+Hc/5nQPX7+uPWNLz4m0It38Q8fA8MA6Ix4dIIVasZp3DEsCS44b9ZxMYUgyg6AHZyJD31byROgn0xxGNMswAljMVxp8HIRBlPya3wUzlmtikNc2Qaj8opsIUXo8q6RlxoQ8YRCJ1iAzsZUBAHAllS+q6GkSD1FEEwKw/RwnO0I5U+aDUhhMWGA9KnmeZ1VdqL0P6wdcL193Na2qKSEGBIzcPEFmW35y/m99jx41I6irGkNlGWtsvobDGiJiZp9eOJhGpH6W+U6WOoICtXtBQP1GYc3JuK8t+e9pM5hOJ2BN1ry0o2sj6Iokk8B1Su5S1ZpHXoa1rVopoXJPr401l7AvTj9VHMoAXx9L+3PbAv6dS6fq7KZb9igEFK4afNmcC4lDk9yiMgMiHQu/vWKRymbHJfdB9lkK4OeEgJgRHaJqO92jNin9jH3xfjwMD/yphvqVush077kfiuSGxDKk9c4Ycm/ubj0JcK54S0HlubWqAPgBEPtfO5uPwUUJkJmYfEzrXwHf7gEjYJt+ZBDcG+zO4H2Jm/FqmH9fnoyzr6YTh5yTpTpNUNxISSSfx8nflW5m61xvJXupPAUVJQ+7VvKnYBt4bRYa8R28WSO0SfXI4HgLWgRUSLvYjPnQ84Px65D3GGPNeYjl/9J3t7CPOvvRLvxSf93mfh9/8zd/Eq171KrgpSH8H7RM/8RPx+te/Hv/wH/7DKzrO8573PJw5cwZf8RVfgc/5nM+5omOpPfrRj8YjH/lIPPrRj77iY33u534uPvVTPxXvfOc78frXvx4PetCDruh4b3jDG/CMZzwDj33sY/HYxz4WP/qjP4qXvvSleOlLX4pP+ZRPuUPHOj4+xlOf+lT88i//Mn7rt37ris7Fc5/7XNx444143vOedyrspp/5mZ/BG97wBvz0T//0nT7GmTNn8CM/8iN4whOegL/8y7+84msWAD75kz8ZP/7jP45P/dRPxcd//Mdf8fFe9rKXIcaIZz/72XeaAbder/FDP/RDeMlLXoIQAv7iL/7iVJhDn/7pn47Dw8M7LPH54Ac/GI973OPwF3/xF/izP/uzDGq8/OUvx7Oe9Sw84hGPuNNtuu666/CsZz0LT3ziEzEMA570pCfd6WNZ+6iP+ij89V//NX74h38YD3nIQ674eK9//evx6le/Gq985SvxTd/0TXjJS16C5fLOeRSPe9zj8Dmf8zn4/d//fbzyla+8YqakgvwP
e9jDrug4at/6rd+KP/iDP8ALX/hCPPe5z0XT3Hno5x3veAde8YpX4Md//Mdxv/vdD89+9rPx3Oc+9w5fgyEE/OVf/iXe/va3o21bPOUpT7nTbVL7uI/7OPzar/0aPuZjPuaKj3XdddfhrW99K/7tv/23Vwwyv+QlL8FisbiiOeS07JoG/ZJkZyvLjzSo3XS8aVYJOpEfyjaR3gGQa8vZmh8l2xBV5qGCUi4HGYQBluVzFIyQTWAcuXbCcMyb7LGvN9DTzbM1GxzXt2ZQY93QJsebXGo7IASpbeiBGOCalrN4nUNoR7hhhPMOMcQs+5km4B95V1h+3qE9WDIAuFiClgegts2AH3VLoFvkun2apcwMJcleNVnUGvzSjNc4k0ttQaEMrKEw9i51+6gElX4v90uiU0RcQy8SNmivZN6bynva9lTHnJgClE4yfDWQEVKq9toWVLMA6CYbch7sK/0tzLopYxEowVkF+5wA1ZS/W15PTdvgo0q7caDTyX20onLtDI4zuoeYWPZTJY4E8CvSQy4DkQDHFQYeJRkXlU/jOEIA14FpHQMwfUhoHPenE+AecTQ1SMaC9AJZMpJfRwbxMAP8OQMKao2YCjAzklz2uwbwo6YrwLtK3y4PhKHWIXV7JcCnkpFan3Saya/Pk+Bb0uAbFSDLSokl+R4RS2yhW3D/TR8AgAZJFBgHGfkRdAkHfCN4dkfN9lFrHtn3pmxFcjkLfgM8NAHkpH9jbu6MHMwESsB2GnC2Ge+IMJejtJVZfkGC7spI1TpbK5Hh0rqYnhgEv7storCK76rj7+xeasRS6bm2XEgZOPFSgytEjd1SDeCbw6TEktAxFXZfEPZZTAQ0LkvVtYngIvJa4ap1jJOnEHpJmmIp4rReIR0zuw/jkBMWrHnnkEJgRQSbzBED4CdzWKzrtVLbATQy4qd15XJ9OQETievIqXy4dwAlmvFmNk39H0ATeZATakIsgB8H/YOp6VXAPlt7N+TxIwEFuJZum5glBK+/pyCAjrEw5ZQlN+uP1uuBBS7Vl/EEkGuQ4lhqpVrgYAr4zYF8l2umTpyu6ZrgpNNtYVUVJYEhxCxVDhgCUwScl3MJYoB7kgilbLRtIObUKMIAKFt8fKCsc1kdo82+c7XmkUlcmYAwOdFMzod3ZZ+iyV8b4zdtl4K8mkBJDikWSfSq9u5GG/KEUMnoZtNjJ5YjZaCWAJfgEx9KQUsF/qbGTFWWAGZp15GBfAAUO7iFSWwytYp5gC7hr0zOpzO+yBTwm55H/awnAvxERta7fN15x/cd74F4vhgBOF/Ym6pyoX6/Mp9zomTURDozPm7eZ1eWZm6+tkn7RI79YPW355KpgHKv2vvWnM+SUOUY59akurlEsCk4nEG/DgGOZT2F3ZyVEobCEte1Y9HwvdmeJN1yF9nOt9rZPc2WyyXe9KY34U1vehMe/vCHX/Hx3vGOd5yKPOhb3/pW/NVf/RXe8Y534Ad/8AfxBV/wBVd8zOc+97lXfAy1xWKBt7/97XjVq16Fz//8z7/i433mZ34mfuAHfgDvfOc78fKXvxzvf//78f73vx+f9mmfhqc85Sl40YtehI/6qI+65HFSSviv//W/4ud//ufR9z2e8Yxn4L//9/9+p4CxGCNe+cpXYr1e4wMf+AB+9Ed/9IqDz1/91V+Nxz3ucVd0DAA4e/Ys3vjGN54a+42I8BM/8ROnFlz/L//lv+A3f/M38ZCHPARf9VVfdaeOsVgs8B3f8R34lm/5FrzxjW+8YjlOtc/93M/F537u596p737yJ38yXvOa1+AlL3kJXvGKV+CVr3wlfuzHfuxUAJOHPOQheOc734mLFy9e8bHUiAhveMMbcPbs2VM73mq1AsDA7tvf/na87nWvu1OyoQcHB3jLW96Ct7zlLZd1b1/KvuzLvuyKj6F2fHyM+93vfrj55pvxUz/1U/jzP/9z/NAP/RDuc5/73KnjfdqnfRqe8IQn4Au/8AvxxV/8xfjYj/3Y
O3Wvvetd78JXfuVX4tZbb8UXf/EXnwro97u/+7unJmntnMOb3vSmU6mZ+frXvx5//Md/jIc+9KH4yq/8ylNo3Z23axr0y7Y+BuGYwa6mQWqWHEhvWqS4L0H4pjA2soRIkfIJEnzRjGug/K1Z11MGGgAB9hxIAL7M8ksiD3V0nKWoaBx4MxT6+Y22tRPAPpWlyiwjdQKatgT0pS5NissS3I+Rg1/LHl4kr9J6hRQiwjBuZMiXpjDDj7zL7D632APtnwU1LdzBuQz4pXY/s/pg6vnNSXpqgEvBLhugAQrTjseZ8ng7kcyygR5AAVvKwY85c1SCdCkREsl3BPirPgtUkxmDgPXf9rjbLEFAPvkjoN6g54BdBfox+KwMv5g4aACgClgBEAkhGR/D7tOMf2+Ce1NWH4FlQAm8eSYiWDkiG8wNEshUgGMImlHOYHfnHfomoh0ChuDQ+igSbb6S4MnSU74w/vT3NPN7PQ1oOStVRBgdgykRhDYRFJlpfZdZBhp0IBoF/JIsbAEFFdSZBf4aII3yXgwM4EWGpclHAB24Bp58Hihgn4Du1Jlal/tnQct9lqVs95j12u7xveG7nHQQbFRG+w5fQNqGAzBJpbmi52QHDcK5mBkQEkWT4wm4FUeQazho51yWwKIYQOOAKIGyjW2FYRJP/55KuW2VT9XraQrs2WegPp4zgVCUYFoy4+Rdk+V3cx8D13GkCBOQipvs6lgAwGmbkuM6r0mYERp43ag5I4/DPuBCP2bgL8pE1np3VSSodrazO2vJeaRun8GnxEHYEHla8jKtNI7r2OUAcooIcKZmGs/KqxArJlqICcd9yMkpISX0I9ecU9N1qzW1yjoHuONj0LACjSvE1WF+pPUKcRgR+hEpchKTSpcnz68ZJDLJDs6zP6Q/KvN7BRRAk0B6/q5hQDnfCUjGLDACM+hs8k7ly5hjWqBvmuwTEs8nQ+SgtzKHL/QBowTCgwB+xXdI5tjK9ncAIppEMv4RITm0piXFPyiAmSfkxAg77zlnzq0BK5GAJN+LxDWuc42/MDKoUCHBU4Z6xKy0p/jAlCT5AvUxLJjmyGV1jqmxf5+yD6Ws0zwGiRAJaH1Re/CTJWwDFJU1xrLvq1rcOm6SzJLbPAHKbEJiIlf2LV5qYHvez+h14iQpK8lab9ct0naLxGxEPRzqz1pQd6P9M+ugXZtPOm+JqNALZe2ufCrxUygys9+TK+xMsD+XKOXrMRiHXPO2QuJuj5S4rnO75H0QOf4Mif/lPNd5dkaZIRZQ0p4naCKQE6BaQX03sz2eA25TBAl7sfOEIQKd53up159OAMFldyxCwf0yQl7uL/UrVOJXZS2V6TfEiH6sQb+ucfDE13mLIulfyhqU5nKSJWXQFGgKU2+uv5nVWa6VKUBa/GnjR7t6zqsOa/1FAbYDNUUefWBG3+3rERfXI25fDzjsA/qRgdOu4fPoHM3Wod/Zzj5S7bQCxqcRPP2bv/kb/NVf/RUAZg7eWYDirjYiwlOf+tRTOdaDH/xgPP3pT8fTn/50vOhFL8J3f/d349y5c7juuuvwO7/zO3jqU5+KH/iBH8A/+kf/6JJtevSjH42nPOUpePWrX43f/M3fxH/6T/8JT37yk+9wm97//vdjvV7jEY94BL7ru77rVAAx59ypATB7e3unWgPttAC/lBL+6I/+CN/3fd93pwE/a/v7+6dynNO0BzzgAfje7/1ePP/5z8ctt9xyRSwwa/v7+6deB+20rjcAeOADH4if+7mfw9HRUX786q/+Kh7ykIfcKWYXEeGLv/iLT619p2V7e3v4lV/5lVM73s0334w3vOENV3ycz/7sz8ZjH/tY/MIv/MKdBiCndto1bE8D8Ov7Hn/yJ3+CF77whVcd8AOucdAv9WvE1WH+m3LtrBFoO9DYcDY4EW+efcfBiGYBOAffMJU3gBCl7sk4RfWALCEZUIAoZfY1jrBsVMKT4BFBYQ3qj4HQw8mz1ofJkjMbPyL1PIDN
oLkBAG0dGssqQoygpkUaB86Cl8+RBfyiBLhMfZu0XgExwNuaNlNECWDwwnlm9DWG3dewzKcWn8/sJZX0nKnhF00wrOpmBvjkp5VFSRbUQqnbkj9nM9bnjsugns3Ej0EAsMhyUCkRponFFnS0tfq2SWCqbQvv6/vTOhTMACygsgb/EnjTn5kRcbN+hSdCjExY0A2+jlmuL6IAH5V+KHiqdf0chJVqQJNpgKMR4LzxHYJzGF1CK8Fg7yLGADTOo/OcrXs0hIqNYNtsgb4sR0olCKyWg3NR2ktcJ2XZeERK8IHHj/RDcOiaJUB9yTbWcxVGjhy5ppK+ZClJYslcuT9TdFkql4ASHJZAEkaRDjXZ43r/5lqb3RJOGbB7Z8v90e4jtQvEZokhAqNIuKW0eX7LtZ6y9JoDsxrhO2G+jFDJzdwfDQrmi0zP5YIB0ZSAZgkXemD/HGjvAGm9YmCyXyENQ5YJVhATto5ot2TmjDJ7JcCZATudN9KEvztJbshznWXXmZqMCvSFwGFfDXTpOXVEQEwF1LYsQA1YaY0+BfxsDdeowe1U2kOFrZBcI0FnVMCmgo/ThAVPDGaHRHmiWDYO1F4hK/JOWMJsDPxUj7+ze6npXCZgVYhALwwpR/y/1vEV0ESCF1BI79NRWGohotToixrcplyTKZj3AL1/Sj3ZZeOk3qwDjStgXMENx8D6iKU818zwG1d9TlraUCvQQH9+Fj9LfSj1jUy9P7XENByQHwEzvyrAU/uGhJh4nuY8FF7jdA2bqhMkGVdNLtN1vyQTRBwN0bBdmOmnoJ8dvw0AwFFmTo4hIbgpGKB+k66r4tNK4kQGsGJk9xOb/o69/zN7EAku8fWQgT+gTEQWXLDA0ZQ1p5+dYwrxSZ1/f2Iq7ZnE/1wpS9KwsJ0DM6USjFg1cj3GIileHjQBPuaYb+TcZn9irfABTAAQA/ipvGcfS5KL+nKeSrmCij0prEf1eU+scEEOgO4TxtI2bat8pjrvW5hu3LYJQz5EwBfJV/bHHCcW0gjyHd/3MeXnRAzAupRYKdKAzEH84fJ7QOsaNN0e38c5UYiTdahpWKp35HrE1XoVo+yDFOhrGOyTA+u1u3XoJv1PIufqjeyvZViGWPYNueSA+nsC/OneSGt5smR4yHNn1KSAydwJqOSp+Vuul1I3tQD1ypQEAO+ZybzBHDWyrxRHTljdto91niMJ5Cp/W4E/TgSbJHoZfzG1SyTXYD0q4BdxoR9xcR3w4eMBR0PA+dWIoz4gxIRF4wTkJCy9Q7oKoN/Ot9rZzi5t73znOwEAz3nOc/BDP/RDV13a7O62b/qmb8ILXvCCOy1befPNN+MVr3gF/r//7//Dy172Mrz4xS/GE57wBNz3vve9Q8d5z3veg/vd73544xvfiBtvvPFOteUj0W655RY87WlPw/Of//yr3ZS73Pb29k6F5Xet2D/+x//4ajfhI95e8pKX4Jd+6ZdODfS7J9qf//mf41u/9Vvxghe84Go3BcC1DvqNPQd9wAH35HQjJ5tX53mTQlwnAX5kyUkgy+k531V11YBNNh+gGbIpM2AU8GsF7GscwcceNLL0FA1HHGQeuI5fXB+zHClMsAkooIELGURg5tFmwGMDDJyyb2Jg5o7ZvNssV7LBrMBBrtQthbEkoN8w1KCk/sZCQL/lfgYA3PIA1HZFrlCAPyvpWbOYdNOJOlNcf2rCotNN81R2UkHX2Tp7k/OW5P/K5ksamHMstwmXshQXg3/l922bbHuAmqlnE8yjMvpQg3y5PdNrzYB9Ki1rZWStFJXKeIVUZ6JbeUxJ9M41PFSOygJ+KpemY5pZfwr4yTW8UaeGBs5cj5GzrIXpMFICREbMx4ReAFXvmgqwnNrUD65iDob9qBZSQiQNKka0nutDAlzjRUF7DtIVdhiBM+PJK9inUkYiYyXAT9Lz6z0w9KUGFPgeU8afgud8H5tOSIZ5ZveJnCfaThh+zISN3R7gO6wCB276UAd98+EM
CK5ArTfn3jtCI4AbRcNmTG4jOFdddk5ZgcKA9h2890jLHrHtkNbHDPwZxgtpLb/F0tQK7UoNIivBOWfTIOFsEMhVck8hITNLdb6YztUJqYD6dn40weSK4acAYIpVckSa1P6ZzrU5aOUkOCttqpgUTjLPk8rW8n22bByouftBv53t7E7b5D5Wxt8QEmIjf0eei4IAPgrQl+A0ivxtqNlVmlw1xsSyncZY+tlh0Ti0ThUUAAwijR56np/kEfthK+CXu+Mn/pYxBvxEAnROAtCCNFP1BRTfkPOKJP0kpSKvaBKGrG8QzZxmWX7M9Isi5cl1/EapZTUIGDgN/Ntx1PeGkCrfoG43s9aqtk0/m2vtbQ9Y1b4y19LT9xqbgJE/skXWc4Ytd1k1YoHZNScmnZ9FSQIQWUAG/IbK2WDcJRpmWfaTVAYVBXCDJJBsMJ7ipF9h4sNPGX+TPiTnZZ/SAU1XMcq1TzkRTZKmioTqluuesOFzbgC4G0zEwvLMH5my/SaWFIDMCTPiD8SYgZ8KnEyiVkCNqEuwb67SwRHccMECM1BmgS4vh2PgyhxfwFYdG4oBqQEok/pD9udcSiVJShi8FEM9RpcbsJVEMmW8JgLgZQ+iH0mpclO4O7wHUaBOQT6db/X+t/LIQdQ2yljU/+djlwSpBGb3KVMyOC45kMFmOUUb9TWn92YckYa+Tg517A+z32SA6Gl9P6AG/Ew960FAzj4krEKqkh2OhoALqxG9IL62fEHriVncO9vZzu5x9o53vAMvetGL8IIXvOAjDvADgHPnzp3Kce5///vjRS96Eb79278dt9566x0G/W655Rb88i//Mm6++eZTac9Hil1//fV48YtffLWbsbOd3Svt5ptvxrOe9SwcHBxc7abcZfaQhzwE3//933+1m5Ht2gb9VkdIPrGEi75psg8z889zIN5lCcpjwDUcfG+WcO0CvlnmDbYCU5qFrZJNug10xOwolp8iLDxxFuSwAoWBpTz7Y1DokY7OI/arzKjTjVJyDrn2HjionjCUvkhNsI0AtHy+SOm52cAC2Y27buLGsR4joIB9IuuXmYRiWiswB/27JdBorT4GHFK7KLXJmkUG+7Tuj2ala6DLAn66Sc6ACwqwZ6U8rRxlJZVlMpK3XifyG1HYfClx2ZuUgUhk4G1qU/acttn+LxaqGRJoo48auCiktWRAwgL2ZfAPpeYhgArw22Yqe2nZiFnOyQSvLMPPU5Hy9Ij5+mXgb1VnpcsYJyIGvuR8L3yLrumw8E1md4wm2BsikJKr+sfjVNie2lcNWrjEATgOgqQJ8IdcExGBZb0QCSQfUuzHk0PX7WcmXK5pZ4MSQaQxVYJL2XECiDkBhahpBfCTZ7lPplsYajvAuQoMj+0eBza6M3yfNEusQ8IwRKyD1mdJsOxOKwkHmPPkCD5yDCUBoJjQeQdPDVzTgJwJRJ6AsmoAL9e3SxFxXHOCxJljZv+FHujXJbhjZEs1OJk0cKPMvi33oAYJOb4VS+CHHEC5gJcBD11mBke5LnTusJavZR0fRAYFxh4k/cHY85wsjD+kyPNgDCxlWrGade7reI4TK+xFrq2UgCw2p/dU5wlEHsvGC5uYZdS8AzpHOCLLIbl7LF5izjiN4+/sXmyuwTjEzK6JMlcNMu/yHM/B9xGc7KJzfy8ybbevWJ7tYh+wGmMO3FrQqmsE4POEhbw+s/A403nst8z6o8A1kV1/iHj+Q0iHFxAv3IZ4eB7jqkcUeXLXlfuWHNcjdl1b/EGZxwrLbwsXyoB/KQZeC5SplRKzlRKPQ05imiTisFuQNnybUn8vZZ+Tk2OKJOhRZvXVwe9+jJnxEiYTYjDgadc4ARRrlr1zso44lIeuLSbxZ3b9MJYBBbs2g9cvRK3tx4y/5BthDtkDKJCQNgG/CcNP599UrRv1mqN+VPGh2M8K4k+tpB6iZfrJ0Vlm3HlOHNFxgsztZnxax341PyQpSpNIZhh8W4G4KZgmgF+uRS4q
ADYpiFuqbZEhJGnTSb5vsvM0wWMi7RjAYyjXNYW+XifzN2f6YZRJyLIVNZmKWBobESAoi07el2N6WWe9Y8n9GDSJLWWGnPpEIQFBEp9CTEjJMUgYCZ1v0C3PAf0REBgETGNfmI8jy/7mep1jz/OAlEHI50uUCSg69geIaxrOSZ3OgoECNraO7/UQuZ/B8TwZImX/XwHZiCLjnxLPsSzvWQOAU//HsqTLfApEx+zeISSsKGI/cKJbEEDVpaLCCsfXQgZhIfeiJkgNq5LEOpRki43E0C4iOQdaUmH7kdT3y59rMruP61ayD7lSefQh5jXjw8cDbl8N+OBhj6M+4PajPvf17LKBJ8J+63Gma+Bw9ydU7Xyrne3s0vZVX/VV+MIv/MKr3Yx7jZ09e/ZOSRz+s3/2z9B13V3Qonu3nbY85c52trPavvM7v/NU5UfvaXb99ddf7SZUdk2DfiSstgykjT0QI9LQZ8ZcZqUMXFOPxgG0D8B3ue4FB2e5JoP3UrssJQxICFJs3UoRkoB9GgxQubgcCBj7EmQeB34om04tBq4bNvQM7EXNkEX1mQRwAHpmU13J6U1tJjudfNwISri2K6y/GEDTYI8N9utmTTdvnuuBpWZZ2H1Sv29MtZyngqjA/IbGUwGs5sC+XJB+LqO6uijcxt8kzzkmRpSDQSTBOZW92TicDoMB//LQQBJZ5U09jiP5YiygoJOk15QYgIuGVajHsuFHDQg4IrS+/qcFg7TWnYIMluWXM9Mn3yuZ68g1/PKYatBPJSCDSDymKPXxnDRMQZQlEEf4dgnvGjTOoY8sl9poICJRljBlEJDrKOYAHXFAgxwV+SZno6jzG2DLttS6nC5x4BkOCIngfWfAJir1cFKR/uS+MhiYTACSWWcimxmECRYD0HQVa5YbU4BxdEtEkRHW2pbK7usj0EfJ4I5Fzk2lnvSa0EAyd1FCZFkXihAk43+MjIImiPyq9oE2mQQ2qzsftZGxaTqW/fQdB/7GHtQORbZMjhEVmJN7f0O6SX9uGvgkV8C/mc8nE1SbBrc0UG5Bdzs/tI5/rwKtFexV8C+OGeyrstStRBXAc51krGfWoZ9nMirg13gCxXLPwhuQlgiLhuD7XTb6zq4hCyHPkQAnOQ2ySGkAOkQOzA+hzMWZoRIT16Aydag0ML0e4wZopfeKynu2znFilago0NDLPc3qDnF1yCy/YcyAH4CKeU3eba33RM0kAGLk1bNkOlCDX1Nmlx4LyGxjWXr4q5Jsw3NX7VvYhJcogX9N9mF2PzPT1lLPawhc91AfU8DPjmPukvnNXA9X1kAHSQSSz6kiwB3RldMm5OQmhgxAKHLLJyVjVbKeatv8OnMsTYJLZj2ziRhFOaG0M8YSqFdpT1k6uaaf/oHiJ9nx0XHLjDD1E2YAv9k6sXN/qxyi8euT85nhF3IClQFXxS/ypGCYSrfL2OQx4GvRsr0Avc5g5KrrGn/5MY5Z3ntbAlEakfcGSIEBPgD2Msr1GCNAFIsvlij7nI4cg6zQa7YGlJXpl88tJUQiABGNSNYCfJxG7+vQ8H4nsDoF+cSJljbhURIduaG8t6EYWA7UNzVIOb2MK1B6BhCE7iv5nnByDsmlXBs7f1/v/6QJgQkJm6D+NgvC8tWkuSFy7U5NFBxCgqOEJhKSsI/VV45ps35lVkiIVh0hII49q+sMfZ1cC+S/SZmTtn/V3rUAfhCG3xhTfjC7WeY9WSt0vssS0FIPvPUsAR3DHCS9s53t7GrbDvC7Z9gO8NvZznZ2T7QbbrgB3/AN33C1m/ERY9c26NctARo5+1ACQRgHxJ43cuRdzb6R+lquX7H03sE5OMlwdMICIufRNB3L7IDgUmFqAQz2WdCPdJMfxsL2G9egcc0yKP2Kn0cjm6kgpcoFQsAl5zkbFcisE2oAQDMkHdcjJMebJgP+ASUYkjdeAuAo2y8f22RYW5Ajb/7zALsc/K4ykr0B/iRDWWX+hggkATQSkIODQB1P
smCasug8bQH6tMacBaa0nzaYdkI2rgJWXv7vncusPG4bbYAN1k6q46f9BNUbeGiNDQGvCKbGigA6DBxSlv1xgARyUgbwEvjvObBUGX5uMoY2/GAlqvSzJcgq4yuMKIQ+v7bsKEpJAlQk94lcA5JNncY1B030/nENUlPXcrQSZrH6G7meHYOwxEAY1aw/BVcV6CxDySAix325nRr048BxB98x24DlSUdha6jMpWH6BcOGtew/AcfmpK1yQENkpirma8vSt31yGMeUJYwsE1LBPgbG63OsMrQl0JUQBfArNWI4IBKJ4OQ635BOlUBPmgDx+XdoAd8s0C3O8FygwJ8mMkz6agOwWg/PBg0TUI+VZeReBjtXTTDOcu5RwOyG5Jz0K7mG18wIGgdmqqbI9WdUjsrIK9enz8gkS/a/laDSuU5lRxV79YKyd9owUJ7POs/3V+cA6o/gwiHubsvB3bvw+Du7d5oqFjhagCih84QEj6NBastKMofSsPVO7gWwWoeE21cjbl+PWI0RqzHiuA85iHvc85zSNS5/W5l+S8+sv9YRll7uoeEY1B8jHV1EvHgb4oXbMFy4iCAsvxhiBvhU7s15xyCgAfH0Hqe2MG9JEsPQtBUbEECWds6flXVDmePkO67rB2QJ8Sw7LgoAVtYqT5UyHzMbrcih9oHHaj0GHA0RK3k+7EdcWI0IMeWxmwbCAVeBfgAykOpcAVR9xfYrCQrq/9HEB8xtl2e7Atp1JMoi5jzl/mVAihxAqZ7/K19uSyLXlNWXk06aDPgpKKQ+hEp7xlT7GoPIaCswLT/AgJ+vk9GIJuOjftJUJtoCfkY6Wn0mW4M7m15PopjB70n/VO4wFNlvZfrp2Ov6H821ZevFBfFB1e8qlrikMZBZmJVpwle/5lrokkS50X7tg9xbyprlxMBY/EPImLmGgT9NtEqRwUAdM6nBx9dKvaro+VOZe6AA2MGTSHxGtFxEE3tNA986XvcBwPM5Sbacgkm+rOoWSx1oaBt9zOoDOf2MB698f8o+RblP8q3oeG+hzEX1ifXaTdW1W9RHFKCuhl38fX1YeV+A5w9HHkOIWI/cgEUT4RzQJhI5fN6/hpTgE+83vCZl6TlR/08SLeLxIdLqMO9p83WhiW7OcxKrJsdZM77UdK/I8x1f60dDxMV+xAV5HA0Bx31AP4Y813lHzAz3Dmc6rvk6Nnc/6LfzrXa2s53tbGc729nOrsy8v/vVGj5S7RoH/RYgvyh/S7F2CgFxGBGGEW4YOfCjLL+mZZZdt4SLAe4gIIUeqRmKdGEYAN9jsTgjUnMFEMpglAJRYcxgieuPGTQJfQH8JnKZ2aZR+bwpdSUwpVKaXmqCOc/MIWXaZLCh2dx0qrxnGA2wl0pQ3koqTQDBvKm1TJ78nmzeNFuTHAJYjm8cSwb1dFNbnTdSRhoqwM9LEKN1BuxTqSktLD9lL+aDmjbajPCIsmnX4BEEBARK4INcOdhcMGSb5d9S1ppIiBoWIbkkIAhJdm0S9pIEFYkRQ5vZnF8LsQ5eP28CUyhMPgX6FBAhKmCfBQNnQetRJJ1Cn7N7M+AnWb3RBH30+nS+ACQK+iXnkZqOAz2+hXONgOgNQiIDdDEjzxM/xySyTRLwYNYUbQXC7LUj4UqR96IMbuXTR2DgX+pNZvmq4KFSRvl+0POpjFJ9DWRwawNoVmlKy4LzLH+r98Y6xCzlqUFHZfhZYHwD2I0S0E1AIg60e5UtSwwJwxFSFBk5cx1YK/Ui9buAlX0j8DU55ODKEu1yme/DOZZCFbzN96jLc0+Sz83JmZ0E+GmSRQ5oo9yaXoE+A0rSuKolPW3gcsrsyz8iIEBT5JWtdGnFXM71SmWeTSYoLI3SMe8cjxf1XMuVxhWoP0Zz/rat/d3Zzu5pRsMRqD9Gt7fEEAl94CBx66iQ4HT6CBwCjVDWBnD7asCFfsTFfszMvqM+4OJqwHEfcg06G7jOdfwE8GuU5Teu+H4ajhBXh4iH55FW
hxnwCwKC2Tvctw0nfXlf+1oC+FG3LH93S1DbsZy6ypjrJkRUI3hdL35TZoMraAEJ8pt5V3mSygDkMVO/qNQETbKuK9NliFq/r7AkC8NvC0vSuQoMUFCv9YRGxrP1VPkC1ufKCV8zvk6a+pbyrIkqei2oTxcTpMLjCcF4y5Q8SbWh8s8M21oeWfZRWXwGTE3GF1Wp08zyyw4952aFlNCa9lZS8qqYYJPOMqvPAoAT+WiT6GcTTajhREQCgLZOgknap1ikX23ttpQYeHLSuDGv+8UHKuNR+05J1ixl+22sw7JHYNlrkyQTwkbCogJ+8KJ+YGoec01yCENOkqwycx5m/LZfH1b5YBAg3PpGjgjL5NB5hz5ApHQB7xI6VSdJRS5845eqEgd9OR+NJj9KPegYa9afMhe5FRvnzVpR5NA7gedOzQXk7hRp/6k5Besj++vRM2in97c1Pc9aQ3uI7Mu5yHLMgyPEBoVZnBJU2NSJT2XrHmdZ9HHN9ehXh0jrFTP99Lp2HjSC6yXGAKCWMNcE0WrPKCUhepnnNAmuF4bfSlh+tjahAn17XYOucTjoPM4sGpztGnSe0FxuzcWd7WxnO9vZzna2s53t7CPQrnHQb8lMOJUWWa/4H+MAiOTTKKAfDSOaEECLpWxWhgy0UbcELYXxEwPgR66vQQ7kG5AzoJpujoLW8yhBZwX8IMy+NAFKGIwLZcMMTAJSrgQEnC+181yD1LSFPSTASgmE8DFsVi85DmZ731VAn63jVTEC57K7J4wem4lcgX3RZKunUpvC1q4DSjzMmSI3FvzLYI6VSdK2TuRKp4GiNG0zOa4hQ3aTjqwxWtVBmWwarfzg1oDU5DeJHNevATIjbgP8gwRojGSs/g2UAJqnEsC048d74Hqzb0E/gIE+/bsErjjYQ5jU7zHBKoqhXBsmgJWzvRU8kXZqPZRce7JRpmyDFAb+X7sszDoBAuEaDpVIlDDEJDVhGLCDjItLBeR0OaCFqm88GrWEaQkWCSCGTLQUQNmBfMfBWgCIykrjeztZMMtNrjH7xyTDOzNgyfE9CuQaTSEWloG9VzI7YRvghzo4E1O5V5T9F2TMWDaJM/kTlb7ntkugx2aZZ7ld+awDzyFcNzBhjBocbhj/n7vuAGZMkgTBETGtMTQTz6ptBgBU4K9i2CpAbRItSMG+0AM9B6UwDqVW6ZYapfyz8mwAv1x3RufYHLCS82yOpezDzAQJPWgldV3XF/l5OGJ20m23XWoUTt2sLNpddfyd3UutXwGhR4PIgJEjhCSyagIwqbwggDxXjxHCTgs4HjiIG5JKUzJz48KqsPz2zEVUwClX1ZrVADT6NQee+xXCap0ZfinEzO5LsbxWlYfM1FMwz/nsZ+nnMttP/K7cKpX7NUOTwb8YAWJ/xG0J+htsyQB/NVhWHhKwDyJtZ2Q91wL4KfinNg3+63tcg86Zmqf6t85XmhBUVAK4rlss4Jadl6V/c0lc2kc7X1d2GYzuKdiYJv5Z5d85s+4mTfYpQCSg/mcx9Y0V8IvmBESRj9bPqDw6vzY+6pSZOAEBSdabvAZpsonWBtYaeKZGbgaPzVgxhG4BGpvsxRdVENY/jzdVJ6X4FzWbP7LTIPWtUY0jVY5m5GQZBfuMMkldM50/RwCSi+wTjj2ALjukVd9S5JOidZNnarHpvkFOTQbCVaZSFR+cA3xIACK8Y3nQIP5kIELjm7LX0j46v+EX5MtVE0YBsMznKH4d2CfW6y9FaB1im8Ck97S9BywQru9xIiKrNSSSJlK9d7PGdeMdnCO4SBsywd4R+jFO6vpJ8oCL8HJtD/J+nNygOTnMsvx0PxB6xPUxj80g0p6W5SfXwYYZdm5VikL845CB+gLYB537ooLVlsns0DW8Xux3HgvPbPBF49B5wngVML+db7Wzne1sZzvb2c52trNrxa5p0A/LM3D7HVK35M2I81znxZXS3qEfM4ttXPVwg0jyrFrezBxdYNBPpT/3DpjVMTTA
+jBLkgAzYJBmRirYtz5mdp/J8KVWtLRlc1yZkZLSbPOcEanggQafmyW3S5h+wWwyUyzZonajr7/HknM+B+99g6q22SyLB0CWDUXZhOgmLUXkOj1jRK7XpuyhucxVzSpPlECJqjqJOescEOaZbkCNdFLemG6yEnMtH8tM1P8J+IdkAYAw+9KO20nGQFUdiMoSohmMBaKw1QoQys/adzu21qab+8tp0/S7lvXlFQRUSSqVbkyxMPzGUg+NYuDaHQKg5DoeGgDKP+ZKMFXk0ahbcq3IgaUtk2+Run2Qa9B0e2h8B++E9UeAdxCGXglKlOvISLDOXVQ6PhMJtQ1ATAJkKWlQVIB8F0FRwJxttSIN83Uq52TvQ81MD0OcyI2ZrHVhOmr9phJ7TLnffnLuNfjBvUgIkeAETIYTgJO0RhFliazZAKxcHzGlUjcSBIbrOCjIY4fM/vOSmq4AMlDuZZaKbTi410iQzyQV8AmJ9fOUtWFNJMBAjgP+sYD/XG9nVYC+cQUMPeLReUQBA2BlyXROkHqL1HQcpFzsbbCpk++KZLGVZ1UwV6Rb1TLQl6KRdF6B1oegcYV4+wcQ1yvEi7chHR9iuAqg3852dmctHl2EWx8idQfYW5zBEHm9C13tMiZoIgEH6C/2LOd5oQ8s07ZmWcrjPuD2owG3HQ24uBqLTJswODKrlwiNYwCw88TSuP1hZvnpPR5D5Ifx79Scd3Bdu5lE5UqNZwjQT92yKCrY9VznqqbJfl8FRKlPAk2CaQqEYZKwPAFB5LstMBVTYaOVWn6lptXxEFgSdQg46gOO+zHXtrKynmrVeLY+S6UuGof9lmtfLZpSJ5HBhCJdmWXqp2vgDMCmjDp9hvRpsmzJZylLqm84WtvMJtMYZl9WFTBrb4ildp0CXTbxTH1iYD6p5iTLfpRtmqpNhAKM5Jpn62MGzPoVA0tDnwG0XJ8byNdkhHDFFq4CWjP7PxVFAP1uQBK2HktaBknQsW2s/U3xBTKQKWt7AhIo+81J2J5ToA+owb5Sl3zI+5d6rXWyZguA3iIz/iilOgFvYlpz0fpNDAQVpl9IwIAksp6QRAQHIIGCMJEpoXMN4MYiLTv7g5LIpsk/MnaqYMFj2vFgKQiobD8z4OoD6rUYEp9XL7Keuq/hOowAZR8XiK7URiW5HlQ2HAAWDacLLCWRYQgRrXMIjpnS42QuWEtCAPvXBB+j1PTjcVxkRRFTAzyOxYcJPag/Bg1HnGRxdAFp6BGPLrBvpdeH83zt5ARVkzChKgnqQ4liQnJNJVlrmX469ykDt/WEvdYjLMs9e2bRYNE43He/ww17LQ46h2VDGJod029nO9vZzna2s53tbGc722bXNOiXmiVLCapS094BBwbGAXEcchAmhslGMwak6HhTLhsXrB0QA6JzoCaAJAhE5FjuE9hgsOSsSKkbpTUFbVZvkZEz0iczUp55s6QbJaLCqjMbpwCXg0Yq2aexjDAJwnCbJUs4Icv/RZWAhIObkUbJWasaq081qDit0zYGlQ4sGckWyLISjCr7lKQ9mye1AAQboMGcTTPS5+xOZJqf+H5mEZqaghEMnfhySzEAWGrbaM0frfOj4+kn4zAF7fg1TT6zvStkPqMMqQyaWgkffW/seawF/EMccx1KC/hlMDtOgoJ6DQ890AqwMg5we2b8Qi/SuyMSOXjXcI06BwHmAEpF4tRXgJicCmWVmPz1uTieHSoOgCVwzUYGyrRsjNf7W4O8tAX81qAjyr3B7Ly6Pl7Igai0EXQLsTASTrIp4Kc1XKYWAThhNHLtQwKX2DLMQGxeJ3wdyu8IdZAvZf6gtk+DVcoOUHalBk6JSGTrqMjkKtPVNxzsVJCePF9f5j7M980M6JefldGrALWVUB5XSEcXOcnj8DzP+ceH5ToFSkJFy8zLHKgS4+B/YVPDMqqV4adz8GQMeZBk/h/X3KZhzcy+1RHS4QWWIrxwG+LheYwX7v6afkjz98dpHn9n905LIvms
tV1b55ES16qs/QFm/Ok8q/KU67Gw1I4FtDrqQwauAEjCUMqSlWoESQJyBBoDM236dWFPzbFLZqwweUXC1/sC9iu7TwG/yT2egb8Z1YP8kaSy3TFLQNvvW6afmv3bzrUJBeBQVo6OnwX77NjZdcGLFGqX5VGd1ER0/Nrz/5X150Xmu5L2tM/TsTBtV+By2o85c4qk2ONue21+K7+eJnEZxrX+rIKpNuFNpT3rtvB4OUeI26hVl2PTNcomSCnAp4DfaIC/qjGsNpLGAdQtJuNvfXwDKCVVPciC5oiynk8FLKcqG5HYL7BjFgWc2liDnQcwYMMm9f3yvSgAWRp6TnKU/VW+rk7yv/UcC7BpAcsQE9cPNfuOGBOco/w6EgNhjlwFSNtrZ9ZVnvqx2uYRvKcEatWHGCU7TRMlJ+M9czmpD8U7npmum6RHBv7ZlyNRanDC8osOIvNZfME5n9BalrSNCoKb+VVqIjpNtDQAdq6H3K85gXVt1BMs4KeKCc4k+zUtbO3jrJggEp9TZh8nwqW8D1JTCeLWE/Y6n33i/dZXD5WAPkEl9q6znW+1s53tbGc729nOdraza8SuadAvLs8gLRdA34B8x/ItnUh8Ooe0XoG6VQYukkTlUwhF6kUDSDHyBjyEHBDKNpXiNJvfKLUvUgic4avWLTnIpJnmTbeZTS4ZsFVWszcBaN9Ca+cl2TQNsWyS7L4ggwyp1NIDSoCaAAQB/RwlAwyROQZ/aRNsKcwrZfMp6KcSPDYwMTXNxNXNHFziOl3C4jqx/ouVU5raNJgwDRDZcbUfqyJWM8GukwJg9ncFNAICAwTaiwkjbCr7yW1ggNQTTSuEZGYeUNhESKGAIGrTOmWTtlc1EA3oN5VLpeozyQB+vWH79RugX5ZI8h5JGX/CnkqOQXTqlqCFtMmPJZjaLtG6Bj4RApVAshnkGRkxvbbL//S6nJMdy0MCvm6JUADGCfjN56r+jgLrzCCIhdEXyz1mma0WiNf3StvtsUtAkiABnBNwbf1c1SfG4kASuItUwDnO4ZdOyARQSb3KwVpH1XwxDb5s/F45XP59lxj4DokZf/y/iCLhJUHxOHNPGQbl9H8bTOpBZDP7i0C/Rrx4G+LxIdLqEPHwAl+f6xVS4N8i57im12IJioHvraYFwAwgZfoowy/5FpB5dkNC2TXVXKvBTK2FqXX7qL+IeP5DSIcXMH7wFqTVEfrbzmM8WuH4/NHJJ3hnO7sHWepXcMMx0voiHIC9/Rtk3nDZB+lDBBHXZFVbhYijgeU9DwXoO+4L06+XWnVBIrUWBNR6ayo36YkE4JcA9NADYLC+WXYYvEPyboPpR3J/Z4Zv21ZrEwTso24JdMvigwH1emjW+dlapLJmAg0IUsd0ClrNmLKZAGTwdBApT36UWn7rCeA3V9+2a3xm+i0bkb9rfGb37bcenWcpPA6UF4lPT0CpcTuWfll2o4Jsqfh8um6cFPiupAPtmM29Npas3+amvhzPx4UNh8zy03W+cu8mx/ZEDJQpG96pvOl25CAfw1xnVoFiI0HKgn/GZ8pS01oDTwGzpgV1kohlfzeVunxDjPLzCYNjMIhZmwk+Fna/mr3yiLjmbAKEFVnAqAoUI1ekbG1yjNTry22XvpD3UHn0DPzJZwhAajQxyUiYTkF0ubbKdVXLPYaUZL4o84P6SsFxslNRUyiPRI7ngZn71kp+Z3nSoc9thkqUOg+KI88PVhUlxUoO3slYDsYHDQDXXhb5efX/M7tOkqbU9aMke7PESigAg1mNA6LctwNpEhj7wAr82TmhniuA6HjPuERt3oHrpSqzT32YcYV0fJETl44PSyLr0OdrgFShRuZQWixBywNO+DOKNKlZZJWEQZjgOaEhltrefP7L/dh6h/0WCMlh4Vm61BHhTMdz2n0POiwbwl7jsPSEcVxhZzvb2c52trOd7WxnO7MWY8Q73vEOfN7nfd7VbspVt2sb9FucQVzuM+AXehA50GIPTrIPU7/i
AuTDUAC5nJ0qLBBvssYVwNPXYhpsqjJ2dfNrN0JAYT4BRTZG6/NJEfOcVW7li7Qe2ATsyxmSBmyzQXqgBuum4EdIlvnEO6waBCsfngP9lK2koMYYi1zhNCARZiJAngghBXgidN4hQDbqTFVCsgylRMwYymNT6oNpMI0A3rHaHthAggIPIgWV388fjpwZqoAETEb/RO6U5gJUVDb71ihJO6VuCTkzspdgGm4EaMiw9CQgRwLWVWCebdcGgBe3SKOO0t5UB6HAAZCkNWhysKrU8stDMfM97ogHDMM1ibQtOQ/S8xL6EmRqOnjXwDmHOJOuO01mtlKpWUpTgyxx8/rNY6rtThyMSdDadzrm8/dSkQsr94Ey9grTpQbxNDN/atoGe+vZukE07ez0+5eZzZxm8HML+E3Zo9Pgc8jjsHnsqr6eMcsA4d/hIJuCYxwUvASQDnO/idQZBZGbNfKZlt2XVkcF+OsHjKu+HMs7eDSgmfozJABAIirzsU220LnDBCUrs8G/GEodzHEs8qISKIvDiNCPiMM4Pcpdblmy9S48/s7upTaOiMeHoHYPcA5u3MOiWTJLOCqr2ZV5WBn+kSXaLFilNekU8EsxwXu3wVRzwkTLUne6TlNhkys7j2KAX3Yg5xDEH3DeVUxey0LPEnRNy0HqbslBal9qI+vclKfQGabfNIEI+jnrHsys9wy21PdLxeTKQIc8hOXH4xdmwT77WDQus/yWTQH7FlIHq1Omnyvf1fp+uZaf9sWa9CWk0ofCrKs/auuXbUhjGr+Kf29LcskMKDTH8uM1ORUGmGH2RWmrTYvzmuTjGGxpvctS2uWxuba5GSYkqU8mSVIxSh0/6y+Zen6IsewhAKQBIC8swKFlX8usKfYSZt+CGW8MgvH9N1DKbKjWu1zrt5wHZYRB1CVOADVVql6ub2pabrey9oae76MJuJ6TvpzKowdmyunr6PkzU2auYdGDnAFuU55PYpLhS2VeyYmHaVMC/Q6TvSayqyT9IYCTf8aeZT5lXmB/meVCs4JBGEsNQMheBzWg7xLLzOf5DPV9Y5MvVfxU4VPKSZpUXadO5Oq1jp8+AzUDUKVurXkBDRtHaB24BrEAfq4/RFwfIx1dYL+qXyEdHyLXpszKCSyVruUwcvKE71ga3Xe81234WQE/myya97OW4ecIrSCdmSXdemY8eoezAvop4LdoGLTEOMNKvYtt51vt7J5kMcZZ1aad7WxnO9vZzj5S7e1vfzu+7du+DU996lN3oB9qvOGy7B3veAce//jH44EPfCCICG94wxuq/6eU8N3f/d246aabsLe3hy/8wi/E//7f/7v6zIc+9CE86UlPwrlz53D99dfjKU95Ci5evHiHGx9ch9TtIy0OELsDpMUBUrsPOjgHd/Z6fuyfgzs4y4+9A9BSavct9liKRmtaWBBPMxvXIm+yOkI8usDskgu3Id7+QYTbP4hw4cMcfD4+5E2SAQAreU9bi0TrHfgWqWl54zuR8UTDG6ZADUYB2sY42TCpfItkXNuNVESRT1HATgP6NsA0DTSNkbP2y9/yHEzthVjku456zuhfjxGrwIE9feTglQEEx1jqdVhZl0o2ChL4cfPBnzR9aJa+74qcjNRF1LHNEq65oOGEBTgTpKMJmMYRCJvpWwOEU/Bt43iXcT0r4KcZwfm3BFSgGDLzCXHMtSRpXDEwMki27vqQ6yCtz8OtL4CObkc6vA3p4m1IF29HPP8hvnaPzvNjdZjrJWlNSmbGGgmkOVYhFCiUhzAD8zGkzhrXuJE+jL3IiAaWFRJJUY9YP5L8zzx8GtGkEZ0DOsc1nzqR+Gkc1wHxRsIsjyP0OpPrb3KdT+uLrEPCakx8TUuNpVWIOB4S1iFiJf/nz0UcD3zNM7uFazAd9QEX1mN+3YeYAXONcyi4y3Xx6ocGfErgpw7mZRJFqo+Zz8vs9YVS41HqOZUaTzqOJQDqJo+56zhKGzIrwTzGBJYjVmDNPuaYM3qdRcPw
Cz0z/KR+X1odciBKAb9+hbBaY1wxwKYPZfxtXLvOyFIpyEeU55FcN8rOEygsBHPhV8AfpYhK1i0GboM8wnD3B6Z2du3YPcmvAsBzeb/i+29Y82NcVfMtM8YELDETQ0zMApwCfwr4JXMjKQC11QwbmP01lebs4NoGrmvgW364lhNLyLsM7GdJT6nlp9/Nkp7WZ5ibj4DC8rPt0eZZNn3+wvbEhgIIlDkzs/wkMM7Mpk1m3zzw5zLLTyU9G2EGMQDoMrjnzflyMDVJk/FtrKlvbNrOYN/lBaTzaZ0yuDcGJdZjb1lZxk/T15ZRn8yYlgS1TRltRwoqQ4ATSP03Yfs5ykClI1RymVU/Jg+d7ysmH2BApfJ+kmSqNLJvZROrcq1lTTgzt0T204UFuhIfO7Nq+4DjMWSm6GgAFSXGTZU/gMlabllxuUabSWDUz8xZBjfrZLAqcXKGuZky4Ff2JjmRSpO5Un29zSUjKQA4xTXTlvt5s/0FqLU1Dfn1WEBee6/bfUGK9bhuJKsVkFxfAzlPoPKt7HFOanmurXyZcp8AckJF9o9Vvlnm9qh+1eH54meNA4Nq5lzy/GmSJ7pFYfgp4OdbwHfsB2bAr8h7lv1fmde8gHut43lrv3U403mc7Rpct2hw3bLFXuuw9FzLr3Mktcl3vtXOPrLtXe96F/7oj/7oajfjbrUPfOADV7sJO9vZPc6Oj4/xpje96Wo341Tt/e9//9Vuwt1iwe4hdnZF9sd//Mf4p//0n+Kf/JN/gttuuw1PecpTrnaT7hF2h5l+h4eH+MRP/EQ8+clPxld8xVds/P8Hf/AH8bKXvQyvec1r8NEf/dH4ru/6LnzJl3wJ/uRP/gTLJYuMPOlJT8Itt9yCt7zlLRiGAd/4jd+Ipz/96Xjd6153h9qyDglHY0LjlnD/P3tvHm/JVdWLf9faVXXP7e6MBE0CyBCigoAgEAYRSZgSUEEGzXsgAgLCAwGBh6D4gwcRGfJAjMEJGSKD8hgEZRBEAnwwhEmU91R8+ECGEBBi0um+fe+pqr1/f6y19l5Vp253p+/tdDo5K7l95qpdu3btWnt91/e7Vmao6hmoXQfP9wIrx4D6FnzsmmStr+3Oi9NxvQ0AgwVursNhj07ucAH8YAaqBmQ9yQG0Mst1DrL5wIax+jyrzbH7ehJ237yLA8YRUBaPYwZesoxY3zR9NNAggEbMv2EQyl77oIrV6esUaNzoYgYKLfvW2mYsIckel4VpFE1FcCIEstpqyJI2ttDvY8pATeAKKXYgrjSrlqZBMw/aKVC4IO/pzQJOrNnaANB3kMpkJdPfs+UGv4N2CDHAKHJGgzYVuVjPSMwL/RygWpSBJD2fJosV3HEhMUziMwfqeheMUPCM+laydX2Ns/n6ILM5m2eiAiWoM1UzqapBkYGogEns5Xk/ZAOm2AOaMZ26edkeM3hlFZFYauCkCPQWeNX9jRiZA0ajfm7nu9JrqApSr61nKnKcGiiysWZAMyBAn+/z8TVUglDyfueAa8u0j7FknHsbXwdyDuU6sGCGMTEEkFSQl4YBnnFY1O8mZ/Pn1zTBiCzBIwv6BUKW5ByfXbLP9VVCymNwKr47Dmrl49Uv00TgqrRTw6kqi0U6L1Ivc6ux+xC7Tev3pfl6Bvzi+jr6eZcBPtJrLjQCApAG+4dB/9rVUFXGgQMkDfAz1slCZr5ef14it8wtyiKvGnDTIswapBhRt6uLHXmYzZI9Duf2l7Y9dl3yqwDI/WJjH9K+PTJX7QtAN0O1EhGaHehUWnDey90zZDdKrvMmiPwggMzyM7CPFOhrlIW22lRYrcMCeycmSAC5WQWv7pLtxx5RZdirWYNUV+jrDuzutSXhKhQpT/XJeLZzKOvr7n0JkLKu5OW4RwlCZsaaJ60Lm3c/TBQQ/8az4wpz3ALgvg6iJUwZO9LqHXrAz/rOHldU1nOm7L5jmoCVKmBXU8n9JpCeD6nJaMkeNAIuBuaS1EoN5yKjOU40MXZm
SboxP7DLNcOyjOhAJnGCWWjJF8ElYuQEDc7ggcl6ZrYfCsjlzZhSkRJqZjANZegNLJWkIWQQOz9PcZB8YnXPSMeiZ/n59QQBSKy+0Limn/t+6tpyP+k7uR0RSr3FPG6K/53HggMzV6ogwInWcMxsRtAA4RuD9IPxHZqSLDUC81IngE/q5ouA4ZTPqKB7vobC4j03g0Im9xilQmiCJjBaoqNLDpRdyjmtVQ5SmGvyZ4zNrI5xMGb+LCDno5L3KAVV7+hkDMYIoljWJ5bMxhVqZiCqVL2x+oBc+9hbGt08yS0maHi68vHa3BqI0KLU9KyY0MW0APzZuJd+4eyDNoHQUARtrIE29oDme5D27UG86nsDHyuD0ubTQP2nZgZa3Qnecaww/KoZ4spOuT7r1VyOYt7LuZPHVNh97pxaNwQStnKM0HMox2LjuGJgtZLzvKthST5p10CtSJJe27b0rZZ2XbLvfve7eOELX4gPfehDRVb7emxXXHEFXvayl+H8888/0k1Z2tK2xa688kpUVYVdu3Yd0u9TSnjnO9+J5zznObjwwgu3uXVH1p75zGfij//4j3Hccccd6aYcVrvwwgvxmMc8Bscff/yRbspRbW9729vw2Mc+FvO5xKBf/OIXo67rA/zqhmHXmOl3zjnn4LzzzsPP/uzPLnyWUsLv/M7v4AUveAEe8pCH4A53uAMuuugiXHbZZTlz/Z//+Z/xwQ9+EK973etwt7vdDfe6171wwQUX4M/+7M+uMZpvC2FjpKUgzD9h/e0S5l+zC1jZgXDMCeAdx4B2HAPeeSxIZUnyX9UU8MOYGm3rmEvzkvGYgcM+v+cX9Wk/aL3PWM5ylD7YnAuej2VQMHpdFsn+c2MyjYMfUYMinvl3oIWFBKqGMp6ZGag1Z9pY9l3AkGtyDv3+Uqn74eScpv7GjL7k2JIWHEpwNT78dl1A/4Cm4JP/84Ey8gH/EVA1mdUOl6Vu4K1joWXmlPUj8SDbfQxm2v5Igw9xY19h7M2FpZpZUcpIzX+OpZrlfDqVwh0BhGTsKANPLNs3FPk0+ysn166jIndIvTD90AlICQumKYtRGIBtCRKO/grbsSsMyBRzkNEYbIEXgWIPANo49izWuWP9GWtvz7zDno0eV8+7XKtqs7/1TrLwhbkRBwyOGBfrXRrgJxnYRaqt5sJW9AFUY2h4IG1wfCMI2mNxGQg1cDOmzKzN5xgl2Gj7GTMOF7LpDTzVMWwyXXlso4zp3piOfgMmq+dZtP28MEL7OTDf0Jp9+5SF2k6MT63hxywMoLoSZpABfZndV5jXiyxiKm2aAPyGnT1KCNDv5+tBwYZq1iDMGlSzpcOxtM3tuuRXASgJICoTLYCHzNXo5woukIJIpS6aBeGnaqQRU/5rVI5yzPRjfZ7BMg5ac7PJ9xyw1B6jugE3Ut/PrnmuSxKJZ/ll8K9u8jazzLpjMPl77P6YQmPp74FfsB/zSU7G/GljzOy+qCDgVA0/L+fpX5uspwXIM+PP7iVO1pNsfp9g4bkTNegTS9Ly8pmLTLrCPrJ9WILEmBmVfZYDADJDP3nE8hsk93hfyrV5oY1FQpb1r2Z2ABKDQLmmctB7sqgqiK8Cp7ogyYCaGHigDF3zjTZjy436wu7FHsex9Y6MGfErTGFD2H/ig8h4Eh993Ae2OcZw2wvJcpUkzHhmPLmEloF6xoJ8Z3DbpU3utfpnvi+K/zJ1z7VErLEcZJl7HJPNztk4Icf6IEwAlCPL6hW9q6U9oe4hawL1W7DoP+XtpTT429/In1qbxVGnjCocZJWGys0RgQtoVocCGAZCAbH7VhUU1obrBlvX9sXPyuPXEqkseUJZfaJeI4BfG8sYHQN+fi6RY6ECUCo7ecX91UxF2UPVPUQJpNU1xDI7fmmb23VNReFw2BVXXIG/+Zu/wfve974j3ZRrxV772tfic5/73JFuxtKWtiX7+te/jgsvvBD3v//98eQnPxk7duw4pO38wz/8A84880w8
8pGPxI1vfGM86EEP2uaWHlm74oor8PjHP34hWer6Zuvr63joQx+KjY2NI92Uo9rufe9746Y3vSkA4A53uAPOPffcI9yi645ta02/r3zlK7j88stxv/vdL7933HHH4W53uxsuueQSnHvuubjkkktw/PHH4y53uUv+zv3udz8wMy699NLJoNfGxsbgIti9ezcAAbfmfcqZqykBgQKqlWMRINmZXCtbZL4GWilyLRQ7pPU1DBh/sQc21hGjyXqKZGfaWEfqC5PEnqc+CrOkayWbd2OUcWjB5thjoasV8Mtyclq/z+Q8DczsY8lgBoYsPAtyTNXSA5S6w5JpCgBJi5hFSgP5IkD4N1HrSGAUOCmBqUWmHzDa/0CzRt7nNGQjMZWgw5QqTQIke55YYm4RLpMeORDnJfhSDgiVti9sE7Yo5wI25C+4BX0/kvNZ6FmW6hsRhfGnWf+y+Ne6HxjuwwKZwqAstRnN+pSyvKf+GmBG0FqQVi+FUtS+YRlbloncdaUOx3wdce/VWaYtM1b7cQCEB0BFZqiuzOS9xgd7HHARe5BeOxRjrpnpg185I5yDtCv2AiQ1K3IMsZfzXNWbArHjwKAFpyhV5RxqFnkw2UimXLMvYcjes+BDcq89m2+9j4gaTIuxANz+OvPgnQ9uW3ayjWm2gR6BnqWWZfmdZdxLcIZBk2V3PHh/MP5OtJqZSebEaGMqST1DVgaDH2NjtiATABKW5FR4bBy0sqCdtz4lDfpKH/WwbHbp+8C2/ZjHL8VepWo3gH4ObvcJiL2+lse0SU6lrs1jLTQq62fAn2ajgznXnDG2DzWzgbRflla2eWQk6+lNprRpVgyAvF8GBORWOUKerWOlusb5NVu2qcD3dm9/aYffDpdfBWzuW1kNr9TNETdYEgxSBEKVEzsqACE0mAfCvJeA86xitDFhpoBeryBYYMLOlSqDWAb6NVVhBA4BDk3WqWdAikj1KihFcOzzHCD3ozrfYwC957gEFS9DR7MdheEX6nzfkB9GCWirr0F2iU+BfpbUE7sClth93z53SQMR5V5TwAu576wrUGMsPwNtxoCfNwP6Gq3d12hgfCWIJJ7V87M6fhYsN9aPsdcGtX7NPOCpCWiZqZiG9778E2NW5eQUmmT5UZxg/PlunWD5mRR0yslw7m/EGDJAwdQpvDEDiHI/jpRHTQYbVirOdc4MIK2YUBFyTVmeK6uonyNu7JOEP7sPeZafG48ZMEPxQY0BtwCWOfajMSaJhvXrYhI52A3nx9kxtDGhZkJMCSsVADDqoDXwJOstf7+8cuc9VAL0KIBFtTnTyvSD+br9ItMPGCSCUaWMei7svlzCQGu9+XNZ1EvErHzB/szYmbUb4wYKUasS8puMNZjvPGWx18/jkO2nbMxELGw/26bW9kvECCQrqx7C+Ju66x+sPxcP8J2xtKfV9wsszN4VTQCYBWUDB8JKJXM07dsHmq+B53vR77kSce/uoYqCyqNbjVRubD1Q/KhUzZCa1fLIFTqqctKCMf18HfjN1qyBCZWTrjeA0uqQNnpvaQKh6tdVcnpNyhl0136AbOlbHT12nVNROAz2n//5nwCA5zznOXjgAx94vWY1rK2t4TWveQ2apjnkbcQY8brXvQ73v//9cctb3nIbW7e0rVjXdfj85z+PM84440g35bBZSgkXXHABLrroogxc3+EOd8C73/3uQ6rL+aY3vQlPeMIT0HWiXvaiF73oesf2Pf744/HOd74TF1xwAZ7+9Kcf6eYcNjv99NPxsY99DI95zGPwtre9bVmn9RDsW9/6Fs4880yccMIJeMlLXoI73vGOy350tq2g3+WXXw4A+P7v//7B+9///d+fP7v88svxfd/3fcNGVBVOPPHE/J2x/fZv/zb+x//4Hwvv50AKKfgUkyyAOUk2IDeoVhgUV2TxabUZOl0Qhkbl47rC5ttYB3EoIYMYEVuZTGMfAVc+wCSlKPRIHQDPkLI/zYa1KZgcmy1xkAWwC2r4
zMgiBXnAri9tGgfvUYL7XvYvogB/Bpga8Od/P2btMRN6BVrBhBiHQQn7jskalkCTMQAcsynLecp3LTv8Gt+uDsDaMyDNHnOdjlgW8r52R35vKlig+xPgMMnC3wA4ZRdCF/4JAIUqfwcoC3ljZXrZVsDaSRKoIag0I6HmCqgAikV6DCkiOQmoHHiyGjPGTu1axHmLXmudyU81cKlACQcesKOkdpJu0wJXrgZmBsktOGKPJukJDAKxWe5KAXI0Cl5ypY9B+tTXctyf5WxrHcOpBGHIhVpiKhnyHugrsmqFjddaHaok4LaB3b0LWowBvwHQp/OBZxTUgXOtkpkGYQbXAJcaQ1PjPhFAeow+G36As9tzvb57G03uOyIhKkHEPsk+RQBVmp3SEPwjqDzbuD2AiOGOgD+XK1DYBFSATM9OtHqBJpuGbg7qNkr9sG5dZGr37haZWg1GQcE+z/IT0HoRuOaV1cLu0UDVQNovOMbPONCs1/FBBV6MIcgVwJ3sEwDNdkg7qhroWgReOYiNLW1pi3a4/Cpgc98qszpilKSmlIDYCagWNMiq820T6lyLdKVitH3CSsVYbUIGpgAoCCgX1a5ZpYCV3CO8fJ8BTH1KGZxPVYPUtyCtxSwg3s7SXmXvxA3OoB85VjqvrA6u+3ytlwOWR5dkRGmYtCO+W3SJTW7eG7B/hpxrz4Y2tovUkHVgn2OG91ESvrx5NuS4lt9M5T2FGROwow6og2P6jRjj5jvkWn6+D+webPNiMpbf0E/JPyFL3ioM+7yf3jP0u8ySy8oEU2YJXY59neWW4VUuhoCCl+Y2dQp/jwxEACcg0sBfZS5ymIFcPUQFAEXOc0OBP3meE1FUwSCf5zGIxEFlIiNSBbk32XcUQMmsqZzMJD4lI8g1FZWp1SdI2g6y8ob5JLlGYWLEDKDL40oU1u3Yi10ICuV+p5wUl4jkfmkgX5bAnWL3cfYRs6R2s1IYtVWpuQ2u8v3V/LLxGsfYX8xy2HJYlM9lBvxU2tPqjDZMOeETKhme6y+Ozs9gnTc6b1OWr2ufIOjnkBRBJP6drANoIOefNw8H/KEAnr5PTC0BWATaPavW+soAP2NPm+Tvai0JAKtVQBMUNEMsY3mf1Uh2Sh8K+MU+IrDzrWqZS3MdP63hZ489uNSC97UlR2D8eF1rCQM2xZU6sZTliA3QraBrDPUb89yytKVtYueccw7OOeecyc/GKgoAcNFFF+H7v//78Rd/8Rc499xzs4rCZz7zmZxUdcEFF+BBD3oQzj//fJx66qnX2rFsZldccQUA4Etf+hJ+//d//zobGL/wwguxurqKRz3qUVhZObQ10Rve8IZcz2/Pnj2HJIfIzPjBH/xBnH766XjAAx6Apz/96Tj77LMPqT1L2x77x3/8Rzz+8Y/Hb//2bx/pphxWIyLc/va3xxe+8AUAwEknnYT3vOc9hyzr+YAHPAA3u9nN8JWvfAV3uctdrncsPwBZ7vI5z3kO7n73u28JFN67dy9e8IIX4Pzzz0c4CNWHg7ErrrgCJ5544pa3c/rppwMA3v72t+OmN70p/uf//J9b3uYNyb797W/jrLPOws6dO/GhD30ITdNg586dB/7hDciOCvjz+c9/Pq666qr89/Wvfx1AqYlizDiT51vvEvZ1CetdxBwVotYcSI3+rexCXNmJWK8i1TuAZiYL1ZH8jtVAi31EP+8Q2/KX9L2+7RDnTgbU1ZsyhtWC5NNI3tPLoeTaFpYVObEsNfkYM6byR/rfFIMOwEIwJCva5eC81Rk7OOiNmQbPFwC/UB4FSLDgELIUjwEMmfnn671sJpd1AOR+2CdDwG8gz+nBvV4CU/49csxQ0sx4k9Icy03Kb50cpd/P4BykDPhJ4EplW6M8t3HQKQtNmGjIMme+vo2cPNcXJktk9WJinwE/G7v9+jz/dWvr8qh/cX09169MbQloSfa2Lvp14S9gyqpk/lqWd9VkkHBgxjS062K+oUwAXbibpGffDQI1
A6bkwQKBrp8BY1giX1sWlLCA61imc187fi9OSHimATPDQEMzz6rwNYOqnMGv4xPkxqiXZiqSRxkks+9vygocysL5v06BTZNCbrUPjDERUQJTdr0YGzGMriULyAhLcMgcGAN+JnOV5UstIOwBv76VoKoCftA5NAek5sK8llqVUl81j0mV+csA32ynsPtmO8D6PLP9nLTfJJtkP3J+BzT9bQYbdb+84xjwruPBu447tO1uwSyYejj/lnZ022a+1aDGsZe5swQZlYajbp5BJYJIJBZZNmGhrTZBa/cF7JpVWG0KQ82C1GOLKFLAJntudY8HzN2V2YDNywYKWoB6ZRVU1U4mtAZUgs4kBgfmpCQH0oRZsjsN70v7kf4DXJKVHksfDUSDSjAWmek2FnnkKXlPYCjxuVIxGpVSrVQWz2Q9gyWU2J/ztcZJTwNzdZE9A0sAicUL3u4Jdh9gO+bofCl7rn03YF+NWIabyUGCOAMhVtOvgH+O8YeU5e2lv0ubg4JI9icsMR72FxWZRIqd3J+U6ZdBkvU1JGX6yf1oXuoXL3RQUUnI/lMuKVCXJKsF8KgkxBG0zSPGn09c2jAAuS91IU2Cv7DoFhn53ob1Kx3wZ76dgXrWZs+k1fezWkTduPWN1PXz91xfp8/G1TjJxsBZY49mn94B2aZckOv5wdVfNP/8ADKyWzInW5vXDCjgevabNMnrgJszoA9uzjiIG62fF7K8r5PKFNYvlAWpvlZfSlekVtUT2rmsFZyyTUmg8D5+VWohVwL4GQhvgF9ez6Yh4Df2HcwXJhR/dwC+6/OKMH1uD+f53cSWvtX1ww6kogDggCoKm9nGxgZ27949+Dtctm/fPtzrXvfCox/9aPzwD//wtmzz8ssvx7/8y7+g3yb53Kc97Wn48z//c/zSL/0Sbn7zm+O8887D9773vWu8neOOOw4//uM/jgc+8IH4t3/7t0Nuz33ucx/8yq/8Cj7wgQ9kltRW7Wtf+xre8573bMu2AOBzn/vctsrIPvWpT8U3vvGNbdnWvn37sHfv3m3ZVowRr33ta9G27eBa3IrltcwW7Xvf+x6+/vWvIx5Auv9gbc+ePXj4wx+Ou971rqiqCu94xztwi1vc4pC39/KXvxzHHnssXv7yl2+Z5ZdSwl/91V8hxrgtUprvfe978eAHPxj//u//vqXtHH/88fjBH/xBPP3pT99SvbtPfvKT+NEf/VG87nWvwxve8IYttQkoSSMPfvCDcfXVV295e6eddhpOPvlk3PSmN8VjH/vYbZl7L774Yrz61a/e8nYAOd6vfvWr27ItQK6tQyo/somdd955qOsaH/7wh3HiiSdi165dh3w9pJTwoQ99aNskZb/2ta/hjW9847Zsayu2rUy/k08+GYCgraecckp+/9vf/jbueMc75u985zvfGfyu6zpcccUV+fdjW1lZmcwKskWFl0EhEkmVPhBCJCREzHsgcI26bko9A1ukz9eGoEwsGaHEAYkjQl2h60UasVuX37HVj2qqLPNZAZmFkhT8SO1cFkyawQsgL1JSqGSxFIF5H3MA3jI+AQnye8TI2DcRQHBBGW/jQb6VQcsElU0lIOgCPTBiTFk21COMgQrwJ/VSgFkVwJqtaWBIEyTIYgGXmpFBvkFAymQ2/fGQRM6yTCeLtGaYCtqPAcTYZdZCDkD1Q9mpAeNvou8yu24qOGXMuGDnOpVAGrgEo1IBdy1T3REIlHWVtH8S6kDokwapKpG1MIkuoWkZU2E6cyX1EakX1mrsI+K8OLk072T8zhqkPopsm9VzAUoA2AV+ZJsCeKaulc8U2KOuzYB5OQ2SLW61MJPV7+MAamIBXwzQtP36wKzVYHIBqbEcWSIuoDkK0OeZAXNlVKy1PdoYsdYqyyIHyiRwYcyMeSdnZvP6SgxwQlAgug6lFomxLuwayGOdCvhngN9C8r0D4k2m0+Y6G5Z+/jOWAzDNUmNKGpgHepJAGZQeyAlAbgsWginWNLvGmICY
aJCpLm02psEE2B71OlSmNXoNqPYtqF0Tedp1AfvQtYjr+rixrzBXzfG2YKpJTpmkp4F/CkR7Oc9o9T6rukiNqcxfZlyrTDCkZxaOf8FUFi0BUk8UAK0GYRY5h61a3Z7F0dJueHa4/Cpgc9+Kcr1WzhLlds3ynJF4nhNvmpVdmRnFtYAoa22vbGm5h210EfMuYt716GPCalMhMGUwcKZswJiVDijP2ZVn+8UevPPYUk/WWep7ud4BaffqTglQNyuu9pST8zUGEPEQGCBFylJEMrAKKMAf9N5jDHc3V1JKSL34I8xFKcHmZ0tSW+9KUokkj8jjvJd+kvvWMMhQ7jkFMDUpT//nZQ4t0aRiymDc2L/Kx+zBHjg/JRUANneRJYtZuzSZw8AyUmAY6mtPqSnk/QJD8HVcy9lJe1rCikgHpsz0K+y3/fu6Vs8PQJYONKCvZsKs0vph6MAbe0GtSCBSt4H+qu9JXVmf0Ofb7Ov2+frGJovuak1mqURjxBkDNbdT+zVB28aqlNE7IFlrQHbC9utjRK+qANz2mAXGRhf1+Bh9FFC2T0nkz1FYfQOWZ1RfzM5ThSKhq8dCduzm7wKlBmBVFRndWo4Noclge5eQAe0MKI8URlJC9nlRBTBF9YEoqyYYmNUYyy+QSD12VjvaM0v7sgeXlJbv63bO3Jxnx2O1fjdLOrO1HWlZgKRKIMEl5SVSP8nYf26c5gTAKP2QwW0FtYfDjLI0es2MQAlzXTkEVpAvyJxwTBOwq6kU+COs1owQ56BWZWrXjeWn9fzaeVa1ySo2xqpeKYkWMGnPaoZUyfnt3HXZuuS6sr7BAuBckgWQ/VJTvzCGsmf6ibRul5MINlViWdrSDtKOhIrC4bAXvehF2Llz57bK+v3qr/4q/vVf/xUPechD8IQnPGHLjMYb3ehG2Uf99re/jd/8zd/ES1/6Ujz1qU/FC1/4woNmOj360Y/Gox/9aADYMhDz0pe+FPe4xz3w4Ac/eEvbAYCrrroKf/RHf4Rb3/rWW96Wbe9BD3oQnvrUp+L/+//+v23Z5n3ucx+ccMIJ27KtSy65BGtra/ipn/qpLW+LmfHa174Wl1122baM4RgjPvnJT+Lnfu7ntizt99KXvhS/93u/h1/+5V/G7/7u7265bbt27cLHP/5x3OY2t8FFF12En/zJn9zS9l7xilfg6quvxurqKlZXV7e0ra9+9at4xCMegec973k47rjj8JSnPCVLHB+K3fe+98U973lPnHTSSVtq1zOe8Qy84hWvOORzub6+jhe+8IV45StfmePhn/3sZ/GEJzzhkNt05ZVX4vGPfzze/e53g4jwpS99aZAYcii2urqKj370o7j1rW+NqtoeeObmN7/5tt0X9u3bh/ve9774x3/8xy0z6D7xiU/gve99Lz71qU/hNa95De50pzttuZ2vfOUrsWfPni2PN0Aks5/+9Kfj1a9+9aZKAQdrMUY897nPxf/6X/8Ld73rXfEjP/IjW27fodq2gn63vOUtcfLJJ+MjH/lIDkbt3r0bl156KZ7ylKcAAO5xj3vgyiuvxOc+9znc+c53BgD87d/+LWKMuNvd7naN9mcXrwVUAAFMIklmITih7SWgnoPhRGi4ElCm12zFvp3ewXh/CpykPgJNJfuad2CtKSXgny7cR6AHgIEEYd4mSo2XklE9BeQNXwdIwN0kIEvQ3fqmfDduErK2+l9+2znDfpPgiQRMFCiY2K6x/ArDj1ExcmZurQt0C7ZM1piZqqnnAYi8mE4CACYu7/mbQg5MyEJ2AdiLjunnAlLkCthP9oIL3iQt/kPGFgKKDJC1KdYAx9JGQGv9LNa6AETEKXCR90na5wovoQoVUogSVLNsbN8uky06iBtkck5z6iMwVQrAAyx1k4MgqCpQCvlzk7pKgMiQTiXPaZ8SB8mU5yjPKxdkShESJUGWzPSBqQz2ARLEdYCfjV3PCIgoTADLRBZ2nsqpuYxzY1t4wM/APnv0MnUSgEkI
TnrKGHJ14MwmMMAvsAG6Bzw10+crlQCZjKFhQHkquOJ/SyTXfICChRPX8OCdqaBKiiCuEAilzo9j+2WgT6/lAeBuYF/sQF2baySltT2Scb7PyUxtrGeJ2oE0F48k/Azgm+0oQSpj9oU6S4wJU4gzW9aYB57hkA9xdMiDY7JHxwDKY9PkaomBUJfv1de+BFVEmmSKb+f2l3b47dr2qwAMmH4Ds3slIL5MkPk6sLAz0AvIZwCUySU3XcTavM9zpzH8GmUE2hwJKCuERXy4jwkhCEuIjDUUavCKBnys3ljsJUgfyv1IQJUmM3kHCSTOPOBnn5Ndt8RAOkC25YilhUST7CJL1ugj8n1H6iOXmrEm7bk/hp/VQbRafuXPWGt2rxkyogaJHPsLmOt91PuhU+4gU2GfBaZyX4veh4tlzLj7O6VU/Ajfj24eFtnkEQAZS7+ZMoLIpcZJUQhzgYoMNxV1CTZ2EbJfmll+7RzojH2uDD+T9VSARLqxL/eiWu+H3u8agEkORM/MKU0+GamMlP6FstnKeeUJ56HUG1bwMyb0LH5P2yc0Qa4pYZBR9hmCtXHkX0lNZC7JbebrWSJLvh8XQFP8YYKplwyvO8rvRWW7ThlDzglRyusI+yTGBHbsryxby+WcDnz6MZt0fwMEDoz1oGAIQwWAzdQA8j4AQnSJfvIbAwH7VEDNqEspoACfKTmWX1pk1xpozZTENY7lOgeQwWFJPAvqfxYQjbqiRGKKILkmJXTs9lLLj4LW8WYesPyyUgJX+Tgz2zaiAJapAPLA0H/2Zso0Xu6+MG8xOLd5TjnCtvStlnYge/7zn49nPetZ+fXu3btxs5vd7LDs61ClATezPXv24Gd/9mfx8z//8/j85z+PW97ylviFX/iFLW3z137t1/C///f/xr3udS/c5ja3wW1ve1vc9ra3xc1udrNDDuZvFdBZXV3Fz/3cz21pG2bHHXcczjvvvG3ZFiBAblVVgzG0VXvkIx+5bds644wztlU6j5lx05vedNu2de65527Ltm5xi1tgPp/jAQ94wLZsDwBud7vbAQAe97jHbXlbTdPgRje60Za3A8ha8/Wvfz0e9ahHYXV1FTe60Y3wmMc85pC3t3Pnzm0ZI1uZN6+44go897nPxb/8y7/g3ve+N6qqQlVVSClhY2PjkGWG/+///b948pOfjOc///k4/vjjFxJHDtW2i6ltdstb3nLb6pbu27cPr3/967flnH7gAx/Aq171KjAzfu3Xfg0f/vCHt7zN2Wy2JZDa24knnogvfvGL+2XzH6wxM/7kT/4EP/ZjP4aXv/zluOiii7ahhYdm1xj027NnD7785S/n11/5ylfwhS98ASeeeCJ+4Ad+AM985jNx3nnn4fTTT89FkU899VQ89KEPBQDc5ja3wdlnn40nPvGJ+IM/+AO0bYunPe1pOPfcc69xJlFMJZParyNSkrpWqS+B7hAJTYAsgCqWzNPQAX0lgI0LcBEHIATNvC3SiPYIAP36HKmuck0pQACU1Pclw1eDUin2EiQIdQaXDPyzjNd5X+Q8PZiW2T6O8MejYMmY1ZPce1Ybbhy0kX4pz4Gy6DTWmd8u5wzpCIAW1s/me9X6ZJxFbcXYAxvwp0EMKDCgGZyehVey56cXeTkoASwuxP1vPYg4rimj+8wgnzHXgAI0WEZ/3pXKnrlATQn8NCVLGkCqoPusSjk8F/yzDNkxWNNFOT91AgKlDNwmaNH70CjbogKRMAssSzkDdNYmq3sWeVCTcr9mgQ8N5lDVZHnEgbSoBn6pqoBO5bCqWvpuw2pcuvOnz43xl4MrsQbVKR9jlpYzBiCxgDbAEKjx9WGik/uNyHKWVkMpJqj0VSwynVFkOscMv3kf87U5DsBa/RT7LIyK3wmYzYNAbM0CrucALLnrDxLcmQLRoZ8lmDScjBUbMz6w0vYSYLfutno/gAwLA+ElQEiTTOE8jw6ukcWLPddPJMYAGvCs2qTAtJfyTCkDfZhvIK7v
FXBvfe800Dfat2XfL4B9K6si6bmyqsBAU+T8bLwEq5/qwD4H1o/ZLHmf49fj+YgYYCAlqUvpt2EAdey2tjhd2vXbrkt+FYB8jZml2EvAn0OWZAZXWjerQ8M1+sBoKSHEhONmFWJKqFlkKDe6iMDtYD5tKsbOJmAlSP2pQFQSMyKhMmm4BAk6pyhsobgDKXQgBdVzko4ame/mgHgLUG9qBvLZSwP/gAyClM+osKIWAL8ISizJNCmCwIMkErsnmbT0hkpFr2tyiU8yGZux/Kxeoq/lt6MWCVVh+UmilflZRUWhy2ykATPR+5MGuEH9QJcQA7jkOipMnQwyKrBIY1nP2A8SqgDI+bJEH55YhjiQ1oAsA/m6XhlFma0/lNYGCjhWJ8r1/Gqtg2hAmqlMGDOyZsJqINB8DbRxNXj9alC7Jgy/fXsR91wpEtMjll8yUGTAekPxn7zspQF9DhhLHIrkrP6eXZ8Kg1LqZRr7rQCuzj9haNm/mIGVjU76u+4pKxF0nLReNyGYqgLN5d6lAFXikrxG6s96v2zqfGW5XH/frWb5+Apjs8g+2viyYzZQDCAkBgIHBIqodfz52uB2zuwvy+q7hD4DmtNIJonCsJ6fB2W9okX2d7MEOE1e9zI/uH3o2qTU+5W+CZacRsNFWVkTuPIOKOCZHHsZF5EJdWIF/zhLwe+oZT7Y1QTs0Hp+hREJlamdAyq1D6uRnPuBEVCBm3qomjDbkaXSY2Ztip/VOl/batKbD56AhWsT0ZIEDNCWoVsP1oy6ztF1Ivr5YP6wPt4UhF3a0g7CjoSKwtFgTdPgve99b379N3/zN1sG/Xbs2IF3vvOdW23a9d4+/elPY3V1FRdccAHe/OY3bzugu112XW3Xdttpp52GO9zhDtvCBr2u2/r6Ot7//veDmbFv3z5ceOGFWwL9rgt24okn4nWve922b/eud73rtm/zum43utGNtsxMNXvAAx6A3/7t30aMEQ9/+MO3ZZvbbXVd4173ute2bGvnzp147nOfi927d6NtW9T1FMPm8Ns1Bv0++9nP4swzz8yvLQvlF3/xF/HGN74Rz33uc7F371486UlPwpVXXol73ete+OAHPzhAX9/ylrfgaU97Gu573/uCmfHwhz98S7TpBfAq+fVvEsooyyLEFruVyrLZopuqqsiXmG3sK6BOYHBgpHqxy0zec/CeBslS14rcYad1CEZBYTLmHEpA3/PLhN1FiCiBFpO5sWMPLmiTGX/KbUyJEJFga8wBkIfhvrzMpIBQZUFesjN58OA/s+xpQAIqjOHCTSQ9h3KeJsMzKa9pgRXP8kvDvpm0QUZ5LNtNSerKxV6Ccq72nXSAC0iNWJpyzBrwMDanz86uGoBFwhKVQnSxQpbfTMPcTTnXKbOzOs1Wz33qZKiiSj/2BLBmtodAoFABXQk+JiKVpC0SU1ZfJejCO/URxEN2X2iqLFUbGlnwS72Z2gWr6hxITU5mM1k/A0DN4BAEFo4F+M4An77OwJ+ew1wPx4JkSdKFk53gCaCmBAOh/VgAMA/6GSPAQDFj9xkIGA9CEmzGCQniAAEAAElEQVTKDPjb9POD2KYB7zHZvLTIu8tglDs+OyarX9TpMRTWYtI2AFGj1TUYIUi7Kx7OFxZ4I5WeMgxzQfbOnvdD1urgMwvM2DXnpTx7eR737V1k9e3bK9dlroNaAvmTATlfx0ufY2W1SHha0NEDfRZ88wwEbf6BAL/M8pti8FjyQagGzFkvT5vCwaLt22fGLDqc21/a9th1zq8yCWbHHs+BcrsOiOXe3c2xsqLStr3OZZFw3KzO97A17gfMagFggF1NhZop1wMGLMkhISVSoKxIfEIlA9Hr9WxBfpXTtlp9co+gwsadmq88E03NWLqTiUae7cPuPjj+3Lbvd4cyj7cxDpgwdq+YAvv8PSYoaBVYgD2rnzjTPwOyKoarH+sAvxHzaUq6PB/HJh9ZcwjKmlMmjrHbh7LsI6bVBPiywKByNQXt
z4BfA03nsdTT3TPv8v3ct9HqSEcWoGyFBXytQA5QK4CRyUTSfA3U7gPP94HaNaS1PYh7d4vs9N7dUrN73uZ53hhR5tNQBU100utGAT9qZkCzovemUfKJG9sexLDEIJP5NEZsSwl1YMQUM3NxKjGpJQGTmQjrnYyZFkATgRYJQdl0WT4XkMSwUEminIF9ByOj6JmCHIpctgPN+mRg8vBxYVMEXTvINdOEcp/2gG1WDbGESEvs0+cDVqljBOf9eHlgJ+dJVVMSB7haGI/ZJlRIBs8VQEXs5XehziAqEx/0/dMAbFYAuwqyKgQiOApzk6nIypc5IWQllUC0kEwpx83F716ZCVhao/j9qzs1wWoV1My0NmpQeXST01cZUl0/2jpTEtWmD7L4VWWdSFTWkLZOZMLkPG0ytMkA2WvZlr7V9cOOiIrCUWBN0+Ciiy7CTW5yE7ziFa/A3/zN3yCltK3yoUubtsc97nGIMeInfuIn8PM///NHujk3eDvttNPw67/+6zeIsT+bzfDa174WO3fuxB/90R/h05/+ND772c9uWbZyaUsb2z3veU/s3LkTa2trOXn5hmDHHnvsEd3/NfaW73Of++y3RhwR4cUvfjFe/OIXb/qdE088EW9961uv6a4XbBHAKu/5z0hlLC3wEpNKKNqijIUFQnWShY2CPlQ1QNcK2Ncz2AF+cZS9mPqIqIAK1ciBawteS9ZpKotcW8g4wNIyPb0RERIlldNSecD8mR67YwFa7QikkonNCvyNbbwvO6IxOOhwyQwY+Puf7L8wmGwRZxItFnyqNWBVETLjEVP1XqyOzgEW1ZOAhF/o9/2QNdS1BdBztRvl4P3ziQCHgXvsAAgvAeTYgJKozJrpThnAtPOR+xjIIFVUhlaWaSKAOSFwAFAYD5yACBL2g41fl6GdpaSqBlTXMv6YwU0tTNRapEFzt4UIVsZqNWsQZisZKDR2X5Y7oiFgAjtOywjXR15ZLfJX3Vwypj3w5/pYgiBSMyp1c2EMjgInmdGnwakiI1Syxa0eZskkLzVFRE7NAs5QKbASdAUW2bLAMODqr5VuBJwdjBmYJqA6qQxxkdf12e7+N8BQ/tWe2+Nc2X0+8BmHFF15SAmcJFAbkyUPFIDPTKcOGUubBfqmAsc+yGbXtbH6YieZ5et7EQ3g6yTTXJh9rYJ9/SCgCiBLpRlz1TLwaZCFvhOoG2EVhFqy0C2wajX7wqLE3xjoG5+DnOZAcPNKYTFqwwp4SAxSuV8v7SUB3Wsf9Fva0WPXJb/Km5cvdG9KmzQ5g/oWqZ+jCU1WXmgCIyKCqcKG1kTd6CNaluSW2sCYiktwV82zl/so/lvPjBAqUG/1XhXw64Mk1xizfly3D5DEItfuwXOfXMSQa3jK77CfEQkoAhwUw8Tm0xwQh+zOkk4sSeOAteh4KO9ZB1LpvlLvLTNlsqxnAeLG89YgeSolEXvez/HIfSMNXpOyzRijpIgR4DLwydw2ySf25Dd54XVS/8gYUH0E1toe692wHq+1r2ZGlevBAQCj5pKrJfKRhVlUsapQpC7XOxsAfmtXI27sQ9y3F9261D1L6v9TkHVB6GOWGrOax9JR6ic6wG8wRo1BZszUCYlPqf0rPkcdCHUsgIgAY5ST8PqUwJEQGVmufKOLqANjLjUP1E8gtFFUQIhJri1lXaYk8o4pmacycV4mzlm+NvI9z2S1C5ve6i56v8sPfdLsRCIpYSBsuZTPm4HLgQsbjIxl6lis4zqcsrPNE/lyPULmRcDPs/z8YbtEvoU5w60Hynm242H13UmLNBycefaqNh6sfWPjYaUq9Q5NrtakYcklPwJw6iR1XqtmANDWAFp30uTSY2hcsmw1KEvRpzSQJrVz7RPgfIKqAbhEpoLh5y5alFRXs6SMRK4m5dKWtold51QUjhJjZrz85S/HTW5yEzzzmc/El770pW2Xnlva0C6//HL80z/9EwBgbW0Nf/VXf4Wf/umfPsKtumHbrW51q22r
03g02LHHHos//MM/xMMf/nA84QlPwIUXXog3vOENR7pZS7ueWdM0OPPMM3HVVVftlzG/tO21az9FbhstTPj641g8lfVwXox0Sq0JXCFVIr8Q+04yMO27gC6GZgg7WoSZBKRN4pOtth8wkPjUjbnnsTDIiIayhMRlocMp17owFg8wDDwTJdQKElgmLOl27VjzWtcdb2YQxsLis/7wliYWoDkAbjKB5MBGfb+AfUWypeKSUW3PM9jXF1DA6ntltp8x84AiLzgBMiRXWySz9UyaU+VUjTWU5usCACqwMJDtBBYCLYMg51i+MwQklT0rgQINFtRNyUgOASaBlmIHJK3vhUXQFPA1MNw5iJLBzYEGgZFBfNAAPw2CUlVL+3oFrQHpVw4IHMD1XOpSzobACjeajV7V4JVV0I5jymK/mQFVNWRIDVgUodR4IgYwB5oV7TPOtf7svOS+dwzL1Cmwk5LLLDegMSCFEmQosp0lCDjFgvP1fqS2lMhdZUbciB0AECInIEpW/JitZ8CfB/z6KLVzOBLaPoFJAm2sUkfC3mQ535GQCFpPjzLTtoRwKAfNgXLdWsC4d8CfyZBafUKp3TMB0kVM1gCScVTq+vU6YbB2RtAxJefUgsZw12apm5ODSV2bwXvqNpDaeZHvnKuEZ9squ0/AeAP5+rlsd8ygYCAzT33mOe88FjRTual6Veoi1SslqFo1WdIzz4X2OIHwjrP4CBgGnezPA37SUHh5uoUtk9Uz2kQWbWlLuy5ajEgb68L+0JqrC/XwjEHWroGJERtgJTSa5BMRmNGEhHa1VllCZVjHVMA+NzdJra4CAhoTnhKhiwkcGqQ6CtAHud8Ii0eD/R7krwrwQu16YR9bu4Hid5hPYfcvLswtD+4bg977cFOW742OMWe3EqtJB5QEH3agpwf3jM1jsoyBSaX6lOXnmD0W4DfmWsUk0ulR2NZZOn1/DEY7XgV7zLdTSDD7d0Cp3Vz8PGRwcTMG4Rg0sdfJ+RSZpan3fGH6yb183kfM+4TdGx2uWu+w1va4aqNTedQ+J+nULEynlUrUAFYqYT4aKFf8UlGfmFWMqpf6fbx+NahbR9pzJeJV30Nc243+qu8hrq9jfvWagH7zUp+VmwqhrpBmDWqT+e9nWmNZZD+pmcl9qJohVTVSNcvH6IG+cW1ZhtVmFL+/Y7lu+gSsVHIe5VYWB8qSeWz1QM0J64jgVr4wC1ILrk82vkRCflWZeQAGwFSauueNx4wHt+xc+mQbU2ZQFyIBg8Ql7+fYcVtDgvpldn8uNd+KckjNKCy2CUZr6p3P79prz32NRQrBtb0Zgn12zDHmRlJf5hQPcA/6iTsBylIEgoJ0tfQT6/mVJD5bT6XcRzWzqjUwWpKrMJCcs5bjIFGpDiLnOVO55FJiQRPYtAsSEViBTapqoJmhlCbgAvpVTfG31M9KlfOxuELvamJb4urUOhIo6iWF1WdMYV+D1MuYYuh3WX9q8qGN0yz7v7SlTdh1TkXhKLOnP/3pOOWUU/CJT3xiCfodZvvYxz6Wnz/taU9bAn7XATtSMnxH2h7wgAfgi1/8Il7wghfge9/73rbVDVza0swe+MAHoh+rvyztsNpRDfp55hQwBPgG36MSrBBQhSSrXuufJQ4ilZiS1p+oS1F3zXZM3RwMC0pXiOhAgSelPbONGGF+wTL4mh6LVoHIspoSO0nl2AZgG1wgRmp0kGOueMDPWFEG+MXRwmwcA/cYgWfwlX1PA30EL/Wk2aUm5QkIwJdiYfc5wGCYHb6YLW4gXxqDdvbaWHxdW1hEbXnuAYboAFsA+fxxMIBPa+B5WUED9wxQU6kz4ojEArDleo5a2xEVhowC7T9/Hsd4jGXUS0DUSaeOxvnABqwireWn4CQBoKrNUpoAlInqJlqTTDTmVNVkSR9j/Q0kPadkDgZBIAUgQwBiQOIIsO5PWX3eSr+ppOMmAEmp7VOAvlzbbsTySyg17ozh50G+TVSHRI5OgT95Y/E7
U9KepeZRARTbPqJlAhBRcVApXb1ejPGnQL1cz8NGZQDfZ1BrgAwY1nuJmxwQe3mnCSugIoQlbJ1jwJ8yVqXlsQC9HvAzwF4lPNHOEecC9MW9u+Xay1Kec6SNdcS2Q+xjrpE6vh5TlHmVvPxU1QyAaJObQij1ZSwY5VmhmWEw2QPaTweSrtmE9UMp7TfTfAAAXMt2MAyirW5/aTdws4BsFJnP0FRgZqRUOCwrFSGmkGuoxpgEhNHrzcaR+Qy+ZnHxY+Q5KbBgzBph2IcM+nkGld3zEiA+h7XXP7rjILDAW7F8vgBgbSb1N/7OQdh4TvYyjWM5dQ8GskpXsoKkdXDMLyr+2CAhYwqQ8E1GqdOa+xIKRCTdnmEfQJb23DQ4v5/+MFnFBdnEKZYfyjwmiS1yz1/vIzb6iLW2x7yLWJuLT1ExYbUp/kPNBqiGcjwGNHjliW4udcP6OaiTWrNxfS/ivr3o1wTsa/euK8uvR+xj9hc5cGb/0UT9WwoBMQOZzSIw7Y/b9UNmvWm7jQFl/V7+5Bj9fGzPLelog2KWeuyiqEdI/U2Ukgcm2ZhiTuIS8FL/GUtX+/Pkz6FXg9C/sfSjZ/stDJfRdeHXH3b8tSsVQAdaRwyGVJD1wjiZzxL6RooWC4Bf7uByrQyuLduvmkl/JwC51rf+MXNWUbH1X6Kk11xhd0pSVo9akwAABqdkJwYxJpkL9Pwy67jQbY5rwEu7KLP5SvKjXjdVvcD0M7/KWH752sQiW3Nsfp4LTDmhqqyDaDCWgbJW2a8Rw9jZ17Ytfaujx66rKgpHkz3ykY/E1VdffaSbcb23iy++GCEE/PEf/zEe97jHHenmLO0GbscddxwuuOACdF134C8vbWnX0B74wAcOkmuWdvjtqAb9gi50LFDSqFadVlgaBNF5YhlhCzHiKFmdMYK4gtVFo0prMDSzDPwEANz3iBNAnzGmcgH0ui4Sic1sFJQOmy5WTNrKQAtPSKu1rgsb0DeS1VzYFoZAn1+geTc4A0I5sLMo12mBhzHAZ0Eg/3muJZN6UNeVwOCUlOcYANSs9Fx/DxgCfCYNqZKRqWuzhKdIBwqbqF9bQz/v0K2to5936Nc3BPCLI5amZdI3Nay2Hev7rPXuqlkj31VpwaQgGYICgRxAHeS1SeWMgcsUQVSJ3CxKDYvKgioKS8RIOShaAFQne4OJRbEGXtiYiLHXcRsUrJ4VYBRAlvIBhNFqoJ+r3yFZwCsLGegiy0WbZn8XaTXH+LN9xh4HXM5ObZdKbZ8ulnp98z5pnaSUQUGR78SAWWIsSmP5AQKIIUp9KSb5vNRIke1xJAQSNp8BjuMaOn1MAJcAW86uZ1JpO3nPrts+ykVlwJ+d/bFZoMyu4Xz9YjEoIMeChfd8EH1Q+1PZhqRAKaeCxQpMCUQdf5XV9gGENaLnKMt49q2AfesC7MV9e4XVN19H3Ht1vha7dWGZdusq/ToC+uyas2sy1FVmoFrdPprtLJKeNqfWK1LbpVqRQCpX6CDMApPO9ckQQAk42dwVqQB/FpTKIMEokJfHJDCoO1k6dz+siKUt7WgwBSuydd0gKD64LvpWABNiYL4GChVWqpnK7skcbOxo78/4+RmAylXyIKEiKUDQR6ADNODs2mmEfLJtanJIX5IjmtAgrDRSU9RYf9ZeYHifJhaUy94389KLXpIxN3TxuzJP61sGWvAQuLF7etT7QyPOQJZEtd8AIt2XpT1ZZPxM3jMQMrOnViCL2rkcU98OARHfTm+JQH2HFCqnoiHJR35GG/iABviZgoPfh4ImZEBQ7IQx6vafAVqtb5dlIfV5G+V+L0y/JOy+9Ra71ztcPe/wrSvXMe967JsXpt+uWY1jZhV2zao8Lto+CSMMpc8n6/itXYV+7Wph9131PfR7rsbGlXvQ7ZujXduHOB9JezYSDJH8rlh8PyAnqgjYVyPVyvirlek3BkPtNPj3DNQDROkjMFZSwkzHSUwAuh6BKNeEtttq9ndin30e
UzzYUTNiDambGaVqeM2ElWom40JB8uSVN2gIZg3Gkfszmc/M4NRjitomk4M0tp/5OeWY1SfK649S1sAA7jzeSX0S80N0TTHwuydkPckDXJrcmSU9fc1FoFz3I/bvAtgXR/u1sZ8ikApQJhh0pWOxEjzVEjHNN0RCAGXVitUqaPISZ/82n2/tukAyR4g/uwieyrHotRUaoAFoxzHSB06RJtc3rGrQbIf0STUbKCmkqkEbkdl9EYsgbpjwSYcA9rCWn312jZKviItu79KWtrTDZsccc8yRbsL13j71qU/hXe96F37mZ37mSDdlaUvLVlVHNVSwtOuonX766Ue6CTc4O6qv5JK1WxYJnnkWDPwbBXqHGxEJGuIKqWqQug1QswLqWgFNAJEnNMaSLhIZwLgQfJbzqWoJTDczkUdpZrmmh2SilwCHMJYwYCkZ4LfexZzhDADogUBRs6wFGJLjopxBaf3ibSAN6d73WeUDJpirnUFkEjGLmeT23AexLLuW+i7LAg1kO3OgsCufmSSPLZq7TgA8k+p0QF9mg7m6fMbkMzlBdC3avfvQ7V3XDO19iG2Hfn2Oft4hjbKxDWgIWtvOJJsoBIRZIzUdYyy1W2YQ5hozgEZAQ1cPI/+Ns9dT1CxclXOFqP0k7fXAIQNIFbu6M/oosl0WOMRikA2awctS0w8xlvMa+7K4z2OWywKfg4xZZTPyyqoEb3LG8/A4aBKmGpmBhAqMpg75+skynxhdR24f4+c+4xtwAQd9zPVi4mJA2UCyAo5pzzB0gjC2xMGBf1PWR6i8ZwQTo+0lkOZlPo0hoYTjQyqObkAVIDGiQCQAogt8W60++x5PnC4DEvtoQUX5cVAwFCzfIWX9Fck6B9alJIBf7ITJpzKeBvbFvbvRrc/Rr88z6GfsPqBce/m11kjiwODZbIHZx3k+neUafik0GphqAK0r5pmfmS3kJkLL4JeaSALCR8oYwmJfOXbRQrDcd24sLJnNWDXXlvWxBIAP1/aXdj01x4ABIHK8aJBZdWNLUVhSUKCAK9RcgYkwCwxGQleXwLDJE4c+qWydJDRNzlNAlh/2Zq0YJ/Gbz2Tvx5RQM6EJjdSC6sXfSLEr+OFm16pjvSHGaU15YGFOsPvlQDWBpIaXgXYtJVRaow1g9CxHFVSe08z8W2OmGXuNFUC0ul3ZJ7N2j30vYMhCGh9C4lKbN0WZ8yFzpc2Lg4SIKD7eoF7gZMJOKhLkfv8jwC9xVZLibB5XNnsbE+a9yHRvdBFXzztcudbiqrU5Nro4AP18v+2oQ04QsvNhkoc1iwQqdRsCBnfrIke9T/7ivr1o19bR7l1Ht76Bbu96VomgwOAo967YtohNhdhH6SdXG06A8irXujNgydhvZkwY+OBjs+Q7AVpl/PRBkp3qxHpvU0lyLhsWX0TSeNagzEem7BcxBYgSgZ57kusk6RiiVO5laXTvHyhM6GNmtudzzzmJ0cA+Y/kZUGSJTbkvLPnGAX7D+uAKGEHXEa4uuMn1LiTp+L7MYF7ITL9BDT87jqnJyLaxCeBHuj5Juh7IiiAAEAX4TsTSbmKdTigD0gSRMw4kcsjmK0b1xXoGqhhUyYKyksXYfL/aurLiSsZmkJrHBIBXRN0mda7msK5jUVX5u9nPCk2RbO03r0dqDEZ/TRqoZ358PscuMW1/NlWnMY3XJteSLX2rpS1tadtpe/bswWte8xrc+973PtJNWdq1ZPP5HLt378ZJJ510pJuytKUt7QZgRzXox1J5RGseDAE/HxinTaoMxAQJbGiwgVKUx77KhcsRI1LVDhfjY3kYIIMnVDcCmhgjxWqieXafLlQkmJVybYs+Jg3CpwFoYTW7gFJLJRCBe8rvAeV9QJo3lo8aL6xiSmCizF4qC7ACpg5BPQGdDLgwoDFne/cl25tGTLe8MPZAIDBg/eUFczcvzD4D/jxDTcE+Y/ehaxE39snj2hr6tkN79VoO1sx3rznQr0fs0zBbOxAoUAb9wqxR
llEl0pMmR6UBnwpQRmej2bHXTPPbMlutX5PGSfok9fuAArJa9n6lf5bFLwG3DoMaJvlEhwxKUt1IJmw/zECXYw8YgH7NLLM5khunOtDlcSqYMs76Hj1PFHU/yEw/ubY2qbNiNho/WV5sDGrr9W3Smsbqs8DWuDaf9a+F2YLWm4oHAP9aa+4mMQYLZvVRpLOsrh8QEYPUZklJJJxionzdHopZFnUddNyAEBX149HxjucBk8uzdwO0pgwg70ZlckKz1W3u4QrgDnCBwBxQnm84lt9aZvrNr15DnHcK/AnTdtC2ptZHYdRyXQ0Yfhnom+3IDFQ0w4zzLD2lcp5W91ECYzY2hv2XrJZidHJ4kEDZpHyt2cQ49UGn/NTAvyMM/C1taYdsfqx3bZm3zfy9oe8K2BSaDDJVXGGlkmusT0XmjwnCeFa5uj6lSZ8FcEkd9gIYXNNjQN8+L4wnDACNnASTAT13H9oM0AQwmRIwxfAd1wjG0K9iMmlOQhuN0Z80AaGw/UJMg8SszBJU4MZkDnPyW07CwkKN5EmpQ2v2mPEYAbB8NwN/9n0F+LwvN2A++W4jYWynJAoaSX3AvD9TBHCSrOKLC/gigXUn5d2LrOda22PPeoc96y2uXu+wb16YfqUeItBUjF0rknhjvgFgQJJjiamkJ3UbAvaptGdJVtlAv68krRjol2q5Z8W6GrDWZScFTEpObhq5TuEQdB0nJvr7VVbfIJPy1/MfxR+MTGg14cjOqrFjAakbnIG2QOC2d5KQjBiAKsh9Muj2q9AI0Oev6zS6p419Q3vu/MZkf6m0QV7b37DO+GBgwvnKBvw5wI/6eRnnVkt4tO7IANyC7GqR7acQCnjE1bQkd4ry/b4bvjcG/DSZLRnjMwZJ+DR/vVf2X+yBvkMIMp8mUpJfIPU5hekncx+5uoekiWdQOWJJBPUM6jymUJLiIpCvL3Bf6uClCBCB6yavdSzZT8BqZapqQpWBo71uc2pdLUmjKfuYKZUxbOsahs6JIAcGbuJ37U9FYfz50pa2tKUdhbZr164l4HcDs1e84hU455xzlqDf0pa2tGvFjmrQr81BdTVO4ESIkMWwX2gAZUE9KSFiEkRVgxR7CdTsOh6pWReW0nwdZPXi2raAVrpoLNmRWg9hdSeoasCrO12WZFWyfi1ArUGNcT2vGJFlCaWWiSx2NkbBAbMC9hUgMIODTubPasX53/g6C1ZcnfS7FkSqg3xH+rUUXM+yTm7xPZC6iQ7g8wCgAwysZl9UsG8A+tki2jJRx6Bf3xeGUTtHu7aOfn2O+e69CvrN0V69hn7eo93XIbYG+pX+pkCy2K4DOBCq1TmqWQOuK8R5B27kMuG6yjKfABAaB/hlpqecf2gdvJJBXM5LVJamxtUQGHlBn88ny4LYmH1NKDU7qFtXgLXNNdVyn5LJsjVlIGGUQc5cJI1GbL6xlGdeZI+Dhj7w49kQuVM51zOR6wqiz1bVoBjkHDNL+7JsmkijUgoyjogHMmzVSqVBxBIk6hWoiyp9hT5mxpsAYlBATK4l1oEeU0INymB6jAkIBSCMxhIz2dCU0HIsz3v7XhlHU0GLGIWBZxKaKX9Xu4mGNTNz9yb5TkRh8VEqwRQk5HovBjkb01GOazEIZMeWQLleqATPJajEEDyv12s7pkJsSdpvgSthi3qgKyVEu1aVcZushmYf0bfdQlCUtWYm1xVCUwmjVq8vXt0piRM7j3WgnzD80myXXE/NagH+VC6tAwurIyZ00TGnXR1E6fMyLgKnXLOKkhxjBjkJEqADZDwfBICXd2P1QTMQsAWE9xBtWXdmaVuxnNBkFiOwsW/AYLHgPqUk174yzVK/IvMEd5hVs+xr2JDZ20b0nEDEmR09lvY0pleAsKWZBJ6zUVfq/ZW6xYPm6v4qJnQRiCkCFaOpZoWhxiw0Gg9mbCaB6RNQACBtck3HWPAQuCQpEv8JAGaBEWPCSqXHSKSlVPXeUpX6tYD5AJxlvyV4XiTA
LeEtEKEoLThfLI7u33ZszALEac1eij0SRZEqd+Al2fmfSuga95PbfuIq1zSj5PwLQGTIiYWxbX5SPUMixrw3dp/W7+sSrtoQWc898x7f27OBK9da/MfuDbR9RDfvwUwgk5GNCU3FOH5HjTZyvs8TigSqyL1uiKxnJ6z0uHY14t7d6PfsQXv1Gua79+aksXbvurS7TwhaNzA1i0soClojTo8vg5maTDWeNW0UjW8RNp79sGYqZQ2isv1iApidP9KXudnAzjYCLSXUUZL8uipk/2ZHHcAENGyAIRWpTxYm3eC8TyWzGDvOvV5gv8KDUCmz8bO0pZ4fQBKRrGdsvAcCGgasfij1ra4/HPhndcNtXTEh7Zl99VEdv8EfW8IaD87R5LEPEhbdOgVAYhFwlyQ38WlT3wFBwdRuHYErcKgQWEE+Ful1z1ROCgBKn5FK2yODf11MaK08QJKksk7njrkO/kCEptZSFcZ05iBtjx1Q69zqwc+qydLppqbQ6/66WOpn57GpvmxK5PqsfG6M1SzruQnYZ0kdQfsX7nF/LM5ry5a+1dKWtrSlLe1Q7Utf+hJe8pKX4P73v/+RbsrSlra0G4gd1aCfFIbXxSLJIilSyrKewHAhbQEYHn+mi4ic4akSKNSURRGFgLSxrj8MA5YSVc0Q9KubLPNpmb6DBaXaAK90Nd6CrjmZCb1bsBmLCSh1HMxaW3r1QylOZkKdSIEmKChgIIutKjGQk/Jm0ivWdwZSsAJ+meFn2bb9vLw3lsAZA0gpZsDPA30DwM8AvswMLK/L7+S7se0Q553U72s7ed126Oe9MPzaHv08qkTTEPRLWbYroJ9HcJCM3tRUmt3dIzEvSIPmbWwm7TkRDLNzAza5Mznxts4z5lGgAvoNGH4jgHUzNlFu02DnYZHNN65R5AIbhGGQY7zgHgzDKeDP/YZCAFSaiDioBJL7TeyRIgsrkQ30CwB3IGKkKJnRwjI1UErQqUiElAT461MPBikzr6DkBv4B5XrKmfUWmNBAbowKnrHWpEKRXGr7mJ/bNdxHvW4nAhgHki6yj30tUnLjoWynsA7zWXDfWaihwovAn7RVmL3G+GNlIArxRmsWorCAWZMASkBomOG/X2YcBOCLkPMfnJSrAX5cVwVQN3b0ygw826l1ZXbm5AmEBqmqS33UsaTnSNbTg7j+XsEk9woBUsv5SxqAjCAXjNUzczDMvdF3TBLtSEhQLW1p22IqsU0AUgdQpdMOB1FHAMr93ILa+R7PoBBVjpAyK682pjLbPMcLPojJAhJJtkRKNJgXfT0wAw+8ZX0HS66JUuNTmExaY44rgLpSe+tgLAefGUNhQiyAhEzsZOjLvZ0ZyvSTe5EkrQiqGVmZ4IEwV7lPACPAryRtEWiYNDJOzonF1xr2j7L6gkvcSREglXDth9vJ/pyXabePva8z4QcgVOI7mawBMPQ7TF6fhrV7Oz1n8z4K06/rsa/tM7tv3vbou4iuLaDfvnmPpmLMuziQ4va1CIlQgFFjqmttaHSt1ILeN88JK1LLLyGaPz4xLDiwY5Bx8QNHfWEJdmN5zynbLPafk/mi/dapfChbr4dbL6hcN3r5HF0PJqAOUnt4R5RVUxVTVkEQmdkKKQiInBigqUtkfIzOp7Nr2P/MJ20Z4FeYjjL283Xu128GaGfW3LzUFU4JA9UQW1eMWX7eH+aQfZcFWc+RbGSW97YCl2kCTMxrlD6DjQQUnzY4oFyZ0OjLarXiShh/6oMa8AVQnuMCyTXBuQ9L+YdAJAkDnIBe5h5jyfZ6HYXAotgQatlv1Qibc5S8l3QcW31DY+BaSYqYz+1iLT85VxgIYhQfFwPp1mucC3UdAPyWtrSlLW1pSztUizHiSU96EubzObquO/APlra0pS1tG+zoBv2gQeuJz6xOk/6fs63JL/zTSCIGyNnGUNk4ij24mQHGJpuvZ8BpUPhc65blmn5aGD7Xmwp1WUTGCLDUTJGAjgN/1KgKiFEykje6
KHVvemTWUNcXGcOpGmN1YAGNtPB9zVI7pmZGn6IG3SgDf5ZJejCSg4G076ymi2Xc2gJ8qk4fkBfi0ge9doUD+byUp0l3emafPde+T8q8RNeiWxfQL8sIKvgX5x2iBmvkTwC/IXjH6PseJLpY4EDo571IN/URxAIUsini9LEoenIoYK8CFqibgfxglrHSMxxIa/YlDfilIj9r3R9YwOlK2X0iQTXP2cxIsbweAX+SuUwYXN4OhIy2iLegVJDvDWQKszxSl4G/AWg7am/y+xmb7S920l8AUgVQDIMASYo9qANSjOX6TFq3MczBSeR3V5odCFWFihOY5DoIvWSz9xHgOiAmoOY0kPWKEZPROi//aRm2ngFo11mbr7mAti+svzHbz667WiXcTJaqZs5sWqKS7cyjoG1ASWBgDNl+jAL82QnIl38y9h9KlI2Hc0M+1qggoQZYwQKAJZJjzoE/MkaqBjuDnEvS80k6pgrozTDJWG76zOBjlUCz645YamdmOc8dO4TVt7IqUp7NDHzMCcKWmO1EtPp9zQ6do1Xik1jOe5LMczlHJdA1JR9GGigzZiPpVBiTshxZvmPBdgtUaYcuBKoWpsyFQC/kPnAtmx/7h2v7S7t+Wk7KAHLQPMUIxHXAJMs928uC2Ar8Ud/ahvI8MQsqC5cIXYyD+k7B3Y69O9OnhNTL9SeJCAXA9yyhLooago13LxMqiSAJMtNGxETYVTcKbLUCSAHD+9tmIH2MIOqGDKCBvGESOetk82KF2rHTM9OvCnp8yjq2oDglUeZWeenaJUnULEy/KhDqwAL62BxF5U/8qW4IgpifMDjHFZAkmYmAzGQWYGdCncH8Pa/ckKU6tS6fB/4MSLFtMMpnwIJ0YJdEjrlThl8bEza6hH1txNXzHldtGMtvrvX8WmzsaxH7iG4eQUwIFWFeB+ybC+g37+Kg5pnJ0tdMoHkL6kotv7h3N6LW8uvW1rOsZ78u4F8/L0CP+YW5L4NImZKtAWpL+nN+lutPIt7U104oaxtjFCWftAIFjZOw/VBJv/VZGlKYsYGhPpGsEeadtHfeM2ZVyNeZJDEBO+oAICCliCaIj9ET0Bjwk2Jh/Y1WXZOSmCPLIBEsIacAftaWmICKpc5wrn0MZMUR6ueyxrA6jMbum6ipl9zaIo9RYMjyMybm/truEncIABhlvsh+su7TJy9yEF+WRdmCqmrIGLRJL/YAzcWvAsA87lvOfnqfCHNTh3EJZ5GBqP5p31sSW5R1YG/9KKvliis0zQ6ZI9oKCJ3M0Tn51diO1RCQV3WaLroxiZFvRVL/M6WUHSPznbzSjvd57T6gXSvv2fezbzsC+3JS4sRn14ItfaulLW1pS1vaodjrX/96fPzjHweAJei3tKUt7Vqzoxr0mzJbiIgMVAIiKSso5QUJ4IIj9jsiyRa1xQ40S1NrOBCxLNqqWsGJdiAdQ1oTDRyAqhrU8UgaEMkLS10omsyQZx7mjEgC0ATJziTCek9gktTnHMfv7aGwWcyK/AgL0BGFOYgYUYPRQkCFGgyw1B+zVRYnAkHkCA0Y9OzJLLsCDAN/ro7MeBGO2AvAlzdSFuN+cT4G/AwIHIB99loBP5MRlGxsY/KVc8uBkFTCE03QjG1yn3P+HtcBoZE/qffHg0dilvpjgQXoM2lXZXca4OfrtwyYUC7gwyrpZZm8fnEcFJhGP88Z6QM2ZYwDwC8HLEcZu/k94kGAwzJ5M9NvbNdgIZ02C5hY1jAwkB6V+MMoU9oCy/6zjsErjl0ACLMLQFXPpO4MTOJI2Xss4F9KQM8SALM5wdsUUJ6bkoG/Ahb6GpvynB0gWH47YNgyYVYxamasVKwALqFmDORzczAEpZ0Rwjr0wJ9FV4zxN12pdHMrTIDFaKNJy1mtQWFRU66VSMlYOlLbL3EQ4E8f0ayAVmZA7MExag2ZgJqDyH0q4Bf7KNcS6/WkwVFyNVB5VaQ8aYdKeVazAvJVKxJcDiIz1Udg3sccjDI5
z64XsM+mAd9XZJn0CnAa+GeyplbfMOiUWPLuh33ns9cX+tM9ZxSJraUt7agxnxhj92pLdupaAforlAQfneOT3qPsfkMuYG7BXClbpxJ2egHZtcqUMs9Wkh+Q5z5jJ6eccCHPW2WCtTGCiYryQTZGoIQuStJXl4AqNCDeyBKUOXq9v3ufk5hLJkE9kriUWluasOLu91aXLJIcY82MmpX1CNb7iATmAyeVay67rgPpb4pcaA6Oa99asow/hwPFhYljkSQ093rEFByAfe5ebOfV2GsJAI2BP7MpfyTUwh4ywE/Bvl4f7e/qeY+1Vv72bHTYN5c6futtnwG/vo86rzN6ZfiZBKEZ5/tukabPPqsy/FI3V3UIVYloO6QYByw/dmgdsfiG5h/6xJcppp+NiYGP5p8T53OeQRVgQf6/HBOBE2UQsAXAkQagN1D8HQMA2xgRIrChfvC6AoJNkCQaQOr7VdpntV7HCFGT8GwMHNp9zQBMA/y8P0Yqu53UNwbcPVbHM4394RSBrhuCfcCinwnkBCUDKgcsv7EvOxrLti7EmOGrbZuaM+2RUsrnnrS2tc0fuc7fhJH568QIVYOGK3QA5grKFh9Fd+fXfoioFEQMREXqM1SSwAUAHeUkrmxe3lPncVNTiDnhYpHlZ0kZY6BPjgNZzYIm2j0pGDFiFE/2z5L9t7SlLW1pSzsK7PLLL8eLX/zi/Lpt2yPYmqUtbWk3JDuqQb/xerMsQJJmLhOIC/Bn0m2T9fwAFXUriz5Z7HQic6LBbdLFrwezAGSwr2RIVjmwkWv5uUU+9R0SRNIlBJZgBKcs19NGWQRVkVAxgTug1RVUGxNiiogkS9DItABitH3SzO8k4GCQwFgkAhAVUCjPoRnwlhVPSUJXFCRILptPCAnSsJGMlc96NfkdSkkC/qOM2wGDYBxMdDX9Buw+X9vPFvYK+JmUZ4ox/+VTGxgUSIC7OoBCQuojaAD66QK1NrCPMjuJ60oZSXUG+8hAPlZ2ZzMrtfwM6NUs74UsYl2ghjzGcktL1qqODw+i5lol/r0x4DcKPg7GsQU2HLtvUL9vlIl+UDaSQFqQ9zrY7fhs7DxGIsCM2LUCBNWNSETGHsnYf1WH1WYHOiatCyWgj0lDmQyRgVVAAX9MSo4wnAuShnXGgTegMEkMBDT2H4ABw8TX0JxVDCbH+OPC8LOYyzhAF1V2dAz8WQB8f3G2hBJA25/UqK+dtbAN3Q8lgCjlGoAS7FGmdKjkXCkARylK0oPtB0Cqa7kO3DVryQL52qkaqYGk7D5a3Qme7QTVDWI1EzZIvZLrPuX6fRoktgBxAvL59yygXMdw0Gcp1/ULROiBXNMwB9FoDPpZ38mGfNf6oDtQxkuRLMOw9uy1ZMasOJzbP5x2xRVX4Fd+5Vfwl3/5l2BmPPzhD8drXvMa7Nq1a9Pvv/CFL8SHPvQhfO1rX8ONb3xjPPShD8VLXvISHHfccfl7C/d/AG9729tw7rnnHrZjOWrNBaItIQd1owk5cwGZ2Ml5ciU14rp5AYBihOkCWlDYKxxQKleOzDNjwKI8TzRkuCTIvLzeRTcfe6CHgE7mbUJAxXLttyp1l0JdJEorSFsT7R/MmGCZmIyv+X8ElQInSe7qDWxiSUYIREisQF6U4HxkAhODoyQtZDa32lC5QZJJTDp0PKIppQU5dfL3dgVnM2gZAUKX7+n5t71jCk6pN5ifC+iNd+Ke75KKPLBiKgjzKAkbGfRLBfDb1xWwb63tcfV6h6vXBfTr5n0G/Lq2zyBGSiIHOpT2LPKeJldviVSpnWsd2nmW9kyaOGZJZLFPSDGBmDL4Z7WdpYaf+IQm80+aEDa87QgoROjKeBmb+k6e+WbAtv+2gceRRNocDMQo79lxlvtbAfy6mOR6GPkIa22vsrsMoqC1MMUXYl1n1GxJYzGP/QV5203MmK4J5V5sx5VBSJPaVP8rwsBOByDZ+LNEuH5ezuEm64xBbVIu52zB/yXOn0/K1dr1
ncQ3hcfoBgmNfVY0oSCMaQJkDdOsyPxorEGrbT1Omhv78raWSBGh2YGk9U+D+mh51+b7RIGKe13TifJNzDWKiYQ5XBmLs++GTk1eL8j1neXTkzI0nV+cf6K+6RjoA4Zgn51LSVigBfDPGMube6ijfjoiTL+j17da+lXXL/u3f/s3nHbaaUe6GUtb2tIOwo4//ni8+93vxl3uchf8yq/8Cvp+OtlnaUs7VPvGN76BU045ZVBOZ2lLA45y0G+tjVhRcMtnPAO2uEgIURatFQvDxgLtkvFbIVQVfI2SFB2YAgXwDPwLFVJfa4F495vx4tCBK6lqFoAVAW0SrLA7EaPRuoApMNqAHPjoY8I8JqxUhD6KBE8bI9baiI2u1+x2XpAbzMF9KnUTDLiDllKLJEF8kZiUejFtL7Vl6sBZYihQYTD1CgZsuijLASYF/EyCE5CAIbAg8Vnkwxy7T7/vpTw92Jf6fpCRbc9T32cWUQbpAqOaRfmeRhDHNf2MfeQlB8Nqg2q2Aq4r1DtnCLMG1azJTCTeeayAFas7QbOdwvKrZrnmGELj6kTyoP5RrnPnjt+AUrjAGpmc0hSrb7PFroF9FmzzmcyZfTgB9rnzZwEq/9mCdNd42+NmeA5UDmqMgiOuPiNG7Fn5WSh1Mmc7JKC2sgOp2lBAaQN1qFHVM/ShyA+ZvKNngwDTGfM8Gsnj7xhomKqS453fm2DZmoRnZmwq2MegzPaYynTO+9OglwfakjQUNNhnYfv5+IMH9Iyhu7+M6gJa0YBt7I9dJDGljiEiieSXbzMHkcGc7UBqZkirO+V8bqwX8H6w0zA8twqcY2VVpXEV7ONK2H2VvNeB0av0W+cYmL52Te+kpywzHRjKuAJD5qP1mch6Wl+kXJtGZKGHgN9UIGuhrliSz9e7ax/0O9rtUY96FL71rW/hwx/+MNq2xeMe9zg86UlPwlvf+tbJ71922WW47LLLcP755+O2t70t/v3f/x1PfvKTcdlll+Ed73jH4LtveMMbcPbZZ+fXxx9//OE8lKPOUjfPiTbZbL629zXIziHkoDuvrIpqggFBLP4VQe5HROxq2qpKttO3NB9mAAy4+ThLeiZj+Ja6paWWp7vWIhCZlcFMOdnDGC+rzQ75HgeR+vT3v4l7IAzEtI3D3nb3XAPTFFxKxAhUAVz23wc7ZFmUxcCoQxTGYkyIYXMW+UolMp/MGiDPn9NQwWKcGOSZPCkKuwfqj1K+q5R7xRjs6+dFot0AlTrJ+da+EV85LvoFDlTJzCHH1O4iVM4zoo3CPFtre+yZd9i93uHqeaeSnnPsWe+wsdGha3v0fUTfRaSYSnLMaBK2WrtZepupyKX35qO2SK2qRljyWG+Pw+2x+YtB/EXzN3Ndb2X6Zd/P+jIp0Jo4gy9lo6MaciofaGDZ2CSRCDA2Fycq474yOduIGAgAZ4YfgMx+3KdypbZeaGNZN+xoAogiusxCFeZ/E6rsx1J/6LJUEXJdJ91/jN7vEllKO3Y5Lsr9aONSFC86xI19g2TBhX6NvbL7VNZTSy9kUMupsRywNqWxlqd8b+/LGvAI8WGTfk5dB1SQ+TFFUKrEJfYgsH+0NmjyKNIMCBUqq23trn8AGfBrY8xrO6DDrGLYWOmDzM99TGgCozZlEj8/5OtXmKddBvxkHMl5MzC6+KCDZKhNgD55Pvws73akirLQJ/7cWO3YZdD0GtnSr7p+2Tvf+U6EEPDsZz/7SDdlaUtb2gFsNpvhr//6r3GrW90Kr3nNa5bynkexXXLJJTjttNPwfd/3fUe6KQNbX1/H/e53P7zpTW/CD/zADxzp5iztOmRHNeg37yI2OlkgkrI3AMsIleeJhosORAWuXAZp0AWPLNCgK9KowYySaZlQFmvoqci1jBeHwCBrNNtYNok0io8+L/CIGU1oJJOTSOQ9Y1JpqgSAUUUD9ICWE9b7mNkkUmaEkNU6odnuE9YbCAgMgEBbHJIt27TGBjg5MEIWjuQXyf5Q
+wLoAICX57TX0g0OABxJ8gzlPJ2sZ98XGc9YsrEH3R8YjAq57koNqStm+3Xfz+y9wFJfzIC/pka9cybPFfAzCcL8qIBFruNnTDovF+TGAY0DiQqQynFbVn03zLAfZ+ofAOwDFgG6BemiqTE7BhWBInm72b78tsfbmrApcHB8ngeyu9znYAoAoGqLhFlUQF2DFaFqELgCBwkm9mRB5GF2+ZQNWXEWgC5fVrhnItAxAXY6UMjAvTHQx0Tue9o225eCbIkgLJiYJC6EcmWSMdNS2af8brotcoyjz/aTS+3PZkoyj5Zs/YRkWf/cAaGR4LYFiFcluJZiL6AekAF/2bgEuqmq5XMOQLMic6zWwkTVZHafAX6JBPCzbPMM8sVS08+YEa1e30NQYHjyTQIwg38KyLIyw0n7L9AQAJR+LTXGoMEuHxv2uyIUCeZr06xvDuf2D5f98z//Mz74wQ/iM5/5DO5yl7sAAC644AI86EEPwvnnn49TTz114Te3u93t8M53vjO/Pu200/Bbv/VbePSjH42u61BVxd05/vjjcfLJJx++AzjabVwLazQv++9Ycg9iL4oIVi/PEpvMT1LLgV+UuTCh1BE9EKPZ5AD9tT1mXdi1KHK9Ilne9gl9lQprhYCeCaFqQJELAJaKBCORgX8H0V+W2DNSQQBk7k3ankRFvrBiwqwKyiInqfPcJ7QUUYMG9eiMQV4HZY8zQ+rD0iKr25KKxtKeYx8UGNTys/uz/50Bhgb45SQuQALu7rralL02AvxAXBI1kqvJqqw/k/M08G+ji9g373Odvhi1PnNMWSpyygxkDZrYke/BLrlv4F8AyvCbkoWcvl9aYpnViQNPZNe6RMIFdtyUfzUy77eISoAwDpmhSh0pA4F9KpKxAuTJ9y2xxcue9lHYsVLXj3Q9FcEdUGkiTxtFHYWTsQfV72fOa6VrYv5YsuyoO4c1VLlkVPRQ/L7heJYEzIip9YPMW3FhHTbwhc1/5YlEuE182jHgRykNSxcMDna05ok9KAVYndN81A70I1cfdeDTAyCaI/XiKwUOgq87BQKTnM8JEJTEn+kT6lAUZDpdINrcGIhBoRmAawklycLLeW4G+O3PxoDfJOPPugIjP3Z030nEpY+AQ5aY3Yodrb7V0q+6/tlZZ52Fu971rvjmN7+J888/f6Em6KGalHhI27a9I20xRnz605/GCSecgBvf+MY4/vjjrzfHdk1tfX0dl156KS6++GKcc845OOOMM7a0vX/5l3/BK17xCmxsbGA+n+fH+9///njmM595SIyjtm2xtrY2YBNfX+yDH/wgzjnnHJFOr+sj3ZylHaKddNJJOOOMM/Ce97wHP/qjP3qkm5Pt1re+NQDgDne4A37/938f/+W//Jcj3KKlXVfsqAb9vrM2x0Y9l0AIa/azY7gByKw1qe8F1CyLl8CEQPaoNSsApH6u66xKFoKALta6AvilKCyusWSSN+LJ4I9/7hlbNP4dMbhawQpXSPUMXR3QxoR9rWRFr1Ski3dgbd6jT5IlvZnsoDcp9i5BNoXAJKAUZfHV6+KwTwmzSrJUWyqyVEyCVQaVm8mSUJudKGPsuVp9sH7FCPjT9xfq9zmZT2PsxbbL2dhmXNfgGkiN3EingjeyKw3GjSSauKkGwF81awoTqZmB6hq841igqoXpZ4BfvWMgPyg186phwMGzSD2YZ3JZvTL9HIu0PE+bHotIFZWM+rFsUbIglJcuMvNj0AcG42hcu2DIAMyksk0ajfEUo6m6DcYHhYBka3YfsPGMTqdzTiEgMYPW16Tf5+vlfPRzYYPV68KqDQ3qUKMOrqYmMAhWTNl+1C4HdjCyO2O5x/H2p3aVc9xzO6XOYwTQkUhNSpBUAywRWYrUtpeczNOYtbfZ0mYqtjD+btTaqNB6iaaMG7hCtbJLGdArEjSKndTbaXblsb0wR7oxY0zqVNUyVjXrPHHILOnElexXg5V9hDL9rD5RYXRabS+bB4vUlavRmJl/
w+O0GGPNXALszFIvSeW0DAj0dcfy+d5kDDEOb4DoSNvu3bsHr1dWVrCysrKlbV5yySU4/vjjc2AKAO53v/uBmXHppZfiZ3/2Zw9qO1dddRWOPfbYQWAKAJ761KfiCU94Am51q1vhyU9+Mh73uMdNylPdUC3NN5DWWUH5xft0lttu58C+vfl3rN8jvUcQgFSJdJzN1XL9s/gTTLn2ciStHczIiQ7eLNBsMsvt6AIOBIyrY1hiU4wCpLW9MA3neSKPaEIlNa64Ep/OpAOdogN45OuNGYCyKesAAApIQO/vxALoaZZAk0iTQpLWOGTxU5WlE2PILObxMZqvW7HJRWMo8+nbOE4W0qxikR3syjnS+3tmoHnfZEqqPR+zghi5b0pwJ/sJnkGVa4OVWqwm5dnFhHVN5Nsz73D1vMe+tseV6y2uXu9w1docV6932GMsvy7luR0oyW3EhCZIHd3A5NYGyOyoLJMeO/U51P8YAQySDCaARFKZeAAD+XdLGrN6flbnuQA5YQhUgYHUjxKgOAMtenvPEoJeecAY6Ckp+KsyjjESUImsPyrzU6KCf0BMvMD062LCXKU+S31pGzpBlD2CpBmlxEhBxlgkQqN+FSgWxt81AP9MQQCYui+X9kz5W3k8x24xYcxb7IuigD5SLclDg/ILrH7HGPQbNHgi4WGUIDdWqViw2CNFFqDcwHUP8Nm2um7hWKjpQFEl7qtW5pNqR54/iAxAlznDkp5sPrTrZGcj12fg0rdJ8dXAllZWLnFjm7bRpJfTAPAbLy/HbL9NE90m3iMy+d3SH/ut1+eYyNdX227faulXXf/sTne6E0444QS8+tWvxje/+U1cdNFFW/a/AeDDH/4wbn3rW+NWt7rVNrTyyBszo21b/ORP/iS+/e1vo6oqnHTSSbjZzW6GP/3TP8UP/dAPHekmHlbb2NjAK1/5Snz4wx/GpZdeio2NDfzGb/zGlgG/ruvwrW99C5/4xCfw5S9/GQCwY8cOvOpVr8KTnvSkQ77+q6rCIx/5SHz5y1/Gne50p8HfKaecctTOK1deeSX+7u/+Ds997nOPdFOWtkU7/fTTsWPHDvz4j/843vzmN+OhD33oIW+rbVu8/OUvxxlnnIGzzjpr4d56Te0JT3gCHv3oR+O//tf/ive973248MILtwVAn8/n+PKXv4zb3va2W97W0q59O6pBvz3zHrTeyUKBCTNbkI9qaq2kBKqCZFsnrdwXgSZIwMlYLIEJwbIdjSGWIgiacWlBdVvMe8mRCcafLVjG69ayaNVFi+4nBwUMrNEgGfUt6noFVTUDwOhiAhFj3gsDkBoB5uogcp8xJbRcAt8ABsHvzcwHyMCEGCXARmD0nLKEUJ8EhLDs2Mz2C5VIZBEPgZ2xjeQ8F94fAX4eFBr0o4JYmdFXYyFo47+XX4fp18b2q2ZSZ6zU63MMv6oWOc+qAe3YJQBFNctShFnScwz4mRmzzwfVRqy+DAKmUjvSAgo+sGDBjGTZ9sYWHbMNgByESj6o5/tgHND0wYzBKn4R7Bs8t2OUE6PsCeSAFpEEvETyqF0ERafqssQw+DwCoL4HdS0YANcCwKe+RQpzQGsFIdQ5kEnBSZ36R+uL8aVxgADWQjDCj7sDBb/8vnnYnuBYESbraYw+JpmWTO4qQvrWmA6WOsBAdojHcp2DQ0xlSptiS+S5EhAGcJSAdSBCq3yFSIS6moFC1OtfAlapKqzVNNUfY3Caq3zdpFBqo1qA2GSlCjskuRozi4BfG1POerc5S05TckHG6X6pOTnQL+m9BRn8A7Quo7LLbTNWm+xAIOC1ZXEsdXgYtg8AN7vZzQbvv/CFL8SLXvSiLW378ssvX5DMqKoKJ554Ii6//PKD2sZ3v/tdvOQlL8GTnvSkwfsvfvGLcdZZZ2HHjh340Ic+hP/23/4b9uzZg6c//elbavP1yQYAn7/vckDq2kUgEHI/SvN1ZfZ2INbaZZYw1avMp0oD2ywo9clSrkMGyPxF
SIvz8gGs1Ed1YJC7EGMS1YI+Ah3sWo0ILEAGM+SeEa0mH+VEnMz+S3GRnn0gS6LsYLKmIVkbjVlnQXBhWHUqS1w7NiNQ5qAM+JGAh1azbpLRZ/tPRSg19b3WnIvl/FgSl5r3CbwPco2MRuy+5FjaySVy6Jy90UWsa43GDWX57ZvL35oy/WKU+nrD3RCISUEEysy2phKQt1bljFzPb+DvyLENxnIImcHHgRER9TnlGtH5nIQi7znu88lH7RdKKftW5jsZ0FVY9UMTCWkBi5EKUOMVOgBGzfK6NlAwEPrEmHcRFdOA6QdAaupFoI2E9V7qEq900t6KA0iZ7kGB7T5RVkgRxt/i+mdwfg7xXljupbTYhwf8cRieEy+5amOSCwCYfRDAAbYYMsk8gK5/myXj5TaMzYByDMfFgEk7YlqTKohQ7DURoS4A2XjzsSQLBCKt754ym5YI6KPIA6dkoCsNHKIIZJAvoXxvCvAbSxCPgb9BdziwD8BIPh1Dlt9U8iGwkFx4jcbENtnR6lst/arrn4UQcNZZZ+Gd73wn9u7di3//93/HD/7gD255uy996Uvxghe8YNtAv29/+9t473vfiyc+8Ynbsr1DsZ/4iZ/A5z//eTziEY/AJZdcgssvvxw7d+7ccl2173znO/jABz6A97///bjb3e6GZz3rWdvUYlmHf/e738VJJ520JaDLEgY+/vGPAwB++Zd/GS95yUu21K6nPvWpeMc73oH/+I//wG1ucxsAwN3udjf86Z/+KU4//fRrtL1/+Id/wJvf/GZcdtll+OY3v4nLLrsM3/jGN7Bv3z585Stfwbve9S7c/OY3x1Oe8hQ8+clPvlYZgB/60Idw73vfG7PZbMvb+shHPoIQAs4888xtaJmwLD/ykY/gR37kR3Cf+9xny9vr+x7/+I//iDvd6U5b3lZKKbPBD3Xs/vVf/zVe+MIXoqoqvP71r9/y3JZSwute9zo86lGPwo4dO7a0LQB4yEMegpe97GV405vehDPPPPOQx2Vd1/ipn/op3P3ud8cd73hHXHzxxVsabw972MNw/PHH48orr8Sxxx6Lth2nxB6a/fqv/zrue9/7bhvo97WvfQ0/8AM/gK9//esL/s41sf/4j//Al7/8ZdzjHvfYcptSSvjEJz6Be9/73lveltkHPvABnHnmmdsyh2zFjmrQb2/bIW60eQEhdQvEapbF/nGzKhe0T9AAQAKCLpJDznKUQEzFBFagIBAGWYQDuU8D+WzRMQI+Biw+Dwi6hcygXopjdOV9tSZ3tw70O0BhA6uzY9AzgxARCOgiIVBUUCAgVhr8dqBf2yfUAFr9HrAIAMYowe0+ASyrOrQUwSwgYxVFmKjX4vEWvAnKyqHomGWaPZ6BHWCQjTvJ9hvV+dtfgImYc3gjv+eAO//aavVJAMctwP1i3H5jGcFaZwzMoNlOyRBW0A9VLXWLOAjYZ1KExlCyzGEXACF/zo3Jp4yonHF+MGDfWOYGZYFMKWBBOtOki/ZjC+w+e2778EGQMXA4lRltQUKT4GEIe8L9JlHUgEzI2dgDSc9+CPgKOChykdTORSqya4UdBsj5amagRthmqW9lTBrbzwPTXi7JS5mNbRxY2OS9SfaH/67t7wB9R3auHOgXtO0hcAG/qACAMcnrlGjABvC1VKbqBiYgMwpJWQMGGI7Bv6TBWSISJqEycqIC/0HrgjIIVZhJbNcFxDZlAbh+GTNTbY7qY9m/SXlaxrmxHseAn9SygdY4LUw/+55IAhbQz7MMxnVQ68AZ9LP5sWapeWo1UANTDn6RAy4C0X6DX9cn+/rXv45jjz02v95flvHznvc8vPzlL9/v9v75n/95y23avXs3HvzgB+O2t73tQpDsN3/zN/PzO93pTti7dy9e+cpXLoNTIxsn5ZC7Zw4Z+DqHV3UJsnMAVVLDj7p5kUYnBjiCiSWhgNT3cEB54ZvQprK4A7JZZgbJNXogIC6hsJ3k9zKXGeOl4lLrmWIBK3OQ
3xhyHvwrnbSfHctxA1Jf2lpDJH6Xze2SgCbgzlRiggX7PdMv1/Pz96PR40BeHQ5Q0oxSYyQOfgsMwMLFxjg/anzs5jPo8wQvy5wU8CvAX9uLVP1G12NfK6DfehexNu+xb95hrhKfsYsiPaadY8EEYgJXhKAMP/urA+f6h0Jccz7XfgL33qdkACmkQf3nUFelFvQokcz7UpRknZB8H9mawPsktoaYABQMTOlhQI/cj2OS404gGOAHRKwkYQEK40vqsrWmbDIh8zlXgM9qHwLISZRN4Dzm+ihgfG8gjfn7rMfMw+PzQL4/lmv1vugYmAOWH1FWg8jqHH4Mk523WPI9LVkzpUWZT7/esf3uxzIT2MA+WxtlRRTdPkvtBVZ2rrBv+0UZTGhSg/dxkMAkMq+tsogrDirrqefGlW7wliB+lgF/Y8Bvam6yshKGI9qjMfo8u8+DfUBJepgC8hZkimNJntwvG/Aot4P1rZZ+1Q3bfvqnfxrnnnsuzjzzTNzoRjfa8vZSSnjLW96CXbt2bUPrhMVyl7vcBd/97ndxj3vcA7e73e0OeVtvfetb8cY3vhF3vvOd8dKXvvQagwmnnnoqLr74YjzrWc/ChRdeiHvc4x5bqsm1vr6Od77znfiLv/gLfPSjH922Glpt2+LSSy/FL/3SL2HXrl24+OKLccwxx2xpm2effTbe/e5342Y3uxkuvPDCLYGIRISVlRW84AUvwMMe9jD8v//3//Cxj30Mz3/+8w+JpfQf//Ef+MxnPoNTTz0Vd73rXXHqqafi//yf/4M/+ZM/wdlnn42nPvWpOOeccw5KKvR973sfzjvvPFxyySWHcmgL9qY3vQk//MM/vC3n9uyzz8bHPvYx7Ny5cxtaBuzduxeXX345Hv3oR295W1/96ldx5pln4qSTTsJnPvOZLW/v8Y9/PE444QScfPLJh8xsfOADH4hTTjkFz3/+87eldt773vc+PPWpT8XrX/96/N3f/d2WGaMPechDcNppp+ERj3jEloHoO97xjrjgggtw6qmnbhkcWl1dxWMf+1jc7na325a2mT3rWc/aFiY5IKzX29/+9njEIx6B73znO3jmM5+J+973voe0rRvf+Mb4zne+sy3tevWrX43nPOc5+OQnP7ktICIAXHTRRfjRH/3RSQn1a9OOatBvbd6jDR0aBfuu3pAAcx8TmopRsyx2VqoebV9pEFfYf3VgzGOPigk1k4J9CU0Quc+gK5BgdatsXZGKLOMCCDAAFBblDsfAj4A+c6kJocXoo6uXkllmdSMLwlCDUwRVK9jV7MAGy0IusNRHqTS4XXNAX5VgeF9JtngbKQfEwTRgAYYDzHu6PM3Bd1nME5iVRRV7yYTnoEwzKsAOoI/tKFo3ypIdswZiXwC5CgBqWewqGDS49TsJS9IAZAbx6jqDefZdGi/IfbDSAD8OuW6fyXWm0KD3kkAjKc/EjlE2CsBlKZoJwG8he5hIM/F1c9q2STB0dCybsfzGdiDALx+HB6w2C1RlAK1kSCc7bmN56BgxAJCing8LcmTAdsgSHQSfDfxTpkLUgDOv7pRzXqkUa1VldkHO5B6BoOT7ZCrIYO+Pgqc0OlcD6dXx+bFzU7k2uPbkNpFKhLl+JidBxcyotK5dTJKpH632DaBBP207hpnU3nzGdh8TktYpNBmnPrmpzgV1CAq+aZBa6iIlYZlwGgZzCJocwWCuF9oxFRaPCTZgXAZ5GgSePPhn7JA+DgG/aaZfAftiElZDH9MC6Ac9hgz6RamNVAdGjAr2MWfAbwz+QfuOR8fqA1vXphkj8nBuHwCOPfbYQWBqf/bsZz8bj33sY/f7nVvd6lY4+eSTFxy4rutwxRVXHLBmzNVXX42zzz4bxxxzDN797ncfsGbC3e52N7zkJS/BxsbGtjmz1wvz9+NK7onggLS+F2ljXWQR7fNGFygxChOw78U3mAHUC7NP5jUCRUalzCiJOQu4F5Tdx0Qqh1tAdC+Py0RglgQEkwLM
76vfNsXCYNLvx4TWkiZSQoji+1W5Zh6pX1hJ8ILlnp0Z+QYGuHvDgF0/ToSJEaZzHdjqJMtcmZg0kYHQc0JM5mMVeefxoRBZjTpk3zXLVg6C4cm1Txk7/pzao2cdefPAXwiZcZ9QAGCR3g5Dv2d8/BOAXxeBTpM45jFhoyt1/ATs6x3Lr8u1/Ox+RyTMvuAS/UKQBJlQMVabgNUmiLSnSnwGS4BxjE2pDz0Ct0c1nmNTIc47iAKkvt+IrCfX9YJyROr7XH8ueV/BfMLRfR9WH5fEj4+wxJzFMZzlEfUMEpDBPwHnUhYpCX1C1LIAAAbXioF8BvwFlfoMev8DgLVWygbUgUAUQCRrDOg9P9eC0/O7P7zb2u4UyPM90ZQDfNJNUECbaARouzEFUiCvqkGx+MoefCQD/EYsvxRqUSZQWc9Bsp6eJwH4bKMTfjpGbQLK2sLXFXRri8EjUHxHL21vj+pTWp1ISWSrB/OO9WtupetfA+WiJj7Jn5zzJpHWNVXAry8+o/xmGujz2/UJYtYa1jYQqXSx80uZzL8ypYRRUtpgjazPVVq4JEO6ddOUdPy1ZNc132rpV92w7TGPecy2yh0SEW5yk5ts2/Y+85nP4MQTT8Q3vvENnHvuufj0pz99yCyb2WyGD3/4w/i+7/u+Qz7mpmnwe7/3ezjjjDPwoAc9CCeddNIhbcfa85SnPAVPecpTcNVVVx00Y/ZA9tnPfhYPetCDcPXVVwMA/vM//3PLoN/tb397nH/++bjLXe5ySHX2xvbqV786Pz/11FO3xIy53/3uh/vd736D99773vfiec97Xq5RdrB24xvfGJ/61Ke2zB4ye8tb3rLlbZjt3LkTd7/73bdte3e+851x5zvfecvb6boOL33pS/HVr34VX/3qV/Gtb30Lp5xyyiFv7zvf+Q5OPfVUvPzlL0ff93jc4x6HG9/4xoe0rTvc4Q74y7/8yy3X4Iwx4jd+4zfQti2++MUv4v3vfz8e/OAHb2mbd7/73bf1fD7hCU/Ytm391m/91rawGb1tJ2h1/PHH413vehd+4Rd+Ad/61rfw6U9/Gp///OcP+d7zIz/yI1tu06WXXopXv/rVeMYznrFtwDwAvO1tb9u2bW3FjmrQb94lJA0EBCbMu5jrVOxogmb5SgCoZsIsJQ3aJtR9wo4mwAeZLRNR0CSCfApZXO4vffxAq11gKFMyBgyUvWRZnmZJfyesOTlVqZ8riDJHxU3+Xqu8L04Aa40cqTslTD2T/+Ekj4NaKCO/KZDI+/DIoco4CGwxmFRqSIKBnu036BMOUscCkAXwQcpEDYA/lRAqC2q34HALa6qcNKeCQKjq/J5tdyELd8z202AWuEIkFtlODlKfxgcKiIscoT9uvxCNw/NOKZb6ff69cuAD4E9Oish4ZrDS2BV2bjxLbPzeVN9uFfAbgX2DNhuso3JhKXYgDfBSquS1jhnJYg7DPz3PKS72YQJAnV0bIQeiIyCgbt3mvqFmJv1Hc2kLUNgm+zEaB3TctQpgKD/btSWYOiFBCxd0MiCaaycBm5l9lcSVOGjfaP1QTR4QNm0EtPYUB1am3yJDb5w9bWbXLaVyLfdxKKuXvzvO5LZrnwQA9HNmkeRLA5YhALBG+Xw7/LwyAAPd8+jaaICfMfySAnnGVrQASe++P2bzGeBnQef8vgKHZj0SIicF8qReIKAMnV6OhxMBSFJHiW2nJZA1PrYjAfhdV+3GN77xQTn+97jHPXDllVfic5/7XF7Q/O3f/i1ijLjb3e626e92796NBz7wgVhZWcF73/veg8qW+8IXvoATTjhhGZiaMgugu8VWaluk+XoOTg+YNNyXBJX5ut5/db6PHRCrIvOpiQ1MrAkDMn/ElIovtolJPWIBCvuUnKxn+dxs/HlUvwVIMrU75pHcz9xcqolfSlgR4I+SzNnGjEsjOe2x/+PMAt02KyakDFKSBuKZKCsZ+Hk4JZdwYcDfKIg+SEg5gKWoMtte6tPa779H
KnFqQB+Qnw98jM3uq8SD+mF9KoykpPOvyeVFZae1mrCxof59H9PUZkFucg3K8muCMf1YWJDuO0w08n0masFNHUKu/cwDUHBTM9DGuakLsuvZh9JHlD7Km/GJN0Tqvws5C4CWv5Qx0EOAa4ARGQAiagXGa01uBCT5L7CMNZP6tDVUrgun54CVgRmrIrPNCaIukGQshCmf19mUf2EWmBD74fWZJVjzcU//VvxS9RVRgGj/3Nf0G/iuDuBLHIY+vJ0HYHLdt8A60/0ljuLPDg6eXVvc85GNVVA8EC2+bl8Aa11LGoh2KBaT9KvVaxZmsSWQlXFnvpXNQ1NA32C7dthJEhlynVY1IgUE4QC/cXLkOEF2IilyEoC9AdvSr7ph23W9vtk973lP/MM//AO+8IUv4E1vehNe9rKX4cUvfvEhbev+978/6rrGz/zMz2y5XY95zGO2vA1vxx133LYxau5xj3vgRS96EZ797GcDEFbMdjDN7nnPe255G1O2VUBmyg71HN/pTnfC6uoqPvnJT+JBD3rQQSel3pCtqir84R/+IX7oh34I//2//3e8//3vxy/90i8d8vb27duHP/uzP8vSuZ/61Kfw0z/904e8ve0YX29/+9vxxS9+Eb/4i7+I8847Dze96U23vM3ttu2cy7cb8DscRkTYt28fAAGKf/7nfx4f/ehHD5hUdLjsmGOOwVe+8pUt11S8rtpRfVR9TKCYsDaXSWXfvMsBgn1zzjI2q01A2yfsqANWKsZKxahZaofMgrxuoyyKU5BFT8WSOW1BH6Q4TDcHChhidhCLj4EkiRaiz7IuxnzTxZ4PP3EI8jtb4BIh1JAahNBsWc2gjkwIUQJbHaUsFVUH6bM2RsRYgt5jy2wX1looLmhSFn+a0ZlE4hNBAyihEjYXV0AVQbFG6uYlWxUAccxg3mB6m8g8Gtfj087Qzwrok0GVuhHwx2Q4ibTOHmdWngG5sq1RgEs/yyBXZosp4OffQwEeyvm1PuThgtZnqo4WswuA3yjwkM2u1qlxZoGjEYtsXH9vM/bp5PbGgJ+TRZoM9Nl+8gKehPln29AU9CwJRSyMPAhgl1vhwczOXQdR2puy7CdAndQFpI31wuhsZmU8hDBgffrzPQ7AFClVbclYWtUHY0zaLksxFTkmz14lvXFRM8NAOtYAaWOUKhDoZUiTY/qRskjJjUcmRmXnydt4ngLga9f0VCQ0RbJLgohjKT0TdfMB20V/ZPSbTYJ7HgAjDAPX9pxBkwE+n2leGImFrZjnsjjMvh5nYtt8Nwb8xnOgBan6mACdO6UPSmAarAHYSFPTljvuIQh6bVrECLg9DNs/XHab29wGZ599Np74xCfiD/7gD9C2LZ72tKfh3HPPzZlm3/zmN3Hf+94XF110Ec444wzs3r0bD3jAA7C2toY3v/nN2L17N3bv3g1AgmIhBPzlX/4lvv3tb+Pud797zhh+6Utfiuc85zmH8WiOUnNJF8bkS/N1pPk6+j1XY371GgCA6wrN8ZJFSVWN1Lagbg5mmadIpTLzfTA2QC/SnynUqJod6EnmoEQAJUKKaQAUZNANWscsQth+ABAluWt/AfCiaqBzhJsjjak7JwEfagaayKg4IVVcJD+D3gNjp+w9GjLCvY3uk5RiYb5DgJJACthAHlMS+fSYElKw9wrrxnqAtM1EwvQLBFSEwkb0DJjNguIK5hrwN2mDBCK4Saxy/lPxiSSRZQwYFkCrz+zsUs+vjcBca/itaz2/VoGmuQP8vByrZ/gZcQ4AqjqAA6Op5C9khp+c38FietwnMbr7txv3gcF1hcQRKcQM9nFTgetqAAKW7Yg/T7GX825+Vig+k40NA5uSAqP+HjU1dedavTnrpqh0sN6nRPI1oO31u469t6Gy1/bag6nW161eeBvKBtwIEXXoQVWQWnAQhqax8MbgpR2jN4b6GrB7vfgBxuzN1zIUwHag9iQ45HxSqpMkFZoKhENas99XmYRnVZipKsefglOF
yP3MZX+9u47i4jVlihxUoYz/LM25mEwofeXmjlFdc+9j+iTJZMDffiwQEDVpc7Obv8wnyi6OEKZoFP/LZOLHIJ8d7di/28y/kHkcA1UHmfc0SQEe2JtIPLQ10qDkgbL8+ra8jp0kw13LdrT6Vku/amlH0u54xzvijne8I9q2zUlM19SOOeYY3P/+98fZZ599GFp43bJf/dVfxcc//nG85z3vwZVXXnmkm3NUWEoJH/3oR/HDP/zD+J3f+R185CMfwR//8R8f6WYdFUZEePazn43TTz8d73jHO7YE+t385jfHxz/+cZx11ln413/9V1xyySVbAv22am3b4uMf/zg+//nP4453vOMRa8fShnbWWWfhn/7pn/D2t78db3vb2/DJT34Sz3/+83H++ecfkfZsV63C66od1aBfU8nipleWn5kHqWzhajX+AGNeSC51IMEiKg6ZPVIkTaxuXQEpEkVhMk2BHqPFoAFIufZLhIpkooAeJptUNQKO1U1ZBCpYQVU9qDchAR0BjVKKCMxATOh1oUxICiBIqMyYPX1KIBACB2W7TEuUWECgDlqbTwP10wH55LJmJQBAoULqKz3uDlQ10hftXI6de3kv13NzUjjwWbq8+fu2gDbZTgN2mhUNpjSIys5LVbM5aDU2BwZO1RrLQF8cCmLZ1jaT8hvUjpkIHPh95/369lgQYtD5ceG3g+PbX2bMOJvWf+RO9LgPxnXnJs3jihhdL6GwJpIGQG180Ipmj+bgjW7DgL8p5l83L6xAN56Su27yYxgyPNO4/WOwrx+Bed28gH0jkH4zpl+aC9DIFsCpGpG/M1aigpPJwEtlqiZjJNh5JwZYmSVTzNL9mYKExNKGUEk/91RA6vxVIiCV+i6ecZcwDPpYwNAH1H03jo3ZQD4JwrIGLO2RqMRFfTrAuIbMZsEWX8+Ldb7NcAEL6UIYPhEcodKcms+fNqvpR1rLzx45v7bECDuekr1eatYs7dDtLW95C572tKfhvve9L5gZD3/4w/G7v/u7+fO2bfGlL30Ja2sCPn3+85/HpZdeCgALcjBf+cpXcItb3AJ1XePCCy/Er/7qryKlhFvf+tZ41atehSc+8YnX3oEdBSb3VE3IyAxsnRe7Ofq2Q7cuUuShj+jX1orctrJd0nwdkQNCM5PAvFmM5Z5MBMQOgSvxt2Kp7+fnmvEtNTBlKczxNTi2BAHybb4SJlMafUNkPeugcITOC60C/JQk05SINakJAKostUejq33TWrrG3rauonJ8MucX37MAfvIlm/cMBGEyWXoMEooOiv3iwQizKb/DP5/wNyYVAA7inmRJYwlFjtkkmY31ZxZUwtSek/sLVQHzWKU9m4q13iGN9plym2mqjbFHnEqYyYesgJ9j+A3Yftl/GCVW2dOxD8Wlv8Yg31TyjMdxpoKlAQLEGcOqCSzjvJJxVEfZT80JQESfeLBuyofhEmPsL0ZkVr0x/TIwRCjJSgcYd8ZiFcBQDsau5dyneqz2Z68XalrrdZSIgKoCJVUU8T6YqXb45DXzn1SS34Ouvq8nr+DJayAO9z8yD/Ql/zut5zdYA0122mj9M/4Y5kuRAqhAiISgaLD5LsZ4NT8nq7Xo7GVJX2OwrygujLrCjdhx31kSgy0aDSC2c4pooJ2bq/TRyxIvAH4+aVbXLvur/760RVv6VUs70rZVFsfLXvayGwR7i4jwhje8AT/2Yz+2BP0O0ogIF198Mf7+7/8eAPBjP/ZjR7hFR5/9zM/8zLZIJd7kJjfBxz72MdzvfvfbtvqKh2rMvOU6lks7PHbKKafgGc94Bp7xjGfgK1/5Cv78z/8c//RP/3S9B+COhB3VoN+sDghNyMCeBXWsWH3gsqjdO+8AVLnQeEwAKkboE5gSVito9qMCfbawhQRdTHIPUYMzltnsjPya1KUgJ10ME7Q2oGWLs7LiamHrmYSlB8GMGTQsQO+ySVlqzBBZHUKRiUqQR2jtjT6m/B2r7SHHt2h2VJQX3SL/
Yot1oAQGgi5kKS+iq3LsxBI47FTWpm5yvRPJhLaAyVCqc8DgMxqNSXNOsKNKBm+Va3X4enupnuV+6x14kEaRFbsZDCRsdP0ZLVMfi4tfH4QLCnMRgIUl+kRAjsZBIWARnJyqyZcmAkxAAeKmsq/9b8fyOblBLAtpKq8tQJIs6BCqzbcNAIFLPRRiiLynSn3aNqOTeGsADqFghSFIzY4R4y9LJ/lrJPZFVkl/Y0AgtDZjBv6AMr42seS2ezAAX65rpSzAMUvAgMbYCcAnNSIVhFRGYjL2H7MwB00uz6RqFezL0qQd5yDboEahG7cD2VVmqV3DfU4eCKGRa5cgsnoaLLTAz1iSy5gaHuCzoWfsBAukZxad+32gwjgwVk4dCLNKeHUSpE1IvVyHaYRGWiBqMybh2KwmK6LuOwEtDIjjHHCcYjpbgogH93ybx8dRMQ3mSjYmg2MxHgnbTBZvO7d/OO3EE0/EW9/61k0/v8UtbjGYq+9zn/sszOljO/vss28QGcJbNmYB/izgHIKb9yJi26FX0C/OO1BgxD6iPgbZj0l6744rM2E9VU6iLsUSfO/mqJoKUZFyY3VlgCODcGoOILEEpfE1WL6acuKCSQkCwznKpIDrSKj7hJWKATBSknrJgF7D0aQ0FeyjwlJJo8D1psCX/54D/0K+X2vAXN/PwXOUXBqfYFSz+B7UzXOtKy+PN7YsgegYfpZk4ts9qE+4mTmAz+5ZB/qdgVsRxtS2IeUkmA2cdfNLYEJTMfqYMO8jQhCfNw7kPQX0K/5/+Uy2pbUevW8DlHs7gNRH+Rv5VRR4kQE4Ie9pyh2kvkNugffL7N7t/Pmkx51G4N/YJhPKMkiuoLFKxvZJxjJHq6co32tjBMBoKeU+MnlPb1luNVq9XMJKsOTIVJQ+VF40H98mwJ8lENpxkN0bUwGi7Hgsccb86vFhmypCApBLH6QIYd+OFBzG0p7mzxJlxl8el259GPTcTN6+B8lYun9g4LPm/fvfLHy2H5B0SpVitI182cL8E0ZMPWo9/7IZyozghTw3ADzy1cZAX9pkRPrbfxr9nknXQT5xQ88nORBvcg0yBvx0PjPmn6nkIMWFepzXlh3NvtXSr1ra0W63v/3tj3QTrjU74YQT8Pa3vx3/9m//dqSbctTY85//fLz+9a/Ht7/9bZxwwglHujlHpZ122mnbsp2TTz4ZF198MR72sIeh67ojJpu4HTUsl3b47Za3vCWe97znHelmXG/tqAb9jp/ViI1cyPvmfV602oK/cot/kfvBgMExC4w6UAauosuw7pNI2FjtjIorhKqAWuYCkwNf0sSC18s/ZYnDFIE+IKUaFBoB/qqVEhDLP+YM9g3qyRmYZUEz+zrJItyyPk06x8A5k8QLaXFhZ+bX/VmGxwWvfQ2ZgZyWgQsWEAsK0gGgGYCuE9BDgRKaWqg5sGMK2MuAp0pKResTk+phZUxyhZ4qDXQkdH3SeiS9BJmchI0/1mtqJkdo/R1YAktBmZb7zXrfLBs/B4Q0iMbVILAw2N7+mH9T7/t9jiRGF9oweu0ByDTVltG+0uBjY0RQHuMFuA45ACasVmG8MgepdVnVwHwd1LWIGxpM7uZlfxmUU/APbQaMM3uOg2P68abBmQIkbgLwuedxLmBf33ZyvC4gmKLIgAElOMh1lVm8qVNpT2uH1fJkVqAyLAKAyrZhC9Bq/202bDMoaONH2TWERvo/dmCq8viV32iTIDJcm5kxZXzQ3Nga/rl0ZQk8jRl0syTHUDMLQyHqdZT50MWmgg72DWH1yWQlZfYIMSbUAHonG1YAy+nj8sAkW5CMCqMvaFAtB+rZfuey2VHmS9+nS1va0WTUzMDHHF/ecCw/SXCQOTLPfzEizFRqvK7ATSvBX0BYfV0LNAr+NR0oNkhR50C9r3CYDa7NIvsLQOvfDX2V4u9l32ScG6P3nN7VIY0Rea5qY3RgiLCg+iQASAJQKRDFpAcDyow/xE7uJTnJpSR6DXQn
rU/TyEecuO/mZKGcAMR67KOv2u+jY8HEPgfDx9tNFLMMobzlAD8HigykvAFsylgctA/D3x4k089sSm2CSXz2jS6iUSlPA/1WY8A+KFDY6dzLhKYOWdrTfmPbt3tVn5CT06SmpKtHaW3T+78BgMDw/j44Dv1O33bgbg6TY0ztXEBz7195P17bUGrVJlUWSRlUG3Q3TfurA0hKE3gAAkVZC6ACQiy+jiT9BbBmKMr6QIBUYMKvz31YmH8hAX0iTYwUXy8BRRLTr0nsz9A9FPCHoesSFMCRUK5jViArA1vmWxpTc2otZq/HjDznVy/I1aMAfp6dNmnEMCRrUz94f2zHzGbTfoqdJjY24jdWkLnWanibOkVeC3FJlrAmqQ9XKbiHSs6vjaGiTlCkbnNzEvIadwz0eVn3/dnCxwkLUKYlQgWCzJMG+MV+ej60flI24GBucyUSliy/pS1tadd3u+td73qDAjq3ascccwzOO+88PPGJT1yCftcBO+mkk/Ce97wHe/fu3baal0tb2tKuuR3VoN9KxYiB0Y+OYpzpa7U9ambUobA42C1++phAo8VQShL8Zl0RJpb6fh6cy/JOjMxsGkr6yHcJbpFqWd4pIuniJ4M6C6AflSABCWPHAgd5ITuxKJNgmcnWSdOCZsxbRjBIFrlhYo2bJZOobEsCa8gyLWYxFVabsSDBjJQCkFTms4LU94saaBkv1hy44YG+6ECvhb6wIApXuS5HB5HpmXcx142ZawZ1G4tM4FT2quUTT2VTW58CJdM+sAEPArAiEoiFUWn9uyXzgQmf6bsZ8DdgEKbhe56B0HflvU2lwKayo11gz7blbSRHmaDBDWXIAkBiSJ2/2AnQRzHLfFqXcQiIXmrT6ve5rF6T2hyYG1O++y2YYnKyIqm7WRBvEzafY/KlGBFHjIBxUDD1MUt/ybQQAcy120Kuc5lYma+VtDe189xGYzAKaGlsRpO43X/WUpYIBRQoD8P6QhRdbR0HoJHW6iSAVP7SB+I9ww/AIHDe9jHLgnWOUQNoogElBdMSWiCzrIEo8srQmjSQ7PODkWHI0mDsoldIWdqK0/63keshUal7VNtvqUgGDhgKGDL4DgT0MQib8AUOq6WRTN7h2P7Srp9GzSzX8ZM5EJnlZ6yoPAf2EXpHQb8+R+ojKgBczZHauiRpGNAEACt6H+kDEBS0CuXeerDmZXUtsOwt6o2FR6BJTk7oi8R5FDofOBLaXny23m5fGeBIKguMgURkTnKJ7j57kEbuHp2TYUaMQfLbHCTuGPDXYzNfwLaTEItygn3NJSflesX+fWCRHjRxfGnsl2zBvN9eqWJH4DRg+/Uxodf7iDSRFvz+yvv3qbAIExWgcyz5fbCW+ojIEQEF+MvXx1RSm/OZcm1dBcwM8BtPp2Oyz9R9Zfx+gMhuJnVAewjrStY/CZGlnnlkQgyEPhHaHmhGaLn4+MMdZpcMZX0kih9O2WI/TD8mAe5NVUDAqrQgGz55vBPbFP/Gravcvmn0/bHfmutPEruSDjoPpOIXjLcz2K9ubxEY13nOH9Toek6x02MUQF46SADjwTyZVU+kzVNMWs+arPNijgcJVyLxiUlAd5z86dnR8nrzpYwdoVfZOaA5ac6F9cf4/I0YywNllAkp1WvLlr7V0pa2tGvTZrPZkW7CUWWPe9zj8Hu/93tL0O86YsvzsLSlHXk7qkG/E2Y1urpGHXrs6BlrlSyWApcgbpZlY8oMk5WKM2MNKJnACVLYnJEABua9LKTmfcpgV8X2yCqXwrk+WWbxjYySkwvyixb3elxXDXCLSQPRRtmptjDbLGhg2e9JF7FT9WGSr6Xh1ms5gJ1fe/ks618f7Fa2FnVIDCA0RY5QF+M0AkzLjgsrKREjhhqZURaaAno5wM9LdXYWBOqANvaF3WcARBRAou0LQBHHERUgg8Bjho+8RwsgQIgy1gJJ3Yyg0qqIUmeoTwISI6iM6xhEm+qL0phB30AzfA+YTe/A6Mw+
iF0OCorUJouu1mamAJ89twDgWF52gSFKi9BGlrZlQKhYBnqrRK4B3ib3w5W2MYBXdyLu24s0X0dq50jVXgmoda0Gk9sSUB5JfkpXONBsrsEUlgD2lMRnBvrgQEVl9aUY0c87ZQGIvJ3sapMAV2BQLzWATMaWI0tQ2rEPfYDHal1mWVEP/jkG45R5uTZwQDLGLLGA8CnlBAULFrO7fpkk2EP2nkq8xqTSegsByeSy//X6ikUOLKah/FELYfiFBKwjolZJMI5ynXUxoVJJqgBSxt408CdtRS7ZZwHKSFJvaByiMjBvfP0aQ1e+U/rCkhzMDoatt1msywfPlra0o8X4mBNAu46TebnrgPW9SMyIG/sQ2y7PgXHeCctJnxMzqlVh8DVNLXX99l4NqmuV+KxBXYtQNwDmCvzNAa4Qksz/bUzZP4skLD0GtO5XuZgG92L1T3yyUkzI89g4ccHXketVxrAOnAHAWhlPJjUaol3jRbIujO7Bcm+Ow4D1yBZAhFH9t/1ONWNAxQLnQGG/WJ0s+z4gfpmxoCYYUN6nyKw/L+G9WZLPVPv00XxXSzQxCXqTVR4AVXbOmFApECXgBWO1CehUerKPCfMuZmDPXputNgFB/ftGZT7t3Bq4KwoeQFM1SKFGCiqt7aRsc/dqUs9Y7jMC4D4iohOZ2yD3eUnYYWHo659np8F8QWX4lSS0NPDn7bWZ3YuM0zYG+xbuPQRJQKOEOpXEl1nirPhhbHYmQsuxMPNTyvLW9h1vuY0GDEHkZzNIPOHP2v1aEolku5ESAmspBWjSjvu+jTRyz+WNkoBmNdJBAUAYqqroo//dQrKkY/lNgixTvjkzAFbfcvH68b+1a82uPbg1oLEiM6BmSV/2GkAue8ABvLJaFE3c9QX9fiCR+p1VAYEiQl8yMX0tZZNBNoujeRGw5IZFiUkP6C3AnKNxYucwq6AYw1DlORcAv6m+9mvjTZMTl7a0pS1taUsbWggBr3rVq3DVVVcd6aYsbWlLW9p1wo5q0G8lMJpaFlqtgnpmXkrOS8qNpdvsu2YJCb3Kw0UDuXJmqizJiFKW2ymZ5lzkbbxZBirtf8GysOT0gRYP8sGtHVOpCDEOFljQC3DBAbdUMxBwvHqzPhkHFBbAP3jAb6LdXOUFbF6AT2WxO1BpQarTMfg8yCe1T1IOmvTJ5DvLe60GetoYsdHFBWBiSlLKjw07VhtTNl5MDlbeT6YnqGxQlVQNBFtzJz1OD7wYSEwRRSLI+sct6Ac180bjwY+JwfkzWUloNjE5Fl3SQIOCr/49v58xyJfPA5XM6HHvWTvGDAgz+/4C+JccGy2PHWM1dgL8aX2pCAjgN18vAJirg5ni4n5Lx1yDbH5fL7DvC7MvRkQF/jzLxZtn9w1a4+uWx3hw4N0mn+/vsyzbtkkNmPzdifclzCr19KYkMIesuvK+AehWkwkoASM/Pi0zugIpI1AZN5wGCQmDdo6AP5t3ddRIwN8mobiYyW7zszyX35X6pJIM4hnMBgZO2cHKXdmxJiCzrI+E+TpOh2v7S7t+WqpXkUKTA9J5vhrNhX3baQJEBeKI2LboA6PagVJvKTP91vVRpOuypm9OTOkQqirXo7JrMUvQJSzUGgU2v16nLLP6UvEDci25lAbzx5jJ4RlABhZM+nxTZiDB+D1g0mectKkEMc/Wt98Zs94V1Mp+yECFovhqAArgxyMg42BBv/1YZpDD+5LOz2SSJCpL0NOTGhNrjUVkyc7M9BvVoTOgTx7Nb6Msl2nnuE9pAMBRVaPIfDJodJ6m2Pyx7cCo0M9bEIviSGw7hNCKtGcsqgTWd1ZDzhQo+jgE++R4N59UpxJRxv45UAA2Tip3GxNi9llj9mn7lLQmorwv+xefXmQ+i5z/lHn/MyZofeDpZDbbi9TiK2y/oKA+jZIPF5J9FFwbJM6VZssuSfdLCgDatTket5skzdmmNvW+/O/CSAp3DMQ5oCqrwZCBf1AB8+E1lmU+c4NiSRBz
jNyptudkpbzmLUy0qWSncd3T/Dz5mqoeiKV8vfkzMzU0zM8iBfv2mzTlz9F+gL/Jn5ok7yEwdbdqS99qaUtb2tKu23bWWWdh9+7dR7oZA/vABz6Ac84550g3Y2lLW9oN0I5q0G+1ZqAOkq06CvgXkIaViTUdsLC3bVFDUMYI6SISlm0qwWcB+so+IpBrAhr4521yEayADYAFAGVYHN2C6CUjeMo2Wx9MZgA7Cz6IkH8zAv3ycSxmck6t5VKoQH0nNdQMOOJuIVNzEHCyLFwDl4IsfFsF8vouoU9xAPBFyGNyjKOYkGuNrXdRgT8D/QQQ7DTje6ovM7uP4GqQ9UPGaBTwuO0TVipGYul/hgQmicjSl6WdRGj0eCQzX8HQIGBwluUaA6BTwJll7jvgzcsS5XOZGUsVAkP3w0h9JzXdtBaRxEhiGejAJuArZ+A1brLYzbhLcoGoCdAyMw0cI8Ikbkn3kfsj9kg8l4BcO5egkdW+62pQ1SLNJYCcujnIpC+npGN9//jXU1KzQGH89QXgM8Cvn3eOBVB+SyHA1/OLQA4+TpkH6eS5++5kzcFR4NiCxuN95HqCYVATNIO4U4CfBtVJr2siBbCBAasu70IZA1MWkwP/TN5T51fBe8tjz3Jd1imBowTdLUBoE8xmUkd+fktpKOfmg6Imx2m1SK1mUCBhbgcu1z2NtluOY3g84/l4PIdbgHazPlra0q7LllZ2IFUrAPQy1DkldfM8JxrgF+dd/l3fduBaMhxSH0FdKyAfMKhPlbo5qFIGWuzlvtB3AM1RcYMQUw5kB81gGjP2LIg9aLcmCUzdF71ln8EDfnq/7pMxmVPeTkwGQtmdc2QuaJ1G4Fq2KUnpCabLgjT31Pf38/v8ETkZQqcAMGizPg4SrxyQMQAzpu4d+2PqjL7D6pPbXGzgT20yk2RgH6OOcteJgTKDT0CHsh9TeDDzLD8DAM0K0KZJY4EF1OYNYflVUmeXlbVHIUgCj+qHblbfj4KWGAiMbn0OYpbEpK6Vmmyetac+VdtLu7u4yOrzZol7lO+Di+MdWByNdg+LKPfywOIn14ExC6kk/QGoE7Axup8XJqC9XnQzzCImgDK/7tHn2adQMBFsyUWkcqR63O74bftjS6R32IUinnqOTBaSXPKTV8rQ90za84CwvfO9fWLjgB2rYHlWdhnJWCZj45LMdQkxr5HU/QLIgWoDtYdKlVAW1wdsQCos2TUpqDdayzmwzyeFAsj9IM/liffd4ighwm/Xm51bmbcLyBhsvegZj75vNwP+iIHkZPsN+AWyXPH+kuSWtrSlLW1pN1w79thjj3QTsl122WV44QtfuAT9lra0pR0RO6pBv10No16tM9BjsjWe8ecDQyZb4j8Dhu/5LEfLkGS3cAHKAjGR1KhIlmXpt01lcSXGhSli+8Aww3cq69eDg9486DYG6vx+9/udUXv9b6YyiL0thL7c4j6FCsQKbMYIpDr/RjbqssldfZMuqvzSPEqAxoF8fSqsPuujBAngtDFKbfiUBgCfMfs6Zfy1DvCbUmX0srCW7WxSRxnsiwmzwFn2KAZCrSF+zmBxXsIXqdDQyLH28yxj6bOAB+bZfC6g4Bf7duw2hvxYIds3kwQCuEKoKgEaVV4HfacSoGkRkCZClpRVeVUJmmHALh2c/lTGfGmHmAfCB3WQMjOhKqAwB31Umc8uaFZ+I0HirkNqZsL2iz3SxroEorXuHkZA3NgGNY0simVgYa7np5K9HcCNBJD6cc0+3c9A3rOPYD/3jAJSxFZDiCWYUzU5EE4qL5Yf66aAd5sFNcbgpa+JqSBf0hqgqWpyPVBjz44D4wbkGdvPGDZW2y9CgoboIyIBkYRBwBphZCZEF5W3a60AhBJpiso+MIafya8BEYGFHytbWZyzSluRA/FMaWGi8nX35BoQaSsmAQdrZfRWxuyNHajvFgJSrPNUyPMULc7TGM7Zwl7Qa2L6zF0rFjdJbtjO7S/t+mmp2oFUr5R5
WYEMkR6OA3lPS3QItTLG7DvD4p8CgrjX6DpN7uhA3TzfB8KKsP26iCzvCZTEBGDxutI8ggVZ4IgS0E66W5tz7P0xMG9+XpiaeJzlILZ/nOzMg+f6blqLd2o7k0CfA0NM5jAqLJMm4JkMYJi/UW3qf0yxw6XNLpA/JV8aOEMQBgb0SZPlyJLzCLPEeU7JEvzqZ1nSiK/3N+/iAPQrTL8wqO0nzShrBGPpBE2G4aoRtl9dI8xW0KtM7Zj1Nx7TaXSviLMGHc/BO+ZA18qf72fmDD52EVlS0075FMt8kICyyfvlPXe8qbDpwAmpR2bQrlQMjtIHkjCZMgjkgR6pA2c+MO33ejBgfLI++KC9tlAS56Licvzmi0wpiCRAAbIwXLdtAowTLY5HW2/4en7jOxhjum8Lw6/Jr5MvQzDyzQfsW0tq07nO1D6yvGfisp5KEagN/Cpy+paEB/XdDLBkoJRxYMAEQ4MD9UoS3uKxjteVvVNBsRwzW1fnrnDjdFwH3drChKysIyCgO6fMSJEgjMwR4Edl3rIZfCF5wa8hCEj7Saw7XLb0rZZ2fbSrrroKxx133JFuxtKWdr20z33uc/jMZz6Dv//7v8ed7nSnI92cpS1taTcwO6pBv5Ug0j81l4WKybeZMUpQ1gCj/JkujAjIspE5w1upLcxSJyElAJwQEtBH0tpcyaJMoAQM6ywMF/SAAwtTcep9ANmAG8MRojL8pkyC2ZJRacJ49npsXpJTflsWagWYcYG0qSDOiLW16fLfgkUZ0JnYhi5kjTVmUkddLBKdBvT1UY6vi4vnJwN4CVmqy4N9Vl+s7aOwABX4A2zbLrOZCW30tfuAOjH62GdZpD5RqUWWAxycwT6gsJkoETgC4IRWGQQ1c65RmBewKQ4DfTmjmEqfjYJtOSMXi+N6kA0etdaKyjsFYoTQlDp6E4DjmIHZ57FZgFfZtDyxQJPIr2ntpVEgYBxYyJ/lDHDHArRMaX0/GeDZm2xlBw4BUWvnAJBMfQUBc22//chgLrDolO1HzEitXAeJI6iC1v+z7H9eCPT518SM2EeEMGIWBi5gIAcnJVaAOgP8BOwbvn9AM3BQQcTMmuVKAlPBszYdg2M/cQUiLNT2s4g7E4E5gZPUvAxx/5ny4yxxk9Gz65+TgX/KtmbKLAVgmJww2dbRbORl4zKAT0XSeQD4QYJy1M2H16JaiiTjU5m5QVm7BLlGo7YfgnuCQTngqreQpS3tqLM8V4QK6FuRFXZz6oDtpPPjgWyQcBH7zPZDiqDYS5KH+g6Bq8xOEml1ACmpbNy0X2Tym3LbLQkxSf0ou98fjHQaex/JzV05ecp/eQTQDVh+BwP4Heg713B7dg/PbQwGImxyjjyLyb2XWUVe7tN939qT/FtxsY1W149JatoaAGjzdKWy0Ww+kvpagNRFtve8vKfZ2Iezv4pLwl7+7oCljXwfpBDcfVcYftxUoPXNx7SN/9hHsLJe+7YDN9Ug8Sj1vUh7G3gD8/lTro1nxknue5vJ1U69X/z3xQ8NZGW9fkhlNAPL/dxYlXbnijH50noK+Jnk/dCny/1gyS/6vns6AHGIJHku6W+M6ReTJMiwNtaUIkxxYHhA6reQqEJMSnfqe1lSc+L3di4OxoxZlkjlOIEif2+S91rKYLi25Azy5fqD1u7YFXCeuDD+/LpqdC3Cs/yAkrCW92e1EhWAJQwA2PHRRgyTKGx+NL9+ipS8KO1JA19LAEiTT/eKCqP1pR1TcoDe2JIqoZhUceoHoCq5bUwpVyxtaUu75vbGN74R97jHPXDGGWcc6aYsbWnXO/vMZz4DAPjDP/xD/MEf/MERbs3Slra0G5od1aDfjVYDVndUyhArwIdfYBiI1saETrO77TE5YM4YY2ttzDWqrM4IIPKgJpoStKZVhCxYQ2Z0lOD2ZkmxU4AfUAAVD65MgSX5+KgspkxOJSEhOOBP2kqD+nsEXQxaYCaKNFfO1h4DUdbuMQjlM7+tLp3PdCXH
bCQDNpOTzoq59l6fjI2noJ8yN60mn9Xj8wCf9ZnvPwP5ACwAfQby+b+xWWa4ZYy3lFAHyYSuWRik0WWeSyAkAh0AiNQnQIgobETSxN3ACV3UjFfUCHWTz5GdU7PJgKL2qQVNvBSPAdaLgQeonFKpo1L2FzBVvSTmDQOe0+e36zPTewU4kAoQgpT0M93Xwl6GVkBoBnEj7QyNBIH7DuhFkhRVIxKlfQuqVoQh0s4lgDxf18B0RNL39IAOsHd3XLEHNbqN+boE7DqRFeVGtueD25ZhbDWuBsekQJ+xBbiuJLhY1aBmBqobUDNTWTEF+1RiDMyFBTjVzhGTUWQ8Sdh7yopMHETOqprJuKlXcqZ7D85znl2P+zs3JvOpZ1hAuUR5+MRRoBYQsHwctjO2X0xyrTMrIB/lWup1rEeV0k0WY6bFzHKzwXUzCnINAlDs6xSZpCcE7IuxyOzGbrB9qyGULFNfg0zG/DNANAGS+KEy0Da/7E8GdWlLu65aWtmJuDIDG1Mlxsym9uw+boTVzwqUeCPmgXQiqjoHq42pTcbcSVHk0yHXZGgqVCwy2lO+1Pi94suVJBS7TxrLr41RVQGM8TWs7WtyhsZqqgMP5OnCyO/ybJ4B0BfHvlXZh08MWwDRLAg+ni+87J1/PrWNQZ9M2GYAogNEcjvHgJ+CG4Om2fdNutAwJL/9GAGW+TOw1ju2ZBES36jWO0xMCayJLqFP2c9qo/pdCuatzXs0FWPelWMw/80kPjcDqvLhE0mij4F9zQxU1ahmDbq6QmhqYfm3HSI6oNV7fZ9AIYJdSYEKQJx3iHWXk44W6u9aolvUdYkBLAp0RRL/PQNfU232m5s+rEmTvDUB2hLE12IibHQxK1iMa12bxOcsMOqg18MosXBynePHqHskYlTEICZQQi45Gez4zWc2EMmxw/oEVAq0EXORz5wyL6npvzOWxyQedOjCFeTWOkAZ+x7sMwUMAAPVBLIko1wXO4I6FJYf3HOf5DZOxDP/w/y50CCFOgPI0k8F4K01YWqQ/DfuHu37lIpk8gFIzYN1bUmkcutKKkmn5l/lesmmpjA13wCOsRn03JX5UPrO+qokS5bfoNRFX9rSlrYlO/744/Gwhz0Mn/3sZ3HyyScf6eYsbWnXK/vsZz8LAHjLW96C888/H7t27TrCLVra0pZ2Q7Kj2lvm+T40aYZGF2G2CMpgEzSwTZpZ61Mb4RbfUWQhBSDSwHPSQIQGnmKMqINkJfZJFVh0sWQMjykQZGy+jZbpa8GpAopJEGszhlRMpLW2pBw8kbBu/A4JfqHmFmmABKMsUGXSM7pInmJ/yTZsoZyQKAKJQUHlvDxDDSXItgj0YaEu37wvrD4D+Ta6OAD8DMwzYM+f4xI4USAwlho9Xva1jykHiHoFgDezeRedPBSjDtLnUSWM5KeyXU6UmUpd1MWzBlcQLdhiclYFwKDR/qdqKcp5c9m/o35NqWROl/fLNg10IzIAwu9xOL4W4owTg3ccPLOgTaISCMrCjH7bB4ooOJBQwB8UVuKI6TcAmJUZIsHGoGyRHlTVGQDMtj/mH6DSc3VhCyqTkEKQGGbXogbQr28Is4Xn6FsN6oaS2U8hKMBXwL7QKOBnIJ8CfBnwW1ktsp7NLLP1DDwvjbTA77A/Uw4MFUZfqkRONoV68JlJm/nrsgDJ4zGJoWwekOv7eZlPZpG4bUm+HGMSgDwlyYoYbTOfksxAlYC8bENmU+YEQOsypRJHZh1YY/DPXzfy2skV72f4SXb9KBg1IaMnElzKCrR7jWbzB3/vgcwHxl7o9xPAPdy2WXLDdm5/addPS8CAFQwAJjNsc1toKmE6BUaYNaAgEp9cV4UdbTLGdT1gLqdeAL8Ue2ETK9svy9+lCAYVyWEF8fx0kv2vVOYVe559Ksfyi9HV8ksuwcWZgUzi68HVydI/uOvZpLLHiSVu/hgDePZ68p6YfajCKlo8MSNmyxj0OxDr5WDkSEcAYAYesJiIVuZY
z8KZ2Hb2KwuQKoAAEKLV0VYpyZRyIgkA9EH9ME7oGeg55fqtYeSUjF8PPhv5WINzk4t7Sw01bnQcz1twYMR2uK3UJ0REkNYYTGO9+HythGtM9t7f/eKa3kc849yui6DroIpJEmy0geK2lj0YM55ZHkm3xwCcVzp9f91EdlPalO/SUhoBSRlfqdRR1+36TScAxJWuN1xNaDfm7XoU8IwKgIQhkD2+Tja9asx3Inc8XgUDyJL3PhlOfAAdo56dZ4y/DK6PGH8YXtuDWpsm8ak1tgHvF5GuB7G4FnTPUxJwWa4uGRCkyVxBB0ufROHGprV83eR9oShB2HWc15gO+KMCfhq4Wfpw3ONlh8Z8HPRPqDLQm/vIfLUjAPotfaulXR/tlFNOwTe/+U084hGPwN/+7d+iaZoj3aSlLe16YSklfO5znwMRYWVlBW9729vwxCc+8Ug3a2lLW9oNyI5u0G/jKoS9ZSFhwe/gmC5VNUOXkOXpbCHTp4R5HzM45GvBZYtRF8GMFsJOaXthf8XAJQM8AYxSb2QSMHGrMJP0GUtOjSUUx24/QQFGEvDJFpebhQJsMZblVQzs6zsNWKUCABrDZSzt6ResugAmn/VNLAtiskztElgbAHypyHf6en1dFFbeehcz8LreKUvPZDr1N2OgL/ene52lUfcD9o3loey8+ef5c5ZII5NFHCO4F9BDxooEskKfMjgIrVditVT6HORKg3Hgx4K9nwNiKBmyY3lWG/HG8rN+t/62194sC9l+a7Kp/re2f8DaUKRyB2zRDLAMgW6CSTUNr4ODYTrljG5lJiYCeiIEknqE4FL/CVwh9XNQ7JD6WoLFoQFpEJaik9aayrgHFgK1+TcG+rWy3dS14KoGuhaRA0JVgzfWhcnXdujDXLP/HePA2C+NAH5cVxnw49WdyihoQCuzzILJYJ9lcY/Ys4OM+Yls8ERcavbVs+FzYvTgXJOxd9eRZ4zu9/wAg/p+KWEgexsJWEms1ysARL0WyrUHGNtuWB/IwHM5J7JN1t+2UVjWdl1aoJ9JglU+4G9jFCiBqSkh4gjNdCcNVjMjpTCI600Df8hJD5Sz0MucaMG9qLJXUcFsOvDwX9rSrlMWU/GnACwEaUmBvmDPlcnMdYVKAcBcr3RllpMaAJljqdMgflVr3TgSn6TjLPUZuAGR1sjMQeYyf43bCxTfCCiJU52qOFhS12ZznTGb6lAkvkNmqxQGi82/OWlKDqr0TXbgRgDbZsDcJEtPfrMA/k1J2hk4sL99WDu0b3ItrXH7Rr/LY0BZamPgz7zP4O9ThhiM25AiAjOi3t8DJaRECFobVpjjjFbvAxzFozAGeR1J7jWJ0VcJwXznTSQ+7TUAZXEW0DYzNR1jjFRiO1V1Bq1DU6PT5J0474qD6Q+rjyLhObJci3fUr5Z7SArCbXavGkt2bjZup6Q9F74DAVYDE2IURiE4oYElrAgzv3UXV1DAr2ZWudQC9gD7ASD3J0ebjPFZZUl8OSwpUjBWqhjWKdRx5tlewKBvRbpe1zNxxPTzzFUM11c00YcJKMmO7prLyYtxWFN78EOmDPyx+hkiVwkgcXEKFEynDAhCHZPRuktrglutZhAv5BoYKDs+MfYy6QtKJH42yli0BC/AEu8E+DM/zebBMlaxAPIxSY1G1jVnVlPYjBGNEWBbcP7F/gF0AeRqxUvL5P6xtKUtbctm7L5PfvKTeMYznoHf//3fP8ItWtrSrh+2traGd73rXXjMYx6DZzzjGXjoQx96pJu0tKUt7QZmRzXoR/N10AZnMCszXeoVoN4hi4PgajGgACMCKiWs9xIIGoN+A2mgti8BIY45E7xmkb1pVAYqRFsIaft8YBspB6czE3EiAG+sLWAYKB9kWepCMgMxsJodyAswWXBRzriUgNpcHjWjnmJXmH4mbwcsAAqyY5HGoaD9nCIIDVwirUoW6XEZsBena/UZk2+tjRncE4ZfxEYfVW51yNbLfXkQWZA+E9Mz+/qJ
QJHv5822HdOwmP3kPlNCDWMkCPCXM5nzZgv4ZmwE/77PnA0ssqKkgLItrK09CSXgYKxRYwH67SeUsdXFRelUL+tUxnlhPIisk8hw+ToerFKG0ndyFGwSkAeOQw2syDfqNcTIx80AAleoGq3j0s+B3lh5nciARpWci32p5+KlayeCT2OjlEotHpV9zNKh7Rw026HPW/D6XqSuRVxfFwmweZGFNGlPrisNeDfg1Z0C8DWzgbwnVTXQrGAgzekz0oEBCO+Pr3SeMp2rWq5Nk4DS7PBcNzOVceOBXntvs/OSazSmBJFakoBhzMBfAXZFkg1oWQJHVZQgo1NCKyDdBCsjA4BIaHsZj60CiBJUokENIaCAy1OAnwfRzZJeE30iVFzlbPJk7FELiE8FL02Sy9gvkYTtodn3FVfoYYCiRNISUb4nXJsWU9o0ULxd21/a9dOYhvNmTkzYcQyaudCeYtvl5AY29p8CfvXOVdBsZ5njtJap1QZMAChyfs0pAf1cA7oi6xxCo3XHJLEqUcrX82YugCQBDJNZpF4o8vxnct9m3tcL5EAOY/3Zc5Op67th4tQm6giT700Bfl4q3d42FlMGpYLbXhj+frydzdh+FnRXMIQmmnewVpKIxB/IU7JnSjugJNfpRennoJIYvc73xjL7/9l783jbjqpO/Luq9j73vPcyMgWCDAYBIZIWlAaMoEwKhCi0iEoAgYggYv9UVCaBQARaBW2Z5BPaxpZBZNCGBltQkMgQRgcEg4rYiBggJECG9+49Z++q3x9rrapVtfe59713z8t7L9nr8zn3nHuG2rWr9q5hfdf3u7zzSJzOxssaOaBzDr2LCJ7QR5b2VMafXdeltZ9Hek5rYsdzSEMA+gWoXwKdAEQmOMjPGvi2QewD3IxZRuQdXHAIhsmouX714WaNsPlb6Q/KbRGDsOZM/jXwuq2eqzTQy5qCgoc67irbz0UB/oQVBuR5fN54DnozESq61kyAHwzTVdaimek5AgTq2FEDPUBSaWidkwBGGmXM8Xnn32VM0gB3pjkIDk6ZfC4UdajzVWt9BwoW8lyOMa5QZAlYLakPOYdAfKcmuVYFsMjkCFQA0jUgymxYrohLAF+t4KD7rIFCR/nvKHDsiddwei3ARc5TTzFfLNXvtRTdiZJcn+V+c2TfGcM4I3rA9EMGRQPjooiU5U/rtZg2UQysmnEd27S2muz6aFbS84Mf/CDe97734X73u99RrNFkk10/bN++fTj77LNx2mmn4ctf/jJuetObHu0qTTbZjvbFL34RH//4x3Huueeiba/7tdZk67XjG/TrFzkyvO9Bjc1n1eUcCYaFpg/N4aeA32bPuV4S86WiZ3B+P8KS2NHdR0LvgaXsRZxEKvMmUPPnxSJOu45QD8gbxu0Av3S+BvAb5FMwm3Bl0iQAUgE+BfyEJYW+S2CfAqcDR5WRdiITdanAX9LjEUsbYgM0LENEjMqyY+Bp/7JHiBFbXZ/A162Oo/D3L3vJwxgGOfi2y8VnTb9nZTwPR5KlDxF+B4+9/bgPWXqKwTZ2lNRcM8vI037XYrzjZtV9nxfwAE6dNUi/5eOUpo6TJGNWXO8Kpob0bBmUet3MG875Mm8cWmG3tnAgROOcYkcBERB7vh4TuB1xSMBfHzNIExEReiTJIk8MHMGx28k3c2b+hQ7oiCWf5PqGkayFXqPA4NquHVGARIn7lu+HpskgoMhuBpGqi4tNRO85d6CwGly74DL6kFkuCvI1Jn/fbJ6AQH5PAhX8DCl3i3NlnhKHxCrhIHFX3qfC9CjkPX2W81T2rZXY5esnFtdPfWtYJkKIMt7EKM4sSr9svYMTYM9RlByXma0HR4nFDGQnO5AdoNa0rxMQblxZFd5XsPz0/4O57mq2X3QNCB0AvpYSw2aVHJ7MK0nqM7rUIt41iC4DfkRx4HybbLJj2QjI4ykAOAfamMNt7EGcz+GXHfx8Bucdmr3z9Dtl+eVcpRnwS5Zy3vn0f+x7gIRF
p2uVGNIaxzJMUh1X3FI5qEqBgiwNXq/BSsCPkpxhcmi77Ngm8DhMdaCUnVvG5IGLhlXWkB8Cdea3UV6TsFuKdrOmORJtGatYfuR4bFPHOoms+051rota5ZC25TgncgJabh5PCS6xNlNAhONxX8fz1sh7MvDrsCWy6y5kEHbMusAswCJfI0k+bmH7pTVxDNAcfCoBTs5neVqTl5e8A/WOuegiawsg5e5Vhn+Ssa0YSCQBIwz4xYSveORrsAb8LMCd52M6bFDAgRBJ0gJAcoBzx4AcIYKKvQeh3Hco2FMEaW1zvLFcl+QUrAsg1ySWKOezRLHGrRl49VpF9xvOfD+CGYEpB68F/MbquE397fFqSXQ7zqRThPaZgKsxomDhJibsqsqY+1n/13WhSWMRoftHm8s+XxercstnRREG2aMwpx1Jbj8gyXwCeW9l+9yZsvJ+M+87U85TqywDlGOkSuiPmY4dJIzIFYEE2q9TTr/JJluP3eQmN8H9739/fPGLX8SDHvSgCfA7BizGOJgHD8c+97nP4du+7dvWUKP12+c//3nc9ra35YCd67mddtpp+MpXvnK0q3Fc2je/+U2cfPLJR7sao3bppZfive99L25xi1vg5je/eXrs27fvaFdtV3arW90Kz3nOc/AzP/MzeNzjHofzzz8fd7jDHdZSdgjhBnHPLxYLXH755bjlLW95tKtyfIN+6Jeg0LLzHWBnM7mcG8ZsBlROMrH8hOW01QXsX/bYEtkeCwrVEZycnJydD613aF2Ac5QkPzcalxxHdsOcN8h5c58Bm3Lznh3H5e8t2KfRlDbK0hFSdLojcCSzRMyjWxRgH3XLDPxJtLq2oY12VucFy3SJvKKf8S63kejUmKV2QkSKgFWG31JYdsraUwnP/UsG+/Yv+wQ+bfUMSh1Y9inP4hD0y7tlbwaL7XK5HIxZSajGZYmomWfwS1md/OzQChuulc+tDFLdZ9bGQN5anqrnsGwAxNH48jqDCPzdWlYomuhfBvqytJnmSdy/7NHHiAPLkEDA3kSt6sZ9o+Fz3Gj4fOfeJdkzBQQ5+lvrDQAxXeNc1rCds4Miv5cjuqP8rw6mCC8RyJ4InYtG9rSB9w18M2fWQr8wDIY+RwdbWSF1HIRQ3XXagBYUjMkxhTk7oP0JJwOLLcRuibh5LeJyAbfYROyW0FyANrefOr7hfGb6WbBPcxZqoIJzCfwbyKwpyBQDYt+VYLvP0eBwDf/vGpH0tMB7yeqz41vtQ+RMd9H0Te4sdcx5IrnfgcYBjeQhnDf5ulqGmJikKe/mwPGu18T6gLFVLD+gdMj1Ek3uk1wxO2YLJszYNZTaw+W2Dz2Pk75B42fwnvP+EAHtUaD6af7GI1n+ZNdTU2UAvfbnewEATsY3uMxmdnvMpkbGMWU302xeldvn7wEFm5rAUsTULfh+6heY+TkHzQRmzvSk40Us5pJ6LNN5NbPaMwhUgkES0CWs9nnD7COd3xNrxRE8eJ6hfpmBSZERHEh4WlOJPiAHo1jAz2VnfjAQinNVkMM2zEFdf0X5pw7g0NzA3s9kLunMONcPgT/LkLKOdmO8NFEASfpFZQzrugYGMGMMad3WiIpBiACpXD3yuKKMvzYwAMh5YllyfVmBfnaNaKU9g0lJpCBu44hVQrolqNtE2LwWcWuzYPuRXNu+D/At7yM0h2/sOZdfkridz9Du2wM/nzHorWz+jXkJ5oYA7wMal/MaWsa91rGeszzl/rXA38GYfssRdAnJQJ+APn2QOT4wAMTfzfsULoPXeg4qc6tzZvndYr030v/2teZ91FNPsp3OISZGY1lEusYrpl15vjGBUJxPziWnQt1i6ZqtzN5LKuGpx9JUBfUaKgG2cmZe+kkPGmIpTcr3e85dZ9uiZO9WOZrJYRmQ1FQihkFcqRgDHNuT5rrwEjIEJOYny+5GWc9RUY7uZzgIwki9ps8zu68INLU54/U8NR1EH5K8+sB0XPM6Lsl8UV1XFCPgrvu8
Y9PaarJjyb7yla+g6zrc7GY32xUbwzmHt7/97bjooovwwhe+EBdeeCHm8/nOP5ws2WKxWFsuxK7r8La3vQ0/9mM/tuuy/vzP/xwXXXQRXvziF8P7Q5dEDiHgs5/9LC655BJccskl+Jd/+Re8/e1vx0knnXRY9en7Hu9617vwqle9Cpubm3j7299+zAI667TTTjsN//7v/360q3GdWYwRXdethSX25je/GW94wxvwC7/wC3joQx96WNexWtd18N6DiPD7v//7eOQjH4m9e/cednnf+MY3cOGFF+KrX/1qeu+UU07BC1/4QjzpSU86pLoeOHAA/+t//S88/vGPx8bGxmHXSW23Y9LLXvYyfOd3fid+4zd+A3/4h3+I97znPfj2b//2XdXpda97HU499VQ89KEP3VU5al//+tdx6qmnrqWsdVsIAU984hPxrne9ay0BHLux4xti7TvE5QJRcm+hjrxOmysFRKqozGpXWQN+KQ9MCAm0WgYGp5QdqM+bHUtVam66za7HQlhUXUCStWRZOX4eAH42epbyZsvLJqtxhMaLPJEDZl5lRgkzBaEcsnRRt2DAr1+m/xPgp86rxRbC1gFmL8kD3RLolgxudAsGBOuIduvoEkvtDGEWIYpjnQEHbcvNzjDODOC32QUs+oCFtOEiPXosuh4HFp15L+DAopP3+vSsDwsWjj2sWeeRBfyy80idGpl14KxDUEBBm/fCm41xTmqfy9LPnQACjdNcQtmxohGzGlF7KGavLgZgkK5jBV7T9dxx2+drl5/1OrfX+DLEwTXeCcDbCcioj6V5LHp+jH2W3+Prpkv9hMQO1fuP76X8my4CPRxiM0dsZvzwbXrAz0Tu0jyaFY92bh4b+dFscPntXmC+D7T3BLgTToE78VS4fSfx670ngfaeCLePH7T3RLj5PtB8X87jJ4/E7lOnDlFyDOfBYIS1UeV5YRC+SY7WOudLug9jCfgp+3NsTLRmGXbKfNFrmK91lb+TsSmNSw4bjcO8YbBYX280Hq0TADmNV5RAdXa8O7m3MgBtTZ1MZT13NuskXekwrR3bY6BxECapeVDoGHTuNbgiAwIsF3h05D0nm+ywrWa4+hncxh4ey+Z74eb70Jx0Mo9/+07KUp6zef6eAh/OOHYFFExm1271+q3vBuwiC0TY/wegw4gFYfsBWcZa53FHHLSTAraqsU4d2XpvJ3UEUUsYe8COGWrGoZ/Hf5ecyPaR5kN9D274kM/1uyqL3quyQsy5jZOj2gAsyhIv6rbd9XAQVkj52f4M+bUjBZGybKqVULV90po+2qmPe6MOYU1/n4AelR3UYJ2R3L9JsrPlHH++lfy8IuPp2ia9lx5zke5WdqttUzn3dK4JUC7Xhhmoo0JecTyIanzytr/V3wNmXwG9xqkC87KsbXrtcl9Zdlcq286nY4Ex1qp9BPWdmVvzvEmpnUpWIZCVLHQt0wfziKJwgczMs2oHEfkxWidTdxvEyLnHdb2aj6frpwS478bseo/MvWnWgpZxqOfPdV19dDIPAEX/aX+mQDtQsdbLe1Iqxlpbjkvfle+Y8TsBfnYtNTY+1ixmnTOcrHHrwDZd47opp99kk81mMzz1qU9dC5iwb98+/ORP/iR+7Md+DNdee+0aage85jWvwaWXXrqWsi699FL81m/9Fq6++uq1lPcrv/Ir+Lu/+7u1lAUAd73rXfEP//APaynr61//Ok444YS1lPVDP/RDaJoGXdft/OUR+8AHPoBnPetZeNKTnoTf+73fw/vf/3586UtfOuRyvvKVr+CFL3whzjjjDPzwD/8w3v3ud+Piiy/Gk5/85MOq15j97//9v/G2t71tbeX91V/9FT7wgQ+spawf+qEfwsMf/vC1lAUAf/AHf4Arr7xyLWV9/vOfxwc/+MG1lAVw3R7/+Mfj4x//+FrK+/Vf/3VcfPHFeNjDHoY73vGOeNnLXnbY48Af/dEf4fTTT8cTnvAE/M7v/A7OOussvO997zvs
up133nn45je/CQDYs2cPnvGMZ+Dzn/88nvKUpxwyOPnud78bP/uzP4vb3e52eMUrXoHNzc3DrtfrXvc6nH/++Yf9ewA46aST8LrXvQ4nn3wyzj333F0Dfr/7u7+Lxz72sXjc4x6Hz33uc7sqC+AAgoc97GF41rOetVqJ5hDtyiuvxF/+5V+upaymafC93/u9a5tPd1WXo12B3VhcbCI2yJv10PPmxs+QcqGQQ1/lcFHT95wjoFeWVUxSdDapvSMgOM0/AiwpMqnDEZY9O6otG6r1jh2+fU6IXjMAU7kjkb4ADJtPN97sVB/Ln+AI4nzmyHPql7LxMlKe3ZZ8Z4m4XCAIqIcQMlsSEl3uheXXtIgusAOjaXLkt8kro9JSQaJgdTOcNuEBWPQloLTZB3SG+bfZBRxYMFi3X54teLfoVjsSLGiXwbpQRHznz93obzXXiwX8ZqY/ta1bn5l+zIKjnAPI5T5zMJtgPRakrwkyMNFA5cf+TvteQUK9PlZZ/ZlKejLbQUC+PgN3CvothFFp2y/EiDbw/dCK83DuHZYhZiecAWcAI5V2kKEEWVotg6t6b/TqoHNAL7kDuX2RcqR4B/iY28lTw8dO0rNDZyPtMCGoAzQC+buhE0ZEh9hvsMOiWYBigNuzD7HvixyAhRnmi4J9yeGr+fsscDcmz2ajwTWfptDQ7G+sBJQ6gFXSs87lt4LryFW2bM0qPj5dY2QizOWtXt7riUHAPka03tZDc2vxD7dzWALqmK/GzsoJZa/5nRzChwScG9BjZe7TVLDJdyqSZYnR0wCNazA7CqjflHdmssM1ZWqRjJmxmSOSg9vH6y44z8FBzoFmcwbuOs71h6YtmM0AUnAWgPSevk/tDLGT38WYlBoQelDo0LoGC16iJUe0AxBqafFqTEtjjgH7AJFXjDlHbyvgX+tN4E0CPmROF5k6mw+ZumUGAM2YoNJ8lIIv8lqUP6cicCODEjk4Q22VTF9t9ncWDCjWkoY36F0jYxXAUp80PjeGAFAYzklYEWwxyhaM3AZAWj/y3O5AEWgF7FKdQT7XyOwzx0FRIfLaaxkkv7HL0vo5mEvXMQG9rF1U2QNQphKJAsYS1HNAW1wu+Nq1Khfew3mHkKQ7WwRh+qmsJzkHv2eGZr6BRth+BcuvafN8rnNH6OCbRvqFZC4eBt4U8xpKOU+i8vshrpaPtu9nNQdlaRLgImLM7P2xYhJgSnmNVqx1qQSVDtpi6sD8WxPYpGsbIgdnAC9AVFt0v2GudZbs5ms9Pce8plYJy7QWqAJEAST2nc1NrkoJOfd6llzLQC0fjyVUpc1hdkoHC5zXUr2GKZxBzHz+9V1LQDG+qVn5TyJmfAaKSaI1L1HKNAJlHm8DGstrHSNTO0QjgRwjCploc44k8p2J+adfsZLFtk3sgGPmJ+wi2v9wbVpbTXYs2amnnoo//uM/XhuL4EY3uhFe+cpXrqUsdXTe6la3Wkt5l156KX7nd34H5513Hk488cRdlXXVVVfhpS99Kfbt24ezzjprLe33sY99bG2Sfje96U1xzjnnrKWsW97ylnjRi1502L//vu/7Pnzf930fvvKVr+D1r389Pv7xjx+y8z+EgI997GPYv38/7nznO2NzcxNf/epXcZOb3ATPfe5zD7tuan3fwzmHV73qVTjjjDPwIz/yI7suEwDufe9744orrlhLWQ95yEPWUo7axRdfjJ/4iZ9YS1nXXHMNlsvlWsp605vehKc97Wn42te+hnvd6174nu/5nl2Xefe73x3/+q//itvf/vY488wzccUVV+CSSy7BAx/4wEO+d+9zn/vgGc94Bt71rnfhb//2bwEA97///XH++efjJS95CU455ZRDKu9v/uZv8IIXvAD79+/Hc57zHJx++umH9Htrt7zlLfGoRz0Kt73tbTGfz/G5z30O3/Ed33FYZZ133nl4xCMecdh1UTv77LPxx3/8x7uWfP6N3/gNPP3pTwfAQQ3/8i//smvZ4b/8
y7/EX/3VX+ETn/gEzjrrLPz4j//4rsoDgJNPPhkHDhzYdTkAg37Petaz1lLWbu34Bv26JWInTnXnQfN9QNMYNs8GFgFYGFZRbwAQgAG/NhLQeoQQsST+nP0PPIiEKPnQzL7FAhbLhvNzdI1HI8y7NkRheUQDBJLk+8vgX9rnjDizLeDTiJTOzGfAr5GoSpU2zJHLXXpPpahGwb5umcG+KtIyhj4546gBO+CiLzeYJhI1gjfCEdnZpo9OWZKG3cdyk8oeq9l9fQKiFl0wzK/xjVAN7lnwTt+3/9vfKdhn/58ZmVaO/gc2DNin4O5cpC85Ip0dIPq8nbyg9rRa7diro73tZ1bKh1Om8UaeiJ04vAmPoMiOMXWM6iYyxLxhDbFs12HeRAdH7EQDmBnbeocgTlL0KEC/1K6rHFB1+6fzHYI8ref8gX3U+4AdllHzADnO+xfEYRjlwc4ddih4BwaoXWZrRT6RumLppXU0aCtoTpIo9xc/byD2HcjPMjA0Ei0+jNw2IJ1Gcdc25hyqyk3SxZYt6BtEZY4EcSaijIpXwG87J2Nt9roEzJVbvYhyvalDLqqDSgBAK7sHlI6NfoWTCkAaK/P7Y3Us7xMydbU5ktJ9Jb9JDkBlfqTnWEhUJSCicvADMEBf5Bk1AYYc4R6bGZcz2WTHk0UNLCCgmQHOI5CDP/VmoL0HMsjnfJI7Vjlwmu9L67LYtHzfaR4n54fBEfK5yiNnJhbLireOeC6IKncYR+ZEnicCAMj6zQZ2uUhpqLdCMxq4w4xkz0EtEmCSWLqaM1ZkT4vcyNXYTJSd9MzGNkdLARocANIZQGFMgpk0r6kZwwbdVLF97NCuP9F5kh3nssaUXLgp99mgYAP2Rc69EGIeaxX4SAy8SgUigYiR8/hFMPuHwPNs42fokedtAqsFuHQCPOe3ziE4wFOP1hE65+AoDOaLen2o/zuSy87WUx7R5PJL6wKVtXci8TnjuVZZfmHRJRZgu3cP2n1zNPsyk5/me1Mu32jaL7GfaAFyDZ9XpEK20pr2iK4nGdwrgwT5/IYXxti1UjD3wetHinqFcfRUlco8tZ8CfAz4ZcDH13PoKpPcx0ne0t4vxXn3PN4EAc0F/CM/S4w0pLVsKW/ZR8ial4Mzo+QsDKRf0nWmOZxh8qf7VSwgK090EgzapbULkPJZR26TCNQpzotcw/wFE0ykDLja7H0na8bMBs5Adp0XHNInun9gsLOqD5nrSOsFyoF10DLt/iQDfsr2VGlXm16ikPW0AVLBsDnVzFqYgyMM+IeRADgFYlMvSlPFMOX0m2wy5Hn5WLN9+/bhiU984trK+8Y3voF3vvOdOO2003Zd1iWXXIKXvvSl+Pmf//ndV0zseM/htZOddtppeNrTnnZYjBrnHM4991yce+65AHjNc9lll+Fv/uZvsGfPnl3X7YorrsAzn/lMfOQjH8EjHvEIfOlLX1pLHi0iwk1ucpNdl3Mk7IUvfOFapDMB4Mwzz1xbjrVHPvKRePe7343f//3fx6c+9aldlxdjxDOf+Uy89rWvXYvk8K1udSv8f//f/4cDBw7gr//6r+G9h/ce7373u/GZz3wGL3/5y/Hd3/3dB13eySefjOc+97lrkai9+93vjte97nW7Lgfge24d9xaAXQF+MUY85znPwcte9jJ8z/d8D+5yl7vgrLPOOmRwdcze+MY3wnuP5z//+WuRQgYA7/3aAfpjwY7v1XJKDM+b9JQny3mW9WtmyQFdSsDkKD3Pu1gAAYEIS3EAeCJm9pl9Ss02Kxlk/NxGBkUAIJDCei45mwLji0Ao8yYAw4jKFHFu2H2NsvzUgdJ3mdU34qROeftCj7B1IMt2LhdI0lpA6YRznjeNfc9tO2aVHKFK4ajMTQIYNCo1MX1M9H2KWI1JslNf14Cftv0Y8FeDe71Egyv4VxrnlLEAof2tMvoUpHWEJDnYGjaA5vZTwK9xVPSXBRtsFVaBeTUzsK61nrWNMubzFMdNiGkT
v5t8ENp2fYhYIBiHEl/HIQb0niTXHrDEOMg3CvDVWFsB9nHUsYvipJFje/GWREgUd4TQPKRtoxbL7gBlM2g7KACIaHL8+WpRUzkWBvn0lF0XHDOIgwdCA83fhr5DjG0pWVRbBf6ttIOJBrdlJHnQJjmSe7lG7P23HeAHlA6iINHqAIrrd9W1W5QhOXWEQAIKXBcXScYG/pzrQOm6Hluy2uJLQPzg5G5XfaeuNwGFw7qWo0rSYwru2u8Tt1QMHTP7erkmdDwGkgTodW0hlAynI1H+ZNdvY4d7k3ONQu5r3/K6QyyEHonVLAoBmrePnEd0GfAjb4KHUo6/akyUICb0fF8RNfAUE3ATQHDq9DZzaAH8iVnwz+dBLP2v+XoVwMgBV5RZ98WayqytxLltc7kWrDZyOT+smmH4JZaavjbjNJ+TjrVxdG1g1wUARsd2XoZm5hMcBwg1tWM9GqZbjFn2szIFnrIkoK1QFfhiQQ4F/mx+P3KITsAmJzN94DVAdNwO3pGAdqXcc1JgWDHQ61rOtgMBw3yDqUABqQWcJu/gggO5zPbrF0u4WcMsv1kLN2s4r1/bJIYftbO8H7HyqdoOMTDY6hp4uQ6cXHi5H2MCjLZbERws4Fe/X4CIENYXIuqVSR3AliXuaWem5w5mQeHyfXC7hQ6IMv5YIMxYMPcK/5brpwxKMmuZKMBWxpTH12pRH2YPk3L7WaBNjgcmrIFIQMbIzyutDjobCxQbOd9Q1yeO3+8hVsDmNpaGQwH+gLz/HHx3RWCVlpPWUaFcN6W8p/q5sPYUB81rKZPn1ef1su4na+O9Mge9Xdc2ra0mm+zo2HnnnbeWPFcAcK973Qs/+IM/uJaybmi2DpCZiHD66afvihVl7dRTT8X//J//EwDwMz/zM/jIRz6yFtDvWLab3/zmaytrNznyanPO4TWveQ2uueaatYB+RISzzjprDTUr7RnPeAae8YxnrKWsG0JOysO1/fv346d+6qdw4YUXrjVAZXNzEx/96Edx8cUX4+yzz15buddXO65BP5ptcA6ZPfsQfYsw2wc0M8R2L8JsD6KfoV+GtLlVCyE7EDxRcjgv+yBgHyH07JzuYXKiGFZUF2ICjBQoWfqI1gdseJckCPfCo4+B2YRwDGyofAlIA2wLwM+yxhREmikrDbKRWm6WrD7rlIqBo9BjTEBfCD3i1ibKHDoBgzwm4qiTGOv8gfOIRMwk0lwlJsdClvTM8jdBNqjLPgpbUnJiJNZfmcNPJT0V9NsyQOAYI01tjNHnHaF3LMFZy3tahl8t5wnkyH9HwFzYmzY32d7WY+ZdYl0qu5PktwSNiB0CeWkzrg5Dzf9lgYayQ3J7C5MLvmHGlKPkKARqB0BMbNWc3yUax43mDxx3mmkbb3Y9QnRYkrJXKcnaWofTKPBnnUwDEJDSd2qpUE+ENlByxrbeCaAkkdSBHZce7MxlhyGfnzoPlSGokcFOo4qrOg6cpvLHXmIKHLqmEcaKsj5CYv9ZULHI/1YcbIQBaD5Tp0kcY17UgKQF+7zcg3CJMWIdyerAUsDPOhbt5KvSTemQlVPHsk3U2Vs7HRPo74AYCb2L6AOZY1OKEBxzDx5s8OChrhlsfTViPd2PCuaJJJXNLZTGVduvQHZeqcwU2IlN0TEDFEgR6Az8jTCbJpvsWLUQee5p9/CYqJK1zRxRGW7dIjNiiUCLrQT0AQyAAQBmc5H6bLPEp7L92lkCWwrT+y30QLfAbNbweBKN8xsSaJDGblZX6CPJ/QcADiEE8WK75ExtkeckZe5vNMwonMl83jiWntTAqZQnWdZcSTlB1lIAEEXKmbwfjuMyl0fPY7Xmtl1Izl0NcNLz0rOqA4PGzEZd6xHt+iMSgUjG+8DgDjmCdxK8EoBII/LXhgVFCtLBBoDIqamDv5b1kzKSxCWjfYjRZ8afMLr6KMEiDqA+l925iHnjsdXwWl7XWyHGwdqPX+c1HrP8hrlhtT/IeVG04DUxtSbZfQg8
V8xnIFkbulbGdO/gvEOzd452H+9BaM8+zuOruXtN7rHcljI/GCaoN8xQBUICCKsi6RWo246tPxZoZi2SAH0JSIqI5n9bpp07k7wnDefQMeDKKiewlOwKwKueV4HMAJMfk2vgBCS152n/r8G+SEjypwHgQIEdFg8Kqtmc08sULJrXxnz+0msuBzbVgJsFxHQNMchzZ1UcUmOVddL86Dk/5zCAywtAu90yygZB2Da0wRTBXkvI44hVn6lTTBT7mdDzvjQGlkG2fUxOtJqF9e0lOAIzHrNNMILNXVjfD1qnbsLHJpvsBmPrAvwAzlU12fXH2rbFiSeeiKuvvhqPetSjcPe73/1oV+kGbU3T4A1veAMe//jHD/xNk92wbN++fUeEBX355Zfj/e9/P25605uuvezrox3XoJ+b7wPtPQGhmTPDb7aXn32bGS9QZzWzoiKAjcZJrjMB7SLgAzPzGs/gHBDQd7HIHaIMqNoSGOUiXCAsKcKHgNZJDjRwNHoQJDDw7hPOAIdAjrDU6FoF/BScagjsdFJ2X5VbxrL6BjKefc/PwTwDA4Zfqou+ttH5VppQwT/nErPIyu0EsBNO86r0MUcvjuVX5HYMBahq29oCfqtkPq2pA6gLEbPqvTHAT0Esla1RQEw3uAo+KcNP5b8aYQgo069xJaiQIuF1cy85FwsJQdkcQ2Qkiyh737CDyM+4r1zHcoHkxGHHTrxA7MjpY76OXLEpZ2DNORKJNGayhkg7XteLlB/RYQmWq10qACjVtCrgFkRMbL7KURMQ4Zw4cQniaCBmwYKZCC5GuIh0n8RK3CcgR1Qb9abEWosxshODMtBVgP/WWWT+H8ps2fNhZxQ7ZkJyXCY5I4Aj1AHAGWdOZKYkFyjvrZBRGMhH1rlNDBCsgJ9Keto8NNE4TCzgZ89PX5OwDGrgb8wG7A5oe2MgH0URmY0Kvmf07FZFqdd1A8YBQm2VnRaSGagUiTIDOOeIdGXnxOycqh2UO5mWJRHrOf8phiDudWAsAXZky5/semqOmLEK5Hlf5igFtBXoRk+gMAMMXqLgEQcmCMDTABQY7GMgUZ37otRQj4cV29Y7Bx8Y2LPShETi/JYxTHmEpHO4MMMDRXgvMqEyJtjAEmXsW3UFAoN+FPrssJeAndj3Ig8ZynWUnt+IrGfB8ossZ9kJoMDBGrxuKoNOpAhQkhRMTWS+V9+PkTsIuqQlsNIFHMEDidHEwTDbjE+Gmcft6krWYSXhNwhgGhn7KED6TsAhYb3petKrjLesvwLFFKxUA3iNBHhpvmYvwV4zCfhqZW2nV1dU9r+CDs7zNSigNLUzfm64//yM53PfB6AFYggs/Tlr0MxnzO6bzfn76VGx/MaCfGKZWZfMdxSA2U5CS+fc2oq+wTgbEECap0GAysTr/xq0qOWkuTO9NnPoygpmsHOlVaBf0Sbp+qE8l5LjNQqG4NX4OcqaIw7lw2HHNfM/g65ITD/dw8TIgaG5cJX15LyI9f2na43URrYdasBvjOlXAYEBOajKshDV0ro6lgu0ctWMw84Xx+eDxIbWeyqto0IF+NesvzFgN4ADJTACVBJv2FMQKfL1TjK21W1wXdm0tppssskmO/bsRje6EZbL5a7yF062PpvNZvgf/+N/oOu6tcmQTjaZ2rpyxd5Q7LgG/fo9JyPsOwWxmSP6FrGdp8jazkRkKmsOEEdxAzTBwxNxbr/AUoIMODgsKcL1Gu1ZHlMBklqaM8lSuojAIeYMAlI6KoCAFg69i8kJpdsxLS7LSyIBfq0Thl/fgZZbHHHeLzLY13P0OboOsVtIrkOOXI7LxZDZZyLTk1mHm8pytbMk1cUR6iydGpsZt7PmEANS/osgEaicZB45f1wC/zLwx3KrQxbl2OvtmH6j10aIBfjkxSmkjqDR/H0C4KUcjOI8ar3DRuMT26+RfpkJ+NcIO8ABCfzTyHcAOfrd5gXrM3hbgLXCCHLCkoi+ZQernyE2DGZTv0R0XhgFM3jv
WO4zls5CBtQinGMGYh8pOdBaC26b/B1j+f20HReOwb+lhAFru3H72sYvHU2ehtc4ALiQ68PynnL5OXb6MVDOCOZYj9e8PeuoigZMZCA0S37Wv+Hv84tR95RIbfUwzEF1eivgFwPLgsXATkRxfBagT3KajhxlBctz8L86EvXek+CGXtgiIUZ0ASKpqwB8TIDgqnN0Jgor6nUTV+eRKqpF5bNKh3lFvbXM5GxaXah+gx09JPXNEe3rcIYoaKkO60EOGhN5P8i7U0tvjeSdoRj5WvDZ6Tlg0Ew22TFsUQJNIrkcMOGQmMWa5xQxAD4wwE8uObCjvWeEPeV0PRJcVhHolrAyoOR9Zsxq+TKXej9D63IOtD7UeckUANRAGHYlty4rLISgLD8M5n1dayn41zoCZJ5WqVENtsJygbgQ5QRdT6nzOrhCMjgSJeA0NjNE16DrmOHHTD/OIb3oOayFc0rn8ULrqSyrlX1mXuv3iDiYLYBBj5648XoJDCJHCWxDZPlNHatYllQDIMAe+qr8IpipWuMUDKZoGOzKutf2cR5EnLvRNzMOZkr5hIFlYECl9QTfmYCsQIO1HID0unU2SMu0nAlco3bGrLzQpzVw7JZZnhqAn28kic8ooI8XiU+3dy8HH+7ZB5rvA23M4Tb2pLlZFRrScbXNyCUwVUFPAHByv42udySgpgQK6++U/ZMk5ovfMJjohf0WIwAXC+afAmV2fUfIChb1HLotsGfP2zmknAljwJcAqgASw5iiyKpT4Jx9CUjjltB1YBhttSHoNVa/KKolfP/Fgumn+xqbCx4AnItydBJQXMaesYPpOVYg2JgqBKFLkvKDYmIO4qr3QQFUsAxDFGlTyv+P2diaSq9/y/JzqIMhc9qJlGrCSh/X6jNW4lwC1Zj1R0kSPY37YhpQqmvbDAhD1t87A7+TTTbZZJPdMOxGN7oRHvOYx0xgwDFk68opN9lkk+3OjmvQL5x4U4R9JxW5rIAyAlL9zl52vpyfIqJ3EUQOrQeWPQkrL2CTApxKQIWIEJmxp67y7QCnlA9NgS6J/uyFsdRus/UkqSOZzVQC/AjM8OsWQLeZ2X0qs6XOp8VmAfYhBGb6qfTUGOCnYJ9GOzsHalp2hjQtRzC3MwQvQJ8AfgpGRdekTbJGxLLTgh31yvKzG7NVm0/LOKvZZ+q0sTnnhqAeJVBv1rgU7c0PP3jPAnwZaOVroUkAoEuOwNZzn1g2gHUUEkyuRWXuWTAhdNyPMYC6Le7HfskSrCEgLDalgUx+pIb7AbMNoJ8zy6yV/GGhB3wH8g1mzRxdJNim7UjYc84hOKD3QOt4K89O017ATQdPnMPPXuNd9QygyJNoHW7Kmk1gnxvr4+R9yP1pHEuOkGU+FRg0jIt0f0j728jzlAJEjzRyq626dcei6WuJqwBhIxrmoLL+kpObTLR6sI6dWDjGRtlfCTAy0d01sCT3HVx2yKvslGX3JRmqWLL7bKT0oG2oPL+iHUgizcWJ1MeMZwWlEyL3vb2/U1mEIs9N3T3q0EzHBZIDUp2gVnpslYPP5h/Sa0OPVzs/a4tEOZ8QslOJXAOEjhkioRuAsZYBXbA7igj3bZyiR8g04OJIlj/Z9dd6OHQ9jxssp4gsRegbceY6dub6hpkXQA5qASSnnYx7ALCxpwDUA2BUBXyZ/6xm3wrbrwGEVZzBPx2jPEjYbDJGBBKZT+J5TG7dNKeokgJRkutO8p6y7tLAnPScgD4jlS4WAWYzWrlSy852DZYBCVBY9BELYfgd6PokhW6DU6zstS7XLGtpldNbwcIoQTNRxkUIw95DZbBdZr9VQIMF/qjveO6xko0KWBiZ6xTsVOUxTXOb/F7LoWDGzxgAWc9AgkdaxyAVA3ghKS70LiZJdiCvW2aNw57WYy7rvVYDsQzrTvsiegv65esQzsOBAUkAcE0Lp4oZQLk+3rMv5/ObSQCiKo74JgFKaW6og3kAExDkivWFlVtMLCfkedUGOqV2Rp73
PJVss/RVkgAmmeg0XzIhiiRmGaylZSoIZOuw0iqmnx0L7HmntbJ+Vy8TMnmYDYDMfSmPiKwygPEgJdseasX6TSyzb2PK/a4MXAX8lmbv1MJhiQDvPLNzVXXCtLO2HcbuEwv42XYJcu6m7YDc/8ryk6/ycYanPWo6UuhaKkp5oWIX16sVy/LT/TQJE5cU9NfgxTrdhMp8Vus7UuBdg6Sq9TDXLQrDcZjGgIj3mUcj/920tppssskmO/bszDPPxNOf/vSjXY3JJptssmPOjmvQb+nn6PycHcL1XlIWzepM9iLxRI63ZIpJdCFCMomkXHth2ad8eomdRDnReW0WgKrBqD5GMM6yeoesH+lmKslLyqYKJmoy5Z5Stl/oEVTCs1sK2Lcib18h5Vky+8iCTO2MHR8C/iUHifMJYIXICurmsTcbSM3nN9Zah7pBG2tX+3pVPr8SAGTAb+/MF1Hg3iHl7kvynkQZ8EtgX5YA8w4i32nyWshmOEW8ihMjbe77RX5PZVm7TaDrEBabmTGwtYki5xEAbMwRm5b7fg6kpPWuy8BEDIBr0LgGPTGA00ubBDArdelYwraVUOAlAa13CDGwzGeVckyBvprxp88109V6HRyx49UCQyrFpI6kPkS4OvmJ+b11dKpzVkEbdUA4ypHsQHbujDl+rIzl2PsHa7VkqCMwa8FIMkVwhbXPIwkeOOJUHZQ/5hBUh6HkgwK5lL+vyPES2Oneh8yOqwG/0ahucZoVwB+E7SfR/84Cf9K/Cd9NDshh4ba1EwCnv9PvpD4h08Za3zLXkDq4qQK40/Eq56ZeJ2PnvKM5BwSwO08d1XUEfsXyG7WdmBCTTXYMWgQ7f/sQk1ICgOKa13uD1wUazEBAzOymJAtZlx8DqMkTD3k/XDPY+0ZAv0BZ6pnAg1NmAue1XiAAzjKjcg2YuUR5Hnc8f7c+z+mWsZbyfcYs65mCqNQMW6xoJ5dBrd6M2V16MItoq2NgoTcOZUeEJZDz3gZ9bzj42d8AMp4LGwmSdyxEyioLJNXV+tXtbt/TfggopUAVkKlZ0hV7qaoprzVrJz+5vJ4hh9bPWJ3ARfioQXAOnvpizabAnwX9eL1GxTq+GPNlPqXggKaRHHw+9WeU/lTWWWKk6vpMgrGStKc83MYeBJPLbwD47WQxJOBP86yVOezK4JjitjTfyex7MqBd+b20fpR3I5jt36uE7s61TXUeWC1Jvup7VTkFu9QCY+Z6soCaMr0U+NP3D7rq8qxyjene0OeQ1x0awKjmdNG0wlILFMBlTO9Z5l8BiJH53Ta204rCXgcWiMzBVLGQfgfy2mu8PH4m5HVVAjT1HOyYWUn+RpPXmABEhyzbGle3o4krKxQjVJT4SMpsTjbZZJNNdvzYi1/8YpxwwglHuxqTTTbZZMecHdeg3xWbHeJGLxHaQxkbIEe6xogE+HkHxEhoVHJSAIo+sLSLE1koJ8CI7wOWXWazdSuAqwQ4VcBd67OEpGUw5TwZOe+aJ6BxHFk/c2DAqFswM6xfsLynRk8uFwidyEwlwE9Avo6zrMUC6MuRzAlUkrwlyu6D8zk/ycaexPCLM5Esavdw9LKfYRFyLpoUHWsYRxodG0JMG8xVbTZrHPoQMWsEYKrkPS3gN9rm8tgQoG/PzA/AvnnjkyMo94myyzLAy59x/8yNTFQCYg3TTPObFInsY8gyYDFw/2nUa7fJQO2Ba1mKdWsz958w/WLfg0TekxZzBl9nc7huya/3BGb8xYjYdICfQWWkOEKeTTfDvQPmjTfv9WiDsFcbB3SAivko269xNAr8rTJ73SvYZ/PvJPDclcxK57JjjvuGHXYb0u6JWSn3RHbQZrmzMQmrVbYdyGedHaNMQfAxFPgDSscYgZ2nzpVg7GhE98GAQOosM2yBKI7jZTg8sC+OuPL6yGOQ+MqHwF+kdC0RKTuzPPna4WgdRLZ/9F4pHEaIuT1IGk/OUwE/ZS32sXRa1WaBRctQsMcfgA8K2kIc
UeZ9zcsYxyLyV/XXmBP9KJiVUz5S5U92/TTqezQElsisL2UzLvE9EaHSlQByXi6VjVSlAWwU9w+FvpTB1N9UQB8zSFoQT1SYuQZRGMd9QArgsgECKsTswXNFjBHBBqbA5LuV+cQy/ciuu0Ipw615ktN5KfvLl3mQEztblRGaWQL6tnpm+W31EVdvdViGiP3LHss+pPEOQJonFfTj9+QwFQCUzi2xvEjm18jrGDCjH2CZzz5y20Uw60aDHIq2jznggWlhVXTQiIM/gX2G0TkwBQnVFPBrOgaRY0RsA+bNHJ3IezayvtvoPTYa/u2yiYVaAAA+V0/Y03r+vqg6JACEHMg3nA5A6knk4NoZgnMgWYvFpkXslqBmVqypAfA6zHnQfC9oYw+vmed7meHXbKQ1sg3YKeaHMbP3BpiBGaKwUiPP3QwE5n63YE4x96Gaa0fmLz0G188JIJwvg7rb6jWRvXcPygyDvjjfShFBsy/GEBLbj2JMrD9HLgVh2rXGDrGVyTSIyYJFaS1l9jP6WCSWXyxz+rF0DJY910kZcwNFAwuE1/eKOe9kkSqgMCSgDeXR83F0PY2sgFFbBIrgqWJdFTLYF2N5LC2XX+f8mGnPE0ZkPc14ia7LQRKpEzwHlAJIEq5VcBQDkhmczEFg0mcxguLBpXtYt01rq8kmm2yyY8++5Vu+5WhXYbLJJpvsmLTjGvTrZCNGAIKAAikvmHyHiOVxQJlR4mTzQEToAn+X85pxrrM2OCwD5xnzsQQvLBCiQJRKHiqzTPPBecesMQssAcoQo5QbgaOW9ZEBLAb38oYqyUVKZHXKJWMiqtPGSpxtZGWm9H0Alt0H55NcEZyH27OP89D4GaB5/LzJ4+dn6OEyyyjkDaRunCPyxmVsA+OJEFxM55plIx1mzTB/XxeGOSxs+1tZT+8c9sx8kvKcNx7eARveJbBP2XzWoWadaRaEUonJ7RwKq5wZxeZepcGWi+Q4jIvNErTVPgzZgUjSX3Gxya+XC05MFDqgJ3bKhT7JJ2mkuKPsFHEEiZIPAmJGjqKPmTGn0fO2bcdsTFbVAn61XKptU/1pDfY5ylKqnhigJah8apa7daRybFoml2clrIAho82CfbvhXFngj/8v20mdSQw2ZQagyn8mdsNKFgTSPVrLRqqTu4/sjFJpOyvrCeR7ZhXgZ9tmVTdH8fxwMHuOrNZmtA4zy/hTRwzM/aJOyCzTioFTK7WHOXfvlNUIKBNB2T0pl9cKx+QqwG/lZW0c3hyBHhNgq84oIuOU2i4nKhkJPO2/ySY7nkwctp4a8D1nxopVzB5haEUA5A3Tqchtme+hlONN5sbCavZHDMIy6xJIxbnoCBCHtScdDyIiMUOMIgGOc215ZIc/iSNb5T15HaAASw7eqVl+FMfCJsatHr/hGvR9lTOsD1iGiK2ux1bHufw6M7h2EnzmHKHVHKfJIz9eEx2KAkW0cMz8jxEuRARHOfeYWIgs9Tlo//q1DcwYADbDwJZRsK8utwb+OjBA5jug9yAfZL0c8/rYrNlaT4lgCqBYt1mVhhowSYEevgHCDMrQp2aW13Ii85kYfnYt3bTC9NuTWH68Vs65rlUdI/e/G2fyr2ifWuqzFzDGNms9ndVzXQL8xuZaINWJXMP3E4B+TFJ0ZN5MYPGhAH91fQ/1twaEdiTsRAzvhFrOs7Ykn2qCozQAVNdMdh+jeckHbFoFX0cu9bTekD1aIfMezXsHadyvSKCeVa3QMS2v98vgKzXL8Iu6bgyl9LuqII+BvOn4Gjxl7/k4PE+W7jR7Va0HAPS9XNA7BFNB6xUTcGnPe4LHJptssskmm2yyySabbLUd16BfECd4h+zQVjAgyObXmRBJPVn+JkcPd8L2U+d96xzmHgjikAmhlCSsrQCcVAbSuQRgzL1LAIc+q1RkBjAkh4x8PhPHBi05nwy6zZxbpl9wtPlywXKench59v1qR7Q4KwqQz7MzQ4E+MlKeid0n0emQ6OXoG8DP
0MlGUdl9XbAygzElvQ8hb4priSIgJiBo1mRHSGdkJGdNKNrc5vvTZ+8y2Kd9ofKdyuqbNx6OkCK+izx+FdjH11DOndM6lwA/ZmTmzXCKPI1AiCROSMeogUbEC0CrTtS4ZGamMvzSawUBjbwUOY8YelCzBIWewaa+h3OeWRLKTgI4WhZA9A28myGKYywQk/lC5GppfsqWtc+wiYA2OngXoZCWJwNsbwO2JpDVAndOwewM8rVenTQlwFqDq3pfKNhXs/sU7MsgY2Y91I4f6xatc5Vs52fZwVeUykhWFabXeRF0IA7GhAvFMOJl1R+4Mu8K1ClzaHn7xph9YwHR+p4jln5LbD/7mZ7bNmU5c194Ym9aJILe2jb/C0eCm3yX2iamDdhj1QCOwT/vGvSRUu4hBlYZXdWqUKpLdvKqA5TrZepqj6ltrpKsweTTMZH49r3C0zxmByP7eYQthCOb7+Zo5NKZ7LoxCkvQchO+mcELwzjdM6sctAbg0jkp5SHV39p8VgCDLt0C6F0J/MWQ8gVGABQaRBLgzwVZx7CkNTnK4zwxIY0ggB9Eoq+uKnIgT+N43Jt5BfwWRoZ7UeSqG11nAXk9pcFUjZF0dA1iw4FSyxDQBWARIpY9s/uuMUy/EPl9NQ2ccYHQVUEzK63PDMHea0AbgAZoI8FFEkY4JXbkyjxz2zjjLYCrOdkGrKVtykz9bb+v0pguj59Ns5fzLPaEzsXE3gNkjHOADyGt3zSP37xxmEv+P4fMVmL2XHVdAtzf5FjxYsk5tGk252ArXWPrubczDsgSSU9eK8+LALkip5+dD8YAVW1PmGtV6uMl4McrSkXAGJdrNcOvK0DsQkoySNCYDxxo45mtG2UO30a90hz4MIE/lXStfyvBA+n1SHslkCsiMR9XBjABKR+xKhkAohog3+lVqUSYfgoA6j5GZXfzrSlzq6MUbFWCUbkyBcvPAq81mG4tBECCj2IITBhV8Fv73+TNVoafrqsV8KvXwQqcWYZfUk8o1nfCt9T1NeXAwLSvrs6rZjLqPROVIVvnEAg9KPoMuJsAq0EfxvH/Dx9u3p1Na6vJJptssskmm2yyyY4XO65BvwTGkLI6hswOwDqD8291TS34ExwITvKfBU9oAzsNAJbSy8fsC8nJGviwYFMrgJ9GJuf8Isp4GQJ+jSN4qGN8mSRTypwpeXtGziO6kEAPShpW4tBQwM9r1DLn7YNz7KBqZ8kxFUQmMrYb/CxODDQzli0ih0VvZHDMa45c143lihwYhMSe1Ajt4Nmh5VtKYJ86dFbJtgzy9xnQad44OKLk+PHyesDck37Q8ux1kq4Lc80U8lAxOxL6yM6RBCwo+BYD90WVFydFvWrORQVuJScjX5wcVc79KVHmoUfsFpxjJvSIQZykrsnXgzgUPAG9MB00otk7rjPn9HPYaAAGJrPzDABax06P1nPetjAiY5MlO0lyIJVAqs2JqNd/K048zSlo+0GBvsS6AFIZGeRjAFAdWoktZqTEyDjWvDiebZR8Gc08fm05cxUM2AEWPLSOs5Fy6jGnZCS6gcsuAX2hBu+MdGccgn7bAX1Hwm+QjhHL3C99NGOui0aCTK+VDLRRtyjBcLluLTuOGQSRnZINt0+SIqPynhvIUSGP/YltWL1OZhhKACrwjz/XnDNFVP5Ojs6J3TfZ8Wy65ugWnAzWmaWiOq77EqRL9xE5DhSSoKEoQAKFjudFBSAAqPwhyEn0VsjyoMpkBxBDZ5j01bjgGhkfdLznuUMd22P5yXg8yPMLSf0odHzOKk23ioloyzJrq8QM01xuvkk5kfvIQU0sHwjsX/bY7AI2+4Cuj4npZ0E/78DrIw1ykYAaHYhTQFrMwWkAs/wYGORAnjYycBEbDRBBMT4XZhz3K8e4MQnP7UCMus0KEKScSykGoGtSG85mYDl+CYibNx7LNhbn3oSspNE6XvPNBRxkpl+Z3yxJr2p9yTFCKuM6kQM1srZqWg6+MmCvBsuhnYl8
64y/Z9QxEuOvZnsbhms63gj4B50ThYXHQLqVry0tK4nY9ZEAfqErmWZ6LOIAtSivlWHPrEJh/MGsmwQ06+28frBzXXGO2yxO6raqy48sAerA4CSl3MOry6yBv/R+zOuqzPSLKVAg72NiUT5L40YMV3LVqdhzrx/6/ti5W+WAGNL6xcv+ycUC8ytVDSj/D5RsRF0T63kuA68XdUwAFOCLEsi47ekNWKQ2ZyEfPOeTt/KefHk6Zvv5UIy1agOWIlDUU23CxyabbLLJJptssskmm2y1HeegX45oViCNKL8HjAN+uoGKYJZael83kCTgEAVh7AUET9iIWdJy0YXE8GM2GAzgx2BfisqmMpefdxKV6cxDNuqtQ2bCqOMpjGwWa3O+YDvl/DI+AXwW9CPvOSpZovgLR4WCf8rucw06OI541dwX4rgqGEjIkoOr1AuZfSY5WBzQRicbTMCRHwWZ9HfalqoWpnn2lFWZmZSueqYCXMob43xRrIoSXgUmBBEW6otoX0oOSHIBoGWRL4x/yEBVDBwBq8yBKMAeAM7pl76vAF9Iv4+hBymoq1ZdF3bjz04RPl9m3QX0kRCceU0kfRDhHTseW4eBs0P7QPvEgng12FfLdxavXb5fW70fpG8dmCGWpLycAQDVmdV3Q6kqAElCSySrFPCzZvOCFH0tn+n5rfIh2d9nEGz8uzyeZGc0Sc69QZnI5VgZo1WsvoLhNwLyHQqbcTvSSFHHCvAbOF/EIcgMEhQOowT4BcPw6xeFLBwB0Hx4CgASOcSeHf7RNfAuOz9JQE/rdxsD+/T/4jRrZqFh/QEj4B+AnOmwsuq3x4r1GF776y5/suun5ftUAlga5HuyZqrobxSk0HtIVAIWchHOvATE2Putt7ngMiNe1zLKdKmd5OSaPC5oEAW5BPDp3JcArur8dC5Xtjjp2NQLOKLBVgbMUqm6sqBM2VYlBfIeUYITkswjuZJNJLKeyxBFpj5gGQKzi8ycFiLXMaRACp6jAZ4X7dxspQf7KArgRJmV4oZQUQH7GWm+Vfn5ahsDkrY1W7a+1VdggIMAviLzGTpme5PKfHNQUogcnOciM6B0jZ3XflmlQeeDNK8SALke4WMGAs25kgZ8CPhl65mYnFYVo3idQcsxwLk+VrJqPcO3igR3yXXuaPX8ns5VgZiUEiCOAisKqlHoWdY6dIjk4IRdOGYBMs+n6xE7y3yme2i93CxdywdgiOwj3/va/wr8qeWgqig55PJeRvcxxT0mi6CaaTvWVOkrNjDN3FOHIu2JGJjxCQliIMCilzXYV6zxzGHy2tGeZ1X/FbEAkLcLac8VdV1pIWRAM8iYnwDCwAoYFhA/Rm1aW0022WSTTTbZZJNNdrzYcQ36zZ3DnrZkBBHATLkq8jzCGSYMb/JUYqkLwKIPiUWjm7wEUDlCG11qrWUfMRPkyQJUtfM8RJEBISSnC4DE8nMo8/vZfDJjLD9KkcHMBKOGoyVT3r7Q59eysarZfCCXgL4iGtnkIIkNOy84b58CfCHLoYbM7gtAyi+msp5jkbGeAHiHECIcOTjpjA0fU7+sZpQh5URUkE/ZY1Yy1RNLrJYAUgZZVQKnBoBrs4BOlpHJ0bL6vBTp15ii7iMAB08NfIPsvCRaHQ1dOxExZA5QwSJwWULMMp/S6/I42hbRKVCJBFx4IiwDg9R9ZIZlEGkjZbeOycyow6POiViDfMq6VKlOQu4TMr8lyrJECuSzjGrN6usyw6RiCKS+CwQSeTAGX9lZGMRxt6wAtbqtuP/jyvjt+tqoATogXytAZg5q9LRljKYyC6dMLs9KMCnIlhl+uT7bMWKB8hovQOwEjA3z72n/1KB3iAKsFc5m8zkhSXCqqlySg+o5gMGObcnJnkAAIDpfSI4RBHigDrGZgcihcQ2YZDvsqdH72zqiDsbxeCiSZTUzobY1Ozonm+xIG3UScAQBP+R59FqPRrJT1hWxmaF3M3R9xKZ4R/uoEs187/KSoGQQoieW
MDfsX4qlozg2/F3CTBj15nOoYzqzhlMgQX2O6Zgyr3QLHov6BY9NYYSB4jzgJMtogwQCalAVNbMUTAVZR8VmhmUAFn3Eog/oQsRmF7DV9di/5McyBFy7yCoSarWigc2bu0zrjnIMVFBQRmQBcMaDdxRESuepgWZ2jt2OwbcK+E1vDMfDQipWzcqaQ+rTO+6L5Saa2V5R3ggI0QEzYN74tHaxwTo2yKvVtYdZ9/UxwkVCI+oVaW0VAzSPK0u55gCVgeygrrdUCcM1iM1G6m8F+8aAAW3zdI1a0MO2NTmgDzkvsMhc87pmCOCmcu1aSfcPOt/WgVLm3sigfQfysxQwZfdEetAoYLKurwGknIBF35rnQZCWDZYhl4Jqol7PJoCrNifrQxdjvoZdzvFrAXAL+vM55SCpMaaf7mU0ALE331llK+OmFHy1ssb1fSVtkM7bV+xQ7UfXoHEORJyXM5hm0ZdUjQVRwFm9R3g9CSxT8EEcBFqyKgeJ5H/eg6Vu0XtpVa5CqXMCyeucfiKfzst6lwIuYgoK43G9thzAUQK3Bxu0Ntlkk0022WRHy/7pn/4Jd7jDHY52NSabbLIbqB3XoF/O82UitoG0eR44IIBSjrJn4K8zMpW8UefI61qe0h4XgaOsC0mlyIGKfYyc8slBZFgo5ZGwlhK9QxztwLZR08kBTsTR5PqbFG2eJT2V6RedTxtnlRqKvuUNtYJ9+p4AgNE1GeCLeXMYwUCXAhF9zBtkfW+0n1IbxRyWqz68GgFB3sQpmARglDW20eRIbr0GGjcOJtlI2Hyc4bGTFA5qpwDl9427RR0KlvHnFGB1DTuzdAOfHEx2t+4BLJNMa35fwD3JtUjNLL1Gyr+Y+886DbTug3YFAzxEMTH+kmNQouV7x06AVgDrVbnnbF4+oARiU34/X/dFBpLSa6DoQwv2JQeWZfWtcpqIESQ6PwBMUxmPGC4cWelNYYdgyBSr21WvdR0f1Bmh7Ds1zYPnnUiM0tBJUTP0tBwF+zgSOwPrevycL3Po/HVESYIYKB02fF5U/m/APn1fgfJUT2GMpnoK9mfPJzufVjjCYh6bBw4j/W0AQBHRuULaLzqA+o5ZFCbfke1Fqo819vpg7FC/v8rRfRQBvzDi6F93+ZNdT00dsYndFxFRjafK6KPAzDsgBRLBzyQ4aGRd4NhpTURwzvH35X5O82TtRDZSolb+k1zDTKhoHORkRi2ZcxPjvr43txuLKnaSzq8k0tuAGW90rva+YPfp/NzLuknH86UGnEmusGWfFSQ6014bjV0ryDNTqrfpPCrG5O3k79IaqAAkzBw7lnevPtpOwF/x4ZBVrWvZIgDK9oGAHp5aUdbQPIURe2e+mAv1nDTQyMm8ZvP56dxKzigzABmMiDGrBcSQwT+z5ogadFUHzYkyhq4VRi79nW0MZA0AoUvgu5O2s+Wn9e2qa9qsocisc1knM5R9E80iXatlXvcxcnAPEazUdgForrpmtrmWUgAbUNyrNrBteN45p9+YqoMCf1LVXGbMQVYx6iMirlBjSMczQW9jxkEHw/3coE9Gf7wieC9yvjvvdMylwdK8rg3vVWSRlv6PxTnWOUQBXudF0PgeYhCxdojrG3OPRwAUPN9vsUxbYc+H76XxDjlagN+0tppssskmm+xQ7Jd+6Zfw6le/GqeffvrRrspkk012A7TjGvSbeYe5N1IjEcNNiHEwaE6VzY5ZfVtdwDLwexHsCNiSHCtbHedbUfklBQrTRs/FBPyFGDkNG0WgB1rPkZQ+rdt5Ax1iRAiHuEsRMCdFBUskcowBVlrKfh/kEIwDrXBQkANUvrNi+jHAB/R9luxMzxIlqqCfZSKpk0ojRm0wrxcvgANHBrcA+sphlRh9aTONgRxkU0lAFs+UgSPNCaeszwLoG2xQRzZWpAxOZNkfUMFwRKAMzCRWGDP+EojiCN5zX5HP4F9iBDgPNC3n9mtajnjVOlhG38acAb/5XtDGHtBsDprvLfPI
iKSUOp5Ccl6k/T47IYijd11kcJLg0HqgD27Qd2PMgFQ9ItNnOT9iDcBaac4agC1fGwCQkJx8AHL+p4NhHWgXBnVEO2GIucIhZBm/tdMsRO74uA0jb4yNp20eiu+ZsoOCaFH6g0y5cv+Y49T3l+2fHKkeU38ASGpPiilbyWNu35LRB5QR4hboq0FCAKBo2AUO7PRDzu1nGYJO+pf7WOphnY8a/W5y+gEonV19lxz46piNWo6OZcBqFu3hAneVs3/0eyPgB9e/vGhG5UAnm+wYN+o2mfnmszoAQa97Zo/Eds73LpBY1SrpudXHLAWuQEuMhaOdRxOHmZ8BfcfHEEaT5tCj0BXOfpDI/XoF+8L2AIOCNxUjPlkNcNWAiS0nhiLYigETGYNUUUEl051Pkuk9HPoga8nAbL+trsdmH7DZ9djqAhZ9wP4Rph8ggW2Oc0f3YJUJXm/yWNMjJslzU2FZm9bjUQ7oUFn5pC4R+sy+7pcFAHjQlkDikGVcR9hakVzq48gTByhW61mR+KSecx3P5rN8GHIpP2KMBMtU0nmuDjQCMtADMALUEzNPXSNSn8IyRe9BoZF1dsX2lHNKoK5VyJB9hq4PbO5bDq8SudZYyl/nhhkqGCSwPWrQXwaw/ci1DAmUsszNAate5zggrZH4evYZVNdu0GDIkP/Xa48opkAmDdgCqnMz99TKgC3nDB1Pga3cvilgDpB7UeMHWTJdc/qpCIEF5WuWYm5XAwLHrFwSdO1f9YOnXEVPOfBN17fOBEk5ZDbcAHi115IN2DMBCioNmzshMOgbQ+7zmilqbQQ4DBEmECPvhS2LkfuNUy6Q4wA4inXhFcB4iOusQfqC5QKYbQzbZgdTYHUV+DrZZJNNNtlkx4ptbm7iUY96FN773vfC+xUR9ZNNNtkxYZdffjluetObHu1qrNWOa9BvEOWXZGIMI0Y33n3Eoo+y0WEH+qLnKMetLiSQY9lHbPYByz5gqwvJ+V5KVZbAn1pywpsgWReQ8qpo8vcImEhL3rT2kXNUedewQxsA+RmzXFTmLvRlPpwqYnpsA2kZfhx9TmVEMrHsadfHxFgbA/u2Yx1ZxlGhHGRYQyFGeM3/BkqfMZuPv685WBSosBKdjaci15sCfINcb2HJDobFiDxVbQNHVELs0Aio0EgbLYnQEbfJEprPT87feBPUiRCL/nAC5DVw7Qwx9KCNOf9gFhjgC4GdRuDIdzQtOxJnc6Bp4fbs49ezDcRmzg7FpuXXIiUW/Qw9XGJojkWZK0PLg8R3EtE4Ss7BFf6Rstn0mUpWprLFUs5KKpl+xWtYWSgTFW1YukUuS9tnFnAZsSJaXMuFE4fUMCK8jgK34B//n3MnWUdR0HwoBuwLAoQBFeMv1iy72rGXf6OHVgbDss9yuQm4lMKdsEu96QcLwo61e83mA7KzahUjNsgxojoshTEaYxlxnu7bBMrLvWmcjtvl9bEOR4rOODxDZgPFPssNciOsLG9lclHzm2idZ4ZplEBF9tbyvZzqbs56VT2CBGdEAui6n2pVZvlIlj/Z9dRCn9cbvgLdLHimYIGsWaKfoY/AwgTJpLEykTxk3Hca8EDlvSNzJtl1joJ/AN9PfQcaud8GZnKE0ggrfhBIMuJ0TnUhkSBU4M/m8/M+S6jrGk6CqlIwVeQ2WfaZ5bcUEHDR8YNBv8zosQCgMrd5vKcCfOkDBsCfMwEYOieQAcN4nUVAWMr4vEiAX5KCXOWEHwl4KIMfRj7X/8mAOjHneR2T9OM8ijzuU79ggFhMAReeh4eO/1G2euS1tq5hHQG9Xato+S7n06be5fpVYI0Nnkv/V0FA6djg9f8gmggYMPBsmxNfbDwX9twWUQLKBu0bqzx+NYit/Wq/D2fmXFmJVGXn/RS/WGqAW4QESDH4x9egruvc4FiD/4sOs2OMS2CfjjerpD63Mwv+lUCmfC5jU4gYzVOcqiZ91vr8f0oxYIBlXj9V/ZvWLzvsR5wrrivA7EliELbn
yG/r9TGA6BuQn6W9VJJljTGpuCwlEEEl/b0s/lza74pc6gpMbdv8jCS5TS17N/RJ8lOBPw12VLafHcMjxvcjNk3DIH/hdWTT2mqy66vFGHHFFVfgJje5ySH97m/+5m/w6U9/GovFAltbW9ja2sJiscDtbnc7POIRjzhCtZ3surKrr74a//RP/4Svfe1r+MEf/MFdlXXllVfida97Hc444wzc6U53wrd+67feIECwk08+GW9961tx4YUX4oILLthVWVdeeSVOOukkNM36fAuqwHJ9twMHDmC5XOKkk0462lU5buyaa67B1tYWbnzjGx/tqlwntlwu8exnPxsXXXTR0a7KWu24Bv0IZj9iwL4CrIo5B9ZCnCxbfUAfOI+fsvmWfY54XPayGYoqbRlSXhWEaMABgvNDKY6gFXEM6vUxohWmW41UxqiOdK0vSw4RwOww10gkrjjDIwHwRTRwDfIByECfOiXIZXbMNmBfjEbCM+Y2UIBL21VBCbVV+fjq/3Vf79Mm2klblgwxGmHy8Sab0mt1Fmn+oQFzqNpEJqscjIPXrgH6pThXWPa09TNwHh0538AyOADgRvKKBXHUpD5JzkcCOY/YtIbZ56BJ7ZMjsWlZLmxjDz/P5sDGHu67ZoP7spknhl+U/DQ9+wbS9ZSAaHB+SiIGaSKibJZLsC/VfRsrwSKMssRWSaxaVp+Vo8qMC0VSt4n4HQB61XvWxHmlbe2IRjfUxfmb8wTKHDD6XX3PAn76nt4r9VH098qX06j0VAdkoM+y+nRcUucMl8UO3743zhpzGab8ptU9VQN9NaPPtpPWUb+rx1Xwr2D+iWl/ZzYupYj3wum3Xf+OmJX0A7nE5gS53OAHXZjL44Jz43J0BghU4IOiiebACsd2ASCGJE9bMwAnm+yYtr4EBwqQTM3Ol7L+6mOWrtT1BIDEWFYQitc3hOBygMxozkBlJMl9z2xAAUAC8uernOnBKCX4kICD4lh1EJU5Z7u+TO8L8Fd/FyKNXuTYdU1iESm7RnPmLoOsOUVhQgE/XldxXSyY5WUCscDfmOmYDygbiYqcyMpCSuuoLgdjFLnHQpdlIPu+OOcCzjoYIKYGMQCAPPelBf+AMvgCMvaHjsEsCcSagcE6DeDx1VxaHLpqpj7ExOSPUJlK/l7Q+TKtn3OdSAGMxPSjcl1t9iAW8NN6aT1GtgGlrQBaGZyT673voHmL0+fmOt6OXVaw6rWtD4JhFcxeCjCBU47bMFEYHa8fCryozuVXH6sGjOuHBQRTfczPZe0ZYQKj0j4Ng9/oOahiRxy5eDinO6/zbRqCnGucAxZ5rZU5/SRrLQazVgQU1ICtDRCsgL9VbTYGEKc9hrSjM2zBqMAmBPDrGfBbSt+EGEXm36VUF2N2yCsZ52Uv62EiP9hC4CBI+151nnWf2XpoYNlkk91Q7etf/zo+8IEP4KyzzsJtbnObXTvtiQjPetaz8PCHPxwPfvCDD/p3d77znfGOd7wDL3rRi7BYLNL7F110Efbv34+9e/fuql5qOlavC5z4xje+gec973n4yle+giuvvBIA8JKXvARnnXXWYZX3hje8AW9+85vxzW9+E1dddRVud7vb4eUvfzlufvObr6W+15V97nOfw2//9m/j0ksvxT/+4z/iP/7jPwAAz33uc3H22WfjhBNOOOyyb3SjG+HMM8/Ewx72MFx77bWYzWa4z33ugze96U3HJKjw6U9/Gj/3cz+He9zjHrjnPe+Je97znofVnwoyveAFL8B97nMf3O9+9zuo3/V9j8985jO45JJL8JGPfASXXHIJbnOb2+DP/uzPDrkOY7a5uYnXvOY1uOqqq/DsZz97LWUey7Z//348+9nPxqtf/eqjXZUjbiEETqOxS9u3bx9++Id/GL/4i7+Ic845Zw01O7btbW9729rur2PJjmvQrwsRi5AXAUuR6VSHUx9jArW6ENEFYLPrsX8ZEEJm9AUB+kpmTnauKyPJCVXGg9lqCnopsHco
MiMaWRoQQZF4o6rsHY0aBjg6VBkmPm/8UlXt5nCFI0IfpWRlLIC+GuCLMbPFNCq6ZiDZdlIbG1vsJjmBEQ4gZCafykDW0p2NSuhAnFDdIoF7pJHpFuwLpRTTKDhatVtyBJr303ebGUiANmpmaFyDznGOniZk6TLtemU2cRR0FRXuONcRNpykUekZ3KuBSedYxtN5lr1xBuBTsM/5xO6L5LAMSNJgdZ9qNDP3gV43hGbF5epWbOvt5T0GEtk2sO8VwCAqsM8+V23AqHl2jsTB59gW1C2u/5jBSor5uWb9BcQE4o7l87R5+/Qe4e+W9whQ3idjZiPCx4A+rYOCfcPggsiR5gqIe76HZt6l+6t13PatYf2ROfZYn46ZtrInSs4XTxl81LIU7FWgviHIvTqUFrOszWI8k+dRoKxmQKA/OKez/pyodLgFDActOzbY38I4oOzrGrkFO41Te7sOsd086Dquy8KIVOC6y5/s+mmxW+S1RspZ5hBDgOYDzjKGDjGUwQ+LkMdJxQJ4DLO5ojIskJh0rsmAXHGfdgU4ke790A1kg7nAPI6QOtP7hh3Pdn1U2YC9onLqklc01VzndzteWZafnzH7yzXolyHnRg6sILHsQ2L87V/0wvRjh3jN7gMyK17f89Vg7V0e0xXQa/Xh+XmjcWgcofG8rmodwSOwfGa3CVpuAf2C11XLBRB6hIKpozQnn5mNqeHMnFu9VwB+vinfg7RjCIixLecIA8pS6IBuC4gBvplhj5+hdyK7HqmQ3NZgpxroGVvfZ6UCfm5cTCwu7xwz//S6S0B4yfKuAb/DshUAXfF5AqsjS6L2oWS7h2EZ2+bOjUaC1VoINraFDy/3uq4liyCpkAG3SPzsAMARGlt+HGe6De5DAapSsKLeWxZUtV9HDnSr21+BLn0N5LWwXccF8z+Q10jM7GO5yyDBm0ifkTD9BHxyORd1Wv8b5YpRkA7IubhHAL8iyGIEQKzHKx0/+b5q0pgVTL8ltnHI4J9a63XNKWOPJ2EHHuTe1tSfAyFMzvIgLOka+FtRRohlQK0GyeXAMh7Hmm0R9CNj09pqsmPFTj31VFx22WV42MMehve97334/u///l2X+aAHPQjPeMYzcPbZZx80G2ZjYwPPe97z8BM/8RP4mZ/5Gbzvfe/Dvn378OQnPxlPfepTcckll+Bud7vbruv2ile8AmecccbaHM8nn3wy3vve9+Izn/kMvvu7vxtvectbcNvb3vawy9vY2MA73vEOAMDP/uzP4iUveQnm8/khl/Oe97wHt7jFLXCrW90Kp5xyymHV5bOf/Sy++tWv4p73vCfe8pa3YHNzE+eff/5B/fbbvu3b8PM///N46Utfig9/+MPp/Re84AV4wQtegJvf/OZ43/vehzvd6U6HVbcHPOAB+Mu//Es85CEPwde+9jXs3bt3bYDfYrHAF7/4RSwWi8Oun7UvfvGLuOSSS/D+978/vfegBz0IL3/5y/Ft3/ZtB13OySefjLvc5S4455xzcJvb3OagfnPVVVfhwgsvxEUXXYSrrroqvf///t//w61vfWs88pGPxEtf+tKDroM1Bfv+23/7b/iP//gP3OlOd8JP//RP70rSMMaIt7zlLXjlK1+J//yf/zN+8zd/87DLsnbxxRfjIx/5CJ7+9Kfvuiwiwic/+cm1yTfGGPG5z30Ot7/97XddFgB89atfxU1vetNdBzZ88YtfxLOe9Sz87u/+7q5AeoDb7Id/+IdxwQUX4D73uQ9OPPHEXZUHMJvuqquuWst9/8///M8IIeCOd7zjrssCuO1OP/10XH311Ws5109/+tP467/+azz2sY9dQ+0O33YP/x5F0/woy8BOpoVIeC4kcnqr09cRB5YBV291uGarxzVbHa5edNi/7OV7AVt9ELmTMGDT6MMTGTkXQutdYrK0zuayUglKk+PMUcpZZy1G8wBvUPvIG8CojjbneQOcnG/qXJoh+pYfzQZiu4HYzvnhZ5JTpkEHh0VA
yoGiuWWyjKeJQg8qXVICfrrR3g7ws5Yk/kxbNY7yBlmAvdY4oWaeHxsNYeYIG54wowAfFqDFfrjFAdDyAGhxDWixH7R1Lf+/3A9a7odbHgA2r0U8cA3ifn6EA9emR9zcD2wdALYOsIRVl51c1C9ACUhciMRVzyCj/r/cBPULNITkNJt5kRylzEIkcRrxRRSSYygSZcfgjEFEN9/H+fokV5/beyLc3pNA873AfB9iu1ce0rfNXPqaX/dwKS/lKsDPGlF+OLBkqk/XNj8ah/TQPpn5/PnM9Ffrcxvo58qKVfktvUcU8NvOYmorefgmP5pZfiirwr7XzPh78lkdLW330bYe9bxaZrQ5iPe32Z8PpJ6QpYG2AwT5e9uX6xKDA3l88i4xKluXnb9ZapP7zEv/KkitEqv2UR4vv7bXUJJyNeWrQ4aABMaPOeRrmcCcu8cEPYwBerVzNHSrHwOWw7jjLR3LOHCzLB/naNW5Jc81nIsrvSePZchS0stgQJPJJjveTBnqKfdvBrfUAa/rBbuWsA51NdqJI1Ld6wMwoHZ6a66yIDnolgug6/ixzMFBxfcMk40EMKQ0NqxmH42ycOoAIhsspPMXMlgS1ZluA6yqOUAdyds5lLcD/PJ8m9l9rXMy5ku+Yx2jJSCjbB8Nouo5KCmE9Ij63gqLdXuYNqwDrnR9m+ZqnyVR01rXBL5pv1PomeUWusLxn4LGdG1jWJqOcvtrm6dHEBBErt2uWv/2EWZeGul/EzBij0dUM79WNdo27LpVAKD9nT6HFXPcdoFVuzDdE4SIvGcIGXgL8p2xdtq2PvW8b38rQNDKgJsVlu49xFHArzZn1jaJzWf2fno/aToCL32t191h2U6R2NVah/SeVTBaHkn9RJi7Y6w5bj+V9SzHIQ0y27Yq+rzdGk2MvM/npvlPLUu6Pu8d2kEDqRIwL+PdZJPdkO1JT3oSPvrRj+I+97nPWsp76EMfio9+9KOHJX93hzvcAX/xF3+BP/iDP8C5556Lyy+/HG94wxtw5zvfedf1+qu/+iv8wi/8An75l38Z11577a7LA3iufvrTn46nPOUp+OAHP7grwA8A7ne/++GBD3wg3vKWt+AVr3jFYQF+APCud70Ld73rXfGQhzwE73nPew759+9973txr3vdC7/5m7+J2972tjj//PPx2c9+9pDKuP3tb49Xv/rV+MIXvoBnP/vZOOWUU3DxxRfjve99L57//OfjW77lWw65Xtbufve744Mf/CC+67u+C7/1W7+1q7LUtra28KhHPQp3vOMd8eY3v3ktZd7+9rfHjW50I5xwwgk4//zz8eEPfxh/+qd/ekiAHwD80i/9Ev7u7/4OL37xi3G7293uoH5z0kkn4Td/8zdx2WWX4Q/+4A9w3/veFwDwIz/yI/jVX/1V3P/+9z/k81ksFnj5y1+OM844A//1v/7XxOLc3NzE1tbWIZdnjYjwyEc+Er/927+Nhz70obsqy9q3fuu34p73vOdayjr11FPx8Y9/fG352i677DI85SlPWUtZAPCOd7xj14DfFVdcgR/4gR/A61//erzvfe9bS70e97jH4cMf/vBaQDCA2+21r33tWsr6kz/5E3zv934vvvSlL62lvF/+5V/GRz7ykbWd69VXX41PfOITa+uLw7Xj2hO5v4sg0VuMyemE5JBlaU52rm8KuLfZB1yz1SWni0oeLWWj0zpCjzKSujWvnSNo/GQIHAHpRjKda7mtdxkMIct6y9/VnBIUmT2YomSdYesBxQaueF8cyr3ZEEdh89mo5zoKWttA285G6gOavw/yOssObmc1q0/PlUGf1Tn6LGjWKKtv2TEwZ1l9/ZJfhw5YbCF2S4RuwU4piU5fyf6SCHU4D2pajkJ1jhl1+hmQ2QChg0p9ou+AZpZeN75JUeC9yzKHBKT8gmlTDmR2gb4mBxLmZroU1LHocj6g6Gf8v7L8XIOemsRoTY6soGwxuT7iOEg1lrvN9pNGOOt8U2/B64lo
1X6btvmOPd+aLVE8o4zotiyrlcc0x1KGCerIcKLE4OXLOV/7ScZyxVHGQFQHpHEgmAqmMWSFf2s7BxF/JnlAHQGhlMy1YF/N8EssDuL7y5lny7wcnlspSWbzGHKdqnM3dVWgMV1T9h6opDwjOZBvWPbSKxNA7j0qHUQDBsnhODErZzRhhEVoHPXZ4ZvHz1Xs2fHrgeBdFIcU5z1cxOM6vmayG5opw1rvCz9DiIBvDNgn6y2VA7fsZH7m93V5NDZXOJghe4ypNyZrpzKQAlDF5QJhsSlfzxLZ0czv1M4MiNIAMRay6EXVyKWJj6cHAumwQy4f39SzlFPPwQu9BUVizqOlDBvN5bfo+sTUB4bAXm05b2sO+MgMP2Yfzb3DRsMAxbzxEsST16OkygndJqiTddVik9dT3TKDfuqsl3USQg94M17XgGjVNrlNXRmMU0gPGiA3BsTeSH3qcy3d7hs0AkD3opYRI+B0DRsoS2Mbhj4fL0EXhex1kDVDcNrOADnKMok0ZFdZU+UAnrIlP5p+Zr5je7cobzumn/2NSn3WTL2x+bFg2q0IdilOQvp75PxqOcyCRefADYiInvj/XuQid7QC5Mu5x+FKJmXex+SJV9dudiq2dR/bs1iGX6o/dM2Y93TkCN75AQivgWwaxMggr+TMPNhz3c5i1bf22pd7IwUwmHKjnHskB4QWnGDep3OMRuFFU1dY5nWt8iFkv2H1gOGYSVlKWXOoxtCBmhl/v+OhFyGkMZqcZxZgHQBWBUwAlpVrgwzB+8XJJruB293vfve1lTWbzXb+0jZGRHjMYx6DRzziEdizZ89acvpdeeWVePOb34zXv/71OOecc7Bv375dl6n2yEc+Eo95zGPWUtaNbnQjvOUtb8HJJ5+8q3K++tWvou97/Nu//Rv+03/6T4f024suughPecpT0Pc9/uzP/gzPfe5z8eQnP/mwQY7TTjsNv/Zrv4anP/3puOqqq3DLW97yoKUpd7I73vGOePe73702lt/GxgYe/OAH421ve9va8gTe/OY3x4te9CI84hGP2BVj6vTTTz/s3+7duxePecxj8JjHPAaf//zn8dd//deHfV/NZjM8/vGPx/3vf3984QtfSI8vfelLa5PNXQer19qtb31r3PrWt15LWevOW3j66acfNttyzA6WjbvKYox429vehmc+85m4733vi1vd6lZrqde6cyDe+ta3xhOf+MS1lPW3f/u3ePOb34xb3vKWaylv3Xave90L97rXvY52NY5v0G+zC3BLdUxnRtpm12f5EsnVt2kYfVt9ljRRx4m+Vjmq9JmwZ/j/8viBWNazGdnmDUA/71KEKDNpjAPf5lVj/z4oAj0AJ5snPkcxdTQhR9qWEjWrQb7aWW3BvlVSnkApszJ2rulcNOJcmD4WDLC5+lr5fOZzlHbr2AFCy80UvZ/Avm6LnW39IoF9cbHJEenLBWK3TIBfHY1O4vyDOq2cg+aYoKZFdAFwPQBe6PKmOQhAII4+ckC3YIlVgH/fd4Bv2KlhN/LBbM5rRyY5RAcjwZNBL3VSZjk1AfscO1178G9szkUL9lmgbwyMsK/LPHtDQKhm5h3qFLkdNFx8VjlAapDOOnhqB429Ji2ApudR2yrXgJ6n/Xw0TyNikYePSJx66XiaS8U4pCLgPFX9MQK4geBCRKAIp+cXCC3EaVWzOyQCvXXMoCUFzYkB9SHoNyKvWhyfnV1pnBBnqHXmWyskPVFeMwr4DRyXlukBsHTuiBNyTHZvzGjMwTlyvGLctCAzUDoVhUmgY6JlPus9lwGOPFZye/Az57GM8IEBv1bkDJeHmndwDWZBhCNV/mTXU3MlKJOZ/2whKjsKhRNZ1zKeOHjKzsQaaBIjUrBPYXIfc06q8THASnkq4BcXm2n+j70BpJqWHcpO8kY1M6CV+R1gpzTr+8pxh6ytpB1oxnT9HtnfWEaTYfwFy36KRq5Zg6jMPWqfvaPR+ysxi9I6io/f+qxGoeN/63nd
udEwCNi4zPBuHIGWomDQLZO6QQL8FETVXH7OG0d/azokByrl/8fQ3dKhn4Cd1Oae+5RYyr4Agk3f65qJQp8BDkgOPgA9+ALTtWcivaX2HwZqIMa0NuXJl5E/J1FEHgp+uNVzjV6T5ET2OgN/tdspratqIO8gWGu7MgWF0n1m1q0GtCl+IsFHRHwLOPB9DZTrJb1FiFhuVcGaqMeVNXREGK5+6ntIza0Ak80xdzINqIwxFo6e7RiCFvgL4P4iM1ZpMXlfQwL86e+HnGY+DxnXKqBuYCtA4GLsi5GBQP1OWtvQ+NprrG0O4nrbkURHDpFCBvp0f6P1cg3IRwAC/AWfWMTkPI/RrSp3eGEAS9BlRLqPc+BIDhRtNHChX6ys3pGyaW012WQ72549e9ZW1o1udCO84hWvWFt51jY2NtZa3m4BPwC4/PLL0bYt3vrWt+K00047qN/0fY9f+ZVfSaw57z1uectb4iY3uclaWE0nnnji2pgv1tadx+8JT3gC3vjGN64llxkAnHDCCXjc4x63lrLWYWeccQbOOOOMXZVxwgkn4M53vvNaGLiT4bBzgI7ZOvKy/vRP//SaanNkbR1jJQBccMEFuMMd7rCWsq7PdlyDflt9hOt4c2MlJzVnn0ZTL0XuU//f7EJatM8aJxs83fADuulvxVmiTD9l16jZfFu1td4Jey1LwagTJoGM1W+IslMtSiQ6Ow/yNy0oUgJ2BvipQD6gjHS2IMoqsA9avmnXMasBv+3YfTZvn4IQid1HEMdTye6jbsmvu01Q6BG2DrBDqlsibm0ywJei0nvE5bJsU+8RHbseqZnxa2X2AYjBxLYqWOg9KEZ2CIoDKmoEuv3fNfybwnmlHTjMX5IYTrHikVVsAQb6WNISfiZAX3ZYWWaFBfsyGFZWpZZmVHkcBfyS7JcAsY4MoFKxtAqH0cg56Guq38MQCLT1jNV7YyC2fj5+Kcbi/AB1ro2Ad1KAAn0OwvyjDKTrz+yxHCgBf3pmGs2vYL0nIBrAkKoX0bSCuob03iNHCfwDgCCeDs0jY/OG6r1mnU4W6FP27My7DLgnNsXQIUSugYdco2DHO4ICmpTarc4FaAE/lQgtpTdXgH96ze/EPFjBeCikq8aYvVo5GOd8DfhVx1SnfI6C5/tLZZFVAjmCQbx6TEzyqhBQw1yri+20Wieb7Bi0SJTGeQX27NhnAb8uiGQlYgKYiAjKgVcARU3Hp9HNjYIDY7dMYoPFFOwTuyXi1oE0DsRumdh7GtCT2PwCFiUmv2/5d76aqwywBweZ98sxpAgmkPYaMFVk7orKGg4q7YzUrmPPY2ZBUmX4KbtvCPhJkJkw//LcIEFWoqYAUU2g0LFSgrZn36f1UASAvgdtzKUL+nJ+rwGjsTG2YgMmJheQWd0hCCjlEvMvzSdF5yjoq2AK/+8U1BIksAb8Uh/Ech6WCvK1GngfoAGABFXzQNGvZOZRcmZ+AUDkksrAjqDJEZLfHJRZAZYHG1gzZqT3rSoBRAVYmdWe1lDSdpGQcw9qXbR+I/UYy72tZdYMvcFv4/B//b6rgL86iEzPTWOsQsz7PRvIpes23dc4WSPpnqewCuyOCGVbGCO7L9D+qgG/kdzI9ncUI2IITO5bcU2ptOfBmq7vhh9IkEO0YL5LCHBaS8peC5hxrljnGPwTFrbN3TgG8DqCMAj5/0LKV8asySabbLLrk331q1/Fy172skOSNPzUpz6Fu93tbvjABz6A29zmNjj99NPXxnY7noyIcNFFF+FP//RPj3ZVJptssuvAJsDv4Oy4Bv2u2eoRZ30CPywgpwDfNYs+yZgw86/c7PQhwvtxmSR1oGw0LuXlU1YUkEEXAMUmyhl5PY3A1ihrBcHUUW9zYKkDXy05hlYAIdtJztnNsQWDLDsFGIJ8el583O03hjUAoVGZCvYpEKB5v5Td10pOOAdm+lEMoG4BdAvJo7fJr42UZ9zcP3DwxYUB/XqTh0aMk8cLo8/77KwC+Dfy
HTibY0KkcGogTzfYAcByi0/UN8ON6naAhTKcTFQ85H91gkUvDD9imcFVEp51X3Nflv01kOKEyf9WXXsJFBJwU1mKq3KgWadf4ewEhhHaI2y+EsSrQegStLZys/y9IcA5ADZFaslRLCRLa+eFAn+eMhtPHdVRTtTeL8oAVEDOgnzbLa3zeVLqF61LiFTeq57Sb8ZMpcgskJ6YfgKop75VqVztT70/Khm9qHJLvmEwmxy8z0AYgMo5NsIGtQ5MKyWWnOQBID/aUqMsBxvBXgPPFrxMzq9YMhUCskN5zAxLRcG+DPBFbPU5/+lWx0Ek+5chscftVKL31bzhvD97W4/GEXrH4+rWUYjcnqLRJ9uVVeN3iBFdL3J2yExYzYemt7CnzCpzMTtrEbM020wliR0YfAohgU9jjuMikKDrEEOfGH5RA4Eq8D92y8QmQZvlqqidAU4BpsgSw3q+9hlADB0S4z8EWKnPsbay816ISLnNVHp9LJ9fvyJoQUG+Im+dy2tKBfcU7Et5eSXIbG/rsdE4zpFs8yY7klzIBzhPcbcJLBcIm9cibklAVSfsGedBLkifcJs5l3PUpEAlVVCo2i99r14PCEBQss0koCoGUJTgqNANpDCjXg/yrMFYOZBEg3cyUzuB0nKd1iNXIGH2OQBBcrQhCktfUC1ThyKYpRdw0jfpXJnxlwGr4lJJjbJN0IoF6WomHLkhMGbLs2XoI/ZmvVYBasKwGlzDYo4IEVGAlwgXSUAwWbeMAEIxAlGAwKASn6nMkbyQI9fHTsCkXTLonsWyO/l9s64kBv6A1YAfkPeRpaxleZJWEtaZPZ3ugTSwgNcXPl8fdcCfnntxYiWgB6Bcj9cyt6Z2HES1vZNX833qWlYBal0zcv75cYnhIP1JYOZrUkGRdVSMAUQytmKW70vquB2ayuXgTAqDhvPQw2WQN0vxo1jnNgSRJd4C9bvLQXQ4Nq2tJptssiNp5557Lp70pCcd0m/uete74q53vesRqtHxZbe73e3whCc84WhXY7LJJpvsmLHjGvQLkZ2yYczzj/xW4Zx1AODQusxc8Q7Y8C5tcmxOFM6H4jIg6EpgTh03dt+u39F8W2OMN6Ih6GIlFoESxNHi+2Dy8cnrPjAQMpT5zBHNliF1sCBffU7W9Pyg54MS8NNzJcqghH1O+b+iRpxzfh77miRpPTp2CkaR80IIiN0is/tGAL8dbQwMqAG/EcAqb7Id550hB3L5e9s6Kmpw0DhdQI43vbKRXkpf2j5eBe4CQ8CvOGwBguW8IxqxnRhaNTikfWPOuzgXeeZNf0ySqCnVTPV9Qvme3dfWgJ8CnDX4V4OEanqP6zXHeTkZkGMnTATFHDFsh4oEvun/htlWfB6zU8nm/qvJKnVeFO4DdZLl8UPL8oTinq2tlqaqnU0EzeVUOkWSE13vLes4B4pIe4pNclBSCIgC/nnDWrCAGtm6aFmVI3bYCKvvjUJN1dxj5ZdMmcEeKzfaAPhbZRqVrvdgVAelyXmT2H1ITPEtkY62bB1th9Y7oAtovcMyBACO+ykSusmJM9lxbCrzF8R7TpTnJGVQ9TGCYPPBEuxQGInHSsvUGHVwp+9XMnlyr0eZ63X+Z8BwRNZbckhRcLI+4Gf0Pct0W3BvVeCOfc+BT3oEkBkbcyIE/JBb3yonHK4laU/KsugaLNY6koAzVZUgEwCSA9FSIEjQdVaf8iKndZSOryFwGwLcdm4k0MeMpXGsDW1bGsBqVC0gAb6uDPrQNt0BCFLTwCKVdwxm/VD3gyMCORpljyloJNN3tjGGXloPluscGyBV/3blPFmz4HYC/MzrAZvMqWTqGBhL+bdV2w7ASspBUmS+4NLnO8y7zuX7Jx1kFWfPHvjg+hzI9dO6WXanrrPy/mrk99j+PMaCy3QNtm39bQCDb8aBXvAYOLjGbRuten2Y5olTVDgB+/i94ffS/lE+U1AzgfdBpEt1DWmvOeKUBYU6g/y+yGEuwY/1+SvwZ/fM
uk9Ma9zJJptssuuRPe95z1t77rEbmq0z7+Nkk0022fFuxzXot0oxrXWE4ByAgL3wzPBzxABhzBJ1manGDlt9VufIRqOgnx9lsel8XG8eLXCnAF8GXZCYRzZ6cWyzOSZxaNleQ7nHMpK5lufUfaYF97ZzQhXSiIESTpZzyqwG+2pg0+ZiKPL39RXDr7e5/BYc1d8tOPJc8/YkR19fbJ7JVRGuib1kotA1t1/6jvzvHLMBiZiJt5MDK4r8GEWuUwJyzHHMbwe5fyzYV7H7epEVDDGiO0R2X2oLypI8eu0xwMzXn8qqeoI4/7rUD1lCLTP+ABTgSnI8GWcfyQYeJE4A14yy/hRESpcW8jVes1f7OLy2VzFWk7OZ2PnsAoNtyqplBw2Df3C5ffQqz1JO8r680LrV4CAA+MrdU5eZWcHmN+bY6oTc3m2k41QGC3UcsdKsma25ZPA2ZCCdJDp8VG4TyH0puSSpE5lZcpy3Ur9T3wsDtt0KR1TlhKwtkit/G0KWeLPR+ANwcXj9rwT+xpzOrkljZWb7aR4/ZjNtdj32L/uK6ZfHUXW8hwigcQixZwejBJgAOCqgXx+ObMT4UUhTONl1ZGkulftF5/BO9MV1HupDxCIFD5kxNOa5RgM3ECFyk5nhR8ryM/J1ypjS4xdM3r7P+Xu7pUh8yvpA5T17Af/aFggl8786ySF4pe/J8eCEneMarqtvEPuuBFDsnF/P/4dolt1nH7PGiVRzVpBonSvkPFVOXtetGw2vZWdemX7AzAHULZgh0/E6SyXTlTGZ2hdgdk5wGJ2h6nWMa8bniLH2tusj/b4AHtzllNZSo+Bg0V/8HKp1Ao9/PPZaWeZa7cO5CIBz4/YBgIusjrjdnByH8u08UfW57uY8i/WfnSvr+Xg7wA8V2OpK4JTLs3XB4LoF+TRnRqKhWoV92HnXPDviYCo4lvO0MZdk9kWFpHYTBgABAABJREFUFWvpfvx9c+/Vba3fY1CP0qZL90Vk6uiIjxBjZtpGxBQANjZt2T1YWsPZ4AWU7+lxtE10TWf3TBFI10F0APkZM4eFDbcKtBvkKg6hvN5qsNgGIBwkQOoJCMr4AyGEmMZ4HWN0b2eNAxk4M6PKtkbXgJymPpgBocv5/WIAgpd6l/ddWodpsKNvgYZZfyqNm1mXZr/SL/IY1h8dec9pbTXZZJMdSZvNZjt/abLJJptssskO0o5r0G/uCXPvBmomzLTz6KNLQMJSVtFjoB//hjcU7Chhp0mOoh4H72ob2+taWUGq3vM0BB50G1GAIgYMUYCvF0ZKQM6nY6OY1bHRRwv8SXkhf1Zb2SZZto5/hyQtZQG/sXYaJLmHyHsawC+x+oxkTWKXmah+jeQfdLKCegAwpluegD7PDj/5DUnyePs+NbPCoVIAVdtZxUpK2UYybWy0jOTcMGyjPubI8sxyG2afAZBABiJKwN/BsMH02YJEVDEt9f+C8WfOlY8l9Q5O2HMuRZRDAcAYB+AfibSp5qlUNoTmjovagsX1X0Zsr8o/CTk3zSej12GSoiI+EDMAASWiWDBNb4kixx8kIlqOMXbvp3apytPvKjion+uxCHEUgMz9WDqcbDABUObRo8TW6Erw1jjW071m+9M63lyXgD6otF3fjEq4FrYqr1713cKBWzMLjPMX3jFwCSQH5raR7bWjtC63fpg6BdPnuf/FUThyas4RwjY5+uo8kqsYnJNNdkybmZtskJOqCgCa149f63hX34UEWUd4DXRCnuuBPM8YdtnAkgNc8swlhn/ITD+Th46/Oy5zx8E9NvimyWyTBHgEDv6IBIouO7IB/g0q5hTZMW67AI6heQlQK3P2EbxzmDUM4mnePmXwWcDPsvg2God545LspzdrsNaRBFItsopC6NhprtLpCvjZPgiVBLo5Xz3Xeoxd2QZ2DtlpbWUBsBFgozgGuUINQeVT9dGZdbFdEzsiIBACRURZfwBU4xN5zhegrsirVoF2qnwA9IhR1qj1vBmG
a6qVbVKD0wAHilXfj/pvACJVLCsB/hBCZtU7c/3rMfQ9LbNgRELyjLOKAkn+PpL52QaXkaxL7JqU758gDN5DA8UVbLSKDj24/1LwjaMieMxKDlec4VGWn5XstOej527rkttkRDmiKFTWGspedbyuivLZyiApNc0bWQPM9TGoBIDT+/VXNTCTRLVGivQqGSzjxlgu7FQlAC4a2Vbw9Uh9x9edq2RMXRjK8urvZOwFOaCZ5cAB7TcTmNvIvEEanGj3LZNNNtlkk0022WSTTTbZqB3XoN+scdg7y84I6/i3/4+ZAiAWNODcKcDM5RwpBaNmBCSw2yrdoIzlVqs3jRYEtBbBG1tHwNIAG0nm0bD7+sCO6U6iDpchICSWCtfBgp3AEPiz5hxhiRzxybu73EoWEOQ2MZtIlPnFDip33AjQRKFPbJ7YGwkvmAh+rYPzkiPPl84+fU++o44WamaG2WdAQM35J5vyOmK9iPxPHTUus1OAf+R40+7NVbIiSl3ZEApuJXbEQdgY2Adg9Pq10mpj0qoDOUhl/FnnlnHaWYZfjAEUAkeVy3dIHUjR5dfiPPX6vyCYgTK+M3ZvWMBP22ZpQmKVnWWZqDE5qARk8+ysIsQMxCUnTz4wg6lacg5lV6eSebeorzqBUl/U7yGPB30Q2c4Qi0AAayvHjQSQh3FJVo0Grz/T91ex/YKCuT0DgE6chvV1axP5rHBU1jZgu676rdZPnGWRlFV7cI7ClfJnI3VMQRYHVXJplinuXDnGqzFoP3QiXxc25Z2Z7LDNgjwyXuudFGNe44TIQUdNdfGXjBgDZsm4jLAsmU5Wom1HR3ifnpPMp/k/Wd9nWUq0cioq4S1jkGU6aa5d5/LZ9l2WS6QsXYfAc/xoLtIVpsBdCoZS9oowzErQzw1y+elaqi6DgUCXAD5VrfAOaU2r5dBS5va+Y8ZM6BGEMZmUFJaZOTPq+rdtqO2VAFRfjr31mkG+v0qmM10Tgw8canDNHlsZSMo+ZUUMpGcF/JYhcHBViHBOACMHuAg4uY5XdqmdO2N53SZZRht8QpkjlqTPxyg82wB/A/nNQR7Esh3TNBldIfOZpSXLPilyMlJZ9tjwroFkqnwQkAOaChUTKu/7dMw0pw+Bv+JcV5iugQJ4DRfT0ow0HSNggyKjYeStwqGrfZwz68JVyg22PPudsWMU7FXIOKN7nLouY2Pfocp4uur6SABa7iNPDHS3zhWKBV5wXw3etBZiZKBXgvR0r5pyV/qG99QxcLCEqX8B+Gm9qjEjXYv2VCj3uZUkhrL9FPy7jm1aW0022WSTTTbZZJNNdrzYcQ367W0dNrwAGIbxBJQRgtaZXjtms0wicj4sV8kfqgylkTq0Toja8TMW5TyQnLObMrORJ9cUYCTnmEKKXO36HMWq0kVbXUhAH0vP1Uw/lffM9az3FOq4BpCixp0jtJEQiLDRaPtRwebTNtP/6xyFif0HSC6ZilnWL7bPO2bb0DPIRw2A4BFDDxJn3uC76rS0bD+fwT4S4E9ZfzmnhAXkDHgASOS/OPpWObS2YRwNrJIAtZHMutHVyGrrYNGuI6N7WEsQOdM/ruqr+pqmfinAUNUvFesSyMArKbPSRqArWNovRfZH2tps6KmrnIO+QeMaNN6hY/8bnDpylGbnIvpAcMTRxRDhLXsJW/laOI3mZwdoH8Rp4zKIbr2ZFohT88ggrAKAhMiSRnG1Y7B2FNVgnjI0laXpwVHW3jhkuKENE2BMDkzfs0xMC9Aqs08+T/KeCfiLGUS3Um7St5GEoTniGEwyoBYIBPI9kxrD3jvme+Jct8B2NF3iyALE4mhVubLkSA3yP+X7EdXYu0pib+T+dGDmAFF5/wSK2GjYMeYE6Fz2Eb3L8rL6XWXgFM/erXQGTjbZsWy0OIDoZwA2GCgBYwaJkAdhYMiAp2soHS/TcCwA1ExBLEIZqKCMMz2uXSuZ8YoLkzm8WyJJfMMEBGnlajlvmevhPI9jfobYtIh+
xrJyrmGZOaBc20mZaXwS9jMjDMjMfv3dWDvKmKLO9jRWBJHtbPh31tE7E3afft46Z8YWSjL0G41LTD8ddxwZGdXEDCQ0CCyd3m2Cui0gdCztuVwIy2/BgJ8FTr2ss0J1blUQSMGUFBbayvx79n19Xa33RplN9dpMA7Rck6SZlyGvj1XWkyWa87pYmX4ukozxQBC2H7OXysOm2tYgtc6rwczPNRipbPmdAl2MDcCvGuwzZRWBR3LdssyilKXrOXJJLrtoT8mjBtdkmXlAlEXiIBgmrUNJ8xPb4+c1jKp6MBBdV1Trvg3L0Q2vC6IsDe8ICXwiM+4g5jQIWl8L6oVUljkU8vrPzv8W6CuDGEyVq+9oWfV0ryoXCRCLbrjPMcBxupbMOJNAUvkeVeUXe5bqGmMGNrHcvRPFFmEYtyD0Me/7VNrTnmeMvP/RoDcHSoGyfQS8svvIpXsiKrg5ZiPXtI4XOgxqHXTcZSWLjvcmErBAun+cbLLJJptssskmm2yyyUbtuAb9ciS0bmiy88kmV9d8cvq6NpvXrtUyEUD9ljBplhmc0o1/LdOj5sxmbCTyNz3bzU4glpcEZJPk0nlEqV8UppJKQCrgtwwhRS8v+8B5C0MU5we/1k2UlTUCMrsxtUuICZwIxLHxwQAW6bRQbqbHAD9llDkyEoR9zhOXnH115LQFJWqZqdTGyuLbxnli8/np/wr2AUDTJuBvTKJqkHsG4LqQO6gIf4oxs/0OwdhhoMiUtnEG/oDsuKjzyQFltLUFLxQY92YTXQBIRtKzjmhPAJFxBsbQZyA19pyzpF8UIBGDVCL/6UPe2JMDRXYSUgyAZwdB45qEigVS+SYgRpbm1OtNnVKQ86zzUtrb017jY11WRG6PtCXkvhBdtwKATW0Rh6BO0Qe2vJFjWInOAcinJ1Q7v+t7BsjOD5XMXXFvoWN5qdgtcvkAgCU7YjqAnAe1UUA1uY6jOAXVqUiVIxIAQe6dMUDc/K+AXw5uyO2jr8mMn5btZ9k27GiiEigw9+2o7JwZY22XqFRYBsl5PAziJAOA4BwcZSY1YFjN4sx35lkvobG+P9I2RaNPthsrAgX0PX0mgosRPQQIlPWJvdszCIgsK00wgEkG/GrH7cq8nDuZma917icb8NNkKc8kLaeARw3MRMnlhw48wcU05sloUII8+jt7HnIKaQ5wkkeLMhu9YeQUiy63s1dATx3xLkurK6vYmeCsgjloAk0y0Aio9LOdB3Ju5LzWUrYkOY/Y9wyWbtPetWTztrmQx963AS6rcs7Wv7OBJshsvlixvCLAChgV4MfrBx6/XORrWYONRhlupq46t9ognDwvS37nnq8NMjmP0/VVB4YVwTH1QmIc8LPzJiDrJCAz+wRw4YArgALysU3ZKa+auf41KCmkY5QNosBburZ12kVe8yiI5ghDcGusDarzHTU5n3x87re0fo+W+cdrRAvExRgVQ8vnYg8ta207V68C/Op1Q1p6mOCxwT7Q/D+QB65PFWb951zJEK3L0/dqsG+kHR0AzknPtVOmn655nBvfI9d1CxIMR3HFtafBWivOcex6HgsA4zeq9W89ThwqE3INNq2tJptssskmm2yyySY7Xuy4Bv32eMKehpJ0YeOoiNosnOn6rJt1a3ZTsTQyhyIfQt0mqF8ibB3IScPrHCdACSqZz5U5A8uc0choE11LhmmmTgxl+SmItxRnxrIPKXp5swuJ5WdBv2CcH0Vuv5E9Eud8kSjxyNHjipQywyomp1IG+XKuGC/Aqpc+aKRPEshnwT4BmUZzyYkzsJbyTBKdqcKm/WtWH5D7ofrc9k/dL9sCflq+OsZIpHp2s+FU6U9xZiiYGiO/WMUu81Q6YqzcLJCBP2/uhxRxrW2s0ji2L6wjdgTwG4KwS0TtF2VgAHDargrwkUPs8mu+BzxIZUD9TEDZBk0zQ+MbkAFglmBZIUAcAwGAMq0sAueGG2Vlmdr2Sc/yWr/j7HhhwClykrtGHNsB
OeK5Zg2O/Jv6RE0j6NUpS1XePUgdkiPR5uCzgCyQ+yMBhSXYl5l/XerLKLn/9JnLKe+3aEByms35fhGHOZFDDDO5R3xmNSjYmxw50o6VxK0F/JRNCaiTjpJTzwsTgc8lO935RB2sBy/aRrfyVtuZtA0zC0XylcTx6iJmIHSI4qD3CA3Q+j5JKFtTh1nrXJqP+HrKc9Nkkx1XJvMEXIM4O4FBPeI5JVIUaTe+Z7sQTSAG3x98G/F175Vp1hl2eehAy808zyhTLAropFLfQAL2SZhn1sh5oG2B4IGlBDI4z4E9TQvamINm/ICfMbuvmQHNHLHd4P+Jc+paRhLBwbsM8llndAzdIOioZrbpd520kXcxMfA2Gs43HWJEP/PojURqlvMkzBuf2X2Sx2/uHaswuDLIoPVO1mBGscIzqEjal2nN1TGzr1tIHr+RnIgjRmPrXv4gr5lGAAht1oEz37weSGeOra3sOk2AqszsywoYfYQw/HqW9aykPQGeOlrPa2nOTUYp129xSAVyDMtP52tVSbDnQ3aOqnIeF6CoysnqeqMGArcB/CxAAsjcKa+VKW+ZYwOgxIDVSdaecl5pTSOwKqe0MvxqhltaeyrYZ45Z7LvG5uex3L/6nAKPMvDHS0FK2hcBeV3siUAeRaCX8h/dCkUYzeGX23Eo1a7v82/lf1OWtks6f5hnDVRK5wgA3rSPOdcYEjNTg65qoDCdhb0+nM3RqNdUTPVN45AnUc1Qpl9M0sGN03bIRwuyBid51n1KrxKraQ1trj2MmFlXpzXgyG2u7a3r8SIVhJX1DB1Go/kmm2yyySabbLLJJptsMgDHOeg38yxfpJuzmQJNki+ukOMUoxgrUEOeu2V+TzcTy+wUCYvNJIMEYOgckdxw+X+X3/figJrN2Ym+sSdHmEM2NqEZOI00opI34DlPiQJ+CvRZec9OnBtFVHPI5QDDKELveAPYhyCMFnbS9zGixZBJpSy/nK8vA36tG5fyTIBS7TSpGX+hq0Cm8Vx9to0HQGv9vvlMo/8j0VBapnZEWMqY/i92KLl8DtbqCOYgjgiAwT9Q3ujXLL/aUeGQcwTl/H2hbPcEBslnh3pOBfMvAE4l1sr+IO9RyIAqmBN6fj8G/t/z8aMPaP0sOYwBwEnTqzOKiAS8jgLKEdzIda3AnkuOKXktDg4nbWdlhOSEuO7kcu5BsONI+0Ylrur7Q/thZbPVb1jGgLZp5fzcEegz/w9+o47z3jA6zPOos1dBcW34puX2aSB91zEAGOSadOb4YgSRFKvuqzHAL+O24toy0ePRSB6rEyjKM9E46D7Ie7QNCGjB9qgMP7DsGzyghwgRcK3PErFVGXpt2Tyn6gjdOgqgXzjC0ehjeWEnu56YSALq+NEQEJw4hyOAQOgrgb4UQJEc3zn4ygFclgX8QsX4p41BNRKrTp3eMUqgQY8k3QmwUkKQHH4ph28LamYZ8JvNs5xnM0f0LO/ZwwlQZAJpYAEBnq/SGKROeNfkMXbF+OJSO0QQVNaTpTk757DhI/qG5xCb02/mXQbznAB6PjP79D0L+KWxR9cAlNUWLKMyO8xDngNWgH3kfRHQo+8Vd75ljG3DRuO2lJ/Ux9E5sLYRRhMHZZQAWB/yWreXtW4XWPI7xFioYah5opQ/Gxg/fF3HAdNoLJAwlvn9lG03YP+pVKycF2LITHkgrz1HAL+xttX2VQCwAGBiGM7RptxeApr6EFNQk94PloUPaNlDsK8IsgxVu4wootQ2kIOt/6+APztva521fRoHRFBiIAcIyy/m+9q2Wb1cOxjAz4J9WWHDtMEYaJ0UYobnGdP6zch/hm6YRxSu+J2+jkQG8KNB+2kgUjT92Uowlgvl+LNquRKR25vAMp8qsar7F1bIkDEY5VrJrv34nOM481IbfywQoLrnjsRebCeb1laTTTbZZJNNNtlkkx0vdlyDfo0jzHTTQgw0UbcALbeyHKeYbihreckE8G0dYMdHt0SQ57i1mYAn
jobuU86TBP6pWRYZkJ1RbYuUO26+D9TOQKGH29gDqOShgh5Ooj5JNkMxb7L6CEQIm6+PBbNPWYBbXZCcfixp1Ad9joUcid2s6EZPc8gAzPILIg3aOwb+5BQl+pK3nSqFo4BfkpEEVrL7EqiqEoSGZZbYSBWrbOCQGmGX2Tx9QAb3uO/JvK42zDXgV0cW186KGIYgy27NAEy62Y4inaNx1la6dszGoq0LIEs3yX0JsA6cEwdzTrY/Rl5HLKUf+B6J5t6gdsbnJqBRAr59KEAdkEPjGgQijohOOdQI8MJWdREILP0ZIkDSeDZYn2V+rbxpBgKtDFXtqCHruBOHr1fGh/ZNBf6t6pMxU0ZBYsNaUHwM7Csi5s3BxvrOOtvMb/J9xeNXXC4S+Dd0+i4ByYElrY4IMIszdML2kz6MBASAyER3rwLZyCUvkHX4JFIjMfDXS3+yQ07YNl6lRQU0VmfmKqvu60GuJPtVoGL7IV1vKb1QZDAw1dU4zwb3H2XmAAA0O0hmTTbZsWRJ4lGDpPoFWj/Dohe5bxdZf1mMGa0xXfcBRjKOWHrdAk4pGMjO/3FW5vKzpmMx6rdFolvzzllwajbn9dZsDsyE0afMPt8iNgz4KUvRBm9EUgYR18errHERtNINpeiMKcNPZYKZ6ecw98B+75LjW5lVNlDEOwYY5o0AewL0af4+XWu1vszBxb9DUmEgBWPSekvnlYjRoCrbroMT8vYL5fha9VXh2DcfpXxsQJZvruaw0Xx+ekwtvwCreJUUweAOB+JksG8pebCt1H2qWeA5p48YzQ6dGdsYAn11vlxbVQVyVGYztUtgxMQZQMQAhLWtzIuIMmgmvadr9Qr8g4LndfnQ64/L0vvA9h/Maz8y36V8xLZt+jJ4CnqeAEYZffbzgzQbJIeoQY78mcpx8x6I8zcGitsIamJUhtv+XwN+Rd5481ys4YChsgyQ2qDYlyLvNxK4p+sc8PoKfTcqDarKDPV9OQhOotxnulqOsVQiSCy/qi1qFmwsPsvBWpZ1Wn9Pr9l63Zc+r9fMdfCaHR9WAauTTTbZ9dauvPJK7Nu3DxsbwyCxySabbLLJJpts3I5r0G/mCTPKTnHSKPJus5SOlI3DQD6o6xAXm8zmu/Zqft7cv42cIZIkXtjcRAwBYUwrE4DzHNHr5nPOidK0cItNUDODCz1Ct2QQkByAGTuUKikdjryV/AEiX6QyRVsqWxQE7AshSxgFYLPrC7Cvq0C/xAKz0eWNgyc+lgsALIZJWc7TO6DxJbtP8/UkqcJukR2GoSsAJ8u2LCQ9lwsGXG30+YoI4QTwGbCPjKxkcqao1SzKEfAvfTXGMi8ZRoCWVZtN3WxrpO1ItHbx9VjK4Ci4FKEynhnIWmXqcNDXGdQLw3Y3gN8AUBpx0EQKKbKfAHGwGjBUgb7aeah957LzgZxnsFxlI9sZ4BTwFfApBpBGK/uAmZ/BCauPwA6FPgA9ATESehcRIiVHgu2mwjHlkB2mzsjTjgGjcm6UtCMZACTXpL6x4B+fXz6wuG9MPYxDBSy9BunfMeblGNi3Emje7n8D/MU+30/J2Tv4X76fotF96ssYHChwjic0zbiTUpybCSwFGD0jzu8YXZ5urEOxYEtGaVfSQAcBW/0sswYteI3q3qxsAPTJPTnG7OVu4YsmRgCeyafRMAds/WvWis2jSSidhGF2aA7NdZjmtjqS5U92PbVumYNC+g5u61rEWcC8mWPRRyzqOByZqxK7D6zC0Mj6oCEASwPy1SoMMXCg1grgo3CcOwdqZsBGSPOP3oMqC05eZImbFthzIgN9sz2I7V4G/GZ70UVgIWuoofqBgFPygCMG/rSukbL0qP7I1j2EDMQJGNdGwkZD8M5jGUICSxS8WJq1jq63EruPkAFA7wwAKHn/ijlN5bwFENR5RUBXEjWFmFjeFWBllROcM2usElQdWCWrrO1SM/1q5pi21wDwG12PULGWYqllXScjMf1YESMm6U+b59oU
CiAHtfXV+pvMo56bC8BPgtWsReK5SgMPdT3KoHFMc1li/Zm5kdl+3CbkxoG/elhPLWWksoHV65D89bxusmBfrWCgcxvPx5SUJJKKRC/qKhaY0XYza0kOpMNwDj5EwA8CwCsIqUAvq0AAGxIM2rsIdDJXBZ6YdcWt3+V2Wh2UU8/lK4Pr9BoBhszVCuSr9wKppV0O9opB1+kuA39eQMC6bAXCq7QR9johOc9IUYLgkILmyvPNgUu1cXprZviB+BpRtp+qNNRWqznYa6u+37RcSLsqG3kQIHqUwb5pbTXZ0bavf/3ruOyyy3DnO9/5aFflOrVPfvKT+Pd//3c8/vGPP9pVmWzEvvzlL6NpGtzkJjc52lWZbLLJJpvM2HEN+qkzw242reNeHQhkHB4FoNctGfRbLhA2r2WW34FruXAreWSd4d0Sse/RLzuEZYfYB8QK+CPvEAT08yHAtQ1oY86bsRAQtzYZpLLMHiDXHSjyU/DrDP6pMyOz/ZjNN8bu2+rCgOE3Ju9Zm3csI6WR+iliX+SjHAzQR3nzX7P46kjNQXS0svuWiwxKdIuy3bUfpP2ydOo44DfYUG8XTbzis1EgoXZGjUUxj9hAtnVsN61lJZknfp2cGivKTiXZa0gZrhJtna4zoOyTQWGmnuqskeh0K+lFWp8QpE96BvTq+6V6rRKg5DxiA6AXOTbqisj/qNcFuO28awAnEJx6DEMUxxgzKWIkeAydYZpKLsnOJucoktO1HjeK9jSR2CnHimsGTkvr7BjLGaPtBpho+Z2ilRV8XDezdCcds9pGcmauZHisuL/imIMLGER62/cjshOJxxm9HsX5JY7iUiBqeOzitRupI0pndI5op5Q/UwG/MYe1OgVrh6B+BrD09GSTHS8Wu3JNpeBf03DABbPIKI11ytCwgHeS/iagYJWvCFSoA2BWzankZC5yDrVMIDVt+s6A4aeSns0Mna6fJCDKAgZ8Qhz4Qy6CiHIQ0SqgYmxsjszIcWa+8cSMo9Y5bJjV9zJEZmQhjzEq46mSntqmCfDzlCWsYQIOqAxAGAT8bGcynydZehNclU7rEFjL2/rF7by7ndVAiZH2ZEUMeSAmhQxd4waV/TQTjUrneWWD7eBfT+CDqfPgNSrAOcY8L2nQiz0lGNafN0Eyeq5jQTUrzIIptVlsnkZWkduBfXa9p+COxXQSIGoCyAZ7GmtUAcYHC/qtuj5G2kgvTS8Apd7TZAG+yEzkgwH86rJHAb+Rvedgb2D3LWYcqUEv/q5KegKIsv42zD3iShflK+CXyjXfrwPh7NlqsMbhGOddRsr3HlCufYvvyrO93g7JjjLIN9lkx5qdeuqp+NEf/VF8x3d8By644AKccsopR7tK14ldeumluOiii/C4xz1uNJBlsuvevvnNb+JP/uRP8MY3vhHee7zzne882lWabLLJJpussuMb9FscAC3dthtNK98ZFpuFrF1U0G9rE3GxiYFsp4CEFtRTdl+/uUBYdugXAvzZfG/Owc0aOO8Qlg1c28AvhdGj+WWcQ5zNB8ntY3JmIEUpp1x+QeU8g8h8al6/LGO06AMWXUhgXynraduHN4VjgF9rHUySL2ajcRK1L0w/B3E+Aa1DyfCzEl7R5Ess2GUZ7EtyqjbfGLITJTUPMBppzrnHfLnh3YbJd9C2DciXovxXlWucU8Um3wKTdVnVMYrixt5LQF4NHBsHn7kvasnHAcCl9QZy5Lk6rKIAfDGAoge0b4QFpsyGdB4F8FeCthEAdepUmrGDMQbE0AG9ZCsMTfoeGqBxDcgRemJWX+9YgjZ6CNOvjExXU+enJ2TnqwKANu9k1YaFaS4edNzfIm8JMAtt0D8rNiIpIrxfZAbmdua4XZlRGBMgXNjY9VcByAdtNg+pkzyknqWJ83siyWqjymv23ApWK1ACflqz2gmU+5El9mIEIH3mNLdWDCkqHsC2juNR0H9kXEhAnTIx849GXtVyXplRzh5Ucy2RQ7M8sLJ+R8rs
2H+kyp/s+mmqekAqwRw6UO+AxX74di8oRLQucaFZblnuBw0GmnkG/KjbLGW+gXKeSuzdRXZ8j83b5IDYM0PcrA0Sa1zrrmx/P2O2y8Y+oJkjzPagdzN0IWJT1keLwOsrK5OYmeEAQyVKm2K2H7zIF64CJWPkuSx08H6GxmleZEIfeT7aN/Noe5XtzDmZAQalnIxBFvTTOUwlPTWPn8p6Fiw/E9xCKaeylfaumN2hz5LoPrP6yOWcfontZ9ZYcWSsVwDpkKxer4zMczZvYCQn62PzCCpTCZPHTyUrM+NPrTd6hUGQVh3fFTh1Ip2qgNaoLG3XwSodRAFNAVmbKutP20wlP13DwF/fsXS1tgMEFIwCCtq8teSSPOoYW17fyjnTzE/HYs2ifj8Dffa3FkRDai0JChQwn/ocVDnaf3LeJABWERxkwClb7yJoZrt1TAySu5DlhfU3qb6O4AXA16NEAkgVNGJum5qhP2YW8PMaMGbyYmemnx4rVWSQQ7zOb6fn7HWNI4Fv6FVGncsvlBRSMJOqY7Ry7zbpuldzxDlZOWATKX2AVtG2BTAMYBptfmnvdB0d4jWW61b+r4ByYinr2qqvAkmPkk1rq8mOBXvWs56F+9///njDG96AF77whTj//PPh/TZs/OvYYoxrB+YuvfRSfOYzn8G73/1uPOhBD1pr2ZMdmr3nPe/Ba17zGvyf//N/sLW1hVvc4hb427/922PqGpxssskmm4ztMJCQY8hGAA6qI8plsx67ZQL84ua1LOO5/2rEA9fy/90Cw7xWepiAftnxY8EMv/pzIINUMTD7LwgLsJABtQzCMSNXbLqDbMz6EBECsOxDZvkVTD8kwM+CfeOAH5sXWc7GEWaNw8y7FD3eOsJG4yQHDQN+jcufqWPPOyoZfjWTr5D8yYAfuo77oltmwE/7Rx6Qz1SKUFmahTk/zNmXIsIZlIjynfTaPoiKh722xp6TTKx214hzIxINnGPbASG2LBKZpOLRLYaP5SYDrMstUL8sHugX7ECVvJYJkO3VYRVGAb/UDs5D8+1xGzaSB2kGSD4ktDOgaUDtDG5jD+esFDk1amb8fyMOCJG3LaXCxIlYWxE1HfO1FLoEMOs12+q163d+6HcbvW6RnQnbAn72/WCuYXNNqwPIsgfHHsl5WN0X2vaj10iShaLhd8auqyrSe9RUpk3lcVOfzVJQAm3IY5YfmG1w/7tGnkVGyvM1UoDbzjBv3c7XftHk9nU0TOc0HqK4x9WRqs7g4lHVaxWgQOah/djYBwIaBPjY8SMs4LpN0GJ/erjFAQ5EWRwALeXRbfGj3zqoc59ssmPCFBTS+RpI4xWPw3lctbLJGlihAUGwARXFvBlHnbcUYwoaGss5m8bBpsljVqs5+jYQWpHwbPcitntE0nNPAvy2+ojNPrJEaYjoemb6LQPSoxMgsBNlBQYFIXniYIIcxjzcee5Sh7WukzhYilLbbTQOe1tfPObepee5dyzl6RUc1Nf8vjKNVG5RWX4a1JJYSPX8pq/rIA9lThrAT+dtZf2lOftwgqhW2RjzfNUcJ4BGSP3BQSH8yHmwD1Ye72AJ2FRfo2k9eBjO+Rr4Tq9HgM/q/e2qqzXR61WVQfTRGZl/fSgLMiKDMdqegHnGMKgq1UXPI2QwND12AHJz2SUQFOxcb767yvQ+0D2KbQ9HeWxSEDeBilSCoat6U1l+luGXzrc3QLCOX6uujxqwtn2k/WLWOBlcH1n76dpG10Cy79A1z9i52OtdX2vL2rbY6e4eC7Bbh3EdKCtwjDFsKzsU5vFkk12f7L73vS/ufe9742tf+xqe9KQn4R73uAe++MUv7rrcD33oQ1gulzt/cQf7h3/4B/zUT/0UPvaxj+26LLVLL70UAPCSl7xkV+Vcc8016PttfHGHYSEEhENV0jmO7fa3vz3e//73Y2trC957/NEf/RFudrObHe1qJfv0pz+NAweu+4DbG7r93//7f/GhD31okHP6WLAYIzY3
N492NY5b++xnP7vW8q688sq1lrcuWy6XaznXGCM+/OEP4x//8R/XUKvd23EN+lEsI0st0GRzZCmzD0bSMxy4lh+b1zLLTwEmNcPyU/AuLLsE6KU6iH5gAvzScwb8kgSoBawMo61mgalkEZAdGbo5DxKtrLJFIWqy+gz22fx9wBDw8ykPTN4ke0fZoSROpVbf8/o9CGhSAn+wgJ866/ryvcS4VMCvWwgQa4A/y8AUIDAq+zJU7QeUoFG1IS5ABwEmioc6t7wBBbBiA6ngzkFElqZjAgPAL65ymCkIZ4Gg5FBYcoS5Bfa6rfSMfgF0mwzydZv86BkYhD76DtRJOTtEYxftpw/fsmNBgL8aAIzOA7MNUDvLgJ8FAA1jgJpZlgtTCbHRhjR1DBn4oxjQEDBzktPTycMT5o3DXMDr/Dp/PnMM9pFx1gylgWPxSHUxfUWxCi6w17a5VoqHPSe5L4pj2GvHgsVV32wLWNd9aF6Tz47d9OyZIUPtLIN8G3vGwb7ZXOTxZhnwaxgAjAYgLtl/TcEAtM69nRaDtZMxRpZuY4euMDrGnF/1Y8xJtgqALK45HccMgL7c5MdivzwLqKcA33I/33/6bB7oF9ue75GwOvjjSDwmu56aymxb4AFIr3ntABNQgZwvVdYVxThZm95j1djLa64e6HidMAoUyH1MnpnHDPjNGfDb2MdA32wP4kzAv9neDPh1AYueZc+3Ogb+NK+fSn0qQNIL+GeBk9oZv+040vO6R0FQC/jp642GsHfmMW8c9rYM+G00Lj3morKg6zDN4ceMSgY6FOjT3H8u/Y9yTLPgqeZC1Pm3CAQxTG9lUaagHb/9WgkoQJUaZCDzPAoEDwqr1sZmLomJxcfSnlrS4YxLzg3PJYEgFjitwFMbiBZtQN+qwL6x4KIakK2CnupgIw1OGQMs7V5hkOtQHp08dB4N5nscZBOLxwC3qvtvDKAMuc0ssD/IfYgM7tUP+52djJD3Jp6ykGk09WVJYgyAv53Krc1KelpZ07T/2Q7klKDODPaZeboKcMrrm22AP7u3cE0B+Ok6awDWGnCPiBhkA4qHfjbWRnV/2LXawfr4RsFHG7wAlPcBAJuyw7bn0bBpbTXZsWBEhOc973np/8c97nG41a1uddjlLZdL/Oqv/irOOecc/P3f//2u63fmmWfiFre4Be5xj3vgoosu2nV5APDVr34VzjG7+3Of+9xhl7O1tYX73e9+OOuss/DJT35y1/Xa2trCeeedhy984Qu7Lktt3QBiCAGXXXbZWsq66qqr8JCHPARN0+DUU0/Fi170Itz73vc+7PLe/va34653vSvOPvtsfPSjH91V3a6++mrc+973xmte8xrs2bNnV2V13Q6KTAdpfd/jwgsvxI/+6I/iX/7lX9ZSJgB86lOfwgtf+EJ88IMf3HVZ//7v/44Pf/jDuy7nXve6Fx71qEfhgQ98IBaL9fg+1nXdEhF+7dd+DRdeeOHaQMlrrrlmLeUAwGKxWDtY+ulPf3ptZT73uc/FFVdcsZayvva1r+GCCy5YS1kAj2/rurc+9KEP4Q//8A8P+/dbW1t43eteh7vf/e44++yz8fnPf34t9dqtHdfynpGMczk5akMJNHVdyfLrFhlAsrKDIqVnTR0iHg3AqWLQLzugZVDPtQ1iCPDyHPry9847uJblPZv5LDGeSJlPTTtwWHMUaMzSnhJ1u+w1KldkiszGMcTtNwre5GNRgE8dc3tmHt4R9rYerSPMG48TZh6tpxR93hq2VMGaEsCPxiQ9DbhkN8ax77kPLLuvt/Ke/aAf4FyWken7xBTLHWVADtfw5tg3KJxzKy+iADgUzgrOJOby5yZq1gJ/hUSRkcOEz/KHaUNuN/DWqkjWQe6d2plS/WYATO0ETJIDCVvBSg8V+Ubsuem52nPXdqrqG2MANcbBU0kdFeVZqa4qYjn1VwwsCRU6IDqQlSODOL5SeVW7xrKvAIxGDRd1s84Eba4o0lAF8CefIQz7
c8QiuaEUqz1WDb4CfB1r39Oqvi3HrIgwbOvUnyHL4mneptncfE3kyNqZYehlQC/KGDnGnCuuI31WhqL5fnLuRXbQ1pJkAAY58zSXkOYM089j/V0aycxTt9cq8KF6PdZXY5K5Vha2cEZV7R+JWOJ4ssmOJwsB6DqQ4yAdAniM7hdo/AzzxnEu4ZxpKgcDYXyMszJ4ZS6smAOmTE5fZ+QR6yCINHe4hkE+ZaHrHCyfHegjuj5gs5McxxFYJIZfGCw3eL3jBCTgsSoBLQAaP7KmMOenaw4CeE4E0PgZyDmgESlUAH0gLEOEp4jWeQYVDbupD7GQX9dX3gEEEnYTJKdyGZTlKYOuVpZyTBaPmhYI5ZoqBYYI4KesSjQmyGM74FPqG2FYVRhz6I8AJLY8nV9cg+j5uJkhhSSDn9h+IxBRyt0LKsIcfQVaKoDqCCnPm9O66vM2qgApCK2Q9qxmJXtues3AQdVCiEzuvxgKmU9yfD0BgJP5lIgS0pJYeuBrTAG7sW2Bk85ZRZKqpR5DzJKZxemYuS/1pZUtD8iJldPB8/9at7qOzhzXzvGjQJp8v1w3mDWCfKEPEZ44z6EDS+7a4wAlyFeDUSmHuew1t5U1tWOVnjOVkrS6n1OLiLw2d1wTAsvHR3Ip33Vdrg1sTPsf6D0h66yx9gXnFtWe2I5DOriEV35z/HrSS9QRt/n4uECJ5TcAlHVfmQp0ZsYBxlI+TDbZDcXud7/74QEPeAAe/ehH42EPe9hhl/OP//iPePSjH41PfOITAIDnP//5ePvb377r+j3nOc9B13X4sR/7sV2XFULAe9/7Xnzta1/DHe5wB2xsbBx2WTe+8Y3xrne9C8985jNxu9vdblf1+vrXv46HP/zhuPjii7FYLPC2t71tV+WpPfrRj8Yd7nCHtTnG//AP/xC//uu/jve+97246U1vuquyTjrpJDz1qU/Fj/7oj+Ktb30rnvzkJ++qvDvd6U64xS1ugde85jW45S1vuauyTjzxRJx//vn48R//8V2VAwAvfOEL8c53vhPPfe5zce655x52Od57POtZz8JLXvKSXbe9NSLCjW98Y3zP93zPrsv6p3/6J3z0ox/ddVmnnHIK3vCGN+Cqq67CbDbb+Qc72BVXXIEzzzwT//2//3c89rGP3XV5z3zmM/HRj350LdLDV111FR796EfjpS99KW5/+9vvurwPfvCDeOYzn4l3vetduMlNbrLr8kII+Mmf/Enc8Y53xCtf+UqceuqpuyrvFa94Bfbt27fregE8Bj/taU9bS1lXXXUVnva0p+FP/uRP8K//+q848cQTd1XePe5xD5x55pmH/Lv9+/fjpS99KV75ylfiK1/5Snr/z//8z/HgBz94V3Vah60d9Lvgggvw/Oc/v3jvjne8Y6JJbm5u4mlPexre9KY3YWtrCz/4gz+IV73qVTjttNMO/WDqUIXdEBnnUgwZ2Ev54sym0fmUJ47zcMgWKIS8cQcKNhJ5x6y9NhfTt11iAgJITEAS0M/PGrgZs5+sJBUlpkzOi5UidUPMMp96nEMA6r1scu3/+lxLejph7s0bj0Yiyjcajj7XiHSO4C9lDLOUT7XprcGVArAq2Y4aKW0BPyt/SpIHbtXGjrwvJIBG2VL1BlxNr5kY8ibSO6DvMoBSOWkG4FINfOjme+yRpBoNCGTA5lFgwbZpDS5Uz4ec10zNAH6j+RC1DHtMF3KuP1O3aPpa895FW8+BQ68CR237FQc359YPz/NgWJj8vRGQrzZTz5USblq/g4jES5mvxoDGumgdz+x7qZyReriyHvXv03VNkk+naUDRD8pKIOxYvj4r3yngtmW0Fn1lrvECGMTQ+TSWg0hfbxeFbz+3rwtHkNZrJ+Bv7N7ZCexTx+bY57WRA7mGx8jr2LoQ4Y9gxHg3RaNfp3Zdrq3yfKzBPD1i3wE+5131zRxwOa8fYBhmY+DI2HMF+OX1moxny4UEIiCNu0WwjcwbCvjFdp7GKWU6KcDH
jD6+bg90Pculh1CsqxjYcJIfjNC7CBcpj1/yTGmOMmsVPR/kuZICuN3IwbsGrYyPTQTIjFgxRiAQIuV1H1nAzzDnFPCzUnjKZlIgsJDHq8cyKKjnEV1ehxRjp2H+WWZ4MTcfRMBLDaIMAL/t5s86iESB3sSIiknecxzYYvAzxCgAXzlDqCRkWs8Ofm/+2W59oUE0tup+GJS2rZm1KAFl3raokx2g+f1oRXmq/gEgpwcYWW8EuYbiCPB3yAHBNSi66txg1lPaJgbkHvwM2wNMRXDWyG/rc1fQyRE3pQMlGdh6vWHbRO+zdJwV49jKa8Ssq7MMbUy52wFhyrncJ9EisuY+sIFdq5QhVE0hYLwvHfFeUoG/WK2nxtpjJKSqKnP7z8eAv/q3Dga8VUWNndbrB3NvHQGb1laTHStGRHjTm96EG9/4xof1+xgjXv3qV+NpT3taIYV4zTXX4MCBA7tmSs1mM7z4xS/eVRlqzjnc4ha3wC1ucYu1lHfCCSfg5S9/+a7K+MIXvoBzzz0XV199NR7wgAfgW7/1W3fdbhdffDEuv/xyvPWtb8Ub3vCGXdXP2nnnnYdzzz0X3/jGN9ZS3s/+7M8CAJ7ylKfsuqzb3/72eOc73wl3EOvKg7HHPe5xayln//79+MQnPoG2bXf+8g7mvcfTn/70NdQq213uchfc5S53WUtZ97vf/XC/+91vLWV97/d+71rKAYBXvepV+IEf+AE88IEPXEt5+/btW9t5zmYzvP3tb19b7tJPfOITeMQjHrEWwA/gMVOZzOtgDq9TvpeIcJvb3GYtZbVti+VyiSuuuAJ/+qd/uusgkz179hzWGL5371780i/9UmLL//3f/z0+9alP7YqBv047Iky/M888E3/xF3+RD9Lkw/zCL/wC3vWud+Etb3kLTj75ZDz1qU/Ff/kv/wUf+tCHDv1AyaEcQK5BDB1HU+v+O4bkvIrLBeJikx/CdqEGAFrE0IOCbNJDXwJMbg5q2gRE5aDT/J0G7CRTmdBodMKpzXmy3J59HDW970S4+T6WzPMtYjNDbDcQm7k4qGCYfjEx+tSGUbGZvWdNgT9930p5zjRfn0hHbfgsLbW3ZcBvT+swc4TGk+T1A+f9c2Cwr1vAShtS5WSqI7ljr+BeEMlOw+yzgJ9xoCTAI7gh8GfkMlP0uTL8TDT6gIlkzTjDEmAloFwJaLn0fVtWnbesOK6AJtE3g+On86qBURvZqqxVfa92LphNcXHNicPJnjdZ54A1+76J3t+2zeTYESVQmUDBWAGCtt3qYwPcZwC3nalT+lpiM/a5rEMAPkfPeez/sSjqse+Pnc8OoGPByBw5v8KBEwMQia85c66xPucCPJY/wlQF+fT7CGQmqgWubV/b+6YC/aJK4NZttN05y/d68ZbbyHMNRsiOueqnlME8O6IFAC7mxPDqFBwFCWuHXGrrCtyrvwusBvoE7MuyaxVzxpZdOOqw4/Ux2WQHY9fZ2iqElHOXpZk3+RrWuSz0IHJoXAPvs7O5AHaAFOCSQO+Re2Q0Vy/06z3i1oHEMsuAk4xXTcuAX7sHsZ1jEUUpYRkSI2xLc/j1DPYt+4j9yz4xbYKMRyrx2LqAPnrJbyxqD0ToBRwKEfBlJQcsJyIHuC4DOaFH9C2aZobGN/BESUrU90CIBB8iQqQkW6ltqmbzkDnwhsk7dsinvIGUZdeViVTkVbTmPOB6ZvNJPySwSqQ9eZ3c5ryJtYRz7XTX8ZJcMSZT9fl2gKReGwWLSVjn2qead5ElEWPRVt4RQhR2uCO0zBGHi4Rg1tKeiD93HPjmHAyYqm1N4+O71JG8T/sJoFp7jQRQbRdUxeXr9dKl9RBFl8RLyayznMwtdTBgjEYNJI7Lc7oYE7vLYQhy1bajFOZBrAUOxSyzsLh26mOSK4G46lj2F3zflMAfmcDK+hSVgQaMANb9MNiRzJindYlEiaHaybXLMsJl0BOABGbreUc9n+q8B8Cp5kyW+4PBRAXDy85P
0LcGEUQG/vT4O4Gflv1oy6yt7vFgyvJEA7DZoWRSphzkqwBVs5dJ+4bJJruB2uECfgDLyT34wQ/Gwx/+cGxsbGA+n2NjY2Nt4Mv13fbt24ePf/zju2Id1vbFL34Rj3nMYwAAH/vYx3Df+953bSDASSedhJNOOmktZa3TVGr6WLM9e/bgR37kR/CgBz3oaFflBmvnnHMOnvOc5xztaozafD7f+UuHYA95yEPwHd/xHWstU+36PKbv2bMHr33ta/Fd3/VdeMc73rEWZvlu6nK3u90Nd7vb3Y5aHVbZEVktN02Dm9/85oP3v/nNb+L3fu/38MY3vjGh7K997WtxpzvdCR/5yEdwz3vec7S8ra0tbG1tpf+vuuoqfmF3KAOHgTAsmhlvctpZdvKKxGcEKuaZkfsEoPmvqBVpJo16ls/y7wSs2thTMtcAYfUJy2++D9TOGPDb2MM5stq55MiyskUK9mnUZrlx02jI1rMzgz93Ih3FIOFYFOKscckxpPlhNhrO3aeA31xAv1ZyoTUmZ493xICfbsoMy6+W6lqnk9vKeZIv88FxfjMD7q0A3gZ9ZvtuBGSppRUzgGUB4SrKVsESkaOy+cxWgSWJNQBA0eoCPE1yqWYDrBrjoUcYc5aGLNUIIAWKW1sJ9FhnlRsBCQcFBdSsjprxty0ooq9XAW6oADN9rhwt6bORSBYmb5TtP2ATVo6a+r3R8x57PTi4no++Ya4f+ztzvWj/F9elPW9zDSXwOJUjf0KQY/r82+o+sNKbA1k8ua6tTJY9oJO/etzaWaTf79NzdkJZsK8e24pSSY/NEp+rZKYKcLC6Rqi+Lkb6bcfryzrzR2SLt3MK18e6Lu1I54aZ8s5c93Zdra00j27sFoBzcApmdG2Wrew84EOaO7wBuKnv0jgzuh6Q75JrQD7yemmEMZUYfyov2prAGZ37/QyxmaGHE8lOzW3M485mFxO7b/8yYNkHkfmMZQBVULk/h62OAZg+8NjSO82tDBRubj0vy/wFgLBEjD6BEtGCAj6g9TM4Iiz15zqESICEVmwUgAESw88h51G0KgzKRt6JiUzOIzb5deqzZsbrX5X0NOsEXt9srwqgwN+gneqxE2b8zZXKz2Z9ZRlMui5Osp4C5Oh8wyBeRBsJSzC4AQibSvqPGZEM/CVJVMOTHzR9VS8ALD3bNClwJ458b1RlYVW7hZDXKnZNBQH+QuDp39TPwfJNx61mcOlKdozRZQRGd2R4pXNIbVIDy27wnSIv44oiBzkhV82t9XyuY5AxK1VaA3+61rPrEAtuARnAUknUVVbL/WuePT3vICC1rmnsumoMVC3AzLFz1zW6yeOnZW632iA5XhDgzcniVMG/2uq11wAI1XJpCPblMrJZADDdayLrmc55RIo/H8ht//91YNPaarLri21sbOC2t73t0a7GcWvrAuOsfe1rX0uv9+3bd0SOMdnB2c1udjP89m//9tGuxg3ajkXw5EjZkQL8bghGRPi5n/s5nH322YiGIDBZtiOyWv7nf/5nnH766TjjjDNw3nnn4d/+7d8AAJ/85CexXC7xgAc8IH3327/923HrW98al1xyycryXvziF+Pkk09Oj5omOUgOb4GLhoE/ms1BG/MksYmmBbVtBvhqZ5M4PBJTb74Pbu+JcCeeIo9T4U6+cXr4k28Md8pN+Vn+14c76cb8/RNPgTvhFGC+D7Hdi9huiMNqAz01EvnNDqA+AFFycvTV+XnrqJBIZQXxZt5h1jjsmXmW75TXe2Yec5Hs1Fx9G+a1fWw0nLuvEeBP8/i1TvLE9At2KI3Ieg4cfIfj7FaQz0pLOQP2aX4xy45SaUgD9qX3fMNsSl89nLzfzIBGItnlAT/jvtHviWMxPdp5Kid/V37bzPiYfgabqzHqxlycAtuCScIOoMT664DlAlhsMVt181qEzWsze1VYptD8lZI/cdAH1kFjAUvrpNA2S+xJ0xbmwdfuHHG2lx/tXB4bnGOprR6zffl1s1G+
1rbXfivqWDlZxwCZvgN1S26zfsmP0KeHfic7Z1dFD1NqjyKSWh+2HQ/WFEwz1w8k+jtdH6a99VqDb/hacr4c00Ye6frSPHy+KY+pfTPby2PP7ATEjRMR5yci7DkZYX4SutkJ2KQNHIge13TANcuAa5YB1y4DDnT82Or5ocyZRdDcWDlXjT6UydKFaIIZOD+p/q9MBXVYqUMMGJdsq20A+AlQTpZ5rBHjKb9Vn6+Pfsn3Vr9Iz9QvMpO5uq6ypLFhOGuZMQ7lqFY42yfb2a688kqcd955OOmkk3DKKafg/PPP3zFh9vd///eniFF91Lkm/u3f/g3nnHMO9u7di5vd7Gb45V/+5bUlaz/Sdp2trazMZrdE2DqQxlV0i5wrbrkFtzgAWm7yvaAF6703xjADhuO7yYeW5vpCYjQwu1DrRU7GyMxAUwnPRc/svk3J47fZBVy76HHNVo/9S35sCvC36rEMQRg5MYGDdowqgndssJMZS4r26hc8tnRboOUmqF+gIXBglQRVqYx6I8FWGqDlgPRITD6nr7MT3zvKObFCV42FVWADUKynSBl9ulaW12jKecqqKYzOz/YYdvyr5+0VLOtB0I/Lc7EymPqQcx+GERSGYFiPIl3fela20HWsAn4q78ksP5VKpdXMtlXy1TXz0QTO1O9tG0xU21hgibmnVlVTpS1r6dNVzHqHDIbqa/0/S8vmo62cml3ZLratRqXSx4o41H36yBybsEIBh+0eSvseKM9NH0U5dV22WTum56LPud9zLvYoe7uyL2wfUX0NrgqWM8C7BmiV/T4SUGVkSi1zGCivgfT9Sg5X/6tBwEFAVjUHWxaHHc9IxrMk61mPF9uZ3edNdtA2rasmm+zYtssvvxwA8OAHPxjPfe5zj3Jtbtj2hCc84ZiR5ptsssl2trvd7W4T4LfC1s70u8c97oHf//3fxx3veEdcdtlleP7zn4973/ve+PSnP40vf/nLmM1mOOWUU4rfnHbaafjyl7+8ssxnPvOZ+MVf/MX0/1VXXYVb3epWDDg0M3FCYbChjq7hDVrrQE0DWswRZ5sIV38DWGwibG1uz+6bzRkcNNKcbmNPZnKpVSwnAAlw4TwoJKCKPETSE37G4IifscPKOJmso0nNEzuGgkSjLymiiYTWyfebMhegNXUKaf4+doJQYvVtNA57Gg/vgA3v4B2SpGcrjilPyI7wmvGyyswmlbwHAufgY8lUB/SZEckyq5UjRJ2AIjFF0h/UzjLAoUCVlZ4SICUBWhgBEaiUoBrdcK5yWtpz02hboDiWHq4mn+nm2RGzUSPAzAh7/do6hQ7oOmZdCKCXJFFNO0WJEqd6E+xKMDQ5TMfeq5wIzNzIzoRVxlH9jn0++bD587EfjbVrcS9l5yCLUY1Ed49WxuU+rKPsLTvT9tkqhuMgqljZCcwEQQilA9j+xjplXJPaM+UPCl36vm1vApiGr+WSyM32HZQymFip5jtFM9px0DJP5R7JoBzQ9xHLRY8IYBmyo1D7OzlriB3NREy40Yh4ZkhkSbpUB8PsY9ZyxfIzo1vU8yKNQo/COlgdBT+Q3FIZO9O2BdNFv2ueC6ZosNdeyZAZyPBq2SjHen2fJVav+wh0a+EIR6OHI1g2wPknLrvsMvz5n/85lsslHv/4x+Onf/qn8cY3vnHb3z3xiU/EC17wgvT/3r170+u+73HOOefg5je/OT784Q/jsssuw2Mf+1i0bYsXvehFR+xc1mHX6dpqsYmweS2PuBtzltEF0twSAQ4GAcxY3eUxNRpJTzVdkwE5Z5nMQUSLdN9FIM9z+lMgrxc0tYbM9bGZYRl47NrsAzrJ3bcMEcs+4JubHZYhJJbfMqjzvQqmcur0D1g6AtBj2Tp452U9RsM1hJx7UjzoF4O5gMgh9g3P9S2fM8WI6Dv4ZoaZa9DLcSlqLmbJ+EcxjY2pSMrsmyHABQlckDWalfYsAD+bz9qsGYyCAszcBRI2uJlDyiCZkXXLyP+1BPxK
VYgqKCmrYOTAEO2LAjABgYi/1HrOzRhihO8j4DmHozVnVC8aRwOWl62LBfgoNszEpyyxPVgzjKznDhb4YnafuU+ALPNp1jeOnAC95YVZsh5LRpkysvLr8fO2stmjtU79Q3yNu4bnYFd/vuIxYvZ4tVSwlgmYfZGUlXNfS+ABNSKTG9N6o3EZaCJEeBW7pPIayuvz8b6Kek+TrEldk9dgmvPYtwNZT71+7bWr7U7y2lPOM6ng/QBkrMBUmLWjSqiPtau+r/Ka3M4y3tprxIw3GRTNbWLrbAG/1O1mvTiwkTZNfV3tJ0clf2UfUKhx+Ose9Due11bTumqyyY5tu/zyy3Hb294Wr3/966/XknzHg61bvnGyySab7GjZ2kG/Bz/4wen1WWedhXvc4x64zW1ugze/+c2Hndh2Y2NjVC87RVzrBkw2mZy/TByyCvzFAGpVRoqdFOR9lskxQAm1LG1kAT+azYHZBkIzH25czcakzrtlN2ZRGWoFM4xlqZYhJIafblItxuKJJWm8bHZa9dFEyTcTI0KICG6cGQggSYIy6McOrg3J6TcXwK/RSGiHLBslgN/AgbQqItMAEREoQUFxNkUnGUWkD2D6IYYehYyqgrBNCzTMzoxEBchX55rRDbc6jBIAVwMT5thOHQi2H90OoJ/2rbxlc/Gs2oDHqHlnAG8jgrfTaFKgDygAv5SHR+TRLOCXAOd0DBOlbwE/AYQsQNrH4TnkqOTawUTpw1UOAP5efVJlXDE7ofI9FWMAoUveigT8GccP6cECTN6baBwjxjkwJsdqHZd63QAVYmlAPXkdzUfcKHoSQ4eMXoMJSHMkztomnYd+XrarjGWhK2Vgq7oMgUnjFBkD+3ph3JncSAz2xSQ3Z7PNqFPGE7c1QcYdAeUIWX6zvkZ6BfgM4Kfjk72MVFrKxdVAH4AiAt4CfgUDYgS003YrxqIxoK8q82DzaOr/KSdW3Sc7AdXHsSW5bbFV8/Wh2KWXXoo/+7M/w8c//nF893d/NwDg5S9/OR7ykIfgJS95CU4//fSVv927d++oBCYAvOc978E//MM/4C/+4i9w2mmn4Tu/8ztx4YUX4ulPfzouuOACzGazXdX7SNp1urbqOmGNL/IcDGQgyf3/7P1ruCVJVSYAvysi9z6nqrq7mobmDo2KgEDDiAr0gDxiA60jqMgnIyDIRUcZQUZA8YIKjoj6KaAjzDjIyKBcBgR9xLuA4whC46eDDDw0d56GprnY0FVdl3P2zoz1/YhYESsiI3Pvc86urq4m335O1965MyMjIyMjI9a73rWa+I4kGZ/Y5Q91aaiWOYFpfNhsdmDnPIECgLoFem/M6PQBn3dZE4l6fNVREoJDw07I37fTpZCefp7F2TgUT8V+XuXYz6d8DjgkEq5mh63NhfRYgs6TRAhzCiKQdYkscQY25GW0ws4YArvQEoxsXC1JGlGnWU0eDIxfPeesEE6VGt/nZb6Qz1klUgGhp+Iq5sGDocDHCL/a7lrlp+ZWDokwqR0pZJaQGJ0L89dAWXfMIb+fagLj58eR8JOypC7Mcb0gcwcyBswhZ65rB/MkZ9eifxsiOVeBHUB23++SGDITgfgq+pKuKWP4HZwh9oHwTqQ0p6sSfia1TX++E4pc83q0k5RVRKgupFQ2yvyBGXAUq53Cnq7iWuQ8xvg5J1E+Jw1zSVl/MPmwng5CytX7rqGkPtWqt0wVmx2Qrz1kbuWQ5lv9c6RrlDHFhHVl9nvlmI0QfmVTFufIMLLmiu+Ioj/dHLHpudU0r5ow4aaPEydO4M1vfjMuuuiis12VCRMmTJhwM8EZz4B94YUX4m53uxs+9rGP4eEPfzgWiwWuv/76zCP985///OBkcgxL50k9zPziG2RArol5ZdC1nghUhg+zdQi8dQgxfGQgU2IOPgkn2cxBR873/x4+D9yE3HuzrczTGcgX9jVDRpl4XS8IF52EwPOGd63yE1giwHiDhQmrVMeMrgkeh5y2AegpbkRZ
JOGMxLN5ZgnzoOqLZB/5cJ7W+NBTmvCTvHLVHCyR5KSwqUnbAvHqjTEE4qAWCB7mJDkQnRgRu0jCUuPd+mm+7bc1TcyB6MNwNolE1SE+Jal98LKVxbBuJ0Ev1I//FrY1g1YIjvcpz1fmzyH7qGZS3tSW/X1hQ2hCX4BzKf8Ptd4iEY0lFjBdrkx1imTQ4U+DKlLytLFumxByNFNDanK0aK/Ut4bbrzRjaA9pUnlCysU9qf3lECGoLZmkdgvEK3VtMpwGUodFLWdUzqSydpqgz/ISVfIuBiNKvH/xfyp/nVGGLTFOCNdaEMCOAQ55pqRc6zi75rJdDRGYQri20BbhxCK3SddNystZ9pMxhpLCsHWM5dKhdYhjjh57dtou5JzhrB42OAY0EnoOyRMdwVueSSnzhBBD6jMynqW8fnLdQhQTQrolOBIPfGRODhq+TYDo2S8hW3W44dJDXodEK0Pdsf7NDWznSPJxqWACMqeRcrzP7s2NCHmXnMnyAfRCn/zCL/wCXvCCFxyo7He/+9248MILo2EKAB72sIfBGIMrr7wSj370owePfe1rX4s/+IM/wG1ve1s86lGPws/93M9Fr/R3v/vduPTSS3Gb29wm7n/FFVfg6U9/Oj74wQ/i67/+6w9U7xsTZ3JuxYsduJPHE0XiHLC1DWcs6IhFDB8J+Gdvcco/I/o9I9DPgHJ0iM9XCBcOMl4ph4GQYK7zcwZ2cOKoEca5LpB5EuLzdOvDeS6dw4ndFrudw07rsOzyuVKm5rEIjjcOjSNYYiydw4zJO2NZr7pzwjQoZ4MsBLfr4NplKlfmk02YB7mkkJTB39o5Zsb49nYcyARFNqpqaoV1CvOpHLO0ym9ATRfnDcoxSIcKzJ2CFAEojkHrKOPVOQcJvzHjfijfvzsSYRIPLQgdA0/kNEHJRAbRecXIXL1CungHHFGx90MVxmu3s6i+i+uKrjI51G1Q9dIP24bIHF0Uu4xU9LkDw/aaww/CPJaRKQCNbEciviQfpBA4cmz5yvD57/J2cQxfj0gOA8R+zs/6nur+oiMflJedkUvpc1/plci+TGllCI1S+psw/nQuzckNEkEOQyBOUyopS6+Vyn4QnfRMA3atJ8udileg55IxR7ufe0lYz5Jg832PYhQXawiNXm+VkUb086Yc9PScvZyflz1UrlEEfRQ+aPVfatbUL2oEoG/XRPaV1zaG3n7aeasEmcQsOsTnJEZ7uZFxrs6tpnnVhAk3ffzMz/zMlNtrwoQJEyZsFGd8tnzixAl8/OMfxxOf+ER8wzd8A2azGd7+9rfjMY95DADgwx/+MK6++mpcdtlley7bh2/xCzGyc4gCh+zcG8XJpNCf8h1dzFnC1oKXi0i2APAGmpnP+We2j4Bmc6/us3NwM/MhRY31uba0UVfqNFZhteCNYfXEQ537+Tdk8T6zBsYxHDFmoLAgCIZxtT4qyb94SZFIIRgDzIyJeRRmRsL1pRCemQEExeJ7RNUniivfDiocY2gXIQKZnW/jrkuGPFcJ0RJz+dmYWyZT9DXzYIwpSK1AYHlDUQqpo8kcja5YiAN6wVq/ozVyTxMeKH43YohBuB+Go2cuDHljQlCZETt/LQAQwqGhcRKMKBFOKhSqtFMk/LT6UchRaSui1HZk0MH0yD4dltFfR5/QlOuOdaC8Pcs2zdtVGxQ4eg97Q5LfkxGe7dDHWNrGBcMSU64MDP/mJ1REWGmoLEKy+rrn11iGuATS/Q0+zdnp5BkWhaQYncTgkwj5vvHHBCOuY5+AljgYaJSSggwQQ4rK9WZGdWUQIuMdI9iHDl52HEg/l4W/22lDyDuXGzJm1vgxhwmsrlbXmah/T/X4E58JZaAtw8QOJdzV+WdEhSGKlh6xp8i+TOHilJG5HMO0+mVNoq9G+JFJCiT//rGZYqlnvLyZ4dOf/jQuuOCC+P2gKj8A+NznPodb3/rW2bamaXDRRReNhqt8/OMfj0suuQS3v/3t8f73vx/Pe97z
8OEPfxhvectbYrnaMAUgfh8r96aIMzm3gut8btjlwqvrAUj0A+M6/w4XFWDn89eBHQjzLAID5HP4NxuDAegQxxTKNdaCR0IaDSnKNCGw7DxhtxPykC47P/6V+b0EetyagfxYaNY06mbjj283iCMT/PgQ39uihNIEm5sB1ILMPBIyRN6JomPuETFZbi2kMZiKumREm4ZWy6i6aGIvhve0TUZkVN+jKxwbBtVKlTExU/mNIAuPCMCBwMzpbaznEeE96okqzo6PTlhKxaTjD4hCK5FbWrVZZkDTF63mvhlpp655XT+QkvTh4t1SO32YLMqUXNpJ+leeD1IROJSUXhzbKCfAShLajwF1AlL3l9p6Kdt14HO8bqTnW6vags+Qnx9x7jCgn+u4nmHn55UAOhCMl7zVlbwBEuo09gVROpeXUyHiSidOuTaj7kEZUYXKqCplf1IE6GqF3zD95h2rQpuGbTVHKz3m7IXwWzs/4wj5H3+Xvq/VfWfBmerGxKbnVtO8asKEmz4mwm/ChAkTJmwaGyf9nvvc5+JRj3oULrnkEnz2s5/FL/zCL8Bai8c97nE4evQonva0p+HZz342LrroIlxwwQV45jOficsuuwwPfOAD93wuB2+4dmEh29iUkwzORm9nHybQg4lAh88DMcM1M1C79EYaQHlkJ6WUMw242fIkSbMNbrbAzRwdqdxta3j8EfmQTUBOQnVBXSP/ShL2dBwA9kq81b6TCbpG0UBEsnjzi02DoCYSL9OwCG0MRQO7KGqqhhshY8LCiwDPGLILoW/giZmQf4yVMcoTEz5UkRFioMiNlYddnCdjlCawCsWaJrCknYVoKdV4Y6i1dI3MiyotcPa9WmYwKBF5I4CE8/Ie6oS5necKCEWoxkV0yLfj8x/aaBzT5Gj0fg3tE/P3ST5JpTQtvYR1DpShHGy1axwK1aSvPf4WVZSJ+BLDB4N9yCUTzhPUbjr/IUjytYUa1QyK8WSJMNPGTQD5tSlCSpcWrzUYXwFv0KmZGmqEqW5P/5wnkl8MSRJebGaAeXg2vSKBV+cA0tejQrdqL/OOGTutV8EsHWM3fD617GLIu2WXjOGZ0ceaEBZNnZKEfJNQVP06xhDFEKNUbviSNu7ZzNSTF42wRJnBUozbJGpuUfgpNXIk+4SkG1PzxfLUOOR8jsNI8o30sXKszYg/GRvPAukneYTOZPkAcMEFF2SGqTH81E/9FH71V391dJ8PfehD+67Tf/gP/yF+vvTSS3G7290Ol19+OT7+8Y/ja77ma/Zd7k0BN+rcatmClws/N1K5j2k2D2FsW79NctkFpVsM7xecrLLcomXobcCH9KQQoSHk4+JuEd9nPDa2BzA04ccplGdwqlp2HBwbHBZtKs96hieFTg9jT+c4KrdlrI7tUijUSDkUEDOchETVIbixhITnNhLO3LVAG+Y34bpteP/LuNeF94OE49NIofYoOWKEMbEX2rjmsKWN5pQrlJgoKihXqvv2YnTXdamhQhKDTHAY4RjaczB0YdFIMchA2Ozf7RQ+q/0UIUaE2K6yiyi82DbK8Wqe5r61axoiptX+WW0P8H6QcJWG/JxZO7E5IbWQE35CFJchG+U4AFhVo9guVOS406gR/+jPF/W5Y7tIm1BSBuo1VyS5uE5Upfbxv8X1TNeCTBMjKDD5h8zFfpL+pUCCGtV3ZD5a3mM9lxQnTj3/03XJ1LqEXOHXLurPSnju9Bwvj6BQJ/zyubeqL/r5/WLTyykHypJyynCeQ2Tf6Cx2jb7vCd00n8ocDc6G0u8mNrea5lUTJkyYMGHChAkThrDx2fJnPvMZPO5xj8N1112Hiy++GA9+8IPxnve8BxdffDEA4KUvfSmMMXjMYx6D3d1dXHHFFXjFK16xr3MtO2/AFqWQX0DN0MzmsHA+bBQ7EC0A5jxsETtQswW41nuuh3xMEhLRqX0jyQRkHsiy4JIJem0xmxY/yegt6h9NpmjiQYwQNWN6zcNyzJuyZyxC
f9FZfraU55WIizJjAE7J1CMpFX7P8hmaZOjmQnHDhSEqNllgFrKcMmJosoXKj4wnszTJUSy2RV2l2xpYj6ArkavblBEwbuvvnykHw2cJrWNNCvHp1aqe6bJmDjsPuffaRTQKcmujgpWarZy4ACKJ12sfUfoJMRoMi0sHSN6gUo02RPSV6rcqOVag3/8AgLPcbNLv2HH8LCHOXNjmDW3+aAnd5Lf1oY11elvg3AP53r93NaNJdg71c6n30m1WC2ep++XpZVLXzQxhuzE4NDNgJlhjAEcgw8EY7cnzlXS/yvEiz4KEupNQnkL6LYK6TxN+S5cMhQB8COCQ/3O7sWiMD/c7twaN8b+nMJ/BqSEQpgaevE15DLUhrZ7n0gRjqx6LSqMYIRF2FMLYZeo+Ify6tmf4Xofo66n5Yg7NkbCeCIQ74EmKuP/cv09EYX4zVvrtBc95znPw5Cc/eXSfr/7qr8Ztb3tbfOELX8i2t22LL33pS3sKV/mABzwAAPCxj30MX/M1X4Pb3va2eO9735vt8/nPfx4A9hUG88bEjTm3AgJx3S78F3kGmjncqeMwrsuMsFn/ZuedqYwFBaUYGwuebaddyIdttM02YLxjlmEHNguYbgGau0QqCkwRDUA5IsWx1yE6MCwdow2Kv6VzOL3wZXWOPeEXyx0nDQbbR78v2PkxpF34XIhdXnc/lXJw4lgmc0vnyVMhgwg+xKdwWKSIPyCRIpGoIUQFk4x9OkxeVseSiNJkX6nuW5fsq5FbK8a6oVx+WRF7IBOH5ri2d0tpdO4nTlm6azAn9b8NTkdkDLgbDp06iGK/SKwUSq5snzFitXJe6RfMBEdB+aiJJuTqslLB5ZgjsTl6KeznZ3F+VK4NYv1pvK8o9NYxan8d0rN0UAP8XMfaMHcOTp6NIRxqpF0IlkMo8K4FrAObJt5TCffJ7Mk/TfwBfr5HYU4z5PAFJLKmK46PlxSIRxscKyW6ClwLWob1qnZUiiQXINEpJGR7V8zfs/NAEbqorxVtaFcQBqMtaAwRfXK+oW37AoW+pBWO6rf4LxnU78hXFqZ51YQJEyZMmDBhwoQhbJz0e8Mb3jD6+/b2Nl7+8pfj5S9/+YHPJUbsRB54MqVjeOO0nQN2kYyuzmYLZQZCPrkW1HhzQDR26LA02pPQOYCcX+MiLD6BSJoAOXmglTo1QkWTKQKv/Mq/A8lTNynGwu+Fl6XfNrzk0sfVFocSQq9aQlRJibeyIv+Kf5MhC1EByE4kgHY4/6Es6mqhpYq8fYwUSkcrqIaIVWCYwCtRmlSqx4elaD1sqLo3kjeDgmLMEZhEyeX/NcRxcW9FtQoAThkIuxRKsOe5r9uHTFJCSijPAXJ0TNkn/bMk+mptUUXxe8wjEsqmcP3Jh9or/Yz1BigDQpftO26Qqt3bVeTeHi+hes5S4eeg1btJhSJGaQmtCRjYjrHV9PubQ0oVWDPYRhR5CdN95Uh+c/jMjCzMnfQFTcAaQzBEkZBsjDdONZbQmOCVbnL1na+vVxXUqbHxZy27tPJfErUnovNAL2ynMnT7kxWEX6yEVvyl8J0ZsVeQfXE70Ff8GZPC+OntQSkYFX8T6QcAuPjiiyNJNYbLLrsM119/Pf7pn/4J3/AN3wAAeMc73gHnXDQ4rYP3ve99AIDb3e52sdwXvehF+MIXvhDDXP3N3/wNLrjgAtzznvfc49XcuLgx51YRQbHGzgBLr2Kjdgle7ICM9f1bv7MFHN7zXeudeQryQoziHXsyBew8SchNVKVTeHdl5JmEyuS6Yt/PB/T4xnFeBqCqzigJP00I9omj2kn74wX0OALvVEBAUgByPo7JnzEmvhMNJeeSjKOk9LbUKr/MqWENRV1P/S6EX83pShSacry+3yXxux8nh54hHxhShcVDCDCciDzt0AbUneEsSBEd9TI1XHhnMlEIr92kyCEqzDiwgswcIvbCPHkTSnD93oxto3MqI1f4lYSf/Ftzfho/8Rr3eyRc69gj
xupfHcZSz1u9UI/RMYXnJziHUYhaIkT5YpE9d4Q2kLhNnF+40DGI0pxOV8YQ9eY3tTzGtRQLOkJCdGZCcFLqvFKamBFDHgP5sxCeDRfm8CXhN+RoqOdoZVtLaE8QRWVsbZ5WC+FZK6+6fUgJW25f1YfKMUePUV/hmOZVEyZMmDBhwoQJE4Zw48fF2CBOtQ6mdT1SzBJhbr1H63x2vjdSHzqKmANKQ8LDdYukFBHIQkvgWlBnYjlkGthgCAmaNr/wKpZCNTKgJCR614Cc3CsJOlk8ijIPqIfoSXUYb8vqQk6r+EI7+H9MNGix7Cf0hDKoy3dW20vSNZ00N/owUTD+pRxlWsXEzOhaMe6lhXb8XBBWYiyotUfNfJiTR3l9teFwHQOJ5O7wn/2/kVglhnUEaxhL5w0BSUU1w3x7HkKrprA/pI2GZfuJZ74KeSphPJ3zRHnZZiXR569Rrp0z0iyq/QbIzrHmIFJEKId+yz6nCHkrFRA87TuHkKvFk4Ad+n1atz9n2/p1GbpPY2rafL/yvOkcQ893VLohGagYwFZjsNUYT2qS5HLxJJrjQIiW5pR1jILKSCoKhahGDp9bdcNsYPptNJJ5dd+2NZhZwqGZgSXCVuMJwLklzFXuGZJ6heNbeKMZgXtMqZy2dhUSVk1UFvK8GOWA0AvrWRB+On9VVX1RPC/rqE3SoSMhPl0+nsldY2eC4d/6/KV7NaRuADdWCKozga/7uq/Dt33bt+GHfuiH8N/+23/DcrnEM57xDHzf930fbn/72wMArrnmGlx++eV4zWteg/vf//74+Mc/jte97nX4d//u3+GWt7wl3v/+9+PHf/zH8ZCHPAT3uc99AACPeMQjcM973hNPfOIT8Wu/9mv43Oc+h+c///n40R/90Y3kIrw5gttlVP+4kzd4g/n2YVC7BDUzn+dv65B3MIkHOVDI88XFHALIHyVGGINm237/2ZY/fstB8mVqlW0M6wwAxkQnn469I4W831x415VoldJP/vUkiB/nZByUeZUmBYUsGYRzQeXnkkrSWFAL5WRm/FzTNDGPoURAEPLBEAVnDIrvFyCf3/n5H/XGxSH0VGPBmU0charhPPUcTP8hH+IJyMm/mgI01oP8+FuQYJnxfg1VWFQ/FtErNJFVEoDl3KCcP9RurZMDDYFgYOOcNH/3ZPPceFFChNad3OJucn5xlitR3jvnoiMdwcR1g4QE5+CWZ6H7LnoKPx1ucqh6sp1IiC2vChOlmAVSWN89YugYTWT5uVU9KkXc2RAWnQvREnyoTHItLLtEqHUhjQM7wHSB5HZg62BDagAJq1ubG+uaZY6dqq4ldASVmVGhPBHm9MudRPS5PB9hfM6MzdIXtMHBUaIzZPNcpLFh0Imzsm7Qcxd5aMaeCbm2rLr6y6r5aumIJWWUxw09C+FfSe9wY+NcnVtN86oJEyZMmDBhwoSvPJzTpF/rOBqxCV4pBfhQiR37hdYiGNS9QmVWLHgBss4r/ZYE8NyHBAVyr2UN1/r8FV0bFzlNUFD5MHxerVUjHoYwpuQbC8EZF3OFyqXmNWygjG5VL+0VldTqPQCw2js5kYBxERlJPmmlYaNHNPZoj/JI/iXPVnb9fIgl4TJE9An5IlhFVo0RfJIzRRsuq/nQ4B2cvVdyMi5aQyGcTgjFFAwNMwN06jMRYrjExsxht+bJOCAKASA3sun2K5SQDqm9xpR9qU3WJ/wKh+g6FPEjRjtDSdEmpJiDf44MKO6r21hjlRFvDBT218RfSfiNGngr9Rg9n1jOkeop45E16bMRZZuqZw9D49PAeaWvNYbgrPeIN5SM4uJ9vtUYr+oLJN/MJNJP8g4SUDVQDTWVMtF7YlN9ljFPqxCSh3px/QOGovpFF57khUE6KpWt9Ub6QC6IkigrCoH4MwZV4k+BQxlwblTdMGE1Xvva1+IZz3gGLr/88hi28rd+67fi78vlEh/+8Idx6tQpAMB8Psfb3vY2vOxlL8PJkydxpzvd
CY95zGPw/Oc/Px5jrcWf/umf4ulPfzouu+wyHDlyBD/wAz+AX/zFX7zRr++cgAvPBoCk+uuAdunz+7Xh+TAWmAfjHodQuwhzDtd68js+e2E3LkLKRbV6A9iZL6NxILbAMhFomhThUJ68x8TBQcMaH85a0IjjgjhbBCcDqx0wCFHxbNQYVUWhuh4Duw6UZUhdH/shVZgoC5GXF2jy/H1aPRNDFvcJv9qrLiMNBi8gjcmR+FP1jPtU2nBo1NXdR97VZd40XS9RbsocXec0GyqXGXCEGGrVh2NXfRBQ8+GBdwS7nOwsVZHl/PqAkPVNf1ud8BtCPh9E1pi+/fZXP03a9s6pypfnWuby5bwfoUrMaqkBJGegdpHl/AWU0wD5STbBjyPGmBi+U0dv0ecamvO64qnQkVjESdNSCudJ7SIo/JZpvFR9Qp65qLQV5z2XR+oonRr0+iPOreQZqDkFhL5HSH16r3RTrwtUHDwPjPK5yNpmn53wKxTTvOrmi49+9KP42q/92rNdjQkTJkyYMGHCTQznNOm32zLs0oXFK6PJJv+c5aezBDQqD5U1Ysg2MGaOZiuE7Vlav0gsVX/FQobYh/Yh42Bn3vsXoKiwoUCNdMXCkAMhJagRflk+K5MIPvHajDkgOk/+RA9WMcQPhFOJxp8ibJOvSFjwlR7FRRkamYGC8nPWiEfWxpDa+QJJlZN8e1fzaYJPe+mWi9khb8ohci9+j7/3j12qs/j7FEImiUHKMJYOytjow1cSeQODv++M1nijQetCnw053ixZWNuAbMV7Vy3avfohtYv2kJZrydRqqi39NmWQywi9nPDbS9vGsGksxJ8nQy1JPcnn52QKRrkQ3rPSziW5l3tj140wQN9IuFcv5rIq2iBUQkg+cUiQJlk6B+e8ohMQD/Cc9ErhtsoT1pQE9edVDD4sbUwADGMOb7idGVZKl0QKCsE3t/647cb48ce1oFYRztLfghe61EP6kYRG69Urtk8a88RAa9TYJ6QgkNo/Kvqy65eCTergQO7FXguvFgyxaJosXGDtXkfFkWwYIP+ozDt2FnGueqMLLrroIrzuda8b/P0ud7lLNk7d6U53wt/93d+tLPeSSy7Bn//5n2+kjjdrhD4v4T0J8CRfIP14dwdoZpEY9OE4w3MhBnYA1PkQ6xzUbWPzC7YNyPictszbwaDvsnGa4UkiyUHn+3l4p7mg8Is5Sv2RQugJhPTzqj4//s6s/y7zRBmXbXgXy99Q3Xs5B40Kxa2hnLMYqBvjkb+bss97tXEPqGWy0J0qmkLM7VeGjQ7lDCma4vhZOFx4FVhBckERfUP1DA5f/XOJClKp/ZSDiBQr0TCAeptF0oTyeYQuT+/r1XMUiT8jc+pw/+L168soX9dwVZWjJv7KNqiiIEbje5QRnQb99aTnQJOgWgWpoeczpSMVABDXla4U10WcXQPB+HlHSIsA+P5Re3NEJzA1pxQin4XQd2kNoK89loEQcyQo/LIoLl1yVCL2Of1iXdiH+NSXJnOYcr2hf/ftpNonzF8cMSxSWHIJiW4RiMjlbljDLVIeToFVY6R6DpcupbSQdAa6DeIcCTnhHZ1CRZVaIHMGrTlvrUNE78Uha+D4wRC3JUkOZNFMNkWU7wXn8txqmlfdPMHM+LEf+zH8xV/8xdmuyoQJEyZMmDDhJoZzmvS77vQSp8wifp+VC2FDceE/s4SZMZHsmxkfZm9mPLHmjd0Nmvlhr/brLGKouKD+I9emRUcIs8jwRi0mg7mdo4syP4okkSxeBbWE6ZrwkzpL/iwJC0Ps/CJRiL6g+qKg/MoSwGuoMDEwudd2NPQ4SsSgNvao4/ey+KsuWcQmVpBTcXHf5XnmdF6+LDdaJLLCKYvwiUBaNGmDRVUpNrDO1PtqI4MYE8vfBVo15oS4MYkMtC7fz+dQQ/pMYozkSMIYICoQhAQW7+GUjzEwTHJu5EYbacdePjvuE2S1MvaCcsEqajrZboOFUIg/Zk+KitrP
EUfjUsdcJWE0yTdUf0Et14+UWRq/tAGpZlvNWxkx/JooFg0TYPw1EXuSV46UkEzOeeIPMJhZfx9lHIqhMwlZ6LZBo4h+JoPhUBsALREohE21BugcgVWddEhkIkSiT4g/75W+E1Q+baHOcJkRnxGezfCsMvqGslhtylXN2slBjFbaUz1eX+36g+GOAE8WxLyhALFJBtmK44Z8ZyAZ7cN2I+N31wXjfSA4JC9XrFaXyD6lkCHjQ3vyKFswYcJNDyaE5OKuA3cOxljft4PalQMZSC3A7dKTf8bALE/745Y6tKV3SjKLxofutHMYMnAoQs9Fpf8sf1bL5z6QgjJ/6dqUu6/2jvfzQoeOTXwHzUMIYwnp6edZBk1QN8+sD/E5s7nqWiORZl5NRzYQn7O5H49a5OOB/K4dntR1D94Looy8AervpgyimHOeaNLhJZNCZk3CT4XPk/ladiokwmb4IpQCTquudX1rc085HCnEt3eqg0R1zOpROsskArBfOYZ3pNLzHBqZS/j3eyL+5JpJ6ivknyIp4mU4xPdzlg+75oxSQqsKV7xHxAFSKxn99uE5D9T1x2utEH79c4V2ruTRzeq+zjWq82oClrlYEzBncz+iNK9ddMBW6KrzxUmg3fGKulA30gS8M4BxKSxp1wa1X+rr4sCp8yI79VsJotQ/ZjKvivOoQPh1gfBrd4ITlQo5WjpjmgbczAE7x6LzaxD/L8fIJurscSwF/PNCCI6HXZuFP+/XOzl+5o6cledUb69hn+RfnIu5Sh9S58zbpz8+TZjwlYp3vvOd+Mu//Mssr+JNFR/84Aexs7MT80pOmDBhwoQJE84szunZ8qJ1aBdpIWfJ9QwPPjcLYvi6mSUcnlnMrTcEsTWwkUwCyBpYO/cEWNf60J+6wMIYRRxCfQbjiTGNV28V8iQWb9FisaiNE2VOq0j0EFL4l3YRiciMANTEQGXBxGRA3ADOLyxJQjqF38k0SEvaysJPGfZHsWJx79gXEgm/Qommyb7ovc/JmNc5RLVZIv36JF8tBKfevgqlmk+TfWNKv445hjyKZFfH0fji4MnoWD/iqAZcBiOUMYkAdEHRZ9krAMlRDP2Z8gIOq9ZyA0q+7UxDG4xKo+UqcCQF+174Jdk3ZIAB1if8tFJgHeQqgwrxRwAFQjMiWAm3GgO0CEZnUZ0kha/kY4lGtaEbNmYohjeQWMNAyBlJLARzqpP0IyEbxQDeEMIYI44FXT6ulIYWMp4Lg+QSzI1zNW94GfO0wlHacyWC8ZQY2dhEcvFxbFAG78IAyXosl2LV+Bl/JwKxDwPqy7fwOfvEsFsqfEww7tu+ovpGRMcO3YpwpActf8LNFJUQi7XQlJJrj9olYGwSNoXv1MA7J3XknaeCQoPIpKgFGmLENT7aAhsLcvAhP4Gg0AnElG3ysWbFm01ymALoEX6Zws8a7yBG6R0hY1YVUQlHgRjt+m0YSdNA/Gkj9qahxzkZC0XVLL9rwq8kHylX+AF9xzUNGXerxF+pXquodvZ9mUivxlorRgIwfs9/F/2oCWo/h/XCfQqkTbK5l277QACSvt4ambHXcXSg3QzgI0rEeYk6RPbRcx7kQsReuHfltLiylzpXD9/P/v277jWWhJ9Dfy4B+LmfCZFViPy8u+0Y1nhHTnQLny+vW6Z+rEOSm8aPK5qoVIpip52XsnWK1DO/Vp/tMW8oT0L7dpboLGgXyVFTFH6150EIeB2in3PCTxOfuu0k16efO6p8x2VI9hIOXpFr0jPfw7r9dc37nTmyDZCSschi/CrHqBsT09zq5oUXvOAFeOELX5htu/vd746rrroKALCzs4PnPOc5eMMb3oDd3V1cccUVeMUrXoHb3OY2Z6O6Vbz61a8GALz73e/Gd33Xd53dygzgQx/6EF74whfiyiuvxAc/+MGzXZ2veDAzvvjFL+Lo0aP7zr3ZC9M/YW20bYurrroK9773vc92VSZMmPAVgHOa9Du22wJYYtnlhI+QZoD3
8hbDuhB/h2YG243B0a0ZDs8tGgPMjTf6HGpMyGU1R2OapG4JIaqy8J6BiCNugnHKL0AIJiptZMHqohrN17Fm+BYjk6ht5sHLHN0i5aZod/xCqV2AumVQ+XmjvChSWHm0ivHZWKu8uBuvVCHjw3GJUkaIP+f8pSpjkDb67JU00mFwxKtaGxg0yeeQvGo7IWNlG0JuurD41WE3h4g4N2SpgiffRutdqPpWKf7icWoCJHY/T+jJDgP1Ucd58i+FGIuhx4LnsFFkjShEx0I36WqWPJIPviXPjiKz4I1I+bE+cK3sR+H3MW6qdn01SIhPf14f5rN3P0eIPiAn+zQZWub58deyd2TtRulWirFQE776DOx3AgCcPzdVhSEhjFNAUvnFCwtGDp07CKgrcsPxNpjrrM096KVcOVdUF7IDdbvAos1DHFcUGTG8UjMHh5ymi86hYx+CSp7lmsJZq5qz6yaVv1S1SbUNQqgutiZTOPeIPKU8zgxMpVHFpf2iwlv2C+VSo4yDah/u1HhrbbgvFMdaNg24mYw4E84d2PMuBNwOAHjlrOS0BDLyG8Z4cjso+/j0ST8HaZeg+TZoaxtmvgXqmqSUIQM72wYA0HKR5aD1Sr8mM8Jz10bFHAPg2TZg52A7R+s4ql/0WGOMf3/aQMqI2m8rjJHbjYnzQR3O0xjCtjU+CoRNKp30Hgkn0Co4G0IFsgM1ihQVFU9oK2pm/nc9DyvG75Jck/drid4mcewi4yeegSwAOxD1iYUyrHqmMiqiQUQiqF+N/aFGBAI9ld+gr0uYMzsVBQDIVX7lu12/83vvf/L6PgtIkI7MSSU/d3/WkCn+KlB+KdncbzDUZ/XEw+RGmouRJzBNn7jM5hqVMlyc4w7PrbJ8gEAMh6nDR9bmC8ReeUiigizqDeT3OqoMgRgxQNYMZYQHIf46Bywco3GMOTNocQpmeRrU7oKbLf986vQHEt6zXYAbANzPs8lIcxgh3Wrt4+eUKVS9X28mp825JT82BBKS2h3vJMGKiNNjgJ2D7QzczNHCYNn5MW7pfA773joS/kbE+xHWBiTlB3KRaqGGAT9eAD5yAxnfmKRC8irnhezSy/5YzKkGw3UCfW/MXhSGoQm+Ivwkz+HGBqYJX8m4173uhbe97W3xe9Mk89iP//iP48/+7M/wpje9CUePHsUznvEMfM/3fA/e9a53nY2q9nDy5Em86U1vAgC8613v2gjp95nPfAZEhNvf/vYHJnWuuuoq/OIv/iLe8IY3gJnx+te/HocPHz5wHc82mBmvetWr8C//8i+4973vjXvf+964173uhQsvvPBsV62Hruvwl3/5l/jQhz6Eq666Kv77rGc9Cz/3cz+3rzI/97nP4c///M/x1Kc+dcO1vXnjU5/6FF71qlfhNa95zWio5XXw1re+Fd/xHd8BsyKf+ISvbDjnpj5yE4Gev9/YOKdJv5khOCLsssOic1i0+UJC8rZI6CZr/MK1Y+vzIjCw0znMDOGCrSako3HoHKENBGBj52DbBnfSwlt0xNNQFqurUBJ+XvETyJywaIu5KZSHKLkuEn5YLsDB0AYAWeg544LXvISac4mgMY1ffFmTFutUz0k1tjgfg3RuWcD741KeDL2Qls9xYavIvmVwsRWyryT6xki4IWgF3uA+ivCL173iXLLdEqXcQqaem652HJBIL+coHOuJP+cTs3kjTrCaUFAHuriJqznsSghpxRyMaKS9h0kNTGEfMS6tSfyJEU4Uj0NwjMxLWr47qjfYGOEnzxOQ5/QRJwCpRVmfsTBWY7ct443VZ4uy/GSMnAf1MZCMSUOGXX9cuJtB7cI1g6modnV1AvEnCoZYZ/lNSFF2fkwpVX2Sl0sb66ISp4m5/JZOvNCFlM890aUu0l5ynzKST4xWlOpXhRiMpcMDYIM4dhGrsbgwOvGYEaokBLXxMoacYqAwHIIdYJPBkEMds1xYxvqQhTcy3BnOOzPmUDHh3AZtH/bhKZ2DtTaSbjG0
rbVB+RfgPPHH7TKSfgLeOQWzdQigneCElEICSz7iSPQB+ZhGxkcl0L+JUZyMHzuRvxOMIVgX3r8U8pY6jg5gAKLKz5hE+EX1X8z9LBEX0nu0dELIyTYDms3TdWe7JqUfGkX2xVCf9fGufC847s9Z4i6xPsF1RpNnQXFWDSlaEH6rcmRl5Op+1yzlOcrFaKUOkstvv8hIW/Tr7oLTjm8vitvGIK49meKvUPtFpzr93hJnFKJhgiNW3NQ/jx1SXNs6Rw2SrFBOU+gryTKF1h4USvt1IszKCMSfoZSP2BL5MN9dA5jifa0/KwdOv2ATpyHlrBUItjLKhNQ5J539vIWUg4CVPuGUI1UZwlLNEyD5kUNYz67z73C/Xs2jngCiUlURIihFifDqS3FSkooX90f1UZ/XWPVb+d0l9V82hozc6x7hN6SKqzhiZXUlNT6q8VY+d1yPuHKmMc2tbn5omga3ve1te9uPHTuGV73qVXjd616Hb/3WbwUA/N7v/R6+7uu+Du95z3vwwAc+sFre7u4udnd34/fjx4+fmYoD+KM/+qN4rk0RkV/+8pdxv/vdD7PZDHe9611x17veFfe6173wYz/2Y7j44ovXLoeZ8Q//8A/4wz/8QzAzHvSgB+Hf//t/v5E6HgRf/vKXcfXVV+PSSy/dtzGciPDUpz4VP/qjP4of+ZEfAQDc7373w9vf/vaNEX9d1/k5+AFhrcVVV12Fn/iJnwDg6/6KV7wi1nuv+PCHP4xv+7Zvw0tf+tID1w0ATp8+jUOHDm2krK7r0LbtvtWLNTAzFovFgct85zvfiUc84hE4ffo0nv/85+NBD3rQgcr7jd/4Ddz1rnfF133d1x2oHMATQ23bYj6fr975Rsbu7i7e+MY34oMf/CCe+9zn4la3utWByttkf3POYblcbrS/bQpd1+HXfu3X8LjHPQ53uctdDlzeJtvtK0kl/Ja3vAW/8zu/g6uvvhpPfvKT8ZM/+ZNnpR7nNO17eGYxC9bzRetw/aklrjuxiy8c38EXju/g2utP4wvHd/GF47v44old/OuJBa4/vcSxnRZfPr3EdacW+NwNu/jCyQW+eHKBL51e4oZdhxNLh5NLh9Otw+mOwfPD4Nk2eHYIaLyXuTZcCWQx4o31QlyhavxO38Uz1OfwEyVXQ0iEX7dM4TyF8OsWXn24XIAXO/6vVZ8XO+B2CV7sAO3Sf14ugFbUiQOeucW1aDJBSAj5i/sy9/4kFI0DIhnQsXirpoT04r1afm7DfovOYaftsOwYO63Dbuuw04V/w99u22Hp/HYpZzfsq/+W6jwSvmfpXAqbU/kDgLFwnijaRP9lZTiu/i07V/3Lr7XDqWWHU0u/fdE5LDu5FsR27VSfi6GZIoknfc0bBhL5lRuIMiWcGBCQ9kle8v4/HZ5WIES7YN2Qng6KJGYxuPT/aigVsxK2zefrTM/VzKgcmaSvi7J6svobAqm/aCRWasy5/rOELWtwZGawtTiO2anrMNs5hq32lFcZhzppwkvupRg2Ohh0MF45Jn/kt3Wxz5Xtoozaqi5zA1i3gGl3vFf84jRoGf7aHf8nTgaxMFGuWa/ws3MswjO66NIzK0q/znGmmNAKP52b0sRtNEj4ieE6U8/ZQDzaYCQzjScD5K/Zyv9mh4b/5key725+BDw/Dzw77Mf/+WG4+SHw1hHwPOwzO+R/L/5cLFP+DoPnm5kkTZhwY8AcPh/myAWe/Jtv90gZ7rowr/AhxuPcY+ckeOcUeHcnfnc3XI/uhi8Du6dA7S5oeSqONT7v1q53OCgM4TG8XVD1cbPt/4IKJjobuPS+0DmRDSGG6pwF9d52Y7Hd2KDk86EARdW31VhsN37/rcYE5wx5B1L/PaYUcWxsGJO9c5VX9c3jH0Tl10juvCYP/VtxHht692jnqUhISD1sqdprkpLP5IrKOJaWBnWo8pDP9cZs0fGVP6aorh5YqA9HiK2cZKnPK1apEuN7Oyq2/Z+FgyWgCXOI8s+o
v1p9ADVnKK+FCjJVKxpr/aB6PKVj9b+xPfI5m/6jQNTpv71C5o/aSadHpul/pU3CuWpk8kGoDZkPuhAWf9sabFk/v/HjxZb/NxDeKAiwsTWQ7vcO3gExrueQ0gmU9TdhHppycFMeJn1ozUXGE5VxDjNHy169KHOrLsytynlw7BYyvwoOo9FBNBCaWeSGAXVm3LckSJ2LxN2ogq/2uyb89PlUfch1IObkcDZGIodxK815J4JswsHx0Y9+FLe//e3x1V/91XjCE56Aq6++GgDwT//0T1gul3jYwx4W973HPe6BO9/5znj3u989WN6LX/xiHD16NP7d6U53OmN1f9CDHoS///u/x8tf/nL8/M///KBSfS+46qqr0LYtTp8+jY9//OO45JJL8MM//MN7IvwA/+557GMfi0c+8pG44IIL8LKXvexAht7FYoHXvOY1+NM//dN9l3Hs2DE84hGPwK//+q/jnve8J97//vfvuyxjDF7xilfgp37qp0BE+MZv/MaNEn7f//3fj/e85z0bKe+Od7wjjhw5gvl8jje+8Y37JvykrIc//OH4lm/5lo3U7X73ux8+9KEPbaSsN73pTXjKU56ykbIAT/I/6UlP2gihfv/73x8Pe9jD8I3f+I34+Z//+QOV1bYtHv/4x2+MpHv961+PJz/5yRspi5nxv/7X/8Lb3/72jZS3tbWFN77xjXjkIx95YMIPAO573/viIx/5yAZqBrz2ta/F0572tI2UBQB/+qd/mqnOD4LTp0/DObcRwg8ALr30UnzsYx87cDm7u7t4ylOegs985jMbqJXvb694xStw3XXXbay85z3vebjmmms2Ul7TNLjyyivx/Oc/H8973vPOGtlJvInZwY2M48eP4+jRo/iL930Cu802rju1xA2LFtdev4NF2+H0okMXiKMmrNDnjQl/FudvN/G7NwgRjm41mFmDo1sNztuyODyzOG/ujUKHGq/MaYxPxg5ZUAVEsi+oXoS46oIRXFQvpYc0BXKCgCyU1NzALxAl/0O3TIRftwzbfLhPFtLPdTGcFHddDDMnIbjI2GC8s8BsrgznM7/YtLLNAs08hk8Rw08Maxk9XvvdRi/PMsNU2OaNc/VQng4pX1/r+mE8PTkXPCwVGadVfsDqBaB4yUtoT51TaFyNNhzacy1DmDr3UD3LcoQMMUbyrZkYfmxmfe6hxngCbmZSOCFNpAA5IafvC5AbN8SAku+niDhVRvLSzkk6KRPAoCesEIJJdYFIIEYSrjimNkDm+3siiVAPFVk7Xu6Bvx7utUUJyvoKFHGaziN1LcOJRW/vEJYXrvXPXjOHa7ZTKFvmrJ1rqHXTofYarEcZ9skpZwAg97IWZZ+xPrSeaXzYKSHpO45hPXV/Kesh90Ormqlov7LeUvehbYNG5RUG9KFnthf6LTMWKlXDCu/5aKA1DWAMjt1wAre5w51x7NgxXHDBBfWTbwjyfnz0K96O2aHzzth5lqdP4I/+4+U3yjVNuHEgfedL/7+/wnlYwp087ucXp08CgFf3NWmBSdZmhCDvhrmI86EuaTb3845mBnPkApjzLgS2DnvyTsgACdfZbHnSClBG8jBGa2IqEPynO8Zuy945q3VoHXBq0UUnmN3gLLRUcwZ5JyaVX/5eNQZxTmjIh3234sQRwq5vh/kgSdj1bglanvLzs3bXz9XYeQcrBbJB8VuE8IMQmbPt6MCx6FyaJ6k5VDZehjmpAaLTSBzbQw6vtccowKv84uegKkci0WpjZvX9UlNSAX21j+43BcnFZLLzpnDv9bxzgFKjKVJKh7Cu1nGgbWr/6stfZ87Xe4eEz1l7lIRLWR9NDMp3Re5qMrGco9fqpH/Xcw2dU7ucd5Tt6Z2IkCKRLHd83YXE0XUXUlkiA8g2XY9KG+q5UOf6kUH0/FnC7zaGsB3WUc3ucf9cLk5la6ZYP1nbNNvgZuYddpot8Gwbu46wcIydNndCLJ0dAWmT5Gwwtz6izJYNTlaWYNodn45h
edo7UklozwBR+EWHhq3z0MJgERwdxflR1kjxusN55Tz+vMbfG71+LCI35Bdgqv29SjBrcr4so3iGqs988bz1+vsQwSf1CXMpIXI7BpaOcezYcXzVHW83za0m7Bt/8Rd/gRMnTuDud787rr32WrzwhS/ENddcgw984AN461vfiqc85SmZag/wxvuHPvSh+NVf/dVqmTWl353udKdz5p62bYtv/dZvxUMf+lA885nP3Iih/U/+5E/wnd/5nQcqg5nx6Ec/Gpdccgl+8zd/c8/HHz9+HFdccUUk0p761Kfil3/5lzeSn/E3f/M38axnPevA5Qg++MEP4jGPeQzud7/7HTgMJODb7ld+5Vdw//vfH5dffvkGarg57OzsYHt7eyNlbUqVB3hC51nPehauu+46vPSlL8V/+k//6cBlLhYLfPKTn8Td7373A5e1aezu7m5MsfbqV78a73//+/GSl7xkI+Utl0vMZpuJmrTJ/gZstt0+9alP4S1veQue/exnb6S8TWJT7Xb11VfjxIkT+Nqv/dqN3dNHP/rReNOb3pSF5j4oNqlG/MAHPnDG8nfK3HTV/OKcDu85bwjUeK/spfMk3qL1hN9u69A5xiLsu9s6HJ5bdM6He9ptnffmDn+WCFsNZ4vjmSGwBQgOM+vDMzbGwJCBbZpkPAjEWApzx3Gh6ipLWh3izn+n6FFuCZknZOb5qI3P8ORezN/n0jb5lzYQEiDzeB0g/MqlWkn4ucp3IUJLwk8vqnUoTzFo1MJ6ZmExVxhj4r4ubbSbeZ43Dgk35RzDqkp2zDAMzNQ+KD4PgQghVCfFdpQwnXvBfo4pwZwILH/vKZJXThNsqBNJ6XcdUinlndFEUi2Ul9ShNCT63yvPbREyldH39Cd1fm1g1CrdSKhVPN9dUSdd16wuBQHWIRmDAR/2SdqpSvi55GE95HkeDbHKGQB2Ho0tQviJY0NJWsrVyXgnxkOpZ0n4DUHauUaK6zbM1cfobdd9aMxXXZO3gB/va+oGls9IRi4uDWjBwMl2gQkTzhnMtmBC3j1uZv55aZdAmF+QtX7+0XVxG4oQ46xtzK4LCuFZygMmKjTX+ueGZ57sy8ignDwXBwQhASSigsAYX52o9jN+LIxhnoX0synHs+TOFcIv5cwVVVSfNCEAVkdDMA3IuThewrVA02SkpXYOY8mHqA3scs0jjkv+PRXCd4d3OQhRgURh3CYDgA04zCepGGEjiaoarhy7NOE3Bnm/VH8bIvyyulRIBP37AfwSmf1cgvTcqEc+FHWrjeVkYuhORrre2rvZsf89zg+kHLVWyM4ZSJMY6rMXQSQn/ErVLSGdS9dtCKvmbjJHlM9yjvi86HnNSLjIKnlKlTDkxXe5BpnzG/Lh5J38iDQXk+sR0i3ODZu537WZD+eyG8FQE0ob1/ahYl4jRLMPs5mumon888gu5lmXcS3mqYvOCogRU8rHwCCfW9VI7Z5yD+jlVtQOXmkfHgw5nHYam7UVGCP8xpTBvWfB9yEhYGUNOWHCQfDt3/7t8fN97nMfPOABD8All1yCN77xjfsOZba1tXWTDPe2Lowx+LM/+zOcf/75GyvzoIQf4MfZV77ylfjt3/7tPR974sQJfMd3fEck/ET1ftFFFx24XgA2SvgBPs/k+973Prz4xS/GF77wBdz61rc+UHlEhOc+97kbM7JvEpskYIhoY8/e4x73OLztbW+LBNYmMJ/Pb5KEH4CNjllPfvKT8S//8i8bK2+T/XaT/Q3YbLvd5S53uUkSfsDm2u3Od77zRsrReMlLXrJRwg/o25sPgjNF+O0F5zTpd3SrAbbmMEQ4PPME1/GdBtdefxrYaXFq0WV5/oTwW7QO1hAOzS3mjfXbth3mjcGppcXppcOppcWyYxye+ZBP2qsyKvSiITh5ZcdwUxV1n0AIP6KkfpGwLJYQ82vJgk2M9HEBB/QXR5JzIX63cTs1c/+7sd5Ypw36IaRMDKelfyu8rYG0wBoy4dQUfloB
psM2evJPHxO8eZ0m9pShfgXLVBKGGpYoGmNWQQyD/TLCB+Nz9VnyOWZqBqAhhZ8oDLXRQq6rJOy0GtEoxZ+EkazBgWM+uVW59IDciCH1Kolqye9n0L/vkt/P79c3TNQgz6EnTbxxxz9LidwlTvWWjzVCS7ZrMsmqf7UiT5NoQszLc6sVqEBO1ieyz99r4mC4ITGgJSOstKkm2cAOtNz1n13rwwSbxitclAFDOwto7/ZSfanVjZqMSuEzff18Pj9KeWV0qCbODVFaYaFD1kVljYSbakOYXqXuE0Vnp+onZKs1iCrMntIv3tc+KSv3aQhDjgdDDge1cWkVaV0SqxmpStFlI3wPz5wqlAG41oEZuGF3jGY8M2gdQAdl5leUP+HmCbd1PvjIIZjtI6Bu6cNztku40yd9WM+Qz4+7LgvxCZXLD8aC2wVosePnIyEcKG0fgTl0xCsAt5RBTeYhbOP4FMegMFbKM61VX0D+LM+sz4WLBpgxsCwGF62e1zn8ZtaoUMMICrEwbqnjWQzipgHbMJaaBiyzaddE54roGCDzKgmtqeZdkcghU83hp8cxIZNcYLPIeLpH0t9aMr4u7FS+RclVphCdEvohPXU7rztWAsU7plS0DaB2bjmfvu6hErTzkkZtrpftVSq3pe76egqVk7wbSbWRJtB0+fp+ZWWpulA5oQrEXw8VtdUQ4bLO8jTWGwQTCCVpx9hmxfxV3ukS5jRzHqrVU8hs6euV+6tRc8ryYSoR5pd+puqfRQ5OTohRUuTw1jGYANNsgyjkqXNtX90ZnsfY3lkfsLFOcS4DgiHfVoTksCbjRWPyfOwSEjYLqanayc9pbRoH7Bxufgiwc+zEUOkc51d6LRnneUZCyquQ9nJvukooT1QIbo0K+RfvZxmCt7aPascYUtWY3vPfI/zGFLfxfiVFOYe/lIZh+JLOFKa51c0bF154Ie52t7vhYx/7GB7+8IdjsVjg+uuvz8I2fv7zn6/mALy5wBizUcJvk7j44otjbrp1cerUKTztaU/DhRdeiF/6pV/CAx7wAHzTN30Tjh49eoZquRlsb2/jhS984cacG26KhN9NGcYYvPKVr8SJEyfw//7f/zvb1TnncN/73vdsV2HCVwi+6qu+6mxX4SaPc5r0s4Ywnxkc5QaWvHrPEOHUoou/nwqhPqNhOqgAG5Pyjllj4n6e2PPHzyyF/G82hFJktM4vrNoilGIMNxRCVgKoql40WaHDG+plFA0tgjTIeCWfs2DjEnmjvIDJWOjwnmjU4lu8zdV3WaDXPHJXkX1+n5Efy+qHa2b2KiUN6y0S6DgRa1ZYDbEjFeF9hHyTddgQKaYhhj8gGe31cbUynKqLEH8AMvKvPLYWSjS73oEfdJ10KDJRKUiTrCIya/elFt5zP2DVS4bufy0Elyb+xIii+5YhVsdresV77ntjlW8TrRjUbcFhH03KyjPJwXAsIaPGiHoOdfH19M88g72hNahCXCAiTTAIa9UlKUMT7Nx7oZvG5+dThJ/UScICxxyNahyJ7RNaPyfWUp3E97+hZCQhqYMxQOeSx7lsFwjhJzlmQtjimDtS1a9zed2iMiAQeZrwy8Kvqn1WYUihV6oyS5KvJAWSyJczR4Paecpx2n/lntoz1V/316J+YJzuJivOhHMHPNsCz7znIpsF7PmA2z3t39nLBRBCihPgFX0hpGeKNOBA1kcbYCyBZga3a/xo43yoOnId7GwOdIsw3rR+HsKkHJyUIg45EaWHaRPeAzNj/HhgGIZlfMmN1EL2+c+iXqL4LpX5mB+nZHt/oGIEMsc2/voBwLSecAsK35rqqZbDjSsKqDEVma6DC+8OB0oKRMCTCez65FJstHzMLwm/sXoAyrElVqYyb3XFtjHCQF3TOhAnoyHib22UDjCxrkrtFG6jPgtV7lksAojq9ESkGfTm9JoUqf0eT5baqJZ3cROotaNW4kvY9EigKWRzifC9R/hVz5k+U7Fd2s4F1zISoi04munc
wEAigRxLuNvimQvPQzphQTjL5ugsxsk50zCY4yoL3sdLFMGIc50YaUKPF5pMI6MWgyYSfmxnEAWzX6+iGk7U10+cqhL5Z4JiRbchVZ6JWl7FXnsA/fCetWNLtaBsG1u3lvsPfV+jjvHQs0D6Tbh548SJE/j4xz+OJz7xifiGb/gGzGYzvP3tb8djHvMYAMCHP/xhXH311bjsssvOck2/cnHeeXsLrzubzfCGN7zhrOVSOijO1XrfHNA0DV772tfiCU94Arqug91AFLUJEyZMuLFxTpN+25awPTMgEA41fhA+NPOLgsNzixt2WnzpxC46x5EI7Bxj0Tp0YbW5aL3Cr3MOi1CGGLcBoLEdTi1tzKO2pfIAipenYFSZIl6jEo8pgMIiOi7YSsNJLewegsGBHGg2D7sZkFMvIiH6ANBsnha9FHIiKKN+JAD1d4g3fU46rLPAKg0HqxRgkQBEUv5ZQ57sMYwU6zFaAfLvQL4Nq1V3Rm0sCT9TMBGa3EvwITdjCKIBH2ttYBw7R3lMrW5G1KbWxFwiJKEtB9VSudpP35vaLamFo02/rQ8h2cu+kEKKUtxHyHd96sKkFIxAivxbc/4rxF9O9nnDUBuMKzpPTEkGJU/9ZGhJod/8jlx8RkFoAggE+8wrBppttMG4I9cpyj6p06JzKbQT958frfCzDBgwGlA0iDElEpAZIG0EA6LxxNuxlIGHUr4gDkaoZcexXpIfq+36IaekmyWlDKExyvPdpPxKcgvLsHn+bqdxB0B2npLgE3Kvt60gcPP8RXKefKzI25h7ORx9nam3TSNdQ04Cnlre+KRf5xjmDHqjD+XsnHDug+0W3PwIyDQgt4Wu2QLNj8BuHfJ5/mZz/1wudvz+rgNcB+4c2Dm4zsE4AwrfySxhnINrl8Duaa+lcR1cMwcdJpBpPPnHDoR5n1RATvhpGJL3IEeHIa8k5ug41DvGIJJ9EoZPlOFJrZMcFeQ8gB+PiBEJNj9n8moitnM/N+tagBR5GYk4m6nGauEaB+9JdCIQUoRBgdjsGLBh3O8YSokc2sz6c/RC+5WEZNHOtdDIQMUpIlayovArw/cNEX/F+fcyvKyKaACEebbUoVS81xRH0oamAZMDOKj8dO7DERKlJP6o2J8BpYYaq3g4RqsyK1gVcUJ+E+oqU/uF/fQ8Ns150nNBWsUpKslA9GQhY0NuzqhmDdch97VH9pUqS4RnK3znWGGv5INLc4uZISw6xqLjWPbchuOJADtPIVCjMwF6ZJ9uMyIhPBnWANz5ZYittI8eK6xJ40i8LgoFBAWudgQQso+bOToYP8/iRGBmjqPxHvVVflpZSCGiBCrtmjdy3eGr91mPV+U+5b6qP6zEiAqRpYzKuXRoT6fmfzcmprnVzQvPfe5z8ahHPQqXXHIJPvvZz+IXfuEXYK3F4x73OBw9ehRPe9rT8OxnPxsXXXQRLrjgAjzzmc/EZZddhgc+8IFnu+oT1sSkcJtwEMznc7zmNa/BcrmcSL8JEyackzinSb8mLPi48fn0bnFohq2Qn+/wzOLEdovDc4tTiw4ndpZYtA6L1mE3xM5og3FeICpAvW1mCG3H2JXcgR3DGIdtazLDkUAbtLUapGOgoXzBKEbvVQYLn6PFeIOY8uImwC8i51uBTNTHJEO+ziVTEnzcbPnfmjliPomC7OM1Fla1a5CFdrxexdkZJjgKSiUAXQhL6UvxBvuZNTCO4YiBLrR1MFAY9ko7wz4UqBBwYrQoQ4Fqkk2r+2okXCm8EyWelK9JwLLds+NU2TVFobb1lYbJsT4lRk4dfihXuCXDoK93v465wilsK8JIDhn8yuM1KcXZPiMEoiIje+RfUVDg0ZT6j0If8t7XjhgMiiHPEMiuThUmxiYd0nPpvFGlJNfKWuftLqRfUtmJpzcHI5ELfRyG0CAYMLbO8+ExO4YrCCCpT8fecNU5xtKFUFUAlp2L+S0FmgDu1HPl+xlFspwCMS7kdDJSwxuHpA7RwJPG
gGXnn8NFJ3lKUztpIk6TfUKIivGrMb4vS54sgnq+2EHc3pVNL/WR8K/cMyARdtKnhawFkhFI7qn8LrlCdR+VEMJ5+GBUMfacloj1Kup5cmfveYUmTDhrUOGH4TqvRnFeiUfGgnZPe1WfsZ7Mc+HtsVjCdQ5u2YI7A7ImqP4M2J2CmTXeCSmE9eStQ6Bm5okyO0tzhgFjfEn8ydhsDQDnQ/AZojCOE2YYNqAK2Sdje0n4pTkaqXdQcBxBOI8QQ7YJpEJSNsXwnjIYmMJwLqREcb2rzOX6/S6fo4tJmNN6pblJBvgyT50+J6Ux2Klx059rD8ZnTZzVwmaOqYyGiqyo3OUdDKS2KqvZUyGO1bcSBjH+TsYTKWTAsfpKFYb8vVV2NU3EZXWKRFkRBrHWRjq0ohy7Bvk39Jsm/gDECAVAmD7p7WrOE0msIbVcqGtGaFf6lkavzKJsMk2MrOGjGPifGjkd/Pxk6ZKjZmxlMt7RkVWAUCErBaJEFLBTZJ7UmQCbzy90++j5jown5S3wc8OCYA+OmBwiP7RdCu0ulyL3Ru6LjuwwM0I0UlRh9iYx6yj7yjrJNiG4i3u57nMc74ZBmDAOj2xl2NHMOSGOlckxgMO6b1L5TdgEPvOZz+Bxj3scrrvuOlx88cV48IMfjPe85z24+OKLAQAvfelLYYzBYx7zGOzu7uKKK67AK17xirNc6wkTJtyY2G9+zwkTJky4KeCcJv3igosAGITceylEzSysfudhhXh6kYzcrWM0ldWxKAEBX7ZTDJAYcG20RudKLh8ONBnjOSyutYFmXYVShPaaJOOZs2CIYNNEz1sG+otxRRAC8F6lZPxC13qSz+elUQnkZdGJnOwbWlvJ5WgFlw4TpI0iOm+IEH8wnHL8hYUthzYDp1CfMxvav1D9CQmXwnuGe7RGyEwgkX01dV0fqWyt8BsiGOvkYk4Ua0IvtmlWF7U97KXJPr8/9Y4TY+AQ9GJZq/vWIfxWYcjIWqoudJ+R46Q9tPmmr3DoE7MxFFRQP0Dtq+vUOsQQvK1LCj8hBEsiCCgVIRxEE4FQcwSmcF06vKYQw+EZ3A2GqdMtp5x2JhGmnoBMir+OOdRP8uelPJeAkOa+VYRUI/8DDCOSnxZeUWkMogrEhjBXUa0AxPHCP//6/MgIPz02aAjxLAYwIflKj3R/s1qUkFBpYhxkZdBJ29IYXIZjzR0UEtGn77/cW03IpdyhvSolKL5uKESvLkPGA92PTiz713ymMXmjT9gv2NpgkIafV3QW5Jrw3vbhxM2hI/4d0y5BywXgOpA1wBJwixZmHuYdzqv9uPNjoQF8qFBjwMsFeLEDM5uDXJeiGADZ3EdIitIxwJBXDxswYIAuEH9Q+9EIC6KdaCz5dykRFBE4fKyQJzYYpMmGXHoOkGgJxC6q7ID1iBuDfsjz4fP7EIQi1hYVoiFN6qw2/GtSpvZUR7XRUDm1kJ7l9jVVfkB+j9fBGkK/fP9aeEGtUkShNmIH4pCLzTn4m4w9E5nSv3OF3HD+syrhFy9ivXOrGXMPkQSU+WjxW43wo5I4q9VR1hxhl5WEX03tBXjCNTg4+sc4n9sx+3mVRCJQj5pva+NzhEbCjx1I7l1JRqtz+8grohxOzmV+6pfm2xK+XMg+GY/yC5V1GGLIzZirNKy9xJGq5ignxF8ZMj06JYRWiY9AzXtpTJ0n9VuT8BtdC5br1SGib91wniPg4t8bE9Pc6uaFN7zhDaO/b29v4+Uvfzle/vKX30g1mjBhwoQJEyZM2BzOadJPFlqNAYgJ2w0wc0BzeIbDM4tTyw5bjcHppcORucXJRYfTiw437LTonIskw7yxODz3y91M+edduiEmj1lw9Z1lyhlN6pho/EfnIlEA1iocZHlieraK0oDEakFtXDKYOxsWy2KpqXt4+vA2KVxfFlLGhnwS8EpJSRivDetAfyFq1KLXf0/3QxRcZUhJS/AhTjkRPCCg4+CjrjxpJSShGO+95z71FDrR
eG+4Wk9tyB8i9TQZtxoUzymBIjrm2AB5Hj9VNlFPVVAq9Xx7quPXMGINGboiiTeybhzi8obWmtqjPhkHOdt/6HSa/NCfpX1qqj8AvRCZZf0dBQMnvPIi5ttTZWiyqCSwHHsVnSbVOs5VoprAnRmDMsSqV7J55Z+E1zTkh41F57AAwDDYaV08t/fQ9hcg55TfYu68jrF0Ds4BO52LxLZuO2MIHRNmNoTChTLIGACOsFSGK9HVsfH7RtWwtJPLVY9C+mnyT9o+PvvhPhGF6wrqvpnxOWZmwSAG1wKuYlQFegZfIfA0GSu/x9yHinwsVZrlGFHe26QS5GyMKMn7dZGpBbmyzTFO7dz4pN+ECfuGadDCwFofPhzWgV0LsjM404DszD/78224EOrGGQtqlzCB4O52FgC84R2AJwQBuM5hfmgJ3t0B75yM7xayM6DZBnetN9abBsTsQ2WaNiplhNDS8z/AeMef8EvpmFA+2l4bnjvRaKO6VmXp92xGtIHRhb1sCN1HjSc5ozoM6Ds59BQ/A+QfDZN/Mgb7cVEcXbyTByOMaybl/ZXz1QggTfaVzj6laT5ps2uVUuN7qQgr9hu77rFR2Kg5g/9e34+QFJpmaD8d2rP2m3zUZOUQcYn63CmqQ9XnVMlUZkb86WpU1Fj637G2Ki9ZE+e6vmXbyNeeuk/fX1XPUtkX5xOqDfR5stNJuV0/HGUMpcoONqxTYohR16I1c+x0jGO7IYc7EebGz4fkvGTniXwiH26X1fXEtpTng11cK8wMwbCQ/+ma9PVIP4v5itU4ImWzbSJRGokqyZ0uefzUPEdgJGJDaLWYX5BSWNOkTEZ+f8ggZLQeJYd7RF+4tzryQyovv58l4v2Vsly7mvgrx4y4bq309/DnXBqXHPNgfSZMmDBhwoQJEyZMmHCuk35hceUXLiGflvEXtdUYGCLsBNXe0uWknvcM9QbxxhAOVUg/UfiU3t4uGI9NJP/qeWM0tGFpCIzkCZz+dSDTRMNRVPfZ4CUMBM/OSozpEE6KxfPWNL6NQjiZqOyJpEMKfeiYq4YhQ37fQBv434DMENe/9mSosWE/GApGOY5koL9mT4YwOOQC9IarDv6cZAgMSmE/odbFipQDEjFXNolgLKRmiaQgTNu8kbFPIEpZq4i+GD5JkcHAMJknGCTsKiagvZJ76bgbdyVd5h7cVxlgGE6EvFaCRbKIk8pPSLWlc1VCyBCh6/yz7UKJWmHnQ2v6Xk8USHLjDa4SXlQTegCy+y3PmChda21eD80aL9CHEnXJOCTkl/fe96btzjGM9f9KzsH+eRIJptvJxdCYdcKPKIXCS3ltVN+vGQzH7iH78URCXCUiLyk0JXynOAV0rk/2LUMnkJBfcm810Vcq8vZjPCrJvbIc+X15FixTkzf6hP2CySSlHEKoyBANAK4FZtvgdtc/44sd0Hwb5DrglIkkn+scuHOw8yZuYyehPkMOwHYJapf+32DMh7GI1JJro9OAKJOJTDS2Iyj+OaiuEdTThhmgZBy2VL4L07smd7zJ26H2SopEW1DgMIUwnzJvQwtAlH7s516VsU8M7r08e+HaOnkH7eFdHOuFRDLpudsQ2Zd9535ITT3ur0QvVB8ncqHEGCExct0SNWKoSrW5RJzar6My2qOCT0OHfwQqZJ+UXyiiBvP1bYDw09vlOB3qUyMj52phTwNKYkiPGWPK+apyMFP7hbmG5FG0jXc4KMhyiYqgyxVHStbPpRDyxiUlbtYoRbuzgyGT1H6BfCP2c6heqH4DNR7Vy+bwsReyMvxW9nWJhqJzCEq7SShPWU/GPTQh27lI/FVRU4xqxWalr40pgQk5wU1yrBDaBsmLdF2UCsSbEKa51YQJEyZMmDBhwoRzBec06WfjIgiZp7MlgoHDzBCObjc+7CcRtqzBbuew3ZigqnGRqJFcgLIQLlU1QgY6xzF0pBB/QFKT1NRdflFI0eNYwrLotYw3tMj+IfxeEwxCXRu9VMF+4eoP
Cv8O5ZSVhaWEkmnmPe/S1iVDuCb9gEQiaYMPcyKmGIn8E+IPCORexbVa2sbb47y1oWOK34X8M0YM/4jkn2Gfp4aF6CMCQNGQt5cl0qBBJF5j/ahyc0kqSrlSjlVGgZLkk74mxoJSNVkiGeTkO2fGOma5D6z2SccfhMTTKr9YnmoNIWaAtFhdlQMSqIf73CvxJwSQGNi62GfzcI9C+i1CfrxlCLc5RgYJQexiLkGv9HMhuUzHHbYbCwSDszHsw8shkeg7nSevGgPMDWHeJJo4J7NCfdX9WwddMHAb6Z/qehHqQuR39P2M0VWaWIh+If00wabvf6nwK/P3aQ90MeL3woHpxhXvba45H4SchqJAdonsk/vJSDkPl11f0afDd6Ztuv0SKbgOyt00SQz0jTXytd2rwWvChLMIPw6ksdUbcg2MMcD8MHxIOgezaGCOdGDXgVwHmm/DtEsAAHcO3c4uAICsQRNUg9yF8aBdAu0SvNjx48FyEUIfzj05SF5dCHaACSH6AsFgyKT8pYH4swQsQsh1GfuJ1btPjXv6aRTzd1KqjBjxkYzbnWbVTJq7eaN5q0IKUozYMJjfLijL/LzQkw0UxludC7n2bmDImB2M7sJ/hjmlXNcQETOk7ivPVSP+MgKnuB4gETjZ9jGib/CXcr487qRVOqb0iBEgkBLjgVR7RFxBlggZUqs3BwJWQ7pM3BpVaCtIxj0QflWoc5TEX3aa8phK6M1eiNrQDmUucA0DZH1Ryo9zA626lPORAYgjYQeT8pJ3MP697zjORRoT8t2Jk1Z8Lg2sMSpUcRvmJVqtSKmNw7NoySCsTABQzN1ctpeOfCJz/HiftaJX3+eizbLxSMpTdzmqVsPaQa8hDCG2WVSM2gaDOSKBcUVpJQKDXmtUi6P0e434Q/l5XWhCUkhSTs58EyZMmDBhwoQJEyZMGMc5TfotHWNbfY/evwQ0lkAOOG9usd1YGCIcnlnsdA6Hlh3aED4v5uMzPlyekBA+NFw/dIgJXpbGyCKQMqIPEAN4f5FGauEm8Iuq3Gu8Y2/UEg9VhIUcu7pHbCxLGxhk8WZUWE9R9nViPGef40xIP7WYGiKJkmoNMZwiQrgrh37Oq+RBL4SfJ/uiMcSNk38de04zGRMScSiHr4t1QmbWyuQ1TSykjBpazedJvxSaRy/gNek3ZPbRRjgh+yi0l86RqG/ZqjBdJVaduyxXPutzloTfOgqF/RJ/jiERLD3UuWIfRsrrViOGloGIqRF++4ELJz+562LY0K3GoDE+7FSZr1DnpSsuIcKSzyUjN1nUH5FQVk3VOZ+n1BOHKcxbMiF5601md5O6h3uZQmXyoPFXjGykCOyo8AsGVjEE99R92ps8GMhLwi+pDRFVkEKK6lCetZyHQu4tA8mow7Vqwm+M7FunC+QhPRPhN3Ts2TBQuaBmP5PlT7j5wsE/69l7F8EpaeYVfw4AtTsw20c8gXfqELrFjt+3c3CdAy1bGDQg61WApHPcdSFApnPgdgFqQlQDrYIDQN0yqXWaeSDHDCwCseL8vKERA7VLObhKssFx8pPSxIx/Bw+/e7L3KxLxJ+ppKSfO3YBIasR8YsPBMSEKIyHxmMJYTnku5FVwwQlEiD/tFFO/rv4cYd8+QrV8YkBS+2lSIf6Yh3cuUW2xkbbQ+2ehFitgoiznYa8KmtiS+pvKNZSHqfZeOe+skSEjZOOqW9Mj7srPNNbL1X5FnXohFwMB37E4MNXnDESiWqucQxN+QvAXxCxJtBL4tYzMFwA/5zg08/XR4eIRnh/9XEZnSsj6ZaAHhe02zlUodk4i6j0bmmDWqBF/8RTI+7sJ5/EOk2F3VusJ2Y9IKQ0L8ljKlv5kK310lZo0fB4i+4bWhfJ+0ITnnkm+Un1Yabca1l3XbRLT3GrChAkTJkyYMGHCuYJzm/TrKqQciR8rAYbRhPCQF2w1WDQOW61XAC4dR6M/kHJkzQxFI64Yk3WIOG1sF+O7EICAtwfUvDJF
5TcE8c6O9n0h/iRECjuAXKaa4WJxHCsApBAtwfjkGGg7jqHzxKAeVTSFsb+2tiNS3uPwiifDBKZkaGK1EBXSK90bX4gY3di7a/fCfHYuGbhsUK+ZUAERXQkRJ179UOccQs3wVVvElh7l2oySLdQ1x6oW/lrRZ0BZ+B/9WROD2QJe1w+K9AwGTiFHOwR138BCfMyYVzZF3TO7jjESdB2F37qQ8Lu18xMoEn/p3Ol3Ifsk5KN+lmsqMGDvC21b9G0hG4/tLGNOvlvZeSB+SRmlwvlU+FHd1kTemOXYj2EmqJhLq2/KN1gfWBzQy3ko5QOKOEdO9uvnf6i/A0i5r4S8Dvtoz3NV2fhR5wASw5JW7sl4JP9Gws8lcjIplPuEX6fu614Iv/3YWYYUfhMmnMuQR12eYhPfJTLmGFAzB1wHtnOY2Rw03waamd8OgJ13VOoWy6xsI8ZoY8N+PtSnV/61QJMTFX44CTny2IFCqE9Dxs9FwjzCzxuS45cPxcy90J3ZO6NUHxUQdVvuUJOcnAz1nbbi3E3CCqKNDVkNt+ect4+zi6FLS7WfvOd1uE6B9nvRdRLiD0j3bwhDhF/KnXcwy3ovRN8aSZT9XGe43uvM9eTeDs2v/M4jJal31SApMXDulShVUGvUr2wN6Qt7Ij6GlIU1gnCoLkpxKOuLFFkh1dLIfLU6uVXkYi0MeKhndFoMEUrkmq0hFdrfIxJOkHoQLHzuzUjAUZ5nbxDsYAMBBhOe6+C0WJsv16DrU24voZ0OJBWCP7V2SsjXC71+re/PHvsWUDiBqXoOOQ/K0TVV636h1bUybrCaL8Zz03DO0wkTJkyYMGHChAkTJnic06Tf6Y5xqGXMrQ8t16oFp5ByBgRYhFCVJqpGlg44teyw07pI6uVEHmIIwGXnIlkgBN/MGMwsxX8BIXUokn3+X1/XqPJTSzRRs4haLlTRK/2gvZRN9Dr1HpyoewZTSrYeQ+0oVV8tX1ctR1Za6ClDWeAdvJqNo8KHQw6+mlOpoPRWjXn9AolXkllygNTVBoKHObVnf/GZLzh7NqaB9ajj/H74sjn/rhaaZb4WKVu+WmVsSmF4Eslnawv20ru5CAMEAGwMXMh72LmkbhDij8WcWBpBimsrr7W2Tq8RfmM2Q6mTxl7yEJUo1X7edJOMmgBgqCDsONVR6iM5+zQppNV9JTR578+RSLVS2TuzPten1GvRccwTeGzH59+cWcLcGmxbr/wF+uq+IVUtqfPDAs55o7Ir6idjVRaCmBMJ14VzEVEMfRrbVRlysvqE/bRBUQVIi2MZqX7cvwDpt01/uwptlQi9FGJYj1NiTJRQv0IMCuHnlFK5Fqq1BkshX1ZR8VVm6Jwk1I4AnpwWZ4Xe+QyhLSXQNwI6x6AzSEZOROfNF4aAbUvZMwokpx8AIDsHbAs2NuWnm2+D5tuw23OYnQWwswu39OOhW7Sw254QpGbm/7a2QY0PlM1BIUjzbaAzyfhr4Ik+AMRNfB/YpgEB6ECwEB6JfdSAMI4Y9NmeFK45V0qLGl+TBkJayPc0v/KEHIexEEHxJ45hjECMksrzF1/GFXLFASAXiQYPb+U3LOdTc6QVkDHST6coGseH7PJjr2tNOqwNcVYj6of41PtsUjFXO37gvAxEBVZ0YxtQ3EWFn20yhxUgV0PV0Fdq9esyhiGSbwhZ8WMqq738ViE7hRCSuZYm/HQVxyI3pPDf7P8VpV/Xht/9M0XWwc0vxqmWcWLR4by5wZHuZKzPDbQd5wyAniMm4o/gnacISvGnHSmLerG0g2liqE9CIqHUENKbv5VE7Kp75n25/BgG8uXZ+Mz1+3/Zl2Q81Os/aZuyTj2U6wK176pwvwCSUwHEYSyFN93PlGcozOgQZA5+Y2OaW024OeAtb3kL7nvf++JrvuZrznZVJkyYMGHChAlnEOc06RcNv+zDQ2qvZFkrNXG9kIw3HTMaxzBkMbcGixCKD0A0oPuFqs/5ZwmYBcJAFmNi9Bdl
nxjca0bwFNrSG5wkbEusL7xRRsJFiZGGwJEAlELFIFQLSaS9M6PaxaU8famtkrInC+mJFDpPyo2fAwFoiOHYK/04mEssCdHgr22dxR5h2FiivdllvzK0la0s9TLlXbZ9uEJW3Qfh/yh8YHiCxpJa8KqyknKAFOkX/jWJ3LOGIokreUEysq/ImYLwm/Z49f3Ke7pqVUFJ9vm+lFpWCLxSrVBcSrb/EPQ69MZak4raTxN/RPn5S7JPE0KlCqyG0qPabyOlpEsEG5CLFDTBuNs6dIzoDHB4ZkOeGfX8K1Wd35bXSUh1B99vlLXdk+uBsNJ1GVL6CRxypUep6qgRfZoU9t7q/rm3JHWmEIIuGXhEWShtRqWxZsRYGEN5qnYSQpT12AQhBtP9XQe+jdI1rmqzahmW0vkc98sZeHj8++hsmKYmTNgfCHXCJOvHpQrKWJ/T79ARzI5soz21Aztr0HYLAEC3bGHmDcj4fWEsKKj9SKn+yLXe2O46n4/YhVB1+n3pXGb8lveinlWU77zaOxDoOwxpaOcQPc+C3ioTFLBywKHk2BPCE5JBSphcIiicJHqDDvPpSOY7HEKuinNHnVARdZG8M/W8eK++OHtW+BnTu8ao8tMh59cI2TekktrrSDpIFkryaEjLIgu/2qu3fJbfVVF7nQ9Fombgtxr2dI6BHIsZVqn9avsW7/Ahhd8Y4txXn1M/19lDmxR+8u61BFC76/e3c5jZoTjP08rCsv9kz4VcT5hnV4k/IYED4Sd9ktXnXvMpxZs+X3btle9ZH608c/l6pnJe9bms17rdZh2yr+eQJ22N1U5TQ6i1f8QK4o/o4CrkCRO+UnHve98b97nPffDt3/7teM5znoPLLrvsbFdpwoQJEyZMmHAGcE6TfmKwXjiGcbIA8L/NjFfXzExaZHYwWe66ZcPYbRk7HWHRcc9A1BgbFkw25AQzWTQYIf6S0i8cW4SMkoWgY62+oXgeRvIa99sUaRMIIlEEdkAvT0kZlk+r+rLPgXDIje3+XF00fvRzs/l6ePLCL/K8yYJIPP8Rw3wKESDGqhgOdGBdJl6uomLKwlihH+5maAFdeqPXcmHUIJ7AYsCAakMQwEFliIoBsTzPUPhOa4KHMSEaNmIYI+lQlUUvFQYyX0YwuiCF/9LKQeknJuyjSaWxtXG5pJbvI/7gZxTauCD9UZNnAu0RW5J9aVuf8CsJPiBX+pUkXy9vZ1B2LTuHndbhxKLFqaWDIeB252/hvLnFocbENm/jM5UTfbX2FeIPAMgQWufPKwqVWWE6kmeTkI+BMt4AeRik2M8rJL9u1/i8h3oa8uOWozBemKQ+6RAUKUJqRkWrMpJyf6yKjggVhZ8O8enCsULqjhG6KcQzCquf/yfLgxgNiasNR53qk44SAWhErMP1MgwBW81+TWL7BzODzyA7PxZ6b8I5jmCEd2yialaPLTInsTZMIcl4ws913mHhFrfG1rJFt2jRLVuwc3CLFrwdHhZjQNYCQeUHwOf3M9Yb/btFIEbmnvjTYT4l13HIgddxMsJb41V+0UhfuTRN8pWPvVbPlJCxUiIBaGctGQOFziFF61iljtJkg5AbUUfpfChQDqFBLRnApGgIUm5ytOi/D3Vd9fUAQS1YuS6/33qG8zK84Nrm9prRvlCN9evUL7907Fp3BOrVU+5H9IoKqr+BkPmZwo/6Oc/WhSaixo7dy7C9UgG5InfdIEqFH5Bdt3bKqR6OfB7cc3YTgk/Nhcl1sV5sLLiZA812COcNzC1hyxLMzg3e6W3m4Jrzo+MQ0Cf+aISMh8rzB+SOd6S2A37/bG41QvwJnOp5mojX24aw8raqz2lOl38v67MKQ0SfPufa4Vt7FU73u/dTLXSuev7k3HEXSg5mZ4Pzm+ZWE24OuNvd7oaf+ZmfwfOf/3y8+c1vxmWXXYbnPve5+K7v+i5Ya1cXMGHChAkTJkw4J3BOk37bjcHJpc/RN7fALbZtUJgoo4SEjela2GYOa+fo
mKIhy8yAubU4uXQx7KWAxfAOQmNMNDwDXvWnFX4ZyQfODMDeBu1zy5QGtGg8k5BzypJBQAwfxcGj3AZ3de21rnOTCWGVFuU52Sf7lco+Rk721RQ0rmPMrAGCcT+wdBkBQHG7J6RgKAsJVoa8KUMM6rrXUCP6yvbUv4nBYQhahSlhszqE0KVckI3F4lKfS59fK/skxGs0drgQuiiEMOqF9oyFBy/jEOmTZRsSwecJVopkRC2XUcx5dBOGVq5plPmEypA32vggZJ98lt/LMI9ZHr5CyQcgEvhJ7YteXkFtbNxpk8JvqzGYGUoKv3BvtPJ2FdLzkRR/EhpUTLa6HbTCWDehznuot/myw/fKc6/hutT+QgDKcy7KQ/9vqncWura4pXq8kmdcK5FLpwQJ6en/HQ7ZqvP2pXaUk6u2qkiQawRq1gaqzBnSe8M5jvmEnDpG10GH+u32E5tuwoSzBXagxSnM54dhbQobJ882AFC3AC130iHtIuXnA2C3t7B14XngzsEtl+CB/FlkbCIBVRhzyd3nJfhaXu2CZ1HKgRemGt7pRZVdOkdl5x0g/DQyR6DQCvKId2FEZiaQSTmNY35jiBo/EAwDJEsk/1wLoEn5/VCEFXR5GHQZQ8eM3uU196JQxP1GwjDK2K7KyPashYMs88bVCLTauSrbSgcufb5s/0qZvbduLYeZ1UojrfQrQg1WiC9geK66lyF/qIyhWYMuekgR6QvYh9vWgEJfzpXWFinsr35P9ohaIOSoDPV0geTTc98K2cqzbaDZhpsfQmMIh4gwN4A5fczPo+eHkHKW91tKyCj5vZ93M9RVEfG9pqgQf0P3pDxaQl4OqfxXPkdj6kw17tX6or43q1B3Gsg3rjN7dfCZJw4E/ZwRDY4TgmlaNeErGW3bomkOZsb7iZ/4Cbz+9a/HBz/4QbznPe/BDTfcsKHafeXiC1/4Ak6cOIGv/uqvPttVmTBhwoQJEwCc46Tf3BCuW3Q4PLcw5L1Am9Ko0XnCj7olAL94sdbnlekImAdWaOm8QbdFUqHIksyv2fxn8Tid2UTAlJBweP6zX3UaytU2ErozkhBGDEqKrKJK7hhZfqlVWCT3tDFdDEmcFoJOfa8Rfok8DHXXq8ZQPxMUQGBPinaOYIxXAHVQ5FMIYdrFGDeJ9PSGq0T2pXoWC82RlWZN2UfFdlkukjK+azgO6RFFzhTvZmhH5ErDWh3k/FlYz3DOGMoz5u1pk7pPGTtqOW8YbuWCVyCEj87tVxJ/e4XcHx0e1GDYOCXKt/hdKaHWwRDxB9TbXh8HoKfu87+F40uVXyVsp9iaZ+GDKOdKSBWlSssYapJxeGYj8Tczvm9IvcSZYMhIQYEkk3K9F3ki/vxJ/T+NSVmdxu6szq6Te34PP/Nxf4csjCmc3GOAyOfHScS/KJc5PoNE9fx2tbFKq/l0flFR+jEQlNZ9hd8q1BSd1f0Gbkyv70gfI/+M6X20ElCfzxiK74sbE87xYEjbTZU/4WYKdn7O1C18DqwyNycAahdxXpVy04UwiVvboO0jmHUd2lM7aAG4pd+XrAGc88o+KUyF++SuA5omkgLEw4b2Ej6SAuI8JF5OWXc1bxhCSfglp4X8fI44RATIQ7fbQC5YQjJmS0jJSg5fYk7EX6H46zi8k9iTjToiQlmn2uuSiv202myoCfRwWc6ldBnxGrKDK8TfgGJuCINk35iTlHyWzSPlZ6E6S1K2DCtYIb7k814xphID1iNYxu7bxlDMP/V1l5FFanXS8+NYZO08xf1MORTnYDvzITwBWDiY0zeAlqdS36qMS2MEtlxHVAYPkfFqAlSG+qyWObItzhlUxJPqszPWvzUG6qKjSKQ17Pj8eQxDR+n7vKqt94SakwAw6CQgY/zZIP6mudWEmwJOnTqFH/mRH8F3f/d349GPfjRms9nqgwrM53O88pWvxEMe8hDc5S53wYtf/GJ88zd/80RY7ROnTp3Cox71KPzhH/7h2a7KhAkTJkyY
EHFOk36CU4sOXeOVeOWqku3cLwxt4z1MO58s3pLBdiD/HAOHGkLrCAtwZoAGcpLKRo9VzhbtmkCT70BSj/WMTGJQKkiEjMQKRI5W+mlooxSQDFPl4k/2WVfdVyqm/EZfsSVcICMIAEUCwDkhbZKXuxAAnVqYkb549NurtlDtGeyQFpvizZv9Tsm7OLYl+hAFn5BiFik/nngly+I5hvVTi1x9r3QYI1H4ZeetLeQljBFRRvzxHhbR0ke6oPKTcJ9C/IEQw8WuWvvr/qz7fA3aaOXvumxPxtb9EH/rotZHS7Iv1rUI2yl1E1WfJvCTYnP8Hui2mVmD7cbgwu0Z5pYwD+UuOsZu56JSTZfvz5f6Z3npqW1TqE/9LNdqFxUo2gCsfi8VfUPqyFQJhPqlUKcS0lieO0MM6/w4YF1QWBNHsjxTH6pxSr4PhftkTiFRx9R9uu7AsJJTrqPcx+/Xa8rUBAPtkykMDVeP1aQfN1OonAnnHmhxGkQG7tDR/m/L06Bu0Xu3Nbe+I5YXfy22PvtBLD72fmx3DstTO1gcPwWyQdEHeEVguwTmFmR9PkBqZiAbwqprJVBh9CXmXohPQXQ6UKOkDhnu9+mPofopLkN5lvMqAHnIdhfGusAopHdwyIFM6Oc4lWtU/xIM0C3BHLIWWwcOxB9THFX9/EpFIxgjkaT+Gg7jDigCTfbpOVwvVCOAXl4uITmBqmpujDjrzZ2KiAgU505KpTUUQhT5vaUBcqGG3vx0oM5jiq4ayvLK7atQCxW5NkbIq5LsrF1vXz0aCG/1XatDdcQLHdpzSNnGs21P+DVbviqLUyAOoT9lvLFzsJCC4Zzi5FTOz2PxjJAfU21D3h8YoW8Zk3s+SZupttPOlFJ+DSEgSg/Z8yMhToHC21LBFCS5qnc5j9JErN6nV+Q+OlBJ7MbPaxdQJ+jjdzKJ+K1MzkSxKekgzgLnN2HCTQIXXHABnvSkJ+Hbv/3bcYc73AHPe97z8IxnPGPPOcQvu+wyvPKVr8SjHvUofPd3fzce+MAH4k/+5E/wwAc+8AzV/KaFY8eOoes6XHTRRQcqp+s6POEJT8D73vc+3P72t99Q7Q6GT3ziE/iXf/kXHD16FBdddBHue9/7HijHPDPj3e9+N/7qr/4KL3jBC/Zd1okTJ/DJT34Sn/jEJ/CJT3wCt7rVrfDEJz5x3/USfPSjH8U73vEO/PAP//CByjl58iTe9a534W//9m9x3/veF9/3fd934LrdlMHM+MQnPoE73OEO2N7ePtvVmTDhZoXlconPf/7zuOMd73hW63Hjyw82iI4ZR7cbHN1ucGRusgVMx0DLQAufCJ4rCwgKCy1LkgMQPidbb7GYjO71HA45ibbsXPxrHUd1ILM3DnXOlyNqFseh7HAuVoZvF0hIOS77C0Rd/HP54q9T56wRfoIa4Vdra/1ZlDpiqI/1Uf+28TNj2aVt+i9dD0flURfK9vkXU1sNGUrWwTrH6EtnlIaOpIraGIpQNvKX/WZyI8wQysW7Dq+oCRiifv8eKkNjv5e+Tq60vcC5OhEzRPhpCHk1s6TycZpoxLRGSP1E2tb+AIScn4TDM4Pz5xbnzRs0Qd2X1IAcveNlLCifR3kmyz8Bj/RcLv5krNF/ejxy7MNjLjuOf51scxxUi74N/W/hs5N9wrHOYdE5P345/7ucb+kYbcexvEWX/pbqd9mnDeeUY6Vt2jAW7IfwM4Yi0etVl8b/Wcr+thoTlJkjf8UxM2N8vzF6u8HMmtC3TPwzsk9Qfk6YcC6BjY1/GboFaPeEJ/y6NnNYMRfeCm5+BNTugl0Hms1htrfRbM9h5w3MbMTPrDTwFgqPjOApUD5dZuBd13PGqaBP6OTzKj1XqyHtH76PnMvv4Ib/nA9zKjkNY/2JVk7e5b2o/1ZBkzUxLKNqxIzwk7qXMCb9oU74pTpWDh8h/EiTI+o7le02gvK9OfYn7+PyeywrzNfls0b+Hq+fX+97o4p7VoSOBPr9tpwT
A9rxjeKfOL9pwm+tt5+QPabx445tfB8S1XF4DkAG3Mz8fjaNJ4Q+4Vd77cb7tao+Y95AZVlCAO7hWfMHqvsgk9vaOBB+pzX6dyxaih3pW7W5Z20eKqgRfjqqyp4wQPitG+EkHrbhNcaECecSvu3bvg1PfvKT8dnPfhb3uMc99v08PPnJT8Ytb3lL/M3f/A0uv/xy/OAP/iC6rlt94Br4u7/7O7zwhS/cSFnvete78M///M8bqxsAvPzlL8fDH/5wfPnLXz5QOf/6r/+K8847Dw94wAM2khPxc5/7HI4dO3agMu5yl7vgox/9KB7xiEfgRS960b77x5e+9CX85m/+Ju5973vjQQ96EF71qlfh2muv3VdZzjn88R//MR772Mfiu7/7u/HsZz8b73nPe/ZVluC6667Ds571LNzznvfES17yEnzxi1/cVznvfve78dCHPhS3uMUtcMUVV+BXfuVX8M///M8Hqpvg9OnT+B//438c+J4Cvp6XXnopnvKUpxyonM985jP4wR/8QdzpTnfCpZdeiuPHjx+4bsyM1772tfid3/mdA5clePvb345f/uVf3khZ11xzDf7qr/5qI2UJ3v/+92+srB/4gR/Apz/96Y2Uxcz46Ec/upGyAOAJT3gCrrnmmo2U9ba3vQ2/8iu/spGyAOBJT3oSPvvZz26krL/6q7/aaN1+93d/FydOnNhYefvFOa302+kYdz9aD2ewGwzNjhlzSzjPBu9CY8DNNiRnDTdeCTi3BkSEjr0xmykYmIEYmk8M9BoZ2acM1CUkhGCZK8wrZtivZUNOGC+Y4/Cb39eHztSKuERE6roIEkHG6hjZNpzHS1C7hk5cZQHAeIu/97D1aiR2EloLsJIPMap/Vi8M3YpluAk5tSKpImpCUPRcl3w6PvyetBFFz+jeOYv2LAlR7TWbjuFYnxQ2KygLSO8nBrrckzjLIeNGvK4HwmHVjC/xkOB1LT9H4wfnEz1H/QKYx8NOrYKE+CxDqwkZsx8lX4mass9v18dyVeUlhJ/kqCP49tG512r9VE+SvUe/b0/HwNGtBhTIpaXj7BkUteRO22HZhXBv6vyyL6l7MZaDatXzO6rUVfvWwp6OKf1S3QhL4kiomdaNKCa5dz3ldZXXI4S/rv9Y3j4NTfjpsK1St/Ke63oRkkqgX0/KxgRdT4DgHDCTesU+p7zfw5jflnGnbwRog/SZKn/CzRe8fUF1uzn1ZZidG3zOPRViz2wdwuI294BZnAZ98p/QnTwONDPQ9mFYY9AsWjTbc6BJczZ2IcSnEIvG5kp3MoF4HJiqhrx+JYScAhDDdMvYockBgYvjXyiWkYX0lHKy72GO4VTEA53TT/KIZcoiMv51X3u/aTLLhWt3LcApfxeRiXn+ujBy6VCfq97dOlKBS0NWVZ0F1YZUfO6FJ61dTkn2hc96/hLnKZB6FOWH3I6+wrkqMpatPq5S/q1qn3XmiKPH87Dirzy6phZcF3pOu5E3Sy3XoZyL6+0i3bhHrCPvP7HvlIo2MsiTHoRtEk5YFGHsvKNBCLPJ4Xc3PwS2c5BzWVSUWA/dXwtI349zdHV+BjJimYv+W0ITfnm7pXVCqTDMC0gRaBK5nbc4U+jPDt47bagoTvUo51m6ToJyHByyBw+RfUBfDazHil79KEQ1qYUoLVV+NJz/U6v9zgbnN82tJtyU8JKXvATf9V3fhYc97GEHLmt7exuvfe1r8cUvfnEjxBUA3P3ud99X6NEanvzkJ+NjH/sYnvGMZ+A3fuM3MJ/PD1Te8ePH8dKXvhT/+q//iiuuuAJ//dd/jQsvvHBfZd3mNrfB7//+72/MuPsHf/AHuPTSS3HFFVfsuwxjDH7yJ38SD37wg/d9D06fPo0/+IM/wN///d/HnI9d1+1bEWaMwfd///fjcY97HP7oj/4Iv/RLv4SHPvSh+yprd3cX/+W//Bf80i/9UiTTrr32Wuzu7u6rvMsuuwxvetOb8OY3vxmvf/3r8X/+z//B
Pe95z32VVWJ7exvf8i3fshEl3WWXXYZ3vvOd+yZeBXe84x3xG7/xG3jQgx6EP//zP8f5559/4Lq99a1vxTve8Q584zd+44HLEtzjHvfAkSNHNlIWM+Oud73rRsoCfG7Vv/7rv8ZXfdVXbaT9vvd7vxe3vvWtN1Az4LOf/Sx+9md/Fm984xs3Ut7jH/943PKWt9xIWZu8pwDw2Mc+Fre4xS02UtY973nPjZUFAE9/+tNvEvM64ptCLfaI48eP4+jRo/jAJ6/BV9/mFtVF3anWk35bjVdZNG6RFlZAMCZ03oudDHh+GB0MTrUuqlIWXVKulWQQkAzvy2CMWHbDuabK/GGiCpG6a0O0KI2GFjNpkVlsHzBclx7ntTB/QE60rCJodB40ITJLo7pch1wfsDeP0KH8MeUCPy00SZ1bfa+UVwt/KtcsJK9exOu2zg1hw+e0YUdZBMez13J31EJjFf/KQt4rOPNQPvKb/l7Wu7ylJcla9qt6fpDUz8b6lv+93odW9a2S8Bsir/xvo0X1nruZMYP9dIiYLg2gZV+Q/ij5/cQY1jng1LLDTutinSVnoIwBvoxAFqlOLkberB2Kth0i+fQYVMsNsg5xqo/V+e5KErUk2cprS+1E1f6g8/TV6l4lIwvU6jKzlI2xmtwVm1k5LuhXoR4XairpsXyI+r7KPT15w3H823vcGceOHcMFF9TJlE1B3o8P+uU/Q7O9uQlViXbnJN71M99xo1zThBsH0ne+8KmP4vxb36G6jz12DcypL3vDfFDhuN3TfiA6emv/LjvxJfBiB7y7A3f8Ov/vqRtAzcwTgc08hfTcPgw6dCSE95z7fH5i9J0dBjcz76glCL+5+aGeQ8zSQeUFzd+DWc5ZNabJO1PGHSH85DMQnnfk71D9vrBB5W0pjUGNkX/9NkuI5AW61ocrHFKn6esS9ZNtIinS6XEo/KvnAP6a62OmnrMA/XlLGcrTH1PMX8qwpBXir3RYAlAl/KrnEaJPCJHyvLqd5F+tLKycFwPn1lhFxA1prPS8oST9SgKkVAquU355nrXukf53FVaE9azPJ/utWAupn91TIf2cA1zItc7Oq4al3Gbb9/fZFiQfO7U78Vq42QZvHUFrt2OEAF3HXp3Cv/LMlwrEXqjaIRVk+JNnT9S/5VhTnrt8tmKu79Am5NrYHkAg/CrEdsp12ERnCyaTrQl0XfR6YBU0+VeODfr3GtGXyiicA6JjYxvHvJjDfERpKuMdgDTmBQLQqbYH/DUfP34cd77D7aa51YSbNKTvTPf0YLjzne+MT3/603j1q1+NH/iBHzhweZ/73Ofw9re/HVdeeSXe+973YmtrC29961tvEvfommuuwR3uUJ+Hn018/vOfxz/+4z/iQQ960EYM4zKOHz16dM/HLpdLfPGLX8R1112X/X3Lt3wL7na3ux24btdccw2uv/563Ote9zpwWRMmnA187GMf2yjJOeGmh3XnF+e00o9Q8dTUvxOwbakX/ofaXWWs8IsKdi2MnXsDcTielbFXG3kFYuxddrnBWhurAW8IcvA2CUeMGQys9WUKAeENTR4+X2A/fFMt7N9Kz+UVhN86qHmHdszoumDgNpIHMVcx6nyGpbImL69+XlLL6KSK8u3Sjan81Hdw8C4lVUhR5hBppg1oWXuoetfOKZ69cByNCx2rBXFY3GYKhbFFcPiajJLJILmqruti6Ng1TUYRovYDkqGg7D97Uf4NEVT+txV1oVSHOhG1mmDP9lUGjxrxHMPpsq936xDCYXJSHzqvju06JNGskWdSG1DyCg0p+saIvjHCrNZ2Q2Ezu45j2/lxzD//LqhblkAk3QDAqk5TXkftmmr1Xidcq6X+dyEAxRGBEEI2g2BNQcaHe5hA/Rxe8MpOZgrqGj+mWEOwSMpWuZ6SwI2E477iXx0M7Bh8BmPGncmyJ5xdULfbV2IIQgg6JgNyLdzJ4+B26Y879gXAGJitQ14FZCx456T/zXUgY0GzuR+QjfWq
Px0+1HUAtKGX+g4wUsdQh/gdaRzW2+Tr2FgkqBF+ZTeX7wbo5etiRsrTvOpcRMgE9wMEDcGAwxw2XUuYU/jBacWZEnrKnsq2/Lf0uUckFaE2NXiEdBurbXmeLJxhRQXFCH2UHeIkW5cB9PpNjcCKv+2RhNP7yVlKtd/QMFmL1uG359+Huq1Wap4JrBrea4rGUSKyhpJki/eyBdwsJ+LEu4l8tJY2hCAvcwoy+udc59mP59d1K9Sq2eMaHO9GLy/umyqVNWutb5fP1NA4PIIa4TdESq5CqQIG+o5wUlZPXVm5t0yEuLpV15alNgBQC68q456oNM9WaM9pbjVhwtlB27b4zu/8TjzpSU/aSHm3ve1t8YQnPAFPeMITAHjl2KlTpzZS9kFxUyT8AK9ofOQjH7mx8ohoX4QfAMxmM9z+9rc/YzkU73CHO9xk78OECetgIvwmCM5p0m+nc7j2xBIXHWpwuAiftmUJW5Zgdk9ET1GmEFaxDV6l4jFZMQr0VFGccmDJ97H8UrKPhjYwx33VmkgMxczwIT9jOapue1wLDO0+pLwpUSNapO5WiLWKkb8Mcae36QV4qW6KnxWZ4n8Lns3MhXovJ2QyL1rIgrV+nXpRzFAKvxDWaziEJiIR4OvAsOQDFdlAiKQ6JCJAr0/7nrKmb8BRddAGhhrZp6+jGtan0hP6fTydyx+T76fbYy9r0r2Sf8b01X57gVaj6fOU8EaznGDv0DeySQg3G/tY/36WbdM64IbdNnqhCyHmDdI5GSehV5dgVdd6A9eIvDL85VgYz3UwFNo3En8u9Q95tpdgoOsTccPnyL/XwnfWqm30PYgKGk84aoWfD9fsw65GBY7xY7AnAfve6/7UKTxuF+zIzN5ZggiAIzAl1Z8evyxyok/qS6DBPjhhwk0RtDgN++VPw513MXh+OPutO3o7uCO3RPOlTwG7pz3h5zpw18GcdyHQNGAAZusQ2FiY828BXi5AhTKCjAWMAc3m/jPgCcBoBB4IqydkIJAbzUPoyxTnMr1fDA0TOPHducY8qxZy2RFAHN7plMq0MpYw0oAgpIa6lkE1VhYJwDuMkIEnACnNJg28wdyp66zVvxzv5HhSY2oZznMtsk9e1gNKuzhPkd2Luhk1VmbnUCEPI9FXtBXBIKNehANcQZIMzXU01plrZ3MA5MRf2iftVFOTrzpXcnjLz1HdFwWRo8Od1lCGV8Q4KTtGMlJln4z8KSJbCMFFrvUqMMG8SSpieTbtHLAAN1vg+WEsnOT79vNrPcccI/xEbaeRheEXEqpC+Mm+0p9rLRodArK5YQh5j5EQnyUq9yv2dRkHkN9rh7QuyIpa85T5nKWvUNS/yee8fkX9y2swBkATiL+wD4UWKcd50w/tScivOc4FzyTzPWHChJsUbnGLW+C///f/fsYI/62tLWxtbZ2RsidMmDBhwoSzhXOa9GP24ZxOt34pfKhJGrJofF7DK3mvycOFAARyI3vpTVrzLu3YkwghIUxRcKh7SNZRU7mUmyo80doQ4s8G5d5eyZZa/SIpofOU9UgNjmRBTd0Ucx1CvNk9OePVcqL0Qy/3oSGvxGFO4as6ZtQ4P22AEu/+qO4ZMQSZWCdv/PdCR4q5DEX5U+YeBJDVo5PrDSTmUHvG+kbirR7Gq0b2aaKvqljsGTD7BqmS8KsHMVoPQ0Sz9L8a9G8leWjXaLdVZIsm/iJUeSaQuQSO0rwO7JVbnIh8B0bnfB9tHWPZuUj4rYIeP2rtMKbUq6n6xnL21VC20V4IqpJQXGf46DtHDP9W1mtI3SDkH6CMVYGcF2O2/rfvVICo0IEL42KQ8jjimEPG5+zyOUwdkPWVmkFqXSXpmYALCtMzWf6Emy/IdT4qguRBFpgGPDOAa30Iz+UiKPQAsjYaa5mdJ/Tm257csxbcdXFfAJnqj7Tib505mTJ+x8NIwvmGTJ0DBNNQ1x3LK1xzkqmNRY7FISq9oxhpfGAyiWDYD/ah/OnPTfPtWXhK
VK4ryzcm3lHr13+I8KvvnM/Vhwg//TuPDbAD7bUqL7avr5rHVs7BjEHirzxPVu7AOWvvejmvPtdY7kAA42Sy/H5A1M4+SPgNlaEUfJHgi+o+VVjI8cezEPpzYNI35sy41hUPEX+or61KNZ1gLRXmOmT/PlGrVy1FQYkhwq8arnUP9Y0Ot0BO8JXPpnYcqECPTatUw2cS09xqwoSzg9/6rd/CbW5zm7NdjQkTJkyYMOGcwjlN+hF5EuILJ1tYQ7jL0Tnm5VrBNGG1bGJOgTwPSAO2M0ieFF02kVdvOArepIaxbL1RfelcprDxSh6O4eXGEJU4IcSfEGDJcJ+M12OwhkYXPV1l4aC9IoWg0sRfXg9NGqj6rxE+UCNT9omRSUIBFsanWq5AXW8CFx6pQrhpBWAKqwnUDe6azMrz4yViq7ZI1opDS4r8C23kQz5ydkyJtDkns4bq2q9zrgIV0rK8rvzYccNTeY6yDE321ZSE62KM+ANSn9IEdO+3AfJPn0OOq+WdjNcUyVGgNB+UVfRFJiNyY7xibNv6XH6SW8YxcGxnGcJ6ujgelP29hlpIy/3k5Kup5obQC3la6bSWhonVIWKxrHb2mxvebwiGABQhWiWcp6j8JGejz6GVCL6o8Av7a6Vf+bwxAzAMYgrhl5HUffBEghD/QP685kaxPE/kWeD8JkzYP6wF2xnM6WPA6WNob3FHSJ4lAF6tceoE3A3Xw5243ufjm297Us/OwLPDnhRkBzIWxnXg5SKpAiUcqBCCIexnzF9Xkn6lwbiAhIFkMjBBXVaOWWOhBvfyKmNwL0S5wMEr/Djui1zho8kYMgB36TOwvsGfXbzONC6ld6sfd/p1jM4QyAm/wRxn8q8m4kbIviGVX6msA5J6qHdOIYLYpbyHtbapkQbykwqFikAE1+5x37mpmAfofYvfamSclDlGMJXnHDovqd/0HFGXP5beIMMqkm8gl1/8eeCwKsmnUShxq6Eri/tM7SJc/zyt0Yz1kVnsHEuX2lC/w9dRt/r9/PpgZdupdaL04/jvQD8ZI/rWpln34BAgzgS1+b3G2Dyrlpdc1jbVMcENPI/lNRTXw2RinHMZS5gqJoihvir9I/zeCyU6YcKEmz0uv/zys12FCRMmTJgw4ZzDOU36AX4xIyHdqgsuWVyEfH2QhOJhAcnNFni2jZYRlToCUZQZ8qST5Kzr4iK87s6pQ+FldVWxX7pAGgGe/NNYQsiM0iCfPluJ9YNEiNVIvl7dQh5BOU4Tf/EcLl1jz1t0Rd6tGkmh6y0KQFH/uUgCpnCh6Tdk+c5KAtAbWwLZBYCYonMwIeT7qLRBHibTG/G0cq5qHGIEBQEHg0QiIUVpCCASgGW79NokW6pyZpQYMhzUlHwleVle3xDGukpNzVcLa7YfrCKIM2VfefMc9fatofecQMieZGTU0FWSj11GTKXPQubNjMHMEnatJ/x2WxefiZ3WKcVdYhZ9LryhKw+7DRB2Q/kM11Xz1VAS8H5b+j0Li6WNmvo84QfncseBsk4xJCj3f+uVWZxfhxW1drjONtxjqbYYstJn/9sQ4bcXeFKPB0PV9fY/C1K/WoStTZc/4eYJtkGB57rhfRY7YNf5vHyA/zzfBjfb4GYGdORVWABALWAszGzuyT/J79fMItkXFYKmmJbW8n6NgMKfzN9qiBFAkTui+Py8PEokAMPEn36vc2BuZJ5BCORfWf+SCCx/27OqL5yPUv1zRylVPPXJN1/5CuGnyb6x+zEQ1lOcq/TevZyBnOffzu59LSxqee4yt98aKAm/obeobK+RcSXxV6IcKocIP31ufb6S+NsY9qn4Gw3tOHSeGvEX+3d4UovfohOAbaqOmYDvTw55fy/BjF7+zd4+ch1jz+Meocn1vR1Y1IFUflMJfVmAFSkZt2F4rq/nQUOEn5XxwoXwq6vIf1Hq6evQ/0q9KmFl8x0q5HDhGFGSfzcmprnVhAkTJkyYMGHChHMF5zzpB/i8Tc3QzD+S
fjPE/BHyk52Bmzk6GCydD88nBhqBcHtpQeSVf25AsaQX59ownRnzk5tqRv7539NnHw5TdiV0nSI6AhlmiNYi+/aKsZCLevuqvIYA0HWJxIyXLuE9g4qy63ybimIywqXrjWSqIjsjUceJkPMbEFWANWiiTxM9ichMdSgVh4lsJIABJkQFkAvbVqErdso8plcsYnUfLXP9Aeq3kYro8EN76T61LrGX3lczgOwl198YeiS5VoWZIW2GR0n2edJOCKtEcicFn2/1mTVwjrHTpVW6fh6szceAmpKu9vysq7KN29e4CfrU6yhuV8GFsatjX05J+GvCT36qXt+AInmVoTOqKNW/FH9LBj6DAYcQKMUE6n07M6BViUlNHvh/eyq/s0D4TZhwIIRwehEVKyQvF0C7TGE5XecdqewMbBqQhPgUlQc7gFpPyonSbzb3Kp5o2FZKMTmvA2AUYaBzQfUq5fdbxxCsn10gkQY6ZLNgvzZYCfHJzMloz86/zDoHJvJhv2tKvwMQDpr4q/+ewvfJ/vG08qFU+JX1W4EhZdQQ8ZeRiyrn217P28Ma4VDLXGi196nUM5JDY+Uhf7dk1RkkkldDk4tliM+1QkqOlb3mfvtWWCnFF2mib6g+mvCzc9+XnL/uXkAX1J/RIZWkbjvpo1nbFf1l1fyqWLXEbfHzKpK0pu5bU50J9K+9lttPMBzak6ITQMyNroh3/WwCqI9VgXDPx/zCcWHVdVUIvur5hPwbL23ChAkTJkyYMGHChK9onNOk3wVzi8PbFtb4RUpTm/2TT35OwWOdyYCaOdg04Nk2OmqwcIxF5wm/jvM8MPqzNSGEXFhiGmPhHGeG7GyBWRjoJcdXVPIN2DG0kb8e/pJg3WoDvQ5tqOG6ujG9RvSN5cjo11vaYWgPzV7EExSqx0T+dcwVZaAvxxifz1ErueJ1JCsSgP6CvCT3xGFVqxcFhijmIjQmKQ5tDOfqtwn5Jx2m5nW8ynCwDvE3ROzVlIp7JYPtQSxGRTl7OfeqkJ9Dx5TnFIwZW0rUiZ6c8FuGDiIKvmXHOLFofYhfx5hZg5kpSbRANHVBMCMJHdd45sdI9JI8qx2j6yGo5cMzYdwEEoGWX0O9ngZUKPuSQrocQ8YIzb32Tx/GMyj6SI2FRBkhPJRnS0LgMaVcn2J2TMbpZDDrHEKuxkSu1/pLbjBPX6K6YL+y2AOCmVeG/Tpo+RNurhACjkCuw+xfPwG3fT66o3dIu4gK0Bgf3nPrEHh+KCj9tnxuLgTyo2tBLrm60NznCGQ7S+o+rWgRBGWL3zkZm4kN2CSSMPbESAr2Dcs6x552DpCxy5D/QcYBA8qcc3To71i9QJwJyZ+fj8O46wkGCnMECuSmKFWqSYd1aEEhCwdgSCuZKI5ftXGQUCf8SP07mL9vzNhvcsLWvxUS4Vd77cn5snOGEPwx3GN5zpI8WJcYZQcKZPCY2n4odDlRnVQ7qAJvlbpQfiN1LlGQ1vbJK71+mMh1sWfCb+j+GAN2ntTrEWbheSfXZc93bP9yPcPj7yLdbuIzWI0AwvXrG7s3pJ65WpvoiAJlbrx+YWGtir7SMI4BZHptIn3WcR6xpAZ9fVK3GPacENfS8kxSt0jPfo2IL8YwJgdweC6tcuBQY/TQtC8+WyEcb08ZWhKAkWhscWNjmltNmDBhwoQJEyZMOFdwTpN+M0s4NDNJTVEuCmrGA1k0SWJ4+EWSGHuBpO7TEBLHEgFGQlCGVeIa62rH/cTfY4dFA3k0xIdjyJ/bhUoaQ4lQQE6GRGWgyc8rhqghSD41bcDXeb3GVIAHgTagiCqqYxX2k4QgpBRZxiTyLoYBDBUtjTGriL66MUizMeHeq5BODoAhH+oru72aOK7kw1txJohSMd/UL0cTfVqlJte2Dsp2A+qKvL2QgmXuyFUo8/SN7VM7R7QXaFJrRXV1CDT9yMt5dA5BIezLPuJYjC0UN9TUfOuG9dTn1efIy6qTarXy6jn68m0l4Vc6GuRI
FnIh/uL2itqvVv9VkPuaFHyI5J7Oj+ivpX9c7/zwYyaHcUTUw0kxkRtfhewTRW2Z83MMEvZPDIhyusmIM+Gcgw691y1Ai9OgnePe8CuGVmMAY0GHjsBsH0EnSr9mngijhRh+ORJ81DTxHBlZtILgGq4j6sbodYqokGVDBL/GOq/DGkGTCtBqv3CysXpXpO6VqWp1eyI588qMqpDXyOHXP3FORgwRfrVzUY1Y0PXYD/ah8qsWI4SRjOsYJ75M8fmsRevbJ/FXqmBlWw8DYTCH+mQisQLpLQS46ttcqow3gJL4A6Rv1qMAyP1d961dc1zLwqPvtcLq+jXhB5Mrof36lQfHqmGCTeZYucIvI+Fdm1S3ToVfHVPf1hTYakyQOvd2wYDacuxcexznJ0yYMGHChAkTJkz4SsQ5Tfodog6HsIwTf+qSxx8tTntD1XLHLzrmhxETwgOe9DMNnEPwxA5klhjL1ZJPvCfFO9IbozmSR53xxNEM1FPsCKkk27vgjVnmuhqDMRQlKVYt1sYM9WIcd+SZKW3Yd0Epp1ESeZr4820Q1HgDxv5QcNp2ACRiLxF/utyYf4coy4coiryEPvlVEnxaqVlDL+SoS21qg/XHqXPVQmbuVYFXKhVL1MKQjqkVV6GmGJVcill7qnX3fgnAEmVbjHnNj5F8OrSi/j4Erfpgyc3GQGPIt3p4ZpzIwQzgwr13ABpLMGQwKxRmxlBU/ZUEFbC6v0ndStSOWTV2aNWh1E/XKXl4UxxDyr6Q5ePMHAF8fr2OEfvF0HXVDIhAvV/0zq/aVNdzZkxQ/lHIB5oTeZBqOa+yYfJjtSlMeRLCb4jg00ra2nXVwHHcCf1xDbLwTMC5vqPJpsufcPMEEwF2DnSLOPeg08dhv3ytz+W3XIDbJaiZg7a2YS68NdzhW4Bnh3yOZPIKHkuI+ZSZHcg03lhr5+E8ygHL2DTg1MK7wSvPpNexJg1ixccJDjHmi5E+TGmC+qdP/JEcVICwOieoGNO9YR4hpDjBitFeGa2jwqesrzb4q2seV/4NK4+ApE4sc/lFY38NK8JssqqTzuMXfxdOkxIBIueFa33Y/a5NhF9FbVjLY+a3K6JY7RNDDFZQU/xplVQ1x2y456uIv9oZNfGnnU30vRobTctzDanVavuOEn+qD5azzqH326qZnxw/9M4Usg+2iWphlnoCfjwAUtjfQAhbregdKH9dZOFXOfV/TTzp4vuhgAkmzBXEcaxPqIdjZX9SfV7Bj1/KKbWkiBXhp5//LswrHBJxXa41NLTKkEgpbSlFT8ieR+eJv+jkMfZMhjoRh/oXCj8d6rcGvW5aNXfX4YYZOJhTwD4xza0mTJgwYcKECRMmnCs4p0k/nxul8SFI2kUyGCgjgoC6Bdz8CHi23TOY2LCiYyCu7jqI52OafOeLQIqkgOdHvPpMQrk5vwqM+ew0uqD60+RfCU22LFsXt7WKXIrGe5fC3kGp/0QRqENlxoWpy436tXpEg7/Tq7Dc2G9l8ctpu4T/G0MthOCqcKWi+hMM5VQcOjbuU4RkHazjiFXPMcNK71AGrTK8Tkn05bna6udehjXsUG47TfDpMjWROXSOnspLhVfNQlWpXIpyXEa2Oh4MqTmG8or3GlJ0XbJvpUFWffaG2ET+EQDS5J9hwPnnaBbNdyaQ/f2yyzCZ8Tyl0nfNdf2qUJ4lciWcHhuQkXtD24bCAlsrOUnz8cCFfR1xspaL6dH4+voqDd+UjHCWe2soq39J/snnMp8fEIxHCFXNPOGLtkRO8JVhcjUpXfb3jlf3s5rjwYQJN3mYJlN6AABcB44hPS2oCf/Ot+HmR+C2zveGfNPgxK7D3JL/E2LONmFMrRi1h75ro7KQOKIIDGShd+DQ3hfNMOGQfa+rczTx13H+ryi64xiFME9U753REH5QKiIyQUjp6sb+WtuMQMY7TSbp65L65dc7VMkBZU1Zt1q4TRk/R1R+WahDnStsgPCrnlf6gd6+BlYRbLrd
NIHmmCPxtx/E6XS5fU3i74yhUEyV5J/eVoVWpYV/a6ouTapJiFsY2ycoKwSu1FNC4tbaSSv2x8BrvbeHoQlAmQ3WyovktiLXhws1YTJSUa9VCD+5yloEgSphXayzEgFJMTyxIaQcfkL46Ug5So0bqyb1KMcB5byhCb+yvlppvLKPVcaC6vgwYcKECRMmTJgwYcKEiHOa9KPO53ygrgV1y+CV6BIBiOAFLLlCyIDtPPPsTAYdCqSHP4bBMTyb7FMzAFsQjBPSK3IkWHYAwDE8J1Qo0F3n4BgxJ1gJa/rGqM4xluq8JjBMQgTqEHiG2edTEINzFxRwQbFWhsmsqf7ieTmoAl25JFMuma7YDiRicABlvrBVhF+qT72coXMNEWDjIT1TSFFRHFqbt4moQm0ghwK3EKHJvlq4zdLuGctU+yxLgiIjDPPrGyIxhSiN+xehYKVvOOQEYDqpJ4F0+NQSeqt4PQ8hp4vGUTvbumTfmPmvNNIkj2+CCe0jvJV/3gyWcJgxYYmU03MGwrLSgWohNQWr8172983qPnJgSfYBWFvdp5V9tTyZ+Yk88RfDn5pE8luqGDXD72Isq/Wjkugr617WO5F//Xr68T2EXJXxW/0GKDVeqGxbPKe1iHbSNnGsMhQV4KvQnQXbFDsGn0GP8TNZ9oSzDEOZ0ZeY4VyX7UKzuSL9DoO3zgO6BToGTrfeQG+9BwVi3j6gPzhLSMdiItIzJId9a3miOOwjxAmjPw6tvFwoRVooQ/L6yRQoGfKRqeUQ9t0XhpRYBfkxpFpbhXJ8LOvdwxjhVj1Bcd+Qv9/K11hJ+PXCesq2dVGSj5UwqENIyr6cNNJVls/7ubu6nYXw0ERRr22Kc5bbzxgqYTpHzzkQ1hPoE36RINNkuH6GTTEvzCZnxXMf/s32L8hlTdSuAjPHe7TOcXFcIMDJ3FPP66Xasj/l4UN7itry2a894xXCT+fyY+4/c/p8uu5ZyFGqh/WsPo+i9APS4sXkauUeFEFbC/UrR5Xts1ecDeJvmltNmDBhwoQJEyZMOFdwTpN+MORD+DTzECrGAd3SL8wU8QfbgO0c3HjCb+EYrWOcXnL0zC7Dw2W8R/AMndlk5JfffWQhv2RiIBKAUShllPLNuaohShR/0Rg+oghK+er8v0IEGiJY9qTOzBqvJAQw0ykWHCLxJyiJv9LgL2Solaio/jISuQhvDNOhTYUAmIVzjKlcasqzvaAkXfr50PpEWfytsrDa6/mFtBUydkjRN6TO0/XqkXx7ICzH1H1LXaZe9BvZxhkBqMlhX3baVxMeQP4cCKyUO7JuXZcsoYpBYBXRN2bQZOaex70Y33IlByfBGkTJ5k8ixF/HjJkdJqKB9cPIllgnb59GLZQnAMzCzRgj+2pE36ACM1q1WDkC+MYz0nbBqmkp7MbiBMCDZOxY2OJ16i31FS9+32S+3VqJTBfOJUR8x4xll8Iv63DMvfYdqYOuv6C816fbnDCZMOEmDzKgdgd88np0p24A7+7AnToO2j4Cc+gIzHkXgmbzFIYPAOwczMDhmQEzcLplbGsVrhiCZ9v+a8gNyICKzOD8fE7tHz9LGNCgGhQVydL5sd0a40VjzD3lTz6nqz3jouFOyisi79jl0/lyQfwlg74owLSCpjaXzK4FQjSsMJwb0zuutxtRT0UzFqlArncMTCYZ1PV5h9RIyMmqcq4rRGlGfJQqP3Hcq6kK9WcayP+oc8MNhQMFYljEss5Z/dWXFH7Qq/0c6u03Rqjq7RaBBFR9Lp13ODRrDTok4qrQoysxRj4P/S4/VT4nYirlz4tOl6YBW5eF7B08r5y7FxZXq/XHczNmkSwg7Z7quep5kLqT+u7Lye8rit/1/kOniKrfynZfiPT5pJwTEm0ddWM5T49jG+VhPcVBllwXlN5trv7TKD1G1TMpJKWEIB1V/YZ/47x5U/k8J0yYMGHChAkTJkyYcI6Tfo79wgRhcWTDgtA0IOvAsigpvCQ7x+icN8wS
AOYQDjMYeLJTFKe0hnrqPFlIydpryLhfU5XpoqL6rrKflJly7KVFZSex7HxtvHGdJf9WERIzHFgq7XTZPc/weE4hhYTJKAsJdSxVfiPrtr2SbBqlyi37bYDs20uuhDGjWZb/kGg0fKdzw6E4h+pUI/mGjsv3Hb4ej9SPJGeinGOVkVAwRvjVvHZrdRpSBNaqMET26f1Xqhfi/tTzuF8FG6zBEuYzD2MJrNYshnKUklOHiFq5rxkPlzuUuy+VtZo4q5F9itLb0/Xp3J/6WsdUvHLavRJ+tT5b9isZ84Xok+fRBdJP510dImdjHVxRp/C9JOxLnBXO7wx7ox8oqdKEmzS8Ks93Wmrm4GYGLBde2RfyIgvhx8022M78c8bA0jE6598n1gDUqn6i5mJgBy5DxenccYXBW5ehjd9ZmHR5D2OYBKiF9VwFUfYYpQzSTidDBF/pmFIv3PQN6sBKBQ2w/ti8sg57wRr1WnVuTzLosIG82sBf9oPyvKaiiFxTHVkSxLWhbYxIE0JTY9BvhuUYik5IMpX2zkfr90+tVDtj2APxUrZbSUTLusXJuiSsyWjVfSoIv5JU7O2O/an9SuitjHxOW5J/sr1U18k+cfNAfkzpt71clHrMxLCqr6w3q8++Dmncyj/3VX5r3XOTiMhsrJbtyMlJ3RVEYRidJkrCb4xc1s4IZwvT3GrChAkTJkyYMGHCOYJzmvSjbgkS73DjQ3fyzHgv8HbhQ34C2aLJcVJ+HGoMFh1j6Tgu/LYbHerH71euv3VIx5LYEEOyGJEBYNk5r/5z/lwABo3/JbkXz1kYmvQ5Aa8ANMSe+BQDs7AaSt2ny40knoI31OWkjpzGgqIxD0AMFRk5QEUCztS1lNaPvRrcgHGyqxbSMjvfiAouyyWmQiBqaGJScppJyMtauevm3FtHlTis9svrOJT3rU+qFnc19I8aCSyoESwl4TfG3Q6Rf0O2qtxoQtn2UtXny+k/GzWUxrZ1QOHRkTCfjhjGGK8wNRSf53iO3r3lrD+WZJyg7ihA8bcxcrwcG0qSDPCKPznfLBhHZzaR/OUzX9ZC1y72g4raTxOh1nr1n3McQswOt3lJ9sV67ZGoRFHXZYirqVV9y459aGWWsTn9WxtTdPjTxiaCdWb6dR7CyWU7vsOECTchLNAAxHDb5wOzQ6BDF8C0OzA3fNnvYKwPg95swx25JXjrPCw6xomlw07LOL7b4fbnz3C+aUGLE1EV4iMubPkyQti4GDoukD9MBDgHIufJR1H3CZRRWVQkPjxvPqZq47g2dGvosULmc+X8QdR+RHUqwShnFJ07T6v+tFNKdaiQnF617UBuWEff8K/fZ0MONSUG1U06lOoqSOjBEZSOOQRFMnR53rCqqsj2lykxBKQi/NYJf6qVZ0L0ZfPoypxflOPr+KeV4Rxrh1hSSrgiRKTOyZjqtGECYK+EyRrtqvtjScilKUEgOIPir2PAmsaT/3KqdepWqEq5OO8YYl5qyBqmT9bG04x8z+4h59s1en1mlZISddJatumwnppIqzrWZfXIlcii8sueRVH5CQE/9DwCfcJPK7CNz+sqKr+4lolrXkpqaSjSMTjwrtM/dRvtN+zxhAkTzhyYGadPn8bhw4fPdlUmTJgwYcKEr3ic06Rf1UoiuWNmBmjmaVESvCfZMaxJxN5WQzgU1HVaIeO9xP32dpZWVCcWLihD+qfulEqkDBfXcW70tiPevEOKmHWMDnFh5UJYR0X8lcdHb88RUqf8rMNYlkY0awsFHKjqwF7mrivrXmJdwm+d8J1DKEmVmoFQq+H0dengRKVCbz9E39D+q4i+8lj5fBA1ZS2E4l4Ivz2da4Tsy84Xz9/fZ1X5e3Gg1fsTkuIPQIzjpPuyD4MbVK5Zv+eVOS6jEcrpMSIQfusqMCvEmd7uryk98xo6PyWwd5VfrR5xLEpxsAYxlHOwvy2NVWOqxJqyb7f1Y/fScUb2LWXfQBwMKf0AYOYUURoMvKXS0v+e
H7/b7tHQugE45hTi+gyVP+HmCR960M+liAzc1nneCHz4Fn4H52BOfQnoFnFutdM5nF56Ml0cCkAGbuv8+DkahYGk2hED8/I04FpQuwTDpd+NBZp5fOnKnE4IP59vORSpnjuZ460zfNZ2yQhBQgxf3DuWcmN/Sfjp9+Se3pX7VK7t57GMIRd1jjFRYRnsTYkYoJX0smdsgzJv314URlpVBPRfZrX9VqjDVoWGXAdUmT+vUvvpEJFa9afv4dD7X4enHMUa6qmxHH2Dv1VCug4RfvIcOuQhMLOyCoVbpngr/hXyiyvrgLIusbzaedfBQLuJOnHVMz34s37War/pf4GobAZy0npVvy3XdUK66WexF2YXGK5b6YggY3QR1rNzefjRustEIh1ruURvqoTeNLeaMGEcRISf/umfxjd90zfh8Y9/PMwecu1OmDBhwoQJEzaLc5v0G8r7IQtD+FwvQBDAyWciEBgtA9uWsGWpsiDpL08YPnTVbpv/2oUFmEBUfZrwK5UkNfRVd4O7DkJCfTr2jIWjvgplTJViDeWEX/gi1Sb4dsjUNeo2lKGJSiIwHZNXQpMb6xJh+wmVqTEWbnAMg2RbUe86cZcfuwmiby/kpkYk8yh9L1VVgqhgCN/3Q/gNqVRrZN9QCE//ffw5KftsDVlordrvIDC4F8XTGoIFwUgYOeJM6ervT538q7VV2R9KclCTgUNhPktSVyv8aqq/dbBujzIG6Dop21uOvbKvQhuOqhVll/HceaJSHFMmloRfVPWFf3dbF8fjnbbz5F8wandOjysc6paIPnmfaNJP6qLbvLzks0H6TZhwELCxIA658w7fIjf6uxbmhi8GWZnD0gGLjrETBq+ZOBmZBrx9wfA57Dx+JmZwuwOSLLQukQ1ePYJIBAKK8AvzLAZAipgTo7h+HmvD0hDhV6q7qkI8NW5J+eW2nsKtPJeEq4tJdotcWWti1QijCdDRsJBjZITeZw3o96u0QV6pCtlXIRiHyJ8qMbpm/aTPlPOE2vexVydRTmpS5XOJMvqBhI8VEmtdtaYgkrYo+u06RF9t2xD5t0c1pf8X8V9/XSm/H0MRzYXCtHpPFXkLrO7zWfWR7sfa4ezLNtLtIyTVusTUmMqvbNuCUNWhjIfy48l4V4Mel4jSHDoj3DTZN0T8SboMqSMZIKixYb3Cj8nncxUlYlmjXoQZrSoEsuc/Ns9AG1PpPDBhwoSbDH7wB38Q97nPffDbv/3beNnLXoYHPvCBZ7tKZxw3SsjtCRPOANL8czP9d5PPwu7uLra2tg5UxrFjx/CzP/uzePCDH4xHPvKROO+88zZSt5syvlLGo7Ztsbu7iyNHjmyszE213fXXX48vfvGL+Nqv/doN1OpgOLddb9j5EJ6uBbp6+LRF59A6Wdz7xdKWJcxtfunr3lZLhK2GcIttiyMzA2tyw5AP+2aiIV6TP/K7CQaxmTGw8TNFz8vSI9wain9DEONzVOJxUq10gYTM91/vipn7xgdSf2X9/PXkfzNjQsL49GdMbteqEXBaJbfsRKUTQqVGMtVfm96n/KuhFtbTb+/v6wnccC9FKVT8eSKBsdM67ARSYelcrK/sJ+UsO451X3ZOXR9HL2Z9/8auxRga/cvanZKi1RjCzKZ7NBZGUZPBm1D41fp5xSRYDedZllOi1meBnGgsw3t6Q0heLwr/xe+UDGtN6OszazAz4c/69ozfQ/tuN6b6N7MGW42Nf3Lctg3Hqr+txqR/5TzhzxKyv/LZlv6b5SxyamzgpHRb9efbsSAqY9/ydZlZ6VupbXwfq/9th/22GovtxtSv1RhsNxbWEBpTN6AzEuEnz9dO55/H3bbDTutwatlht3XY7cLn8Hv663B66f8WrcOidfH7yUWL4zstTi463LDb4oZFi2O7LY7ttLhht8OJRYtTyw43hH932i6eY9Hd+IYpZp935oz9Td7oN1ssHftICc0W2M6qxn7eOQmQwfLIxWidD+G72zqc2O1waunf
h3sBLU7ALE6DZ1vg2bb/szNwM48h42Dn0bAsKhL9aMk41QanK6lBOVaUTixAMqoPIb079R+Fd4F3LLOk5kdhH1LvXX+ivqItM2rXCL8ip5dG/l7T29PYnu1fHMvq2uOe5f3WubH3CCE9I+kA5MoiOWXNeG/6REj5nUsSYoA8idc6QJoAe1dJ6ndtIntTP0gqqjb9BaKCoOcaMsdQoQ83uU4vCR2lqtJ/vf2HUNyLrO8gtbG0Z1R7DbXvUA47TSqpkL4dD6v89Py5fJ6lr2f7j19pOLB4ZnU71v7Gjq1hDcJvPyjDelqTVH6WUt0o/lsu9hS5bpuo5pPxmO0s5HWdg+0cHQyWLuV17cL4LH/ldfTIaVe0o/N/ZT8922TfNLeacFPDZz7zGfzv//2/N1beH//xH+PkyZMHKuPSSy/F937v9+LKK6/E6173OrTtwdMcfPSjH8UHPvCBA5cj+MxnPnPg542Z8fd///d46lOfiv/6X//rget08uRJ/MVf/AVe/OIX4/rrrz9weYJTp05trKxPf/rT+Nu//duNlfeWt7wFJ06c2EhZV199Nf7u7/5uI2UBwJvf/GacPn16I2V9+MMfhqtFztgH/vEf/3Ej9+Daa6/Fr//6r+Pyyy/Hzs7OvsvZ3d3F7/3e7+HpT3867n//++P5z3/+geu2WCzwW7/1W/ie7/keLBaLA5V19OhRPP7xj8cTn/hEPOhBD9rIPT19+jR+93d/98BjyEc+8hH87M/+LB772Mfi137t1w5cLwB45zvfiR/6oR/Ccrk8UDnveMc7cPnll+PSSy/Fc57znI3UDQCuvPLKjTwL733ve3H/+98f//zP/3ygcj796U/jNa95DZ761Kfivve9L44dO7avcpgZv//7v4/LL78ct7/97XGLW9wCv//7v3+gum0K5zbpB/SNJ+gv7KJBmxH/lQe0Y2BRWYwMYW4Jc+OJv8aIatAvrCLhZZJBqAZN/BnyxnEhzPTnVUSfQO9iovHdb3QuhSLVyrix8CF7GbpqxvdanfeiMgLqOe10/XX41BohNrToXwfV0J6OI3miv5dkoxjZNMEnddV//jpSvXVda9dUHj+m7isJVn9NieyrK6nWI/xSG+29XcdQI/w2Bd22PiySateyHtmzlOpWkn+kjLua7NaEdnQAKEjwRIbnhJ2QZSVRq++jr1dejsbYcybPfXIEkO1141m9LXMHBiC3yya1WyAgTSIAy+uZWU98Cvk8U23oycxAoIYxUY81pO4DkBN+2bOoSUB5Xp3LDFLrEp7yJ2Rg/BOCv0tkoybwl10iTCdMOBfgGFn4toy0CKDtI97QG7q2IcqevdbtbW7FpgE3M3CzDTTbnuALhmYgvUuT4kU+5yHu5PfaIzfkEJJfe3+HGmnYy5MFVMflTPG1R0P1fsLbVYk+UUNWHF9qxMiZghGiITvhXttkzblCJUTimUZWs1UkELvBWc9Barzu3KxGmuyJ+Kug9syNkc75yUeuWoX1jGQY+s9y/fzr1QvYZ99fcY8PAl1vxzlhXau/KeaJQ7/1xqMxw4t2OCgJWaXwE1JWQo+OOVFUz1arg67fhgylEybcHPGpT30K//AP/7Cx8v76r/8a//qv/3rgcn7hF34BP/3TP40XvehFaJqDBRd7//vfj2/+5m/Gs571rI0Q41/+8pfxb/7Nv8FrXvOafZexXC7xspe9DM985jPxP//n/8RHPvKRA9fr8OHDOHz4MD7wgQ9sjCB6z3veg0c/+tEbK+/qq6/Ge9/73o2UBfj+timC85Of/CSuvPLKjZQF+Lpdd911By7nXe96F37yJ38S11xzzYHL+pM/+RO84AUvwNve9rYDlfO+970P//E//ke8+MUvxpVXXokbbrhh32VtbW3hYQ97GG5961vjmmuuwaFDh/ZdFjPjj/7oj3Dve98bz3rWs/C2t70Nn//85/ddnuDf/tt/i5e+9KV46UtfeqD6AZ6cf+QjH4kf+qEfwvve974DlXW3
u90Nj3rUo7BcLg/8jF577bV44hOfiG/+5m/Gq1/9anzuc587UHnf+q3fipe//OW45JJLcOc73/lAZQGAcw7Pf/7z8ZCHPARXXXXVvsv58pe/jKc//el44AMfiP/7f/8vXv/61x+oXvP5HMeOHcOHPvQhXHvttft+xxARvv/7vx8veMELcP/73x9EhFvd6lYHqtumQHwOupQdP34cR48exRc/+RFccP4R74luG7hmO3qB2mCMWQRj726XyC+5YqIUmvPiww2aFYvlsqF2OsbppcPp1nuWL11a7JxadFg6rx4RozOAjOTR34FEcC3dsBG+ZjzW4eU0yQN4o/vMJEN6jeQB0CN6dLnA+kpIga6lrnMZOq9GPGhiTYdI1dtSeeo8FQVSDXJZOgea/673Gb/iMrToUM6+/nH9bT3Ccg2CIFcnlov7/vaxnGkzM94HRLVQlr9pjOXxA/qGsFpOv6GmiwbOfdQreaurbej3QdlPfhlSxgHlc5+2rZujclU/K1HL95eNFybfT6Cfg+Gcm2X/Hf6trI+cW4fQBLBSWVoawwBE8l1Cenqiz0XCL+b0U3n8hKDTSsbyXwBoBzqWKD6lztF5I4Yh9dd0+sQNeNbll+LYsWO44IILqmVtCvJ+vO9P/CHs1uZCHZTodk/iX/6//58b5Zom3DiQvvPRqz+LO9zqQpBr/d9yNyqWePt8cLMNc/I6wDRwh46iZT9v+dT1CyydC8pdwpY1uM2R1XOrEgw//lG5zaVxvHWetF86jsSa3wdwYJjgjDW3FB1eNGqhtpkTiei4rvwvyT6dt09Dh882hKimSRe0QuWGuopNG9O7+K//oOuvoauX5RlEeo9mijOpn1YCFco8r8IKZIAof5CIGT1kZmWHfkStzwdJ3TKdx/WVADEHpJyDSKnAzHDoT/VZ6iMOfw6+L8X+wulzDeX9tup9KWpOS/W2A1DPU1a5p4nQ5iqppesDrLh/uh7yuahLfFlX1KV7aVf9WRNT0h8Bpa6lVO9em+l/VRtFoo+Vgw/6/X0d3xqtOI3PMpIatfcMlHUaC9O5DsrwuWuG9QTyflFed62v1FR+RISZCdfXLdKz7Vqv9Atq1N51ljn8TAMW9Z9pQgh1rj5X0h8lsk0ja1KjxoNu4LxlGxXfjx+/Abe+81dNc6sJN2lI3/lKvKfOuQPn9Nvd3cWrX/1q3OEOd8C9731vXHLJJQcOvfaiF70Ii8UCP/ETP7GRUH833HADPve5z90kwriV+Nu//Vt80zd901dESMMJe4dzDh/5yEdwySWXHJgMA3y4xS9+8Yu43e1ut6/jF4sFrrrqKlx33XXx7zu+4ztwxzve8cB120QY07Zt8Z//83/G8ePH8fVf//V4yEMegrvc5S4HrhvgHQlms9m+j7/22mvxhS98ASdOnMDJkydx3/veF7e5zW02UrfTp08fuH/s7Ozg5MmTuOiii/Z9D5gZb3nLW/CpT30Kp0+fxs7ODu5whzvgR37kRzYSkvNTn/oU7nznO28kF604gtztbnc7cFlDWHd+cU7n9OPZPIZ/gmnSgjB4mQN+cQEwTi5dCC/CmAfDbEMEZr+APL7bQfLRHZoZHKpYe3re3vCLmLnNSYeOGVuNAVpgSX5wmVnEHF8+f10oQxvVg6e8rSzeZOFsDUXDlybleiH92NfJOcYSUCn02JN/7DPnzMR6YXxuLkeAMSHnglp7jeXQKrEXwq+Wn08TaEL49fP5IfvuyyvIh1CTMudZugphc8JXRYyNqZ6GiL4hg4PODbZuTr5aWTWyrSR5s20FmZmTPHVlX9xP3e8a6bIflEbAEgyOxJ/0X+Z0/nibZH/14s4IuVobV+pT7rbqPZGTTvrZCwVRuMbwNZJB6qmpKr6MJ6q0UdrnPekTf/tFx/6ZdtKwALInVQYIk/qq1CNWc80XqbXpnKYyYtRyRZbjy6o+Zyg3dEUywIlStvxXj0OV+qz5Xh8i/0ronEGOOY5FEyacC/BGZcACgHNg
IhAMYOeeDGAHnh0C28ar+ZSKzDkABpgbwnYTRvRA8shcbR0QgIVL49EQWQOk98UYxsay2tNZRlAo5wQlaVCDDt+Yn3A14TcETbIA6X1XIwLkGmQbIYxNgRB1AAzneQuGyM4e2IUCTPy+Kr/ZymKHCJF9hhYVrCu0Jlp/XlDOjQbJ0vI4yVmXbVx9bfqdoudFgxhT8pWexCPk3zooSd6NYCCE6KB6LBJjA8WpNdrGHdd0rr91990QhtpDrpeybbTetdeuQxN+QviH8KsSRUecXjXhByAj/sq624HzSahRJuqPCXtp7wkTJpxVbMJwurW1hR/+4R/eQG0SfvRHfxQXXnjhxso7//zzcf7552+svE3ioQ996NmuwoSbMIwxuMc97rGx8pqm2TfhB3jV1X3uc5+N1UdjE6RQ0zR44QtfuIHa9HEQwg8Abne72x2o7cewCUJ4e3sb29vbByqDiPCYxzzmwHUZwqYIXODMkn17xTlN+rkm5H3RhJ/zqrtF573ALzpkwQCu32mD0gO49ZEZGnhDknj/Htt1YHg14K2IqqRfCe+pypjbpJ7pHMOwJxPRGCydg+FgbHcuWKtTGZqQ8vsAjrSJRl2vIv7i8Wrw6nuyJ+IPBvG8QgAYQ3AcyE4XvpOvf0b+wR9rDWWGiloLDa37NeHXszcUhF+p+su3pWsAcsJs0BgQmJPYbHH+mcgPIUSMoZVES1Ig5nUp69M7bqB1xowlcq/0fkOL9hrZlys58+2y/8zWyT6gTr4QCKWWQMi6vsagfn1DpKYcT6CM+NN1GSL/eucovq8Sx+nf9aNUzg+4ZzRKpKMlgMn3DaqcUxOBGQFoxGJOwSmADkz8lR72GfHnf0DviXWeANb9bgirSM3SsD6kHK6pSMfCvdb6GJA7EEhYTx3euFRSSh2dXKuoN4JjhXawkGtrzpTM9QzAOYA2bonNy59wM4cyrrJRhl4APD+MloFF6+JzpOcojSUcaoxXQnUtaHEyHLf+tHPReUPyVpNIrVYZlDXxr7sjKyszc/6bdxgIBuU16iDjWEn8lSohDeakPiyJIMHaZN8aYSrL94w89qWjjTSLEH9AcPRihkNSOsXz7YOgEIeMtYfKofNsKDwnF/8CGA0/WCP+gJzkFci9zy61VEfWUDGCmpF3/Tqht6tQSrVBwi87USL/iF0k+KH/HcgtOYQaYZ6dsiSZK8o33Z/HVJBjzVTOI9eq/16fgbF9h/qxtOsBUeu3NefS7LzVgky9TjrHokl5/YTwc0i55PX4rOsiY7F//gJzLeczJkptdW7BMs9g1JHuc3zaBKa51YQJ5z42SfhNmDBhwoQJN2Wc06TfTsc4hCZadGSBI0nDl47xuRNL7HSME7sdblh44m9mCOdvNZhFVYs/zjEl1c4asATMrUEbwkt1soZxfrFDBByeWR9CzjCM8wb3ztTP4Yk2ikYTIBmyjfP1WxXCsvw9GhEcQ6vCjCFYF/JoGSiyr/ye7BOuS57GJQFYQoc2HArlmX4Pn11/e43UKwk/UVfKcatgnCe8WnBsi2Ug/oxS5fWuqUI86jrsF6tCNWoCplT0yXdRbNaIvqFwjntR9pUkzBApUyMEa6h502suSsrQijoUhsRu5VkSavdniLisnWudMmvXPWRArIX+1Av9Vf24phpdleMF6JN5EgpZVAQdc6b6q4XdBEZIuwFniVpb1sK5Zr9X2s6TqcN9TNqk9kyla0mMgLHpPs7Cts4BM2XvKhXKNeTPEuLneH1nMGflhAmbxswSLLc5MWUMXJO884gdLJkUXhyEo1sWC8fYbb3T1aJjHHGnQe3ungzb4owlpF801IdtHXNf6cTpOdS/OWF4EAgI5CRDTSFUcYdYOSabYlCM56gpv1ZZdUtSqCBaEuGZ5/gqidCs/kVdnZrriOJPiL84Qq5rWC+UOHoOq8/Z27dyfXG3CvmzJwwcI2cnJB88AwrvwHyfrN7I1VOmmJNpcjcj/GrXKzxaqGetv8Vdi/ky
1LlrSs1R6H631zCVld8c5/UecsAqQ3vmvw2fEqiTtun8FfJ/1XOK0N9l3gPKiG9LFSJyBGPkfdaH1yHZwr9j/aF3PepZG+sGvbsnJF4WejYousv9gKjwK0N6yljtAuEnSuyxNZELfZbDdUaFsK7PQHtJ/VgeogkTJkyYMGHChAkTJgzinCb9RNWnDUJLx/GvY298Ot12PpeTS7mdZrbD4UZyMJmY12np/OJfQlYJfDjQfh18rhgA8Ko/gMDE0XBiiDCz8AyFEa/qsGAPyjJBVANaE9VkRkIXGoRV1N6MxzokUDJaiyt2EBjJCliHu8y+U0YAjKEWztNfG7I6rJsfbOj3McIvr0N+vNVGvlEAAIKCSURBVCGgA8OJoZIIVggeIyTI8JK7pu5bVf+hdhsqY1VIxTJcaZ47aDiEp9+29/xpNYJPh7PdC4Zur/bCrpF/Uo/9kqurFIj7PVdZbo8IFIOy2lYSfrofjPV/YygjwUvib5VHfQmtbNXEn0ZN8TfWV0qM9R1dxuDxmofl3MC1X9hYyABBafPvmhSs5Xyyyu6kCfdsnw2Ek9grfMjFAzbWivIn3DxhCT6/muRxsw3YzrEMwQoIPiwbIYXe7OCdoIgYjp3fT3f7kLsJ3SJtU3miNBgp37Kfm6XtPncyV0PV1XqkbPPPcVC4UV/5VWLVE+u47oyTcX/AesqvNbBf8kOHhATSO1buoyZVy/MJ8ec/K2IiO7nLBsHa7GmU8CvPuwlD/h7VaCVq91OTfKL40/n0qvnIarngyuvbp9Jr6L71d6yEGd2vQmqNdpWSx14PmwivVJvrlNv0t/KMq4i9dVZbq9S6td97/btUUm5I+bfnoAQDYTPLHJry2bFyPAjjz6opgSvmkyvJ1aHch2cZ09xqwoQJEyZMmDBhwrmCc5r0O7bboVm4bOHbOp+/T7AI1qLDM4tlULstncOJXW+suu15M1y07a28Ox3julMtTi0dTqgcgJ0DbnnIxv1KEBFsyJWHjsHkCUAiAhqvVppbpAT0Qf2m7T8dMxwBM2uzkJZZiEvjSUtgXAFXoiSR/H7e8uOE4FqT+EuFVk+Vna+Wu6+sz0Fylmnjv5S57NK5EwnYP9aaRIZptZylEOI0hsLskyBjyFVUq8mcvZA0UaEZ6qyVfCk332qibyiEp5xDMBZicV3ipaaiAPpGo2gTrJB/w0etxljzjhl9ifQz4/8duuZV1waszm2pv49B+qU8/2V/Wzfv3hiGQnuW5HAtFOcqQk+bbdY1/IkBxJEm/rwqeyjMl+77Jox3jnz7LJ1XJMp9mA2Rf/toy6Hnv11XNjBhwk0A1hDQLkDdEgCwnJ+H3Y6x03bYbgzmlqIRdmaAjv2Aff6WQecY2w3h8Mxg2xIYhwFjYE5eB1ruAKe+DGKOxmR36Ch4+4Ls/J3zIdpdCLm+2yXnrkXH8LmZ8/kDhblM+djqdxCTEGTJqaOaZxXJ6K/f0YykDqoNGwQ1LpYKP6dIoZIMKFVtzvXVfuFa0vXUx78ydGkvpLGE9YRS/LH/ImonuRar62VMelkVBnjiPHg5BbVf/K53LkmokmwoiYczYOyXaa/cL632K5Gp1MJtF8VaLYRrdq+HSCH2ZxxDbV5xoLfImPJwP8Vx/lkTFVUSvaj8SlJKE0+x3LzPl0R3uX9tm85pKc+zfm/32lnqUbRblfBbg9QnVJ5tCaUq5wv5MXWo3GooWtSvtQaHkD+vAJPxlxgdKypEm218/XQOP9aOGYgOGkD/niQHsYGbvtdciGdwbJgwYcKECRMmTJgw4eaEc5r0YwQv8Ej0pJwC2jvXEGE7yPR2rYvEiBBEcV94Y5djbyhuwWuRG95oEMLCGE8UNkEVYgziIlWM8swUCUAgLEIZcE4IOYa1BHTBiKxyATpVWTH8y4KwFvKvl1dLeyuPrLq7sDD2TZQTf5b6uW3WJa/2ErbQBouMvk4g5DwzFHP1xTq7nPATsk+HGE318MtlCcfn28nBGcrUfw48
2k6CGjE0pu5ZKwypUg3p+1Uj/CS850yRlUL2DYVmXGWEWSdMZw1DajdfZh2yvST/9oOxQ4cMvLZ/8RHajLPKuDRUfk50KwJwDcKvyhcV9XUqfO86YSg3gTJvU43wK00ymuhbO6QnvJLIIBF/cn7m5EgxBFHuyvMzQ3+8tJV6DSlqV0HKbEAxp2CzgUT2EybcWIg93TRgIrTqGZN3q7UEYgdyLaydA4ZAHIh28qHD43guRBY7kOvWMtZKDijmFAKcwjPL7OdaQFI8mfBBK6A4G8cZxCl8n/wuxIEO0xhJBtUW67ySRkM99nL6BXKOqE961fJpKWjyY1VYz4yMQ1J0Myfij6Qs1Y696y/IgVEE0kJ/z/4t9u1d616JqT2QAHIJet6qib9e0ZGwEJIw36f3HnOuV//evZbnwe7tvaCJaLmHoEC4bVhNzvsgVg5EJ1bu+16mgbU8nWU0hyHspx2ZTE787UXFW+4r84MhNegK7DsCQhZWtxh35LMxPcKPIWq3/lpmvTVzOd9es+1CPaL60Be23rETJkyYMGHChAkTJnwF4pwm/QAf6ql1kj8mLRwMERrj/6wBGkMw1MAxsOycD/fZcZaTgsIxPpxUMv2sDEGHRFBpaPuIN07776IgbIyEiyNfdxPyS0Rmz6FjYAbx/CbMIEZlyog/wSrDdGnQLtdLnTJ+leFHu2QVys5VM7jXcg/WQ1+q8waCL7+GyL5VCM3+AlMTfkL2dS79xfOG65o3JqiGgiEl5DmM5J+SnNXIv1J5pXcpCcB1lFwltBIxhfFMhN/M5Ln8hsg+XfO9elzvx5gwRo6VxJgm3NY51RCxprEXBaU/IP+akYADpGVZj3VD1pb2nqHcfONKs+JcWcer1+HGCjFZEn5DRF+mMFU91CmVZVT2BeIvbq+Fa11hN7KqYgZhfIuKQMr2EWI9/228fJ2bNHq8h+f+bNilxoQmmyp/ws0XxOzzONmk7AACSSY7sYNZnIabA9bOfVjQGnESOgsxe1VLyA81eG7ymTtTvqh4wux9JM5UNuTlkqHDOwjUx2OGdhzgSJYJAVYeNfS93E76X/3wKcKPei9Gl3JUaULo/9/emcfJUZV7/3equnuWTGaGbDMZSCBEAgmrBghRQDCRLMqVRQGNslwMGhO9IYBsmgCyKHh9MYpGfIXoewmgAiqgYC6QRCQEDERZA4mEkGWyDTOTmcxMd1ed94+qU3Wqurqnp7snk0l+38+nk+laTp06tfRzzu88z6P200Qh3Vb1l0lvXVQd9VDZweWZwp+e30xI377Svf0kutEjcnq2ISiChskSXjDyex7ksgDUBBLdu88QUAEwgtvqQq62v3OPqoNpAq++DAhcc8/DtYhzCQt/ymcwPAmnYLqZoKIE5nwJ/s7n7+GvsGXw/1yE7/seHQe+zRIl+meE4ESE8JdRoTx+JF0B2HMo1Fe53n6qX2bD9cYN3btAz4Q/PbyvX4B67yBwD+iCn3TrZ9nSy+OnvPzUuxQI2v+6h6yqJxD0lC0IN9xoScIB9xDaVoQQQgghpL/Qr0W/MtMJM+UIZ9LNJ+MM1pqG77kHqAFcG0lLet5/FTGBpCXR1GlleBgZAkgYTijEmjLTCWeVBbVKz/VhCACm41GmylS5aSxDegKgcJfHTQOGmk1vSFfjc4VHQyAOw+/sSYGU5e5tqMHrYJ2iBrGj6+4PbOunqIQlhZ7vyxuvMXresw4LGarTqo5tmo53oYGgh586b9t2vSIl/JGOiLBMuuCXTDsVVjkgFY7oKt28jgIJ0/AGv+LSgCEkYBqO8OoKknqb6CKK6Z5AQMDxPDn97S1vgCm7d2SUh58e0jNu+oJkWPxT64DuPftC1QygqpZN5Oou72A2Is85Rwc3l3iXNWRqFtFLERa/DCNCFIsow3Tvl6h6RdWlu3p428lgWE7972xCXWZYT+1ZMkVAgMpVTr7Y7uiwJ4aq8gHnOQFc71k1sOcPoqn8Wzq+/hYhpHvh52SG55/U
BADfS8d91qUfDlkP7+uV6w1iu8+HKQLrsuXC1N+NUfe6H67V/R4KzZyybJTF6OlH+hd2RQ1gp90vrg3h/haZQkDYaYh0MuB1IdT2gDdyaXTudsoxDEjXA0xICWkasAcMhjQTGcc2BVAeM9CWspG2nbDtarBZvS8tW0B6IqD7MlCCPgSk+6LQ865ZkJBSeLaLLf0QdQKAlCLwztG9vBS6uKfmXwXWZRGAlOCZgRKHYGQKfxFEhTgEgl5/6twQ8bfXFlI6nmtuRAfT8IU/p53c3zwJGMJwPPekDdndiLfnyebfC15bRAmgYS/AHMJfOL9YYMA/D880JZwA8OxpU4iACJzN1Fe2PYBAPslcv6xhkTfooZT/b4LbE/DLCR03W+jGHpFLPMmyLpsAqJ6hsODn/Z2z0fJrF/2+L5SoaujhNNUkABEW/PLxRo3K55gL3dlXr6MwMq6/ATdNvNuPUOvC7SoQtFmcd4fW19OeGe+5No2MdRCG5+Fn2c77R8/jp9chLPgF6uOKfEJEiOZR/3s7avVUz5DugUgIIYQQQgiJpF+LfglDIGEIJA1ncFUJI8q7T+VtUBhwO/huHyFmON4jSStzEFp1SmKGIw6GO4eBgRUBqHBGwj0GDKfzY2qD8UI6s9fdaZqALSGFP/Pc1DzaDAOwLK3uKv6Qe3QvF58r/AFBzzxdJMpF2IulOwFHF0eUqJFr7EIXPrKFH80qfgXOGdC9/gLnrjx2DHg5DzPqrQl+UZ5iapkjariCnysmOnX0O9QZopEICnuAuobCE2Qj14c8JaPEnqiQnrp3khInnO9+nfIR/LINloSFvmw56Lzz18TfQsSl7rzyChX3spUfFtm8mLlZRGy1bZTgFxWqs6f1iawTQiEnszxjtp3Z5mrgUnnPOttla0PnPrW0d5WX99MWAUFUF/681570H1FDSAg3V1Y+wl8uVIYoJfKpgW7/HJ3/LdvPj6rnP40sM8ttpj9bufJjBp6vwLtMOJM0XKFPhWZ2cEbT95aXpY5tS4iCYn7lXz7ZP1HeJaYr3ghhet5fpvsRlivqRIVxhObdZyUBK535EhMGZKw86/EBZY85A8oqQkP4WZLSqasQbhQFAZhaZipD/W5pg91RHinOK0/CkCKwTGgCgFf1fB/niFCPTsHaMq39PE+wboQ/rxitnjm3C4mYuqARtnGyefwZAsGQndnOJ/Q94NmnC35hUaQU7i25vEcBlbrQ94CEO/kuZP8D0XaZ2qek9RRGpHgWnvDSq/RQaAv/HVlkxDOi2wCRj1BEHr9So4u33RF47ksk+GV4e4bFxMg6a2KkG/UgMDkBme2V6xTV/eY907lCCrv3JxA14covL4w+YUKJj2EvP+/8gaABnSW3py76e4JfH4h+tK0IIYQQQkh/oV+LftVlBqrLTdfDwka5aSBmON5OpvBnbCctidakMwqr8rcJ4ec/U50TKYGU6zUjhLNttlm/qpNlSQkJ4XWcTACmYUC6g98AIN3uV8qWkIZAZ9oGbHdQBU5oKhhuLkJDwDRMV2CwYdtaSE9DeAPbhrCRsqXn+QbA60WFvVOcVd3n3dM9zKIICALadvmmsggIJDnqEvA4DHn+xeF4StrSCc9qSLgCqI2U7VzTlO3PZM+og9aZUiE/TUN4Xn/O8SVSFgATMFzVIqpNdBFQr7OlDaJ5I0QRXn+Z5+0LfECmh5+6ptkECT2kp0IXZ/Rl4fbIJuzpHkvO8uB2YXHSWabaJ/s5hgmPBxcioFkRt1T2zrMvkKswcGqw1xTCE52U6BVFvmE68yGbd182sU+JToH1ttDuPV/4CxP2evTayB+F8be1/Pbwoj9ZQYFShY91Bt6lJxgIN2yuBXg5t5xthP9YRITg870u1Hffs0XlbLWlc++mbNsN0+y8D/T71fe4iz5vdcphj9ng38q71vVkirgWEr447nvyBj39EvT0I/0MQwAi7Qh2iUQClvvOMQ3nuZbCcIwkN/SbQkjb98CyNM8/929p
mE5evxwD50owVDaYZTvPd8p2vGaVHaIGn1NuaPe48tYXjheggIB07TmVtE4K1+aCM7lATSxwKud4A3peXML5XfA8vADv/JUd2a2XlS5y5cptB/RoADsc2lP/vYkSNcPf/cka7r7S8UQTwpFZhYTXdiosozOAb8AwDN+rMeJ8AU3s08470A6httA9I2V40F8XJVwvH+9v/X/kFovU+9sQ8Ax/of3OqA1sREzu0v8Wwf/1egvNZUtJzwGhR+VRixAsooQ/FcYxStjpdaI8KXOgRNTwMsAX/PQ2CwUOyMifGQ7tqeyAqHs/vE+gDtrzG14WJQLq4S914S9DIHPfdULazj0ZNgojQrzqIX29RSqfnnRCwIS9/SBUFBJVD6dzq+4P1Qb5oLz9AuK3O2FVz/Gnt4US+FR+1UBY5NBxRejZUIKf6dpSTt9a+Gkxsk2MCD3bnuBnxgIhR+npRwghhBBCSHb6tbUs3I8lJZKWCvEGL2RT2pZIuR/LhvcBMk+8ImagMm5gQNz/f0DcCR/akZbYk5bosGRgEF245ej5bgC4nTqVZ00baIDbiRd+CFC1TIXMckRJxwsxbhiImyL4MQzEDWcQ2vnf/Vv76IKQ+gDB71GfbEQJB3DbXX2yLdc/tvZRbRb1CeOJSAEvRv/cDQGtPQyUx5x2K48ZSJgGEjHnEzMEymKGF85TfWJZYg3l6kQb2jUzDP+j6qfEurgR2kZddxEWDbV1hggIfurcda++bNfDE+9sP49hOvRxBlBt52Pb6Eo7wkmn+39X2kan5Xz2pCx0pm10pS3nb3dZV9oObNeVtryPKielPX/+dY++Z2w7+Im6n8LXRg+rqN83tiuGdzdbVm2j9ldlpmzbO2bKyv7RzyVl24H7u7t7PNszpc433C7ZttHbzFumtYt/jrmfVdUeenuGr5ltwztv/XzVPaYGhpx8mn7ZanDIkmrQTroiGQLvZfVRop70ju2uk9Jbr+5d5eHn10tdH9t/90ect8LPk+m+Hw0np6f37jWcd6ozcOWKENpHuO/3mPdONrTn2RUP+8jTT9qy1z+9SVNTE2bMmIHq6mrU1tbi8ssvR1tbW9btN2zY4IQMi/j87ne/87aLWv/QQw/16rn0N5RtBcCdzGQjrgZtvY2CHn5C2o73n+3+r0S+eCVkeXXgY1fUQCaqADvtf3TsNERXmxu1QXie65GTGNxnvjNtozNtaeI/Au8f6b5TbOmIfWHBT0rpv3PsYO5A1Sb676/tio49JodXXLGodzDQMy8xXzDx20Tq3+G3nfeelwh62wQ8FjXBD47oIaR0rrMu+qn7JiQE5syTpurcg7x/ur0FBO9vtU69CwDXEyn08fZVYmH4IDkEsrDgByPYbqpdAQRy1/r1948ZeDbV+sxTdgvrXe/J7si2Z7ZfQ4k+EDZzEFmXfIXQsOAXFroD3rAy9yQI7W/dWw7QRbbubYxwtARdKPXCd0rfBrO0511NvtIF1u7QBT+lsxvecgTeAbmEv2wefqq+e5v+bFvRriKEEEIIObDo155+irQrVKi8e0pok3AGi1O2H9pRzTIMMyBuIBbRZ0raQHOn5e17ULkZyJ8QM4RXtmkawU4d4CVit6U7U9qdnWkKtyNjODONpZu/ChBuiCoJ1b2XgOPhZks3/JTvXaaHs4sKfaWWd0fQuyh75zFbLrd8vJzCwkxWPPFMr19wnfL8c0J3GW4YTScvYtqSQMwRKAzheCWlXOVEefaFQ3xG3RPOeeSexR8OjxoYWfL6sBGef2pz+G0RzheohMVsxwzU087tlabQxaIoL77uPKXC1y0qd6TnOWX794jtLnPu0ei6hc81q4edtjzXPZXPfe/s6P9neIPLPe905xOSR792KqQm4J+TF0rU/Z7LmzbbM5fNKzOfmeCWpb1DQp5/nqev5v3nnJNan5lPUkrH+08dWUjH8wZQbzb3UMj0EPAHn/2B+7Tte/U54qwjAAY9/fT7PGIA1fDvVfXdcL35lCet
yo0ZM/wJGgIC4UdPwHlX29IdmDMEYANxE44Xsi0A285rQI4EmTFjBrZu3YqlS5cilUrhsssuwxVXXIElS5ZEbj9ixAhs3bo1sOzee+/FXXfdhWnTpgWW33///Zg6dar3vba2tuT1358QyT1ALAHTcPLv2dIN/amw067Qk87Y1y4fGD04Lm2IVKf/Ne4LRyLVCaNrNxJVA5G0HKHRMAA75T/PUhOmOt1JLHHbQLkJmIbzq62eT0s6z6+zTAJ28FnWvYnV77Qhnfx/tuYZCER4d0nVHnqDRZ9vhqef5jHk7QoDspvXRXev8p78fKlXpO/Z5Ib3VGV5Xn9OeHqpbFmheUTlCHnoeXWGhL7IsH6AZyM51y7Cy08t9w6Qf0jI8LXTfx68dUIElusiXF4hqnWPr6gwhUamcBFZDOCFHpVSs6VCtoJef12o9+oclR8tS3hZ5UHZnaCqi0VAsI1UvaOE0hxdi6zkEkN7Qq5DO/d0cJm6t9XfgNbGEe0RJVRn5K3MOHCW90DY20/4E48sqcwMpz/Zk3vELToQ8li/JmEBTRf7vFNRVZeZNmVYWNcFPy8qidDaUb0L7OB7UQqR6R2rPHw18U8XLkl+0K4ihBBCCDmw2C9Ev460xNbdXdjWLlAZNzGoIu51LCxbiX+O2AN34NgUwgvrk02E0FFCXWfahmkIlJtOGMmk6zqo8lypzpkiaqak4Qp+phOb0sml5W4v3cEjKYGUV4YrWBoCEgKGEv/cDroeilGhCwBKIMtGLpFPPw8g2Fb5CntRokNOjz4ZFIr0dXGtrrYA4hCIa8KnLgSo8KeO948NO+Gut/X2CQqlqpOqOqhq8F954JlCeHXQQ6ga2qCGKjscmtQU8EKx6pfDjLgBAyGQcnj5Bdo0JMZ4y7X2z0fgs6QrnLrtCSDS88HzVDREsENvCJi2aic3jKahCUohMUkXuvRrEZUDsifoZeUqxwq0YffHyyWe6UKejneOVlDMyxwFixbysh0rsK2tP1/5PXPZjqvq6txPPiqqWgqaeOuKu05qRCcMaMoOhpv1c0zKwOBa1KtH91RR727lcRgVylP38lNtoAvVgC+i68fVn28l+CkPP13w0/cNP4IGlNjpC38qN6vTNNLz+Nvb9PaM8d4s+6233sJTTz2Fl19+GSeeeCIA4Cc/+QmmT5+OH/7wh2hoaMjYxzRN1NfXB5Y99thjuOCCC1BVVRVYXltbm7EticBKQ1gpZ3DWtoAEIM0ELAmkYcA0DBjpTmc7PWSn7ef7C98lElkG4O20F7JNxsthw/m9r4gJtLm/vYDr2Syc4J0qp6f3ey+dCVpG2g/LqwbvJVzxTwoYhh9qWNlljq3o/s65eZdFxPs8KAb4oY4D5xnydMs56N9DT6qwt46qhwQyfqMV+c45UNdGhfs0RPfin9rHEIYj/tlpd1mmZ5NQnn6A5wmqhz6UwjXaheG0P3wByhMA9NCeEXnHekJgjpYmaihBE8gt9AVET3UObtW8hguJamHBLyxcqONJKbUw2P76gL0aUb8o4S9DWM2RQ05qbRpFd6/9nvgW6s9ST4+TDf3aFSQyymD+4fD7Kuv7K+8DZBFcc22vXSsB/35RE0mV8AcExb9AMd797F8jFXI9G3q/NCp8sAolHz6aHs3GC+kpVD8GQc/eUAhgr77Kuw9wnhUz5v8+wPdE7OWAA5H0V9uKdhUhhBBCyIFHvw7v2WG5YTfTTtjB1s40du5JYmd7Eh92pNDaaaE9aaEzbXlh6JxBZGcwWM0c1WcwZjO1DVd0cULNOR0lJ1yiPjDtd+D1T3i2pAC8sHAqRJwpBAz4wonwxCdoy/xBZUOoUJ8RIUDdsHRqsNl0hRg9BCUABENS+p9cRIUmDIcsDIcUDIc7TFl++MWUZWt/uzmwtEH+KLFShc9T519uGiiLqY+JctNAZdz0PmWx4PfKuBFYrtqsLGaEQqhqYlaWcJvOvRFsND1calRoUv9+yt3W6rhh9DCMAAIhJVO2
nfGxpQpzCC+spxOSMxjOU4Xs7HLDpKnQnntSFros2w2f5nxSto0uFULRC6fofE9r4RWjxKdcAnRYnPPC3uYxahm+v7OVE/XJRjhkp2pH/V7WP7ath//UtrP9MvTzDIchjQr/mS08qB7GNBzCM1f40ygCz2+onoFyA8cLnp8Ki+q0AzxvPCfvnu2FAfXekzJ3eM+0rd1Xqo1tqZWrLVP1cusSDumpe6xmiPwimMPPEf79QStFrlswygvwQKG1tTXw6erqKrrMlStXora21huYAoDJkyfDMAysWrUqrzJWr16NNWvW4PLLL89YN3v2bAwZMgQnn3wy7rvvvqK9SPY3PFvIjEGacU8kEOkkhJ0O2gm2KwgqQcdOQ7gf2GkIKxnyZNMPlEMiMGKICTdEtpH5O6smA/ghfm3/t0iFX9YmAfj2mRumDkG7TPcoBoL/616Fqjx1x9iaLVkQPQjBGPBAU/+HDt2Tqig7JLvwIr3zt7U2kO57X4VqVnZuVsKD/LYW+jDgASi7DXWYQQ9FUxHxAfw2CH/Phgz/rYt5yivJCAp+0Lbr7jJ1F25UP2vlOeXPp1Lt2xMJLqoS+XtR5oMurJYa/XLlY18repJ7OXjAArrPOfOYhuph2/71U88HIq41wl6VuW0V/TB2Nx//2ZeB/mw2VN/Hj44AL6Rn2MPP8/LLRjiHZ4RQLot99+7jlNq2ol1FCCGEEHLg0a89/f61bQ+6dvmdhs60hVRSYntbEoAjqI2oqfByvSkGJkwkYMA2nNCNwp0ZGu2hA8S8gWBnUDppS+xJWbDh5JhKmGoGo+P9ooeKVJ5T+kxJ6ZYLN8yk6gh5IZ1sZy6nlN6Ecy2ki9vhC8aScsrVBqJ07xjbdsKDAq7Y0G1vOCgqRRG1TgkPznlHeZVpddD2URiGACxf5IybhhvOVMCMGVB5/Lx8WZ73ULjm2b2DgiEBc4cBBODdN4GwlVnEvjD+eqkWuCftt0PUTHOFJaXjRWUIty3d8Irucq2oDMJen+papFzP1KBI4ggynthq+x5+ybTv6adQ7e7lRVQeU3bQOxKwETcN55im4bWDEl2UV5zuLZEPeggj/TFQ55gzPG2W6xz2KozykIvypHO2zXaw4PVNQd1D7mrX1dgJmRkRmjd0HtmOn62+2fbNiXbMqFCsetiosNeiG5gJhiF8T0DXC9D57g5gdjNWppwSPC9T6YfwVGKnEvbU36pu+iB/2BNVx9TeJX7uvmgPv4BnoqpjqM6eh5DWTsV4qZYCWw2g92L5gBP+SWfBggW46aabiiq7sbERw4YNCyyLxWIYNGgQGhsb8yrjV7/6FcaOHYuPf/zjgeW33HILPvWpT6GyshJ//etf8Y1vfANtbW341re+VVSd9yeSlnM/t9oJpBFHbaWJWGoPzN3bIWNxSCMGo6LGGZhOdwFW0hX5gp5tAoBIJx3PvaqhwYO43oNS2IAR85cl90CkOiDSnbCljbKyKpSbBroMX8w3JDzbxtJ+37y8nrbrpeyFSde94gBAQsAJNewJgbrgF+HPo94pjs0mvKgNva71uwPd/iC3mpgRHJDPurtnr2jLIrZTXjthvHc+hOsd6dsuTps4fyiByjRiiArz6hTmi3tR3j1+BU1nWwPRHmmhPILZxIiwKeB9DXlbhYU+XdSNCu/pe48GvcqEqpO0IdX5aX0P5akEbT+92pGmSze2kV53T1DR/w89k5FiVbawnqHtg79xmZvaEX8byPSeA3yPOb0c3UtPbZMPYY+zQn/17FB99eus6qp/z4tc3r7q3nb/d0JaIvqe1/CfPcB2a6I8/iLPK2NmQLg8EbltuB3D11zZSXq91DfTEDDc/z2xUvf4zeUJrc7djGXkDVUefp6Hdh+YWf3VtqJdRQghhBBy4NGvRb8y08Chgwd4nYykZaOlM40NzR1o7kyhI2nhoPI4KuNOfhcl2nSmHY8UIUwMiAuUm4bntQc4Il5Ufj/A6b/bFtBp2d4AsNPpEM7gs+GE
DVWddzVgoA/MeJ39iM5ttrF5N6ASgByzsrWC9BBzpumLBEZ4IEvra3nbuB1oNeCfT6cqm6gQDhsZ3jawmzd6AkCFxIwMfSncgXzVyXQ7vdqATdwQ3kCeHfNFwHAoQCUC6nXV8YUKd0ArT8HPP6WggJoZ8jTbfuovqY2e+DnxwkJZrlCr4WvQndhn2TLwSYcqGXPFPsuWSMQMV5V28pgBgKFGTiDcwUE3vKwR7Q2Vb1uGhbnwIGWUAJhxrByek3r52QZAM/aN2CR4DwUHRHTBVpuWH9jfD2MXdU3zE/vyEfmicoCGRfhwO0blILQC10ALJRw6Pz0nYDb05yUqBK0u9qn7WQnZ+qQCUxuws2wJGCEvWyP4XCshUnn46Ujpj7vaoeVhwUBCeYL7goQuuu+PfPDBB6iurva+l5WVZd32uuuuww9+8IOc5b311ltF16mjowNLlizBd7/73Yx1+rKPfvSjaG9vx1133cXBqQji+jvbMCBjcX9ANp10Bj6tpOsB6D7Yejg6YQDSihaBVLg6KwWkOiAHDAYMZ5BXpLsg0ikIKwVpJREzTMQMFYZXeM8W4Apgti+6qwgCcUN44bSl9G0AADCEASckr8gQ/HScgWUB4eb1U5PD7NA7plvCObvCedWyDfJHCC5BWzJoS2X7ycr0YnMnTIXqbgqRUUb4d1C9Dz3RwRVIDClhQU2UcL1zpHv9o8gx4O+JH3lQ9Ks1lxgW3lQTr3QBSP8OIBDuM0DIwy+q7j0RkTOEXN2jEsgU/MJ/Z0MPndoN3Xnz6EKa891/p4TFsyjBz5bZj6G6Oc5cxmibTQS2z966+u98oP65hD7tOZZCz4RZBKF3gZC2n9sSzr2ln6UulOpRZf36Z7ZJ2ELtbuJAlDkZnkygv1uUd58QmuCnzk3P4aeWBQqOCC+refl59wWUJ+L+a1zla1vRriL7GlJKrFixAuPGjcPQoUO73yFPNm/ejLq6OsRixQ9f2raNtra2wDNG8se2bS/NULFETQzaVyhl3Xbv3o2BAweWpCyyb2BZFkzTLElZUkq0traipqamJOXty6TT6aLe41JKtLS0oKampuTvjo6ODlRUVJSsvFK+K4uhX4t+tRUmjjgoEeiI7eq00NKVxtbWTmxt7sCwgWVeh6UybsIUcMIa2s7AUXXCCffYlrSQdgeSBiYMDIhHh7VxOjACnWmJuCGQMIUTlk7byrn3/FqpweDwoEy4o+UN6rhLvXwNwq+EPnCN0L6GkIDqAErXE0sJYhG9NltKeKNiAFTiECXUqG1M0fPZlGqQXj8vILgs0xNGU+0Mp2qG9AcIvHCM7kCk6XpferNJMwbr3fOxne66pUQvd7DeckU/2/ZzAYa9FaPoTqTKN1yQLlIAftvowpQlnctiiGCONcMIeabl8ELTw6QqgS8qJ1oy7Qt+Xe7fgO/lpzz8LFf0ixkCSTfHZSJmeMKf7bpP2E7CM9giOqdZ9LLgQsMIjg90J/zlKlu1Z0/Id/sory7bvY99QiJfN+JfNnoi9HXnbZZN5I4qL0oEzFlnzRtV3dPZ6hOV/zMq12RYsA5MJgjc9+odFsrlp33x83AG/3f+Dp1lxkC48gZCIG+s7l0cOI8+GJjaW3lnqqur8+40X3XVVbj00ktzbnP44Yejvr4e27dvDyxPp9NoamrKK2fM73//e+zZswcXX3xxt9tOmDAB3/ve99DV1ZVTsDzQMARQGXOeIQCQRgwyUQW44TpFusvx2EgnIayUE8ZTDdTaaUgz4Xg1SRuQTqyBsC0FACK5B0ayHXbVUHRZEkLEUC4MiHQnRCoOQxgoj1ejIy1R7npNhScDhPFDFAt3sgW83zrA+Z2SEjCNTMFPaGaI/juiRAEZ+t4jdLGvB8KfEj5sZNqSkbYlZMYEL2W76mGLw9WX7kJ9YppuCypU2crb0VKTeqQjpBquR5uwbUhhOyJYlPCkhfiTujDqbZ+7k6QLQ+p7WETKEGVz5FcMCyqRm4WEP4W6z5TI
AZXjUD8nqN+OTIRWTnhZ7gpFCygFh/aMyvOXx265jha+kvq8oPA1i+oTdYcQoWc4v92042h9HmT3TlR1CtxT+jUOG6yKXM95tudev/+zbKN1Db38pLkIPy/FoHv16ZMIlNin1psCATHay+cHZH8W3UkDupefJX07C1CTrnKLlb3FvmZb0a4i+xpCCFRVVWH06NEYNWoU7r77bpx55pkFl9fU1ITvf//7+POf/4znn38etbW1RdXvjTfewMyZM/GTn/wEH/vYx0oyaPzSSy+hoqICxx57bMFlbNmyBa+99hrefPNNvPvuu7jttttw0EEHFVWvVCqF3/zmNzjuuONw0kknFVTG3//+d/z+97/H+++/jw0bNqCpqQkrVqzAyJEjC66XlBKrVq3CvffeiwsuuABTp04tuKw//vGPuP3222HbNhKJBH71q1/hqKOOKqpuL7zwAu655x7853/+JyZPnlxwWQDQ0tKCW2+9FV1dXfjxj39c9P3W1dWFRx99FIcccghOO+20ospSvPLKK/jYxz5WdDmdnZ148803Ydt2IJR0MXz44YeoqqpCPB7vfuMIpJR477338NZbb+Htt9/GYYcdhvPPP7+oOqXTadx3331Yu3YtfvjDHxZ1TdetW4cHHngADzzwAH784x9j2rRpRdVNSonVq1fjqaeewo033lhU3Xbu3IlXX30Vr7zyCo444gicd955Rdft/vvvx9atW3HjjTcWXI4QAs899xxmzZqFoUOHYvr06bjjjjuKEtdeffVV3HvvvWhsbMQjjzxSUFldXV147bXXsHr1au9z8cUX47/+678Krlep6Nei37DKeEbnTg0CH1xTjiFVCcQNgYMqYjh22ADs6rCwoz0Jw3C8UVrSKVQmTKRs6Q1cdFoS7SkbphBoqIpBCOXBB8B2BIek5YeRU2GOlABl2YAlnL5fRjgbNdYvs3v92W5YzzCGJiRGPbvunHlP/LPdrdXAlGFG1UUEwmHCcDu/ASHB2S+O3F5GcNtUiTKm8JepDrIl4bU9kCngqPNU+fNUri0VztNZ7wt+cbeNVc4fAV+Y0me/qkPo4oFlOy+elG244VdNJC0tJ1loQDFb6EdFd+JQlIdfWOzzRF+tMGcms/TECltdRq1dw33PQL64kCiii31O/jTpnLftiH5pV/Tzw3rqnfLMl5+ZMaJWGFFiX65t8w2dWKiHlbrGhQjegHZNNSHXr3Px4l82sa/YkJJR+0d5Hvoinr6d325hD0DY2esWFjHD4YD1ULwqhGfwe2aZpuF74+SLLR1vHinVYLZ0B3G1CRxaGE81mSPlvjfCnn36M9/XoT73FYYOHZrXrN+JEyeiubkZq1evxvjx4wEAzz77LGzbxoQJE7rd/1e/+hX+4z/+I69jrVmzBgcddBAHpjRihsiwrSwRQ0eiGpVWB0TXbohUJ2QsAVldB7OlESLZDhkrA+w07JadkF2dzo6jx0OWD0RaAp1pG2kbqEoYiNlpiI5mR9SqqEGXLZC0JWJeDN00RKoLEAbKymtQHhOoKjOxJ2XBshCcuOK+M9RvgyHVO8sXWdT2gGPvON7qwXMMvy5sOGHzLFtAChU+UHgeboFJEhKQAr7AZRjeu93xF/TD+OXlXWYEvVuUZ4sS+JQdqQt+tvbboQt/SvBTdpIS/4CQGOP+r8qH8EVFXUSMCvkJW0IKJ9KFBBAzYpAxQKThV9C0ASsNZ6OQ2NcN2bz/wiKa/rdQdY/6GQiF+MxGlCxhZLEBldihxD/AFwB1YTKMvq0I1y2XeBexzhP7soVPDHtS5Rna0ztk9tr42+gCnLJT3e+q7cLhM8OE7Vp1/2fDEZwyBWogOKFHf8YzBEf3Xg4Lfzk9erOF7cxFlIinL1N/23akMdydVZOrr6mvjyJbuP0ozz7dqw9AIOWC1+V0c72Knnj4KcHPiAXfgVJ/D/aN4LcvQruKdHR0YN68efjJT35StBfcgw8+iHfeeQdXXnllUV5w48ePx+9//3t8
/etfxyGHHFJQGe3t7fjxj3+MO++8Ey0tLQCAr371q/j9739fUHmdnZ24/fbb8f3vfx+pVAoTJ07EK6+8gmOOOaag8gBg48aNuOGGG/DAAw/g61//On7+858XXFZHRweefPJJLyfm1VdfXbDoZ1kWHnzwQdx8881Yt24dLrvssoJFv49//ONoaWnBzTffjFdffRVlZWXo7OwsqKzOzk783//7f/GLX/wCr7/+OgDnvIsR/T73uc9hxIgRuPjii7Fu3bqCxaH29nY88MAD+NnPfoZ//vOfAADbtgsW/dLpNH75y19i/vz52LlzJwDgG9/4RsGC5Icffog77rgD9913H3bt2oVZs2YVLfqtXLkS3/3ud/Hiiy/i3XffxfDhwwsuq7OzEwsXLsTChQvxxS9+sWjRL51O46c//SluvfVWrF27FoMHDy64rC1btuCXv/wl/vSnP2Hu3LkFi35SSjzxxBO49tprPQ/6r33taxgzZkyPy/nTn/6E22+/HS+99JK3/IEHHihK9Hv88cdx3XXX4c0330RlZSUuueSSjFDh+aLEZfU8XHnllUWJfps2bcLMmTPx1FNPwTAMXHDBBTjiiCMKLu/cc8/FyJEjcfbZZ8M0zYJEuj179mDJkiX4xS9+gX/84x/e8g0bNuDwww/vcXmWZeGNN97A4sWL8eKLLwKAZ2/1Nf1a9KuMiMGpOiRVZTFU2s5gUFUihoPKTexOBjsZnWkbu7vSMEQMccPp2HWlbXS6gkd9VQzKYdeWjkSWsn3BT4WVS5iOW5plC1huh98EcuZYUGUCmYJfzlAq2coSalY23JnWbiGBWcPOFwnpzeZWwmDMcARAE8LzCrSlhPJYtqQzgKT6bFHegLrw556ZF39HH5DTCeeDUMuU+BfwvgmdvHAHsUw3dJ8QTjgyvb0Uql2dXBACKSEhpRO2y5KOWCvcgRlT2EhZ0vOytNwRt3Dd8xX6/O2jBb9sop17lvB2QlAAzJW7LSz2RQklyivJ0u7p4N92wMvPsm2YJXZPNlxx1/tewuJ7en32Frr4p0J+qrCYYW+67kTHUgp+uerbE/EMCIp/qoxs2+TK/WmH7lvlqRMW/MLH63ld3Xra0nkXesJfUOgDfLEvnDdV9xbW66jXb28iZS/PRu/Fcxo7diymTp2KmTNnYtGiRUilUpgzZw4uuugiNDQ0AHDC/EyaNAm/+c1vcPLJJ3v7rlu3DitWrMCf//znjHIff/xxbNu2DaeccgrKy8uxdOlS3H777bj66qt77Vz6I1Ge0pZ0cv1VGgaEyscHQMbKIYWAtCzAdJe374ZMOgMCwkxAxsqRspwJU0lbogpuGVJCmgakmXBy8VkyOKDtehKasJEwncgKKctAyrIyvPP1cODOMmeykpR+yF2vWPcBV3YPkH0g3QagB25R74Zs3uROYQX8kOUSnvRB/AjBz85z+Ds8UK9EKm+9J0xJ13NIiQBBQdCvl2MHSglYaj/pHEkJoNJ0cvwJ4QzkC8OAdL3qnAJLEBLRq4//d4/mI0WIMMq7MvI4EcuUSOTlWAtNyooSX5TIF/C8DOc6VGEde9JOUd5m+RAVWrFIVBPq4l+UoJbtp0p5uPaEQuyAbOgef2GRMG/Cnq5Adu896Yjn0ZXx9wm3WT6/x+FnNxvd2VPZ3iPqXg7cx7rg11MPP2hiX5bnsS/M+P5qW9Gu2n9JJBI44YQT8Ne//hXTp08vqqzVq1fjv//7v3H00Ufj85//fFFlnXXWWVi5ciXq6up6vO/KlSvxzW9+E//85z+RTvsh4i+88MKC6rJixQpcccUVWLt2rbfsE5/4BA477LCCygOAZDKJZ599FmPHjsVVV11V1AA2AIwePRoLFy7ELbfcgkcffRSjRo0quKy1a9eipaUFn//857Fp0yYcf/zxBZclhMD06dMxbdo0PP3003juued6LHIoysvL8aUvfQljxozByy+/jJdeegmnn356wXVTfOxjH8Pq1avx9NNPY/To0QWV
EY/HMWHCBJSVleHVV1/FmjVrcMoppxRU1qZNmzB37lz87W9/Q3NzMwCgpqYGVVVVBZUHAAcddBDmz5+Ps88+Gy+//HJR9y7g/NYccsgh+N73vodNmzahvLy8qPLKy8vx7W9/G1deeSW2bdtWVFmAk3N27ty5+NrXvlawkAs49++pp56KU089FWvXrkVXV1dB5di2jV/96lf43e9+h7KyMhx88MHo6upCIpEoqE6f+9zncPzxx2P58uXeZ9KkSQXVTXH22Wdj4sSJeOGFF/CPf/wDQ4YMKbissrIyXHHFFZg5cyZefPHFgs4T8L37rrzySrS2tgJwfrMKnTigM378eKxatarg56qiogJnnnkmqqqq8I9//AMvv/wyduzYkVf0gyiU0HrJJZdgzZo1WLRoEWbMmFFQWaVGyH4YEF/Fu93W2JgxC6m5y8bbuzq8gSBTCAyrSmB0bQIf7E5hy+4kTOHM+t6dTGNQRRxViRjakmlHrNMG2Y8bVomKmNMp7Ujb6EhLNzQo0NKZ9gZ1DSFQHjNQlXDyzpTHnMEp1TEKz7YMD9oAcL3Ncg80hAdown0Ofb/c/lnRs2YDHUgvD5cqOxhaRR/sVvX3l+sD+MHvqsxsHkXKi88wHA8+J4cPUBZzcvkkTAOm4QzWxV2PwIQpPOEvZjjegXroGed8/BnhljsgqLz61N9Jy7kGKVdoSFmO+Jey7aCHnCZS6OeeC71t9XCb+vX3t80sT+98RwkwUfsroc87Zkgk0QU/PaRnWPjTMQ3DCRvrfioTJkxDoDxmwjSAuGEgbjpibdzwr6N+TdXfznXyRb/uBL+o/JP5el12J/DlkysvSmCLvH7dHMvPDxkcHNSXhbfNVW6hYl/4WN2VE3XfhXMk5hwIjyBXCE9VH+XpBzjCGgBPtI5CebSoeyvqfoybRuD9EjcMGEbQqzjsxRoW+aLeb9lCDu5p240LJ4xBS0tLr+ePUL+Po694AGaisteOYyX3YP29M3rtnJqamjBnzhw8/vjjMAwD559/PhYuXOgZdhs2bMCoUaPw3HPP4YwzzvD2u+GGG/A///M/2LBhQ8asr6eeegrXX3891q1bByklPvKRj2DWrFmYOXPmPhFvva/JZVupO9ps2wGjo8XxwEhUwKoejtiuDcCHWyAqqyA79yC9eT2MqloYNYNhDx8Lq6IGOzss73e5usx08iZLGyK5ByLdhZ1mLZK2xIC4gWp7D2JNG5zjxsphDRyGVFk1WrosNHVYaE9aaOpMeXk+1TtCTRaKGwIDEzGUxQwMLIshZdnYk3JyyxnuO8F5N/j+a/rrTZ+AlDGwLVSocTeEujvZSAjN9pC2E+40NNAtbCu3yKVyV5lxZ/A7Vg5LAmn3PCUc2zUs+KlXoe6RpDyflG2k7CYvHDoy7ST9Wuv2ku3+HziuDHraKHvXO56AZ4+JdCdgpSHSXY4AaKed71IikO9RE52kYTptYMQcjx8j5iwzY94yFfLPt1P9Gun2coYYof8f0f6OTegLTj35idUn6OnRPsJ2fUYIRFUnaTvtk8srKl9yiSsq7yI0Lz89l58mugBBj8qo9o4ShSMPnaV9vCqHbbHAOnV8mVG38DY5jx2YDOkT5RGo11N/XsKebAAgLPde9sQumXm/ZfMw1e99IQLXQpoxfxstv52t3f+6OOqFwESwDxduNyDYdtnaRg8LrOxAU0SIfWGhD4gU+wLCZpSHnzpHFdoY/nso3H/e3dqKI0Y20LbKE9pVfYO6d3r7Pi1F7qCvfvWrqKysxMKFC0tUq+KwbRu7du1CY2Mjtm7dirKyMnzyk5/sURnt7e14+umnkUqlkE6nvf/T6TS+8IUvFOVBREh3SCnR1taGDz/8EJWVlUUJMWTfQkoJy7JKkmcU2D/zPqZSKWzbtg2GYTi5noWAYRioqKjYJ8/VsixYllWwyLm3yde+6NeeftkwBRB3RaKa8hiqEkEDKG4YKIsBVWUmymOmk7smbiJlSexJWd6gkdA6
5HFDQMYASwrYkCiLGa4Y5IcbTNkSQgApG1qoSadHpf5SM6YNCH8GdajTlS32rj6YA0SIebowFLFK7ww7IZiCB1adWNurMQKDSRL+rG7pegTawhXzbL3O0j9R97tpCi3kVjCEqHPsTCFIF4nUYLxzTYR3Tl6/0f2ebca0EAZMt6MsvEYXXt45wDm2BeeaSKkGAJxzNgzA0tSksIiZD9lEle5yCKp1ubym9O0Av9Mf3l6FoDW0m0htG4sQOTLqatswDTOwTVSIT/96KkGvh2pQiCjBL4qeCn49Ecz00LTZ6In3oOfh5+6ivP6A6JCa2cooFWaO+ytfT7+wl2J320Xl/VSEnwc/NF/2csOCX8B7WHuXRGE7sfy0BcF65Cv2Rb0XUn0wHV3askf3YyHl9yaDBg3CkiVLsq4/7LDDIifI3H777bj99tsj95k6dWpRIWQOZLynxhWlZLwciLmzQqUN2BZk5x4gnYJIlAO2DXt3M0Rtsyu6V2WGjhMGZMwxqjtSEmlbolz9vKhBc8vJGRhzvf1MV6swdRPDxXZHgm3tGGryjprUVQgqzKft2glS6g3iewN6d2MuT6korx+1XA34q8FvZA7Q5xL81P/hn9tAkwuRKYKF6qPbSzCckJ0wlD0oM35n1TE9W9BwGkh5DppGzDmWbXnnJGzbCXsabivVBnq75EHB8xez5VTLQa7wkVLCCxEJKTPsdNXSYRs9Q/ALXRehHTOrJ1iIHu2Th6dp1vXqGD28BOqaReWjixL7nLrkPkhGX6cHz3y20JY9poTeqz0+tBL54L8ngKBIn+t6ht8fuuCnT5DI5d0XCOWZ4z72F2YKftAEP7sH139v0J9tK9pV+zelEFmPOOIIzJ07t/jKlAjDMLwQtoXmyxswYEDRuagIKRQhBAYOHLhPChykOIQQJRP8AOyX90g8Hi84xHNfYJomTDOjl9bv2W9EP2WiClc0qoybKI8ZGFEdRzgKaGXCRNwQqEoYXg6zhGkiaUm0dKVQaTr76pS5oaUsGzAgYcUA0xZwgho5qNBzUgKIAaYEEqbfMdJniEoBwPaFPykFbJE5QBDG12r8HliuPmqUZ6CUmQNmXi4MbYgpIPi5A17OLEtHABQQAfEPcJrDFAKwbU/o8/KaqQINkRE+Jiz6qWVxd5ROzcg33M6n4dZZzbw3hDODXW3jDEJqM8jdgR1vMMvwB6QMAcCSMA0AthOWCoZzho7YF2zTqDCE+jqvTSOELn19rn5dWHAztTbzytcufMAzLSSkeHmOXK9TJfypdWHRLmYIdKVtmIZAR9IOlBlVN70+Ruj6hWcG9zRMJJBd8Ouu8x/VKS9EJMsWYjN8/XMJWApD+Nvpef6yiX9qXW+QLdRuj/Lg2TKroJtPWFK9DlFefvnk7wt4wYYEP9MQjhefCIYN9iY4uCNchhJgNeHPv67RnszK69D3Ag6eh04ql1JJyD6MZ1sBzu+oEYNMVMIeMNgXCqQNmeyE7GgHABgDD4Ldsgt2yy7EKwbAPCiFskEDnZzHMuRdZiZgmwm070kiZdnuJC0bSKchbcuxnbraIGJlKItXudEUTBhOsjjv2TQE0GXZiBtOnl41EctUz6eth/8EdHteDWr7Ylr34SHVe0e1j3TVHrsn4mLI00d5+KjBb2+Q3j2OZ39pgl/Qy8kPmWi6tpE+KO+9A0WEV47+Q+sOwAthICYMSMNwbCM31LHyIlTvPSm1970hHW3PdCouBWCYTm4u2LZ/7U3n2N79FVBqXGVXa598hUB1FkqkzXkddQ+kLGWGvcsijxm2zbQ73ELQRleioLpfvOMrwc9SOdBkUDzS8kAKr9GKDyOrh1LUt8l2tmEvv2y/bHqbZBVFAS/Up7efXrcMcTmb7Rl8DnpiMjnydDfbSD/EZ/cF+t5sRRG43nbeuS+B0MQAzbtPFwGzIUSWiQPQBED47xT9XSJUnyskWmdtC92bMeThF6xzzz1uCSGFcdVVV5V0EJsQQgghfct+86tu
S6Cp00LSkqgpjzmCUaiPJN3B2p3tSQBOqLiasjgGJEwkTMer7ODqclQlDAyIG0hEjBYk3NGcTgvoTEu0dKZRHjMQNwxvJrQlJdIWIN3Z6AiJWbaUricgIKQrpAnpeMJl6VeqoRE1mBD2alNlZ3i7qXPX2snvOEUdTCAqdI50PRzVIJOaNW9LIGYAtnsecdMZqI9LERg0z4aalBYOq6fPKAVUB9Od5a+2CQiabg4at76mSgKfZcZt+MyVaChFdK/SFymCy8OCT9Q+keWFBaMI0U4R5UkXno0cyK+ottcFSG8gULjfnaEOR3R2Rj/90InSzeEnYdmmW5/MdoyqFxAcWIwK6+lsk3kO6j4xjOh7pjvBL3xt1LH065DNmy2X0KVyVmaUrw00RQl+0QKjJtRa0h9YMaK3CQuApaDUIqIS/vKd+ZzP+ZieQK0a2b1v3YF6W0qYXu6koMgMOJMEAHghPZ3fg+C9p080UPXyhL9QXXN59qW8d0Nm2F6vjfpgsEpK2at59/phZHBSICnbnbQhbUghMr2zAEjLgt28AzAMGJXVkOmk80l2wrDTzkCxIZAQmUKMLYGmjhT2pCxUxk0MrqkGEuUQVso5nm1BWCnEy4By04Bl2xiYiAFIo9OyvfeP+q0yhOE+l7b37rakRFwXY6SEniFCwMmFrAt+mkN8BrZ08xzDtS2FE0EhUvAL2yERYf28kH5KIFNhJm3f7vJy+oUEv6hn0dDejwEbESGvHFdkCoTes10bTNXHiMEUMUjhnq8d9PYL5BS0XdvMkrANt12EgCkMmK5Xpy52qeaSPclDl0Wks0N/696ZGWJsnjnWgKC9kc9bz5LB3JTepDrlbaVsMVsGIyzY6pqkIj0wPcK597oThISRO1dcuFwN7x7TlkXdb9154+ledGq1Lv5FlZOtrKhrEBU1JYooMasU5My52EPPv3yuVdS56iKftwwhjz+39QIau/eeE54oq6O/D/3JfNp7xE4H8/Z1I/Z57zqgWw8/XfALhxQuMoBIwdC2IvszFPwIIYSQ/Yv96pe9I21DSkeYUzlMLG3KovIY2ZOykLIlWjvTsAYCcVMgZjg5ySpijuBXGXNmuqal3yk0hB8C0Qu/l6WDZQPQ9SNd4FAeV2qWpDfIpHVaw7qNCo2po8+8VPuoeqnZmArVaXIGP3J3KCT89tL3kXA8HYUrUFq2EkOcASIp3FnghuMBGIdq/8xjKMFIF/d8sTIoeqj1QFDw08PNAP4seEO4s3ejBkLcGczhFih1yJjwdewJSnDLJqplbK8JWZHin9pO+9vwQhcKVwB08vU5Dkm2t28iZrjin3sMt15qfcYx3O+G9r/u4ZePB0R47C+ffHs9pTvhS3lHhslX3OoulKs6hrpfw95//vsl/3uotzwC86EUXpWmemm53n16CFrYylNV21ZDefU5f/uefkrw0z384u5MgyjP4qj65xPKU/dSjDr3Ugu3hOxNpJSwIJzfEDWhxk5DhapUHh4y5UyokolyyFQKsG3Irk7ASnqTeZQIpw/qWhJo60qjLWlhdzKNDiuGWLwSkG2+176d9iZLxQ0nz++eVGjiiHQdzNzn0xZ+3tAorJDoIrxA7L4doqwI3dMn/KpVAqBExIi5Tji0Z1jwE8EBcNt2bDcl9oXPNRvqlPTfDt3Giqya5lnmzAkyIGG73nlpmDEnh56KqCDcplL1Uv9bngUpvB8w5/0nYBoxSNMV+qQE7DSkEXMEAwPBmT/dCVnSCZSfsVgTlLxlyFPcCQl/GYJUjl3166FsUH+d9Pb3t/MnqunHV9fB85qKEIxl6HvWPHFauRlCsxZCNlx+NjIms2TdMtv+rp2jiX+e/lug2KeTS/jrK4EoJ/p1yTfMbN7hbp029CamRYh9+raOACsDXqrB/qX+ngxNHMgl+OW6N8P3pNY/ixL8otgnryshhBBCCCH7CPuV6FcRM7wOTVXCQMIU2NKWckN4Au1JC6YQ6EjZ2JOy0NyZQnV5DJ1p
G5aUqEqYGFRpQgLYk7bRlrRhS6Ai5ogcpgCqjDQShoHOmABgoixW7odKNHxvNCU8xk3hD5C4SOkPUFu29MREJbAplIcRACjNTxeTVP4qJf7pidWjxBW/MywiO8/hMFF6ng5v5qXhev1JCUudgxrQkCLQUVPLvKNqdTLg1zV6fXRPLpuHo9NJlEjbzjaWt30olJHmsacG79X9kbKlOwDpfLdsmSFAqbConidSyBXAE2/0c4kQitTgRDj0pnecUE82W54R33MvJCxrcf2jxKDIEIS2RFfaBtIAYs75V5XFvOubTNuRXojOJ1gX5943PFFF96gKn093gkgukU8Xy7Iti/L4C++TcUxbRrSbc78YrlIfyL+nmtsVfrKJhjr+er/dvDpqHrDhwcQoMkSwIkdCCslXUoyw5VdXwDSdtotBPTdu7s3QsfRzVqIeEAwrGzdFhlef+g4EPY0zzsP2n82oc9MFv6jlfY20Za/mhuntnH5k38EJySmRNmIwy6og7DRiTRsh0p1AZztkshPCNGEOrgcAiLIK2LubYbU2O2E/00kkag4GjBikGceudBnaUhY+7EijpSuNtq40trcnYUmJzrSFuGHg0JpDUd30LsSeFseGspKAlURFzPHPS9oSlXETe1IWOi3bF9xtCUP4HriGyJQmVO65FJwctfrbUr0/1Gtdt1OcZdpvjRL7BALRBnKiRBol+Cn3ZTMWEPwsqTxKfPvGkr6Xn0L3CgnX1atz6F0ZGKyXNoRtOcJqRG4/SAkpTcBIO6Kd4Xv7+XXwB+m9NhaOp7X/OykBGIibCUc0disnAEhVj9C1CngARZBL5HEvsRNWPvxbHsgxFgqdKB1PUSVwpjTBTpFtcp533Ii6ONsFf8xVu8HQhLtcYT7dELBCC3/q1V21U7Z8kdmI8Bbs6Zu9u5+8sGCji3+RbVNAHRT5mj4l04qiRNeeePaFxb5c4l8h4VzhXB+/naO3cd51EcuFL/KbQtlQ8N8h6l516x4p9ml11yc4wDAgjZh3H4c9/Czt3tiXrA3aVoQQQgghpL/QZ6LfPffcg7vuuguNjY04/vjj8ZOf/AQnn3xyQWU5oZvghW8TIdHLEeIkymIGDAOoKqtAynIEjrKY4eX/K3fFPcsNXZRywyqlbVfUMATSRsKdvGzDNCRi0vdLixvOoItpOCGoTMMR88LdNFvTCKQm9oRnvEblbFMdMF3wM91jCK1D5s3C1DBzdBgl/DZzxtn9UJlSzUh2B/6kAIQUMGQwZI/yDNTFwjB6GBndQ9G5TvkRJYD5IYicXqPwttW38QdvVF09gU9Kr5PpdzxlYD/v+K6IFxxcCIp/GXWOEqU04S9fojycokhb2jHg512Le/8bMOLBnVOW7YRDFEDKDm4PAKl4ZnuoaxE3lMjn/O97GoiIOud3vtnEvnxDqRYiXHW3nx7m01SuDgDg5jcy3IuqX9dcIlApvPPyEfy687DMJzRq7v3Dg3qh8vIQKx2BTW3vhqN1BcDwYxXT3hZhsTks7mV6+AXDCQNBkd2wHW8+GG74PhtOzlLllg1fdNT/jzqnXMsI6Q1KaVulpcqr6YvsAdtC2pDpFKRtAYYJs26EI+wZMRjtrTDamgHbckJ8JvdAxhLuk1vm2GVCIGXZaEta6EhZSKZt7GpLYvjAMgwbEEONlYS9Z7fz/Jc7IeRihkDMEE44zhxj4JZrjMRD3kVRuI5oIXHPF9F0uyUX3Q3JSyGcQXF9ABzI8PCT8H9ZlLeO+h5tV4nQd/f/UN0iax8V5tKtU2CZ58UTfZb6ZC/AEdssCZhuyEAp3He8FIgp0QoxOAkA3TIAL0xivjnMvHbKa+veQRcfM/RFqXLSqgl/wVDUUroZssPnqwSUUG5qIQxIA84Ju30Ar62iBKOeeI9ly2vYfQkF0Z03X74mXE/mOO3Tv8RZBL/IvIsFEBUOOFuuQiX0Ae7EPai/g15+3szIXIKfi/6+C4f1jArp6dRZau9Cvyx6+BFCCCGE
ENI9fSL6Pfzww5g3bx4WLVqECRMm4O6778aUKVOwdu1aDBs2rMflpV2RzvGuc73m4HQQEqaANICYYSBh2LCkgeFVcSTcHkxzlzPgNLDM9PKOqcEJy3ZEpJQtUSEEEobA9j1pxwMqYfqzHg1HFCuPOfvHDKA8Zng5D3zxzEEP+ym8cCrarOmI2ctqwE1NWlYD3Gr2peqImdoMzIxZ2+q77r5mhDr7wvDFQWG4dREBoSwYbkUEOmROn0+JgJniUDahLxA6RuvM6WMCUbkG9W1sKeEPH3klRG6v58RRHUwl/tnuQKcl/fB9hjbgD0iYpvBCrjrCSPTAYuSgheYVGCkchjfP0bmNEhNT7iCt7pVnGo7IEU+YiJsGBiZMR/jTxhFSlhP6tixmeF4SlXHHC8EUznqFH/ows55RefxM77pnP5nu8vT5eRWjR4LCoaOi0MWXsEdgNoErsI90vW+9Kf5qjTtqDECFowTyF3XDnqHdbRNVt/C+YaEvW7voHsVAsM3zCWcadS9kHiN4f0TVOTygro4bzzFMF763okU/3/MU8Aezsj1XjveB816zbOkIfjBgSIkUAGSIgEoxcC99REP0xQCVbcv8R00LLZ/sU5TatupI20ha0rGj1O0k0xCpTkjDhDB80UbE4kgPGwMZKwcAxDvbYO1qhN3eCtnVCaNrN6RdCRtALF6FuAHUlMewO5nGdpnE7s40WvYk8W5jGw6uLcfogypgt+xEunEjYvUjISprgHQSifJyWKYvmhiGgJ2SgUkDtpRIW86za7m2GKDebZr3ri0yQnzqEROiJikBmZOJhBQZ79sAmvdPIFeXypmn/tYGwG2ZOYEqyltHH7QP1Dn0f4/RhF0IM+emqq76T7OlVhhOTkhbuhPWAAjDCfUpkHbuI80kDbxVeih2FOJkLYWRVWiMki7CXn6KbF6HtpRIWo49ZgMYYBie3Q7Am2gWnpTnefhpHplKHBbOBo6HpBnLFP6AbtstHyGp0Fd8OHJF5PHhW7zdCX3hosKmjG5fZCOfxyCbjRQWxvJ5pjyBH8jf+0+7Hk5evyxlI7eXa2Bbt7+Tfb3MeX7KXvLeiYB/X2r5QAMeqWqb0Dlly+OnelDZPPz2NVODthUhhBBCCOkv9Ino96Mf/QgzZ87EZZddBgBYtGgRnnzySdx333247rrrMrbv6upCV1eX9721tTWwPmY4uWYEABM2RLLdCVcEoL5iICwY6EjbSJiml4dOURl3woAm3DCcysOvI+10WGwJtHZZSFoS5TEbm1qdehxSXeZ5BDqD3hKWFKiIGUiYBmKwIdJJR2AzYzBVCBOEO5/C86RTqJCf4Y6scP9ReRYCM9GRKfgJfRYm4M0YzuicuYMIGWFXhAHhioBSGG5uv6AACPiDPb6noCdvAsgU9FS9nXX6lkHCAwFhwVG1kd4xDAt6gD+rVR+UCngGavt6A/1eSCptWMIQruePKxZJ1wNCEwCBYIfNFEGRSheBzNAoYU89woIDjwAMoCvthK7tStvoSFluPj4/D19DdTlqygSqEjHPY0IhIVFdFkNaq3/M0PMn+tfMC6WmiaS2K5KmQm5j4VCKfjsF86YF20LfLij2dZfTL+h1lbk+XK+edLDVYJZXbkD88+97dW0LymuXcczutwEyxUJL5j5/v/xQaFmRKfwB0e2ULexlPh6puUK+Zt3Jq6O6p6JEP9+TT93f+j2sh+4Low+yq9Ce0s17aUsJ05KwDAnDdt4DluGHEfRy/EXU24oVP0ufkO4otW0VRkoJacaARCVkusv5bVXhMY2gMGQNGYXESYOQXPWk4wkIQMbikPEK53dWAk0dKbR0pZGyJNo6U2jek0LSslEZN1FTbsJu2QXrw+2QyU4kEuUQAwZDStv5HZbOb0jKsr2JLt6xbQnA9rx7AddjzjWqIt9H7nvByxmM4LsjG4Zrg3kTm1RbCcMZ444a61cePSEPPy+sp9femZOcousQ+h6yvTJ+MsID8dKKFoCyhNfU
7SjH3graXN7uwtnIFAIWnGtuSGfim2nEIF3PNQjNJg2FPw3UN7QsynOpJ4g8hBjHJgz+1il7WQgnykHMEIi7VUtafr/AmTjo9Cks7VDOpD3nb3/ekL+BFIbzXNn+98AlVHnTYEDatvNjpt1TuU/aCBpjoX160pLq9hYiU5jLJfzJ0P8K/f7JdUk9LS1kA0c9p7lMoOwin7s+x75ORTInVzoF5BD5coXy1MP+qv5YhDuzQPR1Uu8tS0ov56YQcHKwezn9/P5Q1PkFvPuE7+Xn3bNhIVoX/HKcc4ZXs9snVu87JYqr/l2U2KcmvQL+9S50TgMhhBBCCCEHAntd9Esmk1i9ejWuv/56b5lhGJg8eTJWrlwZuc8dd9yBm2++OWP57t27AWgzRqUNYSUhutqcPAPShox9CBmrQFeswuu0tyVFRkch7Xa2kraT+29P2skRk7IlmjvSqIg7Yt6m5g6YQmCArPAGOZRIYhpAKm7CihtIixREOuV0gowYZCwe6Fx7IhUyxSkvl4tbN1VXIUSgM6oLfl7nRwl7dto9QDj0iuYFqHc4vboZAfFPmmZg4CdXvbMNn6iOpS70BeqchQzRD5kdQ98rz13vhVFy2s/z5oPMOqCg/lSecUq8cpstMOs0IEZp2+mDG+o4ukiST/jDngzqAZmhEW0JtCfT2J20sCeZRlunBUtKJNMWTMO5f6tkBYyKODrNlDNQpaku+j2nSMMZBJXCCdMVFmmldPIe2rb0BFjbkoG2CddV1Ve1bS4PP9t7JvxB3cjZ9FHeVXrIRm0gI0y+oRdzXWu9DmGRt1D0a95dHfPxDuypl2U+7Rzl5ZdL9NPFOrVc1asnYWCDQp/zd8wUSLvLlNin8qyqYwnteLpXjD6ILbVzUu8Oy3be8ynbhrTdQVzpvwPUuyiqvQxDINXR7h6niBuih0jb8sSW3iqf7Dv0hm3VlvI9/QTgeXDHBGB0dEF0dUC0tTvefrE4rNZWyFjSLcUEzGrItIRMdqJsdxssOwFpxdGSstCesrGttROtHWm0dyTR0tyK1vYkknuSSO1pR1d7Ai27dqFjZwvEh7tRWT0MqBwGOyXQJU3sbk+htcPCno4k9iQtpC1/ohbgPN8x00AsHffOK+Z6n5e5Xr+mAFIxA6YhUOZ6D5ruNuF5HdneRirUunrvmGpw3Eq6E690YyOU+wsI2FjhvFZpz2vfDTcP/7c+WAe3SE2sdOrhh6Y33PPXbUUnH5fl2odWcODeq5NTPxkvA4wY0tIRttK268EG16tSe1867aVEA+fdbEK4YcCddkoYbh1SXe4kPduzXwMClm6XGjHAEE4eMCMWKRjoqMkdpiGyT4zTUGKEFAZSrtCXcr300lrZKoqH6Z5PwhCIWZ2QwoBlJJC2pftxrlt7yvZsUFFmosx0PM9VlImEIdwJix1ufsUkRKrL7dOkMurqCcVGzMkJqdokLPoFBMBQG4bEZiBahAvb4UDwdyxs9wds64jfu6hfwChxx18XXJktKoi/Pvg96rnNFtIyXG647+LbHUGPN3+SpWog/zkK9LvyIep+z3Kt/MmLzt+WJ7533zcKt4+a7GDCj5QQM+CljVB57VXfWqj3hnp21Xnncz7u8+vUK1h3vX765NLwtVHXoU2NA9C2IoQQQgghJIO9Lvrt3LkTlmWhrq4usLyurg5vv/125D7XX3895s2b533fvHkzxo0bh48ccUSv1pUQQggpFbt370ZNTU1fV4Psh+xPttXf+vTohBBC+hO0rQghhBBCCMmkT8J79pSysjKUlZV536uqqvDmm29i3Lhx+OCDD1BdXd2Htds/aG1txYgRI9ieJYBtWVrYnqWF7Vla8mlPKSV2796NhoaGvVYvzkYn3UHbqvfh+7Z0sC1LC9uztLA9SwttK0IIIYQQQopjr4t+Q4YMgWma2LZtW2D5tm3bUF9fn1cZhmHg4IMPBgBUV1ezc1VC2J6lg21ZWtiepYXtWVq6a0/OQie9CW2rfRu2
Z+lgW5YWtmdpYXuWFtpWhBBCCCGEFEa3ecpLTSKRwPjx4/HMM894y2zbxjPPPIOJEyfu7eoQQgghhPRraFsRQgghhBBCCCGEEKCPwnvOmzcPl1xyCU488UScfPLJuPvuu9He3o7LLrusL6pDCCGE7FdI2+7lEFR2r5VNCoO2FSGEENJ70LYihBBCCCH9hT4R/S688ELs2LED8+fPR2NjI0444QQ89dRTqKury7uMsrIyLFiwIJCPhhQO27N0sC1LC9uztLA9Swvbk+wr0Lba92B7lg62ZWlhe5YWtmdpYXsS0v9Jp9MQQsA0zb6uCtkHkFKira0Nzc3NaGlpQUtLC44//nhUVVX1ddUIIWS/RUgpZV9XghBCCCHF09raipqaGjRc8DMYiYpeO46d7MCW334DLS0tzF9ECCGEkP0W2lakUNS9s69f061bt8K2bQwfPhyGUZoMQK+99ho2bdqEadOmlaS8UpFKpfDaa6/hYx/7WEH7SymxbNkybNy4Ebt27UJTUxOGDx+O2bNnF1231tZWLF++HJ/+9KdRXl5edHn//Oc/MXbsWCQSiaLKkVLi6aefxgsvvIBbbrmlR/u2tLTguuuuwy9/+UtYlu8pPW3aNDz55JMQQhRUp66uLqxfvx5r167FO++8g/Xr1+OWW27JO5d5Nt5++208+uijOOGEEzB9+vSiylJs2LABhx56aMHnCgCdnZ148cUX8dxzz+Hoo4/GBRdcUHS9Ojo6sHTpUvzHf/xHwWXs2bMHmzdvxpYtW7BlyxYMGDCgqPIUH374IZ588kl8+ctfLros27axevVq2LaNCRMmlKS8V199FePHjy+6LMB5Rpqbm3HooYcWtH9XVxfee+89rFu3DuvWrcN7772HG2+8EcOGDSuovM7OTmzevBmNjY3YunUrKisrS/YsSCmLeg503nnnHSSTSRxzzDFFl9XU1ITnn3++6Ht3x44deOutt1BZWYnq6mqMGTOm6LplI1/7Yq/n9COEEEIIIYQQQgghhBTGc889h+985ztFl1NVVYWvfvWrOOecc4qvlMv27dvx+uuvl6Ss3/zmN3jhhReKLqe9vR2f+9znMHv2bOzataugMoQQGDNmDJYtW4arr74at912G1599dWC62TbNhYvXozTTz8dgwcPxpe//OWC66bz+OOP4xOf+AQeeuihgsuwLAu//e1vMX78eEybNg2LFy/Gli1belRGTU0Nfv7zn+O5557DRz/6UW/5X/7yF1x44YUF12358uWYPXs2zjvvPFx33XVYsmQJ9uzZU1BZb731FubPn4+jjz4aY8eOxY033ojnn3++4LoBjnDy0EMPYfLkyTjxxBOxY8eOgsp555138LnPfQ61tbU488wzccstt+Dll18uqm5dXV346U9/itGjR+O//uu/sH379oLLamtrw89//nNMmTIFX/rSl/D0008XVbfXXnsNV1xxBQ4++GBcd9112LZtW8FlvfTSS5g1axZGjBiBk08+GU888URRdQOAnTt3Yvr06fjqV7+KDz/8sKiytm3bhuuvvx4jR44s6jl9/fXXceONN+Lss8/GlVdeifvuuw+dnZ0Fl9fZ2Yn77rsPZ511Fr7whS/gL3/5S8Fl6bz//vv44he/iJaWloLL6OrqwkMPPYRPfepTOPLII/GnP/2p6Ho98sgjGDduHG644QY0NzcXVdbgwYPxyiuv4BOf+ARuv/32outWCvokvCchhBBCeg8prd7NOyN7r2xCCCGEkH0N2lZkX+Poo48uSXjEgQMH4k9/+hMaGxtLUCuHSZMmYdKkSUWXI6XErbfeis2bN2Pp0qX4+Mc/XlA5O3fuxGc/+1msWrUKgCMunHHGGQWVdfDBB+P+++/Ht771LVx11VU4//zzCyoHAAzDwMUXX4wjjzwSv//977FixQrU1NQUXJ6UEv/93/+Nb3/725BS4oEHHsBXvvKVHnvXvP/++5g7dy7+8pe/oKurC4AjBhQarvW0007Dyy+/
jMWLF+OGG27Aj3/8Y4wePbqgsgDgrLPOwllnnYVVq1bhrrvuwtatWzF8+PCCyjrssMNw2mmnYc+ePTAMA6+//joOPvjggusGOJ5Dtm3j6KOPhmVZBXvQjhkzBg899BD+/Oc/Y8mSJXjyySeL8mzasGED5s2bhxdeeAHbtm1DZWUl0ul0weUNGzYMP/rRj3DllVfie9/7HqZMmVJwWe+++y4WL16Mf//73xgwYADa29sRj8cLLm/8+PGwLAuDBw/Gk08+idra2oLLAoCVK1figgsuwKZNm2AYBjZs2ICDDjqooLJ27NiBH/3oR3jttddQV1dXlPA6fvx4PPLII3jrrbfwgx/8ABs2bOhReo8wtbW1uO222zBnzhzceuutmDp1asFlKdavX4/vfOc7SKfTaGlpKfgd9+abb+K9997DQQcdhJEjRyKVShVcp8bGRsyZMwePPPIIAN/jspj7xDAMzJ07F2ecccY+E9qa4T0JIYSQ/QTl5j/8Cz+BEe/FEFSpDmz93Tf3+XBFhBBCCCHFQNuKFEp/Ce+5L7NmzRqMHz8e3//+93H11VcXFBrugw8+wMyZM2HbNo488kgcddRROPnkk3HSSScVXT8pJTo7O1FRUZp3g5QSUsqCRaIHH3wQTz/9NEaOHOl9TjvttILrZ9s2Nm/ejHfffRfr1q3DtGnTMGLEiILKUrS2tuLDDz8sOJxhFJs3by5aqNPL2rlzJ44//viSlFdKWlpasHPnzqIEU0VXVxe2bNmC2tragsWrMLZtlyREsJQSjY2NGDhwYMnyPnZ2dhYcNrepqQm/+c1vMHDgQBx00EEYNGgQRo8eXfSzoChVuwHOBIchQ4aUpCygtCE5S017ezsGDBhQ0L4rV67Erl27YNu29znuuOPwkY98pMS17B3ytS8o+hFCCCH7CerHv/78u3t9YKrxkbkcxCCEEELIfg1tK1IoFP2K5+6778Zxxx2HT33qUwWXUcoBdUIIIaSvyde+YHhPQgghhBBCCCGEEELIPsOsWbNQVlZWVBkU/AghhByI9Mtfv3vuuQeHHXYYysvLMWHCBLz00kt9XaV+wU033QQhROBz1FFHees7Ozsxe/ZsDB48GFVVVTj//POLSpy6v7FixQqcffbZaGhogBACf/jDHwLrpZSYP38+hg8fjoqKCkyePBnvvvtuYJumpibMmDED1dXVqK2txeWXX462tra9eBb7Dt2156WXXppxv4bjSbM9He644w6cdNJJGDhwIIYNG4ZzzjkHa9euDWyTz/O9ceNGfOYzn0FlZSWGDRuGa665pqgY7/2VfNrzjDPOyLg/v/71rwe26cv2lLbV6x+yf0HbqjBoWxUHbavSQtuqdNC2Ki20rWhbEVIIxQp+hBBCyIFKvxP9Hn74YcybNw8LFizAK6+8guOPPx5TpkwpKvHlgcTRRx+NrVu3ep/nn3/eW3fllVfi8ccfx+9+9zssX74cW7ZswXnnndeHtd23aG9vx/HHH4977rkncv2dd96JhQsXYtGiRVi1ahUGDBiAKVOmoLOz09tmxowZeOONN7B06VI88cQTWLFiBa644oq9dQr7FN21JwBMnTo1cL8++OCDgfVsT4fly5dj9uzZePHFF7F06VKkUimcddZZaG9v97bp7vm2LAuf+cxnkEwm8cILL+DXv/41Fi9ejPnz5/fFKfUp+bQnAMycOTNwf955553eOrYn6U/QtioO2laFQ9uqtNC2Kh20rUoLbStCCCGEEEL2Hv0up9+ECRNw0kkn4ac//SkAJz73iBEj8M1vfhPXXXddH9du3+amm27CH/7wB6xZsyZjXUtLC4YOHYolS5bg85//PADg7bffxtixY7Fy5Uqccsope7m2+zZCCDz22GM455xzADgz0RsaGnDVVVfh6quvBuC0aV1dHRYvXoyLLroIb731FsaNG4eXX34ZJ554IgDgqaeewvTp07Fp0yY0NDT01en0OeH2BJzZ6M3NzRmz1BVsz+zs2LED
w4YNw/Lly3H66afn9Xz/5S9/wWc/+1ls2bIFdXV1AIBFixbh2muvxY4dO5BIJPrylPqUcHsCzmz0E044AXfffXfkPn3Vniq297DP3dnreWe2//HbzFGyn0DbqnBoW5UO2lalhbZVaaFtVVpoW2VC22r/gzn9CCGEEFJq8rUv+pWnXzKZxOrVqzF58mRvmWEYmDx5MlauXNmHNes/vPvuu2hoaMDhhx+OGTNmYOPGjQCA1atXI5VKBdr2qKOOwsiRI9m2efDee++hsbEx0H41NTWYMGGC134rV65EbW2tN4gCAJMnT4ZhGFi1atVer3N/YNmyZRg2bBiOPPJIzJo1C7t27fLWsT2z09LSAgAYNGgQgPye75UrV+LYY4/1BlEAYMqUKWhtbcUbb7yxF2u/7xFuT8UDDzyAIUOG4JhjjsH111+PPXv2eOvYnqS/QNuqeGhb9Q60rXoH2laFQduqtNC2IoQQQgghpPeI9XUFesLOnTthWVbA0AeAuro6vP32231Uq/7DhAkTsHjxYhx55JHYunUrbr75Zpx22ml4/fXX0djYiEQigdra2sA+dXV1aGxs7JsK9yNUG0Xdm2pdY2Mjhg0bFlgfi8UwaNAgtnEEU6dOxXnnnYdRo0Zh/fr1uOGGGzBt2jSsXLkSpmmyPbNg2zbmzp2LT3ziEzjmmGMAIK/nu7GxMfL+VesOVKLaEwC+9KUv4dBDD0VDQwP+9a9/4dprr8XatWvx6KOPAuj79pS23au5YaRt91rZZO9C26o4aFv1HrStSg9tq8KgbVVaaFtlL58QQgghhJBS0K9EP1Ic06ZN8/4+7rjjMGHCBBx66KH47W9/i4qK3gtVQkghXHTRRd7fxx57LI477jiMHj0ay5Ytw6RJk/qwZvs2s2fPxuuvvx7IKUUKJ1t76vmNjj32WAwfPhyTJk3C+vXrMXr06L1dTUJIH0HbivQnaFsVBm2r0kLbihBCCCGEkN6lX4X3HDJkCEzTxLZt2wLLt23bhvr6+j6qVf+ltrYWY8aMwbp161BfX49kMonm5ubANmzb/FBtlOverK+vx/bt2wPr0+k0mpqa2MZ5cPjhh2PIkCFYt24dALZnFHPmzMETTzyB5557Docccoi3PJ/nu76+PvL+VesORLK1ZxQTJkwAgMD92ZftadtWr3/I/gFtq9JC26p00LbqfWhbdQ9tq9JC24q2FSGEEEII6X36leiXSCQwfvx4PPPMM94y27bxzDPPYOLEiX1Ys/5JW1sb1q9fj+HDh2P8+PGIx+OBtl27di02btzIts2DUaNGob6+PtB+ra2tWLVqldd+EydORHNzM1avXu1t8+yzz8K2ba9TS7KzadMm7Nq1C8OHDwfA9tSRUmLOnDl47LHH8Oyzz2LUqFGB9fk83xMnTsRrr70WGOxbunQpqqurMW7cuL1zIvsI3bVnFGvWrAGAwP3J9iT9AdpWpYW2VemgbdX70LbKDm2r0kLbihBCCCGEkL1HvwvvOW/ePFxyySU48cQTcfLJJ+Puu+9Ge3s7Lrvssr6u2j7P1VdfjbPPPhuHHnootmzZggULFsA0TXzxi19ETU0NLr/8csybNw+DBg1CdXU1vvnNb2LixIk45ZRT+rrq+wRtbW3eTFMAeO+997BmzRoMGjQII0eOxNy5c3HrrbfiiCOOwKhRo/Dd734XDQ0NOOeccwAAY8eOxdSpUzFz5kwsWrQIqVQKc+bMwUUXXYSGhoY+Oqu+I1d7Dho0CDfffDPOP/981NfXY/369fj2t7+Nj3zkI5gyZQoAtqfO7NmzsWTJEvzxj3/EwIEDvbwmNTU1qKioyOv5PuusszBu3Dh85StfwZ133onGxkZ85zvfwezZs1FWVtaXp7fX6a49169fjyVLlmD69OkYPHgw/vWvf+HKK6/E6aefjuOOOw5A37entK1ezjvD2ej7E7StCoe2VXHQtiottK1KB22r0kLbKr/yCSGEEEIIKQVCSin7uhI95ac//Snu
uusuNDY24oQTTsDChQsPuNmnhXDRRRdhxYoV2LVrF4YOHYpTTz0Vt912m5cjobOzE1dddRUefPBBdHV1YcqUKfjZz352wIafCbNs2TKceeaZGcsvueQSLF68GFJKLFiwAPfeey+am5tx6qmn4mc/+xnGjBnjbdvU1IQ5c+bg8ccfh2EYOP/887Fw4UJUVVXtzVPZJ8jVnj//+c9xzjnn4NVXX0VzczMaGhpw1lln4Xvf+x7q6uq8bdmeDkKIyOX3338/Lr30UgD5Pd/vv/8+Zs2ahWXLlmHAgAG45JJL8P3vfx+xWL+bH1IU3bXnBx98gC9/+ct4/fXX0d7ejhEjRuDcc8/Fd77zHVRXV3vb90V7tra2oqamBoOm3QIjXt5rx7FTnWj6y3y0tLQEzpn0X2hbFQZtq+KgbVVaaFuVDtpWpYW2VffQttr/UPcOrykhhBBCSkW+9kW/FP0IIYQQkon68T9oyoJeH5j68OmbOYhBCCGEkP0a2lakUCj6EUIIIaTU5Gtf9KucfoQQQgghhBBCCCGEEEIIIYSQTCj6EUIIIfsblgXZix9YzDtDCCGEkAMI2lYHJPfccw8OO+wwlJeXY8KECXjppZf6ukqEEFIw//73v9HV1dXX1SCE7AUo+hFCCCGEEEIIIYQQ4vLwww9j3rx5WLBgAV555RUcf/zxmDJlCrZv397XVSOk31OqTFPbtm3D//7v/2LXrl1Fl9XV1YW///3vWLZsWdFl/fvf/y66jN7AsiyMGTMG06ZNw//5P/8Hb731VknK7ezsxIYNG0pSFoCSlpVMJtHa2lqSsizLwssvv1ySsgBg586d2Lx5c9HlSCmxc+dOvPTSSwU/C9u3b8fKlSvx17/+FY888gh+/etf46GHHirZs7px48aSlAM455tKpUpSzttvv12ycwSAjo4O7N69u2TlFQNFP0IIIWQ/Q0oL0u7Fj+RsdEIIIYQcONC2OvD40Y9+hJkzZ+Kyyy7DuHHjsGjRIlRWVuK+++6L3L6rqwutra2BT2/y/vvvY+nSpSUr7+GHHy7JQGVLSwvuuusuJJPJossq5UBsb/HOO+/gf/7nf0pS11Kf70MPPVT0fWhZFp588kn84Ac/wIcfflh0nSzLwi233ILly5cXXMbTTz+NT3/606irq0N9fT0uuOAC2LZdUFmvvPIKrrvuOpx22mmoqanBqaeeWpRH7+bNm3HFFVfg4osvxurVq/GLX/wCe/bsKbg8nbVr1+Jvf/tbUWUcccQRePjhh7Fs2TLMmzevYNFPSomVK1fitttuw6RJk1BbW4uHHnqoqLoBwN///nd89rOfxQUXXFCSdluxYgU++tGPYvXq1SUp68QTT8T8+fOL9pbcsWMHrr32Whx22GF49dVXCy7nnXfewfTp01FdXY2hQ4di8uTJsAqMHFBdXY1nn30W5557Lj7/+c/j0ksvxapVqyCEKLh+gCOAX3DBBbjssstK4mW6Y8cOnHvuuSURX4UQeOmll3D44YfjlVdeKbq8Z599Fscddxxef/31ossqBRT9CCGEEEIIIYQQQgiB4xmyevVqTJ482VtmGAYmT56MlStXRu5zxx13oKamxvuMGDGiV+u4efNmrFmzpmTlvfDCC0WLOn/7299w5plnYv78+Vi/fn3Rddq5cye+9rWv4dBDD8WCBQvw/PPPF13m3/72N/zwhz8suhxFRUUFPvWpTxU9MA4AS5YswV//+tcS1MrhxRdfLFr0M00TxxxzDHbs2AHDKG4IeevWrfj0pz+NBQsW4Be/+EXB5UyZMgULFy7E+eefj8rKStTW1hYsmB5//PGYNGkSDjvsMJimCQAFCYjNzc24/vrrccQRR+CXv/wl/v73v3sC0fvvv19Q3XSWLVuGuXPn4umnny66rFNOOQUPPvggzj//fEyZMqWgMoQQGDJkCKSUaGxsRFdXV8FiE+Dcq5/85Cdx6qmn4sknn8Tbb7+NpqamgsvbsWMHLrvsMnzyk5/Em2++
iYcffrjgsjZs2IALLrgAn/zkJ7FmzRr87W9/w6ZNmwoqa/v27bjmmmtw2GGH4c4770R7ezueffbZgus2ZswYPPbYY/jhD3+Iww47DMOHD0c8Hi+orPLyctx44414++23ceGFFwIAPvrRjxZct+bmZlxzzTUYO3Ysfve732HVqlXYtm1bweUBwJNPPoljjz0Wf/zjH7NOwOkpF198MW699VaMHTu24DKamprwn//5n5g0aRLWrVuH//f//l9J6lYsQvaHqTOEEEII6ZbW1lZnoOHMayFiZb12HJnuQstzP0BLSwuqq6t77TiEEEIIIX0JbasDky1btuDggw/GCy+8gIkTJ3rLv/3tb2P58uVYtWpVxj5dXV0BL4bW1laMGDHigL2mtm0XLRIBwOjRo/Hvf/8bsVgMW7ZswdChQ4sqb8uWLdi2bVtRg9m9xTvvvIMjjjiiJALivkY6ncavf/1rdHZ2orq6GoMHD8a0adOKPtfm5mY8+eSTmDFjRtF13L17Nx555BEcfvjhOP300/Pe73//938xd+5cvP32257wJYTAX//6V0yaNGmfvZ5tbW2oqqoqSVmvv/46kskkPvaxjxW0v5QSTU1NWL9+PdatW4d169bha1/7Gurq6npclm3b+O1vf4vNmzejs7MTXV1dqK+vx6xZs3p8Ldra2vDLX/4SjY2NaGlpQXNzM3bv3o0f/vCHBYlEHR0d2LhxIzZs2ID3338fGzZswMiRI/H1r3+9x2WFSaVSWL58eWCySjEsW7YMY8aMQUNDQ0H7r169Gu+++y6ampqwa9cuNDU14Vvf+hZGjRpVUHlvvvkmlixZgrKyMpSVlaG+vh5f+cpX+vz5SqVSuPfee7F582ak02mk02mMGDECc+fO7bW6Kdu0O/uCoh8hhBCyn8CBKUIIIYSQ0kHb6sCkENEvTL6DciQ3X/7yl/HAAw/g3HPPxaOPPtrX1SEkK7ZtY9euXdi2bRu2bduGgQMH4uSTT+7rahFC9jPytS9ie7FOhBBCCCGEEEIIIYTsswwZMgSmaWaEItu2bRvq6+v7qFYHJhMnTsQDDzyAyy67rK+rQkhODMPA0KFDMXToUBxzzDF9XR1CyAEOc/oRQggh+xnStnr9QwghhBByoEDb6sAikUhg/PjxeOaZZ7xltm3jmWeeCXj+kd7n4x//OOrq6jBt2rS+rgohhBDSb6CnHyGEEEIIIYQQQgghLvPmzcMll1yCE088ESeffDLuvvtutLe30+NsL3Psscdi1qxZiMU4fEkIIYTkC381CSGEkP0MaduAbfdu+YQQQgghBwi0rQ48LrzwQuzYsQPz589HY2MjTjjhBDz11FOoq6vr66odUMRiMVxzzTV9XQ1CCCGkX0HRjxBCCCGEEEIIIYQQjTlz5mDOnDl9XY0DnsrKyr6uAiGEENKvoOhHCCGE7GdI2wJ6MTcM884QQggh5ECCthUhhBBCCOkvGH1dAUIIIYQQQgghhBBCCCGEEEJIcdDTjxBCCNnP4Gx0QgghhJDSQduKEEIIIYT0F+jpRwghhBBCCCGEEEIIIYQQQkg/h55+hBBCyH6GbVsQnI1OCCGEEFISaFsRQgghhJD+Aj39CCGEEEIIIYQQQgghhBBCCOnn0NOPEEII2c+Qlg2IXpyNbtm9VjYhhBBCyL4GbStCCCGEENJfoKcfIYQQQgghhBBCCCGEEEIIIf0cevoRQggh+xlSWkBv5p2RzDtDCCGEkAMH2laEEEIIIaS/QE8/QgghhBBCCCGEEEIIIYQQQvo59PQjhBBC9jOkbfVu3plenOlOCCGEELKvQduKEEIIIYT0F+jpRwghhBBCCCGEEEIIIYQQQkg/h55+hBBCyH4GZ6MTQgghhJQO2laEEEIIIaS/QE8/QgghhOxT3Hbbbfj4xz+OyspK1NbW5rWPlBLz58/H8OHDUVFRgcmTJ+Pdd98NbNPU1IQZM2aguroatbW1uPzyy9HW1tYLZ0AIIYQQsm9Au4oQ
Qggh5MCCoh8hhBCynyFtq9c/vUkymcQXvvAFzJo1K+997rzzTixcuBCLFi3CqlWrMGDAAEyZMgWdnZ3eNjNmzMAbb7yBpUuX4oknnsCKFStwxRVX9MYpEEIIIWQ/oj/bVrSrCCGEEEIOLISUUvZ1JQghhBBSPK2traipqUFs3AWAGe+9A1kppN/8LT744ANUV1d7i8vKylBWVlaywyxevBhz585Fc3Nzzu2klGhoaMBVV12Fq6++GgDQ0tKCuro6LF68GBdddBHeeustjBs3Di+//DJOPPFEAMBTTz2F6dOnY9OmTWhoaChZvQkhhBCyf7A/2Va0q/YuLS0tqK2tzbimhBBCCCGF0traihEjRqC5uRk1NTVZt2NOP0IIIWQ/IZFIoL6+Ho1v/rbXj1VVVYURI0YEli1YsAA33XRTrx87zHvvvYfGxkZMnjzZW1ZTU4MJEyZg5cqVuOiii7By5UrU1tZ6A1MAMHnyZBiGgVWrVuHcc8/d6/UmhBBCyL7NgWhb0a4qDbt37waAjGtKCCGEEFIsu3fvpuhHCCGEHAiUl5fjvffeQzKZ7PVjSSkhhAgsK6WXX09obGwEANTV1QWW19XVeesaGxsxbNiwwPpYLIZBgwZ52xBCCCGE6ByIthXtqtLQ0NCADz74AAMHDsy4rqVAzfSnJ2HfwuvQ9/Aa7BvwOvQ9vAb7Br19HaSU2L17d7dRFSj6EUIIIfsR5eXlKC8v7+tqZHDdddfhBz/4Qc5t3nrrLRx11FF7qUaEEEIIId2zL9pWtKv2fQzDwCGHHNLrx6murubg7j4Ar0Pfw2uwb8Dr0PfwGuwb9OZ1yOXhp6DoRwghhJBe56qrrsKll16ac5vDDz+8oLLr6+sBANu2bcPw4cO95du2bcMJJ5zgbbN9+/bAful0Gk1NTd7+hBBCCCH9AdpVhBBCCCEkGxT9CCGEENLrDB06FEOHDu2VskeNGoX6+no888wz3mBUa2srVq1ahVmzZgEAJk6ciObmZqxevRrjx48HADz77LOwbRsTJkzolXoRQgghhPQGtKsIIYQQQkg2jL6uACGEEEKIzsaNG7FmzRps3LgRlmVhzZo1WLNmDdra2rxtjjrqKDz22GMAACEE5s6di1tvvRV/+tOf8Nprr+Hiiy9GQ0MDzjnnHADA2LFjMXXqVMycORMvvfQS/v73v2POnDm46KKLuo2FTgghhBDSX6FdtX9SVlaGBQsW9FlObeLA69D38BrsG/A69D28BvsG+8p1EFJK2ac1IIQQQgjRuPTSS/HrX/86Y/lzzz2HM844A4AzIHX//fd7oa2klFiwYAHuvfdeNDc349RTT8XPfvYzjBkzxtu/qakJc+bMweOPPw7DMHD++edj4cKFqKqq2hunRQghhBCy16FdRQghhBByYEHRjxBCCCGEEEIIIYQQQgghhJB+DsN7EkIIIYQQQgghhBBCCCGEENLPoehHCCGEEEIIIYQQQgghhBBCSD+Hoh8hhBBCCCGEEEIIIYQQQggh/RyKfoQQQgghhBBCCCGEEEIIIYT0cyj6EUIIIYQQQgghhBDST7jnnntw2GGHoby8HBMmTMBLL73U11Xab7npppsghAh8jjrqKG99Z2cnZs+ejcGDB6Oqqgrnn38+tm3b1oc13j9YsWIFzj77bDQ0NEAIgT/84Q+B9VJKzJ8/H8OHD0dFRQUmT56Md999N7BNU1MTZsyYgerqatTW1uLyyy9HW1vbXjyL/k131+DSSy/NeDamTp0a2IbXoDjuuOMOnHTSSRg4cCCGDRuGc845B2vXrg1sk887aOPGjfjMZz6DyspKDBs2DNdccw3S6fTePJV+TT7X4Ywzzsh4Hr7+9a8Httmb14GiHyGEEEIIIYQQQggh/YCHH34Y8+bNw4IFC/DKK6/g+OOPx5QpU7B9+/a+rtp+y9FHH42tW7d6n+eff95bd+WVV+Lxxx/H7373OyxfvhxbtmzBeeed14e13T9ob2/H8ccfj3vu
uSdy/Z133omFCxdi0aJFWLVqFQYMGIApU6ags7PT22bGjBl44403sHTpUjzxxBNYsWIFrrjiir11Cv2e7q4BAEydOjXwbDz44IOB9bwGxbF8+XLMnj0bL774IpYuXYpUKoWzzjoL7e3t3jbdvYMsy8JnPvMZJJNJvPDCC/j1r3+NxYsXY/78+X1xSv2SfK4DAMycOTPwPNx5553eur19HYSUUvZKyYQQQgghhBBCCCGEkJIxYcIEnHTSSfjpT38KALBtGyNGjMA3v/lNXHfddX1cu/2Pm266CX/4wx+wZs2ajHUtLS0YOnQolixZgs9//vMAgLfffhtjx47FypUrccopp+zl2u6fCCHw2GOP4ZxzzgHgePk1NDTgqquuwtVXXw3AuRZ1dXVYvHgxLrroIrz11lsYN24cXn75ZZx44okAgKeeegrTp0/Hpk2b0NDQ0Fen0y8JXwPA8fRrbm7O8ABU8BqUnh07dmDYsGFYvnw5Tj/99LzeQX/5y1/w2c9+Flu2bEFdXR0AYNGiRbj22muxY8cOJBKJvjylfkn4OgCOp98JJ5yAu+++O3KfvX0d6OlHCCGEEEIIIYQQQsg+TjKZxOrVqzF58mRvmWEYmDx5MlauXNmHNdu/effdd9HQ0IDDDz8cM2bMwMaNGwEAq1evRiqVClyPo446CiNHjuT16EXee+89NDY2Btq9pqYGEyZM8Np95cqVqK2t9cQmAJg8eTIMw8CqVav2ep33V5YtW4Zhw4bhyCOPxKxZs7Br1y5vHa9B6WlpaQEADBo0CEB+76CVK1fi2GOP9YQmAJgyZQpaW1vxxhtv7MXa7z+Er4PigQcewJAhQ3DMMcfg+uuvx549e7x1e/s6xEpeIiGEEEIIIYQQQgghpKTs3LkTlmUFBg0BoK6uDm+//XYf1Wr/ZsKECVi8eDGOPPJIbN26FTfffDNOO+00vP7662hsbEQikUBtbW1gn7q6OjQ2NvZNhQ8AVNtGPQdqXWNjI4YNGxZYH4vFMGjQIF6bEjF16lScd955GDVqFNavX48bbrgB06ZNw8qVK2GaJq9BibFtG3PnzsUnPvEJHHPMMQCQ1zuosbEx8llR60jPiLoOAPClL30Jhx56KBoaGvCvf/0L1157LdauXYtHH30UwN6/DhT9CCGEEEIIIYQQQgghJMS0adO8v4877jhMmDABhx56KH7729+ioqKiD2tGSN9y0UUXeX8fe+yxOO644zB69GgsW7YMkyZN6sOa7Z/Mnj0br7/+eiCnKNn7ZLsOeq7KY489FsOHD8ekSZOwfv16jB49em9Xk+E9CSGEEEIIIYQQQgjZ1xkyZAhM08S2bdsCy7dt24b6+vo+qtWBRW1tLcaMGYN169ahvr4eyWQSzc3NgW14PXoX1ba5noP6+nps3749sD6dTqOpqYnXppc4/PDDMWTIEKxbtw4Ar0EpmTNnDp544gk899xzOOSQQ7zl+byD6uvrI58VtY7kT7brEMWECRMAIPA87M3rQNGPEEIIIYQQQgghhJB9nEQigfHjx+OZZ57xltm2jWeeeQYTJ07sw5odOLS1tWH9+vUYPnw4xo8fj3g8Hrgea9euxcaNG3k9epFRo0ahvr4+0O6tra1YtWqV1+4TJ05Ec3MzVq9e7W3z7LPPwrZtbzCelJZNmzZh165dGD58OABeg1IgpcScOXPw2GOP4dlnn8WoUaMC6/N5B02cOBGvvfZaQIBdunQpqqurMW7cuL1zIv2c7q5DFGvWrAGAwPOwN68Dw3sSQgghhBBCCCGEENIPmDdvHi655BKceOKJOPnkk3H33Xejvb0dl112WV9Xbb/k6quvxtlnn41DDz0UW7ZswYIFC2CaJr74xS+ipqYGl19+OebNm4dBgwahuroa3/zmNzFx4kSccsopfV31fk1bW5vnIQMA7733HtasWYNBgwZh5MiRmDt3Lm699VYcccQRGDVqFL773e+ioaEB55xzDgBg7NixmDp1KmbOnIlF
ixYhlUphzpw5uOiii9DQ0NBHZ9W/yHUNBg0ahJtvvhnnn38+6uvrsX79enz729/GRz7yEUyZMgUAr0EpmD17NpYsWYI//vGPGDhwoJf7raamBhUVFXm9g8466yyMGzcOX/nKV3DnnXeisbER3/nOdzB79myUlZX15en1G7q7DuvXr8eSJUswffp0DB48GP/6179w5ZVX4vTTT8dxxx0HoA+ugySEEEIIIYQQQgghhPQLfvKTn8iRI0fKRCIhTz75ZPniiy/2dZX2Wy688EI5fPhwmUgk5MEHHywvvPBCuW7dOm99R0eH/MY3viEPOuggWVlZKc8991y5devWPqzx/sFzzz0nAWR8LrnkEimllLZty+9+97uyrq5OlpWVyUmTJsm1a9cGyti1a5f84he/KKuqqmR1dbW87LLL5O7du/vgbPonua7Bnj175FlnnSWHDh0q4/G4PPTQQ+XMmTNlY2NjoAxeg+KIan8A8v777/e2yecdtGHDBjlt2jRZUVEhhwwZIq+66iqZSqX28tn0X7q7Dhs3bpSnn366HDRokCwrK5Mf+chH5DXXXCNbWloC5ezN6yDcihNCCCGEEEIIIYQQQgghhBBC+inM6UcIIYQQQgghhBBCCCGEEEJIP4eiHyGEEEIIIYQQQgghhBBCCCH9HIp+hBBCCCGEEEIIIYQQQgghhPRzKPoRQgghhBBCCCGEEEIIIYQQ0s+h6EcIIYQQQgghhBBCCCGEEEJIP4eiHyGEEEIIIYQQQgghhBBCCCH9HIp+hBBCCCGEEEIIIYQQQgghhPRzKPoRQgghhBBCCCGEEEIIIYQQ0s+h6EcIIYQQQgghhBBCCCGEEEJIP4eiHyGEEEIIIYQQQgghhBBCCCH9HIp+hBBCCCGEEEIIIYQQQgghhPRz/j8IYGfXPEAH6wAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "\n", + "def visualize_sample(batch_input, batch_target, sample_idx=0, time_step=0):\n", + " \"\"\"\n", + " 可视化指定样本和时步的输入/目标数据\n", + " 参数:\n", + " batch_input : 输入张量 [B, T, C, H, W]\n", + " batch_target : 目标张量 [B, T, C, H, W]\n", + " sample_idx : 批次中的样本索引\n", + " time_step : 要显示的时间步\n", + " \"\"\"\n", + " # 提取数据并转换为numpy数组\n", + " input_data = batch_input[sample_idx].numpy() # [T, C, H, W]\n", + " target_data = batch_target[sample_idx].numpy()\n", + " \n", + " # 创建绘图\n", + " fig, axes = plt.subplots(2, 3, figsize=(18, 12))\n", + " \n", + " # 绘制输入序列的u分量\n", + " u_input = input_data[time_step, 0]\n", + " im1 = axes[0,0].imshow(u_input, cmap='RdBu_r', origin='lower', vmin=-1, vmax=1)\n", + " axes[0,0].set_title(f'Input u (t={time_step})')\n", + " plt.colorbar(im1, ax=axes[0,0])\n", + " \n", + " # 绘制输入序列的v分量\n", + " v_input = input_data[time_step, 1]\n", + " im2 = axes[0,1].imshow(v_input, cmap='RdBu_r', origin='lower', vmin=-1, vmax=1)\n", + " axes[0,1].set_title(f'Input v (t={time_step})')\n", + " plt.colorbar(im2, ax=axes[0,1])\n", + " \n", + " # 输入矢量合成图\n", + " X, Y = np.meshgrid(np.arange(0, 256, 10), np.arange(0, 256, 10)) # 下采样\n", + " axes[0,2].quiver(X, Y, u_input[::10,::10], v_input[::10,::10], \n", + " scale=30, color='k')\n", + " axes[0,2].set_title(f'Vector Field (t={time_step})')\n", + " \n", + " # 绘制目标序列的u分量\n", + " u_target = target_data[time_step, 0]\n", + " im3 = axes[1,0].imshow(u_target, cmap='RdBu_r', origin='lower', vmin=-1, vmax=1)\n", + " axes[1,0].set_title(f'Target u (t={time_step})')\n", + " plt.colorbar(im3, ax=axes[1,0])\n", + " \n", + " # 绘制目标序列的v分量\n", + " v_target = target_data[time_step, 1]\n", + " im4 = axes[1,1].imshow(v_target, cmap='RdBu_r', origin='lower', vmin=-1, vmax=1)\n", + " axes[1,1].set_title(f'Target v (t={time_step})')\n", + " plt.colorbar(im4, ax=axes[1,1])\n", + "\n", + " 
\n", + " # 目标矢量合成图\n", + " axes[1,2].quiver(X, Y, u_target[::10,::10], v_target[::10,::10], \n", + " scale=30, color='k')\n", + " axes[1,2].set_title(f'Target Vector Field (t={time_step})')\n", + " \n", + " plt.tight_layout()\n", + " plt.show()\n", + "\n", + "# 获取一个批次数据\n", + "sample_input, sample_target = next(iter(train_loader))\n", + "\n", + "# 可视化第一个样本的第一个时间步\n", + "visualize_sample(sample_input, sample_target, \n", + " sample_idx=0, \n", + " time_step=5) # 可修改0-9之间的任意时间步" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "485cc7f9-ed2b-4d12-94b2-e51567aa6715", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.20" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuroshio_animation-checkpoint.gif b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuroshio_animation-checkpoint.gif new file mode 100644 index 0000000000000000000000000000000000000000..dd06eec2e8006fe60f84d747ccd22f8a0043086d --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/kuroshio_animation-checkpoint.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76f4227a35cb70c8ed629c37435beb5f9d437c935fe15ffb70f4c640d34d1675 +size 2279935 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/main-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/main-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ 
b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/main-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/ocean_currents_animation-checkpoint.gif b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/ocean_currents_animation-checkpoint.gif new file mode 100644 index 0000000000000000000000000000000000000000..13c422582a1c8f4252bcfa9decb38f81ffee602f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/ocean_currents_animation-checkpoint.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bec473f412ddb5c4cf4401956512b98558b10d930c8dc54f6aacc50f5908d328 +size 3481580 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/sample_animation-checkpoint.gif b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/sample_animation-checkpoint.gif new file mode 100644 index 0000000000000000000000000000000000000000..a150cee069c2b34ec1c4a1ba34ecdcd856fce875 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/sample_animation-checkpoint.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff5110a29f1fdba80867171e44697dfb8e18bd45f9d3bfeecddfe467b3687cde +size 6034751 diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..7d409e767cac278b9d4cea680ba42dc8bb2a81a3 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton-checkpoint.py @@ -0,0 +1,177 @@ +import os +import random +import torch +import numpy as np +import torch.nn as nn +import torch.optim as optim +import torch.utils.data as data +import torch.distributed as dist +import netCDF4 as nc +import logging +from tqdm 
import tqdm +from torch.utils.data.distributed import DistributedSampler +from model.Triton_model import * +from torch.optim.lr_scheduler import CosineAnnealingLR +from dataloader_api.dataloader_kuroshio import * + +# Setup logging +backbone = 'Kuro_Triton_exp3_20241111' +logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Kuro_exp/logs/{backbone}_training_log.log', + level=logging.INFO, + format='%(asctime)s %(message)s') + +# Set a specific seed +seed = 42 +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False +set_seed(seed) + +# ========================== Distributed Training Setup ========================== +dist.init_process_group(backend='nccl') +local_rank = int(os.environ['LOCAL_RANK']) +torch.cuda.set_device(local_rank) +device = torch.device("cuda", local_rank) +num_gpus = torch.cuda.device_count() + +def reduce_mean(tensor, nprocs): + rt = tensor.clone() + dist.all_reduce(rt, op=dist.ReduceOp.SUM) + rt /= nprocs + return rt + +# ============================== Data Loading ============================== +train_loader, val_loader, test_loader, mean, std = load_data( + data_path='/jizhicfs/easyluwu/ocean_project/kuro/ft_local', + batch_size=8, + val_batch_size=8, + horizon=10, + num_workers=8 + ) + +for input_frames, output_frames in iter(train_loader): + print(input_frames.shape, output_frames.shape) # [B, T, C, H, W] + break + +# ============================== Model Setup ============================== +model = Triton( + shape_in=(10, 2, 128, 128), + spatial_hidden_dim=256, + output_channels=2, + temporal_hidden_dim=512, + num_spatial_layers=4, + num_temporal_layers=8) + +model = model.to(device) +model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=False) + +# ============================== 
Criterion and Optimizer ============================== +criterion = nn.MSELoss() +optimizer = optim.Adam(model.parameters(), lr=1e-2) + +num_epochs = 2000 +scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0) + +# ============================== Training, Validation, and Testing Functions ============================== +def train(model, train_loader, criterion, optimizer, device): + model.train() + train_loss = 0.0 + for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + optimizer.zero_grad() + outputs = model(inputs) + loss = criterion(outputs, targets) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + return train_loss / len(train_loader.dataset) + +def validate(model, val_loader, criterion, device): + model.eval() + val_loss = 0.0 + with torch.no_grad(): + for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + loss = criterion(outputs, targets) + val_loss += loss.item() * inputs.size(0) + return val_loss / len(val_loader.dataset) + +def test(model, test_loader, criterion, device): + path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Kuro_exp/results' + model.eval() + test_loss = 0.0 + all_inputs = [] + all_targets = [] + all_outputs = [] + + with torch.no_grad(): + for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + + # Collect results + all_inputs.append(inputs.cpu().numpy()) + all_targets.append(targets.cpu().numpy()) + all_outputs.append(outputs.cpu().numpy()) + + loss = criterion(outputs, targets) + test_loss += loss.item() * inputs.size(0) + + all_inputs = 
np.concatenate(all_inputs, axis=0) + all_targets = np.concatenate(all_targets, axis=0) + all_outputs = np.concatenate(all_outputs, axis=0) + + if local_rank == 0: + np.save(f'{path}/{backbone}_inputs.npy', all_inputs) + np.save(f'{path}/{backbone}_targets.npy', all_targets) + np.save(f'{path}/{backbone}_outputs.npy', all_outputs) + + return test_loss / len(test_loader.dataset) +# ============================== Main Training Loop ============================== +best_val_loss = float('inf') +best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Kuro_exp/checkpoints/{backbone}_best_model.pth' + +if local_rank == 0 and os.path.exists(best_model_path): + try: + logging.info('Loading best model from checkpoint.') + checkpoint = torch.load(best_model_path, map_location=device) + model.load_state_dict(checkpoint) + except Exception as e: + logging.error(f'Error loading model checkpoint: {e}') + +for epoch in range(num_epochs): + if local_rank == 0: + logging.info(f'Epoch {epoch + 1}/{num_epochs}') + train_loss = train(model, train_loader, criterion, optimizer, device) + val_loss = validate(model, test_loader, criterion, device) + + scheduler.step() + + if local_rank == 0: + current_lr = optimizer.param_groups[0]['lr'] + logging.info(f'Current Learning Rate: {current_lr:.10f}') + + if val_loss < best_val_loss: + best_val_loss = val_loss + torch.save(model.state_dict(), best_model_path) + + logging.info(f'Train Loss: {train_loss * num_gpus:.7f}, Val Loss: {val_loss * num_gpus:.7f}') + +if local_rank == 0: + try: + model.load_state_dict(torch.load(best_model_path)) + test_loss = test(model, test_loader, criterion, device) + logging.info("Testing completed and best model saved.") + except Exception as e: + logging.error(f'Error loading model checkpoint during testing: {e}') + +dist.destroy_process_group() \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton_G_uv-checkpoint.py 
b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton_G_uv-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..3b288763c7feeea4725daf0541e4eaf2e37c84aa --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton_G_uv-checkpoint.py @@ -0,0 +1,177 @@ +import os +import random +import torch +import numpy as np +import torch.nn as nn +import torch.optim as optim +import torch.utils.data as data +import torch.distributed as dist +import netCDF4 as nc +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from model.Triton_model import * +from torch.optim.lr_scheduler import CosineAnnealingLR +from dataloader_api.dataloader_kuroshio_G_uv import * + +# Setup logging +backbone = 'Triton_Gulf_uv_20250218_exp1' +logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/logs/{backbone}_training_log.log', + level=logging.INFO, + format='%(asctime)s %(message)s') + +# Set a specific seed +seed = 42 +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False +set_seed(seed) + +# ========================== Distributed Training Setup ========================== +dist.init_process_group(backend='nccl') +local_rank = int(os.environ['LOCAL_RANK']) +torch.cuda.set_device(local_rank) +device = torch.device("cuda", local_rank) +num_gpus = torch.cuda.device_count() + +def reduce_mean(tensor, nprocs): + rt = tensor.clone() + dist.all_reduce(rt, op=dist.ReduceOp.SUM) + rt /= nprocs + return rt + +# ============================== Data Loading ============================== +train_loader, val_loader, test_loader, mean, std = load_data( + data_path='/jizhicfs/easyluwu/ocean_project/kuro/ft_local', + batch_size=8, + val_batch_size=8, + horizon=10, + 
num_workers=8 + ) + +for input_frames, output_frames in iter(train_loader): + print(input_frames.shape, output_frames.shape) # [B, T, C, H, W] + break + +# ============================== Model Setup ============================== +model = Triton( + shape_in=(10, 2, 128, 128), + spatial_hidden_dim=256, + output_channels=2, + temporal_hidden_dim=512, + num_spatial_layers=4, + num_temporal_layers=8) + +model = model.to(device) +model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=False) + +# ============================== Criterion and Optimizer ============================== +criterion = nn.MSELoss() +optimizer = optim.Adam(model.parameters(), lr=1e-2) + +num_epochs = 2000 +scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0) + +# ============================== Training, Validation, and Testing Functions ============================== +def train(model, train_loader, criterion, optimizer, device): + model.train() + train_loss = 0.0 + for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + optimizer.zero_grad() + outputs = model(inputs) + loss = criterion(outputs, targets) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + return train_loss / len(train_loader.dataset) + +def validate(model, val_loader, criterion, device): + model.eval() + val_loss = 0.0 + with torch.no_grad(): + for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + loss = criterion(outputs, targets) + val_loss += loss.item() * inputs.size(0) + return val_loss / len(val_loader.dataset) + +def test(model, test_loader, criterion, device): + path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results' + 
model.eval() + test_loss = 0.0 + all_inputs = [] + all_targets = [] + all_outputs = [] + + with torch.no_grad(): + for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + + # Collect results + all_inputs.append(inputs.cpu().numpy()) + all_targets.append(targets.cpu().numpy()) + all_outputs.append(outputs.cpu().numpy()) + + loss = criterion(outputs, targets) + test_loss += loss.item() * inputs.size(0) + + all_inputs = np.concatenate(all_inputs, axis=0) + all_targets = np.concatenate(all_targets, axis=0) + all_outputs = np.concatenate(all_outputs, axis=0) + + if local_rank == 0: + np.save(f'{path}/{backbone}_inputs.npy', all_inputs) + np.save(f'{path}/{backbone}_targets.npy', all_targets) + np.save(f'{path}/{backbone}_outputs.npy', all_outputs) + + return test_loss / len(test_loader.dataset) +# ============================== Main Training Loop ============================== +best_val_loss = float('inf') +best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth' + +if local_rank == 0 and os.path.exists(best_model_path): + try: + logging.info('Loading best model from checkpoint.') + checkpoint = torch.load(best_model_path, map_location=device) + model.load_state_dict(checkpoint) + except Exception as e: + logging.error(f'Error loading model checkpoint: {e}') + +for epoch in range(num_epochs): + if local_rank == 0: + logging.info(f'Epoch {epoch + 1}/{num_epochs}') + train_loss = train(model, train_loader, criterion, optimizer, device) + val_loss = validate(model, test_loader, criterion, device) + + scheduler.step() + + if local_rank == 0: + current_lr = optimizer.param_groups[0]['lr'] + logging.info(f'Current Learning Rate: {current_lr:.10f}') + + if val_loss < best_val_loss: + best_val_loss = val_loss + torch.save(model.state_dict(), best_model_path) + + 
logging.info(f'Train Loss: {train_loss * num_gpus:.7f}, Val Loss: {val_loss * num_gpus:.7f}') + +if local_rank == 0: + try: + model.load_state_dict(torch.load(best_model_path)) + test_loss = test(model, test_loader, criterion, device) + logging.info("Testing completed and best model saved.") + except Exception as e: + logging.error(f'Error loading model checkpoint during testing: {e}') + +dist.destroy_process_group() \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton_K_uv-checkpoint.py b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton_K_uv-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..c0bacae5a1f5e0aa273ab98a3319de9c5d48eab1 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/.ipynb_checkpoints/train_triton_K_uv-checkpoint.py @@ -0,0 +1,177 @@ +import os +import random +import torch +import numpy as np +import torch.nn as nn +import torch.optim as optim +import torch.utils.data as data +import torch.distributed as dist +import netCDF4 as nc +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from model.Triton_model import * +from torch.optim.lr_scheduler import CosineAnnealingLR +from dataloader_api.dataloader_kuroshio_K_uv import * + +# Setup logging +backbone = 'Triton_Kuroshio_uv_20250218_exp1' +logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/logs/{backbone}_training_log.log', + level=logging.INFO, + format='%(asctime)s %(message)s') + +# Set a specific seed +seed = 42 +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False +set_seed(seed) + +# ========================== Distributed Training Setup ========================== 
+dist.init_process_group(backend='nccl') +local_rank = int(os.environ['LOCAL_RANK']) +torch.cuda.set_device(local_rank) +device = torch.device("cuda", local_rank) +num_gpus = torch.cuda.device_count() + +def reduce_mean(tensor, nprocs): + rt = tensor.clone() + dist.all_reduce(rt, op=dist.ReduceOp.SUM) + rt /= nprocs + return rt + +# ============================== Data Loading ============================== +train_loader, val_loader, test_loader, mean, std = load_data( + data_path='/jizhicfs/easyluwu/ocean_project/kuro/ft_local', + batch_size=8, + val_batch_size=8, + horizon=10, + num_workers=8 + ) + +for input_frames, output_frames in iter(train_loader): + print(input_frames.shape, output_frames.shape) # [B, T, C, H, W] + break + +# ============================== Model Setup ============================== +model = Triton( + shape_in=(10, 2, 128, 128), + spatial_hidden_dim=256, + output_channels=2, + temporal_hidden_dim=512, + num_spatial_layers=4, + num_temporal_layers=8) + +model = model.to(device) +model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=False) + +# ============================== Criterion and Optimizer ============================== +criterion = nn.MSELoss() +optimizer = optim.Adam(model.parameters(), lr=1e-2) + +num_epochs = 2000 +scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0) + +# ============================== Training, Validation, and Testing Functions ============================== +def train(model, train_loader, criterion, optimizer, device): + model.train() + train_loss = 0.0 + for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + optimizer.zero_grad() + outputs = model(inputs) + loss = criterion(outputs, targets) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + return train_loss / len(train_loader.dataset) + 
+def validate(model, val_loader, criterion, device): + model.eval() + val_loss = 0.0 + with torch.no_grad(): + for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + loss = criterion(outputs, targets) + val_loss += loss.item() * inputs.size(0) + return val_loss / len(val_loader.dataset) + +def test(model, test_loader, criterion, device): + path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results' + model.eval() + test_loss = 0.0 + all_inputs = [] + all_targets = [] + all_outputs = [] + + with torch.no_grad(): + for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + + # Collect results + all_inputs.append(inputs.cpu().numpy()) + all_targets.append(targets.cpu().numpy()) + all_outputs.append(outputs.cpu().numpy()) + + loss = criterion(outputs, targets) + test_loss += loss.item() * inputs.size(0) + + all_inputs = np.concatenate(all_inputs, axis=0) + all_targets = np.concatenate(all_targets, axis=0) + all_outputs = np.concatenate(all_outputs, axis=0) + + if local_rank == 0: + np.save(f'{path}/{backbone}_inputs.npy', all_inputs) + np.save(f'{path}/{backbone}_targets.npy', all_targets) + np.save(f'{path}/{backbone}_outputs.npy', all_outputs) + + return test_loss / len(test_loader.dataset) +# ============================== Main Training Loop ============================== +best_val_loss = float('inf') +best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth' + +if local_rank == 0 and os.path.exists(best_model_path): + try: + logging.info('Loading best model from checkpoint.') + checkpoint = torch.load(best_model_path, map_location=device) + model.load_state_dict(checkpoint) + except 
Exception as e: + logging.error(f'Error loading model checkpoint: {e}') + +for epoch in range(num_epochs): + if local_rank == 0: + logging.info(f'Epoch {epoch + 1}/{num_epochs}') + train_loss = train(model, train_loader, criterion, optimizer, device) + val_loss = validate(model, test_loader, criterion, device) + + scheduler.step() + + if local_rank == 0: + current_lr = optimizer.param_groups[0]['lr'] + logging.info(f'Current Learning Rate: {current_lr:.10f}') + + if val_loss < best_val_loss: + best_val_loss = val_loss + torch.save(model.state_dict(), best_model_path) + + logging.info(f'Train Loss: {train_loss * num_gpus:.7f}, Val Loss: {val_loss * num_gpus:.7f}') + +if local_rank == 0: + try: + model.load_state_dict(torch.load(best_model_path)) + test_loss = test(model, test_loader, criterion, device) + logging.info("Testing completed and best model saved.") + except Exception as e: + logging.error(f'Error loading model checkpoint during testing: {e}') + +dist.destroy_process_group() \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_high_kuro.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_high_kuro.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0491a9076081b9e1699dd6428ee073ae3740c0de Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_high_kuro.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio.cpython-310.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0655e640c66eb66e0fb918f2b28d4deb4c1b2558 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio.cpython-310.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio.cpython-38.pyc 
b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d598bf02bccfcb267916c9bf3f36fe836b6b5bba Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_G_uv.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_G_uv.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..74abbf5111842b31987e8dffc798f158d3466ffb Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_G_uv.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_K_uv.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_K_uv.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da9f56fd687feefab7fb88df8a0b7a19bdf306cc Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_K_uv.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a80d9a99b969f44b9d0a7936e497fb4b9bdb90b0 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_128.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_128.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..599e186b9c591c0950ad24bf620f225b89cd4525 Binary files /dev/null and 
b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_128.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_64.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_64.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..98b7fdc5acdc870e13b289e013c50c29e94a896d Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_64.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_single.cpython-38.pyc b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_single.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08efd8ca37e5e681a3efc2c461a5f9dd088cd35a Binary files /dev/null and b/Exp3_Kuroshio_forecasting/dataloader_api/__pycache__/dataloader_kuroshio_ruiqi_single.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/dataloader_kuroshio_128.py b/Exp3_Kuroshio_forecasting/dataloader_api/dataloader_kuroshio_128.py new file mode 100644 index 0000000000000000000000000000000000000000..9b2ebf8ac5f0f1f77b009bd3ae56bdd125c9a113 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/dataloader_kuroshio_128.py @@ -0,0 +1,101 @@ +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import netCDF4 as nc +import numpy as np + +class OceanCurrentDataset(Dataset): + def __init__(self, data_path, input_steps=10, output_steps=10, transform=None): + self.data_path = data_path + self.input_steps = input_steps + self.output_steps = output_steps + self.transform = transform + self.total_steps = input_steps + output_steps + + self.data = self._load_and_process_data() + self.mean, self.std = 0, 1 + + def _load_and_process_data(self): 
+ with nc.Dataset(self.data_path, 'r') as ds: + def process_var(var): + arr = var[:] + if '_FillValue' in var.ncattrs(): + fill_value = var._FillValue + arr = np.ma.masked_values(arr, fill_value).filled(np.nan) + return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) + + ugos = process_var(ds['ugos']) + vgos = process_var(ds['vgos']) + + # [time, channels, lat, lon] + return torch.stack([ugos, vgos], dim=1) + + def _compute_stats(self): + return torch.mean(self.data[:10000]), torch.std(self.data[:10000]) + + def __len__(self): + return len(self.data) - self.total_steps + 1 + + def __getitem__(self, idx): + window = self.data[idx:idx+self.total_steps] # [T_total, C, H, W] + + window = (window - self.mean) / self.std + + input_seq = window[:self.input_steps] + target_seq = window[self.input_steps:] + + if self.transform: + input_seq = self.transform(input_seq) + target_seq = self.transform(target_seq) + + return input_seq[:,:,::2,::2], target_seq[:,:,::2,::2] + +def create_dataloaders(config): + full_dataset = OceanCurrentDataset( + data_path=config['data_path'], + input_steps=config['input_steps'], + output_steps=config['output_steps'] + ) + + train_size = 10000 - config['input_steps'] - config['output_steps'] + 1 + val_size = 500 + test_size = len(full_dataset) - train_size - val_size + + train_dataset, val_dataset, test_dataset = torch.utils.data.random_split( + full_dataset, [train_size, val_size, test_size], + generator=torch.Generator().manual_seed(config['seed']) + ) + + train_sampler = DistributedSampler(train_dataset, shuffle=True) + val_sampler = DistributedSampler(val_dataset, shuffle=False) + test_sampler = DistributedSampler(test_dataset, shuffle=False) + + dataloader_train = DataLoader( + train_dataset, + batch_size=config['batch_size'], + sampler=train_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_val = DataLoader( + val_dataset, + batch_size=config['val_batch_size'], + sampler=val_sampler, + 
num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_test = DataLoader( + test_dataset, + batch_size=config['val_batch_size'], + sampler=test_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + return dataloader_train, dataloader_val, dataloader_test, full_dataset.mean, full_dataset.std \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/dataloader_kuroshio_256.py b/Exp3_Kuroshio_forecasting/dataloader_api/dataloader_kuroshio_256.py new file mode 100644 index 0000000000000000000000000000000000000000..9c46ebfc280a41b428738dd53393dcf684a4f13c --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/dataloader_kuroshio_256.py @@ -0,0 +1,118 @@ +import torch +import torch.distributed as dist +from torch.utils.data import Dataset, DataLoader +from torch.utils.data.distributed import DistributedSampler +import netCDF4 as nc +import numpy as np + +class OceanCurrentDataset(Dataset): + def __init__(self, data_path, input_steps=10, output_steps=10, transform=None): + self.data_path = data_path + self.input_steps = input_steps + self.output_steps = output_steps + self.transform = transform + self.total_steps = input_steps + output_steps + + self.data = self._load_and_process_data() + self.mean, self.std = 0, 1 + + def _load_and_process_data(self): + with nc.Dataset(self.data_path, 'r') as ds: + def process_var(var): + arr = var[:] + if '_FillValue' in var.ncattrs(): + fill_value = var._FillValue + arr = np.ma.masked_values(arr, fill_value).filled(np.nan) + return torch.nan_to_num(torch.FloatTensor(arr), nan=0.0) + + ugos = process_var(ds['ugos']) # (time, lat, lon) + vgos = process_var(ds['vgos']) + + # [time, channels, lat, lon] + return torch.stack([ugos, vgos], dim=1) + + def _compute_stats(self): + return torch.mean(self.data[:10000]), torch.std(self.data[:10000]) + + def __len__(self): + return len(self.data) - self.total_steps + 1 + + def __getitem__(self, idx): 
+ window = self.data[idx:idx+self.total_steps] # [T_total, C, H, W] + + window = (window - self.mean) / self.std + + input_seq = window[:self.input_steps] + target_seq = window[self.input_steps:] + + if self.transform: + input_seq = self.transform(input_seq) + target_seq = self.transform(target_seq) + + return input_seq, target_seq + +def create_dataloaders(config): + full_dataset = OceanCurrentDataset( + data_path=config['data_path'], + input_steps=config['input_steps'], + output_steps=config['output_steps'] + ) + + train_size = 10000 - config['input_steps'] - config['output_steps'] + 1 + val_size = 500 + test_size = len(full_dataset) - train_size - val_size + + train_dataset, val_dataset, test_dataset = torch.utils.data.random_split( + full_dataset, [train_size, val_size, test_size], + generator=torch.Generator().manual_seed(config['seed']) + ) + + train_sampler = DistributedSampler(train_dataset, shuffle=True) + val_sampler = DistributedSampler(val_dataset, shuffle=False) + test_sampler = DistributedSampler(test_dataset, shuffle=False) + + dataloader_train = DataLoader( + train_dataset, + batch_size=config['batch_size'], + sampler=train_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_val = DataLoader( + val_dataset, + batch_size=config['val_batch_size'], + sampler=val_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + dataloader_test = DataLoader( + test_dataset, + batch_size=config['val_batch_size'], + sampler=test_sampler, + num_workers=config['num_workers'], + pin_memory=True, + drop_last=True + ) + + return dataloader_train, dataloader_val, dataloader_test, full_dataset.mean, full_dataset.std + +# config = { +# 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', +# 'input_steps': 10, +# 'output_steps': 10, +# 'batch_size': 1, +# 'val_batch_size': 1, +# 'num_workers': 8, +# 'seed': 42 +# } +# dist.init_process_group(backend='nccl') + +# train_loader, 
val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + +# for sample_input, sample_target in train_loader: +# print(sample_input.shape, sample_target.shape) +# break \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/dataloader_api/kuro_vis.ipynb b/Exp3_Kuroshio_forecasting/dataloader_api/kuro_vis.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..f1cf10c1c10273c3bb207b96e70ee4ea1fcb6a24 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/dataloader_api/kuro_vis.ipynb @@ -0,0 +1,260 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 3, + "id": "8c9ccebe-017d-40d8-9d0f-bde54956849e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\n", + "root group (NETCDF4 data model, file format HDF5):\n", + " Conventions: CF-1.6\n", + " Metadata_Conventions: Unidata Dataset Discovery v1.0\n", + " cdm_data_type: Grid\n", + " comment: Sea Surface Height measured by Altimetry and derived variables\n", + " contact: servicedesk.cmems@mercator-ocean.eu\n", + " creator_email: servicedesk.cmems@mercator-ocean.eu\n", + " creator_name: CMEMS - Sea Level Thematic Assembly Center\n", + " creator_url: http://marine.copernicus.eu\n", + " date_created: 2024-10-23T12:55:06Z\n", + " geospatial_lat_max: 89.9375\n", + " geospatial_lat_min: -89.9375\n", + " geospatial_lat_resolution: 0.125\n", + " geospatial_lat_units: degrees_north\n", + " geospatial_lon_max: 179.9375\n", + " geospatial_lon_min: -179.9375\n", + " geospatial_lon_resolution: 0.125\n", + " geospatial_lon_units: degrees_east\n", + " geospatial_vertical_max: 0.0\n", + " geospatial_vertical_min: 0.0\n", + " geospatial_vertical_positive: down\n", + " geospatial_vertical_resolution: point\n", + " geospatial_vertical_units: m\n", + " history: 2024-10-23 12:55:06Z: Creation\n", + " institution: CLS, CNES\n", + " keywords: Oceans > Ocean Topography > Sea Surface Height\n", + " keywords_vocabulary: NetCDF COARDS Climate and Forecast Standard Names\n", 
+ " license: http://marine.copernicus.eu/web/27-service-commitments-and-licence.php\n", + " platform: Cryosat-2 New Orbit, SWOT Nadir science, Sentinel-3B, Altika Drifting Phase, Sentinel-6A, Haiyang-2B, Sentinel-3A, Jason-3 Interleaved\n", + " processing_level: L4\n", + " product_version: vNov2024\n", + " project: COPERNICUS MARINE ENVIRONMENT MONITORING SERVICE (CMEMS)\n", + " references: http://marine.copernicus.eu\n", + " software_version: 8.0_MIOST_DT2024_baseline\n", + " source: Altimetry measurements\n", + " ssalto_duacs_comment: The reference mission used for the altimeter inter-calibration processing is Topex/Poseidon between 1993-01-01 and 2002-04-23, Jason-1 between 2002-04-24 and 2008-10-18, OSTM/Jason-2 between 2008-10-19 and 2016-06-25, Jason-3 since 2016-06-25.\n", + " standard_name_vocabulary: NetCDF Climate and Forecast (CF) Metadata Convention Standard Name Table v37\n", + " summary: SSALTO/DUACS Delayed-Time Level-4 sea surface height and derived variables measured by multi-satellite altimetry observations over Global Ocean.\n", + " time_coverage_duration: P1D\n", + " time_coverage_end: 2023-12-31T12:00:00Z\n", + " time_coverage_resolution: P1D\n", + " time_coverage_start: 2023-12-30T12:00:00Z\n", + " title: DT merged all satellites Global Ocean Gridded SSALTO/DUACS Sea Surface Height L4 product and derived variables\n", + " copernicusmarine_version: 1.3.3\n", + " dimensions(sizes): time(11322), latitude(256), longitude(256)\n", + " variables(dimensions): int32 vgos(time, latitude, longitude), int32 ugos(time, latitude, longitude), float32 latitude(latitude), float32 longitude(longitude), float32 time(time)\n", + " groups: " + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import numpy as np\n", + "import netCDF4 as nc\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.animation import FuncAnimation\n", + "\n", + "data = 
nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "data" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "67fb44cc-3e54-45cc-8bec-2166624c38b5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(11322, 256, 256) (11322, 256, 256)\n" + ] + } + ], + "source": [ + "import numpy as np\n", + "import netCDF4 as nc\n", + "import matplotlib.pyplot as plt\n", + "from matplotlib.animation import FuncAnimation\n", + "\n", + "# variables(dimensions): int32 vgos(time, latitude, longitude), int32 ugos(time, latitude, longitude), float32 latitude(latitude), float32 longitude(longitude), float32 time(time)\n", + "\n", + "data = nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "vgos = data['vgos']\n", + "ugos = data['ugos']\n", + "print(vgos.shape, ugos.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "561bed40-5e78-4f11-a66e-ded8032f727c", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAa4AAAGiCAYAAAC/NyLhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9XawtS3YWCn4jInOutfbZdU65fly+hatsqwEb6Ma+18aFBbygQpaREMb1gC2EkEFNP2A/UOIBS4DxkyVAwvw/IMTPgyWgJZAQkiXw1RUtVAZUSN2X5qfB7W4bXKfsclWdfc7ee62ZGTH6YYwRMSIycs659inbvXzWkPZe8ydnZmRkxPjG/yBmZjzSIz3SIz3SIz0QCr/aA3ikR3qkR3qkR7oPPQLXIz3SIz3SIz0oegSuR3qkR3qkR3pQ9Ahcj/RIj/RIj/Sg6BG4HumRHumRHulB0SNwPdIjPdIjPdKDokfgeqRHeqRHeqQHRY/A9UiP9EiP9EgPih6B65Ee6ZEe6ZEeFD0C1yM90iM90iM9KPpVA66/8Tf+Br7+678e19fX+MQnPoF/+2//7a/WUB7pkR7pkR7pAdGvCnD9w3/4D/HpT38aP/zDP4x//+//Pb75m78Z3/md34lf+IVf+NUYziM90iM90iM9IKJfjSK7n/jEJ/Dbfttvw1//638dAJBzxsc+9jH84A/+IP70n/7Tv9LDeaRHeqRHeqQHRNOv9AWPxyM++9nP4od+6IfKZyEEfPKTn8RnPvOZ4W/u7u5wd3dX3uec8cUvfhEf/OAHQUS/7GN+pEd6pEd6pK8sMTPefvttfPSjH0UI9zP+/YoD1xe+8AWklPCRj3yk+fwjH/kI/vN//s/D3/zoj/4ofuRHfuRXYniP9EiP9EiP9CtIP/dzP4ev/dqvvddvfsWB61Xoh37oh/DpT3+6vH/rrbfw8Y9/HD/3cz+H119/vXz+f/8fX8YUCL/lf3oDAPBP/5+fw3UMWDLj9/3mr9mc96f+P1/E+64iribCmhnvHBO+8HzB+69nvP8m4ukc8UsvV1xNhEiEF0sGAMRAuJ4IVzFgIuBqCohO8csAvAE2MyNlIDEjM8AAMuQzQCSPzEDW3zAYBELYUSblHPJ7BpAyY8kZnIHEwJJZrsnyd4+CaquRCIEIc7D3AKkAZN/FQCAARPYZEAggIsQABFCZA3Ln9nMg94ZmDtp52h3qkNidU95v58heZ5Z5ysxYEyOxjGnRiya2Y3kzNwDK3MyBQAGYQ2jmJBDKMyOSObCxMbtnr/duv5sDYY4EP13MwJoZz4+5PNfDJNezezAq49SxTEGfBxEOQZ5JDCSf6/MKbnz+tVw819e6COxq/vmwe2/PwX7pn0P/jPrzNJfu9oy/lv+drZ3x78bn7mm0t3rjTXAzc86uU8bo1rSfH79GbW3aegDkeds921ocPIqyHwF51jZuW392b3Yvss7qugzlKNnngaj8vrzWcwcCiDOQV1BKAGdQTgCvMjjOoLwOJjKAKQBh0tcRmGZwmMBhwpLlft969gzf8pu/Ee973/vOzO6WfsWB60Mf+hBijPj85z/ffP75z38eX/M1W3ABgKurK1xdXW0+f/311xvg+uALWQz22Uc+8BK3a0ZecnNcueb6Fp6HCd/4xhMBkauEZVrwwScHXE0BUyT81g9NeGfJeHaXkY4rsi6m6RBBAaBAQCRhLMokekqZFWA8g9RFrAt9yQZE8r0Hrj1rqJ3PGJ2cVwFLx5n4/E425hxCuzFGjDmGClzGDG3B22ujDVjx9v6ByoSMUdgpgtt8dh77LdDOn5yPi9DQM4fVgfmS7DVw2AEuTw3Au3sOJKDTA7tnIv4+RpS5PkO7xyVlJGa8WBcszEiBEZmAJMcujjtHIlxNATMRrqeA68OEOYoQcogVsOYowpcXPAgdcHlOabQDXv79CLiyY9b+ebX3vp3rU7/ZAwV/7f43e3unN0x5l8Pevgsn4KsHUj++3H3uBSo
TZJJbnwCG+9fvU3vfrz0bfw9iwd1jA2IEtxbqe1srAcIHoq2TLKBFyYEX83btUACHKH/jBIQJTAFLBuYs93m35s3cX0q/4sB1OBzwrd/6rfjJn/xJfPd3fzcA8Vn95E/+JH7gB37g3Z07UDMJ3/mN1Rz5f/3ffx6/9OKIL71YcLdmfOOHn+L1qwlP5ogvvVzxO77hg7vn/fwvPMPtmvG5t+9wPQVcTRFhBW6mCALAQTdLt0BK3ItKypBDwPIRMoDAhASWjRQAZiflbTYOth9kApPJ8HKFEVSlThQ9yaBDZbsNo9PXBl4G1LbQYzdeImrAyt+/p+g3lBuLbaJ23I4xEkBu/tKF0nYkqppVIOTMiERIzGUO/Pxk/TwJUuqg9PsEhMDIXJmI/JTL81sHeGBn9xrUkqqWnBj6T8D3bk0F5Jp5i0DOXCYqM4OZGsbPJPNGkPtk5u3iGoGWfU5h+Nx6YaFcH+1zwuC3/hzN5Tqg8p81Gp8DhM3v/Dnda7+Wkv4t08Bc9ldi3cPc/ibT6QX2lQhzs3Uo42hPmFien4EagoJc4rL2bBxEdd6JSfaK7pMMuTcmBkHuV0CMyx5MBm4ERIYDsUlex4NoY5zBunbIrSEWqb5oX4mBpJYOEdL54v06ol8VU+GnP/1p/JE/8kfwbd/2bfj2b/92/NiP/RieP3+O7//+739X5/31Xy1a1c998R187ANPm++ezBHxtSs8PUxYUsbTQ8STOQIAvvRyOXneYxLpYMmMmZVRKBXTUNE8nImPlIEo4yPi4Q6W3xMCM/KFEmL5zMAunF4FU6Bu4+si78fixtSbv2ThViDrQctLdJlR7mdvv/ealQGYaW/yXf2MWT5PzLIZ2e5AwKuZwzObQs4rm6cHL/v+HHgl5iI8ZDBCYNiTqgBWhQZ/vpFknVUbtNe2zkzLEpMzl/MDQMgA4vgeM9d5NMHK/u7SyD7VH4JW0/L3lrgFsdSsuf3L5sED29Ou5PjtOTcg2J0zc3vjgbrf25rTaxHVPdMfu8H9ezBhAm3GdinZuikAFuoaMeEJqMKnLFX5DirEkBP0ipB9BsRiQPm+mpwDiAJGcRVljUDWeNK1a9plGrPCi+lXBbj+4B/8g/jFX/xF/Lk/9+fw5ptv4lu+5VvwEz/xE5uAjVelm2m7Mz/0ZMbNFHFQf8Lbdwm3a8Y7xxU/+9btyfPdrWK6+cD1jBDEDDOHgEMkXE2E6xhwHamo1J5y5W2y+JUh2kMjEq0LQSTl6H53LlOBSI9Xqaj/ridvVunNFs1vOzPlRuMiM5VVk0ID2DCNUoAklfeyKTLzEJgam/voHqgylKLJKUoxi+aZC3duKdoAACCwjCPL+ADHtJgawcRoAzp6rgUiiQciIBMW/WLpFJjs3qeO4ScVhrwZMGd5nRyA+XsJJKa/OQY1X1IxXxoVrSQTKDASy1wREciBWv1B3r7fA6/GLFu1LGNOI1Pw3nI+55c6Zw5sfW88/Ny+NQpUNfRiVrPfDbSw7Exvdn2/PP1r5iq8FWEKVfjrx0Wo69OvMy9I9VS0slSPNeEpUl3rJkD1GpjtSbBYBhLX+/eWjrIXswmyVQjvLSQ9jbRvE2x6k/d96Vclj+vd0rNnz/DGG2/grbfeGvquvvDsOT70+mvNZ//7z7+lARgrXiwJT+aIL75c8M4xFWD6P3/71w2v97/99BfwYkmIBFxNEdcx4I3rCU8PAl7X6kvwPhmb1GQAUcw/3DxIO67VhlrfTf28vrGgCKDaqj01Jo4T5+5NLnK+fXu4Nw8StkDTX9fOb5qCLeRy7u4cJime8m8lN59mdmDIa7sv8xuNHOGMVgvy2o831clnVaI1im6ygxNW+oAOT70Pbe/8S+YCYnYfOXPzDAOhCFCRCE/mqH6tgCeHWASKOZh/gjCV1/WzaFq1Adaej4tCK0E381uZ0YZRnfBJGe0YKId0Dqz2fGl71Gv81IGTrUl5vf0O2Pq9RubNkcAIuCAM3vq5Rj6uvUC
r1geL+nrH9O/vo8xFdx/b+2yDPOze987XP4ue5wDAs7ef4Vt//cd2+fgpehBRhfelxMD/+NI7+HVfVc2FRPLgb9eMF0vG1RSLYzszI63bRfH/+Pm3cLum8j5QdXTPAcUBfoj9Y6/gVZgwqQhmhgJCMXeZBsLux8TiqyJym7/bWCPnqr9foJrXgNYvVM8tWpsHE//7U9cYkblbemHKJL9e2/KgF9110DFTUsk/GBPVeQw6wSmrFkWsph7VcgE1oTBgUjAgQTUAGISgg12QG00sm+gsk7+hcCrq4itMjTbrQGt2atM9U2HOk/kpOmIHWlWgGIPWKEhB3t9vKOe0Km6+P08Ez1TVGgLeMHBP50DLPsvq37Tzk/kV3Xn29skenYoONjM2IMLXnunOvxAz4fCI8r25cgmmobHej96HBZKd4AmnIk7TfaSWjn5NAtcxMY5nPH9zIHzoyQFLlqCLP/y/fKz5/t/+7BfxZI64niKWxLiOAVdTwJNZwucPUbStOZCEhJpZxW12AhpAokDq76GiMYiERZuNTLJiwKDCDCIJQ7ANM4U2iGFPba8O8nouYTStE9+DnZek/Hm9GaG5BsSn5f0CQDWV2ITEBly76DbOxabWh9myzSkFcRDbLAnKgYkQg6BVpurvApl/zH5h56tSuoFYDFGkXmIERgNgMVLjr+m1LPlsfwP3wR9lVXT+tWiTke0YJ/bD+RdJIhyD+wuI1B42NsByKTke3TOkcNosiFbb2nzvvjul0TdAdg8Px6kADnt5Lvio3xPmIzQAK1ZkMl9R9QdSs2YrYO097sAVvMw06P2vJg9dGpzQa+nNfZyQnYpfNnMN3HDf72GhHV+ntB7YLBtqv9/TuvbokojnPfo1CVw/+9Ydlsz4P3y4zQ94sSTMMeBDTwI+9voV3lkSXi6E/+l921D7G9XIriOQcsSSGW9cT84EI4tmIgA5g1iNBMYAnNmFAIACIoBYTC+kwEWN47Icj7rQWqmybhqvBfXBES1Rw0SqCW2g0iuz3yPb5IHs93KiQCKVWUyjN7FszCz6XaNdZQut1XnT9+UOKAAhNFos6zwQ178xiEYpTmu17+uPWIFBoRHlVqmaEwXE5C6WlDGDiulmHsyHpRDskd+crYRO5ZzJosO4+rsQgUVBvGdY5gs08LJcMDEJtjl2RLZW6rUzZE685O+FLaMesPz68Sbv8ettagJQwc3OP5yzExxvpHnclwF6Zm7raUT+sfYmc/tsOEYAUdeXAJ9aTdTqYj40UkuAySi9Hzaf0UhOgVaJQPRqpY3vzBzuXffUPPd7oNf6GvO6aYevjlu/NoHLzFH/4XNvAZCNsmYuAQVzJBxTxhQI3/qxr9o9jy3MKRCezEEk3QBMlh8TqDLbnKvA6jSH7UklPJTChEihglRAIwH1pr4RoIz8S0V7cccx7BzUScZO+3KbqfUljFeXMTzb+CbdmbYmgRjtPNLovYbUngKthna0Ah/4EpyGlUnNsMo4FBPEhEF+nsVIxKT+JAZmdSIGi7Dp5qPRtjouZsz3pGk1yPkMHA3MEIVJzFy1PPOFmXnSg5fPJZPoL5dUit7vWEPizfR8inl70DILAUPMPM1r1bJS3s+ls/OdirK8D51j7EBloMbI7bqnnov342ySdzvQGp2lWFlMMGAqofTeAsFUzdg2ZzEQcrJnDbRhxuM52ruX/p7774x6f1r7XOyYfbDbghYBqbVKlOAnt31PCSjn6NcMcP3MF97GMTFeLBlzCCUJc8mMNUs4u0mlcwx4+5jxP3/t+3fPZ88iMfB/+ugb5fOf/sW3MQcNyFBtq1BS85Yl5Smxf7BZtYeYgXgQDQz6cLnahIt5omgKZD8vtAtYuY5DziVaXqSqsYBIQ1JVMoOaCNX/Vnxeg6259Q3oe+bym34vbbQsG18PUJdwI8j4Tq37GKAgVSVfoJN+IZqHBzEPYHadED2Yb+djxDfIocUpYPDf2CV6xt6Hzu8loFqqApGP9txekVn
9f8X/IAeNAnruq2WVYCTUyi52T+cSbOu1t5+d86168ozU+3w8I7cxeYFD1qZ7r38riI0Ba29sxXdNTiAMNc+u5FUpeNltN+dT8/el99xrOpt77gDLg9VewNDou9G55foVtIaJ+ywgFsJpv905+jUDXMckm+erbiKAiJRR/FxTIGAK1fSST5dCAlAipw6dJH0ziYR7iASko2SQ51VKoQBoggoUMKjLwmJkATkHKJKoBxSznv+Bl+z9x/q3MblZGRYPALqaWfMuCliGULQwUmZjoeYl04RagOinrZiaXpWcf4UBIAbVvOqYy3H6z3JALEjAk+VfMdy43PhIElmc47wyEu8TM1NiDWy5733Vl97pv4nmGs6dAzNjGEVjUcHGna86y1sNwVfIsHOIeVAm0AQqM+8ac/Jm46pxtRpWD1YeyHyVkj5PzZs9R1L8PnETDNObyULHnPfAy1Pxc1EFLfEfjjWtkb+3Xn97frJcSScQ+sCowGKyLAJT8HtQjjNtv9XM70f9b/aeyaLREn2KhvFC+W0V6HoK5C0CrS92ZkImjXoMY4H4PvRrBrjMlHeIwoRSFpPR88U2H2MONTk0n/GMEoDf/DVvbD6f1UxIVvbEQMtKoShwNQwXndZlgMYu7ZJzEzlXGVUv3bVjtN825rbe1Jbt+FxMlVYNwa4l5vXqQLZ8K7umjcebMItZsEiKW83CTFL2XQN0ZirtggM2T8ZADSZQVE2gz4ljruA1oj5I0Mw4paIA0AST2HGn/AnnqZXiR+WFbPx75DU+zzRGkW7enNWf0j8rY0ajvPgerHzghS81ZkzN9tyS825awas644u1wZlL+0CZ3oQ70gb89z5E3IMWnQEtn3d4ylxo4wJajWsDYNGZVXWveDOrmd9NvJjRmtgu0VpOmQXr5612NUrJkO9PmXVJq3hA5U4CwECGS+62Z3l22Cfp1wxw3UwaIhxEill0op4v9UG3ZoHTnMibBz1J7TcC8iKgxazgtTa+LYpTBS9Po886cxlRKGDhNxfQalb+tyf9Q/beHPAOMP21LgWv5tQO0LxEL6Hp1dziAWssrW3nZRs4In99/ktPHrz2yPxgAErZLQMv+azVcPv8ObjP+lDnEfVARd0mPmeCOlVw1s7ZXK/7ffmt+82e3DaqL9lHCY40LA9YPonaF3q+T1Rcr0HJsVsAs/OOztODVw9kXtOSQtFb0LKKETamkVAwSkMxqjIQnwYw1fyr5aOarS14CFCe4CIEI+hiIBv5svbWwdiEe9oXWQNC9GAFL6l2067Td5u68WsGuOYg0WsETbgjCY+ej6oBsVSAn4JKUK8oQh8CIeYjaD22xSV7TWfkv/F5Mab52Of++LyCwrTVrqxS80irupR6u4lqeoHGjKWUObLDC4h0BxvI6OZOzE5K1XsoDPuCYfpTu0v11/XvRqbB5pymbZjUp2MsUWD2802Nqu0JfU3JU6rSOZAaMUMv1ffX35v/ETsZRox2x2+O6UBKPtv6tfaKFi8pN0A1SuAGHAgBGOXDldSAU9Q4fP3BJmypBm1bzmlajSlVBaxovGGgZZ17RsNKLwC8j9rmETCNymu2/j0NUgpMYKramP120ujIpAFovW8UOO1T7CkSkMu9sNyEAnvIp83m9hismktPvq7mffyWPf2aAa6XKwOohU3t+VxNBErA3crq4wqYgpj7/svnnwHwmgLwm77m9ZPXibyWyshMBFAEWdADra1248LiCerDciaxYlbU142GxnnLLvdMgXvUazE9WDog3VuMtgl609HebwywgIG2SO37PbqP62NoOh0Qu2ONgdgm782H0TGJXSIM6+ttxoeOwQ2Y4V5C9t48yfy0mp6N3YOU15K9yc+O81UeRsEYxiDtmmYO9P6rUkfRaVe9qWl8D3Xek5OYGt+Un8dADZidMwfely6pUN6bXv2zsu+BgbncghRQgcj7kkEouZxm9kzOF5sYJYioWjEIgEvqZhRrE7AN6NmrwFG0oUCafC/
PTcu4lhJoJS1kJ9DDU3Rr16dsWASsVG2hTSTufehBA9fd21/Cy3nGl24TfHFPTxIaUb9Ycsb//LUf2Bz3337h2UkfgxEVXxZL2X7I8iEt278pUjqqPsDZOVlCATAPZEM6B1YjM6TTrtiPxR07up6BFLDv7wDGzNv4UGXa9ZhzfoFzUNCbbfaCQ075uUbnbIBNf3bO5+QdzBeB2HCcdbymBdjntPObqojV9AYL2mFlZqFz5DfS/Uaar/fUgxWrJjACrJGWZUWC7fenTUujHLjt8VJQlotv65QmNkoMt3OcouKLBcoE551reQGjfrYV1Dx53yKINsFQQJuSgqx7DSJImSkRuSYxi1/uhHBwBrSM7B5HABYIu3U8T9X17lsA+bSNEN6dtgU8cODK07Us5EBYVy6h717rMokCkMlcdoy6UySsl6SyN9pWKMDApgX1/irzGfQPKmtUYXIaGtzLPZDK7vjOUHzSp7YDWD4IpGdk9rkHLZ+z05uTtuQWuQ2nk06Ht9g9Bq+xmRmHwZpwPD7RObPY6Pv+eqfG12v2l9AIZL325fPeCNhI+EaemTLqMYytudfutc+1KkET3JqdynkcUAHYgJUPtjCNC3CBGuV3FcR6Ck5r9f4quUd/59yAV50HKr/tw6/l83qcb/vhyVwxlh6QVYwhKPjr9aWUU+vDlQjU9px7FoASqOG+YyKnhVVLAEEWir1OqH7YvQ4I9sx68j5GP79+TKP3MZJqWTpPmSUBP47zvXoysAJQ8wwVtOYQmtqur0IPGriIMyZkvH4IWCbgmDLeWXLZnAvLprpbM966XaXh3o64xiyL8P/1C8+Q8gmTode2XIQeOrAp2tMofGZUXsd/ltbTx9KOFjU6ZhCpl7sFb5/tVfzuQSvl9tjye+wzKaNS7+yCcNgWsOT5MDGiYyK24ftN0N9vf0+XXvsSOlX9vPRwUg2tZ34NY6RqTgqO0W2GMlgTjRCiwOHD1q06SwaaqEAPUgCKRlVeO6DqPwNaADsFWGPNqwoxfcCFUXBaqDFAeR0a7cqYoi8uC7SlnYDz5mrfqyyDi+/Tg5WRBPZwA157FoD2jqtrorzugqK85lxeaweEU/J1rxV76n9XhYTtefrSZNHXxLTk/p2NNEqS98+mxBm8Z4ErHUHHF6A44UAB83xAYo1yIrO/A7drxm3KuJoCbqZx86Jv+NBl7aNptAFPmd9CF6ZuiQxA1Z5G2lUxH9qyddc6A2bnWq576n0b52jInDup3Q7pJUBf/2zc7rISUY1E8o3woGaToD6AEsGI7Ua4r4mzNf2d3lj9eTyAt+d049Fx9+DFDZDV0HzP5Jqh7KQN+OoW2f018PKgteev2gus6CMDe8AqxzURbpcFA/TkzaVmcpL3NDRBeabow9yB1txbzdT7Wn+NGkWJUG3A6gww3Yd85LAFRwVAS6fVz74SdOo8vfnVA//IpGdrN56w2Q41LhcYY9Gcr0oPG7hun4FmBjSxFmHC6zdv4PmSm8rw/+t//UUsKePJHHEzEf7Hl95BZouMkmP6uoa7ZCa+rnsfu9wrT0F9WMwZoM4sSAZoO/fHLKAHD4gD0Co/qAytNwE2Y+2YycixP5SPe/MEV9CyKgm9iQmo+AxsrJsAxv4H302YtNhggFyLzWzCtaWIaV7t2PYrlvdzYNqc3WfQcksjU7ydq52z7TkBdawbIwak4SUgoc5q8kSur6OlETAjgQrz8iBm1wO2Jl7ThkTrajUvD1pLypuIwL3w9d4MWO/NP+MWsPrvz5FPXLX79H4RoGpZwTn5RwwR2DLFjYnWM+ozYxuBVZPyweNcxQ7zdsnv0UuERzMX8gXAdt/qFKPWPKM9m89U8+i13xFovXc1rryC1iP48AQIEzjEJlTV6I3rSSu7B0yaoHx0TuR7Pdswib/K8rT0nzUO7HN+GFJYl0aBGw7QmjwsJSZqQKt+YcfqaZpJ2fqwTrUWAM4vbh99FxW9rK2Iv/gpCb5ebHx+o+qfYAB
B60FCawhqGSOVEJlRtBWg05gaBn668KsWeWpCowsTHIBXr12dm1/wtllfFBQW341rypdD5+Oi1j+YBtf0IO3ByQQzA61VHf5LyliSRNn64IqtloXyGtiahvZMrucAy4NT/7nXsipIhY2GNYfQlLrqm5/2PtVLqPFPDX7Wa/eXgld7jvqa3We9md73ODtFFqDBvC3SiyzzVYIsLtjnwNhH2B8zqt25OY/Tfnst691qrQ8auPwqMDABgA+870lzmLQiCZXZQTbXb/zq06HvI+IQBbTCVEDLKjkAbt3AFqM8IQJKqaVC9l5D4YnVlOiYuxnwGhNl59fahNIPqPVHtYvtEtw28CKycHF2wCHmO9s4p2zrI1+GhTn3Ic5Z1ahpsMq9Ccde9gEXphkZ8zbA6gWWUXfYGLgAzblu0v5cu/4zZWQJyhSLgwONBobMJRfOAlCkFxI15tVeOzat0uda+ftOBdhEsEgOqPZC2PuKCXt0qWbVMsD6+V7OT/8bbxb0EryvejEKT78PtZrYq3HXuh73v4f7fuRb9uQtBv74zbg78IoaSCFRgiggtkcjLasKGRWMmt/szJEHLPn9Vw60gIcOXC5n6uqrPrL5+kvvvMCLJeO1OTQb8uMffLo59uJLTgfAQAsGgltFQhyrLXgBO1FHvTbmwcs0M3Lv7ZhmYK0J0ZbnJZuiH1dPZusPVB3FwjS0mSNac4e1EW+u00vr7n0t4XO6aveIRpJtbxpkx7wzO1OWG4P53whatSDLRpPcOx5K4BvNy41ixLTsI9863XpxMXkTotduUU2KXNfM8H65Bena8BElYCmrNpy55l69eomf07Rfckn/dubBV6FRAI//bPd32K7R/vue6p6S7Tly8fTrcfS87LiR1ly/O71fN+M1vFLwsooaJnB6ELuUPGjtBbmcopHZ9ivlH3zQwMVXryHfvCFAMqAv3yZcRSpawpOZcDfodHwviocB09hKwuS0dtbVxlQ3lmhg3f34II89BaqvtuE+N/Dwtfz8BvDBCT3t+XPK6RW8zIQFEBAYnIBMKL2FbAeFpqo1lxXbA9heF+FTIcxGGbzxRRUwcdGPvkRRH5RgtOTtRjUNx/tONmMoUnO500Kj4BQ7yjOAGh0HRAt5JtEERwVee/LP1vvxsn2OWq/Ta1tmyh2Blgesc2HPwGltao9Gv+nNhKNQ903Vig60qjWrnr9fH+bHPEf9nhCTuXxoc+rNhQAwKB2wob1gqNFM++c6HCNKSWzVQOU3Zqmw1Wb95PbaifTP+RKT36U0Cpw6pcWfowcNXPnqKfj6dSQGtq0g5QEdqUqbianUUbsP3b58CaRjAUjh2zWtmYhKTbveZ9naqQkR1RbOep6N/0s/B07YyBmw+n7eNLQHWNuxbGnU2dWH7Y7IO4pZryHaCwE9eAEnOwl723rvdN9z5lpHaA9Ydq/yvtLIVNpkK/iMSjO3aIDIaNpGINXPr6+z5ws7e6bAqtHJsxIBgFTjYq2AKxrF9vm099oGn7CCln9+XtuS8WEIWuX4nfXS+yXPaVGnJG2fjyXv63rY6ztmAtQlDPSUtmy0F2XKOveXRBNW91I9V29mHgXw7GlX9RyoPb3YxoauFqgKjt1YgApkRhXQdm5AaaRlfSUCK94NYBk9aOBawwEvVpEWX9s5xkdU3aZ8v0AMpWMGYjggIleA4dzUExQQqlze1kDDJNklEKJdFH2R2cyt+a2nQCgBIYD34WzByo9jd3Ps3bw/RpmnBWqw5pVYoAapmUtA1TiZgJcxJ18SysgDVnQM3UCrkeBR3xem0t2VZ97AVrrbq6ph2QqmlXiwGZkw28TOHTC0z7oxBgXJJaPpWhA0TyeQaehVmyhRj9vTb6IbTdvyZsLe5HdKiLmP3ypQC1hee2rv+ZT2pcfQIIyaaFeYAcbA1IP8JUDVkxeKhsJCETa4uZ65CeS4He3Gn8d/viMkGYlBQ4QdAy9mEzC9MNGarm3RNPyku87IunEKtC4RCE7
RPlSfpwcNXM+PGekgYb0fHnx/M0uI+t3KuM2Mt26XwiTuQ8nMA7nWKQQFcJQCtREoGtIUA1ZFjkbat3OZ8x3V+T+i3u4NqIOebExjM+AecO2RN7eYGcRv1NPalpoO5R0QzHTX9hTy2sg8YLu949cD1p7j3Y/d3+fI4V3uD2gc2NKZdb/dhv/8FDCdA4RhtYJc7z3rCft8pKT+Nm8m9KYwoF0/nhkWEyG3/MnGegkwnYwYc4DVRwPafY7MiHKfp8WkUWKxVVuw+YhuL/RmQm8iHN1mb3Y7JRyWZ+BM0mzGt1zHUD5Toa0E4ZygkQUAqGZ5oJrmM1CSoS09hBkgA1CudRzr8qTN/RMcYNDlfKKn0RrM7vO983l/8HvWVPh8zciqdvynN58hBuA6BmnyCDXPuBmcQ8Btyvhf/+svanh8xBwJr821BIkBwBRQckWmIN2OyfW9alrNUwBrRCMoIIZJilEWJtGO2zvarUZG/xBHD96c9kYjsDJ7+P452veB25p/o9Byv5F6Evu+BmiIciW11QCVSH3ARbuRNmNxUnTrq6iA1fO8EQNqUxJQusxKN1rZMLU1BDU9jk5V1u4/98d76p93k+Nk9xqqBmrVNWp3WC4+t3P+tpGpzBhTn1vn76PfGz2di/Kb4haw2pD1cRkmuffteff6MzVRhAR3ze2a6Ic8YqKboKETzLMEWjDaZ6TfW9QngBIJGkieqQexUzSyhIwei/Em0v1lGl+tA1r3lm9uUKrZ2Hn02BqUxeW8cEKOdYfmeiI9h5zRmmRapOw5yoMxvRt60MB1TBkxccnjADSnw0sDBPx6F/b+r3/ml5DJQuSp9NfyDnLohjPJPxLg25aU0HROtYvsTki6PSIDGAn5rt/7hXtqEwVdWH4b9JqVlyR3pUj32sxsPXiRG1dtZNeOtz2naQfq64OvF1nnNutmK/fTkYGMvD7NlOye67jss+34/Hk9gPnfkW3SHZA6V6hUjtl+tlcWp/ncBW1kludhkZkjEMvNOthGPJbliQrIIw2rTT/QhyMoj8z7fqpey2pNelIJHGjD10dlmDztCQ72W9M8DLR892JgHIjh3xfNW6MJTwljnjx4+f0CODCgai40rcxu0e+j9rzbi5/TQLy/udy3Mw8auEZ/b+S6ewOiZaHuBzNLj67dR92yrjsDsN48eS6ASX7Fm89ehR40cK0ZeLkmfOB6EqYZtvb/Qxe69zVPD4hBpFFpHie/A9rFJSYB51NxxW0ZFbyINTDBJSN753jKVSMC0ASHeLA5C1rYNz343+8VvfULVc5VU5utFJH/nV2zSI92re68xgDEdLOV/vZoo22d8Fd48holcP6+5dwoC6MuBxqa1gyoV/3SM9BhQjXsuNPjbo91DIFqxe0MlPYdC9T0oyZNea9gfiox1FFff3AEXtEQweyoCl4BFVxHVS3aKhahtK0wsx5gPjuU34y0Q2Oac9BUCx6YtwpgUQEtMxWOotVG5LWv0o+NuGXy3TlsbOW9fm/mQ+i5DKzMf0uMKtD2Wpgjf9k9n3RPvcXFP/bo9x3VfRJ0IOU+FIHM/CjgzGUJcDm/22ddYFGfPmL3cypYpget+5onPT1o4EqZcRMCXq6MGBiUCMfAuJlkIx3iNorwZjL1t4Yay95tQatskIIaYg60moPs+24FBa0wwWol+nBj04yYfYhx9cMA24fozWSeKV6SDDs6n4927PPLSimibjP6cZwiO6ZJRuyYij/NtlGifd6N2UtqrAIA1ZuxKKtzoNX3tvLXLaDFEnVKMG2MCnghVDPbXmDHuANvZfpewzqVt9SbFQ3Iyv26Gx1VHNmjPrQ9FLAC1KZV2lmU4MqwPb+ZAOcQNn2W+gKqvX+yJ7+WbT2mXM25Y1N3Cwj9XfdV2D0Zox9pX3UcXlva/t6mxc6RzISLFsRKeHoHYns0smrs3cMp2gNowPui9Fly7e9lUYm9H9jGszGRJy4A1mtiwFj76vfzu6EHDVzHNeNuzVgCcD1
F/NaPtpUwvvDsOQLZX9lwBw3H9ovQBz34z6p5SatekFWDH7BIDY9naEIyV+ejB63KJJ3k0S1R0nAlsx83C57bRQicBqz+c3KmAmto580hgGOQMrjdpMV+zur4t4JAwf/Bd3vkpc8ElIhGYz6t72If/Jtx9PdC4iewXkdyvdOVQC6lBsxOBCTsmRP770b+MbvOqUCK8vvuGA9eZu6KkYb5TSP/lYDXtvxSiQylsW9ybPaVHlUI+iw0eEl1QBBod62M7rPZMt3xPXgBVTtpvtsRpEZW3j0/2H18Xn3ll9G1L6U9ADNg9lrkSfCCgJSvulEibsX8UX1hTgPs0wf68V8iEJ+iBw1cX7pbcTcvuI4B3/7xD2y+PybG9SSBF3MAKB1xmA84qjphUjywZaz+AQC6gWhqjiuk/q3MNezYXrNdB23dON9WwsjOaUEYUlyVNqv4Uoeojd8voGqr34JXPaheB6hSpYzNjYNbZuLn7lxXX/vZKcmM9XXSF6ZtMWprh77+YBnbDmiNzEuk0Y/m37FrM9ectIx283q6pNpHu4kHD7QjD1jGKArj6OySeXB9r6mNtEH7zAOVXXN2x7X9rzx4VcAy3xOp2d38X37Od26z3oMKIhHAmlh9J9KKZZpIzfqV2BBNqTGZETVCj5ypZdgbLcsBWD+dIyDzIObBC6iWAK+BlXP5veSvMZyVd+8L6vermRDFROr8X2SgXa0O9kzmGHaTloEayLFHNfBrf2z3pQcNXJ9/+w5TmvDG1Tz8/lrrE84BCMcXoOUlOM64mq8xaQuUvuyKEbOCQ8dkxkw4NGC07YNUtawls/pQpMjpiLwECycJGqVuI40qYftF4jeXN0H21Ac2jPw2XoI235iZW42JEcRnYXMU9TMfkVnO515bcEt0/sKkd4ggG8TMmeXe0Pqo6j1TkSYjBozOrtncm9yfz0ljlZKDmgtHJrltuPd23rpZbN71nWpN8ynz32l9PfD1KROAgFvsEGPUHTgSVaAadPwZ1azzuVR9lJ/5jU148ULgyLTnSeQTwhoYN6iCkPcRe0uJF5T2fD8FkByIb5a1Bz33puwhr1V5v5GjXvAT4bJKYDae5ECyXP5dah/AaaGgXkitR+oLqz6+fQATudl8v216C7BNUh6Nx/s57z3mHXrQwJWYMQF4sYz1j0iQkk/rLSgtQF5BFhV4AGKYgODyrqjVwvrNAvgN04FJrtW5DZx68OpbSniHuS8lk3O+qCleleasurht0nYeToFW7T/kGONwNvW3BejbNuflfDBmYuHQaNIIyJpkDnqQkavDyFqlJKgJto3ErBqsve/zQoJWk5c6ijJRJSJsxLyw41PRv5EIOAFeHqzOBU30VE1M+j6btuS49I7JcitccLmuL1jcj8e0ptGYN8e6dTfyX3kNq6l2ETpwcec+VbXduphHIhwCsLJZMmRv2V7zNFIIvNbl6VJ+6UHOwDDwNiqx9d2oVnxC6Evgdk/6dXtmdP207fmRR+S1TrsXACWdxQI4fGSw8EOtymG/HSDOSKs+V9XkPduPaw6E/8snvn73eyKSRpPrHZCOoKxMMx0R8gqOMzAdME3XJe9qL6G3nFP/RrcLW/+VAVXtNmv9qkzLGrVB1zMBEPNMzNUsI5+hCYuWMbSO0Uu6C/fh5jL+LWi1zmL9LVWfW+AKBn2FaGNuBbR84rZPJwDgiwMz6esM8SPSdhtzxyTYgVbv8zLwIgKIzZ7PTQ5Kz/BG4fQxULHzN2Wh/DFOI/G/A1qGMjZDiSRr+WSiyXYA5sDLtJARc7SmgF4QkvFt11O/lsoYB2P1gpHXsHrwig6wYqA2ORitSb5OQm7+HqKUQKP1CFpeYo4zpvkaPB1ACVgJxdxf57DSyN832sNyL7S5V39O70oYgaOPaj3ra9YPiUzz0vE4K0YJkrLx7ezlc4FPI+rdIjX2iDoTogimLYDVtcw03gP3Lab7ntW4ztGBsoCWAhYtd/IFBbHPzNcAMzgeAITic1r
SuChtk7xovhCVBs1k5avQ2/mSApb0QFKNJZtkxlsmlAbO8FLpAQBYCsizK0TLbiESN0zGa1ojSVeqXLQRjqPAkRJpqB2JkWuSIjebTaVzoGpZnEE5bUyF0GPENOhsVa5lTFIpuzRGVA02oxUYmu3EFbySVEBV/14FHz/nvY/M5s4c1Wbn32MMe3Xd5LPTUY1A1Rp7k0xmafZYIsCYsCSZv0j3C8PflFJyJj+73xHz6ddP77+yCNwCXApqkVwOpE/a703kXqDJ62Z9UFpAOYGnFTfzNdYQkHIua+GS8OoehIHTGoppG3tRpM0e4W2wlecdI9D0misguah2vJ9jKzvVazR2nn49jpZn9dGphrUDYD6xOZLxO2rXqB5wyipzKb0bE+mDBq4/9L987OT3KwLmww2wHmXhh1uXPOwYKNeUvD4SsKfiT+EWrKxKAVAZDtDm0Cy5VuKWa7QMtE9KTWxqAnSVyfeS+wPEiCayx9vS9RSNicaD1p4EWe4TNsZ2HmTDmfMcmqUvWgyzANjeombSyDFfVNhXu7fIzTjBUguaf9mBFO9Xy4bNlAMvtqoUrEAGt3E7qbk/r5lzpkDoL9lrJz1Y7QkN42tT469TbxsCmeCjNxWDW0djzasnY1qjmn8+P6s3J9Pwu9YM6DWsairUuVEgKoA1ElyA0u27ac/Tteox26ZcC0hJ8yR3QxvcT229UuvzHVWK8dSXUOvJ14X0gOUFjz5vDzpvkjqggVga2tf7zsJIkCjvx9qiHdsHomzWNVrzYX+uEl3aaGN+348tFJfSu8Cthw1cp+hnvvA23ncIiPM1iAKunr4B4KMAgOV//Od2Q2Drs9ojH87uAcv7rYCtM91Mgj4gY9Rg0b8PwUl7A/Aa+TBGNGKY9nprKtuCN3ffi+Yiklfi+jcyKoDpSMny3JIDKs+QKEieCwWUrtINaJmG5ed8W0T23PbJJgOQ3ESfHwdsAde6y3pnfz/bvU3faymbIB7aMshesrUAFNbq+tWnIBJMziQAUNRBwO7ezIT13DV0HajmQbuP3j/lwWqrbbVSv08A3tWy1KfcAxaNmJ0XZLwwUyY31OatKkQcE8Pa1+w9E7uHbReBFgj2En77td+TCToetFYHWH3ityWS+w7fzEAMrXDlmZHf4nugNTK79cLRKS1yFDE5ArFNdY4B/7k0EvI+3al7+jULXCkz3j5mCYfvQIrna1gzyMI0IRt3NpMHGxN3Epdb9IkZa27NOb6ygteivIa1Jx2PCo/2folTZIxIXm+Z0Sl78qilfV/nDkADlIEIHODMhKR1F+VYIiATSRWFeAC0wzPn3DKpEGrVEQdYa64mwcW9rsLCjolwc2/OZKjAijLm80SgBgz6uRxpVL0pzUfTjU2NNXIuq6YtIGY5gQoyIaqZMDSmQ1tjp55xNLOzSvpWJs3CzEdFaltzZx2/vyfTsorfy5dGS87s10SPUgEvLovWzMQRCJNo3bYu1NSfaEIEEJExh7GOdGoOvGbW93G7lHxZNd+o0/4tKTe8wF4bedN/DtVsS6rWR5D2tzNBizdpJ18JKh3Nsd0/wzlUIdf7s/3v/Nz6/XJKIXv0cQ3o13/16/jCs+eYCBub+uGrvx4AcPelzwPToUhxUmuPXaSdBESIb8OFoUM2PYGL4z6oOF/zbbYgFXRx7vWj8tQWJaVyrAeyUbPFUxK/fe5t1iPKA1OHTWFW/5rco3qzgqBDIJkD1qgp1mtNISDEg5yAO+ByUYMpVa1qSa1Wex/Astsd2vvVBOcDTkbUaxzlc3+dgRmtMbthy+iB7bMW0KJyX8GBFWkqhaqKOMRQTIde+7KqF2WcoS96681TTmMCSgHbU0DrJXu7L69llUAczjUIylMvPFL3mQtx5FBNxXeJAcwgAg7QZ06h5LJZnUyvXfSWhFF2gq940dOu2RBtvzPfy0z81xJ4dZuya87Z7ncDIsm9UyEkMGKIsi+DgFdC55Nl0cLt+VhSc80/O009gJwqsLx
HvVY2NCva906APTeWV6EHD1x/+9/+f/G+w4Tv/ZZft/nuZgoSTZhWHL/4JpBXHD70tfUArYQhhhp9MCTRNFk/TCxtSBKrqYGsEaQrkWRAou2xkVtV3CePGngZteHJ5H7T3kvP7LyvAoBjOPtN3saLSCVfp215Gpk7UlI/G7GWRZLNtWTlr5DaejnYeyfRo5qCGCqcc5vjZv5AD1g+YXu/s/AWtM5Jdd7fMZqjka+nOX9nQhuBVZPL5M5lc+8FpcTiL0y2AM2EBFlXVo4KCFiQMZv2xW3uVw9afYV1D1pxMN7RHDb3pr8ltAEYlNcqKO40R21Aqvdx6mvTvG9XJ8vPoRTwbZ6RAy3/1+ik6WojXLYJ9z3VosWdq8AFXy0pN/5sLyRlfR5ichPzrxRVzjrvpPlVEgBF4LKJmFRDOgFg56gHrEswxJ/Zjh+ZIYEKSn1FkuZ8O0LhfejBA9f1FPDBJ+ME5JiPCLdvyxuV4toD6mf9frCILWPQKAzBisnqg2HTYnRxARtzDoDSh6oPxBhpU+cSWH2zRQCbJetNhJ78mvWbuUQTOm3Gt8LoJUcLt04sNeoyE2YEYZ66mWYWc0gMUrC2b8liZEWIi3kwt5pW7y/wVHw2GU2wgQ+QOEc9aJ3z85TjRsy8By53nqrB+KvXNcT6N3L1VUlASs3XW2FNO6vp0CJWswWc6EP2EYTyNxR/VP93k2/VjNfdc3c/BbBS58vqyQQV1/rHA5UtRR8dmHUNMCT0fcmM25Xx2hxwNUmNxDlqYd6Mxnx3SvvY2wNGI/+Y/603pVt6y5JYSs/lLFriav7umsfpNXGrrj8HxpypmO2kYLHVfgxuPXMRSEXLlfFYsreMc7u/Rqv/vsrOhrd059rTwi4BsHdDDx64ft37rvFVN2PgIs4FmMyufvzim2Ai8XNN10XbMju6D8P2klIkAmkYNYveIBJP5pqJzmaKCjjEsY8IUIE0dgva5dV4QDr1uG3NXKJteeqj6PxngDHRbfSjkQGYfb9Ajp1DQA6V4QbNzPf+E0+eQfUJ2mZ68XluNl/FDMZUqj3Exsw1Dj8fmSi8JiTvz/t6RtUgzoEVAaqV2M3XAJVY8tiApDX7WJEsRELIgJbyFlOiSuHG0AzA+md1qsqFB62RxuXvpb1Pt1+yRgs6X5YPvPD+41KI2u2v3mTd5xOKmZmKJv5izaqdB1zF2t4lgLAwb6JCR8C0Fy072mc9kwbgBIVqElxyxu0qWtZq69aBllGZ/yD7fA6EkOTvkmWv1OLFudGW2wLG+tx1LfT761SLl1M04h1VRD9NlwZkAF7IuPw3PT1o4Pq//b9/CV/zwffjyRzw07/4dmkkeTNJZXiJaKu5QYxcN0881MXah+gaI3EzaxtfmK2YCQnSUM/bvU0iluu1eTkGrz2T6cHKM0o5bn8OLtEuGlPFaDOfWUCjKCSLerQNtyQg51yczbmESO9pG06q5lZ6NXPLNkFbpNVZfQRzBHImxEGpIr9hi/ZJrS+g91f1m3/PfNZrJD1g6aVawNr766IsiUIxEWcNnJA6cPbcak1F8bcQSP2NvjFmMw9uzHZ/pGO2Nezv6WTScNOTzr22/Dx/TwAIYRsE44pR1z2jP93RgKSZq5iSj5mRjhnLNA5asr0oc7E9oQesPcFyRD5a2MyAfr0WjSvb+t22wLHUgaC+8BwJM4eimQUCFmLMkbAU60utwL8EXw5OGzo6t4U9N0sQJif5njIl2jSe8/t6ug/meG3rnMZ7KT1o4Hr9KuLJHPG+g0YFkgGW2PpjPIjGZZUbKICnA1gdvwQ1d+QVtB5RotzCJBtLr1OOg0jFxATOVsFADmCuUqLv+dQnJwLSXRkA+iZsfTi10aWSSa899NRoVQOTSuPQLifj6qhygSXeH1ec1CR+rlGFhhFZEIYB1N2axNelPgIAzaYGxOSV1UwESKBCYCCodmJz2PumjHqMa4NZtuDU18LbaFzuGhutBGiFopEZrU+2VfAqYkz
Q5x+BYL5Gzfkx4A9aa67vd+SFmmpmagvhBmcy9FpZWfM7YFUShgf1J9tcLNG6LHKUUXPyrIuCX969zxGQjg6ypwKe3SW8c1wAAB96csC1AlifT+XnAWjBCqiaE4ChObwvciznq4JUYgGrzKgal67lY8olad6TtUmKQaJtExMWNfHNmeszSeT6m/WvGaWNTBZAby0NFcRkPalAM6h2Ubc4Ne/79WO/rFxgS3tCsadTGvZ96UED14efTPjgk4hDqCaRiAxwktWoIdYcD0jhgFWdCTlxCXsvoKb29wJaAyZeKjY06r/8FasVbR6eOda9JGhLoHfOXyLpeHoVieXUYmnMa0GSX0sZIudnGm1kAFguXIg+ZUA0K00vcGHEZmrxY7PmdUlBC8iYJTytGb9pFSMhoLnfzqQy1KTOAJb/3o+hmFi6XKQheO2QnKvK1gha/FfDplMWILY8JhOe7LW/9z560O610bqACnAA9nKxNhUu9oIwBq89aPmqJ35PJbQaLnTs1xNwlwh3a8Bbdwveul2xHGITsGH+p/q+krd07BUG6MFJjtffO79V1bYyUgZu14SUGcc1Y9W/nsyicjWJxnQMGYcpFDOgB64YCGsIxR/me57lEDBHavzLPpmZ9aaLEFx4TF0H5XHo9fpefHZ8L0Ts7e4tz9PP9ReNFuw17J3zXUIPGrhev4p4OocSyeTr4ZVcEFQJzzeVDBDmHMMEtllQydBXjQcco82thOj5ob3uw9vlt+RMIxj8St/dE7j6BdJ8do9VYUzLNEBfUBNoQWxJuZTrK5Ucguvi60AJaP1Tfly989qDVX0vx8Zg5kq520ABKXAJeMkSC7N7b8DWVNKYEjtwKnMyADKg1bJ68tJqlVPc1buw8L46xN5jI7IcGplvMyGSJioDIiSVigbuTHtjPVsAeFCaq2hZdi8j0Or/hZqjlxVccwda/X1btJyP2gWAQyA8vYoKOBlv30nrokMMGiTValWe+mo23ofqX/vi2LZGy+90nXpz4DHl0huwlCezdZ8zoou2Wr0ml7ms20aozQxorhoya+i8aNaBrVs5N2ZyA6wMW3u1ug1Qt0fTUFZdG/b9aDmMNGD/XU+XgJYd0yeO34ceNHAdbp8hHpz01yS2JiCEYj6w8FpmZUqMssmD21ScewdvTUDuQ1s9jRonmrnHnq8Hrr4Iak/2lQe63nxYq9hXTU+W/LYleQEhMx0wN7w+U2vi9Nf2PoPg+o5BKzkEZixAya3pNanR5gcwBKqRvyGz+LMAMZchts90Lse1aQS9v8qo0SzLZ/uAVT4v5+00PNj8VNpLNN8+8VD9EANzSyDxd1mEaoRoXAhWYLmarBnQiFeUKNfm2rSdi3sRBY1rNDNgC7p9+S4OsZjeQVoLlC3loQWtPUErUCv0XE0BU2QEmvBLLxa8WBLu1oynV1EYvdI2oEjP4XxVHqj8mm3WLrPT1FA0rN4keLcKeMn7liMnlaxioHIu/xrq96oFnGUhBFedJpAKhxeYZeyw0oXZ8Ye6hhXkmBvBxiqMnDv/iHqDi38/KnIw8kFeSg8auMLzLyDQi9LTqdjRDzclP6QktGbW0Gr5rW2aUb8fL6nZpmLeRgj1TM7e+zyZXtqtmtyZxWH36F7Xc9Sx9mNskllRJZ9eIcnu+oFa38+Iscm5nc/OgiR0o0fl1KX0FVfzXx9l1Wuwqbxv/5b8LN3YYRCEsUf1p1uTn3zfmqJG3/fmlT2Trl87MlfK6HDeP7l3jeb87iTVF8fV36oAZu+bMXXMwVq76I7RrsPCKFntQ7mYrCa5EqtNsvdn7fSTK9VQwuQarJqp3TVXHWhal5jLAwg3U8CHnhywZMZbtwu+9HLBHAKuplBSAPrh7fmqajCQi2wdrFm/Xs0sCKDRsGTNtrvN+7Ym99qbCs2q4M2FTcoC1VB5S50ZFUv2fs3RnI7Wm//+ksaf/nMPYqbxesrcgpY3D79nNS66fRsUxFFL8wEIETxdl8x7DhOSApb
kCDlGzkBO9SH07UwANOVdbAEDKDXvpLWJqwVH1antKwp0o969n3PmvSKZkiyGSFQWhGdcVukDjrHlzjm7SXB2tm27FxutZ8zMVBbfms2kI/CaWDZt5qSS8ugex5rVHmiVwq2hJtHa+EpdPH8f5Xluv7sPaNV50e+6Db69LzT34wWdrYl4a6ok//pCjYhgGpk8PxHE5Ho9E2m0b63eUEoeKWgRaRkpHUMq961RglRDRM8BTBGcMlyAEm80LT8Hdk/ynrrz1XsDZC3czMCcgHSYsKQKSnaWEFoNy7736Rx7FSSCSp65W8SN8NppULY2+zXcg5WN34DH3htgGYBNltMV5NhZ/Vvlb5PrVcFqz7fb+7fs2JFlYmSVMBo9N2C87spv2At2co71HMM7QQ8auPKzLyItV6D5ALp+gnB1UyW9+QqJplo7LKP4uALJa3lQbZXxvqzLKC8kAYhBNj8DAhAGWrZIjRmNIq56snyewWf9cWybl+TapGq3tCZBY0Yqmx2oLQv6U3agK6+3C7jce66BATKPhU1U5hACUgQy5wYw+3Bjk1w9+SoYVeq0fzK24D4HcEHH4Uqjexsxy9402B9r5Edfa9fVpqKnTGGjwA82LQhb5t3fh2e6pon1DL6nzDLoEjIN9Y2RlemyBHsDftszl6GpZ1y+IWtvJu+Hd27tRaoCmflxDurrlIjUiCXnk+P081UKAJAvVn2+0r6vggO4/b6zjis/2NesbBzRrTOfMH6lmtkc5bWFxAcHWL3A02+JUaFtD1j2fm+dN+cafOYU9yYMvz2Gi0DT7/v70oMGrvTsi2B+Cjx5H8KT94HjjHz9PuSbN3DEhLs143atBVqrCYcAxm5pF18Xz6uz/sFau2urouHV+0iQgJG+42/fRFGJ/YrbASwAMCd3TVgNWsVCpGcDMJGgq7ZllS56t4d/a5sKqKZOoN0AGQAHbWIYhDFHIiQt90QUsYSaVB0JuNXzLMQAMhYxrBcvcezVom4slqg5x1Ck0OsYaoWBEIomdomZqV6jnQPPMO2+R3lZPdlGtPnxoGWJs76CeWv2M+YGQH0LbRWLKmjd49bq2DqLgY/o8gw6mC+EXCkzoPF1ZGew9tKzpz2G6D/rj+/n3z7rnwcga68P8pmI8MZVxM0UcEwZt4mL9mUaUeZtY84YqRQoRkApWByJsZBFHYqfqQRk6F73pm5Pfs2ONKhAKEKXn6+aOlL3jSUi22vTzuSvgFVfIPnUXPfPw44baVaXCik9mdAEqGXG0hLQalujZPn70oMGLgCgwzXCa68DN+8roHXHEbcp45gYx8SbEPC9NvWhfK/f2WTbtdBKN4GctKK/LxWyDbTyus136ZKdSf9i9BdoOgMT1uLLs1b3gcyfJU5807jMfGgS6946GTGM0YawGo7Mgj1BJXLy0UskpYgA4DY5rSgLoEXzHYStqaY3mwAoOS97ZhOr2WjkTYRmIu07NPd0yjRYN7MDDxeUYFJmUE0j6Rgyt6DlTYjFx87awiTV52BmP9OACKYF+fHW173Pttdu7BhL9Lb3I8e4v1c9a3OOUb8pTyUlJdg+4fIsRg0o/fz7NVi0ELSaAkhKYvW+QyLJjZxCQAyMY+AyXtFMCSFz6SwNSD872yuNzyuwHKuvZ9ZgjQDMztzoqV+329wr970Dp57auqVtxYxRNf8ClJ2Zf4/2zH+9peFVSKayxguYBSrod1IVRo5tAlNekR40cNE0A9OMcPMa0uE18OEpUjiUXAprjWG00R66ucuFYaNRdf2Gts1nWolsuNqaIBCAtFbQcl1/yee+mPE9VKAq7T26RosEieaygVPQQJSmSoH5O2oEmjcd2T0bMddFegq0+nUcCaUhZDFpbayfjKspIATgjmSjxyQSa0iifWVm/V17hZHppM9l8RKo7y/l741Rb5C5mpf2qOGPe6A1yFsiEpNOo8GQAJhdu4yhCELVJEVMmrekgkaqgoStsd586Ntx9GY5D1re5F1eO/DZ0pi
ZGEhZqSOf+1TuWSfXCxYtw1XLBMbChF+DwIk1SD61pGpWc5BriMaYS41HKz3mAQyAAzGUyFivdaVQc7q8ybI3d5eakIEaDal97bSqE8gwqp4zAqu9gKNLqL/+nkB0X6rBZFUjtcr95Vqda+ZVNTvgoQPX9ROE970f6ckHkJ98FfjqKV4uuRTlTLnrkksVsHrA9w8tAKXHVPUFtBUHfHFS+bxqW42mldcKWGlt82AA9YAHlaol54WDK0Zaht6Vz+lDkd19jMCrv0d0i34PtPrNYUwj6vwgWDBKZbYLMQhSaHcOYpO/TVkrYoRNqHwzfi+dEhqp1dcpNIbgSTQZyV8hkg9ioIZRZhaT3AjIRvdeAGuvbBMFUJjEHKu/tfBlAwKv4ffAUcLX9T9f/cAHShizKn23uH0mdv+m5XnQslSPvsL+uVJHfUWJuzWV/lJ3GlFnj88LGNfqh7maAp7MUSvFkDRPJGoSoAP15tH2OdQqImj3TQhNGkggMTtfR8JVItytYm1ZiIvgkvK2ao3dugUKWA8tmxe597jJRwQwXI89YNs9AJ322K0Lo3M93frEeJnZschRNPsd6r96FSCxNRTJrD6mAbLEAiiPQFYTtLbnkRza9yhwxfd/GHj6AfDVU/DhCY65fYBEWtNLPyxJsZ10LMfW3wDARLKwvDmulsVB8amYT2sKJFU7SqtyB1rJaV5azLcP4ScKYI19J8qbdg9WsLRpsEdhs2CbewMKeMl3noG2oDUiAykzRRJaMNMBinaj85HIEoaFUcxBTB1XOWDJkqS5pLCpUlDG7EwpNq45huazUQV9Y/wCSAxrhW7gJf2NSOv+XdY5ur2APC/qgmtYvzPNywA9kyQIl/JMmV2St3U5dhqT3b8CrwVQVEZemVnirXmonqdtdGjnNdOgB62+4n6bOyevDahKPT4tx+WFD6BGya36YFKE6+BtY61WinOFfTeaVm9mL7Uda1sipCNoucXN4QniPIEog5L4WzIYHGT+fbqCzd2k647BOLikXj9XPfmi2EXIUl4QBve4Jdrs340g6eZH5lC/737XW1NkfMOLbsyB94ht2lzLuyCKdccBFNvrIBaaEGqJslGLmkvpQQMXDjfg+QY8XUlfLc8AVVvyfgebJ++XsM3iyT4XPWdrfy+LE6q+EwpokYFUFq2rBy3qfFykmhOHCcRqEuyrKSho9RW2DViAPSNPf19V+zq1ZKxYsDeRePDq50rUA/mWQvV5JaISyRkDMOUoGlng1twUx5KsvG5t/0C1rtr7rCJ11EEmlqjPlEk3TtW8iKkkZ2fsb+7RHPWgtXe8Xy/e1m9YWjQktGY9m8ZAJv3LGhPJVTs4K3Ms0Zrdg9/4Z7l9PaKeKfvqJyZg+IoSVvViE06eDHi31zAzewleovpsPTPutRICtqDlP1ONt//OrCBzML9LbdTpAaudK1n3kapZ1XpkjeauN+eNrTGXRaT297yX+D7i9bqFyj34C418+O091IP3zt2MmWvhAju3CWE2di7j1i7LBO3eIMIjMqvwPxjQhfSggSs9eT/yzRvg6VB9BmgZBvQ1dKOZFDS7h9SrrCYxNZKgAy4JEnCbyuodWqXsbP6tFZSW6uMaBWiUYqR6DJwW5X1ecar5abCNJeQX19j8UBfWqNK7J1H16yaW37QmISMPbDHUdhwWuGHal3SRlujDGGrLF0tVAPZ9B6dC3TcMN9VqJKwmN5krWQClm6xqiRGkwStaLaDbuD1QM4UGvHhgqiWdbFsrBuiIKJGqUCYaiIeSsruCe8a2Jl1nXCX/TFomfBn1Id6n1kgoggSXY+0cXusw0/kcQwGRycxq0TWvxPhe7BNhgm6PoBUgTOMtHR806ZmLRC+gZXPT+hr7OdPjiqmVmnnsp8SPfw+4Ronsp2ivKsslJj+2Qbn72isq3f92j8wEWM4J2vAY/3a0asQvDhFoyfkFT0mNZ+hBAxes+jtUcg5UnKneAW5EaDdLX5HAyC8+v8FiIEw
EAaVVtKkGrAycei0rtyHxAEpYvGlYRAHsNS1fNseBVnKbzjuoe+qZVg9egHfoW/sV/W3ZCWrecgBmVfXtvf9LeiEr/JrI/GECWgZeFiY+KYjZ/ZyLNBqWg9IbXXRe+6r0kqQpYBHMMRyVSWlYPlEFFAMGK+grGqVK9egYJdCYaxn1eQQScOTC5OV6JnHK8yAYawW25p6iyKI3IVYjUwCKJlZ/265rM1FGjbaz2pMWaQfURqdGpdeaaixpAmZXvf9qChsTr0V9Pplj8W/NQTo22L9ZwcsiEOXe7PluHnklzc8E11ogVPbbETxd4y4xXq5A5lSudYihycf0tUP9Nb25qwW6UZHsOm6bdg9anpf0oNVrlCNtbGwmHcxHdw4jG6cvjHCKRl83YMXby5fjuO5L40mevBui+vrk7827ULkeNHAxxWI+y7qwfHQVUCVgE36ZzfzqNjdXPwJQNTbpA1QrYBRQ6iMGDZxK+wdutSzAvWdwqrE2FGObxwU0i7JvCQG0oHWpVA13fz2AbeaV6/EZKF11zXzYayI2ZyMNTIC2VjUnDZxIWebdGHdmIMSxSaZ81Plj+kr1/m9gUgDOReplHSllSNCN1oeLGqxB0MANgoILuXsNlQG45+M3dco9g6zaflCgEt8XlcRxOYcJBuzW7WgepIFgduvVa8g9qcu0Aa+i5bH5dVo/SyO9A5hD9fGY2S1xLP3SyrWoRn1eTQHXU9RADdlDc2hBqwm6KOeoc+rnt/qhW0bnAUy+F3OU5fUZOFoUbMlzhDdtKdO1a1ilGXbJzvbcCMP16clr7+yuY8JgKOui3n0P4ARszaLl4Fb79AWcbWh7QHUf/cbErb33/TofgVYB7Y32qFav96zGNc2NaYDZTEPtDEYixyQgUmtlY8V8FODDVmXi5zAALObGHNhoWEYdYHnQQtZg6RDl/VQfQwExMxHaa/sel4FWH/rvqQewPRptOg9exU9o59W/Zl4QMJN5NfNhyPraPgOr+aFa1Tb9lEyCDNv8j70eSiGIH8jaPwAS7SValmnoKOBlNfss9JxJgDbxWFoe5QLubWQzvVk194SqyUbHDgqwDJ5L9TMKeNn1PHgZjQopWxFl0/LsYTX7YEDmQO9zuBbNvvXBGZZzZFF1cyBcRes/RaWvlsgTqxtw0HFvg40YvZav+Yu9uTCvmMNUwgbNXOn03QJg5X6wDS7wZi1WoauAF2pQTbk2V2tGM6K6aeTeWNeB20u9ibS826u206XJ9HQRDFzSVsel2HRbcHzKnc89YBlY+X5v1+9V4LoL15g4Yl1qxWYvdQT1RZnKf5u09bdq3nKMOfFrCLKZiQqZ5uRBq3y21bC405iaxWKgZV/HKMDrgKpoWETweV4bNRznna+nPs+DdTPUeDyD5DZwA3AamA86AUrEF0LN+zKtK5I6/4lKkq6vbMKM6iQ3Zq6aXF8/DkDTPwkAUtJisRpFiEmAYUZAJvkuQ1IGLITXjETmw9lqPfX8fp4uqQDgjxCtrgUvA59swSMNoJ039xg1krxz5puvYwuKVVCT39QxnqI8WFUWyOS1qjm4Emi6X3ZboaBqsN5X5++naFkICCE0wRvEGYdQg5YsbYMgwVPstRPTwjSYwdZA/yz7WRiZDMFShcfmMFHViJfcmuzMOkaQ195fuTER2r3ZHPm//es9ugSoPHVC8uZ0ndnVjhmaUWGaeKdlpyOIM8Lx5f3G5uhBA9cxZUxrjXbyJi4fscSs+S8JsCQ4mVQ5PukDIJZck5RV2iZgIRJJDgCFDM5rK/EF8U2ZGaNsrEGyqhxf3aXFTNj5tex3xAGcM8S2JX2ojImZ32lUMP2S6CD77cbERFsNrZx35/Nd0nlp2qVoiHoNk2Wd95quYGYdi/pj6z8EqNlNTthXPjcyn11SMThGkiCQCIQsOSRcQuZFOjbTIVGbK7U9t96aBy534LkyR56KCQ8+786u46XxqmX1v5dr2vvzILQpPNy998xzVDfQ/XLwqv7
GfMJ9U0qg7o0+uKVngr1VIXAXWWkav+6bxMCa6m8CxPxcKtnoWvTWglGwwWhMm88Hvym+RhW2CNAkXKh/uK6xYJrbJQLJiTJwfhS7oHcpXQKE9yBbCxZ5XYR/s1yl5ZXP/cCBixFSbmqGBUKJFrMFkqHSPmkGv6oa3iQlDnnRJhILM12zMA2AEGlCnABaVcNgJ8GRJikA28UyWDyk3d84xApWe4vGAaCA7VaNL+c9MVelDNJmMNuuzeHERt5rSthQZ9oYgRczCmhYaSp7LhY2axJx7UMlkm3Uh2o5IYpmpR9Y77uzgJ2cxXzI0O7BGiyR1PmOUOew72e11xhvcPOtBL2jwUgSdKtNGHjJ954uA60RYI2AxwNTf86ReceCmUbnKPcz+q7TsIbpBDvgNYrm82a2xGgCaDJLEvExsz7bavavJ5egmkuwwpt928/a43qLh7caSCqDABirb7f4UaEWBb2fjZ/baKRhDUDL3p+9t53CBXvPwdOZ2KlCJvzY2inCkGrFJdI6Hy874YAeNHC9fczIS40mK7ZmEmZlFeAtKuiY2uZlXktjWNHMCmaiyYmPZAqE6zgBUXWdnGABE2RaFzTyyUcRur9MBExTU9qpvA5Taxq03zGJhKI9jkyzi/eVjk6YAMwkV49tHfajUPiLyJsOFbwUmoqj3Ax0me1TASmfc8MsgQ9UwE20JtNXrGSPsfFRQrNRYqlFV83BGpxBcqMGOr5UTc9ELcdnL18J6oeyUj2AMxENNJ6iZYGqFnxWW/MamT8X1dfUAVAHSn4MwFZzi9QynUZjGpn8gC1T3Mln8KbxTU7ijuDU+/SSC6ABBMgOeiONlN9bPwZ7x5u+vNvBt0Kqx/IGsJop6ISIS6Dy7BGDKNaRea6aUlEEx+F5TtBG8x0Nh6pplbEVXMy3bc+hNPu1LvW+/N0r0FdWNwTw5//8n9fonvrvm77pm8r3t7e3+BN/4k/ggx/8IJ4+fYpPfepT+PznP/9K13r7uOKdY8LLJeMuMe5WqVJ+TIzbVd4/XzJeLvLeKLMtzu1Dse+sQrzVO7OmcqNE4JJnZWY/D0j9Pw1rt6Rje89EgFXFcFR8ahocQutR88OOJ/+VBOj+n0k9qsHZP8u7CfqvlLBC+9r/C+6zUR2/horGWH0g1ZxU83sm97704LIoMfVZWjWOORKup6hRbEGqxgcqzQTn6KPZauFSH4rdm+BOFWQ20KrdBqovxf7JWmnXT0ZdP/Y7/8+ozH2g5p/fS/ZZPRbV6W0+pmDzbMfbM6wRfpP7V+Yo1CjaWX9TzTvHsv6wHkHrHSgt7T+XFtLMoe4H1h557PZAn5eYzdzm9wDVfyPy4d9XkXBFCQesiPnYjsVF55ZrqvZmXZBrs8gKWuz5hT6w4P7JGP0zq+vWPxPvvjBGb8z9JA0A3oPKHpA1vz11bqfB7Z3Lv/dnDEQdHyF4E2ETvPUugKqnXxaN67f8lt+Cf/kv/2W9iIua+5N/8k/in//zf45//I//Md544w38wA/8AL7ne74H//pf/+t7X+fZXQIOCVdTwJK4RI958s3KLPzSIqP6hm8qgDf+j8Ri0YogNU9Mkm8VYmOrJ0DAK62Nv6tZkgZo+tpy0ApouePkCzWxIACcpEo8UO0TJxZlEzocRPLqC/OOzAZFUlPqExA3x3o6AVrebGimnZKgq4s85W1UF6HWPPOZ9732JQaZjFlNvVntSn2VjbLRHPNo523MRLymVT7DuEK6fcmaexVg7jxnRvIS6hm+dSr4atQAtNey7H0cMJPhHJQbtIAk1bBGnQ6AZr00NTVHdTcHmn/DFMlp+Nj3t5ZzeMc2II7/9ViuNSqR1vvQrBKIaV2l2SXa/K1eqLm0GaPXcP0z2fcftvNU7gUtYDXzoNep0af30OK68/nn0V/H8l9tTTOPtffYrbNLtN770C8LcE3ThK/5mq/ZfP7WW2/h7/ydv4Mf//Efx+/+3b8bAPB3/+7fxW/6Tb8JP/VTP4Xf/tt
/+72u8+xuBV2tWLIU9QyJMIfc5AdYMdBAhCeIhbEszjHmo9QkcVSTVXstW40UMRwQDxOQjqoJBbBpMxQEvDQ8njfBGQ6svPaGznxiZAyCgrjRLg1nLeckSBFEBbNQzQcFyOw3/pruXGc3wCVj6sCrhMyH2hAzhMqokgZwRBBCMd1YDzSUqMPaUVf8f7yzsT2j8CkPfduNDakPMKImDcMFivRdbz3JmDrwar6rx17ammJEI7+U3Gd97xlJY/4bSSXexOt9rB60eqJQfbaWMK+CWmPZ4O29j8gzSKO9VZYBEWYCIQZ1RLtxjbQ635WZgRLAVU3UbaHifmwASqV2e93PP9A+G6B/PoNn4sddpwxw47fX/TWan+/M1anz+/PuCatA92yo/dzG09yTF358PEB4dfj5ZQGu//pf/ys++tGP4vr6Gt/xHd+BH/3RH8XHP/5xfPazn8WyLPjkJz9Zjv2mb/omfPzjH8dnPvOZewPX3ZrFTLjm0jF0jmIyAlAqN1ty5N2adyth14oLDEBsLeZqWM15DyBxVlNLwGG6BsIqDyNHydPiDIRVfF6WlOxpD6hGTliglVQ6qWVT8LWPTqSuJQqgDoJ7SDuXqvevYAYQ04JIkuJnqy0rDMSYAQoVtKQLbvWBBcdcfJfnkf0fQBNsAFTf0x5wlLB8VIC00lZ2HQaXIq313lpp3JuULgmiuC9+jSR6H1zRMJM98BlQKXNFWgSaCcShO4bU5B3A8VDAKunzGm25vTlpj9mP+DM/F9CZ20zLKhMjoOULbHtNq5rexqDVjweoz9QDj5lve/Aa7bQ++tOIy/dhAyY9YEkBcCqfhW791JPu+7j2tKwR2XntuL2SY+Teb0yDuqltHXGYT1/0BH3FgesTn/gE/t7f+3v4xm/8Rnzuc5/Dj/zIj+B3/a7fhf/wH/4D3nzzTRwOB7z//e9vfvORj3wEb7755u457+7ucHd3V94/e/YMAHBcM25XyYuas2X2yzGRIL11LCExSKXrUzk3fdFWY0ZJV/dCNaAjazCAJD0ClDV0HRAQy0mkTp9oCbRAtQdWRk5L8Z/thdrb+tlqG2E/amnv2ufoVezVO78hHUMxIRKQLFBBjR6l8nRoE0O9CdFXo9iGfbdMB7gQMEwz1DprzJZG4JngeG79rF4CWF4q98fdh0agRWoe3Uj1lzjuSwASisrTd9L2GlZCKDl5tUhvOz4ANcmbUHID/fc9jVaOgZefWxlrLbproNWbBz0bMNfBHm0Shcl8WF6jrX7bPWAantvuxZQQqlqhjd9/3/pceTO+Rps9sUcvNQ32dEq7G+4h707hXCVS5yp5FfqKA9d3fdd3lde/9bf+VnziE5/A133d1+Ef/aN/hJubm1c654/+6I/iR37kRzaf36YM0qhCA65rnXlpaCevzVFq4bS+2oJ8j8Z570mixxTQEiOAsQapAsDIWDNhChNinMTfDLhgCB+J5Rackw53iTNAsf6u17JM9fYUzpwTgEUnNtc/N47R6wFRv0jPnLM3VRIFyUtzIMZEsGi/jc8LXP1hqJrR5pZ3/EDyuj7vPb8X4BkHYEAqDIbc5xjmlo1apts45Gxbjck+36ORKbRcrwMtC+QgoGrubj2NigX3flCxDrRrsI+OtUjelLlqW4NxVo2kltiyShIjyX/0TC3CsD2vG7sCro3Dn8ubk09pGrYGW219m2Ttk2wbjcOP165/4nq9ZljvdVzot6wjrkUBmjWz4wrYgGE3jnNzAmzByn82DNTy/n1bWvPV/oXO0C97OPz73/9+/Mbf+Bvx3/7bf8Pv+T2/B8fjEV/+8pcbrevzn//80Cdm9EM/9EP49Kc/Xd4/e/YMH/vYxzZl8TMDS84ICUhMAAJeIGEOjCvWSDMK6PNXbMG1jG0rNVmoPTLjCK23R7xZxFOYEOIEipVh8Mg8M/Jn6d/CHNJazTN5FclWI/S2cbjbaMYmV8yc5V2U0imijoHtRQ96oCqve2DtfmeaVvkqVkc
+hWkTxGG9xSw5uenwrM/Jd9bd02CGmw1bECufd7dgITOANxnKC3ZnbJlPe449P8i70bb8eDcRXcAGtDZkz/iMJaABLCg4aOLvmrlEVVoEZh1XLWgcIYm59toHr3gToWfYG2sJoyb3UjXdAa0fa0SjChlBBZ+g2nWG7G0v+BSgCjUFp2pf3Tz78+t8NePDVpPqAeqS6hRWnkzAvApx5brtlDXnOqXRteOnbp26e/V/hz927hF3vfQugtp/2YHrnXfewU//9E/jD//hP4xv/dZvxTzP+Mmf/El86lOfAgD8l//yX/CzP/uz+I7v+I7dc1xdXeHqaovOV9pl1dqV2MQumTEDSIGbqte+O2mvWXmbtK/1BnRSmgOvBC4PNGZb2BLd6CVLDY1ADNNJKToEO7ICnWhwWfPEUDbFLhA2UjBV30MBr+mks3fE1AUYdoI0RlqWB6xzGpuLbCz5avq5RSDWigQ7m2rw2TkNZlRgeARWe20mmltw35xjRHKMY+Y9qDbjeXX08j6tPdrVigfRpvKDFtCM8WZufYAFxLvHlaFNBCGasxWjtcaeZvobJh/zVnuzuZb9rz/W5yDpCO0v+q4I8hmGQpAE5FBznO1pC8Twfq7iO7T568Pwu3Hb3Nk6Sd19eg1/z71hplarr4lARfsaaUT9vPXv+zU79M8BW4DeW0eDaE5/b3tCxSX0FQeuP/Wn/hR+3+/7ffi6r/s6/PzP/zx++Id/GDFGfN/3fR/eeOMN/LE/9sfw6U9/Gh/4wAfw+uuv4wd/8AfxHd/xHfcOzACAmzlinkJ5sEnNFAjVhAOgtlyINRfoEMOuVFulIsKaZVGU6gqQxbQw1J9ibKsC4jGzSpfcLXhfC09oBJgS7aZdkSkAtFaTTl6rc3wAXD6XrACWRXhhu2n6JWeLyTNuhgcxB2AjH5z/zv3tTZ5+zBLqb5OuHw1MHAEiYZ+ivSAF+6w5Fq0ZatcnBecz6zfqiEGRcbZ2zgHbtOOF1zOZ+8CWnb8F5858M9KK9XUx45x6psCGCTNkHZ1isJuxMkofNNI+YxZ0k4ASNQqMNY4NILJUnpBO3PKZmSv9PFgpsB7ADLys4K4V2/Xk0wu8pmWRqdQx8b2yVjZnvUbYRDSOwMvBdtUALX1EeI9E4krty4RaFsvucW8cco0WtPzrjWEHaEBrJAAVy47Tyj3PYW5Tle5LX3Hg+u///b/j+77v+/BLv/RL+PCHP4zf+Tt/J37qp34KH/7whwEAf/kv/2WEEPCpT30Kd3d3+M7v/E78zb/5N1/pWk/miKtDFBNhysgBpf2CgZX1BrqZYgGtKRKuprbNgpFfUIkZxyQmwcB1M2Sd9DW3wR6+jbe8r4AWUJNtDdCAyljkd3KutgdYAIVDEw1mGsmGTkg4nC8rjlnvxcbHjeZY7fhdlW6vbZUFzS3D3BszxKRRgtUyAMqNlreXc9OOeRsKLufealDlN2eYbdFa7J40Gdzup9xjdz8mQNhcNVrvuaAcPfdF5H/f/2ag+e6ZcUlqaA3Pw/013DMjVFMuEYHVKSSxS+yqrHdDYxTA8pqDXKMeA7Qg5r7uzmemEC2xhK0JttdqvdWl1fC2xXb7tWQVIZoozT3QGjBw0wgriJ0Ox28FrGrpYeUnmSVPNauvsPd79fv8HGSMhLgiwPWgZWsp2Npv/Z6Vb9YcOaCmKr0KEZ+qMPn/p/Ts2TO88cYb+Cef/W+4ee0pllQL7ZpEJFUTQgGu2TWymwLhtTlIywVkrTRRNy7HAxIC1sy4TXWBHROr81nroqW86QtlQR7lNcn1iCRgpHeae4dvWRyhDa0dJY4CaCQqYKxJ9RLcKdOLJ6+p9DkqcWchNz3JrM3LOcnMM/NS9moSf5dKbatb/FaRor8HPz4/j0CrzW6uP5A0C2NzzvZNgVADrL68F9BpXs5s6z/3m/yXiZr53jPfDn+4M6aOMdm/pAKSZ1Dyfrz2gKoRA+1a66kHq/OC1lY7KtdDq0mPryd/x53
ET/h5ujXeFxqwOVidAGlruZ8jAzVvfpU5kFJilW+4yjOhdm6PJ3iGn8tz9+vv+dz9lnt2oLVkOe+SGWveAtfbbz/D//Ebfh3eeustvP766/sPZUAPulbhHAKupgggabFV4DqGkr81qw+slhGqXVqnQIi8gpZb0HpXNzMFYFoRpwNCPAAAFqqSkkS6OY0lS0BIYgxr5Fm5oTkSsuaaNQuBRIKKBdSoVCcX5iuOZ9N8jPw+3zOreK2kNz0ArfmhJ2/qzMmYQfUlMMumYQB9f6QNYPkyQJ6BalsK9r+jeJG2cQ54ba5GzOqUH4oIxdeyF1rcgJZtXpM8fY4cVUZPHbPnUlA4NMe2N3EG1EbBL0b9HF6qwfXH+nG5LeLn3roWNJqXalzI1Xfk51m0CnfJnad5aYCLX9tRm4OSY/A9AwbGDN1MhHkT82/36n6759NqBlmtH778Vw/sbXmpatnxfdBslNbXDQFaPNxptidAuSfvYxsOfc+XsqeRd6CVWKxSolj4UmlyzWO6x5rs6EED19NDwJULLfRgBVSJ2+bftxGfA0CrVrgAWumBpYIxxyOupgMO80Glh1xqmgFW34xLO3OTKDyAzVG0rjkGzCEXjWwOoYlmjNmAjNuNpv4yuY/xIuuBqJHSuoXZL5VeCjYyc4TMo21+DQPmGnqeVbqLumAJ7WYuprXO18UOIQp4sbZygfY1c2apS5e4MadzEnafz1NMQ/aHrE8WqWY5BudN52vXb838dqViCVA2NnWbvbz2vz2nGPmH926AqpwwbN9zquMxsseivjGiIBUrAFglFCuACwhTtUAJv1Z3ef0FzDeAiqBhlgfTVpAJMZq1YmAdyG6NNtf1vGTPhGt/23NsNA+ZDIBq5RB25kEfedlrWfad+YF6d4ToXYJSQTscnAKsnmt43rC3SoJ+PwQv2wvmg3f3ajUejyljzeZuqZYquy8ATf3Y+9KDBq45hlKf0Ex1iRlLFoCwJOJAtUICYCorgTTLn9ajlG0yyUkfDKWlqPri0/Ghrx0gONBqqnMkaDPDhBxUG8xADrV2YiBCDo7hdhqZLb3RVmpt861Z4d6kPyFYDyEtc5ScaSeIH4OVKVkkVwihMuiMurgdFU2FglQZQW7Bq8x/VO0F8hwQSskkMmbV+SFKYU99vWce7E2pe34T7yvYBLxkajWofvsbMysJK8mBcAadq55CoVTFOenD6n2HAyZ6jhrG4wSF/n15XvZ5hmjM5nPNUkHGtC+LAvVPYOS72aO93mKAD05AA0qW3xdDLWrbhKnndTtH3j8zAp8yoP3nsNuuxf3GIi/La67pAr2WZYC2OFdEuXet7CPWDhWsnHWFWECt93PZdYF2/XurTLkG1SAZfw6zQPQh9naf3qS/ZqiJUEDrmKzIdAXi96zGZUxqjmqVULPdkoTJSQJyy7wMtJbMQAiI8VAXWB9Q0JkAzMxn/+YQkHPGAte4EL0dXldsICBnCQcmApCR2KqWaz27ouHY4mvvd2TuKldpFYaTc3aeFLD0hJFqngigfgNrAFnMGVS1Lgo4Gf/nwWtjrzGtqx4nm6WCiM/jAipT85UMjO4zZ30ElgrvWuUBJcpTQIvrurFSNtD1Y8zdmxdLoWTTpHJh/EVydUKTX5O7c1jmywHXjjbRkBt3uX7/vR8DOo3LqDeNcm2oGgjF98X615vCWm23fU7GMGWWNAjBCSx2jFE1oWt9S7IgKDodTODnKLX3u1G4Ot/VnnBwH5+lv/+MClgetJY+ZjxL92wTys+ZB31llz3fdzk123OrTTAtv64HLws88nvHzKFmiVo1ylu0Rrkni0cApOnnq9KDBi6jOQYsx6SgxQAYIRCWbItazD1HZXesG2gNkj9yiBOm+RqUV7DvM8QZPGntNbPvQzbGIRByBKq2J9L0ArfpismOpclhDCiPWjWxBaxmwrr6at3E0/c9cnH4IBFPfZX0U+QBFIC0mSeZRd/uvoxDTxmiJA6LSpmqWSwEIGVNlOx
uqkjvaDWHvFY/QjyIkKBlnaJu9JHPo/dnlMvYX/ZO7zbsGIPjyz2ymo+KHT+DrFMAZxBlIK0w1lC1ra3WWdiHfRUwBpA9GoGVF7rs85659r4IAyfTeD3TLfdJzftCo8VpWiSc6S6bFYSxplarACogWbNPWyG+IDFQQcuG0wQvBXvWdd1GD1pW5DUNAmm6eS334bUBD2g++tJ+1s+bnz+cFiaZzYSIBrSMwS9ucZr/LbBaadgsFWpiZIcsmdVqQmWjnMoRM/wzcdN82lt/fKsNey1usQAq1bJM47pbudSJXTIjZ+HPt+9V4DqmjAMzljUX0Comw8RYACRetfwT4WaKSJw1CZkRMyGSqt6T5R1sawvag7PahVVdzrCeR7drwhJEk7vTjWhmQw8WSYEsM5eAkgUtoJTmh+eSlk7QgpqLEoiQEjTJk88CYtUc7X6paIEM0T4a94r5MkgiOREPwkxJTGoMlACORjAAWqbgzE4CWixmuSzmteJLCaGR9Dz1jnf72hjjaK/4DThy3LNyV7sPjlMxbRIOQF4LCJBGGZb7pLg7zyMqOVWdxqMnLcc0wS4etHyzxx3gsty+WoC5TVBvoiDNPLqnSQx9HFzKP3lzkUneI813o3FBGKRVYPdmQaBq1j6CtG+lEUjmh8zHavM0oEaj7Ndn/95HIJ/wVRpIeLOoaaBGo+AT01gsnyxxBS3TJj2ZIGZ701IMZC65OfdelKcnsbZwU0+SIRcwXmjH2blrD7Pay8z+LTmXADaLBQi8TTm4Dz1o4FqTqNJV05IHa5MEAEtOJWgjZ4APEYkZE1vkoYBNTMBkpsPBpo/uiZljlGOoARNTRKQMrBk52OJuVROTNHxwR41ikg8FZF79gdoc2HVGvZx6C89ecJoHuZwsqlHMhACBskrFajY0x7iA11RMCYQDatuXjul21PtMGgADCoMgZaZW27CQnzozZzizxl4FjoZx2l/Py9BqXRynMtaiGTrQae5zb5J3IhDvS968vVu9wR8PNIEvPtoR5MqE7eWeDU/qyj+ZuUgFt2SO+YwSuGHJwEAVCC3UOwAlytZXxDENawRiASpcmpblTa33Aa3NlwMBovxuMH+oa8VeDwv7uvNkoPFJ+7qqdnSJkjXLiROiodcjkv1XgMZdqNGyXBBXT+a6ZqYhgHlTYneJNmVF/zGg6UIVtGQ8XSzAPelBA9dtYtCSmgmYY0BKNkm175aophkvloQ5Ep4eJlxNJBE5CCWM9hBIq2o4P4VqAJMyzEQuWTioz0uBb44BwIqQSVR+HZdFGvbBG/Z9laIGTPVU4glacCpVARQkt+c6eSoAbX25lJwp0Fq+QO3sSp49zkE2xCEeRHq3qEJr+5Kd9Nsz9xPBBd4hbMzUA1ll/FtNgTS4ZmQSbK7hXvfh9HVuIFofu6hHM9e5/K6mJBcbwo/vbWhqGg6wMsYCliMa+Gc353d/m3JgITbV3g2Q9jRboyaxVn0za1bhMtfQ6F7K9gBVOgcHX5XCNIctWNnn5dbs3l1HBnIRrZtp8mbQV6ViUq3ry+TOUeSqBWYAFUBk7NblQIE41GjiYgIlKtYP87OXe2GJLkwQUKljsGvX4+TaW/Jm9kyiFUm5tQpgxfzYUdG42Akpyn+XnJH12du4TjVHvYQeNHD5TeDV50hAVs3LM28DscTScHIKEYnEpCE/r9F9yKuYfaybKgBoFfgYJvHnkETPEOThpizmtCdzLFnhmVOTIC1RQnXcNr4FVWP0FAiNBlb9X7aY7ThqvvN9yHzVe7/obP5itHGdnmNkwqKmpaiaKmB2epSooVi0PUIMB0yqxXKXwMvZXiuTTa6SfvbVKRwAAK120mkKFCf5PB7kvSZ/kubk9dTvn1G1jb51iQCHXLMIAqZx+QCAJoCimopOBk4MB9mZqxwwElklFWqCWvZMhezmqgCUzhXCBJ5E4DDtKefK7JpTdX6O4mdR7YohjLm48cg0K7dPgzDsObSJtKVyzACo6vW30yS
mMsfu7f6teWu3fuS7HdAaBXDY7wal1MxMZkC1lzjtpzKAwKTWG/0bSLSrKbQmZu/bA6p2WoYLNHzFX+tcpLFVNpSoelesV6OKPYABKHmm/X15HsfuakHVOPtrn13ib9+jBw1cQU1iyGi6Hs8hyAIYaBzi75JcrxjkYZTQWWPweZX233mVkHgjM8nELMzYLXpbppFlgy5BYhSMTPvzjmlAAg3kXvQ49OOur/vbacGJCojNoTJUL6W1ZwMsWNlybAzAkGuEpGcYibmGoWeAgh1HZQWXmnEQTS0xkINEHAaN4KSsXaIBYcIQJuP9NOQ0M6wrLD+K9S8FKYFK88EVE56AfABCrCZKQMyKnBHUB7NHpyqze+Zt5yCi4jsDzAfnwMmZ7xgYM8NygTNgpr6yPmrRF1/m7F7v+bh60DItazqU92buG5m4bH581N9eWHuAClBB008cRtTcQC29pgLjXkPGc5QBCfSE04jtQufImxUB1zdqYCZ0+XheG/Wg5aOL+6nxgiDZfUJ4hu2lU76nPer9ZmUPujHYIQ13UZufCCAuMEaFtQBoMXF3ArS+YhNa28Cn+r0VYAhMBVxDIKQzlqRT9KCB61pLOEGDLwBpFnk1jSckM2MOkvt1NUlk4BRJzYNyrjkAdDwKYOVVykHpguYwSdRcTkDOEomoCzllVaP1IRtgAM58Yuqye6q52KqrOUD8SXpAGGhhG42KSoWOPrkZqGaW8ZwIADGoFDpdkAWglO/5SMWcoT4u6G/kfeYacWhmIzH1mC9Pxiy9y1RLWo8gnoCkWq2BVjoCyxG8HoF1Aa8LeDkW8PJOOpoPwDSDpgPCzWuixZmEDQWvnIELYySKZNvPk3tdmETHhMRIbOYWAbIQnGnP7nFgHgXGZsR2EK5OpbW6afxqK9gYbufTKSbUYD66QwExnq8FwFxwhWlPPQ9VPlcWVAUwdwxJPpX5QjOqgFbnagxW1oxxePt2LwPGLq5LTZcgZ+rfCXAZfm4BRHCHDMwQ3rQrIIEGtAwsRsEHEg1oxbbtvRxvS3QkKDTXd8DQ5iQKYPkAGJ831QeK1aAraMYOb8Crdmbw13f8azQm1NQZS1cCAmag3GQIAM2vbqJ90MD1vuuIpzeasa8xbzZ5VlDX+ycsEzwGKQ1lDP0qBjmWM2i9kxJQzlRYGEAQbcukXI5TKQvlF2ldwK4ob2K193aZ8NmiobhoRVEXswcbs/dbJQ5fj9HAymoi2rHGTE/V6fMS0hTsb5TKzQEaAdT+zhZ/MQtmiH3ewJtq5QQfvRkDYWapzB913kjD5ouPaL0FjnfIL94G370U0Lp9Dl4WICfwqhqwAle4uhHgOlyDj6+Drp8gXN0gm4/LdVkdhQGPaARa9huTsL3fQp6P19ZqGLF9VwGRAMRqagv+uHrNzRNjBd8++MIVNQYfmu+bOyy+GAWwvmsxA5y3tQabUzhzFavpKOt8lTkLVDpEw7RztMJZvy5H921jbyLy3HOz51B+r9rWomtSzh+wQV6Ecry/Xul6QAGWqCz+0e4xDIJVcrcW6lTUG/NzN2puxM1ottSv3epPkvxG621mdVTFr1T50GhcDT8JVolEe44z1RxLD1QOHJnr9PYtZAovC4QJhEOs8yB7gxAP94u49fSgget6CriZfHuS+kDMpu6rhAN103jTWYT4V8z/UkAruWKqAHy2rDEA/wCt/NOaxV+0JPFnlWTConG5myiooOa2TsPyTuoQtqBVS0qRO9ak/irdeuwqTBetmp+YNAqv1ptjkPQ2G0Q22Byy/mdSW1KmRWBwAjhUE6KAmmh5URmpmbdEUFjBx1sBrdsX4OMt8svnRfvKRwEuUhMQH29Bh2uEdVE/jZrC5pvWz6RMjN1G9qaTtndapSrNVtDyVb3rfHJZX0Dvn8Hguwp4Xsvr28QbmR6y6dkWuiCQvRDRLkLQggkEoFoNa6QxBKollprT2viMGdbLlZH7eWg0T48qLvAENgZ3nYyx4AC0ZisiOcA
EqJ5ILQJ+3OanK+PrTIdDUg0tqDLCqoWWtdCBcd+V2uaj3F+3vfa1GtmnQdU8JvW/Mjfn8KDly9AFNUkWz0pCKYAwq53b7+Pi/+rAKjlwBFqrjDz7avVp/JZuztcdy9gl9KCB6yYGPFF10xZBJCqAVSq/A+2G7k0I3FYzb/wre5FZzsZdI2nMJGh5CwZWrpZh3gY8IPDWHEhwARjVhzXHUML75yiLLZAV80XRKH1Oi5d0vabAqGaKDICyaWFUAU0XLLld1kOYF8yqE5d1/6p4q/aHRJCiwTyoB8cZvB5Vy3ohgHW8RX7+DOvtEZwy8rKCUwbFAIoB07IiXi/IOYMO18A0g6eDJJN3z073+vAeNo+YqJH0PWilMme1fJF/bvUcpmGdBrUe0PaaS3qfmrK+Fsj6m3Dr3AOtaVd7muRIG83MQz+pjasGDbjxdX6+RpAoPw4NqJYedP7aaJ+BH/OGGCV0exRubWOz6DgLRoiBWvA6RR3/IJsDNsBqz+AFydjPjZ+H5nUVCrNff/rCgLn3PcnXvAEtn98ai+/AmIHwoKDaajtt7R7wgOXTkIzMlxWJMaFWrbfOGLVkHiFd4n/coQcNXIeJcDOpmU+1JFrugPW2VH0Py0twUt/IpLcbqmPaiOOs3+lkllIZnXklHsBRmGNCkCTLZJniKFnit2vG3ZprOCijAa3e3tyT9RMzACtgFarGdYihgJRfHN7ZPdIkhBFQp0WIZpS1yrTZ6oEaqFFBD43z139nr2sAA8P6JCHUPJ/Ibq4BEDMoJ+TjLfjlc+SXz5FfPAPf3eL49gssz2+RjyvyUoNlwjwj3RwwPVkxpySa1zSDZwEuYm6CFAgyD0w1+bGXhNtIrWqG6TVrA6x+bjwVAMdWk6tSNzdacdDf+Gcnx421tNG5Zex2x9wJK2NTp5F/V8G1jfIbzVkDWJaz51vA+A4Bxvh9NKi1sulMcHWu9xstbu79EsDSebayRpcarZoE8bxKQA6AGEIRBI2rNACeF1BWy44Jyc2JVcAwXuP8kdFSFQgoJUUMvCAh8J5q3dYWtM5RZq75mp26ar4yS3VYcnV9GIkWF3A1mfkROATC1RRwiDVv1jQxfq/6uIofKx0LUNHxBcLtW2JmevE2lufPKghNM2iaQfMBdP0aEAIoROBwJQV3LcrKuAIFWSw+xFpBC/FQtKyidTV+LRcC7xaPOWdrtF9rygwd8/QSiu/kPDvQqgvCQKw6uX2QhifZ+FQYQ6RWq4gYO41r9JCajuw76tsvtGRRX8UfMBpTcpGDGgCTlxXp9lhAKzmNK5vmFY6IhwnheCvBHMuxgFapMp9XhDCJRopa9XrEfIFqOspq8gw0rrph82K/2UyyUl8ExfIAq1ZVmwNWIKu+sloJQYQGOnFuf+mRX2+P6Y98TedAa3vh9sluGm32WkbHpIv5HYDvrMzdPZyj/rAM/2y7IAU/3sE9bFrxWLCWHWvBMegAMOdSaopyqm6IgbblNc4C6BxhlVp6La88X7fImFuTfima0N2Oj0DeoxJpHGjsJiAgU5vPWa1AKlgHEa6vJ6pxBCUlJmNaXp4exAl60MBlROtRQGu5Rbh7G/nLX0B+/gz84m3kl8+BnMA5gaYD6HANmmeEJ68LkMUIun4N9ORprYRgIcLm3DeJZ7oqDQ7NOeol2KKp5BqU0UTkBTEV+gaNRoGqz8h/Jn9r7bUQzMQhZjiiQRJncMfTYDMW6Q5aeqWaCw3EfMM3Y9j9cRKAYQxFAkzsu8sf3ljqMvDKKSMnCfXOKSMftaSSgdeygg8T0nHFlBN4PYJykijEeW2ZBgXMISAxFQbjwcrPU9QIuxKIAAEN4q2U602wzT0MjFk18qocJNCoGpWdR56raMaAJMebAmemsD3qtSkPYuW7oglWQOxNZZeA1saP1OWclfqU3tfYaVs+NL8PxR9pV3vLywsdRlm1Klu3I79X/3u5yI5/y4FYWbojoCsmUsbQb26/Mz+vDyZSfkNqBTH
hy5LwuVtD3Pm3du+vuB7sfmsk8h6NQUuffQBmB/sSrR1wPUVtHyWa1lUMOFCWFKP1WFKMaH2PAteqABFIasTRegd+/gz57S8hP38bfKsmp2VFOkoB3TBPmK4PCO97jnDzmpiXACAEiVCjgBxnWUTxULWsqImaqMmGpZTNiYUjgCVcwfLL+u8NtNqcLFkcpfWJ+740naTq9PR5aEUTBeqGMfL1nihgchJvAWFUKas3K1oxUAaQiNqkQxfc0UR8deSZN+k4mAghRsnP0n8cIkIMCDEUrYJiAHfhbgZsEja/SHDH8RakzxNRl3lcQXGqUi3Q+l8csSaagwIoaJcx5eyi8WjDTwN+1Gr+FThcThu337UBBYzquzLtj9t2MrmClWlcexUMbG7tuuaP89c3shydwN4sWQWri7QsVC0ukAh+JT8vBElH6BJqixVDw/CLudpZLLxQ6O+N9Hp7UZimZDBXYEbzff2sGFcG97RZFz1Ape13myajfZCXqyVZKs3bPAAgrAJe3j/p/Oml8gW3czMiS/iNTr1qCxW0gWBNoe/Bxo0gBBdtfI3W5XGIQV0WwI12mL+eAqZ8RLh7CZhlTNNawstn+4M/Qw8auMzWOmnyJIVJTIGHa9AqDMyIc5XWVwDz1S1wdQOEKMdfPwHHA/J8Az48AU9XwHRAnq6L9sRp23a7mgCh/qWAOUrLkhwISfuFBRKbcM9sas0xHxXYlrsx7cnnhhWTkpqVTAuzHkRmjiiNHGUS5K8Jv1QBrLGnA5jiVENd2XxfjBxQa86RgTgQGg2MNowkoGU0ZnYMcQLSov5DfXY3r4FuXwPlhAhg0ucW5gnp9g45ZQRVV8M8Ic5TyaezkHk+3oLuXtbcqCSgxWHCpp/QwHxFaSnaQAwTDmEqQG1RaKVCBCu4u2ANSb0w0BfTYOaxdla1IDuOlDOjJIKSluDxy8fX+6tzXfOE7DIGTKc6Xpfbd7+5BLTsXqHfVwAOQDgMTVJVQKqmaTMDek1/zyw4Aux2TJoHSa321QfF1CAYvzAHmtaeqRNotScfjKINRn3QVxGSmhOEIrgVjcsnhkdzX4gFIKP6/UYkQSZ9rUP9zmlXnpf0ILVRootAI4EWfndX0zYwxdpd/joSriKB1lvQ8SVoeQFaFwEvA+/1Fq9KDxq4rIAnJpPcCGE+gK5uQOuiAHZEwC0mAClIJFqcJ9B0KPk/dP0aeLoWU+DhCfjwGng6YKUJx7X6qoLbCD66jLmCiNec5khY1PMbMoChG70CFWCaVRv+3lcAKZvP3hcQoxpgYOaKUcuLOoN6AtccUd8jrGVTTeoYFg0C2sAFABgWkWROY6lnxpuWFJ6q34JU6tZyQ/EAOqygdUG4fk1MhQDm1yQyNMzrRuOKhwnhMJXweAACXstRwAsK0JyBNGnUmonZYQtalqAbJhBL2xIKGXGSigyrJmhaAeMSYcjbKE0BNgGwlEn9iG1PpNEcyZnaSWNGqR+XgQYQ+vk1LSM7kPXH9tdtA0OweT2iUbi2ndiHmyfq76Nq8nvh93vwakDaj7GMw7T87v7735/BPTn2DIA1gGV/u/Jlw4r9dqrOZLqpD6mFju04r2FlW2/dRNmztaRfq3TjaQRYQ23TgZUcQw1I1SjJavGZo0V0k5gG756D0iKgdXypQnTNi6W0Dq58GT1o4DpmKeTZPPzpSsBoOSLcvAYAYM3zmQDRsKYZdPMa4hsfRHj6fvCT9yNfvQZM18iHGxzpgGPKuEtJTGPgAk5GtoA8ycNlXE8BkQi3SX6wJEbScFOjvghurSXYhr+3wRnVnyX+rcpwvAZGAJqork7qG9GmlTwFRUTVQEqHY6l8Ycxpgeu66irEFxMZWmndM/mUudQxBDN4EoCi1wJCloZlNB1AISLciPkvH2uABoAqiMQAmub6fNYF+eVzEWDWRb4LERTFZLUpsOrym+R+J/A0o5aSkooc03TAFMUXY/fgzakGZD2wWQORYk4V59bQxLwFrXbxed9
n88zteHuTjTFJg0Xi2njQfD899YFCm3HYqbHVjMaAU01bgH/+KJ97U+ZmPPBrfOtz82RRl9aWKtK2fNIp02AzagpD8GoqzPdalgctn17TAxaA4jf3vnSnafF0KMesXP3le9pWIKdhB6mEEwaLqwerjdDTAL3nSSipNhbZbKHuAZoziwxKd8C6aiGHW91/t1IdZ10l3UXNr/n2+e78n6MHDVyemEh8UNM16On7Md28hvzydYT1KFpHTgW0MM1iGjw8RZ6vkK/eB756ihUBx8S4XRLWLP2+fGAC0D1YYyD6T0w7hDAHHCJwlQPuYmh6hfkKyaNiuwA25ZxqcAaVYILCvFAXYtE53CbyochlY43s9t58FjppsEiCBGgRVquUD0gk1UoANJTWNJLM2zDjDAMtBT0izNM1KEwSqp6OoOUKIUyYXnsd+e4lggXZqAlwWo4avJHreAHxjc1aTHddwDmJ1vXyORoPtPpbKEbV9jS6VEtHURT/GtKhBOqQmaPXO3CImLQv2GTMBkDiCmRiRq7mxIWqsOOjMmMHXo3WgyrZmmZ0qqaf3aGBCgVXBYGVsSuDix3r9ozMR98V7aXT1PaSgUcgNvKzeRNjDw/VkiDafOCKzK2UP9BK9a/Ma73f/l73Es43Wvge7ZgGm5xQO3QgFJagFNtffWV+V+jYJ4j32rOU1hJLBzmNTJL9T8PzqUAVfwxRFZjnQBraLn8ny5W9k6ALSkct5qCv8yrl24632FS+yaN42MvoQQOXLeDEwDQdRGqfNeosrqDpqiYTA/D5W3m+asyDd1mqyR8T4059Z96HBchD9NnkwljkOwmzBkDQ5FoNlggRSyKkSfIfmqTAvpaSuy8ATdjqqcifk+tvIA0CA8nRBY2Upoh6YanQoGa29QgKWYIX4gGstigJQql+Lg+kJQTetJCMYl4M2sphChNwuAGtYjbMACge5BlOkr7A6xF8pyHvKpBwqV+ofxWUSkh9qbvfzZkW4sU0ayBIkGCOSXxbdLjWYqsqCTMDdBSfQzwAIVWTquYfTZ02iswI0eo5opbG0k7OZjYc+YFOgZalTNjr6NYggJIIa9VPNhoXxv6jU8EeaP9sgib854D7bgBW2b0fEds9a8APyv3Vtd736WI9oJ1X2oDqOWZ+EY00rUF9yEIOuEqvMwdQm+hKqhXnswNjT4Go3meZASqmadZ+g2005s6EX0gBFs3sEqlzjdylpFGD9tppWQW0HFhxvkBA2KEHDVyHIM7AxEBQqZ1ClBYWXhJSKlGCWliUNfz2mBhHBau7NZf+QcwSueidl6KScwNeJcpPP7NabZkJU2DMQZpX5qkm8nkNDNjmCA3blXSb7qzE1DuVs/N1edt8R20uSZbEyai126ziOkSSlI7EskOyqYNZjkjYmjXMV2O+sWMSgE8sPbymK2k6SXGuNSPnG1BaENRGjuOdBGBYEd6U2o1hgGavew2tTKCaImc1I6pJkeaD/FZBLcwH0QTDBE4TKCyN4xz5oJx2AqZDo41mqKaTSTUg2gBYnZuaXwZg2P3Xt6qvNfna3L8iTekziWQFlFHMkyMz2mb5dO9P5YSNgArluxasfE3RoZ8GArJFkyAxQ/tj6nx0v4dYX2xevcl6U51mcH8iKIRyb2QO3R2fFzUA1u0toNWyiIqQ01Tm98nXvqUMtwnY0Pu1s0dQI5T46vI279Edb6kVXykwCwSQTzdR0LLw/9LVQQtlvxsNq6cHDVw3s+QJyAMGiCbEq6fgw5MtYw5TLSaqwLEukvmdWIBL2kwzbtc04nG4nqKYIUK1+8ZQq1YAKNIOAFhdMQsVl4AOeX+IKI31+l5YTdsT822dkRQZujmB1m/Tp6eObPIjUn9PCdXlLFF5QAWv7CIR+5BxB14BdbNY8d0EqzzNCEmK8R6TJVFHTPNTzFeSW+clOeS1hNSSVpKntJRgDD7eSg7X8RZ8TOBUowyR9b3zj9F8KObjsC7A4Rq8LqDlKC1TzLxsvtEQgdl
MiBMQZ2DSivRaAr2E0gdtoxLkCRXQCn0ghz1vJyCdACzTsnzi9IhiUA2E9ZkoAyzJ5n5ZNJrJ6LMWqOz7EVDJ+61m5f0zq/f3dpqQFIuuwg2y7IGkGqyds2ia7j56LSw7YdLGBaAREDYCoJkLO7NhUwHklHlwkDNZXu+B1kDTqmOtQjFQ79sKGFfhoa2G4yNdgXpOH6CTlVd5ADMAFwFDTf8aBi9WEvkFEeEwX9dnMIl2VTSxaZIQ7hBBE8A5NOZ9OmVGOkMPGrgOWDFxjX6zqfeROKBam23RthDGMDxoHVOtKWgmPaNAhJSASBlzDGXT+ERfYyoAygsGIXLr5yC1yhGgSZ/iRM3kSrOMYlkdZf19VsZvfbWKpEUQU18fuYRuM/q/A19XMcHkVbUNKXHD2hGak8w9hQmEUMop2YiCgngqzK39ay0TDMSizo9Vkj/qHM/xGnG6LiDG01VtO7Pcij09KqjYP83rqj2mAU4JeVmRrSt2DtWfAhRHeblvQM8l2pf9DdH6fcl82Tx4gYA5IwTLvUEpMxV185eIP4hfFEATMHEJaL0KWbg84JRjR6f8VXuA5cFK7gPNd6MCrb70WU+sjqkE8W+xCTlMEp0brHIIlecwsj4MPz8xb5tj+/3R+7R85KDbQ6WyhgfA0fkQy/HsjytDpdI2ppdOyGnMVUDoBKLu/chM7YVKuNfVp88F3KDgJfUStaNFDIgKXsgH2TvrEYgHBTOAcK2FAWKrdfW5ffegBw1cdHwJOqqkMh0aRl2ShLkmCPdVvS0Xa83V/9T7oXw15cQMi1srEX+hAli/J8y/EFQrBGShBAaCmo8ABgXC6sVf5/vqqy/7QGlr150VHCmYWYFqMz0Nc2czX/XqerPhGOBUI+5YsvVZo+pINa9ea+O8Nv4uYOzvytjWPbQXRJYMq2G2GeqAZxwtwlJBbIpqTlyP8rzTEVilWUSIUTbychRNSatplPlMuWhcKWWJloxJpcEEyrlIjQyIPy8nBbZcAE4aWG7BapPnA2EyhNqE0yToTbuJHdCi/nXn33lVOqVZAfcDrJH2dgq0PGD5Hm9m1owaJifCGUrggUWtEoSJigVk3AbIBLxLqAEt/wyz3xutptWDVh+F2NQ1BACKjSZXamnCAZv6lsv4B5phFZBNWxKyLBEL5DDQStn5OPUXVoezlj6skcCAAZ1pXy14Lbqhl6zaX1DwMj7ixlTBC7IXsxOMp/docAalO9Ci/bjWuwJa+XCDGA9IqKBlG8fsxoB+xgIFUwDWDITAmBHqQ9XFIdUoQolq8mWWekmtPBqqkpHYoqvpQ84tTD5BN2qujCNv9w40yxdzDGAzr+QqPZPGARORVA03qcdLzrTWvCY/l46REPtsI9QOvhbqq+eqwRpiQpziAaTmqazmKe/vCjDmp/fFzkRRLi/Poxaf9YIBu6imGYerg/iollvQtAhArHfil4JeJ0SEnJF1EicAeXnF/JEQikZXnOxhah3tmixq0vNWn7B7b70LPhy8z9XqIw2NNgpCc379rDHZjdqxnAcrP8aR78qOK78xvxLX3xtojeiUxUh6TNm+UaYMDcjSJRYZF5lOgQv8wgNq6gy6YIShyb0zE5KCUQmvZwZIAnrE/K7+1CgBT6R+Y0aNOG0Aa3QtXWtWNCCz5M/Z87FCAQ3wU+3lZTWwN3PvNDoPXpQBoOa2in86YJqv657wEcnpCAoustlo2dsd5+lhA5dJ3e4B8nQQBjlnHKbrZtMSWykjUv+S/Cyoqc90g0yM2YIvDLg0/Njah/SA1YcOG3mGIce192DdQr0kar10ekoJYl3QChzkRLNEJoXKvYlJyRhrrpu5+KnQbjJupR9ibjUvQDYaSwkfwqrBGtgGa0DmKdimS3IEA8U2MZL06tzoJwwQTONtg2QmZXYHbUjZmEApINxowEaMyDnJtUIQ6TBW8yEaH9ahBGxYQWaodlXSKKaD+MGClgCbryToJ0y1ALMyqpGD3SwBXojyTD8
QSqKxt9DsVaDoaaQFvSpoAdgkLI+oBy1/jaoN2DOumlVPviOvXdtfw0CMXceBaMKgBSrwV04jlWjBzqflQKsEfvWa9gBYSP9aPqCk5wgYSrK7XksjDq1zRSTnF/KMf+CEt3qGtgdZBVsz32eom4Kg66um5ggbIVhJusYAtLEkiaVHhBGx9KQMxDgVRCnCbl5VdZs288LTezQBGSnJIrKeWybl6kIhCpjDoYAVUB8iOSnDpDX5DcQO3Eu7qM0pLUTZnqdJHsV34LaLZxhFmxqwnB60RtGGkYAlyZ6doyw0iyRKCszItVkjAknVi5gr+ISpaGEcppIuIJFYI7Grs9FzluKfAIjWCloQQZJjLoEJllsCACFXaa+vsNGbKWxO9aIoM92gvlyViHCIFiySwLNtiivtABBBWnU+aJPJkkvic/tCBM1SSaUEYsyHAlb2XupWyl9Mh/o6TAJag/wbD1q95m9CS7lvWOt03uRajeene1xDP9N50PLU+748eF0CZP4awAlNq4mgbAFLNCcTHlHuyUpgwUztof4lloPrMQPz2oC80FBvIFdz4EDTatJsBtpWm26iXQ9ITYEGXuwAjHPV2FmqtcignDrq+5n1ASBmgdTvggpOZqYm5VENiKH60Uzrsk7y22hNKs/Gmy+LHw0mYGk1+7yCcCgBXqzz6cfN04JXpQcNXJKdTQJcnWrKAGghTFdax07BawqVIYgUW6Uzz0yG13Oalig7XWdh/et71PSMwzQ9y4L3PjYDLHstY6oRVwukkaQEcghYXU+xtBbIbJqjUGQgB2XsFCQyz+zuFECLg4UdHw30O+hx1pyxgJf+jlhznXRTxumgJg/CQoSkUlkKLjmXq7TuX3upXea1tbETQTmzSYiEOR40Kp2AMCFb0Mh8QJxm5KtrCcu9uy05YIXUBLirYU2T5PyZVjVftxqWryWX981zpQVOEU7k2Kbgrj4Rc5oTahHTgvs7C9SDla0dr0U1Zr/BOTyvsshDf+4IjegjLj7L1k8yHFbTGsNbMEZAVfPW9skSuC3yMOsaIAI4sybqc5kLr4HtjbGZA9NsRuZBX9ZpJyLXQKukaZRcw9wAka0tMbm7ZHft+9cGVu3MRa+56rFe8/aP2vyFFjiFoLlyGnghx1IzT72pugjuVJ8n6x4OQfYdpoMELYUo2pbOazP25dX14YcNXOkIWkmqKmj1A5mKJ3pAAJZbxOmAoGV6rFisN1X0JokKGpX5eBrVO+sZg3zWVhew2ooZWtneQKt0THadko35ZHaFMfV78q0IEma3GbxPoVQPQMAcJkAbt1HSCMwpiwFcTRiU2+i4DXX29WI+tDwvNVXAfALq/zmECRwCspoxU7DySJYiIJuozg81hbc3w1CtJdk8qevgYJvdgjGscG88IByuajKkVtXYVN4wU6EFX8S5hi57k+B0VQBrLZpV2zvK1oSNt9G8HSRktOuvRDnSvj+mn5o9QOrH4bWl+yTi9iH00fkszYPvTb++vY1dZepupgerxkdl1g4zvTsQOjXGoJI/QRZIICpAb9qH3Pv29+UjW+MWMdhrWjsRuJ4YNRG5rDNfNUKBrESxatWWcHXjQuQPm6TkHsh800kfUn+qNUwPYoEF+BvNC+06tPej+oQW7dqsVw3oIgDIuVa793MWAvjuPRqcUYqprkepfjChLWuU19L7BROaYrH+xntzgmgzXnKmhiGVy3utwEm5PbEyK2FaKK1BCmgl3oBW07m0BGeYUU4+y2o6BLJU6MiM7GygWZmBmejmoPZzzqLGczG6iOZktva9EF51MJc7ZDFsMWyj57pozRGtAEYqicVoPZcUqKia1UhNtCkDIWx7e3lem2E+SxQfR2KImdIerncUhwmYFJzzWrQWTqmtX2gBPj7XZjqoxlVfWzkny8UzxuDXgl8PI8YBbDULn8xuWn2gMdCcAyygXaP9HDbjGHxuv7VkZV+0lrk2EfRMrwevvWv12pW/776je2lqauMamNplfrX
3llpECpjpNSzBe2gaRN3/bVJxpyWQdwR01KSaOKbskuN5UaHKCU0WpZrXpZqm51UqtGjgRlMeypKZ1R+2aQ/DrV/Vr83+Xo0kNqyG33s//R5oWUcKK/PWkwDuYBUUsD3szeRZetjApUQHTYKzvADNMwLcQnMMNACl9NGGOGOCglyTGEhYOjNQAYgd0OqZmantZgoz0PJtsA2w2oXmAEzD84VphI3mxSXIhPW10Kyfmz8I61G6PsPMIwIyDMD6KTUA5uanzK1qaGJxDUC2pngrNvUO1XTHIWKKUwkcSSGUjSZaqJgVF8M9nbfho9Lf5QRYlK1I7hPmg3QAoLyKv8vMPNbmxaUA2NkLMwDamnG+6zVDk9Wr2W8k1HgaAZptfs9C+qRjX4/QfuPvXc5dgbG/3oj2mLV8V9+ZllXOTSgloywfrZSQCjU/qHYGGHfe9manYnZ3r40xtoFPrdZqQLZHPhIPqCZWoJpZDcCq2d8Odr6tUfSeyxctn8H9Vv1frDdJOYE1l1CE7K66i53GJ7lf3UjJscM1MB8AvoblENbcsK5EFIXCn/p+ZjYHMjd19D6P0Jt/bZ68hkrNc0PJLfR5hc3ztnnx86OvzZq0njQIn6aHDVxxBm6eOimkU6GtjIpqBkiLxLyzJrIWtdVCYZzElBZQmsq5KE7C9EOVaqJqUHUt0GbBpMxIqAwic/V3ZGV6ObcJz3ubUjQnaG6ZbsLsU2yz+oKCjEW3WHSLLmmOF0+ompfNQ9KNZsDfSJBOcnI+rxIhpOegbsES1CSinzU9scIEirNEI4UJawhImTU/xEyHdY6rgCB3mCBzGNShbPk/gRhrJkSKouVdXcscqN/C15bbmESLr7RWOEiqHae1+iX3zC+XkO8EYBKrPZ89/6kwFZ1+3gLWOd9Nb1UYNVP01/F+NTl/TZjuS0iZn6k8cQL6ckN2brnefg1Gn8hvydKRsKm1uA3nqaTbogRMBdUYyRg1dHwO3Uy4LUEXfl243nRycO0iUC9azYhsRWbzChwAaB5g1bwUtNR0yElfT3Mpj0TLEXS8lUa3N0kCgFgEdAa0EEDWrR6KgHfKNNiTAXbxXwKapIxmURQLgK7PpuwY1edZpgKOD6Bdo3Vc3MQC3JceNHDxdI0831SAss8983ShoSUfw+UjIQhzbhaiaRpZK0UEiJSuETI+3NTnfmcVTUv+hIp0nNlFKp5/WOdyTSwxuoCdmg2zRlcxRKsrycmM0uiRAzWlnGRHV62IdUGy9RDy8zF6rWRmwiZqyMDLS6w5wYrVWlQjJdFqpvkaMQaV9PSmSt5bWzqqmHvQApg1eozGGImr9gIC0UGUwbhl5v0GS5mRVbvyeTCeIYzMVnvGJB+p2jPqvlp5XxljpHW+G9Dyvos6viptG1Wfh6v2wRVUjMeZ1B7JgEN+6PdH6+TflrTa+o3bGzMmW8GrzsOpLTMqrmv3WrQt4wt95JvnC50FYRMYYbld1ssuaTLxlZpNcwJNc43ADaJ5makaOWl0bBAAA4AQkO8CwhWaMVSNzgR23Tdm6ttZEzYPvpt0G7VZTcDlN92zok7g6s9T5s5PTQdavRvgvvSggSsfrqUuYaiaFYDtoioLTx+W1yJK3bqpOZbdby0hl5KYwJizmLp6M4GR9WsKhGMJGavt2fuCuj2ZtnEfqhoYYUmiYVIw86QxGZGUKWhljbCCOdYgCwBIawEvqD/La1abe/XUfb5rOiDRfs10iKiFajkD00E0WyYAVlxV7qsvHZXtvu0SHhj0WddN5zdqq+F48qYVewa+J5s/xq7V3CZtwcwDmZnDfKHcNlJ1MBYoEBSmtF+N3Y/Dj29z3+W4dmx2QBEMnM+jAS8M3lvpKqpmxtE8+HkfgZbdCmHL/OpYqWG+9tk5svv2811aACVLkvUXcs0didAUxO33v1XUSIuASZyAxSwRBApREuGtLBmA2sUA8CWQOCcBN7t3ADSlqslMWcaiAnVUsz8zasj7CRbSm4VtTor
25X7rn5f5XO3zTYFnbIWIPdDacwFcQg8auGDV3uMMc/awX0xAWYyFXGWDpr22qe1EQJSImHIuzoJ5zOC8td1vugtT0GCRg7Tm0B9YVXBrARKCbPbyl7wt/vwmPEcKt7AeWCU4RAEUGvkj4HRo5oUplEgqC5MfaVWnwueb+fHFfl0QRHFAm89Rr3WYrkEkUYiCx9qOPm8rjfs7bq7bsb6zmqz7+amNvxe6LRJvex0DMt8QdApdnUuq9+1vwSooJLfJ2TEBez8iM431NAKt0bxUKVwsCKLZjqkArmpjsQNqf5x85iT1E4JD/ewSK0V7Tn/dkbZFQGs+NtDyxXLD1IJVPJSmjtz5oSNNCHES409eQessJvd4AK2ztK2fjjW6VQtCW+HnphRbUzYnSSufnMHhFiEn0NUKpIMcN1+LeT+umOZrCYISSQe+/9opKsEvXCORe/ZjAoYUwa5Vg6LTxDb7Xvln1dLRpFC8ar1N4KEDF1S6cJnmw2i4jC2DLb4MVBUfAEFtx7E7j/2s4ZO5uY4vrmmtVeLVJEETDKyBMasGkRgaxm7nl+tZc8nGXDPYuL4Ule/b5b/zZAvTgkMSa3Iy4MALdfy0gkPQaMHQAtjm7G4+gAbcmpwWm8MYITURtRaijcFMs2kFRUmiHK3t3mS3vzG33zTJvm5eSwqEe9x7VfrnKF4127zWPwrG4NmBAlrm7E10wRhsnxfkzEHmE61g3Zkqd5i6+XXM1+PnzqLsRtRrOWWudo73mg/ctag7prn3wTPdCyLYe7aj89s19rQ5eRZU5x1o+cTIYmNJ5hr8YCkb9mxKlKWugUxUU08svQTQJqQS6o5pBc0HhJvXassdn6LRg1lO4LuXkqKhYBeubuSZcJbuBOr/svxJeS6D2qC781nByvsS+3kle426fkegVeaU2tqLBl4R42owl9KDBy7zY1kCqAoZwvQATeRTiaqR+l39MKCb+NguZKNO0yglX5xdnJhF5ecs2sRyi+nwBDlI+5OkrptI0Kg/D15jjcvATEofOWk2GHipNEQ1v6sxvRQp3fLDZBWZydCDV42E0ntUIAGTAJhqY02Axh7xoCKHnRsCYKXSvNP0bP6IwobxA5WZM7etYexRjMgndPscudHn9p2f50giaIRAuI6MOUpnag4oEXaBZIWJz4d3JUphpjSeP2cxMP+kRaNu/Gs7zJ3Q+nUKw+h8PX3/r3dD/VlGYDXSuFDG6v2X+8EFo/Odus4ItMrvyp71Pi3j3ubz1XZI2dJX2jBzA8fi3yOZjWjmOxOQs4S3czrKOk/a4WC+KSka5PIMsUqrHotABCDJ8ymV0PnwGsCsgR52D3mVnnAaSBaZGg19RJbXasKRB7Eyv24+vWm7Aa2RctCBl52L8R7WuDhIFYMVASnVMv1E9WE1CyivavJrJ5iJagg4oEEcCSAFJo0ya6MOV4x68JSuud78BeAwHcDqhI1ZHqI1UZxyxBwqAwU6DSCOaxcCLkIt2IKiEi3ljZpZtSxrC2HglYgQaUKcxOdlgSvIUXx7FvJuGlfSc6t5tIxqNA+mgWpvHh96for2jjMNwuo7Wh6cL5PlQcmoL6El+XL6ea6VTLJK0/Ic5LeBxKw3h4ApSt+1uylgDgFzJFzp6+qzAsBw1Rs0x6Xbo6y2vKRr1PtWfXCIhTbXyiJbsBoxJQMqMV3Wor2j3KaM6qfoz+v9EN63dkrjawCDxpqPJ6IapZh1ECUEvvcXchdtSW3gQPMZtuZQY6D+cZRgpRJlHEpuVEKQiFLWNkhO2/LzbPlj/lnZ3gqHqQY8eV+alavjXNvz5BW03pWiClLppYbQ43gLtq7d61LC5ulwDV7uSuoGaQJz0PJrTcBaM/nC2/w91VYode6b0Hg/hyNNq7+GAy+gVmWZ99T+C+hhA9fhBokmyatxjtoY6sbqgzQ4qO3VnKnqROW0CotxDLeaBSVRyM7V1C/rNTPTHODMX2kBQsAUJ8xlw8s/680Vg21WYXczLCp
RNsOM/YAN3yHZSun0Tn6L9vJ5T1k3PBPAqn1V7UdNpqRtUcwXoHPHANoIrFg1K/veTIv2G86N+lQSJ7vKAJtoLf/MuTJ2SSWooLOkDvQdWPn8uMSMdVBiS4ALzW/Nrj8HxpwJq6q0KUpqAYDirxTTb40elRBjAShmnWcFHyLVeomGYJHRFuPdA63R/JAzy5j2Fan2rrov9RFglzr9e9Dai5gMzKVfmazHWk/PWx48GEbaj0r02hW6e27MlD4SeToUBmv/ShoEb0vB9UDpo0M9Ja7Pi0j6ewTt2sB5BeUrERTDBKy3xdceboB8F6rpcAUsjF6CNmpuGK3671p8apwWcDyCXEqHT1PptUp52fXvC5K72lMBq4EC0M/pBsDst3qePrn7PvSggWtVacir71FtDqyOwzrRzn5tpUzNXAiMwasHJG8is/bUGoXkK6nDa2/GsHNGnFTqJajpiUpvrsQaSkDV6UtBmE2ESaTKKDsA6+u/EaEEDxiJb4MhLQ1Q5yjUih5ZN3ssQLLK4l+PYiakUKRGACDLijfNCqgqIuetb2wwp+zAiv3GMR+Pjt3fMrNpI7zRtE6ZBPdAa8m55GaZD8PTIZo5V/5Omcr7qDWnZgQs+tkU2rYkJjiU8UNBiWpFcyPvy0ruvs8lOdu82N8evExbNQ3hXEftd0OnTj0KCMmo/cq8AFo0MKUepGxOJdWhnr8FlfE4Coj30YH6Xa3sj+JXBKpP71TuXc+ue+3FKAaxdFD0gHmrg8jSr+qQBZxC1vB57S8XIqD+rqKN5SwJzIcr1bi0DmJpwRPa+1XBsb6uxzUVQvaip3saAVUzsa2r5eS5ztCDBq7nxwwsbVfjNTMOUYrpHmJAzEdRzfPaAgyAkrhs2pMHLyNjuHndTPymhxWRk1imlhkrRWVq0pCyljmibFKdxcJxMZvY7woNnncc7NCqadVabWzReaw9eUzjYyBoxCORaA8UDsIcDLC1aGYxE458gJZrbNUD9He1aR6qZAvUTaVRW6U+GwUtWFuB+lXyFXvQyt1JvBZroGX/bE6PDrSAgEBZ+6rY/QckzpiZELUjtcoGwmDd8wCq8GA5Z5sxd4Clv9ol8w8RtQD/buhV5rrXtvrP+u+az6ACHQgUahSs3Y+dpgcrD1S9CbCJ0nSmWCMTbOyzPly7LyZgIBVBG6AaaXh23r5LgJ0z5JpjeJifIE4H0DKDwqTh8xNomsSke7wFVAMTAVGL91pghyYrlyLRh2tYix6aeouGe+1BzVk++kICALaWkAYABxVF3D4fBcGUGIFXoAcNXC9TxpS2BXOJZDFMhAa0AFSw2eulM/rMNAaL9jGKcQyEWteOpwMwXRe7c+nF5Mxdpk2wY85911ijvqeRp5y4+rdYNDf5TRv1xgMAA6AB8+LCMlOrBJAQokZIQhOGeafqhAfyUljTAjyK89j9JriN5OsChsmFHOs94zLw6nPggnecQLlMrkaz6ylgSYygJbQisVayr+eQahztps1Z6kLmLMf7clxBA1+MgxkztAaRIrDodxdqPqeOYrTgVe69Y+SnwKQnHywRsB8Kf4r2ruKj8cq1NNJMWg5RWX+e+maRjZ9lz3zlGacy5CLo4jxYsZvTxkQJNGb5Mo7u2qw/tHJYZe+z+MyCrldmKTM3Xz1FsCjp9RZYpLM7TbP0kwPEnNiXjcpZwA0CcqQ+MgMyX8yXLI9smsS3b5anvg4i0OzRjYZagDADbEUNOn/ajlYlysCrS1kPGrjMTIjQltEJpq4XExVX9O/NgJz3ba3+O85tiKpLFvQRSFI9fK6NBl3ri9rOoy5gX3yXB6DVg9Wm5blSqVmYoW0kTEKUliDla0JTLZvJ2oRoWwjVCCyAIyuXIQREk6rIBWC4QRQNC0CJQvT+LjjzIlAlP1cIt4CYzYmdF2Nzi1FUDuI1qkhtsrcwS1kcAUAQhR1zlNcyz0AopkA7j62tlpMm9T2a6U2eUXuMfWSm2gpeMp7E3IFL+/s9kBk
HRWxBYe+3FkVW1sWZ34ngc/q874Yq8GqhVzcWMy9ucoayM0HvhZOGsLF6GJ3yJ8r31SRo1zXwbPLv/DiUSk4kBRCCPut6WwaQ0kfPgSYTDtO1aFkhSmStRdiGKInIGnHI6yJVNoBiKgQgwRzGn0IQIAsRHIKE1DvwshqJIdau3tRpZfKPGnOj37ul0DYAGJvtNS57ZnB8471qKjwmxjEJCFxN4jOagiZ2BgI4tZPjQauPfsvb49oq0QxfFBNA21bAgGqaReOyRoPTtbS+0GK6pnWZ49dMWCkLaKRcAWsUYdgHHhhJqRYAqKHaMViNNgvdNW0MJWTbGHux1ZPgVuaqfQHQjUQgmmTtAqJFhW7eUBem+Lao+ga5ixHzwRiuqLHdv4E7cD8TmAFNCbBwzpJomAz16QWNLAxAzFnmPUjkZzO/VCtcnCOrUmJ9xMyn6DVduSd55SV6HxRyiry/02OKHx9tjq3fFz8YUEL3R+Al2k8dkwVQnKv+0p7jsuPKtc1/5D4jAKUyhZmty3obD4aJWk3ABCRHBlo+atOfzUDeTHpRUyNsDKXAQW99sLUdJ0k5oYBMtfqLnLuuazPbr1k0sUO8xuH6GpgOyGECrVeg6RphmoDjHVjNgxYyzxZCr73mcrpthGyKsZgOm7/zQUDONLJQjyOzKJmvzFlGSM37IshqAW1DLaBVFJqH4kDrvQpcttGmQAW07G9Ra7WhGQ0aPBVgstB2I+fXMvNgvzXkoeqDtBbuYRLToL0OE47ZIpMkEGDNomVZM0Fjzvbag1afm+TDuuU93GupGB+IgARhyLwFMGGO1Jh/RmbEErihB5FerzXXBATrZaXzZuBEzgXkg2E8tT7BKt0ZWN3Ht2WRg36e9kLjjUQ7J2EozAgUsFipqNRe1xhoKdGkf3vKGSil51g03mKuRZ1juX9WAUKBBQM/DSrj9/x5r43JiHyhWdsF3JkrJTiiTZ6Wa16GUH3e1uZ7tOftaWTSbM2AuQGJTSpKF/QjYzEtXk1ZUIMxBViH4KD331gyuB2HvwcDLUrHNiWmDzoyaxnXPnW23hjcXDODhc/L1lWrTMaRIAD25CDh8ocFdLiRzg5pQVhvAW3rxLcvwBZCf9TPS9DGrev9dStaV7gVQNPPy2ugATTfUJUmNWHG2um77F/OQJh3n689J3LPj/hVDNBCDxq4CKJF1FDUak6QA0LJ0eAQW8lsJAno3xKMsRyRfbdcbfWOEIAkr1nPza78lGXam1nQNCp7byaKEU/opeTE0kgyZwwY5fYEtfBuVS36FvBNR2E9TdBNU6Lc1FtOJEpVMSOCGgZsxjHR1vajj3wi8qV+nUC1aO6l1KcMNH3NTlBUx8UMZVwxwOd1WaCGr9UWSLS5kXZkNSEFjKq51ncwVjjXAsLtObyZ6tTwT92ZN1yWdcE2r1XLtjwknzzdjAWtNtIC+vbeG61v8OwueZ4b0OqsH8Pvyo81EMMBGAfIJjIzNznznc1DmZ/2/gxwSusO1RqGoDW8mXBWuABkjq1gtIyfwMjgQJjmJ4hzBoUImhZJZF5mULyThrpqRiwBHMuCtoyUNq8EgOWoWliWsHorl6fAVXLEQhSz4nwADhmUZ+CA4icEUY2a7k2xexGGXtt6rwZnSH4NimkwusXlwUsezCTRg6d6wHDWauXVhlw65mpQBl1di5ptZBFxrm+TZdpbjylrFOlBq1/qQVAYmUWDZAjgBNW+TIoPbgfkXMHtFJUIOSdxB2p/I/b3akpElnpiFglujulaIcAl2Oq4Lynhsil+PFjgQbU63BO0jPpmnKbRehppSwZePngjuJsyU6FpW1arDdgHMKBqXg14wdYBafsPLgKGXX1U1fyUptVrpt6k5+exhJyzPscilGjqBVrAudRMawAAmDCjc3jZz8s4h6BlfiQHFiWPcuQvoQCyzg/a2LQ8VfV7SU84WbzRnHiBmsAc0vGQCsUlf9PncNq1e7+QdkAoQqzu5czOmoDWJC4TDlh
bGMpACkAMMsZpfor5CsKnji9Ay0vQekQIEn1oARyUpUGq8DF3bheViBw37g8KUeZplsobNB2Ag/QD42mWZ3ml/jtvVnXBHJtkZ/8cgVbweEV60MBFoCYMtf0OaELYQwA4oOlMapJTp4nll8/Ffnz3UlRwfbgUokT0XGmo6eHa+bhM5Z4KaElYNUr0IGCSXXVAV/NJ9SVkd3zubs5b4L3/xz71vjG5njP7lP+2ocA6AvlL6v/R3jwcqhkjhvrXStxIC2+X4NrZsfu0AUABzG94k4bVrGLjsAFeoqR5E2HO5lOs1THsmEgEDdav2pOb56DMrAnv1u8NvOYYmjJQkWo9Q0/mS7qEzJSnMkYBr+Gx7rVnev5wb0peep8ozLTHLkpP28e4ee8p7zyPEWCZHCCv6w9Gpl/7fS8ANaDlGJ4HrVEFG1lT2tjU8qR0gqwjAsKEKUwgjfqz/nqp07gme77IpRFpc03PAyiUYCwDrMU1Hl1S1VpPCZwe3I6Ji/85krhDYgi4OryOw9VT0HILnq8R4gHh+jXg+jno6kb4Vynmm2tEtGlfrpyUhdfbiGhd1Ne1gHLSDn/Qah1XbSd081PHWsB8KJT2QP8u6EED15403n9eTAbqTKQMN7HORJiOgD3sl8+Rb59rZWY1EUYDp3HHJVYVunY6rloWoMyChFkA1LR5AFCc9QBK+Pd2bW8ZQI2MkvSirEUyR9vCf9Yn2gIo/jCDyMxSXT7AtK8abGAcjkg3fj8pntk0txBq3pvf/PobODOOv2NSMLVxLu601b9VNa0ayNIGtbSajOOYqADlgcxHrJp/a3a+rvp6UHaLTItoBaw+Edfar9yX+uCVvkTSHm9M4CIYWK8483sxteuyGWc3Rirft7/xLOtcb7G9wJD2R47hed90ZzZsjregAWu46HrMsfsbSXy1WavWNPVAVTAjQAJCen844ATXtoq8BWStuVoBRDhp9/YIyJtSW6h7QSJlGdEiX2PA4fBEBG4WU2IgqfHJ0wyeD+DpAKm4YYV8FbCWo2pfAyDJCRspbPRYHEidBC09tlTneJfg9aCBa89+XqR+oJlAy0UQ8LLchQBYt9Ll2IKWal5yYsmBMEdlEwY+DLU1k0A71kA1gVHejxlWA2K9tKw/qMmpWrw3SiCAj1Ls88FOhdfL53LuOYYS0AENLsjefKh2zRCphHy3N/DupSq404oPpmqEzMJglkYD3WqjPjDD+6y29qsWWHwEoQGUvJb5N43LFze2yvE27h60TANpklWpBa1XMY160GoDOE7/zqbAwvTN32M5fV6w8sAyAquRZjUea7Uq2BjOFvvtAMu0sAJa1tXaH2t+GM6iyAMgclVc7DNUhhoLA66XJqBYbmgUjGGgpfmHHKQEnUUQt5V9tqBdrrNnZgYaU2ICIzZCYgYz4UrBCxRqzdbDFeh4jTw9F9BKSaptKIAR4MCrA2OX7uMDOEo4vOVdduXaStBGc28DoNoBt0vpQQPXHAmH2PY1KqBlDkNz1KIuUptsSgt4vQXhDjjeIr94hvz2l5Hf/rIA2HoE391qKKlG2ABF6+ofmI+IA4y51veSyFrNKbEwr9Nh1qPoNtj9oG6IfrOkTDXXjdtK6j603tMC1iReWci1jYcmJitocBDJPBCAwGpqoVow9lKpqo8q1IVfNAl3qEm/WVUFA43M3IS871HwHHaHDLDkvqn8zgBpBFa+RuQ5sBoBVf/oL0kQ3oRt4zxz9GRaDoASIAKg5JmVoA1qAXtzPzgNtpboCzihqRm3jsH5+wqpVlV8W+ZfMgAxwBr5moCasuHXYqO5kUToeSbaW1N6k7f/vWuEamkvS2LcrVzAyyKKfei7zH9da0QoKQk+JSCzfN7H3lkQh5gh7fCAw9VTYJIgsTwd5N7mBeHqBpQTslmT7m4FyNaj3G/OUpi3D5+fZikhdf2a+PYP16Cbp+D5ifjzZ5f2o6Btz7sXdGKYRAAIQUy1WebvVelBA9d1CLiehIlMgQp
j885admDSbIqgzhpbsK4Xjpc+fNY5zYeSsEcWUUjUMl7UzRhDjeiTh1fL1AwbsI0YPYWWs/XmEDsmTtI2hYFjkp5Z5pPNutABNKDVt/KwOcrMrput3JGE8RJ8ZXmKasLI1CT7RpsPdnUh/S0YY3agVZK3UZmvugEVGETzk8LAwrSnQMiSeQzkrEyWXQ8qQoxWzaJM6FAQGJn/RloVYMV0qyBi57kErLzgAoytBueox6URaO1h+CUybr2ncafmkscEoPgzPTnnfND9JxoPNaB60v/XhU4baNG6VLO+qxXa/HR0zj5gYC8CNm19MKNGsQAqaIVJQCtzyS09powli7B4TLkREk3YQUQJwgI5Rm/zb28yt2ZbtBpayQUl6YQhfOzQajV5hXVhZo0axCLWIzMj1gtoUIZGFNLhWnqHRc1Rna8kcnq6As/XxZ9npntfiQRw/m9roZSza+n0avSggWsKqAnHBbTWIQA0S7tn/v3xIQIhuYQ8l89wuK5Je8CuyuuLcRojK8mLdt3kTBw+MsqHl44cnP09AECYEDXZMWgdPVFChGnb/RtoLXlbtw/Q4IBAGsSgHyapLiFdVakMKSqzJJX+ovlH1N5NcQLSik0Mpbs33wa9gFY3JiKIRhfgNoSAaMiSRBy45lVZf07zBwRFDH+7xki8SXCOoYDXHEkDL7Z+q/uAlY3fayz1t1s6pytlY/Y8/j2Ahmn082hmJKMm9NtpgqY9+m63c8A28RbYMnUTRtTHZOB1b3LmQNO0fFRfk2OpfmhpUtruH0mi7XwwPfUAOBIQ7f7cfZb2J0m1K7VweNC6XfMmYGqOhGjh56hCtV830P0bg5TD6vPLvC+uuANsT+W5hq076xNBTX/rETTNWn0jbRu9ltytGThcIU8KUPNVU2DBB6H0gGW9Ba0DhuwLGR+ltYTfvwo9cOBqpUBb1P0CtXDtBqQ8UFAokgVdv4aoUohv4lZAS1VmzPLQTjkwm4gkA1WX+d9EJvmq626j9P6zTXkqXZSl1liYMM9XiPM1jokBSJ3AYxKgX4KkoAH7UU3J6j/68DIFL4YlTkrYMJlmw6Z9aYPKeEDp5ZW6fA3PRBotq2oNGX4Tiy8t2wZA9eNhAmKOiJqfs2Qu9QbPUWsKa8FqDlLlvTf9oYzrvEY1eg+M5Ux7qtVhLy9Gd2HHjL5rGEd3QPVpoWl02Y+/FlruIuqOtwUwkI771f51HSJE8CSSdjTBRCNq7fnauJrOxN70l1fRrtYjaNWcpZzAyxG51xJsTmbRNtib8mxMBXCoMSH6ljy7wqEHwxBLpZeka9/nanrQultzsXC0ZueMQwxaV7SCUNl6EG1slOYgv69V8m0uRbPVZ8Asf60IQxEoV+FhXYh6YwlRQGKX5mMpP940mBilF6LsXwfQJb2DkWCBL6GA13vWx2XLyhYshRqOWfwkXoiyqBZdnBwiCAfwIctGmq4QD9c1IMM7LEMsgMXxgGxqsqnMmnTcY4E1DCwLym8GM3X0IbZu85yyJG0AzjZqeoIwr7iar4EploCLzMKQc87IxKViRE99jtMozBuoTJIh0qWZ58zAHVx/r3bglQFYv6LezFV+oYzUriWh98IkViIIKwSCnnMe+O/OtfDwJkADq6hmMs9UgfuBVO8LKtfTN96PRwbYtAWmTeQgt3X1Rr6tem7/mWqMqOBVDLkbTUsEw0OAgMdyKzlDeS2VGzzjK7X5XK1OioeyF62mnVgBaJPkbHNk1ghyfizbG5SOtSiARcgpSQmjWCratBXQfS1Mahmmvfbl4fY0SaVNLuKOdnvKV7mX5hBAzToCWm2sny/flbjMnxtnBasJHAKQI5i7Chd+PoIHZzdvTgAwX1Zy+2wDWgNTeHlpfPBVIpGUHjRwAbJZLeLLVOWRY59QHYakD4WCBqZO13L8KjW4aNYE497koJUxzBnL05W8jjXpeFQRw5hMCVxw4FRKx1hkY4mS4raoLwAf6dNHAYVZWoJQmJoE56urp1h
y7Qi95uoXSpwL+nvTGYCSm7Q775CCoDLX0p4lZTQ+sFI9WwHMzwdgtnn9jDcGxSZk3EwlzHKJIp0jgEhCjjEF3VDbXLZR25dyHf0r/pyqwUfTuE74p7yGMgpaKFdtTE96PaCAtzF0jMDHMQXuPz9hXOxz/iyVYFPFw3EXCxoys3YBrfWudulNRyAdgbVq0laklbVJIkIW8MixCC8Scu7udXfgPiCial7y11VFN2tI6EClAJYw2yFoFcAaWDDcX+I8LhjbHB/EnK3rIUDXhfOZ1lqZtqadhuVMyjQQhuQYNL/rtdWm6O9IE9a/RLlNEPamU/Pdec3La0eqYZmgVEvWjddgvye+kvSggUv2XO+7sO8qI/STZ99PYRIzhnb65ThrWZMEWl7IQTm3SXWlpJOClT3AzEi59gVrF5tULA+AlG7RxWESJK2LmF3WW2BdSzRjqdbRSZYjKu0KnCMVnMHpiMwZN1evK9PKSFr01phYCtwEYwBVwhtFz3nyi9fPv0jrtZyOz4Oy3/XPyFMPAGVDCgpqfhxhUVNlzEDKhJiyPJNADdP2fqg96k1/5je9VJtqAm26sG3Y5/WBNebS6EKIfWK4MQWvXQEVrHyYdO9f8M9o+1qECwMvYoBIKneYphCJ1Dz4QsDq7h2E5WVpK8/axJBTan0ih2vg6kk1oyvTpCyvzd/VMzL/nPtIQalrJz6o3JvwywMMsEaIxSw4MBH66EFJeg9D8PJmUBuqRSiKWQ2l3FEIodQxZCJMsbavuZrkerbPALevnPmYqOZ19h2Vbb2V4zZg5taeTw0ANvddLFLehOqsVKO1U9Zf6nxYe1rjiT3ixzVKI7qUHjhwiS3ZRxH1m7d2VG0nWaShSXrfxHaDx5uvksk2/xGEoVh4a05AWpIDyZpobBJ4DCaJ18oEpKat4mR2oMUv3gFrwnO+fS6AZXll2n8nH5fmHiiGEulY/G/Xr4GPryO+AeS8IlDAFCaEwxMAQTU/8QPFQFg04mmUg2iCbA9azAZKjCbHq9y/lZBijbzjjcQ4Ir/YGwnSggEg7ylKteopTJhD0NB/xhRqfUifB+e1plPUaE+dJjXsvcSp+iy7itdNJJqP/tS/nLVqviXFq1YagCb82ZiGL8gsz8BpU6dvS49v3yeFdotos/u35zcHgI6qaR1fIByfg188Q9Zirvn2OZrq44drWYc3r8kznNuotiLxq7+rj2YtTG0nSlEAaJUoOKiwZl+r0MZxLj6ZHrQ29TGdj7uY6L3p3o6xGQ6hNkcllw+qR0wltJtxcCZZmqiUcDNLgFkAPK/wAT9AC1ptTmH9vswXYwu+Nm/I4vuG00adyW/J0E4MPASiUdf1EVz5/e338d5xNr5XpXsD17/6V/8Kf/Ev/kV89rOfxec+9zn8k3/yT/Dd3/3d5Xtmxg//8A/jb//tv40vf/nL+B2/43fgb/2tv4Xf8Bt+Qznmi1/8In7wB38Q/+yf/TOEEPCpT30Kf+Wv/BU8ffr03jfg22oD7WKS79tNW6SeDAmxdsf43BhbOEAuYeMWNQSMAdJU/kPUMwSRYjMsqgwtg7NNYpqWglZ+/kxKTS1HLM9vkW6PSMuKfNQmjkrx+oA4T4jXBxxefw20voaQxXeXD9eycJYDaBbzziEecExaVUPvaY4BIbO2na/UVg9oF6DNMXNlgMRyvyxYpn6qWiS3bkSTOuv57GXfU60wkvXYBbIkRaMJ03RAjBOSAqQEbdBmE+4lerfHjBlGiQT1wTWjkla9mWaHmcAc1AHwpa4CBQlZRq3cDp3nPnBlj4jUnEpqEqR2f/RUfWNmH1CQdoERYh5cGtDil8/1FpMm5CepaacNVEOMQDqC01wAC1y7ZoeOaRVhYDBXUiA7CwjNapb0DV2nyogRD60m0VMXLeiTmE9qyeZ49c9WJDaAsuQ4Bt3rbB3OAdstDKvQX9dhqX3pnt0pou7vrpkTEBOpapS+CG5pHUShRD6O8sxsPCb4nzL1nRJK97S
t5u8r0L2B6/nz5/jmb/5m/NE/+kfxPd/zPZvv/8Jf+Av4q3/1r+Lv//2/j2/4hm/An/2zfxbf+Z3fif/4H/8jrq/Fl/SH/tAfwuc+9zn8i3/xL7AsC77/+78ff/yP/3H8+I//+L3Gwvov5a1j0ObQaoSl3JpYTG7IjolbpNqqO938IqaVLE5M9AEL1nW3aiYBzIwJsion/by0RGBuzCGS6PyygFb68hexvBDAOj57juOzF1hvjwXAACDEgOnJNabrA6bXrnFze8TV+7Ui9DRrMeAZmI7AegsiwnR9wCEKU88gzFwBN4a4WwLKU39MDBbtx8VXQ5DNG1CTKqsUNjC7kduU5mT2QStpaRkLUAEgLcLYpgPidEBSv9texGTAvobipcShxudMV5sgmt6vsKfCmokkTG31BqCEjjObn6G9hz1zINAyvfJ6cJy3FppAJfde29FHQvW7Lneg9VbMgw60ROPSe5wWIF/L+xDB8wHZch81wILzCiT1d1l/p8EzaLRTYjnWj5uCSJ2Oc5UcQKL9CMLu/JtSUba2UpcbZkyfkwZYWZFajdLLa/HhxTABgcrzIZtXrfnZPzuvmfQ8fzQ37P4OccSbAzmjNNPrfFpWjsqsSL6Jpie/L0+VAWuGcAZ8/XP8FTUVftd3fRe+67u+azwoZvzYj/0Y/syf+TP4/b//9wMA/sE/+Af4yEc+gn/6T/8pvvd7vxf/6T/9J/zET/wE/t2/+3f4tm/7NgDAX/trfw2/9/f+Xvylv/SX8NGPfvRe46m5DVQ0o1JzzR2TwVidjXbJAkRLlnBVe70kxm3KpUirD2W161jo9FUMJVn1egqYQ8DVFDQZmkDZqte7pM00qCyds1Siv7sF377A3Zffwd2X38by/Ba3v/QML7/0EsvzBXfP7pCOWtHiEHH1+hWuXj/g+quelPm4AsRcqGZGun5N814SOB0RacYc1XRhXDyIxEUnxCq/pn25KF8v0DueayUJdmY3dlqMvQfgAkGKtuXrwqVjBTFlOASoNH4rEVOLSNvB/Bx9uO3O655BVDOg9n/KqnlYhJsBal/49RyZ5AuUmpnF5ESSMsAApniQ55AZU1BNFmLKte7AFnnYO+h7H6JowPIcrG18FRDqXBN535YKDetRta1jLYWmVoHStDANfK/HW/A0oySxxqNebAVl8xfn5hnC5tzehwBkgCnXSGH1WQEo81jmtQ8waHw65ECoK9fk/UEesHohpIAXlXHXgI1Y14uuH1vTxfQaRPv1EXhl+E5wMMrM5blL7cgKcrnxcYUKHm5NZ0U2A6Mi8GQgcy4BFiao96XCbEymHcpy5DLORkjCaTL3QW8azoyGd9yXvqI+rp/5mZ/Bm2++iU9+8pPlszfeeAOf+MQn8JnPfAbf+73fi8985jN4//vfX0ALAD75yU8ihIB/82/+Df7AH/gDF1/PVFADI6ucYJrYHmU1u9ytGbcp48WS8GJJWBPjdk14sWQsOePlMeHtW9FwrDVIDISrKeAwBdzMsQDY06sJT2YgBCmzFINoHcUEQ9RunFwlP86pdC/Nt7dYb49Yb484vv0Cd89ucfulWxyfH7E8X5COGRQJ4XYFRUI8BKy3R+SlNSMCAAYJfsUUBRRNiNSk1K9CH73Wy/+jslFWLgrgjX9sCihh1jFgWPPQrrkp+wOgJKN2CebEk2iZQQUC02bWTtr2jM36D6nJriEDl67UUE183Wpcp/LuPEOxCiKcc5Xc9RocUJz9kUIxO/lIBpMzUpZp8/k8gGc4al3IUt8x6LPzq0OkaTWJhspgYyAgL0XTLJF8pygncA7AumgpoaWUFJI5k3I/bObH0TyNzKtqTiUWkGA1N/bzuhuq3lOvYTlf1kiLHp0fNvbRvwHJ/qoJ6AYqI/Jyo2logBPEqfbCk3NTsXIA40Cdprivf41xiTDhU8pXtYZlhr8GF/AqUdp+blSgEnnYBNxqyfBjWwcWnkvpKwpcb775JgDgIx/5SPP5Rz7ykfL
dm2++ia/+6q9uBzFN+MAHPlCO6enu7g53d3fl/bNnzwDUyXDRpmK2y3wSuAIJEBlovXO34p1jwu2a8fy44ssvFrw8rnjndi3ABcimPqhGdXOIuDn8/8j7m1hLkuQ6GDzmHhH3vvfyZWVVdTerW6IA6mfBhX4ACiAICANSrRGbWknkhoAWlASQgIAWIHBBgNqRIsCFtBG1EHcSF9RWCy2GGkEcQhiAIERiOAIGAgbkEPo+Uc3uZnVlZeZ7796IcLdZmJm7uUfcl5nV1ODLaQeq8r77Ez8eEW5mx44dGzANAc+ux0J9PWoDwsxUjqk0H+xaqJQamJwKkzAtK9LpXHJb5xdzMVrz3YI0J4QYEMaA8SYhzRlpzmDX4dm0FRs9xUtzAfHo8o5OTiR5aHqI4zHZKBO9jbmeeyTComSVKaqCvkYNiPKdlNVg6QPhTW7DkjJDsUd8SNZinIqBKi3H9zxy+56977fnIcHXaeT5Y+0XVM47i7Tz3MsiWiEg1nYbCGLqrJZaMiVtdGrwXmO02GRyxXjVBqDm1vni1dpbzKIF6uFQoBgvilHqgUIEUir3HYWIwjJUYhGtSzH6SBGICq+p0Who5pfygTpd7R2BR+/pTbRk//ZGa6fHF4B9R2THUBWVmK4QOaNNXdSOC9iQxC6NzKwGhDYGLEGvOaphAPy1r+mTSlZSckj5Xru/wrrlthM2F2KVW2ud8ZLjkk99p20ARTnDDs7et5za0odhbzHeCVbhL/7iL+Lnfu7nNu9bkST3XjrQ4LY1JG4vWs6MJWW8mhM+vp/x8rTimy9OarjEoK1zKtJIIRDCEDDGgKsp4slxwO1RpvD2MOAQJYI7rLkYKyvSy0zlZveLm1HeDS7My4qcxBClOelrluhqjPLfFDAchwYqnJ7eYLy9RnxyC7q+Rbh5CjpeqzyL1p+FAWznolAdESGCm4XPvy4LniX5A8m5BMay9g0b643o2zBUWj1hDFlg1Sgwx6T7sYhMD06UN8ok5ZoHYmsIimJUALfYKPzWMPj6BcfamPef+dHlPTZQYfm8ffiY8iYa6HsXSY3eWqFC/z3OoJCBsGIIA4YoC+IQqscKVGjQImeUe93udxVLDrVjQHYIgGe0DQp3D4FKgbHVEpahzFXOCZSS5rW0r1PKoJikYeE6g9cRWFVOaD6jiNimWc9vqASbS3CuDYMH95yPnetlrz3JopRZ+2uaO8dkzxGyf33+zNrWh1h0+hIDq+aMvCNXI6N6b18qhi+LfXc/mZEAanmJQG9cIJCeXObXPP+vj3I8uxFQiNNO2cpHDF42A0YCH8aguXGLAp0j1eSW3cv+GFMGTnt54Dccf6KG66OPPgIAfP3rX8cXv/jF8v7Xv/51/JW/8lfKd77xjW80v1vXFd/61rfK7/vxsz/7s/jpn/7p8veLFy/w3d/93Zgi4Rj367gCtRbdCoPtQi7dpM1rxsO84n5OeHVacadGazmvEt5mxjBGDDmCB8Y0hAIdFhJH5pIbW3LGkGNhuaXAtRiyHJTCMLaQDqOQLY4HDMcJ+WbF4emKEAPSnJCWhBAJw3HEcDXg6v0jDs+eYLy9xtXnn2F6/32E22ditK6fOEFMkWxhCgIR6O6DMh9TdkoG7rkyjykAhaFmkKxpAPo+V2bE5DrIv0bg9+K1S2YcY61xMWciUL25gYBRlRfkuILAgSGA1lBhJye0updzoRjrAmn/+boei0xtH37Y4nfBG2++ugdJ+mT57o+q91+UJ3RCOFMxYJG05YZdH84gK/51C20IIrLKIWAhY1kCSaNYu+4+ShPqu0o7IYOWuS2GB1BEpfUeRU5inKw1xt5QWSZeF4RxkoLlJUgoHtVA98YIqSkgLnPnnQ6o47k5Hzd/ZT5zKaNguP29LifpHZrSvsMJQVstp9HJ2XQK68JcNoWav+qJDXtImeXqdw/LOSj2+02UhdZQ1RZHlY7vjUtO1YClbIaxdutOAGI1/dJFILe5Lw8d9sMb5N6orv9
Hibi+53u+Bx999BH+03/6T8VQvXjxAr/1W7+Ff/gP/yEA4Ad+4Afw/Plz/M7v/A6+7/u+DwDw67/+68g54/u///t3t3s4HHA4HDbvh/WEsE4KdwV9eAe3EEq+qU5efYD32nqkzGLAllSM1nJOSAqH5JU1YhkKu86gQ7vZFo3icqwPWbabJ1L7sAJ14dQi4ng8YLw5Yj0JNMopY7wakBODU0acIobjiHiccHj/CabbG0xPr3H43IcI732IcH2LcH2rRutKPETVGTN82wYRpPA4eBWMOiIUk6ZqvEjrvzKLUnumSs+ukVed25xF2WSBNL/LOlf23WsHAZmeImANI6XWLo7aaTrHuoitc0OkkJ1JF9ei8q9tyI0oIHJAVOvzKGiOzMGL/eiiKtbISCaw7d1k7+0arM5hIeg+tclhibxM35FCNWB+8z7Pp9GDX2QxZGCYMIVB5s8ZLhs93X8g2S+pHqDJLBVjraoYNJryeAYNihJAYUKd52ZosTAvStCw/Bbn0pW4j3hFQVznV6Mt+9zLgwFt9GlwVSDZRjW6AYWZ4K9hCPu5u+6Yms6+zmghTg2dfEm8/3zZ4q4Qmr0HtJEyUfvb142CGjmjZZG25UFLPz7sGy1jQvthuTUzXuU8uJUHY5twtFFhe4yyL7tmZkiz+/ezjrc2XK9evcLv/d7vlb//4A/+AL/7u7+LDz74AH/mz/wZ/ON//I/xC7/wC/gLf+EvFDr8l770pVLr9b3f+734yle+gp/8yZ/EL//yL2NZFnz1q1/Fj//4j781ozC8+hiBzuINRelDw+MBQzwiQ/tP6cUrODGr0oVeNGMIWs7q9jjg0/sFM4CUMs6nBWnNyCtjmAKy6nylq1FJGhHTEC56SkK1p4J/B3u4Lf80DKJ28eSZqDUfrnA1jJieXmO5O2G5O9U8QgwYjhPicZJ/n9yWXjnxvQ9B17cCD05PkA83gEZceTg2D5l5hpYbjDAtRR+h6gsiZWgqnJhliZ2kHTISU1FjB6DYdW4p6e4OHQMhJIEMTylo9CVszMS1Rc2chNARA2GKEeN4LfmXNIv0UFQvfkZZDAvJRf+tN0oEhqVChMMk8+69b1v47fXeMIMUp4ufPfr7/iesGcQMAFkW0y7SIB/pecKIjwJd0a38OANxwBQnmRwKkNXG5XdSl9+5pJsJhYOGsdRpYRgRQgAfjqVQ3uaZopQm2GC7DjmBjnIsFAZwMsq6OgxmHOx8Cmw8lNZEyd1TzaPGrEKzJuQKbHRBKaApjrPzghqkDSlE2amDiWlXsVmLtGZVarFoy3fcNhjXoDUv6Oz4NpvxumjLGztDC23f1rByry5LFOYJQUlmZbs7+/CjJYyIhZWO6NScZ3t+hrpA/61EkP71Zx1vbbh++7d/Gz/0Qz9U/jYI7yd+4ifwb/7Nv8HP/MzP4O7uDj/1Uz+F58+f46/9tb+GX/u1Xys1XADwq7/6q/jqV7+KL3/5y6UA+Zd+6Zfe+uDD+SXCgSSEH7S4kSQ/IirEdpPXG2tVGM+GKYIfh4jb44B5HfH8fsGcMtYlYRgjODNYM+ScuWHhNMejHuwYA8aotG+leo+O8l1zLQOIM8LhSrzXcQJPoj4fru8wrDOu7MHX1iqlodt0BF3dyHcPVyL0OxyRh1EavQ0H8DAhkXRkFWO9rcOqdFooBbf1pgBoHYoYMGhfLCbGiFAf1iDah4ElwsqpQohAZWWek7QiWYkqtDoEnFLAdZaSgjES1lA189ZcVcqnOGE6DMByknlMbsG1kXNdTHXu5F9XLLtqJKHXQxC4tYOuZEg0ZpFhrJ95QdKd3zWMQ5dz2UBVRlCwFvPoIMkdEkEvblteu31RPtXj6NmRj7S93833TAdYXRbnJEbL5nltFV0MQRCSUKjnMJ9F4cIiRYMLrU9TqIK8fpSooSx6W6EAccBQKOklKvU0d3+P2HWzOKK/3k6fr+S0olNE12OwRdhrRta6xdcXvffjsgMs/3p0rc1jbbuel9O
0fwkI8bJuoI/t29rLyyLTfvQ5buYKXRqb0B9j6h6Dtxlvbbh+8Ad/8OLCDchJ/vzP/zx+/ud//uJ3Pvjgg7cuNt4dy1mgjWA0aIVXWFQIineCOlkCU7WTPESpw1o06rqaIq7miPUwYNUWo0FpcjEGhBgar8UYhfW/asRIF19/A0t/oEE8S10cwtWNqGpPR/DhCnR9W1tsW/NKa72iPXJYe+QkaztgWorjsXiFa2o9MT/6+9cbsQZaYhJqLPSBCJC8GLE2VcxYMiFmIBMVZqENM5blXzByEPT/BKmTO+qxjYFx5ACOoZQUZJZ6OIERM8IQMIzaliFaol8KXnM+VQjI5zHLe0Ldpp1SgXqBPBRI9d4q77V5l+Z7bhuk92IhAJC9Bh41Xt37vYHpjVaz0Hrj5QxW0cW0NiFeyNkNg1PtXMu/FIBpgBFlwuGqiECbJJns9jJ1nnMCllmjNtRaPAqQEMsdu0HIFADeQmNlm/pvAGoN4COlC40zYfvvS9J1Lk19o4j1qip6zS3VdSSAShdp+9sW94vPmfv7MdisN2T2V2O0uDVa3vf00Zpvz7M3yndRDVZ9jzaRXwmMXbTVBcPyvn/9bUCENt4JVuGlwac75AGymHsYh68aZhyrdT+tSQuNa44FAG6nKLkYksX14emhUN/DEIRZqEXJcQgYRoEH/bCHJgTfyl2EM9nd6KSq9AgJPB7kIc4JPGRgkgcrZJd4J5cUVq8vO+jC98ZZ1LNZEmNdqlSVeT7A1lu6pB1oVFtGCyfCEBeLvIK8eYxQYobAh9kXrATD2Fs4JeWEkUOJvpbEGCPhvAp0aNGwRWDW4RkQZuIwHsHrWbxqVVigEMFYOqOVhab9po3remMU6iJWkvUGb3UipfZQBoLr4GtMPVlEKc2PR2D63iUpqVKQS0GcHcd0g+V3OAvJYpU27bTcA6c7Ya6e7iRS8nNkUZI6RwhBjFMnXLuXC/SMy2D9ssyodYK4rIQO4igRrh4vsYhdy4q5kyO8dKmgaCP1xdOVil8V5i+4+E3OOWyeuaLtR8GxhPV5Ii03cIiFX9Q3au7uM79++5Ieyxf1Pdmye5Yt5WEGwcODvV3wBsuiqM0UUHvsVi5Rf2+vqftdfc3cRoxVqcjVjJX3dg7iLcY7bbgAlIet8YLDIJBgMoafiuNaztdyO46m/d5RID7JqQR8+OSAl6cVr05CjT+vUpAMSAR1exzw3vWIqykiBlJ4MBQ4UERfhawBaKTHhOMwYJgGzcuN8pB3VN72/OqC5HXGvFxLWoE1J/dQYRMVF8FL7RbcQoGtF5i5fXjMIyT9mWYiYC1MZNPy7pJN3ooKUWXJ+rE3Zqj5MOHXAkvxWDNCKQKRH4snLce5Ztl2jGK8pdPyIPfBuqAQXlzbiwJbKUOuRBbFSF2IoCz3Yr2lVIvP671dEik1IedxkOjaaOZyWl0E5oe/BzxcZwt61xyRh0MbFSZVVF9PoPOdSDa9+Bb47qUYrbN2ILAi+BLRj5IzXWbQ4ahRfBWuLW3atbSi7m9tI7vlXKIeSnPbosc7Dw3MScVo9CMQCiwHVxMFoJBMxgDZnxFMdtRWyj5tf+7v3T5U5qjo673nCpD9Z2aMRi13z5J/rnoX0f+tp7YhajD22YLeCdwzVq8bvaHqIUAzupeixr3BevyxUAyrs4ssOXJDakDtPL3teLcNl9aW5PFKGHSjMOlmDjiljNlwXxfSN+3ddUgOKuB6BIABgQjXY8TT44i7ecSsXUwf5tWxCSOupojrKWIMoVFwBqqhSkbTKT5WFvpxPCIOqLTu3iO0thcuOb1mRkpA4qyUfu5w7goXANXTigGbjrfy+bbnT3mAsO8VGSTiYUORwZBPgz8FjWJDFrktYSpxk2czyDVQ+5D3Qx6ICs+kDORACMU7jpVEYGy2VItmMYwScanxeiujtQMZWf+1BqrpFlT1AIBMmMIAjrkwGi1CKzb/sWhgB7osenwlEgxFOqoYkeWMsDwg371Afvk
cfP8S+fwAPt3DM+rYmqSqYaEQpYuwfUfnokDRBkvbIQ6Q2rq8AosYVOlavCj8uYrB6ph9bwS76nxJZFUlr2zaCpzlo9K+YNzet7mkUFmd3Vw3NHhvTBWyBC48F7SF0WDXt/uejW3NVt3+JbbgmtvIak9fFNCiZ92/PTvbY64GS35TDZa97s/t0rBTsXJ309pkAhwtGZZmsP1/1vFOGy46XIOPT5Cv3wdPN+DxiHMmvJoT5sSYNeIyFqH1lRpBiGmHpBEjrseI0xqRmbXt9lgih3PKJaFYC/iAQwwYYu2xY8YxMzAnxhjQsAtnEkis3CQ0Ikaq+X9YctOKfHOBA5bcEk2sRqNvTyLnI323LMKyolPzoHqjZf++ifdWcncMBIviAMQQyzwsmbEEEi1IVW7PmZFD9RbteCRapQoPBnkdqbaAsMH6X2Jg0GjLFlYaVULLcoN2vMMEzyis7S4uLJqh/cxHOQm1fcrF+h3vQgeAUduYWE5OTqZGheXi6QJbdPb2YEsPZakXZoruMObl/Arp5ScSaX36MfL9yyIpxllZjACG44Sg6u40TgLneaNFoYm6OCjhh91zQBExDqK1uJyANImO5BrVkVhbAg2wySE1RcZ6PkbgsOsvc1K3Ifc0JJ/lDVV2+T2HZrQlCo7Gv2u02o7qe8/FXnT1mLHaG367l4yW1YsZStG3IvISawCA3Apki0PbNof1UZbvPdf0oYNDay4dP0sEZSiW3LKy3rFLK1BEaTbL7I71M4x32nClp9+F9PRz4ONTnBNjXhl3S8J55ZIz6b2aouIeUFhx9vDZonk9ym+vR6G7Cr27lTayQmNAblgT2R31zkjMCMoXnVNteBi0Aj2u1hp+e7MA1bOyVirGyFkV9jyvoqfolSsAuRkOQ8BoXhQpIy/UQtPHMOuCq7sHyLOUPHvKjtsYhwbBjiFiiqEwOEWo2OYsN/Pom1YetaxgjITjEEs1v0WNpvRgR8zMWqeW9ZqIKgMNB9Bx7uqvqFmEN8YKqHAb1Wh3r225h2xsHnwdizkERtQxweEmz1KMU6iflYP1kcn22OrF0kU5LTWvNd+DVom00iffQPr0Y1Fzf/Ucy92DaFp6ebBSnlEXcYlMo0amoULVCpEuJdq3+8Lub4HLpulaYLs4AvFcojF2tWHuALDbhkQhVMJajJc13LTIW+4bZyiC63MWAphjq1pi+/P/hu090RhSh3hkA09sd91x9MP2Kx0Y9j/fi7IMBuwdVHvufdF/eX5z7VbRily3yEvp/+X+vmSsHoscm3PUY7Au5VbnFWH5t1pLmEj/zt/BERcfnoCPT/Gg0ZWPsjJv1TGAauUjEWKE6sBZPZd4dsyEMQIpBzVYYWMEpeCw/u3zWzaywoRRQXrfu4pV1cD6dCVZveS8eHvzshoxU7Avhc7st6PqFBptDUHYeGOo0j69gSxz6eDUzFuj5VvCNF4mZOUwo8XunIZAGFmMUPUS44bdWByJEMoD5x8q/yDV3Js+AAiIVjek9UucVlCaJPkP1CJd4DIk1Xvd3vN20FyPzBBQNBaB9uEv50KQKMi1Rrmo3rD3/h5JwUUvHhaj5R788ArpdI/88rn0dptPyPPS1ANSCCBlyIZp1BY4I2pBfNDC49rJuEQe7KngXO6DBHmelgyMYairS44QFtW8bf9ic35pHgzh03O2Av4+Ajf5LgoDELm+B1zOccVt1F3IKJ3RMgSkhwn7xdcjFq9jzxVncOdZa+SasjmydYONFiK1XcoNofCkDHOgfJPb1nBtOyu/yei/a2ugRcZS/6niwCBJJWS5pYY9/PINxzttuNZ4xP0qkN6seRQjZRjtHdgJoyGGBqgN3+xCS6JV3jMVh8wADy0kN+48a42UiobE8ofuP1j1fGvAvCHoGUJ2w5qh9Aar6Q+mxkparEQMaqiGWAt5rZ7Mh/6WbNa64rIg9Q+SGVMAhZXUM48i6tzFgMLmTCxRmM+/+WjFfu+ZT82D5AyWfd8WzTUDCAP
iqF72qoSXVBdJTtv6HdlpNWa7uZauRYqHizR1VXOmqMa1GCvfQ2yvbYZ9tmOspD38nhufm+ittH3R9iP51fPSLyu/fC5G63SqfdxGeeRFqHmQgmGrC7Ryi3GSTsPGnnQQXh91AHavWFGqIhOZivGiHMSZIOlftTHcG4jQbpIk2o8cahSlNV59Py+mAIqTc1BIjWVwua8OW9szWjskKCNl2K97FGdv+S3PTvddz7rzEZeP4D3CYvVipnpRGztWB7ykQGIoUVZJCZDVkFJ5beuB1ZZuIn5/XWx+d87x0rnb9yUyllxbgNw3lnnLzBi/U6FCu6CvliyRVpJQGpD78jCEJmT2w3tKBqf1ycpmkYQYxhRj8YT8NvuIzG6wJrpw0VeGwIZJV0Ofq7LeYL2AbQ8L2rGHQDgMEdejFFIfYgsPmtHyuooFbjNDvZMo9kZU5kGNnKsFKQt16MkpVOfOeZSba9gZMH9t/CXzXmypWUnAmoEYTAXlgHg4ysN4KUH/2APqjJU3WF4uyzPL4o6hIt9HzJc1dPvdaB4+Jjja13hxy+LjuxelCWl++VyaPK4LksqGURDFFRprnq8wCY24Mozyvha1CyVeSzAcnBosAkZdIC0KD9RCzyIO7HJXvUzXHjzaGTSJsFgMmFhB+ZpGQtv7akAch0LYoCT5vCIa3BtMoOSzzDBakbHJwhkBor997UwfI1z0v7OaSpkfboxiub+wdVxtjC5KuWSs+uhqcEZqilSfjfNcc5meNGRIhMs7btr/NCe443jZMeo8x6iRKxGiSsydvlMjrll1lCyXwEF0PK/H2CpVQLxhf4O0cBcaozWEFk7z0QZlFP03G5kBRIEYAblRR2wZP02ylGsVPMPqnLhpZGkRlheu9ZusNRf2X4tr1662CiN281ejKg9BqKHhNp/1WNF5KA/MPgRpHtbe8O9egla88bPNLAWi0muiGooxs3qTA+IwiCZeUuq3VzzvoCMA1Wi5ui0PFe3NAwGI0Oinrx9y/Z7kYLsrYNHA3jn7ibwApVn7kTxrh+LTfaW7p1zgQBon0PFaFFpCUKOk7EFXv2WfWf1WAxP68w1k/Ucl0nI0cA+RBkIbWXbH78+jeb8YlArZElyE1cF3RhLxw0hSUdm7cig7OTabX82fZX690jsA57i1Tm4x5MqoIyUo9OSdi7qCXNeDftQO62ggdZ+/sufQoyxTDLVc4HwqepTm2LXzUHO7RcHfjPoF47VReWkuhKEbAyIFxCDSbgyU8oHPMt5pw7VmlhbxJLkcuagRN2MoXgbgWTrUdPvcS6wS1QveVoO3Ia8pOjA7DT9CKbDLDFhHYbsFFY1rb+BcxS9Lj6u8F3HVzwDoYuF7MxFex17yo38s7IHxn/URxmOjGExUyGz7je3w2zenQt6vD3ovIt0UMYNLjjCyGnBdAMTzDBiGo8J2A6p4bGwWSQAts62Lsvq8nP0qEBSys75TtX6oKSIGpPTBw5T+gTdW4d7i4A1etzCU+qhS9Fs/D9PYyIOF441EV4djJWDo9q04m0alvHuqvR2TGqFIAZn0fmc0NUHk/vW/2R0eKvXF2Lo/MgfCz5tFBc5oef1Nt3HN97DL4wxih/vDAJBcd3QP1fvrTsCujJO/72V7ss8aRVWGXZOnQoUDM1djtWeEAUfyweMRlvS9k7VhCITIarC0IB2rRFqlEN7mQa83hUEK21XdB8yV9HIp8rpUh5rlmlHOxfhF3c/0nRpxGQmACHgyxeJlAHIjHSLJYhIGrAw8rLnrTVRHU8zHgBEOiGrBoxUaGkfIJ1AX0iLU7rHow3+yfaA1WvWcBPoL1rIddeH0bQByYkDJJYHluISMEkoTR1G50IRo0DquXOu5+k6p9vA0D5M7tiaPY//BIjzCBjoD9m/ofiFSjTqfT7AozRvTrFFgD69UbxdFJoq5vs6ByqLllSU2I9Rcluk6+vlpv6s1RUxF336bv+LdyO7i8AbKR4I+4jGoTL9DU5b+a2qIaBx
RlEIORwQVYTYBZigU1g/2+zJSiqqyFEPCWRiCFFR9HhDr5a6xvi5eeN6Zjz6/1xcJI5XrUDpF2/xo3ikpq9eL3HpdPBsmt+ZZdH7sESO8Y9vnWWNgRCUkeZX9shYB1fvSa2RlMNYLa1GMk3VdYN6uBSV3qmkMKttv88CFaEGo5SMGCaYZNM+g5aEUhtMqBsxazpRaPdNCHTW3mSYxYCGW7hLlvnyd8dobdg85OD6sp8vff814pw3X7foKtzTgxXiNqyFIewYbeZWLlhYwKXQ0XRdZJO/dA1XhwhY8QDF69VpqBEEFW5f6MFVxyJLX6T3zGrXoYel7iz4VuQsnSq2T9ybVaG1ZknYDZZxWPcABiDnCx1RBI0KBL2rnVA9h9MbKL9plSglSyBxEyqpAkFSNRzFa/QIF7MII7B+IbtiC4ufydaM4Bcx2wECGwDZEiCR9q5qIwKApVOPpBV0vFRcbcyrYOYS1EASIgtK/bWK2hJDd46fu8+575TAUZhTlkAnhekZ49jlgPsP0AsPhqkRPPBwqyYRca5bXkRZ81JVzZfglJ2IM1AL6xnj5i9d/lrf7buZBnStX6+Vr6EQNZ1vbuCnCz6yLfVWQL/vgNsoCWhTGM/KI5F6KXdlDKXkgoJBxPO2fhPUag7SZKc+YOX6uVZAow9Toyo7DEy3614YMDS6fPRCkLCItwHpCmB8qgef8gDyfwF7VHxAG6TCKPNgwgqZcIUNmgOYKJb6JE7a5nvV+pqx5s2V+/EePjHfacIWH5whjwu0HTyQE1q6yZcFU2Rt78AKAKbriU9RkP1HrWXtVibIQa91MALS9uoa+CVjIjFkbKVSjVWHKpEZrySwUfDVUXcWJGrAKF5renxwfoBQPAPIgR6Fzaa4rKLTBSPrQESspVbfReJoejmQX/YBhAEWQ0wDQRluEmids8hquPbrssIs+MoCuY7AtJn092aX81x46miHnSnDGC2q4uYVcguZLbD4u6b/5iMuo3zaXgUlICMMEY/2xTdVe3ZKda78A7BmtS4tEUIHmmMHDVJmLx7q/ZB17SeTFmm09RljxTDtL1Jff5e0h7UVV/blfgpL2RjGaoRgtUyyx6+IhQqOL900TbVjtn68BBKpzwqhsYcCTHlh+Q+S6xukhorvn98hAdi4AeBCoUqBUyTcHzYMpDlNSCwCaCEvqrvQ4QoUrrcyFIK+nSFWr0WSvlpOujTMwnwtxp3ReL5cnNVR+MiFk1p5rxXmx/ONrYD5/k4RQFWPY9Y/j9fLvXzPeacNFyz3iQwJ94wzEqaoIZPcQWZt3ALyc0Gi8xREhDojDsSQMLaKyUdhivske9EIqpHIMAw7DVDxVq3LfenIkUVoQzysmNRCBFEbIQCeubYunNyzyvm1TjNc9EnJmjCqnkphxHKTRA1GFHH1OrIdJvHxR29NH23bDQxgGD5qRV2IIsDFalNuTYjhjpTexwZSC+ztxYG4Lv23foFZBobxv+2AU45KTPG+l3CFzWQg86aLNSbQRqb8nEnOTx2G9hqP16UorkCOQqhN1cVwwVo3B6C2FT5R740ahcZh6QonBvCZGWwuYX1Mu0I/cGaQdGHADGfaji7aa1jFOnd0LSa8l0rL2RIZEwL2Xa66Ya4Gup43b8CUm2V3gMRIOLJ0PSqpAoUIzgFYvVRisqkPZsEltLlXuKgR7VsQgDiCQIkARWw1GD83vFQobAa1EWeq803qu4sqrkDHy+UEMlUVaWfvXmcwXAF7m1nhlZZ7u5Bh7s3URKSDtVl7mwhmxvs3MW4x32nCxaqdRmsWjiBPy1XsIy13xgCgtVbNurOKofLyV34QByFmKWMtk2w5yYeEIXvywvSnNo9JGfjwcEQyWibWAEUDBuS1Zu4YKccxJjGFQaSSsQOKtIdvkWhKQcsIRESdtL2JNK3OWkgAGIWYqzDt7KACUKK9v9+3hzXK67nbNjMK+NHzfR1sNi2+HgixzNhQSRP2vKob4vENzHHoYsTNU/nV
WqavEstAkhfYk8kQDmZapzFtDXqKx7jiYNUmfRMJKrksQ46VafQy0jRr70RmdxmBZxIELi4LletjykiINtjdvvYcurLOAECZZeGPeHlc/vEGKoom4O/r81d7oDWN2r6lTZnfdD5L1lbMoyxmt2ZWR9OIAQM0d++HZus33HFwXirFAIW4ZHF7U6IsKvTPe7jqV09bfIVRjlINgGrvPm7vPvRxTDBKxDYGU0bo0jrVEWedyXPn8UA0VACPliGrKWN4TIeoaW1oUhrmKI1Pc77BAQHvvOrjZorXaNBWSf/9ONVwSNU3iaSwzaJkRKLT9hkr/n0F6AWEGDQN4PYiqOOeaeAwO/7eFdz3XG2G5B+Zz2b2o0svFoqSwTNKbWB88CoN0iw0DYhgQiTRZK5h7Cij6h3ajHjmUvFkkwrpT/meaX4Dc1CIxBSDIA7mkLDd8lpxXUtjDmHcGRQCVEGH5uHJ+JTqDSwr7/Fb9jo+20P/bQ4T+5gZKTsqzwyzZ3h+T36cfBvXaMOMFQAgqJNsl1WvbazfujZZ3EPaMZ4YYRtb9Msk5RJY8WpFyMujajpPzFiLU+683Wo95sd7QW6PQNVfhZTt8cVK4FL9HtsJ6+SyTCBVvJsLPLQRSLV2FOVdkY3MhwusNVv86OgNtSIYVAzvFkgy7RlaMKxF17QSxrYEEgBCkR5y3z33DxkIuUnbeqHnQaGiCc87E+VM3zpAFYHuf+1Nn0V0cQhB5NEaRQbIpv2S8KlvT1WP2kZ4xBpO+Npbr6hRWQgQPCgMCDTFDJ6p+1kfb1m/N/64fpeuCRFSkJQbNnJRo6w0g40fGO224pD/VCFpP0pvr4Q74+GtSRBlibdV+uAJPT4D7/1lD4+kJwKwMl1MNX0MUz0U9KZof5IaYT8gvPpYamSy9nejqpkjl0DCBxlUM2HpGz8wijQ5jlHoGxEEhQxSY0CIh5oBjZO0R9voLbDd/Zvm9wR/QYmxlomIMQZmX1WMs29B/ZYH3NPsWtojl91Rw/pLw7hLv1D+FqItzE21l50mr0ZJCzS1zUF63qhp+N42R85+731tXl0u1ZXsjaI6yH8IU5VKvY4a4Gq36kPcNDC9FWY/CLrDcozgbNndmtOa0Y7hIH/QAmEQKWyTanZNf0HvtOvH8tVs0BTksW7Otk7A+R27Ky2jOay8K90arv0fKfVJhcoGXRdrtlLK20DFlGdlkUszYQ9w2qqHSc9cSmtEV8BemKtVGsQE17/3Yvd4s1jmDMKvzGtTTCzXasuvKm7aWDWQ4EFAK3V3doNUQFqc9J2EOFuOkMB1GXBqbfnX+76YRqzNcjQFawGb8pgNKvncvggfah/Utx7ttuMYD+PgUOYphGG4eqqioUT111aZhQvzwIyAcJdw9v0Qer0CsU+BzV11/Icwn5PsXsm3tZUTDhDCfpEbmcBRGznSU97XXE1EAr5MYrXACBqGYFpw4RAxxwBQGTIehaC0SqRELFcYIKxCIEZIaKI0m5EEL3b9VpV74Glko8zmru62QGdWi5Ggsi25Uw9Vqm1mOy74TDBy3h/QSbNItSEveRgtVmw0NQ6wcT3Nc+8Ozx3yEVgwg2tPtHyFbwLVcTvKDLg/hYZyiTKLHRh4qc3OwyUs5FfJGjd6+Uw46bI7RIhB/XuU6QKTEJKdpGo/tyMwXHGcfhbTEBpsxi77YDs2MF8mB2CLOe8ap2VVHYDAD7gVuUY20RVuVoOEk3ozglKuCus8J22hytEG0NKHPjVHKpxiK4bKuD2bAyFAHwuujBk9SgqqY9N9RQ9b0WOsgRtlO2qiylAjLarKs99kytySLnf5zBcrbux7+uS3OaKw1gzZybgyZGT4GENjam7j73MHf5SJ8xvFOG65yAYajwCfDASGOyM+/ATzcgedU2TLrDJ5PIiSatdOrhu9SS+LgHOe5YF3BysShEIEYJfGuw9qR27qNnItYKcUI8AlWS0ZpqR62YzdSGDBOVxiGI4aOBmsMp/OatM0KNQ/
kGMRrPMa2Y3BfjGzJ6d1pdBGM/N1+7o1WvGQpZDK218e99udt3WSZzYPmUkPHjojCXOtfAMsx8KauxY7bH54ZXKBCLeWzfg6gDxx5GN5/Xxdjqt+3tjZeBd5abDSLWr9AdEarSEvZZ5c81G4EyGIfA8CZi2KCrydsImVqOwPYuOT3GrQaNGplOP1EZ0zlpeWr9uLSncEZpQjcba9vb2KRVUaNNA1C9pCuN1CNZqh/PzNyeb4k9sik3bpjhQs9qhDQKvwH7Ky3FLBJRus5etp/Iz2mvytqLeMRyJN4qLFbln2+2DfItHXKcqjrCu4NiaZJ2MGuTZTvD9cR2tA5X4wsxg/6rFyCC3W/bb87qufko9BvY7zbhstGGJCHA3AAeLpGzCs4RIkwyneiGK6cJPIZJ+cRtfRVy2XxLLBgoY8O2izEmhU6S8DLDJhygcKRbH2Suq67JVnpk9BpBsUHHIYDwnStGmOSl5piwJwiXs4J5zU1yeemh1VnuPxDbUlnK2Y0ooUvKO5bcwAVhjOvO9Jl+SYPAfleUgCaFvAmXmpes0Fse6zGlNv6tSXXBLokz1WwWICq0rOuMWbkYK8SsdVztfOxd6rCtTs1blcr205RLNCIC07ct4GPOsPUC/h6g+Vnl7rX9lmgWsZRWGfBlhXaEDPsOzYndmit4nhzisVgZYjh9n3mPKjURJKPejaoz5w9b3tNpfx8dMe0lwe6NAoD15WRWN2k1HWJ7uiItqtCdAbMSydZbqs5HVIQmewOdOeY1y16s8xoi361QWdO4FEcXOSpO+m1bk9VL0q3aVVM8R2ty1xabj9Orlt2p4rir4vlxXIUUlsTMaIar85oUXDv2X7d+tao8Ntv5IdvdiF3xjttuOh8B5qlPoWcV8PTE9B7E+LxBvnVc/my6rCZF8JE9aJ0obF0iG0fLIoRdPvMJS/ro0vDKDfefBIo8fzQHujODVXFTSeEYQQOV8KSHA6YDk8wjle4PR4x84iHVUSE3ztEnNI2CgGwqa635dzXffmqe+AxmK0uzHCQYMpCePDMJuoXM4MDONSF33I3TsDU57X26rSYbcFxUlgu99K3QhHotOYriGqE4eHNPT1FT5F+k0dpSytfRNLGFZ4Wo2VGyubB57EuGCyBglHeI6DkjgiyX4l0qJQocJAoq0YhNer219vuRGNQei092245T9R7Afpb0mMrjLHuuE0/EG5fPkcTlfRQdANjaHJ/3Oe/9IrUaBw1Gnff8p3No3ovkVvD3KtS2OsxCmLhuyqYirqpUfj7qfw6DEBewXHQeqdcSjTKemSkiWWuefh1qXnyg0hyhZunyOlK0grD3DiBtC7FsJQIa10FRdJtlWtm0KAZrSBMZx5GIY9prt3nUv2xshkvvecoJzHOer04r+L0K2zIWtdF0Dx/jMo9mNp8pVdr2SMoveV4tw1XWkCrKGMYZR3LDByu5QvDUIwKAM13Ka2TGZs0qEVfRcdN8lbsc2VONsc/cKRKApID08I+/V2pl+jEVovY6XREuLoBHW9AoyRUeT2B5wmH6QbjdC35r4FwWF2h5SOup0FFLbusGprH4GVPZjBKsEU/1n9rd+jDZsoRZXtOZd3Xaz3mOJMaR68s0jPBLv92Cwn2EFnY+a59z/9u9z1bRDhXj9rebw5kX2nbPmv+3dlf2WeXK7P3G0fBrvfm6OvwJAAEuc5eDBZoo5m9CLUoivSRB9cavN4AW7Rucb44TzU6aZQ8uvlDgY5rkX5/3xequzpWo+G9ETBzHQFh3cLuaWm4Kv9FHIawMVoWTfcQtM1hIEjOOiu8l1ax7Hu5I4Pw1qUaHEB7qMnaEW7UoGdXmuOda4MDNboqUZZnB3rNSUM4imCydrF2aQpouoSwotEjDAHIAAdIrZlKpREEUpRcZqw0eKCkABokye77/tJ+J0OFVhFOAMLyIArZ8wnRIDllDXpM1jwdTkkx2xppEXOp+QJQQu3g3hfv5QAej0JF1d8RABrUG9Gb024
uXpZ64/ro6+FOvJTDUXJo66JGTA3XcETmjJBXHIcJh/GIKWyFRRko9HFAF6SSndVT2YED3zAb4Rb/qtXojVv7ZfUUe909qtR3Ywru7oskXjSYSppjMqAak48JCRsJIeh5erjQFt9LBtwbuPK3h7MM3vJF1baY7OX2fP7iUtK9P/e9N53RuiiddWkbHsqmKm6qGRBYkz8zYJcOwubcrlnyf6DegwA2YreWg0uZJRAvGHRriJvFrYtC7VuMCl32EfoYSAiTLq4M5TtBz6NGXEMUw3WMAddjKEzCqetfZ5GW/L7urzFeCoPJv71aKbbX3IyN5QNzAo8TsjqxNOb2N0Zrz0kMnjNYpUbV11eF4Eg/Q80rh9gYLUZ1IIqzqYaXwwDCWsNsO29z2NmtHiUdYPlr2sKD3unaccTedrzThotfPQdCRri6EUM0jOB1xvK//b8L8y9++FH9gRmvWcQdy03ThdolYrIWEMNUwm7EqRIrosJDWb8/TojvfQh+8kxgw3UG370sr3E+gc1T0ryZFf3l47VEXdMR2YRRpyPi1RPweF2KMa+Go3hN4xFpGFTqBkqDrl61p7obM6xv1VJFbdt59bbB5KnM8Bl9NzMjETAFie4KfCWT+KhJtFwUsXriRtEmURVAlvooChZ5xV0RUp+zM8+Yyr/1nPvo8qKauTdM7nVvqBpVEGNP+iS0h0bs4XU1LR4KbAykH+7B3jSidPD2Y4W+xViGQb1xIYJErS20Jn9iBPYdgl7Pz9e59dd4L0dm1yEGKh24N4bY9tXlAT2kvGYzinXbzNCu5fXeGAOkCH/ApstCOS6Nqm6nQSMuEq3TLtryEOfu3MCuZW10Caiauo9OOIMOQAhRTOowAvMJPJ8E0ssZfNY1SdctX1NlYrjF8S2dALqCYi3NCZZ2UFiQCzxYnafN81mIFFmiSM5ipEzkmAnENULb/M5Ui7pcFuCiq97xSm2u7G3GO2240re+gXR+iWQJQfVI0sd/JPIloyQ9S9O8odYwsNPp8n2JoH2J7P0NVmxht12EEMAcAVh4rwxDvfE4dN6WbrcUAhb1AL15lZHG80nELucTwvEGwdpNjNeCV6cFONwgxEkp9Nhgbz6v441XZtVIK8K720UHqIbIG6/ymX4oIvUm3uv2bQu/zZE+3EMYSm4mQAkGQZREAPnXpoTZ1C8k0mKuWhe2r74fkWm5PQ6F8sZ4NfBfr/rR69B51pVtk1zBZc8K68bGaPmFwBurXgPQJ+n3DJkfdjym8xenygiDQFwGNfLOXFUGHxXPplcUueTwmNKDNTGUgl4Ug7+nuuFJKkxbBfjErZanXHPW+STAlSowWkfHcqRybDU3ehhqlHWMoYEH99iXwOYRK/MS9doTVjBHEKY2jxknIM4IwyDEr3URh9YJzXJSBrS1AGl2Yot+jbK82gVZOc7YGis2ZXczrJbX2jsXNbysAgoVRoS8zrkaMKChuG8Qhd6xcpqllv/drX17w/FuG65Xz5HyaSMWubx8hbysAO5xAKrhmo71e6vcMGRN9LQLLHICHa5qYfFYJWeaGwGo6gjDJK3JAUnWeuXsjuThjaKxD/37In6pzCP1sLJCiBhG0FUGspwHqRcVQ1AV7O2w/IJ5vQBKi3WLdqxe6THjZRCNh98uju5GRcoVkoi5QFYURG8wcYV1AtsiIzVopP9ZUp67xcvXcxlEWCD3HSLGxeO1h8wMgjdQPTusNxRGUuCSWHnt2BxWb7CA1nju5dX88e15wXALVBgEClftTg/zWJTsfx+pah0GVLK3Fz8GBG728HNT1xYIUykRWAu0Xo51L8/nIy2uhcfeUAb9nzVu7ds6ZTZ1kEpOanK96sxNDh6cXL2Wdyy4227/Xu8MNHNOVGBEKYmJYsAGobSHtFQ5JkVhZEddvVQ3GlhwGNW5HoHpoOtUa7SaGjF33H7OAXO+AEDPwwIlu0cUhS2OTm+w9nJ7QItO7H3+GcY7bbj4/iXW0yucn7/C+iAtuik
EhEkNS8p49YffBABI+/KDdIRVym6IQd6/OWK8uSrsnvD0AyAExMNTyWcZK2e8blW28yqwAJEsJOdXoOW+QoPK+PEaYb46fVf3y6R0lNTBq+iQ4eFOji9nhKuEzBmkyt9xut5spqGBo+YpymdqvPzrPeNlvyoSSST5hN4gbKIHHyk0JJal5H6iwlYcQqF2pyyCo4k8lFmV9fMOLd3OyTxlf9x++HPYPW5vEBwkSDnV+hljWrncAr8JQYqr8S7z083ZRXiy/O2Opzdaj8GNTNXwmUHLa2s4HiuAdsPkxYxS39Ptfav4Rvh1t4ap7tsgTRPSNcPle23lYnxErb2w581RucgaqsOOtcKCamRV86+lVXqdvXZsWLBl+9J5G7E6G8gVXvPzQOsJdFyANCOsJ2EzWyTWwWhlrXAoDQ1T7aGlUVahvA/HmtcyHdYdGK8et8LZnAFyHQ4surJrZz+37fi6MPvIP/MX1oFvd7zbhisnLA8r0rwiLwvSsiI9zEjLirysyPOK84sTcmLkxRYa8fTjFDFeDQjjgHiccHj2BOPNFY4fPsX04R3CzS347iXC7TNl+03g4aENhW2EQVQ8hgnpyedB1+8LhTWvGKwmrE+q+kZufWt3L4ipfzfilymBBosERJds0s+DwiJmrEzJGnBestW37ERY3njZNjy7LOoDH/36zxngWgtH5d82H2PDPHzSKJaidFqNJoOVuRguwGrD23YxfhQHsKd/v2Yd80aLbHH1BZ4+wjIZndRJ3tiC4r1O/19wr5udb71Tqwl63WCiCuUYGQaoOojuP/ZwtO26YGqa8xXamBg1XwzdjUrWq3qP8n4rAGtGC2l+xGh1xJW+TKIzWuzzgups7eVuexakZwRW2SqZK0pnYNm2IWnUTLp6p71uAv0QUkeQ3FccELpVttyWaS7iuHR+hRAnUXIPEbCaU6AYK4usDCUqvdZ6NMjWIzNGVIW+H0s+l1xdXlFKWpAr01BzuTVCo4I+NWuXhxfDUOHPzunk8GboxN54pw2XFPVmUAwIo+Sv0sNcjNZ6WrCeVqQ5I80JaakLznAcEKeIEAnDcUA6zZhurxViBEatyQLQsP38KDUKrIZlPILHKzFMURu3hUGgAfOOtQCRNTm7kVEBinFDF6mZ12UV7NVzmzGMEnUtqGoHfaQF1KR6D6H44X/byDzBsa04lxKADcvNRQn2d1GC9nkvl8CmkIEBJQeGXBlvMdoi2eu5t+N1/na/wAWS47EWH00uyzTgfJTltd+AJnquO+kiFz8te8fsIxzOMBqyvOd+YYtG1AR5VljS59nqgbWJ8segHP+ey4HwzvEStIZMnZum/k2j+4DuOljCH6EaBQCNYojTI8ywWjCoYkYvGNyppLt71d+jhC5/6SLWpv2Iy1WW2rQ4aYRU58brQwLYpf7bSARlxm6p9H4MYUKcdA6yqF6Ac0lZcBb0xeforUOxhwQR3WsKFSJUg2Vz6cdjyMRmuHt0V1ezgXpz7eGFSc4LAFFdCzbb/Qzj3TZck0gkccrAcRKDpdJNaxDoMIyyuOSUwSdGmhM4M5a7Bda4LU4R892C6eYeV/cnpHnB9PQVDs9eSY7sdINwvClsRNM+pOsn3YM/gMejPBB6gTlOAF/Jw7HOoEEiJJoOoNnViLkFkdel1AaZuj1FpcoOY3347SFMATHOiHHCEKgpAgWwyworn7k/fOTSe9C2aBVIzVTwPQTQQ2F+wUhdvoYCiBbwoGoB2svKcmAhhqbuaI8Gv5uT64pfoede4aS6sJVFzeu9qTJBr/2Wff4BAII6D8nlKb0RotZw2HnsR4yh0qqBQkMuCz6Awkq0uTab2eWH+vcuNfwzVZPdOjP/e0ukUwvFPeZCMJS0o4sn2EWHdnx6PoXwkV0dGHf3pYMCS+NEl5NqIimggXmLkTKo1EpYfBSoxwMiyVdTkNc5A9F1MMhvzrCMypbqm1f67yUWVZxhuhYIXY8tjFqgnDUPrkIFGAbk4QjrPch
jfW3Gq+QIGeDcRoX+Gcgdgak38r0zuutw7ECPRUaPguSLQwTloUKN3tEaLgv+vm6804YrXN9iGG6xnv4n8iwe/fH9W8QvfggAyPOK+29+gjyv4JQxv7zHepqxPqw4v5ixnhbkxODEOL84Y31YJUI7zRhvr5FOElGM7z0Fbt9HPBwFNrx+gnx8D+nJ52vXW2VK0TqDlofy8LAuykxBFuc0y0XMGTjua4M19FGt3wBQvK1yw2jUw5wROIPDCYNCboY9F8gFKMW/m3ns4EDfPsEgn42h8obpEnZtENbqoheLvDz8EBy5IJshMxVtffL9nPSjy9UAuGgoigHuFLYpzaBFu8ZqIXspFDU6sq+ZsaR4iK8/Nr0OWR2KHqJtotmS7F5bwkfvHMRKECpwjYuW/PmWefXRhf3WG1mgcWyKTBL2I3M/z4DWFhJA2UOIUY7fLZKc3Y/gCB/NfMjCXyA/tAhAJFR6vV1LX2O3R6wxGNhaHlktpyo91EhLR6jXbq/BamO8ujst6XmY00WdkWgncWskyr0FIV2YbBNPV1UBYzxIpNUcI29QlbJPjZKlKTiXMhZv8P065I+ngU7V6bCef75RKSAQaQyQ9Y5VbcMbQ9t+rGS5tx3vtOHKp3uYuDvnjJwy4qh5q3EAaxtz++zw/i2WuweBBZ+/wvxqRloURpyTePmJcfr0LBFayuCUcLg74fDsHgAQnjxDyAkhTuDlQS/AgKY/kSUszRtSL444g5PK/Rd2lkVNs+LALhfEDIwVrjLvpkq1cGEzMgDKASIfv5Z2KjEMSOSKenuShv55EQ5cdUH3UBrQ3oR2rHvDzm9vwTAKPWs0QUHU+gt9zHlzfc+jfnijpVh9MXz98fiI0YRK17k1WgrlWj6y6dZrws0DAH3dHFf34Je32WrtWg/dGltagVe0/B9QH/ju/LxjUvIu+XJLjEhDnc4OrmmgY1QD6w3WY3Vb5gxtnYR9JwlAA9luvwOAJJfmh5deMnp9o5heooW1Lr5drtIayxZBAn8wbk58NFFFfi8bLX/72+aYobAqNyQWM8JDIbDM21ywV3Tva7KGA6wXYUJw7YCcGIE7ICHUKAgEbiDeMvr7tctp7cG6mzyfknYSQ/u8KZqgBgwRjbPLY6fJ+BbjnTZc66efYI4RnHKBCLloCwaE4xFXx0NVvUgZy/0Jy90Jw80Rh7sT1tOMdJqLEeOUsZ5W5CVhfRCSx/ziHufnL3GbM4ZndwinOwyQ65CnG/B0LTcTIBfZEqPuQtvwiVrJE0Au5Hzftv3WBaXok/VQm31GQe4cWuviFid5L2bwAEQaVB2hXTB6Y+XhQFprYp2W+zbv43MCwOPyLWUhUcjNFowVAn+Qo2Ovc3FE5DCoGh4z9nvRTYfBN7I1e8fZtYagddFE+amtsfFUZU+g8Xpw2sKlQCE5t0kN3actfkDN4cj7lgthVX5Aa7w4o/CS3YJq3q4XJt7zHWSx5A5W85Bu809jtPocTk/usd/1C/gehKunBSJu8lS8E+3366odbXGogEps8CxQfw24QzP0O1w09hKq8HXNv/X0MiHy1QABAABJREFUbobMB2v07rtkW21Yf84BkA7cSsf3jVt74wuDp1Uwt9yjw1DKGDy9nYeDOKVxworQRIJeSccXats9Vq4Zo4mAy8s+2oR7fjpHyXdLt/vD9gdAG9dqKoakWSkBCKZVCYDjaf9GeYPxThuuu//5MWiMuP7oQ0yjq1FIGetpRkwZ8cltk0QPxxOm2xnjzRHpNCPNK5a7B6xqxNb7E+ZXM9aTwYkrhk/uMT2ZsNydcPWFFzg8e4Lp5XPE9z5EuH2G+MF3Id98KHT56QprPFZGlBlUd9y91wYAV8M14uAYWeXLrsup6THu1BSVepEwwKRk7CGIY0CIIkbr9ePa4tulRiE9u87pQBbF/DKhmuu51BEXkIUCKL8rQsfrLL3L5E0xOJ3xqhe1GqxLhrLRSHTRVvNgOvhIDLQ7t9NdKQrldSnlDOj
OlwZIzU2MFUIkar1J72BwRqAg17y5D9RDZi5RF4O1S7EYGHvQgWokar8yVZTgln1noyUzsEJt3NX0dZcKthBXY7hH6PHyYj73Y7V23tDZmllahpCUVBjJzOjpTbTvrle57qmLoExw9pJeZEEDnOEfBgBDcZYKK28QVnDJH8W6UFu0lXJrqP3oRfF7EokVZRf6PUEcJWsGqakGNnKIc3zzdIWiM6h5LV+g7aOsHiLcG320y0Chw8s1zc3zk9Roe0ep3+dmHwBCro6Td0oMBZjfqI5kf7zThmu8OWA6HsBZIiUACOMAikEgw+Ox5iJsqIzTYZyQTyekZcVwnDBPI+LpjDgNoHAPijMo1gr/NKcSnaXTXCWdlhlYVxH7DTOQRsQB4CB9fqxGCqg5Ju+xWZi/5npTt4rUA8ZpqHU3FEAk+Ze+WWFJ7lOQXBoA5Fgis6HPFxVGXdaHp+314+GVfH4A1kXUrXsK/16n1FAZkJvW4EbvHaZKDnCDFBfv3/PedPuhGqhOH7Fpo+B+3+RANlHWqfZuy7nNa11qaPbIqAwreVgNss1AuTf2HnxmqQ9MjK3hYi6GJXdGq1m0GGCywnQxijnJQsr6RVtQbPT5pnIeMO/djq9GZ1VqrBosi06AGs0zsaqfyIwYAlC7C0s9FVmEWxwBbu9zf492edY9QdfCFmzyNtUwlFonR3Qozxqj9D0j4qKOnwEE15xtEyW6CNIYl5ZPCv4ZBFDIMUB1BO09y5GbOK4ZLa4Q8aMRLtocdmF+dgdc9qn/ejZiL/vVG8m9HJ8Ybi7HYWQVM+YAbzpcvM14pw1XvDpifHIlhsQaQY4QozWJBEo4XNUfOO8VVzeg4wPCfEI0Y3eaEDVyoxgQ4hlpTqVgOS8r1tMZ4W7AdHtCWJay4OE4g1bVgxsmDHECtCYJaFXNt0ZL/rOHd8oBMVhjQJFBGsOAYTy2XqSPBOz9vMpDmRQyy4Pkbxp4qPVKS4Tl2393jLoSiZzutnJZOre7/YCs9sTwbE9qGC4Iz3Juo6ReLmYTcSXZRk41h0eh2UbJwXnli7xujZY5JOcussQk23OG9+LwDoX+HYLBTvUhNuNV5pIAv5xYdFwXiLqAsPsXaBcZP4yXmNRYBFLnyeWaSpdtqvv1o1flt2LgohT/iNGCvrYGqbJPW0hre5hitByzc0MW8M0YPTToIirSaGGvJKFprUFd12FTmDAmpBvmcBi0WVrA7MFt3bzZNSWy7XTfpQAQl+Lh+uOhHKOXbVq5gyl391uPqSde7aW3/PVm9169vq2R6gkqwPbe8/lzokpW8Xn271jDBQAhBvA4IN4IKYMOR4Trp0Wkli5RLkNEMLXl+YR49wI8n7B8+gLT0xssdycsdw84P38lrMSckU4zzp+8KgzGGwBRKfIRQLh+Al6FyMDjFYbxiEE9JBOnnVPGmgFkXdvVo3tYGUk9koyMkanI7gx6k1AcRCVjUWw4uwfWRSjlNbN4pp6Bpt8vQz1YWs6S70lLUdn3Yp4GnVnEtddMro2yNKIaAMJYIl1rC9MsLi7HUM/LQ4O8XcR6j1W3ZxBQM8xLdx58qclS1iAXFuFcW9P46DGnUmxco0YXTV4aWXJUxBkDBcRIGqHQxsgYrRuo0Y2PsvZgu7IoAoXm7Fletnh4Aym/5/qHvb+3qvlTcRAZd0arnoNsszVUFQ70cKGpbAyBWqO1nrdNE83RsOvXC7QOnaix3RO+VKGvbfOqD8qaa094LXBtZAKrIypz3Ob6uqlsRhUAYCSlrBACwnAEDahlDka+Qnvt5WOGOTu229Tt3zsYngRjMKWH6faOeS+/eSm6smtv96X9vp+NSv6q92QMgkSd9xQQ3nC804YrTmKshqfvIdw8lYVxOkqbEKt98BXda9UQ9AstcgK/9yHywx3i7UtMdy+QH+4wv7xHGIeS/0onYf6tpxnzyztQDJhOMyaFlsL1LcLt+0JNX2dwWsCHm0Jx5iBU0SUzYgaWxKUP0hCc5wu
94UKVMWrWFHvooiTvN+rLQKWeUwA0QNrL/zQ09WVGtsLoh7u6sDvdxEb9o8v9WDTFgBZNCjRI41Si3+ykaDyBwpMw7HrtJtgtQgRa2SUjZfhtue2RM8LZQZ27LchDBII/t9DqWYZQoUN9vd9zSFXC09rk8QYAjetdILDcQLBkihLYVzmxHJZBbwwoPFNvFjNYvp7oUmuYS3Zrn/RBiDDpp0pCgJ0bWo87ABgilVqsImZbdtJF1HrtKc0l72rOBneRPQ0azWvOSspPqMJtnYNUjJUj8WycOp9bAwqhZYiu/gxeC5Gb/KCPXExOTb/ZXAcPzWaDO7v59tFLP4oDQ1B1kWqw/DV47Lc2vHNkOTTvONmxWTTdQ8R7x27HklwUGMVSY055++U3HO+24bq6Qri6Qbh91kRZjSyKexppdVGJ1m8ULDvNCPMZ+dVz0PEa4XQPOkgN2Fn1De2BycuK9U6iHovGDkDx1AcVw82ASBoNKNInkUJ7NyWBbcYgGn3MXlZJWixYx9+o3uzF0RswhU48Vb2cs33PRyDW5M7VMFn7lRJ9eKPlCqd5gBRMOoWPmssSJ0Kq/KfaPryHc3y+IgBwN7Y3zr3aCKtYcaEPd1Bi8dB3jrsZIdZFEGgjriY/V1mFTf50J9IzPMnXxjSNE72orycRUABGi5LrdvvLTyS08b2GkA1cg+p1b067e6/YU5sGVANpuR0miTwCCxvSC93a0RJ5EeRaPOx7XRXv36IN74DoHBajZcQga+rqIVszWgXy2wrA+uf9UW1G7u67Xu2FaqmFqYgkNxcBtClQTjvXDm7B7yMXX6bSS1v1hcPeePn37Vr2jkpbd1X/9ka3ibhchNUa6C1EDOzXihoZyNwsAII8fcbxThuu4XNfQry90SjrqkZcuoCZLEq5KQ/14SiV5nZzK0uJnnyIYX4Fvn+F8OnHoBAx3Bxxfv4KALBqPm3RKGx+cY/1dEaeV0xPXyHevwByQnz/CwhPs1DlvbcHfXjVDZUbCxhKC3oU79VEQAFlIZ1flWOFK6ws9SmdJBEAiLr8I5GmU4TwsGAxWrbNro7J9wWiGNEzCkmjE7sumI4iVjweSnuYBp5xrUSsfTgBYCWQ7HXJbViKylzcG76J6CXmoxUVl8hxqHk8y9cV6Z3DEUU3zorCO027sl3W5d618ijOhOVqkiMjAC7v0m7Xk3yafZDcT8wWdwGg1lAVBin2I67yvfJ3XdRqYh6FYESscBlaL9szGZuSC+oWX7cP+aHUPZLsVIlFxpYlmOB043AME6ztUBGY7Vp5lGsDNIoPHpazOa0RYihR/qbLgR6rGUbTJGSgGHI7QmYVDFajZOxL+8z2a4u/v65i4FFU7KNCbBHUdGlA99p+Wz9rI7Dm7vcObWO00Bgt67buDZbMHRff0oxd1UHVe7VLewOGIhGW79QcV/zgC4hPnwIhSPM0IljfoVKbYVpeYdguLt1rDgMwHmQxuYZg2znhcLzBePscIQbML+40/3USiSlI9DVPY4m8aPoYCFHyXsMR+eq98mB6BYVRI6ueog64m68sbi5Hkxap+1oeCisune7a5pQXBoeMgh06Y9VAZyXSCCWyKJHVuhQ4UKatNrETQyXqIuHmKeh4DTpeg6cntcrf6lBcBX45X6X6g2o7CCLTOqs9vaACsxta/EWvOZZeR0LDX2reyj1ZRiApc9+zUS3SKgZuKHTqTfuIC6QTOZ4Kh5XjNI1CzyQbj+AwCIknt3mGvciJ1Nkpf2O7cO0NMyJGlGi/WptNJu2NZgLI1etujaI3VG/akHFl1LxPGECrROcZAK1DjcIsWg7apmgYSlfy4owaVLgjT9SzIuV1u4DKsQ7iEFjx7N69Zgari0aM6ZlYjNaa2lorO2cbfZNUQPqFAbS5hXonwN57m9HDeV50m91x1qhx32g9Znd8D7SSC7RbXQ/4NF9u3fK68U4brnD9VPQCw4B8abHwHlbpo0W70vtoYIEBNEwIN0/Lon14do+
sxc7raRalCkgUFu4eQCEgjCfE63vw/UvkEBGfvFcWdx4mEA2qDOAV1p1grY6Gyu2bBnKuxYppFlacCQKXPNQ2WgBQDdoeTAaX7wkRFJTGPULmJOriPwCcg8CCfpq90bq6Qbi+BY434OGIfLgBrAnneCwGy+dtIgG+n5EA9tv+RlKnZuSTy1hDA81xFmPI7hz8HJW5altGNL3TbF4MjiwevRoti+Kdg+QZjv5fLlw/oGGxRdQ6omFCoqEset5o2fTYlABbh6eJttwp9jNmRsWrpvcLtUFjIQQEliMPxUOveZ7XCd76YQavvHar6RAGxGkApxmBlcWkxBty966po0uPq2nrQOgzb/ObuDVW2+iH63m4uStK793cwZ1Dz/hszhXOuOl7e3CaDYmunANANaruhQP86Cnu5TDd3Prr3+bXqtHKjMIcvWS0HhveAFu+rPydWTQSaD9v+6bjnTZceboGH55WsUYKTXi/27iMQk34G1vJGwa4h3cYJN+lD8vw9D1MqnuYTjOWlIsSvTc78SiLHqcEOhwR32chBIQBONwUA2rFxZQW6eNlxtVgoj2PvTCvTsD5ATyfKgsQaHNRAIzN10YPricY4KKnUCOvkPTvrkbLJI7Ke5pvGCaJsA5XCE+ega+eiq7aeC2Gy9WfsN6xkjupD9xQouQBlh8ioNHj471Iy/+ds/ueg+NYi4JDrOSOFRCJrIyaj5tqBO8ZaQbX9Un+bpHc5FP2BmlHAd900s47TlgVVlrWfFERo5y6i74eheIgi5Adkc09gFJbVBT/XQ1VOQfVvxRmpGkvUmMAgK2xqoawPf+g0Y9XYTCoasmsxboTpqsBNBxA4xl0HrctSLzArK/J0uvRRIadsepJFNWAtfeXdwIq63ObL7x0nQIA1oXaflJq2+xvEEKuahODwoSFiUkVOjQFFO8MPBZ0CYW/M2A9U5DbnNZj8KDN19uMnF1EmVEi08863mnDxcNBICgzBP2ixlptnxMQVvhELoDWYLnfFUjK3hvGAn9N6qmn0xnraQYWIC1C0OCckecVFIO2SbkHrzPy3QuE2/cRP5xBx9vS5K1Cf2KMAgtRg8djYZQ1C6AzdODas4tiBKajqDmEWCOnvvtyIU0I7OVzRKztVmwUkoKjCO9BaH3rcByuJMo63gLDEXm6whomLKlW3PsRSYkFDFAghDDINYzigLDNkzMCZRt7EZe7nqZeQXkFJ0+lnkFxlNzgfG7ygqbCzSpqumGhPUappoBG/PaS4ZJJL/+aQV8zY5lzUx9j836JCdh/xy9q/hcMFKq8n7WmtsicPhNTNlYq1BEIGv2EAUMYlJhQFQmLsXLbarQtO/YeQxbHlEUBxKt1EEmt1BQJY7zCMF4jHm6qzJM3Xo4h6DX19pbFgBp17EUarNGGX6ztjrNoR8gSjDFS4ywU4wYCaT4qEiEFMQQHlxMC9iKhCqt6I9UKErTX9nXGy18b08O0u70hjzgHoir12zb2YcES/e/sd280bXAeozu+wXinDRcu0JA30RZnUFbIyIY3TP37Ci8QdCFTmjIdr0HnB4w3M4abK8QX96V/V5oTcjqBU0aYBuRFIrODbTdnoYRzBg9n8Hioi5cuzKUQn/N2cbbFWxcO5FWYejk3NPTSx8uTMIzObfmZcqr2m1x7fNno2YG2HQeblUjLKWxbgtyq/RGGktA3ii3gFuJQ5Y5IoZRIQeCf8EgOa284aNBH0uAJiK7ouOS3Vsmn+OsfHMRkTsPrKNW9seqgwd1DRV04lsw1ke9yWXvKB8DW2/W0c2+0Ln6/O5ai5OBYpg18TkGeHwAil1TzjpHa7sAEoKm52yPDaLQlkBSa/I8ZkEDAqvOUGBgZmOKE4TCBfduSchJhO/96boV4oZGHESj2lk4zWl7GyuaWWScvC9u3nE4f4VL9XQ5KdogWoXqIztPp3bZ8fpCqHFbvkLzJ2u+jur1hkRa7yNSf9+uo7vZWn6fb1JipkpC9/nbHO224mu6kexC
Svl8+b7w99XHKHaOeJREQBJJgADSaRFDSZpI3QM4Yb+4RryYkNVzLgxkw2W6eV6RZorMDIA/ZMCLmJK1RDteOXafN3y6eaK79k8IgC3oYpD4KEGOmkSAvs2jp2TAD5BUrbKjQaOOV2etxgickNOQEl+cpWm+9CoFJ1FAoizFzxbUL5JA0UgjyxBABmWij7O4fA59TaYZ+vzz82j7daqnK65zK4rdR63eRVUOysDwWgNLioTdk/TE+smKYcfIiuXtGyxYv81aLagO2xi0AjdGy6+kX70sFytsDzM2zgzBU48XttaGd3xWj5Sn+7l8xSFw7Hud2Luw6ZgZiYKyBkNigswHjMNTIrnc8u+PyBtzmIKIayNrVuH6vh8igRqunPdm8G/RX9+dmJToDhQpbmmSXve+Pu4d838Rg+ee4f++x0ee0Kulm32j5bZojakbL+xJ2bpbP8gbLiqI/63inDReA+pDYg1KgHABMAhXq9wCUSIu83+mNVlm0AgCJGCgMIM2NBL0y0+0LHJ7dCjX+5T3ykpCTNKg0XcPpdA3OGWlZpVB5PoHvX4KubxHf+xB0+76QFqarQl4ocEd/moAsviEWxhktB9DoJJqMqGHEg67dt6fBy5RITsvXIpkhNKjQoq6LhsrlFvqePbIYmzemh6SLkV0WIn1AkningUyeaOdSX7gF6gNdH4RKyx70lpjK/v2Ct/Hc++EV5m1RdDBUSbi7RY6ZLybKy7lw69EHAoImUPZgPxuWV0pc8y1+gWvgOlw2Xui+99qhfdIoq91Ka2u8gfKMXTJaVYU9ICVnsDsDbosmab2YjwbGIIYsMRXV8Rj2iBP1mtpnUY+hwGZQoxUY2qSq7BNaceRJGjFUavoQpAmkyVVtokzvMDMXg9cURjtmrSdu+GPeM1J9BP064+TzW76tjieSBNTaPILlQi0PxwC1eUCgjbJ2WYRu7JExvmNzXABayK8kyeGSwow3YaEBijBzQlFHDhUOonFSCaOxRF7jzT3WuwfE44QwnpHTqoxDq9m5B8UACgGcsryejqJbd3VTddW6+pPVeX4N7EMBlIP04LLfpRm0jqB8BMWTdlY+t8bLGSG2jr0abe1KFrmoim2OFUIrtGMn/ml/s8GCjLIwMaqR8qwtv2gXaCjzaxfS/lb3RouoKhSUHlclutDv2e8oAAgIcShutN/3pizB7X/PewbQ5I7giBB7w75rBra+rsaqp6azHk+BVbvIqTdaxZCo4fA1gn6UP21BxXYb5fMO/mxgbf27+T5QDVyo7eSzno9fu4oIsTsqb/8LzJblewgG37XRDtm++2hMtxo1r2jqH0A1WklvRnZRl12jGCTnNgTXxcH1BNuFWXskyPKiaQHiWOZ80zuum2fvKO39XedInKa9Dujy/a1kmD2PMjdA4F4RxGoEAaaqkg+gMVqeVg9s7zNvvLxI8GcZ777hwtYbBswL1tyUu4HKXHUJf7j3pZ8T15yY3WyWK1oX0PEG4/VLLDdXGI4T4hTBWbopp9kiGkYcTwgq0jscJ4w3d6IcYVprLqnMYcCSRc/Qh/0mjzPquZkXDRrl5h+OAn3NEZQmoe+O025NV4mgur+3c0r12EJNeFsvIIShEmPU2C5pH/YCWkNQIgC3v179xS+H3p/uW3dY0lo8dJT+TkCLs28918tPzV7E0xuQx2jVgMJc5fj7c2tHjZx2OtIC5V71OSWynaAzsn3e1jl28vsLZ+6iInGKuu3pd/bU/P0oheLGUO0gVXNsemeb9MbQFFIZTe4INvdCw08lhORynbyzd/kgc5FwKg08LdKIhJDrPoB6PzRRFjJoPmnUXgWBNwhQiULNUhCK+rytQdb1ey+619flmXf/PuY0+fnt2ZP9KI4x6o3kCIANKzOxGHn/vPpuBa8b3nh950Zce14KdVTkMADrCbScJTrxnUabRLt7wFVhvTQktPeP17rNgJAT4jrjxjXiOT9/hVM4Y7lbigGb7+YSda03V0inM+goqhR
7SXyDgWzhJwAjACZ9eMyQaM+eRb2dq0ig8UoESpeDNEdkbZutfYuIqyo4xqmeN7C74BUowwzUToS15BpdzVpztGYuhAMApTDW/rV8jdzEtW7E8hzb+9l5j91nQdlnxsQSSKf9zmPU3fqAtobS1Pnt3726mT2D5bd7CS3st7OJsDyMWTZYO2AThTbX4ouovaEx6Nw24X4vL3aeH2NTWj5wB6Xg/jfNh/U4hX5X92X1aL2KeNRFM0aAeX/S+rlMmg/lbP3FoPApleemqb3qSU5qvBgABdL5VNp4aGLOtiP4co+mZ50jAW0avfr9lvMIQE7gEAviIszUxa1HtM2jhnap7mus5L06r5sIbc9gUcv0s9GSesghJ1KAPicAQQz7kmUbft++ZKAflqNdUltw/bbj3TZcZoT2hjNMsgCvoFQ/k5ssgUm9WVeouGEnBetEG0BHCMS2LuD5KQDgSmu5wjggjK9wivfI6laESKUtSnt4aXtTQxhEEyD9vAq05oqVHRFgMYSUCCsDcboGTdeg9Qq0nGFyQqwU4qbHVe/V9cdnpBFTInA5LYN7rDh2yVyMlVcJKLvSvIzd0Jnqw+HrRtbctsvYvaxurgB5+JIufmYsrE05sP/A2uhrW8rxki2CQGSAwIUkYd53u512G2+SdPZRxKUIoe/cvCem3OdT5HvueKy/IlU4rRJOLhBMei//MdLT5sTC/t8U4POAgPP0AyG8wRq2UWPneq3NgLE5AMWABS3i7Rw0fW1RaHNQhVKnDsRyocGqPcM+ynpkTtr+cCwlGhqheiKQdTXnTLIuRWURU61/86QSQ8T79jOXRoHN3X23p8Jhm7IoiTTvmBkQsXfGcYhYkrTt2ZN32tm7fi+rOshnG++04SL1Sjf9czJQmIN2M4XuAVX9PqGB682HHaMF1L85C017guSqjjdAThieLjicZHucpKllWlZwYsQpgkIoxqtpdriuwKjeWpLziGHAFKw2ppWDEpHdNm8B6E3s6jOm4QiKE0rdVzxXOaX+4dIFrIm+AFghdN8PyBd0ZnhmWGvIGjgvWNKdkMEVDqO6LZPHsQSweHg+0to+kMJG1HwEMTIRAvFudNSPS9ES6cIluROxC1HhJIMhmzYer9lPu+3225fyaJfyG5toqiMA7OV0do/TnoGsgraugNqakQLKBKN6PE304pCI1w7dZn8FbaGN1B9gO+r50250EVD1++R9VxvYRGDtc7NhTuq/TRTlO4275pUNcmPz74+5UOOr4TbNzfIMQ9eoDBBczZw+a6UxLIBWG1GeGSLRRgTEGfTGy5yO+l0/73pIaA3WFpbVVcjgVN3YaDh/kPmeYtg8r1ZIbaN5lokRQmjkyd52vNOGCynJzdQVFrN70FrGoUs8D7WvD8Uo0ciebJTH9cNYvaKjdD22bx7WRSKuSZpS5mVF0mLk8eaI4ThhOE7S4FJrqXidRb5pvkeRMqLwZheFAsJ4XZ53H+WY8kCkEXGaMByeoCle9g+dzZtFVd4T1yi0SOUU2IH138qGSswFtvSJX6DNB60Z8J1R/fBGa1EQ3VNse2ghEjXCnWN0152225fPqDE8/Wew55NQcidMos8XdLt76gkXjdDO6L+7+/zuRcH2UR81eM+/gxeb0bcDMTQiVUFfHiaY9BawzVE22+4jQbioYuf4zXgYcw2oUJX/drN4uhyiNMBs70U5RrlpvGNR8p0uJ1oVyp0R6w1W3w3clG1yjbqaTgM6r80d1ZeO2MnZ/jrjBdbtGRHMSFskZCxEi74yOFZ1+hACrABcaiBlrkxwuc4776IAnrC0WxpBte7NouKgz1UMAh1GYneN2g148pUZKUuAZAbWw2t62T0y3m3DZTedU94GzGC5cL8YNA29g0uOhrWSMDiXfFDZhcKIIIYVrQIoclAIQR46LTCO44AQA9K8Ip3OcjwxFqNWlC3WBfnlc4R1QXiySv3EMNbjcrJVDYXWiBGUMfBabvBI1YiICoGcg7VIIYqIw7W
ktuw8nWo8KDRSOQwU2npP77bX1obeDIUNo8nawxIDlfeWLoM7RiqRk7/te9jDU21tEwukr5l9/7TmuhAGUxmo25aPuHlA6YKpYYYWqTrPndST5XaBDLRV7N6yGPW4Ctxo+8frI5edCIw2FA80izCALTmgz/NQAGgGLKIOrkWNwlLlEAA0lO+0tvsq59nBkP59HZs6J+Ci4fcMSiaqPet2YLHMXKIvmadqwGQ6uFVv0GhSDqc1YtBzLB3B8wqsq3RaKIo0rfGqGw7ou3+XlIRFu33E6o8j6b8G1yukKfJkjsgRKhvRq5jk4p2XjaMfPanD2IP+WpRfk+QAGTJ/ydaaCBx2omDv9DDX/LF8TOWzZfgOjbj8w7h5MP2/5SGN5SamKBX4oFAbKRZD6NuAaKLaw4WAwCrHa1CIQiXNCUHd/4M2m0zTgKTdkkuey5ozzidgGGXBC1H2k3UB4Qzf9dWMLQMCVdo5OaNs5IRaW4EiZZMsAguMNWsdSjyK8vWFvFfjae8mWbuIK8uNLe9xgf4AycXlDCxZtPd8BX1iwhgJQGjghehoh9kemB24sNJrdV/myWfLDcr2xyg5ljdIP5VtWQTGdNmAxVAXzNeFW3Z6G8p6b1Bs7EQtj44L8J1vxil/y7+FQQjUTrw5SslFGLAHq9Fe/zB/3MSySHMAKbXePjPYyiCs5hhR58aPTT4H9e/eeNVOwyjFyxU+ROkpZvChfW5zJ04q6nMQQnsNvDh119vNd2SWKCujbUbqoi8zROUH9br5iFX0NWMxaGaPGJD5te/rdTRHo3ynm1sbDDSF6HbHFNjxwo1cGLbOALXbaI2YzbE0Gm1xjszAYSf3/6bjnTZcDbbs8eYd5qA9iBa9sC7IpBNK+jBySrXLLyAafCqXVNXlQ4l8KE4I00EM2HQsNVPh7gXWcQBeiqI8AFHTOJ0RAeQQxdDlpN0sRtC0imHi3PTXCocrXYm1liqOorIOlAdrCsCqjKyQgTnnQlkVWIQRUq11siLKMYyIribFHqB4YdEUWBDKfhQm4WqMwixU/iVxY6Q8XdYgQIH5CMcocIfciaGJvGIgyGObxZgUnjQ32wawWcAWoBgtIAj7sIvq/F/7KgHsiBpUDJgCPMgkyeq+XqUch+2HduqsmvxIC/3typi5z3cVYuxj89x7h87DiD5Pas9HWvV3U3ldjI79fp2r0fLRl9t3IToFaIIwNPtrGWs6T28As1qNl61+npCwOw+d8bJvZcgCndHR5w1pUEkwYiVkBS28BkAjA4t0QZeNOaPVtwYyQ+XasPClwv5yki7qMkPKGWAuLERheqo+JmdxeINr+aMwIlCmqplXb7B9WUI2ZAeKpLh5t2tk9+9AcEFBf6Gs1Ytvh8OFLeqf0/U7NeJ6LBkNwEVanXejyehyI+i2ikdjQUhO4Bwkl0YkD6b1+lFKeKkRG68Rn9yD71+VFu80vhT1+LsT8izMw7SsWs91xgAUJY48ThK1HRxlXXthcYhVrWIHhqlRj0QFMQCjtp8oOScGmET4c2HGKYsBO0RRA7galGJtC5TNr5s3jmKoU2bMmXG3ZDwsZriqweoNlzWMk3wVi24ZAYdgD0WFFM2oAqjJYQRhHBIjMLAk+dSObltDUmHKxLpQCWYBChXW65Wuua6LflOOkFEfNElmPyKZVL63Y7QuqSzYb5o/uqhH/71IxvAJfe5qi/w2nEPnOwZXx65FGMp93pEV2l2HQqWnjPqMNfMh51OMPWeZ9J3zAFBYdDZ8lGXG621GT0AAdozX4I7Vjj/VaAaA6yzg8lzldQKgPd8AES2IBh0u1Yh53U94Y1ZhxWY+ARDlktogDmJoWb9rXRXMgehPfu89+OioRqbSyVkPz9+/DRqUW8TG9kESbfvItcKHVZR59oKPbznebcMF1AvVLbL23kbh21GAi6HSaMq8TDpet9p1BtV51YiiCu4gO43AooMUwstXkvPKGevpjLgMyNoaJYwDkJO0PVBPLAO1Kab
VWmkNVa/UvvfMEgEDWeU7I3l0gyXyYgeRsPtPKvqd0fKLnMImRaIno9ZdAQ002A/zbAGBB2tfIRT9skhVrqh65VV2ZwgEBgEpl8ArBFScw82GhwjttR9qwzbv2bz282nGlKiqg1uXX9L9vQ6CLA+9RT6udUgvCF13HlD47P3new7bBahw8x27b81ouaLy0nyx0wHlftHbM4RvMrrosn+vMO/sbwczVp1L/ddFC/0ISlBoAhrajyCAnWiPghiQMIBI80p51fUgVhgwRHFsVXbKN2ItEc8q79M4gXOdRwbg5db6/V+cU7/mkZNre120jv01Y/f8u+GRAm+0Sn+0YjAZQKp/AxVytWiwd14+w3i3DZd7sPx7vTEpRqvvTqvhdpMv8g+2jl6XT4xJ22+JQwQNR9B6QOAs0UtOGD79GOvdCZwS0sOM9CAFyevpDIoBk+F5wyjQoQnujlOrtm7eLFBvGs17BZI8kiVRI1V6PHUWLuV2ISYYCYMRI5WcXlMa7xa6NUkEteStofLEiBFBIh2Crr01ChpVfcDq0+x3e4s/QSjuWTahzMFc6n5EYk5hCJc7GwOVdhA+onMo4KPDDBbBFQdTbTfha7ouGS0fbdlDD/fQA9jmijaGy72+ZJQuLVa2kPgcrb0fBlFw6QvL41ScOolEFIIyUgCz86QhHsAlI/rYaKLH3JAbCLlBFbzRKgQA3pIB/KgCr7X1SMAbGKwyPwFgXT+iwYaDtMKJEcgSQTHQdFYoj1tnwAgQBnMoIlOtIxpr/r1Zgy443eV7j83vG3zPkANfM7r5jt8mAE8c2+RodX/k9s+ZWghUozHqo7W3GO++4Xr0oQ0NDNJUnxdvQPNWw7F6csbis+910Fzz+zImgR2HA5gCQhgQQ0T69GPEuxMoRqRlLW1QgtZ9racZh2XFpBT5kLNQki3acrJK5QYGCoSxZCncTcyl6Zwlrkd1xqTVuuSgxiC5Lat/smFwHlFAjEcUnV+9AS3KmrXY2IzWGJQyO8aGym6V8UtmjKEalN7LCka6SEDiVGjthNZ4SHQmvznEqPBo1U0zUorfbp/TslEiTLT1JUCr8m1Gyzz1lo1YjdZeXYwtkqa4IAy1jtjQ57l6o+U96D1EwS9q9pESMUz1ouRB9DoWB845RdaVujT6dHNSVD3CIKtFCMBa70GEXD19H8V1kVsDF5YL0S6EzcgoEiiZKxlIrnWFwI211tC/qUbElYFRGYVNnvENBhv0pc1gGZDnE5D3hkkL/DXP1fW2k51KzouGEZuWQJaGcPBtc22jQ3f6zxwcVw/YGS1/Hmhziz1b0M+GQaqBuvm68N9jJRHletvfSSBpWs5vNP974902XED1RC9g5I3R6mRTmjYV+l0ANbJxkCKw88DtHAdTAKUjeJwRbp4i3r6P6f4l1vsThuOEWVU2OGWsd9K1OMSA8fZec1kRPJ+0fcpaBXX7m5FCSXwmpZlnBiJJQeAQSFUfhB4/gqqYrN6M/izECEreiwlIZaEOANcka8r15hd5HS341Yp6BiOQkCGWnEsjPau5MnZgL8y5ZC5MwMRSkW+uqY94vPHILNtmBnJo67N6mac9FLM3dvLDarxYSSCmGC4LIgCVJrJmh9wbsB1vflf1oh/dYrNpRukXhOKFD82iJWoMVNry+IalDUSovdJYW94nd437GiDWsH4ItU+doRrEXO7NAqurwSo1gHbqzC4KFVX3klvuoq5SFA91NIypmtH869VP7NdBr4HYPak/Yr3fI6DPyeXL0Mx3Y1AiRH0gQFoLCWGi1IOq8aLpiIYqD6BpD+R72O1EV97h3vvsktHaONcXDFiZYrL0gTAM/Tc3Gp0+19nto7+3L62T3gkjCqC8XJ7714x32nA1jJzypgtR/cXtPFMAzYVvw/KaEG4fYrS1H93+aig8AuMRnBaE22egl59gvH1A/OQl8PIegERaRpFfphHLy1cYVcSXlxn5/ABrpcLeu6GxfajNGwWq6Cj0wXQepr3
nIZee4WW5qpqslvcLItSt8THIAm6LgVeSls9FDmZEracCgKBEC6RKrMiZtTRAfjsGiSABW4So5JdsZIsQnEcOd6xmSHsjBrSR1iY3l2XSSt6LjHkqzEIk1i64ANTE9XT4WuvWDQoAp817m0PsYR5nxJq6vh3jZiosDbnA78upoZjepe8P1URHrPvTm6I0+VQ4qDDsdorWU2L0EJ/dV6YpGElZiJwB6qI3c84yl4i/NV7b1vIyP2JwkcVZyizsTzmONzNaYqBzrf0EpGTAWgtZiQGgSEuNPIydTBeirsZgFae5EmN2u2zbtXsTNAio69JrcpC98dobe9PVK4Xs5i43v6nHxBSasqO3HW9tuP7zf/7P+Gf/7J/hd37nd/C1r30N/+7f/Tv87b/9t8vnf+/v/T38yq/8SvObH/7hH8av/dqvlb+/9a1v4R/9o3+Ef//v/z1CCPixH/sx/It/8S/w5MmTtz4BK1CUPyrkYg9u9h5Mvxj0WlnOY+g7u9r7/XdteFZbHKQ+Kh8y4gffhWGZQSEinWbkZcX84h5pWbHcnYo8FAVR2zhow0lAYEN6oscYM3isEaH1NIoEDEPAFFBETAmiXg3OiCFg5nqeUaMIH0X2Td2yRUL6pYT6/RjsJm9rM7wRSdnJL6GdR98BOQQuuTDzvV6f83LzrMddjRY1bdcrDbcucGupc1MvPbeGq9SBuXwdAIwhCClDvZecuYm+ovZ0osClXsjawhR2GOk9k1y5BnYWATf6vMdjDSwza2pGa/366fPfM9iIc2uweiMfqDozGVobSwFB+5tZFG+/E/mvvOkQUIkS4jANjkE6hKC5NDTOlKEAaxZjVV+3fbu80wJUw4ggLFRJtLIgnKHOTCPCC2wjhDjU6DJEgCdADRT773uo1/I/nrTgnWl3LS8aqkuRFdBG1/161pNc2kvZ1o6hvW6XjJYd8+ZesnWXwvbzPWPpo7IsTG5a/38Ycd3d3eEv/+W/jH/wD/4BfvRHf3T3O1/5ylfwr//1vy5/Hw6H5vO/+3f/Lr72ta/hP/7H/4hlWfD3//7fx0/91E/h3/7bf/u2h7P70F+EWOy1C62LgKlRe/1n/reX4EEKjSGQbUj7DzCDpyeI738BAHA83SEvIsYLAOv9CetpBqeM08efIi8L8rziWg0XcpL6riu9iQ1PtsJpChVGO78Sg2k6Z+tchEAPvkFlyTnQbiQiC03rffVec1Q40bTjLHpjJiQYHMRVAJW3uSRAjEEmxgjCyD4HUTUIQdWjlsinr8WyKBiF9g/Ig8hcPXQzWJZ7y935GbHDG7ExBsQsx+lxFO+1c/fUNtFKf8LmAYcAmBBi95tHIe8++qJKorB/Vz0PHyE/JrzqjVl5z71uBIstD+qINp4g4aMjm3sP5QEoEFTKUKKLfM/IQs2xsRgr26Y3Wl5abJPfcgbQn6cZOO1tXhbcAonZ9XHrBcehFmz3k+egzfJsKmHBoySX6u7eGgJ8BPZroDyrQVXw1Demjd09szcy0NS6+X2XSB5oWt949aHd43RrrUA4O+mdtxhvbbh+5Ed+BD/yIz/y6HcOhwM++uij3c/+23/7b/i1X/s1/Jf/8l/wV//qXwUA/Mt/+S/xt/7W38I//+f/HF/60pfe+Fgao3XJo2l+cAEP9pOYZOJLqjKE5gb1w9gxgMsB2zGoIj1PV6CrW2mDcv8SV+dTia7SaQZSxnpagOevCuwy3BwxWW3H1Q3CdIAxmqTgUQxXDFPpvkppabxvWkRYl9IMyllgpeHobkBC6PMjlvgNYtgAXbh08SFqCR0WyVgOzPt4pIYtkNWWbcMn8YPl/XHzPsq8ivI7a5TnFpqd79tvMlqjZfVlfc2XN1hL5vK3bIcxBsnX+Tow6cortS6Pqa01U+UXQ1sgH1EO6Be71xktg818lGOvbU72SCRvMuya97VyNrxSiyn9m35d32fNRJCz5pyyOhzZ7LM7rN4Q2nXthz+VvnSBqP3cVE7MeDVFyEB
rvHxe8HXGw5AeYFPuUCbwglPSG6u3Yg76zbmIr3nfulU7yNGf9ybK7nbTwKu2htouiOtc9fBkf7jY3tefdfwvyXH9xm/8Br7whS/g/fffx1//638dv/ALv4APP/wQAPCbv/mbePbsWTFaAPA3/sbfQAgBv/Vbv4W/83f+zmZ75/MZ53NloLx48UJemNJD8QCMfVDD74aQwRlAV1xp1HLmraEDgHVtDOSG0IH2YWsW1TggTzdySHHCGIRVFG6+CYoBnLL08PrkHsvDivW0YLk7IaeMm2XFYZ2lkDlE0BPUm1tvkjjWm9XOndIqPblOLxCWB+DhJXhdhM2kBrAMcnkJY5f5hTFEhMJAUwVog2gcq8u8Yf8AhPI/KjqKALYECuf5EWqE1o9cftDCTPJ2NT5C10cpiq7ElcuN7iwC88XSzZ7XasRyJIyQSDGSAKEU0ShoZDXyKJApQLocmCyYv9MuxkOuULmp0XFGq297b7CaGeIt2w5K6+eG1m/jUmuMPjIzI8X+fkCNtOy7/f6rColGxCT2O2Q0UKH/ve2nidxQ59vyZvXcUFiFvmzBziOxaBhaB+W+tgs781z+dXk1P2QbUY4hDKWxuEHxDQuvH52hsn35ebdtNceJul1Kvti8PbgiwQWNmNRB9cYL1HY97ou0GaiRFqk0lk8DMIEsJbFD4CjnZohRwONG+TXjT9xwfeUrX8GP/uiP4nu+53vw+7//+/gn/+Sf4Ed+5Efwm7/5m4gx4o/+6I/whS98oT2IYcAHH3yAP/qjP9rd5i/+4i/i537u5zbvm7ST1TmVZOdjHkvO8hZnNK0LoJFFiM3E9wlWgkZyBgGkWaKfSxMSJNJhCgicMXwXQMcbXGnUBQhR4/6PH5CXhOWh1jYIbDiBhkmu89OhNqDLaqDMeOcMWh40wkpSLA0A0wF8upP/UhKNRDvfQQwSjRPoeIMwHYHBilA1eT+McvxxBIUBNB6RKGDdPLht0jsgNJ6usdUMhpT+PgLb9O4ClcVG54HlPk9QEod5yNTms2zRtgirIamReddm5Opxle8EKmoepvBh3wlMGnkHAFnryfT9xKXkIGY1qmUh5k2916XH1XdAfmx4B8Kftxkr3xPN55dKaYHm4xAYprfYGy+Z9/Yi94bEDFbpHgBsyDB9NGRAr3/f4DtCH9XVz21+AsuN03Snplpb5wk8jzkHFqWagjwcdOi/Y3PcRn6X84G2X18qIZEfIdJQAI7+EhfHwBC3S0SJQmpBgekaVZMdyK60T1GGsucAFJFef0yd0drQ4YGaZnERluS9uHlPttfe8aYGggvd199k/Ikbrh//8R8vr//iX/yL+Et/6S/hz/25P4ff+I3fwJe//OXPtM2f/dmfxU//9E+Xv1+8eIHv/u7vbr7jKe8l9L4UluZ6cZuLTWGrYrBzASQ6Q5G8eS3oYlHNdAXKq9ROzCccP/gW1ocZ88t74I8fkOaM5WGV9uGaB5ueXiPc3ALDiHi8EW+JGaC1EU8lzuCX30I+3YGXGcMX/rQY4XUFn0/g0x3y3Uvk+xcVAx9GlaeaEK5uhMY7jKCD9hqbDqB1Qh5XYDwCwxEcB4Q4qUcmc2UdbOsCsyV8RLbFQhenzFpnY57w9kH12ayySDAKNR1AY7gySw2Zb4Uix0fNazFW2wisYZsVo2XGLSMTIXFGYtnGGEKR2DLGZiyLFTqPv85NP5rveFboZkLa3G3fSZjK/G91/fZGn5/ri6mNurAXgV0yWnY+5bgKEaI9FouONsfzyPFaZGDfsagqkrFONT9Kddubtih+f6jQobQEqTuuUGftSG73gjfafjSRG7F20vb3QpvL88t5v1JtIzr9jXohmR1M3cOD3njZ78146c4ooBJMvBFzc9Nchj1Y8rMMH1mGz25+/pfT4f/sn/2z+NznPoff+73fw5e//GV89NFH+MY3vtF8Z11XfOtb37qYFzscDhuCB4A6maHCXaWOBGgawm1+04fUPQnjUrLRkqAZbfj
fM2n6nEQYwOM1MoUiCzW8fI6rZcVy/4C7r7/E+cUZy92C9bQiLRnpNCNOI967vpZNHY6IMYLTLHUQaWl6A6Vv/iHSy08k+tIoitcZ+fk3kV5+gvlbz3H6+AXyImye4eYKw3FCPE4Yb64aI8ZPnoEOR4nEbp6ClZZPcSwK9aY4T9okiR2E4TsGAyhkEOuiSqQqHsEioH2yiPy2Qk+A1KUBKLCiec+bKCu0rU1K3ZYG0UYK2JOqMkNURILVwBZVjhwQSbq42n6kcJqLuoaHrwDejaRsAbYFTie2nbsugZ+59fajORFBumcnPXaJDbdwXUCF18pcUVuLJl+2aLfChx6265cuX7AN1HPfM9pKNm9+f4lD4h0P7wx4bcsKH9uiqhsLLXnK5wADVeMFZQHZPWXQcsrSQ86iWeuCYIXvbqoAcBdlSQQelWkqRpbb6Lu7Jy45wYX/SC7n1KNJl6BI/awYL67Q4SXyT42+ugDAk01yu/Y1guf+mPx+vPNlzNTPMP6XG67/8T/+Bz7++GN88YtfBAD8wA/8AJ4/f47f+Z3fwfd93/cBAH79138dOWd8//d//1tt20sw1fdajbXGQPX1M8XwDc33d40XnJfRXzigjdwAWI4IwyRQW4hAPAjkFgX6Gz6Smq7blHH+5BWAT7GeVix3C+5xj1VhQwoBN1+8x5Qz+OEOCFGM1XxCEfdU+C8crpDvXuD5//3/htMnL7G8vId1YA6T6CSuSssP41AM13A8YLiaEMYB0+014vkB4eYpQkqI0xGmWm//+V5NUrS8lY2xxTX4h1SBdZPgsYWVSmTjoK06mRodtQvGmsygcRNltYbELaSRkJmwqIBj1OMJoEYuCqiRmy1YZ2cRvd7iGKWT6542oh2HnxPfzLCF7+S7S6aibjKGCKKhJtAZJccCyPxYYbcZDZt3RsvIs3/9NSOY8+FedytnKot4vW4pAxT3owKfN7MoyJ+/DYHbNKrN9b1LwxvWum3ZV+lskF2uB4DlumO0omGLwgCAG+NVnBdu68UKBZ/FgJ3W1BTP25z68/T3whCocWTkmLmZK2BbAuJhRz+//rOLDNRLOSbUqLx4b5a36mSZLhY1v27sRHvNx4aGkda7fsbx1obr1atX+L3f+73y9x/8wR/gd3/3d/HBBx/ggw8+wM/93M/hx37sx/DRRx/h93//9/EzP/Mz+PN//s/jh3/4hwEA3/u934uvfOUr+Mmf/En88i//MpZlwVe/+lX8+I//+FsxCgEIocCJVBIzkNZKYc2dntYFmmbBZoHOg+g8B6/R1X+3a6XCTrS3Hu9Q6it4ukJ870Pw+QGH+YTrL3wsSvJLAuvTkOaE84sTHr75CThnPAEw3L0odV5muJAT8rwgHI9ACMj393j4+FOcPn6B+cUD4iSNLK3gmVMG62qx2t9Z/otHMV5hPomqwnSSTs2sxabmDFAo8BSjLlo2WrgI7fsa/RAB0XILxA1suOnImrlELt54eU06hEqpH2MotGhC3S4AHIZQqPG+9bkNIxAsUCkcQhMdZEu6gbCkjJyp9Baz0cxFt4B7FREzHAYzMjNWXZSH0Hrnfg5JX0TUhZsWaXgohxYwDBMwSM3fnLYFxmasfGToDRdDry+jdL+1qLnPTPoIaAjilJjhKt/0RCp1RjIDHIzwYA7PvgELbvGP5EgPaa0tVxoiVQYwAGStSULJlTofqhlGLjHjtSphJ7MYrfO6bdfjj8/a9SSW157MIw5ezc9ZjlGur+ZCNTLzLFBv9D0ceZHVs5cmeSwaIxd9uX5qhV3d17s+tr/HUjQ+6gqhXSTecry14frt3/5t/NAP/VD523JPP/ETP4F/9a/+Ff7rf/2v+JVf+RU8f/4cX/rSl/A3/+bfxD/9p/+0gfp+9Vd/FV/96lfx5S9/uRQg/9Iv/dJbHzyPh6KqXi5Mtr5C9e+NF+CZc+oBMHLNEZTfdhGXtXTYCYU9bGc6ZtxfUEet5XyNfLhF/PCL4HXBzZ/6Y2Q
1KGkWijznjPOLGfEbn2K5O2G5e8Dh2XPEaUAYB6HUz6v+u2B6eoPhOBU5qXSakRMjP6zAg8hMUQwIkRCnCE4ZGWsxYjbSOCDf34NCBI8jsC4wJWjKCVhnxFExcWNmdU+RPWx+0TUIxiK08uSRPLYSgSlhoFuwPaSXS56hXTS8PmHsohDbEUEC7JRFy7E23qwyVAa7jSx9vQIzxhg2kGIqSSJGSqztVqDH2HwVY6BiwARe9DkZMWRjCFhD2zPNw24x1IV71JVrHEjKHpaTkHWWk5u0oYjoHh1rtC9A37DUynRJ37rMUmwaWSLktDGkNa80BlTy0LIWxm4PGXGIErWHllAl0B05x8Tvx5ESsmo/epXyPg9DtaeVyVL1+b9y2mz7a4kuxkpdcsZ5zU0dYN/NGxBCT8xA1mts+VBDABAY2RWsW1NS6L2aWFoP9c6Kh3g3xAybV0670B76716I1HxTSuuyLqOVxKsXJKCILJc1lNsAwZyV3mjZ/j7jeGvD9YM/+IOPVln/h//wH167jQ8++OAzFRv3g4crYDgKs4+zPMA5ALTs4r+NEGScihioCIe6dh7GNuwxW69m7G+C7kbgJI3jivHLyuZZW4kTnq4Azhi/9D1AThjfe4rrL7yP4eZ/x+njl5jvFqwPK159/Q7AHcJ/f444CewXx1CMkA2KhDgOmJ5e4/DsCY4fvgcAePWH38Srr73Ap//9U4Qx4Ml33eD6c9fd9ORy7GlZVZJKYMn0yTcQ1gXhyYyQpTaN5wmH4aACrXqj67Z83YfAMFXE174zaMRi72WW97zXvYWXxJsfg0JgKrYLuAeaKh0aqDkZ+70YMip/m3dteQtf8wUERGKMgTY1Xn54+rf1HMssHZ/l2EytXqLAhSpj0ZNY+vYrABpjdz1FDAGYAiGMAYMtecmcCreAcwbWM6Sp40mEYNczOETRHHRFqU3Oot05oEn7CIgh1INu6sLMUK1aT7ie5FjU4dldKLs+YF48NviCef8c566lhiMnbUhUbrH0dVF7a1fPFDQWIStsmDuyj1xz/7r+kZKUQABZuxbIfiWiz4A2Na2sTrlnmSpagMBFassYkj4iLoa7rxcD6uuSJnFOOOfWifAoFAWxjhTkOmv3CTGEWirEYbuuvm4010HPL0sU9+3UdL3TWoUF5utyWhfHY59pmFw8gozqQVz6rTeERNLuwJrBwR7oBKS51i/4m0YLiTlEhPc+xJAzroYJWZtNnj5+gbtvvEJ6lbCeVsx3M9Ksi6FGTcNxQJwChuMohmsSFfo0yzbi1YTDsyegEDDdyHcsr7Wq5BSAUldmlcCcM/KyguYTeD6BH+7EixykEJrjBB5n8Houi47JagWgJHuJAmIYhJEHdhh7B4G46Evmb7uIRxBGSLTki14BgyorXAUY84uKcSLduRk1IqGzq69Y8k5DiBhDG4n1HZ1l+5rjsJwIt8ZtdB5qeAQW8ULDFYKybUikdj0CYyIQAnKwBVYixsG6YdtwjldTm8iuIWF/EL2XTgFIrsV80BocXYikbY8uhkkNVl6lHGM5l9elRlLziqV9h3UkD4Ma12rEyAyZKsTs5qy7BboZHk1xDQ2Za06VUY2SGSz5vFXnEMdLBX7dvWv5UbmA22tqOVnp+C3IRGAgZEZfxF6YgvoMSOG+3Rs+f+aujzNGe3PSlMk4Z6bvgu2NuxgqVhKWUNbNqWhYiReIbI+tlf19KNXnlwOg14133nCZ92aCjY1G12cNRcloAzqa5OYOdmwve4+5eKIQnB3u4trnukiE4w3wXgaGEdezFCEDwHw3Y34lTMPzixkv7hbMurhdxYCbMWC6mXB4OmG4GhDHWODG8Wookdd4c4Xjh0+lqaUq1K93JxdpCUxpcGWeV6QQEMYZ+eEOgHh9HCLoeAMaRJWjRK5xKNBMuSYW0RqsCPKMC/hZjkT+o102XPmNRmseLvRwleUujLq8ZACoQqs1UtD8mhINgtZlZRaoMXOl+K85FNaiT8w
vKYvOYlav2k4i+6iqTcL3Oa9mW6UOSxmPUSDLrFClTQnrQpsYWBEwTNclQkJcy723N4qiez/8AqREJuvGaxRqcK41PF6Xz1Ra1gW0nmTfyyxOD1DIQ2agKESB1IP2+Sr/ZSDmaiTDAPhmmjsGq0gy9ZCUh+aBGkUxF6O1p/iR3HdS3kZbdt0KfqAXt4/IzXjJ7SDM1KyyZZIrLKnSUqtIhFKmkPUesIL2iAoR+rnYNVq+RtXSG5fuCXUwuX8d1UnVOW2M1+49sxPJ9cM+e12Q8ZrxbhsuQG/O2nOI/YXpJq+pG/Beg803OSVo+2w5tyF3gxnveJIdvktpFuik+37zGwCYDgghgqYjKETcTkccnonosAjxJjx8csKrNeMuZTwkRqSEqzng5rTiC6cV482I8WrAeloRX5wxHCV8Gm+vMd5cYbq9lmjs+cuiTu//C9OAOI2SIzvNMMLGBIDnE/LDHeJ8kmM8HEvdV1ASQI28pFs0WY4lrxjCgOjaXRjzza6ID0jsM+8G7BVBwpLz0MWLAanzCgVqMq09M2bq3NZLxHV/OVBZxAqMyR4+Cp0SByPHsInGHmvKZ4wz+7zkSrIYwJy5wJmBUCDGwxBUfkq2k5hxSowlEx5WaF5vwjQdlOUIhQ7rItbc9/3rvQVIjgLgtE3Y+0WzN1pplvrBWYg9RiCSk1Yy1TCCc5Ii+BGFyESAtLW3BXPj1bd/b84DQFPLqcYrabTliReF3u6MlZUa+NxnvXaAsVD7kV1hVfubeq39v3tD8lhtZ+2gUHJvBqi/XnatFcmhtFQj5qNe127FWq3IOpR17u2J7IqDHR9gc9y9M27/uu82HeX1vtmo57/FeLcNl4tYOKBKjgCt5+XH3vv+IfTDu1obL4/By6wfJQDaY2fPI1lFYzCvS/sAW1M5W/SnATQMQpUHQNMRt0pdpxiQ5oTPn1ZMC2EKslhOupKd5wRy+a6cAnJifPq/v8DDJ6cCKcZxQFByx3t/7k9henoNCgHrvUR5eV6RlwXpXhyAOA6YX9wXeHG6fymGazpKrZgVLQ8j6HBVP5uOUq4QR4nMDBKKUitmkFOZI3dNmkfGHoDsFq7kcjm9o0KhRnoxYwgDKASRGeKtXA+5nFfNdbh5VGNnifqUNcemC+CcMkIIEomF7v7pxh5NXgqZgcQBh6GFGn0n56shNtRqO945c8nbEDkqfSQc4oBxGEADmlzTBjXIdf5VWrVOvy20/nrtDVvYgjBnKbIYoFC7A7sTkwXTMYI32qHhkePcGOEsUZxthzN8S3vAQYHsCBi5GrJV67t8j7Z9A4TNe/JH+33/Hc8gHUKl9Tc1b25TZqzK1CuSsBk7UVaJsNRg2frDvtHlHktwJ1o1J7QxWjvOTsPK7o/P/rWorfnad2rE1XldDI289sYlQ9Z7Lq8LYfXBsMipNIvTxn0+iipeRrlpnLcDgLM82P4GQRhANzVPdLx/gfnFPXLKpUA5fHJCnGXRaqjBADgxcsqgQEhzUkr9GePVgOE44vB0wvH9awy3E66/8AzxyROBbT75BMvLe6Sci2I9AIEMlxXL3QPiNGK9P2nd14R4PJSiZZqO0vxyGBGubhCePANNKyhP4KyQEAVRzc9J8xtbr26Ta/HXzJwL8yItmrChTRENLrYILYYBMQwFovHPjk9VACiMtqIGocYuZsmtpcCYky56BBCFxkPvR+9h73Vltl/tdWO2Bc66W/vFTuBCr14hBnUqGcNcaNmjlmfAoHTvhMnNVgwDu0j24nPjh0Y1kheR68MhgoZJrsPazbk1SSze/s4z2TFd+0hQ39TjJeM9lEiNOGzgUGOjMhQadNEXo9bu9cPWeR9p7RFpxu7K+to9y59aeYZdRyNd2PYvBWTcQwXNhy102Bg05rr+7Ay/XjWF7pf6fDU/7hzPvWO69DdwEcp+k/FuGy6gfcAoNA/d5mb3DyFX+ntvrJo26mpMSsdXnWyJrvQh7PUM+5vffad4mfpgl2irsBw
DOB0QwoA4HcHrgtvzCWEawCkjThGnTx7w5IXkviwXFqeIEEmZhbKPvEjOKo4Rw3HE7Zee4PqjDzDeXGG8OSKMA/gskdb5+SucPhbafV5WTLfXpe5rfnFfar180fJ4c1Wo+WbMwtUN8vEG8XwCXd2AjtegqycSbVEAKZmDQ6xQ4ib5/ojjwFnYagaPmNfnm/2FQT3P6iTYtQyajwPkQe3FSYuh6VrPr1p7lkGYorVHgdZHcSGB9EXXnpLv83TN/UJ+IaP2uw42svdrZAgkIhXT9dJGKCLIkRgxEGayuipthRPqMlxacGgOqxzXozkIMXRCM6c2vwnItQmCIGBda3dgE4MeJzQt621YOYtd635RbiYutP8pREh6TgTpi4cwOLIDq6Ohhh/GHrwM7/nOBt509NGSd0kuXXMfOTW1jiTGNHBpRqJCwFQ0FaHEjmiOAgUA6VFUiZFlrSk7deuPNrXs53FPbJyYt2gScNlo7UX33VpM36mGi5hrVNMxXpoIqvxgP9pqPuNce3HFALa2Jua9WFQQcjt7toD2GDwFgAghatfZnOrNM6qQrSM4GItHHUgpUv7cF/FknBBiwHCcML+8x/nFCecX5xJhBTUyQq6wSC8jJ8ZwHHB8/4jjB0K3Nx3E8/NXWE8z0mnG/Tc+UcM44PDsGd77ni8KhHiacf9HH2O5O2F+eY9F/6MQEI8T4iRQXJgGMWTHCdPtNQ7zSfQPj9cIN08dpDi1+UBzCnbyFADa6NXDIt28V7YUS3Rn3YH7e4YCsIb62l8rzrUQ06BIkoL2IQwYoiStF5Li4JQZU6RNw8RtC7/qTe+RC80wGTNyt+gY7aLnYU9TgfeGE6j7YlZ6g8pyRYJTrQeCLYJdXoIvLDq099wQo5REjABSBLIa/kGumT8ldr8tyX9Pbfc54QsRg91LUIjSjFRxYACFjVeNukk0MzNr925oXdeWjHPJKPXGpr823gG5dL1rNL/9rB/M0Fo6IWgk8ZYR41TvMr/WGArhHW0TtW0mT4zfpimpy2MVoptuQ0TI6+93Rwcl+p6H7Yl9dpgQeMcNl1C5NAKyie8p7G8KeXTvs1vUSAvxpA8WqffRjeaB23qHbFiBTTmFqsDujZbdRHEAZ1Vtf/oBEAKu9eFd7k6YPnmJ4/sn5HlVAyUGK80JOTE4c2EXWu7LaPIAajPLuwespxlXHz4FxYjp9hrXf/qLiJ//U+D5hPjyuURan7wCxYDTxy8ESpyl3gsAQgygEDAf7zHeHJGU+DHe3IGub6WtisuLGasMwL4xI8dw6qLiTTRsC64+pAyUglNSyOPSte4NW1PnZw+cWxCtXm0MAyITkuXNIjVRkF8Ay7a7hdBHZq3hqgrxzZrHGQBX40Io8zRkU9yvUV+/ANvwDLX6nh7Xjudu8KKf5zo3Dl4MEWCqeSabt6T1jDy1z2N3bp78RDlJJ4OcwNYltzdcISrxQ9iJVmDczhfqNmGF227OIRT0ZFEsWmOyFyH3uSYfWXljVbQqN/PvImbaN2J2LMEMFlvzTZYO16Rs2DCAB4CS3veWcghrNWBmtPbm3TmMl4gXzXxS2JZYlN/L/xp1eHe/FKfwT2i824aLq+Gyi0D9zXsp4uq8/IuS/EDNCRhz0W/LQyOc5WZRRk95IL1n77wcHg41MjA6sH7P2q/j+ATxwy+KbuD1Uzx5+jHy/QukV69wfv6qKGesp7MSK1SLUA2a9PlakeaEu689B6eMw/tPcHh2i5uPPsCrP/wm8rzic//nvw4ACO9/HvzdfxGUZoT7T5C+9ge4mo44fPAcy8tqvOYXDzi/mJHmVKK+8WrAeDPh8PQV1tMZ0+0NxptPpY7scCwCvkbmKAl6ba2CYZRc2eGqzpNdLr8Q9Z+5e6F2WXVOgH843TXbPEhcnR5CfZgpVCfDIF4Kg1D8bSMUALSKDz5X1uzGwXrmrZcCajuPZHm8rSJEf04HNaocxPnxLU9skbRR+lJx2+G2drludyWfBy01cMb
LrknTVDDUlhWQiK3pCNzQstv6osZoLTMKkSB3uWTblc0lACSHYvTwoR4npxlTnISkA8IUqgTWpXYll4x/P3o4txdZ9iPDhH5J03h84R6pxqu0XyEgsDAYMwlkGONU59wXJOvcsuse8bZjt3mpJ2ns5cFsP5Z77u8B7xz+H1kd/n/lEAquJnrtoQFaL9weIve7S95FyY91jCQvY1J+6/IkAECrFhmnRejAyvDhNDeyLKVxY1Do0Fqx+AePM4wZx2EAjVegmw8Q3/s84ocfSeuSu5eYXj1HfrgTqvrpJPVZy1pqsjjnRhLK2Inj9RXiOIDGCV/4a9+PcHOL8N6HEhFdPUGe74E4IB9uEb77exHe+xD5k2+CPv4arl7e4/zJK5xfnPH8v3+K9SE1ebY4SV3Z9ede4Pj+EdPtNY4fPi35sOH6iPHmWFqnmEGTKt9ZWrDcv6yR2DCKR+1JLOgMWXMRnaNAoUSzZW79bx7RnjQP0efQ/HUrrEhPvY5KgMi5XbB8V1tHEgFcZGXGap1BaZF7e5b+akgzMJ/9FkuOyO4Ry5HydI0QRyGqxEmp4FvlfSsPyA3BoxbZ+kXbIghRYA8lP1agxFjn1hvu5vxYSkPYmG92XmltnDsm6WyAZDlKpc1jrKxCi9gdI7cIWXtRa4uS7TrO97LgcZY6+zgAhJLLNCksL0jstR39HPZciT5y9g5J81vNo0rkC0FYqY32MrTVCqOUcyRWqTS2a1KJWURBrsugz4Zdhz2o19ZEX4PnnQf/3c7R9/d5ZRuG1kkztCDWOWG3/bJvADx+drjwnTZcSEaJBh5rg35x7EELO6Px1H2E5L/jQnagXrBSzGe/d1BgY7Si355AMGxFvMNRPKd8hTAcQdfvId4+IL28RdBeW3y6x5CSwCdA8VaLp7ouSKcz0ryqZNQg0c3NbYnmoBR2ygkcBzGscUC4egKaTwinu5rXigFpzpjvZpy0z4jVKM2vFqQ5YX614PCe5NCGmyOG44TDs1v9e8a4s8DDFqSUgHUBD6PUB01HyWd81tFH2+yM1p4R1O80GD1nhUO6qNvGXvkEAHAosKXdE76XUhNdpRlkxmu5B84PpYYOFoW4+bKSBPnvAE6LaHiuEzAcSnTIYWgYlZ41uRQqOBqosbLgoEXf8l7f1Zndqe9FeVFrn6bhCIpZztHgXEDuU4MlFe7HMIA4glMldPhSEq/AwXESwe1hEgM+HBpnwaSRTN2jTyEQiVK5SWENmj8SY96WURRD5tEytHnJnjrv59pv41KOq68TqwZNZtvkoDKxKG9otJ7QQ5mh7JdI1pegIEEcIHOQrM7ORfcedeqQqQYxooAlX47sqwEPmkdtodM8faeSM3KCNXVswlr7tzca/nWf/0ivsf7uYiEMSG7JDQQU1lwOAJtsSt7cnUXUt/Pgt+QE2++xeLaUV6TxSgV9Z9DhFmE9gdJSFjb2i5sRQTQXwcuM/Oq5GISUQONYDcV0RJ5uZD/rGRgPzXHSJBHSoOrxcRSdxLsl49WasbAKiRIwzQlP7hccX8yYnp8wv5pxeHrA9GTEepoxPb3GeHeFvKw4hCDXcVja48lJ6tzMgK1zod1jGNrrDOx7iZyFFp3WKv3Tjz3832/bMPqdB7h4oc6zl3nPze8rfJI3UHYxVlaH46ST+OFVkdvi0728NjUKx1Kl400lwrz3IcLxGjxei6ZkOgJxBMZjSegnRrMY+7Yna+YShTUdhp0xM6ZiOQ2uBq/pFAzrQ8Xa3yxgigHDcKyRFmdhmALVkOncsCEPdv+zyqkZG1ENcmO07LVdL+90mBix1jrVfKG22IjVCJrBl0islkh4WNHGpSgLqBFtb/iyN4J2S7nf+WWjkH10/zangaAQIrvuBuz+X3NqADeOiDFMx+EodXcaDZd7sntWKpnKUIZQ7hevRFIP23f+5iZna5DqQ99G/S3GO224mKhGK2YIbILVs+296CaxuKflot/bfS/URTIGtyjlNvwuGH8YwNO
w3Q5QjxUaqcWd71FAgsOR4wCKx624aVrF2LgFsBnWWDOtGE+fSkGiKhoUKA5AfHheiqopzQK5xBH56j2ENJfFMkwDhqsB482I2+MAnFZ8siTMmTEDOJmL+fJcasrykrCeJsmHLSvSwyyFzqe51oU9eVJyXAZ3UMhoWnznJL1YbJo6UkczxzY/pM6E5TF7aKS/1B1Nl72Rcoubb1pargeg98kOY8t9zxZQml+BzncCCWp0xesixISz6kTOJ6SXnyDf32O5P2FR5f9yeY8Txpsjxusjhs99hHD7TKLoZ58TAxZH8OEJeLwSsV31vs3gWOsOK8SVyIwhi4/l31iV6+vCaaNvA2KLJaPCjFJkDSQmHGLANB5r1EurYGZ9hNuNPZr2bg6liZBzhV+XE2hR+HWp80cxyhzFCRTOErGmBVYwH83paYwY4VIHZKCNsnw7mV4f8XVjj6HqafOkzkHPueyZrn7EAFBijNHarwTEeESMuTwzmwa8veOGany9Egk7+NnPSVNsrSbs5dIf9ZuPd9pwIcaykPQyLyAtQHQ9knYNEtBeoNckDAtMZLmrS0ryl7bjseYMlDoMTzIBWpYh2gehem9AoBExjpim6yrx06nQF0POjByHotpNizSyLPA7kbC1TncI4wSa74Tddf0MfLoH1gXxiRA71rsTrt6/x3IntWQPKReDVVUhAM6MtCSkOYLCiuW4Ik5yfGEaQDHWHNlxkf2rqgjlCh36olXPMGNAIjCgZWX6cy/Xjtv3L0Rg3P2+5E7MOTLvvPfq4SJ/PRbLm/gWGJF04QYAXSzt+pdC9WUGr/rfIk6GaUzmeQWnmlsEUJid8XQH1vocno6gGz2eNEskkUUoGXCLTm6jJoaHCytTMmlexuEYSjaocNGsRbzmEx6GoBRqwsBciriLE+idDM4tJb6/ZmWOqfym5FBSS7cv19cgMC9wTUGhSN6vJWO3gJNAw4gDmKWYXZwYlNxhb8D8exlvZ6jK4Ws+a69T9JsMy6MFcsQbNXLGIk1ZDJ+I2MvaGT3E6nPBwD4ZQ4/VDtGTggAUq9n6HPLeti3Mm4932nCxw7ebhajANq/JQfSjhw/3Rs5Fq203sQmURashgZQHMdSHwv2mQEgekqSMEOqNZ96b72wLyM05JMKoLSvi4djAFZYcZ84CLSmBJPiFwo57AHhdkM8P4NM98stPMHzXn0G+fylw3SSEi/XZE1x/7grznfT8ek9UYgG0qgKcuND04xSQl4TlYQWFUGjzgEhL5XmRAuEQxbh6ZY1OImj32hl8tEe+8Quhfrah9XIGEGttoBWEx6kWiJtDYb9x2/cRlkEpdp3MGwUgDSKHgGE8CuwbRyCvCOMEnO4unyNQtSVzrGhyCCLXtag+4HQsOoFBF20uZKEVFA2ytMW1tvHwRktvr3KfXaL0Z1aBYO1b1XQFXgEMkt8wo5UsJ72Bdd1zBRRKfBma22qKYc04aURbrqjfdsc2ZaDJPXvCVA8xCmzGtbwialSorFICkECav6rGCqiRjo98Wojx8nXO3Bovu079kG0z9pQ8ylR026n70M7WsLVEafZQtqKta+TWNjevJk21bSsKF7W7A+0+n79jDddwBR5cM0lUuGZD5QQAWnejJL/Q7XpfftveYHmqqeLlbA+AUZS9x59WcF5LbRIlR6QIzmCFQTw9Y0Tp7zOqN2P4MiA3QQyMWVlGJhVkD4u0gSdMpuAeJZfEnAFlronahIpxnh+w/I/fw/qtP8arP/xjXH3+/4U4DYjHA2gSVuDVF95XoV6S/NXX73DzYi5RAFuV/xQRpwiKhJwYac4YjlllpE7lHAFZlMcsMGw+BwT7TDs+W96r1H35hcauFYvWHseWZWoGmtNaKe/MDTHGrlV5nMxYDRPycGzEfz0bsJ5ANVw+32P9nWwkJsyJkUPAdLgRwQplmAYiYD6L4RknMULHRVVIXoDnE6b7e3AHc0sR+CjK/VZaECwnpA6ZzU+aEeKxUTiX2i45OwL
AVO+tQg7o/t1vwllHYqlFFqklJTkw6ZwAQxzkmuq/hcxE7tn0hisnIHthWNSoSwuc7fr0OW8OA3BwRbvu875Mogzn7Jhifm3tsgLDJBEYgJXhqO7tPAF1Dguj0O1m23dOcoIMbGrLbFtJjUXpoJy3/euAeq/aDn2eyZMlbJsSZVcDFpzCSkNo0W0NRomMcrwBQFBHhy0iRVtbaOvWeX0Nr+CR8U4bLlDo2Hg7n/ew0Q4E4T24pi0KqiEEUHNY3fbZGa7ansEtnMDlCK8k7qXanUKoHuE6ayJVtPZqa/jqWzI7nUIWZhHnmhi1b0N/QctD3ac3yg8vkO9fIL98jvXr/xte/ff/ifnFHZaX97j/xidFnJcUkrIx3V4jjgOG44Dzi3OpGbMRYkCcAsIYXf+wWJQ+klL0eRxKW5Wo0RafT9WoA8Kiwyjt6KOD7vzC4xsTevUNK4CNazU25mzs5ccMIjTSDVCcgUap3v3WPOsq3spN1FXmhM3bhbA3xwNApHU7ARhm0OEKQQtxjXTDp3uBEE93UphbdDJzgVFJa+Ho6gbh5qky7AZ46r/dPWTEC6IS2ZPCXFnp2GbEMtcbibHVVQwBMIZAIxgb6n1ojpfl0uJwBJPocgqxqXbblqLvVRRnCjVeh+qCbgqsLDXgr0t0ZQNOwssg3B7qs2aNgVByyEgtfFmjMWERxzCA9eZgoo0B7w1P3x28j7yIa0G72QW5b1rYkBnIJCciBox3o7jeWHl1lhK1u7nsySXlvS5tYUPWHxF3JtT2QRU2FJfCG+Feef9txztvuHyeoRAktLvmhmm4ZzzsJreRc/3zTaBF+7dEXBGeirv7WwcnbbS8Uq6eIw+VoBBWxDhJ0WuAfWO3zsQWV+si7O9lsm7RnkjCGXy6Q375HPnlc6RXrzC/uMN6d0KaV6z3J6xBIKq8rBiuj6VJ5ag093icMF7dYXlYsdwtzen6rs0lArP23a4PmP2d5wVh0EUsRJArRCVnTHwXXYt2y9xa1AvI9bDrySPgIV5/HfV1X2QpkypwVOy9coWcKuOMi+e9ukS8HwWiyYyFhCZeIjUKoDSC02FTfBxudEE/PxTDVZQlypeCGK5BaOFFTswNdjCZUamZZMGUxURriDIrzEQbAxZVQ6/x8gNv4KgxBBGZdc4VQ6BTIsIYJ+RJF86cwHEFLScwj7Um0piFa41uTSv00WHXUmvaEoL0sssZq4sQ+5qzMZKKGgdMWgu3aQ+jzxDBVCsGlY9Suvpjh+UMljcqtR0OVPKt5qm8sWjm0p553WllHdZ92PatMNr3rav7fzwHl9/gnEpzTV2AklhWbSBLCNQ6cN/OeLcNFyp817Q8sGfdmuDtGI4CR+zBgulycliSuQRQ3EZzthh3kVbTYtt/vyyOss8Cj1ii2WjRFj3EFdN0jUTSpiMF82Zq3ssW0djdqEZf5uEIWu6bfj2UFmQnShuvr/HkT30e6/1JDJhT4rA6MM4ZFAIOz26LAPD1F85YTzOWuxPWu5Oc5iP1dZavkWkS+DBMg8BgZqw02gjjJIXK06HOu8FDmofi8arxqr1T0j8vBeYzhwe4CBP3El6+mLgvWvUMu77CYgvNAOeUMScghhFxnDAenrTqGe446v2UtO/VWnsa9ccYBvAgTLkyB64w2R6XSFT6yGT13L2Ab4ZnisFR6MVoiSGSkxr93KJfoKsXvigikDhjJmAMI+Jhqj3ElAXIHX2dYp0PibgcvO/+bic9FKN1v2acVsac5D/fysQU3GOQVkFTDBgCcBwCpjhgGAaA5vaa6FpDWBGjwGpBn0cx6jVX5cdesbJdixqVcGO8zLmIrnidUY2vCSvbvBubM0PSBLYWDEEYnv7+B+Qe7/PsNp/JGa09Mko9L3G5g9Q+yJ0QxHiJ06OlEYGkm/nr+ASPjHfecAGAFdABCvupwephv35cpNdeWAxKC2vzyLvcyC5Gvms0AZgXa/sE9o+VnQYfBSDNiGEQ9XiuEkORUDTz7G+
DPSIykJeihM5hkOZ966KsPEgH5pyFjn77DMPnF2W0Lbj/g/8PDs+eiOCvFsOupxnjzVXJP1GI8tm8YLk/4fzJK6G7K/U9maKH9dXSCC44w2YU7zyvGFLGePsEpmzNi3reIYCuJDleFufhWHKdpdCWASAAbItEndJS6Q9RTmhKsGxRYCnaLHUvBFgrFS8+GoM1qxRv2y6ILYLlsneX1fJCdQ3gDsYZEGnYhXQCAQPcveVklmg51UJb/azUNikxIbHNh5xcNK8ddjxqvCKQuULSvji51G15Blk3Ngu2nXsGZog19DVJ4mwFTPEKcbzGQADN9+iZgXtaoP7ZayTUlMpu7Mk1V/r/y/PaqIVY77PrMSIjYwIhJCWlBCqFyeSdY1ciIxE5kPWC9XAqsDVaNmxz7O4fH3k1p6o3WdLvbR0lERHOpPJTLAbP71+af67NuinLEtX5VGe8MilD+U5/akFxZVJnJgegSmlYFF87jAPiFHzW8e4bLi/bYw+vbzHdQ4GPjI30T/9w+G0ZTKmv32wHHnpqe0b5+6CHD73gb6H9drBVjNWjrYrWkId+nev5GO5vUUmKQAqg6YDxe/9PAIDl//F/URmmhOEv/01c6T7W/+f/Vfbx8jmG092G5cfrjHA+IT45IYwD1rsT1tMMI+cnrCVS64cIAi/ltUCLJyForItAYpbDUaWFMqd9Ih7bRcMXzPoF1+DU+tm2iDIGUq/VRXJuf+XblxZw/df200Zl22YRAep0hAr7uB6hCESYIoFImIVplTuJCJjGa8TBXXNlQXoNw7odO0B5QeWGdBCUOzf7fft6m9PpR0tSkD/WZNvmzTkvgcqcT+N12805u3otdM9sM+kCFxu7U4gh2w7HS/K5Fs3hDCj91rKyIAPkGSvsRR+Y++eTQqGgG+NuD4LLzOXa9rkfH9347/thf7GemzkT9Qc12knqnJR7yMGdfReNcpsZ5J4JFDJYiV3RGTB/HFBjFliKoRvYUI0Xs4g723mN/qZ+y/FuG66Cu7T5mhIZaSdVALuQ4aZGxOPXPVuwwBJUDdClHFq3H6HPdlBUACjrMXIGZSew2xvNfj85A7Q25xTUqO2ZUN8iW/IesgBTXkHzA7Ce4Nlx4fop8pMPxXP344M/hXz1Hmi+x7CeJV/m2rPzOpei2cN0xPjqOc7PXwIAlhBApxkrUJpU1mnPWpsklO4wCgkkjAPGGJCnBzFgOcliFyLo+snmegmcNJWHyksZ9dX7BqmK8cq7i0WJtrjCSQFiwPxz2S+MZd7ht2WOhSyE1lF5Tlzo5H3H3TEGpRxXqM3GFGu3Y7/tORCmGDHqgm/ntKZW2BcAcueFj83N0xp6QNiQNYfn53C7ANlC6+c5scGp1Xh7J8PYsAZzy/kZ1B0whoNo4Ol37RqY89fQ3lEjL4mu6zUNZPtSUopLvGR1JoaAAoPa7wWC34ee/ayRznEmgHa+WO81idBtjoDWgPXz1w/v+JgjZvPJQfcTdL6z3a8knan9muXnzjllpecXLaA0FJIL2e8tEnPnLkX+NgECG0a9R3rju3ynGi5WppDcrK+pwu7zBJ2nsWd0amjsoIeedPEao+WPtWeg+ZYD7MklxXBuDStlSH4ur9tjcb8pVPEQhLmm5zBzkAWSgeMwgMIAWg+g4Vh3c/VUjFYXUazv/2nMNOEwXoHmO9nnjUaI6wxazwiqSpCGEXQ44jgdQeFjxGnA+jCDXt4hnSpt3tqsABA5qWlAnEYMqqZBh6PKGd1ItHU4gq6fgKcnyMdbUYMAmnkQXTQBX81ISH5mC2sZZm9er8FyPuJhMBKAkB30QhW2g35/DCiMQS+ka5GdyQUtClcl9fjPa8YpZamFcpZPoKtQ4EM5Xvn8MAQch4hDJFwNocCSq57rrDJLvTQTUJ8UUV1Ag+kVI+AWsKj/loUPba0esAPFOpKKOQ9z0sUcLEpeWeq+2uJsgeuszX0MYrSNRGJED5+7jWR5nKkq7JcTkhU8QDx8dqc
bjkMTfQmBQRyCMQBDJN2PqDIGuaX2R5cfJSIEthYgO22QgHI/+AJlf5+aUerVSMp1csM7APZ3+Y06CyEDq4bVYxikGzlQn/UuF1/0NXWdojAAzGDWJrDWp0vP3xw5u18jVXUReb5aZOPwnWq4NuQI91q8gy4qumBUdmFB+8gz1S7kxN7oOG0b+lZlMsn7BEikFSqZg92x+ZYrxYsj8aw22w8dww7mdTFmyzEByAp/8YAm0cOHm93zOj55D2LergB8CAA4vfpUErvnOxhhgQChYq8LaFkwHF+KMr0zUsDlyCsFYS+mZUUwooYVJO/JdFkErBqSa2I1DublV4JBn5PZYPXQ3ADa7xhL0yCYQoun+jsAZVG1RaREGg4a9AXJtQ7KoKtcjmkhIK4ZQRdTP8zgXWu366HAmbIfYiBoztOMce5uXzPQjw4PK4UBlne3NaeBpcv1QDHcqdDDWSJXdkoOmufrPXGxrIwlKFVfDYrAtqaKzmJYWIxWoHqhhs6ZJD3PWCAsAAMQ03ZBJTOCRm4Kek11/w0aY1PUwceB1HCAYJJMPoICfLTvpKDQ3qd2z5zX/eLuoEZe/rbomco599R4c56IgcCSs7M0ha0/hLXN3/UwvCFO7j371+/TDFhtBFvNd9bjfe2998h49w1XMSpSeFqN1rCbg7LwV1iF3eb05us15jZGy9hEYXvTbgxkZ1Q8Iyi7hxgArBNtjEHcI3tA0orSINMb2TBAeMxhU6NickumimCGy7zlIVS2ENFQZIAAYPyuPwsAOH/y9ddeguOT9/Dpq3scjlKPY7AjHW8Q5hOwLlXtwbEVjKhhJA3Jca0lEptf3IGCkDfoeF0h0GEUWvQgOQ9rIMhqtM5qtObEu96rlycql0gvre+hFKjml4BqvIziK79jvW4ouTA/zGjZ8Xj1dZ9jsUXN99HKuRpHM1xSbyUL8HnNCIlwvyQkHsWADWFzDGZIATHGj9X5VGPcwUg2NMq3CEw+xy7aIPMxdEw7qIq5Cb4SQrCEvczBkhmLLe8ORJGoKmCMEoVaXp+DXRcxTqKc7gwp55oL1pMXw6vSUztzYdRxu6ajzjlcns0/8xTQRFRG0jATwuBKWmhyWJeN1ppZo9SMl3PCeU1YEuNk8mhEOAyhROXHGErNnEWlhJYAUtYcXYQyWSnGXM5dkJlcy4nsP6csYqmGPsq0bdj1sn3Wbdc8mMzTZvrfeLzbhqtbxMsitjOxDGio6zwMCo0KRjFC/cXa2595GWbsvAxQH/1ha7BMvgmoBIGyyFCtWLdzKqob3fkTawG07mvlCkV5qMq8Rs9qkgekbu4K7TiNtzi8wWX45Jzw/mHE4eZD0PmVLPzLgxiZEBCubjCozh5FkSfinBvIUJpR1sf/8PSI5e6Ew90tjvOKw+c+lAjh+hbISYVSz6Dh2Gg6lqQwzFBrw74Axd7Z/I7GeEUiHAefU9mqfPu5NAgmM9f1NVWvOrNI2tgitGbgtCbkDJxSRs6VFGCLdiTgWhNNiaGedpb8VGZcjxGRgMMQcXBPrkQtBr3V9+04TM3dDFjvGfsowC6BOVHWBuMxSL2PQIDq0JkRlOiu3oejRQ4hIlIuJIlUok5uIEQx3hnHIWBMjOspKiyIQrhgfbDL8Tr9zwgt5CdqYFxga8z7z2KzTcsH2ZeS0vvIrRuydkR9jpcssOHi4OKiqO4MFoBKad/hGZtDA6AUIscxyHtRDLvBq30jS8svFuckyT2xBkKkEXGaClPVWMzlWnbojb9nqNwvaHNkHm4kJXWgNWCPyVS9brzThovSDEpV7qkRPrVhC3/zQzFulFZwgEhx9bDjnuZd97Cip0J0cKCN3mi1nxm+bSrPdnld4tmOF9AVM1cDuzOY23/tNXlPFHXh/dzTm93tvPfkevf9fqQsLQqICNN0DU4L8niFcJzBD3dCsR/vJH81qsDrvGJ+JTqHbLCagw5LYbIqa8RpwBAi6PoWPJ8U/tGGgnpd4nQNBPG6iYQVxgB
ysEiretv7eS1L3pNem3aeAHWAysNaZ7NG0DXCs15FmcVoWXRlRstHWn7kzIW80cwzc4lezMP2ixUgJAzLvRXvuhSFAkD97NLoF3KGi8KaD3RxsufCwbglCtGIJyu8Z5BnJXyI8G4IjGXlcu4eMgXES4+KJli9mDmAPTmkPkHdYprXGn01z7X7MQVwaJsjmqG/2F3dDLQ9/56JFwYEPW6gzfnV8oKWWWqXnQgYY8D1yC7izMV4jZFcFNrmBBuqPddzZKqak0Y6kiiNpVEooh5z+3tDb/wwqLlhK/ZOjIvcjJVoUPNe9P+m4502XKWRJFBvxD3DtfcdzuIZpdy+56HBvs+SCeReGl10VX6m//ZeVf8dM14Mdl6J22cIYI4Ak4j1Avt5Nzscao1XezPuL5qfZaxZIIwYAETR36PzK9BhFo29g3Y8Ps0I44igBcxpyZhfzWCLVBxVfnlYkRYtSh4HaV6p0VuejvLQQKNoQOZ+mIqXG5nAoap3p0DqzYtBAwj9Zbhk2JuI2Qwd1Ui5GDWuhaDeaC0pl1yOMeouzb8RNFYfbVCNKgCFzaIVycpiVYgXDNEcVGiKSRcnzXeZY7S3aPj3+o/L0ZgT2D8HXgCAAkyBpjTO1EjLBKeY7TkQ2PqswZGQNWQObIEOgQBQXfz3nh+7VtwdfUd22pzXTt0S4oRAYeNwxjAAYZV1AAm1BqpNMZAJ+ep7ZvDMfvRtQIC9NaPKNx2HKNDmwDikUO4jn+caYyjwoP3OVi6/3vj7LZEZZEZIYrwMjWkgxp3nxNifA3V236JS/14HpVr09W1wM95tw0U5SYW9Ya8dNl8EdcsPDOsOzQ39GDmj3WHYfd3QbhnoH6vM2DVWBg3aQ9ffMEFx9XIul2rWnMcjMjUE6xdUF8p9L3vnsN563C8ZSxZI7jgwnowjjlfvCUxIAXz3stzMeVmRF6HZLw8rTp884PxixunTc4m8qjhvwHQz4fxilnqwF3e4zRljSuDbZ4jvfQi6uhWoNM0I5ztpSkiabzPpLc13Zibw0F6Pvr7Ls/28w9E7AJb/MJ02i+xiBlKWRWDJRkMOGCOQsi06oYjSLplL/kJe52LYxhBKjuuguYxRady2UFVWYzWqfliSXCKH6hjlzRJezw2o93BvWKl8Txdk86ijyZd1zwUAY3pGIjBB5Inc8c1JWJXnNeF+yZs5QGbEWD17g5jsPAwGk9yjSkk5ynfT1qSLCMy4GMpS14ihCOja0bK+T7miHT7nVVh4kOisapi2c2d5Z8tjiTGrzozMbyWkiEh2TVnYnVl8bjcvnjC0F8W1awA3r3tnxhOPLIr3ijwIrOiDEmfISGE75BUfkeLbH++04eqHKRqURd63o/a5KxuXjFYYhHYOiHdVcOuAzYN5Afu1cdloSTEesxXsKTyon4mPidYAc96ejxyIwJ76m8F5jyt7mKwuyB5B+pMY90vShV+inOH4VI4jZ4T3PgTnhJASDs/EaIVRAB8T5E1zwsMnpzq3mQEMWKNAiqePX2phcsR1yphOd+D5hJgTwvVTYDpK08RhrBJZFISJqNTfWDcu14AzsORq4OIkM5jFGzWDD8ji4euMKryoi6jCPZGApIH8EKhTnKhRVnbX4z5QMWD94mH3RQ8L2TDDBGBzTBmMqEZVoi1Z9BKbqZXh1Tn2DNpeU8BmeAPm3wNgslimWmH510Wh1DUzTprLWxI39WxBF0sz2GNQw+0fYa7RqDXYMEg1apsemxyTOfP3QLkPzNBYRBmka3Z0+Z3NWlGeM4iV8Cw8zjCPUoyJ5Zdr4bHA3awlErUsoFDzlYBR7zu7BrJdkXzbidY6B8YbLU/M9w6ZGU8/vAyW1bVRrCoctm2L4mvd1+W8oV8Kvx205/8vDBex9MsR3bBcF3hVme4L6wC0ArfdzWhkB8qKCgCA70Rs23FGK7mb462O3Rkw+9uKJHeNllc2L+cvx0y+gaTm+waDOx1Zw27YPynBS0CIBDkDSeleUyQ8PTw
B5YTw5BnCOsO0Bw8AKAhcuNydwFnanaQ5Iy0JnKQlStAVmRNjeVgRpxPOn7xEPEpec1IlDT6fEG6egq5X0DqVdu4obVymtpi6d1iCfm/MGIcjgCoQakbBDJfVDwE99FrrmwyarDCjGbcKPYq2JGEMkiRfUi55DBu2gPvaptKk0zwPyCLtE/FyPFsoZu+aE8liJmw8a23SDrvTjA2W4Yp/93JfDpL3BcfeaFlkOysRw8oCQhCKu81tMVpqsK2uqoV0tW0KuXtb52cIAzjmJj/cOLcNOzCU55+CtlxRA9bnzplURcNqnMjBhTYHbo2xSMi3sjc2ZDPXDiVBEIM2cixGq+QI9bqbELIVGF8afl2yOfKRnjlRdgx2r42RMIl+f3HEzEkgrjJnTK2x3M++Xz6mtx3vtOHy5AlRh7A2D7bQSzfgDY2cjBLv8G0bQcL/glfDGa+uUjyx3vtdROU908z17340N1pHCGi2542wp+NSkIfKsw3tAUta90TSsDDGQSICDbeYKtTw7Y4lZ9wvCdcjkBYufXbmxHgyPcX1M0IcrzHevo/1j78GOhxxOL4sqvJXX3iJJ198gVdfe4H1YcV6WvRUAigSpicTxquhKGrkecV6d0KI35LvTS+RXz0XvcVhBI0TaBhh2obWnBKDtkHpGkGakUNeEdYZhzBgGibAa6k1Ubl51XUbUR0NuRD6rzke1MJR1kstsczRKWWsSTTyPFW/94KXLM7Bkhg550LOEEaXLNwFTgu1SNfo3XtkE7Atwgq9cUs68SOrcWSuOdhivPyiDiOGtNDrmivjVYRua5QxhoD/L3v/FivZlp0Fg9+Yc65LROxL7ryfurl8kW9gIf+W+3eBGqFuZKsFTxip1Q9gJPrFKlsC84BAvIAEJVBLvAFvvLmRUBvRMj8PCIER4G6Q27J+jMsGu8pVdapOnpOZO/clIlasteac/TDGmHOuFbEz82SeW1bVkDJ37NgrIlasyxxjfOMb3zBVqZBOE4etTmtO957bHNzgU+QSUYSimwR/eW0Y0jWR1gbrOAv3LmfuCXlxmTmpgbMamf3yhVw16TxI/VWdDjmLykwbstVJ5++WhXczkUveuzgUN5W950NCy+GfN/eJMTlk4azAgZjUTv3k3fM1liBsFBk9lVAzpblcr2JvtONKNmf7aZalQwPL55AXEb3hIgLSXC+92Oa4rPy8aY7PITt0XvZw5OJ5jZLLE8z7O7tFjeEwOAZg7FMkXG5fZhsAAFejtRw7BcuRWo+IT50dPfc7vIxZ4kX3tBX4LygkFrEbI+rmBDAOoV7AGstEjeUVzGKFo7pCc+sIy3tnWD28Skr0fpBR9oV31X6vsdvx+w8j6n5kxY22RajbRKYhnaRsLahuZQBlBWoW7NRk0CJVxbwmt5tOOS6vAWCatReBT6JGz7PhMivWWpuxcDqA0Do4w7qDurgnNQXERPRQBxciYYiirmEABEKgiO2Qzz/3OEW4yOoPEEJE8HkBIUKK3CfXGXJ2VDLbUk1Pv+9sZSyvPq0TBgg0KN9JnZXSwNVpaVZZWUIF2nNYqfH3BiPK+6OwVqmoQYWYrIoP64iSEnaf12SIjCAEAdHVINTihGQtcC69LobZ+dZaV1H7TvsLqauGKOedA+Mgmo9RlG1KB2DTecvZVv7+lPQBgcOZjpT+eBf1/ESBaIXBOWdyKkQLBAyGkQGSDNFIwF5OZlbCiX5GCVtr03j5XSLiC9fP59m3h+NSKy/C55Atkrp3mdbLzzyBuFicbM60Djmtl+kAf17jZ14Y9r9L3lj2x4jElQHSKJTZeA6oNIs6POMYbjQOzgg11yCpgr+u/cRnz/AbXz+HK6JJhoEAIIAGYFUt4USeyxgHtMw2hDGg5RrV8Qb1yZKbkAf+55N+4YCx6ycU+SEEUMfZl6kdOy/JyNSs/E4Nz6eCMTw9uarSzCpq1KnV/FxV56ysbImYCwPTrMViTgRC4dCQgwkoBCVZXm1rVI6JI94qPTz
3nyktnusTuqjlwEBD3pJpRzC5ATQiXWi5iJ+zKiq2K9mRubfs+dd2GZxphljWU8fiOwwhsyy7MeyTEQqGXILLb/gs/bv4Zpbn0tqwZJoko2GSw/J9dljPUZov1vnCOY2cdU021IBW770D9e/iPbNEGKeqJuZ7kNcVmjTNl06EHcF+QzHXyAizPZsesxj3YEm9jkrFlmESaRP/jZCg3BgzVGjAta3E2o1lLXca8FQRIkXD0GIm17y6vdGOa6LldwPRghdviXzmY9bVSomkokM8RU4l2wiSHc17rfTzXrTPxeP0WskI+TvlInfC4ckwR0S2VbYShQBEjhppzMQG8j0vkDKGgR0xAYGHUQJ8cc1lhF7HfuKzZwCA33nnEiFGbIeAypo0+2g7BrTWYXn0FprlGWi3hlmdwy2PEbsNwvoSTgYjxp0I94oq/HB1jd2za4zrLfrLDdfFJBM76Kj0nyhvmKpK2+nPatWiWrawR8fssJqW1T4WDDeadgUYyxN4y+GVRfa1t+gVDa9zBmoiAJBBdA0PerQ1yHCWZ4AsVSTXnHcOfVAVkADjAQJTo3XhH0JMMkwAYH1EI6dbGWAa5c8n35YBV6KpYzoHqnR8cyuhoXLxKrOsIeRovISllDmoaiBaxyop/sp54PfXT5RDilKrsJRoknlTOtdr3AG+l9+HaRnhhhr35GdhFKO0bMwcU1EX3z9IuR0AAEzB+NVvAjkXpa6f9nsBU4c9eWuOvicBgB6l7EAiymGO8xUqMTL1s5K35MzKRMpZuGxP6b01S1S1jywWrbAuo+3icM10EsPrrD5vtOOaWHkRac1HrnwmzRphjWWbTrqVrEoi4T24UEypxaqknRzPjPxxaP/KG33yWh0aqA7ped9xfoNMsqy8D3NRXwoeGDpEMjzLy9wcpb2OVUbxc+BqN6Z+HGMIy8rg/qrCqqrRLFrU7TGMqs33632l+TXXrhA8xnWHIA7IX/WcdW1H+N4jhpgcWam+YSylycuHnq8WDm7Zoj5ZsqjvaoFq1cJUnMFprYzqlskfdct9aVI/46DAIPWtBJ/qaiVUmU6VFPQpjIjjLsOHZRhdBEtULWDrJeqKNQI5owG2Y8D/fLrFde9RGcL3316yRqHsRusIjSW0ziQHpqw0YD/QChFJ1VsXJd1Sd23u6OYsSY221VnFWIgJh6mg7k3ZXFkLSc/lXeGMRX61JqucaPOtSjOR79lp6QDKsi5cBBw6NSF6SVtF9ixl28amwZupbmWKulVR7w6yZ5NgVH9KQDO934oWF93uIGORG6IPokPluUBmqqoj8UGgW4gTkuCEiBVLQgC85X0JgTCA79NwwFMGcUoEFR4HkJAACU5mr9MApDJ5mKVC4wYsFP2q9kY7Lp1GTJAFnfLPFCiq8yLkbdRmI99TQXUmcTK3gxdn+fmlzRzZJMmZQ5taLD7wPefPU+koSwgR2MsM0nsHAGMP2MACmwB2V+f8nHFp2GC7mIs/vbzV1oAoSiMka+nt5IIefMRmCPABqG1E6xya5T3UyxFxe8GZoky/NcMGIQRQ3yF6j7HrsXt2hfU7T7F+tEG/7jGsh0SnDz6KbFRADMpKZHIHOyr53eTHtjZwbYVqVbETayvYtoatmQjCEGSD+niJ+mQJu1yybmKzyDUzgSAVRiKrRBBxbjMnphoJ5Pt0/mh27hlWrBHqHtEPINfgqF5Ca6zW8BC+IbASxVFtkmMLkRmdOvYkyRXp4p0uoFzn1foqX5vaJDy1vbpWzMojeZHMUFFJtS4t0/vzc0rzT5JklB1W+blVAZnpIqjK8LWlDA0OuyIg5ECoHO1TOonofQo4eAhtPjYH14byb8itDWo+6vpgMnHlQC/mpLVlznwOASW1fl5v1YBb+8bSiBFj4CVDoshn0kuWRVGDkJiKXpUl+EgIgfLjCFhb9GyRfq8IIw5Ssy/SDH0C4WaoV8fSlNdjOYF5F6fQ+vuxN9pxpRN7yHmV26S7IH/d6Wwsl+EgqUP
Ms6MJJPg8iGH++wznnrxOL9i54vkse5s4rZucY+lkCwet5JRE8R93aE7vFC/OTqp/+g7fFK/huD535wiPLtbofeRIy7LzAvhGuOw8KhuSUKolHstx2t4GEaE2AO2uYa8fg9oNsL5E9AH91Rrb957h6f84x9OvPMPT3uNyDJjj9hr9p7EXRDyK3RAqAhbW8GNxXqYyycGxI3Pc/FxZ2NqiWlVoTxs0t47gVi2q5YKdmIxdMYtVro1JdkbGIroqObFoDBNEUKeoXh1V2F0j9l2aY4bg2Rm6CvbsPtAsEV2L0LAslzEOx8sz3GqrVBc6qvk7AZzZ6IKeMnltp5C6Z2KjyoKnzguARNP8y80QVXZaeThjQeMGMK0Ba6O3QaCIqsj5psrvh2tq6sxKlqQzxbwu8HSClGF5GaLpC03BQ2UEdVqHviNRdh5F9lOStYaQj0WZbfDctoIgovdsCHk6tUKZpfOak3sOOK5oeKRIcqrayiGkIiuN02OUfQAwEngkT1RCRYCJhGhlxBEMp2VOmdIxiTnPGaZegkKvMGRhpeRU6awax/efJTBsu+PzY3bXB4/9y9ib77jmC3kJl+k2B36mOhawxyJT5iDwAhzWHD58JfPQh5guYocAGkQ9PfhpLU33S2HN+ffR7yTRe0Qj8N+GI8wwck2LRA6qdF7DDqA+kUxu/Dqbc35w9uB53/qFdlQZmBoIkeG06z5M2GRal1hUhN5HXO4CfufxGo2zuLOocNy0eHDn87D1Am5xhHDxBLb6VmpS/vp2xNvbAedDeOG+PM+0hmIA1IawsJQc25EzaOW5I2fQHjdwC4t6VaM9a1GvKtRHNdrbJ2jOjlGtWrhlyxCjNey0hLkIV2UHV7egZgEEj+g9Yt8hXD1D7Nbw18yqBMBQ5vER98CtjmHP7oPaFVC3oGGDHzj7NB5vLb52scNujDAOaCzB+S2ScnIZ1Wvgouc/ePYG4sT0ukhj2sHXvo/TxXkCT8kvZVGeP0OaVyMvlsnJzWqqyVkWNbeSiMD7g0mNjiEnvZe2wDCCxh1o6Pi7Sj3rhXUrMoBzoDiFcgGkWXcRAFGf+r7SelK8j363yXEJQkCQRd7K6xNsqE6rYDmSHxCHHlGYjPkgccau7FcqiENK8GG0RWSmQkAURMUKa9GAG+rL9oVR+r+ILIIDWpdp+Br4ycfzeaBpKwIH8jQ5hzrBwBlgURm0kgWb3RoYuz02p7m+xKvam+24brLyQi3T6/I5U6T9BWU1FBeh2h40WH4GkGFJMYVtCJnpZAgc+YZM0adoEnS057w0EiwGRs5Ff6MB4Fp+buzz9wqMjZaZGsUI9/AHnn/Yxm7aZP2KtlpyxtZt1gCAo9oktlnvuZhc0vA/DeBbVzsMPqD3Ab0ndD5i2RwjDB3svU/j+HOPMHY9jr95jbPHG7y3e53SLlsofnYhogtx4gw1QzutDI42AxaWndlZY9GcNGhOaizvrtGcPEW1cDkLE5jRVg6mquAWdXJqpm0l+0KaGL17do1h3aG/WqO/3KR6XX2yRHvnFM2tIzQPnsHeeQhzegdka5juEqfNCR6sKgwhwgWGCE3ZaF3IHCUr0AIqrv0kV6QkITJZ8kgWYc1NFBY85MQS11JqaiW5Yp7BTR3XtNcnT0tQxiFSdoUwsKOSKdw0DkxOkoAw7LaTxV9rkmTttK5NM1JFQnBMvj/V2WijcZxLzaavC2CqYpJYhDpTLk4dUox2ymAUanscCiEBYMpmnGWAk1p8aSUpRIIIL/UoYArJ+siSUrGaw36HvufUWZUyU4pkOAn4aHfN56jf8M8wcmARI6L3oO13quMylDOum+CzYhIwgD1nVWLy5dsCSA4nOa0b6leHIEQCshJ1ggmKvjKAbzTjpn1kofism0gfs+8TbQWydYYeLR+TWEIlL2O+h/v+//Xlt3+RidN1cYQjg/bo+MZNzxYVNoOHIabc9j5iUS8R2xPYe59CffkEpz6ge3KJy29c4XwIeLR7wdT
r17QhAsMYcCkZTCU352llcXs9YPUu4e43rlCvatjawNZWoEaunbmFg2tdIoFUq1acGw+LYeUQZkr2VxvsLnvsLnbwAq3WqxrLuwss75/i1jBiIZG3WZ7AbC/QALi9OMZ6KBbTkjk7G2VfWrk9w4MBiMVId8OLtrX1QYWV8t4pbU6ucKCJgyttep9lIkjprBLcFgPI71INlMYuIQ0kU7fj2DMrVeqiqXYlkC23QHCDehrAamblA2DqCAKrYkSFWoEJmmEAaczOX7qENcuAdw/yF7g2KXBooCETD8qap65b+efMAScWTbHmyLG1ZNIx5udYCoz/VlLwKe1/OibFLu81FxeOy1KGBx0B1F2Chi1MvwUNm0y8GgcEySrD9RVe1d5sx1WaOrDyRErB8tA8GW3u1FQfyBEIKN8s2puTalFzB1hClVqAFemlsscKwBRaLBwuwSAWPUAJq5e/60XN/VsuLTLc2e/S/tAIZlTq4qV1jJe0eH3xfo74C42GDrE9AXUSWa1udlx3lw6VqSY3TR+Auj0Gzr4H7kdPcPJ9fxTt574X9fG/x8n/7+tY/dYj/H8LfcMP24bIBJO1H/HN7vmF5dYQTiuDhTU4cgYnzmAhTL96VYMsIfoIP3isNwO2PuJyDLguBqTdrS3eOqpx8pln8P2I2z5gGTzqO28Bb38ZdnmMo3vfi1VzzJ5kF0D9ZrojZeBTkpEUEiyz+EPXSmSBXM0eDimzA3hutlSSLV5k2oNl1FkNW2jzMI1dorTT2AG7LeI4IEjmCmmn4DYKYadq9mIs9++1DNmaxYqh17Jv79Ax0FoZxNkTjyyBAyrjktTXpOeq+Dm5j4VVOm9KntSmY9hvkC7OT2qQV6db9hraYn0p4WFIEE0GzhpEK9OpJZGba1Hq91BTBmlZ11QrZdBsHEEDtyCkbEvOU+jWfF4GDi4QPML1+uUuigP2RjuumDIP6dEiOy1Az5xWWbuaz8BJSGLptGY0dQAJ+0gyN2VjaqneDkwvRABpWKVedEVkN2c9ZUxcxmkDfOFKljYhY5Svp8wwTBf3DVH33vEchxdv9H4t9ac9P/P77O0MHV6ut8xiIgDgjDIsz1h14q3vxVs/A5z9yFdw78d+Hw9/9cv4+nmH373aYf1Bii++pnUhotsVMmSvYD5GLCyhudyhv9rg6muPMKw7HF08gVmdcO1rcwXz8Pu5aK+B0pxi/xzjLCJMF+yZqn45iuVFKj1KnCjhvXIRzx98g7Ms77sw7sGBNO4Qhx6hW7OzEieVHJb3kx7A0HWIISD4wEzS5VLaG45hliepf4/VVdzefqVsyA9cU4qB22piAKoWduaEXnSsb6q9lcxhbnY+APPSDBqcZ17pcw7c7wXjmEgGa6YXQNapAtKc72cJTZb0/DDw63w+V5D+OfJMPokAyFhEI9kkhpQZv6q90Y4LkBNhAECmG+v9OoPWInAjZKFmMIUGaVaTmpoXR5LhyuYGUsPwzv+UDw4gW1zo5T7OIB51WhzxFXi9dSkLo/nr9YJTp3YoK3yOVT/xZ164zctY/+5X2WG6ZvK5/dN3AAD17YfPfX1NAZ5MdlyuZtkdY2FiQHV0C/bOW6jPzuDaGp/6yrv4wT94ht/6H0/xlfWA7jU00D5J9rT3uNc49OsB43bE9r1nGNYdds+ucPaDn+WG7V2Hql3B6MJrRZqICvKRKRfIA/WQ0g45LenLmgRKkKh8BvfZmdPimlTOIgBMkJESwUgLrrLtfD+pZcXtNYIyMGU6QByG7KhUomngMThhGDGsO4R+hMoyaR2yPl7CntySzGsJszqZkGlI2xuqGqB+EgCmmrbcy2TctFb1IrvReelzIuB7yIHoz5dFUuazsZBRo/mssr3RLyXRTR35TIyhDM4TdDurZSVLn+slO57V8t6HvfGOCyidl8knyuzfpOXFpars+gd1WImyqU6rZGXpCS2MgBcvBpOdzdHtROtOMzOFJg46SyBJOdGYxjDo+2pmF107hYg
O1ec+RLObcwx3vw8gabat2kxM8S/O6sz2AiYGxGaF6Fr0cOh9QDRL4HiJE38Na2tUxuAz/4f/Cz63u8L4ztfwv/yP/x1XX3uE67cf4+n/eIL1u2usn2zx++sB7+1GbH1EH6aZ9ifZtj7iegwYOh6qOWxHABylklDszeoE49u/z4vv8RnMnU8l2EgbZyeBy+w6nTbh7yMUGgMQAY7y8M3DOpw0qXsoMWECfYUZxJqCK8rXvDYQjzvQsGFllW6DcHWOsL5iJub6kp3STB4shgDfD/x8P2LYdEn70vcBtub2B7ds0ZwdpfaG5tYRZ191O+3TExaowosBkADZ8DWt9zIypEZEyblZ47g+lg54gcAA6XjT7FxEe0Ow+bx1plj78kkxe87roMMSpmNiopb10bkDmzmutG4NPAEiKvpTkkpcBQqWA4tuzT/H7+Q+LrkpUy1HJYBuOMHJOR0woynz8xZ5/VtBfHgRKFMVbL7+va+l/Z4IAAsckorNxf5Pmh4VAopCrS96OlITchnRlvv8IZr/3f/EH9UeIRzdS/W8FL0bh3a5eqn3YkJNJYvdBnXVAtYkVYZLe4TjeyewzQp+eQY/9qDTT6H9oZ/CsrvCw/4a/tHX4c/fQ7h8gj/+zhP0l2vsnl1j8+4FnvyPp1i/u8Hjqx6/ddF9oiDG0u41FmeVQdU6+N6jOanTgjtsOpB9wnCLq2BbhsCiaxHqBeBaxKrZlw8D9q+HEgaS3zMrFgDRZKHghmVKjydvVb7/bGxIJBkXUgZSqc6LHLn3W74HdhuEyycI60uE9RX8+bvorzboLzfYPbtCf7mB73qMHTeiBx8RBv7pex6PM3asrqLjcgCALMG1Ds1Jg/qoRr2q0NxasQzYqkV9soJt6yQJZlYnoHaJeHwGc+RBzZIdWDFoUqXU5ro3ibGp3z/9ISTCVlJKKcoIe2oq5c+5lXX3+WuAwwVGWefi2E+Qm3kvHM1YjjR5CyFZaMY79BPnybqfrDaSYNy+Q1hfIWwuMV6cH/4+L2FvtuNSu+mEzU70TdJ8qb9Bfk8ZXBSNwGhypFhGr0UmBgD+q7+Znrff+xOHP6xMtYtRJTQIPqw9XgV9P41lUGenFkPKwFSD6qCq+UdhMsgxNCuExSm8Cr2+wn6UkOvu2XtAGFHZGier5XTDxfcBALrtFjEGbAOhXt2DjSPMyVuouivQsEF98Rjh4gnfMNfP8PCdx7j62iOc//67qP/TN/DVzYCnvcenFw4nLjfzng9MltBm54/SWkP4/LLC3dahXlXYXe6YsVj1IKMq+T0qaycjXJLSva1yxgVkNlyJHMzRCWDvXO0xawEghIxWhOcEevNIn8y0T3HuvERPkMYO6DuEzSXC1TP4q3PEzRW27z3D9skF+ssNNu9eojvvMHYDdpc9hu04UU7Rx6pA7yPQi64jwOzQ1bKCk+Pbnq0nTqw+WaJaLdDe6dDc6kDL43SsDMQZqT6oOBtlEYeY1xSe+GAQrRMlH6n5hBGpYK51bK2nKWHrQP1qLvZ8kAlZBr3l6YhTJifXy2U+WThQaxxHRCW8lJlUIXGWyBYFIUb3c6IqU+iQhksOJPuL7/QGZHm8V8Oa0TeBwjkVbzORvFFHIVkLRaaVgygXGssO91nqr9mO/+pvchbkmgzBuHZSRE8sKd/zzTqOXGgW9mJqPATfKJMaQ8kaIoNAJkk2RdewAgggFHzednj0B0lFofr0D7/OkYf/g/8KkEkO2q/uICzPMJoa2zGAENA6HmQ5RsC9IkDX3Lr3wm3axQKX6y22g8dO6gP14m5yYnS7gzv/GgAgNMdonnwdd3/s/4zvBfC/zN5r8//6fwAAdo+f4OrrjzCuO4xdj/5yi349oL/usX2yRX89oF/3ON95nA8eF0PAO9342jDkWWXwsHX4VOvwmT9yL7EPH//uE+wud1jd50V1lKwAxrKyvbHiRIrZUMQTsDOEZWDJSLfECNCBzEsfYr8VJJOMZlDTc2oxc9o2pHa
TPlqv4cJpxc0lw4NX55xlnZ9j9+waV197hM27l9g83uD6W9e4fLzFxeBTgNEHhoLVDiEr2lhbG8JiM7ADk169ZeNQrSocPViiPVtwn969W1jcP0Nza41GyUvjwK63ZAkbA9galsxEMZ3k+xtb5+2DBYIDqM86ijwnhKF1YE9XtXRYe86KMls6okisZ6xpHRCpLE9rHFxlJLtSyJCdEwsK+0yikIb5XEuUemLfsfPqu+zgvM/SZ7rv44DQDyLddo3+ao2Lx6/OYn6zHRewf9IO9IrwqPJ9B6ab5lHq+QY3ZKBNlOSQsiOMPeBNen1yZlq4NTNmUgwA1AH2GRIZe9Cwlehyx3TRWdNkNAFRJIDI2skCMS98mhgQfYtoRe4mVKnWsRexfQDJQ2iOJ0xIGjrQ0KEyI48v8SNi4M+2xoF2G+AlocJXMaVpq6K5DxFMDCdYWqI/+SFukrSEs3p54/u4/+OfB4xDvb3A6e4KYbfN7CeJGP3FE/jzd7F97xmuvvYI2yeX2F32WD9aY/N4i+5qh0c7j69JJveiw31WGXx6UeF7VhXu/tAd6d06QXN2lLKLy29wz8vYjdg9u4apHHzXo153aO/wwmG6NRwAe3wbYegQYoBrjjCCWwuCXF9MnrAgcnvz3+YEjAmqcEhnT7eZm1z7hKLJPtG1BYrUlylxQ4r6se8Qt2vEbgN/fYXds2t0Ty7kOFxi/WiDrz/e4OtbPr5D3P/4l7WyN+9oO+LouseDZx1WdxZY3l0keDH6AFu71DwOV8FUda77hAawmDgtpo3HdMyJCFW1zH1pQ8dZjzoNPW407T89SNUv170iOFFHVTaIK7crCFNXWxUAwFojtHoVOhBiiCI+SqQYCvFrdVAiUxb7Dr7b5ZqjDxNCzLzmqEHg+bNZ68b7sDfbcRWpeVkcLU2jHnVaiREkVo5lSNFJ6msAgjbYycWjqTViANmQ3m/yqXNorIimEvli3IG668SIUgwYAHf4B2lpNIYvEv0c2SYCmOis9Z3AP7XsglzwGrmZ3PtBwwbD219GrFqYfgsUNTX3uR+78XD7r/4mQrXgBcjViDEwPBq51yeOS0Q3Pb6atdK4w/iN3wH117Df95M3fsar2tnREmcA3j6/TirlgAYkIY02/+ztEwA3O9D6/udxud6iqRYI/k6qQQIA+R5mHGDXT+GffAv27D1Ux0uOIC/XuCX1l7Eb8KnzDj/4aI3dZY/uaoeLIaCTjGDrQ9JPPKssbssiefKZE9z5kc+guXWM+oTfN/qA5qTHvT9yF2SIdRTbOqltDOstYghohhEOQFgew7qKmW5+kMzKJS3BGAEinrNkDf9eUtYnkOD88dxK2Lx87jmWam7KyJu/PEZeIFUSK4Q0h833HuN2RL/ucSkQ7us4LSD35m39iCPHmVdFgDnvQJY48+p6jN0Ovh/hVJLppnohclxYBtLa3hGjDro0cPUyB8TeTt+krDvOIMASWZp/fQOkhmh1Xqk3Gbm/zop0FilEGzz/jDH9DAIPcjN3mLYdDLnZ23c7jF2PYc2P/TDCb3t4Jc50DOWO3YhxO2J3ucPYjbi83r3yeXujHVeOZ6YRB6A3KD82MaaiaZl56UUVIiZabGXWZikW9F7HCUzgOz4CHK1ouj+nmZYWAhDlAvE9sJWRHbq/pcyLnV7EOTW3+ff5e2MABQ9TecQieouuRWiPM4RYL2HCCNNvEeoVgnGgsYPZnCNWS4xf/224z/6Rg8fbL8/S+6Z9k9ej70D6OaI035YNx8sVO7mhx/iHvwX3PX/s4Ge8rn367AhffXKF7RjgDLPgLnYjjmuHH3pw8lLv8bQbcdYs4A1LV4UYs6Br6GFdBVvV3Eu1WKFZX2bFhqFP9adx02FYdxi7XUEk6NFfDzCWRMS3QXvnBM2tYxx9+i6qz3w/U7ONRfjN/4xK6Ny3fvCzKYodux2e/Pe3EQaP5mQti/qIJQCzOgY1C5i6AfwxL0LiJJLWYASrMUDEiMsv7/u
ba1Zltl4KrN7krEqo0BZNvsikhUghkwNu+FydoRZDRAxRHE14badVWgBwKXCjBhX2skd/3cMLtT6WDqv4jgffL4kN58cerBloDUCi7G+Ng3UOZHOAdPC9X+C0lEQDcH/WVEuyeBvKqh4W3LBOXssV/BNh5PVoHMRJ+1y/GgeW1JLrfVhvM5x+td6D1jXQ2F3u4PuAYT1gsxvRh4gL/x3KKtQiKCdJ+eyk/l91TKS/8Ikts675ta/CuOm9iEdPB83c9OazmQmlBdXE1HFuMgNoogAtTXlhHABXpZ4UXazSlF4tuKcdy0VS/lgPMnMHZ5iSaiwX5m2N6CqkplTrYLYXiLaCv/WppMkGAKE9hdldPTfCjtWCB/NpcV/HoVRLxOY4R+BhBFDvvf69Wz+Iu5d/wKzDD9EqQ7izyJf2UW2wfR+CvKOPWA8Bp40VSIX4Bu/lBu+3AACqW9g7D2HvPGToqFnwPLeyyC4tAGF9mVXgVVlFJjCbo1tAy6QWkGGiTn+NxQ/8CJoHzxiqNBbjs6fYPrnE9dvvYftkg2E7YnveIfiYsq9TANWug91tWa6JDFwdsKyW6E2e1cRU7ax6kBruhRKdTKErhZzVxyTI+vBxTXUYgc79bKG1ht+bRgA0cBYvwtNUt6zA0LSwyyWqroc/WWFx1mJ3scPRdsSD3qMPEe/tXgzFvh/rQsTFEHDkAurNgNXlDv26R931CH2hbuMqlFMlyhqUAa8bPrKQrX5xDoojK1Fp4BDVmRgYUx8mw8zq2cBstpeuPWWfHJAdX0noGHNbwkSJxPdZ6LcIwtRBxbHnnjmBBkOnzooDslK6rL8e0J136J516C52uL7ucT4EbD3XIrfiSTc3KPO/jL3RjkszrpQ5YRrhAEjadwCPmjYA85pmhVu92CKYpTefKnrw84mmSu4obk69yQOAKBCgMgal34Ebki1Q8/h4jeb2uvhjSCrWcQRQOi1tlEwabBaxWiacPBpxosMGZuxgvvV7wK0HCMcPsppB5Gwx2hoURviv/Aaia/cyr0MN1sM3fw9AHrBX3/3MjccrxIjxHrcGdJv1S9Pj368RgFVl4KS+eT0E1PaGrOCA/cD9nJl1W65DUr/OCg46NkMX2apmdY9qyVR0WyeCjLL36KyYxFu0UkQiBFcjupYnI4+ixwfAPfgc4tl9hmPO34Ude9T9iGrZojlpQLIgGUsJktk+uYSpBNZerGBjQGg6VO0AWy8AV8uMJiRZJfisEJPqlkIyiuV9UhAB1ObQe/l8qhuHfWm1QITaOMSyfmwZ6qa6BY09zLhCaJeoj9lptLdPcNx7kCV81kcszre41zAp5mVqie/HehGFDgNDlF56wUp4vlSRSBqCgt6UGddNFiT9pQgJCPbXpUk/1bxPa15zLOTlyknq+nsqUxSOKxHCxoGbu5WAcQgWlLKGQoO+6zGut4Iq9Elvs7/u0T3rsHm8xWY34nHv8bT3yWkpgeZ1ztgb7bhSxhVvbirVC0gdWKAMHepFYoidmZXbMFBMopkJbiSaFLEndgAuSDe0gQh0grMzX9BKAV78FiuQTtQFsn6a3AwUtNmYhMZqEjSpQwrhuOE02hqxLuZpaQTvuaGz+4PfRvV5D2pWCbrRxVWzBeqvEY1lxQ9ih3aTQ6o+9YM3HPl9U0X4p1cbNNZgd3Weh/4B4nRbwNYs0umH5zrCm8wQwQVuYnXWwVCN+qbmvRdYBGQ4YVZwSJCOcwDkuEv/VGyOEesltp6n/ypkQ66FrWTfjM4KAybq/wCsH9Lv4eQ+1xx8D7NjlYh6HNDeOcXq/jVc28EPAba2IGvSsE0AWAwj99GMA8zRLUQ/AOOKj62tii9YMAPl9+kBoAxTCWNOSUyllTp3h0gCeyLWBhgjwRmul6Y6bvBAVcO0K4QQYBYrYBxQn4xo75zyYW8r2NqieVTj7mWPT13u8LT36KR+qAvk68KJPmZ6fRi0wbm4d2fsvvn3zN+XA2Eqfp8bK5PwumT5Js/
IRjFPDcBkbM2E8VkKF8RZ07MiISpG3HcIKpkV/ITOPpHN0r95nxq9FfL2AoErLK4i0f26x/a8w1U3plrk095j66fr9OsIzL3Rjov7M+JellVeO3rylAqqzssWf08OiWR8OeWbrlQDcLrQlFRgYJqa6z9xNpQWpsgZDQBjrYwFzwsIGQtTZEyxajJDsaAMYxEmnxcLqj1GJmgoDZ+Ch9leYPef/98Yrq7h2hrN/+n/xvJJpRiv7kOM/J7NifSSGcRm+fwC/SsYT1AlwI9pZDx8D7u+Svvkl2cvLPTfZMvKIJoa18ElUsarTgknALFq2aECSNI/yvw6YAxds8K9/tsMAZvBI8SIxhksK4vK8BDNZQW0DnCh5/O/ugN/wtmt2a1Bu2uYs3ugpkU8vYOzOw9x/Ln7iW2Xot+ux/rtx9i++wyXlUN751tY3j9De+eUR6Kc3El1uejy9TWZoFA4KWgfmGFSj4/AEGIaUTP/1vPa8Tzb0PvJS6YRI9d8atdywjFaBJI+KeNgpP+HatYTPFkusXrrNsaux63LDfqrNfy2x6C1xC0rjIzbEWM3oL8esD3vElT1Tjfivd14ozOrCDhyRhq/LU5qnr/mWh5ZQ9bswfNlHU8dejm3TBN9W9TY1ZJae6Knl0r4/VRtRDOv0onNHddsXVKF/JRNDT1LZXWbpPeoNHVWHBmTg/bqqEWNJHrPbEFhB3olXax3GLYjhvWA7fkWw3pgxqAc8+sxBxLlKmIANET7tZqXtDfacSnt+dB3PxTdlCrVExXnmBsqrTGIEPXkSJPt5th/pJAW+wyt6MVoJth0NBaEmp1KDEC1yBR6idaiYOaxXqbaQKx4BAZCYKHP8vuXhW8yQLMCYuDBbUCuNxmD9jPfg+oHfxy+aqcSPIeyRSKAbIYkPmDHde+EIcJdf13MJlNnIFlgGBGNY6URAPW9z730+5+sOONcLICLayHFv6To7NxanQa9Osbw6A8QZwrrqU9P2JNRWJzOEBrLGTIRMARCGw26kZtidxQQrQFRgDUGhiKcq5Mz8ZHVtqOxIG3mVCp2t+bxJjLvK6554e7ON9ied4nCXR89xfLuu6iOl1g9/BZWb92GPTqCPb0Dc3QLtFjBtCtQs+D9lsblico4kDL/EOSem913qV78gmNcLtQAOzIivtfIOG47iRGxGtP7miPPckHtCmaxgh17VMOAdiakqww2JcaEfsTY7TBcbbB7do3teYf1ozWuH62xXQ94LNCVNifrINHWEG7XFotbLZqTGkdvHWFx5wjNrSPUx8tCkLcIOqV3johV9JWtWdr80JjJ33Q0CDJ8P4f0yt9DhnVTS0wJYerPstdK2jrC1bMkUDysO4ybLjmssdsxAUiO4YTROWiDt6iSBP45rDNjsL8eMHYj1gM7Kh5Iqe0GJgU7eqzJROAVBeLfaMd1EwxROq3ygjHIF0nptOYNlLlRs7i85ou3MUAxCDJ1uqeIPO4VR3k3bXq/udJyWgwFsoNhVmD6/EMjVQrH4wWfJDcmOCtaHgHvHnwO4+3PF+SKmGEqAFmiKUNDmDnfD9peprn4Zax/96tJ4ii4FotFhkoVIvwgdHerB9+XHu+evYfoanhT5/4/mXZroEIm7JCMj/BOZxaZRNWP4B4bHySTicQqC/ohsQiMHPcQZfUCpoqHYRAq8g67yx7X37pmFQmZvrs443pY9+QK3ZMLNGfHWNx7hub+fZjVMeLRLdY4lCxMs/hJph+nwxMNZc37uerM3DLUPnseAr+nJ7ROJPeQk89uA2xVI9RbxKbNdR4lKRVZRWqKVXhr6BE2VxiurjFcbbB59xybd55ie97h/uMNuvMOw3ZEEOKOqXimWnNSy6DQBqsHx1jev8UtCrdYTV5FeGNxf1AYYZ1L3+5QOH1T8DRvRSinJc+l4EoCRZiJ1KayQWFMqpDeuG6D0K3hr2Vw6eUGw2YrTr7HuO6SpqPv/Z58FmdlrLSvI3nGrWZiIbEF+xAxxJiIP0c
Cd+jvOpDVfOc6rtxcN3FQRXd46aj4b7K91HUSG6dIs2+cljyb65U64cvFPUjmYA3Xr3xJcy+aCWNArLhYHm0NjylGXpnCqer26tiK72+QL3gr2+7cEjV4hMq4vAP3J74Ho0Zq/TrBDXMrZ/2krGzs9yLwT5q9Y+/CEXB39wxm7DnVEjP9hr9rc/Scd3j/FpojhgElg9KaJquFEGoD1BhRWyNwoEmwNgCsh1CMT+fr0iKAumuYGPlcFpBRdC1H9LJgD+stuieXWL/zFNvH19g83mL97hqP392kuV59iFg83mBlDe79zmPc+txTLO8ucfo9Zzj+7CUW92+hPjuD7TvE1QloeQxangCh5hEejrNHFYmtXQtLhDHwd9W+MCB/B3VIJbqhC/Zh54VUf9GsG1aQCTsyey8EUOvz35Gzu3Rf70mhZYfSChPu9OIJ/MUThM0ldo+fontyiUHIBb7rE+3etTXcqp0I8JrVCczxLdjTOyz/1Mg1FoKo2HPd2VoHa+uE2pT36vMsITp633kmAdGwy4MY+w5+fZnJE8VYEHJ1FgZWjUDthdtcIWzXiN0aw8Ulds+uMa632D275n7A7YhhzW0afvDcL9dlBzZ2IwYhVTBpRYgrN30XsGSZJUqtBe3CwVYW9VHFmptthc4B+N9+9/kH5gb7ZK9IL7B5Oq6TOC1hOpAuvaDow0gFyxllvSxqJqaQ5V6YGJLjCBHpJlqtFrjZ9rOK/uk7zGCsWq4dwEykagyA3gPgIQTynEUtA+BKG4KqRhAWhmtrHjFpo1kgjV6IAMi1vCiUbCUgO+bydw3ejMPu/NGNY1s+bltUBB+Ap+6M61vbbV4EhNlHYUR3fYH26PQD+UwC1yQoRFhZkdMoD4HuA7kEq5UtFj4CrSOESPAhYlEZVIbgwb2C8BJhD126LvVckathT++gWr3NgrDHSwzrLk1bXlhCHwheMk0fmeL9tPdw31pjlAGYZAzCMCD6gLbmTIa8hzUW1PB9EHlDCYwi4FkVxZJBI1JkpYI8MEc8clCl994eQ+5ARp+hapsa6CeTEIAJLJlkruL+e1IMQMOBmjm9DztsgH6H+voZVlfPeEEXp5DgNlellpQ0r8tVDK0uT5iUUyjkUBhZFUevOVknrNQFy1u2JLWUQfWkrqXj7v3A6vjrS8Ruzfu6yWoucdchSuRO1vC+lq002kDcbeA3G4aTn1yk5nWFlseOa1Sl4+oKCS0mvWTHVa5VtVzvDP9xJrWwlOqD9VGF9laL5rRBvapQnyzQ3DqCa2tsQMD/9u/wKvZGOy6NVoH9cd8kz2F+QQPTmyfe8K+oWzF/N0yA6dcRFD80j2oFZtsBJQxTEE6kqE3YvxESe8vqkDjKsIPUSwyJ86oaIFR5YboBBizFfDUT69ZX8LIYl0zOs6ObJZQ+Cls6g52P2ErNIur3F9g2fRMy6NZXGMnhaPm8YOOwddcXeYggsYhwZQhGPkCZgqVKSyyyLABCTmCGWSUbVnLzhwjul5KggvwwvSYBkLUwR7dgjm6hvT0IbCO1Us8NuvXFDqebIWVdAC8swQdpAi2aozcdgtTM4CqWjqpqgEYgOO7VszK6PhjADzJckVXFjcn1vmSz++sgM+4mCFqDRYGsozFTiFy3KT4rAlM6fwGBa7Cpj6leAO0IWp7C3X4A9LvsuICJeg3rQFYJGtS2h4msW/mdg0xK1nEnkRVldK2YMzGBnHXCzzItmfiM3Zad1pr1G/3VOcImM/n0/JPlcS1c93QgaxKxQjPKQbIsbQ7uzrdJoHhYD9hJb1wJ9/UhTmjsmnHx9U6pXpVhQMKRM1ictWgFpj56sEJ7+wRu1aI9O0Zz6wi2bWBfvf/4zXZcOvNHo98JMwdI0c9elKf2vNpN4ezUWVCYSrAcuhBfx24fZwfw6ILB39bmrMuHiNqyA+t1RINCTSAMgceJN2OX8vhoHKI4GwIA20Jawib1iUTNluMVyvqZPPaRpZQCMGFdXq63iRDxcdh
quYC9vsCyrbM+GwxWr+Ccnmc7VOj6gFVlcCRw5Ot8wnuXa9RC4AAYKgSZSWbrv/Ibk2AqugaxPYX7zPfDHN+Cu3uF1UPWTbz+xns4eXaNsRvSYtSve2HZjSBLsBWTPYbtCLfu0F+u0Vxfg1zNTmixQnAVTIN8/oMHqIC8NQtTWNnWmZmosPIsoyr7jOiQ05qxGQnCmLU6nt7sqUUQpEQMZOeFAjnROrKgCVHrUuJUSmEA3X4e1EZIRof82ZNSgu6Lflb52bMscW5lS0SaPzZ2qQE9bq55pMv1M3ZcG65Tam1K1VjUQeVDaSaOy/deankeu8tdIlH06wHrzSAyZJxZKQzoY0y9bFufyRYlI3OIqrbPa+7KGpzUFouzFiefOcHqAcOsq4d3MjHo+IyJQU2Luv+IGpC/9KUv4Vd+5Vfw5S9/GYvFAn/8j/9x/P2///fxQz/0Q2mbruvw1/7aX8M/+2f/DLvdDj/zMz+Df/SP/hEePMg349e+9jX8/M//PP7dv/t3ODo6ws/93M/hS1/6Epx7f35Ue2LKWlZSARB46KD80iHTC6zMtvR5MqKWHAATRISXe78+aOelphkkP849Z0EcULngJUjG96BdD+rX6WaFrWCbowQplmyw0nGp/AwVN1M6HulzDAcG8jtpz8lcqubjMFGhL/v6Puj25ushCJnC44NIMkPMlHnN3gIs2nIjXeRNMRgyBtDtT8Gd3kXcXsPeeQj38BlWn3uCGHyKyJUu319tsH18jX49ZaXqhODds+vUzEyuBg094oLZc1rwn9MKyNo8QkWmHkyGV5b7D+QJyKXDmLwh0/JJGbIzjU2l4petL2WrSm0laJGsh0oHooQp03BrA+UhmXr/lnW4MpAjEdZOE5l1TZkcyCDCj4WjCgEg/nxLeVDk5J4NIyuECCRIvYhud2v4iyecaV09g794kuaQbd89l0bfDt15l3T/hu2Y2H460iXv3tQZqUPqwn4WBeRyi473qY3FkcujYfJ2YHFiZ3DiTNLcXN5d4uRzt7F6eAfN2RHa+/e4HWN1wioxi2O+ZtZbvKq9L0/xa7/2a/jiF7+In/zJn8Q4jvibf/Nv4qd/+qfx3//7f8dqxcvEX/2rfxX/6l/9K/zzf/7PcXp6il/4hV/An/tzfw7/6T/xoEHvPf7Mn/kzePjwIf7zf/7P+Na3voW/+Bf/Iqqqwt/7e3/vfe28PUDAmGDq+rj8CTw3CppAhMW2E6pvzDOFiChRroEMU75utH/35OWX3W6zzlFb8IUckwGwlCjSwIeI+VipAD6OAQCJI4tApvPPokpHecggBAKiGLA7f7SnGP5+KOyvax+WCkdpnz47wu+/d/WBKTSU/VA6xWBu9vt+EuMf/lbKGCAEjegaJves7sAumOIe7jxMit1Vt0b95B24tk5NycA1fFKC8Bi7HlbEUU3l0FjDtZyxB/Vdnqc0N2MZSpNaUOoL8w6w4/5EAq1vCfSZeo7UpL2Aokl1NXaKjHBo0FU2dAMAkQjXSpDnFMaVZub0GZIRRsckqNHzQNIxZGZydihMZ+fF26J2MuBx7ItMUZ1iQeRSZEYccyTDExLAEL2dHw8hPikkSP2Ws6xuw5T1yycIW3Zc3ZNLdlbnV+ieXmJ73mF3scPmCStVdNsRF4OfOKLSwUyuuag/szNTqwsJsKTXWDA/E/IjjEEDpHEw2jpw9GCF5tYKR5++h+X9W3C3bsOe3YM9uw+zOkGoVzzZ3NYIvsKrGsXXSBnee+893L9/H7/2a7+GP/kn/yQuLi5w7949/PIv/zL+/J//8wCAL3/5y/iRH/kR/Pqv/zp+6qd+Cv/6X/9r/Nk/+2fxzW9+M2Vh/+Sf/BP89b/+1/Hee++hrvc17uZ2eXmJ09NTfP2b7+DW6clEOLektydsHbgRmtC/zRv6JtvpRFLR+yujwLlyx8dR7+muL9JNoNCH2a0Z6qgX8McPMJLDdsaA05tUM649Qkt5HMpjWtqhUReyzUfpuN5
0+/33rtKC+dnbUwbk8OgPMlQojM9oHGK9wtqucNQ/S1mv6a5S1D5+66sYn3wLu8dPeZbVO0/RXewwrIc0Bbg9bbC4dwv1yQr1sTDo6lzXmdOr+UNM0lmkuk0Tgk2zEKmrOjmfiZKDTtUtZjalWpZjolKsGoR6xWxb16IPSJOv9x1XJmTVlhKbU/ue0vUros+eHHbitDTTnavuaLlBSTatM1g4YvWSfssMP6lFlahMCZXqVIbUHlMM6kyEsDAmJZYEDV5fJGjQXzyBv76Wqd3nMvWZ+9A40+pxedGlfrSnPc+Ee5WgyoAd1ZEzWFgmWZxVBgvLv1ctMwLVSIg/tjaoVzWqVcW1rLdO0d45weLOKRYP78Ke3eN67Okd4Og2YrVArJf809W4vN7gwcOHuLi4wMnJyfva59eqcV1cXAAAbt++DQD4jd/4DQzDgD/9p/902uaHf/iH8bnPfS45rl//9V/Hj/3Yj02gw5/5mZ/Bz//8z+O3f/u38eM//uN7n7Pb7bDbZQn8y8tLAIfptZNmYABk3X5qD9xcGJ6bOq2y50rwdnVaefbOx2OJfaZY/dCl/UQIMN0VKhnnHmJIcEuZRHINjKEYzRq5DwlZZUObIMXmPV/RgMdUaFT9XXtpYxIHYTxwHcVqOT3WfkhK/0MfEJZnoN01qF9jvP05mO0FjLuAewtQWTAvTblkrhFDTHWvbYgI/il2z65h2xq2cjD1/rLAqhFKAKjgFjVsWzN1/OQ0O6/jM1C75ExMphyohFBUCSF9z7pNjdVR6PcckR8x09ZH7MYoPW6RYdoZpFWqovDoFoI1bfK3QbIK7yN6z+QDHxii9TEmUW295omASu7typBsI/C4woaaJZX3gjiuaByToAT2TMFGUYpI91JBwFBnFdaX8M+eYvPeM/SXa3Fcl9hd7tCds/7f+nKHi8Hj0c5/INO5A3KT8GllE+ynPYDLuwtUqwamcjyTTGpopnZwbYNq1cItW9S3b8EsT2BWx9wbeHRrMhkdtmZZNMu9j7vXYLi9suMKIeCv/JW/gj/xJ/4E/ugf/aMAgHfeeQd1XePWrVuTbR88eIB33nknbVM6Lf27/u2QfelLX8Lf/tt/+4X7lFL+kjYLANZMqO8HX5v6QqbNl9NtzJ5zCzjcCP1RWlnMH97+8gSyoxiAgaFM1xi0tppQmA/ppqklnL+gZZfHhW/GQn2jzFhjyFqHZFDf//wH+ZW/7awU9p1bUk/R3y3XlcYIPDgViHSxQGq9WB1jeNSD2hXs2X0AwLJnhQR1StvzLYJQn3eXPXaX02ZWAImtRoZgLAlzLc8Eq5Yt7KLG4s41quNzVMdHMOsrHq2ifUXWHlR3KDM5Hc5YNtZzLVHJAhGjjxnhUN6oqN0TP0TwgI08ighAFvoVSFYzNy9ZHGdwciMEDtiQ7gtMRoCQ12Zg0avcbeHL4a+qG5qo9Kqm7/bFbiXA5IbgDeJui3D5FP7qHP76Gtsnl6mW1Z1vsHm8we5yl7KsiyHgcgy4GF7faQHAichcfap1uLWo0J41OPnMCZZ3mbq+vH8mTMAa1WqRviOMgWnkd828a87C0SxFO7XiuqJruC9QnNZh2bCXt1d2XF/84hfx3/7bf8N//I//8dU//SXtb/yNv4Ff+qVfSr9fXl7is5/97GQbJRpMek8Klk+UXyEZwU11rqgUerITSvhkG/0nVOeI3Aj9cVv16R9Oj/1XfmPyPQ0ZNC33jfmYiSW661ovBGZOa9y9mIBxIKJMz7sa/Xtf+y50+IoWtXajVhBRbjJV+fBf+Q1YYxH7Dqt+gK1dYpv1132iRZfNpsHnAr+xBmQpOSwdZlmtKlQLh2pVY3d+jfpkifp4hfbOJStMyAJG1ayOYWxe4HW8uwwuTKw9yL2FrCxSkjN0qwh2LLoCGlAa2KjHRjO1GGOqlUVlx8aY6jVqVDgsncEGcVgYe9DIChTa1KuqHSQjiZRCr03A8/FEQYVsk5rFOtWyUn/Vkwt0Ty6
xu+ywPe+wfbLlUSEykPRynI4HeR27W1vcri0eNBb37i4TueL407ewvH8L7Z1TLN56AHN6h+XBVsc5Uy762ZRQwxMpTCbpKJvTsaRY6bReh9j2So7rF37hF/Crv/qr+A//4T/gM5/J6t0PHz5E3/d49uzZJOt69OgRHj58mLb5L//lv0ze79GjR+lvh6xpGjRNc/BvQKap6nkscwijrEAAPA22aDxOG5kDUVGAjtXmmyok6ZsyivMxprrRJ83IDwxZlOMOiOBczROd54X3MIJ0WF4otNF8n7PVQ7XB4vdDj0nmLH3XXtHKWo08VdZ1n2f+6B6MrWFHntNmj56ksSfAM4zdiN0lq3mvN6zhpz07WsAHlLqdlRBOnMGRM2gXDss7vNg1JzWW90/QnB0l1Ylq1cK2zaSpNxoLqiqE4EFDD2MsM3XTFASDpmoBEAbDNajUCwW+90KMMCHXaG1gB1QqdgAoWjfU1cn3MQQLQiXH0hl+r0o0JltnUFtC5TtQdyVix5cIzx5zHerqGcLmciJSC2QqOjuuYnpD2iF2dHHoU39VOYRxWHfYXTJbcFj36dwM3YjrMU/R9q+x6LcCC356UeHTRzXaswbHbx3j5DMnaG4doTnjoabu1m3Y0zuw9z4NOrqVYNzgmlzLex7RTY3yPDadLhDi662Z78txxRjxi7/4i/gX/+Jf4N//+3+P7/3e7538/Sd+4idQVRX+7b/9t/jZn/1ZAMDv/u7v4mtf+xq+8IUvAAC+8IUv4O/+3b+Ld999F/fvM4zxb/7Nv8HJyQl+9Ed/9H3tfOqDQI6wNBsqfwfk4taG1BgwmRk+gwD178nZ6Tb6M0Jupph6S/Tzvv70GoaYgfZxm/mBn4L/3f/Eka3vgcGADA/sgxaTgT2oL+kYltCgMUhLwiG1kZIpNr+YpSF0/Ppvp1rA+xmH8p1uk0nShV1cb3C53qKO/Y2KINE1iM0K9vgMccfqEG3fpejeVDsEH9LCeFEoenc3pHTKJjutLI66EQ82A47OOyzOWvTrAcurDapVy4vgrWO4thaYqc0LuqtBwwDqO8RxgOk7rpGNHTD2iM0R2oYp+TsfUJkIg4Ah8L3M9S++3nyMqKQJtrImETYU7kstHsiz+eZqH85whlVbwsKJQPLYgbYXMN0VTM8Udf/kWwjrK4S1SCeJMr/vc6uB1gHJmtQYrBZ8QOjHpDGpOoG+69GvexGuzf13w5bFbUuB2iNnUiBxE4uwDDSUcLGwHHDcWrDs0ur+CqsHK7RnS7R3TrC8d4b6ZMn9Vmf3YZbHDP8d3UJojmVKghAsMJusjJlKUXkNIq+RH1T70PtyXF/84hfxy7/8y/iX//Jf4vj4ONWkTk9PsVgscHp6ir/8l/8yfumXfgm3b9/GyckJfvEXfxFf+MIX8FM/9VMAgJ/+6Z/Gj/7oj+Iv/IW/gH/wD/4B3nnnHfytv/W38MUvfvG5WdUhS/0a2ldVXJR6gHzM8BeAKS01vZFJZA5AajdhzIMAZZt44LUfIyfjpYyn7dZ895YTbot6wtxx7zmtve+dKe/JYYURGEVOSNlihSl0S1ToNX7XPhCjsUf/9J2Diiza/xWqBU/ZDh5hu0Z9vER/uYatr9OmQ4Q0o97stAA+++dDwPkQcOJYruxsDLi9yZJBzcmQlMY16wrDmAr8pnKgbs2ZmOjoaZ3EnnmEcYfoezTLM1jXwhIHidzPQei9oB0hYvABdiQYQ6hMwLKyIldGk5YZPlj8o+zfUsdVW/7XmAjarUHDllma20v462cIonWobL/tkwv4bZ8cz+SwlyQGIbYASONCWI29nwjaqpMau1G0AvlYAkgUdR95/ErZWzVXsjBAclaVpSS95FqH9qzF4qxFtWrQ3jlBe+cU9fES9S3usTLSv2eOb0mtasHkIKlPReuS0/IhpuNXHNq9dWN+KalgwksABjfa+1pB/vE//scAgD/1p/7U5Pl/+k//Kf7SX/pLAIB/+A//IYwx+Nmf/dl
JA7KatRa/+qu/ip//+Z/HF77wBaxWK/zcz/0c/s7f+Tvvf++LoX7aK1FGASmnirEYp83wIZUZFjDJnDhSc5JkFIeIpiPIlYXEEksfL0HjJivnB5Hv+WJS2Z6CLaljLSaOTOuD8/oVvzEvilEVB2JijKlqdfnZOmIByMX479rrmarERFcDQ3dwG5LBlLE9hgkj073HAe3VM3hZPJeP1vB9wMmTLa6dSUoJL1O2vRwDLq973K0tHvceD4aAu5c9w4Z3llite9SrnHHpgs61M1bxYHaabHN8BHv6Di+iZ/dgT7ag9hiuXsE1S2wtibSXjogJ2InUV2VZ87GyBGssDEWZ8iALZdmKGfPvliBOy6CmALO94Blo/Rrx4j2M5+8iXD3D+PQ9rN95iv5qje7JZWrqVoZm2fRra8uEFqM1QlnERVl9rrQeQ0yPebuYHpuKHVAFbqon8RTGGtjapNaGelWn+uPibJGOeXPrmJl/qxbV6QnMklXuzfKYh9jWLbcyyFQK1S2NukaoGkoM3HYTA8g4nk8ISI/egWBXmsmtvFdqwhbY1rwM1n2DvVYf18dl2sf17je+ipOTk4msi48FHi49Guqi5iNN1OZwn9o89dWsTnF2/ZwQtdg4pcV/6hMAF6r5P/iv/MBkbDrVnYqL7EVZWKp3ARNYkXT8+iHW5sgjz9WxkauBqkasjxAWp9JM2wpphM+bMwQbJUOUWiPCCOo3MN0F4vYacXMFf/4u2p/+v7/0cRj/8LcQm9WkoAwyh7OVN8C69VU6RjeJIPfvfhWm36bzF6+ewr/3Nvz5u9h+6xGe/s4f4urtZ1g/WuPyG1d4d81DAN/eDli/DwLAyhLuNQ53a4vTyuDktEV71qBaOJjKolrkILBkK/Ji28C2NeqTJVYP7/Dwy7P7cG99HnR8htCeIqzuYKyW2IwBl7uA9RBwtRtx3Y88v4yAyhgcNRats3BGmmoN5Si/uKFLubhWsi2j8OD2AtRdw7/3NsYn34J/9hTrd55i884Tpqg/3uD60ToNThy6cU981hgS2JDvo9J5AUCU7aOPqTeK5DX6e0mKIWV0Vha2tnALh3pVwbUV3KrlmuJykWBas1zCtCvOnmSeGR2dSuZUp9lr8dD9PhduKNbYyboxLy+kg5sb5rX3NYk5gNfRy8tLfOqtj6GP6xNlE/ZcQfeebaYZVemk5rdmZttRej99bfo4IlioYyQAkaeci+L3J87Ki0waWCnqhSSd/DYX/+eKIVMljeycIhH3rRQX/qRJOQae0RIDTIxMjXbKRKIcpYUR1jiuRRCA6CfZXUSY9MIgBFaJ2HVY//O/n2SO1BIcVdVY/V//Rj4MkkHOWxt254/SZ31Qc8I+Crup/lWatiIM3/w9RMMivRac+S5chZOu5+nYJ89ga4Pq0Qan5x0sAW9vx5emXK99hO09LLGOXX++xcm6TwtvWesBkNiKrnWojypUqxqLsxahZymqVT+A6jbNOItVC2sdFq6WBmLCsrIIQpACMInilQgAgbQixayQIZMXtEZOKscU+P6gGKT3jNl/Oqp+WHfoLnaJnr673KGTmuAQs6oEkBt7td6kPw+ZJSRHBwBV7ZK2pCr/29qmY2Urx47qeIlqtRBW55Kdkzgrhf3o+IwllmyFUC8T+y85rZlNmqS1gVzXg3Jatn7LENLoG7W9oaJ6n0vmZcmkY/8q9kY7rqj1kmIRUqekWoKT7WOeuKrbHrJDA9/mWRqredOEKcP+jmVgQoz45vk1i/9Snvr7cVk0LmdFZPJwQDIsaDp3Xrqd0lkNO68IyUILunskAKYCwDVKfv2MwIFZgFA6uuBZogp9it4ShbZiJRUairETQMrgNIszdYW6bbOat6tBFf/c/Yf/JxebFysuLquyga2mzMoQ0Jze+cCO+SfNYsUQra+XoMUpzK2HaB9+Dvb4DEeisHH0aVbY2Dze4PY3r/H5t6/wtOdm13e68bm1L4Chwz5ELGxmIKrNJYZK8sCJE7L
HWYuz8w7Hb/GsrNt1hTj2sOMA41om9jQrLKtGmoQBZyr0nuFCBQMGH2CIcp8Xce+X7o4lGUcj6v4xpit734KI1Qq0qvWosRuT01rP1NPVQWltSh1XJT/1++vj2hBqQCa5sIPijKrm+VULlzKparVIKif2hB2UObo1rUuJo4quRqiWiBUrmowi/aZsS+jxQV73jAFPlVHEKZaBqtlDqIzF4Wkck4tvlsEBXOp5RXujHZcOnAMOFABxOFMqkdF5RnUoApicvFkDs9VxCcbASKalskl6cl+nAPmBWsqGIhA9IoUpUYJsbgsQSACA4Nrj5H0mYyyU2FLCB0Ci0scEJRymymu2pfg3wFFdrHhumKraW+u4XmMY3qN2BTMKk6tQZOAekwrULCZODM0SwdaINcsSTaR4NOq0+9Hnt5PFepW+s+pZhvYY7vsa2AefRXXxBO2n3sb49HGSGtq++wy7S1ZsuPrWNbbPOjztfZIa2noee1EuVV2IL3Rwh+ysMvh0N+L7ZSR8DAHt2TGW0uDqbt0F9Xzu2kWL0REAgyDZ+OAjBgT5GdP0ArXKEipjEvW9SWiCNB8XUHckYT7WXAtqgucxMGuuJWoNylYW7bpHLW0EOgqkdNJeAtoy2yq1AC0BrlJnZdP05XpVoTlp0d45SZJc7R0eaMkklvtJnSLUq5xN2WpPmm4IEX6MGEOYCCbk0gmjRnuJUlEO4RKJT2Ld6vzU8WWpLK4zqgC6Hu+5pN6efNz7sDfacZWkCjWaPY6YQoPloTKSgZVWZlWTaEOJCWG66KqcizUOZHIGFmPuISnHlXxsZhzg+/0UXp2O9mXMMtgyxdfty8eJ4DGHHfzIDZiScc0H/ZUK/km9myxUD1KdFoA8JRfImZzl5k6zWLHunToumStl2hXPTzKW8Xypo0E08TTT0v04BJl8u1m0+dzGGEAOQIwI7TGIDFyzAIyFWZ3A3b7E4t4tdA8vU1PsyXvP0J13uP14i4eP1ri+7nExBDzuPS4Gn+CyV7XzIaA2I+5e9ajPOywuGJqLO6bMJ9iqMCOLptaXB6HJc6tKLLYjVMGgtQCcSUiITpTghvswkXGCc6CG2ZgxeCzu7BB6YUZWl7zJwqFeV7DnHY66EVvPrEydEFwy/pTtpw6rqS1MxUQVt2DHVS2cyCwt4ZYtmrOjCU3d3XmLM6vlMXB0G6FZcXbVrBBcO2nwDSP3n/qQ6/DagD1HnYiilDzYAnKztzopldwafEzHuvx+lRBcSoYmtxiw8+LpEibfc6/BLn6jHVfZ1KYnYJ7lqPM6ZAHsvEIBIehrJqmtLrSzYXgxENddyACWxxfwwgDAGLSLj29Gldru/BHL1Bh2CntTj9Xp2DoLg5YsQpXAOuSstPB6oLibZhP5UcgbxU7pPsjAvXSOXIPoOIseZBNLyEocYZwtLDLgD8Bkeq2xQLNAKIRbS+hk8v30fQHAOOyuztEcn30Qh/4TY7tn77HTngckIfC5FAjOuwb2QQV7dh+x7xDWl6g+s2ZpovUl+vPzNCrl6mvvYfN4i+35Fve+dolHO4/3diO+vn09jcpHO4+Hrcfx+U5Gduwwdj2cZtdy/an8k6pqjCGiGwM2g8dm8EmjMARmr1kiNI7Hx5gANLBZ0skQM+WE5MIjSfhYUbuECZ6DoWYB2zbYPZP+tNUTDFcbdBc7tLc69OseR92I/npAGALGwc/GgAjjUQgW9VEFU5midlULZZ0zq/p4xb1w9++z/t/RLdg7byG2R+yo2lPE5gh9AHY+oNt6HlSt/aWFMk7Zv6atvznjUuQp72t+Dz62GgzsxoDOM5tzCCzFpeYsobUGjTM4bR2WFU9tb60REWTOyirz+m0xb7TjAg5DhGoTkobUvMxMDbp0XgkqPOS00jTXmJ8nA1Bk2K2YuprsE+C4YiUZhq0Rq5ZrRQBDRvq8QHOHptjGMq0vdNnmTE5BQ3IjojouY3Kaq1mrL1Q5wpiGEQaAF46qRU0mbztsWXpKpsM
CDCfS8qQgkeT9CUT8fRQ2cQXVX7/bLHJnOv8ImBdPJ3jTLLqaF2Ug1x1Vqb0kq1QtgquBBZ9rc2tAUk4ZelSbSyzXVwjXz3D70dtYv/MU23fPcfR77+DoDy9w99EGlghf3QzP36HnWCV1ICUj2MqhWi1A7TIJC8d6iW4M2I4B2yFiM3g83Q7YDB7bgZ1XmXE11qTGZB+twHPyz8gk6lCovRuTqOFkHGh5AiN9iu6tz6ParrHq1jg9f48d+tUG3ROuyQ3rDsPVJs3H0n6sWCzwyg7U71gtHIvUnixT/Wpx/wy0PGblijtvgVYniPUR/OKUsyxb43pgZ9X7iJ0PIkgcEAL2Ms6bbDLzb1ruxRA4c1Qnpa0HOx/gA3+mZl0qVFwZpu4f1w5HtcWysjhq+Kc2eKvy/qb/DoUK1eYki/L3OYMQYGhhfsheuaVAsxIAiK8+0fPDMm9qGFvLKPieZziJxVL0MoJ7SeRvBpbbB0zhjIBE2gCAsmkbmG1XOPzJyBgdCSFOi/uMCPA9S/6ISngik8QAGodUfygp7Npjkgg6pmArynughByhDng8KLqsenndZv2RzPf6qIw8j9AAgNTSUIz9oALqLWV8okK5fgRVA0zdAu0KtOBjc1xXcG3NhAUfYazBp752ia0PeLR7//eCAfC5ZYW3jmqsHix55PudU5AoOKSs2Tj0PS/YQ4i46j2ue5+yrc3gE7PXTliGU9mxSQ1cr58Y+dqDkr+mr6HVbRhx5u7B5xA2V6i3aywuniBsLhE2m6RKMna7JOkUpPE4ChWerEnN2LatUR+vJr1W9vSOEC7OgJO77KyqJcLiFLtA6AduB1D1/M3gsZOxRepw5na4hl86ruJYCclEHdYQOHvl37Nafy9OTI93bwJqZxhKDJKdBXZ4jTNYOM7CLPFk5Ve1N95x7UF8mP5e1rg063ph65pmC/JYr2tEoWXPSfavUWT8sE1HBxDtj3sYQ+TBgmHK9gJywZZIZ3UpC6rMyqafVTq3cuJtImCEfacVdtvcmOy2MJXAlaWwbIqEGdJUhtSkEC0OWHdJoZLgMwYPsKBC2Sy5J6QsU66vN1v4EHH6McxX+8AtBh6foYGEjsBRUw1EyUxjIQeWBiOGETRUIFvD1A1Ds67CwlXorzYIA7/fvYsdOtEN/Gb38rDh3driXmPx+WWFs+875dHvn77L/VzHnH1Ex0HNEFhAo/cRm97jWnq5NoPHuve46sa0kC5rvuANUYIO9VoPMV8nDJdr/bTOdVeggMWrCapC4w40djDDDnZ3ibi5RuzWqK+eIawvEfsO/voKw6ZL0k7swIQJW1Wp+bpatbBHR6nvypzegVmeILbS61ivEOslNp7QCXNzO7Aj6UaP652fQHihiNbVISWa/myhTM8fcFxBMi6FXgef64dlRsdDarn/sh8DfG3hA9CNAaNnGLd1BrsqCIRosB2+Qx2XXnhlo/C8aVgfW8r1r0h5/LxuM3ldsVDq71F+pzAiSo8DgIMK8nsEiI/RNCpNGLcUXXWYnl6YGkUpZKDsK2cIrcvpfWUpNW0qhdYSuIt+PrdLHFYJEQIFIcJ7xL5jVuDA85qgChuuSirbKgXEwxMXCMszBNei03lNfYSP40ShP0+x5X2tRQLIGAO4Oo1kh7YI6H55Bxp3qOoFrPs2UfgQBuGkPljUu5Sckxbnsim1yJS1HgoysGcRqvR+8nmeoFwfL1GvKiz/8ALfc77D4/PthH2oDoMp8IablJ3B7ZMGp589xuLOEkdvneL4cw+4xnPnLtyDz8Kc3gGtbjGt27UYx4jeMyR41Y+47j2udh7rfsSzzYDrbkiO69oZLGqH4zYvdUMIabEeA2EMBB8NnKlRNTWPLymDLhlEGY1D53mB9gGAbUHuFNWStQ1t6EFDB9OvYXdrVpJfX6JZXyJ2a1aD33VTFRm9xtslqF2JooU4LNewqG17DG9q7HyULIs1G7WmNwTObkqnNUGZ9ppaaRKoDpg6On2oDkq
PpT6vx65xBCOtP7Uzk8xLjzNgcNWPGILBbjRpnxtn0W9fHaF6ox0XsA8L7jmfwghZq/AgE/HAayIwZdRpJBbGNJp7b+THJ4ih9vAWwzpfeXyV2Eaa7l/33IcyFMVXAFLIZvmc1hmcthVL6Rigifx8hKhbYHoslWUZSTJT4zK8mBqcObInMswI67nuFseiNhI8oh5WVyESseOSrGsIwG7kG3kMGbogYYq5mJ0VQsRI4H4zECoJSMjx50DraWMPE3ccePgepuqxG7s3nqzRnD0Azh5g+Obv8RNztYQDE77LeqAqtqf7owqIvodZHgPBw57dxxEA19YwtUNz0mJ7vsXxkw0enncYtx5+EM29ysLWBs1Jg1YGFR69dQuLe2dozo7Q3L4Fe3YvKT2YkzvcylCxJNGo128A11vGgG5kp3XVjdj2I7Y9w2Y+8ILajwH9mBfJTCio0TgDH3gFcIbgLaEVKDqxiKX4E5SxF/IgSjWu3TjU1THa9gSm33BG1p7CHq+Z0dttELsNX9uSdeloF6pbZsgam1iCjC7w4MXR54GaAZlgYYkwgGn/VnjpgaaO6aam59LBaTaV/hana6s1hKqAFUsrHVz5frotszcpH8Oibvaq9sY7LiBnTMkONbvN/sZrmDm8XeHc9prtmH4Gq/NmtGP8hvf4pNjcaeV6QL75ywupdQaNMIR8BI5ri0VlYNM2rM9oOO2afpgxU1hVaydyrBLjUBiFxto04gLBsjCwzm0ylvuwNGiQ3hS9kXuJgLUBlW8UAjmCJwCG82wT5HHgfVfnBWMT0zIRRuRxjAExeOwuAqJ1NyqwvylWfeoH0b/71aycoI6qcGLpWk/Xu0mi0wlSjAHRtaA2cP/OjgOPRcNahNVqgcX9LXbPrtFfbllE1kdWyVgwNNbcOmKB15Mlmrs8PsMsT0Cr42krg2tTK0N0NTPZ5F8nDLedD9j0PjkthQp9iOK0+J81TIPn/q4sDwVYEEUJdDgxqdNxGbNzl7VAh1sqYhHBKh6WmFm3swYLt0BdL2Fk38n3oPoIZtllggxQ6IU67jM0luuzBQtWqfVqBnz3WcO3WsUKAAAy7T3MnELpyMo/lU5nCFP4rzJMaFENSBYxplQPs5Tl9UpoUSHZ8nMMYU+b8HWmxr/RjsvSPiFg7rQmTW7hgCMzBSsNmLDUNDrQ3gc9+Vr/ccYxJGXcnobfJ63uZQ1NLuZ51MSQYb5wBx8xOGYOAXph2snrrBHZHAgMWzh8bROIM8ceS/g1BpCt+CauFnCrkwwZAkjzjJoF6xrWHHXPAzVdTAYfEQioLGRhAWwUxakImY6rr2LnRa7mgnwY0/5zc26XIDZULcg44A13XAAQlmdMfSeXJwInJxUmM614kZf7y3ANiABEB4QYWCbJOFhjQatjxO0a9uw+2k9dJlislOFybc1QWLviGs7xLZh2hbhkvUol2kwAJHUarkFPNbY9MwfXQsi43o247EZcbHqBCUdcdyP6InuorcG2tthJ5nXVVhiOeWHdDBanrUNoK/SeWW/eEfpAMuqkZucko0UGH9N05iFkqN3HmOD1nY3oPPdrtW6BZrWERchDWQuC0GTdKTVEy6wYcj5INBBBsCbCeMAZmyY6zxmF6sBKhiFLY3EwJwWQ9F3K+7+ynCm1zuJEqO2tNVjWNvW/KY0+Ykqd1x4vJYmUdUUgtwXE12j4f6MdlzYPHnJYwIzCfciZkOFGEDKcPOliWzitsYwkyoInccSgFFBna24Qkff9pLHSaqsU4IjaRrTOYFVbbEc/ycDKwq5GWMvKonHmoLPTn3oeErwyP97lDVqeK6MyUkyHJ9dMgw0yiZABW2elaiDRa/kmstAyhu6lj5F3IwJOWflGRt1EAIFQW6G/EyGQAfmKWXRSo6MYeT6UA/rH30B9Nw9OfSPNuCRmnP7JiWS4MMKCEIiPkRKbWENSQnw4wLV8vwihwzRL4KSH6TaTumVqLjA2aeehbhDrI8SqgZfm8L1WjJI
cVbXw5LDpAzZDwHaMuBIyxmbgbOu642zruhux3Y3MkA3cw+VNSI7MGla4aZyiCTEFPcvKorKErcDkWh8FMoljSI4B2PR+4iQqwxl/lNTNG3ADcBQnWC3h6mWu+85aTNLXnwXRjgAy3G9mjdbYCM5kiJx/muREdL1SZ6ZOpDLZoRgCMAZYssDoJ/f3srKJzn62qLCsTKp3p+CmYGVqYO8D19FHcfSagWm2qkYA3PDqw2XfaMelRf4bs6ww/VsiUiTM3hdEihzdKDwYE3tmqgYPaPZFCWsmw13hwB7Z7hNhtWXozBkggtIFtXAkEKJLURLXu/h1loDGGbTOFioDMufoAHQeJQgoe7f2ZnNNrmADGK0lNpgPOEuUd+0306yIkBiPaofgEP4ZeRGOxDc58ZmDATwZJmEU9R7YERg6hgtfkmgzfPP30rVWffqHX+o1H4cxTPuS26qej1qiy6tTISDIyIqxAvkaZGvQYtaor/Uz1yIo+aNaTqSJ8odKI7Bm70KKUJLRIBTsOaNNf/oQk9OKMcL7CJIa1rb3qB0vwBuhz5fW+ZDqupUxrC52qJ4jjqBUjgCAEDiTGDyrRACAjNPi685EYegyUnNICxDl77q2kUniBiRQnTcRNgA+8OMQSRwE5ensBqiixeAjKvm7Ph58TPfyEOKEeW0kWD2uLY5qh+PaYFFxE3Fjzd69r+tlUuyIEaPNDl4dWtkQbQCR7Ho1e6MdV2KtiU0c1my7SS+RvpZEYHZCQzQcPejJl2iCHRlSeh1jhp9Kts1NC/rHbY01jERgf/9KSJRprUhMRP2+OhKiEt2xkrGn72G0/wXI50UjSwAo2Jjp+aLpE+QQSnUHNaVoF6QBS0wWiZYZhD6Q7HNM+obT6BO8Z0YVwzno0LXZkjDKlBlpa0Rp0tV9gHGsIk/7CvJRmYohYPjm731iJzwvFgtst1sA4JEZBARLk3hhLrrKUCzXu5LSSTFNnHwDuAHR9xky1+BQG92Nm/Toab2srB0TkK8PaXJXp6W1TA0UDVGavbWsLRYCBW57j37gVbjMugCgd1zr2vQeF5se1hBDiI3DbgypnnOIzKD3zE3U8oqHfgEeGEyEoSytFCJgTYQNhNHIY7l/eNSS2e+BLOXlCsfmLM8dhHHwJo8B0vu3DLLVYdRWHAf0Z87GFB4sA9XKMBrTOFa9WFaEhTM8ZmjYTISz+TXcO1klco8BqixMUDo0PecxArH6Ts24tJG0tPnJnmda5U/guSQKTYcDOEtgJIWxYSJZ1AOTFHpILaVYzD9JVpPg6qUCxQGKdGreleJwynYEPr3JIoBpWwZfWoqAaD0S2vwaLBDr/X4iLVan7DnuyVBZo3U0+afORxyYTZI2WQ2lhDV8AIJM1PWk5zliDFy3rJxjxYTB5kK61B6iO6ysEZZn3Og7bJO6xyfVKAZYOb5OT9Csmbw81+q4xuJJpt1Yzh6qGtaNoNAAwy6f0wS9a8aljeP719Ik8yj2KRQLamNZYq02rInXOsO1F8fbL2oncBqJA/PwY0jOJghZA0Darh/ZafWtQ20NDM3WEzHOkIScYKhwnKa4ttghKXwYhXaugRJMhI0ARV4ziFjJB0QTx132QCYnocdmlIkNRHCuTk6MjyntObIMI/J6lTMfQpj1oyoDsNQarI3Q/HfFuuFz8KnniUwus+h5N7KflTKKTd5XAIj1d2iNi8OZ56TZwL7TAqb1Fn3ZzIERERBjioQYMsozt/RtiXI2pvAhReB6s2X6vWBmH1fNa7vdZrWIoUvySWbYAtsrLqKrQK1h4VpVU0/NvtWS5zgVi0/JQMtR1L5nS6wjuSmsTlrWbNnbyQ0w+Vk6LzmO5GWW2N6ktamRwJrledJXBPBO8xRcjeAhqv4Mt9TWcb+XtjwAwIxZqJkL/634XmTQv/tVPt4xTrJM9z1/7Ln7/VGYXou7q/P0nE6ozRntlKQEYC9qBiBThvl4WcctDuTLYGS6mB0KFG+CnHk/eAOuJUf
UIcI7wNkAa6pUl/IhYlHbFDRue5/+ldRsNZ+c2CgOrHBowkx9npVKExlpoT3m3OR7ziQSNKslFNmm9MyVfZBUXD8gk4lEChcYB0gmZo0ThykBmuFmbS9QYDQc0JXHng9xRlKcIdQGgO9Bg6iu+H6/R1P3CUjnNZGyJNikIuNOf5NxQjZ8p441OUS4uMlhzRdHeTyJ5vVp+WmANFvLmuyw0gJXvMbHmKIqXgtJHN78cv1oLUVxYw/yA8xuDeqvEa8vML73NuJ2zcrbrmJquMywouUxzGIFuzpBaI6zWkUMSfsvwEyclmLXpQWI81BGJkFYUbLQ2cyw4hfMMmU5x0kX0iDDUJjCuNlJSSSbDkB+HlDYEPCzYo+hyLVAebqxNWd2umDMrpOFaFFeXG/SQNG6OWIkoN/wPvqeGdXCUvR/8F85a/z+//Wlz+GHZmUjPbB3b+gIGUPmYASvZk3EEDgDcaaGlRlqzwMe9hRvinpOOisxAL6HExJDBaA1BtEYHFU1NhVrFS4qrkk97Qa0zqB2BtcdMwyvupGVHMK0OdYH7l1kp+Xhg016e7qIp94o2d+SFq7ZSSU1H66J7Q+LNBIwGVJUIEPs+nieaU20JCeN+4w+pHMlLQqgAeSz4gkZzsqcBA0DEaKJ8JEmtUFTwP2WkBuvx0LhZu5IdZ8KfsFchCFSIT6u903JmFRCT7+5+QJ5gX3bOK7JwZuxktLPuaNSDL48sJjWrkkuRGWm5Y+hA9HndPdSJPVxmjRKU/B8YQ4bxM01wtUzhIsnCNt1kushY3ikurVJH85s17BnHmiPgCoUF2Ceg3bIaamT0B5g3Ub9ewgRgQiWDIw4xASxhemNsr+gukkmlaBCEUpmdZDpkS/3U5UPFH/XKN0J1FjLYsRqISZBl0modmZDyNeBj4AlC9ecwFaspEB2x5H1uJMa2idD0zJaB+PLpu+ZOK4ed3FeAJJ+nTbxBgDG8+Kn90qWCMsKK9bQYYUaXfwK+GmSbfSbDFFJozpZi1AtULfHOKlXuH1yguPaYjM0uNiN+NbRjqny/YjzzZB6vK6lv0shz0acnPZ31S7Dj8wUnvYt6WMAyUkB2cExhZzS/CnNXrQunB7rtaXDW6XtIjFZCy3JicOIRR8kMIENYZ00/ksdMkGIhEZnz5Uji2IAoudremBHRWM3EcAGsD9HT/ZJVW/iKDVNeY6sNsPZHAyLOkg0lvfXD4i24s97RXujHRdFfzizmmdhwNRplQ6rjAyQi6lz2CtBTcV6aIkyeS5On9deB8TwscGE3fWF7JzCchIplSNArAV5nu4KH4ABMNbAukpqOjyBlnzPhIUZuQXAxCEB2YFlGAKAMBnnYOIes6rMtIDsqLS3RTQJy9qHsQSrl4GeP+QisEKDAax4MBcj1Yi5cQa2EnZWeaJ1AQGPiYFxk0nJrcjdaAN3gDgwydgmx0schf/Kb+TvB8B+/sdfcDY/eKM545PsJACMROkeCSEf8yjHUgv+AEBB4dYIL60XqrBiiXEq7QtLLSw3RfRafx13oI71//x2jditeQdkZpg9vQOqVyA/4O7qDtaOmFDgLK52Iy52I45qlwRor3cjU9hDVjO3hlVijhon7FmDhSiZa4Y1d1y5bqvZGCXHnAhMKZPJ4zx07pfKp7HgdDGu5wbJNJqtaVTcG1HRCKnDogzMy/EhpI9t8T4xQ4CCyKQp6YfWUAAYRwR1WOOQJayk9zLq57oaUdYX1CydRlWdpPNuXKdf0t5oxzVnFU4zsFlz3/ylsyzrUK2mPKyKR2tWVcbzeqtPFlPCx55taRS7V+A1PO3YNAuEEGCwhhf1aoC/tx0HRPmHcZi+PmVCxbESzM4Q9wBVcpOmOWbQ7GsaKExgorL/K7HS3GQ0SWZ68ueWdHjd9yjUXx2gNwbCGFkaavCsJr6T/jUfoyxQAYBD4wgh5sbb+T4xrOPRP/5G+kzXHnPtbmZ7zkuGeSYIUReJ5xCEPlQr5k6pxXIxKc5TjFO
WnEKGymLTXlJDQFWgEy5ynUX51tZQgl8nTqusoQxb1vnrNvDn77Hq+pbngvGHGG5c3q5Bq2NYz5qTx/UKi8URauOxqngm1HFt5XyHLHGmQYs6L8q9io0SPqzZc1aVZGYAkpPS70xSGigd1iEWbnJaioT4IUGiyiRMGU8smLeHFnl1WnKu9soSRfZV1pwmNeRyxJDvUzaVgtukyi1KH+PAuotDzxlXt9nbnqxFdAP37EnWNfn3Adib77iKNPqlzRxwZHix00oO6XnRgl4U4TnbfFQWxkwOiIEzlvoIZBysintePUOoW1R4wq8pdNOobkHLY1C7mlDR9YZQJzIU9QNmJYmivO6DkhSAXMcgVl1INsfLlT5tuf9njICXpkbNfitDLO5bRqZAaoztA8OC2zEgeC5SX/cjLrpBhEo5GwgRgDSjqul5prHP88DKTFCOQzSWM0zj0ErRGQAiDECOLyzjEKXxNPqGe51GbtKlYfOxXiexaiew77w/x0ck1YjJ67Av0Ayw4kLrLF8TllBFghPnVltBKAzBkcmL9NhltuuwQbh8iiBzv/z5u+gv1xg3HcZtD7IGtpb5VdfPeGbVxRMeY786gW2OcXd1B2G5Qo8aZ61FNzKdvlNtQx+wG/1EGqmyeZZU2cNVWZOcVOmgFBbVx0YkmEqiA6Eke0wDuTRq5lCWpXUkYL/ksXcCtTY8prqu6iACDKtO6mJzgkwJ/w09gjqgRHqTTKrIsEK35kyr79iJlbqLQu4yixU7LfA9QXULVHWW8HI1YvXqof2b7biA/Zu+LKTfwGJCCMxv1eyMTCqSRlnIyrhAs6dSpYPKTO9A9/vHLcy6O3+0B7lFnWNlK5BrYeqGxUy7NXxVQQVFqW6TyCm1K564qkP8qgVi1WKEiJf6TL3lXi+m+foQYUVJftJDJ+dHWVElVJWcFhXUc+PYaQWuT6nTsuq0tKCsThpyHu2IumphrMEQRMlePkcXJCCkqLoVerXWIWpD4rC2idCCceTIUqNKYwFb801vLKiQ6SF1vGRSk20axVK1oHHBC3VfgcYB49d/G+6zf+SjuDSmJgHITaxQIAdxunjrAk4qGRQzTMrKK+wIYjSIlt+7MopWRBgAzuaoX0fc0Nil+mvcXMFfPMH23XMezrjZIvSCIFjDc8A2HWx7xWNErp5xTfb0DuzZBqY+gmmPUS9O0dcOvQ/YecPXa8i6nfP+NSA7pPSd09/3nVapIpGcFhXbolg35tBgmWX5GapRnJ8bbQava+YTixpqHAEyRd1JalB639EBZ8fbmrSuTWDBvkuErth3CH2ui1JVZ2Zy3fK/xQpmeTJlKetk8tdIvt58xzW3A+yvQw6MYpjUW4jMpHt8si0UNppJtZTQGQCmvH28tjt/dPgPCvsYB9jAlFpjYaxER8YiDn2KmszyJEdI9SJHSvUyZTLcFJobl3MvFRDsLACYQx6lA9P9AxKkkaBBmOS0gANOS4vaM1yeIjth59qCxZWlfqoY4Z0RZRCGhpidlqe00m4nLQScBcQdZ0lwFahpmX1Zt4D0fUWl/GpNTqYwA2BVe3ESIM4aKAa+0fs1KIysvmEMqoc/8AFeEc8xhY0K4sqhhVwf60KcHhPSWItSEy+EiCqwOgNqm5wikWHkYr4fPk9Zjt2andbVOfpnPF1YJwv7QvfQVA795QZ2UaNaLtCcbVAfL2GuzuG2a3ZgqxPQ2KGtlmhcg1W7TJJN2mg/J1jtHaLyGIC/r37/ua4jFa8pIXB2Vjmz0VE6e9PAZ05rbx0DkIgVKEoRKesqnE/RJhRV4T54IGQnBuQyh76WjJ04Ph0zFIceMXjEkX9C/pEEIGTFYcloFmqX7LTaFVC3sn5UuT/U1YjuO3WsSeGQJid5Bmnx32nPgaW5UPIcR0ezTC0tuJjq8MnfpmzGl5cI+rAsVm1yrDEGsCxIsVSk799w9uSPGNtuT/mnfLcgmVWsFoj1Ep4c+hCx2wUcknIJAIz0QKn+3QjA2Br
RjgDRXsc973BIN2OpWL5PwijgFsmyyA8oR9CnOpkxufYZAhbVEhFIkjWnjcvva5DqGLWV4r4lmN01qLuE2V0hnL8L/97bXGdRZptElAqJKHMKEm3aZoFQrxDrBVAtWeUbJqmbO8NiqXArNM2RQIcfT+OyJYCk9lRSxsuabmkRLCcUtH4FvhYUhrOUVSiGELLWpWGZIU9AtJT7C4PnoaLdmlmu60v462v0Vxvszq/Eee2wu9zB9wFBarHGGtjawLUO7dkS1apFc+sIy4fvoL59C/b4DObWPc7EFitE16BSBfZycjYVQyLnI19o2pgNFE4pDDmI9Zhe2yXJqCxphJkTO1DqeC5aBOT7RfeFZBJDVTOzz9ipEyvrSgkKvNlp0NypGUEZDN+jplmwUkzdYtJC0y5BzUJKDEfipIosS8kh1h12yu/D3nzHVTR9Tp4X26O/A3s1rjnlUw/qTWMsNKuhGFHffjj5W//0nVf5Jh+IbbdbBFNPI74bxtQnEwgxkgHFJl2sSUuuXmLndfJpLsgDQGZYZaVpLQ+PQeogAOpqKdTffg/Ln+8LpLu+jIBThpVqToXC9kyVn2t6HECQFL0rP+CkahBrB7RuSgoJIyjKYL8wgnY+1eVMvwZ2W442FYYZeIqtGXpQz9AJ1W2B7/NNHBcrmCWzMVXtvnINXNWiL65PHyN2PsLaFs61gO/RXV98NCNUgpACkIkzLs1MY1O/FcFrc262jRMCUhC4cDeyjiBDZoHHvfuI09Yloec9pZ8Y8lQAIQT5fkxTg9VpbR5v0a/7yXwvMoRq4dJsr8XZBXbPrtHceoLm1jHaO99KkHcZZPCCW0MnEKj4b5qyrTCvZP6Hg9kZ4qLHrHxenVQJ29/wugmZbB50p/eO+68hkxGkm1b0ohZWwooHTRyXOjByQo4KHrQ6TvdbggQrmUTu2gTxB5X1Uqm20ryMDuq3eFV7sx0Xbj7BAF7osABMUur0MoQJ3XluzdmDG/82d2QfhXWbNfpoUuYDKKHEwJo6sZgSbKFQQ1FTSrOWNMqUhmOdBcTMsgKzBybqAiXVXbOx4IUybVlayNoW1uTJwxN1nVnmrO+lWZZCKzTsplnWDQuDsv+IBqbzd8jnPxTRbtGvEmUB4G1Gdlp9x+9TVYgjQ4QGXe5Xkc9RZxjl/QEgGB6USZFHgEAIGnVzlGpL+k/72mrpaevWV7wNDFbLxcteCu/PymOoT1lt9j4ArxMBMaYaD9cz59dBTKMzDPF8KmMIzciZV7BxotygDkDhJ+4L8oghpH/BR/g+YOxG9NcD+s2QJipbItTrAb4PGNYDxu2I4COGdYf+coP+aoNq1cK2NaplC1Pn3iJl15LjcSspi16dwCiZQOuS0iuVv+jU+exNQi+dU9pmf62J5XVfHnNlAsrjvL28J1lQUZpIbNBDmUyZ8QFcJrAZEiyVc9K5FlIHmwPVDT+vnyHQeBCiBdTZazZVOisyk/Un9avN+wbfh73Rjov7TGYQ4CHnlLYXSDAtTvuRz1w89ZNu15sthkBpgrH2LpU4PEe6lueHAalAPKkL6cVIlIVQIfRmaSINBiKTxC8pFQKiNqZK3SAN2YusTqCkh4WzaOolO5+xnxz7CYwrPw1BHFaXGWgl3FKyFcuel5j/HtQBFawoHbvBH2JzxlS3fANrD4pE5ubkDszRLc4KitHrk+utdAACKfpdJ+PYe6Bes0L6sGVSh2M5rd7r2AfuH0vZIABrjJzjiLOj5SteJVPrtltQd5mPd0FsSezTpH+XgwglbxipZdrA2VfreCx7ZaI4MWUZEg/8NCFN7/WBEq3ezJzjHkEgPc/woO89whDQh4jrMUiQxmSP/lmH1bpnx7buUa9q1EeXaE7O4dqKB1wuedClqRyMNNqbyoGsQbVq+e/LJezxWVKOoeUxQ2NF5rC3jqQdDS9+PHNOk+fIYE/J5+B6VgR3E6d2GF4stQ8xIjkvGBnaOnnrgnhkshMq+yjVScWqxS5QIrr
4EAHVSE4kFlZWqY2DpSA6pUK9/47NuPRElZjp/OQduIAmkfUbbushYDfqBOCc7eg6ZA0Pn3MimLmqKtR1nVQq9ijeADcjQv4OoZ7rcTUmr2Q+L95RJJy6MWAIQDfmycQA1zucARbOYOEMaluhqWuGEEvYRc5dej6EJFc1ESAt9z3VDsZpV3/fMQQlxf7YbTBcXWO42mDc9ly0Bhf6lWJdrVpxNCc86FAXr8VRijLnkFHqxxrHrCYwDhle3K4RxiFF9HbsGFapGlQxwDVHiW5O5XeMATAOzjoEOizu+6oW2pN0fsuCf5Tv6CPQj9OxHVT8dIawcLlPqbYG29GjcQabwafrUHuiWhGj1ZpZlOtGAybS4EGCBtfWqWZFxsDWFsYa7C53WHYjji52CMO03kUCBQzrgR3cusf2vINrHWxt4VqHauFAlmArB1M7kGGGYn2yxLBaoFp1aEKA8dxUawAEJeRIH5Kxe/SSic3p6ACmAbaSpEpR6Xlt7UU2K40cys4S21DhxLGflFZ43/J1pVlWVAasOCyePt0ArkZwLTpR6++7mNoK8pDdXItW5ZEa4mqNgXV1uq5j9R3uuF4k4glgsjjf5LTetGwLyIMuSy05HWOvPIXWWVSB9WwNSd3J1LC1pPAHGJJ7TZCYYfDlT2Eq2tpNIu7eBx5O6Xm0RGsNfI00yM9Hne/DfTPzWuO8ITP9XjpbfT4GYOgR1GmMA9N2+w5hfYlw9QzDusPu2TV2z64Q+jE5LtvWMJVD7RmaqnzgmgfAC1WzQJAJzAkSEVOCCIURNA6gRjJCZchphgaBZMYB1HcJjqNxxwKpspBxrS1nizFwPcBWNdab7QcCG7aisZiEoLVJWuDSIWTWaDn1u5L2BmuYGRgNN3o7Y1Bbnu22cDycVGdWaW9UbVnVXZl4ej2l41DVGaprWWmmkf1VJ1afbDCsO4zbEbvLHr7nWlf0EcEHRM+jTNTCIM/5iLEbJWPzMJWFrQPsMMJWgizUDqaqEKoxQZZxrDgrV7KDqwHjpxTzmZWMPDI8p2ACK1NWspgQFUqiyDzruinIflEpRAhaqrUJ6/i76fghMhPiRhqkW/ZQ2jq1wHhy2I3cFznI9ZHRAtkViK6rnOTUsB7BGq7kYOslYhgRm6PD3+sl7M12XIZy1KYHfbYJFdHkjfO63kD75vk1AKGex/wvgsdzdz4kivLOBTTOYOGsQHpGxhZYVNaBrGQ4ZTd/sRijYBvuMa6kGEsoajlguLIbQ5pSC/DpOh4cjhqerNp7g4WLqKxCiSY1aU6yjtI5zaFA7fbXTn5xFMoAjLsOYaPMtA791RrDukMYuPhP1iD4wBG4NbCVQ2yL4nO7RKhXCItThGaF0dQCgzIs5ipR0lY4c9wlGISqGpT2S3UYlUqcIRMM0qJgZzPK1GJIUwY+SDtaLnC53hbNxSZp+Q3S6qCkG0ssq6UMzMpoBkaIFRLNnBczO2GcKjux1OpLX80wLBVtlVmawXPW265gj46wHAf4boex6+G7Hr4fJ78zgYMdWr8eMKx7+J6hxRgifO9BlmAsYQBg5QsbS4g2pADmkMVxyPCZ9jkai2hmr5kTHYxu7wHUgHOTILvsU5ywGkspurQTzzn3s8xr8hoKMk3B5vqrC0zrPCS7NN83hQTrJfdtShO3oilD0Homm+pVEqTpGpmVygo2jCQSEWKssE6hyfu3N9pxKayRnwhTyZOSavpJULL4AK2Wu7+yQB2Ih8VFFj91xqIZTRoUp9Z7Lrz7GLAbOZVXlhg7shp1XQP9Jgtujh2w26BsbCSp+3BUVqdInXyP1rUYHWHhCBEOIUaBD6MMrRsxBB7Bflxb9JVFbXkkeCOReSXBCDvDmGtxRbY1gQa1x0QL/ArVqJMwBqZycG2NsFyAjEmLFUNFDUzt0Nw6QnV6kmjU9t6nEOsjdlrLM+x8TE3XbExrH43UEOsj2HrJzqheiXSRqPKrgHAMGSbSaFeds54oXUBKlMD302v9A7S
yhys5HPlbOUC0FujHxRHUbSYjWzRuX5WZhHWAEHzKXilDMo5egyFXI/oWtDCwVc1KLcEnNmYyzVwFxlMFh7C5RP/0GdPnn12he3I5cWT8mghbG8m2LGyd61p2UaO5dZx+T/WtumUSx2QfxJWXdc7imitFZtXJwVhesasDaFB5vA4M2ORtMsR6U7PNTQE7tw0aRM81rXS/HmI2lgFpMZ1andYQsoKKEnSsTDEu5a6yXmPueYvIdbBeRJovtiNe1d5ox7UXYRzKrCbP59O7hwmTwe7iyXPZhJ8kW7iiKAskkdfRspBsYyN8NElTDtDGSP7ePvJ/Xgrt/HfJIkRpOpksIilrkCZF0yBf7L4Hhh2IDBauxlBFEEUY4htfIUOFNZUm7Z2qGHBE7mOEjTRtXD7wWGGmaMC0ZnVWxnKTcPBZ3DN4VKuBs6rKIYguI9ns0Ezbwp4yAYNWxzCndxGWZ4jVEqE95paAMB3nUVLBAQJChCdhT7qWSS5lBjvb/4Pw9gTuMXlx9CMoBOwungBhfC6z9f3YyWofenx0sYYFgYxkW5IlVYbgEHJmWQ48nN9bWrdxSnapp/VBbcoFcsYhRis7DULnx0kcve13DAVfPYNp30W9uUJ9vIRrm9S03Kw7FpDWIyrn3Epdy7VNqqXZ5TKxCtVpUbM4CMHxT79HK4+irAJRSo8ODA87gd+NwnemcCA2f0/sM3SBEoqbNkVPdqsIDCKQm+EVKkzH7jnEkTLjU23QIIMxy/ePUI5IlsQzuV3CmmI4pux/FMhQIcbN8OqJxJvtuAAo1XI+Hv5g34RuD4AgWHJ5UgFeGIz52CWbnme7q3M+ccXAvso6NI77scroSOVtcjRdRteMSUfPkTVr9wFu3rcC5AgzeIZJROYIOgNoNIimg4kBVR1wVLewJiQR3MYZblAd87nIIq2MXvgQEaURdg+/18fRp+eiYfUJVQMh51iWaeylH0yK6WBIisaB2X0F5JOkreoW5vQO6OgWYrWEb48R2mNEW6ditB5DYCr1A7Dz4oVDgwSCMy2sFTp/6bhKNqtef/qd9uqIuYWB/KvTh9+PGQJAPCYm1bSocFp+yCzPAzBuWleNS0oJ0dVAtZheV3qfkvZLcfax5wTtFGbjP+R9casTUNMirC9B7RPY2iU6vEpFRalfkjEwtRNCxioRcszxLZjUPHuce7zKidelnJJk+vw4zGpbfH2lfkFjEYPh4MqIqPKM+ao1xtJ5AUhMTtboZGfg482ZV/laPpcvV/eXHeen5T2U/akZc9pMM6uY2YOq1aj3hQ7WlA6KpKriI8OMuzFg+x3ruOQCSFBgMR6gJBWo3RjlFsXN1+3o/iiMtheZaZeezJCDNQ6LQ2m/rRnukmynL7TaNAEN4OMUXQPEgBADjMhDQZh6ajF40NCnuoyRwXZxd422atFUC8S2xXFTYTtYVt4Y40Qjzpr8L8kHaZ8PGUQKIIFPiAKP/5a/ATOIJIbU4Gw8j8WYUOGL6FhVLszyJEnShPYYsV6lpus+RPiizSBpzxma3Jg+xFRb1CAhICSYzRIHFk4cQHJmBeGkHFef/pVogc6kmgVaH4blVoQcPTsCk0pS/TMripd0+rSYA4AZ94ZEppoO0bSP0ErPVDU7pyVZQTKCNADTOGDsWLXl5E4SeCVjYY/WaG516K82EyJO6bjcyWnOsJbHExmvso9p0rDbdxOoMMkfFaZZF5mQMzDZxlgLkEzVDhVg5Lt4m8+50t2Lvkx5QlpdMjz3PAcmoMpzxMOnr2YnU5Bb9Kdc/+UQypssSG9dlMcyUUiQn1yT12vrVe3Ndlw39UrcZHOnNYNoymi3W1+hXR1/cPv6AVj/5JugsUtOKy1kAGc9NyxoUWVuXI22OUZTLzE6i+0Y0ugPIEMAIYIjTVlwYvCs5WgtQgGbKIWZb0KdrdQzhDfuEPsN4Gosm2MsmiVG8Gey0CkmahvZgWlBVxapmGGL+YKWFjV9SrLsqH0iYwvUK64vFYo
Bkz4VoaVH1yI0K3hTYyyce1lvUKelBAMuMrNQsGa4Y4iFtmJMUlUF/4H733QxT7CTOK85s1J77sQpfxR12gxTMbGCIhBJaiWB0lwnzXYJEHkxlh0CMlyodRvNmkpHVDbTTmx+f84CSjKQCGsKezFpQhiK4igqHxLZBuDWB6pYnsisTmDalQjBHqdhh5NrTeA83tXnaOsdIHjE4FlFZej5uIyDZJa5tkkKFxaPjWRrWk8qZbhY6Z171zS3fR50qM5P32NeawQUfuSbf67hUPoqQzRpj9DPMAACRSBIhhUm0+wKxxURY0xEndq9egD2Zjuul7Q9HcNywTvE5PmkZl2FKCfr9Q0HmzYnU0nl5lElAPgRcdyhqlhDT+s2CdaAzJEyjum/Qsnm49bJQiY3Ac1gvaRCEUHY8SIwOGDsEYctSx7VS4zOpZpceRswE2lmaZFUKK2IvgHOJPV7A4V2XI3opmKmum9hVoNRKMubOrGl9JhkeaucabkCv/egyd0dItKQRd5f1ecjricqvBieM4pnQiqS860ogiyew9tfBqz7UAR5752s8OhinQIZjwgTeQwNw2Cejx24jpKOuy7CBZQLywMFy76gSeY0v9d0LMweZFpkoGVLxCEWZkGKMG07fb5u09iNRMIQIViVKdqD8sKISCFBz8/T+UsEJv197EEicBuDyaoRcSryfeP7zWA6H6MEEs93YKXTGkOemzZ3XOq0SCW8JBuam0KUfE0QAsXJvRqjOK+INH1cv6EGw7o/BBa8rl4j5XqzHVd50aYLPCaq540Rui5a5U1RPH6ZC+pjsZkDjkS5wdH71PiaxGCVUFGMGrBn94HFMUK9QrUY4eolYuWY4i1vHaOQDFyLSCxbFG3P7zEOKUJMViyyiCHvCzgKtuMOsV8zC7RawFQ81qBVuRiaRdQAwzBkQAjivGbRdwmFyutSYGrBY4AUSin3d369FCwujXA168s3eM6wdIptCdlFAg/PNJn4Eg0yvEg8o8wZpPE5CSIsB0kGgGIxwr4cdRH1OMh3lHrmh2W62Bni6Hm0hNpY1PUSZB3XTuzI2e0oi/msZqfNq4lWLec6Hf/n1KDnUHBa4lKdcEyTAcj3ueF8HDKEZyzIITH8TLNgIVgZt2GPzwRGrxkaT2r54hy17CD7w5l6QYd3FSgY6ClDYLX/PQshid/G4FmM+jnHnpADoBAzISgjIwRov5QcJYWt02ESaC+jAEjqJWXSNJ/UzLPEYlLFUfYglyIjKLLDjKBUBwPYSamDGmPWNQXyfcDfjdEVA2YRv6q92Y5rbnpjR5rqeM0w8oO6YLLtnMXzSbBufcULnYwDKBUHUoOh1BNiyXhKhApx4saKKGwDGh03v5IBXA1n6rRwR7mx0gTfKiQqbbQH1NhjkDtFshuBFUthWnZGDCdGP3AW5erDkbhawTrbi8SBtIhp1M+QhLAW4zy65NcRDN/kEDZUyDeV3sTle8svxYIZkq4hyEjjMI9u9yGi1mhzssjkybdK1Nhr+i6i/EQzL0lHZDL9E/jQqPFqnz7j5tD/+e4liIjH11i+JmpbwzUmQdaRDMqWBb0+o3FJcSHaGiNMvr8C5/hcrzF7cFeJvOW5VgcctWShYeynNUwRhQWqTLQoiDjUslp8nGvsQZ3BvjONFLL8k6tS1khV/Ryiht1nJep+3+DA4uzx/Fr2ArcFX9RZ5bl0/GK+F0oUQSnpqQ9Rxv04Q6ihdSdmlOq1a+SgWMqN57pPWmrwkTAW5IsxTCHOSgWWDausWAPEPbXll7c333EpVGEwgR5KHc8JrPS8SB3Ti+Z6w5IkRx+WyOlz7HLNn826cLzfFIIIfsp38FYWC883mwugUCU8HUAReWbJmmQKPfkxjZ6PYOhBo6QIcO+HLqzGcu+UZlhGFnJRfFbIMcZQ1L/8VAYncj9ZDB6wI9c/CpjmoKL/3GaLvk5T1ii1L4gSpXEEGVNW5eSutCRN2Drsb55VzrJdMkhMMCvXkpUIsvw
/76fP8OC8p3D+HdUhF9tMgq2PgKCh9gP3T/C7jy7TPK20y9YlzVmCOFK9JlKQ6FLGNUakxu1yEQZiEY1Pzcyifs5eX/C9VWfSGCBkEV24KsGC5CoW0E3q5S9JyqI8PiS1XwgkD1flxwfge5azen/n7BAXIibnxcclgqFDAJN6bzl6SEfp+AB0omqiAsXBEirGVDjLMhEmMqMUQKK6z6ntIZIgM9zKgsBjjDTrGkS9BwBPlCYkabDGcZ04uO9mXHzh6VCzm9iEJSz04de4X8muN9vJDU5QPTgDVy9TlBeVyRUErhmsDOoTHN2woyuHQzJEcpTqOmnxE1JFkv4pjLMwA9JxKTqEUhl8fsjFa9nf9DPIHK5os+MaR5BVfGEEeY5008RgDS6UUAIkVe6kMTnrz+NRFAHW1vDgXk+NNPXm5W+B1G9SgbMgAMlpKb06seVKOLDMDOc1UTkeSXUk1aamIsbldThx0OV5ENSAwiyrKrNSgD+HuPcQwIfaf+iF6Uk+JggKMKitg6sdolL99bwosmEcRnKiwhETKadsy0hfD0hsOYWSSOqDDGVFGDLcqqGU8pTZ2QltfdI0LM3yCpUfGl1SBgETkWYxhqyRggYO5iRA1MypmD5M6sBQ1LyKUSGTKRbvg2zDbNVplK31JM26yu1UucQHXu/GEJMUF5DPo48erbOwrMmUBLUVyZvA4+X+Gp5eYAnoCejDPrmDx9iwtFvrCAtnsHQE+B5+3Lz0d5/bt4fjmkVLB8kYmFJDX+ZyKW+mj8o6Yd1NC7KACZzOV7bltR3gBWPsQaGo6RmexkvLYsEslBqCyswcguckE7KU6z0llRZgtXIr3fcps0pMhOKmTy0J+7N/EpRiLMh6vuvIAKHmf8bmBmPCxEnwraVFcx1NIoMIa6CxNRNIEDAGSosuUEBORUNtbSkJ26rTKusaSR1bp7bqOIeIFAQZYugDAplRECmqccgQmnECt7p0nkB8/CdIgCwMEWDHp8dMHV46hoXqgP1gBXhvMh+l8AJKOoYMgdb8/S1SbVBh224MCarSVojBRxHizUoMgLQNiLahijKrQDTXDwNax1Ot51krrQATPOJyKnAbiYpA4UDQIEzOuWjBRMB5DumKaWARAaASJEG3k39Gr381p/eew54m4cyklJXaRKwBKNIkE0uOLMH88nyxzmm2Q0RwxiLCTuDCzOQ9tA80RSLK1gciOFfD2hpO1pDKRFQmoraU1s/aElYVy8y50MNs1oDvYXYXN373F9mb7bhm0dLe35AXF72RMgHhBW9dnMjL9fagwsAHbe9drnPDXxFdhcg3tRZDFTqpFMIbeyDUyZFMHBKZnC3oTWxucPRFzUgzp7mTp3ig3qBZgNzsdMBpHaISp+zMWoZglJU2H+7+vPNcEAJo4B6b2jgYZ+R8UzpmQJF1EfLIlIJ1WNYOS5KB1mm0wTsgs7TUGVbGcVY8dJLFji8BbeXF1CfMW/7k3OQ4lZC2Qrt7JKQPwX704Sl+551L/riYlU5C1GszTmA9PS5aU0mOy0dseo9OxJfTKJ7AIsxcAyEsK8tRujNYVhY1+Lq3xA4TlhdMlM6bDDvzMsMFkrOfs4j3rBxGOp9MPCMiTbLnAtKO5Ta6b1qbLvdz4kCfc22Dz702+AatYxVrAdOXikwrObB8VRABjmji1CrDFIv8ObSXLaUG+5jbMkrCCtetGe531mHpWtQmYrA5IGE5OYOaAkugDdtCTu47VfIJOIj5l86qjDzyicsLzk1mkEHdm6KRD9oSjBJzmg+IJiYBNgCjMH6siYiReMZWpfUnqTNUKPqU6kmGACDJxhwUdD1gqU90dhwShFJGpPpvHBFLgsih90VelOH2L8X5KAjeCcNitOWOAVCtP5K+qso41jwsD+7kzcP0Z/o8i4kEj2ZEUqfx2picIC89NjHNG6ubIxhVe5g1GZfj4nXhiuK0SmIJkAvvACbUY9I2BP1L/PCDq/Ie0KK7J8AEXeDyHik8G2N
2XGMArnYjrvsRmyHguh9Zw9KXkT9nXEe1TU4rBGBZW3gDEIV0JMiyyjjCyAK9rmBgHgiqDl7nM8dUDhedz3Wbv1+q62n2ZmfXb6jktdNsuMz0Uo/bDc7LyPYRHC2W2RYhwiM7r3Iyeem0JjUqmjs6miAqut1BS/XwPiMB2tQtxJbKbuGMRavBsRDDaJiJdpdO/RXtzXdcBbQCTB1WSf081PVdRomTt5QI/UDs/5GYASFSTNOHoVEuIoxHqtMMhrKyen2UqNb5+wsVtli0Vb0BkKzpwAKuf+MEgCbHQD8j3fTFAgxRoUYMTBSJrNn23BHhqpqtA+pclfp/GKIrshHdByuQnXWg0U7UUkpppb2Bk3qtaEZaCJumBUhuujnlXmHmgHxt6QgZPXI9EYbAWUPjGrSrVm7WQuUk1bFsem/NXLSJWS9Va2LqqwmTskgeNxJiFlz+MK1x04g9M8am9xVnokCUGgtnWwG7YlLAZvC42nl0o0c/8t+cMDMXNSvLL4PlzMAamBGAs7CeEYUh6HGW4ahNO63BxAMkmNk1cPDnvDapryksIuw7GSoCEvk9b4+JY5rUt8rtC4KIBpYBBIsoje7srKJkW4qJqPNSWHGyW0XNMJFb0ifw5qxiQ1NRX0gQRez4nOUG8hgDKGhdtxg31K9nx34Kj+7NJDNFK9Ir2pvtuGaRyjzL0kLy3GVJtn3z2xI+sGmz78eImM0TKDKrxzA0N1HvBpK+ICCUdYoYbVZjLr93CS/uRVOHnFZZN0ABCZY1gPJ3LY6jmOk0WpCtOdtwN0Su8to0nNG4idNSEoSe37KvihshhdDhwDAfUb6RFKabR9DpO4qjigHkMK053WBZ/YKJAoKhwKGc6Iuky9Z7vumtKQgMhbZkGXGXQsjAfiBVmr5/qerxEfgt1IYm15M6ct6nDCfrd1FmWTcG6IQAzrD4XxCChtoYYh17Rl8AAC5OSURBVGopYImhKN8zYCfQuNZR8udmdhz32fFsN2sgUG/BEC1tfj3OZ7wdsAnUWEB8k+t0kklTDo5miMFNwWK5lmmA6EEwUTIu4uNjSAPa6cvLe2Rv/yXo0mtFz6UezywJxde1BU82NjG3xSjKwUhJn1txIvccpv7RUsdRyTEqWlzl6erPg0hfZG+042oXizQUb7vN0zQVh9+n3uYFqLzXYwRuH3/0jmpuekFW0idBkaDjzk3k6CpEABEcefmYajVjQOp+L00XVMuUhsln3Vgcmd1YKXpNLzbTx6ZGDAGwlTiDNkFkMQaGB26CXZQkomPBy99nF3Z5Y/JPJopEMtII2k8L6fMIWj8WEIFek5upA9ejmOYv60vx2QTAkcgUwciCyQtqCdOo+RDRE2AjEIzUI22W0dLFTxs11W+V548j7/3ag2bTmvVVZn+bD9pqO3VcQ8iOI8jiGsCZVum0mIzBPzWzVFNHBWSGJyC1Mc9F/iFEWB/RUeDM0+bF18cIGziQ4CGXMUlyOXUWxkxh4nlWdqj1QY3MNMOaoAtaMy6yraKOrCNB9Bwn5xABIpcJE+U+FftIlHvbIkl7isDgc9mlMtvSTI0D95ytBWL1E82D5szD9OlyLfriTYmA2rAgAZBRHQIAYdHGoUdYX+ZG8F2XWgBUDxJ1KyhLPpavam+04ypNhYZ1EVDGUnmOifJCzjdhLip/Eqwub96oTX6Uotgh8ByblH0FIJJeXPmL6uWgmLX2Z2iBNoIbBi0V+obzG/dQ5gJIpiJsuKJ+5ktfRpqtyfsUSuLz7Gdy0xekkglhxPcp0LC6rb4eQBLhNSFHhEJaIDL7sEQZFYfA4r2A4PBl5Ex78A8BaHT/rOFGMLKJtaoCwrq4+Bi5LjZBS3V/wp5zTKrzaYts+r5ePsOABXs/ikv4SBYbVQrXRlYfScgYfJ324r5InHuIESoFnZT1BeLWb2n3oADeths9zA4YnMEQDELkydmVMYgguMgNrVEWWWsIRrMxICmwJMivvP702p5nWgpbF79PYL5
Sqb6oT02CLa2HBh3jE2fIRyzaXOQ+BPZgSyLDMD0ZhgR1ARPXod9bkYAIvqIMNHPKQTywf52UDmtex9YJ6goLK6LjTIu6rWUA6i4dIwJATctlgXEAJnPJ5J+rDtayX8W+bRzXyxamH12sce9k9SHvzavZaQFPrjdbqKyKNxwZWSIMNCVvzC88QOBE0idpwkgsZ0eFohirKoHpd+M4ayLD76g3bzH0zpOT3pxpkMCsxxnTzveAtyA/k9QqYZXCwSRY7VCWRgakka7aIdjFOs4ED63s+llKh5b3KGXAaOa0ys+h8n3kOxjj4KoWHgZ9yFI75cI1tzTDyCApGZTruEVBMNLM2QC6YNmPKPCifgOQgROn7WwNb6jI/CK8kXYJGatjjWV6uwnoJOvZUYDxEMiLMyWd01aaTgkYQmQ4AXx8WH4rSgMsvw8jvC84DmWWJQjCvH41YSBqtlbAgpOsqszogAnpJjutHMikGnsEoJJiwD5Lt7zei2DRag8qSWYJDkaVsKH1L4ssxSSh280MaqIba/ilAwOIB3GSfA9rULmWs8KCDGOWJyLzVSNs17LrllX3a5nLVn7P72hyxvu0B6efTKc1t9VygW6zRjQGJlJiEpKXZkJI93yxqAHqnJRNlJ2bFTqtjznRJ/DNVWZowjbmG0mzn2jzTSSQnofh0R+y6OgNqswlZ7jJN0aArEkYeQQOOiIAEybSpDg+oznnRaPQ+SvfVxYbQfWACSfvBithUf0cIDc9l9vN63wAM6uMA8IIW7WoRLBXtebmOnGaVTkQ+1hob9kMxgakr06DDf49jdyQjT/s65rGIromg2hHWBmh48nAE8NTg2TbzhDGEEHgXi8zAiHw4tfJPiuMaCiPgC9JVFrjsmQFZsy1rxsXY91ffVASLG6o5U6Cp0Ku6pCjmvTcyWuBXDNSZ1pCwGWd/ZB7PfRVJqgEkKBsdnBKZaeDDksdjGq0HwqaynjnUHlBtw/Q4821d0Z2A2LkiekqAYcqgJolT5BwFUyh0EOuyvWtyRe/uab4IvuOc1xvlAnFW/H6aEyqqYwhYvB6w1Bxk2DWMEyTuTsxckY21zYD+HYwxCrmXDdxsI6HNKqNMBhCRO8DdiP/1JuUoNAkywPZCBHuxGFK/Bym9PvO6tCUXYVsqFxgStMaUvkZB2zSDnCAdJIyMWDakFqyzoLPYzqEkBKDh2sNnLEyvoWbcMvMSzMtJ+fIGRYd1R6X5EilXuJsDRhCCLnu8FEa7a4nCzs8N7pHY7kJVdiYjTXwLjNaF46Dm2XFNHduQg7YDAG70admZHViythUFlxlTWpA5uyN/1lFcomvb1NQvm+qG+UvY3IjeEISZrBfUafSa2leq5I33jtWAdkBayCXPlpq0vNMWWHttK9lX5nup+HMi7MvcVSFcwLKe58OojElDH1YyonfY5AWBp2hVe5nTN8RMMI2RAwI1QJkHMhWrOk4YxYmgovcQ+SL2W3v077ruD7B1h6d7j23AHBxvUnFbMWgDZUQQTaddaWFfiBfyMOMUVCON8jZE6CXdgAwhiASPkwFH0KOLK2JQtXnbvnaEmpDoiLRT5oXkx1yOkAmVSjttliIuHbBE5jJZqLDIZryjRYDJn1o5ZiNMtvTv5dOK4zcpzb2SKLG5bTcEGAALJZnwiw0sDSFw5QJ1zqDhZNj1HWc2cRCyNc4IAjbEjm6TxHxi1KPD8DGr/3vEiSALwKD4hgZ1qksjoMDZ+y1daitSb1c3UhYuIghGCyriMG7RJPvfEAIcXJNGjk+lTGpr6sUa1VV8+TEDu186cDKemeRuR/MqASa81EAgJhry3M71GtnTYbwyh0rW3DK7bX2PvkOxXUYKTBpiMYEle81t5M5fBCSZ5oSUg7W96QU0FRtbqcpoM4kzEui7VlCrmX9mQwHvPL+e5lWDPmaeQX7ruN6A62shb13uQbMFOemQmF4fjPoBRix35ObZvIACAZF/StfeDrl1xf1Ne3m1x6xOkXIBBv
H/THvhe3NXdJ9uQlGiNm5UDTsOEyxjze8300WgZudlz4/z7TGMdN+x2IKdR1ARfGZjEVTL+GcgzP7MKE1BIcA2q3zZOGhKxYC4mkAANcbTc0QMMrFkiHgRxfrDw8uTDCqsioxzZYjFQGH/BkAjINzNZx1GEVvUJ1YJQoLjWOFjEECInVcyhxUMkZl8wBPdViavXBQpsfjOTZzWhMyUJlxAXtqO5M6ldihBnGSulHkG3JPMq7cDsV+37jLmrlpLyRFAJ7Lzs9/abYJsjDu9zfqZkQJ8ia3A9kK1tWSnWbhbZoHcfreRlpMSqi+vJfm+/Ia9l3H9YZbSTR5dLE+uE0ZnZc06rGIbolyk6LCkcDN+DcJnFiOsK8E8qotYaGw19iDxp2MeM+DECcfrNBNkTWp2jhR4FlcfgRgc5SokTIwzbjmjusmB1bcQDE5wsAYXuToVojeeXsgj9CQ2WdlvwpkjDvVO1DN2VO0Fax1qGZjSOZjYSaOPdUT63TyaOhgawNjHXTyMoDU8Hv3QyIc+a/8xpSZeUD941BmKjstY0+EpONqwDn4yIKsSl7ROmkJc+vCrlR/vc7S8Tuwak8YwsX5j4adLmIhh6Y1SfkOqcE9TiGzMsuaZ0tzqG16r0whvEO2D9XP7EDwNCGYAHsQ9149b/4ahbdLUWCpRxmdAE2GhbgLIhZSfW22f2VtWPYlihBBLPZhrtJDMeK5E6VfYN91XN9GNr+5AXFWyAy3zockwdONPmHttYiaWgNAyAKTdQAKE2iUSynyNbKwsO7fwArwfVazoPn8KTGtVSX1ilJPEeCFBphAhXFOpz8A/Uzoy/q3uUXCxBHGML3RSG5CuSHLrJOcyFjNx8QYAxgrwww9qN+JM5PFQm9UkX+iNIKjeJ9SgDg5txEmBsRhywuI7CNvJwLA+JCyrSIoKNU+9uqTNy2spYOT/WeVd1kUKwc0bpLlTD6+eFw6lBK+Ki0gC0TrtIM0bVhltxSC1Toopk4kNXVT7l1DsZ1meNPMY+pA9LszNygTfebHdmJ67GTAaLo2Z/Xe8vnyGplog8o1qJJrsRwtVDoMVa2302tRxyDx8Mx83kux5zS9YXYt7LE1D3zniBuGbr6kfddxfRvZ3ZMVLq55VIBG5UQEr4ytJMPDTmvwzFJLw4VJay95Giq/V/67woGWwDDg2GMivqk3n1oBI0BQpfS7kiu0h0v6wtJiV9yU6WYWmv4e3FAuAgon6vM3kDeAIqsqPw/FIqNZH5AhkCrwoMyh596UyW6IvE3waSJvHPo8kVqzMhkISq7m/pdi4GC6oV0FciPIuQyxFosUwItHtDVw9uDgNfG6tpfJ6j7cAP/sLeJUUBOKxvV0LAsnYoHDiuklnGdcYlhqVqYIQNqdGOGlr6kUI07c7+L9D2U5Nl0b+9skaam5Wnp5TG66/uaB1E313bL+NHOOU3WYHhjH5KjCyAFiHPp8renPmaUBl8ZCJzPDe+6/Mpab+Y1NyiDZoRX6ikGdmJ9CgrOfBzVRS8TkFey7juvbzOa9YCoJpZpmaRZS4IhVHRX30mZSxVyNQaGRykhm1Xegfp1mWE3mV5UOqWzadFOIiYvhVWJveUiDa4jSSCnMRh1aqJi6t3uEjXmk+1yb/T3KYpRgQb0JdQGLQUgS/NlQ51ktDquHxwAax+y0ujXC+irDi8WMJqrb5LiYNlwzNFpGuxqx79j5hc0l4sC1NaoqmOUJ8D1/7GUuj5cy/9XfFFitqPtQjnBIMt90OOew5wSuKhbN8YaMDJjAxJN+Kg1mjGNlFlfDEkOmc4k3QNhuALK+ZyYYJRHZONXcnGSKui9g55oyXNmWnVWYjMApa0YTK+DVvUz1UEBVfM5kn5RMEbygFxwoht02B0QSHMVx4Gtkt2UnJs5sEhRpNlXVMslZn5PpzWkOXm4ijiVC4GpRxbD73zmddxz+2+Q8fzfj+q4dMBt61m5zTmbk8JycsilWadmt42FvtQHXpgrGT2JaAaB
xBA07nqfTb0H9NTBIzQdIN0RMivXVFAoscXPjpObGzkqVFpQwEaXZUmNebcKkENKwxb26ijoc40AQyR7escMHqXBSk5pWub0urrYGXMgyUcC0JlXCOJIpGVchSgNmKYcTx4FZid0a1Hcp4yJXg6oqRcLpHPRdWqj89TV8P4KsgWtr2LOCIPIB2ERLTp3WHA7y4w2vxmS76QJcZAh9h70R98BkYYzGIvqWs2xXAaFOAzS14TtnYvsq+/PG73IwJYQYw0NDixlsyA4rZYhlHVLV0cuhoTc4Y2DqkEnh1tk2ACbZ6IQKP2sNIc9OKoyDXEtbvpbSdZWz+9APiCEg+ABjmYlo6kokz0IKlib7IAETZpmW/j0hCt5ndu8Mgtw7rxqElexNDW5f0b7ruL6NrT06xe7iCaxl+KOpHBYiKqsacTpUkXzPWZTcnMmM4ynFinPPHQUZwDmZuiwXqK2yk9J5U0X2NXdaAEfJZc9LWQBXPcYJ7OgDO7abIMLSysj3kJUJU/k5h6wYe0Ix5pH1MRRah/IYAC0dqF0iiuPCKGKk3RpxGBI7Uf8leKrCXv1MFwFTOZjKgSxnbGbxwda3os6Pet5xM/uOnV8cEANjwmk+m8K90kIAXWiLyQERyItm3cpU4yqxExkmHtLCTwVpoJye7Wyd1CVI20MK1QpjsyhyyoSMAXP9Z6YOSxwU+Z7JRoeINIcyfjJ5ggKQm9nJYJKJksHeBIUSNizYe3Hoc9AjjyH1LK2n7p0qO0MYCkLGbMPktEgzsRImtNPXTM6ffvasfhYDC1mjeO1kEvgr2ncd17e5HRrp3j/+Rv6lLPqW40HkworGIroapFmTvAYQKMfW7DwavhiDOilZSCYzi3ScgWROZQk3NWQWFGegeHzIYR6IcMtFY5IpFAvtXpHcZggMwOFCe/n+Ct2VkXgMSRUkMSGNFQXtCFRLGIV5NtdZkHRzxbDOOMuaSuKG7n+htK3bUNPCnN5B+J//H4T2FP70IRDCwfP+Ihve+Z/8QCn4NFMNEdPnDxJjUg2OYaqENsvvCdradaneV0JZcDVH89YiuorhMVdzTVHH5szhaFcBroXOwKpcCwSSempu/yCwwyJE0eykXGuzbrrox+Lc6iypYTeB6iaTjcV0KGp6j+I6TZkcDlyDB+wQE1CdFgqHNZ95x7WpAFOzc4k+gOxzPk+c08RpzTLfyT7rPlmujc2JIOk9gWnwNSsTvBDSf45913F9B5p79o0cLZXSLMWo85xJ8Gh7XZQnjqFqeaGR189nWB0qPlMMPOKDjCiCFH97Xq1gHvWXqvS6bwdes9eY/JwbJjnS5MDGfWX8+T5o5heRFqn0mbZYpIoiO9VHsKsTYOgRrp/BbNe5FqHyOM0C1C5zPWG+r0pbNg7RNfCuRhT17udmjM+xaGtE10yPT1lLLKDDSevCTAaJho5Vw8mk9gemPx8+lkwi4Cw/KmwKhZnM9Hc57tQsuD4o/2I1IFYNZxNkUNsaQBaqVshQ2YMBLMxtyEwJHCmQk5rS2CVonNs7GOoMswwpLfLPY7KW2z+vDlRmW8IUVAeh18lElcJYntQDILVlCARIkgUdhGVT4MAOqKRKJEZhqcu4t5/KgOWMcQorFlmbKwLYomQQX0Nw93298ktf+hJ+5Vd+BV/+/7d3rcFVVWf7WWvvc04ukEQIyUlQaECLpQRqrWZSp9QZMgHKOFb7wyptsdOBmoZOvdRhcFTUfi2WzvRHO079J51RaeuMlKlTnaKQOGikSmEoUDMmpcZLAhUlCYEk5+z9fj/WZa+9zz5JuCU9sJ6ZwDn7utZ79l7Pei/rfd99F8XFxfjqV7+KX/7yl1iwYIE+5uabb0Z7e3vovB/+8Id4+umn9feenh60tLRg9+7dmDZtGtasWYPNmzfDvUCZgy3Ghpi5SXOgfOBYIim0JCZTFxmLM5XJQ5OXAguvnwqFS0ePVfB9Mb7L/WbBPxYdJNU9OMRIE0dE403aTK0AyAlvjoc
oKcG4TGfD/FjNQ1eAHS/81xzk9QvsgLkZ8EQSmDaqBxcm1zrp30HJWMnCNLea/QME0Y6ekabeWeN1MgeUKAa5SU3AjIQ/T0VdRv2TQCBPz1Chk8kSIDsszufDIvpMBncwNwHKQhC0z0FZQAcFqAHZH5bizR1sVUABD2UsCcy1ZrtFba7I4nzAyCBDOou6quelgky09SE7KkjL9M8ZwTV6oHbdsAYa9xzEwbQcRAgLQGi9Veg0qe0wNyn+93lAWr6ns1Kod50hEZjyjICf0O/vibFAa8JK8/Vzc4KaxSIVcZoyMSfBmrDMceI8cVZM0d7ejtbWVtxwww3IZrN46KGH0NzcjCNHjqC0NLCzr127Fk888YT+XlISRLp5nodVq1YhnU7jzTffRG9vL773ve8hkUjgF7/4xXl3yGJ8MMcBfGNmJP1Suh6WKuiojjcd0HpjZD2MGtRMk5E6N2aWra9lpp8xcgCKtinTpGHyM4lSXksRkZnpw0T0OIo5JwqxlscwL5ptBvKbE82BKLpd/k+qb34W5KbAksGg5JumQfP+Zr+lD1HdX2fdyIrITp0QNwajJz4OfU/OrA3ukSgKXVdVB9CmLyPBsiqvA0CWNZHdg9BiXLdI+IMcVw6WrhhkEZnxQ/4+irjMyEvDZ0KAILtsRmhY0kyqM53kIQ0OiCUfMXkmfEDWEWMyQ7vaIaMFvVEdMavW4Zn+IZZIhjWSGHIKmQQjgz+AwOcVeWbMgAfTBMe4Y/hBpVnV54HW5HNAkpRoKxfvtyZArq+TE9WnCEtmkBHa3Rj+LSCkDZsaGnE3xwITCn/P52+eIM6KuF555ZXQ961bt6Kqqgr79u3D0qVL9faSkhKk0+nYa/ztb3/DkSNH8Oqrr6K6uhpf+tKX8LOf/QwbNmzAY489hmQyGXuexYWDVzIj0G4Yh6/Cjs1CjkAw6zSzXshzRHoYPxiE4eSGNCuzWb5nlHztWwhKqESgrmloW8R4bCi0PiWS0UDfLuZ4j5CTlgcQhOaZ5CX7kqN5qYFdBm3kZC7QPg3jHm5KbuNBVJyhWSmfHvNGw75H43pafl4GbHgAfHQIdHoAdHoQ3pkhuPnC48kX5sDotQBkmQvfl9UBgHD1YGMwynqEYVnORgT6yEOYGvx9JB2OZGqaKGjoJMEyCTC3KPARuQnhG1GDZzYD4pKkVHi358PLZIOIOBUsIINSWFEpeHEpfLcIfrIYkKZORa4eBXE3cfkETejacSrTi7HMQ/uzuCOyS5iDMw+bSSe6NklrVCE/VmR9oqlpqXRkys/HHZAfRPlBmvq0C0Atmo9qYDkmTiOCVV/LD9ZxQfxGZK7B45FJr5SH70ht3cyonzcgyp+I6SMvzss219/fDwCYMWNGaPtzzz2HZ599Ful0GrfccgseeeQRrXV1dHSgvr4e1dXBgsnly5ejpaUFhw8fxnXXXZdzn5GREYyMBLPIgYGB82n2ZQ+/uDzQcLQJy5HRf9JZ7WVlqhxDszBNGYjU1SKxxsUMnQ+90OreUU2HfID7ALnCLGOYSQCEAyyMgV0Ri+6T/J8jMAHFkZeqEBtFtPJwdFvQHkle+oUMv5i6SWMlEDVJmLsiUbFP8Hxfp89ypJ+GAaES6QDAPCELFVTDRwZBZ07B7z8B77P/Cj9RHiQrrwx9H/3kQ20idLir89EF03rZP7XWjpATcq4TMktbHEFkauEOh5sqBcsmBKkok5uXBecueHYEvvSPUDYjcjC6GcCVa9QcDzyZ0P4SRVSsqAR8+hVgpWXwEyWgZLHoQ6IoVMTRB3LyQ+bkDjSCf0K590zCdqSvhsngIzU4K5MbizFhm7IzycgwZUJFX/qR4yGtIo4jFgUr6Ag++R74puaaCGmD6tizWuIbE/Ye3DswzZr3CQjKWO7CeGwF89B6v6hczhLnTFy+7+Pee+/FTTfdhEWLFuntd911F+bOnYva2locPHgQGzZsQGdnJ1588UUAQF9fX4i0AOjvfX19sffavHk
zHn/88XNtqkUElCwJBkMzDU7Id8TDWXgZB1geQjGgfWGGOc9M5aOIyyM1aHBwLn1orpjl61lo1FQWvZfxWbnBfISphJD/5c2XKy7W5GiYc2LNP9H/I4NVfPCIa2TglmuOZBg3OMQsl3EdjaZTZ8lrqskH8z34KkvH8BBodBinnn0cfHoF3Jk14NMrwEvL4J8Zglu/LNwOM50QE+mYdBSnbqcgWEVYOlGzsUZKdxXyseGy6rPjwk3KoqQZNTkalaVBfDDXE9GDrhx4VTSh8tco0kokwUqmS+IqBSstAyVKREBGoiREWqo2HIBzq3IuB16o5Q7GdkFiQZBBjhkszmqgzYKiz+r5ZkxMPNSaRJ2YFhB+XfJ1xWBGpEPKtWYFhM1/ESIDECKjWERD2E3kOTeH1EziNkkrxjRqfh/TNzwOzpm4WltbcejQIezZsye0fd26dfpzfX09ampqsGzZMnR3d2P+/PnndK+NGzfi/vvv198HBgZw1VVXnVvDLZBlrmHWQexDpdLxCN9VMnfg1iY8Ny+JKXiRzAbmdjUL5kxU1dUJb80FrjyXDBjjmmBIlnThpqZmaF3R1yOfJgZEtkd8ejmIM1tq84dajyb6pkrL5ASfQJrYoBZbqxpqkbZwN1j0Kq+jZrDEGHhRKRLXrdSHD237P9DwaWT/+xFY/wlBYrNmw+t8A/7pATjTr4BfVA6UXCECMgDAz4pIT3N2LQdoRQghf1aMHEX3Sfj2WVBby2EOkqlpIqoyOwqfcfnbc3Dfg29oqFxFxOlMDSLKkpdMB1LFIGkaJLdI+GaTJfCYi6yXm8PQzOAeRWjiIrVf5kIQFhPBMczPGgmGw1rFeNYFfWn5e5GOWJRane8CXKwHIy9rrAOMZCUxr6meSScR8pOFgjkA7ZMyAyQABESsJ65BxYOcwBMtqPhgCmEmjPiweISs1Puco3XS5Gtc69evx0svvYTXX38dV1555ZjHNjQ0AAC6urowf/58pNNp/P3vfw8dc+zYMQDI6xdLpVJIpVLn0lSLc4F6yB0XTA1eQJCE1jgmbwJRA8o8R0RaM4qDcNzLoAXHDafgMWdwxr04gzZvqXtos6FM+wMEJqIoacWZCMVNwz4q/dk0ISGIqDOJWS165XIhLJG4kQgACL/YLuNgshGh0hE5bfCkVuFqv5f6PShZChQhXIadO+DTKsTpjgMaHoL32XEdZk9OQmQ/GToBdiaIICW3CEG4cjIwDxpaDIMkVwIcCptsiYAMqWNJV+5W9eNcHqTv8smXhSABzh1QUYk2cyrS4qlinY/Rd1PaF0tuSgeTmFqWqmoQx1W+aqDRVgWPMXDmgjkuHLcomGAo055JXvI38SITFh/xFY8FeTI4senLBGHAyUInkDbMaSr6FkC8hqKegQgxmFYPqAmoYdbU5kqV7SUhCDQ2r2AULHjno5UCQodF3AuhlGATuc8YOCviIiL8+Mc/xvbt29HW1oa6urpxzzlw4AAAoKamBgDQ2NiIn//85zh+/DiqqqoAADt37kRZWRkWLlx4ls23OBcoH1HeB0c9hOoFiE648p2XZ7u6n/It8TwmAj1osxgyNEyP492DS/IyW8M1scT7vvT3ib606nC12Sg1YmYAiYKAIOxa3s8xr2tqfurinIN8BuYWySCWUZCatTOuCcZE6R0b9eeR17eFzUF+MCCLUG+Es367RSEtQWiMTGeiAACuB2gG7geDtUeKuKXWy0RvGAgeF7WqwF2xyFlqNr6UCfOzYMXTtJyJu/CdZBDlqjQsGd2Y8QEy6sMBwTNkmohNs6GJsIEsMCnq0HhwtRQvWP+lYm9ITVoCwlRrxaJtUZXFFYE7msSC9wyeA1BC2l/98B8QEI2JfKY4079q+uBM06Y+VhKm0vomQirGBFJbW2J8WtoffB4ElQ9nRVytra14/vnnsWPHDkyfPl37pMrLy1FcXIzu7m48//zz+MY3voGZM2fi4MGDuO+++7B06VIsXrwYANDc3IyFCxf
iu9/9LrZs2YK+vj48/PDDaG1ttVrVJEGXZDCLOpqzM6gX1Pxsns+D0g5A/IOuXjhp0tNOf7Exp1RE6FQWGfZjfGVAQFbq/OjM34SvospkJMFYvi/zvvp6+Q5BuF/aDciAaG0p1QdNzjGz+rg2ERM5DH01U84mwhGebiq/DwOAO2s2/GFRq800J3r/fjsw5ajBzHMEMarPUg6uNg2L+whtg4kgDRaUePdZ8CMJV5cgFkZiMbDnA4wzUXKEcTBPBm6YQRGmGVom1lWalQ4KyfjifjHajY+AvMyq07pCeMzEiYx/A19nuBaYE/lxzNp24f8pFCzEJGk5vqrcTLICAwfnSUGUUgsL+Z4B8dsY2nXIzKwnO0HDcqJaVf+UeVOt09M75LMnEwdrUyX58aRjTiqjf8Y11Z9+zy4weTGKxhKPdXAeY/EzzzyDu+++Gx988AG+853v4NChQxgaGsJVV12F2267DQ8//DDKysr08e+//z5aWlrQ1taG0tJSrFmzBk8++eSEFyAPDAygvLwc/f39oetaTAzDQ4NBlgozMEOahQj5K7+ql5nDyHMIBLZyhVgizE8A+jR9n2BbNPQ9avaLOzYaVaavb0SXRc/P0bryEFfOWzCG5qrOy0e6yucTjWTTPhQZgu5ROCJTyx0BYZeWFMc2I7P/ZdkIH4nrVwXbj/1b+HWMCUx0Zh7bP2YEKTAOOEkdbTjq+dq0mFXdYaIgZNKRz4uxADgffBgkJf/P+kGxyZA2DfEsKk2XsdxnQsiJtKY0EYgJhir1I82jhual+ugRYVRWb/YJyBhBTbrUPYckq+Bzwvisrh83IdS/T0STCgVA5CMPEyxIRKz6YL5PjvJ7S/LSyyFi3utQtpSY+5q+vJySR5KYBwYGUPmFG85pHD8r4vpfgSWu88fI4Gdh4pIDpPLZqJljVOMKBnuzjLpBXgoGaXl5nrBcTU6eGjnOHPxz2xE+3tTMzJlv6D6IJ664e5v3D+03Z8CGOScU2BByXIeDONS9QzXNlINdpd2SJT0EIVDIf+Mov5hsHGPAFUZJm4lg5LNj0Als8y1BiPNJmM54ZSYygjii2dkB6FpuaohTxCR+J7nN0J5Ms5u6XkZeTIXfq+uGBn6DvHR/CIb/KZf48kG1Ne66qm0ZWcl5JEvI+D4yksAUHAYkOEfCYUg4PERWiRChBUE5IQKLmAwBBD63PJND3X7jfTLlqt7HuAmAei+cPPdWkxpzggsE0bw55xvZ96NJBgYGBzHr6sXnNI4XZI4lxbV2Pde5Y2RQyC6ftpU1BpWoaUXNPGPJS8IkkLOZ4Y6FOOLSM0QTUcc5hU03AEKzzLj7R/sS2hYx8UnvfDDIwxj8VeSV1EqUfFXWewe+yIXnZ8FU7jcIZzoli+GBY9QnjGaDkHmTrJUpxuEM/f0DcDlDWWm85qUwMHQGSbV4iBlrlyIQPjDP6FfMcE8+wFS5GifIRQeEyEb0SsAH4BvaVNYgOfNZC4hLaFsZQ+NwmCACBkGIauCP8y9KTy08XzwLBBh+r/GfOwA5EZSEoKr4qOfjTMZHRrYxK5mBywunHA7OGIochqTLtdYliIvBNUiMx2lfUfIwJxDcHfNdyzuxo3iLhOqrk2dip5dEIPgNgWAyoc5xOQvGBUVgkedp8HRWnnv2ulNBEtfg4CAA2JB4CwsLiwLH4OAgysvLz+qcgjQV+r6Pzs5OLFy4EB988IE1F8ZArXWz8omHlc/YsPIZH1ZGY2M8+RARBgcHUVtbC36WuQsLUuPinGP27NkAgLKyMvvQjAErn7Fh5TM2rHzGh5XR2BhLPmeraSmcHc1ZWFhYWFhMMSxxWVhYWFgUFAqWuFKpFDZt2mQXLeeBlc/YsPIZG1Y+48PKaGxcTPkUZHCGhYWFhcXli4LVuCwsLCwsLk9Y4rKwsLCwKChY4rKwsLCwKChY4rKwsLCwKCgUJHE99dRT+NznPoeioiI0NDTkFKa8XPDYY4+BySJ16u/aa6/V+4e
Hh9Ha2oqZM2di2rRp+Na3vqWLdl6qeP3113HLLbegtrYWjDH8+c9/Du0nIjz66KOoqalBcXExmpqa8N5774WO+fTTT7F69WqUlZWhoqICP/jBD3Dq1KlJ7MXFw3jyufvuu3OeqRUrVoSOuVTls3nzZtxwww2YPn06qqqq8M1vfhOdnZ2hYybyTvX09GDVqlUoKSlBVVUVHnzwQWSzWVwKmIiMbr755pxn6J577gkdc74yKjji+uMf/4j7778fmzZtwj/+8Q8sWbIEy5cvx/Hjx6e6aVOCL37xi+jt7dV/e/bs0fvuu+8+/OUvf8ELL7yA9vZ2fPzxx7j99tunsLUXH0NDQ1iyZAmeeuqp2P1btmzBb37zGzz99NPYu3cvSktLsXz5cgwPD+tjVq9ejcOHD2Pnzp260ve6desmqwsXFePJBwBWrFgReqa2bdsW2n+pyqe9vR2tra146623sHPnTmQyGTQ3N2NoaEgfM9475XkeVq1ahdHRUbz55pv4/e9/j61bt+LRRx+dii5dcExERgCwdu3a0DO0ZcsWve+CyIgKDDfeeCO1trbq757nUW1tLW3evHkKWzU12LRpEy1ZsiR238mTJymRSNALL7ygt/3rX/8iANTR0TFJLZxaAKDt27fr777vUzqdpl/96ld628mTJymVStG2bduIiOjIkSMEgN5++219zMsvv0yMMfroo48mre2Tgah8iIjWrFlDt956a95zLif5HD9+nABQe3s7EU3snfrrX/9KnHPq6+vTx/zud7+jsrIyGhkZmdwOTAKiMiIi+vrXv04/+clP8p5zIWRUUBrX6Ogo9u3bh6amJr2Nc46mpiZ0dHRMYcumDu+99x5qa2sxb948rF69Gj09PQCAffv2IZPJhGR17bXXYs6cOZetrI4ePYq+vr6QTMrLy9HQ0KBl0tHRgYqKCnzlK1/RxzQ1NYFzjr179056m6cCbW1tqKqqwoIFC9DS0oITJ07ofZeTfPr7+wEAM2bMADCxd6qjowP19fWorq7WxyxfvhwDAwM4fPjwJLZ+chCVkcJzzz2HyspKLFq0CBs3bsTp06f1vgsho4JKsvvJJ5/A87xQhwGguroa77777hS1aurQ0NCArVu3YsGCBejt7cXjjz+Or33tazh06BD6+vqQTCZRUVEROqe6uhp9fX1T0+Aphup33POj9vX19aGqqiq033VdzJgx47KQ24oVK3D77bejrq4O3d3deOihh7By5Up0dHTAcZzLRj6+7+Pee+/FTTfdhEWLFgHAhN6pvr6+2OdL7buUECcjALjrrrswd+5c1NbW4uDBg9iwYQM6Ozvx4osvArgwMioo4rIIY+XKlfrz4sWL0dDQgLlz5+JPf/oTiovHLiZoYRGHb3/72/pzfX09Fi9ejPnz56OtrQ3Lli2bwpZNLlpbW3Ho0KGQz9gijHwyMv2d9fX1qKmpwbJly9Dd3Y358+dfkHsXlKmwsrISjuPkRPEcO3YM6XR6ilr1v4OKigp8/vOfR1dXF9LpNEZHR3Hy5MnQMZezrFS/x3p+0ul0TqBPNpvFp59+elnKbd68eaisrERXVxeAy0M+69evx0svvYTdu3fjyiuv1Nsn8k6l0+nY50vtu1SQT0ZxaGhoAIDQM3S+Mioo4komk7j++uvx2muv6W2+7+O1115DY2PjFLbsfwOnTp1Cd3c3ampqcP311yORSIRk1dnZiZ6enstWVnV1dUin0yGZDAwMYO/evVomjY2NOHnyJPbt26eP2bVrF3zf1y/g5YQPP/wQJ06cQE1NDYBLWz5EhPXr12P79u3YtWsX6urqQvsn8k41Njbin//8Z4jcd+7cibKyMixcuHByOnIRMZ6M4nDgwAEACD1D5y2jcwwmmTL84Q9/oFQqRVu3bqUjR47QunXrqKKiIhShcrnggQceoLa2Njp69Ci98cYb1NTURJWVlXT8+HEiIrrnnntozpw5tGvXLnrnnXeosbGRGhsbp7jVFxeDg4O
0f/9+2r9/PwGgX//617R//356//33iYjoySefpIqKCtqxYwcdPHiQbr31Vqqrq6MzZ87oa6xYsYKuu+462rt3L+3Zs4euueYauvPOO6eqSxcUY8lncHCQfvrTn1JHRwcdPXqUXn31Vfryl79M11xzDQ0PD+trXKryaWlpofLycmpra6Pe3l79d/r0aX3MeO9UNpulRYsWUXNzMx04cIBeeeUVmjVrFm3cuHEqunTBMZ6Murq66IknnqB33nmHjh49Sjt27KB58+bR0qVL9TUuhIwKjriIiH7729/SnDlzKJlM0o033khvvfXWVDdpSnDHHXdQTU0NJZNJmj17Nt1xxx3U1dWl9585c4Z+9KMf0RVXXEElJSV02223UW9v7xS2+OJj9+7dBCDnb82aNUQkQuIfeeQRqq6uplQqRcuWLaPOzs7QNU6cOEF33nknTZs2jcrKyuj73/8+DQ4OTkFvLjzGks/p06epubmZZs2aRYlEgubOnUtr167NmRReqvKJkwsAeuaZZ/QxE3mn/vOf/9DKlSupuLiYKisr6YEHHqBMJjPJvbk4GE9GPT09tHTpUpoxYwalUim6+uqr6cEHH6T+/v7Qdc5XRrasiYWFhYVFQaGgfFwWFhYWFhaWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgqWuCwsLCwsCgr/DyFEGMryrtmOAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.imshow(ugos[30],cmap='RdBu_r')\n", + "plt.clim(-1,1)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "fbb2ad80-31bd-4d96-bf32-c0766477edb7", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[]" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAGdCAYAAADuR1K7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAACpSklEQVR4nO29eZwcdZ3//6q+p+c+MjMZMrkxByQcQUIAFZcsCbAKu6wrbhTMIvxAsoJxAVEOBRUUl0UUZUVQ+ArisguIVyAGAyIhkUCAQBISch8zk8zVM93Td/3+6Pp86lPVVd3V13R1z/v5eMwjk56a7uqeOl6f9/v1fr8lWZZlEARBEARBVAmOcu8AQRAEQRBEMSFxQxAEQRBEVUHihiAIgiCIqoLEDUEQBEEQVQWJG4IgCIIgqgoSNwRBEARBVBUkbgiCIAiCqCpI3BAEQRAEUVW4yr0D5SCZTOLw4cOor6+HJEnl3h2CIAiCICwgyzJGRkbQ1dUFh8M8PjMhxc3hw4fR3d1d7t0gCIIgCCIPDhw4gClTppj+fEKKm/r6egCpD6ehoaHMe0MQBEEQhBUCgQC6u7v5fdyMCSluWCqqoaGBxA1BEARBVBjZLCVkKCYIgiAIoqogcUMQBEEQRFVB4oYgCIIgiKqCxA1BEARBEFUFiRuCIAiCIKoKEjcEQRAEQVQVJG4IgiAIgqgqSNwQBEEQBFFVkLghCIIgCKKqIHFDEARBEERVQeKGIAiCIIiqgsQNQRAEQRBVBYkbIm9e292P3719uNy7QRAEQRAaJuRUcKJwZFnGpT99DQAwpdmPk7ubyrtDBEEQBKFAkRsiL4bHYvz7Vz84VsY9IQiCIAgtJG6IvDg8FObf7zsWKuOeEARBEIQWEjdEXvQExvj3gXAsw5YEQRAEMb6QuCHyojcQ4d+TuCEIgiDsBIkbIi9GBEETGIuXcU8IgiAIQguJGyIvRsOqoKHIDUEQBGEnSNwQeTESUcWNWDlFEARBEOWGxA2RF0FB3ATGYpBluYx7QxAEQRAqJRU3L7/8Mj7xiU+gq6sLkiTh2Wefzfo769evx6mnngqv14vZs2fjF7/4Rdo2DzzwAKZPnw6fz4fFixdj06ZNxd95IiOjgrhJykAwmijj3hAEQRCESknFTTAYxEknnYQHHnjA0vZ79uzBhRdeiI9//OPYsmULrr/+enzhC1/A888/z7f59a9/jdWrV+P222/HG2+8gZNOOgnLli1DX19fqd4GYcBIWGsiDkXIVEwQBEHYA0kep3yCJEl45plncPHFF5tuc9NNN+H3v/89tm7dyh+79NJLMTQ0hDVr1gAAFi9ejA9/+MP40Y9+BABIJpPo7u7Gv//7v+OrX/2qpX0JBAJobGzE8PAwGhoa8n9TE5h/+vFf8cb+If7/P//HOZjRVgsAeO9wAL94dQ9uWDYXk+q9ZdpDgiAIotqwev+2ledmw4YNWLp0qeaxZcuWYcOGDQCAaDSKzZs3a7ZxOBxYunQp38aISCSCQCCg+SIKY1QXqWEeHFmWccH9f8H/vH4QP16/qxy
7RhBlYTQSRyRO6VmCsAO2Ejc9PT3o6OjQPNbR0YFAIICxsTEcO3YMiUTCcJuenh7T573rrrvQ2NjIv7q7u0uy/xOJkM5jw8TN0VG1ud/hoTEQxETgzzv6cModL+D8+/6CeCJZ7t0hiAmPrcRNqbj55psxPDzMvw4cOFDuXap4IvHUBdzlkACoYufYSJRvo4/uEES1suadHsQSMnYfC+K9IxQZJohy4yr3Doh0dnait7dX81hvby8aGhpQU1MDp9MJp9NpuE1nZ6fp83q9Xni95P0oJpFYSsw013pwdCSCYDQlZPqDauTmwABFboiJwfbeEf795n2DWDilqXw7QxCEvSI3S5Yswbp16zSPrV27FkuWLAEAeDweLFq0SLNNMpnEunXr+DbE+BBWIjettR4AQCiSEjv9o2rkZjAYTf9FgqgykkkZ7/eo4ubgIIl6gig3JRU3o6Oj2LJlC7Zs2QIgVeq9ZcsW7N+/H0AqXXTZZZfx7a+++mrs3r0bN954I7Zv344f//jH+J//+R98+ctf5tusXr0aDz30EB599FFs27YN11xzDYLBIFauXFnKt0IIyLKMqCJuWhRxw1JQxwTPzWg0jmSSmvsR1c3+gRDGYqoHrTcQLuPeEAQBlDgt9frrr+PjH/84///q1asBAJdffjl+8Ytf4MiRI1zoAMCMGTPw+9//Hl/+8pfxgx/8AFOmTMHPfvYzLFu2jG/z6U9/GkePHsVtt92Gnp4enHzyyVizZk2ayZgoHcxvA6TSUgAQUtJSgyE1WiPLQDAaR73PPb47SBDjRDSexF92HtU81heImGxNEMR4UVJxc84552Rsy2/Ufficc87Bm2++mfF5V61ahVWrVhW6e0SeRGKquGFpKdahWN/cbzRC4oaoXlY98QZeeC/lATy+vQ47+0bRO0KRG4IoN7by3BCVAevl4ZCAxpqUcGGl4KM6caMXOwRRLbzfO8KFDQBcduZ0AMCxEYrcEES5IXFD5AxLS3ldTtR6U8G/oGIo1pd/k7ghqpV3Dw/z7797yQKcf2KqYjMYTSBBXjOCKCskboicCSvmSa/bgVqPE4DquWEl4QzqdUNUKzt7RwEAnz1jKj794amo96lZfjruCaK8kLghcoZFbnxC5IZdzEcj2s7FI+HY+O4cQYwTu48GAQCzJtUBSEUyPa7UJZWOe4IoLyRuiJxhnhuv2wG/JyVuWIfiUeWiXuNORXTGotZm7YzT/FaCKBqHh1P9bI5rquGPNSjRG0rHEkR5IXFD5Ew4xjw3DtR6UyKGGYqZ94b1vwnHs8/Z+dKv3sSSu17EWweGSrC3BFEajgynqqK6BHFTp4tkEgRRHkjcEDnDIjc+t2AoZp4b5aLeVpcSN5FY5sjNwcEQnnvrMHoCYXzr9++VapcJoqhE40nesLKz0ccfZ20PKC1FEOWFxA2RMxExcsPSUpEEZFnmIodFbiJZIjev7urn37+5f4iLI4KwM30jYcgy4HE6eK8nANxUTGkpgigvJG6InAkzz43LCb9SLRWMxjEWS4BVwLbUpgaVZhM324WZPPGkjD3HgiXYY4IoLmyGWmudB5Ik8ceZuAmQuCGIskLihsgZFrnxuR3cYxCOJREYS13QJQlo9ruVbTOnpXb0BjT/39tP4oawPwPKUNgWIWoDqGkpfTNLgiDGFxI3RM6ITfz8iqEYAI4qnVlrPS74lGqpbJGbAwOpipOpLX4AwF6K3BAVQL+puGFpKfLcEEQ5IXFD5Axv4udywON0wOVIheX7lJk6tV4nvEq/D2Y+NkKWZfQoE5QXTWsGAP5/grAzA8GUkG/VixsveW4Iwg6QuCFyhkdu3E5IksR9N2LkxutWxE3MPHIzFIohqjzXvMn1ANRwP0HYGTVy49U8TtVSBGEPSNwQOcOb+CnRGea76WPixqumpcIZIjcsStNS60FHQ6qclsQNUQkMjDJxo514T9VSBGEPSNxUKC+824Mf/GlnWTr78iZ+SnTGz8VNSqz4PUJ
aKkPkhkV62uu9aFVWwCRuiEpgIFvkhloaEERZcWXfhLAbsizjqv+3GQCweGYLzpjZOq6vz5v4uVLRGTY8sy+QEit1Xhe8ruyG4sFQ6gbR7PdwY+ZAkML5hP3JbigmcUMQ5YQiNxUIi3gAwGAZIh0RXeSm1iAtZcVQPCjcINhNYjAUpTlTE5A/vdeLW5/dmvF4sRNMmLfWacUNG0cSipK4IYhyQpGbCuT93lH+faAMxkWxFBwAH57JDcVep+q5yZCWGgil9r251s1XvImkjLFYgj8nMTH4wmOvAwCOa67B1R+bVea9yY7qudGKmxp36ri1OjCWIIjSQJGbCkQslxajOOMFKwX38ciNkpZipeAea5GbISEt5fc4wRq90tDBicVwSBXo7xwaLuOeWCMaT3JPTYtfJ26UFC2JG4IoLyRuKhBx/lI5xI1Z5CaWSKWT/F6hFDyD54aZMpv9qRb24pwqYuKwo1cdwdFXAX2OxGhpQ422Woq1RRjL0pmbIIjSQuKmAgkK+fyBUDnSUvpScKfm53Vep2oozpCWGh5L7XujcoNgESCK3EwsWEM8ABgsw/GcKwHluK33uuB0SJqfsXRsPCnzHk4EQYw/JG4qEDFyEy7DCjHMZ0tpIzcMv8fFU1aZ0lJsuCAXN8rz0GTwicWQIGjKYZDPFXbc6qM2AFDjVoU+RW8IonyQuKlAgkLaphziRk1LaT03DLEUPJOheIStgBUzMau6ClKlyYSCRfCAVBVSMmnvarmA7rgV8bjUcSTkuyGI8kHipgIR0zaZ0j6lQp+WYqKEoS8FNyvtZt6FhrS0FN0UJhJDgrhJylqxY0f0x60eFr2hyA1BlA8SNxWI2EMj03iDUhHRpaVqdWmpWo/quUnKKf+BEfrwPhvjQGmpiYVezPTbPDUVGFOOW5+JuPFQrxuCKDckbiqQ0bKnpZTIDRu/4NGmpWqFaqnU9unRpXAswQ2XaWkpEjcTimGdidjuhnI1cmPci4mJm3KcmwRBpCBxU4FoDcVlSEvFtKXgdbq0VGudh6elAOOLPLtBSBJQp0R+eDifvAoTCn0jypDdxY0SaTKN3PDjmKqlCKJckLipQMSbf1mqpeLaJn7Nui6tbXVeSJIEj8u81w0L7dd7XXAoBkwrk8SJ6kN/DAdtLm6zem4oLUUQZYfETQUillePt7hJJGXerI9FbiY3+vjPPS4HFynqZPD0fRwJs4oT9QbBUlnliEYR5SOkEzN2T0uyoZgNBtVSABmKCcIOkLipQMRISHicG4WJjcmYeGkUVrBiGS8TOYaRG4NeITVu8ipMRJgIaPKnjgW7twLgaSmTyI2fRjAQRNkhcVOBiGIhGk8iUYS+IM+9dRi/2XIo63ai8GDiRpLULq0+oYkZ+7mh54b7FtTVr5Vhm0T1wURAW50XgP3Hb3BhbuK58VHkhigBdu//ZDdI3FQg+jRPpi7AVjgyPIYv/epNXPfkFhwcDGV+bUVYuRwSXE718Pn8mdNR73XhoctO44+p4iZdrLDQvpiW8rHtyXMzoWAioK0u5d2qnMiNcVrKzz03dBwTxWHdtl6c9M0X8OBLH5R7VyoGEjcViD7NU2ik48/bjwrf92V5bW0DP8Y3PnkCNt/691gyq5U/xjw50YRRWir9BsEjN3RTmFCEdJEbu3tu+LGbpVqK0qtEsbji0dcxEonj7j9uL/euVAzjIm4eeOABTJ8+HT6fD4sXL8amTZtMtz3nnHMgSVLa14UXXsi3+fznP5/28+XLl4/HWyk7siyniZtCB/TtGwjy73f2jWbcVj9XSsSjEzzs/0b7Z1ROS9VSE4+EMGCSixubi1tW6ddoWi2lTLe3+fsgKgO9SO4bCZdpTyoL47hqEfn1r3+N1atX48EHH8TixYtx3333YdmyZdixYwfa29vTtn/66acRjaodSvv7+3HSSSfhU5/6lGa75cuX4+c//zn/v9frLd2bsBFGUZCYwWO5IDZROzCQLS1lHLkxQhzBoCcQTp/
PQ56biYfoS2FpKTv3uYnGk3yfs/a5ocgNUQT29gc1/z8wEEJ7vc9ka4JR8sjNvffeiyuvvBIrV67E/Pnz8eCDD8Lv9+ORRx4x3L6lpQWdnZ38a+3atfD7/Wnixuv1arZrbm4u9VuxBUbVSoWKG3Eq88HBsYzb8qGZBpEbPWwbo/lXwwarX5/b3IBMVCfMTCxJar8kO0duRoSGg3UmpeBULUUUk3392gXn4SGK3FihpOImGo1i8+bNWLp0qfqCDgeWLl2KDRs2WHqOhx9+GJdeeilqa2s1j69fvx7t7e2YM2cOrrnmGvT395s+RyQSQSAQ0HxVKmJKio0rYH1n8mVoTI2UHRwcMx10CajCw0rkxqMYjo2iTWyekFbckFdhosEEQI3byTtd27n5HauUqve64HRIhtv4SNwQRWRQN2utZ5jEjRVKKm6OHTuGRCKBjo4OzeMdHR3o6enJ+vubNm3C1q1b8YUvfEHz+PLly/HYY49h3bp1+O53v4uXXnoJ559/PhIJ44vJXXfdhcbGRv7V3d2d/5sqM0zceFwOuJ2pi2sxIzdjsQQGMgwu5KMXLEVuzJv4ZRY3lJaaKDB/VY3byQew2nkqfLYeN4CalgqRSCeKwKBu9trh4czRdSJFyT03hfDwww9jwYIFOP300zWPX3rppfz7BQsWYOHChZg1axbWr1+Pc889N+15br75ZqxevZr/PxAIVKzAiQiRE3eGyEgu6KcyHxwcQ2udsYeJp6WseG4y7F/AUNyYe3SI6oRF6XxuJ/xeRRTY2HNj5BXTw9JSVPVHFIPBkHaxOaQTO4QxJY3ctLW1wel0ore3V/N4b28vOjs7M/5uMBjEk08+iSuuuCLr68ycORNtbW3YtWuX4c+9Xi8aGho0X5WKKi6cPO0TK7BaigkNVq2SyXcj3oyyoUZuMqSl/Kq44WJtnLsuE+VDFMu1FVBlxCqlrEVu7CvSiMqBpaW6W2oAqNdrIjMlFTcejweLFi3CunXr+GPJZBLr1q3DkiVLMv7uU089hUgkgs9+9rNZX+fgwYPo7+/H5MmTC95nuyPeDJgYiBfQuVKWZT7C4fj2OgDAgQyN/HKJ3Jh5bmRZNkxLcbFWoIeIqBy4h8vtRK0SubFzE79sPW4AdXAmeW6IYsDSUtNbU77TQJjEjRVKXi21evVqPPTQQ3j00Uexbds2XHPNNQgGg1i5ciUA4LLLLsPNN9+c9nsPP/wwLr74YrS2tmoeHx0dxQ033IDXXnsNe/fuxbp163DRRRdh9uzZWLZsWanfTtnRpKVcKc9NIWmpWELm4xuO70iJm0xdinMqBTeZLRWMJvhrasSNqzhpNqJy4B4ulwN+JXJj5yZ+2boTA2ITPzqOicIZHmORG7/yfxI3Vii55+bTn/40jh49ittuuw09PT04+eSTsWbNGm4y3r9/PxwO7Y1yx44deOWVV/DCCy+kPZ/T6cTbb7+NRx99FENDQ+jq6sJ5552HO++8c0L0umFRDY/LAZej8LSU2DBvdjsTN5nSUuZN/PSYTQVnJ6fbKfEbQer/qe0TyZTgMqtGIaqHsCCWxeq/aDyZ1hTSDliJ3KjjF+wr0ojKgRnsj2tiaSk6rqwwLobiVatWYdWqVYY/W79+fdpjc+bMMS1HrqmpwfPPP1/M3asoWGWUx+UoShqHpQUkCZjZll3c5BK5MUtLsaaBjTVuzdBN8WYWSyThdGQXUERlExHEMhMFQCp643F5yrVbpljx3NDgTKKYMJE8uTHVuI8iN9aw39KIyAgTCm6nmpYqpBSc3Vxq3E50KidPpj4KoqE5G2aG4mGTclpW2g5QamqioPeQMUFsV2GgRm4sVEvFkjTJmSgYlqad3JiK3IzFElR0YQESNxVGjIsbiadxChE3Y0L1ExM3o5E4Rk18D2q1lPXITUQfuTEwEwOAW0hPFloBRlQG+uo7dlzZtWLKUp8bIQJFc9KIQmHX4o4G1XZBZvXskLipMGJi5KaIaSmfy4E6r4t
3ie0NGEdvcovcGI9fYDeIJt0NwuGQhMaEtOKdCOir75ip2K4Xb9ahOJPnxiecG3YVaURlkEjK3OfYWOOGS/EhUpuB7JC4qTBiccVQ7CxOh2K9QZitDnpNUlNqh+ICPDcmkRuAet1MNNRScCZu7O1XsVIt5XBIPAJlV5FGVAZiW4Rar4vaDOQAiZsKI2oYuSlOWgoAOhoU341J5IaF2X2WSsGNq6XYLKsmf7phtFhdl4nKgEVuWLSDHYd2rTSyUi0F0GRwojiElEopp0OC1+VQG0SSuMkKiZsKg0U03EUav6D30HQq4qY3EDHcPqfZUi7jPjd9ynNPqk8v3fcUadI5URmYRm5sevEOGEyzN8JfAd2WCfvD/Da1HickSRLM6nRcZYPETYVhZCiOF8NzwyI3jUzcmHlucigFdxmnmI6OKuLGYH6Vh9JSEwp95KbGxmmpaDzJ9ytb5Ka2AuZkEfaHRTBZDygfRW4sQ+KmwuB9bpwOeIriudGKGxa5MSsHj+TTxE9XMcIjNw3p4qZYk86JyoCLZSVyY+ew+4jQ9r4uQyk4oEZugjZ8H0TlwCKY7LywuyfNTpC4qTCiSpTG7XTAVYS0VFRXrcINxSNFjNzo9q9vJEPkhkYwTCjU8Qu6i7cNRQGrlKr3urJ2z+aRG5t6h4jKgM39YzYAMhRbh8RNhWFYCh7PPy3F0gLsuSbVpyI3fWaem1xKwfn4BVWohGMJ9AdTz806bopQtdTEgoll5vmqYaXgNlyZWulxw2CRG7N+UQRhBb0nssZt3/PDbpC4qTBi3FAsFSUtJc6qAoB2xeR7dCRiOAIjlyZ+XoMozMHBEGQZqPO60FJrXi1FfW4mBmGTyI0d01KsUqo+S0oKSBlAAbXahSDyQe1Dpo3c2PH8sBskbioMJmS8QuQmniw8LcXEDatgiiaShjNMcovcpDfx29efmjg+tcWvmSvFoGqpiUVa5IaVUNswnTPK0lJWxI2XeW7s9z6IyoGPx1FEjd9N1VJWIXFTYYieGzevRso/yhFNpE4SVqXkczt5mSvzxohwcZND5EY0FO8fUMWNEVQtNbHQR27sXC3Fy3K91sUNrbCJQginpW3Jy2UVEjcVBvfcuIrTxE8fuQFUU7GR70YfJs0Ee86kDMSVfdx7LAgAmN5Wa/g7rFqKDMUTA71B3c5pqWAO4oa9jyB5bogCMEtLjUXp+pgNEjcVhtZQXETPjVM9FNqZqdigYiq3yI0qgNjv7VXSUtNbTSI3lJaaUET01SBu+1aDsLLuOo8Vzw1FbojCYSIm7fyg2VJZIXFTYah9boozFTxiELlhpmJ9WiqeSCKRTImhXErBATVCtK8/FbmZ1moWuaG01ESCdyh2acPudkxLjYRziNx4KXJDFI4+LWXnVgl2g8RNhcH8NcWaCm6UlmLN9fRdisOC4LDSxM/pkPgU20g8iVgiiYODYwCAGSZpKU8RBBtROfAOxbxJmX0jHkyo1HmzH/u1HjIUE4Wjb7JKHYqtQ+Kmwih2WioqdDxmqGkpbeRGHIApbp8JcQTD4aExxJMyfG4Hjw5l2p6oftIiN3ZOS+XlubHf+yAqB2a49+mbXNowsmk3SNxUGKKhuBhRjpgwiJPRVpfqP9M/qhU37ITyuBxwZOnQymA3rXA8wf02U1v8pr+vDgOlPjfVjizLaR4uO6elWLVUttELQKqPE0BVLURhRNKa+NlX/NsNEjcVhui5cRVBCESFvjmMNmUsQv9oVLMtO6HY6sEK4snYp6S5JjfWmG5PhuKJQzSRBOsT6dPNzrFj2J2lmOoseW6UtBRFbogCYJ4bJvrtLP7tRvazlLAVUWFcAptvEysghWPkuWHi5pgucsNuOLUWqkUYYkfN/mBKLLXWpXcmZpCheOIQEf7G6Wkp+0U8RiPWj/9a6kdCFAF9WooiN9ahyE2FITbx85SgQzGgio/BUIz3pwHUlWtNDpEbP58VFMcxxcPTZjAwk1GMkRJEZcC6r0qS6uESPQV
G4z/KSU6eGy9NBScKh4kYL6+WotlSViFxU2FoDMWuIlRLCc/HaPZ7wCwxA0E1NZVXWkpoOsUjNwYzpRiUlpo4iGZiNoqDHS9JWRvZsQNqtZT1yE1UqRIkiHxQS8FZWip1fbRj2tZukLipMLjnxiUVJYVjFLlxOiQ+1PKY4LsJFeC5CUXjPM3VmiFyw96T3W5sRPHRl4ED6vEC2C/0Psr73GQ//v1C6oqGZxL5wtNSulJwcaQNYQyJmwojJnhuWA+ZYpSCs7JyRmutYioOqr4b5h/w5+C5EdMMzKDcZsFzQ1PBqx99GTgAuIR0q51C77Is52Qo9rjUVg2j5Lsh8oRXSynnCB9GHE/aLm1rN0jcVBgaz00RUjhxgz43ANBWzyI3orjROvetoDUUW/DcsPdEkZuqx2zCfI0NK6bGYgkozbktlYIDwmRw6lJM5Im+iR/z3sgyLQCzQeKmwlDTUmqH4ngBBzk7QVw6ccMjNwZpqdqcDMXKjSoS58+VqVqKTwUnn0LVo+/hwWCpqbCNIjesx41D0qbOMsGqqkZJ3BB5wrrCM8EvRjkpNZUZEjcVRkyItBRjgjartHLpmuoxAXJ0tLC0FLsR9ATCiCtL35YMhmK3i6qlJgpmkRs79roJCmXgzPycjTqK3BAFop8KLkbYy+1LfHF7L77//A4+b9BuUJ+bCkOslmLX2ILEjZDmEpncmBrBcHhInS/FLvC5VUulDrEDA6mZUvU+V9rNTMTjVKtMiOomEjeJ3NiwR0wuZeCMWhqeSRSALMtCWip1jkiSBK/LgUg8WVZxc2w0gn/7xesAgFOnNeHv5naUbV/MoMhNBSHLMk8juYWp4LKMvNUzE0sunaG4u9kPADgwEOKPBcZiAIDGGrfl52cprP3K80zK4LcBUJRoFFEZsEqQNM+NjdNSViqlGEwIjVK1FJEH0USS+7y8QiqUj7Qp4/nx+t4B/v32npGy7UcmSNxUEKKBzO1yaARJvmkclipyO7SHQndLStwcHFTFzVAe4qZB2fbQUCpyk8lvA6gzrihyU/2wyI3oIwDsaSjOpccNg9JSRCEw8Q9oo5tM6ERi5btG9gZUu8IOEjdEoYgCxuN0aARJPM/ITZwbio0jN8dGozw9MJyHuNFvy4zKZniKYJImKgN9Dw+GHT03LHKTi99MjdyQuCFyhxnuxQ7egLoYKKehuG9EtSv0DIczbFk+xkXcPPDAA5g+fTp8Ph8WL16MTZs2mW77i1/8ApIkab58Pp9mG1mWcdttt2Hy5MmoqanB0qVLsXPnzlK/jbIjihu3Uxu5iecZuYkljdNSjX436pWS14ODqagLFzf+HCI3Pp24yRa5YX1uChgpQVQGppEbG6aluKGYIjfEOCHOlRJN7Kq4Kd81sk+I3AyGohm2LB8lFze//vWvsXr1atx+++144403cNJJJ2HZsmXo6+sz/Z2GhgYcOXKEf+3bt0/z8+9973u4//778eCDD2Ljxo2ora3FsmXLEA7bU0EWC+ZDcUipLsJihVM+PQ8SSZlPZdanpQA1esNSU0OhfNJS2ptBpu7EgOq5oWqp6od7btz6PjepY8ZOkZsQb+CXi+eGDMVE/oRNDPdiI79y0TeiipuBYKxs+5GJkoube++9F1deeSVWrlyJ+fPn48EHH4Tf78cjjzxi+juSJKGzs5N/dXSoTmxZlnHffffhlltuwUUXXYSFCxfisccew+HDh/Hss8+W+u2UlZiuskmSVIGTz/BMUUA4nenlrd0tNQCA/f0hyLKcl6FYH7nJ1J0YECI3cUpLVTtmkRs7p6Vyq5YiQzGRPyxyqe+rxBr5RcoY2WRRfAAYCkVt2S25pOImGo1i8+bNWLp0qfqCDgeWLl2KDRs2mP7e6Ogopk2bhu7ublx00UV49913+c/27NmDnp4ezXM2NjZi8eLFGZ+zGuBzoIT8K0sn5eNRESusjCI3M9rqAAAfHA1iJBLnkaNMfWr0NOiEUGeDz2RLZT/4+AWK3FQ
7Y1Fjz40901JkKCbGFzNPmh3SUqKPLJ6UMWLDY7yk4ubYsWNIJBKayAsAdHR0oKenx/B35syZg0ceeQS/+c1v8Mtf/hLJZBJnnnkmDh48CAD893J5zkgkgkAgoPmqRHiPG2Gly0RJPmJAFER6zw0AzOlMiZsdPSPoVUxjDT5XTqbKBp8LYn/AyY01GbentNTEwSzsbsc+N6N5eG5Yh+Kgjd4HUTnw2Wtp4qb8aSk2RJYxMGo/343tqqWWLFmCyy67DCeffDI+9rGP4emnn8akSZPw3//933k/51133YXGxkb+1d3dXcQ9Hj+ifGimqhZ45CaPainRtKvvUAwAczsbAABvHRzCJqWvQWdj5siLHpfTgfZ69Xc6GrN5bmhw5kRBPzeHYce0VJBXS+XT54bEDZE7YybjSexQLaU/pu14jJdU3LS1tcHpdKK3t1fzeG9vLzo7Oy09h9vtximnnIJdu3YBAP+9XJ7z5ptvxvDwMP86cOBArm/FFojdiRmuAtI4vAzcIRm2lJ/bWY8TuhoQiSfx9We2AgA6sqSVjAgLJ2FbllJwSktNHCK8GqQSqqUoLUWML/rRCwx3mYcLJ5MyFzMNPvsK+JKKG4/Hg0WLFmHdunX8sWQyiXXr1mHJkiWWniORSOCdd97B5MmTAQAzZsxAZ2en5jkDgQA2btxo+pxerxcNDQ2ar0qERTNEz43bkb/nxqw7MUOSJDzwr6diUr0qSI5rypxWMuLs2W0AgOmtfjgMIkQibiESZUeTGlE8zCI3dmzil5+hmFVL2ed9EJUDF/+6yI2nzNFtMc3KbAb6NJUdKPlsqdWrV+Pyyy/HaaedhtNPPx333XcfgsEgVq5cCQC47LLLcNxxx+Guu+4CANxxxx0444wzMHv2bAwNDeGee+7Bvn378IUvfAFA6oZ7/fXX41vf+haOP/54zJgxA7feeiu6urpw8cUXl/rtlJVMkZt8qqXMuhOLTG+rxWcXT8N//el9AMCCKY05v87XL5yHqS1+rDxrRtZtxenksYQMj8vakEKi8lA9N/q0lP1KwYPR/CM3dlzVEvbH7Pwo94gadjy7HFKqb1mvPX1lJRc3n/70p3H06FHcdttt6Onpwcknn4w1a9ZwQ/D+/fvhEG6ug4ODuPLKK9HT04Pm5mYsWrQIr776KubPn8+3ufHGGxEMBnHVVVdhaGgIZ599NtasWZPW7K/aYAezRzQUswM9j9LpeJbIDePS07u5uFk8oyXn15ncWIMbl8+1tK1HI26SmvdKVBdhk5WpHdNSoXwMxUJaSpZly9PECQIwLwVn18Rype55itbn4gJ+ZCJGbgBg1apVWLVqleHP1q9fr/n/f/3Xf+G//uu/Mj6fJEm44447cMcddxRrFyuCmIGh2F1A5CbGRy9kFhAdDT784UsfQe9IGLPb63N+nVxwa7ouU1qqmjGrBrF3Wip3Q3E8KSMST6atwAkiE2ZNLsvtS+SVgx6XrX1l4yJuiOKgb+IHFNbnhgkidxYfDADM72rAfJTeq+QU9oUmg1c3ZitTO14w2b7U5jJbSqisCkbiJG6InAibVEux6Ha5hguPKYuOGo8TdRPVUEwUl5hBWspVQJ8bJpaMuhOXC0mSBMMciZtqxqxJGRvZEQjHbGEqTyZlBKO5p6VcTge/MZGpmMiVMRPDfbnbZYiLEjv7ykjcVBBRA0Oxu4A+N8xzk8lQXA7cBUSjiMohYtLEj43siCVkLoDKSUjw/uRiKBa3t+PFn7A32Tw35Ypss3Rxjdup8ZXZDXvd1YiMqNVSQhO/AiI3bPxCNkPxeMM8QJSWqm7Eqccifo+TpycD4fIP5WMXboeULsSywS/+NqwmIewNS//oG0eq8/fKlJaKqWkpO4yCMIPETQWhGoqL47mJMXFju8gNpaUmAmZ9biRJ4s3BAmPlFzdij5tcK56YR4ciN0SumKelylsKPiZElJjZOWKDCKsee93ViIwYNvErpM+NQSTIDngoLVX1xBJJnko
1ioawgavDNhA3+XQnZtjRHE1UBmOKYLBbKXg4mh65CZdxFIQZJG4qCCPPDZsJlY+5zGop+HjjLnNOmSg9Yg8boyoi5ruxR1rKOD1gBbVLMYkbIjfGosbzzNy8Wqo4iz9ZlrFuWy9e2XnM0vYhQdz4KHJDFAM+ONNl0Ocmn9lSSrTHaGhmOVEFm/1OGKI4iEZhr0GjxkYlchMYK78oKChyo4g01htk99HRvM5VYuLB01I6cVPsatLn3+3FFY++js89shEHBkKW96vG7bTFEE8zSNxUEMbjFwqplkrvm2MHyHNT/fAGfi6HoY9FLAcvN8wMnEsZOKNe8A49sXE//u4/X8J/v7y7qPtHVCdjUeNqKXeR01Kv7x0AAMgy8H7vSNbtxSouH+8mbr9rtb3uakRGeJ8bTVoq/54H2QZnlguWUybPTfUSMZmbw+BpKRt4bvIZmsloFLxDX3vmHQDAPc/vKN7OEVWLWbWUh4/cKY6gePdwgH+/t99C5MbAc0ORG6IgjDoUs8GS+aWl7FktxdJS5LmpXszmSjGqxVDcpLyPY6ORou4TUf2MmfS5KXZke7+QitrXH7S8X16XgyI3RHEwNhQrB3ohTfxsFrmhtFT1Y1YGzlBLwcvvueGzdHKYK8VgkZsdPdpwvx1EG2FvsnUojhYhsi3LMvpGwvz/Q6Hsx6UYdaU+N0RRMDIUq31uChi/YDND8XikpX68fheuffwNSwY6oviYNfBjsMiNLTw3RUhL7ewb1Tw+FIoWvmNE1ZJMqt25a/RpqSJ6bgZDMY2lwUpVH7sPeYTITSRGaSmiAPhBZdDnppAOxXYzFJc6LTUaieN7a3bg9+8cwaOv7i3JaxCZMRsKyGi0kbgZUfaB+YBygb0PPYPCCjmeSOLKx17HP/74rzgyPJbfThJVhdg3xrRDcRGuj72BsOb/IxbEDYvSeF0OitwQxYEdzF7N4MwC+tzYtBS81GkpMa/cH6QVdDlgF29vFkOxlTB5qRktxHPj9xg+LkZu3j0cwNr3evHm/iGs2dqT304SVQUz7QLp0c1iTgU/OqL1glmJ3KjiRq2WiiaSfLFsF0jcVBBiOJDhKqhDsb2b+JUqLbVPqAjQn9zE+BA26b7KaKvzArCHEXcknLrgs7LuXDiuqcbwcVG0HRxUozX7KU1KQGvadegWn8yWUJy0VEpkM8FkZUxI1CByIz5uF+x1VyMywtI0orhxO/IfVWBbQ3GJm/gdHCRxU26ypaXa6lMRj/7RKJJlXhEGuLjJIy3ldxuKIjFyc2hIPR7JA0YA5mXgQHEjN4NK5HpKc0qEW4vcqMJLFDdhm/luSNxUEBHuuVEPeBdP4VTf4MxSeW5Ev8NRG0QGJiLcpOsxjoa01qYiN/GkjKEyVxaNKp6bfNJSANDZ4OPfnzSlEYD2GDwkRG7EKA4xcTErAwfEtH3hop+dW8cp4oZFKTPBIzduB1xOB7c12M13Y6+7GpERo7SUm3corp7BmaVOS4kpgYFg+SMDE5FsFUgelwNNfnv0iCkkLQUAJ3Q18O8/PL0FABCKqjeRAeF4HKQqKgJq5EY/egFQr//FWPyxa+GUZj+AlEDJVnmrX2TbtZFffmcrURZ4KbggRlwFpKXUwZn2EjfFnp2iZ3hMewMJxRJ5r8qJ/FB7x5h/7u31XgyFYugNhPGhjvrx2rU0ChU33/zkiXA6HPj43EnY2ZsqCQ8JhtERoSLMDgZqovyELERuEkkZiaRcUCsPlh5laSkACEYTaKwxj3tEhMgNkOp3E4wmbNfIjyI3FYSR58ZVgBBQB2fa6zAodSm4/gZCE5vHH7Xrr3ljPGbGHe9UzWMb9uIXf90DIBXdZCmCfDw3QMp385//chL+YWEX91CMacSNevxF4knbeReI8SecwXMjLm4LXQCy9OikOi/YiLdsERh9SxKK3BAFY1QK7i7K4Ex7RW7GMy0FpCoEOkrySoQZoxaGUXa3pELl41lB1DcSxm2/eRcAcFJ3E2a01fK
fFSO6x25WYuRGPz9reCxm2rmZmBiYdScGtIvbWCJZ0LHCezjVuOFxOhCJJ7MalbmhWIjcAPYbwWCvJTuRkaiBobiQnjBqh2J7HQal7nOjb31PkZvxx0rX3+7m8Rc37x5Shwj+aVsvj6p4XQ7NTSVfahQDdShmHLkBaDQDoYpfw7SUQxQ3hS0Ag0p6uM7rstSQL5mU+Wt6lf47HptGbux1VyMykrHPTT6l4EmbGopLXArOut6y9JeV3g5EcbEyjHJOZ8pns2X/0HjsEgDgvSPChORjIcFvk19KSo+allKPObZ6ZmkB8t0QLDVplJZyOCQ1dV9ghRJvUOlzwaOIlUzPKVoF2H2IIjdEwRhWSzkKqJbipeA2EzesGiBe/LSULMv8xtqhlOiy1QsxflgxFC+a1gynQ8KhoTFL04qLgdj3aG9/UBi9UJwMfo0uLZVIyggq37NIFUVuCObJ0s+VYhQruj0itDmwErmJCAKGbW9Xzw2JmwoiktFQnH8TP9t1KC6g63I2xmIJMHtSe0OqlwqlpcYfK4biWq8Li2ekSqd//86Rcdkvsbnevv6QZmVbDPxuraFYjBp2NvqUx0jcTHQyeW4ANdpeSNGFLMv8+Kv3qeImU+Qmkkjtl0NSF8UUuSEKQpZl41Lwgvrc2NRQ7CxdWoqdzJKktvintNT4o4qbzOmeT57UBQB4bsvhku8ToO05MxqJ4/BwarBgvmXgevzMc6OIG9bvxuN0oEWZQ2WlkRpR3YQyVEsB4CmkQq6R4kKvzuuy5J1hkRuPywFJyaNS5IYoCDEy4xUNxYq5LJZHCsf2HYpLkJZiKahajwu1BmW5xPgwyg3FmSs9lp/YCbdTwvaeEezsHSn5fg3pmujtUl6zPosIs4qalkq9f3Y81nicXECRuCHCGfrcAICHLQALuEaKCz2/x8mH2EYyRGDEoZkMK79XDux1VyNMiRkYuQA1chMroEOx3Zr4lTItFRRuqiycarcVR7UTSyT5RTJbeXWT34OPHj8JAPDcW6WP3ug7BG87khI3jTVFNhQrNy9xhhAzLQfClJaa6GRNS/Euxflfu0bDqqlfkiR4LYy9MfJ9+pTvwza7jpK4qRDEPKjh+IW8PDcsLWWvw2A80lK1Fg10diKWSOL/bdiLQ0OVPX9I9DhlMhQzPnlyKjX1u7dL77sZCqaExVSlx87Ww8MA1EGehcLETSwhI5ZIIqhEcPxC5GaUIjcTnpBFQ3Eh0e1RXcUi61uTMS0lDM1ksN8jzw2RF0xNOx2Spt02SyllmwdiRIx3KLZn5KaQkKsZYgmyaoSz14rDjNufexe3/uZdfOlXb5Z7VwqCXVQ9LoclYf0RJXKz51iwpH8rWZZ5c8Hj2+sAqDcZNsizUMSbVSiaECI3LkpLEZxMpeBAcaqlxMgNYG3auJqWUs9b1netVK078oXETYWgb3nNUNNS1RS5UU7cEqSlRoVp1JUWuXli434AwOZ9g2Xek8IQG4dZodnvhk9ZHfYoBt9SEI4lISun0SxF3DDa6osjbjxOB1+cjEUTmshNg5KWGgnHMBSK4oV3e5Cgoa4TkrEMTfwANXpfiKAY0VUCqpEbK2kpdb88FqqsyoG97mqEKRGDXCcg+FPy6lBsT8+Nq4RpqaDQX8VbYZGbasGqmZghSRK6GlNzpg4Ply4lFxQa64ljFwCgrbY4aSlJkng5eCga11TF1AmRm3ue34Gr/t9m3PLsO0V5XaKyYMeFmeeGGYoLERTFi9yUtulqvoyLuHnggQcwffp0+Hw+LF68GJs2bTLd9qGHHsJHPvIRNDc3o7m5GUuXLk3b/vOf/zwkSdJ8LV++vNRvo6yoZeC6yE0BU8FZE79CpsqWAs+4pKWcFRW5kWXtZ5GPmLULQSF6ZpUuZYjmkaHSRW5CEVVosNdjtCsNH4uB2MgvFGGRGzUtNRqJ43ElSverTQeK9rpE5aCmpYzPEbcF8282mJhnxx2rgLISudGIm4kaufn1r3+N1atX4/b
bb8cbb7yBk046CcuWLUNfX5/h9uvXr8dnPvMZ/PnPf8aGDRvQ3d2N8847D4cOHdJst3z5chw5coR//epXvyr1WykrUYOhmUBhKRwW8p6QaSkhcmO3EkYjxnTRpf5g1GRL+2Nl9IKe1rpU5ERfzVRMgsIwz5m6yM20Vn/RXkesmArF0qulyHNDjGUpBXcX0LyVMaKP3FjpUKwYirVFLYULrVJQ8rvavffeiyuvvBIrV67E/Pnz8eCDD8Lv9+ORRx4x3P7xxx/HF7/4RZx88smYO3cufvaznyGZTGLdunWa7bxeLzo7O/lXc3Nzqd9KWYkZdCcGxLRUHn1uWFrKZpGbUlZLaQzFNi1hNELfkr+Sb4DsveRSXs1Wl4ESvm81ouTElGZt5KaYC4AaoZGfGC1i71FfDZck382Egx2LZtVSxfDcjOoaaVppxmfU52ZCRm6i0Sg2b96MpUuXqi/ocGDp0qXYsGGDpecIhUKIxWJoaWnRPL5+/Xq0t7djzpw5uOaaa9Df32/6HJFIBIFAQPNVaWQzFMeTclrqIhu2NxSXIi0VrczIjV7chKITTdyoZttSERQqlyRJwsVKCfqX/m52UV9HHJ7JDcVel2kX5GAF/62J3JFldd6YWXTTU9RqqdTxyAzFGQdnUloqxbFjx5BIJNDR0aF5vKOjAz09PZae46abbkJXV5dGIC1fvhyPPfYY1q1bh+9+97t46aWXcP755yNh0tDorrvuQmNjI//q7u7O/02VCaPmSYDaoRhQPTRWYU3y7GYoLtZQOCNYxEPsc1MJkRv9pOhQBXdVZuKmIQdx0zAOKZuQzuh89yULselr52L1eXOK+jp+jeeGdcxWq6X00GDXiUU4luSWAbOZZu5iGIp11VKspNtKWsprkJaym6G4OANTSsTdd9+NJ598EuvXr4fPpxr6Lr30Uv79ggULsHDhQsyaNQvr16/Hueeem/Y8N998M1avXs3/HwgEKk7gmFVLicIknpBhkqI1hOVr7Td+YTzSUkKH4gqI3ASqKHLDOvDmIm7UHjDjE7kBUpUqZtUqhVDjVsWNWgqeEtsuh5S2SEkN0iyeoZmwN5qxCFk8N4X4XNLSUqwUPNP4BeVnbFtAFToTynPT1tYGp9OJ3t5ezeO9vb3o7OzM+Lvf//73cffdd+OFF17AwoULM247c+ZMtLW1YdeuXYY/93q9aGho0HxVGuzAMUtLAbkbcFnFjf0GZxZuljNDUwpu04FvRugjNZW8mh8eS11U8/HclDJyM6YIDTMTZ7FgkZtwTG3iV+t1QpIkQ8FXyf4qInfEXlwOEz8kG79Q0GypsD5yY2H8gsF9qJQ2gkIoqbjxeDxYtGiRxgzMzMFLliwx/b3vfe97uPPOO7FmzRqcdtppWV/n4MGD6O/vx+TJk4uy33aEl4JnSkvlKAb44Ey7em5KPH5B7VBsrxWHEfpqqUoe9pmP56ZhHOYu8QoVExNnsRBLwcXIDQBMbkyP0NDU+omFlWrConhulNep149fyND3yyiDwPYlMpEiNwCwevVqPPTQQ3j00Uexbds2XHPNNQgGg1i5ciUA4LLLLsPNN9/Mt//ud7+LW2+9FY888gimT5+Onp4e9PT0YHR0FAAwOjqKG264Aa+99hr27t2LdevW4aKLLsLs2bOxbNmyUr+dsmFmKHY4JDBxn2vvEx65sWm1VD4m6Wzw3g4VNlsqLXJTyWkp5rkx8RMYMR5zl5jILUUqSqTGLVRLCZEbAJjeWpu2Pc2amliovkDz47Co1VK5RG4qqFqq5J6bT3/60zh69Chuu+029PT04OSTT8aaNWu4yXj//v1wCNGHn/zkJ4hGo/jnf/5nzfPcfvvt+MY3vgGn04m3334bjz76KIaGhtDV1YXzzjsPd955J7ze4rRItyMxkz43QCryEo0nc8p5JpMyWGrfbpEbcX9iCRkeV/HElxiK5U2rKqBDsb6LciUbigN5RG6Y4NBHsIpJmE9iLu35IKalghFt5Eb
sp9Pkd2MoFKsI8U0UDx65MTGYA+oCsJBjQ9/nxkr1qFHkZkIbiletWoVVq1YZ/mz9+vWa/+/duzfjc9XU1OD5558v0p5VDmbVUkAq8hJFbmkp0Z9j1w7FQOqEMXrP+TIihHwdUuEXiPFCn4aqZEMxT0v5cxc3pUwhjnFxM15pKXX8AuvWfN4Jnfh/r+3DpDovuppq8MquY7ZbEROlZVQoejCjGIJCn/7KJXLjqYBScFtXSxEqZoZigEU6Ery02wqiELKfoVhbAVYsovEkPwHrvW7+eUUTSSSTsql5zw5Ui6FYluW8PDc1QrSjVDDhVGpDsaZaikVulBvZyd1NePPWv4fTIeHqX24GYD8vA1FaRix4bgoVN/FEkov5Or3nJkOBhdF9qBj+n1Jgr3wEYYpZKTgglk5bFwJiuandSsHFSFIxywuDgjGz1qst87V79IZdiFg3abuVXVplLJbgx55ZXxcjfII/qlQdeyPjnJYaEz03wgwhl9MBSZL45OWYzY9NorgEhaIHM7zcc5PfuSAujtjreC1EYKIG4xfsGrmx112NMCXThTefEQyi+dhukRtJkkqyGmDhXp/bAZfTofEv2b0cnEUsWLSjEnrzGMGiNi6HxG/yVhgPITreaanhsRgXen6DFARv1FahQpbID+YLrLcQucn32GBVhz63g4sTKwUWRmkpux6nJG4qhHCGCy/rdZNLnxtxIrgk2UvcAELFVBHTUqqBLiUQXE4Hj4TYvRyceWyYT8VuFxKriCmpXI478bgvVWoq0zlWTJh5+NhoRH3M4DX56txmK2KitIxaiNxwcZPnscEbaQrRU7XAInufmwk/foEoHpnKVFmvm5wMxTYdmslw866XxbuR8TJwoQS5Uhr5jSl/fxa5idp8f81gYyRy8dsAKRHOonmlqpgav1Lw1PP3j6YmnHtdDsOKRSsGT6L60JdoG1FoF/eA0khTbBrpsdBpmPdbcxqIm0Sy6K07CoHETYWQKWTOh2fmcKDbdWgmgzeGKuJqYDScbtSrlEZ+4ag2LWW3VZJVBoKpG3pLrSfn32WGx1JFbvg5VsTqPCNYCmokywq90NU5UZlYauJXYJ8bNXJjsNDLcH4Z9Vtj38sy+EwsO2DPOxuRRqYeHMwQHMvhwLLr0ExGKUKdI5H05liVErlhwz1ZGLlSV/P9BYibUgvR8Dh1KNY3LzTzHllZSRPVx2guHYrzHHkQMBhea2XsjWGHYuF7Ox2rJG4qhLByUPlc5sbDXCI3dh2aySiFuBnVeW4AoXFVEV/nyU378dX/exuHhsaK9py8hF25MVbqan5QETetdbmLG5bOKdUUd3YMlDotVa+rEhMrpUQocjMxycVzk2+bgIByLRQ9N6JJ2Sy9xEvBDZr4AfaaL0V9biqEzIbi3MsC1bSUPSM33NxWxAs7C/caeW6KleoYCcfw1affAZC6ANxx0YlFeV5V3EzctBSLWoZL1J2ZNUo0WkAUk3p95MakWVsxWuwTlYelaqkCzeYsciMei+K9IJGUDaP6RtVSLocESUqlpSKJBIDc/HSlwp7LdiINNWRulJZis5hyiNwk041hdqKUaSkx3Gul5XguvH1wmH//fu9IUZ4TUEUeuxjZvS+PGSwt1ewvIC1VosgNe16jc6yY1LidGiO/WeTGQ5GbCYmVPjcFG4rD5mmp1POaRG4MPDeSJNkyymjPOxuRBo/cGKwq8xECMe56t2nkpgSVIuJcKQYzjxbrhvnuYVXc7OwdLcpzAurnwISZnXLbuTAQTJU/55OWYsd+qT033hJHbiRJ0qyYzTw+5LmZmIxYqJbyFmooHjNPSwHmbUWMSsEB9Xqdb1PBUkDipkJgF3SvYT+M3FM4MbtXS5XA6DsaSa1WjCI3xVpxHBpUfTb9wWjaTKh8iehapdtphZQLrPy5pTb3Ibc+obNvsZFledxKwQGt76bJpCxeXbTY54ZBlBZZli1VS1kx/2ZCjdwYp6XM0l1mMw7t2OvGnnc2Ig2
eljK48PryKJGNGRjD7ISVVuC5wlqOixeNYpecHx4Oa/7fH4yYbJkbPHJT4YZi5rlpzcdzU+Qom4j49y91tRSgvam01RsLvUK70BKVx1gsAVb0akXc5Hvd4tVSgsiWJEmwOGRJS+nuG/pZV3/ZeRQ//+sevH1wKK/9Kwb2vLMRaYxlKAXPJ3LDLph2j9wU8ybOVivayE323g65cFhXIcVu5oWiDvysXM+NLMsYDNmzFFxcGJS6zw0AtNWpgmZSnbG4Uc8Be7cpIIoHS51LknmLAKDwwZm8WkoXNWQmYqPrbjIpc9GjH+CsRtpTv/fbtw7jm799D6/sOpbX/hUDe97ZCA2yLPNQvN/AfKjeoHNJS9nbc6M/WYxY+14v7vzde5ZLrnlKRPB7WJmnkgu9gVSkhk0W6C+CuIknkupqroIjNyOROA+jF1QtVYImfuJgUqNuwcWmu9nPvzeL3HjyGIhLVDa8x43HlXE8SaGz99TIjfZ+kkk0iRHE9MiN1uDMjlm9CBpPqBS8AojEk1wx1xqUjXKjZQ4rvJjNIzfZREc8kcSXfvUmxmIJ7OsP4meXfzjrcxqlRFjUqxhiQZZlDI+lXmP2pDrs7BvlgqoQxItKfQU38RtQPotajzMvXwvvc1MCccOiQUZp31JwXHMN/77NxFxtxwqUiUwgHEO9N7PoEEkkZTz/bg+8LgfOnddh6XesjF4AitihWBe54UOYDdJSmcSNR3cdtUNmwJ53NkIDO+AB47LRvCI38fIr60xkS0tt7xnhq+2Nuweytv2WZdmwx0oxIzehaIKvWGZOqgWgVgcVgvh3ZSm1RFK2VatzK/DuxHlUSgFiWqoU4kaplBoncTO3s55/v3BKk+E2LIJUaX/nauQvO49i4TdewI/Xf2D5d276v7fxxcffwBWPvo6X3z9q6XesNPADxEiJnPM8p2RS5q+j77nkzpCWEh8zS0uxbdRqXBI3RAZ43wOPEw6DQZf5RG6Ysrbt+AWnshIwWZm8c0gtuR6JxDX/NyIYTfDnahUqdQoZv3BgIIRP/PAVrPjZa4jGk9xP4nE50NngA6CWXBYC22+HLg9faSt6Li7z6HEDqMKjFJ6bTJ62UvCxD03Cf336JLx8w8dNjaP59K8iSsM9z+/g/1oRE9uOBPC/mw/y/z+xcb+l1zGaf2eEW4ic5BrFDcUSkE1MyxkjN0KPG330ypOWliq/7YHETQUwEs6s5gvz3NjzEOCeG5P3tPdYUPP/9Tv6Mj4fS4nUuJ2aaphCIjd/3HoE7xwaxl939WN7T4BPvG6qcfMBlyz8WwhihYJmjkvFiZtUFCsfvw2gpoxKMRU8UzViKZAkCf94yhRMbfWbbpOtcoUYP8RFxdHR7NHY3799BIBqHN+4p9+SKApGrYkbj4WGe6avoSyWHVL68Z7Rc2NSKSU+FtV7bspYjWvPOxuhIZglD5tP9IGXgttU3PBS8ITxe9qjiJuFUxoBAE+/cSjj8x0ZTpmOOxq05s1COhTv7Q9pvh9WTHpNfjfPZbPHCiEirJhYq3OAtTqvHAaCqc8inx43QGkNxZFx7HFjFRZVpbRU+ekLqIJmMJj9nP6zsti6cdkceFwODIZi2D8QyvJbOURuNPOccrt28dSXgWk5U+djo7lS+v0hzw2RE9mmxOZTImv3Jn7ZolEHlGZ5V310JgBg/0AIQyGteffI8Bhu/81WvPrBMRxUtheNnIAoonIXN/v61ejR3mNBIXLj4eImUARxwy4YXrcTkiRVbFv+QroTA6UtBR8b58iNFZzKUNs4VUuVnWNCtEZ/ndEzGIzivSMBAMA5cydhZlvKf7erL3vH8lGlF1c2z43TIcHpyG8EQ6bxDmyQslE0yGj0AsPj1EduSNwQFsgmbvKJ3LAD1e2yp+cmWwqiL5BqljezrY5HY/boUlXX/WoLHt2wD198/A1s70ldbI5rMhY3+XhuDgrdiA8NjnHPTZPfzZtjsX4ShRDVRdns2A3UCoOK+MtnrhR
Q4lJwNjRzHBr4WYU8N/YgmZT5SAQAGMqyYEmloIDj2+vQXu/D8R0p8/hOS+ImfaClGSzKkmtKXTUtpx/rmQZyRjKkpfS/R54bwhLZHPS+PFIrdlDWmchU9htLJHnlTXuDF9NbUyujfUKa6OhIBJv2DgAAhkIxPPSXPQCAKc1aj0M2b08mBoQy78FQVJOWaixmWorPPHJo/q20cvAhQfzlQylLwdXIjX3OB0pL2YPRaByiXSZb5Oa13anrzpmzWgGkRA5gbdZckEdusovsfBv5GXVq58+ZQVBn8tzoZwHaoRrXPmcyYYrawM/4gK9Gzw0z/RpFbliI2OWQ0OL3YJpiyhTFzaY9A/x7p1Bh9vE57Zrnyqe7M5A60TWruVBMuHl7eHv9oqSldLnuSk1LDfHITX7ixlvCqeDjbSi2AhmK7YH+HGbHsRksJXVSdxMAVdzs6hvJ+loj3HOT/RxRG/nlZyg2WiyrIz/M+9wYpqX0peBs8VxGQzE18asAsl14vXlMS7a754anpQyGJDJzX1udFw6HxKMxh4ZUcfOWMtNkxeKp+JfTunHfn97HhzrqsUAxIDPyTUvpV28DoSi/6DXWiGmp4lZLif9W2ggGlrZrrMkzLVXCqeDsOBuPuVJWIc+NPdC3cxjMIm6Yt+ZDSjrq+A4lctM3ClmWMzYBVIdmZj8O801PZ8oEMDESz7FaSi+K7GAoJnFTAag9OIwPeOZFyMlzY4ODLxPsJhMyEDe9it+mXfHaMB+NOIbhrQNDAFKrp5O6m/Dzlacbvg4zLud6gRjQiZuhUJRf9Jr8bjQq0YlwLIlIPMEFaD7ojXyV6rnhkZva/CI37DgvxVTwbOdYOSDPjT3QL1BCUXMf3UAwyvs5zZqUEjXTWmvhdkoIRRM4PBxO8/2JjCiem2wdigF1YZZrJDPT1HF3BpOypVJw8twQuTAWVVrDm6alck+txCrEUGzkr+gbSUVu2utTjfKmKBVQzOCbSMq8qd/JSmjYjHzTUsxvw4x/g0JaqtnvUWbDpLYttJEfE6JMiFWiuJFlmRsxm/KN3DBvWQnSUnasliLPjT3Qp6WMFlwMVu7d0eDl12u304EZSsXUzt7MqSl2rRCndZuRb8fuYCZDcYZUF2vL4c0QubHTbCkSNxVAtgtvPlUkZtNd7YIvQ7UUFzcscqOIm8NDY0gmZXxwdBShaAJ+j5OvnszIN8XDVnPMzJxIytinXNia/G44HBKf4F1oaoqZnXnkxll5aamRSJzfpPM1FI/HVHA7iRsneW5sgTj+BsgcOTygXAO6dYULx7enUlTZysHZtaKxJhdxk2tayrzc3JWpz02mUnAav0Dkg1XPTS43O7unpZh52uhCcnRESUsp05Q7G3xwOiTEEjL6RiLYoqSkFhzXqDETG8E9Nzmuftjqrcnv5vt6VBFdk5SupCw1VWjFVERnKObDPiuoWmpYSUn53I68Uz9G0Twjb0A+2NFz41Y8N7JM0ZtyEtRdgzKlpVjkZmqLVtzMtlgxxad1WxA3+XbsDgqTx/VkmjaesVpKJ26iNjAU2/PORmjI1oNDjNxYHaLGlLVdZ0uxm4zRqoQZillayuV0YHJj6vuDgyG8rZiJT8qSkgLyT0uJ0TR935ZWRdxwU3GB4ka9qDiVf8cvLfXB0dGilF4PCim7fGHHeTwpI55IYn9/CIu+9Sd89f/eLnj/7Oi5cQrnJvluyseYImbYeZcpLXVwMCVupujEjWoqNk9LybLM+2JZS0vl1/dpNJqhiZ8wkFNPxj43NFuKyIdsaSl2g07K1kPYldLnJppIpq3OVc+N2safhYH39ofw1oGU3+Ykk2nLIvoVh1XE8nwxzeKQUrOlAPUCVWjkplyG4mffPIRz//MlfPJHr2RcrVphMGQ93G6GKDzC8SS+9fv3MDwWw5N/O1DQvgHAmCKi7ZSWcglRR4rclA/WF6ZNmYmWKVKSNXKjVEwZEYom+N+ZtZLIRKGeG0NDcabITYZ7Bk+VJ5J
IJGWww5U8N0RGsoobofEYU9eHhsbwsXv+jPvX7TT8HTsYvjIhpgdCupO3b0RbLQUAcyenctpv7h/k3YgX6sq+jeBjHnIUCiGexnBpBkG21Hr55HZ1eGaBhmKTUvBoCYy1Ij9/dS8A4P3eUctTjc0YKkLkRjQyjkUTODIc5v8vVHyFbZiWYq3wAfLdlBN2/WUR2UyRmwMDqaKGbt2Yl+mttZCkVB+bY6PGTQCZ38blkCyJbKM0bTIpZ43eW+lzY9R+IGMpuNChWBRG5LkhMsI9Nx7jP5d40Wfb/mrjfuzrD+Hete9r5qIw7O658bqc/CQS0zqJpMwvDiwtBQAndqWEzFObDyKWkNFa6+FVVJkQZ6Ikc7iBiIKzSbhhs/QYgKI18tNXKei7gZaCaDyJd5WKMwD407begp5vSCiTzxdJktTy11hCE7liN5V84X9PW4kbIS1FvW7KBhPObcpMNDNDcTyR5O0o9NPefW4njy7vPmrsu+GVUjXujL1wGF6doTieSOKyRzbh7O/+OWO0eDRDF+SMgzMtzpaKTiRx88ADD2D69Onw+XxYvHgxNm3alHH7p556CnPnzoXP58OCBQvwhz/8QfNzWZZx2223YfLkyaipqcHSpUuxc6dxhKIa4J4bEzUvSVJa1c8OoeSQ9XwRsUNONBtGIwwGglEkkjIkSb3YAMCJx6XEDTsBT+puyukCAeQmFsS0VJcgaE7oauDfF8tzw6qlvGmRm9KJmz3HgppowRv7hgp6vUGhe3MhiOXg/UFVtB8eKpK4sVFayiFMgCfPTfkIRfSRG+Mo4ZHhMBJJGR6nAx3Cwosxa1KqsvKDo9oZeOxanEulFJBuKH7hvV68susYDg2NYb0yldwIa2kp88iNUSk4uybFEknNXKqq9tz8+te/xurVq3H77bfjjTfewEknnYRly5ahr8/4w3/11Vfxmc98BldccQXefPNNXHzxxbj44ouxdetWvs33vvc93H///XjwwQexceNG1NbWYtmyZQiHw4bPWelYufD6hBUtkJpSzdAPlATs0R47G0bihqWkWms9cAmrglmTajUn3Yent1h6DfF3cpkvxS5wNR4nTp+hvpbYV0dNSxUauTFLS5XuhseMj6dMbUKd14VoIon9A+nHkVUGlcZmLXk28GOwcyAYSfBmaUDhviZeLWUjcQOoFVPkuSkfLA3FptmbpaVYGfiU5hqemhaZqbSl+ECJ3CSSMr7w6Os46ZsvYPfRUbVSykIDPyDdULxFWMS+trvf9PcyTgW34LkxbOInjIRRu99LlhaYpaLkd7Z7770XV155JVauXIn58+fjwQcfhN/vxyOPPGK4/Q9+8AMsX74cN9xwA+bNm4c777wTp556Kn70ox8BSEVt7rvvPtxyyy246KKLsHDhQjz22GM4fPgwnn322VK/nbLAwo6ZQuYsAsFu0Mx0C6SvFAB7DDbLBhcHGnGjlFvrVkYup0MjaP5+foel13A5JLDrUCRh3cMiGlDPnNWGeZMb8NEPTcJFJx/Ht2ng+18kz43yt8o0/6VYsEjI1BY/X3FaGfxnxkCBE8EZ7IJ+ZDgM8X6fbZhhNsI2TEsBQq8bSkuVjaCykGEtHiLxpKHYPGBSKcWYpRM3r+w6hj9t60UomsAft/bwRZCVMnAg3VD83uEA/9neYyHD3wHUvj1GkRtPEfrcsO3KbXko6atHo1Fs3rwZS5cuVV/Q4cDSpUuxYcMGw9/ZsGGDZnsAWLZsGd9+z5496Onp0WzT2NiIxYsXmz5nJBJBIBDQfFUS7OD1ZWjhL85ICscSmpWsUcje7tVSgCpujo5G8cN1O/H63gEcDaRXSjFu+Yd5mDmpFjctn8urE7KhSenlELlh5aF+jxM1Hif+eN1H8Ni/na65OTLPTdGqpcYxcnN4KBUh62z0YbbFBmSZUCM3xUlLHdId00OFRm5smJYCaHimHRjTRW4A44opds4c15SekgKAOZ2pa9K7hwOQZRmb96rDfV/b3c9
7QVkpAwdEQ3HqOrC3X13EMqGlJ5ZIcuuCkbhxWUhLZZstZRc/Z0lnSx07dgyJRAIdHdpVdEdHB7Zv3274Oz09PYbb9/T08J+zx8y20XPXXXfhm9/8Zl7vodzIsmzJ7Ch29GV9YBiZDcX299z8csO+lIdoLfCpRVMAGIubuZ0NePEr5+T8Ol6XU5kBlUtaKvvfpFhpKX0TP9W8V7pqqSPDKfHQ1VjDxyXsLEDcsBRSc4HihkUoj45oj+lsk5ozIZ5jdupzA6i9bhLkuSkbasNODyQp1VQxFI2niQM2866jwVjcnNDVCJdDwtGRCA4NjWHz/kH+szf3D+HUqc0ArJWBA9pZa7Isa6L1R4bDiCWSaQIjKHRbzlQtZRS50V+HRMQKTrssnO27bC8iN998M4aHh/nXgQOF98UYL2IJmYdAM114a4WOvsyXwtDfCFLPa48DMBMshSGao5/afBCAtgy8UPKZDG5lpV/sJn7ecWzi16OUWU9u9PEoWEGRGyVt1Fpo5EZ57/pjvJDoWCSeBKuetVtaipWDU+SmfLDBlDVup2rizTDQt9NE3PjcTsxXCg5e3zuIN/cP8Z+NRuJ4U/HMWI3c8LRUPIHAWFxzPUgkZRwZSvegspSUx+Uw8c6YD2u1ErmJJWR+b/GUeeFc0jtbW1sbnE4nenu1ZaS9vb3o7Ow0/J3Ozs6M27N/c3lOr9eLhoYGzVelIIY/M91I/Uor7WA0gV4lcsNKofuD0bQyZ97nxsaG4qkt5qXcM9qspZ2skE+vG7VaynyV1WBgiM4HfWdQT4bQcbE4rIibrqYaHN+uegXyMbbKsqxGbopULZUeucnfcyP2CfHZ7Hxwkeem7LB0tc/t5KNWjEzFPcp1t6PRWNwAwClKwcETm/YjFE2g3uviBQkvv38UQH6eGyb2G3wuzFQ8ckapKSZu6g2iNoCalorGM6SlDBbEYjNUuxSrlPTVPR4PFi1ahHXr1vHHkskk1q1bhyVLlhj+zpIlSzTbA8DatWv59jNmzEBnZ6dmm0AggI0bN5o+ZyXDLrxOh5QxhcR6FgQjcb6CmD85JeISSZmvnBl2GGyWjenKJF0jFhyXvUGfVfisprzSUuafH4/chOOWx2IYwZr1jZfnJhpP8lTm5EYfulv88LgciMSTODSYe8n1WCzBBVqhnpsanbhhxRiFCEi2gPA4HZoKPDtAwzPLD/c8uh08smckbrJFbgDg1Gmp1NOmPSm/zSnTmrFIeYzRZeLZ0aNaEZLCMGEf76dzYCCE4bEY3j44xBe3mSqlAKGJn0HkJmYlLZVIcmFU7ntLyV999erVeOihh/Doo49i27ZtuOaaaxAMBrFy5UoAwGWXXYabb76Zb3/ddddhzZo1+M///E9s374d3/jGN/D6669j1apVAFIG0Ouvvx7f+ta38Nxzz+Gdd97BZZddhq6uLlx88cWlfjvjjliimqmsjkduInF+oHc11aBeKSvUGy6Z58aVZbBkOTm+o55/f0JXA645ZxYA4LimGl7BUwzymbKtpqXMIzfMc5NIyhm7mmbDbPxCqaaC9wbCkOXU67TUeuB0SJipCM1dR81n45jBojYel4OvfPOF+QyYuGEXcnZ8R+IJfOt37+GO375nOcqk9pGyl7ABVE8ceW7Kh1jQ4VfOd31aKhJXWxNkEjdnzmrT/P/06c34yGztY/rRDWbU8ApZNXLTXu9FtxLxPjAYwuce3ohP/uiv+NbvtwFIdUgGMomb7NVSRn1ueFpKjNxUs6EYAD796U/j6NGjuO2229DT04OTTz4Za9as4Ybg/fv3wyG0GT/zzDPxxBNP4JZbbsHXvvY1HH/88Xj22Wdx4okn8m1uvPFGBINBXHXVVRgaGsLZZ5+NNWvWwOezpngrCatGRxa5CUUT6BOMbQ0+N0bC8TTfRyYVbheOa6rBqo/PxlObD+C2f5iP02e04IITJ2N6m7+oK2yelsphRovYxM8Mn9sBtzM1rXx4LGZ6QckGE6LsopL
J9FcMWHXd5EYfF9SzJtVhe88I9mQoMTVjMJg69lr8noL7XrDzoF+5kXS31GD/QIhXmqzb1oefvbIHAHDOnEn46IcmZX1OO3YnZrDITSlTkERm2CLC53YKkRttewdWxOFxOTJ24Z5U78WZs1rx6gf98Lkd+JcPd6O11ouWWg8XR90WxQ03FAtFJO31Xi74n37jEB9R8od3juC2T8znc7LM0lKqSDFIS1mI3EQSSdt4bkoubgBg1apVPPKiZ/369WmPfepTn8KnPvUp0+eTJAl33HEH7rjjjmLtom1RL7yZb+a13HMTRw8XN1401rhxaGhMM99IHGxWbnWdjf9YNgdfOe9D/Ka4wMK8qFzx5hgJkWVZ08TPDEmS0OBzoz8YRSAcQxeyj4MwYrxLwdkFsatR3V/WTn5ff+6N/AZCxamUAtJFfupC3o+hsRhkWcZbykR4IFXdZUXcsAt+Jv9UuWDiJpfRIETxiCeSPCXoc6uRR30peI+Qksom4O/9l5Pxs7/sxoULJ/MRMv+8aAp++vJuzO2s5/10sqH13KhpKRb5EWev9QTC6A2EhbSU8XWLRfJjmQzFzsxjG+zS58Z+ZzOhIWyxc6pfUeKhSEKT+zWab6QdbGbftBSj1F0umefGqriJJpJcHGZb7TfWpMTNcAGlymaG4lLNljqslIFPFnL/01qYuMk9cjOgjEloLmCuFMOrSx2xVW4iKWM0Etc0MvvAZIaPntFI6m9Tb7Ez7HjiUI79RAGeLSJ/wsI1IZOhmFUXZkpJMTobfbjlH+ZrHrvu3OMxp6MeH5/bbvl65xP63HBxU+81jfwcGAhhJJvnxmUeFc5ULeVVBI8sq8Kv3OLG3st2wnJzMVYKnjIUq6591dRqJm7oEPDmGAkR8+3Z/i4sRK03dOeCPtftcZnnxYsBS0sVK3JzJIcLfzb0jSwn1Xv55zIUimma+1k1PzMfglFTs3Lj4p4bY3Hzq0378ff3vlTw1HbCGLGSzutyoEaJ7unFDe9xk6FSKhO1XhcuWTQlJ8O9Ni2Vev1J9V5eLQWkqqdYNdbhYTVyYybk2cLJqDpPv8gScbtUQRZUPpuqrpYiCseq54ZFbvpGIrzcLxW5SS9HFvP3JG5Eg641z426MpGyfn7sYjUQzD9yk5aWcuZe3ZULrD9GV5Mqbqa3pi6YBwfHEM9RVHGx1JRfWk5EHylr8Lm4gBwei/EVNGC9PJyJGztGbpySubiJJ5L4zxd2YGffKL72zDv4665j4717VQ8TNx6XA5Ikwc/73Gg9N+y46zBoLloq2MIqGk9ycdVe74Pf48LKs6YDAL57yUI+2PfI0Bi/N9SapGBZWsooKsw9NxmmggNqRVa5PTd0ZysBw2MxbNzdX1D5L2PMQidcQB229r7S8K7e60Kt1yU0klNPRnZTdDokntOfyOSalgrlMGRRFTfpjRStYlYtVSpxw3rciGmpzgYfPC4H4klZk8u3AougHNdcuLjR96Gp97l5B+X9AyHNinrQYipQnbVTeNqs2LABjEbi5t3DARwbVQXco6/uHa/dmjCw0QbsuKvJ5rnJM3KTD+KC94ByjrHmpl+/YB5euenjOH/BZExWFhWHBXFTZyLk801LuZwOPqOPiZtyL5xJ3BSZoyMRfPJHr+DTP30NP17/QcHPF7aYlmLN0VjulYVHuedGSEuxCIVRSd9ERK2Wyi0tZaW6pqU2dbHpD+afltK3PWc+qVKVgoujFxgOh4RuRZzszTE1xS68xxUhclOn697a4HOjUYncbD+inRlnNRU4Erav54atpJMGC6V3FX8R22aXRY8RYZ2wLnLOxU1Ue+5lG71QCkRxw8QvG0vjcjowRamaYhFTMS1lloJlU+iN0lKZSsEBVcwwgz6Jmyrjjt+9x02XD67/wLBNdy5Y9dzoc7XM32A0AiAsdNwkch+/wP4mVqprWnnkJj9xI8uyabVUMTw37/eO4OX3j/JqnFA0zuc06ZuJTVNSU/sHrJuKo/Ek9h5
LiSGrw0wzoRcg9T4X7ye0rScVtZysCPuRcNxSCm3Uxp4bZig2auL37uFhAMD5CyYDSBlG8+kgTZjDrglc3PDGedq0FPM5jmfkxumQNL2ZfG6H4THM01LDY/xYNzMUu5zmrQcylYKLj1PkpgrpH43g928f5v8ficTxl51HC3pOtkLwZYkS6MtsWXhSHd6onoz8hKXIDYDcOxTnkpZqq1ciaoH80lJi7ptVJHADdIHipmc4jE/88BVc9sgmPPLXVG+Yg0qUpd7rQr0uSsJKTPfnUDG1+9go4kkZ9V4XFx2FoO/P0VDjRhMTN0rkZk5nPe9cbGVaODcU2zBy48ySlgKApfPaeT8lFnUjioO6EEydc35hhh9DlmVNKfh4wlKyQMpvY1RpNVmJwB4ZCgsp2MxN/PQdihNJdcahkedGfDwYZfOryHNTNbz6QT+SMjC3sx7/ungqAOBvwlj7fLAcufGbRG5qzCM3XorcAMi94++YhR43DBYaNprzYgVRcLH0WbEMxb/Zcoi/5wdf2o14IondSmpjpkEH6Kl5lIOzNvPzuxqKUtKvF1x1XtVQfFBIfzERZGUsAxNAjRZn+ownTpO0VCIpY3tPStyceFwj75diNCSXyB99WkodeaCKm6FQjJ+LxRzoawWxYWC7iZmZpYP7g1EcVcaqmHpuhE7DIuK1hiI3ExBWrXDW7DZ8eHpqXsjf9g5m+pWsWPXc1HicmhAlm8vEjMbkuTEn/7RUdnHDBMHhobG80kiai4pysXAXqRScCQ8AODYawcY9A/jgaCqFNMsghTRNKQe3mpZKJmX8nzLF/eNz2wvaV4aYlmqscSsdYbXCfnKjj6cMraSFmR+q0LlXpcBpMjjz2GgE4VgSDilVycZuclYiVYR1eHdiJbpr1OeGRW1aaj08CjxeaMSNibBqqHHxSM1u5fzWL4YZrPN7TBcptCJumJjhk8dJ3FQPV39sFu646ARcdHIXTpuW6i2w9dBwWqvuXMjFvDqtRV1tz5qUujmpkRt1HyhyoyXXDsW5pKUm1aX6sCTl3LwqDHEGGKucEaeC59u5NtXNN+XZYHO6Xnr/KD7oG1UeyyxurFQC3ven9/HWwWH4PU780ynH5bWfekRxw/xM+hXrlGa/aVWLEaySrdWO4kYyjtwwX1STPzX7q9EgQksUDltcsqhpjdAVmMGGzFrtLFxMmv3atJQRkiRhiq5S0UzIu7mY1l4LI4mE8lzm8wiZ6GH3mnKPMyFxU0Smt9XisiXTsXBKE6Y012Byow/xpIwtB4byfk6rfW4AbQOp2XpxYxC5Ic9NCibySlEt5XBIWKiMjLj4R3/FA3/elVOLAKPyS/H7fH03w2MxflG+6qMzAQB/2XmMV9wYiRuWYhuNxLMapAPhGH7yUqpa8FsXn4j2InkRxLQUOyf0VVhTW/38JmRlYOnAqP0jN/o/M+vhw/xGPHJTQCdsIh198YXPQDQPlDHyJ0ZuJmXoscPOXUZrXebITVLW+rzEdhRm6WW26GL3mnIXrNDdrURIksTH27+5fyjv57HquQGAS05NrY4vWzKNl8eytFQ0nuQ3ZYrcaMnVoGtlaKYIS8mMROK45/kdePqNQ5b3TRzaxxBz2fmmplgUqb3ei3PnpYbYbjsSwNtKNGd2e7rnxud2ci9XtijU+z0jiCVkdDb48E+nTslrH40QhR1LB+ibA05rESI3WcRNOJbgHVVby7DyzoYqbrR/Z+4TUs7zRsVYSuKmuPDIjXLc+Q1E82AZxY0YLZrTUW+6nRi5Sc3IylwtBWivLZl63DDcPHJD4qbqWTQ1JW7+uPUIzrnnz/j7e1/iFR1WCVscnAkAnzypCy/f8HF885Mn8MfqvC4eRmTmSorcaMnVcxPKIZoGAP921gzccuE8TFfSOv+r+FCswKJJoj9KzGXnaypm4mRqix9tdV7Mn9zAf+Z1OTC1JV3cAOoYhmziZqeS3vpQp/kFN1+WKmLss2dMA5BefttS6xEGHGZOCbP
oldsp8YWAnTBr4sdmlaVFbsby76dEpBPWl4Irx1VYEDcDyt+iuXb8DelnzGrl32caKiyKm9ZacxEvXlvE9gOZuhMzatysWop9ZuS5qVoWKZGbrYcC2Nsfws6+UXz9mXdyeg6rhmIgFS2a2urXhA0lSUq78FHkRgsXNzmmpaxGbnxuJ77wkZn42eWnAQDeOjhk2SvDBJe4YnI4JF6ymW9aShQ3APCR49v4z86Y2Wq6QrM6QHOXIm6OL0JvGz0/XnEqfrLiVJ5O87md+FBH6nWWzuuAJElqVUs08+fDytq7m/0lH9CaD2xhom87ws5lZqZmIqeQAa3F5M39g7j7j9v5cVCp6EvBawyqpZhny8ykW0pOn96CCxZ04tOndWdsICimpTJ1Chf9NPEcIzf6aJB+Dtx4Y7+lShUxv6sBfo9TE8J888AQBoJRyyHMMW5oy/9Aaaxx49hoFINBitwYkev4BVXc5Hb6TGuthcshIRRN4EggbKljb8SkK6jb6UAskcg/csNu6opYuWTRFDy2YR/C8QQuP3Oa6e/xXjcWIzfFaNynx+Ny8MZ1jJ+vPB2PvLKHCx61qiVz5Gav8jkws7TdUGdL6dJSIW35ujhfyw585am3sPtoEK9+cAzPrTq73LuTNxEWpXVpIzfatFTqMy9HWsrldODHKxZl3a67Rb3WzM0QTRXH8YiN/KyIG70HkQzFVYzb6cCVH0ldbP9ubjtmTaqFLKdW7lYZy6Eyxwy2uhvWRW7KnRO1C6VOSzHcTgcv0f/A4opWLdvXvlahXYqZOGE39Q911GPdVz6GP173Efzd3A7T3+NpqSyRmw9KGLkx4rimGtz6D/P56pWvsLN4bvYcS+0n675sNxwmhmJ2c631pt4n99zYQNwkkzIvOWYerkolbfyCW10IsegrMxTrG6naibmdDVzUnDW7zXQ7SVKjwoaemwxpKb/uekhpqSrn+qXHY+2XP4qHLjsNx7enDi7Wjt4KTIgUJG5qtJUUEZ1JbqLDyjwtdyjmk3Vz/5uIfW+sYOS5AdSLTL7zpZi4YZEbIGXMndvZYPYrAFQRsG/A/BgORuI4pLy/UkRurGC1FJx1+Z03ufjeoGJgNluKRaRY9LCRn+PZPTeb9w3ikz96Bd9ds72Yu8rpHdEOVh0Jl19w5Ut6h2I1WsuOLTbDzI7VdgynQ8LTXzwTT151Bs6bb754AdSCBbG3kn6+nRH6NH2501J0dysxkiTh+I56OB0SZij9RPYYiJs39g9i66H0VQ6vliogxNeoa/DF2mP7bThLpxywjr9WhQL7/Mzms2SiQ2m0xSp9ssHTUrpVUCGTwWVZ5oP+9JVG2WCem95ARNPrQ+QDpZy8rc6b1mBvvDBqtqZHlmV+zp14nLkZs5w4TJr4BXW+L6tpqWg8iS/96k28fXAYP1n/AfpGcpvwbgV9VC+XjtZ2Qz9bSlxksGszj9yU6Vi3it/jwhkzW7N6y5igjiVz89zU6D03lJaaOLAbwwGdX2E4FMOlP30N//DDV7Bxd7/mZ0VJSykha7bCYE2W7FgdUg74VHADocC6bYqwqbcsJZALLG3Sa1HcqJN4dWkpoZFfrgyGYvz3cm081uR389EG+uOYsbOX+W3Kl+oxaramZ/9ACIFwHB6ng0dV7Qb33OgiN3pTu9jnJlMfpWfePMijagDwl/ePFXV/AbXjM4NVpFUi+uILhzCsciyagCzLFRG5yQWjyI2ltBRFbiYurMnSsVHtyb+tJ8APnhe39/HHZVnOqYmfGc1+bSUFCxM3+Ma/dNGOqNVS2hvh/20+iBNvfx7f/v17msd55CZHQzGgzvzqGbYauTFOIRYSuWGr9Wa/O+NKzAhWkQeYm4pZI8ByCga2iswUudl6KJWSmju5PufPYbww63OjT0uxBUw8KfOojhEv68TM3n7rKXLGvv4glt/3Mu770/uGPx8M6cVN5Zan8+uvcHzw0R6xBEYicb5QqBZx48rkucklLUWem4kDEzf64Xb
v947w73cLKSsxklBIWkrfvZRNQa6nyA0A82opNin7ob/s0XTkZYPh8klLsfkvRy2uZs2qpbi4SVgzQYuwCeVm7dqzwUzIZukGNXJTHr8NYFyyq2dnX+q8m5fFZ1ROzDoUh3SRG5/bwY2gmTwurFv6x+dMAqBWi+XCH7f2YHvPCO77006eghTRNxKs5MiNOkdOPddFszpr4Of3OKumQIMPzxTFTcL4OiRC1VITmDYlBdAfjGhCx2KOWvTjiJUehZRtN/pZJYWSllIufvoJyxMVdsLGkzJvljYWTeA9oeHiW8IIjVAktz43IsyDwspHsxExSUuxC1A0nntaqk8R1/lOMJ7ZlhItbCq1HnbDK6e48VvoUMxSg5ObijMaohSYTQUP6doRSJLExXbQIJWa+h3V6M1K6fflEbnZLpwX24+MpP18UJ+WquBJ5eqoFfX6y9NSsUTF+G1ygaeljMYv5BC5YSb3ckHiZhxh8zxiCVlj/BPLN3uFdAVbNXicDj7zIx/01VIsctNQQ5EbQDerSTmJd/WNQryfMOOpLMs8LVWXR+SGXQStVLUAQmWb3lDMxE0epeAsLZVpFk0mTu5uAgC8sX8IsixreskkkzIODpa/d4yPt8k373PTq0SwOos096oUmE0FZxV74g2FHY/s/NbDKvTqvS7ekfqIxfSoyPu9arTGqEnfYEjb96UaIjc1bvVcFyfOV5vfBhAMxcK1JWahQ3GdVxUzHqejIJ9oMSBxM454XU6eChJPeDGMOxKJc5+F6rcp7M+kT0ux2R8UuUkhhlrZZ89SFgwWUQvHkmALmnyqzVgX02DUWgO+rGmpfDw3BaalTp3WDJdDwq6+Ucy4+Q9YcteL/CbXNxJBLCHD6ZDKKhrU8Qvmnw/zPWXq7FpuzKaCh2Lp0UMmboxM8ABwcDAlbo5rruHv+dhoJOdeSYeHVUOyUUsA1k+LDe+taM+NwZBcMeXZP2r/Hje54jIoVohYiNyIAq+hxl32jt8kbsYZdgAMCoJmWDcPhomQXKZPW3nN/mAEkbg6KLDcYUO74HI6+AqZncRslctuGHuU8H1QiATom1ZZod7nAmsCaiV6kz0tlbu4YZ6v9jwjNy21HnxBaU4JpMqPH3p5NwDg0FAqatPZ4Cso2lgoalrKPHJTaHpuPDCbLRUyuDawhdOoSeSGpaSOa6pBa60HbqcEWU73AGYiHEtoFmNG0+GZuGKRu0qO3KjGbUHcCClPHrnxV8+11ONk0cLcDMVtwqTxRhtkBUjcjDOq50K9KAzqDHhsNZDLXKlMsBV6LCFjm5Ij97gcvIqKSJ8vxVabp01PzQdj5tmgkA5wOHJfmTgcknoMWJgDZFYt5S2gQzFLSxVyU79h2RzcuHwOJitDK1/ZlarC4dGBHPvnFBtfFkOxLMtcXNo5pcBSBKL/QZZlfrMRrw3ZIjfsujKp3guHQ+LXBas9l4D0Kr9+g6gMW5SxhpWVLG7GDK7B7PtQLIEBPnrBvgI5V4wiN+rgTPN7kXge6Rdj5YDEzTjDFL5YLqmvLmA/K0YZOJASMszM/Ob+QQDA5EZf2cOGdoJ9xmwKMKtmOmlKE4DUCjUcS/AeN7nOlRJpMjgGzOAdiovYxI9HLPJMSwEpL8gXz5mNdV/5GFwOCYeGxnBwMKRGBzIM5xsPsjXxG4sluGCwcwSTG4qT6SkCQDtzrjaLuGHHGxPXbKU9kEPaSO/R6TcQLuwzZy0DBoLRtMhTJZBMymqHeIPITTia4EMzq2mhaOS5sRK5ET2Idvh7k7gZZ5p1q3ZZlnlaiq10WBOsYqWlAKCzkYmbIQDgK24iBV+NKZ85q/CYOamW/6xnOCyYiQvpO5QevTMjYtI8K19DsSzLguem8NWm3+Pi3X037h7AIZtEbmqyVEsxQ7/LIZXd+JgJh0ETP1HciMdFtrQU63PFbsQNNda6GouwNFSXcv04FoymNQ1k59CU5hpIEpCUjdNXxeToSAS3PrsVr+maoBZCWJg1Z5S
WCkXVaqmWOvtG/3JFrZbKTdxIksQHdC6e2VLCPbQGiZtxRk1JpE6KUDTBw39M3LA+FUYh0Xxh5s7N+1KRm1zb7lc7+pshE5iT6ry8VPjw8JiQlso/ctPMIzdW0lLaDqkMtyt108s1cjMaifPjKt9qKT3sQrZpz4B9IjdKdUs8KRum7tgNvdEGxsdMuAw8NyxVKUngvW2A7Gkpds1h4roxD3HDnmOWUuYfjSfTXo8dX3VeNzfQlyI19bu3D+MXf90DWZZx3ZNv4v+9tg/XPflmxq7UuRDStOIwNhQzcdNq49RmrqhN/NJLwbPNI7zrHxfipuVzccuF80u3gxYpv+tngtFSq9zYlJOClYF7nA6+kmYrr2J5bgC1IoTdfGbYdApyuVAvWKnPnl2M2+q9mNzow+6jQRwZCvP0VT5l4Ay9wM2EaYdiJfeda+SGpaRqPc68mhAacdq0Fvw3duON/YM8wjClzOLGJ/QlCUUTaKzRfn4simHnlBRgbCgWh6mKwoyV4o6YihvlPSviOh9xw7btbPCh1uNEMJqqGBIrL0UTbludF/3BaNHFTc9wGNc9uQWJpIxQLIFXP0hFbHoDEWzcM4CPfWhSwa/BFjo+t0PjrxNHe/DITRV5bgzHL1goBQeAs49vw9nHm08dH08ocjPO6M2kzNTY6HfzsDLrU8FPrmKkpXTlrjMnla/Bmh1RIzdJxBJJ7oNqq/NicmPqRn1keEwYOlr4OIxCqqXy9dzwSqkilj+fOrUJALCzbxS7j6aqymaV+fjyOB28Kk0/VgMAArzXk73FDQvMGKWl9MdEnS9zE7+hYkRuWNO6Wg9ahaakDL1Ppa1eqdQscjn4H945wgXf99bs0Pzs1Q+KMy/LqDsxIKal4jzCa2dTeq648xy/YDcqZ0+rBF4KrpwUbAXZVOPmqx8W5mU9OoqSlmrUixuK3IgY9a5wOiQ01bi5v+DIcJg3T8tnrhSj2aAdgBmmfW4MLkBWYJGbYqWkAKC1zosZberxVON2lr0xniRJGUcwqL2e7B28dior5YSFFAHzgZl5bgZ1nhsmbgI5iBsWaW7yu3lTUrGPjfhZ+z1OtNayeXrFjdy8dXAo7TE2CPjVXcXx3fBye931l/0/MBbnC9GqSks50isxrfS5sRuVs6dVgr5Shl0smv0evvIKlMBzI3psPC5HWVvj2xGxLwq7ELfUeuBwSOjkkZsw7xGUz0RwRi5dik07FOcZuelTyn6LYSYWWTStmX8/p7M+rzL5YsOjcQbihkXgbC9uDA3FxsdEprRUIinz60pTAZEbdsw21Xi4cBGjMmIfKJ/LyUV0X5FHMLyjdAwXuf0TJwAAth4ezjhfyypmBR3sWsGaGTodku3Tm7nAPDea8QsW01J2omR7OjAwgBUrVqChoQFNTU244oorMDqa3qpb3P7f//3fMWfOHNTU1GDq1Kn40pe+hOFh7UEsSVLa15NPPlmqt1F01BtbTPOvmJZK89wUIS21cEoj/z7VwKtyDtLxQFzlc7+NEnbnhuKh8TcUR3NMS+3sHcGzbx4yLcU8WoQycCM+cVIX//7v53cU9bnzhX1mYYMuxaNFiMCNB+w0NSoF199o6jJUSwXGYnycCLsR81lUGRod6hGjP6yUXCwHH+Mzr1J9oNiiinn9ikEskeR9p/7r0yehq9GHCxZ04p9OPQ5djT7IMrC9J33mVa4w/11a5Ea5HrPKwGa/2xZivlh4uOdGTEspo4AqKHJTsjN7xYoVOHLkCNauXYtYLIaVK1fiqquuwhNPPGG4/eHDh3H48GF8//vfx/z587Fv3z5cffXVOHz4MP73f/9Xs+3Pf/5zLF++nP+/qampVG+j6KgdiqNIJmU+zLKpxp02G0Y1tBUubup9biyd14EXt/fi6xfOK/j5qg2fUN7Jwuzs4t2lRG56AmH+tymKoTiHUnB9CoKJ04hwARoMRvHPD27A8FgMf911DN/754VplUCl6sr70ePbcP3S49E3EsHnlkw
r6nPnS6Zy8EImu48nTkf6EENTz02GaikWKa7zuvgNyspwUT1DQq8c5lcKCFES/bRy1hKANXcsBgcHx5BIyvC5Hbj45OPwj6dM4T+bO7kBh4fD2H4kgA9PL6wc2agLNKBej6vRbwOokZuoQSp0woubbdu2Yc2aNfjb3/6G0047DQDwwx/+EBdccAG+//3vo6urK+13TjzxRPzf//0f//+sWbPw7W9/G5/97GcRj8fhcqm72tTUhM7OzlLseslhaamknBIx3HPjd6NB57kpZrUUAPzoX0/BYCjKDbKEit8gcjNJidwwv9JQKIYjSii6kFkyzflUS5mkpWJC5Gbd9j6eYnhq80GcM6cdFy6crPk93p24yGkpSZJw/dIPFfU5C0WsatHDmjEWkl4cD3jkRkxLmaQq6zMYigeF6wyD3bSDOYkb9XlY1Gs0ov6+XhCwqrlDRRQ3e5U5b9Nba9PE+4c66vHi9j7DgZ65MqYTagx9KrPqxI3DIHKTqDxxU5I93bBhA5qamriwAYClS5fC4XBg48aNlp9neHgYDQ0NGmEDANdeey3a2tpw+umn45FHHklrIqUnEokgEAhovsqF1+VErXKyDIaimq6hdbxaSuu5KXRwJsPndpKwMUFc5bMGfm2KAGjwufjfjI2vKKQjKfvd4bGYJt1ghFr2q0tLGTTxe0PpPs24f93OtHOjEiZhF4vM4qYyIje8iZ9h5EbvuVGuHwbiRl8pBagpOauRG1mWNR5BJgxFMcUFgdJnqFsYwRAogg8GUMdFGPXqYk3kipEGM/M8snQ1o9rEjdvIc8OOuQqyM5RkT3t6etDe3q55zOVyoaWlBT09PZae49ixY7jzzjtx1VVXaR6/44478D//8z9Yu3YtLrnkEnzxi1/ED3/4w4zPddddd6GxsZF/dXd35/aGigxLSwyEoqrnpkbw3JQockOYw1vXh1VDMauAkCQJk3XeAfEmkSvs75+UkfWCn8tU8G1HUqL9O/+4ALUeJ3b0jmD9+0c1v9c7zOZKVb+4YZGNTIbiQtKL44HLIC1l5sNix3A0nuQRP8aQQeTGL5Q0W2EkEuciq8mvptFFccOeiy0WGmvcXEi/XwQfDKB2D59Ulx59nNKcElO5psESSRl3/XEbHn5lD3/MLC2lr4wy2o9Kxs1nS6WXgrurNXLz1a9+1dDQK35t37694J0KBAK48MILMX/+fHzjG9/Q/OzWW2/FWWedhVNOOQU33XQTbrzxRtxzzz0Zn+/mm2/G8PAw/zpw4EDB+1gIzbVqnxOxtJJdLFgPjrEiGoqJzIiVI6rnRr1ozWzTls43FRC58bgcQvTOXNzIsswjM+ml4OkXoAMDKZPlwimN+NfFUwEA//3SB/znwUicr+o7bDwJu1hkKgUvxoyw8SCTodgscgOo74+hnysF5J6WGlKGRPrcDvjcTsNZVmpvGPWaNaezHkBxTL6A2GAzfYHB0mC5ips/bj2C/35pN+783XvYfTSV0hozKQVv8nsg+odZdKpacGUQN1VbLfWVr3wF27Zty/g1c+ZMdHZ2oq+vT/O78XgcAwMDWb0yIyMjWL58Oerr6/HMM8/A7c58E1m8eDEOHjyISMS81NDr9aKhoUHzVU7Yqn8gGBP63Hh4nxu28iqmoZjIjFbcaNNSQCqXL1JI5Eb/emZoZghlidyEonEuyrpb/Fh51gy4HBJe2z2Atw4MAdB2JxY7ylYrVgzFhcwIGw+MZ0sZV644HRIXFfqKqSFdjxtATUtF40lLgw5ZlJEdu3UG1VZ6QzEAzJ2cOnd2FE3cpC8+GKwT+2gkbjkiBQB/3q5GOF/cnrp3GQk1IPU5i6moqVUmbtxsEr1Rh+IKitzktGyZNGkSJk3K3tZ6yZIlGBoawubNm7Fo0SIAwIsvvohkMonFixeb/l4gEMCyZcvg9Xrx3HPPwefLHjrfsmULmpub4fVWzkpU7HPCq6WEyA2QujgVs4kfkRlRbAzy7sTqBez4Dm1foNYCB+U
11LhxeDicUdyIXhH9McAuMkwAMcNmg8+Fxho3Gmvc+OTJXXj6jUP46V9244F/PRW9ilehYwKkpAD1M4sY9AIarRDPDatcMfbcpF8X6rwuhKKJtIqpTJEbICWOswleVinItuOl5EKUiInGGiEiNpdHborjdWTtDIzETa3HCa/LgUg8if7RKPwt1v6+HxxVDch7+1OGZbXPTfpzNPs9XGSx6efVghq5MWg/UEHipiR7Om/ePCxfvhxXXnklNm3ahL/+9a9YtWoVLr30Ul4pdejQIcydOxebNm0CkBI25513HoLBIB5++GEEAgH09PSgp6cHiUTqIPvtb3+Ln/3sZ9i6dSt27dqFn/zkJ/jOd76Df//3fy/F2ygZYjm4mAvXrLwi8aL2uSEyw8TNYCiGgWD6xVNsethW5y048mElcsNWjm6nxC84DG4oVi46R0fTxyr821kzAADrtvUiHEtwcVPsMnC7wiKeRpEbNcJgb3FjaChmJnODQoM6X3qqCDCO3Hhd6oiKkIXU1GhE61NihuJRA0NxrZiW6khFyrf3jGQt/rCCvg+ViCRJ/HGrXZFlWdaIG9ZDh5vODa6/pygjRwDg+Pb6tJ9XMqqhuLLTUiU7sx9//HGsWrUK5557LhwOBy655BLcf//9/OexWAw7duxAKJQ6kN544w1eSTV79mzNc+3ZswfTp0+H2+3GAw88gC9/+cuQZRmzZ8/GvffeiyuvvLJUb6MkML/GkaEwV8RsRVXvS628RsJx05wvUXyY2BAviGLoWZyVVIwyakviJkNa0qeLShjdvE7oakBHgxe9gQhe3zvIxc1EqJQCBHGToVqqUgzFRlPBjSY0q71utMeVGCFmSJKEWo8LI5G4RXGjHVlRr3REFlNgIYMF2az2WjgdEkbCcRwZDhtWOeUCE/KTDDw3QCriemhozPI8q5GIOkYBAPYr3rWALlIl8h/L5mAgGMWlH54KZxU18APSDcWZvH92pmRndktLi2nDPgCYPn26RsWfc845WVX98uXLNc37KhWWltqjhD9dDomvDuq8LvQikhI3MfLcjBeNOoNws9+t6eLsczuxZGYrNuzux9IidOC1Mtcn0/gNn67MWa26Uy/4kiRh8YxWPPfWYby5f5Cb1ydaWspI3KhpKXufW4q2ySktBUBzswaAwaB29AKjxuNUxE12f8po2DhyMxZLIJGU4XRIhr1hvC4nZrbVYmffKHb0jBQkbsKxBH9vRpEbAIYDPTMxoBNBR4bDkGWZt+SoMxjR0V7vw88u/7Dl/a4kXHxuXeqYiydl3t26ktJS9l62VCmsAdwepRlVk9/Nm1GxVcJIOEbVUuNIvdeFep+LXziNBMCj/3Y63u8dSfPf5IOVyE2mtCRbQbFtBnkfE61IO7m7Cc+9dRhbDgxxsaYfolqt1Hi0nxEjnkhygWD78QsGhmKzwZmAeZdioz43gFgObiVywxofujT/AilTcYPPzUWSPt03QxE3BwdDWV8nE6wrsNtpPs+JlWofsxi5YSKos8GHnkAY0XgSg6EY/wztPn+s2Lh1TfyiGQob7Ezl7GkVwW5AYo8bBjuRhsdi/KCitFTpkSQJM4XUk9FgUY/LgROPazRcMecKFzcZSsHZTCSfwevxyA1PSyk3L10PjgXKTLH3jgS4UXK6rqy9WjFr4ieWPleKoThplJYy8NzwSkzdjV0/EZzBREguaSkmoLwuB1xKSoal+cwmaU9WBPVhpc9SvrAeN6213rTuxIzWHD03TARNbvJxYdQzrI5aaZhg4kY/OFMjbirIc1M5e1pF6FdPRuLmqHBikrgZH2ZNUm/6pTYJNvpz8NwYRG5Y1+pEUkY8kTRsrw+olSpHhsO8z8iM1okhbswMxexG7HE6bL8SZYZio9lSRjcaFpU7ElBFRDiW4FHgphrjyM1YDmkpdo2SJEmomNLOw9OXT3ey+WyFipsMPW4Y6kBPi5EbZbvWWg///HoCYzwtNRHaJoi4dcUKzG/jkJBW2GB
nKmdPqwj96nqSYFBlqyJW7ghUlomrkrngRHUO08fmZG95UAi5VEvVGKzQRR9WOJ40TTvU+9y8JT2QuiEe1zwxRnCYGYp56sTmfhsA3KyaNKyWSt9/FiE5IowfYMeYQ0pPsfBGfpHskZsRAxO2mgZL/b5ZV18euSlwLEKmSilGW46em/5RNRrEzPZHhMjNhEtLcc+NNi1l94WAnon1V7MJ+tCw6O9gqwTWcM3ndsBRZW58u3LuvHbccuE8TKr34uTuppK+VkNO4sbccwOkVuZmaQcAmNfZgAMDqZvKSd2NGqN0NaOmpbR9brh3xOZ+G0AVN0ZN/IwWPTxyI0RIxB43+msJ+wxCBqZrPdxQLNzs9fOlQrzxnfazZQu4gaC1aIoZmRr4MVgPqmMjVj03Uf57LCWz91iQR8vsXlFXbPhQXsVQzBv4Vdh1Y2L91WxCjVttNAVoxQ07kdgMoIl2YpUTSZLwhY/MHJfXKtRQLEkSPC4HovGkIm6iyvOmh+vnTW7AC+/1AgDOnNVW8L5XCuxzS/PcVEilFCCIGwvjFwB1FMDe/iDiiSRcTgcXFEYjQ3JJSxnN49KPYAiZ9IaxcrxbIVMDP0bOkRsubrz882ApXJdDmnDXYOYpZCJajdzY/3wRqSwpViVIkqRJH4hpKRYCZZNv7d5kjMgPS6XgWcZv+IQuxcyYzOaWiVxy6hS01nowqd6LFWdMLWi/KwmztJRaBWN/L4XToImf2eBMIOWnqvU4EY4lsUtpTHc0w6DJXNJS+lJw8Xv2M7O0FDvehwoUN2paytxzwyI3A8GopbES/cJzsoXme4dT3ZSbaz2mxuVqxasb7ZKpOs/OVNbeVhHiKqqrUfVAMHHTy8VNZallwhrsYi9OWtbDq6XMxI1gmGU3DaOZV1Nb/Xjta+fixa98DO31E6MMHFBN13pD8YjBTdquZIzcGHixHA4JC6c0AQC++dx7CIRjXNwYTYLnkRsLaSkjzw1La7GozphJWooZ6FmkMV+Y+XdShkaaLco5kJTVKkIrz9la68Vk5VrMozm15iKqWtGPdqnEuVIAiZuyIR4o8yarlTlsNcnynZVwASZyR6yQM4veZPLcAKq4OTYa4Tc/s2nlbqejIiIVxcSsid9ohuZsdsNY3CieGxMPxOrzPgSP04ENu/tx67Nb1Y6+BpEbtRQ8hyZ+vvS0VJAbilmfG+0xW+918fdSSGrKiqHY5XRw75mVXjcsfdVS60Fno/Z5WyaguGERQX3kptI8N5W1t1XECV2p/iNtdV7elwFIFzN+EjdVidvp4DcAs4t9OIu4YWFiVl7r9ziL0oOnWqgTypTF7uc8LVUB55bxVHDzyA0AfHh6C375hdSA4t+9fYRPhTeKdvAmfhbSUkH+uakiuU4wFCeTMo826tNSkiTxfjFDGXo7ZcOKuBF/3p+l100yKXNPUludh5esM/SVrRMBfeQmzDvlV5ZcsP/ZXaXc/on5aKvzYNG0Zs3j+rJDo6FtRHXQWONGKJowFTdjJv4FBnucldc2mXRsnaiwSFVSTjXu46MJKmSuFCA28VMfU1fS5teG02e04MxZrXj1g368tnsAgFqOLVJjMS2VSMq8+aFoxGaLr2A0rnkOo3R6k9+DwVAs78hNTOjnlMlzA6R8Nzv7tP3CjBgai4EFxZprPakIp9fFj5G2CShu9J6bcIbWA3amsqRYFeFzO/GV8+bgnDntmsfTxE0FXICJ/MhWQZJtthjzOxxk4sbAbzOR8bnVDrqsIVvq+8ozFIsTmln/Ebcrs9GVTYVnzBXS34xM87dEgkLaSkxLidExto0kGUcbrbQ/yASLsDgdkqG3TKS11lrpOYvsNAmz5KYoFWcANF3LJwoschNNJJFMyhlbD9iZytrbCUCdV3vBpchN9ZLtYp8tHMyE78HBlLgxqpSayEiSxBcL4iBJI++IXWF9aZIyeGqN+fGy9Sv6u7ntmgnwM9vSb9Q8cpNl/AL7zNxOSZP6rBWqrUJC/yC
jCiNeMWXB5GvEIUXEt9d7s/b+EiumMnFsNN04fJIysgRAUebIVRqiiIkmklkLG+wKiRuboY/ckOemerEauTHz3DC/w6FBityYwQSkGLmpJM+NS7iJs/RJzGJTNYdDwh0XnYBprX585e8/ZFjtYjZ/S49Z+byYlmLbmFV4Nhkc77Is45Zn38E1v9ysmWFkxP7+1NDNqUJkxQxmBO43ETeHhsZwdCTCzcSi7/Gs2aleUDVuJ/dGTiTE4yQiVLdVWuTG/mf3BMPvccIhqRcyo46zRHVgdLEXyWYoZpEbtqKlYyUdtlgIVHjkBkilppwOJxc3zI+TifNO6MR5J3Sa/rzG4lRwdc6S9jMT01Ih7skx/lyNejv9dVc/fvnafgDA8q1HcNHJx5nuwz5F3ExrzS5uWCTGyFC891gQF9z/FyRlGcuVz6ZdMFv/w8LJaK3zYFprrenk8WpGFM3ReJIbiylyQxSEJGk7YrbUZq4KICoXy54bk5Ww3hCrH4pIqJU9YloqwErBKyBy4xTSO8lkKtJhNS1lBaueG7PeQGqH4gT33Jh1fmZtCsRGfpv29PPv1+84mnEf9hxLNSWcZmHwK4vEGKWlHt+4D6FoAuFYEs9uOaw8pyqYJEnCmbPacFzTxJjBpod1PwdSbQcqtVqqsvZ2giA225qITaQmClzcmJTG8mopkxWTvlGaWY+biUxDTeozEgWkmmKpAHEjRG4SgrABiiRuTEZU6Bk1qTCr5ZGfOC8VN+uqbiTmPzgW5N+/e3g44z7s6E2Jmzkd6cZoPZnSUm8dTH+d7ubs0aCJhFgxFcnQEdvOkLixIWLJ5kRsIjVRYF1bzdNSmcPB+hVytgqSiQiLfA4IzdwqVtwkZE3VVDGaqjHhnD0tZey5qRXTUoqh2Cwi1sANxerxvueoKm529Y3yyhw9sUQSH/Qp4qYzu7hR01JacSPLMh+tIGIlGjSR8Aq9bihyQxQNsZMoiZvqJVtaKpvnRn8ToWqpdFg/FGYclWVZmJFk/89LTEslZBmxuBi5KXzmkdjnhlVjMQHAvD2A6lNK78OldigOmnQnZjT40s3dh4fH+PdJWa380/Pe4QCiiSQaa9yW0kUsLTU8FtO8j6OjEYxG4pAk4GMfmsQf1/cbm+iIXYrVUnCK3BAFMqlBFTftDeS5qVaylYJnq5Zq0JkdqVoqHf0KPhxLIq649SvFUMz0TSIp8zk/gDaqky/s2JJltSPtt36/DRfc/xf8+M8f8O3MGh+y6OFYLMGjO7UmaSmWImTbReIJHsVh0er9A6G03+sbCeOvHxwDAJw2rTlrGTiQMuuzzQaF1NSBgZR46mqswfc/dRKWzuvA3f+0oOLmJpUasUtxhEeQK+szsv/ZPQG59MNT8X7PCFaeNaPi1DJhnUyRG1mWBUOx8UVF306f0lLpsBU8a9s/Ekl91pIE+Cuk+sMpSYjLMhJJGUkluuJxOooyrVoUzmPRBDxOBx5+ZQ8A4Ccv7cJ1S48HYF5hJlZG9Y2E0x4TYZEbZuhmgtPtlLDguEYcGQ7zcm/Gmq09uPaJN/hsrY8c32bpfTkcElpqPTg2GkV/MMp9jAcU8dTdUoNJ9V787PLTLD3fREP03ITjmZuJ2hUSNzZkRlstfr7y9HLvBlFijEpjGZF4EmyckFnkRj9fp4XETRqsmRubjD0qVP1YiQDYAYfSGyIhy0jwSqni7LvL6YDH6UA0kcRYLKFJ4YRjSQwEo2ip9ZiWgntdDjgdEhJJGX2B1GdsVi3Fxc1Y6m/A/iaT6ry8WmmfIG5kWcY9z2/nwqbG7cQFCyZbfm9M3IgVU6xtwnFNZCDOhFgtxSI3ldbnprL2liCqCCZuRiJxzdRnQFu9YrZiEiM3LofEDcqECjOK7h8IIZZI8htdJVWWsUZ+SSEt5S7ijYalG8ZiCX7zZ2zvSZlvzRofSpLEK6b6Rpi4yZyWYiKKi5t6L6YKfyfGB0eD+EAxHF9x9gw8edUZmkrSbDC/4jGh1w3
73miIKKFSDZEbEjcEUSbEBmH66A2rlHI5JNOSX3E0h2y4BdHV6EON24l4Usbf3/sSdikVN+311m+S5UadLyWrDfwcxbt0iyMYDg+FNT/bfmQEgFAKbuBTYmKGiRWzkTGiX2ckHOdDLSfVe3nX4f0DavXU1kOpku3TpjXj1n+Yj5O6m3J6X0a9bkRBRZjD7BCpaimK3BAEkQNup4NXluh9N9nMxEBq1cwuOOI8HEJFkiTMUwZG7u0P4bbfvAtAW5Fod1j6LCGIG0+R0lKA2pdmLJbgvhkGq14KZKgwY+KG/a5ZnxuX08GFT2AsxtNYorg5MDDGq7a2KVGjeZMb8npfzExO4iZ3PJo+N0q1FEVuCIKwipmpmDXwM+tOzFhz/Uex/IROfP3CeaXZwSrg2/+4gH/OLK1TSTc3npaSZWEieDHTUmrkZkjXULInkBI3oyaeG0AVN6zBoFlaClAr/ALhGI6OpsTQpDovr5YaiyUwqOzDNiVqZDTN3ApqWkoVNywtxVoEEMZ4NR2KlWqpCituIXFDEGXEVNxYiNwAKfP5g59bhEXTWkqzg1XAvMkN2PT1czVpkfYKEjcschNPFHf0AqNG8Nyw43DmpJQHpmc4JUDMOhQDQENaBZX5MSuaisUois/t5IKTDYLdfiQVuZnbmWfkhqelVM8Ne81K+vuXA49BEz9vhZWCV9beEkSVYdbrJlsDPyI3vC4nPj63nf9/YY7+jXLCPDeayE0xxY1HjNykohzzFEHBxY1JEz8gvb9S5sgN63UT40KDVf2x5nwHB0MYCEa5QXmuhY7ERqT3OErw9Jq+0pDQInYo5oMzKXJDEIRVzCI3ldry3M78fx+diRq3Ex0NXpwxs3IiXU4Dz02xSsEBoMatem7YUEsmKHpHIojEEwgqaVL9+AUAaKwx7lpsRL1PTEspURSlUemU5pS4OTQ0hh09qZRUd0tNRrGUCb3nhs2ZcjulCTntOxdEz02lRm6ozw1BlBF9YzMGb+BHkZuiceJxjXjphnPgdEgV1RyTiZt4UkY0XoK0lCfdc3N8Rx3vX7NTGVgpSTAUBfpp9JnTUuogU2YoZpVrxzWzyM0Yv7l+qD2/qA2g9jhiokaMFBWjAWI1I1ZL8chNhV2LSNwQRBlRG/nFNY9zQ3GFXVDsTi59UuyCUzAUs8GZxY3cqJ4b1pKg2e/BpDovegJhbFO8L001bsORD/qeQa215ikfloY9PBTmN02WIprSpEZu2Ps83sIEcDPY0FQ2X4oqpazDxGU4lkCUp6UqK3JTWXtLEFUG8yDo01LBDAZOYmJhnJYq3qWblW6HonGelmrye9CpVDCxqiWz8R5iNKfW4+SRICOYZ+eDo6loUL3XxbcXIzfvK9GiD3XU5femkD5fSq2UInGTDea5YXPAgMorBacrJ0GUEbO0FPM4ZArxExMDZihOJNWp4MUUN+wYGwnHuaG4ye9GZwMTN6nITXOtsbhpFcqq27JERVg0hQkmcUjwjLaUkNl9dJRHLD9UQORGP1/q2AiVgVuFRW7E6xJFbhQGBgawYsUKNDQ0oKmpCVdccQVGR0cz/s4555wDSZI0X1dffbVmm/379+PCCy+E3+9He3s7brjhBsTjcZNnJAh7YzZfipXe5mumJKoHsYlftASGYtaYr2c4DDYFpLHGzY2+7/dmjtywEReAauI1gwkmPgZBiKJ0N9fA43IgEk9ieCwGr8uB2e35R24AtddN/yhFbnKBeW7YdcnpkOAqoqAeD0p25VyxYgWOHDmCtWvXIhaLYeXKlbjqqqvwxBNPZPy9K6+8EnfccQf/v9+vDjhLJBK48MIL0dnZiVdffRVHjhzBZZddBrfbje985zuleisEUTIaTMQNpaUIBmvil5BlxEuQlmIjFVg3Yp/bAZ/byXvBMEOumXBh3YUBZDXqdjRohYVmPprTgdmT6vCeEik6a3ZbwZ6zlP9nFP3BCG/mR+ImOywtxdLllRa1AUoUudm2bRvWrFmDn/3
sZ1i8eDHOPvts/PCHP8STTz6Jw4cPZ/xdv9+Pzs5O/tXQoDZweuGFF/Dee+/hl7/8JU4++WScf/75uPPOO/HAAw8gGo1meFaCsCc8chPWRh8pckMweORGaOLnKaK4YcMwDw6mhlay6ie98VYvTBhupwMnK32DvnjOrIyv1aEzdOtf48KF6tTvz5w+NcueZ6elTo3csNLzbKkzQk1VMkGYyUdlV0oibjZs2ICmpiacdtpp/LGlS5fC4XBg48aNGX/38ccfR1tbG0488UTcfPPNCIXUKbEbNmzAggUL0NHRwR9btmwZAoEA3n33XdPnjEQiCAQCmi+CsAPMUGwWuSFxQ7AMVEJW01KuoqallGNQEdhMcOuHi2aqNPvp5xbhd/9+Ns6d12G6DZASN2LFlf41Vp41HVd9dCZu/8R8LJ3Xrv/1nGkTet0YpcIIY5jJnH1mlRhBLske9/T0oL1de2C6XC60tLSgp6fH9Pf+9V//FdOmTUNXVxfefvtt3HTTTdixYweefvpp/ryisAHA/5/pee+66y5885vfzPftEETJYIbi4bEYZFnmYf1gJGUoriND8YSHTQBPlqhaSj/pm5V2dyml2Qx91EWkvcFnqcze43JgRlstn87OKqQYfo8LX7ugeHPSmIFZNBRPqidDcTb0jRiNpsHbnZzOkK9+9atphl/91/bt2/PemauuugrLli3DggULsGLFCjz22GN45pln8MEHH+T9nABw8803Y3h4mH8dOHCgoOcjiGLBVsnxpMwb9wFCWipDt1diYqBoG8RLJW50q3KWthG9NIB5WipXjhdMwid05Tc3yiosLdUzPEajF3LAr1tU1RtMg7c7OV05v/KVr+Dzn/98xm1mzpyJzs5O9PX1aR6Px+MYGBhAZ2en5ddbvHgxAGDXrl2YNWsWOjs7sWnTJs02vb29AJDxeb1eL7xeOqAJ++H3OHkn2MBYnIeDyVBMMDRN/JjnpogGT/28KJbKqfE4IUmArFRQFVq5xLhw4WT8cWsPmv1uTBcqrUoBey+sbw6NXrBGNURuctrjSZMmYdKkSVm3W7JkCYaGhrB582YsWrQIAPDiiy8imUxywWKFLVu2AAAmT57Mn/fb3/42+vr6eNpr7dq1aGhowPz583N5KwRhCyRJQoPPhcFQDMNjMd44jTw3BMOphG5KVQqu71/TKkQ2ZrbV4oOjQQCqD6NQ/mFhF2q9LrTWegw7HhcTVgp+aChVCdZaS6MXrOD36CM3lXcdKomheN68eVi+fDmuvPJKbNq0CX/961+xatUqXHrppejq6gIAHDp0CHPnzuWRmA8++AB33nknNm/ejL179+K5557DZZddho9+9KNYuHAhAOC8887D/Pnz8bnPfQ5vvfUWnn/+edxyyy249tprKTJDVCxqxZRqKqZqKYLBdIyYlmI+nGJQ73VpBrSKaZuHL/8wPjy9Gd/754VFez0A+Picdiyc0lTU5zSiVdewr6Ox8sZvlAP9dafqIze58Pjjj2PVqlU499xz4XA4cMkll+D+++/nP4/FYtixYwevhvJ4PPjTn/6E++67D8FgEN3d3bjkkktwyy238N9xOp343e9+h2uuuQZLlixBbW0tLr/8ck1fHIKoNPS9bmRZ5h2KKS1F8LSU0KG4mGkpSZLQXu/D/oHUtVgsz57eVounrj6zaK813ujnXHXrDMyEMfrITSVeh0q2xy0tLRkb9k2fPh0yS+YC6O7uxksvvZT1eadNm4Y//OEPRdlHgrADLHLDGmZF4kkklFaxNH6BcApN/GIlGJwJpEYSMHEzc1JpfTDjSZPfjQafi5uJpzT7s/wGAaSa+DEvIFCZkZvKaztIEFUGny+liBuWkgKoWorQD84s/mwpAHAIPpRpLdUjACRJwrzJakWWvgKMMEaStMbrSqwwI3FDEGVGnQyeEjXMTOz3OHl3WmLi4tAMzmRN/Ip76b75grnwuhw4c1Zrxc0QygbrngwAHzm+rXw7UmGIA0b1naQ
rAVoWEkSZadAZislMTIi4NJGblLjxFDkttWhaC/5y08d5FLGa+OLHZ+Ptg8OY3V6HborcWKatzstL6CuxqzNdPQmizOjTUmp3Yjo9CbOp4MWPruhHIVQLjTVu/OqqM8q9GxWH2CKgEiM31RV/JIgKRG8oVnvckJmYAJySOBW8NJ4bgtAjFvyYTYS3M3SGEESZ0aelBkOpSbzN/sq7oBDFhw3JLNVsKYIw4jOnT0WDz4XvXrKgIn1YFPcmiDLT4GOTwVMRm4FgStw0kbghoBqKtbOlyGhOlJaPHD8Jb91+XsV2dK48OUYQVYY+LTUUSv3b7K8+cyeRO2ITv2gJZksRhBmVKmwAEjcEUXYoLUVkgombUk0FJ4hqhM4QgigzrFpqNBJHMikL4oYiN4TWUEzihiCsQWcIQZQZ1sRPloGRcByDQSUtVYEVCkTx0c6WYn1u6NJNEJmgM4QgyozX5eRTmQPhGI/ckKGYALRpKea5cbsq1wtBEOMBiRuCsAHMVDwUipGhmNCgidxQWoogLEFnCEHYAGYe7g9GMECGYkLAYeC5obQUQWSGzhCCsAFs6u6hoTFEFV8FeW4IwHi2FEVuCCIzdIYQhA1oUYTMrr7UoDq3U0Kth8YvEOoE8GhcRoyPXyDPDUFkgsQNQdgAvbhp9nsquoEWUTxY5CYcT/DH3NTEjyAyQmcIQdgANpju3cMBAEBnY3VOaCZyh82WGouq4oY8NwSRGTpDCMIGdDSkxAybK9XZQOKGSMHSUqFonD9GnhuCyAydIQRhA7qaajT/n0yRG0LB7dBGbhySWh5OEIQxJG4IwgZ0NWnFTGdjjcmWxERDjdykxA1FbQgiO3SWEIQN0EduZk2qLdOeEHaDGYrHYilxQ34bgsgOnSUEYQN8bic6Grz8/3M668u4N4Sd0BuKqVKKILJDZwlB2IQzZrby77ub/WXcE8JOuBypy3RQMRRTjxuCyI6r3DtAEESKqz82C6FoAld/bCYcZBglFJiYCceoOzFBWIXEDUHYhHmTG/DQZaeVezcIm6GvjCLPDUFkh84SgiAIG6OP1FDkhiCyQ2cJQRCEjXHpIjduF6UsCSIbJG4IgiBsjIsiNwSRM3SWEARB2Ji0yA2JG4LICp0lBEEQNsblJEMxQeQKnSUEQRA2Jt1QTJ4bgsgGiRuCIAgbo09LeahDMUFkpWRnycDAAFasWIGGhgY0NTXhiiuuwOjoqOn2e/fuhSRJhl9PPfUU387o508++WSp3gZBEERZoVJwgsidkjXxW7FiBY4cOYK1a9ciFoth5cqVuOqqq/DEE08Ybt/d3Y0jR45oHvvpT3+Ke+65B+eff77m8Z///OdYvnw5/39TU1PR958gCMIOUBM/gsidkoibbdu2Yc2aNfjb3/6G005LdVz94Q9/iAsuuADf//730dXVlfY7TqcTnZ2dmseeeeYZ/Mu//Avq6uo0jzc1NaVtSxAEUY3oDcUUuSGI7JTkLNmwYQOampq4sAGApUuXwuFwYOPGjZaeY/PmzdiyZQuuuOKKtJ9de+21aGtrw+mnn45HHnkEsixnfK5IJIJAIKD5IgiCqATcDl1aipr4EURWShK56enpQXt7u/aFXC60tLSgp6fH0nM8/PDDmDdvHs4880zN43fccQf+7u/+Dn6/Hy+88AK++MUvYnR0FF/60pdMn+uuu+7CN7/5zdzfCEEQRJmhyA1B5E5OZ8lXv/pVU9Mv+9q+fXvBOzU2NoYnnnjCMGpz66234qyzzsIpp5yCm266CTfeeCPuueeejM938803Y3h4mH8dOHCg4H0kCIIYD/Rihjw3BJGdnCI3X/nKV/D5z38+4zYzZ85EZ2cn+vr6NI/H43EMDAxY8sr87//+L0KhEC677LKs2y5evBh33nknIpEIvF6v4TZer9f0ZwRBEHZGL2YockMQ2clJ3EyaNAmTJk3Kut2SJUswNDSEzZs3Y9GiRQCAF198EclkEosXL876+w8//DA++clPWnq
tLVu2oLm5mcQLQRBVicMhwetyIBJPAiBxQxBWKInnZt68eVi+fDmuvPJKPPjgg4jFYli1ahUuvfRSXil16NAhnHvuuXjsscdw+umn89/dtWsXXn75ZfzhD39Ie97f/va36O3txRlnnAGfz4e1a9fiO9/5Dv7jP/6jFG+DIAjCFvjcTlXckKGYILJSsj43jz/+OFatWoVzzz0XDocDl1xyCe6//37+81gshh07diAUCml+75FHHsGUKVNw3nnnpT2n2+3GAw88gC9/+cuQZRmzZ8/GvffeiyuvvLJUb4MgCKLs+NwODI+lvifPDUFkR5Kz1VFXIYFAAI2NjRgeHkZDQ0O5d4cgCCIjH7vnz9jXn1oIfvOTJ+DyM6eXd4cIokxYvX/TEoAgCMLm+FxO/j15bggiO3SWEARB2ByfW71U13qdGbYkCAIgcUMQBGF7vG5V0NR5S2aVJIiqgcQNQRCEzfEJ4sbvIXFDENkgcUMQBGFzfC71Uk2RG4LIDokbgiAImyNGbshzQxDZIXFDEARhc0RDMUVuCCI7JG4IgiBsjuiz8ZO4IYiskLghCIKwOVNb/Px7v5vSUgSRDRI3BEEQNmfxzBb+vcNBs6UIIhsU3yQIgrA5J3Q14scrTkVLrafcu0IQFQGJG4IgiArgggWTy70LBFExUFqKIAiCIIiqgsQNQRAEQRBVBYkbgiAIgiCqChI3BEEQBEFUFSRuCIIgCIKoKkjcEARBEARRVZC4IQiCIAiiqiBxQxAEQRBEVUHihiAIgiCIqoLEDUEQBEEQVQWJG4IgCIIgqgoSNwRBEARBVBUkbgiCIAiCqCom5FRwWZYBAIFAoMx7QhAEQRCEVdh9m93HzZiQ4mZkZAQA0N3dXeY9IQiCIAgiV0ZGRtDY2Gj6c0nOJn+qkGQyicOHD6O+vh6SJBX1uQOBALq7u3HgwAE0NDQU9bkJFfqcxwf6nMcH+pzHD/qsx4dSfc6yLGNkZARdXV1wOMydNRMycuNwODBlypSSvkZDQwOdOOMAfc7jA33O4wN9zuMHfdbjQyk+50wRGwYZigmCIAiCqCpI3BAEQRAEUVWQuCkyXq8Xt99+O7xeb7l3paqhz3l8oM95fKDPefygz3p8KPfnPCENxQRBEARBVC8UuSEIgiAIoqogcUMQBEEQRFVB4oYgCIIgiKqCxA1BEARBEFUFiZsi8sADD2D69Onw+XxYvHgxNm3aVO5dqijuuusufPjDH0Z9fT3a29tx8cUXY8eOHZptwuEwrr32WrS2tqKurg6XXHIJent7Ndvs378fF154Ifx+P9rb23HDDTcgHo+P51upKO6++25IkoTrr7+eP0afc3E4dOgQPvvZz6K1tRU1NTVYsGABXn/9df5zWZZx2223YfLkyaipqcHSpUuxc+dOzXMMDAxgxYoVaGhoQFNTE6644gqMjo6O91uxLYlEArfeeitmzJiBmpoazJo1C3feeadm9hB9zvnx8ssv4xOf+AS6urogSRKeffZZzc+L9bm+/fbb+MhHPgKfz4fu7m5873vfK3znZaIoPPnkk7LH45EfeeQR+d1335WvvPJKuampSe7t7S33rlUMy5Ytk3/+85/LW7dulbds2SJfcMEF8tSpU+XR0VG+zdVXXy13d3fL69atk19//XX5jDPOkM8880z+83g8Lp944ony0qVL5TfffFP+wx/+ILe1tck333xzOd6S7dm0aZM8ffp0eeHChfJ1113HH6fPuXAGBgbkadOmyZ///OfljRs3yrt375aff/55edeuXXybu+++W25sbJSfffZZ+a233pI/+clPyjNmzJDHxsb4NsuXL5dPOukk+bXXXpP/8pe/yLNnz5Y/85nPlOMt2ZJvf/vbcmtrq/y73/1O3rNnj/zUU0/JdXV18g9+8AO+DX3O+fGHP/xB/vrXvy4//fTTMgD5mWee0fy8GJ/r8PCw3NHRIa9YsULeunWr/Ktf/UquqamR//u//7ugfSdxUyROP/1
0+dprr+X/TyQScldXl3zXXXeVca8qm76+PhmA/NJLL8myLMtDQ0Oy2+2Wn3rqKb7Ntm3bZADyhg0bZFlOnYwOh0Pu6enh2/zkJz+RGxoa5EgkMr5vwOaMjIzIxx9/vLx27Vr5Yx/7GBc39DkXh5tuukk+++yzTX+eTCblzs5O+Z577uGPDQ0NyV6vV/7Vr34ly7Isv/feezIA+W9/+xvf5o9//KMsSZJ86NCh0u18BXHhhRfK//Zv/6Z57J/+6Z/kFStWyLJMn3Ox0IubYn2uP/7xj+Xm5mbNdeOmm26S58yZU9D+UlqqCESjUWzevBlLly7ljzkcDixduhQbNmwo455VNsPDwwCAlpYWAMDmzZsRi8U0n/PcuXMxdepU/jlv2LABCxYsQEdHB99m2bJlCAQCePfdd8dx7+3PtddeiwsvvFDzeQL0OReL5557Dqeddho+9alPob29Haeccgoeeugh/vM9e/agp6dH8zk3NjZi8eLFms+5qakJp512Gt9m6dKlcDgc2Lhx4/i9GRtz5plnYt26dXj//fcBAG+99RZeeeUVnH/++QDocy4VxfpcN2zYgI9+9KPweDx8m2XLlmHHjh0YHBzMe/8m5ODMYnPs2DEkEgnNhR4AOjo6sH379jLtVWWTTCZx/fXX46yzzsKJJ54IAOjp6YHH40FTU5Nm246ODvT09PBtjP4O7GdEiieffBJvvPEG/va3v6X9jD7n4rB792785Cc/werVq/G1r30Nf/vb3/ClL30JHo8Hl19+Of+cjD5H8XNub2/X/NzlcqGlpYU+Z4WvfvWrCAQCmDt3LpxOJxKJBL797W9jxYoVAECfc4ko1ufa09ODGTNmpD0H+1lzc3Ne+0fihrAl1157LbZu3YpXXnml3LtSdRw4cADXXXcd1q5dC5/PV+7dqVqSySROO+00fOc73wEAnHLKKdi6dSsefPBBXH755WXeu+rhf/7nf/D444/jiSeewAknnIAtW7bg+uuvR1dXF33OExhKSxWBtrY2OJ3OtGqS3t5edHZ2lmmvKpdVq1bhd7/7Hf785z9jypQp/PHOzk5Eo1EMDQ1pthc/587OTsO/A/sZkUo79fX14dRTT4XL5YLL5cJLL72E+++/Hy6XCx0dHfQ5F4HJkydj/vz5msfmzZuH/fv3A1A/p0zXjc7OTvT19Wl+Ho/HMTAwQJ+zwg033ICvfvWruPTSS7FgwQJ87nOfw5e//GXcddddAOhzLhXF+lxLdS0hcVMEPB4PFi1ahHXr1vHHkskk1q1bhyVLlpRxzyoLWZaxatUqPPPMM3jxxRfTQpWLFi2C2+3WfM47duzA/v37+ee8ZMkSvPPOO5oTau3atWhoaEi70UxUzj33XLzzzjvYsmUL/zrttNOwYsUK/j19zoVz1llnpbUyeP/99zFt2jQAwIwZM9DZ2an5nAOBADZu3Kj5nIeGhrB582a+zYsvvohkMonFixePw7uwP6FQCA6H9lbmdDqRTCYB0OdcKor1uS5ZsgQvv/wyYrEY32bt2rWYM2dO3ikpAFQKXiyefPJJ2ev1yr/4xS/k9957T77qqqvkpqYmTTUJkZlrrrlGbmxslNevXy8fOXKEf4VCIb7N1VdfLU+dOlV+8cUX5ddff11esmSJvGTJEv5zVqJ83nnnyVu2bJHXrFkjT5o0iUqUsyBWS8kyfc7FYNOmTbLL5ZK//e1vyzt37pQff/xx2e/3y7/85S/5Nnfffbfc1NQk/+Y3v5Hffvtt+aKLLjIspT3llFPkjRs3yq+88op8/PHHT/gSZZHLL79cPu6443gp+NNPPy23tbXJN954I9+GPuf8GBkZkd988035zTfflAHI9957r/zmm2/K+/btk2W5OJ/r0NCQ3NHRIX/uc5+Tt27dKj/55JOy3++nUnA78cMf/lCeOnWq7PF45NNPP11+7bXXyr1LFQUAw6+f//znfJuxsTH5i1/8otzc3Cz7/X75H//xH+UjR45onmfv3r3
y+eefL9fU1MhtbW3yV77yFTkWi43zu6ks9OKGPufi8Nvf/lY+8cQTZa/XK8+dO1f+6U9/qvl5MpmUb731Vrmjo0P2er3yueeeK+/YsUOzTX9/v/yZz3xGrqurkxsaGuSVK1fKIyMj4/k2bE0gEJCvu+46eerUqbLP55Nnzpwpf/3rX9eUFtPnnB9//vOfDa/Jl19+uSzLxftc33rrLfnss8+WvV6vfNxxx8l33313wfsuybLQxpEgCIIgCKLCIc8NQRAEQRBVBYkbgiAIgiCqChI3BEEQBEFUFSRuCIIgCIKoKkjcEARBEARRVZC4IQiCIAiiqiBxQxAEQRBEVUHihiAIgiCIqoLEDUEQBEEQVQWJG4IgCIIgqgoSNwRBEARBVBUkbgiCIAiCqCr+fyNSdcR53dCfAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.plot(ugos[0:1000,213,200])" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "58fe1617-790d-4260-92de-417258ee37a8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(11322, 256, 256) (11322, 256, 256)\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAyQAAAKqCAYAAADLx5oYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOy9ebwlRXk+/lRVn3PvnRkG2YYBZBMVwhYEERQVXACJoqKIawBRNAaJikti1AAaJd/407igRoOICxiDu0ZFENyCqBhQgaiIIIoKCrLO3HvO6arfH1Vv1VvV1X363Lkzc+/Qz+czc8/p00t1dXXV+7yrMMYYdOjQoUOHDh06dOjQocNGgNzYDejQoUOHDh06dOjQocP9Fx0h6dChQ4cOHTp06NChw0ZDR0g6dOjQoUOHDh06dOiw0dARkg4dOnTo0KFDhw4dOmw0dISkQ4cOHTp06NChQ4cOGw0dIenQoUOHDh06dOjQocNGQ0dIOnTo0KFDhw4dOnTosNHQEZIOHTp06NChQ4cOHTpsNHSEpEOHDh06dOjQoUOHDhsNHSHpsGjxta99Dfvttx+mp6chhMCdd965sZvUYZ4477zzIITAlVdeOXbfww47DIcddtj6b9Q8QPdx0003rZfzn3HGGRBCrJdz5/Bf//Vf2HLLLXHvvfdusGuuC26//XYsX74cX/nKVzZaG9b3GOjQoUOH+yM6QtKhNX7605/i2GOPxc4774zp6WnssMMOOPzww/He9753wa91++2347jjjsPMzAze97734eMf/ziWL1++4NdZV2it8bGPfQyHH344tt56a/R6PaxatQpHHHEEPvShD2Fubm5jNxFve9vb8PnPf35jN2OD4rbbbkNRFHjBC15Qu88999yDmZkZPOMZz9iALZsc6+v5lWWJ008/HaeeeipWrFix4OdfH9hqq63w4he/GG9605sW9LyHHXYYhBBj/51xxhkLet2Fwr333ovTTz8de++9N5YvX46tttoK++23H17xilfgd7/7nd/vK1/5yka/h1122SXbt3/zN39T2ffOO+/ES17yEmyzzTZYvnw5Hve4x+F///d/s+f94he/iP333x/T09PYaaedcPrpp2M0Gq3v2+nQocMCQRhjzMZuRIfFj8svvxyPe9zjsNNOO+GEE07A6tWr8Zvf/AZXXHEFbrjhBvzyl79c0Ot97Wtfw1FHHYWLL74YT3ziExf03AuFtWvX4phjjsFFF12ERz3qUTj66KOx7bbb4o477sC3vvUtfOUrX8EJJ5yAD3/4wxu1nStWrMCxxx6L8847b6O14bzzzsMLX/hC/PCHP8TDH/7wxn0HgwEAoN/vr9M1jzrqKHz3u9/FrbfeimXLllV+/+hHP4oTTzwRn/nMZ1qTErqPG2+8Ebvssss6tS+H0WiE0WiE6elpv219Pb/Pf/7zeMYznoHf/OY32GGHHRb03OsT//d//4c999wT3/jGN/D4xz9+Qc558cUX49Zbb/Xff/jDH+I973kP/vEf/xF/8Rd/4bfvu+++2GuvvTAcDjE1NbVBrVl1GA6HOOigg/Czn/0MJ5xwAvbbbz/ce++9uPbaa/GlL
30JF154obc4vvzlL8f73vc+bMxlf5dddsEWW2yBV7/61dH2hz70oXjEIx7hv2ut8ZjHPAY//vGP8drXvhZbb7013v/+9+M3v/kNfvSjH+EhD3mI3/erX/0qnvzkJ+Owww7Dc5/7XPz0pz/F+973PrzkJS/BBz7wgQ12bx06dJg/io3dgA5LA29961ux+eab44c//CEe8IAHRL/ddtttC3ad++67D8uXL/fnTK+1mPCqV70KF110Ed71rnfhFa94RfTbq1/9alx//fW4+OKLN1Lr5gfq/42JdSUihOc///n42te+hi9+8Yt4znOeU/n9ggsuwOabb44nP/nJC3K9hUBRFCiKDTMtf+QjH8EhhxyypMgIAPzFX/wF9t57b5x33nkLRkgOP/zw6Pv09DTe85734PDDD8+6DyqlFuS6C4HPf/7zuOqqq3D++efjec97XvTb7OysJ/iLCTvssEOj9RIAPv3pT+Pyyy/HhRdeiGOPPRYAcNxxx+GhD30oTj/9dFxwwQV+39e85jXYd9998fWvf92/PytXrsTb3vY2vOIVr8Aee+yx/m6mQ4cOC4LOZatDK9xwww3Ya6+9sgRh1apV/vNNN90EIURWm5u6PJC//HXXXYfnPe952GKLLfDoRz8ahx12GE444QQAwIEHHgghBE488UQAwHe+8x0861nPwk477YSpqSnsuOOOeNWrXoW1a9dWrvezn/0Mxx13HLbZZhvMzMxg9913xxve8IZon1tuuQUnnXQStt12W0xNTWGvvfbCueeeO7Y/fvOb3+Ccc87Bk570pAoZITzkIQ/B3/7t30bbtNZ417vehb322gvT09PYdttt8dKXvhR//vOfK8e///3vx1577YWpqSlsv/32OOWUUypxNNdffz2e+cxnYvXq1ZiensYDH/hAPOc5z8Fdd90FwPb5fffdh49+9KPeNYL6sq7/Aaupf8tb3oLddtsNU1NT2GWXXfCP//iPFRe0XXbZBU95ylPw9a9/3cf77LnnnvjsZz+b7ZO5uTmcdtpp3gXjmGOOwR//+Mdon1wMyezsLM444ww89KEPxfT0NLbbbjs84xnPwA033JC9DgAcc8wxWL58eSS4EG677TZ84xvfwLHHHoupqSkAwPe//3086UlPwuabb45ly5bh0EMPxf/8z//Unp+jzbOia/zVX/0VtthiCyxfvhz77rsv3v3ud/vf0xiSuud32WWXQQiBz33uc5VrXHDBBRBC4Hvf+15te2dnZ/G1r32tYn2c5P0FgG9+85t4+MMfjunpaey222744Ac/mI2DaTuerrzyShx55JHYeuutMTMzg1133RUnnXRSpS2HH344vvSlL20UTX8uhoTeA+qPmZkZ7LPPPvjmN78JAPjsZz+LffbZB9PT0zjggANw1VVXVc77s5/9DMceeyy23HJLTE9P4+EPfzi++MUvjm0PvQOHHHJI5bfp6WmsXLkSAHDiiSfife97HwBErlKEtnPTpO98HQaDAe67777a3z/96U9j2223jayX22yzDY477jh84Qtf8GPnuuuuw3XXXYeXvOQlEZn/27/9Wxhj8OlPf3qidnXo0GHjoCMkHVph5513xo9+9CNcc801C37uZz3rWVizZg3e9ra34eSTT8Yb3vAGvOQlLwEAvPnNb8bHP/5xvPSlLwUAXHjhhVizZg1e9rKX4b3vfS+OPPJIvPe978Xxxx8fnfMnP/kJDjroIFx66aU4+eST8e53vxtPf/rT8aUvfcnvc+utt+Lggw/GJZdcgpe//OV497vfjQc/+MF40YtehHe9612Nbf7qV7+KsizHavlSvPSlL8VrX/taHHLIIXj3u9+NF77whTj//PNx5JFHYjgc+v3OOOMMnHLKKdh+++3xjne8A8985jPxwQ9+EEcccYTfbzAY4Mgjj8QVV1yBU0891bso/OpXv/LC8Mc//nFMTU3hMY95DD7+8Y9HfVnX/wDw4he/GP/0T/+E/fffH//2b/+GQw89F
GeddVbW0nD99dfj2c9+No466iicddZZKIoCz3rWs7LWoVNPPRU//vGPcfrpp+NlL3sZvvSlL+HlL395Y5+VZYmnPOUpOPPMM3HAAQfgHe94B17xilfgrrvuahyPy5cvx9Oe9jRcdNFFuOOOO6LfPvWpT6EsSzz/+c8HAFx66aV47GMfi7vvvhunn3463va2t+HOO+/E4x//ePzgBz9obF+bZwVYt6DHPvaxuO666/CKV7wC73jHO/C4xz0OX/7yl2vPXff8DjvsMOy44444//zzK8ecf/752G233fDIRz6y9rw/+tGPMBgMsP/++zfeWxOuuuoqPOlJT8Ltt9+OM888Ey960Yvw5je/ORvv0mY83XbbbTjiiCNw00034R/+4R/w3ve+F89//vNxxRVXVM53wAEH4M4778S111477/YvNH75y1/iec97Ho4++micddZZ+POf/4yjjz4a559/Pl71qlfhBS94Ac4880zccMMNOO6446C19sdee+21OPjgg/F///d/+Id/+Ae84x3vwPLly/H0pz89Szo5dt55ZwDAxz72sUaC9tKXvtRbgmgsffzjH49+bzM3AZO98zlceumlWLZsGVasWIFddtklIuWEq666Cvvvvz+kjMWURzziEVizZg1+8Ytf+P0AVFxBt99+ezzwgQ/Mkr8OHTosQpgOHVrg61//ulFKGaWUeeQjH2le97rXmYsuusgMBoNovxtvvNEAMB/5yEcq5wBgTj/9dP/99NNPNwDMc5/73Mq+H/nIRwwA88Mf/jDavmbNmsq+Z511lhFCmF//+td+22Mf+1iz2WabRduMMUZr7T+/6EUvMtttt53505/+FO3znOc8x2y++ebZaxFe9apXGQDm6quvjrbPzc2ZP/7xj/4fP/d3vvMdA8Ccf/750TFf+9rXou233Xab6ff75ogjjjBlWfr9zj77bAPAnHvuucYYY6666ioDwFx44YW17TTGmOXLl5sTTjihsr2u/6+++moDwLz4xS+Otr/mNa8xAMyll17qt+28884GgPnMZz7jt911111mu+22Mw972MP8NnqeT3ziE6Nn8KpXvcoopcydd97ptx166KHm0EMP9d/PPfdcA8C8853vrNwDP1cO//3f/20AmA9+8IPR9oMPPtjssMMOpixLo7U2D3nIQ8yRRx4ZnW/NmjVm1113NYcffnjlPm688UZjTPtnNRqNzK677mp23nln8+c//7n2HuiZcNQ9v9e//vVmamoq6rvbbrvNFEURvWc5nHPOOQaA+elPfxptn+T9Pfroo82yZcvMLbfc4rddf/31piiK6B7ajqfPfe5z2Xc+h8svv9wAMJ/61KfG7jsfXHjhhQaAueyyyyq/pWPAmPAeXH755X7bRRddZACYmZmZaB764Ac/WDn3E57wBLPPPvuY2dlZv01rbR71qEeZhzzkIY1tXbNmjdl9990NALPzzjubE0880Xz4wx82t956a2XfU045pTK+jGk/N/F7HffO1+Hoo482/+///T/z+c9/3nz4wx82j3nMYwwA87rXvS7ab/ny5eakk06qHE/v9Ne+9jVjjDFvf/vbDQBz8803V/Y98MADzcEHHzy2TR06dNj46CwkHVrh8MMPx/e+9z089alPxY9//GP867/+K4488kjssMMOrdwKmpDLrlKHmZkZ//m+++7Dn/70JzzqUY+CMcZrwv74xz/i29/+Nk466STstNNO0fHkomCMwWc+8xkcffTRMMbgT3/6k/935JFH4q677qrN5gIAd999NwBUshN95StfwTbbbOP/kfYSsNadzTffHIcffnh0vQMOOAArVqzAZZddBgC45JJLMBgM8MpXvjLSDp588slYuXIl/vu//xsAsPnmmwMALrroIqxZs6Z1H6ZI+59Sqp522mnRdgpCpesTtt9+exxzzDH++8qVK3H88cfjqquuwh/+8Ido35e85CWRm8hjHvMYlGWJX//617Xt+8xnPoOtt94ap556auW3c
UHFRxxxBLbZZpvIbevGG2/EFVdcgec+97mQUuLqq6/G9ddfj+c973m4/fbb/XO577778IQnPAHf/va3I202R9tnddVVV+HGG2/EK1/5yorb43wDo48//njMzc1FLimf+tSnMBqNxlrubr/9dgDAFltsMa9rl2WJSy65BE9/+tOx/fbb++0PfvCDcdRRR0X7th1P1C9f/vKXKxr5FNTuP/3pT/Nq//rAnnvuGVmlDjroIADA4x//+Ggeou2/+tWvAAB33HEHLr30Uhx33HG45557/Pi7/fbbceSRR+L666/HLbfcUnvdmZkZfP/738drX/taANal7EUvehG22247nHrqqa0y/bWdmwiTvPMpvvjFL+J1r3sdnva0p+Gkk07Ct771LRx55JF45zvfid/+9rd+v7Vr13p3Sg5K+EBuuvS3bt+cO2+HDh0WHzpC0qE1DjzwQHz2s5/Fn//8Z/zgBz/A61//etxzzz049thjcd111837vLvuumvrfW+++WaceOKJ2HLLLbFixQpss802OPTQQwHAx03QQr/33nvXnuePf/wj7rzzTnzoQx+KCMQ222yDF77whQCag/U322wzAKjUbzjkkENw8cUX4+KLL8YRRxwR/Xb99dfjrrvuwqpVqyrXvPfee/31SDjffffdo+P7/T4e9KAH+d933XVXnHbaaTjnnHOw9dZb48gjj8T73vc+3w9tkfb/r3/9a0gp8eAHPzjavnr1ajzgAQ+okIcHP/jBFaH6oQ99KABUajWkBJEEy1wMDeGGG27A7rvvPq9g76Io8OxnPxvf+c53vFBH5ITcta6//noAwAknnFB5Lueccw7m5uZq+7TtsyI//6YxOSn22GMPHHjggZHb1vnnn4+DDz648uzqYOYZg3Hbbbdh7dq12euk29qOp0MPPRTPfOYzceaZZ2LrrbfG0572NHzkIx/JCtTU7iYyV5Yl/vCHP0T/1meAdzq2SWGw4447ZrfTmP/lL38JYwze9KY3Vcbf6aefDmB84pDNN98c//qv/4qbbroJN910Ez784Q9j9913x9lnn423vOUtY9vedm4iTPLOj4MQAq961aswGo18zA1giVbu2c/Ozvrf+d+6fbkSq0OHDosXXZatDhOj3+/jwAMPxIEHHoiHPvSheOELX4gLL7wQp59+eq2AUJZl7fnaLhhlWeLwww/HHXfcgb//+7/HHnvsgeXLl+OWW27BiSeeWKvFzoH2fcELXuAD6FPsu+++tcdT1pZrrrkGf/mXf+m3b7PNNj5Q+BOf+ETlmqtWrcr6/dOxk+Id73gHTjzxRHzhC1/A17/+dfzd3/0dzjrrLFxxxRV44AMf2Oocdf2/PlKa1mUnmq9g3AYveMELcPbZZ+OTn/wkXvOa1+CTn/wk9txzT+y3334Awlh4+9vf7relWKx1Oo4//ni84hWvwG9/+1vMzc3hiiuuwNlnnz32uK222gqAFYr5OJnP+9sW48aTEAKf/vSnccUVV+BLX/oSLrroIpx00kl4xzvegSuuuCJ6BiTMb7311rXn+81vflMh25dddtl6K7pZN7bHjXkaf695zWtw5JFHZvdtSzABG1Ny0kkn4ZhjjsGDHvQgnH/++fjnf/7nxmPWx9w0CYi08Viv7bbbDr///e8r+9I2ssxtt912fntK/n7/+99HqYQ7dOiweNERkg7rBAokpEWCNN5phqEml5y2+OlPf4pf/OIX+OhHPxoFsaeBlA960IMAoDHgeZtttsFmm22GsiznVefkqKOOglIK559/vte0j8Nuu+2GSy65BIccckgjCSM3r5///Of+XgAbxH7jjTdW2rvPPvtgn332wRvf+EZcfvnlOOSQQ/Dv//7vXgiZlFjsvPPO0Frj+uuvj2ow3HrrrbjzzjsjNzQgaHj5dSjgdCFqdey22274/ve/j+FwiF6vN/HxBx10EHbbbTdccMEFOPzww3HttdfirW99a3R+wLqdTDoW2j4rusY111wz8TWant9zn
vMcnHbaafjkJz+JtWvXotfr4dnPfvbYcxKhvvHGG7HPPvv47W3f31WrVmF6ejpbfyjdNul4Ovjgg3HwwQfjrW99Ky644AI8//nPx3/+53/ixS9+sd/nxhtvBIDofClWr15dmRu48mCxgMZNr9db0JpLW2yxBXbbbbdoHqwbS23nJsJCv/Nk1ebEZ7/99sN3vvMdaK0jd8jvf//7WLZsmbfIkBLhyiuvjMjH7373O/z2t7/1CVI6dOiwuNG5bHVohcsuuyyrxSb/cHJZWblyJbbeemt8+9vfjvZ7//vfv85tIE0jb4cxppKhZZtttsFjH/tYnHvuubj55puj3+hYpRSe+cxn4jOf+UyWuKSpaFPstNNOOOmkk/DVr361ViOd9tdxxx2HsiyzLhSj0cgLgU984hPR7/fxnve8JzrHhz/8Ydx1112+bsbdd99dqUS8zz77QEoZuS8sX748m4K2Dn/1V38FAJVMY+985zsBoFK343e/+12UCejuu+/Gxz72Mey3335YvXp16+vW4ZnPfCb+9Kc/Zfu5rWXl+c9/Pq666ipvxeP1Gg444ADstttu+P/+v/+v4oIHNI+Fts9q//33x6677op3vetdlWcx7h6ant/WW2+No446Cp/4xCdw/vnn40lPelKj1YBwwAEHoN/v48orr4y2t31/lVJ44hOfiM9//vNRJfBf/vKX+OpXvxrt23Y8/fnPf670BQmbqTvOj370I2y++ebYa6+9au9xenoaT3ziE6N/842ZWZ9YtWoVDjvsMHzwgx/MWgTGzUU//vGPs7E0v/71r3HddddF7oRUYygdT23nJsJ83/k77rijYm0bDof4l3/5F/T7fTzucY/z24899ljceuutUTrhP/3pT7jwwgtx9NFH+5iRvfbaC3vssQc+9KEPRef+wAc+ACGEr2HSoUOHxY3OQtKhFU499VSsWbMGxxxzDPbYYw8MBgNcfvnl+NSnPoVddtnFx10ANsXnv/zLv+DFL34xHv7wh+Pb3/62156tC/bYYw/stttueM1rXoNbbrkFK1euxGc+85ls/MF73vMePPrRj8b++++Pl7zkJdh1111x00034b//+79x9dVXAwD+5V/+BZdddhkOOuggnHzyydhzzz1xxx134H//939xySWXVFLFpnjXu96FG2+8Eaeeeir+8z//E0cffTRWrVqFP/3pT/if//kffOlLX4qEgUMPPRQvfelLcdZZZ+Hqq6/GEUccgV6vh+uvvx4XXngh3v3ud+PYY4/FNttsg9e//vU488wz8aQnPQlPfepT8fOf/xzvf//7ceCBB/qA5UsvvRQvf/nL8axnPQsPfehDMRqN8PGPf9yTLcIBBxyASy65BO985zux/fbbY9ddd/WBtTn85V/+JU444QR86EMfwp133olDDz0UP/jBD/DRj34UT3/60yOhAbC+4y960Yvwwx/+ENtuuy3OPfdc3HrrrfjIRz4y9pm2wfHHH4+PfexjOO200/CDH/wAj3nMY3Dffffhkksuwd/+7d/iaU972thzvOAFL8Cb3/xmfOELX8AhhxwSaXGllDjnnHNw1FFHYa+99sILX/hC7LDDDrjllltw2WWXYeXKlVG6aI62z0pKiQ984AM4+uijsd9+++GFL3whtttuO/zsZz/Dtddei4suuqi27eOe3/HHH++FrjbxAoAV1o844ghccsklePOb3xz91vb9PeOMM/D1r38dhxxyCF72spehLEucffbZ2Hvvvf07BrQfTx/96Efx/ve/H8cccwx222033HPPPfiP//gPrFy50pMawsUXX4yjjz56UVRKXwi8733vw6Mf/Wjss88+OPnkk/GgBz0It956K773ve/ht7/9LX784x/XHnvxxRfj9NNPx1Of+lQcfPDBWLFiBX71q1/h3HPPxdzcXFQ75oADDgAA/N3f/R2OPPJIKKXwnOc8p/XcRJjvO//FL34R//zP/4xjjz0Wu+66K+644w5ccMEFuOaaa/C2t
70tIjPHHnssDj74YLzwhS/Edddd5yu1l2WJM888Mzrv29/+djz1qU/FEUccgec85zm45pprcPbZZ+PFL35xoxWtQ4cOiwgbNqlXh6WKr371q+akk04ye+yxh1mxYoXp9/vmwQ9+sDn11FMr6SXXrFljXvSiF5nNN9/cbLbZZua4444zt912W23a3z/+8Y+V69Wl/b3uuuvME5/4RLNixQqz9dZbm5NPPtn8+Mc/zqYqveaaa8wxxxxjHvCAB5jp6Wmz++67mze96U3RPrfeeqs55ZRTzI477mh6vZ5ZvXq1ecITnmA+9KEPteqX0WhkPvKRj5jHP/7xZssttzRFUZitt97aPOEJTzD//u//btauXVs55kMf+pA54IADzMzMjNlss83MPvvsY173uteZ3/3ud9F+Z599ttljjz1Mr9cz2267rXnZy14WpYz91a9+ZU466SSz2267menpabPllluaxz3uceaSSy6JzvOzn/3MPPaxjzUzMzMGgE8h29T/w+HQnHnmmWbXXXc1vV7P7Ljjjub1r399lJbUGJsC9MlPfrK56KKLzL777mumpqbMHnvsUUlFXPc8L7vsskoK1DTtrzF2TL3hDW/w7Vm9erU59thjzQ033FBpex0OPPBAA8C8//3vz/5+1VVXmWc84xlmq622MlNTU2bnnXc2xx13nPnGN75RuQ+e8tWY8c+K8N3vftccfvjhZrPNNjPLly83++67r3nve9/rf8+l/a17foS5uTmzxRZbmM033zw73urw2c9+1gghKulS276/xhjzjW98wzzsYQ8z/X7f7Lbbbuacc84xr371q8309HS0X5vx9L//+7/muc99rtlpp53M1NSUWbVqlXnKU55irrzyyuhc//d//2cAVMb5QmI+aX+f/OQnV/YFYE455ZRoG6VWfvvb3x5tv+GGG8zxxx9vVq9ebXq9ntlhhx3MU57yFPPpT3+6sa2/+tWvzD/90z+Zgw8+2KxatcoURWG22WYb8+QnPzlK0W2Mna9OPfVUs8022xghRGWstZmb2r7zOVx55ZXm6KOPNjvssIPp9/tmxYoV5tGPfrT5r//6r+z+d9xxh3nRi15kttpqK7Ns2TJz6KGH1qaF/tznPmf2228/MzU1ZR74wAeaN77xjZW09B06dFi8EMZshFK3HTp02CSwyy67YO+9924s7tdh/WI0GmH77bfH0UcfjQ9/+MOtjyvLEnvuuSeOO+641paVNnj605+Oa6+91mcvW2i88pWvxLe//W386Ec/2mQsJEsJ3TvfoUOH9YEuhqRDhw4dljA+//nP449//GOU6KENlFJ485vfjPe9733Z2Jk2SGs8XH/99fjKV76y3jJZ3X777TjnnHPwz//8zx0Z6dChQ4dNCF0MSYcOHTosQXz/+9/HT37yE7zlLW/Bwx72MF+PZxI8+9nPbpWVqw4PetCDcOKJJ/qaKx/4wAfQ7/fxute9bt7nbMJWW201b/LUoUOHDh0WLzpC0qFDhw5LEB/4wAfwiU98Avvttx/OO++8jdKGJz3pSfjkJz+JP/zhD5iamsIjH/lIvO1tb8NDHvKQjdKeDh06dOiwNNHFkHTo0KFDhw4dOnTo0GGjoYsh6dChQ4cOHTp06NChw0ZDR0g6dOjQoUOHDh06dOiw0bAkY0i01vjd736HzTbbrMu00qFDhw4dOnTosAhgjME999yD7bffHlIuPp337OwsBoPBRrl2v9/H9PT0Rrn2UsCSJCS/+93vsOOOO27sZnTo0KFDhw4dOnRI8Jvf/AYPfOADN3YzIszOzmKrmRVYg3KjXH/16tW48cYbO1JSgyVJSDbbbDMAdsCvXLlyI7emw2LC2Zf/Ckpaq5kUAtJZ0JQEJASUEHA/+/3IylaX36HU8XY6jh+rmKVOCoAfUhoDYwxKbaCN/a5h3LkBbYxvJ7UVCO2lc7ZB0tToOH6/6XmlrG5TGetj6frI34e7p/Q+c21JUfcc6q6dQ9mQk4M/z7RN6
TOwv1XPlduWQkbPXkBJoCckpLD31ncPtKdkbd/y+6jr2/SeCMPSVPpBCYG/PmDhlDaX33g7htpeR5vQLu23AUPXkbkxKN2N92T1fWkDPu6i7zoej+n4S8dY5bzJO0nvIz33UpvK+0/InVNJO+f0lICEQE8KFO75TxUSSthtUtp5SLJ3sXqv8T3rMS+UlMk7Hb3nAoptB+C/16FMLuf7xITPQx2eOY0F3mZ+P/aa8UXHzWvR+1rzrOueHz23wSg8S8LIfS5oPEr73tq/wq8dNBf3hPTtzc3/KZrmzqZ96vpj3FyaO386V6TjOD1nNAeBz5259uTXLAAY3Hcf/v6pj/Ry2mLCYDDAGpR4PnZAfwNHLAygcf4fbsFgMOgISQ2WJCGhSWDlypUdIekQYflmK+NFmIR7kQhEbOaXEwpHKfkAgjDAf0+FpqEOgs6wNNE+ufPnyMQ41AlP6TmCUBKuNY6UpGSE7s0LBBkyQvvVCSKcPOb24/s2LqANxIH3SSTgeKEzf46mZxO3L2zrqTDOvHDqBNAmcpkKcKnw1QQuPPt2SrGgc+OWW4wwLI0dwzqMXW2MFVC1/S1tP4ETEt8PbVk2kFwTlevQtYelriXlfDsJ0qUjc6U2GIw0oJJ3SNlASxUJru5fMs/QswYs+VRCoFDC33NPEVmRUIyMSDlegNW6/rcU0fzUUubi5296r4ik5MZAur0JcRur956ehxPe9FmHuQeuLfZ5Srcf/c0REkLhnikA9ArbafSM+fPNKbRypDJF27mKzjcJmpQbTdfMrRWTzrFpW5UQmHODbjG70/chNzgh6TAeS5KQdOjQBG4Voe/jBMkmNBGQJiGTzq+FQGkMegC0oR3t4ioz2ifb9nkSpmSO5YsOfVZSZBYXZ9ZxwhFpl1NCwResJjLSJJCMW9Dr5FRueUq1yOm9toEUuX6Iz89RJVXxOOP7EBnJjbtcM2Phs6plzrWF+iOMwXpN9LpiulCAc3PQ7tQ9CAxLA2mM32bvQ/hxqLWpvCv5+6lq5DmkFNDauHMYf99ck1v3LHMkdHakPQmZG9mXcOD+8veE/k5xITUlGBnyyUlIT0q/DbCklQRd0tAL0c4KysfEpAn7affwnlbJQ+69Cr/Vn5ueX/ycqogVNw2N9SwytInOnWt7SkZ8mzNkJAdOUGgc9AsJSLqGRAkDKAAQ0bNqmrvSy+aUBylKVH9LFVJ8TUjn6BxB4ftTG9JzltpkxmBV0ZbDfJRnGxMK4y2EC37NCd/X+yM6QtJhyeOd37nBCwkqYxXJabXqULuQZohIblHiC27QEtsFgYiJ1gY9Ke1CoeqFYn4PbVDRZtaY6LWTLKSwi1/ok0BK/HePqmaak5H0GusC7mqTa/9CkBHlnoUUApDUMe36OkdEZINQ4ttoqoLaOCLC7zUdzzL1DcT4NuRw8+33gpolBbDDFisq+wgAPSlhibRgGnUNaUhQ1IykuKaxPvJkhQQudw4pRVbY5QIDCbuAtWpUbr0BvA+94Krr/mkoZlaILCJSoF9I9KSEksCUklDSEo7CkQwiLtNKOqsYHCGh/rPaeCIgwgm3InC4Rs0yd8PRflt1v4i4IIwrY8IcqGsYBnfNqvzGhf2KC1L8nJrAn21OWG4Leo+j80g7H/C5XknRao7gJLQN2rz3hBwZaTOGpcjMb8kaB+TnF6BqsU/XlDqCAljBfVy/pcqz4SK2jHRY3OgISYclD/LRT03rdpvdJ3UN4mij+eEuN6TpJIGj7rx0cOR7rQ1KyYR5bYW0VNieBE3m+Nx5+f5SCAydRpP8o6W296pFncCSCnn110qRrnl1hKvJitB2Yc9brfi3IOBbcsj6poGctCG8qXWELAU5dybe/iZ3C20skeXudUqFa7dx78qhNAYCgITAsOZ4Iaxmv5DKbzMASi29ZnpY2s+zQkMbExETr8nm7j307jhBqkl7zr9rEd4rrjXP3RftB4RYm1IbrB2UmBtpDEYag5G1/ATLCAmzEv1Co
nBEREmB6UJiWU9BCuH+Ast6Cj0l0ZPCWz/6SnrSETTX1JeBhPB5SsCOG04OgUAz+V0SueD3ZxDGkTE2DsAYa5W1MRc8/iK2EEbvs2kmHin4edpqneveezoXt5ipjCVTChFZErxywA0qpew9c2E5J1hPqsiou4dUYTUu3meS+Z73Q+UEiVKkjpRUjm+xndBr4dnE59VebuJdZFBivJvdgl8TAhnjVweGjpB02CTA40VyZGRSIsKP54JSTFCaYx+qcKp/7T471xatg8aXa/w4JtEcpots3qJgF/EgSDhSVGMhyWmkJyUjHOtq1m8bON8ELshzi4k2BkrmgzlzZKQtmogI0Owu09T+qH3zXGQle9Y33X6PF3ZLbTBk/RCd3mnbFQRQ6uDKpSSGpUbPe3kFEtIE7t4VtS0RSImESCa00n6QALSpHRdkCUljCDisNcQSEIot6CvpXLKkt4jYIHVg2llNeioQESKpFdIhbF8HC0m4pp+v3P68r8k2RbDkwto+hHMDNcYSAm0MjLDbSxhI2t91T2mS58j7x6TfN6wE1Wbs07uq/KQUCKqSqFcmzDNkYF0F18glTpvGuTJVFqXn4a6zZDlpmkvHuZBNgqYpO1ojlwAh6bA40RGSDkse5MvNtfxAPKFPsqjw+TRnGYmu5124Gs5ngtaM/O3JzUWaEGMSVgtREU4n1eTl3JtSATv+rp2GjRY7A52Y4nMuUmmzUkJV1++0kLYRQNpcty543rejYZEktwQf82MyFhNUBQQuaDZZ4HKocz+bJKMTtSGnlZ1EKJgpgqQ21FZ4LWGD1I2JA8mBIEQLAa/xsxnErPueKg160lrepLOWKCfpDuchEWWDaaVAWToBTcLFZ9GPiARVOkdwtQuxG2T5IFC8iJICM33lycaynkKhBKaVdBYR4S0jhbRuWBJA4bJrKRlbPDj5AMI4YV5GLGMf7RMfw2F1B8JbRYwx0ACzgtjP3nXOdoKde5iihliOFiTwBlIyKUHm9zXuuMYkFJl327Y5EE47Hwk/5npgLlmqfn7IXb8pyx7dk8qQmXh/9w4mjY+TZwTLeP4cTefPgWlUgIhwjXM9rkPTvNE2K9h0rrMWGazlcgNfE+gsJGPQEZIOSx4pGZmP9jqci31OLCOSCZ0kCDa6bHnQghEEgJy1RKO6aE7qb5xDurBl3R80AGm82d/HkzikgZZNbZlPv49bfOv6oY0Gt06Yj1xNKD5BM+Gn7nwt/Mbn0wd1ZKvOEsKvoUS8Y91ie9e9ayzRQLhD4iMGVhgkUsrJCA/05sdSEwQAIQWs85f2YTmA9PElpbHuWQsRZ2TvkRhREFR9y/z3WMDlsQRF0rFEVGb6Cn0lA+mQEjM9iZ4jI8t6lqRMFeSeFWLXCmfpUNz1CnFcSO55CvabAACjAa3tXwDCaBgRBD0hJJSQgJAwgki0PVHpWIoRgC6DZUYIAWkMBASMe1KTojT1Y2vS7FB15yfkLIpEQDV7T4PLZT0ZiggLbH/Z6wXFg4LIWkXbtTs/qFMykvttEtAx3D2LrCVt0FZR0SY9c2Uft9Ows5B0mCcmorJnnXUWDjzwQGy22WZYtWoVnv70p+PnP/95tM9hhx0GIUT072/+5m+ifW6++WY8+clPxrJly7Bq1Sq89rWvxWg0Wve76XC/BPlu95xbBRfWpIj/tUUavJ76gU+COs36JOdaN1N7SF1JMTb0r6fC9rR92lSvm9vWrg3V/q/z6c79q7s2zyyTPnMgn52G16dJvwdLR/D/T//xe1oIEPGk+ytdTAb98wrQOuuPACjDU09aIXpFP69r0u48/EwGwEjDp/UdlNoLWAJhX20M1gxKzI5KrB3ZGIxhGdrtDRTOtWm6kN6iMFUoTCu7bcr9o/bmEkIQMcr9A5p99EOq1pARy9eHkRJ9JbFiusBMX2Gz6QIPWNbDViv62GbFFLZe0ceWMz1sMdPDVsv62GpZH1vMFNh6WR9bL+thy5keNusXWN6XWNaz/2YKi
ZmewEwhMa0EppTElBKYdv+mlEBfwv8rRPinkn/COBJiYjJCf+kf34e4aKNLDcsMZUmPJUy+5oaM5yVuGebPJPds/NgyJvrX5lnWnsuTyRqrhbQKocLFUNFYon/2Gdh/5GY3XbhYH0c2e4ofIyvzIf9n+40TnjBXpfdN/4alzeRG70h4n02WjOTIUKnDPw5fa8VbseL5dL6udrnYinRcRXOtew45t+YOHSbFRBaSb33rWzjllFNw4IEHYjQa4R//8R9xxBFH4LrrrsPy5cv9fieffDLe/OY3++/Lli3zn8uyxJOf/GSsXr0al19+OX7/+9/j+OOPR6/Xw9ve9rYFuKUO9zf03IrRlII3hzYBgCSU8piRJusIPx9fFCRpcyX5v1vtPLmeAME3vupegOw5c9eke/feCzkzu6psyp4rd31qQ1vz/Ti0yYU/DnUkYVzq5PRZ8Sw9aWMWSqufIhbcTLTNCyEKmBJ53ZGUwX0w1LkQmCnqH7JwY5pbQMg9a25kMCiNczli41oKzA001gxDhWNKZcvT2NK+9pNAX1mi0iuFD3ynWiZDEUtZ47TjKdJ3gbKOUUwQoacAqeFdfNJUplKABaUHsjRdSKzoF9aNS9n3vVACfdfnfWWF+p6bCwQAlAPAAEI7CwdgiQN/fi7blhESIvdcEyJSKUICezp+90JIT0yMAYyIY6LCftY+IsGOl9b9CdJ499KxtW9YvM+4+KA2zzJF2zorANArmieebJwWvdcybAuFPZv7IGx3ipuGDhjnntXsRlv/G1cg8fm41AaQwYJCbWwzN7chIv43r7CrnmchLGXrGxstqL1DIyYiJF/72tei7+eddx5WrVqFH/3oR3jsYx/rty9btgyrV6/OnuPrX/86rrvuOlxyySXYdtttsd9+++Etb3kL/v7v/x5nnHEG+v3+PG6jw/0d4yaXOqG+TbwDoT4jVBBmmwIVx6XRrFu4nZzl29imzXwxaSInk6JWY1lDBOYb+zIp6q5vf6u/77aaRP4MJkWwcMTfQw0FVvk8ifXRTqhIKyNzEEEmMkJpaX9+691Y0bdZongCBgKPERkxC80+22/eeD+XXv9H2zYXk6BtUFRoD2K3LnvvTngTsNnbpAixVSkBz5DyFE1CZg4UAB0pD6jPnIaXUvVaNy1LSIhs2eD2YI0VcFYNIiNkrUhcraK/RD40ACmtGxYQkxUGI+w+ueqG3H0LQsIgZNnSCH1Kge9tYDPrNb8TbearJsxXc5+7VPTON85tEXVzB/MTJxm5GKltam/TPNw+uUfNuefpOjYOdYkjgKqiaRJXsA4dFgLrFENy1113AQC23HLLaPv555+PT3ziE1i9ejWOPvpovOlNb/JWku9973vYZ599sO222/r9jzzySLzsZS/Dtddei4c97GHr0qQO90Oki0FdQGNdResY1foi3BoSn7d6jagdSUPS1Jp17eVtzaGVfy+PJ6g9Ux519UvGXTsX2N1UZ2XdUm7G1wzb26+gTYGuTddMkbahTjjiAd3cTYvcOKhGRnxNEWKJmFuN9G41IckCBVsrITBdiCjbE7/uyF1nrjSYHRkY2G1AU+RMHqUBylLD1UyEEsCUoTTSVpAXro+Uiy8BJKQxCPElzgVFBxc1nh65aZhUa2FUrZUhKUXoN164cKpQUAI+nS/FhihhU4orCWsNAXwRRAFAQQOmhChHnoQI7TrCZCRKYWCofY6UeDKSEAxC7a27fQzCPMQJJgm03sXN1REymTFuLSvxe6oEoqxePBVwI2FZT5ZEjhwRadLY9xAsOXQPUZ0cR1R9NjmWqS0lJfOJ+wjtzlmf8+ebT1y473s3V8QI84bvg8yEliYH4aQkyvKlKY25a28yxyx2kIvkBr3mhr3cksS8CYnWGq985StxyCGHYO+99/bbn/e852HnnXfG9ttvj5/85Cf4+7//e/z85z/HZz/7WQDAH/7wh4iMAPDf//CHP2SvNTc3h7m5Of/97rvvnm+zO9zPMC6L0
bi0iZNiXP7+qIgg0wRzX+AcUu16HXL3MmlxRR7g3RZNWaZC9ipTOWY+pCT3uNbVTWAhFtG6yuy5a5VMuIsK9tVoXbVTf6c1AXx8EGnxXawEkZEUW69cXtm2LkjHuxbOLUrZtuV6VbrUrEPELosAIjJCn+328c9HMuGxjjiTe2dP2ZS9FNtCKXspMJ0IC7llURYtJQUKARd0PrIWjHLkrCOmahkhOAIhjA02z6LGUkJIe4DidohoGEOxBcEyQt02yfBWokGJMGaeWmg0Zc+rIyP1c0GQrqm4Jk+9npKP1Co6aUasNnNSXZHCNmgqithmXWuyltQhR0rs9g0v4HfY9DBvQnLKKafgmmuuwXe/+91o+0te8hL/eZ999sF2222HJzzhCbjhhhuw2267zetaZ511Fs4888z5NrXD/Qw5EpK6yXDQbz2vwmDVfw2cNlOAiwRl4n+VumWNK9bGr92k5a1ousa5pSRom/rY32+DMFI9d3zdpmtFMRoOnPywjZXrpC5PuWu2ubf02v73ltJVrh2ENm5hTVYRKtjH29Kn9FdaZIuTKRF8oXsqpJeVsEHMpC23mZbi8bsuuGtuVKslV0JgqrTCPtXmAGJSSprqoYuNmB1pG8dSat83wZ+/mZTEz98Kmb7adsWaVHXPmnYxI4UM5MNnzRI2A5kUwmfK8k+ZMlwBEKoAtIYxGsQ2hGFWDhobdIwq/Gc6hzYATLX4IdBcnZ2IBwU1+88mFELkKYEB7ipov1t3PZ5EwfgikvQZCBa+Ni5yQF4DP19wYhIu67JMMW091WEZRwbGuWMB7clIDnWJTMZhXd1qq9NpyJyYy8hVlyo839/22Og3XR2bozZFhzp0yGBehOTlL385vvzlL+Pb3/42HvjABzbue9BBBwEAfvnLX2K33XbD6tWr8YMf/CDa59ZbbwWA2riT17/+9TjttNP897vvvhs77rjjfJreYRMEabLGEZFKsDAS03jJSIlLl6lJkPOkBCCxIQ3C5RmRJvWV9mZ0dy8+lWW6QLU05TeRgxwH8EGRWaGvyhPqyMi4JAH8Gvnq9uFjyfokOpespnbOxczk0KamyThM4ltdR0Zs5p2YiOSsd9QfPEA1hfSZ00J613AOu22+vvs53DU7rPXpV1JgbiSjQPGcyyORktIYzBEh0ZoJv/YlJcHY3ktC3BnxIOLTk9K7rHANeghej2NtbMYvm0uMx4mQpUkxywjBPwkiFHoEKBniRwBLTgjkXqXccisLT0S8AsME4lFHOsJ+8XbaRpXZS238OWiuozMEkuPcvXRMOogwesLsP7d3o/NIdloIglKxBJj6xCQ58Dorba8XfW+I7ci5Wa2PrHyTIkdKcu3JkZPUYsuPHR/0v3jRBbUvTkxESIwxOPXUU/G5z30O3/zmN7HrrruOPebqq68GAGy33XYAgEc+8pF461vfittuuw2rVq0CAFx88cVYuXIl9txzz+w5pqamMDU1NUlTO9wPUaf1qiMj/jsLyJUatuaGkgAEhqW22a8EnN87kZQ0g0q8YNM2jqxmPWvVoE9jTO4tXAhybix1wsR8F5ImMpJzNatbCCoEbD3W11qX+JUc+KKf+83+DWORk+ZGQgJAqVgAq1iaNKCFQamtBt+Idbu3/7z6logcpDjpwJ2i7+//3k3+sxXo7XE9pV061eAqxe+D3pU5lx6VSEmd1ShHSJQzCUlNxDROMFGf9GG8cECCfklfYMketUK4ASpV3/a70XYCAWKXLXLFkoVrFy9gaM9mv7vPiIlD7XbkrSDWlcv49tOxBOpHIn1au343IdMUZUIrjfGWEXKjs/dQP8Zy8QcLbS3h2v7SmMDiEqVRfJz9m3OVbQslJws4rxuDC+kmnJvLxmUYHIdcnEmd1aRy7OLnIx0WKSYiJKeccgouuOACfOELX8Bmm23mYz4233xzzMzM4IYbbsAFF1yAv/qrv8JWW22Fn/zkJ3jVq16Fxz72sdh3330BAEcccQT23HNP/PVf/
zX+9V//FX/4wx/wxje+EaecckpHOjrMG6kWP1cVt64ir3dpkTbQ107gGkM4DakOGtboOH4974JTJSR1rkZKxI1OF+1exqpQQDAtZRC86sjJJK4GdXE2TfdRh/TnNutvmue/zmrTVutYF4w6X8tI3TXHVZ1P40WGpa31AQBrBzYIepQQEiUFFB0nw/O1ZEa4sQ1Aawy1QOnGgoFN/zpk9zgp+VozLMe6uXHcPTtEqW2mLkLhyAJVQu+5TFFUB4efj6wgpTYYuOh6ajPvlxwhoarqZWHQVxLauPdCx4XygJo4hORhkkAvXE96NyW/W7UPBOw9CSEgXeiqEPHSaozx6WEj1yodkw0iGrwtRDbsse6vMZ5sjHQYG5bohX1oO0cqmPMkGykhAbUzmWN4oUwOromvQ5M1NodcrEM4NowlKYIrrc5cn8e/pG5n/B7HvS91pIS2KRkrhKrHN5OFtooduqe6gpDzdRvjyLrlNSiKtA7ulosZXVD74sREhOQDH/gAAFv8kOMjH/kITjzxRPT7fVxyySV417vehfvuuw877rgjnvnMZ+KNb3yj31cphS9/+ct42ctehkc+8pFYvnw5TjjhhKhuSYcOk4Dqd6wLtDGAFn4xIWGf3D/q/JJTIT6vQQxuRTzYkNxwKi5JDYtUaUz2ftPYj5zA0ORb3URcci5b/LrrA/yabfy9FyuayE8qdEdjo2E8e19/pyK2MpjT+pY6eiZDrXNlLBqRWkDGgcgIkQkAGCAkLSDCYEmbcM82kLTUSkQCdkpEcn3CSVBpDKSOa42UxqBosDSW2jjlQNhmjA0FKY3xbhZNbj7kEidFUnFdxMHkabwHnZdbO4A8CSGXqzTuA4C3ZEWWYEY6ovtNSDp3nQNQsYa0SQoCtNf4p1nQ6t7rtN2TxDrUuRPVvVKp4mpdUerYfautW1kbRUt1e7P1NHfuOmS5R+awdV1rO3Sow8QuW03Ycccd8a1vfWvseXbeeWd85StfmeTSHTq0AmWIUV6iZbEY5Jql8xolbUIAcOmkOCUBlBiLJqWQlkFbpoSALo2PVUn93O1+1Qwy/lyk1eSLeRkWo1TDVxEgEASISTTnkwSw8ybP11XDFo/kW+qvER2XEMKKdpgLNikRnEdbm4TltC1ccOQoKBbC/eXflYx9nSmuZFQazEJD6RDUu2ZYJlamEJy8Pv2liYysHYxCG13717p76RfK30+R9PMcs4qE/gwvVL1Liqz0vxLBrWhYaigpMCqNrX0C6cajdUPSwkBqQEnl1zb77hkI5gIGcFcp9521hWRPweJ47Hb2LBjhoBiPnHuVod+ZtYNbMXJWDU4o8gqRvBDZRDjaTA0UM2Q/uzGbWJJDBqx6QblNW8f9rjOWq8ZzsN2bkiZUU8rT2K7O+ZGbrHchqyqmoONkHmnyhXTbuPvwMXnraJjIKZ7mwz06vtJhvlinOiQdOiwGKCHAc8fnfucCO2mvSl21LAAJSUnIyMQ+x0IEAiSDNs9rjhOtFpGR2rz6UgDabddBM8szg4X7yLeJLDWTIiewt6mJMh/Q/fNg/3FoIiN1AdHRNVtpEavnaarbkrq61B1fSIGR++uDtdlnDqvVN0AJGztiBJQOAgzvhxEjJOsrqDa1cKTWHm4pUVJW7snuH7tqNX0Ox2uUOrYoldJaSey7JnxfkSsRWZXo77A0UEI7lytynRKeiBCIONBnIFg/PAERxle3t8QknIPIBxDcrSYlHzYzGbOGsPmuDQHn9+J/bxDGU4zL2FeZCyawmkyS8SqH9WFBbbIMp0jfcR/HpGMXrnGFbNsopYAwL6pIzxasgul+0bUWwKOgCUvBmt0FtS9OdISkwyYHTjLCXB5rnkpjKhYT7kZCaLI21CESxCKfDQGpgjWj1MYVDoTVmrnPSuT9pm374QUqIiJExqiiNwmAdQuqk8Va3wfQ3iISbV9HCXiSRXPcrk0FFNu4StD5s/7vSWawtoIMHycA0EdsFZFC+JgLIhT8Pkonq
CoDjHhKahN84YdO4KXg8vUBajOlKeauW/x3lak4HvaRKLWOCB53X+P3TdfpFwp9l7I3Z02ica6NwVADPQBDYXxyCqUBTbEtQng3TdvvMdHhcwN3gePWJxkVrswIkYx4pFnEqiQkJh1pfEPeulHt1yaiUTfP5bX3QfINMUD1fvHjLCN17WhKClJ3TNg/f0Bbt9KgsArPnt7lVKk1DuNSD3uCniEddUqpyPrpPpbGWuJsHRVuKbe/U0KMtL/49RaanGxoQb/DpoOOkHRY8kizWwG0gIbv6cIpNZgAYnwMSVP2o9S33Z+budpwbXC4GM88Q0JE7EtPFg5a+KpCfzieSIn1G7FEhFccBoJr1iRavjqhvU5gXxd3rIVA7jRN2rm6VME54Yl+S69BY40v+qE9prKv/8xTTQsRMmep0LYc+WgT88MDkek3EnTXDkqU2qBfSCzvr5/pvpACRV8FYlVUA4PbuMNxUkIIlhVOSIL7FwW156xJRNgA+z5QMgD+TvRkqIfSyxCmitBv4vGbvqv8naBHXA0iD+OInhmvwZKLDUnnIt6/oxbv06TKlIjkuX92zrLEpEnTPwkZ4agjI7VxJg0WUA7uphraWdd+Ns8mip2cpZ3e6zpLO7lveau8sfN1NI8y6whPkx25wdVYSXp0Tmmv34OIxqh352Ik0vcL/cas0XXzXltIsXBW8vUJAazPJI611+zQjI6QdFgS+M+rb6kIkCkmmUQVk0pK4/x+G/yQ0yxIaQCyci43/twtkGoYrRsJYKcug1ztE2rvOKRavnH7EcZZDhYyfSch99zGXabNsyYhIXdPOcEpd02+jYQSv5L5RvA4Jf4bS7ksqy6CXuCoISJ1blZ02ZymVhuDlx60S2O/LDTe8ISH+s9v/cYvaok7EI/LHJRUiba+ejyPRYmEZpEncERCyH3Lvj82nTe5PVphs2piSAU7bpkgDPm8UeYFca40IasVzwRovwcrSp1CJEdI5uN+mUNWmcK3J+mnuespbQMY2+MKoaRP6uJGJiUi80Fd9il+PW4tmcTqOV/Mx6pArlc5l7em39YHlgIJ6bD40RGSDksCXLM0LkDTfo8XlxysuxRbCJUTXrSAMz9kF/umNKR8n9S9xl6ItrlsSKW10ORSDEsZxwaMg5QCPSTavDG5DdsGpLdx02pjsUgFj7rFkpOupsW6abHlfsJ11hCuncyBaw+964R/lEGQoYBSbZwAwNzowskcOUEQnHtOmO5JiULF1cRtu/i9IhJqScidg14woXRdscWyfvS9yQ2yLmNT7nvqskXfeTphAgmTPFUybR+6oLC61KqtxnCTG1RNoDORDLovnlEsJh/hWaYpkNtYRdKEAXWI3LKYdYm7w5Vsu5ICA2hvKempYAGwTbFCu5/HROhMJUTkUliHSeJZouMaxn7OKlKngEitCHwOtRb0+H1uYyXhaHLjqks8kW5LrW6ceARY0j0JKal6GIwnYLm1QglRqTm0GNHFkCxOdISkw6LGeVfePMZ1JbetWUglRFptbZCzPNRlo8ppEicFd9dyGxAqc9vYAC1sLElp2mvRuIuBPVdCnBpiJupISCtClDH3Ny2GbQSQproGbTV/k5KRbLHKyFRCH0xkNdFMAOMxTF54YBYbIp09JaGEQKEEppXNAtWrcXmxMRBM4tWALwroCpSvq/Z4XdFTovKOAXGcQ88F+ZYsA1GFwNdo6gGg7wQecnOjzxw54ZAXehwiFiY5mlzv6tAUjJ9aOPi2waiM9qvbP3fuFJY0VLc17Q/E7lmc/KTWrPj5uA81WaRKhOPHxVMQ2rpgtUVTRsCcMJ0m0OCKHTqfHmOezlllx4FN+xW0yZCXC1KftCL9pMgNq7aZwTp0qENHSDosamgDv8BBST/JpsL2fDRTlcWGTiENAAklDAbQViPLNJopSUkXdr6NX4uDZ1/xvsbGar6jTCuSrCj12r0oIJIWYXatujoMk2bIGrcwlqa6oNNxsfUK7HNeWx4a1dymp
nbWafDq2u2/lya7b0pU7O+MVLJMb0R0y0RDmbpmkTWkJwV6yhJvS1Lylr2ei3saylBFm85v4yE2rjSwrKdq3XP4s04tZLEwmj93jnzUjck4DTb8NcpkvKXWCv7bJEitGCnRsNtsQ0bJtcd9bos6Apf7zL83WVVKbfzvPHsaHT/UuXmEX6tm+3xclBoOacqal7OSZq0RaQC4Zu82aB0JRKXOZZL/zWXbUmRV0i5bHmAtrBKABqSxad0ppEkLA+ne++aCj2SNc8fpOB20/5yMqXXhfjklTxfU3mG+6AhJh0WNodbeLYPqCgAZLWZmVm0VSBtZAaxJ3m7TgBboK4lSslSm7m9uEc+la21qAxe8ePzK0AdhAlQDJc3zz+MMoE2reIjcPft7bzigbRB6ShKbzP48eJcLqNWLN1dBJzRpJOvIyLjbIgGEjvUa1MiNi+JHAE5KUu1kam0it4ZARqTNhOW+KyeQ1ArchQ1ctxm0bEXxodSYMhvXXWKqkL79ORLKYylISKpoxllgFR2XDs+696pkQmTqwlmaULxxMNK+fgqRh8FINxKBunc6JRJ03tTqkbN0+L+JsGjYvvRZZO6Zb8tZN9M2p4HqFPtWR17S9tZ9T7fVkaMos5RvY9ivqbBfesncEGiyhqS1nvg+/h64GxqLB6M4w1TpMB+LJLd68CQn2tXLkVLAZq52vyXkpP68bh82pureu7Q9bdBENHh/ro84w4VGV6l9caIjJB0WDd76jV94/3Dyq1eST5hBwh0XHDsfUKB70HCbEPPRUs7LLex1qUB5PZQcou2pa4S2vs1+AWWkZBJXpIXG+gigbOMe18ZNwlvDWvhU0352LCRCBOvruvORBrXJzWMSP+bsvSlAa4GhIJ8P6TNubSwcu8/2lW1fvu4P4JYk5cat1RCLEHcDVN4zhYVxj+TIx24EEhFVf0+uXWTe79z5OCHxsSAZDXWOeES/s3ElmPWLxp/RxpMSzX4LWv4x5EBmkgOwWjHjyMm4uJa06Ge/kJE7l21jICaTujvVgSeE4AqJ3NzIXZ5SBcMkc0aTxYRn29KZ9czHFLptGmGOSckJv6foWvy5jCEj85mn27iPdeiwLugISYdFgzRrVaiczoR3MpeXQQs/CcZNqiH7FrnWGB/oroTV3qZuH3X30VoTKOPzcBcTgvaWEkBLWrRDkKlffEWspapz/alDpK1mzZp0Aau45CRacX/eBkLJn20uxoCDxguPOSEyUXFRYNaoJj5bjd8Jwgm3loSgUjtuIAFdBpehttr9OvC6GHSkgU1gwDXscyO9wbV+bdBTAj2Q9cxu0zrEkNS5k9DHHq/zMm4cOjc5SyKdIIjYgsEJw9rBCGsHZdaqkUOdgB6d1xiUIw2jDcpSo3SExDQNNgfTcH+CvcdChnecCIlw773h250bUM4yQjVc+oX0iiCeUjklE/x+iXzwwPtccctARJT7G+rG+AB65dX/WVJSl4CAXyNXKT61iKhEmOfvSnDXgq/rkaZUpzpPbZG1hJRhTGo270tdbT+PgancFyennNslc3aOiMw3ziw3t/LrLgHDiIcS+XG1Xq+5Qa+2NNERkg4bFf900c8iLR2h1Fbr692ANAAZC5zAwk6EUWEsHTRMFWsJ3GIjqwHv44TNHDmQyaI1CUptIoEtagsjI23jP8J3+3cSf2O69YUwWs33mdaRkup+4XNd0L82iNzhKm1saGROIE6vkR5urQb24NIYSOa7nmq6Az2CdefQwqexXcygYFveVvKrL42J3OFy46kuUcN8wDNa2e/19T5SZUndMcPSEZGRhi61LWzoNAxtXR/T7MPCv99sLGo3VKSIrSTaQCLv3kXIBbNzy0g6H9dZo2NXNZ0lctRfuf6ss3xNSkZyyLln0XZ73vh6oZJ6GJ88lkz7uk6BoDS9315pxSwsBD/PJ5ahcCy8K669aGZ7NOdnSHLm/Wh6Z9qsY7m5lc+P2mx4N6gOmxY6QtJhoyJHRghlOvFq4cz97
PiM0JlL9cj3bwsy24cN8UJit+WPtQJnbBlJNV11vuiaFdHKkRQfO5Kcg7sARdq/ujiExMfebot9kO1v7QTA1DOpzk0gF8g+qRUhFZK4oFPNkpQ/R7qfZMJuqk311xXVlLzWBSloVi13FZDSOOtJrBuTApF7XZqVihc4rATmZsYbxZIsdMXl9YU0A1DoYhb835KYABlCScIuEUunTEiDua0AXmVwOUKSCtJ87A4c8SAiYozBaFDCGINyZLfTWONWkibSYPtFuGPA3LJcO5QlIkRK/DFSQEgBVdjsbcHyEawgVExyxhWz5BXveUrl3PxE951L7lFH4kJ/x2Sn79pIyRx4fFwbpMSFv1f+XJn3NT2Wf6fCs9wqqj2BdhYTAJTVzp6YlAb182wTQYgTAwTi49cNlgpe07zBt6FeacIxzu2sTb/XkZIQb4ON7jbaYemiIyQdNije+o1fRN/ryAihQkoAcqr1X+s04W2zuqRZkOoKYqVF7TQTXHOasLQdaUB6uj9lXPHXYNXk+UKSIyPZVLUN99w2MwvtO6mwm7NAjM2o5dBEKPl5cojOPSbuJxWc0sw4beQimQgSkAISwi3KLF6C2u40iirz/EOFbu20soGAUDArZduhY4m0loYEH+ArP7sVWhs8Zc/V429gA+DI3bcFAFz081uj7fl0pZNbQCaJCfKfM9bNNojICo1n556ltcFoWHpXLaPh/ppsfAiQf2+JfJQIlo/AmwSEtK5dInk3uAuXEolblCMkBSMiM/3CExQeu8eVKQB739j7VEhRibVpg8gq48hIT8poXmyaq+usmZFCKiEjTUSkmohDeFLiY3KY1SS4Cgs/X4eA95DRrQnZhACJlYVccen65Kbr1zn/LEJtrnQs1c3ZuWD8SdxIa0kJEBO1RYwuqH1xoiMkHTYomia+1BUidZMIEyBpPN2+CxBsN1Z7lPk9NvXnrx/HjeTiS4IgUXjfZVp4TERO4vOKRsHZrptxe3PxITlrCH3P+fO3Qdby0nCunHWrydUsehY8vqDGBaTSvhq3ErpuLvhVipAZi9cJ4UKA1sbGdRgDJWSljwGglPFYzQanMstYWab3Dt9Ovj9ZtoaLXBrglpGFtOiEYOSq2yUJi7pk758T1ksto+x5udS7UfszloE2EFK0iiHhVhPB3nEhLRGRUkAI4WNIhBRQlLGtsFna+ioQDCIfRDr6hcRM3y77y/oqWFCUhHQEgfozum8ivq6aPPUBxZnMjWTWQkL/ppilpielIyLuN3qvlKxVAuQUHBxprAgnIvNNN0xuhZGLsGBZuJSM4/xMICl+HtXxOlHn6ptzDUy9AyADAVAIro6N7UewMmoTvyfA/NyE6TgZrYnufKXpLCQd5o2OkHTYoJhEE5OSEgDZSbopLmOhA9faasLqNHGcRJCmPBJ+VUwOeLBvnSDPhVqeXSdk3UmEYpOpA5G4V0XnaoG6NMzpdfLIW45y90dtJaGgjpzk0CREpmQmdamK0/EKH58Tja/o2Y0nf/5+EisVLej0/HPPntoWzkHXWryEpC0ZqauXkILun3ZLSUnYx+7YUwAQp/EutdVbcmFaSRGlB+a/831KdkNam2xAel2MARCP65SIAPBEgxMR5Swe0hXRbHLP6hcSy7xblvSuWbRtulBQEp4gAPAp1uvGliUlBkMtI4JSmiqB49YWKpg5VYRCoPQ+TRWBkNjnGM6Rt9zmi9jyY9N3s631IEWIdTJRG0tjnJJIQMv4vS+1YdYNNpc7cjLUOhtXk37296SCEk4KF2/FlHRj3ZJl+EMKr/kgneNjMhY+L3alCBC7N2+wa3aV2seiIyQd1hve/T+/8p9TH+zFjNRtKweq8pzTNOWyakXHMs06Cbzp7xZs8fCZXtqBa/W4xiolObSNHxf2nfw5NRGRXB/lkPMHj5AssFlywuCtWGPGXm5h5wSEExPfThn2jQLRdXAF8YIzPUojInKSc6HQxj43T0oSS1MdsS+1wYkP36n2HjcFp
LdO2l8gHgPN7689ibWSGAChxlGwnhj2ewyKUeExLy7JlLVeGBsIzeM/gCpn9hmy+D7OAkLB7ErJyCJCxIS29VUgImQV4RYJcs+KtjGSQG5TAFCoOBic3zq5IWoJKHezwzKM41xMFr3LtthnICK27k64vt836SDq01IG10Ruiaw81zHWx/S3NN0vtZvHksTJIoyfa3idEm81ASCV8MQkF4NIZMUXTs2M0zSOh8eU0HZOuuqKPVbujT1Ufx8O45QZWTe+DJoyxXXo0ISOkHRYb6gTDiaxkkTnS7VcxkBFqRiDVn0++exzAq2KVuVUEgr7KojIalJHRuqQ+jrH6SfhA6Xtb/kJn2IICENU3a6a4jn4mjifRzSOhFAeftqeez6cjHBfcKBGy8kcc+Prhm2xhaGq8c5dPwhSduGfLqhwYdAiF1IgvQX+aNIrWM2pcW2y24ZaozQk7Nlih0Nt2zxXagxLV5G9DMfm3qPUArdU0WQdaRqTqbUkdd3iVhKpBVRPePcZiodQ7rmuHYyizFm5miWU4hcABsLGDAn+Lkn77hpl40mAavYsDiIfdS5Zwn3uJeSDB6vnrCE+gL2norEsRSAGgTAwC0kyH9lnEz7XWfsIsaAcyIknHzK2DEuByrsEhPeJW3WDOxQj6jlFRKa/KxbMyBQUt58rEyDteErTV9vrcFKWXMfNPdy61JOxC1zUHgau2OL9B8DH3dSlNq6ci/VdpSI9gmJnHHLEP11LhnXmq0UE21cb+Job9nJLEh0h6TAx0lS9XJujZKypSUkJ0D4AciEwaXxJzsc21WDV+eHmSEhuki+NQTHGfKvYQggAZRn7AdN5UuTS9OYWjvGuWVxj29jU7LnCteJ9OSnh5yeNXyqk1GXJAcLCSuhBVFw8pBDQQngrkX8esn4cSslJSXA7ocrpoT4DIJLnaES1/wlCChgISG0rMpfGuCB1A82sJF6I0cYSFg3MjspIGI76gb2H8yX7GwJ/tce2/rMtlrjuqAuMb9SesxTeXrhnfUtWESIdSgpb40UGVy46NvyVvq6K0QbGCCoBAuVEESOrySF8ILoIlpJcbEiOiNDfOGuWjCwkMz3lXaa4ZYK7IPrA8oQ85DK6tUFKjpW0b4kQ9l0X5KJJ77rvA6eEYVTeExLmgkpzij1MwJhA/iMFiw865+wh1vLzjG85q0l6X8GKwub9JMseEAjAEFYQldpm05LKzkVUENTHCWYydOWyM45z+a0rmBhb3QlhHRmXpcvfFyP+fB1M3Uw7dJgPOkLSYWIQGalDSgJSAXo+QlO6OHBBf5xmZz6kJAWZ4QG47FtERsabvfnvZBnw7lLCCqRUfyIFCdZZ60ONdYPvNxkJyaOJPzZZQ3IgbR/ASAdzP6BFloSmWECqEpP02afpi0kTOZROmHAC5VBjrMpKcoHNCW2WhFuhSsncKdy4cN+oS7QJZEWAEROvxs2QS39sEJbnRtV6D5RJiXzxlwpyiRSAvCth3ZTRNvaJk3k77ijgXWGYqNLnRtIXSewXOoovImsKJy48FfDAjUeqRwIgFEZseOeoqCFZQgBEJCQlItwaQtmy/DYlfawGWSTqrCFETLjmnQgECcREIBiX9wSKnguRCfqcPjP++HygfrKfSPY2CM/VGPse0DZjYF3iTCwEl5qOtXEcKVkhRQBc2tzIpZIRjDr0MoqkJssRdzOjebwsZKVQbKOFpMZqXFf8tqKwIqI1xsLTCJXO9TyOBgBcgL+ebK3t0IGjIyQdJkZdBV+A+cTS5J6ZKCdF1rVHxr9xTVEOufbE16i/djpfS8WsJ3T+OreutB3awKZG5IsGIyWZNkdWjjECWGo+zxEZvh+/T8K455QL3LbXrD8m1fYBqHU/CJ/htLupO0mshU3HYeoaNdQas0IHC4QBAG1JCbsPxcaS/Qcm0En32Y5/Caf59UIaEZFYu2uFJ0ojagUkSoqgpKgtahgELPt5MNK+mviAWUusMKy8ULyYLSQc6XjkWd1y8
lGOnIx7Bwg8hXdAICZKSjt2+2H/KUc25jwxscX/+oWMKrqPMhYVIKlg3kuE1IyVxI89Zumqt4LEMSEzfQUKDk+tINNKRtYQHgMVYkikt2AEgdeRJMHme1IOSDtjSfeO0H4C8H8BNFV4tD9jPDQjFPQuadh3iwR8Y0IgdencoOzv8Nu0qVooAWcsc83MKqIyHL9JAeVjXhJCAvBkFYGkxPsm185YPnIZ/uqKPHLylBKQHrtOKds8iVjZQ+cZeouwdmva0siw1QW1L050hCSD8668GQA2+eDQScFriNQJPmlaW2Dd/NrrYjHmm8oxPkfyvcZCQZtTcpLN8DRGqKJzkdVGO4ICFwvDm1BHROoIAVAlInStFG0KRjYJfjmXrLqg9SarCF0nF+RP5IVrAImMULYbEqQ4rIbZarKt4CFRKqelVFYwkFJAZZ7ROHleQEQaYiCQEf45VHYOCzgRlDYIAqornOYaNgCcAB3nAyYyUkiBf7nseigp8NpDH9zuYhsB6ZjicT7j4hKy1sIxFr/KfJUoEKTLwOXjTthxwVXLOmFxAkKuXmQ14celaYT936QgQcXlNSEkvE5IxRpSSB8TQnFOdRmsAilBlogAQE+GNilhCXfPEe9grQxuWAoaMCWgNUQ5cn07yj8EIXmwjFXDJCQlJS2KvgsJq8MJbpmWrNOzp8nKzhGlM6VoAMLYNpew7EY58u/T5jYI5ONisyoWWkZ0AEBrEaUCB5j1Vo4fv9XA9fi6is2LAe49YXNzlASAWUzsb+3Wz9QdVgvuMind+UPGtg4dJkVHSDJoKzTc35Au6mkmEE5G0oA8f8wY8rAuCt4obWuDwO0JRpY85RuUGxMUC2KFKVq4jLec5MiBdgultZJY9y1lgFFGX5imkkzPNamvLg9+9NvGdHiUktLEtQjq2qCE8PcWrCDCk5DUBYHalGoC6XsaEMvdpsiVhEgCd4spjSWQI2UwpSWGWkMOBbSSWDMsMaQ4Dh2Txbp+S0FuKkJUyZs0AlqY4LIF0vba/htpqq4eWwekJGuJ8T7jPSlRFiGegZ7bUo0lAaqpje3nfO0Pf0xmW93u6RivCHAs1oeuSwqGnhI+mQCRD7KYpAHuAFBqHW3jv438PuMJUyGrhARAtoAhkY+esnU8yBpCroXespdYQey14PuCvzvcGtJz7xjFTEkBFNCAHkKMBhDlEDAaYjRryYguLRExGiIlJEIG4iEL+10qQFnxw8jC75cSEv7deOHb7l+oAlAFjJAYams9KRxJGWmgFDYeZQRLTKAFjIitJQAgdRh/dmzE6xn1G3/P7fb4HTMsKyJ/3DQH0KY4kUj9uEjbkbZlEuQuE1PyFudA3vpMSTmG2mAoNQa9xV8CsCuMuDixyRGS8668OVqA6tCmYNu5P7y58RwnHbjpWlDe/q1fVrbxoPRaC0mDZhxoEPpaCFDjUngSmp59HRnh5vHKdU3q7hH80X1AYKJxbVpoPCkBGuMYckSkSSs8LpWu3cd99xaJ+mNIW6zLUAOAW0TKzDuUe45KxGSkYMXXcv1et1CQe5YQ1cBYEqiilVobCGEXUit8SPSkwRDWXYXc5TSqz5djPkGa3L+dXEwMeKagQOjqxoqSllBpaSJB1b5/Eiqj2V3sRITAx3NdnFMb1BPiZuUG97OXJJXrIFD2FIASte9nbAGxlhO+zWdC44R+zPxVtZCoDBGxtUOmXHyJJR/W5ZHHjfCMcJTRKn3XeFA5kWsp4ONHyDpJ/8RoAJQjS0bKAYQeQQznLAkpB4AeQRgDU5YAt+BJBVE4UUP1YYSEMAWMc+kSSgfS0kBIBH03BkYIcnSFkAWUkDDCWk8MnPVZ2KQR1ghiYIR1lZXCukzR8b6ZmbkwJSOchFTHV5U4GJhgvDF0SyJ7TOVs7Cc6JCVB/FRN706abKP2mpn2GIQ1SCoBYwQMSKkgIU14BwDZxZB0mDc2OUICBD/97HbMX8t8f3rRSDuXLdSUuDQAwSpC2jdeYCuno
aTfUqSZToDwvChdYluMs4qkC3RdphGZTNI2a4n7zTXHa2+ZUJNmcuJt9+ZvA6Te1HVuUU1mfcC6JORia+x3RKSAttUVDOOP3WqT4zSPPP1sDjzFKACvxeWWD9u+2lNE8IKrq+tgNZ2JD7uIn5VQlghI2Cdim2KDmMn9RBuDWSYwcPJpNX8GUsIFPksY514nnfWDhlhqITHkJ84sADx1b/gcXCB8jQUpfIpnKQx6JvQlCb2DkY6qY3PB1fbr4p6r1gyDwBqTErutbc2aNH20knxbfVxZ2M6lPqvFtBYz+4yHpU3TSoHvdW5Y3BKS/t6EnJWZ3KyIhFhSAT8GeJB6T4ZigxEhcRaROksi9ZW97RAXQuSFLCNKAMqMrP+Ps4yIcmCJiB5ZC4nRwGAOZjBryf9omCEkNmpB9KchpAJ6fQhprRueYNgKkFVS4sCtLAJwLF8DSkMV0zCg+cSREbEwEQ3cMkL9aPuNfm9610RjMoO63BqiZp2c5LWOL5shW8mmdJ3jIAUL4BQHAKaUwlDb7YNSozTSWhanNkmxssMGwCY3cvwakCwG4zTMtBBWU5KGl/T+lM7Oa9FUPoVmui+REe6Sk7roAC3dpNjGyKVDx9mt2oJfO3URCtv5/uFLlgREain3hwRSr22vFrvLna8uM1XqEtWkXY2yiLmGSGYfzj2LNGWkvy5ZfQCgbDfmU4uZFPxawWWEpxv17RfjxxcJmGUifHJw9yl/blhNnr0ZAw0BrQDFOmdYGh9fEhWiBPPz1pRbP4gPQti+ksJZYUws/JQ6WES0gU/hmyMi+Qw79hlNSYFRaYDCjYG+bQvvb0pJS8LrUgClMyYQUSOMU4Tw/fh27cgbpPGWktRVQorc55iYeMWConeRiF7s+58jJ6m7Voo0KUjFQpLMp9zdMRes7kkKywRHQnTOLSt9V/hc6K0m/njY2BCtrWVEj4DRwFtIMBzAjAaWjMzNWiKiNQwjJEIqGCktMdElRNG3Ll79KQgh7VsnJCAL0sUjF00unEWF96qg73IEIaT95+ZgeqKWmNjcdq0i6WsgEPqSP7fIkpEV6uNUxikiJUp2bLZ3qwKabzE9D59Pm8iOMXY9MwC0W3NIIWP/ShgD9KTB7BLI9te5bC1OLGlC8sHv34TlK1YCYNoeb4qP9x0XhBs2pmqDek3df/zg17XneulBu4xr/qJGVIeDuTdwpAF/KRmhfVJC0NYyATgSklghKNC8DSlJiVDl9zEuLjxrCYHnrufX0QbZughAXgsM5MdgjoS0KSpZqeZbQwybqvrScTwrmfadLaz7EETkzpImMogrQFMcSCB+6XPn+fvrYOMtBIaeFAgI55IBtzgqCcDkF1bhVk7qvp6U0BIAtI01EbD3hEAElRC2SKEwKIcGPS2hhK64wwDVhd4g+FtToTDyt+ZEhAeK1j0H625jJaGekZAiJmY+1sAVv1sKmGX1PHIWhbaEhPb1v/nkEgIUJyIraujxfaTYmLcnoQcl4rlRhQKL6TvK3bVygfJAPIdyhQ4QrIz8vU2rm1t3LUqVHcajFcgD6RCiqnzgINIfU3L3NghpjRbkUuX+GVlAFM5aQRHUWsMMBxDoBSuJVBBKOUtJH5ASomddtyALRHElREQyVhJvIaE2yCLEojiXL6ODq6RBnOmOu03Sk8kVVwzKHffcnLLBWkXtXEGcNSK3SdpjjroMS+kzCSSqnihE+rCWBCv37Ntcy/afAYR1eyXlihSuELAMbdBu7u0vAULSYXFiSRMSzYQ8XdoXq0SscRyndeMgbTPAJyU+47TRFjfHDywV9KjuRqZgE0cby0gufzrPnMRdeAg+JaI0PqUjoK3Gk6w2GfeiuG2Tbaf7adoWUsRaUlJHQOpAwmlKOtKxOZG7R9Lmts8iPZaEX4qP8RljnDBsNTxe4rOFCJPq9NydhCwjUsBrcUmY4m0lwc62wZ2dni1f9bV2hQS114STlo4sO1yDSUIYD9cppPCuB0oqDEt7nqE2k
EL7KulDElSi51A2jmGgSnCJfABAahnx+5j8mCRLm1IC0hXVIyF7KDWmCxkRXQpm3pCFR+eLe2dD4HNqSWgbKwbkrQ1B4LeuTkOetCBy6XLnZc8wssI5qdOOUUcSmNuodyV170FP5ud+nuCDvqeKm/QdtdeK50ee+pqTFXqffOYr5qZFt0Eg68g42OxUzo3LWTEgrVhtir4LXi8B1QeEhFQKes5ZD4teZCEBrJXEu25J5chHAUOkQkhQgHo2uN2exP81QjgyY4mRERKlgftHcTzWYk3afJ8SGCGpAgVn02cgLHlSGj/PFNJl93IKGS1ijXea3IL3cDY2pYZk5I5rsly0Ff1Tt7L89arHWfImPKnTAErhFC6CspwFhVxp4N10FzOUaFbKrZdrdml/x2JJE5IUucw/bQS9pmBtnnkjV2W6rvbC2Zff6D+nPs5LyXqiZLjH1Fow33S+XInLF9sIXgiFK2ZFgkMIfCUy4HdtKYfFMq4NSrfnXz9kMhcQbr+30wwT6rKaAeOJIZC3BjW5S1H/hnSrsYWhNCZyDQupaoOWtg35y1mh6ixXZCkh5cDIaTENkRER2iGY3zOBfxesDUpYC5ytFxLAM2BFbSuZBYr3b1l1BUhJFtVFiO+L90t1G8E/CycBcGGZW1IWO9I0uRxtCZWSokJmYvctp1DxiqSYnebmL24djPqSKT9SQcYmf6AvmXM2EBEea5cqCyi1dVocNFdNPVaa0FwWxrM01mVRAiw1beziqI2BMOH6xsV2ILJMmJg8GO1+H0BOzdjjpAJqCAmKglk3nHWELBzOUuKvxf86+PYICaPCuUgw1iZYQDRCTRJynTRwbnXeDbgasxc6xI4dSmdLSSYcr7XWFmTWLoZxLlF8mhN+W5WI1L3STfErTVaRumtzGASiIR35kIBLHmDnW+F+o1fF1ChXOnRogyVNSCRbPHJCXVstc46IRH75CJNVMs82CnTZjE1Lo25QFrn4B+6+YEEzGCrf+eRM7gah+F2yyFM6VAHAZfIYAk5aNOBB1vXtrX7x9SGkCLKDIzXkEhbFt4yZXeusI02FCIMGr36cNqVUBgIBAaoJBPzvft9q20hITsdo2m4prRBT+Nz2sXaYjkljgki4aqOF4vvJyHKQKBPc17LUFcE/trbZbbmq05VrSzv2tBa+RglgPBGZK7XP/hQCm9NzhOuHonOytv/5mMqNn0b3Qrf/lPImqwyqxTwXG+6dzSXxjaGSOIJcnQ/67MHiafpFqI0Aafx8pUQ8J6Xjl8c45S0jbg7x4yD0dy8ZZ94SKUU0RpqsxrnYtnh71V2WYNx/pZvnqDahQIh3ChZELkwKd7zNR8fJjBYSUkgUUytg9AhCT8GMBjajFk/1a7SduCl7Fnt/DRCTG1UEAiKLiGhAytg9K/lrEJMLXRqXRpuIrn1WQ/ZZG/tu83S1QNVizcGfi3bx9uTqKQV8DBlZablyguab1BrHx4R9LuH5+m30zNkxtUHuY84bIS1UqTPCCOtngSReR4QAfeGuZ5wySYkQr7cUMv0pbIQYkkU+Jy8GLGlCQsgWn0p+y6GOjPhjW2rLl1KF5A0JTlLI5za2joSFtiK4SuO1jbaatdPsSbtK0iTYFCwYt4WdWlhBUMN44S9YYGJLAKnL64om2mNZOtcJJcG2ZGRSIsKb20SqJiXUwbUqHFcky1+dIB6uGcYBF7IAZDXMFSLsyWW4h5REciFCCYEeQhG4yj0JYcdbGY6nz0RGZkfaBWJXiSSPCeg7n/6esyzlLFW6bE4zXDeGfI0S3m4GeibH7rN9/ckXCQYshiSbKloKlDmBCZao8L7n7xC5cPFK9mQpofeG3LIoG6NsGK/ReyQSosclxob3OEdWUyLi32VGTIDq+5EjImn/Gf9feEd8qlsjXIFBQMFtEzZrndV42zFrDGAEfB8ZAwgpbA0QIg5GA6Wyf03f/Q2ExKQCMFCNQxHCWkmAYPFgLlvUq0RAYNg67ywiBpZYGMBnf
SIyQjEjZBVpSipRiTt1MWo2411wF3WDBtJl+rPTh7WWCGFsTByzFORICZAnDbn5KUdGRPI9Ol/a7+O++xPI+LeabGd1ECIQlQ4d5oNNgpCki1Kb/dO/df74HPPNspUWXFoqCL7UptayE8WZMC2k1xg6LS4FlkoTV4YljXa2bxwpISJiFwTjAgydklNSYcJw2LjnFC08JNgIgRGrR+HTDzvNV1maxrHhmyxjNycK+lNuUQt9FcZtSmhzqZTHuXvwNtOx1ft2gjzrLO/B0kBguOaPMOWunQrPdX1fplKSFM4NwPggeABQPDuYiZV4/NxtNJxkgUsLxQHx/XA/ciUEhoj7h4SWwcgSkjUDy1bSZ0cpW0k778kJC/IP43zyuYCITG1yCNPebXFjY1ycSFMSh5ioBKGJu3BFFhQXD0c+3Ny1kGKegNCvQF6ooiZn36Ma2Y2/mz5YXVYtIjQuU1esOksIxzAzOVetlnTPYeQRQRPuswRcWmurMFDG/eaE65IsARBQsm/3K8I5vDCb/s0hDVCHPT/1aFkGpYNhxAMIFkpSBBkTPlOciP+ubXY7sopQwUtOSvi5ODhpLB2LmzYSQ2F8RjMlnSuTsH1m3ZrsnEB9JYSzPLnz2xTHMbnwSQVMICWUi8G6o9Y//woRyfV722fh/nIrlO3jYFXn/c6fy3rwdu5wP8OSJyTk85suZLX7c/KRISLpxD8upoBrS3NuX/zcm0KwOxALf0ow/38nbJcw0NJphUqrnQMkeoCvr8ChRKaytFftmUBE3LWkFH7BqmsXkLhJZYRwDu5aAQBSW8FAC1MhJ+MQXGtII2qv33OmHp+lClZYojSiuYw7TRXOU4E0hzQAN+0D7/LILETQ4Z5zLiXRNVU4jzbGBviaKlGxFqlAUImbpW58UVyMNk5LaYkJxRLxmh2zpfaElNeKSF1kcrUa2ioItDEYlNrV/tDe3YgIChDGzUxfQUmJmb7y5GSmp9y1bIYsbvFKMY5MS+EILyNxnkB6d8bFjzrLYDqPjrdwa+/aRfsW7Dyltml60/5O0+jm3Key7aapThtAxc+raV4B8q5ZREQocxt9pjbWIdTUqsZDjKv+TYSHKzy8UO0E4hIGirT/mlySjI+7Ej7hhT2viK5BL7mqj4Mw4S9Zuf0ywoVfZg0xJsSF2G08o1acYpuy25HrFllDKEEKkTj6nq+/5OZaLTAUYa2TQkCNBKYKE80lhSTCFyz3lD5Z0NgSjny4eZKISUpKAJesA/WkhI6znafbEcEUCTH0pI/6NSEi9pmEtTUQlvaXXAzgFusNec0OzVjShIQLR174H6NhSwkJHV83WHjALb8OP19bvPxRu7bed2Pj5EfsDCBUqy9hfF/Qi1y6xTAlXFFazKymfv2adSuxB8likxf6gjDux5BxpIIJ6Lqm7cRBfKaehJSQzxoFg1uSQseGE6bWEApsTQUnIAg4/ljWLs/X/Npk21In8OaeU6zdre7HM4zVke6UlNOzLx15CdfKEFVpq8Sn0CYIGJyMkHsVCR5BALAWO6oQTxpPHjQMxCSKuwNyokTzRy5ZRtiuUWpZFa6Z60aqwa7DOOtqas3a0IvsJHjZhVdXtjUlapgP6twdoxgjRgh4XRwe00b7pvAxTlJEBJzQFBOUxoME0s/f4eb750QEgHdDom25LG7xuY0Tson4GF9I0RgA0gS3Lm2COxdsLQohaBzbc1Jr4xiI+Nr8kTbdnZ+yTD0RyWnq6RggHyfCyUhw04KfP/w1K31m5x9SSoXsW859eBTmEiUEtCLXUONmWwFISkluM4AaQwTQuAxm7dy7GjEJAYlur0pGUquIRrCC5KwifLi3dZ/u0KEOS5qQ8GBSAJEKJhUa6vzx6Tz8O8AmfD2elGzKsN0m4AuE6argmbUE6TDxpstQNQuRgUw16sb4hYUW2ly/VzTxGasIX2y0ybufDV2Vc+kXCC5MOnLBA/WBCjHxAjz1AxM+tTbQMm1HaB+30KSEhBOTN
ONODlob9Nx1SXCSRmBYakghXHIA6qeq1o27l6TvSoUAEVHTQXAOVhfuBkGSQ7CMBDcuJszEeyfnI+EikA5Kzzs7CkUHB8kDVjK4V0wpa7WYUjKymkT7O61lT0lMFQbD0tYemekrDEYag0JhMCorcwy5cFkiEsdI2H8S2jiCS/UrUs09u18+llNBxbYvkDwAlcQQixm1SRtqxnS9i1fsskV/yW0OCClyeTrqqSK8VyGxhozeLwC5+nyVNLFA3vqd4dK2fQkxqbtO7p75vEh1bYalrpCRVPPPC9JSF1Ox0pzl0M5DZBWhOcA0VCnPEDFORPxxIvqeg2HzNcWE0BxuDFwAe+iHNA4krfPDSUjFMuLX+Vhpld5DLkaP5mTbd2EM2XFm9+sraa0jmtziguVJCQGjHdkTwY3LKyEokYhIrEvrgoyrHCciVIW9QggTUgLEBGSpiUQbpTDi0pmeNxqWNCGxvqyBNADw7ltNfvlAcIuhz5Pg/kRKTnz4Tv7zf/zg1z57C8C13kEA3RAa2jaB47kFhpORuvTFcZrnIDimlhI6d05+8lYS/5vwFhIufEvv2x63IyUjNui/SkRqJzjmwhPIInytDhvE3a4f666TbqtzoyPQpUJRyXbXjoTyxCqiSTBxf0lIyQWdlzLMB5KK24207WNtIs24bafVJithC0MqyYmFJRdKjncVrdTYMMHSOOm8o904rFpFgCN333aic20M5KzVub/2M7lhtdP+0rFFpW/COxWsJYjeK+suxS0lVaUVR0iby7YlyRji979+vsm1NTc2uEWEkxH/DnhiEv7S/u4E/hqkdAA0tHYkhGL1ICGl/QtJMQ1Un0j415YC40XDe0zTQZwtyk1PpkpSuOsWvU9ETux9EUnRERGpxoLE27ibVrCo1BMRvraTKytZaqUw0E5ppI1mSigiKNqNBekULzRHWKVQCZcqF5SaOEzX1hKUuGJNijbWEhaszsmIdtc3BlmLiP8+hoi0XFY6dKhgSRMSwE3uSRVdXmitsi/yFpHcvpRfPrWSAFVh/P4AJYSPGbBmdfuZ9w3vYx4wSoI0X6RLY10gyhLQieCR+vtyn2BubucLb7rIZO+h5nkSsq5DLQTH1JWJFx+zFgsrFKeCAicFqRaz6mMepwGtA2nZlLKJAEiQ7yn47zZfQJ5Yp9YRIhBSWCuLdWcKiKw+TCOZa5c9L5Gr2L1PiiDecD9wrwl2QalDbTA30t5dhf4OXIpeyuCUkoFCCpR9Bcq8NKUket6yE4o3Wk0nXOwSMGVccLrUGBbaKzzWDEofR5IKzlxADgQJgDQY+vioap/H/VUVrrn1imcrWwrgFZyJ1NFnvp2j1KRcqp6Pu99SvI63jihZsYzUxRJR/AZPE02Cs8hYQ/mwIueVdMqh4U9aZ7sPF3Sr7wePBeGEnb9fJJRT7FRp7LugTdVtkVv5c7VQyFpUKIFhaRwp065fdFSE0f6136l/OGoMWxXQbt7lS8T3CsQWkpGGD0wnCygnJrZ/YkJi+ypeIzxRqbGaUz9xNK0PaapvssIWjsxOldZqMlUqbzGZLpRPqWzn9xD8TvGJCnxudXOTK8RImbwAp2By5xE8O1ZTZqxM0Dr1ny96qI0bs3kikvDbDh0WFEuakHAtVtviVLQ/P56jiWBw4SltQ9PxmwppkSK4NAGAVMHqkIuDsJ/Dgp72ldbGP6p4sY7N7mEBMpEgn1tUovOP6fZxmsoowNprFcecM2O9IHJMAq4nKMZkV/Km4NfJg/HsNSTIZQu+noviXJ71Ve4266waqauE/VwV0toILJpUhODCRkw6htq6UJHwZf/CW0ZKbTwR4allAaDkjZgufBspPqfHiFdPShcvwIRoITCkxA3ajg8iJHMswB2IheX4/mynk+tWrkBfquwI81xMVu0+cd2MxYx+EVKo5WL5gGDhGCV9mSoZ+LEFc8/qO6Lhn6EApgvlBXAinfQ3ZEuKLSfOiOhdknJdbJsUajMAfvhGglshS
aizJzGATdiQWD0IFUGZzYcUB0HvgTbA7KhkhCS4dfEU1bmYGkpN3dPCF4idVtLHRQyF8Z95u6KYHElrIip1K3IglQMf8jmyZxVe+QxZ3B2NK6homz/eBIVWuq3OWs7vs658QDpuyR1UCu3JL1mi6f3saXK5FQCkk0fsHCCETTLgkiX7bHBkhaIgeCIlwiluoiHJiUhqJUmyaHmSQX8dGeGWEU5GqD+stcRdYonLNGridXRhrtmhGUuakADcPYbFeTQoCRYi00Gddp23hz4vpUD2ceDuW4T/+MGvY98FVAlf2uc+HkQC5Sg+li/CuUwofILk+/tzc4uD4K5CfAVs0HyRwCdF9bgMSEtFC1Ba+T2HSmrgzO+EhZjEyE3Ku4QIaz0pDRAHvefB7wmoEuyUiAB5MhisZPWLgfaCBY8TIQsJkZGYpJTGMEtFfcFJLpwNRto9a3uNnrsPTx6lgE9xA9jK6IWEdNnRiCT3lcSgDFYTLkin7kN+XkBwA1TsnnPxJN4lpIHYL3acefHPUWrjSUPqXpV12aoRBjliFzomaCdplgvFrSTknhWsIoV0LnlCRNmQLCmxbfKuRgxBOHPvFXM54oKbhoiyQRGZpXpK/j5N7G7E4yhyhCSyIOpgIal7B6h/g6VKQZN7FjR6RmIWGkpbN8Wet7Da99andBdwbl3OjQs1rm01a2SsvIpru1B7eVrv1FJaR0SCxj+2lucsRpwMptfm39uQEnIHJYuSdoTAz7eSksEwqwqkT2cvbKox/25rcpEr7f5ChLTBRgjAnQuwhNla8gBZU92eg4gIvx9ez4WsIfwzUE9GMsbeJU9WOmw8LHlCAky2QLfdd1yK3jof30nbs9QxXUgvFBK4Rpf3EwlhnmiwqvfcxA7AL7h2P5OcI74OIafF5NuChav+uFzwIhCThLQNQavn/MZDNTK3D6K/AFiBwWqbc25ZnOBMEqsTzhFISM8tH9IECxW33tA1osU6m+2qzfXt3zR4mNqWWgNIU0wuKUOtMetcUtYMS+QC2MlNa+QECLKMDEZhgIUsWDZNLCcL1sJgtZrSkAXCPqMpQW4r0mmkpW/PnCuWOCxtm4BYKOTw1zcG0llJAPhq35INDj7mUosId9tbKi5by/pxSflclkNCzgqdEuBcUhJuSeRuWtZCIv24sxaAYDWxsSVWoCqU8HGJwp2f1+qoU2aFFKhhLvAxEBBWAPZZooBS2PlNSIEBcz8iJQwpYMgtccRqZ9A4o99ovBMZTglJ2u9KCsy5c5R949zbDHpaQAod+koKyNL1qWxjsa1eb1LPgJy1tY6E1FnIiYDQ9VNLSGr9yBE3+jxiv+V+T5MoTBXSWuikwFDbfhxq6V256H0daoWe1JEbVyHJ/SpkBwxWOnIfZG6vflwaT1oAVILfycJXZ8Wry6CV1nqhc6QWQN8nCFavpUBHuqD2xYlNgpC0wbqQBIo7aEJuIb0/4K/33zH6/h8/+HXj/j5APPoeWz9CoHKs2WpC7vnW1TOo83BJyUgbpG419jLEUOILLsSERFrKSRMIhJTNtj3cQtOmf5vIR53AWPfdbmPn1rAuC0n2opK0xZVxEQewp8IYAB/TkQ1w15a8KCbkaPpLJq9MeylrGglgUlDqVI3SCHcO4aWCOlJiExmE/a0lLx5HKbgrYPQsFynOvvzG8G5wy0fG+scFf/+bmyNSd1B7Dvjj6DxRrJUIGaSCNUR4X34pmHuWr8cR/yUhMMRTheunQh7FVBkTXJJswgokqXRdNW83diqVwZklmBQCRMq5RZATce4umBv/oc9kNB65G1wpA0kelga8bhQVsy1L4zqepH0ak6EYborUDXcc0sB0Oi6XOSwlI7nU7mmWulzCizoLSI6M8L5N+5N+H4w0+oX08WKltv0ptYuFFAI9aTCEcwF1mQ+tFQQQxj4HY5BNuexjlhLrCYxdVUvE70uadIC6hIgH9VlKTgBu8XPXTM7B+1sKREUdO3SYD5Y0IUnlhjYCW
s5fNb9fEsTegpTQcfd3BAFVMP9dm8mFkCMddf7PHN7twC2OsTATu1tFaVJrKnT782aeWzRWNKtFovlCZ/+OfM0KdjwJkVr42I1WJMKfhC322nkQyaCNnE+mJt42nqoXqBbA0sl3IL53Ok/TbeTiYXI1TdKsOXOj0v3VFTctHjMSrCEhuDznvgUEIYxctmjbsLC+WXasuqxbLsjXWytEHOhO7S+NQcGSaQxlLDhyjWxU36akfteguCzK4ENCNQ8+pvSs6bNbrKDkAADQi9Kxh9+B2C0tjtmqH1eRBVYGLT7AUvyyPguF6+w/KeD/KiGcdtr+te0V3hXGu8bkis5RE6NgYRH559N4NbAW4dLYQG13MkgtPPGmAHVrCbSxIZTAgQjImkHpxzVZAGksN82XlBGu1DaWh/YlSyG5HZWuuN9QGx+sPefeW17bJ2exa/IaaKrVknP1bIoJyWXIygWop25ZTe5s3NUyR1o4wbPfSzZeAzmppAIvgJ7rS9J1kDJpTsVkmY9VRa5ciBMs0CXJemI/p9uF/wzUK+B4n5u0300cW+K31SVxoLYYUbGeLEZ0MSSLE0uakLTFuBeyLZTXfC7M+TZ1RNYQbXzK4FSLRRmSgLj6dS5LEpnHB9Cgmh2lBmTsFeLR5NZVXUCr+0S/J+3hC2G4Z3Zdf0IgrbDRZnIiE39kwWB5Iiu/oR0hppgSipNocq1IBYMUuSxhCE2Mir8ROcm1s+o/H4qckfse7UNktupaERORuSSofaSNrznC97Na+KRYWmY8EXGDkjYxgfOxB7TzExcodQlAWN98CoA3cQpy33fG7eNjmkTlskTgOLF7zIO2zj2KRYFzf3gzhlpXLB7VlMqxOx9tz717ueKP6bEk0OWEu0KG2BByz7J/qbq262Mp3G/2uwAAPQIACPcXWueDhqWEERJCSCgqNCcsgZVwmn6X+rt0wqRgYeA6GuPWVWmu1H5uXDsoMdIGawdlRLqBvLa/rp4LJ+V+f6fVdzforXawzfVEGS7uISoSaIy3HKa1VPjckEt0kZtbcr/b7VXCkLOGpIHqqVUk7asmIpJ+rv+NCGHo88FIO9etEIimtFU+ULpxf6wGtCMmQ83nShrrRCoZB2Zkw9MiT0qMT8jAiQoQyAqBxz3Zfqu6ZhERodvNLRe0/2K22nZY/NjkCcm6kJG64PVc0DwnKU1B9Zs6KgtDJoCcfuP+9jwGoC6jDv9MQayp/zgQu1xN8vzb7FvR4JnqIgbALeCuLcZaUEijC6CSCI5fuyyZOxazloSTVw/y1cbHqKdSTSUnAnRf0UKfLP4pUuLB0US6UncKahuleOa1RqrXjDNYFVJgwO8xERp4EHWa3Sly6zMGhQs4H2rtyIbMLrJSEhlxBMWRmWkoe6yQGDqSI7WoWEuInCsnhPSkhJYGpRFA4SwM0qZnttYUg7IMwt9ixrjU2dwiwmO1eErjVAtvzxGfP9IoOwGOp+61AlwgIcITktgqQkSErCbCaKAc+b8AILSLRzI1hAQ2/aohzYgqAFmgkAWEFBDGWTRdLIGRVhhUjsiSciBYVUxERmyK6dIRkthNMQfKPkaf09/4PpVaOm79GpZ2/iYLnrcKlPAB3NH7X6IW6TySzp/R9VE/33DkrCLR93mq6nl/BALHCERl/+YF3yrdACUMAAMUEtpoDAXQ09YaShnN8kUzq/M87WO3xYSQng1ljov2YcUtUxD5APIExKDBCkf7CNPq2XXokMOSJiTjBv5CWEaaMmpx3J9JCAc3lQPAMMmixQkHkZDBSHv3A/57yAqjvPBIi0Sa2pNnOeGCPyENnubICb11Y2tcEKW/b02kwvj2ETkB4IurAYgyWFFbKUDek5MGYpIe34RUaxmyWgWBoS5zDW9fDnUkMBXoU8tPXVpTHt/Bz18iFHeMxoSUzkJRT0p8m5jAlvaPrc8CAI4YIXZN8ffjLCXQNt0ncW8iI6RtBpWnZ/1Y0W7LMK7pXgMpDX2w2GPUbJeKz
DaLXNII7naVuvelsTP2c9Ae95SMyIcUwZ2FB83TK0Sfe9xqItwz1SNAa4hyBOgRhDF2myMhgshIWqyRJEEhAemWVN0HxAAo+lCqDyUljLGxJj3phD9hBcSUwJGL4tqhrXNzz+wIawcjDEY6ctnKuSTSX+t+JbxFsI6UEFKrSenI0lAHLb2S8K6GQyoOKfLnS9HGqtGEpvW1jog0WTry14iJWe6eVI0pPpcpzj8f1/bZkbMcOmNbTwkfF1OXQKWxvQnJp+88niquYxXeGSBffJF6KTyvKrnTft1x301wM1sqZIS7lG7Ia3ZoxpImJOPAXw5ugs5hvtqUOrz0oF0W9HxLBX/7yF3853f/z6/855zZfJyGj/7mspgEX/GQVYcXFmyLnBtWDjn3gWjBW4fx42M5aEF0PsbWXcKdWzPrShI7krUi0L6Z36JgUFPdTsflyAhvnz2HbaN2C2+u78vkPmwDQxwEb0+dRYSTEu4OZQNIM/eoTVSHhMbTTF9BSTuO6B+vzM3vH1JA0rOgG0WipXSWkhBfop0Vw7pq2D4WgDTQZb0fO43xfiH9vXpLC+BjWpYC6pqZCnhcexs+x2SkjoiQACRFLIwpwVKhCmsNSckIJy10fEo6hDHBGmJ0s8tWqZ2VxJatM0JaQiMLmHLkiYqStiCJoOtTu5L7zCGdP7nL1rh+5tuVDAUkuXVkHKHw7oWAHct+DnLzQGlqn3u9VRnx9xqlB7nlhvtAdHzlPoXwbpJ1sR2Euro3kyC11o47D82fnKzY7e64zGMdt75EbozM6tJz8W2lApRzB7NFMJvXyTTrYfb62r6PpZl/LGOHDhybNCHhqBOu8vuuGznpXs6AnPA1xwKR+XaAE5CwcC7rK/QLielCYdoJkFOugBcV8koXdB6PAIRaJv73BveAdIw01TqZRCMUnyejnWeLugYJoEHQIqE4FzuSIp+mlxMot41ZItJ0mqk7RVZjyIiTPa9h9xYIyJDaQJYhXW2DbWPVt5wWzqnCEpOeUt4yNeUE+BDwXk2Dyl22ZlyldtIeUnpYJYSvsuyfgbbZcLQIiQUA+GfgLSaF9PcylBTcLnzdFNtWgWEZrDdzSQyAj40a6aiaPP2WktbFil4LU3HqisItI2mQb0pABFAlGKhaQ+xn+1eKGgsJ7eOsIGI0cOSjdJaQkSUiWlddtqLgdmYh0aVtoCrsd9W3+8oCRX8ZhLQFOOnVspY+g56UKBVcOljr0hM09qH4Jo1tnriBEOJA4s9ATEa41aSJkHBrCT/PUMfPkfq4DmkK56bYF36d9K9XgpT566WKCx67lc5fbawhufa0RV2/UvYtaqfMFHEmpAH6ub7LXS8UahQ+McGUXyerlkggfpZtyA8AV0zWnlMLO9eLRT4/EYQSG7ytouEd6WCxSRGS+WqpF8oN4v5qFanDKw55EADg7d/6Ze0i1KTl41aRvnICpMtMMuUqMtNfLqj5lLZae028FIE8LBQZWR/gwn10XWaVyAa7T4A2ZMTvW+f2wDMnOVICTQUUqymJ0+KKtI3uraJFTRdcIWJNnAQUAKnJZSAkSODt5cI+YOtPKBncGWS0QMeLMu/vXDKCKIsbPQ9y44INyJd0bR1rEXMWw+pv8K4xQ0Fjev2Ov3VF+tzTtNgEbh0J24KgxF3XUjJCpFGw/VJrCD+nt4ogWEZSiNTqQd+5e1ZCRugYw3+XhStg5wJXnLXFuOOkkJGFRAibmUiSO5RrfJOsFLlsZd5VPu5T5CzP48DPFc0HiDPH1Wncm0jIRPOpe3DBpXUMiRCsSLEU2ftIXazSbf5c62A1CdZkE1l7lMzP4WndFN5XpMRLE74AwTpDXgR9JaGVzW5J926Jr/CZH2ntHE5QPYQraIhP9bIOYB06TIYlTUgoV3sbNAlvXUD6+kXTZB4WiHiBJKtIT0os69kJdkW/wFRh03ku66koqJWDLDCzpXC534FyVM3930RG8gtFdRvdWpRFyAsVVe1T2hW59dhvi
0z6AtziMC5AKhe34c+fEIGcZaQuNiZtoxQup750z8GRwTQbDwCbkYedKhc0n14rSvEqxrvl8XPSef0zksECkpLYtG8q36Xrf1jBl9zQSItvIWz6YqrrAOtqNSotQQHyPualy8RDQgN3pdFGYigEeq5fe+mDXWSYSjTzdfMzfw48Va/yz7lKRkLKYPc3OaeGLWzJM/5oYyAhfHI68jxyRbJt7QchYWThsmppV1CBpN8CMBrGaIjkipU7E9KSEvqr4RjzyL69zo1LCQklDAyRYRhMFwra1aqgPuwrCfSBNQOFfqEr5NrfN5H9GhLi+3wCoTpVRIwjDk3nTt10c+dPz8WtmpVzy3pSkvvurSYqJijptXNWjSqRzswZDbJFsxte9RycgBD54HVmcrGWufaTWyqNJW4Z5tYT2j+3VoV7cH/Z/NlTlHFNWTJaGvSULQRqJiA3HTpwLGlCAkyuJR4XQJf7vQ1J6dy06pELoEwDikkQ49odIh8rpgr0pMBmREiUwEyhIm0pYIVPAwMlFdMe2voSXFuWoskq0uROkKKS9YRN5DmklpvafbxwClSKLubaIeuD3OvIiC86xqwMTYGnPHsar6MBAFDWhUmZ4KJFbWqqK1C5j6QfqfI2F8x9IoOWBC1XK8G7WxF5NbEbmd0RnpTQM8ml5tTCWG3hSEO7oolEpmx9B7j+DGSkdBYVKrY2N9JVodNId43FrTGZKmRE6niSPU4OOSHk9Wl8hqCMZQSgVLnVAmzG2G1ESoQIhIGTEgH72cC4iA9AuXS9xmjHJQSELAA9sql8AVdjpEpKKvEkDsK4K7hzwNhgeQNAyT6UtG2g5AxSGGcBtnOelnaMl4bNiYmr1YC9lBTDBZ23PuTcleqQO55/zmnnAVTm9Elqe3CQa5k7afJbvu2NazTTilTqizUQfJ5ul3+vol3im5xskcbPcFc87to8GJWRhWSuxt0ZiLMJ0thZMwiJYabS8eTftcy9s/WVu7ZSrJwUbm11GQGlNo3r2WKBVHmF1Hq9ZicjjsWSJiR6AibeNBlwTDJoaN+TH7Fz62Puj1BCZLVT0T6JKwH37/d/mcsWT+kJWIEktYDUtQUyJgOpq44XLk2zxrGptkKcqrF6DnLnaSIlJMB5YQNAWs8kd3+cxEwCXvE4JSPpu0Iu9VIIlDA+PagS1ioV+o2xRcQfc5nKCFI433VlKzbw25EypHpN076G51AlvBz2mnERQx/nooMrm+8LAV9zgVLwKgEbyM4S78QJA0x2PJIwYAWPqqBIAkPaZp4wYmPiKz+7tZbg9Sz7Cj7yrpCkdaOM34tQzFBG52rKomq1ryJkqaL+hmUgNG4EbHVrAL6atS1ALsLzFM6K4oSuQvWta5WQQDkCpIIopX3T5MjHlXg3Lgp+d5+9VQUI6X9dTRKqVcL34fCk1RNuax0uZVXApBoXfSWjwGgl6t2wuBvXOGtHjpDkLBt1x407HxGROmUPWQzTtqZuV1yIbly3GSsudV4p4XeNLN3Ib88ot/iakUOd/JEr2shjzMgqkouNqyOeJVtL/VgpdCCyhR2b/UJmU6HTX05U7P1pDAFMQ7k6TLaIrBaGxcLoJUFIOixOLGlCMl80aTP8QlozweWE0A7N8PVBtIAS+UmU/k0XVhM8XUjM9JS3jPSUddPqK0tGCil8bQHAChyliYuNNYGTgRjV55uzrNQRkDhjUJ6IRNeRsZtQ9VqxQE/B7qmgG64VW1BSYjKuynfstjU+mBJgz0+EzGdkMdGOPMSucvZLkyWGL4JESsK9hurlNsmBtPUkhKjEGKQwxt7jyJEgK2iV0Fq4uiGubTq4sQGppSf4yyshMNTB/YHuj0gOPwelqeazbpPLSCRELlCc20KAWzJ47R8OntIZiMd4rq5ICJSNLU4EY7hR0J5LGOFLX0iWRlv4V8vtx8aD1LzytSPR0jiXMOFqlPQhVd/OJKWtcCPKUXDdoqB3+p6t4m4LJRIZMULY7wmEcLEtrjZKl
KrVWUhm+gpzowJK2pokXFDnWvOUjFRJuEapRdZykaLJtSrexjPZJe56SXXzuvPx9gbCYUkJIZeiG6h3i+Wn5vM4z9rrXbky8Q9N67xtStK3TG6oxh3G507ds1LLB5EQnsBglOyXSwiTIlXy8c9kKQFCzIn9HMgKgCizZV9Jbw2nlMU+Y5ixCoaelhjMNRSkWSxQEmJM/ZgFh1g8c/hixf2SkMwXTRrxDvXgwf7v/95N/nOTtq6NpcomLDH+szaOmOhqfY3sNaSo7JM+z9SCErcxnCcdDwtFVisEhhrF2xEJrIGYzMdawq1MuYD+OiECsO4P2lkW+P03BaBy8pPCnqvZHYLOK5BW444DnL21A1ZTT8QkbVZJhIxZiuCuQCeyaX1dzRRh0IP01hV+X3SuFEoEd4nWWX42svLjy9f9IcrMw92tgGr7ghbZszH2G7LHtpUPAsHjRN7+FZGlxJEP9yyEEDAiWE/IcgLtMnNpG9cBbWwRRQEolyXLj4KSuWA5M6GhlMC5zFuyCBYSt71JLJGeiAeLIxci+4X0ldVp7OQzatW8b4zkjnTVdXY+RKRuW12yhtxvtG2SOJfUTSuyoibvPVBVMNVZNcaTkfaoixOxv8WWjkA64loz3CIy7rnE1zYJyYsJSfp7ui9HdLwGAImeCtZ7ykgI6EpGyw4d2uJ+SUjaFjsEYv/RitDZsZGJQf2Zmyg5SLs8BDBbalvLAfA51HuJ9MJTv4bietpv8wJp5rmPS1VaiTHRQVvOMY6M1KW2TduWi2FJBV6CcpIXjU/S5NvUoc4ClIuLEmRJEoBEyH0vucDX5IIQFkN6dgNYtwCphRMWBfJlxPKLKHdhoGJbpCmVwqbSlVLYAEpp3W60MV7DSXUleMC0cOQjZCUTLuWqvY4x1gNnqLUbL3ZBpUV16CQK3v/KbaMxmKYLtu2nqvP23Hb/eD8eTJr2A2ALqQ3H+SBuIJQuDoPHe+Rc5TikAbRz5ejx7czFKL0G/wsAmt3/2KxKksa0Bd/bHhosomkRRariTu3y9yfhrCf2DlTR94RK0YulXVX3tEaJc9MiS4mRhRsPJGTaezWGXNF4HwWXxOlCYrPpgmnMy8Ttr6ok4DF5Kca5b9W5WOV+S7fT9et+S7fXWXTI2sIFaP89saCl7x6/r3EKpqYaGrkxnd5KmgiliYDQPcdxNMYXB07JCC8cTPvzvymp4Nfgfyv3lSEnEeEFojgTayGRmOkrFFJgQPWbpMSssjEkc5TtUgDD2brVavFASAGxgROEiIwlrkOM+yUhASYjJWl6T+7DCgD/efUtAIDn7LfDArdy0wMFqrbxY9YucyYRE1UalEZDsXSohJSQAEEgBPJEBKhaunJCEpmpw020Pz5F3W3Hbkumdv+0eBjVLrFxHEFgjDX79Sk5/S05N4RcAcJKYUSm0Su4hWQdCHrFGkPfHbnxz0AbQLnn4khqqU3kMmXdcYJrDoSLx0F8L8aJgUEQtvtpHWcaqyY9sH+HZWnnA83iWJhmtsw8TymCuxkQAvX5eclCQ5a+jZlO/AvX/j67PSUjtWRBxn1XlzZZe9ePqhBHGOe2RlXFc23115dkUbPnCu5bwZ1L0TZhQnpU7+plmGugS7JAtUYUQlpgXpvEWUVofJF1zltx3W3R3fksdWSNErHAyAVKLtQS2hY8TAlBXqjVY4XdOnIy7tq5NuTci9LfQrILUXneuftO3z8g9Pl8LY/jMjLWVY1PCQkPTqe+TmNF+HFNJCN9JnUp1IcU/+c6w7pBx+tySnSUrD7TUhqfbKM0xtc8GY4yGrAOHVrgfkNIctVd60hJffxIvE/ntrXuSCe+YUlB3lYYlCNgzmX3uFfE7lFAvaWhzgwfpZGV+ZoZHJXYDADZooYkPzcITePaSn0QWVCS2IrcfdHY7hnp4zZKVrCK0u/WjVMlhI1NcQJQT9m2SCGcz7VNU1rKeFFUieBB2kvlhdTQRroOT2ygy9D39prxfdn9XPt8n1jfZ
Yw0NC2CTmAzCBOaENbqwwUQ47TTxhgMtMFIA7Oj0vptG1fE0I07vzCz50DtS3P2p1ZUKfLEk2rmFCoU9KT28+sMS+MW+/zz2pDQJi5cGLYHwpiSjjDOMyckwSYz91oiFp9jUkxST4HawS02aZphsqRQBXg+tgMxofgTQIjCGh35uPNjiayAJDDaMW/Sd1oKSGN9/KUO/TUsgqvWYFREgc9Angg0ud9wNFlM+D5V68n8nlOujbngaiCOHym1gfIKiuqcnKsYv6Heo7rK83Ukri5RgO+HkUYh28X88GfDM3WVToFDGdlyLsz8nBQEz5VOtI3aRtsLKVAWdtwMdbDozQ1GY/uqQ4ccNilC0pQ9g6BkXvDJ+bmrjCacNDS0KI2ri9ChCsp0Vac1CpOfdv1aVhag1A+3cg0yQ4sQaM2zMpHaPM2GVRdvIcGDwavEgZCb8PmmKtHIkxAeUJ7ro8p9ShuU3XOFsGh89pT06XebxioFu3vC5lI78nvwSQmchSLVbvIc92RJTIkgvzYRHxImpKoKD6kWHSA3KumsV6U/51RhsxKVkly3TEh6YAw0QjD73EhblyhtMFtqn/p4VBrvYsW12GmAf92z6DvSwQsC8kxgU642gK2jE0gKCeOlsVpTasPGVnaUxmCZkpW5EYhJCUVZ1KVy9sdrSwC4IoATmDhLmTtkHhJlToM9PgOf/ctTSQdLFs0fLPW0DCSFYpZ49WshQkX2tB3GkLuW8fFvAnReASWsy2XPhG3aKCzr6chyx4lybn4Eci5YCRF0wmWd4JvT0udS/6bzQRqjkhKhVMi185jM7svPz62ntjhjuPc0gUZbtF2/2xLlcZaRaqasap0icp/qUxsz/Zc7/xwLhidy4tvCkqiEGkuwWQO1iGKTmkjhYBRqnQCh/pCSYkm4bEkl3Bq3Aa/ZuWyNxSZBSBYiv/NCnIO7Nzxtr+3W+XybIihF8rv/51fRdq6ha9LW5Sb19Bw0kfIMIYB0qWmld7lal2eeNq1t9fdxblmpHzLXeqXHVSAtabDpcoE4Pz5T248JHiYB2fsoe5LCzP0iZEyjfgy1QZimc0wfK29OyO+Xs0Jya0nIiqUhXbClED4Pk6044W67NNYqUmprhRhqaxXxaX41E2oQPreJo+HbeAXr0FdOgeGEVm/Z4e5OzkVOSoEjH7JtY79tKBy7z/YAgIt/cVvcD76quKkI/DkCQVYILYKrnU4eeWQtNMgkFqhHtpApaw+P36oDPbIhy6jmCYm2RRt7pbG1VqSPwIIywc3LCNsn3l0waqP9awwqKeulsPS/p6QbiwJTBXy65KETxAvXaU2xZ5MoOegvzZ1zLGMXF0ybNPN13ylwHoB37Wzr1plzJ4vawUitLsMcVIJfv/kafP4fZyn358xY56P3F4gHdk0b6F5oDaPsVk3ze5MbniUJdnLuM0JC5GTtIA6Ur1Pi0d/UXW6kDVRlXbZZ2wD42kkjbTAazYMRdqjg29/+Nt7+9rfjRz/6EX7/+9/jc5/7HJ7+9KfX7v/Nb34Tj3vc4yrbf//732P16tXrsaULhyVPSJqEyjphIrWS5FA3mZXabHBmvSkimth18KWl7/wvTbS5olF8MQXsZEoBeTN9hZl+gX4hscxVqkXfujb1lA2chjYuOxRAldDHZaXigkBTob860kHHht+CFh4IQdD8flONGt0r/SW/cTKhU1D4UGdStOp6d0PiByQs92R8X9NFVegIxwYSUteFdO7omiq8p70GLRJ/10elzXC1xrfZYHakfXwGj2/gGvih1igNMDcqvdBL6Ss5lHQB+SW1V3tBY8DqKKRuFzQWiQjzeik9JTFdSEwVyqe3LiQF3luXszLTlsUAShTRJLhR7FalL9l4oGrsQNWNcpxVI+tuklooM+8mL/rJrQtNCK6ERLKtZWTKpZm21i3laiRRbaRAOtOgeYJgfUeuWhJWYDfGtr/oK2gDLDfKi9clu4c6cCsbWZhsHF0g4KWxVkByUaS+o
DTVVBG+cOSk1AJrB6NKvEpOM1/pwxoBehJSQn+VFBgN7DpQ1Jx33LW4koSUDrk6Jrnm5a7F5ypu6dXkHl5ZDxB/z1ixcu0kq3OaECM9dqiNf9aD0lpH1jgisnYw8haTudSlC1UCmCMlOfKZs6aM5ha/y5aQGz7tr5hwXr/vvvvwl3/5lzjppJPwjGc8o/VxP//5z7Fy5Ur/fdWqVRNdd2NiyROS+aINKRkHbeqVzWQtaWsp+dw1v68skqSdzGGpB9K//FG7AgDe+o1fZIU6CvAjTQ+REL4t7BsTkrKv2IRqFzEyKZca6I2Zh5pS5eYEmaomMq+NbQqErCxWrC+aCmHRPY8yGkR7QitMSyGcBQHJoE3IAdPs0qlyBMJfu2YhT/svWB/yWupJrFVVdx/hs6/BZeUawqBnhI8pSAU0+hzOEUDnpixl2sRWk2wAaYP21/ap8H9DBidLRvbebvPW976xwItF+uxtqBKAnLBPcTPuCJc2GY4wNls36Np0jahNmXGUawcXvHPFPuuePxF6AOjJMBaGOiRRGApbe6EnjXfJA2yBcbKScHJC5rp0CpIAIODJqUrek0KOz9JDmbpKDU9uh1pDa5upkDLIzQnt3yEiJqE/pY8Vi+bUHBlMyEgqnKZWjSYikv7WZCUncOtL2gbe7sp1W1rIm9qbHsuVKsq1gxJxUD9bl1SbFTAt0EhrUi4ejbJAhliuoEzi7x8nmkMtMSziPuwXJiKXqVIvvfe6+68bC7Q/KRA7rBuOOuooHHXUURMft2rVKjzgAQ9Y+AZtAGzyhKTJ5WISUkKCj2QLqBIi8sOsczvpUA++eAF5awhpeQY1hITOk54rNVH7342BZMSkjbmeu33QMVHhPEYk+HxdFaKax2Iw/8eTPF+g2y4U5DpkrSXMz5rFmAwRx3rwquhSxMSiyBSNlKJKWGLiBX8/qYZ7IeKufBV7DVtJXdugdyWAuahNbP8EPr5FxeMDEN59Z1jqIFi4OUVJEY1DbhmheJqetPEiUtrCntMuhkTJpZUEkrTrOjO+iYTHloiwD9fw8ngaGl85a13eaoLqtoT859pD2v9BWW9hzYHPKWRh7avSx/1MFdJZSKQXFtNkBZSinFtPlBCOfHAriogyftn+sJ8Eex+F285fHTf8AYTYFG1ImWHdwwZlSDW8dlRCa2DNsMRQ21iquZElbbSNYF1x+BXg+y33l38eN1/RPnSM3Z9bytN9mzVJddYRHt9AMRKKKVVy1odcEhSOVOHSNAcCzXFQ/B3gCSR6Mh5L/t1hcwe3oKXkc26kMVtqjEoTPWty3wrrauzOReer69foPpPnPo5EdgDuvvvu6PvU1BSmpqYW7Pz77bcf5ubmsPfee+OMM87AIYccsmDnXt/YpAhJbhEDxpMSQuy7H37jAe/aIDGNOGEv2rbuaBPIuRhdO+aLnMaZ52Gn756UMJcMADarlI7TY4ZJtp510mMjwjkuRa7XtOqqD3cTGRlX3M8KuG5fp2DiCzr/PE7j2ERM7PVgU/rCLnq6NOixgiE+rWbkOgD/mX7jAbxxHxkWA9AuW1TuVpqqyltNPfy7RxXsh7DBmSl4kHl6PRISuU940OIb58evWJxK0CancU/ThYoEcApiD+5ackkmwCAN7JC5q3GrQ0nucIlgA8TuHzzxAcUa5ZKEZB4hc70LJCR1iwl1h4yv4bJ2WHoSkqv3QPfD/6btvpfVYqDYNHrWU4rcuGw9BingXfIsWQnWk0IKmwlYWE06ZeqyfUGukuH6Ai7Y3hVfFKOBlUL1qOICYjxzKWBUAfQKjOCycmnbHyMNTBVWQz9VSAy1xpqhtgKryzLXY+m0AVIOZZ5HjQCbA5+/yJrLj2sjyA5GeVegOtetlMBMERnJ7C9FOI9PaiBQSQSSI81xYpTGW0iuyYiICslXiMCSVSyXPIEuydegUisYYzDUwQ3PEhHjn+/aocbsVNhm3bnKqEJ803Nt87znk4RiQ2NjBrXvuOOO0fbTT
z8dZ5xxxjqff7vttsO///u/4+EPfzjm5uZwzjnn4LDDDsP3v/997L///ut8/g2BJU9IOFloEtDrqrJyjAuC4wJLmZASANmUg//141tCGxo0DTnwYwlL4F2fCP/wuIf4z2/46v9l9+GuAyHtpY60XJyIzJA20wkO/UKxAk9u0Z/wWQD1LiJROxkp4QS5yT2A4jXoXD0Fn9Eq5x5UOZ71T6V9jMQQMbECWyg8yN25pIun4QQk+uwXbVHZBvDFyJ6kbEh5w0kCHVaniWyzyJGrXVuinhKD+DudQzgVtHFxBAaAdv0XZzkii4gU8AJq4Qrb9aT9R37hAsA+22/eqp2LDTkyQlaJHCHhpBoFvMUOsO9UIMP2/WkzT/LhwN0kORkhiwj5zVuFRmxtbXKH5BYSIiGDUYmZfgElBTabdu9TAfR0cIGi8SwFMG0MtJRW0DQCI6/ookx0dvUoJBxhEygEbC0TPYQoR5Z86NJWhy8H9reSERJX98QTkqIPIwtAFugVUyhUgaLoYyQtMRFCundERkHySlhL2NxIe018mFskgNL30Ti3nUl+a0LddQjp/JizkvD50RPjJDaIyDIQiEjOCpwjH6k1Jd2P75vuT+3oK+mytAVi2pNJWmnWDrqUjTsyMM4NTBv7jEst0JMhe5oSwLCUUKJEoQRGZSDySgooZy0h0HuS9ntTTEndtg4xfvOb30QxHgtlHdl9992x++67+++PetSjcMMNN+Df/u3f8PGPf3xBrrG+saQJSep3n1ZvjfbN7JNDHWHh1xi6xQdOyNPsJc2d3/ukJ4SlbuFt0grze9nUEFIO6tiHlWm2aDFMKxCTq4ySAsv6yhGTAjN9hWV9heX9wmmfglbWCw+JEFQa44Pbc/7rTcKxksL7EBPi1LWJlo19oSDJUhtAxRrgYREWkDbPnwtY5FZUaacU6CsJwABOcz/0pkHh+TaRDl6d2wfIMy1faezNUQA0NMULsBSuGa022HXS/vF9mFhLHPerkMScUiC19PD7qQNlghqW2ltMSmNQIGReSq/NBZmeFN4aYl22guvO0rOPWJDWFSBrSBiPbRMw8ArQVFQNCPVzelJ6s+U4S1LqnpUSkVxAb+r+GdVsSKyuQBg7M72g5CAryV1sniHCMtNTkEI4CwmwrKe8hcxrwE0wRxZuHE4XEgU0RDkHMVhrSchwFkKXEKNZoBwAoxHMYBbQJcxoCD0c2JOQSVUqCKUg+tOQ/WmIXh+6NwPIAnJqOXrFNKZ7U1g2PY2RNuhLjYE26DutvK3HAz8P9KSNJ2mKIaG/dZrzHAkpMttS99T0Grlz8238OikBoYQfM33lLXRTKrhW8veWzw8AIitwSko4ISH3PNov1FEK7ztZNkIKdPe7sAoeAaCvQrr6ngQEkVCtAa0tMWUwQtjim6qA6RWAkBhB2RTm2rrqlRpY3pcoNTA7stawuZHGTE9iqA3WDkvM9hSGpUG/GLl3J6y1aUzIOKvWUiAlQm28Su0rV66MCMn6xCMe8Qh897vf3SDXWggsaUKyPjCugnvqviVF/ILyY1PLTaoJnsS0uQTe8XXG6Ydbdv/aL10TERJu4gcQERPaToRkyltF7D8SgKiOQNY/uKLNqtfK16UPTS1wdZokb21oILEUIKmAEIjuoI21ntTBx7IkaUXrtIqljMnB2EBPUd/uJvBA4/T9oncq1S77QPiMANP0PuSsLSp55m3bzOFjSNyJU4MqF2hICKUsTKRx3neJWkVi98j483wEECLd4XwCqvGIyc+f/otdtsJfH9Q9qhaPo2e9Fu75u9/SGLZSKx9k7V1/lMBcaTO/UTY3JQRyy4t1y7KWD5QDK4iO5iD0CGI0Bz23FhgNodfeZwnJYBbQGkaXlpBIBSEVjJQQwwHEaAhR9CCXA0YqSCGh4dIUqz6kkhhom9ltJKwmfShJAG+OG1gfGHe9tmOMW3S4RYSEfCV5TE8IGs8pKqLixzUkxP7mri2qMR5EQnhiA5onOBHpK
SIkNoObErBEVGuIcugsZiP7l0EICcjCjgGlASlRFNMQTGkiXIIPCYNSCfS0VQLMlrbg7cgXYo0TGVC/d3EhSxdXX301tttu6ZSgWPKEhLtLxMWRAgngf2vP00BCeDwJEGt2SfOrhK08ba/vfkO8QMXuZdU20XG5d39cLYtNCf1CYTAqQwBig5mYu2r1C+VJCdeIcd/9QolEqM6b2rllxAvSOpCRuvHSk1W/Px48D6RCfdivrnBhqrlNLWV8PJCmGK5wINdekuaa+pOnzxwyy4JyJEXahFWQJvi/cnAiwdtBCQBKE/rP30ciwHpSj9Slg1mVJqx0Rqeh7DQkdAB5H+804J2TzjRugZOjUDRPek1qiB2IU8EudVhrVPU5KCGsNYMrCWq05UBsyaSUpjxgN9VW0zVCMgF4Fzpeh4ZcxyjdKXfRWjsosWZQYsACetcOS5QjjXKkoV0tGqNdoUJtIrlPSEC58VP0Fe5RErKQuGuN8hYTsprM9BX6hcJm0zbd+PK+XWJ7zm1La1vHpNCx4GsJyABiNAdJFpK1d8MMZlHedzfM7H3WKnLf3ZaIDGZhnIXEuEVQ9nuWmPSnIWeWA0UPcu19EL0+1ObW0qJ7M5BGw6geZvoroIR9P4daY6ZQuFeWGIpmF9MceIwI38Y/133PuVkB7d28OOngawC57nqXSSUw5ZIOVJMPxBZgIBANIK+IobY1kQ+JYAXhCgsiI2QlKaCtS95o1hIQPQqERJc+hqgyMGVhrSSqAFTfbitHUEUfM6pv4yOlAKAxEgIaGoDCUIY6TDzt+VAZn2WNaouApU1oI3NsaDI7H1gLyQZO+4uqp0IT7r33Xvzyl7/032+88UZcffXV2HLLLbHTTjvh9a9/PW655RZ87GMfAwC8613vwq677oq99toLs7OzOOecc3DppZfi61//+oLex/rEkiYkETGoCUKfpOBRFNSaLL5p1pgmKwoXpuosKDnSNI6I0L1xnPvDmyvHnHTgTg1tW/x461F/4T+f9oWf1k5wFLhIhIS7g6RZcIiQpAQEQMV9J0o5mvlMgnXORbBpLs4t8rksQ+MIEm8PD7Dn8Q1pIH/OjQYAVEJStAGLj6q/GUuo8+ScW0Hocrl+rHz3182jzXtMfSgZEeCEgbuXxcH39UQkaq82/pmRIENjjCqv96R0biL2EAGBv1i9YUz0GwP+uTBiAtTHO3FtNZEQOo+3PrH3VEqBsJZbIgJZT1S5qwkJVmQN8bEjg9I+21Jj5Kp9jgZO6Co1DGu7kAIjaV33ylJDKQlVSOiRxiyLaZvpK+dOas9DAnFPCZTSYFgqABpDrTEFgej9MjpowcsBRDmwJGQwC7PmHuj77vaERA9HGM0OYEpt/2kNISVkv4CQEr3lA5jRAKI/ba0mRQ+i6EFML7eiZdEHjEbRX2bjW9xYLY17bzxRCP3Z+PwzZCR93oRC1hMT/vz4edPP/Fj6S4UF+y5xwExf+aQDy5wbHV8PpimVrlMcUM0ioEo4gPxM6Ie9CO44OSsIfS5kWH88ERHw7nkgq1g5sDFCzjIi9MgTEoob8q5aLlbIu+0Jaf/BvimF7EMYqqdjLSWlBowkhYpET2r0ZKhdxZ9VWdPv6XOJlIaZvuowOa688sqo0OFpp50GADjhhBNw3nnn4fe//z1uvjnIf4PBAK9+9atxyy23YNmyZdh3331xySWXZIslLlYsaUKSIrWW1JGSdN6k942TEo4wMVevE86RJxqpMBMagcr+OVSJUVVTvimjXygvXFPWFL6IcRLSZ37apHX1QcYZTRiQt0ZIYdPhRlYSpkXSyV/ubsTRJji3LnAyDZhMBy0J00Cos2D/CkDbjF26zGurUn9tv8BI6w7mU90KA+XHmHZxFNr2nxEYJudNq2yXLN4lZx2JEgLQu+L9rZL7Z/uPIyaexJng+qWcGxzPxhTSwvI6DHm3stAW4bOy8cJoKeHh1zdY+i8qZdDiUBK+poKSPAaq/jy8zgLFc3E/f
v6eAsGipZSwAjNcnBOErfouRMU9kSMNsuexIsYYayFxFhEiIvRbSkqEEJBDAVlIKCVRjjSEFNAjjaKvsHZQYuAsI5akKP9+TRcSa4YlpoyN1ZguFJSw7bJyYRGESS5UliWMLr1rlimtRceUGnoYsk4ZpoAwpZ53nFLkmhdlnVzHol0M4ywfOfdc+hz/Fqxt3CpOyiiyjMz0mILKuVFOF0RIrO2XWzfagprC+LhP5ew9M2RsEeFuWgrWKsbdsjwZ0aUlIo6keiJSlhBqjHMjG0MGtgAn1abRxs5H9nNY17wiah5In+WkyXs65HHYYYf54qk5nHfeedH3173udXjd6163nlu1frHkCUk2ZsMXgIu12HXviRRVUhIHItdp4nIny++bxo/4BbLiqsLPXz1X6q6zqcMu7kFjRUGKFNROQaQ8YD0n4ERZVWTI887BM/RoYfzAIC15raucDKQkTZkLNAvQOSKS+i6PS687lDaQnMaxFMZ/j+4vEjbyRDe4cACAdgkbpA/8p5S6uSHLT5laGHJVsnMk3usiE2JC+6UYt/bZ0xigjK1NsyXVXjBRqtgcQlpaAygJqavPN+wTC02bitJgdqRr3V+lClpgAFC9WNOcIrUIch/+XLIEfx33jChRgpREvoVXy9YF1aeB60RGylJDjwxKZikpR3YbUJ37pRCQhSVMRU9Z68mwhFISc32F2WGJnpKRxURJm2ijJ20QMQnKoq9gQ1EMRgVQqAKCERPDA5i1hhkNUQ5H0PQvWYBkuiDNk0TYdzajQKixUKRxBpO47HByUldnRMm8+xetA5bwxSmYpwpb/0cJayHpSUq/HbtN8XGbG6788de9y/x4Sssbip+6uB23FhERAbnoEREZkYVk6GOIULqkBaNRRLlNWQJFUXXfEhKGsq1J+10ba1wkl8bS/R2W2illgsuWn6fdTY8mmLz48zFLgJBszLS/Heqx5AnJJCCNZQ45UhJ+C3UixhVSbBPTkrZjnGYi/fn+QkYI3DRPJv+cHzp9pzgRKXgcwXgBnxC7iMALPjlXvnTxzY2xcZp9bl0pSYCu2T/KaDUh2gQn2qKAApDGW1sA7euVhEDkeosiEDStTZdL+7Piuiargu0417g0W9qx+2xf2efTP/0d5lwwsyVfoc5CxRXPvcd1zy5d07SxEoCyphEAeUFnqYEE1KYsggDCO9niOaXWQSnjhBMVTSulpnYu7UNQEoTxlhI/hwgB7fYj9yTNjjM6WEZ0YiUBAC0BjAAjjbWaaAEhhZcNhRAwhcHagZ13pgqJtQNLLFZOa8jSkrtlPY2RtgRFCPteFqoPI2dhpAJMAdGfttpz53IFXUL17JJtSg3BMl4IJSF7hf3b70EUfXs8HVv0gKKAkYVPCWyEhHHCqDGhqGIT2gipTW5WPvA/u4+ukJI6IgIgpHJnVvHpQtlU284tq6eEd9kipRVZKYBATIB4rFKTQsts6ty0e4J1JK4PQsUrhfts8xa637UOLnr0D6gSSCEtSVGqYhkxjIBQymdDLlyemLBaXCaQkRGzXpM1O3Z/b86a1uSeBywNQtJhcWJJExLpNJLN8RwxxpGSgLqd6EXl58xYMkrubtOgCa7BuNiR+wuW9cMkrNhClEvfOJUtShb3cy5gkVAyhaR0WtieFJAGTisb6k8A9c+Eux5V0gmn1zQIQhVZDoRNIamFQc+aXwBUY1asoAzMJbEP3sqTaDh5H9Ln1Keb7ssqZ12wrHPjkgLZooMpctbFOiLEi+Lx9qUact7EcS4B47LX3XbvwGsCyf+fXzuKc0DcX3R9LkCnz5jqvPj72AQW6LtnR/69A2LXK/s3kH+qjF3nfpUipE3NuCoyDGEFP21KQNq6HlqH94I06eT7zt06bWyHdM9aQiuJEhpCWEIhjHXJSmOYvAtXIiwKKTGcKyGkQNGzMSVSSfSnSshCoBxp3OvIyNxI+3mM/mrjrE5yCoPSjv/SSMxMr4QwBqYcQOkRxPQyO1lJZTNrF
T0oF9QObkFxweyQEnJmuY0f6U9DLl9pt89sBlNMQ/dnYPrLYYo+ZkuDuZHB7Mimh107Kn11b7Iapi5vKXKpf/lvqQCbbuPWbvs3uOfmXHN5jCC55PaUwExPoSct+egpiWU9iZlCQQhgWtlnGxWfZJmxUtCc5V2cTJWgpCDiQacLlm7uxgVPQEQ5qhISKWGMhpCF/evObYwGVM8SDgQyAiFhVA9Qjmj2Zuy2/jKMDDAYGcy6VM5rXZHhuZHBvYMRhqXBvYMSpQnFE3nK7FHyPJXTAtQlLuDu1HOjhcyXt35A7/0GvWaLtfP+jiVNSDgmzQoy/nz2b86Vq621hEA+1rm4lsniR/L7pckiPvj9m/znlx60S7tGLlJQ4cS3f+uXsaAog7BHVhESimiRIq0YEJRPucwpjQU1nbVEm+AiEmWASp+RqaauHSdAV0mqs7NLYUmKRlQEkh/DM0SlcRpN91UXW2Jv2o135vpI12oi9P48DW6IlXZktOt8+yREhOO4v9yh9rfZUVzgrdZqxOIiPGFiBCoVsA/eZcvW7VtqGLiMO4riNnR13gGY77yor1eToqnmA6F05Ls0QVlAMUx0rTRtaU67ntPaCxc7Fb4Lbykhcsvdp4wuXYpdBeEmFKMBWdh3XRqJ4dwIxiisZRYS+rumrzFd2EDi2VEJFLZuhCpt+l2p7KgzxbSNb3DkQkgFjIbWZUdKYDS0/nK2E61VxNUhEdPLIXp9iOnl1jKi+jBFz1pGij4gC4yG2llHrACqdRwv1/hutESOpBA59M8m0canc0dOUWBjkELmLMqaRe5w04XyiSXIYq5ESK+rZKizJNhYM8b478bYlLlGAMIIX4QwBY1ZspBwckJkRACBjJgxggMRD1n4opfRb8wqQmQEqm8/C2mtITpYQEpDBAPOMmwc6dTOTQuRAqvpmectI3FsZ5vkIx065LBJEJKUjEyaza0xwM79LWsE0TjLUOx+knNxyFWWz01yFXfghokwzQC2KYJiRQBE8SKUxpECFX02FR/YbrWjWuSsE2Fbai33gqcx0AK+MJ/N106uTfCa8LQ2g/3rzpWZ37PJE5gQTvEa2rmVlCaOK7F1DWyj1wzt4jJXaszSgsMqUqduFjTeU/c3/lsTWskoDS5blbTLksdgwN9jk9sPnTdNy0vPpgl9JeN3NSPwpJWcydpG2bvq4ik2VYx0XGBTSYGhtv0zhLHvmyOuRWL9JWslUCVxOXe8JihB1krm1qWt9a4nJYbCCrtKGwwKG8Mx0zcRGSFheFg6C8mwhJYaQIGSUuhKA+EzrRoYqXxwuQ8ydyilgiz6EFJCjwyEhA107ymfWrjfCxaS0r27a4Z2+bUWpSnMKWup2GxqBr3eMvSkslmXejNQy1ZClEPo+x5grz0a+rS/RErElCUtoj8N9KcAWUD3l1tBtbcMppiC6U1jTgsMhxr3DTVGpcFapzWfdemS58hC4jXl4bkXjNCl1pE0cUD2+Q2S2iAyxIFQhizrimWPn+kHEaXPxoySYBYSGRUhpXfVFhkEpl0VdEqzKwS57cZts0u88PO2oYxu7hsYMeHgLl+cjHjCA2/kroIFn8NoCGGtJLZByaIkmZsWuWaRhUT1MYJd62aHds4fOotIaaw1ZM4VRaT1giwjcyPtLSNpfR2Au2nJaFv6l1zpZH/xW0g6LE5sEoSEI3UlAKoCUNiXbx+zGEp4KcgLmjJYzdMJOGjKqxrFcQQkp3VPQedO21+nuVzq8MXlRLCEEOGQAlGueSImhXQLgxKBODD/cEKVjICZyGxf9wBfPGpY2gxLQ8GrO4tKYLS/VpM1AsHqQBrfnjWn+bSM0MYTE3uw/UMarrmRTSVKVXhpYcmb3qtkJP1tUiE7L3zUW/9SopE+VyD4eHM3nrheSLAqpel6x7k39pSwfZikqeVZ2uIaI81xSfbam75LZakN1g7KeNwU9nkOSzjriRu3zLoEBCJSV+ehDvRu0jHBUmmfY+myvfWUtNXFl
UBpgouWFZRlcBlyhKDUBn0tcZ+7jilscIgYuf2cYGY0oGVsKTG6hE5IiZAKUiro0Yy1nmig6Jc2aL7UGPYU/uiEbXo/1y7rAQgubpv1CwynFDQM+lJgRX8F+lMrIByREHoE0V8OSWlgszUppLeGGFkAxbQlU/1lGIEqc2uMNLB2aOeH+wYl1gxLS0hYsHMb60iFlBhycaP+is8xgstYJoVXOnBC0i9kVC9JyRJKyihBQEgXbd9LSuVrSYgjNcoREmGrn0eEBHD9xxompFU+MmtECQEFYwtKkvFYIOttTZs4GSHrCPKHODJi2OdATAAEbSi1SUgbXwQARd8SE9XHyFlEBi4Wbra0RHPgnvWwNLhnMMJQW/estcPSKa3sukFrRbpe5NYN/pmTEE4wi3LxCyBSWRfLDXpNs/j7ZWNjSRMS6zM8Zp8Jhavc/j57knQ+sHxxJFeBxP2qmtO8/po5MpKNN+C+uDWBtpsiGQFioTWY7UNALFDv0pN2ZUpGuKUqLZKXa4eUzhVLBCGZI5Aed66Gwn7cmlIdV+F3aYDSnYfG2tC5lMyOSm9259rKUc2CUmcZqRuzTUHK2lTfGZ7udz7gVpJxqHtPcviPH/zanV+gp4KbDrkh9VSV8HprlYgtI4THP2SbSW5tyaJkQuJIGz+GyJUrWJ1C+uZSxqmRQ6yI/d40N+cUPHUIKVYFpBYuzkX72BGgiKw7qUZ/rbQxH0YbSGWcbCigHTnxRRO1gNG2IroEUDJSYnQJFH2bOUkqjIbOaiFtBi6tDdasHWLQV5iZVSFLYF9hqK2Gn+YlJQRGLkPkSAnMTK2AVAVMOYJQfZsOtqxW7YYirbl1ySIiAiEx50jGwFlEaP6YHQUyQnEEREZS9x0e+8HnjdRFtOLulj670gnr0tW5YGsZv2aYp0K8yWCk0VcSPeVi3bQJgvu6gggB4NPlaoT1g36jLmkxPXlEthYhLcdw9UDg4kayxwkRExJ6xrKANsDIpS8vDTBwcT9zI+Of5RpHPsgaspaedY0VvSmYPf3M4w9pPCspYNpoGjp0yGBJE5Ic6iwducmjLltEdX+2qKqQB99uMIAOpCTXlrYkgafcWxff3YWOp1kM6LHJrycl8wtmVbFFfO/G/ZfrSavJCxp1XiCvDkQElHAxHSWgyYhRhudG1dLbxhgRNKWpLgFKMUvWkLRtVGVXG0SarjWDEB/h2y3j4PXIZUKETGVA7MNP7wCP7YjaawwUqgQs3YejrmZL+N39lblsaO46/tyhb0Kfm2w7APjaA6A2M0GGEiMQGfH3LgTGBWXfH7B2EGpeUMAxEArRlc7FpqcEho4UWEKdxgGBjbnwA3m05OKfmlxb7TgyvrCbtdgYKB3e177TAM+NFGb62rlxWY07VXAfjDTunR3Z/eZGNg3woMRwrrTjvBj5uiO2hl2JcmRdpvRwaK0mwyFKNetctwYYFn2MhjMYDUoUfWs1URTwPjvCiukCawcjzPQL3LeyxGZTBTbrj7DFTIGpQmGrmR4KKTBdCEypHpTsoz+z3MdDkItQ6Cf7d+QEVG0MBnMGpSm9sDpKtOYUxH7v3AjawFpYTSykUkAzWTEw0q76d9XFU2ubTpnSKntLSeJaSe+UEAKqkBBSYG5OQrpaIbbiva3ZMlWEbFFkRSmNc9eS8IoZLRyRgHEkouGFdRmsImEf8GOWyAiNR/otutdJiQnFf+iRC0wP8SSeCLH2+FgSEd6l0hiUpcFoaFOWD7TBqAyEZKRD0Lp1zyq9ZYRIyMC/D7pCOglNLlmpyx0QUuj3lMBgsPgJia3UvmEndGHuxwtIS2wyhGRcOsp1O3c8GSkpoMvgkqUknGTavp0L5eLx8kftuiDnWew48eGh+vx/Xn2L/1ynRY/Sr6LqqsXJyLCsCkHjkBo9uNtQ1IaW4ONCZT6Hc1aPbboOJyMVbRYjI7lg8rqMV0CwGjZdO5t9jlwYM/FVdH/8kaZxWN6C1WDJKo3BuT+8GVLE4+av9
9+xsu/5V/0WACpuWUoIPGXP1bX3dn/BaV/4KYBY8LQxBbIijNrf3N+GcUFkJD1agGnhRd7lr43igK4hidAWNiifXKPIjSun/R+MNEqjgAGAvisqNzLQjrHrkUFZ9KhwdgTuwqV1CYwGKEd9yEICgxKjXglAYTQssZZZSIggAXbcTRUSw9Jg2sVGGAhoI6GkQalJkVAlAxR0TUHNBlZI1UxYHemq1nzOWUd45rkUloQSQanPHJe6aNl2cZc3+GcgpCWoRPQIQwBqFOaquZF2rm4apY4tMvyfNPY3KYS3bNjM5cZlZwvxHIIL/nBkmCwhjIxwq0hd7Agdn/aIMQYQwpMZTyCZW5gRifCeI0jajUNjyZdxz1PD+OdaamDtqIysIrMuTkQb+DgRrrQK8SKx5qxN6uU0gN0nmWGKrA4dJsWSJiQ9EdJQ1qHp5WibCSicJ+zfU3bCoutLFjTN64/Un2+ydMW8vfeXQNo20G6ml25BAizBkJIIhyMkrq99NhFDMRi8MF69IJXrc7JQlCZYK6imBbd2NZ1LJhO63RasEhX3pcQCYDX61lUFRexWYc8rxhIP2i9qT+7aDDagn/ZzcRusbXEsDbt3GX4b+tof2t8zHT90QmnORYwHsvNq67xf6ghPihVMEIyLHY499H6BOheOfsGeqwsi5+CZyJRw1icJTBeqUhmbxD5t1dte4QNULZj57+G6wR0vjPGesXFl/L0sdc8TkDlnIVk7GHmBba37d9/cCHqkMbdWYjTUEMJGepWFhNEroEcDjABoFwBGpESPBtbff/ZeACugCwUhhxgNSxhtMBqUmHNkpF9IrBmU2Gy6wJ3L+rhnRR89KXHvoETh6mgsc2ltp50FQbL+I1A/UlVurwU3PLWrxtoh20auXEkcQfysgzuOba9G3/VZNDYGlsdZdy3XJh9T4rbzObEMz0xIAV1qlEpClbY+yuwwJCGgZ9UvyqhNdv6O486mlcSyvoIx2mZm05ZMKSFARlI7N5BdLnbXJSKSIyEEGr9AHMhunEneK5W0ibJ52WOk+x7IkO+ShGxpYy1e9HfoyAc9S+5ux61cZDW/d3ZkrWKD0hG6ODkFIc5Cp6GkCjEhMiQfAEKsHV9PyIPBKgC6oPYO88OSJiRNWB8sPZyTJDUwlXV8wXGkoY2GOTpfTijsiEkWpTHBYiXzFhHra6ujWAwSdnPJAoB8rAS5CZFgHAs9iZBG1prE8sFd+hRzZWl2a7IZuEgjaO9TQgnjXZH4osHjb+ruK3atqdYBiW7Hjf3gtpAvTJe6rdUndyAh1Oca9pnRUkKkdZX4pGTy5EfsXGlLDpQAgXD4Q1e1Om5Txmlf+GktESHUbQ+uf/TXZSaT1q1DuH0ERHU+dQHE6TBKycgwIivxc/djWYW51dYPFNAyiRfTwEwv1F64Z9YFw88OMeVcgwCg7Nn5QUhLJnSpIUbAqOi785cwWtt0vIBz3wKEsmNZDNbCaGsp0SOeVtbgXueyBMC7jpEQro3NXra2p3Cv4tmkXPY3KbLEOadsISuIFWLtb2tdAoyc605d1j0rsHJrmT3eC7RaIGM88mSEF570v0kBlIDRErIIvwshbHwP23+kDfrFCKVWkRtqb6ShpcCcS+ktR4AUCqUAhDAuRi/vZqWNiVyyqFAkkMx5CMcKAMYEZVJpDBQEq6drLFGBQAlHYBjpSUEWHWpLqcP45u52I1dl/Z5B6QkJPdt7ByOUGrgvItajYPlzN8MJJ/Uhzz7XFG+YKrd48g8gWJoXOzqXrcWJJU1Iemr9mQdT15EUXlgjYZK95NVUv+2v6+MU0oXAC43doAacUAorhNuCghLalKAUo0NXWZdWCBJqhlr7BXtuxK0aIQ4BqArSOetCKtz4zya4NmTdHwSrt5EhDLmsU344MCuCtB4gngzl4jXqUum2ATPKVKwPbUECKu/PCkmBgfa1TmwbS6dppkBlIE8I7fkmszRy1NW9uD+Dp8ht+l1JK7RPFdbnf6qwm
Y54kbplPQUlbAVtcoVTElH9QRoOwgSNNQeREVIccCLC0z379iXPtEdjkI1bXrBzqK3wT5r4vqsdMjMY+cD9UhsMmFZejzSMBkZDFTJs6RLaxZUQhMuKZHQJPdJAIaFLjdHQxqYIIaBLibvddTghGbiCijN9helCQcmQhprPEylyihKe2nXNgGfiC2QkdutU2eBlOpfVmBu3bxxTBMDFWcIqTUYagIbQwhajlCJLTGw/AUba+BPpjh3AunDRNda6uB+eOY0X17RB28pmQ1QCZWEJAY09gC3bdF03xxkEZQef5vxyLwKZVk5hooy1jpQI8x09F0FKlZrphV+LkxGNamwIpe0daktIRixOZFBq3DM7iix9ZP0DULGK8Hd8ij0/eq//f/b+Pla2bbsLA39zzLlWVe29z7n3vmu/94w/wEHE4JiOCUnaBhHRBLA6iRQBrQgJEZCiqGXZqBPnj3yqExIRdxIpUUsNkdUKICEj1ImCLBk6CSFAAhhDjNMKdiCAcJ5N/Pxx37vnY+9dtdaaY/YfY445x5y1qvbe536dc+8e0jlVu2rV+l5zjt8Yv/EbFoTo9bV1Iv38ZAVA4uNY+mivaG80IHGu6tsD65z1+zorltsOINeHyHf9YKLRYi2qjimBDNo+2g9Oa2/zujS6jRLV18xJE8WmFox8FmRGT9nMbGouuFwPdXiE7lOXt7zzfeRC9dEMib7qIA7giLYAHEvlrmWt1iKKzd8GiADrDsaaxKwWeDdUqEA4ZWtN5jTToOfiSKa4v23LOaiTdN/3oyy7AoZOd7Kv21ebzXxpwfcaLcCuv4nquvW6g3PWU7UeTZxROa+8eo/rMp6cOMyDgJEnY0DwDk9Gj4FIAMkoFC0rvVpjORXYcwJAQu2xrpMCfLa0SHV20zE4PgXA9bP1LKesUwMSl6PHfmEBJaMUnntyuJ0ingXJTCxzFo+IHvPeYwoDEjPidFtkgWUfKn0lLrE+ewvDB6EmeU+IC2MePW4PC17uF4yB8P7NXACfOoXqMAJ3U44FaFSH1BYz1+9beufFWIGI9gCx25bPjXRyBk5jWLDJ9R667pcZUJBnuExV01ry5BIiWieZk/R2igsAMBK7kpW6Ntuz0fzbibAbQ94HwsyMgQi3A+NliCCHAorXhDLqWNSOi71ZOXJ1yMfc38QzCkBxnBshulTu794/t6vnXIDPqdaJ6D2p89LLQ8x9a7gUrL+/nzEtAkJemvcx07N0HptM9Eez7/pvl6/10twDcq0VpIyBGjn0u1QIAazK3L9u9ij7+3raGw1ILCAA1gvGz0X67mv3yZZ8EJnTNVuTXrXb+6xbT/eRHgQS6ZRmhoZWYGg9CkbmmOlaMTVNoU5N1gAQqeXUqrUFv/frUr7WL8YWVJdoconMVVUxtWFlvb0i1GrhuH6XXDMJC7CrE/TRPp8AI832HaC9QSwo8XScGSnrPULp3c5SBeenzq8NEjzEPiuyvQ+x2r/DrZ5v67DsBl96/2yCgOtNkAyJ/q33sXOuRKqtOcglj0lrIlYCSysxnrOF88ZJsoD8qDFjfgbIiXoUJ4eN6WQqBcABF2Pttr7kJodhqNF+Th6JCYmFxuVMpsTRaU59YnHMKTq4xcE5hylwOe/TwpgMfawfe6z145B9tbSsvvbDOqkbE+Tor71mdjml8oxWoQBf1mcBy+QcZnJIyfQmoYS4AB7UjOOnsv9ah2GliPU45BxFHJZa3wAw9ouu0+z7yvW3Mu9rwiT6+yF/uPWUVeRqYARe79hcEZXkXk8uCRjpblNLB0sWEBkwonSsmHJNSM7oa32IZkOe3UxFKU5V4m7n2PSCAbKMdWYUlGPn1Mxnp/5pYKin/QJ1vunP86M92qvYGw1Igq9KKnaC6qOzp0CJHZCBu7IO5x+yu4DROSvFzN28dZ/C48+izdFOYklUWcjhEPn0pJavhXKqrd7+7RRL4d+UtdnXooc1he2bCdxG7NROcu9hJnmWgnDb5AtA6QiuD
b/0M0AHfNT3rjooDsfUAjVbOMxJ9s/y8bXru9LZCp8//+ZcVFpNnzMFJWvnv5wHk91Y4697kpqU0VMBJZxaiqb2Biq/W92rR3uoXXSdlnvwrUpQ3gkta5PBiGZGLkehF4V8D5fMCEzxb3HKEgC51oRS51xM5WuBKu9qqZVqVhBCi74litvWW6yNnX1t2c2sDQSjdH9nzjSuBRejx/s3AbdTxIvBI0bGfFgwHYSSNW28gIy4FYoWKoBXRy1xQnKuNF9UhSleEpY5Yj4scORwc0OlX4dVodJ1qPXrX7O1niBay+IDYRxqb5SxZGRqJsbnzEAtBhebg5y7acuYlgGHhXG1nyVDsl+KYIA6y/tZzlOMXGSB1ySBnWuPObH0tVEQYushdAz25HAzxZJRGgOZMbI9Hz3Fdk3+1tZN6LFvA2Fj6nmK0EBuyjt4rZOqEKW5bibb3I/DtdZHmtyq+MCz/YKZGe/fzHh2M2FaGO+9nMp5neaIuHARTLCqZnpPhMELeB68XG+0J6QHpZampZkR25zYzlVAC/imN8Et+QRqSPBYQ3KnvdGABJAHjjsnaM1hOpcp6YEJcKyUda53wn3tvoma+wKR4B3+i//5fy+/+7/86l/ygfbvTbK+oSDH3KhwtetIPfeqgmWVZWr08HwUsRS5mgupE+FdmZH7Zk7swG4zJIUPX5wCGF34ymuudJjeXCnsrNkLB1tErsou9T5vKQwWjJws/DegpL/fz2VJrNlzbtWyzp0+XeZ3/ZpvuHsDj3bWLLjuFeG0jkEByW6gnB3JSlC5U7YsJ9FXzVI4Z65hvj9Is2lQKpd1tGHenx67fecU6XsFIAruFczbGi5O0rxR7jNxDjkl+KavkMNtBmmaRdXsxRxrr42Yvd4YGZgiyHl5TlZUjaSHjyvRf82UcHKlPUYp7u4cc6AFFv25uW+E2jr9M9UM5Kpz7qozCvTPYg2M6LXQ8XNckZmdSLJBagpMvJdMSg9G9LhFURFHgYymqJ5kDLcdx9dqoqyi2Mlj7hx0FRqInErGZM67Gb3Uv8m5cUgFiqQGgAOVDjwzF9VHC0i007rWi1xPC26miGc3E96/mXNmZBYZ3/0i9UiRsUwaNEqZHifXxZGDc5LN8ys0pZ6uZa+5zYz0NYk2Y2+zT491ro/2qvZGAxL7MBwNyu7hsrqrgMU8v/eR823lTo9TwKtg6Z4ApIkOf4bTorb2wdb9nGmI3hSrH9UedBHg/jP9+9RnzT+9Zr4Fx2vUBx3szwKZZOhXmhnJ0WYFJAPlBmOEk7xl3ZeUVMFF7ksH7U7MpZRplj/Lc+U1E8FVJaavc2qeCd1PICscAVwma4mIWyGAkp3psySuNr3UiLeeP3suH+3DtSe5dsCOR31908UgvPKLwRcp2iGDj1GbllIFIn0w0j6qKQmXPqZUmtppBnOOnN9rkW+tJVHjmDCoshzXZ0/32YKRITtdDsiOoiv7410FDjOlcv/PnMAXQ+HcKxjR12eeMM0RyxzhQ/7NQaLWImWrmZHT5zxGKfomciWzok65ghA22YSmE3o3b1ln3s4T+rlmXSgrzBWQExKmpQKvaYnFkZ/AhZ7U0EYNAFR55d3gEVPC7RRMhkSyJko1mnKNTuSEaVZ1srQKrlz+pzS8NYqsjh3NPndAsA866TIWlKhZMOLJ4Wo7wJPDk23IIgPUjH8+97nRzwbfzt3aiBdAI6yiWT/tF6IgZL9IRiRywldeHnAzRbzcL3h2M2OOjOl2zlmRmhmJXbSnNJ1MDj64JlvTz1uaESm1I7luRKXndTwmhwLw7f113MT20R7t4fZGAxKgUqWoCckec9iBV6snuQuknPvNXUAEaIswLRDpQUjbI8EMyB9y7cqbYPaargsZrC8LtOfrOCJGiBxPgg991X+nJBGtnRqkB99e3/s61uRckU5Vh2/IEWkHew+1v+MkzhenhCU/M3L82geCQJzAzGBX6VpErpGrXKMm9vvfLONrJBqARKPzdlWGV
c67AKuyXKEEUMkK9ZNgc3wPfK4f7bQ93bbTgjr0Cg6H3BuDnMMmaG1TptBCej0oNUsdFb09WqpWfU0JBSxLv4UKRLTmq1fDs/cfSa9G9FTBUkuSwUigro7F4mhHZf8UkACVrjgQCygj12RJPLnSt+TgCTGqQlQqHd+1c7maRrCBWiORkMARDdBIXCPegGYTGIlj+SfLVrKb1qw48t17ahz8ceObwt6UPG5zNuOwMHacMC0R0+JLdoBTG0RZG9/0fGnvl5lFLWzhhN04FzCial+3VvXrRJ1aD0Z6UFIBiQoxcPO9frdwFS+J3AqZ6H7rdR99zYzcTBGBHKZlwG4MuBg94rbWAQ7kSqCMnMPAVYwEqEXzVi1OMyFSsB4RGXi+n4tc77ObGQsnfOXlJPfWYcF0WMCRcbhdCuC1mRF7TSjkcxVI7rEM2vXeVxCi9DwrXmCL2PVvBSM2M9JnRfo6x9fVNHP6cW/z0c7bGw9IPmy7l19zIroOrAOYc1kRu55zTelOPeRvwsP/YZsFmmuKKVZqtv++nC+qxbshgwv5baVfHUfM6gBeP3OFa2u3fU4RzQJOe93L8pxylTZjAOWplXPjR/W8xAmMSJLZYAElxeHDaYqTc1pvok0NHTRmTVITKhOPZjeck0L4nCXRmq32mPq/+40rMGllWEu2yreUMEtRs81P14D5b/u2r1s/0Ed7kP2J/+nvlwyImmamtBajZqzkmXGZauXMbxLqs6bvbUpEPqsN32au3bZbMCL0KI0ka71TD2baZ609Js73llqTyFvL2jhgkBsdMWl2JatHBaE1Xm0D/BQxLQGe4jHFkEU9S2gyed0sVKRS3H3i4dRiZAtEEifEJWbpYCmWj8t0BEYSa2YlN94zgMSHEY48mDzCOJZtOarZ5gJ6OkrUwgk+XyPSurc8NpDJBGhM0AdfpOvnmGQc2YZmvWNYikKXzWjcRZ9ai+qHlTFZrV9Pf2yRazG7gkTm1IwzulzI+1kDjkK7K9cuaZbeZHoM6OzV4paYsF+kYeV+qV3UX+5nqVHK2aSXhwXLFHN9UW6sqVmRhY+uH7yDoxxMyuDT+wpG+6Canre18/dQ+ywGSB/tw7NPHSB51UDpuWzGcT2JfK61C6fsrodTU6FW1hSoUQhd5j77/Fmyhruaz49G7gBz3qmb1OBK9kQi7nJtSzFnp7CltuTJCGgnxr5j7Vq265ytARGhrjj4LB85Zx37gXNRZSIwAewFIRTwhYQAZ+hSrtynQk2p2xRaTFWBEXgj+8wpO5wpAcHniZULfSVqo8m8vh5A2GtiP7ccY/vItBLE7TOjjsEa6H5oZunR7jYt0u0LwBWQbHLxrjgyUrekdEFylS4I1E7hybUZMgUjMQkFiyF9FjiJopVSVm5mASKHTNlSIQpdl9Y6qWw6J/GGh5x1o248t/tGDqXeyia7vZPGdg6SLRm8ALA5CgifWQqot4Ewj6EENN6/mXG1XXA7xfLZM0+Yc3akdGefuTQIVFqW0mxKgTfLZ9LNfSogRAFIPOyb7IhKDNsMCYAGiACAH3egYQSFAYmv4MjD+x0ARhiSUXuqVKZS82Ga6sWQwImAXEOk581KlffjsKXbvb0datF2zoyIXC2vShMDXZ3QWma6/KugROlZmsXq60VKhiTLwCfzz+nvXVsbaKXgFcgQS3BsZoAoN9iVuxT2pTarRFO4fjOLQpbUhkQ8u5nxYi/30rPrCYkTDrdzoWbNhwVs7iWbdSsgyus9ILQ87wk+iMztOEhfG5WTFmDX/i3nsD4ra6bPX204aZQQ3wBQ4jzBfcyyv44fZX/vsk8dIHkVOwVG+sj2Qwrd10DNWgZErXXcToORdnsykX/MmcdPxP7o//ilVTlGIDv/DTo7PiF99qR+UZdVZ+IoO8JtQboCmB6MNPuDh6doa2IkSfFmygJTDjk7ItQpyg6XOH35aHPBOAGlOFi3Tmipi2tA1oIWa0LZyhkUcoYJcRydLse/AkLsOVK54
phknSLVLN9ZNS9/R+PTf/YfesyMfBj2R//HL+Gt7XByTFJwovQNh77Tuhgj5XutfmbV3YBMyyrgE43iXfkXU5MVUSEK23QUMGMsu5J1exWzndMJKoYj97x3DvDIFBzGQA5LTiNugy90JB0vtGZiN3pgAjCiZEWS1hEsAHIPEseuLVDPy4qTXMEILzNSlL8TxwJQUjymbVmaVjnG8hkhcTwrRay2cMKIlayCyarYrLOCkQJotZO5S5hdvXYU6/2kfTJU5VAzTlV6+piNcJyhbvuk6HdVtlrWEcghUgsyPIscrq27cd161EJ3w2udlL4/Z8w14KKgb44V+Ak1Trury79CycpgJC6qTAbwokDK8JOb6+1yoECyJaUG58S//rjOHYfWaKmtgZJHe7RXsTcakOjDvdat9762BkbawvSV7SIdO7Z3WF+gbp1WzYzYrtz3ifwqd/tNiEh8UPPkCvOir7Gx6XUd+IG2+H2NSscdfUg+0+XX9mF9+6dECPT7te1ai8bRKtkDlol+gShWzflv7T5MzuHGc4lcC8efmsyBRoFPAdzIqe1GDL0HGQMchuxADVQBzTnhgKPz1YERVTiyxw2ImlKlM1DznZyK421/3IqNn3abc91DZisBJLVD9npJhqKV1pZnomoKAekYqKQKTGyNiDR+y4W8uc/CQbuJz1wASF+crNZLbffG6XTACFDwboCVLss5c5gcaKCSfY1JsiM3IWKOCRsv8sDbQLjahpwh8ZiWiIvRV+dy8JhytoQz3YZzZF77mACSJaF8fhIzeJly9/cZ8SANFxdtvDhPhb6lrxaQ6D8FHylnSuz3FBzC4OE9IQxST7JWNK6F4r0whwoJxJSwcToGuSOZZbmOsqxkCYSipLUUmgVTMGqv+ali86P3K9fZZnsip9yvo61bsf1Z7DYANE0pdzkj9mQrr9tApY5KXlGEHez+FBBGKLV4WovDqfa+0maGpRbpsGA+LEVWOpZsGxvAWp8LR1Rqg8Io2ZAw+vp+8CU7ouIMtmbkVH8b1pQmVWESHStEjT2VzCKRK7TLR3u0V7E3GpAA68Ws93XQz4GRc4XR8qF6W6cjxaeMjhy11mG0n93L+NMPStZASC9BCCBH7zJNg3t1p2rC/61mi60BYC272tOx7gNAVq8jp7W3JzNyADAkwoKE6J1QulztSD9HpTbFUm+hoEUmGnEYC6DS9Xf74HIiZPBUefFeb3En4OT4aE6aBSCqz1+2g5qRWTKNJaZKudHjtx2UKSX8ln/w8w/Yg0e7j4lzJG84O5ADAGQHQzNkVqxAMUIyQGTN9D6amcGMpj5kvwiFRRSGxEnV7tS2nkAdnJ42GU201gYG7iP6cW649uTgMoiSug9IRiMp0Jdnz66fzLN3yEpVtZ+H7OtLc/6YXIl2R3BRvuJG9jgW8KHZkub9ou/n7ghmKWDPdSMAmgJ4W+AualvuKIKu164vGNfzo3SlyKnUkRC5LHRQx6AeKGjA4WI4vg+K6lSqMrhrc+99xl2dAzhJL6OYtKu8x2FhjMEXmpjNANnjl+snzvuTbYAnh8vca2fw0hTUO+3JU/vdyL7lAMqJudnWTClo0uzaNEcBIHPu15LBSIyVoqXCBvZaek9SO5IpWhZoap+ZXk1rLbsk5wKNbxM5lTmUnCtZfFVMJIfKUnsDAAl5V+7bj22b/PFu7020NxqQ2Gi42jnH3EZB7sqM1OXuC27uTlc2xep9zYGlcz0AjOjD/2nn0iu3HajKUkBVYyrLIWcR4ApNgA3QWFOIUrMD1LmJUPehfn68v+fECLyvjslakfippm/1ewBIAMscEMWryVmDKFm3jD62nhpwAqDpB2GpNAlWIc5EDPXe9MYZuOc9akEIua5ZY57gxOlzR9tXqcxZ5Vj7rnmP9qFYzdC5milhAEt+DwY7h5i4CB309+Rq5ssASpV2tpFxlTrd58LcKfIR37+sn1xTy6WflWZtZJSAXAUOp8bFlFAivIpOLFAmJx8oM
BFqIeAcZUDCAEKhGPryWZWxnhbGGJaiygXI/f0SwDJFyY6klGlUKb9HBiYEIo94gl7VqmpREy2nMOQMyCj/hhFhewW/2WLYXmHYboqzqv+8J6k36JxTVaXSz6cFNbruaqS/n3dljG4llgFgE3SsEZqbBR+qptY2bDVsBaXdrmR8++tsa9KWvK79Estn5Xqk9QJ6oIo4VNDhSp3V4B02wRsgsr4P2mfE19Q3Dnn+0n4+Fgh5coVu5ag6sY4ciOu8wqACROSaEyhkUFnACBVwcg6M9GZ7PwF1jqgNSmv9LLuUMyM1RvtpD44+2kdnbzQgmTlhOBNVtrYGRs5RtO4CIgpAIlcqzylQsqaopWYj/RaMWD/7FE3mTYhEfFimUTc9R1tfo3DWYgIoO7CkE5xzxXFYqy+xY3LtabOy3D2d8H7CbMBLXr8W3bJJ7UtGp13XUdYkR4XVEQCkCLanLWonXQtI2s7Vx/u25lTIcdTagZ4zvebvrT06ulybeJJUv8FmGMjlbsaufLZf8m/dZ+d+/zhNlX9mZJENljEpehFXmLux6eR6uqyBRr05aUPSSs+ZIjcSszUa39KzbKFyXzugal/q/CptptYx1Nfzx3/aufUZm3mdMwqFkbANct62gbAfJMI9kETjt4Fy4XbAblwKjUui84TbIeLW9N+wPUpiTMDClXrl7673sMpaBYiEEcPuChRGhN1lBiM7bHYBYfDYXg4SPd8EbDahofHIsdbMQXWaUykUB2RcpvaSFeqWNshUqWS5dsf7rkOz9klS0KDXZi1I0jdjPV5nzbq2lNCasZNt9vt+LOZATvqrACIFrMEVOwYq0xFAOYaEhIVz5odSWX+h33KtlbH1i2T+JRWNIIdECZRrj/SYtfmh0u+IXAaZStmSZqVayH6RX9eUtvS8ibhJQezmpOq5T2DLEuCWXmwFCV5Xc59Ap3b3mCG5095oQJJOpHR7O5UZOV7uw9ir+9laYXtvj0CkNXUweiUgSyUBUo7m2mhaMkDjtJVo0wOBx+q6zITVr49I+jXI/ug+yaxGwFEmYE0G2poFI+U8RNkHVekSf0WczugBnyWR18ZI6pwGG+W0HeF1Mdedh3SCvnO0XJIi4sJ+1ONJKkdcj//XfuM7q+t8tA/H1CmPhYoBAchIGHxu+HaPsbZxJrn2DNEsiIIR25Cu1gusD8A9z10V7rQbvGRHlDJU6YpljDANSPvHx9aQWCUue6t6ZPlipCKg4ZyMz0oHUtMo/8Z43urkFdUqU7C9eBLKFrWFyImcgAvyYKICNFyuF6Ewgpep1ImU/e4yI6quFcYd/LgtGREKtbZgLTNiTTNTa5K5Ug8hY1mRKzfnUGWhdcwotW1m/akE9Fxtksk1UJNMcMLauWGaycEKKtgMcJ/RYW7HvMGAYJtxAwRQ6D3juvuGUWvy5hLUSlC4MmTt9IG4NJxdkzEePSES5yxJgg+Us2eSJYFvZe81M6LNLn2oWZbBt+DD1t/cZRaUaANir9l8vRhHAPNYEv7RHu2+9kYDkjW98lN2F0Wrz4zc1TukUldw9NkpI3d/qksPRqwzvbZvn/ZBQAsG+wyJOhsl48AOyD07OEVYF4fovEwzcLoAHFg/x/1yawpTspxrvpc/juktADD3IUccgxvVtS/H1e2cRgAPkY/ogUrnWgN2RNJxWfeviQI6ud8JGaQ4mYyPb/vak0DN3rJsQRhqRBFOQIpzyA5JyhKsj5Glj9LU2RC1HH1GEma9DSKv378nzPaXsYBEi4lPARFvvELrQFV50p42Qw2VRgQeKo3GUhUbZ9i1z7nvxvLT91wu0CaX60mEZhUoYBcEaG0DYY4JL6YFMyfczrF8tgmUMyQTXuyXkvGZsswvkdAtU0qgxSHxDnEZS6aEl7kAEC14t6a9RiiM8JstKIwYL96CD5INUTCyvRzgyGGzG5rmf/a8l2tpQAiAXIcRcVhkeRU6mJkK7Upll3W8GMhh4xVIZrU2l1kAidsW9i6Lk
DsqEtHW/00rc9+atZnX+hv7az1KsmMcDOBAHd/LGUnd2Gz2U1XjhHIo76eYnwPyhuoHDL6uZyAq9SMKXvV6xIVBNCMMvtSS2POg2RMKmaJFDsMmCGWrK2BvnqMTtSNADUoI+BBQokI+hRligJcG/ZDP12F5/bm1j7K/r6e90YCE091O4n0c9fvStD4OKwW8K07mKW6mLvZf/M//O5aY8Du//es/0n38JOyf+pVfKO//0t97rwKRjnKkjf2AHD2iTIGAfnb3PWGdFF2vfn7KesCw1sHWfl+2pdE1thHo44Grzy576Yoov3X5J1xBjQXqEanScZwD50k1egEfM6SrshQzU5YaPj5YB1cm7N6pk+/7Y67PVDLnXeVA1dilcsQxB+gjgG/7ureO9uHRPnz7F//xXwoA+H//1f+tk9WVVwl8Z+C88hD094rSI1W2V9WEtDbkXEakl3a1TpTUJMi9qsXTCky2ufGb8vtVdc5BnGLN7NXnUl6diXTb+9d1y6m5JMEPzjU3ucQYgINz1Agz2LoRcqk4h9NSmwQqbW3yBGJpVOg12JLPQeIRXqV9s8yvIw9ayYw47yUTMu5AYcRmN4KCOKnDRgDJsAlwlIuyqVVbusssQFkUfOReHKtzMWp2SoCivDpe4OYJSAzH8QiUJOdKP5hcWIPk7uHQ6TL6O/njeDndngFELi7NZy6ldrmVbSTn4CiAiOD9iESESE6ASD4fWQNNKJDk831BmAPhINXjuBhbIYTIkhkBAPJF+g4p5PE0n2xV1tLGh+RJQEqoimnnJH7PWd/WQOnpa+Irtkfbpz04+mgfnb3RgOSUnaVlfUSg41WiuJr61JoAG7+JsX/o6/vVDMlnaBTw7jQvXPjCKniQ7nVeeg752vas9aBiLRNif9dnIep6ZH8BmIaDCUM6vp69tLFsS46xz5pwAmZyjdJMPdZjKVRbK1JlO4FtkEkyZPDnXJsZWTMbbQTqclIoaQokDb1BKBXSUEyDA4/SkZ+crQVmNHsC5bkboH2cJcxN+ZjhXUuPGYOVWW2nn17OVQMO9yks7qWvyYkqVxPxNhk9C6IVgPSfAcc1UimDkWSygASprZB715f7ehsYQ64T0cLqg6+qTkrjmhbGpGICEGdTawWid3C0AYUBgRlxvM3LSENEMt3YVVUrjAFhyMXMu0HO4UbqCcLgsevqRe5yVldldu9wbHWMFqodMHoBi4EnuGWCWw5wyx6IC1ycMwAw2ZLs9CcLMNZegdJ8U4GLvl8DMGUbzC0YSgzHGZDwImAEAgCb33sv26OAREG2EzaAD0h+RAobkA9wfouYHACGA7A4EYWIKeFi8Ng7ySrN7DFQwn6Ua6L3hha7F/Cee5L4UMd6wNCMc/G6c+6obqQtZG+VtcId1x6o44HWzPaf56tQltl/nNz3R/tU2RsNSGz/BuA8neBU9/VXsQ9CISmN94xjWPft7v1fc7A/a74b0fo1KMWL2TF/SM+M9e3cDTzW7rm1LIntw6FF4mp90aYWYq7ZWjNBIN8X3hRvkit89nklA6ibV5pWpXVV5SLr2FnhBQBHEeTeidPPi5RAqnBbwYjet30xa0qPgOSTMqFatZ9FvXIeAMQLH0ItIu99Gb2O5KiAZq3zGrw7Wv8a7dX2ZtJ7rwEkmcKp2ZIhN9Hs71Xn2kLlHnjYbVrKTt2f42MDhFYVFZC4TNdxDi4DMOc85kil8F7VogaqDq4CEqXr3LoKSJSSwwvDh1glgsd2ytaiZkcQh5RcVc7SQudM3xm89tSo0fgeYKz14+jllvvPe6sKZ7XAfSQHDwYdruGWPdx0CzffwHEEXz8XWeJlRpqnfKI1zW1vjtzc0Xt5bwr59W99X4CDtWUBVP54mQXUHfby2TKXz5CbUMp+xHa7YYALA1wY4XaXABHo4ikwjEjDBdK4Q6KAsAU8BcAHOHC+R+Tej/mG5+QRE3BwjIuB8rOh1LilUTi7naRXTxi89CAxY7mlbRG5hoZnQafKG
Gv28dT1s2b9lFN09p42/yZQtuQ2eXU/7pW2+YjT7rQ3GpCci5S/imkTJ133ndt/ICXQFo5SkX41YeM71neuAaQdJH7wx3+mvP9dv+YbHraTb6iVJpn9YHmCSnDOOGW6EacGXDRR4Q509Nb0TVkBIg4o3dF1YtCeDsQip9gfh+ycO5oQZBv5t6VYnkuRL2nBKaqTpw5by8GXV1XH0QizRqutMwesw2f72Ggxqew/TMFqzYgoEBFlLZNBeQ3ok58182b8az7PAQClR3nnsBtqI7xjWWor9yvP5cZXGVa7HLAeXNBnJ/gKMBR8bDN9a/CuqB6Nvr1XvcnqkWszIUA71CoAOQYqx+fIIQPrnC1yToqnXXJwkHuasuMJL4Xa20DAwgjeYQuPQxSlrYVTcQ6VpjNHAQ9xYfgs7RWA0kDRLd3+kCvUnjD4Qt8Jg4AT8rXfiAUdNhp/dO7NgduGedWx9eW9XoeickZWYUuyI4EcfFpyZmQPN0t2JN2+RFpmASQKCJYJYF7NTMhFIQEcCkj0fZAOSW4YG2BiTdcNjkiHvYCgKQOSea7vta+LrdFRwBNGASSbLRxHuDAghVHeU0BaPFwAUlwARyZ4ljI9UEArGdBGDlnZyoI+qsqSVGtKZgCOU6Fs6T3gqEq992CkzY6sZ7t6YHrO/7EiMg01mI8/e7RHe4i90YDkvnYXTUuBReT7Zz8eAkba7bfRhuazeNzluDcbVb6rV8an1exgqeeAuTpCM7OhbdWsiSzfXo9mXbmYD0Apkico95mKxPAp2Vxrtl+Hgg9xjKrzY39qMwahNJyq3yXIQD+DUaWxqkN3XCyfJ7CcJbFm5X8HLxOZ0rICiePpsnNhHTpra09UPw+lJNtPqQJDbQZmwYcq3wA2E7h6Wh/tIzZyDrYhmoIOANgNHttcSH4x+OKI9pFWvVd72VW1RkzE3DNWqEGtSveioRKq8xsy6C7F0h2QdjDF00jHRclA/Sxv2q0tk81pbYKTXg+cqoOWyMFnOppzIsygNLLNUiWO9dwqCNH6GnUin5EEFXxYClUnLplCdOSISjDCOdfUHFB2UvWzthbAyviezn7YyDqAEl3f5CyLzxkqoc7JPXExeOwGuSd2gbAJDjvvQDcv4JY96OarwLQHv3wf/Ow9pGUCv3gfaZkRb26w7CekyOBZkBebsYt87r2Rs0RhO+a/B7hxK+dk3ApIGYYjQKKZj7RMGXww4s0N4rSA56XZdoqxbJs8wXlpQDhcbkFjwHCxhb96AjdukZYZbtzCQ6aPlBJckP0JmxHRZVAfHZJLBYDUjDRlZS/GNnjEUa7xtNRsRqH23dFZ3lLxNgaU7MZK09sYEGz3Q9ZVT5cN0Mp9I68249nfU7Kvrz8gURD3cW/z0c7bGw1IqIvYflBg7ul+0r8fhjhDo/et63XVCV27dy0YWbPf/Y984wffsTfA7gu67lMvZKUMyWRFbF2JSoj2Dtq5/VBnSN7LqxbWAseZFXY1aqs9OLzLmQOIcy/3S+3m3G+/ketttHTbG9YW/mqEWakuQ45qijqR4dcDjWKZcunL3zi+Z7/49mXz9//688+FssCyvAKlUxS1v/T33svnI+E3/ANfc7zAo32oRq4CBKvMptkRBSPb7HzaPhPWOAmoVhlXoALYVoL9GIT2Coi1J0RttKfgWYuk6/2LHLXPQETv2LhUkHEfpHsKkDjphJ3yMo4CvCmcjrkPA8hJ3xJK8EnOqYD/LJ+sVCZqnUOl6IyeEF0Cm4nGZUWvymTKmdYTQMQ6Pz0Y6W2tfmCttuSo5qS7PwZfGwnaLBXiJP+WCS7O4GmPdHuNNO3Bh1vEly8Q5wXL9d6AgrkAgsRc+qyQJ9AwwHkCTwucJ9AQELazKCdxhAsjUpZEBvmS6WioWjc3SMyYr/fgeUGcFszXt0jM4GlBMo6AKjKpKlPgDIS2OZOyzEAYBJhojUqvHtbfSie/QVPbAci9HQ0wXFMXtcDRXqP+s6OMV8ncy3p6K
nIDPGrzrJMZ7MfsyKN9EHujAclRMVYz2Z3/bY/+6zpfbV/O0Q9OmYKS8nfD19T9vN96PkvW852VrqVZkbXsyF2ZJTHTk6A4Da7hqwNVNvccZelUNqS4L919ovR8oEo6ChCRtaR03N2jL1andEwTG+BwsUIV0+NTJ+7IoXNoC4LRZkUsl75tCJZyhPz43OwXydZoT4o1W6v7+SwJNnzSpvfPNpCoMDmHq1x/8GT0haq1C1Qczn7MtJHU43u2vlcKX18/ZCl8anq/Kj1Ln015L5+5xHDxAEyLAIao1BuJtp/LfJxVcOqUm1xWVwIFgAieAjwFJDjMLsv25gxJFoWFdw4zyziiSkTkgHkbMBa517k4jJETXubXOTI4Z1bss1BASffa177pGDjH/F1mImnReS/7q9ZTtDYl2u4LbU4yZZodIeyCxybItdkFQuAJdLgG7SVDEr/68+Dba/Cz9xCfvYdlP2H/3nPwvGB6fl0ASdxPAg5MlkKOkeC3I5wnhO0Ivx3hxwHD5RaOCOFyyqAlCH3LGkfJfJhsiAKSZX9AvJVtxmlBMsDVkWzLeQIPM6In+C1noDPU9VPNnsm9EbIUsDwTKgkcWaSeZ+aSwdbvVC5bzr+EgfTanKur0+sDtJkSvXYXRv5X1eq034rSLk9J3vdzZzRzq6roRaq9aebHTMCjvaK90YBEm0gBOY1oHFVy9wMlr2qnHKpT3WTvsy7bfGjN7nNMnzXT6Loqatmi9n45AM2AX6zxRVyRqiQDRpTaBJwHiQpEehDSAxC7fNlyASTZwUhSUEyo9SbHx2//yhOZmVxGT6YotT1cBSJknDznqnKS7toaINHvknNSsJ5SzvK4k6DvZorYRy5FjwPdD/1/VMp4j7ZuGr3XjMhb25CdTypAZJf7fuj9Y42TETA4ovFZsC2fxVxIz0n6N+hnlbIotRrSAyeDEALGDEhcYrjpVtSR4gwXRU5WMiPpfLQ6gw23ptpkvtf0oyorOUdASkjs4IiRPMM5wkAhZzEhNSUJGCFdrLfBwzvGzVydx0MUsDKN2otFtm0lg6eFMfmaTeyv1SmzVFWlejUZKnJZCeo4S7JWL6KO7kDU0D2H7Axvg8eYC9gH7xDAcPNeitfnG6T9jdSL7G/ANy9weP8llv2Ew/svEPcTpuc3WPYz4hSx7JWylZBigvMO5F0GIhPIOwyXGwEkQ0CKUQrZmUFDAA0BfsxANF+7OEvmI7EAnjgtiPsDlv0EnjJI0QyJyco4T4ieQEzgyKDITQbF1rKkDFYFsGqPkurIxyTAYo7ZmS/01dO1jk3gNbRj5prggPYZsb1HdqMv2f1S72MEI/q5rRfcaYJ7JLRoKoEHCMj9EJgjH5cRUQG5H9s24xt0gj4he6MBibVTGY+PWub3FDDpwUgj96oP/QMjCavF7J9BhNI7x70ROYDXr82589UrSa391lOlodjF9FJaWpY1dap6s5mwu27Vu+4WbcwlBfHCt0+QaHPJ1qCTQTXAg1MCQYp0Y5Llnl7u7tjq/e1mjl0mq07qSpXzDviub/nCmbU82pr9oR/5qexsiFOy8YR/7h++Xz+iP/zXviRUG3LYZmflYvB4axuwCYQnY8BAwG4gbPI2Nt5lALAA0VRaOwKp8970gtDv0fZxgKgPqcNTs571OdM+B9JPREC2d5BC6cNepFuXvfS2WGbJiCQWYALUnhLGGqnYM5KyFZDkqZK4RMITBMikfFj6uXfi6IEAn+QnIgyQTM0ASx8gEwWXhpEeY+AyN6jK0poK1ikrNSJ5H5hTKYqXQ5J12SJo69D2UrDn5ilyyM+trZXLARGe5drERRSullmK2A+34H2mSu2nCg7mBfPtgsQJ863cUymmAg6cd/CjFO4n7+DHTNnyhDgv8JCaEwWIJbtyYt9drg2hIdekQOhhPASkyIWi5YcAvxOa1vj0EsPlFuOTC7iLJ6DNDnT5FG57CWwvkYYd0rBFGi+QKGBapDnixAkzCxg5LIx9l
H83c8TM0jh0jtJIU5XXIrf1In32Sl/bf7RaQ1LU6KgKUygo7hvklm0YulYvVBGSwxITfN5HVdObY8IYHjMkj/Zq9qkBJB+W3Udd66Egp9fq7wf4+2RS1ihHLR/7Qbv0RpvNFijvfI3qQ64rmjXRHruMvNZIUZ1k6++YnTj6XOtDUqqg5Bw9S+1cl2FegRunlj+SsDbUNemcC1wMACWHgVJVcwGaSPM6/7+4V3cjpAfai2lp7+O5pdJppPXRHm72umnN00NMncin21DoNwpErkaPkLMiIe6BZYG7vZW+DXGB49hmFXyeVtZ6Qaw4+55C7e2gv82ZiGKGl+8mASGq2ARe5D1H8OG21AmoWtKqYhNV1SYrGyu7Vb8v8rEdFUeKSdh8FpHIy7jgCIMfQRlURRIVLpUy3nppigcAh0CgKIIBQI14R+ZC3TostXdJXz/Qv+8BS4ymT5Fe65NR9fV+FXa/9G8ywMO7TBElFcaQ5d2cVbWWA9L+WqhaOUMy3+wxX+8xX98W2tTh+YTldukyJGwyJASeY/nbdtqmDCJoqBQqzXAwDOVLMwFDgMshfkeENA6IQ75fuH6uNSvD5RbOEzZvX8FttqCLp/BvvSvvP/cFpLAFby7BmydIwxaH5BGXhOtZwMXMEpCZY8Kzw4z9wridFZAwrqcKRG6niMhcepConRIh6LMhYyCMvirS6bxm6cfaRNQq5ak0PbCSIeHqe8wxFeGYwyJ0s0Om4c6UsIydmMBraHL/fLzA6ePe3ptoj4DkjJ3iUvYKE6dsjZP50KzIXfZ7/9Fv+lDX97ra//Ll5wAk2n/K7rom5FztqYAWjPTWR4tiSqWmbw2U2CyEfvYQoSgy+95nUr7lC08fsKbX1/7Zf+jrPuld+NTZf/w//N1SnPoQ+8N/7UvlvaokeecKGLkYfGlmN6qMa1pKYbKb901juWbr+V5OzjVUKPQUPW1+x4ssR7HUe5SMRFmnaWIXp9zpOzfXSywSssxI0156WVj5VgC2n4QFHyAPDCOAGS5LxyZi2RcA4AxOguyPS4ykmb0s7argxCXK0fx8eKgZAx2bSJ1350pmgalKglenk+Czw7wJVChcAApQ6d/bv/XfaMIi0aXGoW1rDajJjPQZE/2NvGLVXB9UMRnQxLHK6HIU6pQBirxSdLb2mdp9e0hoBkV+UwGK41qkTp5yZoWOfus8wQ8B4XILPwTQ7hJuewm6fAJ3mVW2whYpbOR12AJ+xDwzlpwRUZrWfuGaDeGUa0gkM6LqWdNKZqS3UxmRooaWxQVsT6nSOJSc+a6qLZ7qkTUgg95M05K7mkHkpbaJHCgJ/Y9zI8jhXD3Woz3aGXujAUnquOq1wdyrre9UUZeafs7pVGS5dWItNetBxe6NY92Cn9UaiM+AedKiaZfTw/K5Rm20M3ulA7Wc3NqYjSoFxFxvdQ4sZat0eqd6DWZIc7eZ2wFcvk5HDQNlPfX9XXQv+fuD3ceP9tmxvtGq0nCWtZRhZ9rx/GLw+JqLEYN3eLIRILL1UjPiHRDmG7h5zrUAOSOR6zRcLhQufSKAklFwHQUqOVotLD/qxt18aTp45w7afLgFMvjgw6001LP9JJZM1bKAZCUjIpkS6S1RXgFpfpezKEmXDwMc+dznIgMRPwpw4qWGOZwDELIzbiL4OXPqkqtghKXPCkAYPIOzVPfF6LFw7RUyLbFRWLKv/fu7GoquyflqrYi+Xwch95u/NHNizSVuw0jkC13KOv1+WJAMz16K2vO86h1ImzzuAvxAGHYBfjuW4nbyIgkctmOuIQlFFli3C2QBEVXeisdN/Mq9rA0QwwC6fAoQgZ68A7e9gNtdIY1XSGEAb99CCiMWGrFfGMvCuJ4lc7BfUqmde7afMXPCy8OCmRNu5oiXe2mAaLMik6kf6s99BR9VaEABZd+nhxyO+kyVGpJc1C6+SVv7CLTzk4iXAJwICQm74ItKomZLVKhkvzDiG5EhoSbD9nFt89HO2xsNSNROU
ZceQq06BUb6gfhUwXxfw9LWjLijz+67f6f0++XvO3/+qbJMiT6iZx3/XSlMVpXGd9dBL60t6LNND8u6WACL0qHmmNeRUybSPyZHlgpQavdJ/+wnGZeXlUhqWv3toz3aObNqfd45/M5vv1/tSOmREQiXoy+9IwSQOIyQGhE3XbcZCV6AnIVgjgIOLN0JqA4/DEDRDXegpHd1XUpNp+yk29CmeRmEIAORFE2DuyztCpymaqUMPhIgWRHOdSHlNWdTMjBJJBmgpBSfYUQiFJCEKJmgkjlJ2msERZACKY9dUKpTnRfKuONshgSI+XUMHpFbJ/UUIBnNZ2uKWRpVl/XSKvjo3zfreGAm7pz0bW9VXjdleguVbIxStpx38ENejuo/7RXih1DWo7UhTRf3YrmRYr8TZlk3jNLXxAAS/+QdpLABDzukzRWSH5A2V1gShIa1SJ+lQ2REFiGPm1nFPCQzcshKg7dTxM0UETnhdlqazBYg4DKcuCYWVKpyVm1OSQg5u2lBChlA0gMRPc/nlLZSpoVGlxkDjkAuYUhK75JlH2m3j/aq9kYDEn14e8f8vkDkbDfSExGh8nnZqBkkuuzIuajSOaCxuvxntF7knLUN1oTbKp+f/o29JBZ49GCkB5GlsSLEZ5mR4Pl4OZv1P0ft014jtoFiv9+PjNNHu8saBzYl7JfjiO+aXQwe7+wCrsaAp5uAt3Jm5GokUUearisQmbReZJLi5DU6VLZknXpuazGsHQEOoNR+KOgAIIBjmUv2o2w7v6Y5y8QaBSVZf9dHgqjQdyRqPkN7VCSOLZAKEkBwC0rBemJqXjVzUx7ZFSUvTq2aWG9FDY+Qef7ihMfQOqUyz+VrfI/B394TR03wSiY415KtCm1YsGPXK68bT10tAjVzX6lncoREAYmCOPbMoN0lmBnDxRY8LaX2g6cFNO4R95M0gpz46Br6Ua6fzw0Rh8ttkf0dn1zA+dy4MEv+lmaJYWgyYgDqfWlrh7oMmRu38m8YwcMOoIBlcwmELdKwweK3iAm4OWSgsTD2UfyRZ/sZc5QsyM0ccdAC9igZEq0VUSByM8VyfXswqde0Xtu2XkTBhmZBdkO9PueAyJGqojsGJLoXCXXOEnluma8GkuMdPGGOjIEcpvGNdisf7RO0N/rOEbm8V8uEWLPP/n0Kzq3E8H3RQdOcyihXAOcnmVVlrc+wDKpGGys9z9K1UnM/NJr9SqUykeS17yw1ee2aAVIoCgBzvx5yReN/zXTd5FztW0i5MaNJi+i2dAJ5tEc7ZaOhAWhPgPvY1ejxzm7AW5uAXSB8bucRHEC3zwo1y00vpT5jL03kGvBQO/Tlv41Tr3SYXIvhyMvvTJM6ALXoXLMbh70AEI7SOC9nQHi/R2LGshcqlsqyspFf1c7eAI4oZBop96PIwTpPoHnJzu3muL4EEKWmILUGCeK4JgCOfZWY1dqZlDIQy9mdezyylAcyebxdCUgw1bllDFQ6v1sqVu+wrkq/+uqI9oXLwR/3nGgosGx6OHUUYaUf25oElUe3Q1VKqRT8Jz/Ab3ZgZrhxK0Q2jhiZS9d1nhfQGMq1jZNRbsvXTkGl0rPC5U7qOnLndOcJbrMtNCu3yV3bc22QUvMqOPEFrChlz3kvWQ8KgB/BYUDyI9KwA3wAh21Rzbq9FdXA20XqRTQbMjPjxRSxZEAiNSOM929m6S+zn3NWJBbBgnMF7G2WpK0Z0czIJjcrJYfSG6YHJBaEUN5GA0hggI8N3BkKs9wjSuFKWFga9Q4RiN5j9ITD+PpnSDSz9nFv89HO24MAyfd///fjv/wv/0v8zb/5N7Hb7fDrft2vw3/wH/wH+JZv+ZayzH6/x7/yr/wr+BN/4k/gcDjgu77ru/CH/tAfwhe+UKU8v/SlL+G7v/u78ef+3J/D1dUVfs/v+T34/u//foTwMHykDfHuY3cBkePvTn+51vsEOJ4o7LJHykj3VMo69bv/6//xl53e+U+R/d1feCGNxnAcb
VR1rSIl+wCcttbvRetFdDA+de5P2hl2ApE0RFurxbTb+ce/6XPnt/Fonwn7gR/9qSY6fde9972/7psftP6rjahpbYLDJuSeEcsk8rlKzZonpGXKgGHl5u5pWkB17PP3rnP0YUGL0qQgICIZylVapBaE9/vSH0Kj50n/cS2MtgXQiRnONNRjiDOgkrAASj+7FGNxWO9tHAGb8UkMwNeMyQnKln5WshT5VWvaVBZYsyZr84nWlqxReVaLmrNjqzQeBRFyeY4HozkyQO5kYAdAkx2x9QreUM+cc7CNApNzAhJypsSNW/jtXhSuIoPnRa5R7tAeuvvNEcGPgzQpzBK8kh3JIHOzFZpVzmhI/cfQgA6p/6k1QQBAm12hFCY/Sh8RPwA+IPkR8KMAlPECCcCtyvjGhH2mZd3OQsHSbEhfJzItjJsp4naKmJZYgIitF5k60YK7zAoMlICaq1k3nzNug5eMnlKzLACpGRLka1bnp6KmZrepIDaDkZQF+FOuMUGmmG3D619D8mivpz0IAfyFv/AX8D3f8z34x/6xfwzLsuDf+Df+DfzW3/pb8ZM/+ZO4vLwEAPzL//K/jD/1p/4U/vP//D/HW2+9he/93u/Fb//tvx1/6S/9JQBAjBH/9D/9T+OLX/wi/vJf/sv42Z/9Wfzz//w/j2EY8O//+//+g3aezygu9c7mqWf8rozIWVpXR8m5j/KHBSI2KmW/k+9x9Nln1YrqlJHtlcwISnbENkMs9Kq1U2c/LKOtSUwz8s3S/liL5Y+tymrO3W9sRkTrUGQXch2Jc42f9xhAeTS12WTMyK2LYtj+HA+1b3gy4mIgXObeIvTyF+DmPejwAthfS2biOivb5UxGobeQLw7gkVwu0IISHNO1gPx0GdUlfU2HvQCRa+3efSON63I3baX3KDWrV2KyHb2dJ6nZyNF1n39DLPUFDIAG5NqRhzlRKZqsipEkBi9wFEQ5zI+5548EPzw5INcgDl6aEg5eHPs5ahE8g70DsQNCW3PW1Ar1WZEcBQdQIuZazCzbo0bmFTgddKuR8Px33gcdv+x6Ri91HmPOygRSBzevzJE492ELRwH05B2kzU6Aw2YLmmf4q+sCRHk6lt4FIIXpmskYt1VgQMUGlJ5lsyJGiEBlpYsUtZfPohdKmWZy4EiUsihgScAUZV7Z52zIIQOS2yXi5UGyIc/2FXwoAHmxXzAtjNtp6eR8q6JW5ISpu3/1PvEkzQyBCkKb653PtzSolOttm1QOXoBBIAEiA1XJ9/OAxBVwCaDQExtzhAQBrQslMICQO7VPnHD7BvQhIaO+9nFu89HO24MAyX/1X/1Xzd9/9I/+UXz+85/Hj/3Yj+Gf+Cf+CTx79gz/2X/2n+GP//E/jt/0m34TAOCP/JE/gl/1q34V/spf+Sv4ju/4Dvw3/81/g5/8yZ/Ef/vf/rf4whe+gG//9m/Hv/fv/Xv4V//VfxX/zr/z72Acx7VNnzSdlK3dp/BuLQrRNwU6/l63eXpb5zI2p8BI/Xxl2c8QIPlfvvy8Ka5bA5FSXFezYzZL8mGaZjNOFcyrCag43rZmRAC5b2KSpoPy9zEV7Nd/87sf4t4/2mfBalH0/R2AH/vpr+LtbcDohXs+epf7eezh4gxMmTZlajcASF1HWKEd2D4e2VYByMo+CpUrg4EYaxaGJQPCOQvC84I4zQJETL3ImsXc0E738l7KNmuRgA8jOpAYlIv5ndN/NROiWQwNavgEkVFlB1ASzn7ejVN1BbbQXLMrStOytQK1IR5yM8ZzhyiBkpI3qgJl8qerkffRiyMbfHZ2SbIjdk+TIwFpQM1GcZReMmHOMrxSE+THTPfL950FvVrjUcBwBhwIg4ATQICJKUpPzgngyPUs0J43YSzABDkrkoK8X0CIUXo6CSDRfhvAIVbVrBdTBSKHKL1EXuylJuTZzVRqQyaTDSmAJAfRkpk7nKbUVFX6mf608QABAABJREFUgXRwzVDpP4eqnKXNcAmWn
gVz32TA4lC+b4B2c3uIgp53hEQOLgHeCbtgwMPGo0d7NGsfqIbk2bNnAIDPfU5oJj/2Yz+GeZ7xm3/zby7L/Mpf+SvxTd/0TfiRH/kRfMd3fAd+5Ed+BL/6V//qhsL1Xd/1Xfju7/5u/MRP/AR+za/5NQ/ah3Mg4lRtyLkH5j7UrlOOcv/7tcJ1BSOVn9uCj3ONr/Q4Pq11BdLrw+XGh22zQ4YWeAKTKpbEVJRLYkqrUqdH126lhqRsQ/gWiDHXdazs40NUZkr6u9Az6r3XK3492qNZuxj8KvhVyo/amqjHOXtr43E5SCT7Iji4/XO45QCargFekHJ2JB32nWxudfKg1JgwSn8OG3lGzn5o48BsrlOgwkDSV8TWlYxboY0BoO0WzktmxPaISCyAwxawW9PlXJaAVaqPysBqQXTYjhJ1N9KuDaWH/LE8sO1fYk3ljwGAcpf4WSLyYdhiGyiP45RVGgneJcy5kGzOKn1SFO2KYuPecZkj5k5ly1o/n2mT1yJlvhZ86/pOlIRGcUhrobPQztrCZ3VaB+9K7cGQ1+PTIuBymVD6ymgDye2lCCWEQbqbcwSpSEE0vUp6MQQFH5aC5T0SeUMLy9sAwPo5UQEkoFCWV/CRKEhAi0UURXqGxHzOU8mQlIL0aSlNDV9MC+aY8NWbCYeF8XI/4+V+ye+XAj6myJLJX+Q1pRaIyOkRIOcDgcmVe0Wv+ZofUK6zk6yVyPm2mSvNjAh1TzMw+drpNi0IYbl2Li7l3l4DJHpugzYKBZXxavyU+ieP9tHbKwMSZsa/9C/9S/j1v/7X49u+7dsAAF/+8pcxjiPefvvtZtkvfOEL+PKXv1yWsWBEv9fv1uxwOOBwOJS/nz9/fnbf1oDIevO7D+/Bsdu04KQphj4BRiwQ0X+n9OTDpxiQpFwHQgDYBIv0FHISUDLHVHTPZ07SoMlkSdakC88Vs9tTzWVbqXz/Qe4T6sDIIwB5tPvY4B1oBRFvg8+9K8S0cPa+thsIm5Cdx8NL0HSda0YOoliVwUhapjZKDVS6VhgK9z6FbXEC0wrtyeVnMnXRVscLkmNxKsMgbPQMDoAaSadhQtiOYE9tzci0FGBStmW6cWtvC+d97VExSI8KRwTabkvU3Y1bcXxtxF2j8r4FJ7JTff2IOVbOPUo4ZwTihBC2cOQwAoiukkG9A1Ly2Ab9u4IHzboKdU/oXJxSS+qHCWwYKs6arQfaKhjxVOVfCZLxoPy31quIA9sVQbt8DpJkuFxiaRiZWDJuqkZGHkgO2rvFUSj9ZWxTWCtIoLUdDeDQTAcy6FCgqo007fIKSny+P3M2RBsVcgKWWepAYp5LkqFpLQzslwpE5DViv0TczIzn+xmHhfGVl4dC03qZqVr7OSJxwpJfmVOhFzZiK+qfeKpzVCBEWlfbaq5pzkhZ4DmQ1vBU+pdzQMiKakF/A7mu5HKwgDlfs6U2IQXOApLETsQfPGPwIygXuw9vgn/yCfQheSVu7WfMXhmQfM/3fA/+xt/4G/iLf/Evfpj7s2rf//3fj9//+3//2WX6wfgUEPmoHULr3NrakBLtMPSsHoxoYZt25e3VTdbev6n23/3tX1ht0pQgVIVSfwERrlJFtdLdlmvdyF1Kazpgl7/PZLxyXXux+94vNgqp+67UiL606Dt+2WPh+mfN/sCf/V+Pntt/7f/0K04uv/EEXpm/tCZA78tI8gz84I//DN7ZDXgy+kZxR4eQrXe4GISiNZKDh+l8DkOpCgMQo2QaOmlUp9z/MBS+vY06q+NnrelqDi9Oey7+dvn70hNks5UI+TKAmJHmCQMAP+wRc9GzKjD1Re1qzvuqxqQZku1YAEoBIit1CChqTFYmttbN2CJ9530BFuvd2yc4phxlYQQi+LBFJAdPCYEk6OScRMwdAqbA2CwinTpzAs0RG59wiC5H7620ea7ruGcNkaWM9lYi564CkdFX53VUx5UXkX9eluqw8nEkvcmGaS2ILm/Bhd575eJlc
KGgAgaUUAtKmn8wYFizIh2QkTlC5tSUUMCHfVWQMuf5+JAL1Wdm3MyMOUv37hfG9bTkOpFY6kVe7hfcThFzZCxTREqpvAo9S0+LAWDlEDg7+K4AmD5A2WdJTjWyV2lfpQkKTatS6vRaezCQBDQXQKJjggUiJwCJy+fWxQWgAO8oU7/W9+vRHu0ueyVA8r3f+7344R/+Yfz3//1/j2/4hm8on3/xi1/ENE14//33myzJz/3cz+GLX/xiWeav/tW/2qzv537u58p3a/av/+v/Or7v+76v/P38+XN84zd+YxlgLRhZAyLnnNHe1hITvZN6H2trQ44zIzPzKhCpg1AvA5gVVAIdDUxvou1zEWdMjCE54aCyKoFk6hY5GcxT7bFwMzMOS5ZLzDSt+4CR+4KMc7oEa9SH/neaBbESwms1I4/22TLbFfs+pkXJpz7X+8v23vncdsDTrW8i3VpoPGQgEsghxL3QaZZ9iWYDkN4Nhqbl1JlUB33cAoNEuFPYVi6+cvP7YvYMPuTAs+PKeXvRAywdzZ0j2XYYkcIo2ZkwIC0z3O4SdNjDLxOGt0TtK82VytXXkmjUUzMi2k+iULOGeiy2SF9+u1Kkr9+F2u3bed/WxCQWRx0AODtrcc7O8A2cz5Q2P4IoIPgBu3ze9lEc4tuFMTNhigmHJWDhhGeHBXOmp97MEZwkYm/nEqCOL0rn62laqh6or1oboo6rdwJApIdFfg0OLk5wy9Q2xoxCw3I9xSqfGwBC4wNaKp/NWgCA3xwDDK3vMJSgIwAC7YmB8h7275RKVp05IQG1qaSyEiDzBgMlO1LOLcvcpOd9HxlLTHgxLeCU8HwvQOR2WvDsRjIkz25mKWA/LFjmiLhwzoxITVPKtSJ9rSGRzHEAEAZfjiaFBOKEKX+n/sEmz/36bzjKlsmrQ6bcofpGCi4HcggOcg2XqclkNfexpWz1ZqhwLrFcZ5LrFygU0YTX2TRo8XFv89HO24MASUoJv+/3/T78yT/5J/Hn//yfxzd/8zc33//aX/trMQwD/uyf/bP4Hb/jdwAA/tbf+lv40pe+hO/8zu8EAHznd34n/sAf+AP4+Z//eXz+858HAPyZP/Nn8PTpU3zrt37r6nY3mw02m83qd3bS/qBg5JzdBUrWMiOr33XP93EDLAEjx6CDCygBgP/Hn/vbGAPh+37DL3/IYbxWZqN2es4ShNMgxet5ckHSQGPOilS9fDsxA+vUvLgifACcBxhrheprqfbeHilZr4f9R3/h72AMhP/br/8HPvZt/8CP/hTm2Cro9OpId9ng2+yaiixYaobca4xtBjubQELDAApVQyPdQwYj3gGIS6FSFXOEBG4lcLXGo0ilKvDI4INqobB1GNWSk/9cSrJMYomoAqK8BMAl6XDuEgOB4XhA7ZpeMzRpGZByPxPpMRHhMzhpDkNrDmytgaVkjaYgeg2A5N/K6wow6cFIOVgWIJIdu2SiyEhJZG/zMuBRHMIwYhe2WbI8AwkkpMTwBFwwYaY6Ps6cMKRM3co4oJ//1ua+c+McUCPpmikJ5DIA2ef6olvZ3/kGmA5Iywye9vUe0XN2QlLX5Xsm75UBJ6EFIhSQfGgASDTAQ2N0SZkHeuobym0qgEU/t1n0lLMkKhO/ZNAyRy59fOYoGfiZBbjMnOnBMTWF6TaAOBngcXxrtJ+V/edcn5OzIiUAx0lKqzghOsOuKL6BK8d6VFtirr+qUzpk6jBMnYgqwmnQgJdy75ZsyYngWQLn4OEi9zkvAA+AY8Elq796tEe72x4ESL7ne74Hf/yP/3H80A/9EJ48eVJqPt566y3sdju89dZb+Bf+hX8B3/d934fPfe5zePr0KX7f7/t9+M7v/E58x3d8BwDgt/7W34pv/dZvxe/+3b8b/+F/+B/iy1/+Mv6tf+vfwvd8z/ecBB132dp4u1ZHcJfdlXjQ79fqRdTuyowoTctK//WZEVvE1hawqjTkm29zZHhHRu5S6kaG7AvEJCluT
ijZkH3MdC0tEizXo73IFRAKZ4XIrV47tbVMBpvlevnLmGrhe7Hs7K32N7kDMD3ah29rhaAfl2mx6dhF4FRV6T5S3p/bDi3lMwMSpThWpSOPdy+GHNWW+hDvgG3I9Akb+Zw6br8pOE5+lGW3oVJu9DsKpWA4kUfp0aA9G2zxcP4NgErJUcDBnJ2h7AT5Rb7jQYrVeQsMso+0u5JllgVpEUpJygXvWOYandc+JtxRtywFaxiPe1IosNB9dXeMq7ZgvzdLcVFHzmQPEsdaNN7J0KbhQq7l5kqUnrYXmDBg4YSXg5cO4JHxcmprGbR2Tu4PlCJ2oJV1PW72ClPQLkB19A5bL3S+C5/g4gHu9hlo/wJu2YOf/SLSYQ9++X7TsLLPjqjULu0uAfKgiydC89tewO2uUKR1swywfa91HSqvHpPcN/UZaIHHKdNWAFqPCLQ06QSZh7VGU+eWWouYciPDLJiSwcl1luy9yd3VG8WsO8YZBRxHt00S6XrdUZcckCsnF8j1nDhhyo0QPbmSLYmcEHOXdM7vY6rKaFrs7kvWLNefppwZ4QVuOdRxQIMUhoJ3JERhnxUjIFD+zr8d35QMyWNjxNfOHgRI/tP/9D8FAPzG3/gbm8//yB/5I/i9v/f3AgD+k//kPwER4Xf8jt/RNEZU897jh3/4h/Hd3/3d+M7v/E5cXl7i9/ye34N/99/9dz/YkWSzhcNHtQK0LtEK3A1G7lp2rfu6LqdgxC6nZrmhFoxoQVvkhP/nb/s/3H/nPkL7Y3/9pxuVKD2/w5nUZ19orkZOHf1c8Nddq5SAJVn1E5kwVoS07jTmBH9ikFxTvtL3ffalv3Y9+Igmi/Ybf/nXPHxHX3P7gR/9KeyX9h5VfvO//Vu+5e4VfAz2H/2Fv4Np4U+8zsoT4OGajKg2quPkju4ltb/yU1/B4KWXgAYvmB0o32ul43YuRg5UaTbbINHtUWlZ0ToXqUZGrbOh1vH69TPZ8RzB1p4NNqrtQ6XVaHG7+b1ehcQLQJDO53GRzIl+5wigDFaiy93Oc3bCL0K5yvvsUipgJLGVCu4K+23mQztwZxAAR2Cb0VkDI2tUlZXvmvOoACpL2CK/rmZxsgPvtnuQ9+A4I4UNUpyx2Vxi9CN4cFh0YByBJQe0ZpLgzJooh60vsmDklHqk1hgEknvJzbdwcQZNt3DzDdLNS/Cz95CmPeKz98A3N4jzguV6nw9Zjp+GUEQD0rSXcy5hfoAoZ0vac56C0P+mTJtSwZIEqeMAMojoMiLNMXR/91kTTsh1iRkHI88pzOBMnS7zS6ZVaWakiqakBnys/Vszua0dIiT7seY4pJTgXAUsyQlIcS5nSvL+Tgs3dO2z223iZFWZjxxKUMBxFCCSWGiaPWVLX4EjQOI0wwWISoMjgBZ5nuNyolLp0R7tbnswZesu2263+IN/8A/iD/7BP3hymV/6S38p/vSf/tMP2fQr2V00K7vch2l9AXvdTjuY9E2v9P3a6+titu/LKSqATopy/K70C1GT7sSSJeFMOwEk8rtfYi3YzRGkfc6IaNRK1yWbP70Pd0UMe9Wr/ngI2rBMI9QKUlD2rzRDjPm4yL0RHNpXMdtF+nW2JYOkT+rZ0boOKSKtn7eBkhP3rVm+AGUPDN3ygUReU4uO9XX0mft/uG4dDEPFAFDpGH3xahcFVdBRKDa5oRyyGlbJlMBkErtspANANpqa6VsFMLEUu6dM20pA4xAdRWoBw9/htlalPw4I4GFbHJ33I50CJOfkiu055JgzPEvZD9Zszv66qpZNtfbFZnJcGEtPDXrytnQvv3wK3jxBGjZ4snsHadxiEzxuZ4mWDzTmBn1clAb7DC6wPjZbpT/pSeIyRStnSggCSJY93OE5+PlXwC/eR/zqLyBNe+x/4SuYr/eI+wPmDEjKurcj/DjAb0ds3l5EzSwMQhPabOuClDuiDxskP2JmqZ+RwAaKhLvNZgBtZsoe2V0JZwUhgAYJW3GAes+2zXXXr
GlKGAhjzlyUOtBRsiFx4RL8ZGK4xQAOljnN+lIpz5OJE5gSHDskSiWDcg4EtfNqe1608aHL58xlFa2mcD1a+haXoEVRx4srCn4hVHoiUDOkWg+2HI5/82iPdg/7QH1IXldbo80AJ+oCVihXp2xtnecyI/bzsvwZ9OOJELmN7vrXMM3HKRkOe7UiLKCR3OSKYpi1kkJP0t08JgcEgJLDDVrePOdoljanOizcgMzmvT1vBoQAaLoTr1EZTh6rOuHMJfqm+79WUL8JhNfeY/8Apl3DvUtFmnJaucafpOn+hE8KkDDne8blXgC50NqZQMMd6+CUxGnM7pdDje4CmhnRbtxyj46O4ZaDFB8fXorjoY4GcEy9MJbsZ7YAOTuQhZ5lG8pBlYuQ31e6DNBFaoHcy0LqKohQ1gFVqTqVlbhPJsPSwuznp9alcrEr63QGfCQDPuSzmmVCLgrWwnxAuo2nZQZfP5du97fXSDcvwNOM+WYvDR9zQb4W3tMQMD69hBu3oCdvg56+C9pdwr/9BaSwweXubex2b2FJwEuf6asLVenafE/oqzrd+RCaMWrwtamh0HmEYrPRBpmHl3DLAfz8K4jvfRl8/Rz7L38ZcT/h5ue/ivl6j+V6j+m6zfqMlyP8dpQMSWSE7Yid6VmTYgT8UAUQwha3iwSZBJCI3K4GEzTwZAGXWp/VtlPk2Tna3Ke63jUQ0vsJqk4l7x3GIE/vxehxWBi70WMMJIXtLiIGwjLJ/bLMEc6xgI4JYErAwsf1MErb4lSyJHFh+CC0ZgU+Ot5aZkWTIaMcjIOCkkrZQlxyvcdSwEgZIxSIZLGC1GcgjUmdl6/1WiVDmtXz5tcfkIg0+F2j8Ie9zfvLs39W7VMHSNY6t59etv37LgWkhyokrXVgl7+P11MHvI/3ITllf+yv/3Tztzqi58xSBbQD8UAO1AC09d/puplTk5qPKxOHzXyd6y+y1pDQFgufk4QuWRiSbZNOfCz1JWvZr9/9j3zj2fPzJtp/8T//77jNUpdCYTieoDy5QpUC0PTQsRQDa3/g//yrPsK9rqbiD9vgRXlqpejXglU1fXZrJPX0s7/WU6hgUpbO3GIOHFMBJj/44z+Dq9Fj8CLTu/XUOFdK23Tl1xVkafGx1JUYh2OZgDiVKKg6GgBOAxJLrVLalpFatf9KLwigSKlaZSN7TqzMLDtdQD7Td+RQi79X9jGV9bXv1ajQSPLfAbXLtH3tj9tmUMz5LX8nFhqZlfNNDDhXnDgZHLp1saltYQaWGTzNWPYT4n5CnJbSeV4AyQDnCTwv8NtbDFp3Mu0Rxi3cZgd2JM3twoiLYSvZP5dMQ9i8i8kVipLOOQsfi344VCdbe1WA5d5xcYaLEzg3xkzTHnE/YbnN+7+fEOcFcWqdqzjmvjBjqMpnpY8LZcoclToSFSaJrEqJsq9TLi7XIFQPGCRD63LQMW87mrGbTj+nvXknl8/nui4i+YAyvcpm0HV+3mQBCQ0c9spXSqvikEqhuiOHtMI17rMkWoTOSbIkcj+1rIqFE8Y7jkuUKs18KCtql1krWk9VRrupyeqaVZaaKO/bgACjAp5He7RXsE8FIOF+pmrm27vByVpH9fuadbQsled4H42zkk3Tv+cByidH/1mrFwEqCLCZKHIofUV0GeZjaULL1VXTvgkAStGm3Y5mWXRy6ms9BmozHnYf+oL1Cprq76mbdKKZxH2UTMDMCZwimuYOnwH76q1IW06RS/MvTw5vXwzi5LsE+HwvmPN3CogsH3FGZQyEi7GC+sgJt1PE7bQenVL6RZFLXQEltSi2j5zW49Vj0s9up4iFEzaBcLUJIOdwyJP34B0u8uSu1MVDjmZSctIUtAdPhNI92ztgFxwuBylcp+mmFKmKVOsMt+zFWZinWgzeg0nTlVylWh3QFKfbYvYURsCPJSuSNFigmRGzanWMiyVZd0QNGK3HN1wuSOayTgV7CoJSqsXL5VBQ10lOwY4rQOd4GK0F4WUdT
vdA//bwTq5TGMbaRC5OlW5GXCR+ZYVcqFp8e4348gX2X3mOOC3Yv/cMPC+Yr/fgWX6vmZJwsYUfAzZvX2H77i8K9enZe3C7S/h3Po/hrXeRwga0fSIiA8MOaZSmlBFU6i6U+qS0JwUtck5robPLIgiBpEcN3V4Dyx40XYNvr8Ev30d89h6m959j/9UXiPsJh/dfYno5YdkvmG+r00meQN6BPJXmf3L7ZDWzzQ487KRGZtghjRe4nRm3s2ZIxNG+mUXOXes6gKqQZa+fBpVslr4EFdJKgKpNiknmMtNvhyKo5jOFWO6mSCK9K/dIffXkMC6MTSCMIWYQspQaj9tc/A7IuJDKvyzxGzWL2EoBE7nyvPhAkiWJ0puEF0Y0GRL7b00wRQvaNfDhnANSXAchwHE9mRpzrdVC60klAJinLNDAcHESBb3EcNPt+vpeI9PmqR/3Nh/tvH0qAMmaaabkVFH1B7E1AHHKR9XJtLe1uhH73drnH4b90E/8bHnPyaazU/NZLw6wlolQK/UZrtW/X6NFUXLgFOG9K6l4pXgJvUAmUUk7VxECW7uh+6mDrqxjnY61VrDegxEbhQYkCr2UIF/Cd3zTu2fO6KfbVL7WylwC9d5sskwr461d/lwjwA/Lepnf3/9n/lazH6dqtQAUYGLNPuvHjclSUb45B7x2YxvltfUgVpJaRR7u6pNTuOzqINsi1WgoGYlNtD5iTYkqEQMUQZ2cbXmvdSRGhjWhpaquZS7W/tZTG1PKWYzj42syIkkDOfK+B4bc/bLSwuq5AipIucsqEFFwI2Ib5CQI4VwOuFDIBbwBpTN7fxyl8J5LVmTZT+V1uVVAIscyRoYfMijM3eWHy/cFXKqk7rgFQep2UmKkOMu+DBskRwh+RCQn9x5DGmsuQg9UAOmc9qipjfJcjmq7Zap1L4d97fcya1YnIs4Mjgk8y71EgwdHLsch+68IQDrci7JZyIpao4iVpNqMULMiFozMZtyX1zqGi0KjwwyUMUeBCFDn/VPPkapKt9+nXM+IJhMuTSrrsppR0kCGBCPqe6Vu6fyuGRJrpyTlde7UWpK7frNmjezvqYU6AKJA4k6luXOWWIIZbJpmPtqjPdDeaEByLK/avbnn89UoKXUP/n1pXce1Iu3nBRg19SHH79uI/f32/yFmaVXMCZQjZ5Vickx5s80F+/4IFgSs1pV0x8Asg/zaPul6++9FjrFdj4IkBTMDUVUgMrzi+wLRSiaxTlFao9B+pux6Wsoz8fbFgF/y1rbJmFVQ22YWeiD+SWX6Lkbf0B1sIEBfbZ2TLVwFqnRvnwHSVzteLJzwYr80annqqHhyeLINGD2ZOpz6D8Bq4zoH5MJUcSCDl+dsUBBtC1AdSRO+lKCF427I3zHBlfr2494RCjpKXxEj7VvqRiiUPg4WLAAVPJwycRLXv7O/03dl/Mz0I0sL0+BESm3Bsp4zNT13sv2TuwYNR/RARrNmzuWaOZJ6t5ECfAhFUah0qj8z1hQqTGSkmBBnaeoYp5wJ4gRHc6ZDLU0txubt9+GfvSdF4k/elmL43SVo3IqzP+Ymg2GLQAHjsMEubCWrtb0AKBTFQgVsDoDn2viQbp/BLXvEr/480v4GfPMc04sbyeZMsk+AFG2TdxguhTgUtgHkHbbvXGB8coHx6QUuPv82/MUF/LtfhH/rXbin7yLu3kIKGxySx36Rzue3i4zrN7NIGktXdKWhrTjtTp12BwGgrgQerUN/ypp5RjMjeTOcqZWcUsmKkwbGOJWMis43oyd4iqauIzZjjN4/JUOSUm2SaLIjSsmyoMVR9QEU0JAZl+y/gUjGhCxSIFlBBeEnzocqZJGH9Ani2uRwQAEXcmK65pdA05enBC601gqQ5pmP9mivYG80IDllR8AEuBc46elXp6hcfU3I8fbNsueK5LusQ+NQu3aZD8tsTQU7QBpsGYdy5TzZAnEFHXY9CgB6ysv6DuDIsSspcZhajWzqfMzMR6pYABowo
j0aBrOCU9u6r33WO6tbetU2EL7uSnoFaQfjmRmIaO7XyAnwwCEypuiKQ/5JWA/6ezBiJbYPC4ts7gqFq9Q4pVSekR6QRE54uZ8b+ta0cImYbrSWJd/HwbuTjrLe5xrdl4xh1+CwV8gCTDF6qOo5wyjULZJMievr1IpDbZS1jLSvSv3G7NQy2gL2NcpWb+1Y2n7X1J6g0ov0d/q3yrdy3g+Vbj1X/AyccczMedbxC0At+qZUGk0mT3AMjF6uQyKHwQdRFrK1Nqbze29aW8ExIU4RKSYs+1mOc4pw3oHniDgx/EhIzKAhYHpxg/HJS9AYMD5577j7/GabJYQvpefK9kL6uFAAj5eADyA/iihBNpeSNDqMC2i6Bm5fgPdC1RKq2cuiqBXnpcr7egfAw3mJ/o9XA/wQMD65wOadJ9i8fYXw9uekKP+td+GevAPePAFvnyD5EbcH6X6+X1ILRGIqY0ovt16vocupcQConec1q7gm794Hpo6CbU5AjfcCTpgdAM4ZFwZA8C5hDxZlau9y3RIjppoVASp1M57IiFT6VgtG7HL21rF1IJqZCRmIbLLKl87L3slc6PP9qhRGQjlFsg+GxlhqoEgyfQkokr7qGbrkpUjHmBaDHzUILVnZ17+GhIhAH7Ng0Me9vTfR3mhAcpePY7mnfe2GtXN1ILaHSF3mvJN61K9iJd26lhGxHFBLifnBH/+ZMqCugaRSsJ0Hqr5Dr1WtskbOYciSouf6e9iJvc+Q9Ot7FcdT5X1Lmp3MOc7b20HqTC4Gj5s54mWuCYgJoJQaGowVNjinaqZyxJzac6tZke/4ZZ978LF82uzzTwSAcEq4GHxD8fud3/71R8v/sb/+08XxH4gw+oTv/Q3f/LHus7XL0TfZGu2evkaz6rMj9lnT+i/7r+nUvNTGaRaAVXqHz0CnpRqqKd2xPMOacSQUKpJ20i6/tWpY2dFISp3Iijnwo9AohizTy3VSbOpHcvPD0vhQ+4zYvhErlKxzlK01Wxs6bT1I/z1BqDOE4/ELqGBkNo7sQ3oVrY1tA3EJcLDXa8Wll8OcfxMowAUgLSMcZfpUGAFmkbrlCL/dY7jcwnnC+FSKwhWYKAixZnnmy34CRZZi8RjhvMd8vW/UuZwn+PxKu8vSmNBts2rX5dOaRfFtObRbDkgxgm+eI12/kLqXF+8jTfuSGbE1IX70ZX/JS2O58ekFaAjYvvsWNm9fYXyagcj2ooCRtH2KhcYCOqaYTOPBlAUz0ICRU3Tmc3YqQ3IKjJTfudqslEjpX5nCRcAMwOebkMyrzlWB3BEIscEPIgeONQviMmgA1cyOfucDwXsCBQcfCOQJ4+CLzLAqeo2BciCOpIZSsyNogQyQn8tcD1YAh+wIQCYcmbiqzuWMSQIAf0zxWrVzvXse7dHuYW80INEJogcRrR0PQnxixloDIZZydc65bdZzZrkmYltqGEymwaEMMkA7yGraWNPZfY2HRl7b37RAp2y3AyunZBStVGKvQmWBmTaRlwFR13l8/Eq10HMRWcDIjXKSyWHI9I45MgZyeGsTMHiHr70YcDUSvrqP+DtfuTWditsutWvWU+esHKYWG2uHXntePuv2rV97dcTtjkm6R6/ZQCR1EV4m+Sv3yarGvbMdmvFB6Vkz89EzrmZrFSJaqc1pYdxmGtvtFAsosR2UlV+ODCx2YyhKXxsvToQ+r2reSbdsaYro4CANDws1C/KMBuX9O5dpFwEYcp8HXqSo2g9wOeqJmVCUb8i3ClQF0GSaltKz7N/agwQZgAA1Y6F0F0PbAnBUcG7tXCzn1LCpoMQlpdXk5bl22r6ZYxEIEAEKLfptr2lvJfub6S/kUII6ktESx+9y9PCUwEkpfIAfAkIIQJik8SMA2l2CiUAXTwTcAdgC4KyqFacF4XKL8PxG6jL2EzhTt1IGHwAylUt6fcwvbs52edYC3bAd4YjgdyOGi13OqFzAjwF+u5GsClA62GOR7IwoaR0QpwXLzR5xW
hD3BywGPCkAAuS4aQyZTvYEfjti/Nzb8G+9C7p6G/4L3yjiB1dfC94+weRGvDhETDHh5SQCGS+miJeHBTEl3M65W7qh5up8o6p0yGC8JEjusH4e64VLjpaHK0XsAB9lSphELZIpNzalBPYOiG0gwwY0AANC8lzPBHjpmloyIgpIHDmEQQCJD4RhE+AD4WobsBs9nmwDrrYDLkaPy9FjIMImULlPgwYwHAptiyE1RCWDp1m9RPUpXVPiw5mMp+UxW1lszZB8kFqUj8kei9pfT3ujAUlvtRlftfsUtd+lsnWfjMfqeh8Y5TmKmnZg5JytfW8pVf16++/WVKfki5xx6A+Z+3Wqo9Qei+sAIbl01E13bZ97qceBqHShHr3DxeBxOHNKTjmZa8vZ5lgF5NxFPfsE7L/+Wz8HwGTEOrUZC6hPRYz/qV/5hQdt89d/86sX9FOOIH6c9if/xs829S2Uo5Eq3cy+Zm+0LNp2VLf9hPRVueHnMiRrxfJKqxiz0zB4h23wCN5lid/6HEp/CNuwTu57lfm1lDiCfb46UxoGciRTMydWWrcsW7usax1E6abdyfyq9bUiyX5+woVZy6zc15xzRR5Vgk8O7sR2VMFPHVuVqD4lLCLrzE4ci5Op12/wBCy5nwwYCxMAl3vvyD5wApITmouAuAWJfKZR7SRTskxwyyWI9ggXE2hYKnVrWjCTULP8sCBm5S2ViNXXGCMyt7YE05IpJKccPdEMRtiOmLY38EPAcn0L5z2Gy23JqCiwKPuhheuRcwYnFjCSsgoYgAKK/G5E2G4EiDy9gN9uQBdP5d/lU6SwrYpafsQ016zIwpIRmSOX66XjVE+TXguCfZjiNOdMMyW6TSsJzDHfJ7GyAgQMEPrMqDP/KLhGzleX1ZoRR66AEcqvo69jiG3GWMYMqtRS547nWzWlY+k4kMCGuhVKv53VLMfRuJFBiKGMJgAu5eDHGwBIHu31tDcakDilCJX6gHVQApyvBVirD1nLjKyp7Jyy+2ZJ2v1A40yV6BDQOHYhT0DKTR08leVs5qN25a0pa40caZ2FOjwWSPSDmnK3bRGpHmMZVIFGmrSsDzWFrB1pY4mwpuK4KSCaY56cOCutUM3GaCTz6ejxKz63xRQTfuFmxsuD0rcSwO7IMVq79lZ6WNd/M/NqI8fXyRpZ2pzu12uqvGFmhznGB9NXPgzTWh+14ePm6TqriKN0xNQCTK81YlQj6SQZsphSqfs4BiFcvrPPt1IpAjnsRg9PrkQyd6PH53YDBiK8swu4GgO2GaDIPmYH2AD6rRdq18bTkWx2OQptbNZ1YAekPqA4BT5UZ0TlfHVZpWVlihZyw8NC28pF7UuqNXVK0eqBSW+vAkLsuNEfdUpalK1jkMNMMt5FdXZj7WA+MxcJVgscrdmItr12mhXZBMLMUtAek1wXiTczhuTgF8ZCDhebq1JzwwDcGEHDCFxNoOnd0rXdv/sCWGbsbq+R9telWSJPKgUsRe3L9b6AA55FYnfZC5CZXs5IzJhvF/DMIg3bPeR+9OIEe4dhFyRrMnr4kTLFi8pyAOCyZC8goEOjxzQEcYy3I5wnDBc7hN2IcLEtjRz9O58Xatg7Xwt3+TZ42IEv30UKI24x4HYfcciZkYUTvno7Yx9lnJWsVqod57ml2wIAk/lb/eecjadUxzwAqzUk5bt0LNbyEPMOADl4dlJc7zSjVmtIdBywTQw9Odx6yRovU5Take7hcBnwOHINCLnaSmb17YsBY5AMyVu7AZtAmTVQacyDrwwL5+RZSnnOjkig5OAdwfmx9hqyjUR7s41DTzQkdabTu+OYFbYW8PCmNEZ8zJC8bvZGA5JXtVMT6VEDw5PLrQCee4CTc5kSNtGf0gPjxADquwF6bVmrflUj6gpQqFCrFICUiKtZdc1kCERJrgITQMBMD2TKgAhNU5udyil3B2lqpXKe4hT4TN/Kk1MGJV0iBg7AxjuM3hcKwM3Mpk6mvQ6twID53ETm9
Ls5MvbL6yFZ+AM/+lNNsf7TbThSH7PWXP+V5mAPzYzcx37wx39GFJ+8dBM+xOyoGwriR2k/+OM/U67fbvCN4EJjDKgbq1mlmJIUqWrxqnl+j7MfjD47oqYO7YUBItqn5XIUitbT7Dy8sxuwCwJctsHl58Q1vTN8zgB6p53Xq2JN4W47ylKtXJyBxvpmgD5IUXJyBayU+hAfauf13I09+VpTwskUsnObTUxoqVtl8/Y5S/bzY0dM7Sg73AVF2GWFqLwO547HPK0JmzNwvJliBpCxodTpdQOq1LMn6XweyAHbgGgyxtE7DHlcGKIsa9e1JIcQcn2G7YTtRykyD4M0k9vskOYJ7uIJ0v4aNM+gm+fAMiNc3yLuJ8zXeziikrVwkbIaV0ScIqbrCTwLMImzfKbZUDkHKNk1IAuleAINEnV33sGP1HzmRwINHn70CNsAP3oMO3ENyI+lTmW43EpW5O2noKu34TYZiIxb4OpzUrg+XIA3V5hZevEcYsJ+SbhdauH6IUuIl0xWtONw22vKZyUtTg5r5E8FIffNntwlsFEEVk756S4zDErwsDaX3ATCtBDGILS/hVMBKNPCmHKw1Er46lilan62RuQiv7/aCgi52oYCPiTjmimFObO6Bui1Qalu0jsInTOxZEV6H9nWjCE/3yvnoYxZPgObeS/1aJGAj7kD+qN9euxTCUjWsiQPVVey69GIx9pA1k9wp4DJfWlbnFAG3kZOlY9lDW0NCVCpWPqd/m2zIiol6qldR49xnHMYTLZG1G5y7iR/npCOwIwFIupwWXMuFbrXULIfx3VAmtkh5yRTsjBeTtxkkQDgYiBswojDkvD8kLX9zeRczqt1OLt7QYsq7aV7CK3ko7A5JpBL2DiZaCw4q30rEsDAHox9RM4qZccr1QzKR2U3c8wTJEpdBFCLUz9qhTIr6iDnxwFIiDE1Y0CJ7nc1BZod0XqY/RIbwFHpF35VolsVb2wn+KsxwDuHq7FGLq/GgEBSGzJ6oR3uBgJBwHVDc9Du6pGl0eGybw/adA7vgUjyA7CWkVLvKmdLbMPDQtdSaV9tfJhQatX0GY1cC9itLG9fS1L2x1Jhj/eqgAv5/rTi2BoVrAU91cljk+Hqa31kv4/Ha7mGeZwnBz9FYASwyFgzpFrTpwAcSJg4wSfAO8Y2BPgxiAJXYjg/Ajli7MYrIDHocpLrG2fw4Va6uF8/R1pmUC4m3+6vSwNF5wnLtVz/Ss+iJiPCLPsBABMfAxNp1tFmDccM3DejBw2EYRcQtgFhV92BYSc1I347YnxyibAdsX33KWh3CXryNujpuwJI3v48kh/Bu7eQxkukYYvbJRetZzBiC9iVqlUa5BZaaWquj9aMRK5z1dF9kY8x5jkhplQK0mUBHTA7XyDWbMmaIqZVblvr/+GdZEkGLxnWTaKmRuWwMDwRosnS2SDH0fpMlq5SsyS7qo1Vh0xT1sDLk7F+NuaMqja8tHNuQqUlC4FAfYRWZUp3q4AQTqZeLJXvgJqlFSVCAjlCGC9kbFompOFi/YI92qPdYW80IOkjbpbudIq6tWZr9bl9elejMECroFHXcfo7yyvtjTPNyFPlyGoxbbu8K6CkX00vwwvgiJ9OmY/eZ0Uszb+PWIa8rZkTUpRdaE9L3lZ3nDZ62e+rSy7T3MWRcU4aeekp08GTyGFDXpzsyJgj8Is3VeY35AH8ay4kCv2V/YLryeXJDg0dqz/fMbVdfg9LLYRV+6Tlfm/nWDJaZX9izXJV3nXCPKdSsK3OtU5c4NrF+MO2F4cF3smkOJDDVZa0fXZY8HL66KUf55iaY9NrL8WvqQEhej21RkhqDeT8TbEWpi/ds6t0DI1WDiRRSXIQRyFTfC4GD3IOTzYB5CDAQzOS+bnTpoZb70D753Bxlv4PcSqF5FbZys23oulvMx7TAYmjKCId9iLBGQY48vCXT4W7b8GGro+opWU5J9vL2RDt9j0tte5Iz0XkCgp68HEOdNz1CEWYQ
EhKAkp0Xa798al12Yauc26wp9mR25wh0UyJHEsdpzVyrb1i9HNdTqLbsu6BckG1i6Xg3bsE7xIAQkySpd0OFxLw2VyVTElDq8uARMEKTbdwvCBdvy/9P158FbvtLyBNuaB9Kyp3Wl/ivCtKVynKdVIgMpv3U85m6TNhbcxzw9WBMDiHt14SLi8GyY7k1uXunZ0U7WcgErYjwtd8HdzFk9JbJPmx9BdJmysc2GFaEq7niMjAbQYhN3MsgiGHhcu1UpqWVbDTa2AzrUDNaFrJ3zInc8UenB1pIhRqVYx5TEAqeL2pG+X2MzZziAV4apolGZAzrAEYSMaiOeQxZXs8pqxlV/XVBjdUYEHHGQlsCIVQx9qLweffodacUe1BUo5NwRonkfxFnl/tM4T6fFVg2AIZWS6DYuh+i7/i8njnXcAwBvDmCq+7OUdnhSI+qm0+2nl7owEJcJyReJXI9loERkGKjaRY5/BUxuNUJuUUKNGiyo/ahGLhmpYj1o7oVdBC1fo90Ec1T9cs10FsbZ3ppIPRF7QrRxxAUcBiZswEbIPHfkkAtGiSm2LuU+adSc0nAXm/6x8+lrC9r/3pv/lzDRiUbcj73/QrvvZB6/qBH/0pAMDTbcAmp+E18/DPfOsXX3kfPwrTXhCHhRG1TilHCK3C0UdlNQCRpJty3t6CFoz2heqFvpDlm/SZ3OUmitZGo7ik0fECwDYBW69KNznqnF9HX4GIKmWVzuHTjYANXuCWvTiobhIqFlABSZyRZkPZykAEzEiHW+morZ2whxGcgYkWr8pKcr61z4jkGpGU+4vMLM/krLS0pFnRWu8F3A90POS6S0Y1jyUZlOjn/Tq1OaLaQ4IGa4Gi+1rJEJlo/syMmByQa5QiAyCHKaZ6zV1oE1ban2GZMrVLmyUuoN0iMswcBZgQIVyIKtZ8fSuUqV1C2MqUHXbSIX0DBR0OiAwih7k4l8DEmkVQsL5+jI6EyiW1Jh40Bilev9xhuNzBjdvc30QkhVPYCr1v2En3dRAmHX+53jtzpuDabIOez3PXqJ8nlRZ1n2tV52kApnO7XAN3tLyaBSJ1Py1wMWBCI3mce2d5B4BLsEilhC3YPSf/rw1TK63ayfhPNeCjtWdK5fWulQJXcKD0z3pcKgiRgJTn2Ob7ln5paw/tGFDOGVLO2gMgwCVgcSLwAEig49Ee7VXsjb5z1vqH3O93p9PAJcNwRIOswGQwqjw+R/JEfeU0UOmbINpu7FZyt1++7tf5Y+onGm0aRS7TfwjScTZH+vrldUADaiSEoSlw4/zl85BSWnUa5EupN9H3/fc6CdiBzuVjthmAKsHryjEduGYDiBye7SWt/fIQ8SxTtnqFLt33Gtl01ZFOH1y5pdI4PrjtF8Yve3uHt7YDDkvEzazSxh/K6j9UmxbG8/2S6WVVvlozD1og/lGZJ9fw59c6xAPHoN/2/SEPbIMMg1b8YSDJOqoUrxaPjp6wC7luZKBCwfI8wcUZbr4VOtV+qlKY6ogq0OAFiBOwLNID4rAXcDG19Cw3bpsmhhWQxPIbkK/9J2IEMcvvNpoRGSr4CBvAB3DYirMac3YoP2uRJStiaZSRj8E9nRinXuUpUgdKfi+yvs6hRHGtIh8noYkunMFwoQbWfVHVI6W+AChyzZoR6ZWQwonBNbICjVQi9PuFMRBLTYNzmbYDLCQ1LgpC1Sm050ojpN5t4YcMXHdvAbwg3V7Abfegbb6Wt9cAM4br5+X38/W+NFUM24DxckacIjb7BRxTLnxPWOaI23xubqOAOEvn0oj6pSeM5HB5MWD3zhbj1YCLr9lhuNzg8oufw+7z74is79d+PWh7Cf+1Xw9sLsDjJXj3FhBGHGgrylkHqReJnLCPcv/cTBEzZ5GBmE4CEb0W/fzpzZiiH1sGQHOtynrzfJxpm2w+09/1NKyjwIViF7NcD35JA1DkMOhXpk9T//s1s9LfOt7oXOJdKz/dgw+rvtffZ9Zq5qP5tDneBozkY
KHWjaVkz089dkCeV73vFxY69kAOt/PrLQwDPBa1v672RgMSnTTvXu54oTVQshbhPmnKS7URF00Hr0R57PrOgZG1ubHfF4143Mf4jMNtgYmmevtTdUrK08pxqunfzrmj62IjrJb2oQpeappaX7NCu2EpjkSumyByma5RnfeYZHAUlS5kpRz5vUo4xpTwz/5DX7e6LWt/+K99qYlY9f1idII/3l/gz//dXyz0BJ2U79rmNhCeZAWc/cL32sdPwhZOgAEcaw1HPyxA8od+5KfK86IqSBL5bBV6rCpeu1/ZGTS1U7q7CiZ14ieHTJdwpQ5kF3yhYV0M4jBcDgQXJ9D+tnS9LpmPnN1IHAsnxIVBIh0ckZZZXm+vkZYZ6eYF+PZadohjBRraOwIZkBwyIJkqIEkhtwXN64TtCQBUCldW05rzva9SrPpeAcnCtdkgcOyM2R4PXAIWp4MpwHF0uCyTKuiQHiMuR3HRRXHrWKEgqY+8r21fAUfsHN5Tr71xSk1hNaBywoQYkmQlHOVMmxwMOVs/V/dNT5F3ADEQKAGeMPgRadjJ6YgzaHuZ1bq2II7w2xuE7YgUWWo7ckd3AFk5yyHF3PsnJri9A+Vid4ALuJuTmWcg1K2RXKkfCdsBw+UGw+VWMiMXW7jtJejiicgYj1vJjIwXSMMWyJK+MQ8DCmqVklXos6kFI3fVc3rjqFu7r0rWufXH1jM/W294VwbO1uWo2Qa9q1X4qGDKZtUVhGj2o8mWGAU+zbaqzC9w7AvYx8Eewppcdw9ENBNZlDBRe7Klbp1aD0pO/R0A3EwJj/ZoD7I3GpCcsj4N29spIGKdFFnu2MEipxmG7OAaYFLXk7oJen1b8vfxtpr+IDrodBmWHpT0ESJZKK+ATYOnpIocuSBdf5UkFdsbQQBCD1bWKBypG7GagRH2mtTFEqqDEVNqFFfqsVUgososWnD7bJHos3a45gS8zMWsAMEnkUm29SKaGbmvFUfVnHBOMnHNcpKwcdQV4ut27r0ZAMAvf+cC7+wGjJ5wMSScb/f4ydq0MG5ZVKgAYAwyA2vncm0K+GGYZimso8Ldc95fU0uDaAAGVZUay8fWzuhKuSInReda9+HiJIXmh2u4ZQIdXgDTHnzzvHS45mtRTUpLpVtpwztVXEIYQJtds6/xxVcxP3suvR/y+dy8/QRuewkQSW8L76VmxHsBNJmu5YZR6kg2W1E8GjfiOJIX59EPSMMWkaRg/Tbz2vdLBiIpNxZkESooBccr0Z6+h5EV0ljra3TKZm6zxZ5cHjZqhsRaSmiAUiyv7Xg/eFcyI5GHAkJ2SkVbo+QVyp7cq5uivOWOnGAJdgBgxmFxjUO5sPDqgZrtsVYBiYCXITkkMBZ2uBgvAEfgxPBxgh9GpHeuwTdbbDjCEWG52YPGAJ4WbN7eS88QFgUuzZwAAM8RcZLPl730F+GZy/dagzJejvAjYfvODhfvXiBcbnH19V+LsB2x+yWfL40Ow+e/AckP4It3sorWJQ7JIy4Jt0sFtJPJjGgWW2m0GiQ6N+72tLr7AhCtIwHylJev51H94Mr93Ef/134HrMzXZj7onwf9zB6DBfH2vtdMh0rm9+BDehHp+o1gTN4vu6eFGpYMDQvHWY41ANIvF1nWrcBSzl97TqxQzsyp3Nf7E01zH+3R7rI3GpD0A9xDCtmBdXCgk9PaYKjdpz0sONHlqyPt0a6jzY7kfe0AyKnleiBylylPWKOHlBzYRjSd7KulKWhGI3HKvV3ytgufu4KSHu8ALZ0CWI/QrA2cVupRo7FzN2mwoYzMWTLS8rmVDvDWNuAqy89KbUMqzkFAnShu5oglptKc7j6mPSN6i0kVnSoYUcUTLIQDNGNz/34g3/T2LtMBpEbmdbZpkcLhm+zoXOSsjhYR698fhm1NQec+cgGkvdnspILDi0FofVtPeGsbSuft0ef+E0GWK/UfBFG3igvcIWc8plv5bNqDX76PNE+Yn78Hvn4Bvn6O/XvPE
ecFh/dfgCeRbI3TUtL02s+BhoDx6SU2b19Jx+uLCzjymJ89x+H9l/m3uYN2ZGzejgI4RkYiEiBDJFKyyyQZlzAITWvcwg2j0LN8pmqFDVIYEWnEIWdEbhcuRcfat+PltBRZVlVJs4CvOFwOhVoC1MjuEUDpnZeV8auIIFFCn2npVQttxmYfuSjKWeW00neJCH5oe0FYCWdrKuGrNC99X//VfdL5Rumwsj86NgktVs6RDJR9QMtBBT+k3qnSfoFAhHHYAryAhz0cBdCTd0pGbQQwXIokcGLGfL1HnKQniTYw1EaHcZaeJTwvucFhKqDF2ng1wA8B49MLbN6+Qrjc4errRcbXv/tF0Fvvgi6egrdvIQ2b/LrFlEjuoQw6evqfvT4VPKqSodYo1ch7ez3ac3YKlMg10LMK2Bmmz4IApwGHBSlrbIuyO2UKsBTgTO/sGtT2fb4qha+CD/l9FZaxVCwtUC+1Z2AgSdbTLa1YSPJZqMJJh3dOUtMR03p9rT3vWh+i2SsbIJRny9ZNteeKcgpzyHOsBnwCOezfgBTJI2Xr9bQ3GpD09ipgpP59DEYsCCh0H5dBSRfNaeo/VtYNPByEnPr+LsqWLV4sToVRIpmZ4dmDnAAW2/mYksj7ApIxOZcZWQMiPQBp63wqAAFqxEUnLn3fp88VpOikdkQhyc/5HPmo7sQ74Qyr7Cdzwu/89ocVsG9zYys9RqCLlJlrtLCIB+hxHA3iDPzQT/zs0eR3MUifCuWlf9vXvfWgffw47f/+X//N7OgRPHFt8pdPyiZQkbAEgH/tT/0kdqPHv/1bvuWVt3k1BlyNojZzweloolTqEVBpfNoNXeo/RJp3m0HHLtTCc5XeddMNHEcpNJ9uAV5A8y3SPIEzpSpNe8QXXwWWGdNXv4q4nzA9v8H+vWeI84L5xU0TsSbvpMmcdxgupbu1ZkD8EDAyg4aAFFkAiieoy8HzgunFDWiYMFwKjSuRF4BCHnTxFG4YhFIzjLnYeJPrRUaAgrz6EUusnbLlvTjRewNKqgpSakQJxPdI4JwxUPEA74QqqQEPdjUAotZHq1eLqtlhTZrV/q6XYlU60Go0O4sV2IJibXKJFRmOXsa5qB2tgShOiJmaNTuGypLXYweQ1ZzmAjgqeNMsENiBctd3rVGJjhDCKHQoR6CLK5Fm1eL3wx4775FixPBEmiZaIKL3FUcGT0vOngho4al2iFcLl1uQpwqQtxv4d79YGx4+eQcpbMHbJ3IvjReYGDjkcXbhVOh/C9fMlV6fAkbYZEjMfQUcMxgsrU67tysrYY6co/H5nHLKylnubMDnHOi422eok66dl21/L6tmqUBE69HWQIctPtf3uo4CSByAOJXmg0Xiu++krpLhWS3PF0ELB2E/GvpV+ad9hSol6xQQ6Wv06nbzdeEc7FukoJ+9e6RsPdor26cKkKzZubRvGSAMEDmVibAKW7ZzbOkZwlrYez/QAbTAwy7X0MZW9mdtUu/T0TZ1DajzJsfHyYFZUq7b4Jsu68Lsquvqa0VKwV+qnG5d3FKv6n7B/LYd2OyEpWBD6VTWKdK/pW4gnwON0JIczxwZzw82oovabTlykaF9lc7hV5tQwI7lF9saktJpnhmHBbkBWDyaKAfvQHl/LbgSeUyRDn1TVNyVkjWGdgbajaEA9mmJiByx+4DZkq9/usHV6M0E3j4rWoQJVJdTqQ+ihJULRuMeWCap9bjZwy2HBnSkwy14f1P6Q8Tn7xegsdxOWPaHDDoELEwvZ8zXMw7PD4UiE6fqGGqTOUcO23e2GHYBm6cbbN+9gR8Ctu++hbAb4bcjwuUWKQpA4Vm6d9++9yxHsS9BQ8D2C9K/hC6fwmUgQldvAyEgDRcCSIat1CT4gEhjodPcLpL9uF0kmv1iiriZpUeH0hyluaWcP9vnxRNAbOqnUq23IZIO1hakA/o5yjoAFLAiTmQq2Vx1cOaONmo5/raZaUxonkn9nLJjJ1oAHr3S0Snr6/30m
NfGCwEg+lfOxuqU0GfdDaVtyFmUgfLEketOmCXDID/cImwYacgyv+MV/O4KdPtu6VsCZgzLVAbX1DfGBHItESNFqSmytUx5x4QGOAxSJ7KTzuvuyTsABfD2CTjfT8twIVS/3G19ikL3S0g4LFVpT8dzDf5okGCJtT+SHc/XwEh51Sw9uwyGK12anFBlyzW7o5ftXeI3a8ImJfhXgIcAINsJXjOCA1GZSxVkDIRG5MA7Gftd3m9CzYaIFLRcTzeLJHTzWVcTJjsorlsKQ5EMR9gAjhCGLaIjgHJNkTk0pRwKiGxByFpg0AYET1HZyLWZ0un2o5d8/6BGXpqCftzbfLTz9kYDEr8CIO6bJenrRT7wvpwBIudAiF3OLtt8Zn5zl3KHNY0U9/UllFypK/E5yifcVKFO2FNKKfXsiwaMJJMaLpNOp+suvzkGNZarXl/5CIhUCeYEmzIv+2ic0t/7j37Tvc/Pfe03/vKvOfrsh3/yy8XRWJP2/e/+9i8cZUgAyCSxQkuLJMf/Gx8oE/xx2n/0F/5OoWdZVRxPtXYEEF6+dirW5+Ji9Ph//eW/h6+5GPHFJ5vVc9rbT733AksU1Z6r3PVcnERXeuRUyN1mDu3Ev/EObroB5gV0eAG3zHDzjTSou73Gkus++OY50jwj7a8RX77IFCyhUU0vrhH3E5bcTTtOjMPzA5bbBdP1hMNzocYoZ1+Nc5icvMOyX0CeJIOS+0rwPCN6Ag0BtKWjArcUGRgE3NAAUd1SmpbWjAw5G+KHNjNCoWQGNDMi9JrapK4UH+szt6JSJtcWAFXHrS7j8onXsy5XQzOTyE4Xpyq/qt/p2HSUze23bcCI/fuUKXVXMyUAmj5SusyarQmO2PFGDzUWEAXJPuMYZEkPjJQzSLnLPEtTvZQckktZXMWVcTQ6Bx9GuAWSKSES53QHcUw7UYRTlmL+bsn0PxU7KAda649E0vcSGEak8Uqc3PEKaZBapCmrZ01RsyK1SF0DSHNMxYnV7wq9Ta+jvYbdpHIKMNYMf82iyT3jcoPD9eO/KxPS32t6varVeeacMAyAQtVzaIOczqHUpHnKfb1Q+3u5xHAZWLo4C+CIU1bhWwoQcRmgWEv5bwcAIav5c0ByAlyIqMyz5TiQJDvSZagUjNRrZ7JdJmhm6ZFlvaTzec2Uztx7DI/2aPezNxqQrNl9ai2aDERH0zpla0Bn7Td3FaOf2o9+mVMdttc+X5MntFK5ZUDJ3GYdfH2mn5FzGDhzrwlZwm+dFlZSv6gZkTW61V2NCRWEaJRTMwYCSOqy/bZlnfb9/QHah2lrhe7W9pFzobBGnVOVJTWRZeXrS8r7jlDfJ2xjIEwLH8mk9jQX7dmxDR5Xo2+kNt/ZDfjc7n6F+l/cCTd6yU4rTTdCrZpvQdNtjSKqtO4ivR3StF9XpGLGfHtdPuNpxrKfsNzswZExPb+W2o8MOlJkHJ6L1Op8PWPZzyKversgccL+2QE8cyOzWgLdLqsYcUKIoUlrckxCtSEWoJGL3v12A5CHzykyGgKGy23uln0Bt8nc/sunkhXZPUGiAB53EiUNW3Ei/YgDS0O467kWsO9zkfHLg8ix3sxcC495vXePlUw+N0Yqp99mkuv7E1StbKfoNv0Y0jes4w6YSH+FdvwTrCzj3MY0Jjs1T/TR8haM5P1Zum2sBJwsPUczRUyUvxc5oiFX6cWU4BzDs8PCjBg8fLjAOF7k+31fHdani9B4eupOZ05OoPyRGM3R6nnIqmtac5RyRk1B67QkxJxB0zqRhSUDonUi+qrBJaAKjpzKdPfn++gacNskUW2tN8kpO7XeU2aVvci5LiMDDOUZ0AxIpWhpZkQDJZ5y80myzVEhohiJ4fYHua5xkmxtSlKflptmqjqfyoAnZqjyHogkKJF7DmGzA+II50dRZsv1JBi2GCggJpfZG8hUbH2WasG6Zkaamh9um8r2vZzK9YhyvhYzp83z6z2PAdJ352NvjPhhRsA/pfZGA
xI64zSvLdvbGhh5VQf3LiBy6l5cBSoPuHH/qV/5hXst9yf/xs+2RaqZ8w2QUcRhzNwW5PWma9DBTKMr8lk6ch56s86EVWFRIGIpXyXi7lU1Bw3X/F/8x3/pvY79o7Df8g9+/uz3sykO1sJ7jbp6J3UN9kblhI9UneSHfuJnyyRzM8d7Z5L+6P/4peKs+kxJ7OVTFYhIt3KhLgTv8NYm4AtXGzjUqPzVRuo4fu7ZNd5ZnsHtn8N95e9j+dm/JxSTeYLbbDH8il8DuvpawBFGRwAvoP0LmchvnklheYxIe+nXwLfXSMsk4OMgYGK+2RsuvRb6SsQ4zkvh1c83eflroV3FmTG9rBkPS8XiyJIFYenxMLH8Y7TBgME8OmNkuNg+SykykhdFrcQsk6M6GUEA23A55detULTGLejJO6CLJ8D2EpxpWWm4EEUtlWJluZciq9KR0Ge0cP1mjqVepKogHfdwuStIo/eEzUzJOTDRZa7c++ModF3mlK2NJ3cpNdXnyjiz3Th8Spr41DHrPd8ru6mV33E+J1lha/AkWWYnqRTJSgMKSsgREoAlAknH4QXwpBTbAD9eFWqP9rRxfcT8RBfofrmybJaCTkCpA4mcMM1CMy2ZEK7qhlpnpOIiQB2zzjmvNsq+FmyyTm4PGqxs+H0aI1p7SCNMDaoAMt9wBAZ/en5WWh+R9gHJtKwMSKpkr0NwAOJUgygqER7nAkTS/kbGsWkv6nx9Foy5jA+JSGSYtREqcoZkyQ1PNatCAQSUDu1qWvOp87dmRmJCAyotoARav0rncECYC5zP3ZCo3BuP9mgPtTcakJyyc+DjY9n+SlbETtrW1virOkGfo2o9BLSsbVOpBDFxid7NLJPl7GSfZkITNZLjSGYgO50hmTvn2mY0dPKyERlpMiWa/5RpFkM3x1oaxetuT8aAwdfeKDNVDvWp++AuLvQnZZUayaUWRCfk2j04AxJyeDJmVavgcZGbCGpTuK0naSjoGO76JdxyQNpfC1Vq2mN5/yuiJDTPoCdvAxCaUuKIJS/HN88RX77EkgvKeV4wPb/Gsp8KrcoqCyXz7PRRqsSSrdAi9BQlKmzfy7oEjCROmX8M7BAxZpqOZgH0edmMHjRQqRlRedXxUno9jE8u4HcjNm8/QdiOGC6zSlYYhYpFHnT5FIGjAJEMSNyTdySiPeyQslRsCpvMGxmxpFpkLM4kcoYkGlpNW+eldRf6/BX1py4zsuacrY1fgD7vr9Z0dI3q2WdGdLmefmLtXCZ8PWNtwM+KMzuvRViKteezFmajRI61hg8Aoq9AwDuH6Ak+U+JU3tU2WdTfOffBp2w9TTPHorIkdCpxSFPOkOyzwEH72oqH9L2frFVgsvJZB1pOvS7ddXgIyLjPsjawEgwo8STzkNKEFWg3v3Vtt3RVZROlvtx0My1wszZNNb2K4gTMk4x9yyz9iDQrsswSbFmmZnta8wPyIGakMIDyXjkA8GOmbkXJqiSGc9SAEbVknyFu+/kcX0esfl7/tkGAem882qM91N5oQGKjGtZe1Wm9q2HTXdu5CyPUiEL7eU+zKsuvULF0ORt5vK9ZDjYAzEjwrBM1myK9DAhMoVo9hjUHQSenlneq+99HVmxBty2cLQolJj2+NtHZ/Xhd7WrjccFeFM1claqdmQ24a3/zUXJvvTsuGL6PcUKRz6UMmFRK1zuHq9GX5oFvbQfJkBj5Y6UsXAQn0cBlgrs55IaCz4DDLaKqVz1/Hy++9HMZWPztIptLngQM5B4d8/Uey+2CZb/g5c9dg2fGzS/e4sV+wcQJ17FVavMOGLKjd6W0sgwYgONiQwUep6z0cRiG5vfOO/hR9jdsA8I2wI8e23e28CNhfHKBkIvXx6eXRXLVbzdwF08EdIQB9OQd4fcrrz93WU8+gH1XL7IS6dYidpW/3i8RNzMbOdZaCG7vD629oBX9gbvpMcYpoS4Yk73yc7Stdl3H4wxQo7c2qLGmALQmHav7E7qi5AaYs
Xkm8y0RV7ZzilKq25J9kPezSwWgLFmUY+aEgRhEVSLVjrWq0GQL6ntVx7vGv5NyuQaMambM1n9oTYiNkM9NtLxdh56jV7FzIMR+Vt/zUS+Zh4COc9+NwRd/omSARwBWuUZ/kwEmmYy3SDcLqBkyVcslFuW+5SC0uwxE0u1LyYTcXoNvXkj9mmZ4D/tSXxazEIuOgTQECVaEEeAIF8Zcu5QbDYRFsiW8AOwzre80LcnWetYsSSsSEe0zd+Kes5lPTsDyBvQheZT9fT3tjQYkzh03rnqd7Hz0sFpLd1g3TsBv+7ZX79jdKl+Z91Bgk+tJUgIgk6M6ElqoKeup6+gBzto2+uiKSjlaW2sECVj6Rf1Q1/M6X3dAnAl2CZQ778Z0XMz+Udmf/ps/d7StRjwgAT/woz+FOfcOAOrE/rWXI77mYsScqV3aoX7whBhEclezIAORAK8MTnaDXMdtkN94B9HQ5xl0/UI41NMt3PQSmA6Iz96TAvKv/gKm934R84sbHN5/WQrHl06tRaVgtZ5jvl2w/+oBy37B+7czni+MiRNedpQB7UitWZ6RHOIhYZwzv3/0JXNSwEX3N1CBCOVX5135XsGNKGpR7nwdMOwCxqe73H/kAmG7KTUhfgzwV1dwYQTtLkHbS6FhbC/gwoi0vZKop9KxFHw4hxQEqHCSCHTtrlw5+5yv6ymcqzVkp2hOa/aQGEifJYn577LtPvp9FIE9riPpC6XP72ubDWnGlu67Oq4Aeg7uG/RonHMd87M6FHINi5wMDUawZEvYFaqLdeoGzjLajrvt1Pfnuolbha/e7Lhtx4S2hi+Z9xWAnAIQR9t/wE3Sr8NmRGzvmBaYnN6PU1S8u/ZxWmJp7LqY9Q6kc1Z7H/dSwlVlTV4dIBmKlIpCVilOX2bJgmg2ZM5U03kqvWMAFJlmx4QU8z3hJayU5twYVcUK1pS4Et/1OMs6c7DqXNDqWMzi0R7tw7c3GpCsqWytWVuYWN+fUoE6vZ71bXkTHSCHI4f/7DrptIb6Wobhg5hdXxNZjBUUWN9Esya6n6fWB7T0iX6CBmoPkf57lfLV5lKN1GguBtRJUuzYuXgdbRMcAlGJpt7MjGf7GYfI5Rg1QqvOyIcFsux1sZkZ2+HaZmM8OXxuN2AbPN7ZBbyzGzDHhK/eSr3F1SbgYiDsgscuU7CuRuk1svMO7vBS+nfsn8vr9BLp9qVMsre5xmMvxeTLz/99PP+pLyNOsyhYZXnb6eWMZb/g8PyAFBOm6wnLbUScI/YZmOikP2V6ic2GPMuF5VrTYe+xnacCSm5zxuYqELb5syf53gvbAPIOwXuQpwJKJPPh4Qdf3pN3CNsgxegjIWxHOC+vNASR8d1uQGOo4OPiojQwpJ10YKfLp+JYbC/gtheAH8HjJTiMSOOlABFrmfp2YIfrfcziEjmwAKH4LJwpWxrlNE68SvSq6hQ5aaamWcmHjolWPcvWjrRF7RkdAfnBTY0D3m+vByJ6O6/Rg85mKbqsSK+s2GZIXAkmK/DVInlyHRgw4OiUc64O78xtsbSlnd5VX1i3VyPUNjtzFoyYTE1vPcgQeW5ZsfL/+0aSdzn1tsFk/739bK0uUW3hZLbbAhHdR7vMqf1a27/1fa6d0lUaWr4PiF1NyUwCKAM7eKeqiBDFNCQwXOnLVZJwuebHca4XiTP4cCuS4tNemqreiMLf4f0XSJEL7RSoSmk0DGVsCZHhh7lkSVIYkDY7GUMSF/Wtc7Z2yryTR9Q7eQ580nvJpg3P36ivQs/8pOwxQ/J62hsNSM7ZqXHPOtx9yv5V7IhmlR4YRTwzoNqJ9yH2gz/+M83k29SimMkNMAM62Sgdt1kTh9L9to8o3seOGiLm18FMYv157MGRptJfdyCi9q1ffKv5+y/9vfeOjrE/h945/In/6e+DHPDP/cMPa954ar1WSlkjni3AQ85qeOwG6Ya+DR7eMS4Gj5gSnozy3cYTrsZcL+KTUBBur5ti87TMiM/eK
xK6fCN9E9I8IUXGi5/+eXz1f/1pLLcLDs+nUiSuNRsim5tweH7AHBNuI+N5dpK8c7kUuNIMbjMgeZmzIxMnzN2t6VkyfBNrnYeAGYJkTwDJiJCv2RKfa2U0K6LZDpfpWAJIBtAYihqW84ThYievWR2rdGPPWRDtqi6AxJdsCMZtVTvaXElWZLzAkr1khzquaPR6YuH712agsozKq/bPus+OdRN0aLIjuqVju29klFNLElkbD7XeZlVeuAsYrQUxzpl19tcyImvvY9Jx5fh9T5Vq9tU4xecc45i7tsvxCCBb67lirRf3sP96p7wcS1cPYT9b2+/DwkfAY+oAySnrwYbPYiNCe2odLyuCoWaBic1I1N/0wKge9137qNtaU+qy+xI5AdrUtcvM9PujfxeJ9nx96NQpssBA+8XMuXcMs9SNLBOwzIj7A3haELPCX5xUqrmuw7EXefBM5/IckZjy+rriQ922ETrQ3eTub2tFmhoiyiDPrgCUci5P+ExvEhh5tNfX3mhAQu4M8DgT0V8DJdqBvV3/+iR2l50ax++aVNf1/18dMJWfcl8IuqKcwVVFhpNovC9IR6CGz5yGAji47yOiDRTXopnt3zZqqM6SbUTZc7rfFLvJUogbT8WhtLbWp+VV7flhWVW4ATRiShiyEsw2kFCvRqkD0QL1jff43C5k9SyHXSCEuAddvwe3zKD9M+E9768RX7yPtEzg6xdIy4T48iWm5zdlgk0xYtlP4GnB/ivPcfOLt4hTxHw9Z1WriDhxU0RuqSVbdXqaZ1DeX2bA8NZAhgfdng/NjhCAnZf3FxuhVIWdx+6dLWiQBoY0+Ka7utKyJONRsx9+HET9qmRDxtwt/aIWpoehvAf5XLQuvR+Sl4ZmKWyQHBWFrOQH8Jgb0S2MOda+L0B9XvZLKs309PLe5AiyFZ6wsqw282CpSbZHB6E+v9b5WBsf1dZ6jOjKVIWruEBNpqRbz0pWZK0WrS3IX4mCr3xW9i3p/uq+t8usrofaZYdSUyBfTOAj4HKfCL793jrlLdBpswTWOe/X33ec1896MLBGeTq1/VNWay+qIqMv/9i8p6Pf6PGey5b0dt/z2F8H+7cFIv379jOpvVgM+NE6upkTiBN8FJU6ThLMARJmlxByb5rkCE4bFvoAxFmee45Q6V5HXorQzTlak6J13sMR5axtbuiXf48gRe6g3KFdt5mV1FIOWgAtW8NBdDDA2iMnZy6dvA6exC9igJP2KMtjwR1JmI9TQOjRPl32ZgMSOt0H4tTya6CkfH9mcrtPWr23c/SHfoDtf9//9iF0HksJAtCoXliAcLT9ik4AZMqCAjcd1NwKqCB3xKntwcgagCCH0ptjLZtj08enFFzeFFMqxCaQcLZZikp1opPi0Q+nGPBF7jOhk6wqYildRIuWn4wBT7ehFNMKRU6K0XeB8DU7L8Xnt8/gXtyADi8Qv/y/gac9pq/+goCR/QHz9b5Qr1KMmJ7fFCldVbyaMvjQRoI2GyKNAuXYlwzclJYF1CxG5f3X91oAXJ3Z4/MxdJkP8g6bpxuEbcB4NWD7zlYK0S+2pYDUEeXeIGMGJBuhXW1HDBdb0ChF527cwm22pf5DsyC02SGRF6dAHQQKMuF76f0AR0h+kLqQYYuYnaD9lCV7YzLZj5aCoz0hAEBEPROup4hnB4muWlU8PS9HSn3Ovnf18SeUIMapiOiRg0iApW7ljZt0icvUpw6Y2HWWDOpxcMN+f87uKr5fGxPXTOcHC0o8UBy0mBIGL/SnMTtvwHpUHTjOApyiJa1lQU7967fXgxALRs5lStbWdeq9XT9QAckYqHy+Me891eapaxmLu6zfj+Pzd3pe7cFI/96CkmC+l1dGZCoZmdFnQQLjjcvYQ5gjw4EQs1QzZSnd4AjJOcARXH7vdPDtgIfzBGJqHot1cCJjk1TUZ1CSAU4y24OXMYczPZJTDVz05h0yEMmrhctzkStjgYwPKnrRji1H+/gGZEuco4+/D
8kJae5Hq/ZGAxJrd2UwSqSP7ldcbNd3n/FzLeKmdkpz/S4Q0q/jB370p05mbXT7TXr6jBN/n2Oy9RynCvSB41qXaJzrFpy0nGZO6AVMiq1RuLQAl18xhfDn/+4vwjvgN/wDd3cJ/7Dtn/nWL5b3f/Jv/Gw7+Zlz8mF0mp+Nk2PvB1sn5MkheIetl6J7UcoiXA5Cy9p6B9o/h1sOoNtnoOka8cVXpf+HdjPfHyTzMS+5z0dEnJeGaqCmlCitwUga3WeZ4HhMSDEhbGVI0soJ7XpOnpqi8kKpWrmRVSFLv/OjF/WrnAVxnjBeDgjbQQDG5RY0BoTtpgKRUYrTKb+G7QjaSoZD6j4IdPG0gBC3vYTzvmQ82I+Nc2B7PwhIyWBFO6oncZIXzgpZScDIVO71fL5SpVvoPTRzBDPw7DDjRjvD63jgXBnz2vFlvbbulNO/1hjNrqO/j0tmhG0fEpR9W1fOq/twLityztrl8nZLTQjurOlbs2YsIt0pBSqppo07f+MUGJkWPgtENCpvX5vl8vnpleCKEAOpHO06IFmjoZ3L7hyd+4iyfpthUFACSMZEwYpdl/1NL+lrzRMhMjfLPzTyftRIsTsHp5az+7xwpdsBKBk2zgETSlKTRy4hsCs0rpgfjiFnLFIY4OYAjJAeIsySRV0uQbSHn5dSY9DXGuhYFC63GC53JeDRBEM2OwluhJp9jUnpmzXrmFI7hhydM1eDOiWLSnWOt5Tt111Y5tHePHujAUlf1H5qvLKRrtg55U0UzFgTPbzHQHhKMWatSVS/H+f02uv+5P1HbbC3tq9H+3UCDAE1O3HXcjb6eVyI2h5rnxHpj63vcRC5Rlx0Ir3LyLWSmPe1XsL4kzK972ZmOY7u3vigdjvFxgHwvurjk0MBH1ejx1vbgE0gvLP12HiHqwChZc17+JuvgA+3iO99GfOL95H211ieP0OcMtd5lsaCKscr2RAuIAIQIAEvTqFSoRRMaN1IYq7fkwCPnj4FAH7Mr0MoQGGtULAqX8nwpgXmWtuhBaLaGd1l2kN5HYRqBaL6ursU+tW4lXqPTLdCGMH6XgGGgo/c9E6vq75PSRoppgTpB5QdzsXI9nIC9guXrtiH3Efk2WHBHBlXm4DPbQfsI+P5fmkajAL1+bgYfCN129r5AANwmuLZK+wBAqjkHpNGafKsSQDBZlb78bTPrtr1y2f3fzDs2DWfWa4H6sBxgKfdzbpM36+Es/AGacFzSo2jrvtVAUYsIEPrOCzo0PdFAW8RGWrmVJ4tC0hSSiUq7TJroIBxoxpnAUt/vEB3zvP6dbu6neaMOAevNRiBstS3w2705bUHJv25XwNK9TMqGZZpYWxy4XmfPTpn50DJ2t/N+TDbmBYBR5oVIZcbizoUelMghynKc72nhCEBYbzIJ4vAuRkiXb2NFIaS5cAyw4VB+pDkpq4ASsNUGodC+aTdZW6Q+jYQBvgn74CHnQRCtk+l9mxzhSVJFvWQxxKRca5BD061l9hafK+vrQJVOmc+oPZcvWGMBec9yJ8Kh35023y08/ZGAxJrDwme3DdLosv29tDC7n4yt7Y22Z76rO8H0qt7nVrHGhDp7RxHXKOfSus4da6PaWjHQGvNONM8OG/nocpp97XXAYycMgviHmL/5v/3f2mih2u8bFXwUnrY4Ck3LhQnQmpGpJ7EHZ4X7XytESn/pj3itEhGJIORNSMvMpWaEk+R4UdCii5TDmrmQtdhAYsjh+1bG9HdN/SpwqHOheSyjmF1H/Q3FXxk6tXFhQCPHGWUk1Y7pBcKxDAWJwBEQsHSuo9hJ5ztsDVRSQEiMRngwamhYKQcXa3ApP6tHbFTUoc2Nw5laa55M+trxGFhDJ4yUOHceb2qqHnnEJmz3HQ64nzrmHakhHVPO6Vw1VC2uoxJ48AcjRN3r/vkvpwYx+8c3u0CXWZnzWwG3NLYNGOrjSW5yx4c7+96ZsQ6vgpcLCiIsYKSlO+Tdg7LWaDkkNgdZQ49p
J7Akav35EpMp4CPDvDoZ9a043fJwLkus6JBEQMctD5Dz11PnertGKQ83M6pbdni/1PbWDhJSxLIc6r1FNHMW+xyXQw7EFJtOJkcvA8AL3A+yEMfgmQ3lhluyfK9MUqPJvLSoV3NBEVos6s1auMWCIOMP36UMYg8EMYCPBphC9QxR+2uZ765zxWg5GutQbVHe7QP0z41gOSUnXpozj1Y9x37mqLPbn3euXs1ujsHBHrrQYkdUEqzwy61aqlcpxRoZN31fewSuv1Arcu2lI8KuloVr+PjWOu4XjjdZkc+DBW0Zrsf8vpe1WbW2pHqEZBJlT/Ent3Ubr5jIOzGAE8OT7YBYxDVrCdjQPC1d8g7uwGbIIXqKt97EW/hbjM9Kxes87P3RLr3sJdiTAidwHuRuLWAJEWG345NxkQ+l9/xyo0gNCxfgAuNAT6DEC0YF2BAtYATuWOx9zXC2BvHyq8GirKV816aDeokH2rxJ4Cm1iNlYML5sxg28nmu/+CU61xYwENcEhJiud/ZUKzKbmmmJFWgElkcO0752clR85mlU7b2iNCGdYBw1w8L472bCXNMeO9mwpSzKZETLkaPq20AQOCgyk443hGgOKWnAjRlHPF5nGJxwuLK863jBmfpITqngPEB7BRoeGidGfUBnUZpsB0/62/0nc2mpGb5vknjKWrSyToRAwAUjGiWJCqAX043qiPnckZEnjvnHGJuxOjIlaxJomPgYrfLGfj0QMiaW1jWGQmcgwkKIKaFMQbCFAhjoFKnoUXv2vdjDQxUqpmHrbUBjutwrJ0CNafsnExxr84FoNRjzDFhIMn87aP0lRnycSWImIRnBwfGNowImwAG4MIM5xzcMIM2O9DFE6RlBuU+JNJTxDxUGhzJAhm0u8x9iS6QyIPHC6RhhxRGRBIwclgYU6yUz5SzJQkind00ukxoGyHeYw6yvo6ck4cHaR/t0dbsUwNI1rME9/utBSUfIBDzsVlxAk5qDrYTqxb+KhBZpaAZR+Oc1OWr7Oepovw1QNNw1E9cjA+6fyrBO3iHX/uN77zyel7VmFPuv9LSYQBpWLjvaBz/5j/5D55c102mZ0VOeLINZaL3JAXq2yDZkE0gvLUZMHiHJ6P8vQsOVwMB2jV9meCml1Unf38tjbuWuUyStVFgAExyIkUGjaHSDToAYvnRCkBqncZQshNu3IpaVY4AqiqVbFIzGLWQswEkGTQljqZ41Avw0NqNnMlg8lL4CdT6DqVZkZflqS6/pMrHnqeWYqUgvPjrqNHj5roXQFI53FprZZ2E/SK9auZYm9YVkYJSlM3YL/L6Yr/kyHq9F3ajF/oQS3O3tdoKcqeBSPk+3Q3kj+rdYv39fZZf3/ZaVLv+fu37Y+GQ09vTHiOV8ScHe8557UVRSo1KmUCcgDF2q1H/PjvS7mv7mQIAmyURcJAzbyk1WQxriRxcciB2SAQ4SvCQgmkCECEgxSZIFJisra/PylhFW0cpZ2USUki1d8+KMx+Noz8GwrTEfJ6Oe5j0f9ui+L43ynom6o7o/5msibxfpwRLxlFoe0q5lWdYMpZDlONZYkIiYGEndWCeEIatjCuJgTgBFOAoSJ+ScQtwlMAPM7To3ZEXda48JnHYFklwEcPYIIUtliQUzwRR4FOwK/QsFJrWKTByzuw4UM4BrQsEvSnyv499SF5P+9QAEuCjSSHaCdtSHU4tp4ufUpXqH9ymLkMBhokstoXoOqAf72cfgO6XIQNG/IksS/v7dmAhs7+WKma/P86saIrbNZ/Z+g8LLvrr1xevtwBmdbfvtNro8dV+/2EYWQfGZJZ6mt19aQoaYRwD4Wo7YBMITzYBAzlcjQFXo3RTv9p46SEyZKCiTQ3jDDcfZJKcp5oNCYPIUmbaEgCEsWvUp46/AQPWGmCgcpea9Ri3pXajRAHHrfxms8uZigwmAJmI5QTK33ZDGlXM3ZHtd5rt6BWvStG5Zki8kcvMDQMXTmDoBI9SbK5Rx5oRrNftJ
PWx4ei3mUTrKNT39Teijpb7AjEws2QkKs1HOk2PgXAxSi+ZwQs9j1ylW5bLdo9bqx/LtD5MvkvNa+zu3aNjv8cDe67Q2DtngE4da/pxrh8H79vcUaxSzaqUsd1+e6zcnZems/lK5mPtWCdYBz3322EHkAALApBSzmSwAACgplMduXVQwlLbInQ9hwgudC4fKANmqV+jlXWcoogpIKqWsx5Bal0cOQE75LBHO46dKoIfg1w3T4S17MTxOKgAJu+rySjY2rn+s3N2LI1cqWV2HRIUSCWbCTA2SUDJTA5zJACMmTwYCYETdPrzWcACiSES35PQR5nhhrn2D0ncjkMq6+tH+R15kQmngImBaZZeTDUbwoW2Nef7RDOsa0DEZkb6+jH5fv2c3aVa+miP9lD7VAES4NVThvpA9Q9V4wSvgJO7CjLXttP+7Y636etk5+GaSfY+YKTuT6uMpWCEnI16ttHP+0Q4TilueeNo1432oKJmbGyh/iHaSWV9u2tdjh9qZGpUPqmakoEcOJ//mSvNzTozaneBksipyGzuxoC3LwYMRHhrI8Xqb22H/N7havAYc4bE8wTa3zbd1R1H8DIV5164y7nYuzdLierRHflawKdAxPCgNdqX8uQKX4EHa3F47sWR8jFqETiAMsETpLCWXG0c6ACApXkY+q7FFnxkwJGg3Oo6Ucs2JUuxsNCvFIxEFpCSUOkPWr8BtCBjzSxF00rxFs63cfp1fSLHrPeBAxOAXOQeOeHlfsa0ML749oDPP9kU1TRyWYJVn/VuoDnXq8mOZWtiFUWkwjjlvfO98HomoLfeCe37aPTL2fdHdLQVW+svUq5RUcgy1yR/RaYQvy+QPz4/eTlus5tr58AGYWyHcAUlnhw8HPZ5GZd7RTAASg4MidIzodSFWJBg3zPJQdlsiRbBcwYjHI/V6uLCZV1rNLGiZJc0s1JpYrpPPhKWXPhuj3E0xemVxiXABKA7r3nZx3Kc1GWi9J7k5ncfJKve9GkhOZ+zq85+8NKj5GaO2CTKhe4AQehcC4lsrieHcbySbu68wC2ZcqtjlppK9wJNlrYEQ+YETjHTRuU+1GzIHGtgQ2njGuQAWgACrIOQ8t0dp8z6QX0m5XW2xwzJ62mfOkByX1ujKqyhfE7r0cT+9/cBI6esrv84ayJSt/3yPapZO5ZKy1hTxtL48ofB+ewpHUpn0OM411Ct7s/dxe/6Gy2e/cN/7Usg9+FI5X7U9kM/8bOtE8M1ytZEVlPr1Hly+I//h7+bm2/VZV7uF/zs+7e4GH2Z5J9sQ2l0qEXrWy/cbSm0rc68W3HWk2MBGZk+APJwmvmINfMh9RstGHGWOtVRqpz3lXJAQQoxtRlgqdeo9RkxAdNisgTcZjwEfKjDms+Tc3BOQUoQcGJxcfdsR67FutFel4RSkMrdq078CpJ0wldZTeDu/hbFKeD6/JXtdE6/tbWJv+k1Qw4Xoy+NLYUSWH+vPX3usvsUrZ7qqWQdvrVu4h8GDRToIu2pXv/eTo1tbZY3jyfi7XfL3aFEZcDZ2rMr3x2fG3sci3nO9VWPr4AQckgpZ0vYgQhIOfthQQnQvl+zlAG2NMWT/XIrF3stK6I0Mf3ems3GJEqAZlLy/s25zkKPzZ6nEZay1ha9P9R6MGI/W1uut5YO3O6LFrdrlkQy3HlJrl3clWapoMRxpsrlc0GQ7JenAAwkFyV1rphmcSHZ0MQpK2ShBEo0W6t1aAvX+URBybkaETsG9bZ2eh7rQx7to7ZPHSCxzvF9lbSs6Rhof1ojZu2y/erXFGLODXxHn+XlWzWtnjp1fh3nbOm6r8v66/uT9K17RjzIOj3NcVSKF7l6TtT5qoPn+e1z1G28eRro0tW3OpGHKCBknyVA+2WtBXJHy4yB8K2/5K2SNfI5Gn4x+FwvEjB4h6vRY+Ol2aEAE/l9ogB4BsYLJF6k0DIlYFhKZsE2cjo627bJU8k6FG6C1GnYvhsKQsJYNPKnfN2nmLAsQ
JwTpjgXqpTlPQM1ETMQGXWcvMkGkLgCSAj5c7dyDMYS0IARvUfnPNnLpN8Whc7ZCVFQAtSakLXnao0qyd2zsKZcpRlNcjVSPzPjvZcTIidcbQd4cvj81QbvXgxH29F9uU+TV06ondtZna71/j9r45zNCtR/fOSkn46CS4R8QuukA60iUv9q6aL3iYhL/UgNAAFyre+yUwCrBx76DPfnwAYaIkuGIJCIFBxRmxZ5nciBmHLPHkZK+XUNNBhQZWtMIlLJZqBlVTbBL/tbfV2T/e17/ajsMJFDjCQgLzK8J6Qg3c2JHCbnjs5Df10tEDhndj2n6kpeBRC34LC9V232DhCBif2ConpH5MApYshg2cHXZxtOmg7mddYxiUAuS6EbkBtTLGNhqV9jS8sS6V4df9aAxxroODfnn60pewWA+Lqao0+gMeInyRN/Q+xTB0jWrH/GXvW5Ojee3Td6cBeIOPq+26j9/qFOuUYG74rivopV3fLqwBxZt901MHIXXUmUflxTy/zH/vpPF6ccQKmVeTJ6XI2h7N8nYf+f/9/fL4dtpVm1cP12iiVKGlYcKY3MWaWeQKLx/2TjSz0OOZSsyEDS7HAbfMmMFEGDkiIh8fC1piIxUmKAfeUwE7XAY8VSD0y6+ozSlyODkIUT5rkWX+rkOueIn3KdD0vr+AMofWSGLFUM1E7tQ3ZkHfqsiUQkfXGcBKToe/vYssp15slfHYmUKhiJqU76DQ+b2wl/9fMzhe6tMl0bNdfrq8ek2RoFqbucGVFAKuuVY4gGRNxn3DuXGbmvrYGR+1K3eu7+XbYmC3tfx/M++/SQ5RtaT/OeV39ngdPacej7EYQJXLIlzBAalPAJQaT3lisSz5zOU7ns3z0O6wHIUTaki9YROSRkSWEGHCcBQOQR89r1OwG67Tmy1K167OvPgT1/FozEE+d+7XevYnptmiaXPmfHcs0Rc24I7CkHUoCBcg0JOyzQepLa46Nm6jJFDi2FN6VUxkZOEsCyIGSO3GRCgNPgo/8OOM30sFaCtPnLTxMwebTXy95oQHIuom6VIGzWwz5LZx+sM4PXuYe8j3CeVW05Q2e66/cPqaVoCk5XorCn1r32+3PbvHNfOj+DEwCfrwvr8RzvW1/MT5lT30sZy3cohbwDUVPEXpy5tF4D81GYpu85VUAyRS5ARLnmNiq8yRQsAOXV1r2oMMDGUzlW7cg9EIHI5axChEOW1nQJKRGcS5hZMiWeNhi2W3HiHTJ1gI9rL86ZAhJTn2F7ccQk0duYz4FE0MWBmqJMujMn7HNNxMtpgUpXKo2pNtyTTSkgkeN15TMFLHqutO9KoAxMigOQ4J2J6q5kRdXIZYct5fuKE+QTV1XSEkBOHAPpYi2LrBWFrt17tnBd3qD9e8U81XtD7xdLQ1IHRa9B30R2zep+AMjN0MCiCBdTwuAB7dINMDycRNtLcfHxCTyVRVgDEDUyrQ66ZEz65e+yU8veld1YTnx/bj3nvj8Vte+BiFrIDm+fRYmcMLKchzjI33NkhKHtURIXLhkTx64UtvfZjh5oNE0WzyiTrRk5qWmRx9+VOVUyJSz9TygV+lZcGMg1JEfnxZxXVeDSe6Ce2z7zlPLy8vkaKLyv2cxI388pZlptWcYEI3R7AkyQgYnQ4uYMsBMBOmZoDY+sqh0jNABi69V0LJw54bDEAkT0d+cYIed8nLX613I8Jqur390XmDxSux7tVe2NBiTWehrSmlTluTntaLJeCdKtKUqcAyPn97c6mK8sg3lPCka/f/fKQpzZ7l0O/WDWabffa5UPPvOgU+1+PHd0AtnHui/qmA6+d9TzMtlJJcqNAEt2wDhAH+OAqU3thJpUgYh0U28dFgAYg8eTbcBu8MWhLkIEVGsDiBy2+Rw0y1F1Sg9LdjLghSqXGM6JFOUanYmcyzQtOqI4nXSOk7mvEq82/5Pv2lqMBNk/TlIIqj03nu+XJlNiC4aVnjaQywDElS7kA5kMGQng0uzR4
BWoodTiBKqF8H0j8zU4lnFMpmyIBKl25RbwpwOGOAxkaBN99sPKWa/LYd8dbCgF6+SKWhFQwYg6LArqvK/npjcd11bHzDLYKciTY1CQoqvrFfbkWFvn0b4/lSHR11Pg5D4Z5vuACaWWre1Tn9Gwvzu33XPLnzteu992/y1w6f81nd3zoOkoFmASM2hGzJmLhQsY0YJ1LVJv+o5o/yA+HohtnZjLgY+kmcck2ZmUs5OpoRoTnOPSAyUujOhMnxJDV9Nu7Fr4LnacYdJ/a71c7Pk+dd3WMlENZS7fgxuzH/balG2Rzic12EUKEhh5bOBMva7qYP1d/P9n7/9irWu2umDwV1Vzrb2f9xz49GjDgYgJ6sVHJ8qFJob0DQjxcLQvEG5IvACJGi4wn3JBgjGttHbsBI1BTcuFiWCCF95wYTAYIk0Tu4kRIjExfumGjy/+yQH6k+ac857n2XutOav6YtSoGjVqVM259vO+73n2YY9k77XW/Fuz5pxV4zd+44+sRcRMLI2BwOvrVgwz0rhlXR9fQxlDFJiwRBtMpLS/lQHnRsD3LslLUPu7Kc8akLDLBKP4opS6Ckpm7P9swresmFZmLSkWuPigYh2s3Pvy2uv5ePvxsSz3gNl5pVjpf/W5AaH4WFabYsmtVmZ2NbCUIqvIo3bPktt67/Cn/sev3L22j0JiVtAvG1VgboNYS8BNsQJKZUyKtaz005bdNhzVBACAWPonAdga1y7kM/PEmBIBk4QEVw+ZAYo6lxJeHJUCnhIDkpohiwFJTDSRvr5uBZBcNyowxjE2VdEXz2B0gKdCf7zMx+zGkoCSCDjShfr8jDJAQKzPPY0XdCz5GMvhgsaRCpaLWpTy/WBrsqO+10XFrHGiUxgOjg/aAHLyLhc/ZHc1ZNDZxqVImYEReey3lRkAsdgIAE22pZnyPnLjkefeY0hGYMQyEByxsu+BoKPt1aBExploxTt4V0GJYDyiI2DRZOPi9QJ8MGvJQCSuVySug4E5IHE5w17ytahpWHwObM9eCQAQchxLZgUQUdbJBADyvjyusYmZs5i0ERA5Ckgs0De6T3vzpJyH9lhIhvXNWMXrUhujxoVRyfUrFqByjW3WOyne1faOavUAKJ4jt8jouiyd6Dlk2XqRd1OeNyDJk29QJk4aGFBiGmZy1I1BZsCaSVNzY3Js6zi7rgDoB8enAqBbA+LlQNcdi+MTeGKVfRDqgD2Soqgag6wUC/BYlvF3KeD9YY14c90KGJEKRVVEQgmWHAESFrbGIaYS4PwoLLo6tfPDFksfcQrY987EmISYWQfxXI2UcykcawFU0EFt60EHxwcxuJCB4K+vMl4kZh9pdkVo44oKA7BRC4jFSQA8QspgrDwL2ToJWn6CLw9PzIoR/yYXinrt3JMcX0KbuWzgoHUp1yFw3iHBYUtbY8SVzzF/WuOCBtNdP4uXQQfLU52RyhJ6R6wh96McX7x30/cP6N3LRuMcJ66IW45HE4pVqyjGXB+F2i2DvOX2gK0g3i3W01eBuz4Wy4hhsNvYApGLauNIqR0xG9b60X5721rXKNv8KBiS84Ws+O+jPjMrAOcS4krAgt2ytjVivRIQWS9UcyiuF2xr/n6lUWUESFwIOSCYQElYzgRO4hlhCZkxITct5x1cdsV1q6MCjVuk9+0C4Nxf32wMtECuZIyAOo9YtVnoGuRc0Z7rLNgZFu26BaBpIzPTHKTO4y4LxZaQIcVLpV08pzJxx8MWS1zIQ06E8rhWQ00FJLpNaowx2i3HmltqiDSlA9igNXhub1QrvmjivPvoGZLn0jlfRHnWgMSaNMMAEMziIEYvV4xsYZ2nwbTEYg9uNEqYsqlBTUuxHE9cQazt3+bcshkjN7I58CMlu2T3QW9d1veIjyfdlHQMwRdT/m+/8L8CQLFmWWlQgeqvLCc57WIG9Ja6mFAeKFZAg6OHLCLhlAMrT8iZmXy919ct4hx8tlw6KsUQya8Zqd47dsPhplggRLMfv
J7dDnT6SRmMWXyjheXPKrT3VJF2ivJe5MMGODA2caBrTqkFZAxKgP79ZTbiqTJ7P7rA08F52DVSFzsdHbcu4/Pst7MPer39ovcUfMtafSsTomtajM6jmZlZG/X+Ovj6KNPcW+FvCdrvXcjk9TXZuTJY9J7GRN3+JhtXTA0YiesVact1iDABJJGAiM+liSSjEqMvzAgtS3DKWCjZkxgTuTwZz4XuV7lOAkcJRAA09VNmEgGyuBR30P17yH+LGKeBbFhwbfwMz0tegZPiYRDbsU4myqg1REQWv8guXQRG9Dy/RTSJXj4oscCIJU9hXV7kRbQ8a0AiRVomWhce+jxNkn6OFHVSkFFdM1jpy8GeZDW1FH907QDybuWlzQOJmOCPUP+83QclMzbBZEOKZbkFBNIqz0yFPsesVkCXKcSorVLaoBgRPse7AERYPvdwLffz9WUb3ldpgWusbr69ppjIKu1dZUb0Ib1D2R+gFJQ+oWSVqtvVY4asdJMRj0EGrZdpPiXwYEkiIDMllDS91dWgAhAGH6uoSN6wIbFtG/dB+e7sOBoZ1A60oFQDVQBU8zFbK4MnAJay6xaDEkDUaxFgjEEJ9wFfL2UPYhdS0c8FxbTvemG55E0U6YvLc+zF+6DeRY6Tku8dnzPGVMY7qw/kHZRj3CgDD4NNWbSzKFBZmdLsiKXQA7e5Q+nMS7lVQC6epw0kDDaOgI76vbZVr7eOw6CEz8mffRxCBR1PYUbadlclltsrz8dxGPy55XXSIsvuWjElbCu5Zq2XNxmUXLE9vikABQCiAUjCcgYA+PzpQizLyd0rIMJnF1zXnDfl9yBFcuHkYovJO7xBdtWNT2BIBCMiUxTzuYE2JTHXafHq3nEclvxekkV4KjobvKt1n4IvjPP9ErAEGovuF0p53Mb1MYtpz38x1vmPE59cYx0rNTNiz8vCSCeYmz1GozvOyDVrcBwZF/oCSl7kbeVLBpCwjJRhLR2DIa3PsTIjbDK2ioVZrId8cc2XO89TOqd/adcElNwyic18SIHb3cnMSVUpQxKMNGl4hVKkpcQ8xASfEmK2PGsluj933l8on+EtrdZvK//X//v/B68vW1EO7hYvMmm1cSL6s1VqeoBVlVn6LYuy8fYlhgMJwVH6Mo4lGRenrOxALVpd2Q+gAhEOvJSH4q9btuTFWDOLXWObxrcBKYOJVYIxAE02NamEt+xA/xzwscxrjuTKhZw1h/qggpIjkthamapS1LhJ8XOcBwn9SmtXUESa0G/JADcyAkgX1mOxKqQksrubbCuDESlsqS3XMgAfvO7o8hHA6GMroH4fM+S0Cm0fcC9/N+ldhYzAhQYi+h2f7a9lEdfO+9R2kFmfj7N4u2YLFzt0/CBmSZkZ4b+4XhGvF8TssrUxQ7IZ2UUAeB8Q10sNcF9OiHErBVSlRAGcOE6QY0k43qUY6QRbMWKdmvunXLMKM5Laay1dBnrvdbyhHHtl1jr+TYCEPhdeFnxOlJEZk5DHpOBLlsMjYITbtAljHBdZbVgTgxkZiZdjoZNzbz+XHBF5G3TfSSmeJP74GPrFlJc6JO+mPGtAUmlR+t0xEoPJeM9a8BSkP3px5TpmVKQLmMmwhLkSeaw9TwMjo/WaIrZc0p6iHFI/kFbs4QowkeecSVHMt6p8f7HSDrILwZvLisvqcMkZnd5c1jzJRjH5UaYmCH9lnoivGz0f0j9/lvlMZx/jeBH+fZ8nULbokcsB4JCV/3wcp4/LfcvrKQS+uDIl9HEi0u0gCsWBreucZUu3n4XrikhGpH2e2vfdet5ujSFi1y2fA9MdXOO6Rv2ft0WtSyKtm1p4e5l6d/48C+uGmreoHf2zzUzLHhupDQJFPxX3gedKWbdEMyPyN/u8X7Y4BAP8rI9ctGYgQmZeQv7O2+uUsEfcplrQEbs2zEAVXwvQFme0XLEsY4MWKy7B2mbE9MzauMV5pXa9TsaDMMiIcctxIVtZ5sU2fjnDn85w3pcYEr9QD
InL76vzQFg8FVwty1z5/bYSHKcbJtajBleLa1OZ5Zync59CTav+6hwK+LgrbEgoYzQDEV7GNZ6IDaFYPM76Vw0mehxrn4Uj4HlPZCp8Hv/02CmNopYO8lSZuYe+pPx9kbeRZw1I2DrYxo0cm5RnwnOypiM3w/xuARGdeaOKcPPKbl+0nQUAnm7VeIrYc0RvBZHbF5/Zch9s5ZAtRSOJ0SG7JGPzbcyBlE0BFZnVKbh++UctjxmMfP5hzUrHCqBW2mU3AGnZZZ9kli2mJoWqVpykosMpbHkyOnlfgAi7Dtwv1bWA1/G9qrnwa8FAWbwvpRzMrUBIVGDEqlwufaBjYgYlmcBKMiImEMlttBiBkj1sAkZ6g4R4nzJLkpKMIUldjEgBJEm7alWrpgRhMkmDTNGpGda2YnhuK48LvrqW1B3rtbBl2ad8HcE2BHTXz+8RX2NmjJrrjckMpL0aAeDSXYvuJ5m7JVBYB0zGTDkr7IdS/ltGon0vZseyfmtXLW6rPJcWzWiel2C6b2ngcZTh5nbouiS07liNIOdETIMTQGBLTbYsgFyvfHa78ssZHjU2RAIVv5xLUPtyfkXXdH6Vg9sD/OLpGVx8SfHLn36h9rC7lHSb0td+tJ/YcOjUs9H0gzjPOVS3LA1Ezosvblly2TlUFyyZGGQJlHad3Ua5DhKnFz8JpqzVAOZeEMA4Zix4FBrbqsnF52TGptc/qAVSB9lLJLMneniSrrLvssjn+qM85y3y8z//8/jhH/5h/NIv/RI+85nP4Cd/8ifxbd/2bdN9fu7nfg7f//3fj//4H/8jvuZrvgZ/7a/9NXz3d3/30xv9EcvzBiS+t5YC1V1pz/1BB5zdem6pMI9Ykf6cwhBarDqp+2296CxHwMksuP0WtxCWUXBb2y6AnXj2XK6kNGCF/YaEiVrXSbDSK2sQ8i//598o+/0f//efPNyWtxGp3OwVWaOJ0UZplp+9dmehP5TJ8uR98WOWBRJP3pcq5nWSyhOlq/eJb2Mo8SOoLhWgFLrMlMhnWAdkSgVd1h6RXTBPvDDoXMjYi0JpFOWar2PPVZG7fPT8C5zSLS/EgnapGMzofO187/h3FbofGwOLYmGk5VFYfnWR15Fw/QPptsXnlBZMCfr5nsk26vsl3UVmrMJR6++RGBMd1PxBWJaPsDOWzJiRPXlqzJ/FkBAjZAe583c5Fznf1jklpoJBfKCkDn6rqXyzK5dO8QsA/nRq2BRiRk40D2cw4ryDz9qyBCXeuwYkMRur2ZJZX3Xr8nO+pX6ukMbBChy1K5aICclsCC8L3hXXLDbsSLfRlg2pRVnZRWt2LdazJ9lkS+Scp2Pr2u2G3de2YXAuOVYA1aV3JHvrX+Tp8oUvfAFf//Vfj+/5nu/Bt3/7t+9u/2u/9mv403/6T+N7v/d78RM/8RP41//6X+PP//k/j6/6qq/Cpz71qY+gxW8vzxqQ3OdK1SyV2TAABg/KZaCyQYhUbC03C9q9+ktqkRbdmTSsTuPzjeJ6JNsBSMbANcul6PaOYwe04nbQKiUAYG1DqyQCOXe6c1TdGQCi21UGASAESkDA7jBbqnElbc2EfNhmmb72j9Z9a6ScMQvSToihYUxal5TYZeXiyY0n0JP3eO8U4J3Dx8+huAxIZoSK57nCiCyB3OIWz1ZTul3aghczWyDT+KbkciyEK8HsALqATA5gl7Ej0p1rJvq56BgvAUwBFJc0ALhbUlEKdIA7i1QWaP/2fLJ1q6qB0jyPSpmXLBH3lwzer0p+6652RSpuFjSx8wDmsSVyBdGgxJLIhgBRj2ZLNQUy73dFG+vSZj9LTbtl5jPtnqWBdx9s3LtEtev3wbreJnhXUgdb7lPyc09G76kFAGYB7KNz3mLlH7VpJhTT0O6zxlpM8LJGhKUyh1iAsPkcXxHpe3TA/cebeBIrsxaADoSQqxazHwRCwuIbRoSWVWaGWREGJew+Jd2KWuZr3sfz/mmPx393xvhbAQkbboC7z
KSwkUfGrMlg9Vngum6zvLs122ES40k7PppV150DfJ23NZvMp7R0EJkB8UiKbwlMGHTwWGSBlqb/bzR2flHEB3woacn2znmDfPrTn8anP/3pw9v/6I/+KL72a78Wf/fv/l0AwNd93dfh3/ybf4O/9/f+3gsg+ShkNEFrYGKzBL37llRyRxlnZnL0RZQWZv6t15fzJsEM+NtiW/aC45s0hOJ69zJtWFKUIgFKAAIXJTuZeczJQX0CIu/fW62laKtuWfYBWFWPysidQqb15SBJPVnWY/R1EljK9poVMeJEzsHDOTQxIydP7gOn4ERldjp2qTOeAIeE5ABER9+TQ3Qpu3gRKCHqvwIGDsiMMYmUv3EawN71n/GudkxYrJXOOZsYfUcBwIVRC/XZ7i2JtiKpz6uBCABRUyUqYNI+czJuZhSU2j6ersZUATgSZG+9V5KppAO173cPqGq7rUDaklBBPY8WGLFkxKRYoEN+l2D8aND6Lcrr3jFnsR5PYUpuaYfNjtT+sdohPyUrwc+CBgP8nDIzArSuWiz8nWNENBBxrrppeeewnDMYyi5NGojQMVsm5wgYGfX32J2vGoIAdECEXbdkXEjwDnfCFYtdkGQ2P+2KbAERbeTR900bBeRy7ZosxXRHb4wutjFTizUvWu60PHZqUCLXvcht8rnPfa75fXd3h7u7u7c+7i/8wi/gW77lW5pln/rUp/CX//Jffutjf1TyrAHJ4tvMSjpF5hXorIQh1pdNMxFy317ZqgqvmYVq8GKOBgQ9to4DxSQLAVgphzc1kFRqt5/A9O8NxsDk7cFuE+eMm3ArK/1L5z6VCyTrKqWfTQ1LMhJLEeRz622kBDHxfoQYpBHuV13UrU0pGcr3Rd2fERhhIMOTK0+e7KJFPsytexaDkHPexgIi1q1gNy7Zx6Jr6zLQ5LylDadcy2TLn3fJU1pdflY8TGBSFO8txyF54BqzVW4WCMQ3OG8PAFgpzfE1OngXSUnY5nFNADEKV4UjGVdaTAJQ4ygYmGil3mLmvHPmu1YYk5Lm2nEZh/reiwfaUjhYYQBSiXOL1k1rjoumzVe+NlFYUScfqG1OnUsiMyNAH18itzsqI7Cit5HxWFr2AIM2BGg2RH63GJJZ1ryjovtkD4jw+S8rvS8c4A+gSf+7RUoPzkCB3bZat2BRxT2SKqBdqHj8dx4m0+EzE+sXyYzkfhGuW02qXWf315FPDTZGbIg0+tw142+Np+sMO75nPpjBrLFqkqGt5x/dcQlGZgYNGW8n4zqOvDNsWIyJ5mUuEmtuG1s9RrthFjHnaZ7w03S7l8D2uXzN13xN8/uv//W/jr/xN/7GWx/313/91/GVX/mVzbKv/MqvxOc+9zm8efMGr169eutzfNjyrAGJVOLlpCpfCM5UdOL0sq4qxdGNJux6TH28vZftVqPZoQxgEahDGwUee6Gn6OwWVoxFOdSg+Q1FmzvAuxawBF+Py2lmJUApA2FEASUERFBDQ3LV2pF7DrdFkg2bGkC7Wg9ASdUsj/W2AXu3ykW4lMjJU4IQCUSAVqlilxQ+jhSeVO8Xj7vgcb+EwoqcROClBCLBA2dfAzI5kJ3Fq/klqWeGwYn1zLO+e/IeMd+sU/AE3gMAxPJc0v2gyurXjRX83h0tbiCGQzxj1L72Nz93XJeFtnHksqTcGSxLpwy8DCOlWwLg2FotR0AEQBmDZM0OKfxudn0aHXzILFEUSqgwRrCFEmjHiRaU0O+rAUbK/ShtlK5kdjawocKC1gVrlLkKOG7t31O+9gCIFF0kcU9Grlj9ZwtCRtsfyaSlZS+rljxnrSoeOsMFu20BHmvwcL6mx5UFA7elLyIoAYms38HrmoxZHk2MCDMiXriLyRiOGdsxYzn0uGkZeKyUvcxc8PgngYbMQsjB6BJ8SOYj+DYwXdsO9TgKtKnRGYxotlW6bMlkGEekeedjUsBgbNBpjCxRvudqwwnoGBkmy65fLIvgLeI9dq2jH8Y5AfyX/
/Jf8OVf/uVl8QfBjnypyLMGJCz6ZbZeCMmWbFt+mWEDAA1EZsyIlFvmICsD2PC4Wcnn88sAVwlKZBulMiddHuQ+1jXpoFt5bXLik4kD5PljTDXQsLhv0SDmXVa4BkquBCJSMWpcYHbGuvn1fLhiW/Rqikk5qc4UJal0yb9ztkZ2k7mrk6dzlLbWZzDhHU2mzrVZtaSUCTVb5xNy1fZIrltbooxbyaXCrLSgEORa5+gmh0jAv3XVG9+HLaaabUq4sXdt9RIMV4BOjc/vbD5OTImsmtlSzHUOriBDhM8xFtYYYGV3szK7aSZkNA8zEDiF3CdwpXBc2UbFV21KyZBuE9xGDUpk+622RNdbR/mdboFUy1S2wKNn8aSr4hGLruWONXJHeqpYldtHbbF+HwEio+9HY1u0a9otqX4r6Ivm8cu5F49tjSWmI0ZgQUBMNWUuhGt7A0h8ZQL4t8uV4HVsCAMTTqvLbRgBPautTbsVINGuVwxGJHNc1gnmA6hxHzIgvc2KJQ0VfYas4ta68zjquTWhZUZ0DFr/vb67R+Yt6TIlxwvLTV3XDaJl7bFaEfursVWfQ84F0jDyIrZ8+Zd/eQNIPij55Cc/id/4jd9olv3Gb/wGvvzLv/xZsCPAMwckm3iZubIpL++C0aNw03DVesNvjx5M5DlYtKIiKWgerA7HkTxhsq1uSa4qK7pNBYio8wXBmiiFpVpBq2KkxUqBytZpCVAqW0NtvcZUFECAJoGrUDpZL7PcYzQjMrO8aJe6QoN/xIBEsyEAOteCpt1KAdNAhC1+Hzsv4DgQrlQOqMmgWCSrO2PwtNwDJWZq9OjVmKo80XieXCKuSHA5DXAC1XxJLiHBETvhMwAFB5bn9gUHH4FrBjbBA9e1t6RrRdaykMZU318WZgLkc8vK/uapL310xQ2R2CS+3rYjZu+kTp4glQYNALTLZABK5fRy3QGd4qHbcy1MZRs3U7Yz3geL9dDSWklrm2WsiP6UQEQu01XZ9Xcp2vVIftdsmbXfTGbsiXXsfSW4WuFH7bDS+o4ADdAajXR7ud/2QMmMUZIMQr0/AdviixErpVSLCObfLJI9lS5WJe7DtTEoQMuCjP50H+2JZkP4U8Z/AFRDhNPy3kuGxNWaTJ6NNSYQQXFxdahjCBt06Jp3mwsxreYYu7pOMiPS1ZN1Fp7nmhTpylgwk03Mj9cD25uJfAyW+Np4RsiN7fg7OXa9udrJEd4lcSHAhY847e+HfL5v+IZvwL/8l/+yWfYzP/Mz+IZv+IYP9bwfpDxrQAL0ysDwZRYWyJhg0pxSZkBEigVAdDrfPekKlh3ax870VVw6MsDS7lx7IkFJn2GsTqjSYquVAGZJtMRi4u0t3XbMTg9GNCPTtV+4pND1fLSAxHIxsBQVKSOrMAMZsgi2Psu6ro73yPEh9Fh7h8KOeAj3C3c0n1o9Hn0OXLfEd58B83VwvGC4EkpldyRFYfX9McozKSdVz8+UK+85gxmewGXMUZEDJR4k8L1VykTOYxHQMEKlGZ3lUhhOgmveP7t9rQtWf/xqgNDuc7TcBiZyvbXuKe+aVsr3tr1FmCU5csz2HfXNuaRhYdYe81gj5Y0LURoGiacURZTn7yq3RyAuHlgj5BubjHNI8AFgGIguQYi8ZguM3Hrf5rEhgjEWaXmru1XN3CVT83L2Peme5R2N0c7xOEdARL2mU4lAqdF0RKyaRcyM0PrbjWiHEoZMxrbZ/t65bt/NMAKNEuS8yNPl/fffx6/8yq+U37/2a7+GX/7lX8YnPvEJ/P7f//vxgz/4g/hv/+2/4Z/+038KAPje7/1e/MN/+A/xAz/wA/ie7/ke/OzP/iz++T//5/ipn/qpL9Yl3CzPGpA09QDivv9l++K51v9xMPoM84KLQZatMLyclcSa5aff/0iBxmY7Pp+IJ+lpWTSZhfiSfLGCslIqQVtlNW7JSieVIl0hllPQ6utkN646vrUgQwMQ7TIn5aoyWmm/eKAG6H5Uw
orVnXDR6iZnY9AuSpmw5PKflYrybqH+vVso7fV7J7IUngOl+aXUvjUlJLtqlXuRYlucQApFryIhFwAEpwIWQBeUbUs/1/L+bWk+MVkuQFp00D81QB2ncQvjhtTnvKQujrm6N7tqZXZPph0FUvMeAz0zokUHiWqR72gJ5pXumk48+8pCWs4h2Mdr1g6u6F1JZ4oFW2UBG0RYoINlFdu0+0ZzXy2WIm8xBMG63zjuViW/L8ay2b7MigDVJYiPM1Ksu2M4mW2JtrGL3iZi7VIqDIDuWwugrdFyP8u+iKjjhtxfxpSMQOSof0bAQi6z+vkoELGem/78NSZEx4ewW5Z3ukCqqhPCLliu3o+9GkhaHXBqe2aWau+3Iq9MFlCl8aJ105rNc6VflEGodcFqt5XulnWbD3cilPf6MkM/L3JYfvEXfxHf9E3fVH5///d/PwDgu77ru/BjP/Zj+MxnPoP//J//c1n/tV/7tfipn/op/JW/8lfwIz/yI/h9v+/34R//43/8bFL+As8ekABah7aCtbwTL6Thl102zHLUnUrn+5ZgpN2uX2anu60swUg4nsRujxyEjNE1tut8qArOyG5+i7XDu9p3LWCr3y0LkMxgJAfnkQLY/DaUwSiOoYHLhynaNaSZXIWy0ghby4UCwm4IhRXJ1kjvKLMW9zMDv1Mg0MKxJMFVkMxsiQMqCNkDJKC+i8gFEtMsAkRcv5hwC0CMxxiFkcuOjjWQUhRY5fZUs85YBghyf+J+jRsNIk9JX7kHViwwQsv7ba6xWh51vRLasI4NMlmArJ4OVAXzpAI2t9Qqpc06Q/G11rduWXG6zUg0+NDL9/bV3/XnCIhYv2fHkqm6h+dWyi2/r7zOOl25RE+xRLXaaLudBdA0c1LbRGpxDXT3pT8Z7EvgMxJ9vaM+sPqCvh8LEh49O/pYDD5aprgaaSTTUcZEMf80AES8816MuZIdGYkGI/Y26AbJrTGqtYVjG0bk4BhpeTtIl0s+Hp/b+nwb2Xt2+PP6HADJM6hD8o3f+I1dohkpP/ZjP2bu8+///b+/tWXvjDxrQMJKV3Qgd47EE0FVevUjMLOMWEBkBE6CUjYkGJnW1niiyPTGU8BUxoJWGaMdeZuaJlDyrlZGHdvCV/ssNAO9ayZiXZiuOUc+xZEEAlLxk+v3/ORZYfuH/69fw5YS/qf/wx/oN8zyf/pX/3P5/n/+1P843G4mMvsLIKx8BhixLHWnfCs4K4x3svCfL5Y/tgzeB/KD5vdAB63HRMHpOdYcyfkWmABwKSI5dp4mdmTLAITASEJKPVzlPk/gSdXuEwLQWbmK1dIPrxREQ2Gyqt3vKZXy+zn3nXdALAXFCIRwjEtwuR6Dn7+3I6A8mqP1sSSTI/fRbN6WBvVKtvrOXVGz9ljxN3mPRoGRjJQl60Rx2XPL4ufcig0p24jjP8UlaQZGZkHke99lsDr/SUXcYjdHLEgz9hnP6RaZkefnoHVz0e0zFXXjPhHo8B1gPKqUWsyHrpUEtGyR3nfvuWmfL39Yub1TgewcI2fOS5Fi27yr7sFbSoXGkAktgIiYyJhDr0gq99Mlh+xMkIHG/FmtRhuai1Kq196mrh+7avF6S/LQORQrAQXQs26jd27PrfHWd/WjdJN+kS8teeaAJAergQyIxfUoAnXQt/ZTv+WAfxBM8C6jqtC3WFxlZizKTHTMzUunRQWQM1yBakmUi0/N9vC9/7kM0OWAW15uuRnJa1yE/+6o3oNkRnTaVOvYpS2wQYisIj1ql1bGZrLnXnBEzktoct5rFw5WNJn9AGofWQBvlhufqrCzxbCyIloST5T5fXAAAgOQDEwkKCGAQS4JMaUhM5JAivMW23oxLN67UhODr3PLLlGnkLNMLfXehKjdgeq9uxzoe0tJvfjYKpbOib73hS25y+mKm9odTdrpFoxY7oFWXIuWLl5KHUdmuNqzdMq4G14mXQXPiy+MC/fxo8GQSMX3FmtqVeTF9Q1Qqb6vW0w4g
9JbS3BisSf6vRyBj/Fna7UfARupbEuXrZO6Tl2Yzrt6nCbhgCHFUBYT2FW2Pmiexn5ZzPPgmDRzt9tzi2o+xfshxyV5ffLauZ1A65lgZWyzlOVZ+0ZGHd02Fj7MdaN6RJRdnsabKwjEU70kYkh4HKY6WfnaI48ddCwxXdLvAYuSUN/jlKqLa3Enz/OU5aql579mLhdgr17rOEOlrv+jk1CMhMfXEYi2RL+nm7hf6/oMAIn3XwSG5CNOM/wM5VkDEnZPyY7t6DJQAUPTgmYZuow7xmCn140mn1mgu5RSUdqgYmdiWdubgSwDGs6sYgGKvjl5QMlBsz63r6kCKyy8vL/0wddg5BaxJjnZVr4HrNhqq5AUXYBuBkj+L//6//2kAFIpP/z/+BW8vmydRVFbUuUy/n4ytp+lp5QZYjggk/5ckxFG3qMImpwjZ49xokEMRvJfZDACnljta3ZwcCUOA0B0uHKBzKxjSfaSWRLOjx840NwYo49mGaLj9Bba4HsLfMjWUg7SpNgNhyCusQSQU8c0+0vwoX93bIYwMFjLZXtvefQ0GNHXLD8tEDB6D2wgOHa7GLnmWMtlWlrtficVGglKtByJU5gBkT2WZOiaNGFC6Hdt44h537u/xQiV3yEAaNNlj4/Pwsxqeb529CyL4eHzcTwgj00AOuZbSmMsirlgr8gmxVmnKMtlwubrM8zPwOw5ZjAyE6u/6rucgJjnQu6fko6ef+faXrk+FpeBCp4gyJYYnBg1N/IhGIiU9yezxhKEVJfW1rAhr0OLdj+3rl3OgRqM3OpKOZLZuGG9zy/yIk+RZw1IKLA3EAXqByli1eBsKf57A54ZA1Imrfx7cAy9XbvSlbYWdkexJG2Ob/rUecYbRUeDJ+8a4DNqn5RR31kir8+yoDXb5muTQMnqlxNbgbwrg7cH4COKm5lUCLdI1iFWaqQCcwRs3Al3oaeAE/bP5u/aqlquK7QTv7T0yUmfY0OCqy4LnNbyHDycQ3bfIiAS2MLnakYslphnyuQcHPKk6By883DOwy80BHDw9xqJfSruWuU4NOm6jCGCB3yicy4xVGt9Sojek6+Cz4UxQcpNTORO6bPfPAd0V+sh3Ue2nmsGoFWc2RpY+zd4306Mi8caE+5yBWtaHnNwOLlxbcnh6kjRCCUYXiitpqJT+3auMPTAeibeOaJ6o6tGBt8CCiuGoGHlvGu2Z1bkskZc1v10nEfceghk+E55tI9XwQixhLFRRJ/iime7ZvnpNkeYFemSpeNBaLndrtHY34PY+dxTDbbO3KYYwiaMRQOSvN1+y8XYYrZlcUB9zUBmR5kFiLXy+OO6YUvVDfG6xcKaSNdEXfPGkhl44jawsYH7riSr8Xlcc9kgkxlQ710piCqPp+eyuhzNsyFFGsZ0nRFKPd9WY7f6gK9jJppJoX3abfQ7z657cp0l1rt3BFwcAf3vqjjv4T5ixuKjPt9zlGcNSLxgSDwomIqsUmmY4cofTnq6L3uZsmagZeaaNRIKaK3794G783ZYYgbhH00BxsdXDIkWCbh0m7jAW5f6OA/utBl3IIDYZ3diRa1Q1h/wgPg//eR/KErdefH4qt/1Cq/OoSgxwdXCXBxwWa5TKbfeIbtZ+Q6IcP/dBy8YEnbT8iVXPgO5JdBz7xwGYKQujymVZ39DjgtxrqjMWyRWhK1tCRN2JIOS6Mjf2jmKC7jmhVbiBY4lAVwGlIIhyC6CPrpiQR3JzHJf11WAKa13Unkp6SxFsHi93moJjZNnyWI55kxnqzjK/aRBoqQtHuAHqcR38Q755jQub2uEFVPQuFxMFKKZK9UMlFj9L4832k+fW283Y0G0G9bsGJoJsVwp6XvfHpZRrJB+DmZ1JcYGnPacVqyKZKg14KDvvdLNY23NTFWBR+2z6qLkXV+PIyU6eDUKAWv0uMZY5rWrd7jGhJN3eBDaM2UZQzFQFGXbeLVHY6kWng9l9kfEmhafzlEZEi6IuqVUM+7lwJEGuDhaH
kt/q/uqCvlaaetl4eZZrRH9eJQxaGeKP8J+zgDJUXbjyDaLd8O07y/yInvyrAFJtcrL2gJoJoQPUje1lP+jrkkjNysGVJbSbokGJbzsVtEgaer7rDrxaNYwHpAlS8P3i9utM5M1aY4FKOGx1HuHkPi+VjCwxYRtYnWdWWAfM5C5W3zDdrB84uN32GLCm8sKrrzOmbBkymNu9khpkWyIjrfhjDHcDgYhPoMNCloXTIiDSK3cghJ9P/Rzm7LVECmVeM3CLKpnqXo5pBq8yRNtIv9pdlfQ10zNo/gMn9AwXjEmRF8nZnbvQK4ijXNr8QOQWY7QWP5m0kzAa7v9BXGoTOvYH74eub45z44VvH/3s0KstjvBZaszbx+LqwtfTxDXpF0EuS3EhsQC0iUrYV1vcy3q3WldmSorwu/JXkCsFr5/zJbIc473ke/xcSZkyIaI+0vL6XOPcbDaqt1DAXQK2ezyLLAzYmYs40XLaNTftS5HLfxXx4i+3oYT44o0cgB9QotiM8rvckr5XU2UVvt+oXt8jRHXjZiBuy0ippSZ0pYl4KxM8UhK70GfUXuMCuUxobqFOrEdSowb3y/NlGA7Nk82cTS5c0p67lhds9rYmv2K7Na8ruMwL1tlQ9hNS77zFzF2slieBJbL52yckEzpi7zIByXPGpAAcoBIJaAb6JmQI0b/mWI/isWwj2MPpLK9WzN4pgaUAK1iI31NyarTt20kFmMxourLPk73nZigvT0oS2H6OjhXTUgZYLSyj6Sq5RjggL6i9Efqw3Pw2BZSXC1XEGkttYTTod4tHh8796/E/+7jd7jGiPcfdNV0UZjL2+wQXwN3gWZBpALBysOSrZPBV2WBlSbpmlUVLDoGMxdSItp3oTxX/JGBMd8J+WhXsJIKEOFUwHVZneD4VgfHVkaA8w5L8MyJE67ZcrgWX/OE4FLjutinueTffghKeKJsrIO5r/Zc8iyresM+oHcjoXPU751i5OtvfnZHCo6sS+KdL6DN6g/dnquoml7dNmIHMui7NwFBf92+W6azLUklZiaaaWF3riP76rbJT6tWyBEGpF23e/pGdHP3QKkU7XbEz4Nsi2Y+LAAiiwJaLKo0YEijhY6NcejHD52xT197ASJ5HCCFmAwX60bxamt0mTnJz2NKOHn6vEZfXbq8a47LfXh6ojeDnn+ZhW/cnVJdp2WU5GbWH3xePqZcZiVzof0qu6RFu/vK7XTSi/4vljmNDROW26sMZrcAvTZijGJG+PtzcNUq4r4IaX/dR3y+ZyjPHpA0fpjGPDBTtLWQjjuYTOTLZgwglqIwEw1uJFNSt6ksAjAHHmaTO8uvWKetQegn6Xqc2uaRPzGglAqfchjBMbcyC2TJ7Ea0TZ+zPXhybQme4i1kcKylsGg5Lx7vnWmgeHUOJVZDynsnj2us+e/fO4WhyxW3cxSvY2XN8hnccJC6VCYsIOLL+nxcoUxwznydv1zHg9R+T3n7ug0DDTnp8SNfJ0U0zAjFBqHc81PI91QEuNOGsXnOKfA9odTGcVV5jwnAhsKA6UKJUqmWoq3ovI+cVEfC263qfBYbgWg/U9r6LsFI005zuXxJUdzcYq5bwQX1pHJNbbSvh/oimn2i2QoNTM5LKN+1C9TM71wrLlYmrb4tx+uQHGVDLAB5NC6ERbfJendGReqs5AZ8z3iM6EFTO6bwuhkYsQwZJ98DEnLDsscLHltkn+je4Kt0/N8BiIDLBjXyVHBIHnAJlDrX53d7IXeumFxJu8vsKeCpL2MyztrKkbmkbluvZzR3WoujmIxWI5B973xWbIgGW9Zz0h7MPqlOjKHZEf03C263xseRHHHrss7xIi9yizxrQPKwtS4MWnSgtVSstcigOC0xAie5rTdAkKEQRDkiKjYgqImRJBlMQj2PBhhHmJFRquBREKNskwQffBi2slkpEBfPiiopINEllMJfggWS0vrcyuVtetVmuaC8Wfn1LnQKEQ/EM+vNx84BX3a3gAPI7
wxA8nvfOzd99WXnpSjfsl3U1vE94exYQAV3EoQwI8KxIdLSyQpD8K6xYrI0ejJb/7gPUs9qRFQgIvtMPno8acr8+gl1e9M9yTMjIy2U9b55H8gy6mq/nQIpJD4CIVuRQlaQGaCUukLq3vacUK9Yts/DsaJdR+tTtBZUZQAw0qfqRAZAPx5tKWHhtN05vuYa22dbXmfNZuSKC1GroPum/bpfuI6F7kMr+5RWoLm98nh71tRb3bx0u0afIwBiuVPuGaf6zGn0ObJ47ylj3HcxceKK9rmxjBv8fFjp1O+X6jLK4OOcx43F1wQai+fjUXpvpAgX15xCzyiQyr9d/1455wFPyTCCJ9UhBI+UXQ03T+/pGusn9801OtyB3LmuW8R1800QPPenTrEN6HnONfdiJBYbIsUCDNa+t4p+bnThQnn8W1wV22O2oIPZ0DeXFQDw5rIpltSOMbHe671z921p2Wjg9rolXxR5BoURfyfKswYkM+WvjUtwppX/iGzZ8lvOGbUC0QdZv43INMZvMzCWY2HHBaswJTYQAVD8j+W+3tVl3EwOUixuc64HYlI0GLEmID35jIohcm0JmdDgktVrHnB/5P/5vxSWg6/x5H2J2VgCuVL93K/+b/i9753gHfC5hw3vnULDbnzsHOBcm+pRZ1uRMmKYuF8bJcSwagLCGiwspwrKmpJQ2Q/JfHD/STcs3afMqWyxBSJ8H1iKtXf0XuXMUXzUKJ65Ll13FFblDHBj3l9bxqVYVvd2/bF4hbr9VpR5LY01X7w30lWrDcblzzkY8Y6utyzjY8SEk8+gLbjmPLQf9Z98tnQf3S19ViwJ2C1lZc/9ia9Zt9dy59i7P0dlCEiadwtNG+l7D0ZGz+to3JWZoeQyywI9vF7fj8fdNUpGRe9enqEKNopBw+V4tJDBpMuZ+ABguxAgiRGIpLi6lHpAUq+sgJIk27tFuLAgxRVwHsF7Ghv44YVD8ImKCyK/955j0AgwU+2uiC3R/MDP9imAAshjAiWWyOCD0xqXsaG+J9a9ekra7T226xYx0/ca556NRdY7I7eXxgTrmHK59XxqQ8GeHGFH+Ji3enK8yIuwPGtAQkFzWeksSk6fzrBZ73prAFli7ReuW+6rfyspny6nFKwKthauGkvSZgORbatC28viht6ywIvrKNsNmJGmD8Q2vQ+7a5RlPo3MuAKg8TcWumZZ17VVSUxtxdoS2JjawVtashrQIiadAi4W1ygN90to3Fs41iN4V4CId+SCFZzDe6eA984BiwfuckD53cc8Ulq6OA6gVe7ZpzqmqshL0f1Xlud+ZAuvc9kimhXdxsIJkAKhlYhcQyQhM2kOuKa+wDAzI8yaSMaktUGq/fKnBUaAmq3HElZYI5J4/ohF4boEOr6E2JKU6+BQmmA24FPNBleCNB9VMTDZzi3G4YSsvwO9CxF9yuKKpNSzO9eMLQHU+KMtkK59HupYYLC1mTnz5ba38Uqc3OAUXHE5vBppkbXI+ziLi5llOpJdyIXlShB+SqYypWVP2bH6te5rLNN9OwEivI3VPGn4kP3J33XAsK4NI6+vYZsWAPCInuKEqGAop5oDEHO7cr0M2T1sXAu+MqvMkPC4cfYO55DH8PUBiBFuu9axQ4whsihqI1ybCJkZcR7FcXNb4cJCNYzy+uA8nHdwKbtrJW4vcBVunvzp4Avz+eAo4P26pTKX7gGTgn/AxzQUfvSGq1H8hjQoNcf4AJVrfazRs8LrRqBe728dR8fC6ePL75ygInjXFFfVoN86Bp2/HWser2vXnhd5kSPyrAHJmjN4sPJt++2L7wYYAeYTorWGKWo6aGv9lW5JNT7ElcA6oFpSdaxBby1NXeX11ipUFTkroNqcpBVgo2XVwijByAiIACj+xymlAkySI5ZkZK+vLlgScLRKvZUeUyoFcgLxzsEHwSh4Bx9pAvQh9x9aN5n7JZQK6O+dAk7B48vOodT+OAfhgw1S8hxYucznzu0ixd5lX908e
SSyEuo5QvafZCPYwsmuFmT5rADl5LOrxbaWT6RYXSqcRwoL4DycXzDq/QSUTFlbytlfMhDU8U9WnNAIjIzeKbkPgMbFrbyvwWVgT8/GCcDVRQpmXWNmUrKveaK+pkP6ZoLWLnqbUhi3mBqlUbdNiryWvnq3UBLE5K33065CrRWf3kENRKiP2sQW2ms/NG4+wkLMCrd3uAv8LlHbZr7s5bjONQXxeL+ZMsarZNwUv3MQ9VyKq90kq4jl6nZkO3n+uo08TntNcvuOuFAKrpRNsCIycYAGIiO3GHZnkvWOgqN+YqB9jZGAPYMS0HgTIyno+jod6tjBhgweNxbv4OJKf9dHGi/ybzBLoo5FnSDGFKAdZ0AgIjnXHsNlNzC/IDifgVRmR7wr4yHQJskoy1C38S7VrFQx0jMjXX4FQB8VFZYgcsZ6WABkxHIdcUG6xQ3xbVyaRm0cyUy30YBDJ4cYAZKjbXuX5aUOybspzxqQaLpWB2hLBoInJYumHAV6SWnmg5RfWDga4bRLCh9rQClb0jyrUdQk4BMCNYvYjdmqNDMCCOp/AEZGkhKtj/xDNjvVNLCNC1Oqblyb+r4nUoHiIlqSFZHKlLxe2X8yCJQVuVPwuM9FDEuaXREMWtwhcl85J2I2TKU8VykuSt7kolxdX4FfBT/UhgxGshLh1kv2/96qouA8kosEQsICxBXeL9XQKtCJ1Ry5jPULrlhMfd+7N1rufHwd3fG9Q8pbnNCDmuqiB8GUZKUtsNLhEbM19+QdgU3XPzua+RixItZvvc4aA2buEuWcSDgHn91UhMLEFk5WxGLKxSmHzWhEW8iB+i5zBjVOb8rucJ37Y2jdU6J6PxgoSdEJJEaZpCTDHBM9tzohRX9N/Ts7uuZb5YgCFVN7jiM61IjxtYCIrInDoESCYU47HcpYTtSIdw4hISd6yAr+BgARJ3iq0xPdzoj/EUgegwrDIsCLd0AEWV54PvGuTlseZLxyeXyS82SpK5baIrosMraw/BbgWc4XIxaE96N1NhDZM1xIYdYUuD0N9i0yY1T4e8OqiGuzQNAoKYSOKRml9tVusPx5XTsF5UVe5JA8a0DC1hS2vs0KPkkZWXtpIX2wcg7UAbWIsKadgoePCVfEahkU1LKcaEeuW1qhroHB6GJWTqjKG9PatwCfUSVezYzQZdqDK59KjnEy8HmNtXovg5FrzkNvxY3IjFpSWtaEj0XbMcC4W3zNbR9pko9b2++cica7WuPjvVPAe6eAc/B4lY9Rig76DEhEW2QaTHJ9kusIiOiilXJ92U+4mgFkDWbQwyBocaggZL2Qr/fGgGSlh8J7JL+QO0VIQFyB5Ux+3c4jZZNvAZiqe0mpr7+bSbxkalYZwXbYM0vkM3JSG8oYlTXmeiQ+u2z5XOiUJ7vyfLBrnwMXAKz597diud4Gk/HMijcCIpaLAis5m+8ZIhm8HD35yMeNfORJpOVYpPrW1nXBosoAZ7m+9I1gX7trEItGaU5lTRs+puVK1yh8PI4qfeWW2DfLfYrbpMWKLbv1fJKV1l1hWtSt/pw8WzppAv2mDrqsEVtWXjmmp4BVn5ldV+/FfT50YVZSJAXdURIC71xxw2RDiAe7ew2C1fdEum8JFna6iwAlzvmGvfEgtoSXBU9JT3jeSi7hBJ/f84jrBsg4s9G9becGzbDzNu3YMWJUgT7mguVtXA0tcPJUdmQv9oNBBK1jJi5O28/70afN9spz6rbI73LMfX0ZVHR9l+QlqP2dlGcNSGYyAiMserBq9s37sMog30+XB1hJ8bMCEm+0W7HimvXL4XU0+whlkRW3WRCZ5arVHH9GhzRt7ZexQsnrUsquQRmMMBtSXdiQP8d9D/QDoL6eI+mb31bY84Sb6Jj5QR2ER73O7Rs1kxkXOm51B2OmhF0ssK2FEWncLVIEIuCyu0TK7lopRtRAA19A1J7CFpzLMVC9QqxFgpECXCf9PWOKUnbB4krvawSxM9HhmhX16GoNmuAco
qfqz8XlMTfiMj7NBypaMdDLqytnXhHJFYWZEg6CZ0bDct2UIrMr8W9rm5k0TMBgDJCgh5ovxpXGfab69IdcqdujBQtN2uad9gAVfOxdBxueaopY+rjFIFP2P3C+aVsmY1R3PlFAbhQPwC5c8LUwphwnfRJuuzmxQUpt+0ml5z7xeWyguA52w3KAeDZ5MmknmSTiR0wwsgNQjoiH8MY6KJp1k8yIvv+6Vof81N9HbMitrkfa3dMSvXzErFixHvK3HIfG4ov7owTKmvE44sY+e941sHsOLlsv8m7KlwQgkZlHgApGpFhZkKQVkI9DE3ZqrMJMbLBFuFqGa6Vr71xRpqIrC+nc0rIX23UlDmQz3BbUuB+ca9zDmvNMDGAzVy3pejOan2WQtgYfALKFqk4A1626aVnMSKlYaygVBWyUjEu5or2v7lDyOmJKuMZ2cgq+xo0soQVs10g+2Y/rRgGXW4JzwMVR/Mg5AJeNrJF8bQBKJiqy+qEEuXNV46Y4oepHZo5ou8oqONRYkeCAkKp7losb+X0zM7LmesLF0rkhcWAqgJRiVjY8kvMI4WzeSzlXOFeVR8pww0drgd/iuYZBTU0s6xdo0c+RLlIaBTOSMtNXY28cQgDFi2Ttid23tuDgizJElueLn1t+RxbJvUlYKwj2JEtMCae9HAW8xxzPcQoOp+SxxY0yDcWE6F22ete+tJJPMOvL49LIfehUxiah0GiGeGQIEONnucIIbCnH8Cwy+UQ1MnB64iNypDaSFp1iPUZOhlCvxRer+Ni1bHRcS8pYwuBK1IGRs+YZKMXnAOCyumZ/65jMlHC8BwBsS01PvjVxj/L+RWJPNtp+jTQurhuQPHDd6vt99gv8khlUjkGLeczgP+ueaSDifM2ypdflsaYsy6CHjFK5xZM+lixt8K4Ev3f9lpk5HZupmRGgT8U8Yjz2WNNb3KNm9XBmssUEe5Te39+K7zur3/r7LTLK3sUsoGZGZIrhh+tzYEj8F4EheYkh2ZMvCUAipXUB6F9qttwDKIpxFQFOeKBk16lkW3udsKJzNqGZwlDbIYGJbZmWzIkZsJ/PU1IFy+PyNkKpsYLYgRaMyHFQZ4tixRqoLjZABXlNat7EPui9m5Zsp2xud40ZlPB1WkoWW515cvIODRg5eYfoXFWkYkJEwtU7bAt18nWjitjsTrClhMtW28tuaNwGUjgTluwvH+BqvGX9OmFQ6jbsYuEd4DZyxyoZcTJLojPjFP/tCFIC5PoYwWaxGcgs/ZgtX+XZkH2cXbR05WegnCJfQ3sSec6WXcysI1t2fSruGy5Rn3LhPwab3mc/e+8QcgpRH5kt2U9bKYvyzfrgVmkn+uqWw+tkADOfg7JQUfDydYuFWZWBzLop/N7qmkHDazFAhd5eVsCesWKcevkaY8kmyNdZsqE5m3XRMgtIt9yzpMhMg5Hd0yiIDUcCnaUccdWztmkC9Ms70Cufm6EoHmrHWrO5YZHX4HCNEd55RNEG9gSLibrCZQNRSllpdBRnsvilGCsSACQHl/y+C5dO+WuAEb3tSPZ04n4GFPuqnUdAZ5aKmdbX7xYYmYnFVkgwuRcILt+pJoHI4Fm5dTzS7MWtQeij36ti86zttfsbucw+DQS9yIs8a0AyYkFK/IXBinAxJituIbq2XknMJPgp0BG3A1VbfYkmBpBrlEi3qi4Yd1N1GVCZD+9cYU60O5hmd+r5xxZIvq5yvYmsfjRR8XFtJVYCEYCVlN7XHGj7dlT4kPtk5L5BFk8HDsadWXW9Q9nOuxonIq3Jmyp4eB887rNW/ViC8ALWuGGNwG+9uVLmpyAnE+rP906hWEr5djtHrnxxa12yuNkxpcKmlGt1FLzuIALMC7CoWXFKvQCZqlNeTIxwLhZg4rYV8Au885RSOANWH5FBQO7bfBRXboJrgCkzgZIVkffLq+35qFKsWgDeVcWCGSfO0LZ4ZI9z0rh8SoiePrH4RkE5ZWBz3RLwHvD64kUMSSzWu
xBvdyOQrhSjibtVRNjCXVMDy222mHBi9wkBrmWgO4MSjiuRAGT2HkuxDBh9Fqv6+4Qj4sl1LglDgG+TVQDKbWtnHJoxOCzFwOGgknm0oKTYdvKDpZVWGfBsiazMPWuHVwz1ORf0PC/eVOZm8Uqz5ZcMTso54UGFLyO8o7jFh5wd7z55AL669PmEU3JIFFaOc/DwziMsC1yoYwtdeLSBiQQYAng06YELMCEVIiEbrPJ1sWsrg6SyPlUQxZdf9pExhxwwnWo2QJ4H2Lh0FUYxOddrZmTGiox+m8BCMCJmILgw6sxSZcv2WnJL/ZMjWey64/M+Ymyq556Bjtgs4z+ZZY7H3Bd5kafIswYkLPxSnrJ7T2RfXPFi8wB33VLzXQpbY2v2GQAi+8niXaewy3GF/cFp3xaUjNrcfFfBxMRkCKClwMRMLFcIa+K3fFGtQ/N2EohwUgHLfxfowcgoK5pOxRmcm7qASHAj9R72g6e6Iq01WQescnatmBI+f9lw3SIetojggM9fNvzm+4+ICfj4ObTpXdnFyXsEHzKDkLDG6pKVQDNwfQJqv5DlX98ftPEhKZYiZk0MCVBBCYSCIBgSFzcklwGNXxC8p8cqknuHSw6bo5iYwJBDtUW6pTUMh+rrsj1aEGaB9nK/wGxXjiFByqAs5XostC0XTyOmMhe83MhVhytaF9AbCDi+OgVctog3lw1rnhx1oDswDyydTaZ6P+2qEbzDeREKhUzz6l0xAMhz+eA6UNJlNhPKqcWAsIxAiM6IBhhudeK37gICrK4ojrSNqKpdAvz12Mft79u4Fwdm1Xq6gp4fUkhbUFITBbQi3Xr4uHVdu62VbtjKMtekFI9UTwcBJaZJg+CR+4tept24cM73buVzowHz98E3c0MCsYibp2tzjlxP2e2LWM4FISxtvJr8lDKJH6ljWr6ORO6X/D2mlJ8X+i7BSEopg5R8b2J1+a1zBjPsApTEWM5pJTuxLPZW/+vvWkYghH/rTFTsTinTfZe5zPfv5CxbG+3U9i31ab+PHk+OygmuefaDS+XZlanSddu0m5YGI5aL3LsqLgS48NG6bH3U53uO8iUBSFhKhfCUsG39OlaOrexCHGjKAYOcEcunVNy3OIWp5pelwimlo5UNNsEWAVCEy9UMhIwMJKOJnwNpNRiyxHLNusbU+JMfqVZvu52163Tw/Z51yUofyhXVmxo0NWda2Y4zcrFct4jHRP6wjxvFB0iGhH39H9YILMB1o5iGFKsCn1wq2WV004OrsST6+s2Ch9YyvZ6O3G7vArEoOesNMRDUrw4ETEqtAO4VBhNo40OatnIMlWsZkVHszEwcHydRn6WUmabkSmxWmcC5bZ4BzYaTd/AJWD1bj4HiOnVuJ9TLujWKAYsFTCyr4a3CLIk+5uhZtpZTIHPO1BVTZwG1EmG0GaSqoqqNDg79cyiFmy5ZLADgdOe0Urgx5QFSu+4BNgA55FIiH3sRm0csZWtkGIm+hzIDE/9u1uc5o7kffv85qK6zVIlcu+LM3AblvdHtZaYknFypU8Jswck7XB0971dPqbqXGGpcIUhBXmMe8QLAKVkIvLE7YHbvnLhd6ZZLZqN8T6nE2dUYjwpMUkKug5QKO1JBSipzYzFglblafGdgocCI7Ef+fgQM7olmRHhZ+V5c92h7CUb0Oy/nPtMwmMg4Qf3Lc7Jsq2u2lcfeM07a19YD8iNM3h6z9CIv8jbyrAEJD1ic/aVmrXLddqNYBha22tbUHxE+ObB1VrMfliuKPue43dW9wbL2s+sYuUm4skyLdIuYKfGdaxsHgDZTTW2PJVZgumY/Rik8tcuJjmuRmYR00D3A1nISPQAuvlf8yxWJiVO2i62MrPQGB1wBvH/Z8P5lxevrhv/t/QvWmPCwLrhfQk4dXCejxzXiYY2lyjun8eTjcwA4X793GRT4BJdair1YKjPD4TjDVrmAiRUzu3Ql4bblIpC2ldib4OGDL8xeSq5Yb7fUWpWlkjoDH
7PlUvidSKh9zmCNlifaOTq4kCdIn8BuS1R402XAQQwJ9SdKeuBTIDeuhyVizXE/j2ssrjTEkgS8vjBTshWF5YzjVtM90ekz2dJ9XrypgJdTZVak9GQkJoANJAWM8b2SLqXG49Bn+svKiyeXOIBdSlx5MfaASRFjjJGZnvSZ5e4We6bTisusfexGWLwpfQVoVwCUZZ2UW2lYkdZz7mMuqKozL+3db0v5bIFVVex4+Tm/9JtvLcbBO+EaKt1e2CItz9umBAaALXlcsyWb05xTjFzMMSYOj0sszC8nouD05Ut08KipzYnN5XviGoPETLiZVWdOjTsWtVUAkpydsLpx0X2Wqb4f1th8XnPBY36XOWEJx3yyZ8NVuRDJ709J40t9b6fV5XXSPYtdszQQaZhMN/YEaPpV9V2MqaTTbmIzo6z5Y1/DEYAycguTQK5/RntXLWv5LOvnOyPe2zTzh33OF5nKswYkAL+UXIuDvmur4wiM9Mq364+VmRL6XrSDsocMdhwq87GNqWDpXZ34WxLsiF3VfSR7YETLkYw0Mjak0ObNBJGXIQmQ1AfoSiACyHoxLavBg32lyesxuvSIro/XkNcWU7UIAxUseFdZHf68bgQyrlsqCiy7a8XkKYYh8fVS7vzHNVaXGk/XRCApEVkByr4WQctcasFZASNABSBPESvwPUbArXDhTAwbW3AdhO8HK5ZoNEj92DDokIolb6LTHMvLmF2Ry+td/kJuXPToy3sUsmuOz26Y3rmcFcwBiDnGIN+DCMRA8TIn78sIt4rJlV0TWPGYWan3ZBTw2q2bvIebAiUe9OxyFXseh7xvk2WMLK2miGKN9BslaQH3OWArS1Y9Ip8t8bHZrr33FhMoj6XPFZNIqZ6Pxfeexz/L7XOmAG3Ckt5Y2Q8AEt4ueNdo6iX+x1DqmuD3G5gSeS5ZuwQQMSWeruMagSC2CVuicZMDbVYAi8+Mfsp9Tswo7+YSGUicI4DAxEoSN03eP9nt0tBQ5tLYLquMSGZDgMZNK+XjyIyM/CmZkXrOeu7W7c6OF9Hf+3XtzdNpmY+INDjRb5Hpk8dJ34KTcXrvvH3urxBqIhZ2R+QkE9xOHsJvSXutn1kJ0u3t+359kRf5sOTZA5Lid+pQ/IyB9sXXjMgwA0vRygQgiMSUROewJYoxYHcFeR6t2PcsiFgvAApb9HSbpUiGpglKj6lxcbIy8Fig3MqIxccb9Y8F6vSkIdvEwuxGBRw9+OCkAc5VNiE4odg5HQxev+sgfWp/dR0gJgC4bGTJe1wpOLRkWtsSPvtwxTUmfPZxxecf1+JHCwC//fparGJ3i8d58XhYF5wCxapwYcbHxWdFmX5zMcaQA5UDW3jZopYtl8GB2JD1AnCtESuzFkAZtXSNAE7LKeNJgMyarCXWxOWCicE5rKCJz7uswKeEZD96dCz0blnW5im1ygrdk/Hyso9QXLpzOxWn5dG4Y0oWLbiEKyvKuTuuke4DsyV3iy/xJQxQZr7PIwVSAmPp5sXK5EVZw7XfubSwAsApZdcz78BaxwMiTmLswVZZRUvmKVad6KtqxdXFUfkQFqMhpSQ8cJXZyCum2+/pelznybouc1yKvWtPLO99KgqsDnaW9/roPe4CmQ8YiKxjtgC4rtOKsJWFiVnEa4ny93hARIgEzE7R4eRTYU3eOwVicRIQomR06N7xO13f8doXhbGXBgYBDCzw0bQ1v9O6aG5lQSjBDKeGl8wIs50czK5ZrhEQsVy0ZkBEL5fARN4Dfd91VkJrTvK+Mie8XBvlyvnLwEjrmP3dysDYg46OVRHMVN1mpuv07BKzeDWWJHZ9PJPg23pGL/Iit8izBiRR0IPsqsAvwxVVyZc0qBTrxZGghDNgSfepUUasun+v2FtKPy23zi18Q2PvOz5iSqSScgsz2IKMOpFIsdzLJBiR16G7lBWoXlHv3ZsoqLmmlz356lpguRXIoGua8MgtgCY9cvfYnMNli0WxfX3dmnTP1y3i/
cuGxzXi9XXDb78mSMvKpbSkU9BywOU+4rx4XLeE9050LdcY6PoiMRAn7/HeKTPDpwBEV7IssaGLrL8rZcXizFolqD0JcBHbfP+c2YY7xPL/ThEOPrtwrVSbxC8EYDwVS6NHl1gbCRq0yL7XT572Y2YlBeiBCCslQOtadJQY4ndZJowoLjuOEBZljaKJ/JQ8gk8ILiFkxuS80P2sge+uxJiMAjpvEdq3ghLpqmP5orOCs8WIU3C570iZ9JHck9jYAqC4c5XzNYaQXmnX1lnvXWGTTsHjlKgIJZ27AhMkek5jIjZEFsLsxXVjmRa5r6XHpyRIu7eULb//HGdggZEjgc6buFe6voy0pGsmzAJOI/aNatjY1npLYkrgYpvYgC1u9Jw7hy24DMAT7vIybtPd4pEyMDn5fG95/HXIMW8166EDusKF8t3ma2R9WcaQaBCSUFn061bnGv7O7r8cl1gBSGXhR2BkBET0d/o9f78lOzVy2ZKyB0aafV3rlqylzGMuz78RAKe19iigRLomAq2xU1eo5/VSrP6R78QIfBxhFmfubu+cvFRqfyflWQMSlpjECx1bYHILGBkdk44DNA4osflopLNYpPb8FXhUyrW0y7WWFCsobtR2Hci9J00QoQBNZb3RZimlHarPWzbEw3uH++AJjATPyYeKfzMrZRJ8NP7P2qok2QNull+QctBnShS8vaJVfLmPoqsWuutGk+CWUqOQ7lnSSv/lWXptTFyUpnZLqcRLaGbBqUlp5q5lptvUv723gYnals/CmW6OSsoKKu/CrWdLKX2v244ACmCDET3H0btRA9y5kjutJKWMrI88IecUwU5s4l05ht+INQmxTtyXNRYlU0+4kuEYKTtaUQVQUsCeFwDg7zv3xbeuPj671XCNktYVshpbAJhGAfmual/2EF0pxgjk+iL52NFlt7jUukzCsYJpg5IjIIK38W4MQC1FtjAdQtmSyUnYSMIZmfidlK5aUkYKmSVyPd9ffb878e2+UqTC1roKhm47ACUujQwhbSpzGbMgrfC1L9s56JoVbcoUSUYITgEefAb0CZAmB82QyPf3KBAhcJiauA+eDyVDAtR7yaCD7ydfx63A0mJGZso0fbZgZFRxndtE723tq02+w0BOkgNoN+yRjLwQpLGhMlAtGLGAyAxY8KcdC9LH58xEP9sv8iJPkWcPSPhd0e/MKLc8T8wzqRmoAGZKQua2i6sP++J3+4o2iIFjJJLOBdBZUY5mn7JYE22t0xXq64TeupjJfhi1V05cp6W2ldvJQOS9E02k51ADLZn5YOAiwQf7tQdEIF6JPbjmAG+uWs5uTdQxpGiHM9JyBx8WINxjjdlNSylC3iMHCEd8/rJh3RIec+yITGdoDcRSCQ2RrXtkzca6kbIXCBQhpxSWCRZcvq8NOFH1RTQ7MgIZjZvWERSawUhMWUlAVd5m7AhQJ96UbADO+0tgYoEP3rY/Pq9LbXKlDEpCjn3gIHegTulF4fHV2uoduYDw8x8TsOUaJtdI9+UUHOKZFKU3V4oXen1xGYxU2/Cby4YlsxwjBVZOwjUOICH4WKyurOhcxPZNPYMMYJABQUa69T0X52SXJGkl5eVa5DjCCuzJe3iHwlpKN0pWeCm7XOEi87Ob2/YWYuk2MpidL8Gqd1Qt6VWpZUWNa1LsuWrxsWcWX4uxaNysFDOima+6X13WuO0ZyrPeTwdPn0IfOA3ABCOc6IXjSurwEBGTwwm+OMY55GK+qFkCAeSUwfn+qDl2lFFLAhAGkZIN4ftJbRSsSbmv9R7W7xWMXIQ70ciaf8Q9S0rb775ZZmXW0lLfufzOFldRujfXLWZWsjKdMkGNjoGSXgi63op852tAvzR89mCj7xM74J9ByAzYta6GqY/nzMvj8u4DEucD3EfMWHzU53uO8uwBydvKXkAYWz+2rI1JMDNLdXsk00Qf72HTvPQ9b3OD3/IeGOFK6rP9yv5CsZef8jokGCmKjWcrH/kvcz58B
iEcK3JiIOKAUhRwW6kOBxcI3C7gWh0u5+hMKZRYChcXYhMCTa7BOyy53S4CzmWg6WgQX9mPma1ZjaU7NFYiaUGbWkiFeMcxI9Itgv21lXvbXuVkJS6puA8dX2IsjwmCIbnpdNV6bwQJWJl2jgCRo1LjDwRgEddC/ZxyEVFya+K+164PIe908g5X9qXvFBBy9bIsg5bVXE7UMh5BKqLSFWR03OBScccpY4tO7auAyCiVbQm85jTPuScRkzg+tcmnhOByNkEfgJzAg91RE1Ek2cCbmnsyEx0LMhPZJRXYVkVrNE5Zwc/SVQtoFbJ5UPl+fMFRdz6dLEH/ltvp/SQQ4SxOp7wdx+TNsjgBykMgkjLM9blOaG30SSrL/K6J9RKIACjZspp1GLMhpR2oc2YUz7Fkt2bMCLBv8a/L5+Np3+++W350nK/X58REyWv0slqHCQA2pUQwwObv9nnydwVGjoAJC8hZHgF77KEUPQ6+yIs8VZ41IDn6/MsxxRq8LZ/sUaD5LSntZkBnBDYsH1QNRmbBlHJcsVzH+BqsCWI2EFpt12l7pZX1FLIVNm+3hLr+HEgxPzPrE1e4a2Y/tmuNp4grEPMygACJzCLlPBDOFFPB1ctzITAA+B/OHtgueMQJ/9/XK9W8CB6nAHz28ZrjSWLxV/743YLf/R5FIUh/XPksjFI9stX543cLTp6CSb/sjgqQ3WVL9DnQX3AOC4jlcdcHur4MwMxgdhYNNrgPJBgJZ2JPljMt8wTSaKJrWQh9m/dABdDpx00wK3D8nWzOa7wnOl6Iju1KEDXjDEvBl0YG712Z9Nldj6tfc3wJHZs+rUrDVvGv5nyKHdFuJBxLssVNgR5XiuFtMeGS/TzYxctShvaswrpNVjDuKW9fY54crvy5pfwux5woADhxFifnikLskHYD36lfx+v4tvMmfB2WhV0HP2tXLVnJ26pPcdT1hMUGjRtkILpUYvk+StG/y30tYILvTTu2c+0jzkK4hDZTkzyGFj2ndHNbzFZ75+By3EgCqGAqcryQYEcAgyFBBcEcpC7vl8y+WFzGNgkue1ekETNy2SorwvfkUcV63cKK7AERiw3Rv8s8udF925jh43o84n2i3+1xpoWKJRAr70NvfNAprGefeyDk1jFFGmP0emJInkF6W/dFSPu751L9Is8bkIxkz6ihB4q9eUqyJHoQ2cuQNTq/rsVhLZdyBIzoNllAhNcXC5RYRvv0x7LAm3TLqtY7BhzEIniHBnzU9ZkNSTmge70UIMIsCFcoL1XKM0jBupaKp8nFAkZoQQRE7Vr3+D7c5Qu4+9jvwRKoCKIsCPUgctwD5LP93qnSqtJipycVeV+CcwVwnbzD/eJxv1CsDAfnEyPEDBHa68zgi1mfXWEgokVk3EocwO6oBkkUYCQJC2Q5JH+mHmRQX9DnZoAUC1DcKiM1Qj7ufMUcF8LXwhKcTotLzz49s/k+8vvFRktH7lvXrXWVYZmBEemeBbSMiFyvt5WMCa+TCq08x4hRWQ3lQT+boyB6CfNiisU9a0XCFhxl9opsyXXAgpzQA2A3suBdeWhKaM/OeKofkxEQAdCBEWZzZdyI/KzXM1eqjshsv3ZdufJuOyvwXQIRadQA0IAQZj1qxsHxvDCSGZNeAtIN5ismWmbdSys+hF2HZPpeXTBXx0QAMMEIMyNWmmbAfuafeo9l4gDL3U6zI937m1I33nAAG7OSmq0sREnu6yvG0jIlfK22e9YMiOjl9bvtgnqUEbHct9bcN7d4cbzIi0j5kgEkeyBkVpxIDr5WGl9ex6DkqMysWCNWZG/CGWXZkuupvXZmr4byNS7Fu32A5vOgw2DkvqS8pQl0EWzJ4mnQOheGhJRxt60EQrYLAY71QrET7J4lQYZkCxay+KflnhTu0x0p3+GMFE6kqKeYCxN7YLkHUsSXn09kvcvpfxf/Cu+dAq5bwucvK64x4T5QUTHJALV9005A3H8VnAGvlpCvPwelOuA+f56DI
+CxrXCXNxWASWaEr1mLyLJV4kfCUlgQZkrglxJXgwxGttgCkYTKhPBvq4iZxXxIf3+gKidAm4FLW8+PpH19m3mMYrwoEx7XKIkxlUD3+ju79bgEwOXAd7JuNr77uaF3i8cj14HAgJFRir+UEYCxGBPe/6KObR3L+q33seJVAGBb+B76bM0FTp7cSE7JY80Ae0uuKF2cKa9m6JJuRbTMGhqtFkoAAqABIfxbJwTZxHjGIhNyWF0xcpOS7WiXF8jbtPOoWJbjkQsWgw2ZFp1BiIzrAeaAxIpZlKLdgJklgU/AVsEl3ztO+7ulfi4oRgkFRIAaB6JBZIm55HuugEh7jD61L5/Peue0QYCWV6BxJHuZxSRaKb3N91AcbsYolN+KGaNtut3EMenzaIzIUSCil43aPxPL6ML34u45MCQv8k7KswckDbDYQyXGPo17h6GM7wGA3XN1gxK3wZ5snmpdmGX2siyIenKfxdLIPiiZtIIvmbPuFrLqnYXyc87gpCxDhLu+ITZgfawgRDIjXHU8F5pgJoSlWP+Xe6S7jyH5Ben0CgiL2MYTqKHGIvk7uBTxsfiG1p/JxevLzgGfeBWwxoTPPm5YY43rICBVrZPylkgGgVkHgAZjJ66XXbQkAMN1ba+XY2SOAJH8PfnQAZHEwXIZjKTMjMTElrYKUE1QIlww2OVFumUQYBkHp+rMdtKVr1h+i6vPGKxIy6xzvd15xB8F76qxOgAxUvrfq4sNMIF3RXFCTg0MeGxxw8lTJezy/C6hpAPmCVZm3mrOjbHCy58WU9LuV4991G/9iJBC7MV3CtBfvMPFx+JGdA6puHMF5+BXru1S3+9TTJUNjb4B4oCtcAE9G2cBELmdVcPJzD5kjNXW9QNjNxNbWmDC+89Eg0157sIqi3gQ7lt2x+KU6Az8uG8BFHc587yyL0Pfl1IYWG6J4q1kVjUADbik7dtrH7nSjYBI2T/1maLMsUSBkZGLpJZRDMMsfbIVxzOL7bFE1xqy2jA79y3nGxkjLAbpKBCZAZKRS5a1TDO6eAaA5CWo/d2UZw9I3laOMB5t1i3bymsBlxEYqb8/WDBSf9vbzTIpzaT3X87tNWIpKjtSfwcHOAYfca3K+HatcSFXsgunuCHFDc4HJHZhyi+yC4GiB5L6Y83G+6ZmR8r1Oqgmh1D644qTX5CCw9UB75081i2VuibM7LBFU9+VTdTt4DrIXKvjXBQMVDbk+tjGxUggwrVHlHSpfpkZ8SJexC+VLck+seyqFTNzwYoCgwpqfwtEdNacWcYcmemIj1U7pgUjXPOCwO5mWtSdOx4oXV4P8R7yI92AEvblzmlqr0BZx/VKvLC4B0/ufNw2/iNQQjsueaI9YgkFanrY0jXKxWHknqWXjWS2Td8eyvJl7VPiWzxbsz2iTzh5LyzYsaQJBkhZjkwZ58KxzFCZbdVs8wSI6P1uGbOkC00J6k99vx8HJ00Ek33OQWam7kj87DvJmEAwJRWM6MKxGiiwbJHTziaEDEY8HGKcG5ikkOegcD8yLrNJLnAQjPD2dI6eFaljEf/uwciemMrwjaLv2yzN70ikgm+lIdbn4rHhNpA8BhAWEGk/e/esp7hpHV33UhjxRZ4qzxqQsBXmyPMvwcJoH2nRAVoKXNYCqH7pVbwTk59aNwpKH4GRWbCb/t6233bTOio8iUmmSPsx3weKG7lbPO6XgMVnJdwD94Eqk58lELm8rqzI9QEubnBXWobLI9J6RVpz9iwRQ5F8KEFnbFlIy4ksG0BmA5gJWUg5j46+n85NmlvvUWJNCiDABfcA7r3Hx9+75zxMNW5lzUBpi12NkOQ5sxfo4LECI3e9FuDh1msLOlTAenfcQh0sBKyEK1ZaTiW9cQEiHLguAtsZiFwjsThXtjKmWjBSsyAJKLVYYqzZcdgX/HGNBYzoCspSGJSevG8svzLhgUyGIP3mF89Vovs0plyzJYDAk/co2YBCflYTOFjdiWc3Z0faEjav6pXES
sFs2c1rS8QmsNIoFdjLujVpoYF+ItcZnLSiEHybhctSfmZMi3UOS0rdHi9dyaIAW75bzyzQudS+iLjPy+4CLVu9L8G6J5/MxBajbIA9SyLWxX5smyXiYKExl5X9CkR8yPtHBgH5WAIwyXt4hq2gtffPN+uk6HumY3fYsFHAR34/pLurdHuVhWP5Oi2wfvI1eS83KyU0xgdpUJD9K78zW5KP0J2Ht5X3RKakBQz2ymBENnNZC3iOKMoWs/D0WJJ6LPneWJ+6XRqI2PEcFqvav39WO7RYMTR2e8bMzS1MTm3vMSanGJqeAUNC1tSPujDiM+iXL7I8a0DyNiJBiXw3LTBS9snLyPKbDoEclrcBI7x8BkSeKjoPem1ve+6S0lek82WgUtL5+joZh5xFCjlYm9yWmCG5Il0vSJcHpPUKxA3pKjznfQC2jVy2fECKkdiPFUgL4FPKFcgBxLXGMPgFKQd8J2TXj6Kc5sEgVlDg4kbK/LbC5diTUgdE1jyRDIYAAFJK4L1gP4obWtzIDU0FrTe97kMN1ldB68VVrWFKlpYpQXXPSnliZ8WEGRGODaHJswUiOihVZjOSQKSAk623Xp+Cg49kbefq0Vg8EFMpxscV06WbT3SVaUK+jlEsFdUiSdWpJuMKmYGL+pBcjraY4HMhRdnrnH2L01AD+6mAzwsyOGmVDJ7cObBTL7d+a3DC+1uKwp4iIo/Jy7US0bqK9cHYJU4mf54XTxm3nMPVsfsNMSQUf0PAjpzgGTXkLcr4KK5dXdas6OqtrMiWBPNRjCqCsYnGw6R0A21tlyLv62y7UfxBOaVjgILy3HnxyWCEwHkFIvIwPD3ULnLlfUlIJcus8w7llgGAT1PmRC6ztrGMXTIxigYj8pgzMCLPv6csc99aoPxt3RyPghFL7GD7WRxSff8+CJakXWZnznpqvMgeGOniZLyfusq9yIvM5EsakOzFflwPvJjWJuTe0bImJK3So+uWSBct+r17+iJH4kCkSKDRpS7OygLvP9q2tWb7zqJ38pSSkpkRSm+bXZUurwmIrA+kpK+PcCsxJPHNFwiEXB4IiMRY3LOcDwQafK4qzsAk96pbWS0iRd1tl1x75EwsQjgDgRiT4MXjrYLGXdwIdADwwvVLulIBAmhIYbAgYz/Wla4pbsB6JdezlfKocExM2T0DrdzJdM0+AMuJPnkdx4fIrFk+lFgRjiFZUwUerBgwG7LGVNiSSw7of1i3DCyAx3UTIKRNn8rfORvZZavFI2UqSRZpZefK0pwkgJUt71CWlc9E8Rwn75F8VrqiK3VWHIMI9OJpA6oinwiYeEeK2RZ5wvS45mIMp+DgkwMQEXLQPzOCj87hlBKunjJPxZRK5fU3lw1rBiPB0/28iPSjM7bkqAsKy0hBORLHIPffYg0wtdxbZKavsv/iG2aAjhVySlOKt9FxBqxgM+PFLly2k9PTAIinATdLAteVQUwlMN/ne8ZVsmtsBCvB9XgncB+N29GwCRPFbhSPwH1S46l8+eSYEc2QcPFYBiLOtc89s4Ul7EoAP2YNU6qsIcF8AlTRZZYpugYYaLESd7Rp4QcxPbk/CmuyA0R0sLaWkXvdBxljNTrvLXIUsGrhgqnWe6vbsRdDMmNFjsoIfIy/95nKAOA8i9R/kReZyJcsINkLAgRaxkMvs1iT9jjZurvVIkeyGFJhX2JCMJDHLWBEirRU8fFHPpu8XDMgxe9YgRItsq6IVCK9Q6mpcfIOd0tO5+uyq9N6gbu+gUuRsknFlYDJ5RExAxHEDenxgRT4uBXH8pSVPbecRWeFwi6k7NoV8/Yuu3HhdAbiPRBWpOUObgEp7Mhsj2RGUsptygzG+khMiEi9y4wNMzgsJa4lt4OZDwYfDEbkNYkbArecSb3mdi8nur4CUDa4ZWmyaiEsNYOWiBVhFy3tlrVFdtlKuMYKUl5fqO7KwxpxjcR2VMYjA5IYCzPC4IOrlF/WWKqYW0q2jL1gK/tddgF6dQoFl
FxjDY724uWJMQJ5+8p1UI2EvEn5cOr99ikVYLJli7hzCVyrJCaymJ/Ysg9fgt01MLl6V+oheEe1OYJ3uORgcKl0WJO+Vlrln7WN7L+3kVEsi7W+MiYtW8IxJY/iWnnfzSMDNQClijzFl0gS4hSqsrnnurV7TXn3LYlnJQLILnoxW/NPYGYtp4ROCSFfJ4139pxwkpmSJkDJAi56e3l8ncaXXBmr+2IB6pkt5LpNDO4YiPBzPnw0JLte2sdxYQRH2K2rEHu+BSV0Lf2hLaaDt525aB1xz5LHtcYRSym35CnvzFFF/anvIxtBCHD0AGV0bRp8zcaWGSsy+26d125/+30PiDQuis/GZesjbucLc7Qrzx6Q8HtiVqsV751+t5v4EOPFZ+Xkbc6vhYMO6XudbOXkreNQZhloZu1/G5GsSKm8nl0JOGBdAhIucOguDyjxIuuluGpRDEVC1Eq693nCFKL9OpkxyMq8YxZhyY+uVNwz8+Gub4DtCqwXnDh2JBdXLGmFc/tKoLlyrSrsBse3AA0AoXXXAjzSmgHM9Vq3layPvDYf4KLPasKp9AVfI/xCjIhfIDNqJefod17HLmmFHRFsiGRILhuBjNfXDVtKuSAkuWE9rFthQa4xZtARCxtgxU9odkQGhLbuQaF850kaiDglsmhfIymSm8/F1Dw953FLOAXfBb2nDEy4qjTQAxOgfXeZOSmKMbtveZR0wOQPnxAdPY3eu1KnJDgHhOwSlkFWtYbS9Ujh6x3FehwBMkdcI6x1jXVe3ZNZbIoFTMztXMJ1A6JP8M7n+5U721cPKWx1PDwSWP0UCXqszGwJiSvrmTU54i3uO0qnxmYwcKFz5ufQuLYme2IBIjm1LxeLDYI9ye6uDtVFawZGdkh/2h7ElviUQLeH4rMW76j2yBbre4CeMdfudKN4nhkYqX3Vu2fJdSM5Cgae4u40igk5ekzN3GwxDeI++mxttP9YoZ+d32KLaFl7Ht22W/poxkZZ1exl+7kP0ofMYL3Il648a0Aic7cDfdxDKzUNocWCzN4hy6fWAh56GceZ0EBO+1+3WLfTg0uZYC33ATkRjAfzWTzKLA5F10VhSx65FYRS3O9Vrqnx6uQJiCDCXb5Ayv36ULJpMRDB9UKMBgevA5VxyGDD4VSXGUGA7nxPQOTVlyGFc0l9K7d325VYmLgirA/D/mncrfgvu1tFjmdh4MSxLexSJsFHjBQHo5Y1bEp2N0sCUFF5X5Vy0IcMts5UZ4WD9JcTBbE7TzVWPAW1b/DFBSumhFVkwLpslSG5bsSGPGaw8foacd0iHjNAeVgjHjLYeH2hzzeXtbgnvblshRmxrPxAVX7lHwdG3y0er86hTOB3i8fH75eabcg5bIIqjMLMzu8Ou7FQP5ELikfRpQowsaRR5LIy5gXw4WBfVsa8I9e0sCVcXeuiwgpkqTgdmTWpQe/cH0AbDyI/dT+aqWqdPeHL/raWaeXA2la2x5aYXd1qJfmy35KaY2Kj+iUcOxTz8hNVhTuUbGR03RZjW+6TYEpCfTQyoKx9WgHGfkOs85lxhGLR7N7x4RiESPctcnellOnn4OHy+uArENkDIV5ck16fkOcQAC5n0fPJFdBwCh4+puLGSNeVmmvWQKR12arXvm7t9pIZGVUWB46DgD236yOZKfk6QujnvyMK+956Tp0dxHttJUTYO/Zehi+5flVjCgkDhsqa3Mry6PiPGVtiJ89wWLZ3nwlwIXRlBT6Kc77IXJ41IBnJkbohR9kP3m6mzEtQxPtpkYxISSOsGiGzeHXtjakfTMXPW9iWPZFsj7Te0e9q4QsQbk7ZBYq/MxNRFXwRR8FpfMU5Ry2jeAvKiJFETEUBFjz28eE5PgR27EZJkytqnkgw0YCRbWvASHHLipH2E/EiEPEiSZkDy3XGDWBGpO1w+lyqa1ZzneymlQPbU2ZDSjrfPPGXv+yCdd0IpDxsETG2AenMiFTwsWZAsuF1BiLvP1AczWXlDFyGV
T85hNgXFDzngoI8WUtmhRV86kqZAQvFTSpG8mQDSsRAjlegugmkuKHEE43iFfR9sFwofGZGWKH1noGOfK/re6Un6TZg/LhYCTQ4IBzIY85AYbIACv+WYGRkedUuHTOrrJWmlB/xk8/HYrYENU2wlY1Qij7lpp4L7o+6Xu3v2zGUx+tq8Knbzlxbh4lEgjx3KocsbRvsJ8dPHje9Z/csLipL+xMD2PYFfz/ChlhSWEE4JMeuW5QIIoHWSTdGNUx2rlyNN4FgReT2lpvWU8GIdT/epvp3rZSeurlwBtT3WMmWYbyNiZAyS4Sghc8pK6Nb22hgMcv21S5zw98aPI2MJNtb3KsX+Z0tzxqQyJztQDt5jShoFgk2muXiGDH2KXDl/hKInMR+o9iNTQzWV1Q2pJ6Qj21Y60TsiGm5U+eesS3cFksYdPgcL8JFDl8tFLj+sRMtC/ECd3mozMQmCh7GtQ3yjrFlBfRAGAWIMNyc/N2roqC7lIDUZ6ySgefx8U3LWohzSmYi8fkkAAEqEyJZEQE6uuPnfTsgwsF9nMI4u5u504liSJYT3N09/P3H4E5nxOWewMj5VVt93udikACuMceJpJQZEhRFn92grlvE5y8brlvE6+uWGZLsqpXdtN5caPn7D1dc1ojPP1RA8v7jSrVGHlekSMpHEs+c86RgOe/Impu/n0+tBYjjLuhzK7/vlwBfGAi23NIdKe+IR3b9qRbGJSu5zmWv+FxnRBdMvBEX7DCOAMeEceYmy0J4WVs25FAmoAwe5XghFWedyc9iooA+Za8819DyrADUTBGS2/H9LDPHShZZHykbl3f5WFvrviRFn6oZc127zba1RpWRaOBIxzC0fLX9Xq2oWWyFJbqiOru+ApQWHUAZV52r80ZwlRl5qj7XXAqDM+TxPBsxHIPvhMKUABHXDbScrzGNA9cl6LCACLOIVqyN9cy18yo/t+iWPUWadzu259HvvRXrOZNm+zC+Tut3d6wDYEaDnhDHY05/vmMsDYtma0bgo+g/nNDCOSzLM2ACZPKYj/KcLzKVZw1IpPUJ6CcTSfFvIw0cY3cttsDxen7HLWuWBYZGwESKVGHrMVoLlTzGLsuhDSE7A10UF6XdtkKeNBdPhQJPIVtfEammyPpI6Xxz4b9S5HBdhXuWAQj0ixk3pJjX4wTOssXbFraARafi5WVcpZ3Bwnpt0wkDtao5MmMi0/FyYH1slxWGZKsABQrIaDAiz8HX4ji1L4MzdtU6ZeCR3dEKGPEBKQe0bwmVDRGMSETKQe00IT2sBDoey2d10Xp9Jeajgo8V7z+sWDMbclkj3lw3XB5WxC1ivW6kgKwtGAGAsBAYCYtHSg4uOlwzUOHsUzLeAqiTZMwKjmZKwO+bemaZNSlHSpkZcbXKdHOPDb6NrcYjqRnKxhsFpcToWBHLRWsmwbsCSqRMrfkKlCwCmLTrxgrIUXcO7Yuu1wFAcAmIDhtSji2RBWTz/ZbGmuLCWhXRUWKQuk/bRzNwsuvio8ZpbtNo/xOMMXen3yTYkXVa2GWLwYgMXuexFmjdsVjikENumyT3DAKsO/DzlgrrnVIqTImuQ3IkcB1AA0bku/EUFy2u18Lf6Rra58CS0WtW0j+XA8j29EBnFhN0RBomRikj26Bo6NuIZmetmJZRLNvRGkgjl62SsjrfL46T8g5IT83Y8yK/4+VZA5IZGLGErZ011oxeVttfucwOtKliSyQ7Uj/l/r117SiY4Fz+ZfGOYtOuVorD1leQlyL7j10KStClp4rjMoA9pLWNE+FaIyKDVcIgkFu6TOl2MEPCbZUF/2T63lJ08LFxyeJju5QKy8IuV21n1d/FBUv2nqoOT03xQMxAKseAIG7EeMScbFMzNnwMDsYPAe58L5iRV3DLCf7Vx5ACxYqk5dQwIynk76iuWGvk+iA1k9YaKY6E0/lWl6yExxxHct1SCVh/c9lwySwJx468/7DiukWsl42YkZRwedyQDHaEQImHy5NsW
DxccLTdwJImP/UkT1mSXPmOSNXVoyPfd7Y+1jSuLitSNSXwTHgLdhfj83DVaatGRlWy2s+mdoLxXh4BJVJxYFBCF6a2c318jmRHOINZC0j8LtAIPry1xbZcX67wHpxrgAkLpeSVY1AFH1d1Hu+qwrwWF7m8TljTq1uqaO+O+1TdTpwL7Rwi10s5wXXPyEis45eMW8FnYFDT+uqMcUfBSERqthWvXiM8Z3lQvR/nqvtWJkuKy6IWfu5lKnDJiPA2dP4+na/ZP01ft+NB8D0Q4c2750Scx7I7c3Y17hs+ZpmDFUCwAIrdfnu5ZFyCVcQSrgvsn4nVh1YMTG1X70LG30eyB0D0b3m/+F7J5A38O75kk3qRJ8qzBiQWPc7SWGu0dYYHvQMopmRrEWyJVVtk71A8uPN3S6S7gnZVGO6jjmm6GEgjEQ/w2hKYwch98NmlBrhfyJp3v2SXLUT4hy+A0/i66yN9l8yIVug5G5ZfEL3IjKUYj8SMRwEhInAdIMU/MzK4PGD9//1mAwLcq4/B33+M0gqrbF6zNLztbwlCQreMz+fOyqXLEh8oEB85hbH3cKdzSffr7j9GIGq5I3es5YR0eg9wHvH8Clz48JqDUq+RgJ4MWqe6IglvrhmEZBet6xbx/mVDTMSKPGQg8vmHFVtMJXD99WUrzMjj44r1umG9bgWIrNetuGxRd1U3LZzJTQeIGaBEhIVS6YZTfbZqPIOHBCk+W9hYGIiUZ5UoEAApu5OgseLKwopHpcZstIoULauFIXlbthB3xSPjvK6IrtAuZepvHtvt+JNBh/y841ovvqZZ1jEj3fUfdNHaE+3CpY83iwnQLjk+SgW0BRoNgBVAhcfgNkalHS81yLAK01rgobSz9yqdity+VdxqwU8JRIK6RmJKxvdED//O9aBk2LaMeX2imJJEpFZZHjzFtMhYEgbl1y2WFNgWG7JXU8RSjEdARIIQbbywEtdAAbL2mlu2httSQYqaVw8yGCM2rRg7ZxLmbI4UvR33cwfGQr2OWTxM04zBu9+7Vsp3WoF4BUL4nQzOIZ2eASB5cdl6J+VZAxIto8xUUm6dh6Oc9IxJB8Cw1givOwJG9Dm5nTowXQpR0uNB7ojwZC2zatGEyZmQslLpQGwIFw3M3512neKAdS5mKAOzB1XOrarkbYcIFy3niW04nVtAosBE1yUzjYLbLGNc1HUAqDM2AxNR0FGL47gRn9taUvrmz9OZAtSlm5YK2o8pV11P9CynVJmSlF0iUkIOXo85kL0qENeNXbuqEs0pfNfyO+KyRWw58J3jRTQzMuw6L+JJvGAsDat+d0vY0h1RU+4mYa0VoKTfOU/MxrqZMjmrqaCzCcntRrIHTiwZuVqMrJO6H5kZsf5GmXpGAbDyOo6IDnC3Avo14NrQnzvmGBNaJ54FV4EGu3Jt+R5HzyBW1H8SLl7e1bgbdntrWWsbjNTl7XZSeBgYZym0QFcuS+i4hRV80Od8Mpq5aaUkQEmq7l68hz6y1WyuUtKdl91CNWhvWBI064C3AyMs8jHRIKUsN8G8vg5jG07dLIE5GxxdH1NyVBqmbkfB4No4vewoJhowCSujdH0FenDSnH/EJFrP/MB9bgZGXuRF3ka+JACJ5XJR19nsiJ6sRgHjwdVjaArYi3VdZC2Uy0dMzb4szbghJkR5jrJaDzJF0UqQA9qev7Vkde6WgOCA905BVGOnweW9E6WlPLsI9/gFctO6vga2lb5zJi0GC6I2SNLuVlxHgwv9IYMVXhdOJfsVACqseH0oRQzLlfoF6eThP/HJvCA2n25dkdiNKoTOLcsKmjfBw/m+xnmwq9lAnDGRNcAq90EBHc5TVXnun5xVK53uKV7ELSUmRFZZ59S+XPzwzbohRuDzl7UErb/JcSKvr7WuSHXLutK6C7lrvblseJMD19fLhm2LWK+xsCLOkxsWAw7qUvYVdvA5jsSHzK6dyFL/8fuluBS9Ogecl9C4GBFYAoJL2ZLpEVOs7
iVOPKOxnxCB+i4crcHTWyBbQALUCtPSRYWtwpEVMcGQzALXLYV9xiJYx9AgBKC4HGJGfBPUrkGftpaKMqNl2ez3yPdcrptto2UUJDv6Xj4Fk2a6icTsJuKSUIpSBxA0k1183n09R2Erdq5ldNVyv2LDchWIeIcSM2KBEQ1A5CNrzRu8nl0btzQPiJfHzzkjOoOWTO0b8/jDhVKZIZTb3QrGpQQFALWiy8t0jS/z1eHxwLgWX8BTW5umtl+B5QPAhJ9Buo4WZMm2WnKkGCe5eMl1rrm2k5f7um7fkYzc0uzMZnmdwVbxulOZD4Q3xzOo1O68J1fsj/icLzKXZw1I2KJryYyJ8MYAUqYZMdrxsbX1wRoQ2TLXWldVe5PVrraqO6f9nAdu8oWQJZAnYekruyel+KHngEuPxdMkwYwIx5AwAGFmhDJdxRaMlMsRsR/Nd9cCEcGUpHAqynhhXbZrex59bAY6JXYlpyAOAcn7yp7I7yPhgPtTLbzo715Rm7PrVGm3IcMnrQMkGfhw+2WK35ALIea0vkn4aydAKMKoYCUrCFx1fd2E8hBldfW2jkgteEjgY9siUkqlm+U75cTDzkpUjSNpM25pBZqU51As91oJr1m26sRaYjo4l++AIZklqRgpFHKXPm2pHbhbgYl9riOxGLO0wDOfbQtwSDAi+9WK2dFtOCI6reho/d7+uh16uTy2lfZUgzIS3wXPc1X4k/ccLAFOeW65M/GYL9kQ7Vq1N3zyan7MrO25yRqMHJUZGJHLytyR3besNj3R8F+AeH0X0gcCRKTM4jY0GNHzrj3PVUAlPRtqcpo+DoNB2R5bcijGZAaaTJlsqKcb0efN9R2UMTtibCsWWgxJ1aHaY1Na65ua9SIvUuRZAxIpMyAAKFpVvUT8nY4hJs7gOiWGt9MuWBxs2bRDsSKjLD4y64x0RdhPdQk5IzVsjjwvX79M0XeXix2+dwolte85Z9F6tWSl0ieK27g+AtsFLgetl4xWElRIVyx2PcrfTSXcOWAhtyWEM9aU9QlPQ/RyiogMNnLmrFK1XJ4rB9Sn9QK3PgJuhb8D0nImt6vlBCeybTmZ9YuzXnFcx/meMl45Ryl4ZVstZmcAUIro7RiY5WOVa/FLBh10mZzW97rVoofMkHDwOmfN4jgRrrou64tUNmQt3xmIvMlpgWVKX+cBHx2Wc+/r6h27ZdGks5wCXP48n4gBYWbkvXMozMircyiZoLQVv7wLW1Us5SxMISpJpNRWVv2dd360LdACEuv95NSl1/y8aHZEumptGfxJuSVglD+tYoYViNA9OXMqbum2pdxgaN98TaJeSONe02QcmtdRsWJirKB9q9bBnsxqIci+WJq+8AVIbImvP5KBBeT+1473lR2h75Uh4YxXQM9mjKRcct7GzOpmKJqV0UAp6Mljv8WYJP3Miu/ca8SK0L5JxE3oo5Vz82d3tuqmK5MGWG5Ts8DqkcxctaQlXjJg7X7q9+hckbdTDAJ3WGZEynwvlmtQsnctUqx+0rGmWgg0q+OIG6MzCLasCG9Tf8+YkZEcZUz0Nc10kw8Iq77I70B59oBEUszytxTL1YPjJqR41EJb9TitVt9ZVhl0QA8UVht7f3PLvxqcRN7Tdw2m2kJPtW25IeWc+rA8KZ+8x33wGZjQQHq3OJw9V2anauzu8oaU/Fz0UDMiSQWpNwyAVNYbZiMUIJLCmeIjtoQLK8XI9+b8HjyfL270yeDAL5SBCoBbL0gpwuUaJRUsODjcU3uWc8mIRX2Uq6VzXZDlREAkx3Ik4UKWlnPLZIhChUAFJzqWxgQtAkjV4GkgbZT/P2WL5Brr52WLiEAOYG8zaXF9kcdcjV1XXucAdpldi5ZtuGzEjrDS47KihgWdZdl7cs2SbAgDkvsTgw+PL8uA5NU54NV5QfAO752rIk3Pb312s2aWO4IfYVIssZErSm5B0x7LBXIvNquLLzNmTQuYbOJ90kBkdByWUQC7VrKBtoYIg4+Ry
xaDEJn7H7CVBO3aUeN2xBgyqGBtiQQiDEBmGcX2Lekte6ndtyo4aV3UzsX9z+fr9/AuIbhQmLfmuI7H/Kx4+xrfUZKcMKEp9ivKfuMG2yr0SSiOo8uNqQ7RKdFVtwS9vePosY5qXw8FbjSDcqOSuGflPgI+2uOpPhYKPB9PxyTothzJiNYYEYVLc0lH7dEjStjL93QJfW2yrTKmYtRVbRKe2n4WMx242ueDiNuY3esZqHrWTIj7IgS1u5eg9j159oBEypGAceAY9Sq33fMpnQ32R9rESkvJnMFKWyRlgQYePh7QW4rnxy8Tbp6UT4H/cpFDVwPaF++wOKCptt64TKX6HSJWwgsGRAeuS2YgAxHkDFIcmM2Scp89bEA4fTxbLeeDn+OK8MsKxFBdv/yFYkJShJOFGoE2PoSrwIezyd7IrF9dkL5otxT5u3k+Eq3jZSlVMGKmmc3MUQJq9XUOYi9MShTBpvbfGntluulDT1qWnBDZLWtUBDF4l8FHKJZry01LWvGBXjkBqvsWYspuOVVBeVxjc/+5b+h7b+nn75ZYhoK6rgUhvEyCERbtzjQCH9L1SRZStAPW22B17Y4VvCuW/eDb3P98LEuqD72wAGdGKoh0pCE4SuMrGJM9kNEyRceA2ky0a1ftp1SWrTEBa8R58YXpYav2lhIWA1ADlQHwrgUj3tXYDqCCkKY7XevDzwxHzMfhR8m7OShp5G0syQpwSIDjxXOdo+kaQMXjTVJuvmxUa2JuUmVK+Bp8nM+JQ6u7QSh7Nwc3FhjZy2gp64CZGTaVC1c9Wf60AMuoXUbbR2BkDh6s/lSgWhkOPGC6rB999fYAhTbYaldyCbJLu6Ldphd5kSPyrAEJv/gzpb8EQLp2MNODgy5maBUss2SkHAH9wOzFrDcsNOZdY+FrYlNKvYJ+JBkNAqMA9rslB7Dn2JFXJ4+zd1T08OHz5Jq1XWrgepaUU9k24EMwF5o50G5LnMp2i9kNKVt9Fo9SbyOmhM8+bHizVmcIBxSXsuCp6nHwwF044xQA5xe4cCLwsV1rbAkHxG/0yS5n3L7oHLliMaujs12FpQSec33Aa0YIHOchZfbEWOM/gxMJSNZYP5kZeX3ZcI2x1Be5bjEzJLXw4ZvrJtyytsKMSFctViD5eXHOwQUHrinixMTvs0Wa37Maw1DBR40V8YUVaVLSBg9ZQMuSaLw/XpiAR+4Oeh9aMT6PBTRo+ZwNkN9lMHsFIBzwkturQNErAT74U7pgAegzZ7lx4TEr8BeYuLEAJTYGMBjbUAP6OYNSTKmAEyuIn93ULBe2WbD7URZF9hUDufMSMwhemjiUkI0p15hd/7IhpxYlpP47BXoeJRCxKqRLhpCXpcTMSB2RUiL7TEqJsHTejkEJj17V+NBe41G1zbqr3C42RnlXD9i4nYmTSuA0ukU+GyZOobrD8fUEdk1U6WuPGgJZuudWMHzMjmh36pGMihs283mT/AUAXFHuWaknw2Mb9D7OiFWfz1EAPv+2XMxGtW4A27go5/aTjoHxrrsXXejJEwDCaCzpmFR16OjqWPJOCxtIP+pzvshUnjUgkeJd/3J82GnoWj/0Yy8hMy5WwGezTFgjtE/rbICxaeb6yfUfQp6og0dRgDi1L2e20mCkccfi7FACiDQuXPITreJNCndWYhKt3URe+JiAS0z43GPNrgUAX3a3kAU0OQTHPr8E3k5+AZYzxZMwQxI3alN296KGxBoDw21TzE5zTZkh2VKrlHIvW8rGDJgWpYdxb6quWlx5vbIjOaA9irS0mRG5xhq4zqmA7fiGudJds1XV3xy0zhlU2liGCkgkGNGWfP47wkZaAeOcDtaSlhXZPXy3nwYiRwCJ/n0kQF0vs1gQyxVLFx4DWiACtMob/+bt2mvO5xbWVR+FQun7beFT9Qny9Vo53a8WC4jcEvy+tz0Dv1DuVwTgy3PexyX1hiDv2/ewASCOGRJaYD2ylX1wbbYqRyyEjveo7aFPXs1b6
ettwLjRgO7Z0jg8yethINReL40zvHsqY09zHlcuCpwsxWdEcvLkFhflgcBzmm7PQJFXz+mMHZFisSO3eDrwudmVuWELBZOiA9vn7I29Tgfgz5gd7YGht2mMpIBIYsPrUzOXSJ1Bnv9WUCJrT83Wy3Po877Ii9wqNwOSn//5n8cP//AP45d+6Zfwmc98Bj/5kz+Jb/u2byvrv/u7vxs//uM/3uzzqU99Cj/90z9dfv/Wb/0W/tJf+kv4F//iX8B7j+/4ju/Aj/zIj+DjH//4TW0pE7KodCxpRGAcBNcwDkAX5FdcYNiixJOhcKfRMsowE9RECCOFX2mT2p4zb8WsjI7GRz3eSKspFz3k1L53OXaEgtopQPSMFe7xgdL5rg8QKZeIJVgyc7Dc1dgKUbyP218tcAnSnign5gI6cqC2B1kpqQAgTZafe9jwWQVIrjHhPpAivMYEB4fHXIl+8cApkLPGabkrVk8e4Eu3SYClgAmcr8Apf24x1YxX+RoYkMgJPSJ11sd6r2iBVHqae5f33SK5bzFDwnEiW6LAdY4XYUDysEYCKHGcSYtdtaSwVV8GNvMzJ8GFBhocTN1uVwGJZeVnsYDDrIYBgPJc1WPM37l6bce2n7lgzaRhM1lZNvyRddA6g48g+vHkPXQ6WwaCHMxvuYDIcc0qDGsJP8MyzXHJLObFmBcTro5SM3vnqlvUpD/b2JLjwGTvuNJtqz0XgZJ6TlCts8zw+Wx8OQWfXVLJPXXxdVwI3pV30rnat1YvlnEMyLVygOhSp9DzpfDzzAYYWsf9PXA5ZPCoGrBXuNF2hyQwMXsi2NjBNYv0OU+BrjU4YsvktY1iuGRtId0nbyM1A2Xtn5j2gs/LxYgD1XXS0BeFIs8uf6NU/VLkPEvHbT0y9gpwWr+lnNCOjWwnKMwYJNio7S3Jdxqj6fg6RhIH/uDdeC0A3eN6e3KLF3kR4AmA5Atf+AK+/uu/Ht/zPd+Db//2bze3+dZv/Vb8k3/yT8rvu7u7Zv2f/bN/Fp/5zGfwMz/zM7her/hzf+7P4S/+xb+If/bP/tlNbWE6HsICIAcDwEb4PJBZAKBxZzDAiPycyRYTIKxyeuAq7qoTS1OxfORjBcMSpdstj8WsRy166OEdWTpPrGDmAdM9PpQAdqyUkapR1jMjIgPSrxFIsQaks6sHgCGDEFFTqDIgofZWJfwaIz5/WfH6Wv1vZFwNxU94nLxHyhlTFu9wiZT6cslpQakycu2PfFHCNSM0lsYWVFTQwe2vlvW6raWAaKnns62vHEeyZTBWa5CQS9aWUNy0rhu5aLEiIQsf1jiRGmysGZNGiRbqigQaEmDwslmsg9yOz7HHTr4NW7EXSL63nQYfT4130OeTv3W8SBMP4WpAOlmd0cSHLMGZik05x0DB0bWVmmsUYxkpl9UyvHlKHe1jBoHZsBNF3IZ1reXYBvC9JcD9aP/rbF8WQwLUWBEe/07BVfDhcuxI/i2t2TSVDCzCZZyqrlkjYf60YYMFEJHAsM0UR+OWrJwkWYorRBITUZ/n2lSpT/Ce7iOzbSPp3sHGUl/nLco9UcffE1wBKJp1qzFKroDbkRHtKDsyk2M1Q+hTsw2tFZLvb51jpVsXYHsm6KBvzYzYBTj7625jRJWY+j3fs2MMiFz9tizGrM91ptF3VnSc60d1zheZys2A5NOf/jQ+/elPT7e5u7vDJz/5SXPdf/pP/wk//dM/jX/37/4d/tgf+2MAgH/wD/4B/tSf+lP4O3/n7+Crv/qrD7elDrgJ8K5YC/QLb03SUU0EUprJO9lV1mW9gnIedV4LjHTWEaNxoxdaB5U1xzGsLsyOcEYtrityvzi8ypm0QlrhrpQy122X6rIFIDlUVywu3rfcY0sEJrhA32VLBWiw6wKPoZI1kD7VKVEGKZ6oi+K9kdX/YY0mbUzbsaWsAhOOLXHI7i6oSgfQMxMzi6jsfQtQNdekgIi8znIudkfrjs7n4+J7pGRdYyx9UliQ4
qaVLdix1hxpgUdsAAiLVIgZOMh12hWrXRZMK79WVKXCXftPv1u90jqyrs/Szcq278mtVvrRMfeWL6pPrHiQrrCfYkYkQ1JjIGwDiwVMRrW3ilXeu7YAZFY2Y4o5gUbMx5xXr34KiGMwfMv2ViyJXFf/KhDx2Qjjc/9yEDsDFOcqM+JdC0T08DoFHmVsqIkp9LvM7MOWkiq82bJVs4xOXCAUop1yPUux0ksv24FVnkUzG7JN7TES/6DPnGbes1FGZakqxj7Fbh0RaYDjNrZGQ3tO3xO9j8zIJeeHxkgom+1tgyB3yR4rcsTtrHSv1CsC94NmRAByd66eIjMwMEoE8jYS1WMBYOi++CIvsicfSgzJz/3cz+ErvuIr8Lt/9+/Gn/gTfwJ/62/9Lfye3/N7AAC/8Au/gN/1u35XASMA8C3f8i3w3uPf/tt/iz/zZ/5Md7zHx0c8Pj6W35/73OcAKKuDoxfNUvC1jKqy1/VVcbbo6SNiKRIzq6cUCYT6IDJdhbjfny2wJ0/K5XvnkAPXq5vWq+CA7QL/8AUgrlQVfbvqI9Wg9dMdUjjjccuF+LYalP6wJdMa2PZlPwHzssc1FjbkKnx7WGG7yxd53SIQPK5b3Z5cMqi/74UbDPWvtPQpwObbXPe6H+XmI7uGBF3ymenH4yT+536Jsl8qKKPrrAoMV0tm1y0OZL9uiQBdJPcsaaG2gpCBCkS0O0zwlJ6XQYgMTm9ct4TFVcd3aJesWUXnUXyLzAQ2A1dSjlpZrTit0fHktQTvxfK+2noNuBZskgIg7HolAchSGJKa0Uh+12BEAw39XHO7RleXRB8w4A2OnjWfErB4GneCAzYgDpQvKda9uVXB2Qsatr5bLB0zTcwunXybOZBi5XpmJDjXxXWxWE3j9zsK40pN2V3Xa8MCjXNbGQMbdznD4AW0LMUsVmgITNwcwLDsJYXhzyZegQ+1CWDCXgcxQT6JMhZj7z2MqV4bJ3fxDo2LtXbdaq59h5nV13VSyxm0t23Sc/DomH07LCAyAoneo8ybel2NfSEdxyfDVavsl5pnt+gxaiwePXdHZBYnqd3/3kVJujTBR3TOF5nLBw5IvvVbvxXf/u3fjq/92q/Fr/7qr+Kv/tW/ik9/+tP4hV/4BYQQ8Ou//uv4iq/4irYRy4JPfOIT+PVf/3XzmH/7b/9t/NAP/dDuudmiJ+t0aBm5YWnRYGBvntXnHsmhrBvZ2kEDjX280YTEx+WsMvcLZdGSblrBERhx24omk5auiO7ppS31OZxvXYgyQ8IVw9mab1n+9HKWLVVXJDlwysrL65YQXcIpgxF5b2KKRbEoLg85qHPmvyuLoyXfB5szq+EdSuIm69ZKty0JRqzHRYMQoFqupR8397FUXCqAqy5vRxVCrQycFcvBln2ZsrdhSESmrOHz3RTaszeZARF5PRqMHFVyZxb4vWMcUZg0E2Sm8c1g5CTdrhQTwuADaAG1BCblWHkO65SXARAZDj+p7R/viRnxOUHYVTyxviiytysWGrhZjJbFRllgjz9HfyUtsuoLnebXuZp5Sl7jMdW1iu4NrfSlzJ4mtIC8umdBLKvGBsjtxfNX3cQcAo8xYmwr51YXUlISK8Ysutvup3RrfhuRTNutLBnQghQptzAlRxLceO8MBqKCsSrj9ltulPTbBiLa0GAxnDH27edgfAZp0gVzJhKM6Ht7NDGPJfJZfRYuWy/yTsoHDki+8zu/s3z/w3/4D+OP/JE/gj/4B/8gfu7nfg7f/M3f/KRj/uAP/iC+//u/v/z+3Oc+h6/5mq8p8RFtMUAbjFhuWFUpHIMSPq485kgp61gRZdniQXXPtzTGBISWRbBEH0NaVimAnZSbVxmUfPwccPKAv7yGu74B4gp/oU8NRtg9C35BOr+HCxasa8LrK/XfmzXizZVcit6/rLhuCQ9bxLoJ5VkNfgwYpItKcK6krt3YmhgTgo/C0rw1++nBVCp+8v5Ia6IOFD6FqiTK78yqSIXPQ
WXkEaqMTu0pZdPKRgM6kngWk3BXq6xcTLU/WHFhpkQq6xxEuOd2JBU5oI0NsVgRBiJaubZki9IqFwFRq0DHsoxiAej7fjpZ7bpjfT9quR8pwXVZ23cAupgZCdjYUn8KlFZ7BD7086jBhwYcLPKXZdnXincS1lMGJZx9iWMSyLJKBwqxzSIlZQZ2R6LXWzEfJabJuBcyUQI/r6/OS8nwdr8EBI8mjTnHy/F7Tc9tBSjUT3QfnLOV3RZstEks2G1Tx37FVJ9RyXKyixa/3+x+Kd25rBo41A/0aSqzTm87AK0Gq1K2UQvltnJO0SLnpFmiF6Cfi3U7ZUxniZPkG9DETvIkXts+AiV7bo5lnTWXT55pKwmBJVaiCc12WiDJSmMcAp8vZcCEQtsXl8uYgEhul2RBI2NmRMoMkzymZkzs66vXuWPMUc/k80j7+xJD8i7Kh5729w/8gT+A3/t7fy9+5Vd+Bd/8zd+MT37yk/jN3/zNZpt1XfFbv/Vbw7iTu7u7LjD+FhnFhBxxxbL8LveACa3rafZ+mwokaDtaXnxbi9/wnCKVE8dddhlhMHIOLtfvoGxebrvUTFoxoqT3zZJK2tsKTOAJjBRWJCU8rpT56WGNeH2twdZlIt56ZZwVD84sRI33pdjflmp2qCbmIXgANVBdBs+XfvSArEGh06OeYq7nUvxxa9/L76fkaJAvFv/KJjjQs+RcG6i5N/7yM8RMkoyVsYBI62sugKmR3teKs9DKnRRdbI+UPG8sq25H2tJfziOtYdmHnKyZDpsu3inaa8eOjAPxj6STtSyvVjzKnsjjWH3YBasrRoSVYJlEQoKPnhEZu18BvRVfDiW8mQQhDNjr/q6LaSopTZlVFEadD0KsfrP61FqmY0Z6ZkQDlJypLL+/pWYO97F3HUg7YCyfSnHPEssYrND3OvbFKILYU1Xea9rz+m5rN0c6Rgsmou/vJQuPG3W+oXHqiraPm3krJjSApehMDkC+SKXQazAiRbqg3SIz74JZ7KQUm4Howcg8DmfQPuFKVa95fqEWK2+dd65HWIApFVqs6CI+u3GJ9/kIa7IJhgWwwceey+wLI/IiH5R86IDkv/7X/4r//t//O77qq74KAPAN3/AN+O3f/m380i/9Ev7oH/2jAICf/dmfRYwRf/yP//Gbjj1yDdLreL22Pltgw7JQlGMopc/aT1t5vHeHBlTt7+sdykQwAiPaoh+yNdY7lLS+51AD2P3j+wWMuOtjLRyYYq0hwpm0/IJ0flXiRh7WiDUCjxt9vn9Z8fnLhsc14rMPV8QEvL5uuGyUglZa7aXiyYrE4qnK9zn4so/uY+5nmQXmYbXX6xSxpX+yUs20f4i0LLLyEv47nN8AAQAASURBVOv3E0XxI7qU0106XEFMDaKY1FNvHeOfVmxIz4JIy2iNm7GASOPGFdt0vjXVb9/X3DfBO5xVf1WFLpR7YblnsUsbK9ncp3tyJGhSsyJW+1kYBKyGdZ2Bxh7gGAXDrzE1jAuva9ywfO/aJoE1u0d656pBwAtwIgAJv7O14nq12B+RlKpCXYzIwhWnGF9Su08SfaBjG65bdRFsFGZhRdUyc2mbyZF4oMYYIQCzHDeCJ3fUu1AzwnmH0u+nvA8Pox5jw3ftx/Z3SjY7Qt+TcM9sXbXYJatcszEXMaPIYOS68w48pb8tpVfGflkMCh+TmT6gAmqrj7SXgWZKbnUDaubPfECeQ2U8yWgcssDILHZjzyWyXId4xxgTRu0rN5Aj5zjy7jQIODfiBA+f39PIDGCMgHfwSQa8t88f0IIRK9bP+q1FGxU2JKzxA7RuvMjvKLkZkLz//vv4lV/5lfL7137t1/DLv/zL+MQnPoFPfOIT+KEf+iF8x3d8Bz75yU/iV3/1V/EDP/AD+EN/6A/hU5/6FADg677u6/Ct3/qt+At/4S/gR3/0R3G9XvF93/d9+M7v/M6bMmxJ0cCClTjJhrDS11qc6yAKcLyAbR2dvagMTixrhpX1i
84lfJ+ZGRGWlBmrwrL46nLAvtLnbKm9X2pa32V7ALYV/vHzwLYSKNkubU0OADKTFn9eInCJsQSzs5vWZx9WvH/Z8LBueP9CQORNBiiXNeLNZTVjAKSy9+q8mIHWQHXV0IBGsyeW24/lf77lSYEt2TE4+JjvweKz37VHTOQedr94xJRwghczEJrjy+vSFqZRbIgOWNfPo/wts2hZQOSybt216+uWy+izTeurlWsrC1TzbHaPpRPvn16Hrp/07z0wwm3fBHC4pQDfSPaCa+W5tbVeurIV96AMamtGOy8ASVXyLBdALfqStE4nfzs33l6CZMttkBVltuTLBB5a9pRki6HS2+xJCwL71NLyWeW+v19CASK+3AtPBC/22RAL4EmmI4nttKtWTHU/BnxshGADRAtMkgFMBFtijJdA+7xr8LwnM5fE8unaZBUch8djwDWzKHuA7m2ACO8jXbfKcjGHsmGJg9xLGv2O6ZjHDza/DWaSxYHurUyR7llvcO29tGTPLWvGhnbHKmhQHiwVtuQU6J2mFM0EUKICcKzfSBZPgpHNGFuPjLHymXrqmPyRinMffeX0j/p8z1BuBiS/+Iu/iG/6pm8qvzm247u+67vwj/7RP8J/+A//AT/+4z+O3/7t38ZXf/VX40/+yT+Jv/k3/2bjcvUTP/ET+L7v+z588zd/cymM+Pf//t//AC6nWggtRgSolmagn8h5sAPGL6Fe7F21/MsBlWQ8iJf91YBqxTBI4V/O1cGu+Ed74D54OAfcBV9dtK61voiLWxszUg6c3bPCUty0eEK9bjxRcp0Q/iNfaMmKXNaIy7o1CrTsT1buWC5rq4RY18xiWVBH28yOY8ktY6gVG1KOY9QakNmz2OVNx9pIyyJbTflc2t1pjxWxP32j3EnlGoBQmmvf6ix2Vp9pK3qjZAnQb7lrWddgTYL6XVy8K8+VBQ6PiH42etegPpOTdmWTYKS6Z0mF2TdsCBsQdH9ablUsUs+xtrD0oBFQjrFdJutgSOONvG9W/9BvX+7fkXdxvsw36+V59LOqgSD1ue9iHSxFMCYy/qREKc2Lu4tSBXl5ggAdou6QTPELzFMDWxJFX8/kSNrrPZH9Ld0N9WfdzmNDQvSc9pkYuOhdp/RLeZuA6Jk0c/KAGaE5fz+hjJQ9ptcZ3/kKWemW8aq3uJSVNhhgZHqYDHyDdyYooXaAgEhKHTMykyPMyIiFXhQIeRaA5EXeSbkZkHzjN37jNM/0v/pX/2r3GJ/4xCduLoJ4RKyUitINBkCn+EkhMDG2BluWoDKU8OSB1FiWY6puuTwharHAiLSkelf9xL3jit+U4pIKAVbL/6vFwaUId3m/1BYp7llcgT1FuJQoViRwocMTuWot90jn9wC/4HGl1L6PG6X2vW4Jn7+seMyB7J9/JIbkt19fscWE9x+uBYi8vlTrvfyU36Xy8d6Z3Ic+fn8yLfwsvPwklJiYEiAC/7p9rEksUvGueg/67eUEIt0wRimNaV0yAEkFITJIXQKP5lpQlULuT3aBo992BqrW1SV0fSyDsVm5u1907Yuaklb3XQu82vsZE4P9Cvq5/UAOxh+AqZFlTosEH7daioHeuqwVs5q6NzTPJQC8OofCIhU3oeALIGFXrSaGK5Cqyx4vThgouFtrl1Y3FS4SKpmOPRDMMsqco59PXldZkgpKGuup5QpZnqlgsl2WYUH3NWAryM134XIp0ycXVkQxU5xAwBvZ4CJQXFicAxArONRgkC9JsiJ0P2oQO2+XUpvufI99mulqluGBl+s+lvtY33Vfa8OE7vdRIU+LPWnOsaOIW6/pzFCkjXpWkDvPucGNPRCG7dFZrQzD3+hoEpjIsUj2ySzWUwrvr4HI7FJi3j4lgy3xqRSvZHBGSX7oLCtSB4Qto5H1aQFjKXI+X2PCuk4e8ndFvB8HDH2Y53yRqXzoMSRfTGknV1o284u2sn2U937wjo3YEMtqMhtsZAYYC4xIRaa41AjXmuBoUnE5S
L0UOlwvwHaBSzmAfQQmHRVA1OwIKykp96UsziddDS7r1kyogG35lt+ZWRq5bR2VmWVsFnB3xKqlpRSYQ6vssYLOfvkAmqw63JaivIug/1Eg4VMpdK3gSZcjAI2lWT4/QH1Grb7p01+OLaOy7ZZlbeSecot7gNxegg0rG5e1L/8eKc5aOS4ubb7GhjArUhNLVIWY32ELiLD0wKQVuViniy59YBhYrFpLo+e2vOcNWLHbw+wUW4mli4YGIvpzZqHnT2kMGLkQepeDeB0OMyOsx/J3jhMD0MXzczdaYEQzI9xNmoHSVdifKvWd72Ps9kC8HE/rvdoK+3fk3BKYIDrh0kzbjBK88H2Qc+gHLZ1r10Gm4sOUIyzNkfnNyuTIhIdzKGxJc1wGId5hm6QA+yDYLAv88vNyNIHIi7yIlmcNSGTaXyvbB0sZ1CdgBLAHE/1ujSzwRZHz1aIklTsGEOwaU7I6+bqM92NFhr67hhXhycE5jhchBfPsItz2CPf4BWJFLm8IkMQVuF6AuOV+2eCWM1wIBYRQ4cN7pOUO6URxI1sidoT/3qwUH1L+tog31624aGnrXtdHvs9exEDk1TlQgPvi8eocGuVZH4PlGmtaYFrH/aeSDURbseGgTVZs9D1mP1wA2LZ2vbQslwxaDEhUak+gpuqVcSGXdb/on1XsUPeDdqORfWtmKXI12DoI5ZoPyUo1MA9gvYp2sXVd+sPzOovZuYhlfJ2l7wdWXnYL2AMUAJogfn2c2XJmldg1Swb7k4WeA6c9luCaAGoZL+JdLcYn+5HfYSmdMoxqAEjg/k0qLi41IFiOa016Tw32DOMMb8fL5P3T/SSBgzY8jJiPDhwrwMHfgXYclYxdBR60klkRma1MJl7g60hwlIACHJju4F2q1uj8fjs17rMHAAMQ7jN9bygZQJu4wqolJN3htJhpVjtLdTTHgZElW8qouj0Q1b1paxPx90X9tj4BdPe1jalsxxPrPR6JNg5aVdw5ExhlACtoUiAhwbpwILrKVha3lumw2qffi6co9t7Z41gZI1TGPDqPXFbfc8ftZCBsOHWWsVm1dVQr6ojMwC/Qjzsv8iJH5VkDkiNyFIwcOtaOO5AeoGt8SP1d3bNoAJXMiD6Gd3VQ8qCJ0zlSvh04aw9w9oC7PBAr8vg+XNzgrq+B6wVpvQCrqMDuAxA3pGWpVdgzK8J/HN/ATMgaU0nNS4p2rhSewYl0LZrJSJm8E9XBR2BEy1bMnsgKtTOtYz67crW1IGodlNIWy60rAZbnfqto1OxZAIEPngQYiHC6X66szor6iCU4MhnydlqR4H2le9bI/YUV7dA8k63yoEUrr9Qf7A5gGAHypwStFnidgVi2umlQYjEbo/4byWi/BswJNukUfKkGzu+z9zVepLhbuqpcOPEOHxHuxsJCphb4Wm6BADrFd2akAdBt24BJdpnxpBBtaaxEWnVh9PNYs4q1xgHeVi/T9ZvkeNrWbqnAhPYT18TPSsoKnMu25kT3ZkN+zo1xnftG1hkagUQNPqj/WjAiP/cUunZ5a7jQ362aPs3+4n7wO2SxgMycbLFlui5qu704FLrOPM/5scfBTHSCGGt/mXWrbgeMYkk674dIz0rrGkbfGTSM2ibbdVQqY5Q6UKKbq/uoApMab+bc7bFLW2yBydsyeMNn9gNgYD5seanU/m7KswYkNIkhm06IhoendHcc0HUKXrhtHVdU5CDAYmfMaCdUua9mRmrtAdekAC3XMhitnSMw4h0FYDJDsuTJ2K0ERsA1RuIKFzfE9QLEiJTZEcfauQ9NAUSEBSmckJYzNnhsiQAHxTyQ5Y2D2GNqXQH4j9kOrNX0wgqltuzzPjw5MjvCx7EUTn0PgmCVgre3kfdIpl4trjXO3p6UDWJgLEOSBiLMiADIBR65IFoLRC5rLEp4TXPcWj+PKtYWEBkxIjP3F2mBbvvAPq+2pMdUYw10tpbqC9+Cr1FK6Pb6WpcgC5TI/tLuKXv9d6Q/b
9mfra/RkcXWeXJv8Q7YslV2NPxI6zvQK71sHAD6+jVAZed0jMJRxcAy1GgFip6VhM3392v03M6ePaCCFH7+mvSzynBDy+tx9bpyLUrRlJbv4GucjhOGBj2sy+7grxIcsmuWTFahmdIOjAgQCWhW67gCNwIjQ0BivCszoW1LYMKTjhNCVu5jBSXWdcyOMwMliG3WLa6XUpfxIMWXwcdJZb13DtsmnnPxPHhv3w+dzXbvvjXuZEh9CEGkOTxjq5vlVlDCz39MqRgaAByylFhumreMjy/yIkfkWQMSFgYlMt0dPOgTETE5nPyxgT+mdmDPuKYRadmTrj9NfnNh1avgQ1Zt1u4L2VqCGjuiz8nsyOLp3GcPAiGXN3DX13DrlZiRdUW8PCBdHvJFbQRCMiBxIVC8SDgjne6Ly9bqluKidY0Jl5iwRqov8vq6FXaE+6O6tYROIZTuODx4yToOQK0xcF58yabz6hTgXet2Ja1Ytb9QvvM63Wfcp8wISCAiLa5SZJa26yDzUY0XqdWXH9aNgIlI7fsmB/a/vlR2gFP1XpRSbqX21O4T1jJmmCSYk66BIxBC61pL9Eika48EHdXiWwEK3/vKnG0Ng7bG1F37ByUWSJmDOu7HNvahsfi7/exCMabOE2TNdWtStswD9Gzp5hQQoqzvklmb1a+p1vcKEOlc+/2qxytrPV1/ZQlO4L41tlc1LiwXLH4P+btcJ5kOCTpqe/pz6nuzpVRcLOX7O3u+rXCKWYyOxZDSPi0I4WXWPXoKW6/BiGZYLVBiKZGbAtuWkkm/K2tCy+p7IpkXoGUUL8IdjEFJlf3Mkyw6PkXGjLDrFh8TSNg2mvu5bkl0DDxSY1yqdU3oGemeK+PZtkIyRq5J/Mzy81cANQOAULdNqMBqr19YF3ElGfFxHFPH/Ar0uvNxOmTfM0TWMmDMkrzz8lKp/Z2ULwlAoqVWHiZLigcNHl3mldm7lJ8d+cLJF5iVuwJODIVYumXJ77w9HbMFI1J0hWEPLvRF10TZsihrFjYjlW/ZsTIjbjlRjRHhqlUC2VkBiih/HAthZYjRluSZdX+v6nJ7HIDTGMq+qv1lKC6DkVmyUhKM7MmwHoNwiyl9k2rWLB0jIhWHPbelco6BIq2XzazSzbULMFL75RgYkf3BzYypVbJkthbLjYRlFBNjSadExZ4t0X1xZBK1FDFr/RGGJMaEEEi5OeXYpSgVMAlK+JzGJY+s8G0WLPpuuQBZNZbqNcprq9+teg9S2kuvzwkrcj70+/A28hhBPWPScFM/a5s0EDkCQlhmSv40dnBAXY2SBFhZymR8DjMi8jfQug/T72GThjJ27eoTYOjnXz/7o7gsfVxr/dtYyFn5PnoMzZbw/pxhS7tvtXVLjKxdqg8lUGmWT+aAmTAAKlKOyyA1P9uh9kFKHPaSCnMxNF5MKsQH5zLIchmQAQB931LLEiKgMlnq+rRRh58jK6kFb/ciL/K28qwBCVvUpOIaHLlsRUfWvPvBvjoLjbZscRwFfGPQANAqenoQ077O2s9Zp/YF7NR/0v/c87Xl9L7FVev6BogrBbHn+iIupeqitZzyAQPgPfzdq1r4MFdhb6qxlwB2in34wmXDwxYLQxJTHXjIBa2mRyVXHN8p35c14gwIC75dDwOAGNxqPzd9O3E7smKEJHt1n7UxvhdW8K8EG9LCWbYT6xh8cKrbB3G90i1ri7EJYNexI1qJkP0ggZpkQXJrsMVWaS794eu1j1yzZkBOzi2sXGkrr+wDdknjVM9vLhtkAPsRJmQGJiwXrPKsDADFKK5BHuspClVhRJDgnQe2bGFfY34OI4J6fo8APq3kAmP3LE4qUNwFSwYm3t+2lF9jHXsoPTm69mngOmr/rV1nsZMMNorrlljWBBgbLAiLldZYbmOBij3hNujt+2PX9kgAUpYZbCK3yarVc4vsjSFHjildIIHjLo5aSR2xLTXfvetctyQoYTniwiVjdIJz2SUPgM/MFT8/KXtKA
GDDJIAh600HVec0+vCoe10DrLeW/dsC8hxU3bbbWkUpt7rKrFZR8C67rOW5OTlcQVXcsVRm9pTHkVO+L1eRuU2O7yfkWCKXgIDyjJb7bTxj0kAUtQ/1uygvDMk7Kc8akIxEB4pbckIeXHwuIpTIcuITDZZUhbVaWQDbPWik2LVsSFWkdbscxjnIfQYjHMzu8vZ0bBBDEjdRWyQisb+C9EPwHs6HyoiEU82uleuQlNoRZZJk96PYuIbUtrElOA96YpB6XGOm9yPOC4GU8+KbLEbafQsYT0jSPUuDkVPZx1aaam2C1uIqQYm2bgIwLZz8HOjK61wYUgMOXUmdC0cCaJIAWAwJ9xPLKKPRqN9GYMSKc5J9wOv0/ZZti6mNG9HKkVUn5dZUkNZ1jlwGjlhwtUWv3e5YmyLNz9VCmwD2Xy9GxpgtnZ4ZiGPuOUfrhKwiXTQvYwB/jeNnqohydZQiwYhmfFvXqadZxiUYkW2Q7+YRECeZGil9nEa7vNs+2cDq2i9q9qFj9+NEs1wADXkrJBgBbgMQLLM02tYxtRVbvgMMSj4KsRhvKXuMySjFcEyonazcuOpG++0bPSN722hpAuRTdRc7ZcsmGTTq7xJ7BpTMcLLJ3rWsyOhRke+F5cZOc58vAPrkfTG6InIhzHwMnxBQ3fWOZuR4Sfn7Im8jzx6QNClem0kzL5tMcDIokVkVspLUoHh+SelY9TxWAGZd156nydaULarcrgp7qshUvxzMHpyy6kr3LM6WBcCdzkCK8LlfknN1/XJPDMnpHun0Cmk5Y4WnAPYIXGIuhLiS5f9hJXbkqiZ62Z/kukFV4aXSR+0MZaK00tByPQwtwffuDLO0vcNkA4qdkiJrBRTrUBwX0qwKRGvZPFK4ULImI6VBfrdo8AJSuiu9Xfb0D9kfAEQRR9k/1TVNX59ludXyVHcPyxVF9pv+roHI3dLOrLSeAXTrygKgBHGX020AEPPzFOG9Q3QQioBrrKI3X58JTOr9sFgq2qbvjyNSjCY74FUCCX1dfcxXfw3yXFpuLW43cpnqf9ftNXDYk1Fz5P4mU2MAFDl21Hbb7MbI3fFtXGKscWUE0KVUZrE3Glluuvr3rcD16Jig2RISxyvpQ7AiQT2fUpp7qPp4dK+5DVosVk++S4Xd6RgSYi+9c6SLCC8KoPaL1TvcCmZJ6Ae5hREYont39fTMnYx35LpFRMGanOCK0Y2LtPpISS0AdIkt9PO0fkQA90W+9ORZA5Jq4W4nyRo3IAeG3kXjunE101RcIYCYrQqxUL7a11qCkN5la9ZWnqlrNh7dpr7uCODBygIt88r4k5wHnIPLjActjHVdOCM5Vyqwp+UO6fwetgQ8rJSyUtYcYRDy+kq1R9g9RF4/9yldc/7McSfn4HFe2jTAjWuWqzUbZL9q0KN9h72raXtL6lVDWbJ80VlqkUK0A3KqFub63VYedMD+iBngPysAFRhbR0dMAH9K0HdkEtdsyExY+eXv9NkG42pmpLqotYzQ7HqAfQVk1C+j4+2BEYDSMlsV2+neuRKfwvsW15ONLJr0fPri600TtUpXm0nKY/EZrVhK0F5g9EjB1cL9IGOMuD38zljvlXynZgCf2iyUHYdafDC3S76PfSYloKkZofrQCi7XrmxbqqB5FUwmUNm9PbHuj3ZLaq+9fu/aKEAIr9fvMn/vjRZvUSxiIEfASOtGa4MR7RqpxyL9rOnvbytjgFGfmchsojsGNDSonG07ko6B9v0YEZMC+ZtK8LBBvXOtHsPH7c7tHQIcfB6zpPsmJ/WRoL0YZL0r7413Pl8zrfMR4Po93BeeY2AEQJHP1Xl5BoDkxWXrnZRnDUhYZmDEeoGlPl+DwFpWlwaAVI5Zl9lMjCUMZGrwHeoxuQE5RSg3jNJ/ulJZWIo5HPKLFRakGOHS0mxb0vpmIFIyaiWeBFEGI1LAa/aoq1B89ETuXfudBllHZU4SgBx8oxULK00v95UUDmzneAhaVgf2tq5IC
0JO4qZI622x3HYKd1UU6nfaTtYOkUoDALG8gi+ZQUqyJrxM7levdZz3Xm6zp0BwnJHl0uCdeB5TvX/thNxak2Usgu6XERuigYh1Xdb1WqzQqF/2lO5bAZBUzrTCVo6VQQklsqhxHhsSohfjjDi1VoQAiHFgXyy3weZY+dku93sy3/HzQd/zMsE4NqBKgRF+/2TxQQ222ALsXXZ5KoMp9ZX3NA5qYCJTtOoxsusPwxWrvMsMgtIYwFlxZpZYlu64c9NsxVa32wbpkhkZrf9iiM4+N2NFuri3Zu415lDX7m+JZt6Oip5zZqBixmzpbWi78Xm5ICS3PTiaGOUYEZzDdavMKrt0sZtgTBtdd9Zhosvu5K71wub3awRMeJvShz7BwxXmhDAFuWxl72rA87vbu3M1ykhJ2kHHDmw9fZEXeUt59oBETspaIZ1lsJL7MqXrffW5hHfwkBPT3P3LCqLkDEScehBAZl1q8ajgHGKZ6F1O+asGACERND4EBiEAHM40hQcgLZkhYTSeY0YQFsTlHtdICvJl2wozsiVy03qzbnh9jfjswxUxAW+uW5NjvxVVU0C7bQR7ItGTg9VvLMygsCJ0txA7whWyiWXxRWnS950nGY71AFId+FmBKYpMy4zIIHWdwtZSwPlz5rp0qwuTxYLUZbWauHR/G2cgyjq1ACVSumQOUNbnVAPYAXRFMfn69qz01vVqEKKXyd9HWJKnuoNd1g3Be3K7y/2q3QwB4Jotnlel4AO2MlvX1e+WBXk2rgAVCLbHdMXf20dXAla7uLdJHJZkRmQVdCr2OE5VrkVaZJkdoWcodSCM/eibFK2ZTbk2faGeA+kClXqGpL7DrXuhxVCMxHI7utW6r0HIaMywChvugREdiG4B6iPXdmS9Bht8fguk7AERyRoAfZ9az78V48ai59rZeqBnPmZslpUYwvpd2qnHaGWAkyxJyKyIlYJd9pNMlc0Ggu5dFMCEW1DsE8Uw50q9kgQ2MCU8uFrMd/P5HVkj4B2xJYIdKf2SwQkZtWzW5Lq8++gkOfdFKIz4tHnpd5I8e0ACKFpYTrwTMLJ/vKR+2wzJKDXgSFghtNiSEtB2oMFlM+eRXITjl0t+5sKHaTkDfilFDtdI405EtvhnJuBxjU2a300o67ovpGgXEEAOhiTVEtpaopp1xjH5OycEYIXJi98StMiBme9/TK7JrrIlW9GzGAAJRi4G4zFTKCxmhOVWpdlSjveq2m8xNfdBpsnUE33vP92m+ZWWQVu5ak2HH6RVdwZGLKZlxsrwp3bbshS56TV4Ae62+rxuXHvAo3tvWClnyylvf+RZsN6Rykpkw4qIvRoBEfreghFmRJrLc04xwU4d1+4rZpx7H3+xrXj+TPZ4sq/MQtakolZgJKb63FoubaNnhNeV6+P07ymZfaTbpq9zdC7rGW6XxenzZzF6vHx2bXvHq799p2QDMAPgJRipy46DkSOupHbyjWMMimY/+Dg6LpCX792b0TmaPmzYDFcm+QaYN/owZQyLLhVgwsV5ryCm5ARfkmfI9OIjsCybIwhLcLzJKXDFevod5Xvp0LAjhXXKzxcdW15v6saeF3mRW+VZAxKZyQpoKU3Kr93vIwceWXG3URbE5ygVZTmWoEelcjCTLSVsG1nr2QpyFZOxc65k30qJBqOYAJdcKaAGUHyI8wCSJxaEhdmTnEnrEmnAfHOtrMhlI2X6C5cNW0p4/7Li9ZVAyesrLZOWxeoWkjqlRNPwVj73a0QXh7Kl3jplpfZlEMLMCLMh3jncL74DIk1fC1C1pdYdjdkAqcCwmxYHasvUvW8um1jWAhBLaS7flQsB953lhtVZHH2b9leyI811qmePEzG0z2JVhHkyHKUr5X7jeyP7BdA1RVrgJfveCtKdWR2n/WgdS/WtVgxGrljjTEVb6WPeXxaSkz7z+hz6u15muS16R9ZIet57cDUSL8akmOqxT75dL49juc3odLwF2Ls67mk/9plIq3OTMlu8C
/yulWsprAofP485huFHB6vLOBF+j5nlZB95mXp6FDQu5ci9Hr23e8+2ZFL1shG41+fRho0jrKAeV/i7ZjuetJ1z3TPdsAFiGX3P26n3YiQNCCksBsq+Y/cqe2wDaqZECVQttlsfaySjZ6YZx3M/lTk19n2i2RPvSEfwDri6hFPMabsDECKzmORxwboEUmVJSr9VNaWAkpgI1DRpgkG6kOf5JAOjNrlP7scMVupJiKl9fAEkL/JEedaAxHnlV6mkzfySB6dYf+tATCk6mJOWoTkGSe+O0Bxnx4pGSjq5HXEbU3JILiElV1xtEjI4STQIkBLiiSkJS22U9yWQfUvkgnOJZL143GjQfdhSZkMSPvt4xZaA9x/XEsz+mOuQSKsbS4DwG83XJycYaQ2j6wVCcLjGLdO8bf/MsmlxADsPyBKM0EBM2zB408L9FWN172BLqs5ixG3h666uV7FkkpIuSvKT9pVAN99H9UxFL4Lv0efxtz45O9m51CEJzSTXHJ/QK1nKmYnLDxA/h3wPRsye9Lnn39LCzNe6B8SOMCQjIDIFIyNGR2TVkW4MlmuLdnHR2zYHgW+3HyikI8ux/t4rcvlsEujzw3ygD4E2ZopllPEHaBVBXsbKjAxe92rcG4m+N1L0GCwvqW7fg2fvUNxcaZ04hlA0ZaZEaUTZUmoYzVGWO0v4Hl3Q3zdL4dw7nqXojkC2Bvd77TwCRnR7Z6CZvreB7LqmTwNIBBiRYzdvq5fNUo9bUg0tedtciNRDjgFObZu6YwBjxswCq0eZEdkvQD8+3C11/DgvOeVuqrWAomI5pHGC405iimU+pD4k16icB5KAhE/wB5+Fpt2uxtDCt2mCSRJOIceTqP6uDIpYhsrKvtPyEtT+TsqzBiQj6QLXohqUWNkyBhsrW9PehMwyG1ilewW3Y2+f0VhIYATlAU+A8M1YAOdL0Po1EhuyRRkvQiwIpfUlN633L/Q7JmJSuvZny6mcYNrMO7brB8XLoAMqeyIVNj4uu5F4w2Irbzl/pRoqqdZTiW1BOckExETbSWsqAxEGIxqIXDJwA9CAEAYiOoA3xYTkHZx3uCB2jElz7XkCa5kRG4gAKMCDTuyqHyCzIso9QINGLd5hCrRvEdslZQ5kmuUGCNFgz/maPEKeYzRB8/FZ2ToCTOQxNZi0wPsqzmG2ZeDWIa6qbmpchj7eERCi1zXPoDoHx7+RW0c7ruogb2acOUEGA35d60d/t37Tsv56dZC/BM8yKx7HOUlFs1U+bUav9IOhnI8UTqsAZ3Md4hz78SLzdunzrJPne7SPHD84BfbZYF/5z4qhsliRUtxSKM5l/FYgRLs+zxLD6CxtWzbI0cq8kVCKe6aYN2nBiAar/B0Yg8NZv26RnhX9nsusfnx8yZhELhtWQJx47xIaYIIcee6L7sCdIDKDcmyqJ28KPR1LD4van71wEopync4BPrtYOupr8OkBIIo23+wk/yIvQvIlAUhGDLdF0Ur3LH7hLDeFWj24V7Kjikeg7YTyIF7I7qU+IAkVdKQ8qhQQIrZJztfYkbyM2B7gskWsEYLxQOOS9fnLisc14rOPK65bwhcua2NJBNBVUudr4EmHXam4CrrMwlPYp9WymqLZphzbowE7JdCWA9fzOZkhke3iIxEIq8F6VwHCpLsWf/J2PEkx+HhzWQvoeBTAZIuJKpGnhG2NDfhIzHCpCYwnHecdXKQ+S94hegeplUogEjy5ab06h44ZseIfihTF0VM8QwAkKKn++YbytONqOHIX0e2wXGNGAER+b5YZTJPuVxbnHVJMBZTAeGdH4MTKbmQBjuDb6+77xqvfAhgJRU6nSZVKnqx3JBWTfJVlnRbLV1+7w1jW6CMFCct7vNG6CNVfytBj1QSh9Wp7/Y5MHr0++Fhn0WoVTX6ORrWB+Bjl+MbJ6/3bJgym75b1+4/duLTie7Rde6zIiAGxnr23ASKjBAkShDAA4Wft5NVz2cwt+brFZRcg4kUVcTF/U
0KEyprIausczxXF88NZExkcyEK21jNyDLTy96iAH/Upv/8MAHWfys9r5L5JtZ+za1cM1b2TwAqwBYDrmchEOaMso1uU72sydacSQ+sdECG8N1xhqCoAyX0fsucEgPtnENSei7x99Od8kal8SQASLRYbUvzhUz8ZckDlLT7Se8obixVPAbQDspXXn8UphaIchwGLWrZFonI5cJutlFtmAK4x4oED2GMqDAIr3kDrWtBdjxMTk2+z8kj3NpY6uLYKrO6/6n9cj19+D/qHJ+YEVlJY2YG67lSKO3LGqJiq1UzS96MsWc2fdBthZXkARiyJhkKhJ38dsH7UEsp9Uq1sdQKjKuNp+KyVe+ePWWmPujOMQAl/10AEyABvB4hYMiqUeYsrg6XwNW5bCpTRsiiW+Y5xkVZti5UBUNzsaF0LSmR2NOtSbKDRMmEzJfCIjFyyOgXxAAPSHmN+Lis1qwVGZoC4ObZSNPU2x5/tnCtV7Ts77+ice6JBsgTORyqtW/VCNLAagZHQPE8VjAA4DEas+lCj2EzvhLGKr025SfMinl8qOzJ4nmL/HLZjfezHoskzZIl+12lMqM/H4SrmI+Z0q9XdOXUwP4OUYpzcmOHVfkDpPyuGVr6vMiaH3bhiTKWf+RIkg3LU0PoiL7InzxqQpNjGhLDo2BDtRiDHBe8qo7ElSj3JFgErs4d2/2LRtVD0+rJd0INvHaSLW1IeyIMnpcTDld9y8N4S2U23RApxBLllpQQ8bLHEj7y51niRh5Xcs96/EDPy2TdXwQCQeYmtOzzZbVmbpfYQO3G3tGyFTEko70PMfUppBMlv9mFtzTJca8S7tvAhZ9DSEiNwBaf7rPekBK5v5HpGYEvWVEnFLU2msJXB6pza982FPl9favpftr6ysrytsQMis6xrPv9z6prkxH9ePN4rrIgvzAhvZykfUjkpn8ViVZXZZrJMxwC4jBGCb91FuE06A5dk2WY1WDTI4/47CkS4fg9/siKk3VNuAXP1msYK5cj6XRWRTXxvXTm00mIFC/Nv6WveMia2+yMrg0CrJOwBETOFr0A+7fingGPjijXapm9rOZ4yGMljSgCizzmLBRgxI/rZe5sA93qfYvM8SCX/iOgAdoud4zbq82iQPGJG7HYD5yXkT2+CkJIoQaZTF4wIUD0Jbi2oKV2hR+OQjPssaWlR53UOrub00t65UkfDcvnj+z9K5T7LnjjqV2D2bFRmVd9XfV9mCRTOzKw4V4LaT/kYq/d4XGP2HqDjS48Pee+0yP6kfq7XWo1ZaEBhEDgnNrpIwpL1qOtzCCJ5iSF5J+VZAxKWkQWuqTEhwIjMzGNb59iHPgGxDpxy7qgxIO2+Ol2t3NbaRg7MMlUxASUCI8wuOhg+oahgpCp3FMx+zel9HzIb8pAH4sc14iEr2K8vW0lp205qrXVHpvb1jn2FuRZISxXLvjp5D8SY3eFyCsOsSFS/475AWxATmWZe+L6U7+grNceYCiijYNcaN8JuXL0CU8GHVKylGwjQxolIpfnWFNBaLMVUT1wj6UBJASGATA+5xdSlZB62R1jLniIzECK/77ln6dgQYAxGStsNIDLqQ8lacLvmbIhhaBiAGFoeG9bksChLKc/zxeVuoISW3d3TwEg5pWrvzOij19M27fGOPEsjNkSuG2VI0mBYxgBodmLkUqhFv1f8rIwApriS7lgapIyyac2ePbl8BkZGDCy3Y48RsdgQ7Z4FVBAsx21Wgqvbs2tctWZFi+U1FqU40BAWI49HqbgTkVS3LUuatL6WMUSBVSvpwAcjbTyaJXosAghYB1YINoEPEh2TA98hXKW4qCKfSz56M9c4KRKUcHHT5jIa/4za5tNzACQv8k7Kswck1iRJy+syy1oiXYJirBZlmfsbyC+1yq+9VzdDMytmsKmwDvH6xRP4OGVrEk0WbGVqB+6Ur6sAkUSfjxuBrkukYoccM3KNxJQ8rBS8/uayNbESckDWE8R58bVIoaeYjvvFN
xmv+JosJVZa2U/B425p17NVu4AdL7IPxVTysCOiFKNjkYoRpxV+XGMJdJUxJAAVe+RrrewHMUPMgjBbwssaJVpcW5LP3mTi4oHceUrp7D0FtnvvVEBpr0RLhYhlltHpCBswc9uSx4tbdhPyFQBa7dub5C3r9IgVAXpmhPuLRYIQoL5TGsBZ361rOOLywu0eHcNSEtv1W2GTOpcu0c4ZY8JuGB9knv++Vkr/HB9hQWi7+blGALdlPnpWRNeLkO5Z2qqtFUkNRPTv0bZHZcRMSKnPBAc1++ZzLHPAckR0xiz5POmgdgA5Nk+5VykQctQ1S/5mkFKBTp+mnR/rlOo7mUQ2Qq42fs1K+VV0EVcbv8K+h7ZbbsuevU2Wre584v3WoFWyJvTZ3qOL+M6fGjTGlOM2oyveBfwe8RztHaUK9t4hCB0m7o394j2tY40Y+5h1SfZxrjc8ny/yIlKeNSCRoMNarrO88NiiAz3lPuQnScovU8A9iKj7WWBjJJJBkEHZDjQY8/40+FRrk0NlD1KqwdNcx4OD17eUSiatN1eZSYsACf/+wmXF5x9q0PZoopbWMh7k7hafP0NTlFD2xzBzh6Orfe8UGuWDJ7s6iSnQETkupld+pFJbKzO3gITrZzAbwiBEZsu6JXYkRSOGxJi4tBLtnENYPFzuS+5fCUq0pd7yU+dnRFppuR/LfTMpejv+QAtbxnhb70gRjn3ytaat1qRuFZOU/chiZc3S361MUTOXlL1lbyNaURkDESvFcA9s2Gp9QXXRuJPBoUYGNcl4yQx+M5mRY6NxtKyXIDLJ7faVttHzaJ07KlZEAhGgB74Amvg3S9mUooHLUeXzCPjQol11Ztv1y8JN55odU44tFhvSsRquHZd52aiaeHB1354hqd8ZiDhnj0UOlPKeJaactAIOPmZvh4LPaUDbLIsjdKxRHZ8sVz6eF4DezfRW0c+Jdf/bcSgay3i7Oi5oMBm8w2VzOAd6909RAJKYiuszNlntnZmTue6ijQd6bAnO4QRbLs8gqD05/0Wo1P7u98sXW541IOEYBSmWP+S45oJ0zcrL8rpa0VruwCvddGJn0S/5aGDOh4R3daDmjF8OmWHQbQddfwIDL5q4r5FiSIgZyC5KsRYBlECgDcRtFVxplWGLFme8KgyOAFi1T6vltGTyaAY2oEsZaPQVNXEOQHrFpaYABchVjZXitoZImzXrEBiJvRIthTM9yd9Ate5rZkRbK5+iINtKzPHjjOJItHIIkHV6eJwBoNW/JRiRYvWb/G4xISxHQMeonzUz8rYgZSSaWWnYkcgMWK5H1CxT+x0EHYCt7M3GrFGKZ3mvngpGjm7H7285twAjchsLYHyYcgSIyHtmfY6s4Vre5hm09p3FN5xF7RnpggX0qXvlMp6faD3KMYD6rsoaNvK9lXMei+ZKHBzSgOngY05qRwJAM/doICu/741derk0Lujv1jKd+puPcQTsyHHBas8WE86LpxTy3oHGD4o9u26xZHH0juI9KEsZxOBQxxNrLtgzsGo32qP7vciLjORZAxLOEmWJjB2x1tXv+Vhot7Vqa5S0thHdS91muann0hQ1u1c6EZjqHA05wUuXLQ5qJzDicltiDlJOieJDmBWRNUauWyppfR+2iDfXDddcDPHNdWusQVq5Oy8er84Lgnf4+P2Ck/d47+Tx6kSMyH2oblq6IBtn7pBxHNJlrvqg1pSBpc9cexx5byT4YODBQeqsKEtXDiulo44PadmScbpHbdFndiQl+i6zjUhlWroZhUCsSFh8UQIkKyJdJkofNM9tVValmAp3USx4XbMLKXwx5UDpvoiYPL/sc74vchKXSQC4P3lf2adbrAkERkHrs5iQGXibgRD5OasXcUQJPGI9PyIaZLSfXEAt0HbC0niLdb43hIz3sww4IzesLkZkcM2z81nnksaEGTNyUSwI0GZKquvm2up+7Zmx7G2nFVMLjFjP8hFwvHf/R+tHY4N0o7IYEs2GjIPT2
2Uj9yzJikgQopsdU13vuQBgytehsmVJpprH6SsbkPIzJNlxix2xDFIsI+OKZVjQogGKdU9YRgU3mUFpn6fq8vmY40u4eC6QXayz693JE9PNjMkWHE487+aCh3zvRgYq/T6PsqOx3D2HGJK9qtof1jlfZCrPGpBoeUoArs6TX1ekbJhwYEs9VzGl9fsTr2UZ0kAEQAEc7LZVt+0liQm7pq9lZREljW/NM17djUb9oxU8TeXX+h++KUyoY2pkGkFZGE0rLqzEezd3I9KF0NgVi77TumvUrho6y04qjIjcbsSK8DZl21SV56JI3/CcOZcZkgIy7YlHyp7yOYp50ArHSLjPrfM0k7Hy3ef11qRtgTl9vFlhw9G1NM/jEwAJMM9gc1Tk9keV19n2lhVdbv9BsDWsLNL3g209wIK8TfIG/U4P2xH1737bD5oZuQWYvM055DNqAeUjgPkp4FQaKTpXLMWQWAY5maqXj90rq2MwwlLBht1mnheOSmVD6n4x9eMWizWG6W2s33qdxZZY20mxAIpkUvj3jHEbufEigxO5jXcJQDU8+UhsiXcwk4XMxAIiFoC5JZX4i7yIlGcNSB63iCBmrj0rn3T3MZcrn8mQgDXX0OCc39G5kgscETW9qqKovXMlSJ0HdhknIkEJZ9IqAYH5GHJ7bq8MYmeGhJRrKobIcRMl5W1MeNxisRRdGnbE45yfAPZV5bSz58XjvVPAXa4Wfr/4ElxPwf9AZDCQGBzkz40BUXtPZLC/rgSr76FUWDhYnVkRmbJXul3VeIWtWTeyqM6U61HAdYwJzlEsD7tpWRmgpItWWHwBIyOlWFL8PUD0TVyBpaDPmBFJy5fz5a9caKsuby3UtE3PKjHrZLm9yX7k7yzcl87oM6AqQiMgMvo+sjDq79bvkTxFOf2wFVot0vptKZEza6ZVL2Q0Vh6VWxSckchCdjKAHdh3D7RkBDS0QjmzaEs5es76vVqugfoO83YzhpO2qcd9SkKDjjErBnI7RoT34edoVECzD2Kn487iRfbACFDn5ZTrXqSEptZUYeGz8W0TRrky/+WaW8zW6rTulmHqVrFAyew54+1GooHJ/NycWrymFN/UmLj5BCDkQH8GJxT9773jr/lmKOB04Dl7cc16kQ9anjUgibFa4oEeYIz24W2t7C5FVN5/xJSzU1Decx5jtkQDtaQ7S8yFpwFYViDnbQo74mqcCG2vFUw015YSnTMVViRlt60KRKTL1DWDER6c9YAnJ8u7hRTfj50XBI8KRoIvtUZKPzauPLTMAiJtYBxdR5vFw7aGWa4c1636BOsgdVaQgRaIzCqGz6xmVhpaqZxJUKJFghHJjFjKdHPNE0ub3K9jA5wzgQj1b3s8y0WrU0wHbnCyj+TEPkuhemSi3wMjM2CyTNbJPpoJvwOzmhDyem5hS0brR+zIU2TPzUJXYZ9luhoZbbrjG022wMiR1NFHlcGnKI3atcY6jgVGjtwTbeXe4rhIIT+Xeyl2AaX0u/YYlsyyP1rbleMNgAgvk8/VETCiY7wYjLAcASMsOoaEwQgz/fU7zTuFPc/z4XWr49ajYMr1OP9UMCLbNWI95DYjsVgQnVp69imBiTwXu3Bdc8p9egddnreVPgNX9JijMnJRfzbyUofknZRnDUhY8ebvZi7tPDDcaunjl74O8uKljpSKFrloIdct0cLuWR41YH0ERADhWsKMijpeygNxC0ZIgdyMwZov2SzQqJQ4LsZ3vwS8dyILHhc/lH0YgYbdaGMMbCByi1ggsVkmwIi0brG1HkAXQwLYSvKIKTnadl3gEKjPGzMjh655oAAxOyKVGbkO2LdSWfdeu8OMKmvrvrFYEHkNepkls+B/eV1HZFhROvcby2H3lpzVqL2GcTyCBU5mv/vzDazkxveO5XHtMu/az7q8f072QMKtYGTktqN/j5hRDfy4GGTxa/faZeXtZfGVjdTHfsp55u9wD0bOmTWVtT6K0YrfCcFe0O+9Vtgb8KXsg
cjZWEJGp6elnD76SnM7GYxEwY5QG4gdKUavxHGKkiHpjVZ6fGf5oFz+rHv/QRgabjmnTIoht9liKsZVgFy22I0LyF4XMeUkPgDkeze41918sbXv+ChBxou8yJ48a0ByjQkhW0bod1UeykSsiY9m4q6WpZE1lDbml9CXYkM+VfozOFdSEXpUv1myMlXGg92yAKbLUbNpObY4tdNKHYxJNUqoKX6vJag9Ni5azBxRLIkAL2LC5QmSA6o/dg64C5TS971TgHcQblp1sOGUyMzExFRdqoC5ctJY+8hXrYklacBN1IHsKJMMF3QEUOqosCUMsF21RvdWAxErjS9/1+BWXqsMyrYqhlvnLn2htrOYAa3McB+OmBEtkvXga9X9oNvXp1K1WaiZlVG+V6X/xHqzsvhACZfrFtEXdfvene1ojRGgVVBqv2iAUZmUt1GQ7Xs+KVgn7rOs6yNr90j/fe3rP5M9ADKSkTJrdXlMFQh5fuHlSOdRlhEIoWeW97FYjKeKvm8zhmQ4JxjHBGphO+uZ5Gf1nBODyIrbXEtC3keeH4DbXOFGrsvmtpPVZd8D8ZIsPB452OzIrA0aiAD1nWRXLFnYtsx5W3XP4m16o5Vdc2Qmt7zfmoWTy6zjfliix+I6jgHwxI7UAqPkuuUTIDOIavetdl0+bmozkLaZGZ8BInlhSN5JedaABOiDIyWzcTxFphuCEnlMeT7vyNJXgEgJeG/PGQHobPKlMjmDFF8nIAcAqapszvlmeChWo0QDOLMlxVoUW3bEmpCkvz1b6rguyin/6QrpOvMVn0OeqzSQr9NXS6ycZPfG+F4JqIyJnEyGDMcEjOhle6yIFXxtxoz4qjho5dr6PpJbFGiguhVSwb1+vWRDdLYsWm9/WrUarMrXR62MjZveALTOQIi1rGUSemV+tK8lWxy72/D6eqzq4jVTYo+wI/VaekZH78/pWY8mLxhei+m2N/9d25A/DTAy6+q6bt+FK5TxGOABdjPGZ50t61ZhlqQ7v3fdPTzKnizGM6zBpbyP9XcdO7iquQwiP3Kv+f3mwnVbIgMZ0HsISJfZ9hiDY8daPBiYewYA/UyYkAooOXq7eDOOG5Ftb923qgFLXpscw8q+Nz4rtxodjrz7H6Y85bzU5CTmepRYUcmWaEPciAl5YUhe5KnyrAHJ4xrhhJLE40bcclE3oaC1g3yL9H0EYsoTZd7HBCbixSwuW4lfWBqdY5SgqBr+nEsIoMnHuTxxZUtmYBPEtrYnFCg+CYW8Mh406NYg9lgsRg8rFUN8VIHsgAAinooUBudKAPspL9PXXTNptTEqMVVmigavul9IrX9y5N9COQm5qFupCl7cs+p52frF1eXpO/XVkVoi8n5y//G1SLHS0VoySlFrud5Y0iujA5cOoWQ394Pbp1P6GoULLSZEfp/H2PQARFsZZyyUvMYtjl0ARgyRtSx4V4KCz4vHeQnlu3bhkvvviQatZ7Xc7ivbGrqXdnbkTmaBKanENrFovI2rLpVs5ABapUKLdrGU8SPDVL4KiFgg5FiQazbmSICax1/vatzYyfs8HicI+rmREWjYEwtMW6yJdQ4p+tnUqVc5GcWdYEa8c7hfaur0JVQQUlgTUWzQcrnrrsewTut0zptvtzllQDFysW27kjV9vv+iMCw3SxTsRKQ+EQQLvOvjQrTEhCaAHRBZFJs5h1220BjgZNyhfh81oJQuezPZonaNOv6M7RlX3ga0aGa1frdjlXTbkOuVxC1li2k24rKrVgEdqR9HdmrAPG6TCrrvirwwJO+kPGtAwko60DIlpNjmhSI9La9r30/XuAsU8WNQUoMI1YAf24GfsoRwdXWiP/hd57iS4FCAiGNA4vsqonIMiKm6b3FwuQzwi3mwZmpbWxV5kDp5X1wGliAmxWyZ01mytjLooytMyINcL67ch9BNrnXy9Io5aVy1DAXYUozLvoZVTDIho+rqOpuWFBkbopfPskKNxFK0n6xM50nEDFY2gUZvQ
RwBEb2PZqlmYrEH1m+57V6fmUHBBpgDpNLcH1Mr6gyM+TzWu3/cWmpPPEcVlO7ZEHFmzb5iLLMU1hEokcqqVOaGV7PzGB51K4qxPqe0aZvcQhZMZQMFgGE8CbDP0M3GhSNigR0LRMrlrCTqZ5QBh3coLlpcc4oBiveuqaoNaHc4nntqe8SUg5gRgmejC3dpsrIapmxEs/ulGOBSfQZi/uHLmEPH0c8f36OCV9L4OeLT60dQppO3UsmXuagwJe18p4GJlqMMmb73bwtKbpGjx5DP4RSMpL4YYlTz85ZSUThqbAkKW2cxIPIZWl8okhd5ojxrQCLFS2tSsRjWQMFT8MWCqHPzX7OVQAbJx5gQmS2xrEesgEdWplsLVBAZRZwMWMztXLLSj7hWIJJi/vNwYUECg46aXYsybKUyeBcgEtmXtrIKOnsY9wsXTuIaI0EFT3Lw+rS/vWuUBq9SdMRG0Whlpvww0yV9hXsQYrfuFoXjKBsC9GDEihM5D9LxatHKjHaf4+/BSF2iwcPI2mYpYdoqbLNHcyAyOrYlWnEc9YFeZgE1nS5VMiOcjKH0vxFXY7u79EUhTyDXNwA45bgxNjDI6w9Gn47A16hvtGhWpHx31cWHrgWFMdGHiVk5pB/NR5GaIEL9lmA8f5XHn2VhOlLrZEsCsJRGZSMQP+oxZeOEy4G3GVQN4kmAsVLZnNt4Zq14IUtm7zLfM/3uB+/x6hwKY1LG2wxC7pdQDEAcp8e1nhicALUyut2trvTgSVxHzP1WyAoBUGI2MrUPBVuD6m/ujhkr07lupQREh4jUpsX3xJQ40Ln1IeUrmFDvRa2jVee4cu6YpnOIHntCLhLI8W9adIKDmVjvt97nFmbEer5mRVx5WXttY2YkqLGwgI6Y4IsRxlV9pjxt+f0Tc/xoLJESU03//yIvcqs8a0CilW2dLlHm5SfFW4AU8aKfWOkQgEQW4fMlILid0CXdWa38VVmRVW9dnsCLxdMB2C4ERuIKxy93ikgewLYqUELKIRsvYsoTUIQIYq+ghGt1yD7iifPkua4ISsFDmU2rB2B25gzuQskYyX3KduI+jIT7lj+5rsoWax75meVrz22onEesH4ERuY0VLyLBSOuiMQckI6VTLuMJRu/PChQHzeprHU2mFgCR+94CQuygb6GoC0XgFuug7oO2P1oXreAdXp2XbhlXJtZ1FeaiQTQBEQBloq7vcwUoHTAcPIv6+o72RekD17pq8frmGoXSwddAn6pNE+BhiQYjmo2xQMiMiZLWVQlMyCjDByDlOiYgejWmDFy32P1GPndvK7P3Vn/q95ZdB80AduGmdR+YDaHPe7Ed16/yrk2Vqy9NBoEDbDBxVHWCFXsBUGJ0A2CSqosOuztDj+e0XfusVbb5ms/lnQO5duVttgROgw/0LAiLBCIASuFbGR8pXYeBnsUGUJKlaGMPAJyXABnczueVz86t4EQe35I98DF61kbHavdt55w7PRc1hozxu0lCBsbyvIhxZRSk3ukJQnd61yU513mhfBTnfJG5PGtAAthsiFXcqdLgfaCgpLWDS5nK9IUavrr6oskXuwE8nn2BfWVAPM2d5fxZsWiYkRQJjKT5S8yvfsyB7OQOVgdtbfm0+qiAIS8srWoAZApe9g9LCWpzVbFolHdXl+mUlbJt1oTXbtdOPC0YafvJClyk7caTyR4rMirayCKVQ/4cgZGZMiP3pe/2AKmVe20RZoVMyiw2hD57IDL71Mfr+mQARm5hDGRfSFakKHeZFWnYE/a3V6lT5XGPyBaTAjBqX6EQS/euD0K0gtKMacLl1Dt7nBuJBiF866bWZWFc4HPolMFaZgHXnDCk7t+7fFjKNu9Hl5ifJeG6VY7ne5cawA9Z1FvFen/1uy6fy7uG5WwD2HU8SCj9Swp8qWxugBH5CVB/1fX5/vLKVNsbB+4zwdH20n2WXPxyXyf9Ptj3XqeHLfdaxpQ0jRtLTC3zUd2w+nP211OvA2jfo625E
K4GOJcjKaHle7tnhNhjTTQQmYEWBsD1e2VGLNHunvxuS9HueezGBfT3fdsBhS8EyYs8VZ41IFlCpcHZ9xZAAzx4MrjjvO++VpJlkcoTUN2FgJpukNgT2k9mm2Ag8t4p4BRIQToHmlTuA1lt7zI9v3iHkDIQWS9wcatuWkAJenLJIzkgOV/iQYg5IAsT/24C2WPCKtIh8nVJ2lammpSuWgQwUqFni8GSrVyxp8i5r2VwKvfF5lOx0FmTmHSL49/MAD2s5Gp22SLe5NS+0l1L3rM9lqRhBFT7k2iDpdiNMml5qQwLVoS/y4nhCPiYTTzyukYxDfo6x30Ru2319yMg5Ih7y96y0TZycj3vuGcxI8LvPyt/5Zg3WqNO0upewH0O/o0UC3byVUmS7l06lfLbiFRcT0G4enrXgZGRggigifHSqbR5uzY2IV+LbxkYuV4zI3sB1zU+pHeP01KAh4jno+PG8huBjRWuC9PRymHwHrVoXAuMpSVc78tiJRYYsSEWQOaxll2wvJiDOFavxPBlQxYzI3yPuVvlpfLYHFxlG2IygEki1bwBi5JlQh3fS7yAg8oamVeL+a70cUKJT6JgaJcL7dFxo0vZndnhKpiVURE9xo/y3ePzSHakxD+xQSwzMFxb4xQcfCTDIgvPFdV45cWcYifusNwyyzpj3D3CkjwFgPRAuA9g1yDYclvVEhP/I6AffM1MysCUwWx0qd9XiB73PkhjzYcmL0Ht76Q8a0Ai3bHKb98yIOyqJcFI8G1W9DLwB84MUjO+sEWFBmgavKI4PvsA3+XJiWNDaHBAce3h5W5byR0rbkAUsSPiYZXUXkIbMxJTTfcLVGujRWnXPuIJq7oajVyomsFE5J/X7AYfWyZ4LApLbOa98fHFcbWyJCeCIwzIyGVL/7YC10eghMWqxs4iJwararj+1EBkNpEddRnYWzdiQ6xlTwUjUm5hJuT2enKVmYokSOH4J+lvP1Kib5UorMT0Oysg5dpbBW+LVeE+AoLm1t3KivDyERiZHTuKd4mBPoMTXsafVWGdu1S27Z2zIkf2txhWFq5TQtbaClJ4rCvnFolHNMD4IFy47HfZF/auq4XDbIhi67oCh/l+FobdS6ZfucvpvsmfETRvzXQ/J/qinhvgJEiSrdJJMeS68fOGxmWvDZLmFaqBg4lBZwrTWcL4fDPhvmbgJZ+DyV581r5N0Xblkuv4+0hmYFevH7EqNkNS5xy5rN0uX+HgXZVzerkGDfQza6LFcutmSc8BkLzIOynPGpC8Onl8/G5pmBCgDqAyW8n9QpF2POA7MQFb7+s50YuYTgkxhSYdIb/4DjS4eAecs4K05MklODq/cw5nD7j1AYgRbrsSAIlry4w4D/iFwMhyRvILkl9wWQloXHIRqMuWsEZyH3t93XCNVBCQWJPUDMh0vf0ECVQfeS6OpMWKo7BcNyooaUUfUg5q2nq7xdQE47O1qi3M11Zdt/50pfayvfB1BvpChzORgeyn0MYtnIU70d3Sx5AArSLT/t4HIrcqU5abyhEQIr9/kEBkbzKW2/G22j3rbqEA4eBdU6hTWqAB4dbyFBQipKgnMaen1dbnJqassiayyOpMToMQZR3/BvRABBgzIyz63aoFTFPH6NB58zIvwElRSggYhNCe72jVbqnkjlgSNmDIzFulDTE1vytYrBaP8+LLux+8K2MG78TBzFIxt4CKdIO0rNeSGdEsHoNkADUgneeg4CuwLJ9zBk/q8xE2QwJUthxgF966nK9mizX7FFCZCCkMDJkx1zVKRlna5Pf/P3tvG2vbkpUFP1VzrX3OvW9LS0NoREGJEuWzQZAWNBEVw4emAZUAEum0sTua0MR0ogEVEDASaEIaGmIbE4ImYMAfCgbTgijRCEHFGEGFhASVvHIbebH70n3v2XutWfX+GPVUjRpz1Jxz7XPu6bvpPZJz9lpzzY+aNWtWjWc846Oen9b0yo70a8Ta++kVdLRMyaLtZcy4MUeIOEaVO
tisF4Au/hrUvLiMMwHQsSYcI3tSB28BkPHf5ZrBz26Kc2V41KINDz57oQwwJYZoZnzSxr1d6i59L/eyJncckEyljobvPqAZEQ1EbMaJGMbZP5i2F+BiEOoCwYUpBnHLopWsVlzPCcgzwvWj5p4138hZiqtWju0R5BCAECsYOaVSlb0o7Ofy74WbubiQJTw6p65SLWrbUKxESzBCqYG7c1uArCV1TrkGvdsUyvVaBghSyWjX6D97YIQVdqlcXJ+Tu0Dof5u1R5TFeE82rZFYV63qnqFrDCjlRPp/XGvCbpN79JmKPb7we5iSNVctYBuI2Gts3dMeK6C3EF8Vw4F2z3p4mED3LO12CPhgeY+srbMpQxWXK++GAiEanPS/+e0YuTB4LlMEIfw9hiUI0W3fAvoajJwMUzbFVvSMPv8pl0kgNutoA1y5Ftp70kJFmHMQ0JgSKkzHSdoeQ0CcgBOTJimGwYIJz0Ku75/fvfHav+89GOHYBACbUCEGn9UiKGn3vOyHlJfbvRnAghG9nYYzOZ9iHpwiqZQOKAymHJ2gwGMw7KHV5Ve7/FT3sOXNe4B1UU8lm0K8UMwIZIzYujaEdFNoSSn4j4xsX1NJnifXHcu4bYFbK7cFIfa3kTuwPv/IVeuS6uk6lkjHIDWAun78nciyFRyl72lc8zeZ/PIv/zJCCPgdv+N3AAD+/b//9/j+7/9+fNzHfRze9KY3XXy+Ow1IjjEWJiIu3LFCQLcATEHS7/KlEk+AUD8Dy4U+ZyloaANcI+jr2/y8hSEpexJ0nMtfsiI5CRChKGYEIQozUgCJZkTmLMq3fE81ruXRudUcoVuGtoQCkM81i1i7N7FwNguILmqofc6ftHRWsLRcXDyXqz1gxIqXVYgSYrgYoFRXNwM+lv/WY0MuYUZmAzK3jtv6bQ2IeNcfnX+0wHrV0UcLb7+td9MCFFMyNWWvKu1mYt8LRkZdr49fBvNycihfE5kTdd5SwZrZTm1/rTEJVpmwbIhtnwdGRvVn9Hxg3+mujfXemi++3KcCXCUlKN8nVu3WlZy3RLMk1iVoT40Sus+x6UknGhgA/SkG3NR6s6luG80b+rgt2XIH2nLjm3OfspdrF+tXqeRX5rzyV2fZ8lgRuUZLncs28drdOS+YD9cKKnb7mZhELaNYklFb1pK2AEuXM44Z+cIB07/LMjb61vXb9gXAb4GSfYDEGrSW8+mQfR8wI3ulZ0x7Q4yWveVF0kYBzHvZL9/93d+Nt771rXjuuefwmte8Bm9/+9vx6Z/+6e6+3/u934s3vOEN3bYHDx7g0aNHL1n7/tyf+3N405vehD//5/88nnvuOfyJP/En8PEf//H4vu/7Pjz33HP4uq/7uovOd8cBSSjVxcVy+nASFylxleoBiX1X7UQvC0ETTvK1gmkI9RziClYmjQAgnRFuFAuSzg18EIDQPMWZOJTih/GAPB3l++Eh5gxcn5uL1ovnhDkBjwpr8Ki4aqUkLk2n+jdVxaP3BRbr54xlfAlTUdb9itQCcbSS5GY5TaEFwV9qldbnT9m4lgxodc2O6PS/q/+Ui9YIeOi4kFFBxBj7QPYrw4aMMkABY7bAis2Y5bmn6b/DPr0QuFzKiAD7gIjtg0stfxWQTLGOTx0rZuMo9inC6h6ML/9iX5sKuPTDkQqyUoh70Nu296mDfRm5mlkmc5ThRrfBSw7R0pf3tXyAfkxRmbo6lMroiMp9qylmUwYwAay7pKt2r4GSySg4urDfKBVwN/8oINiSkcj7IQkNaNRoGu6cMq6AhbJJtxyyjtrirfultt0ogCNh3SkCkJSlvWSgYmhxFXPOdSxFjqkUahA4YlCGsKUzbAUjFZT0bMgWALkEeHiig8zZnjXjD2Xxrj2BYt42e1wdfwvjAd9deU426B0o74I7F/djw0sO4n23YufOPdXVtwDHKGB9r3jZeecnACbuQNbfOxHU/gM/8AN4y1vegne84x147Wtfi7e97W34nM/5HPzCL
/wCPuzDPsw95oM+6IPwC7/wC+2SLzEr83M/93MVIP3gD/4gPuETPgH/7t/9O/zoj/4o/tJf+ksfWICEvrh0m5oKEGEwOVkMYN16wJ96/SFXA0vIJetJOU8NXA8A5huEnFqQejo3IJIUIAFM4HpsL0WIyNMBLHzINL6sx0H3LGFGUq2RUhUQZQVdy3jhKUinWfqLoMsLirdS1tbdoKQCj7RkRIA2gT1pRuZx3LT0AqrZka3FA/DdR0ZuI2uyZcW9LUjxCsltgREt+n6tG8ESlOx3Qaj/QgsOBlDfbfceDXhebfcKGLFuMjaglxZ8EfbPUgHqZdwmD4jo7ZcEyOvLatbRdYFxwEjnghJy577F9sw5d0zJFKCClx3waizWlwhjDlJWrlyxGERymctNjEntNwUuPFcbkd7ybV24Fu3ZAUrmJPOoNyeym9q8Wb6nrLJacedyvVy/VtHdyY+aDbG1O+RzOU9arg2XiJd+dwRGvOljlH4YWHef3G5X+7wvnfUSdFpQumZAUi97u25aZ8r0teTvPjDiZcva6+r2pGQ7O54d60vD573cTr79278db3zjGyvr8Y53vAM/8iM/gu/5nu/BV3/1V7vHhBDw4R/+4U+tjafTCQ8ePAAA/Mt/+S/xute9DgDw+37f78Ov/MqvXHy+Ow1Iqg9/kODSK+U+RZAiIKIsWyablSd1XcjLF38K5RzphHAu4KOwImG+KTEiZRuvBzTQUUIUczwA0xVynJCPD5GnK8wZlQ25memaBbxwkniR917PeDQLS0JG5IXTDPqGLwK51UztWQDXXBw4GbLibkyorhxt0m9MSRVjGaG/rwYjnn8727Z0x0qD7eM0uJVyj2YxvwCckBkB0AWy72VGRrVHrPjuact4GW9fylaV6jUZndNrr70/D3ww/sMutHZb/W4sf7rmhlbQbXtSRg2+rNuQFyCSx3ffKwBox3qL/KILYgPrTInqsSdsnxarKLmxIxvMjVdLhO8U2UUdwE7llMVF7TtjXfXquwMJDk86q2C9NWFKJCagxE+UvWIkS1Lau6GUePqpdQOiEq8t3xHLGIGYGnCZYpR7n+TeeX96DpFt0WGMfNOuLlaq+6z2p3J5a8iCCASLWJweiLQ5VbblSqKvxSPpNLkEC32cxRKQrMlWMgiPFdEGJi/D45phw/40uvxo7txKJb3uQgV02b6i/q1357OFEtvfqR6z15C2dNPq45HaulvWHpXSfHTfswLle8GZ7Js7hkWzGt35F9falrtRGDG+Hwoj7r/ezc0NfuZnfgZf8zVfU7fFGPHZn/3Z+Kmf+qnhce9973vxO3/n70RKCb//9/9+/J2/83fw8R//8Y/V7jX5+I//eLzjHe/An/yTfxI/9mM/hm/6pm8CAPzv//2/8SEf8iEXn+9OA5JDeYGn2BScKcr2YyyvaToL2ZHOi+NzrfuR6ne++pOl9HIqVdXTGIjk3Fy29HkZuB6mes4cJ4kdKcHrc2ZGHFlIb1IJRC21Rh7NjR15dJ5FEVHV2O2CC2xNyksrPquAc3KMYUKceiUoOqmAgSVjMufc/dXWNNuuJ2FRsZZRpqfhQkyAYYHJGlAJntUqLmNI9PXXqrGzfbrNI9cpq0SOgMcloGJ03F53g6ElL4oLAt2t7H5Ac8UaWf0sgLD3oH2bm7UT6nd06ZvX2DuPNb3E/VAU5awC38XVq8ZXGKC+GkNigJJu36ggnAX4c7Jgf/yu6XG0CmYjxC0rtLiS9p6q+I5qwak3Oz7nLYSns8BEsyP2uwS/i9vfqtU8Wt9/7faVMCpUynlyFIdjQUkM/Viccy6g1q90zrS8NkOuvhebGteCkLUgcC3sX8Z7jESfx46/2h4Dmr12c1+/LZ71f9lWYL9r0RL4hKqMM2U329gxhWYc78mmtSXeWqHnTJs90GYSlPYD2j20Be63m+3elSL2GWjXSS22ptIl68Sl+32gyvPPP999f/DgQWUZKL/2a7+GeZ7x6le/utv+6le/G
j//8z/vnvf3/t7fi+/5nu/BJ33SJ+E973kPvu3bvg2f+Zmfif/6X/9rDTp/0vIt3/It+KIv+iK89a1vxetf/3q85jWvAQD88A//8DDWZU3uNCC5mgIeHoQRCWiuWnXe0IHks0qzWyQQcGTmz/IlVIfd1P3T561gRF2jIuIQBYBMB+TpqgWwT1c4IwqoyMA50V1LJstzyng0J6kWT3ZkTjUr1c0szMic+hS5lDUquVoFnd+vnP2bS5dYp1PoLdK1mJKxrnjMCL/XzFq5B1SjCU2DJ6vY64XEghJgv+/0xAWiMG8agDCjFq/jWU49sW4x9h62LLRrfbJ1zZGsLb56H8AvEscaIXrbokBX7N2vRqltgcsCMhcxUlBuMSkPrb22RpE9x5aVmOYK7b7VMkP16XG9wnLD81rru9rdKpa0Ptp4ER28PufcMaY9MzBuCxVt/Yx1XAlQYjZiwJzl2bK/a/E7ZUnXsqbs3kbP0xm4mAp4grAnp4SahUsUuj67EoAuhavtG2sBB5agfE3mcn1Kl41NJQco/JP8VgsLtvF7Ukq3ZY/WwIeOK9oDRgD0z9Dej3HL0kCE41GDla59OyzvVnoFXL2nBh/eJoh7BIYawzaeCzVrAjRAfwlQGRXc9Aq96qLPI4AhGf5YYb0HrFpY4FULgcucczE85g4Fj9iRbVfh7X74QJaP/MiP7L5//dd/Pf7W3/pbj33ez/iMz8BnfMZn1O+f+ZmfiY/92I/F3/t7f68yF09KXnjhBTz77LP4rM/6LPzar/0ann/+eXzwB39w/f1Nb3oTnn322YvPe6cBycNDxLPHiIdFEeB8H9BctEKJ5eiqomvXrRB7IEFwor9rRbH+nrvv+q9mXOo1mM53OgLxgHx4iJskbMj1LMrEozkhZ5RaIyVYfW6B69dzqsHtrGR+TqJ83JxbVXNPtiZNWgNpsaHoTDHaPzSGIIGuUFbMspjahcsDIgC6miPWpWTN5Uxb3keuW80nvvdlX8uwpauxXxVXrWdU+llPGef11uJFrFhXrNF366a2Ry5ZHNf23apUrSvTd5mxDAA5qvOMskh54ilUVtlZSnBByRSCe601Ny5g7K+9B3hEbc3Wlt2duHKkZHpAxLpnAS2g+2zG1kg0gCUwqXIQFylhDYCY5LmeUJ6rU+zOS6ChxXNb81zu9O+L34oLV0ot4D2GjKmAD777KbPNvQXYvnO6j0bs0VohOh24r9kkum6lXFKsl+QAXf9EBUCcgO8lIGmfz17syAYgWRgEmK1skyVp59dt8OrcaCv7mkLrzfX2u94+DceJPXZ4K/V3t7BptPv5BRZtEPwemcz44XypC71y3mTBZ5viHFDjIfbMVfutGWooNtmGBsp1v8LuJTUGl4azdaPG9fkOIJL3Y1D7L//yL+ODPuiD6mbLjgDAh37oh2KaJrzrXe/qtr/rXe/aHSNyPB7xKZ/yKfjFX/zFx2i0Lx/6oR+KP/bH/hhe97rX4Qu+4AsWTM7v+l2/61bnvdOABGgTawihpt2MAQ1k2MByYOGK5abknVsV9QX40JKXYKUCkeKqlQ/HGjfCooc3hQGReBGtfAIvnmecZnmxXziJe9b1nDpG4VLZsph7LgrMHlMDWrV0uwZ90MKv2LqU8NyXxEoA6BSmS0UDJQ1KdLatmlHLCWK3LlqXyN5jvHsfbfPOOVq0L5FRqskGxlqciFccbqseA9Bb/Gwz28LYNlzyuMVSeJs772VkfdXbdaao8qNqiPxhliW7/wiY2OJwGoykjO598tyzRozI1vvlMXgoxQZZdHCKAQx014xpLXpHw4RzCW29lfPoYm2FXdVxQSqOYvT8vYD37hraDSnmTvnU93zpu3Lbd4tuhKkAFG0s8VymehetfpsXv6FByBp4Z4p33ecz8uK5aFkDOovU0g4Q2Qv47HZvPurYcKiA9Gz60ABlC1C0Jd9N6BK3WeRLZc951sA602XrrHTDJBvqY2csse+UcXnsxgBBzAYIuZf98
kEf9EEdIPHk6uoKn/qpn4of//Efxxd+4RcCAFJK+PEf/3F85Vd+5a7rzPOMn/3Zn8Xnf/7nP26TF/LzP//z+KEf+iH84A/+IL7qq74Kr3nNa/C6170Or3vd6/CJn/iJtz7vnQYkVhc4FCASzje9K1U9IPaMiM2IBbSAdJ2618afWGStA6S8iuuHB8jHh8B0hRfnjHnOpcq6gJFHJUj9vddzCVYvVdhnYUUYuO4pFYcYAMVqaKZk5NbkCYOz9fnnmCF+5C2DC4UWHfa9nmj1dTQQ0ZZcaWtzK7GTmpxvnAd+jn2Q4Zp0igtkctZARGc6ouKlmRFd/FDa1ueMX7bbt6DuaauWtWxYWpEaAZGRa8mWK9cSjLT7t0GYTLkNlHTYcelqEEPPUowy31k3lDlLmunmgqLSqCqFa8t1Y84ZULFP7n2bNlnl0AsnSEbhOUIpglUPDrAuWwQmXtE3oBUW0+6O1lVLp/Ple9QXE7V/+/doOTfwc+y2279rCRu2xhWgWKmgs6j1ABYAjll+ZFG9UZrnUcD7nHOXlKMvaNkYgcqaTL0ibfvIu5fRfVE0S5JmaQ/H65SBM2zNm14WIGQFdFhAsMews3gW8z43qBEb4hmXLMPrjccRy91/j+54BMZxe/Y+F0E5Rtyiv171yvrj6uk2Rb8DNZ25Ym17A047puuaGLp500qde1Rb3eQEsc0zTGgxIQhb1wW/92vuXQUjOQTR0Z7yNS+Rt7zlLXj961+PT/u0T8Onf/qn421vexve97731axbX/EVX4Hf/tt/O775m78ZAPCN3/iN+IN/8A/i9/ye34N3v/vdeOtb34r/+T//J/7iX/yLT/xePuqjPgpvfvOb8eY3vxnvec978M//+T/HD/3QD+Gtb30rXvWqV1Vw8kf+yB/BNO1/Ue40IFkIwYOT6SoXp96Q8xJQpNTAC9mSAkjAlL76MsFx+dLxIiGgxovEQ82kdUqobAizaJ1SAycvnCST1ounuWbQejQAGIBvwZliXCgfPP5SS/1cLHmenGaoegWhM5npZupAW35fXGOHXKrMrx0zct9ZY0VGQMNVTjb6ee+9LAJnzTnWtlkwskdZ5Gd+Jxti3bOmGGqdEB23peuEHKfYARG9oFpXqRYMTotxoTmK9Y8WQVq85x0ghLLFmOix4IGRtQJu9reU+mMrI6KsmQtApcBIZ3VOPavYgEljSS2gv43it9yu547Y/T7FgBuMFUIra+8G2RYAnaJer1wZld4Nby1ZQTtWjgOUW0rZVscOi8Ualx0d3LznPQbW3Zy01H4escvcr2NG2vP0QEj9nH2WebRmaHZBxpg2Lq3dg2nXChuyrBmV3LZtARJp89ytcdyu7+ecWuX1Tmqms33zxt65ZdRPe2MoNCDXQ01/ti5pHotWQUxYMmIWUPUJOOTYE1AZkiQnqe9EdN6JS9fhe7mdfMmXfAn+z//5P/i6r/s6PPfcc/jkT/5kvPOd76zuUf/rf/0vRLUI/d//+3/xxje+Ec899xw++IM/GJ/6qZ+Kn/zJn8THfdzHvaTtfOUrX4kv+7Ivw5d92ZfhdDrhJ37iJ/DDP/zDeMMb3oDf+I3fwNvf/nZ8+Zd/+a5zhZxv4f/zfpbnn38er3zlK/Hf/8f/iw/54FeWrFoBh3wWIHG+6ZkRFEBSwEPNkHV6YQlGyJqkxrDkeW5OlXFCmCYBHbqOyOFhY0UODyWl74NXYEbE9dxcs148J8w54/lHTOM744WTxIq890YYkucfnfHuF06L+96jCNBKenOe3cVIi7WS2iBlWyei39cPWAaWk6KtkWIDboFlob5+gVvGmrT7XKY19c7hib23ZZzIvhS/q4BF9YvnWqOVSbmXlsL5Rlm81yxRa/ErlwCR1v5lsHrtF1Ow8Dg1RkSDEJ26EmhWPi+YvFOyym2KpU6+s+6Ol92nux/jBmYZGq8dlrnpAdPy3FvSXGrad5f5ydl8b/fXihqmBRDRWfWu1TgBOK7GAGSvInGJk
WBtm1UguU3PLwAWbn8AuoKYU+yzDcUwZru8wGuPWehTp5pz7FwS1wKu3X0cVxw5trwHjlGnj9vYZiSAbau1N1+M/o5kba697uavcRrzvdb1LTdSYJlyfFTXA1Dv/pqxwWNLBs9vJPb2tuYsPc71fCqGHtn/qN6lrSQcXVs0EDTzLeegU8o1HomZPEfJZ4AxoHz0vvfib3/Bp+I973nPpmvS0xbqjs+9611PvW3PP/88PvzVr35Z9svjyKNHj/Bf/st/wa/+6q8iGWP4R37kR+J8PuMP/IE/sOtcvykYEr7nIZ2bJqB/J3iIEsWWQ0A1wFkwMt+Uc83I84yc5F+VNANpQjheSV2ReJCc1syidXwW+fgAebrCC3PAKSW8eE64PssLT7esX3/xtKi0/n9fkG3veeEG737htMh4Q2URKMrw1dRN0rRg8POcltGRmkGx1o65VHunQqozcGkQpNvELDxraRiXedOfrHgLuv1tdFy/qPW1Rvpty1S/y6xbvcV0aGkjTR9DV01azjHVe7k6tFoJkzP5j5iarfv2jvWKdXV94AARHXypF07ZL3aKPwGKp9izjkdKAZGMQN0v1+xDQFj4OwOO77gSGZ88HqU6tpx3jyV0LWh2VZj6dZC1Rm/jwn8ublhrQMRzz/ISWmy9Z97vI8X2Ehcmfh+5dum/awxcyo2BO+aW3csyslYpY6xYVIwIr1ffpfIcWyyJ7oPxc94DOtaky1pUPjKOw2dF2ndPGbRMBLd310zt/ik3WL7/9nnZz+79DBiRZlCZazv1uLXtt+fzrr1mCNJrH8eUPZZs/+SAkT0JF3jtvanCvee5iMM0bfQkFcCAYkjhXHdJIg59IynnMt/KeY8ATiEhZslG96gwpMccwRo/bAdmYVyYsW7EmmyNm5eD5GwLYT+da/5mk3e+8534iq/4Cvzar/3a4rcQAubZydCxIncakBzKxHSMQeJHvAB2iJtWTmcgx5ZtS4OX7B8HQBgRlcMxTJOwH8EUOLx6Fnk6Ih8fYo5XkrL3nHBOwPU512KH4p5VUvemPki9ZSua8OxVWkzCQGMLrCLMAk6aHvdkvF0v3A2YtG19Fp4Hh1iDXYG024K8Nlndxh+1UzTMBGknSn19z9qmFaN+Wx9Dssf6Juc2jU1tnynIcyfo8DK2WDeHPWzPXrHWa90fNn2vBiM2TuQ4RRyjLJDH0g/8DPRApFkmBwAy5tZHKnyIn9dSdmop63fn5qNdf+SkDaTwOtOkAp5DkOkh+mDH6+859SBH4paAPXOyZk48MGKzZm25wWzJ0hjhK5fcdukY9Czt1qjRzinv2NUhtgxfJff4lVSxq4G4MWRgijUo2wKTvem9x/fgb99TSwbwQSdFZyi0+3uMjeeKpVlVva0d61/fU/71s9FuT0DvLro2t3jjRbavF7VdA1CegquNbdxPu2c1I5zfZs7RvVugNSD1+6/tM8oIB8gzPUCPx+20vLzWJfWQ7H2M2uNLrjfDmjiIAccsbT3GVhw18V1DqmNzlIlMPl98C/dyR+XNb34zvviLvxhf93Vft8i0dRu504Dk4SHimSkgnB+BBQsBlDgOdPEfgWCFgepObIjOsiXuV4cuPVyOB1FhpqvmnnV8trpnnbPEiLx4LfEfLxYG5L3Xc40PeeE04zznjhKlXE2xSzer5Ua5Yr33kdznFM+dIq1lZAEdWdKbtVUDFmu9b5Z0nssWd+r+jiZK7QrjgAbdTgs21sRbOEf3a5kRKuLc/szVofttBEKocO/yTe5qEzA3PL9nPHO1bgXV/TOSNcv3qB/4196jB0QelNXmQUnxW122DAjh4k/Lq9c9sfRdzkBGQEylvo0mF+YS2ByFPel9npeBvwC6zEw1c1MsLlOlHa3CONp4nKlo9JZ0y8CMFLRuu3LZqrEJAUAMmGetmDSWpKXzbYks+N6/cCNzlXWDAS5nHbeMAtaqPbrWnuuOLN3WEMB5hO/b9flQ30mdQGGKCnDG5uLSZejS1
zNsCLAOGva4WK2JzWDkAQ6bIMRr32ge8BgGGpkedxysJTQYHVPb7IBkjiGmpp+T72K71vbRfM1jOHa4j/SFGHmsS7N2L7ZJFOxz18BDfx+xE4sK6px/+IzyKD1v79rlAZ6RXOpiOuesaikFlZo8ivGmeDtIu2KJTynsZPnM4PfVOjPHx4z2fwqiSxg8zWv+ZpN3vetdeMtb3vJEwAhwxwHJFFCD2IcMB7BM7VvT84YWa1IyZYV4cILUY82eBUgaX4SIPF0JKzIdcM6oWbPoi8l6Io/mVOJF5B+toKfO37tvsq2g3vZb+t56VqE1dmBL1hc2Ln7qWgNr1p4AVE/2gI/RMZbN8fbjX08Z1/7s9jcLRqK2+t/iPgEsXHs8Shzo+9jrG1uk6xJlcXSPbvascu9cmGNoQeuyDQWoyPtTMeige9jMUNqRjLIWY2hVrUMbU2nFRbC66VRlVF1IpVtlCtaklA17nugEHWvhr1u9PQ0Ulu56qbAjqYGTpWJ6ezDiKZd7jrfXuuTa3hykgUj3rlbGFZV97Sz1QRAlK8cjZcRJPcu8DzjodLta3KQGjtV8635j6JXMNfGAklXU7ZxvwchtQeKShaArb3TP01vBx/OMN05G//R9e6LTIds5UZ/jauXe9XPTAeB6mxef5AERLzlHvY4eNwQdAV0geXMhRTWEsJZOPTb6409fY+GqtSPebTTu+31QjT+AzLOdITHJ9nquqM6nPADu5QND/uyf/bP4iZ/4Cfzu3/27n8j57jYgSTcIp4gwmwDw4Lx0DD4HGngh44HGjugjOyDCOBEA+XBVY0celaxZL1y3NL6SsjfjPdcnnMp3ZtK6LkHtOmDZTqSa/rb+uPo4LtZ6UbfpEdcWq1EqUK9dVpEX69TUAQAvwBvYjqWwcjbXvbTy65pVTytENqhWLLRT97tlDAA0lyZjZbOyBgiPXnYds/+apdWzTHmZdtaev32mmvVhfAiByIGxI7G5ZDFORIAKigU7SGHSIItpwFIB0Pc3BSCHYj3MbawQJGwtoL7Li3zmrNApJKlZPJcVkFOtmE0j32kuC3y38Lbzeq1zlTTnPqqFtCoqzfqoreS0MPPcW9blLau2Z7jg3wcFJFwrcOApgdbKC2BYcFSLrvVD0MHra5etZ65am5h6ez6wVkZj7lJOMraqn0hwx5pk1kIHeEeuLlUJjb2Lz56YgVjc/ibwubRjrIVUu+t6Sjb/rrnp3QaYjsCGatlijMjf3oVX/7ZcW0Ztb/+Yyprjxhs/IQbMUOMm9WNS3787nynjkXU5BYBD51K6ZEAqm1LASJ8MY9zHNqkFYzb0NvnM/f3np6+r22aZkVGs28gqL26wPXPbHCIyMBXWJESckrigMT0w78E7/5wygpfp7GUmZbl56tf8zSbf9V3fhS/+4i/Gv/23/xaf+ImfiOPx2P3+VV/1VRed704DknA+IcyTsCQ2DS+Fn3WqX35Wv3WDhal7AQEe06HVF4FMInMS14qbWSYbume9cJpr4Pp7Hp3rNgKRF2/muuBvLSJ68SEQ2aqCqidifue57Lm9FMF6gudiMsXQVW+mknBznosiMclxxqd3JDbNpj5mjwVu7d7bvXkLausXL2GABiPafUsvajrNrVVYlje6vO/aJue4ozOPW0W2Zgyalu5KW6lAvWfCbQRbBCNkOaYQOjBSwUqJHdEAprlOLIGIvd1crNne4xXWJWOeRSk41b6jJTx3hS5rX1HBN+e0BfdSLpW9UwZiwxrVl7p+ljZKHZOwiCfx+rS1wQMlS8WjD341+6t3UL7f3ho++s5t1vqsDQ8jSSkPlck1BStHSS86517BrPdl5jCv/VdTxGkGpL5Cn9J1rVtGY07/rg0pXnyBL2XuyS1l6lQuJsCbbRsDaKtg2zgRvR48DiDxxJt3l/3fAgi8+XkElpb3IGDEjh13zCRVP0ouveiv0VrDMaTnbX7XBgkCEe1+a0GIVf5tPJzty1o/JwOAxKNJVfVc55bmJ
qUNMKr9VEG6dYN/l2DEW4uiGXcj0e6kTPUbCVLKVMlaPwJUeN7+mscInO6DSD5g5B/9o3+EH/3RH8XDhw/xEz/xEwhqDIYQPsAASToBeFhdqTogUb5X8BFN8UKgY0rq8ZCXj69vykBmoOmJgaWoafKkrggqK/IbN2dJ36vS+L7vZsZNcdd6sfqCz53V04q2AGmlRCvLexRpfX7vfDe15mOfLnTLPcFTjFxlyclm4lXEtdfVLkij2iprC5H9/DhABLCK+ngB6GIN3AVCfY7r+1oZ+R63Wi+9BcsrYKbPoxcy7VutGRKfDQkLMDLFUEGHBiP6tlScOkJYzzqiH7fnaqArVevq5Talq5bev18CpafcrsFsNoAoTFMIfS21Akr6hpiviungfdg2pZw7VkQfZ6UPLo6Y4roSRtliRbz9aWiYU15kdePcUeesuYBCiEIZYuhAiQ0y3+MTP1K0JRV2P0/cIOFqis0TryhOwJLp6Gp9RMEwXsajNSCy1v6W1ctcq1yPjZwzmRoBuBaUeMYg/dsaM2LXEC9ByKhejBa9zoxAKoObJ0MR2PYN2RF13znlOka8CgShvJe8UgLAIn8LJqSsFZzHr1Scm453O0yhS8ChMwMCqLFw2gW1znG1XWPWN04BuSjqmX0am5GosidxyZQAqMk4tIzYmTUw4s35o3TkvK6+Xq0In8r1+XM0ld6NzHcghuRenoz8jb/xN/AN3/AN+Oqv/uquJspt5W4DkvMjAL+l1RhR4AOQIHSyGqdUgAUAZCpDJcXqmb7bM+byG33U9RzJl/BcJtVzAl64mTHnjPfenPHonPBiqbL+6Jzw/CNJ43tzTtXlSjMkut6EFUtJc7F8xcMDfstDCfh85urQVc+eYsAzx6lTMKXdcg5dKd1eU4DJMh0wFzGbArQ/di7HKKVH76cKUy3uM7TCS/rep9SzSFTGKGuLqz6XreKr/xGIdHU2DhG9BUxZzEJjRUYAYgQ49P3q/Zqli4sNz+OfX9fqsAGTXp0Oq6yvtYf3ZrNmrbEiXKS5QAflMlO8ZPr+wUKHv0hGjFHKLSaL49wGujPV7xQh1vkKLiPOyMAhStrhEGqxPiB2ge7zXHCJAn5elqWRy8aWC5oWrRRaYH3JOexnq0i23wEdIG2VUmtAqYoWfFACLN+BYL5rFk+fW7uMSdtKOxb90GrfkPGy76YHFuhaJb83RU/vv3TpG4juznovYfG79w567IgNXLeK/hqTsiZ7WRQy4gBMwHju5k+mldfrhR0nuq1Am6Ny+ZdSrkDEdflj36X1sa/nb4IRJkGgQaX/2+Y07XZqwYee14DeyOK9RZzb+JhTbo4DKYe6vQIV+MCB9wy0uQZYByD1OMcowu8jEOK5YHWgBHDj6bxC9tmj+l9mkvL+OK8nec3fbHJzc4Mv+ZIveSJgBLjjgKSL8YglDS/QxYvMuV8IcpaXLoPsR6/QzalZaqzixHf1RAVolvohp1lSdEoqX9m2J6OCnuSBJW0+2r52TLV+68U3NJcGuhLQEvqg+G3L8aIuancNu4iNFgULnhafHUXB6w99T3Yx3NMOfZzeb2RJ6wFKVAuSHF8L+w3ACC+V8tKausaQWCAi25bKkScxlIWjXhzQVXcjxAJcu8yxCuv2Auisg17gunz322WZEb1NGa5bUzfkNtlImi/++u9ygVAzns0pIwbtPliASw5g3n+5iQZogF5Z8JQIXlMDSP1ds13eQlWDV/lO1LnidpBui03UwF/XKQKWoF4f6zElWxLM+7EXbLGNnK/mlKu1nM9uwhIkUuT01vK8fJe991c3zT4vzeDpOiij5+r53VNG6Xy9fdcAxqUuXLrv9xzrrUF72JE18QAtICxJVYqd+dxNOhKbUY6up32K8p7p3YqBq1XVVZs8sS5YQpARRORWQifLe5Uha7XrJeEQDVsB41sMrQYibvzdyiPaqnlSvQmekHJ6Ly9/ef3rX48f+IEfwF//63/9iZzvTgMSTFdShPDwE
HMGblKuAGMuoIAqBt1shAlpcR85C8DIWawWJ0ejsUzDo5IpiwHrc8548SRA5NFZAApf9kOx6tU4C/TWR6+o1driwOPItAASlP1iyeP/bCmWOOelcj1Fmfx0utmrQ2NprEtXs3SN/YL7v6J+3pxT9QO3VjV9f5NSvm0siRZaRaUPlz7UV4sn1p9HX9tzz9LUvmZDANTFbKQrcYE8aqbBWmhHyiAXrY6KD7tYEmC8uCwDKJfuXl77bGCndWsYWRF531py7kGJ3i7tBDJy+1zev4WylvsF1d4T2RG6p3lF5ChTDEAFEBEpEmAkpALIU6C7ROqYEga7x4jqHw6g+oNf+mzWdDM+dxoNrMI6p5b2VP/t+m1Qb2gk/TsXK1tS56SBIjydGzDYo8j6jM1SmezvpZ9fyDA/OETczGRS5HkCUWUdDQuQoVWlB6FXnOwY9kCNxIjIZ327I1BCALRQFNU45T16wev8bdS33vph9916Lp7BR/8G+MUUvWvotuo1hWuJjjuihBCqEVAD2g6IHKLMxVFS4vPvg0PEM1cTXvHwiCkG/JaHB1xNwpI8PEjyg2ePE2KQMgHHGFWqcuDhYaoAhPMfmd0QerYX8FkCLR5D0gycYfEbRbMoFGuX8Z6QZlk8EKKZWW9tqO3O4/HvB/Tzs+NKdvPyByQ5Z9dF8KW+5m82mecZ3/qt34p/8S/+BT7pkz5pEdT+7d/+7Red724DEggrcs5tQk9ZaoFot6uINtHMSdSgc8p1v5s5Vd9OnYrXWqv5Ap9SLuyIsCIST5JqITPrhmAZAwCdrzYpesuYjERP+FOUbDVXB3G7YgaoOWVMyqSxUG6bQ6hc+xhqu5cZwJZWWa+dsq8oDXsWOW3J1K5brrXI6UO7fXQdMiLtc++mRWrfc82iRU230frFa8CiJ+vajpXH2QdMBnf74r5KO8QKKysJ00rKsajpJrX/b8roxkTfjr79LY3vcjFiP2pLoicalHQEhflMMDKrhXItFmRLLGj22MSUc40JSVnMlkwn29z+cxfcDkDFmUi/egCq3ptqh+e2pWsTUKYQkGKL/anbN+YEe//aLUuDk5G7lr2W7ivNlFiXqUvaNVJoPTAyZmIT6BqqlV/tJWIZ2S3Go7Zjh3uWF8/0uDIyTNnPetteFmPv9T2xc6v3LnnnGTEjNnZEiwYl/F4/lznYstmsXWOZkhoLV4q3xsACrlG5aqng9iBMyBR7ICLAJJT2LPvHA6Y1YJ1B7WhAxLInU9DgpM0p1VCjrpVzz+3p3uvi0wwYoYH1ROBSwYnsr59JLTZa7m3INBqjmY61Ae4Zkg8k+dmf/Vl8yqd8CgDg537u57rfRizimtxpQJIevgI3OJRYA+DRLH/pekWZYq4+41S6T0l8dbW/uXav4DatdPC305y6wNRkFoipmFg4eZ+TVt6bck8wwVSber+D+avFMigNyBjKv1iAY50oxos9Lc1zzjgdCLySy5xoC95oMbMLma6qq5WdkU+1d78C4sYL4ug4reh08SKq8B9BhQ1sPE6xjwsJoauHwd943Ba7UdvmsCijY+38nlLPLumFTmdwOQI1cPKIsCtwktf3ssywHWxf0dlb7p3Q3pPKnSysfLToYeFPbYM+2c5LZQ28jiTl9p63AOlQg9tPUEpGVTrUe2jG8TpTgg7YurE9UZQUXZcGaIygVV71e78E6UsFYU4JXr0JDV48BdkaVPh3JLbP177r99Wb96wR5Jxa8L24yQYAqWZQi0Hm8yMUQ6KAN9T2LVl7Vr3LnnnGuVfIyazPOZfYwmW8hS10OGLQ57Qs/tevB+tp37dkbf8RC7P4a5hZAhG6Z/X9H+pvQc2/nK/JjLQYyqnGUZIZefY4VQbkwUFctB4UduUYQ2VNBJCguqIG7UmglPI9o0NAhEidCxUwaQH5BphgGY+SytyZ4bv98audLz1G5JRyB0BGMYaUjukIhekL8v4gLr0E+rWzMebz4XJF9F7upvzrf/2vn+j57jQgu
ckR+ZxqAcIXz6kGK53mVF+ScwLmWIBJfXlR3K36c1oFSFspNEOiwYhnbWCKQaBlymkLRayTH9CC2r1AwhF48EQAgpzYujHplIdTCF3+dbad93IqQOOUZALnAqrZEwGBoS6gvfXMRg6gulistd8TH9Ttt9COgto1K8Lc9PwMLIFJLxmWvtaZWbTsVXgscaHTOlpZbEuqLcYKZwEKUMapB3QXFuV1poZSrXz91tV99R5riyuwL6YkhoBZXXM0Ltz3qYBLWtbJmBCUEHUlSN9YQ0U7T/us5xEPiOg5QyutvBedxhZozNYcl4qfnlsOMXQxFp5Vuym8S/DR3Y/Z5oEJz1q/BgJHz8Uq1mvvt6dkzymX5y8GH7riCRvWnqet56DZv1GqZk/snL+a+Y6GHmX84jy6p8bISOwz19vW+mxNto7dCzj2iI034vcawxZ1JsRYA9afuZpwdYh45uqAZ8vn/+dKkrloEKJdtqbym7AoMt4iCtMblLsW2njQ054HDgDL/Cp3Ph6XQ8eWyG89MAFszIkPSmi4AfoEHmtAhJ9PqR+PwBJcsw9OKEAwA5iKM0WNo2vvDIEfgArwAnxjwstNqCc+7Wvey7rcaUByThmxTOqnVALS4U+gGZJpi8rPac7uYmPTVdbzKBcSbXVoljAs9h9ZL3W7RrKmUFmgQpck7YrEuAhmGaESTsvHws0oo1r+gYhjBKbihhaD9Ncx9m5lzIIlKTlXb6f2wxKcLS18nmhFa6S8rPVj7bdqzWnUsnXPWosZYX8BveLuyV4w4gmVZM0WeEyJPWbZVlGqdN2O1YB502aOeeu2qIsDVtmYcPXPfM7at55gRL6vn0suK/UskORdniLQ5ekdjEmdRc2TGtxuQQlQ73stQkMDkO67Ulo9C6XeF5D7mVMDKGy7dm0cjX0L4PcoqXut3lZGoMOyvd6+Ixm5dFlZMEM5Iw5c8ZByYbUEYNa5fgWILLMQ6X3ac108awNGUm6g23Np0szIWp94zJ/HlKzJJQahIfhT975W2NBKFyMS+3kUaIYz7ZrFv8/W2BFhSAhGHh6m6o5FQKJdtGjFJxipCTsKEKHbqf5c26jabrs45WX6cjVTtONKxXNPvJgTQJjkZLaNDDcEuqeUqpsWx925GBP1Nq+4LoAaU6cTL8SE+s5YI0l9ZqpjCOru5V5uI3cakDw6J8znjOviqvXiuaQhLAqHVnZ+7YVz9aOUfVQGpRg6pdIqQ1pJorWBL/sp2WJlGC46wHKxFteJfuEeLb6cmJ+5OtTvB7WvBiK0XtQAvtDcj5jqEOjdg6qlZZL7e5BjtejSwnKaY+173jtrq1wvYk/W2Jxljnv7u75v+3dkQV2m/OR+fowI0GeWagwJFfBeKdHB7971ALvAure/Krp4mn4+87z/HP3CUfp40BivGNdyn1wKFcriGiMXwX2+9zyHFp2OEujdnKwi2LcxVFNiDAFxEmt4ygHH2BZdnbzByghAkyXhNaesXLViwIl9ueauZazl7j3u2IftJCih1Gx5K6yJriNia1LoPtFz0x73nvNgHgP6+Yt/vXd3JJe4fi32P6vsgDVFMF3foN7ZJO8nPWeNUcGyXB649NgQ+d76kmuNjivUad6v1Wf5O2/2gdcP3RxqnuMo4N0zinnuYPbYm7kPTNdpe0dV1oE+ON2yILym/UcgQgMb3bO0y9b/c9WAyDOFBXn2GDtwQmYkhlKDqsz/h+gDEmlvuwcbPZmzcu0s+yWw0Ku8lwGNUbGApZ0nm3OWzwWI5Oy7slq3P82OaL3EghAvvtWusdpgB8QC5GOZ6/11lm5aDeDtNzq8v2U/LL+XpyV3GpDMWWjPOcnkWw1MXKBzWxQYiE5JIYDpPu0598haxWVgaeEbWTN1Gk+9n1002oIeXSBSPwcG66kMMwElfSutQ8sg5XpszjUWQSrJ0kLSfLTnlGv9BiC1AH1nodsra9be1lf+/VIWaQhVzIP2CyYY6bOINL/ZvbJXEd8rHHsaF
Fjr7VZsym1kCzRpYOQxN4tigd45HGDQZ3lZAo89wcOaMYCyVLcU1+vH6/tiViaCkubKUxSM1ICJFzfAfVfvb2PfvYH8e1gTXktbz+kyaq3xawYEr8geZZRim3/7z1s06nbiDO8cQ+YmZCDJPK/jglJaMiWeu511tdO/jcBIs2orZT/3Rqk+te+621x/7/vffY81GRmB9szdFUCvgBG7JkaMGRHt0uwBkiVDMvWB7KEFpx9jq48Uqwst65A0V6KIBjxkG7NryfvEqdWOsOZqVb6HZepmD3gQlIyEQIayB4yc1NjUgesptwB2DUbo2uVlH/Se9VTq81SWeEWe9Pp3L/dypwHJo3NCOmfczOJa9GhOwzSbj+aE85xxmKiIqkUnLV0wvCBVPeHGMttEWmtRJp8JOE6TKEMhDCcBWi5lwj26jIDen9tIXettFoxUWjq2fOti2Y/K/7MFYVsl11qqTylhPsTyWViT63PCKSakHPFCmKtCdHNOnV+0vac1tw+9MFpAN7pPD1jxUO6vmRGbcUdbxvzA8lxPGBHc/rLSK66lLeYQD/hWZWf17MsD10DUXoZmC4jr9+Fk23ABc+PJCNx7IKW9u7Rys75OqEH1e1iIYVty+zAjl3TATWLotYy1mhJWaR0dI8etNsuVPawJLfDM5sd3lMYEq6RSbgNClgpm3MWQtGv3AfJrCrrXbm5j++aDfE854lSUV+5DkBljwJT8l2SLDeE19W9e+mkvXkQzInuByAiceeCyc68ya48tpDhSUBfjogwb1pyJUerQ+AHqQOQac4g1U9ZV2aazY+l/nctxbMV/GTtyjIwNkZS9OmZEGJKppvPl3H6MLWj9GItFPxqGBCbNL2wmK2BCAQ0obliKKbGGkDXR7IgeOxaIkAWxMSFAD0IImHVGLX2856mh3+/OYBH9zFojIahD+RsUsLuXe7lU7jQguZkzUMCIpN1N3YuorQZ8SWOI1fpJy++2ktJbu6wwoDYqZZVsQkwyIfIcpL2pKOiARIqeIOxvOksUpSnXTUlnW7RFSO9PRbW5remrlHtIMkHFJNmGEANwTmIQLwvAKSU8yLGfhM++9dUKY0pGMgIiQHOp6mJhFADRv2n3gLrvKggx7VCb9blH44bn1pmshsGxA2W8/j64Ru/KtVxEvOte4j42ym61h7V4XNkXxA5UUCIHlXda7yR/JugxaKzGw3e7PMO5KR0AusD51l7/fN28Yf21d93j8oGtMj6OYq2zYFVFvRoHHKUT+8DIHjZEK5oeiLHt6goeOmK3b7mD0o2rXVM6Tw4rSU9y75IH+IyX55a1BUT4T7tnkRXZAwb8v7H7Lp8nc460YEIeh7X2wB9BiE3Xy98oMYaWtjf47IcHRlgvinEiVyVIndmz6IpFQKKzZ1XX5FgC1mMLXCcYaQk7VJpfttm8RgSv0rPKiwA9+Bi5Z3mi2ZGMHoycUqolCBoD4rMhOli9fW6JafQ/C0QpOq7ztvIYh75fJOV19uqluua9rMudBiQvnmecT/PCkkA2hAtFqzTdMiIdS8rXkTXAKl4jhdK6SwCo7iMSu1GCyIqycDUJw6AVhVFhP++3SVmZKDpOomW9iPV+CUo4CbcUmC2Vqxfoe0JShaBiU7CT3E4KuetDnRkF59QFonuy5nPebXcAV815PqnfDOjQrlgeAOn6Tiv46tl7Y2SulLbvUtUpuQ5YsNeg9MDX/LaYzXqL5Am5vy9VYKtef2XV8LJC2TZ57Vpv41I8Nmcct+Jv5ztXQQngApMONOq8mvAXh2TuuwN9a/c9AB5bhoyRVEBbLbB+By3SQ0/tupOKn2HqYC1rbkM2FqG2y3s3B0BE/3Yw77V3ProYaVBigcsay2pjJOaUa3FWHispsGN5R/t4MgASDG9kxLbZ4GDP4GRZEQ1GVt1m3Hkxur95x/V1WpbFFWt/qu8WDOo22xTPc8jSlykjpOKyZaqKM3Uv5+Arw3zUdQJYsCJXh0kFsLcMWVy3NSDRA
ewxtjjJnvlo7DaZEbpoaTDCLtU9m2EZEB+U9PvItt42ElxjRnXTwhKMWL1Gu51bFkQHt9/MPRgB+ngib8wsvA/summG2yLBSpZ5PBem517u5TZypwHJ89czrqZ5keLuhdOMa6UZxBDwygeHmou8q8g9UtJMwK0WTeUD8tJWf+5Y/FhrZfhcM1URmMTQWJMutSeDaS8MxNVKtb43uqc1YMLvreq2zjRle4KWtykk4JxKsb1Q/opCPOUSMJqAVOJKtPIzDSbCdo1eefHYEB0L0n5rfW3dsXQKXltLA1gqxYuAVpXtSScA4D7zrJQRc2su0AjeGFr2xaiC7ui8dJXyFg09tm39DH2Mx8jYDEN6P/soRwHdVmqfz8v26noei/3NtmWmsKa0p1yU7yy5/9OANbHjcZR1Zsb+4ow1+F732YrivKpQYgniF+DDAdKynewsugD/mEItfkrFkwBgThFTbKmAb+NGpIEIFU0b57a4Tz2Hxt4S7zHHS4ZkHH+hlXj+9uAQcRNTU36pfDnzSz2vA845TnTSFM8afSkQ6fszuH2rt6+xyw1wtj5o1/ZBykiBvTEB+HRDw6SAYO7fS+uKpQHJwWzjs3rmaqoxIlcO6NBAZArAg8NU1/Njja1sRQ4rQxJ6MKI9BDQYqb2Z27gSm4dcL6OxJUNQgsZ88LzcZhkUWulz7t20+Jcu6F4SnS23rJFbnh5ndkxdFZZJjH6tv3R8jme4vItyX6n95Sl3GpAwroPuWZQpBhxz096prLa/25biNYsms/yIwsNJKiyj4UC3KVHyZB89I/VxEO2g3ClHdsH02hND20dbN7SiPgJgFoxcQj0vLCdqQRpZNq2y5bmgbd2rZU/075YN8cCItcrL9cpz4ipyS7GxRmvgQ8tWatGh6KxQSlGv7Exs2xbtcBiQUarTPYHcQ9E71fekb5d+ll7hRmDpVqOBFd/FNXe6PeKngR2fbwRC1pT6NX99yxCMZGi97L5zPpA5UlgAGdyayWxtkvo8tjjf4tpKWeZ33d6RsrwFTPaK174lq0JFWq6layHdqKxcEn/U5uatGB8NRHitLTCi27oX7MnnuFAeAQxZp63z8hmXO3HHnW2fVwSToJP7T+ifqwdIdLt7pmSqTBq3s05US1lf4iDV5ym0+Z5rWCiJKciMAP370K9zTv9lZ+LPCQjNV4BAY85yHRtCR0ZmrzEDaK5+QDF6Kd2mz5zV/sq+Oi6p1TPzxpo3pyy9FOiJsFyDrf5gf88ZNVv6vdzLbeSOAxJ0NGaM8sI8Eye84kop5UoxBbBQTgHU4O1HDAI1Su1UdFQdVKutsTE1yyTQU/qdLhZovQzAtFTOxVLSAnQpniXUYwhGLkrR9EOMy3NS9Dw6p7xLuSMIpOIDRLGg5eXk6AESfY97mRHdF1EtVMsK436ciHf/bJoX56qVdfvdYzWWX8aK7aIgpwEBvVvQUpmVttt+LGPcYSUW17fXS35tBXt9DzzVdplnxLbG1D9PjltdWGwKYQhKAIdBqXpErqCkd9NaghTt+8/7s0zHGsuxtm3Nuu8p7iMA4m0bpa/ecj9tmaDiatwD26Ot5Wv3bi32Hui4RHnWSrK+7paFX2/r29DAx815GdviuUhtib02+6ivvL6PFbHXbkzT1G0bZVa05/BAsbdtlBbYa/eIPdlyAzqovtbAhCBkiqEmaXnmKMUNj5OKCYlh4dnQxYvEVuyQzAgLHkYUtyzQMNhctSgEFwFoYMQDJby3ECtTQkATkDuWBEA1QGrmxMszm3Nx18q5uGplXJ+lYDN1m/6vvLd0y/KSyNj3RJ5FVJ97UHt1iLia2N+Mv5G/rHI/Ba0LjV3dH8OO91Ql4em39a70zftT7jQgoaTcK1qcxHStEaB3+QCWSuCctYIlH+LCkamcy7HGctuW0KLjBkizVKtzXX2PGozQNcsCETn30pKhJ5NLrWyeeJlFumsa5mixkHaK6zoYscdphbZt78HI4twr7Et7lrKP55o1s
nxZBb5uXxkU3k9r6WK9/VvgNTMIMVkDMGW6DeTK1qzFtKxVEffubzVwW7GBlaVK2YyHftsi45sCJXsyldFgcKmVzoKRkcLlKWBbrkVWvN/XAktn5drZ/hrF33k/KDqmhq6jR4TqSorymW6k1ohwhaViy3teK3q4V7G/jVgleQ8zJVZ+GadkHnScyShgf9TmdcU+XXxvlhnhNqvYj9rYYsX6c3JcjcadjR+Z4rRgdvQ/va+XAMHW0+r/LV23+I+AQ1iQFgsZA1o9Lf1bOZeADs2MMD4ELggBBuzImhSWREtUU5d23cq4zMsAQKsxknI1dNlMWik3l621GCUt7XmlBSiZFmOqd9WqiQjqetqMmTyHSzLduybdyy3lTgOS05yAYoHQFnKiekCUmP4F733kGXtynnOZ+HhcU2ZPRvPRQbVc6FP1Zede7U09TsKgAEvwBPhWcC3c31rC1wr6eWyQB044YRECZbW9MjzVitrHGPRtLPn+Q0vBqmNmpC2j+9MLcb9dK1oWxPG5s49srIzHjIzAyJpw/MwGnGh3wTVmARi5gaxP3PpnnymRv12fJfPcleLqBe3a9s2pry7N63huKluima8YSuB9RHWnPE5y3mOU6nUx6niP7fSTHUAp16oplwPgsSLePT8uQ7CmAHft3VDQCUrsvs0vX38Oi/fDyybnSQc4Uz9+yTbrMbDGIN1G1kCe7fOR65MHSPRz8tKa9kp3n4FrFDS+9sy8sbIngxbFYzu87GQWiEzGADOKL6Ica1r6/h322uj157Kv19359P1ZYKXvybpnPZjad7rWMV6ExQ41u18V5/JPQMgYmLMPIgIySoYwtDU5hLjKjlAClMtW8A0ja8KYkrmAl9Ocqh7CbKHyucSJpFwziRKIXJ9tFi1boLl3XxyxrxxLrZAyCyvHur7q2JFJffaEjM+93Mtt5G4DkpRxRAMjpHIflnSANBbEnPHCICVwysBv3JxxfU549jjhg59pFhmgFVXUCm9KrcYIxbIvVIjahvJX7TOqYaDn+Wax6JU7oM8osideoinqvWKuQYkGIqSR59ysNp6wL+qklwTe0CLLe50GbJMnXfYuBcSswjXpfomNUrb3rffdK7qOTVXOVD9oMKIVu1GNAntu756939b81wGplt7ujwp5U2IxL5UU7155LS91Ke8HWE8LCyyt5vpfyrTWAUAszzFVUAIzNr0x7LWbhgf2wwiIzKnvvzmhU7L5by0o1Acl24oMLfMUbb0G1tNg61izqN5zbcm0cwFl9Ow940zKkzueNVCtx5rgbsBXdvs+GP8+6nffrWgZszE8rxmPwNKKD8xDgGCPXd7HfhCiz2MBkbVYP6juTSqoXTFlW7GFS2n7bWWG81xtPZC+5z4BLNgQDUToHsTaIXTRijHg4RTreNaJWKYYlLtVy6hFsZ4NOWPhdJBzxpI6KWfR7lt2m2FK1oSAJ+dWkV3/7cBILWFQAtiLfnJ9ln1uzgkv3EjEik4hrd0EPfGYS/08YuA/ZegLdM3Sc0rPjlghsLsLwufxtK95L+typwGJ53LFwGSdrUIrkwC6hRVAtRCItUNmrtNMF5dct03Bjxnw2qL32Wsd3zI6WoV6rrOsWH2axWZsXZ4LawHjTtPAULOCVhq5UslNIb/UQOq5de07rh2/iI0w+3RKmAIx8tv2TOkxGhaM6P1GYMRT3oB+4ef2LrXsrqDzsSKgszNxDKQCRrxzd21zlPJ1lxT1/phViAqgttDRd5w1gKSsTXmv0MZyLJ8rOEt5wfppsSDOGgb2ypry7LnjvBTipsE21kgaKPgZgAtGOgOGAXWUDlzHHnDPURSjAwLOxZ1LgmnJoDQWVKcYbuderynS7TtQckdWeQ846uNG5/dEg0AfkOS6bZT98BIwQlkDI6NxAIxdWoHxHKdfTzatZ1jy4nddP4tZE/X9aTe+0T33Lmh9304FUFVFOIYKRrRBcC3u0d5uAmz24XJPOhV425fe0TEolqSePC7dt
ELcBCPsBQEduX2uVdjb2pDBd6p5b9giiFyPbwwjogtrjvrfY0Z8pmR9fbQGDv++MwJCBVr3ci+3kbsNSDIVljZJsHjQaRbLwgsneXE50VkwEgPw7HHCXNxIWMSKFhoqm5I6tZ2jXc8AE6W0ruWw9++nKFVKmbCWsW7/lItZKFRFoTnjA0i94t5iCzLmWbua+UqvnhyprPA6VMRH98N+8uIy7P32xylFyrGKa4swgM5Vzbpq3YYZ0TEjHjPChQJATckIeIWqfH98yh53npFFdu37yMVkbN3tQYd1AdijbPkW36Zo8fzMojNHLpSxxJlEHIsmEcuYTcW4sBbvA1jDQmNJ1iRl9M/IKMC2L+S3pUvESNaeJX25Ry5B3nZde6dz1TLMiH4HqNDRqqyBjQaK0h/NCEFDji3IVtOPFmB5njlHSL+kmmLYAO+aynzZdx4Q8cCffRY2m9BtAIHtb37W/b8n4H3rvfSOte+H556lFXcNQrw5rQJT07a12EErIwZ3tHZ5RpaR8URfu7obT72rUAyoaX37eJGe1WcGqK0ZPSEj5iDAIzRXrRmiPE/l84QsKXzVuj4RdFgXrhLQzv7g/nTBSpmgQxt42ntxTu3vOYkL1gsnSe17fU4loD3h0Xmun+ec8eLNXF20BJg01y39jKyMXLYOZnzp/bWxby0jafWqyA0YsvLKBa/f+03083ua17yXdbnTgGTOfhYebfWlwhhjxppPORVnHntKwKR2bUCmV4TWmBEA1Vru/TZ0zdHsBaspK9ajWrFCD0r43WYP44o1hR70bAGjkatSMv3in8PfruVS1mSga49rybj79t9Hxu52rz0Y0b/XBSn3n/dkLrLiAQq7kOxRfi6xElvmwwKRNVcNio176Ns712w6dZ80CNxOudXuSRnTikluD8CcLhxba33rueTsOdcIHO6VThk196xPvfhNKRIjRS6EVlE7IyDW+0pISWosoBRDRUpADOIey4V8Qq3zskjEETOQbt//HIft87LvLwEjlwAUz6p8SdsvPad2IbMuW72yqI6xbqsrQMQDLp5Es7bUOMkpFBasvKN1P7ahzZN0yeU65imzvI/mGmTcDiPjRKz7bRvTm2AkNyZELdlmn8KaQFy6CUoCmnkuAC4jkrJmQlohwIyeHUh5yYzkorbXmiOJngftX8q+2+zIULTHuLUm+13+fBEGqI2vPGDj7+VetuROAxIGWNFKXq3U5fdTyvU3oAcR2uKj3WxOZSH0UqhOqbcYjBZcThDVkr7istMdp8AQr30y1rFRsDIn+OMkvvgpKqagVCXuLz+eNHyGRH7zg16XQGUrUH+PEEtpyl3PtXvm3RZP4/8e4xKUWNcsfq73nNrnmgklpQ6IaIp9y+WpttW1jq9YNS9UvpZKd1osbGssgXcOa1W2ll5uZwpTMpBTDJiOAZL0QLQGAXbLoNRZKRgj2QK3a8Ht+t6Wiu7+1K28r9E2L3DaU0jp3sY4AS92BDAsYlHkmF2QVmWmRQ1hqchSUgboUXdIUw20PWamIg0LxuQkPneIiQlB6M7VgxLA73v2NT/343A8LoFx1WnvGmvf7bYlgxE698NRJrQttsxjYbz6IpcwIx4YGab9Vtv0sXY7MGb89W+eYcrzCLD76DbrGCjNjHAMc922blre+5WRkQtAmiFp+OcUkEMW632S8Z+RETKQg/MduVj7Qw9IFtdq96XXuIyeDZkLUDmlXBmTc5K+ETZESgxoZoTbGMD+4s2MOWW8UP7enGeTVjpvxvON+kxLssbOstCmJIUfadSMubzTNJgmde7c1s0L8P+93EsndxqQyAKNzrUK6CdVaz3X+xF86DgTfrbBg1SIR4o8L7MEOutUdt3mKDx6sTolbemU35mtKIYMTLFMCEmUkwxltshVqZc2rilu7bMGHx4QYXttvz8OCGnt6N11Ut7OurRHumD+PUpqxsI1TfdBAybyG/PDc+HxrFur7VtRXrt2bShbXpyDB4r0Z6uM28Vvv
b0KhBwa+rPX1G4Euj+nEIoFVsCJjiNZk7UAdiuayeJ37VK3bPPlbmt7LewaiPD7yDoO9AYIwMRQGWuy9rfXYKTGHaBPe5qLhVhIEemjqymKiwldVRVjoj+nAIj5HEhiPQBrMDBuaQrjGCb2Gf+u1cAYJRrYAotrgPpJiZfVy4p+tzV452++G83Y9bQapAwjokFqZ8Cqxhm9f9/GY/UA6LenlNtvsb23FO3Cm2Lb5vaDYUU0M6IBtb5nMnxWiq6smB1U1gMhVIZEW/HpvrX4HgDkBkzYb/Z6TGvL22uKuHLZSrKfBimPzjNSAh7Nkknr+pxqNfZH57mss/JZrx835fvau1D7NobV5BgA+jTPUT3zlGtGtpHMpa94v7Ry8tkE4E7EkNxXan95yp0GJEyNB4iV4b3XZwDNAjMSa8nRYOTFU1931VMS9lKc2jdbi7dAbi2qvbW5VMmNesGSNJYph1qXQowZGclhCfoCf/41q+JtQIdlRbaCt/cIlaWlS1lbbaisokyklWAvK0sMbcJkggNgn4uPvjbvU39OBpgsEybogMO+WJUOQNxSaue6WDDrT1zsr12gahud/PP289o2P25i39i04l9Hwkh5frsgegAklWeonyfHhY0heVwQbJms0T3dVqG1cSO+1bxXSC0rqsW6tc0ZON6qZUX5ymJNDgxWL+2SOLRcJpPCPicIUKHVFAEpNPctzglTKdCQ8nqA+9pvl2zXoFCf14LFJw1KrJV6SyG8VHTsn5cdkb972ekqKxGWyQ3WAuKPMOtCbAzksb6PpX0FrFig4q0rFiDpeL9agM+sV2spZgOXgyBB1bxkCkCL1ZexbWNKkJdsCdAYE15DC2dZsh78TOZwViBEgxECkDkD1+dZgEfZlnLG9SwM+6PCgnAd0WuKBiNr78XIJRZImFNYvANMMKLX3Dr3ZjIjlfBEMklxuo7BdhbGe7mXkdxpQHLmCx0D3nsz41ffe405ZbzymWNNmbgmtK4ykPPF01zpUY+p4Dkv8dEc7Wst0/y8Zc0jIGGA8BQDrqaINEkFbAYMz7lPCwqIy1k9t6HfR7IGRPR2ae/liiEXmwWIqYtbW2xPszzrGMSf3YKSNmEWhWRSSmsSqzEXdbtY1iBEBTyYXazdJ+p2Wvhr5VwnPzyBiM2QAuwrKCafm4sT/y4yGu0EuN61PUu0/j4615rcnFMFzVN3rCyIy0KCQJxQfaeBEjulYko4hrVytiZrv1NZWD3eWOrbPayLBzo89xw+6ys1p1hXnT1CoMyMgKNCrlqs9bfiewCIQM6MNcklziQKAJnld3nPhDVJYlYWxbJ0TzUcAOJCQ2VaKUK05K7J2hjWYhUsj63SYMQDJSOw6MltFK4R02lltkAzCTBdzqvFAOMooJoZqck/VKY6zZRYhmQtRXTXTquPqnlydIy9jm2Hl5beaw/PSmu8BiU1tgWo7lsJoYCPXI4JXSB2HdoFnIhxpF3Du3bKPRAhQJF1oblq3czFDSu1CuwvnGYJZC+/pZzx6CyB6jfFjUs+01VrWQTREz0Ozuo9s2NkLoCZ52E2NfYdY1kA5bZVWNEYA5ASYtmXa25UAOU0Csx8Gcl9pfaXp9xpQELLQ8oBp5Sqf7pWkhdpQQcuOy0rhu83P8XQ+b+PxNZgWLNkjApPybY+t3i7Zm9lBoRCj8VqkXIwGkZjGGzFdM8X2G2r6k+7vbV3zDB4MpnnAphnY4r+WQu66LdLK/EUNOgAaFUHILG50SyayW+rToyg+4eLDpXnkdtPu4/ldg8QaKp9ZOEdnc/+tgYkPKp/tMhZBe5JW5frfYZc3ba638uiyHTbFaA4bMqTlCdtQbdsiLdtzVVn2M7suzEmMouAKniaqyKWsxSFu+Q2xQ0sY57N9hgwz9KOc8mKpl23ptgbGzRAYJXwkVwy3rx3ZI0Z8ebwteekf+McP2q7Tdu7V2o7zZxIUGJFlPDt8+oU6FtMiWx3+ibYdrENzfgzZzR3nrTuYqvb0
Keo3r4fKx5TQqH7Fse7kHa5KdTqfsiaMNi9a2/o10mCkczPJXg9oYGVjJa1Tgoz5+pxcEpcR2T7EnT0RiwPjCx1gyZbDN05SepmoLhDKzcsqyNJDCoNe6hu4Jq10xhkR23Je7kXV+40IHnP9QkvxJtqmabPO/9qf9oqMXcToLb8W0DgAQVgffET1mJZ5Erv31uf14Nml+eXyYrXqBPPAdBF53Sge/U1XqnUvSYjZqT9ptqb/fuwomtm2GMBBVjmfvHjfTEAkkvKcYrVPS1GtehMUc5LYDMvF1ebSUt/T8XyJYtIn9r3NOcuZuScclewStPt7BOPbvfAr/6st+1h3Ow5R/7GawraCIxYtxivHVr503+5nduu7LE5I5YCARpYV+PbrP3fzb3s0FtlvFBpzkjz5iGdrCm1lzAjdptlRryMSltiA1BPc4nvQMJxijU+BAR+xUXLiihaLVNOCP19d0kiEnCqqZmbVVm7bskGZX2lG2U551JxkmJ5ZBcvVer1MR6wX5uX/Gcofz0Fb03p28u4VFBzTl27pxi6ObJltbLgpL0nXtyRB0ZsoVyguUetjbcHJePUFgu+N6aLMrqmBUp2LeRVApprVSq/ZIj7IQDkgIULVqh90L43dtF/N7QQhOQs7UoQ4M1g9hdVvIh20TqXGBFWX2e8SAtcX2dGLjGW7F0zMBWWOrT59zxL/NhximL0Netq4jsfA05mLn50aen694MQSD7ta97LutxpQHJzzjjdzF1ObkBZfJWVsVpyNlwaZvPyb00EXFCoaMwp45krecGZWWjRLviAZwRK+numO0zCnJpbzJwyZmRcTcWnIrZA9mpJUwvZWoD7mmV2LyVv71uLp9SugZN2wVC1005xrfvLJKljSioLpBbhRcyLAlk65XF3ae6TWrICHTfiWUvtmPSybenfLeXuKTx2P32uNTbGs6x54rm7eNey7jd7RM7ptFsxfF5umzp+od2T/GuMXBBrGlNj6bxURpb1pUV9WWvETe3qMCOAURbNJTnWaxHJ3JQ6bcWcc/HtT4w1kON5Os8YvdY3kqnPvMtBWZljX5QyhvZuMQvaiLWwRp4RAN5iVbq2XQhmPFbEMt5aRqzi3ut7TA6/L4wBig2bxey/VNgHjJmXoU67R8k+ofttdE/A9hq6J5bpcVhIOzfpWcOyJYCsERzrdR3gPM8js7h0AQWscNyae00K2PAWWIckg3V4miuvrb4+51bw8JRs0pN1ZuRx+mxN5qonLOdf/ubFZtabN3IXXLbu5eUpdxqQXB0CookVOajJfU4ZNyXY+2qK7gRegUpsKTc5MdT91OIwsjYDovw9OMRqEfeEx90mfR/baRVQ7UrG+5X0qU2Bn7Oh+Deo8c79Ki2zE8l2891hR7x+unhi1cH4HWMCMPh7zqEVswyo/u3Hqb/hZR2S3AEUW3OE7Ih21RrJ1n3psTlSVtYssmvXsWNiVMF6S4nytulze4zHllwyBlKu/+EYY7X+zhltHKjXa1Fzx96Ho0TXYwfAe6rofX1h9RmScbyIBiP1d+Ur71XfXhMdW0PXCioLp5QxBXEZiTG3+QBAZt23MHb5YVeu2RzEn7z9TaXbav0mw5I0bbDMVYUV0HFGTQGTfb0MQ3rcaYPQSM4r75sWD3ysHbf3N+9cHpM4xT5DkmVZyJhMIXTGGbIkozo+c+6f89r4ssUzy9kXcqnHpDUExKkFoHPzJevCiC3RX5LaUHuqMnVsVztS31NbJoxhilsVICErwmKHKaEWNWRa39OcKgCpxUfTMgGKBSNb/eIxcW6BzSippu0xVjj/6gQyJ6DGcLJ8ANdRYUjb8XO+D2q/l9vLnQYkDw4TDlfT4gXTL/X1OUme84dtQe6V7Xa+KQY8qIBkKhPG3IGRLUsFJ5kpBrx4M3eLjt5n7/n0pKHz4Ot7vDpEXJ+TOxGlHGoVbBE/GFL3jc1ClvLYRWs1hfCOCXWP6MWHbT7NMhkeJzJARWNNuQMmLZ4kycSZl
gsygQig3bYaGEmZ/UJWpLmx7b23SxiFSzP0bI3H0X57rbnWYnsJCLFxMWIB7DO9zCljCqKwdpnUkFoCA6AujCk39yAPM1h2LVZtqMU3EKyn2QeBcr9jUKIBSP99HYjU7yZo99Iq3FpoaGhFUMu4ZQel3iLe3H9am3j2Guw7eMSRmbNCBtP7Arn20hRCSRWsn1M1SzdQghbQr4EGwcn1uTcIWcuxZVDW5BJQsfX9NjI6h7cuePOEdfOzTPhUWHAvTnJX+zp3wX48AEv3Ji1ezfRRl2nFNSlGImUZb5Yp2iPenMYt1T0L2qVTPvCeGoOHXT41Gqhz99OcugQndM9KqblpscYI19ebOXVgxGZi3LN2esDC0wFkn36e2opzaut6qEYh/U7PiqI+mWNPWxlDXgai9Zynec17WZc7DUgeThGHQ8Qxxh5kxP6ltm4RuogaU1PKD/1LXhfJgcJC0S/1luuOFc8SbbePFzSJJdHWNbtQz4y76HLDh6HFTAOTkQVcB6p6Belcl4OVCXavpZ3Hk/GSdkpbj1F8XQnC0pyBCeiycAE1G9FkFiRgCUZ4/jWfaKtA2Gd4qfLE47zP3jW975deQ4sHhs7JV5RGLMmee10co/ob6Mdk3YYGMKl787jO7WnDPUvGCKpLo7ewkymlrVTGPJnI2J3L+0vFwJ7XgpE1VuRxdGH2C4N3m9k4oEvZqeoIBCyBSMZ+BdH2N2uRyHNCBYGwsWxxOYZpGEI1EIVqHFpjoLXcBqw8CQDSn287Srubr81cwnl9cUxszzMGW7OpzfOaRWu/LwOXgfK8yjWLWeCJSQlbesllLwOswQrQrwNrwt2ELWeQOqorFouH1rUkLZOeXBJjY+da7/cRcNXHW9DSnWNhoCtLp95WQAnnXsCfn2Re3XVr93IvC7nTgOS3PDzg2WevqhX7hVJD5FQzVAHPHFUQ+yCGYoriFxtTAA69xe7m3CaEEWCgWAueJwczMXjuBGtWjNF5ycqQKanuEDFIFqMZOE694jMnCVqr51Z+6J3Pd3KWp06p6X8bTbjWzWIvY6Cluj0dyjmyqcmSAk5BlKAp9RWBYww19bG9JB+tTYc8uhdbqKu2q27rretr1r9LLLg8hwWye0DJaEHT7V8DQIcYOss1r70m9RmrZ24VrXqO8vwQM06p+YEz1TP9LFJocSRtMVXvZnnmLX1nkHOXi8w5F9YwVmbtqJRispyAHatLIOL13fDvChAZVtKOevtyX7rnSDM18BaL9PV5lne5sCMnZEyFMUkBNV2ndukaGkCK8uUJs23xc8dIsa2Z1vgefEqcS0s/ymtZF5bWn5zft1zqxsDRbn/S4r0Xa9s8UA+oeSO2QooEZTTMnGYgRRmfLJLLGDsWyD0BbUoqRhlE1CQfNTbPASV9XMb6fdvfta77UoGSLctzmvNFIGBxfBlmNvaQxXCXBZaZ/VN+43btqmWZEC1r87QVa/jwmBG9zWNQdPFnoH+G2oBJUNJ+8/vrLjAkGU8/yPwep23LnQYkD6eIYwxADDiljGOSyfRYFqKoXAcoUSkr/N5+LNlginJCsCAsxNxNCtp32eb8BsZshxV7vN6+dqynkOq2db+rYEcvHbItqtWUOQtKAAtMCOhmKjm8VvQX4K0sU7ulLMg3NTuNXoFLO7sJtP/eK7Pc1rdDN6t/nr2rn2zzWIv158T91sQDMmvg5hIZtcVuH7Fd+riRsuWdi0USgTJWFwALiMVER2tdZTQ5bh3L7+q9GoCi3YmSUso5Pmy79Xu6sDAOLJIeENmSLTByiTANdp3/dI5TxZhQb681XpxneUlKby2aQeU0oueXtmNrT9vWn+vBQarHW/ZqJCNgCPjxIlsymtO9/tpKDey/L/2kotnvkbvloh+LpCz/1WxIeh0scT+XMCV7QYn9/aVmRbbS0Q+P26EzewlcdJ2qavgs206KFdGZGBmrupYiXp51b8Ca1Xyjx5Edux4g4TlHYIT7Xypr8+5WTbN7uZctu
dOA5OoQ8exxQsoZDyEAhWJdb87z0uK9jJUQxTamgPlAhqQFjE+xpeObzOLUUbKDhWvkxvHMysSwZi3RfqGjxVZbnzE3t4lTKoqSUtJ1Vh9m52rbeutQrAHt7XfmM6/KugIlc1rGzDTldJ/o+51VP9YMZ4U1oTvXsRSL1OmP+R1A8XXfuGZoRd06pqhYJKeQMcfcMVK81z3A4dJFwQJe+tZz28jqNlJoRtZ9u+/IFY3nXrsP6z6pt9s+mgKtj2L1JTMSA0qIkALQBnDaJrTicIr143iegVRq0hyz3E+s1851DO1NY23vzcta5DG0XiatNSBi9++C+jOwiOitv6v2q1oCgFjJFz4aRrRrinzfr4AwDbBqpDKI9Pd1jG3uJnNCRe7mnHCFfjxtFRi1fy8FiZf6ffe1mey7upwL7b7duRTT34ETez8H4JQAHDhvpy4teiwGGy91a5ciHViwZrqP2Hcpa7PUMkWuDaL3RN+u7eKRgWN5DmPY0mu7LR67MDbZY9VvfE6GEdFj3osz1CUE+u9tDFvp59KIq4PDjqVWM2TMwi6z+nFt5GcLWI51PW2G2rpOqvnJm2us6LngHpfcy23lTgOSh1PsXI4eHKh8yHfWjpgzkKaeThXRAdGy/RhjUa5j89NFs+zXWIyB4qdF/+5ZNfZYKtZ8kNcmILctOvehoeijkxtSn0oDlDnL/gJAeJ+9tTMVVwBvceF3BvLpbWsyxTZRW3BiLfic4GNoii2fdXUBUm3eWkCrUmVBSbHoNqst27rMBKTlUgut5+4m9zkNgR3drEYxIJcKWUN+fhyGhiyJfY/mblxiyXgBC9ZrtFDq7bH40xOcHEt9mpjorpQxldgzzaBSodDuRHtEK7penMhW4PpasTqrIHjpXqvy47Ei3F62xaAKm/Edcyzn9VBj1FkDJy7bqn/H8t2Xfblf6txpAQNwB0Cbn6c4jtnZW+PlUncfnYUw5QxMvQuae42Uq5Xc30dZeSBB/1bhPM2F9ZuwqOdDl625VKtsLlutdlONqzNuXLUF85KNmWLoAAV/vbQUhd3dY1bXpLnaLrdRbJp32T8v9vfZEAVEUm/stCCkbWvX9ZiRKXou283V1xp++rG9NEZ6RqYFe+K8C82jZGlUueVS8bKXlJ8+cLoHattypwHJM8eIV1xNy4mnTASnkBDjJMAkJLEKpYwYYhczEIu/OkoRoBgC0tQvgtoHXv6m7jvFW3SWE8OSSgXGqVrXZMSM8ByLiV3R9EIjF/9Q4xIDOBaR1Cpq97nLqST1rnDrGbj6tMe6zVv3Kv0m+10dps5yOMWWKY379pnGgrGsY6iYtNtXfTKFNkaKIpsyEFOoSuuIlvfuZa0NI2urzzqFwt61wN810LCW4tRTBniuvRnAPNA9WlA98d1J9i2QetxqnK2rSaeUC8kl7p4HBJznrCz0VDJC3WbdP/e4SnsVtru2boARr9jd8h4fT2vQ735KQIz92NM+9F5q7Haey667Z45rQKZZ7HUQ/OI9WihUvX/80YzxPa5wF7Mk5b6SGktHCGDAhFpMdUq5S9neQElvqOnf4wRb44p1qKbI+bcl/IDKVGfTt8r38q4pxgQo7HFxw23FLwEphqf7xjNkLbfd1ghij7+tEcSCkduwILJfPz/obTzPKB2+J3atoivi6D7XwMfq39AXXrV1jvT7MGJF1tYo7if61fB27+VeVuVOA5JnjxOeVTltmW18TswOE2te8FMSZYRZMIA2+cyxTTxdFe9JjmMWr6tDAl22bHCvdUWxv3HiYKpL/tUTBUUXTPLOd6lYy33KuVr2Y2gpb7FhyRWFjpOz8gNXrlk1MN4J4WouRcl8bwvzHkCiXd/0ZE6wp/vt6kAqO1broQYl2nUkBp+W5vpLN7UpyJjTdWxoIeP24T1suOhoWVvwmDoSQFfR14IQ3ccjdyndBq8t6wzeKBNb/6z6f2MtvT7PSY3TzlLbA0qx7re6C9pNywsCZwrhObb+PJbTn8oD1
kqIfqYLF8+4/G0k3jtlwcgWgwLcHogwJXDLLpcVSwL07IkFgmWzASNrdXu2ZGtOi+Uxd8+uTPUxEVwsj7NucdYNpUtw4byLWm477Y4U2FPMhZ0LNbZAvz/2Pb1x5kT+3irZx+4YMcwIwy8F7eQ+k3FftQk+WsX2NkbchAsxgMklLKm+yuot3IKXc8/aaGYP3IaZ9cBIN5+aMUw2hLEgdb/ksyCXxld1z6o8O7oi7jnOngNY9usoaYaec7zfYnSe+eo0YwfB6i28fCQ//Urt91Ht23KnAckhFleK8j3n4lZUAAaKdRyxFQ2jD3m17MdQTlAsoKyDMEWw9kQqrjl8+a8Oy7eOytZIOavHTqRHm+Wuq7Isv9bYBE+RHMmeiZquW7cJkgX2+Y2PlLM2ifYBfJ54oI59MXIbku+tzkV3ng0feQtGXGXP+MEDKkFCRHU/2bIQrlnAtRyjXfzIaFFRFteL6i52TpUlknv3+9hzA/AWO72/t31NRr7LI8ZECy29j2v5B3pFq5OuazhGYhnfMgdMhQ0T0aiA52iAZYsRtLLWlWvuWd4+e865R2pQ+4qrS1IKGrA9H1gLNLBvnlrbRYMOPp5RfM4eMDIaZ16F8z0iy4Wa0+kChYSYJN7DgtG+aKrMjxaseEYY/d1zfazjs+WRAKDTost3HXtk06NrtnKeVYIU9OOSz5r91rGcrUR666cL3m/Vwu74x63tYF3L2F/aGOHFhQDowMglQKRe2zEKrRl39Oe1GLUt48cIpFgwYueTpZ7S9tHNviTZyL3ci5Y7DUieOUQ8KOAgZ+3nKZNFDBkBkbwJUpJFoS4QVNgUKDmqYNaUxIXjVNxhUo5+YNroBVaWz7VgVAoD75kqsLZDTYCAPwkuqf114WJRFYvia4yILuDdVrkeWYzX2AEqn9olbVR0bsQK2fgQ/tVsiXVT6oBLbVc/kVslpU7cI0UWvt/xmthzWQvkSGi9A1rc0yk1y32t/Jskm8szx1zdQfqiW32a1O1FzzdzaYCz5mbnW/J6N0VgmTe/+/wYYGRSz3DY5zFUV5rOchpz1+/HMnZ61qSBUBn4vQtjX2y1nDuvA9WtOWLLmr/sg37HvUqCn7Wo/GaYkfZ7bzmW8yzPrRk/HneJaFdTytGgBgvmRmCEQMSOj9E1vWtsBflaFunRLPtfnxNOMRVWtsXW3JSU7S/czJji1BXL038pMge2TJAUugHJvBfFPcsYwGKAYkguH3uj8aX7sQOGUZiXWBgbDUwmY2X3Xv2c28ytgZjO7Djn5k6WkjeH9ONWg+o5NzbkrDJj2dS+HgjZw+qzH7rfpqV3RH9s/30Py6r32wIc3fEXzBexPIneGJHrtZ+EIemlloSlbvM0rnkv63KnAckhhuoTPGepFpxzoeIiMKeAEDKQ5YVLJYAzRvo69hYWeZGU9btYSFuguygwWva8/KOXXkvvLtaux6BbCSSXyZCZrLr2D9Ls7hX6yCNJwHr9Xu9zCUYuVTC01XwNQI3uwwMl/n4tYHpPzAOflbaWrikqk9rI8cCFbuTGsKocD0QrM8eiQE9zczHEubitnGdROnjfDExO4gpHRu/mXNoYl+yJBSG+69akrLNtu8fEXFLBfHS9x5F+8fbdT9r7VmkOFV9SAHfZJoAwdCBkj3VWMiTd/j72KINbsjaWmcbTjt9R1qEuoL0q3evX35or9vRj7JSoMUiQ38s2a/HdACOrsTuxH09r+885IwVUsHuaUyEIxKAltVlieb9DAXPLd0EbbdYMNCIJN2csDD9zyl1l9xm5S/Ih95FrvZjGaCgmpCR96TK6dcADtQZNUn1NhqU0rzyY3E4QlyxoMOdlG0PYdrHR7tayxq/vD6BzreN3vb410FF+z9kFIl48k21buy81bnaADtmuzvUSgo+9rOCc23FJjZVLjQz3ci9a7jQg0WM/QF60DDSlIQI5NTt8LIWgKKMAzQX9WN1wArYs6Xqx8vxytYwsj7SejTJ6nGYyN6lLs
ztNobp5AdsARceS0GI1ZyDlhBhKMa0iCxcoLJULj8oeAQjPWg6gy5i1Znmn+K5BYo1/cFBW+dBbSo+sYYMW6HqcYlVe9wATYAwqptACRLV/r/WXHlkDgVYlm4ugxEK1QlsPDuJidJhC8XeOrSDXQYJfdTyS7tNRfYS9Qet7xD6vVTexjQV7zfLWgX+9WNvrbyiyGpg08k7eyWasCK6lyxv7bDdBiceSjNJr6u+6nbd1h7hE0dDigREvq1bLcrQdS+XNF4v2GkXNAy3LpAfF8FR1dFpt2SBAMl5RH2bNJJ6nWXn1+bp2qflBv9/2uWor+qkYEx6FVK35j+bU+ja1azEurM3hEi9Co8K6sQaNES01W/rfZf5JxX25c/WZ4SrDniK8tl91h0sS3LxILawyfBGUxNLOQwy13gmF5025gRK+iVwntGFOK8Z7QYkdu9pFS67dxusIjHjyOPEd7f7XAcrIzWpt/VqL9fHEc+HUhosYQzcfPGHb0r18AMmdBiRAm7ximaaklEiooCRAfO71IkMAwMW3LZDL808hVIv4MYrCc4yhU17t/lyojspKbN9RLhYMuu/o/TJpE3xoK6RkeKJyEBVzgg6Y3MzLLEsdMLBW22I9A1ALxWmJaTnLWCXBupRZqaBGWQCvDuJqxTiIB2YhHZ1nFOA3xdASBpTPUww1Lz/BSHMjCPXzMfafgfY8xwpz+2xBRgwo469tjwCCs9hr4e3nnJEgizTHQMoTTnPCnIX9mDPwYE7FbSvhlGIFJzJ+2liy+fAvCaKs225Jx2+xQSMWYanAo3PD0du9Y5YA07nIYmznBSjRek1VuJWxwLOg6vZ4TIlWWGJoyoMee9LmpUFD3+NeP/q9sUvA2C3RGnD0EPLiSfoUwesuqP2BzthbASk1xqE7l2w7zamNZRqkNoDJcl5v1znW9SDWuV2/87x2yqJAH4sbIBXnR0Fqg9ANM8ViPwsqVsMYDnSKdP17v62lBtYB8SNWkt+7v848V/tdgZaREq3rPB0mydB17DJ4SYav+niTxG3G0IowrskaKAGUISwoF640jouSz73boY0XGc2be1y1emNU69cuhimO4zqAMejotqk5YpiMAOuGtZG7JlljLXZfgpK74K4FKE+ap3zNe1mXOw1IUs71IWu/Zk4qc5lUcobKtqUqpzouByPLGBWGKRjr+uRNPPLZX6go3BCL9QhIKYirSEBNUQy0dMWSoUVo/FbEEdWqM5XJlz7JI3eo7jeVKaj1q7330GXN8oJK5bilEjOKV7BsiSdrQX52gb2qbEisv3vsSPveFEGClSk04KKfK5+pbqX3XLmHB0Bi4PfQHa/3b0CkfC+fUwZCmUDnrOOi6CsNTHOuf+didRS3RJXC1gTJy/Nxu970d/99y2XmSdL2o5iAum2nOY4ukUxrS9EpbYEla8rvncLC+UMpLaPg1upCMliorQuZ/rzXxc/bvlAYRpbWjf7zgtJH+7w/xbvfLm6suC7VwayKbdL9TgMT7W4kym17DnYMaSyjZ4kpADPEbTSDGQoD5glIeQamKHELSTJvoRQDvZpiz3THUNObw4m725KRu5f+bAPlddydLpBojVYNCEgf6D6uxXNDgQ9JE/LNUFPXXBQPh9yqwss1Lr7l/n6HTF3PjljR8WB7wMiW25YVzmeULberLfZjr3vwCDRMIRg9gHWbPBbk8RMK3Mu9WLnTgGTOwA19VyGTR85ShZwKxAs3s7Im52pJBvr0fp2bVl2o0CmvtKBLQUZhSerkEZo13FrC+XsHfKIAJVmscrWmZfRuOqeZlu4WeMdzsdo0YwdQemKCLG4VbChXndvEmXi+/t7Ee0k8CEEE62d4qYC3LFD639VhwtUh4hADnrmS9DEPDxOmKJbMCjRie24PD7GAD2G79H6HGKqiMXquPfBoQKMpJzJxt/O0haExe01atrjSd7mNi96CJ+P8GA/IyJjThAyxBOtxMkrPql1qlpavZV971jpgP9Vv/bT1b
+262+PSc2+a1Da/DQAt3hqULNqoDBosqAqg608GuzJF9aNzqu/pmoshUxjr4F0CK94Hx2Rj7to9XwIg2Ne3tVTuLRS3V7TiIk3XgynXWDh9nZF44Hl8TK88AaInn1T/WIBLI5N2mTtOsbPyHyc15oqRgs+N84OWqTCcKQPhIO9pPKMqegBqHM8xZpxSqOmBqSBenZvbpU6PvjY/2uxc/H6j2+aMJb1tVOPCbmtGm3I/vK8yl6acxK20KtqAzHZijDsiFjfI1Ax66tZomKHou+5diPuxm4yxTM+B/KzZEb7zep7V77e9npd9cAuUdEkFVsae7LsOPrzfPDbYuoytiTaspMosNWQ4ev9tXZeXuxQ7wFO/5r2sy50GJDKFyVOm0qaVt0fnuVZq10panYRSr5wBaD7Iqa8AXRepKNRzDG0x8hRWftcyBRUfkAEEUtByrlzSuYYYcK4Ldeqyg6WQq8VJx7fUAn0rlgs9mY7cokZK1aXWH88qp8/nV6gFtAVwjSHh36vDVEGJZUUIRqKaqKn4yV+6bDVXDIKRSU3mo+caEYx7Vg843G3qPBR5mqhZOAWEZIRQ7j8ASEVJrYXrAHElkrEzZ7GskhFpwe9qITYZpFLKYBVpXnck9tGvxTRwcTqyUGYM3XvGhBL6mnuCIkcWxNrGHePTO78XqC37Kle5Dtw1VkSDEcv67X1nrCV0S+kY31vbbw9weNz0nFuXsBZXd5/YgMZeq6vvWrJjtS9uqZUkyc1Vaoqhyy5YmROCaT6TLCx2hN932ggl++fqQUyiQIxbEuBwjKKoz5WVZmHEMh+V4rw3qoDi4t43HsQlRiiPPTknYXlG41n6lIViZS0C0KUa5udj6dMUUGvjkJmqzzC1+wwYu7qsgRH92WbTondE/d2ADH2Md701xuk24rGkFoxssSAjINLAeLuGbWl3NyoGju5ugN4WRBW5I8DjXu6W3GlAcj0nHM69lWNOGS+c5sqEaGaEhY6q7+6KNfg4RUxFS4whVOUNKG4e5aUnGJmKxYPWcq2o8jo5o2YsCSEg59xi+8rZprL/FPm3UPVnAJDK88Bc6qokqbabAZS6KfSRjmFJM7eq3tlYevx0rjrbi2YjdKrd2wqtb7ZK/ZzicOLX7eL1Gbh+dYhFIZeFnO5XBCSWFZlCCXoPvZtWY8TkGU2xPc/qwkFrVuhZEGDJoBC0AmUhyAlSlan0eVZmvBDb3yhuWYy3oWX3kESRyrlPR5tzSU2tLLKaeQPawkMGDugtiXvcc/ZQ/yPWQbM13NbvvzyHJws3rq5Nbb+qaEQdpMzz9/tZJon9XeePEqeTcsajc1+Qck65jl+dFGAUg6MBMg0djE+bFr/xFsb93tzCyv3kVijSBqk/DWntUEpVLXTZZ3iSH/m+N1Cy10XsEubXtfAr675nveZzYYHB0mAwOF3e/WKcQivOyzkghOaGO4WAHDKuShbFYw71fvn3NPdJKhgHtpak4uz0wVh51nP9Vn9F9Tks/up1QK8PmidKWQpaMq38KQjDH3MAUirrWKxB7ykAMebOTczKCHzsdb/sCh6mxo4wxS9dtdjXgF+kUveNNrh5zMkesdkevZiyNRDCzyxdELDuWkzhmKUxNCOIy2/OKEV16p5r88k9SLmXx5U7DUjOKeNmlgmZigL/npLQ28w69KgEBVKp0NlwNKXPl/w0J6QYEENcmochx2bjglGt6cFYxssrnyATsaQfRlEeM41xlRsQJoXTRkaIUmkXoFUtyiLXkEwFItreMYXm88vsK3SPsgvWGhU9xVRiNET5ZyasSzMyrbl7aascMGZwgL5+BQPY6W6lgcZBAQzPLYv7X00RIaC45PVAZIpYPFcbjAw04FHvLKcGNspKGXJqq6YFJKEt4gQkCBGHeABCLAuFPO4JZASLe0EqbGEMKv4k10KhtUmZwdkNtAANuADNbZAysv6tPfmM9hx1hrCF+1jU+7Cryju5c21bY2qai5ZSWpx9OA/oBBJ63
tAKy5wzXryRl+36nFYVYdtv2lpp45g0GKFCsSsgH1B1AbxftfI2bOqm7DnW+plTaoBxUPNVytCj6JIYlbU565JEDQtlWoOTxMQWxTJ0iKIspzKTTxGnOSPGjClOhZns3bZojCIoQZR5XQZ3qOuHZuRiCXiXuiWpxoHRPXdKWLgSaWXZmzdb39gMXlt9NTZOzSmvrAOpeRpA2JEYcmHw5N5PNctD8VQAumxczb1r3MaR6yX7kmOx1hDJrZ4TmZGmD/Qpfus2Y8BbW5fs+87kMbd1n9QxTAAWBgvZpkF1AzT868U1WqkFpYOwXYEH1BwJAkr2xozssA283yXn+6D2l6PcaUByfc7AecZp7jNSkSE5z7lTKDjR2NSKnj/sceKEKvsep4hTsejIRCHB5TEUS3ZhTQhBgGZ5AJRfLXKnDLYrbEuMqBXn5btMGqfOX1quf8L66PcAiZ50LUUvRQczrg5+yfND7PPf20XKUwTsZ92eKyyfj/7Mivc2NkQDEW11PsbYZdJiDFAobSUrQpbrGEMFHxqIdK5bUCAkF1SggEjQgMQAkGBnpyxKbuaKkQSX5HQGQsQUD5KFJkqAbUYuLFtvka6ApIKNJrQ6ZjMWW0agvPDXTub5ULzNGuAcIs8Z3PTF1p3siKIkcDFViRXW3I9clzFlaFiz2lnlhVZTzYaQgTrNregkFT/PTcv7zLZodoQghKBEKxpURJilz2Z5G3cGH2xTdPuUqKj3uyV7rJ1UuLW7jQYlHlMi38u8kpY1TGxygLW4NA+QbGXo00yWjq/QFv6kAq45O0+B2aAAzv2t0J9M/jGI4pYDMClrsnbjCgEIOWAqtYSOUxSQc0CXoGKOufblKTF1MPDoDGDqDWt6jua8acGJ3GOEVzNorZ+Xxgi5V91vts/tMSmjsUBFSa/ucDEIUxJkLdPABOjnAU/0Owz0rKs2NLRt8vfcsSEt3W/dZgCJl0LZk8d137LsSKzAo80R8l32t0lXmuGsARHN3nui7Zq8y5zLc9zhnmXLJtzLvdxW7jQg+f9ePOGZeFoE8l6X4F5r1WRQ4DD4NJraFTHg4UEsYccpYc5TLVD34ND8fqnYJjBLi5w/a6VKxbpQ+ZPt2xLURJRCUcJzQMqSKubI68x97RCKN6HaydYqVzeDfuF3Wsr43Spmay4s2uWqd7/xJ3IvXaIGIvI9dmmZuU3HhzCGJJRzkvnQ1mpun2IPQnQAayDoyKmxHmuAw3PPys6TD8XWHYQdySkg8POUSnsOmGJERlAAQH2u191YyNmMsrt2m8qZDCDqti2xzAyPa4plS97A8/XWyl6J0BbO2VhJ91vq1Ji3im+nuDSLKY0XnDfosnVzTrg5J2Flz5phTK5rCz9PcT3ltGbuevDs+4B7T5W3xnoOKUjbJV1nDxiYOcfrk60+XBMLSsxJ1JdQ950LU2yrvuu5aBSLRrHzmrePJwzuti5IAGrGvlMKJbVv25fj9xSUkp0zjrNkyiLbmmi0CBB2BIo5icKOA+VdiwEPwHNP9b0g48+1bM7Ag7OcRYNmpvY+xliVaYITjtnWpwKmLJgb9ZkFbfK3VYfncdYANecs8S/MxlUMdnTdKs0AUi7zdYvZnFRMkZaRqyLgMyP63QZQQUhlalOfKY/AhYYH9p3XRyPgsReMpAz4pr0mHhiRuWNZUkADkTru4KeYD8HUcMuFJQcwlXHIgtK1LeV5TaHXWfYkRXk5yn2l9pen3GlAck5002pWj1lN4Nqqea38vW1OdyoUV4eI63PCITZXIJmkZDY9sgL2nBGLVWyaM+Y8Ix6nYh3LqAxJyIuX3wMjW2v+6HdrAY6BVg0J3Ewz769Y0AZ9aMWz+i4X+95SZvcdTcwWjCxy3at7q8eoSZW+3doVi2CiuWLRjaufwDl5C9ho7hWee1YobdBgpMYneGDEApER+PCAiCc5AWGqn0NOyEKZyHVCy/AmFuiya3UT3Dg92rHS7rK9vEtTB3LacSMFlZg4FEYkI
de4qKzcVcqbIfUZpqbQIOZSEb21cJpKBXHjmqHHxjCBw+Cd8eJW7Gcb2GqBuyd6vNvkCnafagWl5ZLvQFE6iG80GNHPU+tlObdnzXeRx825FYfTblOaMXncGBOyJHK+dt1uH8OajJQWKonANriw89ZeALNHJj3/FdbjRIt9kHii4xTBxBJzLqBjlt9ljhIXrj5GUCmIHHOhPAcmrShV1TMIksXUBYibLkoxVGr0Ov07UPp2Rq3rpMdlAxVLI9Kovzxjk92fBqg9yriMjQZKbDA7/ywqvWP7Xbf1cepnAzzYbr1tVkyABz72M0jrwtijvWL31WzpiBXRboOA7661ACV2w477WG7bffi93IsrdxqQ/PoLNzjkq87CQesQQQj9a1+8OS/8Qa1oZoTZm64OEc9eTcViJtTyw0PEMUYcJkkBHGPAs8dU3IYCHh6mqvgCyyAywHfnsu84v+r2xggcuTghYgp9/QkA4mscJX3kcZL70dZdb1Hx+sXrJx4v/5qlzPaj/WzjPYC+hgsPIeDQ23UaTh3sq1P2auuRR2O3SRwVZDSGBNV3fMmaKJes+QwACPNZgRIFQCzY2As+PAkGoaUkLlw8b2h5fibu6wCfLm7Fk/IscjzU77mcLxUjWc++yFUtmLaMivgl021FlAuyg7VeUAIOxWqay7WOXVxJc+/iNsnc5VnmCDR0l/Xgg23T23SmPQaxiqW6V1Io2los3/ssb7Su6wQLRzIlZc7Q7oSdC2FhRvh+MPU00CscnYRlUCqALjB1YiY1U2xQ1xkYCQGMrUegwYeeTgQcO+0sQ3St3oNmR2x62y3Fz3Pf6n8fvQOxHmOv8aBT6qfqwpemUOtZSbxhrs+P24RRlud2jEGKxOtnae9nIjMZquukTuvNOEnL3J1UX3kJGDheb4qh7fqcak2TyTCPe5RubXwSdnDLzi+SOJHExixNGTjT/dgbMhco+zq1eBfUrt7vHpDItlPqx9nIa8DrGzsX7BWOEcaYAMoDIqMDYQC69W3NRcsyI/xO0ZnhEnL7bQeQaAkDFHDr+rV9vgvuWznfx5C8HOVOA5IXzjMOp3kR3GfZEP7m+YNaoXWzWZOmuh2Q2IWUM44x45hCzSEPyMTx4DDVwlnNKp/hrdFAnwFpzZpnLRL0aUYU9y1SqsdS5X3OYqXjRHZ1kOq9rUr6toVstI/TuvI3DidnzYrolLoWiOwBIZoNWQMh3kRt2xHqtXswwkWhghGHEQk59+5YTxKAUAr4ANBAiWJK6j4Yx6y4rI29bjy375NMC5MGKfKjchMrl8oCVKYQavxJQEYOwpY0s1mpxByUYhyzm/I6A3VITRNK0dCuU1SAwhI07AUi3G7jFWwWp5GLp/7esSKxT5rA75oZ4Xjm2I5qHI/GMGXJuhare8iNTMrNB7xaZWN2semlTIlmPCwrYsFJu0bo9qlWbQVorVKolcG9it/SqLL+Tva/N3ACyDrygEx5amwE40XqHKxSlkdWHk9yvhDlnZE5SM9FYbkmlOfK90jeuoA5iCElZTIvBQilhGlu43meQr0ukHBKZQ0KrS9tzMye+f1x4iIktrL0lbZuxLamCdPvHGzG2PAa3Xveu2JxG9vixSjZceZlo7TjSBvhHqd/tKTUsuN5wnIDAHaBke5YY8yICE/EhegugI97uTtyMSD5N//m3+Ctb30rfuZnfga/8iu/gn/yT/4JvvALv7D+nnPG13/91+Pv//2/j3e/+934Q3/oD+Hv/t2/i4/5mI+p+/z6r/863vzmN+Of/bN/hhgj/syf+TP4ju/4DrziFa+4qC0v3MyI4VwnEQISghD9T4OUtUlYT9T8Sz/x68IOPDjEmmaWGZ2uz2RIUtnW+4Vrq/9IvJfbS+tXXToY4M7q7U3PA0q7piR+yDE1hmJO05Apcdtltp9Trgv1nHI9Lxdr9qPtU4IGF1QYENLHfyxjQfYE8mk3CW6rTEk5X8eQKKVBzocGNLaYBmAMKrzfnX1rQLu3H
5kM5zgPiAT66+n2jwBTiAiJ1zkA6dw+A8jToca2VDYmsD1NIZ5Ff5Kg+wJM4tQUEYIU8aEvwCQATFs8hfYOlCcrmcVSLtl39CMoygbaM11zI7BARP9Gqe99pSX6qtnWfdHGRHFMA8tinJoNscxIY1Lo5IaOpQOcwFQDUIDmEkd/8AACPKV4dmk82Q8+AzIrhdHGnmjrtWZMdF97zImnBOn0s2ssre13T5YuSlpx3Gcw8J41rdkxsc5GY8z1vBdDKvEjGYAwCBI71dpF91BguR6kwkJmoGbNmwuDxLGekXGcgNMsLDifmc7OJX0hbRDlPC76cqIVa0c/7O03fRyZEGlOKv3Dd1Uf+eQUY7Jt9bPjBmjbCsCNFRl7Uow9AkaSCkPZauDIc9a1p9ZO47l+rl/PvH9OH5Ox1slM2I9krBNZ6twSkOhkJLxWxzjvMmLey70s5WJA8r73vQ+vec1r8Bf+wl/An/7Tf3rx+7d+67fiO7/zO/EP/sE/wEd/9Efja7/2a/E5n/M5+G//7b/h4cOHAIAv//Ivx6/8yq/gx37sx3A6nfCGN7wBb3rTm/D93//9F7XlNx7NiJmuWKljQbSlA0ANagfWqekab6HqW2jrJyD+uawMXsFJceMSl63YKSLa0g+gY1TWRMdDPESslqYqE5BSqO5bMhEkHBGAs6RexBl4MEn+d23B5b0czMJr+8TrI4pfc6HR+NaXXitodGEhEFkqaLFuI/hhwUJrEQJ6q1BT3kL9ToBBEKKfhwYiss8SjIQtJsSyFg6oWAUcQEOaRfIagLFtsMxNOvdtt8H3tu1RAY367yRgZT5J27m9tDWHiKBASmRWqMwMW6K8TqHFDnDRmxBakK9aGJk5TNzTNGsSOmDCWBM2ezYmVutGsFUtXoMZGUcFWEylwGPCAvDo+CfrhmjHtvw2DmD3Ypys1bO1zTy+0qwMxqw15y6m8dTFVhn0jtTHlay7ZTXQt5i3bKPMEOvi3BS7YkUrh/uYWV8584CLBSeybV0Zt9/nlMuKGZGizLmxMiSFOeEYTZxbJBOj7rOg5iKytACBn3zWZMKcgFyAvbhuBUwh4xAjribgOJfneGrARANK3stJvSNtLQyLmMrWX+MFyoI2z62rC2Rg5reiGHtZIPcmUFi0ZYXtAHqwYe9h/ftyfHAM6TUUwEJHGLa1MEY1zqsA3TVDpZfspS4lWLIhWkZdyvkWaIxcym2OposgXbBYTFIDE5sQhOe9C3hkq9bRS3XNe1mXiwHJ533e5+HzPu/z3N9yznjb296Gv/k3/ya+4Au+AADwD//hP8SrX/1q/NN/+k/xpV/6pfjv//2/453vfCf+w3/4D/i0T/s0AMDb3/52fP7nfz6+7du+DR/xER+xuy0pZeSUXUbEyxwF+C4Y9jdNZVs3J63AzykCOLRrHTJSjtUy9PAQIYGOPQuA2bgrmQmnWR9zXWCYNYf+p21fYJ4LuInMvkWKPHfX8ACJljUKX/eL3u8wWJT2ChdP7bpCMEI2hL707DOPmqYbBD/X89eJuwcj3CWEpuztEhWvkUNQarWzDxwQssZ4WNCxxbiUduzeZ5ByODPfJgCmrrajICACeUZWZFiNaeE+oVn4Y0CpIp/LNbmtKdBBPxtuSLk+nOqiIo2sLkjVB1sp0Y8rVSmPDAyXltVg8IktbWKTLQA9ELEuh5oh0QHsPE8AlVUFpGv72nX1cM3sV6UDMog6md/0e1uzASY/NfIIOGhg0vpB/vI5XFItXouel2Z1w97cspclGR3vAZTHFSpm2u3GKpOcfzgXaTCi5zCguD7Kj3VMiltpS9eNmCWmMIkb8QnAlBqoZu0XYXb6Pm5/b8+U6PXAplKWG2t9M1K67fjbC0a9/T1jpMeMjM/1GG63o3Ou3Pse8QLhcy6um4V94+MRvCOGnbVbbTWpFFNSko2QGWEMHxmR2VHi19xi7+VeLpUnGkPyS7/0S3juuefw2Z/92XXbK1/5Srz2ta/FT/3UT
+FLv/RL8VM/9VP4rb/1t1YwAgCf/dmfjRgjfvqnfxpf9EVftDjv9fU1rq+v6/fnn38eAPDi6YwQ+mB1LxhtTjZV5/YE6Lk0aYaEEzsD3p+5YhD8VF2YaNnXIIBpJKm4cBuApsiUxYPU/zxnTCEKG+KsoW2yysDUmBKRiGOxdjwsblxzypivDpIa8pCAArrsYjJiSDSbNJvFjUKL0ZWqhq6rpk9B6oVoFsT+JSNyiC3bFeM8NONB8eZ8DUiAtvBLzxhQg17x64FFBMIyVqP2UE4AJpfxyJpZYBrfsou2iO6R2kYGteeEnM5y+oQFQOncuYA+7qXuw/+ArAPmS3FG3f6QD/U+dFpixFiPncoxfD4po7hpNQUaQM0M1BgSIJTg3pC5aAo7oWNMUCs7kynJSOodoGvLlkXKLvL8ftzQU2v7Y+hBSWj1A6ybppe607oetvTWS9DdgWwdnBpyvR4LnLXK4GIBZ8EzZNYL0jeTjQtXefcdpoRiM53pfun6vDu+rzkyhSA1bpIEhJ/mVltIzzNb8X57ZCuToJ3ntMJ+SfFX+/z12OA/oAFPDWQrC9ZdrgW451iSRER5zqdAdyRhS+YIMPCe46zV4SguXCFjSn08D/v65tzckxegwvRL32/NXe36nFb3H4m+zih72p7j29/9a711LR7/1rsie14Gq23U74VKh4yScYxugHOWeU26NXTHR5RnmuSaxetLTpmb8SIFDKkRbiYAAZrLZIaMFZ2GnaUU+BloRWMB320r3wFAMqfeVfRpXfNe1uWJApLnnnsOAPDqV7+62/7qV7+6/vbcc8/hwz7sw/pGHA541ateVfex8s3f/M34hm/4hsX2OWUEA0bs75YRGTIA9iVK48mLEzaBif5drjd119JBr/OhTPYRy+KLMdaZRVsidRpPpICpOJ7qhWWUclP/VgNrFxayBpi2LUiPH8DnubA1pa0oa7G5Z+n96eLQAQvTHK91NRjQgJFdQnes8pesRk9geDEhKi6jHMDetUG9W5O4dtmZC4kwFWDC84YIIEfkWuVA2hAuYFECIqrtrOgaok5i4ZZm962MSQUjykqPUBXlBUOi2BOp0pyLE4wc0+JU2vuUngQtMhBveDdFswcg/K1lUOpBiHxuv60lYQAaOzJs247gVJ3Cs38GqAXP9HseC62ylynxrL023mTteE+m0oZL55ZtF5m8UBh1itrRfLZ326XC50eDimzrDSOdhFDrRIQQkBMBN0CGZM7AhIBDBM4lkH2eRHGkAehUlN6orsP76VP2ypunx8clwGKKy0KJa3JJHZm1tUlXoV8ClMvmiq37fanGxpakhLr2E4vIu01QIM92bSnRQIRGoAzpoy6zoWJD9Gf9Lvup0/e7W97LvVi5E1m2vuZrvgZvectb6vfnn38eH/mRHymLzc5zaOXbAyhU7ivid2zWzW9dFAsWSxMLU6og5RDPC/co+RcriCGzcjVFPMilWFqxkmjVtoERoNrRjf+8/L7tEylWyVxjX84p45mrqSYDYH94rmt7hQUTdexIrP+k/6uFmSAjNKVOF5Hjb7VYoTq+urlcsA7ofvXASEZT9usxitmwR+SVIHGeD+AiUEI4aPHMMnnrfYB+Meldz9o45fYK1KIEned0ljYW5oaxJBlACKmwKQksGrKoFg/lYkZJqYKSCsYUEqugJCcAh6Yeq/gSLp65KFdkS2LpAwa+6/iSOaG4pohGHQLAOj9zUoC9joVU34vjJM+R1d9FcTMdrcSmF/XcKS3oGLEh/TFYbgtN6dZARBTUJRtiZZQZRz/KPRZKur8BWBg1bDrgPS5YNt5kb0rhmg64uOdJsb3mMgJcrlBascdfmW3e+a3RRqcsn4rBSMfEVTfTyLTOEk94iI3V5b+oxkLHgtV+YZ+o7+XdkXo+ZXuxpB9j892XUzP9MAslNrehKSUAtHBPuJnb+tW8CxpbovvH9lkzwM3G/Uv6zat7NXou9lprz8Y/ftnGEdvixUFusSNrz
IgdK6NratetRfYxlHcnobC9ucY5xDiBq0msyn/rZ46HEPLwfeM+1SCWRpXsbYzIeswIWRMNRO7xyL3cVp4oIPnwD/9wAMC73vUu/Lbf9tvq9ne961345E/+5LrPr/7qr3bHnc9n/Pqv/3o93sqDBw/w4MGDxfZkGJDDwDqj3Yy0eEp3Xnmb+FuO4m4w53asduPi4qVdt/jbM1eNPXlwiJgPcvxxCogzgCmWqrZLBQcwLhGpKQG6T6zEYk2LoSlTV8WcdnXo88jPKddsYnt9b33w1dKdturnoS3GihVhhitx6WqLOAHIsViRjxOrpau0vI5l8dIJ0duf5/Qm+HY537dH0+Jyjly3NyWrARIPjNRrdQorMBelRmKGSvsiXX4OBQyUQopICMyUlRNCOgu6KwHuQjrs5JHpypWXKjEhdE5n2Wc+o9YzKWDJAybV6kuwXUCHxHKUz0H2mUC3LfGhP8SSPSrPNfNWLEo2leGWgYtXhvvINOvB7xZsjNyu6v4GRXjKCbf0QLOxIvoQ28ymdCzbb8fbmijy5GJZqwuh0wBrUDI8VwErFZQkHsPUti2ZwLGCFqNg7ryRrZiS0dzWKaGhzWU1TigwEUdsmdSYuGCSzxWEQNgRAk7bkwG9SxfQmDXOEZzHM4RpZFbtHOSdmDIEtEfg4SFKfZSYEXOuiRUAyRpXCyqGqdbimNTcL0aqFjDmrZVevxGcbLmnrIGJS2I+vID1vW5aI71gy1ULGAOZkdT+U0wi0ICi9YRgKuQplHcryjx2KvGnJ5SkNam12Xsd9L1rkE8WpG1voANQQMMAEW6rwEQZDex693KW+6D2l6c8UUDy0R/90fjwD/9w/PiP/3gFIM8//zx++qd/Gn/5L/9lAMBnfMZn4N3vfjd+5md+Bp/6qZ8KAPhX/+pfIaWE1772tRdfc+Rq1Kw3/uQ0+m0OvutCSrkWs/IWZn0uy5qMaOlzypjKCx9TQIoctGI1mYIaxAWkdNt0+9Q2ZsLwRLuKsDK9BiVsH9tsJ21t8dHWoscRrdBNUcWIBO2W1ayKIfQuTFb0Nrue7fVv3eMRpFkWe16yINKGBjzIjOiiUmsKZVVSAkrtAlG2cxDFpCkvEj5+iAdhMg4SP5Lnc89uMPNW6APdpQFGi9Am/popTG3zsorRtY01UwpIkXuIQzeuqfhHC9jimENlRWj9jSgKGuQDg7NrCuwYymfVfO4srXD7l324F4jo98iedcRw6GtZ1yz9nNlkj9SxQEX/ZrPm2N+By8DIk0oY4IltV51HFDABtDuKMGdA88XWIPCSxX6a+jl+NIf1rJYGIMt0zi2FeTOshHIOxr+5QGQnxcv3pn1vwC8EGf+R9X8CGRzgmAIwo2RrLK7BJfEK3bOACBwMICh1q8rVoEFJ3UfJXhZri9Ww68/ea6y5Z1n3s9F3C1L3rG22T7z+sb/XsadiSWJ5DKmgzBTE1VGeQCqurPJMpUBsREJGrEVj/bZqAMLvuoisZkLsNjlvD0T0No8VuQtg5F5evnIxIHnve9+LX/zFX6zff+mXfgn/+T//Z7zqVa/CR33UR+Gv/JW/gr/9t/82PuZjPqam/f2Ij/iIWqvkYz/2Y/G5n/u5eOMb34h3vOMdOJ1O+Mqv/Ep86Zd+6UUZtgABBq22RpuQrsrvWxPYda1b0gfB7fVltROpnYQY6A5ASlJDg5WWalHuIeGYy+IRUYPbovKdZzIknYWrXl9NCDpFn5aprGqSmnTqrBvX51j7YZS1TN+rthRplzTGy+wVWgGp6NFSSFAS0VweyJqMXBwolXFAv5BrhY3SEU7qy75p1QOG/XkXYKRkMpmTdt1itpTlFbTVHsiVNQoh1+QHhyyK2xRCYVAijlN5C+JZ2BFdaT6V5Js6hXFOEBrCN20GFTuz7IYWfyJfCwiZE8J06Fy9dJrgGQCKJZBsifZTEZ0pAynUgoq58SxSzXqSDENA6lJgl96X/wMqSOGiunTTagquB0KqwhnHtW/K7
TwxERyVOxBSY4/Mvv04VtvVuNL7aJeNNdkLSrxYEVurYHnuZWdFlaFKFK9ewUu5MSb9/LadxEDLcQENlsJnD/jpnB9OjQEnS1JTvteimA2M1MxqoRlW5Dr6LrB4B5m9LgbGaJVkASWGRFgSYMqhxmcBEvw+51z7awroCinGkBFnIEUA5z6ZAyu7s77XFKe6Zm2zJOtgxQMde+I+tkDP1nptwcecdA2t/trNEIdu2+j8I6Dm/T7FoKYoPkPGxSUcYyyJCFrbYkRJ1y3nOYWsMnYO+oNzRQcw8mC7DzzkPO1Ye496nxpn8gTqyrzUotmip3nNe1mXiwHJf/yP/xF/9I/+0fqdsR2vf/3r8b3f+734a3/tr+F973sf3vSmN+Hd7343/vAf/sN45zvfWWuQAMD3fd/34Su/8ivxx//4H6+FEb/zO7/z4sZrC71nyV+zcOhJ8eoQ3QlxNJFe4s60dm07ATK935yzeOPXlx3olN9quAoXWzHlOpA8+iqI9BDHGbO0MABSU9aPy5KMXEH2KHd7L23BiAYMdR+M3V72ziXe+TQQke+FIldAJGcfBNXTmawsoTADUnMiS+xHFGU9QCymAeLGhZwElADiqhVRmIwJIfcB+4t6KkADFICN5B+LYkp4WHd/A7YkG1efqvznwpSU+wvFtSdnjulQwQszT6VuADWgo8XGhQBwGREvG5ZXD+dS2eMw5+3jjUcLRPQ27p9hFMqXaJG8iLEYsNJd7ZOo0zGjMMhlrjduMI8rXVr1wLi1llzjOOnUziq+TbFmsRpUGru5JnvnMZqnIsCQqeoeF/T1AprLYhQj1JxDia8Sa3vKCZhE+ZUEK7lbTyktA2PEnrS4W8AAWLpa7XHXug0L422/DRNi5Zz6ZAkjt7DRteeUO1BCgxLA8dxiTiSZDarbVhWde0Rdzw88XwKRDkysMB1bQETvI/vhXu7lVnIxIPmsz/qsVbeXEAK+8Ru/Ed/4jd843OdVr3rVxUUQPXnm6oBnrg6uVX4UkK2tMj0z0gfV6d/0dgBd1XfS2hrA2EBIts+d7NV5jiUffCx+oimoBbi6iqD5mxrGhFVUKdrqAbQF+zgBMUSkKMrVnICrQ8JVua/rc6xtYgpF/mNAaO9n298PXdGkDblOuLEaffwFoAZ/qsD+NR2DCr9+9PaREwBwjuQz0uCDblTZnGM0zNcsQNpVxis81VmkUsYpJZdOp7Rn1iy0h8KQHMsCdYhtQTuUolvnmIvyHIqr1wHTdACmooQX0JF18UQ2RDEnNfB9K9bEAyqdO5gBJTl1Qe9yDgW2kriflByW9TOZEoIxWojJlKSQEWOUWPxQgkJzs94fnab7blq+ghkQKiDRsSd7dWHLZEzYw9yp7Wrs2bHujVe9adbvJHx3Dk88C+yaEcSL9bD+5/p6kwKD/vXZz639dDIVkHK5QukpoZohk+9tbNjilqyXRBctxoxwnNhU5ZoVqSAb+4GIJyEUV8eQC5Nc5rUAgPP61N6vGEKNDdCMOoPdO4Y/BNzMyVn3+vTAlCcBQvawJGvbH1c0yGgsSYmvidPFAMkbY1YPaAY9KbY5Z3EBZIxqHTtRygEAljUfy4j98MCEdb2y59DymwF8pPz0GYuXaNj+ppI7kWVrJMcp1mxVgGFMHKubnUAfKGYE6CsEUxnXx1mG5PqcFud12zJQ3q1QeU+xWUlOqeXx9+JIptCq89Z7y/5E0k1gKpBUir45fsQAdCXfkb/t2n3pSTHlgMndqw9XWBNJTpI7DdB70Qk4qOxpCzHqb75Lld5/dP56HdPPSwZmyYJoIKLzu7tudtVKXP5GIBX3Id0NIRRFpMSZCFBrcSYxgMm1kMsCGADEKH9zTuLIrKvSp9Qyia1k5totjCvh98LGhJLBrCpOQGVKaF2unwtTUl6VypTI6VSNDVVfI9a+WSrSPkPSKq+zKGcMDTB3mZGU9XsVPPO6obFlZIgksJ+pjpfxH5bt6M4HDOGxVZhsnBmwDUZGY
plZAj6bYOO2DIydTjQj0u946XnNvGXYEIDMRxs71oXvODHjVnPnO06xxYyUMaLBSAUhYQeT47GU+meU8ZczZjQ2pLIiEcgzUJNeZBq4ZFuKUWK3YqjE65yl0cccS2yKgJk5tvWgNzwZa78jW2DkcQHJmguVJ14cpBUa00axk975Rte9hH2ZlKGF7luMF4khYMooxpbQmI6NU4+Yj7VA9DUlfQQ+7DH3rkn3clu504Dktzyc8IoHhxp0qAsMWvGQ/8KSp9JNalZAu3eRObg5z3gmZbx4My+KMXoZOSxA0akEdZv49zQnWTBCVKyCFElMgcG3SsEYmCynIIlYPVcH8ddOzYWrqES2/VeHqYt56c7vdPZisYnofMFTXOoROse6FoIGKqURoatlsfasvRS7GojMZjKe09i1S5930caB1VoDEJvjvS8y1bKc6PYA7VkdEy21GcfEivai8KRimRUlMasg2hZnEtAWx5gIUrSCHRFD7IpOdvElObUMW9ptY2+WLiU1HoVuXeiVc0DaG5lRCOg+B95fhnJbA3JJm50RGlvCxZvgZAD4dMV0ZnoDRLmja1YXv1SAiI0DsMOxga82NlhrBah2gaFoMKvP17tP7FMALDjwwMhlrlZjpmR23httnV0T753W21hsU9pQ5pWVdo9Ah/2d4IO/67giC0IsaK01kyLU+9cyaun6H56kAlBlBGPBOI7uLpS5MNbrSOzIVAbWFATAXyEihmIIyZL4gewhpNJeLah7hBRbPKWEmMLCUNV7GcxVeX8cRuNSULL3vJeIByDaNXSA//7re65as9IF6n6R4L14SYSAmt4/CHgEIM+jyBpDMor76NgSte5RLgUTWwzKvdzLJXKnAcmzVxNecTXVyt+sjL418QODF0ktmKdCSfMFTjnjNMuk8eLNjOuzULhXh/MCuFA8S8yIWagTcGb+cdFgTiWdI60jli1pbg+8r3Lesl/KvUtXu3zZFslaxGJpXjIlzMQ1lwXLy1Si71lnFmvnoctFawvpEn5mxinGCuRy37mAkaxAyQxRvGelxAKem1QPROg7y89s4yjA3CqC+p60rFmfmVZzzksAYhkSb42LAYgzAUlqVtkYS5aftLTUxlwUpKws+rlTpGPQwCQrpYZBuMXlMEi2gQ6oaLBSxFaFdyUlIMYelKBl4OLtkwGZyJ0EiMsWCDrQ3LcCx06QWPostU1yDg0QFgDgBTN3Gd7QB6wTfEwOENH9VfvAnF5PMwzJF5avvPfFWCDjvQe2HrM2cre6jdwWiOw5323FAwzufuVvTVKwopxdUtRynGmtd+GTdwz1vAT2NUugGiM6gJ2t1BnUtLFIvwMjsb/Xui/RJIKYZO9U3qEQAiahw+u4SSnXYPe5ZmwCRCmOOJU14ViC82/OCVPKixT7nhsXt3N9GGXOWmNN9H5WnkSGR0/WQZBvmNsSGiFHwmQ3YiCU50VDK9ADEWC/uxawBB52DnkpgN3LXe4rtb885U4Dkgcx4uFhwmFiMapldWRPrKuBVtipsDL7iE5vR590oE3OrMrOzFTddVLuJmTKKBC8+a32ynvN61/YhhraqAe4ugcsNpcUkQaYyALIHOjSLxNTD8bmotX/bfcw7F/nnpnOWN/PXCx0nl+/HNM/Ry7YmikBequ3ZjY8VqR+rspez5RUay63DSzRI2uz/s0rPNUzI41W9yxXFFL29VpFAceh3PBhKgp3S+15VRLkSq9rMKIU7qo49WBkiqIBM4VoLqCvPcFYXb00MKlNnLarww8zdjlSRnvti8qaFPctoAGYWI7gPsjlfcO4yCfH6giM2HgmD4yM9AP2nxa+a6PfvWZ27qYGjNylVJucd4ClUuVNKVuK15bLlldjBlgyINYtq21bApERYAV8wErRn3UOJc5rQX32xI6L3m1LMYi5JYKQv83tN2dhdpix6QTgiIg5S0DicYqdpb5dW4xkV106YACd23N/jJY59fEZFM+NSq85er8tVy0ro/0uScRi2
7LlpjVqA415sm0Zj9LdrxrTaZZnW/IiVoCyx4DgMR8eCHkSAOKlAof38oEldxqQfMizV3jlMwdlKd4uWEbxXmjrUpOS0NZkRrjtepZtp5TwyKQOJqVN1y4A0JlJdKElm63Ka9+pzCpSMKz5kgLG7cAJeJvqwormusK1pn7hfqkuePUcsRVQ5MLTmJL+vhZ9yQm8ah99n6fcs1Vz0SjFElyUvShVg6lEI+VSBKyo2ln2szarrK6h3bM0Q0I2Rn8+Kwu0rV4rn/vreNlM5Lr9/l6VW828aSZOH6+lWmrPALP+XJ8TYgx4dE5VodJ+7UBJUxr7d4FxEUBTpKbunemVLCppsq3sRxcChMK+9O5eGeiZFH7XQMTJ3MVjqWylouDJxdihzCwmMSQ5A2GyiQPCgvFiStmOteCpCTYMEPFctCyTpI+3knIPOtr9OUBEfWabG0Bu43LhcrGiT6wZZvaKN4e2GDa5PotS+tmyAKHY6sHLfRwWg7IGSlzGS8+L9fksgQe3j4AH0N4LD6BWIGvGRrnbDqx646PGwwGFxejB6l6pRoNyPFFfLv2eAIQkQGUK3L/MD4X5Z9rmR3OSdS9kHHLAec6lujtwirLmHWOsrO9y3WsARacJrkDDrJEjRmUESjy5ZLt1R94ThzKKJdmjyHvXc487p/r7S6HcbzFOHnsFYJjC3wNmWzFFLye5L4z48pQ7DUieOU54cJi6TCe2WJlVGNq70r9Q2i0CkIwimIBHc0AM4maDcyrpEgFS2oAo0NNRLP435wSUHO6o52sK157JhkXAqusWxLupDWjZlmblgkVqVy3eujZAq6Asx1fGJDVWgL+d0GeuoQvWaKHYI3NuNQWY3hhArUJbUx4rS1rifZc0tuLFVtgR3qO30JemaZbEsiIanGilT4PSUc72dh0NSNR2AraqOO4DIid1En1uqWZdMkaV8Uf3OjInKVC5iTgh1wwtc06YUq94EdwCkBTAoO+6MA5UunJsygtZlAoIUy5xGwUc5J5FEeVtPwsyGk06valW5mtMUWguUJJtSAWFl/tg9e/at/r8avx4Sid/1/Eidsj15+jvh4xACHBvkm21yRf059mMJwDd2AQcZpTzzwWv6SXgZasS+xSWtvI6B42UHAM8JjOXrYnX9kXWLMWA2NihNQYEwAKgtm096Ajm2iMwMpI1hkTvY4XjS19/lAiC78yRKRZpLIoBx9yY5zoBg4HuudRhkXkm5ZYyn8a3Qww12Yt28/XEgo/R9rW1Zm9ylT1u06P28fNon7Xr+fe23J8Zvp5GzMzeopPa9VqLbeddAiL38vKWOw1IPujBhA9+5rhYRBhYqK2bnlTFRSkFtFQ9PERkZDybJpzmhDlLLMWcgevzvGBNTsW6/ugs7EnKGS+eWoFBCl/wkeWBQeCYuagx/qDFj2gGA1i69DAvPtAv5BEEKE2x1dm2+JkTJrOs3JxTzUg2d+3uZ9a1DGN9YOng3qmUQSzEOZdYEQYBp8aOUL/MA+tthmJIsGRDJHGBXIvPN6USO5Q1q9Gs0GNGZHkPQNvfghAALW6ktEMXalrzl27KE2oGNs2aHKOMl2MNdB9XHwd0OlN+j8pHvoFZDg/rIz8FVouWz2xre//oRAXEgQ+1vl175wlLhqGCEBDICmiJuWiE2hIfeotwyiPltZxfgwulVNr9bPu5PZvtgAEZ6Mel3U8zOjSQMH4N6OORvCQIFJvkYsv1SWfvkf1Xd5djgur/3F8nFeCh5xwe44kGDfr6nWFkBaBM3TOyQKQdbxmQS8AH0ANTzYbYa691N40qAJryX54h4+Ioeyq501Gq9VVjDytTUmJK9FA5p4wYYnknBCxel2cZQ6rsyam816cY6jrHd11iLctaUeY3ghTLmghwEfMCYyY06LD/RmCFssaAeL97v3nbbPpfT0aMwqg9PbhJXTzJKG7GbrtU4X9cpmUPO+Uf91iXvZcPYLnTgOTZ44QHU5+KEyiZUCQOd/VFaoCEeZJ0M
UJRiE8p4xgnzDnjYbH4XE+xKq4MUn50FpenY0x4dJ6LkhKqolknWTPBDK0ztNZngg6UTFht4aFSy7zlMYRa6yPmZr3rlIuFkiCdVwFNRA2gs8DHfvZE/+65o20pRpL6uLjDoCzQqRUCjDmUQHa6NywnaQ+IMOOWgBABAXTRsuBSsyOjarZAD1A8UDLKcEIQwn30+ND7j/qWgE9XkY5Jvp9K9x5TAxPVyh9bGtJxEcDUuT6K+1esoF/YFghjwrYEuniVdmYqeSqzF3oFbyTaOGClWn3Rg5JyRGHQIOwiWGxUjLz0r2cbPeN+Uzr3KYJeu62QpZMW+mCEY5Pn4D4EI9WlET4Q2eMKUGsXOcLU4UDPrFzCmEzlofRB5qVdZc4B+ndmy7VK77MGUDTg6M5RjuXYLU25FfNB8cAH9/NEH8s7D2jPzBZMtKlcbVa4kUR1rGYPEVugeyhJHhBRjVE0zgS0eiUpKVe8kBDrM4Vkr0tS/X3O2Ym1bHPazTnhCgI+6KKlGQ+9Jlo3LrsPZSuWZA1oeN9H65k2HO5lFNZkC1x55x6VHbgtY7Q/9mZpOBrpKpZF2WJ5Xi6i59Snec17WZc7DUimWApQBRirLS3GbZGyC0YPRkKnUOQsCj23RVAR5qIeccwBpzmribsVnQJU8FhReKcgWUysBajuPwAqLZgtVFebGY3FqExHymgB7+Xe0tI9YmnNFFCSVBD9MfbV3K+miBuVDphttJm0+LtX/CkG/mtKcgy+K4Y38eUCEGv8jAImbZ9y36XPNBjhPWvrs1b2Uh6DEQ1SbV53bq99nvvnqPe1iza3eVawrUmdlkj50gBrq26dyrampE0ZOBeGrSp6ST2TGDAlIJUFiS6QpznXz3TxYprThFwVIbItGc0lj3VQ6No1c5huiJ92uSn2W1IzsQX9HrfzknW5EHdUNoRKpVYoNZOj25zVsSOgnDNTEDT3Ue1CyHvXYGRPDEkvPshrmfqWwGRLrOsWY0n6fVr7J4Ny9gCQPcCDZ12yF0vwIZ9D3W8P++GBuT1jR7/GPovWP5NNgLJ9yXKt3qVxQou70gVGmfaCBUZpdIsJZd2KkoFLUA2mIIzJnEuh29IiMcyJa2TKYtSiG/MUQ3XpYlFh2Tb366EDTIAehPD7SC4FJFtiwcgWi7E/WF5YkmVsRwMhHii7NG7F68Ot/vBiXvvfe1c2ffxe5uhe7sWTOw1IHk4RDw+huZSExorQ6jWp7V2AbakQnYrVqAck4oOfyl+ZIIWRyDlXhX0+ZKRcWJOzuP2cjrm6dD06pEUQ/GnOwIRaBXfvZFOt4uXfKYl7zYyMFMW3F6mlCQaguXw5R5k3piCKwZzld9keu0VGFrNYrV/HKSyoedtuL4BPXIBKcHUQV6KaFW2KzRqvFA95BpD0vyzIJxpgVSRy9dnq+4lKHsqtWzeYlHs3GMuMjLJg8dxr7IeW5X7ov6vjvb70Jnbr7uf1u3WRa0ob+u8EhxqwlG265gLQgAmD5XUNFHENK22OxSCQWlYvXQclqveUcgke6BW55e8BjTnRYENeg3KfGjzuvHjOKDVNCtgr22OgdblvoAYgPF6O84EIxxWBiB2ft3HZ8uSEpdtTAyOGRVHzxpa7lXYVAhroYH95Yl2s1kCHZTos2LBAw4IMwAMqy/bbfawEjMGwHY/eI/GAY+j6oX+Wsmb1bfTGbDR/uc8UhB1hfaKpGN4Y6TUHcWWk+yoZ5EMEDmnCscStneKOebLUMUm5jVE9fh8eJkjMSVv3rs/SYgbGW4BijV6XKOP688g9ek3WgMjeOA8PnPQAIS22sy8AdIWXL2XQ+df2gedOvcY49daFlpBnDejcBVCiGemnec17WZc7DUho2eUiVRcxNDAyBbRsPwaQhBDFnSP0dqkcQlN+k2R2QqQCEdRIliUqxCDVs3Nb1I8TcCpRtbEoaMcYMac0dLG45EUWxqQpEHMeu2QMz1E0uFafJFT3LWbeknst/ToBp5kKRGN8P
IuLzt5EZoT1YrQbEdBbY+090IInSiEWWWm6fRUQke/GJ1+BEU3ZNsDRf9eyF4y42bY2wEg91gEj3gLnBUAugj9JnmRhK3T6SEntrOKIALD4lo5b0vbYGFFTC1NpFMUnVQAzZ3ELkWKFqG52OfexP9kogHJ97JbRa/Ik15ec5f1PubUtoWdE+Jt3XQ1GPMbOgmULRjRztyjgOQAja9MH76EDL+UxM37k0oxcjDto19B9k1cDzXm8XN98d0AI72HN3Uqz5PqePfDB4/V+dfvKPXfrxGAfb3pfJKkw+4bQPz89rgLa+6r3H4nLBKrz0o2rujGWeV8XGpWMdqHwm0knuKvFFFPOwCRAJGYBvLG2AHJcQjEqpVJTaByXKEHwBCeFkT+P1TirmO8BI2sB7I8ja+7Xe65n2RDrvuaBkUuvOXKnGrEd93IvT1vuNCCZgsSK6KDbBRCZz1ITYT5Dqk0X5YAzegEmEQBirPURDvEAhIhzeYnnHHBIojicUh8YLYG3sVijQnXf0orDFEKJOynsSm6TANCqwOtt9T6VIsrij8DS/cHONfJdlCidX19LDLLIHBGam1JZfR7kWBkCntsGZ+vt+rlwXmvB1qHGMPAeHk4tXfPR0V6o8FWlGWgpfp1VvyqBnX9+U/QAsd7ZLFpW0dNuWhacePeu77932eqPs/3E/rEUuPZb1tu1JUqDFh2Eqc9rZRTUqVMCk80CgONUAuQLu2WLMMbCkExhRgwBDw88ruxfKsUDGaxyDgC6QCNlDZysLZG6Ny0jYT9rWbNWsUZEA7iiHcYgLpRVuUMfCC77NvDBNmU1vriti0tSIGSUWGErngnwgbQVDf51HZ8pSOY6iRuS8XpsqfsWDMqk/vq5CpbAQ07Vxp3ecwt06GO0G65OQ12ZcHWdPTYar/l2fHQ4zvZ77l2AgXU2rwdxvACWRrEiNLhZNs6yJvo+untS7x7vpWVyZByJ/HYqNz6ngLkYFg5RAt/TQbN3cTE267idZJzq9eOUhHWfp8KiHFJ1XaW3gI41uanp9OeFsm7XRxvDQFmbA701djbnGRkIF8Yf+HOrlpH7mb2363PvObEFSvT1PGaEbr1TDG6FeHuOxwEldV18THD3NGQv4/akr3kv63K3AUlUND6aL3EFI+mMQCAynzqGpHvlQpR/czlZiMAklaQP0xVCDDWrkyw4oVmDgZrfPYdcikzJzscYShE7shChW8hnLIP89F/An8DWJowaLJmWgIXX9bYxJXCzgMnEcoQsTBZ4WJDSXdu4BQF91ieCIwZU6z6x9yaghOdftj2rpd8qpBaISLuxWEh127Vo5W97Hx+IjI4bCZ/32oLoLUh23zW2bUTfM06p+jEzdXCaReFLJXlCDPU3Kql6XM0512xdjPlJoaXebcYD2T8KZqnfJc5kqVCOhr2+1aoYXsCX2McTQs8RabaE24ACToIXN8LzNiYE6McT99EZ3jz3rDUgMkqqsCZeqvDOGam4atHNR3YbMygajCwLHS6VNAs+ZFuo37WbnXW/2lcXZ1/yhJGwH6fyvYLSwO9YxM20Y/t4Icra668LZAI9k2WBb++u1rMmHLM2RoYyqXshQMm5uXOxnk8LeC/ZDbOsbzTAyb0zVkTG6hHAKSTEOFWDVvst4BQyYsy1vgndgU+Ba0gEJtRYE65xep5jdi5vvVyLj9jatsoyr8jWvjbecgSE1tyzbiw4cd57oBk3WYuMWc7Yn979WwOWbeM9U3Iv7w+504CE1H7LHFSARjojpLOwI2mu4EQYkqVtNKsCbYHgJM0yy89nTNMBUzxgmiIyAm7mYllP9FWXuI25FGwDqHhrK2Ko2YdqULkzqbXJtg96W5sgtP+/TutqAYBNAdudwyj7KWVgctyUsvodS6W9bxf6+y/t0DEKOpuTlaZc1i3DffQvtOKxfVT2eB9LJW+5zd5HGigiDcy135jevx2vfiv3yWDPrcnfMiW8Py2jhXG43bHmMRGB/leLcRa3wyO3G
WBynFj7pFjasyghXYHGqfEhVKyqwhXadoBKVYsXsr+vSW/J5jaluDv7rZ4jtI3NEUU1pFM88+IaKfcugwTJ2s+e449FUPtsbxzD/XvmZ5fbp0xVgwHfr5ArewnkCkpQ4n685BgAuvfWy2wFXBZk3m9vgJRjZVJzV41HQmPFeZ09qpTuKf2asA1reukirXj2ExfUa+14LGyzB8aZGAK5d3fUrAld1Sy7p4GJ/szMc9zO+wiVuZe25IzqrsVtKbOfxDV5ThnHEotChg9AjT1h+mBMKJ4DGedZtusYlRhCjVHUwATo10pdcJF/b6tA2zVYf6fSfk4+oBiBjpHYtdxzz9Ig5EaDFDUHZDM461JDN13nfuxa460r3n3dy708TbnTgEQKIipWhKBjPhdQMrdt6SzmceW2Rak6B9mRGBtrEiJCPCDHCYfDFXI8IEwHnBMwByqzZUGMolgcY8QJSZSxGSV7UbMW00VmFIzaMm20fOWjCa8FK2v3mwaIxM2mz5rU+q99fjBI32KLsbuMgAEpnoyqI7Mdy9gR9dk5bXObMsCILgcrQGr02x5hvZY5tQxOe0GJvmYFq9N4HMh5+gVTF6i0fbH83D+8EbidCpvB36YYOoBydZDYn5MGGOW3c4xd+ukpBBxTC4KPcwGirERsnjv7xg9czgsFFvBdbKysu9zsOEE9UFmty6awAowrCFIghIu/dhfsg4J1AHsDIB0bwrF6C1bESmUco+r/3IBJjdWaIixzwr6Idd5p46Z/dttB517AeQOpfaISDUB4rbq/ihGsBic99hWNQ+NTNTwBmAIhcDk0966i9VgFPiwYqXFiJnEBu25LdP8wOxxZQ4KUEMhSoIvFmiJZjmL8CP07olmVTsrx7b5DS48eUO+T9ZrmEOr90VU5ZxlHfVKG4p51yN141y6IerwzMP5U4kck1b7cwaPissV4EwsORqlx94gHELzvqzXDili3KW9f/kagod20LRDRrEhKuYIQC0a4LcRWLPgGqRm+DGjbMnD6br3jGdfTS+5Cetucn36ldi+V/b30cqcBSQhtYaqgAxAwknO/SGkw4rAkAEou9iSz7XRAyAk5HuR8APIsf6fDATk2N6xcFpBZKS9b9Ta2pE0gaXVC4LUYqxEDKjvSgQA90ai2WeUQ6JXnaUInR1CBDnXNr4HmG7fcuXI519X37YkFIUAPRPaAJcuKUDwwNRUNyFa7B8aUvQUl0u7+93rN3J/Xs7h57guj63tgZB+bop09lkLa/2qKOM1yX6kkd4gho6YdpoV9Rg2Cl6KW7Xl32arq2NT1dmQsV++S0KzPtN5qMs97zey8791ydkCFJwE68N8/v74Gz0slTn7rx6jNVmQzF51VdiI5V+7u4fEX/NA3Wrlu6VGQimY+Mpi2xBV+hfM9QMQDIfK5ByK8Xtve2HAAau5fJi9pE5UZ4zlVULIla2CE3ZiQ67tOxZ7Hbgm9hcnEiYtWqIA4BdH2I0IlsKpbV2rgjamoNShpjEbpBvM89fhOCDKfl3cwoH22tUzk1S/JC1IoWRGBECUYPqZc3vdynSBMvMynSX2mmzMz9hX32pxrcV7NGOCg0+U23tLt1xWAssaOeN+949dkb3avkUHppRIPPC2NVPv1lzWXtHu5l0vkTgOSKQATVOD6+UYAR2FFNDNSXbdUti3PfasuUPOpuHCdkOMETIfi+iV20sN0BUSZLEOWlLgpiLJ0iJIcJNIdKaEBhBiQ5qaIXkqPnlMrrhgPVOIa68BYDQk2FrZGV+H2qhYDvm+3qBZ++7ZiN4DbT04j9gPwGZAR6OjP0V/DAyAjRS925lK9sovrw5z6jFWL7EMK1GmwNKlzRYXmmMEM6C1clinp2l73S92+exc77arF71cHafjVIeIQA25iErYkBsxRgIlYM5mKOjfXQcXWsU/qvfIadbzptK99Agbt0jcKiH4pJQTVf0ax092pxz5dBr3YEAIP655lt8n5+5o10obbvVN+YDnquGWWN
SBWpoTpmnTBPZ5Lzx+xKMM6xsMCEIJzDTK05V4HodMNS9pqAAjn79Qbmrr4wNJHOYS2PURA4Y/MbUqSeoZkOMgCtMx4S1aEln4ve1p33pVHp4G1ZTTYj/IMspNVLPf9H1s2u4mzsUHu9VtOlSGSPpNfpf1ieND3zr6prEkZHHMxKnR9EENLt140jRYzNXXFhQHUdyDlqbImLDJ8Sqlz5wL6gou1HWoelL7wQYXHFLxUsRNsg52/R/O0JyGGyoQAjSkJTnsnM0/aeR1YxhHa3z0j6Frf3DUgIslsnv4172Vd7jQgqcxIXaDykr63FjMoIOIAEr5ymb/Hg8zS81k+pzOQjkAU5oIMSbX2lX/rMR/FyjwPd7lYdLFB+d5b462rVlUAeN9sP3qL6JjoKRNjRsnAI5v0IhynsAApI7ET2porlgYZGlhsuWKNXMr2KHmxMmIA0LtoccHRuEXHinTnMR3qsSk6lbNHszPofcl+JBeA7F30uCA314IZtnhXbUvISDNwnERpSXPGsQIv+V4VWwAntGKJjVEjCOHvYkE9odVCIaPC4yPZGac/XxLfZ9VlQeNS9AqmBuEWjDTQ0YLUU1FyyYboujcMaud5eF4rlyhQTKDRiZiseXeN8TJMyZrU+UOxHlph7lmNHojYorWrQAQYsyHWDbcwHy4wSVgyJUaW4KLNaZoVSWquq65OXFpM3JAWz9WnMoIgO6Jc63JQTAl3DiXrYMmWVa4aIEwi2ZIQZL+UmwFAg5F/3MH9AAEAAElEQVTuL38L0YCi9oA81kQaKcAoZ0kAwzUxZ44M2e84xcqcABFHAK1YrzC1mjU55lj6X9BxTAEo2b6uiuXvEEMNCmf/TiXFvu5jr98v3XYJE2AZCB4zNiYtz6lZeoISwAcigDL2eEDFASN75D6m5F6eltxpQFJFW8k0GNGLlZp0c4hLdsSClrKfsCxAjrIQVheuJD7IhxgRMnCkS0qd+HOdTE4GGMwb6rlYJ5L6/GRE+317bhY1HgVNqQCM9U7ZpL0Up0BYuCtsWQsz+glbK2J7AtKBHnx4IOW2ooGFBSWdkHlCv7Bo5mTr/ICwKUzJGVMBPFPAjXFJ2Apkrz7IqS8uNmyHWqz4WVJGJswp1jiTWgPlUMBSbmwJM29VhTMGMPDCArHeYq9ZkxIDVRQMzZisVfP2zmtlz6u0limq7uOAQY5DggkLRPibDurVQKQWTUVfNBXon9tWraKRm8jIWpqK8ipppZq7YUzNBU9c7HxgzflDLPQyb0yxsSIEIpxbNAhZzDNYsgMd++EZmVZYbp3avW6bypIXD/W3VueFf/PCJWtO6rNmDDgn5SUI2cP0emPSK2pqDUczmgGsFkIs/Z7L2hOCNJhMyVyMTWT6QlnfFv2YU42xYWFhYdFamuCo+oxrXu2/BNBok2zfJvEgSDngapJ+Os2y7ymGyiQSvJNtPsaMU5L37npuTJV1a+3T51vwsB3NY0H+VpzJJcI2MoOi5yZF91j7/s8FnM4xL85bjw963ibLHTsW5IH53v61Y7x2a7lrbMi93B25+4Akr4CR5FjO+JHFEG3BRGMtqvbDHJGrC9gBeT4DIeI4XSHmUCaKYjkvdDaD2gGZLNrC0bZbaS//7YHImlJWFQI0C6Zct/+s99VWyl5BawoMF2tArGLLqtT0SS7gJDRfbN3PySza4zoMfdpeQAOX3V01FM12sC8ISvqLLPs6Tn02LgtS/Os5wCXmqm2MrHcNeCSMwMjI7UtLPf/AejbF7O4zRbFYnhKTOeTqCnQy1xqNSw1gbMrolv2puQXtBSQ2M9Q00Ef08ZrJkXMs7yEp0EGhgulVqdauWto9i0BkTi2gV/vKW7cOYBtU3jjb7OcHRmGR9gIpFqv1pF11VuaSSPYjdMyITqxBBdrWiurnFPlb52PzdxGo3rndLvujAyL8yxpTahvnrPq3shzLWBFmnNJxIgQuzWWrAUbLknHbQpwxaWu9eMk/DrGBExKGOaNm2
krlN8QAFpcNZU4WIFreHw1KdL+rzyFETKW/QunfUPpLZ+hiXRPWrNF1TiY0Nl36sRmrYqnhpSvDn1JLNT1n4DokxBlINPxFeWdimLp3h4yJD9y9NdUyK8H9/jhAxB6vQYk9p07Zy3vSBhDPSLF0t1oCDrrd6ne+AZe4OIcWL7bGu7e7JEl5WjzNa97Lutx9QGIWGYQCHIBCMZdJlZMRAxm1b/FaLIknJqNLZ70qPr2hpEyNMVclKqoByQB0YKkMeq4Ylm4FygBPASmiKg8yN/RWTX5OxUXDWn1jaFN1DI0F6S2XzbLZtSuoAOSy9CVeUzSMGijJQMuUuSfcOg6Uaok0v9uA9BEQ2cOOjJRkb44dsyUAR0FlOoIBJRsL2WzORZcwxKYAeceMtmkwssd1Sy98bOt1Wdw9hVgv+C3XfSoKuwYZ7RqjCbklUQBQEkTUDFApdODkBCm2GINUhmaq2t41rJzXprK24z72z4zH6QKA89yPEa++yYjB04U3U26uWik3ly1aeteAiPWLH4keJ2vZ+fhc67kLgJyTML0yFjMOBowwyN0KN9Fib38LaNttjRANTDwwMpQyb2fv/dXrARQrojIncv7IUP2be1bEgg2CEe2eVWMkyvk0EJFz9i6mwNh1tPaJSgABoEsC0QCyKJGyBDTWNiBXVzxxlxKGBLEAhICSjcswJVtSWJPGlqD6MKayZsh7I/O/1C+Sd1EKtcuD1m5dNiBeVhCZR4Aoa0bk34CRG1f72xR6ijevcTuAhWvXS6VcW9dbgpK9+9p7sO0cMaBkSDQY0ayIFz9CGYEme/2Xst/u5QNL7jQgSRlAPCAfYCxqB+SSGQvTAUhJsmeFCSE3AJIBARUWoFjhtpRk7ssSJJ/nMxDPmOJBFoeUqxUwB77EEcepVGhPfQG5KUIC9dRE46V1lX2D+xmg33mS2hCKeWF2o6msQJEAJiwtn53ft7LINZcuEQ1mLJjQ1ka2nwW39AJejgbQZw+aolif9YLeMhQ1Gh9ogZA8vrdWXzA5qse9ZsG3bIk9ljvY4Hbbtj3tYEBOy+jVLNZ2bGh2RP/ThbZ0Pnv91xO9QF0dIm7QKsH3i+PUWeB0mmCe5xLpxreyBJNtadvbvgtLMh9L7Lfr30YMiq7Ls7RQr9+LTqagXbfW0pqy/shalWrLeum/WzLFpD5rZYVJCcyzOjQWkMxnKmCFQOSIYvl23hNhPkQx1SCExgzLjKwCEfV5NfGIN1eTCeHvNFKhzU8o85OOERFgIodpEKJjeLi//j3DL26p2VsPlFDsXBVj6NhzywK2mk6pujEynTtZkylAal7RGAT5PpX5/Vj3C5X1iCH2K8KK1wDZEuni0AE4fY9TaOsB76OCstyY9LmsI3OZZw8x1vom1WVrirg+z50bV1SsyZSwYBo1WzJSmhlvIp/TcL+9Svdo3rOAaAtgaIOPZxDa0g/snNyASFzM2/r4hSF00A677a6BkjVvgZfymveyLncakOTyL9gFSi1CAdAGFDVFo1jYYgMllNHnRQP0QtqUIklHnBdKPNPxclJtAc19MaYtP3GKZgaYhjVFrUDnUuVXEEEKTcniwrwMhZAN3rSqU3DKvs1/mcfSSpbrdUo8SbGqJeU6IAH0fTVrT+yivaxUm93tdX+12a4XuujbPFC2eJw9vc6mRcbE29dm3dojtHheQvPaCW/hh3yBUuu5CejPYlVcvhsji+RaOxcWwRKArdmWGvSfm4WW+5HRqO8Vg8JDezb2nYsBmOf2vBM08LBa43pfeRnfupoimWyIZkhy3eY/Iz+O5NJFban4JExxWriCtDarPsm3dxz1Ym8eW2Jsblt2exHrlrXmmqXdsYAlO5JqTIgPRmps1gCMrGUCpNh5YVYZGIE2LmVuUvuoGjGxtGuKEpSRCwuRqOhDGPug1oMSUw4g1PlK1q649CKwUtiSCkrQ5nb5XtplWBMAxY2rbYu5sCOhTZqsBB9iQCpuz8ICiRGLc+OxGPxOc
6pscspiEbRGvpE8SUV6zQizZWRc298yFaO51XpRaPesERjx3L22RLuceWzOvdzLbeVOAxKxNorFJsYopp86mR6h0/TqDC15PgO5JbTNawAkxFYwcTrIvvFQ/JIP5XdZHAKKBalo6lOJK2E6XiBJlpFCux9zwCkuszVNA/Tu0aYSIB+LotEWlmOxLp1SxsMpFmUrYs6N4j5OsbgcSCA6glisprqklC5Ar2Cwh0IIxTeYCqBy4dLZVwrymZFLFWFUxJOKO5n8X+6ztGNLqOgB/UJvu65f9J0Jl0AiKlZCu+mUPo2h273bT8eXaFcu7nsRW2LuTbviUEnVbj0jZoT/PIZktDhysdNsB3/XFd5lv7Sg//W5LgEklIO5Jv8uzm1YFGDMmOjzr7EqU+q3dfutLNS9JVy29VmzSlxJ2mZGLNul+2vNimrFs3y6z6MoLC2YFqguM6XGzBzFoDHHrOoQDfqCWZ8ccM85BlAKMNCU35qTt33u5uac5GE7c3TdH5o1lQ0WgGQs2RB+zrkHLTZeJGOdEWEM0ZIhaeepfbWhvNUxN/fjlfM6kz0cS2AFsdpxaunfJWufeq+DsCUBhUUJ8pwm9a4ElCxbI9aExSXRz4lkRDjHSqat8pl9HwSUzKkAl9BY9DBBEsjkUr8oAygxTY/OzQ2W8WkxALGsczjPZS1pmbg47+k55TYK8+MwI2v7rIGKPW2wblP6M1kfApIHHTAxbPaK8UAH0XNOsm5k1oj6uDXY7uUDV+40IJFFJuOci3JMYHJArT/SVebNqZEC/Jxzt8AtfJJV5fZMIEKQEpeLI90Tcsgl2DPjOAWckrhQHWOQWOUY6gQ6l8l3zpe9yFVRiXQPKRNykL/0uz2FjJhbWtUpADFK+kWJZRHeKJRFhcqC3I/qitCDEZjPukAiyzfQjSODVjsGQrYJjKwRgc8eVkDPz1zY9baR0qS324lTsyVrMgImT4ItGS1+W0Bm6ca1Zn1vilI9Zu4VyDnlupjpc18XxZWgue0z14XQs76vtVkvpjfAAnysKddTDMUoQeupxJnIb3LOqFwlmUr1pM7HSuXnYn3VLl+VUVlTwpWyqccildUGTJbMSA8s11mRUbrQNaG7nXecvtZU2527+ji8F+1yI2NaQIo+t8Ibu6XOxw4YaTulDnRUcUAIP+tMf3MaFzQkE5Kzfi/IqCyzZp1SWgUiOsuaBSGXzFOAAAaKZvtOyDiqgqMpzwpoAzQ4xRAkO2RBDilJvIfATXlfGF8ic75MVqFz5SpdrZ+RfgTlL+e5DqSgGay0sUqMbw2ISDsAHfsi7WjP81gyT6YUME9ydiAi5QRMqHElc+b99HPEyIrfM77Ld+5JgRFvvx5ERHef24ieH10gUufC3qBjhSno63oc7e9LUDLFsGsNfX/LfVD7y1PuNCCRBUKUEE6hGcAUD8Je6AxaBaAgTcAcF7QzF70KYBhnovyQmSoyT0fxVY4Hyb6CtmhpiWVCjYEB7mK9ovWHVPOxgJO5xHvMcWnVtvethftOUdKwTkGo7phCtXBwjjhOEdMsQaupuG/kTNe2DKSyyJXe1DntbQXgNdETWSgAJYRG0dN1K0JYEgl2VMdHYL5lnZbOCulMArrOh0izPhKUWKZka10aWT7XlI+u4J05pio8KuiZ+3lKLH9bAyL1+xZYS21foFnP6QKgXQn0b3KMPLRLFlZaoW0gdgPc2wDF+9vqleTF4ksr61QAOQEKlbIphApQKN5Ca59156aTSgxJSm1blnf7xmGy7LPj78DYp3wkWgFbL6IZ+m05I6ZipMhqW1EkqdQy6QKVUL7jEc2gkbPE0qVCiYqdpBX2oxGi9m+xyi/E2ZYyhP0w92SZkC0QosELv2s2RJ6hX1fGc9EagVNuk+/7nmVS462OX7U7EzwcYw9M5gn9OlCAwBSBGcCxzMXkyiW+ROZgZuei8WmUEW1NBOr0xhjlNa2YjgaOpGaKrA80WsXKshFUS9FOMdwVgGIMfHMUr
4nj1Ax8ei5aYxUf1z3Sky0Q4hlZLq0T4p9bndewyZYl9mTOuUtBX+foScc2LgPuCU7u5V5uI3cakJzmjHNCrVSLKItc02MDQpBbnKaDeFilcwMn86mdrAISeZm6qYgLYsnWkgvgkQWrLGhmseG7zvoerJbOrCE4xI5ybhmlSmG0w9LqbT97k+sim4ZSvEQ5aovpcYqIZy5GsVqoYrV0FaUwOqAk544lkXsW5oOxJWRGAGGeGE/C5SRnVICTIYvTVFYpHc8RGVjriLVI62fg9R1jDiixYyxo2isgzgEmVhYgwwEjI4BkQQj3rYogwUj2lVbr3qNT/Nrz1/2MtdaVMtxb9qwmN7oAWVwPjLx8Qe2fU/scu+03GC/0I5cvb5u2FE5RmBSA7mDLdoxSdQNKITbPUT9Dccnqn5XnpmXBpz6/vd6WWIVBg0i5/twpRnJeUROnIIaLY5RilTRkxByQEnCCGDRQMjvF2NJ706AhBfMAooccWgwZ0DOta3ekb7e6iEIHUrf+vxSA2ExZay5ZBCTcD2hzKq/fv8Pr88Ga8DqN/WvMXlW2Q8Bc4iyOar7n52NqlnIqpvMkgHMqxQyn4l7casc09y4LTsxMOZQOlNDgU5gSYBlf0hgbYU56pkS5VyN0a0Pt/9LvxyxAhWnI6Qqtn8WSJRnX/fHes8dxz/JAyCHaWI8liBjJKFZr5LY6SpFu1yjmxGAyFQEnyng09Uy/nmPS4eUPSO4rtb885U4DEqbUDGqhA4zbVbG8dOwJIM6qdZ8EoJSZtta4qHyZ+RvBiF7cYBfN/jSsQxLLDMx6BymIqwjAySMgqeA8YDlBbhW60wFwV4coriIlt3t14SqKxykGYKJVs7hV5aWlM6FNbhVIVPBVQJxSFDwJUO5ZT2nOWlPkCE6W1dN7tgTYViL2sCIeGBnta8EIZSv7iv3sfb9ERgvy6JzWWmZ/23OtpYsFGcB4cXt4Xvfa6lX3gud1gPyWDuJZwUfPkG3yACM/A3hsMKL35/3rQFS9XV97UmOeLEhEU7RTyiWuRIMJld4bZEKbEYNklTZqyIG5PoatBZvzSiXycsvZ9zhAZIsNARgTtGRDyISMgMiWUeIyUQYTvUyVsXuCPJIj+DkDkPjB05xr2mCy4oi5Kv9MvRuyvkYpsoiAGZDiirmxJqpFjy2BCMZuc/aZipvZVNygddrjKchLnHKSujppGX+2N2nMbWQPI6Lbs3CpUkCERkwvW+DeNnhsiDsVBlMHi+MrZXRPufRpjRFTOsqTcDe7lw9sudOA5HpOeHTOxYqTVVBqP+HQ2h+C+NhOcULEhOl4pX6XfTtQoc7BBQ0A0pwXlrmmfKBmZ7HXj5G+v0InnyBKfQycEMQiFEPAcaLFVlmkjYLipQflpPBimfyuDrFWZ50TMMUZz6YJpzniOIkF6RgjUhIr2qEExlMxCwHF/xfV3xhAA4EBS/Tlb6oSIUHrpOU9w/OewHZtFd2ySrrC/jTZmkTk75ZCas+/5pbhgQ/NhvC7TjVqGZBmZd/PjnQK/kYMC6DAVV2UzD2mPvC9KrBqofWYiksBir6WfG7uYLY2i2ZRRudfi02xigJgF/F9i612c2L7vefEd/fmPLv78FjdD2uy9vvYbz4XtqT1cWXADgAHwJQkzg0Hcfecc5I4qZDx6CyWaOlLiZsDsnIJEiu8jhELYfxONUt4L2y+BSN6+xb4kHuW/XUBQ8Z9eEUtPQByNvvPybhqOe907fsdYMSysewnFh0FlmzeFEO37VCU2WNMNVUwWZPrc3EVnlrClViU+kMsaYJjwJRQ1sxcWUSulZo1se2k9AY6/755PsE/jTUBmptZKOCJoOkIiX/EQdZ63rfOcAegFVI8yz0Aas2soHycQm+LHdn6bONCNCNivRiYuvlYfmNK51iMmJbxqNe6wLKnXU5HBYXnLPWHCLK162ZMwpjEUjZAmBSOOXW+47S7Te8vuY8heXnKnQYkfIFysdrkIAuhtrTVNLahp
aOdy6R3qPug7KMsfjkvLP17LHM5j5XxKbSAW/kulqxWZE9m+ZT7wasVCq2s6EA8C1aoZPC70MIJqWb2ygAS5kMEUsIphULTRkwRmFNALIp6RvMFD9VVoFk7H5ftoOWLC9OTEg8AUOjCBhTWJjVQYvez5/LkcYEIt20psvJbGiqrj8OGbIkFvXvYCl1Y0QKEvdv3XJdB9f02tmkZm6L/6t+639GzBfUezYD3lEz9Pup3dx48R48x8T57sud3fV8MdPcYknNqWf6OkUxPBuuRMOMWa5OkJJZ4cZ1tCTJizpiLC6hMeYwbyaUwX6jz9Gj+0N3K92Ux7xqgscaC8P4AFdej3LI0MNnDhMzqO9toQcjszAdbosfdQqrPkz6XMHkxqOPm/pCYM1ISxZfxJRKTAYAZF7P08BSIEJTrbhZAQPc77boLyLN/KaYeAp+cuQaF6lKWinFvlBKYBj5mvdwjt5k/t8CIBiR0z+L2Y5T19hjFhfs4xQpEjgV9HsvxAkzadR8ntXaXrj+L/jGV1zVlNBeJjh1p28ie6myaT6Jd9/KBLXcakJyKokarHNAsCHwnGLbeaGYWLwwVpPD1udQKahdHAK57BlDcgxyfBIKUGEMNWBS/7VisE6FTUvnX+6f3aVZf8VG31po55xIM2awvcwaOMQOYiiVOrn2IzdqZUVL0FnYkF99wr+cyxoxT7UtooNe2z7lZKakYaCXBY0du6yah3XPael7OtZOG9lgZH6T07fEUFs9aboOg7WfKyGWqU3AYzapEW8y84G1PebWuVXo/q/zac9xGLlXMLbOit9m4lDX2xMree7DvZA8ylswnAAM8/ft9EqDT1hBgXZk5tdTRVPpkHgJIlUXOG+UW6K+PUj2cVu6pGIcigDk0yzqgjUB+++zrSvYD0KxIDz6AdTcsAMO4kEvZEBsjBCzn/q3nOJK1d5efmbAhhsac9Ikayv7JWNvnoGJNyJTEyqQImy7ZoKeMWvyWTDmNelNs835LKrOYVtTz4zMr39UaaruHd6vduFjbS9gbOYap9AHJrNkYEnXdSPYlI+VmmGhuzbF7F705TYudJ/jZY0P4VxconGLA1RQ7hioG4OFhqiCE6zKBiIAU1OcIVE/yoVjix1sLdW2cOcp4jmVcnUq+6FNq8SSnBMSJzAjPxXHYzn+zJ+vNvdyLI3cakFyfE67Pjcam6AlNAIea2YroF0gq2GoLizpXZyBQlKc535arQLtWqH85MbTKs6X1UywBk2EBkpqymtwg2XodpViJIigzCIPngANOIWCKLWXknIGHtNBEIBymev91USr9MEWxdM5o7nCe2AUo5aZcZL2Ni0kiCFH3bFwekp5Iu4m2398DAvU7+nFDd61WZbgs/Dsj0dwJ37Afej9fSW3beiasPVu6+NhjPfEW1AoKLP23cxHxWIU1puRxQMmefdf6YN3lIjnKQ1Mqbhb7e+cQ2coqY+N+PECpn7lu+5NivLxnZT8TuF0dYkvvXJUisagDCccccR1SnSuZ2YlSXUTLO8YCqECzeAPLpBhWbEyaBiScVy34kG19HIhWwDQAAZp7FtCCyM/aZSv1v41ig0Z/96RqXhtPVkG2yjL/ttTXqO5VQHPtOqr9z7TGJ/5tQfAMGKf7rp4jWeE9heZiRaNUZb3Ufei1g6Ln5LWRHXkNlBgRtNjFUEByNqBEksVMsnaWpDGnlPGgpASmzNEa8nQOsHXx5gzrkgVgAULomtW7yoXKjDDpQN0WAx6SISnuXFMMNaMd+8gTPcR429zE9RVQKaqjV9Cz+eum+p4yW2DLwjeVc1KHSBk4rhWTfplIShmriV1eomteKt/93d+Nt771rXjuuefwmte8Bm9/+9vx6Z/+6cP9//E//sf42q/9WvyP//E/8DEf8zH4lm/5Fnz+53/+4zT7qcqdBiQSdJjQ0ns24QtMGtqufZ0Vpbga0BqkpXPjMrEp/YvfFk/rLgCohUothkCxNuYMlLiKWKyKI0ZlJGsKz
FR/kzSfc5LCbFeTBLxLikR5QU+huG/lgGMsrh2lfQG5pmUkfS8LRy4uV8vnYC1hObf+YV/lzvrZGJA6SVbrpurDpICJVgxM/1og0veZ2aDcILxg95GMrrfFgui/+rMFI+33yxZNDzx4MqdxhXp7zsdhDOy+a8e9lEzKWGgF7928bHvWLKgWnGwF0FrXsZcy4HYkHGeWAetSOs9Ao9YS4lyYtHNSSqEYd5qveQDzLdWYtNwC3FvCkaUs2BADQGTbGIS0bb3hYsSEAOjYEAtEqLxptsUrOArsSzyhZe3d9OrIDI9dJGlooJCARdaUVqsqxoCURYFPJR5D/vKEoSUjIagFjTelCCZ/z9kUUmxxIWZzayefZfZ/txKhXLdCW3cig61DLihKapYgAmdlBGyZq+x8tg+U7HHJ0tuuVHYzsliMEznGiIOK45mCMI0EJQQihxg646pmGj3p3p3Q6yEhBrSnVO63oshcFnQU1/GsjKOKOS8nrvGIKrA9hjFQupfL5Ad+4Afwlre8Be94xzvw2te+Fm9729vwOZ/zOfiFX/gFfNiHfdhi/5/8yZ/El33Zl+Gbv/mb8af+1J/C93//9+MLv/AL8Z/+03/CJ3zCJ7wf7uByudOA5MVTRjz5k8iI4vR0uhjbZNVcNfhrmx6Dmm7tS6cXzpT7bdptwFNSa1pHXjtTIS709EY9Ds/NQy9U1+dUJ8pugTxwYSnWjZRr1V+d6UMmx0LXZ1o52/npAucBvzo5OgGnc5L+mpPcA/24md2GmW2ab3duwCk1C6ZWKjw/btvfI9FWxtL6zWNG519z3bAuOtv7p8U+QG8x1S5G1qJqFc01MOR9t8rS2G1hudh7rk+j810qozbvZU3W2kFGcfz7UjEBsKiZMmor9xm1deR69yRlxNyMXFDmHDEX5fZU3BsfHiZcF1ByjKlmdKK/O+dgZgyyCiFFZo9eNNgA1Ds+AB3cx2NEZN+lMWPOuSpc9XfDguj+GbGYts23MSRIv/SuhEAbU3obMHYr9MdpU8qPlTVpcQunuT07iTMR993jFPDwIKUyaZya1DpQ65agPVs9hY7m3S3WRA/5CAFBNA7SdQtg3GjAFCdRiFO/xicDOtkmy25dn5c1eeRzcp+Jnu9GxQetW5aOCaFrFkHHw0Nz42KSCD5fYaZK8gLVx2sMSVbjWwLQ+3UXAGIgA9N0lNOcF4wJ41YmMocEIkABrqG6xQHAaS1P8ctEZEw8/WteIt/+7d+ON77xjXjDG94AAHjHO96BH/mRH8H3fM/34Ku/+qsX+3/Hd3wHPvdzPxd/9a/+VQDAN33TN+HHfuzH8F3f9V14xzve8djtfxpypwFJUouJFsnQJECjVril0u1MkEdEYSdiBv2g5+RZIcq5ggxmDVA8Cx5gwAgNErlNLBaYeBM42R49Ce4Ra8m1lrjqHqUAFGM1ALpOidUplaq6IaLWGUEqmbKAsmDULqqZtGr/GDBCZiRlsic9M5KSYTpy7pQJ7QJG0ft7wE/3yVBuOZeuKfSXApH+GB+MUPSz1dVyvfZZgGJ/G32319KfR+Nxz/bHYUGsrD3XUb0eC05up/hz/oluH46BSP+dx2jXtqchNsC9XbdX0Or2Q3PZACQO4RiLQhsi5pnBsVJJ/DjxXSyGjpi7dMpAU2S9ubmfN3sQAixdsJbgRAOZNteNYkKAvtr6HiCy9t7a39dE+p/xO36Ka73vrdg0mayFVQjsRzGJHwFJWjADMSRMIeI0CzCZYqkjlUNXBDPzaMWQROx/r/e6cNHVj7es1xrKFANon5wCqquRvFINGNT0wHFptKG0tXM9NsRuu1JuVtKOlgVNA5MYindEaOBlCg2MCJtSspyRIal1YnpvBOvZEUpwjzxa7kcEKQ+uODX8/+z9fcx1S3YXBv6q9nme996bpu2xY9wYtc3AALYQCMmOnWaSkMRGBiJrgBbiSwQyCCMNbRE3ICAh2EaKnEj5IIlABAnBRNhjJQpDBqHxyG4+F
EUNCpYYDyPTYywhG+I2AWI37r73fc7ZVfNH1apatfZaVbXPOc9z39M+S3rfZ5+9a9euXbs+1m99pisxv0NI89PD4bjmezwlco4lZUG6M9fr63zbE/HrTn16enrCD/zAD+CP/JE/Us557/EN3/AN+OQnP6ne88lPfhIf//jHm3Pf+I3fiL/0l/7Sczb1qnTTgOSnn04Ij331QdWQKAxWPvXg82RbyakbxY9CrcsRMKjKT5rc3EZT20zVNoiJHrJTOxCSDbCvEYAoSgeZXmmMMF9gpRpZYwjXQOrWJGE6rslu9D0XStZfsi0mxoLbhrd+NkSZCaBfAvSU45DM7o7rVkPCE5FZWhHur6E5l/aAwgz1zJxmz8+EcdXusySr2iZavvdO8DMyKbE0ChY4oWMZUrd3/yzNfDvtvbV7JVjvMXfSZEZ736XMUd99t/rt0kx5ZAlQ07zug6RrABUJTvn44OsE/X46+VKuJFWMyWkkaU+TQ/TrNZkCvT6FJsxs0pasQmPNxkI+1CSW0tyKl9O0H3S+XWfagBfauiHXDPp3CfgYzS2N6tjZ+jilY785txrjk5MW4voYajb3EIGwbL/Nw+Lz9yTH65jSdvkM/lxlnHlES6LRNLe6hWvW+W+gGld5V8ExXX/wHkcEPMBjjQEI6R1CiFgXh7ew4BhC8StZXB3rfD+V64EGPqQ2hEyyuEaQa6FIw1FC/C4eby2+9K1D2lcXl7SLh8UVELK4mraA+2xyFqXutQlARAAhg49k+lyFniFmSwyQdpJCOy+5D5LJm2fChMQXoETlIu0LT5IYQiz+SnfS6TOf+Uzz+9WrV3j16lVz7p/8k3+CdV3xpV/6pc35L/3SL8Xf+3t/T63305/+tFr+05/+9BVa/TJ004DkGGpEGE60lpAUAgCwtgjeMwnKGgOWkO1dA1uU17pZpntog43FvIdLKraakG2sa77p1vYkScSazSC8p4hPdQOnhfPx4Mti+XhYcqbl5IQ4Yh57Uu0QSb1N4RsJINSQTBQikmzDKb8AVefgzGTW0pStURE3oCOF3EzmWqkPpZNpiFzC2QciIzMeS3pONGIkeozstQAIsGVOLa1X73oPpGhaEl6PVb8cZ6Os7XtByYwWowcCret7GHtrXtG19lwbwmykaerZrp+Ub6d9yxmy2t0D1pXpYu0QQo31kKIXkeknMbg+OJxczoexMqfqDFDk2rp5PmsW13gAW/CxOcfWCbpPAyD0nhoz2ls/etqPGaBvac3oWn+MVY2cRb11gjRirYCKOYa7iCWiSZ5LGq/FhWJ2lIMwYUUbdh9gUSvhVJB5DcqKnkJcS0A5rEjYt2T08pAZ6Qfvs1mSK+/NHd2BKtgh0tYxbpYFQM0hYgGRxl+ENCYuO8NncPPgXQYp3Pyxmm9p0yeiWmlUf9YUrj/mOVKPa46XGDMsibT/JZ+iFLUm8waCX6lCVIBrTDTh75tG72cekg9/+MPN+W/7tm/Dt3/7t79oW95UumlA8t5xhVtDd4H3Iv44TzBEIW99jDgCjWSjmHgxwEHmB5423mLmVUnaNxPV6E3pmHKP1HbVZ1FsdYrdD1C76sLJ33Px62ZTba8TmFkKeHnM/3xZKLdO6cV0K8RN+PtTqH1cX9P2IYmoGyVPQEaghM69d7KBiBXpBkDz7vLviDmVdKkUeo/2JJ3f56wuz1kMiLynKacwTjMRgbS2XAJCZEJD2Rc9plmS/N4zEurReY05tBJAriFmLcfKGD2o9XBa/BaULN6VfCD02wKPvXfpPVerG2gdqTkwqce+acuTD3g8eFBY1Vex2spT2FkKn+3zOaAvPeevooEPOh9i+623wAT1WNGC0HEPhFhmkyOgawlDRqZWUoOlEwdF23m0h1afnNl5ErxiHuvqPrDGDFI8ydRRsrg7FpiEfD1kEBiL+J7R4xF5t8kVs3cf5SihnF8IsSQFXmNNQry4CCxVsLVZqxoLCTTAg4AID9VLgITABAci3teEos5l8
OFQwMiS6yatyOJrwsqMs0xA4pBCMyc8EUu4/oD0vZCPaYWKGXe4LGRstFEeWFc6dqzjKyihCKEh3s22ZujHfuzH8MEPfrD8ltoRAPgX/8V/Ecuy4Cd+4iea8z/xEz+BD33oQ2q9H/rQh3aVfxPppgHJT70+4elw2pyvjpPGebGIcJBSnbqrNK+aFNTF5kgSmNAy8hoIkcRBCV9ZaU/xMRZHQ1o4SXpFauY11k1rDUvRFGmbJIX6pXCcPPLHW9whr9jYZo1J3nx8XrF8lpRUTRG9j74INaFwWQhOsvPmCcgIbLw+hVKuSVQ24WQK6JL/0p6LAcG+TX+PM+slz5z1IdxrWjICAltwYQMQzTF0277YlKv29LbpUg98au8xw1SOrvP3fMp/Xx38pkwyx9Lffdt3Sx7P22SJBEy4j5D2TjMaJ+3dNGBC78Xr1UKZPmUhB+8DSvxG+RY05g3YCkA0koAjndsKfSTwkO+njRN5XgMeFqjQ+tE6pwHkTf1iznDfHjrX+6vl2knn6ZxvvzP7XmuIWFzEcQUWl6IupoiLqbx3oUSiDCEUqX6MyEkJkXOS0Het76l94pnlqoAaBWnQKia7U/tCXNBH0eDS7xydM5tMBq8LEkubDd4BQJPPq5dDpIb79Y15FgGRqiFBDrBDf8mhHXCRzC8q3+NiQMwO6s55LM4DziO6bF4VU6c5RKyooCTmiJnOOSxkGpf7d2G8SZXpVhCS+BAwTUn9ArcASla2TrzkMwHggx/8YANINHp8fMRXf/VX4xOf+AR+3a/7dQCAEAI+8YlP4GMf+5h6z0c+8hF84hOfwL/77/675dz3fd/34SMf+chV2v8SdNOAJBiDqi4s7cbHc08UsMCYbMCVaCMl7ktW93LzghCSz0kFHHMDm9chQQlFLylJ+uI2WSLKgp/f6dButoBu+0obVMkQy9THcnEl4rwNgaAQIpaFhXHMK5XMycLJDrcZGzBCEjkORrSIN3uBiGVq0bRxwJQCxKxdDjD2kAZG9jKdLXHGWGf45TGRpnmz2tYCkjktgXz2OdTL47FXun3usxelDWQWovXtVqM1Dj8qQcleM6692iKtHPd3Ia1JOWbzc/EpuWLRLrua0ZsY15G2hw5VzfMAiNBfCUTkeQlGekBkDziZ1dZZ2ioATdABXn5mrtaxUZ3ltbatvoY55wx6ytxd+73kzKL2xLRnOVe/ZxOGP241YQSdenyrF/s6nxH8EjcFpjb2GM0momVI70zJ/ix7Yy6w5GBai55FAkwuzOQaEQAFjCSNBEqyR49qqeDYX7qnvnTqjQJOwEAJOweWDyQ9J2lKUkCeLeWugXfV0Z2CT/C8afT+9Ftae1y0Rd2p0Mc//nH8jt/xO/A1X/M1+Nqv/Vr8iT/xJ/DZz362RN36t//tfxs/9+f+XHznd34nAOD3/b7fh1/5K38l/tP/9D/Fv/Vv/Vv4nu/5Hvztv/238Wf+zJ95P19jF900IHn3KeC0VElBjxlvbD9JelE20dYZzQeSHrWghAOPlak/yNwL0J01AUsaWHbmHNkiJRs7AnhANY86oGpx1hhx9BSm0m/8VKpErH12k72XaYboHM/YvtlAIoCc1Xc9xXZRF8QXJisEJ0k4yfmU4v7X47SxPOWX4fH+NeDRM624NuPJyWKwZxhv/XwfgFgaifNp2fTFXg3CqD29cyPGjrIo9xjMPZqxPZqSXtu0b0LO3jx3Bx0nDWU9Pwoj2ka3Cuw4/XsUfXCOH9NMX/Q0YPwfrbNFM3JIDu/0+1D6gN3LzF9GpIXvtoCDpZnYlusDkHPnwSWgWIKRMh5yn3FTOmnSJQGLRlWoIkJVhwo4yFE5gQ7kEK8AQEKikH0xQgmXD3CrhHaftFqkak6Uc06pgUe0BBLzXL51bHPRtD6bqBJ9n449vXuIeCjCSvFdhJk3natakMpPSBAiNSJ0L9eK8GMy2XK5vc6RxiQDkPzPRRHfGBV/UOud88W8K2mzks8IAQ/nkgYll
mhrlG2sBR7eV7/YhSlEyBROJhO+BbqFxIi/6Tf9Jvyv/+v/ij/2x/4YPv3pT+OX//Jfju/93u8tjus/+qM/Cs/2kl/xK34Fvvu7vxt/9I/+Ufx7/96/h1/4C38h/tJf+ks3k4MEuHFA8voU4JTNhog7YGpRRmhqpUgdABakuNoUys7XhZbMlxJ1BlbYOnVZmy5JGbhmhDQlAVkqQbcuNKBdWdlCjCUOeKGlfQbXfkjAwRdXft5aWEjCWRZ7wyZtFIKTR8Eh53XuV0KaEAIi0hxtFoQ8JxixSJNwSkZhhnmQ9/WAiZY4rVc/bx/dWyT8ihT2Eurd39PAnEPngJFzwKvFCHLNwSMz4SJgUqnVmPSAJo96Yz1fhnse9WWvf6z+4G2lv/Rv4+jO2kLXpTM1Xdu8OxO8zLaZH480pTOakN76IckCg+esQb3+1sjSbNJvXqc21jb3eqYNj8npm9brYrobksAM2bcguOxnQPflPcHa8+j5/PNSycIUN7dGFahwMBLLuf73Iga6MTPyuS4WU1hrO98rCThw8AGwjOrMAmHxrtGIpHvbvCLkJ0Lv7kDakbovlxZJMMI1InRIyCemUNza8OH7+CwRv8KtOlKfpEbew/9enz72sY+ZJlp//a//9c253/gbfyN+42/8jc/cquejmwYkITOttLA+nVqd68aUgEn05Lm0UBKjn9TPZC5FoQ2LTSU00MGeSwy/8LXQ38EVp7Ajklo1hFj8Reg5ISS1OrUP2G7a6dm9dtkL7YjWzjOpffw6tVFPOtbXhpAmhEuFAZjS8tLGASNxaRZsyfjLDb7HFPQZ863/gTzm56wwtNbv7fO21x/Z8UhzcU3iQGiWkda0A3vHxay2ZJZ5lO/ANSVcU1B/x836JOtryYPM7Pj7S8ZVApNev0rNwQwjbmlJuEaInvt0CtVM1PumLKBrsEd9zX/3tBHtuTmhRe+7z2qh5tpznTlkZXGXQGUvkXakhIF3sYR/DS7msK+JIQ8uln3RBCGWQM4Iqy/rst5hq9nZ+iryutdimh1zqNq63z7Alf011anvnTLhp5YA1AIi1Tyr5hVxjplgox7zLqO2OCD5hoBZY3ElCXWoq39nh5pzCXBGlwS0zlWkl3K2RKxrC2Q2ZlwXCq/udKebBiRPa0A4rQ0TALSSH8q4XE0GthsjOXun3vAIPsKvyJFhPFLkDQZMULUDJYxlJkp2BMjkSIbWwUWIQF0l1jdQzZzowZYUSGpBqC0WSfVhLzKYNMNq6hFAxcoB0AKSCkKIESItCAESznjNSETl9ebdLmAALInizH2WJL1lPnUfixEQmdWezLZ3RJeCFItpI+k+kcVAW2DkEiDSu9YcyzEv6jiuYKFstww71UWR7dYQmXkTwE1orDFQ1ziowEQS9zORfSr7spljsZoyRFG385Qwra4tj0u7nsr35WZrtcxWSDRLU99rIirWDFAYBUjonZ8Zl9p9Gpjc0z+a1sTSjmhrW4ryTuHfY040SH6V5BCeMrjLgC4zkvFmT1rpPnbdqI+DF02L1gu1b7WPmkJWCA8iTLHWLg2EkGmWBCJUngMR3g4yzQJq5vVuDzKfkLg9VRzbrbJqlfRMo9Dik9aLrDXIgoM0IvV7VQB4C8BkBWAYeDzrM+/Up9sGJKe1mGxpmzP/Xc0mctzxEDcb6NMpNJmI02mKzw5QMijL14ISGtEx/W3ta9v2NUBlqQurd1l9vrgCTnrEtTIjonoBNilL+L5W6mFpPppzoV7jWhAAaqheTRtiARH5Xa2INdcCI9o3ktf4hq+FSJX37wUEezQce8DIJap0nvxKe8ao/zXmp1c+HbdMtzYOtHtnv/nMmFmVuUAUm7ZGOO9KgIc1pvC2ElQ8nUJj4pSe1Tfh4qRp4Xq/95AEIxKQ0O/0nomeEMp7Skd3yXgTEGvsZbCdI6OxMgIi/TJz64ZGo7bw3z3AMxr/sxouPl54VK4ZoYQ1xkJ2NiDtSDJpckBIYXOLUK4A03pvi
TzZI0WbEpp1qVoF8PpoD6Rw+5yaOar4jkjqrYNat0jtCLVfAyN0P/m+EBgh8yt6NG9CMQuPdR9Ppmh5LjpXtSQ5ghb5khRHdglGsiaFbe+Nedse4jyB1Iq0/fTyTP6dPr/opgHJT33uiHf8Nuwvp9enoJhoheb308kXCV6VFC5JBRtcyuTuttE0gBaAkHaEa0boOUArDYmRS7zTX5rLfAMaakRE3RbxulUVt0PJwCqfS/4fgG2CtT3Xgg9ixoAc2reAjdBcezqFTiSceYnliEZmAFpZjbm2QMkMENkTgUozzejXbUgaxWY8GzIYSBJEojWIa153PpZ9J/vHBgU2c2lpRvYwnnsZU01rEIPtGOm9wwmJeX/vmNaGx8U3WpLFO7zzuGRmvu0vbtakU2Xse5Jv690k4Kd5TUAkhNiEXI0ZcKWbK4PlffIRcXztFN9YY5jTv/rOPbNF7R1kxLu9oGPvujEDcCwQMtM2SxN7jkBj7738e5HZEmnCjuwc7X88GArRXqn4jGlx9dugMm5z78xzrUAzRA/G7qlpa7gjP/WHA9dwVPMsus+7to6eNiQirZ3Nrk6CAMcBTwId2uuHjEQi6liKEdkvqB7TeclieFfD/3IE433iF1RfHJD/kQ7q3jR6PxMj3smmmwYka6imPjMMpuZYmShkU4jKBFSNSsr/AVTJAEmMlqE4qC5qHIzwv5wcJFABorJsybrkq8tIGSG2dUe4wlGmjLZtCD/LWV1mSu6ZY8kQvfxbVfBREzpqDuzWpk7nziHJIPfKyGdpY0xKKJ+LRpqYGWrtsu1rc5XJ78ElfRGaWUV6bt+MiB/PMnOXgo1zSIIRmS9hXVN8f27qvbo6zjWgRhGQ5jVkKQqZZqYzS2uowgkORvh7asf0YiGUw8K49rQE1rvIsLR9LYIOVmefea3xMKMF6ZWx6rLWGXn93HVAo0YzwzTlPEzzcU1jrYbJr0x2mBSNU3O5loOH0C8h7x3KPhSysK8EfGHP70XIlmBlHPWytqGtpz3PTbOALRgB6pwAKzNDITJAQP1P7YgRgYX+B6CG8I2sLiB1UYiRnd8//mec4KV1xZ3utJduGpD8b5894i0cy0L6eNiKe+Xiz+2WX4lQlElymcJUvf24JIneIUn5HxePh5x1GAda0FIUC4S0GfsMWpLDXwtGOHhwYlHZzQuyY2fcHNliRMc1ZGaS7qyhZppPKvHWEZCH6SUJ6jHYeUK4TwhJskfgg77PkwArlmZkD5DQzo02cY2h64HdS6SYiZncmq1ozCWBHqDmJRi1u5wXm4R3DilhX/1dyp9r1sUBCuWnYcAEfvsdNWZy9M21hHbaX+1eTpbWa/MteTSekLQERVtQpI+teRPlKYqICCvgc0fHQ8RyaLViXDiSvuG6a0yl0Mir2n7t3Syi9nPNSNGUKPeRtsR7hxgdXP7mzjusrn6fR5Z8bw0RUNZo3h+zuX7OBaEz6weVkf5N/NrMGFTH8gzDZnTBDNgcrW0a8Xcs6y8FXWhMjoFiqqWsE+esHY1mhP3QIkNqka6ArSkVlaF5qF3TfFK099KsG6g92j7uS7l6TesWAh7pODaaiMhucCU5Zf4N+s3LtHWXvb8BIVUzElGFivVaNBNOWiSja8l23elOe+mmAcl7xxXxKa2Q2iauMTXcuZLKL94VJo/XQUzgq2IbveSFMGezzVlQacGjxIHpGMkfvllktypdbl+60XQoc9vDXoiAFojQ79oPqb0UQSNJu8aSD2myRYvZcY0qEHl9CioQ0TQlEpDwcvRsol4cb67Z4RvMPGO3Zdo0W+4es3iplsRiIEfARF7X2jxD7cY8Ks36mEksQ0zJR0ljMiMx22pCtqAU2PqNnAtGOFnmTtq9MgGYJG7WRfcnxj0UILOeQpMFnTSxXCs7C4j59dFY0Ug+RzNHo3ObjNk84iDSf6RhOSLPR8G0y3fT2j1rYmS9w55z55Kl9diO43YNm8lDQJqAdMP2ek/wwJ85O+d7Y
15+D8sMtEczDGrR1q46EKFn+9ACEsqd5WMsfijJpMpuI/f7oPZJ0EGkWSBwEML38HqPLmz0iqUDgRL5NfmeR0nvySG6CDldLaN9avqUHJwAKGBEM9Xi91lD1edwzzIxIg8DfAv0fmZqv5NNNw1ITscV7rgmqVzUNzS5QPNwnPwfdzStUktfGGVu6rUEwDsy5ULRFABJ0+C5ZBg18VGqu4IQWtRosWod3dp3tZZ1zggCAPm/u+hyMqRkOsIZCu+gOp9R3hAKmUhAhGdQ52DkyPw/CGhULchargHSb0T/R9+rF+nHombvFip2Daju1ZzM0CwYaZmA2nLNXGVGKjoNhKRwOqQkXDy/TDHBiPpGdyn1QEa6No6QNHMemGfKqB7JfElmbc0Ch9Wn8bkcfGbcUYQPMXccjV/vXfW9UNpNzziFWLK8V/OlpfuOIyAKtFJ+WV6OLa71eU66pmnjjPZnTzkqO1NO05Y05w0gokUvIyKp/ktSD8DJOXSOBqZHi3c5khet13necK0MAB9qMs2SiDDmMMTetaH5faqHtCQLj3bJnNDp+Q763suTMkqAAWy1IbycLBsQyz7P+dI9TuDOtUxtj0fg1XJwQqZySVtCbajn0r31dyx1TMwb5xrh1J3utJduGpA8vT4BOVO78ymRkMUAOJ/D9YnIN4CuNUnXKftyzT5c1dgRQMCR6l2z1oRxcrTYVRVzPpdBiMvnKO54XQhbmouclf6SBGQNKaa4y5lYQSpstIsgZbYNGQSsGYzw8L0hoklguAbgPQY4CGw8ndYCTsg5/UkBIo02JApJbKib+EYqqxB3rqXvXlh8zoDTpteRJJ4LSq4hjUznqravnqNn6GCF/sr3OSjntoAbxbwKfgtKelRuj3UjksSDJ2hMm55rRgKU7X3y3CztZaZ6knAsKAEbQogIeUwHl8atrM8yrdRAuaYp6be9+pJQndb7SCCyZOlmIDAlyBnnY4ybd5phprWxaL1T23YO3Pvgone914+aIEAz2+JlN4IVAUJMH5xMsn+dr6ZGNBclyLSsAbR3m3lfXvdI+zSqs0fWfVZeJbleFUCS1ywKNrN4lzKju2SK7GME8t9NJnnXhufV/D96mo9U1/YdZnhwDgY215Rze/a/Xtt43VQnt6CovENswIv1dGu9p2u3QOHMPeTSZ96pTzcNSID6kclsgDtZEhVGNUSw4DQAdGmYxvAQAwWAaQtyOD4qE4EH+Wy3/euRJNMOLRih37w8kWV5XU3cXWmbdxQqMJa65PoRIW2bWZ0FmFSQUvuhSks0ppL+aecbcCK0IJYNu0X0TYkxIrDFr++VNlrmCfLciJnqkSV51Mrw50rb+sSEjhc4adbVMGvCh6RxFkVfS8IfLXPYyAhc8t005if9tp2VeWQti/aYrYy+3Qw4fczhw+EBZG0Jxd2Rmw8BZiks6Y+DNhywVr5+V880KzpD3jOZ4u0kjQ//yFLLqgEsmmtkSjNL23m1Xe0qIG+BiaYhkvW+NONBJIUs16TeuOlpoLS+7fVPT3uyp03afdZ8lRHZOJDmplsUbAYhluPikB9ik18EQOM3Qi3lYEQCkWuAEGAOgPC5JZfPHi5xPDgAbI2LVScHI9rz1vDy0aju9DOXbh6QSEoTOzP1yioSQkzqU598DkhjsnjXJPIiP4eSNBF5A/QOb4fkyO1dZtbXlM32AWiyvSaAkdTO3BGP/hIoWbxrzbiU9+Kv0kg86Ler7x5iBGm/vRNgI1YNCjmnk3bkGALWmEM9xohTdlw/rgGv16QZITOtd5/Wohl59+lUzpFWhLQg72Yfn6dTaLQhMrQoByT8t0bctMR7V3aQECpT1IQpNcgCICNgspc0Jon6Z0S2+UhQ2ylDCT+hZfhkdmz6FyJt8kCgQAfONXl3Nu/FN1EGRmougG2kNA7s2/NbUy2tvHzfpj2MsZmRFu8BLD1tiQRZFuDi/a19A/m8CkTrGmRJ/QmUaI4Hi3cAG28UdpgyqZf2+uTj4
pHmEjngy7kJ1HkmNdMugxFaT2sCyJqtnmukax9sM9ZrtPhFzKcWpMg+lHX2mG97run1bsaBIWSZJR4oAAAz89Uz2mtjR18T7DLbd9oKBZ4L0PXmo5Y8k9auJ0/RvjxqmP2A4B288zgCgIcI1JKCzSzJ0KEKCEUW9Sa0Lmue6gOi6BA2rlZF47AFHFI7wcvzumZ7n7fwHLDEn0PfnK/lAEqaAKISucxXoe8taEnO1bJf+sw79enzDpDMUtlcPbCgbvaa2QT95cfkT8EnrMy4zqkseo5ASAUjNYmibbYFoCRAKsmTUMtGACEH/PVMUsQXj7LA5b8lSlZEY6q1sncqfiMUSYslM0xMTXVat7QltFmvp9AAkUYrEng7BxM3r+hFS8LARwiXJSKc2dy1ezWppCZl37so7S+/smeOIhoxUxDm6MkjcAFyA9qq6zXNSKtV60fU4ue041naC0LktZ5kmV+TyTnlusGv8WMNhFjMJt3HgcgIQPU0JRpTvtGYOIdATDFQkrQWTRp7Hs03CUa88m49hjr99WYfSNLeZfTOe7QmI1DSG7NEEoxoQE6SJTyxxvRorOvX9fXAeifr76VkfYO2/Vy4kLWEvo2OyXOmLC4JqULW7IUOU0zP8Q4qGOk5onOi6xow2QNEatlxLjLNVGqb0+U8gdromZMB8O50p7PppgHJ4WHB4WFRr8kFXrPlpWReT8DGt6T5pzFTMcKHysxZxG/12U2OFgzvkmakgBSHBDqg/K0vVv4613kwUMAELYAhpgUwImk6krQDWTsSi5/IaaWFvvqNyGhaSQuyFm0InavX8vEaCuBYyc6e2d7LKD4jO8sUZjQ76ocUqpBv8tqGLxNU9pkjnYE9V2uyZxOfKTtiLFugQSFkuclLLBt7m28HWFFDfYa1avc4aep7AiIjzQjQml5xqeweMCLfd6av+PFh8vvPfGdLWg5sgYusd5uwdTs+Nc3BuF0eluaAM4O8zRvhi+bbxZbaAkgIhLhW0yyPX23OefGeNrDnJLVr9X1aJtd6fwuMyeuXEBeUAPqaxPtPlqG+pHZax/KcHNftPjbWQPF5qGn65Li+Bs35j8TyDvJdS1jpbB3A/5I/J3dsp/fwEznEehQUE6e01+Z9TLvGtCBc+0ECTnoPGrYbU9hOt1s5VnhCx3NITqF7pKg7PSfdNiB5XHB4TLvkjHp8Y3rAJQK+jdTFN1Vu5tJs4i4x/DokSouNtew5JEmMczVOOkJ20A8nWxzhPaLzCaj49Pm888XeX1u0aKEkacwaYlG9JtARixP7cQ04hm3OEQIj0oG9dWoXgMTQihSzLQZCJO5S+8zPR6EhRomot5Fr1+X50TVOPWbZYnhmN3le7iDGpMao03sSOLFsyJv+YEXC2m5wkjRJmmRmSHsmr6XfOhOk0TlABNAZNeuvZNy0cUH1SeZY/6tHDZtpt+5LMX7vCi6W3KdtYAkKY35iAGQ6D5AIerAR3uS1E0jM4it2nK4tTVlpQijfZwPmRJ/LCGLtPTwtJTbXZf2z30hqXKz7rIAAGhjRhCazQETrRwly5b38fbV3kN9f+nBdS1vyxI7l+2zfhQFQ9l6PB4/jmjZiWo8OZCVQfCu3/p2S+PCztCMSiEgQovlmlOPY+m5S0A9uBkUWC9xSQQMkPIofJyurvcx6z8vsJSmU2hMp7E2h0V7zXM+8U59uGpA8eI/DUtW3mv8B3xS0DYL7HAQghfU0JGcpks1SN3dPmpIaXStkMye+aZdFqgNRylnJmdNvpg1xMSRQwovBitSx/V0WQrbQFSf2WN+Xkh5qWdc1pnM0yTUnz0sjT2jaEa4d45JbYF47cg4QkTSzAO3JEyGfzfORUHkLBFUmpI5hIi6lbJiUPI75BiTDTJf7GBjhdVl9QPOp964aSWZKnuc0A0TqsS2xtxjmR+O96G/6PktzTpYbvad1ri/ploy37lvCiXxJ6C/db40p3g7rn6YV4b4kPYZbvqfFPFuAoNX6tBnte
yBihmQ9pc0lKUtmToPux8bPSTByDiCxNCPpWAfY/JiPExnpb1aLdGl/ztTBv5vcex5y+HLSfnDtCCUtJrNq73JIfDDLBLRBPAh4aGZZjclVjBsg0jPD4pEHORA5EkgJdW8mRp/2yK2pLFJEMX5OaEQCJVbMPoKBARTVF2Qn3SIYudObSzcNSP53H3jAw9uPZVGSZkJEWmQbNZRliDgiwPtqysIlfqQZ4Iv6g/cIMaZwgyFiWSOOPgIIWIPPIKVukGfPXwImyUapns/aESBrQWJdmNeYFtI1n19DltDEZKLFTbWOITmxH0PAe1lKStnYuSYk9XEo57TQvk+nUDK7S+f1cwAIYa8COJzbSBmXxatOoVJSeA44kXRJHgUruZ9G15KoaFJhSyrKmYPFu2LGVcqoJltbJoH/lWZae99L00jwNs2YfdBfCUDoX5NniP6VEKEybGi/vQXUc6lnx9mf/sp+scDqaPxRqNqWgePHoXken7uaf4xGsm/V/vOtRkQ6tLfJ6VCOJTVauE4/PrJ3oTFX+6D6mewlTRNp9UkBcTlfjVqu46w+AiU9kN0eb4HIWFjRD0su75s5N0OyXxuQd/AlOTHtyWkP9nh9CkXbF2IEgkPwZKrs8jysQVuCA0JwKTQ3HBCB6CLW4BAdsGQHeJ7ccBXO5jyjOZmqEgDhGhDLBEsCDfscWz9C3KwpGslkkr6ZY6JM2SPr/ZqpF6eeZQLf12/BrGsNL6+x6EWfvFOimwYkD0vd3Lgt/BLcxq+DJgxpSUYRmDTiGxwlMgsxFqd4ksYkSYxrrjVtyXau9Xf1IzmXaG6FGBvQI9eGNYgFUkhmpDp5y6iEzaYv6y/P7kz4mRjrGjWhGBlAKdcVMAL0N3DtOqdrJXLTTHw4zSyQGlM0kmJb9cze09O8jGhkirZnU5A5ISwgwo/l9+5F7yHBw+JTXgMgJWErkfEI8DqnbtjNe3ENZEjOtnTuAa4w1sS0SoAwQ9b3W8MoUWbSmGjahiW3g4Majfr+CtXen/qUa0sIjFCCO60/6bEhogmk4BeUIBtA1mgr2rmD3wJu0hSdy4hozLqch3RuDXa0IdmnIxBCx5q2zgLb2v1WW9p269e1/uR0iWBF9qu2tm33maqOWkNsmGDSmPAkfWued3Tsc71pfKX7SgyH7KPIHynNskKsPiHUZycFiEhTLGA/EJGmW7JPqb9qgltkP8BYNSR5zqUySBFGnRM+mH0Afc+jcafnppsGJD/rrQOWtw6qZGezgDn2Wzh9SEQvN5KnUyiOwHLTobC/r08BYUl+C0n9GnBc0wbonU8q0xBzaMI09WNe1GJe/BZ/AGJAjAFwaZVzwoQrOl98R2LWjpDJVczHkZ2jzTsiAYhjSD4i5DtCIX65UzuFAm60TkqCQ+pneY7IMpFLqvJ0rSh7SujeuoCWeig3i2cRfUjKmBke5x0elmqvTt+I/+Pn6BjoM7Z7Sdt8R1qRPQyS1jb5HiPJK//dk5rOSkz5tR5TrZ3TmI3edY3RtoCIJjHWGOTHg2+0IQ/e12zQPOGayxqSAnrTM9vcLcSQ5N+C0WgTjib/LKAmHKU6uHDA6rten9E9/LjVfrSJNrn2ZPS8tr+3pm6atsQCeBKMyM8rJcO0LiEDk2MIeIArPgQ88MarLF0nsiTx2nn5+yDGmLYGav057j97fTp0rmngQ/urBXDgxMHnGiIeD0v+W9+L7pX9Kfuqp03bM34ls937bs2Y9nk8eI9jCCUE8LLmfeYUsC7AQ3QAAkJMe0YKksIjXDoQc+6d7Q9Cz64BYlozaOkHIteEdF4IBuN2rPOu08AJ5WUpfca0uEWIwjLd07WyjoVU/giuQaG66rNmfU7ueUvudC7dNCB563GB43H0lY2BjntMplaWL37aZtNoCEgaE1KEqqNP0ptjluI85M2TEhQG5N9IkhgXUUBKiZ6VgUjUomw5n02vkMsQGImNjwj5f0RUk5ESWSsvkJVJahklqR1Jx1tTjxniy
dYSCEm2vSmfSFt2Ecas3DmdA5FNqFG3ZYK0zby3UZ8DRLZS2MvDhs4Sb2+P8eBSa6uMRXvnDv9rRZmaoR6jyMvMHGvmWdx06MH7Boh45/CQx9WDr9J8Yp65ZB/QzRs4c9KAjlij2q0x4iFrVB+ia8pxLQqARpNybh8uyvfYMphz4XgsEFz+CXM3DYQclrb/tGHGAUli2lyjbeKO62sAHhePJ7Go9DQAsp/4u2kSeyLOxPP79wASDcjx3zoQsbUfEjRZbe+9O517OgG0OM+sFyPhRe9ar597wkbaiw5N/wPeZb/OHAJ49WnTTSZbMeUpAXLixFAytxMf7Zjwsu6x+W9+rhQmANXEGWid01M9W7MraYrVaFQFMKF3I2qZfpHQluWRovq9S+9N8w8AFgJIrgaKIWxDWe57GpSReeWbTJoA9SWeeac+3TQgAXSTAUBnlOQxr0MuiqNFmAY0JRejxWkJAT4FyyoakuPq4JBAxNHFEv88OpfUvj6DhCYxYpLIwtV4H9TyENtoHY02JDJ71rj1HanZ1/OCKCQw3OluL/DgfbZmJ7pIEhokJz63pLC9i8/ak04/81wHwBaEAOgCEblpj5yc95K1Ce+RFo4AjGybBkL4eW0+9OzJrT7Rnj3zPvyc5sswqkfORY1RHNVDfzUw8oppzx6XmlhtcdkJOzPKEog8LB4Pue4q5df7itpMc6mYjwTmZJvn3jEnVz2tKds0Z7K9qxrO8hxmojQiC0w239j4Lr1+t+ZPD4SQhuSwVEkt9admz17aQUxT7jda43wAQpZ0J/KJ6Vq3kvT2d5uzpAd6rb7Txude8D0CdPWcDkBG4GNmbbPm7SnEnCQYANrkmedSD3RImgVA9JfMpwEUwVqILmtJXJprpcqQHbw9gotYYx6nLA9Tj9HmplgyIpZlgkX30fnNucZU2gYiMklh0x+xmghSZMQVsQEnlKtF+gUCybzLp26p57J5V51ibHy52wEfd7odumlA8nhYcDgsoAgqtKCSatmSRluMdk+yA7SbztOpRiOpi0mVLqZTJzwsHmsE3lpSuxwiczBLUgp67OLq4lGTM22p2rHGDEyqRObEpCukISHb1tcnysQeipSWyh6zQzt39pLaETrXk1QTPSJvYpkBDCHC5fuKGZcVLxnbPAepf/qaDzpHZjlWKEyr3TzMas8BdmSmIBlr6ZS9h9HWAIjJEHYYGc4AjRiXPQBNY3rl+/N6+TUr+pFkKOnYIpthbvuAm8C8dVhAWhECIq8O1TzrrcXDe5f/ImtRHEtqKpOoJcqGlk3UHdJc0jwscy2imk6uW61JMdnwLZMiTTQsxkBqG3vzTSPJmPGcNJqT/+JdA0RIc+kdiqlbSv7qyu9Uh/48af5GgTIoIMcaIw4Z5AEBvkxZX9ZSAsbizbBRzZZ33C+c2Kv1tOafNCuUQhS57gFAa+7G651pd/orNeKP4ImBPQsJvZb34/+Adm/UwPleYKQd8/KpbUnt3mjtswk1csgzn8fB4hzWxeGYzZMeQmzGIoAyPtU2boBG3IzLBkwYwIO/L3/tVkOCtjwDLDPAkI+RY6hz9IhYhAXHEME1k2Xu0nsy3xOpNSm9a3zTC7Hri9BdQ/Jm0k0DEo2shU8yPNrCOcP8tHXlSFr53BOpf13aIF9FnyJY0UYcgENIEsSVfPKCA1LMjyKBiIjZjMthRTX1ItrYqefFMWZwEzMYiWhNuIBWhSzrk+9okcU4SkZ98Y4LMMuhFZ+frhFpQITqtyWKVgx7ybT3d2w9X4cuWe29f09Tkursg2BgThNSj3VtiBW+Vj03YStM46f3jhrNgBLAHlN7nsP/Sr8GIM2vwiw7FDDCzbVIcnrwrtiZL1lgQNmdU13puWTtEZHDiDsAwSG6iGykicIsxYgQiBmiEJ4pClBiGpDXiHS+OOkW8y2U95A0kl5a9uAaI6uZpkkQQtdngQj93tRbfMlQnNk9SWMXn6MlxWzcn5dPn+3ofeqq4GOxPRkBj
Jk5OiI5l2fu00Az/aXjV8IXrinHNHQaUJwiigJGfbW4Yu42WiNmyNLQ0TVZv6TZ50kTLhoPPs+54gNR/OATiKWAlTTfECyYKrQZBhCx/EBkOyUQ4eUkSOT3ae8sSRN2NRqVUNcPoM496rdKaf6GfL6Uo3XfOTMn2N35/U7n0k0DkjVEOMYk8vC8JOnZqHZ3MIva8wAk57hcRw1FeEhq7kPA8XHB4pJN+MPiSnjCtw4erx8SaHnnYclMUmKWfH5uYnSI+Ylq5K1Y2pP/xlYrQj4jxYk9O6m/dwrlbw37m/1CYhuadNsnOuMItDkMqJ/kJq1JEbV+HjPc9XiUDEwzd+B1j4i3ObV3YcdbgDJimrXre5y05d+eOYfaL4pJgmRAZ5Mgko8DgCqN9q1N/eLdJmqTTq1+gWgWlMyMJfpHZlpvHaq/CGlG3jr4ojF561DNuYixXlxOZOr5HE3PoqdT60KkXAcRq0slTtkEyzshuY05qIRPzNN7JJHMczR47leSQpquMSIPR3XOatH9APHtBZjiQM0qw4UEdF5KmOn+h8UXiTOZbBHAI7t93ncaRSC/M5mbAsfginaJ+ghA0ZSQZDxEjzWmdWkJSUNM78nDIGsgrAfer0FT5pZsvnIzuHRu29ZznI6lBu5hSfP6cfFFMk8aktenqo2gvuNaE16nPCfJuibXCK2f2jrS935d9uG8R/n0/VPUqTwec79KLV6pv9N/UqCngY4e4ODl2ndof0uzLMuqw+o/DejJcTXSmngHBDamltj6mfDcVDIPinznN5XC+6AhuQO1Md00IJEkNw85eaVNu2R2eFmtTn79lJmsyuysm/uKlDMTVc0jzrx1WIqt5gEkdUUxB+EZY7mmJBTtR9aIxK15CIERAh9kGhJiq1JObdtuKL1+luZIVhkNwBRmVdwrv58m5beuSQZdluttbJz4u/eZaIAYac28ywIpe6Sn/LgHRPg7bkGZU6Wpo5jzaruckLYJCWs6ZwMJqz9rX7W5Iqx5uldTsjV1Q9aIZC1IZqrJqT398zlpWmIG6a9H+uvQmlZKqiaVLhsIpXnpXOqv6BOz/bA4YE0bPiUteyiaABQNSfleTGMCoGpNZvtDmGkAW01FD3zQb83cqviGiHC+EoRQazmgAyi6Ue4/lv8haU3T2EmmJD4zQh4hroB3JdQ7ScbXuGXQrXwicn3Sjq2gEZeClM38FnNVap9mQOOIQmRjZ8m+AnnaHQPgF+BI25mvAifK+VHNeFNULgCggCf0LpbplnxveW12H9ZImuFQCH4gBa04BmSBQKwMN4s+dWJO296NNBkt+NgLPLpld4CRUVj1bTvyd+EgXGhNAPby3j6XgEk+dT2sfqefwXTTgGQNAU5BulKFa+V/mDGXseoFUEIBEyOYpEkOTzkZGE3+45oW8wfvcFwXPCwe74QkjSXtyeKS9JY0IyV5ltu2kda2AkhAmpEkNVwL8IgbO/Xi2F40KhWYcDWx7BNiGJe8cNEmxRlGrZ9nwI087mVx5toPTTPA6xlJNzXmdqsV6TPTiapNulantclq7dqCsnmTjlmp6l6JKhHl0KB3IcamOGMHkj7q48iilnlpNXFa2XOIvzMxxsRYkyT/YcmgJIf69Q548EkrkqSrObOzwoxzCjEiOsBlLQl8TMc+m3T5xG6HzDQ/LOm9yeGWmGwy4YIHHsAYJM9/MMZO9LmV20NKiYFqZkXlNLv6DXAR11RAkgFIwVOuAg96tj4M00m+xjnnEGMCHBTZKPhs+uYdfEySXRqXNCb5WoVTaOayJhwZhc+2+3c/V2b55aTf9VvReQkWZfkRqUx2qFqShbScWfjAk3pyPxO5t3LTZf63PEsR2ozWXo2s/ZkLColIs4MDmFYgr4uh7esezUe7su+fFfrsWTdHea14OYpExvkSPqa51iT4WOZzoGAbfBxGspjkJlv1/c5cou90p1sHJLEAEi4d4ZuNxQReKoHhk3rNGxyFEyW18btPJyze4+3HB
e88LikaV17sX5+WAlK4U20TYlRs5kS0ZkVUcwYZWpRACI8CUjUlKKZax7BdZOn9tD7raaEejXst0qX/FXBsz42dPAE0THl91vb5a0gMJ1GyU7clVDxs6lbTxseUDk4sUFSP+xoeDZxpkY3Su7eMJ50DtpLvEUlzBQBN6FWKeMSBCYDGJn3qOUXCuo2EtFczIqmOl8qYUDStElEra0gO+ZxzKSqUh8PBU39C9SEhirGGyyQtCYKDQ9rF1/w7urSFJ02JAxCyaUlMgIg0ACHCx5QjgGtGqpWbkGwy4sNNY2x7vh4ASl4fDXR4JiW1NB8ceDjRbMeYa05cC0yvl7JjZxPBAqoWLI5s+PM4zNm5ixQ8/+UCg+IwPmB4LYBPx/Tusp+t+nq010SOfveik1kBAgA0DtoAmuhlHJzIoAoSnABb4R8/1wpwlqGAoT0e58Npy6aPw3OGaRoxTSO1qU+AgllzKU6jtd4qcw7ZIJD1p9i3ATvoS1rLW3MumktFuBbBAEsLTm7BZEvmeXqpZ96pTzcNSIC6EHJVMp3nf+U9nDiz05PATC1MpyB8KgKeToyRzoDjGBxwAnDwOSxhBE5pA1ojlanmXVLKzjcFrg2RUUAomhZQzbTkZAylfH+SpjbYviRtubHavV0M9zPj1J8AYxI6Dp7S5Ghjgx9qXSHGVq2tkAQl7Mns2Irm49lxu1mMNgx6Fw2IbMxlnM3U8HMaUV/RhtSEnJXMcfM7Md0h9qNtjcIC7wEh1likeuhdJTO5FLOiZFpEZlgEOKqvSA3JzcGIfGQgJjxGkNVLKp/9PpAc3GsdlQGv30J/Z+8TA1Ai3Yj+1V5fasMsMLI1s6rgxAIhVP9eIMLbqg0/fs7nPvVZ6xRivh5r2yhDNVBDqpNTbspbkrV5g/nczDUBRKyoYrxvm7p2aEv4d7M0H1wrla7x+23mmtdV177YPjikc945HJG7KQ9ev7jkq+OIUc1Ar2gYYtasbE2kOeij+U6kRRxs5zvtB7p2pUdzICa32ZhrvB4LNPX8XWbWo16Za5Pex9DPKWa9K2lrQ9oXfR5PZLbl3d1H4k6X000DkuMp4IS1ySYOzJtl9dTxnDQpkLyuaUvo/NMp4Om0Fm3JwTt8wdsPyUTkqIcd1UISErXJlvQETFrUD575lYMZuk97N93pusYP3Sstkn3djYjFmG5g3vxohskmamyCF+6s6IpE8MFnAOjbcfCIdkzouQh0Lkh7f94P5bgDNLgGhPqDpNrExPRMbkoLlbEl+0uOs6Oj8RMKo0KMS4oOFU1NiWUKZwFZ7dyMhtOSiJKjeommVZIh+gQCmz4mM62qGaFrDlsQQD9XOCxkZhRi2cvXXIYC7Hn2bXzuM+8dUgQp2umrm04oJpyCaxd9Wy51gAhfZ2jckLaI52WgawTGgAo8JLCQWiMNeHihydGma5URpPCsMaMR51LHUS6FNC881iULY/LSdAwRb2FJIHBFyQVDc1ojbZ3R5h2AxsSNv+csELFCpqY6WDmpjWkAS1umrSPPJWu943uGryFnH0ioxbQmlC+mCLbYXkHTUAtby59HxNuj7dO6lmXZ7L3WvqPWaawNFo2AiARImnBJ2+ekyVSvTbMCmp6QVPdtbNv6hLpOcgEYJY4lbUm6AcVXlWtLEtl7yJtIFi/33M+8U59uGpBw7Qj/R9fOIblQWGBEmpHIxWIN1QmwmnHFAlrePa4AFgABfk22z37NUtAY8iYQy4YoBe29LLB7M8DO2MFqC+mMFEj7PQtCLJvqdKwzBIC92YdQHYCpf4rksDGB4ZUVzoidG7//iEZ26hYYk0AEQJPxWgKRhiFVmBlJPHfFGhPzSOPMO5QsxxRdJTHPGcDRGCncaUTIktbeXLHOnUvW2PMKQ0mvm5zWa+Qs7xKj7PJ9xIx7EDPOmO38nJjPkcSQTGMcqs7DZ+Y6CfTr+y6+BsBYcuU+Vql2qde77oavMjqCmaUxQn0i+2HkhC6ByIz2YwaEaNd9d
Ajc2RgJ1KV+zOc8sOSQrYsDgku5E4Kr45LPa79s1zxLA7LH14bImluSej4nzdo2ACFbAQM7hoOmYPAuLWlrhNhbKgjmWhOg+jGl/iDzzAqQa7/UfYWivckQ1SHGstZwDYv82/RJPsfX3RlgMkM2ox/MMvX8yuadrYYbgZIZK43Zd7C0StUU1pv92PQ923+qJpI9j71u1dx2m3mnO5l004Dkc08r1qBoSJh2ANiqweXEL9oNUyLSl9iSFsRaaE4ZnKTzp3L9c09r8S3RfEiovdr6xJs0GwFkFOvcAnO9SFq9cpYpEv2t5mg6wy3f32IG6FqPQtaAtNFQ0l8O4rgktYZZRTFVAJAjs7T18b57hE2axD69jw0+eF9510q5pRakhlftOSN3uyr3VwUdKQiLKzHpQ6wRoYLL9sMEfjwQVjb3nGuYvxngkcpsz89oO+t530j9lty/DZPpq2bE+woynKtmVjySFkn/+ZPkqBttxiGiMNgTn6GAk3VV3t3V3CQ9Bhto5wu9e9WG1D7R/Nc4COF9AbRmaxp/LYEIkZSBOLc9B6CJJph+i/rZ/GmCA3hXQdyafJ6A7VrI6yh901lvrHl1DiCRzx5Ru94Z50V9ifFnz2N9HEIyhQsuh/HOQq4HVK1J8hUgTYOeeTw9R5yXmpBiHtu+kxadioefl0kBpUZ6RkPA1xLSEGwByDbk+OhYF86xxFus3EgAY2lHLKDUc2gfgaj07FBAyZCKsC6buC1AWfkEcLnTnS6hmwYk7x1XRIQCSEqiolO7uETvSsK9xvmK2WNz8yr6zf8SaYuQtmjwuh4PKZqWJiF5OgW885ijcsWUv8SfdA2BRVbIQUt1rqnNtfeV79YDHPK3xnjLcJYaACHzAMl013Op/tm+Ke/pBSgjBickp2F4V7RH5KSdntM6btdzuR3Mlpo/RyPN30UDXtWZ2HY+5n1la0Ss59p9VsCsi1U5xBiXYlvuSeM0ltoXe2O/FQZcS5NJ59I/AUby+CoRtkpfthoBR4w3UHxHUvv3MY7AeVLCxbXaAEne1XFLzDYHJVp9nLmm+2is8HGjRcaSGem5OZYEIRbwmCUORngf9MCIfNf0nsADRFAA501Guhetis+1UYb5PWBfPk+S5fzaAx6mpppHYQsCvPqYNSgsAaVrNaNl3iMJJQACLaRZds1YtwJgWO+mJg70LbApjvS+BSQ42Yy3BTDW0Ibn18pov/evUy0wkcLKkTao164ezQQDqNdqG3lb1GiSQlNSqa4xMofKm0x3k603k24akKwBiCQ1ibFk/w4hIrJJ4bJddgUliZIDeQTClvnWzLP0NlQGa6Q+JjMuAEWjQiS1NY+LR1WZjwdyL5659k7yd2+y9BjYHggBuOlH+j3rA0H2/akOuelXYEJ1atRIQdlmmn7HYjZQpP+IAFxx3k7n8jOzc2cix0y98vfPUYCstkhn+54ZmgQhpR8Z6LCZpVq+1Mm+x4htpHFM7zHaXELkUlK9rNTc0fGeDd+SIMp5o/7Lkn/yDTks1STOs75PwGMbOYtTjBGRaUl4q0Osv1NkqMRQR3aNuoiuN/0x0dfAGAACW+0hB7QtcG1BLNeM0FvOYLGAuAElFria4VcC66dUVxsCmPc1J9IokQ9OO7+ZhkADtGx9obo0LUjP2T9VfRk40zRe/FrTZmNtdtj2j5zbtdLYmqQyp3cCJgWw5DWTgxZaizkwDuwZTSCRTSJADrLqOkLlpCP94iowoYSXa4iNn0jar/0Uc87vkaQJUDTqaWnbNrX5u2bAiGyf9FNsLRtsyD5jDqaBpoaYpqTsey4DUlZ/vAFAcqc3k24akDydAqJLeTfWU8CaJSZhDQmU5EnifAUkS8sZlkNLI1KjCumTWd4nJSBEmjbh4JOG5PHQZpldvCvARaPe4qjRNZH5SAsCbM2PACbNFyCEmCW6TpLaZEYj7u1I/TWSEpsQADKzInOEdD42JgpActbmWpO08S5tfVFIWZatZJx/Kgk86
PoIfKR+qSFYNfBhRT2SbbD7Kh/E2k6K6ESM5RpbCSjPaUNEjDWXbHKaHYtW6N+2jAQjSdL3ePAlBPfic6LDXKaE+s3JD1PYX5/7TU90GCLgsoN6CSgGnSEOsYKNNcQcmjubucXEpK9BMNhlHGYTFQby+LGkxCDqknbqKk27Jk3WFlcBGoERAmfAvHmWBCDlvSI/VwGGpEZLwu9hSRIJ4AGpf0vGaIoSQP3iAM+CbwB2P0qyfDY4ABnNuVIX+z3z+AJYBbjrCcWs6a2db+rgpkx5XSSNCYDGZJNMusqt9DK5i1XTq+nytZzUsJAj/XENm/DDFLp+xoxpdN6imb1W3xP7qrKTaLNmFi01FZoAR4KRGVPzLeDYb74VVtqb0nPL/HgfNA/n0ClEM/T3cz7zTn26aUBCxDeamJlHAiMxa0DgU7Sb4GIyPchSRtKSzJj+7AUCnLQNRYY85fHUXwsNirzP8uuwys/SbL1W2zhAkI7nFhihY36PFXZUMgJdYo6UZEtNeSAW54qWhCSqiWgT5jJwBysZnbSdbVmgbb9oQIR+azbq9jldC0IMJdXJySksShxo3zSb8BoMoS1H/jZaHeeaP2hmDRYDQMCkyVhfNHRb5pvMloCx5gjIAMIlRs05p76rphlJGdvzucjqirzctrKZYBMESuQ5op6f1R5fhz0kgYgEIfK1NPAhr0kgkq7FVtgAacLW1kWSfYtkf2x8S4QwhM85YDzvvLMfTu/pXHpX2fpL9p62LbVerjEJsSZDTOaYGtBlLZLtUYXzs+X5m7YoeKmNzRpqCieetCcUtlcXBPaTrJ5Dlqlo+1sP6U5k8QIW9YBIOp4DI73na88zzdM9aUZiaxIY8155pzudSZ8XgMR7V9Y3bm5B4MR7VO1JiFgOHi44LAefsIqv0WzkZkMTjhgcfg2wNST8HkulK+OWp39rkfZq9xA9da7x9syStqBbPiMjzYhmkmQBEe5gC2xDHnvf5mIZbf6cfDMW0nFElawmrXOKQsM1JMnuvJXQkRYFQNGaHDDwnWgkrbU/6LcVCetcqax3W9Ch9U9vaPD+oZDQ72WN4xojXp+SRpKSbB5D9d06hq1mRDqlWiSZhjS2io0AO7fd8DkQeTx4NUjEq8XjYaEQ28BbB98kQixaAdFnIUuNV+Rs4RGIxDVu+jUyprn+ThqR1C8xjzHSLEUk6W/VNsXS1z3iZlvyG/Nxx8fbIsZePR5oGpH6hRIUAlDVQxrwqNqk2IALXgUHGESNBH0TYXD78EbwUbSK12GOpO+GBkSK5ojlYNlSe5JPh2quxLQsop8sktett+7Vw0FJOlFLU6Suph/kN/DVzOyBImuxIjwiJJUHUIKH8PLFrw8MYIbUEgApSEEEaGSuPjYm0C1zX0FJj3Sg4Ydl9tAeoYyu3dB9Znqm2BrpAG3rjE+mcNyag+4v/EJstfvB17xnbzLdfUjeTLp5QLL4GoHGsq0m/wCK7x8zygfSMYESXqdmHiLBRk/S0AMjNMEltZLgNkKIpgLWnj9ayKy6eNt6NGOmRTQLRrikmoMRYiiqqVOtlzPeG+Fb2czS34jYKEAO3iHCpYSU2Q/Ew+G4pgLL4lCy2uWoUtyemnx7gNZWWpImqbY0IPLdU9l5LUifEdLbJYdKlWy3zAMxyMdQTbQoKhmZItF9vUzO/K9FUhNC9uCyDP2lY4pi95iBBuUNSeZaCYx4x/JsMEacGErtU5L2wsORorW8m+AbW/OrDDxirOUJjIRYyxZmPNR7St/FeTMjIpkEk465BrKUFS88w3DFrB3iv4E5EEJgF9C1G2ZwDqMLNLyx16xzL40EAWU+Dh5ZFLP53RwcImJz7lzaczufb9x3hY8f0ppw0sIJ0z1tqOpcH/M7Aeq4Lr54vOVirSUNNYUdpjDO5PdF78H/Anx/pMhafWDSS1arHWu0x1TMLmtH/Jr5PSJLA5Jo6+g+JD42Qj8wx53u1KObBiSPBw+fGZGnU8ARgKMNL0t1n
VCTLwcP513ZVMm/hBw6aZI+HrYaCkvTAbSLgqZJ6CXAk79H9qfyOVob+qH/tgDFcrDjzndU1wiMnUtk056O8zlhUsMfaW3+BYgU5jCBgIhYEtSRr8TCgcmCEnHmIdtekUOsdIIn0hhGyzRGOubTtRmHdI0JSufngUhps8IwhthqRlotSCjnXp8CQqz22/V4rBWZBSM9BoBr7xbxj4DIg0/g463DkkAI04yQBo6ASgLMW81SQMw5MBhjFZFzYIiyBDhY366xhqut4Cz1+RqqFPcYAgLZwxegV4Eg1aV1nRXqWpoBlvIKKO4RPZO5ZyRiYIGbZ2m+HjSmkh9W9QFI74XynpvQ5J2hos2nB24rGVyzfsg5pJF8nByre9e6GOeCAdTn177T6P2UrkpwQqRtU7qpF7L2hZ3wFZhopmFrzJYLa8xCAKNteXBqe9cWnOj7qqX5PxjrUH/fTcfS+Zy3T3NCt9bJS8GHRnwvl+dbYKJrS8hkXPIE9ffFTbzTz1C6aUDy9qOHf0xZXB8PHu8+rVhDxBOqtiSI2UFghJzcJRghIEIOsZq2RFuoeoBEO9de65tnzVDPptRa1GrbKbHTVkLE/VWeA4QAiqRWSDYd2nwI6do2J8KGHEqytLSPuuwEDEQWuhccmORwt9yxk/JwAKhx+oXpgdZ+oAUe/J00p3SNcbIAiPZoVcLPPj0HIvSbzpHZEGeOX59WrDFdO4aI0xoLEOHmWcfOeJvZQPl84sfSV0oCEAAFhKR8IhWYeIfGZIvMsyjnCGVld66Gti0GYnmMECgpCQ5zU6rGI5dH/Z2Y8epHQow6MeknwaBzIMKDLbRM+rALBfCo5/j441rIet1iIJMZkctzhgmvNz4fMwCEzP/ofQH+zrVfm3Cx7Dl8rFOABwKb6X0oCEbcAHpaP2Q9/Dl0mr4pjTvZ9dbqF1H76twlkmuT+LOlSRsHBTO+j9ek0fMWkeuJm3u1pnbK+pnHC/n/pBwozMcz+/H5fFzX74gQ07mexYK2R/NjLWEvv269u5bLawm2UIbvz5YGmd9zrjO0ZoUhNdDyWc17Zb5gDa3QkguOtD47daJ9vSkUwsubbN1CBvv3m24akNCGQxqSx0ONFHFcA1yIGzMuAiLOu7KxSfBBwIQmnCWllcc96a7UfmgL515mXy6wdaGpEYpG958DMBqNCbM7TgtXLRciUi76shElbmZZKjAASHrWSuE82sywMeIsf7mUJq0FL4kBqeEkW8EdFYzFJIFvkEUqbXTvbFZlaY5FREcaA6U5pXPquR1oWhFuMsQZZNKGEGNMYGRljGVjntXZTHvU2xBVhoBvfhnEUTjfypzWsL5FG5LBCJUH0PqNdIBc4BrWaJQp/bkFIun90nUORqgfLTBCxLtmLj/F9hz3U9pLxGjz9nDHc3q3CFRfonX7PgRa67XWJJCDkvSc7bvWd0h1PWRH52Q2GeBj8gukwBXwaWyNAnVwkOVcfx5pyxCBEAlKevXQfanOrX/NtuwWjGi/Jb00YOnRFpRUasf8uMxZz+/suQex3jTrj3Nz86dj2NDTdGjAZCZimEWaMJHXJdddqengx/VvG4mL8xuHzjp+pzvtoZsGJF/wzgMObz2UCU2ZWylRIj8mkqADaM2zaHI9Mpv0x0MbO0nTmmh2mUSW6ldSTxWs/ZZqYa3MHqJFh9dDKlo6Vk27mHnEGoAmHfCamaSyWNdNyUeAbGJ8jEi5A1KpEByWBdkxOLFEXGoL2GYRroCYyJtXgEmVhGdQkn8n3xKkQAZrwLLUVwnBDTUjRBx0bM65uuml3qA217IjEDLCkFKCTee45JWbZwFVck3aEPobQsTrtYKU45rue1pDs4n2xl0P0MvfKogvGiRm3ubQaEOKj0gO5UuakRa0tA7sMvu42ocMbAD7fCWIsargY6sB2YSfFlqRPQynpRmRvkoj5ormVf2kcXOdzkjNGr0vmfw1AQ9CNfHj/VI1bDYzSu3mY+BhCVUbFj0z3/KAT
/UuLvmLwVgreN/JIWy1pAwNAjFIwDLV35oY9YQIvXDGRFzbBIzHAxH1xaw9f89KeC+o6QEPTnzcp9+tOR/XoPF5QUIQHgK49zxNKwDYIESuN7IPikxKAdE0hp8UiZW2n5Oj+GuWkHmPpllbL3uCxoPSFz2/l3bPbzUm8nnE5xxPY2Ho+00UROSln3mnPt00IOGLyBpqXGn6rU1oTRsiFyZe7vGwFMBCpGlN6BkjzccejYSmZh4tHvxdtfKXmF31/EeqI2MFJcXxO6IJh0vhllPTqjYESKZRPNEWbQSaZLLUB1045VybpIlrSyozUTUlQA2LyZm2fijMLUlfEOD5gciWmYrqNc4s88hOkokMEUUrwjf/ozA30CR6RLOmfhZA4YwBUKOtEdBotCG+RmvjmhHSRD0sfmP6R2BEjh2Z2EuGq5VARDKTkoksfSaACD+XjveBEU6zmpEmoagyaWieaWBfAlpqo8yfIsEtAKZhC4XBnAmCUNqd19xjSN81+AjAI0WnqmtJDUndWzG2NAIjlhaPyvI+o6KkNdnzXAlGmmsd7VF6ntuU7REfK1yhvifb/B6aHc98TvD+4cBEvW+HUMQ0BVWEHwCbX7ye8jzW7zngSY/Zl2unJtix+JcZ0gSHs/yKpTXRNCbSxPuk1HenO+2hmwYkH3j1gFfvPJqahKfTupnYUiKSjn0zkThQoTCi5RqTdPfNCirJ6FMj4m4vWijMOcn0NtzhOWCE7iH1r6ohqY+sICLkGPGERNak3Qg+L/gR+VrOgL5kUwtUE4wH+CzhC9lu2OVNP2YmwDXJY3MTzEyxPbmNdyjmNg5sAQ/1Wy9KUi8iCVQaMy0BQgAbiPRI03yY1xnzCGwl9tK2n0L7ckDy3mktEm26h2sfNQ0df1/52/y70R61kskmWSbThrRakBou+oGZbGkgBNCBSOlH1oc98EHSf8thWwMfgO0f0oa6NZhOvk5JZqlo5lqndRlOmpftAezYjLfcRgG4uA9M9T1KWpMTG2fvnapWhO4n6TD9PYUtE8bHCtdePx48Vr7eoJ1Hno2pNcYSwKAIIlhZ6z0Bnfmv306f89TmEMeQSJufMuSxNYYkzQIJbiqrtX9dBVjBNspWrctt2szbTqQFLZCaEf5uR34t8OAZVWOyZyyV9lpAxAAj3qVyuqakvlN6lgOyVl/6tIz8QEYmXPJ4Rhsys99bgKUn1KznEyiRgOXu1H6nc+mmAcnbjwveetyaUxGtIRZVqCTpjCWJQAjZnwNbta3Kk7s5LYg2yenUg+cLPG1uKV77GoAlL4SrZ5IWpvKtz+/7ksyCk12gJD220HHlgMwhrDWCFdZko5+YrGReUYBFdEBIQKRYgIXc5kiWXrEwBR5ZOrltAt06RaQ9cUjMAr1nL1yqJN6vl4CQnnR1I9FVAEgqxzf2rdkQl2aHqJhs5Y2fTLQ0EwP57nK88D5pBAHMNltKJCUIIe0HMdfnmmXxrONt/+X+QgtEOHCQzLhlgpXum9N8qGHKxalGKBsirOhaQAtYZGLS3jlO2qiTDDMxg5pmreSqEWNIAtrXgolcQzDXlDRufBlXT6eAtx8XhJjW5/dOGajmtfBh8VjWWDRkR4RsoukKMKE5Lt8T0H025FLKzaG8hwpURmuFJnCS5llaoAONKPy9JPm9yxqpaLmtUMqWBuUcIJLKRPP9uCaEg1oCIzyq39Na16GekGSGCIxITSz1gZx2SVBFP5AnrhAS5PXw4F3JHcb3Tk2YqmlOJEnAoP3VyEqqPAtMNK2LJRB+k6kvzH2+Z96pTzcNSBZvhOfNjEmIEW8/6hIGfo9uWpN+E0NEZZtY7UqbtA1fI5m1N2U9re3k5k7tjbGoA9ZoqVO5JIUMlWw6R8IyusazueYwVelZrmZ7TZSQXdKKxBzNKkfb8XRc7ydQ4tD6lPAqOTCRtMMKZgNKZsgCI5KoOou37IUBle8gQUi6r83zwJllTUMiEx2GWG2ii9kW2/Dl8zSSpo1cE
pnefWsaYQGRdE13Uh8BEQuESOJjhvqfNCJrBm89IGJFx+LO2/X7dJvStiva40QjaaolE3Cmc9v7rDndSGkzGOM5UyiZIzGS5VzExufo6RQaXz8gabLpObY0NuLxkLTH1MaSDG9NQpzjmoQai3P5uJqA+piEHQ8LENaoAoUeCGnOKZqFdaW+r3kYOEixQufyZ8jrPW3bHuLBQZrnrErI3fL9O88J48Go9ZemFeHnORjh44nACAe3AMqY4mtTj4kfMesWSTAik5KuMY+BgXmeZOb1/XrL5Gv7j2YGrv3l5UbBHXrt1ii1/6wq73QnlW4akDwePN46LI0Gw7sabQdoHdD44sYnmRYFqV4DcxB1RTMimQxZZoYshiX49nwqSxIi8nmIAHwCMsYD60LWZrwGdOaDO7RbNqTEkB6URVLWuSLfG6ndQMi+JQQmyIQLCEVTwqPmtMeVoaAIaTEmEy7aGFx2YAV0Jk7uUyMbb96Xe2i29KY9GgBhxxJ0AHOSVTovGWcyjUj5RdqN/z0mweYbv2yHJM1ki4MRDjqA1idEakMoVC9Qw/hS2N5kSrEFIHLuWziENCGkHYlsPqacIdVpu5qHkDYpQkaMmo0WNSJtPRqBEu60Pj7X1s1JY5KsML5cM8IjsZGG5PUpFKbxcxSS/RSKJmTGzIbGEl9vEjhZNpJanpm9SKi5hNs7rHErhNr0gZg/wFaTNTLb9BkoNT475VtygGc2o2vStJ/afao8QwjGSnhzA7wubn5MW5na5bpkmWcBW+EJF45wcMvHhmYVMMpBIkmOH0BoHxkoKaGJiTmASz5OwQgfnIWEGqiYpREg4fOmd++lxAPhpN/njM2Xp7uG5M2kmwYkJGHl0lXKR8AzggNomYYSFUqpU9vAN4tTWnQApKzepT2ynF1vzUKLUhdPhBDi1j6XFrxZiUyPzlmUOOCw8pOM1MZrAHyOVb9JMBdaTUkCGVVrwmPS8/pjJBOuNkzpaP5bjt/PRXt4U1n0HCCSrvVNI6rzKP2tmhF6rqYd0UiTxjWbJDPR8gw0cDBCQJP7hJCEn4MR0oYUhtOT2Z7bABEv4GFjauME40QgBbVPuUaAO2pzMHISkbSo7yT1gEX1PSDpK8vBYJB37Xqjm9yYt6tkadrq761mBODMJguaEOS/LbjlZjZdoMuupXpa4LSGJMxJ7d06uvMofj3qmd3x69p9ZU0L9Q+t/WQSZQELvkdo81beM+skDjBgy+5PAI2u6++WzLWiWV4jCbK0xJfyHaRWZG3m2FZTa2lEZLLBui9Vf4fnoGINsOceBrIPTNhH14h04WEfkFCdWvlzSRc0+c2508VPutPPVLp5QNKYb2Qg8tbiG2dXIm1zkTRjbmVtTrti/Xu32fiKVZKrYRNDqOCnmIqx68BW/WurZrkkoy7cs5nhJc2AEvlobvIWYpYoeRS768JMhIgjskSOuLiw1ZRwm3DqEuf4N7HbP9JOWKNkJOlYvBtCRZ1ZtaWwPdMR3V9Bvyal+VvTrWqf3WMQNVtivhFuNklXHUa5czqAEhmLzkmzrLcoPPfiCwiR2hCa5h7OTIIX8hjxqGY1IYPZGGv/hFg1I0naX4EImbURCJH27bOaEVUrK/qTiIOSGU0sN4HTsppb7ZAUxPhbGaAtAI31RfVDymZap1Ck2KQZIW2blG5LxlKuJfwa//v6FPDq4PG0JonzEgCcsnBqre/S05DIXFXXAALN9xKaEk6cwfexZWp5uGgibTrO7GdqABZer+iP0gd8LWW+MUdRv9UWy3eEz5VTs/Zs1yUCItx/TQO5wHZdbZn5bTxGDnIXF4HgQJHbECIzrXZ65xvvDaQ1KcRWW1H3y3Z/fDoFPKIF5T1/mJ7wRytzLvWBTzWXl+ZhbzrdNSRvJt00IEn253WzrUxNBSqvRMjensPdLKDQFtmeo6laR4gMWKRkgT7SxshAiIhrPyIOSqwwfGmRPg+ESGnN3kzuyVemBR+08HONkM+M4xpRcpTAc
60Jr7SCAAc0kXRm1gBtPz8XjMyWmQEf/Lxm5qGBED22f61LSrR5pKjU9ioVnwFeM9eTaVX1F/GOa0xq7hBuqtVkWc/RshafTLO4VjRpRloQorUq5msEPjQiwLIS0xQqoOO+NeWvwkBZEfJS21jLvNaIFpjAt/dk7K6S7idif5+R4EUGQ6BzxFA2oVkFk8nHTwUdbW4oAilAyxxoJlucQWtNtxKD2UrKs39ejuq3OJeApAMeUBnp4Ko2g5zBR3OMz5sR1eAcMfuVDMg7sQbE6Wd2Tbi6yzwfA+zduW+J18uUZ3fapZlsaeZZGqjnEbSqqV9sxg1F0QT2MJhtZCgJgLX34/6cmpBjRHwdlHukZPI1mjG7usT8a0R6e/UEzwfvEG8ImNzpzaKbBiRElI+gMjW+gpJOZvRZxlGaxhCVKkXdM4u0zPhdQJF3ZYOsjCY2C3YQGz+9z4x0ZUS9Ba3nJDezEErJMDGnTT3MvGBj6pY3iPItmLO7bEeMc74ce8HHSPo9Ylg0wCHr1TZzXl4Dxe31Wl7G9Oe5ROhcYPVa7R85RcrIddxvhDuvE0DxLo13+tvkEFlaIYPP2hDSipBGhPuL1OnImfj2XehnjARAKoiIMfuN5DDHIUbmL9JqSFofktgAOYsa86tQAZXldCzJ1IgoF2S0rXp++JimvdqxLCOnibYmSU1IKdsBI0R7pK8hbse0J8F4qABBAoBy/455xMsBLchrNAtMq6WRdxUE8Oce1yGMGTLEIS8QWvelcL4kBJLvkPuBgZO90VylZkf2oQZEaC4dg9SCRKEZCZuxMyNASWWSuZ80i1rzXnJcgeBTqGiEiBXV13GNWwuF9D7t88kSoMw9z8Yxi/xJSZdTFvRt5MKRdqR3bpbmn9HyUvwvX/vDebLOO93ptgGJR84XksEISVX5Xxmyl0w9YsRGj66tZ2uMlVHJZhxkk646hcYcFUqh5DexZSZTuxyAWBY8btO/ikWbq/K1zX6UwV0uxJzOVftaUh9pstO+L7t/xnSEHEU4hMgRuFpmz27fXoCmgQ9Lm6Fdt+ug+22wMS6ntUe/xjfMhslSNCPp93Zc9MwB1W/uq0N61WJus6u/dfA5iWEyt/QeeOuwFK0IByIcmGgghA+jGFsTwdJHJadImm+UY+QUYgEj3DGb/kogIqW4st80aSgP9LA4x0XprIXzgTG0eWOdmwEjvfFexiLTlPD7pP+MBTSkI7JWRqNeqPbyLB/haU3IzCTgsUSUEOPBIWtl5bu372sFKdDNLVumn7fPE/hke1C55qtfRnCt5uAoFpdZ8zqtvKZhX5zbAJYaIYyDK/OxKo3WIwuESI0IYJtn7QUjVE5qRl7nSG2vDr5EbXtcfH7p0PYhmXl2hJtaM6iOx4PHGmLKoyPanfbsxZwTM/uwNSfOOafVp+3xmq/gEW8+rTGglxLhuZ55pz7dNCABtos0LaR886V5RGAEaMOB0jBZXGvOQZvMyFxKMj1y46dxzyUrWp0rW7RXtumT1I9v+NzGfw8Y6dEeNbDmMGeVt/JNkHbEKxs1p2Es/4ASYlM6FxazjIk+sDQfG4BhhOm06hlpO+Q9MyCkOdeR3HIGUWeulOd3NqgegJXH3KxKj+PfMs+LQ8kbAaQ56rI5lkc7f9t6tmBEoxArwxVjRKBzkTQm3IGdO2i3WkpNQ2kx3xawI1Cyy+9sQLN1zfjJEQUxPrZzYczMzDJT0nxGAx5tvX2EJUFCiGkc1XVie4+cU3ROBirQ7m3OsXfZrituUw5A49fEBQP0zq350PnrWVkT83XZF96J77pT4m2tR1IoYoERC2jMahBmc23wvm2OiwAsAVueb8S7nuZ8e27xwKpo7y3izu1a2T2he6nsKBhJj3rzTwMj2j13utMs3TQgkRtrikFffUgSs5uveWzyEZRr+XyIWVoKkq62zOTGdItJNVWntqKurfcFseFVxmebnK4njeX/evHXR9KPGfVvTxoiy3ONCIDGd4BLCItfA
TPdAbDJOcH7UjNNKcyF8k3o+l6aBRxWWMu95VLZ7fOsvBVme5Tyans0EGJoRoAxc7kZDwyAkoaSf3P63vRNuQM73esd6j+kOZuASasd4Uy4/NQhpjhqBXAgMT+kGSGwQXPrvVNNCsk1IyFus9YD2zwIgL75cyaCMz4Ns8qYH/kOmjZRfgYrutZMYj41R8UZ82bzfIMxmXHClaDk4B0eD2lNT399c03VmsQIIOUkKeHFs6O7DHdLZGlEevNJI+rTY+CZ5FP5o/Lu2twtv5kWQ0ZW3MtrqnUo36kZEzseogk2OMDSNLK9/UwmzewBlj2MN4WrbbQQub7Hgy+azJAjPNI+BswDfxJULh5Y4Iqm7CGbafJEZqP5tkeIsCrfwNLgSuJ9uMd3hZ/XxtObRkGMnZd65p36dNOAhIiiMElbXZoWjkmkOBiRczxJz/Jfdj6CLa7caZVJ5vcQ14QQGGkd/MbSWLl4n6MRGdGMNESW5UAknW81IlRWghGeL2DTDuUcZ6S4uda1tLA9rceMudVeLYcsqzFAm3YEeW1bb1tee5fzx40FRGVyUfIbKdcZGNmATlaWtCMUypeH7x1teQRGym9k00uhGSlzjeZZ2GpGCnDJEcg4eOPz0KKexHYxwi+Fll9RtYezc2VEM9L2S4mvG1xSzc1otLI9wMFt12eofU/7Pk0jQud5PV0Xj17QglSJaFv7DE41Upb9uFmGVq1DnNS1On2yANUIiNC91n7GtSISkHC6RAvA32GjBZYJfoHm247ASZNImWuahB1cr569yQcJ+ISY1pc12omTOa3KOrVHYEnHt2Cydac3k24akMi5QsBkxhQiRGaylecnSU4BtAxJIGZlu/Cvq84EaGZbKwMftOnx5GokldU0JDw5HS2+bZSaOU5cS36Yzm+PpVmWphmRmpBybGhD6JzMEyMZ1KZ8YWZZ/4o+P1eia5tp5esToIOXo2/ZlN8JNIA+2NDul3VY0Z4241fZnEZaEnlOA6H0u4BOPh6EhN97t/medD0LuRFiToCZmTpq4cZpvZyvWpEY6XfEMSQt6Bpan5FjiHh9Woum0tKM0FwE+gn9ZD/1zDA08mweXIukaaNFUhMstbpNO331Q2iECl6uGTUZ3OPBF2m0ZmLKgQbVQfcByRGYn2/KFRNNlHxHTchWX99J00aVPmDS/aZPZsCIvO7be8k5mtNRseElYc7s2tYDEb3ACeS3WMrOhnQs9/P1TReASA2sBua32g89nK9m3ifJ0uLXcaNz+RIsA9j4fYWVmb2hzVau9bF6bml/6yG59XfqtRtAk1g5xIgHOKyhambIeV8KMWfXpi4gcdu1/E53mqWbBiR8AwLywhjyRur16EkxxqIlKZsO2izN6VrryM7ByMZJlm00AKoJkSIl4xmy6XjE/GjSI7lg8/ehvpF9lcqEzWKsaT00B9LmWEjCNRACgDk1tyCEm2Bx05I9IGREPWdytXwHfPB7R+DDYmKALaMjnzdqf6qj8w4ScAw0IDNaERO4DoBIOhbA1dfvvw1Tqz+fz9mQk2b2+EENiKwkXIjp/NNa51gVBLSZx0NsI2pJE62eqaTWX0TSXv0crcYMpfFqm2RZQTmACkZ6Pm+cKCgHHdN9rWbVYxFrIjF+lk9aC0gSF/d48ButidbPqkaEz3G0TNhek0eN5F6gluemvvnaka3ltU1eNePjz7FIa6Nknq369jjPa4IPDXxof+X80bQi2l+gBQz0u3fcjqk2f4ZGpjZB6z8RMQ9o+1DOsx7QsO4bYoWlHbMhxhRdLicIJXPBEMGCLLBxKE2dBwKWzTmnv8ObSmuI8FfQqu195p36dNOARG4gx6z2SAx/QiXJHj0i5oR6AAr6KAwLcyaMsZpoFdON0KrxgXaDllluSRq1MrsvMs3ipiEETHrMjx5txI7fX4GHLkHqhe4D+loRTSMy0oYcWJQzDkQkCLkGAOF9zclyKq/XDQmfAkBSmXqfFUUG0MHHjA36SPo6Y07RA
yLXWBj3gBGi0aZKggQAbBNN4MPHNIcBpGhJUa+MgAjAGJky76rGM4GPnPSw/K1ghPy5yOlWmkvOgBGLNFB3LeoBDRKUeCZB1UhLwjlLi3M4IhYzvbIedYBDc39Hom2tUTNU36Udj9q3G31O6ajMaWbNWkNbBw9Qwk1ranvIsXpi3u/R3DDiQGXPuqQFO9CABAchVpn0VwcjqY39vU2e246l/t63fd+t1qB8IzbPyn4fxuZVvDoNiGggZDrHWRBj3GfG27FzAFgG5tL+olmMdt9y2oKsNx+E3OnNp5sGJACKeRaZOoXgss1nioe9+CUxKD7CRdeAD2JOOPCgOi3wQdSY4aAuGvV8ZAxsZYy05GrvnVKug/dYFuPXpxaQPJ0SutFASM/+WltUNY3HrEakFyWL8sBwkxzKxk3ZtwGogCSd3/azRaPM5akM/4btt+DleFkNdPDrJGWnujTTjhnn8WtSjyE+RyvSM4PoARGgNcfiY4NryHoUspbTedKCpESaK9I3qIEptvdG+i6omdfT+yZGiOYYgX4CIq0DO7vG8iHwxGwaozXbhz3au6mvDSMBWNqQwNbIdGKCaebjOLZmpmvUx7F3aR4sPj3DWks482mZB2pZoGmNItMtuT5t3pfWCBCT1TebGpHpD6DQLDDQfAPLuv3goIeFnn/erC9IWMdl9jpLS1NGvWzYXO+RptVoQYg3zuvgY+QzITUljdaE+cgFObd8HWdEfF2kazIUNAcfWphouX42+x/N8UAAlrLO873KlWiffslghBIVh/OBBZ8PD2esfS9NpwC4F9ZYnAZrwp1uHJDI8RTyprzGJEH1jGF0zKSAtCCUeIonOAN0hnbYlg1Du62LpLT8b4meFbABIyNtiMVs2gylvlhr5/aAEemcrmlCbM0IyvEM7Q8gYAORmdweM0CkPQ/lefb3ei461zyLyvU2fUmadO8SLUBySHdAwiY11wjQZMBu7sl/SeuZ6km/CYxIhpoEBKTp4oEkivOt0IpQ//C/16Aeg6sHdehrnCipaPIPqEz6iDY+RnHfOiiJCzxObFz1pdweGpCxzHI00kyQNL8Ofn2GZs2n6HvuMa8k6q3he+gawQosTWtvL9J8Qrb3tx2zF9iPNP/8WDNFnnmGRqsYw1Jbci2SfnZW/TyVQCnn8/zPmpFG6xES71FAiQGyJ3JyqmvWXVNyp0vopgFJcQwPwBIBLCgSz2OIePB1sj0wZpwm6JGYSiH9k0m/iKzQs0TSt4AAD2daCwgK1Tzr3eOKNcRGK0IaEe5DAmyjicjNS5P4WGYQAPBKkzYWYFGl4NR/FMaVkttRhu10zW8ACF0j8LGNpJRIrmO8++lwDWwBZWpn/u3oOzQgIeIqZlcSeHBzhZ6kUB5Lmt10z6G9wERutj3SNyRdAmgRmS8egRyKtWYHonFCmg/nBkx4rMIGoIIN8g1JWdZbrcipaEMiXq+tVoTm4jbyD/3d7tq93BgWyJN245d+9mqetQUlgG7nrpk5akIWHhEQsMFK0ka4KkVmGtsqOa8BNqhMvb8CmRE44e+srdu6f5YmuQY7J75VZxz3fHE0fxG1jh1gS7+/X/+MIMdaF0ZaEPp9DQ2IJE37MTI3tsbIXuDRu2aBEg347wUszwFugKwNITUmANp9tXkjne5nnyGP73SnPXTTgATITGpmkmlzPLokJuXxsI9K3HkNhHCQwoky/JJkduNMj8q0yky7pIFJZWJzLoGPxPS8+7RCxlvnPiS9PgD6zKQ0gdB8RfZqRCjTNpllPfgtEHlYXANCHCr4oCY4JQRnZE54DonRLO/UsXG3wMgIiEgQooOTamJB5SxJoQYceX9z0sLCNqYB4vyIrq0d6ZFmqsXPd5+VGeVkZoBs85OzI6+A9zGbHRAw2eYaIeKglb5Rzd1DJpJkstXm+uFmXAQuSRBA86791rMR7baCgpHd+qXEzbMkKCntcrUsJztqXCzXLWENJ88YszUzQZqpjBV5zDIf1cpyZ
moUwKF5VyjjXDgn6xGTutWmauKWKUvMILWtLU9S7fL7mbUj55p4zoAQLXnh6Jk9bb2m1ZemxiMt2ohB3mgFlTXwJbXb1yDv3ZaHca6AkrVoUPTxuvt5Tj9+U2kNd6f2N5FuHpCEmKWsMRYpPmkh1uhwZBOvVwcHEtKHxLskVdBsOnkd6ZnV9IPXJZlZrvmompGtVkTLM0KkSYjkor34ueg0XCsindUpaR3Q+onQsdSQPHhftCEEVrxrAUg5Nr5NjC4vlDVZJZj0W6MKMCroq9/EBiFSCyJNr6QGhB9rYUuHzJDY8JbMnFkJ9Hi5Ec2AkdnkfZokUJLFuGnnU1huIENMICQc8pClcesagcWnb5Ad14NLwOQYwObfltkD6Pu2GlDSipQEo/nc6xw0Ijmwp2/Oo2i1ZpN9ZgvofxvLTr0XIpPA/wxZ5lkclBBJcMLr4CRBPZWRzD/dZjHACwMlc1o3haHsgDjNHHJmLvL2bMBJqCHM5Zov26BpmZbmuTow0ez1eU6aRms26VvHtSTcFGdkdsWpByRmgUiv3tnvr0XC2qPVT+fS35GZHYXFLfe5bb/12qqR5uAe4pZ3qJoVksdEBk55Yb0tvKs1M+Ne2+mbXANAXKrhu9OdgM8DQLLGiJrFcKwe7i8iW8m4Zh+/eT4UaaNIAMVzhshMtKtwnLUks9o79CRF6V/NaPzq4Nn5rTYEGPuIcPBBGhECLFwbcvA8ozb5laRnOLHJc6JXjYgFrKwh+Z6sGZEs3m1i5XNH29YJF8U0B6haqx5glBuu/GazTKokvtn2mDNiYPdqK0YMxih5n/ZszkisebPkuR6K1DdGIH9nuredE0wc5/M3IpC6AkBiqkNcEzPlXDm3MCbLIhk4AuCAhIJW9IEIN8+SGaKt/uV905P00rEmtZUCgF3ffGCetSnf8eSeCfhQAAhnbEeMm9AAlPOd95xlcCwNpaWpJJJamDXE4iyv5cShNmhmiFXSzAEFA3SBTE2ZRkc4qT+4bQdp39CaA9wPwDLdsvpIXgfaftPumwnNO1oTNYEL36/onNzb6Ds9FNDS7luy7h4gKcKoJndHvo8FfxiBFIs/2AgHIuomt/nkqY6wJiFqoDDahhChPEP0swZSiFqfkcuBg9a3rrNOvyl015C8mXTzgASomzKXdFvEGVk5H4lBTcf1r0xO1Wzcg1CImkRdMrRSEttjHLXFmp9vAUld3PmiTtKk1AdVIwKgC0YWzjQ5FPMsKteYZmVmk5eXQERbt6oHQQpKUCx5jC6R2izySZBaESlJJXMdoGpSiDEFtknveiBkduOtTP42F0zvPnk8S7NghFM3q/hEGzQp4KZMZp6pfJH+5Y9dNCTOFS2K98w5cyNlrPXy8UC/CZDSfAyxalX5HLXmJr3/JWR9SylFlX13jikFgRJA9xch0hjeUeS5rdP7fL80WokOuJslnVFu19DZ7zajwdGiIrXX+S/2g2XgLbwos+uf8bUa0WxYYIu0vccGe7qw7JK5wvcrfm6zb3m+z+hARPants/X75jWAdJqUXQqDhpJmxXi2Bdkj1M3F9xwHqMAF97gAckiut/U/u9yd1K/00vSTQOSgFiYyaOp0rwGKm035tmFVwMiluS1V6fUeljhLyUIkVqRx8WbAITq0XKIcN8Qclrn5lmkIeFakcWlY6o/CUlTfbTGVcli29MBKbwrgRItIiWZ53AfAU0izoMHACjhXEkyDmAT0rX+C+L39hv1zJ8skxPNDEqCS/5XHm/6IlSJ16iN5zLWTd2IeCROOjlWidKuSPko5Cr5c5VxF4FTSaSXAMcREUtoGb4SSGLdAhFOWjQ7rinhYbbJcZ37ici5yTWa5Rmsn3m/aH0lBQH8PO8HOj7HB0cSAQhNomoBB8tUC2jNUGX5AvbZmjgztGYYf0k9ST4Hk/w3Lyd9frg50Mq+U1lPXQSCg1/qOJTCmVRuC05kn+nJ6biZ6DZ8OpXxYYoX3ZKPGyZa27u0NaKv8dgCd
Ks+jfi372nyNW0+18RzH0a5V/Eoj6U7SPjB30UB2dJkm8b0g68aYO5POEuar1ZxNF9pDaxljnhZs6dz1horgMSt0F1D8mbSTQMSLcRqc92wndUm+8wmKZmUHnjg9V5D8qoxQdqiTptqA0SYKYLmqE7XADSbrhbCt6jJXfUV0bQiZKXBwYgEIvRGEph4uBRxyega2WcaU0WbCw/lKh2X6Z/0F5AMqcb0aG3RgIb2/faev2Rz6oERa05Q+0lbojHiPKoMj70vzRPoFi4N3FIFMNJ+OsRYAkkEYgBFf0hJfglMwZgLOtbC+fLvTm3tzU/5fWfWjuZ7CjCyl2TUHvlb+ozIc7IuTryYBUTktWY+CI3lucTnkkU6+NDX2ZaShlIb1722AHVdBFrQXIkYzCzlZtoRrjVZGRjXpPpUxrv2m2hN1V5xxnSL6JxkhT3gIoU0lglq3b+2AVc0MMIjPVJ/caFZulb3JyLPNp6iMYx1XviI8q08kIUfrmhPVvHteICC9F7Y0DBPDdOGpDrb62U9vDLt3Uv2RJe7ZYBypzeDbhqQnELAe6dWfs4XPWsz23Oen+ORriwwoknMpZRJnpcbo2bSINXWAPB4WFRp7EiyxKVxfHPlkj8JROh+HvZXaka4NkRKqgisaMubQ9rGvauSbm1tixmjhFidl7mzMknByTyHfq+x+uRwf4FyLvvx9Hx4pARQ+z78O60hAkyDJcnSjPDva93LaWa8y2ujcS6Jg5KG5KYbyEGY1ZtvW5xj0sDKkAFCexLajZqPy2LSPWCutSh3o0SHIyZr86wQN9+cjumv9l2lzxa9qyUcoD4AtpL5BvAJBqf0bdCjYslX08CHVlaatFZJ/3UZEgs887/SFy+d1zXPRFVKT9+PARO6z28DMnANFv19ULhOzxnXEJPDdIw1aR2S1mRxbVzVg1D/URnLR7GhIEBQqSQJC/i9s3ujBkZ62hFgbBYq1zZ+bqPNP/jWP9G7ohHhIESL8Aig7FltN3HgnOeG30baJOl/0lC1wIRMulIdsfbzBGnf0PquSbAzVa1JKpgI27FgBSaZMSO8g5A7XZNuGpA8nQKiYloBzDFyIxrFWJfPksCiJ5G2qIINvzmvqbYlCCFTGguEaEyOJvGzkhpKKVV6PrUx8aiONDFwDRDpOWQSKOEUsw9JiMnJPeY+TAAk5ZMofxuTreq4TFoR6bhMuV44c6prS2xzLflt2u+0NZ+ytCbym5fzgw2BRy/qllPa3rtnpk6qY/FuCpjwTU9utPVRLBSlACmLc42Jl0ZUjxbpzkp0yLUjPQkw7xvtOP3Wk7Np39MKl1zuM8CI9s780iXgo1cuXd+OH97nvMwek5YeyTFL343/5t+vxzjzcf14SEDk8QCsE5nrCRgvYj1M14xvQ9eZND75JrhNX5PwpvQjY4oBgDvMN/0TU9j78m1CBCXD885lLQs1xN6jdCGFbbZ4Dhjhx9Z+9phByYP3WDzwaqm/DzloCo/uSN+Ch5kHaF7pgITMfbHU/YODk+OagOTRRRWY1Chq9n7W9mO3a1RaLfMAgzSNjAQaltbaKjtso7E+RGUNetMoKMKKl3jmnfo0GVBwnr79278dzrnm31d+5VeW6++99x5+7+/9vfjiL/5ifOADH8BHP/pR/MRP/MRZz6Ls5nxRpH8WU6lJzTjJOp4Ucx76V9uxlZ7vGexSq0GL8uPBs39L/pd+v8r/6PfbDwsel6S1IPV2kiC5nC+kgpPDkhfxLHHyrm6ID7mOXoZ1bqZFxHOJcAZpgAsLRVTmJiCbW4G0IpmxzAAjaUdog68S8WIDHKq5Fs/3QoCDwMhTOU7aktfst/zHx8LTGpoQsaNxtd2M22ADmzFQvpX9D6ibxl41fDtWw2ZMS6ZOZ+AV5l1hSukfnZf/6D76l75brZfKJcahLSP/UV01IAWa9oxo1nRHl/S2YMQKry21I0UIwK+fYcYlSfYLUPs5BPZP6b/az+0//u3lPemZLRhRneYn1
kVtPlm/NTBizckRIJ9Zt+UQ4XNR/qO5mgKAoFlzH3z7r66zlemm9dj7pBWgdZq3gQcnAUjwxNeGtr3WGN+7hkjSAmEcvJ7vqh4bQVe4Vt9TP1bwnva3ulfVfY72PZ/6WeyrtA8WU+O8j/FQ9p5pWcozPX9+28903jLD5GBEW//2/rOIr7XaMwE+T9ncNcZ7T/NB6wMnTVhxpzvtpWfRkPySX/JL8P3f//31IYf6mG/91m/FX/krfwX/3X/33+ELvuAL8LGPfQy/4Tf8BvxP/9P/tPs5a4hwgjkCWklYb6GVi6gVUcmctGFlC6xv7qFzklK5oLaL8oVYGhD+PiQ1AnjywVYTwqV6gG7zrKm4S3kGRNLCXBflGQd2eoz1BYpEBVkLElEYTpJqR9BflHwS1TwrFs2IlXGbQMTnnkgbsjbnAOSElC0AJYf3ECJi/q5RjIM1953zrjHfIDDJJVCaRJAcacuGycJYlu9jmR2E1mGVj6cRs9XPnszFeX15BX8/OqacDnzDSuZaVi2tZC4w8wfvUh4hHhEnldlu/tqGKCPjke/QDFXTHlurZR3zc5zJkj5csk4t3Cxga0fqu23PbRiGEDdlNXMgrX9G9WtaEckEWWPSInltVjMiBVQW8XHba0dhiPk6ulkX6xrKKZlrpeNkvkWVGu8cY4M2HnLflcS63pW1sbimhBpSuJBH67OSTbfoGXLOAvNR+Hpk5ttR54Qva98rJoAjzchDLvcqA7JqsgW8OixFW8819UA1DQa2CXcjW2uWnIp8caGYaR1dyNqsAHhXNO4+Jkdz7l+SvkeuOAP0nqO3nA/APOPerO1G/RtTzVWsJ8KsjGtLaBzIiHwjcyxLY3oLeIR4x5d+5p369CyA5HA44EMf+tDm/E/91E/hz/7ZP4vv/u7vxr/5b/6bAIA/9+f+HL7qq74Kf/Nv/k38y//yv7zrOacQ1ReQjBIgJnWoTrvANilcPQ7qhqWBCQrlOpPF2QIlfOGm5E8ykhaADXNDi7cWIauJosU21PJM2rO4CYkAKQRGqFxloNKiPyvQ1SQ8HIyQZiTGWHxIQkQx1aKoWkmym7UjsZpCkORWmudoZlhbzVd7rjBwpwpMJHF2ffXjkJBEG4kgY3Z6ybzofHGqDP1Qnydl7I6oBfU0ln0D8ok03xJNGKBtbI0TNr8usmW31OY52dyLdmOs5/T2te32WDNq4mBEWz80jRc/v2HMDGk1CRDScSsMOJcsIALYGg1gu1nKLhoxWqNzTV2DMamBEX4sNXQzdVrP2asZWFw11WpApVHN4pI2dwgqQ7t+kMkVkCT5YAk/OeOrZeQetZ/Mgay5sIfs+dQD8rZmZGHadw4Eqd89+ysDqgB1T6JjTmR2BVQfxSLQKGCl+vssropnil+J0xlu63x67vlgxCor+1cKgOi51j6iBR7h7e0F27DWDbm+3OlOe+lZAMkP//AP48u+7Mvw1ltv4SMf+Qi+8zu/E1/+5V+OH/iBH8DxeMQ3fMM3lLJf+ZVfiS//8i/HJz/5yd2ABOioHAfn+fUn1IWVS92ItE3LNtnQpcrb8svmGgchXGpuhenlDn08KlYpJyR3Pcc1CVIafxOmGVl8Wu6dazOuW7J0bcGj8wV0ZC0IkBYz0pSsITHVxxAQAvDeGopGhBzWX58C05wkzch7zDeEm2itIeLdp5X5kGxNsVbySSIgwrhZHlYVSNnknXNJIBlisdG2pORkckfSwMVvQzFLbZbss3o8L83SQLZ2Td5Tx2zqVzm2NYZO05qo9UOfX2Wj5BI+Jt0rITP3MGEZvNFzObgHuD/BAi2iUFsX14ZUIMKj21GdEmxYfiOWEzuwZWQ1wKL6jWjAbGNaZZfh5TTq5V+S9cyYSunPqOc1cM2FPxZg5pJ/GYKZ/mrarFGUJM/WzZEvCX1CLfxrKSO+MznH07pyBDJ3rEvj0+3bc2vM7WIO7iMLgjpvq+CsN5dH5/mcISuAx4Mv36MkOmQO7LQWtmZap
C1JJlcP3qsJeC1eOimO0kXn2PsHwPuItaw5NYKgz9dJG03JCul6j2ainO0BJj0tuLaGyraQkEcbp/L7Ujkzgmlvfdnp/3KnOxFdHZB83dd9Hf78n//z+MW/+Bfjx3/8x/Ed3/Ed+Ff/1X8Vf/fv/l18+tOfxuPjI77wC7+wuedLv/RL8elPf9qs8/Xr13j9+nX5/ZnPfAbAVmottSKcRpI0AiKyrCUZPZc0yVEvOhY3ywJaplXmCZHlAFviumV02DWhFdHASMM0lfvaZ8SYympdThqRGAmcJAd2bqZFYIQyanMwwh3YCYyM/EW4tmQDSGLEegobIMJNtojWEOF8AiLwQAiAC6n9ljkG35z595aaLl5WPpOkcOWb+mhuetr9PVMtDYDLc+l+L849w+ajOMmn8/xZNnAzq3VbhtR+uMxdwdeBrdZS5pApZQVzOxtRCxhL1Xu5RfYAEQuEzIwt1U9kEozMmGztNSWSY1fzbZBlzes7tFV7NVwhS+AnKsa65rL5uCeNJyogwjAj6u2VLWDZghLNAoHXKX9LrQitgUXwxgD74riJIzIY8UxbUgVkpBmRYMQUjrlqzSb7cHEOwWXNlJ/TOvUYdkma6eK52mtA0ZAIXoWHZS9Cq7DV0vbqGPmAjdaPN5li3O7rL/HMO/Xp6oDk1/yaX1OOf9kv+2X4uq/7OnzFV3wF/tv/9r/F22+/fVad3/md34nv+I7v2JzfYxPe3KdEDyFqJaCW5GdfLABLwzKThVYCEe8qo6IlLOyZZQEkYdbbqW2qMxsssam00MeIlEcEKBFmatl0vgdEInLUrAwwXmfAwMHI61PyIXnvRL4kNZJWCzqqtsQKdkAJ9GKIZaGiDYnOcXLOpcWM9bHz+TsIqSD/xlwqyCWxD8LkR5W0Ktme0z1Q/TMsRk6aII4k1lvQUc24+HUpvZMMS4/525h4EeOjSPpmyWeJcT1BTPhWQo5TKNJ06p/FL/lvW68UUPBvLOvlTrB8/tY22mDEop4mTGOiOBixgMgsCBlKhA3zOXms/baucS3Hdqwl7XH12Rv7DlZJPWmy2tDpJCQg6pmuXINqWOzt86z+DpF9O7YenGsSRGTlC7FAOtC3HtCAu2YFIDWHnu1zo/mfBFvtntaKTayW76emf2PrZM67OygMOvf1u4aZ3Hn36ef5VtZf+2t5QAci0qn+TnfaQ88e9vcLv/AL8Yt+0S/C3//7fx+/6lf9Kjw9PeEnf/InGy3JT/zET6g+J0R/5I/8EXz84x8vvz/zmc/gwx/+MJ5OK9xphRZuU1LVelSTFYpdTyTNMXqLR1/V3T5T1t8L1QvkhINZikrH3CSLm2ql+7zqC7JpX+wnW+ImWs1voR3RKADwMSKQ5JqeSZm6WVcQCCmgRAARAiEaIDkJbQhdI43HKcTspN5GyCKQ8u5TBSjFcf0UkiYka0mAVkMiyfsESsx+FN+ZoqTRMX1XAiP0raXJljr0mMPqjK2+ZYY4a3tvm2po275Oo01YAy8clNRHJgDHN3mBC5v76/la4GFJG+bj4rF68TwG1nqSeakNkUIMLlQAWjByDc2IpBk/kVVhXHvO6JJm7cJngMdehoz6+xS2GmsOHlPdthZQru0AtkICRSigraVkwsN/j8ALlaf8ODL8NR8TG3AXt5ovCUZmcsNIE09tbktTt9J+A6S3ZbaAneocCd9ofvCIY3soROSw8+3qRCOC9hsqy6mJLBdiA/h4VKnNfeY3GLd3LzjZA0TO6TugXYfWoGtaU3l77bgFLQlFGXzpZ96pT88OSH76p38aP/IjP4Lf/tt/O776q78aDw8P+MQnPoGPfvSjAIBPfepT+NEf/VF85CMfMet49eoVXr16tTnPNyEe5YpoDqCc4dh4poSiVxeXpHIGVYKR4iArjjUg4rM5UXkWuyY3qpkFLLHBgzLFjtQZ56vvCEXUijExGzxM7xrrAk/lKOHhGiLb1LWwtK3DOpd8A0LtzI6taFqcvHcNGHF58
yzOl5I59dXpud2U2TdTwAj93buGzQAM2Q8zoFubJ2TCNZJMj+aYpWG5ZG7yNnlNW8JyT5Bfibxv5hn0txwbYITTtRzYAR2MSNKlnC1DsfVv2L95zjBXM2Usoc7I/EqCE41x7mkwudYSsDVWXMscMiKm39zxXL83ljosGpUpoZsVSTXRzLoh5622f0pt52g+SkBiJX3VtId0PEMpP0gs9UfE4sxeTIWNeyPT0q/BDqfb6/9r0WiNPaceTr3+1DTQ5hoyCUbudKdL6OqA5A/8gT+Ab/qmb8JXfMVX4H/5X/4XfNu3fRuWZcFv+S2/BV/wBV+A3/W7fhc+/vGP44u+6IvwwQ9+EN/yLd+Cj3zkI2c7tD+dyL513UxK7ohrmWnpKmevlp1t0wxJaZw03ZGakeq4XhfwRqMitBrlOdmnoVlQwvnMEFUTAHiXgIPLZjCNdZaizo6oAASoIGTNQESaYB2zdOokNSUsxC85qdNYWEPAuzmcLzm112u6AztpRzQie2KSWjvninmWzyEp/cHjcdFyxwgH9lyu5idg8falOQ89dwco4QCjPd5GFKNrPU3AKW/4EiC0c6avKeFmIIC9cV6yGRPJ+5vf5T2prxO8TpLtJRchRrD2iWXWWY4FIyUDUKRrc5qRWbIyqfd8RVrTLTTl+L38vnPo2uYo1ph4vPC5Pa2WBiLJudxH0OIHhJC+Z/5NkZmA2reaCRYPBVzIO1BqWLpOpqS07oWIEsyjZ4qX6pBjQbw/MyFK13XtEweB1lohc41YfzkI4fOk5zsHZGm2q+DvWJBCQIqcRcAnVv/GqIf9lRqSIvCiyIxMEEbakaB8L97vgCLg86nPuZ9J9f1TX/Py9c8QfEjiY3NfaN/tec1M83gDNlspkufLgqi7D8mYrg5I/uE//If4Lb/lt+Cf/tN/ii/5ki/Bv/Kv/Cv4m3/zb+JLvuRLAAD/+X/+n8N7j49+9KN4/fo1vvEbvxF/6k/9qbOeRQvEHrMqq7yUZu+lSzdiIu/ahUSzOyeVtmVWoG2CbajIVnMyQxrDmPeHIp3SuiAyky0ORACU6FlkjpWc1Otf2nxr5vVtfhEORshfhP6Nsq4Duo+IJA5G+G8g+Y4QcSmglLxyPwOgtZdutVrbMUsS/r2SuZ4mZMa0ZvFuA0p42RltCN1vXb8W8TpHPjiJWPklz5NQQUOZN4vty9LMP0UjQk2ywMhe0lT+8lQvgtbIAZ3fNzp3TdKFQudpxvaW15hkaV7HKWTtSAgRS5MhPRco9VJ50b8aGBHX6HrLIMu/9TtrQER7do962hD6/pqGSoIY9a/oY2C77nEtMX8Mzb01JpNghDRNvXNpD8tml5Hmc+SfYgvci3ae1sQYyx5UzeKUecbK7yXp/N6zVNhLs2vJKGqcpHOAyJ3udCldHZB8z/d8T/f6W2+9hT/5J/8k/uSf/JMXP8uLxZNPivp7qxmxFs5UjoeS1Gfx2CwmNPeqzB5vj0vMEJfatvbJdYMDdKaVM02jjYiDEc10q6afkNxOLuNdUouDb6L6M+nsGpj5lQAhEpCQVkRLdEjg43XWbpC/CNd+0LUeEOEMgXOugBLvHQK20jW65ryrZlqHmhH47cfqI/Lq4PH241L8RnhEGdKISIaHtGEacYkoMSD1dzVbo+ut8/42chzXjIzMtuaInIpriDELeIzOWccWzfKhNYRwrju3MTELyFpIPh/qXHrQxoLURBogRJadChLR+RbykmRAJRDpaUa0oCDvN/jQtE+A0t87GSxLWtw+o22DpilZM1PsHYAcTMJ7bIJ4HJX6VbMXAihHxiDncyRp1jQi2vdt2jkIy9z0ge+bR86c5+ADgApA2ut2/WnbjnmuZnVHSH0aXMQamXY5OhxRfTxa8KMLTNIz2r1I+o5IIFjaBb0fSeDn2R7cjNGg9+ElQRNm/JZG91skx8+ekN53cHKnc+nZfUiemySjRRLpkdaEOzbyulqnyDBVTm+XdJj3mwlLjOrq2wXPB
xRwsifSUK9sddJrf1vlUmFi4upmuTiHsI4XQs708EWfpFA9QFJNtrZAhAAHBSTgWhEZSUtLfriyTYeTc64yq94VPxKuAeFABEjBBMg0iwDJO4+LarJFWYh5aEtNIjjTp+VbiQSQEoykMtv3P4nr/O+5ZAkEuJZFu8f6bTE48ljSiPmnc2uM8MsWwD9Arh3jjyPfwwIrbZlhtblt+nndpltnVDUgQuX5X3l8Kc0yuPJbA2MTOK2ua0fE0sy2vHdVml6Wx5RML1FsBAtHQ1BDoPNIfymUeV77iKx1+hp2/FxrPmsqx+8FbC1hW8aut/fNSm6n3NfUz94jgxDHhHBbEGkZO/A9UO5J/DeVSfV3u2NDHJik9953/wwtcCrw3JQbPHs2sp5cN+TxLVEM70PY3xvtq5ekmwckgD1BpNNePV9DDm7rqmWuRb26CvPo0UhiaSE4ZFDywCS2tDFyCUkIaQHuqYA1MKJtYjISSyPdEeCkeRde78Siz+2hyU+Em2nJvCKtGdZWCyLD+s5QySfC2gwATsRGLtoR70qUMwk82n++Nd3K9ul7JeRSKkf9FeLWtGAr7e4n+RuBEWsuaeAigY/+e/XoXDOuEBPDQ+CnibvfAeiNCaMwcdzbFo2pusRhvTd0LSDC77PAyB4gsofRmO2vkSakZ9YzMn/jTZiJUjZj/ta7j4BJIhp36de6Vmm9pPb7kcCGtMBkqmo3hPqGhBFavZt7OmNRi+g1U8+sBmQPAOFUvkWR0gGln/O8Jo2Jd26jrfLOYV237d741oh9CeC/UX7PkNaX1wDJPaCprbkSYJzj0nFJkIsbcCG50xtKNw1IgsF8ShDSONZ5v8mqTHRSNmgNuMxu1hKIyHoojCXVd1xFWF6fIkt5h6RFCSg27dyR0sr5oVG7iell5OJzBEviNVhspI00D3FpaUPW0GpGyJREmmCR0zpdo3NSK/BkmGwRLaQN8clxnLQgHkmKwbUiNdu9Y7kLWs3HIZtsAcDbj4ditsWzsXNTLUDfqKzs2txMo/7Gpp9OrF+kIztQx7fsn/IsAUCa7xr6jHoax1vTLVlG1r3HXIc2XwIdRWOngBK6rxf1aPacpDOxk0p7wUe51hkr6Xq9zwIiFiDdk4zw4HWhDyfNpwAYAxCuRaRzPBysd6yuUjeaujWynMuliQ6/JrsksMXzxLQge8YGCV7WEPG5Yyi+cbSmAW1kPkCaF/cYVVYO27nb+G/Mahz99jzX8PZ8pbR+kc1f2Vit52rhTRRCZSxQ4BerHfxZqdoWmNA1DhZHRO0J8TJBhEVcqytJNR9joHWGuqZbhnnWzO83me5hf99MumlAIolvpFp0IE69zZPfZxFlsO1dn3mexeiFmP578PX34tLC6R2qLbNPDn+Lm1fZS+fJEdEzRrRVc0vNSNWGlOsNMNGZ7MpY20x2ebeB1J+o2PzKvlc2bqn94BoRNb6++Cc3xlmJciO1I+Yp95OM8iLfXfbTiAnVfmsOrvRbMqKy/Mx77vUXmSFrPo1MH68JMi4lyXgEgymgww1zFTijNQ9GNCAix4UmVNES6pl/GRjRpOtUPWkhtXMciPDEsU39nQ9K65HPa2YS8iALWyLITG/KrHVzbcyUav4gIdZcSiSAAWqfSuEZXdNIswwYCRSITF+bjibkHK0VRRAEbFBu72f1xlBM5Kr2JGRgkuZ8racX8YyIa0ZGJHmENJ7s8nvXGFmXpYFRg1VckOAUGGtEbgl83Ol26PMKkPCETlZ0oJQwLy8mQvvRY+CAuvA+Hnyud+lOzBEIkiFRV08SIg8sQI2slJnnvGkeUQGIdKLcS1MgZqdWhC/qJGXi0n0ySZA+IgREpOnV06mG8eXmWcAc8w3oDO+i+AfoWrUKRICkWeMaEtK6EeNQQItLYORhsSP30Petx7kvGbNCfZXeqe0v0iDJvpL9omWpt/pK6xPZlxojOsP0DLUkgiHiTEQK4137h1PNeO2K9JpH2ZHmXPJZIY7Nfmakn
6oUU+lbeWpGG8Lv00BI8zu284PXo4H5WWmn/MY0FgBs1ttmDgkgskcbsgUp9X4eCj1dc2YI9KbfFL+2owt4gCtaWsABXhe0WH1Ep2Y1AvJ7cW0I/14EUniSSEl2pnVFaCAA+ihogKUR0c5zrbJFRdMPJGARK8NtR4qjed2+F9baPgohLLUnM7SZWyFuzo1o5lkzwHANrbUEtaOnLan3tr9He/w1wMcdoNzpGnTTgGQNEdI4xMoy22o9trkTdH+SmnDRAhecWdMYYOs+aYdPjANdp0hb3JckVR+LuRYwt1AuyvrX03jMqhalPTSd42YOmslRC0gqEAGgMtfcPAuA6bhNx9rf0hcaoykYLGkiQf8asOGr+R+XYMpM7DOO6xajQ/2VzmHTZ9Y/3icjMNLbSOR4bucQzGvVZKAfaasHRABdWlt9vND0mU7Kjs4kvZY5l++0y/qOkmEA6rejnDIazWhD+K17gAj9tYAI/47aONDOWd/foo30XJkTUhsiE8HS9yBmkwMTyt3ENSWL12LkJaKWp/UolvUoucn5rDVF0ZbIdVJGMut9vxHJb0d9y/cwOedmGL+RdYAkyyxrc00BIz1zOrqneWem8W39uMQ4V/pY03QcAwEr0njl5wcl0awyr3sCBK2r9wYOAM7T/kpQOtK+EI2CHMwCq9l94VYphvTvpZ95pz7dNCAZMf0a1cmUQEmPaSJbeOs5vKy1eVukSRWpDh4G2GfsdFojsGQg4VEcJ2coaIxdZ2GaWvg6dtZcykXgwwIiXMoPtH4impQf0JkseUw0wyz1tCLcJKv6jqRxQf4jDUhx2wRrfBMvfaaq2fV+LCZaUe+zeq7vG7KHAbX6TZsvezQjszRyBrVMElZmzx9Y2Ooa9YZ+a23Ov4Niyjds79y84W1oz10fiNDfGSCijwc7UqDULPfM7xYDjGzNuHQwQgKVUk+jIQGbcwmI0NDpaim8SxrSNdRoWRmIBBeTf56IdmVpLVMfbJ+1fbbOIJJARgoPat39gSXNsqyodqW8oRnpaUXS7/o8rzD7Pd+eQqWfqlaEa6GaogKMUB/zOb54FM3nGtv5HRgYrtEhqRlz+98eZ25JvTVvlLSQ1zELAHpgpK9RuRxg7Gnnne5k0U0DkoeDx+Hgd0zYdpFfPG2kFZho4EMu7ro2ZZvbYdQGs4wnZj79TrcEUJhDvnibuSuusDhoVWgSQYuRBlpAIjUdbeLCrX/IDBCZlRxqoLUHQFoHdt+ce3UQUbWyWRawNUXRnt30Z0cSaAERXevROvtT3bP5RuR5yWTOgI4KrBOKngUhPdMRYG5z1aT2R9KmZAkq1U+MMUlXi2lX4cgYuvBbhswiC5RoUdN65j/aPOplVz9XG9KeC5vrktawsm9audeeoMYC6FzC3tOKcPBBc+zBexOIeJdyCHlXgQmnGMk8LzHESdsSSsql4IA11oAJPGwvgRECInxt6/WbRXz+fu5pbeYxaWBH1AiyDMGadgy0Wit5DtCBCJXZBhPQv92GKiIAz0zvvWsCBVBX9vyiAIg5LjRxoW2bfIdZmtWKnANArHKzz9TKzeQM2Usz99b99OzHvBjdM7W/mXTTgKRHs4g9lQnq4n8oE6y/0NNzLHOx0fNJqkVtXkNk+332H2mszJikfYemZLpNxsQZRfqR0iwe4Uf6f2jMtAVGzgEhPSaJ/tIxAQ3KC8OvczCyASuLZ5JCW5qo0ajvyvEAjKS6QvNb9o0GNrR+mdGQvCT1gMjonTiAklGGwkp+C3RGYwIqE8OjeQHnO8DLby3bboOVtp5ZR3ULjO4FIrKtBDppzeTrn0Vcor7RjFhz1c0zcrzYDHCk4CAx1mfInCGaX480oaT5We7Zsf7L+Uw5leja4wFToGQPWRHOzqGXWiL4+NfWgMUroYtT4hJ2goBQFTQQzWortu16cxhMTUO3B4z0BE5v0nve6fObbhqQzJlmzRGX6nJ/gRFjSxOZGO5DlrJbDqOSKdCAzdMplGcXS
Z2QMnLqbQyXriV9G/f2nOZIq/l9aCBEY6ZmgcgMMLHMsjgI0XxDeJZ1SxsyowmR7QxRB3DUj1q/bMMb6w7sRCfxm7fRAiWjvpvWeohyMxoWwAYhs8y3RVK4wIEn+SGQBpLMPbxzWIrknJtxddadgSkjf0cNiFzDSd3uKx18zDLUcs0js1c6f46pUJk7TMJe2hKB5Bvgyu8UaTAWhtO7BKSddwWUhDjHLIeYZPQE/IuDe6yBOGp+JOZvkstb4bNnicJ0ryHipz731ETXOpRxqoMSTfNB6xUdb7TABhg5N49OiNITM58PlEDS0JKcQaPw1VL4kL5nHWNHxLJ/UtuJePCLS2h2jRuRFVWvnBuYb/IyWh3WWL0W8HgThFkjuof9fTPppgEJp2tLd+WCvrmeHfJIsiyBBY+GwjUf1uJAWhKNVkQ8Lh4IDivixjaeK0k0Z71zHPE2bTAid1iMkgQWEojwcrK8Vl9tx34wop0j0EnH9O/xsGwAKY+YZdnBz+Q9SMdo+k3TJln9oo0h69yspk7TJI40glaZa5K2ocpxM2uOBgCrYNoI9CcKCfSvye+kMMCIGzMunhDPctgFxpvPLBg5xzfkGkBEGxPaOrd4XbhCpM0LCUb2EGk2iHjktBjRCMI9dLOtvRTYXJVaSwDqWJyZH3JN5OaWOHDtU9VGjYQGdLz5zcCIZaIFbMf06DUIfJR3iv3w2ntoj9+DOvclWtpoTdBoPoGX0/rMkBRgAVswot53BhjZQ6OxfW7Ezzvd6aYByeI9nN86hY+Y1BlJRg/gVOfKCkr4fVySyLUlmjZE/ubAhOpavMMTi71rtQfQbVB7C3svNObm/ICZkRtzu2nbZlnAGIjM/Ab6GzbXirQgpA3dy7UiZJa1+BrRh4CIpq2Sfcil4SHaQETrF6kl0vpS9v/MhqP1kcVIWRomAJt+rGVsOxD+nDJvOz4hMyA3XQ/DdyftF4DmW/NxEKLLviDE6KZ5JzUm3s05xs5EUNPM9Kx3782X9p7Q/Nb6VJ7Xro1IM91a2BooQy3zc4GBixr+Oj+fljuSdrsa4jyE7OweaE5GhJx4NMLBIcK5XJPyOiEm8BIBHNfQaD/o73trwGmNeO+0IkTgvVNAiBFPa2h84Oi9z1mb6lzw+MBbhzImqSyNV00bwn9L7Z+mGZn1F9H6qnyj0CYeTeSYqoTqiFjXbOZIz6LxQGMw1nnAzzfv5tKzkptPK2jraXqJ+PpSIlaWCAbW88zqVJJshLWOahHCtOuynnQtn1PXyfZ+C4y8BBC5050upZsGJI+LR8iLr8bcAq1dswVGNMmfRpLxp9C8AIqqWJM4y+dIACXLcOLRuOgd1LZ1FgvzfYx7eu2X7eLnWwZqmy2d/6Y6NCZpBCz3AhEAjUM6/da0IhKIyBwimtO6pBArWKX2EgPai5o1Yrhl/1l/NWmtBMozGg9L+gpswYhVj/Xc3hyQ70THMhIbHVv9Yr3L4h2eTpXho2/OBQDp+0vmuY3cAwy0JDtNs2a1jbJ/0t+gnrfOyd8jjZpljpXq0IMYrCElHSy/VXCCRvNEiQULA0eMq6v5RQicBOewxpDNuPSoW0QM6uT5WMHHMSTzjffWUP6e1ohjCCVp65FpMdpx2Pa/JA7Oe+PyA289NL9H5YHtnnAOEAHG5lnUDO/6oIQnKUz31ahWWlTI3pDzhHNc+yyKygXYa4i6znBQAnSBiaRe//BH8+HP28S/2R6/FF73HjOtHhCZBSazGvEZbfqbSDFExCuAtL3PvFOfbhuQHDzCoSYn5BOBJ0gEtkm7rAWjyyixhFvnhOeTYGQGRFAZnujRKn8OwLCkdpKsjOj8WJNWa4CkZ2qzV5Izw0hzJlqeJwmk/Ndmkq5ghIgea+WY4Exns7EwZ1h6X/o30i7RNf531DcSlFh9Zt0vj6WNulW+RyMwQmXo73a86P2hCSSIZCJHAqhAwBpqW0g71iQpLUwLCx3K8qBoGaB52zgQoTKaZHP0TnvBi
DWvRkDEWhtPyvk6prbP5VqSylyicT5Y+LlMlbFFA1A4OEEu8xAdjkjRso6IeIgu85qxgJPN+8VYEiMSGDmGkP/Gksm9AJfQzslR/7f9wzXbuuaQhCFy/nCfEk7aOib/Wkz0nuhSksrrse9bx7sr1/j36/lbAfQt9HUgVcGAQ3DNfTP76B6ytBiWxYDsYw7cmvsn1l25FvbAiHr/AIzs3Ve19t0C0LjTbdNNA5IvfOcB4eEBjwePp1PAT793NCWlfFE4KMxUogDKPSLNpujexVcJjSRNEk1Eplt7wIhs97mLykjS1itnndfs0meYxBFjPcOkjqQ1msRwpBlZMiPa04zwLMCcuDlCCz4qs0mMzTG00lViuFuGpy/1H/WZZSY4u6H0wlz3mKD0dzs59oxbbbzwfqFjaTbDNSXaM+WYeH0KLGpayPMrjQdyNl68w3qIqrSZhxTV7IJ0TUn9PTLLskwYZ+ed1Z9a31jn6Lw6bhofHFoPfPOsNcQSwtw39vuugBKTycohz3sO197xcMHphofFF0fqnhaLhyVfM2B6fQpZa5I0I8e1akbefVobJ3QNFMt+A+ScawHG42Ep0fu+4O2HEiwj3Q989unUPMOah+W4E0FrxjxrOmkegcSVGPiII2uTd+n7Ls7hpGRX71FpW5EXRAAs2ESMWODY/GPjerL9qVoKiKBc7GhNeD9qz+v5pMysgz0gouUZsXzMesc96glsZ0HJJaD3xSi8vIZktz3gz0C6aUDy4B3iQU/WNaKZe7hWYisRtJniEYg4B1xcIuHQmLNZpsQyRdAWO+vvXPjReTCkUe87auZu3Da7d2/RluxYZKV2JGw2lnlndTrm57Xjc8mKimQBmT440R1vNerNkREY0aXU276dpVbSnYQb3Kl4DVyjAtU5VtIoGt0seNfAyLWAyGw/yW+lMSjpeHtfWwbwCzFRbmqDXhWw1yS8i8AJEYeFtDGB5cNI5Y+IBaSQ5RBJ5Y8hFil9yS8SajQtLWGh5cMlSdsLLI1uEnj4yuT6uLm/95doFEHrHDAiQaNn5qgNA04HYhycEzKbm2p5V81gqT1JI9f6TO51ptf8m4BUrxUOmfeFeq9ox+x7a0CEP2/GV+Qa+8SIv5kFJXe60zl004Dk7YcFeFiwOIfFr3g6HepGYTgcJqldXcx52Fe6rhGBE7mxaBuDZLLot5RczywUmqRCu2YBB+u9WunmPjDQY2wuBSG9PtmjGaG/muM1/0fl6B+ZavFwvpy0nAk8vGvVhiTzj/ROKA6xNB7bpJBbiavF7MxuLjPJPEcmVzMmWVIjYoEXmjPSRKgXHtsCItR/ADbJNi1Aon3rxbuyFtQ2hrIePGErlKB/x2BrR0p9HaaB/+3NFQ2E9OaUlZx1zzgaMSWcFrEGFZNEnzSPpb1lp0l9GwaArpd1WssuroXg7mkF6Nsc11CYwWNuv/QXoWMLFKf2btdXybwtzTiqQRXoHY/lW7f1yHqbv0pAkz1gRAMiWwdrcQ8b8xROl7eB1r0mczvXnnTaQ+fJ38gvtX8ApHxcSADnwXMtgmijERVy9K7l/NqX9C+Gdo+DNf4+o2W7B0KacxMaEW09UJ/ZmefSxNXaP/asFXe604huGpC887AgMDDx9Jg2kEemNZGmHBrJTdW0ybLuH0jYexI0Tucw6zMhPTXaAwZmz8/4hpzLYFvUAyncNK9lSKt2pAl64GpYXzo90o7IXBMERkKsvgJHYWo0m5MFOL9/emCiZ8YltUiz9c/Q7JiUjLgEHVswp2tNZN1Ssr8BpWLNWHzSlqx+G51ttg80kAXoc6UeXw5CLgH+s2Xk2ibbdgqxmHbVciGb+xjPnHDM1dqhmS1JKbfm68Pz/zytrTklN9GipIXat9ibDFcThqwxAkqfmIBEmGela/q78vKcRmCkBwqbe1i7eXMJdHrHNSSVgeUaCtk+0wdmycFk6HfRHLjmnfzSfuO9GdCtsiUkuAA81PeaBmXWlGw2xxf/a50bm
WhysgQ40v+WztP6qa2pt0IhRrgdY+Faz7xTn24akCTJS3JADUsN1UoMxbtPawNOZpjitOGseDwsTRkNWPDJqP3WnjVLMxqUnj35Oc/eC0iAvoTbatc5bRuR3ODtay0YkYypRlwNrzlghthqRuh3iabFGJmalZmYnzY7s9VfGtOjmVuNJKr2Odvc6rk2GmtMWNqiXmjo3vi35un2ueN1gEsOe8BOa8dsvpAZINKLcieP956TpL2btj617+Gb608MmMjxN1o3LMd73q6Ro7c81p6pA14d/FqaqBH12qOZp2n3npPnY8QU97QisyHk6XppH4tkVcEHXSOEUrUK2ntxwRDdwnNtEUCpKgh2zdWwwaqvyMT7EC0KoGn8SRqtFpqyI5+KEQg8F4xce8/lmmK5BvBzd8b7TufSTQOSh8VjObSM1HElk47MeL53yr9bkw4+mV7nzVL6i1C9i/fqRJRSeE495hKwbfct6cO2nA5Ges+U5S0aRd4Z1XkJg0Q0YoJHDLZlqsXvkc/QNo7i/Eh27/IdmOkHgBImVJp+0Bikb0RZ1tvQoTazOdsfvX6pf7f+M5rGqPdc7dvtdZyWjDg/J02wtlolW5ukPcdiRukdJeNLDPTTad302xP6YEQ+Q2/jnD+IBWZG79v73Wur9k5Wf0mqfRaw+Dpun1DDbvcyuY/y7sy8Tw9sy/OyPi1UuQRG1piz2kHatVOIJbCG1k6+DtB5TciiaUZSGbUZU9STzANbhlgeS5Jzg5s3yuSyYY14YJhfhlTnJrIPwhzqAIpw6BB8FQoBSXMSYjX/gk9rMvdN6SUQlERh/UckHeUt066m7gH4m10b+Dl5rP2+BkkNyXM84zkoxvch7O8dqA3ppgEJkXdkZuPwsKTNL0W02oZSBLabbU/al6ikd2rqWEPcSA2eg3p1jwDIbJvOASFamVkwci3aK70faUOIQhw7ImqhfZs8I0GXqGpJ/CRDbPXr7PuqzAzTglhAxNKwyLZoDCvVcQp9MG2dt5i9c6XRs8Tfx/LxsjZea27agGQ+RK/1/nv71KKZa721UtM8rc23r/lJZteSCkjXzbnR+8v21jG+/Y7au2rP499Aa0uvPVpfyXsk4Om9T4+kI7blsM2va3Vszqnl7L3SnBssBPMaUIJCLG4cdZJrTvjxGmMTdr11eq9RwDTaA0ZkuTK+Y/tcejagm3bx76P19YwmqjcH9oIR6Xcqc+Zoax6B65F/yZ3udA7dPCDxLklRHgC8lc2z/oXHQwmnCLRMANkF02/5d1U22crABfa7FUlZicNmaC/D1WNqetITva7rABF57tx6zyXOZGumWNaiqZ2niC0hosmZoGXTpVcih1gK6fp0ChsTEG6HzscglwzTOaud2gagg49tpmc6p+USsfpJMgDy/bW/RDzMtX5fP9mj1Tc9BnB0fts/Tu1b+cxUrs/Yau84c6zN1T33W7+tc3tIAx8W2KRQyqQp4X0mQXH7jL6pmhwHvT7oCaDkNR5kRHvn0V/t+dozyWyY5h3N/e07hzJfKSiArFNjVC2/BisS3IgZBtDVisyOQd4HdQ7RN2jz+/B1tmhJfD8JbfEL4ZqTFcwpPrc35rJZS2K1d2auqOBajSqm1BUqYNJolGXdWi/m14dR4Js2Z44maOCgRLumPfdNpHtixDeTbh6QACjZW1Ose+R47ikDc9oMUq6BwgQKqZ/GfJAZlzxfiTbR/SBkBrhYm/4eCeseoGOVOefevYu8Vee5khebYa87h/YNki1yMingNs3WO4QINfu6DBXKI/RIMKyZgWg06g/JdPfASA+I8HwbkjaOoR0zEWLAbFCyDTahMaS9+i0aaUVntGRyA9bGuCX1ts715ig/t2c+zYCSGZLAQ7turUn8Hvk+sq9Xw5tdfnttfuwBBFrb5PGTcc8MMzjzbF6OM3H8XbgmKFH1v5Fz01oDzIR+ff6zubfcMxlOVnt37gRtlQHS+9BaC1+FP4tauoIRyzE/XXflmB5b5
q8S+WpTzyTI1QDXBrSzZ2naE+0dRu2wwMjMO/SAiPY+dA/tl729R66Td7rTJXTTgOT1KWAhpnCziCTTrXcelxJ5a/HV4Z02OppMGihJ9bjG/llKVevz7LwW8ncv67n2u54f253PgJHeRnotYPKSUhLevyOwt2fRXGMsScDkeQAbIEL9T5G0eIQewE6GCMwxVtair0v9/Wasvjr4bTkjmZoFSigxGY9cs7hobvgzYSetfrB8Cvg7z5ClIaG/1ry1wIhsf++cfJ9RudEc2jufX4Is5nSPWYfGcGlApAcQejRak632WO3TrlkCLHkP155yJp7G4uOBxl2ejEzjVOpXfBosx3BZxrx25r5khfKW37+ZT+TMHlLkrXMc9QE0YGNxDmEQGIDT7FiyAIpcj1SwMuk3MAIi/HhPQAsNjMwD6QpK6D6pJek5ut/p5eif/bN/hm/5lm/BX/7Lfxnee3z0ox/Ff/Ff/Bf4wAc+YN7zr//r/zr+xt/4G8253/N7fg/+9J/+08/dXJNuH5Aw9XdgCxOAlGjqIEHHKZURG55keIhI3U5qdJIyA1IVXTclK2JRT6I6WgxH5i3874wJUO/aOYBlb5kZ2iN10fq6ZTZ1x9ZmMV1ayR2RFYFGApE1bB3XCaBoQORcMCIBs3xfCUZUB39Xc64AbXjjXpdTeE0yh+CZkyUwWRWmdDS2tDGsMQSSCdTOyXvkNe2cBLS8zpnvNZuI8NxrbwrNMCF8nI6c2fk99NeaJ+q5CcbPzHsxkOiXtmlhckMsjter8FlYgmuiPBK9zoCE1gYAePsxZW1/daiCrRKZzNecLqPxPorU1bzP6H0n9oaewEGGjZVt5n4ua4wphO9AIAJUR3cuhPRk9ZUP+Keo5mdtclr6O7tXUrt769G5NLMe7I2sJ8HIzPee1YTIsvz8jGbu/aYQAPfC6+rAYu4i+m2/7bfhx3/8x/F93/d9OB6P+Hf+nX8H3/zN34zv/u7v7t73u3/378Yf/+N/vPx+5513nq+RE3TTgGSNUUirt5uTnDzEkI0AwEgKcuguQtUJfmbTNt9PWaQs4ETU075cAibmpSrPP8ktifa5PjxEJGEsoARVYwC0QITKa6DW+jeS+lrvuQeYyXtHYITnAaBHdJ9V2tqaIiw+5SMg80lO547/0ftpjIH1e+a4N36sdu7Rdo6AxvsJRPaOL2AMSvYwahrosMqNyhCjKpOYmqAlzAEaLeS3PL95Jmsv5bR5EoCkFZjpxMeZZOzPHSd773uu8Wg54IdAQiKR30RpB50Kofr1yWcQjRzGe6SCwMlzs88c7dlz7bwBVHCnq9AP/dAP4Xu/93vxP//P/zO+5mu+BgDwX/1X/xV+7a/9tfhP/pP/BF/2ZV9m3vvOO+/gQx/60Es1dUg3DUg+ezwBx2qTLMMXFo2J51mDF6wh4vHgSyhguajzzWErac+bh2CKpaMwaU1IYq1JFGaoF4pS1nVudnT72e8vCJlhks4po4FObkrAiZu8a2EY6Z/MXcC1IlYiP16X1mYLiPS0I5Q3Q9OMPC5+E3aTgxArSRknrhkBKLxmMpt4WCpoo36kdxiZbvF+sMpKIYEFTGV5eXxp8Anehtlj7feec3voHHBxaZ3We8xqH2Q92l8TpJBQSlyn3yNnUiNH49S9AOBY35ThL9b6DTCh94qx0aScmnJro/EEULT0RLNjeRTc5Nw5YQUGGBFfaxckIVCIpObYJjpMAhADEEY2BgiYZGFRiHWtkhptaofcP2fokiA2GvXWir3rzjmk7zP9cNnUtpHP0JtIMcYXD8P7XM/75Cc/iS/8wi8sYAQAvuEbvgHee/ytv/W38Ot//a837/2u7/ou/IW/8BfwoQ99CN/0Td+E/+A/+A/eVy3JTQOSNaS8I73ER4AuOV1DVLUccpJrzKwlFZT11esBPGpFratmhNbfb5ujwmrrTGjQay8Ye+rTFrT3E8jQ8y0tRE+KJjez1rxom9TPApO995fjbOZduamgB
pC98Bfh90swoj0yxK2T5uIzE0EaEx+BkBOdMW0k37zTO3lYDs4jmu8Pm3Hby2Dvs9vurykzz3uTaC/AGQERS0Aw0449/dQDE9eMeBNDbECJJDlentbQaHGkmSMH3LR/kLad1zMC+lY7JFn1zDLc54KS2q5WEw0wbXWoJnEhbtcl/ljqUz1UsfZcHYz0+omTBCUWv2Ddf8la0F9vdGbImj8jMDIirR/uZNNnPvOZ5verV6/w6tWrs+v79Kc/jZ/9s392c+5wOOCLvuiL8OlPf9q877f+1t+Kr/iKr8CXfdmX4Qd/8Afxh/7QH8KnPvUp/MW/+BfPbsuldNOA5BQC3juNmZpzJsiI6dAmt7URk7aEjrf1rk1Z7fmWpHA2rO9IWrtXkkn3WPXM3m89c1TPuVLgDfDofEN5nkgGRliD7rhuaUZ6oJG/Ww+M8IScUoOn/mb+IlwzoplsSbMTIg8uhXbiQgtKSGsyGleXmnJZAASYz63Se1YPiGhrwywwkXTO++65NnP9HLpkbZ0RUkjhDwe55S+qDxhnRokt2ws+LNMsTnyOSDDCw9WuIZagK4H5Ozrv4LLWUs7ZAlyaSI8hM+5V6LCn72fnn1wbrzlm1DYQ3xtc1r7WZIkhRjx4j7BGBBczeGtvp29FVR8zM35coxoBUa7DM1nNrTVMAjnLjHt2b+0JQXr3b7+h74KSHkkgYq2v0jLkTvP04Q9/uPn9bd/2bfj2b//2Tbk//If/MP7j//g/7tb1Qz/0Q2e345u/+ZvL8S/9pb8UP+fn/Bx8/dd/PX7kR34Ev+AX/IKz672EbhqQPJ0CYmb6ekyHZOKtcrw8kCUtoT1vAQ7JiCzeAcwkjG+k2vN6i5bGxM74ivQYKOudL6FzN6+ZzfWSjTH17/YcsHW47H0j2fcVfHBH9jbkL53jdcx8j9l3rmOrMjYb7Uj2GfFM+1EAi5EZ2QyxSQUzAFmZWWRYqw9OcVZFG/q118d7yJrvGgixNKTy+mh+PCcgmSWLSbTGynOAkBnqvete4YelpVYpr9c8aR4BBgImEqDMAJAZct6pfivrKSCGiKfXJ8QQcXhc4ILDITuy8/lR7lP2kzRu1/y3lpVM5B7/AapHC/HaAyvXIgKSyWSrnvflFUJeS+xEh9KPNEScBUas8TgS3tFvCU6s+2foEo1TahN9z7mx0AMi8vf7taZck2JI/176mQDwYz/2Y/jgBz9Yzlvakd//+38/fufv/J3dOn/+z//5+NCHPoR//I//cXP+dDrhn/2zf7bLP+Trvu7rAAB//+///TsgOYd++r0Vr5YkmZbmGZa0wQInfPEtzOkgFKEFZmaAhzw/I43SwMiexW6WEexpEHplr0HXqE/rSyuuOg9dOKpT28A4GOHfRwMfe8DIpcSZnMZEix1LR9IeGKHzXAKdipM5Rc0B4J0r0X40pkbOs3PeTfs9C0b435nnzwB5C4icIwTYOwf2AJS95a4hhZfPsxi8mXunSfgyETCR5lUxRDVqk0YSbFA9HIjQHDmuoSRfWzPX770DvINzCYw8LDoYsZhRKTip5/tWAjPjjIBOflO1nGTAZ8kKAV3ep/ijuRLdkHxIqk+J4fgu/PqOgYOT9Pdp3QqEemBkND5lP2j9cqmPycgMrreutuXmTa+oHuu35LGs+z8fAMtz0gc/+MEGkFj0JV/yJfiSL/mSYbmPfOQj+Mmf/En8wA/8AL76q78aAPBX/+pfRQihgIwZ+jt/5+8AAH7Oz/k50/dcm24akPxvn32Nt91jyROiOWFttRdBLUN/RyprLq2SZDGbvLzMvgvYkhWNRskORwzvNZidXv3n0gxjpH0bDVzY7WNxJTt1WM+nv/W45hPoaUakhsRq36xGZBs5q9WO8PFG4GLx/LjVlgBjMFKeT1mQs/SZiECId0i5Wzw2viRA7V8u5dXAwV7w3AMje8e8BJZ0TtOQPIdmpCeVHr3LJeBkpj5L0DIq06tfY+h6bZYMLX+m+ZeydhPDq
7Sxx7pxEEPHNGfI7Kq8dwCenk44HdcCSLx3eHh1wLJ4HB4XvP2wiDmsr1tarodR/2j1WKTPlyh+t2HsZ4Rnsm4ZAlinUIFIRpTeuQIyLK0twIFJqxGh9mpAZFY4ZM1HDZg8JyixNFbnAkVJo7VDe5dbBR8hxPch7O/zPO+rvuqr8Kt/9a/G7/7dvxt/+k//aRyPR3zsYx/Db/7Nv7lE2PpH/+gf4eu//uvx3/w3/w2+9mu/Fj/yIz+C7/7u78av/bW/Fl/8xV+MH/zBH8S3fuu34l/71/41/LJf9suepZ0zdNOA5OkU8Bgis7XlOjipxu5LG+hvb9HtbQqzUlEJPkZJpbRrzy1dn6E9EuYezW5u17g3tbUGGOBgxFrUbcn31nmdH5+jCbmkLzSq47qe49VfIxmZz1oSAh7nSsmvTZf24yiyzUgg0CszS3vBx7XBiPXMS8DXiImzntmrQ3uGnNvlr9DykdaEjnskgYicP/ydYkzaER5Zx7nkN5L8ufQEuxpJLe7s/BmVkeue1W+8vj3fTStjAaxCWXBSo/X134EDEfqtBQnYA0ZaoeWWj3gOJtzaT0egxDp3zrN75/YGBbnTy9J3fdd34WMf+xi+/uu/viRG/C//y/+yXD8ej/jUpz6Fz33ucwCAx8dHfP/3fz/+xJ/4E/jsZz+LD3/4w/joRz+KP/pH/+j79QoAbhyQ/NS7R5wORyw+hfV9+3Fhk7P6b0iSG9o7jzUUMC1W7z6tzWI2O+G1xU5qa2gj6tVpAaKZcvxZ/Pkzbd/L4M+0UbvvEtI2SiC955ztbWvPoUlatfb2QjBLMy2tzJ5xJGlWIinLaZG1enRptmeAMWrMuZ2oZaw8krPueBPeS6NvKsvyv1pSRm1eW4zNNcDWCGxY2gSrry5lIKRktjdPrN+9+zUGmM7LY0tQRMfad+LXFriiMQGq1sQiPh/4HNPasYaWIXbOYVkcloPH228/4PHgyz95L+8H7b3o+swaNysYIJIaRr5HcY0Jd6zfu79I0jQI/J0X73AkLapz0ML+ylDsdMz7cAREen4WWiTMS0HJrPaPiProJPpbzslrtWUP+Dhn/38TiMwpX/qZz0Vf9EVf1E2C+PN+3s9rhCMf/vCHIbO0vwl004Dk3acA/7SWxZ3+lo3H2OS0ibx4h0d27xpaZ2TtXn6/pFnm09KyzEg9tI2rV79276jsSAq5t93PuWBZ39r+/nWz6be5HQfXYFKb5J07NRWadHVGWq5RiP3M7D2SZlv0XOl8ShJhwGay+Hx9SdLAiHZt9H3lPc05BcD1vvm54GMGmIyetQdEWDS7blnS91EbZ++bpsmQ8RKIaGu3fHfyL/FLAiFvM0d2Kq+tE+esrbKOmXLn9V2rQegxxtq1EbjS9u5RBnptHR7l5drn/F99EGfpEq2C9q1ntCUz9Wo0Y441KxS7053OpZsGJBI8PJ22C4zUSgBQpVNyYpO2hdc5u9HSc2fKWHWey5hpjN8Mk37Oc85po7zvGpLbPQytDlDWqe8lmVJt0xvdpz6jo5U4h8mkZ43K18z0KHEzeV6Ens020NrElgz3grw4bzEh2jfcM8akKQgwN4d4mVnNSK8OjWYB5zlAZIZp2MsISdK+x14QuReEzLbNOtcDVrw9VhusftXK8lDfx2w7dHhYcHhYsBx88TH5wFuHRpjQG+cjQVqPXhbUt3su16DMEr2r5mcyOz7kPObndAHRZSGWrHHDz1v5OfYIC4ZriyHQnHnOrE/IHWTc6aXopgHJca0gZPEO//y9UwM8Fu+KE3kNr7hVl6cNJTQbLN37uacUwaS3uEkaMoLGwnFpndeQmM/SJRumlH6d+3xe10x5CdZm67C+fY+B1UDuiM7pD6kpseoJEnx43YyLJyLrmWapTsEl5C9A0beSo3urIWnNQ6rZlhwXpx0ML5Xv5QLQzo+lqPOar3MBeu/cJcej5/TIkmzvBSU9oG7ddw4jpdXR+5YzwEjrV40BJGHYGlOuE
ecdPpDNs95+XPBOFm7x++TYtkDJHtLKj/pXE+hIhrpXRgea28iX6XwLVnpAV2vrnnefBSF79lxLSyL7QPYhsA0DfC4okeZbvfZK6kXGOvfcy4Lf69Hnm8nW5wvdNCD5f/yej5Twad/0X38Si3f4v/2Of2nq3j/2//p76uInF93Fu+JbwrUlezb+3ma9Z8Htld3Tnp5UZXTvDBO/V7JzLbI2zp5T6F4Jbw+MzNwP9Bl9TueODf47xGQzr1FgmpAQq+kWByWX0uITI1J/7wMMe8mKbCPrnkl4uOf7AvvfYQQkRwzxnvv3Uk9IMvqG58wvTnsZNt5OLVCIxUzLOrRzVn9r+wQCEA4ePu8ZZJ719uNS7tGEUZf0FW9P77d2TQOasj1DJ3Sl3drYqee2DuPP8f7ynAQj11hreuuuvHaOtuSazP4l2pAR33GroORObx7dNCDh9NnXp93MwMHXJHJPJ7rSSmvfyRKup1PA54Sj+yzDoElZOc22e0aSx3/PSBJnwxL2HOreTxqBvdL37PvMhaBsaca3gJ+bZWYvNdVK77llKtoyGRgw07Bm8+TaEyRgIjUgHKD0whd655rM7SGm9oXoSuC78tzMyKW5lxgGAv+99x3RXqffHgCZucZpDwiw5u1eYDLz+xyy1huLEesxts8BSkZzhEuT9/THTD6bNUR87mlthFZU5vHg8YXvcAf2FDTl3brJmL4Ae/pqBoRcsk5rgh1ga1KlCfX4b60dqdw2f8r4m/fNwfZqQfp1jQFV79171A9PP3e/ts6N9vM9gOM5hIZvCoUY4SYFg9d85p369HkDSMIpYJXpuAckNxxaHP6z/9Mv3ZT92H///8ZrxUdF1qP9BvZJ/ai8VVfvORoYOXdh4YulpirubUbPITnZI1V6jrbMSNJHDOtL0hoilkUwUjEBBepCSjg2iq51bgx16UNCRIzcrKSdXwOuw2Tw3z1mbhZcztI5EvmXBCKyLjmPrHk/AiWyrmu0babds0KXmb633lsCk6QZqT4j2v3y2XvWq5mxOFOHpiXR3o+3Tcv3ZGlFNKZea9vMvE71zfl+PMfeswWlbQSunsZEUi+8/yztvfcORu70ptPnDyAJEU/HfsZaThTWl1PaPBa1/OPB49XBN+FdJfGNhy8W55j3nMO0AHr4RlnH7GLdaBg6G6ls97WYDo2svpdtksDAYq7Oeb48HjGum3F2Zv4PrS2y/0l6Wb4FgYGcBZn8RkJM/y3ebUAJNXf283mX6uM5HpZ8co0JlJS6/BY0Px48nk5Ji/N0Wss561vtlXhT31i/ZzQjWjvOHd8jxvecY6JLGBxJmlZ05p115m1uTdDmqnaf9oxzwNkYhGyTAqa/NfIe+Sku3pVgKG8/Hsp+Id9Fe35v7TwHfJyrZZHXNKCktXPGV2IGjFnXR219LpoDUXb+s5egWZ+qmXNEo/naE+Dc6U7n0ucNIIkxYjU0GBqtIeDdp+SkxqVYFtFms4bkSyI3Gslkyc2NylkJ17Tf8vlavfyZWrkZECOJM2Na2ZGz8fu5gVjP1SR5exjK0QJ8qTTdApBaW3ubhZReUhnyJQkighaV0TQlIZ4HSuh5/DnBxwSKjHftvb/si17SUOuec48l7QWyI0bgkuM9Tqoj0t5JClVGc2dWQvwcAotzpb8SeLTn3Ka/q0CqBf4A8PbjAT8rR9Ki+59OQdWsWyY3M5oGSXvG415QYIFLayzI3zOagJ5wT7v+nNJ7C0hr5fRr8lvbAMWqY+/79TLCX9pX5wDbWwAmd6f2N5M+bwDJ//gH/41d5Z9OIUthAxa/4I9/41d2yyfgUhfbJbSS+h4osDZma1Ma0TlAZHZh6kk3rXZzyRkvPwt89lBvc57V4pxLPVDyfi3IiSkKoI1Plbiy7MfcqnEP4CCSzu6WKRf3JQmRfidQoo3J6uNDDWzDbVtjjEgyz5w0Ke8lNCPB18pb5y4FItdg0GbmPLAVRsjvolGPgZXPHklk9/Tl6LfUfsi/B
7/N9UPrfhpjh3reu1KeAxk6j4PHu0/t+OPr6DXGZY9Gde8BQHtpOWOf6zHZ16DROBvdY2tLpOBLz3Ul+9iq+xqA4jkB3K0BkTu92fR5A0j20k9+7ogv/sDj9OLLnRYXb2dxH22YPHkjgJKMkc7Nah8OYtOzgchW8qdRXUhbFXSPCdSYjFk1/ag9vftl/4wAykwde2hWXT1b/4iJ4vWNJNAWo7iSBiQgAQPMaUk4aZG3mnPlfV15HndwTzdEAD47u5NTeyhmL0RkwkXhuPm79cYk16Bcw3HYeg6/Zv2W52YZ8b3g45oMR4/xt7RTFvExqGkoZVl5vPe99qyBG8CAuo426+qEeWUKaw2sATjmtfPBeyxZEEBjfDTurgWctTnR+63dr63hvF+0dtpM+Xnv8YTzx/tonJ6zL43mRq/PZK4rLQSytj7MaofOAXCXAgh5P623Wj64N41ifB80JHen9iH9jAIkv+O7fqAAiMdi9+vNSf4ffuL/B6BKBesGm3InzFKPKZEbNknNtMWcgxGrPn5OSgBHbRwx7rxNlnTP2pT4++6hczfmPRLBS+lade1hwLZMedL0yevXkpDtDQMs/UnCyiJ8sdwkGpht/+p5SnrjTZNAD9s7wSzy5/BzVtmZ3721YXaeX5NmAJgEfdY462m2Zvtutr0jEGOV0wQ75bdzeJBBIZoQ1umvdwm4eBdTNDmzrVXLfsma8RKSaO07jb7nHtC5tx29erVnjOb9c5iQ9dqq989+p/jR/LHe51prhdX3I1+pO91phn5GARIAeCfHhl+8w+NhaTYgSRQ/nojATFpcKDdJmziRji0mpC4+2wzwBSyJc3o9llTPq2V6tF1EqtlMb/PZG1Zz78LYAzUaE2u9z0sukucArtl6JbPNz8tnawnNSANS6hQRt0qbcplR0wrAyMCjgJbSjlrBw0Ltc8BCPiYei4tYPQulHbhJDM0Jx46rMzH/u0e63JNm9pgAKqMdW+cuBR/XYtxHNMs8yv7ha4EUUMwASEn2ujnn59f+rQIZfq2nXU7AImk8vHN42EyOWj/NJZoHR7amhxixhjQ3aGxrEaI00GytZVqfjda6a0rBZ4RWM3XtEVL1gIBWbtSOPaCGU88XpgfGtd/bayu71mpORu3n9fBxJNt56TiwAJ42PmfzbL2fFEM8O3LkJc+8U59+RgGS/+tv++py/Af/8t9tNqLv+L5P4VWOG08b02MWgVHo0ieEcs/rUyvd0DQm7cYt7ZUXNpnbhIuziyUHI1T/42GsFdGcVUdMGq9TLq6a/b5Vl6RzNhmtj6x+mz33ftClTOV2k6vX1CzLi2uc27VM7RpJ7Yg0Y9GASQhtxnYy0dqYbwU6ZvV5B5ghtmne2BpKS5LLtY9WmZ6006IegOj93gNCrg1AZuofSYqpjDw3ElDMCCbO7Q9LMEPH3M+DXyMgAgAPC/1O1/k8ofOApjX0WH3ymeKhrmfXmx6YmAEaz72ujcD/7NzZ086XXKtnwLKV+2O0H/UEJttrrd/JOXPp0rDC2nuOxpwlRL3TnWbpZxQg4fSBtx6acI30961D1Yo0aU1CAihrZqxeiQRuJMVNxzoDrv1OC8pSGMrkZM8lwjZTYG2+wPxCZElSUj2+aY9GM9KxnunM6P5ZabYlkdXq25zbIdG5VsjeazKY9Z0DG0u6dC79FmN7QDxze+/9uZkWByUhVmmy9CnxC3BcyewlgX6ghv2lb7l1VK9zJr3THECxQEmqcxx2dcQI946B64CQc8fOXkbh3OeMBAZ7pdn8WGo36K8m/JGkOqn71k9kEcCYzLEAdIAIynUC3d45rKgMWhpzvkjDaXyPgPKs1s+ia4KE0X52bdrTpr2gr9cH1lycfYYEKL17e+tPXdP08dwT2BGdEzRHq1M71/I/QT2+05320M9YQPKz3jrgcfEsL0PEw+LwIDazwqxmaa53DjigOClKshYAyfhr5g35Qem5E9IGSwLIr82Q3BTpHIGSRK0Wh7dth
tnobRyzEZJkm3tgRNvUmwXUACG9DOW9+y4FKtb3sqT2dI8FPlTG2nBWDxHQs+/sJwuUcPMt+PQ8HzI2WWpY4jL+fZu7h0e1a825YqmUA/oeILXAuiZt1M71fvPjnkP6uWDkEnpuBtKaj3ROBhvorQkWkNPAiHYPXxMX70zNCBE30yItSHPMADndtmm2B3xwgAeOGSy/d4pNHqqZb3AOKNkD7qzn7aHZ8nvqPkdzcg6Qkvdpc16e3/P9OGnaE6uNI2DCI3VZoJ+31XqOpFnNR/29NTfnv2/Gqf2FTcvuTu1jumlA8jv+wt/Gw9sfwBoi/vvf9XW77i3SrxwdxTJf4QyWvH/ErPQWtr3+F733mDk3qqPH7GqLtwYIiCzGbtQ2ySxaTvMWGBm936icZlPKz+117O61p/d7lnrgQ5aZrQtAkyBR3jrK6L6XOFjZmJH5vkmCDh50rZ42RiX1NnmLrOs9h/QeGHlu0HBtsiTw1vrQAyYa7c+xNK/608ZWaPLw1HlQs4UCDbimumKySQ+RA+kKmmdAl8Vs9tbnXplZmhGG7L13dK1XfvQee99z1PfW+jm7j/X2v5HpVA90budRAHeAt95DexetzMx1Wwtil7vTnc6lmwYkjwd/FSaRIqlozrx8nlV1fnXGnSVL4sAd2HsMk5QOA+OQoHs3KaklaevaytDrxquHCeZlzpGSLUwKZuVBaNs470fCgeascxuV08bcDKM+Ax73buJbDUrKSWJ9A5m5Hajjfg0xSXlF1nbv2nfnAJ2/swbcWy0JIDUlqS0RQI1URKd8SGZcgK4xoWM+JlIZ26/rHBpt9JZWZCTFn6nvnDZei/a0QzKTPWFFuWdHm7U+3GpEtv5zU4wnnxPpIPsNAmtYN+148B7eAQ95QziuAccMRo5rxBojPndc8XQKeDoFMX76ERppHPMkjBr1hEXXonPXpnPWMOv+a4APre4RwOudk/drQiH5mwOT3r1jbVg7diwn+L2ClRktyAiE0O/jLWhIwj0x4ptINw1IOH30z/4tPJ0C/vLv+cj0PY298GRkISJLo8LPc8fGZdlqWuSiJTfSS6S4Wp0zi7vm7zGWvDfONkaZMfW0LnROA018c+5JuWa0JM9Bz7GRS23STJm932OqHWeooQkMUrCIxbtNDhMgz8/gqtS6I/zWgbRHNXewGY5Rgjqrz+T5PWF6rwVE+L1vgpTSkhTvFUyM6t/+1ZMcjqisCWRmKHL1rBGKljzgwfsGtBMYOYaANaCAESscKl+vekyoVk67fgldOu4upd4eNft+s32wVxtyiYBIe57MF9LTJGpzRwOgls+GzHMy134JeLbj1zre+6w73UnSTQMS79rJK8P09ogyVwPJsTbVl6Rejdo9xgIsVjHvm+grfnuOm6IA2b4YKFJfLH2GTlvYRkz1pdKp2brsRXRhx/w5Y6nJ6H2tzXm0aXNq6uGO1pl62hJLG2dpRmYZWe3cOYwVlU/JBJONvJYckKJtcS1J0VyEuGH8iSkDgLAS4zbVrIZ412qgZA3V36QwgT75ljRzb4dDfvaiV6/wsaCZUO6ZG3vD9o7q20PXZgDscbur49HzO7POzbZJApH2XH/uaJLdxScndLon5RNhjFfeE8jvcPFr8TF57xTSnsFC/H7uqU2ea73TSAgzyvMi3/PS9XzvtT3lZ9dm65ylhdCeuVdDcE0avYc045rRkIx+67RfU6GBjT1g5BS2AUjudKdZumlAwmnxroS8nSFihBLVEI9L3mTWsBbJlwQiMkIRj8SSfrfXKVlWSpxFzrsJ4FialiCACmlYNOb7HIakd8+eUIHzCRLnwSKBGe6gPPo7V2d/0wJa0MGjS1mkgZE90u9rSMpnpGz8tzQva8ca01YwkL0Fzts2bsf9XPt1UJKeYQKTwVRvx+E4WhzdI8fzDM2AkR6T/JLM0oj09o3zIgAaE6jdZ4dHtzQAvfZYQMUia83orSUEKki6Lb/lGmIJA8/X5Ho8xxhqa5qMw
KW9a48xHz2vd99MXXufZ63VI83CLDA5p11W+WvPS/mOmraEnns5GDlfw8P/Wud6eUluQUMSEnP38s+8U5duGpC8OnhEyhviHd7ZoyHJ5iCLBzPX2r8AyXu0+c8ZrPYCn/R2vRKccLI21lGZS0guiBoTp0nvZqhtv+0LIEEJYAcIsBjzQqx6znj3wMhzABF5/lKpeXcjK11LWqLIgAfTlgCN5Lj7zKj3y7ZcX4qrAsUCWmqbeSZ44Hzpu6Q9Gd5nSRuv8tre+s4tdw1Ga3as90CKZNRH7ZIApzdXNE2DlEjL8hYYAbABHFY5DYRY33dUV496wp/Rfb1z5zLmPSGW1NJysgVYfe1Br+wsXUNIdNn+tgUlWpkRWBs9r9fm3pjnxxYYuQXwcafboZsGJP/CWwe8eucRAGlI5gGJdw4Phzb5FYV0lHNsAwjCNkQkEZ/sPKQwgBLqtNZbGSu/tM/hAKUwY0JSvXdDm5FwEM04zD8H6RKxdsNJJkmSAdg6OvM6ueSmK2HaoeWe3bjGjJbdt/K4J2knqhomYSYgQlVLExW6FDJQ5qD4GKTTPgNu7PTReKdadnt+ow30W02JLNvkNYG+0bftqOBWjgNtg94DSiSza2mo+DNGQoNzaC9ImZOAb7Uc50rht9cWs81758zMfb3vaWVLp79PpzX/1RzVq3be6tsRKNEYP62cZMxntSOz/cV/j/aAWeqNdTnPZqX/l9I5fXXJNU7WWgVszbioXk07dq1+6gks6VjzhbKe3xOgvikUw4ooAla8xDPv1KebBiQchBCz9h3f9ym887jgD/7K/0P/XgEketSad2XtSrnWtmd0P2dWCPxIe3p6htSa9Ki3EZ5DmuSmR5rWZO/zZH3AVrKc/rZ+AdoGbanCtfI6CNrHKO0FIns393OZAcv0QWOQKOxuNYvioEMz20INi8rIO00y3oLzS0ibG9r46419Oaa0es7VlMyCEnrepfRcDNylYGTuGfvrO1eKPyKNMZMM2Bq25lmvWJLDXntn1herLRb16tuz/uwBItdci/jzemHetfv2ALIRnQuAz33uOVpReX+PRgKrmWt7wMgMULnTnUZ004DkX3j1gA+8lV6BS49nfEneeVhUBovm0pLRgneu2K8TUfJE79pFsrc4brQlRRxN92yZw5qFWn8HTUqyRwsyop6Zg8UUzJTTyNpY7SzGJF2tfib8usZkyo1MMqTa5jfb7tnzM3X3NCS9+9v36DhbnFrfHOs5RzbuLFMsTePRAnYa9/I+tboNbe9zjZZk1vyrJ1m+BmmOqhoI5G2ja28CbZlO2zxqtg7gemY055aTNAsEpGaE1gvtftI+8raNgKj1PKuNI4C7l6HeAzrOBYC9vUADZ5YwoDdfz51H1wYY1yItb8lesLn3nhkeYnac9oD5ne40opsGJK8OHo+HmnOh/HUOf+qT/yCfS4zLWwePB+9xWBwevMOrg8frU8Bv/uU/d1Pv9/ydf1SOl8wAEWPFNSty0d5MVq/5mGyByUhbsoeubftuaUrO3Rh5mZlFc1We3/b1dXNOaDQjzbq0L0bXRmBkRNYmMQvKVujfSstJQppp7yhykZL0kAHwS/Z/acaojS2Lodl7foZmzbeuJdnV6uN0jbpfWnr8XPXskQwToOeMmMVsceEGaeZ6oKTXnksk3z2aTdQpf1+ijboW8B8lDz5H23DJmN5LloanRzMJFS2aBR69MpqfyCwouQW6m2y9mXTTgITs5GlBesyi2Yel9QlZvCtghOcbOSzGxCXmyQMIsdGIpOf2F6sQq9kKMXIzC5wFSng+hm0fzEcuOZd6oITa0KNztAjaIt7Pbt9POHYJnQssriHt1ZiDUb6LraZET5y1sP60bJf3tl1rb40yR+dbgDJyhueAI4hjTUMyA0auQdb41/pTa8dLaEnOERzMhPd9SSnyczyrBwYsMDJD9O01rdioHVbZWdPBnpnsSOs6Lrc35DPdl/5qAQxG2stlsh/3zvHeeNpra
szJ+kYjADiqc6/p9DXpVgHHnW6TbhyQpMn3+hTw6uA3QORh8XjwDt4nrUi5j7QdBiAhZill5I1bR1psJbMSLFDmXrqP5jUBJIs4KNHqtRLEzZgCXLJY9SQ2GtNzCYM+ItqAeDbjdI42ptaMS25wMowm/6tpaHrtOOfazHWtjGVm0QMliVrzLY2RftrRrl47D6JN2t9jwMaHS3Ok5yQDPljaw0s2UI2x0ZjBnokDB3l7n3UNssbCPBNqh9S1zj0nPYdmpPdbBsyQa0jvu1I/W6CkB0BHY2EWbPSuzR3r3//S70CBSepvbpIcmr6xjq1zwHlA4tpaEKsN1zCdJjp3HMhy54zBPXW96RRDeB80JG9+Bvv3m24akEjiGXYB0iiIzXcmLGlI5lY1IWJaBLdmJ1sKEbrUluV+mNVcWPbxBIYsydJL03OAEfk+vffb9oP+bCpj+6Vc7gOz99psuXMi3mzfibKWt8CE13npGLKYsg0JPxPNkV6vf397LqG9zERPawLoUXSIrsV876nnpQHGS9Ae06z2WhsoQ2pGrKzrsn5r/F+LCQT2Czw0cDECIpdI9oksMNaCDlqPtsCkpy3h7Znpx2us0dazemvnc4QR156/57wkbZzvefYtgpI7vXl004Dk6RTx8FA3ic8+nQAkp/bFObyKOatu3Epke3QMET/9tGKNyVxr8SnLeqrDdlYMETiGUJgmkgAdc3brIyIeFtJ8tLbzcj5rYESCIQlKOI0WyBHtVRXz5/K/l5DccKw6ZXbturmOuVcNnHA6h3GceffnUsP3QFzd0K8vGdKYG007oiWWW9g5Mu/qkRb6WmMezzG56T7XqGNP3RZgAeYYgdG4sRi2a9BeoPMSTMrMMywQoplmyb+kUbXKcNKYaQIns2R933OY6ZFGRM7T9ppT27N3LZL3S9+Ets94GGgSntjaEn58DaFPjzQtaK8NmqYVuExboo2Nc7VY1vy0coxo72/1SbgBIUdcV8T1hTUkL/y8W6SbBiSceJ6Fp/z3Yan5OzRaY8T/8P/9cfz004rjGop2I8SIpzWr7g9A8k6PJcSplu2Ea0aKOUlw7W9I4GG3S94nSYswtHfz69E1VeCz2o1R3TMSqJ400pK+adfluV67ejTbj88hqT5Xkri3Tjpf+28tmqrFe9UMw+wXYY7I564FROTf55ZGyjbMXJsBulZZTiPzjZ7Jy0tpQyRTe21gMltf//vMgxFZXw+Y9ubFDO0RBM1KxWfASA+IaM85V1ui+QFKAJeoak3oebp25WXH9KVj+Zrakmu9+7n7710zcqdr000DksXbEoI1RBzXiPdcwJrNQA6LQ3BbzcTiXEnoRiDgrcNSjkOMCCuplAEg+ZZwIiBzXBOY0dqzeAdkkPywbCMPVRMxbM5Z1JPEaAvWtZJd7V2IRhK+51rYegsp/eV9MoroQnSpLe9LbaTAeZvo6J6R5qgyOJWJ4EzQyrQimtaklFOie2mSu0u0F881/kbmOr37qF0WacKH2eSM1wQqe8f63n6+Rvm9WpGZv9qzrHE0+z1nrs2CAwuUnANErgFIrHGmrbcaMElkO8Zfk26Bye6NCevbXPpePSEep1vQkNzpzaSbBiSPBw9vSAkBFGBwdInhWaMrTu5AK31dvENY62R7WBwW53EMAe9RIqwYU26GQ81FQlJbMstaY8TTaQtIqF18oaUcJ/V6+mtFD7LeUyNNEjOSes1c45urfP4lzM1LS1ukRI6ePSOdPLfvZq5bdB2pcF9rZNGsRK/HzKS/IQOTyhwROKEys2OUiLe7FzN/luQ4tCSa2tgfte9aJNtI/cT9VOQ8lRLmlwAiM/ddq3+uBUTouPfNZzQkVntmmUft+l5g0pafN8+yhAN72t0jSxg0Ji3S1/mOwpdq7V+SrqFp3/teswAEuA0QxynG9yHsb7ybbI3opgGJJ6DBJsMf/jd+4dS93/N3/lGxvSLm37vqp7E4h//zv/TlV23vf/Y//sjmnKYNWaNu8
y7f9dqLwDWk9pr6XWOetPt6dfZImlXM9NG1NDx7z+8p0+u3a0matfOjSEKSegxxn7Yhic8xcRm17xwtyTWYktn7Z+aEpmWQ84t/B8nA9Pp1ps9fUqN3DdLAyN77Lnnu6Jv2hVXYlNkjBNGACD+v1SvByGz955AGkrXr2jEg54VcP+a/9Uij9SZRz5RvRuM5K0CRNLsONuvTROCgO91Jo5sHJOfSP3864YFJa4F2Uj33mrRmLYhmIy+Z6nJ9BxjZY6d66QZjMXJycZxZ3GaBCR2Twyl/5iiPwMzifKlphEZ7+9nSQvG6zunP0djYCw77DMOW4RlpTejanlDEsxJyq6xGXIorQ0XLcT6qe6ZPZzWT/NzsmJJM4B4tybUZUq1dl9Be7eHo70jTduk6tvcbzGhJRiCEH8vnaWDkEkELLztar3gZyxdwz/wf5UvZA1i0+mfP9+hcPxJL4GOtSxr11qrZPCojugXhxT0x4ptJNw1IgPMHf4gxRfRRmOZc4BrNa0iVXAjTrHMlGS9NowVak96ONpeZRd9iGqzj5+zPa2/gvfutDWcvQ3cuGJl5hgZE+Lk5zUmbM+Uc2n7z9zf++16Gmagn6dQYtr3apWvTNSIJ7aVzJdyXrgPXAlJ7gKF8vnY8U57TJYkA99JMf0lmXetn69zsM95vukZeEv7d5BoAPJ9pJDAeM3cfkjudSzcNSB68w8F74HH/hDuuEQ8lMVO6lxIpAsDrZ8j6/eA93jqkh753ChtfkXM30nOYkJ5Epbe4z2pmrEWzt/mOwAMHIVvpZlDO9SWdPZIL+14pZfp9PlNd2zEOf7lHQvZSNKNJsbUmwMJ8q3TNlN8FNM5lXC+hc8HdczFXl2hJZNt6NCsBvuT9XgpQzH7Dmbos4LhHy9W/fllSw9HacgntqUMDJbIOi+EemSvJdeNSwLOnf/aawl5Clka1RzMhxafOHy7f9+70M5NuHpB4n3IXLMs2DG6PtLKLc3jIyUNe783ANkH/l4/8vOb3n/rkPzD9RSSN1LDnSkZnFi7+e4905xxJUO9ZEoj0TLMsEDJq07n5QSyTCXltlqQJghb+Uj5jhom4ZtjJa26iGqMM6GYfqfx11d8vAeLO1ZQQWeZamhZSAxwvqUF5zmRw1wIj7wezfW3aY+6lUU/abv3eU/+5Gmpt/PSA+p5r54ISiy4ZR6N9XT5Hu7aIvpLf8xIaAVvtnBZo6E2ju8nWm0k3DUhCBA4s4eEeQAKkJIZA9UUJx7Xx43huohDCPFKIZpIxQzOgoccgWE5z1j3nSGBm2sHr0I45CBlpQqwkTxbxhX2PU7AFRnqb9ej7bk2z6iaqMZraPdY5GebYehciiynZ8+1nALWV3fq5meg9c/0S5uVSBthaGzRpsRwjfH6/RJ++pNkWp3P6twdQLhUAWM/bQz1w2Z4LTIAxw5hvTezmA1Lsf++938YCtbMAqfctL9GUnDPGrhF8Y6S10b5nj2ZD12tWAlq53rk73WmGbhqQEFECxOMOpcbiXAnny8997rjm6F1XbqRCrxaPo4sAfDHfGklBgfM3+94i2Fto92pnzq2nHgflnA0+Zv1JJFmM3KicdU0Lrbnnftne7Vio2hKNYbe+Re8Ze23IrTHJx8/sxso3unKvsvntiQB0Ke3RLI0Ylj0MzajPeoBQY05l3XsB3iX9Ors+XUtD0SOL6Rxpk07GtdFaMrvWyHv49R4wH48tuQnWjUwTLMysF+cIaa5FI7+k3t4znjO8b8KwPK93z7jtgZFz9srZNo2+jSaElOBjBEw0s+R40FJHv1l015C8mXTTgETTiPxn/+OPgEvPAT0U8Md+xf8eAPAf/bUfbiba4sn0y+O//lv/AMc1AYXf93/8+Vdv/+JT5vfgY8nq3pCfAwwazTL/M+XpurXAnVNfW3arIRqBkR4Ikc8eSTjluVmmXl7jEspLaM+3HpXtSXg59eqQ0W/2jMmRpHaGyeoxI3sYwBFdizk+R3r+Esz5c9P7p
RU5l3p93pPOn7smj0gCop62cARWap2kVW01JxZTa52jNhEdlLKc9uwVMzRrAthb73oM/IzG5Bya0YxIOmcv7gkGZ9fCkeBsBES0cne60x66aUByDBGHNWLxwBrOY4z/8WfeK2XefjzgZ711wOPB557xTaLDa5N3Dg8L4J1H8CjZ3lO7Uxm5SVhmSCOmU4I0qYUYLSJ7FrdZTYV2TXu/ETix6pfnLM2FJWmelSSPJI/nSBK1DbTW1fqV6GUuk4hb8+ak1D0z77TvZL0jP2760fh+nCxGaW//y/utyD/y+2vvpl3TnjOSsp9Lsl+1fp6591p06fvtA+zj4AeWJkTTHvZAAdU1Sxao0EDJ7P28Lbw93OcqPUf2gZ8ai6PzRJZPwyU0U4+V+R3QNM62BpqXH81v7bdGM2viLPXWkllgMk5k264XveSanJYbyEMSQ3gfNCTvb7THW6CbBiSceQ/C94MWiFOI+I7v+xSeTgH/4a/5qk0d7z6tpRwAvP24AKeAx0M2o8qM/H/01354wzR/26/6xRe1nyauzyZnWOt7LR5A2CZ+lGQtcjajuDWH0n5rz5llBEZg4VzwMVN3711GzHmPuZTx8Ud1XgOUzFJvc7SY35k6+X2SSZN5OaxnzI7dvRLJWW3KSJJ7Ls20d8TEzNZ97rjh950TrMEC8G8CXVs7MVvvrAT+HJJrLI1nzUn5XPBqzZtWO+DVMtrzZgCKpdl4jpDDmomX9c6AJUDYF8Hv/SRtX5ndk4j6IEQHI7214Rac2u/0ZtJNAxKgzW4O6AwYgQqNPve04ulUQ8Y+HjweD0v+Wyfhc2x+3iUtCVXtXQTgmvfSSDLlLXOu53HQwuLK473gp3ddgg7r2SPQot3bXNsZyGBhpnHWQkwLtyYhO4WIV4fWcb3tl2oicSnzsIcsSe25jK02Li6ZB7OAt3deZx76EkJLOshphjHq+ZD06rak2L3yvfZajEDvrwyvrI/72wrVOTsOewkxZ4Qbs8/VhT9zzDu/btWjaSDOYehnGNg1rEoZTI+R3jrRAyqzTtY9oufKwB09rYG2ZmpBRHrl5TX+Tlqyx169z0U9AYUGNPi6YK0vWn0vmdvmTp9fdPOAhDOk2mZNTK8l8Xj3acW7DJS8Ong8HgLeebQds64V0vJhSSZhCZG4lFAoRKw7qq6bqgyBu1VHjgCatkHPUE/Tof3tA6ot2Aj5XFTapZ3TyOWxwZM2rTEvrkEHHxKYaG3UiEscNWkjldlLWwZna7o1K123GBPtPg5G9mycexk+i3nXGQb9Wu+cJR3U5rK2qVrSV+sZPQDSY2pGJJkGfiwZhoNyjh/PakHOGa8aXYPpuiYYuUYb9oxvor1Cit58XcS47GlUenWNx+52D70EqOwd67PPqG3TwQkv1xN49IKIjAR5vLxcN6w19FxQMiPosNY++VcTUljryqi+N5lCWIEXNtkKd6f2Id00IDmuAYuyGdCEIJBB/37f//0Hy/HrU8C7T9sB8vqUFp/PPa3497/+Fz1r+/dK9816DDOset0GALIMp1nQpW3IMwCkuSb6QoIQ+huUNkWjHx2zZdW2zMAvMp+dEZMrbbv70rfWFEKWuZQ0oD2zsT3nprFHiqyV6QER+t1jCGbu157HqSeBluc1c74eg7cHuFjUAxdSK0J/LSnnyDb8TWAwRmNnz3y6ZHzKcrNgpFfHaLzSuR7Ty3/3nM+tunvPG4FmGcyjty72BAfnan0s0t7rHOf4PSHXe2uVZu46Wtv2vOcs9cAEHffAyKXPv9OdLLppQPLP31sRH9bNJKYF592nFT/93nGzaZwyI/x48A1TDKAx33oJ0hhsIEmegpI00bKJt8DGCIycE5KwV24P+AghbgAHnQcq0NDAyIxmxLGFkuxa6Zxzrtq6HjwCItboNtoSGiOzEsU+Qz0nIXk/FvhrSIx7QHcP42YxLkRy85xhpGa1JZwsBknTKljzlOqZacvMNxiBhpcCIntyFhFdE1RY5SVAn
wUP2rVT2ApTtHvOBSV7aCQkod+jtePaTH8lzQKhrznR1kmp5em9k3Ze63/5HAkMtOdsn8vfpR9KunfMaa+lRe/dZkkz1dLWClm3phm5ZSByD/v7ZtJNA5IQYjOh5QbydFpLhCyNidCOexuQpP/or/1wKct9hqWEQwAAM2hJREFUJdYQVQf6c+ncTc4CB9oi2AMVozZNAxLFBEuCDa7t2FwLUdWGhBB1R7qwPfRIoCTGiBASUIkhNuBlJM0GdGk4/8vLjL6fpWGxrl+TZpnEl2S+ZN09MNiT4lrHvWf2ru+5JqWywJYBku3e851HYMQGIXNgpEfXZmivAUSuUe/7WWcPgAPzWr7efCAaZWefqXMkoEnnrhMGfQ/11lwJDKxw5qPfWjQuq49mAYrWt8+5D8zWxct9voCRO725dNOA5PHgm82eTLCeTqspsbUkh29nn5G90kpaQE7G83p0ys4ifG4v3uHYWYjkopau+Y30vdFIhH3JAy1A0pMSmnXsACExxK2pVuTn6nOlhiSg/Z0ARoTL+6HP/iIxODiftSMeWNeIJUu/AgB4YHH25jEaH3sl3hr1wHPvnNWeS8rJb6oFKdgDZtUxNDJdVISvIyZplsFv51Gl3uY7W69sb4/pHAkJrHplO3tAhDOH8rpVv3zGOTRaN2bv2ZbR/QJ761avLfzarLbnWgC9B8D59VmQMPqtaSLk8/cIViwmu5cHpfcOHDRZwoRz+tvSlsy807ZfFtZuapNt2qWBlT17xbXB8Mw6JoEVgE0o6js4udO16KYBiVxwKZIUhfIFdBX148Fj8a6JokWaFH7fHrIA0AzRbSniVsTiHI7Q6+ILtrYJaO16fdKdO0eM5Cwwae6Z8AU5B4gUECPMuDRy3oEiA/gcVSt6wHnAZ3+RgKotCSEWhXwBJRhrSLS+7+UNAPrjqmfvfc1Fv9e+HsM2Mu+T42U4Nid9qKxyHDg25ztM1qZuwRzIvp45nqE9ElgizaRjxplUBybzYTt7z9pLM0KNmWvpuh2KdQ8A3nP9mjQr8Z4Z0z2J+h6Qru2PI/Czp0y9vgUmPWZda5sFTM4hbY3QfMP2P4uD/u2YPWct3wMMeTnr96X0nN/lpehusvVm0k0DkrcfPR4elzIhnk6J2yS7fwB4zGW1TfrxwCUcWynAHlq8wx//xq/cdU8JVawkEpJ5SCypmWSm6Dz9s8LuzoCNGSmgBUA0GkXJ4iZZyb+kLROi7nNikfMJ4HlH2hEAPgOhAMBnc61l/0I6knifc76X+E/6Bly6Cexp1yiK2rlM3eJcF5TwsaSZ5K0ZvPO2aZJXea03p6z+1Bj55wAl1NY9zxiBph4YuRYQmTUDta5rdE0Acs7zOWljxxpvVr3nztU99+0BnID9rWe0NNq58W89LHoPlMh2PgcwkXXLZxLtBymNqKt5jqUxWdk7a4LHkSByRqCl9X17jkR2W1oMQcmsIOpOd7LopgHJBx4OODwuSKEHV3zuaQXg8fajbZpAf+UCNyu94nQJgAFSlLCHZeD451xZF+RC9Org2YLVtonAyCr+0TWLWbAAijS9IpoNuyspxtZ0i8CIpRXhpl68HdynpETVCrHxDQke8IHuS3+XxTW+J0EBGKMxMGJyRtLh0ZjTmcstwzHjeG21d9ZUaDReRkDXIglKLECrnfe+vZc0Jr2NvgdGNm1jfT1i5mds5Xn40Hpuf/9rbdSOZ3MIANf1C5kRcrTXrwM8ZsbcXiCzZdT6Y6h3fi/NAOYZgCzPX9MEzwLR1m8OTKQZ1yz1kp2eA1IsIYZVlogDB8D2EUtjYsl/6zrQAwVUhwVKZkjWT6T5D2nPbz0vsbnHOneNlAjPTuuK6F9YY7HeNSQjumlA8tu/+sP44Ac/2Jz79/+fP6Qu0lLCDNgLKzEgH/8f/j8lSaIGZOi+SyQCvQSIi0+hsjVJ8p7nTgGOnRu8BCLSMfwS4r4iXCtSzhntijEWUGI6uk9Sb2OSTC8vPwtIrP6+VOo3k
5n8HBAif4/exaJL58sMWdLWWZpx3twLRrRyFkDpSaDnnmODEUnXACI9BqQ/H14GiFx6XSNrHFtakj3fby9gnj1+Dhoxp/J3e9yacc0+R9OanEN7+mnmW/eid7V//QaUyHrPGZPWWiG/Bw9b3xPY1L8tMNEAJ39+z0riTnfq0U0DEo2+8J2HAigW54q5TnU6S3+PIeAnP3dsonBxSSiZc/UYzUsX+9drgA8OD95tJjm1mx5R/EsWh9XHdmFXFiCuHaFz1rF8L0lrVHw8lLL8nAQnjTZkwncghqiCNU0zspcIqHDAop0D7G88A+x6WoPRhtOTyPck3ueMyWsxfyMpKZXZgDkGuHn/9za2Edg8B4iM+nHb7+dHqiKp6fYc/90ClvkxYzN4vL5rgxFLuDECIqPyvfOz43EPSYClSbPpWJ7j95zbhhnNiHa+pz0B9n3v2bE2U3ZMW0n8uYKEWSA4ownR1q9eXZpwSmo4pMZETzbZd3afEWyNiJ7RAyf8XdK5ddMXWuLfa+VXe06K8eUTI8Z415CM6PMOkDwePB68x+ITE78Ixj74zGBH1zDmXGqRfrd/9/qHzNAaAPgIwJaIcCBFoKRe000DLpGuzJCVpPAcjcSsNOVc0zBOmgaHn6PjxbXffoYs8CG/x4x5kvcOa3bKX0LL9PT+nkt7GMCZaz0ymTtFC8jN6bTzL0VcUGGBkT1M5Pz34gzaVpuit9UGSdeWlmtgRFIPjMwAkUvO7aE9ZiYas2gx6DN9bq/9Yyb43OuXUk9jwK9rzLp2rZXEt4xuj+HXJP+ynbN9MQJ0FmkCSr42a0Cwtsujpyk8h3oCU60/NFBC94/nVdt2DaDc6U6z9HkHSJ5OAcuDK1GSiGiOrDHiuKacGJVp1LNpp2vPP7leZ7UNAShLwiA1Br1NQZPy0T3Whrr3XXuMoQYgZsFHCtXrqt8H+yT10HUZVeddARjeufw7lXHltysJEntgZEbaaWqiFO2SPAZ0B27nHUimUt7Fu00bexqTvXTN8T7HQLMNUAHma5wzvePO7edoLPb02V4n8UueVWlr7iXbo9X/XIzpDAPfSnLnEhZqv2fLXNN2fWYeXAJCrHr20kiDKxl2Ts+dT0bTMljAZFtu6/w+c99MlvpLqAe6eiS1JXSOQInm9M7r1rQs2jOsOmbXdRluucc36OByxdPprgm403n0eQdIKjMI+IVpRmIFIyFGFua3hvt9LkmiRcnBLZmPAcBbh/1JpFoNz3YhsBYU+Y4jNTEn591ujYWVfb2p1zl4n8oSKAGyQ/qStEP82dZ3kkAEAJZDPrf4AkA4GCm5SVi9o3HQ04YQEJF5V3rvT0SfsdHekKbPu2KiV9q8boHUS4zhPVoBWWYa+OwUHs5oBp6jb6jOS5i82czNaa5u14qX+Oaj0M8aWWCkBzJmGK5ZX6iXoJfaM65Jz+l8rOWqmAV5bTlda9K7TwrlLAabl+21Z++1kVbGAiWtkKFt596s7rI9vF4+/yyQofXhjNZLPu9NpRjCy5tsXVkT9vlIn5eA5N3jisU7hOgRREjXECOe1iAY+ctjhUv6ju/7FADg237VL+60NbWnSj2QTbj0cgAazY4k3mZNOqSBknMXDwuUjMCKcylLOpXzvtV0ECgBeLJDBkzQakw0Iv8bqRUB0AUji7MTy1mkMVcSjHAwZpm7AUpggMC0A9QnoZWVl5GrR5dsyFLh7ylvXdsDTmbnlyw3GqvnMBeLdxfN9Us1IvKdZhiPc54pf18rtK9FmvaZn7fKj+qz2rL3fo0szfE59NwAZXbdvsZedk69o1wVM9L37bX9uUz2tvsapO2rFqPfAqmqKRkx/5q2ZGY/t9bUnjBSap5G73p3ar/TufR5B0j++XsnAGnxevvxgHcel4a5fDoFvD4FsekknxO6L51rHU730hoiXg00Hp99OjXllwcHhO1iGWIsZlwkhad/p2ZB8Xg8pPOkAeIMDmee6TrP2VLaoSwyxZ6fndN8MiRTT
Qz4/7+9s4+1o6r7/Xdm9tnnhdIea2lPi1BbRLgVqAr2pNeIJjS0DTEg5gaRe4PEQMA2QYtEayKIyRMI5BKuhCt/KeZeg0AekYhKUguFBz1UqXARKn1oU61iTyslfTntOWfvPWvdP2av2WvWrLeZvc/Z55z+PsnO3ntmzZo1s2bW+n3Xb73I4wFkUQI0bWnpGFGgiSxorU8SZPbrSLs7SaIk0yWrmdFBGCASC2MG2QkNVCNVdz/UexYzbhQirNklT+wDkFlvJRtx9lpYnL8WFjfFFmuJLNbcz5rprseaAfqaZ0uHrRXR5YHwESo+LeOu1kwfIWVLq8ko6WQ3ljLCQXTtULe5rt10Lt32otfoMv5d/9Vtvp6Rsh6RTogTW9gi4tt1r8s+4zZ88qMTuN5BV/cpkyFtM6yTfea1TFzIYyVEenyEjDYtzXJZ18Va1wCopkN3fnGs9nyOa/RtwDG9MyahknueHM8/a8x8TwBnXRjUTgsjOplzgkSs1p68YMlA0ErYmsVKiBHdC9fpwWUAcN8L76TnufOzH8nsqzXyg+hlTGNGirb26eYyb8c7YkMY/sLwVj0mwiMiRIl8jDhONaQZgABSV60o0M6yJRvu4lziv7pPHjOiihEZebt6/33uX3ahR64VItzw2DE011NpTmfMGc/ORyN5TaS/6fWlwi60VJyaRTldFaO8zyVQbMeKc9kMAlOlXshw6KDImOpz2lpX2427DEXESH6ffgahMnH5ekRcBpuNTt5j37WBbPtsxqU8xazuuKkSIb6o77LOY+LjFdGFlT0m6lomvvnWrijRlZu5MAahZaqHRaOi7r2Rw/kumFj0OdA9/762hu3dJQhf5pwgqVaidFBVrdEaYCVekmolzKzQnnyrHhNzi68vaiu77iU9VYvTcEI0pUadvAI1z0/TK3tHTOcU20xeDxGPrfAxzXxkIxkD4l/5Zrp4aRxTucUYDYP+5W5ZQFZ8hNJ9jdR7bTGuBaaKSr1n8pgRVYyo12MSIq245G5rTXESJ+Ikbt7jMAzAkfyOY57xmohrF2lU80S3sKDp+nX3qsj906EzBNSKOl9hu1vFXd4CX+FkSmvZ7UVbv2XvZlEDoR3aESHJtvxMWlMpRNoRIbbjbPmn+69bo8r2vwhF8l+3mng7RmLZZ059b3VjOuT9ZiPdT5So59bF6zto2yVQyk5taxIW2XMlXbd8GifENvl4+Z3zLTNt+1zPkrpf9AggiKLMQUESNj0gMWoNhlqDpWtyAMCZfRWpVaXVX7NoC4sLXbcLlfFaI114EWh5TMR/XaEnp9WErnVJLgTl9OgKuLRAUxdj9GgVApBpqneJE0BvKGdmnZL2WcenhFljXB7o7WNQq78B+32yoZ1ZS1mJ3nVNiXdJ/Ja6sknjRXIz+Ctek8w4FCVunQdFzWP1PvkIkjLdnmzC2Xbvfd/bTomR5Hx5I8jVsmoyTnXXpTNaTPF0ClMfcZNYcHlDfI6zCROf7lllvCRlsd13mxDRHVc2D13PmFrGq8/NVAg33fGuxhzbYHP1v9/vrKfEJSZ00wPrrscVj4ui3gu5DBSeElvjiq2hUU1DEUGrHutq7FHPUZ8VXbaYVElO4zkJK3NOkOgGkW/+9/+XLoLYWzG7RIHWy1VrxIjCENVKiIFqhAf/Yx+qUYjN/3VFqXRFYYB7tu3B2ERdqZQZYpY9d8x4aliq0wCrBdBUVL46vMWIgs4Y9nJ3R4YwlqE9OkOg3RZ9XyESs8SYF2M4QjTLu1Ca1JEhN2CfM/1YHBPq/RRjdsTK9Km4aZ4nCPL7RDc69by2mbrU37r76rPCOWA2nNRK1tTyVwY13rJGRiu+JHNNhoGur7q4xqK0M8OOjuJT9pp++y9w6CtKynpEfN9RHUXzRA5veuZ9yxYfdM+Oy5sg0qYTsy4RPBX4iKmpEts2fIVH0ffXVzybF04UnhIAYMbyS/7WxeU6v08afcO00uKMhiC0zDlBomNeXw8OH5/E2ESj2
WUrTA2p1idErREr3hKGM/sqGOzvQT3mmUHoLmRXrFzgjU00EDOO/qrsGUnEj1yI9FdblncUBEBobq3XtWSkx2oqGt+KRyccfAo5k+DwEQLtVN4uQQIUN5qBlpEk32f12luTA4SIw6YnpJGMfomb7nd5oHsQNAVJc1a1dmYmSWfukrYZPSbNTbIoEYJFxuUZkf9XNPmq+18GU+tpmZY/HzFiWiRMj16URGF+BWQ5DWUo4nUqI15sRr5JhLgaRsqIkCJio6wwMR1TRETonvkigqSIYdspQ93V/a9TosRkvBfxlHTCQ1EkvabzCmzb1Lh8tsnYRQmQlDHCE2Qfl+KTh74NBz5plxHnnh0eEhrUPhM5LQSJakQBwP/6wiWZMN/+1e7Mf/EiNliybomYnveebXtQaw6Mv++qVW2lK/uyM7hm9prqVizXOXQGp7e40Ri3pjht/4tsl42HThnLXi1lTc8Ib4YR4zqEt0QE4aw1uF/nSerECuVCdIiB8e3gyjs1bCcwVcCdwCRQTGLCNx6TiJouinhUigkAPzHiu6+oN8TVOjtVmBo1XAKkE2WOTTyI7b6ivNOetjJ0yxOi4hpk7/vedyKMDt962CRC2s1r33KhE40tBCE4LQRJtRJi6WBfOtWtiSgM0pbuU7W4+bJPpvvFCzdea2C8Zle78rS7Ymav8VriHWkwjnFpQLs4t9wPVi4UQ2l6QbUVVhxra0W0GQG644tUGL4FpywKTS3v6nE2b4ap0jfFld1vXvU6+Z81glIRwrJTLYt9ug8A1ES+VcLslL8suzaJ2AZkRYjwovgid8NSx4eIcwlRouuypRtDUhZdpd5O63U7YUxpMbUq6jwc5nPkRvHk0C0q5qLIO6hLm84g8TEyXN6QIuJD3VZ0UUWf1tqiIsVWXsh5JPLdJx/UssgsSIovfNs6Nl9OFSWWjHB14LvP8+l7P2cLRWf+0uHrJbFtB2wD3GW0K1AVbjgxpUV9x3Pvn25Ma9y8T1K90SAPifmchJXTQpAASbetmPHmzFt+L0wSnmFsog4A6excYi2T//5/XkWtwfDkTWtyx/7bxv+SfG//zzQetXIWBaKpUIvCIBUjMpFi4GR/m69HHtzfydYMncFmEyM2QVKx7JN/Z7fpK3qXWGndtyizTa381XsMw/MjwolPOjkB44gDnnblStYQyYqToDmVcRQG2i5YtsH8MjqhIW9LBta3hIoufKeYiS1mPi2hsuBXRYnqAbEZA2rDQRGvSxF8Wshdxr1LiNh+u/a1P2uX20vi+6zpyimfPDF5R3zEiFo+Fc3/VtkTStvEWhz6menEcaa6osj4krLp9sU081UZXI1RLtTzu4Ra2efOhalO9RWFunzO1WOadOnsA1WIZCacEfGUnHWMIGROC0EyUI1SEZJ4QfxfHrnF+9ipGoBkut6asp6Jida6J8n3pEUM1RoxqpUoaSlpipioR//CR2GQ61eeFKbCCI6dhY6Kq3IuWlnpxIdOmFQM4Vr/w8w5dJV+2QXt1P7rrULbvPS5upikLETk3yJO8a39rSymCACs6RkJQsl7EmY9H+l2B/KAdll4MMYzY0kYst4U3xZCGZ/Za3THyGgrxA6LaJ3RYTLmbKIki9lTMtWtyEXvi/4eM802PwGi/i86Q1aRuHX/Tdt0uDxlRXB5RJJtYS68LU0ug1MVJ/JMTCYx4iOey0wvXQbd+X3H9ZnjLO950qETIZ28H2XEiTiulabsgHfbMerMmoXSKtkdunGOJE6ITnJaCBIxiF20WrsMa1VoiP9HT7VmyPI1kmLG08HyOhEThQFqQDrYPmYMtUZLbNRi89R/pmuxtYyZ0qirvFRc8ajpVIWF8BiIb7GSvRyumm4Lpd+BVrTozmlDZwRVkc1LeXBhcj+i5v6sMEnvl/IcqAPgocSv/pa31ZQV3VXvCWBy2tvRzcCVzr4FZLt5NacEU58DV96r1yqOse13HW8zWotiauk0CQ15m++sVEncLWGia5HMh
p06XNP4tr6ZZpu5bNNt68RYkCKCxRXehK3V2fQtwqjH2M7R+h1qtvl5YnxI0lmmRMiLF92Md3LYdrBdk6kBw3XPpvr9AaZOlPliK4Nbz2k6h2MGXV1u8/zYrjW3DpjyX+0mPBtgLEZAXbZmHKeFIBGoK6XLqF10BqpRKmBU41eusF0Fo+i6BSTTD7sKBfHRGba6dJgNLGuyMpgqajWdcljfSln8V9NuGxgahWHmGHnRyFyrpFgMUdO1TV3pXtfNXzU+zC7upOC3Ga4yuj78qtiV/8eMo4qmwAzz7e1M2iAqBHW9FhvqTFrqeBIgO/2wTZCqz4vpeSjVItcB47aT2N8xuwGrMwrKevJ8rtPlmSgrRnxEiClcu/nZyfzvlBBsJ55OilH1PUxmasx25dJ5TNRj1fIPyK9hIqdfTYO63ff6XAtJtot6r3X3voxXdyrwHYDu29CoK49c69KYxEmReoYg2uG0ECQx46hGdrduIj6i1Oj94LxeRGEyIF14NsRLfGSslhuU7oOYyjdmPO3KJaOOM5HjFt4CGa0RmDHqkXpnhJfBhs47pJ5HLdRdv2UhIbbZPCNyHvRKHpUoDBAFrTE1UdgSIaaB2Bl3M+foQUuoRQHPTBIgX59p8UhVlPiIM5NBKCoH2Wum/TS7dAVMeEyQekx4U2S4WrAAZSHE5n8xnkRsE2ljYbICfBwng9zla1S/BdPRYinohAixCQ35HDpPSSXUGwLZYxnEVN5yPDpM906Ed02LqzvGtl0VIiYxYjuvTx74CJQi8RcVLgJbI5C83fQ+y8fIXfjENp1IT/7nnwH5XLZ0mq7JVha3yijAtqheWXQrwOvSZUNXB9nKEtNvEy4Rosv/IrNRFS17OpEHpjKptS/fvVh33aYB/PI29XxiHTS5npmqMYfTCY8ZEEyzh4QWaHFyWggSQN+CrhIzhmqlkhqb6arvBiPs3786XCgNiUiwP5QtYzSZO1YUltVMPOZWKV3Lra6y1B1vKryz12CvkFyGq0+FJIdLxUxTjAhdGQaB5B3RJglA8zo5b+V/yBGz7MxlRTG1huuuxehNEWOJxDPGuHayhThuVgbN/8mCi+Y0m8aVqGNIXIPfhUdFzLylM7oy6XQYArMFU7rl7aZBuKZnQIfvQF5bF0AfVCEib/MRI0UM/6nYpwtT1CicymdxKuM2tVjL+03luixM5LCmsstm9Ap03hNffLpmTRWuxgef49s5t6Dda3V5R3QeMt2xpuNt5wPIU0JMPaeFIInCIN99R0FM4zs2kSx+KAaf91bC3NiHM/sqWo+Fi2olxHitdb5ag2WEDyohKmEgDcBvFZqVMMicU205EgVHvuBPWvV13hz5OPVb3a9i8oao33L6Tfta8WS7aunCAHoxoi3s07RnxUcyi1b+elxGt0ijrvVR3+KZNQqS34o4YdkuegC0Y41qYIooQdow5hIpAnkhRHEcR7ItjptduFira5dYRyVG0ipWlxqUfNdE8Z1G2KfFuAiuY8vErWspl/flDUgGeSpvUwu86Ty6WfF0AqNI2tW4fISI7tymuMukxwdbI4BrmxqPus/ViOKDmr+t7+wzYLoWNe2+2BqXkt/lPSY+RmsZfLwsvl4THa576/usqGGK7NPFmxcQxZ8znYiU4zeNKdFhm5wlg3bFXTPkByDKcloIEh9ixtPuWclUvw3UY4YPnFHFgoEe9Eqru/dXo8xK6r7I3opag2G8FmcGbuuMALGKu5h5y9ZqJuLQGUpqAeaqbHy9IzrxYBMi5njMU2NGmm5aQCJG5HBiH+NKhcQ4GG/FI4vTKAgQw9zqBPhWPJb0h62uesLDk+SrNHhe0xItG4IRC3JjS8Qc8Dq4IsB164/oBrujGa8QJwJVgOgqHZ0r3+YUz4xZ4YoBEuhbf224PDQ2o6OIcSDCm/pkZ9ORX83dFKeMECN5QWKfDcs3/bpvVYzYxJAr/e3gG5e+ldicj0VFb1GBYhMlSTxZcSp++6bHhs7DkU1Xe
2tYFAmvS4MN3/fUfD4G3UxbpnS0KwbLhvVJhy+qMMnmi3mGSPX8elFjKSc9ehZ0ai2rqYTzLqxDwmlQu4s5K0ge/I99AJIX9/ZPr3SGb0giIWYctXoMzlozY01K0wb3VyNtAehioBphbKIlakR88kddl0R4DXwRYeWB2brj5YJHZ1iphZyt4mynJbuVnmylkjG4mtPfhiwAmr+jIEjsZTldSpaoaY45z3nK5IJVZwCqaWwHk9EqSLvwaFrBojBIV4AXcYhV3n3IhGPan1nvSFOkCdFgEz8pMZyrwWcG1rPWOWKWPV+uJU7j0cqdvqBx5YNL2KhTlhY1iGVME1n4eCbKGkk2MWJ6F6ai1VzGdM2ulm0f497HQ6LbVlQQu7AZy+08wyaRlr1neq+Jem7Xf/Va5PPZ0udKf5H/Zel0OeFD0Tw3dXMz1ddyXD7ekt5KmOt+55qFNMW23pn7aILQMmcFicC38hRi5Ph4HZxx1CcbiBsMYSVsCpAg7WLVX40wONBTOC1nVCvor1YQhQyDAxwD1ShTIKizSVUlr4y8D7BX2qqXRC6Y1OPUsCKM2KduMwkRV8VtLjSzrckxyx8v7jvAwLgY2J5cj9x9i1mM5qzw0KdPoK7E3i7qdSfkW7FEZSBXFOLaxbG1BgMTcbDEk8GhH9wuyG+XPB+KZ0I0GsnCwiZ6XAJE5zWRRUkQBhkho3YXE2FYM83pXPcxz7XEmSpnH2zHuQwG9b3MfudbxeVjBaJBRITVxasTCLpn1PeZ1Q1a13tlynkNi2IqW3wN3iLGblFBAujXy7CJHpH21nOV9ZSI/bpjilyX7fxqGrKoXt1so4taD5jSaGr5L5NW2/bpFBG2hgATtvC6e2crY2wT0Kj7dDNDtr5b09aLbfI7LrY1NNvkeOR02xoqASAo0Z19uuEsnv5B7TTtr5M5L0hM3P70G+lLN9lg+L//4zJr+Ot/8sf0t6hQ/m37f6aGxLFTNZyqxRhoducS62hEYYDBgR4M9ETZrkKSsSGv2J79hLlt4lgb+lay1gxRprD5ljS/Qrkdo08tnJ3nS+140ZKuEWUat7K490KMMM7T/XJBax7M63J/540NX2z3RK1gZEwiRBYQrrEluv1hGBSIQ398ujdueXPS+OTziLhDca7M33ThRvFbnr5YDLgXmIRuJ9A9u6YZlwQ+gl0uA0xkryn7Hnu/NwqqGPE9fqq9IzrKGIfq8ab/voavS4wUed5MXYymClfjVXZbftybrYFK3a7uK5PWItuTfeZ7WUbY+Db6FaETni9TvEB2wgG90PHrxqWLPy9y7J5+GkNClKVrguSRRx7BAw88gNHRUaxevRoPP/ww1qxZ09FzRGFg7M8oG/g+DA70YF5fBVHY8pjIrQ1i7ImgWuE4sy+ZsevIWA3/qDW0Badc+Il4xdS3vU3vDAD9tMWiz7+jcPQx/NVWFld8LkxpMs37rg7klxcVEx/X4oiuvqu27llyK1F2n7nPvv4+iPDm1nETqjGtnicVKDyZdUs21NXZtbIruotvv0qUKSLDNSGEijqjXeIlEWkFxMASISyCIEg9KamHpOm5CViQG78SM57r2pWKUEMFWhSXV0CNV80jMRlGFPKmVzRIx6EB+mm81XjFMfK+5B2NMs9orRGn+22eDtMChq5v3f0wGWpTRZm4fYxRm7gwrSDu89uW5k6KEdd9sZVV9ndDV1eJ4/KLxOrOWZSyosM3rnJip5zg16Grg13iV02f6xlTF/oF1MkxAmO5EUnhzGPYzF5TAXlILOckrHRFkDzxxBPYsmULHn30UQwPD+Ohhx7C+vXrsWfPHixevLituMXYEUEYBPjfI39NW8SPjNVw7FQtNfwBaKdbVfnhf/s4gMQroqvEhKckCgOpi1ESZmyijiNjNQxUo1TUyMhGt/CsiFm1qlGYWXtDbuFX4yjqKi/T+uNTAdpaz3yJmRj0nVRGURgglkSIKk50abOlQy1s1QI42c5QxChThZ/sNfG9D
2oculYpGbFeiAmdGHEJDJtwcYmaIGxNEiD+o5m+MAjAmbSyb9oNC2mzmrogJEKz10T2mKjekqnAp1JWZ8KzfXTI+SwLmDQ+5ZlV0ycbGaZnRvdM267JR4zI2zslHjp1vMs41ZUfusk4TMe5wmW3T58Y0YUxeTlc++T9pmuQp5ztFJ0QIu2E82WqPIe+z5vpWLkcyI8XiXKeMNUWKmMnsJmvR4gZSlcEyYMPPoibb74ZN910EwDg0Ucfxa9+9Sv86Ec/wre//e224s4YdNK6FREC9CDpPiVeZvHyFZnCV4SNWdLVa7zWyCycKMainGpOIxyFAQ4encD7YzXM66uk22oNpu23KdJ2ZjOsWHvDto6GzWjNxivSblfqRQtrl/dFF17nDVC3tb5lAz2/qKKuRdN0HXLadGNFkt/mFazV36ro0bV+ifttS4+tNUolCoJkxfYw7xmRUcWILERs65UITF21TGNKdDNzZQauN6cYRswz4iTxeDS7YumEiaY7l06UtIOP0I5Z1hMqFq2U6WkWOAuaY8xi1vKQiPNEYd5z21+NMmWLHM60hpLIz1qzlSIphypp44gQKHILKNCa0tzkRbK9v+00VnTquHbEh/pfJ0JsYYqKEJNBXaa13mc/kL0O1Qi1Cw27p8NVntrEQ96r4lfntiMcfJ4Fgc5rr1utXq2v5f2dFiU6oWxqfNMdp9oEqjBJygXR0yMpLzJlULPckHtPyHWTKQ2zwUNCzEymXZDUajXs2rULW7duTbeFYYh169ZhZGREe8zk5CQmJyfT/8ePH7eeQ7woYRCgJwwz08P2hCHOqEaoxxxHTtZSg8GXaiVMDRIxPfCkJEjGJhoYm2wgCpLfURjgyLEJTI7XMdnfk3g9pDhSYyEMMC61nvZXI1SReEV6lMJbboEWK6nqCkhA36fUF1cF5LPP1roivoXnQ14JW5eO5DvOtTC39uvXaXGlt8iCcbbrM+WBT5pU70yZxcd0cNaaWUwWG9YuXprwunA6ZGHA4pY3JGiO+VEHrQfNSQo4A3gIBGJGNR4gCIRYaXbbkoSJ/EakM4F5ekmKtjQCSIVILWZo1GIwxsFiBsY4OOPgPJlgYKKZ1pgn+Tmvr4KF0rlUkSEaTHqQTYe61k4UBpltAjFzXMw46oyjHif/x+txs9EkaqY9TkVKrzSTjklEp/EbGjps99SXIsf4GvC+gsG3W0yR+JNt5m65OsruE9gGP7v227romgxOH5GiQ0x9XhSXSPJJh7rPdc904dR71Y74sBnzNjKNcGnZYLnuoPmOh9l6rSr9bvUMaAkTOWyaXkODnLae0nUvn2FQl62ZybQLkvfeew9xHGPJkiWZ7UuWLMHbb7+tPebee+/FPffck9uuEyYTJ0+kvxthCB61BEkQBGjEDHXGMRkzNMab3g3OnSJHjj+p2IHJyQbGxybQYBz1ZitlfSJGY7IBHgSoxxHqABoTk6iP1xDwHtQHklmSssYvUAdQrQRgYYDJsIqJoI6wHqI37kFDMkzqTcMDaLWQMq4zpFvdOoQRKYwk0WrKWLabkmlAN+P6wkjGtwIDkM4SJb7j1DhLvmuwDyINlf/532FumynNyXdekDDlXugKYdeq8+r5Ta34unOJiiI1NhviOzGCY84Rxwyctb5l4zj5VgUJmt9uMeIKZyJWrjMVJGIdFPE//W4e0xxHIupXJh0nwkbNlrcgaI0rCcJkv6iYeRDknq/8t3RNnoZBvcZQbzA0YobaZAOcc8QNlt4jcd9EesOgijqqqLEIk30MLAoR1kOwStIFUxUkMiFaRodcdoltQGtBUMaBRvP5nWgwMCSipB4n4mOiljwvjZhhspZMyFlvChPZQ5vEpX/mRVkib1PxvY9lj9F1A4nCIDeAVo4zVow1+X1kynsaG8Kp72zHBYnFoPTx+vka1zZ0RqXtXbetQdEpxH3RTSGrO3+R+1j2nsn3Sa5T03SVaOyT0y3SmV9EOPldl7aFHoJErrdFPSLSqdoH4
jp049FsdSGQf37Y5CkA9tkZu05c10zHMvXnJOzMilm2tm7dii1btqT/3333XaxatQrnnHNOx87xPzsWE0EQBEEQxOnLiRMnsGDBgm4nI0O1WsXQ0BBGdz/ZlfMPDQ2hWq125dyzgWkXJIsWLUIURTh06FBm+6FDhzA0NKQ9pre3F729ven/efPmYffu3Vi1ahX+/ve/Y/78+VOaZqI7HD9+HOeccw7l8RyF8nduQ/k796E8ntuUyV/OOU6cOIFly5ZNceqK09fXh/3796NWq3Xl/NVqFX19fV0592xg2gVJtVrFpZdeiu3bt+Oaa64BADDGsH37dmzevNkrjjAMcfbZZwMA5s+fTwXhHIfyeG5D+Tu3ofyd+1Aez22K5u9M84zI9PX1kSiYoXSly9aWLVtw44034rLLLsOaNWvw0EMP4eTJk+msWwRBEARBEARBnB50RZBcd911+Ne//oW77roLo6Oj+PjHP47nnnsuN9CdIAiCIAiCIIi5TdcGtW/evNm7i5aO3t5e3H333ZmxJcTcgvJ4bkP5O7eh/J37UB7PbSh/iekk4DN6bjaCIAiCIAiCIOYyM38FG4IgCIIgCIIg5iwkSAiCIAiCIAiC6BokSAiCIAiCIAiC6BokSAiCIAiCIAiC6BqzVpA88sgj+PCHP4y+vj4MDw/jD3/4Q7eTRJTge9/7HoIgyHwuvPDCdP/ExAQ2bdqED37wg5g3bx6++MUv4tChQ11MMWHjpZdewuc//3ksW7YMQRDgF7/4RWY/5xx33XUXli5div7+fqxbtw7vvPNOJsz777+PG264AfPnz8fg4CC++tWvYmxsbBqvgrDhyuOvfOUruXd6w4YNmTCUxzOTe++9F5/61Kdw5plnYvHixbjmmmuwZ8+eTBifMvnAgQO46qqrMDAwgMWLF+POO+9Eo9GYzkshDPjk8ec+97ncO3zrrbdmwlAeE51mVgqSJ554Alu2bMHdd9+NP/3pT1i9ejXWr1+Pw4cPdztpRAk+9rGP4eDBg+nn5ZdfTvd94xvfwC9/+Us89dRTePHFF/HPf/4T1157bRdTS9g4efIkVq9ejUceeUS7//7778cPfvADPProo9i5cyfOOOMMrF+/HhMTE2mYG264AW+99Ra2bduGZ599Fi+99BJuueWW6boEwoErjwFgw4YNmXf68ccfz+ynPJ6ZvPjii9i0aRNeeeUVbNu2DfV6HVdeeSVOnjyZhnGVyXEc46qrrkKtVsPvf/97/OQnP8Fjjz2Gu+66qxuXRCj45DEA3HzzzZl3+P7770/3UR4TUwKfhaxZs4Zv2rQp/R/HMV+2bBm/9957u5gqogx33303X716tXbf0aNHeU9PD3/qqafSbX/5y184AD4yMjJNKSTKAoA//fTT6X/GGB8aGuIPPPBAuu3o0aO8t7eXP/7445xzznfv3s0B8D/+8Y9pmN/85jc8CAL+7rvvTlvaCT/UPOac8xtvvJFfffXVxmMoj2cPhw8f5gD4iy++yDn3K5N//etf8zAM+ejoaBrmhz/8IZ8/fz6fnJyc3gsgnKh5zDnnn/3sZ/ntt99uPIbymJgKZp2HpFarYdeuXVi3bl26LQxDrFu3DiMjI11MGVGWd955B8uWLcPKlStxww034MCBAwCAXbt2oV6vZ/L6wgsvxLnnnkt5PQvZv38/RkdHM/m5YMECDA8Pp/k5MjKCwcFBXHbZZWmYdevWIQxD7Ny5c9rTTJRjx44dWLx4MS644ALcdtttOHLkSLqP8nj2cOzYMQDAwoULAfiVySMjI7j44ouxZMmSNMz69etx/PhxvPXWW9OYesIHNY8FP/3pT7Fo0SJcdNFF2Lp1K06dOpXuozwmpoKurdRelvfeew9xHGdeBABYsmQJ3n777S6liijL8PAwHnvsMVxwwQU4ePAg7rnnHnzmM5/Bm2++idHRUVSrVQwODmaOWbJkCUZHR7uTYKI0Is90767YNzo6isWLF2f2VyoVLFy4kPJ8l
rBhwwZce+21WLFiBfbt24fvfOc72LhxI0ZGRhBFEeXxLIExhq9//ev49Kc/jYsuuggAvMrk0dFR7Tsu9hEzB10eA8CXv/xlLF++HMuWLcMbb7yBb33rW9izZw9+/vOfA6A8JqaGWSdIiLnFxo0b09+XXHIJhoeHsXz5cjz55JPo7+/vYsoIgijDl770pfT3xRdfjEsuuQTnnXceduzYgSuuuKKLKSOKsGnTJrz55puZMX3E3MKUx/J4rosvvhhLly7FFVdcgX379uG8886b7mQSpwmzrsvWokWLEEVRblaPQ4cOYWhoqEupIjrF4OAgPvrRj2Lv3r0YGhpCrVbD0aNHM2Eor2cnIs9s7+7Q0FBucopGo4H333+f8nyWsnLlSixatAh79+4FQHk8G9i8eTOeffZZvPDCC/jQhz6Ubvcpk4eGhrTvuNhHzAxMeaxjeHgYADLvMOUx0WlmnSCpVqu49NJLsX379nQbYwzbt2/H2rVru5gyohOMjY1h3759WLp0KS699FL09PRk8nrPnj04cOAA5fUsZMWKFRgaGsrk5/Hjx7Fz5840P9euXYujR49i165daZjnn38ejLG0UiRmF//4xz9w5MgRLF26FADl8UyGc47Nmzfj6aefxvPPP48VK1Zk9vuUyWvXrsWf//znjOjctm0b5s+fj1WrVk3PhRBGXHms4/XXXweAzDtMeUx0nG6Pqi/Dz372M97b28sfe+wxvnv3bn7LLbfwwcHBzIwPxOzgjjvu4Dt27OD79+/nv/vd7/i6dev4okWL+OHDhznnnN9666383HPP5c8//zx/9dVX+dq1a/natWu7nGrCxIkTJ/hrr73GX3vtNQ6AP/jgg/y1117jf/vb3zjnnN933318cHCQP/PMM/yNN97gV199NV+xYgUfHx9P49iwYQP/xCc+wXfu3Mlffvllfv755/Prr7++W5dEKNjy+MSJE/yb3/wmHxkZ4fv37+e//e1v+Sc/+Ul+/vnn84mJiTQOyuOZyW233cYXLFjAd+zYwQ8ePJh+Tp06lYZxlcmNRoNfdNFF/Morr+Svv/46f+655/hZZ53Ft27d2o1LIhRcebx3717+/e9/n7/66qt8//79/JlnnuErV67kl19+eRoH5TExFcxKQcI55w8//DA/99xzebVa5WvWrOGvvPJKt5NElOC6667jS5cu5dVqlZ999tn8uuuu43v37k33j4+P86997Wv8Ax/4AB8YGOBf+MIX+MGDB7uYYsLGCy+8wAHkPjfeeCPnPJn697vf/S5fsmQJ7+3t5VdccQXfs2dPJo4jR47w66+/ns+bN4/Pnz+f33TTTfzEiRNduBpChy2PT506xa+88kp+1lln8Z6eHr58+XJ+88035xqLKI9nJrp8BcB//OMfp2F8yuS//vWvfOPGjby/v58vWrSI33HHHbxer0/z1RA6XHl84MABfvnll/OFCxfy3t5e/pGPfITfeeed/NixY5l4KI+JThNwzvn0+WMIgiAIgiAIgiBazLoxJARBEARBEARBzB1IkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TVIkBAEQRAEQRAE0TX+P3fBbAqOxeoCAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import netCDF4 as nc\n", + "from matplotlib.animation import FuncAnimation\n", + "\n", + "data = nc.Dataset('/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc')\n", + "vgos = data['vgos']\n", + "ugos = data['ugos']\n", + "print(vgos.shape, ugos.shape)\n", + "\n", + "time_step = 500 \n", + "\n", + "data_slice = ugos[time_step, :, :]\n", + "\n", + "plt.figure(figsize=(10, 8))\n", + "\n", + "im = plt.imshow(data_slice, origin='lower', cmap='RdBu_r') \n", + "plt.colorbar(im, label='m/s')\n", + "plt.title(f\"Surface Geostrophic Velocity (ugos) - Time Step {time_step}\")\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7983ba78-f67e-4f10-8f71-101cb3d90d0e", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd3ece8d-eddd-4a48-85d9-c78617ba9e62", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "979c7023-048a-4561-83a5-237f1ec10e1c", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "485cc7f9-ed2b-4d12-94b2-e51567aa6715", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.19" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/inference_Kuro_simvp.py b/Exp3_Kuroshio_forecasting/inference_Kuro_simvp.py new file mode 100644 index 
0000000000000000000000000000000000000000..8dde3d158dd29d8c8cbdc21a55334c8795e72834 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/inference_Kuro_simvp.py @@ -0,0 +1,171 @@ +import os +import random +import torch +import numpy as np +import torch.nn as nn +import torch.optim as optim +import torch.utils.data as data_utils +import torch.distributed as dist +import netCDF4 as nc +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from model_baseline.simvp import * +from model_baseline.kno_2d import * +from model_baseline.U_net import * +from torch.optim.lr_scheduler import CosineAnnealingLR +import torch.distributed as dist +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from torch.optim.lr_scheduler import CosineAnnealingLR + +# Setup logging +backbone = 'Kuro_Simvp_exp1_20250224' +logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/logs/{backbone}_training_log.log', + level=logging.INFO, + format='%(asctime)s %(message)s') + +# Set a specific seed +seed = 42 +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False +set_seed(seed) + +# ========================== Distributed Training Setup ========================== +dist.init_process_group(backend='nccl') +local_rank = int(os.environ['LOCAL_RANK']) +torch.cuda.set_device(local_rank) +device = torch.device("cuda", local_rank) +num_gpus = torch.cuda.device_count() + +def reduce_mean(tensor, nprocs): + rt = tensor.clone() + dist.all_reduce(rt, op=dist.ReduceOp.SUM) + rt /= nprocs + return rt + +# ============================== Data Loading ============================== +from dataloader_api.dataloader_kuroshio_256 import * + +config = { + 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', 
+ 'input_steps': 10, + 'output_steps': 10, + 'batch_size': 2, + 'val_batch_size': 2, + 'num_workers': 4, + 'seed': 42 +} + +train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + +for sample_input, sample_target in train_loader: + print(sample_input.shape, sample_target.shape) + print(f"Input data range: [{sample_input.min():.2f}, {sample_input.max():.2f}]") + print(f"NaN value existence: {torch.isnan(sample_input).any().item()}") + print(f"Inf value existence: {torch.isinf(sample_input).any().item()}") + print("mean, std", data_mean, data_std) + break + +# ============================== Model Setup ============================== +model = SimVP(shape_in=(10, 2, 256, 256), hid_S=64, hid_T=128, output_dim = 2) +model = model.to(device) +model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=False) + +# ============================== Criterion and Optimizer ============================== +criterion = nn.MSELoss() +optimizer = optim.Adam(model.parameters(), lr=1e-2) + +num_epochs = 2000 +scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0) + +# ============================== Training, Validation, and Testing Functions ============================== +def train(model, train_loader, criterion, optimizer, device): + model.train() + train_loss = 0.0 + for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + optimizer.zero_grad() + outputs = model(inputs) + loss= criterion(outputs, targets) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + return train_loss / len(train_loader.dataset) + +def validate(model, val_loader, criterion, device): + model.eval() + val_loss = 0.0 + with torch.no_grad(): + for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0): + inputs = inputs.to(device, 
non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + loss = criterion(outputs, targets) + val_loss += loss.item() * inputs.size(0) + return val_loss / len(val_loader.dataset) + +def test(model, test_loader, criterion, device): + path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results' + model.eval() + test_loss = 0.0 + all_inputs = [] + all_targets = [] + all_outputs = [] + + with torch.no_grad(): + for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + + # Collect results + all_inputs.append(inputs.cpu().numpy()) + all_targets.append(targets.cpu().numpy()) + all_outputs.append(outputs.cpu().numpy()) + + loss = criterion(outputs, targets) + test_loss += loss.item() * inputs.size(0) + + all_inputs = np.concatenate(all_inputs, axis=0) + all_targets = np.concatenate(all_targets, axis=0) + all_outputs = np.concatenate(all_outputs, axis=0) + + if local_rank == 0: + np.save(f'{path}/{backbone}_inputs.npy', all_inputs) + np.save(f'{path}/{backbone}_targets.npy', all_targets) + np.save(f'{path}/{backbone}_outputs.npy', all_outputs) + + return test_loss / len(test_loader.dataset) +# ============================== Main Training Loop ============================== +best_val_loss = float('inf') +best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth' + +if local_rank == 0 and os.path.exists(best_model_path): + try: + logging.info('Loading best model from checkpoint.') + checkpoint = torch.load(best_model_path, map_location=device) + model.load_state_dict(checkpoint) + except Exception as e: + logging.error(f'Error loading model checkpoint: {e}') + +print("\n========== Inference starting ==========") +if local_rank == 0: + try: + model.load_state_dict(torch.load(best_model_path)) + 
test_loss = test(model, test_loader, criterion, device) + logging.info("Testing completed and best model saved.") + except Exception as e: + logging.error(f'Error loading model checkpoint during testing: {e}') + +inference_path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results' +dist.destroy_process_group() +print("\n✅ Inference process completed! All results have been saved to:", inference_path) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/inference_Kuro_triton.py b/Exp3_Kuroshio_forecasting/inference_Kuro_triton.py new file mode 100644 index 0000000000000000000000000000000000000000..f574620795a675a259ae457176310cd15bba027d --- /dev/null +++ b/Exp3_Kuroshio_forecasting/inference_Kuro_triton.py @@ -0,0 +1,178 @@ +import os +import random +import torch +import numpy as np +import torch.nn as nn +import torch.optim as optim +import torch.utils.data as data_utils +import torch.distributed as dist +import netCDF4 as nc +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from model_baseline.simvp import * +from model_baseline.kno_2d import * +from model_baseline.U_net import * +from model.Triton_model import * +from torch.optim.lr_scheduler import CosineAnnealingLR +import torch.distributed as dist +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from torch.optim.lr_scheduler import CosineAnnealingLR + +# Setup logging +backbone = 'Kuro_Triton_exp1_20250224' +logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/logs/{backbone}_training_log.log', + level=logging.INFO, + format='%(asctime)s %(message)s') + +# Set a specific seed +seed = 42 +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False 
+set_seed(seed) + +# ========================== Distributed Training Setup ========================== +dist.init_process_group(backend='nccl') +local_rank = int(os.environ['LOCAL_RANK']) +torch.cuda.set_device(local_rank) +device = torch.device("cuda", local_rank) +num_gpus = torch.cuda.device_count() + +def reduce_mean(tensor, nprocs): + rt = tensor.clone() + dist.all_reduce(rt, op=dist.ReduceOp.SUM) + rt /= nprocs + return rt + +# ============================== Data Loading ============================== +from dataloader_api.dataloader_kuroshio_256 import * + +config = { + 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', + 'input_steps': 10, + 'output_steps': 10, + 'batch_size': 2, + 'val_batch_size': 2, + 'num_workers': 4, + 'seed': 42 +} + +train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + +for sample_input, sample_target in train_loader: + print(sample_input.shape, sample_target.shape) + print(f"Input data range: [{sample_input.min():.2f}, {sample_input.max():.2f}]") + print(f"NaN value existence: {torch.isnan(sample_input).any().item()}") + print(f"Inf value existence: {torch.isinf(sample_input).any().item()}") + print("mean, std", data_mean, data_std) + break + +# ============================== Model Setup ============================== +model = Triton( + shape_in=(10, 2, 256, 256), + spatial_hidden_dim=256, + output_channels=2, + temporal_hidden_dim=512, + num_spatial_layers=4, + num_temporal_layers=8) +model = model.to(device) +model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=False) + +# ============================== Criterion and Optimizer ============================== +criterion = nn.MSELoss() +optimizer = optim.Adam(model.parameters(), lr=1e-2) + +num_epochs = 2000 +scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0) + +# ============================== Training, Validation, and Testing Functions ============================== +def 
train(model, train_loader, criterion, optimizer, device): + model.train() + train_loss = 0.0 + for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + optimizer.zero_grad() + outputs = model(inputs) + loss= criterion(outputs, targets) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + return train_loss / len(train_loader.dataset) + +def validate(model, val_loader, criterion, device): + model.eval() + val_loss = 0.0 + with torch.no_grad(): + for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + loss = criterion(outputs, targets) + val_loss += loss.item() * inputs.size(0) + return val_loss / len(val_loader.dataset) + +def test(model, test_loader, criterion, device): + path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results' + model.eval() + test_loss = 0.0 + all_inputs = [] + all_targets = [] + all_outputs = [] + + with torch.no_grad(): + for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + + # Collect results + all_inputs.append(inputs.cpu().numpy()) + all_targets.append(targets.cpu().numpy()) + all_outputs.append(outputs.cpu().numpy()) + + loss = criterion(outputs, targets) + test_loss += loss.item() * inputs.size(0) + + all_inputs = np.concatenate(all_inputs, axis=0) + all_targets = np.concatenate(all_targets, axis=0) + all_outputs = np.concatenate(all_outputs, axis=0) + + if local_rank == 0: + np.save(f'{path}/{backbone}_inputs.npy', all_inputs) + np.save(f'{path}/{backbone}_targets.npy', all_targets) + np.save(f'{path}/{backbone}_outputs.npy', all_outputs) + + 
return test_loss / len(test_loader.dataset) +# ============================== Main Training Loop ============================== +best_val_loss = float('inf') +best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth' + +if local_rank == 0 and os.path.exists(best_model_path): + try: + logging.info('Loading best model from checkpoint.') + checkpoint = torch.load(best_model_path, map_location=device) + model.load_state_dict(checkpoint) + except Exception as e: + logging.error(f'Error loading model checkpoint: {e}') + +print("\n========== Inference starting ==========") +if local_rank == 0: + try: + model.load_state_dict(torch.load(best_model_path)) + test_loss = test(model, test_loader, criterion, device) + logging.info("Testing completed and best model saved.") + except Exception as e: + logging.error(f'Error loading model checkpoint during testing: {e}') + +inference_path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results' +dist.destroy_process_group() +print("\n✅ Inference process completed! All results have been saved to:", inference_path) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/Inference_Simvp.ipynb b/Exp3_Kuroshio_forecasting/inference_results_open/Inference_Simvp.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..2c8de68129563a21a2fe5435b55ef4324c21e848 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/inference_results_open/Inference_Simvp.ipynb @@ -0,0 +1,1474 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "fd41a2a1-b0a7-4c2d-8526-f02f2caaade5", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/cuda/__init__.py:129: UserWarning: CUDA initialization: The NVIDIA driver on your system is too old (found version 11000). 
Please update your GPU driver by downloading and installing a new version from the URL: http://www.nvidia.com/Download/index.aspx Alternatively, go to: https://pytorch.org to install a PyTorch version that has been compiled with your version of the CUDA driver. (Triggered internally at /pytorch/c10/cuda/CUDAFunctions.cpp:109.)\n", + " return torch._C._cuda_getDeviceCount() > 0\n", + "2025-03-24 19:11:19,981 [INFO] 模型加载成功: Kuro_Simvp_exp_128_20250324_best_model.pth\n", + "2025-03-24 19:11:20,029 [INFO] 共生成 73 个初始日期,示例:['2021-01-01', '2021-01-06', '2021-01-11', '2021-01-16', '2021-01-21']...\n", + "/tmp/ipykernel_58400/3478194085.py:107: FutureWarning: `torch.cuda.amp.autocast(args...)` is deprecated. Please use `torch.amp.autocast('cuda', args...)` instead.\n", + " with torch.no_grad(), torch.cuda.amp.autocast():\n", + "/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/amp/autocast_mode.py:266: UserWarning: User provided device_type of 'cuda', but CUDA is not available. Disabling\n", + " warnings.warn(\n", + "\n", + "\u001b[A度: 0%| | 0/12 [00:00= len(self.time_stamps):\n", + " raise ValueError(f\"Prediction exceeds data range, the data cutoff date is: {self.time_stamps[-1].strftime('%Y-%m-%d')}\")\n", + "\n", + " initial_dates = self.time_stamps[end_idx-9 : end_idx+1] \n", + " label_dates = self.time_stamps[end_idx+1 : end_idx+1+pred_days]\n", + "\n", + " def load_var(var_name, start, end):\n", + " data = self.ds[var_name].isel(time=slice(start, end)).values\n", + " return torch.FloatTensor(np.nan_to_num(data, nan=0.0))\n", + "\n", + " ugos_init = load_var('ugos', end_idx-9, end_idx+1)\n", + " vgos_init = load_var('vgos', end_idx-9, end_idx+1)\n", + " initial = torch.stack([ugos_init, vgos_init], dim=1).unsqueeze(0).to(device)\n", + "\n", + " ugos_label = load_var('ugos', end_idx+1, end_idx+1+pred_days)\n", + " vgos_label = load_var('vgos', end_idx+1, end_idx+1+pred_days)\n", + " label = torch.stack([ugos_label, vgos_label], 
dim=1).unsqueeze(0).to(device)\n", + "\n", + " return initial, label, initial_dates, label_dates\n", + "\n", + "# ============================== Inference engine ==============================\n", + "def predict_single(model, initial_input, pred_days):\n", + " model.eval()\n", + " predictions = []\n", + " current_input = initial_input.clone()\n", + " \n", + " with torch.no_grad(), torch.cuda.amp.autocast():\n", + " total_steps = (pred_days + 9) // 10\n", + " for _ in tqdm(range(total_steps), desc=f\"Prediction progress.\", leave=False):\n", + " output = model(current_input)\n", + " predictions.append(output.cpu())\n", + " current_input = output[:, -10:]\n", + " \n", + " return torch.cat(predictions, dim=1)[:, :pred_days].to(device)\n", + "\n", + "# ============================== Batch processing. ==============================\n", + "def process_batch(model, data_loader, target_dates, pred_days, save_dir):\n", + " os.makedirs(save_dir, exist_ok=True)\n", + " success_count = 0\n", + " \n", + " for date_str in tqdm(target_dates, desc=\"Process initial conditions.\"):\n", + " try:\n", + " initial, label, init_dates, label_dates = data_loader.load_single_case(date_str, pred_days)\n", + " initial = initial[..., ::2, ::2] \n", + " label = label[..., ::2, ::2]\n", + " \n", + " prediction = predict_single(model, initial, pred_days)\n", + " \n", + " save_path = os.path.join(save_dir, f\"forecast_{date_str.replace('-','')}.h5\")\n", + " save_results(\n", + " initial.cpu(), \n", + " label.cpu(), \n", + " prediction.cpu(),\n", + " init_dates, \n", + " label_dates,\n", + " save_path\n", + " )\n", + " success_count += 1\n", + " \n", + " del initial, label, prediction\n", + " torch.cuda.empty_cache()\n", + " \n", + " except Exception as e:\n", + " logging.error(f\"Process {date_str} failed: {str(e)}\")\n", + " continue\n", + " \n", + " logging.info(f\"Processing complete, successfully processed {success_count}/{len(target_dates)} initial conditions\")\n", + "\n", + "# 
============================== Results saved ==============================\n", + "def save_results(initial, label, prediction, init_dates, label_dates, save_path):\n", + " with h5py.File(save_path, 'w') as f:\n", + " f.create_dataset('initial', data=initial.numpy())\n", + " f.create_dataset('label', data=label.numpy())\n", + " f.create_dataset('prediction', data=prediction.numpy())\n", + " \n", + " def save_dates(dataset_name, dates):\n", + " str_dates = [d.strftime(\"%Y-%m-%d\") for d in dates]\n", + " dt = h5py.string_dtype(encoding='utf-8')\n", + " f.create_dataset(dataset_name, data=np.array(str_dates, dtype=dt))\n", + " \n", + " save_dates('initial_dates', init_dates)\n", + " save_dates('label_dates', label_dates)\n", + " \n", + " f.attrs['input_end_date'] = init_dates[-1].strftime(\"%Y-%m-%d\")\n", + " f.attrs['pred_start_date'] = label_dates[0].strftime(\"%Y-%m-%d\")\n", + " f.attrs['pred_end_date'] = label_dates[-1].strftime(\"%Y-%m-%d\")\n", + "\n", + "def visualize_enhanced(h5_path, step=0, save_fig=True):\n", + " with h5py.File(h5_path, 'r') as f:\n", + " initial = f['initial'][0]\n", + " label = f['label'][0]\n", + " prediction = f['prediction'][0]\n", + " init_dates = [d.decode() for d in f['initial_dates'][:]]\n", + " label_dates = [d.decode() for d in f['label_dates'][:]]\n", + " \n", + " input_end_date = init_dates[-1]\n", + " pred_date = label_dates[min(step, len(label_dates)-1)]\n", + " \n", + " def get_speed(data, step):\n", + " return np.sqrt(data[step,0]**2 + data[step,1]**2)\n", + " \n", + " fig, axes = plt.subplots(1, 3, figsize=(24, 6))\n", + " fig.suptitle(f\"Comparison of Ocean Surface Current Speed\\nInput End Date: {input_end_date} → Prediction Date: {pred_date}\", \n", + " y=1.05, fontsize=14, fontweight='bold')\n", + " \n", + " plot_kwargs = {\n", + " 'cmap': 'jet',\n", + " 'extent': [123.1, 154.9, 10.06, 41.94], \n", + " 'origin': 'lower',\n", + " 'vmin': 0,\n", + " 'vmax': max(np.nanmax(label), np.nanmax(prediction))\n", + " }\n", + 
" \n", + " speed_initial = get_speed(initial, -1)\n", + " im0 = axes[0].imshow(speed_initial, **plot_kwargs)\n", + " axes[0].set_title(f\"Initial Field Last Day\\n{init_dates[-1]}\", fontsize=12)\n", + " axes[0].set_xlabel('Longitude', fontsize=10)\n", + " axes[0].set_ylabel('Latitude', fontsize=10)\n", + "\n", + " \n", + " speed_label = get_speed(label, step)\n", + " im1 = axes[1].imshow(speed_label, **plot_kwargs)\n", + " axes[1].set_title(f\"True Values\\n{pred_date}\", fontsize=12)\n", + " axes[1].set_xlabel('Longitude', fontsize=10)\n", + "\n", + " speed_pred = get_speed(prediction, step)\n", + " im2 = axes[2].imshow(speed_pred, **plot_kwargs)\n", + " axes[2].set_title(f\"Predicted Values\\n{pred_date}\", fontsize=12)\n", + " axes[2].set_xlabel('Longitude', fontsize=10)\n", + "\n", + " cbar = fig.colorbar(im1, ax=axes, orientation='vertical', shrink=0.8, pad=0.03)\n", + " cbar.set_label('Current Speed (m/s)', fontsize=10)\n", + "\n", + " plt.tight_layout()\n", + "\n", + " if save_fig:\n", + " fig_name = f\"forecast_{input_end_date}_day{step+1}.png\"\n", + " plt.savefig(fig_name, dpi=300, bbox_inches='tight')\n", + " plt.close()\n", + " else:\n", + " plt.show()\n", + "\n", + "\n", + "# ============================== Main ==============================\n", + "if __name__ == \"__main__\":\n", + " backbone = 'Kuro_Simvp_exp_128_20250324'\n", + " config = {\n", + " 'model_path': f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth',\n", + " 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc',\n", + " 'date_range': { \n", + " 'start': '2021-01-01',\n", + " 'end': '2021-12-31',\n", + " 'interval': 5 \n", + " },\n", + " 'pred_days': 120,\n", + " 'save_dir':f'./{backbone}_forecast_results'\n", + " }\n", + "\n", + " try:\n", + " model = load_single_model(config['model_path'])\n", + " data_loader = OceanDataLoader(config['data_path'])\n", + " \n", + " target_dates = data_loader.generate_target_dates(\n", + " 
start_date=config['date_range']['start'],\n", + " end_date=config['date_range']['end'],\n", + " interval_days=config['date_range']['interval']\n", + " )\n", + " logging.info(f\"Generated {len(target_dates)} initial dates, example: {target_dates[:5]}...\")\n", + " \n", + " process_batch(\n", + " model, \n", + " data_loader,\n", + " target_dates,\n", + " config['pred_days'],\n", + " config['save_dir']\n", + " )\n", + " \n", + " sample_dates = [target_dates[0], target_dates[-1]]\n", + " for date in sample_dates:\n", + " h5_file = os.path.join(config['save_dir'], f\"forecast_{date.replace('-','')}.h5\")\n", + " for step in [0, 60, 119]: \n", + " visualize_enhanced(h5_file, step=step)\n", + " \n", + " except Exception as e:\n", + " logging.error(f\"Main process error: {str(e)}\")\n", + " raise" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9105d1dd-49b8-4e91-9b41-23406f05992a", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c5eaa58f-6e63-4876-a558-dbe5d085ed6f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "322a4033-f4fa-495c-b095-f39099d18811", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8346e2d-5eb7-4a04-bb6f-4a0831016f40", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/Inference_Triton.ipynb 
b/Exp3_Kuroshio_forecasting/inference_results_open/Inference_Triton.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..6ccbc2e66a724ddcb34a011974c6e53fc5bdbbda --- /dev/null +++ b/Exp3_Kuroshio_forecasting/inference_results_open/Inference_Triton.ipynb @@ -0,0 +1,324 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "9105d1dd-49b8-4e91-9b41-23406f05992a", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import torch\n", + "import logging\n", + "import numpy as np\n", + "import h5py\n", + "import matplotlib.pyplot as plt\n", + "from datetime import datetime, timedelta\n", + "from tqdm import tqdm\n", + "import xarray as xr\n", + "from model.Triton_model import Triton\n", + "\n", + "\n", + "# ============================== Initialization Configuration ==============================\n", + "SEED = 42\n", + "torch.manual_seed(SEED)\n", + "np.random.seed(SEED)\n", + "logging.basicConfig(level=logging.INFO,\n", + " format='%(asctime)s [%(levelname)s] %(message)s')\n", + "\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "\n", + "\n", + "\n", + "def load_single_model(model_path):\n", + " \"\"\"Load the best inference model\"\"\"\n", + " model = Triton(\n", + " shape_in=(10, 2, 128, 128),\n", + " spatial_hidden_dim=256,\n", + " output_channels=2,\n", + " temporal_hidden_dim=512,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8).to(device)\n", + " \n", + " if os.path.exists(model_path):\n", + " checkpoint = torch.load(model_path, map_location=device)\n", + " if any(k.startswith('module.') for k in checkpoint.keys()):\n", + " checkpoint = {k.replace('module.', ''): v for k, v in checkpoint.items()}\n", + " model.load_state_dict(checkpoint)\n", + " logging.info(f\"Model loaded successfully.: {os.path.basename(model_path)}\")\n", + " else:\n", + " raise FileNotFoundError(f\"Model file not found.: {model_path}\")\n", + " return model\n", + "\n", + "# 
============================== Dataloader ==============================\n", + "class OceanDataLoader:\n", + " def __init__(self, nc_path):\n", + " self.ds = xr.open_dataset(nc_path)\n", + " self.time_stamps = self.ds.time.values.astype('datetime64[s]').astype(datetime)\n", + " \n", + " def generate_target_dates(self, start_date, end_date, interval_days=1):\n", + " \"\"\"Generate a continuous initial date sequence.\"\"\"\n", + " all_dates = []\n", + " current_date = datetime.strptime(start_date, \"%Y-%m-%d\")\n", + " end_date = datetime.strptime(end_date, \"%Y-%m-%d\")\n", + " \n", + " while current_date <= end_date:\n", + " if current_date in self.time_stamps:\n", + " all_dates.append(current_date.strftime(\"%Y-%m-%d\"))\n", + " current_date += timedelta(days=interval_days)\n", + " \n", + " if not all_dates:\n", + " raise ValueError(\"No valid dates found. Please check the date range and data files.\")\n", + " return all_dates\n", + " \n", + " def load_single_case(self, target_date, pred_days):\n", + " \"\"\"\n", + " Load a single initial condition \n", + " param target_date: The last date of the input sequence (format: 'YYYY-MM-%d') \n", + " param pred_days: The number of days to predict \n", + " return: (initial data, true labels, initial timestamp, label timestamp) \n", + " \"\"\"\n", + " try:\n", + " target_dt = datetime.strptime(target_date, \"%Y-%m-%d\")\n", + " end_idx = np.where(self.time_stamps == target_dt)[0][0]\n", + " except IndexError:\n", + " available_dates = [d.strftime(\"%Y-%m-%d\") for d in self.time_stamps[-10:]]\n", + " raise ValueError(f\"Invalid date {target_date}, the last 10 available dates: {available_dates}\")\n", + "\n", + " if end_idx < 9:\n", + " raise ValueError(f\"At least 9 days of data are required, the earliest available date: {self.time_stamps[0].strftime('%Y-%m-%d')}\")\n", + " if end_idx + pred_days >= len(self.time_stamps):\n", + " raise ValueError(f\"Prediction exceeds data range, the data cutoff date is: 
{self.time_stamps[-1].strftime('%Y-%m-%d')}\")\n", + "\n", + " initial_dates = self.time_stamps[end_idx-9 : end_idx+1] \n", + " label_dates = self.time_stamps[end_idx+1 : end_idx+1+pred_days]\n", + "\n", + " def load_var(var_name, start, end):\n", + " data = self.ds[var_name].isel(time=slice(start, end)).values\n", + " return torch.FloatTensor(np.nan_to_num(data, nan=0.0))\n", + "\n", + " ugos_init = load_var('ugos', end_idx-9, end_idx+1)\n", + " vgos_init = load_var('vgos', end_idx-9, end_idx+1)\n", + " initial = torch.stack([ugos_init, vgos_init], dim=1).unsqueeze(0).to(device)\n", + "\n", + " ugos_label = load_var('ugos', end_idx+1, end_idx+1+pred_days)\n", + " vgos_label = load_var('vgos', end_idx+1, end_idx+1+pred_days)\n", + " label = torch.stack([ugos_label, vgos_label], dim=1).unsqueeze(0).to(device)\n", + "\n", + " return initial, label, initial_dates, label_dates\n", + "\n", + "# ============================== Inference engine ==============================\n", + "def predict_single(model, initial_input, pred_days):\n", + " model.eval()\n", + " predictions = []\n", + " current_input = initial_input.clone()\n", + " \n", + " with torch.no_grad(), torch.cuda.amp.autocast():\n", + " total_steps = (pred_days + 9) // 10\n", + " for _ in tqdm(range(total_steps), desc=f\"Prediction progress.\", leave=False):\n", + " output = model(current_input)\n", + " predictions.append(output.cpu())\n", + " current_input = output[:, -10:]\n", + " \n", + " return torch.cat(predictions, dim=1)[:, :pred_days].to(device)\n", + "\n", + "# ============================== Batch processing. 
==============================\n", + "def process_batch(model, data_loader, target_dates, pred_days, save_dir):\n", + " os.makedirs(save_dir, exist_ok=True)\n", + " success_count = 0\n", + " \n", + " for date_str in tqdm(target_dates, desc=\"Process initial conditions.\"):\n", + " try:\n", + " initial, label, init_dates, label_dates = data_loader.load_single_case(date_str, pred_days)\n", + " initial = initial[..., ::2, ::2] \n", + " label = label[..., ::2, ::2]\n", + " \n", + " prediction = predict_single(model, initial, pred_days)\n", + " \n", + " save_path = os.path.join(save_dir, f\"forecast_{date_str.replace('-','')}.h5\")\n", + " save_results(\n", + " initial.cpu(), \n", + " label.cpu(), \n", + " prediction.cpu(),\n", + " init_dates, \n", + " label_dates,\n", + " save_path\n", + " )\n", + " success_count += 1\n", + " \n", + " del initial, label, prediction\n", + " torch.cuda.empty_cache()\n", + " \n", + " except Exception as e:\n", + " logging.error(f\"Process {date_str} failed: {str(e)}\")\n", + " continue\n", + " \n", + " logging.info(f\"Processing complete, successfully processed {success_count}/{len(target_dates)} initial conditions\")\n", + "\n", + "# ============================== Results saved ==============================\n", + "def save_results(initial, label, prediction, init_dates, label_dates, save_path):\n", + " with h5py.File(save_path, 'w') as f:\n", + " f.create_dataset('initial', data=initial.numpy())\n", + " f.create_dataset('label', data=label.numpy())\n", + " f.create_dataset('prediction', data=prediction.numpy())\n", + " \n", + " def save_dates(dataset_name, dates):\n", + " str_dates = [d.strftime(\"%Y-%m-%d\") for d in dates]\n", + " dt = h5py.string_dtype(encoding='utf-8')\n", + " f.create_dataset(dataset_name, data=np.array(str_dates, dtype=dt))\n", + " \n", + " save_dates('initial_dates', init_dates)\n", + " save_dates('label_dates', label_dates)\n", + " \n", + " f.attrs['input_end_date'] = init_dates[-1].strftime(\"%Y-%m-%d\")\n", + 
" f.attrs['pred_start_date'] = label_dates[0].strftime(\"%Y-%m-%d\")\n", + " f.attrs['pred_end_date'] = label_dates[-1].strftime(\"%Y-%m-%d\")\n", + "\n", + "def visualize_enhanced(h5_path, step=0, save_fig=True):\n", + " with h5py.File(h5_path, 'r') as f:\n", + " initial = f['initial'][0]\n", + " label = f['label'][0]\n", + " prediction = f['prediction'][0]\n", + " init_dates = [d.decode() for d in f['initial_dates'][:]]\n", + " label_dates = [d.decode() for d in f['label_dates'][:]]\n", + " \n", + " input_end_date = init_dates[-1]\n", + " pred_date = label_dates[min(step, len(label_dates)-1)]\n", + " \n", + " def get_speed(data, step):\n", + " return np.sqrt(data[step,0]**2 + data[step,1]**2)\n", + " \n", + " fig, axes = plt.subplots(1, 3, figsize=(24, 6))\n", + " fig.suptitle(f\"Comparison of Ocean Surface Current Speed\\nInput End Date: {input_end_date} → Prediction Date: {pred_date}\", \n", + " y=1.05, fontsize=14, fontweight='bold')\n", + " \n", + " plot_kwargs = {\n", + " 'cmap': 'jet',\n", + " 'extent': [123.1, 154.9, 10.06, 41.94], \n", + " 'origin': 'lower',\n", + " 'vmin': 0,\n", + " 'vmax': max(np.nanmax(label), np.nanmax(prediction))\n", + " }\n", + " \n", + " speed_initial = get_speed(initial, -1)\n", + " im0 = axes[0].imshow(speed_initial, **plot_kwargs)\n", + " axes[0].set_title(f\"Initial Field Last Day\\n{init_dates[-1]}\", fontsize=12)\n", + " axes[0].set_xlabel('Longitude', fontsize=10)\n", + " axes[0].set_ylabel('Latitude', fontsize=10)\n", + "\n", + " \n", + " speed_label = get_speed(label, step)\n", + " im1 = axes[1].imshow(speed_label, **plot_kwargs)\n", + " axes[1].set_title(f\"True Values\\n{pred_date}\", fontsize=12)\n", + " axes[1].set_xlabel('Longitude', fontsize=10)\n", + "\n", + " speed_pred = get_speed(prediction, step)\n", + " im2 = axes[2].imshow(speed_pred, **plot_kwargs)\n", + " axes[2].set_title(f\"Predicted Values\\n{pred_date}\", fontsize=12)\n", + " axes[2].set_xlabel('Longitude', fontsize=10)\n", + "\n", + " cbar = 
fig.colorbar(im1, ax=axes, orientation='vertical', shrink=0.8, pad=0.03)\n", + " cbar.set_label('Current Speed (m/s)', fontsize=10)\n", + "\n", + " plt.tight_layout()\n", + "\n", + " if save_fig:\n", + " fig_name = f\"forecast_{input_end_date}_day{step+1}.png\"\n", + " plt.savefig(fig_name, dpi=300, bbox_inches='tight')\n", + " plt.close()\n", + " else:\n", + " plt.show()\n", + "\n", + "\n", + "# ============================== Main ==============================\n", + "if __name__ == \"__main__\":\n", + " backbone = 'Kuro_Triton_exp1_128_20250322'\n", + " config = {\n", + " 'model_path': f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth',\n", + " 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc',\n", + " 'date_range': { \n", + " 'start': '2021-01-01',\n", + " 'end': '2021-12-31',\n", + " 'interval': 5 \n", + " },\n", + " 'pred_days': 120,\n", + " 'save_dir':f'./{backbone}_forecast_results'\n", + " }\n", + "\n", + " try:\n", + " model = load_single_model(config['model_path'])\n", + " data_loader = OceanDataLoader(config['data_path'])\n", + " \n", + " target_dates = data_loader.generate_target_dates(\n", + " start_date=config['date_range']['start'],\n", + " end_date=config['date_range']['end'],\n", + " interval_days=config['date_range']['interval']\n", + " )\n", + " logging.info(f\"Generated {len(target_dates)} initial dates, example: {target_dates[:5]}...\")\n", + " \n", + " process_batch(\n", + " model, \n", + " data_loader,\n", + " target_dates,\n", + " config['pred_days'],\n", + " config['save_dir']\n", + " )\n", + " \n", + " sample_dates = [target_dates[0], target_dates[-1]]\n", + " for date in sample_dates:\n", + " h5_file = os.path.join(config['save_dir'], f\"forecast_{date.replace('-','')}.h5\")\n", + " for step in [0, 60, 119]: \n", + " visualize_enhanced(h5_file, step=step)\n", + " \n", + " except Exception as e:\n", + " logging.error(f\"Main process error: {str(e)}\")\n", + " raise" + ] + }, + 
{ + "cell_type": "code", + "execution_count": null, + "id": "c5eaa58f-6e63-4876-a558-dbe5d085ed6f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "322a4033-f4fa-495c-b095-f39099d18811", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8346e2d-5eb7-4a04-bb6f-4a0831016f40", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/Inference_UNet.ipynb b/Exp3_Kuroshio_forecasting/inference_results_open/Inference_UNet.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..69c2598ab193a0c8950398f77043543dc2e45697 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/inference_results_open/Inference_UNet.ipynb @@ -0,0 +1,388 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "9105d1dd-49b8-4e91-9b41-23406f05992a", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/cuda/__init__.py:129: UserWarning: CUDA initialization: The NVIDIA driver on your system is too old (found version 11000). Please update your GPU driver by downloading and installing a new version from the URL: http://www.nvidia.com/Download/index.aspx Alternatively, go to: https://pytorch.org to install a PyTorch version that has been compiled with your version of the CUDA driver. 
(Triggered internally at /pytorch/c10/cuda/CUDAFunctions.cpp:109.)\n", + " return torch._C._cuda_getDeviceCount() > 0\n", + "2025-04-28 18:58:32,675 [INFO] Model loaded successfully.: Kuro_Unet_exp_128_20250324_best_model.pth\n", + "2025-04-28 18:58:32,801 [INFO] Generated 73 initial dates, example: ['2021-01-01', '2021-01-06', '2021-01-11', '2021-01-16', '2021-01-21']...\n", + "/tmp/ipykernel_52182/1335310592.py:101: FutureWarning: `torch.cuda.amp.autocast(args...)` is deprecated. Please use `torch.amp.autocast('cuda', args...)` instead.\n", + " with torch.no_grad(), torch.cuda.amp.autocast():\n", + "/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/amp/autocast_mode.py:266: UserWarning: User provided device_type of 'cuda', but CUDA is not available. Disabling\n", + " warnings.warn(\n", + "\n", + "\u001b[Adiction progress.: 0%| | 0/12 [00:00 244\u001b[0m \u001b[43mprocess_batch\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 245\u001b[0m \u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 246\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata_loader\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 247\u001b[0m \u001b[43m \u001b[49m\u001b[43mtarget_dates\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 248\u001b[0m \u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mpred_days\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 249\u001b[0m \u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43msave_dir\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\n\u001b[1;32m 250\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 252\u001b[0m sample_dates \u001b[38;5;241m=\u001b[39m [target_dates[\u001b[38;5;241m0\u001b[39m], target_dates[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]]\n\u001b[1;32m 253\u001b[0m 
\u001b[38;5;28;01mfor\u001b[39;00m date \u001b[38;5;129;01min\u001b[39;00m sample_dates:\n", + "Cell \u001b[0;32mIn[1], line 121\u001b[0m, in \u001b[0;36mprocess_batch\u001b[0;34m(model, data_loader, target_dates, pred_days, save_dir)\u001b[0m\n\u001b[1;32m 118\u001b[0m initial \u001b[38;5;241m=\u001b[39m initial[\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m, ::\u001b[38;5;241m2\u001b[39m, ::\u001b[38;5;241m2\u001b[39m] \n\u001b[1;32m 119\u001b[0m label \u001b[38;5;241m=\u001b[39m label[\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;241m.\u001b[39m, ::\u001b[38;5;241m2\u001b[39m, ::\u001b[38;5;241m2\u001b[39m]\n\u001b[0;32m--> 121\u001b[0m prediction \u001b[38;5;241m=\u001b[39m \u001b[43mpredict_single\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minitial\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpred_days\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 123\u001b[0m save_path \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(save_dir, \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mforecast_\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mdate_str\u001b[38;5;241m.\u001b[39mreplace(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m-\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m'\u001b[39m)\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.h5\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 124\u001b[0m save_results(\n\u001b[1;32m 125\u001b[0m initial\u001b[38;5;241m.\u001b[39mcpu(), \n\u001b[1;32m 126\u001b[0m label\u001b[38;5;241m.\u001b[39mcpu(), \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 130\u001b[0m save_path\n\u001b[1;32m 131\u001b[0m )\n", + "Cell \u001b[0;32mIn[1], line 104\u001b[0m, in \u001b[0;36mpredict_single\u001b[0;34m(model, initial_input, pred_days)\u001b[0m\n\u001b[1;32m 102\u001b[0m total_steps \u001b[38;5;241m=\u001b[39m 
(pred_days \u001b[38;5;241m+\u001b[39m \u001b[38;5;241m9\u001b[39m) \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m10\u001b[39m\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m _ \u001b[38;5;129;01min\u001b[39;00m tqdm(\u001b[38;5;28mrange\u001b[39m(total_steps), desc\u001b[38;5;241m=\u001b[39m\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPrediction progress.\u001b[39m\u001b[38;5;124m\"\u001b[39m, leave\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m):\n\u001b[0;32m--> 104\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcurrent_input\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 105\u001b[0m predictions\u001b[38;5;241m.\u001b[39mappend(output\u001b[38;5;241m.\u001b[39mcpu())\n\u001b[1;32m 106\u001b[0m current_input \u001b[38;5;241m=\u001b[39m output[:, \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m10\u001b[39m:]\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File 
\u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/model/U_net.py:86\u001b[0m, in \u001b[0;36mUNet.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 84\u001b[0m x 
\u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mup1(x4, x3)\n\u001b[1;32m 85\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mup2(x, x2)\n\u001b[0;32m---> 86\u001b[0m x \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mup3\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mx1\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 87\u001b[0m logits \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39moutc(x)\n\u001b[1;32m 89\u001b[0m \u001b[38;5;66;03m# 恢复原始维度(如果是5维输入)\u001b[39;00m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 
1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/model/U_net.py:44\u001b[0m, in \u001b[0;36mUp.forward\u001b[0;34m(self, x1, x2)\u001b[0m\n\u001b[1;32m 41\u001b[0m x1 \u001b[38;5;241m=\u001b[39m nn\u001b[38;5;241m.\u001b[39mfunctional\u001b[38;5;241m.\u001b[39mpad(x1, [diffX \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m2\u001b[39m, diffX \u001b[38;5;241m-\u001b[39m diffX \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m2\u001b[39m,\n\u001b[1;32m 42\u001b[0m 
diffY \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m2\u001b[39m, diffY \u001b[38;5;241m-\u001b[39m diffY \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m2\u001b[39m])\n\u001b[1;32m 43\u001b[0m x \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mcat([x2, x1], dim\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m---> 44\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconv\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 
1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/model/U_net.py:17\u001b[0m, in \u001b[0;36mDoubleConv.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 16\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[0;32m---> 17\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdouble_conv\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m\n", + "File 
\u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m 
_global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/container.py:250\u001b[0m, in \u001b[0;36mSequential.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 248\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m):\n\u001b[1;32m 249\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[0;32m--> 250\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[43mmodule\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 251\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28minput\u001b[39m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1739\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1737\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: 
ignore[misc]\u001b[39;00m\n\u001b[1;32m 1738\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1739\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/module.py:1750\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1745\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1746\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1747\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1748\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1749\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1750\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1752\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1753\u001b[0m called_always_called_hooks \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mset\u001b[39m()\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/conv.py:554\u001b[0m, in \u001b[0;36mConv2d.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 553\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 554\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_conv_forward\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/haowu/lib/python3.10/site-packages/torch/nn/modules/conv.py:549\u001b[0m, in \u001b[0;36mConv2d._conv_forward\u001b[0;34m(self, input, weight, bias)\u001b[0m\n\u001b[1;32m 537\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpadding_mode \u001b[38;5;241m!=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mzeros\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 538\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m F\u001b[38;5;241m.\u001b[39mconv2d(\n\u001b[1;32m 539\u001b[0m F\u001b[38;5;241m.\u001b[39mpad(\n\u001b[1;32m 540\u001b[0m \u001b[38;5;28minput\u001b[39m, 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reversed_padding_repeated_twice, mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpadding_mode\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 547\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgroups,\n\u001b[1;32m 548\u001b[0m )\n\u001b[0;32m--> 549\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconv2d\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 550\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpadding\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdilation\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgroups\u001b[49m\n\u001b[1;32m 551\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "import os\n", + "import torch\n", + "import logging\n", + "import numpy as np\n", + "import h5py\n", + "import matplotlib.pyplot as plt\n", + "from datetime import datetime, timedelta\n", + "from tqdm import tqdm\n", + "import xarray as xr\n", + "from model.U_net import *\n", + "\n", + "\n", + "# ============================== Initialization Configuration ==============================\n", + "SEED = 42\n", + "torch.manual_seed(SEED)\n", + "np.random.seed(SEED)\n", + "logging.basicConfig(level=logging.INFO,\n", + " format='%(asctime)s [%(levelname)s] %(message)s')\n", + 
"\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "\n", + "\n", + "\n", + "def load_single_model(model_path):\n", + " \"\"\"Load the best inference model\"\"\"\n", + " model = UNet(n_channels=2, n_classes=2).to(device)\n", + " \n", + " if os.path.exists(model_path):\n", + " checkpoint = torch.load(model_path, map_location=device)\n", + " if any(k.startswith('module.') for k in checkpoint.keys()):\n", + " checkpoint = {k.replace('module.', ''): v for k, v in checkpoint.items()}\n", + " model.load_state_dict(checkpoint)\n", + " logging.info(f\"Model loaded successfully.: {os.path.basename(model_path)}\")\n", + " else:\n", + " raise FileNotFoundError(f\"Model file not found.: {model_path}\")\n", + " return model\n", + "\n", + "# ============================== Dataloader ==============================\n", + "class OceanDataLoader:\n", + " def __init__(self, nc_path):\n", + " self.ds = xr.open_dataset(nc_path)\n", + " self.time_stamps = self.ds.time.values.astype('datetime64[s]').astype(datetime)\n", + " \n", + " def generate_target_dates(self, start_date, end_date, interval_days=1):\n", + " \"\"\"Generate a continuous initial date sequence.\"\"\"\n", + " all_dates = []\n", + " current_date = datetime.strptime(start_date, \"%Y-%m-%d\")\n", + " end_date = datetime.strptime(end_date, \"%Y-%m-%d\")\n", + " \n", + " while current_date <= end_date:\n", + " if current_date in self.time_stamps:\n", + " all_dates.append(current_date.strftime(\"%Y-%m-%d\"))\n", + " current_date += timedelta(days=interval_days)\n", + " \n", + " if not all_dates:\n", + " raise ValueError(\"No valid dates found. 
Please check the date range and data files.\")\n", + " return all_dates\n", + " \n", + " def load_single_case(self, target_date, pred_days):\n", + " \"\"\"\n", + " Load a single initial condition \n", + " param target_date: The last date of the input sequence (format: 'YYYY-MM-%d') \n", + " param pred_days: The number of days to predict \n", + " return: (initial data, true labels, initial timestamp, label timestamp) \n", + " \"\"\"\n", + " try:\n", + " target_dt = datetime.strptime(target_date, \"%Y-%m-%d\")\n", + " end_idx = np.where(self.time_stamps == target_dt)[0][0]\n", + " except IndexError:\n", + " available_dates = [d.strftime(\"%Y-%m-%d\") for d in self.time_stamps[-10:]]\n", + " raise ValueError(f\"Invalid date {target_date}, the last 10 available dates: {available_dates}\")\n", + "\n", + " if end_idx < 9:\n", + " raise ValueError(f\"At least 9 days of data are required, the earliest available date: {self.time_stamps[0].strftime('%Y-%m-%d')}\")\n", + " if end_idx + pred_days >= len(self.time_stamps):\n", + " raise ValueError(f\"Prediction exceeds data range, the data cutoff date is: {self.time_stamps[-1].strftime('%Y-%m-%d')}\")\n", + "\n", + " initial_dates = self.time_stamps[end_idx-9 : end_idx+1] \n", + " label_dates = self.time_stamps[end_idx+1 : end_idx+1+pred_days]\n", + "\n", + " def load_var(var_name, start, end):\n", + " data = self.ds[var_name].isel(time=slice(start, end)).values\n", + " return torch.FloatTensor(np.nan_to_num(data, nan=0.0))\n", + "\n", + " ugos_init = load_var('ugos', end_idx-9, end_idx+1)\n", + " vgos_init = load_var('vgos', end_idx-9, end_idx+1)\n", + " initial = torch.stack([ugos_init, vgos_init], dim=1).unsqueeze(0).to(device)\n", + "\n", + " ugos_label = load_var('ugos', end_idx+1, end_idx+1+pred_days)\n", + " vgos_label = load_var('vgos', end_idx+1, end_idx+1+pred_days)\n", + " label = torch.stack([ugos_label, vgos_label], dim=1).unsqueeze(0).to(device)\n", + "\n", + " return initial, label, initial_dates, label_dates\n", 
+ "\n", + "# ============================== Inference engine ==============================\n", + "def predict_single(model, initial_input, pred_days):\n", + " model.eval()\n", + " predictions = []\n", + " current_input = initial_input.clone()\n", + " \n", + " with torch.no_grad(), torch.cuda.amp.autocast():\n", + " total_steps = (pred_days + 9) // 10\n", + " for _ in tqdm(range(total_steps), desc=f\"Prediction progress.\", leave=False):\n", + " output = model(current_input)\n", + " predictions.append(output.cpu())\n", + " current_input = output[:, -10:]\n", + " \n", + " return torch.cat(predictions, dim=1)[:, :pred_days].to(device)\n", + "\n", + "# ============================== Batch processing. ==============================\n", + "def process_batch(model, data_loader, target_dates, pred_days, save_dir):\n", + " os.makedirs(save_dir, exist_ok=True)\n", + " success_count = 0\n", + " \n", + " for date_str in tqdm(target_dates, desc=\"Process initial conditions.\"):\n", + " try:\n", + " initial, label, init_dates, label_dates = data_loader.load_single_case(date_str, pred_days)\n", + " initial = initial[..., ::2, ::2] \n", + " label = label[..., ::2, ::2]\n", + " \n", + " prediction = predict_single(model, initial, pred_days)\n", + " \n", + " save_path = os.path.join(save_dir, f\"forecast_{date_str.replace('-','')}.h5\")\n", + " save_results(\n", + " initial.cpu(), \n", + " label.cpu(), \n", + " prediction.cpu(),\n", + " init_dates, \n", + " label_dates,\n", + " save_path\n", + " )\n", + " success_count += 1\n", + " \n", + " del initial, label, prediction\n", + " torch.cuda.empty_cache()\n", + " \n", + " except Exception as e:\n", + " logging.error(f\"Process {date_str} failed: {str(e)}\")\n", + " continue\n", + " \n", + " logging.info(f\"Processing complete, successfully processed {success_count}/{len(target_dates)} initial conditions\")\n", + "\n", + "# ============================== Results saved ==============================\n", + "def save_results(initial, 
label, prediction, init_dates, label_dates, save_path):\n", + " with h5py.File(save_path, 'w') as f:\n", + " f.create_dataset('initial', data=initial.numpy())\n", + " f.create_dataset('label', data=label.numpy())\n", + " f.create_dataset('prediction', data=prediction.numpy())\n", + " \n", + " def save_dates(dataset_name, dates):\n", + " str_dates = [d.strftime(\"%Y-%m-%d\") for d in dates]\n", + " dt = h5py.string_dtype(encoding='utf-8')\n", + " f.create_dataset(dataset_name, data=np.array(str_dates, dtype=dt))\n", + " \n", + " save_dates('initial_dates', init_dates)\n", + " save_dates('label_dates', label_dates)\n", + " \n", + " f.attrs['input_end_date'] = init_dates[-1].strftime(\"%Y-%m-%d\")\n", + " f.attrs['pred_start_date'] = label_dates[0].strftime(\"%Y-%m-%d\")\n", + " f.attrs['pred_end_date'] = label_dates[-1].strftime(\"%Y-%m-%d\")\n", + "\n", + "def visualize_enhanced(h5_path, step=0, save_fig=True):\n", + " with h5py.File(h5_path, 'r') as f:\n", + " initial = f['initial'][0]\n", + " label = f['label'][0]\n", + " prediction = f['prediction'][0]\n", + " init_dates = [d.decode() for d in f['initial_dates'][:]]\n", + " label_dates = [d.decode() for d in f['label_dates'][:]]\n", + " \n", + " input_end_date = init_dates[-1]\n", + " pred_date = label_dates[min(step, len(label_dates)-1)]\n", + " \n", + " def get_speed(data, step):\n", + " return np.sqrt(data[step,0]**2 + data[step,1]**2)\n", + " \n", + " fig, axes = plt.subplots(1, 3, figsize=(24, 6))\n", + " fig.suptitle(f\"Comparison of Ocean Surface Current Speed\\nInput End Date: {input_end_date} → Prediction Date: {pred_date}\", \n", + " y=1.05, fontsize=14, fontweight='bold')\n", + " \n", + " plot_kwargs = {\n", + " 'cmap': 'jet',\n", + " 'extent': [123.1, 154.9, 10.06, 41.94], \n", + " 'origin': 'lower',\n", + " 'vmin': 0,\n", + " 'vmax': max(np.nanmax(label), np.nanmax(prediction))\n", + " }\n", + " \n", + " speed_initial = get_speed(initial, -1)\n", + " im0 = axes[0].imshow(speed_initial, 
**plot_kwargs)\n", + " axes[0].set_title(f\"Initial Field Last Day\\n{init_dates[-1]}\", fontsize=12)\n", + " axes[0].set_xlabel('Longitude', fontsize=10)\n", + " axes[0].set_ylabel('Latitude', fontsize=10)\n", + "\n", + " \n", + " speed_label = get_speed(label, step)\n", + " im1 = axes[1].imshow(speed_label, **plot_kwargs)\n", + " axes[1].set_title(f\"True Values\\n{pred_date}\", fontsize=12)\n", + " axes[1].set_xlabel('Longitude', fontsize=10)\n", + "\n", + " speed_pred = get_speed(prediction, step)\n", + " im2 = axes[2].imshow(speed_pred, **plot_kwargs)\n", + " axes[2].set_title(f\"Predicted Values\\n{pred_date}\", fontsize=12)\n", + " axes[2].set_xlabel('Longitude', fontsize=10)\n", + "\n", + " cbar = fig.colorbar(im1, ax=axes, orientation='vertical', shrink=0.8, pad=0.03)\n", + " cbar.set_label('Current Speed (m/s)', fontsize=10)\n", + "\n", + " plt.tight_layout()\n", + "\n", + " if save_fig:\n", + " fig_name = f\"forecast_{input_end_date}_day{step+1}.png\"\n", + " plt.savefig(fig_name, dpi=300, bbox_inches='tight')\n", + " plt.close()\n", + " else:\n", + " plt.show()\n", + "\n", + "\n", + "# ============================== Main ==============================\n", + "if __name__ == \"__main__\":\n", + " backbone = 'Kuro_Unet_exp_128_20250324'\n", + " config = {\n", + " 'model_path': f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth',\n", + " 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc',\n", + " 'date_range': { \n", + " 'start': '2021-01-01',\n", + " 'end': '2021-12-31',\n", + " 'interval': 5 \n", + " },\n", + " 'pred_days': 120,\n", + " 'save_dir':f'./{backbone}_forecast_results'\n", + " }\n", + "\n", + " try:\n", + " model = load_single_model(config['model_path'])\n", + " data_loader = OceanDataLoader(config['data_path'])\n", + " \n", + " target_dates = data_loader.generate_target_dates(\n", + " start_date=config['date_range']['start'],\n", + " end_date=config['date_range']['end'],\n", + " 
interval_days=config['date_range']['interval']\n", + " )\n", + " logging.info(f\"Generated {len(target_dates)} initial dates, example: {target_dates[:5]}...\")\n", + " \n", + " process_batch(\n", + " model, \n", + " data_loader,\n", + " target_dates,\n", + " config['pred_days'],\n", + " config['save_dir']\n", + " )\n", + " \n", + " sample_dates = [target_dates[0], target_dates[-1]]\n", + " for date in sample_dates:\n", + " h5_file = os.path.join(config['save_dir'], f\"forecast_{date.replace('-','')}.h5\")\n", + " for step in [0, 60, 119]: \n", + " visualize_enhanced(h5_file, step=step)\n", + " \n", + " except Exception as e:\n", + " logging.error(f\"Main process error: {str(e)}\")\n", + " raise" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c5eaa58f-6e63-4876-a558-dbe5d085ed6f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "322a4033-f4fa-495c-b095-f39099d18811", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8346e2d-5eb7-4a04-bb6f-4a0831016f40", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/model/.ipynb_checkpoints/dit-checkpoint.py b/Exp3_Kuroshio_forecasting/inference_results_open/model/.ipynb_checkpoints/dit-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..50b9b3aeb61e7da9e202c49277ae62e6115aa1ff --- /dev/null +++ 
"""DiT (Diffusion Transformer) backbone with a SimVP-style spatial
encoder/decoder wrapper (``nmo_dit``).

NOTE(review): this file lives under ``.ipynb_checkpoints`` and mirrors
``model/dit.py`` — keep the two in sync.
"""
import torch
import torch.nn as nn
import numpy as np
import math
from functools import partial
from timm.models.vision_transformer import PatchEmbed, Attention, Mlp
import math  # NOTE(review): duplicate import of math (harmless)

def modulate(x, shift, scale):
    # adaLN modulation: shift/scale are per-sample (N, D); broadcast over tokens.
    return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1)

#################################################################################
#               Embedding Layers for Timesteps and Class Labels                 #
#################################################################################

class TimestepEmbedder(nn.Module):
    """
    Embeds scalar timesteps into vector representations.
    """
    def __init__(self, hidden_size, frequency_embedding_size=256):
        super().__init__()
        self.mlp = nn.Sequential(
            nn.Linear(frequency_embedding_size, hidden_size, bias=True),
            nn.SiLU(),
            nn.Linear(hidden_size, hidden_size, bias=True),
        )
        self.frequency_embedding_size = frequency_embedding_size

    @staticmethod
    def timestep_embedding(t, dim, max_period=10000):
        """
        Create sinusoidal timestep embeddings.

        t: (N,) tensor of timesteps; returns (N, dim) sinusoidal features.
        """
        half = dim // 2
        freqs = torch.exp(
            -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half
        ).to(device=t.device)
        args = t[:, None].float() * freqs[None]
        embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
        # Odd dim: pad one zero column so the output is exactly `dim` wide.
        if dim % 2:
            embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
        return embedding

    def forward(self, t):
        # NOTE(review): the three .to(device) moves below are redundant after the
        # first one — presumably defensive device pinning; harmless but noisy.
        t = t.to(next(self.parameters()).device)
        t_freq = self.timestep_embedding(t, self.frequency_embedding_size)
        t_freq = t_freq.to(next(self.parameters()).device)
        t_emb = self.mlp(t_freq)
        t_emb = t_emb.to(next(self.parameters()).device)
        return t_emb

class LabelEmbedder(nn.Module):
    """
    Embeds class labels into vector representations. Also handles label dropout for classifier-free guidance.
    """
    def __init__(self, num_classes, hidden_size, dropout_prob):
        super().__init__()
        # One extra row serves as the "null" label used for CFG dropout.
        use_cfg_embedding = dropout_prob > 0
        self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size)
        self.num_classes = num_classes
        self.dropout_prob = dropout_prob

    def token_drop(self, labels, force_drop_ids=None):
        """
        Drops labels to enable classifier-free guidance.
        """
        if force_drop_ids is None:
            drop_ids = torch.rand(labels.shape[0], device=labels.device) < self.dropout_prob
        else:
            drop_ids = force_drop_ids == 1
        # Dropped labels are replaced by the null-label index (num_classes).
        labels = torch.where(drop_ids, self.num_classes, labels)
        return labels

    def forward(self, labels, train, force_drop_ids=None):
        use_dropout = self.dropout_prob > 0
        if (train and use_dropout) or (force_drop_ids is not None):
            labels = self.token_drop(labels, force_drop_ids)
        embeddings = self.embedding_table(labels)
        return embeddings

#################################################################################
#                                 Core DiT Model                                #
#################################################################################

class DiTBlock(nn.Module):
    """
    A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.
    """
    def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs):
        super().__init__()
        # elementwise_affine=False: scale/shift come from adaLN, not LayerNorm.
        self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)
        self.attn = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, **block_kwargs)
        self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)
        mlp_hidden_dim = int(hidden_size * mlp_ratio)
        approx_gelu = lambda: nn.GELU()
        self.mlp = Mlp(in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0)
        # Produces 6 modulation vectors: shift/scale/gate for MSA and MLP paths.
        self.adaLN_modulation = nn.Sequential(
            nn.SiLU(),
            nn.Linear(hidden_size, 6 * hidden_size, bias=True)
        )

    def forward(self, x, c):
        shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(c).chunk(6, dim=1)
        x = x + gate_msa.unsqueeze(1) * self.attn(modulate(self.norm1(x), shift_msa, scale_msa))
        x = x + gate_mlp.unsqueeze(1) * self.mlp(modulate(self.norm2(x), shift_mlp, scale_mlp))
        return x

class FinalLayer(nn.Module):
    """
    The final layer of DiT.
    """
    def __init__(self, hidden_size, patch_size, out_channels):
        super().__init__()
        self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)
        # Projects each token back to a patch of pixels.
        self.linear = nn.Linear(hidden_size, patch_size[0] * patch_size[1] * out_channels, bias=True)
        self.adaLN_modulation = nn.Sequential(
            nn.SiLU(),
            nn.Linear(hidden_size, 2 * hidden_size, bias=True)
        )

    def forward(self, x, c):
        shift, scale = self.adaLN_modulation(c).chunk(2, dim=1)
        x = modulate(self.norm_final(x), shift, scale)
        x = self.linear(x)
        return x

class DiT(nn.Module):
    """
    Diffusion model with a Transformer backbone.
    """
    def __init__(
        self,
        input_size=(32, 32),
        patch_size=(2, 2),
        in_channels=4,
        hidden_size=1152,
        depth=28,
        num_heads=16,
        mlp_ratio=4.0,
        class_dropout_prob=0.1,
        num_classes=None,
        learn_sigma=True,
    ):
        super().__init__()
        self.learn_sigma = learn_sigma
        self.in_channels = in_channels
        # learn_sigma doubles the output channels (predicted noise + variance).
        self.out_channels = in_channels * 2 if learn_sigma else in_channels
        self.patch_size = patch_size
        self.num_heads = num_heads
        self.hidden_size = hidden_size

        self.x_embedder = PatchEmbed(
            img_size=input_size, patch_size=patch_size, in_chans=in_channels, embed_dim=hidden_size, bias=True
        )
        self.t_embedder = TimestepEmbedder(hidden_size)
        num_patches = self.x_embedder.num_patches
        # A fixed (non-learned) sin-cos positional embedding will be used.
        self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size), requires_grad=False)

        self.blocks = nn.ModuleList([
            DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) for _ in range(depth)
        ])
        self.final_layer = FinalLayer(hidden_size, patch_size, self.out_channels)
        self.initialize_weights()

        # Only initialize y_embedder if num_classes is not None.
        # NOTE(review): created after initialize_weights(), so _basic_init never
        # sees it — _basic_init only touches nn.Linear, so likely inconsequential;
        # confirm if a custom embedding init was intended.
        if num_classes is not None:
            self.y_embedder = LabelEmbedder(num_classes, hidden_size, class_dropout_prob)
        else:
            self.y_embedder = None

    def initialize_weights(self):
        # Initialize transformer layers (order of RNG draws matters for
        # checkpoint reproducibility — do not reorder module creation).
        def _basic_init(module):
            if isinstance(module, nn.Linear):
                torch.nn.init.xavier_uniform_(module.weight)
                if module.bias is not None:
                    nn.init.constant_(module.bias, 0)
        self.apply(_basic_init)

        # Get the grid size for the positional embedding.
        grid_size_h, grid_size_w = self.x_embedder.grid_size
        pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (grid_size_h, grid_size_w))
        self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0))

        # Initialize patch_embed like an nn.Linear (instead of nn.Conv2d).
        w = self.x_embedder.proj.weight.data
        nn.init.xavier_uniform_(w.view([w.shape[0], -1]))
        nn.init.constant_(self.x_embedder.proj.bias, 0)

        # Initialize the timestep embedding MLP.
        nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)
        nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)

        # Zero-initialize the adaLN modulation layers in the DiT blocks
        # (adaLN-Zero: blocks start as identity mappings).
        for block in self.blocks:
            nn.init.constant_(block.adaLN_modulation[-1].weight, 0)
            nn.init.constant_(block.adaLN_modulation[-1].bias, 0)

        # Zero-initialize the output layer.
        nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)
        nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)
        nn.init.constant_(self.final_layer.linear.weight, 0)
        nn.init.constant_(self.final_layer.linear.bias, 0)

    def unpatchify(self, x):
        """
        x: (N, T, patch_size[0]*patch_size[1]*C)
        imgs: (N, H, W, C)
        """
        c = self.out_channels
        p_h, p_w = self.x_embedder.patch_size  # tuple
        h_patches, w_patches = self.x_embedder.grid_size
        assert h_patches * w_patches == x.shape[1], "Mismatch in number of patches"

        x = x.reshape(shape=(x.shape[0], h_patches, w_patches, p_h, p_w, c))
        x = torch.einsum('nhwpqc->nchpwq', x)
        imgs = x.reshape(shape=(x.shape[0], c, h_patches * p_h, w_patches * p_w))
        return imgs

    def forward(self, x, t, y=None):
        """
        Forward pass of DiT.
        x: (N, C, H, W) tensor of spatial inputs
        t: (N,) tensor of diffusion timesteps
        y: (N,) tensor of class labels or None
        """
        x = self.x_embedder(x) + self.pos_embed  # (N, T, D), where T = H * W / (patch_size[0] * patch_size[1])
        t = self.t_embedder(t)                   # (N, D)
        if self.y_embedder is not None and y is not None:
            y = self.y_embedder(y, self.training)  # (N, D)
            c = t + y                              # (N, D)
        else:
            c = t                                  # (N, D)
        for block in self.blocks:
            x = block(x, c)                        # (N, T, D)
        x = self.final_layer(x, c)                 # (N, T, patch_size[0] * patch_size[1] * out_channels)
        x = self.unpatchify(x)                     # (N, out_channels, H, W)
        return x

    def forward_with_cfg(self, x, t, y, cfg_scale):
        """
        Forward pass of DiT with classifier-free guidance.
        """
        # Batch is [cond | uncond]: run both halves on the same latents.
        half = x[: len(x) // 2]
        combined = torch.cat([half, half], dim=0)
        model_out = self.forward(combined, t, y)
        eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
        cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
        half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
        eps = torch.cat([half_eps, half_eps], dim=0)
        return torch.cat([eps, rest], dim=1)

#################################################################################
#                   Sine/Cosine Positional Embedding Functions                  #
#################################################################################

def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0):
    """
    grid_size: (grid_size_h, grid_size_w)
    return:
    pos_embed: [grid_size_h*grid_size_w, embed_dim] or [1+grid_size_h*grid_size_w, embed_dim]
    """
    grid_h = np.arange(grid_size[0], dtype=np.float32)
    grid_w = np.arange(grid_size[1], dtype=np.float32)
    grid = np.meshgrid(grid_w, grid_h)  # w goes first here
    grid = np.stack(grid, axis=0)

    grid = grid.reshape([2, grid_size[0] * grid_size[1]])
    pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)
    if cls_token and extra_tokens > 0:
        pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0)
    return pos_embed

def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):
    assert embed_dim % 2 == 0

    # Use half of the dimensions to encode grid_h.
    emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0])  # (H*W, D/2)
    emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1])  # (H*W, D/2)

    emb = np.concatenate([emb_h, emb_w], axis=1)  # (H*W, D)
    return emb

def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
    """
    embed_dim: output dimension for each position
    pos: a list of positions to encode: size (M,)
    out: (M, D)
    """
    assert embed_dim % 2 == 0
    omega = np.arange(embed_dim // 2, dtype=np.float64)
    omega /= embed_dim / 2.
    omega = 1. / 10000**omega  # (D/2,)

    pos = pos.reshape(-1)  # (M,)
    out = np.einsum('m,d->md', pos, omega)  # (M, D/2)

    emb_sin = np.sin(out)  # (M, D/2)
    emb_cos = np.cos(out)  # (M, D/2)

    emb = np.concatenate([emb_sin, emb_cos], axis=1)  # (M, D)
    return emb

#################################################################################
#                               Other Components                                #
#################################################################################

def stride_generator(N, reverse=False):
    # Alternating [1, 2, 1, 2, ...] strides for the spatial encoder/decoder.
    strides = [1, 2]*10
    if reverse: return list(reversed(strides[:N]))
    else: return strides[:N]

class ConvSC(nn.Module):
    # Conv (or transposed conv) + BatchNorm + GELU spatial block.
    def __init__(self, in_channels, out_channels, stride=1, transpose=False):
        super(ConvSC, self).__init__()
        if transpose:
            self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=3, stride=stride,
                                           padding=1, output_padding=stride-1)
        else:
            self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1)
        self.norm = nn.BatchNorm2d(out_channels)
        self.act = nn.GELU()

    def forward(self, x):
        return self.act(self.norm(self.conv(x)))

class Inception(nn.Module):
    # Multi-kernel grouped-conv branches fused by a 1x1 conv.
    # NOTE(review): mutable default arg incep_ker=[...] — safe only because it
    # is never mutated; consider a tuple in the canonical (non-checkpoint) file.
    def __init__(self, in_channels, hidden_channels, out_channels, incep_ker=[3,5,7,11], groups=4):
        super(Inception, self).__init__()
        self.branch1 = nn.Conv2d(in_channels, hidden_channels, kernel_size=1)
        self.branch2 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[0], padding=incep_ker[0]//2, groups=groups)
        self.branch3 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[1], padding=incep_ker[1]//2, groups=groups)
        self.branch4 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[2], padding=incep_ker[2]//2, groups=groups)
        self.branch5 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[3], padding=incep_ker[3]//2, groups=groups)
        self.conv = nn.Conv2d(hidden_channels * 5, out_channels, kernel_size=1)
        self.norm = nn.BatchNorm2d(out_channels)
        self.act = nn.GELU()

    def forward(self, x):
        x1 = self.branch1(x)
        x2 = self.branch2(x)
        x3 = self.branch3(x)
        x4 = self.branch4(x)
        x5 = self.branch5(x)
        x = torch.cat([x1, x2, x3, x4, x5], dim=1)
        x = self.conv(x)
        x = self.act(self.norm(x))
        return x

class Encoder(nn.Module):
    # Stack of ConvSC layers; keeps every intermediate activation as a skip.
    def __init__(self, C_in, C_hid, N_S):
        super(Encoder, self).__init__()
        strides = stride_generator(N_S)
        layers = [ConvSC(C_in, C_hid, stride=strides[0])]
        for s in strides[1:]:
            layers.append(ConvSC(C_hid, C_hid, stride=s))
        self.enc = nn.Sequential(*layers)

    def forward(self, x):
        skips = []
        for layer in self.enc:
            x = layer(x)
            skips.append(x)
        return x, skips  # return all skips

class Decoder(nn.Module):
    # Mirror of Encoder; the last layer consumes one concatenated skip.
    def __init__(self, C_hid, C_out, N_S):
        super(Decoder, self).__init__()
        strides = stride_generator(N_S, reverse=True)
        layers = []
        for s in strides[:-1]:
            layers.append(ConvSC(C_hid, C_hid, stride=s, transpose=True))
        layers.append(ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True))
        self.dec = nn.Sequential(*layers)
        self.readout = nn.Conv2d(C_hid, C_out, 1)

    def forward(self, hid, skip):
        for i in range(len(self.dec)-1):
            hid = self.dec[i](hid)
        hid = self.dec[-1](torch.cat([hid, skip], dim=1))
        return self.readout(hid)

# class Temporal_evo(nn.Module):
#     def __init__(self, channel_in, channel_hid, N_T, h, w, incep_ker=[3, 5, 7, 11], groups=8):
#         super(Temporal_evo, self).__init__()
#
#         self.N_T = N_T
#         enc_layers = [Inception(channel_in, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)]
#         for _ in range(1, N_T - 1):
#             enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups))
#         enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups))
#
#         dec_layers = [Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)]
#         for _ in range(1, N_T - 1):
#             dec_layers.append(Inception(2 * channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups))
#         dec_layers.append(Inception(2 * channel_hid, channel_hid // 2, channel_in, incep_ker=incep_ker, groups=groups))
#         norm_layer = partial(nn.LayerNorm, eps=1e-6)
#         self.norm = norm_layer(channel_hid)
#
#         self.enc = nn.Sequential(*enc_layers)
#         self.dec = nn.Sequential(*dec_layers)
#
#     def forward(self, x):
#         B, T, C, H, W = x.shape
#         x = x.reshape(B, T * C, H, W)
#
#         # Downsampling
#         skips = []
#         for i in range(self.N_T):
#             x = self.enc[i](x)
#             if i < self.N_T - 1:
#                 skips.append(x)
#
#         # Upsampling
#         x = self.dec[0](x)
#         for i in range(1, self.N_T):
#             x = self.dec[i](torch.cat([x, skips[-i]], dim=1))
#
#         x = x.reshape(B, T, C, H, W)
#         return x

class nmo_dit(nn.Module):
    """SimVP-style wrapper: spatial Encoder → DiT temporal evolution → Decoder."""
    def __init__(self, shape_in, hid_S=32, hid_T=64, N_S=4, N_T=8, time_step=1000, incep_ker=[3,5,7,11], groups=4,
                 in_time_seq_length=10, out_time_seq_length=10):
        super(nmo_dit, self).__init__()
        B, T, C, H, W = shape_in

        strides = stride_generator(N_S)
        num_stride2_layers = strides[:N_S].count(2)
        self.downsample_factor = 2 ** num_stride2_layers
        self.H1 = H // self.downsample_factor
        self.W1 = W // self.downsample_factor

        self.in_time_seq_length = in_time_seq_length
        self.out_time_seq_length = out_time_seq_length
        self.enc = Encoder(C, hid_S, N_S)
        # self.hid = Temporal_evo(T*hid_S, hid_T, N_T, self.H1, self.W1, incep_ker, groups)
        self.dit_block = DiT(
            input_size=(self.H1, self.W1),
            patch_size=(1, 1),  # Changed patch_size to (1, 1)
            in_channels=T*hid_S,
            hidden_size=256,
            depth=12,
            num_heads=2,
            mlp_ratio=4.0,
            class_dropout_prob=0.0,
            num_classes=None,
            learn_sigma=False,
        )

        self.dec = Decoder(hid_S, C, N_S)
        # NOTE(review): timesteps are sampled once at construction (fixed per
        # model instance) and stored as a plain CPU tensor — confirm this is
        # intentional and that it is moved with the model when using a GPU.
        self.time_step = torch.randint(0, time_step, (B,))

    def forward(self, x_raw):
        B, T, C, H, W = x_raw.shape
        # Fold time into the batch for the per-frame spatial encoder.
        x = x_raw.view(B*T, C, H, W)

        embed, skips = self.enc(x)
        skip = skips[0]
        _, C_, H_, W_ = embed.shape

        z = embed.view(B, T, C_, H_, W_)
        # Fold time into channels for the DiT temporal block.
        bias = z.reshape(B, T*C_, H_, W_)
        bias_hid = self.dit_block(bias, self.time_step)

        hid = bias_hid.reshape(B*T, C_, H_, W_)  # Now the dimensions should match
        Y = self.dec(hid, skip)

        Y = Y.reshape(B, T, -1, H, W)
        return Y
# ===== file boundary in the original diff:
# Exp3_Kuroshio_forecasting/inference_results_open/model/Triton_model.py =====
import torch
from torch import nn
import math
from timm.layers import DropPath, trunc_normal_

def stride_generator(N, reverse=False):
    # Alternating [1, 2, 1, 2, ...] strides for the spatial encoder/decoder.
    strides = [1, 2] * 10
    if reverse:
        return list(reversed(strides[:N]))
    else:
        return strides[:N]

class MLP(nn.Module):
    """Token-wise two-layer MLP (Linear → act → Linear) with dropout."""
    def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
        super(MLP, self).__init__()
        out_features = out_features or in_features
        hidden_features = hidden_features or in_features
        self.fc1 = nn.Linear(in_features, hidden_features)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden_features, out_features)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.drop(x)
        x = self.fc2(x)
        x = self.drop(x)
        return x

class ConvMLP(nn.Module):
    """Channel-wise MLP implemented with 1x1 convolutions (for NCHW tensors)."""
    def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):
        super(ConvMLP, self).__init__()
        out_features = out_features or in_features
        hidden_features = hidden_features or in_features
        self.fc1 = nn.Conv2d(in_features, hidden_features, 1)
        self.act = act_layer()
        self.fc2 = nn.Conv2d(hidden_features, out_features, 1)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.drop(x)
        x = self.fc2(x)
        x = self.drop(x)
        return x

class Attention(nn.Module):
    """Standard multi-head self-attention over (B, N, C) token sequences."""
    def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):
        super(Attention, self).__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        self.scale = qk_scale or head_dim ** -0.5

        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x):
        B, N, C = x.shape
        qkv = (
            self.qkv(x)
            .reshape(B, N, 3, self.num_heads, C // self.num_heads)
            .permute(2, 0, 3, 1, 4)
        )
        q, k, v = qkv[0], qkv[1], qkv[2]

        attn = (q @ k.transpose(-2, -1)) * self.scale
        attn = attn.softmax(dim=-1)
        attn = self.attn_drop(attn)

        x = (attn @ v).transpose(1, 2).reshape(B, N, C)
        x = self.proj(x)
        x = self.proj_drop(x)
        return x

class ConvBlock(nn.Module):
    """Uniformer-style convolutional block: depthwise pos-embed, 5x5 depthwise
    "attention" between 1x1 convs, then a ConvMLP — all with residuals."""
    def __init__(
        self,
        dim,
        num_heads=4,
        mlp_ratio=4.,
        qkv_bias=False,
        qk_scale=None,
        drop=0.,
        attn_drop=0.,
        drop_path=0.,
        act_layer=nn.GELU,
        norm_layer=nn.LayerNorm
    ):
        super(ConvBlock, self).__init__()
        self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)
        self.norm1 = nn.BatchNorm2d(dim)
        self.conv1 = nn.Conv2d(dim, dim, 1)
        self.conv2 = nn.Conv2d(dim, dim, 1)
        self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim)
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.norm2 = nn.BatchNorm2d(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = ConvMLP(
            in_features=dim,
            hidden_features=mlp_hidden_dim,
            act_layer=act_layer,
            drop=drop
        )

        self.apply(self._init_weights)

    def _init_weights(self, m):
        # NOTE(review): initialization via apply() is RNG-order sensitive; do not
        # reorder module creation without re-checking checkpoint compatibility.
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (
                m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            )
            fan_out //= m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {}

    def forward(self, x):
        x = x + self.pos_embed(x)
        x = x + self.drop_path(
            self.conv2(self.attn(self.conv1(self.norm1(x))))
        )
        x = x + self.drop_path(self.mlp(self.norm2(x)))
        return x

class SelfAttentionBlock(nn.Module):
    """Transformer block over flattened spatial tokens, with LayerScale
    (gamma_1/gamma_2) and a depthwise-conv positional embedding."""
    def __init__(
        self,
        dim,
        num_heads,
        mlp_ratio=4.,
        qkv_bias=False,
        qk_scale=None,
        drop=0.,
        attn_drop=0.,
        drop_path=0.,
        init_value=1e-6,
        act_layer=nn.GELU,
        norm_layer=nn.LayerNorm
    ):
        super(SelfAttentionBlock, self).__init__()
        self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)
        self.norm1 = norm_layer(dim)
        self.attn = Attention(
            dim,
            num_heads=num_heads,
            qkv_bias=qkv_bias,
            qk_scale=qk_scale,
            attn_drop=attn_drop,
            proj_drop=drop
        )
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.norm2 = norm_layer(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = MLP(
            in_features=dim,
            hidden_features=mlp_hidden_dim,
            act_layer=act_layer,
            drop=drop
        )
        self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)
        self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'gamma_1', 'gamma_2'}

    def forward(self, x):
        x = x + self.pos_embed(x)
        # Here N is the channel dim of NCHW input; tokens are the H*W positions.
        B, N, H, W = x.shape
        x = x.flatten(2).transpose(1, 2)
        x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x)))
        x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x)))
        x = x.transpose(1, 2).reshape(B, N, H, W)
        return x

def UniformerSubBlock(
    embed_dims,
    mlp_ratio=4.,
    drop=0.,
    drop_path=0.,
    init_value=1e-6,
    block_type='Conv'
):
    # Factory: convolutional block for early stages, MHSA block for later ones.
    assert block_type in ['Conv', 'MHSA']
    if block_type == 'Conv':
        return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
    else:
        return SelfAttentionBlock(
            dim=embed_dims,
            num_heads=8,
            mlp_ratio=mlp_ratio,
            qkv_bias=True,
            drop=drop,
            drop_path=drop_path,
            init_value=init_value
        )

class SpatioTemporalEvolutionBlock(nn.Module):
    """One evolution stage; uses MHSA when channels are unchanged past layer 0,
    otherwise a Conv block, with an optional 1x1 channel projection."""
    def __init__(
        self,
        in_channels,
        out_channels,
        input_resolution=None,
        mlp_ratio=8.,
        drop=0.0,
        drop_path=0.0,
        layer_i=0
    ):
        super(SpatioTemporalEvolutionBlock, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv'
        self.block = UniformerSubBlock(
            in_channels,
            mlp_ratio=mlp_ratio,
            drop=drop,
            drop_path=drop_path,
            block_type=block_type
        )

        if in_channels != out_channels:
            self.reduction = nn.Conv2d(
                in_channels,
                out_channels,
                kernel_size=1,
                stride=1,
                padding=0
            )

    def forward(self, x):
        z = self.block(x)
        if self.in_channels != self.out_channels:
            z = self.reduction(z)
        return z

class SpatioTemporalEvolution(nn.Module):
    """Stack of evolution blocks: channel_in → channel_hid → ... → channel_in,
    applied with time folded into the channel dimension."""
    def __init__(
        self,
        channel_in,
        channel_hid,
        N2,
        input_resolution=None,
        mlp_ratio=4.,
        drop=0.0,
        drop_path=0.1
    ):
        super(SpatioTemporalEvolution, self).__init__()
        assert N2 >= 2 and mlp_ratio > 1
        self.N2 = N2
        # Linearly ramped stochastic-depth rates across the stack.
        dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)]

        evolution_layers = [SpatioTemporalEvolutionBlock(
            channel_in,
            channel_hid,
            input_resolution,
            mlp_ratio=mlp_ratio,
            drop=drop,
            drop_path=dpr[0],
            layer_i=0
        )]

        for i in range(1, N2 - 1):
            evolution_layers.append(SpatioTemporalEvolutionBlock(
                channel_hid,
                channel_hid,
                input_resolution,
                mlp_ratio=mlp_ratio,
                drop=drop,
                drop_path=dpr[i],
                layer_i=i
            ))

        evolution_layers.append(SpatioTemporalEvolutionBlock(
            channel_hid,
            channel_in,
            input_resolution,
            mlp_ratio=mlp_ratio,
            drop=drop,
            drop_path=drop_path,
            layer_i=N2 - 1
        ))
        self.enc = nn.Sequential(*evolution_layers)

    def forward(self, x):
        B, T, C, H, W = x.shape
        x = x.reshape(B, T * C, H, W)
        z = x
        for i in range(self.N2):
            z = self.enc[i](z)
        y = z.reshape(B, T, C, H, W)
        return y

class BasicConv2d(nn.Module):
    """Conv or transposed conv with optional GroupNorm + LeakyReLU."""
    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size,
        stride,
        padding,
        transpose=False,
        act_norm=False
    ):
        super(BasicConv2d, self).__init__()
        self.act_norm = act_norm
        if not transpose:
            self.conv = nn.Conv2d(
                in_channels,
                out_channels,
                kernel_size=kernel_size,
                stride=stride,
                padding=padding
            )
        else:
            self.conv = nn.ConvTranspose2d(
                in_channels,
                out_channels,
                kernel_size=kernel_size,
                stride=stride,
                padding=padding,
                output_padding=stride // 2
            )
        self.norm = nn.GroupNorm(2, out_channels)
        self.act = nn.LeakyReLU(0.2, inplace=True)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.act(self.norm(y))
        return y

class ConvDynamicsLayer(nn.Module):
    """3x3 BasicConv2d wrapper; transposition is disabled at stride 1."""
    def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True):
        super(ConvDynamicsLayer, self).__init__()
        if stride == 1:
            transpose = False
        self.conv = BasicConv2d(
            C_in,
            C_out,
            kernel_size=3,
            stride=stride,
            padding=1,
            transpose=transpose,
            act_norm=act_norm
        )

    def forward(self, x):
        y = self.conv(x)
        return y

class MultiGroupConv2d(nn.Module):
    """Grouped conv with optional GroupNorm + LeakyReLU; falls back to a single
    group when channels are not divisible by ``groups``."""
    def __init__(
        self,
        in_channels,
        out_channels,
        kernel_size,
        stride,
        padding,
        groups,
        act_norm=False
    ):
        super(MultiGroupConv2d, self).__init__()
        self.act_norm = act_norm
        if in_channels % groups != 0:
            groups = 1
        self.conv = nn.Conv2d(
            in_channels,
            out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            groups=groups
        )
        self.norm = nn.GroupNorm(groups, out_channels)
        self.activate = nn.LeakyReLU(0.2, inplace=True)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.activate(self.norm(y))
        return y


class AtmosphericEncoder(nn.Module):
    """Spatial encoder; also returns the first layer's activation as a skip."""
    def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers):
        super(AtmosphericEncoder, self).__init__()
        strides = stride_generator(num_spatial_layers)
        self.enc = nn.Sequential(
            ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]),
            *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]]
        )

    def forward(self, x):
        enc1 = self.enc[0](x)
        latent = enc1
        for i in range(1, len(self.enc)):
            latent = self.enc[i](latent)
        return latent, enc1

class AtmosphericDecoder(nn.Module):
    """Spatial decoder; the last layer consumes the encoder skip connection."""
    def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers):
        super(AtmosphericDecoder, self).__init__()
        strides = stride_generator(num_spatial_layers, reverse=True)
        self.dec = nn.Sequential(
            *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]],
            ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True)
        )
        self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1)

    # NOTE(review): enc1 defaults to None but torch.cat below requires a tensor —
    # calling without enc1 would raise; confirm the default is intentional.
    def forward(self, hid, enc1=None):
        for i in range(0, len(self.dec) - 1):
            hid = self.dec[i](hid)
        Y = self.dec[-1](torch.cat([hid, enc1], dim=1))
        Y = self.readout(Y)
        return Y

class Triton(nn.Module):
    """Encoder → spatio-temporal evolution → decoder forecasting model."""
    def __init__(
        self,
        shape_in,
        spatial_hidden_dim=64,
        output_channels=4,
        temporal_hidden_dim=128,
        num_spatial_layers=4,
        num_temporal_layers=8,
        in_time_seq_length=10,
        out_time_seq_length=10
    ):
        super(Triton, self).__init__()
        T, C, H, W = shape_in
        # NOTE(review): the ternary binds over the whole expression, and the
        # "+ 1 if H % 3 == 0" correction looks ad hoc — confirm H1 matches the
        # encoder's actual output height for all intended input sizes.
        self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2))
        self.W1 = int(W / 2 ** (num_spatial_layers / 2))
        self.output_dim = output_channels
        self.input_time_seq_length = in_time_seq_length
        self.output_time_seq_length = out_time_seq_length

        self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers)
        self.temporal_evolution = SpatioTemporalEvolution(
            T * spatial_hidden_dim,
            temporal_hidden_dim,
            num_temporal_layers,
            input_resolution=[self.H1, self.W1],
            mlp_ratio=4.0,
            drop_path=0.1
        )
        self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers)

    def forward(self, input_state):
        """
        1. Reshape the input state to match the encoder's input requirements.
        2. Extract features using the Atmospheric Encoder and obtain skip connections.
        3. Perform spatio-temporal evolution on the encoded features.
        4. Decode the evolved features to generate the final output.
        """
        batch_size, temporal_length, channels, height, width = input_state.shape
        reshaped_input = input_state.view(batch_size * temporal_length, channels, height, width)

        encoded_features, skip_connection = self.atmospheric_encoder(reshaped_input)
        _, encoded_channels, encoded_height, encoded_width = encoded_features.shape
        encoded_features = encoded_features.view(batch_size, temporal_length, encoded_channels, encoded_height, encoded_width)

        temporal_bias = encoded_features
        temporal_hidden = self.temporal_evolution(temporal_bias)
        reshaped_hidden = temporal_hidden.view(batch_size * temporal_length, encoded_channels, encoded_height, encoded_width)

        decoded_output = self.atmospheric_decoder(reshaped_hidden, skip_connection)
        final_output = decoded_output.view(batch_size, temporal_length, -1, height, width)

        return final_output


def count_parameters(model):
    # Total number of trainable parameters.
    return sum(p.numel() for p in model.parameters() if p.requires_grad)

if __name__ == '__main__':
    # Smoke test: one forward pass on random data.
    inputs = torch.randn(1, 10, 2, 128, 128)
    model = Triton(
        shape_in=(10, 2, 128, 128),
        spatial_hidden_dim=32,
        output_channels=1,
        temporal_hidden_dim=64,
        num_spatial_layers=4,
        num_temporal_layers=8)
    output = model(inputs)
    print(output.shape)
# ===== file boundary in the original diff:
# Exp3_Kuroshio_forecasting/inference_results_open/model/U_net.py =====
import torch
import torch.nn as nn

class DoubleConv(nn.Module):
    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.double_conv = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),
            # NOTE(review): source chunk truncates here (definition continues
            # past the visible diff window).
nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x): + return self.double_conv(x) + +class Down(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.maxpool_conv = nn.Sequential( + nn.MaxPool2d(2), + DoubleConv(in_channels, out_channels) + ) + + def forward(self, x): + return self.maxpool_conv(x) + +class Up(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.up = nn.ConvTranspose2d(in_channels, in_channels // 2, kernel_size=2, stride=2) + self.conv = DoubleConv(in_channels, out_channels) + + def forward(self, x1, x2): + x1 = self.up(x1) + diffY = x2.size()[2] - x1.size()[2] + diffX = x2.size()[3] - x1.size()[3] + x1 = nn.functional.pad(x1, [diffX // 2, diffX - diffX // 2, + diffY // 2, diffY - diffY // 2]) + x = torch.cat([x2, x1], dim=1) + return self.conv(x) + +class OutConv(nn.Module): + def __init__(self, in_channels, out_channels): + super(OutConv, self).__init__() + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1) + + def forward(self, x): + return self.conv(x) + +class UNet(nn.Module): + def __init__(self, n_channels, n_classes): + super(UNet, self).__init__() + self.n_channels = n_channels + self.n_classes = n_classes + + self.inc = DoubleConv(n_channels, 64) + self.down1 = Down(64, 128) + self.down2 = Down(128, 256) + self.down3 = Down(256, 512) + self.up1 = Up(512, 256) + self.up2 = Up(256, 128) + self.up3 = Up(128, 64) + self.outc = OutConv(64, n_classes) + + def forward(self, x): + is_5d = x.dim() == 5 + if is_5d: + B, T, C, H, W = x.size() + x = x.view(B * T, C, H, W) + else: + B, C, H, W = x.size() + T = 1 + + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x = self.up1(x4, x3) + x = self.up2(x, x2) + x = self.up3(x, x1) + logits = self.outc(x) + + if is_5d: + logits = logits.view(B, T, self.n_classes, H, W) + + return logits + +# model = UNet(n_channels=2, n_classes=2) +# input_4d = 
torch.randn(1, 2, 128, 128) +# output_4d = model(input_4d) +# print(f"4D Output shape: {output_4d.shape}") # 应为 [1, 2, 128, 128] + +# input_5d = torch.randn(2, 10, 2, 128, 128) +# output_5d = model(input_5d) +# print(f"5D Output shape: {output_5d.shape}") # 应为 [2, 3, 2, 128, 128] \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Dit.cpython-310.pyc b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Dit.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0865dea94e69bbfb8a4a7e43ea84625ae2f04db9 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Dit.cpython-310.pyc differ diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Triton_model.cpython-310.pyc b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Triton_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..26b66ff898ec63c3cff7bbe0cd5326bc7d10e2af Binary files /dev/null and b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Triton_model.cpython-310.pyc differ diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Triton_model.cpython-38.pyc b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Triton_model.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee007e083ab499817c9f739115bbf3ffb2e5a413 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/Triton_model.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/U_net.cpython-310.pyc b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/U_net.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7fa6d0d24c31a2c3076cb5437abe967d1d6c606f Binary files /dev/null and 
b/Exp3_Kuroshio_forecasting/inference_results_open/model/__pycache__/U_net.cpython-310.pyc differ diff --git a/Exp3_Kuroshio_forecasting/inference_results_open/model/simvp.py b/Exp3_Kuroshio_forecasting/inference_results_open/model/simvp.py new file mode 100644 index 0000000000000000000000000000000000000000..175e0fe960a2ec23fa678b52a12d062366f1d814 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/inference_results_open/model/simvp.py @@ -0,0 +1,180 @@ +from torch import nn +import torch +from torch import nn + +class BasicConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, transpose=False, act_norm=False): + super(BasicConv2d, self).__init__() + self.act_norm=act_norm + if not transpose: + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) + else: + self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,output_padding=stride //2 ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvSC, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride, + padding=1, transpose=transpose, act_norm=act_norm) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, groups, act_norm=False): + super(GroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = 
nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class Inception(nn.Module): + def __init__(self, C_in, C_hid, C_out, incep_ker=[3,5,7,11], groups=8): + super(Inception, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1, padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y + + + + +def stride_generator(N, reverse=False): + strides = [1, 2]*10 + if reverse: return list(reversed(strides[:N])) + else: return strides[:N] + +class Encoder(nn.Module): + def __init__(self,C_in, C_hid, N_S): + super(Encoder,self).__init__() + strides = stride_generator(N_S) + self.enc = nn.Sequential( + ConvSC(C_in, C_hid, stride=strides[0]), + *[ConvSC(C_hid, C_hid, stride=s) for s in strides[1:]] + ) + + def forward(self,x):# B*4, 3, 128, 128 + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1,len(self.enc)): + latent = self.enc[i](latent) + return latent,enc1 + + +class Decoder(nn.Module): + def __init__(self,C_hid, C_out, N_S): + super(Decoder,self).__init__() + strides = stride_generator(N_S, reverse=True) + self.dec = nn.Sequential( + *[ConvSC(C_hid, C_hid, stride=s, transpose=True) for s in strides[:-1]], + ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(C_hid, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0,len(self.dec)-1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Mid_Xnet(nn.Module): + def __init__(self, channel_in, channel_hid, N_T, incep_ker = [3,5,7,11], groups=8): + super(Mid_Xnet, self).__init__() + + self.N_T = N_T + enc_layers = [Inception(channel_in, 
channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)] + for i in range(1, N_T-1): + enc_layers.append(Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + enc_layers.append(Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + + dec_layers = [Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)] + for i in range(1, N_T-1): + dec_layers.append(Inception(2*channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + dec_layers.append(Inception(2*channel_hid, channel_hid//2, channel_in, incep_ker= incep_ker, groups=groups)) + + self.enc = nn.Sequential(*enc_layers) + self.dec = nn.Sequential(*dec_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T*C, H, W) + + # encoder + skips = [] + z = x + for i in range(self.N_T): + z = self.enc[i](z) + if i < self.N_T - 1: + skips.append(z) + + # decoder + z = self.dec[0](z) + for i in range(1, self.N_T): + z = self.dec[i](torch.cat([z, skips[-i]], dim=1)) + + y = z.reshape(B, T, C, H, W) + return y + + +class SimVP(nn.Module): + def __init__(self, shape_in, hid_S=16, hid_T=256, N_S=4, N_T=8, output_dim = 1, incep_ker=[3,5,7,11], groups=8): + super(SimVP, self).__init__() + T, C, H, W = shape_in + self.output_dim = output_dim + self.enc = Encoder(C, hid_S, N_S) + self.hid = Mid_Xnet(T*hid_S, hid_T, N_T, incep_ker, groups) + self.dec = Decoder(hid_S, self.output_dim, N_S) + + + def forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + embed, skip = self.enc(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + hid = self.hid(z) + hid = hid.reshape(B*T, C_, H_, W_) + + Y = self.dec(hid, skip) + Y = Y.reshape(B, T, -1, H, W) + return Y + + +if __name__ == "__main__": + inputs = torch.randn(1, 10, 2, 64, 448) + model = SimVP(shape_in=(10, 2, 64, 448), hid_S=32, hid_T=128, output_dim = 2) + outputs = 
model(inputs) + print(outputs.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_ConvLSTM_exp1_20250311_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_ConvLSTM_exp1_20250311_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..0bba25347666b8aacf82d044e13f5d0041a43dca --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_ConvLSTM_exp1_20250311_training_log-checkpoint.log @@ -0,0 +1,125 @@ +2025-03-11 15:48:06,285 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:48:06,312 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:48:06,374 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:48:06,406 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:48:06,446 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:48:06,456 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:48:06,469 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:48:06,485 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:49:54,995 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:49:55,087 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:49:55,148 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:49:55,169 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:49:55,179 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:49:55,212 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:49:55,222 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:49:55,225 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:51:02,624 Epoch 1/2000 +2025-03-11 15:51:56,298 Added key: store_based_barrier_key:1 to store for rank: 5 
+2025-03-11 15:51:56,355 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:51:56,375 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:51:56,413 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:51:56,421 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:51:56,468 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:51:56,503 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:51:56,508 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:53:00,562 Epoch 1/2000 +2025-03-11 15:53:48,745 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:53:48,752 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:53:48,815 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:53:48,854 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:53:48,862 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:53:48,871 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:53:48,889 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:53:48,894 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:54:58,059 Epoch 1/2000 +2025-03-11 15:55:24,972 Current Learning Rate: 0.0009999383 +2025-03-11 15:55:24,977 Train Loss: 0.0447664, Val Loss: 0.0434475 +2025-03-11 15:55:24,977 Epoch 2/2000 +2025-03-11 15:55:49,611 Current Learning Rate: 0.0009997533 +2025-03-11 15:55:49,615 Train Loss: 0.0386358, Val Loss: 0.0325592 +2025-03-11 15:55:49,615 Epoch 3/2000 +2025-03-11 15:56:14,794 Current Learning Rate: 0.0009994449 +2025-03-11 15:56:14,798 Train Loss: 0.0303802, Val Loss: 0.0274663 +2025-03-11 15:56:14,798 Epoch 4/2000 +2025-03-11 15:56:39,684 Current Learning Rate: 0.0009990134 +2025-03-11 15:56:39,688 Train Loss: 0.0275699, Val Loss: 0.0260743 +2025-03-11 15:56:39,689 Epoch 5/2000 
+2025-03-11 15:57:04,868 Current Learning Rate: 0.0009984587 +2025-03-11 15:57:04,872 Train Loss: 0.0265814, Val Loss: 0.0253554 +2025-03-11 15:57:04,872 Epoch 6/2000 +2025-03-11 15:57:29,438 Current Learning Rate: 0.0009977810 +2025-03-11 15:57:29,442 Train Loss: 0.0259260, Val Loss: 0.0247812 +2025-03-11 15:57:29,442 Epoch 7/2000 +2025-03-11 15:57:54,888 Current Learning Rate: 0.0009969805 +2025-03-11 15:57:54,895 Train Loss: 0.0253396, Val Loss: 0.0242641 +2025-03-11 15:57:54,895 Epoch 8/2000 +2025-03-11 15:58:19,613 Current Learning Rate: 0.0009960574 +2025-03-11 15:58:19,624 Train Loss: 0.0247924, Val Loss: 0.0236479 +2025-03-11 15:58:19,625 Epoch 9/2000 +2025-03-11 15:58:44,722 Current Learning Rate: 0.0009950118 +2025-03-11 15:58:44,726 Train Loss: 0.0239551, Val Loss: 0.0228223 +2025-03-11 15:58:44,726 Epoch 10/2000 +2025-03-11 15:59:09,396 Current Learning Rate: 0.0009938442 +2025-03-11 15:59:09,401 Train Loss: 0.0232273, Val Loss: 0.0221983 +2025-03-11 15:59:09,401 Epoch 11/2000 +2025-03-11 15:59:34,601 Current Learning Rate: 0.0009925547 +2025-03-11 15:59:34,605 Train Loss: 0.0225332, Val Loss: 0.0214887 +2025-03-11 15:59:34,605 Epoch 12/2000 +2025-03-11 15:59:59,702 Current Learning Rate: 0.0009911436 +2025-03-11 15:59:59,707 Train Loss: 0.0218435, Val Loss: 0.0209132 +2025-03-11 15:59:59,707 Epoch 13/2000 +2025-03-11 16:00:24,916 Current Learning Rate: 0.0009896114 +2025-03-11 16:00:24,920 Train Loss: 0.0213147, Val Loss: 0.0204538 +2025-03-11 16:00:24,920 Epoch 14/2000 +2025-03-11 16:00:50,062 Current Learning Rate: 0.0009879584 +2025-03-11 16:00:50,067 Train Loss: 0.0208749, Val Loss: 0.0201162 +2025-03-11 16:00:50,067 Epoch 15/2000 +2025-03-11 16:01:15,734 Current Learning Rate: 0.0009861850 +2025-03-11 16:01:15,738 Train Loss: 0.0205117, Val Loss: 0.0197665 +2025-03-11 16:01:15,738 Epoch 16/2000 +2025-03-11 16:01:40,745 Current Learning Rate: 0.0009842916 +2025-03-11 16:01:40,750 Train Loss: 0.0202093, Val Loss: 0.0195006 +2025-03-11 16:01:40,750 
Epoch 17/2000 +2025-03-11 16:02:06,449 Current Learning Rate: 0.0009822787 +2025-03-11 16:02:06,452 Train Loss: 0.0199631, Val Loss: 0.0192863 +2025-03-11 16:02:06,452 Epoch 18/2000 +2025-03-11 16:02:31,898 Current Learning Rate: 0.0009801468 +2025-03-11 16:02:31,902 Train Loss: 0.0197492, Val Loss: 0.0190900 +2025-03-11 16:02:31,902 Epoch 19/2000 +2025-03-11 16:02:56,815 Current Learning Rate: 0.0009778965 +2025-03-11 16:02:56,824 Train Loss: 0.0195674, Val Loss: 0.0189211 +2025-03-11 16:02:56,824 Epoch 20/2000 +2025-03-11 16:03:21,782 Current Learning Rate: 0.0009755283 +2025-03-11 16:03:21,785 Train Loss: 0.0194144, Val Loss: 0.0187942 +2025-03-11 16:03:21,785 Epoch 21/2000 +2025-03-11 16:03:47,079 Current Learning Rate: 0.0009730427 +2025-03-11 16:03:47,084 Train Loss: 0.0192578, Val Loss: 0.0186317 +2025-03-11 16:03:47,084 Epoch 22/2000 +2025-03-11 16:04:12,530 Current Learning Rate: 0.0009704404 +2025-03-11 16:04:12,566 Train Loss: 0.0191376, Val Loss: 0.0185623 +2025-03-11 16:04:12,566 Epoch 23/2000 +2025-03-11 16:04:37,457 Current Learning Rate: 0.0009677220 +2025-03-11 16:04:37,461 Train Loss: 0.0190216, Val Loss: 0.0184137 +2025-03-11 16:04:37,461 Epoch 24/2000 +2025-03-11 16:05:02,411 Current Learning Rate: 0.0009648882 +2025-03-11 16:05:02,741 Train Loss: 0.0189171, Val Loss: 0.0183151 +2025-03-11 16:05:02,741 Epoch 25/2000 +2025-03-11 16:05:27,140 Current Learning Rate: 0.0009619398 +2025-03-11 16:05:27,144 Train Loss: 0.0188252, Val Loss: 0.0182417 +2025-03-11 16:05:27,144 Epoch 26/2000 +2025-03-11 16:05:51,716 Current Learning Rate: 0.0009588773 +2025-03-11 16:05:51,720 Train Loss: 0.0187419, Val Loss: 0.0181548 +2025-03-11 16:05:51,720 Epoch 27/2000 +2025-03-11 16:06:16,436 Current Learning Rate: 0.0009557016 +2025-03-11 16:06:16,440 Train Loss: 0.0186573, Val Loss: 0.0180882 +2025-03-11 16:06:16,440 Epoch 28/2000 +2025-03-11 16:06:41,339 Current Learning Rate: 0.0009524135 +2025-03-11 16:06:41,343 Train Loss: 0.0185991, Val Loss: 0.0180147 
+2025-03-11 16:06:41,343 Epoch 29/2000 +2025-03-11 16:07:06,050 Current Learning Rate: 0.0009490138 +2025-03-11 16:07:06,053 Train Loss: 0.0185222, Val Loss: 0.0179693 +2025-03-11 16:07:06,054 Epoch 30/2000 +2025-03-11 16:07:31,443 Current Learning Rate: 0.0009455033 +2025-03-11 16:07:31,447 Train Loss: 0.0184642, Val Loss: 0.0178888 +2025-03-11 16:07:31,447 Epoch 31/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250224_inference-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250224_inference-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..17898d5f4b4e152b18f49e07f3fdd137739fe15d --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250224_inference-checkpoint.log @@ -0,0 +1,38 @@ +2025-02-24 16:13:59,753 加载模型失败:Error(s) in loading state_dict for Dit: + Missing key(s) in state_dict: "enc.enc.0.conv.weight", "enc.enc.0.conv.bias", "enc.enc.0.norm.weight", "enc.enc.0.norm.bias", "enc.enc.0.norm.running_mean", "enc.enc.0.norm.running_var", "enc.enc.1.conv.weight", "enc.enc.1.conv.bias", "enc.enc.1.norm.weight", "enc.enc.1.norm.bias", "enc.enc.1.norm.running_mean", "enc.enc.1.norm.running_var", "enc.enc.2.conv.weight", "enc.enc.2.conv.bias", "enc.enc.2.norm.weight", "enc.enc.2.norm.bias", "enc.enc.2.norm.running_mean", "enc.enc.2.norm.running_var", "enc.enc.3.conv.weight", "enc.enc.3.conv.bias", "enc.enc.3.norm.weight", "enc.enc.3.norm.bias", "enc.enc.3.norm.running_mean", "enc.enc.3.norm.running_var", "hid.norm.weight", "hid.norm.bias", "hid.enc.0.branch1.weight", "hid.enc.0.branch1.bias", "hid.enc.0.branch2.weight", "hid.enc.0.branch2.bias", "hid.enc.0.branch3.weight", "hid.enc.0.branch3.bias", "hid.enc.0.branch4.weight", "hid.enc.0.branch4.bias", "hid.enc.0.branch5.weight", "hid.enc.0.branch5.bias", "hid.enc.0.conv.weight", "hid.enc.0.conv.bias", "hid.enc.0.norm.weight", "hid.enc.0.norm.bias", "hid.enc.0.norm.running_mean", 
"hid.enc.0.norm.running_var", "hid.enc.1.branch1.weight", "hid.enc.1.branch1.bias", "hid.enc.1.branch2.weight", "hid.enc.1.branch2.bias", "hid.enc.1.branch3.weight", "hid.enc.1.branch3.bias", "hid.enc.1.branch4.weight", "hid.enc.1.branch4.bias", "hid.enc.1.branch5.weight", "hid.enc.1.branch5.bias", "hid.enc.1.conv.weight", "hid.enc.1.conv.bias", "hid.enc.1.norm.weight", "hid.enc.1.norm.bias", "hid.enc.1.norm.running_mean", "hid.enc.1.norm.running_var", "hid.enc.2.branch1.weight", "hid.enc.2.branch1.bias", "hid.enc.2.branch2.weight", "hid.enc.2.branch2.bias", "hid.enc.2.branch3.weight", "hid.enc.2.branch3.bias", "hid.enc.2.branch4.weight", "hid.enc.2.branch4.bias", "hid.enc.2.branch5.weight", "hid.enc.2.branch5.bias", "hid.enc.2.conv.weight", "hid.enc.2.conv.bias", "hid.enc.2.norm.weight", "hid.enc.2.norm.bias", "hid.enc.2.norm.running_mean", "hid.enc.2.norm.running_var", "hid.enc.3.branch1.weight", "hid.enc.3.branch1.bias", "hid.enc.3.branch2.weight", "hid.enc.3.branch2.bias", "hid.enc.3.branch3.weight", "hid.enc.3.branch3.bias", "hid.enc.3.branch4.weight", "hid.enc.3.branch4.bias", "hid.enc.3.branch5.weight", "hid.enc.3.branch5.bias", "hid.enc.3.conv.weight", "hid.enc.3.conv.bias", "hid.enc.3.norm.weight", "hid.enc.3.norm.bias", "hid.enc.3.norm.running_mean", "hid.enc.3.norm.running_var", "hid.enc.4.branch1.weight", "hid.enc.4.branch1.bias", "hid.enc.4.branch2.weight", "hid.enc.4.branch2.bias", "hid.enc.4.branch3.weight", "hid.enc.4.branch3.bias", "hid.enc.4.branch4.weight", "hid.enc.4.branch4.bias", "hid.enc.4.branch5.weight", "hid.enc.4.branch5.bias", "hid.enc.4.conv.weight", "hid.enc.4.conv.bias", "hid.enc.4.norm.weight", "hid.enc.4.norm.bias", "hid.enc.4.norm.running_mean", "hid.enc.4.norm.running_var", "hid.enc.5.branch1.weight", "hid.enc.5.branch1.bias", "hid.enc.5.branch2.weight", "hid.enc.5.branch2.bias", "hid.enc.5.branch3.weight", "hid.enc.5.branch3.bias", "hid.enc.5.branch4.weight", "hid.enc.5.branch4.bias", "hid.enc.5.branch5.weight", 
"hid.enc.5.branch5.bias", "hid.enc.5.conv.weight", "hid.enc.5.conv.bias", "hid.enc.5.norm.weight", "hid.enc.5.norm.bias", "hid.enc.5.norm.running_mean", "hid.enc.5.norm.running_var", "hid.enc.6.branch1.weight", "hid.enc.6.branch1.bias", "hid.enc.6.branch2.weight", "hid.enc.6.branch2.bias", "hid.enc.6.branch3.weight", "hid.enc.6.branch3.bias", "hid.enc.6.branch4.weight", "hid.enc.6.branch4.bias", "hid.enc.6.branch5.weight", "hid.enc.6.branch5.bias", "hid.enc.6.conv.weight", "hid.enc.6.conv.bias", "hid.enc.6.norm.weight", "hid.enc.6.norm.bias", "hid.enc.6.norm.running_mean", "hid.enc.6.norm.running_var", "hid.enc.7.branch1.weight", "hid.enc.7.branch1.bias", "hid.enc.7.branch2.weight", "hid.enc.7.branch2.bias", "hid.enc.7.branch3.weight", "hid.enc.7.branch3.bias", "hid.enc.7.branch4.weight", "hid.enc.7.branch4.bias", "hid.enc.7.branch5.weight", "hid.enc.7.branch5.bias", "hid.enc.7.conv.weight", "hid.enc.7.conv.bias", "hid.enc.7.norm.weight", "hid.enc.7.norm.bias", "hid.enc.7.norm.running_mean", "hid.enc.7.norm.running_var", "hid.dec.0.branch1.weight", "hid.dec.0.branch1.bias", "hid.dec.0.branch2.weight", "hid.dec.0.branch2.bias", "hid.dec.0.branch3.weight", "hid.dec.0.branch3.bias", "hid.dec.0.branch4.weight", "hid.dec.0.branch4.bias", "hid.dec.0.branch5.weight", "hid.dec.0.branch5.bias", "hid.dec.0.conv.weight", "hid.dec.0.conv.bias", "hid.dec.0.norm.weight", "hid.dec.0.norm.bias", "hid.dec.0.norm.running_mean", "hid.dec.0.norm.running_var", "hid.dec.1.branch1.weight", "hid.dec.1.branch1.bias", "hid.dec.1.branch2.weight", "hid.dec.1.branch2.bias", "hid.dec.1.branch3.weight", "hid.dec.1.branch3.bias", "hid.dec.1.branch4.weight", "hid.dec.1.branch4.bias", "hid.dec.1.branch5.weight", "hid.dec.1.branch5.bias", "hid.dec.1.conv.weight", "hid.dec.1.conv.bias", "hid.dec.1.norm.weight", "hid.dec.1.norm.bias", "hid.dec.1.norm.running_mean", "hid.dec.1.norm.running_var", "hid.dec.2.branch1.weight", "hid.dec.2.branch1.bias", "hid.dec.2.branch2.weight", "hid.dec.2.branch2.bias", 
"hid.dec.2.branch3.weight", "hid.dec.2.branch3.bias", "hid.dec.2.branch4.weight", "hid.dec.2.branch4.bias", "hid.dec.2.branch5.weight", "hid.dec.2.branch5.bias", "hid.dec.2.conv.weight", "hid.dec.2.conv.bias", "hid.dec.2.norm.weight", "hid.dec.2.norm.bias", "hid.dec.2.norm.running_mean", "hid.dec.2.norm.running_var", "hid.dec.3.branch1.weight", "hid.dec.3.branch1.bias", "hid.dec.3.branch2.weight", "hid.dec.3.branch2.bias", "hid.dec.3.branch3.weight", "hid.dec.3.branch3.bias", "hid.dec.3.branch4.weight", "hid.dec.3.branch4.bias", "hid.dec.3.branch5.weight", "hid.dec.3.branch5.bias", "hid.dec.3.conv.weight", "hid.dec.3.conv.bias", "hid.dec.3.norm.weight", "hid.dec.3.norm.bias", "hid.dec.3.norm.running_mean", "hid.dec.3.norm.running_var", "hid.dec.4.branch1.weight", "hid.dec.4.branch1.bias", "hid.dec.4.branch2.weight", "hid.dec.4.branch2.bias", "hid.dec.4.branch3.weight", "hid.dec.4.branch3.bias", "hid.dec.4.branch4.weight", "hid.dec.4.branch4.bias", "hid.dec.4.branch5.weight", "hid.dec.4.branch5.bias", "hid.dec.4.conv.weight", "hid.dec.4.conv.bias", "hid.dec.4.norm.weight", "hid.dec.4.norm.bias", "hid.dec.4.norm.running_mean", "hid.dec.4.norm.running_var", "hid.dec.5.branch1.weight", "hid.dec.5.branch1.bias", "hid.dec.5.branch2.weight", "hid.dec.5.branch2.bias", "hid.dec.5.branch3.weight", "hid.dec.5.branch3.bias", "hid.dec.5.branch4.weight", "hid.dec.5.branch4.bias", "hid.dec.5.branch5.weight", "hid.dec.5.branch5.bias", "hid.dec.5.conv.weight", "hid.dec.5.conv.bias", "hid.dec.5.norm.weight", "hid.dec.5.norm.bias", "hid.dec.5.norm.running_mean", "hid.dec.5.norm.running_var", "hid.dec.6.branch1.weight", "hid.dec.6.branch1.bias", "hid.dec.6.branch2.weight", "hid.dec.6.branch2.bias", "hid.dec.6.branch3.weight", "hid.dec.6.branch3.bias", "hid.dec.6.branch4.weight", "hid.dec.6.branch4.bias", "hid.dec.6.branch5.weight", "hid.dec.6.branch5.bias", "hid.dec.6.conv.weight", "hid.dec.6.conv.bias", "hid.dec.6.norm.weight", "hid.dec.6.norm.bias", "hid.dec.6.norm.running_mean", 
"hid.dec.6.norm.running_var", "hid.dec.7.branch1.weight", "hid.dec.7.branch1.bias", "hid.dec.7.branch2.weight", "hid.dec.7.branch2.bias", "hid.dec.7.branch3.weight", "hid.dec.7.branch3.bias", "hid.dec.7.branch4.weight", "hid.dec.7.branch4.bias", "hid.dec.7.branch5.weight", "hid.dec.7.branch5.bias", "hid.dec.7.conv.weight", "hid.dec.7.conv.bias", "hid.dec.7.norm.weight", "hid.dec.7.norm.bias", "hid.dec.7.norm.running_mean", "hid.dec.7.norm.running_var", "dit_block.pos_embed", "dit_block.x_embedder.proj.weight", "dit_block.x_embedder.proj.bias", "dit_block.t_embedder.mlp.0.weight", "dit_block.t_embedder.mlp.0.bias", "dit_block.t_embedder.mlp.2.weight", "dit_block.t_embedder.mlp.2.bias", "dit_block.blocks.0.attn.qkv.weight", "dit_block.blocks.0.attn.qkv.bias", "dit_block.blocks.0.attn.proj.weight", "dit_block.blocks.0.attn.proj.bias", "dit_block.blocks.0.mlp.fc1.weight", "dit_block.blocks.0.mlp.fc1.bias", "dit_block.blocks.0.mlp.fc2.weight", "dit_block.blocks.0.mlp.fc2.bias", "dit_block.blocks.0.adaLN_modulation.1.weight", "dit_block.blocks.0.adaLN_modulation.1.bias", "dit_block.blocks.1.attn.qkv.weight", "dit_block.blocks.1.attn.qkv.bias", "dit_block.blocks.1.attn.proj.weight", "dit_block.blocks.1.attn.proj.bias", "dit_block.blocks.1.mlp.fc1.weight", "dit_block.blocks.1.mlp.fc1.bias", "dit_block.blocks.1.mlp.fc2.weight", "dit_block.blocks.1.mlp.fc2.bias", "dit_block.blocks.1.adaLN_modulation.1.weight", "dit_block.blocks.1.adaLN_modulation.1.bias", "dit_block.blocks.2.attn.qkv.weight", "dit_block.blocks.2.attn.qkv.bias", "dit_block.blocks.2.attn.proj.weight", "dit_block.blocks.2.attn.proj.bias", "dit_block.blocks.2.mlp.fc1.weight", "dit_block.blocks.2.mlp.fc1.bias", "dit_block.blocks.2.mlp.fc2.weight", "dit_block.blocks.2.mlp.fc2.bias", "dit_block.blocks.2.adaLN_modulation.1.weight", "dit_block.blocks.2.adaLN_modulation.1.bias", "dit_block.blocks.3.attn.qkv.weight", "dit_block.blocks.3.attn.qkv.bias", "dit_block.blocks.3.attn.proj.weight", 
"dit_block.blocks.3.attn.proj.bias", "dit_block.blocks.3.mlp.fc1.weight", "dit_block.blocks.3.mlp.fc1.bias", "dit_block.blocks.3.mlp.fc2.weight", "dit_block.blocks.3.mlp.fc2.bias", "dit_block.blocks.3.adaLN_modulation.1.weight", "dit_block.blocks.3.adaLN_modulation.1.bias", "dit_block.blocks.4.attn.qkv.weight", "dit_block.blocks.4.attn.qkv.bias", "dit_block.blocks.4.attn.proj.weight", "dit_block.blocks.4.attn.proj.bias", "dit_block.blocks.4.mlp.fc1.weight", "dit_block.blocks.4.mlp.fc1.bias", "dit_block.blocks.4.mlp.fc2.weight", "dit_block.blocks.4.mlp.fc2.bias", "dit_block.blocks.4.adaLN_modulation.1.weight", "dit_block.blocks.4.adaLN_modulation.1.bias", "dit_block.blocks.5.attn.qkv.weight", "dit_block.blocks.5.attn.qkv.bias", "dit_block.blocks.5.attn.proj.weight", "dit_block.blocks.5.attn.proj.bias", "dit_block.blocks.5.mlp.fc1.weight", "dit_block.blocks.5.mlp.fc1.bias", "dit_block.blocks.5.mlp.fc2.weight", "dit_block.blocks.5.mlp.fc2.bias", "dit_block.blocks.5.adaLN_modulation.1.weight", "dit_block.blocks.5.adaLN_modulation.1.bias", "dit_block.blocks.6.attn.qkv.weight", "dit_block.blocks.6.attn.qkv.bias", "dit_block.blocks.6.attn.proj.weight", "dit_block.blocks.6.attn.proj.bias", "dit_block.blocks.6.mlp.fc1.weight", "dit_block.blocks.6.mlp.fc1.bias", "dit_block.blocks.6.mlp.fc2.weight", "dit_block.blocks.6.mlp.fc2.bias", "dit_block.blocks.6.adaLN_modulation.1.weight", "dit_block.blocks.6.adaLN_modulation.1.bias", "dit_block.blocks.7.attn.qkv.weight", "dit_block.blocks.7.attn.qkv.bias", "dit_block.blocks.7.attn.proj.weight", "dit_block.blocks.7.attn.proj.bias", "dit_block.blocks.7.mlp.fc1.weight", "dit_block.blocks.7.mlp.fc1.bias", "dit_block.blocks.7.mlp.fc2.weight", "dit_block.blocks.7.mlp.fc2.bias", "dit_block.blocks.7.adaLN_modulation.1.weight", "dit_block.blocks.7.adaLN_modulation.1.bias", "dit_block.blocks.8.attn.qkv.weight", "dit_block.blocks.8.attn.qkv.bias", "dit_block.blocks.8.attn.proj.weight", "dit_block.blocks.8.attn.proj.bias", 
"dit_block.blocks.8.mlp.fc1.weight", "dit_block.blocks.8.mlp.fc1.bias", "dit_block.blocks.8.mlp.fc2.weight", "dit_block.blocks.8.mlp.fc2.bias", "dit_block.blocks.8.adaLN_modulation.1.weight", "dit_block.blocks.8.adaLN_modulation.1.bias", "dit_block.blocks.9.attn.qkv.weight", "dit_block.blocks.9.attn.qkv.bias", "dit_block.blocks.9.attn.proj.weight", "dit_block.blocks.9.attn.proj.bias", "dit_block.blocks.9.mlp.fc1.weight", "dit_block.blocks.9.mlp.fc1.bias", "dit_block.blocks.9.mlp.fc2.weight", "dit_block.blocks.9.mlp.fc2.bias", "dit_block.blocks.9.adaLN_modulation.1.weight", "dit_block.blocks.9.adaLN_modulation.1.bias", "dit_block.blocks.10.attn.qkv.weight", "dit_block.blocks.10.attn.qkv.bias", "dit_block.blocks.10.attn.proj.weight", "dit_block.blocks.10.attn.proj.bias", "dit_block.blocks.10.mlp.fc1.weight", "dit_block.blocks.10.mlp.fc1.bias", "dit_block.blocks.10.mlp.fc2.weight", "dit_block.blocks.10.mlp.fc2.bias", "dit_block.blocks.10.adaLN_modulation.1.weight", "dit_block.blocks.10.adaLN_modulation.1.bias", "dit_block.blocks.11.attn.qkv.weight", "dit_block.blocks.11.attn.qkv.bias", "dit_block.blocks.11.attn.proj.weight", "dit_block.blocks.11.attn.proj.bias", "dit_block.blocks.11.mlp.fc1.weight", "dit_block.blocks.11.mlp.fc1.bias", "dit_block.blocks.11.mlp.fc2.weight", "dit_block.blocks.11.mlp.fc2.bias", "dit_block.blocks.11.adaLN_modulation.1.weight", "dit_block.blocks.11.adaLN_modulation.1.bias", "dit_block.final_layer.linear.weight", "dit_block.final_layer.linear.bias", "dit_block.final_layer.adaLN_modulation.1.weight", "dit_block.final_layer.adaLN_modulation.1.bias", "dec.dec.0.conv.weight", "dec.dec.0.conv.bias", "dec.dec.0.norm.weight", "dec.dec.0.norm.bias", "dec.dec.0.norm.running_mean", "dec.dec.0.norm.running_var", "dec.dec.1.conv.weight", "dec.dec.1.conv.bias", "dec.dec.1.norm.weight", "dec.dec.1.norm.bias", "dec.dec.1.norm.running_mean", "dec.dec.1.norm.running_var", "dec.dec.2.conv.weight", "dec.dec.2.conv.bias", "dec.dec.2.norm.weight", 
"dec.dec.2.norm.bias", "dec.dec.2.norm.running_mean", "dec.dec.2.norm.running_var", "dec.dec.3.conv.weight", "dec.dec.3.conv.bias", "dec.dec.3.norm.weight", "dec.dec.3.norm.bias", "dec.dec.3.norm.running_mean", "dec.dec.3.norm.running_var", "dec.readout.weight", "dec.readout.bias". + Unexpected key(s) in state_dict: "module.enc.enc.0.conv.weight", "module.enc.enc.0.conv.bias", "module.enc.enc.0.norm.weight", "module.enc.enc.0.norm.bias", "module.enc.enc.0.norm.running_mean", "module.enc.enc.0.norm.running_var", "module.enc.enc.0.norm.num_batches_tracked", "module.enc.enc.1.conv.weight", "module.enc.enc.1.conv.bias", "module.enc.enc.1.norm.weight", "module.enc.enc.1.norm.bias", "module.enc.enc.1.norm.running_mean", "module.enc.enc.1.norm.running_var", "module.enc.enc.1.norm.num_batches_tracked", "module.enc.enc.2.conv.weight", "module.enc.enc.2.conv.bias", "module.enc.enc.2.norm.weight", "module.enc.enc.2.norm.bias", "module.enc.enc.2.norm.running_mean", "module.enc.enc.2.norm.running_var", "module.enc.enc.2.norm.num_batches_tracked", "module.enc.enc.3.conv.weight", "module.enc.enc.3.conv.bias", "module.enc.enc.3.norm.weight", "module.enc.enc.3.norm.bias", "module.enc.enc.3.norm.running_mean", "module.enc.enc.3.norm.running_var", "module.enc.enc.3.norm.num_batches_tracked", "module.hid.norm.weight", "module.hid.norm.bias", "module.hid.enc.0.branch1.weight", "module.hid.enc.0.branch1.bias", "module.hid.enc.0.branch2.weight", "module.hid.enc.0.branch2.bias", "module.hid.enc.0.branch3.weight", "module.hid.enc.0.branch3.bias", "module.hid.enc.0.branch4.weight", "module.hid.enc.0.branch4.bias", "module.hid.enc.0.branch5.weight", "module.hid.enc.0.branch5.bias", "module.hid.enc.0.conv.weight", "module.hid.enc.0.conv.bias", "module.hid.enc.0.norm.weight", "module.hid.enc.0.norm.bias", "module.hid.enc.0.norm.running_mean", "module.hid.enc.0.norm.running_var", "module.hid.enc.0.norm.num_batches_tracked", "module.hid.enc.1.branch1.weight", "module.hid.enc.1.branch1.bias", 
"module.hid.enc.1.branch2.weight", "module.hid.enc.1.branch2.bias", "module.hid.enc.1.branch3.weight", "module.hid.enc.1.branch3.bias", "module.hid.enc.1.branch4.weight", "module.hid.enc.1.branch4.bias", "module.hid.enc.1.branch5.weight", "module.hid.enc.1.branch5.bias", "module.hid.enc.1.conv.weight", "module.hid.enc.1.conv.bias", "module.hid.enc.1.norm.weight", "module.hid.enc.1.norm.bias", "module.hid.enc.1.norm.running_mean", "module.hid.enc.1.norm.running_var", "module.hid.enc.1.norm.num_batches_tracked", "module.hid.enc.2.branch1.weight", "module.hid.enc.2.branch1.bias", "module.hid.enc.2.branch2.weight", "module.hid.enc.2.branch2.bias", "module.hid.enc.2.branch3.weight", "module.hid.enc.2.branch3.bias", "module.hid.enc.2.branch4.weight", "module.hid.enc.2.branch4.bias", "module.hid.enc.2.branch5.weight", "module.hid.enc.2.branch5.bias", "module.hid.enc.2.conv.weight", "module.hid.enc.2.conv.bias", "module.hid.enc.2.norm.weight", "module.hid.enc.2.norm.bias", "module.hid.enc.2.norm.running_mean", "module.hid.enc.2.norm.running_var", "module.hid.enc.2.norm.num_batches_tracked", "module.hid.enc.3.branch1.weight", "module.hid.enc.3.branch1.bias", "module.hid.enc.3.branch2.weight", "module.hid.enc.3.branch2.bias", "module.hid.enc.3.branch3.weight", "module.hid.enc.3.branch3.bias", "module.hid.enc.3.branch4.weight", "module.hid.enc.3.branch4.bias", "module.hid.enc.3.branch5.weight", "module.hid.enc.3.branch5.bias", "module.hid.enc.3.conv.weight", "module.hid.enc.3.conv.bias", "module.hid.enc.3.norm.weight", "module.hid.enc.3.norm.bias", "module.hid.enc.3.norm.running_mean", "module.hid.enc.3.norm.running_var", "module.hid.enc.3.norm.num_batches_tracked", "module.hid.enc.4.branch1.weight", "module.hid.enc.4.branch1.bias", "module.hid.enc.4.branch2.weight", "module.hid.enc.4.branch2.bias", "module.hid.enc.4.branch3.weight", "module.hid.enc.4.branch3.bias", "module.hid.enc.4.branch4.weight", "module.hid.enc.4.branch4.bias", "module.hid.enc.4.branch5.weight", 
"module.hid.enc.4.branch5.bias", "module.hid.enc.4.conv.weight", "module.hid.enc.4.conv.bias", "module.hid.enc.4.norm.weight", "module.hid.enc.4.norm.bias", "module.hid.enc.4.norm.running_mean", "module.hid.enc.4.norm.running_var", "module.hid.enc.4.norm.num_batches_tracked", "module.hid.enc.5.branch1.weight", "module.hid.enc.5.branch1.bias", "module.hid.enc.5.branch2.weight", "module.hid.enc.5.branch2.bias", "module.hid.enc.5.branch3.weight", "module.hid.enc.5.branch3.bias", "module.hid.enc.5.branch4.weight", "module.hid.enc.5.branch4.bias", "module.hid.enc.5.branch5.weight", "module.hid.enc.5.branch5.bias", "module.hid.enc.5.conv.weight", "module.hid.enc.5.conv.bias", "module.hid.enc.5.norm.weight", "module.hid.enc.5.norm.bias", "module.hid.enc.5.norm.running_mean", "module.hid.enc.5.norm.running_var", "module.hid.enc.5.norm.num_batches_tracked", "module.hid.enc.6.branch1.weight", "module.hid.enc.6.branch1.bias", "module.hid.enc.6.branch2.weight", "module.hid.enc.6.branch2.bias", "module.hid.enc.6.branch3.weight", "module.hid.enc.6.branch3.bias", "module.hid.enc.6.branch4.weight", "module.hid.enc.6.branch4.bias", "module.hid.enc.6.branch5.weight", "module.hid.enc.6.branch5.bias", "module.hid.enc.6.conv.weight", "module.hid.enc.6.conv.bias", "module.hid.enc.6.norm.weight", "module.hid.enc.6.norm.bias", "module.hid.enc.6.norm.running_mean", "module.hid.enc.6.norm.running_var", "module.hid.enc.6.norm.num_batches_tracked", "module.hid.enc.7.branch1.weight", "module.hid.enc.7.branch1.bias", "module.hid.enc.7.branch2.weight", "module.hid.enc.7.branch2.bias", "module.hid.enc.7.branch3.weight", "module.hid.enc.7.branch3.bias", "module.hid.enc.7.branch4.weight", "module.hid.enc.7.branch4.bias", "module.hid.enc.7.branch5.weight", "module.hid.enc.7.branch5.bias", "module.hid.enc.7.conv.weight", "module.hid.enc.7.conv.bias", "module.hid.enc.7.norm.weight", "module.hid.enc.7.norm.bias", "module.hid.enc.7.norm.running_mean", "module.hid.enc.7.norm.running_var", 
"module.hid.enc.7.norm.num_batches_tracked", "module.hid.dec.0.branch1.weight", "module.hid.dec.0.branch1.bias", "module.hid.dec.0.branch2.weight", "module.hid.dec.0.branch2.bias", "module.hid.dec.0.branch3.weight", "module.hid.dec.0.branch3.bias", "module.hid.dec.0.branch4.weight", "module.hid.dec.0.branch4.bias", "module.hid.dec.0.branch5.weight", "module.hid.dec.0.branch5.bias", "module.hid.dec.0.conv.weight", "module.hid.dec.0.conv.bias", "module.hid.dec.0.norm.weight", "module.hid.dec.0.norm.bias", "module.hid.dec.0.norm.running_mean", "module.hid.dec.0.norm.running_var", "module.hid.dec.0.norm.num_batches_tracked", "module.hid.dec.1.branch1.weight", "module.hid.dec.1.branch1.bias", "module.hid.dec.1.branch2.weight", "module.hid.dec.1.branch2.bias", "module.hid.dec.1.branch3.weight", "module.hid.dec.1.branch3.bias", "module.hid.dec.1.branch4.weight", "module.hid.dec.1.branch4.bias", "module.hid.dec.1.branch5.weight", "module.hid.dec.1.branch5.bias", "module.hid.dec.1.conv.weight", "module.hid.dec.1.conv.bias", "module.hid.dec.1.norm.weight", "module.hid.dec.1.norm.bias", "module.hid.dec.1.norm.running_mean", "module.hid.dec.1.norm.running_var", "module.hid.dec.1.norm.num_batches_tracked", "module.hid.dec.2.branch1.weight", "module.hid.dec.2.branch1.bias", "module.hid.dec.2.branch2.weight", "module.hid.dec.2.branch2.bias", "module.hid.dec.2.branch3.weight", "module.hid.dec.2.branch3.bias", "module.hid.dec.2.branch4.weight", "module.hid.dec.2.branch4.bias", "module.hid.dec.2.branch5.weight", "module.hid.dec.2.branch5.bias", "module.hid.dec.2.conv.weight", "module.hid.dec.2.conv.bias", "module.hid.dec.2.norm.weight", "module.hid.dec.2.norm.bias", "module.hid.dec.2.norm.running_mean", "module.hid.dec.2.norm.running_var", "module.hid.dec.2.norm.num_batches_tracked", "module.hid.dec.3.branch1.weight", "module.hid.dec.3.branch1.bias", "module.hid.dec.3.branch2.weight", "module.hid.dec.3.branch2.bias", "module.hid.dec.3.branch3.weight", 
"module.hid.dec.3.branch3.bias", "module.hid.dec.3.branch4.weight", "module.hid.dec.3.branch4.bias", "module.hid.dec.3.branch5.weight", "module.hid.dec.3.branch5.bias", "module.hid.dec.3.conv.weight", "module.hid.dec.3.conv.bias", "module.hid.dec.3.norm.weight", "module.hid.dec.3.norm.bias", "module.hid.dec.3.norm.running_mean", "module.hid.dec.3.norm.running_var", "module.hid.dec.3.norm.num_batches_tracked", "module.hid.dec.4.branch1.weight", "module.hid.dec.4.branch1.bias", "module.hid.dec.4.branch2.weight", "module.hid.dec.4.branch2.bias", "module.hid.dec.4.branch3.weight", "module.hid.dec.4.branch3.bias", "module.hid.dec.4.branch4.weight", "module.hid.dec.4.branch4.bias", "module.hid.dec.4.branch5.weight", "module.hid.dec.4.branch5.bias", "module.hid.dec.4.conv.weight", "module.hid.dec.4.conv.bias", "module.hid.dec.4.norm.weight", "module.hid.dec.4.norm.bias", "module.hid.dec.4.norm.running_mean", "module.hid.dec.4.norm.running_var", "module.hid.dec.4.norm.num_batches_tracked", "module.hid.dec.5.branch1.weight", "module.hid.dec.5.branch1.bias", "module.hid.dec.5.branch2.weight", "module.hid.dec.5.branch2.bias", "module.hid.dec.5.branch3.weight", "module.hid.dec.5.branch3.bias", "module.hid.dec.5.branch4.weight", "module.hid.dec.5.branch4.bias", "module.hid.dec.5.branch5.weight", "module.hid.dec.5.branch5.bias", "module.hid.dec.5.conv.weight", "module.hid.dec.5.conv.bias", "module.hid.dec.5.norm.weight", "module.hid.dec.5.norm.bias", "module.hid.dec.5.norm.running_mean", "module.hid.dec.5.norm.running_var", "module.hid.dec.5.norm.num_batches_tracked", "module.hid.dec.6.branch1.weight", "module.hid.dec.6.branch1.bias", "module.hid.dec.6.branch2.weight", "module.hid.dec.6.branch2.bias", "module.hid.dec.6.branch3.weight", "module.hid.dec.6.branch3.bias", "module.hid.dec.6.branch4.weight", "module.hid.dec.6.branch4.bias", "module.hid.dec.6.branch5.weight", "module.hid.dec.6.branch5.bias", "module.hid.dec.6.conv.weight", "module.hid.dec.6.conv.bias", 
"module.hid.dec.6.norm.weight", "module.hid.dec.6.norm.bias", "module.hid.dec.6.norm.running_mean", "module.hid.dec.6.norm.running_var", "module.hid.dec.6.norm.num_batches_tracked", "module.hid.dec.7.branch1.weight", "module.hid.dec.7.branch1.bias", "module.hid.dec.7.branch2.weight", "module.hid.dec.7.branch2.bias", "module.hid.dec.7.branch3.weight", "module.hid.dec.7.branch3.bias", "module.hid.dec.7.branch4.weight", "module.hid.dec.7.branch4.bias", "module.hid.dec.7.branch5.weight", "module.hid.dec.7.branch5.bias", "module.hid.dec.7.conv.weight", "module.hid.dec.7.conv.bias", "module.hid.dec.7.norm.weight", "module.hid.dec.7.norm.bias", "module.hid.dec.7.norm.running_mean", "module.hid.dec.7.norm.running_var", "module.hid.dec.7.norm.num_batches_tracked", "module.dit_block.pos_embed", "module.dit_block.x_embedder.proj.weight", "module.dit_block.x_embedder.proj.bias", "module.dit_block.t_embedder.mlp.0.weight", "module.dit_block.t_embedder.mlp.0.bias", "module.dit_block.t_embedder.mlp.2.weight", "module.dit_block.t_embedder.mlp.2.bias", "module.dit_block.blocks.0.attn.qkv.weight", "module.dit_block.blocks.0.attn.qkv.bias", "module.dit_block.blocks.0.attn.proj.weight", "module.dit_block.blocks.0.attn.proj.bias", "module.dit_block.blocks.0.mlp.fc1.weight", "module.dit_block.blocks.0.mlp.fc1.bias", "module.dit_block.blocks.0.mlp.fc2.weight", "module.dit_block.blocks.0.mlp.fc2.bias", "module.dit_block.blocks.0.adaLN_modulation.1.weight", "module.dit_block.blocks.0.adaLN_modulation.1.bias", "module.dit_block.blocks.1.attn.qkv.weight", "module.dit_block.blocks.1.attn.qkv.bias", "module.dit_block.blocks.1.attn.proj.weight", "module.dit_block.blocks.1.attn.proj.bias", "module.dit_block.blocks.1.mlp.fc1.weight", "module.dit_block.blocks.1.mlp.fc1.bias", "module.dit_block.blocks.1.mlp.fc2.weight", "module.dit_block.blocks.1.mlp.fc2.bias", "module.dit_block.blocks.1.adaLN_modulation.1.weight", "module.dit_block.blocks.1.adaLN_modulation.1.bias", 
"module.dit_block.blocks.2.attn.qkv.weight", "module.dit_block.blocks.2.attn.qkv.bias", "module.dit_block.blocks.2.attn.proj.weight", "module.dit_block.blocks.2.attn.proj.bias", "module.dit_block.blocks.2.mlp.fc1.weight", "module.dit_block.blocks.2.mlp.fc1.bias", "module.dit_block.blocks.2.mlp.fc2.weight", "module.dit_block.blocks.2.mlp.fc2.bias", "module.dit_block.blocks.2.adaLN_modulation.1.weight", "module.dit_block.blocks.2.adaLN_modulation.1.bias", "module.dit_block.blocks.3.attn.qkv.weight", "module.dit_block.blocks.3.attn.qkv.bias", "module.dit_block.blocks.3.attn.proj.weight", "module.dit_block.blocks.3.attn.proj.bias", "module.dit_block.blocks.3.mlp.fc1.weight", "module.dit_block.blocks.3.mlp.fc1.bias", "module.dit_block.blocks.3.mlp.fc2.weight", "module.dit_block.blocks.3.mlp.fc2.bias", "module.dit_block.blocks.3.adaLN_modulation.1.weight", "module.dit_block.blocks.3.adaLN_modulation.1.bias", "module.dit_block.blocks.4.attn.qkv.weight", "module.dit_block.blocks.4.attn.qkv.bias", "module.dit_block.blocks.4.attn.proj.weight", "module.dit_block.blocks.4.attn.proj.bias", "module.dit_block.blocks.4.mlp.fc1.weight", "module.dit_block.blocks.4.mlp.fc1.bias", "module.dit_block.blocks.4.mlp.fc2.weight", "module.dit_block.blocks.4.mlp.fc2.bias", "module.dit_block.blocks.4.adaLN_modulation.1.weight", "module.dit_block.blocks.4.adaLN_modulation.1.bias", "module.dit_block.blocks.5.attn.qkv.weight", "module.dit_block.blocks.5.attn.qkv.bias", "module.dit_block.blocks.5.attn.proj.weight", "module.dit_block.blocks.5.attn.proj.bias", "module.dit_block.blocks.5.mlp.fc1.weight", "module.dit_block.blocks.5.mlp.fc1.bias", "module.dit_block.blocks.5.mlp.fc2.weight", "module.dit_block.blocks.5.mlp.fc2.bias", "module.dit_block.blocks.5.adaLN_modulation.1.weight", "module.dit_block.blocks.5.adaLN_modulation.1.bias", "module.dit_block.blocks.6.attn.qkv.weight", "module.dit_block.blocks.6.attn.qkv.bias", "module.dit_block.blocks.6.attn.proj.weight", 
"module.dit_block.blocks.6.attn.proj.bias", "module.dit_block.blocks.6.mlp.fc1.weight", "module.dit_block.blocks.6.mlp.fc1.bias", "module.dit_block.blocks.6.mlp.fc2.weight", "module.dit_block.blocks.6.mlp.fc2.bias", "module.dit_block.blocks.6.adaLN_modulation.1.weight", "module.dit_block.blocks.6.adaLN_modulation.1.bias", "module.dit_block.blocks.7.attn.qkv.weight", "module.dit_block.blocks.7.attn.qkv.bias", "module.dit_block.blocks.7.attn.proj.weight", "module.dit_block.blocks.7.attn.proj.bias", "module.dit_block.blocks.7.mlp.fc1.weight", "module.dit_block.blocks.7.mlp.fc1.bias", "module.dit_block.blocks.7.mlp.fc2.weight", "module.dit_block.blocks.7.mlp.fc2.bias", "module.dit_block.blocks.7.adaLN_modulation.1.weight", "module.dit_block.blocks.7.adaLN_modulation.1.bias", "module.dit_block.blocks.8.attn.qkv.weight", "module.dit_block.blocks.8.attn.qkv.bias", "module.dit_block.blocks.8.attn.proj.weight", "module.dit_block.blocks.8.attn.proj.bias", "module.dit_block.blocks.8.mlp.fc1.weight", "module.dit_block.blocks.8.mlp.fc1.bias", "module.dit_block.blocks.8.mlp.fc2.weight", "module.dit_block.blocks.8.mlp.fc2.bias", "module.dit_block.blocks.8.adaLN_modulation.1.weight", "module.dit_block.blocks.8.adaLN_modulation.1.bias", "module.dit_block.blocks.9.attn.qkv.weight", "module.dit_block.blocks.9.attn.qkv.bias", "module.dit_block.blocks.9.attn.proj.weight", "module.dit_block.blocks.9.attn.proj.bias", "module.dit_block.blocks.9.mlp.fc1.weight", "module.dit_block.blocks.9.mlp.fc1.bias", "module.dit_block.blocks.9.mlp.fc2.weight", "module.dit_block.blocks.9.mlp.fc2.bias", "module.dit_block.blocks.9.adaLN_modulation.1.weight", "module.dit_block.blocks.9.adaLN_modulation.1.bias", "module.dit_block.blocks.10.attn.qkv.weight", "module.dit_block.blocks.10.attn.qkv.bias", "module.dit_block.blocks.10.attn.proj.weight", "module.dit_block.blocks.10.attn.proj.bias", "module.dit_block.blocks.10.mlp.fc1.weight", "module.dit_block.blocks.10.mlp.fc1.bias", 
"module.dit_block.blocks.10.mlp.fc2.weight", "module.dit_block.blocks.10.mlp.fc2.bias", "module.dit_block.blocks.10.adaLN_modulation.1.weight", "module.dit_block.blocks.10.adaLN_modulation.1.bias", "module.dit_block.blocks.11.attn.qkv.weight", "module.dit_block.blocks.11.attn.qkv.bias", "module.dit_block.blocks.11.attn.proj.weight", "module.dit_block.blocks.11.attn.proj.bias", "module.dit_block.blocks.11.mlp.fc1.weight", "module.dit_block.blocks.11.mlp.fc1.bias", "module.dit_block.blocks.11.mlp.fc2.weight", "module.dit_block.blocks.11.mlp.fc2.bias", "module.dit_block.blocks.11.adaLN_modulation.1.weight", "module.dit_block.blocks.11.adaLN_modulation.1.bias", "module.dit_block.final_layer.linear.weight", "module.dit_block.final_layer.linear.bias", "module.dit_block.final_layer.adaLN_modulation.1.weight", "module.dit_block.final_layer.adaLN_modulation.1.bias", "module.dec.dec.0.conv.weight", "module.dec.dec.0.conv.bias", "module.dec.dec.0.norm.weight", "module.dec.dec.0.norm.bias", "module.dec.dec.0.norm.running_mean", "module.dec.dec.0.norm.running_var", "module.dec.dec.0.norm.num_batches_tracked", "module.dec.dec.1.conv.weight", "module.dec.dec.1.conv.bias", "module.dec.dec.1.norm.weight", "module.dec.dec.1.norm.bias", "module.dec.dec.1.norm.running_mean", "module.dec.dec.1.norm.running_var", "module.dec.dec.1.norm.num_batches_tracked", "module.dec.dec.2.conv.weight", "module.dec.dec.2.conv.bias", "module.dec.dec.2.norm.weight", "module.dec.dec.2.norm.bias", "module.dec.dec.2.norm.running_mean", "module.dec.dec.2.norm.running_var", "module.dec.dec.2.norm.num_batches_tracked", "module.dec.dec.3.conv.weight", "module.dec.dec.3.conv.bias", "module.dec.dec.3.norm.weight", "module.dec.dec.3.norm.bias", "module.dec.dec.3.norm.running_mean", "module.dec.dec.3.norm.running_var", "module.dec.dec.3.norm.num_batches_tracked", "module.dec.readout.weight", "module.dec.readout.bias". 
+2025-02-24 16:13:59,753 推理过程出错:Error(s) in loading state_dict for Dit: + Missing key(s) in state_dict: "enc.enc.0.conv.weight", "enc.enc.0.conv.bias", "enc.enc.0.norm.weight", "enc.enc.0.norm.bias", "enc.enc.0.norm.running_mean", "enc.enc.0.norm.running_var", "enc.enc.1.conv.weight", "enc.enc.1.conv.bias", "enc.enc.1.norm.weight", "enc.enc.1.norm.bias", "enc.enc.1.norm.running_mean", "enc.enc.1.norm.running_var", "enc.enc.2.conv.weight", "enc.enc.2.conv.bias", "enc.enc.2.norm.weight", "enc.enc.2.norm.bias", "enc.enc.2.norm.running_mean", "enc.enc.2.norm.running_var", "enc.enc.3.conv.weight", "enc.enc.3.conv.bias", "enc.enc.3.norm.weight", "enc.enc.3.norm.bias", "enc.enc.3.norm.running_mean", "enc.enc.3.norm.running_var", "hid.norm.weight", "hid.norm.bias", "hid.enc.0.branch1.weight", "hid.enc.0.branch1.bias", "hid.enc.0.branch2.weight", "hid.enc.0.branch2.bias", "hid.enc.0.branch3.weight", "hid.enc.0.branch3.bias", "hid.enc.0.branch4.weight", "hid.enc.0.branch4.bias", "hid.enc.0.branch5.weight", "hid.enc.0.branch5.bias", "hid.enc.0.conv.weight", "hid.enc.0.conv.bias", "hid.enc.0.norm.weight", "hid.enc.0.norm.bias", "hid.enc.0.norm.running_mean", "hid.enc.0.norm.running_var", "hid.enc.1.branch1.weight", "hid.enc.1.branch1.bias", "hid.enc.1.branch2.weight", "hid.enc.1.branch2.bias", "hid.enc.1.branch3.weight", "hid.enc.1.branch3.bias", "hid.enc.1.branch4.weight", "hid.enc.1.branch4.bias", "hid.enc.1.branch5.weight", "hid.enc.1.branch5.bias", "hid.enc.1.conv.weight", "hid.enc.1.conv.bias", "hid.enc.1.norm.weight", "hid.enc.1.norm.bias", "hid.enc.1.norm.running_mean", "hid.enc.1.norm.running_var", "hid.enc.2.branch1.weight", "hid.enc.2.branch1.bias", "hid.enc.2.branch2.weight", "hid.enc.2.branch2.bias", "hid.enc.2.branch3.weight", "hid.enc.2.branch3.bias", "hid.enc.2.branch4.weight", "hid.enc.2.branch4.bias", "hid.enc.2.branch5.weight", "hid.enc.2.branch5.bias", "hid.enc.2.conv.weight", "hid.enc.2.conv.bias", "hid.enc.2.norm.weight", "hid.enc.2.norm.bias", 
"hid.enc.2.norm.running_mean", "hid.enc.2.norm.running_var", "hid.enc.3.branch1.weight", "hid.enc.3.branch1.bias", "hid.enc.3.branch2.weight", "hid.enc.3.branch2.bias", "hid.enc.3.branch3.weight", "hid.enc.3.branch3.bias", "hid.enc.3.branch4.weight", "hid.enc.3.branch4.bias", "hid.enc.3.branch5.weight", "hid.enc.3.branch5.bias", "hid.enc.3.conv.weight", "hid.enc.3.conv.bias", "hid.enc.3.norm.weight", "hid.enc.3.norm.bias", "hid.enc.3.norm.running_mean", "hid.enc.3.norm.running_var", "hid.enc.4.branch1.weight", "hid.enc.4.branch1.bias", "hid.enc.4.branch2.weight", "hid.enc.4.branch2.bias", "hid.enc.4.branch3.weight", "hid.enc.4.branch3.bias", "hid.enc.4.branch4.weight", "hid.enc.4.branch4.bias", "hid.enc.4.branch5.weight", "hid.enc.4.branch5.bias", "hid.enc.4.conv.weight", "hid.enc.4.conv.bias", "hid.enc.4.norm.weight", "hid.enc.4.norm.bias", "hid.enc.4.norm.running_mean", "hid.enc.4.norm.running_var", "hid.enc.5.branch1.weight", "hid.enc.5.branch1.bias", "hid.enc.5.branch2.weight", "hid.enc.5.branch2.bias", "hid.enc.5.branch3.weight", "hid.enc.5.branch3.bias", "hid.enc.5.branch4.weight", "hid.enc.5.branch4.bias", "hid.enc.5.branch5.weight", "hid.enc.5.branch5.bias", "hid.enc.5.conv.weight", "hid.enc.5.conv.bias", "hid.enc.5.norm.weight", "hid.enc.5.norm.bias", "hid.enc.5.norm.running_mean", "hid.enc.5.norm.running_var", "hid.enc.6.branch1.weight", "hid.enc.6.branch1.bias", "hid.enc.6.branch2.weight", "hid.enc.6.branch2.bias", "hid.enc.6.branch3.weight", "hid.enc.6.branch3.bias", "hid.enc.6.branch4.weight", "hid.enc.6.branch4.bias", "hid.enc.6.branch5.weight", "hid.enc.6.branch5.bias", "hid.enc.6.conv.weight", "hid.enc.6.conv.bias", "hid.enc.6.norm.weight", "hid.enc.6.norm.bias", "hid.enc.6.norm.running_mean", "hid.enc.6.norm.running_var", "hid.enc.7.branch1.weight", "hid.enc.7.branch1.bias", "hid.enc.7.branch2.weight", "hid.enc.7.branch2.bias", "hid.enc.7.branch3.weight", "hid.enc.7.branch3.bias", "hid.enc.7.branch4.weight", "hid.enc.7.branch4.bias", 
"hid.enc.7.branch5.weight", "hid.enc.7.branch5.bias", "hid.enc.7.conv.weight", "hid.enc.7.conv.bias", "hid.enc.7.norm.weight", "hid.enc.7.norm.bias", "hid.enc.7.norm.running_mean", "hid.enc.7.norm.running_var", "hid.dec.0.branch1.weight", "hid.dec.0.branch1.bias", "hid.dec.0.branch2.weight", "hid.dec.0.branch2.bias", "hid.dec.0.branch3.weight", "hid.dec.0.branch3.bias", "hid.dec.0.branch4.weight", "hid.dec.0.branch4.bias", "hid.dec.0.branch5.weight", "hid.dec.0.branch5.bias", "hid.dec.0.conv.weight", "hid.dec.0.conv.bias", "hid.dec.0.norm.weight", "hid.dec.0.norm.bias", "hid.dec.0.norm.running_mean", "hid.dec.0.norm.running_var", "hid.dec.1.branch1.weight", "hid.dec.1.branch1.bias", "hid.dec.1.branch2.weight", "hid.dec.1.branch2.bias", "hid.dec.1.branch3.weight", "hid.dec.1.branch3.bias", "hid.dec.1.branch4.weight", "hid.dec.1.branch4.bias", "hid.dec.1.branch5.weight", "hid.dec.1.branch5.bias", "hid.dec.1.conv.weight", "hid.dec.1.conv.bias", "hid.dec.1.norm.weight", "hid.dec.1.norm.bias", "hid.dec.1.norm.running_mean", "hid.dec.1.norm.running_var", "hid.dec.2.branch1.weight", "hid.dec.2.branch1.bias", "hid.dec.2.branch2.weight", "hid.dec.2.branch2.bias", "hid.dec.2.branch3.weight", "hid.dec.2.branch3.bias", "hid.dec.2.branch4.weight", "hid.dec.2.branch4.bias", "hid.dec.2.branch5.weight", "hid.dec.2.branch5.bias", "hid.dec.2.conv.weight", "hid.dec.2.conv.bias", "hid.dec.2.norm.weight", "hid.dec.2.norm.bias", "hid.dec.2.norm.running_mean", "hid.dec.2.norm.running_var", "hid.dec.3.branch1.weight", "hid.dec.3.branch1.bias", "hid.dec.3.branch2.weight", "hid.dec.3.branch2.bias", "hid.dec.3.branch3.weight", "hid.dec.3.branch3.bias", "hid.dec.3.branch4.weight", "hid.dec.3.branch4.bias", "hid.dec.3.branch5.weight", "hid.dec.3.branch5.bias", "hid.dec.3.conv.weight", "hid.dec.3.conv.bias", "hid.dec.3.norm.weight", "hid.dec.3.norm.bias", "hid.dec.3.norm.running_mean", "hid.dec.3.norm.running_var", "hid.dec.4.branch1.weight", "hid.dec.4.branch1.bias", 
"hid.dec.4.branch2.weight", "hid.dec.4.branch2.bias", "hid.dec.4.branch3.weight", "hid.dec.4.branch3.bias", "hid.dec.4.branch4.weight", "hid.dec.4.branch4.bias", "hid.dec.4.branch5.weight", "hid.dec.4.branch5.bias", "hid.dec.4.conv.weight", "hid.dec.4.conv.bias", "hid.dec.4.norm.weight", "hid.dec.4.norm.bias", "hid.dec.4.norm.running_mean", "hid.dec.4.norm.running_var", "hid.dec.5.branch1.weight", "hid.dec.5.branch1.bias", "hid.dec.5.branch2.weight", "hid.dec.5.branch2.bias", "hid.dec.5.branch3.weight", "hid.dec.5.branch3.bias", "hid.dec.5.branch4.weight", "hid.dec.5.branch4.bias", "hid.dec.5.branch5.weight", "hid.dec.5.branch5.bias", "hid.dec.5.conv.weight", "hid.dec.5.conv.bias", "hid.dec.5.norm.weight", "hid.dec.5.norm.bias", "hid.dec.5.norm.running_mean", "hid.dec.5.norm.running_var", "hid.dec.6.branch1.weight", "hid.dec.6.branch1.bias", "hid.dec.6.branch2.weight", "hid.dec.6.branch2.bias", "hid.dec.6.branch3.weight", "hid.dec.6.branch3.bias", "hid.dec.6.branch4.weight", "hid.dec.6.branch4.bias", "hid.dec.6.branch5.weight", "hid.dec.6.branch5.bias", "hid.dec.6.conv.weight", "hid.dec.6.conv.bias", "hid.dec.6.norm.weight", "hid.dec.6.norm.bias", "hid.dec.6.norm.running_mean", "hid.dec.6.norm.running_var", "hid.dec.7.branch1.weight", "hid.dec.7.branch1.bias", "hid.dec.7.branch2.weight", "hid.dec.7.branch2.bias", "hid.dec.7.branch3.weight", "hid.dec.7.branch3.bias", "hid.dec.7.branch4.weight", "hid.dec.7.branch4.bias", "hid.dec.7.branch5.weight", "hid.dec.7.branch5.bias", "hid.dec.7.conv.weight", "hid.dec.7.conv.bias", "hid.dec.7.norm.weight", "hid.dec.7.norm.bias", "hid.dec.7.norm.running_mean", "hid.dec.7.norm.running_var", "dit_block.pos_embed", "dit_block.x_embedder.proj.weight", "dit_block.x_embedder.proj.bias", "dit_block.t_embedder.mlp.0.weight", "dit_block.t_embedder.mlp.0.bias", "dit_block.t_embedder.mlp.2.weight", "dit_block.t_embedder.mlp.2.bias", "dit_block.blocks.0.attn.qkv.weight", "dit_block.blocks.0.attn.qkv.bias", 
"dit_block.blocks.0.attn.proj.weight", "dit_block.blocks.0.attn.proj.bias", "dit_block.blocks.0.mlp.fc1.weight", "dit_block.blocks.0.mlp.fc1.bias", "dit_block.blocks.0.mlp.fc2.weight", "dit_block.blocks.0.mlp.fc2.bias", "dit_block.blocks.0.adaLN_modulation.1.weight", "dit_block.blocks.0.adaLN_modulation.1.bias", "dit_block.blocks.1.attn.qkv.weight", "dit_block.blocks.1.attn.qkv.bias", "dit_block.blocks.1.attn.proj.weight", "dit_block.blocks.1.attn.proj.bias", "dit_block.blocks.1.mlp.fc1.weight", "dit_block.blocks.1.mlp.fc1.bias", "dit_block.blocks.1.mlp.fc2.weight", "dit_block.blocks.1.mlp.fc2.bias", "dit_block.blocks.1.adaLN_modulation.1.weight", "dit_block.blocks.1.adaLN_modulation.1.bias", "dit_block.blocks.2.attn.qkv.weight", "dit_block.blocks.2.attn.qkv.bias", "dit_block.blocks.2.attn.proj.weight", "dit_block.blocks.2.attn.proj.bias", "dit_block.blocks.2.mlp.fc1.weight", "dit_block.blocks.2.mlp.fc1.bias", "dit_block.blocks.2.mlp.fc2.weight", "dit_block.blocks.2.mlp.fc2.bias", "dit_block.blocks.2.adaLN_modulation.1.weight", "dit_block.blocks.2.adaLN_modulation.1.bias", "dit_block.blocks.3.attn.qkv.weight", "dit_block.blocks.3.attn.qkv.bias", "dit_block.blocks.3.attn.proj.weight", "dit_block.blocks.3.attn.proj.bias", "dit_block.blocks.3.mlp.fc1.weight", "dit_block.blocks.3.mlp.fc1.bias", "dit_block.blocks.3.mlp.fc2.weight", "dit_block.blocks.3.mlp.fc2.bias", "dit_block.blocks.3.adaLN_modulation.1.weight", "dit_block.blocks.3.adaLN_modulation.1.bias", "dit_block.blocks.4.attn.qkv.weight", "dit_block.blocks.4.attn.qkv.bias", "dit_block.blocks.4.attn.proj.weight", "dit_block.blocks.4.attn.proj.bias", "dit_block.blocks.4.mlp.fc1.weight", "dit_block.blocks.4.mlp.fc1.bias", "dit_block.blocks.4.mlp.fc2.weight", "dit_block.blocks.4.mlp.fc2.bias", "dit_block.blocks.4.adaLN_modulation.1.weight", "dit_block.blocks.4.adaLN_modulation.1.bias", "dit_block.blocks.5.attn.qkv.weight", "dit_block.blocks.5.attn.qkv.bias", "dit_block.blocks.5.attn.proj.weight", 
"dit_block.blocks.5.attn.proj.bias", "dit_block.blocks.5.mlp.fc1.weight", "dit_block.blocks.5.mlp.fc1.bias", "dit_block.blocks.5.mlp.fc2.weight", "dit_block.blocks.5.mlp.fc2.bias", "dit_block.blocks.5.adaLN_modulation.1.weight", "dit_block.blocks.5.adaLN_modulation.1.bias", "dit_block.blocks.6.attn.qkv.weight", "dit_block.blocks.6.attn.qkv.bias", "dit_block.blocks.6.attn.proj.weight", "dit_block.blocks.6.attn.proj.bias", "dit_block.blocks.6.mlp.fc1.weight", "dit_block.blocks.6.mlp.fc1.bias", "dit_block.blocks.6.mlp.fc2.weight", "dit_block.blocks.6.mlp.fc2.bias", "dit_block.blocks.6.adaLN_modulation.1.weight", "dit_block.blocks.6.adaLN_modulation.1.bias", "dit_block.blocks.7.attn.qkv.weight", "dit_block.blocks.7.attn.qkv.bias", "dit_block.blocks.7.attn.proj.weight", "dit_block.blocks.7.attn.proj.bias", "dit_block.blocks.7.mlp.fc1.weight", "dit_block.blocks.7.mlp.fc1.bias", "dit_block.blocks.7.mlp.fc2.weight", "dit_block.blocks.7.mlp.fc2.bias", "dit_block.blocks.7.adaLN_modulation.1.weight", "dit_block.blocks.7.adaLN_modulation.1.bias", "dit_block.blocks.8.attn.qkv.weight", "dit_block.blocks.8.attn.qkv.bias", "dit_block.blocks.8.attn.proj.weight", "dit_block.blocks.8.attn.proj.bias", "dit_block.blocks.8.mlp.fc1.weight", "dit_block.blocks.8.mlp.fc1.bias", "dit_block.blocks.8.mlp.fc2.weight", "dit_block.blocks.8.mlp.fc2.bias", "dit_block.blocks.8.adaLN_modulation.1.weight", "dit_block.blocks.8.adaLN_modulation.1.bias", "dit_block.blocks.9.attn.qkv.weight", "dit_block.blocks.9.attn.qkv.bias", "dit_block.blocks.9.attn.proj.weight", "dit_block.blocks.9.attn.proj.bias", "dit_block.blocks.9.mlp.fc1.weight", "dit_block.blocks.9.mlp.fc1.bias", "dit_block.blocks.9.mlp.fc2.weight", "dit_block.blocks.9.mlp.fc2.bias", "dit_block.blocks.9.adaLN_modulation.1.weight", "dit_block.blocks.9.adaLN_modulation.1.bias", "dit_block.blocks.10.attn.qkv.weight", "dit_block.blocks.10.attn.qkv.bias", "dit_block.blocks.10.attn.proj.weight", "dit_block.blocks.10.attn.proj.bias", 
"dit_block.blocks.10.mlp.fc1.weight", "dit_block.blocks.10.mlp.fc1.bias", "dit_block.blocks.10.mlp.fc2.weight", "dit_block.blocks.10.mlp.fc2.bias", "dit_block.blocks.10.adaLN_modulation.1.weight", "dit_block.blocks.10.adaLN_modulation.1.bias", "dit_block.blocks.11.attn.qkv.weight", "dit_block.blocks.11.attn.qkv.bias", "dit_block.blocks.11.attn.proj.weight", "dit_block.blocks.11.attn.proj.bias", "dit_block.blocks.11.mlp.fc1.weight", "dit_block.blocks.11.mlp.fc1.bias", "dit_block.blocks.11.mlp.fc2.weight", "dit_block.blocks.11.mlp.fc2.bias", "dit_block.blocks.11.adaLN_modulation.1.weight", "dit_block.blocks.11.adaLN_modulation.1.bias", "dit_block.final_layer.linear.weight", "dit_block.final_layer.linear.bias", "dit_block.final_layer.adaLN_modulation.1.weight", "dit_block.final_layer.adaLN_modulation.1.bias", "dec.dec.0.conv.weight", "dec.dec.0.conv.bias", "dec.dec.0.norm.weight", "dec.dec.0.norm.bias", "dec.dec.0.norm.running_mean", "dec.dec.0.norm.running_var", "dec.dec.1.conv.weight", "dec.dec.1.conv.bias", "dec.dec.1.norm.weight", "dec.dec.1.norm.bias", "dec.dec.1.norm.running_mean", "dec.dec.1.norm.running_var", "dec.dec.2.conv.weight", "dec.dec.2.conv.bias", "dec.dec.2.norm.weight", "dec.dec.2.norm.bias", "dec.dec.2.norm.running_mean", "dec.dec.2.norm.running_var", "dec.dec.3.conv.weight", "dec.dec.3.conv.bias", "dec.dec.3.norm.weight", "dec.dec.3.norm.bias", "dec.dec.3.norm.running_mean", "dec.dec.3.norm.running_var", "dec.readout.weight", "dec.readout.bias". 
+ Unexpected key(s) in state_dict: "module.enc.enc.0.conv.weight", "module.enc.enc.0.conv.bias", "module.enc.enc.0.norm.weight", "module.enc.enc.0.norm.bias", "module.enc.enc.0.norm.running_mean", "module.enc.enc.0.norm.running_var", "module.enc.enc.0.norm.num_batches_tracked", "module.enc.enc.1.conv.weight", "module.enc.enc.1.conv.bias", "module.enc.enc.1.norm.weight", "module.enc.enc.1.norm.bias", "module.enc.enc.1.norm.running_mean", "module.enc.enc.1.norm.running_var", "module.enc.enc.1.norm.num_batches_tracked", "module.enc.enc.2.conv.weight", "module.enc.enc.2.conv.bias", "module.enc.enc.2.norm.weight", "module.enc.enc.2.norm.bias", "module.enc.enc.2.norm.running_mean", "module.enc.enc.2.norm.running_var", "module.enc.enc.2.norm.num_batches_tracked", "module.enc.enc.3.conv.weight", "module.enc.enc.3.conv.bias", "module.enc.enc.3.norm.weight", "module.enc.enc.3.norm.bias", "module.enc.enc.3.norm.running_mean", "module.enc.enc.3.norm.running_var", "module.enc.enc.3.norm.num_batches_tracked", "module.hid.norm.weight", "module.hid.norm.bias", "module.hid.enc.0.branch1.weight", "module.hid.enc.0.branch1.bias", "module.hid.enc.0.branch2.weight", "module.hid.enc.0.branch2.bias", "module.hid.enc.0.branch3.weight", "module.hid.enc.0.branch3.bias", "module.hid.enc.0.branch4.weight", "module.hid.enc.0.branch4.bias", "module.hid.enc.0.branch5.weight", "module.hid.enc.0.branch5.bias", "module.hid.enc.0.conv.weight", "module.hid.enc.0.conv.bias", "module.hid.enc.0.norm.weight", "module.hid.enc.0.norm.bias", "module.hid.enc.0.norm.running_mean", "module.hid.enc.0.norm.running_var", "module.hid.enc.0.norm.num_batches_tracked", "module.hid.enc.1.branch1.weight", "module.hid.enc.1.branch1.bias", "module.hid.enc.1.branch2.weight", "module.hid.enc.1.branch2.bias", "module.hid.enc.1.branch3.weight", "module.hid.enc.1.branch3.bias", "module.hid.enc.1.branch4.weight", "module.hid.enc.1.branch4.bias", "module.hid.enc.1.branch5.weight", "module.hid.enc.1.branch5.bias", 
"module.hid.enc.1.conv.weight", "module.hid.enc.1.conv.bias", "module.hid.enc.1.norm.weight", "module.hid.enc.1.norm.bias", "module.hid.enc.1.norm.running_mean", "module.hid.enc.1.norm.running_var", "module.hid.enc.1.norm.num_batches_tracked", "module.hid.enc.2.branch1.weight", "module.hid.enc.2.branch1.bias", "module.hid.enc.2.branch2.weight", "module.hid.enc.2.branch2.bias", "module.hid.enc.2.branch3.weight", "module.hid.enc.2.branch3.bias", "module.hid.enc.2.branch4.weight", "module.hid.enc.2.branch4.bias", "module.hid.enc.2.branch5.weight", "module.hid.enc.2.branch5.bias", "module.hid.enc.2.conv.weight", "module.hid.enc.2.conv.bias", "module.hid.enc.2.norm.weight", "module.hid.enc.2.norm.bias", "module.hid.enc.2.norm.running_mean", "module.hid.enc.2.norm.running_var", "module.hid.enc.2.norm.num_batches_tracked", "module.hid.enc.3.branch1.weight", "module.hid.enc.3.branch1.bias", "module.hid.enc.3.branch2.weight", "module.hid.enc.3.branch2.bias", "module.hid.enc.3.branch3.weight", "module.hid.enc.3.branch3.bias", "module.hid.enc.3.branch4.weight", "module.hid.enc.3.branch4.bias", "module.hid.enc.3.branch5.weight", "module.hid.enc.3.branch5.bias", "module.hid.enc.3.conv.weight", "module.hid.enc.3.conv.bias", "module.hid.enc.3.norm.weight", "module.hid.enc.3.norm.bias", "module.hid.enc.3.norm.running_mean", "module.hid.enc.3.norm.running_var", "module.hid.enc.3.norm.num_batches_tracked", "module.hid.enc.4.branch1.weight", "module.hid.enc.4.branch1.bias", "module.hid.enc.4.branch2.weight", "module.hid.enc.4.branch2.bias", "module.hid.enc.4.branch3.weight", "module.hid.enc.4.branch3.bias", "module.hid.enc.4.branch4.weight", "module.hid.enc.4.branch4.bias", "module.hid.enc.4.branch5.weight", "module.hid.enc.4.branch5.bias", "module.hid.enc.4.conv.weight", "module.hid.enc.4.conv.bias", "module.hid.enc.4.norm.weight", "module.hid.enc.4.norm.bias", "module.hid.enc.4.norm.running_mean", "module.hid.enc.4.norm.running_var", "module.hid.enc.4.norm.num_batches_tracked", 
"module.hid.enc.5.branch1.weight", "module.hid.enc.5.branch1.bias", "module.hid.enc.5.branch2.weight", "module.hid.enc.5.branch2.bias", "module.hid.enc.5.branch3.weight", "module.hid.enc.5.branch3.bias", "module.hid.enc.5.branch4.weight", "module.hid.enc.5.branch4.bias", "module.hid.enc.5.branch5.weight", "module.hid.enc.5.branch5.bias", "module.hid.enc.5.conv.weight", "module.hid.enc.5.conv.bias", "module.hid.enc.5.norm.weight", "module.hid.enc.5.norm.bias", "module.hid.enc.5.norm.running_mean", "module.hid.enc.5.norm.running_var", "module.hid.enc.5.norm.num_batches_tracked", "module.hid.enc.6.branch1.weight", "module.hid.enc.6.branch1.bias", "module.hid.enc.6.branch2.weight", "module.hid.enc.6.branch2.bias", "module.hid.enc.6.branch3.weight", "module.hid.enc.6.branch3.bias", "module.hid.enc.6.branch4.weight", "module.hid.enc.6.branch4.bias", "module.hid.enc.6.branch5.weight", "module.hid.enc.6.branch5.bias", "module.hid.enc.6.conv.weight", "module.hid.enc.6.conv.bias", "module.hid.enc.6.norm.weight", "module.hid.enc.6.norm.bias", "module.hid.enc.6.norm.running_mean", "module.hid.enc.6.norm.running_var", "module.hid.enc.6.norm.num_batches_tracked", "module.hid.enc.7.branch1.weight", "module.hid.enc.7.branch1.bias", "module.hid.enc.7.branch2.weight", "module.hid.enc.7.branch2.bias", "module.hid.enc.7.branch3.weight", "module.hid.enc.7.branch3.bias", "module.hid.enc.7.branch4.weight", "module.hid.enc.7.branch4.bias", "module.hid.enc.7.branch5.weight", "module.hid.enc.7.branch5.bias", "module.hid.enc.7.conv.weight", "module.hid.enc.7.conv.bias", "module.hid.enc.7.norm.weight", "module.hid.enc.7.norm.bias", "module.hid.enc.7.norm.running_mean", "module.hid.enc.7.norm.running_var", "module.hid.enc.7.norm.num_batches_tracked", "module.hid.dec.0.branch1.weight", "module.hid.dec.0.branch1.bias", "module.hid.dec.0.branch2.weight", "module.hid.dec.0.branch2.bias", "module.hid.dec.0.branch3.weight", "module.hid.dec.0.branch3.bias", "module.hid.dec.0.branch4.weight", 
"module.hid.dec.0.branch4.bias", "module.hid.dec.0.branch5.weight", "module.hid.dec.0.branch5.bias", "module.hid.dec.0.conv.weight", "module.hid.dec.0.conv.bias", "module.hid.dec.0.norm.weight", "module.hid.dec.0.norm.bias", "module.hid.dec.0.norm.running_mean", "module.hid.dec.0.norm.running_var", "module.hid.dec.0.norm.num_batches_tracked", "module.hid.dec.1.branch1.weight", "module.hid.dec.1.branch1.bias", "module.hid.dec.1.branch2.weight", "module.hid.dec.1.branch2.bias", "module.hid.dec.1.branch3.weight", "module.hid.dec.1.branch3.bias", "module.hid.dec.1.branch4.weight", "module.hid.dec.1.branch4.bias", "module.hid.dec.1.branch5.weight", "module.hid.dec.1.branch5.bias", "module.hid.dec.1.conv.weight", "module.hid.dec.1.conv.bias", "module.hid.dec.1.norm.weight", "module.hid.dec.1.norm.bias", "module.hid.dec.1.norm.running_mean", "module.hid.dec.1.norm.running_var", "module.hid.dec.1.norm.num_batches_tracked", "module.hid.dec.2.branch1.weight", "module.hid.dec.2.branch1.bias", "module.hid.dec.2.branch2.weight", "module.hid.dec.2.branch2.bias", "module.hid.dec.2.branch3.weight", "module.hid.dec.2.branch3.bias", "module.hid.dec.2.branch4.weight", "module.hid.dec.2.branch4.bias", "module.hid.dec.2.branch5.weight", "module.hid.dec.2.branch5.bias", "module.hid.dec.2.conv.weight", "module.hid.dec.2.conv.bias", "module.hid.dec.2.norm.weight", "module.hid.dec.2.norm.bias", "module.hid.dec.2.norm.running_mean", "module.hid.dec.2.norm.running_var", "module.hid.dec.2.norm.num_batches_tracked", "module.hid.dec.3.branch1.weight", "module.hid.dec.3.branch1.bias", "module.hid.dec.3.branch2.weight", "module.hid.dec.3.branch2.bias", "module.hid.dec.3.branch3.weight", "module.hid.dec.3.branch3.bias", "module.hid.dec.3.branch4.weight", "module.hid.dec.3.branch4.bias", "module.hid.dec.3.branch5.weight", "module.hid.dec.3.branch5.bias", "module.hid.dec.3.conv.weight", "module.hid.dec.3.conv.bias", "module.hid.dec.3.norm.weight", "module.hid.dec.3.norm.bias", 
"module.hid.dec.3.norm.running_mean", "module.hid.dec.3.norm.running_var", "module.hid.dec.3.norm.num_batches_tracked", "module.hid.dec.4.branch1.weight", "module.hid.dec.4.branch1.bias", "module.hid.dec.4.branch2.weight", "module.hid.dec.4.branch2.bias", "module.hid.dec.4.branch3.weight", "module.hid.dec.4.branch3.bias", "module.hid.dec.4.branch4.weight", "module.hid.dec.4.branch4.bias", "module.hid.dec.4.branch5.weight", "module.hid.dec.4.branch5.bias", "module.hid.dec.4.conv.weight", "module.hid.dec.4.conv.bias", "module.hid.dec.4.norm.weight", "module.hid.dec.4.norm.bias", "module.hid.dec.4.norm.running_mean", "module.hid.dec.4.norm.running_var", "module.hid.dec.4.norm.num_batches_tracked", "module.hid.dec.5.branch1.weight", "module.hid.dec.5.branch1.bias", "module.hid.dec.5.branch2.weight", "module.hid.dec.5.branch2.bias", "module.hid.dec.5.branch3.weight", "module.hid.dec.5.branch3.bias", "module.hid.dec.5.branch4.weight", "module.hid.dec.5.branch4.bias", "module.hid.dec.5.branch5.weight", "module.hid.dec.5.branch5.bias", "module.hid.dec.5.conv.weight", "module.hid.dec.5.conv.bias", "module.hid.dec.5.norm.weight", "module.hid.dec.5.norm.bias", "module.hid.dec.5.norm.running_mean", "module.hid.dec.5.norm.running_var", "module.hid.dec.5.norm.num_batches_tracked", "module.hid.dec.6.branch1.weight", "module.hid.dec.6.branch1.bias", "module.hid.dec.6.branch2.weight", "module.hid.dec.6.branch2.bias", "module.hid.dec.6.branch3.weight", "module.hid.dec.6.branch3.bias", "module.hid.dec.6.branch4.weight", "module.hid.dec.6.branch4.bias", "module.hid.dec.6.branch5.weight", "module.hid.dec.6.branch5.bias", "module.hid.dec.6.conv.weight", "module.hid.dec.6.conv.bias", "module.hid.dec.6.norm.weight", "module.hid.dec.6.norm.bias", "module.hid.dec.6.norm.running_mean", "module.hid.dec.6.norm.running_var", "module.hid.dec.6.norm.num_batches_tracked", "module.hid.dec.7.branch1.weight", "module.hid.dec.7.branch1.bias", "module.hid.dec.7.branch2.weight", 
"module.hid.dec.7.branch2.bias", "module.hid.dec.7.branch3.weight", "module.hid.dec.7.branch3.bias", "module.hid.dec.7.branch4.weight", "module.hid.dec.7.branch4.bias", "module.hid.dec.7.branch5.weight", "module.hid.dec.7.branch5.bias", "module.hid.dec.7.conv.weight", "module.hid.dec.7.conv.bias", "module.hid.dec.7.norm.weight", "module.hid.dec.7.norm.bias", "module.hid.dec.7.norm.running_mean", "module.hid.dec.7.norm.running_var", "module.hid.dec.7.norm.num_batches_tracked", "module.dit_block.pos_embed", "module.dit_block.x_embedder.proj.weight", "module.dit_block.x_embedder.proj.bias", "module.dit_block.t_embedder.mlp.0.weight", "module.dit_block.t_embedder.mlp.0.bias", "module.dit_block.t_embedder.mlp.2.weight", "module.dit_block.t_embedder.mlp.2.bias", "module.dit_block.blocks.0.attn.qkv.weight", "module.dit_block.blocks.0.attn.qkv.bias", "module.dit_block.blocks.0.attn.proj.weight", "module.dit_block.blocks.0.attn.proj.bias", "module.dit_block.blocks.0.mlp.fc1.weight", "module.dit_block.blocks.0.mlp.fc1.bias", "module.dit_block.blocks.0.mlp.fc2.weight", "module.dit_block.blocks.0.mlp.fc2.bias", "module.dit_block.blocks.0.adaLN_modulation.1.weight", "module.dit_block.blocks.0.adaLN_modulation.1.bias", "module.dit_block.blocks.1.attn.qkv.weight", "module.dit_block.blocks.1.attn.qkv.bias", "module.dit_block.blocks.1.attn.proj.weight", "module.dit_block.blocks.1.attn.proj.bias", "module.dit_block.blocks.1.mlp.fc1.weight", "module.dit_block.blocks.1.mlp.fc1.bias", "module.dit_block.blocks.1.mlp.fc2.weight", "module.dit_block.blocks.1.mlp.fc2.bias", "module.dit_block.blocks.1.adaLN_modulation.1.weight", "module.dit_block.blocks.1.adaLN_modulation.1.bias", "module.dit_block.blocks.2.attn.qkv.weight", "module.dit_block.blocks.2.attn.qkv.bias", "module.dit_block.blocks.2.attn.proj.weight", "module.dit_block.blocks.2.attn.proj.bias", "module.dit_block.blocks.2.mlp.fc1.weight", "module.dit_block.blocks.2.mlp.fc1.bias", "module.dit_block.blocks.2.mlp.fc2.weight", 
"module.dit_block.blocks.2.mlp.fc2.bias", "module.dit_block.blocks.2.adaLN_modulation.1.weight", "module.dit_block.blocks.2.adaLN_modulation.1.bias", "module.dit_block.blocks.3.attn.qkv.weight", "module.dit_block.blocks.3.attn.qkv.bias", "module.dit_block.blocks.3.attn.proj.weight", "module.dit_block.blocks.3.attn.proj.bias", "module.dit_block.blocks.3.mlp.fc1.weight", "module.dit_block.blocks.3.mlp.fc1.bias", "module.dit_block.blocks.3.mlp.fc2.weight", "module.dit_block.blocks.3.mlp.fc2.bias", "module.dit_block.blocks.3.adaLN_modulation.1.weight", "module.dit_block.blocks.3.adaLN_modulation.1.bias", "module.dit_block.blocks.4.attn.qkv.weight", "module.dit_block.blocks.4.attn.qkv.bias", "module.dit_block.blocks.4.attn.proj.weight", "module.dit_block.blocks.4.attn.proj.bias", "module.dit_block.blocks.4.mlp.fc1.weight", "module.dit_block.blocks.4.mlp.fc1.bias", "module.dit_block.blocks.4.mlp.fc2.weight", "module.dit_block.blocks.4.mlp.fc2.bias", "module.dit_block.blocks.4.adaLN_modulation.1.weight", "module.dit_block.blocks.4.adaLN_modulation.1.bias", "module.dit_block.blocks.5.attn.qkv.weight", "module.dit_block.blocks.5.attn.qkv.bias", "module.dit_block.blocks.5.attn.proj.weight", "module.dit_block.blocks.5.attn.proj.bias", "module.dit_block.blocks.5.mlp.fc1.weight", "module.dit_block.blocks.5.mlp.fc1.bias", "module.dit_block.blocks.5.mlp.fc2.weight", "module.dit_block.blocks.5.mlp.fc2.bias", "module.dit_block.blocks.5.adaLN_modulation.1.weight", "module.dit_block.blocks.5.adaLN_modulation.1.bias", "module.dit_block.blocks.6.attn.qkv.weight", "module.dit_block.blocks.6.attn.qkv.bias", "module.dit_block.blocks.6.attn.proj.weight", "module.dit_block.blocks.6.attn.proj.bias", "module.dit_block.blocks.6.mlp.fc1.weight", "module.dit_block.blocks.6.mlp.fc1.bias", "module.dit_block.blocks.6.mlp.fc2.weight", "module.dit_block.blocks.6.mlp.fc2.bias", "module.dit_block.blocks.6.adaLN_modulation.1.weight", "module.dit_block.blocks.6.adaLN_modulation.1.bias", 
"module.dit_block.blocks.7.attn.qkv.weight", "module.dit_block.blocks.7.attn.qkv.bias", "module.dit_block.blocks.7.attn.proj.weight", "module.dit_block.blocks.7.attn.proj.bias", "module.dit_block.blocks.7.mlp.fc1.weight", "module.dit_block.blocks.7.mlp.fc1.bias", "module.dit_block.blocks.7.mlp.fc2.weight", "module.dit_block.blocks.7.mlp.fc2.bias", "module.dit_block.blocks.7.adaLN_modulation.1.weight", "module.dit_block.blocks.7.adaLN_modulation.1.bias", "module.dit_block.blocks.8.attn.qkv.weight", "module.dit_block.blocks.8.attn.qkv.bias", "module.dit_block.blocks.8.attn.proj.weight", "module.dit_block.blocks.8.attn.proj.bias", "module.dit_block.blocks.8.mlp.fc1.weight", "module.dit_block.blocks.8.mlp.fc1.bias", "module.dit_block.blocks.8.mlp.fc2.weight", "module.dit_block.blocks.8.mlp.fc2.bias", "module.dit_block.blocks.8.adaLN_modulation.1.weight", "module.dit_block.blocks.8.adaLN_modulation.1.bias", "module.dit_block.blocks.9.attn.qkv.weight", "module.dit_block.blocks.9.attn.qkv.bias", "module.dit_block.blocks.9.attn.proj.weight", "module.dit_block.blocks.9.attn.proj.bias", "module.dit_block.blocks.9.mlp.fc1.weight", "module.dit_block.blocks.9.mlp.fc1.bias", "module.dit_block.blocks.9.mlp.fc2.weight", "module.dit_block.blocks.9.mlp.fc2.bias", "module.dit_block.blocks.9.adaLN_modulation.1.weight", "module.dit_block.blocks.9.adaLN_modulation.1.bias", "module.dit_block.blocks.10.attn.qkv.weight", "module.dit_block.blocks.10.attn.qkv.bias", "module.dit_block.blocks.10.attn.proj.weight", "module.dit_block.blocks.10.attn.proj.bias", "module.dit_block.blocks.10.mlp.fc1.weight", "module.dit_block.blocks.10.mlp.fc1.bias", "module.dit_block.blocks.10.mlp.fc2.weight", "module.dit_block.blocks.10.mlp.fc2.bias", "module.dit_block.blocks.10.adaLN_modulation.1.weight", "module.dit_block.blocks.10.adaLN_modulation.1.bias", "module.dit_block.blocks.11.attn.qkv.weight", "module.dit_block.blocks.11.attn.qkv.bias", "module.dit_block.blocks.11.attn.proj.weight", 
"module.dit_block.blocks.11.attn.proj.bias", "module.dit_block.blocks.11.mlp.fc1.weight", "module.dit_block.blocks.11.mlp.fc1.bias", "module.dit_block.blocks.11.mlp.fc2.weight", "module.dit_block.blocks.11.mlp.fc2.bias", "module.dit_block.blocks.11.adaLN_modulation.1.weight", "module.dit_block.blocks.11.adaLN_modulation.1.bias", "module.dit_block.final_layer.linear.weight", "module.dit_block.final_layer.linear.bias", "module.dit_block.final_layer.adaLN_modulation.1.weight", "module.dit_block.final_layer.adaLN_modulation.1.bias", "module.dec.dec.0.conv.weight", "module.dec.dec.0.conv.bias", "module.dec.dec.0.norm.weight", "module.dec.dec.0.norm.bias", "module.dec.dec.0.norm.running_mean", "module.dec.dec.0.norm.running_var", "module.dec.dec.0.norm.num_batches_tracked", "module.dec.dec.1.conv.weight", "module.dec.dec.1.conv.bias", "module.dec.dec.1.norm.weight", "module.dec.dec.1.norm.bias", "module.dec.dec.1.norm.running_mean", "module.dec.dec.1.norm.running_var", "module.dec.dec.1.norm.num_batches_tracked", "module.dec.dec.2.conv.weight", "module.dec.dec.2.conv.bias", "module.dec.dec.2.norm.weight", "module.dec.dec.2.norm.bias", "module.dec.dec.2.norm.running_mean", "module.dec.dec.2.norm.running_var", "module.dec.dec.2.norm.num_batches_tracked", "module.dec.dec.3.conv.weight", "module.dec.dec.3.conv.bias", "module.dec.dec.3.norm.weight", "module.dec.dec.3.norm.bias", "module.dec.dec.3.norm.running_mean", "module.dec.dec.3.norm.running_var", "module.dec.dec.3.norm.num_batches_tracked", "module.dec.readout.weight", "module.dec.readout.bias". +2025-02-24 16:39:06,156 加载模型失败:name 'OrderedDict' is not defined +2025-02-24 16:39:06,157 推理过程出错:模型加载错误:name 'OrderedDict' is not defined +2025-02-24 16:43:43,839 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:43:43,843 开始推理... 
+2025-02-24 16:44:55,421 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:44:55,425 开始推理... +2025-02-24 16:44:56,032 推理过程出错:need at least one array to concatenate +2025-02-24 16:45:35,939 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:45:35,948 开始推理... +2025-02-24 16:45:36,868 推理过程出错:need at least one array to concatenate +2025-02-24 16:46:21,644 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:46:21,648 开始推理... +2025-02-24 16:56:24,368 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 16:56:24,372 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 16:56:24,372 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 16:56:24,372 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 16:56:24,882 输入数据范围:[-2.09, 2.09] +2025-02-24 16:56:25,407 输出数据范围:[-1.24, 1.57] +2025-02-24 16:56:25,407 推理完成! +2025-02-24 17:00:07,689 加载模型失败:Error(s) in loading state_dict for Dit: + size mismatch for dec.readout.weight: copying a param with shape torch.Size([2, 64, 1, 1]) from checkpoint, the shape in current model is torch.Size([2, 32, 1, 1]). +2025-02-24 17:00:07,689 推理过程出错:模型加载错误:Error(s) in loading state_dict for Dit: + size mismatch for dec.readout.weight: copying a param with shape torch.Size([2, 64, 1, 1]) from checkpoint, the shape in current model is torch.Size([2, 32, 1, 1]). +2025-02-24 17:06:16,993 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-24 17:06:16,998 开始推理... 
+2025-02-24 17:14:06,318 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 17:14:06,365 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:14:06,366 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:14:06,366 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:14:07,220 输入数据范围:[-2.09, 2.09] +2025-02-24 17:14:07,734 输出数据范围:[-1.92, 2.03] +2025-02-24 17:14:07,735 推理完成! diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250224_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250224_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..1583c05c609677856d2d85e04fc9d12fd9faa6fe --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250224_training_log-checkpoint.log @@ -0,0 +1,20 @@ +2025-02-24 15:59:52,234 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 15:59:52,307 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 15:59:52,589 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 15:59:52,594 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 15:59:52,604 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 15:59:52,613 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-24 15:59:52,620 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 15:59:52,628 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-24 16:00:49,500 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 16:00:49,520 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 16:00:49,539 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 16:00:49,572 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 16:00:49,649 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 16:00:49,658 Added key: store_based_barrier_key:1 to 
store for rank: 6 +2025-02-24 16:00:49,663 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-24 16:00:49,671 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 16:01:53,824 Epoch 1/2000 +2025-02-24 16:03:36,980 Current Learning Rate: 0.0099993832 +2025-02-24 16:03:37,195 Train Loss: 0.0205501, Val Loss: 0.0178727 +2025-02-24 16:03:37,196 Epoch 2/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250316_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250316_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..17e9ed7f4c926164e736eab95a1e518dfc03fe3e --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Dit_exp2_20250316_training_log-checkpoint.log @@ -0,0 +1,4 @@ +2025-03-16 16:41:47,460 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-16 16:41:47,474 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-16 16:41:47,483 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-16 16:41:47,488 Added key: store_based_barrier_key:1 to store for rank: 2 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Kno_exp1_20250226_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Kno_exp1_20250226_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..881c02288a71ec5c99aea89daba2023ac57be70f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Kno_exp1_20250226_training_log-checkpoint.log @@ -0,0 +1,7 @@ +2025-02-25 23:04:16,603 Epoch 1/2000 +2025-02-25 23:06:28,425 Epoch 1/2000 +2025-02-25 23:07:44,531 Epoch 1/2000 +2025-02-25 23:10:18,355 Epoch 1/2000 +2025-02-25 23:11:22,774 Current Learning Rate: 0.0099993832 +2025-02-25 23:11:22,780 Train Loss: 0.0107643, Val Loss: 0.0159971 +2025-02-25 23:11:22,781 Epoch 2/2000 diff --git 
a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp1_20250224_inference-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp1_20250224_inference-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..c939751ed185ae57535323e4c29e12fc58d26b2b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp1_20250224_inference-checkpoint.log @@ -0,0 +1,18 @@ +2025-02-24 18:00:42,704 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-24 18:00:42,708 开始推理... +2025-02-24 18:09:56,678 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 18:09:56,682 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 18:09:56,682 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 18:09:56,682 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 18:09:57,450 输入数据范围:[-2.09, 2.09] +2025-02-24 18:09:58,188 输出数据范围:[-2.05, 2.18] +2025-02-24 18:09:58,188 推理完成! +2025-02-24 20:12:53,532 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-24 20:12:53,536 开始推理... +2025-02-24 20:22:08,245 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 20:22:08,250 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 20:22:08,250 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 20:22:08,250 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 20:22:09,005 输入数据范围:[-2.09, 2.09] +2025-02-24 20:22:09,754 输出数据范围:[-2.08, 2.09] +2025-02-24 20:22:09,755 推理完成! 
diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp1_20250224_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp1_20250224_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..4dc9f3e6de9cc12f61ff0eb5f61c77efe34afb85 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp1_20250224_training_log-checkpoint.log @@ -0,0 +1,13 @@ +2025-02-24 16:22:15,927 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 16:22:15,977 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 16:22:15,995 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 16:22:16,018 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 16:22:16,022 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 16:22:16,024 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 16:23:27,380 Epoch 1/2000 +2025-02-24 16:28:31,405 Current Learning Rate: 0.0099993832 +2025-02-24 16:28:31,456 Train Loss: 0.0219006, Val Loss: 0.0069225 +2025-02-24 16:28:31,456 Epoch 2/2000 +2025-02-24 16:33:36,817 Current Learning Rate: 0.0099975328 +2025-02-24 16:33:36,881 Train Loss: 0.0042377, Val Loss: 0.0033775 +2025-02-24 16:33:36,882 Epoch 3/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp_128_20250324_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp_128_20250324_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..5730361aeedea30d42dd41e3c8105d2728656847 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Simvp_exp_128_20250324_training_log-checkpoint.log @@ -0,0 +1,9 @@ +2025-03-24 15:39:09,628 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-24 15:39:09,736 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-24 
15:39:09,798 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-24 15:39:09,812 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-24 15:39:09,824 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-24 15:39:09,835 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-24 15:39:09,841 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-24 15:39:09,845 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-24 15:40:09,695 Epoch 1/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_K_uv_20250218_exp1_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_K_uv_20250218_exp1_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..909001efda4c2ebeff81a792f03477b86166a8b1 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_K_uv_20250218_exp1_training_log-checkpoint.log @@ -0,0 +1,66 @@ +2025-02-18 11:17:53,278 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:17:53,439 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:17:53,586 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:17:53,709 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:17:53,741 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:17:53,802 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:17:53,813 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:17:53,817 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:20:47,228 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:20:47,325 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:20:47,346 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:20:47,379 Added key: store_based_barrier_key:1 to store 
for rank: 7 +2025-02-18 11:20:47,407 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:20:47,433 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:20:47,447 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:20:47,449 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:21:21,680 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:21:21,722 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:21:21,801 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:21:21,807 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:21:21,812 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:21:22,527 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:21:22,564 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:21:22,571 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:21:46,526 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:21:46,547 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:21:46,563 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:21:47,711 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:21:47,742 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:21:47,797 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:21:47,803 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:21:47,805 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:22:11,648 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:22:11,905 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:22:12,036 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:22:12,080 Added key: store_based_barrier_key:1 to store for 
rank: 2 +2025-02-18 11:22:12,109 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:22:12,171 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:22:12,182 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:22:12,194 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:34:43,514 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:34:43,522 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:34:43,546 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:34:43,607 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:34:43,618 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:34:43,630 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:34:43,663 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:34:43,676 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:35:40,272 Epoch 1/2000 +2025-02-18 11:35:44,883 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,883 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,925 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 11:36:23,345 Current Learning Rate: 0.0099993832 +2025-02-18 11:36:24,693 Train Loss: 1.3628420, Val Loss: 0.1698331 +2025-02-18 11:36:24,694 Epoch 2/2000 +2025-02-18 11:37:06,152 Current Learning Rate: 0.0099975328 +2025-02-18 11:37:07,935 Train Loss: 0.0687663, Val Loss: 0.0357662 +2025-02-18 11:37:07,935 Epoch 3/2000 +2025-02-18 11:37:49,856 Current Learning Rate: 0.0099944494 +2025-02-18 11:37:51,612 Train Loss: 0.0274687, Val Loss: 0.0195614 +2025-02-18 11:37:51,612 Epoch 4/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_K_uv_20250218_exp2_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_K_uv_20250218_exp2_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..ff6d97c769633aa60ec7580d1dc0e5e1bd39b206 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_K_uv_20250218_exp2_training_log-checkpoint.log @@ -0,0 +1,625 @@ +2025-02-18 11:42:14,518 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:42:14,526 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:42:14,568 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:42:14,637 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:42:14,686 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:42:14,702 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:42:14,714 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:42:14,718 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:42:52,388 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:42:52,451 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:42:52,545 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:42:52,578 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 
11:42:52,589 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:42:52,607 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:42:52,621 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:42:52,629 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:43:15,702 Epoch 1/2000 +2025-02-18 11:43:20,492 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,492 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:59,250 Current Learning Rate: 0.0099993832 +2025-02-18 11:44:00,759 Train Loss: 1.3628420, Val Loss: 0.1698331 +2025-02-18 11:44:00,762 Epoch 2/2000 +2025-02-18 11:44:42,481 Current Learning Rate: 0.0099975328 +2025-02-18 11:44:43,991 Train Loss: 0.0687663, Val Loss: 0.0357662 +2025-02-18 11:44:43,991 Epoch 3/2000 +2025-02-18 11:45:26,064 Current Learning Rate: 0.0099944494 +2025-02-18 11:45:27,993 Train Loss: 0.0274687, Val Loss: 0.0195614 +2025-02-18 11:45:27,994 Epoch 4/2000 +2025-02-18 11:46:09,808 Current Learning Rate: 0.0099901336 +2025-02-18 11:46:11,881 Train Loss: 0.0202074, Val Loss: 0.0180349 +2025-02-18 11:46:11,881 Epoch 5/2000 +2025-02-18 11:46:53,799 Current Learning Rate: 0.0099845867 +2025-02-18 11:46:55,553 Train Loss: 0.0193941, Val Loss: 0.0177117 +2025-02-18 11:46:55,557 Epoch 6/2000 +2025-02-18 11:47:37,638 Current Learning Rate: 0.0099778098 +2025-02-18 11:47:39,763 Train Loss: 0.0191047, Val Loss: 0.0175423 +2025-02-18 11:47:39,763 Epoch 7/2000 +2025-02-18 
11:48:21,731 Current Learning Rate: 0.0099698048 +2025-02-18 11:48:23,718 Train Loss: 0.0189317, Val Loss: 0.0174137 +2025-02-18 11:48:23,719 Epoch 8/2000 +2025-02-18 11:49:05,878 Current Learning Rate: 0.0099605735 +2025-02-18 11:49:07,409 Train Loss: 0.0187991, Val Loss: 0.0173049 +2025-02-18 11:49:07,410 Epoch 9/2000 +2025-02-18 11:49:49,527 Current Learning Rate: 0.0099501183 +2025-02-18 11:49:51,296 Train Loss: 0.0186761, Val Loss: 0.0172068 +2025-02-18 11:49:51,297 Epoch 10/2000 +2025-02-18 11:50:33,097 Current Learning Rate: 0.0099384417 +2025-02-18 11:50:34,968 Train Loss: 0.0185759, Val Loss: 0.0171160 +2025-02-18 11:50:34,968 Epoch 11/2000 +2025-02-18 11:51:16,768 Current Learning Rate: 0.0099255466 +2025-02-18 11:51:18,470 Train Loss: 0.0184690, Val Loss: 0.0170284 +2025-02-18 11:51:18,471 Epoch 12/2000 +2025-02-18 11:52:00,007 Current Learning Rate: 0.0099114363 +2025-02-18 11:52:01,345 Train Loss: 0.0183741, Val Loss: 0.0169453 +2025-02-18 11:52:01,346 Epoch 13/2000 +2025-02-18 11:52:43,115 Current Learning Rate: 0.0098961141 +2025-02-18 11:52:44,937 Train Loss: 0.0182787, Val Loss: 0.0168658 +2025-02-18 11:52:44,937 Epoch 14/2000 +2025-02-18 11:53:26,758 Current Learning Rate: 0.0098795838 +2025-02-18 11:53:28,848 Train Loss: 0.0181902, Val Loss: 0.0167882 +2025-02-18 11:53:28,849 Epoch 15/2000 +2025-02-18 11:54:09,794 Current Learning Rate: 0.0098618496 +2025-02-18 11:54:11,095 Train Loss: 0.0181102, Val Loss: 0.0167148 +2025-02-18 11:54:11,096 Epoch 16/2000 +2025-02-18 11:54:52,819 Current Learning Rate: 0.0098429158 +2025-02-18 11:54:54,236 Train Loss: 0.0180223, Val Loss: 0.0166435 +2025-02-18 11:54:54,237 Epoch 17/2000 +2025-02-18 11:55:36,300 Current Learning Rate: 0.0098227871 +2025-02-18 11:55:38,214 Train Loss: 0.0179441, Val Loss: 0.0165739 +2025-02-18 11:55:38,214 Epoch 18/2000 +2025-02-18 11:56:20,053 Current Learning Rate: 0.0098014684 +2025-02-18 11:56:22,204 Train Loss: 0.0178550, Val Loss: 0.0164465 +2025-02-18 11:56:22,204 Epoch 
19/2000 +2025-02-18 11:57:03,548 Current Learning Rate: 0.0097789651 +2025-02-18 11:57:05,324 Train Loss: 0.0176387, Val Loss: 0.0162635 +2025-02-18 11:57:05,324 Epoch 20/2000 +2025-02-18 11:57:46,483 Current Learning Rate: 0.0097552826 +2025-02-18 11:57:48,315 Train Loss: 0.0174251, Val Loss: 0.0161026 +2025-02-18 11:57:48,316 Epoch 21/2000 +2025-02-18 11:58:29,538 Current Learning Rate: 0.0097304268 +2025-02-18 11:58:31,217 Train Loss: 0.0172205, Val Loss: 0.0159079 +2025-02-18 11:58:31,218 Epoch 22/2000 +2025-02-18 11:59:12,500 Current Learning Rate: 0.0097044038 +2025-02-18 11:59:13,681 Train Loss: 0.0169694, Val Loss: 0.0156002 +2025-02-18 11:59:13,683 Epoch 23/2000 +2025-02-18 11:59:55,418 Current Learning Rate: 0.0096772202 +2025-02-18 11:59:56,993 Train Loss: 0.0165015, Val Loss: 0.0151188 +2025-02-18 11:59:56,993 Epoch 24/2000 +2025-02-18 12:00:38,790 Current Learning Rate: 0.0096488824 +2025-02-18 12:00:39,918 Train Loss: 0.0158210, Val Loss: 0.0143949 +2025-02-18 12:00:39,919 Epoch 25/2000 +2025-02-18 12:01:22,034 Current Learning Rate: 0.0096193977 +2025-02-18 12:01:23,482 Train Loss: 0.0150978, Val Loss: 0.0137453 +2025-02-18 12:01:23,484 Epoch 26/2000 +2025-02-18 12:02:05,491 Current Learning Rate: 0.0095887731 +2025-02-18 12:02:07,069 Train Loss: 0.0146063, Val Loss: 0.0133372 +2025-02-18 12:02:07,069 Epoch 27/2000 +2025-02-18 12:02:48,291 Current Learning Rate: 0.0095570164 +2025-02-18 12:02:49,392 Train Loss: 0.0141579, Val Loss: 0.0129903 +2025-02-18 12:02:49,392 Epoch 28/2000 +2025-02-18 12:03:31,428 Current Learning Rate: 0.0095241353 +2025-02-18 12:03:33,011 Train Loss: 0.0138481, Val Loss: 0.0127644 +2025-02-18 12:03:33,016 Epoch 29/2000 +2025-02-18 12:04:15,259 Current Learning Rate: 0.0094901379 +2025-02-18 12:04:17,239 Train Loss: 0.0135901, Val Loss: 0.0124894 +2025-02-18 12:04:17,239 Epoch 30/2000 +2025-02-18 12:04:59,403 Current Learning Rate: 0.0094550326 +2025-02-18 12:05:01,238 Train Loss: 0.0133189, Val Loss: 0.0121462 +2025-02-18 
12:05:01,239 Epoch 31/2000 +2025-02-18 12:05:43,059 Current Learning Rate: 0.0094188282 +2025-02-18 12:05:44,431 Train Loss: 0.0128272, Val Loss: 0.0116701 +2025-02-18 12:05:44,431 Epoch 32/2000 +2025-02-18 12:06:25,938 Current Learning Rate: 0.0093815334 +2025-02-18 12:06:27,933 Train Loss: 0.0122802, Val Loss: 0.0112547 +2025-02-18 12:06:27,934 Epoch 33/2000 +2025-02-18 12:07:10,039 Current Learning Rate: 0.0093431576 +2025-02-18 12:07:11,798 Train Loss: 0.0117762, Val Loss: 0.0106204 +2025-02-18 12:07:11,799 Epoch 34/2000 +2025-02-18 12:07:53,462 Current Learning Rate: 0.0093037101 +2025-02-18 12:07:55,177 Train Loss: 0.0112313, Val Loss: 0.0101279 +2025-02-18 12:07:55,178 Epoch 35/2000 +2025-02-18 12:08:37,066 Current Learning Rate: 0.0092632008 +2025-02-18 12:08:39,137 Train Loss: 0.0108187, Val Loss: 0.0096379 +2025-02-18 12:08:39,138 Epoch 36/2000 +2025-02-18 12:09:21,129 Current Learning Rate: 0.0092216396 +2025-02-18 12:09:23,012 Train Loss: 0.0102309, Val Loss: 0.0092521 +2025-02-18 12:09:23,012 Epoch 37/2000 +2025-02-18 12:10:04,966 Current Learning Rate: 0.0091790368 +2025-02-18 12:10:06,776 Train Loss: 0.0098315, Val Loss: 0.0088337 +2025-02-18 12:10:06,776 Epoch 38/2000 +2025-02-18 12:10:48,909 Current Learning Rate: 0.0091354029 +2025-02-18 12:10:50,456 Train Loss: 0.0093248, Val Loss: 0.0083890 +2025-02-18 12:10:50,458 Epoch 39/2000 +2025-02-18 12:11:32,518 Current Learning Rate: 0.0090907486 +2025-02-18 12:11:34,189 Train Loss: 0.0087695, Val Loss: 0.0080588 +2025-02-18 12:11:34,189 Epoch 40/2000 +2025-02-18 12:12:16,194 Current Learning Rate: 0.0090450850 +2025-02-18 12:12:17,986 Train Loss: 0.0081769, Val Loss: 0.0073063 +2025-02-18 12:12:17,987 Epoch 41/2000 +2025-02-18 12:12:59,838 Current Learning Rate: 0.0089984233 +2025-02-18 12:13:01,261 Train Loss: 0.0076267, Val Loss: 0.0069956 +2025-02-18 12:13:01,261 Epoch 42/2000 +2025-02-18 12:13:42,414 Current Learning Rate: 0.0089507751 +2025-02-18 12:13:42,415 Train Loss: 0.0076356, Val Loss: 
0.0090246 +2025-02-18 12:13:42,415 Epoch 43/2000 +2025-02-18 12:14:25,024 Current Learning Rate: 0.0089021520 +2025-02-18 12:14:27,186 Train Loss: 0.0074178, Val Loss: 0.0064407 +2025-02-18 12:14:27,186 Epoch 44/2000 +2025-02-18 12:15:08,764 Current Learning Rate: 0.0088525662 +2025-02-18 12:15:09,977 Train Loss: 0.0068171, Val Loss: 0.0060696 +2025-02-18 12:15:09,977 Epoch 45/2000 +2025-02-18 12:15:52,174 Current Learning Rate: 0.0088020298 +2025-02-18 12:15:53,426 Train Loss: 0.0066029, Val Loss: 0.0059524 +2025-02-18 12:15:53,426 Epoch 46/2000 +2025-02-18 12:16:35,338 Current Learning Rate: 0.0087505553 +2025-02-18 12:16:37,448 Train Loss: 0.0065815, Val Loss: 0.0057815 +2025-02-18 12:16:37,449 Epoch 47/2000 +2025-02-18 12:17:18,719 Current Learning Rate: 0.0086981555 +2025-02-18 12:17:18,720 Train Loss: 0.0065231, Val Loss: 0.0061614 +2025-02-18 12:17:18,720 Epoch 48/2000 +2025-02-18 12:18:01,487 Current Learning Rate: 0.0086448431 +2025-02-18 12:18:03,649 Train Loss: 0.0062545, Val Loss: 0.0056254 +2025-02-18 12:18:03,650 Epoch 49/2000 +2025-02-18 12:18:45,446 Current Learning Rate: 0.0085906315 +2025-02-18 12:18:47,196 Train Loss: 0.0062356, Val Loss: 0.0054859 +2025-02-18 12:18:47,196 Epoch 50/2000 +2025-02-18 12:19:28,575 Current Learning Rate: 0.0085355339 +2025-02-18 12:19:30,542 Train Loss: 0.0059626, Val Loss: 0.0053286 +2025-02-18 12:19:30,542 Epoch 51/2000 +2025-02-18 12:20:12,724 Current Learning Rate: 0.0084795640 +2025-02-18 12:20:14,653 Train Loss: 0.0060748, Val Loss: 0.0053129 +2025-02-18 12:20:14,653 Epoch 52/2000 +2025-02-18 12:20:56,323 Current Learning Rate: 0.0084227355 +2025-02-18 12:20:58,470 Train Loss: 0.0054856, Val Loss: 0.0050178 +2025-02-18 12:20:58,471 Epoch 53/2000 +2025-02-18 12:21:39,824 Current Learning Rate: 0.0083650626 +2025-02-18 12:21:41,319 Train Loss: 0.0056379, Val Loss: 0.0049119 +2025-02-18 12:21:41,319 Epoch 54/2000 +2025-02-18 12:22:23,175 Current Learning Rate: 0.0083065593 +2025-02-18 12:22:23,176 Train Loss: 
0.0057683, Val Loss: 0.0053156 +2025-02-18 12:22:23,176 Epoch 55/2000 +2025-02-18 12:23:05,869 Current Learning Rate: 0.0082472402 +2025-02-18 12:23:07,795 Train Loss: 0.0053413, Val Loss: 0.0048144 +2025-02-18 12:23:07,795 Epoch 56/2000 +2025-02-18 12:23:49,611 Current Learning Rate: 0.0081871199 +2025-02-18 12:23:51,366 Train Loss: 0.0051032, Val Loss: 0.0047444 +2025-02-18 12:23:51,367 Epoch 57/2000 +2025-02-18 12:24:32,519 Current Learning Rate: 0.0081262133 +2025-02-18 12:24:34,487 Train Loss: 0.0049701, Val Loss: 0.0045479 +2025-02-18 12:24:34,488 Epoch 58/2000 +2025-02-18 12:25:16,503 Current Learning Rate: 0.0080645353 +2025-02-18 12:25:16,504 Train Loss: 0.0047992, Val Loss: 0.0050299 +2025-02-18 12:25:16,504 Epoch 59/2000 +2025-02-18 12:25:58,811 Current Learning Rate: 0.0080021011 +2025-02-18 12:26:00,720 Train Loss: 0.0049871, Val Loss: 0.0042919 +2025-02-18 12:26:00,720 Epoch 60/2000 +2025-02-18 12:26:42,328 Current Learning Rate: 0.0079389263 +2025-02-18 12:26:42,329 Train Loss: 0.0046179, Val Loss: 0.0044405 +2025-02-18 12:26:42,329 Epoch 61/2000 +2025-02-18 12:27:25,127 Current Learning Rate: 0.0078750263 +2025-02-18 12:27:25,128 Train Loss: 0.0049569, Val Loss: 0.0044025 +2025-02-18 12:27:25,128 Epoch 62/2000 +2025-02-18 12:28:07,952 Current Learning Rate: 0.0078104169 +2025-02-18 12:28:07,952 Train Loss: 0.0047013, Val Loss: 0.0046409 +2025-02-18 12:28:07,953 Epoch 63/2000 +2025-02-18 12:28:50,796 Current Learning Rate: 0.0077451141 +2025-02-18 12:28:52,687 Train Loss: 0.0044193, Val Loss: 0.0039515 +2025-02-18 12:28:52,687 Epoch 64/2000 +2025-02-18 12:29:33,860 Current Learning Rate: 0.0076791340 +2025-02-18 12:29:33,861 Train Loss: 0.0046267, Val Loss: 0.0039819 +2025-02-18 12:29:33,861 Epoch 65/2000 +2025-02-18 12:30:16,348 Current Learning Rate: 0.0076124928 +2025-02-18 12:30:17,908 Train Loss: 0.0042735, Val Loss: 0.0039007 +2025-02-18 12:30:17,909 Epoch 66/2000 +2025-02-18 12:30:59,781 Current Learning Rate: 0.0075452071 +2025-02-18 
12:31:01,427 Train Loss: 0.0040702, Val Loss: 0.0036998 +2025-02-18 12:31:01,428 Epoch 67/2000 +2025-02-18 12:31:42,939 Current Learning Rate: 0.0074772933 +2025-02-18 12:31:42,940 Train Loss: 0.0041402, Val Loss: 0.0043896 +2025-02-18 12:31:42,940 Epoch 68/2000 +2025-02-18 12:32:25,289 Current Learning Rate: 0.0074087684 +2025-02-18 12:32:25,289 Train Loss: 0.0040681, Val Loss: 0.0037126 +2025-02-18 12:32:25,290 Epoch 69/2000 +2025-02-18 12:33:07,948 Current Learning Rate: 0.0073396491 +2025-02-18 12:33:09,164 Train Loss: 0.0041826, Val Loss: 0.0035981 +2025-02-18 12:33:09,165 Epoch 70/2000 +2025-02-18 12:33:51,447 Current Learning Rate: 0.0072699525 +2025-02-18 12:33:53,320 Train Loss: 0.0039610, Val Loss: 0.0035859 +2025-02-18 12:33:53,321 Epoch 71/2000 +2025-02-18 12:34:34,910 Current Learning Rate: 0.0071996958 +2025-02-18 12:34:34,911 Train Loss: 0.0040461, Val Loss: 0.0038279 +2025-02-18 12:34:34,911 Epoch 72/2000 +2025-02-18 12:35:17,593 Current Learning Rate: 0.0071288965 +2025-02-18 12:35:18,958 Train Loss: 0.0038196, Val Loss: 0.0034883 +2025-02-18 12:35:18,958 Epoch 73/2000 +2025-02-18 12:36:00,463 Current Learning Rate: 0.0070575718 +2025-02-18 12:36:00,464 Train Loss: 0.0037394, Val Loss: 0.0035490 +2025-02-18 12:36:00,465 Epoch 74/2000 +2025-02-18 12:36:43,416 Current Learning Rate: 0.0069857395 +2025-02-18 12:36:43,416 Train Loss: 0.0039330, Val Loss: 0.0036724 +2025-02-18 12:36:43,417 Epoch 75/2000 +2025-02-18 12:37:25,961 Current Learning Rate: 0.0069134172 +2025-02-18 12:37:25,962 Train Loss: 0.0036523, Val Loss: 0.0037484 +2025-02-18 12:37:25,962 Epoch 76/2000 +2025-02-18 12:38:08,559 Current Learning Rate: 0.0068406228 +2025-02-18 12:38:10,194 Train Loss: 0.0038443, Val Loss: 0.0034668 +2025-02-18 12:38:10,195 Epoch 77/2000 +2025-02-18 12:38:52,348 Current Learning Rate: 0.0067673742 +2025-02-18 12:38:54,215 Train Loss: 0.0038872, Val Loss: 0.0033315 +2025-02-18 12:38:54,215 Epoch 78/2000 +2025-02-18 12:39:36,357 Current Learning Rate: 
0.0066936896 +2025-02-18 12:39:38,490 Train Loss: 0.0038132, Val Loss: 0.0032982 +2025-02-18 12:39:38,490 Epoch 79/2000 +2025-02-18 12:40:20,492 Current Learning Rate: 0.0066195871 +2025-02-18 12:40:22,024 Train Loss: 0.0035646, Val Loss: 0.0031638 +2025-02-18 12:40:22,025 Epoch 80/2000 +2025-02-18 12:41:04,384 Current Learning Rate: 0.0065450850 +2025-02-18 12:41:04,384 Train Loss: 0.0034006, Val Loss: 0.0031946 +2025-02-18 12:41:04,384 Epoch 81/2000 +2025-02-18 12:41:46,486 Current Learning Rate: 0.0064702016 +2025-02-18 12:41:48,643 Train Loss: 0.0033588, Val Loss: 0.0030893 +2025-02-18 12:41:48,644 Epoch 82/2000 +2025-02-18 12:42:29,850 Current Learning Rate: 0.0063949555 +2025-02-18 12:42:30,923 Train Loss: 0.0033879, Val Loss: 0.0030405 +2025-02-18 12:42:30,923 Epoch 83/2000 +2025-02-18 12:43:13,233 Current Learning Rate: 0.0063193652 +2025-02-18 12:43:14,913 Train Loss: 0.0031879, Val Loss: 0.0029718 +2025-02-18 12:43:14,928 Epoch 84/2000 +2025-02-18 12:43:57,251 Current Learning Rate: 0.0062434494 +2025-02-18 12:43:57,252 Train Loss: 0.0032266, Val Loss: 0.0031460 +2025-02-18 12:43:57,252 Epoch 85/2000 +2025-02-18 12:44:39,349 Current Learning Rate: 0.0061672268 +2025-02-18 12:44:41,258 Train Loss: 0.0034299, Val Loss: 0.0029372 +2025-02-18 12:44:41,259 Epoch 86/2000 +2025-02-18 12:45:22,666 Current Learning Rate: 0.0060907162 +2025-02-18 12:45:22,668 Train Loss: 0.0031645, Val Loss: 0.0029600 +2025-02-18 12:45:22,668 Epoch 87/2000 +2025-02-18 12:46:05,573 Current Learning Rate: 0.0060139365 +2025-02-18 12:46:05,574 Train Loss: 0.0034704, Val Loss: 0.0032086 +2025-02-18 12:46:05,574 Epoch 88/2000 +2025-02-18 12:46:48,414 Current Learning Rate: 0.0059369066 +2025-02-18 12:46:49,978 Train Loss: 0.0032061, Val Loss: 0.0029067 +2025-02-18 12:46:49,978 Epoch 89/2000 +2025-02-18 12:47:31,437 Current Learning Rate: 0.0058596455 +2025-02-18 12:47:32,844 Train Loss: 0.0030000, Val Loss: 0.0028417 +2025-02-18 12:47:32,857 Epoch 90/2000 +2025-02-18 12:48:15,087 
Current Learning Rate: 0.0057821723 +2025-02-18 12:48:16,952 Train Loss: 0.0029832, Val Loss: 0.0028316 +2025-02-18 12:48:16,953 Epoch 91/2000 +2025-02-18 12:48:59,088 Current Learning Rate: 0.0057045062 +2025-02-18 12:48:59,090 Train Loss: 0.0031305, Val Loss: 0.0028579 +2025-02-18 12:48:59,090 Epoch 92/2000 +2025-02-18 12:49:41,859 Current Learning Rate: 0.0056266662 +2025-02-18 12:49:41,860 Train Loss: 0.0030638, Val Loss: 0.0029005 +2025-02-18 12:49:41,860 Epoch 93/2000 +2025-02-18 12:50:24,399 Current Learning Rate: 0.0055486716 +2025-02-18 12:50:26,199 Train Loss: 0.0031297, Val Loss: 0.0028207 +2025-02-18 12:50:26,200 Epoch 94/2000 +2025-02-18 12:51:08,305 Current Learning Rate: 0.0054705416 +2025-02-18 12:51:08,306 Train Loss: 0.0031764, Val Loss: 0.0029541 +2025-02-18 12:51:08,306 Epoch 95/2000 +2025-02-18 12:51:50,998 Current Learning Rate: 0.0053922955 +2025-02-18 12:51:50,999 Train Loss: 0.0028410, Val Loss: 0.0028266 +2025-02-18 12:51:50,999 Epoch 96/2000 +2025-02-18 12:52:33,609 Current Learning Rate: 0.0053139526 +2025-02-18 12:52:33,610 Train Loss: 0.0031975, Val Loss: 0.0028658 +2025-02-18 12:52:33,610 Epoch 97/2000 +2025-02-18 12:53:16,181 Current Learning Rate: 0.0052355323 +2025-02-18 12:53:18,204 Train Loss: 0.0029171, Val Loss: 0.0027451 +2025-02-18 12:53:18,205 Epoch 98/2000 +2025-02-18 12:54:00,168 Current Learning Rate: 0.0051570538 +2025-02-18 12:54:02,214 Train Loss: 0.0030150, Val Loss: 0.0027333 +2025-02-18 12:54:02,214 Epoch 99/2000 +2025-02-18 12:54:44,237 Current Learning Rate: 0.0050785366 +2025-02-18 12:54:45,999 Train Loss: 0.0029330, Val Loss: 0.0027265 +2025-02-18 12:54:45,999 Epoch 100/2000 +2025-02-18 12:55:27,425 Current Learning Rate: 0.0050000000 +2025-02-18 12:55:28,907 Train Loss: 0.0028290, Val Loss: 0.0026971 +2025-02-18 12:55:28,907 Epoch 101/2000 +2025-02-18 12:56:10,907 Current Learning Rate: 0.0049214634 +2025-02-18 12:56:12,102 Train Loss: 0.0028858, Val Loss: 0.0026960 +2025-02-18 12:56:12,103 Epoch 102/2000 
+2025-02-18 12:56:53,651 Current Learning Rate: 0.0048429462 +2025-02-18 12:56:54,713 Train Loss: 0.0029532, Val Loss: 0.0026186 +2025-02-18 12:56:54,713 Epoch 103/2000 +2025-02-18 12:57:36,563 Current Learning Rate: 0.0047644677 +2025-02-18 12:57:36,564 Train Loss: 0.0027671, Val Loss: 0.0026594 +2025-02-18 12:57:36,564 Epoch 104/2000 +2025-02-18 12:58:19,587 Current Learning Rate: 0.0046860474 +2025-02-18 12:58:19,587 Train Loss: 0.0029455, Val Loss: 0.0027470 +2025-02-18 12:58:19,587 Epoch 105/2000 +2025-02-18 12:59:01,917 Current Learning Rate: 0.0046077045 +2025-02-18 12:59:01,918 Train Loss: 0.0029949, Val Loss: 0.0027208 +2025-02-18 12:59:01,918 Epoch 106/2000 +2025-02-18 12:59:44,410 Current Learning Rate: 0.0045294584 +2025-02-18 12:59:44,410 Train Loss: 0.0031252, Val Loss: 0.0027911 +2025-02-18 12:59:44,411 Epoch 107/2000 +2025-02-18 13:00:26,626 Current Learning Rate: 0.0044513284 +2025-02-18 13:00:28,412 Train Loss: 0.0027322, Val Loss: 0.0025305 +2025-02-18 13:00:28,412 Epoch 108/2000 +2025-02-18 13:01:10,166 Current Learning Rate: 0.0043733338 +2025-02-18 13:01:10,167 Train Loss: 0.0027422, Val Loss: 0.0025760 +2025-02-18 13:01:10,167 Epoch 109/2000 +2025-02-18 13:01:51,747 Current Learning Rate: 0.0042954938 +2025-02-18 13:01:51,747 Train Loss: 0.0030014, Val Loss: 0.0025514 +2025-02-18 13:01:51,748 Epoch 110/2000 +2025-02-18 13:02:34,351 Current Learning Rate: 0.0042178277 +2025-02-18 13:02:34,351 Train Loss: 0.0030148, Val Loss: 0.0026140 +2025-02-18 13:02:34,352 Epoch 111/2000 +2025-02-18 13:03:16,218 Current Learning Rate: 0.0041403545 +2025-02-18 13:03:18,552 Train Loss: 0.0026457, Val Loss: 0.0025104 +2025-02-18 13:03:18,553 Epoch 112/2000 +2025-02-18 13:04:00,673 Current Learning Rate: 0.0040630934 +2025-02-18 13:04:02,737 Train Loss: 0.0026000, Val Loss: 0.0024824 +2025-02-18 13:04:02,738 Epoch 113/2000 +2025-02-18 13:04:44,450 Current Learning Rate: 0.0039860635 +2025-02-18 13:04:46,560 Train Loss: 0.0027027, Val Loss: 0.0024382 +2025-02-18 
13:04:46,560 Epoch 114/2000 +2025-02-18 13:05:27,799 Current Learning Rate: 0.0039092838 +2025-02-18 13:05:27,800 Train Loss: 0.0026419, Val Loss: 0.0024624 +2025-02-18 13:05:27,800 Epoch 115/2000 +2025-02-18 13:06:10,862 Current Learning Rate: 0.0038327732 +2025-02-18 13:06:10,863 Train Loss: 0.0031512, Val Loss: 0.0024630 +2025-02-18 13:06:10,863 Epoch 116/2000 +2025-02-18 13:06:53,102 Current Learning Rate: 0.0037565506 +2025-02-18 13:06:53,103 Train Loss: 0.0027621, Val Loss: 0.0024817 +2025-02-18 13:06:53,104 Epoch 117/2000 +2025-02-18 13:07:35,289 Current Learning Rate: 0.0036806348 +2025-02-18 13:07:35,289 Train Loss: 0.0027294, Val Loss: 0.0025535 +2025-02-18 13:07:35,289 Epoch 118/2000 +2025-02-18 13:08:17,845 Current Learning Rate: 0.0036050445 +2025-02-18 13:08:17,845 Train Loss: 0.0025651, Val Loss: 0.0025561 +2025-02-18 13:08:17,846 Epoch 119/2000 +2025-02-18 13:09:00,242 Current Learning Rate: 0.0035297984 +2025-02-18 13:09:00,242 Train Loss: 0.0031162, Val Loss: 0.0025939 +2025-02-18 13:09:00,243 Epoch 120/2000 +2025-02-18 13:09:42,791 Current Learning Rate: 0.0034549150 +2025-02-18 13:09:42,791 Train Loss: 0.0027383, Val Loss: 0.0025477 +2025-02-18 13:09:42,792 Epoch 121/2000 +2025-02-18 13:10:25,578 Current Learning Rate: 0.0033804129 +2025-02-18 13:10:27,319 Train Loss: 0.0027425, Val Loss: 0.0023956 +2025-02-18 13:10:27,320 Epoch 122/2000 +2025-02-18 13:11:08,656 Current Learning Rate: 0.0033063104 +2025-02-18 13:11:08,656 Train Loss: 0.0026657, Val Loss: 0.0024754 +2025-02-18 13:11:08,656 Epoch 123/2000 +2025-02-18 13:11:51,834 Current Learning Rate: 0.0032326258 +2025-02-18 13:11:53,759 Train Loss: 0.0024242, Val Loss: 0.0023507 +2025-02-18 13:11:53,760 Epoch 124/2000 +2025-02-18 13:12:35,916 Current Learning Rate: 0.0031593772 +2025-02-18 13:12:35,917 Train Loss: 0.0025713, Val Loss: 0.0023812 +2025-02-18 13:12:35,917 Epoch 125/2000 +2025-02-18 13:13:18,844 Current Learning Rate: 0.0030865828 +2025-02-18 13:13:21,089 Train Loss: 0.0025928, Val 
Loss: 0.0023480 +2025-02-18 13:13:21,089 Epoch 126/2000 +2025-02-18 13:14:02,360 Current Learning Rate: 0.0030142605 +2025-02-18 13:14:04,286 Train Loss: 0.0024050, Val Loss: 0.0023392 +2025-02-18 13:14:04,286 Epoch 127/2000 +2025-02-18 13:14:46,606 Current Learning Rate: 0.0029424282 +2025-02-18 13:14:48,684 Train Loss: 0.0025779, Val Loss: 0.0023248 +2025-02-18 13:14:48,685 Epoch 128/2000 +2025-02-18 13:15:30,433 Current Learning Rate: 0.0028711035 +2025-02-18 13:15:30,434 Train Loss: 0.0025542, Val Loss: 0.0024302 +2025-02-18 13:15:30,435 Epoch 129/2000 +2025-02-18 13:16:13,088 Current Learning Rate: 0.0028003042 +2025-02-18 13:16:13,089 Train Loss: 0.0027851, Val Loss: 0.0023806 +2025-02-18 13:16:13,089 Epoch 130/2000 +2025-02-18 13:16:55,222 Current Learning Rate: 0.0027300475 +2025-02-18 13:16:55,223 Train Loss: 0.0024214, Val Loss: 0.0023312 +2025-02-18 13:16:55,224 Epoch 131/2000 +2025-02-18 13:17:38,039 Current Learning Rate: 0.0026603509 +2025-02-18 13:17:39,458 Train Loss: 0.0025682, Val Loss: 0.0022957 +2025-02-18 13:17:39,459 Epoch 132/2000 +2025-02-18 13:18:20,980 Current Learning Rate: 0.0025912316 +2025-02-18 13:18:22,217 Train Loss: 0.0023948, Val Loss: 0.0022712 +2025-02-18 13:18:22,217 Epoch 133/2000 +2025-02-18 13:19:03,940 Current Learning Rate: 0.0025227067 +2025-02-18 13:19:03,941 Train Loss: 0.0026540, Val Loss: 0.0023643 +2025-02-18 13:19:03,941 Epoch 134/2000 +2025-02-18 13:19:47,228 Current Learning Rate: 0.0024547929 +2025-02-18 13:19:47,228 Train Loss: 0.0026912, Val Loss: 0.0022917 +2025-02-18 13:19:47,229 Epoch 135/2000 +2025-02-18 13:20:30,279 Current Learning Rate: 0.0023875072 +2025-02-18 13:20:32,202 Train Loss: 0.0023403, Val Loss: 0.0022606 +2025-02-18 13:20:32,202 Epoch 136/2000 +2025-02-18 13:21:13,548 Current Learning Rate: 0.0023208660 +2025-02-18 13:21:13,549 Train Loss: 0.0025837, Val Loss: 0.0022768 +2025-02-18 13:21:13,549 Epoch 137/2000 +2025-02-18 13:21:56,736 Current Learning Rate: 0.0022548859 +2025-02-18 
13:21:58,097 Train Loss: 0.0022579, Val Loss: 0.0022366 +2025-02-18 13:21:58,097 Epoch 138/2000 +2025-02-18 13:22:40,345 Current Learning Rate: 0.0021895831 +2025-02-18 13:22:41,717 Train Loss: 0.0023904, Val Loss: 0.0022284 +2025-02-18 13:22:41,717 Epoch 139/2000 +2025-02-18 13:23:23,732 Current Learning Rate: 0.0021249737 +2025-02-18 13:23:23,733 Train Loss: 0.0024008, Val Loss: 0.0022358 +2025-02-18 13:23:23,733 Epoch 140/2000 +2025-02-18 13:24:07,254 Current Learning Rate: 0.0020610737 +2025-02-18 13:24:09,092 Train Loss: 0.0022246, Val Loss: 0.0022110 +2025-02-18 13:24:09,092 Epoch 141/2000 +2025-02-18 13:24:51,508 Current Learning Rate: 0.0019978989 +2025-02-18 13:24:51,509 Train Loss: 0.0023086, Val Loss: 0.0022171 +2025-02-18 13:24:51,509 Epoch 142/2000 +2025-02-18 13:25:34,071 Current Learning Rate: 0.0019354647 +2025-02-18 13:25:35,999 Train Loss: 0.0022637, Val Loss: 0.0021975 +2025-02-18 13:25:35,999 Epoch 143/2000 +2025-02-18 13:26:17,828 Current Learning Rate: 0.0018737867 +2025-02-18 13:26:17,829 Train Loss: 0.0024424, Val Loss: 0.0022221 +2025-02-18 13:26:17,829 Epoch 144/2000 +2025-02-18 13:27:00,090 Current Learning Rate: 0.0018128801 +2025-02-18 13:27:00,090 Train Loss: 0.0022830, Val Loss: 0.0022219 +2025-02-18 13:27:00,091 Epoch 145/2000 +2025-02-18 13:27:43,268 Current Learning Rate: 0.0017527598 +2025-02-18 13:27:43,268 Train Loss: 0.0025422, Val Loss: 0.0022204 +2025-02-18 13:27:43,268 Epoch 146/2000 +2025-02-18 13:28:25,985 Current Learning Rate: 0.0016934407 +2025-02-18 13:28:27,579 Train Loss: 0.0022766, Val Loss: 0.0021824 +2025-02-18 13:28:27,580 Epoch 147/2000 +2025-02-18 13:29:09,208 Current Learning Rate: 0.0016349374 +2025-02-18 13:29:09,209 Train Loss: 0.0023299, Val Loss: 0.0022020 +2025-02-18 13:29:09,210 Epoch 148/2000 +2025-02-18 13:29:51,369 Current Learning Rate: 0.0015772645 +2025-02-18 13:29:51,369 Train Loss: 0.0023008, Val Loss: 0.0022081 +2025-02-18 13:29:51,370 Epoch 149/2000 +2025-02-18 13:30:34,048 Current Learning 
Rate: 0.0015204360 +2025-02-18 13:30:35,580 Train Loss: 0.0022495, Val Loss: 0.0021472 +2025-02-18 13:30:35,580 Epoch 150/2000 +2025-02-18 13:31:17,736 Current Learning Rate: 0.0014644661 +2025-02-18 13:31:19,639 Train Loss: 0.0022816, Val Loss: 0.0021302 +2025-02-18 13:31:19,639 Epoch 151/2000 +2025-02-18 13:32:01,716 Current Learning Rate: 0.0014093685 +2025-02-18 13:32:01,721 Train Loss: 0.0025645, Val Loss: 0.0021428 +2025-02-18 13:32:01,722 Epoch 152/2000 +2025-02-18 13:32:44,805 Current Learning Rate: 0.0013551569 +2025-02-18 13:32:44,805 Train Loss: 0.0024435, Val Loss: 0.0021654 +2025-02-18 13:32:44,806 Epoch 153/2000 +2025-02-18 13:33:27,199 Current Learning Rate: 0.0013018445 +2025-02-18 13:33:29,106 Train Loss: 0.0025198, Val Loss: 0.0021174 +2025-02-18 13:33:29,108 Epoch 154/2000 +2025-02-18 13:34:11,534 Current Learning Rate: 0.0012494447 +2025-02-18 13:34:13,156 Train Loss: 0.0021932, Val Loss: 0.0020996 +2025-02-18 13:34:13,156 Epoch 155/2000 +2025-02-18 13:34:54,403 Current Learning Rate: 0.0011979702 +2025-02-18 13:34:54,404 Train Loss: 0.0021102, Val Loss: 0.0021050 +2025-02-18 13:34:54,404 Epoch 156/2000 +2025-02-18 13:35:36,836 Current Learning Rate: 0.0011474338 +2025-02-18 13:35:38,400 Train Loss: 0.0021750, Val Loss: 0.0020915 +2025-02-18 13:35:38,400 Epoch 157/2000 +2025-02-18 13:36:19,949 Current Learning Rate: 0.0010978480 +2025-02-18 13:36:19,950 Train Loss: 0.0021748, Val Loss: 0.0021046 +2025-02-18 13:36:19,950 Epoch 158/2000 +2025-02-18 13:37:01,890 Current Learning Rate: 0.0010492249 +2025-02-18 13:37:01,891 Train Loss: 0.0022929, Val Loss: 0.0020975 +2025-02-18 13:37:01,891 Epoch 159/2000 +2025-02-18 13:37:44,328 Current Learning Rate: 0.0010015767 +2025-02-18 13:37:44,329 Train Loss: 0.0022445, Val Loss: 0.0020939 +2025-02-18 13:37:44,330 Epoch 160/2000 +2025-02-18 13:38:26,477 Current Learning Rate: 0.0009549150 +2025-02-18 13:38:27,756 Train Loss: 0.0023150, Val Loss: 0.0020744 +2025-02-18 13:38:27,757 Epoch 161/2000 +2025-02-18 
13:39:09,579 Current Learning Rate: 0.0009092514 +2025-02-18 13:39:11,054 Train Loss: 0.0023384, Val Loss: 0.0020734 +2025-02-18 13:39:11,054 Epoch 162/2000 +2025-02-18 13:39:53,041 Current Learning Rate: 0.0008645971 +2025-02-18 13:39:53,042 Train Loss: 0.0024140, Val Loss: 0.0020823 +2025-02-18 13:39:53,042 Epoch 163/2000 +2025-02-18 13:40:35,431 Current Learning Rate: 0.0008209632 +2025-02-18 13:40:35,432 Train Loss: 0.0024373, Val Loss: 0.0020741 +2025-02-18 13:40:35,432 Epoch 164/2000 +2025-02-18 13:41:18,399 Current Learning Rate: 0.0007783604 +2025-02-18 13:41:19,717 Train Loss: 0.0022746, Val Loss: 0.0020577 +2025-02-18 13:41:19,717 Epoch 165/2000 +2025-02-18 13:42:01,952 Current Learning Rate: 0.0007367992 +2025-02-18 13:42:01,953 Train Loss: 0.0022988, Val Loss: 0.0020613 +2025-02-18 13:42:01,953 Epoch 166/2000 +2025-02-18 13:42:44,119 Current Learning Rate: 0.0006962899 +2025-02-18 13:42:45,422 Train Loss: 0.0023153, Val Loss: 0.0020529 +2025-02-18 13:42:45,423 Epoch 167/2000 +2025-02-18 13:43:27,273 Current Learning Rate: 0.0006568424 +2025-02-18 13:43:28,747 Train Loss: 0.0021537, Val Loss: 0.0020437 +2025-02-18 13:43:28,748 Epoch 168/2000 +2025-02-18 13:44:10,016 Current Learning Rate: 0.0006184666 +2025-02-18 13:44:11,047 Train Loss: 0.0020853, Val Loss: 0.0020344 +2025-02-18 13:44:11,048 Epoch 169/2000 +2025-02-18 13:44:53,571 Current Learning Rate: 0.0005811718 +2025-02-18 13:44:55,499 Train Loss: 0.0022062, Val Loss: 0.0020296 +2025-02-18 13:44:55,499 Epoch 170/2000 +2025-02-18 13:45:37,794 Current Learning Rate: 0.0005449674 +2025-02-18 13:45:40,026 Train Loss: 0.0020276, Val Loss: 0.0020274 +2025-02-18 13:45:40,026 Epoch 171/2000 +2025-02-18 13:46:21,995 Current Learning Rate: 0.0005098621 +2025-02-18 13:46:24,016 Train Loss: 0.0022592, Val Loss: 0.0020274 +2025-02-18 13:46:24,016 Epoch 172/2000 +2025-02-18 13:47:05,363 Current Learning Rate: 0.0004758647 +2025-02-18 13:47:05,364 Train Loss: 0.0022123, Val Loss: 0.0020308 +2025-02-18 
13:47:05,364 Epoch 173/2000 +2025-02-18 13:47:48,734 Current Learning Rate: 0.0004429836 +2025-02-18 13:47:50,812 Train Loss: 0.0021227, Val Loss: 0.0020174 +2025-02-18 13:47:50,812 Epoch 174/2000 +2025-02-18 13:48:32,077 Current Learning Rate: 0.0004112269 +2025-02-18 13:48:33,831 Train Loss: 0.0021057, Val Loss: 0.0020160 +2025-02-18 13:48:33,832 Epoch 175/2000 +2025-02-18 13:49:15,057 Current Learning Rate: 0.0003806023 +2025-02-18 13:49:15,058 Train Loss: 0.0022247, Val Loss: 0.0020161 +2025-02-18 13:49:15,058 Epoch 176/2000 +2025-02-18 13:49:57,453 Current Learning Rate: 0.0003511176 +2025-02-18 13:49:59,525 Train Loss: 0.0020290, Val Loss: 0.0020113 +2025-02-18 13:49:59,525 Epoch 177/2000 +2025-02-18 13:50:40,854 Current Learning Rate: 0.0003227798 +2025-02-18 13:50:43,034 Train Loss: 0.0020590, Val Loss: 0.0020087 +2025-02-18 13:50:43,034 Epoch 178/2000 +2025-02-18 13:51:24,539 Current Learning Rate: 0.0002955962 +2025-02-18 13:51:24,540 Train Loss: 0.0021380, Val Loss: 0.0020131 +2025-02-18 13:51:24,540 Epoch 179/2000 +2025-02-18 13:52:07,550 Current Learning Rate: 0.0002695732 +2025-02-18 13:52:09,062 Train Loss: 0.0021452, Val Loss: 0.0020065 +2025-02-18 13:52:09,062 Epoch 180/2000 +2025-02-18 13:52:51,219 Current Learning Rate: 0.0002447174 +2025-02-18 13:52:51,220 Train Loss: 0.0022274, Val Loss: 0.0020067 +2025-02-18 13:52:51,221 Epoch 181/2000 +2025-02-18 13:53:33,391 Current Learning Rate: 0.0002210349 +2025-02-18 13:53:34,471 Train Loss: 0.0021063, Val Loss: 0.0020031 +2025-02-18 13:53:34,472 Epoch 182/2000 +2025-02-18 13:54:16,342 Current Learning Rate: 0.0001985316 +2025-02-18 13:54:16,343 Train Loss: 0.0021554, Val Loss: 0.0020034 +2025-02-18 13:54:16,343 Epoch 183/2000 +2025-02-18 13:54:58,990 Current Learning Rate: 0.0001772129 +2025-02-18 13:55:00,726 Train Loss: 0.0024092, Val Loss: 0.0020027 +2025-02-18 13:55:00,726 Epoch 184/2000 +2025-02-18 13:55:43,043 Current Learning Rate: 0.0001570842 +2025-02-18 13:55:45,124 Train Loss: 0.0020361, Val 
Loss: 0.0019969 +2025-02-18 13:55:45,125 Epoch 185/2000 +2025-02-18 13:56:26,341 Current Learning Rate: 0.0001381504 +2025-02-18 13:56:27,682 Train Loss: 0.0021787, Val Loss: 0.0019946 +2025-02-18 13:56:27,682 Epoch 186/2000 +2025-02-18 13:57:09,808 Current Learning Rate: 0.0001204162 +2025-02-18 13:57:11,437 Train Loss: 0.0019766, Val Loss: 0.0019902 +2025-02-18 13:57:11,438 Epoch 187/2000 +2025-02-18 13:57:53,413 Current Learning Rate: 0.0001038859 +2025-02-18 13:57:55,321 Train Loss: 0.0022846, Val Loss: 0.0019884 +2025-02-18 13:57:55,329 Epoch 188/2000 +2025-02-18 13:58:36,557 Current Learning Rate: 0.0000885637 +2025-02-18 13:58:38,631 Train Loss: 0.0020690, Val Loss: 0.0019866 +2025-02-18 13:58:38,632 Epoch 189/2000 +2025-02-18 13:59:19,993 Current Learning Rate: 0.0000744534 +2025-02-18 13:59:21,660 Train Loss: 0.0021456, Val Loss: 0.0019859 +2025-02-18 13:59:21,681 Epoch 190/2000 +2025-02-18 14:00:03,799 Current Learning Rate: 0.0000615583 +2025-02-18 14:00:03,800 Train Loss: 0.0020096, Val Loss: 0.0019868 +2025-02-18 14:00:03,800 Epoch 191/2000 +2025-02-18 14:00:46,181 Current Learning Rate: 0.0000498817 +2025-02-18 14:00:46,182 Train Loss: 0.0021014, Val Loss: 0.0019870 +2025-02-18 14:00:46,182 Epoch 192/2000 +2025-02-18 14:01:28,810 Current Learning Rate: 0.0000394265 +2025-02-18 14:01:30,201 Train Loss: 0.0021133, Val Loss: 0.0019855 +2025-02-18 14:01:30,202 Epoch 193/2000 +2025-02-18 14:02:12,523 Current Learning Rate: 0.0000301952 +2025-02-18 14:02:12,524 Train Loss: 0.0020557, Val Loss: 0.0019860 +2025-02-18 14:02:12,524 Epoch 194/2000 +2025-02-18 14:02:55,286 Current Learning Rate: 0.0000221902 +2025-02-18 14:02:57,126 Train Loss: 0.0020902, Val Loss: 0.0019851 +2025-02-18 14:02:57,134 Epoch 195/2000 +2025-02-18 14:03:38,649 Current Learning Rate: 0.0000154133 +2025-02-18 14:03:40,356 Train Loss: 0.0020577, Val Loss: 0.0019844 +2025-02-18 14:03:40,356 Epoch 196/2000 +2025-02-18 14:04:22,463 Current Learning Rate: 0.0000098664 +2025-02-18 
14:04:24,579 Train Loss: 0.0021702, Val Loss: 0.0019841 +2025-02-18 14:04:24,579 Epoch 197/2000 +2025-02-18 14:05:06,792 Current Learning Rate: 0.0000055506 +2025-02-18 14:05:08,844 Train Loss: 0.0021440, Val Loss: 0.0019836 +2025-02-18 14:05:08,845 Epoch 198/2000 +2025-02-18 14:05:50,477 Current Learning Rate: 0.0000024672 +2025-02-18 14:05:52,297 Train Loss: 0.0022741, Val Loss: 0.0019835 +2025-02-18 14:05:52,297 Epoch 199/2000 +2025-02-18 14:06:33,747 Current Learning Rate: 0.0000006168 +2025-02-18 14:06:34,853 Train Loss: 0.0020641, Val Loss: 0.0019835 +2025-02-18 14:06:34,861 Epoch 200/2000 +2025-02-18 14:07:16,856 Current Learning Rate: 0.0000000000 +2025-02-18 14:07:18,951 Train Loss: 0.0020595, Val Loss: 0.0019833 +2025-02-18 14:07:18,952 Epoch 201/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_128_20250322_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_128_20250322_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..c0ad1b953848792dc4b057868b940a7220ea202b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_128_20250322_training_log-checkpoint.log @@ -0,0 +1,8 @@ +2025-03-22 16:11:51,223 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-22 16:11:51,359 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-22 16:11:51,374 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-22 16:11:51,398 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-22 16:11:51,423 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-22 16:11:51,430 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-22 16:11:51,435 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-22 16:11:51,437 Added key: store_based_barrier_key:1 to store for rank: 1 diff --git 
a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250221_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250221_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_inference-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_inference-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..2ba5ad80e44ab4e42ea93ff7db5d32979faf1d42 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_inference-checkpoint.log @@ -0,0 +1,109 @@ +2025-02-24 17:43:57,849 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-24 17:43:57,869 开始推理... +2025-02-25 00:15:14,613 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 00:15:14,630 开始单批次推理... +2025-02-25 00:15:24,673 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 00:15:24,673 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 00:15:24,673 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:15:24,673 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:15:24,675 输入数据范围:[-1.72, 1.93] +2025-02-25 00:15:24,677 输出数据范围:[-1.66, 1.90] +2025-02-25 00:15:24,681 单批次推理完成! 
+2025-02-25 00:23:52,236 缺失的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 
'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 
'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 
'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 
'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 00:23:52,237 意外的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 
'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 
'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 
'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 
'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 
'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 00:23:52,237 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 00:23:52,262 开始单批次推理... +2025-02-25 00:24:05,732 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 00:24:05,737 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 00:24:05,737 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:24:05,737 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:24:05,739 输入数据范围:[-1.72, 1.93] +2025-02-25 00:24:05,740 输出数据范围:[-3.44, 2.91] +2025-02-25 00:24:05,744 单批次推理完成! +2025-02-25 00:24:57,827 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 00:24:57,856 开始单批次推理... 
+2025-02-25 00:25:00,397 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 00:25:00,401 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 00:25:00,401 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:25:00,402 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:25:00,404 输入数据范围:[-1.72, 1.93] +2025-02-25 00:25:00,406 输出数据范围:[-1.66, 1.99] +2025-02-25 00:25:00,410 单批次推理完成! +2025-02-25 09:34:00,385 缺失的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 
'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 
'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 
'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 
'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 
'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 09:34:00,386 意外的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 
'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 
'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 
'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 
'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 09:34:00,386 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 09:34:00,407 开始单批次推理... +2025-02-25 09:34:02,599 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 09:34:02,603 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 09:34:02,604 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:34:02,604 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:34:02,606 输入数据范围:[-1.72, 1.93] +2025-02-25 09:34:02,607 输出数据范围:[-2.29, 2.43] +2025-02-25 09:34:02,611 单批次推理完成! 
+2025-02-25 09:35:09,583 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 09:35:09,608 开始单批次推理... +2025-02-25 09:35:19,024 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 09:35:19,025 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 09:35:19,025 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:35:19,025 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:35:19,026 输入数据范围:[-1.72, 1.93] +2025-02-25 09:35:19,028 输出数据范围:[-1.61, 1.90] +2025-02-25 09:35:19,032 单批次推理完成! +2025-02-25 10:37:36,163 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 10:37:36,183 开始单批次推理... +2025-02-25 10:37:45,889 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 10:37:45,889 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 10:37:45,889 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 10:37:45,889 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 10:37:45,891 输入数据范围:[-1.72, 1.93] +2025-02-25 10:37:45,893 输出数据范围:[-1.64, 1.90] +2025-02-25 10:37:45,896 单批次推理完成! +2025-02-25 14:02:32,148 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 14:02:32,165 开始单批次推理... +2025-02-25 14:02:42,169 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 14:02:42,169 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 14:02:42,170 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:02:42,170 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:02:42,173 输入数据范围:[-1.72, 1.93] +2025-02-25 14:02:42,175 输出数据范围:[-1.64, 1.86] +2025-02-25 14:02:42,180 单批次推理完成! 
+2025-02-25 14:19:43,204 缺失的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 
'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 
'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 
'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 
'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 14:19:43,205 意外的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 
'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 
'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 
'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 
'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 
'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 14:19:43,205 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 14:19:43,229 开始单批次推理... +2025-02-25 14:19:52,934 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 14:19:52,938 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 14:19:52,938 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:19:52,938 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:19:52,940 输入数据范围:[-1.72, 1.93] +2025-02-25 14:19:52,943 输出数据范围:[-3.44, 2.91] +2025-02-25 14:19:52,948 单批次推理完成! +2025-02-25 14:21:09,528 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 14:21:09,560 开始单批次推理... 
+2025-02-25 14:21:11,731 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 14:21:11,736 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 14:21:11,736 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:21:11,736 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:21:11,738 输入数据范围:[-1.72, 1.93] +2025-02-25 14:21:11,740 输出数据范围:[-1.63, 1.90] +2025-02-25 14:21:11,744 单批次推理完成! +2025-02-25 17:17:16,495 缺失的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 
'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 
'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 
'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 
'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 
'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 17:17:16,496 意外的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 
'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 
'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 
'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 
'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 17:17:16,496 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 17:17:16,515 开始单批次推理... +2025-02-25 17:17:18,505 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 17:17:18,511 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 17:17:18,512 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:17:18,512 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:17:18,514 输入数据范围:[-1.72, 1.93] +2025-02-25 17:17:18,516 输出数据范围:[-2.29, 2.43] +2025-02-25 17:17:18,520 单批次推理完成! 
+2025-02-25 17:18:40,686 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 17:18:40,700 开始单批次推理... +2025-02-25 17:18:50,365 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 17:18:50,372 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 17:18:50,372 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:18:50,372 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:18:50,373 输入数据范围:[-1.72, 1.93] +2025-02-25 17:18:50,376 输出数据范围:[-1.64, 1.89] +2025-02-25 17:18:50,380 单批次推理完成! diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_inference_new-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_inference_new-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..e82103b38b34fb4e1e8a63291f8daa62f4619a50 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_inference_new-checkpoint.log @@ -0,0 +1,9 @@ +2025-02-26 15:00:39,982 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 15:00:40,002 开始单批次推理... +2025-02-26 15:00:50,376 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 15:00:50,378 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 15:00:50,378 目标数据形状:(2, 10, 2, 256, 256) +2025-02-26 15:00:50,379 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 15:00:50,380 输入数据范围:[-1.72, 1.93] +2025-02-26 15:00:50,382 输出数据范围:[-1.64, 1.89] +2025-02-26 15:00:50,385 单批次推理完成! 
diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..6fc36b01bf70f3034d4d695236603126438530b0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_20250224_training_log-checkpoint.log @@ -0,0 +1,575 @@ +2025-02-24 16:11:07,025 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 16:11:07,038 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 16:11:07,080 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 16:11:07,121 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 16:11:07,158 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 16:11:07,177 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 16:11:07,185 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-24 16:11:07,191 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-24 16:12:13,158 Epoch 1/2000 +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. 
+2025-02-24 16:27:16,028 Current Learning Rate: 0.0009999383 +2025-02-24 16:27:16,817 Train Loss: 0.0406381, Val Loss: 0.0053257 +2025-02-24 16:27:16,818 Epoch 2/2000 +2025-02-24 16:42:18,281 Current Learning Rate: 0.0009997533 +2025-02-24 16:42:19,303 Train Loss: 0.0047488, Val Loss: 0.0037061 +2025-02-24 16:42:19,304 Epoch 3/2000 +2025-02-24 16:57:22,683 Current Learning Rate: 0.0009994449 +2025-02-24 16:57:23,533 Train Loss: 0.0036298, Val Loss: 0.0033693 +2025-02-24 16:57:23,533 Epoch 4/2000 +2025-02-24 17:12:25,186 Current Learning Rate: 0.0009990134 +2025-02-24 17:12:26,059 Train Loss: 0.0032028, Val Loss: 0.0030099 +2025-02-24 17:12:26,059 Epoch 5/2000 +2025-02-24 17:27:27,267 Current Learning Rate: 0.0009984587 +2025-02-24 17:27:28,176 Train Loss: 0.0029359, Val Loss: 0.0027050 +2025-02-24 17:27:28,177 Epoch 6/2000 +2025-02-24 17:42:29,880 Current Learning Rate: 0.0009977810 +2025-02-24 17:42:30,846 Train Loss: 0.0027535, Val Loss: 0.0025139 +2025-02-24 17:42:30,846 Epoch 7/2000 +2025-02-24 17:57:33,515 Current Learning Rate: 0.0009969805 +2025-02-24 17:57:34,512 Train Loss: 0.0026977, Val Loss: 0.0023625 +2025-02-24 17:57:34,513 Epoch 8/2000 +2025-02-24 18:12:37,864 Current Learning Rate: 0.0009960574 +2025-02-24 18:12:37,865 Train Loss: 0.0025400, Val Loss: 0.0024198 +2025-02-24 18:12:37,865 Epoch 9/2000 +2025-02-24 18:27:40,015 Current Learning Rate: 0.0009950118 +2025-02-24 18:27:40,015 Train Loss: 0.0024398, Val Loss: 0.0027710 +2025-02-24 18:27:40,015 Epoch 10/2000 +2025-02-24 18:42:43,127 Current Learning Rate: 0.0009938442 +2025-02-24 18:42:44,154 Train Loss: 0.0023841, Val Loss: 0.0021985 +2025-02-24 18:42:44,154 Epoch 11/2000 +2025-02-24 18:57:46,279 Current Learning Rate: 0.0009925547 +2025-02-24 18:57:46,280 Train Loss: 0.0022101, Val Loss: 0.0022231 +2025-02-24 18:57:46,280 Epoch 12/2000 +2025-02-24 19:12:48,317 Current Learning Rate: 0.0009911436 +2025-02-24 19:12:49,429 Train Loss: 0.0021464, Val Loss: 0.0019511 +2025-02-24 19:12:49,430 Epoch 
13/2000 +2025-02-24 19:27:52,189 Current Learning Rate: 0.0009896114 +2025-02-24 19:27:53,126 Train Loss: 0.0019859, Val Loss: 0.0018656 +2025-02-24 19:27:53,127 Epoch 14/2000 +2025-02-24 19:42:56,220 Current Learning Rate: 0.0009879584 +2025-02-24 19:42:57,122 Train Loss: 0.0018646, Val Loss: 0.0017206 +2025-02-24 19:42:57,123 Epoch 15/2000 +2025-02-24 19:57:59,289 Current Learning Rate: 0.0009861850 +2025-02-24 19:58:00,235 Train Loss: 0.0017463, Val Loss: 0.0016954 +2025-02-24 19:58:00,235 Epoch 16/2000 +2025-02-24 20:13:02,848 Current Learning Rate: 0.0009842916 +2025-02-24 20:13:02,848 Train Loss: 0.0016704, Val Loss: 0.0018268 +2025-02-24 20:13:02,849 Epoch 17/2000 +2025-02-24 20:28:06,151 Current Learning Rate: 0.0009822787 +2025-02-24 20:28:07,067 Train Loss: 0.0015416, Val Loss: 0.0014259 +2025-02-24 20:28:07,067 Epoch 18/2000 +2025-02-24 20:43:10,826 Current Learning Rate: 0.0009801468 +2025-02-24 20:43:11,797 Train Loss: 0.0014084, Val Loss: 0.0013179 +2025-02-24 20:43:11,797 Epoch 19/2000 +2025-02-24 20:58:13,971 Current Learning Rate: 0.0009778965 +2025-02-24 20:58:15,020 Train Loss: 0.0013198, Val Loss: 0.0013071 +2025-02-24 20:58:15,020 Epoch 20/2000 +2025-02-24 21:13:17,915 Current Learning Rate: 0.0009755283 +2025-02-24 21:13:18,833 Train Loss: 0.0012210, Val Loss: 0.0011555 +2025-02-24 21:13:18,834 Epoch 21/2000 +2025-02-24 21:28:21,985 Current Learning Rate: 0.0009730427 +2025-02-24 21:28:22,937 Train Loss: 0.0011413, Val Loss: 0.0011252 +2025-02-24 21:28:22,937 Epoch 22/2000 +2025-02-24 21:43:26,165 Current Learning Rate: 0.0009704404 +2025-02-24 21:43:26,166 Train Loss: 0.0011451, Val Loss: 0.0011662 +2025-02-24 21:43:26,166 Epoch 23/2000 +2025-02-24 21:58:30,148 Current Learning Rate: 0.0009677220 +2025-02-24 21:58:31,140 Train Loss: 0.0010685, Val Loss: 0.0010649 +2025-02-24 21:58:31,141 Epoch 24/2000 +2025-02-24 22:13:34,687 Current Learning Rate: 0.0009648882 +2025-02-24 22:13:35,624 Train Loss: 0.0009016, Val Loss: 0.0009312 +2025-02-24 
22:13:35,624 Epoch 25/2000 +2025-02-24 22:28:38,293 Current Learning Rate: 0.0009619398 +2025-02-24 22:28:39,202 Train Loss: 0.0009121, Val Loss: 0.0008724 +2025-02-24 22:28:39,202 Epoch 26/2000 +2025-02-24 22:43:42,880 Current Learning Rate: 0.0009588773 +2025-02-24 22:43:42,881 Train Loss: 0.0008411, Val Loss: 0.0008799 +2025-02-24 22:43:42,881 Epoch 27/2000 +2025-02-24 22:58:46,431 Current Learning Rate: 0.0009557016 +2025-02-24 22:58:47,413 Train Loss: 0.0008557, Val Loss: 0.0008101 +2025-02-24 22:58:47,414 Epoch 28/2000 +2025-02-24 23:13:49,935 Current Learning Rate: 0.0009524135 +2025-02-24 23:13:50,803 Train Loss: 0.0008248, Val Loss: 0.0008042 +2025-02-24 23:13:50,803 Epoch 29/2000 +2025-02-24 23:28:53,290 Current Learning Rate: 0.0009490138 +2025-02-24 23:28:54,256 Train Loss: 0.0006904, Val Loss: 0.0007522 +2025-02-24 23:28:54,256 Epoch 30/2000 +2025-02-24 23:43:56,574 Current Learning Rate: 0.0009455033 +2025-02-24 23:43:56,575 Train Loss: 0.0007436, Val Loss: 0.0011877 +2025-02-24 23:43:56,575 Epoch 31/2000 +2025-02-24 23:59:00,575 Current Learning Rate: 0.0009418828 +2025-02-24 23:59:01,486 Train Loss: 0.0007554, Val Loss: 0.0006824 +2025-02-24 23:59:01,486 Epoch 32/2000 +2025-02-25 00:14:04,480 Current Learning Rate: 0.0009381533 +2025-02-25 00:14:06,360 Train Loss: 0.0006869, Val Loss: 0.0006696 +2025-02-25 00:14:06,361 Epoch 33/2000 +2025-02-25 00:29:09,347 Current Learning Rate: 0.0009343158 +2025-02-25 00:29:10,169 Train Loss: 0.0006142, Val Loss: 0.0006497 +2025-02-25 00:29:10,169 Epoch 34/2000 +2025-02-25 00:44:13,071 Current Learning Rate: 0.0009303710 +2025-02-25 00:44:14,012 Train Loss: 0.0005384, Val Loss: 0.0006241 +2025-02-25 00:44:14,012 Epoch 35/2000 +2025-02-25 00:59:16,622 Current Learning Rate: 0.0009263201 +2025-02-25 00:59:16,623 Train Loss: 0.0005865, Val Loss: 0.0006459 +2025-02-25 00:59:16,623 Epoch 36/2000 +2025-02-25 01:14:19,789 Current Learning Rate: 0.0009221640 +2025-02-25 01:14:20,884 Train Loss: 0.0006411, Val Loss: 
0.0006062 +2025-02-25 01:14:20,884 Epoch 37/2000 +2025-02-25 01:29:24,210 Current Learning Rate: 0.0009179037 +2025-02-25 01:29:24,211 Train Loss: 0.0005853, Val Loss: 0.0006425 +2025-02-25 01:29:24,211 Epoch 38/2000 +2025-02-25 01:44:27,395 Current Learning Rate: 0.0009135403 +2025-02-25 01:44:28,509 Train Loss: 0.0005547, Val Loss: 0.0005610 +2025-02-25 01:44:28,510 Epoch 39/2000 +2025-02-25 01:59:31,667 Current Learning Rate: 0.0009090749 +2025-02-25 01:59:32,575 Train Loss: 0.0004919, Val Loss: 0.0005452 +2025-02-25 01:59:32,576 Epoch 40/2000 +2025-02-25 02:14:36,419 Current Learning Rate: 0.0009045085 +2025-02-25 02:14:36,420 Train Loss: 0.0004709, Val Loss: 0.0006160 +2025-02-25 02:14:36,420 Epoch 41/2000 +2025-02-25 02:29:39,939 Current Learning Rate: 0.0008998423 +2025-02-25 02:29:40,881 Train Loss: 0.0005403, Val Loss: 0.0005210 +2025-02-25 02:29:40,882 Epoch 42/2000 +2025-02-25 02:44:43,949 Current Learning Rate: 0.0008950775 +2025-02-25 02:44:43,950 Train Loss: 0.0004746, Val Loss: 0.0005861 +2025-02-25 02:44:43,950 Epoch 43/2000 +2025-02-25 02:59:47,680 Current Learning Rate: 0.0008902152 +2025-02-25 02:59:47,680 Train Loss: 0.0005121, Val Loss: 0.0005827 +2025-02-25 02:59:47,680 Epoch 44/2000 +2025-02-25 03:14:51,351 Current Learning Rate: 0.0008852566 +2025-02-25 03:14:51,351 Train Loss: 0.0004441, Val Loss: 0.0005960 +2025-02-25 03:14:51,351 Epoch 45/2000 +2025-02-25 03:29:55,318 Current Learning Rate: 0.0008802030 +2025-02-25 03:29:56,290 Train Loss: 0.0004549, Val Loss: 0.0004676 +2025-02-25 03:29:56,291 Epoch 46/2000 +2025-02-25 03:44:58,575 Current Learning Rate: 0.0008750555 +2025-02-25 03:44:58,576 Train Loss: 0.0004177, Val Loss: 0.0004849 +2025-02-25 03:44:58,576 Epoch 47/2000 +2025-02-25 04:00:01,937 Current Learning Rate: 0.0008698155 +2025-02-25 04:00:04,796 Train Loss: 0.0004078, Val Loss: 0.0004234 +2025-02-25 04:00:04,796 Epoch 48/2000 +2025-02-25 04:15:07,817 Current Learning Rate: 0.0008644843 +2025-02-25 04:15:07,818 Train Loss: 
0.0003959, Val Loss: 0.0004665 +2025-02-25 04:15:07,819 Epoch 49/2000 +2025-02-25 04:30:11,693 Current Learning Rate: 0.0008590631 +2025-02-25 04:30:12,713 Train Loss: 0.0004122, Val Loss: 0.0004154 +2025-02-25 04:30:12,713 Epoch 50/2000 +2025-02-25 04:45:15,538 Current Learning Rate: 0.0008535534 +2025-02-25 04:45:16,632 Train Loss: 0.0004019, Val Loss: 0.0004076 +2025-02-25 04:45:16,632 Epoch 51/2000 +2025-02-25 05:00:20,380 Current Learning Rate: 0.0008479564 +2025-02-25 05:00:21,280 Train Loss: 0.0003756, Val Loss: 0.0003701 +2025-02-25 05:00:21,281 Epoch 52/2000 +2025-02-25 05:15:23,837 Current Learning Rate: 0.0008422736 +2025-02-25 05:15:23,841 Train Loss: 0.0003748, Val Loss: 0.0004063 +2025-02-25 05:15:23,841 Epoch 53/2000 +2025-02-25 05:30:27,716 Current Learning Rate: 0.0008365063 +2025-02-25 05:30:27,716 Train Loss: 0.0005704, Val Loss: 0.0004781 +2025-02-25 05:30:27,717 Epoch 54/2000 +2025-02-25 05:45:31,146 Current Learning Rate: 0.0008306559 +2025-02-25 05:45:31,146 Train Loss: 0.0003712, Val Loss: 0.0003772 +2025-02-25 05:45:31,146 Epoch 55/2000 +2025-02-25 06:00:34,966 Current Learning Rate: 0.0008247240 +2025-02-25 06:00:34,967 Train Loss: 0.0004134, Val Loss: 0.0004025 +2025-02-25 06:00:34,967 Epoch 56/2000 +2025-02-25 06:15:39,445 Current Learning Rate: 0.0008187120 +2025-02-25 06:15:40,545 Train Loss: 0.0003266, Val Loss: 0.0003262 +2025-02-25 06:15:40,545 Epoch 57/2000 +2025-02-25 06:30:44,292 Current Learning Rate: 0.0008126213 +2025-02-25 06:30:45,302 Train Loss: 0.0003011, Val Loss: 0.0003144 +2025-02-25 06:30:45,302 Epoch 58/2000 +2025-02-25 06:45:48,633 Current Learning Rate: 0.0008064535 +2025-02-25 06:45:48,634 Train Loss: 0.0003419, Val Loss: 0.0003240 +2025-02-25 06:45:48,635 Epoch 59/2000 +2025-02-25 07:00:52,035 Current Learning Rate: 0.0008002101 +2025-02-25 07:00:52,036 Train Loss: 0.0003142, Val Loss: 0.0003275 +2025-02-25 07:00:52,036 Epoch 60/2000 +2025-02-25 07:15:54,621 Current Learning Rate: 0.0007938926 +2025-02-25 
07:15:54,621 Train Loss: 0.0003328, Val Loss: 0.0003441 +2025-02-25 07:15:54,621 Epoch 61/2000 +2025-02-25 07:30:57,200 Current Learning Rate: 0.0007875026 +2025-02-25 07:30:57,200 Train Loss: 0.0003144, Val Loss: 0.0003336 +2025-02-25 07:30:57,200 Epoch 62/2000 +2025-02-25 07:46:00,913 Current Learning Rate: 0.0007810417 +2025-02-25 07:46:00,913 Train Loss: 0.0002909, Val Loss: 0.0003155 +2025-02-25 07:46:00,914 Epoch 63/2000 +2025-02-25 08:01:04,932 Current Learning Rate: 0.0007745114 +2025-02-25 08:01:06,008 Train Loss: 0.0003106, Val Loss: 0.0003073 +2025-02-25 08:01:06,008 Epoch 64/2000 +2025-02-25 08:16:09,420 Current Learning Rate: 0.0007679134 +2025-02-25 08:16:09,421 Train Loss: 0.0003480, Val Loss: 0.0003098 +2025-02-25 08:16:09,421 Epoch 65/2000 +2025-02-25 08:31:13,486 Current Learning Rate: 0.0007612493 +2025-02-25 08:31:14,500 Train Loss: 0.0003508, Val Loss: 0.0002983 +2025-02-25 08:31:14,500 Epoch 66/2000 +2025-02-25 08:46:17,285 Current Learning Rate: 0.0007545207 +2025-02-25 08:46:18,296 Train Loss: 0.0002690, Val Loss: 0.0002935 +2025-02-25 08:46:18,297 Epoch 67/2000 +2025-02-25 09:01:21,350 Current Learning Rate: 0.0007477293 +2025-02-25 09:01:21,352 Train Loss: 0.0003194, Val Loss: 0.0003067 +2025-02-25 09:01:21,352 Epoch 68/2000 +2025-02-25 09:16:24,402 Current Learning Rate: 0.0007408768 +2025-02-25 09:16:25,564 Train Loss: 0.0002590, Val Loss: 0.0002914 +2025-02-25 09:16:25,564 Epoch 69/2000 +2025-02-25 09:31:28,130 Current Learning Rate: 0.0007339649 +2025-02-25 09:31:28,131 Train Loss: 0.0002838, Val Loss: 0.0003011 +2025-02-25 09:31:28,132 Epoch 70/2000 +2025-02-25 09:46:31,264 Current Learning Rate: 0.0007269952 +2025-02-25 09:46:31,265 Train Loss: 0.0002724, Val Loss: 0.0003282 +2025-02-25 09:46:31,265 Epoch 71/2000 +2025-02-25 10:01:35,150 Current Learning Rate: 0.0007199696 +2025-02-25 10:01:35,151 Train Loss: 0.0002907, Val Loss: 0.0002919 +2025-02-25 10:01:35,151 Epoch 72/2000 +2025-02-25 10:16:38,037 Current Learning Rate: 
0.0007128896 +2025-02-25 10:16:38,037 Train Loss: 0.0002937, Val Loss: 0.0003012 +2025-02-25 10:16:38,037 Epoch 73/2000 +2025-02-25 10:31:41,420 Current Learning Rate: 0.0007057572 +2025-02-25 10:31:42,335 Train Loss: 0.0002739, Val Loss: 0.0002700 +2025-02-25 10:31:42,336 Epoch 74/2000 +2025-02-25 10:46:44,862 Current Learning Rate: 0.0006985739 +2025-02-25 10:46:45,831 Train Loss: 0.0002685, Val Loss: 0.0002608 +2025-02-25 10:46:45,831 Epoch 75/2000 +2025-02-25 11:01:48,402 Current Learning Rate: 0.0006913417 +2025-02-25 11:01:48,402 Train Loss: 0.0002618, Val Loss: 0.0002696 +2025-02-25 11:01:48,402 Epoch 76/2000 +2025-02-25 11:16:51,147 Current Learning Rate: 0.0006840623 +2025-02-25 11:16:52,042 Train Loss: 0.0002518, Val Loss: 0.0002458 +2025-02-25 11:16:52,042 Epoch 77/2000 +2025-02-25 11:31:54,389 Current Learning Rate: 0.0006767374 +2025-02-25 11:31:54,389 Train Loss: 0.0002349, Val Loss: 0.0002580 +2025-02-25 11:31:54,390 Epoch 78/2000 +2025-02-25 11:46:57,106 Current Learning Rate: 0.0006693690 +2025-02-25 11:46:57,107 Train Loss: 0.0002533, Val Loss: 0.0002504 +2025-02-25 11:46:57,107 Epoch 79/2000 +2025-02-25 12:01:59,996 Current Learning Rate: 0.0006619587 +2025-02-25 12:01:59,997 Train Loss: 0.0002345, Val Loss: 0.0002552 +2025-02-25 12:01:59,997 Epoch 80/2000 +2025-02-25 12:17:03,130 Current Learning Rate: 0.0006545085 +2025-02-25 12:17:03,131 Train Loss: 0.0002340, Val Loss: 0.0002564 +2025-02-25 12:17:03,131 Epoch 81/2000 +2025-02-25 12:32:05,871 Current Learning Rate: 0.0006470202 +2025-02-25 12:32:05,872 Train Loss: 0.0002187, Val Loss: 0.0002478 +2025-02-25 12:32:05,872 Epoch 82/2000 +2025-02-25 12:47:09,285 Current Learning Rate: 0.0006394956 +2025-02-25 12:47:09,285 Train Loss: 0.0002616, Val Loss: 0.0002532 +2025-02-25 12:47:09,286 Epoch 83/2000 +2025-02-25 13:02:12,213 Current Learning Rate: 0.0006319365 +2025-02-25 13:02:13,017 Train Loss: 0.0002400, Val Loss: 0.0002432 +2025-02-25 13:02:13,018 Epoch 84/2000 +2025-02-25 13:17:15,104 
Current Learning Rate: 0.0006243449 +2025-02-25 13:17:15,989 Train Loss: 0.0002385, Val Loss: 0.0002424 +2025-02-25 13:17:15,989 Epoch 85/2000 +2025-02-25 13:32:18,146 Current Learning Rate: 0.0006167227 +2025-02-25 13:32:19,058 Train Loss: 0.0002356, Val Loss: 0.0002413 +2025-02-25 13:32:19,059 Epoch 86/2000 +2025-02-25 13:47:21,459 Current Learning Rate: 0.0006090716 +2025-02-25 13:47:21,460 Train Loss: 0.0002399, Val Loss: 0.0002569 +2025-02-25 13:47:21,460 Epoch 87/2000 +2025-02-25 14:02:25,066 Current Learning Rate: 0.0006013936 +2025-02-25 14:02:25,067 Train Loss: 0.0002128, Val Loss: 0.0002451 +2025-02-25 14:02:25,067 Epoch 88/2000 +2025-02-25 14:17:27,765 Current Learning Rate: 0.0005936907 +2025-02-25 14:17:28,687 Train Loss: 0.0002162, Val Loss: 0.0002375 +2025-02-25 14:17:28,688 Epoch 89/2000 +2025-02-25 14:32:31,720 Current Learning Rate: 0.0005859646 +2025-02-25 14:32:32,732 Train Loss: 0.0002342, Val Loss: 0.0002355 +2025-02-25 14:32:32,732 Epoch 90/2000 +2025-02-25 14:47:35,041 Current Learning Rate: 0.0005782172 +2025-02-25 14:47:35,953 Train Loss: 0.0002678, Val Loss: 0.0002264 +2025-02-25 14:47:35,954 Epoch 91/2000 +2025-02-25 15:02:38,966 Current Learning Rate: 0.0005704506 +2025-02-25 15:02:39,919 Train Loss: 0.0002453, Val Loss: 0.0002140 +2025-02-25 15:02:39,920 Epoch 92/2000 +2025-02-25 15:17:43,009 Current Learning Rate: 0.0005626666 +2025-02-25 15:17:44,040 Train Loss: 0.0001895, Val Loss: 0.0002100 +2025-02-25 15:17:44,040 Epoch 93/2000 +2025-02-25 15:32:46,791 Current Learning Rate: 0.0005548672 +2025-02-25 15:32:47,661 Train Loss: 0.0001881, Val Loss: 0.0002013 +2025-02-25 15:32:47,661 Epoch 94/2000 +2025-02-25 15:47:50,748 Current Learning Rate: 0.0005470542 +2025-02-25 15:47:50,749 Train Loss: 0.0002285, Val Loss: 0.0002250 +2025-02-25 15:47:50,749 Epoch 95/2000 +2025-02-25 16:02:54,055 Current Learning Rate: 0.0005392295 +2025-02-25 16:02:54,055 Train Loss: 0.0002251, Val Loss: 0.0002040 +2025-02-25 16:02:54,055 Epoch 96/2000 
+2025-02-25 16:17:59,050 Current Learning Rate: 0.0005313953 +2025-02-25 16:17:59,052 Train Loss: 0.0001951, Val Loss: 0.0002039 +2025-02-25 16:17:59,053 Epoch 97/2000 +2025-02-25 16:33:02,606 Current Learning Rate: 0.0005235532 +2025-02-25 16:33:02,606 Train Loss: 0.0002096, Val Loss: 0.0002188 +2025-02-25 16:33:02,607 Epoch 98/2000 +2025-02-25 16:48:06,201 Current Learning Rate: 0.0005157054 +2025-02-25 16:48:07,131 Train Loss: 0.0002015, Val Loss: 0.0001987 +2025-02-25 16:48:07,131 Epoch 99/2000 +2025-02-25 17:03:10,862 Current Learning Rate: 0.0005078537 +2025-02-25 17:03:10,863 Train Loss: 0.0001924, Val Loss: 0.0002114 +2025-02-25 17:03:10,863 Epoch 100/2000 +2025-02-25 17:18:13,997 Current Learning Rate: 0.0005000000 +2025-02-25 17:18:13,997 Train Loss: 0.0001947, Val Loss: 0.0001999 +2025-02-25 17:18:13,998 Epoch 101/2000 +2025-02-25 17:33:16,414 Current Learning Rate: 0.0004921463 +2025-02-25 17:33:16,414 Train Loss: 0.0002028, Val Loss: 0.0002025 +2025-02-25 17:33:16,414 Epoch 102/2000 +2025-02-25 17:48:20,832 Current Learning Rate: 0.0004842946 +2025-02-25 17:48:20,833 Train Loss: 0.0002183, Val Loss: 0.0002091 +2025-02-25 17:48:20,833 Epoch 103/2000 +2025-02-25 18:03:24,432 Current Learning Rate: 0.0004764468 +2025-02-25 18:03:24,433 Train Loss: 0.0001786, Val Loss: 0.0002148 +2025-02-25 18:03:24,433 Epoch 104/2000 +2025-02-25 18:18:27,755 Current Learning Rate: 0.0004686047 +2025-02-25 18:18:27,756 Train Loss: 0.0001798, Val Loss: 0.0002055 +2025-02-25 18:18:27,756 Epoch 105/2000 +2025-02-25 18:33:31,423 Current Learning Rate: 0.0004607705 +2025-02-25 18:33:32,394 Train Loss: 0.0001941, Val Loss: 0.0001975 +2025-02-25 18:33:32,394 Epoch 106/2000 +2025-02-25 18:48:35,960 Current Learning Rate: 0.0004529458 +2025-02-25 18:48:35,961 Train Loss: 0.0002056, Val Loss: 0.0002399 +2025-02-25 18:48:35,962 Epoch 107/2000 +2025-02-25 19:03:38,991 Current Learning Rate: 0.0004451328 +2025-02-25 19:03:38,992 Train Loss: 0.0001906, Val Loss: 0.0002040 +2025-02-25 
19:03:38,992 Epoch 108/2000 +2025-02-25 19:18:41,657 Current Learning Rate: 0.0004373334 +2025-02-25 19:18:42,527 Train Loss: 0.0001806, Val Loss: 0.0001967 +2025-02-25 19:18:42,528 Epoch 109/2000 +2025-02-25 19:33:45,767 Current Learning Rate: 0.0004295494 +2025-02-25 19:33:45,767 Train Loss: 0.0001806, Val Loss: 0.0001990 +2025-02-25 19:33:45,767 Epoch 110/2000 +2025-02-25 19:48:50,020 Current Learning Rate: 0.0004217828 +2025-02-25 19:48:50,021 Train Loss: 0.0001675, Val Loss: 0.0001971 +2025-02-25 19:48:50,021 Epoch 111/2000 +2025-02-25 20:03:52,846 Current Learning Rate: 0.0004140354 +2025-02-25 20:03:53,767 Train Loss: 0.0001470, Val Loss: 0.0001785 +2025-02-25 20:03:53,767 Epoch 112/2000 +2025-02-25 20:18:56,116 Current Learning Rate: 0.0004063093 +2025-02-25 20:18:56,117 Train Loss: 0.0001739, Val Loss: 0.0001800 +2025-02-25 20:18:56,117 Epoch 113/2000 +2025-02-25 20:33:58,736 Current Learning Rate: 0.0003986064 +2025-02-25 20:33:58,736 Train Loss: 0.0002093, Val Loss: 0.0001841 +2025-02-25 20:33:58,736 Epoch 114/2000 +2025-02-25 20:49:01,269 Current Learning Rate: 0.0003909284 +2025-02-25 20:49:03,853 Train Loss: 0.0001446, Val Loss: 0.0001755 +2025-02-25 20:49:03,854 Epoch 115/2000 +2025-02-25 21:04:06,164 Current Learning Rate: 0.0003832773 +2025-02-25 21:04:06,165 Train Loss: 0.0002054, Val Loss: 0.0001839 +2025-02-25 21:04:06,165 Epoch 116/2000 +2025-02-25 21:19:09,503 Current Learning Rate: 0.0003756551 +2025-02-25 21:19:09,503 Train Loss: 0.0002168, Val Loss: 0.0002063 +2025-02-25 21:19:09,503 Epoch 117/2000 +2025-02-25 21:34:12,561 Current Learning Rate: 0.0003680635 +2025-02-25 21:34:13,329 Train Loss: 0.0001846, Val Loss: 0.0001725 +2025-02-25 21:34:13,329 Epoch 118/2000 +2025-02-25 21:49:15,429 Current Learning Rate: 0.0003605044 +2025-02-25 21:49:16,177 Train Loss: 0.0001766, Val Loss: 0.0001693 +2025-02-25 21:49:16,177 Epoch 119/2000 +2025-02-25 22:04:18,936 Current Learning Rate: 0.0003529798 +2025-02-25 22:04:18,936 Train Loss: 0.0001543, Val 
Loss: 0.0001781 +2025-02-25 22:04:18,937 Epoch 120/2000 +2025-02-25 22:19:22,010 Current Learning Rate: 0.0003454915 +2025-02-25 22:19:22,770 Train Loss: 0.0001525, Val Loss: 0.0001688 +2025-02-25 22:19:22,771 Epoch 121/2000 +2025-02-25 22:34:25,377 Current Learning Rate: 0.0003380413 +2025-02-25 22:34:26,126 Train Loss: 0.0001948, Val Loss: 0.0001664 +2025-02-25 22:34:26,127 Epoch 122/2000 +2025-02-25 22:49:28,199 Current Learning Rate: 0.0003306310 +2025-02-25 22:49:28,200 Train Loss: 0.0001663, Val Loss: 0.0001676 +2025-02-25 22:49:28,200 Epoch 123/2000 +2025-02-25 23:04:31,654 Current Learning Rate: 0.0003232626 +2025-02-25 23:04:32,432 Train Loss: 0.0001568, Val Loss: 0.0001639 +2025-02-25 23:04:32,432 Epoch 124/2000 +2025-02-25 23:19:34,796 Current Learning Rate: 0.0003159377 +2025-02-25 23:19:34,797 Train Loss: 0.0001820, Val Loss: 0.0001678 +2025-02-25 23:19:34,797 Epoch 125/2000 +2025-02-25 23:34:37,793 Current Learning Rate: 0.0003086583 +2025-02-25 23:34:38,657 Train Loss: 0.0001385, Val Loss: 0.0001612 +2025-02-25 23:34:38,657 Epoch 126/2000 +2025-02-25 23:49:41,020 Current Learning Rate: 0.0003014261 +2025-02-25 23:49:41,021 Train Loss: 0.0001461, Val Loss: 0.0001638 +2025-02-25 23:49:41,021 Epoch 127/2000 +2025-02-26 00:04:44,363 Current Learning Rate: 0.0002942428 +2025-02-26 00:04:44,363 Train Loss: 0.0001603, Val Loss: 0.0001642 +2025-02-26 00:04:44,363 Epoch 128/2000 +2025-02-26 00:19:47,628 Current Learning Rate: 0.0002871104 +2025-02-26 00:19:47,628 Train Loss: 0.0001603, Val Loss: 0.0001628 +2025-02-26 00:19:47,628 Epoch 129/2000 +2025-02-26 00:34:50,322 Current Learning Rate: 0.0002800304 +2025-02-26 00:34:50,323 Train Loss: 0.0001403, Val Loss: 0.0001614 +2025-02-26 00:34:50,323 Epoch 130/2000 +2025-02-26 00:49:53,781 Current Learning Rate: 0.0002730048 +2025-02-26 00:49:53,782 Train Loss: 0.0001583, Val Loss: 0.0001651 +2025-02-26 00:49:53,782 Epoch 131/2000 +2025-02-26 01:04:56,705 Current Learning Rate: 0.0002660351 +2025-02-26 
01:04:57,442 Train Loss: 0.0001401, Val Loss: 0.0001569 +2025-02-26 01:04:57,442 Epoch 132/2000 +2025-02-26 01:20:00,262 Current Learning Rate: 0.0002591232 +2025-02-26 01:20:00,262 Train Loss: 0.0001626, Val Loss: 0.0001687 +2025-02-26 01:20:00,263 Epoch 133/2000 +2025-02-26 01:35:03,206 Current Learning Rate: 0.0002522707 +2025-02-26 01:35:03,206 Train Loss: 0.0001407, Val Loss: 0.0001578 +2025-02-26 01:35:03,206 Epoch 134/2000 +2025-02-26 01:50:06,740 Current Learning Rate: 0.0002454793 +2025-02-26 01:50:06,741 Train Loss: 0.0001549, Val Loss: 0.0001586 +2025-02-26 01:50:06,741 Epoch 135/2000 +2025-02-26 02:05:09,590 Current Learning Rate: 0.0002387507 +2025-02-26 02:05:09,590 Train Loss: 0.0001322, Val Loss: 0.0001576 +2025-02-26 02:05:09,591 Epoch 136/2000 +2025-02-26 02:20:12,392 Current Learning Rate: 0.0002320866 +2025-02-26 02:20:13,203 Train Loss: 0.0001440, Val Loss: 0.0001562 +2025-02-26 02:20:13,203 Epoch 137/2000 +2025-02-26 02:35:15,294 Current Learning Rate: 0.0002254886 +2025-02-26 02:35:15,295 Train Loss: 0.0001528, Val Loss: 0.0001667 +2025-02-26 02:35:15,295 Epoch 138/2000 +2025-02-26 02:50:18,038 Current Learning Rate: 0.0002189583 +2025-02-26 02:50:18,891 Train Loss: 0.0001358, Val Loss: 0.0001525 +2025-02-26 02:50:18,891 Epoch 139/2000 +2025-02-26 03:05:22,429 Current Learning Rate: 0.0002124974 +2025-02-26 03:05:22,430 Train Loss: 0.0001528, Val Loss: 0.0001608 +2025-02-26 03:05:22,430 Epoch 140/2000 +2025-02-26 03:20:25,745 Current Learning Rate: 0.0002061074 +2025-02-26 03:20:25,746 Train Loss: 0.0001313, Val Loss: 0.0001563 +2025-02-26 03:20:25,746 Epoch 141/2000 +2025-02-26 03:35:29,194 Current Learning Rate: 0.0001997899 +2025-02-26 03:35:30,049 Train Loss: 0.0001324, Val Loss: 0.0001501 +2025-02-26 03:35:30,049 Epoch 142/2000 +2025-02-26 03:50:32,947 Current Learning Rate: 0.0001935465 +2025-02-26 03:50:32,948 Train Loss: 0.0001395, Val Loss: 0.0001551 +2025-02-26 03:50:32,948 Epoch 143/2000 +2025-02-26 04:05:35,584 Current Learning 
Rate: 0.0001873787 +2025-02-26 04:05:36,442 Train Loss: 0.0001363, Val Loss: 0.0001461 +2025-02-26 04:05:36,442 Epoch 144/2000 +2025-02-26 04:20:39,099 Current Learning Rate: 0.0001812880 +2025-02-26 04:20:39,100 Train Loss: 0.0001362, Val Loss: 0.0001465 +2025-02-26 04:20:39,100 Epoch 145/2000 +2025-02-26 04:35:41,757 Current Learning Rate: 0.0001752760 +2025-02-26 04:35:41,757 Train Loss: 0.0001643, Val Loss: 0.0001544 +2025-02-26 04:35:41,757 Epoch 146/2000 +2025-02-26 04:50:44,724 Current Learning Rate: 0.0001693441 +2025-02-26 04:50:44,724 Train Loss: 0.0001328, Val Loss: 0.0001556 +2025-02-26 04:50:44,725 Epoch 147/2000 +2025-02-26 05:05:47,749 Current Learning Rate: 0.0001634937 +2025-02-26 05:05:47,750 Train Loss: 0.0001218, Val Loss: 0.0001580 +2025-02-26 05:05:47,750 Epoch 148/2000 +2025-02-26 05:20:50,392 Current Learning Rate: 0.0001577264 +2025-02-26 05:20:51,242 Train Loss: 0.0001057, Val Loss: 0.0001442 +2025-02-26 05:20:51,242 Epoch 149/2000 +2025-02-26 05:35:54,519 Current Learning Rate: 0.0001520436 +2025-02-26 05:35:54,520 Train Loss: 0.0001296, Val Loss: 0.0001466 +2025-02-26 05:35:54,520 Epoch 150/2000 +2025-02-26 05:50:57,400 Current Learning Rate: 0.0001464466 +2025-02-26 05:50:57,400 Train Loss: 0.0001403, Val Loss: 0.0001475 +2025-02-26 05:50:57,400 Epoch 151/2000 +2025-02-26 06:06:00,950 Current Learning Rate: 0.0001409369 +2025-02-26 06:06:01,794 Train Loss: 0.0001305, Val Loss: 0.0001440 +2025-02-26 06:06:01,795 Epoch 152/2000 +2025-02-26 06:21:03,812 Current Learning Rate: 0.0001355157 +2025-02-26 06:21:04,758 Train Loss: 0.0001285, Val Loss: 0.0001414 +2025-02-26 06:21:04,758 Epoch 153/2000 +2025-02-26 06:36:06,875 Current Learning Rate: 0.0001301845 +2025-02-26 06:36:06,876 Train Loss: 0.0001381, Val Loss: 0.0001449 +2025-02-26 06:36:06,876 Epoch 154/2000 +2025-02-26 06:51:09,343 Current Learning Rate: 0.0001249445 +2025-02-26 06:51:09,343 Train Loss: 0.0001238, Val Loss: 0.0001418 +2025-02-26 06:51:09,344 Epoch 155/2000 +2025-02-26 
07:06:12,322 Current Learning Rate: 0.0001197970 +2025-02-26 07:06:13,245 Train Loss: 0.0001294, Val Loss: 0.0001414 +2025-02-26 07:06:13,245 Epoch 156/2000 +2025-02-26 07:21:16,214 Current Learning Rate: 0.0001147434 +2025-02-26 07:21:17,145 Train Loss: 0.0001394, Val Loss: 0.0001393 +2025-02-26 07:21:17,146 Epoch 157/2000 +2025-02-26 07:36:20,501 Current Learning Rate: 0.0001097848 +2025-02-26 07:36:20,502 Train Loss: 0.0001324, Val Loss: 0.0001396 +2025-02-26 07:36:20,502 Epoch 158/2000 +2025-02-26 07:51:23,004 Current Learning Rate: 0.0001049225 +2025-02-26 07:51:23,004 Train Loss: 0.0001241, Val Loss: 0.0001410 +2025-02-26 07:51:23,004 Epoch 159/2000 +2025-02-26 08:06:25,464 Current Learning Rate: 0.0001001577 +2025-02-26 08:06:25,465 Train Loss: 0.0001252, Val Loss: 0.0001400 +2025-02-26 08:06:25,465 Epoch 160/2000 +2025-02-26 08:21:29,079 Current Learning Rate: 0.0000954915 +2025-02-26 08:21:29,936 Train Loss: 0.0001419, Val Loss: 0.0001385 +2025-02-26 08:21:29,936 Epoch 161/2000 +2025-02-26 08:36:32,243 Current Learning Rate: 0.0000909251 +2025-02-26 08:36:33,158 Train Loss: 0.0001335, Val Loss: 0.0001376 +2025-02-26 08:36:33,158 Epoch 162/2000 +2025-02-26 08:51:35,787 Current Learning Rate: 0.0000864597 +2025-02-26 08:51:36,655 Train Loss: 0.0001353, Val Loss: 0.0001374 +2025-02-26 08:51:36,655 Epoch 163/2000 +2025-02-26 09:06:39,090 Current Learning Rate: 0.0000820963 +2025-02-26 09:06:39,948 Train Loss: 0.0001217, Val Loss: 0.0001374 +2025-02-26 09:06:39,948 Epoch 164/2000 +2025-02-26 09:21:43,008 Current Learning Rate: 0.0000778360 +2025-02-26 09:21:43,009 Train Loss: 0.0001269, Val Loss: 0.0001380 +2025-02-26 09:21:43,010 Epoch 165/2000 +2025-02-26 09:36:45,997 Current Learning Rate: 0.0000736799 +2025-02-26 09:36:45,997 Train Loss: 0.0001192, Val Loss: 0.0001391 +2025-02-26 09:36:45,997 Epoch 166/2000 +2025-02-26 09:51:48,465 Current Learning Rate: 0.0000696290 +2025-02-26 09:51:49,269 Train Loss: 0.0001330, Val Loss: 0.0001354 +2025-02-26 
09:51:49,269 Epoch 167/2000 +2025-02-26 10:06:52,869 Current Learning Rate: 0.0000656842 +2025-02-26 10:06:53,769 Train Loss: 0.0001153, Val Loss: 0.0001352 +2025-02-26 10:06:53,769 Epoch 168/2000 +2025-02-26 10:21:56,208 Current Learning Rate: 0.0000618467 +2025-02-26 10:21:56,208 Train Loss: 0.0001294, Val Loss: 0.0001372 +2025-02-26 10:21:56,209 Epoch 169/2000 +2025-02-26 10:36:58,943 Current Learning Rate: 0.0000581172 +2025-02-26 10:36:59,781 Train Loss: 0.0001191, Val Loss: 0.0001345 +2025-02-26 10:36:59,781 Epoch 170/2000 +2025-02-26 10:52:02,082 Current Learning Rate: 0.0000544967 +2025-02-26 10:52:03,528 Train Loss: 0.0001146, Val Loss: 0.0001343 +2025-02-26 10:52:03,529 Epoch 171/2000 +2025-02-26 11:07:06,166 Current Learning Rate: 0.0000509862 +2025-02-26 11:07:06,167 Train Loss: 0.0001237, Val Loss: 0.0001343 +2025-02-26 11:07:06,167 Epoch 172/2000 +2025-02-26 11:22:09,864 Current Learning Rate: 0.0000475865 +2025-02-26 11:22:09,864 Train Loss: 0.0001171, Val Loss: 0.0001349 +2025-02-26 11:22:09,865 Epoch 173/2000 +2025-02-26 11:37:12,952 Current Learning Rate: 0.0000442984 +2025-02-26 11:37:12,952 Train Loss: 0.0001234, Val Loss: 0.0001352 +2025-02-26 11:37:12,953 Epoch 174/2000 +2025-02-26 11:52:16,865 Current Learning Rate: 0.0000411227 +2025-02-26 11:52:17,795 Train Loss: 0.0001172, Val Loss: 0.0001338 +2025-02-26 11:52:17,795 Epoch 175/2000 +2025-02-26 12:07:21,226 Current Learning Rate: 0.0000380602 +2025-02-26 12:07:22,060 Train Loss: 0.0001205, Val Loss: 0.0001335 +2025-02-26 12:07:22,060 Epoch 176/2000 +2025-02-26 12:22:24,897 Current Learning Rate: 0.0000351118 +2025-02-26 12:22:24,899 Train Loss: 0.0001470, Val Loss: 0.0001340 +2025-02-26 12:22:24,899 Epoch 177/2000 +2025-02-26 12:37:27,732 Current Learning Rate: 0.0000322780 +2025-02-26 12:37:28,564 Train Loss: 0.0001123, Val Loss: 0.0001330 +2025-02-26 12:37:28,565 Epoch 178/2000 +2025-02-26 12:52:31,567 Current Learning Rate: 0.0000295596 +2025-02-26 12:52:31,568 Train Loss: 0.0001130, Val 
Loss: 0.0001330 +2025-02-26 12:52:31,568 Epoch 179/2000 +2025-02-26 13:07:35,722 Current Learning Rate: 0.0000269573 +2025-02-26 13:07:35,723 Train Loss: 0.0001342, Val Loss: 0.0001334 +2025-02-26 13:07:35,723 Epoch 180/2000 +2025-02-26 13:22:38,587 Current Learning Rate: 0.0000244717 +2025-02-26 13:22:39,540 Train Loss: 0.0001320, Val Loss: 0.0001326 +2025-02-26 13:22:39,545 Epoch 181/2000 +2025-02-26 13:37:42,509 Current Learning Rate: 0.0000221035 +2025-02-26 13:37:43,447 Train Loss: 0.0001246, Val Loss: 0.0001326 +2025-02-26 13:37:43,447 Epoch 182/2000 +2025-02-26 13:52:46,364 Current Learning Rate: 0.0000198532 +2025-02-26 13:52:46,366 Train Loss: 0.0001256, Val Loss: 0.0001330 +2025-02-26 13:52:46,366 Epoch 183/2000 +2025-02-26 14:07:49,211 Current Learning Rate: 0.0000177213 +2025-02-26 14:07:50,119 Train Loss: 0.0001099, Val Loss: 0.0001324 +2025-02-26 14:07:50,119 Epoch 184/2000 +2025-02-26 14:22:53,008 Current Learning Rate: 0.0000157084 +2025-02-26 14:22:53,009 Train Loss: 0.0001182, Val Loss: 0.0001325 +2025-02-26 14:22:53,010 Epoch 185/2000 +2025-02-26 14:37:56,894 Current Learning Rate: 0.0000138150 +2025-02-26 14:37:57,763 Train Loss: 0.0001106, Val Loss: 0.0001319 +2025-02-26 14:37:57,764 Epoch 186/2000 +2025-02-26 14:53:00,217 Current Learning Rate: 0.0000120416 +2025-02-26 14:53:00,218 Train Loss: 0.0001201, Val Loss: 0.0001323 +2025-02-26 14:53:00,219 Epoch 187/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_64_20250323_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_64_20250323_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..fefb4348494c5cc97f765c51c5c498546371345b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp1_64_20250323_training_log-checkpoint.log @@ -0,0 +1,17 @@ +2025-03-23 14:01:50,685 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-23 14:01:50,724 
Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-23 14:01:50,732 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-23 14:01:50,751 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-23 14:01:50,792 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-23 14:01:50,797 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-23 14:01:50,806 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-23 14:01:50,808 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-23 14:02:57,255 Epoch 1/2000 +2025-03-23 14:02:59,259 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,260 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,260 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,269 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,269 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,270 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,270 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,270 Reducer buckets have been rebuilt in this iteration. 
diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp2_20241107_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp2_20241107_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..e6f75d3115f0472abaf1a93469c501d51870a364 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp2_20241107_training_log-checkpoint.log @@ -0,0 +1,21 @@ +2024-11-07 22:13:57,616 Added key: store_based_barrier_key:1 to store for rank: 2 +2024-11-07 22:13:57,712 Added key: store_based_barrier_key:1 to store for rank: 5 +2024-11-07 22:13:57,717 Added key: store_based_barrier_key:1 to store for rank: 1 +2024-11-07 22:13:57,754 Added key: store_based_barrier_key:1 to store for rank: 4 +2024-11-07 22:13:57,803 Added key: store_based_barrier_key:1 to store for rank: 7 +2024-11-07 22:13:57,819 Added key: store_based_barrier_key:1 to store for rank: 6 +2024-11-07 22:13:57,824 Added key: store_based_barrier_key:1 to store for rank: 3 +2024-11-07 22:13:57,826 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-07 22:15:08,339 Epoch 1/500 +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,064 Reducer buckets have been rebuilt in this iteration. 
+2024-11-07 22:15:52,061 Current Learning Rate: 0.0009999901 +2024-11-07 22:15:52,995 Train Loss: 0.7143657, Val Loss: 0.0286528 +2024-11-07 22:15:52,995 Epoch 2/500 +2024-11-07 22:16:35,814 Current Learning Rate: 0.0009999605 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp3_20241107_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp3_20241107_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..00a6080e6569914a030338a2e4848939d6dbaa9f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp3_20241107_training_log-checkpoint.log @@ -0,0 +1,8 @@ +2024-11-07 22:19:54,475 Added key: store_based_barrier_key:1 to store for rank: 7 +2024-11-07 22:19:54,634 Added key: store_based_barrier_key:1 to store for rank: 1 +2024-11-07 22:19:54,656 Added key: store_based_barrier_key:1 to store for rank: 4 +2024-11-07 22:19:54,666 Added key: store_based_barrier_key:1 to store for rank: 5 +2024-11-07 22:19:54,670 Added key: store_based_barrier_key:1 to store for rank: 6 +2024-11-07 22:19:54,703 Added key: store_based_barrier_key:1 to store for rank: 3 +2024-11-07 22:19:54,736 Added key: store_based_barrier_key:1 to store for rank: 2 +2024-11-07 22:19:54,745 Added key: store_based_barrier_key:1 to store for rank: 0 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp3_20241111_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp3_20241111_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..2605140f0748012aafcaf94ddb6556c40ace0ab6 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_exp3_20241111_training_log-checkpoint.log @@ -0,0 +1,6007 @@ +2024-11-11 13:57:17,741 Epoch 1/2000 +2024-11-11 13:57:33,467 Current Learning Rate: 0.0099993832 +2024-11-11 13:57:34,075 Train Loss: 1.2661774, Val Loss: 
0.0945691 +2024-11-11 13:57:34,075 Epoch 2/2000 +2024-11-11 13:57:48,763 Current Learning Rate: 0.0099975328 +2024-11-11 13:57:49,623 Train Loss: 0.0537641, Val Loss: 0.0279424 +2024-11-11 13:57:49,623 Epoch 3/2000 +2024-11-11 13:58:04,524 Current Learning Rate: 0.0099944494 +2024-11-11 13:58:05,625 Train Loss: 0.0225878, Val Loss: 0.0174877 +2024-11-11 13:58:05,626 Epoch 4/2000 +2024-11-11 13:58:20,987 Current Learning Rate: 0.0099901336 +2024-11-11 13:58:21,766 Train Loss: 0.0182771, Val Loss: 0.0150993 +2024-11-11 13:58:21,766 Epoch 5/2000 +2024-11-11 13:58:37,352 Current Learning Rate: 0.0099845867 +2024-11-11 13:58:38,354 Train Loss: 0.0156225, Val Loss: 0.0133836 +2024-11-11 13:58:38,355 Epoch 6/2000 +2024-11-11 13:58:54,652 Current Learning Rate: 0.0099778098 +2024-11-11 13:58:55,447 Train Loss: 0.0145146, Val Loss: 0.0130899 +2024-11-11 13:58:55,447 Epoch 7/2000 +2024-11-11 13:59:10,803 Current Learning Rate: 0.0099698048 +2024-11-11 13:59:11,606 Train Loss: 0.0142418, Val Loss: 0.0129172 +2024-11-11 13:59:11,606 Epoch 8/2000 +2024-11-11 13:59:26,594 Current Learning Rate: 0.0099605735 +2024-11-11 13:59:27,429 Train Loss: 0.0140829, Val Loss: 0.0128121 +2024-11-11 13:59:27,429 Epoch 9/2000 +2024-11-11 13:59:42,661 Current Learning Rate: 0.0099501183 +2024-11-11 13:59:43,689 Train Loss: 0.0139972, Val Loss: 0.0127396 +2024-11-11 13:59:43,689 Epoch 10/2000 +2024-11-11 13:59:59,735 Current Learning Rate: 0.0099384417 +2024-11-11 14:00:00,530 Train Loss: 0.0139306, Val Loss: 0.0126877 +2024-11-11 14:00:00,531 Epoch 11/2000 +2024-11-11 14:00:15,580 Current Learning Rate: 0.0099255466 +2024-11-11 14:00:16,603 Train Loss: 0.0138500, Val Loss: 0.0126377 +2024-11-11 14:00:16,604 Epoch 12/2000 +2024-11-11 14:00:32,449 Current Learning Rate: 0.0099114363 +2024-11-11 14:00:33,472 Train Loss: 0.0137952, Val Loss: 0.0126092 +2024-11-11 14:00:33,473 Epoch 13/2000 +2024-11-11 14:00:49,904 Current Learning Rate: 0.0098961141 +2024-11-11 14:00:50,822 Train Loss: 0.0137525, 
Val Loss: 0.0125482 +2024-11-11 14:00:50,822 Epoch 14/2000 +2024-11-11 14:01:06,299 Current Learning Rate: 0.0098795838 +2024-11-11 14:01:07,340 Train Loss: 0.0136998, Val Loss: 0.0125104 +2024-11-11 14:01:07,341 Epoch 15/2000 +2024-11-11 14:01:23,530 Current Learning Rate: 0.0098618496 +2024-11-11 14:01:24,335 Train Loss: 0.0136709, Val Loss: 0.0124729 +2024-11-11 14:01:24,336 Epoch 16/2000 +2024-11-11 14:01:39,180 Current Learning Rate: 0.0098429158 +2024-11-11 14:01:39,977 Train Loss: 0.0136169, Val Loss: 0.0124519 +2024-11-11 14:01:39,978 Epoch 17/2000 +2024-11-11 14:01:55,280 Current Learning Rate: 0.0098227871 +2024-11-11 14:01:56,264 Train Loss: 0.0135782, Val Loss: 0.0124041 +2024-11-11 14:01:56,264 Epoch 18/2000 +2024-11-11 14:02:12,422 Current Learning Rate: 0.0098014684 +2024-11-11 14:02:13,438 Train Loss: 0.0135803, Val Loss: 0.0123766 +2024-11-11 14:02:13,438 Epoch 19/2000 +2024-11-11 14:02:29,142 Current Learning Rate: 0.0097789651 +2024-11-11 14:02:30,239 Train Loss: 0.0135045, Val Loss: 0.0123399 +2024-11-11 14:02:30,239 Epoch 20/2000 +2024-11-11 14:02:45,969 Current Learning Rate: 0.0097552826 +2024-11-11 14:02:46,963 Train Loss: 0.0134864, Val Loss: 0.0123273 +2024-11-11 14:02:46,963 Epoch 21/2000 +2024-11-11 14:03:03,191 Current Learning Rate: 0.0097304268 +2024-11-11 14:03:04,677 Train Loss: 0.0135118, Val Loss: 0.0122758 +2024-11-11 14:03:04,677 Epoch 22/2000 +2024-11-11 14:03:20,274 Current Learning Rate: 0.0097044038 +2024-11-11 14:03:21,242 Train Loss: 0.0133713, Val Loss: 0.0122122 +2024-11-11 14:03:21,242 Epoch 23/2000 +2024-11-11 14:03:36,192 Current Learning Rate: 0.0096772202 +2024-11-11 14:03:36,969 Train Loss: 0.0133401, Val Loss: 0.0121755 +2024-11-11 14:03:36,969 Epoch 24/2000 +2024-11-11 14:03:52,247 Current Learning Rate: 0.0096488824 +2024-11-11 14:03:53,264 Train Loss: 0.0132870, Val Loss: 0.0121372 +2024-11-11 14:03:53,264 Epoch 25/2000 +2024-11-11 14:04:08,343 Current Learning Rate: 0.0096193977 +2024-11-11 14:04:09,135 Train 
Loss: 0.0132457, Val Loss: 0.0121040 +2024-11-11 14:04:09,135 Epoch 26/2000 +2024-11-11 14:04:23,673 Current Learning Rate: 0.0095887731 +2024-11-11 14:04:24,427 Train Loss: 0.0131983, Val Loss: 0.0120748 +2024-11-11 14:04:24,427 Epoch 27/2000 +2024-11-11 14:04:39,002 Current Learning Rate: 0.0095570164 +2024-11-11 14:04:39,773 Train Loss: 0.0131626, Val Loss: 0.0120470 +2024-11-11 14:04:39,773 Epoch 28/2000 +2024-11-11 14:04:54,297 Current Learning Rate: 0.0095241353 +2024-11-11 14:04:55,000 Train Loss: 0.0131318, Val Loss: 0.0120103 +2024-11-11 14:04:55,000 Epoch 29/2000 +2024-11-11 14:05:09,700 Current Learning Rate: 0.0094901379 +2024-11-11 14:05:10,421 Train Loss: 0.0130881, Val Loss: 0.0119711 +2024-11-11 14:05:10,421 Epoch 30/2000 +2024-11-11 14:05:25,041 Current Learning Rate: 0.0094550326 +2024-11-11 14:05:25,922 Train Loss: 0.0130381, Val Loss: 0.0119285 +2024-11-11 14:05:25,922 Epoch 31/2000 +2024-11-11 14:05:41,262 Current Learning Rate: 0.0094188282 +2024-11-11 14:05:42,297 Train Loss: 0.0129911, Val Loss: 0.0118835 +2024-11-11 14:05:42,297 Epoch 32/2000 +2024-11-11 14:05:57,406 Current Learning Rate: 0.0093815334 +2024-11-11 14:05:58,205 Train Loss: 0.0129408, Val Loss: 0.0118445 +2024-11-11 14:05:58,205 Epoch 33/2000 +2024-11-11 14:06:12,583 Current Learning Rate: 0.0093431576 +2024-11-11 14:06:13,444 Train Loss: 0.0128994, Val Loss: 0.0118045 +2024-11-11 14:06:13,444 Epoch 34/2000 +2024-11-11 14:06:27,706 Current Learning Rate: 0.0093037101 +2024-11-11 14:06:28,491 Train Loss: 0.0128525, Val Loss: 0.0117625 +2024-11-11 14:06:28,492 Epoch 35/2000 +2024-11-11 14:06:43,050 Current Learning Rate: 0.0092632008 +2024-11-11 14:06:43,806 Train Loss: 0.0128143, Val Loss: 0.0117353 +2024-11-11 14:06:43,807 Epoch 36/2000 +2024-11-11 14:06:58,363 Current Learning Rate: 0.0092216396 +2024-11-11 14:06:59,142 Train Loss: 0.0127623, Val Loss: 0.0116933 +2024-11-11 14:06:59,142 Epoch 37/2000 +2024-11-11 14:07:14,647 Current Learning Rate: 0.0091790368 +2024-11-11 
14:07:15,449 Train Loss: 0.0127313, Val Loss: 0.0116767 +2024-11-11 14:07:15,449 Epoch 38/2000 +2024-11-11 14:07:31,353 Current Learning Rate: 0.0091354029 +2024-11-11 14:07:32,129 Train Loss: 0.0126899, Val Loss: 0.0116313 +2024-11-11 14:07:32,130 Epoch 39/2000 +2024-11-11 14:07:47,116 Current Learning Rate: 0.0090907486 +2024-11-11 14:07:47,900 Train Loss: 0.0126563, Val Loss: 0.0115961 +2024-11-11 14:07:47,900 Epoch 40/2000 +2024-11-11 14:08:04,085 Current Learning Rate: 0.0090450850 +2024-11-11 14:08:04,878 Train Loss: 0.0126185, Val Loss: 0.0115656 +2024-11-11 14:08:04,878 Epoch 41/2000 +2024-11-11 14:08:19,508 Current Learning Rate: 0.0089984233 +2024-11-11 14:08:20,279 Train Loss: 0.0125796, Val Loss: 0.0115548 +2024-11-11 14:08:20,279 Epoch 42/2000 +2024-11-11 14:08:35,722 Current Learning Rate: 0.0089507751 +2024-11-11 14:08:36,552 Train Loss: 0.0125514, Val Loss: 0.0115522 +2024-11-11 14:08:36,552 Epoch 43/2000 +2024-11-11 14:08:52,783 Current Learning Rate: 0.0089021520 +2024-11-11 14:08:53,590 Train Loss: 0.0125182, Val Loss: 0.0115146 +2024-11-11 14:08:53,591 Epoch 44/2000 +2024-11-11 14:09:09,053 Current Learning Rate: 0.0088525662 +2024-11-11 14:09:09,853 Train Loss: 0.0124835, Val Loss: 0.0114822 +2024-11-11 14:09:09,853 Epoch 45/2000 +2024-11-11 14:09:24,865 Current Learning Rate: 0.0088020298 +2024-11-11 14:09:25,694 Train Loss: 0.0124573, Val Loss: 0.0114429 +2024-11-11 14:09:25,694 Epoch 46/2000 +2024-11-11 14:09:39,949 Current Learning Rate: 0.0087505553 +2024-11-11 14:09:40,802 Train Loss: 0.0124457, Val Loss: 0.0114221 +2024-11-11 14:09:40,803 Epoch 47/2000 +2024-11-11 14:09:55,217 Current Learning Rate: 0.0086981555 +2024-11-11 14:09:56,011 Train Loss: 0.0124224, Val Loss: 0.0114076 +2024-11-11 14:09:56,012 Epoch 48/2000 +2024-11-11 14:10:10,537 Current Learning Rate: 0.0086448431 +2024-11-11 14:10:11,397 Train Loss: 0.0124149, Val Loss: 0.0113851 +2024-11-11 14:10:11,397 Epoch 49/2000 +2024-11-11 14:10:26,503 Current Learning Rate: 
0.0085906315 +2024-11-11 14:10:26,504 Train Loss: 0.0124183, Val Loss: 0.0114720 +2024-11-11 14:10:26,504 Epoch 50/2000 +2024-11-11 14:10:41,894 Current Learning Rate: 0.0085355339 +2024-11-11 14:10:41,895 Train Loss: 0.0123957, Val Loss: 0.0113915 +2024-11-11 14:10:41,895 Epoch 51/2000 +2024-11-11 14:10:57,760 Current Learning Rate: 0.0084795640 +2024-11-11 14:10:58,530 Train Loss: 0.0123648, Val Loss: 0.0113490 +2024-11-11 14:10:58,531 Epoch 52/2000 +2024-11-11 14:11:13,324 Current Learning Rate: 0.0084227355 +2024-11-11 14:11:13,325 Train Loss: 0.0123474, Val Loss: 0.0113556 +2024-11-11 14:11:13,326 Epoch 53/2000 +2024-11-11 14:11:28,539 Current Learning Rate: 0.0083650626 +2024-11-11 14:11:29,242 Train Loss: 0.0123528, Val Loss: 0.0113295 +2024-11-11 14:11:29,242 Epoch 54/2000 +2024-11-11 14:11:43,858 Current Learning Rate: 0.0083065593 +2024-11-11 14:11:43,859 Train Loss: 0.0123243, Val Loss: 0.0113394 +2024-11-11 14:11:43,859 Epoch 55/2000 +2024-11-11 14:11:59,784 Current Learning Rate: 0.0082472402 +2024-11-11 14:12:00,507 Train Loss: 0.0122991, Val Loss: 0.0112948 +2024-11-11 14:12:00,507 Epoch 56/2000 +2024-11-11 14:12:15,124 Current Learning Rate: 0.0081871199 +2024-11-11 14:12:15,125 Train Loss: 0.0122957, Val Loss: 0.0113359 +2024-11-11 14:12:15,125 Epoch 57/2000 +2024-11-11 14:12:30,952 Current Learning Rate: 0.0081262133 +2024-11-11 14:12:31,721 Train Loss: 0.0122802, Val Loss: 0.0112878 +2024-11-11 14:12:31,721 Epoch 58/2000 +2024-11-11 14:12:46,994 Current Learning Rate: 0.0080645353 +2024-11-11 14:12:46,995 Train Loss: 0.0122734, Val Loss: 0.0112881 +2024-11-11 14:12:46,995 Epoch 59/2000 +2024-11-11 14:13:02,764 Current Learning Rate: 0.0080021011 +2024-11-11 14:13:02,764 Train Loss: 0.0122800, Val Loss: 0.0112881 +2024-11-11 14:13:02,765 Epoch 60/2000 +2024-11-11 14:13:18,621 Current Learning Rate: 0.0079389263 +2024-11-11 14:13:19,638 Train Loss: 0.0122700, Val Loss: 0.0112763 +2024-11-11 14:13:19,638 Epoch 61/2000 +2024-11-11 14:13:35,644 
Current Learning Rate: 0.0078750263 +2024-11-11 14:13:36,634 Train Loss: 0.0122608, Val Loss: 0.0112667 +2024-11-11 14:13:36,634 Epoch 62/2000 +2024-11-11 14:13:52,697 Current Learning Rate: 0.0078104169 +2024-11-11 14:13:53,451 Train Loss: 0.0122487, Val Loss: 0.0112643 +2024-11-11 14:13:53,451 Epoch 63/2000 +2024-11-11 14:14:08,402 Current Learning Rate: 0.0077451141 +2024-11-11 14:14:09,416 Train Loss: 0.0122393, Val Loss: 0.0112629 +2024-11-11 14:14:09,417 Epoch 64/2000 +2024-11-11 14:14:25,244 Current Learning Rate: 0.0076791340 +2024-11-11 14:14:26,298 Train Loss: 0.0122336, Val Loss: 0.0112508 +2024-11-11 14:14:26,299 Epoch 65/2000 +2024-11-11 14:14:41,517 Current Learning Rate: 0.0076124928 +2024-11-11 14:14:42,293 Train Loss: 0.0122102, Val Loss: 0.0112134 +2024-11-11 14:14:42,293 Epoch 66/2000 +2024-11-11 14:14:58,575 Current Learning Rate: 0.0075452071 +2024-11-11 14:14:59,539 Train Loss: 0.0122081, Val Loss: 0.0112017 +2024-11-11 14:14:59,540 Epoch 67/2000 +2024-11-11 14:15:15,566 Current Learning Rate: 0.0074772933 +2024-11-11 14:15:16,590 Train Loss: 0.0121974, Val Loss: 0.0111912 +2024-11-11 14:15:16,591 Epoch 68/2000 +2024-11-11 14:15:32,850 Current Learning Rate: 0.0074087684 +2024-11-11 14:15:32,850 Train Loss: 0.0121820, Val Loss: 0.0111961 +2024-11-11 14:15:32,851 Epoch 69/2000 +2024-11-11 14:15:48,748 Current Learning Rate: 0.0073396491 +2024-11-11 14:15:49,551 Train Loss: 0.0121818, Val Loss: 0.0111584 +2024-11-11 14:15:49,552 Epoch 70/2000 +2024-11-11 14:16:04,629 Current Learning Rate: 0.0072699525 +2024-11-11 14:16:05,721 Train Loss: 0.0121623, Val Loss: 0.0111324 +2024-11-11 14:16:05,721 Epoch 71/2000 +2024-11-11 14:16:22,081 Current Learning Rate: 0.0071996958 +2024-11-11 14:16:22,831 Train Loss: 0.0121434, Val Loss: 0.0111188 +2024-11-11 14:16:22,832 Epoch 72/2000 +2024-11-11 14:16:37,019 Current Learning Rate: 0.0071288965 +2024-11-11 14:16:37,901 Train Loss: 0.0121036, Val Loss: 0.0111138 +2024-11-11 14:16:37,902 Epoch 73/2000 
+2024-11-11 14:16:53,012 Current Learning Rate: 0.0070575718 +2024-11-11 14:16:53,013 Train Loss: 0.0120830, Val Loss: 0.0111231 +2024-11-11 14:16:53,014 Epoch 74/2000 +2024-11-11 14:17:08,410 Current Learning Rate: 0.0069857395 +2024-11-11 14:17:09,336 Train Loss: 0.0120593, Val Loss: 0.0110455 +2024-11-11 14:17:09,336 Epoch 75/2000 +2024-11-11 14:17:25,214 Current Learning Rate: 0.0069134172 +2024-11-11 14:17:26,247 Train Loss: 0.0120199, Val Loss: 0.0110349 +2024-11-11 14:17:26,248 Epoch 76/2000 +2024-11-11 14:17:42,284 Current Learning Rate: 0.0068406228 +2024-11-11 14:17:43,352 Train Loss: 0.0120269, Val Loss: 0.0109805 +2024-11-11 14:17:43,353 Epoch 77/2000 +2024-11-11 14:17:58,616 Current Learning Rate: 0.0067673742 +2024-11-11 14:17:59,448 Train Loss: 0.0118804, Val Loss: 0.0108571 +2024-11-11 14:17:59,448 Epoch 78/2000 +2024-11-11 14:18:14,848 Current Learning Rate: 0.0066936896 +2024-11-11 14:18:14,849 Train Loss: 0.0118254, Val Loss: 0.0109454 +2024-11-11 14:18:14,849 Epoch 79/2000 +2024-11-11 14:18:31,465 Current Learning Rate: 0.0066195871 +2024-11-11 14:18:32,467 Train Loss: 0.0116624, Val Loss: 0.0105468 +2024-11-11 14:18:32,467 Epoch 80/2000 +2024-11-11 14:18:48,726 Current Learning Rate: 0.0065450850 +2024-11-11 14:18:49,776 Train Loss: 0.0116413, Val Loss: 0.0104802 +2024-11-11 14:18:49,776 Epoch 81/2000 +2024-11-11 14:19:05,544 Current Learning Rate: 0.0064702016 +2024-11-11 14:19:06,615 Train Loss: 0.0113220, Val Loss: 0.0103361 +2024-11-11 14:19:06,617 Epoch 82/2000 +2024-11-11 14:19:22,882 Current Learning Rate: 0.0063949555 +2024-11-11 14:19:23,886 Train Loss: 0.0111988, Val Loss: 0.0101918 +2024-11-11 14:19:23,887 Epoch 83/2000 +2024-11-11 14:19:40,105 Current Learning Rate: 0.0063193652 +2024-11-11 14:19:41,141 Train Loss: 0.0110398, Val Loss: 0.0101036 +2024-11-11 14:19:41,142 Epoch 84/2000 +2024-11-11 14:19:57,519 Current Learning Rate: 0.0062434494 +2024-11-11 14:19:58,597 Train Loss: 0.0109725, Val Loss: 0.0100524 +2024-11-11 
14:19:58,597 Epoch 85/2000 +2024-11-11 14:20:14,789 Current Learning Rate: 0.0061672268 +2024-11-11 14:20:15,557 Train Loss: 0.0109243, Val Loss: 0.0099134 +2024-11-11 14:20:15,557 Epoch 86/2000 +2024-11-11 14:20:30,737 Current Learning Rate: 0.0060907162 +2024-11-11 14:20:31,628 Train Loss: 0.0107117, Val Loss: 0.0098298 +2024-11-11 14:20:31,628 Epoch 87/2000 +2024-11-11 14:20:46,658 Current Learning Rate: 0.0060139365 +2024-11-11 14:20:46,659 Train Loss: 0.0107305, Val Loss: 0.0098862 +2024-11-11 14:20:46,659 Epoch 88/2000 +2024-11-11 14:21:02,896 Current Learning Rate: 0.0059369066 +2024-11-11 14:21:04,661 Train Loss: 0.0105596, Val Loss: 0.0096487 +2024-11-11 14:21:04,661 Epoch 89/2000 +2024-11-11 14:21:20,799 Current Learning Rate: 0.0058596455 +2024-11-11 14:21:21,613 Train Loss: 0.0103846, Val Loss: 0.0095496 +2024-11-11 14:21:21,613 Epoch 90/2000 +2024-11-11 14:21:36,590 Current Learning Rate: 0.0057821723 +2024-11-11 14:21:37,478 Train Loss: 0.0102785, Val Loss: 0.0094894 +2024-11-11 14:21:37,479 Epoch 91/2000 +2024-11-11 14:21:52,472 Current Learning Rate: 0.0057045062 +2024-11-11 14:21:53,255 Train Loss: 0.0101886, Val Loss: 0.0093482 +2024-11-11 14:21:53,255 Epoch 92/2000 +2024-11-11 14:22:08,255 Current Learning Rate: 0.0056266662 +2024-11-11 14:22:08,256 Train Loss: 0.0100830, Val Loss: 0.0093750 +2024-11-11 14:22:08,256 Epoch 93/2000 +2024-11-11 14:22:24,411 Current Learning Rate: 0.0055486716 +2024-11-11 14:22:25,170 Train Loss: 0.0100160, Val Loss: 0.0091938 +2024-11-11 14:22:25,170 Epoch 94/2000 +2024-11-11 14:22:39,379 Current Learning Rate: 0.0054705416 +2024-11-11 14:22:40,156 Train Loss: 0.0099185, Val Loss: 0.0091012 +2024-11-11 14:22:40,157 Epoch 95/2000 +2024-11-11 14:22:55,352 Current Learning Rate: 0.0053922955 +2024-11-11 14:22:56,117 Train Loss: 0.0097893, Val Loss: 0.0090778 +2024-11-11 14:22:56,118 Epoch 96/2000 +2024-11-11 14:23:11,657 Current Learning Rate: 0.0053139526 +2024-11-11 14:23:12,400 Train Loss: 0.0097754, Val Loss: 
0.0089586 +2024-11-11 14:23:12,401 Epoch 97/2000 +2024-11-11 14:23:28,012 Current Learning Rate: 0.0052355323 +2024-11-11 14:23:28,750 Train Loss: 0.0096276, Val Loss: 0.0089473 +2024-11-11 14:23:28,750 Epoch 98/2000 +2024-11-11 14:23:43,761 Current Learning Rate: 0.0051570538 +2024-11-11 14:23:43,762 Train Loss: 0.0096329, Val Loss: 0.0091781 +2024-11-11 14:23:43,762 Epoch 99/2000 +2024-11-11 14:23:59,405 Current Learning Rate: 0.0050785366 +2024-11-11 14:24:00,170 Train Loss: 0.0095311, Val Loss: 0.0087892 +2024-11-11 14:24:00,170 Epoch 100/2000 +2024-11-11 14:24:15,090 Current Learning Rate: 0.0050000000 +2024-11-11 14:24:15,921 Train Loss: 0.0094008, Val Loss: 0.0086809 +2024-11-11 14:24:15,922 Epoch 101/2000 +2024-11-11 14:24:32,953 Current Learning Rate: 0.0049214634 +2024-11-11 14:24:32,953 Train Loss: 0.0094966, Val Loss: 0.0090536 +2024-11-11 14:24:32,953 Epoch 102/2000 +2024-11-11 14:24:48,689 Current Learning Rate: 0.0048429462 +2024-11-11 14:24:48,690 Train Loss: 0.0098488, Val Loss: 0.0090301 +2024-11-11 14:24:48,691 Epoch 103/2000 +2024-11-11 14:25:03,879 Current Learning Rate: 0.0047644677 +2024-11-11 14:25:03,880 Train Loss: 0.0094386, Val Loss: 0.0102315 +2024-11-11 14:25:03,881 Epoch 104/2000 +2024-11-11 14:25:19,963 Current Learning Rate: 0.0046860474 +2024-11-11 14:25:19,964 Train Loss: 0.0097765, Val Loss: 0.0087173 +2024-11-11 14:25:19,964 Epoch 105/2000 +2024-11-11 14:25:35,667 Current Learning Rate: 0.0046077045 +2024-11-11 14:25:35,667 Train Loss: 0.0093596, Val Loss: 0.0088081 +2024-11-11 14:25:35,667 Epoch 106/2000 +2024-11-11 14:25:51,694 Current Learning Rate: 0.0045294584 +2024-11-11 14:25:52,495 Train Loss: 0.0093826, Val Loss: 0.0085501 +2024-11-11 14:25:52,495 Epoch 107/2000 +2024-11-11 14:26:07,785 Current Learning Rate: 0.0044513284 +2024-11-11 14:26:07,786 Train Loss: 0.0092222, Val Loss: 0.0085908 +2024-11-11 14:26:07,786 Epoch 108/2000 +2024-11-11 14:26:23,748 Current Learning Rate: 0.0043733338 +2024-11-11 14:26:24,524 Train 
Loss: 0.0091933, Val Loss: 0.0084863 +2024-11-11 14:26:24,525 Epoch 109/2000 +2024-11-11 14:26:39,085 Current Learning Rate: 0.0042954938 +2024-11-11 14:26:39,086 Train Loss: 0.0093985, Val Loss: 0.0086324 +2024-11-11 14:26:39,086 Epoch 110/2000 +2024-11-11 14:26:55,275 Current Learning Rate: 0.0042178277 +2024-11-11 14:26:55,275 Train Loss: 0.0093343, Val Loss: 0.0085354 +2024-11-11 14:26:55,276 Epoch 111/2000 +2024-11-11 14:27:11,247 Current Learning Rate: 0.0041403545 +2024-11-11 14:27:12,026 Train Loss: 0.0090339, Val Loss: 0.0084420 +2024-11-11 14:27:12,026 Epoch 112/2000 +2024-11-11 14:27:26,618 Current Learning Rate: 0.0040630934 +2024-11-11 14:27:27,462 Train Loss: 0.0089710, Val Loss: 0.0083739 +2024-11-11 14:27:27,462 Epoch 113/2000 +2024-11-11 14:27:42,084 Current Learning Rate: 0.0039860635 +2024-11-11 14:27:42,084 Train Loss: 0.0090989, Val Loss: 0.0084235 +2024-11-11 14:27:42,085 Epoch 114/2000 +2024-11-11 14:27:58,247 Current Learning Rate: 0.0039092838 +2024-11-11 14:27:58,248 Train Loss: 0.0089871, Val Loss: 0.0084957 +2024-11-11 14:27:58,248 Epoch 115/2000 +2024-11-11 14:28:14,890 Current Learning Rate: 0.0038327732 +2024-11-11 14:28:14,891 Train Loss: 0.0095247, Val Loss: 0.0091416 +2024-11-11 14:28:14,891 Epoch 116/2000 +2024-11-11 14:28:31,490 Current Learning Rate: 0.0037565506 +2024-11-11 14:28:32,222 Train Loss: 0.0090694, Val Loss: 0.0082615 +2024-11-11 14:28:32,222 Epoch 117/2000 +2024-11-11 14:28:46,636 Current Learning Rate: 0.0036806348 +2024-11-11 14:28:46,637 Train Loss: 0.0089576, Val Loss: 0.0082832 +2024-11-11 14:28:46,637 Epoch 118/2000 +2024-11-11 14:29:01,774 Current Learning Rate: 0.0036050445 +2024-11-11 14:29:03,468 Train Loss: 0.0088039, Val Loss: 0.0080960 +2024-11-11 14:29:03,468 Epoch 119/2000 +2024-11-11 14:29:17,745 Current Learning Rate: 0.0035297984 +2024-11-11 14:29:17,746 Train Loss: 0.0088573, Val Loss: 0.0081461 +2024-11-11 14:29:17,746 Epoch 120/2000 +2024-11-11 14:29:33,047 Current Learning Rate: 0.0034549150 
+2024-11-11 14:29:33,837 Train Loss: 0.0085194, Val Loss: 0.0079936 +2024-11-11 14:29:33,837 Epoch 121/2000 +2024-11-11 14:29:48,437 Current Learning Rate: 0.0033804129 +2024-11-11 14:29:49,164 Train Loss: 0.0084638, Val Loss: 0.0079802 +2024-11-11 14:29:49,165 Epoch 122/2000 +2024-11-11 14:30:04,560 Current Learning Rate: 0.0033063104 +2024-11-11 14:30:05,343 Train Loss: 0.0083644, Val Loss: 0.0077298 +2024-11-11 14:30:05,344 Epoch 123/2000 +2024-11-11 14:30:19,769 Current Learning Rate: 0.0032326258 +2024-11-11 14:30:20,557 Train Loss: 0.0081522, Val Loss: 0.0076117 +2024-11-11 14:30:20,557 Epoch 124/2000 +2024-11-11 14:30:35,764 Current Learning Rate: 0.0031593772 +2024-11-11 14:30:35,765 Train Loss: 0.0081840, Val Loss: 0.0076353 +2024-11-11 14:30:35,765 Epoch 125/2000 +2024-11-11 14:30:51,376 Current Learning Rate: 0.0030865828 +2024-11-11 14:30:52,227 Train Loss: 0.0081295, Val Loss: 0.0076054 +2024-11-11 14:30:52,227 Epoch 126/2000 +2024-11-11 14:31:06,569 Current Learning Rate: 0.0030142605 +2024-11-11 14:31:07,344 Train Loss: 0.0079633, Val Loss: 0.0075449 +2024-11-11 14:31:07,344 Epoch 127/2000 +2024-11-11 14:31:22,482 Current Learning Rate: 0.0029424282 +2024-11-11 14:31:23,522 Train Loss: 0.0079932, Val Loss: 0.0074044 +2024-11-11 14:31:23,522 Epoch 128/2000 +2024-11-11 14:31:39,151 Current Learning Rate: 0.0028711035 +2024-11-11 14:31:39,152 Train Loss: 0.0079451, Val Loss: 0.0074685 +2024-11-11 14:31:39,152 Epoch 129/2000 +2024-11-11 14:31:53,855 Current Learning Rate: 0.0028003042 +2024-11-11 14:31:54,643 Train Loss: 0.0080458, Val Loss: 0.0073241 +2024-11-11 14:31:54,643 Epoch 130/2000 +2024-11-11 14:32:09,356 Current Learning Rate: 0.0027300475 +2024-11-11 14:32:09,357 Train Loss: 0.0077950, Val Loss: 0.0073365 +2024-11-11 14:32:09,357 Epoch 131/2000 +2024-11-11 14:32:24,716 Current Learning Rate: 0.0026603509 +2024-11-11 14:32:24,717 Train Loss: 0.0078814, Val Loss: 0.0075878 +2024-11-11 14:32:24,717 Epoch 132/2000 +2024-11-11 14:32:39,992 Current 
Learning Rate: 0.0025912316 +2024-11-11 14:32:39,993 Train Loss: 0.0077670, Val Loss: 0.0075323 +2024-11-11 14:32:39,993 Epoch 133/2000 +2024-11-11 14:32:55,396 Current Learning Rate: 0.0025227067 +2024-11-11 14:32:56,191 Train Loss: 0.0079082, Val Loss: 0.0073139 +2024-11-11 14:32:56,191 Epoch 134/2000 +2024-11-11 14:33:11,173 Current Learning Rate: 0.0024547929 +2024-11-11 14:33:11,982 Train Loss: 0.0078441, Val Loss: 0.0071975 +2024-11-11 14:33:11,982 Epoch 135/2000 +2024-11-11 14:33:26,885 Current Learning Rate: 0.0023875072 +2024-11-11 14:33:27,631 Train Loss: 0.0076429, Val Loss: 0.0071971 +2024-11-11 14:33:27,632 Epoch 136/2000 +2024-11-11 14:33:42,905 Current Learning Rate: 0.0023208660 +2024-11-11 14:33:42,906 Train Loss: 0.0077887, Val Loss: 0.0072025 +2024-11-11 14:33:42,906 Epoch 137/2000 +2024-11-11 14:33:59,633 Current Learning Rate: 0.0022548859 +2024-11-11 14:33:59,634 Train Loss: 0.0075687, Val Loss: 0.0072376 +2024-11-11 14:33:59,634 Epoch 138/2000 +2024-11-11 14:34:15,815 Current Learning Rate: 0.0021895831 +2024-11-11 14:34:16,588 Train Loss: 0.0076570, Val Loss: 0.0071839 +2024-11-11 14:34:16,588 Epoch 139/2000 +2024-11-11 14:34:31,610 Current Learning Rate: 0.0021249737 +2024-11-11 14:34:32,318 Train Loss: 0.0076112, Val Loss: 0.0070536 +2024-11-11 14:34:32,319 Epoch 140/2000 +2024-11-11 14:34:48,361 Current Learning Rate: 0.0020610737 +2024-11-11 14:34:48,362 Train Loss: 0.0074787, Val Loss: 0.0071142 +2024-11-11 14:34:48,362 Epoch 141/2000 +2024-11-11 14:35:04,661 Current Learning Rate: 0.0019978989 +2024-11-11 14:35:05,481 Train Loss: 0.0075355, Val Loss: 0.0070531 +2024-11-11 14:35:05,481 Epoch 142/2000 +2024-11-11 14:35:20,933 Current Learning Rate: 0.0019354647 +2024-11-11 14:35:21,667 Train Loss: 0.0074945, Val Loss: 0.0070093 +2024-11-11 14:35:21,667 Epoch 143/2000 +2024-11-11 14:35:37,283 Current Learning Rate: 0.0018737867 +2024-11-11 14:35:38,050 Train Loss: 0.0075831, Val Loss: 0.0069973 +2024-11-11 14:35:38,050 Epoch 144/2000 
+2024-11-11 14:35:52,508 Current Learning Rate: 0.0018128801 +2024-11-11 14:35:52,509 Train Loss: 0.0074640, Val Loss: 0.0070031 +2024-11-11 14:35:52,509 Epoch 145/2000 +2024-11-11 14:36:07,813 Current Learning Rate: 0.0017527598 +2024-11-11 14:36:07,814 Train Loss: 0.0075974, Val Loss: 0.0070424 +2024-11-11 14:36:07,814 Epoch 146/2000 +2024-11-11 14:36:23,208 Current Learning Rate: 0.0016934407 +2024-11-11 14:36:24,031 Train Loss: 0.0074360, Val Loss: 0.0069656 +2024-11-11 14:36:24,032 Epoch 147/2000 +2024-11-11 14:36:38,977 Current Learning Rate: 0.0016349374 +2024-11-11 14:36:40,023 Train Loss: 0.0074785, Val Loss: 0.0069548 +2024-11-11 14:36:40,023 Epoch 148/2000 +2024-11-11 14:36:55,988 Current Learning Rate: 0.0015772645 +2024-11-11 14:36:55,989 Train Loss: 0.0074445, Val Loss: 0.0069636 +2024-11-11 14:36:55,989 Epoch 149/2000 +2024-11-11 14:37:12,244 Current Learning Rate: 0.0015204360 +2024-11-11 14:37:13,285 Train Loss: 0.0073779, Val Loss: 0.0069269 +2024-11-11 14:37:13,286 Epoch 150/2000 +2024-11-11 14:37:29,307 Current Learning Rate: 0.0014644661 +2024-11-11 14:37:30,181 Train Loss: 0.0074302, Val Loss: 0.0069077 +2024-11-11 14:37:30,182 Epoch 151/2000 +2024-11-11 14:37:45,669 Current Learning Rate: 0.0014093685 +2024-11-11 14:37:45,670 Train Loss: 0.0075630, Val Loss: 0.0069273 +2024-11-11 14:37:45,670 Epoch 152/2000 +2024-11-11 14:38:01,698 Current Learning Rate: 0.0013551569 +2024-11-11 14:38:01,699 Train Loss: 0.0074667, Val Loss: 0.0069243 +2024-11-11 14:38:01,699 Epoch 153/2000 +2024-11-11 14:38:17,812 Current Learning Rate: 0.0013018445 +2024-11-11 14:38:17,812 Train Loss: 0.0074766, Val Loss: 0.0069102 +2024-11-11 14:38:17,813 Epoch 154/2000 +2024-11-11 14:38:33,435 Current Learning Rate: 0.0012494447 +2024-11-11 14:38:33,436 Train Loss: 0.0073373, Val Loss: 0.0070224 +2024-11-11 14:38:33,436 Epoch 155/2000 +2024-11-11 14:38:48,699 Current Learning Rate: 0.0011979702 +2024-11-11 14:38:49,458 Train Loss: 0.0072681, Val Loss: 0.0068770 +2024-11-11 
14:38:49,458 Epoch 156/2000 +2024-11-11 14:39:03,830 Current Learning Rate: 0.0011474338 +2024-11-11 14:39:03,831 Train Loss: 0.0073033, Val Loss: 0.0068844 +2024-11-11 14:39:03,831 Epoch 157/2000 +2024-11-11 14:39:19,716 Current Learning Rate: 0.0010978480 +2024-11-11 14:39:19,716 Train Loss: 0.0072875, Val Loss: 0.0068927 +2024-11-11 14:39:19,717 Epoch 158/2000 +2024-11-11 14:39:35,281 Current Learning Rate: 0.0010492249 +2024-11-11 14:39:36,151 Train Loss: 0.0073433, Val Loss: 0.0068395 +2024-11-11 14:39:36,152 Epoch 159/2000 +2024-11-11 14:39:51,357 Current Learning Rate: 0.0010015767 +2024-11-11 14:39:52,371 Train Loss: 0.0073250, Val Loss: 0.0068180 +2024-11-11 14:39:52,371 Epoch 160/2000 +2024-11-11 14:40:08,373 Current Learning Rate: 0.0009549150 +2024-11-11 14:40:09,421 Train Loss: 0.0073589, Val Loss: 0.0068086 +2024-11-11 14:40:09,422 Epoch 161/2000 +2024-11-11 14:40:25,682 Current Learning Rate: 0.0009092514 +2024-11-11 14:40:25,683 Train Loss: 0.0073513, Val Loss: 0.0068151 +2024-11-11 14:40:25,683 Epoch 162/2000 +2024-11-11 14:40:42,024 Current Learning Rate: 0.0008645971 +2024-11-11 14:40:42,024 Train Loss: 0.0073903, Val Loss: 0.0068148 +2024-11-11 14:40:42,025 Epoch 163/2000 +2024-11-11 14:40:57,038 Current Learning Rate: 0.0008209632 +2024-11-11 14:40:57,782 Train Loss: 0.0073884, Val Loss: 0.0068027 +2024-11-11 14:40:57,782 Epoch 164/2000 +2024-11-11 14:41:12,557 Current Learning Rate: 0.0007783604 +2024-11-11 14:41:13,381 Train Loss: 0.0073094, Val Loss: 0.0067945 +2024-11-11 14:41:13,381 Epoch 165/2000 +2024-11-11 14:41:27,996 Current Learning Rate: 0.0007367992 +2024-11-11 14:41:28,698 Train Loss: 0.0073065, Val Loss: 0.0067943 +2024-11-11 14:41:28,698 Epoch 166/2000 +2024-11-11 14:41:43,623 Current Learning Rate: 0.0006962899 +2024-11-11 14:41:44,318 Train Loss: 0.0073264, Val Loss: 0.0067916 +2024-11-11 14:41:44,319 Epoch 167/2000 +2024-11-11 14:41:59,347 Current Learning Rate: 0.0006568424 +2024-11-11 14:42:00,015 Train Loss: 0.0072393, Val 
Loss: 0.0067788 +2024-11-11 14:42:00,015 Epoch 168/2000 +2024-11-11 14:42:15,282 Current Learning Rate: 0.0006184666 +2024-11-11 14:42:16,149 Train Loss: 0.0072001, Val Loss: 0.0067757 +2024-11-11 14:42:16,149 Epoch 169/2000 +2024-11-11 14:42:31,300 Current Learning Rate: 0.0005811718 +2024-11-11 14:42:32,027 Train Loss: 0.0072500, Val Loss: 0.0067664 +2024-11-11 14:42:32,027 Epoch 170/2000 +2024-11-11 14:42:46,442 Current Learning Rate: 0.0005449674 +2024-11-11 14:42:47,150 Train Loss: 0.0071376, Val Loss: 0.0067461 +2024-11-11 14:42:47,151 Epoch 171/2000 +2024-11-11 14:43:02,596 Current Learning Rate: 0.0005098621 +2024-11-11 14:43:03,505 Train Loss: 0.0072696, Val Loss: 0.0067427 +2024-11-11 14:43:03,506 Epoch 172/2000 +2024-11-11 14:43:19,383 Current Learning Rate: 0.0004758647 +2024-11-11 14:43:19,384 Train Loss: 0.0072316, Val Loss: 0.0067536 +2024-11-11 14:43:19,385 Epoch 173/2000 +2024-11-11 14:43:36,058 Current Learning Rate: 0.0004429836 +2024-11-11 14:43:36,791 Train Loss: 0.0071943, Val Loss: 0.0067361 +2024-11-11 14:43:36,791 Epoch 174/2000 +2024-11-11 14:43:51,310 Current Learning Rate: 0.0004112269 +2024-11-11 14:43:52,133 Train Loss: 0.0071758, Val Loss: 0.0067280 +2024-11-11 14:43:52,133 Epoch 175/2000 +2024-11-11 14:44:07,358 Current Learning Rate: 0.0003806023 +2024-11-11 14:44:07,359 Train Loss: 0.0072222, Val Loss: 0.0067455 +2024-11-11 14:44:07,359 Epoch 176/2000 +2024-11-11 14:44:22,137 Current Learning Rate: 0.0003511176 +2024-11-11 14:44:23,001 Train Loss: 0.0071256, Val Loss: 0.0067152 +2024-11-11 14:44:23,001 Epoch 177/2000 +2024-11-11 14:44:37,471 Current Learning Rate: 0.0003227798 +2024-11-11 14:44:37,471 Train Loss: 0.0071485, Val Loss: 0.0067262 +2024-11-11 14:44:37,472 Epoch 178/2000 +2024-11-11 14:44:52,901 Current Learning Rate: 0.0002955962 +2024-11-11 14:44:53,647 Train Loss: 0.0071857, Val Loss: 0.0067099 +2024-11-11 14:44:53,647 Epoch 179/2000 +2024-11-11 14:45:08,428 Current Learning Rate: 0.0002695732 +2024-11-11 
14:45:08,429 Train Loss: 0.0072040, Val Loss: 0.0067122 +2024-11-11 14:45:08,430 Epoch 180/2000 +2024-11-11 14:45:24,485 Current Learning Rate: 0.0002447174 +2024-11-11 14:45:24,486 Train Loss: 0.0072393, Val Loss: 0.0067132 +2024-11-11 14:45:24,486 Epoch 181/2000 +2024-11-11 14:45:40,812 Current Learning Rate: 0.0002210349 +2024-11-11 14:45:41,560 Train Loss: 0.0071451, Val Loss: 0.0066998 +2024-11-11 14:45:41,561 Epoch 182/2000 +2024-11-11 14:45:56,967 Current Learning Rate: 0.0001985316 +2024-11-11 14:45:56,969 Train Loss: 0.0071755, Val Loss: 0.0067005 +2024-11-11 14:45:56,969 Epoch 183/2000 +2024-11-11 14:46:12,765 Current Learning Rate: 0.0001772129 +2024-11-11 14:46:12,766 Train Loss: 0.0073248, Val Loss: 0.0067023 +2024-11-11 14:46:12,766 Epoch 184/2000 +2024-11-11 14:46:28,837 Current Learning Rate: 0.0001570842 +2024-11-11 14:46:29,586 Train Loss: 0.0071232, Val Loss: 0.0066986 +2024-11-11 14:46:29,587 Epoch 185/2000 +2024-11-11 14:46:44,200 Current Learning Rate: 0.0001381504 +2024-11-11 14:46:44,200 Train Loss: 0.0071744, Val Loss: 0.0067003 +2024-11-11 14:46:44,200 Epoch 186/2000 +2024-11-11 14:47:00,589 Current Learning Rate: 0.0001204162 +2024-11-11 14:47:00,590 Train Loss: 0.0070693, Val Loss: 0.0067045 +2024-11-11 14:47:00,590 Epoch 187/2000 +2024-11-11 14:47:16,675 Current Learning Rate: 0.0001038859 +2024-11-11 14:47:17,469 Train Loss: 0.0072742, Val Loss: 0.0066943 +2024-11-11 14:47:17,469 Epoch 188/2000 +2024-11-11 14:47:33,364 Current Learning Rate: 0.0000885637 +2024-11-11 14:47:34,337 Train Loss: 0.0071269, Val Loss: 0.0066895 +2024-11-11 14:47:34,338 Epoch 189/2000 +2024-11-11 14:47:50,191 Current Learning Rate: 0.0000744534 +2024-11-11 14:47:51,126 Train Loss: 0.0071737, Val Loss: 0.0066882 +2024-11-11 14:47:51,127 Epoch 190/2000 +2024-11-11 14:48:06,026 Current Learning Rate: 0.0000615583 +2024-11-11 14:48:06,820 Train Loss: 0.0070884, Val Loss: 0.0066866 +2024-11-11 14:48:06,820 Epoch 191/2000 +2024-11-11 14:48:21,312 Current Learning 
Rate: 0.0000498817 +2024-11-11 14:48:21,313 Train Loss: 0.0071572, Val Loss: 0.0066868 +2024-11-11 14:48:21,313 Epoch 192/2000 +2024-11-11 14:48:37,373 Current Learning Rate: 0.0000394265 +2024-11-11 14:48:38,177 Train Loss: 0.0071594, Val Loss: 0.0066853 +2024-11-11 14:48:38,178 Epoch 193/2000 +2024-11-11 14:48:52,743 Current Learning Rate: 0.0000301952 +2024-11-11 14:48:52,744 Train Loss: 0.0071081, Val Loss: 0.0066857 +2024-11-11 14:48:52,744 Epoch 194/2000 +2024-11-11 14:49:08,362 Current Learning Rate: 0.0000221902 +2024-11-11 14:49:09,116 Train Loss: 0.0071270, Val Loss: 0.0066851 +2024-11-11 14:49:09,117 Epoch 195/2000 +2024-11-11 14:49:23,562 Current Learning Rate: 0.0000154133 +2024-11-11 14:49:24,293 Train Loss: 0.0071012, Val Loss: 0.0066851 +2024-11-11 14:49:24,293 Epoch 196/2000 +2024-11-11 14:49:38,864 Current Learning Rate: 0.0000098664 +2024-11-11 14:49:38,865 Train Loss: 0.0071697, Val Loss: 0.0066872 +2024-11-11 14:49:38,865 Epoch 197/2000 +2024-11-11 14:49:54,603 Current Learning Rate: 0.0000055506 +2024-11-11 14:49:54,604 Train Loss: 0.0071689, Val Loss: 0.0066890 +2024-11-11 14:49:54,604 Epoch 198/2000 +2024-11-11 14:50:11,226 Current Learning Rate: 0.0000024672 +2024-11-11 14:50:11,227 Train Loss: 0.0072255, Val Loss: 0.0066871 +2024-11-11 14:50:11,227 Epoch 199/2000 +2024-11-11 14:50:27,207 Current Learning Rate: 0.0000006168 +2024-11-11 14:50:27,208 Train Loss: 0.0071076, Val Loss: 0.0066866 +2024-11-11 14:50:27,209 Epoch 200/2000 +2024-11-11 14:50:42,650 Current Learning Rate: 0.0000000000 +2024-11-11 14:50:42,651 Train Loss: 0.0071233, Val Loss: 0.0066863 +2024-11-11 14:50:42,651 Epoch 201/2000 +2024-11-11 14:50:59,131 Current Learning Rate: 0.0000006168 +2024-11-11 14:50:59,132 Train Loss: 0.0071115, Val Loss: 0.0066865 +2024-11-11 14:50:59,132 Epoch 202/2000 +2024-11-11 14:51:15,454 Current Learning Rate: 0.0000024672 +2024-11-11 14:51:15,455 Train Loss: 0.0071187, Val Loss: 0.0066864 +2024-11-11 14:51:15,456 Epoch 203/2000 +2024-11-11 
14:51:30,824 Current Learning Rate: 0.0000055506 +2024-11-11 14:51:30,825 Train Loss: 0.0071984, Val Loss: 0.0066861 +2024-11-11 14:51:30,825 Epoch 204/2000 +2024-11-11 14:51:46,130 Current Learning Rate: 0.0000098664 +2024-11-11 14:51:46,131 Train Loss: 0.0071159, Val Loss: 0.0066870 +2024-11-11 14:51:46,131 Epoch 205/2000 +2024-11-11 14:52:01,915 Current Learning Rate: 0.0000154133 +2024-11-11 14:52:01,916 Train Loss: 0.0071094, Val Loss: 0.0066877 +2024-11-11 14:52:01,916 Epoch 206/2000 +2024-11-11 14:52:18,069 Current Learning Rate: 0.0000221902 +2024-11-11 14:52:18,069 Train Loss: 0.0071672, Val Loss: 0.0066869 +2024-11-11 14:52:18,070 Epoch 207/2000 +2024-11-11 14:52:34,326 Current Learning Rate: 0.0000301952 +2024-11-11 14:52:35,349 Train Loss: 0.0071063, Val Loss: 0.0066836 +2024-11-11 14:52:35,349 Epoch 208/2000 +2024-11-11 14:52:50,950 Current Learning Rate: 0.0000394265 +2024-11-11 14:52:50,951 Train Loss: 0.0072819, Val Loss: 0.0066849 +2024-11-11 14:52:50,951 Epoch 209/2000 +2024-11-11 14:53:06,823 Current Learning Rate: 0.0000498817 +2024-11-11 14:53:06,824 Train Loss: 0.0071576, Val Loss: 0.0066842 +2024-11-11 14:53:06,824 Epoch 210/2000 +2024-11-11 14:53:22,350 Current Learning Rate: 0.0000615583 +2024-11-11 14:53:23,380 Train Loss: 0.0071084, Val Loss: 0.0066829 +2024-11-11 14:53:23,380 Epoch 211/2000 +2024-11-11 14:53:38,583 Current Learning Rate: 0.0000744534 +2024-11-11 14:53:39,588 Train Loss: 0.0071460, Val Loss: 0.0066817 +2024-11-11 14:53:39,589 Epoch 212/2000 +2024-11-11 14:53:56,036 Current Learning Rate: 0.0000885637 +2024-11-11 14:53:57,106 Train Loss: 0.0070951, Val Loss: 0.0066803 +2024-11-11 14:53:57,106 Epoch 213/2000 +2024-11-11 14:54:13,344 Current Learning Rate: 0.0001038859 +2024-11-11 14:54:13,345 Train Loss: 0.0071032, Val Loss: 0.0066816 +2024-11-11 14:54:13,345 Epoch 214/2000 +2024-11-11 14:54:29,228 Current Learning Rate: 0.0001204162 +2024-11-11 14:54:29,229 Train Loss: 0.0071267, Val Loss: 0.0066882 +2024-11-11 
14:54:29,229 Epoch 215/2000 +2024-11-11 14:54:44,483 Current Learning Rate: 0.0001381504 +2024-11-11 14:54:44,484 Train Loss: 0.0073151, Val Loss: 0.0066887 +2024-11-11 14:54:44,484 Epoch 216/2000 +2024-11-11 14:55:00,812 Current Learning Rate: 0.0001570842 +2024-11-11 14:55:00,812 Train Loss: 0.0072554, Val Loss: 0.0066824 +2024-11-11 14:55:00,812 Epoch 217/2000 +2024-11-11 14:55:16,058 Current Learning Rate: 0.0001772129 +2024-11-11 14:55:16,059 Train Loss: 0.0071964, Val Loss: 0.0066823 +2024-11-11 14:55:16,059 Epoch 218/2000 +2024-11-11 14:55:33,319 Current Learning Rate: 0.0001985316 +2024-11-11 14:55:34,352 Train Loss: 0.0072060, Val Loss: 0.0066800 +2024-11-11 14:55:34,352 Epoch 219/2000 +2024-11-11 14:55:50,242 Current Learning Rate: 0.0002210349 +2024-11-11 14:55:50,243 Train Loss: 0.0074376, Val Loss: 0.0066992 +2024-11-11 14:55:50,243 Epoch 220/2000 +2024-11-11 14:56:06,937 Current Learning Rate: 0.0002447174 +2024-11-11 14:56:06,937 Train Loss: 0.0073687, Val Loss: 0.0066983 +2024-11-11 14:56:06,937 Epoch 221/2000 +2024-11-11 14:56:21,670 Current Learning Rate: 0.0002695732 +2024-11-11 14:56:21,671 Train Loss: 0.0071589, Val Loss: 0.0066803 +2024-11-11 14:56:21,671 Epoch 222/2000 +2024-11-11 14:56:37,122 Current Learning Rate: 0.0002955962 +2024-11-11 14:56:37,929 Train Loss: 0.0070669, Val Loss: 0.0066722 +2024-11-11 14:56:37,930 Epoch 223/2000 +2024-11-11 14:56:52,638 Current Learning Rate: 0.0003227798 +2024-11-11 14:56:52,639 Train Loss: 0.0072259, Val Loss: 0.0066722 +2024-11-11 14:56:52,639 Epoch 224/2000 +2024-11-11 14:57:08,152 Current Learning Rate: 0.0003511176 +2024-11-11 14:57:08,153 Train Loss: 0.0071376, Val Loss: 0.0066735 +2024-11-11 14:57:08,153 Epoch 225/2000 +2024-11-11 14:57:24,526 Current Learning Rate: 0.0003806023 +2024-11-11 14:57:25,578 Train Loss: 0.0070889, Val Loss: 0.0066610 +2024-11-11 14:57:25,579 Epoch 226/2000 +2024-11-11 14:57:41,353 Current Learning Rate: 0.0004112269 +2024-11-11 14:57:42,354 Train Loss: 0.0070849, Val 
Loss: 0.0066532 +2024-11-11 14:57:42,354 Epoch 227/2000 +2024-11-11 14:57:57,520 Current Learning Rate: 0.0004429836 +2024-11-11 14:57:57,521 Train Loss: 0.0072774, Val Loss: 0.0066535 +2024-11-11 14:57:57,521 Epoch 228/2000 +2024-11-11 14:58:13,445 Current Learning Rate: 0.0004758647 +2024-11-11 14:58:13,446 Train Loss: 0.0071172, Val Loss: 0.0068272 +2024-11-11 14:58:13,446 Epoch 229/2000 +2024-11-11 14:58:30,203 Current Learning Rate: 0.0005098621 +2024-11-11 14:58:30,203 Train Loss: 0.0070752, Val Loss: 0.0066571 +2024-11-11 14:58:30,204 Epoch 230/2000 +2024-11-11 14:58:46,594 Current Learning Rate: 0.0005449674 +2024-11-11 14:58:46,595 Train Loss: 0.0072198, Val Loss: 0.0067566 +2024-11-11 14:58:46,595 Epoch 231/2000 +2024-11-11 14:59:03,017 Current Learning Rate: 0.0005811718 +2024-11-11 14:59:03,018 Train Loss: 0.0071745, Val Loss: 0.0068617 +2024-11-11 14:59:03,018 Epoch 232/2000 +2024-11-11 14:59:18,849 Current Learning Rate: 0.0006184666 +2024-11-11 14:59:18,850 Train Loss: 0.0071195, Val Loss: 0.0066977 +2024-11-11 14:59:18,850 Epoch 233/2000 +2024-11-11 14:59:35,116 Current Learning Rate: 0.0006568424 +2024-11-11 14:59:35,117 Train Loss: 0.0071271, Val Loss: 0.0066595 +2024-11-11 14:59:35,117 Epoch 234/2000 +2024-11-11 14:59:50,317 Current Learning Rate: 0.0006962899 +2024-11-11 14:59:51,113 Train Loss: 0.0071020, Val Loss: 0.0066361 +2024-11-11 14:59:51,113 Epoch 235/2000 +2024-11-11 15:00:07,488 Current Learning Rate: 0.0007367992 +2024-11-11 15:00:07,489 Train Loss: 0.0070847, Val Loss: 0.0066725 +2024-11-11 15:00:07,489 Epoch 236/2000 +2024-11-11 15:00:23,002 Current Learning Rate: 0.0007783604 +2024-11-11 15:00:23,791 Train Loss: 0.0070665, Val Loss: 0.0065985 +2024-11-11 15:00:23,792 Epoch 237/2000 +2024-11-11 15:00:38,908 Current Learning Rate: 0.0008209632 +2024-11-11 15:00:38,908 Train Loss: 0.0072264, Val Loss: 0.0066265 +2024-11-11 15:00:38,909 Epoch 238/2000 +2024-11-11 15:00:54,586 Current Learning Rate: 0.0008645971 +2024-11-11 
15:00:54,587 Train Loss: 0.0072261, Val Loss: 0.0066852 +2024-11-11 15:00:54,587 Epoch 239/2000 +2024-11-11 15:01:10,337 Current Learning Rate: 0.0009092514 +2024-11-11 15:01:11,348 Train Loss: 0.0070137, Val Loss: 0.0065943 +2024-11-11 15:01:11,349 Epoch 240/2000 +2024-11-11 15:01:27,526 Current Learning Rate: 0.0009549150 +2024-11-11 15:01:27,527 Train Loss: 0.0071517, Val Loss: 0.0066673 +2024-11-11 15:01:27,528 Epoch 241/2000 +2024-11-11 15:01:43,314 Current Learning Rate: 0.0010015767 +2024-11-11 15:01:43,315 Train Loss: 0.0071173, Val Loss: 0.0066433 +2024-11-11 15:01:43,315 Epoch 242/2000 +2024-11-11 15:01:58,615 Current Learning Rate: 0.0010492249 +2024-11-11 15:01:59,421 Train Loss: 0.0070518, Val Loss: 0.0065926 +2024-11-11 15:01:59,421 Epoch 243/2000 +2024-11-11 15:02:14,143 Current Learning Rate: 0.0010978480 +2024-11-11 15:02:14,143 Train Loss: 0.0070750, Val Loss: 0.0068506 +2024-11-11 15:02:14,144 Epoch 244/2000 +2024-11-11 15:02:31,550 Current Learning Rate: 0.0011474338 +2024-11-11 15:02:31,551 Train Loss: 0.0070213, Val Loss: 0.0067021 +2024-11-11 15:02:31,551 Epoch 245/2000 +2024-11-11 15:02:48,857 Current Learning Rate: 0.0011979702 +2024-11-11 15:02:49,666 Train Loss: 0.0070417, Val Loss: 0.0065648 +2024-11-11 15:02:49,666 Epoch 246/2000 +2024-11-11 15:03:04,849 Current Learning Rate: 0.0012494447 +2024-11-11 15:03:04,850 Train Loss: 0.0071130, Val Loss: 0.0067087 +2024-11-11 15:03:04,851 Epoch 247/2000 +2024-11-11 15:03:20,549 Current Learning Rate: 0.0013018445 +2024-11-11 15:03:20,550 Train Loss: 0.0069857, Val Loss: 0.0067104 +2024-11-11 15:03:20,550 Epoch 248/2000 +2024-11-11 15:03:36,330 Current Learning Rate: 0.0013551569 +2024-11-11 15:03:36,331 Train Loss: 0.0074562, Val Loss: 0.0067111 +2024-11-11 15:03:36,331 Epoch 249/2000 +2024-11-11 15:03:52,210 Current Learning Rate: 0.0014093685 +2024-11-11 15:03:52,210 Train Loss: 0.0072090, Val Loss: 0.0068691 +2024-11-11 15:03:52,211 Epoch 250/2000 +2024-11-11 15:04:08,081 Current Learning 
Rate: 0.0014644661 +2024-11-11 15:04:08,947 Train Loss: 0.0070478, Val Loss: 0.0065327 +2024-11-11 15:04:08,947 Epoch 251/2000 +2024-11-11 15:04:24,339 Current Learning Rate: 0.0015204360 +2024-11-11 15:04:24,340 Train Loss: 0.0070886, Val Loss: 0.0065896 +2024-11-11 15:04:24,340 Epoch 252/2000 +2024-11-11 15:04:40,240 Current Learning Rate: 0.0015772645 +2024-11-11 15:04:40,240 Train Loss: 0.0071351, Val Loss: 0.0065516 +2024-11-11 15:04:40,240 Epoch 253/2000 +2024-11-11 15:04:56,623 Current Learning Rate: 0.0016349374 +2024-11-11 15:04:57,379 Train Loss: 0.0068435, Val Loss: 0.0064097 +2024-11-11 15:04:57,379 Epoch 254/2000 +2024-11-11 15:05:12,359 Current Learning Rate: 0.0016934407 +2024-11-11 15:05:12,360 Train Loss: 0.0068755, Val Loss: 0.0077757 +2024-11-11 15:05:12,360 Epoch 255/2000 +2024-11-11 15:05:28,933 Current Learning Rate: 0.0017527598 +2024-11-11 15:05:28,933 Train Loss: 0.0070848, Val Loss: 0.0064744 +2024-11-11 15:05:28,934 Epoch 256/2000 +2024-11-11 15:05:43,919 Current Learning Rate: 0.0018128801 +2024-11-11 15:05:43,920 Train Loss: 0.0069190, Val Loss: 0.0065420 +2024-11-11 15:05:43,920 Epoch 257/2000 +2024-11-11 15:05:59,080 Current Learning Rate: 0.0018737867 +2024-11-11 15:05:59,781 Train Loss: 0.0067664, Val Loss: 0.0063515 +2024-11-11 15:05:59,781 Epoch 258/2000 +2024-11-11 15:06:15,350 Current Learning Rate: 0.0019354647 +2024-11-11 15:06:16,171 Train Loss: 0.0069395, Val Loss: 0.0062760 +2024-11-11 15:06:16,172 Epoch 259/2000 +2024-11-11 15:06:32,221 Current Learning Rate: 0.0019978989 +2024-11-11 15:06:32,221 Train Loss: 0.0067314, Val Loss: 0.0064107 +2024-11-11 15:06:32,222 Epoch 260/2000 +2024-11-11 15:06:49,390 Current Learning Rate: 0.0020610737 +2024-11-11 15:06:49,391 Train Loss: 0.0069326, Val Loss: 0.0065176 +2024-11-11 15:06:49,391 Epoch 261/2000 +2024-11-11 15:07:05,060 Current Learning Rate: 0.0021249737 +2024-11-11 15:07:05,060 Train Loss: 0.0069830, Val Loss: 0.0064641 +2024-11-11 15:07:05,061 Epoch 262/2000 +2024-11-11 
15:07:20,826 Current Learning Rate: 0.0021895831 +2024-11-11 15:07:21,915 Train Loss: 0.0068408, Val Loss: 0.0062663 +2024-11-11 15:07:21,915 Epoch 263/2000 +2024-11-11 15:07:38,287 Current Learning Rate: 0.0022548859 +2024-11-11 15:07:38,288 Train Loss: 0.0067675, Val Loss: 0.0064039 +2024-11-11 15:07:38,288 Epoch 264/2000 +2024-11-11 15:07:54,780 Current Learning Rate: 0.0023208660 +2024-11-11 15:07:55,593 Train Loss: 0.0069052, Val Loss: 0.0062365 +2024-11-11 15:07:55,593 Epoch 265/2000 +2024-11-11 15:08:10,051 Current Learning Rate: 0.0023875072 +2024-11-11 15:08:10,927 Train Loss: 0.0067346, Val Loss: 0.0061322 +2024-11-11 15:08:10,927 Epoch 266/2000 +2024-11-11 15:08:26,223 Current Learning Rate: 0.0024547929 +2024-11-11 15:08:26,224 Train Loss: 0.0069941, Val Loss: 0.0063703 +2024-11-11 15:08:26,224 Epoch 267/2000 +2024-11-11 15:08:41,711 Current Learning Rate: 0.0025227067 +2024-11-11 15:08:41,712 Train Loss: 0.0069256, Val Loss: 0.0064368 +2024-11-11 15:08:41,712 Epoch 268/2000 +2024-11-11 15:08:58,433 Current Learning Rate: 0.0025912316 +2024-11-11 15:08:58,434 Train Loss: 0.0069142, Val Loss: 0.0061911 +2024-11-11 15:08:58,434 Epoch 269/2000 +2024-11-11 15:09:14,540 Current Learning Rate: 0.0026603509 +2024-11-11 15:09:14,541 Train Loss: 0.0067452, Val Loss: 0.0063971 +2024-11-11 15:09:14,541 Epoch 270/2000 +2024-11-11 15:09:31,423 Current Learning Rate: 0.0027300475 +2024-11-11 15:09:31,423 Train Loss: 0.0066415, Val Loss: 0.0061838 +2024-11-11 15:09:31,423 Epoch 271/2000 +2024-11-11 15:09:46,756 Current Learning Rate: 0.0028003042 +2024-11-11 15:09:47,536 Train Loss: 0.0068366, Val Loss: 0.0059799 +2024-11-11 15:09:47,536 Epoch 272/2000 +2024-11-11 15:10:02,691 Current Learning Rate: 0.0028711035 +2024-11-11 15:10:02,692 Train Loss: 0.0064761, Val Loss: 0.0064109 +2024-11-11 15:10:02,692 Epoch 273/2000 +2024-11-11 15:10:18,359 Current Learning Rate: 0.0029424282 +2024-11-11 15:10:18,359 Train Loss: 0.0079866, Val Loss: 0.0064800 +2024-11-11 
15:10:18,359 Epoch 274/2000 +2024-11-11 15:10:33,310 Current Learning Rate: 0.0030142605 +2024-11-11 15:10:34,040 Train Loss: 0.0064719, Val Loss: 0.0059216 +2024-11-11 15:10:34,040 Epoch 275/2000 +2024-11-11 15:10:48,702 Current Learning Rate: 0.0030865828 +2024-11-11 15:10:49,468 Train Loss: 0.0063961, Val Loss: 0.0059214 +2024-11-11 15:10:49,469 Epoch 276/2000 +2024-11-11 15:11:03,948 Current Learning Rate: 0.0031593772 +2024-11-11 15:11:03,949 Train Loss: 0.0064374, Val Loss: 0.0063072 +2024-11-11 15:11:03,949 Epoch 277/2000 +2024-11-11 15:11:20,020 Current Learning Rate: 0.0032326258 +2024-11-11 15:11:20,020 Train Loss: 0.0063913, Val Loss: 0.0059689 +2024-11-11 15:11:20,020 Epoch 278/2000 +2024-11-11 15:11:35,884 Current Learning Rate: 0.0033063104 +2024-11-11 15:11:36,678 Train Loss: 0.0063676, Val Loss: 0.0058734 +2024-11-11 15:11:36,678 Epoch 279/2000 +2024-11-11 15:11:51,198 Current Learning Rate: 0.0033804129 +2024-11-11 15:11:51,199 Train Loss: 0.0070263, Val Loss: 0.0060165 +2024-11-11 15:11:51,199 Epoch 280/2000 +2024-11-11 15:12:06,757 Current Learning Rate: 0.0034549150 +2024-11-11 15:12:07,563 Train Loss: 0.0064468, Val Loss: 0.0058134 +2024-11-11 15:12:07,563 Epoch 281/2000 +2024-11-11 15:12:22,441 Current Learning Rate: 0.0035297984 +2024-11-11 15:12:22,442 Train Loss: 0.0063803, Val Loss: 0.0059726 +2024-11-11 15:12:22,442 Epoch 282/2000 +2024-11-11 15:12:38,403 Current Learning Rate: 0.0036050445 +2024-11-11 15:12:38,404 Train Loss: 0.0065073, Val Loss: 0.0061817 +2024-11-11 15:12:38,404 Epoch 283/2000 +2024-11-11 15:12:54,608 Current Learning Rate: 0.0036806348 +2024-11-11 15:12:54,608 Train Loss: 0.0064057, Val Loss: 0.0058602 +2024-11-11 15:12:54,609 Epoch 284/2000 +2024-11-11 15:13:09,226 Current Learning Rate: 0.0037565506 +2024-11-11 15:13:10,089 Train Loss: 0.0066021, Val Loss: 0.0057578 +2024-11-11 15:13:10,089 Epoch 285/2000 +2024-11-11 15:13:24,732 Current Learning Rate: 0.0038327732 +2024-11-11 15:13:25,544 Train Loss: 0.0062451, Val 
Loss: 0.0056805 +2024-11-11 15:13:25,544 Epoch 286/2000 +2024-11-11 15:13:40,205 Current Learning Rate: 0.0039092838 +2024-11-11 15:13:40,983 Train Loss: 0.0062877, Val Loss: 0.0055990 +2024-11-11 15:13:40,984 Epoch 287/2000 +2024-11-11 15:13:55,568 Current Learning Rate: 0.0039860635 +2024-11-11 15:13:55,569 Train Loss: 0.0062811, Val Loss: 0.0060695 +2024-11-11 15:13:55,569 Epoch 288/2000 +2024-11-11 15:14:11,163 Current Learning Rate: 0.0040630934 +2024-11-11 15:14:11,164 Train Loss: 0.0060915, Val Loss: 0.0056859 +2024-11-11 15:14:11,164 Epoch 289/2000 +2024-11-11 15:14:27,326 Current Learning Rate: 0.0041403545 +2024-11-11 15:14:27,327 Train Loss: 0.0065595, Val Loss: 0.0059594 +2024-11-11 15:14:27,327 Epoch 290/2000 +2024-11-11 15:14:43,423 Current Learning Rate: 0.0042178277 +2024-11-11 15:14:43,423 Train Loss: 0.0061185, Val Loss: 0.0057626 +2024-11-11 15:14:43,423 Epoch 291/2000 +2024-11-11 15:14:58,230 Current Learning Rate: 0.0042954938 +2024-11-11 15:14:58,230 Train Loss: 0.0063215, Val Loss: 0.0056026 +2024-11-11 15:14:58,230 Epoch 292/2000 +2024-11-11 15:15:14,010 Current Learning Rate: 0.0043733338 +2024-11-11 15:15:14,831 Train Loss: 0.0060687, Val Loss: 0.0055891 +2024-11-11 15:15:14,832 Epoch 293/2000 +2024-11-11 15:15:29,437 Current Learning Rate: 0.0044513284 +2024-11-11 15:15:29,438 Train Loss: 0.0068635, Val Loss: 0.0066914 +2024-11-11 15:15:29,438 Epoch 294/2000 +2024-11-11 15:15:45,421 Current Learning Rate: 0.0045294584 +2024-11-11 15:15:45,422 Train Loss: 0.0064227, Val Loss: 0.0056219 +2024-11-11 15:15:45,422 Epoch 295/2000 +2024-11-11 15:16:01,130 Current Learning Rate: 0.0046077045 +2024-11-11 15:16:01,883 Train Loss: 0.0058091, Val Loss: 0.0054769 +2024-11-11 15:16:01,883 Epoch 296/2000 +2024-11-11 15:16:17,237 Current Learning Rate: 0.0046860474 +2024-11-11 15:16:18,313 Train Loss: 0.0060037, Val Loss: 0.0054160 +2024-11-11 15:16:18,313 Epoch 297/2000 +2024-11-11 15:16:34,185 Current Learning Rate: 0.0047644677 +2024-11-11 
15:16:34,186 Train Loss: 0.0059141, Val Loss: 0.0054338 +2024-11-11 15:16:34,187 Epoch 298/2000 +2024-11-11 15:16:48,856 Current Learning Rate: 0.0048429462 +2024-11-11 15:16:48,857 Train Loss: 0.0058849, Val Loss: 0.0055657 +2024-11-11 15:16:48,857 Epoch 299/2000 +2024-11-11 15:17:04,508 Current Learning Rate: 0.0049214634 +2024-11-11 15:17:04,509 Train Loss: 0.0061243, Val Loss: 0.0054230 +2024-11-11 15:17:04,509 Epoch 300/2000 +2024-11-11 15:17:19,857 Current Learning Rate: 0.0050000000 +2024-11-11 15:17:19,858 Train Loss: 0.0058888, Val Loss: 0.0054293 +2024-11-11 15:17:19,858 Epoch 301/2000 +2024-11-11 15:17:35,862 Current Learning Rate: 0.0050785366 +2024-11-11 15:17:35,863 Train Loss: 0.0057903, Val Loss: 0.0056493 +2024-11-11 15:17:35,863 Epoch 302/2000 +2024-11-11 15:17:51,352 Current Learning Rate: 0.0051570538 +2024-11-11 15:17:52,087 Train Loss: 0.0056770, Val Loss: 0.0053373 +2024-11-11 15:17:52,088 Epoch 303/2000 +2024-11-11 15:18:06,363 Current Learning Rate: 0.0052355323 +2024-11-11 15:18:06,364 Train Loss: 0.0060834, Val Loss: 0.0056789 +2024-11-11 15:18:06,364 Epoch 304/2000 +2024-11-11 15:18:21,697 Current Learning Rate: 0.0053139526 +2024-11-11 15:18:21,698 Train Loss: 0.0057881, Val Loss: 0.0055695 +2024-11-11 15:18:21,698 Epoch 305/2000 +2024-11-11 15:18:37,500 Current Learning Rate: 0.0053922955 +2024-11-11 15:18:38,558 Train Loss: 0.0057892, Val Loss: 0.0052579 +2024-11-11 15:18:38,558 Epoch 306/2000 +2024-11-11 15:18:54,840 Current Learning Rate: 0.0054705416 +2024-11-11 15:18:54,841 Train Loss: 0.0056334, Val Loss: 0.0060894 +2024-11-11 15:18:54,841 Epoch 307/2000 +2024-11-11 15:19:11,104 Current Learning Rate: 0.0055486716 +2024-11-11 15:19:11,105 Train Loss: 0.0059123, Val Loss: 0.0059840 +2024-11-11 15:19:11,105 Epoch 308/2000 +2024-11-11 15:19:26,922 Current Learning Rate: 0.0056266662 +2024-11-11 15:19:26,922 Train Loss: 0.0056471, Val Loss: 0.0054598 +2024-11-11 15:19:26,923 Epoch 309/2000 +2024-11-11 15:19:41,710 Current Learning 
Rate: 0.0057045062 +2024-11-11 15:19:41,710 Train Loss: 0.0057048, Val Loss: 0.0055944 +2024-11-11 15:19:41,710 Epoch 310/2000 +2024-11-11 15:19:58,069 Current Learning Rate: 0.0057821723 +2024-11-11 15:19:58,070 Train Loss: 0.0056177, Val Loss: 0.0061200 +2024-11-11 15:19:58,070 Epoch 311/2000 +2024-11-11 15:20:13,596 Current Learning Rate: 0.0058596455 +2024-11-11 15:20:14,350 Train Loss: 0.0056118, Val Loss: 0.0051161 +2024-11-11 15:20:14,350 Epoch 312/2000 +2024-11-11 15:20:29,284 Current Learning Rate: 0.0059369066 +2024-11-11 15:20:29,285 Train Loss: 0.0054531, Val Loss: 0.0070531 +2024-11-11 15:20:29,286 Epoch 313/2000 +2024-11-11 15:20:44,765 Current Learning Rate: 0.0060139365 +2024-11-11 15:20:44,765 Train Loss: 0.0058016, Val Loss: 0.0055967 +2024-11-11 15:20:44,765 Epoch 314/2000 +2024-11-11 15:21:00,523 Current Learning Rate: 0.0060907162 +2024-11-11 15:21:01,227 Train Loss: 0.0054130, Val Loss: 0.0049143 +2024-11-11 15:21:01,227 Epoch 315/2000 +2024-11-11 15:21:15,949 Current Learning Rate: 0.0061672268 +2024-11-11 15:21:15,949 Train Loss: 0.0056265, Val Loss: 0.0052043 +2024-11-11 15:21:15,949 Epoch 316/2000 +2024-11-11 15:21:31,215 Current Learning Rate: 0.0062434494 +2024-11-11 15:21:31,215 Train Loss: 0.0054177, Val Loss: 0.0049175 +2024-11-11 15:21:31,215 Epoch 317/2000 +2024-11-11 15:21:47,057 Current Learning Rate: 0.0063193652 +2024-11-11 15:21:47,060 Train Loss: 0.0052038, Val Loss: 0.0055301 +2024-11-11 15:21:47,066 Epoch 318/2000 +2024-11-11 15:22:03,473 Current Learning Rate: 0.0063949555 +2024-11-11 15:22:03,474 Train Loss: 0.0052530, Val Loss: 0.0049315 +2024-11-11 15:22:03,474 Epoch 319/2000 +2024-11-11 15:22:19,409 Current Learning Rate: 0.0064702016 +2024-11-11 15:22:19,409 Train Loss: 0.0053585, Val Loss: 0.0050218 +2024-11-11 15:22:19,409 Epoch 320/2000 +2024-11-11 15:22:35,355 Current Learning Rate: 0.0065450850 +2024-11-11 15:22:35,356 Train Loss: 0.0053360, Val Loss: 0.0052287 +2024-11-11 15:22:35,357 Epoch 321/2000 +2024-11-11 
15:22:52,864 Current Learning Rate: 0.0066195871 +2024-11-11 15:22:53,914 Train Loss: 0.0052233, Val Loss: 0.0048462 +2024-11-11 15:22:53,915 Epoch 322/2000 +2024-11-11 15:23:10,018 Current Learning Rate: 0.0066936896 +2024-11-11 15:23:10,019 Train Loss: 0.0053973, Val Loss: 0.0054552 +2024-11-11 15:23:10,019 Epoch 323/2000 +2024-11-11 15:23:25,870 Current Learning Rate: 0.0067673742 +2024-11-11 15:23:26,659 Train Loss: 0.0050311, Val Loss: 0.0045520 +2024-11-11 15:23:26,660 Epoch 324/2000 +2024-11-11 15:23:42,273 Current Learning Rate: 0.0068406228 +2024-11-11 15:23:42,274 Train Loss: 0.0051359, Val Loss: 0.0046777 +2024-11-11 15:23:42,275 Epoch 325/2000 +2024-11-11 15:23:58,454 Current Learning Rate: 0.0069134172 +2024-11-11 15:23:58,454 Train Loss: 0.0051051, Val Loss: 0.0048157 +2024-11-11 15:23:58,454 Epoch 326/2000 +2024-11-11 15:24:13,276 Current Learning Rate: 0.0069857395 +2024-11-11 15:24:13,935 Train Loss: 0.0048852, Val Loss: 0.0045308 +2024-11-11 15:24:13,935 Epoch 327/2000 +2024-11-11 15:24:29,332 Current Learning Rate: 0.0070575718 +2024-11-11 15:24:30,019 Train Loss: 0.0048601, Val Loss: 0.0044837 +2024-11-11 15:24:30,019 Epoch 328/2000 +2024-11-11 15:24:44,791 Current Learning Rate: 0.0071288965 +2024-11-11 15:24:44,791 Train Loss: 0.0047553, Val Loss: 0.0045518 +2024-11-11 15:24:44,791 Epoch 329/2000 +2024-11-11 15:25:01,267 Current Learning Rate: 0.0071996958 +2024-11-11 15:25:03,762 Train Loss: 0.0048570, Val Loss: 0.0044149 +2024-11-11 15:25:03,762 Epoch 330/2000 +2024-11-11 15:25:19,017 Current Learning Rate: 0.0072699525 +2024-11-11 15:25:19,018 Train Loss: 0.0048096, Val Loss: 0.0045326 +2024-11-11 15:25:19,018 Epoch 331/2000 +2024-11-11 15:25:35,225 Current Learning Rate: 0.0073396491 +2024-11-11 15:25:35,230 Train Loss: 0.0048643, Val Loss: 0.0046675 +2024-11-11 15:25:35,231 Epoch 332/2000 +2024-11-11 15:25:51,301 Current Learning Rate: 0.0074087684 +2024-11-11 15:25:52,133 Train Loss: 0.0046107, Val Loss: 0.0042613 +2024-11-11 
15:25:52,134 Epoch 333/2000 +2024-11-11 15:26:07,117 Current Learning Rate: 0.0074772933 +2024-11-11 15:26:07,118 Train Loss: 0.0049250, Val Loss: 0.0052920 +2024-11-11 15:26:07,118 Epoch 334/2000 +2024-11-11 15:26:22,818 Current Learning Rate: 0.0075452071 +2024-11-11 15:26:22,819 Train Loss: 0.0047885, Val Loss: 0.0043423 +2024-11-11 15:26:22,820 Epoch 335/2000 +2024-11-11 15:26:39,199 Current Learning Rate: 0.0076124928 +2024-11-11 15:26:39,200 Train Loss: 0.0046367, Val Loss: 0.0043337 +2024-11-11 15:26:39,200 Epoch 336/2000 +2024-11-11 15:26:54,778 Current Learning Rate: 0.0076791340 +2024-11-11 15:26:55,840 Train Loss: 0.0043341, Val Loss: 0.0041157 +2024-11-11 15:26:55,840 Epoch 337/2000 +2024-11-11 15:27:11,824 Current Learning Rate: 0.0077451141 +2024-11-11 15:27:11,825 Train Loss: 0.0045052, Val Loss: 0.0042326 +2024-11-11 15:27:11,826 Epoch 338/2000 +2024-11-11 15:27:27,594 Current Learning Rate: 0.0078104169 +2024-11-11 15:27:28,597 Train Loss: 0.0045867, Val Loss: 0.0040947 +2024-11-11 15:27:28,597 Epoch 339/2000 +2024-11-11 15:27:43,955 Current Learning Rate: 0.0078750263 +2024-11-11 15:27:43,956 Train Loss: 0.0042196, Val Loss: 0.0041421 +2024-11-11 15:27:43,956 Epoch 340/2000 +2024-11-11 15:27:59,980 Current Learning Rate: 0.0079389263 +2024-11-11 15:28:00,772 Train Loss: 0.0041555, Val Loss: 0.0039471 +2024-11-11 15:28:00,773 Epoch 341/2000 +2024-11-11 15:28:15,631 Current Learning Rate: 0.0080021011 +2024-11-11 15:28:15,631 Train Loss: 0.0041709, Val Loss: 0.0042655 +2024-11-11 15:28:15,631 Epoch 342/2000 +2024-11-11 15:28:31,013 Current Learning Rate: 0.0080645353 +2024-11-11 15:28:31,014 Train Loss: 0.0045411, Val Loss: 0.0039864 +2024-11-11 15:28:31,014 Epoch 343/2000 +2024-11-11 15:28:46,956 Current Learning Rate: 0.0081262133 +2024-11-11 15:28:46,956 Train Loss: 0.0043177, Val Loss: 0.0039841 +2024-11-11 15:28:46,956 Epoch 344/2000 +2024-11-11 15:29:02,963 Current Learning Rate: 0.0081871199 +2024-11-11 15:29:02,964 Train Loss: 0.0041448, Val 
Loss: 0.0041350 +2024-11-11 15:29:02,964 Epoch 345/2000 +2024-11-11 15:29:19,665 Current Learning Rate: 0.0082472402 +2024-11-11 15:29:20,507 Train Loss: 0.0041028, Val Loss: 0.0038796 +2024-11-11 15:29:20,508 Epoch 346/2000 +2024-11-11 15:29:36,213 Current Learning Rate: 0.0083065593 +2024-11-11 15:29:36,213 Train Loss: 0.0041301, Val Loss: 0.0040616 +2024-11-11 15:29:36,214 Epoch 347/2000 +2024-11-11 15:29:52,477 Current Learning Rate: 0.0083650626 +2024-11-11 15:29:53,221 Train Loss: 0.0041402, Val Loss: 0.0038527 +2024-11-11 15:29:53,221 Epoch 348/2000 +2024-11-11 15:30:07,524 Current Learning Rate: 0.0084227355 +2024-11-11 15:30:07,524 Train Loss: 0.0040083, Val Loss: 0.0039554 +2024-11-11 15:30:07,525 Epoch 349/2000 +2024-11-11 15:30:23,061 Current Learning Rate: 0.0084795640 +2024-11-11 15:30:23,062 Train Loss: 0.0041215, Val Loss: 0.0038532 +2024-11-11 15:30:23,062 Epoch 350/2000 +2024-11-11 15:30:40,449 Current Learning Rate: 0.0085355339 +2024-11-11 15:30:40,450 Train Loss: 0.0039445, Val Loss: 0.0039694 +2024-11-11 15:30:40,450 Epoch 351/2000 +2024-11-11 15:30:56,072 Current Learning Rate: 0.0085906315 +2024-11-11 15:30:56,872 Train Loss: 0.0041709, Val Loss: 0.0038109 +2024-11-11 15:30:56,872 Epoch 352/2000 +2024-11-11 15:31:12,185 Current Learning Rate: 0.0086448431 +2024-11-11 15:31:12,994 Train Loss: 0.0038993, Val Loss: 0.0037419 +2024-11-11 15:31:12,995 Epoch 353/2000 +2024-11-11 15:31:28,227 Current Learning Rate: 0.0086981555 +2024-11-11 15:31:28,228 Train Loss: 0.0042222, Val Loss: 0.0046172 +2024-11-11 15:31:28,228 Epoch 354/2000 +2024-11-11 15:31:45,014 Current Learning Rate: 0.0087505553 +2024-11-11 15:31:45,015 Train Loss: 0.0041261, Val Loss: 0.0038533 +2024-11-11 15:31:45,015 Epoch 355/2000 +2024-11-11 15:32:00,327 Current Learning Rate: 0.0088020298 +2024-11-11 15:32:01,366 Train Loss: 0.0037649, Val Loss: 0.0036230 +2024-11-11 15:32:01,367 Epoch 356/2000 +2024-11-11 15:32:16,636 Current Learning Rate: 0.0088525662 +2024-11-11 
15:32:16,637 Train Loss: 0.0037887, Val Loss: 0.0039364 +2024-11-11 15:32:16,637 Epoch 357/2000 +2024-11-11 15:32:32,839 Current Learning Rate: 0.0089021520 +2024-11-11 15:32:33,660 Train Loss: 0.0037912, Val Loss: 0.0035895 +2024-11-11 15:32:33,660 Epoch 358/2000 +2024-11-11 15:32:48,544 Current Learning Rate: 0.0089507751 +2024-11-11 15:32:49,341 Train Loss: 0.0037223, Val Loss: 0.0034912 +2024-11-11 15:32:49,342 Epoch 359/2000 +2024-11-11 15:33:04,195 Current Learning Rate: 0.0089984233 +2024-11-11 15:33:05,024 Train Loss: 0.0037459, Val Loss: 0.0034155 +2024-11-11 15:33:05,025 Epoch 360/2000 +2024-11-11 15:33:19,243 Current Learning Rate: 0.0090450850 +2024-11-11 15:33:19,243 Train Loss: 0.0038109, Val Loss: 0.0038005 +2024-11-11 15:33:19,244 Epoch 361/2000 +2024-11-11 15:33:35,441 Current Learning Rate: 0.0090907486 +2024-11-11 15:33:35,442 Train Loss: 0.0037431, Val Loss: 0.0034637 +2024-11-11 15:33:35,442 Epoch 362/2000 +2024-11-11 15:33:52,118 Current Learning Rate: 0.0091354029 +2024-11-11 15:33:52,119 Train Loss: 0.0036233, Val Loss: 0.0037645 +2024-11-11 15:33:52,119 Epoch 363/2000 +2024-11-11 15:34:08,131 Current Learning Rate: 0.0091790368 +2024-11-11 15:34:08,132 Train Loss: 0.0035652, Val Loss: 0.0034323 +2024-11-11 15:34:08,132 Epoch 364/2000 +2024-11-11 15:34:24,458 Current Learning Rate: 0.0092216396 +2024-11-11 15:34:25,437 Train Loss: 0.0034464, Val Loss: 0.0032751 +2024-11-11 15:34:25,438 Epoch 365/2000 +2024-11-11 15:34:40,604 Current Learning Rate: 0.0092632008 +2024-11-11 15:34:40,606 Train Loss: 0.0036464, Val Loss: 0.0034118 +2024-11-11 15:34:40,606 Epoch 366/2000 +2024-11-11 15:34:56,395 Current Learning Rate: 0.0093037101 +2024-11-11 15:34:56,395 Train Loss: 0.0034116, Val Loss: 0.0041605 +2024-11-11 15:34:56,396 Epoch 367/2000 +2024-11-11 15:35:12,169 Current Learning Rate: 0.0093431576 +2024-11-11 15:35:13,266 Train Loss: 0.0033963, Val Loss: 0.0032637 +2024-11-11 15:35:13,267 Epoch 368/2000 +2024-11-11 15:35:28,503 Current Learning 
Rate: 0.0093815334 +2024-11-11 15:35:28,504 Train Loss: 0.0034644, Val Loss: 0.0032756 +2024-11-11 15:35:28,504 Epoch 369/2000 +2024-11-11 15:35:44,770 Current Learning Rate: 0.0094188282 +2024-11-11 15:35:44,771 Train Loss: 0.0032996, Val Loss: 0.0033540 +2024-11-11 15:35:44,771 Epoch 370/2000 +2024-11-11 15:36:01,298 Current Learning Rate: 0.0094550326 +2024-11-11 15:36:01,298 Train Loss: 0.0033698, Val Loss: 0.0033233 +2024-11-11 15:36:01,298 Epoch 371/2000 +2024-11-11 15:36:16,099 Current Learning Rate: 0.0094901379 +2024-11-11 15:36:16,099 Train Loss: 0.0035610, Val Loss: 0.0034147 +2024-11-11 15:36:16,099 Epoch 372/2000 +2024-11-11 15:36:31,398 Current Learning Rate: 0.0095241353 +2024-11-11 15:36:32,187 Train Loss: 0.0035433, Val Loss: 0.0032449 +2024-11-11 15:36:32,187 Epoch 373/2000 +2024-11-11 15:36:48,188 Current Learning Rate: 0.0095570164 +2024-11-11 15:36:48,967 Train Loss: 0.0033498, Val Loss: 0.0032401 +2024-11-11 15:36:48,967 Epoch 374/2000 +2024-11-11 15:37:04,010 Current Learning Rate: 0.0095887731 +2024-11-11 15:37:04,829 Train Loss: 0.0033526, Val Loss: 0.0030753 +2024-11-11 15:37:04,829 Epoch 375/2000 +2024-11-11 15:37:20,099 Current Learning Rate: 0.0096193977 +2024-11-11 15:37:20,100 Train Loss: 0.0032628, Val Loss: 0.0031094 +2024-11-11 15:37:20,100 Epoch 376/2000 +2024-11-11 15:37:35,793 Current Learning Rate: 0.0096488824 +2024-11-11 15:37:36,589 Train Loss: 0.0031918, Val Loss: 0.0030707 +2024-11-11 15:37:36,590 Epoch 377/2000 +2024-11-11 15:37:51,872 Current Learning Rate: 0.0096772202 +2024-11-11 15:37:51,873 Train Loss: 0.0030032, Val Loss: 0.0031136 +2024-11-11 15:37:51,873 Epoch 378/2000 +2024-11-11 15:38:07,306 Current Learning Rate: 0.0097044038 +2024-11-11 15:38:07,306 Train Loss: 0.0031911, Val Loss: 0.0030854 +2024-11-11 15:38:07,307 Epoch 379/2000 +2024-11-11 15:38:22,613 Current Learning Rate: 0.0097304268 +2024-11-11 15:38:22,613 Train Loss: 0.0033952, Val Loss: 0.0031591 +2024-11-11 15:38:22,614 Epoch 380/2000 +2024-11-11 
15:38:38,057 Current Learning Rate: 0.0097552826 +2024-11-11 15:38:38,832 Train Loss: 0.0030825, Val Loss: 0.0030423 +2024-11-11 15:38:38,833 Epoch 381/2000 +2024-11-11 15:38:53,509 Current Learning Rate: 0.0097789651 +2024-11-11 15:38:53,510 Train Loss: 0.0032867, Val Loss: 0.0033022 +2024-11-11 15:38:53,510 Epoch 382/2000 +2024-11-11 15:39:09,111 Current Learning Rate: 0.0098014684 +2024-11-11 15:39:09,867 Train Loss: 0.0031884, Val Loss: 0.0029888 +2024-11-11 15:39:09,868 Epoch 383/2000 +2024-11-11 15:39:24,444 Current Learning Rate: 0.0098227871 +2024-11-11 15:39:24,445 Train Loss: 0.0031045, Val Loss: 0.0031278 +2024-11-11 15:39:24,445 Epoch 384/2000 +2024-11-11 15:39:40,576 Current Learning Rate: 0.0098429158 +2024-11-11 15:39:40,577 Train Loss: 0.0031473, Val Loss: 0.0030145 +2024-11-11 15:39:40,577 Epoch 385/2000 +2024-11-11 15:39:56,625 Current Learning Rate: 0.0098618496 +2024-11-11 15:39:57,505 Train Loss: 0.0030393, Val Loss: 0.0029522 +2024-11-11 15:39:57,505 Epoch 386/2000 +2024-11-11 15:40:13,526 Current Learning Rate: 0.0098795838 +2024-11-11 15:40:14,284 Train Loss: 0.0031005, Val Loss: 0.0029025 +2024-11-11 15:40:14,284 Epoch 387/2000 +2024-11-11 15:40:29,570 Current Learning Rate: 0.0098961141 +2024-11-11 15:40:29,571 Train Loss: 0.0029602, Val Loss: 0.0030039 +2024-11-11 15:40:29,571 Epoch 388/2000 +2024-11-11 15:40:46,762 Current Learning Rate: 0.0099114363 +2024-11-11 15:40:47,634 Train Loss: 0.0029413, Val Loss: 0.0028920 +2024-11-11 15:40:47,634 Epoch 389/2000 +2024-11-11 15:41:03,492 Current Learning Rate: 0.0099255466 +2024-11-11 15:41:03,493 Train Loss: 0.0029506, Val Loss: 0.0029535 +2024-11-11 15:41:03,493 Epoch 390/2000 +2024-11-11 15:41:19,282 Current Learning Rate: 0.0099384417 +2024-11-11 15:41:19,992 Train Loss: 0.0029540, Val Loss: 0.0028542 +2024-11-11 15:41:19,992 Epoch 391/2000 +2024-11-11 15:41:34,271 Current Learning Rate: 0.0099501183 +2024-11-11 15:41:35,082 Train Loss: 0.0027288, Val Loss: 0.0027654 +2024-11-11 
15:41:35,082 Epoch 392/2000 +2024-11-11 15:41:50,359 Current Learning Rate: 0.0099605735 +2024-11-11 15:41:50,360 Train Loss: 0.0029981, Val Loss: 0.0027909 +2024-11-11 15:41:50,360 Epoch 393/2000 +2024-11-11 15:42:06,693 Current Learning Rate: 0.0099698048 +2024-11-11 15:42:06,693 Train Loss: 0.0028139, Val Loss: 0.0027993 +2024-11-11 15:42:06,693 Epoch 394/2000 +2024-11-11 15:42:22,873 Current Learning Rate: 0.0099778098 +2024-11-11 15:42:23,586 Train Loss: 0.0028165, Val Loss: 0.0027495 +2024-11-11 15:42:23,586 Epoch 395/2000 +2024-11-11 15:42:38,952 Current Learning Rate: 0.0099845867 +2024-11-11 15:42:38,953 Train Loss: 0.0027342, Val Loss: 0.0029531 +2024-11-11 15:42:38,953 Epoch 396/2000 +2024-11-11 15:42:55,143 Current Learning Rate: 0.0099901336 +2024-11-11 15:42:55,144 Train Loss: 0.0028151, Val Loss: 0.0028677 +2024-11-11 15:42:55,144 Epoch 397/2000 +2024-11-11 15:43:11,346 Current Learning Rate: 0.0099944494 +2024-11-11 15:43:11,347 Train Loss: 0.0028728, Val Loss: 0.0028663 +2024-11-11 15:43:11,347 Epoch 398/2000 +2024-11-11 15:43:27,473 Current Learning Rate: 0.0099975328 +2024-11-11 15:43:28,501 Train Loss: 0.0027439, Val Loss: 0.0026992 +2024-11-11 15:43:28,502 Epoch 399/2000 +2024-11-11 15:43:43,849 Current Learning Rate: 0.0099993832 +2024-11-11 15:43:43,850 Train Loss: 0.0029023, Val Loss: 0.0027593 +2024-11-11 15:43:43,850 Epoch 400/2000 +2024-11-11 15:44:00,709 Current Learning Rate: 0.0100000000 +2024-11-11 15:44:00,710 Train Loss: 0.0027569, Val Loss: 0.0027231 +2024-11-11 15:44:00,710 Epoch 401/2000 +2024-11-11 15:44:17,753 Current Learning Rate: 0.0099993832 +2024-11-11 15:44:18,592 Train Loss: 0.0027272, Val Loss: 0.0026476 +2024-11-11 15:44:18,593 Epoch 402/2000 +2024-11-11 15:44:33,953 Current Learning Rate: 0.0099975328 +2024-11-11 15:44:33,954 Train Loss: 0.0026140, Val Loss: 0.0026716 +2024-11-11 15:44:33,955 Epoch 403/2000 +2024-11-11 15:44:51,240 Current Learning Rate: 0.0099944494 +2024-11-11 15:44:52,250 Train Loss: 0.0025904, Val 
Loss: 0.0026408 +2024-11-11 15:44:52,251 Epoch 404/2000 +2024-11-11 15:45:07,899 Current Learning Rate: 0.0099901336 +2024-11-11 15:45:07,900 Train Loss: 0.0025123, Val Loss: 0.0027146 +2024-11-11 15:45:07,900 Epoch 405/2000 +2024-11-11 15:45:24,531 Current Learning Rate: 0.0099845867 +2024-11-11 15:45:24,532 Train Loss: 0.0027804, Val Loss: 0.0026874 +2024-11-11 15:45:24,532 Epoch 406/2000 +2024-11-11 15:45:39,425 Current Learning Rate: 0.0099778098 +2024-11-11 15:45:40,239 Train Loss: 0.0026509, Val Loss: 0.0026173 +2024-11-11 15:45:40,239 Epoch 407/2000 +2024-11-11 15:45:56,723 Current Learning Rate: 0.0099698048 +2024-11-11 15:45:57,423 Train Loss: 0.0026102, Val Loss: 0.0025745 +2024-11-11 15:45:57,423 Epoch 408/2000 +2024-11-11 15:46:11,774 Current Learning Rate: 0.0099605735 +2024-11-11 15:46:11,775 Train Loss: 0.0026633, Val Loss: 0.0026304 +2024-11-11 15:46:11,775 Epoch 409/2000 +2024-11-11 15:46:27,259 Current Learning Rate: 0.0099501183 +2024-11-11 15:46:28,016 Train Loss: 0.0027500, Val Loss: 0.0025415 +2024-11-11 15:46:28,017 Epoch 410/2000 +2024-11-11 15:46:42,588 Current Learning Rate: 0.0099384417 +2024-11-11 15:46:42,589 Train Loss: 0.0025426, Val Loss: 0.0025611 +2024-11-11 15:46:42,589 Epoch 411/2000 +2024-11-11 15:46:58,088 Current Learning Rate: 0.0099255466 +2024-11-11 15:46:58,089 Train Loss: 0.0025220, Val Loss: 0.0025420 +2024-11-11 15:46:58,089 Epoch 412/2000 +2024-11-11 15:47:13,169 Current Learning Rate: 0.0099114363 +2024-11-11 15:47:13,169 Train Loss: 0.0027022, Val Loss: 0.0026206 +2024-11-11 15:47:13,169 Epoch 413/2000 +2024-11-11 15:47:29,396 Current Learning Rate: 0.0098961141 +2024-11-11 15:47:30,431 Train Loss: 0.0024756, Val Loss: 0.0024991 +2024-11-11 15:47:30,432 Epoch 414/2000 +2024-11-11 15:47:46,902 Current Learning Rate: 0.0098795838 +2024-11-11 15:47:47,977 Train Loss: 0.0023801, Val Loss: 0.0024905 +2024-11-11 15:47:47,978 Epoch 415/2000 +2024-11-11 15:48:04,159 Current Learning Rate: 0.0098618496 +2024-11-11 
15:48:04,162 Train Loss: 0.0023671, Val Loss: 0.0025034 +2024-11-11 15:48:04,162 Epoch 416/2000 +2024-11-11 15:48:19,640 Current Learning Rate: 0.0098429158 +2024-11-11 15:48:19,641 Train Loss: 0.0026003, Val Loss: 0.0025255 +2024-11-11 15:48:19,641 Epoch 417/2000 +2024-11-11 15:48:34,829 Current Learning Rate: 0.0098227871 +2024-11-11 15:48:35,666 Train Loss: 0.0024705, Val Loss: 0.0024665 +2024-11-11 15:48:35,666 Epoch 418/2000 +2024-11-11 15:48:50,252 Current Learning Rate: 0.0098014684 +2024-11-11 15:48:50,998 Train Loss: 0.0023990, Val Loss: 0.0024348 +2024-11-11 15:48:50,998 Epoch 419/2000 +2024-11-11 15:49:05,824 Current Learning Rate: 0.0097789651 +2024-11-11 15:49:06,610 Train Loss: 0.0024215, Val Loss: 0.0023730 +2024-11-11 15:49:06,610 Epoch 420/2000 +2024-11-11 15:49:21,374 Current Learning Rate: 0.0097552826 +2024-11-11 15:49:21,375 Train Loss: 0.0023986, Val Loss: 0.0025001 +2024-11-11 15:49:21,375 Epoch 421/2000 +2024-11-11 15:49:36,450 Current Learning Rate: 0.0097304268 +2024-11-11 15:49:36,450 Train Loss: 0.0024147, Val Loss: 0.0024293 +2024-11-11 15:49:36,451 Epoch 422/2000 +2024-11-11 15:49:52,287 Current Learning Rate: 0.0097044038 +2024-11-11 15:49:52,288 Train Loss: 0.0026629, Val Loss: 0.0026045 +2024-11-11 15:49:52,288 Epoch 423/2000 +2024-11-11 15:50:08,273 Current Learning Rate: 0.0096772202 +2024-11-11 15:50:08,274 Train Loss: 0.0024435, Val Loss: 0.0026035 +2024-11-11 15:50:08,274 Epoch 424/2000 +2024-11-11 15:50:24,311 Current Learning Rate: 0.0096488824 +2024-11-11 15:50:24,312 Train Loss: 0.0023706, Val Loss: 0.0023908 +2024-11-11 15:50:24,313 Epoch 425/2000 +2024-11-11 15:50:39,933 Current Learning Rate: 0.0096193977 +2024-11-11 15:50:39,934 Train Loss: 0.0023457, Val Loss: 0.0023849 +2024-11-11 15:50:39,934 Epoch 426/2000 +2024-11-11 15:50:55,838 Current Learning Rate: 0.0095887731 +2024-11-11 15:50:55,839 Train Loss: 0.0025702, Val Loss: 0.0023846 +2024-11-11 15:50:55,840 Epoch 427/2000 +2024-11-11 15:51:11,786 Current Learning 
Rate: 0.0095570164 +2024-11-11 15:51:12,601 Train Loss: 0.0022596, Val Loss: 0.0022983 +2024-11-11 15:51:12,601 Epoch 428/2000 +2024-11-11 15:51:27,757 Current Learning Rate: 0.0095241353 +2024-11-11 15:51:27,758 Train Loss: 0.0024092, Val Loss: 0.0023213 +2024-11-11 15:51:27,759 Epoch 429/2000 +2024-11-11 15:51:44,023 Current Learning Rate: 0.0094901379 +2024-11-11 15:51:44,024 Train Loss: 0.0023035, Val Loss: 0.0023651 +2024-11-11 15:51:44,024 Epoch 430/2000 +2024-11-11 15:51:59,926 Current Learning Rate: 0.0094550326 +2024-11-11 15:51:59,927 Train Loss: 0.0022316, Val Loss: 0.0023187 +2024-11-11 15:51:59,927 Epoch 431/2000 +2024-11-11 15:52:16,328 Current Learning Rate: 0.0094188282 +2024-11-11 15:52:17,345 Train Loss: 0.0022601, Val Loss: 0.0022593 +2024-11-11 15:52:17,345 Epoch 432/2000 +2024-11-11 15:52:33,331 Current Learning Rate: 0.0093815334 +2024-11-11 15:52:33,332 Train Loss: 0.0023575, Val Loss: 0.0023146 +2024-11-11 15:52:33,332 Epoch 433/2000 +2024-11-11 15:52:49,105 Current Learning Rate: 0.0093431576 +2024-11-11 15:52:49,106 Train Loss: 0.0022776, Val Loss: 0.0023683 +2024-11-11 15:52:49,107 Epoch 434/2000 +2024-11-11 15:53:04,540 Current Learning Rate: 0.0093037101 +2024-11-11 15:53:04,540 Train Loss: 0.0021982, Val Loss: 0.0023392 +2024-11-11 15:53:04,541 Epoch 435/2000 +2024-11-11 15:53:19,733 Current Learning Rate: 0.0092632008 +2024-11-11 15:53:19,734 Train Loss: 0.0022392, Val Loss: 0.0023363 +2024-11-11 15:53:19,734 Epoch 436/2000 +2024-11-11 15:53:35,784 Current Learning Rate: 0.0092216396 +2024-11-11 15:53:36,549 Train Loss: 0.0021519, Val Loss: 0.0022499 +2024-11-11 15:53:36,550 Epoch 437/2000 +2024-11-11 15:53:51,659 Current Learning Rate: 0.0091790368 +2024-11-11 15:53:52,652 Train Loss: 0.0021430, Val Loss: 0.0022377 +2024-11-11 15:53:52,652 Epoch 438/2000 +2024-11-11 15:54:08,774 Current Learning Rate: 0.0091354029 +2024-11-11 15:54:08,775 Train Loss: 0.0023445, Val Loss: 0.0023493 +2024-11-11 15:54:08,775 Epoch 439/2000 +2024-11-11 
15:54:24,482 Current Learning Rate: 0.0090907486 +2024-11-11 15:54:25,521 Train Loss: 0.0023121, Val Loss: 0.0022053 +2024-11-11 15:54:25,521 Epoch 440/2000 +2024-11-11 15:54:41,306 Current Learning Rate: 0.0090450850 +2024-11-11 15:54:42,079 Train Loss: 0.0023536, Val Loss: 0.0021599 +2024-11-11 15:54:42,079 Epoch 441/2000 +2024-11-11 15:54:57,043 Current Learning Rate: 0.0089984233 +2024-11-11 15:54:57,956 Train Loss: 0.0019961, Val Loss: 0.0020880 +2024-11-11 15:54:57,957 Epoch 442/2000 +2024-11-11 15:55:13,785 Current Learning Rate: 0.0089507751 +2024-11-11 15:55:13,786 Train Loss: 0.0020018, Val Loss: 0.0021359 +2024-11-11 15:55:13,786 Epoch 443/2000 +2024-11-11 15:55:30,641 Current Learning Rate: 0.0089021520 +2024-11-11 15:55:30,642 Train Loss: 0.0023603, Val Loss: 0.0021651 +2024-11-11 15:55:30,643 Epoch 444/2000 +2024-11-11 15:55:45,897 Current Learning Rate: 0.0088525662 +2024-11-11 15:55:45,898 Train Loss: 0.0021467, Val Loss: 0.0021570 +2024-11-11 15:55:45,898 Epoch 445/2000 +2024-11-11 15:56:02,691 Current Learning Rate: 0.0088020298 +2024-11-11 15:56:02,692 Train Loss: 0.0021638, Val Loss: 0.0022031 +2024-11-11 15:56:02,692 Epoch 446/2000 +2024-11-11 15:56:18,638 Current Learning Rate: 0.0087505553 +2024-11-11 15:56:18,639 Train Loss: 0.0019990, Val Loss: 0.0021428 +2024-11-11 15:56:18,639 Epoch 447/2000 +2024-11-11 15:56:34,779 Current Learning Rate: 0.0086981555 +2024-11-11 15:56:34,780 Train Loss: 0.0021922, Val Loss: 0.0022111 +2024-11-11 15:56:34,780 Epoch 448/2000 +2024-11-11 15:56:51,500 Current Learning Rate: 0.0086448431 +2024-11-11 15:56:51,501 Train Loss: 0.0019944, Val Loss: 0.0020895 +2024-11-11 15:56:51,501 Epoch 449/2000 +2024-11-11 15:57:08,394 Current Learning Rate: 0.0085906315 +2024-11-11 15:57:08,395 Train Loss: 0.0020744, Val Loss: 0.0021320 +2024-11-11 15:57:08,395 Epoch 450/2000 +2024-11-11 15:57:24,634 Current Learning Rate: 0.0085355339 +2024-11-11 15:57:24,635 Train Loss: 0.0021858, Val Loss: 0.0022044 +2024-11-11 
15:57:24,636 Epoch 451/2000 +2024-11-11 15:57:40,688 Current Learning Rate: 0.0084795640 +2024-11-11 15:57:41,618 Train Loss: 0.0021739, Val Loss: 0.0020811 +2024-11-11 15:57:41,618 Epoch 452/2000 +2024-11-11 15:57:57,554 Current Learning Rate: 0.0084227355 +2024-11-11 15:57:58,361 Train Loss: 0.0020215, Val Loss: 0.0020513 +2024-11-11 15:57:58,361 Epoch 453/2000 +2024-11-11 15:58:13,493 Current Learning Rate: 0.0083650626 +2024-11-11 15:58:13,494 Train Loss: 0.0021197, Val Loss: 0.0020636 +2024-11-11 15:58:13,494 Epoch 454/2000 +2024-11-11 15:58:29,027 Current Learning Rate: 0.0083065593 +2024-11-11 15:58:29,027 Train Loss: 0.0020417, Val Loss: 0.0020677 +2024-11-11 15:58:29,028 Epoch 455/2000 +2024-11-11 15:58:44,414 Current Learning Rate: 0.0082472402 +2024-11-11 15:58:44,415 Train Loss: 0.0020983, Val Loss: 0.0021948 +2024-11-11 15:58:44,415 Epoch 456/2000 +2024-11-11 15:59:00,094 Current Learning Rate: 0.0081871199 +2024-11-11 15:59:00,095 Train Loss: 0.0019864, Val Loss: 0.0020734 +2024-11-11 15:59:00,095 Epoch 457/2000 +2024-11-11 15:59:15,407 Current Learning Rate: 0.0081262133 +2024-11-11 15:59:15,407 Train Loss: 0.0020466, Val Loss: 0.0020746 +2024-11-11 15:59:15,407 Epoch 458/2000 +2024-11-11 15:59:30,863 Current Learning Rate: 0.0080645353 +2024-11-11 15:59:30,864 Train Loss: 0.0020444, Val Loss: 0.0021021 +2024-11-11 15:59:30,864 Epoch 459/2000 +2024-11-11 15:59:46,313 Current Learning Rate: 0.0080021011 +2024-11-11 15:59:46,313 Train Loss: 0.0021171, Val Loss: 0.0024517 +2024-11-11 15:59:46,313 Epoch 460/2000 +2024-11-11 16:00:01,747 Current Learning Rate: 0.0079389263 +2024-11-11 16:00:04,353 Train Loss: 0.0019752, Val Loss: 0.0020446 +2024-11-11 16:00:04,354 Epoch 461/2000 +2024-11-11 16:00:18,705 Current Learning Rate: 0.0078750263 +2024-11-11 16:00:19,504 Train Loss: 0.0020459, Val Loss: 0.0020055 +2024-11-11 16:00:19,505 Epoch 462/2000 +2024-11-11 16:00:34,124 Current Learning Rate: 0.0078104169 +2024-11-11 16:00:34,913 Train Loss: 0.0021494, Val 
Loss: 0.0020054 +2024-11-11 16:00:34,913 Epoch 463/2000 +2024-11-11 16:00:49,508 Current Learning Rate: 0.0077451141 +2024-11-11 16:00:50,383 Train Loss: 0.0018689, Val Loss: 0.0019831 +2024-11-11 16:00:50,383 Epoch 464/2000 +2024-11-11 16:01:05,465 Current Learning Rate: 0.0076791340 +2024-11-11 16:01:06,264 Train Loss: 0.0019285, Val Loss: 0.0019549 +2024-11-11 16:01:06,264 Epoch 465/2000 +2024-11-11 16:01:21,254 Current Learning Rate: 0.0076124928 +2024-11-11 16:01:21,254 Train Loss: 0.0019481, Val Loss: 0.0021419 +2024-11-11 16:01:21,254 Epoch 466/2000 +2024-11-11 16:01:36,962 Current Learning Rate: 0.0075452071 +2024-11-11 16:01:36,963 Train Loss: 0.0019473, Val Loss: 0.0019738 +2024-11-11 16:01:36,963 Epoch 467/2000 +2024-11-11 16:01:52,832 Current Learning Rate: 0.0074772933 +2024-11-11 16:01:52,833 Train Loss: 0.0017876, Val Loss: 0.0019702 +2024-11-11 16:01:52,833 Epoch 468/2000 +2024-11-11 16:02:09,078 Current Learning Rate: 0.0074087684 +2024-11-11 16:02:09,079 Train Loss: 0.0018833, Val Loss: 0.0020495 +2024-11-11 16:02:09,079 Epoch 469/2000 +2024-11-11 16:02:25,453 Current Learning Rate: 0.0073396491 +2024-11-11 16:02:26,538 Train Loss: 0.0019062, Val Loss: 0.0019353 +2024-11-11 16:02:26,539 Epoch 470/2000 +2024-11-11 16:02:42,158 Current Learning Rate: 0.0072699525 +2024-11-11 16:02:42,158 Train Loss: 0.0019746, Val Loss: 0.0020509 +2024-11-11 16:02:42,159 Epoch 471/2000 +2024-11-11 16:02:58,381 Current Learning Rate: 0.0071996958 +2024-11-11 16:02:59,207 Train Loss: 0.0021009, Val Loss: 0.0019330 +2024-11-11 16:02:59,208 Epoch 472/2000 +2024-11-11 16:03:14,271 Current Learning Rate: 0.0071288965 +2024-11-11 16:03:14,272 Train Loss: 0.0019569, Val Loss: 0.0019589 +2024-11-11 16:03:14,273 Epoch 473/2000 +2024-11-11 16:03:31,044 Current Learning Rate: 0.0070575718 +2024-11-11 16:03:31,044 Train Loss: 0.0019271, Val Loss: 0.0019746 +2024-11-11 16:03:31,045 Epoch 474/2000 +2024-11-11 16:03:46,930 Current Learning Rate: 0.0069857395 +2024-11-11 
16:03:47,804 Train Loss: 0.0018458, Val Loss: 0.0019274 +2024-11-11 16:03:47,804 Epoch 475/2000 +2024-11-11 16:04:03,052 Current Learning Rate: 0.0069134172 +2024-11-11 16:04:04,410 Train Loss: 0.0018403, Val Loss: 0.0018845 +2024-11-11 16:04:04,410 Epoch 476/2000 +2024-11-11 16:04:19,964 Current Learning Rate: 0.0068406228 +2024-11-11 16:04:19,965 Train Loss: 0.0017707, Val Loss: 0.0018862 +2024-11-11 16:04:19,965 Epoch 477/2000 +2024-11-11 16:04:35,230 Current Learning Rate: 0.0067673742 +2024-11-11 16:04:36,004 Train Loss: 0.0017440, Val Loss: 0.0018785 +2024-11-11 16:04:36,004 Epoch 478/2000 +2024-11-11 16:04:51,096 Current Learning Rate: 0.0066936896 +2024-11-11 16:04:51,097 Train Loss: 0.0019223, Val Loss: 0.0019294 +2024-11-11 16:04:51,098 Epoch 479/2000 +2024-11-11 16:05:06,516 Current Learning Rate: 0.0066195871 +2024-11-11 16:05:07,498 Train Loss: 0.0018153, Val Loss: 0.0018524 +2024-11-11 16:05:07,498 Epoch 480/2000 +2024-11-11 16:05:23,821 Current Learning Rate: 0.0065450850 +2024-11-11 16:05:23,822 Train Loss: 0.0018126, Val Loss: 0.0018859 +2024-11-11 16:05:23,822 Epoch 481/2000 +2024-11-11 16:05:40,373 Current Learning Rate: 0.0064702016 +2024-11-11 16:05:40,373 Train Loss: 0.0017999, Val Loss: 0.0018556 +2024-11-11 16:05:40,373 Epoch 482/2000 +2024-11-11 16:05:56,273 Current Learning Rate: 0.0063949555 +2024-11-11 16:05:57,308 Train Loss: 0.0016655, Val Loss: 0.0018388 +2024-11-11 16:05:57,308 Epoch 483/2000 +2024-11-11 16:06:12,362 Current Learning Rate: 0.0063193652 +2024-11-11 16:06:12,363 Train Loss: 0.0019307, Val Loss: 0.0019162 +2024-11-11 16:06:12,363 Epoch 484/2000 +2024-11-11 16:06:27,663 Current Learning Rate: 0.0062434494 +2024-11-11 16:06:28,418 Train Loss: 0.0017061, Val Loss: 0.0018284 +2024-11-11 16:06:28,419 Epoch 485/2000 +2024-11-11 16:06:43,503 Current Learning Rate: 0.0061672268 +2024-11-11 16:06:43,504 Train Loss: 0.0017286, Val Loss: 0.0018455 +2024-11-11 16:06:43,504 Epoch 486/2000 +2024-11-11 16:06:59,204 Current Learning 
Rate: 0.0060907162 +2024-11-11 16:06:59,205 Train Loss: 0.0019324, Val Loss: 0.0019866 +2024-11-11 16:06:59,205 Epoch 487/2000 +2024-11-11 16:07:14,745 Current Learning Rate: 0.0060139365 +2024-11-11 16:07:15,568 Train Loss: 0.0017491, Val Loss: 0.0018149 +2024-11-11 16:07:15,568 Epoch 488/2000 +2024-11-11 16:07:29,955 Current Learning Rate: 0.0059369066 +2024-11-11 16:07:29,957 Train Loss: 0.0019003, Val Loss: 0.0019061 +2024-11-11 16:07:29,957 Epoch 489/2000 +2024-11-11 16:07:45,685 Current Learning Rate: 0.0058596455 +2024-11-11 16:07:45,685 Train Loss: 0.0018472, Val Loss: 0.0018604 +2024-11-11 16:07:45,685 Epoch 490/2000 +2024-11-11 16:08:01,170 Current Learning Rate: 0.0057821723 +2024-11-11 16:08:01,867 Train Loss: 0.0017701, Val Loss: 0.0017785 +2024-11-11 16:08:01,868 Epoch 491/2000 +2024-11-11 16:08:16,907 Current Learning Rate: 0.0057045062 +2024-11-11 16:08:16,908 Train Loss: 0.0018862, Val Loss: 0.0018500 +2024-11-11 16:08:16,908 Epoch 492/2000 +2024-11-11 16:08:32,207 Current Learning Rate: 0.0056266662 +2024-11-11 16:08:32,207 Train Loss: 0.0019619, Val Loss: 0.0018138 +2024-11-11 16:08:32,207 Epoch 493/2000 +2024-11-11 16:08:47,512 Current Learning Rate: 0.0055486716 +2024-11-11 16:08:47,513 Train Loss: 0.0018586, Val Loss: 0.0018043 +2024-11-11 16:08:47,513 Epoch 494/2000 +2024-11-11 16:09:03,379 Current Learning Rate: 0.0054705416 +2024-11-11 16:09:03,379 Train Loss: 0.0018664, Val Loss: 0.0018559 +2024-11-11 16:09:03,380 Epoch 495/2000 +2024-11-11 16:09:19,344 Current Learning Rate: 0.0053922955 +2024-11-11 16:09:20,380 Train Loss: 0.0019069, Val Loss: 0.0017716 +2024-11-11 16:09:20,380 Epoch 496/2000 +2024-11-11 16:09:36,318 Current Learning Rate: 0.0053139526 +2024-11-11 16:09:37,269 Train Loss: 0.0015875, Val Loss: 0.0017484 +2024-11-11 16:09:37,269 Epoch 497/2000 +2024-11-11 16:09:52,623 Current Learning Rate: 0.0052355323 +2024-11-11 16:09:52,624 Train Loss: 0.0017934, Val Loss: 0.0018129 +2024-11-11 16:09:52,624 Epoch 498/2000 +2024-11-11 
16:10:08,125 Current Learning Rate: 0.0051570538 +2024-11-11 16:10:08,125 Train Loss: 0.0017281, Val Loss: 0.0017944 +2024-11-11 16:10:08,125 Epoch 499/2000 +2024-11-11 16:10:23,982 Current Learning Rate: 0.0050785366 +2024-11-11 16:10:23,982 Train Loss: 0.0017292, Val Loss: 0.0017568 +2024-11-11 16:10:23,983 Epoch 500/2000 +2024-11-11 16:10:40,212 Current Learning Rate: 0.0050000000 +2024-11-11 16:10:41,256 Train Loss: 0.0015728, Val Loss: 0.0017277 +2024-11-11 16:10:41,256 Epoch 501/2000 +2024-11-11 16:10:57,505 Current Learning Rate: 0.0049214634 +2024-11-11 16:10:57,506 Train Loss: 0.0016176, Val Loss: 0.0017323 +2024-11-11 16:10:57,507 Epoch 502/2000 +2024-11-11 16:11:13,288 Current Learning Rate: 0.0048429462 +2024-11-11 16:11:13,289 Train Loss: 0.0016708, Val Loss: 0.0017368 +2024-11-11 16:11:13,289 Epoch 503/2000 +2024-11-11 16:11:28,709 Current Learning Rate: 0.0047644677 +2024-11-11 16:11:29,408 Train Loss: 0.0017153, Val Loss: 0.0017276 +2024-11-11 16:11:29,408 Epoch 504/2000 +2024-11-11 16:11:43,653 Current Learning Rate: 0.0046860474 +2024-11-11 16:11:44,407 Train Loss: 0.0016124, Val Loss: 0.0017100 +2024-11-11 16:11:44,408 Epoch 505/2000 +2024-11-11 16:11:59,632 Current Learning Rate: 0.0046077045 +2024-11-11 16:12:00,691 Train Loss: 0.0015919, Val Loss: 0.0017059 +2024-11-11 16:12:00,692 Epoch 506/2000 +2024-11-11 16:12:15,848 Current Learning Rate: 0.0045294584 +2024-11-11 16:12:15,848 Train Loss: 0.0017435, Val Loss: 0.0017152 +2024-11-11 16:12:15,849 Epoch 507/2000 +2024-11-11 16:12:31,233 Current Learning Rate: 0.0044513284 +2024-11-11 16:12:31,233 Train Loss: 0.0015542, Val Loss: 0.0017474 +2024-11-11 16:12:31,233 Epoch 508/2000 +2024-11-11 16:12:47,634 Current Learning Rate: 0.0043733338 +2024-11-11 16:12:47,634 Train Loss: 0.0017828, Val Loss: 0.0017657 +2024-11-11 16:12:47,634 Epoch 509/2000 +2024-11-11 16:13:03,377 Current Learning Rate: 0.0042954938 +2024-11-11 16:13:05,795 Train Loss: 0.0016038, Val Loss: 0.0016854 +2024-11-11 
16:13:05,795 Epoch 510/2000 +2024-11-11 16:13:20,775 Current Learning Rate: 0.0042178277 +2024-11-11 16:13:21,841 Train Loss: 0.0016788, Val Loss: 0.0016779 +2024-11-11 16:13:21,842 Epoch 511/2000 +2024-11-11 16:13:37,672 Current Learning Rate: 0.0041403545 +2024-11-11 16:13:38,760 Train Loss: 0.0016675, Val Loss: 0.0016651 +2024-11-11 16:13:38,760 Epoch 512/2000 +2024-11-11 16:13:54,901 Current Learning Rate: 0.0040630934 +2024-11-11 16:13:54,902 Train Loss: 0.0017742, Val Loss: 0.0016794 +2024-11-11 16:13:54,902 Epoch 513/2000 +2024-11-11 16:14:10,716 Current Learning Rate: 0.0039860635 +2024-11-11 16:14:11,598 Train Loss: 0.0015026, Val Loss: 0.0016478 +2024-11-11 16:14:11,598 Epoch 514/2000 +2024-11-11 16:14:26,196 Current Learning Rate: 0.0039092838 +2024-11-11 16:14:26,197 Train Loss: 0.0016885, Val Loss: 0.0017014 +2024-11-11 16:14:26,197 Epoch 515/2000 +2024-11-11 16:14:41,627 Current Learning Rate: 0.0038327732 +2024-11-11 16:14:41,627 Train Loss: 0.0017836, Val Loss: 0.0018343 +2024-11-11 16:14:41,627 Epoch 516/2000 +2024-11-11 16:14:57,435 Current Learning Rate: 0.0037565506 +2024-11-11 16:14:57,435 Train Loss: 0.0015580, Val Loss: 0.0016799 +2024-11-11 16:14:57,435 Epoch 517/2000 +2024-11-11 16:15:12,908 Current Learning Rate: 0.0036806348 +2024-11-11 16:15:12,908 Train Loss: 0.0015340, Val Loss: 0.0016495 +2024-11-11 16:15:12,908 Epoch 518/2000 +2024-11-11 16:15:28,391 Current Learning Rate: 0.0036050445 +2024-11-11 16:15:28,392 Train Loss: 0.0015838, Val Loss: 0.0016501 +2024-11-11 16:15:28,392 Epoch 519/2000 +2024-11-11 16:15:44,020 Current Learning Rate: 0.0035297984 +2024-11-11 16:15:44,787 Train Loss: 0.0015158, Val Loss: 0.0016460 +2024-11-11 16:15:44,788 Epoch 520/2000 +2024-11-11 16:15:59,636 Current Learning Rate: 0.0034549150 +2024-11-11 16:15:59,637 Train Loss: 0.0016864, Val Loss: 0.0016732 +2024-11-11 16:15:59,637 Epoch 521/2000 +2024-11-11 16:16:16,119 Current Learning Rate: 0.0033804129 +2024-11-11 16:16:16,119 Train Loss: 0.0015563, Val 
Loss: 0.0016919 +2024-11-11 16:16:16,120 Epoch 522/2000 +2024-11-11 16:16:31,722 Current Learning Rate: 0.0033063104 +2024-11-11 16:16:31,722 Train Loss: 0.0015584, Val Loss: 0.0016514 +2024-11-11 16:16:31,722 Epoch 523/2000 +2024-11-11 16:16:48,232 Current Learning Rate: 0.0032326258 +2024-11-11 16:16:48,233 Train Loss: 0.0014985, Val Loss: 0.0016575 +2024-11-11 16:16:48,233 Epoch 524/2000 +2024-11-11 16:17:04,498 Current Learning Rate: 0.0031593772 +2024-11-11 16:17:04,498 Train Loss: 0.0015001, Val Loss: 0.0016723 +2024-11-11 16:17:04,498 Epoch 525/2000 +2024-11-11 16:17:19,873 Current Learning Rate: 0.0030865828 +2024-11-11 16:17:20,731 Train Loss: 0.0015969, Val Loss: 0.0016347 +2024-11-11 16:17:20,731 Epoch 526/2000 +2024-11-11 16:17:35,853 Current Learning Rate: 0.0030142605 +2024-11-11 16:17:36,882 Train Loss: 0.0015379, Val Loss: 0.0016232 +2024-11-11 16:17:36,883 Epoch 527/2000 +2024-11-11 16:17:52,338 Current Learning Rate: 0.0029424282 +2024-11-11 16:17:53,220 Train Loss: 0.0014947, Val Loss: 0.0016079 +2024-11-11 16:17:53,220 Epoch 528/2000 +2024-11-11 16:18:08,765 Current Learning Rate: 0.0028711035 +2024-11-11 16:18:08,767 Train Loss: 0.0017007, Val Loss: 0.0016654 +2024-11-11 16:18:08,767 Epoch 529/2000 +2024-11-11 16:18:25,173 Current Learning Rate: 0.0028003042 +2024-11-11 16:18:25,174 Train Loss: 0.0015099, Val Loss: 0.0016081 +2024-11-11 16:18:25,174 Epoch 530/2000 +2024-11-11 16:18:40,740 Current Learning Rate: 0.0027300475 +2024-11-11 16:18:41,493 Train Loss: 0.0015892, Val Loss: 0.0015988 +2024-11-11 16:18:41,493 Epoch 531/2000 +2024-11-11 16:18:56,898 Current Learning Rate: 0.0026603509 +2024-11-11 16:18:57,683 Train Loss: 0.0014729, Val Loss: 0.0015968 +2024-11-11 16:18:57,684 Epoch 532/2000 +2024-11-11 16:19:13,202 Current Learning Rate: 0.0025912316 +2024-11-11 16:19:13,203 Train Loss: 0.0015314, Val Loss: 0.0015976 +2024-11-11 16:19:13,203 Epoch 533/2000 +2024-11-11 16:19:29,360 Current Learning Rate: 0.0025227067 +2024-11-11 
16:19:30,177 Train Loss: 0.0014599, Val Loss: 0.0015746 +2024-11-11 16:19:30,177 Epoch 534/2000 +2024-11-11 16:19:45,423 Current Learning Rate: 0.0024547929 +2024-11-11 16:19:45,424 Train Loss: 0.0015706, Val Loss: 0.0015865 +2024-11-11 16:19:45,424 Epoch 535/2000 +2024-11-11 16:20:00,908 Current Learning Rate: 0.0023875072 +2024-11-11 16:20:00,909 Train Loss: 0.0015245, Val Loss: 0.0015782 +2024-11-11 16:20:00,909 Epoch 536/2000 +2024-11-11 16:20:16,538 Current Learning Rate: 0.0023208660 +2024-11-11 16:20:17,281 Train Loss: 0.0014032, Val Loss: 0.0015686 +2024-11-11 16:20:17,281 Epoch 537/2000 +2024-11-11 16:20:32,061 Current Learning Rate: 0.0022548859 +2024-11-11 16:20:32,062 Train Loss: 0.0015010, Val Loss: 0.0015706 +2024-11-11 16:20:32,062 Epoch 538/2000 +2024-11-11 16:20:47,831 Current Learning Rate: 0.0021895831 +2024-11-11 16:20:47,832 Train Loss: 0.0014061, Val Loss: 0.0015709 +2024-11-11 16:20:47,832 Epoch 539/2000 +2024-11-11 16:21:04,394 Current Learning Rate: 0.0021249737 +2024-11-11 16:21:04,394 Train Loss: 0.0014642, Val Loss: 0.0015716 +2024-11-11 16:21:04,395 Epoch 540/2000 +2024-11-11 16:21:20,644 Current Learning Rate: 0.0020610737 +2024-11-11 16:21:20,644 Train Loss: 0.0015021, Val Loss: 0.0016078 +2024-11-11 16:21:20,645 Epoch 541/2000 +2024-11-11 16:21:36,011 Current Learning Rate: 0.0019978989 +2024-11-11 16:21:36,011 Train Loss: 0.0014069, Val Loss: 0.0015928 +2024-11-11 16:21:36,012 Epoch 542/2000 +2024-11-11 16:21:52,204 Current Learning Rate: 0.0019354647 +2024-11-11 16:21:52,936 Train Loss: 0.0015883, Val Loss: 0.0015588 +2024-11-11 16:21:52,936 Epoch 543/2000 +2024-11-11 16:22:07,769 Current Learning Rate: 0.0018737867 +2024-11-11 16:22:07,770 Train Loss: 0.0014828, Val Loss: 0.0015647 +2024-11-11 16:22:07,770 Epoch 544/2000 +2024-11-11 16:22:24,511 Current Learning Rate: 0.0018128801 +2024-11-11 16:22:24,511 Train Loss: 0.0014293, Val Loss: 0.0015666 +2024-11-11 16:22:24,511 Epoch 545/2000 +2024-11-11 16:22:40,551 Current Learning 
Rate: 0.0017527598 +2024-11-11 16:22:41,294 Train Loss: 0.0013610, Val Loss: 0.0015508 +2024-11-11 16:22:41,294 Epoch 546/2000 +2024-11-11 16:22:57,430 Current Learning Rate: 0.0016934407 +2024-11-11 16:22:58,195 Train Loss: 0.0016783, Val Loss: 0.0015387 +2024-11-11 16:22:58,195 Epoch 547/2000 +2024-11-11 16:23:13,081 Current Learning Rate: 0.0016349374 +2024-11-11 16:23:13,082 Train Loss: 0.0014395, Val Loss: 0.0015399 +2024-11-11 16:23:13,082 Epoch 548/2000 +2024-11-11 16:23:29,144 Current Learning Rate: 0.0015772645 +2024-11-11 16:23:29,144 Train Loss: 0.0015553, Val Loss: 0.0015414 +2024-11-11 16:23:29,144 Epoch 549/2000 +2024-11-11 16:23:45,321 Current Learning Rate: 0.0015204360 +2024-11-11 16:23:46,052 Train Loss: 0.0015024, Val Loss: 0.0015371 +2024-11-11 16:23:46,052 Epoch 550/2000 +2024-11-11 16:24:01,416 Current Learning Rate: 0.0014644661 +2024-11-11 16:24:05,218 Train Loss: 0.0014474, Val Loss: 0.0015369 +2024-11-11 16:24:05,219 Epoch 551/2000 +2024-11-11 16:24:19,288 Current Learning Rate: 0.0014093685 +2024-11-11 16:24:19,289 Train Loss: 0.0013989, Val Loss: 0.0015375 +2024-11-11 16:24:19,290 Epoch 552/2000 +2024-11-11 16:24:34,434 Current Learning Rate: 0.0013551569 +2024-11-11 16:24:35,144 Train Loss: 0.0014447, Val Loss: 0.0015220 +2024-11-11 16:24:35,145 Epoch 553/2000 +2024-11-11 16:24:49,936 Current Learning Rate: 0.0013018445 +2024-11-11 16:24:50,707 Train Loss: 0.0014534, Val Loss: 0.0015164 +2024-11-11 16:24:50,707 Epoch 554/2000 +2024-11-11 16:25:05,563 Current Learning Rate: 0.0012494447 +2024-11-11 16:25:06,451 Train Loss: 0.0013869, Val Loss: 0.0015128 +2024-11-11 16:25:06,451 Epoch 555/2000 +2024-11-11 16:25:21,363 Current Learning Rate: 0.0011979702 +2024-11-11 16:25:22,177 Train Loss: 0.0013326, Val Loss: 0.0015067 +2024-11-11 16:25:22,177 Epoch 556/2000 +2024-11-11 16:25:37,131 Current Learning Rate: 0.0011474338 +2024-11-11 16:25:37,132 Train Loss: 0.0013669, Val Loss: 0.0015067 +2024-11-11 16:25:37,132 Epoch 557/2000 +2024-11-11 
16:25:52,878 Current Learning Rate: 0.0010978480 +2024-11-11 16:25:53,583 Train Loss: 0.0013728, Val Loss: 0.0015037 +2024-11-11 16:25:53,583 Epoch 558/2000 +2024-11-11 16:26:08,387 Current Learning Rate: 0.0010492249 +2024-11-11 16:26:09,163 Train Loss: 0.0013686, Val Loss: 0.0015006 +2024-11-11 16:26:09,163 Epoch 559/2000 +2024-11-11 16:26:25,106 Current Learning Rate: 0.0010015767 +2024-11-11 16:26:26,082 Train Loss: 0.0014711, Val Loss: 0.0014985 +2024-11-11 16:26:26,082 Epoch 560/2000 +2024-11-11 16:26:41,589 Current Learning Rate: 0.0009549150 +2024-11-11 16:26:42,643 Train Loss: 0.0015281, Val Loss: 0.0014979 +2024-11-11 16:26:42,644 Epoch 561/2000 +2024-11-11 16:26:58,540 Current Learning Rate: 0.0009092514 +2024-11-11 16:26:59,323 Train Loss: 0.0013669, Val Loss: 0.0014935 +2024-11-11 16:26:59,323 Epoch 562/2000 +2024-11-11 16:27:14,382 Current Learning Rate: 0.0008645971 +2024-11-11 16:27:15,265 Train Loss: 0.0015915, Val Loss: 0.0014933 +2024-11-11 16:27:15,266 Epoch 563/2000 +2024-11-11 16:27:30,403 Current Learning Rate: 0.0008209632 +2024-11-11 16:27:31,160 Train Loss: 0.0014926, Val Loss: 0.0014908 +2024-11-11 16:27:31,160 Epoch 564/2000 +2024-11-11 16:27:45,602 Current Learning Rate: 0.0007783604 +2024-11-11 16:27:46,381 Train Loss: 0.0013564, Val Loss: 0.0014878 +2024-11-11 16:27:46,381 Epoch 565/2000 +2024-11-11 16:28:01,003 Current Learning Rate: 0.0007367992 +2024-11-11 16:28:01,776 Train Loss: 0.0015443, Val Loss: 0.0014873 +2024-11-11 16:28:01,776 Epoch 566/2000 +2024-11-11 16:28:17,254 Current Learning Rate: 0.0006962899 +2024-11-11 16:28:17,979 Train Loss: 0.0014868, Val Loss: 0.0014851 +2024-11-11 16:28:17,980 Epoch 567/2000 +2024-11-11 16:28:32,957 Current Learning Rate: 0.0006568424 +2024-11-11 16:28:33,715 Train Loss: 0.0013553, Val Loss: 0.0014838 +2024-11-11 16:28:33,715 Epoch 568/2000 +2024-11-11 16:28:49,021 Current Learning Rate: 0.0006184666 +2024-11-11 16:28:49,021 Train Loss: 0.0015204, Val Loss: 0.0014845 +2024-11-11 
16:28:49,022 Epoch 569/2000 +2024-11-11 16:29:06,017 Current Learning Rate: 0.0005811718 +2024-11-11 16:29:07,093 Train Loss: 0.0014230, Val Loss: 0.0014805 +2024-11-11 16:29:07,094 Epoch 570/2000 +2024-11-11 16:29:21,762 Current Learning Rate: 0.0005449674 +2024-11-11 16:29:22,454 Train Loss: 0.0013558, Val Loss: 0.0014793 +2024-11-11 16:29:22,454 Epoch 571/2000 +2024-11-11 16:29:38,196 Current Learning Rate: 0.0005098621 +2024-11-11 16:29:38,978 Train Loss: 0.0013530, Val Loss: 0.0014775 +2024-11-11 16:29:38,978 Epoch 572/2000 +2024-11-11 16:29:54,993 Current Learning Rate: 0.0004758647 +2024-11-11 16:29:55,770 Train Loss: 0.0013935, Val Loss: 0.0014764 +2024-11-11 16:29:55,770 Epoch 573/2000 +2024-11-11 16:30:10,327 Current Learning Rate: 0.0004429836 +2024-11-11 16:30:11,045 Train Loss: 0.0014663, Val Loss: 0.0014754 +2024-11-11 16:30:11,045 Epoch 574/2000 +2024-11-11 16:30:26,011 Current Learning Rate: 0.0004112269 +2024-11-11 16:30:26,699 Train Loss: 0.0013592, Val Loss: 0.0014747 +2024-11-11 16:30:26,699 Epoch 575/2000 +2024-11-11 16:30:41,293 Current Learning Rate: 0.0003806023 +2024-11-11 16:30:42,035 Train Loss: 0.0013511, Val Loss: 0.0014742 +2024-11-11 16:30:42,035 Epoch 576/2000 +2024-11-11 16:30:57,009 Current Learning Rate: 0.0003511176 +2024-11-11 16:30:57,711 Train Loss: 0.0014025, Val Loss: 0.0014733 +2024-11-11 16:30:57,711 Epoch 577/2000 +2024-11-11 16:31:12,475 Current Learning Rate: 0.0003227798 +2024-11-11 16:31:13,269 Train Loss: 0.0014400, Val Loss: 0.0014729 +2024-11-11 16:31:13,270 Epoch 578/2000 +2024-11-11 16:31:29,072 Current Learning Rate: 0.0002955962 +2024-11-11 16:31:29,903 Train Loss: 0.0014251, Val Loss: 0.0014718 +2024-11-11 16:31:29,904 Epoch 579/2000 +2024-11-11 16:31:45,246 Current Learning Rate: 0.0002695732 +2024-11-11 16:31:46,282 Train Loss: 0.0015810, Val Loss: 0.0014708 +2024-11-11 16:31:46,283 Epoch 580/2000 +2024-11-11 16:32:01,929 Current Learning Rate: 0.0002447174 +2024-11-11 16:32:05,203 Train Loss: 0.0013351, Val 
Loss: 0.0014699 +2024-11-11 16:32:05,203 Epoch 581/2000 +2024-11-11 16:32:20,290 Current Learning Rate: 0.0002210349 +2024-11-11 16:32:20,291 Train Loss: 0.0014548, Val Loss: 0.0014701 +2024-11-11 16:32:20,291 Epoch 582/2000 +2024-11-11 16:32:35,617 Current Learning Rate: 0.0001985316 +2024-11-11 16:32:36,381 Train Loss: 0.0013962, Val Loss: 0.0014694 +2024-11-11 16:32:36,381 Epoch 583/2000 +2024-11-11 16:32:51,372 Current Learning Rate: 0.0001772129 +2024-11-11 16:32:51,372 Train Loss: 0.0015118, Val Loss: 0.0014702 +2024-11-11 16:32:51,373 Epoch 584/2000 +2024-11-11 16:33:07,100 Current Learning Rate: 0.0001570842 +2024-11-11 16:33:07,101 Train Loss: 0.0015893, Val Loss: 0.0014707 +2024-11-11 16:33:07,101 Epoch 585/2000 +2024-11-11 16:33:22,612 Current Learning Rate: 0.0001381504 +2024-11-11 16:33:22,612 Train Loss: 0.0013931, Val Loss: 0.0014706 +2024-11-11 16:33:22,612 Epoch 586/2000 +2024-11-11 16:33:38,755 Current Learning Rate: 0.0001204162 +2024-11-11 16:33:39,479 Train Loss: 0.0013333, Val Loss: 0.0014691 +2024-11-11 16:33:39,480 Epoch 587/2000 +2024-11-11 16:33:54,944 Current Learning Rate: 0.0001038859 +2024-11-11 16:33:55,726 Train Loss: 0.0014591, Val Loss: 0.0014679 +2024-11-11 16:33:55,727 Epoch 588/2000 +2024-11-11 16:34:10,394 Current Learning Rate: 0.0000885637 +2024-11-11 16:34:11,158 Train Loss: 0.0012854, Val Loss: 0.0014673 +2024-11-11 16:34:11,158 Epoch 589/2000 +2024-11-11 16:34:26,034 Current Learning Rate: 0.0000744534 +2024-11-11 16:34:26,780 Train Loss: 0.0013903, Val Loss: 0.0014670 +2024-11-11 16:34:26,781 Epoch 590/2000 +2024-11-11 16:34:41,404 Current Learning Rate: 0.0000615583 +2024-11-11 16:34:42,162 Train Loss: 0.0015155, Val Loss: 0.0014668 +2024-11-11 16:34:42,163 Epoch 591/2000 +2024-11-11 16:34:56,787 Current Learning Rate: 0.0000498817 +2024-11-11 16:34:57,567 Train Loss: 0.0014036, Val Loss: 0.0014665 +2024-11-11 16:34:57,568 Epoch 592/2000 +2024-11-11 16:35:11,852 Current Learning Rate: 0.0000394265 +2024-11-11 
16:35:11,853 Train Loss: 0.0014947, Val Loss: 0.0014666 +2024-11-11 16:35:11,853 Epoch 593/2000 +2024-11-11 16:35:26,980 Current Learning Rate: 0.0000301952 +2024-11-11 16:35:27,731 Train Loss: 0.0013346, Val Loss: 0.0014663 +2024-11-11 16:35:27,731 Epoch 594/2000 +2024-11-11 16:35:42,223 Current Learning Rate: 0.0000221902 +2024-11-11 16:35:42,927 Train Loss: 0.0014882, Val Loss: 0.0014663 +2024-11-11 16:35:42,927 Epoch 595/2000 +2024-11-11 16:35:58,503 Current Learning Rate: 0.0000154133 +2024-11-11 16:35:59,237 Train Loss: 0.0014868, Val Loss: 0.0014662 +2024-11-11 16:35:59,237 Epoch 596/2000 +2024-11-11 16:36:15,399 Current Learning Rate: 0.0000098664 +2024-11-11 16:36:16,405 Train Loss: 0.0013868, Val Loss: 0.0014660 +2024-11-11 16:36:16,405 Epoch 597/2000 +2024-11-11 16:36:31,701 Current Learning Rate: 0.0000055506 +2024-11-11 16:36:31,702 Train Loss: 0.0013281, Val Loss: 0.0014660 +2024-11-11 16:36:31,702 Epoch 598/2000 +2024-11-11 16:36:47,029 Current Learning Rate: 0.0000024672 +2024-11-11 16:36:47,030 Train Loss: 0.0013248, Val Loss: 0.0014660 +2024-11-11 16:36:47,030 Epoch 599/2000 +2024-11-11 16:37:03,169 Current Learning Rate: 0.0000006168 +2024-11-11 16:37:05,082 Train Loss: 0.0013347, Val Loss: 0.0014660 +2024-11-11 16:37:05,083 Epoch 600/2000 +2024-11-11 16:37:19,616 Current Learning Rate: 0.0000000000 +2024-11-11 16:37:19,617 Train Loss: 0.0013388, Val Loss: 0.0014660 +2024-11-11 16:37:19,617 Epoch 601/2000 +2024-11-11 16:37:35,108 Current Learning Rate: 0.0000006168 +2024-11-11 16:37:35,996 Train Loss: 0.0015185, Val Loss: 0.0014659 +2024-11-11 16:37:35,997 Epoch 602/2000 +2024-11-11 16:37:51,057 Current Learning Rate: 0.0000024672 +2024-11-11 16:37:51,830 Train Loss: 0.0014399, Val Loss: 0.0014658 +2024-11-11 16:37:51,830 Epoch 603/2000 +2024-11-11 16:38:07,352 Current Learning Rate: 0.0000055506 +2024-11-11 16:38:07,353 Train Loss: 0.0012863, Val Loss: 0.0014660 +2024-11-11 16:38:07,353 Epoch 604/2000 +2024-11-11 16:38:23,196 Current Learning 
Rate: 0.0000098664 +2024-11-11 16:38:23,196 Train Loss: 0.0013989, Val Loss: 0.0014660 +2024-11-11 16:38:23,197 Epoch 605/2000 +2024-11-11 16:38:38,614 Current Learning Rate: 0.0000154133 +2024-11-11 16:38:38,614 Train Loss: 0.0013243, Val Loss: 0.0014660 +2024-11-11 16:38:38,614 Epoch 606/2000 +2024-11-11 16:38:53,839 Current Learning Rate: 0.0000221902 +2024-11-11 16:38:53,840 Train Loss: 0.0013791, Val Loss: 0.0014659 +2024-11-11 16:38:53,840 Epoch 607/2000 +2024-11-11 16:39:09,180 Current Learning Rate: 0.0000301952 +2024-11-11 16:39:09,181 Train Loss: 0.0012837, Val Loss: 0.0014660 +2024-11-11 16:39:09,181 Epoch 608/2000 +2024-11-11 16:39:25,202 Current Learning Rate: 0.0000394265 +2024-11-11 16:39:25,202 Train Loss: 0.0013361, Val Loss: 0.0014661 +2024-11-11 16:39:25,202 Epoch 609/2000 +2024-11-11 16:39:40,942 Current Learning Rate: 0.0000498817 +2024-11-11 16:39:40,943 Train Loss: 0.0014310, Val Loss: 0.0014659 +2024-11-11 16:39:40,943 Epoch 610/2000 +2024-11-11 16:39:56,618 Current Learning Rate: 0.0000615583 +2024-11-11 16:39:56,618 Train Loss: 0.0013358, Val Loss: 0.0014660 +2024-11-11 16:39:56,618 Epoch 611/2000 +2024-11-11 16:40:12,312 Current Learning Rate: 0.0000744534 +2024-11-11 16:40:13,033 Train Loss: 0.0014237, Val Loss: 0.0014657 +2024-11-11 16:40:13,033 Epoch 612/2000 +2024-11-11 16:40:27,999 Current Learning Rate: 0.0000885637 +2024-11-11 16:40:28,000 Train Loss: 0.0014800, Val Loss: 0.0014659 +2024-11-11 16:40:28,000 Epoch 613/2000 +2024-11-11 16:40:43,907 Current Learning Rate: 0.0001038859 +2024-11-11 16:40:43,908 Train Loss: 0.0013293, Val Loss: 0.0014664 +2024-11-11 16:40:43,908 Epoch 614/2000 +2024-11-11 16:41:00,248 Current Learning Rate: 0.0001204162 +2024-11-11 16:41:00,248 Train Loss: 0.0014150, Val Loss: 0.0014674 +2024-11-11 16:41:00,249 Epoch 615/2000 +2024-11-11 16:41:16,347 Current Learning Rate: 0.0001381504 +2024-11-11 16:41:16,347 Train Loss: 0.0012844, Val Loss: 0.0014670 +2024-11-11 16:41:16,347 Epoch 616/2000 +2024-11-11 
16:41:32,143 Current Learning Rate: 0.0001570842 +2024-11-11 16:41:32,143 Train Loss: 0.0013916, Val Loss: 0.0014657 +2024-11-11 16:41:32,144 Epoch 617/2000 +2024-11-11 16:41:48,156 Current Learning Rate: 0.0001772129 +2024-11-11 16:41:48,888 Train Loss: 0.0014848, Val Loss: 0.0014650 +2024-11-11 16:41:48,888 Epoch 618/2000 +2024-11-11 16:42:03,341 Current Learning Rate: 0.0001985316 +2024-11-11 16:42:03,342 Train Loss: 0.0013309, Val Loss: 0.0014655 +2024-11-11 16:42:03,342 Epoch 619/2000 +2024-11-11 16:42:18,910 Current Learning Rate: 0.0002210349 +2024-11-11 16:42:18,911 Train Loss: 0.0013759, Val Loss: 0.0014656 +2024-11-11 16:42:18,911 Epoch 620/2000 +2024-11-11 16:42:34,685 Current Learning Rate: 0.0002447174 +2024-11-11 16:42:34,685 Train Loss: 0.0013422, Val Loss: 0.0014651 +2024-11-11 16:42:34,685 Epoch 621/2000 +2024-11-11 16:42:49,700 Current Learning Rate: 0.0002695732 +2024-11-11 16:42:49,701 Train Loss: 0.0013875, Val Loss: 0.0014653 +2024-11-11 16:42:49,701 Epoch 622/2000 +2024-11-11 16:43:05,760 Current Learning Rate: 0.0002955962 +2024-11-11 16:43:06,424 Train Loss: 0.0013393, Val Loss: 0.0014647 +2024-11-11 16:43:06,425 Epoch 623/2000 +2024-11-11 16:43:21,992 Current Learning Rate: 0.0003227798 +2024-11-11 16:43:21,992 Train Loss: 0.0013753, Val Loss: 0.0014654 +2024-11-11 16:43:21,993 Epoch 624/2000 +2024-11-11 16:43:37,681 Current Learning Rate: 0.0003511176 +2024-11-11 16:43:38,352 Train Loss: 0.0012840, Val Loss: 0.0014643 +2024-11-11 16:43:38,352 Epoch 625/2000 +2024-11-11 16:43:53,361 Current Learning Rate: 0.0003806023 +2024-11-11 16:43:53,362 Train Loss: 0.0014439, Val Loss: 0.0014646 +2024-11-11 16:43:53,362 Epoch 626/2000 +2024-11-11 16:44:09,429 Current Learning Rate: 0.0004112269 +2024-11-11 16:44:10,150 Train Loss: 0.0013898, Val Loss: 0.0014640 +2024-11-11 16:44:10,150 Epoch 627/2000 +2024-11-11 16:44:25,270 Current Learning Rate: 0.0004429836 +2024-11-11 16:44:25,271 Train Loss: 0.0014299, Val Loss: 0.0014642 +2024-11-11 
16:44:25,271 Epoch 628/2000 +2024-11-11 16:44:41,604 Current Learning Rate: 0.0004758647 +2024-11-11 16:44:41,605 Train Loss: 0.0014359, Val Loss: 0.0014646 +2024-11-11 16:44:41,605 Epoch 629/2000 +2024-11-11 16:44:58,060 Current Learning Rate: 0.0005098621 +2024-11-11 16:44:58,060 Train Loss: 0.0013414, Val Loss: 0.0014663 +2024-11-11 16:44:58,061 Epoch 630/2000 +2024-11-11 16:45:14,057 Current Learning Rate: 0.0005449674 +2024-11-11 16:45:14,057 Train Loss: 0.0012910, Val Loss: 0.0014674 +2024-11-11 16:45:14,058 Epoch 631/2000 +2024-11-11 16:45:29,900 Current Learning Rate: 0.0005811718 +2024-11-11 16:45:29,901 Train Loss: 0.0014344, Val Loss: 0.0014734 +2024-11-11 16:45:29,901 Epoch 632/2000 +2024-11-11 16:45:45,480 Current Learning Rate: 0.0006184666 +2024-11-11 16:45:45,480 Train Loss: 0.0013955, Val Loss: 0.0014720 +2024-11-11 16:45:45,481 Epoch 633/2000 +2024-11-11 16:46:02,147 Current Learning Rate: 0.0006568424 +2024-11-11 16:46:02,148 Train Loss: 0.0012893, Val Loss: 0.0014700 +2024-11-11 16:46:02,148 Epoch 634/2000 +2024-11-11 16:46:17,450 Current Learning Rate: 0.0006962899 +2024-11-11 16:46:17,451 Train Loss: 0.0014054, Val Loss: 0.0014676 +2024-11-11 16:46:17,451 Epoch 635/2000 +2024-11-11 16:46:33,174 Current Learning Rate: 0.0007367992 +2024-11-11 16:46:33,175 Train Loss: 0.0014313, Val Loss: 0.0014680 +2024-11-11 16:46:33,175 Epoch 636/2000 +2024-11-11 16:46:49,428 Current Learning Rate: 0.0007783604 +2024-11-11 16:46:49,428 Train Loss: 0.0013911, Val Loss: 0.0014703 +2024-11-11 16:46:49,428 Epoch 637/2000 +2024-11-11 16:47:04,960 Current Learning Rate: 0.0008209632 +2024-11-11 16:47:05,715 Train Loss: 0.0013219, Val Loss: 0.0014604 +2024-11-11 16:47:05,716 Epoch 638/2000 +2024-11-11 16:47:21,127 Current Learning Rate: 0.0008645971 +2024-11-11 16:47:21,872 Train Loss: 0.0012867, Val Loss: 0.0014600 +2024-11-11 16:47:21,872 Epoch 639/2000 +2024-11-11 16:47:37,088 Current Learning Rate: 0.0009092514 +2024-11-11 16:47:37,089 Train Loss: 0.0015195, Val 
Loss: 0.0014849 +2024-11-11 16:47:37,089 Epoch 640/2000 +2024-11-11 16:47:53,161 Current Learning Rate: 0.0009549150 +2024-11-11 16:47:53,161 Train Loss: 0.0014029, Val Loss: 0.0014726 +2024-11-11 16:47:53,161 Epoch 641/2000 +2024-11-11 16:48:09,991 Current Learning Rate: 0.0010015767 +2024-11-11 16:48:10,764 Train Loss: 0.0012848, Val Loss: 0.0014594 +2024-11-11 16:48:10,764 Epoch 642/2000 +2024-11-11 16:48:25,904 Current Learning Rate: 0.0010492249 +2024-11-11 16:48:25,905 Train Loss: 0.0014437, Val Loss: 0.0014886 +2024-11-11 16:48:25,905 Epoch 643/2000 +2024-11-11 16:48:42,463 Current Learning Rate: 0.0010978480 +2024-11-11 16:48:42,464 Train Loss: 0.0014407, Val Loss: 0.0014649 +2024-11-11 16:48:42,464 Epoch 644/2000 +2024-11-11 16:48:57,818 Current Learning Rate: 0.0011474338 +2024-11-11 16:48:57,819 Train Loss: 0.0013760, Val Loss: 0.0014609 +2024-11-11 16:48:57,819 Epoch 645/2000 +2024-11-11 16:49:13,324 Current Learning Rate: 0.0011979702 +2024-11-11 16:49:13,325 Train Loss: 0.0013695, Val Loss: 0.0015007 +2024-11-11 16:49:13,325 Epoch 646/2000 +2024-11-11 16:49:28,425 Current Learning Rate: 0.0012494447 +2024-11-11 16:49:28,426 Train Loss: 0.0013328, Val Loss: 0.0014894 +2024-11-11 16:49:28,426 Epoch 647/2000 +2024-11-11 16:49:43,877 Current Learning Rate: 0.0013018445 +2024-11-11 16:49:43,878 Train Loss: 0.0013234, Val Loss: 0.0015706 +2024-11-11 16:49:43,878 Epoch 648/2000 +2024-11-11 16:49:59,402 Current Learning Rate: 0.0013551569 +2024-11-11 16:50:00,113 Train Loss: 0.0012977, Val Loss: 0.0014579 +2024-11-11 16:50:00,113 Epoch 649/2000 +2024-11-11 16:50:14,718 Current Learning Rate: 0.0014093685 +2024-11-11 16:50:14,719 Train Loss: 0.0013465, Val Loss: 0.0014898 +2024-11-11 16:50:14,720 Epoch 650/2000 +2024-11-11 16:50:30,729 Current Learning Rate: 0.0014644661 +2024-11-11 16:50:30,729 Train Loss: 0.0013614, Val Loss: 0.0014663 +2024-11-11 16:50:30,730 Epoch 651/2000 +2024-11-11 16:50:47,357 Current Learning Rate: 0.0015204360 +2024-11-11 
16:50:47,357 Train Loss: 0.0014855, Val Loss: 0.0015023 +2024-11-11 16:50:47,358 Epoch 652/2000 +2024-11-11 16:51:03,667 Current Learning Rate: 0.0015772645 +2024-11-11 16:51:03,667 Train Loss: 0.0014394, Val Loss: 0.0015347 +2024-11-11 16:51:03,667 Epoch 653/2000 +2024-11-11 16:51:19,923 Current Learning Rate: 0.0016349374 +2024-11-11 16:51:19,923 Train Loss: 0.0018136, Val Loss: 0.0015625 +2024-11-11 16:51:19,924 Epoch 654/2000 +2024-11-11 16:51:35,867 Current Learning Rate: 0.0016934407 +2024-11-11 16:51:35,867 Train Loss: 0.0014872, Val Loss: 0.0015376 +2024-11-11 16:51:35,867 Epoch 655/2000 +2024-11-11 16:51:51,460 Current Learning Rate: 0.0017527598 +2024-11-11 16:51:51,461 Train Loss: 0.0014274, Val Loss: 0.0014770 +2024-11-11 16:51:51,461 Epoch 656/2000 +2024-11-11 16:52:08,309 Current Learning Rate: 0.0018128801 +2024-11-11 16:52:08,310 Train Loss: 0.0013513, Val Loss: 0.0014603 +2024-11-11 16:52:08,310 Epoch 657/2000 +2024-11-11 16:52:23,949 Current Learning Rate: 0.0018737867 +2024-11-11 16:52:24,645 Train Loss: 0.0013941, Val Loss: 0.0014547 +2024-11-11 16:52:24,645 Epoch 658/2000 +2024-11-11 16:52:40,023 Current Learning Rate: 0.0019354647 +2024-11-11 16:52:40,789 Train Loss: 0.0014043, Val Loss: 0.0014433 +2024-11-11 16:52:40,790 Epoch 659/2000 +2024-11-11 16:52:56,441 Current Learning Rate: 0.0019978989 +2024-11-11 16:52:56,442 Train Loss: 0.0014911, Val Loss: 0.0014573 +2024-11-11 16:52:56,442 Epoch 660/2000 +2024-11-11 16:53:13,614 Current Learning Rate: 0.0020610737 +2024-11-11 16:53:13,615 Train Loss: 0.0013329, Val Loss: 0.0014559 +2024-11-11 16:53:13,615 Epoch 661/2000 +2024-11-11 16:53:29,847 Current Learning Rate: 0.0021249737 +2024-11-11 16:53:29,849 Train Loss: 0.0015343, Val Loss: 0.0014612 +2024-11-11 16:53:29,849 Epoch 662/2000 +2024-11-11 16:53:45,481 Current Learning Rate: 0.0021895831 +2024-11-11 16:53:45,482 Train Loss: 0.0014109, Val Loss: 0.0014702 +2024-11-11 16:53:45,482 Epoch 663/2000 +2024-11-11 16:54:01,231 Current Learning 
Rate: 0.0022548859 +2024-11-11 16:54:01,232 Train Loss: 0.0014110, Val Loss: 0.0014773 +2024-11-11 16:54:01,232 Epoch 664/2000 +2024-11-11 16:54:17,349 Current Learning Rate: 0.0023208660 +2024-11-11 16:54:17,350 Train Loss: 0.0013513, Val Loss: 0.0014659 +2024-11-11 16:54:17,350 Epoch 665/2000 +2024-11-11 16:54:33,349 Current Learning Rate: 0.0023875072 +2024-11-11 16:54:33,350 Train Loss: 0.0013578, Val Loss: 0.0014904 +2024-11-11 16:54:33,350 Epoch 666/2000 +2024-11-11 16:54:49,275 Current Learning Rate: 0.0024547929 +2024-11-11 16:54:49,275 Train Loss: 0.0013009, Val Loss: 0.0014890 +2024-11-11 16:54:49,276 Epoch 667/2000 +2024-11-11 16:55:04,513 Current Learning Rate: 0.0025227067 +2024-11-11 16:55:04,514 Train Loss: 0.0016047, Val Loss: 0.0015879 +2024-11-11 16:55:04,514 Epoch 668/2000 +2024-11-11 16:55:19,894 Current Learning Rate: 0.0025912316 +2024-11-11 16:55:19,895 Train Loss: 0.0015072, Val Loss: 0.0016243 +2024-11-11 16:55:19,895 Epoch 669/2000 +2024-11-11 16:55:35,822 Current Learning Rate: 0.0026603509 +2024-11-11 16:55:35,823 Train Loss: 0.0016115, Val Loss: 0.0015441 +2024-11-11 16:55:35,823 Epoch 670/2000 +2024-11-11 16:55:52,155 Current Learning Rate: 0.0027300475 +2024-11-11 16:55:52,156 Train Loss: 0.0013965, Val Loss: 0.0014989 +2024-11-11 16:55:52,156 Epoch 671/2000 +2024-11-11 16:56:08,921 Current Learning Rate: 0.0028003042 +2024-11-11 16:56:08,922 Train Loss: 0.0014858, Val Loss: 0.0015511 +2024-11-11 16:56:08,922 Epoch 672/2000 +2024-11-11 16:56:24,914 Current Learning Rate: 0.0028711035 +2024-11-11 16:56:24,915 Train Loss: 0.0014088, Val Loss: 0.0014852 +2024-11-11 16:56:24,915 Epoch 673/2000 +2024-11-11 16:56:41,031 Current Learning Rate: 0.0029424282 +2024-11-11 16:56:41,031 Train Loss: 0.0013549, Val Loss: 0.0014781 +2024-11-11 16:56:41,032 Epoch 674/2000 +2024-11-11 16:56:56,604 Current Learning Rate: 0.0030142605 +2024-11-11 16:56:56,605 Train Loss: 0.0013579, Val Loss: 0.0015252 +2024-11-11 16:56:56,605 Epoch 675/2000 +2024-11-11 
16:57:12,803 Current Learning Rate: 0.0030865828 +2024-11-11 16:57:12,805 Train Loss: 0.0015570, Val Loss: 0.0015282 +2024-11-11 16:57:12,805 Epoch 676/2000 +2024-11-11 16:57:28,459 Current Learning Rate: 0.0031593772 +2024-11-11 16:57:28,459 Train Loss: 0.0014531, Val Loss: 0.0014883 +2024-11-11 16:57:28,459 Epoch 677/2000 +2024-11-11 16:57:43,221 Current Learning Rate: 0.0032326258 +2024-11-11 16:57:43,221 Train Loss: 0.0013234, Val Loss: 0.0015019 +2024-11-11 16:57:43,221 Epoch 678/2000 +2024-11-11 16:57:58,533 Current Learning Rate: 0.0033063104 +2024-11-11 16:57:58,534 Train Loss: 0.0013389, Val Loss: 0.0014453 +2024-11-11 16:57:58,534 Epoch 679/2000 +2024-11-11 16:58:14,635 Current Learning Rate: 0.0033804129 +2024-11-11 16:58:15,303 Train Loss: 0.0012721, Val Loss: 0.0014410 +2024-11-11 16:58:15,303 Epoch 680/2000 +2024-11-11 16:58:30,687 Current Learning Rate: 0.0034549150 +2024-11-11 16:58:31,340 Train Loss: 0.0012667, Val Loss: 0.0014372 +2024-11-11 16:58:31,340 Epoch 681/2000 +2024-11-11 16:58:45,805 Current Learning Rate: 0.0035297984 +2024-11-11 16:58:45,806 Train Loss: 0.0013364, Val Loss: 0.0014814 +2024-11-11 16:58:45,806 Epoch 682/2000 +2024-11-11 16:59:01,372 Current Learning Rate: 0.0036050445 +2024-11-11 16:59:01,372 Train Loss: 0.0014830, Val Loss: 0.0014760 +2024-11-11 16:59:01,373 Epoch 683/2000 +2024-11-11 16:59:16,758 Current Learning Rate: 0.0036806348 +2024-11-11 16:59:16,758 Train Loss: 0.0013517, Val Loss: 0.0015127 +2024-11-11 16:59:16,758 Epoch 684/2000 +2024-11-11 16:59:32,008 Current Learning Rate: 0.0037565506 +2024-11-11 16:59:32,008 Train Loss: 0.0015798, Val Loss: 0.0015486 +2024-11-11 16:59:32,009 Epoch 685/2000 +2024-11-11 16:59:47,634 Current Learning Rate: 0.0038327732 +2024-11-11 16:59:47,634 Train Loss: 0.0013792, Val Loss: 0.0016443 +2024-11-11 16:59:47,635 Epoch 686/2000 +2024-11-11 17:00:03,985 Current Learning Rate: 0.0039092838 +2024-11-11 17:00:03,985 Train Loss: 0.0015946, Val Loss: 0.0015486 +2024-11-11 
17:00:03,985 Epoch 687/2000 +2024-11-11 17:00:20,674 Current Learning Rate: 0.0039860635 +2024-11-11 17:00:20,675 Train Loss: 0.0013978, Val Loss: 0.0014972 +2024-11-11 17:00:20,675 Epoch 688/2000 +2024-11-11 17:00:36,709 Current Learning Rate: 0.0040630934 +2024-11-11 17:00:36,709 Train Loss: 0.0014477, Val Loss: 0.0014903 +2024-11-11 17:00:36,710 Epoch 689/2000 +2024-11-11 17:00:51,531 Current Learning Rate: 0.0041403545 +2024-11-11 17:00:51,532 Train Loss: 0.0014564, Val Loss: 0.0015632 +2024-11-11 17:00:51,532 Epoch 690/2000 +2024-11-11 17:01:06,923 Current Learning Rate: 0.0042178277 +2024-11-11 17:01:06,924 Train Loss: 0.0015276, Val Loss: 0.0015127 +2024-11-11 17:01:06,924 Epoch 691/2000 +2024-11-11 17:01:22,501 Current Learning Rate: 0.0042954938 +2024-11-11 17:01:22,502 Train Loss: 0.0014424, Val Loss: 0.0015113 +2024-11-11 17:01:22,502 Epoch 692/2000 +2024-11-11 17:01:37,948 Current Learning Rate: 0.0043733338 +2024-11-11 17:01:37,948 Train Loss: 0.0013432, Val Loss: 0.0015069 +2024-11-11 17:01:37,949 Epoch 693/2000 +2024-11-11 17:01:53,419 Current Learning Rate: 0.0044513284 +2024-11-11 17:01:53,420 Train Loss: 0.0016064, Val Loss: 0.0016084 +2024-11-11 17:01:53,420 Epoch 694/2000 +2024-11-11 17:02:09,692 Current Learning Rate: 0.0045294584 +2024-11-11 17:02:09,692 Train Loss: 0.0016979, Val Loss: 0.0015626 +2024-11-11 17:02:09,693 Epoch 695/2000 +2024-11-11 17:02:25,502 Current Learning Rate: 0.0046077045 +2024-11-11 17:02:25,503 Train Loss: 0.0013709, Val Loss: 0.0015908 +2024-11-11 17:02:25,503 Epoch 696/2000 +2024-11-11 17:02:42,140 Current Learning Rate: 0.0046860474 +2024-11-11 17:02:42,140 Train Loss: 0.0013405, Val Loss: 0.0014735 +2024-11-11 17:02:42,141 Epoch 697/2000 +2024-11-11 17:02:58,552 Current Learning Rate: 0.0047644677 +2024-11-11 17:02:58,553 Train Loss: 0.0015411, Val Loss: 0.0015687 +2024-11-11 17:02:58,553 Epoch 698/2000 +2024-11-11 17:03:14,633 Current Learning Rate: 0.0048429462 +2024-11-11 17:03:14,634 Train Loss: 0.0014065, Val 
Loss: 0.0015166 +2024-11-11 17:03:14,634 Epoch 699/2000 +2024-11-11 17:03:31,389 Current Learning Rate: 0.0049214634 +2024-11-11 17:03:31,389 Train Loss: 0.0019209, Val Loss: 0.0015936 +2024-11-11 17:03:31,389 Epoch 700/2000 +2024-11-11 17:03:47,452 Current Learning Rate: 0.0050000000 +2024-11-11 17:03:47,452 Train Loss: 0.0014235, Val Loss: 0.0015003 +2024-11-11 17:03:47,453 Epoch 701/2000 +2024-11-11 17:04:03,088 Current Learning Rate: 0.0050785366 +2024-11-11 17:04:03,089 Train Loss: 0.0013756, Val Loss: 0.0015143 +2024-11-11 17:04:03,090 Epoch 702/2000 +2024-11-11 17:04:18,856 Current Learning Rate: 0.0051570538 +2024-11-11 17:04:18,856 Train Loss: 0.0013229, Val Loss: 0.0015112 +2024-11-11 17:04:18,857 Epoch 703/2000 +2024-11-11 17:04:35,025 Current Learning Rate: 0.0052355323 +2024-11-11 17:04:35,026 Train Loss: 0.0014407, Val Loss: 0.0015284 +2024-11-11 17:04:35,026 Epoch 704/2000 +2024-11-11 17:04:51,159 Current Learning Rate: 0.0053139526 +2024-11-11 17:04:51,159 Train Loss: 0.0017120, Val Loss: 0.0016282 +2024-11-11 17:04:51,159 Epoch 705/2000 +2024-11-11 17:05:07,342 Current Learning Rate: 0.0053922955 +2024-11-11 17:05:07,342 Train Loss: 0.0015467, Val Loss: 0.0015771 +2024-11-11 17:05:07,343 Epoch 706/2000 +2024-11-11 17:05:23,397 Current Learning Rate: 0.0054705416 +2024-11-11 17:05:23,398 Train Loss: 0.0013901, Val Loss: 0.0014990 +2024-11-11 17:05:23,398 Epoch 707/2000 +2024-11-11 17:05:39,743 Current Learning Rate: 0.0055486716 +2024-11-11 17:05:39,744 Train Loss: 0.0015504, Val Loss: 0.0014934 +2024-11-11 17:05:39,744 Epoch 708/2000 +2024-11-11 17:05:55,815 Current Learning Rate: 0.0056266662 +2024-11-11 17:05:55,815 Train Loss: 0.0014991, Val Loss: 0.0014600 +2024-11-11 17:05:55,815 Epoch 709/2000 +2024-11-11 17:06:11,530 Current Learning Rate: 0.0057045062 +2024-11-11 17:06:11,530 Train Loss: 0.0013958, Val Loss: 0.0014611 +2024-11-11 17:06:11,530 Epoch 710/2000 +2024-11-11 17:06:28,017 Current Learning Rate: 0.0057821723 +2024-11-11 
17:06:28,018 Train Loss: 0.0012802, Val Loss: 0.0015189 +2024-11-11 17:06:28,018 Epoch 711/2000 +2024-11-11 17:06:44,563 Current Learning Rate: 0.0058596455 +2024-11-11 17:06:44,564 Train Loss: 0.0013394, Val Loss: 0.0014677 +2024-11-11 17:06:44,564 Epoch 712/2000 +2024-11-11 17:07:00,097 Current Learning Rate: 0.0059369066 +2024-11-11 17:07:00,097 Train Loss: 0.0014255, Val Loss: 0.0014887 +2024-11-11 17:07:00,097 Epoch 713/2000 +2024-11-11 17:07:15,474 Current Learning Rate: 0.0060139365 +2024-11-11 17:07:15,475 Train Loss: 0.0014388, Val Loss: 0.0015676 +2024-11-11 17:07:15,475 Epoch 714/2000 +2024-11-11 17:07:30,828 Current Learning Rate: 0.0060907162 +2024-11-11 17:07:30,828 Train Loss: 0.0017099, Val Loss: 0.0017168 +2024-11-11 17:07:30,829 Epoch 715/2000 +2024-11-11 17:07:46,216 Current Learning Rate: 0.0061672268 +2024-11-11 17:07:46,216 Train Loss: 0.0017264, Val Loss: 0.0015712 +2024-11-11 17:07:46,216 Epoch 716/2000 +2024-11-11 17:08:02,523 Current Learning Rate: 0.0062434494 +2024-11-11 17:08:02,524 Train Loss: 0.0014189, Val Loss: 0.0014860 +2024-11-11 17:08:02,524 Epoch 717/2000 +2024-11-11 17:08:18,619 Current Learning Rate: 0.0063193652 +2024-11-11 17:08:18,620 Train Loss: 0.0014357, Val Loss: 0.0015216 +2024-11-11 17:08:18,620 Epoch 718/2000 +2024-11-11 17:08:35,352 Current Learning Rate: 0.0063949555 +2024-11-11 17:08:35,353 Train Loss: 0.0013072, Val Loss: 0.0014546 +2024-11-11 17:08:35,353 Epoch 719/2000 +2024-11-11 17:08:51,000 Current Learning Rate: 0.0064702016 +2024-11-11 17:08:51,000 Train Loss: 0.0014518, Val Loss: 0.0014949 +2024-11-11 17:08:51,001 Epoch 720/2000 +2024-11-11 17:09:06,208 Current Learning Rate: 0.0065450850 +2024-11-11 17:09:06,208 Train Loss: 0.0014047, Val Loss: 0.0015171 +2024-11-11 17:09:06,209 Epoch 721/2000 +2024-11-11 17:09:21,760 Current Learning Rate: 0.0066195871 +2024-11-11 17:09:21,761 Train Loss: 0.0014210, Val Loss: 0.0014804 +2024-11-11 17:09:21,761 Epoch 722/2000 +2024-11-11 17:09:37,125 Current Learning 
Rate: 0.0066936896 +2024-11-11 17:09:37,125 Train Loss: 0.0014459, Val Loss: 0.0018787 +2024-11-11 17:09:37,125 Epoch 723/2000 +2024-11-11 17:09:52,220 Current Learning Rate: 0.0067673742 +2024-11-11 17:09:52,220 Train Loss: 0.0013806, Val Loss: 0.0015081 +2024-11-11 17:09:52,221 Epoch 724/2000 +2024-11-11 17:10:07,619 Current Learning Rate: 0.0068406228 +2024-11-11 17:10:07,619 Train Loss: 0.0014868, Val Loss: 0.0015729 +2024-11-11 17:10:07,620 Epoch 725/2000 +2024-11-11 17:10:23,573 Current Learning Rate: 0.0069134172 +2024-11-11 17:10:23,574 Train Loss: 0.0016074, Val Loss: 0.0017101 +2024-11-11 17:10:23,574 Epoch 726/2000 +2024-11-11 17:10:39,384 Current Learning Rate: 0.0069857395 +2024-11-11 17:10:39,385 Train Loss: 0.0013524, Val Loss: 0.0014477 +2024-11-11 17:10:39,385 Epoch 727/2000 +2024-11-11 17:10:55,280 Current Learning Rate: 0.0070575718 +2024-11-11 17:10:55,281 Train Loss: 0.0013677, Val Loss: 0.0014720 +2024-11-11 17:10:55,281 Epoch 728/2000 +2024-11-11 17:11:10,835 Current Learning Rate: 0.0071288965 +2024-11-11 17:11:10,836 Train Loss: 0.0014198, Val Loss: 0.0015157 +2024-11-11 17:11:10,836 Epoch 729/2000 +2024-11-11 17:11:27,227 Current Learning Rate: 0.0071996958 +2024-11-11 17:11:27,228 Train Loss: 0.0012735, Val Loss: 0.0014381 +2024-11-11 17:11:27,228 Epoch 730/2000 +2024-11-11 17:11:43,332 Current Learning Rate: 0.0072699525 +2024-11-11 17:11:43,332 Train Loss: 0.0015931, Val Loss: 0.0018424 +2024-11-11 17:11:43,333 Epoch 731/2000 +2024-11-11 17:12:00,490 Current Learning Rate: 0.0073396491 +2024-11-11 17:12:00,491 Train Loss: 0.0016338, Val Loss: 0.0015356 +2024-11-11 17:12:00,491 Epoch 732/2000 +2024-11-11 17:12:16,637 Current Learning Rate: 0.0074087684 +2024-11-11 17:12:16,637 Train Loss: 0.0014040, Val Loss: 0.0014609 +2024-11-11 17:12:16,638 Epoch 733/2000 +2024-11-11 17:12:31,840 Current Learning Rate: 0.0074772933 +2024-11-11 17:12:32,671 Train Loss: 0.0012633, Val Loss: 0.0014138 +2024-11-11 17:12:32,671 Epoch 734/2000 +2024-11-11 
17:12:46,926 Current Learning Rate: 0.0075452071 +2024-11-11 17:12:46,927 Train Loss: 0.0013944, Val Loss: 0.0015161 +2024-11-11 17:12:46,927 Epoch 735/2000 +2024-11-11 17:13:02,279 Current Learning Rate: 0.0076124928 +2024-11-11 17:13:02,280 Train Loss: 0.0014223, Val Loss: 0.0015125 +2024-11-11 17:13:02,280 Epoch 736/2000 +2024-11-11 17:13:17,803 Current Learning Rate: 0.0076791340 +2024-11-11 17:13:17,804 Train Loss: 0.0013919, Val Loss: 0.0015442 +2024-11-11 17:13:17,804 Epoch 737/2000 +2024-11-11 17:13:32,973 Current Learning Rate: 0.0077451141 +2024-11-11 17:13:32,973 Train Loss: 0.0013839, Val Loss: 0.0015243 +2024-11-11 17:13:32,974 Epoch 738/2000 +2024-11-11 17:13:48,400 Current Learning Rate: 0.0078104169 +2024-11-11 17:13:48,400 Train Loss: 0.0014401, Val Loss: 0.0015702 +2024-11-11 17:13:48,400 Epoch 739/2000 +2024-11-11 17:14:03,439 Current Learning Rate: 0.0078750263 +2024-11-11 17:14:03,439 Train Loss: 0.0018107, Val Loss: 0.0018858 +2024-11-11 17:14:03,439 Epoch 740/2000 +2024-11-11 17:14:18,950 Current Learning Rate: 0.0079389263 +2024-11-11 17:14:18,950 Train Loss: 0.0016465, Val Loss: 0.0016108 +2024-11-11 17:14:18,950 Epoch 741/2000 +2024-11-11 17:14:35,116 Current Learning Rate: 0.0080021011 +2024-11-11 17:14:35,117 Train Loss: 0.0014754, Val Loss: 0.0014901 +2024-11-11 17:14:35,117 Epoch 742/2000 +2024-11-11 17:14:51,098 Current Learning Rate: 0.0080645353 +2024-11-11 17:14:51,098 Train Loss: 0.0013448, Val Loss: 0.0014771 +2024-11-11 17:14:51,098 Epoch 743/2000 +2024-11-11 17:15:06,433 Current Learning Rate: 0.0081262133 +2024-11-11 17:15:06,434 Train Loss: 0.0014345, Val Loss: 0.0014970 +2024-11-11 17:15:06,434 Epoch 744/2000 +2024-11-11 17:15:21,907 Current Learning Rate: 0.0081871199 +2024-11-11 17:15:21,907 Train Loss: 0.0014178, Val Loss: 0.0014923 +2024-11-11 17:15:21,908 Epoch 745/2000 +2024-11-11 17:15:38,443 Current Learning Rate: 0.0082472402 +2024-11-11 17:15:38,444 Train Loss: 0.0013038, Val Loss: 0.0014537 +2024-11-11 
17:15:38,444 Epoch 746/2000 +2024-11-11 17:15:54,349 Current Learning Rate: 0.0083065593 +2024-11-11 17:15:54,350 Train Loss: 0.0015127, Val Loss: 0.0015143 +2024-11-11 17:15:54,350 Epoch 747/2000 +2024-11-11 17:16:10,352 Current Learning Rate: 0.0083650626 +2024-11-11 17:16:10,352 Train Loss: 0.0015203, Val Loss: 0.0014412 +2024-11-11 17:16:10,353 Epoch 748/2000 +2024-11-11 17:16:26,800 Current Learning Rate: 0.0084227355 +2024-11-11 17:16:26,801 Train Loss: 0.0014944, Val Loss: 0.0015220 +2024-11-11 17:16:26,801 Epoch 749/2000 +2024-11-11 17:16:42,648 Current Learning Rate: 0.0084795640 +2024-11-11 17:16:43,437 Train Loss: 0.0012655, Val Loss: 0.0013929 +2024-11-11 17:16:43,437 Epoch 750/2000 +2024-11-11 17:16:59,023 Current Learning Rate: 0.0085355339 +2024-11-11 17:16:59,024 Train Loss: 0.0014169, Val Loss: 0.0014294 +2024-11-11 17:16:59,024 Epoch 751/2000 +2024-11-11 17:17:15,535 Current Learning Rate: 0.0085906315 +2024-11-11 17:17:15,535 Train Loss: 0.0014581, Val Loss: 0.0014705 +2024-11-11 17:17:15,535 Epoch 752/2000 +2024-11-11 17:17:31,363 Current Learning Rate: 0.0086448431 +2024-11-11 17:17:32,141 Train Loss: 0.0012912, Val Loss: 0.0013847 +2024-11-11 17:17:32,141 Epoch 753/2000 +2024-11-11 17:17:47,246 Current Learning Rate: 0.0086981555 +2024-11-11 17:17:47,247 Train Loss: 0.0013669, Val Loss: 0.0014633 +2024-11-11 17:17:47,247 Epoch 754/2000 +2024-11-11 17:18:03,462 Current Learning Rate: 0.0087505553 +2024-11-11 17:18:03,463 Train Loss: 0.0013582, Val Loss: 0.0014893 +2024-11-11 17:18:03,463 Epoch 755/2000 +2024-11-11 17:18:20,208 Current Learning Rate: 0.0088020298 +2024-11-11 17:18:20,209 Train Loss: 0.0012477, Val Loss: 0.0014051 +2024-11-11 17:18:20,210 Epoch 756/2000 +2024-11-11 17:18:37,412 Current Learning Rate: 0.0088525662 +2024-11-11 17:18:37,413 Train Loss: 0.0012354, Val Loss: 0.0014067 +2024-11-11 17:18:37,413 Epoch 757/2000 +2024-11-11 17:18:54,361 Current Learning Rate: 0.0089021520 +2024-11-11 17:18:54,362 Train Loss: 0.0012827, Val 
Loss: 0.0014827 +2024-11-11 17:18:54,362 Epoch 758/2000 +2024-11-11 17:19:11,100 Current Learning Rate: 0.0089507751 +2024-11-11 17:19:11,101 Train Loss: 0.0013223, Val Loss: 0.0014795 +2024-11-11 17:19:11,101 Epoch 759/2000 +2024-11-11 17:19:26,911 Current Learning Rate: 0.0089984233 +2024-11-11 17:19:26,912 Train Loss: 0.0014247, Val Loss: 0.0017839 +2024-11-11 17:19:26,912 Epoch 760/2000 +2024-11-11 17:19:42,660 Current Learning Rate: 0.0090450850 +2024-11-11 17:19:42,661 Train Loss: 0.0013827, Val Loss: 0.0014344 +2024-11-11 17:19:42,661 Epoch 761/2000 +2024-11-11 17:19:59,085 Current Learning Rate: 0.0090907486 +2024-11-11 17:19:59,086 Train Loss: 0.0014266, Val Loss: 0.0014995 +2024-11-11 17:19:59,087 Epoch 762/2000 +2024-11-11 17:20:14,751 Current Learning Rate: 0.0091354029 +2024-11-11 17:20:14,752 Train Loss: 0.0014604, Val Loss: 0.0015419 +2024-11-11 17:20:14,752 Epoch 763/2000 +2024-11-11 17:20:30,831 Current Learning Rate: 0.0091790368 +2024-11-11 17:20:30,832 Train Loss: 0.0014246, Val Loss: 0.0015470 +2024-11-11 17:20:30,832 Epoch 764/2000 +2024-11-11 17:20:46,613 Current Learning Rate: 0.0092216396 +2024-11-11 17:20:46,615 Train Loss: 0.0013135, Val Loss: 0.0014740 +2024-11-11 17:20:46,615 Epoch 765/2000 +2024-11-11 17:21:03,003 Current Learning Rate: 0.0092632008 +2024-11-11 17:21:03,004 Train Loss: 0.0014266, Val Loss: 0.0015145 +2024-11-11 17:21:03,004 Epoch 766/2000 +2024-11-11 17:21:19,068 Current Learning Rate: 0.0093037101 +2024-11-11 17:21:19,068 Train Loss: 0.0013968, Val Loss: 0.0014449 +2024-11-11 17:21:19,069 Epoch 767/2000 +2024-11-11 17:21:35,054 Current Learning Rate: 0.0093431576 +2024-11-11 17:21:35,054 Train Loss: 0.0013752, Val Loss: 0.0014473 +2024-11-11 17:21:35,055 Epoch 768/2000 +2024-11-11 17:21:50,172 Current Learning Rate: 0.0093815334 +2024-11-11 17:21:50,172 Train Loss: 0.0013089, Val Loss: 0.0014230 +2024-11-11 17:21:50,173 Epoch 769/2000 +2024-11-11 17:22:05,512 Current Learning Rate: 0.0094188282 +2024-11-11 
17:22:05,514 Train Loss: 0.0013418, Val Loss: 0.0014052 +2024-11-11 17:22:05,514 Epoch 770/2000 +2024-11-11 17:22:20,416 Current Learning Rate: 0.0094550326 +2024-11-11 17:22:20,417 Train Loss: 0.0013460, Val Loss: 0.0014729 +2024-11-11 17:22:20,417 Epoch 771/2000 +2024-11-11 17:22:36,114 Current Learning Rate: 0.0094901379 +2024-11-11 17:22:36,115 Train Loss: 0.0013318, Val Loss: 0.0014820 +2024-11-11 17:22:36,115 Epoch 772/2000 +2024-11-11 17:22:51,343 Current Learning Rate: 0.0095241353 +2024-11-11 17:22:52,057 Train Loss: 0.0011896, Val Loss: 0.0013285 +2024-11-11 17:22:52,057 Epoch 773/2000 +2024-11-11 17:23:07,033 Current Learning Rate: 0.0095570164 +2024-11-11 17:23:07,034 Train Loss: 0.0013304, Val Loss: 0.0013801 +2024-11-11 17:23:07,034 Epoch 774/2000 +2024-11-11 17:23:22,702 Current Learning Rate: 0.0095887731 +2024-11-11 17:23:22,703 Train Loss: 0.0013142, Val Loss: 0.0014194 +2024-11-11 17:23:22,703 Epoch 775/2000 +2024-11-11 17:23:38,813 Current Learning Rate: 0.0096193977 +2024-11-11 17:23:38,813 Train Loss: 0.0013468, Val Loss: 0.0014064 +2024-11-11 17:23:38,813 Epoch 776/2000 +2024-11-11 17:23:55,001 Current Learning Rate: 0.0096488824 +2024-11-11 17:23:55,001 Train Loss: 0.0012973, Val Loss: 0.0014213 +2024-11-11 17:23:55,002 Epoch 777/2000 +2024-11-11 17:24:10,834 Current Learning Rate: 0.0096772202 +2024-11-11 17:24:11,587 Train Loss: 0.0011907, Val Loss: 0.0013259 +2024-11-11 17:24:11,587 Epoch 778/2000 +2024-11-11 17:24:25,919 Current Learning Rate: 0.0097044038 +2024-11-11 17:24:25,920 Train Loss: 0.0013381, Val Loss: 0.0014263 +2024-11-11 17:24:25,920 Epoch 779/2000 +2024-11-11 17:24:42,361 Current Learning Rate: 0.0097304268 +2024-11-11 17:24:42,362 Train Loss: 0.0013340, Val Loss: 0.0013705 +2024-11-11 17:24:42,362 Epoch 780/2000 +2024-11-11 17:24:58,263 Current Learning Rate: 0.0097552826 +2024-11-11 17:24:58,264 Train Loss: 0.0013152, Val Loss: 0.0013503 +2024-11-11 17:24:58,265 Epoch 781/2000 +2024-11-11 17:25:14,052 Current Learning 
Rate: 0.0097789651 +2024-11-11 17:25:14,053 Train Loss: 0.0012261, Val Loss: 0.0014349 +2024-11-11 17:25:14,054 Epoch 782/2000 +2024-11-11 17:25:30,706 Current Learning Rate: 0.0098014684 +2024-11-11 17:25:30,707 Train Loss: 0.0013467, Val Loss: 0.0014464 +2024-11-11 17:25:30,707 Epoch 783/2000 +2024-11-11 17:25:47,583 Current Learning Rate: 0.0098227871 +2024-11-11 17:25:47,584 Train Loss: 0.0012841, Val Loss: 0.0014398 +2024-11-11 17:25:47,584 Epoch 784/2000 +2024-11-11 17:26:02,644 Current Learning Rate: 0.0098429158 +2024-11-11 17:26:02,645 Train Loss: 0.0012904, Val Loss: 0.0013725 +2024-11-11 17:26:02,645 Epoch 785/2000 +2024-11-11 17:26:19,072 Current Learning Rate: 0.0098618496 +2024-11-11 17:26:19,073 Train Loss: 0.0013397, Val Loss: 0.0013750 +2024-11-11 17:26:19,073 Epoch 786/2000 +2024-11-11 17:26:34,064 Current Learning Rate: 0.0098795838 +2024-11-11 17:26:34,065 Train Loss: 0.0013598, Val Loss: 0.0013267 +2024-11-11 17:26:34,065 Epoch 787/2000 +2024-11-11 17:26:49,310 Current Learning Rate: 0.0098961141 +2024-11-11 17:26:49,310 Train Loss: 0.0013358, Val Loss: 0.0014850 +2024-11-11 17:26:49,311 Epoch 788/2000 +2024-11-11 17:27:04,770 Current Learning Rate: 0.0099114363 +2024-11-11 17:27:04,771 Train Loss: 0.0014228, Val Loss: 0.0015173 +2024-11-11 17:27:04,771 Epoch 789/2000 +2024-11-11 17:27:20,899 Current Learning Rate: 0.0099255466 +2024-11-11 17:27:20,899 Train Loss: 0.0014229, Val Loss: 0.0014432 +2024-11-11 17:27:20,899 Epoch 790/2000 +2024-11-11 17:27:37,233 Current Learning Rate: 0.0099384417 +2024-11-11 17:27:37,234 Train Loss: 0.0012249, Val Loss: 0.0013717 +2024-11-11 17:27:37,234 Epoch 791/2000 +2024-11-11 17:27:53,191 Current Learning Rate: 0.0099501183 +2024-11-11 17:27:53,192 Train Loss: 0.0011699, Val Loss: 0.0013676 +2024-11-11 17:27:53,192 Epoch 792/2000 +2024-11-11 17:28:09,828 Current Learning Rate: 0.0099605735 +2024-11-11 17:28:09,828 Train Loss: 0.0012559, Val Loss: 0.0013603 +2024-11-11 17:28:09,828 Epoch 793/2000 +2024-11-11 
17:28:26,718 Current Learning Rate: 0.0099698048 +2024-11-11 17:28:26,719 Train Loss: 0.0012179, Val Loss: 0.0013536 +2024-11-11 17:28:26,720 Epoch 794/2000 +2024-11-11 17:28:42,520 Current Learning Rate: 0.0099778098 +2024-11-11 17:28:42,521 Train Loss: 0.0013702, Val Loss: 0.0013276 +2024-11-11 17:28:42,521 Epoch 795/2000 +2024-11-11 17:28:58,514 Current Learning Rate: 0.0099845867 +2024-11-11 17:28:59,554 Train Loss: 0.0011509, Val Loss: 0.0012980 +2024-11-11 17:28:59,554 Epoch 796/2000 +2024-11-11 17:29:14,723 Current Learning Rate: 0.0099901336 +2024-11-11 17:29:14,724 Train Loss: 0.0012321, Val Loss: 0.0013927 +2024-11-11 17:29:14,725 Epoch 797/2000 +2024-11-11 17:29:30,497 Current Learning Rate: 0.0099944494 +2024-11-11 17:29:30,497 Train Loss: 0.0012585, Val Loss: 0.0013159 +2024-11-11 17:29:30,498 Epoch 798/2000 +2024-11-11 17:29:46,559 Current Learning Rate: 0.0099975328 +2024-11-11 17:29:46,559 Train Loss: 0.0011738, Val Loss: 0.0013762 +2024-11-11 17:29:46,560 Epoch 799/2000 +2024-11-11 17:30:01,241 Current Learning Rate: 0.0099993832 +2024-11-11 17:30:01,242 Train Loss: 0.0011849, Val Loss: 0.0013323 +2024-11-11 17:30:01,242 Epoch 800/2000 +2024-11-11 17:30:16,359 Current Learning Rate: 0.0100000000 +2024-11-11 17:30:16,359 Train Loss: 0.0012342, Val Loss: 0.0013504 +2024-11-11 17:30:16,360 Epoch 801/2000 +2024-11-11 17:30:31,872 Current Learning Rate: 0.0099993832 +2024-11-11 17:30:31,873 Train Loss: 0.0012720, Val Loss: 0.0013579 +2024-11-11 17:30:31,873 Epoch 802/2000 +2024-11-11 17:30:47,663 Current Learning Rate: 0.0099975328 +2024-11-11 17:30:47,663 Train Loss: 0.0012628, Val Loss: 0.0013589 +2024-11-11 17:30:47,664 Epoch 803/2000 +2024-11-11 17:31:03,484 Current Learning Rate: 0.0099944494 +2024-11-11 17:31:03,484 Train Loss: 0.0013250, Val Loss: 0.0014446 +2024-11-11 17:31:03,485 Epoch 804/2000 +2024-11-11 17:31:19,722 Current Learning Rate: 0.0099901336 +2024-11-11 17:31:19,722 Train Loss: 0.0012140, Val Loss: 0.0013398 +2024-11-11 
17:31:19,723 Epoch 805/2000 +2024-11-11 17:31:35,267 Current Learning Rate: 0.0099845867 +2024-11-11 17:31:35,268 Train Loss: 0.0013890, Val Loss: 0.0015312 +2024-11-11 17:31:35,268 Epoch 806/2000 +2024-11-11 17:31:51,629 Current Learning Rate: 0.0099778098 +2024-11-11 17:31:51,630 Train Loss: 0.0015518, Val Loss: 0.0015830 +2024-11-11 17:31:51,630 Epoch 807/2000 +2024-11-11 17:32:06,988 Current Learning Rate: 0.0099698048 +2024-11-11 17:32:06,989 Train Loss: 0.0013848, Val Loss: 0.0013396 +2024-11-11 17:32:06,989 Epoch 808/2000 +2024-11-11 17:32:22,774 Current Learning Rate: 0.0099605735 +2024-11-11 17:32:22,774 Train Loss: 0.0013203, Val Loss: 0.0013689 +2024-11-11 17:32:22,774 Epoch 809/2000 +2024-11-11 17:32:39,129 Current Learning Rate: 0.0099501183 +2024-11-11 17:32:39,130 Train Loss: 0.0011904, Val Loss: 0.0014270 +2024-11-11 17:32:39,130 Epoch 810/2000 +2024-11-11 17:32:54,516 Current Learning Rate: 0.0099384417 +2024-11-11 17:32:54,516 Train Loss: 0.0012725, Val Loss: 0.0013571 +2024-11-11 17:32:54,516 Epoch 811/2000 +2024-11-11 17:33:10,881 Current Learning Rate: 0.0099255466 +2024-11-11 17:33:10,882 Train Loss: 0.0011830, Val Loss: 0.0013175 +2024-11-11 17:33:10,882 Epoch 812/2000 +2024-11-11 17:33:26,183 Current Learning Rate: 0.0099114363 +2024-11-11 17:33:26,184 Train Loss: 0.0011857, Val Loss: 0.0013092 +2024-11-11 17:33:26,184 Epoch 813/2000 +2024-11-11 17:33:41,410 Current Learning Rate: 0.0098961141 +2024-11-11 17:33:42,181 Train Loss: 0.0011688, Val Loss: 0.0012712 +2024-11-11 17:33:42,181 Epoch 814/2000 +2024-11-11 17:33:57,011 Current Learning Rate: 0.0098795838 +2024-11-11 17:33:57,012 Train Loss: 0.0013154, Val Loss: 0.0013275 +2024-11-11 17:33:57,012 Epoch 815/2000 +2024-11-11 17:34:12,243 Current Learning Rate: 0.0098618496 +2024-11-11 17:34:12,244 Train Loss: 0.0012388, Val Loss: 0.0013311 +2024-11-11 17:34:12,244 Epoch 816/2000 +2024-11-11 17:34:27,673 Current Learning Rate: 0.0098429158 +2024-11-11 17:34:27,674 Train Loss: 0.0010880, Val 
Loss: 0.0012854 +2024-11-11 17:34:27,674 Epoch 817/2000 +2024-11-11 17:34:43,558 Current Learning Rate: 0.0098227871 +2024-11-11 17:34:43,558 Train Loss: 0.0014446, Val Loss: 0.0013267 +2024-11-11 17:34:43,559 Epoch 818/2000 +2024-11-11 17:34:59,665 Current Learning Rate: 0.0098014684 +2024-11-11 17:34:59,665 Train Loss: 0.0012407, Val Loss: 0.0012922 +2024-11-11 17:34:59,666 Epoch 819/2000 +2024-11-11 17:35:15,079 Current Learning Rate: 0.0097789651 +2024-11-11 17:35:15,080 Train Loss: 0.0012259, Val Loss: 0.0012754 +2024-11-11 17:35:15,080 Epoch 820/2000 +2024-11-11 17:35:30,349 Current Learning Rate: 0.0097552826 +2024-11-11 17:35:31,163 Train Loss: 0.0011163, Val Loss: 0.0012624 +2024-11-11 17:35:31,163 Epoch 821/2000 +2024-11-11 17:35:45,653 Current Learning Rate: 0.0097304268 +2024-11-11 17:35:45,653 Train Loss: 0.0012854, Val Loss: 0.0014429 +2024-11-11 17:35:45,654 Epoch 822/2000 +2024-11-11 17:36:00,763 Current Learning Rate: 0.0097044038 +2024-11-11 17:36:00,764 Train Loss: 0.0012318, Val Loss: 0.0013586 +2024-11-11 17:36:00,764 Epoch 823/2000 +2024-11-11 17:36:16,369 Current Learning Rate: 0.0096772202 +2024-11-11 17:36:16,369 Train Loss: 0.0012693, Val Loss: 0.0013378 +2024-11-11 17:36:16,370 Epoch 824/2000 +2024-11-11 17:36:32,042 Current Learning Rate: 0.0096488824 +2024-11-11 17:36:32,042 Train Loss: 0.0012024, Val Loss: 0.0012881 +2024-11-11 17:36:32,042 Epoch 825/2000 +2024-11-11 17:36:48,104 Current Learning Rate: 0.0096193977 +2024-11-11 17:36:48,779 Train Loss: 0.0012591, Val Loss: 0.0012407 +2024-11-11 17:36:48,779 Epoch 826/2000 +2024-11-11 17:37:04,131 Current Learning Rate: 0.0095887731 +2024-11-11 17:37:04,839 Train Loss: 0.0010302, Val Loss: 0.0012064 +2024-11-11 17:37:04,839 Epoch 827/2000 +2024-11-11 17:37:19,939 Current Learning Rate: 0.0095570164 +2024-11-11 17:37:20,693 Train Loss: 0.0011161, Val Loss: 0.0011938 +2024-11-11 17:37:20,693 Epoch 828/2000 +2024-11-11 17:37:35,742 Current Learning Rate: 0.0095241353 +2024-11-11 
17:37:35,743 Train Loss: 0.0012396, Val Loss: 0.0013392 +2024-11-11 17:37:35,743 Epoch 829/2000 +2024-11-11 17:37:51,809 Current Learning Rate: 0.0094901379 +2024-11-11 17:37:51,810 Train Loss: 0.0012579, Val Loss: 0.0012809 +2024-11-11 17:37:51,810 Epoch 830/2000 +2024-11-11 17:38:08,141 Current Learning Rate: 0.0094550326 +2024-11-11 17:38:08,141 Train Loss: 0.0010387, Val Loss: 0.0011989 +2024-11-11 17:38:08,142 Epoch 831/2000 +2024-11-11 17:38:24,454 Current Learning Rate: 0.0094188282 +2024-11-11 17:38:24,454 Train Loss: 0.0013556, Val Loss: 0.0013783 +2024-11-11 17:38:24,455 Epoch 832/2000 +2024-11-11 17:38:41,277 Current Learning Rate: 0.0093815334 +2024-11-11 17:38:41,277 Train Loss: 0.0012077, Val Loss: 0.0012947 +2024-11-11 17:38:41,277 Epoch 833/2000 +2024-11-11 17:38:57,796 Current Learning Rate: 0.0093431576 +2024-11-11 17:38:57,796 Train Loss: 0.0013527, Val Loss: 0.0012552 +2024-11-11 17:38:57,796 Epoch 834/2000 +2024-11-11 17:39:13,801 Current Learning Rate: 0.0093037101 +2024-11-11 17:39:13,802 Train Loss: 0.0011967, Val Loss: 0.0011970 +2024-11-11 17:39:13,802 Epoch 835/2000 +2024-11-11 17:39:30,820 Current Learning Rate: 0.0092632008 +2024-11-11 17:39:31,854 Train Loss: 0.0010825, Val Loss: 0.0011737 +2024-11-11 17:39:31,854 Epoch 836/2000 +2024-11-11 17:39:47,179 Current Learning Rate: 0.0092216396 +2024-11-11 17:39:48,045 Train Loss: 0.0010552, Val Loss: 0.0011468 +2024-11-11 17:39:48,046 Epoch 837/2000 +2024-11-11 17:40:03,981 Current Learning Rate: 0.0091790368 +2024-11-11 17:40:04,817 Train Loss: 0.0010035, Val Loss: 0.0011338 +2024-11-11 17:40:04,817 Epoch 838/2000 +2024-11-11 17:40:20,042 Current Learning Rate: 0.0091354029 +2024-11-11 17:40:20,043 Train Loss: 0.0010370, Val Loss: 0.0012440 +2024-11-11 17:40:20,044 Epoch 839/2000 +2024-11-11 17:40:36,161 Current Learning Rate: 0.0090907486 +2024-11-11 17:40:36,162 Train Loss: 0.0010335, Val Loss: 0.0011683 +2024-11-11 17:40:36,162 Epoch 840/2000 +2024-11-11 17:40:52,126 Current Learning 
Rate: 0.0090450850 +2024-11-11 17:40:52,127 Train Loss: 0.0010770, Val Loss: 0.0011526 +2024-11-11 17:40:52,127 Epoch 841/2000 +2024-11-11 17:41:07,702 Current Learning Rate: 0.0089984233 +2024-11-11 17:41:07,703 Train Loss: 0.0010399, Val Loss: 0.0011815 +2024-11-11 17:41:07,703 Epoch 842/2000 +2024-11-11 17:41:23,659 Current Learning Rate: 0.0089507751 +2024-11-11 17:41:23,659 Train Loss: 0.0011041, Val Loss: 0.0011996 +2024-11-11 17:41:23,659 Epoch 843/2000 +2024-11-11 17:41:39,361 Current Learning Rate: 0.0089021520 +2024-11-11 17:41:39,362 Train Loss: 0.0010959, Val Loss: 0.0011954 +2024-11-11 17:41:39,362 Epoch 844/2000 +2024-11-11 17:41:54,416 Current Learning Rate: 0.0088525662 +2024-11-11 17:41:54,417 Train Loss: 0.0010951, Val Loss: 0.0011798 +2024-11-11 17:41:54,417 Epoch 845/2000 +2024-11-11 17:42:09,721 Current Learning Rate: 0.0088020298 +2024-11-11 17:42:09,721 Train Loss: 0.0010424, Val Loss: 0.0012155 +2024-11-11 17:42:09,722 Epoch 846/2000 +2024-11-11 17:42:25,807 Current Learning Rate: 0.0087505553 +2024-11-11 17:42:25,808 Train Loss: 0.0009852, Val Loss: 0.0011684 +2024-11-11 17:42:25,808 Epoch 847/2000 +2024-11-11 17:42:41,697 Current Learning Rate: 0.0086981555 +2024-11-11 17:42:41,697 Train Loss: 0.0011122, Val Loss: 0.0011711 +2024-11-11 17:42:41,698 Epoch 848/2000 +2024-11-11 17:42:57,606 Current Learning Rate: 0.0086448431 +2024-11-11 17:42:57,607 Train Loss: 0.0009678, Val Loss: 0.0011346 +2024-11-11 17:42:57,607 Epoch 849/2000 +2024-11-11 17:43:13,178 Current Learning Rate: 0.0085906315 +2024-11-11 17:43:13,179 Train Loss: 0.0010650, Val Loss: 0.0011588 +2024-11-11 17:43:13,179 Epoch 850/2000 +2024-11-11 17:43:28,776 Current Learning Rate: 0.0085355339 +2024-11-11 17:43:28,777 Train Loss: 0.0011443, Val Loss: 0.0011940 +2024-11-11 17:43:28,777 Epoch 851/2000 +2024-11-11 17:43:44,447 Current Learning Rate: 0.0084795640 +2024-11-11 17:43:44,448 Train Loss: 0.0011059, Val Loss: 0.0011838 +2024-11-11 17:43:44,448 Epoch 852/2000 +2024-11-11 
17:44:00,672 Current Learning Rate: 0.0084227355 +2024-11-11 17:44:00,672 Train Loss: 0.0010949, Val Loss: 0.0011581 +2024-11-11 17:44:00,673 Epoch 853/2000 +2024-11-11 17:44:16,009 Current Learning Rate: 0.0083650626 +2024-11-11 17:44:16,010 Train Loss: 0.0010914, Val Loss: 0.0011674 +2024-11-11 17:44:16,010 Epoch 854/2000 +2024-11-11 17:44:31,467 Current Learning Rate: 0.0083065593 +2024-11-11 17:44:31,468 Train Loss: 0.0010065, Val Loss: 0.0011374 +2024-11-11 17:44:31,468 Epoch 855/2000 +2024-11-11 17:44:47,646 Current Learning Rate: 0.0082472402 +2024-11-11 17:44:48,469 Train Loss: 0.0010591, Val Loss: 0.0011211 +2024-11-11 17:44:48,470 Epoch 856/2000 +2024-11-11 17:45:04,162 Current Learning Rate: 0.0081871199 +2024-11-11 17:45:04,163 Train Loss: 0.0011274, Val Loss: 0.0011416 +2024-11-11 17:45:04,163 Epoch 857/2000 +2024-11-11 17:45:20,103 Current Learning Rate: 0.0081262133 +2024-11-11 17:45:20,104 Train Loss: 0.0010855, Val Loss: 0.0011784 +2024-11-11 17:45:20,104 Epoch 858/2000 +2024-11-11 17:45:35,618 Current Learning Rate: 0.0080645353 +2024-11-11 17:45:35,618 Train Loss: 0.0010290, Val Loss: 0.0012133 +2024-11-11 17:45:35,619 Epoch 859/2000 +2024-11-11 17:45:51,041 Current Learning Rate: 0.0080021011 +2024-11-11 17:45:51,041 Train Loss: 0.0012244, Val Loss: 0.0013354 +2024-11-11 17:45:51,042 Epoch 860/2000 +2024-11-11 17:46:07,061 Current Learning Rate: 0.0079389263 +2024-11-11 17:46:07,062 Train Loss: 0.0010804, Val Loss: 0.0011281 +2024-11-11 17:46:07,062 Epoch 861/2000 +2024-11-11 17:46:23,099 Current Learning Rate: 0.0078750263 +2024-11-11 17:46:24,133 Train Loss: 0.0009456, Val Loss: 0.0011056 +2024-11-11 17:46:24,134 Epoch 862/2000 +2024-11-11 17:46:39,370 Current Learning Rate: 0.0078104169 +2024-11-11 17:46:39,371 Train Loss: 0.0009509, Val Loss: 0.0011377 +2024-11-11 17:46:39,371 Epoch 863/2000 +2024-11-11 17:46:54,790 Current Learning Rate: 0.0077451141 +2024-11-11 17:46:55,573 Train Loss: 0.0009931, Val Loss: 0.0010930 +2024-11-11 
17:46:55,573 Epoch 864/2000 +2024-11-11 17:47:10,014 Current Learning Rate: 0.0076791340 +2024-11-11 17:47:10,015 Train Loss: 0.0010525, Val Loss: 0.0011675 +2024-11-11 17:47:10,015 Epoch 865/2000 +2024-11-11 17:47:25,041 Current Learning Rate: 0.0076124928 +2024-11-11 17:47:25,042 Train Loss: 0.0011514, Val Loss: 0.0012510 +2024-11-11 17:47:25,042 Epoch 866/2000 +2024-11-11 17:47:39,866 Current Learning Rate: 0.0075452071 +2024-11-11 17:47:39,866 Train Loss: 0.0011009, Val Loss: 0.0012376 +2024-11-11 17:47:39,867 Epoch 867/2000 +2024-11-11 17:47:55,423 Current Learning Rate: 0.0074772933 +2024-11-11 17:47:55,424 Train Loss: 0.0010050, Val Loss: 0.0011109 +2024-11-11 17:47:55,424 Epoch 868/2000 +2024-11-11 17:48:10,209 Current Learning Rate: 0.0074087684 +2024-11-11 17:48:10,209 Train Loss: 0.0010858, Val Loss: 0.0011313 +2024-11-11 17:48:10,209 Epoch 869/2000 +2024-11-11 17:48:25,503 Current Learning Rate: 0.0073396491 +2024-11-11 17:48:26,213 Train Loss: 0.0009453, Val Loss: 0.0010767 +2024-11-11 17:48:26,213 Epoch 870/2000 +2024-11-11 17:48:41,576 Current Learning Rate: 0.0072699525 +2024-11-11 17:48:41,577 Train Loss: 0.0009463, Val Loss: 0.0011096 +2024-11-11 17:48:41,577 Epoch 871/2000 +2024-11-11 17:48:56,868 Current Learning Rate: 0.0071996958 +2024-11-11 17:48:57,665 Train Loss: 0.0008880, Val Loss: 0.0010729 +2024-11-11 17:48:57,665 Epoch 872/2000 +2024-11-11 17:49:12,267 Current Learning Rate: 0.0071288965 +2024-11-11 17:49:13,042 Train Loss: 0.0009114, Val Loss: 0.0010709 +2024-11-11 17:49:13,043 Epoch 873/2000 +2024-11-11 17:49:27,643 Current Learning Rate: 0.0070575718 +2024-11-11 17:49:28,404 Train Loss: 0.0009138, Val Loss: 0.0010572 +2024-11-11 17:49:28,405 Epoch 874/2000 +2024-11-11 17:49:43,480 Current Learning Rate: 0.0069857395 +2024-11-11 17:49:43,481 Train Loss: 0.0009576, Val Loss: 0.0010831 +2024-11-11 17:49:43,481 Epoch 875/2000 +2024-11-11 17:49:58,731 Current Learning Rate: 0.0069134172 +2024-11-11 17:49:58,731 Train Loss: 0.0011312, Val 
Loss: 0.0011140 +2024-11-11 17:49:58,732 Epoch 876/2000 +2024-11-11 17:50:14,029 Current Learning Rate: 0.0068406228 +2024-11-11 17:50:14,029 Train Loss: 0.0009896, Val Loss: 0.0011163 +2024-11-11 17:50:14,029 Epoch 877/2000 +2024-11-11 17:50:31,767 Current Learning Rate: 0.0067673742 +2024-11-11 17:50:31,767 Train Loss: 0.0009746, Val Loss: 0.0010994 +2024-11-11 17:50:31,767 Epoch 878/2000 +2024-11-11 17:50:47,210 Current Learning Rate: 0.0066936896 +2024-11-11 17:50:47,210 Train Loss: 0.0010055, Val Loss: 0.0011662 +2024-11-11 17:50:47,211 Epoch 879/2000 +2024-11-11 17:51:02,431 Current Learning Rate: 0.0066195871 +2024-11-11 17:51:02,432 Train Loss: 0.0010542, Val Loss: 0.0011160 +2024-11-11 17:51:02,432 Epoch 880/2000 +2024-11-11 17:51:18,395 Current Learning Rate: 0.0065450850 +2024-11-11 17:51:18,395 Train Loss: 0.0009923, Val Loss: 0.0011051 +2024-11-11 17:51:18,395 Epoch 881/2000 +2024-11-11 17:51:34,914 Current Learning Rate: 0.0064702016 +2024-11-11 17:51:34,914 Train Loss: 0.0008751, Val Loss: 0.0010801 +2024-11-11 17:51:34,914 Epoch 882/2000 +2024-11-11 17:51:50,837 Current Learning Rate: 0.0063949555 +2024-11-11 17:51:50,838 Train Loss: 0.0009270, Val Loss: 0.0010854 +2024-11-11 17:51:50,838 Epoch 883/2000 +2024-11-11 17:52:06,233 Current Learning Rate: 0.0063193652 +2024-11-11 17:52:06,234 Train Loss: 0.0009508, Val Loss: 0.0011039 +2024-11-11 17:52:06,234 Epoch 884/2000 +2024-11-11 17:52:21,901 Current Learning Rate: 0.0062434494 +2024-11-11 17:52:21,901 Train Loss: 0.0010031, Val Loss: 0.0010686 +2024-11-11 17:52:21,902 Epoch 885/2000 +2024-11-11 17:52:37,634 Current Learning Rate: 0.0061672268 +2024-11-11 17:52:37,635 Train Loss: 0.0010492, Val Loss: 0.0010652 +2024-11-11 17:52:37,635 Epoch 886/2000 +2024-11-11 17:52:53,867 Current Learning Rate: 0.0060907162 +2024-11-11 17:52:54,606 Train Loss: 0.0009808, Val Loss: 0.0010377 +2024-11-11 17:52:54,607 Epoch 887/2000 +2024-11-11 17:53:10,104 Current Learning Rate: 0.0060139365 +2024-11-11 
17:53:10,911 Train Loss: 0.0009252, Val Loss: 0.0010329 +2024-11-11 17:53:10,911 Epoch 888/2000 +2024-11-11 17:53:25,175 Current Learning Rate: 0.0059369066 +2024-11-11 17:53:25,176 Train Loss: 0.0009400, Val Loss: 0.0010599 +2024-11-11 17:53:25,176 Epoch 889/2000 +2024-11-11 17:53:41,353 Current Learning Rate: 0.0058596455 +2024-11-11 17:53:41,353 Train Loss: 0.0010006, Val Loss: 0.0011004 +2024-11-11 17:53:41,354 Epoch 890/2000 +2024-11-11 17:53:57,976 Current Learning Rate: 0.0057821723 +2024-11-11 17:53:57,976 Train Loss: 0.0008872, Val Loss: 0.0010590 +2024-11-11 17:53:57,976 Epoch 891/2000 +2024-11-11 17:54:13,476 Current Learning Rate: 0.0057045062 +2024-11-11 17:54:14,595 Train Loss: 0.0009031, Val Loss: 0.0010181 +2024-11-11 17:54:14,596 Epoch 892/2000 +2024-11-11 17:54:30,074 Current Learning Rate: 0.0056266662 +2024-11-11 17:54:30,818 Train Loss: 0.0008798, Val Loss: 0.0010091 +2024-11-11 17:54:30,818 Epoch 893/2000 +2024-11-11 17:54:45,569 Current Learning Rate: 0.0055486716 +2024-11-11 17:54:45,570 Train Loss: 0.0008149, Val Loss: 0.0010137 +2024-11-11 17:54:45,570 Epoch 894/2000 +2024-11-11 17:55:01,152 Current Learning Rate: 0.0054705416 +2024-11-11 17:55:01,152 Train Loss: 0.0009229, Val Loss: 0.0010990 +2024-11-11 17:55:01,152 Epoch 895/2000 +2024-11-11 17:55:16,684 Current Learning Rate: 0.0053922955 +2024-11-11 17:55:16,685 Train Loss: 0.0008864, Val Loss: 0.0010769 +2024-11-11 17:55:16,685 Epoch 896/2000 +2024-11-11 17:55:32,393 Current Learning Rate: 0.0053139526 +2024-11-11 17:55:32,394 Train Loss: 0.0008386, Val Loss: 0.0010794 +2024-11-11 17:55:32,394 Epoch 897/2000 +2024-11-11 17:55:48,947 Current Learning Rate: 0.0052355323 +2024-11-11 17:55:48,948 Train Loss: 0.0009225, Val Loss: 0.0010566 +2024-11-11 17:55:48,948 Epoch 898/2000 +2024-11-11 17:56:05,724 Current Learning Rate: 0.0051570538 +2024-11-11 17:56:05,725 Train Loss: 0.0009125, Val Loss: 0.0010158 +2024-11-11 17:56:05,725 Epoch 899/2000 +2024-11-11 17:56:22,070 Current Learning 
Rate: 0.0050785366 +2024-11-11 17:56:22,070 Train Loss: 0.0010058, Val Loss: 0.0010308 +2024-11-11 17:56:22,071 Epoch 900/2000 +2024-11-11 17:56:38,467 Current Learning Rate: 0.0050000000 +2024-11-11 17:56:38,467 Train Loss: 0.0010355, Val Loss: 0.0010150 +2024-11-11 17:56:38,468 Epoch 901/2000 +2024-11-11 17:56:54,282 Current Learning Rate: 0.0049214634 +2024-11-11 17:56:54,282 Train Loss: 0.0009709, Val Loss: 0.0010411 +2024-11-11 17:56:54,282 Epoch 902/2000 +2024-11-11 17:57:10,396 Current Learning Rate: 0.0048429462 +2024-11-11 17:57:11,133 Train Loss: 0.0008987, Val Loss: 0.0009861 +2024-11-11 17:57:11,133 Epoch 903/2000 +2024-11-11 17:57:26,603 Current Learning Rate: 0.0047644677 +2024-11-11 17:57:27,392 Train Loss: 0.0008029, Val Loss: 0.0009636 +2024-11-11 17:57:27,393 Epoch 904/2000 +2024-11-11 17:57:42,786 Current Learning Rate: 0.0046860474 +2024-11-11 17:57:42,786 Train Loss: 0.0009061, Val Loss: 0.0009678 +2024-11-11 17:57:42,787 Epoch 905/2000 +2024-11-11 17:57:58,751 Current Learning Rate: 0.0046077045 +2024-11-11 17:57:58,751 Train Loss: 0.0008727, Val Loss: 0.0009731 +2024-11-11 17:57:58,751 Epoch 906/2000 +2024-11-11 17:58:14,351 Current Learning Rate: 0.0045294584 +2024-11-11 17:58:14,352 Train Loss: 0.0008415, Val Loss: 0.0010212 +2024-11-11 17:58:14,352 Epoch 907/2000 +2024-11-11 17:58:29,628 Current Learning Rate: 0.0044513284 +2024-11-11 17:58:29,628 Train Loss: 0.0008876, Val Loss: 0.0009804 +2024-11-11 17:58:29,628 Epoch 908/2000 +2024-11-11 17:58:45,530 Current Learning Rate: 0.0043733338 +2024-11-11 17:58:45,531 Train Loss: 0.0009508, Val Loss: 0.0009787 +2024-11-11 17:58:45,531 Epoch 909/2000 +2024-11-11 17:59:01,382 Current Learning Rate: 0.0042954938 +2024-11-11 17:59:04,223 Train Loss: 0.0009103, Val Loss: 0.0009574 +2024-11-11 17:59:04,223 Epoch 910/2000 +2024-11-11 17:59:19,515 Current Learning Rate: 0.0042178277 +2024-11-11 17:59:19,516 Train Loss: 0.0008181, Val Loss: 0.0009654 +2024-11-11 17:59:19,516 Epoch 911/2000 +2024-11-11 
17:59:35,741 Current Learning Rate: 0.0041403545 +2024-11-11 17:59:35,741 Train Loss: 0.0009086, Val Loss: 0.0009607 +2024-11-11 17:59:35,762 Epoch 912/2000 +2024-11-11 17:59:50,502 Current Learning Rate: 0.0040630934 +2024-11-11 17:59:51,211 Train Loss: 0.0008478, Val Loss: 0.0009561 +2024-11-11 17:59:51,211 Epoch 913/2000 +2024-11-11 18:00:06,437 Current Learning Rate: 0.0039860635 +2024-11-11 18:00:06,438 Train Loss: 0.0008787, Val Loss: 0.0009568 +2024-11-11 18:00:06,438 Epoch 914/2000 +2024-11-11 18:00:22,238 Current Learning Rate: 0.0039092838 +2024-11-11 18:00:23,027 Train Loss: 0.0008378, Val Loss: 0.0009512 +2024-11-11 18:00:23,027 Epoch 915/2000 +2024-11-11 18:00:39,011 Current Learning Rate: 0.0038327732 +2024-11-11 18:00:39,882 Train Loss: 0.0008433, Val Loss: 0.0009452 +2024-11-11 18:00:39,882 Epoch 916/2000 +2024-11-11 18:00:55,021 Current Learning Rate: 0.0037565506 +2024-11-11 18:00:55,869 Train Loss: 0.0008674, Val Loss: 0.0009419 +2024-11-11 18:00:55,870 Epoch 917/2000 +2024-11-11 18:01:10,901 Current Learning Rate: 0.0036806348 +2024-11-11 18:01:12,011 Train Loss: 0.0007995, Val Loss: 0.0009365 +2024-11-11 18:01:12,012 Epoch 918/2000 +2024-11-11 18:01:28,396 Current Learning Rate: 0.0036050445 +2024-11-11 18:01:29,410 Train Loss: 0.0008046, Val Loss: 0.0009340 +2024-11-11 18:01:29,410 Epoch 919/2000 +2024-11-11 18:01:45,619 Current Learning Rate: 0.0035297984 +2024-11-11 18:01:45,619 Train Loss: 0.0009378, Val Loss: 0.0009402 +2024-11-11 18:01:45,620 Epoch 920/2000 +2024-11-11 18:02:01,212 Current Learning Rate: 0.0034549150 +2024-11-11 18:02:01,213 Train Loss: 0.0008981, Val Loss: 0.0009435 +2024-11-11 18:02:01,213 Epoch 921/2000 +2024-11-11 18:02:18,387 Current Learning Rate: 0.0033804129 +2024-11-11 18:02:18,388 Train Loss: 0.0009236, Val Loss: 0.0009714 +2024-11-11 18:02:18,388 Epoch 922/2000 +2024-11-11 18:02:34,038 Current Learning Rate: 0.0033063104 +2024-11-11 18:02:34,039 Train Loss: 0.0007968, Val Loss: 0.0010329 +2024-11-11 
18:02:34,040 Epoch 923/2000 +2024-11-11 18:02:49,900 Current Learning Rate: 0.0032326258 +2024-11-11 18:02:49,901 Train Loss: 0.0008474, Val Loss: 0.0009551 +2024-11-11 18:02:49,901 Epoch 924/2000 +2024-11-11 18:03:05,886 Current Learning Rate: 0.0031593772 +2024-11-11 18:03:05,887 Train Loss: 0.0008605, Val Loss: 0.0009423 +2024-11-11 18:03:05,887 Epoch 925/2000 +2024-11-11 18:03:22,163 Current Learning Rate: 0.0030865828 +2024-11-11 18:03:22,164 Train Loss: 0.0007879, Val Loss: 0.0009484 +2024-11-11 18:03:22,164 Epoch 926/2000 +2024-11-11 18:03:37,523 Current Learning Rate: 0.0030142605 +2024-11-11 18:03:37,523 Train Loss: 0.0007850, Val Loss: 0.0009422 +2024-11-11 18:03:37,523 Epoch 927/2000 +2024-11-11 18:03:53,902 Current Learning Rate: 0.0029424282 +2024-11-11 18:03:53,902 Train Loss: 0.0008499, Val Loss: 0.0009353 +2024-11-11 18:03:53,903 Epoch 928/2000 +2024-11-11 18:04:09,285 Current Learning Rate: 0.0028711035 +2024-11-11 18:04:10,081 Train Loss: 0.0007770, Val Loss: 0.0009291 +2024-11-11 18:04:10,082 Epoch 929/2000 +2024-11-11 18:04:25,290 Current Learning Rate: 0.0028003042 +2024-11-11 18:04:26,011 Train Loss: 0.0007953, Val Loss: 0.0009265 +2024-11-11 18:04:26,011 Epoch 930/2000 +2024-11-11 18:04:41,158 Current Learning Rate: 0.0027300475 +2024-11-11 18:04:41,159 Train Loss: 0.0008035, Val Loss: 0.0009290 +2024-11-11 18:04:41,159 Epoch 931/2000 +2024-11-11 18:04:56,901 Current Learning Rate: 0.0026603509 +2024-11-11 18:04:57,656 Train Loss: 0.0008269, Val Loss: 0.0009218 +2024-11-11 18:04:57,656 Epoch 932/2000 +2024-11-11 18:05:12,502 Current Learning Rate: 0.0025912316 +2024-11-11 18:05:13,290 Train Loss: 0.0008099, Val Loss: 0.0009216 +2024-11-11 18:05:13,290 Epoch 933/2000 +2024-11-11 18:05:27,932 Current Learning Rate: 0.0025227067 +2024-11-11 18:05:28,759 Train Loss: 0.0007953, Val Loss: 0.0009200 +2024-11-11 18:05:28,759 Epoch 934/2000 +2024-11-11 18:05:44,188 Current Learning Rate: 0.0024547929 +2024-11-11 18:05:44,189 Train Loss: 0.0008011, Val 
Loss: 0.0009216 +2024-11-11 18:05:44,189 Epoch 935/2000 +2024-11-11 18:06:00,530 Current Learning Rate: 0.0023875072 +2024-11-11 18:06:01,545 Train Loss: 0.0007263, Val Loss: 0.0009178 +2024-11-11 18:06:01,545 Epoch 936/2000 +2024-11-11 18:06:16,635 Current Learning Rate: 0.0023208660 +2024-11-11 18:06:17,452 Train Loss: 0.0007329, Val Loss: 0.0009140 +2024-11-11 18:06:17,452 Epoch 937/2000 +2024-11-11 18:06:32,328 Current Learning Rate: 0.0022548859 +2024-11-11 18:06:33,096 Train Loss: 0.0007991, Val Loss: 0.0009119 +2024-11-11 18:06:33,096 Epoch 938/2000 +2024-11-11 18:06:49,252 Current Learning Rate: 0.0021895831 +2024-11-11 18:06:49,985 Train Loss: 0.0007170, Val Loss: 0.0009097 +2024-11-11 18:06:49,986 Epoch 939/2000 +2024-11-11 18:07:04,742 Current Learning Rate: 0.0021249737 +2024-11-11 18:07:04,743 Train Loss: 0.0008002, Val Loss: 0.0009106 +2024-11-11 18:07:04,743 Epoch 940/2000 +2024-11-11 18:07:20,276 Current Learning Rate: 0.0020610737 +2024-11-11 18:07:21,109 Train Loss: 0.0007906, Val Loss: 0.0009069 +2024-11-11 18:07:21,109 Epoch 941/2000 +2024-11-11 18:07:36,298 Current Learning Rate: 0.0019978989 +2024-11-11 18:07:36,299 Train Loss: 0.0008600, Val Loss: 0.0009077 +2024-11-11 18:07:36,300 Epoch 942/2000 +2024-11-11 18:07:51,999 Current Learning Rate: 0.0019354647 +2024-11-11 18:07:52,859 Train Loss: 0.0008623, Val Loss: 0.0009002 +2024-11-11 18:07:52,859 Epoch 943/2000 +2024-11-11 18:08:08,792 Current Learning Rate: 0.0018737867 +2024-11-11 18:08:09,871 Train Loss: 0.0007317, Val Loss: 0.0008945 +2024-11-11 18:08:09,871 Epoch 944/2000 +2024-11-11 18:08:25,556 Current Learning Rate: 0.0018128801 +2024-11-11 18:08:26,498 Train Loss: 0.0007197, Val Loss: 0.0008931 +2024-11-11 18:08:26,499 Epoch 945/2000 +2024-11-11 18:08:42,506 Current Learning Rate: 0.0017527598 +2024-11-11 18:08:43,408 Train Loss: 0.0008533, Val Loss: 0.0008858 +2024-11-11 18:08:43,408 Epoch 946/2000 +2024-11-11 18:08:59,082 Current Learning Rate: 0.0016934407 +2024-11-11 
18:08:59,864 Train Loss: 0.0007981, Val Loss: 0.0008846 +2024-11-11 18:08:59,864 Epoch 947/2000 +2024-11-11 18:09:14,839 Current Learning Rate: 0.0016349374 +2024-11-11 18:09:15,885 Train Loss: 0.0007385, Val Loss: 0.0008841 +2024-11-11 18:09:15,886 Epoch 948/2000 +2024-11-11 18:09:32,184 Current Learning Rate: 0.0015772645 +2024-11-11 18:09:33,217 Train Loss: 0.0007402, Val Loss: 0.0008811 +2024-11-11 18:09:33,217 Epoch 949/2000 +2024-11-11 18:09:48,867 Current Learning Rate: 0.0015204360 +2024-11-11 18:09:49,828 Train Loss: 0.0007388, Val Loss: 0.0008791 +2024-11-11 18:09:49,828 Epoch 950/2000 +2024-11-11 18:10:05,394 Current Learning Rate: 0.0014644661 +2024-11-11 18:10:06,428 Train Loss: 0.0007365, Val Loss: 0.0008788 +2024-11-11 18:10:06,428 Epoch 951/2000 +2024-11-11 18:10:22,291 Current Learning Rate: 0.0014093685 +2024-11-11 18:10:23,337 Train Loss: 0.0008304, Val Loss: 0.0008780 +2024-11-11 18:10:23,337 Epoch 952/2000 +2024-11-11 18:10:39,622 Current Learning Rate: 0.0013551569 +2024-11-11 18:10:40,525 Train Loss: 0.0007347, Val Loss: 0.0008769 +2024-11-11 18:10:40,526 Epoch 953/2000 +2024-11-11 18:10:56,240 Current Learning Rate: 0.0013018445 +2024-11-11 18:10:57,262 Train Loss: 0.0007638, Val Loss: 0.0008759 +2024-11-11 18:10:57,262 Epoch 954/2000 +2024-11-11 18:11:13,149 Current Learning Rate: 0.0012494447 +2024-11-11 18:11:13,942 Train Loss: 0.0006930, Val Loss: 0.0008750 +2024-11-11 18:11:13,943 Epoch 955/2000 +2024-11-11 18:11:29,040 Current Learning Rate: 0.0011979702 +2024-11-11 18:11:30,024 Train Loss: 0.0007504, Val Loss: 0.0008742 +2024-11-11 18:11:30,024 Epoch 956/2000 +2024-11-11 18:11:46,069 Current Learning Rate: 0.0011474338 +2024-11-11 18:11:46,819 Train Loss: 0.0007186, Val Loss: 0.0008726 +2024-11-11 18:11:46,820 Epoch 957/2000 +2024-11-11 18:12:02,281 Current Learning Rate: 0.0010978480 +2024-11-11 18:12:04,771 Train Loss: 0.0008264, Val Loss: 0.0008722 +2024-11-11 18:12:04,771 Epoch 958/2000 +2024-11-11 18:12:20,546 Current Learning 
Rate: 0.0010492249 +2024-11-11 18:12:21,276 Train Loss: 0.0008291, Val Loss: 0.0008717 +2024-11-11 18:12:21,276 Epoch 959/2000 +2024-11-11 18:12:36,206 Current Learning Rate: 0.0010015767 +2024-11-11 18:12:37,073 Train Loss: 0.0007533, Val Loss: 0.0008700 +2024-11-11 18:12:37,073 Epoch 960/2000 +2024-11-11 18:12:51,389 Current Learning Rate: 0.0009549150 +2024-11-11 18:12:51,389 Train Loss: 0.0008044, Val Loss: 0.0008701 +2024-11-11 18:12:51,390 Epoch 961/2000 +2024-11-11 18:13:06,881 Current Learning Rate: 0.0009092514 +2024-11-11 18:13:06,881 Train Loss: 0.0009497, Val Loss: 0.0008702 +2024-11-11 18:13:06,882 Epoch 962/2000 +2024-11-11 18:13:22,443 Current Learning Rate: 0.0008645971 +2024-11-11 18:13:23,260 Train Loss: 0.0007587, Val Loss: 0.0008681 +2024-11-11 18:13:23,261 Epoch 963/2000 +2024-11-11 18:13:38,024 Current Learning Rate: 0.0008209632 +2024-11-11 18:13:38,840 Train Loss: 0.0007858, Val Loss: 0.0008676 +2024-11-11 18:13:38,840 Epoch 964/2000 +2024-11-11 18:13:53,426 Current Learning Rate: 0.0007783604 +2024-11-11 18:13:54,195 Train Loss: 0.0007155, Val Loss: 0.0008664 +2024-11-11 18:13:54,195 Epoch 965/2000 +2024-11-11 18:14:09,379 Current Learning Rate: 0.0007367992 +2024-11-11 18:14:10,408 Train Loss: 0.0008063, Val Loss: 0.0008662 +2024-11-11 18:14:10,409 Epoch 966/2000 +2024-11-11 18:14:26,624 Current Learning Rate: 0.0006962899 +2024-11-11 18:14:27,655 Train Loss: 0.0007275, Val Loss: 0.0008653 +2024-11-11 18:14:27,655 Epoch 967/2000 +2024-11-11 18:14:43,990 Current Learning Rate: 0.0006568424 +2024-11-11 18:14:45,042 Train Loss: 0.0007194, Val Loss: 0.0008642 +2024-11-11 18:14:45,043 Epoch 968/2000 +2024-11-11 18:15:00,679 Current Learning Rate: 0.0006184666 +2024-11-11 18:15:01,587 Train Loss: 0.0006976, Val Loss: 0.0008634 +2024-11-11 18:15:01,588 Epoch 969/2000 +2024-11-11 18:15:16,646 Current Learning Rate: 0.0005811718 +2024-11-11 18:15:17,453 Train Loss: 0.0007452, Val Loss: 0.0008625 +2024-11-11 18:15:17,454 Epoch 970/2000 +2024-11-11 
18:15:31,720 Current Learning Rate: 0.0005449674 +2024-11-11 18:15:32,505 Train Loss: 0.0007519, Val Loss: 0.0008623 +2024-11-11 18:15:32,505 Epoch 971/2000 +2024-11-11 18:15:47,098 Current Learning Rate: 0.0005098621 +2024-11-11 18:15:47,849 Train Loss: 0.0006804, Val Loss: 0.0008617 +2024-11-11 18:15:47,850 Epoch 972/2000 +2024-11-11 18:16:02,872 Current Learning Rate: 0.0004758647 +2024-11-11 18:16:05,217 Train Loss: 0.0007153, Val Loss: 0.0008612 +2024-11-11 18:16:05,217 Epoch 973/2000 +2024-11-11 18:16:19,452 Current Learning Rate: 0.0004429836 +2024-11-11 18:16:20,279 Train Loss: 0.0006899, Val Loss: 0.0008603 +2024-11-11 18:16:20,280 Epoch 974/2000 +2024-11-11 18:16:34,776 Current Learning Rate: 0.0004112269 +2024-11-11 18:16:35,585 Train Loss: 0.0008203, Val Loss: 0.0008598 +2024-11-11 18:16:35,585 Epoch 975/2000 +2024-11-11 18:16:50,092 Current Learning Rate: 0.0003806023 +2024-11-11 18:16:50,869 Train Loss: 0.0007900, Val Loss: 0.0008593 +2024-11-11 18:16:50,870 Epoch 976/2000 +2024-11-11 18:17:05,491 Current Learning Rate: 0.0003511176 +2024-11-11 18:17:06,261 Train Loss: 0.0007146, Val Loss: 0.0008588 +2024-11-11 18:17:06,261 Epoch 977/2000 +2024-11-11 18:17:20,973 Current Learning Rate: 0.0003227798 +2024-11-11 18:17:21,749 Train Loss: 0.0007156, Val Loss: 0.0008587 +2024-11-11 18:17:21,749 Epoch 978/2000 +2024-11-11 18:17:36,300 Current Learning Rate: 0.0002955962 +2024-11-11 18:17:36,301 Train Loss: 0.0008374, Val Loss: 0.0008588 +2024-11-11 18:17:36,301 Epoch 979/2000 +2024-11-11 18:17:51,839 Current Learning Rate: 0.0002695732 +2024-11-11 18:17:52,660 Train Loss: 0.0008155, Val Loss: 0.0008584 +2024-11-11 18:17:52,661 Epoch 980/2000 +2024-11-11 18:18:07,838 Current Learning Rate: 0.0002447174 +2024-11-11 18:18:08,876 Train Loss: 0.0007135, Val Loss: 0.0008578 +2024-11-11 18:18:08,877 Epoch 981/2000 +2024-11-11 18:18:25,298 Current Learning Rate: 0.0002210349 +2024-11-11 18:18:26,311 Train Loss: 0.0007606, Val Loss: 0.0008577 +2024-11-11 
18:18:26,311 Epoch 982/2000 +2024-11-11 18:18:42,014 Current Learning Rate: 0.0001985316 +2024-11-11 18:18:43,048 Train Loss: 0.0007418, Val Loss: 0.0008576 +2024-11-11 18:18:43,048 Epoch 983/2000 +2024-11-11 18:18:58,550 Current Learning Rate: 0.0001772129 +2024-11-11 18:18:58,551 Train Loss: 0.0007499, Val Loss: 0.0008576 +2024-11-11 18:18:58,551 Epoch 984/2000 +2024-11-11 18:19:14,874 Current Learning Rate: 0.0001570842 +2024-11-11 18:19:15,859 Train Loss: 0.0007082, Val Loss: 0.0008571 +2024-11-11 18:19:15,860 Epoch 985/2000 +2024-11-11 18:19:31,580 Current Learning Rate: 0.0001381504 +2024-11-11 18:19:32,311 Train Loss: 0.0007549, Val Loss: 0.0008567 +2024-11-11 18:19:32,311 Epoch 986/2000 +2024-11-11 18:19:46,666 Current Learning Rate: 0.0001204162 +2024-11-11 18:19:47,473 Train Loss: 0.0007097, Val Loss: 0.0008566 +2024-11-11 18:19:47,473 Epoch 987/2000 +2024-11-11 18:20:02,133 Current Learning Rate: 0.0001038859 +2024-11-11 18:20:04,571 Train Loss: 0.0007165, Val Loss: 0.0008563 +2024-11-11 18:20:04,572 Epoch 988/2000 +2024-11-11 18:20:18,843 Current Learning Rate: 0.0000885637 +2024-11-11 18:20:18,843 Train Loss: 0.0007841, Val Loss: 0.0008564 +2024-11-11 18:20:18,843 Epoch 989/2000 +2024-11-11 18:20:34,100 Current Learning Rate: 0.0000744534 +2024-11-11 18:20:34,857 Train Loss: 0.0007812, Val Loss: 0.0008561 +2024-11-11 18:20:34,857 Epoch 990/2000 +2024-11-11 18:20:50,019 Current Learning Rate: 0.0000615583 +2024-11-11 18:20:50,019 Train Loss: 0.0007407, Val Loss: 0.0008561 +2024-11-11 18:20:50,019 Epoch 991/2000 +2024-11-11 18:21:05,671 Current Learning Rate: 0.0000498817 +2024-11-11 18:21:06,509 Train Loss: 0.0007416, Val Loss: 0.0008561 +2024-11-11 18:21:06,509 Epoch 992/2000 +2024-11-11 18:21:21,475 Current Learning Rate: 0.0000394265 +2024-11-11 18:21:22,260 Train Loss: 0.0007115, Val Loss: 0.0008559 +2024-11-11 18:21:22,261 Epoch 993/2000 +2024-11-11 18:21:36,963 Current Learning Rate: 0.0000301952 +2024-11-11 18:21:37,822 Train Loss: 0.0007434, Val 
Loss: 0.0008559 +2024-11-11 18:21:37,822 Epoch 994/2000 +2024-11-11 18:21:52,460 Current Learning Rate: 0.0000221902 +2024-11-11 18:21:52,460 Train Loss: 0.0007455, Val Loss: 0.0008559 +2024-11-11 18:21:52,461 Epoch 995/2000 +2024-11-11 18:22:07,811 Current Learning Rate: 0.0000154133 +2024-11-11 18:22:08,572 Train Loss: 0.0008028, Val Loss: 0.0008559 +2024-11-11 18:22:08,573 Epoch 996/2000 +2024-11-11 18:22:23,237 Current Learning Rate: 0.0000098664 +2024-11-11 18:22:24,004 Train Loss: 0.0008328, Val Loss: 0.0008558 +2024-11-11 18:22:24,004 Epoch 997/2000 +2024-11-11 18:22:39,122 Current Learning Rate: 0.0000055506 +2024-11-11 18:22:39,124 Train Loss: 0.0008706, Val Loss: 0.0008559 +2024-11-11 18:22:39,124 Epoch 998/2000 +2024-11-11 18:22:55,385 Current Learning Rate: 0.0000024672 +2024-11-11 18:22:55,386 Train Loss: 0.0008158, Val Loss: 0.0008558 +2024-11-11 18:22:55,386 Epoch 999/2000 +2024-11-11 18:23:11,204 Current Learning Rate: 0.0000006168 +2024-11-11 18:23:11,941 Train Loss: 0.0007491, Val Loss: 0.0008558 +2024-11-11 18:23:11,941 Epoch 1000/2000 +2024-11-11 18:23:26,214 Current Learning Rate: 0.0000000000 +2024-11-11 18:23:26,991 Train Loss: 0.0007738, Val Loss: 0.0008558 +2024-11-11 18:23:26,992 Epoch 1001/2000 +2024-11-11 18:23:41,600 Current Learning Rate: 0.0000006168 +2024-11-11 18:23:41,601 Train Loss: 0.0007041, Val Loss: 0.0008558 +2024-11-11 18:23:41,601 Epoch 1002/2000 +2024-11-11 18:23:57,062 Current Learning Rate: 0.0000024672 +2024-11-11 18:23:57,062 Train Loss: 0.0007447, Val Loss: 0.0008558 +2024-11-11 18:23:57,062 Epoch 1003/2000 +2024-11-11 18:24:12,435 Current Learning Rate: 0.0000055506 +2024-11-11 18:24:13,188 Train Loss: 0.0007138, Val Loss: 0.0008558 +2024-11-11 18:24:13,189 Epoch 1004/2000 +2024-11-11 18:24:27,879 Current Learning Rate: 0.0000098664 +2024-11-11 18:24:28,752 Train Loss: 0.0007444, Val Loss: 0.0008558 +2024-11-11 18:24:28,752 Epoch 1005/2000 +2024-11-11 18:24:44,092 Current Learning Rate: 0.0000154133 +2024-11-11 
18:24:44,824 Train Loss: 0.0007624, Val Loss: 0.0008557 +2024-11-11 18:24:44,824 Epoch 1006/2000 +2024-11-11 18:25:00,032 Current Learning Rate: 0.0000221902 +2024-11-11 18:25:00,033 Train Loss: 0.0007101, Val Loss: 0.0008558 +2024-11-11 18:25:00,033 Epoch 1007/2000 +2024-11-11 18:25:15,982 Current Learning Rate: 0.0000301952 +2024-11-11 18:25:15,983 Train Loss: 0.0006978, Val Loss: 0.0008558 +2024-11-11 18:25:15,984 Epoch 1008/2000 +2024-11-11 18:25:32,062 Current Learning Rate: 0.0000394265 +2024-11-11 18:25:32,062 Train Loss: 0.0008121, Val Loss: 0.0008559 +2024-11-11 18:25:32,063 Epoch 1009/2000 +2024-11-11 18:25:47,703 Current Learning Rate: 0.0000498817 +2024-11-11 18:25:47,704 Train Loss: 0.0007845, Val Loss: 0.0008559 +2024-11-11 18:25:47,704 Epoch 1010/2000 +2024-11-11 18:26:03,148 Current Learning Rate: 0.0000615583 +2024-11-11 18:26:03,148 Train Loss: 0.0007137, Val Loss: 0.0008558 +2024-11-11 18:26:03,148 Epoch 1011/2000 +2024-11-11 18:26:18,618 Current Learning Rate: 0.0000744534 +2024-11-11 18:26:18,618 Train Loss: 0.0007413, Val Loss: 0.0008558 +2024-11-11 18:26:18,627 Epoch 1012/2000 +2024-11-11 18:26:34,565 Current Learning Rate: 0.0000885637 +2024-11-11 18:26:34,566 Train Loss: 0.0007160, Val Loss: 0.0008558 +2024-11-11 18:26:34,566 Epoch 1013/2000 +2024-11-11 18:26:50,242 Current Learning Rate: 0.0001038859 +2024-11-11 18:26:50,242 Train Loss: 0.0007124, Val Loss: 0.0008558 +2024-11-11 18:26:50,243 Epoch 1014/2000 +2024-11-11 18:27:06,393 Current Learning Rate: 0.0001204162 +2024-11-11 18:27:06,393 Train Loss: 0.0007834, Val Loss: 0.0008560 +2024-11-11 18:27:06,393 Epoch 1015/2000 +2024-11-11 18:27:21,665 Current Learning Rate: 0.0001381504 +2024-11-11 18:27:21,665 Train Loss: 0.0007124, Val Loss: 0.0008559 +2024-11-11 18:27:21,665 Epoch 1016/2000 +2024-11-11 18:27:37,500 Current Learning Rate: 0.0001570842 +2024-11-11 18:27:37,501 Train Loss: 0.0008102, Val Loss: 0.0008561 +2024-11-11 18:27:37,501 Epoch 1017/2000 +2024-11-11 18:27:53,433 Current 
Learning Rate: 0.0001772129 +2024-11-11 18:27:53,433 Train Loss: 0.0007366, Val Loss: 0.0008566 +2024-11-11 18:27:53,434 Epoch 1018/2000 +2024-11-11 18:28:09,970 Current Learning Rate: 0.0001985316 +2024-11-11 18:28:09,971 Train Loss: 0.0007153, Val Loss: 0.0008562 +2024-11-11 18:28:09,971 Epoch 1019/2000 +2024-11-11 18:28:25,920 Current Learning Rate: 0.0002210349 +2024-11-11 18:28:25,921 Train Loss: 0.0006716, Val Loss: 0.0008560 +2024-11-11 18:28:25,921 Epoch 1020/2000 +2024-11-11 18:28:42,797 Current Learning Rate: 0.0002447174 +2024-11-11 18:28:42,797 Train Loss: 0.0007545, Val Loss: 0.0008561 +2024-11-11 18:28:42,798 Epoch 1021/2000 +2024-11-11 18:28:59,024 Current Learning Rate: 0.0002695732 +2024-11-11 18:28:59,024 Train Loss: 0.0008152, Val Loss: 0.0008583 +2024-11-11 18:28:59,025 Epoch 1022/2000 +2024-11-11 18:29:15,344 Current Learning Rate: 0.0002955962 +2024-11-11 18:29:15,345 Train Loss: 0.0007431, Val Loss: 0.0008562 +2024-11-11 18:29:15,345 Epoch 1023/2000 +2024-11-11 18:29:30,976 Current Learning Rate: 0.0003227798 +2024-11-11 18:29:30,978 Train Loss: 0.0008182, Val Loss: 0.0008566 +2024-11-11 18:29:30,978 Epoch 1024/2000 +2024-11-11 18:29:46,644 Current Learning Rate: 0.0003511176 +2024-11-11 18:29:46,645 Train Loss: 0.0007122, Val Loss: 0.0008565 +2024-11-11 18:29:46,645 Epoch 1025/2000 +2024-11-11 18:30:02,054 Current Learning Rate: 0.0003806023 +2024-11-11 18:30:02,054 Train Loss: 0.0007820, Val Loss: 0.0008568 +2024-11-11 18:30:02,055 Epoch 1026/2000 +2024-11-11 18:30:17,443 Current Learning Rate: 0.0004112269 +2024-11-11 18:30:17,444 Train Loss: 0.0008278, Val Loss: 0.0008565 +2024-11-11 18:30:17,444 Epoch 1027/2000 +2024-11-11 18:30:33,751 Current Learning Rate: 0.0004429836 +2024-11-11 18:30:33,751 Train Loss: 0.0007511, Val Loss: 0.0008568 +2024-11-11 18:30:33,752 Epoch 1028/2000 +2024-11-11 18:30:49,862 Current Learning Rate: 0.0004758647 +2024-11-11 18:30:49,863 Train Loss: 0.0007972, Val Loss: 0.0008570 +2024-11-11 18:30:49,863 Epoch 
1029/2000 +2024-11-11 18:31:05,444 Current Learning Rate: 0.0005098621 +2024-11-11 18:31:05,444 Train Loss: 0.0008241, Val Loss: 0.0008577 +2024-11-11 18:31:05,445 Epoch 1030/2000 +2024-11-11 18:31:21,490 Current Learning Rate: 0.0005449674 +2024-11-11 18:31:21,491 Train Loss: 0.0007440, Val Loss: 0.0008575 +2024-11-11 18:31:21,491 Epoch 1031/2000 +2024-11-11 18:31:37,003 Current Learning Rate: 0.0005811718 +2024-11-11 18:31:37,004 Train Loss: 0.0007159, Val Loss: 0.0008574 +2024-11-11 18:31:37,004 Epoch 1032/2000 +2024-11-11 18:31:53,886 Current Learning Rate: 0.0006184666 +2024-11-11 18:31:53,886 Train Loss: 0.0007748, Val Loss: 0.0008571 +2024-11-11 18:31:53,887 Epoch 1033/2000 +2024-11-11 18:32:10,155 Current Learning Rate: 0.0006568424 +2024-11-11 18:32:10,155 Train Loss: 0.0006881, Val Loss: 0.0008574 +2024-11-11 18:32:10,155 Epoch 1034/2000 +2024-11-11 18:32:26,939 Current Learning Rate: 0.0006962899 +2024-11-11 18:32:26,940 Train Loss: 0.0007358, Val Loss: 0.0008586 +2024-11-11 18:32:26,940 Epoch 1035/2000 +2024-11-11 18:32:42,501 Current Learning Rate: 0.0007367992 +2024-11-11 18:32:42,502 Train Loss: 0.0007446, Val Loss: 0.0008596 +2024-11-11 18:32:42,502 Epoch 1036/2000 +2024-11-11 18:32:58,778 Current Learning Rate: 0.0007783604 +2024-11-11 18:32:58,778 Train Loss: 0.0007498, Val Loss: 0.0008631 +2024-11-11 18:32:58,778 Epoch 1037/2000 +2024-11-11 18:33:14,752 Current Learning Rate: 0.0008209632 +2024-11-11 18:33:14,753 Train Loss: 0.0006730, Val Loss: 0.0008626 +2024-11-11 18:33:14,753 Epoch 1038/2000 +2024-11-11 18:33:30,112 Current Learning Rate: 0.0008645971 +2024-11-11 18:33:30,113 Train Loss: 0.0007084, Val Loss: 0.0008636 +2024-11-11 18:33:30,113 Epoch 1039/2000 +2024-11-11 18:33:45,602 Current Learning Rate: 0.0009092514 +2024-11-11 18:33:45,602 Train Loss: 0.0007876, Val Loss: 0.0008646 +2024-11-11 18:33:45,603 Epoch 1040/2000 +2024-11-11 18:34:02,321 Current Learning Rate: 0.0009549150 +2024-11-11 18:34:02,322 Train Loss: 0.0007497, Val Loss: 
0.0008628 +2024-11-11 18:34:02,323 Epoch 1041/2000 +2024-11-11 18:34:18,006 Current Learning Rate: 0.0010015767 +2024-11-11 18:34:18,007 Train Loss: 0.0007301, Val Loss: 0.0008607 +2024-11-11 18:34:18,007 Epoch 1042/2000 +2024-11-11 18:34:35,079 Current Learning Rate: 0.0010492249 +2024-11-11 18:34:35,079 Train Loss: 0.0007282, Val Loss: 0.0008592 +2024-11-11 18:34:35,080 Epoch 1043/2000 +2024-11-11 18:34:51,134 Current Learning Rate: 0.0010978480 +2024-11-11 18:34:52,226 Train Loss: 0.0007161, Val Loss: 0.0008554 +2024-11-11 18:34:52,227 Epoch 1044/2000 +2024-11-11 18:35:08,413 Current Learning Rate: 0.0011474338 +2024-11-11 18:35:08,414 Train Loss: 0.0007101, Val Loss: 0.0008654 +2024-11-11 18:35:08,415 Epoch 1045/2000 +2024-11-11 18:35:23,793 Current Learning Rate: 0.0011979702 +2024-11-11 18:35:23,794 Train Loss: 0.0008156, Val Loss: 0.0008671 +2024-11-11 18:35:23,794 Epoch 1046/2000 +2024-11-11 18:35:39,908 Current Learning Rate: 0.0012494447 +2024-11-11 18:35:39,909 Train Loss: 0.0006861, Val Loss: 0.0008554 +2024-11-11 18:35:39,909 Epoch 1047/2000 +2024-11-11 18:35:55,655 Current Learning Rate: 0.0013018445 +2024-11-11 18:35:55,656 Train Loss: 0.0007644, Val Loss: 0.0008688 +2024-11-11 18:35:55,656 Epoch 1048/2000 +2024-11-11 18:36:11,428 Current Learning Rate: 0.0013551569 +2024-11-11 18:36:11,429 Train Loss: 0.0007607, Val Loss: 0.0008591 +2024-11-11 18:36:11,429 Epoch 1049/2000 +2024-11-11 18:36:28,192 Current Learning Rate: 0.0014093685 +2024-11-11 18:36:28,192 Train Loss: 0.0007149, Val Loss: 0.0008560 +2024-11-11 18:36:28,193 Epoch 1050/2000 +2024-11-11 18:36:44,685 Current Learning Rate: 0.0014644661 +2024-11-11 18:36:44,686 Train Loss: 0.0008344, Val Loss: 0.0008616 +2024-11-11 18:36:44,686 Epoch 1051/2000 +2024-11-11 18:37:00,530 Current Learning Rate: 0.0015204360 +2024-11-11 18:37:00,531 Train Loss: 0.0006756, Val Loss: 0.0008582 +2024-11-11 18:37:00,531 Epoch 1052/2000 +2024-11-11 18:37:17,346 Current Learning Rate: 0.0015772645 +2024-11-11 
18:37:17,346 Train Loss: 0.0007516, Val Loss: 0.0008673 +2024-11-11 18:37:17,347 Epoch 1053/2000 +2024-11-11 18:37:32,817 Current Learning Rate: 0.0016349374 +2024-11-11 18:37:32,818 Train Loss: 0.0006806, Val Loss: 0.0008589 +2024-11-11 18:37:32,818 Epoch 1054/2000 +2024-11-11 18:37:48,166 Current Learning Rate: 0.0016934407 +2024-11-11 18:37:48,167 Train Loss: 0.0006719, Val Loss: 0.0008565 +2024-11-11 18:37:48,167 Epoch 1055/2000 +2024-11-11 18:38:03,475 Current Learning Rate: 0.0017527598 +2024-11-11 18:38:03,476 Train Loss: 0.0007959, Val Loss: 0.0008839 +2024-11-11 18:38:03,476 Epoch 1056/2000 +2024-11-11 18:38:19,173 Current Learning Rate: 0.0018128801 +2024-11-11 18:38:19,173 Train Loss: 0.0008522, Val Loss: 0.0009142 +2024-11-11 18:38:19,174 Epoch 1057/2000 +2024-11-11 18:38:34,421 Current Learning Rate: 0.0018737867 +2024-11-11 18:38:34,421 Train Loss: 0.0007066, Val Loss: 0.0008673 +2024-11-11 18:38:34,422 Epoch 1058/2000 +2024-11-11 18:38:50,048 Current Learning Rate: 0.0019354647 +2024-11-11 18:38:50,049 Train Loss: 0.0008207, Val Loss: 0.0009349 +2024-11-11 18:38:50,049 Epoch 1059/2000 +2024-11-11 18:39:05,891 Current Learning Rate: 0.0019978989 +2024-11-11 18:39:05,892 Train Loss: 0.0007879, Val Loss: 0.0008692 +2024-11-11 18:39:05,892 Epoch 1060/2000 +2024-11-11 18:39:21,306 Current Learning Rate: 0.0020610737 +2024-11-11 18:39:21,306 Train Loss: 0.0007605, Val Loss: 0.0008579 +2024-11-11 18:39:21,307 Epoch 1061/2000 +2024-11-11 18:39:36,695 Current Learning Rate: 0.0021249737 +2024-11-11 18:39:37,468 Train Loss: 0.0007083, Val Loss: 0.0008546 +2024-11-11 18:39:37,468 Epoch 1062/2000 +2024-11-11 18:39:52,103 Current Learning Rate: 0.0021895831 +2024-11-11 18:39:52,103 Train Loss: 0.0007383, Val Loss: 0.0008613 +2024-11-11 18:39:52,104 Epoch 1063/2000 +2024-11-11 18:40:08,285 Current Learning Rate: 0.0022548859 +2024-11-11 18:40:09,076 Train Loss: 0.0007759, Val Loss: 0.0008530 +2024-11-11 18:40:09,076 Epoch 1064/2000 +2024-11-11 18:40:24,478 Current 
Learning Rate: 0.0023208660 +2024-11-11 18:40:24,479 Train Loss: 0.0008380, Val Loss: 0.0009085 +2024-11-11 18:40:24,479 Epoch 1065/2000 +2024-11-11 18:40:39,403 Current Learning Rate: 0.0023875072 +2024-11-11 18:40:39,404 Train Loss: 0.0007227, Val Loss: 0.0008622 +2024-11-11 18:40:39,404 Epoch 1066/2000 +2024-11-11 18:40:54,462 Current Learning Rate: 0.0024547929 +2024-11-11 18:40:54,462 Train Loss: 0.0007269, Val Loss: 0.0008761 +2024-11-11 18:40:54,462 Epoch 1067/2000 +2024-11-11 18:41:09,654 Current Learning Rate: 0.0025227067 +2024-11-11 18:41:09,654 Train Loss: 0.0007267, Val Loss: 0.0008554 +2024-11-11 18:41:09,655 Epoch 1068/2000 +2024-11-11 18:41:25,103 Current Learning Rate: 0.0025912316 +2024-11-11 18:41:25,104 Train Loss: 0.0007440, Val Loss: 0.0008720 +2024-11-11 18:41:25,104 Epoch 1069/2000 +2024-11-11 18:41:40,784 Current Learning Rate: 0.0026603509 +2024-11-11 18:41:40,785 Train Loss: 0.0007707, Val Loss: 0.0008668 +2024-11-11 18:41:40,785 Epoch 1070/2000 +2024-11-11 18:41:56,935 Current Learning Rate: 0.0027300475 +2024-11-11 18:41:56,935 Train Loss: 0.0008534, Val Loss: 0.0008801 +2024-11-11 18:41:56,936 Epoch 1071/2000 +2024-11-11 18:42:13,057 Current Learning Rate: 0.0028003042 +2024-11-11 18:42:13,059 Train Loss: 0.0007330, Val Loss: 0.0008630 +2024-11-11 18:42:13,059 Epoch 1072/2000 +2024-11-11 18:42:28,888 Current Learning Rate: 0.0028711035 +2024-11-11 18:42:28,889 Train Loss: 0.0006813, Val Loss: 0.0008599 +2024-11-11 18:42:28,889 Epoch 1073/2000 +2024-11-11 18:42:44,922 Current Learning Rate: 0.0029424282 +2024-11-11 18:42:44,923 Train Loss: 0.0007425, Val Loss: 0.0009174 +2024-11-11 18:42:44,923 Epoch 1074/2000 +2024-11-11 18:43:00,446 Current Learning Rate: 0.0030142605 +2024-11-11 18:43:00,447 Train Loss: 0.0007606, Val Loss: 0.0008840 +2024-11-11 18:43:00,447 Epoch 1075/2000 +2024-11-11 18:43:15,730 Current Learning Rate: 0.0030865828 +2024-11-11 18:43:15,730 Train Loss: 0.0008498, Val Loss: 0.0008971 +2024-11-11 18:43:15,730 Epoch 
1076/2000 +2024-11-11 18:43:31,459 Current Learning Rate: 0.0031593772 +2024-11-11 18:43:31,460 Train Loss: 0.0008042, Val Loss: 0.0009118 +2024-11-11 18:43:31,460 Epoch 1077/2000 +2024-11-11 18:43:47,950 Current Learning Rate: 0.0032326258 +2024-11-11 18:43:47,951 Train Loss: 0.0007869, Val Loss: 0.0008806 +2024-11-11 18:43:47,951 Epoch 1078/2000 +2024-11-11 18:44:02,884 Current Learning Rate: 0.0033063104 +2024-11-11 18:44:02,885 Train Loss: 0.0007446, Val Loss: 0.0008635 +2024-11-11 18:44:02,885 Epoch 1079/2000 +2024-11-11 18:44:18,644 Current Learning Rate: 0.0033804129 +2024-11-11 18:44:18,644 Train Loss: 0.0008346, Val Loss: 0.0008997 +2024-11-11 18:44:18,645 Epoch 1080/2000 +2024-11-11 18:44:35,632 Current Learning Rate: 0.0034549150 +2024-11-11 18:44:35,633 Train Loss: 0.0007886, Val Loss: 0.0008844 +2024-11-11 18:44:35,633 Epoch 1081/2000 +2024-11-11 18:44:51,473 Current Learning Rate: 0.0035297984 +2024-11-11 18:44:51,474 Train Loss: 0.0007828, Val Loss: 0.0009006 +2024-11-11 18:44:51,474 Epoch 1082/2000 +2024-11-11 18:45:06,811 Current Learning Rate: 0.0036050445 +2024-11-11 18:45:06,812 Train Loss: 0.0007277, Val Loss: 0.0008892 +2024-11-11 18:45:06,812 Epoch 1083/2000 +2024-11-11 18:45:23,103 Current Learning Rate: 0.0036806348 +2024-11-11 18:45:23,104 Train Loss: 0.0007981, Val Loss: 0.0008976 +2024-11-11 18:45:23,104 Epoch 1084/2000 +2024-11-11 18:45:38,385 Current Learning Rate: 0.0037565506 +2024-11-11 18:45:38,385 Train Loss: 0.0007050, Val Loss: 0.0008793 +2024-11-11 18:45:38,385 Epoch 1085/2000 +2024-11-11 18:45:54,414 Current Learning Rate: 0.0038327732 +2024-11-11 18:45:54,415 Train Loss: 0.0009147, Val Loss: 0.0009100 +2024-11-11 18:45:54,415 Epoch 1086/2000 +2024-11-11 18:46:10,519 Current Learning Rate: 0.0039092838 +2024-11-11 18:46:10,520 Train Loss: 0.0008636, Val Loss: 0.0009302 +2024-11-11 18:46:10,520 Epoch 1087/2000 +2024-11-11 18:46:26,233 Current Learning Rate: 0.0039860635 +2024-11-11 18:46:26,233 Train Loss: 0.0009090, Val Loss: 
0.0009573 +2024-11-11 18:46:26,233 Epoch 1088/2000 +2024-11-11 18:46:41,469 Current Learning Rate: 0.0040630934 +2024-11-11 18:46:41,470 Train Loss: 0.0007566, Val Loss: 0.0009119 +2024-11-11 18:46:41,470 Epoch 1089/2000 +2024-11-11 18:46:56,869 Current Learning Rate: 0.0041403545 +2024-11-11 18:46:56,870 Train Loss: 0.0008079, Val Loss: 0.0009209 +2024-11-11 18:46:56,870 Epoch 1090/2000 +2024-11-11 18:47:12,678 Current Learning Rate: 0.0042178277 +2024-11-11 18:47:12,679 Train Loss: 0.0007778, Val Loss: 0.0009014 +2024-11-11 18:47:12,679 Epoch 1091/2000 +2024-11-11 18:47:28,858 Current Learning Rate: 0.0042954938 +2024-11-11 18:47:28,859 Train Loss: 0.0007078, Val Loss: 0.0008732 +2024-11-11 18:47:28,859 Epoch 1092/2000 +2024-11-11 18:47:45,369 Current Learning Rate: 0.0043733338 +2024-11-11 18:47:45,370 Train Loss: 0.0008591, Val Loss: 0.0009461 +2024-11-11 18:47:45,370 Epoch 1093/2000 +2024-11-11 18:48:01,773 Current Learning Rate: 0.0044513284 +2024-11-11 18:48:01,773 Train Loss: 0.0007430, Val Loss: 0.0009078 +2024-11-11 18:48:01,774 Epoch 1094/2000 +2024-11-11 18:48:16,770 Current Learning Rate: 0.0045294584 +2024-11-11 18:48:16,771 Train Loss: 0.0008409, Val Loss: 0.0009163 +2024-11-11 18:48:16,771 Epoch 1095/2000 +2024-11-11 18:48:32,376 Current Learning Rate: 0.0046077045 +2024-11-11 18:48:32,377 Train Loss: 0.0007834, Val Loss: 0.0009657 +2024-11-11 18:48:32,377 Epoch 1096/2000 +2024-11-11 18:48:47,756 Current Learning Rate: 0.0046860474 +2024-11-11 18:48:47,756 Train Loss: 0.0007785, Val Loss: 0.0009072 +2024-11-11 18:48:47,756 Epoch 1097/2000 +2024-11-11 18:49:03,674 Current Learning Rate: 0.0047644677 +2024-11-11 18:49:03,675 Train Loss: 0.0008473, Val Loss: 0.0009193 +2024-11-11 18:49:03,675 Epoch 1098/2000 +2024-11-11 18:49:19,285 Current Learning Rate: 0.0048429462 +2024-11-11 18:49:19,286 Train Loss: 0.0009198, Val Loss: 0.0009047 +2024-11-11 18:49:19,286 Epoch 1099/2000 +2024-11-11 18:49:35,804 Current Learning Rate: 0.0049214634 +2024-11-11 
18:49:35,805 Train Loss: 0.0007513, Val Loss: 0.0008817 +2024-11-11 18:49:35,805 Epoch 1100/2000 +2024-11-11 18:49:51,202 Current Learning Rate: 0.0050000000 +2024-11-11 18:49:51,203 Train Loss: 0.0006984, Val Loss: 0.0008786 +2024-11-11 18:49:51,203 Epoch 1101/2000 +2024-11-11 18:50:06,773 Current Learning Rate: 0.0050785366 +2024-11-11 18:50:06,774 Train Loss: 0.0007898, Val Loss: 0.0009168 +2024-11-11 18:50:06,774 Epoch 1102/2000 +2024-11-11 18:50:23,111 Current Learning Rate: 0.0051570538 +2024-11-11 18:50:23,112 Train Loss: 0.0007377, Val Loss: 0.0009072 +2024-11-11 18:50:23,112 Epoch 1103/2000 +2024-11-11 18:50:38,981 Current Learning Rate: 0.0052355323 +2024-11-11 18:50:38,981 Train Loss: 0.0007992, Val Loss: 0.0009590 +2024-11-11 18:50:38,982 Epoch 1104/2000 +2024-11-11 18:50:54,636 Current Learning Rate: 0.0053139526 +2024-11-11 18:50:54,637 Train Loss: 0.0009170, Val Loss: 0.0009772 +2024-11-11 18:50:54,637 Epoch 1105/2000 +2024-11-11 18:51:11,338 Current Learning Rate: 0.0053922955 +2024-11-11 18:51:11,338 Train Loss: 0.0008388, Val Loss: 0.0009675 +2024-11-11 18:51:11,339 Epoch 1106/2000 +2024-11-11 18:51:27,485 Current Learning Rate: 0.0054705416 +2024-11-11 18:51:27,487 Train Loss: 0.0008355, Val Loss: 0.0010288 +2024-11-11 18:51:27,487 Epoch 1107/2000 +2024-11-11 18:51:43,126 Current Learning Rate: 0.0055486716 +2024-11-11 18:51:43,126 Train Loss: 0.0007842, Val Loss: 0.0009114 +2024-11-11 18:51:43,127 Epoch 1108/2000 +2024-11-11 18:51:59,493 Current Learning Rate: 0.0056266662 +2024-11-11 18:51:59,495 Train Loss: 0.0008256, Val Loss: 0.0009426 +2024-11-11 18:51:59,495 Epoch 1109/2000 +2024-11-11 18:52:15,288 Current Learning Rate: 0.0057045062 +2024-11-11 18:52:15,289 Train Loss: 0.0009038, Val Loss: 0.0009367 +2024-11-11 18:52:15,289 Epoch 1110/2000 +2024-11-11 18:52:30,620 Current Learning Rate: 0.0057821723 +2024-11-11 18:52:30,621 Train Loss: 0.0008269, Val Loss: 0.0009090 +2024-11-11 18:52:30,621 Epoch 1111/2000 +2024-11-11 18:52:46,896 Current 
Learning Rate: 0.0058596455 +2024-11-11 18:52:46,897 Train Loss: 0.0009088, Val Loss: 0.0009546 +2024-11-11 18:52:46,897 Epoch 1112/2000 +2024-11-11 18:53:02,681 Current Learning Rate: 0.0059369066 +2024-11-11 18:53:02,681 Train Loss: 0.0008433, Val Loss: 0.0009075 +2024-11-11 18:53:02,682 Epoch 1113/2000 +2024-11-11 18:53:18,567 Current Learning Rate: 0.0060139365 +2024-11-11 18:53:18,567 Train Loss: 0.0009822, Val Loss: 0.0009441 +2024-11-11 18:53:18,567 Epoch 1114/2000 +2024-11-11 18:53:33,757 Current Learning Rate: 0.0060907162 +2024-11-11 18:53:33,758 Train Loss: 0.0009162, Val Loss: 0.0009002 +2024-11-11 18:53:33,758 Epoch 1115/2000 +2024-11-11 18:53:49,097 Current Learning Rate: 0.0061672268 +2024-11-11 18:53:49,098 Train Loss: 0.0007641, Val Loss: 0.0009208 +2024-11-11 18:53:49,098 Epoch 1116/2000 +2024-11-11 18:54:04,371 Current Learning Rate: 0.0062434494 +2024-11-11 18:54:04,372 Train Loss: 0.0008616, Val Loss: 0.0009157 +2024-11-11 18:54:04,372 Epoch 1117/2000 +2024-11-11 18:54:20,901 Current Learning Rate: 0.0063193652 +2024-11-11 18:54:20,902 Train Loss: 0.0007720, Val Loss: 0.0009131 +2024-11-11 18:54:20,902 Epoch 1118/2000 +2024-11-11 18:54:36,520 Current Learning Rate: 0.0063949555 +2024-11-11 18:54:36,521 Train Loss: 0.0008748, Val Loss: 0.0009098 +2024-11-11 18:54:36,522 Epoch 1119/2000 +2024-11-11 18:54:52,529 Current Learning Rate: 0.0064702016 +2024-11-11 18:54:52,529 Train Loss: 0.0008747, Val Loss: 0.0009104 +2024-11-11 18:54:52,529 Epoch 1120/2000 +2024-11-11 18:55:08,008 Current Learning Rate: 0.0065450850 +2024-11-11 18:55:08,009 Train Loss: 0.0007746, Val Loss: 0.0009134 +2024-11-11 18:55:08,009 Epoch 1121/2000 +2024-11-11 18:55:24,190 Current Learning Rate: 0.0066195871 +2024-11-11 18:55:24,191 Train Loss: 0.0008900, Val Loss: 0.0009446 +2024-11-11 18:55:24,191 Epoch 1122/2000 +2024-11-11 18:55:41,093 Current Learning Rate: 0.0066936896 +2024-11-11 18:55:41,093 Train Loss: 0.0008111, Val Loss: 0.0009460 +2024-11-11 18:55:41,094 Epoch 
1123/2000 +2024-11-11 18:55:58,069 Current Learning Rate: 0.0067673742 +2024-11-11 18:55:58,069 Train Loss: 0.0010924, Val Loss: 0.0012449 +2024-11-11 18:55:58,070 Epoch 1124/2000 +2024-11-11 18:56:14,415 Current Learning Rate: 0.0068406228 +2024-11-11 18:56:14,416 Train Loss: 0.0009590, Val Loss: 0.0009485 +2024-11-11 18:56:14,416 Epoch 1125/2000 +2024-11-11 18:56:30,273 Current Learning Rate: 0.0069134172 +2024-11-11 18:56:30,274 Train Loss: 0.0009170, Val Loss: 0.0009529 +2024-11-11 18:56:30,274 Epoch 1126/2000 +2024-11-11 18:56:46,177 Current Learning Rate: 0.0069857395 +2024-11-11 18:56:46,178 Train Loss: 0.0008428, Val Loss: 0.0009891 +2024-11-11 18:56:46,178 Epoch 1127/2000 +2024-11-11 18:57:02,205 Current Learning Rate: 0.0070575718 +2024-11-11 18:57:02,205 Train Loss: 0.0008743, Val Loss: 0.0009457 +2024-11-11 18:57:02,206 Epoch 1128/2000 +2024-11-11 18:57:18,351 Current Learning Rate: 0.0071288965 +2024-11-11 18:57:18,352 Train Loss: 0.0007929, Val Loss: 0.0009199 +2024-11-11 18:57:18,352 Epoch 1129/2000 +2024-11-11 18:57:34,626 Current Learning Rate: 0.0071996958 +2024-11-11 18:57:34,627 Train Loss: 0.0008838, Val Loss: 0.0009955 +2024-11-11 18:57:34,627 Epoch 1130/2000 +2024-11-11 18:57:50,691 Current Learning Rate: 0.0072699525 +2024-11-11 18:57:50,691 Train Loss: 0.0008128, Val Loss: 0.0009871 +2024-11-11 18:57:50,692 Epoch 1131/2000 +2024-11-11 18:58:07,237 Current Learning Rate: 0.0073396491 +2024-11-11 18:58:07,237 Train Loss: 0.0008704, Val Loss: 0.0010355 +2024-11-11 18:58:07,238 Epoch 1132/2000 +2024-11-11 18:58:22,551 Current Learning Rate: 0.0074087684 +2024-11-11 18:58:22,552 Train Loss: 0.0008751, Val Loss: 0.0009685 +2024-11-11 18:58:22,552 Epoch 1133/2000 +2024-11-11 18:58:37,894 Current Learning Rate: 0.0074772933 +2024-11-11 18:58:37,894 Train Loss: 0.0008425, Val Loss: 0.0009452 +2024-11-11 18:58:37,895 Epoch 1134/2000 +2024-11-11 18:58:53,987 Current Learning Rate: 0.0075452071 +2024-11-11 18:58:53,988 Train Loss: 0.0007711, Val Loss: 
0.0009159 +2024-11-11 18:58:53,988 Epoch 1135/2000 +2024-11-11 18:59:09,818 Current Learning Rate: 0.0076124928 +2024-11-11 18:59:09,818 Train Loss: 0.0007821, Val Loss: 0.0009073 +2024-11-11 18:59:09,818 Epoch 1136/2000 +2024-11-11 18:59:26,595 Current Learning Rate: 0.0076791340 +2024-11-11 18:59:26,595 Train Loss: 0.0009624, Val Loss: 0.0010466 +2024-11-11 18:59:26,596 Epoch 1137/2000 +2024-11-11 18:59:42,702 Current Learning Rate: 0.0077451141 +2024-11-11 18:59:42,703 Train Loss: 0.0008611, Val Loss: 0.0009538 +2024-11-11 18:59:42,703 Epoch 1138/2000 +2024-11-11 18:59:58,795 Current Learning Rate: 0.0078104169 +2024-11-11 18:59:58,795 Train Loss: 0.0009359, Val Loss: 0.0010198 +2024-11-11 18:59:58,796 Epoch 1139/2000 +2024-11-11 19:00:13,498 Current Learning Rate: 0.0078750263 +2024-11-11 19:00:13,498 Train Loss: 0.0008688, Val Loss: 0.0009391 +2024-11-11 19:00:13,498 Epoch 1140/2000 +2024-11-11 19:00:28,891 Current Learning Rate: 0.0079389263 +2024-11-11 19:00:28,892 Train Loss: 0.0008664, Val Loss: 0.0009756 +2024-11-11 19:00:28,892 Epoch 1141/2000 +2024-11-11 19:00:44,292 Current Learning Rate: 0.0080021011 +2024-11-11 19:00:44,293 Train Loss: 0.0008583, Val Loss: 0.0009709 +2024-11-11 19:00:44,293 Epoch 1142/2000 +2024-11-11 19:01:00,106 Current Learning Rate: 0.0080645353 +2024-11-11 19:01:00,106 Train Loss: 0.0008195, Val Loss: 0.0009350 +2024-11-11 19:01:00,106 Epoch 1143/2000 +2024-11-11 19:01:15,206 Current Learning Rate: 0.0081262133 +2024-11-11 19:01:15,206 Train Loss: 0.0009231, Val Loss: 0.0010300 +2024-11-11 19:01:15,206 Epoch 1144/2000 +2024-11-11 19:01:30,241 Current Learning Rate: 0.0081871199 +2024-11-11 19:01:30,242 Train Loss: 0.0009033, Val Loss: 0.0010105 +2024-11-11 19:01:30,242 Epoch 1145/2000 +2024-11-11 19:01:46,298 Current Learning Rate: 0.0082472402 +2024-11-11 19:01:46,319 Train Loss: 0.0009219, Val Loss: 0.0009805 +2024-11-11 19:01:46,319 Epoch 1146/2000 +2024-11-11 19:02:02,591 Current Learning Rate: 0.0083065593 +2024-11-11 
19:02:02,592 Train Loss: 0.0010280, Val Loss: 0.0010458 +2024-11-11 19:02:02,592 Epoch 1147/2000 +2024-11-11 19:02:18,516 Current Learning Rate: 0.0083650626 +2024-11-11 19:02:18,517 Train Loss: 0.0008359, Val Loss: 0.0009765 +2024-11-11 19:02:18,517 Epoch 1148/2000 +2024-11-11 19:02:34,040 Current Learning Rate: 0.0084227355 +2024-11-11 19:02:34,040 Train Loss: 0.0008897, Val Loss: 0.0010902 +2024-11-11 19:02:34,041 Epoch 1149/2000 +2024-11-11 19:02:48,957 Current Learning Rate: 0.0084795640 +2024-11-11 19:02:48,957 Train Loss: 0.0009626, Val Loss: 0.0010651 +2024-11-11 19:02:48,957 Epoch 1150/2000 +2024-11-11 19:03:04,906 Current Learning Rate: 0.0085355339 +2024-11-11 19:03:04,906 Train Loss: 0.0009077, Val Loss: 0.0009267 +2024-11-11 19:03:04,907 Epoch 1151/2000 +2024-11-11 19:03:18,450 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-11 19:03:20,329 Current Learning Rate: 0.0085906315 +2024-11-11 19:03:20,331 Train Loss: 0.0008699, Val Loss: 0.0009190 +2024-11-11 19:03:20,332 Epoch 1152/2000 +2024-11-11 19:03:36,464 Current Learning Rate: 0.0086448431 +2024-11-11 19:03:36,465 Train Loss: 0.0008264, Val Loss: 0.0009337 +2024-11-11 19:03:36,466 Epoch 1153/2000 +2024-11-11 19:03:42,658 Loading best model from checkpoint. +2024-11-11 19:03:59,935 Testing completed and best model saved. 
+-11-11 19:03:51,275 Train Loss: 0.0007436, Val Loss: 0.0009308 +2024-11-11 19:03:51,275 Epoch 1154/2000 +2024-11-11 19:04:06,794 Current Learning Rate: 0.0087505553 +2024-11-11 19:04:06,794 Train Loss: 0.0006984, Val Loss: 0.0008624 +2024-11-11 19:04:06,794 Epoch 1155/2000 +2024-11-11 19:04:22,123 Current Learning Rate: 0.0088020298 +2024-11-11 19:04:22,123 Train Loss: 0.0007567, Val Loss: 0.0008781 +2024-11-11 19:04:22,124 Epoch 1156/2000 +2024-11-11 19:04:37,362 Current Learning Rate: 0.0088525662 +2024-11-11 19:04:37,363 Train Loss: 0.0008130, Val Loss: 0.0008850 +2024-11-11 19:04:37,363 Epoch 1157/2000 +2024-11-11 19:04:53,117 Current Learning Rate: 0.0089021520 +2024-11-11 19:04:53,118 Train Loss: 0.0007347, Val Loss: 0.0009162 +2024-11-11 19:04:53,118 Epoch 1158/2000 +2024-11-11 19:05:09,548 Current Learning Rate: 0.0089507751 +2024-11-11 19:05:09,548 Train Loss: 0.0008315, Val Loss: 0.0009582 +2024-11-11 19:05:09,549 Epoch 1159/2000 +2024-11-11 19:05:26,131 Current Learning Rate: 0.0089984233 +2024-11-11 19:05:26,131 Train Loss: 0.0008935, Val Loss: 0.0009455 +2024-11-11 19:05:26,132 Epoch 1160/2000 +2024-11-11 19:05:42,270 Current Learning Rate: 0.0090450850 +2024-11-11 19:05:42,271 Train Loss: 0.0007443, Val Loss: 0.0009121 +2024-11-11 19:05:42,271 Epoch 1161/2000 +2024-11-11 19:05:57,768 Current Learning Rate: 0.0090907486 +2024-11-11 19:05:57,769 Train Loss: 0.0007306, Val Loss: 0.0009244 +2024-11-11 19:05:57,769 Epoch 1162/2000 +2024-11-11 19:06:13,568 Current Learning Rate: 0.0091354029 +2024-11-11 19:06:13,569 Train Loss: 0.0009027, Val Loss: 0.0009766 +2024-11-11 19:06:13,569 Epoch 1163/2000 +2024-11-11 19:06:28,837 Current Learning Rate: 0.0091790368 +2024-11-11 19:06:28,837 Train Loss: 0.0008806, Val Loss: 0.0009236 +2024-11-11 19:06:28,837 Epoch 1164/2000 +2024-11-11 19:06:44,143 Current Learning Rate: 0.0092216396 +2024-11-11 19:06:44,144 Train Loss: 0.0008355, Val Loss: 0.0009116 +2024-11-11 19:06:44,144 Epoch 1165/2000 +2024-11-11 19:07:01,271 
Current Learning Rate: 0.0092632008 +2024-11-11 19:07:01,272 Train Loss: 0.0008376, Val Loss: 0.0009840 +2024-11-11 19:07:01,272 Epoch 1166/2000 +2024-11-11 19:07:17,404 Current Learning Rate: 0.0093037101 +2024-11-11 19:07:17,404 Train Loss: 0.0009626, Val Loss: 0.0010315 +2024-11-11 19:07:17,404 Epoch 1167/2000 +2024-11-11 19:07:33,394 Current Learning Rate: 0.0093431576 +2024-11-11 19:07:33,395 Train Loss: 0.0009344, Val Loss: 0.0009433 +2024-11-11 19:07:33,395 Epoch 1168/2000 +2024-11-11 19:07:49,114 Current Learning Rate: 0.0093815334 +2024-11-11 19:07:49,115 Train Loss: 0.0009765, Val Loss: 0.0009319 +2024-11-11 19:07:49,115 Epoch 1169/2000 +2024-11-11 19:08:04,723 Current Learning Rate: 0.0094188282 +2024-11-11 19:08:04,723 Train Loss: 0.0008037, Val Loss: 0.0008954 +2024-11-11 19:08:04,723 Epoch 1170/2000 +2024-11-11 19:08:20,121 Current Learning Rate: 0.0094550326 +2024-11-11 19:08:20,121 Train Loss: 0.0007491, Val Loss: 0.0008791 +2024-11-11 19:08:20,121 Epoch 1171/2000 +2024-11-11 19:08:35,509 Current Learning Rate: 0.0094901379 +2024-11-11 19:08:35,510 Train Loss: 0.0007986, Val Loss: 0.0009121 +2024-11-11 19:08:35,510 Epoch 1172/2000 +2024-11-11 19:08:50,960 Current Learning Rate: 0.0095241353 +2024-11-11 19:08:50,961 Train Loss: 0.0007640, Val Loss: 0.0008937 +2024-11-11 19:08:50,961 Epoch 1173/2000 +2024-11-11 19:09:07,198 Current Learning Rate: 0.0095570164 +2024-11-11 19:09:07,198 Train Loss: 0.0008310, Val Loss: 0.0008824 +2024-11-11 19:09:07,198 Epoch 1174/2000 +2024-11-11 19:09:23,042 Current Learning Rate: 0.0095887731 +2024-11-11 19:09:23,042 Train Loss: 0.0008596, Val Loss: 0.0009071 +2024-11-11 19:09:23,042 Epoch 1175/2000 +2024-11-11 19:09:38,905 Current Learning Rate: 0.0096193977 +2024-11-11 19:09:38,905 Train Loss: 0.0007866, Val Loss: 0.0009351 +2024-11-11 19:09:38,906 Epoch 1176/2000 +2024-11-11 19:09:55,190 Current Learning Rate: 0.0096488824 +2024-11-11 19:09:55,190 Train Loss: 0.0007205, Val Loss: 0.0009014 +2024-11-11 19:09:55,190 
Epoch 1177/2000 +2024-11-11 19:10:11,353 Current Learning Rate: 0.0096772202 +2024-11-11 19:10:11,353 Train Loss: 0.0007054, Val Loss: 0.0008608 +2024-11-11 19:10:11,353 Epoch 1178/2000 +2024-11-11 19:10:26,492 Current Learning Rate: 0.0097044038 +2024-11-11 19:10:26,492 Train Loss: 0.0007777, Val Loss: 0.0009524 +2024-11-11 19:10:26,493 Epoch 1179/2000 +2024-11-11 19:10:42,047 Current Learning Rate: 0.0097304268 +2024-11-11 19:10:42,048 Train Loss: 0.0009247, Val Loss: 0.0010017 +2024-11-11 19:10:42,048 Epoch 1180/2000 +2024-11-11 19:10:57,387 Current Learning Rate: 0.0097552826 +2024-11-11 19:10:57,388 Train Loss: 0.0009114, Val Loss: 0.0010670 +2024-11-11 19:10:57,388 Epoch 1181/2000 +2024-11-11 19:11:13,262 Current Learning Rate: 0.0097789651 +2024-11-11 19:11:13,262 Train Loss: 0.0009571, Val Loss: 0.0010167 +2024-11-11 19:11:13,263 Epoch 1182/2000 +2024-11-11 19:11:29,838 Current Learning Rate: 0.0098014684 +2024-11-11 19:11:29,839 Train Loss: 0.0008427, Val Loss: 0.0009216 +2024-11-11 19:11:29,839 Epoch 1183/2000 +2024-11-11 19:11:45,016 Current Learning Rate: 0.0098227871 +2024-11-11 19:11:45,017 Train Loss: 0.0007869, Val Loss: 0.0009104 +2024-11-11 19:11:45,017 Epoch 1184/2000 +2024-11-11 19:12:00,408 Current Learning Rate: 0.0098429158 +2024-11-11 19:12:00,409 Train Loss: 0.0007923, Val Loss: 0.0009151 +2024-11-11 19:12:00,409 Epoch 1185/2000 +2024-11-11 19:12:15,984 Current Learning Rate: 0.0098618496 +2024-11-11 19:12:15,984 Train Loss: 0.0007653, Val Loss: 0.0009307 +2024-11-11 19:12:15,984 Epoch 1186/2000 +2024-11-11 19:12:32,713 Current Learning Rate: 0.0098795838 +2024-11-11 19:12:32,714 Train Loss: 0.0007941, Val Loss: 0.0009135 +2024-11-11 19:12:32,714 Epoch 1187/2000 +2024-11-11 19:12:49,129 Current Learning Rate: 0.0098961141 +2024-11-11 19:12:49,130 Train Loss: 0.0007957, Val Loss: 0.0008860 +2024-11-11 19:12:49,130 Epoch 1188/2000 +2024-11-11 19:13:05,500 Current Learning Rate: 0.0099114363 +2024-11-11 19:13:05,501 Train Loss: 0.0007098, Val 
Loss: 0.0008635 +2024-11-11 19:13:05,501 Epoch 1189/2000 +2024-11-11 19:13:21,816 Current Learning Rate: 0.0099255466 +2024-11-11 19:13:21,816 Train Loss: 0.0008032, Val Loss: 0.0009301 +2024-11-11 19:13:21,816 Epoch 1190/2000 +2024-11-11 19:13:38,502 Current Learning Rate: 0.0099384417 +2024-11-11 19:13:38,502 Train Loss: 0.0008252, Val Loss: 0.0009136 +2024-11-11 19:13:38,503 Epoch 1191/2000 +2024-11-11 19:13:53,770 Current Learning Rate: 0.0099501183 +2024-11-11 19:13:53,771 Train Loss: 0.0007597, Val Loss: 0.0009133 +2024-11-11 19:13:53,771 Epoch 1192/2000 +2024-11-11 19:14:09,597 Current Learning Rate: 0.0099605735 +2024-11-11 19:14:09,598 Train Loss: 0.0008328, Val Loss: 0.0008811 +2024-11-11 19:14:09,598 Epoch 1193/2000 +2024-11-11 19:14:25,733 Current Learning Rate: 0.0099698048 +2024-11-11 19:14:25,734 Train Loss: 0.0007931, Val Loss: 0.0009434 +2024-11-11 19:14:25,734 Epoch 1194/2000 +2024-11-11 19:14:40,484 Current Learning Rate: 0.0099778098 +2024-11-11 19:14:40,485 Train Loss: 0.0009715, Val Loss: 0.0009576 +2024-11-11 19:14:40,485 Epoch 1195/2000 +2024-11-11 19:14:55,865 Current Learning Rate: 0.0099845867 +2024-11-11 19:14:55,865 Train Loss: 0.0007851, Val Loss: 0.0009919 +2024-11-11 19:14:55,865 Epoch 1196/2000 +2024-11-11 19:15:12,166 Current Learning Rate: 0.0099901336 +2024-11-11 19:15:12,167 Train Loss: 0.0008322, Val Loss: 0.0009438 +2024-11-11 19:15:12,167 Epoch 1197/2000 +2024-11-11 19:15:27,591 Current Learning Rate: 0.0099944494 +2024-11-11 19:15:27,591 Train Loss: 0.0008722, Val Loss: 0.0009891 +2024-11-11 19:15:27,591 Epoch 1198/2000 +2024-11-11 19:15:43,066 Current Learning Rate: 0.0099975328 +2024-11-11 19:15:43,066 Train Loss: 0.0007786, Val Loss: 0.0008811 +2024-11-11 19:15:43,067 Epoch 1199/2000 +2024-11-11 19:15:58,443 Current Learning Rate: 0.0099993832 +2024-11-11 19:15:58,443 Train Loss: 0.0008825, Val Loss: 0.0009694 +2024-11-11 19:15:58,443 Epoch 1200/2000 +2024-11-11 19:16:15,059 Current Learning Rate: 0.0100000000 +2024-11-11 
19:16:15,061 Train Loss: 0.0009325, Val Loss: 0.0009888 +2024-11-11 19:16:15,061 Epoch 1201/2000 +2024-11-11 19:16:31,458 Current Learning Rate: 0.0099993832 +2024-11-11 19:16:31,459 Train Loss: 0.0009915, Val Loss: 0.0009818 +2024-11-11 19:16:31,459 Epoch 1202/2000 +2024-11-11 19:16:47,431 Current Learning Rate: 0.0099975328 +2024-11-11 19:16:47,432 Train Loss: 0.0007678, Val Loss: 0.0008826 +2024-11-11 19:16:47,432 Epoch 1203/2000 +2024-11-11 19:17:02,996 Current Learning Rate: 0.0099944494 +2024-11-11 19:17:02,997 Train Loss: 0.0009000, Val Loss: 0.0008945 +2024-11-11 19:17:02,997 Epoch 1204/2000 +2024-11-11 19:17:19,426 Current Learning Rate: 0.0099901336 +2024-11-11 19:17:19,426 Train Loss: 0.0008233, Val Loss: 0.0008658 +2024-11-11 19:17:19,426 Epoch 1205/2000 +2024-11-11 19:17:35,972 Current Learning Rate: 0.0099845867 +2024-11-11 19:17:35,972 Train Loss: 0.0007527, Val Loss: 0.0008549 +2024-11-11 19:17:35,972 Epoch 1206/2000 +2024-11-11 19:17:50,680 Current Learning Rate: 0.0099778098 +2024-11-11 19:17:51,480 Train Loss: 0.0007904, Val Loss: 0.0008346 +2024-11-11 19:17:51,481 Epoch 1207/2000 +2024-11-11 19:18:06,040 Current Learning Rate: 0.0099698048 +2024-11-11 19:18:06,801 Train Loss: 0.0007418, Val Loss: 0.0008254 +2024-11-11 19:18:06,801 Epoch 1208/2000 +2024-11-11 19:18:21,420 Current Learning Rate: 0.0099605735 +2024-11-11 19:18:21,421 Train Loss: 0.0007113, Val Loss: 0.0009579 +2024-11-11 19:18:21,421 Epoch 1209/2000 +2024-11-11 19:18:36,840 Current Learning Rate: 0.0099501183 +2024-11-11 19:18:36,841 Train Loss: 0.0008529, Val Loss: 0.0009929 +2024-11-11 19:18:36,841 Epoch 1210/2000 +2024-11-11 19:18:53,220 Current Learning Rate: 0.0099384417 +2024-11-11 19:18:53,221 Train Loss: 0.0007548, Val Loss: 0.0008843 +2024-11-11 19:18:53,221 Epoch 1211/2000 +2024-11-11 19:19:09,386 Current Learning Rate: 0.0099255466 +2024-11-11 19:19:09,387 Train Loss: 0.0008613, Val Loss: 0.0009586 +2024-11-11 19:19:09,387 Epoch 1212/2000 +2024-11-11 19:19:25,803 Current 
Learning Rate: 0.0099114363 +2024-11-11 19:19:25,803 Train Loss: 0.0008580, Val Loss: 0.0010224 +2024-11-11 19:19:25,804 Epoch 1213/2000 +2024-11-11 19:19:42,245 Current Learning Rate: 0.0098961141 +2024-11-11 19:19:42,245 Train Loss: 0.0007980, Val Loss: 0.0008683 +2024-11-11 19:19:42,246 Epoch 1214/2000 +2024-11-11 19:19:58,284 Current Learning Rate: 0.0098795838 +2024-11-11 19:19:58,285 Train Loss: 0.0008451, Val Loss: 0.0009139 +2024-11-11 19:19:58,285 Epoch 1215/2000 +2024-11-11 19:20:14,202 Current Learning Rate: 0.0098618496 +2024-11-11 19:20:14,202 Train Loss: 0.0007909, Val Loss: 0.0008778 +2024-11-11 19:20:14,203 Epoch 1216/2000 +2024-11-11 19:20:30,648 Current Learning Rate: 0.0098429158 +2024-11-11 19:20:30,649 Train Loss: 0.0007579, Val Loss: 0.0008329 +2024-11-11 19:20:30,649 Epoch 1217/2000 +2024-11-11 19:20:46,337 Current Learning Rate: 0.0098227871 +2024-11-11 19:20:46,338 Train Loss: 0.0007602, Val Loss: 0.0008391 +2024-11-11 19:20:46,338 Epoch 1218/2000 +2024-11-11 19:21:02,049 Current Learning Rate: 0.0098014684 +2024-11-11 19:21:02,049 Train Loss: 0.0007357, Val Loss: 0.0008317 +2024-11-11 19:21:02,049 Epoch 1219/2000 +2024-11-11 19:21:17,302 Current Learning Rate: 0.0097789651 +2024-11-11 19:21:17,303 Train Loss: 0.0006970, Val Loss: 0.0008263 +2024-11-11 19:21:17,303 Epoch 1220/2000 +2024-11-11 19:21:32,763 Current Learning Rate: 0.0097552826 +2024-11-11 19:21:33,486 Train Loss: 0.0007238, Val Loss: 0.0008217 +2024-11-11 19:21:33,486 Epoch 1221/2000 +2024-11-11 19:21:48,531 Current Learning Rate: 0.0097304268 +2024-11-11 19:21:48,532 Train Loss: 0.0008221, Val Loss: 0.0008921 +2024-11-11 19:21:48,532 Epoch 1222/2000 +2024-11-11 19:22:04,214 Current Learning Rate: 0.0097044038 +2024-11-11 19:22:04,214 Train Loss: 0.0007832, Val Loss: 0.0008790 +2024-11-11 19:22:04,214 Epoch 1223/2000 +2024-11-11 19:22:19,294 Current Learning Rate: 0.0096772202 +2024-11-11 19:22:19,294 Train Loss: 0.0007623, Val Loss: 0.0008899 +2024-11-11 19:22:19,295 Epoch 
1224/2000 +2024-11-11 19:22:35,499 Current Learning Rate: 0.0096488824 +2024-11-11 19:22:35,499 Train Loss: 0.0007388, Val Loss: 0.0008893 +2024-11-11 19:22:35,499 Epoch 1225/2000 +2024-11-11 19:22:50,695 Current Learning Rate: 0.0096193977 +2024-11-11 19:22:50,695 Train Loss: 0.0007008, Val Loss: 0.0008764 +2024-11-11 19:22:50,696 Epoch 1226/2000 +2024-11-11 19:23:06,849 Current Learning Rate: 0.0095887731 +2024-11-11 19:23:06,849 Train Loss: 0.0007780, Val Loss: 0.0009302 +2024-11-11 19:23:06,849 Epoch 1227/2000 +2024-11-11 19:23:22,766 Current Learning Rate: 0.0095570164 +2024-11-11 19:23:22,767 Train Loss: 0.0007999, Val Loss: 0.0008586 +2024-11-11 19:23:22,767 Epoch 1228/2000 +2024-11-11 19:23:39,711 Current Learning Rate: 0.0095241353 +2024-11-11 19:23:39,712 Train Loss: 0.0007903, Val Loss: 0.0009007 +2024-11-11 19:23:39,712 Epoch 1229/2000 +2024-11-11 19:23:56,029 Current Learning Rate: 0.0094901379 +2024-11-11 19:23:56,029 Train Loss: 0.0008943, Val Loss: 0.0008876 +2024-11-11 19:23:56,030 Epoch 1230/2000 +2024-11-11 19:24:11,968 Current Learning Rate: 0.0094550326 +2024-11-11 19:24:11,969 Train Loss: 0.0007999, Val Loss: 0.0008620 +2024-11-11 19:24:11,970 Epoch 1231/2000 +2024-11-11 19:24:29,057 Current Learning Rate: 0.0094188282 +2024-11-11 19:24:29,058 Train Loss: 0.0007607, Val Loss: 0.0008616 +2024-11-11 19:24:29,058 Epoch 1232/2000 +2024-11-11 19:24:44,139 Current Learning Rate: 0.0093815334 +2024-11-11 19:24:44,953 Train Loss: 0.0006732, Val Loss: 0.0007991 +2024-11-11 19:24:44,953 Epoch 1233/2000 +2024-11-11 19:25:00,751 Current Learning Rate: 0.0093431576 +2024-11-11 19:25:00,753 Train Loss: 0.0007684, Val Loss: 0.0008466 +2024-11-11 19:25:00,754 Epoch 1234/2000 +2024-11-11 19:25:16,710 Current Learning Rate: 0.0093037101 +2024-11-11 19:25:16,711 Train Loss: 0.0008696, Val Loss: 0.0008539 +2024-11-11 19:25:16,712 Epoch 1235/2000 +2024-11-11 19:25:33,031 Current Learning Rate: 0.0092632008 +2024-11-11 19:25:33,033 Train Loss: 0.0006910, Val Loss: 
0.0008334 +2024-11-11 19:25:33,033 Epoch 1236/2000 +2024-11-11 19:25:48,841 Current Learning Rate: 0.0092216396 +2024-11-11 19:25:48,841 Train Loss: 0.0007120, Val Loss: 0.0008217 +2024-11-11 19:25:48,841 Epoch 1237/2000 +2024-11-11 19:26:04,302 Current Learning Rate: 0.0091790368 +2024-11-11 19:26:04,303 Train Loss: 0.0007135, Val Loss: 0.0008230 +2024-11-11 19:26:04,304 Epoch 1238/2000 +2024-11-11 19:26:19,513 Current Learning Rate: 0.0091354029 +2024-11-11 19:26:19,514 Train Loss: 0.0008573, Val Loss: 0.0008267 +2024-11-11 19:26:19,514 Epoch 1239/2000 +2024-11-11 19:26:36,588 Current Learning Rate: 0.0090907486 +2024-11-11 19:26:36,589 Train Loss: 0.0006955, Val Loss: 0.0008639 +2024-11-11 19:26:36,589 Epoch 1240/2000 +2024-11-11 19:26:51,627 Current Learning Rate: 0.0090450850 +2024-11-11 19:26:51,627 Train Loss: 0.0008323, Val Loss: 0.0009813 +2024-11-11 19:26:51,628 Epoch 1241/2000 +2024-11-11 19:27:07,013 Current Learning Rate: 0.0089984233 +2024-11-11 19:27:07,013 Train Loss: 0.0008396, Val Loss: 0.0009004 +2024-11-11 19:27:07,013 Epoch 1242/2000 +2024-11-11 19:27:22,295 Current Learning Rate: 0.0089507751 +2024-11-11 19:27:22,295 Train Loss: 0.0007598, Val Loss: 0.0008683 +2024-11-11 19:27:22,295 Epoch 1243/2000 +2024-11-11 19:27:37,602 Current Learning Rate: 0.0089021520 +2024-11-11 19:27:37,603 Train Loss: 0.0007631, Val Loss: 0.0008305 +2024-11-11 19:27:37,603 Epoch 1244/2000 +2024-11-11 19:27:53,458 Current Learning Rate: 0.0088525662 +2024-11-11 19:27:53,459 Train Loss: 0.0007240, Val Loss: 0.0008004 +2024-11-11 19:27:53,459 Epoch 1245/2000 +2024-11-11 19:28:09,388 Current Learning Rate: 0.0088020298 +2024-11-11 19:28:09,389 Train Loss: 0.0007155, Val Loss: 0.0007998 +2024-11-11 19:28:09,389 Epoch 1246/2000 +2024-11-11 19:28:24,273 Current Learning Rate: 0.0087505553 +2024-11-11 19:28:24,273 Train Loss: 0.0006628, Val Loss: 0.0008059 +2024-11-11 19:28:24,273 Epoch 1247/2000 +2024-11-11 19:28:39,724 Current Learning Rate: 0.0086981555 +2024-11-11 
19:28:39,725 Train Loss: 0.0007403, Val Loss: 0.0008217 +2024-11-11 19:28:39,725 Epoch 1248/2000 +2024-11-11 19:28:54,997 Current Learning Rate: 0.0086448431 +2024-11-11 19:28:54,998 Train Loss: 0.0007295, Val Loss: 0.0008898 +2024-11-11 19:28:54,998 Epoch 1249/2000 +2024-11-11 19:29:11,772 Current Learning Rate: 0.0085906315 +2024-11-11 19:29:11,772 Train Loss: 0.0007997, Val Loss: 0.0008358 +2024-11-11 19:29:11,772 Epoch 1250/2000 +2024-11-11 19:29:27,488 Current Learning Rate: 0.0085355339 +2024-11-11 19:29:27,489 Train Loss: 0.0006966, Val Loss: 0.0008217 +2024-11-11 19:29:27,489 Epoch 1251/2000 +2024-11-11 19:29:43,157 Current Learning Rate: 0.0084795640 +2024-11-11 19:29:44,157 Train Loss: 0.0006985, Val Loss: 0.0007988 +2024-11-11 19:29:44,157 Epoch 1252/2000 +2024-11-11 19:29:59,832 Current Learning Rate: 0.0084227355 +2024-11-11 19:30:00,861 Train Loss: 0.0007554, Val Loss: 0.0007808 +2024-11-11 19:30:00,861 Epoch 1253/2000 +2024-11-11 19:30:17,425 Current Learning Rate: 0.0083650626 +2024-11-11 19:30:18,514 Train Loss: 0.0006460, Val Loss: 0.0007802 +2024-11-11 19:30:18,514 Epoch 1254/2000 +2024-11-11 19:30:33,545 Current Learning Rate: 0.0083065593 +2024-11-11 19:30:33,546 Train Loss: 0.0006969, Val Loss: 0.0008002 +2024-11-11 19:30:33,546 Epoch 1255/2000 +2024-11-11 19:30:50,268 Current Learning Rate: 0.0082472402 +2024-11-11 19:30:50,269 Train Loss: 0.0007126, Val Loss: 0.0008224 +2024-11-11 19:30:50,269 Epoch 1256/2000 +2024-11-11 19:31:06,405 Current Learning Rate: 0.0081871199 +2024-11-11 19:31:06,406 Train Loss: 0.0007588, Val Loss: 0.0008356 +2024-11-11 19:31:06,406 Epoch 1257/2000 +2024-11-11 19:31:22,763 Current Learning Rate: 0.0081262133 +2024-11-11 19:31:22,763 Train Loss: 0.0006239, Val Loss: 0.0008300 +2024-11-11 19:31:22,763 Epoch 1258/2000 +2024-11-11 19:31:37,510 Current Learning Rate: 0.0080645353 +2024-11-11 19:31:37,511 Train Loss: 0.0007517, Val Loss: 0.0008245 +2024-11-11 19:31:37,511 Epoch 1259/2000 +2024-11-11 19:31:53,515 Current 
Learning Rate: 0.0080021011 +2024-11-11 19:31:54,295 Train Loss: 0.0006759, Val Loss: 0.0007783 +2024-11-11 19:31:54,295 Epoch 1260/2000 +2024-11-11 19:32:09,350 Current Learning Rate: 0.0079389263 +2024-11-11 19:32:10,081 Train Loss: 0.0007044, Val Loss: 0.0007597 +2024-11-11 19:32:10,082 Epoch 1261/2000 +2024-11-11 19:32:25,353 Current Learning Rate: 0.0078750263 +2024-11-11 19:32:25,353 Train Loss: 0.0007259, Val Loss: 0.0007679 +2024-11-11 19:32:25,354 Epoch 1262/2000 +2024-11-11 19:32:40,724 Current Learning Rate: 0.0078104169 +2024-11-11 19:32:40,725 Train Loss: 0.0007029, Val Loss: 0.0007838 +2024-11-11 19:32:40,725 Epoch 1263/2000 +2024-11-11 19:32:57,374 Current Learning Rate: 0.0077451141 +2024-11-11 19:32:57,375 Train Loss: 0.0006776, Val Loss: 0.0008143 +2024-11-11 19:32:57,375 Epoch 1264/2000 +2024-11-11 19:33:12,321 Current Learning Rate: 0.0076791340 +2024-11-11 19:33:12,322 Train Loss: 0.0006801, Val Loss: 0.0008060 +2024-11-11 19:33:12,322 Epoch 1265/2000 +2024-11-11 19:33:27,753 Current Learning Rate: 0.0076124928 +2024-11-11 19:33:27,754 Train Loss: 0.0007322, Val Loss: 0.0009160 +2024-11-11 19:33:27,754 Epoch 1266/2000 +2024-11-11 19:33:43,418 Current Learning Rate: 0.0075452071 +2024-11-11 19:33:43,418 Train Loss: 0.0007673, Val Loss: 0.0008446 +2024-11-11 19:33:43,418 Epoch 1267/2000 +2024-11-11 19:33:58,983 Current Learning Rate: 0.0074772933 +2024-11-11 19:33:58,984 Train Loss: 0.0007103, Val Loss: 0.0007735 +2024-11-11 19:33:58,984 Epoch 1268/2000 +2024-11-11 19:34:14,391 Current Learning Rate: 0.0074087684 +2024-11-11 19:34:14,391 Train Loss: 0.0006179, Val Loss: 0.0007601 +2024-11-11 19:34:14,391 Epoch 1269/2000 +2024-11-11 19:34:30,470 Current Learning Rate: 0.0073396491 +2024-11-11 19:34:30,471 Train Loss: 0.0006432, Val Loss: 0.0007638 +2024-11-11 19:34:30,471 Epoch 1270/2000 +2024-11-11 19:34:45,792 Current Learning Rate: 0.0072699525 +2024-11-11 19:34:45,792 Train Loss: 0.0007119, Val Loss: 0.0007776 +2024-11-11 19:34:45,793 Epoch 
1271/2000 +2024-11-11 19:35:01,007 Current Learning Rate: 0.0071996958 +2024-11-11 19:35:01,008 Train Loss: 0.0005824, Val Loss: 0.0007780 +2024-11-11 19:35:01,008 Epoch 1272/2000 +2024-11-11 19:35:16,516 Current Learning Rate: 0.0071288965 +2024-11-11 19:35:16,516 Train Loss: 0.0006603, Val Loss: 0.0007763 +2024-11-11 19:35:16,516 Epoch 1273/2000 +2024-11-11 19:35:32,010 Current Learning Rate: 0.0070575718 +2024-11-11 19:35:32,817 Train Loss: 0.0006173, Val Loss: 0.0007556 +2024-11-11 19:35:32,817 Epoch 1274/2000 +2024-11-11 19:35:47,430 Current Learning Rate: 0.0069857395 +2024-11-11 19:35:48,212 Train Loss: 0.0005924, Val Loss: 0.0007387 +2024-11-11 19:35:48,212 Epoch 1275/2000 +2024-11-11 19:36:03,384 Current Learning Rate: 0.0069134172 +2024-11-11 19:36:03,384 Train Loss: 0.0006143, Val Loss: 0.0007536 +2024-11-11 19:36:03,385 Epoch 1276/2000 +2024-11-11 19:36:18,807 Current Learning Rate: 0.0068406228 +2024-11-11 19:36:18,808 Train Loss: 0.0006034, Val Loss: 0.0007466 +2024-11-11 19:36:18,809 Epoch 1277/2000 +2024-11-11 19:36:34,356 Current Learning Rate: 0.0067673742 +2024-11-11 19:36:34,356 Train Loss: 0.0006757, Val Loss: 0.0007473 +2024-11-11 19:36:34,357 Epoch 1278/2000 +2024-11-11 19:36:49,397 Current Learning Rate: 0.0066936896 +2024-11-11 19:36:49,397 Train Loss: 0.0005976, Val Loss: 0.0007534 +2024-11-11 19:36:49,398 Epoch 1279/2000 +2024-11-11 19:37:04,483 Current Learning Rate: 0.0066195871 +2024-11-11 19:37:04,484 Train Loss: 0.0006511, Val Loss: 0.0007671 +2024-11-11 19:37:04,484 Epoch 1280/2000 +2024-11-11 19:37:19,936 Current Learning Rate: 0.0065450850 +2024-11-11 19:37:19,937 Train Loss: 0.0005711, Val Loss: 0.0007763 +2024-11-11 19:37:19,937 Epoch 1281/2000 +2024-11-11 19:37:35,185 Current Learning Rate: 0.0064702016 +2024-11-11 19:37:35,185 Train Loss: 0.0006569, Val Loss: 0.0008083 +2024-11-11 19:37:35,185 Epoch 1282/2000 +2024-11-11 19:37:50,662 Current Learning Rate: 0.0063949555 +2024-11-11 19:37:50,663 Train Loss: 0.0006829, Val Loss: 
0.0008200 +2024-11-11 19:37:50,664 Epoch 1283/2000 +2024-11-11 19:38:05,685 Current Learning Rate: 0.0063193652 +2024-11-11 19:38:05,686 Train Loss: 0.0006795, Val Loss: 0.0007918 +2024-11-11 19:38:05,687 Epoch 1284/2000 +2024-11-11 19:38:21,127 Current Learning Rate: 0.0062434494 +2024-11-11 19:38:21,128 Train Loss: 0.0007056, Val Loss: 0.0008107 +2024-11-11 19:38:21,128 Epoch 1285/2000 +2024-11-11 19:38:36,436 Current Learning Rate: 0.0061672268 +2024-11-11 19:38:36,437 Train Loss: 0.0007121, Val Loss: 0.0007828 +2024-11-11 19:38:36,438 Epoch 1286/2000 +2024-11-11 19:38:51,528 Current Learning Rate: 0.0060907162 +2024-11-11 19:38:51,528 Train Loss: 0.0007112, Val Loss: 0.0007594 +2024-11-11 19:38:51,529 Epoch 1287/2000 +2024-11-11 19:39:07,152 Current Learning Rate: 0.0060139365 +2024-11-11 19:39:07,153 Train Loss: 0.0006527, Val Loss: 0.0007456 +2024-11-11 19:39:07,153 Epoch 1288/2000 +2024-11-11 19:39:22,864 Current Learning Rate: 0.0059369066 +2024-11-11 19:39:22,864 Train Loss: 0.0006117, Val Loss: 0.0007583 +2024-11-11 19:39:22,865 Epoch 1289/2000 +2024-11-11 19:39:38,441 Current Learning Rate: 0.0058596455 +2024-11-11 19:39:38,441 Train Loss: 0.0007125, Val Loss: 0.0007986 +2024-11-11 19:39:38,442 Epoch 1290/2000 +2024-11-11 19:39:54,221 Current Learning Rate: 0.0057821723 +2024-11-11 19:39:54,221 Train Loss: 0.0006777, Val Loss: 0.0007551 +2024-11-11 19:39:54,222 Epoch 1291/2000 +2024-11-11 19:40:10,229 Current Learning Rate: 0.0057045062 +2024-11-11 19:40:10,230 Train Loss: 0.0006606, Val Loss: 0.0007435 +2024-11-11 19:40:10,231 Epoch 1292/2000 +2024-11-11 19:40:25,814 Current Learning Rate: 0.0056266662 +2024-11-11 19:40:25,815 Train Loss: 0.0006028, Val Loss: 0.0007567 +2024-11-11 19:40:25,815 Epoch 1293/2000 +2024-11-11 19:40:42,175 Current Learning Rate: 0.0055486716 +2024-11-11 19:40:42,927 Train Loss: 0.0006782, Val Loss: 0.0007165 +2024-11-11 19:40:42,927 Epoch 1294/2000 +2024-11-11 19:40:58,217 Current Learning Rate: 0.0054705416 +2024-11-11 
19:40:59,109 Train Loss: 0.0006235, Val Loss: 0.0007030 +2024-11-11 19:40:59,109 Epoch 1295/2000 +2024-11-11 19:41:14,177 Current Learning Rate: 0.0053922955 +2024-11-11 19:41:14,178 Train Loss: 0.0006007, Val Loss: 0.0007068 +2024-11-11 19:41:14,179 Epoch 1296/2000 +2024-11-11 19:41:29,127 Current Learning Rate: 0.0053139526 +2024-11-11 19:41:29,127 Train Loss: 0.0006074, Val Loss: 0.0007046 +2024-11-11 19:41:29,128 Epoch 1297/2000 +2024-11-11 19:41:44,751 Current Learning Rate: 0.0052355323 +2024-11-11 19:41:45,497 Train Loss: 0.0005509, Val Loss: 0.0006939 +2024-11-11 19:41:45,497 Epoch 1298/2000 +2024-11-11 19:42:00,626 Current Learning Rate: 0.0051570538 +2024-11-11 19:42:01,625 Train Loss: 0.0006324, Val Loss: 0.0006921 +2024-11-11 19:42:01,625 Epoch 1299/2000 +2024-11-11 19:42:17,627 Current Learning Rate: 0.0050785366 +2024-11-11 19:42:17,628 Train Loss: 0.0006506, Val Loss: 0.0006980 +2024-11-11 19:42:17,628 Epoch 1300/2000 +2024-11-11 19:42:33,534 Current Learning Rate: 0.0050000000 +2024-11-11 19:42:33,535 Train Loss: 0.0007355, Val Loss: 0.0007156 +2024-11-11 19:42:33,535 Epoch 1301/2000 +2024-11-11 19:42:48,820 Current Learning Rate: 0.0049214634 +2024-11-11 19:42:49,835 Train Loss: 0.0006352, Val Loss: 0.0006887 +2024-11-11 19:42:49,836 Epoch 1302/2000 +2024-11-11 19:43:05,428 Current Learning Rate: 0.0048429462 +2024-11-11 19:43:06,263 Train Loss: 0.0006389, Val Loss: 0.0006805 +2024-11-11 19:43:06,264 Epoch 1303/2000 +2024-11-11 19:43:21,545 Current Learning Rate: 0.0047644677 +2024-11-11 19:43:22,491 Train Loss: 0.0005725, Val Loss: 0.0006790 +2024-11-11 19:43:22,491 Epoch 1304/2000 +2024-11-11 19:43:37,963 Current Learning Rate: 0.0046860474 +2024-11-11 19:43:37,964 Train Loss: 0.0005542, Val Loss: 0.0006816 +2024-11-11 19:43:37,964 Epoch 1305/2000 +2024-11-11 19:43:53,766 Current Learning Rate: 0.0046077045 +2024-11-11 19:43:53,767 Train Loss: 0.0006077, Val Loss: 0.0006828 +2024-11-11 19:43:53,767 Epoch 1306/2000 +2024-11-11 19:44:09,461 Current 
Learning Rate: 0.0045294584 +2024-11-11 19:44:09,462 Train Loss: 0.0005358, Val Loss: 0.0006855 +2024-11-11 19:44:09,462 Epoch 1307/2000 +2024-11-11 19:44:24,723 Current Learning Rate: 0.0044513284 +2024-11-11 19:44:24,723 Train Loss: 0.0005936, Val Loss: 0.0006877 +2024-11-11 19:44:24,723 Epoch 1308/2000 +2024-11-11 19:44:40,655 Current Learning Rate: 0.0043733338 +2024-11-11 19:44:40,656 Train Loss: 0.0005518, Val Loss: 0.0006815 +2024-11-11 19:44:40,656 Epoch 1309/2000 +2024-11-11 19:44:56,368 Current Learning Rate: 0.0042954938 +2024-11-11 19:44:57,079 Train Loss: 0.0005299, Val Loss: 0.0006740 +2024-11-11 19:44:57,079 Epoch 1310/2000 +2024-11-11 19:45:12,025 Current Learning Rate: 0.0042178277 +2024-11-11 19:45:12,026 Train Loss: 0.0006295, Val Loss: 0.0006814 +2024-11-11 19:45:12,026 Epoch 1311/2000 +2024-11-11 19:45:27,415 Current Learning Rate: 0.0041403545 +2024-11-11 19:45:27,416 Train Loss: 0.0005916, Val Loss: 0.0006889 +2024-11-11 19:45:27,416 Epoch 1312/2000 +2024-11-11 19:45:43,210 Current Learning Rate: 0.0040630934 +2024-11-11 19:45:44,034 Train Loss: 0.0006301, Val Loss: 0.0006719 +2024-11-11 19:45:44,035 Epoch 1313/2000 +2024-11-11 19:45:59,092 Current Learning Rate: 0.0039860635 +2024-11-11 19:45:59,093 Train Loss: 0.0005829, Val Loss: 0.0006777 +2024-11-11 19:45:59,093 Epoch 1314/2000 +2024-11-11 19:46:15,211 Current Learning Rate: 0.0039092838 +2024-11-11 19:46:15,212 Train Loss: 0.0005170, Val Loss: 0.0007158 +2024-11-11 19:46:15,212 Epoch 1315/2000 +2024-11-11 19:46:30,250 Current Learning Rate: 0.0038327732 +2024-11-11 19:46:30,251 Train Loss: 0.0005662, Val Loss: 0.0007079 +2024-11-11 19:46:30,251 Epoch 1316/2000 +2024-11-11 19:46:45,529 Current Learning Rate: 0.0037565506 +2024-11-11 19:46:45,529 Train Loss: 0.0005735, Val Loss: 0.0006768 +2024-11-11 19:46:45,529 Epoch 1317/2000 +2024-11-11 19:47:01,289 Current Learning Rate: 0.0036806348 +2024-11-11 19:47:01,290 Train Loss: 0.0005399, Val Loss: 0.0007005 +2024-11-11 19:47:01,291 Epoch 
1318/2000 +2024-11-11 19:47:16,945 Current Learning Rate: 0.0036050445 +2024-11-11 19:47:16,946 Train Loss: 0.0005213, Val Loss: 0.0007417 +2024-11-11 19:47:16,946 Epoch 1319/2000 +2024-11-11 19:47:33,087 Current Learning Rate: 0.0035297984 +2024-11-11 19:47:33,088 Train Loss: 0.0006110, Val Loss: 0.0006880 +2024-11-11 19:47:33,088 Epoch 1320/2000 +2024-11-11 19:47:49,882 Current Learning Rate: 0.0034549150 +2024-11-11 19:47:49,882 Train Loss: 0.0005038, Val Loss: 0.0006751 +2024-11-11 19:47:49,882 Epoch 1321/2000 +2024-11-11 19:48:05,855 Current Learning Rate: 0.0033804129 +2024-11-11 19:48:05,856 Train Loss: 0.0005429, Val Loss: 0.0006789 +2024-11-11 19:48:05,856 Epoch 1322/2000 +2024-11-11 19:48:21,447 Current Learning Rate: 0.0033063104 +2024-11-11 19:48:21,447 Train Loss: 0.0005773, Val Loss: 0.0006816 +2024-11-11 19:48:21,447 Epoch 1323/2000 +2024-11-11 19:48:37,225 Current Learning Rate: 0.0032326258 +2024-11-11 19:48:37,226 Train Loss: 0.0005571, Val Loss: 0.0006779 +2024-11-11 19:48:37,226 Epoch 1324/2000 +2024-11-11 19:48:52,382 Current Learning Rate: 0.0031593772 +2024-11-11 19:48:53,203 Train Loss: 0.0005491, Val Loss: 0.0006707 +2024-11-11 19:48:53,203 Epoch 1325/2000 +2024-11-11 19:49:07,763 Current Learning Rate: 0.0030865828 +2024-11-11 19:49:08,458 Train Loss: 0.0005470, Val Loss: 0.0006639 +2024-11-11 19:49:08,458 Epoch 1326/2000 +2024-11-11 19:49:23,171 Current Learning Rate: 0.0030142605 +2024-11-11 19:49:24,006 Train Loss: 0.0005413, Val Loss: 0.0006619 +2024-11-11 19:49:24,006 Epoch 1327/2000 +2024-11-11 19:49:38,500 Current Learning Rate: 0.0029424282 +2024-11-11 19:49:39,297 Train Loss: 0.0005771, Val Loss: 0.0006574 +2024-11-11 19:49:39,297 Epoch 1328/2000 +2024-11-11 19:49:53,818 Current Learning Rate: 0.0028711035 +2024-11-11 19:49:54,593 Train Loss: 0.0005383, Val Loss: 0.0006545 +2024-11-11 19:49:54,593 Epoch 1329/2000 +2024-11-11 19:50:09,038 Current Learning Rate: 0.0028003042 +2024-11-11 19:50:09,768 Train Loss: 0.0005376, Val Loss: 
0.0006539 +2024-11-11 19:50:09,768 Epoch 1330/2000 +2024-11-11 19:50:24,475 Current Learning Rate: 0.0027300475 +2024-11-11 19:50:25,189 Train Loss: 0.0005080, Val Loss: 0.0006516 +2024-11-11 19:50:25,190 Epoch 1331/2000 +2024-11-11 19:50:40,046 Current Learning Rate: 0.0026603509 +2024-11-11 19:50:41,117 Train Loss: 0.0005167, Val Loss: 0.0006495 +2024-11-11 19:50:41,117 Epoch 1332/2000 +2024-11-11 19:50:56,357 Current Learning Rate: 0.0025912316 +2024-11-11 19:50:56,358 Train Loss: 0.0005841, Val Loss: 0.0006509 +2024-11-11 19:50:56,359 Epoch 1333/2000 +2024-11-11 19:51:13,217 Current Learning Rate: 0.0025227067 +2024-11-11 19:51:13,218 Train Loss: 0.0005029, Val Loss: 0.0006523 +2024-11-11 19:51:13,218 Epoch 1334/2000 +2024-11-11 19:51:28,576 Current Learning Rate: 0.0024547929 +2024-11-11 19:51:28,576 Train Loss: 0.0005073, Val Loss: 0.0006524 +2024-11-11 19:51:28,577 Epoch 1335/2000 +2024-11-11 19:51:44,982 Current Learning Rate: 0.0023875072 +2024-11-11 19:51:44,983 Train Loss: 0.0004979, Val Loss: 0.0006558 +2024-11-11 19:51:44,983 Epoch 1336/2000 +2024-11-11 19:52:01,889 Current Learning Rate: 0.0023208660 +2024-11-11 19:52:04,546 Train Loss: 0.0005542, Val Loss: 0.0006464 +2024-11-11 19:52:04,546 Epoch 1337/2000 +2024-11-11 19:52:19,465 Current Learning Rate: 0.0022548859 +2024-11-11 19:52:20,550 Train Loss: 0.0005587, Val Loss: 0.0006404 +2024-11-11 19:52:20,550 Epoch 1338/2000 +2024-11-11 19:52:36,007 Current Learning Rate: 0.0021895831 +2024-11-11 19:52:36,770 Train Loss: 0.0005024, Val Loss: 0.0006403 +2024-11-11 19:52:36,771 Epoch 1339/2000 +2024-11-11 19:52:51,850 Current Learning Rate: 0.0021249737 +2024-11-11 19:52:52,825 Train Loss: 0.0005034, Val Loss: 0.0006401 +2024-11-11 19:52:52,826 Epoch 1340/2000 +2024-11-11 19:53:08,283 Current Learning Rate: 0.0020610737 +2024-11-11 19:53:09,365 Train Loss: 0.0004939, Val Loss: 0.0006363 +2024-11-11 19:53:09,365 Epoch 1341/2000 +2024-11-11 19:53:24,958 Current Learning Rate: 0.0019978989 +2024-11-11 
19:53:25,737 Train Loss: 0.0004918, Val Loss: 0.0006351 +2024-11-11 19:53:25,738 Epoch 1342/2000 +2024-11-11 19:53:40,705 Current Learning Rate: 0.0019354647 +2024-11-11 19:53:41,502 Train Loss: 0.0005189, Val Loss: 0.0006343 +2024-11-11 19:53:41,502 Epoch 1343/2000 +2024-11-11 19:53:56,547 Current Learning Rate: 0.0018737867 +2024-11-11 19:53:57,385 Train Loss: 0.0005233, Val Loss: 0.0006332 +2024-11-11 19:53:57,385 Epoch 1344/2000 +2024-11-11 19:54:12,439 Current Learning Rate: 0.0018128801 +2024-11-11 19:54:13,530 Train Loss: 0.0005569, Val Loss: 0.0006325 +2024-11-11 19:54:13,530 Epoch 1345/2000 +2024-11-11 19:54:29,457 Current Learning Rate: 0.0017527598 +2024-11-11 19:54:30,228 Train Loss: 0.0004619, Val Loss: 0.0006304 +2024-11-11 19:54:30,229 Epoch 1346/2000 +2024-11-11 19:54:44,678 Current Learning Rate: 0.0016934407 +2024-11-11 19:54:45,428 Train Loss: 0.0004917, Val Loss: 0.0006289 +2024-11-11 19:54:45,429 Epoch 1347/2000 +2024-11-11 19:55:01,199 Current Learning Rate: 0.0016349374 +2024-11-11 19:55:01,200 Train Loss: 0.0005784, Val Loss: 0.0006300 +2024-11-11 19:55:01,200 Epoch 1348/2000 +2024-11-11 19:55:16,580 Current Learning Rate: 0.0015772645 +2024-11-11 19:55:17,277 Train Loss: 0.0005184, Val Loss: 0.0006288 +2024-11-11 19:55:17,277 Epoch 1349/2000 +2024-11-11 19:55:32,510 Current Learning Rate: 0.0015204360 +2024-11-11 19:55:33,224 Train Loss: 0.0005124, Val Loss: 0.0006275 +2024-11-11 19:55:33,224 Epoch 1350/2000 +2024-11-11 19:55:48,242 Current Learning Rate: 0.0014644661 +2024-11-11 19:55:48,968 Train Loss: 0.0005034, Val Loss: 0.0006267 +2024-11-11 19:55:48,968 Epoch 1351/2000 +2024-11-11 19:56:04,962 Current Learning Rate: 0.0014093685 +2024-11-11 19:56:04,963 Train Loss: 0.0006049, Val Loss: 0.0006274 +2024-11-11 19:56:04,963 Epoch 1352/2000 +2024-11-11 19:56:21,231 Current Learning Rate: 0.0013551569 +2024-11-11 19:56:22,015 Train Loss: 0.0005960, Val Loss: 0.0006250 +2024-11-11 19:56:22,016 Epoch 1353/2000 +2024-11-11 19:56:36,749 Current 
Learning Rate: 0.0013018445 +2024-11-11 19:56:37,543 Train Loss: 0.0004573, Val Loss: 0.0006241 +2024-11-11 19:56:37,544 Epoch 1354/2000 +2024-11-11 19:56:52,284 Current Learning Rate: 0.0012494447 +2024-11-11 19:56:53,035 Train Loss: 0.0004875, Val Loss: 0.0006239 +2024-11-11 19:56:53,035 Epoch 1355/2000 +2024-11-11 19:57:08,426 Current Learning Rate: 0.0011979702 +2024-11-11 19:57:09,263 Train Loss: 0.0004818, Val Loss: 0.0006237 +2024-11-11 19:57:09,264 Epoch 1356/2000 +2024-11-11 19:57:23,711 Current Learning Rate: 0.0011474338 +2024-11-11 19:57:24,519 Train Loss: 0.0004881, Val Loss: 0.0006232 +2024-11-11 19:57:24,520 Epoch 1357/2000 +2024-11-11 19:57:39,097 Current Learning Rate: 0.0010978480 +2024-11-11 19:57:39,965 Train Loss: 0.0004620, Val Loss: 0.0006225 +2024-11-11 19:57:39,966 Epoch 1358/2000 +2024-11-11 19:57:54,537 Current Learning Rate: 0.0010492249 +2024-11-11 19:57:54,538 Train Loss: 0.0005744, Val Loss: 0.0006229 +2024-11-11 19:57:54,538 Epoch 1359/2000 +2024-11-11 19:58:10,844 Current Learning Rate: 0.0010015767 +2024-11-11 19:58:11,585 Train Loss: 0.0004765, Val Loss: 0.0006223 +2024-11-11 19:58:11,585 Epoch 1360/2000 +2024-11-11 19:58:26,382 Current Learning Rate: 0.0009549150 +2024-11-11 19:58:27,213 Train Loss: 0.0005791, Val Loss: 0.0006222 +2024-11-11 19:58:27,213 Epoch 1361/2000 +2024-11-11 19:58:43,103 Current Learning Rate: 0.0009092514 +2024-11-11 19:58:43,998 Train Loss: 0.0004861, Val Loss: 0.0006207 +2024-11-11 19:58:43,998 Epoch 1362/2000 +2024-11-11 19:58:59,476 Current Learning Rate: 0.0008645971 +2024-11-11 19:59:00,494 Train Loss: 0.0005046, Val Loss: 0.0006202 +2024-11-11 19:59:00,495 Epoch 1363/2000 +2024-11-11 19:59:16,049 Current Learning Rate: 0.0008209632 +2024-11-11 19:59:16,786 Train Loss: 0.0005233, Val Loss: 0.0006189 +2024-11-11 19:59:16,786 Epoch 1364/2000 +2024-11-11 19:59:31,459 Current Learning Rate: 0.0007783604 +2024-11-11 19:59:31,460 Train Loss: 0.0005546, Val Loss: 0.0006190 +2024-11-11 19:59:31,461 Epoch 
1365/2000 +2024-11-11 19:59:47,761 Current Learning Rate: 0.0007367992 +2024-11-11 19:59:48,511 Train Loss: 0.0004817, Val Loss: 0.0006182 +2024-11-11 19:59:48,512 Epoch 1366/2000 +2024-11-11 20:00:04,295 Current Learning Rate: 0.0006962899 +2024-11-11 20:00:05,153 Train Loss: 0.0005376, Val Loss: 0.0006179 +2024-11-11 20:00:05,153 Epoch 1367/2000 +2024-11-11 20:00:20,427 Current Learning Rate: 0.0006568424 +2024-11-11 20:00:21,418 Train Loss: 0.0005212, Val Loss: 0.0006174 +2024-11-11 20:00:21,418 Epoch 1368/2000 +2024-11-11 20:00:37,084 Current Learning Rate: 0.0006184666 +2024-11-11 20:00:38,080 Train Loss: 0.0005217, Val Loss: 0.0006170 +2024-11-11 20:00:38,080 Epoch 1369/2000 +2024-11-11 20:00:54,110 Current Learning Rate: 0.0005811718 +2024-11-11 20:00:55,156 Train Loss: 0.0004690, Val Loss: 0.0006166 +2024-11-11 20:00:55,157 Epoch 1370/2000 +2024-11-11 20:01:10,897 Current Learning Rate: 0.0005449674 +2024-11-11 20:01:11,730 Train Loss: 0.0004932, Val Loss: 0.0006164 +2024-11-11 20:01:11,730 Epoch 1371/2000 +2024-11-11 20:01:27,625 Current Learning Rate: 0.0005098621 +2024-11-11 20:01:28,422 Train Loss: 0.0005213, Val Loss: 0.0006162 +2024-11-11 20:01:28,423 Epoch 1372/2000 +2024-11-11 20:01:43,386 Current Learning Rate: 0.0004758647 +2024-11-11 20:01:44,315 Train Loss: 0.0004857, Val Loss: 0.0006157 +2024-11-11 20:01:44,315 Epoch 1373/2000 +2024-11-11 20:02:00,508 Current Learning Rate: 0.0004429836 +2024-11-11 20:02:00,509 Train Loss: 0.0005455, Val Loss: 0.0006157 +2024-11-11 20:02:00,509 Epoch 1374/2000 +2024-11-11 20:02:16,044 Current Learning Rate: 0.0004112269 +2024-11-11 20:02:16,879 Train Loss: 0.0004801, Val Loss: 0.0006153 +2024-11-11 20:02:16,880 Epoch 1375/2000 +2024-11-11 20:02:32,406 Current Learning Rate: 0.0003806023 +2024-11-11 20:02:32,407 Train Loss: 0.0005578, Val Loss: 0.0006154 +2024-11-11 20:02:32,408 Epoch 1376/2000 +2024-11-11 20:02:48,159 Current Learning Rate: 0.0003511176 +2024-11-11 20:02:48,957 Train Loss: 0.0004987, Val Loss: 
0.0006150 +2024-11-11 20:02:48,957 Epoch 1377/2000 +2024-11-11 20:03:04,027 Current Learning Rate: 0.0003227798 +2024-11-11 20:03:04,849 Train Loss: 0.0004501, Val Loss: 0.0006145 +2024-11-11 20:03:04,849 Epoch 1378/2000 +2024-11-11 20:03:19,727 Current Learning Rate: 0.0002955962 +2024-11-11 20:03:20,514 Train Loss: 0.0005302, Val Loss: 0.0006143 +2024-11-11 20:03:20,515 Epoch 1379/2000 +2024-11-11 20:03:35,539 Current Learning Rate: 0.0002695732 +2024-11-11 20:03:35,540 Train Loss: 0.0005470, Val Loss: 0.0006144 +2024-11-11 20:03:35,540 Epoch 1380/2000 +2024-11-11 20:03:52,709 Current Learning Rate: 0.0002447174 +2024-11-11 20:03:53,559 Train Loss: 0.0004807, Val Loss: 0.0006140 +2024-11-11 20:03:53,559 Epoch 1381/2000 +2024-11-11 20:04:08,275 Current Learning Rate: 0.0002210349 +2024-11-11 20:04:09,009 Train Loss: 0.0004747, Val Loss: 0.0006140 +2024-11-11 20:04:09,010 Epoch 1382/2000 +2024-11-11 20:04:25,007 Current Learning Rate: 0.0001985316 +2024-11-11 20:04:25,809 Train Loss: 0.0004527, Val Loss: 0.0006137 +2024-11-11 20:04:25,810 Epoch 1383/2000 +2024-11-11 20:04:41,164 Current Learning Rate: 0.0001772129 +2024-11-11 20:04:42,001 Train Loss: 0.0004655, Val Loss: 0.0006135 +2024-11-11 20:04:42,001 Epoch 1384/2000 +2024-11-11 20:04:57,280 Current Learning Rate: 0.0001570842 +2024-11-11 20:04:58,004 Train Loss: 0.0005277, Val Loss: 0.0006132 +2024-11-11 20:04:58,004 Epoch 1385/2000 +2024-11-11 20:05:13,753 Current Learning Rate: 0.0001381504 +2024-11-11 20:05:14,449 Train Loss: 0.0005367, Val Loss: 0.0006131 +2024-11-11 20:05:14,450 Epoch 1386/2000 +2024-11-11 20:05:30,359 Current Learning Rate: 0.0001204162 +2024-11-11 20:05:31,109 Train Loss: 0.0004735, Val Loss: 0.0006130 +2024-11-11 20:05:31,109 Epoch 1387/2000 +2024-11-11 20:05:46,185 Current Learning Rate: 0.0001038859 +2024-11-11 20:05:46,905 Train Loss: 0.0005416, Val Loss: 0.0006130 +2024-11-11 20:05:46,905 Epoch 1388/2000 +2024-11-11 20:06:01,720 Current Learning Rate: 0.0000885637 +2024-11-11 
20:06:04,291 Train Loss: 0.0005546, Val Loss: 0.0006130 +2024-11-11 20:06:04,291 Epoch 1389/2000 +2024-11-11 20:06:18,474 Current Learning Rate: 0.0000744534 +2024-11-11 20:06:19,201 Train Loss: 0.0004698, Val Loss: 0.0006130 +2024-11-11 20:06:19,201 Epoch 1390/2000 +2024-11-11 20:06:34,181 Current Learning Rate: 0.0000615583 +2024-11-11 20:06:34,780 Train Loss: 0.0005111, Val Loss: 0.0006129 +2024-11-11 20:06:34,780 Epoch 1391/2000 +2024-11-11 20:06:49,845 Current Learning Rate: 0.0000498817 +2024-11-11 20:06:49,846 Train Loss: 0.0005220, Val Loss: 0.0006129 +2024-11-11 20:06:49,846 Epoch 1392/2000 +2024-11-11 20:07:05,676 Current Learning Rate: 0.0000394265 +2024-11-11 20:07:06,525 Train Loss: 0.0005255, Val Loss: 0.0006129 +2024-11-11 20:07:06,526 Epoch 1393/2000 +2024-11-11 20:07:21,837 Current Learning Rate: 0.0000301952 +2024-11-11 20:07:21,838 Train Loss: 0.0004672, Val Loss: 0.0006129 +2024-11-11 20:07:21,839 Epoch 1394/2000 +2024-11-11 20:07:37,788 Current Learning Rate: 0.0000221902 +2024-11-11 20:07:38,536 Train Loss: 0.0004949, Val Loss: 0.0006128 +2024-11-11 20:07:38,537 Epoch 1395/2000 +2024-11-11 20:07:53,896 Current Learning Rate: 0.0000154133 +2024-11-11 20:07:53,897 Train Loss: 0.0005009, Val Loss: 0.0006128 +2024-11-11 20:07:53,897 Epoch 1396/2000 +2024-11-11 20:08:10,114 Current Learning Rate: 0.0000098664 +2024-11-11 20:08:10,896 Train Loss: 0.0004732, Val Loss: 0.0006128 +2024-11-11 20:08:10,896 Epoch 1397/2000 +2024-11-11 20:08:25,236 Current Learning Rate: 0.0000055506 +2024-11-11 20:08:25,237 Train Loss: 0.0005249, Val Loss: 0.0006128 +2024-11-11 20:08:25,237 Epoch 1398/2000 +2024-11-11 20:08:41,681 Current Learning Rate: 0.0000024672 +2024-11-11 20:08:42,420 Train Loss: 0.0004442, Val Loss: 0.0006128 +2024-11-11 20:08:42,420 Epoch 1399/2000 +2024-11-11 20:08:57,838 Current Learning Rate: 0.0000006168 +2024-11-11 20:08:58,618 Train Loss: 0.0004666, Val Loss: 0.0006128 +2024-11-11 20:08:58,618 Epoch 1400/2000 +2024-11-11 20:09:14,324 Current 
Learning Rate: 0.0000000000 +2024-11-11 20:09:14,325 Train Loss: 0.0005253, Val Loss: 0.0006129 +2024-11-11 20:09:14,325 Epoch 1401/2000 +2024-11-11 20:09:31,446 Current Learning Rate: 0.0000006168 +2024-11-11 20:09:31,447 Train Loss: 0.0005096, Val Loss: 0.0006128 +2024-11-11 20:09:31,447 Epoch 1402/2000 +2024-11-11 20:09:48,001 Current Learning Rate: 0.0000024672 +2024-11-11 20:09:48,002 Train Loss: 0.0004710, Val Loss: 0.0006128 +2024-11-11 20:09:48,003 Epoch 1403/2000 +2024-11-11 20:10:05,157 Current Learning Rate: 0.0000055506 +2024-11-11 20:10:05,158 Train Loss: 0.0004903, Val Loss: 0.0006128 +2024-11-11 20:10:05,158 Epoch 1404/2000 +2024-11-11 20:10:21,151 Current Learning Rate: 0.0000098664 +2024-11-11 20:10:21,152 Train Loss: 0.0004696, Val Loss: 0.0006128 +2024-11-11 20:10:21,152 Epoch 1405/2000 +2024-11-11 20:10:37,562 Current Learning Rate: 0.0000154133 +2024-11-11 20:10:37,563 Train Loss: 0.0005107, Val Loss: 0.0006128 +2024-11-11 20:10:37,563 Epoch 1406/2000 +2024-11-11 20:10:53,670 Current Learning Rate: 0.0000221902 +2024-11-11 20:10:54,735 Train Loss: 0.0004813, Val Loss: 0.0006128 +2024-11-11 20:10:54,736 Epoch 1407/2000 +2024-11-11 20:11:10,847 Current Learning Rate: 0.0000301952 +2024-11-11 20:11:10,848 Train Loss: 0.0005241, Val Loss: 0.0006128 +2024-11-11 20:11:10,848 Epoch 1408/2000 +2024-11-11 20:11:27,126 Current Learning Rate: 0.0000394265 +2024-11-11 20:11:27,127 Train Loss: 0.0004872, Val Loss: 0.0006128 +2024-11-11 20:11:27,127 Epoch 1409/2000 +2024-11-11 20:11:42,746 Current Learning Rate: 0.0000498817 +2024-11-11 20:11:42,747 Train Loss: 0.0004862, Val Loss: 0.0006128 +2024-11-11 20:11:42,747 Epoch 1410/2000 +2024-11-11 20:11:58,066 Current Learning Rate: 0.0000615583 +2024-11-11 20:11:58,066 Train Loss: 0.0004675, Val Loss: 0.0006128 +2024-11-11 20:11:58,066 Epoch 1411/2000 +2024-11-11 20:12:13,442 Current Learning Rate: 0.0000744534 +2024-11-11 20:12:13,443 Train Loss: 0.0004975, Val Loss: 0.0006129 +2024-11-11 20:12:13,443 Epoch 
1412/2000 +2024-11-11 20:12:28,781 Current Learning Rate: 0.0000885637 +2024-11-11 20:12:29,566 Train Loss: 0.0004550, Val Loss: 0.0006127 +2024-11-11 20:12:29,567 Epoch 1413/2000 +2024-11-11 20:12:44,260 Current Learning Rate: 0.0001038859 +2024-11-11 20:12:44,261 Train Loss: 0.0005011, Val Loss: 0.0006128 +2024-11-11 20:12:44,261 Epoch 1414/2000 +2024-11-11 20:12:59,904 Current Learning Rate: 0.0001204162 +2024-11-11 20:12:59,904 Train Loss: 0.0005198, Val Loss: 0.0006129 +2024-11-11 20:12:59,904 Epoch 1415/2000 +2024-11-11 20:13:15,146 Current Learning Rate: 0.0001381504 +2024-11-11 20:13:15,147 Train Loss: 0.0004447, Val Loss: 0.0006128 +2024-11-11 20:13:15,147 Epoch 1416/2000 +2024-11-11 20:13:30,686 Current Learning Rate: 0.0001570842 +2024-11-11 20:13:30,687 Train Loss: 0.0004449, Val Loss: 0.0006128 +2024-11-11 20:13:30,687 Epoch 1417/2000 +2024-11-11 20:13:46,706 Current Learning Rate: 0.0001772129 +2024-11-11 20:13:46,707 Train Loss: 0.0005148, Val Loss: 0.0006130 +2024-11-11 20:13:46,707 Epoch 1418/2000 +2024-11-11 20:14:03,613 Current Learning Rate: 0.0001985316 +2024-11-11 20:14:03,613 Train Loss: 0.0004517, Val Loss: 0.0006130 +2024-11-11 20:14:03,614 Epoch 1419/2000 +2024-11-11 20:14:19,892 Current Learning Rate: 0.0002210349 +2024-11-11 20:14:19,892 Train Loss: 0.0004902, Val Loss: 0.0006130 +2024-11-11 20:14:19,893 Epoch 1420/2000 +2024-11-11 20:14:36,221 Current Learning Rate: 0.0002447174 +2024-11-11 20:14:36,222 Train Loss: 0.0004996, Val Loss: 0.0006131 +2024-11-11 20:14:36,222 Epoch 1421/2000 +2024-11-11 20:14:52,817 Current Learning Rate: 0.0002695732 +2024-11-11 20:14:52,817 Train Loss: 0.0005237, Val Loss: 0.0006131 +2024-11-11 20:14:52,818 Epoch 1422/2000 +2024-11-11 20:15:08,077 Current Learning Rate: 0.0002955962 +2024-11-11 20:15:08,077 Train Loss: 0.0004482, Val Loss: 0.0006132 +2024-11-11 20:15:08,078 Epoch 1423/2000 +2024-11-11 20:15:24,080 Current Learning Rate: 0.0003227798 +2024-11-11 20:15:24,080 Train Loss: 0.0004680, Val Loss: 
0.0006132 +2024-11-11 20:15:24,080 Epoch 1424/2000 +2024-11-11 20:15:40,211 Current Learning Rate: 0.0003511176 +2024-11-11 20:15:40,211 Train Loss: 0.0005434, Val Loss: 0.0006135 +2024-11-11 20:15:40,211 Epoch 1425/2000 +2024-11-11 20:15:56,512 Current Learning Rate: 0.0003806023 +2024-11-11 20:15:56,512 Train Loss: 0.0005129, Val Loss: 0.0006135 +2024-11-11 20:15:56,513 Epoch 1426/2000 +2024-11-11 20:16:12,728 Current Learning Rate: 0.0004112269 +2024-11-11 20:16:12,728 Train Loss: 0.0005002, Val Loss: 0.0006136 +2024-11-11 20:16:12,728 Epoch 1427/2000 +2024-11-11 20:16:28,624 Current Learning Rate: 0.0004429836 +2024-11-11 20:16:28,624 Train Loss: 0.0005272, Val Loss: 0.0006138 +2024-11-11 20:16:28,624 Epoch 1428/2000 +2024-11-11 20:16:44,393 Current Learning Rate: 0.0004758647 +2024-11-11 20:16:44,394 Train Loss: 0.0004807, Val Loss: 0.0006142 +2024-11-11 20:16:44,394 Epoch 1429/2000 +2024-11-11 20:16:59,941 Current Learning Rate: 0.0005098621 +2024-11-11 20:16:59,941 Train Loss: 0.0004729, Val Loss: 0.0006140 +2024-11-11 20:16:59,941 Epoch 1430/2000 +2024-11-11 20:17:15,698 Current Learning Rate: 0.0005449674 +2024-11-11 20:17:15,699 Train Loss: 0.0004769, Val Loss: 0.0006139 +2024-11-11 20:17:15,699 Epoch 1431/2000 +2024-11-11 20:17:31,241 Current Learning Rate: 0.0005811718 +2024-11-11 20:17:31,242 Train Loss: 0.0005230, Val Loss: 0.0006142 +2024-11-11 20:17:31,242 Epoch 1432/2000 +2024-11-11 20:17:46,743 Current Learning Rate: 0.0006184666 +2024-11-11 20:17:46,743 Train Loss: 0.0005331, Val Loss: 0.0006143 +2024-11-11 20:17:46,743 Epoch 1433/2000 +2024-11-11 20:18:02,340 Current Learning Rate: 0.0006568424 +2024-11-11 20:18:02,340 Train Loss: 0.0004917, Val Loss: 0.0006143 +2024-11-11 20:18:02,340 Epoch 1434/2000 +2024-11-11 20:18:17,752 Current Learning Rate: 0.0006962899 +2024-11-11 20:18:17,753 Train Loss: 0.0004734, Val Loss: 0.0006141 +2024-11-11 20:18:17,753 Epoch 1435/2000 +2024-11-11 20:18:33,356 Current Learning Rate: 0.0007367992 +2024-11-11 
20:18:33,356 Train Loss: 0.0004983, Val Loss: 0.0006142 +2024-11-11 20:18:33,357 Epoch 1436/2000 +2024-11-11 20:18:49,180 Current Learning Rate: 0.0007783604 +2024-11-11 20:18:49,181 Train Loss: 0.0005069, Val Loss: 0.0006153 +2024-11-11 20:18:49,181 Epoch 1437/2000 +2024-11-11 20:19:04,667 Current Learning Rate: 0.0008209632 +2024-11-11 20:19:04,668 Train Loss: 0.0004598, Val Loss: 0.0006165 +2024-11-11 20:19:04,668 Epoch 1438/2000 +2024-11-11 20:19:21,372 Current Learning Rate: 0.0008645971 +2024-11-11 20:19:21,373 Train Loss: 0.0005561, Val Loss: 0.0006181 +2024-11-11 20:19:21,374 Epoch 1439/2000 +2024-11-11 20:19:37,910 Current Learning Rate: 0.0009092514 +2024-11-11 20:19:37,911 Train Loss: 0.0004470, Val Loss: 0.0006189 +2024-11-11 20:19:37,911 Epoch 1440/2000 +2024-11-11 20:19:53,435 Current Learning Rate: 0.0009549150 +2024-11-11 20:19:53,436 Train Loss: 0.0004760, Val Loss: 0.0006184 +2024-11-11 20:19:53,436 Epoch 1441/2000 +2024-11-11 20:20:09,808 Current Learning Rate: 0.0010015767 +2024-11-11 20:20:09,809 Train Loss: 0.0005692, Val Loss: 0.0006224 +2024-11-11 20:20:09,809 Epoch 1442/2000 +2024-11-11 20:20:25,028 Current Learning Rate: 0.0010492249 +2024-11-11 20:20:25,029 Train Loss: 0.0004826, Val Loss: 0.0006208 +2024-11-11 20:20:25,029 Epoch 1443/2000 +2024-11-11 20:20:40,772 Current Learning Rate: 0.0010978480 +2024-11-11 20:20:40,773 Train Loss: 0.0005237, Val Loss: 0.0006214 +2024-11-11 20:20:40,773 Epoch 1444/2000 +2024-11-11 20:20:56,856 Current Learning Rate: 0.0011474338 +2024-11-11 20:20:56,856 Train Loss: 0.0005004, Val Loss: 0.0006202 +2024-11-11 20:20:56,857 Epoch 1445/2000 +2024-11-11 20:21:12,767 Current Learning Rate: 0.0011979702 +2024-11-11 20:21:12,768 Train Loss: 0.0005248, Val Loss: 0.0006198 +2024-11-11 20:21:12,769 Epoch 1446/2000 +2024-11-11 20:21:28,844 Current Learning Rate: 0.0012494447 +2024-11-11 20:21:28,844 Train Loss: 0.0005265, Val Loss: 0.0006207 +2024-11-11 20:21:28,844 Epoch 1447/2000 +2024-11-11 20:21:44,071 Current 
Learning Rate: 0.0013018445 +2024-11-11 20:21:44,071 Train Loss: 0.0004981, Val Loss: 0.0006168 +2024-11-11 20:21:44,071 Epoch 1448/2000 +2024-11-11 20:21:59,511 Current Learning Rate: 0.0013551569 +2024-11-11 20:21:59,512 Train Loss: 0.0004470, Val Loss: 0.0006150 +2024-11-11 20:21:59,512 Epoch 1449/2000 +2024-11-11 20:22:15,216 Current Learning Rate: 0.0014093685 +2024-11-11 20:22:15,217 Train Loss: 0.0004914, Val Loss: 0.0006155 +2024-11-11 20:22:15,217 Epoch 1450/2000 +2024-11-11 20:22:30,529 Current Learning Rate: 0.0014644661 +2024-11-11 20:22:30,529 Train Loss: 0.0005055, Val Loss: 0.0006161 +2024-11-11 20:22:30,529 Epoch 1451/2000 +2024-11-11 20:22:46,394 Current Learning Rate: 0.0015204360 +2024-11-11 20:22:46,394 Train Loss: 0.0005414, Val Loss: 0.0006216 +2024-11-11 20:22:46,395 Epoch 1452/2000 +2024-11-11 20:23:02,337 Current Learning Rate: 0.0015772645 +2024-11-11 20:23:02,338 Train Loss: 0.0004972, Val Loss: 0.0006162 +2024-11-11 20:23:02,338 Epoch 1453/2000 +2024-11-11 20:23:18,306 Current Learning Rate: 0.0016349374 +2024-11-11 20:23:18,306 Train Loss: 0.0004740, Val Loss: 0.0006138 +2024-11-11 20:23:18,306 Epoch 1454/2000 +2024-11-11 20:23:33,983 Current Learning Rate: 0.0016934407 +2024-11-11 20:23:33,983 Train Loss: 0.0004649, Val Loss: 0.0006135 +2024-11-11 20:23:33,984 Epoch 1455/2000 +2024-11-11 20:23:50,081 Current Learning Rate: 0.0017527598 +2024-11-11 20:23:50,082 Train Loss: 0.0005251, Val Loss: 0.0006147 +2024-11-11 20:23:50,082 Epoch 1456/2000 +2024-11-11 20:24:05,610 Current Learning Rate: 0.0018128801 +2024-11-11 20:24:05,611 Train Loss: 0.0004668, Val Loss: 0.0006150 +2024-11-11 20:24:05,611 Epoch 1457/2000 +2024-11-11 20:24:21,303 Current Learning Rate: 0.0018737867 +2024-11-11 20:24:21,303 Train Loss: 0.0004783, Val Loss: 0.0006162 +2024-11-11 20:24:21,304 Epoch 1458/2000 +2024-11-11 20:24:37,834 Current Learning Rate: 0.0019354647 +2024-11-11 20:24:37,834 Train Loss: 0.0004964, Val Loss: 0.0006145 +2024-11-11 20:24:37,835 Epoch 
1459/2000 +2024-11-11 20:24:53,164 Current Learning Rate: 0.0019978989 +2024-11-11 20:24:53,164 Train Loss: 0.0005357, Val Loss: 0.0006147 +2024-11-11 20:24:53,165 Epoch 1460/2000 +2024-11-11 20:25:09,191 Current Learning Rate: 0.0020610737 +2024-11-11 20:25:09,192 Train Loss: 0.0004967, Val Loss: 0.0006198 +2024-11-11 20:25:09,192 Epoch 1461/2000 +2024-11-11 20:25:24,449 Current Learning Rate: 0.0021249737 +2024-11-11 20:25:24,450 Train Loss: 0.0004676, Val Loss: 0.0006166 +2024-11-11 20:25:24,450 Epoch 1462/2000 +2024-11-11 20:25:40,590 Current Learning Rate: 0.0021895831 +2024-11-11 20:25:40,590 Train Loss: 0.0005227, Val Loss: 0.0006166 +2024-11-11 20:25:40,590 Epoch 1463/2000 +2024-11-11 20:25:55,846 Current Learning Rate: 0.0022548859 +2024-11-11 20:25:55,846 Train Loss: 0.0004782, Val Loss: 0.0006176 +2024-11-11 20:25:55,847 Epoch 1464/2000 +2024-11-11 20:26:11,954 Current Learning Rate: 0.0023208660 +2024-11-11 20:26:11,955 Train Loss: 0.0004753, Val Loss: 0.0006173 +2024-11-11 20:26:11,955 Epoch 1465/2000 +2024-11-11 20:26:27,909 Current Learning Rate: 0.0023875072 +2024-11-11 20:26:27,909 Train Loss: 0.0005405, Val Loss: 0.0006557 +2024-11-11 20:26:27,909 Epoch 1466/2000 +2024-11-11 20:26:43,062 Current Learning Rate: 0.0024547929 +2024-11-11 20:26:43,062 Train Loss: 0.0004659, Val Loss: 0.0006196 +2024-11-11 20:26:43,062 Epoch 1467/2000 +2024-11-11 20:26:59,145 Current Learning Rate: 0.0025227067 +2024-11-11 20:26:59,146 Train Loss: 0.0005794, Val Loss: 0.0006680 +2024-11-11 20:26:59,146 Epoch 1468/2000 +2024-11-11 20:27:15,435 Current Learning Rate: 0.0025912316 +2024-11-11 20:27:15,435 Train Loss: 0.0005062, Val Loss: 0.0006347 +2024-11-11 20:27:15,435 Epoch 1469/2000 +2024-11-11 20:27:31,261 Current Learning Rate: 0.0026603509 +2024-11-11 20:27:31,262 Train Loss: 0.0004669, Val Loss: 0.0006229 +2024-11-11 20:27:31,262 Epoch 1470/2000 +2024-11-11 20:27:47,063 Current Learning Rate: 0.0027300475 +2024-11-11 20:27:47,064 Train Loss: 0.0005142, Val Loss: 
0.0006265 +2024-11-11 20:27:47,064 Epoch 1471/2000 +2024-11-11 20:28:02,366 Current Learning Rate: 0.0028003042 +2024-11-11 20:28:02,367 Train Loss: 0.0005021, Val Loss: 0.0006402 +2024-11-11 20:28:02,367 Epoch 1472/2000 +2024-11-11 20:28:18,150 Current Learning Rate: 0.0028711035 +2024-11-11 20:28:18,150 Train Loss: 0.0004934, Val Loss: 0.0006229 +2024-11-11 20:28:18,150 Epoch 1473/2000 +2024-11-11 20:28:33,929 Current Learning Rate: 0.0029424282 +2024-11-11 20:28:33,929 Train Loss: 0.0004801, Val Loss: 0.0006213 +2024-11-11 20:28:33,930 Epoch 1474/2000 +2024-11-11 20:28:49,658 Current Learning Rate: 0.0030142605 +2024-11-11 20:28:49,659 Train Loss: 0.0005335, Val Loss: 0.0006207 +2024-11-11 20:28:49,659 Epoch 1475/2000 +2024-11-11 20:29:05,508 Current Learning Rate: 0.0030865828 +2024-11-11 20:29:05,509 Train Loss: 0.0005787, Val Loss: 0.0007002 +2024-11-11 20:29:05,509 Epoch 1476/2000 +2024-11-11 20:29:21,614 Current Learning Rate: 0.0031593772 +2024-11-11 20:29:21,614 Train Loss: 0.0005273, Val Loss: 0.0006420 +2024-11-11 20:29:21,615 Epoch 1477/2000 +2024-11-11 20:29:37,422 Current Learning Rate: 0.0032326258 +2024-11-11 20:29:37,422 Train Loss: 0.0005064, Val Loss: 0.0006591 +2024-11-11 20:29:37,422 Epoch 1478/2000 +2024-11-11 20:29:54,498 Current Learning Rate: 0.0033063104 +2024-11-11 20:29:54,498 Train Loss: 0.0005649, Val Loss: 0.0006485 +2024-11-11 20:29:54,499 Epoch 1479/2000 +2024-11-11 20:30:10,689 Current Learning Rate: 0.0033804129 +2024-11-11 20:30:10,690 Train Loss: 0.0005134, Val Loss: 0.0006413 +2024-11-11 20:30:10,690 Epoch 1480/2000 +2024-11-11 20:30:26,778 Current Learning Rate: 0.0034549150 +2024-11-11 20:30:26,778 Train Loss: 0.0006216, Val Loss: 0.0006586 +2024-11-11 20:30:26,778 Epoch 1481/2000 +2024-11-11 20:30:42,425 Current Learning Rate: 0.0035297984 +2024-11-11 20:30:42,425 Train Loss: 0.0005311, Val Loss: 0.0006958 +2024-11-11 20:30:42,425 Epoch 1482/2000 +2024-11-11 20:30:57,934 Current Learning Rate: 0.0036050445 +2024-11-11 
20:30:57,935 Train Loss: 0.0005394, Val Loss: 0.0006506 +2024-11-11 20:30:57,935 Epoch 1483/2000 +2024-11-11 20:31:14,981 Current Learning Rate: 0.0036806348 +2024-11-11 20:31:14,981 Train Loss: 0.0005059, Val Loss: 0.0006382 +2024-11-11 20:31:14,982 Epoch 1484/2000 +2024-11-11 20:31:30,773 Current Learning Rate: 0.0037565506 +2024-11-11 20:31:30,773 Train Loss: 0.0005005, Val Loss: 0.0006381 +2024-11-11 20:31:30,773 Epoch 1485/2000 +2024-11-11 20:31:46,635 Current Learning Rate: 0.0038327732 +2024-11-11 20:31:46,635 Train Loss: 0.0005609, Val Loss: 0.0006412 +2024-11-11 20:31:46,636 Epoch 1486/2000 +2024-11-11 20:32:03,296 Current Learning Rate: 0.0039092838 +2024-11-11 20:32:03,297 Train Loss: 0.0006931, Val Loss: 0.0006978 +2024-11-11 20:32:03,297 Epoch 1487/2000 +2024-11-11 20:32:20,023 Current Learning Rate: 0.0039860635 +2024-11-11 20:32:20,023 Train Loss: 0.0005532, Val Loss: 0.0006877 +2024-11-11 20:32:20,023 Epoch 1488/2000 +2024-11-11 20:32:35,457 Current Learning Rate: 0.0040630934 +2024-11-11 20:32:35,457 Train Loss: 0.0005985, Val Loss: 0.0006451 +2024-11-11 20:32:35,458 Epoch 1489/2000 +2024-11-11 20:32:51,293 Current Learning Rate: 0.0041403545 +2024-11-11 20:32:51,293 Train Loss: 0.0005420, Val Loss: 0.0006564 +2024-11-11 20:32:51,293 Epoch 1490/2000 +2024-11-11 20:33:08,048 Current Learning Rate: 0.0042178277 +2024-11-11 20:33:08,048 Train Loss: 0.0006146, Val Loss: 0.0006611 +2024-11-11 20:33:08,048 Epoch 1491/2000 +2024-11-11 20:33:24,755 Current Learning Rate: 0.0042954938 +2024-11-11 20:33:24,756 Train Loss: 0.0005250, Val Loss: 0.0006514 +2024-11-11 20:33:24,756 Epoch 1492/2000 +2024-11-11 20:33:41,304 Current Learning Rate: 0.0043733338 +2024-11-11 20:33:41,305 Train Loss: 0.0004744, Val Loss: 0.0006579 +2024-11-11 20:33:41,305 Epoch 1493/2000 +2024-11-11 20:33:58,195 Current Learning Rate: 0.0044513284 +2024-11-11 20:33:58,196 Train Loss: 0.0005280, Val Loss: 0.0006560 +2024-11-11 20:33:58,196 Epoch 1494/2000 +2024-11-11 20:34:13,641 Current 
Learning Rate: 0.0045294584 +2024-11-11 20:34:13,641 Train Loss: 0.0004900, Val Loss: 0.0006516 +2024-11-11 20:34:13,641 Epoch 1495/2000 +2024-11-11 20:34:30,156 Current Learning Rate: 0.0046077045 +2024-11-11 20:34:30,157 Train Loss: 0.0007456, Val Loss: 0.0007136 +2024-11-11 20:34:30,157 Epoch 1496/2000 +2024-11-11 20:34:46,606 Current Learning Rate: 0.0046860474 +2024-11-11 20:34:46,606 Train Loss: 0.0005671, Val Loss: 0.0006747 +2024-11-11 20:34:46,606 Epoch 1497/2000 +2024-11-11 20:35:02,765 Current Learning Rate: 0.0047644677 +2024-11-11 20:35:02,765 Train Loss: 0.0006108, Val Loss: 0.0007261 +2024-11-11 20:35:02,765 Epoch 1498/2000 +2024-11-11 20:35:19,650 Current Learning Rate: 0.0048429462 +2024-11-11 20:35:19,650 Train Loss: 0.0006097, Val Loss: 0.0006788 +2024-11-11 20:35:19,651 Epoch 1499/2000 +2024-11-11 20:35:36,023 Current Learning Rate: 0.0049214634 +2024-11-11 20:35:36,023 Train Loss: 0.0005545, Val Loss: 0.0006714 +2024-11-11 20:35:36,023 Epoch 1500/2000 +2024-11-11 20:35:52,077 Current Learning Rate: 0.0050000000 +2024-11-11 20:35:52,077 Train Loss: 0.0005661, Val Loss: 0.0006809 +2024-11-11 20:35:52,077 Epoch 1501/2000 +2024-11-11 20:36:08,282 Current Learning Rate: 0.0050785366 +2024-11-11 20:36:08,282 Train Loss: 0.0005054, Val Loss: 0.0006400 +2024-11-11 20:36:08,282 Epoch 1502/2000 +2024-11-11 20:36:24,742 Current Learning Rate: 0.0051570538 +2024-11-11 20:36:24,742 Train Loss: 0.0005751, Val Loss: 0.0006711 +2024-11-11 20:36:24,742 Epoch 1503/2000 +2024-11-11 20:36:39,922 Current Learning Rate: 0.0052355323 +2024-11-11 20:36:39,923 Train Loss: 0.0005303, Val Loss: 0.0006609 +2024-11-11 20:36:39,923 Epoch 1504/2000 +2024-11-11 20:36:55,760 Current Learning Rate: 0.0053139526 +2024-11-11 20:36:55,760 Train Loss: 0.0005221, Val Loss: 0.0006698 +2024-11-11 20:36:55,761 Epoch 1505/2000 +2024-11-11 20:37:11,953 Current Learning Rate: 0.0053922955 +2024-11-11 20:37:11,954 Train Loss: 0.0005451, Val Loss: 0.0007014 +2024-11-11 20:37:11,954 Epoch 
1506/2000 +2024-11-11 20:37:27,969 Current Learning Rate: 0.0054705416 +2024-11-11 20:37:27,970 Train Loss: 0.0006498, Val Loss: 0.0007386 +2024-11-11 20:37:27,970 Epoch 1507/2000 +2024-11-11 20:37:43,495 Current Learning Rate: 0.0055486716 +2024-11-11 20:37:43,495 Train Loss: 0.0005905, Val Loss: 0.0006920 +2024-11-11 20:37:43,495 Epoch 1508/2000 +2024-11-11 20:38:00,272 Current Learning Rate: 0.0056266662 +2024-11-11 20:38:00,272 Train Loss: 0.0005553, Val Loss: 0.0006715 +2024-11-11 20:38:00,272 Epoch 1509/2000 +2024-11-11 20:38:16,144 Current Learning Rate: 0.0057045062 +2024-11-11 20:38:16,144 Train Loss: 0.0005249, Val Loss: 0.0006635 +2024-11-11 20:38:16,144 Epoch 1510/2000 +2024-11-11 20:38:32,382 Current Learning Rate: 0.0057821723 +2024-11-11 20:38:32,382 Train Loss: 0.0006379, Val Loss: 0.0007383 +2024-11-11 20:38:32,382 Epoch 1511/2000 +2024-11-11 20:38:47,553 Current Learning Rate: 0.0058596455 +2024-11-11 20:38:47,553 Train Loss: 0.0006241, Val Loss: 0.0007068 +2024-11-11 20:38:47,554 Epoch 1512/2000 +2024-11-11 20:39:03,014 Current Learning Rate: 0.0059369066 +2024-11-11 20:39:03,015 Train Loss: 0.0005958, Val Loss: 0.0006902 +2024-11-11 20:39:03,015 Epoch 1513/2000 +2024-11-11 20:39:19,176 Current Learning Rate: 0.0060139365 +2024-11-11 20:39:19,176 Train Loss: 0.0005927, Val Loss: 0.0006851 +2024-11-11 20:39:19,176 Epoch 1514/2000 +2024-11-11 20:39:35,681 Current Learning Rate: 0.0060907162 +2024-11-11 20:39:35,681 Train Loss: 0.0005849, Val Loss: 0.0006862 +2024-11-11 20:39:35,682 Epoch 1515/2000 +2024-11-11 20:39:51,533 Current Learning Rate: 0.0061672268 +2024-11-11 20:39:51,534 Train Loss: 0.0005991, Val Loss: 0.0007278 +2024-11-11 20:39:51,534 Epoch 1516/2000 +2024-11-11 20:40:07,384 Current Learning Rate: 0.0062434494 +2024-11-11 20:40:07,385 Train Loss: 0.0006466, Val Loss: 0.0007101 +2024-11-11 20:40:07,385 Epoch 1517/2000 +2024-11-11 20:40:24,452 Current Learning Rate: 0.0063193652 +2024-11-11 20:40:24,452 Train Loss: 0.0005909, Val Loss: 
0.0007273 +2024-11-11 20:40:24,453 Epoch 1518/2000 +2024-11-11 20:40:40,274 Current Learning Rate: 0.0063949555 +2024-11-11 20:40:40,274 Train Loss: 0.0006418, Val Loss: 0.0007695 +2024-11-11 20:40:40,274 Epoch 1519/2000 +2024-11-11 20:40:55,638 Current Learning Rate: 0.0064702016 +2024-11-11 20:40:55,638 Train Loss: 0.0006079, Val Loss: 0.0007664 +2024-11-11 20:40:55,638 Epoch 1520/2000 +2024-11-11 20:41:10,975 Current Learning Rate: 0.0065450850 +2024-11-11 20:41:10,975 Train Loss: 0.0006324, Val Loss: 0.0007451 +2024-11-11 20:41:10,976 Epoch 1521/2000 +2024-11-11 20:41:26,362 Current Learning Rate: 0.0066195871 +2024-11-11 20:41:26,362 Train Loss: 0.0005829, Val Loss: 0.0007283 +2024-11-11 20:41:26,362 Epoch 1522/2000 +2024-11-11 20:41:41,693 Current Learning Rate: 0.0066936896 +2024-11-11 20:41:41,694 Train Loss: 0.0005885, Val Loss: 0.0007237 +2024-11-11 20:41:41,694 Epoch 1523/2000 +2024-11-11 20:41:57,294 Current Learning Rate: 0.0067673742 +2024-11-11 20:41:57,295 Train Loss: 0.0006465, Val Loss: 0.0007127 +2024-11-11 20:41:57,295 Epoch 1524/2000 +2024-11-11 20:42:13,255 Current Learning Rate: 0.0068406228 +2024-11-11 20:42:13,256 Train Loss: 0.0005241, Val Loss: 0.0007018 +2024-11-11 20:42:13,256 Epoch 1525/2000 +2024-11-11 20:42:29,226 Current Learning Rate: 0.0069134172 +2024-11-11 20:42:29,226 Train Loss: 0.0005400, Val Loss: 0.0006822 +2024-11-11 20:42:29,227 Epoch 1526/2000 +2024-11-11 20:42:45,445 Current Learning Rate: 0.0069857395 +2024-11-11 20:42:45,445 Train Loss: 0.0006432, Val Loss: 0.0006552 +2024-11-11 20:42:45,445 Epoch 1527/2000 +2024-11-11 20:43:02,102 Current Learning Rate: 0.0070575718 +2024-11-11 20:43:02,102 Train Loss: 0.0005796, Val Loss: 0.0007324 +2024-11-11 20:43:02,102 Epoch 1528/2000 +2024-11-11 20:43:17,896 Current Learning Rate: 0.0071288965 +2024-11-11 20:43:17,896 Train Loss: 0.0005811, Val Loss: 0.0007003 +2024-11-11 20:43:17,897 Epoch 1529/2000 +2024-11-11 20:43:33,769 Current Learning Rate: 0.0071996958 +2024-11-11 
20:43:33,770 Train Loss: 0.0005254, Val Loss: 0.0006856 +2024-11-11 20:43:33,771 Epoch 1530/2000 +2024-11-11 20:43:50,678 Current Learning Rate: 0.0072699525 +2024-11-11 20:43:50,679 Train Loss: 0.0007053, Val Loss: 0.0007410 +2024-11-11 20:43:50,680 Epoch 1531/2000 +2024-11-11 20:44:06,548 Current Learning Rate: 0.0073396491 +2024-11-11 20:44:06,549 Train Loss: 0.0007030, Val Loss: 0.0006901 +2024-11-11 20:44:06,549 Epoch 1532/2000 +2024-11-11 20:44:22,515 Current Learning Rate: 0.0074087684 +2024-11-11 20:44:22,516 Train Loss: 0.0005726, Val Loss: 0.0006696 +2024-11-11 20:44:22,516 Epoch 1533/2000 +2024-11-11 20:44:38,820 Current Learning Rate: 0.0074772933 +2024-11-11 20:44:38,821 Train Loss: 0.0005782, Val Loss: 0.0006637 +2024-11-11 20:44:38,821 Epoch 1534/2000 +2024-11-11 20:44:54,761 Current Learning Rate: 0.0075452071 +2024-11-11 20:44:54,761 Train Loss: 0.0005654, Val Loss: 0.0006796 +2024-11-11 20:44:54,762 Epoch 1535/2000 +2024-11-11 20:45:10,162 Current Learning Rate: 0.0076124928 +2024-11-11 20:45:10,163 Train Loss: 0.0005819, Val Loss: 0.0007103 +2024-11-11 20:45:10,163 Epoch 1536/2000 +2024-11-11 20:45:25,934 Current Learning Rate: 0.0076791340 +2024-11-11 20:45:25,934 Train Loss: 0.0006324, Val Loss: 0.0007561 +2024-11-11 20:45:25,934 Epoch 1537/2000 +2024-11-11 20:45:41,402 Current Learning Rate: 0.0077451141 +2024-11-11 20:45:41,402 Train Loss: 0.0005841, Val Loss: 0.0007257 +2024-11-11 20:45:41,403 Epoch 1538/2000 +2024-11-11 20:45:56,820 Current Learning Rate: 0.0078104169 +2024-11-11 20:45:56,821 Train Loss: 0.0006216, Val Loss: 0.0007181 +2024-11-11 20:45:56,821 Epoch 1539/2000 +2024-11-11 20:46:12,328 Current Learning Rate: 0.0078750263 +2024-11-11 20:46:12,328 Train Loss: 0.0006122, Val Loss: 0.0006882 +2024-11-11 20:46:12,329 Epoch 1540/2000 +2024-11-11 20:46:28,022 Current Learning Rate: 0.0079389263 +2024-11-11 20:46:28,022 Train Loss: 0.0006561, Val Loss: 0.0007055 +2024-11-11 20:46:28,023 Epoch 1541/2000 +2024-11-11 20:46:44,403 Current 
Learning Rate: 0.0080021011 +2024-11-11 20:46:44,404 Train Loss: 0.0006346, Val Loss: 0.0007708 +2024-11-11 20:46:44,404 Epoch 1542/2000 +2024-11-11 20:47:00,499 Current Learning Rate: 0.0080645353 +2024-11-11 20:47:00,499 Train Loss: 0.0007184, Val Loss: 0.0007942 +2024-11-11 20:47:00,500 Epoch 1543/2000 +2024-11-11 20:47:16,735 Current Learning Rate: 0.0081262133 +2024-11-11 20:47:16,736 Train Loss: 0.0006839, Val Loss: 0.0008166 +2024-11-11 20:47:16,736 Epoch 1544/2000 +2024-11-11 20:47:32,587 Current Learning Rate: 0.0081871199 +2024-11-11 20:47:32,588 Train Loss: 0.0006820, Val Loss: 0.0007634 +2024-11-11 20:47:32,588 Epoch 1545/2000 +2024-11-11 20:47:48,165 Current Learning Rate: 0.0082472402 +2024-11-11 20:47:48,165 Train Loss: 0.0006341, Val Loss: 0.0007432 +2024-11-11 20:47:48,165 Epoch 1546/2000 +2024-11-11 20:48:03,330 Current Learning Rate: 0.0083065593 +2024-11-11 20:48:03,330 Train Loss: 0.0006256, Val Loss: 0.0007161 +2024-11-11 20:48:03,331 Epoch 1547/2000 +2024-11-11 20:48:18,658 Current Learning Rate: 0.0083650626 +2024-11-11 20:48:18,659 Train Loss: 0.0006356, Val Loss: 0.0006923 +2024-11-11 20:48:18,659 Epoch 1548/2000 +2024-11-11 20:48:34,173 Current Learning Rate: 0.0084227355 +2024-11-11 20:48:34,174 Train Loss: 0.0005454, Val Loss: 0.0006823 +2024-11-11 20:48:34,174 Epoch 1549/2000 +2024-11-11 20:48:49,519 Current Learning Rate: 0.0084795640 +2024-11-11 20:48:49,520 Train Loss: 0.0006010, Val Loss: 0.0006703 +2024-11-11 20:48:49,520 Epoch 1550/2000 +2024-11-11 20:49:05,189 Current Learning Rate: 0.0085355339 +2024-11-11 20:49:05,190 Train Loss: 0.0006627, Val Loss: 0.0006890 +2024-11-11 20:49:05,190 Epoch 1551/2000 +2024-11-11 20:49:20,460 Current Learning Rate: 0.0085906315 +2024-11-11 20:49:20,460 Train Loss: 0.0007621, Val Loss: 0.0007254 +2024-11-11 20:49:20,460 Epoch 1552/2000 +2024-11-11 20:49:35,724 Current Learning Rate: 0.0086448431 +2024-11-11 20:49:35,724 Train Loss: 0.0005560, Val Loss: 0.0007352 +2024-11-11 20:49:35,725 Epoch 
1553/2000 +2024-11-11 20:49:51,051 Current Learning Rate: 0.0086981555 +2024-11-11 20:49:51,052 Train Loss: 0.0006073, Val Loss: 0.0007517 +2024-11-11 20:49:51,052 Epoch 1554/2000 +2024-11-11 20:50:07,115 Current Learning Rate: 0.0087505553 +2024-11-11 20:50:07,115 Train Loss: 0.0006429, Val Loss: 0.0007569 +2024-11-11 20:50:07,115 Epoch 1555/2000 +2024-11-11 20:50:22,644 Current Learning Rate: 0.0088020298 +2024-11-11 20:50:22,645 Train Loss: 0.0007582, Val Loss: 0.0007841 +2024-11-11 20:50:22,645 Epoch 1556/2000 +2024-11-11 20:50:38,129 Current Learning Rate: 0.0088525662 +2024-11-11 20:50:38,130 Train Loss: 0.0006544, Val Loss: 0.0007788 +2024-11-11 20:50:38,130 Epoch 1557/2000 +2024-11-11 20:50:53,981 Current Learning Rate: 0.0089021520 +2024-11-11 20:50:53,982 Train Loss: 0.0006456, Val Loss: 0.0007563 +2024-11-11 20:50:53,982 Epoch 1558/2000 +2024-11-11 20:51:10,423 Current Learning Rate: 0.0089507751 +2024-11-11 20:51:10,423 Train Loss: 0.0006523, Val Loss: 0.0007980 +2024-11-11 20:51:10,424 Epoch 1559/2000 +2024-11-11 20:51:26,694 Current Learning Rate: 0.0089984233 +2024-11-11 20:51:26,695 Train Loss: 0.0006832, Val Loss: 0.0007425 +2024-11-11 20:51:26,695 Epoch 1560/2000 +2024-11-11 20:51:42,747 Current Learning Rate: 0.0090450850 +2024-11-11 20:51:42,748 Train Loss: 0.0005677, Val Loss: 0.0007043 +2024-11-11 20:51:42,748 Epoch 1561/2000 +2024-11-11 20:51:59,257 Current Learning Rate: 0.0090907486 +2024-11-11 20:51:59,257 Train Loss: 0.0006249, Val Loss: 0.0007395 +2024-11-11 20:51:59,258 Epoch 1562/2000 +2024-11-11 20:52:15,419 Current Learning Rate: 0.0091354029 +2024-11-11 20:52:15,420 Train Loss: 0.0005915, Val Loss: 0.0007268 +2024-11-11 20:52:15,420 Epoch 1563/2000 +2024-11-11 20:52:31,647 Current Learning Rate: 0.0091790368 +2024-11-11 20:52:31,647 Train Loss: 0.0006009, Val Loss: 0.0007376 +2024-11-11 20:52:31,648 Epoch 1564/2000 +2024-11-11 20:52:47,545 Current Learning Rate: 0.0092216396 +2024-11-11 20:52:47,545 Train Loss: 0.0006693, Val Loss: 
0.0007563 +2024-11-11 20:52:47,546 Epoch 1565/2000 +2024-11-11 20:53:02,887 Current Learning Rate: 0.0092632008 +2024-11-11 20:53:02,888 Train Loss: 0.0005953, Val Loss: 0.0007969 +2024-11-11 20:53:02,889 Epoch 1566/2000 +2024-11-11 20:53:18,419 Current Learning Rate: 0.0093037101 +2024-11-11 20:53:18,419 Train Loss: 0.0006203, Val Loss: 0.0008209 +2024-11-11 20:53:18,419 Epoch 1567/2000 +2024-11-11 20:53:34,229 Current Learning Rate: 0.0093431576 +2024-11-11 20:53:34,230 Train Loss: 0.0006143, Val Loss: 0.0007918 +2024-11-11 20:53:34,230 Epoch 1568/2000 +2024-11-11 20:53:50,097 Current Learning Rate: 0.0093815334 +2024-11-11 20:53:50,098 Train Loss: 0.0006855, Val Loss: 0.0007661 +2024-11-11 20:53:50,098 Epoch 1569/2000 +2024-11-11 20:54:07,549 Current Learning Rate: 0.0094188282 +2024-11-11 20:54:07,550 Train Loss: 0.0006081, Val Loss: 0.0006879 +2024-11-11 20:54:07,550 Epoch 1570/2000 +2024-11-11 20:54:23,029 Current Learning Rate: 0.0094550326 +2024-11-11 20:54:23,030 Train Loss: 0.0005917, Val Loss: 0.0007156 +2024-11-11 20:54:23,030 Epoch 1571/2000 +2024-11-11 20:54:38,360 Current Learning Rate: 0.0094901379 +2024-11-11 20:54:38,360 Train Loss: 0.0005541, Val Loss: 0.0006825 +2024-11-11 20:54:38,360 Epoch 1572/2000 +2024-11-11 20:54:53,711 Current Learning Rate: 0.0095241353 +2024-11-11 20:54:53,711 Train Loss: 0.0005301, Val Loss: 0.0006911 +2024-11-11 20:54:53,711 Epoch 1573/2000 +2024-11-11 20:55:09,628 Current Learning Rate: 0.0095570164 +2024-11-11 20:55:09,628 Train Loss: 0.0005563, Val Loss: 0.0006888 +2024-11-11 20:55:09,628 Epoch 1574/2000 +2024-11-11 20:55:24,931 Current Learning Rate: 0.0095887731 +2024-11-11 20:55:24,931 Train Loss: 0.0006267, Val Loss: 0.0007232 +2024-11-11 20:55:24,932 Epoch 1575/2000 +2024-11-11 20:55:40,676 Current Learning Rate: 0.0096193977 +2024-11-11 20:55:40,677 Train Loss: 0.0005328, Val Loss: 0.0006615 +2024-11-11 20:55:40,677 Epoch 1576/2000 +2024-11-11 20:55:56,121 Current Learning Rate: 0.0096488824 +2024-11-11 
20:55:56,121 Train Loss: 0.0005385, Val Loss: 0.0007085 +2024-11-11 20:55:56,122 Epoch 1577/2000 +2024-11-11 20:56:11,419 Current Learning Rate: 0.0096772202 +2024-11-11 20:56:11,420 Train Loss: 0.0006376, Val Loss: 0.0007590 +2024-11-11 20:56:11,420 Epoch 1578/2000 +2024-11-11 20:56:26,934 Current Learning Rate: 0.0097044038 +2024-11-11 20:56:26,934 Train Loss: 0.0006362, Val Loss: 0.0007474 +2024-11-11 20:56:26,934 Epoch 1579/2000 +2024-11-11 20:56:42,217 Current Learning Rate: 0.0097304268 +2024-11-11 20:56:42,218 Train Loss: 0.0006159, Val Loss: 0.0007392 +2024-11-11 20:56:42,218 Epoch 1580/2000 +2024-11-11 20:56:57,672 Current Learning Rate: 0.0097552826 +2024-11-11 20:56:57,672 Train Loss: 0.0006625, Val Loss: 0.0007923 +2024-11-11 20:56:57,673 Epoch 1581/2000 +2024-11-11 20:57:13,147 Current Learning Rate: 0.0097789651 +2024-11-11 20:57:13,147 Train Loss: 0.0005825, Val Loss: 0.0007274 +2024-11-11 20:57:13,147 Epoch 1582/2000 +2024-11-11 20:57:28,576 Current Learning Rate: 0.0098014684 +2024-11-11 20:57:28,577 Train Loss: 0.0010995, Val Loss: 0.0010468 +2024-11-11 20:57:28,577 Epoch 1583/2000 +2024-11-11 20:57:44,072 Current Learning Rate: 0.0098227871 +2024-11-11 20:57:44,073 Train Loss: 0.0007570, Val Loss: 0.0007988 +2024-11-11 20:57:44,073 Epoch 1584/2000 +2024-11-11 20:57:59,278 Current Learning Rate: 0.0098429158 +2024-11-11 20:57:59,279 Train Loss: 0.0006140, Val Loss: 0.0007082 +2024-11-11 20:57:59,279 Epoch 1585/2000 +2024-11-11 20:58:14,221 Current Learning Rate: 0.0098618496 +2024-11-11 20:58:14,221 Train Loss: 0.0006296, Val Loss: 0.0007013 +2024-11-11 20:58:14,221 Epoch 1586/2000 +2024-11-11 20:58:29,757 Current Learning Rate: 0.0098795838 +2024-11-11 20:58:29,758 Train Loss: 0.0006007, Val Loss: 0.0007067 +2024-11-11 20:58:29,758 Epoch 1587/2000 +2024-11-11 20:58:45,046 Current Learning Rate: 0.0098961141 +2024-11-11 20:58:45,047 Train Loss: 0.0006515, Val Loss: 0.0007251 +2024-11-11 20:58:45,047 Epoch 1588/2000 +2024-11-11 20:59:00,279 Current 
Learning Rate: 0.0099114363 +2024-11-11 20:59:00,280 Train Loss: 0.0006884, Val Loss: 0.0007127 +2024-11-11 20:59:00,280 Epoch 1589/2000 +2024-11-11 20:59:15,591 Current Learning Rate: 0.0099255466 +2024-11-11 20:59:15,591 Train Loss: 0.0006105, Val Loss: 0.0006956 +2024-11-11 20:59:15,591 Epoch 1590/2000 +2024-11-11 20:59:31,057 Current Learning Rate: 0.0099384417 +2024-11-11 20:59:31,057 Train Loss: 0.0005697, Val Loss: 0.0006622 +2024-11-11 20:59:31,058 Epoch 1591/2000 +2024-11-11 20:59:46,225 Current Learning Rate: 0.0099501183 +2024-11-11 20:59:46,226 Train Loss: 0.0005302, Val Loss: 0.0006668 +2024-11-11 20:59:46,226 Epoch 1592/2000 +2024-11-11 21:00:01,386 Current Learning Rate: 0.0099605735 +2024-11-11 21:00:01,386 Train Loss: 0.0006113, Val Loss: 0.0007438 +2024-11-11 21:00:01,386 Epoch 1593/2000 +2024-11-11 21:00:17,057 Current Learning Rate: 0.0099698048 +2024-11-11 21:00:17,058 Train Loss: 0.0005776, Val Loss: 0.0006927 +2024-11-11 21:00:17,059 Epoch 1594/2000 +2024-11-11 21:00:32,223 Current Learning Rate: 0.0099778098 +2024-11-11 21:00:32,223 Train Loss: 0.0005939, Val Loss: 0.0007548 +2024-11-11 21:00:32,223 Epoch 1595/2000 +2024-11-11 21:00:47,543 Current Learning Rate: 0.0099845867 +2024-11-11 21:00:47,543 Train Loss: 0.0006328, Val Loss: 0.0007164 +2024-11-11 21:00:47,544 Epoch 1596/2000 +2024-11-11 21:01:03,076 Current Learning Rate: 0.0099901336 +2024-11-11 21:01:03,077 Train Loss: 0.0006429, Val Loss: 0.0007135 +2024-11-11 21:01:03,077 Epoch 1597/2000 +2024-11-11 21:01:19,029 Current Learning Rate: 0.0099944494 +2024-11-11 21:01:19,029 Train Loss: 0.0005700, Val Loss: 0.0007071 +2024-11-11 21:01:19,030 Epoch 1598/2000 +2024-11-11 21:01:34,774 Current Learning Rate: 0.0099975328 +2024-11-11 21:01:34,775 Train Loss: 0.0005241, Val Loss: 0.0006831 +2024-11-11 21:01:34,775 Epoch 1599/2000 +2024-11-11 21:01:51,215 Current Learning Rate: 0.0099993832 +2024-11-11 21:01:51,215 Train Loss: 0.0005710, Val Loss: 0.0006871 +2024-11-11 21:01:51,216 Epoch 
1600/2000 +2024-11-11 21:02:07,583 Current Learning Rate: 0.0100000000 +2024-11-11 21:02:07,583 Train Loss: 0.0006290, Val Loss: 0.0007006 +2024-11-11 21:02:07,584 Epoch 1601/2000 +2024-11-11 21:02:24,025 Current Learning Rate: 0.0099993832 +2024-11-11 21:02:24,026 Train Loss: 0.0005653, Val Loss: 0.0006867 +2024-11-11 21:02:24,026 Epoch 1602/2000 +2024-11-11 21:02:39,275 Current Learning Rate: 0.0099975328 +2024-11-11 21:02:39,276 Train Loss: 0.0006054, Val Loss: 0.0007082 +2024-11-11 21:02:39,276 Epoch 1603/2000 +2024-11-11 21:02:55,958 Current Learning Rate: 0.0099944494 +2024-11-11 21:02:55,959 Train Loss: 0.0005532, Val Loss: 0.0007211 +2024-11-11 21:02:55,959 Epoch 1604/2000 +2024-11-11 21:03:11,381 Current Learning Rate: 0.0099901336 +2024-11-11 21:03:11,382 Train Loss: 0.0006236, Val Loss: 0.0007576 +2024-11-11 21:03:11,382 Epoch 1605/2000 +2024-11-11 21:03:27,585 Current Learning Rate: 0.0099845867 +2024-11-11 21:03:27,586 Train Loss: 0.0006244, Val Loss: 0.0007181 +2024-11-11 21:03:27,587 Epoch 1606/2000 +2024-11-11 21:03:42,922 Current Learning Rate: 0.0099778098 +2024-11-11 21:03:42,922 Train Loss: 0.0005689, Val Loss: 0.0006912 +2024-11-11 21:03:42,922 Epoch 1607/2000 +2024-11-11 21:03:57,985 Current Learning Rate: 0.0099698048 +2024-11-11 21:03:57,986 Train Loss: 0.0006124, Val Loss: 0.0007400 +2024-11-11 21:03:57,986 Epoch 1608/2000 +2024-11-11 21:04:13,229 Current Learning Rate: 0.0099605735 +2024-11-11 21:04:13,230 Train Loss: 0.0006312, Val Loss: 0.0007457 +2024-11-11 21:04:13,230 Epoch 1609/2000 +2024-11-11 21:04:29,189 Current Learning Rate: 0.0099501183 +2024-11-11 21:04:29,189 Train Loss: 0.0006641, Val Loss: 0.0007831 +2024-11-11 21:04:29,190 Epoch 1610/2000 +2024-11-11 21:04:45,613 Current Learning Rate: 0.0099384417 +2024-11-11 21:04:45,615 Train Loss: 0.0006055, Val Loss: 0.0007235 +2024-11-11 21:04:45,615 Epoch 1611/2000 +2024-11-11 21:05:01,489 Current Learning Rate: 0.0099255466 +2024-11-11 21:05:01,490 Train Loss: 0.0006120, Val Loss: 
0.0006749 +2024-11-11 21:05:01,490 Epoch 1612/2000 +2024-11-11 21:05:17,316 Current Learning Rate: 0.0099114363 +2024-11-11 21:05:17,317 Train Loss: 0.0006593, Val Loss: 0.0006665 +2024-11-11 21:05:17,317 Epoch 1613/2000 +2024-11-11 21:05:34,608 Current Learning Rate: 0.0098961141 +2024-11-11 21:05:34,609 Train Loss: 0.0005506, Val Loss: 0.0006879 +2024-11-11 21:05:34,609 Epoch 1614/2000 +2024-11-11 21:05:50,551 Current Learning Rate: 0.0098795838 +2024-11-11 21:05:50,552 Train Loss: 0.0005703, Val Loss: 0.0006667 +2024-11-11 21:05:50,552 Epoch 1615/2000 +2024-11-11 21:06:07,113 Current Learning Rate: 0.0098618496 +2024-11-11 21:06:07,113 Train Loss: 0.0005136, Val Loss: 0.0006420 +2024-11-11 21:06:07,113 Epoch 1616/2000 +2024-11-11 21:06:23,347 Current Learning Rate: 0.0098429158 +2024-11-11 21:06:23,347 Train Loss: 0.0005996, Val Loss: 0.0006467 +2024-11-11 21:06:23,348 Epoch 1617/2000 +2024-11-11 21:06:38,694 Current Learning Rate: 0.0098227871 +2024-11-11 21:06:38,696 Train Loss: 0.0005619, Val Loss: 0.0006564 +2024-11-11 21:06:38,696 Epoch 1618/2000 +2024-11-11 21:06:54,865 Current Learning Rate: 0.0098014684 +2024-11-11 21:06:54,866 Train Loss: 0.0005644, Val Loss: 0.0006659 +2024-11-11 21:06:54,866 Epoch 1619/2000 +2024-11-11 21:07:10,971 Current Learning Rate: 0.0097789651 +2024-11-11 21:07:10,973 Train Loss: 0.0006016, Val Loss: 0.0006553 +2024-11-11 21:07:10,973 Epoch 1620/2000 +2024-11-11 21:07:26,910 Current Learning Rate: 0.0097552826 +2024-11-11 21:07:26,910 Train Loss: 0.0004954, Val Loss: 0.0006867 +2024-11-11 21:07:26,911 Epoch 1621/2000 +2024-11-11 21:07:42,858 Current Learning Rate: 0.0097304268 +2024-11-11 21:07:42,858 Train Loss: 0.0005692, Val Loss: 0.0007243 +2024-11-11 21:07:42,859 Epoch 1622/2000 +2024-11-11 21:07:58,421 Current Learning Rate: 0.0097044038 +2024-11-11 21:07:58,421 Train Loss: 0.0006182, Val Loss: 0.0006944 +2024-11-11 21:07:58,422 Epoch 1623/2000 +2024-11-11 21:08:14,521 Current Learning Rate: 0.0096772202 +2024-11-11 
21:08:14,522 Train Loss: 0.0005968, Val Loss: 0.0007559 +2024-11-11 21:08:14,522 Epoch 1624/2000 +2024-11-11 21:08:30,611 Current Learning Rate: 0.0096488824 +2024-11-11 21:08:30,613 Train Loss: 0.0005986, Val Loss: 0.0006993 +2024-11-11 21:08:30,613 Epoch 1625/2000 +2024-11-11 21:08:46,495 Current Learning Rate: 0.0096193977 +2024-11-11 21:08:46,496 Train Loss: 0.0005167, Val Loss: 0.0006882 +2024-11-11 21:08:46,496 Epoch 1626/2000 +2024-11-11 21:09:01,803 Current Learning Rate: 0.0095887731 +2024-11-11 21:09:01,803 Train Loss: 0.0005384, Val Loss: 0.0006748 +2024-11-11 21:09:01,804 Epoch 1627/2000 +2024-11-11 21:09:17,477 Current Learning Rate: 0.0095570164 +2024-11-11 21:09:17,477 Train Loss: 0.0006096, Val Loss: 0.0006921 +2024-11-11 21:09:17,478 Epoch 1628/2000 +2024-11-11 21:09:33,161 Current Learning Rate: 0.0095241353 +2024-11-11 21:09:33,161 Train Loss: 0.0006100, Val Loss: 0.0007079 +2024-11-11 21:09:33,162 Epoch 1629/2000 +2024-11-11 21:09:48,659 Current Learning Rate: 0.0094901379 +2024-11-11 21:09:48,659 Train Loss: 0.0006276, Val Loss: 0.0006840 +2024-11-11 21:09:48,660 Epoch 1630/2000 +2024-11-11 21:10:04,413 Current Learning Rate: 0.0094550326 +2024-11-11 21:10:04,413 Train Loss: 0.0006200, Val Loss: 0.0006968 +2024-11-11 21:10:04,413 Epoch 1631/2000 +2024-11-11 21:10:20,284 Current Learning Rate: 0.0094188282 +2024-11-11 21:10:20,285 Train Loss: 0.0005760, Val Loss: 0.0007054 +2024-11-11 21:10:20,285 Epoch 1632/2000 +2024-11-11 21:10:35,692 Current Learning Rate: 0.0093815334 +2024-11-11 21:10:35,693 Train Loss: 0.0005433, Val Loss: 0.0006638 +2024-11-11 21:10:35,693 Epoch 1633/2000 +2024-11-11 21:10:51,424 Current Learning Rate: 0.0093431576 +2024-11-11 21:10:51,425 Train Loss: 0.0005208, Val Loss: 0.0007129 +2024-11-11 21:10:51,425 Epoch 1634/2000 +2024-11-11 21:11:06,879 Current Learning Rate: 0.0093037101 +2024-11-11 21:11:06,879 Train Loss: 0.0005983, Val Loss: 0.0007182 +2024-11-11 21:11:06,880 Epoch 1635/2000 +2024-11-11 21:11:23,115 Current 
Learning Rate: 0.0092632008 +2024-11-11 21:11:23,116 Train Loss: 0.0006129, Val Loss: 0.0006546 +2024-11-11 21:11:23,116 Epoch 1636/2000 +2024-11-11 21:11:38,929 Current Learning Rate: 0.0092216396 +2024-11-11 21:11:38,929 Train Loss: 0.0006309, Val Loss: 0.0007002 +2024-11-11 21:11:38,930 Epoch 1637/2000 +2024-11-11 21:11:54,777 Current Learning Rate: 0.0091790368 +2024-11-11 21:11:54,778 Train Loss: 0.0005877, Val Loss: 0.0007419 +2024-11-11 21:11:54,778 Epoch 1638/2000 +2024-11-11 21:12:11,117 Current Learning Rate: 0.0091354029 +2024-11-11 21:12:11,118 Train Loss: 0.0006475, Val Loss: 0.0006683 +2024-11-11 21:12:11,118 Epoch 1639/2000 +2024-11-11 21:12:27,165 Current Learning Rate: 0.0090907486 +2024-11-11 21:12:27,166 Train Loss: 0.0005489, Val Loss: 0.0006673 +2024-11-11 21:12:27,166 Epoch 1640/2000 +2024-11-11 21:12:43,178 Current Learning Rate: 0.0090450850 +2024-11-11 21:12:43,179 Train Loss: 0.0007216, Val Loss: 0.0008442 +2024-11-11 21:12:43,179 Epoch 1641/2000 +2024-11-11 21:12:59,330 Current Learning Rate: 0.0089984233 +2024-11-11 21:12:59,332 Train Loss: 0.0006222, Val Loss: 0.0006745 +2024-11-11 21:12:59,332 Epoch 1642/2000 +2024-11-11 21:13:15,731 Current Learning Rate: 0.0089507751 +2024-11-11 21:13:15,731 Train Loss: 0.0005324, Val Loss: 0.0007027 +2024-11-11 21:13:15,732 Epoch 1643/2000 +2024-11-11 21:13:32,312 Current Learning Rate: 0.0089021520 +2024-11-11 21:13:32,312 Train Loss: 0.0006039, Val Loss: 0.0007143 +2024-11-11 21:13:32,313 Epoch 1644/2000 +2024-11-11 21:13:49,583 Current Learning Rate: 0.0088525662 +2024-11-11 21:13:49,584 Train Loss: 0.0005603, Val Loss: 0.0006621 +2024-11-11 21:13:49,584 Epoch 1645/2000 +2024-11-11 21:14:05,625 Current Learning Rate: 0.0088020298 +2024-11-11 21:14:05,625 Train Loss: 0.0006123, Val Loss: 0.0006480 +2024-11-11 21:14:05,625 Epoch 1646/2000 +2024-11-11 21:14:21,815 Current Learning Rate: 0.0087505553 +2024-11-11 21:14:21,816 Train Loss: 0.0005118, Val Loss: 0.0006217 +2024-11-11 21:14:21,816 Epoch 
1647/2000 +2024-11-11 21:14:38,370 Current Learning Rate: 0.0086981555 +2024-11-11 21:14:38,371 Train Loss: 0.0005529, Val Loss: 0.0006320 +2024-11-11 21:14:38,372 Epoch 1648/2000 +2024-11-11 21:14:54,991 Current Learning Rate: 0.0086448431 +2024-11-11 21:14:54,992 Train Loss: 0.0005973, Val Loss: 0.0006550 +2024-11-11 21:14:54,992 Epoch 1649/2000 +2024-11-11 21:15:10,277 Current Learning Rate: 0.0085906315 +2024-11-11 21:15:10,277 Train Loss: 0.0005381, Val Loss: 0.0006692 +2024-11-11 21:15:10,277 Epoch 1650/2000 +2024-11-11 21:15:26,085 Current Learning Rate: 0.0085355339 +2024-11-11 21:15:26,086 Train Loss: 0.0005312, Val Loss: 0.0006560 +2024-11-11 21:15:26,086 Epoch 1651/2000 +2024-11-11 21:15:41,190 Current Learning Rate: 0.0084795640 +2024-11-11 21:15:41,190 Train Loss: 0.0005639, Val Loss: 0.0006777 +2024-11-11 21:15:41,190 Epoch 1652/2000 +2024-11-11 21:15:56,598 Current Learning Rate: 0.0084227355 +2024-11-11 21:15:56,599 Train Loss: 0.0005813, Val Loss: 0.0007803 +2024-11-11 21:15:56,599 Epoch 1653/2000 +2024-11-11 21:16:12,226 Current Learning Rate: 0.0083650626 +2024-11-11 21:16:12,226 Train Loss: 0.0005547, Val Loss: 0.0006887 +2024-11-11 21:16:12,226 Epoch 1654/2000 +2024-11-11 21:16:27,413 Current Learning Rate: 0.0083065593 +2024-11-11 21:16:27,414 Train Loss: 0.0005443, Val Loss: 0.0006525 +2024-11-11 21:16:27,414 Epoch 1655/2000 +2024-11-11 21:16:43,153 Current Learning Rate: 0.0082472402 +2024-11-11 21:16:43,153 Train Loss: 0.0005685, Val Loss: 0.0006408 +2024-11-11 21:16:43,153 Epoch 1656/2000 +2024-11-11 21:16:58,433 Current Learning Rate: 0.0081871199 +2024-11-11 21:16:58,434 Train Loss: 0.0005863, Val Loss: 0.0006451 +2024-11-11 21:16:58,434 Epoch 1657/2000 +2024-11-11 21:17:14,151 Current Learning Rate: 0.0081262133 +2024-11-11 21:17:14,151 Train Loss: 0.0005508, Val Loss: 0.0006583 +2024-11-11 21:17:14,151 Epoch 1658/2000 +2024-11-11 21:17:29,690 Current Learning Rate: 0.0080645353 +2024-11-11 21:17:29,690 Train Loss: 0.0004973, Val Loss: 
0.0006194 +2024-11-11 21:17:29,690 Epoch 1659/2000 +2024-11-11 21:17:45,728 Current Learning Rate: 0.0080021011 +2024-11-11 21:17:46,492 Train Loss: 0.0004798, Val Loss: 0.0005947 +2024-11-11 21:17:46,493 Epoch 1660/2000 +2024-11-11 21:18:01,198 Current Learning Rate: 0.0079389263 +2024-11-11 21:18:02,052 Train Loss: 0.0005035, Val Loss: 0.0005858 +2024-11-11 21:18:02,053 Epoch 1661/2000 +2024-11-11 21:18:16,728 Current Learning Rate: 0.0078750263 +2024-11-11 21:18:16,729 Train Loss: 0.0005292, Val Loss: 0.0005920 +2024-11-11 21:18:16,729 Epoch 1662/2000 +2024-11-11 21:18:32,072 Current Learning Rate: 0.0078104169 +2024-11-11 21:18:32,073 Train Loss: 0.0004721, Val Loss: 0.0005989 +2024-11-11 21:18:32,073 Epoch 1663/2000 +2024-11-11 21:18:47,577 Current Learning Rate: 0.0077451141 +2024-11-11 21:18:47,577 Train Loss: 0.0004615, Val Loss: 0.0006117 +2024-11-11 21:18:47,577 Epoch 1664/2000 +2024-11-11 21:19:03,624 Current Learning Rate: 0.0076791340 +2024-11-11 21:19:03,624 Train Loss: 0.0004617, Val Loss: 0.0006160 +2024-11-11 21:19:03,624 Epoch 1665/2000 +2024-11-11 21:19:19,559 Current Learning Rate: 0.0076124928 +2024-11-11 21:19:19,560 Train Loss: 0.0004836, Val Loss: 0.0006143 +2024-11-11 21:19:19,560 Epoch 1666/2000 +2024-11-11 21:19:35,949 Current Learning Rate: 0.0075452071 +2024-11-11 21:19:35,950 Train Loss: 0.0004806, Val Loss: 0.0006009 +2024-11-11 21:19:35,950 Epoch 1667/2000 +2024-11-11 21:19:50,396 Current Learning Rate: 0.0074772933 +2024-11-11 21:19:50,397 Train Loss: 0.0004721, Val Loss: 0.0006422 +2024-11-11 21:19:50,397 Epoch 1668/2000 +2024-11-11 21:20:05,722 Current Learning Rate: 0.0074087684 +2024-11-11 21:20:05,723 Train Loss: 0.0005263, Val Loss: 0.0006798 +2024-11-11 21:20:05,733 Epoch 1669/2000 +2024-11-11 21:20:21,494 Current Learning Rate: 0.0073396491 +2024-11-11 21:20:21,494 Train Loss: 0.0006208, Val Loss: 0.0006382 +2024-11-11 21:20:21,495 Epoch 1670/2000 +2024-11-11 21:20:37,203 Current Learning Rate: 0.0072699525 +2024-11-11 
21:20:37,204 Train Loss: 0.0005495, Val Loss: 0.0006000 +2024-11-11 21:20:37,204 Epoch 1671/2000 +2024-11-11 21:20:52,738 Current Learning Rate: 0.0071996958 +2024-11-11 21:20:52,738 Train Loss: 0.0004826, Val Loss: 0.0006239 +2024-11-11 21:20:52,738 Epoch 1672/2000 +2024-11-11 21:21:08,673 Current Learning Rate: 0.0071288965 +2024-11-11 21:21:08,674 Train Loss: 0.0005262, Val Loss: 0.0006366 +2024-11-11 21:21:08,674 Epoch 1673/2000 +2024-11-11 21:21:24,328 Current Learning Rate: 0.0070575718 +2024-11-11 21:21:24,329 Train Loss: 0.0005894, Val Loss: 0.0005975 +2024-11-11 21:21:24,329 Epoch 1674/2000 +2024-11-11 21:21:40,397 Current Learning Rate: 0.0069857395 +2024-11-11 21:21:40,399 Train Loss: 0.0004935, Val Loss: 0.0005900 +2024-11-11 21:21:40,399 Epoch 1675/2000 +2024-11-11 21:21:56,131 Current Learning Rate: 0.0069134172 +2024-11-11 21:21:57,193 Train Loss: 0.0004529, Val Loss: 0.0005800 +2024-11-11 21:21:57,194 Epoch 1676/2000 +2024-11-11 21:22:12,837 Current Learning Rate: 0.0068406228 +2024-11-11 21:22:13,807 Train Loss: 0.0005233, Val Loss: 0.0005721 +2024-11-11 21:22:13,808 Epoch 1677/2000 +2024-11-11 21:22:29,606 Current Learning Rate: 0.0067673742 +2024-11-11 21:22:30,593 Train Loss: 0.0004106, Val Loss: 0.0005661 +2024-11-11 21:22:30,594 Epoch 1678/2000 +2024-11-11 21:22:46,452 Current Learning Rate: 0.0066936896 +2024-11-11 21:22:46,452 Train Loss: 0.0004143, Val Loss: 0.0005719 +2024-11-11 21:22:46,452 Epoch 1679/2000 +2024-11-11 21:23:02,868 Current Learning Rate: 0.0066195871 +2024-11-11 21:23:02,868 Train Loss: 0.0004331, Val Loss: 0.0005670 +2024-11-11 21:23:02,869 Epoch 1680/2000 +2024-11-11 21:23:19,416 Current Learning Rate: 0.0065450850 +2024-11-11 21:23:19,416 Train Loss: 0.0004751, Val Loss: 0.0005682 +2024-11-11 21:23:19,416 Epoch 1681/2000 +2024-11-11 21:23:34,856 Current Learning Rate: 0.0064702016 +2024-11-11 21:23:35,622 Train Loss: 0.0004536, Val Loss: 0.0005595 +2024-11-11 21:23:35,622 Epoch 1682/2000 +2024-11-11 21:23:51,120 Current 
Learning Rate: 0.0063949555 +2024-11-11 21:23:51,121 Train Loss: 0.0004756, Val Loss: 0.0005645 +2024-11-11 21:23:51,122 Epoch 1683/2000 +2024-11-11 21:24:07,872 Current Learning Rate: 0.0063193652 +2024-11-11 21:24:07,873 Train Loss: 0.0003916, Val Loss: 0.0005648 +2024-11-11 21:24:07,873 Epoch 1684/2000 +2024-11-11 21:24:23,939 Current Learning Rate: 0.0062434494 +2024-11-11 21:24:23,940 Train Loss: 0.0004938, Val Loss: 0.0005678 +2024-11-11 21:24:23,940 Epoch 1685/2000 +2024-11-11 21:24:39,544 Current Learning Rate: 0.0061672268 +2024-11-11 21:24:39,544 Train Loss: 0.0004542, Val Loss: 0.0005658 +2024-11-11 21:24:39,544 Epoch 1686/2000 +2024-11-11 21:24:55,368 Current Learning Rate: 0.0060907162 +2024-11-11 21:24:55,368 Train Loss: 0.0004392, Val Loss: 0.0005781 +2024-11-11 21:24:55,369 Epoch 1687/2000 +2024-11-11 21:25:10,854 Current Learning Rate: 0.0060139365 +2024-11-11 21:25:10,854 Train Loss: 0.0004555, Val Loss: 0.0005663 +2024-11-11 21:25:10,855 Epoch 1688/2000 +2024-11-11 21:25:27,116 Current Learning Rate: 0.0059369066 +2024-11-11 21:25:27,943 Train Loss: 0.0004500, Val Loss: 0.0005566 +2024-11-11 21:25:27,944 Epoch 1689/2000 +2024-11-11 21:25:42,949 Current Learning Rate: 0.0058596455 +2024-11-11 21:25:44,031 Train Loss: 0.0004700, Val Loss: 0.0005511 +2024-11-11 21:25:44,032 Epoch 1690/2000 +2024-11-11 21:26:00,235 Current Learning Rate: 0.0057821723 +2024-11-11 21:26:00,235 Train Loss: 0.0004602, Val Loss: 0.0005725 +2024-11-11 21:26:00,236 Epoch 1691/2000 +2024-11-11 21:26:15,990 Current Learning Rate: 0.0057045062 +2024-11-11 21:26:15,990 Train Loss: 0.0004776, Val Loss: 0.0005732 +2024-11-11 21:26:15,990 Epoch 1692/2000 +2024-11-11 21:26:31,978 Current Learning Rate: 0.0056266662 +2024-11-11 21:26:32,948 Train Loss: 0.0004919, Val Loss: 0.0005486 +2024-11-11 21:26:32,948 Epoch 1693/2000 +2024-11-11 21:26:48,656 Current Learning Rate: 0.0055486716 +2024-11-11 21:26:48,657 Train Loss: 0.0004299, Val Loss: 0.0005550 +2024-11-11 21:26:48,658 Epoch 
1694/2000 +2024-11-11 21:27:04,898 Current Learning Rate: 0.0054705416 +2024-11-11 21:27:04,899 Train Loss: 0.0004087, Val Loss: 0.0005548 +2024-11-11 21:27:04,899 Epoch 1695/2000 +2024-11-11 21:27:20,389 Current Learning Rate: 0.0053922955 +2024-11-11 21:27:21,409 Train Loss: 0.0003815, Val Loss: 0.0005389 +2024-11-11 21:27:21,409 Epoch 1696/2000 +2024-11-11 21:27:37,184 Current Learning Rate: 0.0053139526 +2024-11-11 21:27:37,185 Train Loss: 0.0003787, Val Loss: 0.0005402 +2024-11-11 21:27:37,185 Epoch 1697/2000 +2024-11-11 21:27:53,586 Current Learning Rate: 0.0052355323 +2024-11-11 21:27:53,587 Train Loss: 0.0004286, Val Loss: 0.0005417 +2024-11-11 21:27:53,587 Epoch 1698/2000 +2024-11-11 21:28:09,170 Current Learning Rate: 0.0051570538 +2024-11-11 21:28:09,172 Train Loss: 0.0004803, Val Loss: 0.0005458 +2024-11-11 21:28:09,173 Epoch 1699/2000 +2024-11-11 21:28:25,196 Current Learning Rate: 0.0050785366 +2024-11-11 21:28:25,197 Train Loss: 0.0004299, Val Loss: 0.0005413 +2024-11-11 21:28:25,197 Epoch 1700/2000 +2024-11-11 21:28:41,941 Current Learning Rate: 0.0050000000 +2024-11-11 21:28:42,658 Train Loss: 0.0004231, Val Loss: 0.0005336 +2024-11-11 21:28:42,658 Epoch 1701/2000 +2024-11-11 21:28:57,427 Current Learning Rate: 0.0049214634 +2024-11-11 21:28:57,427 Train Loss: 0.0004457, Val Loss: 0.0005372 +2024-11-11 21:28:57,427 Epoch 1702/2000 +2024-11-11 21:29:13,586 Current Learning Rate: 0.0048429462 +2024-11-11 21:29:13,587 Train Loss: 0.0004917, Val Loss: 0.0005428 +2024-11-11 21:29:13,587 Epoch 1703/2000 +2024-11-11 21:29:29,746 Current Learning Rate: 0.0047644677 +2024-11-11 21:29:29,747 Train Loss: 0.0004636, Val Loss: 0.0005574 +2024-11-11 21:29:29,747 Epoch 1704/2000 +2024-11-11 21:29:45,894 Current Learning Rate: 0.0046860474 +2024-11-11 21:29:45,895 Train Loss: 0.0004433, Val Loss: 0.0005445 +2024-11-11 21:29:45,895 Epoch 1705/2000 +2024-11-11 21:30:01,533 Current Learning Rate: 0.0046077045 +2024-11-11 21:30:03,920 Train Loss: 0.0004468, Val Loss: 
0.0005330 +2024-11-11 21:30:03,920 Epoch 1706/2000 +2024-11-11 21:30:18,254 Current Learning Rate: 0.0045294584 +2024-11-11 21:30:18,999 Train Loss: 0.0004184, Val Loss: 0.0005270 +2024-11-11 21:30:18,999 Epoch 1707/2000 +2024-11-11 21:30:34,360 Current Learning Rate: 0.0044513284 +2024-11-11 21:30:35,097 Train Loss: 0.0004106, Val Loss: 0.0005222 +2024-11-11 21:30:35,098 Epoch 1708/2000 +2024-11-11 21:30:49,745 Current Learning Rate: 0.0043733338 +2024-11-11 21:30:50,488 Train Loss: 0.0004124, Val Loss: 0.0005208 +2024-11-11 21:30:50,488 Epoch 1709/2000 +2024-11-11 21:31:05,230 Current Learning Rate: 0.0042954938 +2024-11-11 21:31:05,231 Train Loss: 0.0004559, Val Loss: 0.0005259 +2024-11-11 21:31:05,232 Epoch 1710/2000 +2024-11-11 21:31:20,579 Current Learning Rate: 0.0042178277 +2024-11-11 21:31:20,579 Train Loss: 0.0004848, Val Loss: 0.0005336 +2024-11-11 21:31:20,579 Epoch 1711/2000 +2024-11-11 21:31:36,045 Current Learning Rate: 0.0041403545 +2024-11-11 21:31:36,045 Train Loss: 0.0004692, Val Loss: 0.0005393 +2024-11-11 21:31:36,045 Epoch 1712/2000 +2024-11-11 21:31:51,229 Current Learning Rate: 0.0040630934 +2024-11-11 21:31:51,229 Train Loss: 0.0003951, Val Loss: 0.0005302 +2024-11-11 21:31:51,229 Epoch 1713/2000 +2024-11-11 21:32:06,839 Current Learning Rate: 0.0039860635 +2024-11-11 21:32:06,839 Train Loss: 0.0003930, Val Loss: 0.0005283 +2024-11-11 21:32:06,839 Epoch 1714/2000 +2024-11-11 21:32:22,754 Current Learning Rate: 0.0039092838 +2024-11-11 21:32:22,754 Train Loss: 0.0003887, Val Loss: 0.0005323 +2024-11-11 21:32:22,754 Epoch 1715/2000 +2024-11-11 21:32:38,545 Current Learning Rate: 0.0038327732 +2024-11-11 21:32:38,546 Train Loss: 0.0004875, Val Loss: 0.0005406 +2024-11-11 21:32:38,546 Epoch 1716/2000 +2024-11-11 21:32:53,734 Current Learning Rate: 0.0037565506 +2024-11-11 21:32:53,735 Train Loss: 0.0004535, Val Loss: 0.0005704 +2024-11-11 21:32:53,735 Epoch 1717/2000 +2024-11-11 21:33:09,333 Current Learning Rate: 0.0036806348 +2024-11-11 
21:33:09,334 Train Loss: 0.0004627, Val Loss: 0.0005671 +2024-11-11 21:33:09,334 Epoch 1718/2000 +2024-11-11 21:33:25,199 Current Learning Rate: 0.0036050445 +2024-11-11 21:33:25,200 Train Loss: 0.0004753, Val Loss: 0.0005282 +2024-11-11 21:33:25,200 Epoch 1719/2000 +2024-11-11 21:33:40,572 Current Learning Rate: 0.0035297984 +2024-11-11 21:33:41,378 Train Loss: 0.0003659, Val Loss: 0.0005147 +2024-11-11 21:33:41,378 Epoch 1720/2000 +2024-11-11 21:33:55,968 Current Learning Rate: 0.0034549150 +2024-11-11 21:33:55,968 Train Loss: 0.0003476, Val Loss: 0.0005149 +2024-11-11 21:33:55,969 Epoch 1721/2000 +2024-11-11 21:34:11,759 Current Learning Rate: 0.0033804129 +2024-11-11 21:34:12,517 Train Loss: 0.0003638, Val Loss: 0.0005122 +2024-11-11 21:34:12,517 Epoch 1722/2000 +2024-11-11 21:34:27,394 Current Learning Rate: 0.0033063104 +2024-11-11 21:34:28,209 Train Loss: 0.0003518, Val Loss: 0.0005106 +2024-11-11 21:34:28,209 Epoch 1723/2000 +2024-11-11 21:34:42,940 Current Learning Rate: 0.0032326258 +2024-11-11 21:34:43,808 Train Loss: 0.0003611, Val Loss: 0.0005086 +2024-11-11 21:34:43,808 Epoch 1724/2000 +2024-11-11 21:34:58,215 Current Learning Rate: 0.0031593772 +2024-11-11 21:34:58,971 Train Loss: 0.0004347, Val Loss: 0.0005074 +2024-11-11 21:34:58,972 Epoch 1725/2000 +2024-11-11 21:35:13,471 Current Learning Rate: 0.0030865828 +2024-11-11 21:35:14,299 Train Loss: 0.0003929, Val Loss: 0.0005071 +2024-11-11 21:35:14,300 Epoch 1726/2000 +2024-11-11 21:35:29,438 Current Learning Rate: 0.0030142605 +2024-11-11 21:35:29,439 Train Loss: 0.0004435, Val Loss: 0.0005078 +2024-11-11 21:35:29,440 Epoch 1727/2000 +2024-11-11 21:35:45,711 Current Learning Rate: 0.0029424282 +2024-11-11 21:35:45,712 Train Loss: 0.0004152, Val Loss: 0.0005074 +2024-11-11 21:35:45,712 Epoch 1728/2000 +2024-11-11 21:36:01,409 Current Learning Rate: 0.0028711035 +2024-11-11 21:36:01,410 Train Loss: 0.0004438, Val Loss: 0.0005084 +2024-11-11 21:36:01,410 Epoch 1729/2000 +2024-11-11 21:36:16,408 Current 
Learning Rate: 0.0028003042 +2024-11-11 21:36:16,409 Train Loss: 0.0004133, Val Loss: 0.0005072 +2024-11-11 21:36:16,409 Epoch 1730/2000 +2024-11-11 21:36:32,347 Current Learning Rate: 0.0027300475 +2024-11-11 21:36:33,145 Train Loss: 0.0004135, Val Loss: 0.0005039 +2024-11-11 21:36:33,145 Epoch 1731/2000 +2024-11-11 21:36:48,090 Current Learning Rate: 0.0026603509 +2024-11-11 21:36:48,771 Train Loss: 0.0003828, Val Loss: 0.0005030 +2024-11-11 21:36:48,771 Epoch 1732/2000 +2024-11-11 21:37:04,353 Current Learning Rate: 0.0025912316 +2024-11-11 21:37:04,354 Train Loss: 0.0004278, Val Loss: 0.0005036 +2024-11-11 21:37:04,354 Epoch 1733/2000 +2024-11-11 21:37:20,914 Current Learning Rate: 0.0025227067 +2024-11-11 21:37:20,915 Train Loss: 0.0003527, Val Loss: 0.0005042 +2024-11-11 21:37:20,916 Epoch 1734/2000 +2024-11-11 21:37:37,279 Current Learning Rate: 0.0024547929 +2024-11-11 21:37:37,280 Train Loss: 0.0004140, Val Loss: 0.0005063 +2024-11-11 21:37:37,280 Epoch 1735/2000 +2024-11-11 21:37:53,847 Current Learning Rate: 0.0023875072 +2024-11-11 21:37:54,817 Train Loss: 0.0004297, Val Loss: 0.0005015 +2024-11-11 21:37:54,818 Epoch 1736/2000 +2024-11-11 21:38:11,178 Current Learning Rate: 0.0023208660 +2024-11-11 21:38:11,891 Train Loss: 0.0004593, Val Loss: 0.0004956 +2024-11-11 21:38:11,891 Epoch 1737/2000 +2024-11-11 21:38:26,214 Current Learning Rate: 0.0022548859 +2024-11-11 21:38:27,181 Train Loss: 0.0004213, Val Loss: 0.0004920 +2024-11-11 21:38:27,182 Epoch 1738/2000 +2024-11-11 21:38:43,553 Current Learning Rate: 0.0021895831 +2024-11-11 21:38:43,554 Train Loss: 0.0003847, Val Loss: 0.0004928 +2024-11-11 21:38:43,554 Epoch 1739/2000 +2024-11-11 21:38:59,299 Current Learning Rate: 0.0021249737 +2024-11-11 21:39:00,058 Train Loss: 0.0003771, Val Loss: 0.0004876 +2024-11-11 21:39:00,058 Epoch 1740/2000 +2024-11-11 21:39:14,941 Current Learning Rate: 0.0020610737 +2024-11-11 21:39:15,759 Train Loss: 0.0003475, Val Loss: 0.0004875 +2024-11-11 21:39:15,759 Epoch 
1741/2000 +2024-11-11 21:39:30,817 Current Learning Rate: 0.0019978989 +2024-11-11 21:39:31,683 Train Loss: 0.0003471, Val Loss: 0.0004858 +2024-11-11 21:39:31,684 Epoch 1742/2000 +2024-11-11 21:39:47,134 Current Learning Rate: 0.0019354647 +2024-11-11 21:39:48,120 Train Loss: 0.0003708, Val Loss: 0.0004855 +2024-11-11 21:39:48,121 Epoch 1743/2000 +2024-11-11 21:40:04,333 Current Learning Rate: 0.0018737867 +2024-11-11 21:40:04,334 Train Loss: 0.0003876, Val Loss: 0.0004855 +2024-11-11 21:40:04,335 Epoch 1744/2000 +2024-11-11 21:40:19,883 Current Learning Rate: 0.0018128801 +2024-11-11 21:40:20,653 Train Loss: 0.0003427, Val Loss: 0.0004844 +2024-11-11 21:40:20,654 Epoch 1745/2000 +2024-11-11 21:40:35,474 Current Learning Rate: 0.0017527598 +2024-11-11 21:40:36,222 Train Loss: 0.0004239, Val Loss: 0.0004835 +2024-11-11 21:40:36,222 Epoch 1746/2000 +2024-11-11 21:40:51,038 Current Learning Rate: 0.0016934407 +2024-11-11 21:40:51,771 Train Loss: 0.0003626, Val Loss: 0.0004820 +2024-11-11 21:40:51,772 Epoch 1747/2000 +2024-11-11 21:41:06,709 Current Learning Rate: 0.0016349374 +2024-11-11 21:41:07,441 Train Loss: 0.0003723, Val Loss: 0.0004810 +2024-11-11 21:41:07,441 Epoch 1748/2000 +2024-11-11 21:41:22,329 Current Learning Rate: 0.0015772645 +2024-11-11 21:41:23,205 Train Loss: 0.0003645, Val Loss: 0.0004809 +2024-11-11 21:41:23,206 Epoch 1749/2000 +2024-11-11 21:41:38,018 Current Learning Rate: 0.0015204360 +2024-11-11 21:41:38,019 Train Loss: 0.0003917, Val Loss: 0.0004810 +2024-11-11 21:41:38,019 Epoch 1750/2000 +2024-11-11 21:41:53,584 Current Learning Rate: 0.0014644661 +2024-11-11 21:41:53,584 Train Loss: 0.0003951, Val Loss: 0.0004822 +2024-11-11 21:41:53,585 Epoch 1751/2000 +2024-11-11 21:42:09,970 Current Learning Rate: 0.0014093685 +2024-11-11 21:42:11,020 Train Loss: 0.0003900, Val Loss: 0.0004800 +2024-11-11 21:42:11,020 Epoch 1752/2000 +2024-11-11 21:42:26,546 Current Learning Rate: 0.0013551569 +2024-11-11 21:42:27,473 Train Loss: 0.0003723, Val Loss: 
0.0004783 +2024-11-11 21:42:27,474 Epoch 1753/2000 +2024-11-11 21:42:43,426 Current Learning Rate: 0.0013018445 +2024-11-11 21:42:44,194 Train Loss: 0.0003705, Val Loss: 0.0004778 +2024-11-11 21:42:44,194 Epoch 1754/2000 +2024-11-11 21:42:58,864 Current Learning Rate: 0.0012494447 +2024-11-11 21:42:59,617 Train Loss: 0.0004015, Val Loss: 0.0004778 +2024-11-11 21:42:59,617 Epoch 1755/2000 +2024-11-11 21:43:15,679 Current Learning Rate: 0.0011979702 +2024-11-11 21:43:16,399 Train Loss: 0.0003422, Val Loss: 0.0004773 +2024-11-11 21:43:16,399 Epoch 1756/2000 +2024-11-11 21:43:31,117 Current Learning Rate: 0.0011474338 +2024-11-11 21:43:31,118 Train Loss: 0.0003694, Val Loss: 0.0004775 +2024-11-11 21:43:31,119 Epoch 1757/2000 +2024-11-11 21:43:46,465 Current Learning Rate: 0.0010978480 +2024-11-11 21:43:47,262 Train Loss: 0.0003533, Val Loss: 0.0004769 +2024-11-11 21:43:47,262 Epoch 1758/2000 +2024-11-11 21:44:01,659 Current Learning Rate: 0.0010492249 +2024-11-11 21:44:03,839 Train Loss: 0.0003803, Val Loss: 0.0004760 +2024-11-11 21:44:03,839 Epoch 1759/2000 +2024-11-11 21:44:18,102 Current Learning Rate: 0.0010015767 +2024-11-11 21:44:18,876 Train Loss: 0.0003702, Val Loss: 0.0004746 +2024-11-11 21:44:18,876 Epoch 1760/2000 +2024-11-11 21:44:33,367 Current Learning Rate: 0.0009549150 +2024-11-11 21:44:34,092 Train Loss: 0.0003760, Val Loss: 0.0004743 +2024-11-11 21:44:34,092 Epoch 1761/2000 +2024-11-11 21:44:48,542 Current Learning Rate: 0.0009092514 +2024-11-11 21:44:48,543 Train Loss: 0.0005291, Val Loss: 0.0004754 +2024-11-11 21:44:48,543 Epoch 1762/2000 +2024-11-11 21:45:04,214 Current Learning Rate: 0.0008645971 +2024-11-11 21:45:05,031 Train Loss: 0.0003119, Val Loss: 0.0004739 +2024-11-11 21:45:05,032 Epoch 1763/2000 +2024-11-11 21:45:20,482 Current Learning Rate: 0.0008209632 +2024-11-11 21:45:21,257 Train Loss: 0.0003379, Val Loss: 0.0004735 +2024-11-11 21:45:21,258 Epoch 1764/2000 +2024-11-11 21:45:36,295 Current Learning Rate: 0.0007783604 +2024-11-11 
21:45:37,410 Train Loss: 0.0003394, Val Loss: 0.0004732 +2024-11-11 21:45:37,410 Epoch 1765/2000 +2024-11-11 21:45:53,458 Current Learning Rate: 0.0007367992 +2024-11-11 21:45:54,230 Train Loss: 0.0003866, Val Loss: 0.0004730 +2024-11-11 21:45:54,230 Epoch 1766/2000 +2024-11-11 21:46:08,733 Current Learning Rate: 0.0006962899 +2024-11-11 21:46:09,634 Train Loss: 0.0003961, Val Loss: 0.0004728 +2024-11-11 21:46:09,635 Epoch 1767/2000 +2024-11-11 21:46:25,404 Current Learning Rate: 0.0006568424 +2024-11-11 21:46:26,165 Train Loss: 0.0003480, Val Loss: 0.0004724 +2024-11-11 21:46:26,165 Epoch 1768/2000 +2024-11-11 21:46:41,317 Current Learning Rate: 0.0006184666 +2024-11-11 21:46:42,122 Train Loss: 0.0003090, Val Loss: 0.0004720 +2024-11-11 21:46:42,122 Epoch 1769/2000 +2024-11-11 21:46:57,320 Current Learning Rate: 0.0005811718 +2024-11-11 21:46:57,321 Train Loss: 0.0004402, Val Loss: 0.0004724 +2024-11-11 21:46:57,321 Epoch 1770/2000 +2024-11-11 21:47:12,797 Current Learning Rate: 0.0005449674 +2024-11-11 21:47:12,797 Train Loss: 0.0004016, Val Loss: 0.0004722 +2024-11-11 21:47:12,798 Epoch 1771/2000 +2024-11-11 21:47:29,463 Current Learning Rate: 0.0005098621 +2024-11-11 21:47:30,420 Train Loss: 0.0003784, Val Loss: 0.0004716 +2024-11-11 21:47:30,420 Epoch 1772/2000 +2024-11-11 21:47:37,319 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-11 21:47:45,803 Current Learning Rate: 0.0004758647 +2024-11-11 21:47:46,777 Train Loss: 0.0003402, Val Loss: 0.0004713 +2024-11-11 21:47:46,782 Epoch 1773/2000 +2024-11-11 21:47:59,503 Loading best model from checkpoint. +2024-11-11 21:48:01,961 Current Learning Rate: 0.0004429836 +2024-11-11 21:48:04,354 Train Loss: 0.0003396, Val Loss: 0.0004709 +2024-11-11 21:48:04,357 Epoch 1774/2000 +2024-11-11 21:48:17,190 Testing completed and best model saved. 
+2024-11-11 21:48:19,262 Current Learning Rate: 0.0004112269 +2024-11-11 21:48:20,444 Train Loss: 0.0003725, Val Loss: 0.0004708 +2024-11-11 21:48:20,445 Epoch 1775/2000 +2024-11-11 21:48:35,956 Current Learning Rate: 0.0003806023 +2024-11-11 21:48:37,258 Train Loss: 0.0003322, Val Loss: 0.0004707 +2024-11-11 21:48:37,259 Epoch 1776/2000 +2024-11-11 21:48:53,900 Current Learning Rate: 0.0003511176 +2024-11-11 21:48:54,819 Train Loss: 0.0003374, Val Loss: 0.0004704 +2024-11-11 21:48:54,819 Epoch 1777/2000 +2024-11-11 21:49:09,992 Current Learning Rate: 0.0003227798 +2024-11-11 21:49:10,980 Train Loss: 0.0003443, Val Loss: 0.0004703 +2024-11-11 21:49:10,980 Epoch 1778/2000 +2024-11-11 21:49:26,425 Current Learning Rate: 0.0002955962 +2024-11-11 21:49:27,579 Train Loss: 0.0003375, Val Loss: 0.0004701 +2024-11-11 21:49:27,580 Epoch 1779/2000 +2024-11-11 21:49:44,290 Current Learning Rate: 0.0002695732 +2024-11-11 21:49:45,502 Train Loss: 0.0003066, Val Loss: 0.0004700 +2024-11-11 21:49:45,502 Epoch 1780/2000 +2024-11-11 21:50:01,038 Current Learning Rate: 0.0002447174 +2024-11-11 21:50:03,590 Train Loss: 0.0003402, Val Loss: 0.0004699 +2024-11-11 21:50:03,590 Epoch 1781/2000 +2024-11-11 21:50:18,791 Current Learning Rate: 0.0002210349 +2024-11-11 21:50:18,792 Train Loss: 0.0004048, Val Loss: 0.0004700 +2024-11-11 21:50:18,792 Epoch 1782/2000 +2024-11-11 21:50:34,836 Current Learning Rate: 0.0001985316 +2024-11-11 21:50:35,663 Train Loss: 0.0004596, Val Loss: 0.0004697 +2024-11-11 21:50:35,663 Epoch 1783/2000 +2024-11-11 21:50:50,740 Current Learning Rate: 0.0001772129 +2024-11-11 21:50:51,674 Train Loss: 0.0003570, Val Loss: 0.0004697 +2024-11-11 21:50:51,675 Epoch 1784/2000 +2024-11-11 21:51:07,094 Current Learning Rate: 0.0001570842 +2024-11-11 21:51:08,197 Train Loss: 0.0003369, Val Loss: 0.0004694 +2024-11-11 21:51:08,198 Epoch 1785/2000 +2024-11-11 21:51:24,013 Current Learning Rate: 0.0001381504 +2024-11-11 21:51:25,004 Train Loss: 0.0003301, Val Loss: 0.0004694 
+2024-11-11 21:51:25,004 Epoch 1786/2000 +2024-11-11 21:51:41,129 Current Learning Rate: 0.0001204162 +2024-11-11 21:51:42,124 Train Loss: 0.0003317, Val Loss: 0.0004694 +2024-11-11 21:51:42,125 Epoch 1787/2000 +2024-11-11 21:51:58,458 Current Learning Rate: 0.0001038859 +2024-11-11 21:51:59,342 Train Loss: 0.0003683, Val Loss: 0.0004693 +2024-11-11 21:51:59,343 Epoch 1788/2000 +2024-11-11 21:52:14,208 Current Learning Rate: 0.0000885637 +2024-11-11 21:52:14,209 Train Loss: 0.0003520, Val Loss: 0.0004693 +2024-11-11 21:52:14,209 Epoch 1789/2000 +2024-11-11 21:52:30,902 Current Learning Rate: 0.0000744534 +2024-11-11 21:52:31,986 Train Loss: 0.0003830, Val Loss: 0.0004693 +2024-11-11 21:52:31,986 Epoch 1790/2000 +2024-11-11 21:52:47,176 Current Learning Rate: 0.0000615583 +2024-11-11 21:52:48,119 Train Loss: 0.0003614, Val Loss: 0.0004692 +2024-11-11 21:52:48,120 Epoch 1791/2000 +2024-11-11 21:53:03,705 Current Learning Rate: 0.0000498817 +2024-11-11 21:53:03,705 Train Loss: 0.0003523, Val Loss: 0.0004692 +2024-11-11 21:53:03,706 Epoch 1792/2000 +2024-11-11 21:53:20,146 Current Learning Rate: 0.0000394265 +2024-11-11 21:53:21,130 Train Loss: 0.0003641, Val Loss: 0.0004692 +2024-11-11 21:53:21,131 Epoch 1793/2000 +2024-11-11 21:53:36,524 Current Learning Rate: 0.0000301952 +2024-11-11 21:53:36,525 Train Loss: 0.0003091, Val Loss: 0.0004692 +2024-11-11 21:53:36,525 Epoch 1794/2000 +2024-11-11 21:53:52,783 Current Learning Rate: 0.0000221902 +2024-11-11 21:53:52,784 Train Loss: 0.0003335, Val Loss: 0.0004692 +2024-11-11 21:53:52,784 Epoch 1795/2000 +2024-11-11 21:54:09,994 Current Learning Rate: 0.0000154133 +2024-11-11 21:54:09,995 Train Loss: 0.0003923, Val Loss: 0.0004692 +2024-11-11 21:54:09,996 Epoch 1796/2000 +2024-11-11 21:54:26,341 Current Learning Rate: 0.0000098664 +2024-11-11 21:54:27,269 Train Loss: 0.0003742, Val Loss: 0.0004692 +2024-11-11 21:54:27,269 Epoch 1797/2000 +2024-11-11 21:54:42,567 Current Learning Rate: 0.0000055506 +2024-11-11 21:54:42,567 
Train Loss: 0.0003479, Val Loss: 0.0004692 +2024-11-11 21:54:42,568 Epoch 1798/2000 +2024-11-11 21:54:58,721 Current Learning Rate: 0.0000024672 +2024-11-11 21:54:59,630 Train Loss: 0.0003606, Val Loss: 0.0004691 +2024-11-11 21:54:59,631 Epoch 1799/2000 +2024-11-11 21:55:15,011 Current Learning Rate: 0.0000006168 +2024-11-11 21:55:15,012 Train Loss: 0.0004004, Val Loss: 0.0004692 +2024-11-11 21:55:15,013 Epoch 1800/2000 +2024-11-11 21:55:31,238 Current Learning Rate: 0.0000000000 +2024-11-11 21:55:31,239 Train Loss: 0.0003904, Val Loss: 0.0004692 +2024-11-11 21:55:31,239 Epoch 1801/2000 +2024-11-11 21:55:47,692 Current Learning Rate: 0.0000006168 +2024-11-11 21:55:47,693 Train Loss: 0.0003716, Val Loss: 0.0004691 +2024-11-11 21:55:47,693 Epoch 1802/2000 +2024-11-11 21:56:03,571 Current Learning Rate: 0.0000024672 +2024-11-11 21:56:03,572 Train Loss: 0.0004168, Val Loss: 0.0004692 +2024-11-11 21:56:03,572 Epoch 1803/2000 +2024-11-11 21:56:19,536 Current Learning Rate: 0.0000055506 +2024-11-11 21:56:19,536 Train Loss: 0.0003723, Val Loss: 0.0004692 +2024-11-11 21:56:19,537 Epoch 1804/2000 +2024-11-11 21:56:36,065 Current Learning Rate: 0.0000098664 +2024-11-11 21:56:36,066 Train Loss: 0.0003215, Val Loss: 0.0004691 +2024-11-11 21:56:36,066 Epoch 1805/2000 +2024-11-11 21:56:52,175 Current Learning Rate: 0.0000154133 +2024-11-11 21:56:53,096 Train Loss: 0.0003321, Val Loss: 0.0004691 +2024-11-11 21:56:53,097 Epoch 1806/2000 +2024-11-11 21:57:08,682 Current Learning Rate: 0.0000221902 +2024-11-11 21:57:09,561 Train Loss: 0.0003177, Val Loss: 0.0004691 +2024-11-11 21:57:09,561 Epoch 1807/2000 +2024-11-11 21:57:24,711 Current Learning Rate: 0.0000301952 +2024-11-11 21:57:24,712 Train Loss: 0.0003665, Val Loss: 0.0004692 +2024-11-11 21:57:24,712 Epoch 1808/2000 +2024-11-11 21:57:41,111 Current Learning Rate: 0.0000394265 +2024-11-11 21:57:41,111 Train Loss: 0.0003340, Val Loss: 0.0004692 +2024-11-11 21:57:41,112 Epoch 1809/2000 +2024-11-11 21:57:58,362 Current Learning 
Rate: 0.0000498817 +2024-11-11 21:57:58,363 Train Loss: 0.0003851, Val Loss: 0.0004692 +2024-11-11 21:57:58,363 Epoch 1810/2000 +2024-11-11 21:58:13,926 Current Learning Rate: 0.0000615583 +2024-11-11 21:58:14,822 Train Loss: 0.0003246, Val Loss: 0.0004691 +2024-11-11 21:58:14,822 Epoch 1811/2000 +2024-11-11 21:58:29,888 Current Learning Rate: 0.0000744534 +2024-11-11 21:58:30,771 Train Loss: 0.0003821, Val Loss: 0.0004691 +2024-11-11 21:58:30,771 Epoch 1812/2000 +2024-11-11 21:58:46,192 Current Learning Rate: 0.0000885637 +2024-11-11 21:58:46,193 Train Loss: 0.0004197, Val Loss: 0.0004692 +2024-11-11 21:58:46,194 Epoch 1813/2000 +2024-11-11 21:59:02,290 Current Learning Rate: 0.0001038859 +2024-11-11 21:59:02,291 Train Loss: 0.0003423, Val Loss: 0.0004692 +2024-11-11 21:59:02,291 Epoch 1814/2000 +2024-11-11 21:59:19,378 Current Learning Rate: 0.0001204162 +2024-11-11 21:59:19,379 Train Loss: 0.0003315, Val Loss: 0.0004692 +2024-11-11 21:59:19,379 Epoch 1815/2000 +2024-11-11 21:59:35,674 Current Learning Rate: 0.0001381504 +2024-11-11 21:59:35,675 Train Loss: 0.0003277, Val Loss: 0.0004692 +2024-11-11 21:59:35,675 Epoch 1816/2000 +2024-11-11 21:59:51,596 Current Learning Rate: 0.0001570842 +2024-11-11 21:59:51,597 Train Loss: 0.0004323, Val Loss: 0.0004693 +2024-11-11 21:59:51,597 Epoch 1817/2000 +2024-11-11 22:00:07,077 Current Learning Rate: 0.0001772129 +2024-11-11 22:00:07,078 Train Loss: 0.0003093, Val Loss: 0.0004693 +2024-11-11 22:00:07,078 Epoch 1818/2000 +2024-11-11 22:00:23,129 Current Learning Rate: 0.0001985316 +2024-11-11 22:00:23,129 Train Loss: 0.0003487, Val Loss: 0.0004693 +2024-11-11 22:00:23,130 Epoch 1819/2000 +2024-11-11 22:00:39,661 Current Learning Rate: 0.0002210349 +2024-11-11 22:00:39,662 Train Loss: 0.0003095, Val Loss: 0.0004694 +2024-11-11 22:00:39,663 Epoch 1820/2000 +2024-11-11 22:00:55,428 Current Learning Rate: 0.0002447174 +2024-11-11 22:00:55,428 Train Loss: 0.0003606, Val Loss: 0.0004694 +2024-11-11 22:00:55,428 Epoch 1821/2000 
+2024-11-11 22:01:12,829 Current Learning Rate: 0.0002695732 +2024-11-11 22:01:12,830 Train Loss: 0.0003425, Val Loss: 0.0004694 +2024-11-11 22:01:12,830 Epoch 1822/2000 +2024-11-11 22:01:28,743 Current Learning Rate: 0.0002955962 +2024-11-11 22:01:28,744 Train Loss: 0.0003432, Val Loss: 0.0004694 +2024-11-11 22:01:28,744 Epoch 1823/2000 +2024-11-11 22:01:45,759 Current Learning Rate: 0.0003227798 +2024-11-11 22:01:45,760 Train Loss: 0.0003854, Val Loss: 0.0004697 +2024-11-11 22:01:45,760 Epoch 1824/2000 +2024-11-11 22:02:01,192 Current Learning Rate: 0.0003511176 +2024-11-11 22:02:01,194 Train Loss: 0.0003785, Val Loss: 0.0004697 +2024-11-11 22:02:01,195 Epoch 1825/2000 +2024-11-11 22:02:16,697 Current Learning Rate: 0.0003806023 +2024-11-11 22:02:16,698 Train Loss: 0.0003138, Val Loss: 0.0004695 +2024-11-11 22:02:16,698 Epoch 1826/2000 +2024-11-11 22:02:33,139 Current Learning Rate: 0.0004112269 +2024-11-11 22:02:33,140 Train Loss: 0.0003309, Val Loss: 0.0004696 +2024-11-11 22:02:33,140 Epoch 1827/2000 +2024-11-11 22:02:49,142 Current Learning Rate: 0.0004429836 +2024-11-11 22:02:49,142 Train Loss: 0.0003257, Val Loss: 0.0004697 +2024-11-11 22:02:49,142 Epoch 1828/2000 +2024-11-11 22:03:05,205 Current Learning Rate: 0.0004758647 +2024-11-11 22:03:05,205 Train Loss: 0.0003437, Val Loss: 0.0004696 +2024-11-11 22:03:05,205 Epoch 1829/2000 +2024-11-11 22:03:21,199 Current Learning Rate: 0.0005098621 +2024-11-11 22:03:21,199 Train Loss: 0.0003767, Val Loss: 0.0004696 +2024-11-11 22:03:21,200 Epoch 1830/2000 +2024-11-11 22:03:37,582 Current Learning Rate: 0.0005449674 +2024-11-11 22:03:37,583 Train Loss: 0.0003757, Val Loss: 0.0004699 +2024-11-11 22:03:37,584 Epoch 1831/2000 +2024-11-11 22:03:53,689 Current Learning Rate: 0.0005811718 +2024-11-11 22:03:53,689 Train Loss: 0.0003557, Val Loss: 0.0004697 +2024-11-11 22:03:53,689 Epoch 1832/2000 +2024-11-11 22:04:10,567 Current Learning Rate: 0.0006184666 +2024-11-11 22:04:10,569 Train Loss: 0.0003385, Val Loss: 0.0004698 
+2024-11-11 22:04:10,572 Epoch 1833/2000 +2024-11-11 22:04:26,485 Current Learning Rate: 0.0006568424 +2024-11-11 22:04:26,485 Train Loss: 0.0003319, Val Loss: 0.0004700 +2024-11-11 22:04:26,486 Epoch 1834/2000 +2024-11-11 22:04:42,683 Current Learning Rate: 0.0006962899 +2024-11-11 22:04:42,683 Train Loss: 0.0003140, Val Loss: 0.0004696 +2024-11-11 22:04:42,683 Epoch 1835/2000 +2024-11-11 22:04:43,134 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-11 22:04:59,269 Current Learning Rate: 0.0007367992 +2024-11-11 22:04:59,271 Train Loss: 0.0003429, Val Loss: 0.0004696 +2024-11-11 22:04:59,272 Epoch 1836/2000 +2024-11-11 22:05:17,200 Testing completed and best model saved. +-11-11 22:05:16,182 Train Loss: 0.0004232, Val Loss: 0.0004709 +2024-11-11 22:05:16,183 Epoch 1837/2000 +2024-11-11 22:05:32,656 Current Learning Rate: 0.0008209632 +2024-11-11 22:05:32,656 Train Loss: 0.0003067, Val Loss: 0.0004707 +2024-11-11 22:05:32,657 Epoch 1838/2000 +2024-11-11 22:05:48,734 Current Learning Rate: 0.0008645971 +2024-11-11 22:05:48,734 Train Loss: 0.0003915, Val Loss: 0.0004725 +2024-11-11 22:05:48,734 Epoch 1839/2000 +2024-11-11 22:06:04,855 Current Learning Rate: 0.0009092514 +2024-11-11 22:06:04,856 Train Loss: 0.0003546, Val Loss: 0.0004736 +2024-11-11 22:06:04,856 Epoch 1840/2000 +2024-11-11 22:06:20,951 Current Learning Rate: 0.0009549150 +2024-11-11 22:06:20,952 Train Loss: 0.0003297, Val Loss: 0.0004727 +2024-11-11 22:06:20,952 Epoch 1841/2000 +2024-11-11 22:06:38,754 Current Learning Rate: 0.0010015767 +2024-11-11 22:06:38,755 Train Loss: 0.0003348, Val Loss: 0.0004729 +2024-11-11 22:06:38,755 Epoch 1842/2000 +2024-11-11 22:06:54,006 Current Learning Rate: 0.0010492249 +2024-11-11 22:06:54,006 Train Loss: 0.0003834, Val Loss: 0.0004737 +2024-11-11 22:06:54,006 Epoch 1843/2000 +2024-11-11 22:07:10,579 Current Learning Rate: 0.0010978480 +2024-11-11 22:07:10,580 Train Loss: 0.0004457, Val Loss: 0.0004777 +2024-11-11 22:07:10,580 Epoch 1844/2000 
+2024-11-11 22:07:27,270 Current Learning Rate: 0.0011474338 +2024-11-11 22:07:27,271 Train Loss: 0.0004340, Val Loss: 0.0004838 +2024-11-11 22:07:27,271 Epoch 1845/2000 +2024-11-11 22:07:43,160 Current Learning Rate: 0.0011979702 +2024-11-11 22:07:43,164 Train Loss: 0.0003392, Val Loss: 0.0004817 +2024-11-11 22:07:43,165 Epoch 1846/2000 +2024-11-11 22:08:00,247 Current Learning Rate: 0.0012494447 +2024-11-11 22:08:00,248 Train Loss: 0.0003844, Val Loss: 0.0004774 +2024-11-11 22:08:00,248 Epoch 1847/2000 +2024-11-11 22:08:15,871 Current Learning Rate: 0.0013018445 +2024-11-11 22:08:15,871 Train Loss: 0.0003830, Val Loss: 0.0004773 +2024-11-11 22:08:15,871 Epoch 1848/2000 +2024-11-11 22:08:31,385 Current Learning Rate: 0.0013551569 +2024-11-11 22:08:31,386 Train Loss: 0.0003410, Val Loss: 0.0004737 +2024-11-11 22:08:31,386 Epoch 1849/2000 +2024-11-11 22:08:47,358 Current Learning Rate: 0.0014093685 +2024-11-11 22:08:47,359 Train Loss: 0.0003742, Val Loss: 0.0004718 +2024-11-11 22:08:47,359 Epoch 1850/2000 +2024-11-11 22:09:02,864 Current Learning Rate: 0.0014644661 +2024-11-11 22:09:02,865 Train Loss: 0.0003540, Val Loss: 0.0004720 +2024-11-11 22:09:02,865 Epoch 1851/2000 +2024-11-11 22:09:18,445 Current Learning Rate: 0.0015204360 +2024-11-11 22:09:18,446 Train Loss: 0.0003742, Val Loss: 0.0004732 +2024-11-11 22:09:18,446 Epoch 1852/2000 +2024-11-11 22:09:33,902 Current Learning Rate: 0.0015772645 +2024-11-11 22:09:33,903 Train Loss: 0.0003416, Val Loss: 0.0004722 +2024-11-11 22:09:33,903 Epoch 1853/2000 +2024-11-11 22:09:50,443 Current Learning Rate: 0.0016349374 +2024-11-11 22:09:50,444 Train Loss: 0.0003718, Val Loss: 0.0004713 +2024-11-11 22:09:50,444 Epoch 1854/2000 +2024-11-11 22:10:05,969 Current Learning Rate: 0.0016934407 +2024-11-11 22:10:05,970 Train Loss: 0.0003528, Val Loss: 0.0004708 +2024-11-11 22:10:05,970 Epoch 1855/2000 +2024-11-11 22:10:21,652 Current Learning Rate: 0.0017527598 +2024-11-11 22:10:21,653 Train Loss: 0.0004123, Val Loss: 0.0004723 
+2024-11-11 22:10:21,653 Epoch 1856/2000 +2024-11-11 22:10:38,704 Current Learning Rate: 0.0018128801 +2024-11-11 22:10:38,705 Train Loss: 0.0003573, Val Loss: 0.0004761 +2024-11-11 22:10:38,705 Epoch 1857/2000 +2024-11-11 22:10:54,696 Current Learning Rate: 0.0018737867 +2024-11-11 22:10:54,698 Train Loss: 0.0003185, Val Loss: 0.0004699 +2024-11-11 22:10:54,699 Epoch 1858/2000 +2024-11-11 22:11:11,148 Current Learning Rate: 0.0019354647 +2024-11-11 22:11:11,972 Train Loss: 0.0003121, Val Loss: 0.0004688 +2024-11-11 22:11:11,973 Epoch 1859/2000 +2024-11-11 22:11:27,466 Current Learning Rate: 0.0019978989 +2024-11-11 22:11:27,467 Train Loss: 0.0003598, Val Loss: 0.0004757 +2024-11-11 22:11:27,468 Epoch 1860/2000 +2024-11-11 22:11:43,767 Current Learning Rate: 0.0020610737 +2024-11-11 22:11:43,768 Train Loss: 0.0003094, Val Loss: 0.0004693 +2024-11-11 22:11:43,768 Epoch 1861/2000 +2024-11-11 22:11:59,732 Current Learning Rate: 0.0021249737 +2024-11-11 22:11:59,732 Train Loss: 0.0004457, Val Loss: 0.0005119 +2024-11-11 22:11:59,732 Epoch 1862/2000 +2024-11-11 22:12:15,595 Current Learning Rate: 0.0021895831 +2024-11-11 22:12:15,596 Train Loss: 0.0003953, Val Loss: 0.0004799 +2024-11-11 22:12:15,596 Epoch 1863/2000 +2024-11-11 22:12:32,638 Current Learning Rate: 0.0022548859 +2024-11-11 22:12:32,639 Train Loss: 0.0003458, Val Loss: 0.0004753 +2024-11-11 22:12:32,639 Epoch 1864/2000 +2024-11-11 22:12:48,851 Current Learning Rate: 0.0023208660 +2024-11-11 22:12:48,852 Train Loss: 0.0003620, Val Loss: 0.0004777 +2024-11-11 22:12:48,852 Epoch 1865/2000 +2024-11-11 22:13:04,936 Current Learning Rate: 0.0023875072 +2024-11-11 22:13:04,936 Train Loss: 0.0003907, Val Loss: 0.0004786 +2024-11-11 22:13:04,937 Epoch 1866/2000 +2024-11-11 22:13:22,147 Current Learning Rate: 0.0024547929 +2024-11-11 22:13:22,147 Train Loss: 0.0003682, Val Loss: 0.0004740 +2024-11-11 22:13:22,148 Epoch 1867/2000 +2024-11-11 22:13:37,714 Current Learning Rate: 0.0025227067 +2024-11-11 22:13:37,715 
Train Loss: 0.0004295, Val Loss: 0.0004764 +2024-11-11 22:13:37,715 Epoch 1868/2000 +2024-11-11 22:13:54,690 Current Learning Rate: 0.0025912316 +2024-11-11 22:13:54,691 Train Loss: 0.0003818, Val Loss: 0.0004751 +2024-11-11 22:13:54,691 Epoch 1869/2000 +2024-11-11 22:14:11,115 Current Learning Rate: 0.0026603509 +2024-11-11 22:14:11,115 Train Loss: 0.0003718, Val Loss: 0.0004788 +2024-11-11 22:14:11,115 Epoch 1870/2000 +2024-11-11 22:14:27,353 Current Learning Rate: 0.0027300475 +2024-11-11 22:14:27,354 Train Loss: 0.0003444, Val Loss: 0.0004801 +2024-11-11 22:14:27,354 Epoch 1871/2000 +2024-11-11 22:14:43,008 Current Learning Rate: 0.0028003042 +2024-11-11 22:14:43,009 Train Loss: 0.0004120, Val Loss: 0.0004983 +2024-11-11 22:14:43,009 Epoch 1872/2000 +2024-11-11 22:14:59,398 Current Learning Rate: 0.0028711035 +2024-11-11 22:14:59,399 Train Loss: 0.0003376, Val Loss: 0.0004895 +2024-11-11 22:14:59,399 Epoch 1873/2000 +2024-11-11 22:15:15,467 Current Learning Rate: 0.0029424282 +2024-11-11 22:15:15,468 Train Loss: 0.0004411, Val Loss: 0.0005521 +2024-11-11 22:15:15,468 Epoch 1874/2000 +2024-11-11 22:15:31,189 Current Learning Rate: 0.0030142605 +2024-11-11 22:15:31,190 Train Loss: 0.0003611, Val Loss: 0.0004960 +2024-11-11 22:15:31,190 Epoch 1875/2000 +2024-11-11 22:15:47,235 Current Learning Rate: 0.0030865828 +2024-11-11 22:15:47,236 Train Loss: 0.0003937, Val Loss: 0.0004952 +2024-11-11 22:15:47,236 Epoch 1876/2000 +2024-11-11 22:16:03,671 Current Learning Rate: 0.0031593772 +2024-11-11 22:16:03,672 Train Loss: 0.0004007, Val Loss: 0.0005017 +2024-11-11 22:16:03,672 Epoch 1877/2000 +2024-11-11 22:16:19,734 Current Learning Rate: 0.0032326258 +2024-11-11 22:16:19,735 Train Loss: 0.0003837, Val Loss: 0.0005004 +2024-11-11 22:16:19,736 Epoch 1878/2000 +2024-11-11 22:16:35,863 Current Learning Rate: 0.0033063104 +2024-11-11 22:16:35,864 Train Loss: 0.0003563, Val Loss: 0.0004938 +2024-11-11 22:16:35,864 Epoch 1879/2000 +2024-11-11 22:16:51,733 Current Learning 
Rate: 0.0033804129 +2024-11-11 22:16:51,734 Train Loss: 0.0003751, Val Loss: 0.0004927 +2024-11-11 22:16:51,734 Epoch 1880/2000 +2024-11-11 22:17:07,406 Current Learning Rate: 0.0034549150 +2024-11-11 22:17:07,406 Train Loss: 0.0003335, Val Loss: 0.0005016 +2024-11-11 22:17:07,406 Epoch 1881/2000 +2024-11-11 22:17:23,536 Current Learning Rate: 0.0035297984 +2024-11-11 22:17:23,537 Train Loss: 0.0003847, Val Loss: 0.0004856 +2024-11-11 22:17:23,538 Epoch 1882/2000 +2024-11-11 22:17:40,039 Current Learning Rate: 0.0036050445 +2024-11-11 22:17:40,040 Train Loss: 0.0003872, Val Loss: 0.0005008 +2024-11-11 22:17:40,040 Epoch 1883/2000 +2024-11-11 22:17:56,607 Current Learning Rate: 0.0036806348 +2024-11-11 22:17:56,608 Train Loss: 0.0004442, Val Loss: 0.0004979 +2024-11-11 22:17:56,608 Epoch 1884/2000 +2024-11-11 22:18:12,168 Current Learning Rate: 0.0037565506 +2024-11-11 22:18:12,170 Train Loss: 0.0004071, Val Loss: 0.0004930 +2024-11-11 22:18:12,170 Epoch 1885/2000 +2024-11-11 22:18:28,927 Current Learning Rate: 0.0038327732 +2024-11-11 22:18:28,927 Train Loss: 0.0003319, Val Loss: 0.0004986 +2024-11-11 22:18:28,928 Epoch 1886/2000 +2024-11-11 22:18:44,511 Current Learning Rate: 0.0039092838 +2024-11-11 22:18:44,511 Train Loss: 0.0003312, Val Loss: 0.0005034 +2024-11-11 22:18:44,511 Epoch 1887/2000 +2024-11-11 22:19:00,428 Current Learning Rate: 0.0039860635 +2024-11-11 22:19:00,429 Train Loss: 0.0004128, Val Loss: 0.0005134 +2024-11-11 22:19:00,429 Epoch 1888/2000 +2024-11-11 22:19:16,218 Current Learning Rate: 0.0040630934 +2024-11-11 22:19:16,218 Train Loss: 0.0003791, Val Loss: 0.0005249 +2024-11-11 22:19:16,219 Epoch 1889/2000 +2024-11-11 22:19:32,392 Current Learning Rate: 0.0041403545 +2024-11-11 22:19:32,393 Train Loss: 0.0003771, Val Loss: 0.0005103 +2024-11-11 22:19:32,393 Epoch 1890/2000 +2024-11-11 22:19:48,686 Current Learning Rate: 0.0042178277 +2024-11-11 22:19:48,687 Train Loss: 0.0003920, Val Loss: 0.0005140 +2024-11-11 22:19:48,687 Epoch 1891/2000 
+2024-11-11 22:20:05,551 Current Learning Rate: 0.0042954938 +2024-11-11 22:20:05,552 Train Loss: 0.0004391, Val Loss: 0.0005231 +2024-11-11 22:20:05,552 Epoch 1892/2000 +2024-11-11 22:20:21,539 Current Learning Rate: 0.0043733338 +2024-11-11 22:20:21,540 Train Loss: 0.0003930, Val Loss: 0.0005018 +2024-11-11 22:20:21,540 Epoch 1893/2000 +2024-11-11 22:20:38,421 Current Learning Rate: 0.0044513284 +2024-11-11 22:20:38,421 Train Loss: 0.0003513, Val Loss: 0.0004932 +2024-11-11 22:20:38,421 Epoch 1894/2000 +2024-11-11 22:20:55,353 Current Learning Rate: 0.0045294584 +2024-11-11 22:20:55,354 Train Loss: 0.0004034, Val Loss: 0.0005197 +2024-11-11 22:20:55,355 Epoch 1895/2000 +2024-11-11 22:21:11,844 Current Learning Rate: 0.0046077045 +2024-11-11 22:21:11,844 Train Loss: 0.0004092, Val Loss: 0.0005259 +2024-11-11 22:21:11,845 Epoch 1896/2000 +2024-11-11 22:21:28,285 Current Learning Rate: 0.0046860474 +2024-11-11 22:21:28,286 Train Loss: 0.0003873, Val Loss: 0.0005355 +2024-11-11 22:21:28,305 Epoch 1897/2000 +2024-11-11 22:21:44,025 Current Learning Rate: 0.0047644677 +2024-11-11 22:21:44,025 Train Loss: 0.0005417, Val Loss: 0.0005593 +2024-11-11 22:21:44,026 Epoch 1898/2000 +2024-11-11 22:22:00,455 Current Learning Rate: 0.0048429462 +2024-11-11 22:22:00,456 Train Loss: 0.0004578, Val Loss: 0.0005557 +2024-11-11 22:22:00,456 Epoch 1899/2000 +2024-11-11 22:22:16,620 Current Learning Rate: 0.0049214634 +2024-11-11 22:22:16,621 Train Loss: 0.0004109, Val Loss: 0.0005379 +2024-11-11 22:22:16,621 Epoch 1900/2000 +2024-11-11 22:22:32,720 Current Learning Rate: 0.0050000000 +2024-11-11 22:22:32,721 Train Loss: 0.0004444, Val Loss: 0.0005574 +2024-11-11 22:22:32,721 Epoch 1901/2000 +2024-11-11 22:22:48,745 Current Learning Rate: 0.0050785366 +2024-11-11 22:22:48,745 Train Loss: 0.0004269, Val Loss: 0.0005297 +2024-11-11 22:22:48,747 Epoch 1902/2000 +2024-11-11 22:23:05,199 Current Learning Rate: 0.0051570538 +2024-11-11 22:23:05,199 Train Loss: 0.0003823, Val Loss: 0.0005118 
+2024-11-11 22:23:05,199 Epoch 1903/2000 +2024-11-11 22:23:21,125 Current Learning Rate: 0.0052355323 +2024-11-11 22:23:21,125 Train Loss: 0.0004586, Val Loss: 0.0005273 +2024-11-11 22:23:21,125 Epoch 1904/2000 +2024-11-11 22:23:38,268 Current Learning Rate: 0.0053139526 +2024-11-11 22:23:38,268 Train Loss: 0.0004171, Val Loss: 0.0005362 +2024-11-11 22:23:38,269 Epoch 1905/2000 +2024-11-11 22:23:55,405 Current Learning Rate: 0.0053922955 +2024-11-11 22:23:55,406 Train Loss: 0.0004146, Val Loss: 0.0005299 +2024-11-11 22:23:55,406 Epoch 1906/2000 +2024-11-11 22:24:11,620 Current Learning Rate: 0.0054705416 +2024-11-11 22:24:11,621 Train Loss: 0.0003814, Val Loss: 0.0005246 +2024-11-11 22:24:11,621 Epoch 1907/2000 +2024-11-11 22:24:28,148 Current Learning Rate: 0.0055486716 +2024-11-11 22:24:28,149 Train Loss: 0.0003620, Val Loss: 0.0005353 +2024-11-11 22:24:28,149 Epoch 1908/2000 +2024-11-11 22:24:44,726 Current Learning Rate: 0.0056266662 +2024-11-11 22:24:44,726 Train Loss: 0.0003887, Val Loss: 0.0005592 +2024-11-11 22:24:44,727 Epoch 1909/2000 +2024-11-11 22:25:00,627 Current Learning Rate: 0.0057045062 +2024-11-11 22:25:00,628 Train Loss: 0.0004154, Val Loss: 0.0005705 +2024-11-11 22:25:00,628 Epoch 1910/2000 +2024-11-11 22:25:16,816 Current Learning Rate: 0.0057821723 +2024-11-11 22:25:16,817 Train Loss: 0.0004081, Val Loss: 0.0005572 +2024-11-11 22:25:16,817 Epoch 1911/2000 +2024-11-11 22:25:32,575 Current Learning Rate: 0.0058596455 +2024-11-11 22:25:32,576 Train Loss: 0.0005201, Val Loss: 0.0006403 +2024-11-11 22:25:32,577 Epoch 1912/2000 +2024-11-11 22:25:48,941 Current Learning Rate: 0.0059369066 +2024-11-11 22:25:48,942 Train Loss: 0.0005510, Val Loss: 0.0005839 +2024-11-11 22:25:48,942 Epoch 1913/2000 +2024-11-11 22:26:04,790 Current Learning Rate: 0.0060139365 +2024-11-11 22:26:04,791 Train Loss: 0.0004719, Val Loss: 0.0005980 +2024-11-11 22:26:04,791 Epoch 1914/2000 +2024-11-11 22:26:21,203 Current Learning Rate: 0.0060907162 +2024-11-11 22:26:21,204 
Train Loss: 0.0004550, Val Loss: 0.0005598 +2024-11-11 22:26:21,204 Epoch 1915/2000 +2024-11-11 22:26:36,281 Current Learning Rate: 0.0061672268 +2024-11-11 22:26:36,281 Train Loss: 0.0004500, Val Loss: 0.0005986 +2024-11-11 22:26:36,282 Epoch 1916/2000 +2024-11-11 22:26:51,823 Current Learning Rate: 0.0062434494 +2024-11-11 22:26:51,824 Train Loss: 0.0004075, Val Loss: 0.0005628 +2024-11-11 22:26:51,824 Epoch 1917/2000 +2024-11-11 22:27:07,607 Current Learning Rate: 0.0063193652 +2024-11-11 22:27:07,608 Train Loss: 0.0004483, Val Loss: 0.0005511 +2024-11-11 22:27:07,608 Epoch 1918/2000 +2024-11-11 22:27:24,771 Current Learning Rate: 0.0063949555 +2024-11-11 22:27:24,772 Train Loss: 0.0004663, Val Loss: 0.0005478 +2024-11-11 22:27:24,772 Epoch 1919/2000 +2024-11-11 22:27:39,734 Current Learning Rate: 0.0064702016 +2024-11-11 22:27:39,735 Train Loss: 0.0003983, Val Loss: 0.0005294 +2024-11-11 22:27:39,735 Epoch 1920/2000 +2024-11-11 22:27:55,598 Current Learning Rate: 0.0065450850 +2024-11-11 22:27:55,598 Train Loss: 0.0004561, Val Loss: 0.0005404 +2024-11-11 22:27:55,599 Epoch 1921/2000 +2024-11-11 22:28:11,431 Current Learning Rate: 0.0066195871 +2024-11-11 22:28:11,432 Train Loss: 0.0003968, Val Loss: 0.0005200 +2024-11-11 22:28:11,432 Epoch 1922/2000 +2024-11-11 22:28:27,754 Current Learning Rate: 0.0066936896 +2024-11-11 22:28:27,754 Train Loss: 0.0003605, Val Loss: 0.0005241 +2024-11-11 22:28:27,755 Epoch 1923/2000 +2024-11-11 22:28:44,247 Current Learning Rate: 0.0067673742 +2024-11-11 22:28:44,247 Train Loss: 0.0004936, Val Loss: 0.0005612 +2024-11-11 22:28:44,248 Epoch 1924/2000 +2024-11-11 22:29:00,145 Current Learning Rate: 0.0068406228 +2024-11-11 22:29:00,146 Train Loss: 0.0004738, Val Loss: 0.0005509 +2024-11-11 22:29:00,146 Epoch 1925/2000 +2024-11-11 22:29:15,838 Current Learning Rate: 0.0069134172 +2024-11-11 22:29:15,839 Train Loss: 0.0004473, Val Loss: 0.0005633 +2024-11-11 22:29:15,839 Epoch 1926/2000 +2024-11-11 22:29:31,757 Current Learning 
Rate: 0.0069857395 +2024-11-11 22:29:31,757 Train Loss: 0.0005060, Val Loss: 0.0005654 +2024-11-11 22:29:31,757 Epoch 1927/2000 +2024-11-11 22:29:47,388 Current Learning Rate: 0.0070575718 +2024-11-11 22:29:47,389 Train Loss: 0.0003975, Val Loss: 0.0005398 +2024-11-11 22:29:47,389 Epoch 1928/2000 +2024-11-11 22:30:03,786 Current Learning Rate: 0.0071288965 +2024-11-11 22:30:03,787 Train Loss: 0.0004850, Val Loss: 0.0005724 +2024-11-11 22:30:03,787 Epoch 1929/2000 +2024-11-11 22:30:19,730 Current Learning Rate: 0.0071996958 +2024-11-11 22:30:19,731 Train Loss: 0.0004567, Val Loss: 0.0005773 +2024-11-11 22:30:19,731 Epoch 1930/2000 +2024-11-11 22:30:35,592 Current Learning Rate: 0.0072699525 +2024-11-11 22:30:35,592 Train Loss: 0.0004131, Val Loss: 0.0005631 +2024-11-11 22:30:35,592 Epoch 1931/2000 +2024-11-11 22:30:51,100 Current Learning Rate: 0.0073396491 +2024-11-11 22:30:51,101 Train Loss: 0.0004963, Val Loss: 0.0005700 +2024-11-11 22:30:51,101 Epoch 1932/2000 +2024-11-11 22:31:06,570 Current Learning Rate: 0.0074087684 +2024-11-11 22:31:06,571 Train Loss: 0.0004965, Val Loss: 0.0006470 +2024-11-11 22:31:06,571 Epoch 1933/2000 +2024-11-11 22:31:22,792 Current Learning Rate: 0.0074772933 +2024-11-11 22:31:22,792 Train Loss: 0.0005311, Val Loss: 0.0006102 +2024-11-11 22:31:22,793 Epoch 1934/2000 +2024-11-11 22:31:38,520 Current Learning Rate: 0.0075452071 +2024-11-11 22:31:38,520 Train Loss: 0.0004443, Val Loss: 0.0005798 +2024-11-11 22:31:38,521 Epoch 1935/2000 +2024-11-11 22:31:53,616 Current Learning Rate: 0.0076124928 +2024-11-11 22:31:53,616 Train Loss: 0.0004478, Val Loss: 0.0005784 +2024-11-11 22:31:53,617 Epoch 1936/2000 +2024-11-11 22:32:09,026 Current Learning Rate: 0.0076791340 +2024-11-11 22:32:09,026 Train Loss: 0.0004659, Val Loss: 0.0006153 +2024-11-11 22:32:09,027 Epoch 1937/2000 +2024-11-11 22:32:24,609 Current Learning Rate: 0.0077451141 +2024-11-11 22:32:24,610 Train Loss: 0.0004430, Val Loss: 0.0006007 +2024-11-11 22:32:24,610 Epoch 1938/2000 
+2024-11-11 22:32:40,230 Current Learning Rate: 0.0078104169 +2024-11-11 22:32:40,231 Train Loss: 0.0005379, Val Loss: 0.0006273 +2024-11-11 22:32:40,232 Epoch 1939/2000 +2024-11-11 22:32:55,734 Current Learning Rate: 0.0078750263 +2024-11-11 22:32:55,734 Train Loss: 0.0005183, Val Loss: 0.0006318 +2024-11-11 22:32:55,734 Epoch 1940/2000 +2024-11-11 22:33:11,066 Current Learning Rate: 0.0079389263 +2024-11-11 22:33:11,066 Train Loss: 0.0005867, Val Loss: 0.0006767 +2024-11-11 22:33:11,066 Epoch 1941/2000 +2024-11-11 22:33:26,922 Current Learning Rate: 0.0080021011 +2024-11-11 22:33:26,923 Train Loss: 0.0004844, Val Loss: 0.0006373 +2024-11-11 22:33:26,924 Epoch 1942/2000 +2024-11-11 22:33:42,301 Current Learning Rate: 0.0080645353 +2024-11-11 22:33:42,302 Train Loss: 0.0005559, Val Loss: 0.0005938 +2024-11-11 22:33:42,302 Epoch 1943/2000 +2024-11-11 22:33:57,589 Current Learning Rate: 0.0081262133 +2024-11-11 22:33:57,589 Train Loss: 0.0005101, Val Loss: 0.0006782 +2024-11-11 22:33:57,590 Epoch 1944/2000 +2024-11-11 22:34:13,075 Current Learning Rate: 0.0081871199 +2024-11-11 22:34:13,076 Train Loss: 0.0005106, Val Loss: 0.0005803 +2024-11-11 22:34:13,076 Epoch 1945/2000 +2024-11-11 22:34:29,641 Current Learning Rate: 0.0082472402 +2024-11-11 22:34:29,642 Train Loss: 0.0004256, Val Loss: 0.0005619 +2024-11-11 22:34:29,642 Epoch 1946/2000 +2024-11-11 22:34:46,539 Current Learning Rate: 0.0083065593 +2024-11-11 22:34:46,539 Train Loss: 0.0005152, Val Loss: 0.0005588 +2024-11-11 22:34:46,539 Epoch 1947/2000 +2024-11-11 22:35:03,225 Current Learning Rate: 0.0083650626 +2024-11-11 22:35:03,226 Train Loss: 0.0003819, Val Loss: 0.0005585 +2024-11-11 22:35:03,227 Epoch 1948/2000 +2024-11-11 22:35:19,521 Current Learning Rate: 0.0084227355 +2024-11-11 22:35:19,522 Train Loss: 0.0004611, Val Loss: 0.0005604 +2024-11-11 22:35:19,522 Epoch 1949/2000 +2024-11-11 22:35:35,988 Current Learning Rate: 0.0084795640 +2024-11-11 22:35:35,989 Train Loss: 0.0005769, Val Loss: 0.0006435 
+2024-11-11 22:35:35,989 Epoch 1950/2000 +2024-11-11 22:35:51,984 Current Learning Rate: 0.0085355339 +2024-11-11 22:35:51,985 Train Loss: 0.0004643, Val Loss: 0.0005785 +2024-11-11 22:35:51,985 Epoch 1951/2000 +2024-11-11 22:36:07,018 Current Learning Rate: 0.0085906315 +2024-11-11 22:36:07,019 Train Loss: 0.0004961, Val Loss: 0.0005766 +2024-11-11 22:36:07,019 Epoch 1952/2000 +2024-11-11 22:36:23,026 Current Learning Rate: 0.0086448431 +2024-11-11 22:36:23,026 Train Loss: 0.0005167, Val Loss: 0.0005771 +2024-11-11 22:36:23,026 Epoch 1953/2000 +2024-11-11 22:36:38,473 Current Learning Rate: 0.0086981555 +2024-11-11 22:36:38,473 Train Loss: 0.0004278, Val Loss: 0.0005576 +2024-11-11 22:36:38,474 Epoch 1954/2000 +2024-11-11 22:36:54,301 Current Learning Rate: 0.0087505553 +2024-11-11 22:36:54,305 Train Loss: 0.0004320, Val Loss: 0.0005682 +2024-11-11 22:36:54,306 Epoch 1955/2000 +2024-11-11 22:37:10,387 Current Learning Rate: 0.0088020298 +2024-11-11 22:37:10,387 Train Loss: 0.0003972, Val Loss: 0.0005400 +2024-11-11 22:37:10,387 Epoch 1956/2000 +2024-11-11 22:37:26,164 Current Learning Rate: 0.0088525662 +2024-11-11 22:37:26,165 Train Loss: 0.0005276, Val Loss: 0.0005955 +2024-11-11 22:37:26,165 Epoch 1957/2000 +2024-11-11 22:37:42,188 Current Learning Rate: 0.0089021520 +2024-11-11 22:37:42,189 Train Loss: 0.0004871, Val Loss: 0.0005588 +2024-11-11 22:37:42,189 Epoch 1958/2000 +2024-11-11 22:37:57,877 Current Learning Rate: 0.0089507751 +2024-11-11 22:37:57,878 Train Loss: 0.0004784, Val Loss: 0.0006114 +2024-11-11 22:37:57,878 Epoch 1959/2000 +2024-11-11 22:38:14,224 Current Learning Rate: 0.0089984233 +2024-11-11 22:38:14,225 Train Loss: 0.0004367, Val Loss: 0.0005510 +2024-11-11 22:38:14,225 Epoch 1960/2000 +2024-11-11 22:38:30,170 Current Learning Rate: 0.0090450850 +2024-11-11 22:38:30,170 Train Loss: 0.0005190, Val Loss: 0.0005609 +2024-11-11 22:38:30,171 Epoch 1961/2000 +2024-11-11 22:38:46,174 Current Learning Rate: 0.0090907486 +2024-11-11 22:38:46,174 
Train Loss: 0.0004511, Val Loss: 0.0006288 +2024-11-11 22:38:46,175 Epoch 1962/2000 +2024-11-11 22:39:02,107 Current Learning Rate: 0.0091354029 +2024-11-11 22:39:02,108 Train Loss: 0.0004677, Val Loss: 0.0005959 +2024-11-11 22:39:02,109 Epoch 1963/2000 +2024-11-11 22:39:18,754 Current Learning Rate: 0.0091790368 +2024-11-11 22:39:18,755 Train Loss: 0.0005031, Val Loss: 0.0006509 +2024-11-11 22:39:18,755 Epoch 1964/2000 +2024-11-11 22:39:35,031 Current Learning Rate: 0.0092216396 +2024-11-11 22:39:35,031 Train Loss: 0.0005291, Val Loss: 0.0006005 +2024-11-11 22:39:35,031 Epoch 1965/2000 +2024-11-11 22:39:50,174 Current Learning Rate: 0.0092632008 +2024-11-11 22:39:50,175 Train Loss: 0.0004980, Val Loss: 0.0006615 +2024-11-11 22:39:50,175 Epoch 1966/2000 +2024-11-11 22:40:05,947 Current Learning Rate: 0.0093037101 +2024-11-11 22:40:05,947 Train Loss: 0.0005937, Val Loss: 0.0006493 +2024-11-11 22:40:05,948 Epoch 1967/2000 +2024-11-11 22:40:21,471 Current Learning Rate: 0.0093431576 +2024-11-11 22:40:21,472 Train Loss: 0.0004971, Val Loss: 0.0005757 +2024-11-11 22:40:21,472 Epoch 1968/2000 +2024-11-11 22:40:37,415 Current Learning Rate: 0.0093815334 +2024-11-11 22:40:37,416 Train Loss: 0.0004579, Val Loss: 0.0005759 +2024-11-11 22:40:37,416 Epoch 1969/2000 +2024-11-11 22:40:54,488 Current Learning Rate: 0.0094188282 +2024-11-11 22:40:54,489 Train Loss: 0.0005135, Val Loss: 0.0005550 +2024-11-11 22:40:54,489 Epoch 1970/2000 +2024-11-11 22:41:10,743 Current Learning Rate: 0.0094550326 +2024-11-11 22:41:10,743 Train Loss: 0.0004032, Val Loss: 0.0005354 +2024-11-11 22:41:10,744 Epoch 1971/2000 +2024-11-11 22:41:27,207 Current Learning Rate: 0.0094901379 +2024-11-11 22:41:27,209 Train Loss: 0.0004060, Val Loss: 0.0005371 +2024-11-11 22:41:27,209 Epoch 1972/2000 +2024-11-11 22:41:43,846 Current Learning Rate: 0.0095241353 +2024-11-11 22:41:43,847 Train Loss: 0.0004552, Val Loss: 0.0005439 +2024-11-11 22:41:43,848 Epoch 1973/2000 +2024-11-11 22:41:59,302 Current Learning 
Rate: 0.0095570164 +2024-11-11 22:41:59,302 Train Loss: 0.0004786, Val Loss: 0.0006519 +2024-11-11 22:41:59,303 Epoch 1974/2000 +2024-11-11 22:42:16,111 Current Learning Rate: 0.0095887731 +2024-11-11 22:42:16,112 Train Loss: 0.0005079, Val Loss: 0.0005804 +2024-11-11 22:42:16,112 Epoch 1975/2000 +2024-11-11 22:42:32,181 Current Learning Rate: 0.0096193977 +2024-11-11 22:42:32,182 Train Loss: 0.0004874, Val Loss: 0.0006093 +2024-11-11 22:42:32,182 Epoch 1976/2000 +2024-11-11 22:42:49,302 Current Learning Rate: 0.0096488824 +2024-11-11 22:42:49,303 Train Loss: 0.0004719, Val Loss: 0.0005910 +2024-11-11 22:42:49,304 Epoch 1977/2000 +2024-11-11 22:43:06,203 Current Learning Rate: 0.0096772202 +2024-11-11 22:43:06,203 Train Loss: 0.0005030, Val Loss: 0.0006389 +2024-11-11 22:43:06,204 Epoch 1978/2000 +2024-11-11 22:43:21,586 Current Learning Rate: 0.0097044038 +2024-11-11 22:43:21,587 Train Loss: 0.0005297, Val Loss: 0.0006394 +2024-11-11 22:43:21,587 Epoch 1979/2000 +2024-11-11 22:43:37,738 Current Learning Rate: 0.0097304268 +2024-11-11 22:43:37,739 Train Loss: 0.0005388, Val Loss: 0.0006585 +2024-11-11 22:43:37,739 Epoch 1980/2000 +2024-11-11 22:43:53,676 Current Learning Rate: 0.0097552826 +2024-11-11 22:43:53,676 Train Loss: 0.0005008, Val Loss: 0.0006466 +2024-11-11 22:43:53,677 Epoch 1981/2000 +2024-11-11 22:44:09,510 Current Learning Rate: 0.0097789651 +2024-11-11 22:44:09,510 Train Loss: 0.0005345, Val Loss: 0.0006037 +2024-11-11 22:44:09,510 Epoch 1982/2000 +2024-11-11 22:44:25,307 Current Learning Rate: 0.0098014684 +2024-11-11 22:44:25,308 Train Loss: 0.0005008, Val Loss: 0.0006197 +2024-11-11 22:44:25,308 Epoch 1983/2000 +2024-11-11 22:44:41,161 Current Learning Rate: 0.0098227871 +2024-11-11 22:44:41,162 Train Loss: 0.0005541, Val Loss: 0.0005858 +2024-11-11 22:44:41,162 Epoch 1984/2000 +2024-11-11 22:44:56,974 Current Learning Rate: 0.0098429158 +2024-11-11 22:44:56,974 Train Loss: 0.0004540, Val Loss: 0.0005872 +2024-11-11 22:44:56,975 Epoch 1985/2000 
+2024-11-11 22:45:12,528 Current Learning Rate: 0.0098618496 +2024-11-11 22:45:12,528 Train Loss: 0.0005192, Val Loss: 0.0006465 +2024-11-11 22:45:12,529 Epoch 1986/2000 +2024-11-11 22:45:28,548 Current Learning Rate: 0.0098795838 +2024-11-11 22:45:28,548 Train Loss: 0.0005380, Val Loss: 0.0006143 +2024-11-11 22:45:28,549 Epoch 1987/2000 +2024-11-11 22:45:45,004 Current Learning Rate: 0.0098961141 +2024-11-11 22:45:45,005 Train Loss: 0.0004710, Val Loss: 0.0005924 +2024-11-11 22:45:45,005 Epoch 1988/2000 +2024-11-11 22:46:00,953 Current Learning Rate: 0.0099114363 +2024-11-11 22:46:00,954 Train Loss: 0.0005370, Val Loss: 0.0006376 +2024-11-11 22:46:00,954 Epoch 1989/2000 +2024-11-11 22:46:16,911 Current Learning Rate: 0.0099255466 +2024-11-11 22:46:16,911 Train Loss: 0.0004813, Val Loss: 0.0006095 +2024-11-11 22:46:16,911 Epoch 1990/2000 +2024-11-11 22:46:32,954 Current Learning Rate: 0.0099384417 +2024-11-11 22:46:32,955 Train Loss: 0.0005072, Val Loss: 0.0006012 +2024-11-11 22:46:32,955 Epoch 1991/2000 +2024-11-11 22:46:48,743 Current Learning Rate: 0.0099501183 +2024-11-11 22:46:48,744 Train Loss: 0.0004263, Val Loss: 0.0006099 +2024-11-11 22:46:48,744 Epoch 1992/2000 +2024-11-11 22:47:04,812 Current Learning Rate: 0.0099605735 +2024-11-11 22:47:04,812 Train Loss: 0.0004198, Val Loss: 0.0006060 +2024-11-11 22:47:04,813 Epoch 1993/2000 +2024-11-11 22:47:21,632 Current Learning Rate: 0.0099698048 +2024-11-11 22:47:21,633 Train Loss: 0.0005051, Val Loss: 0.0005728 +2024-11-11 22:47:21,633 Epoch 1994/2000 +2024-11-11 22:47:37,650 Current Learning Rate: 0.0099778098 +2024-11-11 22:47:37,650 Train Loss: 0.0004463, Val Loss: 0.0005757 +2024-11-11 22:47:37,651 Epoch 1995/2000 +2024-11-11 22:47:53,167 Current Learning Rate: 0.0099845867 +2024-11-11 22:47:53,167 Train Loss: 0.0004818, Val Loss: 0.0005728 +2024-11-11 22:47:53,168 Epoch 1996/2000 +2024-11-11 22:48:09,423 Current Learning Rate: 0.0099901336 +2024-11-11 22:48:09,423 Train Loss: 0.0004305, Val Loss: 0.0005917 
+2024-11-11 22:48:09,424 Epoch 1997/2000 +2024-11-11 22:48:26,012 Current Learning Rate: 0.0099944494 +2024-11-11 22:48:26,013 Train Loss: 0.0004970, Val Loss: 0.0005895 +2024-11-11 22:48:26,013 Epoch 1998/2000 +2024-11-11 22:48:41,468 Current Learning Rate: 0.0099975328 +2024-11-11 22:48:41,468 Train Loss: 0.0004721, Val Loss: 0.0005880 +2024-11-11 22:48:41,469 Epoch 1999/2000 +2024-11-11 22:48:57,740 Current Learning Rate: 0.0099993832 +2024-11-11 22:48:57,742 Train Loss: 0.0004842, Val Loss: 0.0006773 +2024-11-11 22:48:57,743 Epoch 2000/2000 +2024-11-11 22:49:13,391 Current Learning Rate: 0.0100000000 +2024-11-11 22:49:13,391 Train Loss: 0.0005090, Val Loss: 0.0006428 +2024-11-11 22:49:16,988 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_multi_finetune_20250227_exp1_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_multi_finetune_20250227_exp1_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..b8a8c5831d8cbff30be3425eb15f6b3b0a3abd55 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_multi_finetune_20250227_exp1_training_log-checkpoint.log @@ -0,0 +1,12 @@ +2025-02-27 11:54:37,779 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:54:37,830 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:54:37,895 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:54:37,943 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:54:37,960 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:54:37,967 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:54:37,972 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:54:37,974 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:55:54,624 Error loading model checkpoint: Error(s) in 
loading state_dict for Triton_finetune: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.gamma_1", "temporal_evolution.enc.0.block.gamma_2", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.attn.qkv.weight", "temporal_evolution.enc.0.block.attn.qkv.bias", "temporal_evolution.enc.0.block.attn.proj.weight", "temporal_evolution.enc.0.block.attn.proj.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", 
"temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", 
"temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", 
"temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.gamma_1", "temporal_evolution.enc.7.block.gamma_2", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.attn.qkv.weight", "temporal_evolution.enc.7.block.attn.qkv.bias", "temporal_evolution.enc.7.block.attn.proj.weight", "temporal_evolution.enc.7.block.attn.proj.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". 
+2025-02-27 11:56:01,581 Epoch 1/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_multi_finetune_20250227_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_multi_finetune_20250227_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..3f0a08ee3f9135bd4b893cb82f0ad7db905fa284 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Triton_multi_finetune_20250227_training_log-checkpoint.log @@ -0,0 +1,59 @@ +2025-02-27 11:29:27,737 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:29:27,806 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:29:27,856 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:29:27,899 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:29:27,910 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:29:27,921 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:29:27,927 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:29:27,937 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:30:51,766 Epoch 1/2000 +2025-02-27 11:30:55,827 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,827 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,827 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. 
+2025-02-27 11:32:44,457 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:32:44,466 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:32:44,572 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:32:44,577 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:32:44,601 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:32:44,634 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:32:44,651 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:32:44,653 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:34:28,036 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:34:28,247 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:34:28,276 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:34:28,291 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:34:28,297 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:34:28,375 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:34:28,378 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:34:28,379 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:35:51,524 Epoch 1/2000 +2025-02-27 11:35:56,067 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,067 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,067 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. 
+2025-02-27 11:36:34,354 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:36:34,418 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:36:34,502 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:36:34,508 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:36:34,514 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:36:34,551 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:36:34,563 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:36:34,566 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:38:03,555 Epoch 1/2000 +2025-02-27 11:38:07,539 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,539 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. 
diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_U_net_exp2_20250226_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_U_net_exp2_20250226_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..5c5c65fdb7bf01327003f3b8d27209271ad4658e --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_U_net_exp2_20250226_training_log-checkpoint.log @@ -0,0 +1,23 @@ +2025-02-26 22:56:58,294 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-26 22:56:58,574 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-26 22:56:58,579 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-26 22:56:58,594 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-26 22:56:58,596 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-26 22:56:59,149 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-26 22:56:59,156 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-26 22:56:59,158 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-26 22:58:08,430 Epoch 1/2000 +2025-02-26 22:58:11,692 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,692 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. 
+2025-02-26 22:58:23,075 Current Learning Rate: 0.0099993832 +2025-02-26 22:58:23,286 Train Loss: 51.6283541, Val Loss: 25.5396109 +2025-02-26 22:58:23,286 Epoch 2/2000 +2025-02-26 22:58:37,420 Current Learning Rate: 0.0099975328 +2025-02-26 22:58:37,626 Train Loss: 1.2227646, Val Loss: 0.0883306 +2025-02-26 22:58:37,626 Epoch 3/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Unet_exp_128_20250324_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Unet_exp_128_20250324_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..c7315186ab785d848b3f2d2d6493eed23aa1d10b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Kuro_Unet_exp_128_20250324_training_log-checkpoint.log @@ -0,0 +1,21 @@ +2025-03-24 16:09:53,677 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-24 16:09:53,713 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-24 16:09:53,799 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-24 16:09:53,817 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-24 16:09:53,825 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-24 16:09:53,843 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-24 16:09:53,848 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-24 16:09:53,850 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-24 16:11:41,812 Epoch 1/2000 +2025-03-24 16:13:40,953 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-24 16:13:41,107 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-24 16:13:41,442 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-24 16:13:41,470 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-24 16:13:41,567 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-24 16:13:41,572 Added key: store_based_barrier_key:1 to 
store for rank: 6 +2025-03-24 16:13:41,579 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-24 16:13:41,583 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-24 16:15:00,073 Epoch 1/2000 +2025-03-24 16:17:11,997 Current Learning Rate: 0.0009999383 +2025-03-24 16:17:12,062 Train Loss: 0.0151061, Val Loss: 0.0102887 +2025-03-24 16:17:12,063 Epoch 2/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Triton_Gulf_uv_20250218_exp1_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Triton_Gulf_uv_20250218_exp1_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..cb1a2cfe7de2de9059a9b7a06a3b2ed561acdcfc --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Triton_Gulf_uv_20250218_exp1_training_log-checkpoint.log @@ -0,0 +1,215 @@ +2025-02-18 14:22:36,760 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 14:22:37,188 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 14:22:37,334 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 14:22:37,431 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 14:22:37,518 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 14:22:37,557 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 14:22:37,567 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 14:22:37,575 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 14:23:50,069 Epoch 1/2000 +2025-02-18 14:23:54,713 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:24:32,733 Current Learning Rate: 0.0099993832 +2025-02-18 14:24:33,650 Train Loss: 1.2713323, Val Loss: 0.1877115 +2025-02-18 14:24:33,650 Epoch 2/2000 +2025-02-18 14:25:15,291 Current Learning Rate: 0.0099975328 +2025-02-18 14:25:16,850 Train Loss: 0.0585225, Val Loss: 0.0228514 +2025-02-18 14:25:16,850 Epoch 3/2000 +2025-02-18 14:25:58,432 Current Learning Rate: 0.0099944494 +2025-02-18 14:26:00,215 Train Loss: 0.0233733, Val Loss: 0.0184301 +2025-02-18 14:26:00,215 Epoch 4/2000 +2025-02-18 14:26:41,860 Current Learning Rate: 0.0099901336 +2025-02-18 14:26:43,974 Train Loss: 0.0194500, Val Loss: 0.0175159 +2025-02-18 14:26:43,975 Epoch 5/2000 +2025-02-18 14:27:24,631 Current Learning Rate: 0.0099845867 +2025-02-18 14:27:26,016 Train Loss: 0.0187117, Val Loss: 0.0172202 +2025-02-18 14:27:26,017 Epoch 6/2000 +2025-02-18 14:28:07,928 Current Learning Rate: 0.0099778098 +2025-02-18 14:28:09,861 Train Loss: 0.0184526, Val Loss: 0.0170046 +2025-02-18 14:28:09,861 Epoch 7/2000 +2025-02-18 14:28:51,376 Current Learning Rate: 0.0099698048 +2025-02-18 14:28:52,975 Train Loss: 0.0182028, Val Loss: 0.0168065 +2025-02-18 14:28:52,976 Epoch 8/2000 +2025-02-18 14:29:34,082 Current Learning Rate: 0.0099605735 +2025-02-18 14:29:35,359 Train Loss: 0.0179927, Val Loss: 0.0166178 +2025-02-18 14:29:35,360 Epoch 9/2000 +2025-02-18 14:30:16,297 Current Learning Rate: 0.0099501183 +2025-02-18 14:30:17,559 Train Loss: 0.0178226, Val Loss: 0.0164603 +2025-02-18 14:30:17,560 Epoch 10/2000 +2025-02-18 14:30:58,714 Current Learning Rate: 0.0099384417 +2025-02-18 14:31:00,336 Train Loss: 0.0176944, Val Loss: 0.0163473 +2025-02-18 14:31:00,336 Epoch 11/2000 +2025-02-18 14:31:41,999 Current Learning Rate: 0.0099255466 +2025-02-18 14:31:44,022 Train 
Loss: 0.0175879, Val Loss: 0.0162707 +2025-02-18 14:31:44,023 Epoch 12/2000 +2025-02-18 14:32:25,628 Current Learning Rate: 0.0099114363 +2025-02-18 14:32:27,667 Train Loss: 0.0175217, Val Loss: 0.0162129 +2025-02-18 14:32:27,668 Epoch 13/2000 +2025-02-18 14:33:09,222 Current Learning Rate: 0.0098961141 +2025-02-18 14:33:10,517 Train Loss: 0.0174747, Val Loss: 0.0161656 +2025-02-18 14:33:10,517 Epoch 14/2000 +2025-02-18 14:33:51,665 Current Learning Rate: 0.0098795838 +2025-02-18 14:33:53,007 Train Loss: 0.0174301, Val Loss: 0.0161140 +2025-02-18 14:33:53,007 Epoch 15/2000 +2025-02-18 14:34:34,373 Current Learning Rate: 0.0098618496 +2025-02-18 14:34:36,111 Train Loss: 0.0173940, Val Loss: 0.0160846 +2025-02-18 14:34:36,111 Epoch 16/2000 +2025-02-18 14:35:17,348 Current Learning Rate: 0.0098429158 +2025-02-18 14:35:18,768 Train Loss: 0.0173408, Val Loss: 0.0160427 +2025-02-18 14:35:18,769 Epoch 17/2000 +2025-02-18 14:36:00,132 Current Learning Rate: 0.0098227871 +2025-02-18 14:36:01,307 Train Loss: 0.0172985, Val Loss: 0.0160008 +2025-02-18 14:36:01,307 Epoch 18/2000 +2025-02-18 14:36:42,841 Current Learning Rate: 0.0098014684 +2025-02-18 14:36:44,251 Train Loss: 0.0172659, Val Loss: 0.0159679 +2025-02-18 14:36:44,251 Epoch 19/2000 +2025-02-18 14:37:25,715 Current Learning Rate: 0.0097789651 +2025-02-18 14:37:26,689 Train Loss: 0.0172262, Val Loss: 0.0159340 +2025-02-18 14:37:26,689 Epoch 20/2000 +2025-02-18 14:38:07,787 Current Learning Rate: 0.0097552826 +2025-02-18 14:38:09,042 Train Loss: 0.0171959, Val Loss: 0.0159043 +2025-02-18 14:38:09,042 Epoch 21/2000 +2025-02-18 14:38:49,984 Current Learning Rate: 0.0097304268 +2025-02-18 14:38:51,221 Train Loss: 0.0171638, Val Loss: 0.0158699 +2025-02-18 14:38:51,221 Epoch 22/2000 +2025-02-18 14:39:32,212 Current Learning Rate: 0.0097044038 +2025-02-18 14:39:33,362 Train Loss: 0.0171223, Val Loss: 0.0158384 +2025-02-18 14:39:33,362 Epoch 23/2000 +2025-02-18 14:40:14,368 Current Learning Rate: 0.0096772202 +2025-02-18 
14:40:15,739 Train Loss: 0.0171020, Val Loss: 0.0158095 +2025-02-18 14:40:15,746 Epoch 24/2000 +2025-02-18 14:40:56,731 Current Learning Rate: 0.0096488824 +2025-02-18 14:40:58,747 Train Loss: 0.0170595, Val Loss: 0.0157796 +2025-02-18 14:40:58,748 Epoch 25/2000 +2025-02-18 14:41:39,442 Current Learning Rate: 0.0096193977 +2025-02-18 14:41:40,694 Train Loss: 0.0170211, Val Loss: 0.0157428 +2025-02-18 14:41:40,694 Epoch 26/2000 +2025-02-18 14:42:21,620 Current Learning Rate: 0.0095887731 +2025-02-18 14:42:22,913 Train Loss: 0.0169861, Val Loss: 0.0157121 +2025-02-18 14:42:22,913 Epoch 27/2000 +2025-02-18 14:43:03,771 Current Learning Rate: 0.0095570164 +2025-02-18 14:43:05,093 Train Loss: 0.0169497, Val Loss: 0.0156837 +2025-02-18 14:43:05,093 Epoch 28/2000 +2025-02-18 14:43:46,687 Current Learning Rate: 0.0095241353 +2025-02-18 14:43:48,396 Train Loss: 0.0169167, Val Loss: 0.0156643 +2025-02-18 14:43:48,397 Epoch 29/2000 +2025-02-18 14:44:29,936 Current Learning Rate: 0.0094901379 +2025-02-18 14:44:31,106 Train Loss: 0.0168896, Val Loss: 0.0156279 +2025-02-18 14:44:31,106 Epoch 30/2000 +2025-02-18 14:45:12,239 Current Learning Rate: 0.0094550326 +2025-02-18 14:45:13,514 Train Loss: 0.0168502, Val Loss: 0.0155942 +2025-02-18 14:45:13,515 Epoch 31/2000 +2025-02-18 14:45:54,930 Current Learning Rate: 0.0094188282 +2025-02-18 14:45:56,163 Train Loss: 0.0168141, Val Loss: 0.0155678 +2025-02-18 14:45:56,163 Epoch 32/2000 +2025-02-18 14:46:37,373 Current Learning Rate: 0.0093815334 +2025-02-18 14:46:38,786 Train Loss: 0.0167845, Val Loss: 0.0155434 +2025-02-18 14:46:38,786 Epoch 33/2000 +2025-02-18 14:47:19,546 Current Learning Rate: 0.0093431576 +2025-02-18 14:47:20,932 Train Loss: 0.0167530, Val Loss: 0.0155105 +2025-02-18 14:47:20,932 Epoch 34/2000 +2025-02-18 14:48:02,776 Current Learning Rate: 0.0093037101 +2025-02-18 14:48:04,571 Train Loss: 0.0167198, Val Loss: 0.0154825 +2025-02-18 14:48:04,571 Epoch 35/2000 +2025-02-18 14:48:45,155 Current Learning Rate: 
0.0092632008 +2025-02-18 14:48:46,210 Train Loss: 0.0166898, Val Loss: 0.0154535 +2025-02-18 14:48:46,210 Epoch 36/2000 +2025-02-18 14:49:27,239 Current Learning Rate: 0.0092216396 +2025-02-18 14:49:28,748 Train Loss: 0.0166454, Val Loss: 0.0154188 +2025-02-18 14:49:28,749 Epoch 37/2000 +2025-02-18 14:50:10,390 Current Learning Rate: 0.0091790368 +2025-02-18 14:50:11,909 Train Loss: 0.0166134, Val Loss: 0.0153854 +2025-02-18 14:50:11,909 Epoch 38/2000 +2025-02-18 14:50:53,496 Current Learning Rate: 0.0091354029 +2025-02-18 14:50:54,748 Train Loss: 0.0165783, Val Loss: 0.0153603 +2025-02-18 14:50:54,750 Epoch 39/2000 +2025-02-18 14:51:36,147 Current Learning Rate: 0.0090907486 +2025-02-18 14:51:37,582 Train Loss: 0.0165472, Val Loss: 0.0153303 +2025-02-18 14:51:37,586 Epoch 40/2000 +2025-02-18 14:52:18,635 Current Learning Rate: 0.0090450850 +2025-02-18 14:52:20,024 Train Loss: 0.0165110, Val Loss: 0.0153032 +2025-02-18 14:52:20,024 Epoch 41/2000 +2025-02-18 14:53:01,140 Current Learning Rate: 0.0089984233 +2025-02-18 14:53:03,447 Train Loss: 0.0164806, Val Loss: 0.0152741 +2025-02-18 14:53:03,447 Epoch 42/2000 +2025-02-18 14:53:44,081 Current Learning Rate: 0.0089507751 +2025-02-18 14:53:45,293 Train Loss: 0.0164557, Val Loss: 0.0152447 +2025-02-18 14:53:45,293 Epoch 43/2000 +2025-02-18 14:54:26,193 Current Learning Rate: 0.0089021520 +2025-02-18 14:54:27,342 Train Loss: 0.0164259, Val Loss: 0.0152163 +2025-02-18 14:54:27,344 Epoch 44/2000 +2025-02-18 14:55:08,637 Current Learning Rate: 0.0088525662 +2025-02-18 14:55:09,938 Train Loss: 0.0163895, Val Loss: 0.0151869 +2025-02-18 14:55:09,938 Epoch 45/2000 +2025-02-18 14:55:50,853 Current Learning Rate: 0.0088020298 +2025-02-18 14:55:51,904 Train Loss: 0.0163617, Val Loss: 0.0151578 +2025-02-18 14:55:51,904 Epoch 46/2000 +2025-02-18 14:56:33,361 Current Learning Rate: 0.0087505553 +2025-02-18 14:56:34,839 Train Loss: 0.0163289, Val Loss: 0.0151264 +2025-02-18 14:56:34,839 Epoch 47/2000 +2025-02-18 14:57:16,179 
Current Learning Rate: 0.0086981555 +2025-02-18 14:57:18,129 Train Loss: 0.0163045, Val Loss: 0.0150961 +2025-02-18 14:57:18,130 Epoch 48/2000 +2025-02-18 14:57:59,506 Current Learning Rate: 0.0086448431 +2025-02-18 14:58:00,920 Train Loss: 0.0162678, Val Loss: 0.0150781 +2025-02-18 14:58:00,935 Epoch 49/2000 +2025-02-18 14:58:42,715 Current Learning Rate: 0.0085906315 +2025-02-18 14:58:43,887 Train Loss: 0.0162393, Val Loss: 0.0150428 +2025-02-18 14:58:43,888 Epoch 50/2000 +2025-02-18 14:59:25,216 Current Learning Rate: 0.0085355339 +2025-02-18 14:59:26,516 Train Loss: 0.0161828, Val Loss: 0.0149828 +2025-02-18 14:59:26,518 Epoch 51/2000 +2025-02-18 15:00:07,929 Current Learning Rate: 0.0084795640 +2025-02-18 15:00:09,319 Train Loss: 0.0160165, Val Loss: 0.0146608 +2025-02-18 15:00:09,319 Epoch 52/2000 +2025-02-18 15:00:50,506 Current Learning Rate: 0.0084227355 +2025-02-18 15:00:51,877 Train Loss: 0.0154740, Val Loss: 0.0140274 +2025-02-18 15:00:51,877 Epoch 53/2000 +2025-02-18 15:01:32,604 Current Learning Rate: 0.0083650626 +2025-02-18 15:01:34,621 Train Loss: 0.0147387, Val Loss: 0.0131640 +2025-02-18 15:01:34,622 Epoch 54/2000 +2025-02-18 15:02:16,254 Current Learning Rate: 0.0083065593 +2025-02-18 15:02:17,701 Train Loss: 0.0138583, Val Loss: 0.0124248 +2025-02-18 15:02:17,703 Epoch 55/2000 +2025-02-18 15:02:59,285 Current Learning Rate: 0.0082472402 +2025-02-18 15:03:01,246 Train Loss: 0.0123703, Val Loss: 0.0105863 +2025-02-18 15:03:01,246 Epoch 56/2000 +2025-02-18 15:03:41,906 Current Learning Rate: 0.0081871199 +2025-02-18 15:03:43,092 Train Loss: 0.0110420, Val Loss: 0.0094930 +2025-02-18 15:03:43,093 Epoch 57/2000 +2025-02-18 15:04:24,696 Current Learning Rate: 0.0081262133 +2025-02-18 15:04:26,271 Train Loss: 0.0101265, Val Loss: 0.0088337 +2025-02-18 15:04:26,288 Epoch 58/2000 +2025-02-18 15:05:06,953 Current Learning Rate: 0.0080645353 +2025-02-18 15:05:08,107 Train Loss: 0.0093373, Val Loss: 0.0085233 +2025-02-18 15:05:08,107 Epoch 59/2000 
+2025-02-18 15:05:49,330 Current Learning Rate: 0.0080021011 +2025-02-18 15:05:50,968 Train Loss: 0.0090224, Val Loss: 0.0078853 +2025-02-18 15:05:50,968 Epoch 60/2000 +2025-02-18 15:06:32,030 Current Learning Rate: 0.0079389263 +2025-02-18 15:06:33,531 Train Loss: 0.0081421, Val Loss: 0.0070970 +2025-02-18 15:06:33,531 Epoch 61/2000 +2025-02-18 15:07:15,057 Current Learning Rate: 0.0078750263 +2025-02-18 15:07:16,865 Train Loss: 0.0078801, Val Loss: 0.0069114 +2025-02-18 15:07:16,866 Epoch 62/2000 +2025-02-18 15:07:58,385 Current Learning Rate: 0.0078104169 +2025-02-18 15:07:59,752 Train Loss: 0.0075112, Val Loss: 0.0065039 +2025-02-18 15:07:59,752 Epoch 63/2000 +2025-02-18 15:08:40,991 Current Learning Rate: 0.0077451141 +2025-02-18 15:08:42,710 Train Loss: 0.0069967, Val Loss: 0.0062146 +2025-02-18 15:08:42,711 Epoch 64/2000 +2025-02-18 15:09:23,984 Current Learning Rate: 0.0076791340 +2025-02-18 15:09:25,030 Train Loss: 0.0068503, Val Loss: 0.0061507 +2025-02-18 15:09:25,031 Epoch 65/2000 +2025-02-18 15:10:06,356 Current Learning Rate: 0.0076124928 +2025-02-18 15:10:07,746 Train Loss: 0.0064475, Val Loss: 0.0057761 +2025-02-18 15:10:07,747 Epoch 66/2000 +2025-02-18 15:10:48,867 Current Learning Rate: 0.0075452071 +2025-02-18 15:10:50,385 Train Loss: 0.0060769, Val Loss: 0.0054408 +2025-02-18 15:10:50,390 Epoch 67/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Triton_Kuroshio_uv_20250218_exp1_training_log-checkpoint.log b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Triton_Kuroshio_uv_20250218_exp1_training_log-checkpoint.log new file mode 100644 index 0000000000000000000000000000000000000000..9f2ccddcb3f95f09809e4645fe482ed98c74d14d --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/.ipynb_checkpoints/Triton_Kuroshio_uv_20250218_exp1_training_log-checkpoint.log @@ -0,0 +1,245 @@ +2025-02-18 14:15:03,208 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 14:15:03,302 Added key: store_based_barrier_key:1 to store for 
rank: 6 +2025-02-18 14:15:03,311 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 14:15:03,389 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 14:15:03,415 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 14:15:03,437 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 14:15:03,444 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 14:15:03,461 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 14:15:25,625 Epoch 1/2000 +2025-02-18 14:15:30,589 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,589 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 14:16:09,968 Current Learning Rate: 0.0099993832 +2025-02-18 14:16:11,879 Train Loss: 1.3628420, Val Loss: 0.1698331 +2025-02-18 14:16:11,879 Epoch 2/2000 +2025-02-18 14:16:54,193 Current Learning Rate: 0.0099975328 +2025-02-18 14:16:55,899 Train Loss: 0.0687663, Val Loss: 0.0357662 +2025-02-18 14:16:55,900 Epoch 3/2000 +2025-02-18 14:17:37,571 Current Learning Rate: 0.0099944494 +2025-02-18 14:17:39,033 Train Loss: 0.0274687, Val Loss: 0.0195614 +2025-02-18 14:17:39,036 Epoch 4/2000 +2025-02-18 14:18:20,798 Current Learning Rate: 0.0099901336 +2025-02-18 14:18:21,846 Train Loss: 0.0202074, Val Loss: 0.0180349 +2025-02-18 14:18:21,849 Epoch 5/2000 +2025-02-18 14:19:03,419 Current Learning Rate: 0.0099845867 +2025-02-18 14:19:04,787 Train Loss: 0.0193941, Val Loss: 0.0177117 +2025-02-18 14:19:04,788 Epoch 6/2000 +2025-02-18 14:19:47,265 Current Learning Rate: 0.0099778098 +2025-02-18 14:19:49,257 Train Loss: 0.0191047, Val Loss: 0.0175423 +2025-02-18 14:19:49,257 Epoch 7/2000 +2025-02-18 14:20:30,631 Current Learning Rate: 0.0099698048 +2025-02-18 14:20:32,449 Train Loss: 0.0189317, Val Loss: 0.0174137 +2025-02-18 14:20:32,449 Epoch 8/2000 +2025-02-18 14:21:14,244 Current Learning Rate: 0.0099605735 +2025-02-18 14:21:15,711 Train Loss: 0.0187991, Val Loss: 0.0173049 +2025-02-18 14:21:15,711 Epoch 9/2000 +2025-02-18 14:21:57,838 Current Learning Rate: 0.0099501183 +2025-02-18 14:21:59,767 Train Loss: 0.0186761, Val Loss: 0.0172068 +2025-02-18 14:21:59,767 Epoch 10/2000 +2025-02-18 14:22:42,174 Current Learning Rate: 0.0099384417 +2025-02-18 14:22:44,221 Train Loss: 0.0185759, Val Loss: 0.0171160 +2025-02-18 14:22:44,222 Epoch 11/2000 +2025-02-18 14:23:27,348 Current Learning Rate: 0.0099255466 +2025-02-18 14:23:29,500 Train Loss: 0.0184690, Val Loss: 0.0170284 +2025-02-18 14:23:29,501 Epoch 12/2000 +2025-02-18 14:24:12,728 Current Learning Rate: 0.0099114363 +2025-02-18 14:24:14,554 Train Loss: 0.0183741, Val Loss: 0.0169453 +2025-02-18 14:24:14,555 Epoch 
13/2000 +2025-02-18 14:24:57,049 Current Learning Rate: 0.0098961141 +2025-02-18 14:24:59,074 Train Loss: 0.0182787, Val Loss: 0.0168658 +2025-02-18 14:24:59,075 Epoch 14/2000 +2025-02-18 14:25:40,634 Current Learning Rate: 0.0098795838 +2025-02-18 14:25:42,153 Train Loss: 0.0181902, Val Loss: 0.0167882 +2025-02-18 14:25:42,154 Epoch 15/2000 +2025-02-18 14:26:24,721 Current Learning Rate: 0.0098618496 +2025-02-18 14:26:26,667 Train Loss: 0.0181102, Val Loss: 0.0167148 +2025-02-18 14:26:26,668 Epoch 16/2000 +2025-02-18 14:27:08,442 Current Learning Rate: 0.0098429158 +2025-02-18 14:27:09,549 Train Loss: 0.0180223, Val Loss: 0.0166435 +2025-02-18 14:27:09,562 Epoch 17/2000 +2025-02-18 14:27:51,189 Current Learning Rate: 0.0098227871 +2025-02-18 14:27:52,566 Train Loss: 0.0179441, Val Loss: 0.0165739 +2025-02-18 14:27:52,567 Epoch 18/2000 +2025-02-18 14:28:35,055 Current Learning Rate: 0.0098014684 +2025-02-18 14:28:37,323 Train Loss: 0.0178550, Val Loss: 0.0164465 +2025-02-18 14:28:37,323 Epoch 19/2000 +2025-02-18 14:29:19,996 Current Learning Rate: 0.0097789651 +2025-02-18 14:29:21,663 Train Loss: 0.0176387, Val Loss: 0.0162635 +2025-02-18 14:29:21,663 Epoch 20/2000 +2025-02-18 14:30:04,707 Current Learning Rate: 0.0097552826 +2025-02-18 14:30:06,649 Train Loss: 0.0174251, Val Loss: 0.0161026 +2025-02-18 14:30:06,650 Epoch 21/2000 +2025-02-18 14:30:48,728 Current Learning Rate: 0.0097304268 +2025-02-18 14:30:50,911 Train Loss: 0.0172205, Val Loss: 0.0159079 +2025-02-18 14:30:50,911 Epoch 22/2000 +2025-02-18 14:31:32,310 Current Learning Rate: 0.0097044038 +2025-02-18 14:31:34,096 Train Loss: 0.0169694, Val Loss: 0.0156002 +2025-02-18 14:31:34,096 Epoch 23/2000 +2025-02-18 14:32:15,744 Current Learning Rate: 0.0096772202 +2025-02-18 14:32:17,783 Train Loss: 0.0165015, Val Loss: 0.0151188 +2025-02-18 14:32:17,784 Epoch 24/2000 +2025-02-18 14:32:59,680 Current Learning Rate: 0.0096488824 +2025-02-18 14:33:00,999 Train Loss: 0.0158210, Val Loss: 0.0143949 +2025-02-18 
14:33:00,999 Epoch 25/2000 +2025-02-18 14:33:44,305 Current Learning Rate: 0.0096193977 +2025-02-18 14:33:46,362 Train Loss: 0.0150978, Val Loss: 0.0137453 +2025-02-18 14:33:46,362 Epoch 26/2000 +2025-02-18 14:34:27,838 Current Learning Rate: 0.0095887731 +2025-02-18 14:34:29,394 Train Loss: 0.0146063, Val Loss: 0.0133372 +2025-02-18 14:34:29,395 Epoch 27/2000 +2025-02-18 14:35:12,328 Current Learning Rate: 0.0095570164 +2025-02-18 14:35:14,297 Train Loss: 0.0141579, Val Loss: 0.0129903 +2025-02-18 14:35:14,297 Epoch 28/2000 +2025-02-18 14:35:56,478 Current Learning Rate: 0.0095241353 +2025-02-18 14:35:58,229 Train Loss: 0.0138481, Val Loss: 0.0127644 +2025-02-18 14:35:58,230 Epoch 29/2000 +2025-02-18 14:36:41,151 Current Learning Rate: 0.0094901379 +2025-02-18 14:36:42,490 Train Loss: 0.0135901, Val Loss: 0.0124894 +2025-02-18 14:36:42,490 Epoch 30/2000 +2025-02-18 14:37:25,197 Current Learning Rate: 0.0094550326 +2025-02-18 14:37:26,963 Train Loss: 0.0133189, Val Loss: 0.0121462 +2025-02-18 14:37:26,964 Epoch 31/2000 +2025-02-18 14:38:08,257 Current Learning Rate: 0.0094188282 +2025-02-18 14:38:09,109 Train Loss: 0.0128272, Val Loss: 0.0116701 +2025-02-18 14:38:09,110 Epoch 32/2000 +2025-02-18 14:38:52,114 Current Learning Rate: 0.0093815334 +2025-02-18 14:38:53,648 Train Loss: 0.0122802, Val Loss: 0.0112547 +2025-02-18 14:38:53,655 Epoch 33/2000 +2025-02-18 14:39:36,240 Current Learning Rate: 0.0093431576 +2025-02-18 14:39:37,614 Train Loss: 0.0117762, Val Loss: 0.0106204 +2025-02-18 14:39:37,614 Epoch 34/2000 +2025-02-18 14:40:19,881 Current Learning Rate: 0.0093037101 +2025-02-18 14:40:21,787 Train Loss: 0.0112313, Val Loss: 0.0101279 +2025-02-18 14:40:21,787 Epoch 35/2000 +2025-02-18 14:41:03,572 Current Learning Rate: 0.0092632008 +2025-02-18 14:41:05,186 Train Loss: 0.0108187, Val Loss: 0.0096379 +2025-02-18 14:41:05,187 Epoch 36/2000 +2025-02-18 14:41:48,547 Current Learning Rate: 0.0092216396 +2025-02-18 14:41:50,333 Train Loss: 0.0102309, Val Loss: 
0.0092521 +2025-02-18 14:41:50,333 Epoch 37/2000 +2025-02-18 14:42:32,710 Current Learning Rate: 0.0091790368 +2025-02-18 14:42:34,137 Train Loss: 0.0098315, Val Loss: 0.0088337 +2025-02-18 14:42:34,137 Epoch 38/2000 +2025-02-18 14:43:17,308 Current Learning Rate: 0.0091354029 +2025-02-18 14:43:18,961 Train Loss: 0.0093248, Val Loss: 0.0083890 +2025-02-18 14:43:18,961 Epoch 39/2000 +2025-02-18 14:44:01,556 Current Learning Rate: 0.0090907486 +2025-02-18 14:44:04,120 Train Loss: 0.0087695, Val Loss: 0.0080588 +2025-02-18 14:44:04,120 Epoch 40/2000 +2025-02-18 14:44:45,580 Current Learning Rate: 0.0090450850 +2025-02-18 14:44:47,208 Train Loss: 0.0081769, Val Loss: 0.0073063 +2025-02-18 14:44:47,210 Epoch 41/2000 +2025-02-18 14:45:29,054 Current Learning Rate: 0.0089984233 +2025-02-18 14:45:30,702 Train Loss: 0.0076267, Val Loss: 0.0069956 +2025-02-18 14:45:30,703 Epoch 42/2000 +2025-02-18 14:46:14,279 Current Learning Rate: 0.0089507751 +2025-02-18 14:46:14,280 Train Loss: 0.0076356, Val Loss: 0.0090246 +2025-02-18 14:46:14,281 Epoch 43/2000 +2025-02-18 14:46:57,793 Current Learning Rate: 0.0089021520 +2025-02-18 14:46:59,597 Train Loss: 0.0074178, Val Loss: 0.0064407 +2025-02-18 14:46:59,598 Epoch 44/2000 +2025-02-18 14:47:43,250 Current Learning Rate: 0.0088525662 +2025-02-18 14:47:45,064 Train Loss: 0.0068171, Val Loss: 0.0060696 +2025-02-18 14:47:45,064 Epoch 45/2000 +2025-02-18 14:48:27,630 Current Learning Rate: 0.0088020298 +2025-02-18 14:48:29,252 Train Loss: 0.0066029, Val Loss: 0.0059524 +2025-02-18 14:48:29,252 Epoch 46/2000 +2025-02-18 14:49:11,638 Current Learning Rate: 0.0087505553 +2025-02-18 14:49:13,527 Train Loss: 0.0065815, Val Loss: 0.0057815 +2025-02-18 14:49:13,528 Epoch 47/2000 +2025-02-18 14:49:56,170 Current Learning Rate: 0.0086981555 +2025-02-18 14:49:56,171 Train Loss: 0.0065231, Val Loss: 0.0061614 +2025-02-18 14:49:56,171 Epoch 48/2000 +2025-02-18 14:50:38,244 Current Learning Rate: 0.0086448431 +2025-02-18 14:50:39,788 Train Loss: 
0.0062545, Val Loss: 0.0056254 +2025-02-18 14:50:39,788 Epoch 49/2000 +2025-02-18 14:51:21,523 Current Learning Rate: 0.0085906315 +2025-02-18 14:51:23,034 Train Loss: 0.0062356, Val Loss: 0.0054859 +2025-02-18 14:51:23,034 Epoch 50/2000 +2025-02-18 14:52:04,824 Current Learning Rate: 0.0085355339 +2025-02-18 14:52:06,781 Train Loss: 0.0059626, Val Loss: 0.0053286 +2025-02-18 14:52:06,781 Epoch 51/2000 +2025-02-18 14:52:48,477 Current Learning Rate: 0.0084795640 +2025-02-18 14:52:50,316 Train Loss: 0.0060748, Val Loss: 0.0053129 +2025-02-18 14:52:50,316 Epoch 52/2000 +2025-02-18 14:53:33,232 Current Learning Rate: 0.0084227355 +2025-02-18 14:53:35,167 Train Loss: 0.0054856, Val Loss: 0.0050178 +2025-02-18 14:53:35,167 Epoch 53/2000 +2025-02-18 14:54:18,383 Current Learning Rate: 0.0083650626 +2025-02-18 14:54:20,319 Train Loss: 0.0056379, Val Loss: 0.0049119 +2025-02-18 14:54:20,319 Epoch 54/2000 +2025-02-18 14:55:03,575 Current Learning Rate: 0.0083065593 +2025-02-18 14:55:03,576 Train Loss: 0.0057683, Val Loss: 0.0053156 +2025-02-18 14:55:03,577 Epoch 55/2000 +2025-02-18 14:55:45,603 Current Learning Rate: 0.0082472402 +2025-02-18 14:55:46,977 Train Loss: 0.0053413, Val Loss: 0.0048144 +2025-02-18 14:55:46,978 Epoch 56/2000 +2025-02-18 14:56:28,648 Current Learning Rate: 0.0081871199 +2025-02-18 14:56:30,211 Train Loss: 0.0051032, Val Loss: 0.0047444 +2025-02-18 14:56:30,211 Epoch 57/2000 +2025-02-18 14:57:11,673 Current Learning Rate: 0.0081262133 +2025-02-18 14:57:13,453 Train Loss: 0.0049701, Val Loss: 0.0045479 +2025-02-18 14:57:13,453 Epoch 58/2000 +2025-02-18 14:57:54,813 Current Learning Rate: 0.0080645353 +2025-02-18 14:57:54,814 Train Loss: 0.0047992, Val Loss: 0.0050299 +2025-02-18 14:57:54,814 Epoch 59/2000 +2025-02-18 14:58:38,162 Current Learning Rate: 0.0080021011 +2025-02-18 14:58:40,130 Train Loss: 0.0049871, Val Loss: 0.0042919 +2025-02-18 14:58:40,131 Epoch 60/2000 +2025-02-18 14:59:21,637 Current Learning Rate: 0.0079389263 +2025-02-18 
14:59:21,638 Train Loss: 0.0046179, Val Loss: 0.0044405 +2025-02-18 14:59:21,638 Epoch 61/2000 +2025-02-18 15:00:04,326 Current Learning Rate: 0.0078750263 +2025-02-18 15:00:04,327 Train Loss: 0.0049569, Val Loss: 0.0044025 +2025-02-18 15:00:04,327 Epoch 62/2000 +2025-02-18 15:00:46,724 Current Learning Rate: 0.0078104169 +2025-02-18 15:00:46,724 Train Loss: 0.0047013, Val Loss: 0.0046409 +2025-02-18 15:00:46,724 Epoch 63/2000 +2025-02-18 15:01:29,451 Current Learning Rate: 0.0077451141 +2025-02-18 15:01:30,719 Train Loss: 0.0044193, Val Loss: 0.0039515 +2025-02-18 15:01:30,719 Epoch 64/2000 +2025-02-18 15:02:13,483 Current Learning Rate: 0.0076791340 +2025-02-18 15:02:13,484 Train Loss: 0.0046267, Val Loss: 0.0039819 +2025-02-18 15:02:13,485 Epoch 65/2000 +2025-02-18 15:02:55,725 Current Learning Rate: 0.0076124928 +2025-02-18 15:02:56,885 Train Loss: 0.0042735, Val Loss: 0.0039007 +2025-02-18 15:02:56,886 Epoch 66/2000 +2025-02-18 15:03:38,702 Current Learning Rate: 0.0075452071 +2025-02-18 15:03:40,434 Train Loss: 0.0040702, Val Loss: 0.0036998 +2025-02-18 15:03:40,435 Epoch 67/2000 +2025-02-18 15:04:21,978 Current Learning Rate: 0.0074772933 +2025-02-18 15:04:21,979 Train Loss: 0.0041402, Val Loss: 0.0043896 +2025-02-18 15:04:21,979 Epoch 68/2000 +2025-02-18 15:05:05,501 Current Learning Rate: 0.0074087684 +2025-02-18 15:05:05,502 Train Loss: 0.0040681, Val Loss: 0.0037126 +2025-02-18 15:05:05,502 Epoch 69/2000 +2025-02-18 15:05:47,627 Current Learning Rate: 0.0073396491 +2025-02-18 15:05:49,693 Train Loss: 0.0041826, Val Loss: 0.0035981 +2025-02-18 15:05:49,697 Epoch 70/2000 +2025-02-18 15:06:31,219 Current Learning Rate: 0.0072699525 +2025-02-18 15:06:33,095 Train Loss: 0.0039610, Val Loss: 0.0035859 +2025-02-18 15:06:33,096 Epoch 71/2000 +2025-02-18 15:07:16,333 Current Learning Rate: 0.0071996958 +2025-02-18 15:07:16,334 Train Loss: 0.0040461, Val Loss: 0.0038279 +2025-02-18 15:07:16,334 Epoch 72/2000 +2025-02-18 15:07:59,643 Current Learning Rate: 
0.0071288965 +2025-02-18 15:08:01,314 Train Loss: 0.0038196, Val Loss: 0.0034883 +2025-02-18 15:08:01,314 Epoch 73/2000 +2025-02-18 15:08:44,217 Current Learning Rate: 0.0070575718 +2025-02-18 15:08:44,218 Train Loss: 0.0037394, Val Loss: 0.0035490 +2025-02-18 15:08:44,219 Epoch 74/2000 +2025-02-18 15:09:26,810 Current Learning Rate: 0.0069857395 +2025-02-18 15:09:26,811 Train Loss: 0.0039330, Val Loss: 0.0036724 +2025-02-18 15:09:26,821 Epoch 75/2000 +2025-02-18 15:10:09,949 Current Learning Rate: 0.0069134172 +2025-02-18 15:10:09,950 Train Loss: 0.0036523, Val Loss: 0.0037484 +2025-02-18 15:10:09,950 Epoch 76/2000 +2025-02-18 15:10:52,358 Current Learning Rate: 0.0068406228 +2025-02-18 15:10:53,681 Train Loss: 0.0038443, Val Loss: 0.0034668 +2025-02-18 15:10:53,690 Epoch 77/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_ConvLSTM_exp1_20250311_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_ConvLSTM_exp1_20250311_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..103ab0f8efc4580a198539a99d1517de8cacdcc9 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_ConvLSTM_exp1_20250311_training_log.log @@ -0,0 +1,6041 @@ +2025-03-11 15:48:06,285 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:48:06,312 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:48:06,374 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:48:06,406 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:48:06,446 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:48:06,456 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:48:06,469 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:48:06,485 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:49:54,995 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:49:55,087 Added key: store_based_barrier_key:1 to 
store for rank: 2 +2025-03-11 15:49:55,148 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:49:55,169 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:49:55,179 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:49:55,212 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:49:55,222 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:49:55,225 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:51:02,624 Epoch 1/2000 +2025-03-11 15:51:56,298 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:51:56,355 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:51:56,375 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:51:56,413 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:51:56,421 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:51:56,468 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:51:56,503 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:51:56,508 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:53:00,562 Epoch 1/2000 +2025-03-11 15:53:48,745 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-11 15:53:48,752 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-11 15:53:48,815 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-11 15:53:48,854 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-11 15:53:48,862 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-11 15:53:48,871 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-11 15:53:48,889 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-11 15:53:48,894 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 15:54:58,059 Epoch 1/2000 +2025-03-11 15:55:24,972 Current Learning 
Rate: 0.0009999383 +2025-03-11 15:55:24,977 Train Loss: 0.0447664, Val Loss: 0.0434475 +2025-03-11 15:55:24,977 Epoch 2/2000 +2025-03-11 15:55:49,611 Current Learning Rate: 0.0009997533 +2025-03-11 15:55:49,615 Train Loss: 0.0386358, Val Loss: 0.0325592 +2025-03-11 15:55:49,615 Epoch 3/2000 +2025-03-11 15:56:14,794 Current Learning Rate: 0.0009994449 +2025-03-11 15:56:14,798 Train Loss: 0.0303802, Val Loss: 0.0274663 +2025-03-11 15:56:14,798 Epoch 4/2000 +2025-03-11 15:56:39,684 Current Learning Rate: 0.0009990134 +2025-03-11 15:56:39,688 Train Loss: 0.0275699, Val Loss: 0.0260743 +2025-03-11 15:56:39,689 Epoch 5/2000 +2025-03-11 15:57:04,868 Current Learning Rate: 0.0009984587 +2025-03-11 15:57:04,872 Train Loss: 0.0265814, Val Loss: 0.0253554 +2025-03-11 15:57:04,872 Epoch 6/2000 +2025-03-11 15:57:29,438 Current Learning Rate: 0.0009977810 +2025-03-11 15:57:29,442 Train Loss: 0.0259260, Val Loss: 0.0247812 +2025-03-11 15:57:29,442 Epoch 7/2000 +2025-03-11 15:57:54,888 Current Learning Rate: 0.0009969805 +2025-03-11 15:57:54,895 Train Loss: 0.0253396, Val Loss: 0.0242641 +2025-03-11 15:57:54,895 Epoch 8/2000 +2025-03-11 15:58:19,613 Current Learning Rate: 0.0009960574 +2025-03-11 15:58:19,624 Train Loss: 0.0247924, Val Loss: 0.0236479 +2025-03-11 15:58:19,625 Epoch 9/2000 +2025-03-11 15:58:44,722 Current Learning Rate: 0.0009950118 +2025-03-11 15:58:44,726 Train Loss: 0.0239551, Val Loss: 0.0228223 +2025-03-11 15:58:44,726 Epoch 10/2000 +2025-03-11 15:59:09,396 Current Learning Rate: 0.0009938442 +2025-03-11 15:59:09,401 Train Loss: 0.0232273, Val Loss: 0.0221983 +2025-03-11 15:59:09,401 Epoch 11/2000 +2025-03-11 15:59:34,601 Current Learning Rate: 0.0009925547 +2025-03-11 15:59:34,605 Train Loss: 0.0225332, Val Loss: 0.0214887 +2025-03-11 15:59:34,605 Epoch 12/2000 +2025-03-11 15:59:59,702 Current Learning Rate: 0.0009911436 +2025-03-11 15:59:59,707 Train Loss: 0.0218435, Val Loss: 0.0209132 +2025-03-11 15:59:59,707 Epoch 13/2000 +2025-03-11 16:00:24,916 Current 
Learning Rate: 0.0009896114 +2025-03-11 16:00:24,920 Train Loss: 0.0213147, Val Loss: 0.0204538 +2025-03-11 16:00:24,920 Epoch 14/2000 +2025-03-11 16:00:50,062 Current Learning Rate: 0.0009879584 +2025-03-11 16:00:50,067 Train Loss: 0.0208749, Val Loss: 0.0201162 +2025-03-11 16:00:50,067 Epoch 15/2000 +2025-03-11 16:01:15,734 Current Learning Rate: 0.0009861850 +2025-03-11 16:01:15,738 Train Loss: 0.0205117, Val Loss: 0.0197665 +2025-03-11 16:01:15,738 Epoch 16/2000 +2025-03-11 16:01:40,745 Current Learning Rate: 0.0009842916 +2025-03-11 16:01:40,750 Train Loss: 0.0202093, Val Loss: 0.0195006 +2025-03-11 16:01:40,750 Epoch 17/2000 +2025-03-11 16:02:06,449 Current Learning Rate: 0.0009822787 +2025-03-11 16:02:06,452 Train Loss: 0.0199631, Val Loss: 0.0192863 +2025-03-11 16:02:06,452 Epoch 18/2000 +2025-03-11 16:02:31,898 Current Learning Rate: 0.0009801468 +2025-03-11 16:02:31,902 Train Loss: 0.0197492, Val Loss: 0.0190900 +2025-03-11 16:02:31,902 Epoch 19/2000 +2025-03-11 16:02:56,815 Current Learning Rate: 0.0009778965 +2025-03-11 16:02:56,824 Train Loss: 0.0195674, Val Loss: 0.0189211 +2025-03-11 16:02:56,824 Epoch 20/2000 +2025-03-11 16:03:21,782 Current Learning Rate: 0.0009755283 +2025-03-11 16:03:21,785 Train Loss: 0.0194144, Val Loss: 0.0187942 +2025-03-11 16:03:21,785 Epoch 21/2000 +2025-03-11 16:03:47,079 Current Learning Rate: 0.0009730427 +2025-03-11 16:03:47,084 Train Loss: 0.0192578, Val Loss: 0.0186317 +2025-03-11 16:03:47,084 Epoch 22/2000 +2025-03-11 16:04:12,530 Current Learning Rate: 0.0009704404 +2025-03-11 16:04:12,566 Train Loss: 0.0191376, Val Loss: 0.0185623 +2025-03-11 16:04:12,566 Epoch 23/2000 +2025-03-11 16:04:37,457 Current Learning Rate: 0.0009677220 +2025-03-11 16:04:37,461 Train Loss: 0.0190216, Val Loss: 0.0184137 +2025-03-11 16:04:37,461 Epoch 24/2000 +2025-03-11 16:05:02,411 Current Learning Rate: 0.0009648882 +2025-03-11 16:05:02,741 Train Loss: 0.0189171, Val Loss: 0.0183151 +2025-03-11 16:05:02,741 Epoch 25/2000 +2025-03-11 
16:05:27,140 Current Learning Rate: 0.0009619398 +2025-03-11 16:05:27,144 Train Loss: 0.0188252, Val Loss: 0.0182417 +2025-03-11 16:05:27,144 Epoch 26/2000 +2025-03-11 16:05:51,716 Current Learning Rate: 0.0009588773 +2025-03-11 16:05:51,720 Train Loss: 0.0187419, Val Loss: 0.0181548 +2025-03-11 16:05:51,720 Epoch 27/2000 +2025-03-11 16:06:16,436 Current Learning Rate: 0.0009557016 +2025-03-11 16:06:16,440 Train Loss: 0.0186573, Val Loss: 0.0180882 +2025-03-11 16:06:16,440 Epoch 28/2000 +2025-03-11 16:06:41,339 Current Learning Rate: 0.0009524135 +2025-03-11 16:06:41,343 Train Loss: 0.0185991, Val Loss: 0.0180147 +2025-03-11 16:06:41,343 Epoch 29/2000 +2025-03-11 16:07:06,050 Current Learning Rate: 0.0009490138 +2025-03-11 16:07:06,053 Train Loss: 0.0185222, Val Loss: 0.0179693 +2025-03-11 16:07:06,054 Epoch 30/2000 +2025-03-11 16:07:31,443 Current Learning Rate: 0.0009455033 +2025-03-11 16:07:31,447 Train Loss: 0.0184642, Val Loss: 0.0178888 +2025-03-11 16:07:31,447 Epoch 31/2000 +2025-03-11 16:07:56,355 Current Learning Rate: 0.0009418828 +2025-03-11 16:07:56,359 Train Loss: 0.0184048, Val Loss: 0.0178588 +2025-03-11 16:07:56,359 Epoch 32/2000 +2025-03-11 16:08:21,381 Current Learning Rate: 0.0009381533 +2025-03-11 16:08:21,385 Train Loss: 0.0183540, Val Loss: 0.0177893 +2025-03-11 16:08:21,385 Epoch 33/2000 +2025-03-11 16:08:46,131 Current Learning Rate: 0.0009343158 +2025-03-11 16:08:46,135 Train Loss: 0.0183024, Val Loss: 0.0177683 +2025-03-11 16:08:46,135 Epoch 34/2000 +2025-03-11 16:09:11,056 Current Learning Rate: 0.0009303710 +2025-03-11 16:09:11,060 Train Loss: 0.0182531, Val Loss: 0.0177345 +2025-03-11 16:09:11,061 Epoch 35/2000 +2025-03-11 16:09:36,718 Current Learning Rate: 0.0009263201 +2025-03-11 16:09:36,722 Train Loss: 0.0182183, Val Loss: 0.0176621 +2025-03-11 16:09:36,722 Epoch 36/2000 +2025-03-11 16:10:01,892 Current Learning Rate: 0.0009221640 +2025-03-11 16:10:02,187 Train Loss: 0.0181683, Val Loss: 0.0176228 +2025-03-11 16:10:02,188 Epoch 
37/2000 +2025-03-11 16:10:26,732 Current Learning Rate: 0.0009179037 +2025-03-11 16:10:26,736 Train Loss: 0.0181288, Val Loss: 0.0175781 +2025-03-11 16:10:26,737 Epoch 38/2000 +2025-03-11 16:10:51,362 Current Learning Rate: 0.0009135403 +2025-03-11 16:10:51,365 Train Loss: 0.0180904, Val Loss: 0.0175399 +2025-03-11 16:10:51,365 Epoch 39/2000 +2025-03-11 16:11:16,708 Current Learning Rate: 0.0009090749 +2025-03-11 16:11:16,713 Train Loss: 0.0180728, Val Loss: 0.0175278 +2025-03-11 16:11:16,713 Epoch 40/2000 +2025-03-11 16:11:42,049 Current Learning Rate: 0.0009045085 +2025-03-11 16:11:42,056 Train Loss: 0.0180146, Val Loss: 0.0174770 +2025-03-11 16:11:42,056 Epoch 41/2000 +2025-03-11 16:12:06,620 Current Learning Rate: 0.0008998423 +2025-03-11 16:12:06,623 Train Loss: 0.0179906, Val Loss: 0.0174511 +2025-03-11 16:12:06,624 Epoch 42/2000 +2025-03-11 16:12:31,549 Current Learning Rate: 0.0008950775 +2025-03-11 16:12:31,554 Train Loss: 0.0179599, Val Loss: 0.0174169 +2025-03-11 16:12:31,554 Epoch 43/2000 +2025-03-11 16:12:56,015 Current Learning Rate: 0.0008902152 +2025-03-11 16:12:56,019 Train Loss: 0.0179318, Val Loss: 0.0173917 +2025-03-11 16:12:56,019 Epoch 44/2000 +2025-03-11 16:13:20,632 Current Learning Rate: 0.0008852566 +2025-03-11 16:13:20,639 Train Loss: 0.0179020, Val Loss: 0.0173675 +2025-03-11 16:13:20,639 Epoch 45/2000 +2025-03-11 16:13:45,426 Current Learning Rate: 0.0008802030 +2025-03-11 16:13:45,431 Train Loss: 0.0178811, Val Loss: 0.0173434 +2025-03-11 16:13:45,431 Epoch 46/2000 +2025-03-11 16:14:11,110 Current Learning Rate: 0.0008750555 +2025-03-11 16:14:11,115 Train Loss: 0.0178473, Val Loss: 0.0173154 +2025-03-11 16:14:11,115 Epoch 47/2000 +2025-03-11 16:14:36,241 Current Learning Rate: 0.0008698155 +2025-03-11 16:14:36,244 Train Loss: 0.0178314, Val Loss: 0.0173031 +2025-03-11 16:14:36,244 Epoch 48/2000 +2025-03-11 16:15:01,420 Current Learning Rate: 0.0008644843 +2025-03-11 16:15:02,158 Train Loss: 0.0177952, Val Loss: 0.0172821 +2025-03-11 
16:15:02,158 Epoch 49/2000 +2025-03-11 16:15:26,863 Current Learning Rate: 0.0008590631 +2025-03-11 16:15:26,867 Train Loss: 0.0177792, Val Loss: 0.0172523 +2025-03-11 16:15:26,868 Epoch 50/2000 +2025-03-11 16:15:51,659 Current Learning Rate: 0.0008535534 +2025-03-11 16:15:51,662 Train Loss: 0.0177550, Val Loss: 0.0172275 +2025-03-11 16:15:51,662 Epoch 51/2000 +2025-03-11 16:16:17,039 Current Learning Rate: 0.0008479564 +2025-03-11 16:16:17,044 Train Loss: 0.0177348, Val Loss: 0.0172100 +2025-03-11 16:16:17,044 Epoch 52/2000 +2025-03-11 16:16:41,974 Current Learning Rate: 0.0008422736 +2025-03-11 16:16:41,978 Train Loss: 0.0177119, Val Loss: 0.0171877 +2025-03-11 16:16:41,979 Epoch 53/2000 +2025-03-11 16:17:06,855 Current Learning Rate: 0.0008365063 +2025-03-11 16:17:06,859 Train Loss: 0.0176936, Val Loss: 0.0171713 +2025-03-11 16:17:06,860 Epoch 54/2000 +2025-03-11 16:17:31,733 Current Learning Rate: 0.0008306559 +2025-03-11 16:17:31,738 Train Loss: 0.0176714, Val Loss: 0.0171498 +2025-03-11 16:17:31,738 Epoch 55/2000 +2025-03-11 16:17:56,661 Current Learning Rate: 0.0008247240 +2025-03-11 16:17:56,671 Train Loss: 0.0176546, Val Loss: 0.0171346 +2025-03-11 16:17:56,671 Epoch 56/2000 +2025-03-11 16:18:22,448 Current Learning Rate: 0.0008187120 +2025-03-11 16:18:22,451 Train Loss: 0.0176333, Val Loss: 0.0171140 +2025-03-11 16:18:22,452 Epoch 57/2000 +2025-03-11 16:18:47,296 Current Learning Rate: 0.0008126213 +2025-03-11 16:18:47,300 Train Loss: 0.0176179, Val Loss: 0.0170998 +2025-03-11 16:18:47,300 Epoch 58/2000 +2025-03-11 16:19:12,278 Current Learning Rate: 0.0008064535 +2025-03-11 16:19:12,282 Train Loss: 0.0175970, Val Loss: 0.0170804 +2025-03-11 16:19:12,283 Epoch 59/2000 +2025-03-11 16:19:37,386 Current Learning Rate: 0.0008002101 +2025-03-11 16:19:37,389 Train Loss: 0.0175826, Val Loss: 0.0170670 +2025-03-11 16:19:37,390 Epoch 60/2000 +2025-03-11 16:20:02,400 Current Learning Rate: 0.0007938926 +2025-03-11 16:20:02,727 Train Loss: 0.0175629, Val Loss: 
0.0170488 +2025-03-11 16:20:02,728 Epoch 61/2000 +2025-03-11 16:20:27,307 Current Learning Rate: 0.0007875026 +2025-03-11 16:20:27,311 Train Loss: 0.0175487, Val Loss: 0.0170358 +2025-03-11 16:20:27,311 Epoch 62/2000 +2025-03-11 16:20:51,796 Current Learning Rate: 0.0007810417 +2025-03-11 16:20:51,800 Train Loss: 0.0175305, Val Loss: 0.0170184 +2025-03-11 16:20:51,800 Epoch 63/2000 +2025-03-11 16:21:16,579 Current Learning Rate: 0.0007745114 +2025-03-11 16:21:16,583 Train Loss: 0.0175165, Val Loss: 0.0170056 +2025-03-11 16:21:16,584 Epoch 64/2000 +2025-03-11 16:21:41,619 Current Learning Rate: 0.0007679134 +2025-03-11 16:21:41,623 Train Loss: 0.0174993, Val Loss: 0.0169898 +2025-03-11 16:21:41,623 Epoch 65/2000 +2025-03-11 16:22:06,464 Current Learning Rate: 0.0007612493 +2025-03-11 16:22:06,468 Train Loss: 0.0174853, Val Loss: 0.0169761 +2025-03-11 16:22:06,469 Epoch 66/2000 +2025-03-11 16:22:31,145 Current Learning Rate: 0.0007545207 +2025-03-11 16:22:31,149 Train Loss: 0.0174697, Val Loss: 0.0169613 +2025-03-11 16:22:31,149 Epoch 67/2000 +2025-03-11 16:22:55,533 Current Learning Rate: 0.0007477293 +2025-03-11 16:22:55,537 Train Loss: 0.0174557, Val Loss: 0.0169480 +2025-03-11 16:22:55,538 Epoch 68/2000 +2025-03-11 16:23:20,139 Current Learning Rate: 0.0007408768 +2025-03-11 16:23:20,142 Train Loss: 0.0174408, Val Loss: 0.0169347 +2025-03-11 16:23:20,142 Epoch 69/2000 +2025-03-11 16:23:44,416 Current Learning Rate: 0.0007339649 +2025-03-11 16:23:44,419 Train Loss: 0.0174269, Val Loss: 0.0169215 +2025-03-11 16:23:44,419 Epoch 70/2000 +2025-03-11 16:24:08,781 Current Learning Rate: 0.0007269952 +2025-03-11 16:24:08,787 Train Loss: 0.0174130, Val Loss: 0.0169099 +2025-03-11 16:24:08,787 Epoch 71/2000 +2025-03-11 16:24:33,801 Current Learning Rate: 0.0007199696 +2025-03-11 16:24:33,804 Train Loss: 0.0173993, Val Loss: 0.0169035 +2025-03-11 16:24:33,804 Epoch 72/2000 +2025-03-11 16:24:58,215 Current Learning Rate: 0.0007128896 +2025-03-11 16:24:58,216 Train Loss: 
0.0173839, Val Loss: 0.0169088 +2025-03-11 16:24:58,216 Epoch 73/2000 +2025-03-11 16:25:22,444 Current Learning Rate: 0.0007057572 +2025-03-11 16:25:22,448 Train Loss: 0.0173750, Val Loss: 0.0168899 +2025-03-11 16:25:22,448 Epoch 74/2000 +2025-03-11 16:25:46,961 Current Learning Rate: 0.0006985739 +2025-03-11 16:25:46,986 Train Loss: 0.0173597, Val Loss: 0.0168839 +2025-03-11 16:25:46,986 Epoch 75/2000 +2025-03-11 16:26:11,558 Current Learning Rate: 0.0006913417 +2025-03-11 16:26:11,561 Train Loss: 0.0173469, Val Loss: 0.0168723 +2025-03-11 16:26:11,561 Epoch 76/2000 +2025-03-11 16:26:36,618 Current Learning Rate: 0.0006840623 +2025-03-11 16:26:36,621 Train Loss: 0.0173353, Val Loss: 0.0168612 +2025-03-11 16:26:36,621 Epoch 77/2000 +2025-03-11 16:27:01,685 Current Learning Rate: 0.0006767374 +2025-03-11 16:27:02,332 Train Loss: 0.0173234, Val Loss: 0.0168510 +2025-03-11 16:27:02,333 Epoch 78/2000 +2025-03-11 16:27:27,098 Current Learning Rate: 0.0006693690 +2025-03-11 16:27:27,102 Train Loss: 0.0173119, Val Loss: 0.0168416 +2025-03-11 16:27:27,103 Epoch 79/2000 +2025-03-11 16:27:52,164 Current Learning Rate: 0.0006619587 +2025-03-11 16:27:52,169 Train Loss: 0.0173005, Val Loss: 0.0168324 +2025-03-11 16:27:52,169 Epoch 80/2000 +2025-03-11 16:28:17,863 Current Learning Rate: 0.0006545085 +2025-03-11 16:28:17,867 Train Loss: 0.0172895, Val Loss: 0.0168240 +2025-03-11 16:28:17,867 Epoch 81/2000 +2025-03-11 16:28:43,088 Current Learning Rate: 0.0006470202 +2025-03-11 16:28:43,092 Train Loss: 0.0172788, Val Loss: 0.0168161 +2025-03-11 16:28:43,093 Epoch 82/2000 +2025-03-11 16:29:07,556 Current Learning Rate: 0.0006394956 +2025-03-11 16:29:07,560 Train Loss: 0.0172683, Val Loss: 0.0168093 +2025-03-11 16:29:07,560 Epoch 83/2000 +2025-03-11 16:29:32,441 Current Learning Rate: 0.0006319365 +2025-03-11 16:29:32,447 Train Loss: 0.0172580, Val Loss: 0.0168024 +2025-03-11 16:29:32,447 Epoch 84/2000 +2025-03-11 16:29:57,285 Current Learning Rate: 0.0006243449 +2025-03-11 
16:29:57,289 Train Loss: 0.0172483, Val Loss: 0.0167952 +2025-03-11 16:29:57,289 Epoch 85/2000 +2025-03-11 16:30:22,153 Current Learning Rate: 0.0006167227 +2025-03-11 16:30:22,156 Train Loss: 0.0172382, Val Loss: 0.0167849 +2025-03-11 16:30:22,157 Epoch 86/2000 +2025-03-11 16:30:46,585 Current Learning Rate: 0.0006090716 +2025-03-11 16:30:46,589 Train Loss: 0.0172291, Val Loss: 0.0167755 +2025-03-11 16:30:46,589 Epoch 87/2000 +2025-03-11 16:31:11,516 Current Learning Rate: 0.0006013936 +2025-03-11 16:31:11,520 Train Loss: 0.0172192, Val Loss: 0.0167661 +2025-03-11 16:31:11,520 Epoch 88/2000 +2025-03-11 16:31:36,328 Current Learning Rate: 0.0005936907 +2025-03-11 16:31:36,331 Train Loss: 0.0172110, Val Loss: 0.0167563 +2025-03-11 16:31:36,331 Epoch 89/2000 +2025-03-11 16:32:01,003 Current Learning Rate: 0.0005859646 +2025-03-11 16:32:01,007 Train Loss: 0.0172011, Val Loss: 0.0167481 +2025-03-11 16:32:01,007 Epoch 90/2000 +2025-03-11 16:32:26,056 Current Learning Rate: 0.0005782172 +2025-03-11 16:32:26,060 Train Loss: 0.0171939, Val Loss: 0.0167364 +2025-03-11 16:32:26,060 Epoch 91/2000 +2025-03-11 16:32:50,878 Current Learning Rate: 0.0005704506 +2025-03-11 16:32:50,930 Train Loss: 0.0171840, Val Loss: 0.0167288 +2025-03-11 16:32:50,930 Epoch 92/2000 +2025-03-11 16:33:15,672 Current Learning Rate: 0.0005626666 +2025-03-11 16:33:15,676 Train Loss: 0.0171774, Val Loss: 0.0167131 +2025-03-11 16:33:15,676 Epoch 93/2000 +2025-03-11 16:33:40,197 Current Learning Rate: 0.0005548672 +2025-03-11 16:33:40,200 Train Loss: 0.0171673, Val Loss: 0.0167076 +2025-03-11 16:33:40,201 Epoch 94/2000 +2025-03-11 16:34:05,051 Current Learning Rate: 0.0005470542 +2025-03-11 16:34:05,054 Train Loss: 0.0171612, Val Loss: 0.0166901 +2025-03-11 16:34:05,055 Epoch 95/2000 +2025-03-11 16:34:29,455 Current Learning Rate: 0.0005392295 +2025-03-11 16:34:29,459 Train Loss: 0.0171512, Val Loss: 0.0166895 +2025-03-11 16:34:29,460 Epoch 96/2000 +2025-03-11 16:34:53,970 Current Learning Rate: 
0.0005313953 +2025-03-11 16:34:53,973 Train Loss: 0.0171454, Val Loss: 0.0166715 +2025-03-11 16:34:53,974 Epoch 97/2000 +2025-03-11 16:35:18,923 Current Learning Rate: 0.0005235532 +2025-03-11 16:35:18,924 Train Loss: 0.0171361, Val Loss: 0.0166730 +2025-03-11 16:35:18,925 Epoch 98/2000 +2025-03-11 16:35:44,427 Current Learning Rate: 0.0005157054 +2025-03-11 16:35:44,432 Train Loss: 0.0171300, Val Loss: 0.0166569 +2025-03-11 16:35:44,433 Epoch 99/2000 +2025-03-11 16:36:09,762 Current Learning Rate: 0.0005078537 +2025-03-11 16:36:09,766 Train Loss: 0.0171217, Val Loss: 0.0166553 +2025-03-11 16:36:09,766 Epoch 100/2000 +2025-03-11 16:36:34,617 Current Learning Rate: 0.0005000000 +2025-03-11 16:36:34,620 Train Loss: 0.0171150, Val Loss: 0.0166431 +2025-03-11 16:36:34,620 Epoch 101/2000 +2025-03-11 16:37:00,029 Current Learning Rate: 0.0004921463 +2025-03-11 16:37:00,033 Train Loss: 0.0171076, Val Loss: 0.0166370 +2025-03-11 16:37:00,033 Epoch 102/2000 +2025-03-11 16:37:25,178 Current Learning Rate: 0.0004842946 +2025-03-11 16:37:25,182 Train Loss: 0.0171006, Val Loss: 0.0166279 +2025-03-11 16:37:25,182 Epoch 103/2000 +2025-03-11 16:37:50,444 Current Learning Rate: 0.0004764468 +2025-03-11 16:37:50,448 Train Loss: 0.0170937, Val Loss: 0.0166199 +2025-03-11 16:37:50,449 Epoch 104/2000 +2025-03-11 16:38:14,943 Current Learning Rate: 0.0004686047 +2025-03-11 16:38:14,947 Train Loss: 0.0170869, Val Loss: 0.0166118 +2025-03-11 16:38:14,947 Epoch 105/2000 +2025-03-11 16:38:39,767 Current Learning Rate: 0.0004607705 +2025-03-11 16:38:39,770 Train Loss: 0.0170802, Val Loss: 0.0166038 +2025-03-11 16:38:39,771 Epoch 106/2000 +2025-03-11 16:39:05,231 Current Learning Rate: 0.0004529458 +2025-03-11 16:39:05,235 Train Loss: 0.0170737, Val Loss: 0.0165961 +2025-03-11 16:39:05,235 Epoch 107/2000 +2025-03-11 16:39:30,356 Current Learning Rate: 0.0004451328 +2025-03-11 16:39:30,359 Train Loss: 0.0170672, Val Loss: 0.0165885 +2025-03-11 16:39:30,359 Epoch 108/2000 +2025-03-11 
16:39:54,985 Current Learning Rate: 0.0004373334 +2025-03-11 16:39:54,989 Train Loss: 0.0170607, Val Loss: 0.0165811 +2025-03-11 16:39:54,989 Epoch 109/2000 +2025-03-11 16:40:19,914 Current Learning Rate: 0.0004295494 +2025-03-11 16:40:19,918 Train Loss: 0.0170544, Val Loss: 0.0165740 +2025-03-11 16:40:19,918 Epoch 110/2000 +2025-03-11 16:40:44,967 Current Learning Rate: 0.0004217828 +2025-03-11 16:40:44,971 Train Loss: 0.0170482, Val Loss: 0.0165670 +2025-03-11 16:40:44,971 Epoch 111/2000 +2025-03-11 16:41:09,758 Current Learning Rate: 0.0004140354 +2025-03-11 16:41:09,761 Train Loss: 0.0170420, Val Loss: 0.0165604 +2025-03-11 16:41:09,762 Epoch 112/2000 +2025-03-11 16:41:35,167 Current Learning Rate: 0.0004063093 +2025-03-11 16:41:35,171 Train Loss: 0.0170359, Val Loss: 0.0165539 +2025-03-11 16:41:35,172 Epoch 113/2000 +2025-03-11 16:42:00,468 Current Learning Rate: 0.0003986064 +2025-03-11 16:42:00,471 Train Loss: 0.0170300, Val Loss: 0.0165476 +2025-03-11 16:42:00,472 Epoch 114/2000 +2025-03-11 16:42:25,684 Current Learning Rate: 0.0003909284 +2025-03-11 16:42:25,688 Train Loss: 0.0170241, Val Loss: 0.0165417 +2025-03-11 16:42:25,688 Epoch 115/2000 +2025-03-11 16:42:50,902 Current Learning Rate: 0.0003832773 +2025-03-11 16:42:50,913 Train Loss: 0.0170183, Val Loss: 0.0165359 +2025-03-11 16:42:50,914 Epoch 116/2000 +2025-03-11 16:43:15,827 Current Learning Rate: 0.0003756551 +2025-03-11 16:43:15,832 Train Loss: 0.0170126, Val Loss: 0.0165304 +2025-03-11 16:43:15,832 Epoch 117/2000 +2025-03-11 16:43:40,900 Current Learning Rate: 0.0003680635 +2025-03-11 16:43:40,904 Train Loss: 0.0170071, Val Loss: 0.0165250 +2025-03-11 16:43:40,904 Epoch 118/2000 +2025-03-11 16:44:05,719 Current Learning Rate: 0.0003605044 +2025-03-11 16:44:05,723 Train Loss: 0.0170016, Val Loss: 0.0165197 +2025-03-11 16:44:05,723 Epoch 119/2000 +2025-03-11 16:44:30,851 Current Learning Rate: 0.0003529798 +2025-03-11 16:44:30,856 Train Loss: 0.0169962, Val Loss: 0.0165144 +2025-03-11 
16:44:30,856 Epoch 120/2000 +2025-03-11 16:44:56,183 Current Learning Rate: 0.0003454915 +2025-03-11 16:44:56,187 Train Loss: 0.0169909, Val Loss: 0.0165093 +2025-03-11 16:44:56,187 Epoch 121/2000 +2025-03-11 16:45:21,369 Current Learning Rate: 0.0003380413 +2025-03-11 16:45:21,373 Train Loss: 0.0169856, Val Loss: 0.0165044 +2025-03-11 16:45:21,373 Epoch 122/2000 +2025-03-11 16:45:46,361 Current Learning Rate: 0.0003306310 +2025-03-11 16:45:46,365 Train Loss: 0.0169803, Val Loss: 0.0164995 +2025-03-11 16:45:46,365 Epoch 123/2000 +2025-03-11 16:46:11,086 Current Learning Rate: 0.0003232626 +2025-03-11 16:46:11,089 Train Loss: 0.0169751, Val Loss: 0.0164947 +2025-03-11 16:46:11,089 Epoch 124/2000 +2025-03-11 16:46:35,908 Current Learning Rate: 0.0003159377 +2025-03-11 16:46:35,913 Train Loss: 0.0169701, Val Loss: 0.0164901 +2025-03-11 16:46:35,914 Epoch 125/2000 +2025-03-11 16:47:01,955 Current Learning Rate: 0.0003086583 +2025-03-11 16:47:02,282 Train Loss: 0.0169652, Val Loss: 0.0164856 +2025-03-11 16:47:02,282 Epoch 126/2000 +2025-03-11 16:47:26,297 Current Learning Rate: 0.0003014261 +2025-03-11 16:47:26,302 Train Loss: 0.0169605, Val Loss: 0.0164808 +2025-03-11 16:47:26,302 Epoch 127/2000 +2025-03-11 16:47:51,295 Current Learning Rate: 0.0002942428 +2025-03-11 16:47:51,299 Train Loss: 0.0169556, Val Loss: 0.0164761 +2025-03-11 16:47:51,299 Epoch 128/2000 +2025-03-11 16:48:16,807 Current Learning Rate: 0.0002871104 +2025-03-11 16:48:16,812 Train Loss: 0.0169507, Val Loss: 0.0164718 +2025-03-11 16:48:16,812 Epoch 129/2000 +2025-03-11 16:48:42,294 Current Learning Rate: 0.0002800304 +2025-03-11 16:48:42,297 Train Loss: 0.0169460, Val Loss: 0.0164676 +2025-03-11 16:48:42,298 Epoch 130/2000 +2025-03-11 16:49:07,459 Current Learning Rate: 0.0002730048 +2025-03-11 16:49:07,463 Train Loss: 0.0169415, Val Loss: 0.0164636 +2025-03-11 16:49:07,463 Epoch 131/2000 +2025-03-11 16:49:32,819 Current Learning Rate: 0.0002660351 +2025-03-11 16:49:32,823 Train Loss: 0.0169373, Val 
Loss: 0.0164597 +2025-03-11 16:49:32,823 Epoch 132/2000 +2025-03-11 16:49:57,937 Current Learning Rate: 0.0002591232 +2025-03-11 16:49:58,132 Train Loss: 0.0169331, Val Loss: 0.0164553 +2025-03-11 16:49:58,133 Epoch 133/2000 +2025-03-11 16:50:23,468 Current Learning Rate: 0.0002522707 +2025-03-11 16:50:23,473 Train Loss: 0.0169291, Val Loss: 0.0164501 +2025-03-11 16:50:23,474 Epoch 134/2000 +2025-03-11 16:50:48,404 Current Learning Rate: 0.0002454793 +2025-03-11 16:50:48,410 Train Loss: 0.0169255, Val Loss: 0.0164468 +2025-03-11 16:50:48,410 Epoch 135/2000 +2025-03-11 16:51:13,704 Current Learning Rate: 0.0002387507 +2025-03-11 16:51:13,708 Train Loss: 0.0169212, Val Loss: 0.0164439 +2025-03-11 16:51:13,709 Epoch 136/2000 +2025-03-11 16:51:38,535 Current Learning Rate: 0.0002320866 +2025-03-11 16:51:38,539 Train Loss: 0.0169160, Val Loss: 0.0164379 +2025-03-11 16:51:38,539 Epoch 137/2000 +2025-03-11 16:52:03,663 Current Learning Rate: 0.0002254886 +2025-03-11 16:52:03,667 Train Loss: 0.0169117, Val Loss: 0.0164334 +2025-03-11 16:52:03,667 Epoch 138/2000 +2025-03-11 16:52:28,513 Current Learning Rate: 0.0002189583 +2025-03-11 16:52:28,524 Train Loss: 0.0169077, Val Loss: 0.0164292 +2025-03-11 16:52:28,524 Epoch 139/2000 +2025-03-11 16:52:53,198 Current Learning Rate: 0.0002124974 +2025-03-11 16:52:53,201 Train Loss: 0.0169038, Val Loss: 0.0164253 +2025-03-11 16:52:53,202 Epoch 140/2000 +2025-03-11 16:53:17,640 Current Learning Rate: 0.0002061074 +2025-03-11 16:53:17,644 Train Loss: 0.0169000, Val Loss: 0.0164215 +2025-03-11 16:53:17,644 Epoch 141/2000 +2025-03-11 16:53:42,629 Current Learning Rate: 0.0001997899 +2025-03-11 16:53:42,634 Train Loss: 0.0168963, Val Loss: 0.0164179 +2025-03-11 16:53:42,635 Epoch 142/2000 +2025-03-11 16:54:07,550 Current Learning Rate: 0.0001935465 +2025-03-11 16:54:07,554 Train Loss: 0.0168927, Val Loss: 0.0164144 +2025-03-11 16:54:07,554 Epoch 143/2000 +2025-03-11 16:54:32,753 Current Learning Rate: 0.0001873787 +2025-03-11 
16:54:32,757 Train Loss: 0.0168892, Val Loss: 0.0164110 +2025-03-11 16:54:32,757 Epoch 144/2000 +2025-03-11 16:54:57,721 Current Learning Rate: 0.0001812880 +2025-03-11 16:54:57,726 Train Loss: 0.0168857, Val Loss: 0.0164077 +2025-03-11 16:54:57,727 Epoch 145/2000 +2025-03-11 16:55:22,720 Current Learning Rate: 0.0001752760 +2025-03-11 16:55:22,724 Train Loss: 0.0168824, Val Loss: 0.0164046 +2025-03-11 16:55:22,724 Epoch 146/2000 +2025-03-11 16:55:47,216 Current Learning Rate: 0.0001693441 +2025-03-11 16:55:47,220 Train Loss: 0.0168791, Val Loss: 0.0164016 +2025-03-11 16:55:47,220 Epoch 147/2000 +2025-03-11 16:56:11,938 Current Learning Rate: 0.0001634937 +2025-03-11 16:56:11,944 Train Loss: 0.0168758, Val Loss: 0.0163984 +2025-03-11 16:56:11,944 Epoch 148/2000 +2025-03-11 16:56:36,424 Current Learning Rate: 0.0001577264 +2025-03-11 16:56:36,428 Train Loss: 0.0168727, Val Loss: 0.0163954 +2025-03-11 16:56:36,429 Epoch 149/2000 +2025-03-11 16:57:01,020 Current Learning Rate: 0.0001520436 +2025-03-11 16:57:01,023 Train Loss: 0.0168696, Val Loss: 0.0163929 +2025-03-11 16:57:01,024 Epoch 150/2000 +2025-03-11 16:57:25,990 Current Learning Rate: 0.0001464466 +2025-03-11 16:57:25,994 Train Loss: 0.0168665, Val Loss: 0.0163897 +2025-03-11 16:57:25,995 Epoch 151/2000 +2025-03-11 16:57:50,788 Current Learning Rate: 0.0001409369 +2025-03-11 16:57:50,799 Train Loss: 0.0168636, Val Loss: 0.0163867 +2025-03-11 16:57:50,802 Epoch 152/2000 +2025-03-11 16:58:15,601 Current Learning Rate: 0.0001355157 +2025-03-11 16:58:15,616 Train Loss: 0.0168606, Val Loss: 0.0163838 +2025-03-11 16:58:15,616 Epoch 153/2000 +2025-03-11 16:58:40,365 Current Learning Rate: 0.0001301845 +2025-03-11 16:58:40,368 Train Loss: 0.0168577, Val Loss: 0.0163815 +2025-03-11 16:58:40,368 Epoch 154/2000 +2025-03-11 16:59:05,212 Current Learning Rate: 0.0001249445 +2025-03-11 16:59:05,216 Train Loss: 0.0168549, Val Loss: 0.0163786 +2025-03-11 16:59:05,216 Epoch 155/2000 +2025-03-11 16:59:30,038 Current Learning 
Rate: 0.0001197970 +2025-03-11 16:59:30,042 Train Loss: 0.0168521, Val Loss: 0.0163759 +2025-03-11 16:59:30,042 Epoch 156/2000 +2025-03-11 16:59:55,001 Current Learning Rate: 0.0001147434 +2025-03-11 16:59:55,011 Train Loss: 0.0168495, Val Loss: 0.0163732 +2025-03-11 16:59:55,011 Epoch 157/2000 +2025-03-11 17:00:20,269 Current Learning Rate: 0.0001097848 +2025-03-11 17:00:20,272 Train Loss: 0.0168468, Val Loss: 0.0163705 +2025-03-11 17:00:20,272 Epoch 158/2000 +2025-03-11 17:00:45,383 Current Learning Rate: 0.0001049225 +2025-03-11 17:00:45,398 Train Loss: 0.0168443, Val Loss: 0.0163680 +2025-03-11 17:00:45,398 Epoch 159/2000 +2025-03-11 17:01:10,931 Current Learning Rate: 0.0001001577 +2025-03-11 17:01:10,935 Train Loss: 0.0168419, Val Loss: 0.0163659 +2025-03-11 17:01:10,935 Epoch 160/2000 +2025-03-11 17:01:36,707 Current Learning Rate: 0.0000954915 +2025-03-11 17:01:36,710 Train Loss: 0.0168396, Val Loss: 0.0163637 +2025-03-11 17:01:36,711 Epoch 161/2000 +2025-03-11 17:02:01,669 Current Learning Rate: 0.0000909251 +2025-03-11 17:02:01,673 Train Loss: 0.0168373, Val Loss: 0.0163618 +2025-03-11 17:02:01,673 Epoch 162/2000 +2025-03-11 17:02:26,982 Current Learning Rate: 0.0000864597 +2025-03-11 17:02:26,985 Train Loss: 0.0168351, Val Loss: 0.0163597 +2025-03-11 17:02:26,986 Epoch 163/2000 +2025-03-11 17:02:51,713 Current Learning Rate: 0.0000820963 +2025-03-11 17:02:51,717 Train Loss: 0.0168329, Val Loss: 0.0163574 +2025-03-11 17:02:51,717 Epoch 164/2000 +2025-03-11 17:03:17,235 Current Learning Rate: 0.0000778360 +2025-03-11 17:03:17,240 Train Loss: 0.0168309, Val Loss: 0.0163551 +2025-03-11 17:03:17,240 Epoch 165/2000 +2025-03-11 17:03:42,045 Current Learning Rate: 0.0000736799 +2025-03-11 17:03:42,064 Train Loss: 0.0168290, Val Loss: 0.0163530 +2025-03-11 17:03:42,064 Epoch 166/2000 +2025-03-11 17:04:06,846 Current Learning Rate: 0.0000696290 +2025-03-11 17:04:06,849 Train Loss: 0.0168270, Val Loss: 0.0163512 +2025-03-11 17:04:06,849 Epoch 167/2000 +2025-03-11 
17:04:31,023 Current Learning Rate: 0.0000656842 +2025-03-11 17:04:31,027 Train Loss: 0.0168250, Val Loss: 0.0163496 +2025-03-11 17:04:31,027 Epoch 168/2000 +2025-03-11 17:04:56,005 Current Learning Rate: 0.0000618467 +2025-03-11 17:04:56,017 Train Loss: 0.0168232, Val Loss: 0.0163482 +2025-03-11 17:04:56,018 Epoch 169/2000 +2025-03-11 17:05:20,674 Current Learning Rate: 0.0000581172 +2025-03-11 17:05:20,678 Train Loss: 0.0168215, Val Loss: 0.0163468 +2025-03-11 17:05:20,678 Epoch 170/2000 +2025-03-11 17:05:45,423 Current Learning Rate: 0.0000544967 +2025-03-11 17:05:45,427 Train Loss: 0.0168199, Val Loss: 0.0163454 +2025-03-11 17:05:45,427 Epoch 171/2000 +2025-03-11 17:06:10,684 Current Learning Rate: 0.0000509862 +2025-03-11 17:06:10,688 Train Loss: 0.0168184, Val Loss: 0.0163439 +2025-03-11 17:06:10,688 Epoch 172/2000 +2025-03-11 17:06:35,240 Current Learning Rate: 0.0000475865 +2025-03-11 17:06:35,244 Train Loss: 0.0168170, Val Loss: 0.0163424 +2025-03-11 17:06:35,244 Epoch 173/2000 +2025-03-11 17:07:00,147 Current Learning Rate: 0.0000442984 +2025-03-11 17:07:00,150 Train Loss: 0.0168156, Val Loss: 0.0163408 +2025-03-11 17:07:00,150 Epoch 174/2000 +2025-03-11 17:07:24,991 Current Learning Rate: 0.0000411227 +2025-03-11 17:07:24,996 Train Loss: 0.0168143, Val Loss: 0.0163396 +2025-03-11 17:07:24,997 Epoch 175/2000 +2025-03-11 17:07:50,029 Current Learning Rate: 0.0000380602 +2025-03-11 17:07:50,033 Train Loss: 0.0168131, Val Loss: 0.0163386 +2025-03-11 17:07:50,033 Epoch 176/2000 +2025-03-11 17:08:14,750 Current Learning Rate: 0.0000351118 +2025-03-11 17:08:14,753 Train Loss: 0.0168120, Val Loss: 0.0163377 +2025-03-11 17:08:14,754 Epoch 177/2000 +2025-03-11 17:08:39,950 Current Learning Rate: 0.0000322780 +2025-03-11 17:08:39,954 Train Loss: 0.0168109, Val Loss: 0.0163370 +2025-03-11 17:08:39,954 Epoch 178/2000 +2025-03-11 17:09:05,067 Current Learning Rate: 0.0000295596 +2025-03-11 17:09:05,166 Train Loss: 0.0168098, Val Loss: 0.0163359 +2025-03-11 
17:09:05,166 Epoch 179/2000 +2025-03-11 17:09:29,892 Current Learning Rate: 0.0000269573 +2025-03-11 17:09:29,896 Train Loss: 0.0168088, Val Loss: 0.0163349 +2025-03-11 17:09:29,896 Epoch 180/2000 +2025-03-11 17:09:54,696 Current Learning Rate: 0.0000244717 +2025-03-11 17:09:54,699 Train Loss: 0.0168079, Val Loss: 0.0163339 +2025-03-11 17:09:54,699 Epoch 181/2000 +2025-03-11 17:10:19,372 Current Learning Rate: 0.0000221035 +2025-03-11 17:10:19,381 Train Loss: 0.0168070, Val Loss: 0.0163330 +2025-03-11 17:10:19,381 Epoch 182/2000 +2025-03-11 17:10:44,162 Current Learning Rate: 0.0000198532 +2025-03-11 17:10:44,166 Train Loss: 0.0168062, Val Loss: 0.0163323 +2025-03-11 17:10:44,166 Epoch 183/2000 +2025-03-11 17:11:08,998 Current Learning Rate: 0.0000177213 +2025-03-11 17:11:09,002 Train Loss: 0.0168055, Val Loss: 0.0163317 +2025-03-11 17:11:09,002 Epoch 184/2000 +2025-03-11 17:11:33,735 Current Learning Rate: 0.0000157084 +2025-03-11 17:11:33,738 Train Loss: 0.0168048, Val Loss: 0.0163312 +2025-03-11 17:11:33,739 Epoch 185/2000 +2025-03-11 17:11:58,225 Current Learning Rate: 0.0000138150 +2025-03-11 17:11:58,229 Train Loss: 0.0168042, Val Loss: 0.0163307 +2025-03-11 17:11:58,229 Epoch 186/2000 +2025-03-11 17:12:23,446 Current Learning Rate: 0.0000120416 +2025-03-11 17:12:23,451 Train Loss: 0.0168037, Val Loss: 0.0163302 +2025-03-11 17:12:23,451 Epoch 187/2000 +2025-03-11 17:12:48,644 Current Learning Rate: 0.0000103886 +2025-03-11 17:12:48,647 Train Loss: 0.0168032, Val Loss: 0.0163298 +2025-03-11 17:12:48,647 Epoch 188/2000 +2025-03-11 17:13:13,972 Current Learning Rate: 0.0000088564 +2025-03-11 17:13:13,975 Train Loss: 0.0168028, Val Loss: 0.0163294 +2025-03-11 17:13:13,976 Epoch 189/2000 +2025-03-11 17:13:39,084 Current Learning Rate: 0.0000074453 +2025-03-11 17:13:39,087 Train Loss: 0.0168025, Val Loss: 0.0163292 +2025-03-11 17:13:39,088 Epoch 190/2000 +2025-03-11 17:14:04,055 Current Learning Rate: 0.0000061558 +2025-03-11 17:14:04,059 Train Loss: 0.0168021, Val 
Loss: 0.0163289 +2025-03-11 17:14:04,060 Epoch 191/2000 +2025-03-11 17:14:29,495 Current Learning Rate: 0.0000049882 +2025-03-11 17:14:29,500 Train Loss: 0.0168019, Val Loss: 0.0163287 +2025-03-11 17:14:29,500 Epoch 192/2000 +2025-03-11 17:14:55,717 Current Learning Rate: 0.0000039426 +2025-03-11 17:14:55,720 Train Loss: 0.0168016, Val Loss: 0.0163284 +2025-03-11 17:14:55,721 Epoch 193/2000 +2025-03-11 17:15:20,333 Current Learning Rate: 0.0000030195 +2025-03-11 17:15:20,336 Train Loss: 0.0168014, Val Loss: 0.0163282 +2025-03-11 17:15:20,337 Epoch 194/2000 +2025-03-11 17:15:46,065 Current Learning Rate: 0.0000022190 +2025-03-11 17:15:46,068 Train Loss: 0.0168013, Val Loss: 0.0163281 +2025-03-11 17:15:46,068 Epoch 195/2000 +2025-03-11 17:16:10,846 Current Learning Rate: 0.0000015413 +2025-03-11 17:16:10,850 Train Loss: 0.0168012, Val Loss: 0.0163280 +2025-03-11 17:16:10,851 Epoch 196/2000 +2025-03-11 17:16:35,526 Current Learning Rate: 0.0000009866 +2025-03-11 17:16:35,530 Train Loss: 0.0168011, Val Loss: 0.0163280 +2025-03-11 17:16:35,530 Epoch 197/2000 +2025-03-11 17:17:00,404 Current Learning Rate: 0.0000005551 +2025-03-11 17:17:00,407 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:17:00,408 Epoch 198/2000 +2025-03-11 17:17:25,524 Current Learning Rate: 0.0000002467 +2025-03-11 17:17:25,528 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:17:25,529 Epoch 199/2000 +2025-03-11 17:17:50,898 Current Learning Rate: 0.0000000617 +2025-03-11 17:17:50,898 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:17:50,899 Epoch 200/2000 +2025-03-11 17:18:16,378 Current Learning Rate: 0.0000000000 +2025-03-11 17:18:16,379 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:18:16,379 Epoch 201/2000 +2025-03-11 17:18:41,343 Current Learning Rate: 0.0000000617 +2025-03-11 17:18:41,343 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:18:41,344 Epoch 202/2000 +2025-03-11 17:19:06,298 Current Learning Rate: 0.0000002467 +2025-03-11 
17:19:06,299 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:19:06,299 Epoch 203/2000 +2025-03-11 17:19:31,179 Current Learning Rate: 0.0000005551 +2025-03-11 17:19:31,192 Train Loss: 0.0168010, Val Loss: 0.0163279 +2025-03-11 17:19:31,192 Epoch 204/2000 +2025-03-11 17:19:56,249 Current Learning Rate: 0.0000009866 +2025-03-11 17:19:56,265 Train Loss: 0.0168009, Val Loss: 0.0163278 +2025-03-11 17:19:56,265 Epoch 205/2000 +2025-03-11 17:20:21,536 Current Learning Rate: 0.0000015413 +2025-03-11 17:20:21,544 Train Loss: 0.0168009, Val Loss: 0.0163278 +2025-03-11 17:20:21,545 Epoch 206/2000 +2025-03-11 17:20:46,956 Current Learning Rate: 0.0000022190 +2025-03-11 17:20:46,960 Train Loss: 0.0168008, Val Loss: 0.0163277 +2025-03-11 17:20:46,961 Epoch 207/2000 +2025-03-11 17:21:12,046 Current Learning Rate: 0.0000030195 +2025-03-11 17:21:12,050 Train Loss: 0.0168007, Val Loss: 0.0163276 +2025-03-11 17:21:12,050 Epoch 208/2000 +2025-03-11 17:21:37,044 Current Learning Rate: 0.0000039426 +2025-03-11 17:21:37,047 Train Loss: 0.0168006, Val Loss: 0.0163275 +2025-03-11 17:21:37,048 Epoch 209/2000 +2025-03-11 17:22:01,710 Current Learning Rate: 0.0000049882 +2025-03-11 17:22:01,714 Train Loss: 0.0168005, Val Loss: 0.0163273 +2025-03-11 17:22:01,714 Epoch 210/2000 +2025-03-11 17:22:26,477 Current Learning Rate: 0.0000061558 +2025-03-11 17:22:26,481 Train Loss: 0.0168002, Val Loss: 0.0163271 +2025-03-11 17:22:26,481 Epoch 211/2000 +2025-03-11 17:22:51,596 Current Learning Rate: 0.0000074453 +2025-03-11 17:22:51,602 Train Loss: 0.0167999, Val Loss: 0.0163267 +2025-03-11 17:22:51,602 Epoch 212/2000 +2025-03-11 17:23:16,782 Current Learning Rate: 0.0000088564 +2025-03-11 17:23:16,788 Train Loss: 0.0167996, Val Loss: 0.0163264 +2025-03-11 17:23:16,788 Epoch 213/2000 +2025-03-11 17:23:41,675 Current Learning Rate: 0.0000103886 +2025-03-11 17:23:41,679 Train Loss: 0.0167991, Val Loss: 0.0163258 +2025-03-11 17:23:41,679 Epoch 214/2000 +2025-03-11 17:24:06,645 Current Learning 
Rate: 0.0000120416 +2025-03-11 17:24:06,648 Train Loss: 0.0167986, Val Loss: 0.0163252 +2025-03-11 17:24:06,648 Epoch 215/2000 +2025-03-11 17:24:32,251 Current Learning Rate: 0.0000138150 +2025-03-11 17:24:32,255 Train Loss: 0.0167981, Val Loss: 0.0163246 +2025-03-11 17:24:32,255 Epoch 216/2000 +2025-03-11 17:24:58,279 Current Learning Rate: 0.0000157084 +2025-03-11 17:24:58,282 Train Loss: 0.0167974, Val Loss: 0.0163240 +2025-03-11 17:24:58,283 Epoch 217/2000 +2025-03-11 17:25:23,609 Current Learning Rate: 0.0000177213 +2025-03-11 17:25:23,614 Train Loss: 0.0167966, Val Loss: 0.0163232 +2025-03-11 17:25:23,615 Epoch 218/2000 +2025-03-11 17:25:48,080 Current Learning Rate: 0.0000198532 +2025-03-11 17:25:48,083 Train Loss: 0.0167958, Val Loss: 0.0163224 +2025-03-11 17:25:48,084 Epoch 219/2000 +2025-03-11 17:26:13,329 Current Learning Rate: 0.0000221035 +2025-03-11 17:26:13,333 Train Loss: 0.0167948, Val Loss: 0.0163212 +2025-03-11 17:26:13,333 Epoch 220/2000 +2025-03-11 17:26:38,494 Current Learning Rate: 0.0000244717 +2025-03-11 17:26:38,505 Train Loss: 0.0167937, Val Loss: 0.0163198 +2025-03-11 17:26:38,505 Epoch 221/2000 +2025-03-11 17:27:03,525 Current Learning Rate: 0.0000269573 +2025-03-11 17:27:03,530 Train Loss: 0.0167926, Val Loss: 0.0163185 +2025-03-11 17:27:03,531 Epoch 222/2000 +2025-03-11 17:27:28,764 Current Learning Rate: 0.0000295596 +2025-03-11 17:27:28,769 Train Loss: 0.0167913, Val Loss: 0.0163176 +2025-03-11 17:27:28,769 Epoch 223/2000 +2025-03-11 17:27:53,964 Current Learning Rate: 0.0000322780 +2025-03-11 17:27:53,968 Train Loss: 0.0167899, Val Loss: 0.0163164 +2025-03-11 17:27:53,968 Epoch 224/2000 +2025-03-11 17:28:18,979 Current Learning Rate: 0.0000351118 +2025-03-11 17:28:18,984 Train Loss: 0.0167885, Val Loss: 0.0163150 +2025-03-11 17:28:18,984 Epoch 225/2000 +2025-03-11 17:28:44,187 Current Learning Rate: 0.0000380602 +2025-03-11 17:28:44,191 Train Loss: 0.0167870, Val Loss: 0.0163132 +2025-03-11 17:28:44,191 Epoch 226/2000 +2025-03-11 
17:29:08,887 Current Learning Rate: 0.0000411227 +2025-03-11 17:29:08,891 Train Loss: 0.0167854, Val Loss: 0.0163114 +2025-03-11 17:29:08,892 Epoch 227/2000 +2025-03-11 17:29:33,527 Current Learning Rate: 0.0000442984 +2025-03-11 17:29:33,530 Train Loss: 0.0167838, Val Loss: 0.0163099 +2025-03-11 17:29:33,530 Epoch 228/2000 +2025-03-11 17:29:58,569 Current Learning Rate: 0.0000475865 +2025-03-11 17:29:58,573 Train Loss: 0.0167821, Val Loss: 0.0163085 +2025-03-11 17:29:58,573 Epoch 229/2000 +2025-03-11 17:30:23,940 Current Learning Rate: 0.0000509862 +2025-03-11 17:30:23,944 Train Loss: 0.0167802, Val Loss: 0.0163066 +2025-03-11 17:30:23,944 Epoch 230/2000 +2025-03-11 17:30:49,132 Current Learning Rate: 0.0000544967 +2025-03-11 17:30:49,136 Train Loss: 0.0167783, Val Loss: 0.0163043 +2025-03-11 17:30:49,136 Epoch 231/2000 +2025-03-11 17:31:14,367 Current Learning Rate: 0.0000581172 +2025-03-11 17:31:14,370 Train Loss: 0.0167762, Val Loss: 0.0163026 +2025-03-11 17:31:14,373 Epoch 232/2000 +2025-03-11 17:31:39,508 Current Learning Rate: 0.0000618467 +2025-03-11 17:31:39,512 Train Loss: 0.0167743, Val Loss: 0.0163008 +2025-03-11 17:31:39,512 Epoch 233/2000 +2025-03-11 17:32:04,756 Current Learning Rate: 0.0000656842 +2025-03-11 17:32:04,762 Train Loss: 0.0167723, Val Loss: 0.0162987 +2025-03-11 17:32:04,762 Epoch 234/2000 +2025-03-11 17:32:30,078 Current Learning Rate: 0.0000696290 +2025-03-11 17:32:30,082 Train Loss: 0.0167702, Val Loss: 0.0162967 +2025-03-11 17:32:30,082 Epoch 235/2000 +2025-03-11 17:32:55,748 Current Learning Rate: 0.0000736799 +2025-03-11 17:32:55,753 Train Loss: 0.0167679, Val Loss: 0.0162946 +2025-03-11 17:32:55,754 Epoch 236/2000 +2025-03-11 17:33:21,226 Current Learning Rate: 0.0000778360 +2025-03-11 17:33:21,230 Train Loss: 0.0167655, Val Loss: 0.0162921 +2025-03-11 17:33:21,230 Epoch 237/2000 +2025-03-11 17:33:46,846 Current Learning Rate: 0.0000820963 +2025-03-11 17:33:46,850 Train Loss: 0.0167629, Val Loss: 0.0162891 +2025-03-11 
17:33:46,850 Epoch 238/2000 +2025-03-11 17:34:11,953 Current Learning Rate: 0.0000864597 +2025-03-11 17:34:11,957 Train Loss: 0.0167603, Val Loss: 0.0162868 +2025-03-11 17:34:11,957 Epoch 239/2000 +2025-03-11 17:34:37,611 Current Learning Rate: 0.0000909251 +2025-03-11 17:34:37,614 Train Loss: 0.0167579, Val Loss: 0.0162843 +2025-03-11 17:34:37,615 Epoch 240/2000 +2025-03-11 17:35:03,198 Current Learning Rate: 0.0000954915 +2025-03-11 17:35:03,203 Train Loss: 0.0167552, Val Loss: 0.0162819 +2025-03-11 17:35:03,204 Epoch 241/2000 +2025-03-11 17:35:28,406 Current Learning Rate: 0.0001001577 +2025-03-11 17:35:28,410 Train Loss: 0.0167530, Val Loss: 0.0162791 +2025-03-11 17:35:28,410 Epoch 242/2000 +2025-03-11 17:35:54,210 Current Learning Rate: 0.0001049225 +2025-03-11 17:35:54,215 Train Loss: 0.0167504, Val Loss: 0.0162765 +2025-03-11 17:35:54,215 Epoch 243/2000 +2025-03-11 17:36:20,472 Current Learning Rate: 0.0001097848 +2025-03-11 17:36:20,476 Train Loss: 0.0167478, Val Loss: 0.0162739 +2025-03-11 17:36:20,476 Epoch 244/2000 +2025-03-11 17:36:46,265 Current Learning Rate: 0.0001147434 +2025-03-11 17:36:46,269 Train Loss: 0.0167451, Val Loss: 0.0162714 +2025-03-11 17:36:46,270 Epoch 245/2000 +2025-03-11 17:37:12,218 Current Learning Rate: 0.0001197970 +2025-03-11 17:37:12,222 Train Loss: 0.0167424, Val Loss: 0.0162688 +2025-03-11 17:37:12,222 Epoch 246/2000 +2025-03-11 17:37:38,273 Current Learning Rate: 0.0001249445 +2025-03-11 17:37:38,276 Train Loss: 0.0167397, Val Loss: 0.0162664 +2025-03-11 17:37:38,277 Epoch 247/2000 +2025-03-11 17:38:03,831 Current Learning Rate: 0.0001301845 +2025-03-11 17:38:03,835 Train Loss: 0.0167369, Val Loss: 0.0162638 +2025-03-11 17:38:03,835 Epoch 248/2000 +2025-03-11 17:38:29,345 Current Learning Rate: 0.0001355157 +2025-03-11 17:38:29,348 Train Loss: 0.0167341, Val Loss: 0.0162614 +2025-03-11 17:38:29,348 Epoch 249/2000 +2025-03-11 17:38:54,524 Current Learning Rate: 0.0001409369 +2025-03-11 17:38:54,528 Train Loss: 0.0167314, Val 
Loss: 0.0162589 +2025-03-11 17:38:54,528 Epoch 250/2000 +2025-03-11 17:39:19,298 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 17:39:19,651 Current Learning Rate: 0.0001464466 +2025-03-11 17:39:19,655 Train Loss: 0.0167285, Val Loss: 0.0162563 +2025-03-11 17:39:19,655 Epoch 251/2000 +2025-03-11 17:39:44,709 Current Learning Rate: 0.0001520436 +2025-03-11 17:39:44,713 Train Loss: 0.0167255, Val Loss: 0.0162538 +2025-03-11 17:39:44,713 Epoch 252/2000 +2025-03-11 17:40:10,345 Current Learning Rate: 0.0001577264 +2025-03-11 17:40:10,346 Train Loss: 0.0167223, Val Loss: 0.0162559 +2025-03-11 17:40:10,346 Epoch 253/2000 +2025-03-11 17:40:15,695 Loading best model from checkpoint. +2025-03-11 17:40:39,612 Current Learning Rate: 0.0001634937 +2025-03-11 17:40:39,653 Train Loss: 0.0167211, Val Loss: 0.0162477 +2025-03-11 17:40:39,653 Epoch 254/2000 +2025-03-11 17:40:54,063 Testing completed and best model saved. +2025-03-11 17:41:05,003 Current Learning Rate: 0.0001693441 +2025-03-11 17:41:05,004 Train Loss: 0.0167161, Val Loss: 0.0162500 +2025-03-11 17:41:05,004 Epoch 255/2000 +2025-03-11 17:41:30,807 Current Learning Rate: 0.0001752760 +2025-03-11 17:41:30,813 Train Loss: 0.0167153, Val Loss: 0.0162430 +2025-03-11 17:41:30,814 Epoch 256/2000 +2025-03-11 17:41:55,800 Current Learning Rate: 0.0001812880 +2025-03-11 17:41:55,800 Train Loss: 0.0167102, Val Loss: 0.0162457 +2025-03-11 17:41:55,801 Epoch 257/2000 +2025-03-11 17:42:20,796 Current Learning Rate: 0.0001873787 +2025-03-11 17:42:20,800 Train Loss: 0.0167108, Val Loss: 0.0162368 +2025-03-11 17:42:20,801 Epoch 258/2000 +2025-03-11 17:42:46,332 Current Learning Rate: 0.0001935465 +2025-03-11 17:42:46,332 Train Loss: 0.0167042, Val Loss: 0.0162391 +2025-03-11 17:42:46,332 Epoch 259/2000 +2025-03-11 17:43:11,864 Current Learning Rate: 0.0001997899 +2025-03-11 17:43:11,865 Train Loss: 0.0167022, Val Loss: 0.0162389 +2025-03-11 17:43:11,865 Epoch 260/2000 +2025-03-11 17:43:36,475 Current Learning 
Rate: 0.0002061074 +2025-03-11 17:43:36,479 Train Loss: 0.0167025, Val Loss: 0.0162291 +2025-03-11 17:43:36,479 Epoch 261/2000 +2025-03-11 17:44:01,547 Current Learning Rate: 0.0002124974 +2025-03-11 17:44:01,548 Train Loss: 0.0166951, Val Loss: 0.0162310 +2025-03-11 17:44:01,548 Epoch 262/2000 +2025-03-11 17:44:26,394 Current Learning Rate: 0.0002189583 +2025-03-11 17:44:26,508 Train Loss: 0.0166940, Val Loss: 0.0162280 +2025-03-11 17:44:26,509 Epoch 263/2000 +2025-03-11 17:44:51,087 Current Learning Rate: 0.0002254886 +2025-03-11 17:44:51,091 Train Loss: 0.0166912, Val Loss: 0.0162234 +2025-03-11 17:44:51,091 Epoch 264/2000 +2025-03-11 17:45:15,543 Current Learning Rate: 0.0002320866 +2025-03-11 17:45:15,546 Train Loss: 0.0166875, Val Loss: 0.0162202 +2025-03-11 17:45:15,546 Epoch 265/2000 +2025-03-11 17:45:40,631 Current Learning Rate: 0.0002387507 +2025-03-11 17:45:40,635 Train Loss: 0.0166865, Val Loss: 0.0162106 +2025-03-11 17:45:40,635 Epoch 266/2000 +2025-03-11 17:46:05,578 Current Learning Rate: 0.0002454793 +2025-03-11 17:46:05,578 Train Loss: 0.0166794, Val Loss: 0.0162214 +2025-03-11 17:46:05,578 Epoch 267/2000 +2025-03-11 17:46:30,374 Current Learning Rate: 0.0002522707 +2025-03-11 17:46:30,384 Train Loss: 0.0166806, Val Loss: 0.0162031 +2025-03-11 17:46:30,385 Epoch 268/2000 +2025-03-11 17:46:55,322 Current Learning Rate: 0.0002591232 +2025-03-11 17:46:55,323 Train Loss: 0.0166728, Val Loss: 0.0162175 +2025-03-11 17:46:55,323 Epoch 269/2000 +2025-03-11 17:47:19,791 Current Learning Rate: 0.0002660351 +2025-03-11 17:47:19,795 Train Loss: 0.0166749, Val Loss: 0.0161952 +2025-03-11 17:47:19,795 Epoch 270/2000 +2025-03-11 17:47:44,924 Current Learning Rate: 0.0002730048 +2025-03-11 17:47:44,925 Train Loss: 0.0166656, Val Loss: 0.0162073 +2025-03-11 17:47:44,925 Epoch 271/2000 +2025-03-11 17:48:09,768 Current Learning Rate: 0.0002800304 +2025-03-11 17:48:09,773 Train Loss: 0.0166678, Val Loss: 0.0161924 +2025-03-11 17:48:09,773 Epoch 272/2000 +2025-03-11 
17:48:34,300 Current Learning Rate: 0.0002871104 +2025-03-11 17:48:34,300 Train Loss: 0.0166596, Val Loss: 0.0161993 +2025-03-11 17:48:34,301 Epoch 273/2000 +2025-03-11 17:48:59,371 Current Learning Rate: 0.0002942428 +2025-03-11 17:48:59,374 Train Loss: 0.0166597, Val Loss: 0.0161854 +2025-03-11 17:48:59,375 Epoch 274/2000 +2025-03-11 17:49:23,630 Current Learning Rate: 0.0003014261 +2025-03-11 17:49:23,633 Train Loss: 0.0166544, Val Loss: 0.0161807 +2025-03-11 17:49:23,633 Epoch 275/2000 +2025-03-11 17:49:47,958 Current Learning Rate: 0.0003086583 +2025-03-11 17:49:47,962 Train Loss: 0.0166548, Val Loss: 0.0161780 +2025-03-11 17:49:47,962 Epoch 276/2000 +2025-03-11 17:50:12,620 Current Learning Rate: 0.0003159377 +2025-03-11 17:50:12,620 Train Loss: 0.0166466, Val Loss: 0.0161873 +2025-03-11 17:50:12,620 Epoch 277/2000 +2025-03-11 17:50:37,633 Current Learning Rate: 0.0003232626 +2025-03-11 17:50:37,637 Train Loss: 0.0166464, Val Loss: 0.0161739 +2025-03-11 17:50:37,638 Epoch 278/2000 +2025-03-11 17:51:02,202 Current Learning Rate: 0.0003306310 +2025-03-11 17:51:02,206 Train Loss: 0.0166419, Val Loss: 0.0161699 +2025-03-11 17:51:02,206 Epoch 279/2000 +2025-03-11 17:51:27,213 Current Learning Rate: 0.0003380413 +2025-03-11 17:51:27,219 Train Loss: 0.0166401, Val Loss: 0.0161697 +2025-03-11 17:51:27,219 Epoch 280/2000 +2025-03-11 17:51:51,932 Current Learning Rate: 0.0003454915 +2025-03-11 17:51:51,935 Train Loss: 0.0166346, Val Loss: 0.0161674 +2025-03-11 17:51:51,936 Epoch 281/2000 +2025-03-11 17:52:17,539 Current Learning Rate: 0.0003529798 +2025-03-11 17:52:17,543 Train Loss: 0.0166324, Val Loss: 0.0161614 +2025-03-11 17:52:17,543 Epoch 282/2000 +2025-03-11 17:52:42,642 Current Learning Rate: 0.0003605044 +2025-03-11 17:52:42,646 Train Loss: 0.0166295, Val Loss: 0.0161612 +2025-03-11 17:52:42,646 Epoch 283/2000 +2025-03-11 17:53:08,078 Current Learning Rate: 0.0003680635 +2025-03-11 17:53:08,081 Train Loss: 0.0166254, Val Loss: 0.0161583 +2025-03-11 
17:53:08,082 Epoch 284/2000 +2025-03-11 17:53:33,430 Current Learning Rate: 0.0003756551 +2025-03-11 17:53:33,435 Train Loss: 0.0166228, Val Loss: 0.0161570 +2025-03-11 17:53:33,435 Epoch 285/2000 +2025-03-11 17:53:58,503 Current Learning Rate: 0.0003832773 +2025-03-11 17:53:58,505 Train Loss: 0.0166185, Val Loss: 0.0161510 +2025-03-11 17:53:58,506 Epoch 286/2000 +2025-03-11 17:54:22,940 Current Learning Rate: 0.0003909284 +2025-03-11 17:54:22,945 Train Loss: 0.0166158, Val Loss: 0.0161487 +2025-03-11 17:54:22,945 Epoch 287/2000 +2025-03-11 17:54:48,223 Current Learning Rate: 0.0003986064 +2025-03-11 17:54:48,228 Train Loss: 0.0166118, Val Loss: 0.0161434 +2025-03-11 17:54:48,228 Epoch 288/2000 +2025-03-11 17:55:12,966 Current Learning Rate: 0.0004063093 +2025-03-11 17:55:12,970 Train Loss: 0.0166088, Val Loss: 0.0161385 +2025-03-11 17:55:12,971 Epoch 289/2000 +2025-03-11 17:55:38,097 Current Learning Rate: 0.0004140354 +2025-03-11 17:55:38,100 Train Loss: 0.0166055, Val Loss: 0.0161333 +2025-03-11 17:55:38,101 Epoch 290/2000 +2025-03-11 17:56:02,596 Current Learning Rate: 0.0004217828 +2025-03-11 17:56:02,824 Train Loss: 0.0166025, Val Loss: 0.0161281 +2025-03-11 17:56:02,824 Epoch 291/2000 +2025-03-11 17:56:27,395 Current Learning Rate: 0.0004295494 +2025-03-11 17:56:27,399 Train Loss: 0.0165993, Val Loss: 0.0161231 +2025-03-11 17:56:27,399 Epoch 292/2000 +2025-03-11 17:56:52,723 Current Learning Rate: 0.0004373334 +2025-03-11 17:56:52,724 Train Loss: 0.0165958, Val Loss: 0.0161486 +2025-03-11 17:56:52,724 Epoch 293/2000 +2025-03-11 17:57:17,996 Current Learning Rate: 0.0004451328 +2025-03-11 17:57:17,996 Train Loss: 0.0165927, Val Loss: 0.0161253 +2025-03-11 17:57:17,996 Epoch 294/2000 +2025-03-11 17:57:43,335 Current Learning Rate: 0.0004529458 +2025-03-11 17:57:43,336 Train Loss: 0.0165866, Val Loss: 0.0161270 +2025-03-11 17:57:43,336 Epoch 295/2000 +2025-03-11 17:58:08,521 Current Learning Rate: 0.0004607705 +2025-03-11 17:58:08,524 Train Loss: 0.0165982, Val 
Loss: 0.0161028 +2025-03-11 17:58:08,524 Epoch 296/2000 +2025-03-11 17:58:33,437 Current Learning Rate: 0.0004686047 +2025-03-11 17:58:33,437 Train Loss: 0.0165719, Val Loss: 0.0161042 +2025-03-11 17:58:33,438 Epoch 297/2000 +2025-03-11 17:58:58,619 Current Learning Rate: 0.0004764468 +2025-03-11 17:58:58,623 Train Loss: 0.0165968, Val Loss: 0.0160962 +2025-03-11 17:58:58,623 Epoch 298/2000 +2025-03-11 17:59:23,706 Current Learning Rate: 0.0004842946 +2025-03-11 17:59:23,707 Train Loss: 0.0165639, Val Loss: 0.0161174 +2025-03-11 17:59:23,707 Epoch 299/2000 +2025-03-11 17:59:49,342 Current Learning Rate: 0.0004921463 +2025-03-11 17:59:49,343 Train Loss: 0.0165710, Val Loss: 0.0161042 +2025-03-11 17:59:49,343 Epoch 300/2000 +2025-03-11 18:00:15,279 Current Learning Rate: 0.0005000000 +2025-03-11 18:00:15,280 Train Loss: 0.0165688, Val Loss: 0.0161044 +2025-03-11 18:00:15,280 Epoch 301/2000 +2025-03-11 18:00:41,004 Current Learning Rate: 0.0005078537 +2025-03-11 18:00:41,008 Train Loss: 0.0165618, Val Loss: 0.0160808 +2025-03-11 18:00:41,008 Epoch 302/2000 +2025-03-11 18:01:05,669 Current Learning Rate: 0.0005157054 +2025-03-11 18:01:05,672 Train Loss: 0.0165902, Val Loss: 0.0160776 +2025-03-11 18:01:05,672 Epoch 303/2000 +2025-03-11 18:01:30,964 Current Learning Rate: 0.0005235532 +2025-03-11 18:01:30,965 Train Loss: 0.0165465, Val Loss: 0.0160800 +2025-03-11 18:01:30,965 Epoch 304/2000 +2025-03-11 18:01:56,426 Current Learning Rate: 0.0005313953 +2025-03-11 18:01:56,426 Train Loss: 0.0165523, Val Loss: 0.0160924 +2025-03-11 18:01:56,426 Epoch 305/2000 +2025-03-11 18:02:21,341 Current Learning Rate: 0.0005392295 +2025-03-11 18:02:21,342 Train Loss: 0.0165597, Val Loss: 0.0160778 +2025-03-11 18:02:21,342 Epoch 306/2000 +2025-03-11 18:02:45,946 Current Learning Rate: 0.0005470542 +2025-03-11 18:02:45,955 Train Loss: 0.0165469, Val Loss: 0.0160715 +2025-03-11 18:02:45,957 Epoch 307/2000 +2025-03-11 18:03:10,454 Current Learning Rate: 0.0005548672 +2025-03-11 
18:03:10,455 Train Loss: 0.0165493, Val Loss: 0.0160917 +2025-03-11 18:03:10,455 Epoch 308/2000 +2025-03-11 18:03:36,033 Current Learning Rate: 0.0005626666 +2025-03-11 18:03:36,034 Train Loss: 0.0165409, Val Loss: 0.0160946 +2025-03-11 18:03:36,034 Epoch 309/2000 +2025-03-11 18:04:00,622 Current Learning Rate: 0.0005704506 +2025-03-11 18:04:00,646 Train Loss: 0.0165581, Val Loss: 0.0160552 +2025-03-11 18:04:00,646 Epoch 310/2000 +2025-03-11 18:04:25,636 Current Learning Rate: 0.0005782172 +2025-03-11 18:04:25,636 Train Loss: 0.0165307, Val Loss: 0.0161227 +2025-03-11 18:04:25,636 Epoch 311/2000 +2025-03-11 18:04:50,261 Current Learning Rate: 0.0005859646 +2025-03-11 18:04:50,261 Train Loss: 0.0165367, Val Loss: 0.0160930 +2025-03-11 18:04:50,262 Epoch 312/2000 +2025-03-11 18:05:15,133 Current Learning Rate: 0.0005936907 +2025-03-11 18:05:15,134 Train Loss: 0.0165336, Val Loss: 0.0161055 +2025-03-11 18:05:15,134 Epoch 313/2000 +2025-03-11 18:05:40,114 Current Learning Rate: 0.0006013936 +2025-03-11 18:05:40,140 Train Loss: 0.0165169, Val Loss: 0.0160398 +2025-03-11 18:05:40,141 Epoch 314/2000 +2025-03-11 18:06:04,538 Current Learning Rate: 0.0006090716 +2025-03-11 18:06:04,543 Train Loss: 0.0165587, Val Loss: 0.0160358 +2025-03-11 18:06:04,543 Epoch 315/2000 +2025-03-11 18:06:29,263 Current Learning Rate: 0.0006167227 +2025-03-11 18:06:29,264 Train Loss: 0.0165045, Val Loss: 0.0160488 +2025-03-11 18:06:29,264 Epoch 316/2000 +2025-03-11 18:06:54,168 Current Learning Rate: 0.0006243449 +2025-03-11 18:06:54,168 Train Loss: 0.0165138, Val Loss: 0.0160478 +2025-03-11 18:06:54,168 Epoch 317/2000 +2025-03-11 18:07:18,648 Current Learning Rate: 0.0006319365 +2025-03-11 18:07:18,652 Train Loss: 0.0165359, Val Loss: 0.0160241 +2025-03-11 18:07:18,652 Epoch 318/2000 +2025-03-11 18:07:43,844 Current Learning Rate: 0.0006394956 +2025-03-11 18:07:43,844 Train Loss: 0.0165031, Val Loss: 0.0160583 +2025-03-11 18:07:43,844 Epoch 319/2000 +2025-03-11 18:08:09,165 Current Learning 
Rate: 0.0006470202 +2025-03-11 18:08:09,166 Train Loss: 0.0165129, Val Loss: 0.0160302 +2025-03-11 18:08:09,166 Epoch 320/2000 +2025-03-11 18:08:34,265 Current Learning Rate: 0.0006545085 +2025-03-11 18:08:34,268 Train Loss: 0.0164990, Val Loss: 0.0160155 +2025-03-11 18:08:34,268 Epoch 321/2000 +2025-03-11 18:08:58,757 Current Learning Rate: 0.0006619587 +2025-03-11 18:08:58,760 Train Loss: 0.0165252, Val Loss: 0.0160114 +2025-03-11 18:08:58,760 Epoch 322/2000 +2025-03-11 18:09:23,767 Current Learning Rate: 0.0006693690 +2025-03-11 18:09:23,771 Train Loss: 0.0164890, Val Loss: 0.0160081 +2025-03-11 18:09:23,771 Epoch 323/2000 +2025-03-11 18:09:48,827 Current Learning Rate: 0.0006767374 +2025-03-11 18:09:48,827 Train Loss: 0.0164923, Val Loss: 0.0161069 +2025-03-11 18:09:48,828 Epoch 324/2000 +2025-03-11 18:10:14,136 Current Learning Rate: 0.0006840623 +2025-03-11 18:10:14,137 Train Loss: 0.0165031, Val Loss: 0.0160089 +2025-03-11 18:10:14,137 Epoch 325/2000 +2025-03-11 18:10:39,059 Current Learning Rate: 0.0006913417 +2025-03-11 18:10:39,062 Train Loss: 0.0164923, Val Loss: 0.0160034 +2025-03-11 18:10:39,063 Epoch 326/2000 +2025-03-11 18:11:04,064 Current Learning Rate: 0.0006985739 +2025-03-11 18:11:04,064 Train Loss: 0.0164845, Val Loss: 0.0160725 +2025-03-11 18:11:04,064 Epoch 327/2000 +2025-03-11 18:11:28,755 Current Learning Rate: 0.0007057572 +2025-03-11 18:11:28,756 Train Loss: 0.0164743, Val Loss: 0.0160432 +2025-03-11 18:11:28,756 Epoch 328/2000 +2025-03-11 18:11:53,676 Current Learning Rate: 0.0007128896 +2025-03-11 18:11:53,687 Train Loss: 0.0165001, Val Loss: 0.0159928 +2025-03-11 18:11:53,687 Epoch 329/2000 +2025-03-11 18:12:18,398 Current Learning Rate: 0.0007199696 +2025-03-11 18:12:18,399 Train Loss: 0.0164732, Val Loss: 0.0161354 +2025-03-11 18:12:18,399 Epoch 330/2000 +2025-03-11 18:12:43,023 Current Learning Rate: 0.0007269952 +2025-03-11 18:12:43,026 Train Loss: 0.0164799, Val Loss: 0.0159852 +2025-03-11 18:12:43,026 Epoch 331/2000 +2025-03-11 
18:13:08,514 Current Learning Rate: 0.0007339649 +2025-03-11 18:13:08,514 Train Loss: 0.0164732, Val Loss: 0.0160306 +2025-03-11 18:13:08,514 Epoch 332/2000 +2025-03-11 18:13:33,380 Current Learning Rate: 0.0007408768 +2025-03-11 18:13:33,380 Train Loss: 0.0164584, Val Loss: 0.0159941 +2025-03-11 18:13:33,380 Epoch 333/2000 +2025-03-11 18:13:58,162 Current Learning Rate: 0.0007477293 +2025-03-11 18:13:58,166 Train Loss: 0.0165392, Val Loss: 0.0159706 +2025-03-11 18:13:58,166 Epoch 334/2000 +2025-03-11 18:14:23,037 Current Learning Rate: 0.0007545207 +2025-03-11 18:14:23,038 Train Loss: 0.0164411, Val Loss: 0.0159740 +2025-03-11 18:14:23,038 Epoch 335/2000 +2025-03-11 18:14:47,839 Current Learning Rate: 0.0007612493 +2025-03-11 18:14:47,840 Train Loss: 0.0164542, Val Loss: 0.0159742 +2025-03-11 18:14:47,840 Epoch 336/2000 +2025-03-11 18:15:12,718 Current Learning Rate: 0.0007679134 +2025-03-11 18:15:12,719 Train Loss: 0.0164490, Val Loss: 0.0161860 +2025-03-11 18:15:12,719 Epoch 337/2000 +2025-03-11 18:15:38,342 Current Learning Rate: 0.0007745114 +2025-03-11 18:15:38,342 Train Loss: 0.0164584, Val Loss: 0.0159709 +2025-03-11 18:15:38,343 Epoch 338/2000 +2025-03-11 18:16:03,347 Current Learning Rate: 0.0007810417 +2025-03-11 18:16:03,347 Train Loss: 0.0164501, Val Loss: 0.0159722 +2025-03-11 18:16:03,348 Epoch 339/2000 +2025-03-11 18:16:28,168 Current Learning Rate: 0.0007875026 +2025-03-11 18:16:28,169 Train Loss: 0.0164609, Val Loss: 0.0159731 +2025-03-11 18:16:28,169 Epoch 340/2000 +2025-03-11 18:16:53,550 Current Learning Rate: 0.0007938926 +2025-03-11 18:16:53,553 Train Loss: 0.0164511, Val Loss: 0.0159539 +2025-03-11 18:16:53,554 Epoch 341/2000 +2025-03-11 18:17:18,994 Current Learning Rate: 0.0008002101 +2025-03-11 18:17:18,994 Train Loss: 0.0164316, Val Loss: 0.0159898 +2025-03-11 18:17:18,995 Epoch 342/2000 +2025-03-11 18:17:43,900 Current Learning Rate: 0.0008064535 +2025-03-11 18:17:43,901 Train Loss: 0.0164539, Val Loss: 0.0159608 +2025-03-11 
18:17:43,901 Epoch 343/2000 +2025-03-11 18:18:08,653 Current Learning Rate: 0.0008126213 +2025-03-11 18:18:08,654 Train Loss: 0.0164450, Val Loss: 0.0159930 +2025-03-11 18:18:08,654 Epoch 344/2000 +2025-03-11 18:18:33,122 Current Learning Rate: 0.0008187120 +2025-03-11 18:18:33,125 Train Loss: 0.0164315, Val Loss: 0.0159420 +2025-03-11 18:18:33,126 Epoch 345/2000 +2025-03-11 18:18:59,162 Current Learning Rate: 0.0008247240 +2025-03-11 18:18:59,162 Train Loss: 0.0164339, Val Loss: 0.0159643 +2025-03-11 18:18:59,162 Epoch 346/2000 +2025-03-11 18:19:23,757 Current Learning Rate: 0.0008306559 +2025-03-11 18:19:23,757 Train Loss: 0.0164232, Val Loss: 0.0160096 +2025-03-11 18:19:23,757 Epoch 347/2000 +2025-03-11 18:19:48,450 Current Learning Rate: 0.0008365063 +2025-03-11 18:19:48,451 Train Loss: 0.0164309, Val Loss: 0.0159457 +2025-03-11 18:19:48,451 Epoch 348/2000 +2025-03-11 18:20:13,473 Current Learning Rate: 0.0008422736 +2025-03-11 18:20:13,474 Train Loss: 0.0164198, Val Loss: 0.0159923 +2025-03-11 18:20:13,474 Epoch 349/2000 +2025-03-11 18:20:38,009 Current Learning Rate: 0.0008479564 +2025-03-11 18:20:38,009 Train Loss: 0.0164148, Val Loss: 0.0160768 +2025-03-11 18:20:38,009 Epoch 350/2000 +2025-03-11 18:21:03,060 Current Learning Rate: 0.0008535534 +2025-03-11 18:21:03,061 Train Loss: 0.0164145, Val Loss: 0.0162290 +2025-03-11 18:21:03,061 Epoch 351/2000 +2025-03-11 18:21:27,966 Current Learning Rate: 0.0008590631 +2025-03-11 18:21:27,971 Train Loss: 0.0164454, Val Loss: 0.0159143 +2025-03-11 18:21:27,971 Epoch 352/2000 +2025-03-11 18:21:52,671 Current Learning Rate: 0.0008644843 +2025-03-11 18:21:52,671 Train Loss: 0.0164058, Val Loss: 0.0159189 +2025-03-11 18:21:52,671 Epoch 353/2000 +2025-03-11 18:22:17,794 Current Learning Rate: 0.0008698155 +2025-03-11 18:22:17,797 Train Loss: 0.0164087, Val Loss: 0.0159129 +2025-03-11 18:22:17,797 Epoch 354/2000 +2025-03-11 18:22:42,461 Current Learning Rate: 0.0008750555 +2025-03-11 18:22:42,462 Train Loss: 0.0164069, Val 
Loss: 0.0159698 +2025-03-11 18:22:42,462 Epoch 355/2000 +2025-03-11 18:23:07,757 Current Learning Rate: 0.0008802030 +2025-03-11 18:23:07,758 Train Loss: 0.0164314, Val Loss: 0.0160631 +2025-03-11 18:23:07,758 Epoch 356/2000 +2025-03-11 18:23:32,184 Current Learning Rate: 0.0008852566 +2025-03-11 18:23:32,184 Train Loss: 0.0163853, Val Loss: 0.0159352 +2025-03-11 18:23:32,184 Epoch 357/2000 +2025-03-11 18:23:57,133 Current Learning Rate: 0.0008902152 +2025-03-11 18:23:57,137 Train Loss: 0.0164153, Val Loss: 0.0159048 +2025-03-11 18:23:57,137 Epoch 358/2000 +2025-03-11 18:24:21,399 Current Learning Rate: 0.0008950775 +2025-03-11 18:24:21,402 Train Loss: 0.0163902, Val Loss: 0.0158905 +2025-03-11 18:24:21,402 Epoch 359/2000 +2025-03-11 18:24:46,555 Current Learning Rate: 0.0008998423 +2025-03-11 18:24:46,555 Train Loss: 0.0163890, Val Loss: 0.0159541 +2025-03-11 18:24:46,556 Epoch 360/2000 +2025-03-11 18:25:11,414 Current Learning Rate: 0.0009045085 +2025-03-11 18:25:11,416 Train Loss: 0.0163858, Val Loss: 0.0158996 +2025-03-11 18:25:11,416 Epoch 361/2000 +2025-03-11 18:25:36,388 Current Learning Rate: 0.0009090749 +2025-03-11 18:25:36,391 Train Loss: 0.0163911, Val Loss: 0.0158894 +2025-03-11 18:25:36,392 Epoch 362/2000 +2025-03-11 18:26:01,058 Current Learning Rate: 0.0009135403 +2025-03-11 18:26:01,061 Train Loss: 0.0163764, Val Loss: 0.0158884 +2025-03-11 18:26:01,061 Epoch 363/2000 +2025-03-11 18:26:25,638 Current Learning Rate: 0.0009179037 +2025-03-11 18:26:25,638 Train Loss: 0.0163871, Val Loss: 0.0159989 +2025-03-11 18:26:25,638 Epoch 364/2000 +2025-03-11 18:26:49,926 Current Learning Rate: 0.0009221640 +2025-03-11 18:26:49,927 Train Loss: 0.0163948, Val Loss: 0.0158920 +2025-03-11 18:26:49,928 Epoch 365/2000 +2025-03-11 18:27:15,035 Current Learning Rate: 0.0009263201 +2025-03-11 18:27:15,040 Train Loss: 0.0164004, Val Loss: 0.0158777 +2025-03-11 18:27:15,040 Epoch 366/2000 +2025-03-11 18:27:39,763 Current Learning Rate: 0.0009303710 +2025-03-11 
18:27:39,764 Train Loss: 0.0163439, Val Loss: 0.0159188 +2025-03-11 18:27:39,765 Epoch 367/2000 +2025-03-11 18:28:04,762 Current Learning Rate: 0.0009343158 +2025-03-11 18:28:04,762 Train Loss: 0.0163887, Val Loss: 0.0158817 +2025-03-11 18:28:04,763 Epoch 368/2000 +2025-03-11 18:28:29,990 Current Learning Rate: 0.0009381533 +2025-03-11 18:28:29,994 Train Loss: 0.0163866, Val Loss: 0.0158639 +2025-03-11 18:28:29,994 Epoch 369/2000 +2025-03-11 18:28:54,983 Current Learning Rate: 0.0009418828 +2025-03-11 18:28:54,983 Train Loss: 0.0163420, Val Loss: 0.0159946 +2025-03-11 18:28:54,984 Epoch 370/2000 +2025-03-11 18:29:20,154 Current Learning Rate: 0.0009455033 +2025-03-11 18:29:20,158 Train Loss: 0.0163690, Val Loss: 0.0158591 +2025-03-11 18:29:20,158 Epoch 371/2000 +2025-03-11 18:29:44,678 Current Learning Rate: 0.0009490138 +2025-03-11 18:29:44,678 Train Loss: 0.0163654, Val Loss: 0.0159637 +2025-03-11 18:29:44,678 Epoch 372/2000 +2025-03-11 18:30:09,612 Current Learning Rate: 0.0009524135 +2025-03-11 18:30:09,613 Train Loss: 0.0163662, Val Loss: 0.0159866 +2025-03-11 18:30:09,613 Epoch 373/2000 +2025-03-11 18:30:34,490 Current Learning Rate: 0.0009557016 +2025-03-11 18:30:34,493 Train Loss: 0.0163463, Val Loss: 0.0158443 +2025-03-11 18:30:34,493 Epoch 374/2000 +2025-03-11 18:30:59,864 Current Learning Rate: 0.0009588773 +2025-03-11 18:30:59,865 Train Loss: 0.0163740, Val Loss: 0.0160528 +2025-03-11 18:30:59,868 Epoch 375/2000 +2025-03-11 18:31:25,769 Current Learning Rate: 0.0009619398 +2025-03-11 18:31:25,772 Train Loss: 0.0163335, Val Loss: 0.0158434 +2025-03-11 18:31:25,772 Epoch 376/2000 +2025-03-11 18:31:50,502 Current Learning Rate: 0.0009648882 +2025-03-11 18:31:50,503 Train Loss: 0.0163429, Val Loss: 0.0161110 +2025-03-11 18:31:50,503 Epoch 377/2000 +2025-03-11 18:32:15,492 Current Learning Rate: 0.0009677220 +2025-03-11 18:32:15,493 Train Loss: 0.0163550, Val Loss: 0.0158914 +2025-03-11 18:32:15,493 Epoch 378/2000 +2025-03-11 18:32:41,473 Current Learning 
Rate: 0.0009704404 +2025-03-11 18:32:41,477 Train Loss: 0.0163271, Val Loss: 0.0158347 +2025-03-11 18:32:41,477 Epoch 379/2000 +2025-03-11 18:33:06,641 Current Learning Rate: 0.0009730427 +2025-03-11 18:33:06,641 Train Loss: 0.0163279, Val Loss: 0.0160404 +2025-03-11 18:33:06,642 Epoch 380/2000 +2025-03-11 18:33:32,215 Current Learning Rate: 0.0009755283 +2025-03-11 18:33:32,215 Train Loss: 0.0163615, Val Loss: 0.0158572 +2025-03-11 18:33:32,216 Epoch 381/2000 +2025-03-11 18:33:57,135 Current Learning Rate: 0.0009778965 +2025-03-11 18:33:57,135 Train Loss: 0.0163371, Val Loss: 0.0159233 +2025-03-11 18:33:57,135 Epoch 382/2000 +2025-03-11 18:34:22,721 Current Learning Rate: 0.0009801468 +2025-03-11 18:34:22,722 Train Loss: 0.0163395, Val Loss: 0.0159593 +2025-03-11 18:34:22,722 Epoch 383/2000 +2025-03-11 18:34:47,991 Current Learning Rate: 0.0009822787 +2025-03-11 18:34:47,991 Train Loss: 0.0163063, Val Loss: 0.0159257 +2025-03-11 18:34:47,992 Epoch 384/2000 +2025-03-11 18:35:12,983 Current Learning Rate: 0.0009842916 +2025-03-11 18:35:12,987 Train Loss: 0.0163369, Val Loss: 0.0158315 +2025-03-11 18:35:12,987 Epoch 385/2000 +2025-03-11 18:35:38,029 Current Learning Rate: 0.0009861850 +2025-03-11 18:35:38,032 Train Loss: 0.0163070, Val Loss: 0.0158069 +2025-03-11 18:35:38,033 Epoch 386/2000 +2025-03-11 18:36:02,889 Current Learning Rate: 0.0009879584 +2025-03-11 18:36:02,890 Train Loss: 0.0163219, Val Loss: 0.0158420 +2025-03-11 18:36:02,890 Epoch 387/2000 +2025-03-11 18:36:28,553 Current Learning Rate: 0.0009896114 +2025-03-11 18:36:28,554 Train Loss: 0.0163197, Val Loss: 0.0158221 +2025-03-11 18:36:28,554 Epoch 388/2000 +2025-03-11 18:36:53,493 Current Learning Rate: 0.0009911436 +2025-03-11 18:36:53,493 Train Loss: 0.0163154, Val Loss: 0.0158225 +2025-03-11 18:36:53,493 Epoch 389/2000 +2025-03-11 18:37:18,598 Current Learning Rate: 0.0009925547 +2025-03-11 18:37:18,602 Train Loss: 0.0163093, Val Loss: 0.0158056 +2025-03-11 18:37:18,602 Epoch 390/2000 +2025-03-11 
18:37:43,731 Current Learning Rate: 0.0009938442 +2025-03-11 18:37:43,731 Train Loss: 0.0163186, Val Loss: 0.0158186 +2025-03-11 18:37:43,732 Epoch 391/2000 +2025-03-11 18:38:08,527 Current Learning Rate: 0.0009950118 +2025-03-11 18:38:08,528 Train Loss: 0.0163334, Val Loss: 0.0160222 +2025-03-11 18:38:08,528 Epoch 392/2000 +2025-03-11 18:38:33,746 Current Learning Rate: 0.0009960574 +2025-03-11 18:38:33,749 Train Loss: 0.0163071, Val Loss: 0.0157880 +2025-03-11 18:38:33,749 Epoch 393/2000 +2025-03-11 18:38:58,846 Current Learning Rate: 0.0009969805 +2025-03-11 18:38:58,847 Train Loss: 0.0162872, Val Loss: 0.0158164 +2025-03-11 18:38:58,847 Epoch 394/2000 +2025-03-11 18:39:23,866 Current Learning Rate: 0.0009977810 +2025-03-11 18:39:23,867 Train Loss: 0.0162851, Val Loss: 0.0158007 +2025-03-11 18:39:23,867 Epoch 395/2000 +2025-03-11 18:39:49,039 Current Learning Rate: 0.0009984587 +2025-03-11 18:39:49,040 Train Loss: 0.0162807, Val Loss: 0.0157944 +2025-03-11 18:39:49,040 Epoch 396/2000 +2025-03-11 18:40:14,638 Current Learning Rate: 0.0009990134 +2025-03-11 18:40:14,639 Train Loss: 0.0163130, Val Loss: 0.0158027 +2025-03-11 18:40:14,639 Epoch 397/2000 +2025-03-11 18:40:39,833 Current Learning Rate: 0.0009994449 +2025-03-11 18:40:39,834 Train Loss: 0.0162861, Val Loss: 0.0158254 +2025-03-11 18:40:39,834 Epoch 398/2000 +2025-03-11 18:41:05,184 Current Learning Rate: 0.0009997533 +2025-03-11 18:41:05,185 Train Loss: 0.0162802, Val Loss: 0.0158035 +2025-03-11 18:41:05,185 Epoch 399/2000 +2025-03-11 18:41:30,336 Current Learning Rate: 0.0009999383 +2025-03-11 18:41:30,341 Train Loss: 0.0162822, Val Loss: 0.0157855 +2025-03-11 18:41:30,341 Epoch 400/2000 +2025-03-11 18:41:55,386 Current Learning Rate: 0.0010000000 +2025-03-11 18:41:55,386 Train Loss: 0.0162946, Val Loss: 0.0158289 +2025-03-11 18:41:55,386 Epoch 401/2000 +2025-03-11 18:42:20,924 Current Learning Rate: 0.0009999383 +2025-03-11 18:42:20,927 Train Loss: 0.0162916, Val Loss: 0.0157722 +2025-03-11 
18:42:20,928 Epoch 402/2000 +2025-03-11 18:42:45,779 Current Learning Rate: 0.0009997533 +2025-03-11 18:42:45,780 Train Loss: 0.0162464, Val Loss: 0.0158380 +2025-03-11 18:42:45,780 Epoch 403/2000 +2025-03-11 18:43:11,303 Current Learning Rate: 0.0009994449 +2025-03-11 18:43:11,307 Train Loss: 0.0162789, Val Loss: 0.0157697 +2025-03-11 18:43:11,307 Epoch 404/2000 +2025-03-11 18:43:36,101 Current Learning Rate: 0.0009990134 +2025-03-11 18:43:36,102 Train Loss: 0.0162928, Val Loss: 0.0159696 +2025-03-11 18:43:36,102 Epoch 405/2000 +2025-03-11 18:44:01,284 Current Learning Rate: 0.0009984587 +2025-03-11 18:44:01,284 Train Loss: 0.0162557, Val Loss: 0.0157850 +2025-03-11 18:44:01,284 Epoch 406/2000 +2025-03-11 18:44:26,034 Current Learning Rate: 0.0009977810 +2025-03-11 18:44:26,034 Train Loss: 0.0162886, Val Loss: 0.0158322 +2025-03-11 18:44:26,034 Epoch 407/2000 +2025-03-11 18:44:51,341 Current Learning Rate: 0.0009969805 +2025-03-11 18:44:51,342 Train Loss: 0.0162353, Val Loss: 0.0158127 +2025-03-11 18:44:51,342 Epoch 408/2000 +2025-03-11 18:45:16,982 Current Learning Rate: 0.0009960574 +2025-03-11 18:45:16,982 Train Loss: 0.0162646, Val Loss: 0.0158442 +2025-03-11 18:45:16,982 Epoch 409/2000 +2025-03-11 18:45:41,753 Current Learning Rate: 0.0009950118 +2025-03-11 18:45:41,754 Train Loss: 0.0162357, Val Loss: 0.0162816 +2025-03-11 18:45:41,754 Epoch 410/2000 +2025-03-11 18:46:07,579 Current Learning Rate: 0.0009938442 +2025-03-11 18:46:07,583 Train Loss: 0.0163068, Val Loss: 0.0157389 +2025-03-11 18:46:07,583 Epoch 411/2000 +2025-03-11 18:46:32,710 Current Learning Rate: 0.0009925547 +2025-03-11 18:46:32,710 Train Loss: 0.0162260, Val Loss: 0.0157653 +2025-03-11 18:46:32,710 Epoch 412/2000 +2025-03-11 18:46:58,190 Current Learning Rate: 0.0009911436 +2025-03-11 18:46:58,190 Train Loss: 0.0162600, Val Loss: 0.0157584 +2025-03-11 18:46:58,191 Epoch 413/2000 +2025-03-11 18:47:23,185 Current Learning Rate: 0.0009896114 +2025-03-11 18:47:23,185 Train Loss: 0.0162316, Val 
Loss: 0.0157647 +2025-03-11 18:47:23,185 Epoch 414/2000 +2025-03-11 18:47:48,747 Current Learning Rate: 0.0009879584 +2025-03-11 18:47:48,747 Train Loss: 0.0162369, Val Loss: 0.0158353 +2025-03-11 18:47:48,747 Epoch 415/2000 +2025-03-11 18:48:13,972 Current Learning Rate: 0.0009861850 +2025-03-11 18:48:13,973 Train Loss: 0.0162379, Val Loss: 0.0157507 +2025-03-11 18:48:13,973 Epoch 416/2000 +2025-03-11 18:48:38,490 Current Learning Rate: 0.0009842916 +2025-03-11 18:48:38,490 Train Loss: 0.0162391, Val Loss: 0.0158934 +2025-03-11 18:48:38,490 Epoch 417/2000 +2025-03-11 18:49:03,163 Current Learning Rate: 0.0009822787 +2025-03-11 18:49:03,164 Train Loss: 0.0162592, Val Loss: 0.0157975 +2025-03-11 18:49:03,164 Epoch 418/2000 +2025-03-11 18:49:27,367 Current Learning Rate: 0.0009801468 +2025-03-11 18:49:27,367 Train Loss: 0.0162174, Val Loss: 0.0158072 +2025-03-11 18:49:27,368 Epoch 419/2000 +2025-03-11 18:49:51,682 Current Learning Rate: 0.0009778965 +2025-03-11 18:49:51,682 Train Loss: 0.0162126, Val Loss: 0.0157722 +2025-03-11 18:49:51,682 Epoch 420/2000 +2025-03-11 18:50:16,421 Current Learning Rate: 0.0009755283 +2025-03-11 18:50:16,422 Train Loss: 0.0162190, Val Loss: 0.0157486 +2025-03-11 18:50:16,422 Epoch 421/2000 +2025-03-11 18:50:40,933 Current Learning Rate: 0.0009730427 +2025-03-11 18:50:40,934 Train Loss: 0.0162225, Val Loss: 0.0157411 +2025-03-11 18:50:40,934 Epoch 422/2000 +2025-03-11 18:51:05,467 Current Learning Rate: 0.0009704404 +2025-03-11 18:51:05,467 Train Loss: 0.0162129, Val Loss: 0.0157507 +2025-03-11 18:51:05,468 Epoch 423/2000 +2025-03-11 18:51:30,176 Current Learning Rate: 0.0009677220 +2025-03-11 18:51:30,177 Train Loss: 0.0162690, Val Loss: 0.0158038 +2025-03-11 18:51:30,178 Epoch 424/2000 +2025-03-11 18:51:54,968 Current Learning Rate: 0.0009648882 +2025-03-11 18:51:54,972 Train Loss: 0.0161915, Val Loss: 0.0157246 +2025-03-11 18:51:54,973 Epoch 425/2000 +2025-03-11 18:52:19,641 Current Learning Rate: 0.0009619398 +2025-03-11 
18:52:19,641 Train Loss: 0.0162114, Val Loss: 0.0157335 +2025-03-11 18:52:19,642 Epoch 426/2000 +2025-03-11 18:52:44,843 Current Learning Rate: 0.0009588773 +2025-03-11 18:52:44,846 Train Loss: 0.0162013, Val Loss: 0.0157144 +2025-03-11 18:52:44,846 Epoch 427/2000 +2025-03-11 18:53:10,259 Current Learning Rate: 0.0009557016 +2025-03-11 18:53:10,259 Train Loss: 0.0162015, Val Loss: 0.0157311 +2025-03-11 18:53:10,259 Epoch 428/2000 +2025-03-11 18:53:35,317 Current Learning Rate: 0.0009524135 +2025-03-11 18:53:35,321 Train Loss: 0.0162234, Val Loss: 0.0157087 +2025-03-11 18:53:35,321 Epoch 429/2000 +2025-03-11 18:54:00,808 Current Learning Rate: 0.0009490138 +2025-03-11 18:54:00,809 Train Loss: 0.0161719, Val Loss: 0.0158362 +2025-03-11 18:54:00,809 Epoch 430/2000 +2025-03-11 18:54:26,644 Current Learning Rate: 0.0009455033 +2025-03-11 18:54:26,648 Train Loss: 0.0161864, Val Loss: 0.0156985 +2025-03-11 18:54:26,649 Epoch 431/2000 +2025-03-11 18:54:51,008 Current Learning Rate: 0.0009418828 +2025-03-11 18:54:51,008 Train Loss: 0.0162049, Val Loss: 0.0156998 +2025-03-11 18:54:51,009 Epoch 432/2000 +2025-03-11 18:55:15,498 Current Learning Rate: 0.0009381533 +2025-03-11 18:55:15,498 Train Loss: 0.0162173, Val Loss: 0.0157951 +2025-03-11 18:55:15,499 Epoch 433/2000 +2025-03-11 18:55:40,765 Current Learning Rate: 0.0009343158 +2025-03-11 18:55:40,766 Train Loss: 0.0161673, Val Loss: 0.0157024 +2025-03-11 18:55:40,766 Epoch 434/2000 +2025-03-11 18:56:06,241 Current Learning Rate: 0.0009303710 +2025-03-11 18:56:06,241 Train Loss: 0.0161868, Val Loss: 0.0157202 +2025-03-11 18:56:06,241 Epoch 435/2000 +2025-03-11 18:56:30,582 Current Learning Rate: 0.0009263201 +2025-03-11 18:56:30,583 Train Loss: 0.0161614, Val Loss: 0.0157144 +2025-03-11 18:56:30,583 Epoch 436/2000 +2025-03-11 18:56:55,445 Current Learning Rate: 0.0009221640 +2025-03-11 18:56:55,446 Train Loss: 0.0161985, Val Loss: 0.0157161 +2025-03-11 18:56:55,446 Epoch 437/2000 +2025-03-11 18:57:20,565 Current Learning 
Rate: 0.0009179037 +2025-03-11 18:57:20,566 Train Loss: 0.0161522, Val Loss: 0.0157242 +2025-03-11 18:57:20,566 Epoch 438/2000 +2025-03-11 18:57:45,287 Current Learning Rate: 0.0009135403 +2025-03-11 18:57:45,288 Train Loss: 0.0161762, Val Loss: 0.0157563 +2025-03-11 18:57:45,288 Epoch 439/2000 +2025-03-11 18:58:09,875 Current Learning Rate: 0.0009090749 +2025-03-11 18:58:09,881 Train Loss: 0.0161831, Val Loss: 0.0156757 +2025-03-11 18:58:09,881 Epoch 440/2000 +2025-03-11 18:58:35,081 Current Learning Rate: 0.0009045085 +2025-03-11 18:58:35,082 Train Loss: 0.0161943, Val Loss: 0.0157846 +2025-03-11 18:58:35,082 Epoch 441/2000 +2025-03-11 18:58:59,993 Current Learning Rate: 0.0008998423 +2025-03-11 18:58:59,997 Train Loss: 0.0161709, Val Loss: 0.0156628 +2025-03-11 18:58:59,997 Epoch 442/2000 +2025-03-11 18:59:25,163 Current Learning Rate: 0.0008950775 +2025-03-11 18:59:25,164 Train Loss: 0.0161348, Val Loss: 0.0156710 +2025-03-11 18:59:25,164 Epoch 443/2000 +2025-03-11 18:59:50,266 Current Learning Rate: 0.0008902152 +2025-03-11 18:59:50,266 Train Loss: 0.0161592, Val Loss: 0.0157551 +2025-03-11 18:59:50,266 Epoch 444/2000 +2025-03-11 19:00:15,512 Current Learning Rate: 0.0008852566 +2025-03-11 19:00:15,512 Train Loss: 0.0161502, Val Loss: 0.0157399 +2025-03-11 19:00:15,513 Epoch 445/2000 +2025-03-11 19:00:40,478 Current Learning Rate: 0.0008802030 +2025-03-11 19:00:40,478 Train Loss: 0.0161589, Val Loss: 0.0156772 +2025-03-11 19:00:40,479 Epoch 446/2000 +2025-03-11 19:01:05,669 Current Learning Rate: 0.0008750555 +2025-03-11 19:01:05,670 Train Loss: 0.0161696, Val Loss: 0.0157170 +2025-03-11 19:01:05,670 Epoch 447/2000 +2025-03-11 19:01:31,197 Current Learning Rate: 0.0008698155 +2025-03-11 19:01:31,198 Train Loss: 0.0161352, Val Loss: 0.0157261 +2025-03-11 19:01:31,198 Epoch 448/2000 +2025-03-11 19:01:55,725 Current Learning Rate: 0.0008644843 +2025-03-11 19:01:55,725 Train Loss: 0.0161347, Val Loss: 0.0156767 +2025-03-11 19:01:55,725 Epoch 449/2000 +2025-03-11 
19:02:20,210 Current Learning Rate: 0.0008590631 +2025-03-11 19:02:20,211 Train Loss: 0.0161376, Val Loss: 0.0157554 +2025-03-11 19:02:20,211 Epoch 450/2000 +2025-03-11 19:02:45,076 Current Learning Rate: 0.0008535534 +2025-03-11 19:02:45,076 Train Loss: 0.0161472, Val Loss: 0.0156772 +2025-03-11 19:02:45,077 Epoch 451/2000 +2025-03-11 19:03:09,525 Current Learning Rate: 0.0008479564 +2025-03-11 19:03:09,526 Train Loss: 0.0161485, Val Loss: 0.0157158 +2025-03-11 19:03:09,526 Epoch 452/2000 +2025-03-11 19:03:34,204 Current Learning Rate: 0.0008422736 +2025-03-11 19:03:34,204 Train Loss: 0.0161208, Val Loss: 0.0156939 +2025-03-11 19:03:34,205 Epoch 453/2000 +2025-03-11 19:03:59,076 Current Learning Rate: 0.0008365063 +2025-03-11 19:03:59,077 Train Loss: 0.0161238, Val Loss: 0.0157362 +2025-03-11 19:03:59,077 Epoch 454/2000 +2025-03-11 19:04:23,559 Current Learning Rate: 0.0008306559 +2025-03-11 19:04:23,560 Train Loss: 0.0161354, Val Loss: 0.0156791 +2025-03-11 19:04:23,560 Epoch 455/2000 +2025-03-11 19:04:47,957 Current Learning Rate: 0.0008247240 +2025-03-11 19:04:47,961 Train Loss: 0.0161176, Val Loss: 0.0156596 +2025-03-11 19:04:47,961 Epoch 456/2000 +2025-03-11 19:05:12,418 Current Learning Rate: 0.0008187120 +2025-03-11 19:05:12,418 Train Loss: 0.0161269, Val Loss: 0.0156883 +2025-03-11 19:05:12,418 Epoch 457/2000 +2025-03-11 19:05:36,818 Current Learning Rate: 0.0008126213 +2025-03-11 19:05:36,819 Train Loss: 0.0161154, Val Loss: 0.0156725 +2025-03-11 19:05:36,819 Epoch 458/2000 +2025-03-11 19:06:01,255 Current Learning Rate: 0.0008064535 +2025-03-11 19:06:01,260 Train Loss: 0.0161175, Val Loss: 0.0156480 +2025-03-11 19:06:01,260 Epoch 459/2000 +2025-03-11 19:06:25,977 Current Learning Rate: 0.0008002101 +2025-03-11 19:06:25,981 Train Loss: 0.0161164, Val Loss: 0.0156455 +2025-03-11 19:06:25,981 Epoch 460/2000 +2025-03-11 19:06:50,547 Current Learning Rate: 0.0007938926 +2025-03-11 19:06:50,550 Train Loss: 0.0161082, Val Loss: 0.0156411 +2025-03-11 
19:06:50,551 Epoch 461/2000 +2025-03-11 19:07:15,358 Current Learning Rate: 0.0007875026 +2025-03-11 19:07:15,358 Train Loss: 0.0161027, Val Loss: 0.0157396 +2025-03-11 19:07:15,358 Epoch 462/2000 +2025-03-11 19:07:40,406 Current Learning Rate: 0.0007810417 +2025-03-11 19:07:40,407 Train Loss: 0.0161083, Val Loss: 0.0157164 +2025-03-11 19:07:40,408 Epoch 463/2000 +2025-03-11 19:08:05,165 Current Learning Rate: 0.0007745114 +2025-03-11 19:08:05,168 Train Loss: 0.0160977, Val Loss: 0.0156255 +2025-03-11 19:08:05,168 Epoch 464/2000 +2025-03-11 19:08:29,931 Current Learning Rate: 0.0007679134 +2025-03-11 19:08:29,932 Train Loss: 0.0161052, Val Loss: 0.0156905 +2025-03-11 19:08:29,932 Epoch 465/2000 +2025-03-11 19:08:54,749 Current Learning Rate: 0.0007612493 +2025-03-11 19:08:54,749 Train Loss: 0.0160924, Val Loss: 0.0156325 +2025-03-11 19:08:54,749 Epoch 466/2000 +2025-03-11 19:09:19,428 Current Learning Rate: 0.0007545207 +2025-03-11 19:09:19,428 Train Loss: 0.0160991, Val Loss: 0.0156586 +2025-03-11 19:09:19,429 Epoch 467/2000 +2025-03-11 19:09:44,335 Current Learning Rate: 0.0007477293 +2025-03-11 19:09:44,336 Train Loss: 0.0160932, Val Loss: 0.0156368 +2025-03-11 19:09:44,336 Epoch 468/2000 +2025-03-11 19:10:09,449 Current Learning Rate: 0.0007408768 +2025-03-11 19:10:09,452 Train Loss: 0.0160896, Val Loss: 0.0156052 +2025-03-11 19:10:09,452 Epoch 469/2000 +2025-03-11 19:10:34,349 Current Learning Rate: 0.0007339649 +2025-03-11 19:10:34,349 Train Loss: 0.0160813, Val Loss: 0.0156201 +2025-03-11 19:10:34,350 Epoch 470/2000 +2025-03-11 19:10:58,914 Current Learning Rate: 0.0007269952 +2025-03-11 19:10:58,914 Train Loss: 0.0160820, Val Loss: 0.0156132 +2025-03-11 19:10:58,914 Epoch 471/2000 +2025-03-11 19:11:23,998 Current Learning Rate: 0.0007199696 +2025-03-11 19:11:23,999 Train Loss: 0.0160860, Val Loss: 0.0156973 +2025-03-11 19:11:23,999 Epoch 472/2000 +2025-03-11 19:11:48,592 Current Learning Rate: 0.0007128896 +2025-03-11 19:11:48,593 Train Loss: 0.0160820, Val 
Loss: 0.0157239 +2025-03-11 19:11:48,593 Epoch 473/2000 +2025-03-11 19:12:12,984 Current Learning Rate: 0.0007057572 +2025-03-11 19:12:12,985 Train Loss: 0.0160711, Val Loss: 0.0156632 +2025-03-11 19:12:12,985 Epoch 474/2000 +2025-03-11 19:12:37,205 Current Learning Rate: 0.0006985739 +2025-03-11 19:12:37,206 Train Loss: 0.0160808, Val Loss: 0.0156282 +2025-03-11 19:12:37,206 Epoch 475/2000 +2025-03-11 19:13:01,815 Current Learning Rate: 0.0006913417 +2025-03-11 19:13:01,815 Train Loss: 0.0160659, Val Loss: 0.0156139 +2025-03-11 19:13:01,816 Epoch 476/2000 +2025-03-11 19:13:26,172 Current Learning Rate: 0.0006840623 +2025-03-11 19:13:26,172 Train Loss: 0.0160724, Val Loss: 0.0156394 +2025-03-11 19:13:26,172 Epoch 477/2000 +2025-03-11 19:13:50,615 Current Learning Rate: 0.0006767374 +2025-03-11 19:13:50,615 Train Loss: 0.0160623, Val Loss: 0.0156263 +2025-03-11 19:13:50,615 Epoch 478/2000 +2025-03-11 19:14:15,320 Current Learning Rate: 0.0006693690 +2025-03-11 19:14:15,321 Train Loss: 0.0160675, Val Loss: 0.0156071 +2025-03-11 19:14:15,321 Epoch 479/2000 +2025-03-11 19:14:39,767 Current Learning Rate: 0.0006619587 +2025-03-11 19:14:39,767 Train Loss: 0.0160599, Val Loss: 0.0156467 +2025-03-11 19:14:39,767 Epoch 480/2000 +2025-03-11 19:15:04,434 Current Learning Rate: 0.0006545085 +2025-03-11 19:15:04,435 Train Loss: 0.0160617, Val Loss: 0.0156063 +2025-03-11 19:15:04,435 Epoch 481/2000 +2025-03-11 19:15:29,426 Current Learning Rate: 0.0006470202 +2025-03-11 19:15:29,427 Train Loss: 0.0160563, Val Loss: 0.0156561 +2025-03-11 19:15:29,427 Epoch 482/2000 +2025-03-11 19:15:54,161 Current Learning Rate: 0.0006394956 +2025-03-11 19:15:54,161 Train Loss: 0.0160544, Val Loss: 0.0156086 +2025-03-11 19:15:54,161 Epoch 483/2000 +2025-03-11 19:16:19,214 Current Learning Rate: 0.0006319365 +2025-03-11 19:16:19,215 Train Loss: 0.0160537, Val Loss: 0.0156304 +2025-03-11 19:16:19,215 Epoch 484/2000 +2025-03-11 19:16:44,010 Current Learning Rate: 0.0006243449 +2025-03-11 
19:16:44,011 Train Loss: 0.0160493, Val Loss: 0.0156529 +2025-03-11 19:16:44,011 Epoch 485/2000 +2025-03-11 19:17:08,717 Current Learning Rate: 0.0006167227 +2025-03-11 19:17:08,717 Train Loss: 0.0160459, Val Loss: 0.0156112 +2025-03-11 19:17:08,718 Epoch 486/2000 +2025-03-11 19:17:33,446 Current Learning Rate: 0.0006090716 +2025-03-11 19:17:33,446 Train Loss: 0.0160464, Val Loss: 0.0156126 +2025-03-11 19:17:33,447 Epoch 487/2000 +2025-03-11 19:17:58,571 Current Learning Rate: 0.0006013936 +2025-03-11 19:17:58,571 Train Loss: 0.0160429, Val Loss: 0.0156367 +2025-03-11 19:17:58,571 Epoch 488/2000 +2025-03-11 19:18:23,405 Current Learning Rate: 0.0005936907 +2025-03-11 19:18:23,405 Train Loss: 0.0160393, Val Loss: 0.0156061 +2025-03-11 19:18:23,406 Epoch 489/2000 +2025-03-11 19:18:48,081 Current Learning Rate: 0.0005859646 +2025-03-11 19:18:48,082 Train Loss: 0.0160393, Val Loss: 0.0156105 +2025-03-11 19:18:48,082 Epoch 490/2000 +2025-03-11 19:19:12,943 Current Learning Rate: 0.0005782172 +2025-03-11 19:19:12,944 Train Loss: 0.0160360, Val Loss: 0.0156212 +2025-03-11 19:19:12,944 Epoch 491/2000 +2025-03-11 19:19:37,499 Current Learning Rate: 0.0005704506 +2025-03-11 19:19:37,503 Train Loss: 0.0160333, Val Loss: 0.0156021 +2025-03-11 19:19:37,503 Epoch 492/2000 +2025-03-11 19:20:02,613 Current Learning Rate: 0.0005626666 +2025-03-11 19:20:02,614 Train Loss: 0.0160324, Val Loss: 0.0156094 +2025-03-11 19:20:02,614 Epoch 493/2000 +2025-03-11 19:20:27,906 Current Learning Rate: 0.0005548672 +2025-03-11 19:20:27,907 Train Loss: 0.0160289, Val Loss: 0.0156054 +2025-03-11 19:20:27,907 Epoch 494/2000 +2025-03-11 19:20:53,570 Current Learning Rate: 0.0005470542 +2025-03-11 19:20:53,574 Train Loss: 0.0160272, Val Loss: 0.0155994 +2025-03-11 19:20:53,574 Epoch 495/2000 +2025-03-11 19:21:19,233 Current Learning Rate: 0.0005392295 +2025-03-11 19:21:19,233 Train Loss: 0.0160250, Val Loss: 0.0156005 +2025-03-11 19:21:19,234 Epoch 496/2000 +2025-03-11 19:21:44,915 Current Learning 
Rate: 0.0005313953 +2025-03-11 19:21:44,918 Train Loss: 0.0160226, Val Loss: 0.0155957 +2025-03-11 19:21:44,919 Epoch 497/2000 +2025-03-11 19:22:09,714 Current Learning Rate: 0.0005235532 +2025-03-11 19:22:09,724 Train Loss: 0.0160206, Val Loss: 0.0155937 +2025-03-11 19:22:09,724 Epoch 498/2000 +2025-03-11 19:22:34,753 Current Learning Rate: 0.0005157054 +2025-03-11 19:22:34,756 Train Loss: 0.0160184, Val Loss: 0.0155912 +2025-03-11 19:22:34,757 Epoch 499/2000 +2025-03-11 19:22:59,289 Current Learning Rate: 0.0005078537 +2025-03-11 19:22:59,295 Train Loss: 0.0160163, Val Loss: 0.0155884 +2025-03-11 19:22:59,296 Epoch 500/2000 +2025-03-11 19:23:24,220 Current Learning Rate: 0.0005000000 +2025-03-11 19:23:24,225 Train Loss: 0.0160142, Val Loss: 0.0155859 +2025-03-11 19:23:24,225 Epoch 501/2000 +2025-03-11 19:23:49,576 Current Learning Rate: 0.0004921463 +2025-03-11 19:23:49,579 Train Loss: 0.0160121, Val Loss: 0.0155833 +2025-03-11 19:23:49,579 Epoch 502/2000 +2025-03-11 19:24:14,585 Current Learning Rate: 0.0004842946 +2025-03-11 19:24:14,588 Train Loss: 0.0160101, Val Loss: 0.0155809 +2025-03-11 19:24:14,589 Epoch 503/2000 +2025-03-11 19:24:39,692 Current Learning Rate: 0.0004764468 +2025-03-11 19:24:39,709 Train Loss: 0.0160080, Val Loss: 0.0155781 +2025-03-11 19:24:39,709 Epoch 504/2000 +2025-03-11 19:25:05,264 Current Learning Rate: 0.0004686047 +2025-03-11 19:25:05,268 Train Loss: 0.0160060, Val Loss: 0.0155757 +2025-03-11 19:25:05,268 Epoch 505/2000 +2025-03-11 19:25:30,371 Current Learning Rate: 0.0004607705 +2025-03-11 19:25:30,375 Train Loss: 0.0160040, Val Loss: 0.0155729 +2025-03-11 19:25:30,375 Epoch 506/2000 +2025-03-11 19:25:55,832 Current Learning Rate: 0.0004529458 +2025-03-11 19:25:55,837 Train Loss: 0.0160020, Val Loss: 0.0155703 +2025-03-11 19:25:55,837 Epoch 507/2000 +2025-03-11 19:26:20,680 Current Learning Rate: 0.0004451328 +2025-03-11 19:26:20,683 Train Loss: 0.0160001, Val Loss: 0.0155674 +2025-03-11 19:26:20,684 Epoch 508/2000 +2025-03-11 
19:26:45,779 Current Learning Rate: 0.0004373334 +2025-03-11 19:26:45,783 Train Loss: 0.0159981, Val Loss: 0.0155645 +2025-03-11 19:26:45,783 Epoch 509/2000 +2025-03-11 19:27:10,703 Current Learning Rate: 0.0004295494 +2025-03-11 19:27:10,707 Train Loss: 0.0159962, Val Loss: 0.0155615 +2025-03-11 19:27:10,707 Epoch 510/2000 +2025-03-11 19:27:35,771 Current Learning Rate: 0.0004217828 +2025-03-11 19:27:35,776 Train Loss: 0.0159943, Val Loss: 0.0155584 +2025-03-11 19:27:35,776 Epoch 511/2000 +2025-03-11 19:28:01,271 Current Learning Rate: 0.0004140354 +2025-03-11 19:28:01,275 Train Loss: 0.0159924, Val Loss: 0.0155552 +2025-03-11 19:28:01,275 Epoch 512/2000 +2025-03-11 19:28:27,305 Current Learning Rate: 0.0004063093 +2025-03-11 19:28:27,308 Train Loss: 0.0159906, Val Loss: 0.0155520 +2025-03-11 19:28:27,308 Epoch 513/2000 +2025-03-11 19:28:52,685 Current Learning Rate: 0.0003986064 +2025-03-11 19:28:52,688 Train Loss: 0.0159888, Val Loss: 0.0155488 +2025-03-11 19:28:52,689 Epoch 514/2000 +2025-03-11 19:29:17,896 Current Learning Rate: 0.0003909284 +2025-03-11 19:29:17,900 Train Loss: 0.0159870, Val Loss: 0.0155456 +2025-03-11 19:29:17,900 Epoch 515/2000 +2025-03-11 19:29:42,938 Current Learning Rate: 0.0003832773 +2025-03-11 19:29:42,941 Train Loss: 0.0159852, Val Loss: 0.0155425 +2025-03-11 19:29:42,942 Epoch 516/2000 +2025-03-11 19:30:08,347 Current Learning Rate: 0.0003756551 +2025-03-11 19:30:08,351 Train Loss: 0.0159834, Val Loss: 0.0155395 +2025-03-11 19:30:08,351 Epoch 517/2000 +2025-03-11 19:30:33,760 Current Learning Rate: 0.0003680635 +2025-03-11 19:30:33,765 Train Loss: 0.0159817, Val Loss: 0.0155364 +2025-03-11 19:30:33,766 Epoch 518/2000 +2025-03-11 19:30:58,690 Current Learning Rate: 0.0003605044 +2025-03-11 19:30:58,693 Train Loss: 0.0159800, Val Loss: 0.0155335 +2025-03-11 19:30:58,694 Epoch 519/2000 +2025-03-11 19:31:23,800 Current Learning Rate: 0.0003529798 +2025-03-11 19:31:23,803 Train Loss: 0.0159783, Val Loss: 0.0155306 +2025-03-11 
19:31:23,803 Epoch 520/2000 +2025-03-11 19:31:49,276 Current Learning Rate: 0.0003454915 +2025-03-11 19:31:49,280 Train Loss: 0.0159767, Val Loss: 0.0155278 +2025-03-11 19:31:49,280 Epoch 521/2000 +2025-03-11 19:32:14,600 Current Learning Rate: 0.0003380413 +2025-03-11 19:32:14,604 Train Loss: 0.0159751, Val Loss: 0.0155252 +2025-03-11 19:32:14,604 Epoch 522/2000 +2025-03-11 19:32:39,587 Current Learning Rate: 0.0003306310 +2025-03-11 19:32:39,591 Train Loss: 0.0159735, Val Loss: 0.0155226 +2025-03-11 19:32:39,591 Epoch 523/2000 +2025-03-11 19:33:04,754 Current Learning Rate: 0.0003232626 +2025-03-11 19:33:04,758 Train Loss: 0.0159720, Val Loss: 0.0155200 +2025-03-11 19:33:04,759 Epoch 524/2000 +2025-03-11 19:33:29,801 Current Learning Rate: 0.0003159377 +2025-03-11 19:33:29,805 Train Loss: 0.0159706, Val Loss: 0.0155174 +2025-03-11 19:33:29,805 Epoch 525/2000 +2025-03-11 19:33:54,530 Current Learning Rate: 0.0003086583 +2025-03-11 19:33:54,534 Train Loss: 0.0159692, Val Loss: 0.0155149 +2025-03-11 19:33:54,535 Epoch 526/2000 +2025-03-11 19:34:19,638 Current Learning Rate: 0.0003014261 +2025-03-11 19:34:19,642 Train Loss: 0.0159681, Val Loss: 0.0155124 +2025-03-11 19:34:19,642 Epoch 527/2000 +2025-03-11 19:34:44,621 Current Learning Rate: 0.0002942428 +2025-03-11 19:34:44,624 Train Loss: 0.0159672, Val Loss: 0.0155091 +2025-03-11 19:34:44,624 Epoch 528/2000 +2025-03-11 19:35:09,813 Current Learning Rate: 0.0002871104 +2025-03-11 19:35:09,813 Train Loss: 0.0159660, Val Loss: 0.0155103 +2025-03-11 19:35:09,814 Epoch 529/2000 +2025-03-11 19:35:34,961 Current Learning Rate: 0.0002800304 +2025-03-11 19:35:34,965 Train Loss: 0.0159652, Val Loss: 0.0155078 +2025-03-11 19:35:34,965 Epoch 530/2000 +2025-03-11 19:36:00,673 Current Learning Rate: 0.0002730048 +2025-03-11 19:36:00,677 Train Loss: 0.0159630, Val Loss: 0.0155023 +2025-03-11 19:36:00,677 Epoch 531/2000 +2025-03-11 19:36:26,078 Current Learning Rate: 0.0002660351 +2025-03-11 19:36:26,081 Train Loss: 0.0159614, Val 
Loss: 0.0155009 +2025-03-11 19:36:26,082 Epoch 532/2000 +2025-03-11 19:36:51,695 Current Learning Rate: 0.0002591232 +2025-03-11 19:36:51,700 Train Loss: 0.0159597, Val Loss: 0.0154991 +2025-03-11 19:36:51,700 Epoch 533/2000 +2025-03-11 19:37:17,654 Current Learning Rate: 0.0002522707 +2025-03-11 19:37:17,658 Train Loss: 0.0159582, Val Loss: 0.0154975 +2025-03-11 19:37:17,659 Epoch 534/2000 +2025-03-11 19:37:42,400 Current Learning Rate: 0.0002454793 +2025-03-11 19:37:42,404 Train Loss: 0.0159569, Val Loss: 0.0154959 +2025-03-11 19:37:42,404 Epoch 535/2000 +2025-03-11 19:38:07,364 Current Learning Rate: 0.0002387507 +2025-03-11 19:38:07,367 Train Loss: 0.0159558, Val Loss: 0.0154943 +2025-03-11 19:38:07,367 Epoch 536/2000 +2025-03-11 19:38:33,124 Current Learning Rate: 0.0002320866 +2025-03-11 19:38:33,129 Train Loss: 0.0159547, Val Loss: 0.0154926 +2025-03-11 19:38:33,129 Epoch 537/2000 +2025-03-11 19:38:58,985 Current Learning Rate: 0.0002254886 +2025-03-11 19:38:58,989 Train Loss: 0.0159537, Val Loss: 0.0154915 +2025-03-11 19:38:58,989 Epoch 538/2000 +2025-03-11 19:39:25,833 Current Learning Rate: 0.0002189583 +2025-03-11 19:39:25,837 Train Loss: 0.0159525, Val Loss: 0.0154909 +2025-03-11 19:39:25,837 Epoch 539/2000 +2025-03-11 19:39:52,597 Current Learning Rate: 0.0002124974 +2025-03-11 19:39:52,600 Train Loss: 0.0159513, Val Loss: 0.0154897 +2025-03-11 19:39:52,601 Epoch 540/2000 +2025-03-11 19:40:18,888 Current Learning Rate: 0.0002061074 +2025-03-11 19:40:18,892 Train Loss: 0.0159502, Val Loss: 0.0154883 +2025-03-11 19:40:18,893 Epoch 541/2000 +2025-03-11 19:40:44,005 Current Learning Rate: 0.0001997899 +2025-03-11 19:40:44,009 Train Loss: 0.0159488, Val Loss: 0.0154874 +2025-03-11 19:40:44,009 Epoch 542/2000 +2025-03-11 19:41:09,792 Current Learning Rate: 0.0001935465 +2025-03-11 19:41:09,811 Train Loss: 0.0159475, Val Loss: 0.0154863 +2025-03-11 19:41:09,811 Epoch 543/2000 +2025-03-11 19:41:34,923 Current Learning Rate: 0.0001873787 +2025-03-11 
19:41:34,927 Train Loss: 0.0159465, Val Loss: 0.0154852 +2025-03-11 19:41:34,928 Epoch 544/2000 +2025-03-11 19:42:00,195 Current Learning Rate: 0.0001812880 +2025-03-11 19:42:00,199 Train Loss: 0.0159455, Val Loss: 0.0154847 +2025-03-11 19:42:00,199 Epoch 545/2000 +2025-03-11 19:42:25,831 Current Learning Rate: 0.0001752760 +2025-03-11 19:42:25,831 Train Loss: 0.0159447, Val Loss: 0.0154849 +2025-03-11 19:42:25,832 Epoch 546/2000 +2025-03-11 19:42:51,019 Current Learning Rate: 0.0001693441 +2025-03-11 19:42:51,020 Train Loss: 0.0159439, Val Loss: 0.0154863 +2025-03-11 19:42:51,020 Epoch 547/2000 +2025-03-11 19:43:16,016 Current Learning Rate: 0.0001634937 +2025-03-11 19:43:16,019 Train Loss: 0.0159427, Val Loss: 0.0154845 +2025-03-11 19:43:16,019 Epoch 548/2000 +2025-03-11 19:43:41,700 Current Learning Rate: 0.0001577264 +2025-03-11 19:43:41,704 Train Loss: 0.0159412, Val Loss: 0.0154821 +2025-03-11 19:43:41,704 Epoch 549/2000 +2025-03-11 19:44:07,213 Current Learning Rate: 0.0001520436 +2025-03-11 19:44:07,217 Train Loss: 0.0159401, Val Loss: 0.0154805 +2025-03-11 19:44:07,217 Epoch 550/2000 +2025-03-11 19:44:32,617 Current Learning Rate: 0.0001464466 +2025-03-11 19:44:32,620 Train Loss: 0.0159392, Val Loss: 0.0154792 +2025-03-11 19:44:32,620 Epoch 551/2000 +2025-03-11 19:44:57,825 Current Learning Rate: 0.0001409369 +2025-03-11 19:44:57,830 Train Loss: 0.0159383, Val Loss: 0.0154781 +2025-03-11 19:44:57,830 Epoch 552/2000 +2025-03-11 19:45:23,574 Current Learning Rate: 0.0001355157 +2025-03-11 19:45:23,577 Train Loss: 0.0159374, Val Loss: 0.0154773 +2025-03-11 19:45:23,577 Epoch 553/2000 +2025-03-11 19:45:48,455 Current Learning Rate: 0.0001301845 +2025-03-11 19:45:48,473 Train Loss: 0.0159365, Val Loss: 0.0154766 +2025-03-11 19:45:48,473 Epoch 554/2000 +2025-03-11 19:46:13,254 Current Learning Rate: 0.0001249445 +2025-03-11 19:46:13,258 Train Loss: 0.0159358, Val Loss: 0.0154759 +2025-03-11 19:46:13,258 Epoch 555/2000 +2025-03-11 19:46:37,952 Current Learning 
Rate: 0.0001197970 +2025-03-11 19:46:37,955 Train Loss: 0.0159350, Val Loss: 0.0154751 +2025-03-11 19:46:37,955 Epoch 556/2000 +2025-03-11 19:47:03,201 Current Learning Rate: 0.0001147434 +2025-03-11 19:47:03,204 Train Loss: 0.0159343, Val Loss: 0.0154745 +2025-03-11 19:47:03,204 Epoch 557/2000 +2025-03-11 19:47:28,406 Current Learning Rate: 0.0001097848 +2025-03-11 19:47:28,409 Train Loss: 0.0159334, Val Loss: 0.0154738 +2025-03-11 19:47:28,409 Epoch 558/2000 +2025-03-11 19:47:53,794 Current Learning Rate: 0.0001049225 +2025-03-11 19:47:53,797 Train Loss: 0.0159326, Val Loss: 0.0154731 +2025-03-11 19:47:53,797 Epoch 559/2000 +2025-03-11 19:48:18,935 Current Learning Rate: 0.0001001577 +2025-03-11 19:48:18,983 Train Loss: 0.0159318, Val Loss: 0.0154723 +2025-03-11 19:48:18,983 Epoch 560/2000 +2025-03-11 19:48:43,791 Current Learning Rate: 0.0000954915 +2025-03-11 19:48:43,795 Train Loss: 0.0159311, Val Loss: 0.0154716 +2025-03-11 19:48:43,795 Epoch 561/2000 +2025-03-11 19:49:09,064 Current Learning Rate: 0.0000909251 +2025-03-11 19:49:09,067 Train Loss: 0.0159304, Val Loss: 0.0154710 +2025-03-11 19:49:09,067 Epoch 562/2000 +2025-03-11 19:49:34,307 Current Learning Rate: 0.0000864597 +2025-03-11 19:49:34,311 Train Loss: 0.0159297, Val Loss: 0.0154702 +2025-03-11 19:49:34,312 Epoch 563/2000 +2025-03-11 19:49:59,038 Current Learning Rate: 0.0000820963 +2025-03-11 19:49:59,042 Train Loss: 0.0159291, Val Loss: 0.0154695 +2025-03-11 19:49:59,042 Epoch 564/2000 +2025-03-11 19:50:24,181 Current Learning Rate: 0.0000778360 +2025-03-11 19:50:24,184 Train Loss: 0.0159284, Val Loss: 0.0154689 +2025-03-11 19:50:24,184 Epoch 565/2000 +2025-03-11 19:50:49,335 Current Learning Rate: 0.0000736799 +2025-03-11 19:50:49,339 Train Loss: 0.0159277, Val Loss: 0.0154683 +2025-03-11 19:50:49,339 Epoch 566/2000 +2025-03-11 19:51:14,712 Current Learning Rate: 0.0000696290 +2025-03-11 19:51:14,716 Train Loss: 0.0159271, Val Loss: 0.0154678 +2025-03-11 19:51:14,716 Epoch 567/2000 +2025-03-11 
19:51:39,929 Current Learning Rate: 0.0000656842 +2025-03-11 19:51:39,934 Train Loss: 0.0159265, Val Loss: 0.0154673 +2025-03-11 19:51:39,934 Epoch 568/2000 +2025-03-11 19:52:05,051 Current Learning Rate: 0.0000618467 +2025-03-11 19:52:05,054 Train Loss: 0.0159260, Val Loss: 0.0154668 +2025-03-11 19:52:05,054 Epoch 569/2000 +2025-03-11 19:52:30,288 Current Learning Rate: 0.0000581172 +2025-03-11 19:52:30,291 Train Loss: 0.0159254, Val Loss: 0.0154662 +2025-03-11 19:52:30,291 Epoch 570/2000 +2025-03-11 19:52:55,321 Current Learning Rate: 0.0000544967 +2025-03-11 19:52:55,328 Train Loss: 0.0159249, Val Loss: 0.0154657 +2025-03-11 19:52:55,328 Epoch 571/2000 +2025-03-11 19:53:20,293 Current Learning Rate: 0.0000509862 +2025-03-11 19:53:20,306 Train Loss: 0.0159244, Val Loss: 0.0154652 +2025-03-11 19:53:20,306 Epoch 572/2000 +2025-03-11 19:53:45,368 Current Learning Rate: 0.0000475865 +2025-03-11 19:53:45,372 Train Loss: 0.0159239, Val Loss: 0.0154650 +2025-03-11 19:53:45,372 Epoch 573/2000 +2025-03-11 19:54:10,620 Current Learning Rate: 0.0000442984 +2025-03-11 19:54:10,623 Train Loss: 0.0159235, Val Loss: 0.0154648 +2025-03-11 19:54:10,624 Epoch 574/2000 +2025-03-11 19:54:36,301 Current Learning Rate: 0.0000411227 +2025-03-11 19:54:36,305 Train Loss: 0.0159231, Val Loss: 0.0154645 +2025-03-11 19:54:36,305 Epoch 575/2000 +2025-03-11 19:55:01,861 Current Learning Rate: 0.0000380602 +2025-03-11 19:55:02,084 Train Loss: 0.0159226, Val Loss: 0.0154640 +2025-03-11 19:55:02,085 Epoch 576/2000 +2025-03-11 19:55:27,014 Current Learning Rate: 0.0000351118 +2025-03-11 19:55:27,018 Train Loss: 0.0159223, Val Loss: 0.0154634 +2025-03-11 19:55:27,019 Epoch 577/2000 +2025-03-11 19:55:52,212 Current Learning Rate: 0.0000322780 +2025-03-11 19:55:52,215 Train Loss: 0.0159219, Val Loss: 0.0154629 +2025-03-11 19:55:52,216 Epoch 578/2000 +2025-03-11 19:56:17,269 Current Learning Rate: 0.0000295596 +2025-03-11 19:56:17,273 Train Loss: 0.0159215, Val Loss: 0.0154626 +2025-03-11 
19:56:17,273 Epoch 579/2000 +2025-03-11 19:56:42,515 Current Learning Rate: 0.0000269573 +2025-03-11 19:56:42,520 Train Loss: 0.0159212, Val Loss: 0.0154626 +2025-03-11 19:56:42,520 Epoch 580/2000 +2025-03-11 19:57:08,072 Current Learning Rate: 0.0000244717 +2025-03-11 19:57:08,075 Train Loss: 0.0159209, Val Loss: 0.0154625 +2025-03-11 19:57:08,075 Epoch 581/2000 +2025-03-11 19:57:33,359 Current Learning Rate: 0.0000221035 +2025-03-11 19:57:33,363 Train Loss: 0.0159206, Val Loss: 0.0154622 +2025-03-11 19:57:33,364 Epoch 582/2000 +2025-03-11 19:57:58,034 Current Learning Rate: 0.0000198532 +2025-03-11 19:57:58,037 Train Loss: 0.0159204, Val Loss: 0.0154620 +2025-03-11 19:57:58,037 Epoch 583/2000 +2025-03-11 19:58:23,067 Current Learning Rate: 0.0000177213 +2025-03-11 19:58:23,070 Train Loss: 0.0159201, Val Loss: 0.0154615 +2025-03-11 19:58:23,071 Epoch 584/2000 +2025-03-11 19:58:48,022 Current Learning Rate: 0.0000157084 +2025-03-11 19:58:48,026 Train Loss: 0.0159198, Val Loss: 0.0154612 +2025-03-11 19:58:48,026 Epoch 585/2000 +2025-03-11 19:59:13,301 Current Learning Rate: 0.0000138150 +2025-03-11 19:59:13,305 Train Loss: 0.0159196, Val Loss: 0.0154610 +2025-03-11 19:59:13,305 Epoch 586/2000 +2025-03-11 19:59:38,832 Current Learning Rate: 0.0000120416 +2025-03-11 19:59:38,836 Train Loss: 0.0159194, Val Loss: 0.0154609 +2025-03-11 19:59:38,836 Epoch 587/2000 +2025-03-11 20:00:03,805 Current Learning Rate: 0.0000103886 +2025-03-11 20:00:03,809 Train Loss: 0.0159192, Val Loss: 0.0154608 +2025-03-11 20:00:03,809 Epoch 588/2000 +2025-03-11 20:00:29,128 Current Learning Rate: 0.0000088564 +2025-03-11 20:00:29,132 Train Loss: 0.0159191, Val Loss: 0.0154607 +2025-03-11 20:00:29,132 Epoch 589/2000 +2025-03-11 20:00:55,041 Current Learning Rate: 0.0000074453 +2025-03-11 20:00:55,045 Train Loss: 0.0159190, Val Loss: 0.0154606 +2025-03-11 20:00:55,045 Epoch 590/2000 +2025-03-11 20:01:20,213 Current Learning Rate: 0.0000061558 +2025-03-11 20:01:20,217 Train Loss: 0.0159188, Val 
Loss: 0.0154605 +2025-03-11 20:01:20,217 Epoch 591/2000 +2025-03-11 20:01:45,589 Current Learning Rate: 0.0000049882 +2025-03-11 20:01:45,592 Train Loss: 0.0159188, Val Loss: 0.0154604 +2025-03-11 20:01:45,592 Epoch 592/2000 +2025-03-11 20:02:10,874 Current Learning Rate: 0.0000039426 +2025-03-11 20:02:10,878 Train Loss: 0.0159187, Val Loss: 0.0154603 +2025-03-11 20:02:10,878 Epoch 593/2000 +2025-03-11 20:02:36,254 Current Learning Rate: 0.0000030195 +2025-03-11 20:02:36,258 Train Loss: 0.0159186, Val Loss: 0.0154602 +2025-03-11 20:02:36,258 Epoch 594/2000 +2025-03-11 20:03:01,121 Current Learning Rate: 0.0000022190 +2025-03-11 20:03:01,124 Train Loss: 0.0159185, Val Loss: 0.0154602 +2025-03-11 20:03:01,124 Epoch 595/2000 +2025-03-11 20:03:26,091 Current Learning Rate: 0.0000015413 +2025-03-11 20:03:26,095 Train Loss: 0.0159185, Val Loss: 0.0154601 +2025-03-11 20:03:26,095 Epoch 596/2000 +2025-03-11 20:03:50,590 Current Learning Rate: 0.0000009866 +2025-03-11 20:03:50,593 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:03:50,593 Epoch 597/2000 +2025-03-11 20:04:14,945 Current Learning Rate: 0.0000005551 +2025-03-11 20:04:14,948 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:04:14,948 Epoch 598/2000 +2025-03-11 20:04:39,936 Current Learning Rate: 0.0000002467 +2025-03-11 20:04:39,940 Train Loss: 0.0159184, Val Loss: 0.0154600 +2025-03-11 20:04:39,940 Epoch 599/2000 +2025-03-11 20:05:05,171 Current Learning Rate: 0.0000000617 +2025-03-11 20:05:05,172 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:05:05,172 Epoch 600/2000 +2025-03-11 20:05:29,615 Current Learning Rate: 0.0000000000 +2025-03-11 20:05:29,615 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:05:29,616 Epoch 601/2000 +2025-03-11 20:05:54,257 Current Learning Rate: 0.0000000617 +2025-03-11 20:05:54,258 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:05:54,258 Epoch 602/2000 +2025-03-11 20:06:18,873 Current Learning Rate: 0.0000002467 +2025-03-11 
20:06:18,874 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:06:18,874 Epoch 603/2000 +2025-03-11 20:06:43,429 Current Learning Rate: 0.0000005551 +2025-03-11 20:06:43,430 Train Loss: 0.0159184, Val Loss: 0.0154601 +2025-03-11 20:06:43,430 Epoch 604/2000 +2025-03-11 20:07:08,019 Current Learning Rate: 0.0000009866 +2025-03-11 20:07:08,023 Train Loss: 0.0159184, Val Loss: 0.0154600 +2025-03-11 20:07:08,023 Epoch 605/2000 +2025-03-11 20:07:32,868 Current Learning Rate: 0.0000015413 +2025-03-11 20:07:32,872 Train Loss: 0.0159184, Val Loss: 0.0154600 +2025-03-11 20:07:32,872 Epoch 606/2000 +2025-03-11 20:07:57,617 Current Learning Rate: 0.0000022190 +2025-03-11 20:07:57,621 Train Loss: 0.0159184, Val Loss: 0.0154600 +2025-03-11 20:07:57,621 Epoch 607/2000 +2025-03-11 20:08:22,830 Current Learning Rate: 0.0000030195 +2025-03-11 20:08:22,834 Train Loss: 0.0159183, Val Loss: 0.0154600 +2025-03-11 20:08:22,834 Epoch 608/2000 +2025-03-11 20:08:48,076 Current Learning Rate: 0.0000039426 +2025-03-11 20:08:48,079 Train Loss: 0.0159183, Val Loss: 0.0154600 +2025-03-11 20:08:48,080 Epoch 609/2000 +2025-03-11 20:09:12,672 Current Learning Rate: 0.0000049882 +2025-03-11 20:09:12,676 Train Loss: 0.0159183, Val Loss: 0.0154599 +2025-03-11 20:09:12,676 Epoch 610/2000 +2025-03-11 20:09:37,539 Current Learning Rate: 0.0000061558 +2025-03-11 20:09:37,542 Train Loss: 0.0159182, Val Loss: 0.0154599 +2025-03-11 20:09:37,542 Epoch 611/2000 +2025-03-11 20:10:02,576 Current Learning Rate: 0.0000074453 +2025-03-11 20:10:02,719 Train Loss: 0.0159181, Val Loss: 0.0154598 +2025-03-11 20:10:02,719 Epoch 612/2000 +2025-03-11 20:10:27,688 Current Learning Rate: 0.0000088564 +2025-03-11 20:10:27,691 Train Loss: 0.0159180, Val Loss: 0.0154596 +2025-03-11 20:10:27,691 Epoch 613/2000 +2025-03-11 20:10:52,864 Current Learning Rate: 0.0000103886 +2025-03-11 20:10:52,868 Train Loss: 0.0159179, Val Loss: 0.0154594 +2025-03-11 20:10:52,868 Epoch 614/2000 +2025-03-11 20:11:17,880 Current Learning 
Rate: 0.0000120416 +2025-03-11 20:11:17,882 Train Loss: 0.0159177, Val Loss: 0.0154593 +2025-03-11 20:11:17,883 Epoch 615/2000 +2025-03-11 20:11:42,991 Current Learning Rate: 0.0000138150 +2025-03-11 20:11:42,994 Train Loss: 0.0159176, Val Loss: 0.0154593 +2025-03-11 20:11:42,994 Epoch 616/2000 +2025-03-11 20:12:07,496 Current Learning Rate: 0.0000157084 +2025-03-11 20:12:07,500 Train Loss: 0.0159174, Val Loss: 0.0154589 +2025-03-11 20:12:07,500 Epoch 617/2000 +2025-03-11 20:12:32,327 Current Learning Rate: 0.0000177213 +2025-03-11 20:12:32,331 Train Loss: 0.0159171, Val Loss: 0.0154583 +2025-03-11 20:12:32,331 Epoch 618/2000 +2025-03-11 20:12:57,097 Current Learning Rate: 0.0000198532 +2025-03-11 20:12:57,100 Train Loss: 0.0159168, Val Loss: 0.0154579 +2025-03-11 20:12:57,100 Epoch 619/2000 +2025-03-11 20:13:21,610 Current Learning Rate: 0.0000221035 +2025-03-11 20:13:21,610 Train Loss: 0.0159165, Val Loss: 0.0154581 +2025-03-11 20:13:21,610 Epoch 620/2000 +2025-03-11 20:13:46,062 Current Learning Rate: 0.0000244717 +2025-03-11 20:13:46,074 Train Loss: 0.0159162, Val Loss: 0.0154579 +2025-03-11 20:13:46,075 Epoch 621/2000 +2025-03-11 20:14:11,122 Current Learning Rate: 0.0000269573 +2025-03-11 20:14:11,125 Train Loss: 0.0159158, Val Loss: 0.0154571 +2025-03-11 20:14:11,126 Epoch 622/2000 +2025-03-11 20:14:36,132 Current Learning Rate: 0.0000295596 +2025-03-11 20:14:36,135 Train Loss: 0.0159154, Val Loss: 0.0154567 +2025-03-11 20:14:36,135 Epoch 623/2000 +2025-03-11 20:15:01,392 Current Learning Rate: 0.0000322780 +2025-03-11 20:15:01,395 Train Loss: 0.0159150, Val Loss: 0.0154562 +2025-03-11 20:15:01,395 Epoch 624/2000 +2025-03-11 20:15:26,820 Current Learning Rate: 0.0000351118 +2025-03-11 20:15:26,823 Train Loss: 0.0159146, Val Loss: 0.0154554 +2025-03-11 20:15:26,823 Epoch 625/2000 +2025-03-11 20:15:52,293 Current Learning Rate: 0.0000380602 +2025-03-11 20:15:52,297 Train Loss: 0.0159139, Val Loss: 0.0154549 +2025-03-11 20:15:52,298 Epoch 626/2000 +2025-03-11 
20:16:16,779 Current Learning Rate: 0.0000411227 +2025-03-11 20:16:16,782 Train Loss: 0.0159136, Val Loss: 0.0154545 +2025-03-11 20:16:16,782 Epoch 627/2000 +2025-03-11 20:16:41,526 Current Learning Rate: 0.0000442984 +2025-03-11 20:16:41,530 Train Loss: 0.0159131, Val Loss: 0.0154539 +2025-03-11 20:16:41,530 Epoch 628/2000 +2025-03-11 20:17:06,295 Current Learning Rate: 0.0000475865 +2025-03-11 20:17:06,299 Train Loss: 0.0159125, Val Loss: 0.0154531 +2025-03-11 20:17:06,299 Epoch 629/2000 +2025-03-11 20:17:31,101 Current Learning Rate: 0.0000509862 +2025-03-11 20:17:31,106 Train Loss: 0.0159118, Val Loss: 0.0154523 +2025-03-11 20:17:31,106 Epoch 630/2000 +2025-03-11 20:17:55,253 Current Learning Rate: 0.0000544967 +2025-03-11 20:17:55,256 Train Loss: 0.0159114, Val Loss: 0.0154516 +2025-03-11 20:17:55,256 Epoch 631/2000 +2025-03-11 20:18:19,384 Current Learning Rate: 0.0000581172 +2025-03-11 20:18:19,394 Train Loss: 0.0159111, Val Loss: 0.0154509 +2025-03-11 20:18:19,395 Epoch 632/2000 +2025-03-11 20:18:43,653 Current Learning Rate: 0.0000618467 +2025-03-11 20:18:43,656 Train Loss: 0.0159104, Val Loss: 0.0154504 +2025-03-11 20:18:43,656 Epoch 633/2000 +2025-03-11 20:19:07,990 Current Learning Rate: 0.0000656842 +2025-03-11 20:19:07,994 Train Loss: 0.0159099, Val Loss: 0.0154498 +2025-03-11 20:19:07,995 Epoch 634/2000 +2025-03-11 20:19:32,112 Current Learning Rate: 0.0000696290 +2025-03-11 20:19:32,115 Train Loss: 0.0159092, Val Loss: 0.0154493 +2025-03-11 20:19:32,115 Epoch 635/2000 +2025-03-11 20:19:57,855 Current Learning Rate: 0.0000736799 +2025-03-11 20:19:57,857 Train Loss: 0.0159087, Val Loss: 0.0154489 +2025-03-11 20:19:57,857 Epoch 636/2000 +2025-03-11 20:20:22,696 Current Learning Rate: 0.0000778360 +2025-03-11 20:20:22,699 Train Loss: 0.0159081, Val Loss: 0.0154484 +2025-03-11 20:20:22,699 Epoch 637/2000 +2025-03-11 20:20:47,092 Current Learning Rate: 0.0000820963 +2025-03-11 20:20:47,095 Train Loss: 0.0159075, Val Loss: 0.0154479 +2025-03-11 
20:20:47,095 Epoch 638/2000 +2025-03-11 20:21:11,528 Current Learning Rate: 0.0000864597 +2025-03-11 20:21:11,531 Train Loss: 0.0159069, Val Loss: 0.0154475 +2025-03-11 20:21:11,531 Epoch 639/2000 +2025-03-11 20:21:36,182 Current Learning Rate: 0.0000909251 +2025-03-11 20:21:36,749 Train Loss: 0.0159063, Val Loss: 0.0154470 +2025-03-11 20:21:36,749 Epoch 640/2000 +2025-03-11 20:22:01,593 Current Learning Rate: 0.0000954915 +2025-03-11 20:22:01,597 Train Loss: 0.0159058, Val Loss: 0.0154466 +2025-03-11 20:22:01,597 Epoch 641/2000 +2025-03-11 20:22:26,808 Current Learning Rate: 0.0001001577 +2025-03-11 20:22:26,811 Train Loss: 0.0159052, Val Loss: 0.0154462 +2025-03-11 20:22:26,811 Epoch 642/2000 +2025-03-11 20:22:52,088 Current Learning Rate: 0.0001049225 +2025-03-11 20:22:52,361 Train Loss: 0.0159046, Val Loss: 0.0154459 +2025-03-11 20:22:52,361 Epoch 643/2000 +2025-03-11 20:23:17,722 Current Learning Rate: 0.0001097848 +2025-03-11 20:23:17,725 Train Loss: 0.0159040, Val Loss: 0.0154456 +2025-03-11 20:23:17,725 Epoch 644/2000 +2025-03-11 20:23:43,140 Current Learning Rate: 0.0001147434 +2025-03-11 20:23:43,146 Train Loss: 0.0159034, Val Loss: 0.0154453 +2025-03-11 20:23:43,146 Epoch 645/2000 +2025-03-11 20:24:08,306 Current Learning Rate: 0.0001197970 +2025-03-11 20:24:08,310 Train Loss: 0.0159029, Val Loss: 0.0154449 +2025-03-11 20:24:08,310 Epoch 646/2000 +2025-03-11 20:24:33,767 Current Learning Rate: 0.0001249445 +2025-03-11 20:24:33,770 Train Loss: 0.0159023, Val Loss: 0.0154445 +2025-03-11 20:24:33,770 Epoch 647/2000 +2025-03-11 20:24:58,668 Current Learning Rate: 0.0001301845 +2025-03-11 20:24:58,671 Train Loss: 0.0159016, Val Loss: 0.0154440 +2025-03-11 20:24:58,671 Epoch 648/2000 +2025-03-11 20:25:24,630 Current Learning Rate: 0.0001355157 +2025-03-11 20:25:24,634 Train Loss: 0.0159009, Val Loss: 0.0154428 +2025-03-11 20:25:24,634 Epoch 649/2000 +2025-03-11 20:25:49,594 Current Learning Rate: 0.0001409369 +2025-03-11 20:25:49,597 Train Loss: 0.0158995, Val 
Loss: 0.0154400 +2025-03-11 20:25:49,597 Epoch 650/2000 +2025-03-11 20:26:14,427 Current Learning Rate: 0.0001464466 +2025-03-11 20:26:14,435 Train Loss: 0.0158992, Val Loss: 0.0154388 +2025-03-11 20:26:14,436 Epoch 651/2000 +2025-03-11 20:26:40,106 Current Learning Rate: 0.0001520436 +2025-03-11 20:26:40,106 Train Loss: 0.0158990, Val Loss: 0.0154422 +2025-03-11 20:26:40,106 Epoch 652/2000 +2025-03-11 20:27:05,378 Current Learning Rate: 0.0001577264 +2025-03-11 20:27:05,392 Train Loss: 0.0158982, Val Loss: 0.0154359 +2025-03-11 20:27:05,393 Epoch 653/2000 +2025-03-11 20:27:31,044 Current Learning Rate: 0.0001634937 +2025-03-11 20:27:31,045 Train Loss: 0.0158974, Val Loss: 0.0154403 +2025-03-11 20:27:31,045 Epoch 654/2000 +2025-03-11 20:27:56,459 Current Learning Rate: 0.0001693441 +2025-03-11 20:27:56,459 Train Loss: 0.0158964, Val Loss: 0.0154364 +2025-03-11 20:27:56,459 Epoch 655/2000 +2025-03-11 20:28:21,943 Current Learning Rate: 0.0001752760 +2025-03-11 20:28:21,943 Train Loss: 0.0158955, Val Loss: 0.0154369 +2025-03-11 20:28:21,943 Epoch 656/2000 +2025-03-11 20:28:47,128 Current Learning Rate: 0.0001812880 +2025-03-11 20:28:47,129 Train Loss: 0.0158949, Val Loss: 0.0154381 +2025-03-11 20:28:47,129 Epoch 657/2000 +2025-03-11 20:29:12,513 Current Learning Rate: 0.0001873787 +2025-03-11 20:29:12,513 Train Loss: 0.0158938, Val Loss: 0.0154420 +2025-03-11 20:29:12,514 Epoch 658/2000 +2025-03-11 20:29:37,742 Current Learning Rate: 0.0001935465 +2025-03-11 20:29:37,742 Train Loss: 0.0158926, Val Loss: 0.0154530 +2025-03-11 20:29:37,742 Epoch 659/2000 +2025-03-11 20:30:02,949 Current Learning Rate: 0.0001997899 +2025-03-11 20:30:02,953 Train Loss: 0.0158933, Val Loss: 0.0154344 +2025-03-11 20:30:02,954 Epoch 660/2000 +2025-03-11 20:30:28,271 Current Learning Rate: 0.0002061074 +2025-03-11 20:30:28,272 Train Loss: 0.0158931, Val Loss: 0.0154366 +2025-03-11 20:30:28,272 Epoch 661/2000 +2025-03-11 20:30:53,335 Current Learning Rate: 0.0002124974 +2025-03-11 
20:30:53,336 Train Loss: 0.0158910, Val Loss: 0.0154507 +2025-03-11 20:30:53,336 Epoch 662/2000 +2025-03-11 20:31:18,457 Current Learning Rate: 0.0002189583 +2025-03-11 20:31:18,460 Train Loss: 0.0158951, Val Loss: 0.0154278 +2025-03-11 20:31:18,460 Epoch 663/2000 +2025-03-11 20:31:43,075 Current Learning Rate: 0.0002254886 +2025-03-11 20:31:43,076 Train Loss: 0.0158886, Val Loss: 0.0154338 +2025-03-11 20:31:43,076 Epoch 664/2000 +2025-03-11 20:32:07,639 Current Learning Rate: 0.0002320866 +2025-03-11 20:32:07,639 Train Loss: 0.0158905, Val Loss: 0.0154319 +2025-03-11 20:32:07,640 Epoch 665/2000 +2025-03-11 20:32:32,039 Current Learning Rate: 0.0002387507 +2025-03-11 20:32:32,040 Train Loss: 0.0158899, Val Loss: 0.0154316 +2025-03-11 20:32:32,040 Epoch 666/2000 +2025-03-11 20:32:56,129 Current Learning Rate: 0.0002454793 +2025-03-11 20:32:56,129 Train Loss: 0.0158894, Val Loss: 0.0154322 +2025-03-11 20:32:56,130 Epoch 667/2000 +2025-03-11 20:33:20,544 Current Learning Rate: 0.0002522707 +2025-03-11 20:33:20,544 Train Loss: 0.0158887, Val Loss: 0.0154326 +2025-03-11 20:33:20,544 Epoch 668/2000 +2025-03-11 20:33:44,800 Current Learning Rate: 0.0002591232 +2025-03-11 20:33:44,800 Train Loss: 0.0158880, Val Loss: 0.0154328 +2025-03-11 20:33:44,800 Epoch 669/2000 +2025-03-11 20:34:09,282 Current Learning Rate: 0.0002660351 +2025-03-11 20:34:09,282 Train Loss: 0.0158873, Val Loss: 0.0154330 +2025-03-11 20:34:09,282 Epoch 670/2000 +2025-03-11 20:34:33,848 Current Learning Rate: 0.0002730048 +2025-03-11 20:34:33,848 Train Loss: 0.0158867, Val Loss: 0.0154331 +2025-03-11 20:34:33,848 Epoch 671/2000 +2025-03-11 20:34:58,530 Current Learning Rate: 0.0002800304 +2025-03-11 20:34:58,530 Train Loss: 0.0158864, Val Loss: 0.0154396 +2025-03-11 20:34:58,530 Epoch 672/2000 +2025-03-11 20:35:23,002 Current Learning Rate: 0.0002871104 +2025-03-11 20:35:23,006 Train Loss: 0.0158826, Val Loss: 0.0154244 +2025-03-11 20:35:23,006 Epoch 673/2000 +2025-03-11 20:35:47,438 Current Learning 
Rate: 0.0002942428 +2025-03-11 20:35:47,441 Train Loss: 0.0158930, Val Loss: 0.0154242 +2025-03-11 20:35:47,441 Epoch 674/2000 +2025-03-11 20:36:11,616 Current Learning Rate: 0.0003014261 +2025-03-11 20:36:11,616 Train Loss: 0.0158779, Val Loss: 0.0154255 +2025-03-11 20:36:11,616 Epoch 675/2000 +2025-03-11 20:36:36,188 Current Learning Rate: 0.0003086583 +2025-03-11 20:36:36,188 Train Loss: 0.0158828, Val Loss: 0.0154334 +2025-03-11 20:36:36,189 Epoch 676/2000 +2025-03-11 20:37:00,736 Current Learning Rate: 0.0003159377 +2025-03-11 20:37:00,740 Train Loss: 0.0158863, Val Loss: 0.0154160 +2025-03-11 20:37:00,741 Epoch 677/2000 +2025-03-11 20:37:25,163 Current Learning Rate: 0.0003232626 +2025-03-11 20:37:25,166 Train Loss: 0.0158815, Val Loss: 0.0154138 +2025-03-11 20:37:25,166 Epoch 678/2000 +2025-03-11 20:37:49,740 Current Learning Rate: 0.0003306310 +2025-03-11 20:37:49,740 Train Loss: 0.0158748, Val Loss: 0.0154346 +2025-03-11 20:37:49,740 Epoch 679/2000 +2025-03-11 20:38:14,389 Current Learning Rate: 0.0003380413 +2025-03-11 20:38:14,392 Train Loss: 0.0158848, Val Loss: 0.0154127 +2025-03-11 20:38:14,393 Epoch 680/2000 +2025-03-11 20:38:38,628 Current Learning Rate: 0.0003454915 +2025-03-11 20:38:38,629 Train Loss: 0.0158737, Val Loss: 0.0154162 +2025-03-11 20:38:38,630 Epoch 681/2000 +2025-03-11 20:39:03,088 Current Learning Rate: 0.0003529798 +2025-03-11 20:39:03,091 Train Loss: 0.0158890, Val Loss: 0.0154097 +2025-03-11 20:39:03,092 Epoch 682/2000 +2025-03-11 20:39:28,320 Current Learning Rate: 0.0003605044 +2025-03-11 20:39:28,320 Train Loss: 0.0158722, Val Loss: 0.0154442 +2025-03-11 20:39:28,320 Epoch 683/2000 +2025-03-11 20:39:53,194 Current Learning Rate: 0.0003680635 +2025-03-11 20:39:53,194 Train Loss: 0.0158821, Val Loss: 0.0154113 +2025-03-11 20:39:53,194 Epoch 684/2000 +2025-03-11 20:40:17,768 Current Learning Rate: 0.0003756551 +2025-03-11 20:40:17,768 Train Loss: 0.0158748, Val Loss: 0.0154241 +2025-03-11 20:40:17,768 Epoch 685/2000 +2025-03-11 
20:40:42,548 Current Learning Rate: 0.0003832773 +2025-03-11 20:40:42,551 Train Loss: 0.0158890, Val Loss: 0.0154065 +2025-03-11 20:40:42,551 Epoch 686/2000 +2025-03-11 20:41:07,425 Current Learning Rate: 0.0003909284 +2025-03-11 20:41:07,426 Train Loss: 0.0158689, Val Loss: 0.0154498 +2025-03-11 20:41:07,426 Epoch 687/2000 +2025-03-11 20:41:31,902 Current Learning Rate: 0.0003986064 +2025-03-11 20:41:31,906 Train Loss: 0.0158857, Val Loss: 0.0154048 +2025-03-11 20:41:31,906 Epoch 688/2000 +2025-03-11 20:41:56,563 Current Learning Rate: 0.0004063093 +2025-03-11 20:41:56,563 Train Loss: 0.0158699, Val Loss: 0.0154195 +2025-03-11 20:41:56,564 Epoch 689/2000 +2025-03-11 20:42:21,354 Current Learning Rate: 0.0004140354 +2025-03-11 20:42:21,355 Train Loss: 0.0158806, Val Loss: 0.0154049 +2025-03-11 20:42:21,355 Epoch 690/2000 +2025-03-11 20:42:46,681 Current Learning Rate: 0.0004217828 +2025-03-11 20:42:46,682 Train Loss: 0.0158682, Val Loss: 0.0154167 +2025-03-11 20:42:46,682 Epoch 691/2000 +2025-03-11 20:43:11,235 Current Learning Rate: 0.0004295494 +2025-03-11 20:43:11,240 Train Loss: 0.0158991, Val Loss: 0.0153994 +2025-03-11 20:43:11,241 Epoch 692/2000 +2025-03-11 20:43:35,974 Current Learning Rate: 0.0004373334 +2025-03-11 20:43:35,975 Train Loss: 0.0158627, Val Loss: 0.0154046 +2025-03-11 20:43:35,975 Epoch 693/2000 +2025-03-11 20:44:01,192 Current Learning Rate: 0.0004451328 +2025-03-11 20:44:01,197 Train Loss: 0.0158691, Val Loss: 0.0154034 +2025-03-11 20:44:01,197 Epoch 694/2000 +2025-03-11 20:44:25,846 Current Learning Rate: 0.0004529458 +2025-03-11 20:44:25,850 Train Loss: 0.0158861, Val Loss: 0.0153990 +2025-03-11 20:44:25,850 Epoch 695/2000 +2025-03-11 20:44:50,716 Current Learning Rate: 0.0004607705 +2025-03-11 20:44:50,716 Train Loss: 0.0158615, Val Loss: 0.0154028 +2025-03-11 20:44:50,716 Epoch 696/2000 +2025-03-11 20:45:15,835 Current Learning Rate: 0.0004686047 +2025-03-11 20:45:15,839 Train Loss: 0.0158962, Val Loss: 0.0153962 +2025-03-11 
20:45:15,839 Epoch 697/2000 +2025-03-11 20:45:40,956 Current Learning Rate: 0.0004764468 +2025-03-11 20:45:40,957 Train Loss: 0.0158589, Val Loss: 0.0154116 +2025-03-11 20:45:40,957 Epoch 698/2000 +2025-03-11 20:46:05,686 Current Learning Rate: 0.0004842946 +2025-03-11 20:46:05,686 Train Loss: 0.0158707, Val Loss: 0.0154052 +2025-03-11 20:46:05,686 Epoch 699/2000 +2025-03-11 20:46:30,865 Current Learning Rate: 0.0004921463 +2025-03-11 20:46:30,865 Train Loss: 0.0158761, Val Loss: 0.0154075 +2025-03-11 20:46:30,865 Epoch 700/2000 +2025-03-11 20:46:56,223 Current Learning Rate: 0.0005000000 +2025-03-11 20:46:56,224 Train Loss: 0.0158636, Val Loss: 0.0153976 +2025-03-11 20:46:56,224 Epoch 701/2000 +2025-03-11 20:47:20,362 Current Learning Rate: 0.0005078537 +2025-03-11 20:47:20,363 Train Loss: 0.0158943, Val Loss: 0.0154073 +2025-03-11 20:47:20,363 Epoch 702/2000 +2025-03-11 20:47:44,819 Current Learning Rate: 0.0005157054 +2025-03-11 20:47:44,819 Train Loss: 0.0158576, Val Loss: 0.0154012 +2025-03-11 20:47:44,820 Epoch 703/2000 +2025-03-11 20:48:09,925 Current Learning Rate: 0.0005235532 +2025-03-11 20:48:09,925 Train Loss: 0.0158743, Val Loss: 0.0154066 +2025-03-11 20:48:09,926 Epoch 704/2000 +2025-03-11 20:48:34,632 Current Learning Rate: 0.0005313953 +2025-03-11 20:48:34,633 Train Loss: 0.0158611, Val Loss: 0.0154052 +2025-03-11 20:48:34,633 Epoch 705/2000 +2025-03-11 20:48:58,932 Current Learning Rate: 0.0005392295 +2025-03-11 20:48:58,932 Train Loss: 0.0158915, Val Loss: 0.0154067 +2025-03-11 20:48:58,933 Epoch 706/2000 +2025-03-11 20:49:23,534 Current Learning Rate: 0.0005470542 +2025-03-11 20:49:23,534 Train Loss: 0.0158559, Val Loss: 0.0154022 +2025-03-11 20:49:23,534 Epoch 707/2000 +2025-03-11 20:49:48,216 Current Learning Rate: 0.0005548672 +2025-03-11 20:49:48,217 Train Loss: 0.0158800, Val Loss: 0.0154049 +2025-03-11 20:49:48,217 Epoch 708/2000 +2025-03-11 20:50:12,884 Current Learning Rate: 0.0005626666 +2025-03-11 20:50:12,889 Train Loss: 0.0158608, Val 
Loss: 0.0153866 +2025-03-11 20:50:12,889 Epoch 709/2000 +2025-03-11 20:50:37,612 Current Learning Rate: 0.0005704506 +2025-03-11 20:50:37,612 Train Loss: 0.0158641, Val Loss: 0.0154051 +2025-03-11 20:50:37,613 Epoch 710/2000 +2025-03-11 20:51:02,314 Current Learning Rate: 0.0005782172 +2025-03-11 20:51:02,315 Train Loss: 0.0158806, Val Loss: 0.0153999 +2025-03-11 20:51:02,315 Epoch 711/2000 +2025-03-11 20:51:27,679 Current Learning Rate: 0.0005859646 +2025-03-11 20:51:27,683 Train Loss: 0.0158598, Val Loss: 0.0153793 +2025-03-11 20:51:27,683 Epoch 712/2000 +2025-03-11 20:51:52,056 Current Learning Rate: 0.0005936907 +2025-03-11 20:51:52,057 Train Loss: 0.0158694, Val Loss: 0.0154000 +2025-03-11 20:51:52,057 Epoch 713/2000 +2025-03-11 20:52:16,585 Current Learning Rate: 0.0006013936 +2025-03-11 20:52:16,585 Train Loss: 0.0158740, Val Loss: 0.0153979 +2025-03-11 20:52:16,586 Epoch 714/2000 +2025-03-11 20:52:41,339 Current Learning Rate: 0.0006090716 +2025-03-11 20:52:41,340 Train Loss: 0.0158575, Val Loss: 0.0153804 +2025-03-11 20:52:41,340 Epoch 715/2000 +2025-03-11 20:53:05,426 Current Learning Rate: 0.0006167227 +2025-03-11 20:53:05,426 Train Loss: 0.0158786, Val Loss: 0.0153921 +2025-03-11 20:53:05,427 Epoch 716/2000 +2025-03-11 20:53:29,698 Current Learning Rate: 0.0006243449 +2025-03-11 20:53:29,698 Train Loss: 0.0158611, Val Loss: 0.0153876 +2025-03-11 20:53:29,699 Epoch 717/2000 +2025-03-11 20:53:53,883 Current Learning Rate: 0.0006319365 +2025-03-11 20:53:53,884 Train Loss: 0.0158743, Val Loss: 0.0154354 +2025-03-11 20:53:53,884 Epoch 718/2000 +2025-03-11 20:54:18,103 Current Learning Rate: 0.0006394956 +2025-03-11 20:54:18,103 Train Loss: 0.0158553, Val Loss: 0.0154113 +2025-03-11 20:54:18,103 Epoch 719/2000 +2025-03-11 20:54:42,805 Current Learning Rate: 0.0006470202 +2025-03-11 20:54:42,805 Train Loss: 0.0158760, Val Loss: 0.0153988 +2025-03-11 20:54:42,805 Epoch 720/2000 +2025-03-11 20:55:07,385 Current Learning Rate: 0.0006545085 +2025-03-11 
20:55:07,386 Train Loss: 0.0158707, Val Loss: 0.0154622 +2025-03-11 20:55:07,386 Epoch 721/2000 +2025-03-11 20:55:31,663 Current Learning Rate: 0.0006619587 +2025-03-11 20:55:31,664 Train Loss: 0.0158451, Val Loss: 0.0154048 +2025-03-11 20:55:31,664 Epoch 722/2000 +2025-03-11 20:55:56,083 Current Learning Rate: 0.0006693690 +2025-03-11 20:55:56,084 Train Loss: 0.0158656, Val Loss: 0.0154066 +2025-03-11 20:55:56,084 Epoch 723/2000 +2025-03-11 20:56:20,983 Current Learning Rate: 0.0006767374 +2025-03-11 20:56:20,983 Train Loss: 0.0158908, Val Loss: 0.0153892 +2025-03-11 20:56:20,984 Epoch 724/2000 +2025-03-11 20:56:45,855 Current Learning Rate: 0.0006840623 +2025-03-11 20:56:45,857 Train Loss: 0.0158463, Val Loss: 0.0153693 +2025-03-11 20:56:45,858 Epoch 725/2000 +2025-03-11 20:57:10,687 Current Learning Rate: 0.0006913417 +2025-03-11 20:57:10,687 Train Loss: 0.0158837, Val Loss: 0.0153992 +2025-03-11 20:57:10,688 Epoch 726/2000 +2025-03-11 20:57:34,793 Current Learning Rate: 0.0006985739 +2025-03-11 20:57:34,793 Train Loss: 0.0158544, Val Loss: 0.0153773 +2025-03-11 20:57:34,793 Epoch 727/2000 +2025-03-11 20:57:59,472 Current Learning Rate: 0.0007057572 +2025-03-11 20:57:59,472 Train Loss: 0.0158500, Val Loss: 0.0154226 +2025-03-11 20:57:59,473 Epoch 728/2000 +2025-03-11 20:58:24,017 Current Learning Rate: 0.0007128896 +2025-03-11 20:58:24,018 Train Loss: 0.0158986, Val Loss: 0.0153835 +2025-03-11 20:58:24,018 Epoch 729/2000 +2025-03-11 20:58:48,458 Current Learning Rate: 0.0007199696 +2025-03-11 20:58:48,462 Train Loss: 0.0158443, Val Loss: 0.0153687 +2025-03-11 20:58:48,463 Epoch 730/2000 +2025-03-11 20:59:12,994 Current Learning Rate: 0.0007269952 +2025-03-11 20:59:12,994 Train Loss: 0.0158495, Val Loss: 0.0153895 +2025-03-11 20:59:12,995 Epoch 731/2000 +2025-03-11 20:59:37,423 Current Learning Rate: 0.0007339649 +2025-03-11 20:59:37,426 Train Loss: 0.0159212, Val Loss: 0.0153623 +2025-03-11 20:59:37,426 Epoch 732/2000 +2025-03-11 21:00:01,757 Current Learning 
Rate: 0.0007408768 +2025-03-11 21:00:01,757 Train Loss: 0.0158268, Val Loss: 0.0153715 +2025-03-11 21:00:01,757 Epoch 733/2000 +2025-03-11 21:00:26,555 Current Learning Rate: 0.0007477293 +2025-03-11 21:00:26,555 Train Loss: 0.0158651, Val Loss: 0.0153738 +2025-03-11 21:00:26,555 Epoch 734/2000 +2025-03-11 21:00:51,048 Current Learning Rate: 0.0007545207 +2025-03-11 21:00:51,048 Train Loss: 0.0158509, Val Loss: 0.0153653 +2025-03-11 21:00:51,048 Epoch 735/2000 +2025-03-11 21:01:15,671 Current Learning Rate: 0.0007612493 +2025-03-11 21:01:15,672 Train Loss: 0.0158736, Val Loss: 0.0153690 +2025-03-11 21:01:15,672 Epoch 736/2000 +2025-03-11 21:01:40,222 Current Learning Rate: 0.0007679134 +2025-03-11 21:01:40,222 Train Loss: 0.0158452, Val Loss: 0.0153653 +2025-03-11 21:01:40,223 Epoch 737/2000 +2025-03-11 21:02:04,443 Current Learning Rate: 0.0007745114 +2025-03-11 21:02:04,443 Train Loss: 0.0158682, Val Loss: 0.0153983 +2025-03-11 21:02:04,444 Epoch 738/2000 +2025-03-11 21:02:28,968 Current Learning Rate: 0.0007810417 +2025-03-11 21:02:28,973 Train Loss: 0.0158632, Val Loss: 0.0153622 +2025-03-11 21:02:28,974 Epoch 739/2000 +2025-03-11 21:02:53,445 Current Learning Rate: 0.0007875026 +2025-03-11 21:02:53,445 Train Loss: 0.0158562, Val Loss: 0.0153729 +2025-03-11 21:02:53,445 Epoch 740/2000 +2025-03-11 21:03:18,230 Current Learning Rate: 0.0007938926 +2025-03-11 21:03:18,231 Train Loss: 0.0158620, Val Loss: 0.0153853 +2025-03-11 21:03:18,231 Epoch 741/2000 +2025-03-11 21:03:42,583 Current Learning Rate: 0.0008002101 +2025-03-11 21:03:42,587 Train Loss: 0.0158623, Val Loss: 0.0153574 +2025-03-11 21:03:42,587 Epoch 742/2000 +2025-03-11 21:04:07,283 Current Learning Rate: 0.0008064535 +2025-03-11 21:04:07,284 Train Loss: 0.0158515, Val Loss: 0.0153709 +2025-03-11 21:04:07,284 Epoch 743/2000 +2025-03-11 21:04:32,166 Current Learning Rate: 0.0008126213 +2025-03-11 21:04:32,166 Train Loss: 0.0158741, Val Loss: 0.0153792 +2025-03-11 21:04:32,166 Epoch 744/2000 +2025-03-11 
21:04:56,309 Current Learning Rate: 0.0008187120 +2025-03-11 21:04:56,310 Train Loss: 0.0158774, Val Loss: 0.0154852 +2025-03-11 21:04:56,310 Epoch 745/2000 +2025-03-11 21:05:20,939 Current Learning Rate: 0.0008247240 +2025-03-11 21:05:20,940 Train Loss: 0.0158438, Val Loss: 0.0153815 +2025-03-11 21:05:20,940 Epoch 746/2000 +2025-03-11 21:05:45,401 Current Learning Rate: 0.0008306559 +2025-03-11 21:05:45,401 Train Loss: 0.0158391, Val Loss: 0.0157293 +2025-03-11 21:05:45,401 Epoch 747/2000 +2025-03-11 21:06:10,189 Current Learning Rate: 0.0008365063 +2025-03-11 21:06:10,192 Train Loss: 0.0158980, Val Loss: 0.0153455 +2025-03-11 21:06:10,193 Epoch 748/2000 +2025-03-11 21:06:34,770 Current Learning Rate: 0.0008422736 +2025-03-11 21:06:34,770 Train Loss: 0.0158315, Val Loss: 0.0154387 +2025-03-11 21:06:34,770 Epoch 749/2000 +2025-03-11 21:06:59,627 Current Learning Rate: 0.0008479564 +2025-03-11 21:06:59,627 Train Loss: 0.0158455, Val Loss: 0.0153734 +2025-03-11 21:06:59,627 Epoch 750/2000 +2025-03-11 21:07:24,346 Current Learning Rate: 0.0008535534 +2025-03-11 21:07:24,347 Train Loss: 0.0158680, Val Loss: 0.0153610 +2025-03-11 21:07:24,347 Epoch 751/2000 +2025-03-11 21:07:48,788 Current Learning Rate: 0.0008590631 +2025-03-11 21:07:48,789 Train Loss: 0.0158476, Val Loss: 0.0153813 +2025-03-11 21:07:48,789 Epoch 752/2000 +2025-03-11 21:08:13,325 Current Learning Rate: 0.0008644843 +2025-03-11 21:08:13,326 Train Loss: 0.0158619, Val Loss: 0.0153560 +2025-03-11 21:08:13,326 Epoch 753/2000 +2025-03-11 21:08:38,047 Current Learning Rate: 0.0008698155 +2025-03-11 21:08:38,047 Train Loss: 0.0158527, Val Loss: 0.0153713 +2025-03-11 21:08:38,048 Epoch 754/2000 +2025-03-11 21:09:03,016 Current Learning Rate: 0.0008750555 +2025-03-11 21:09:03,019 Train Loss: 0.0158391, Val Loss: 0.0153399 +2025-03-11 21:09:03,020 Epoch 755/2000 +2025-03-11 21:09:27,798 Current Learning Rate: 0.0008802030 +2025-03-11 21:09:27,799 Train Loss: 0.0158779, Val Loss: 0.0153591 +2025-03-11 
21:09:27,799 Epoch 756/2000 +2025-03-11 21:09:52,280 Current Learning Rate: 0.0008852566 +2025-03-11 21:09:52,280 Train Loss: 0.0158432, Val Loss: 0.0153498 +2025-03-11 21:09:52,281 Epoch 757/2000 +2025-03-11 21:10:17,014 Current Learning Rate: 0.0008902152 +2025-03-11 21:10:17,015 Train Loss: 0.0158356, Val Loss: 0.0153412 +2025-03-11 21:10:17,015 Epoch 758/2000 +2025-03-11 21:10:41,763 Current Learning Rate: 0.0008950775 +2025-03-11 21:10:41,764 Train Loss: 0.0158923, Val Loss: 0.0153574 +2025-03-11 21:10:41,764 Epoch 759/2000 +2025-03-11 21:11:06,081 Current Learning Rate: 0.0008998423 +2025-03-11 21:11:06,081 Train Loss: 0.0158275, Val Loss: 0.0154354 +2025-03-11 21:11:06,081 Epoch 760/2000 +2025-03-11 21:11:30,040 Current Learning Rate: 0.0009045085 +2025-03-11 21:11:30,043 Train Loss: 0.0158529, Val Loss: 0.0153391 +2025-03-11 21:11:30,043 Epoch 761/2000 +2025-03-11 21:11:54,036 Current Learning Rate: 0.0009090749 +2025-03-11 21:11:54,036 Train Loss: 0.0158367, Val Loss: 0.0153576 +2025-03-11 21:11:54,036 Epoch 762/2000 +2025-03-11 21:12:17,764 Current Learning Rate: 0.0009135403 +2025-03-11 21:12:17,767 Train Loss: 0.0158783, Val Loss: 0.0153316 +2025-03-11 21:12:17,767 Epoch 763/2000 +2025-03-11 21:12:41,739 Current Learning Rate: 0.0009179037 +2025-03-11 21:12:41,740 Train Loss: 0.0158478, Val Loss: 0.0154301 +2025-03-11 21:12:41,740 Epoch 764/2000 +2025-03-11 21:13:05,767 Current Learning Rate: 0.0009221640 +2025-03-11 21:13:05,767 Train Loss: 0.0158450, Val Loss: 0.0153823 +2025-03-11 21:13:05,767 Epoch 765/2000 +2025-03-11 21:13:30,075 Current Learning Rate: 0.0009263201 +2025-03-11 21:13:30,075 Train Loss: 0.0158293, Val Loss: 0.0154105 +2025-03-11 21:13:30,076 Epoch 766/2000 +2025-03-11 21:13:54,276 Current Learning Rate: 0.0009303710 +2025-03-11 21:13:54,277 Train Loss: 0.0158355, Val Loss: 0.0153423 +2025-03-11 21:13:54,277 Epoch 767/2000 +2025-03-11 21:14:18,352 Current Learning Rate: 0.0009343158 +2025-03-11 21:14:18,352 Train Loss: 0.0158631, Val 
Loss: 0.0153529 +2025-03-11 21:14:18,353 Epoch 768/2000 +2025-03-11 21:14:42,647 Current Learning Rate: 0.0009381533 +2025-03-11 21:14:42,647 Train Loss: 0.0158381, Val Loss: 0.0153380 +2025-03-11 21:14:42,647 Epoch 769/2000 +2025-03-11 21:15:07,052 Current Learning Rate: 0.0009418828 +2025-03-11 21:15:07,053 Train Loss: 0.0158361, Val Loss: 0.0158664 +2025-03-11 21:15:07,053 Epoch 770/2000 +2025-03-11 21:15:31,792 Current Learning Rate: 0.0009455033 +2025-03-11 21:15:31,793 Train Loss: 0.0158853, Val Loss: 0.0153659 +2025-03-11 21:15:31,793 Epoch 771/2000 +2025-03-11 21:15:56,120 Current Learning Rate: 0.0009490138 +2025-03-11 21:15:56,120 Train Loss: 0.0158180, Val Loss: 0.0153513 +2025-03-11 21:15:56,121 Epoch 772/2000 +2025-03-11 21:16:20,441 Current Learning Rate: 0.0009524135 +2025-03-11 21:16:20,441 Train Loss: 0.0158503, Val Loss: 0.0159346 +2025-03-11 21:16:20,441 Epoch 773/2000 +2025-03-11 21:16:44,766 Current Learning Rate: 0.0009557016 +2025-03-11 21:16:44,766 Train Loss: 0.0158387, Val Loss: 0.0153493 +2025-03-11 21:16:44,767 Epoch 774/2000 +2025-03-11 21:17:09,368 Current Learning Rate: 0.0009588773 +2025-03-11 21:17:09,368 Train Loss: 0.0158759, Val Loss: 0.0154440 +2025-03-11 21:17:09,369 Epoch 775/2000 +2025-03-11 21:17:33,856 Current Learning Rate: 0.0009619398 +2025-03-11 21:17:33,857 Train Loss: 0.0158082, Val Loss: 0.0153624 +2025-03-11 21:17:33,857 Epoch 776/2000 +2025-03-11 21:17:58,338 Current Learning Rate: 0.0009648882 +2025-03-11 21:17:58,338 Train Loss: 0.0158396, Val Loss: 0.0154807 +2025-03-11 21:17:58,339 Epoch 777/2000 +2025-03-11 21:18:22,698 Current Learning Rate: 0.0009677220 +2025-03-11 21:18:22,698 Train Loss: 0.0158552, Val Loss: 0.0153708 +2025-03-11 21:18:22,698 Epoch 778/2000 +2025-03-11 21:18:47,342 Current Learning Rate: 0.0009704404 +2025-03-11 21:18:47,342 Train Loss: 0.0158104, Val Loss: 0.0154642 +2025-03-11 21:18:47,342 Epoch 779/2000 +2025-03-11 21:19:12,144 Current Learning Rate: 0.0009730427 +2025-03-11 
21:19:12,144 Train Loss: 0.0158616, Val Loss: 0.0153721 +2025-03-11 21:19:12,144 Epoch 780/2000 +2025-03-11 21:19:36,444 Current Learning Rate: 0.0009755283 +2025-03-11 21:19:36,444 Train Loss: 0.0158167, Val Loss: 0.0155904 +2025-03-11 21:19:36,444 Epoch 781/2000 +2025-03-11 21:20:00,892 Current Learning Rate: 0.0009778965 +2025-03-11 21:20:00,893 Train Loss: 0.0158702, Val Loss: 0.0154595 +2025-03-11 21:20:00,893 Epoch 782/2000 +2025-03-11 21:20:25,786 Current Learning Rate: 0.0009801468 +2025-03-11 21:20:25,787 Train Loss: 0.0158126, Val Loss: 0.0154747 +2025-03-11 21:20:25,787 Epoch 783/2000 +2025-03-11 21:20:50,218 Current Learning Rate: 0.0009822787 +2025-03-11 21:20:50,218 Train Loss: 0.0158180, Val Loss: 0.0153437 +2025-03-11 21:20:50,218 Epoch 784/2000 +2025-03-11 21:21:14,783 Current Learning Rate: 0.0009842916 +2025-03-11 21:21:14,783 Train Loss: 0.0158504, Val Loss: 0.0155788 +2025-03-11 21:21:14,784 Epoch 785/2000 +2025-03-11 21:21:39,545 Current Learning Rate: 0.0009861850 +2025-03-11 21:21:39,546 Train Loss: 0.0158405, Val Loss: 0.0153719 +2025-03-11 21:21:39,546 Epoch 786/2000 +2025-03-11 21:22:04,079 Current Learning Rate: 0.0009879584 +2025-03-11 21:22:04,079 Train Loss: 0.0158058, Val Loss: 0.0153627 +2025-03-11 21:22:04,079 Epoch 787/2000 +2025-03-11 21:22:28,073 Current Learning Rate: 0.0009896114 +2025-03-11 21:22:28,073 Train Loss: 0.0158246, Val Loss: 0.0155051 +2025-03-11 21:22:28,073 Epoch 788/2000 +2025-03-11 21:22:52,388 Current Learning Rate: 0.0009911436 +2025-03-11 21:22:52,388 Train Loss: 0.0158542, Val Loss: 0.0153697 +2025-03-11 21:22:52,389 Epoch 789/2000 +2025-03-11 21:23:16,353 Current Learning Rate: 0.0009925547 +2025-03-11 21:23:16,353 Train Loss: 0.0158247, Val Loss: 0.0153848 +2025-03-11 21:23:16,354 Epoch 790/2000 +2025-03-11 21:23:40,843 Current Learning Rate: 0.0009938442 +2025-03-11 21:23:40,844 Train Loss: 0.0158264, Val Loss: 0.0153354 +2025-03-11 21:23:40,844 Epoch 791/2000 +2025-03-11 21:24:05,165 Current Learning 
Rate: 0.0009950118 +2025-03-11 21:24:05,165 Train Loss: 0.0157983, Val Loss: 0.0155731 +2025-03-11 21:24:05,165 Epoch 792/2000 +2025-03-11 21:24:29,781 Current Learning Rate: 0.0009960574 +2025-03-11 21:24:29,784 Train Loss: 0.0158809, Val Loss: 0.0153049 +2025-03-11 21:24:29,784 Epoch 793/2000 +2025-03-11 21:24:55,598 Current Learning Rate: 0.0009969805 +2025-03-11 21:24:55,598 Train Loss: 0.0158369, Val Loss: 0.0153837 +2025-03-11 21:24:55,598 Epoch 794/2000 +2025-03-11 21:25:20,775 Current Learning Rate: 0.0009977810 +2025-03-11 21:25:20,776 Train Loss: 0.0157935, Val Loss: 0.0153454 +2025-03-11 21:25:20,776 Epoch 795/2000 +2025-03-11 21:25:45,744 Current Learning Rate: 0.0009984587 +2025-03-11 21:25:45,744 Train Loss: 0.0158255, Val Loss: 0.0153195 +2025-03-11 21:25:45,744 Epoch 796/2000 +2025-03-11 21:26:10,380 Current Learning Rate: 0.0009990134 +2025-03-11 21:26:10,380 Train Loss: 0.0158110, Val Loss: 0.0155679 +2025-03-11 21:26:10,381 Epoch 797/2000 +2025-03-11 21:26:35,250 Current Learning Rate: 0.0009994449 +2025-03-11 21:26:35,250 Train Loss: 0.0158076, Val Loss: 0.0154174 +2025-03-11 21:26:35,250 Epoch 798/2000 +2025-03-11 21:26:59,879 Current Learning Rate: 0.0009997533 +2025-03-11 21:26:59,880 Train Loss: 0.0158425, Val Loss: 0.0155380 +2025-03-11 21:26:59,880 Epoch 799/2000 +2025-03-11 21:27:25,231 Current Learning Rate: 0.0009999383 +2025-03-11 21:27:25,232 Train Loss: 0.0158210, Val Loss: 0.0153198 +2025-03-11 21:27:25,232 Epoch 800/2000 +2025-03-11 21:27:50,426 Current Learning Rate: 0.0010000000 +2025-03-11 21:27:50,427 Train Loss: 0.0158275, Val Loss: 0.0153623 +2025-03-11 21:27:50,427 Epoch 801/2000 +2025-03-11 21:28:15,947 Current Learning Rate: 0.0009999383 +2025-03-11 21:28:15,947 Train Loss: 0.0157950, Val Loss: 0.0153130 +2025-03-11 21:28:15,947 Epoch 802/2000 +2025-03-11 21:28:41,463 Current Learning Rate: 0.0009997533 +2025-03-11 21:28:41,464 Train Loss: 0.0158086, Val Loss: 0.0153277 +2025-03-11 21:28:41,464 Epoch 803/2000 +2025-03-11 
21:29:06,870 Current Learning Rate: 0.0009994449 +2025-03-11 21:29:06,874 Train Loss: 0.0158042, Val Loss: 0.0153046 +2025-03-11 21:29:06,875 Epoch 804/2000 +2025-03-11 21:29:32,132 Current Learning Rate: 0.0009990134 +2025-03-11 21:29:32,132 Train Loss: 0.0158620, Val Loss: 0.0153241 +2025-03-11 21:29:32,132 Epoch 805/2000 +2025-03-11 21:29:57,277 Current Learning Rate: 0.0009984587 +2025-03-11 21:29:57,278 Train Loss: 0.0157706, Val Loss: 0.0153549 +2025-03-11 21:29:57,278 Epoch 806/2000 +2025-03-11 21:30:22,374 Current Learning Rate: 0.0009977810 +2025-03-11 21:30:22,377 Train Loss: 0.0158094, Val Loss: 0.0152945 +2025-03-11 21:30:22,377 Epoch 807/2000 +2025-03-11 21:30:47,449 Current Learning Rate: 0.0009969805 +2025-03-11 21:30:47,450 Train Loss: 0.0158006, Val Loss: 0.0153141 +2025-03-11 21:30:47,450 Epoch 808/2000 +2025-03-11 21:31:12,585 Current Learning Rate: 0.0009960574 +2025-03-11 21:31:12,585 Train Loss: 0.0158353, Val Loss: 0.0153195 +2025-03-11 21:31:12,585 Epoch 809/2000 +2025-03-11 21:31:37,435 Current Learning Rate: 0.0009950118 +2025-03-11 21:31:37,436 Train Loss: 0.0157840, Val Loss: 0.0153089 +2025-03-11 21:31:37,436 Epoch 810/2000 +2025-03-11 21:32:02,795 Current Learning Rate: 0.0009938442 +2025-03-11 21:32:02,796 Train Loss: 0.0158155, Val Loss: 0.0155001 +2025-03-11 21:32:02,796 Epoch 811/2000 +2025-03-11 21:32:28,075 Current Learning Rate: 0.0009925547 +2025-03-11 21:32:28,076 Train Loss: 0.0158141, Val Loss: 0.0153673 +2025-03-11 21:32:28,076 Epoch 812/2000 +2025-03-11 21:32:53,351 Current Learning Rate: 0.0009911436 +2025-03-11 21:32:53,352 Train Loss: 0.0157811, Val Loss: 0.0153146 +2025-03-11 21:32:53,352 Epoch 813/2000 +2025-03-11 21:33:18,062 Current Learning Rate: 0.0009896114 +2025-03-11 21:33:18,063 Train Loss: 0.0158098, Val Loss: 0.0158425 +2025-03-11 21:33:18,063 Epoch 814/2000 +2025-03-11 21:33:43,007 Current Learning Rate: 0.0009879584 +2025-03-11 21:33:43,007 Train Loss: 0.0157937, Val Loss: 0.0152982 +2025-03-11 
21:33:43,007 Epoch 815/2000 +2025-03-11 21:34:08,299 Current Learning Rate: 0.0009861850 +2025-03-11 21:34:08,299 Train Loss: 0.0157943, Val Loss: 0.0153646 +2025-03-11 21:34:08,300 Epoch 816/2000 +2025-03-11 21:34:33,022 Current Learning Rate: 0.0009842916 +2025-03-11 21:34:33,022 Train Loss: 0.0157977, Val Loss: 0.0153175 +2025-03-11 21:34:33,022 Epoch 817/2000 +2025-03-11 21:34:57,722 Current Learning Rate: 0.0009822787 +2025-03-11 21:34:57,723 Train Loss: 0.0157990, Val Loss: 0.0153059 +2025-03-11 21:34:57,723 Epoch 818/2000 +2025-03-11 21:35:22,933 Current Learning Rate: 0.0009801468 +2025-03-11 21:35:22,934 Train Loss: 0.0157958, Val Loss: 0.0155269 +2025-03-11 21:35:22,934 Epoch 819/2000 +2025-03-11 21:35:47,804 Current Learning Rate: 0.0009778965 +2025-03-11 21:35:47,804 Train Loss: 0.0157737, Val Loss: 0.0153453 +2025-03-11 21:35:47,805 Epoch 820/2000 +2025-03-11 21:36:12,830 Current Learning Rate: 0.0009755283 +2025-03-11 21:36:12,833 Train Loss: 0.0158079, Val Loss: 0.0152804 +2025-03-11 21:36:12,833 Epoch 821/2000 +2025-03-11 21:36:37,409 Current Learning Rate: 0.0009730427 +2025-03-11 21:36:37,409 Train Loss: 0.0157791, Val Loss: 0.0153129 +2025-03-11 21:36:37,410 Epoch 822/2000 +2025-03-11 21:37:02,295 Current Learning Rate: 0.0009704404 +2025-03-11 21:37:02,295 Train Loss: 0.0157810, Val Loss: 0.0153443 +2025-03-11 21:37:02,296 Epoch 823/2000 +2025-03-11 21:37:27,046 Current Learning Rate: 0.0009677220 +2025-03-11 21:37:27,047 Train Loss: 0.0157800, Val Loss: 0.0153147 +2025-03-11 21:37:27,047 Epoch 824/2000 +2025-03-11 21:37:52,549 Current Learning Rate: 0.0009648882 +2025-03-11 21:37:52,549 Train Loss: 0.0157781, Val Loss: 0.0152842 +2025-03-11 21:37:52,549 Epoch 825/2000 +2025-03-11 21:38:17,307 Current Learning Rate: 0.0009619398 +2025-03-11 21:38:17,308 Train Loss: 0.0157889, Val Loss: 0.0152929 +2025-03-11 21:38:17,308 Epoch 826/2000 +2025-03-11 21:38:41,736 Current Learning Rate: 0.0009588773 +2025-03-11 21:38:41,737 Train Loss: 0.0157842, Val 
Loss: 0.0153028 +2025-03-11 21:38:41,737 Epoch 827/2000 +2025-03-11 21:39:06,622 Current Learning Rate: 0.0009557016 +2025-03-11 21:39:06,622 Train Loss: 0.0157709, Val Loss: 0.0152988 +2025-03-11 21:39:06,623 Epoch 828/2000 +2025-03-11 21:39:31,072 Current Learning Rate: 0.0009524135 +2025-03-11 21:39:31,073 Train Loss: 0.0157772, Val Loss: 0.0152835 +2025-03-11 21:39:31,073 Epoch 829/2000 +2025-03-11 21:39:55,776 Current Learning Rate: 0.0009490138 +2025-03-11 21:39:55,776 Train Loss: 0.0158005, Val Loss: 0.0153173 +2025-03-11 21:39:55,776 Epoch 830/2000 +2025-03-11 21:40:20,610 Current Learning Rate: 0.0009455033 +2025-03-11 21:40:20,614 Train Loss: 0.0157546, Val Loss: 0.0152698 +2025-03-11 21:40:20,614 Epoch 831/2000 +2025-03-11 21:40:45,572 Current Learning Rate: 0.0009418828 +2025-03-11 21:40:45,573 Train Loss: 0.0157745, Val Loss: 0.0152797 +2025-03-11 21:40:45,573 Epoch 832/2000 +2025-03-11 21:41:10,109 Current Learning Rate: 0.0009381533 +2025-03-11 21:41:10,109 Train Loss: 0.0158067, Val Loss: 0.0154737 +2025-03-11 21:41:10,110 Epoch 833/2000 +2025-03-11 21:41:34,562 Current Learning Rate: 0.0009343158 +2025-03-11 21:41:34,563 Train Loss: 0.0157569, Val Loss: 0.0152808 +2025-03-11 21:41:34,563 Epoch 834/2000 +2025-03-11 21:41:59,473 Current Learning Rate: 0.0009303710 +2025-03-11 21:41:59,474 Train Loss: 0.0157597, Val Loss: 0.0154007 +2025-03-11 21:41:59,474 Epoch 835/2000 +2025-03-11 21:42:24,461 Current Learning Rate: 0.0009263201 +2025-03-11 21:42:24,461 Train Loss: 0.0157673, Val Loss: 0.0152819 +2025-03-11 21:42:24,462 Epoch 836/2000 +2025-03-11 21:42:49,413 Current Learning Rate: 0.0009221640 +2025-03-11 21:42:49,414 Train Loss: 0.0157726, Val Loss: 0.0152970 +2025-03-11 21:42:49,414 Epoch 837/2000 +2025-03-11 21:43:14,254 Current Learning Rate: 0.0009179037 +2025-03-11 21:43:14,256 Train Loss: 0.0157453, Val Loss: 0.0152641 +2025-03-11 21:43:14,257 Epoch 838/2000 +2025-03-11 21:43:38,913 Current Learning Rate: 0.0009135403 +2025-03-11 
21:43:38,913 Train Loss: 0.0157726, Val Loss: 0.0152853 +2025-03-11 21:43:38,914 Epoch 839/2000 +2025-03-11 21:44:04,119 Current Learning Rate: 0.0009090749 +2025-03-11 21:44:04,120 Train Loss: 0.0157566, Val Loss: 0.0153158 +2025-03-11 21:44:04,124 Epoch 840/2000 +2025-03-11 21:44:29,624 Current Learning Rate: 0.0009045085 +2025-03-11 21:44:29,625 Train Loss: 0.0157528, Val Loss: 0.0154433 +2025-03-11 21:44:29,625 Epoch 841/2000 +2025-03-11 21:44:54,931 Current Learning Rate: 0.0008998423 +2025-03-11 21:44:54,932 Train Loss: 0.0157600, Val Loss: 0.0153859 +2025-03-11 21:44:54,932 Epoch 842/2000 +2025-03-11 21:45:20,092 Current Learning Rate: 0.0008950775 +2025-03-11 21:45:20,093 Train Loss: 0.0157970, Val Loss: 0.0152863 +2025-03-11 21:45:20,093 Epoch 843/2000 +2025-03-11 21:45:45,368 Current Learning Rate: 0.0008902152 +2025-03-11 21:45:45,369 Train Loss: 0.0157294, Val Loss: 0.0153707 +2025-03-11 21:45:45,369 Epoch 844/2000 +2025-03-11 21:46:10,219 Current Learning Rate: 0.0008852566 +2025-03-11 21:46:10,220 Train Loss: 0.0157692, Val Loss: 0.0153682 +2025-03-11 21:46:10,220 Epoch 845/2000 +2025-03-11 21:46:35,137 Current Learning Rate: 0.0008802030 +2025-03-11 21:46:35,138 Train Loss: 0.0157533, Val Loss: 0.0153387 +2025-03-11 21:46:35,138 Epoch 846/2000 +2025-03-11 21:47:00,128 Current Learning Rate: 0.0008750555 +2025-03-11 21:47:00,131 Train Loss: 0.0157386, Val Loss: 0.0152624 +2025-03-11 21:47:00,132 Epoch 847/2000 +2025-03-11 21:47:24,649 Current Learning Rate: 0.0008698155 +2025-03-11 21:47:24,649 Train Loss: 0.0157483, Val Loss: 0.0152928 +2025-03-11 21:47:24,650 Epoch 848/2000 +2025-03-11 21:47:49,706 Current Learning Rate: 0.0008644843 +2025-03-11 21:47:49,707 Train Loss: 0.0157409, Val Loss: 0.0153636 +2025-03-11 21:47:49,707 Epoch 849/2000 +2025-03-11 21:48:14,248 Current Learning Rate: 0.0008590631 +2025-03-11 21:48:14,248 Train Loss: 0.0157426, Val Loss: 0.0152661 +2025-03-11 21:48:14,248 Epoch 850/2000 +2025-03-11 21:48:38,712 Current Learning 
Rate: 0.0008535534 +2025-03-11 21:48:38,713 Train Loss: 0.0157581, Val Loss: 0.0153268 +2025-03-11 21:48:38,713 Epoch 851/2000 +2025-03-11 21:49:03,546 Current Learning Rate: 0.0008479564 +2025-03-11 21:49:03,547 Train Loss: 0.0157224, Val Loss: 0.0152704 +2025-03-11 21:49:03,547 Epoch 852/2000 +2025-03-11 21:49:28,365 Current Learning Rate: 0.0008422736 +2025-03-11 21:49:28,366 Train Loss: 0.0157397, Val Loss: 0.0154259 +2025-03-11 21:49:28,366 Epoch 853/2000 +2025-03-11 21:49:53,311 Current Learning Rate: 0.0008365063 +2025-03-11 21:49:53,315 Train Loss: 0.0157376, Val Loss: 0.0152599 +2025-03-11 21:49:53,315 Epoch 854/2000 +2025-03-11 21:50:17,906 Current Learning Rate: 0.0008306559 +2025-03-11 21:50:17,906 Train Loss: 0.0157322, Val Loss: 0.0153535 +2025-03-11 21:50:17,906 Epoch 855/2000 +2025-03-11 21:50:42,513 Current Learning Rate: 0.0008247240 +2025-03-11 21:50:42,513 Train Loss: 0.0157550, Val Loss: 0.0152773 +2025-03-11 21:50:42,514 Epoch 856/2000 +2025-03-11 21:51:07,078 Current Learning Rate: 0.0008187120 +2025-03-11 21:51:07,078 Train Loss: 0.0157271, Val Loss: 0.0152745 +2025-03-11 21:51:07,079 Epoch 857/2000 +2025-03-11 21:51:31,743 Current Learning Rate: 0.0008126213 +2025-03-11 21:51:31,746 Train Loss: 0.0157273, Val Loss: 0.0152545 +2025-03-11 21:51:31,746 Epoch 858/2000 +2025-03-11 21:51:56,437 Current Learning Rate: 0.0008064535 +2025-03-11 21:51:56,441 Train Loss: 0.0157360, Val Loss: 0.0152457 +2025-03-11 21:51:56,441 Epoch 859/2000 +2025-03-11 21:52:21,321 Current Learning Rate: 0.0008002101 +2025-03-11 21:52:21,322 Train Loss: 0.0157151, Val Loss: 0.0152532 +2025-03-11 21:52:21,322 Epoch 860/2000 +2025-03-11 21:52:46,044 Current Learning Rate: 0.0007938926 +2025-03-11 21:52:46,044 Train Loss: 0.0157244, Val Loss: 0.0152602 +2025-03-11 21:52:46,044 Epoch 861/2000 +2025-03-11 21:53:10,133 Current Learning Rate: 0.0007875026 +2025-03-11 21:53:10,133 Train Loss: 0.0157284, Val Loss: 0.0152837 +2025-03-11 21:53:10,133 Epoch 862/2000 +2025-03-11 
21:53:34,955 Current Learning Rate: 0.0007810417 +2025-03-11 21:53:34,955 Train Loss: 0.0157249, Val Loss: 0.0153423 +2025-03-11 21:53:34,955 Epoch 863/2000 +2025-03-11 21:53:59,350 Current Learning Rate: 0.0007745114 +2025-03-11 21:53:59,350 Train Loss: 0.0157140, Val Loss: 0.0152990 +2025-03-11 21:53:59,351 Epoch 864/2000 +2025-03-11 21:54:24,185 Current Learning Rate: 0.0007679134 +2025-03-11 21:54:24,185 Train Loss: 0.0157339, Val Loss: 0.0152482 +2025-03-11 21:54:24,185 Epoch 865/2000 +2025-03-11 21:54:48,479 Current Learning Rate: 0.0007612493 +2025-03-11 21:54:48,483 Train Loss: 0.0157143, Val Loss: 0.0152419 +2025-03-11 21:54:48,483 Epoch 866/2000 +2025-03-11 21:55:12,812 Current Learning Rate: 0.0007545207 +2025-03-11 21:55:12,813 Train Loss: 0.0157062, Val Loss: 0.0153124 +2025-03-11 21:55:12,813 Epoch 867/2000 +2025-03-11 21:55:36,968 Current Learning Rate: 0.0007477293 +2025-03-11 21:55:36,969 Train Loss: 0.0157334, Val Loss: 0.0152557 +2025-03-11 21:55:36,969 Epoch 868/2000 +2025-03-11 21:56:01,374 Current Learning Rate: 0.0007408768 +2025-03-11 21:56:01,375 Train Loss: 0.0157075, Val Loss: 0.0152426 +2025-03-11 21:56:01,375 Epoch 869/2000 +2025-03-11 21:56:25,840 Current Learning Rate: 0.0007339649 +2025-03-11 21:56:25,841 Train Loss: 0.0157201, Val Loss: 0.0153182 +2025-03-11 21:56:25,841 Epoch 870/2000 +2025-03-11 21:56:50,249 Current Learning Rate: 0.0007269952 +2025-03-11 21:56:50,249 Train Loss: 0.0157026, Val Loss: 0.0153307 +2025-03-11 21:56:50,249 Epoch 871/2000 +2025-03-11 21:57:14,714 Current Learning Rate: 0.0007199696 +2025-03-11 21:57:14,714 Train Loss: 0.0157116, Val Loss: 0.0153952 +2025-03-11 21:57:14,714 Epoch 872/2000 +2025-03-11 21:57:38,870 Current Learning Rate: 0.0007128896 +2025-03-11 21:57:38,871 Train Loss: 0.0156995, Val Loss: 0.0152875 +2025-03-11 21:57:38,871 Epoch 873/2000 +2025-03-11 21:58:02,753 Current Learning Rate: 0.0007057572 +2025-03-11 21:58:02,753 Train Loss: 0.0157153, Val Loss: 0.0152522 +2025-03-11 
21:58:02,754 Epoch 874/2000 +2025-03-11 21:58:26,728 Current Learning Rate: 0.0006985739 +2025-03-11 21:58:26,731 Train Loss: 0.0157013, Val Loss: 0.0152348 +2025-03-11 21:58:26,731 Epoch 875/2000 +2025-03-11 21:58:50,882 Current Learning Rate: 0.0006913417 +2025-03-11 21:58:50,886 Train Loss: 0.0157016, Val Loss: 0.0152328 +2025-03-11 21:58:50,886 Epoch 876/2000 +2025-03-11 21:59:14,939 Current Learning Rate: 0.0006840623 +2025-03-11 21:59:14,942 Train Loss: 0.0157005, Val Loss: 0.0152288 +2025-03-11 21:59:14,943 Epoch 877/2000 +2025-03-11 21:59:39,062 Current Learning Rate: 0.0006767374 +2025-03-11 21:59:39,063 Train Loss: 0.0156982, Val Loss: 0.0152373 +2025-03-11 21:59:39,063 Epoch 878/2000 +2025-03-11 22:00:03,465 Current Learning Rate: 0.0006693690 +2025-03-11 22:00:03,465 Train Loss: 0.0156974, Val Loss: 0.0152384 +2025-03-11 22:00:03,466 Epoch 879/2000 +2025-03-11 22:00:27,735 Current Learning Rate: 0.0006619587 +2025-03-11 22:00:27,736 Train Loss: 0.0156938, Val Loss: 0.0152373 +2025-03-11 22:00:27,736 Epoch 880/2000 +2025-03-11 22:00:51,603 Current Learning Rate: 0.0006545085 +2025-03-11 22:00:51,607 Train Loss: 0.0156947, Val Loss: 0.0152249 +2025-03-11 22:00:51,607 Epoch 881/2000 +2025-03-11 22:01:15,673 Current Learning Rate: 0.0006470202 +2025-03-11 22:01:15,674 Train Loss: 0.0156920, Val Loss: 0.0152251 +2025-03-11 22:01:15,674 Epoch 882/2000 +2025-03-11 22:01:40,060 Current Learning Rate: 0.0006394956 +2025-03-11 22:01:40,063 Train Loss: 0.0156914, Val Loss: 0.0152248 +2025-03-11 22:01:40,063 Epoch 883/2000 +2025-03-11 22:02:04,566 Current Learning Rate: 0.0006319365 +2025-03-11 22:02:04,570 Train Loss: 0.0156886, Val Loss: 0.0152210 +2025-03-11 22:02:04,570 Epoch 884/2000 +2025-03-11 22:02:29,033 Current Learning Rate: 0.0006243449 +2025-03-11 22:02:29,034 Train Loss: 0.0156862, Val Loss: 0.0152660 +2025-03-11 22:02:29,034 Epoch 885/2000 +2025-03-11 22:02:53,488 Current Learning Rate: 0.0006167227 +2025-03-11 22:02:53,489 Train Loss: 0.0156866, Val 
Loss: 0.0152373 +2025-03-11 22:02:53,489 Epoch 886/2000 +2025-03-11 22:03:18,155 Current Learning Rate: 0.0006090716 +2025-03-11 22:03:18,155 Train Loss: 0.0156905, Val Loss: 0.0152210 +2025-03-11 22:03:18,155 Epoch 887/2000 +2025-03-11 22:03:43,070 Current Learning Rate: 0.0006013936 +2025-03-11 22:03:43,071 Train Loss: 0.0156802, Val Loss: 0.0152965 +2025-03-11 22:03:43,071 Epoch 888/2000 +2025-03-11 22:04:07,681 Current Learning Rate: 0.0005936907 +2025-03-11 22:04:07,681 Train Loss: 0.0156834, Val Loss: 0.0152357 +2025-03-11 22:04:07,682 Epoch 889/2000 +2025-03-11 22:04:32,850 Current Learning Rate: 0.0005859646 +2025-03-11 22:04:32,851 Train Loss: 0.0156786, Val Loss: 0.0152549 +2025-03-11 22:04:32,852 Epoch 890/2000 +2025-03-11 22:04:58,152 Current Learning Rate: 0.0005782172 +2025-03-11 22:04:58,153 Train Loss: 0.0156830, Val Loss: 0.0152287 +2025-03-11 22:04:58,153 Epoch 891/2000 +2025-03-11 22:05:23,365 Current Learning Rate: 0.0005704506 +2025-03-11 22:05:23,366 Train Loss: 0.0156746, Val Loss: 0.0152259 +2025-03-11 22:05:23,366 Epoch 892/2000 +2025-03-11 22:05:48,186 Current Learning Rate: 0.0005626666 +2025-03-11 22:05:48,187 Train Loss: 0.0156768, Val Loss: 0.0152473 +2025-03-11 22:05:48,187 Epoch 893/2000 +2025-03-11 22:06:13,552 Current Learning Rate: 0.0005548672 +2025-03-11 22:06:13,552 Train Loss: 0.0156790, Val Loss: 0.0152227 +2025-03-11 22:06:13,552 Epoch 894/2000 +2025-03-11 22:06:38,553 Current Learning Rate: 0.0005470542 +2025-03-11 22:06:38,553 Train Loss: 0.0156705, Val Loss: 0.0152265 +2025-03-11 22:06:38,553 Epoch 895/2000 +2025-03-11 22:07:03,337 Current Learning Rate: 0.0005392295 +2025-03-11 22:07:03,338 Train Loss: 0.0156742, Val Loss: 0.0152463 +2025-03-11 22:07:03,339 Epoch 896/2000 +2025-03-11 22:07:28,515 Current Learning Rate: 0.0005313953 +2025-03-11 22:07:28,515 Train Loss: 0.0156720, Val Loss: 0.0152246 +2025-03-11 22:07:28,516 Epoch 897/2000 +2025-03-11 22:07:54,392 Current Learning Rate: 0.0005235532 +2025-03-11 
22:07:54,392 Train Loss: 0.0156682, Val Loss: 0.0152425 +2025-03-11 22:07:54,392 Epoch 898/2000 +2025-03-11 22:08:20,015 Current Learning Rate: 0.0005157054 +2025-03-11 22:08:20,019 Train Loss: 0.0156715, Val Loss: 0.0152182 +2025-03-11 22:08:20,019 Epoch 899/2000 +2025-03-11 22:08:45,793 Current Learning Rate: 0.0005078537 +2025-03-11 22:08:45,793 Train Loss: 0.0156652, Val Loss: 0.0152445 +2025-03-11 22:08:45,794 Epoch 900/2000 +2025-03-11 22:09:10,970 Current Learning Rate: 0.0005000000 +2025-03-11 22:09:10,971 Train Loss: 0.0156683, Val Loss: 0.0152193 +2025-03-11 22:09:10,971 Epoch 901/2000 +2025-03-11 22:09:36,245 Current Learning Rate: 0.0004921463 +2025-03-11 22:09:36,245 Train Loss: 0.0156635, Val Loss: 0.0152434 +2025-03-11 22:09:36,246 Epoch 902/2000 +2025-03-11 22:10:01,200 Current Learning Rate: 0.0004842946 +2025-03-11 22:10:01,200 Train Loss: 0.0156644, Val Loss: 0.0152205 +2025-03-11 22:10:01,200 Epoch 903/2000 +2025-03-11 22:10:26,457 Current Learning Rate: 0.0004764468 +2025-03-11 22:10:26,458 Train Loss: 0.0156618, Val Loss: 0.0152394 +2025-03-11 22:10:26,458 Epoch 904/2000 +2025-03-11 22:10:50,632 Current Learning Rate: 0.0004686047 +2025-03-11 22:10:50,632 Train Loss: 0.0156616, Val Loss: 0.0152228 +2025-03-11 22:10:50,633 Epoch 905/2000 +2025-03-11 22:11:15,497 Current Learning Rate: 0.0004607705 +2025-03-11 22:11:15,498 Train Loss: 0.0156598, Val Loss: 0.0152343 +2025-03-11 22:11:15,498 Epoch 906/2000 +2025-03-11 22:11:40,768 Current Learning Rate: 0.0004529458 +2025-03-11 22:11:40,768 Train Loss: 0.0156589, Val Loss: 0.0152247 +2025-03-11 22:11:40,769 Epoch 907/2000 +2025-03-11 22:12:05,570 Current Learning Rate: 0.0004451328 +2025-03-11 22:12:05,570 Train Loss: 0.0156573, Val Loss: 0.0152275 +2025-03-11 22:12:05,571 Epoch 908/2000 +2025-03-11 22:12:30,687 Current Learning Rate: 0.0004373334 +2025-03-11 22:12:30,687 Train Loss: 0.0156562, Val Loss: 0.0152236 +2025-03-11 22:12:30,688 Epoch 909/2000 +2025-03-11 22:12:56,439 Current Learning 
Rate: 0.0004295494 +2025-03-11 22:12:56,439 Train Loss: 0.0156549, Val Loss: 0.0152229 +2025-03-11 22:12:56,439 Epoch 910/2000 +2025-03-11 22:13:21,520 Current Learning Rate: 0.0004217828 +2025-03-11 22:13:21,521 Train Loss: 0.0156538, Val Loss: 0.0152213 +2025-03-11 22:13:21,521 Epoch 911/2000 +2025-03-11 22:13:46,368 Current Learning Rate: 0.0004140354 +2025-03-11 22:13:46,369 Train Loss: 0.0156526, Val Loss: 0.0152200 +2025-03-11 22:13:46,369 Epoch 912/2000 +2025-03-11 22:14:10,906 Current Learning Rate: 0.0004063093 +2025-03-11 22:14:10,906 Train Loss: 0.0156514, Val Loss: 0.0152190 +2025-03-11 22:14:10,907 Epoch 913/2000 +2025-03-11 22:14:35,903 Current Learning Rate: 0.0003986064 +2025-03-11 22:14:35,907 Train Loss: 0.0156503, Val Loss: 0.0152180 +2025-03-11 22:14:35,907 Epoch 914/2000 +2025-03-11 22:15:01,008 Current Learning Rate: 0.0003909284 +2025-03-11 22:15:01,011 Train Loss: 0.0156492, Val Loss: 0.0152171 +2025-03-11 22:15:01,012 Epoch 915/2000 +2025-03-11 22:15:25,992 Current Learning Rate: 0.0003832773 +2025-03-11 22:15:25,996 Train Loss: 0.0156481, Val Loss: 0.0152161 +2025-03-11 22:15:25,996 Epoch 916/2000 +2025-03-11 22:15:50,788 Current Learning Rate: 0.0003756551 +2025-03-11 22:15:50,791 Train Loss: 0.0156470, Val Loss: 0.0152149 +2025-03-11 22:15:50,791 Epoch 917/2000 +2025-03-11 22:16:16,108 Current Learning Rate: 0.0003680635 +2025-03-11 22:16:16,114 Train Loss: 0.0156458, Val Loss: 0.0152133 +2025-03-11 22:16:16,114 Epoch 918/2000 +2025-03-11 22:16:40,796 Current Learning Rate: 0.0003605044 +2025-03-11 22:16:40,799 Train Loss: 0.0156447, Val Loss: 0.0152114 +2025-03-11 22:16:40,799 Epoch 919/2000 +2025-03-11 22:17:05,634 Current Learning Rate: 0.0003529798 +2025-03-11 22:17:05,638 Train Loss: 0.0156436, Val Loss: 0.0152093 +2025-03-11 22:17:05,638 Epoch 920/2000 +2025-03-11 22:17:30,406 Current Learning Rate: 0.0003454915 +2025-03-11 22:17:30,411 Train Loss: 0.0156426, Val Loss: 0.0152070 +2025-03-11 22:17:30,411 Epoch 921/2000 +2025-03-11 
22:17:54,836 Current Learning Rate: 0.0003380413 +2025-03-11 22:17:54,840 Train Loss: 0.0156415, Val Loss: 0.0152048 +2025-03-11 22:17:54,840 Epoch 922/2000 +2025-03-11 22:18:19,648 Current Learning Rate: 0.0003306310 +2025-03-11 22:18:19,650 Train Loss: 0.0156405, Val Loss: 0.0152025 +2025-03-11 22:18:19,650 Epoch 923/2000 +2025-03-11 22:18:44,294 Current Learning Rate: 0.0003232626 +2025-03-11 22:18:44,298 Train Loss: 0.0156395, Val Loss: 0.0152003 +2025-03-11 22:18:44,298 Epoch 924/2000 +2025-03-11 22:19:08,805 Current Learning Rate: 0.0003159377 +2025-03-11 22:19:08,808 Train Loss: 0.0156385, Val Loss: 0.0151981 +2025-03-11 22:19:08,809 Epoch 925/2000 +2025-03-11 22:19:33,365 Current Learning Rate: 0.0003086583 +2025-03-11 22:19:33,368 Train Loss: 0.0156376, Val Loss: 0.0151959 +2025-03-11 22:19:33,369 Epoch 926/2000 +2025-03-11 22:19:57,964 Current Learning Rate: 0.0003014261 +2025-03-11 22:19:57,967 Train Loss: 0.0156367, Val Loss: 0.0151937 +2025-03-11 22:19:57,968 Epoch 927/2000 +2025-03-11 22:20:22,657 Current Learning Rate: 0.0002942428 +2025-03-11 22:20:22,660 Train Loss: 0.0156358, Val Loss: 0.0151915 +2025-03-11 22:20:22,661 Epoch 928/2000 +2025-03-11 22:20:47,382 Current Learning Rate: 0.0002871104 +2025-03-11 22:20:47,385 Train Loss: 0.0156350, Val Loss: 0.0151894 +2025-03-11 22:20:47,385 Epoch 929/2000 +2025-03-11 22:21:11,608 Current Learning Rate: 0.0002800304 +2025-03-11 22:21:11,611 Train Loss: 0.0156341, Val Loss: 0.0151872 +2025-03-11 22:21:11,612 Epoch 930/2000 +2025-03-11 22:21:36,477 Current Learning Rate: 0.0002730048 +2025-03-11 22:21:36,481 Train Loss: 0.0156334, Val Loss: 0.0151852 +2025-03-11 22:21:36,481 Epoch 931/2000 +2025-03-11 22:22:01,304 Current Learning Rate: 0.0002660351 +2025-03-11 22:22:01,308 Train Loss: 0.0156327, Val Loss: 0.0151833 +2025-03-11 22:22:01,308 Epoch 932/2000 +2025-03-11 22:22:26,004 Current Learning Rate: 0.0002591232 +2025-03-11 22:22:26,008 Train Loss: 0.0156322, Val Loss: 0.0151817 +2025-03-11 
22:22:26,008 Epoch 933/2000 +2025-03-11 22:22:51,397 Current Learning Rate: 0.0002522707 +2025-03-11 22:22:51,400 Train Loss: 0.0156317, Val Loss: 0.0151805 +2025-03-11 22:22:51,400 Epoch 934/2000 +2025-03-11 22:23:16,304 Current Learning Rate: 0.0002454793 +2025-03-11 22:23:16,308 Train Loss: 0.0156314, Val Loss: 0.0151793 +2025-03-11 22:23:16,309 Epoch 935/2000 +2025-03-11 22:23:41,406 Current Learning Rate: 0.0002387507 +2025-03-11 22:23:41,410 Train Loss: 0.0156312, Val Loss: 0.0151778 +2025-03-11 22:23:41,410 Epoch 936/2000 +2025-03-11 22:24:06,422 Current Learning Rate: 0.0002320866 +2025-03-11 22:24:06,423 Train Loss: 0.0156307, Val Loss: 0.0151827 +2025-03-11 22:24:06,423 Epoch 937/2000 +2025-03-11 22:24:31,353 Current Learning Rate: 0.0002254886 +2025-03-11 22:24:31,357 Train Loss: 0.0156297, Val Loss: 0.0151770 +2025-03-11 22:24:31,358 Epoch 938/2000 +2025-03-11 22:24:55,994 Current Learning Rate: 0.0002189583 +2025-03-11 22:24:55,998 Train Loss: 0.0156281, Val Loss: 0.0151753 +2025-03-11 22:24:55,998 Epoch 939/2000 +2025-03-11 22:25:21,009 Current Learning Rate: 0.0002124974 +2025-03-11 22:25:21,012 Train Loss: 0.0156269, Val Loss: 0.0151745 +2025-03-11 22:25:21,012 Epoch 940/2000 +2025-03-11 22:25:45,942 Current Learning Rate: 0.0002061074 +2025-03-11 22:25:45,945 Train Loss: 0.0156258, Val Loss: 0.0151737 +2025-03-11 22:25:45,945 Epoch 941/2000 +2025-03-11 22:26:10,375 Current Learning Rate: 0.0001997899 +2025-03-11 22:26:10,380 Train Loss: 0.0156251, Val Loss: 0.0151730 +2025-03-11 22:26:10,381 Epoch 942/2000 +2025-03-11 22:26:35,538 Current Learning Rate: 0.0001935465 +2025-03-11 22:26:35,543 Train Loss: 0.0156244, Val Loss: 0.0151724 +2025-03-11 22:26:35,543 Epoch 943/2000 +2025-03-11 22:27:00,643 Current Learning Rate: 0.0001873787 +2025-03-11 22:27:00,646 Train Loss: 0.0156238, Val Loss: 0.0151718 +2025-03-11 22:27:00,647 Epoch 944/2000 +2025-03-11 22:27:25,380 Current Learning Rate: 0.0001812880 +2025-03-11 22:27:25,383 Train Loss: 0.0156232, Val 
Loss: 0.0151713 +2025-03-11 22:27:25,384 Epoch 945/2000 +2025-03-11 22:27:50,485 Current Learning Rate: 0.0001752760 +2025-03-11 22:27:50,490 Train Loss: 0.0156227, Val Loss: 0.0151709 +2025-03-11 22:27:50,490 Epoch 946/2000 +2025-03-11 22:28:15,549 Current Learning Rate: 0.0001693441 +2025-03-11 22:28:15,553 Train Loss: 0.0156223, Val Loss: 0.0151702 +2025-03-11 22:28:15,553 Epoch 947/2000 +2025-03-11 22:28:40,081 Current Learning Rate: 0.0001634937 +2025-03-11 22:28:40,084 Train Loss: 0.0156216, Val Loss: 0.0151698 +2025-03-11 22:28:40,084 Epoch 948/2000 +2025-03-11 22:29:05,224 Current Learning Rate: 0.0001577264 +2025-03-11 22:29:05,229 Train Loss: 0.0156208, Val Loss: 0.0151692 +2025-03-11 22:29:05,229 Epoch 949/2000 +2025-03-11 22:29:30,683 Current Learning Rate: 0.0001520436 +2025-03-11 22:29:30,703 Train Loss: 0.0156203, Val Loss: 0.0151687 +2025-03-11 22:29:30,703 Epoch 950/2000 +2025-03-11 22:29:55,382 Current Learning Rate: 0.0001464466 +2025-03-11 22:29:55,383 Train Loss: 0.0156198, Val Loss: 0.0151689 +2025-03-11 22:29:55,383 Epoch 951/2000 +2025-03-11 22:30:20,588 Current Learning Rate: 0.0001409369 +2025-03-11 22:30:20,588 Train Loss: 0.0156195, Val Loss: 0.0151701 +2025-03-11 22:30:20,589 Epoch 952/2000 +2025-03-11 22:30:45,675 Current Learning Rate: 0.0001355157 +2025-03-11 22:30:45,676 Train Loss: 0.0156190, Val Loss: 0.0151701 +2025-03-11 22:30:45,676 Epoch 953/2000 +2025-03-11 22:31:10,840 Current Learning Rate: 0.0001301845 +2025-03-11 22:31:10,843 Train Loss: 0.0156180, Val Loss: 0.0151678 +2025-03-11 22:31:10,843 Epoch 954/2000 +2025-03-11 22:31:35,780 Current Learning Rate: 0.0001249445 +2025-03-11 22:31:35,784 Train Loss: 0.0156173, Val Loss: 0.0151665 +2025-03-11 22:31:35,784 Epoch 955/2000 +2025-03-11 22:32:00,470 Current Learning Rate: 0.0001197970 +2025-03-11 22:32:00,475 Train Loss: 0.0156168, Val Loss: 0.0151656 +2025-03-11 22:32:00,475 Epoch 956/2000 +2025-03-11 22:32:25,485 Current Learning Rate: 0.0001147434 +2025-03-11 
22:32:25,489 Train Loss: 0.0156163, Val Loss: 0.0151650 +2025-03-11 22:32:25,490 Epoch 957/2000 +2025-03-11 22:32:51,138 Current Learning Rate: 0.0001097848 +2025-03-11 22:32:51,142 Train Loss: 0.0156157, Val Loss: 0.0151646 +2025-03-11 22:32:51,142 Epoch 958/2000 +2025-03-11 22:33:17,099 Current Learning Rate: 0.0001049225 +2025-03-11 22:33:17,103 Train Loss: 0.0156152, Val Loss: 0.0151643 +2025-03-11 22:33:17,103 Epoch 959/2000 +2025-03-11 22:33:42,488 Current Learning Rate: 0.0001001577 +2025-03-11 22:33:42,492 Train Loss: 0.0156148, Val Loss: 0.0151639 +2025-03-11 22:33:42,492 Epoch 960/2000 +2025-03-11 22:34:07,488 Current Learning Rate: 0.0000954915 +2025-03-11 22:34:07,491 Train Loss: 0.0156144, Val Loss: 0.0151635 +2025-03-11 22:34:07,491 Epoch 961/2000 +2025-03-11 22:34:32,961 Current Learning Rate: 0.0000909251 +2025-03-11 22:34:32,964 Train Loss: 0.0156140, Val Loss: 0.0151631 +2025-03-11 22:34:32,964 Epoch 962/2000 +2025-03-11 22:34:57,974 Current Learning Rate: 0.0000864597 +2025-03-11 22:34:57,977 Train Loss: 0.0156135, Val Loss: 0.0151628 +2025-03-11 22:34:57,978 Epoch 963/2000 +2025-03-11 22:35:23,516 Current Learning Rate: 0.0000820963 +2025-03-11 22:35:23,520 Train Loss: 0.0156131, Val Loss: 0.0151623 +2025-03-11 22:35:23,520 Epoch 964/2000 +2025-03-11 22:35:48,652 Current Learning Rate: 0.0000778360 +2025-03-11 22:35:48,654 Train Loss: 0.0156127, Val Loss: 0.0151621 +2025-03-11 22:35:48,654 Epoch 965/2000 +2025-03-11 22:36:13,798 Current Learning Rate: 0.0000736799 +2025-03-11 22:36:13,803 Train Loss: 0.0156123, Val Loss: 0.0151616 +2025-03-11 22:36:13,804 Epoch 966/2000 +2025-03-11 22:36:38,995 Current Learning Rate: 0.0000696290 +2025-03-11 22:36:39,000 Train Loss: 0.0156119, Val Loss: 0.0151613 +2025-03-11 22:36:39,000 Epoch 967/2000 +2025-03-11 22:37:03,919 Current Learning Rate: 0.0000656842 +2025-03-11 22:37:03,923 Train Loss: 0.0156115, Val Loss: 0.0151610 +2025-03-11 22:37:03,923 Epoch 968/2000 +2025-03-11 22:37:29,076 Current Learning 
Rate: 0.0000618467 +2025-03-11 22:37:29,080 Train Loss: 0.0156112, Val Loss: 0.0151606 +2025-03-11 22:37:29,080 Epoch 969/2000 +2025-03-11 22:37:54,415 Current Learning Rate: 0.0000581172 +2025-03-11 22:37:54,420 Train Loss: 0.0156108, Val Loss: 0.0151602 +2025-03-11 22:37:54,421 Epoch 970/2000 +2025-03-11 22:38:19,453 Current Learning Rate: 0.0000544967 +2025-03-11 22:38:19,456 Train Loss: 0.0156104, Val Loss: 0.0151599 +2025-03-11 22:38:19,457 Epoch 971/2000 +2025-03-11 22:38:44,880 Current Learning Rate: 0.0000509862 +2025-03-11 22:38:44,884 Train Loss: 0.0156101, Val Loss: 0.0151596 +2025-03-11 22:38:44,884 Epoch 972/2000 +2025-03-11 22:39:09,708 Current Learning Rate: 0.0000475865 +2025-03-11 22:39:09,712 Train Loss: 0.0156098, Val Loss: 0.0151593 +2025-03-11 22:39:09,712 Epoch 973/2000 +2025-03-11 22:39:34,307 Current Learning Rate: 0.0000442984 +2025-03-11 22:39:34,310 Train Loss: 0.0156095, Val Loss: 0.0151591 +2025-03-11 22:39:34,311 Epoch 974/2000 +2025-03-11 22:39:58,831 Current Learning Rate: 0.0000411227 +2025-03-11 22:39:58,835 Train Loss: 0.0156092, Val Loss: 0.0151591 +2025-03-11 22:39:58,835 Epoch 975/2000 +2025-03-11 22:40:24,012 Current Learning Rate: 0.0000380602 +2025-03-11 22:40:24,012 Train Loss: 0.0156090, Val Loss: 0.0151591 +2025-03-11 22:40:24,013 Epoch 976/2000 +2025-03-11 22:40:48,641 Current Learning Rate: 0.0000351118 +2025-03-11 22:40:48,645 Train Loss: 0.0156087, Val Loss: 0.0151588 +2025-03-11 22:40:48,645 Epoch 977/2000 +2025-03-11 22:41:13,407 Current Learning Rate: 0.0000322780 +2025-03-11 22:41:13,411 Train Loss: 0.0156085, Val Loss: 0.0151584 +2025-03-11 22:41:13,412 Epoch 978/2000 +2025-03-11 22:41:37,835 Current Learning Rate: 0.0000295596 +2025-03-11 22:41:37,839 Train Loss: 0.0156082, Val Loss: 0.0151580 +2025-03-11 22:41:37,840 Epoch 979/2000 +2025-03-11 22:42:02,630 Current Learning Rate: 0.0000269573 +2025-03-11 22:42:02,634 Train Loss: 0.0156080, Val Loss: 0.0151578 +2025-03-11 22:42:02,635 Epoch 980/2000 +2025-03-11 
22:42:27,298 Current Learning Rate: 0.0000244717 +2025-03-11 22:42:27,301 Train Loss: 0.0156078, Val Loss: 0.0151578 +2025-03-11 22:42:27,302 Epoch 981/2000 +2025-03-11 22:42:51,939 Current Learning Rate: 0.0000221035 +2025-03-11 22:42:51,940 Train Loss: 0.0156076, Val Loss: 0.0151579 +2025-03-11 22:42:51,940 Epoch 982/2000 +2025-03-11 22:43:16,569 Current Learning Rate: 0.0000198532 +2025-03-11 22:43:16,572 Train Loss: 0.0156075, Val Loss: 0.0151577 +2025-03-11 22:43:16,573 Epoch 983/2000 +2025-03-11 22:43:41,080 Current Learning Rate: 0.0000177213 +2025-03-11 22:43:41,084 Train Loss: 0.0156073, Val Loss: 0.0151575 +2025-03-11 22:43:41,084 Epoch 984/2000 +2025-03-11 22:44:05,486 Current Learning Rate: 0.0000157084 +2025-03-11 22:44:05,490 Train Loss: 0.0156071, Val Loss: 0.0151572 +2025-03-11 22:44:05,491 Epoch 985/2000 +2025-03-11 22:44:30,321 Current Learning Rate: 0.0000138150 +2025-03-11 22:44:30,325 Train Loss: 0.0156070, Val Loss: 0.0151570 +2025-03-11 22:44:30,325 Epoch 986/2000 +2025-03-11 22:44:54,980 Current Learning Rate: 0.0000120416 +2025-03-11 22:44:54,986 Train Loss: 0.0156068, Val Loss: 0.0151569 +2025-03-11 22:44:54,986 Epoch 987/2000 +2025-03-11 22:45:19,389 Current Learning Rate: 0.0000103886 +2025-03-11 22:45:19,392 Train Loss: 0.0156067, Val Loss: 0.0151569 +2025-03-11 22:45:19,392 Epoch 988/2000 +2025-03-11 22:45:44,350 Current Learning Rate: 0.0000088564 +2025-03-11 22:45:44,355 Train Loss: 0.0156066, Val Loss: 0.0151568 +2025-03-11 22:45:44,355 Epoch 989/2000 +2025-03-11 22:46:09,444 Current Learning Rate: 0.0000074453 +2025-03-11 22:46:09,445 Train Loss: 0.0156065, Val Loss: 0.0151568 +2025-03-11 22:46:09,445 Epoch 990/2000 +2025-03-11 22:46:34,977 Current Learning Rate: 0.0000061558 +2025-03-11 22:46:34,980 Train Loss: 0.0156064, Val Loss: 0.0151568 +2025-03-11 22:46:34,980 Epoch 991/2000 +2025-03-11 22:46:59,623 Current Learning Rate: 0.0000049882 +2025-03-11 22:46:59,627 Train Loss: 0.0156064, Val Loss: 0.0151567 +2025-03-11 
22:46:59,627 Epoch 992/2000 +2025-03-11 22:47:24,369 Current Learning Rate: 0.0000039426 +2025-03-11 22:47:24,373 Train Loss: 0.0156063, Val Loss: 0.0151567 +2025-03-11 22:47:24,373 Epoch 993/2000 +2025-03-11 22:47:49,162 Current Learning Rate: 0.0000030195 +2025-03-11 22:47:49,166 Train Loss: 0.0156062, Val Loss: 0.0151566 +2025-03-11 22:47:49,166 Epoch 994/2000 +2025-03-11 22:48:14,415 Current Learning Rate: 0.0000022190 +2025-03-11 22:48:14,420 Train Loss: 0.0156062, Val Loss: 0.0151565 +2025-03-11 22:48:14,420 Epoch 995/2000 +2025-03-11 22:48:39,580 Current Learning Rate: 0.0000015413 +2025-03-11 22:48:39,584 Train Loss: 0.0156061, Val Loss: 0.0151565 +2025-03-11 22:48:39,585 Epoch 996/2000 +2025-03-11 22:49:04,825 Current Learning Rate: 0.0000009866 +2025-03-11 22:49:04,828 Train Loss: 0.0156061, Val Loss: 0.0151565 +2025-03-11 22:49:04,829 Epoch 997/2000 +2025-03-11 22:49:29,879 Current Learning Rate: 0.0000005551 +2025-03-11 22:49:29,882 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:49:29,882 Epoch 998/2000 +2025-03-11 22:49:55,605 Current Learning Rate: 0.0000002467 +2025-03-11 22:49:55,608 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:49:55,608 Epoch 999/2000 +2025-03-11 22:50:20,963 Current Learning Rate: 0.0000000617 +2025-03-11 22:50:20,964 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:50:20,964 Epoch 1000/2000 +2025-03-11 22:50:46,001 Current Learning Rate: 0.0000000000 +2025-03-11 22:50:46,002 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:50:46,002 Epoch 1001/2000 +2025-03-11 22:51:11,328 Current Learning Rate: 0.0000000617 +2025-03-11 22:51:11,329 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:51:11,329 Epoch 1002/2000 +2025-03-11 22:51:36,017 Current Learning Rate: 0.0000002467 +2025-03-11 22:51:36,017 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:51:36,018 Epoch 1003/2000 +2025-03-11 22:52:01,387 Current Learning Rate: 0.0000005551 +2025-03-11 22:52:01,391 Train Loss: 0.0156061, 
Val Loss: 0.0151564 +2025-03-11 22:52:01,391 Epoch 1004/2000 +2025-03-11 22:52:27,374 Current Learning Rate: 0.0000009866 +2025-03-11 22:52:27,378 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:52:27,378 Epoch 1005/2000 +2025-03-11 22:52:52,715 Current Learning Rate: 0.0000015413 +2025-03-11 22:52:52,716 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:52:52,716 Epoch 1006/2000 +2025-03-11 22:53:18,304 Current Learning Rate: 0.0000022190 +2025-03-11 22:53:18,305 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:53:18,305 Epoch 1007/2000 +2025-03-11 22:53:44,126 Current Learning Rate: 0.0000030195 +2025-03-11 22:53:44,127 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:53:44,127 Epoch 1008/2000 +2025-03-11 22:54:09,829 Current Learning Rate: 0.0000039426 +2025-03-11 22:54:09,830 Train Loss: 0.0156061, Val Loss: 0.0151564 +2025-03-11 22:54:09,831 Epoch 1009/2000 +2025-03-11 22:54:35,329 Current Learning Rate: 0.0000049882 +2025-03-11 22:54:35,329 Train Loss: 0.0156060, Val Loss: 0.0151564 +2025-03-11 22:54:35,330 Epoch 1010/2000 +2025-03-11 22:55:00,140 Current Learning Rate: 0.0000061558 +2025-03-11 22:55:00,141 Train Loss: 0.0156060, Val Loss: 0.0151564 +2025-03-11 22:55:00,141 Epoch 1011/2000 +2025-03-11 22:55:25,711 Current Learning Rate: 0.0000074453 +2025-03-11 22:55:25,715 Train Loss: 0.0156059, Val Loss: 0.0151563 +2025-03-11 22:55:25,715 Epoch 1012/2000 +2025-03-11 22:55:50,420 Current Learning Rate: 0.0000088564 +2025-03-11 22:55:50,424 Train Loss: 0.0156059, Val Loss: 0.0151561 +2025-03-11 22:55:50,424 Epoch 1013/2000 +2025-03-11 22:56:15,885 Current Learning Rate: 0.0000103886 +2025-03-11 22:56:15,889 Train Loss: 0.0156059, Val Loss: 0.0151561 +2025-03-11 22:56:15,890 Epoch 1014/2000 +2025-03-11 22:56:40,704 Current Learning Rate: 0.0000120416 +2025-03-11 22:56:40,704 Train Loss: 0.0156058, Val Loss: 0.0151562 +2025-03-11 22:56:40,705 Epoch 1015/2000 +2025-03-11 22:57:05,671 Current Learning Rate: 0.0000138150 
+2025-03-11 22:57:05,675 Train Loss: 0.0156057, Val Loss: 0.0151560 +2025-03-11 22:57:05,675 Epoch 1016/2000 +2025-03-11 22:57:30,918 Current Learning Rate: 0.0000157084 +2025-03-11 22:57:30,924 Train Loss: 0.0156056, Val Loss: 0.0151555 +2025-03-11 22:57:30,924 Epoch 1017/2000 +2025-03-11 22:57:56,523 Current Learning Rate: 0.0000177213 +2025-03-11 22:57:56,528 Train Loss: 0.0156054, Val Loss: 0.0151553 +2025-03-11 22:57:56,528 Epoch 1018/2000 +2025-03-11 22:58:21,675 Current Learning Rate: 0.0000198532 +2025-03-11 22:58:21,675 Train Loss: 0.0156052, Val Loss: 0.0151553 +2025-03-11 22:58:21,676 Epoch 1019/2000 +2025-03-11 22:58:46,979 Current Learning Rate: 0.0000221035 +2025-03-11 22:58:46,983 Train Loss: 0.0156050, Val Loss: 0.0151553 +2025-03-11 22:58:46,984 Epoch 1020/2000 +2025-03-11 22:59:11,888 Current Learning Rate: 0.0000244717 +2025-03-11 22:59:11,891 Train Loss: 0.0156049, Val Loss: 0.0151549 +2025-03-11 22:59:11,891 Epoch 1021/2000 +2025-03-11 22:59:36,994 Current Learning Rate: 0.0000269573 +2025-03-11 22:59:36,998 Train Loss: 0.0156047, Val Loss: 0.0151547 +2025-03-11 22:59:36,998 Epoch 1022/2000 +2025-03-11 23:00:01,874 Current Learning Rate: 0.0000295596 +2025-03-11 23:00:01,878 Train Loss: 0.0156045, Val Loss: 0.0151544 +2025-03-11 23:00:01,878 Epoch 1023/2000 +2025-03-11 23:00:26,679 Current Learning Rate: 0.0000322780 +2025-03-11 23:00:26,683 Train Loss: 0.0156043, Val Loss: 0.0151540 +2025-03-11 23:00:26,683 Epoch 1024/2000 +2025-03-11 23:00:51,781 Current Learning Rate: 0.0000351118 +2025-03-11 23:00:51,785 Train Loss: 0.0156039, Val Loss: 0.0151535 +2025-03-11 23:00:51,785 Epoch 1025/2000 +2025-03-11 23:01:16,485 Current Learning Rate: 0.0000380602 +2025-03-11 23:01:16,489 Train Loss: 0.0156036, Val Loss: 0.0151532 +2025-03-11 23:01:16,489 Epoch 1026/2000 +2025-03-11 23:01:41,949 Current Learning Rate: 0.0000411227 +2025-03-11 23:01:41,953 Train Loss: 0.0156034, Val Loss: 0.0151525 +2025-03-11 23:01:41,953 Epoch 1027/2000 +2025-03-11 
23:02:07,476 Current Learning Rate: 0.0000442984 +2025-03-11 23:02:07,480 Train Loss: 0.0156030, Val Loss: 0.0151522 +2025-03-11 23:02:07,480 Epoch 1028/2000 +2025-03-11 23:02:32,620 Current Learning Rate: 0.0000475865 +2025-03-11 23:02:32,624 Train Loss: 0.0156028, Val Loss: 0.0151518 +2025-03-11 23:02:32,624 Epoch 1029/2000 +2025-03-11 23:02:57,440 Current Learning Rate: 0.0000509862 +2025-03-11 23:02:57,443 Train Loss: 0.0156028, Val Loss: 0.0151514 +2025-03-11 23:02:57,444 Epoch 1030/2000 +2025-03-11 23:03:22,176 Current Learning Rate: 0.0000544967 +2025-03-11 23:03:22,180 Train Loss: 0.0156024, Val Loss: 0.0151511 +2025-03-11 23:03:22,180 Epoch 1031/2000 +2025-03-11 23:03:47,377 Current Learning Rate: 0.0000581172 +2025-03-11 23:03:47,381 Train Loss: 0.0156021, Val Loss: 0.0151509 +2025-03-11 23:03:47,381 Epoch 1032/2000 +2025-03-11 23:04:12,378 Current Learning Rate: 0.0000618467 +2025-03-11 23:04:12,382 Train Loss: 0.0156018, Val Loss: 0.0151508 +2025-03-11 23:04:12,382 Epoch 1033/2000 +2025-03-11 23:04:38,052 Current Learning Rate: 0.0000656842 +2025-03-11 23:04:38,055 Train Loss: 0.0156015, Val Loss: 0.0151507 +2025-03-11 23:04:38,055 Epoch 1034/2000 +2025-03-11 23:05:03,012 Current Learning Rate: 0.0000696290 +2025-03-11 23:05:03,016 Train Loss: 0.0156012, Val Loss: 0.0151506 +2025-03-11 23:05:03,016 Epoch 1035/2000 +2025-03-11 23:05:28,188 Current Learning Rate: 0.0000736799 +2025-03-11 23:05:28,192 Train Loss: 0.0156009, Val Loss: 0.0151504 +2025-03-11 23:05:28,192 Epoch 1036/2000 +2025-03-11 23:05:53,189 Current Learning Rate: 0.0000778360 +2025-03-11 23:05:53,192 Train Loss: 0.0156006, Val Loss: 0.0151503 +2025-03-11 23:05:53,193 Epoch 1037/2000 +2025-03-11 23:06:18,036 Current Learning Rate: 0.0000820963 +2025-03-11 23:06:18,040 Train Loss: 0.0156004, Val Loss: 0.0151502 +2025-03-11 23:06:18,041 Epoch 1038/2000 +2025-03-11 23:06:42,693 Current Learning Rate: 0.0000864597 +2025-03-11 23:06:42,697 Train Loss: 0.0156001, Val Loss: 0.0151502 +2025-03-11 
23:06:42,698 Epoch 1039/2000 +2025-03-11 23:07:07,103 Current Learning Rate: 0.0000909251 +2025-03-11 23:07:07,107 Train Loss: 0.0155998, Val Loss: 0.0151502 +2025-03-11 23:07:07,108 Epoch 1040/2000 +2025-03-11 23:07:31,478 Current Learning Rate: 0.0000954915 +2025-03-11 23:07:31,482 Train Loss: 0.0155995, Val Loss: 0.0151500 +2025-03-11 23:07:31,482 Epoch 1041/2000 +2025-03-11 23:07:56,016 Current Learning Rate: 0.0001001577 +2025-03-11 23:07:56,019 Train Loss: 0.0155992, Val Loss: 0.0151497 +2025-03-11 23:07:56,020 Epoch 1042/2000 +2025-03-11 23:08:20,155 Current Learning Rate: 0.0001049225 +2025-03-11 23:08:20,157 Train Loss: 0.0155988, Val Loss: 0.0151491 +2025-03-11 23:08:20,158 Epoch 1043/2000 +2025-03-11 23:08:44,714 Current Learning Rate: 0.0001097848 +2025-03-11 23:08:44,718 Train Loss: 0.0155985, Val Loss: 0.0151480 +2025-03-11 23:08:44,718 Epoch 1044/2000 +2025-03-11 23:09:10,240 Current Learning Rate: 0.0001147434 +2025-03-11 23:09:10,244 Train Loss: 0.0155980, Val Loss: 0.0151470 +2025-03-11 23:09:10,244 Epoch 1045/2000 +2025-03-11 23:09:34,927 Current Learning Rate: 0.0001197970 +2025-03-11 23:09:34,931 Train Loss: 0.0155979, Val Loss: 0.0151462 +2025-03-11 23:09:34,931 Epoch 1046/2000 +2025-03-11 23:10:00,125 Current Learning Rate: 0.0001249445 +2025-03-11 23:10:00,129 Train Loss: 0.0155977, Val Loss: 0.0151455 +2025-03-11 23:10:00,129 Epoch 1047/2000 +2025-03-11 23:10:25,154 Current Learning Rate: 0.0001301845 +2025-03-11 23:10:25,158 Train Loss: 0.0155974, Val Loss: 0.0151451 +2025-03-11 23:10:25,159 Epoch 1048/2000 +2025-03-11 23:10:50,440 Current Learning Rate: 0.0001355157 +2025-03-11 23:10:50,444 Train Loss: 0.0155966, Val Loss: 0.0151444 +2025-03-11 23:10:50,444 Epoch 1049/2000 +2025-03-11 23:11:15,012 Current Learning Rate: 0.0001409369 +2025-03-11 23:11:15,015 Train Loss: 0.0155952, Val Loss: 0.0151421 +2025-03-11 23:11:15,015 Epoch 1050/2000 +2025-03-11 23:11:39,349 Current Learning Rate: 0.0001464466 +2025-03-11 23:11:39,349 Train Loss: 
0.0155954, Val Loss: 0.0151489 +2025-03-11 23:11:39,349 Epoch 1051/2000 +2025-03-11 23:12:04,330 Current Learning Rate: 0.0001520436 +2025-03-11 23:12:04,331 Train Loss: 0.0155949, Val Loss: 0.0151470 +2025-03-11 23:12:04,331 Epoch 1052/2000 +2025-03-11 23:12:29,386 Current Learning Rate: 0.0001577264 +2025-03-11 23:12:29,386 Train Loss: 0.0155944, Val Loss: 0.0151485 +2025-03-11 23:12:29,386 Epoch 1053/2000 +2025-03-11 23:12:54,043 Current Learning Rate: 0.0001634937 +2025-03-11 23:12:54,044 Train Loss: 0.0155945, Val Loss: 0.0151492 +2025-03-11 23:12:54,044 Epoch 1054/2000 +2025-03-11 23:13:18,574 Current Learning Rate: 0.0001693441 +2025-03-11 23:13:18,574 Train Loss: 0.0155943, Val Loss: 0.0151532 +2025-03-11 23:13:18,575 Epoch 1055/2000 +2025-03-11 23:13:43,161 Current Learning Rate: 0.0001752760 +2025-03-11 23:13:43,161 Train Loss: 0.0155937, Val Loss: 0.0151620 +2025-03-11 23:13:43,161 Epoch 1056/2000 +2025-03-11 23:14:07,855 Current Learning Rate: 0.0001812880 +2025-03-11 23:14:07,859 Train Loss: 0.0155954, Val Loss: 0.0151405 +2025-03-11 23:14:07,860 Epoch 1057/2000 +2025-03-11 23:14:32,887 Current Learning Rate: 0.0001873787 +2025-03-11 23:14:32,887 Train Loss: 0.0155935, Val Loss: 0.0151662 +2025-03-11 23:14:32,888 Epoch 1058/2000 +2025-03-11 23:14:57,652 Current Learning Rate: 0.0001935465 +2025-03-11 23:14:57,652 Train Loss: 0.0155936, Val Loss: 0.0151518 +2025-03-11 23:14:57,653 Epoch 1059/2000 +2025-03-11 23:15:22,641 Current Learning Rate: 0.0001997899 +2025-03-11 23:15:22,642 Train Loss: 0.0155931, Val Loss: 0.0151723 +2025-03-11 23:15:22,642 Epoch 1060/2000 +2025-03-11 23:15:45,893 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-11 23:15:47,467 Current Learning Rate: 0.0002061074 +2025-03-11 23:15:47,468 Train Loss: 0.0155938, Val Loss: 0.0151445 +2025-03-11 23:15:47,468 Epoch 1061/2000 +2025-03-11 23:16:12,549 Current Learning Rate: 0.0002124974 +2025-03-11 23:16:12,549 Train Loss: 0.0155924, Val Loss: 0.0151747 +2025-03-11 
23:16:12,549 Epoch 1062/2000 +2025-03-11 23:16:37,049 Loading best model from checkpoint. +2025-03-11 23:16:38,383 Current Learning Rate: 0.0002189583 +2025-03-11 23:16:38,384 Train Loss: 0.0155925, Val Loss: 0.0151517 +2025-03-11 23:16:38,384 Epoch 1063/2000 +2025-03-11 23:17:09,445 Current Learning Rate: 0.0002254886 +2025-03-11 23:17:09,446 Train Loss: 0.0155915, Val Loss: 0.0151723 +2025-03-11 23:17:09,446 Epoch 1064/2000 +2025-03-11 23:17:25,179 Testing completed and best model saved. +2025-03-11 23:17:34,192 Current Learning Rate: 0.0002320866 +2025-03-11 23:17:34,193 Train Loss: 0.0155919, Val Loss: 0.0151428 +2025-03-11 23:17:34,193 Epoch 1065/2000 +2025-03-11 23:17:58,412 Current Learning Rate: 0.0002387507 +2025-03-11 23:17:58,413 Train Loss: 0.0155909, Val Loss: 0.0151649 +2025-03-11 23:17:58,413 Epoch 1066/2000 +2025-03-11 23:18:22,720 Current Learning Rate: 0.0002454793 +2025-03-11 23:18:22,721 Train Loss: 0.0155904, Val Loss: 0.0151534 +2025-03-11 23:18:22,721 Epoch 1067/2000 +2025-03-11 23:18:47,085 Current Learning Rate: 0.0002522707 +2025-03-11 23:18:47,086 Train Loss: 0.0155886, Val Loss: 0.0151714 +2025-03-11 23:18:47,086 Epoch 1068/2000 +2025-03-11 23:19:11,709 Current Learning Rate: 0.0002591232 +2025-03-11 23:19:11,712 Train Loss: 0.0155942, Val Loss: 0.0151324 +2025-03-11 23:19:11,713 Epoch 1069/2000 +2025-03-11 23:19:36,023 Current Learning Rate: 0.0002660351 +2025-03-11 23:19:36,023 Train Loss: 0.0155853, Val Loss: 0.0151659 +2025-03-11 23:19:36,024 Epoch 1070/2000 +2025-03-11 23:20:00,578 Current Learning Rate: 0.0002730048 +2025-03-11 23:20:00,579 Train Loss: 0.0155980, Val Loss: 0.0151334 +2025-03-11 23:20:00,579 Epoch 1071/2000 +2025-03-11 23:20:25,674 Current Learning Rate: 0.0002800304 +2025-03-11 23:20:25,674 Train Loss: 0.0155843, Val Loss: 0.0151482 +2025-03-11 23:20:25,674 Epoch 1072/2000 +2025-03-11 23:20:50,297 Current Learning Rate: 0.0002871104 +2025-03-11 23:20:50,297 Train Loss: 0.0155912, Val Loss: 0.0151390 +2025-03-11 
23:20:50,297 Epoch 1073/2000 +2025-03-11 23:21:15,711 Current Learning Rate: 0.0002942428 +2025-03-11 23:21:15,714 Train Loss: 0.0155853, Val Loss: 0.0151321 +2025-03-11 23:21:15,715 Epoch 1074/2000 +2025-03-11 23:21:40,571 Current Learning Rate: 0.0003014261 +2025-03-11 23:21:40,575 Train Loss: 0.0156095, Val Loss: 0.0151314 +2025-03-11 23:21:40,575 Epoch 1075/2000 +2025-03-11 23:22:05,418 Current Learning Rate: 0.0003086583 +2025-03-11 23:22:05,423 Train Loss: 0.0155815, Val Loss: 0.0151274 +2025-03-11 23:22:05,423 Epoch 1076/2000 +2025-03-11 23:22:30,028 Current Learning Rate: 0.0003159377 +2025-03-11 23:22:30,028 Train Loss: 0.0155809, Val Loss: 0.0151428 +2025-03-11 23:22:30,029 Epoch 1077/2000 +2025-03-11 23:22:54,686 Current Learning Rate: 0.0003232626 +2025-03-11 23:22:54,687 Train Loss: 0.0155925, Val Loss: 0.0151306 +2025-03-11 23:22:54,687 Epoch 1078/2000 +2025-03-11 23:23:19,619 Current Learning Rate: 0.0003306310 +2025-03-11 23:23:19,620 Train Loss: 0.0155855, Val Loss: 0.0151548 +2025-03-11 23:23:19,620 Epoch 1079/2000 +2025-03-11 23:23:44,116 Current Learning Rate: 0.0003380413 +2025-03-11 23:23:44,119 Train Loss: 0.0155995, Val Loss: 0.0151266 +2025-03-11 23:23:44,120 Epoch 1080/2000 +2025-03-11 23:24:08,694 Current Learning Rate: 0.0003454915 +2025-03-11 23:24:08,694 Train Loss: 0.0155799, Val Loss: 0.0151533 +2025-03-11 23:24:08,694 Epoch 1081/2000 +2025-03-11 23:24:33,469 Current Learning Rate: 0.0003529798 +2025-03-11 23:24:33,469 Train Loss: 0.0155905, Val Loss: 0.0151290 +2025-03-11 23:24:33,469 Epoch 1082/2000 +2025-03-11 23:24:59,022 Current Learning Rate: 0.0003605044 +2025-03-11 23:24:59,022 Train Loss: 0.0155864, Val Loss: 0.0151286 +2025-03-11 23:24:59,022 Epoch 1083/2000 +2025-03-11 23:25:24,092 Current Learning Rate: 0.0003680635 +2025-03-11 23:25:24,092 Train Loss: 0.0155913, Val Loss: 0.0151289 +2025-03-11 23:25:24,093 Epoch 1084/2000 +2025-03-11 23:25:48,749 Current Learning Rate: 0.0003756551 +2025-03-11 23:25:48,754 Train Loss: 
0.0155791, Val Loss: 0.0151219 +2025-03-11 23:25:48,754 Epoch 1085/2000 +2025-03-11 23:26:14,522 Current Learning Rate: 0.0003832773 +2025-03-11 23:26:14,522 Train Loss: 0.0156300, Val Loss: 0.0151251 +2025-03-11 23:26:14,522 Epoch 1086/2000 +2025-03-11 23:26:39,275 Current Learning Rate: 0.0003909284 +2025-03-11 23:26:39,278 Train Loss: 0.0155744, Val Loss: 0.0151209 +2025-03-11 23:26:39,279 Epoch 1087/2000 +2025-03-11 23:27:04,317 Current Learning Rate: 0.0003986064 +2025-03-11 23:27:04,318 Train Loss: 0.0155771, Val Loss: 0.0151355 +2025-03-11 23:27:04,318 Epoch 1088/2000 +2025-03-11 23:27:29,076 Current Learning Rate: 0.0004063093 +2025-03-11 23:27:29,080 Train Loss: 0.0155805, Val Loss: 0.0151202 +2025-03-11 23:27:29,080 Epoch 1089/2000 +2025-03-11 23:27:54,052 Current Learning Rate: 0.0004140354 +2025-03-11 23:27:54,052 Train Loss: 0.0156028, Val Loss: 0.0151210 +2025-03-11 23:27:54,052 Epoch 1090/2000 +2025-03-11 23:28:19,489 Current Learning Rate: 0.0004217828 +2025-03-11 23:28:19,490 Train Loss: 0.0155750, Val Loss: 0.0151318 +2025-03-11 23:28:19,490 Epoch 1091/2000 +2025-03-11 23:28:44,501 Current Learning Rate: 0.0004295494 +2025-03-11 23:28:44,502 Train Loss: 0.0156014, Val Loss: 0.0151270 +2025-03-11 23:28:44,502 Epoch 1092/2000 +2025-03-11 23:29:09,149 Current Learning Rate: 0.0004373334 +2025-03-11 23:29:09,150 Train Loss: 0.0155762, Val Loss: 0.0151205 +2025-03-11 23:29:09,150 Epoch 1093/2000 +2025-03-11 23:29:34,236 Current Learning Rate: 0.0004451328 +2025-03-11 23:29:34,237 Train Loss: 0.0156003, Val Loss: 0.0151377 +2025-03-11 23:29:34,237 Epoch 1094/2000 +2025-03-11 23:29:59,736 Current Learning Rate: 0.0004529458 +2025-03-11 23:29:59,737 Train Loss: 0.0155758, Val Loss: 0.0151386 +2025-03-11 23:29:59,737 Epoch 1095/2000 +2025-03-11 23:30:24,251 Current Learning Rate: 0.0004607705 +2025-03-11 23:30:24,252 Train Loss: 0.0156056, Val Loss: 0.0151278 +2025-03-11 23:30:24,252 Epoch 1096/2000 +2025-03-11 23:30:49,299 Current Learning Rate: 
0.0004686047 +2025-03-11 23:30:49,303 Train Loss: 0.0155762, Val Loss: 0.0151164 +2025-03-11 23:30:49,303 Epoch 1097/2000 +2025-03-11 23:31:14,722 Current Learning Rate: 0.0004764468 +2025-03-11 23:31:14,723 Train Loss: 0.0155916, Val Loss: 0.0151264 +2025-03-11 23:31:14,723 Epoch 1098/2000 +2025-03-11 23:31:40,095 Current Learning Rate: 0.0004842946 +2025-03-11 23:31:40,095 Train Loss: 0.0155819, Val Loss: 0.0151224 +2025-03-11 23:31:40,096 Epoch 1099/2000 +2025-03-11 23:32:05,497 Current Learning Rate: 0.0004921463 +2025-03-11 23:32:05,497 Train Loss: 0.0156017, Val Loss: 0.0151267 +2025-03-11 23:32:05,497 Epoch 1100/2000 +2025-03-11 23:32:30,899 Current Learning Rate: 0.0005000000 +2025-03-11 23:32:30,903 Train Loss: 0.0155775, Val Loss: 0.0151100 +2025-03-11 23:32:30,904 Epoch 1101/2000 +2025-03-11 23:32:56,943 Current Learning Rate: 0.0005078537 +2025-03-11 23:32:56,943 Train Loss: 0.0155979, Val Loss: 0.0151302 +2025-03-11 23:32:56,944 Epoch 1102/2000 +2025-03-11 23:33:21,964 Current Learning Rate: 0.0005157054 +2025-03-11 23:33:21,968 Train Loss: 0.0155780, Val Loss: 0.0151098 +2025-03-11 23:33:21,968 Epoch 1103/2000 +2025-03-11 23:33:46,611 Current Learning Rate: 0.0005235532 +2025-03-11 23:33:46,614 Train Loss: 0.0156097, Val Loss: 0.0151081 +2025-03-11 23:33:46,615 Epoch 1104/2000 +2025-03-11 23:34:11,390 Current Learning Rate: 0.0005313953 +2025-03-11 23:34:11,391 Train Loss: 0.0155781, Val Loss: 0.0151201 +2025-03-11 23:34:11,391 Epoch 1105/2000 +2025-03-11 23:34:36,741 Current Learning Rate: 0.0005392295 +2025-03-11 23:34:36,742 Train Loss: 0.0155962, Val Loss: 0.0151134 +2025-03-11 23:34:36,742 Epoch 1106/2000 +2025-03-11 23:35:01,674 Current Learning Rate: 0.0005470542 +2025-03-11 23:35:01,675 Train Loss: 0.0155714, Val Loss: 0.0151809 +2025-03-11 23:35:01,675 Epoch 1107/2000 +2025-03-11 23:35:27,315 Current Learning Rate: 0.0005548672 +2025-03-11 23:35:27,315 Train Loss: 0.0156152, Val Loss: 0.0151197 +2025-03-11 23:35:27,316 Epoch 1108/2000 
+2025-03-11 23:35:51,871 Current Learning Rate: 0.0005626666 +2025-03-11 23:35:51,871 Train Loss: 0.0155670, Val Loss: 0.0151098 +2025-03-11 23:35:51,871 Epoch 1109/2000 +2025-03-11 23:36:16,666 Current Learning Rate: 0.0005704506 +2025-03-11 23:36:16,667 Train Loss: 0.0156191, Val Loss: 0.0151215 +2025-03-11 23:36:16,667 Epoch 1110/2000 +2025-03-11 23:36:41,463 Current Learning Rate: 0.0005782172 +2025-03-11 23:36:41,467 Train Loss: 0.0155724, Val Loss: 0.0151062 +2025-03-11 23:36:41,467 Epoch 1111/2000 +2025-03-11 23:37:06,471 Current Learning Rate: 0.0005859646 +2025-03-11 23:37:06,471 Train Loss: 0.0155804, Val Loss: 0.0151228 +2025-03-11 23:37:06,472 Epoch 1112/2000 +2025-03-11 23:37:32,252 Current Learning Rate: 0.0005936907 +2025-03-11 23:37:32,253 Train Loss: 0.0156223, Val Loss: 0.0151205 +2025-03-11 23:37:32,253 Epoch 1113/2000 +2025-03-11 23:37:58,315 Current Learning Rate: 0.0006013936 +2025-03-11 23:37:58,319 Train Loss: 0.0155669, Val Loss: 0.0151032 +2025-03-11 23:37:58,319 Epoch 1114/2000 +2025-03-11 23:38:24,333 Current Learning Rate: 0.0006090716 +2025-03-11 23:38:24,333 Train Loss: 0.0155999, Val Loss: 0.0151187 +2025-03-11 23:38:24,333 Epoch 1115/2000 +2025-03-11 23:38:49,498 Current Learning Rate: 0.0006167227 +2025-03-11 23:38:49,499 Train Loss: 0.0155776, Val Loss: 0.0151122 +2025-03-11 23:38:49,499 Epoch 1116/2000 +2025-03-11 23:39:14,390 Current Learning Rate: 0.0006243449 +2025-03-11 23:39:14,391 Train Loss: 0.0156120, Val Loss: 0.0151923 +2025-03-11 23:39:14,391 Epoch 1117/2000 +2025-03-11 23:39:39,295 Current Learning Rate: 0.0006319365 +2025-03-11 23:39:39,295 Train Loss: 0.0155807, Val Loss: 0.0151122 +2025-03-11 23:39:39,295 Epoch 1118/2000 +2025-03-11 23:40:04,411 Current Learning Rate: 0.0006394956 +2025-03-11 23:40:04,411 Train Loss: 0.0155947, Val Loss: 0.0151489 +2025-03-11 23:40:04,411 Epoch 1119/2000 +2025-03-11 23:40:29,310 Current Learning Rate: 0.0006470202 +2025-03-11 23:40:29,310 Train Loss: 0.0155670, Val Loss: 0.0151138 
+2025-03-11 23:40:29,311 Epoch 1120/2000 +2025-03-11 23:40:54,038 Current Learning Rate: 0.0006545085 +2025-03-11 23:40:54,039 Train Loss: 0.0156391, Val Loss: 0.0151223 +2025-03-11 23:40:54,039 Epoch 1121/2000 +2025-03-11 23:41:18,592 Current Learning Rate: 0.0006619587 +2025-03-11 23:41:18,595 Train Loss: 0.0155665, Val Loss: 0.0151005 +2025-03-11 23:41:18,596 Epoch 1122/2000 +2025-03-11 23:41:43,239 Current Learning Rate: 0.0006693690 +2025-03-11 23:41:43,240 Train Loss: 0.0155752, Val Loss: 0.0151017 +2025-03-11 23:41:43,241 Epoch 1123/2000 +2025-03-11 23:42:07,861 Current Learning Rate: 0.0006767374 +2025-03-11 23:42:07,861 Train Loss: 0.0156571, Val Loss: 0.0151023 +2025-03-11 23:42:07,862 Epoch 1124/2000 +2025-03-11 23:42:32,975 Current Learning Rate: 0.0006840623 +2025-03-11 23:42:32,978 Train Loss: 0.0155634, Val Loss: 0.0150980 +2025-03-11 23:42:32,978 Epoch 1125/2000 +2025-03-11 23:42:57,742 Current Learning Rate: 0.0006913417 +2025-03-11 23:42:57,743 Train Loss: 0.0155893, Val Loss: 0.0151075 +2025-03-11 23:42:57,743 Epoch 1126/2000 +2025-03-11 23:43:22,900 Current Learning Rate: 0.0006985739 +2025-03-11 23:43:22,900 Train Loss: 0.0155694, Val Loss: 0.0151065 +2025-03-11 23:43:22,901 Epoch 1127/2000 +2025-03-11 23:43:48,018 Current Learning Rate: 0.0007057572 +2025-03-11 23:43:48,018 Train Loss: 0.0156097, Val Loss: 0.0151160 +2025-03-11 23:43:48,019 Epoch 1128/2000 +2025-03-11 23:44:12,863 Current Learning Rate: 0.0007128896 +2025-03-11 23:44:12,869 Train Loss: 0.0155762, Val Loss: 0.0150959 +2025-03-11 23:44:12,870 Epoch 1129/2000 +2025-03-11 23:44:38,638 Current Learning Rate: 0.0007199696 +2025-03-11 23:44:38,638 Train Loss: 0.0156019, Val Loss: 0.0151296 +2025-03-11 23:44:38,639 Epoch 1130/2000 +2025-03-11 23:45:03,800 Current Learning Rate: 0.0007269952 +2025-03-11 23:45:03,800 Train Loss: 0.0155986, Val Loss: 0.0151740 +2025-03-11 23:45:03,801 Epoch 1131/2000 +2025-03-11 23:45:28,937 Current Learning Rate: 0.0007339649 +2025-03-11 23:45:28,937 
Train Loss: 0.0155750, Val Loss: 0.0152726 +2025-03-11 23:45:28,938 Epoch 1132/2000 +2025-03-11 23:45:53,916 Current Learning Rate: 0.0007408768 +2025-03-11 23:45:53,916 Train Loss: 0.0155941, Val Loss: 0.0151956 +2025-03-11 23:45:53,916 Epoch 1133/2000 +2025-03-11 23:46:18,936 Current Learning Rate: 0.0007477293 +2025-03-11 23:46:18,937 Train Loss: 0.0155909, Val Loss: 0.0151557 +2025-03-11 23:46:18,937 Epoch 1134/2000 +2025-03-11 23:46:43,543 Current Learning Rate: 0.0007545207 +2025-03-11 23:46:43,544 Train Loss: 0.0155868, Val Loss: 0.0151125 +2025-03-11 23:46:43,545 Epoch 1135/2000 +2025-03-11 23:47:09,038 Current Learning Rate: 0.0007612493 +2025-03-11 23:47:09,039 Train Loss: 0.0156065, Val Loss: 0.0151277 +2025-03-11 23:47:09,039 Epoch 1136/2000 +2025-03-11 23:47:33,925 Current Learning Rate: 0.0007679134 +2025-03-11 23:47:33,925 Train Loss: 0.0156141, Val Loss: 0.0152554 +2025-03-11 23:47:33,926 Epoch 1137/2000 +2025-03-11 23:47:59,412 Current Learning Rate: 0.0007745114 +2025-03-11 23:47:59,416 Train Loss: 0.0155870, Val Loss: 0.0150917 +2025-03-11 23:47:59,416 Epoch 1138/2000 +2025-03-11 23:48:24,926 Current Learning Rate: 0.0007810417 +2025-03-11 23:48:24,927 Train Loss: 0.0156013, Val Loss: 0.0152503 +2025-03-11 23:48:24,927 Epoch 1139/2000 +2025-03-11 23:48:49,994 Current Learning Rate: 0.0007875026 +2025-03-11 23:48:49,995 Train Loss: 0.0155656, Val Loss: 0.0152047 +2025-03-11 23:48:49,996 Epoch 1140/2000 +2025-03-11 23:49:15,485 Current Learning Rate: 0.0007938926 +2025-03-11 23:49:15,486 Train Loss: 0.0155899, Val Loss: 0.0151984 +2025-03-11 23:49:15,486 Epoch 1141/2000 +2025-03-11 23:49:40,580 Current Learning Rate: 0.0008002101 +2025-03-11 23:49:40,581 Train Loss: 0.0155930, Val Loss: 0.0151009 +2025-03-11 23:49:40,581 Epoch 1142/2000 +2025-03-11 23:50:06,316 Current Learning Rate: 0.0008064535 +2025-03-11 23:50:06,317 Train Loss: 0.0155951, Val Loss: 0.0155489 +2025-03-11 23:50:06,317 Epoch 1143/2000 +2025-03-11 23:50:31,173 Current Learning 
Rate: 0.0008126213 +2025-03-11 23:50:31,174 Train Loss: 0.0155893, Val Loss: 0.0151466 +2025-03-11 23:50:31,174 Epoch 1144/2000 +2025-03-11 23:50:56,534 Current Learning Rate: 0.0008187120 +2025-03-11 23:50:56,538 Train Loss: 0.0155910, Val Loss: 0.0150844 +2025-03-11 23:50:56,539 Epoch 1145/2000 +2025-03-11 23:51:21,508 Current Learning Rate: 0.0008247240 +2025-03-11 23:51:21,512 Train Loss: 0.0155762, Val Loss: 0.0150826 +2025-03-11 23:51:21,513 Epoch 1146/2000 +2025-03-11 23:51:46,752 Current Learning Rate: 0.0008306559 +2025-03-11 23:51:46,752 Train Loss: 0.0156424, Val Loss: 0.0151006 +2025-03-11 23:51:46,753 Epoch 1147/2000 +2025-03-11 23:52:11,428 Current Learning Rate: 0.0008365063 +2025-03-11 23:52:11,432 Train Loss: 0.0155557, Val Loss: 0.0150811 +2025-03-11 23:52:11,432 Epoch 1148/2000 +2025-03-11 23:52:36,835 Current Learning Rate: 0.0008422736 +2025-03-11 23:52:36,836 Train Loss: 0.0155910, Val Loss: 0.0150849 +2025-03-11 23:52:36,836 Epoch 1149/2000 +2025-03-11 23:53:01,756 Current Learning Rate: 0.0008479564 +2025-03-11 23:53:01,757 Train Loss: 0.0155961, Val Loss: 0.0151083 +2025-03-11 23:53:01,757 Epoch 1150/2000 +2025-03-11 23:53:26,800 Current Learning Rate: 0.0008535534 +2025-03-11 23:53:26,801 Train Loss: 0.0155915, Val Loss: 0.0150904 +2025-03-11 23:53:26,801 Epoch 1151/2000 +2025-03-11 23:53:52,115 Current Learning Rate: 0.0008590631 +2025-03-11 23:53:52,119 Train Loss: 0.0155823, Val Loss: 0.0151016 +2025-03-11 23:53:52,119 Epoch 1152/2000 +2025-03-11 23:54:17,001 Current Learning Rate: 0.0008644843 +2025-03-11 23:54:17,005 Train Loss: 0.0156130, Val Loss: 0.0150793 +2025-03-11 23:54:17,005 Epoch 1153/2000 +2025-03-11 23:54:41,919 Current Learning Rate: 0.0008698155 +2025-03-11 23:54:41,919 Train Loss: 0.0155840, Val Loss: 0.0150999 +2025-03-11 23:54:41,919 Epoch 1154/2000 +2025-03-11 23:55:07,311 Current Learning Rate: 0.0008750555 +2025-03-11 23:55:07,312 Train Loss: 0.0155694, Val Loss: 0.0151086 +2025-03-11 23:55:07,312 Epoch 1155/2000 
+2025-03-11 23:55:32,375 Current Learning Rate: 0.0008802030 +2025-03-11 23:55:32,375 Train Loss: 0.0155906, Val Loss: 0.0151526 +2025-03-11 23:55:32,375 Epoch 1156/2000 +2025-03-11 23:55:57,419 Current Learning Rate: 0.0008852566 +2025-03-11 23:55:57,419 Train Loss: 0.0155894, Val Loss: 0.0150851 +2025-03-11 23:55:57,419 Epoch 1157/2000 +2025-03-11 23:56:22,617 Current Learning Rate: 0.0008902152 +2025-03-11 23:56:22,622 Train Loss: 0.0155960, Val Loss: 0.0150792 +2025-03-11 23:56:22,622 Epoch 1158/2000 +2025-03-11 23:56:47,751 Current Learning Rate: 0.0008950775 +2025-03-11 23:56:47,751 Train Loss: 0.0156239, Val Loss: 0.0151055 +2025-03-11 23:56:47,751 Epoch 1159/2000 +2025-03-11 23:57:12,546 Current Learning Rate: 0.0008998423 +2025-03-11 23:57:12,547 Train Loss: 0.0155696, Val Loss: 0.0151068 +2025-03-11 23:57:12,548 Epoch 1160/2000 +2025-03-11 23:57:37,625 Current Learning Rate: 0.0009045085 +2025-03-11 23:57:37,626 Train Loss: 0.0155763, Val Loss: 0.0151244 +2025-03-11 23:57:37,626 Epoch 1161/2000 +2025-03-11 23:58:02,586 Current Learning Rate: 0.0009090749 +2025-03-11 23:58:02,587 Train Loss: 0.0155819, Val Loss: 0.0151369 +2025-03-11 23:58:02,587 Epoch 1162/2000 +2025-03-11 23:58:27,879 Current Learning Rate: 0.0009135403 +2025-03-11 23:58:27,884 Train Loss: 0.0155957, Val Loss: 0.0151098 +2025-03-11 23:58:27,884 Epoch 1163/2000 +2025-03-11 23:58:52,960 Current Learning Rate: 0.0009179037 +2025-03-11 23:58:52,961 Train Loss: 0.0155695, Val Loss: 0.0151193 +2025-03-11 23:58:52,961 Epoch 1164/2000 +2025-03-11 23:59:17,745 Current Learning Rate: 0.0009221640 +2025-03-11 23:59:17,746 Train Loss: 0.0155852, Val Loss: 0.0151238 +2025-03-11 23:59:17,746 Epoch 1165/2000 +2025-03-11 23:59:42,827 Current Learning Rate: 0.0009263201 +2025-03-11 23:59:42,831 Train Loss: 0.0155940, Val Loss: 0.0150650 +2025-03-11 23:59:42,831 Epoch 1166/2000 +2025-03-12 00:00:07,884 Current Learning Rate: 0.0009303710 +2025-03-12 00:00:07,884 Train Loss: 0.0155719, Val Loss: 0.0151373 
+2025-03-12 00:00:07,884 Epoch 1167/2000 +2025-03-12 00:00:32,891 Current Learning Rate: 0.0009343158 +2025-03-12 00:00:32,895 Train Loss: 0.0156088, Val Loss: 0.0150643 +2025-03-12 00:00:32,895 Epoch 1168/2000 +2025-03-12 00:00:57,894 Current Learning Rate: 0.0009381533 +2025-03-12 00:00:57,895 Train Loss: 0.0155941, Val Loss: 0.0152590 +2025-03-12 00:00:57,895 Epoch 1169/2000 +2025-03-12 00:01:23,044 Current Learning Rate: 0.0009418828 +2025-03-12 00:01:23,045 Train Loss: 0.0155606, Val Loss: 0.0150874 +2025-03-12 00:01:23,045 Epoch 1170/2000 +2025-03-12 00:01:48,793 Current Learning Rate: 0.0009455033 +2025-03-12 00:01:48,794 Train Loss: 0.0155907, Val Loss: 0.0153331 +2025-03-12 00:01:48,794 Epoch 1171/2000 +2025-03-12 00:02:14,119 Current Learning Rate: 0.0009490138 +2025-03-12 00:02:14,119 Train Loss: 0.0155697, Val Loss: 0.0153825 +2025-03-12 00:02:14,120 Epoch 1172/2000 +2025-03-12 00:02:39,567 Current Learning Rate: 0.0009524135 +2025-03-12 00:02:39,567 Train Loss: 0.0155947, Val Loss: 0.0150680 +2025-03-12 00:02:39,568 Epoch 1173/2000 +2025-03-12 00:03:04,873 Current Learning Rate: 0.0009557016 +2025-03-12 00:03:04,874 Train Loss: 0.0156100, Val Loss: 0.0151985 +2025-03-12 00:03:04,874 Epoch 1174/2000 +2025-03-12 00:03:30,065 Current Learning Rate: 0.0009588773 +2025-03-12 00:03:30,065 Train Loss: 0.0155389, Val Loss: 0.0151667 +2025-03-12 00:03:30,065 Epoch 1175/2000 +2025-03-12 00:03:55,432 Current Learning Rate: 0.0009619398 +2025-03-12 00:03:55,433 Train Loss: 0.0155909, Val Loss: 0.0150823 +2025-03-12 00:03:55,434 Epoch 1176/2000 +2025-03-12 00:04:21,344 Current Learning Rate: 0.0009648882 +2025-03-12 00:04:21,344 Train Loss: 0.0155639, Val Loss: 0.0150672 +2025-03-12 00:04:21,344 Epoch 1177/2000 +2025-03-12 00:04:46,963 Current Learning Rate: 0.0009677220 +2025-03-12 00:04:46,968 Train Loss: 0.0155894, Val Loss: 0.0150575 +2025-03-12 00:04:46,969 Epoch 1178/2000 +2025-03-12 00:05:13,120 Current Learning Rate: 0.0009704404 +2025-03-12 00:05:13,121 
Train Loss: 0.0155874, Val Loss: 0.0150833 +2025-03-12 00:05:13,121 Epoch 1179/2000 +2025-03-12 00:05:39,176 Current Learning Rate: 0.0009730427 +2025-03-12 00:05:39,177 Train Loss: 0.0155661, Val Loss: 0.0150711 +2025-03-12 00:05:39,177 Epoch 1180/2000 +2025-03-12 00:06:04,750 Current Learning Rate: 0.0009755283 +2025-03-12 00:06:04,750 Train Loss: 0.0155807, Val Loss: 0.0150585 +2025-03-12 00:06:04,750 Epoch 1181/2000 +2025-03-12 00:06:30,108 Current Learning Rate: 0.0009778965 +2025-03-12 00:06:30,109 Train Loss: 0.0155562, Val Loss: 0.0150663 +2025-03-12 00:06:30,109 Epoch 1182/2000 +2025-03-12 00:06:55,521 Current Learning Rate: 0.0009801468 +2025-03-12 00:06:55,522 Train Loss: 0.0155917, Val Loss: 0.0150767 +2025-03-12 00:06:55,522 Epoch 1183/2000 +2025-03-12 00:07:20,375 Current Learning Rate: 0.0009822787 +2025-03-12 00:07:20,376 Train Loss: 0.0156101, Val Loss: 0.0150752 +2025-03-12 00:07:20,376 Epoch 1184/2000 +2025-03-12 00:07:45,649 Current Learning Rate: 0.0009842916 +2025-03-12 00:07:45,650 Train Loss: 0.0155451, Val Loss: 0.0150810 +2025-03-12 00:07:45,650 Epoch 1185/2000 +2025-03-12 00:08:10,911 Current Learning Rate: 0.0009861850 +2025-03-12 00:08:10,911 Train Loss: 0.0155521, Val Loss: 0.0151408 +2025-03-12 00:08:10,911 Epoch 1186/2000 +2025-03-12 00:08:36,630 Current Learning Rate: 0.0009879584 +2025-03-12 00:08:36,631 Train Loss: 0.0155973, Val Loss: 0.0150735 +2025-03-12 00:08:36,631 Epoch 1187/2000 +2025-03-12 00:09:02,117 Current Learning Rate: 0.0009896114 +2025-03-12 00:09:02,118 Train Loss: 0.0156007, Val Loss: 0.0151183 +2025-03-12 00:09:02,118 Epoch 1188/2000 +2025-03-12 00:09:27,195 Current Learning Rate: 0.0009911436 +2025-03-12 00:09:27,196 Train Loss: 0.0155427, Val Loss: 0.0150997 +2025-03-12 00:09:27,196 Epoch 1189/2000 +2025-03-12 00:09:52,351 Current Learning Rate: 0.0009925547 +2025-03-12 00:09:52,351 Train Loss: 0.0155559, Val Loss: 0.0150732 +2025-03-12 00:09:52,352 Epoch 1190/2000 +2025-03-12 00:10:17,920 Current Learning 
Rate: 0.0009938442 +2025-03-12 00:10:17,927 Train Loss: 0.0155772, Val Loss: 0.0150553 +2025-03-12 00:10:17,927 Epoch 1191/2000 +2025-03-12 00:10:43,145 Current Learning Rate: 0.0009950118 +2025-03-12 00:10:43,146 Train Loss: 0.0155789, Val Loss: 0.0155016 +2025-03-12 00:10:43,146 Epoch 1192/2000 +2025-03-12 00:11:08,926 Current Learning Rate: 0.0009960574 +2025-03-12 00:11:08,927 Train Loss: 0.0155556, Val Loss: 0.0151786 +2025-03-12 00:11:08,927 Epoch 1193/2000 +2025-03-12 00:11:34,576 Current Learning Rate: 0.0009969805 +2025-03-12 00:11:34,577 Train Loss: 0.0155828, Val Loss: 0.0150597 +2025-03-12 00:11:34,577 Epoch 1194/2000 +2025-03-12 00:12:00,446 Current Learning Rate: 0.0009977810 +2025-03-12 00:12:00,447 Train Loss: 0.0155424, Val Loss: 0.0154832 +2025-03-12 00:12:00,447 Epoch 1195/2000 +2025-03-12 00:12:26,691 Current Learning Rate: 0.0009984587 +2025-03-12 00:12:26,692 Train Loss: 0.0155858, Val Loss: 0.0150856 +2025-03-12 00:12:26,692 Epoch 1196/2000 +2025-03-12 00:12:51,908 Current Learning Rate: 0.0009990134 +2025-03-12 00:12:51,913 Train Loss: 0.0155752, Val Loss: 0.0150530 +2025-03-12 00:12:51,913 Epoch 1197/2000 +2025-03-12 00:13:17,383 Current Learning Rate: 0.0009994449 +2025-03-12 00:13:17,384 Train Loss: 0.0155409, Val Loss: 0.0152572 +2025-03-12 00:13:17,384 Epoch 1198/2000 +2025-03-12 00:13:42,916 Current Learning Rate: 0.0009997533 +2025-03-12 00:13:42,917 Train Loss: 0.0155698, Val Loss: 0.0150688 +2025-03-12 00:13:42,917 Epoch 1199/2000 +2025-03-12 00:14:08,097 Current Learning Rate: 0.0009999383 +2025-03-12 00:14:08,097 Train Loss: 0.0155581, Val Loss: 0.0151191 +2025-03-12 00:14:08,098 Epoch 1200/2000 +2025-03-12 00:14:33,612 Current Learning Rate: 0.0010000000 +2025-03-12 00:14:33,613 Train Loss: 0.0155308, Val Loss: 0.0153449 +2025-03-12 00:14:33,613 Epoch 1201/2000 +2025-03-12 00:14:58,652 Current Learning Rate: 0.0009999383 +2025-03-12 00:14:58,652 Train Loss: 0.0156080, Val Loss: 0.0152759 +2025-03-12 00:14:58,653 Epoch 1202/2000 
+2025-03-12 00:15:24,714 Current Learning Rate: 0.0009997533 +2025-03-12 00:15:24,717 Train Loss: 0.0155109, Val Loss: 0.0151813 +2025-03-12 00:15:24,720 Epoch 1203/2000 +2025-03-12 00:15:50,327 Current Learning Rate: 0.0009994449 +2025-03-12 00:15:50,328 Train Loss: 0.0155848, Val Loss: 0.0151161 +2025-03-12 00:15:50,328 Epoch 1204/2000 +2025-03-12 00:16:16,722 Current Learning Rate: 0.0009990134 +2025-03-12 00:16:16,723 Train Loss: 0.0155368, Val Loss: 0.0151777 +2025-03-12 00:16:16,723 Epoch 1205/2000 +2025-03-12 00:16:42,948 Current Learning Rate: 0.0009984587 +2025-03-12 00:16:42,949 Train Loss: 0.0156053, Val Loss: 0.0152842 +2025-03-12 00:16:42,949 Epoch 1206/2000 +2025-03-12 00:17:08,777 Current Learning Rate: 0.0009977810 +2025-03-12 00:17:08,777 Train Loss: 0.0155173, Val Loss: 0.0152408 +2025-03-12 00:17:08,777 Epoch 1207/2000 +2025-03-12 00:17:34,169 Current Learning Rate: 0.0009969805 +2025-03-12 00:17:34,170 Train Loss: 0.0155551, Val Loss: 0.0150702 +2025-03-12 00:17:34,170 Epoch 1208/2000 +2025-03-12 00:18:00,560 Current Learning Rate: 0.0009960574 +2025-03-12 00:18:00,561 Train Loss: 0.0155450, Val Loss: 0.0151957 +2025-03-12 00:18:00,561 Epoch 1209/2000 +2025-03-12 00:18:26,428 Current Learning Rate: 0.0009950118 +2025-03-12 00:18:26,432 Train Loss: 0.0155406, Val Loss: 0.0150500 +2025-03-12 00:18:26,432 Epoch 1210/2000 +2025-03-12 00:18:52,333 Current Learning Rate: 0.0009938442 +2025-03-12 00:18:52,334 Train Loss: 0.0155620, Val Loss: 0.0153108 +2025-03-12 00:18:52,334 Epoch 1211/2000 +2025-03-12 00:19:18,196 Current Learning Rate: 0.0009925547 +2025-03-12 00:19:18,197 Train Loss: 0.0155522, Val Loss: 0.0156708 +2025-03-12 00:19:18,197 Epoch 1212/2000 +2025-03-12 00:19:44,021 Current Learning Rate: 0.0009911436 +2025-03-12 00:19:44,021 Train Loss: 0.0155471, Val Loss: 0.0150526 +2025-03-12 00:19:44,022 Epoch 1213/2000 +2025-03-12 00:20:09,733 Current Learning Rate: 0.0009896114 +2025-03-12 00:20:09,736 Train Loss: 0.0155446, Val Loss: 0.0150417 
+2025-03-12 00:20:09,737 Epoch 1214/2000 +2025-03-12 00:20:35,447 Current Learning Rate: 0.0009879584 +2025-03-12 00:20:35,448 Train Loss: 0.0155329, Val Loss: 0.0150762 +2025-03-12 00:20:35,448 Epoch 1215/2000 +2025-03-12 00:21:01,296 Current Learning Rate: 0.0009861850 +2025-03-12 00:21:01,297 Train Loss: 0.0156027, Val Loss: 0.0151074 +2025-03-12 00:21:01,297 Epoch 1216/2000 +2025-03-12 00:21:27,359 Current Learning Rate: 0.0009842916 +2025-03-12 00:21:27,360 Train Loss: 0.0155089, Val Loss: 0.0150573 +2025-03-12 00:21:27,360 Epoch 1217/2000 +2025-03-12 00:21:52,636 Current Learning Rate: 0.0009822787 +2025-03-12 00:21:52,637 Train Loss: 0.0155259, Val Loss: 0.0150564 +2025-03-12 00:21:52,637 Epoch 1218/2000 +2025-03-12 00:22:18,389 Current Learning Rate: 0.0009801468 +2025-03-12 00:22:18,390 Train Loss: 0.0155401, Val Loss: 0.0150574 +2025-03-12 00:22:18,391 Epoch 1219/2000 +2025-03-12 00:22:44,629 Current Learning Rate: 0.0009778965 +2025-03-12 00:22:44,630 Train Loss: 0.0155210, Val Loss: 0.0150775 +2025-03-12 00:22:44,630 Epoch 1220/2000 +2025-03-12 00:23:10,360 Current Learning Rate: 0.0009755283 +2025-03-12 00:23:10,364 Train Loss: 0.0155415, Val Loss: 0.0150409 +2025-03-12 00:23:10,364 Epoch 1221/2000 +2025-03-12 00:23:35,876 Current Learning Rate: 0.0009730427 +2025-03-12 00:23:35,877 Train Loss: 0.0155439, Val Loss: 0.0151246 +2025-03-12 00:23:35,877 Epoch 1222/2000 +2025-03-12 00:24:01,588 Current Learning Rate: 0.0009704404 +2025-03-12 00:24:01,589 Train Loss: 0.0155201, Val Loss: 0.0151524 +2025-03-12 00:24:01,589 Epoch 1223/2000 +2025-03-12 00:24:26,725 Current Learning Rate: 0.0009677220 +2025-03-12 00:24:26,726 Train Loss: 0.0155384, Val Loss: 0.0150541 +2025-03-12 00:24:26,726 Epoch 1224/2000 +2025-03-12 00:24:52,279 Current Learning Rate: 0.0009648882 +2025-03-12 00:24:52,280 Train Loss: 0.0155222, Val Loss: 0.0150800 +2025-03-12 00:24:52,280 Epoch 1225/2000 +2025-03-12 00:25:17,244 Current Learning Rate: 0.0009619398 +2025-03-12 00:25:17,245 
Train Loss: 0.0155417, Val Loss: 0.0150691 +2025-03-12 00:25:17,245 Epoch 1226/2000 +2025-03-12 00:25:42,698 Current Learning Rate: 0.0009588773 +2025-03-12 00:25:42,698 Train Loss: 0.0155340, Val Loss: 0.0151395 +2025-03-12 00:25:42,699 Epoch 1227/2000 +2025-03-12 00:26:08,280 Current Learning Rate: 0.0009557016 +2025-03-12 00:26:08,284 Train Loss: 0.0154866, Val Loss: 0.0150262 +2025-03-12 00:26:08,284 Epoch 1228/2000 +2025-03-12 00:26:34,096 Current Learning Rate: 0.0009524135 +2025-03-12 00:26:34,100 Train Loss: 0.0156469, Val Loss: 0.0150221 +2025-03-12 00:26:34,100 Epoch 1229/2000 +2025-03-12 00:27:00,108 Current Learning Rate: 0.0009490138 +2025-03-12 00:27:00,109 Train Loss: 0.0154771, Val Loss: 0.0150443 +2025-03-12 00:27:00,109 Epoch 1230/2000 +2025-03-12 00:27:25,267 Current Learning Rate: 0.0009455033 +2025-03-12 00:27:25,268 Train Loss: 0.0155065, Val Loss: 0.0150225 +2025-03-12 00:27:25,268 Epoch 1231/2000 +2025-03-12 00:27:51,536 Current Learning Rate: 0.0009418828 +2025-03-12 00:27:51,536 Train Loss: 0.0155138, Val Loss: 0.0150313 +2025-03-12 00:27:51,537 Epoch 1232/2000 +2025-03-12 00:28:17,348 Current Learning Rate: 0.0009381533 +2025-03-12 00:28:17,348 Train Loss: 0.0155083, Val Loss: 0.0150342 +2025-03-12 00:28:17,348 Epoch 1233/2000 +2025-03-12 00:28:42,986 Current Learning Rate: 0.0009343158 +2025-03-12 00:28:42,990 Train Loss: 0.0155156, Val Loss: 0.0150133 +2025-03-12 00:28:42,990 Epoch 1234/2000 +2025-03-12 00:29:08,018 Current Learning Rate: 0.0009303710 +2025-03-12 00:29:08,019 Train Loss: 0.0155162, Val Loss: 0.0150661 +2025-03-12 00:29:08,019 Epoch 1235/2000 +2025-03-12 00:29:33,254 Current Learning Rate: 0.0009263201 +2025-03-12 00:29:33,254 Train Loss: 0.0155178, Val Loss: 0.0150825 +2025-03-12 00:29:33,254 Epoch 1236/2000 +2025-03-12 00:29:58,416 Current Learning Rate: 0.0009221640 +2025-03-12 00:29:58,420 Train Loss: 0.0155037, Val Loss: 0.0150125 +2025-03-12 00:29:58,420 Epoch 1237/2000 +2025-03-12 00:30:23,666 Current Learning 
Rate: 0.0009179037 +2025-03-12 00:30:23,666 Train Loss: 0.0155098, Val Loss: 0.0150189 +2025-03-12 00:30:23,667 Epoch 1238/2000 +2025-03-12 00:30:49,318 Current Learning Rate: 0.0009135403 +2025-03-12 00:30:49,319 Train Loss: 0.0155122, Val Loss: 0.0152059 +2025-03-12 00:30:49,319 Epoch 1239/2000 +2025-03-12 00:31:15,241 Current Learning Rate: 0.0009090749 +2025-03-12 00:31:15,242 Train Loss: 0.0155091, Val Loss: 0.0150927 +2025-03-12 00:31:15,242 Epoch 1240/2000 +2025-03-12 00:31:41,310 Current Learning Rate: 0.0009045085 +2025-03-12 00:31:41,310 Train Loss: 0.0155560, Val Loss: 0.0150644 +2025-03-12 00:31:41,311 Epoch 1241/2000 +2025-03-12 00:32:06,829 Current Learning Rate: 0.0008998423 +2025-03-12 00:32:06,829 Train Loss: 0.0154863, Val Loss: 0.0150550 +2025-03-12 00:32:06,829 Epoch 1242/2000 +2025-03-12 00:32:32,411 Current Learning Rate: 0.0008950775 +2025-03-12 00:32:32,412 Train Loss: 0.0154741, Val Loss: 0.0150262 +2025-03-12 00:32:32,412 Epoch 1243/2000 +2025-03-12 00:32:58,344 Current Learning Rate: 0.0008902152 +2025-03-12 00:32:58,344 Train Loss: 0.0155327, Val Loss: 0.0150329 +2025-03-12 00:32:58,344 Epoch 1244/2000 +2025-03-12 00:33:24,050 Current Learning Rate: 0.0008852566 +2025-03-12 00:33:24,051 Train Loss: 0.0154806, Val Loss: 0.0150206 +2025-03-12 00:33:24,051 Epoch 1245/2000 +2025-03-12 00:33:49,736 Current Learning Rate: 0.0008802030 +2025-03-12 00:33:49,737 Train Loss: 0.0155552, Val Loss: 0.0150173 +2025-03-12 00:33:49,737 Epoch 1246/2000 +2025-03-12 00:34:16,592 Current Learning Rate: 0.0008750555 +2025-03-12 00:34:16,596 Train Loss: 0.0154584, Val Loss: 0.0150016 +2025-03-12 00:34:16,596 Epoch 1247/2000 +2025-03-12 00:34:42,540 Current Learning Rate: 0.0008698155 +2025-03-12 00:34:42,540 Train Loss: 0.0155163, Val Loss: 0.0150113 +2025-03-12 00:34:42,540 Epoch 1248/2000 +2025-03-12 00:35:08,009 Current Learning Rate: 0.0008644843 +2025-03-12 00:35:08,010 Train Loss: 0.0154761, Val Loss: 0.0150702 +2025-03-12 00:35:08,010 Epoch 1249/2000 
+2025-03-12 00:35:33,865 Current Learning Rate: 0.0008590631 +2025-03-12 00:35:33,866 Train Loss: 0.0154818, Val Loss: 0.0150537 +2025-03-12 00:35:33,866 Epoch 1250/2000 +2025-03-12 00:35:59,351 Current Learning Rate: 0.0008535534 +2025-03-12 00:35:59,352 Train Loss: 0.0155298, Val Loss: 0.0150852 +2025-03-12 00:35:59,352 Epoch 1251/2000 +2025-03-12 00:36:24,676 Current Learning Rate: 0.0008479564 +2025-03-12 00:36:24,677 Train Loss: 0.0154693, Val Loss: 0.0150037 +2025-03-12 00:36:24,677 Epoch 1252/2000 +2025-03-12 00:36:50,279 Current Learning Rate: 0.0008422736 +2025-03-12 00:36:50,279 Train Loss: 0.0155003, Val Loss: 0.0150818 +2025-03-12 00:36:50,280 Epoch 1253/2000 +2025-03-12 00:37:15,574 Current Learning Rate: 0.0008365063 +2025-03-12 00:37:15,574 Train Loss: 0.0154585, Val Loss: 0.0150092 +2025-03-12 00:37:15,575 Epoch 1254/2000 +2025-03-12 00:37:40,915 Current Learning Rate: 0.0008306559 +2025-03-12 00:37:40,915 Train Loss: 0.0155066, Val Loss: 0.0150267 +2025-03-12 00:37:40,915 Epoch 1255/2000 +2025-03-12 00:38:06,344 Current Learning Rate: 0.0008247240 +2025-03-12 00:38:06,345 Train Loss: 0.0154776, Val Loss: 0.0150131 +2025-03-12 00:38:06,345 Epoch 1256/2000 +2025-03-12 00:38:31,922 Current Learning Rate: 0.0008187120 +2025-03-12 00:38:31,927 Train Loss: 0.0154767, Val Loss: 0.0150002 +2025-03-12 00:38:31,928 Epoch 1257/2000 +2025-03-12 00:38:57,312 Current Learning Rate: 0.0008126213 +2025-03-12 00:38:57,317 Train Loss: 0.0154760, Val Loss: 0.0149940 +2025-03-12 00:38:57,317 Epoch 1258/2000 +2025-03-12 00:39:22,815 Current Learning Rate: 0.0008064535 +2025-03-12 00:39:22,815 Train Loss: 0.0154805, Val Loss: 0.0150121 +2025-03-12 00:39:22,816 Epoch 1259/2000 +2025-03-12 00:39:48,130 Current Learning Rate: 0.0008002101 +2025-03-12 00:39:48,130 Train Loss: 0.0154802, Val Loss: 0.0152334 +2025-03-12 00:39:48,130 Epoch 1260/2000 +2025-03-12 00:40:13,737 Current Learning Rate: 0.0007938926 +2025-03-12 00:40:13,738 Train Loss: 0.0154973, Val Loss: 0.0149988 
+2025-03-12 00:40:13,738 Epoch 1261/2000 +2025-03-12 00:40:39,607 Current Learning Rate: 0.0007875026 +2025-03-12 00:40:39,607 Train Loss: 0.0154629, Val Loss: 0.0150345 +2025-03-12 00:40:39,607 Epoch 1262/2000 +2025-03-12 00:41:05,053 Current Learning Rate: 0.0007810417 +2025-03-12 00:41:05,054 Train Loss: 0.0154684, Val Loss: 0.0150147 +2025-03-12 00:41:05,054 Epoch 1263/2000 +2025-03-12 00:41:30,682 Current Learning Rate: 0.0007745114 +2025-03-12 00:41:30,683 Train Loss: 0.0154693, Val Loss: 0.0150035 +2025-03-12 00:41:30,683 Epoch 1264/2000 +2025-03-12 00:41:56,594 Current Learning Rate: 0.0007679134 +2025-03-12 00:41:56,603 Train Loss: 0.0154662, Val Loss: 0.0149907 +2025-03-12 00:41:56,604 Epoch 1265/2000 +2025-03-12 00:42:22,691 Current Learning Rate: 0.0007612493 +2025-03-12 00:42:22,692 Train Loss: 0.0154648, Val Loss: 0.0150089 +2025-03-12 00:42:22,692 Epoch 1266/2000 +2025-03-12 00:42:49,130 Current Learning Rate: 0.0007545207 +2025-03-12 00:42:49,130 Train Loss: 0.0154658, Val Loss: 0.0150768 +2025-03-12 00:42:49,130 Epoch 1267/2000 +2025-03-12 00:43:15,240 Current Learning Rate: 0.0007477293 +2025-03-12 00:43:15,241 Train Loss: 0.0154646, Val Loss: 0.0152136 +2025-03-12 00:43:15,241 Epoch 1268/2000 +2025-03-12 00:43:41,282 Current Learning Rate: 0.0007408768 +2025-03-12 00:43:41,283 Train Loss: 0.0154596, Val Loss: 0.0149952 +2025-03-12 00:43:41,283 Epoch 1269/2000 +2025-03-12 00:44:06,679 Current Learning Rate: 0.0007339649 +2025-03-12 00:44:06,680 Train Loss: 0.0154633, Val Loss: 0.0151053 +2025-03-12 00:44:06,680 Epoch 1270/2000 +2025-03-12 00:44:32,181 Current Learning Rate: 0.0007269952 +2025-03-12 00:44:32,181 Train Loss: 0.0154701, Val Loss: 0.0150219 +2025-03-12 00:44:32,182 Epoch 1271/2000 +2025-03-12 00:44:57,780 Current Learning Rate: 0.0007199696 +2025-03-12 00:44:57,780 Train Loss: 0.0154518, Val Loss: 0.0149914 +2025-03-12 00:44:57,781 Epoch 1272/2000 +2025-03-12 00:45:22,889 Current Learning Rate: 0.0007128896 +2025-03-12 00:45:22,889 
Train Loss: 0.0154502, Val Loss: 0.0150320 +2025-03-12 00:45:22,889 Epoch 1273/2000 +2025-03-12 00:45:48,715 Current Learning Rate: 0.0007057572 +2025-03-12 00:45:48,716 Train Loss: 0.0154699, Val Loss: 0.0150107 +2025-03-12 00:45:48,716 Epoch 1274/2000 +2025-03-12 00:46:14,326 Current Learning Rate: 0.0006985739 +2025-03-12 00:46:14,330 Train Loss: 0.0154496, Val Loss: 0.0149826 +2025-03-12 00:46:14,330 Epoch 1275/2000 +2025-03-12 00:46:40,064 Current Learning Rate: 0.0006913417 +2025-03-12 00:46:40,065 Train Loss: 0.0154502, Val Loss: 0.0149830 +2025-03-12 00:46:40,065 Epoch 1276/2000 +2025-03-12 00:47:05,404 Current Learning Rate: 0.0006840623 +2025-03-12 00:47:05,404 Train Loss: 0.0154426, Val Loss: 0.0150720 +2025-03-12 00:47:05,405 Epoch 1277/2000 +2025-03-12 00:47:30,870 Current Learning Rate: 0.0006767374 +2025-03-12 00:47:30,870 Train Loss: 0.0154537, Val Loss: 0.0150222 +2025-03-12 00:47:30,871 Epoch 1278/2000 +2025-03-12 00:47:56,611 Current Learning Rate: 0.0006693690 +2025-03-12 00:47:56,612 Train Loss: 0.0154645, Val Loss: 0.0150151 +2025-03-12 00:47:56,613 Epoch 1279/2000 +2025-03-12 00:48:22,113 Current Learning Rate: 0.0006619587 +2025-03-12 00:48:22,114 Train Loss: 0.0154342, Val Loss: 0.0149878 +2025-03-12 00:48:22,114 Epoch 1280/2000 +2025-03-12 00:48:47,455 Current Learning Rate: 0.0006545085 +2025-03-12 00:48:47,457 Train Loss: 0.0154456, Val Loss: 0.0150564 +2025-03-12 00:48:47,458 Epoch 1281/2000 +2025-03-12 00:49:13,409 Current Learning Rate: 0.0006470202 +2025-03-12 00:49:13,413 Train Loss: 0.0154438, Val Loss: 0.0149814 +2025-03-12 00:49:13,413 Epoch 1282/2000 +2025-03-12 00:49:38,765 Current Learning Rate: 0.0006394956 +2025-03-12 00:49:38,765 Train Loss: 0.0154516, Val Loss: 0.0149856 +2025-03-12 00:49:38,766 Epoch 1283/2000 +2025-03-12 00:50:04,143 Current Learning Rate: 0.0006319365 +2025-03-12 00:50:04,147 Train Loss: 0.0154338, Val Loss: 0.0149761 +2025-03-12 00:50:04,147 Epoch 1284/2000 +2025-03-12 00:50:29,662 Current Learning 
Rate: 0.0006243449 +2025-03-12 00:50:29,662 Train Loss: 0.0154562, Val Loss: 0.0149968 +2025-03-12 00:50:29,662 Epoch 1285/2000 +2025-03-12 00:50:55,914 Current Learning Rate: 0.0006167227 +2025-03-12 00:50:55,914 Train Loss: 0.0154408, Val Loss: 0.0149831 +2025-03-12 00:50:55,914 Epoch 1286/2000 +2025-03-12 00:51:21,106 Current Learning Rate: 0.0006090716 +2025-03-12 00:51:21,107 Train Loss: 0.0154231, Val Loss: 0.0150139 +2025-03-12 00:51:21,107 Epoch 1287/2000 +2025-03-12 00:51:46,502 Current Learning Rate: 0.0006013936 +2025-03-12 00:51:46,503 Train Loss: 0.0154332, Val Loss: 0.0150290 +2025-03-12 00:51:46,504 Epoch 1288/2000 +2025-03-12 00:52:11,252 Current Learning Rate: 0.0005936907 +2025-03-12 00:52:11,252 Train Loss: 0.0154339, Val Loss: 0.0150330 +2025-03-12 00:52:11,253 Epoch 1289/2000 +2025-03-12 00:52:36,598 Current Learning Rate: 0.0005859646 +2025-03-12 00:52:36,599 Train Loss: 0.0154331, Val Loss: 0.0150427 +2025-03-12 00:52:36,599 Epoch 1290/2000 +2025-03-12 00:53:01,857 Current Learning Rate: 0.0005782172 +2025-03-12 00:53:01,858 Train Loss: 0.0154314, Val Loss: 0.0150206 +2025-03-12 00:53:01,858 Epoch 1291/2000 +2025-03-12 00:53:27,281 Current Learning Rate: 0.0005704506 +2025-03-12 00:53:27,281 Train Loss: 0.0154293, Val Loss: 0.0150309 +2025-03-12 00:53:27,282 Epoch 1292/2000 +2025-03-12 00:53:52,184 Current Learning Rate: 0.0005626666 +2025-03-12 00:53:52,185 Train Loss: 0.0154285, Val Loss: 0.0149980 +2025-03-12 00:53:52,185 Epoch 1293/2000 +2025-03-12 00:54:17,534 Current Learning Rate: 0.0005548672 +2025-03-12 00:54:17,535 Train Loss: 0.0154236, Val Loss: 0.0149848 +2025-03-12 00:54:17,536 Epoch 1294/2000 +2025-03-12 00:54:43,055 Current Learning Rate: 0.0005470542 +2025-03-12 00:54:43,059 Train Loss: 0.0154247, Val Loss: 0.0149715 +2025-03-12 00:54:43,060 Epoch 1295/2000 +2025-03-12 00:55:09,421 Current Learning Rate: 0.0005392295 +2025-03-12 00:55:09,422 Train Loss: 0.0154216, Val Loss: 0.0150290 +2025-03-12 00:55:09,422 Epoch 1296/2000 
+2025-03-12 00:55:34,566 Current Learning Rate: 0.0005313953 +2025-03-12 00:55:34,567 Train Loss: 0.0154382, Val Loss: 0.0149721 +2025-03-12 00:55:34,567 Epoch 1297/2000 +2025-03-12 00:56:00,049 Current Learning Rate: 0.0005235532 +2025-03-12 00:56:00,049 Train Loss: 0.0154148, Val Loss: 0.0149904 +2025-03-12 00:56:00,050 Epoch 1298/2000 +2025-03-12 00:56:25,746 Current Learning Rate: 0.0005157054 +2025-03-12 00:56:25,747 Train Loss: 0.0154192, Val Loss: 0.0149885 +2025-03-12 00:56:25,747 Epoch 1299/2000 +2025-03-12 00:56:51,565 Current Learning Rate: 0.0005078537 +2025-03-12 00:56:51,566 Train Loss: 0.0154184, Val Loss: 0.0149824 +2025-03-12 00:56:51,566 Epoch 1300/2000 +2025-03-12 00:57:17,645 Current Learning Rate: 0.0005000000 +2025-03-12 00:57:17,646 Train Loss: 0.0154168, Val Loss: 0.0149769 +2025-03-12 00:57:17,646 Epoch 1301/2000 +2025-03-12 00:57:43,052 Current Learning Rate: 0.0004921463 +2025-03-12 00:57:43,055 Train Loss: 0.0154157, Val Loss: 0.0149658 +2025-03-12 00:57:43,056 Epoch 1302/2000 +2025-03-12 00:58:08,872 Current Learning Rate: 0.0004842946 +2025-03-12 00:58:08,872 Train Loss: 0.0154164, Val Loss: 0.0149758 +2025-03-12 00:58:08,873 Epoch 1303/2000 +2025-03-12 00:58:34,099 Current Learning Rate: 0.0004764468 +2025-03-12 00:58:34,100 Train Loss: 0.0154137, Val Loss: 0.0149791 +2025-03-12 00:58:34,100 Epoch 1304/2000 +2025-03-12 00:58:59,631 Current Learning Rate: 0.0004686047 +2025-03-12 00:58:59,631 Train Loss: 0.0154106, Val Loss: 0.0149682 +2025-03-12 00:58:59,632 Epoch 1305/2000 +2025-03-12 00:59:25,601 Current Learning Rate: 0.0004607705 +2025-03-12 00:59:25,602 Train Loss: 0.0154142, Val Loss: 0.0149794 +2025-03-12 00:59:25,602 Epoch 1306/2000 +2025-03-12 00:59:50,923 Current Learning Rate: 0.0004529458 +2025-03-12 00:59:50,929 Train Loss: 0.0154120, Val Loss: 0.0149618 +2025-03-12 00:59:50,929 Epoch 1307/2000 +2025-03-12 01:00:16,277 Current Learning Rate: 0.0004451328 +2025-03-12 01:00:16,278 Train Loss: 0.0154067, Val Loss: 0.0149621 
+2025-03-12 01:00:16,278 Epoch 1308/2000 +2025-03-12 01:00:41,936 Current Learning Rate: 0.0004373334 +2025-03-12 01:00:41,940 Train Loss: 0.0154078, Val Loss: 0.0149557 +2025-03-12 01:00:41,940 Epoch 1309/2000 +2025-03-12 01:01:07,241 Current Learning Rate: 0.0004295494 +2025-03-12 01:01:07,241 Train Loss: 0.0154084, Val Loss: 0.0149737 +2025-03-12 01:01:07,241 Epoch 1310/2000 +2025-03-12 01:01:32,604 Current Learning Rate: 0.0004217828 +2025-03-12 01:01:32,604 Train Loss: 0.0154041, Val Loss: 0.0149703 +2025-03-12 01:01:32,605 Epoch 1311/2000 +2025-03-12 01:01:57,648 Current Learning Rate: 0.0004140354 +2025-03-12 01:01:57,649 Train Loss: 0.0154079, Val Loss: 0.0149640 +2025-03-12 01:01:57,649 Epoch 1312/2000 +2025-03-12 01:02:22,603 Current Learning Rate: 0.0004063093 +2025-03-12 01:02:22,603 Train Loss: 0.0154033, Val Loss: 0.0149724 +2025-03-12 01:02:22,603 Epoch 1313/2000 +2025-03-12 01:02:47,892 Current Learning Rate: 0.0003986064 +2025-03-12 01:02:47,893 Train Loss: 0.0154038, Val Loss: 0.0149611 +2025-03-12 01:02:47,893 Epoch 1314/2000 +2025-03-12 01:03:13,137 Current Learning Rate: 0.0003909284 +2025-03-12 01:03:13,138 Train Loss: 0.0154013, Val Loss: 0.0149663 +2025-03-12 01:03:13,138 Epoch 1315/2000 +2025-03-12 01:03:38,488 Current Learning Rate: 0.0003832773 +2025-03-12 01:03:38,488 Train Loss: 0.0154012, Val Loss: 0.0149608 +2025-03-12 01:03:38,489 Epoch 1316/2000 +2025-03-12 01:04:03,378 Current Learning Rate: 0.0003756551 +2025-03-12 01:04:03,379 Train Loss: 0.0153996, Val Loss: 0.0149607 +2025-03-12 01:04:03,380 Epoch 1317/2000 +2025-03-12 01:04:28,361 Current Learning Rate: 0.0003680635 +2025-03-12 01:04:28,361 Train Loss: 0.0153989, Val Loss: 0.0149589 +2025-03-12 01:04:28,361 Epoch 1318/2000 +2025-03-12 01:04:53,045 Current Learning Rate: 0.0003605044 +2025-03-12 01:04:53,047 Train Loss: 0.0153979, Val Loss: 0.0149578 +2025-03-12 01:04:53,047 Epoch 1319/2000 +2025-03-12 01:05:17,794 Current Learning Rate: 0.0003529798 +2025-03-12 01:05:17,794 
Train Loss: 0.0153971, Val Loss: 0.0149576 +2025-03-12 01:05:17,794 Epoch 1320/2000 +2025-03-12 01:05:43,463 Current Learning Rate: 0.0003454915 +2025-03-12 01:05:43,464 Train Loss: 0.0153963, Val Loss: 0.0149586 +2025-03-12 01:05:43,464 Epoch 1321/2000 +2025-03-12 01:06:08,342 Current Learning Rate: 0.0003380413 +2025-03-12 01:06:08,342 Train Loss: 0.0153956, Val Loss: 0.0149610 +2025-03-12 01:06:08,343 Epoch 1322/2000 +2025-03-12 01:06:33,912 Current Learning Rate: 0.0003306310 +2025-03-12 01:06:33,912 Train Loss: 0.0153946, Val Loss: 0.0149623 +2025-03-12 01:06:33,912 Epoch 1323/2000 +2025-03-12 01:06:59,852 Current Learning Rate: 0.0003232626 +2025-03-12 01:06:59,853 Train Loss: 0.0153935, Val Loss: 0.0149611 +2025-03-12 01:06:59,853 Epoch 1324/2000 +2025-03-12 01:07:25,538 Current Learning Rate: 0.0003159377 +2025-03-12 01:07:25,539 Train Loss: 0.0153924, Val Loss: 0.0149593 +2025-03-12 01:07:25,539 Epoch 1325/2000 +2025-03-12 01:07:50,788 Current Learning Rate: 0.0003086583 +2025-03-12 01:07:50,789 Train Loss: 0.0153914, Val Loss: 0.0149575 +2025-03-12 01:07:50,789 Epoch 1326/2000 +2025-03-12 01:08:16,592 Current Learning Rate: 0.0003014261 +2025-03-12 01:08:16,598 Train Loss: 0.0153905, Val Loss: 0.0149555 +2025-03-12 01:08:16,599 Epoch 1327/2000 +2025-03-12 01:08:42,359 Current Learning Rate: 0.0002942428 +2025-03-12 01:08:42,363 Train Loss: 0.0153897, Val Loss: 0.0149536 +2025-03-12 01:08:42,363 Epoch 1328/2000 +2025-03-12 01:09:07,983 Current Learning Rate: 0.0002871104 +2025-03-12 01:09:07,988 Train Loss: 0.0153888, Val Loss: 0.0149514 +2025-03-12 01:09:07,989 Epoch 1329/2000 +2025-03-12 01:09:33,792 Current Learning Rate: 0.0002800304 +2025-03-12 01:09:33,807 Train Loss: 0.0153881, Val Loss: 0.0149492 +2025-03-12 01:09:33,808 Epoch 1330/2000 +2025-03-12 01:09:59,279 Current Learning Rate: 0.0002730048 +2025-03-12 01:09:59,282 Train Loss: 0.0153873, Val Loss: 0.0149469 +2025-03-12 01:09:59,282 Epoch 1331/2000 +2025-03-12 01:10:24,573 Current Learning 
Rate: 0.0002660351 +2025-03-12 01:10:24,576 Train Loss: 0.0153866, Val Loss: 0.0149446 +2025-03-12 01:10:24,576 Epoch 1332/2000 +2025-03-12 01:10:49,879 Current Learning Rate: 0.0002591232 +2025-03-12 01:10:49,882 Train Loss: 0.0153860, Val Loss: 0.0149426 +2025-03-12 01:10:49,883 Epoch 1333/2000 +2025-03-12 01:11:15,484 Current Learning Rate: 0.0002522707 +2025-03-12 01:11:15,487 Train Loss: 0.0153854, Val Loss: 0.0149409 +2025-03-12 01:11:15,488 Epoch 1334/2000 +2025-03-12 01:11:41,004 Current Learning Rate: 0.0002454793 +2025-03-12 01:11:41,009 Train Loss: 0.0153851, Val Loss: 0.0149394 +2025-03-12 01:11:41,009 Epoch 1335/2000 +2025-03-12 01:12:07,030 Current Learning Rate: 0.0002387507 +2025-03-12 01:12:07,034 Train Loss: 0.0153850, Val Loss: 0.0149387 +2025-03-12 01:12:07,034 Epoch 1336/2000 +2025-03-12 01:12:32,384 Current Learning Rate: 0.0002320866 +2025-03-12 01:12:32,385 Train Loss: 0.0153848, Val Loss: 0.0149405 +2025-03-12 01:12:32,385 Epoch 1337/2000 +2025-03-12 01:12:58,102 Current Learning Rate: 0.0002254886 +2025-03-12 01:12:58,105 Train Loss: 0.0153839, Val Loss: 0.0149381 +2025-03-12 01:12:58,105 Epoch 1338/2000 +2025-03-12 01:13:23,221 Current Learning Rate: 0.0002189583 +2025-03-12 01:13:23,222 Train Loss: 0.0153829, Val Loss: 0.0149395 +2025-03-12 01:13:23,222 Epoch 1339/2000 +2025-03-12 01:13:48,203 Current Learning Rate: 0.0002124974 +2025-03-12 01:13:48,203 Train Loss: 0.0153829, Val Loss: 0.0149400 +2025-03-12 01:13:48,204 Epoch 1340/2000 +2025-03-12 01:14:13,222 Current Learning Rate: 0.0002061074 +2025-03-12 01:14:13,225 Train Loss: 0.0153817, Val Loss: 0.0149354 +2025-03-12 01:14:13,225 Epoch 1341/2000 +2025-03-12 01:14:38,390 Current Learning Rate: 0.0001997899 +2025-03-12 01:14:38,395 Train Loss: 0.0153809, Val Loss: 0.0149343 +2025-03-12 01:14:38,395 Epoch 1342/2000 +2025-03-12 01:15:03,615 Current Learning Rate: 0.0001935465 +2025-03-12 01:15:03,620 Train Loss: 0.0153800, Val Loss: 0.0149336 +2025-03-12 01:15:03,620 Epoch 1343/2000 
+2025-03-12 01:15:29,059 Current Learning Rate: 0.0001873787 +2025-03-12 01:15:29,063 Train Loss: 0.0153791, Val Loss: 0.0149330 +2025-03-12 01:15:29,063 Epoch 1344/2000 +2025-03-12 01:15:54,019 Current Learning Rate: 0.0001812880 +2025-03-12 01:15:54,023 Train Loss: 0.0153785, Val Loss: 0.0149325 +2025-03-12 01:15:54,023 Epoch 1345/2000 +2025-03-12 01:16:19,164 Current Learning Rate: 0.0001752760 +2025-03-12 01:16:19,167 Train Loss: 0.0153780, Val Loss: 0.0149320 +2025-03-12 01:16:19,168 Epoch 1346/2000 +2025-03-12 01:16:44,066 Current Learning Rate: 0.0001693441 +2025-03-12 01:16:44,069 Train Loss: 0.0153776, Val Loss: 0.0149317 +2025-03-12 01:16:44,069 Epoch 1347/2000 +2025-03-12 01:17:08,710 Current Learning Rate: 0.0001634937 +2025-03-12 01:17:08,722 Train Loss: 0.0153771, Val Loss: 0.0149314 +2025-03-12 01:17:08,723 Epoch 1348/2000 +2025-03-12 01:17:33,849 Current Learning Rate: 0.0001577264 +2025-03-12 01:17:33,853 Train Loss: 0.0153768, Val Loss: 0.0149312 +2025-03-12 01:17:33,853 Epoch 1349/2000 +2025-03-12 01:17:58,974 Current Learning Rate: 0.0001520436 +2025-03-12 01:17:58,980 Train Loss: 0.0153765, Val Loss: 0.0149305 +2025-03-12 01:17:58,980 Epoch 1350/2000 +2025-03-12 01:18:23,748 Current Learning Rate: 0.0001464466 +2025-03-12 01:18:23,750 Train Loss: 0.0153757, Val Loss: 0.0149301 +2025-03-12 01:18:23,751 Epoch 1351/2000 +2025-03-12 01:18:49,005 Current Learning Rate: 0.0001409369 +2025-03-12 01:18:49,008 Train Loss: 0.0153752, Val Loss: 0.0149294 +2025-03-12 01:18:49,008 Epoch 1352/2000 +2025-03-12 01:19:14,296 Current Learning Rate: 0.0001355157 +2025-03-12 01:19:14,300 Train Loss: 0.0153748, Val Loss: 0.0149293 +2025-03-12 01:19:14,300 Epoch 1353/2000 +2025-03-12 01:19:39,692 Current Learning Rate: 0.0001301845 +2025-03-12 01:19:39,692 Train Loss: 0.0153745, Val Loss: 0.0149300 +2025-03-12 01:19:39,693 Epoch 1354/2000 +2025-03-12 01:20:04,611 Current Learning Rate: 0.0001249445 +2025-03-12 01:20:04,611 Train Loss: 0.0153743, Val Loss: 0.0149314 
+2025-03-12 01:20:04,611 Epoch 1355/2000 +2025-03-12 01:20:30,023 Current Learning Rate: 0.0001197970 +2025-03-12 01:20:30,027 Train Loss: 0.0153738, Val Loss: 0.0149291 +2025-03-12 01:20:30,027 Epoch 1356/2000 +2025-03-12 01:20:55,077 Current Learning Rate: 0.0001147434 +2025-03-12 01:20:55,081 Train Loss: 0.0153729, Val Loss: 0.0149275 +2025-03-12 01:20:55,081 Epoch 1357/2000 +2025-03-12 01:21:20,288 Current Learning Rate: 0.0001097848 +2025-03-12 01:21:20,291 Train Loss: 0.0153723, Val Loss: 0.0149268 +2025-03-12 01:21:20,292 Epoch 1358/2000 +2025-03-12 01:21:46,160 Current Learning Rate: 0.0001049225 +2025-03-12 01:21:46,176 Train Loss: 0.0153718, Val Loss: 0.0149264 +2025-03-12 01:21:46,176 Epoch 1359/2000 +2025-03-12 01:22:12,002 Current Learning Rate: 0.0001001577 +2025-03-12 01:22:12,006 Train Loss: 0.0153714, Val Loss: 0.0149260 +2025-03-12 01:22:12,006 Epoch 1360/2000 +2025-03-12 01:22:37,989 Current Learning Rate: 0.0000954915 +2025-03-12 01:22:37,994 Train Loss: 0.0153710, Val Loss: 0.0149257 +2025-03-12 01:22:37,994 Epoch 1361/2000 +2025-03-12 01:23:04,178 Current Learning Rate: 0.0000909251 +2025-03-12 01:23:04,181 Train Loss: 0.0153707, Val Loss: 0.0149255 +2025-03-12 01:23:04,182 Epoch 1362/2000 +2025-03-12 01:23:29,731 Current Learning Rate: 0.0000864597 +2025-03-12 01:23:29,735 Train Loss: 0.0153704, Val Loss: 0.0149252 +2025-03-12 01:23:29,735 Epoch 1363/2000 +2025-03-12 01:23:55,431 Current Learning Rate: 0.0000820963 +2025-03-12 01:23:55,435 Train Loss: 0.0153700, Val Loss: 0.0149249 +2025-03-12 01:23:55,436 Epoch 1364/2000 +2025-03-12 01:24:21,283 Current Learning Rate: 0.0000778360 +2025-03-12 01:24:21,287 Train Loss: 0.0153697, Val Loss: 0.0149248 +2025-03-12 01:24:21,287 Epoch 1365/2000 +2025-03-12 01:24:47,152 Current Learning Rate: 0.0000736799 +2025-03-12 01:24:47,156 Train Loss: 0.0153693, Val Loss: 0.0149245 +2025-03-12 01:24:47,156 Epoch 1366/2000 +2025-03-12 01:25:13,033 Current Learning Rate: 0.0000696290 +2025-03-12 01:25:13,040 
Train Loss: 0.0153690, Val Loss: 0.0149243 +2025-03-12 01:25:13,041 Epoch 1367/2000 +2025-03-12 01:25:38,706 Current Learning Rate: 0.0000656842 +2025-03-12 01:25:38,711 Train Loss: 0.0153687, Val Loss: 0.0149239 +2025-03-12 01:25:38,711 Epoch 1368/2000 +2025-03-12 01:26:04,456 Current Learning Rate: 0.0000618467 +2025-03-12 01:26:04,460 Train Loss: 0.0153684, Val Loss: 0.0149235 +2025-03-12 01:26:04,461 Epoch 1369/2000 +2025-03-12 01:26:30,360 Current Learning Rate: 0.0000581172 +2025-03-12 01:26:30,363 Train Loss: 0.0153682, Val Loss: 0.0149230 +2025-03-12 01:26:30,363 Epoch 1370/2000 +2025-03-12 01:26:56,154 Current Learning Rate: 0.0000544967 +2025-03-12 01:26:56,157 Train Loss: 0.0153678, Val Loss: 0.0149228 +2025-03-12 01:26:56,157 Epoch 1371/2000 +2025-03-12 01:27:21,956 Current Learning Rate: 0.0000509862 +2025-03-12 01:27:21,960 Train Loss: 0.0153675, Val Loss: 0.0149225 +2025-03-12 01:27:21,960 Epoch 1372/2000 +2025-03-12 01:27:47,168 Current Learning Rate: 0.0000475865 +2025-03-12 01:27:47,172 Train Loss: 0.0153672, Val Loss: 0.0149223 +2025-03-12 01:27:47,172 Epoch 1373/2000 +2025-03-12 01:28:12,696 Current Learning Rate: 0.0000442984 +2025-03-12 01:28:12,702 Train Loss: 0.0153669, Val Loss: 0.0149221 +2025-03-12 01:28:12,702 Epoch 1374/2000 +2025-03-12 01:28:37,583 Current Learning Rate: 0.0000411227 +2025-03-12 01:28:37,586 Train Loss: 0.0153667, Val Loss: 0.0149219 +2025-03-12 01:28:37,586 Epoch 1375/2000 +2025-03-12 01:29:02,728 Current Learning Rate: 0.0000380602 +2025-03-12 01:29:02,729 Train Loss: 0.0153665, Val Loss: 0.0149219 +2025-03-12 01:29:02,729 Epoch 1376/2000 +2025-03-12 01:29:28,075 Current Learning Rate: 0.0000351118 +2025-03-12 01:29:28,076 Train Loss: 0.0153663, Val Loss: 0.0149220 +2025-03-12 01:29:28,076 Epoch 1377/2000 +2025-03-12 01:29:53,640 Current Learning Rate: 0.0000322780 +2025-03-12 01:29:53,643 Train Loss: 0.0153661, Val Loss: 0.0149217 +2025-03-12 01:29:53,643 Epoch 1378/2000 +2025-03-12 01:30:18,967 Current Learning 
Rate: 0.0000295596 +2025-03-12 01:30:18,970 Train Loss: 0.0153658, Val Loss: 0.0149213 +2025-03-12 01:30:18,971 Epoch 1379/2000 +2025-03-12 01:30:44,346 Current Learning Rate: 0.0000269573 +2025-03-12 01:30:44,352 Train Loss: 0.0153656, Val Loss: 0.0149211 +2025-03-12 01:30:44,352 Epoch 1380/2000 +2025-03-12 01:31:09,410 Current Learning Rate: 0.0000244717 +2025-03-12 01:31:09,417 Train Loss: 0.0153654, Val Loss: 0.0149210 +2025-03-12 01:31:09,417 Epoch 1381/2000 +2025-03-12 01:31:34,859 Current Learning Rate: 0.0000221035 +2025-03-12 01:31:34,860 Train Loss: 0.0153653, Val Loss: 0.0149210 +2025-03-12 01:31:34,860 Epoch 1382/2000 +2025-03-12 01:32:00,260 Current Learning Rate: 0.0000198532 +2025-03-12 01:32:00,260 Train Loss: 0.0153652, Val Loss: 0.0149210 +2025-03-12 01:32:00,260 Epoch 1383/2000 +2025-03-12 01:32:25,716 Current Learning Rate: 0.0000177213 +2025-03-12 01:32:25,720 Train Loss: 0.0153650, Val Loss: 0.0149209 +2025-03-12 01:32:25,720 Epoch 1384/2000 +2025-03-12 01:32:50,882 Current Learning Rate: 0.0000157084 +2025-03-12 01:32:50,886 Train Loss: 0.0153649, Val Loss: 0.0149207 +2025-03-12 01:32:50,886 Epoch 1385/2000 +2025-03-12 01:33:16,248 Current Learning Rate: 0.0000138150 +2025-03-12 01:33:16,259 Train Loss: 0.0153647, Val Loss: 0.0149205 +2025-03-12 01:33:16,259 Epoch 1386/2000 +2025-03-12 01:33:41,360 Current Learning Rate: 0.0000120416 +2025-03-12 01:33:41,364 Train Loss: 0.0153646, Val Loss: 0.0149203 +2025-03-12 01:33:41,364 Epoch 1387/2000 +2025-03-12 01:34:07,591 Current Learning Rate: 0.0000103886 +2025-03-12 01:34:07,595 Train Loss: 0.0153645, Val Loss: 0.0149203 +2025-03-12 01:34:07,595 Epoch 1388/2000 +2025-03-12 01:34:32,641 Current Learning Rate: 0.0000088564 +2025-03-12 01:34:32,645 Train Loss: 0.0153644, Val Loss: 0.0149202 +2025-03-12 01:34:32,645 Epoch 1389/2000 +2025-03-12 01:34:57,953 Current Learning Rate: 0.0000074453 +2025-03-12 01:34:57,953 Train Loss: 0.0153643, Val Loss: 0.0149202 +2025-03-12 01:34:57,954 Epoch 1390/2000 
+2025-03-12 01:35:23,013 Current Learning Rate: 0.0000061558 +2025-03-12 01:35:23,018 Train Loss: 0.0153643, Val Loss: 0.0149202 +2025-03-12 01:35:23,018 Epoch 1391/2000 +2025-03-12 01:35:48,413 Current Learning Rate: 0.0000049882 +2025-03-12 01:35:48,417 Train Loss: 0.0153642, Val Loss: 0.0149202 +2025-03-12 01:35:48,417 Epoch 1392/2000 +2025-03-12 01:36:14,038 Current Learning Rate: 0.0000039426 +2025-03-12 01:36:14,041 Train Loss: 0.0153642, Val Loss: 0.0149201 +2025-03-12 01:36:14,042 Epoch 1393/2000 +2025-03-12 01:36:38,841 Current Learning Rate: 0.0000030195 +2025-03-12 01:36:38,844 Train Loss: 0.0153641, Val Loss: 0.0149201 +2025-03-12 01:36:38,844 Epoch 1394/2000 +2025-03-12 01:37:03,737 Current Learning Rate: 0.0000022190 +2025-03-12 01:37:03,741 Train Loss: 0.0153640, Val Loss: 0.0149200 +2025-03-12 01:37:03,741 Epoch 1395/2000 +2025-03-12 01:37:28,644 Current Learning Rate: 0.0000015413 +2025-03-12 01:37:28,648 Train Loss: 0.0153640, Val Loss: 0.0149200 +2025-03-12 01:37:28,648 Epoch 1396/2000 +2025-03-12 01:37:53,772 Current Learning Rate: 0.0000009866 +2025-03-12 01:37:53,776 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:37:53,777 Epoch 1397/2000 +2025-03-12 01:38:18,868 Current Learning Rate: 0.0000005551 +2025-03-12 01:38:18,872 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:38:18,872 Epoch 1398/2000 +2025-03-12 01:38:44,258 Current Learning Rate: 0.0000002467 +2025-03-12 01:38:44,261 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:38:44,262 Epoch 1399/2000 +2025-03-12 01:39:09,883 Current Learning Rate: 0.0000000617 +2025-03-12 01:39:09,884 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:39:09,884 Epoch 1400/2000 +2025-03-12 01:39:35,184 Current Learning Rate: 0.0000000000 +2025-03-12 01:39:35,185 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:39:35,185 Epoch 1401/2000 +2025-03-12 01:40:00,282 Current Learning Rate: 0.0000000617 +2025-03-12 01:40:00,283 Train Loss: 0.0153639, Val Loss: 0.0149199 
+2025-03-12 01:40:00,283 Epoch 1402/2000 +2025-03-12 01:40:25,934 Current Learning Rate: 0.0000002467 +2025-03-12 01:40:25,935 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:40:25,935 Epoch 1403/2000 +2025-03-12 01:40:52,003 Current Learning Rate: 0.0000005551 +2025-03-12 01:40:52,008 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:40:52,008 Epoch 1404/2000 +2025-03-12 01:41:17,357 Current Learning Rate: 0.0000009866 +2025-03-12 01:41:17,361 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:41:17,361 Epoch 1405/2000 +2025-03-12 01:41:43,269 Current Learning Rate: 0.0000015413 +2025-03-12 01:41:43,270 Train Loss: 0.0153640, Val Loss: 0.0149199 +2025-03-12 01:41:43,270 Epoch 1406/2000 +2025-03-12 01:42:08,800 Current Learning Rate: 0.0000022190 +2025-03-12 01:42:08,800 Train Loss: 0.0153639, Val Loss: 0.0149199 +2025-03-12 01:42:08,800 Epoch 1407/2000 +2025-03-12 01:42:34,612 Current Learning Rate: 0.0000030195 +2025-03-12 01:42:34,613 Train Loss: 0.0153639, Val Loss: 0.0149199 +2025-03-12 01:42:34,613 Epoch 1408/2000 +2025-03-12 01:43:00,865 Current Learning Rate: 0.0000039426 +2025-03-12 01:43:00,865 Train Loss: 0.0153639, Val Loss: 0.0149199 +2025-03-12 01:43:00,866 Epoch 1409/2000 +2025-03-12 01:43:25,838 Current Learning Rate: 0.0000049882 +2025-03-12 01:43:25,838 Train Loss: 0.0153639, Val Loss: 0.0149199 +2025-03-12 01:43:25,839 Epoch 1410/2000 +2025-03-12 01:43:51,290 Current Learning Rate: 0.0000061558 +2025-03-12 01:43:51,291 Train Loss: 0.0153639, Val Loss: 0.0149199 +2025-03-12 01:43:51,291 Epoch 1411/2000 +2025-03-12 01:44:16,892 Current Learning Rate: 0.0000074453 +2025-03-12 01:44:16,896 Train Loss: 0.0153639, Val Loss: 0.0149198 +2025-03-12 01:44:16,897 Epoch 1412/2000 +2025-03-12 01:44:41,925 Current Learning Rate: 0.0000088564 +2025-03-12 01:44:41,929 Train Loss: 0.0153638, Val Loss: 0.0149197 +2025-03-12 01:44:41,929 Epoch 1413/2000 +2025-03-12 01:45:07,330 Current Learning Rate: 0.0000103886 +2025-03-12 01:45:07,330 
Train Loss: 0.0153638, Val Loss: 0.0149197 +2025-03-12 01:45:07,331 Epoch 1414/2000 +2025-03-12 01:45:32,246 Current Learning Rate: 0.0000120416 +2025-03-12 01:45:32,247 Train Loss: 0.0153638, Val Loss: 0.0149197 +2025-03-12 01:45:32,247 Epoch 1415/2000 +2025-03-12 01:45:57,495 Current Learning Rate: 0.0000138150 +2025-03-12 01:45:57,499 Train Loss: 0.0153637, Val Loss: 0.0149194 +2025-03-12 01:45:57,499 Epoch 1416/2000 +2025-03-12 01:46:22,909 Current Learning Rate: 0.0000157084 +2025-03-12 01:46:22,913 Train Loss: 0.0153636, Val Loss: 0.0149191 +2025-03-12 01:46:22,914 Epoch 1417/2000 +2025-03-12 01:46:48,244 Current Learning Rate: 0.0000177213 +2025-03-12 01:46:48,245 Train Loss: 0.0153634, Val Loss: 0.0149191 +2025-03-12 01:46:48,245 Epoch 1418/2000 +2025-03-12 01:47:13,985 Current Learning Rate: 0.0000198532 +2025-03-12 01:47:13,988 Train Loss: 0.0153633, Val Loss: 0.0149189 +2025-03-12 01:47:13,989 Epoch 1419/2000 +2025-03-12 01:47:38,977 Current Learning Rate: 0.0000221035 +2025-03-12 01:47:38,977 Train Loss: 0.0153631, Val Loss: 0.0149190 +2025-03-12 01:47:38,977 Epoch 1420/2000 +2025-03-12 01:48:03,638 Current Learning Rate: 0.0000244717 +2025-03-12 01:48:03,641 Train Loss: 0.0153631, Val Loss: 0.0149187 +2025-03-12 01:48:03,641 Epoch 1421/2000 +2025-03-12 01:48:28,920 Current Learning Rate: 0.0000269573 +2025-03-12 01:48:28,923 Train Loss: 0.0153630, Val Loss: 0.0149185 +2025-03-12 01:48:28,924 Epoch 1422/2000 +2025-03-12 01:48:54,375 Current Learning Rate: 0.0000295596 +2025-03-12 01:48:54,379 Train Loss: 0.0153628, Val Loss: 0.0149184 +2025-03-12 01:48:54,379 Epoch 1423/2000 +2025-03-12 01:49:18,922 Current Learning Rate: 0.0000322780 +2025-03-12 01:49:18,928 Train Loss: 0.0153627, Val Loss: 0.0149179 +2025-03-12 01:49:18,928 Epoch 1424/2000 +2025-03-12 01:49:43,987 Current Learning Rate: 0.0000351118 +2025-03-12 01:49:43,992 Train Loss: 0.0153624, Val Loss: 0.0149174 +2025-03-12 01:49:43,993 Epoch 1425/2000 +2025-03-12 01:50:08,894 Current Learning 
Rate: 0.0000380602 +2025-03-12 01:50:08,898 Train Loss: 0.0153621, Val Loss: 0.0149169 +2025-03-12 01:50:08,898 Epoch 1426/2000 +2025-03-12 01:50:33,626 Current Learning Rate: 0.0000411227 +2025-03-12 01:50:33,630 Train Loss: 0.0153620, Val Loss: 0.0149165 +2025-03-12 01:50:33,631 Epoch 1427/2000 +2025-03-12 01:50:58,746 Current Learning Rate: 0.0000442984 +2025-03-12 01:50:58,750 Train Loss: 0.0153618, Val Loss: 0.0149164 +2025-03-12 01:50:58,750 Epoch 1428/2000 +2025-03-12 01:51:23,835 Current Learning Rate: 0.0000475865 +2025-03-12 01:51:23,840 Train Loss: 0.0153617, Val Loss: 0.0149163 +2025-03-12 01:51:23,841 Epoch 1429/2000 +2025-03-12 01:51:49,183 Current Learning Rate: 0.0000509862 +2025-03-12 01:51:49,186 Train Loss: 0.0153617, Val Loss: 0.0149160 +2025-03-12 01:51:49,186 Epoch 1430/2000 +2025-03-12 01:52:14,342 Current Learning Rate: 0.0000544967 +2025-03-12 01:52:14,346 Train Loss: 0.0153613, Val Loss: 0.0149159 +2025-03-12 01:52:14,347 Epoch 1431/2000 +2025-03-12 01:52:40,207 Current Learning Rate: 0.0000581172 +2025-03-12 01:52:40,212 Train Loss: 0.0153612, Val Loss: 0.0149158 +2025-03-12 01:52:40,213 Epoch 1432/2000 +2025-03-12 01:53:05,475 Current Learning Rate: 0.0000618467 +2025-03-12 01:53:05,476 Train Loss: 0.0153609, Val Loss: 0.0149158 +2025-03-12 01:53:05,476 Epoch 1433/2000 +2025-03-12 01:53:30,850 Current Learning Rate: 0.0000656842 +2025-03-12 01:53:30,850 Train Loss: 0.0153608, Val Loss: 0.0149159 +2025-03-12 01:53:30,851 Epoch 1434/2000 +2025-03-12 01:53:56,333 Current Learning Rate: 0.0000696290 +2025-03-12 01:53:56,333 Train Loss: 0.0153605, Val Loss: 0.0149160 +2025-03-12 01:53:56,334 Epoch 1435/2000 +2025-03-12 01:54:21,984 Current Learning Rate: 0.0000736799 +2025-03-12 01:54:21,984 Train Loss: 0.0153604, Val Loss: 0.0149161 +2025-03-12 01:54:21,985 Epoch 1436/2000 +2025-03-12 01:54:47,944 Current Learning Rate: 0.0000778360 +2025-03-12 01:54:47,944 Train Loss: 0.0153601, Val Loss: 0.0149161 +2025-03-12 01:54:47,944 Epoch 1437/2000 
+2025-03-12 01:55:13,116 Current Learning Rate: 0.0000820963 +2025-03-12 01:55:13,117 Train Loss: 0.0153600, Val Loss: 0.0149160 +2025-03-12 01:55:13,118 Epoch 1438/2000 +2025-03-12 01:55:38,161 Current Learning Rate: 0.0000864597 +2025-03-12 01:55:38,161 Train Loss: 0.0153597, Val Loss: 0.0149158 +2025-03-12 01:55:38,161 Epoch 1439/2000 +2025-03-12 01:56:03,563 Current Learning Rate: 0.0000909251 +2025-03-12 01:56:03,567 Train Loss: 0.0153595, Val Loss: 0.0149155 +2025-03-12 01:56:03,567 Epoch 1440/2000 +2025-03-12 01:56:29,002 Current Learning Rate: 0.0000954915 +2025-03-12 01:56:29,006 Train Loss: 0.0153592, Val Loss: 0.0149151 +2025-03-12 01:56:29,006 Epoch 1441/2000 +2025-03-12 01:56:54,867 Current Learning Rate: 0.0001001577 +2025-03-12 01:56:54,870 Train Loss: 0.0153590, Val Loss: 0.0149137 +2025-03-12 01:56:54,871 Epoch 1442/2000 +2025-03-12 01:57:20,542 Current Learning Rate: 0.0001049225 +2025-03-12 01:57:20,546 Train Loss: 0.0153587, Val Loss: 0.0149132 +2025-03-12 01:57:20,546 Epoch 1443/2000 +2025-03-12 01:57:46,088 Current Learning Rate: 0.0001097848 +2025-03-12 01:57:46,092 Train Loss: 0.0153588, Val Loss: 0.0149118 +2025-03-12 01:57:46,093 Epoch 1444/2000 +2025-03-12 01:58:11,595 Current Learning Rate: 0.0001147434 +2025-03-12 01:58:11,599 Train Loss: 0.0153585, Val Loss: 0.0149115 +2025-03-12 01:58:11,599 Epoch 1445/2000 +2025-03-12 01:58:36,701 Current Learning Rate: 0.0001197970 +2025-03-12 01:58:36,705 Train Loss: 0.0153579, Val Loss: 0.0149114 +2025-03-12 01:58:36,706 Epoch 1446/2000 +2025-03-12 01:59:01,945 Current Learning Rate: 0.0001249445 +2025-03-12 01:59:01,976 Train Loss: 0.0153572, Val Loss: 0.0149112 +2025-03-12 01:59:01,977 Epoch 1447/2000 +2025-03-12 01:59:26,844 Current Learning Rate: 0.0001301845 +2025-03-12 01:59:26,847 Train Loss: 0.0153566, Val Loss: 0.0149088 +2025-03-12 01:59:26,848 Epoch 1448/2000 +2025-03-12 01:59:51,463 Current Learning Rate: 0.0001355157 +2025-03-12 01:59:51,464 Train Loss: 0.0153559, Val Loss: 0.0149178 
+2025-03-12 01:59:51,464 Epoch 1449/2000 +2025-03-12 02:00:17,089 Current Learning Rate: 0.0001409369 +2025-03-12 02:00:17,091 Train Loss: 0.0153566, Val Loss: 0.0149214 +2025-03-12 02:00:17,091 Epoch 1450/2000 +2025-03-12 02:00:42,427 Current Learning Rate: 0.0001464466 +2025-03-12 02:00:42,427 Train Loss: 0.0153564, Val Loss: 0.0149210 +2025-03-12 02:00:42,428 Epoch 1451/2000 +2025-03-12 02:01:07,133 Current Learning Rate: 0.0001520436 +2025-03-12 02:01:07,133 Train Loss: 0.0153562, Val Loss: 0.0149294 +2025-03-12 02:01:07,134 Epoch 1452/2000 +2025-03-12 02:01:32,193 Current Learning Rate: 0.0001577264 +2025-03-12 02:01:32,193 Train Loss: 0.0153563, Val Loss: 0.0149198 +2025-03-12 02:01:32,193 Epoch 1453/2000 +2025-03-12 02:01:56,807 Current Learning Rate: 0.0001634937 +2025-03-12 02:01:56,808 Train Loss: 0.0153568, Val Loss: 0.0149162 +2025-03-12 02:01:56,808 Epoch 1454/2000 +2025-03-12 02:02:21,714 Current Learning Rate: 0.0001693441 +2025-03-12 02:02:21,714 Train Loss: 0.0153560, Val Loss: 0.0149343 +2025-03-12 02:02:21,714 Epoch 1455/2000 +2025-03-12 02:02:47,693 Current Learning Rate: 0.0001752760 +2025-03-12 02:02:47,693 Train Loss: 0.0153568, Val Loss: 0.0149135 +2025-03-12 02:02:47,694 Epoch 1456/2000 +2025-03-12 02:03:12,997 Current Learning Rate: 0.0001812880 +2025-03-12 02:03:12,998 Train Loss: 0.0153550, Val Loss: 0.0149254 +2025-03-12 02:03:12,998 Epoch 1457/2000 +2025-03-12 02:03:38,718 Current Learning Rate: 0.0001873787 +2025-03-12 02:03:38,719 Train Loss: 0.0153568, Val Loss: 0.0149096 +2025-03-12 02:03:38,719 Epoch 1458/2000 +2025-03-12 02:04:03,749 Current Learning Rate: 0.0001935465 +2025-03-12 02:04:03,749 Train Loss: 0.0153538, Val Loss: 0.0149270 +2025-03-12 02:04:03,749 Epoch 1459/2000 +2025-03-12 02:04:29,542 Current Learning Rate: 0.0001997899 +2025-03-12 02:04:29,545 Train Loss: 0.0153563, Val Loss: 0.0149071 +2025-03-12 02:04:29,545 Epoch 1460/2000 +2025-03-12 02:04:54,530 Current Learning Rate: 0.0002061074 +2025-03-12 02:04:54,531 
Train Loss: 0.0153539, Val Loss: 0.0149152 +2025-03-12 02:04:54,531 Epoch 1461/2000 +2025-03-12 02:05:20,402 Current Learning Rate: 0.0002124974 +2025-03-12 02:05:20,403 Train Loss: 0.0153531, Val Loss: 0.0149218 +2025-03-12 02:05:20,403 Epoch 1462/2000 +2025-03-12 02:05:45,873 Current Learning Rate: 0.0002189583 +2025-03-12 02:05:45,876 Train Loss: 0.0153616, Val Loss: 0.0149065 +2025-03-12 02:05:45,877 Epoch 1463/2000 +2025-03-12 02:06:10,895 Current Learning Rate: 0.0002254886 +2025-03-12 02:06:10,899 Train Loss: 0.0153501, Val Loss: 0.0149063 +2025-03-12 02:06:10,899 Epoch 1464/2000 +2025-03-12 02:06:36,478 Current Learning Rate: 0.0002320866 +2025-03-12 02:06:36,479 Train Loss: 0.0153548, Val Loss: 0.0149077 +2025-03-12 02:06:36,479 Epoch 1465/2000 +2025-03-12 02:07:01,850 Current Learning Rate: 0.0002387507 +2025-03-12 02:07:01,851 Train Loss: 0.0153534, Val Loss: 0.0149126 +2025-03-12 02:07:01,851 Epoch 1466/2000 +2025-03-12 02:07:26,954 Current Learning Rate: 0.0002454793 +2025-03-12 02:07:26,954 Train Loss: 0.0153513, Val Loss: 0.0149219 +2025-03-12 02:07:26,954 Epoch 1467/2000 +2025-03-12 02:07:52,274 Current Learning Rate: 0.0002522707 +2025-03-12 02:07:52,278 Train Loss: 0.0153671, Val Loss: 0.0149004 +2025-03-12 02:07:52,278 Epoch 1468/2000 +2025-03-12 02:08:18,052 Current Learning Rate: 0.0002591232 +2025-03-12 02:08:18,053 Train Loss: 0.0153473, Val Loss: 0.0149059 +2025-03-12 02:08:18,053 Epoch 1469/2000 +2025-03-12 02:08:43,521 Current Learning Rate: 0.0002660351 +2025-03-12 02:08:43,522 Train Loss: 0.0153525, Val Loss: 0.0149212 +2025-03-12 02:08:43,522 Epoch 1470/2000 +2025-03-12 02:09:09,627 Current Learning Rate: 0.0002730048 +2025-03-12 02:09:09,631 Train Loss: 0.0153608, Val Loss: 0.0148982 +2025-03-12 02:09:09,632 Epoch 1471/2000 +2025-03-12 02:09:35,244 Current Learning Rate: 0.0002800304 +2025-03-12 02:09:35,245 Train Loss: 0.0153477, Val Loss: 0.0149200 +2025-03-12 02:09:35,245 Epoch 1472/2000 +2025-03-12 02:10:01,150 Current Learning 
Rate: 0.0002871104 +2025-03-12 02:10:01,152 Train Loss: 0.0153634, Val Loss: 0.0148971 +2025-03-12 02:10:01,152 Epoch 1473/2000 +2025-03-12 02:10:27,015 Current Learning Rate: 0.0002942428 +2025-03-12 02:10:27,015 Train Loss: 0.0153467, Val Loss: 0.0149265 +2025-03-12 02:10:27,016 Epoch 1474/2000 +2025-03-12 02:10:52,013 Current Learning Rate: 0.0003014261 +2025-03-12 02:10:52,013 Train Loss: 0.0153633, Val Loss: 0.0148974 +2025-03-12 02:10:52,013 Epoch 1475/2000 +2025-03-12 02:11:17,145 Current Learning Rate: 0.0003086583 +2025-03-12 02:11:17,146 Train Loss: 0.0153476, Val Loss: 0.0149503 +2025-03-12 02:11:17,146 Epoch 1476/2000 +2025-03-12 02:11:41,992 Current Learning Rate: 0.0003159377 +2025-03-12 02:11:41,993 Train Loss: 0.0153610, Val Loss: 0.0148997 +2025-03-12 02:11:41,993 Epoch 1477/2000 +2025-03-12 02:12:07,189 Current Learning Rate: 0.0003232626 +2025-03-12 02:12:07,189 Train Loss: 0.0153474, Val Loss: 0.0149057 +2025-03-12 02:12:07,189 Epoch 1478/2000 +2025-03-12 02:12:32,183 Current Learning Rate: 0.0003306310 +2025-03-12 02:12:32,184 Train Loss: 0.0153766, Val Loss: 0.0148976 +2025-03-12 02:12:32,184 Epoch 1479/2000 +2025-03-12 02:12:57,182 Current Learning Rate: 0.0003380413 +2025-03-12 02:12:57,182 Train Loss: 0.0153438, Val Loss: 0.0149029 +2025-03-12 02:12:57,183 Epoch 1480/2000 +2025-03-12 02:13:22,935 Current Learning Rate: 0.0003454915 +2025-03-12 02:13:22,936 Train Loss: 0.0153493, Val Loss: 0.0149016 +2025-03-12 02:13:22,936 Epoch 1481/2000 +2025-03-12 02:13:48,020 Current Learning Rate: 0.0003529798 +2025-03-12 02:13:48,021 Train Loss: 0.0153604, Val Loss: 0.0149074 +2025-03-12 02:13:48,021 Epoch 1482/2000 +2025-03-12 02:14:12,881 Current Learning Rate: 0.0003605044 +2025-03-12 02:14:12,882 Train Loss: 0.0153495, Val Loss: 0.0149128 +2025-03-12 02:14:12,882 Epoch 1483/2000 +2025-03-12 02:14:38,201 Current Learning Rate: 0.0003680635 +2025-03-12 02:14:38,204 Train Loss: 0.0153728, Val Loss: 0.0148917 +2025-03-12 02:14:38,204 Epoch 1484/2000 
+2025-03-12 02:15:03,381 Current Learning Rate: 0.0003756551 +2025-03-12 02:15:03,381 Train Loss: 0.0153418, Val Loss: 0.0148999 +2025-03-12 02:15:03,381 Epoch 1485/2000 +2025-03-12 02:15:28,831 Current Learning Rate: 0.0003832773 +2025-03-12 02:15:28,831 Train Loss: 0.0153665, Val Loss: 0.0148959 +2025-03-12 02:15:28,831 Epoch 1486/2000 +2025-03-12 02:15:53,735 Current Learning Rate: 0.0003909284 +2025-03-12 02:15:53,735 Train Loss: 0.0153465, Val Loss: 0.0149062 +2025-03-12 02:15:53,735 Epoch 1487/2000 +2025-03-12 02:16:18,669 Current Learning Rate: 0.0003986064 +2025-03-12 02:16:18,670 Train Loss: 0.0153681, Val Loss: 0.0148997 +2025-03-12 02:16:18,670 Epoch 1488/2000 +2025-03-12 02:16:43,648 Current Learning Rate: 0.0004063093 +2025-03-12 02:16:43,652 Train Loss: 0.0153487, Val Loss: 0.0148890 +2025-03-12 02:16:43,652 Epoch 1489/2000 +2025-03-12 02:17:08,341 Current Learning Rate: 0.0004140354 +2025-03-12 02:17:08,342 Train Loss: 0.0153636, Val Loss: 0.0149103 +2025-03-12 02:17:08,342 Epoch 1490/2000 +2025-03-12 02:17:33,918 Current Learning Rate: 0.0004217828 +2025-03-12 02:17:33,919 Train Loss: 0.0153481, Val Loss: 0.0149142 +2025-03-12 02:17:33,919 Epoch 1491/2000 +2025-03-12 02:17:59,287 Current Learning Rate: 0.0004295494 +2025-03-12 02:17:59,288 Train Loss: 0.0153624, Val Loss: 0.0149948 +2025-03-12 02:17:59,288 Epoch 1492/2000 +2025-03-12 02:18:24,043 Current Learning Rate: 0.0004373334 +2025-03-12 02:18:24,044 Train Loss: 0.0153545, Val Loss: 0.0148906 +2025-03-12 02:18:24,044 Epoch 1493/2000 +2025-03-12 02:18:49,244 Current Learning Rate: 0.0004451328 +2025-03-12 02:18:49,244 Train Loss: 0.0153719, Val Loss: 0.0149020 +2025-03-12 02:18:49,245 Epoch 1494/2000 +2025-03-12 02:19:14,694 Current Learning Rate: 0.0004529458 +2025-03-12 02:19:14,700 Train Loss: 0.0153458, Val Loss: 0.0148859 +2025-03-12 02:19:14,700 Epoch 1495/2000 +2025-03-12 02:19:40,371 Current Learning Rate: 0.0004607705 +2025-03-12 02:19:40,378 Train Loss: 0.0153862, Val Loss: 0.0148851 
+2025-03-12 02:19:40,378 Epoch 1496/2000 +2025-03-12 02:20:05,123 Current Learning Rate: 0.0004686047 +2025-03-12 02:20:05,124 Train Loss: 0.0153461, Val Loss: 0.0148936 +2025-03-12 02:20:05,124 Epoch 1497/2000 +2025-03-12 02:20:30,654 Current Learning Rate: 0.0004764468 +2025-03-12 02:20:30,655 Train Loss: 0.0153625, Val Loss: 0.0149387 +2025-03-12 02:20:30,655 Epoch 1498/2000 +2025-03-12 02:20:56,763 Current Learning Rate: 0.0004842946 +2025-03-12 02:20:56,763 Train Loss: 0.0153427, Val Loss: 0.0148869 +2025-03-12 02:20:56,764 Epoch 1499/2000 +2025-03-12 02:21:22,152 Current Learning Rate: 0.0004921463 +2025-03-12 02:21:22,156 Train Loss: 0.0154086, Val Loss: 0.0148847 +2025-03-12 02:21:22,156 Epoch 1500/2000 +2025-03-12 02:21:46,948 Current Learning Rate: 0.0005000000 +2025-03-12 02:21:46,949 Train Loss: 0.0153417, Val Loss: 0.0149017 +2025-03-12 02:21:46,949 Epoch 1501/2000 +2025-03-12 02:22:12,040 Current Learning Rate: 0.0005078537 +2025-03-12 02:22:12,040 Train Loss: 0.0153460, Val Loss: 0.0148915 +2025-03-12 02:22:12,041 Epoch 1502/2000 +2025-03-12 02:22:37,330 Current Learning Rate: 0.0005157054 +2025-03-12 02:22:37,330 Train Loss: 0.0153707, Val Loss: 0.0149003 +2025-03-12 02:22:37,330 Epoch 1503/2000 +2025-03-12 02:23:02,340 Current Learning Rate: 0.0005235532 +2025-03-12 02:23:02,341 Train Loss: 0.0153470, Val Loss: 0.0148870 +2025-03-12 02:23:02,341 Epoch 1504/2000 +2025-03-12 02:23:27,433 Current Learning Rate: 0.0005313953 +2025-03-12 02:23:27,434 Train Loss: 0.0154008, Val Loss: 0.0148981 +2025-03-12 02:23:27,434 Epoch 1505/2000 +2025-03-12 02:23:52,632 Current Learning Rate: 0.0005392295 +2025-03-12 02:23:52,636 Train Loss: 0.0153428, Val Loss: 0.0148802 +2025-03-12 02:23:52,636 Epoch 1506/2000 +2025-03-12 02:24:17,823 Current Learning Rate: 0.0005470542 +2025-03-12 02:24:17,824 Train Loss: 0.0153607, Val Loss: 0.0149111 +2025-03-12 02:24:17,824 Epoch 1507/2000 +2025-03-12 02:24:42,942 Current Learning Rate: 0.0005548672 +2025-03-12 02:24:42,945 
Train Loss: 0.0153614, Val Loss: 0.0148796 +2025-03-12 02:24:42,945 Epoch 1508/2000 +2025-03-12 02:25:08,460 Current Learning Rate: 0.0005626666 +2025-03-12 02:25:08,460 Train Loss: 0.0153737, Val Loss: 0.0148940 +2025-03-12 02:25:08,460 Epoch 1509/2000 +2025-03-12 02:25:33,931 Current Learning Rate: 0.0005704506 +2025-03-12 02:25:33,933 Train Loss: 0.0153557, Val Loss: 0.0148834 +2025-03-12 02:25:33,936 Epoch 1510/2000 +2025-03-12 02:25:59,187 Current Learning Rate: 0.0005782172 +2025-03-12 02:25:59,187 Train Loss: 0.0153795, Val Loss: 0.0149054 +2025-03-12 02:25:59,187 Epoch 1511/2000 +2025-03-12 02:26:23,930 Current Learning Rate: 0.0005859646 +2025-03-12 02:26:23,931 Train Loss: 0.0153546, Val Loss: 0.0148830 +2025-03-12 02:26:23,931 Epoch 1512/2000 +2025-03-12 02:26:49,416 Current Learning Rate: 0.0005936907 +2025-03-12 02:26:49,417 Train Loss: 0.0153628, Val Loss: 0.0149297 +2025-03-12 02:26:49,417 Epoch 1513/2000 +2025-03-12 02:27:14,994 Current Learning Rate: 0.0006013936 +2025-03-12 02:27:14,996 Train Loss: 0.0153735, Val Loss: 0.0148818 +2025-03-12 02:27:14,996 Epoch 1514/2000 +2025-03-12 02:27:40,372 Current Learning Rate: 0.0006090716 +2025-03-12 02:27:40,372 Train Loss: 0.0153592, Val Loss: 0.0150930 +2025-03-12 02:27:40,372 Epoch 1515/2000 +2025-03-12 02:28:05,703 Current Learning Rate: 0.0006167227 +2025-03-12 02:28:05,704 Train Loss: 0.0153860, Val Loss: 0.0149822 +2025-03-12 02:28:05,704 Epoch 1516/2000 +2025-03-12 02:28:30,737 Current Learning Rate: 0.0006243449 +2025-03-12 02:28:30,738 Train Loss: 0.0153693, Val Loss: 0.0148823 +2025-03-12 02:28:30,738 Epoch 1517/2000 +2025-03-12 02:28:56,101 Current Learning Rate: 0.0006319365 +2025-03-12 02:28:56,102 Train Loss: 0.0153527, Val Loss: 0.0149804 +2025-03-12 02:28:56,102 Epoch 1518/2000 +2025-03-12 02:29:21,349 Current Learning Rate: 0.0006394956 +2025-03-12 02:29:21,349 Train Loss: 0.0153645, Val Loss: 0.0149564 +2025-03-12 02:29:21,349 Epoch 1519/2000 +2025-03-12 02:29:46,436 Current Learning 
Rate: 0.0006470202 +2025-03-12 02:29:46,436 Train Loss: 0.0153745, Val Loss: 0.0149622 +2025-03-12 02:29:46,436 Epoch 1520/2000 +2025-03-12 02:30:11,687 Current Learning Rate: 0.0006545085 +2025-03-12 02:30:11,688 Train Loss: 0.0153913, Val Loss: 0.0149549 +2025-03-12 02:30:11,689 Epoch 1521/2000 +2025-03-12 02:30:37,901 Current Learning Rate: 0.0006619587 +2025-03-12 02:30:37,901 Train Loss: 0.0153483, Val Loss: 0.0148985 +2025-03-12 02:30:37,902 Epoch 1522/2000 +2025-03-12 02:31:03,464 Current Learning Rate: 0.0006693690 +2025-03-12 02:31:03,465 Train Loss: 0.0153753, Val Loss: 0.0148952 +2025-03-12 02:31:03,465 Epoch 1523/2000 +2025-03-12 02:31:28,944 Current Learning Rate: 0.0006767374 +2025-03-12 02:31:28,944 Train Loss: 0.0153718, Val Loss: 0.0149259 +2025-03-12 02:31:28,944 Epoch 1524/2000 +2025-03-12 02:31:54,246 Current Learning Rate: 0.0006840623 +2025-03-12 02:31:54,247 Train Loss: 0.0153645, Val Loss: 0.0149487 +2025-03-12 02:31:54,247 Epoch 1525/2000 +2025-03-12 02:32:19,184 Current Learning Rate: 0.0006913417 +2025-03-12 02:32:19,184 Train Loss: 0.0153655, Val Loss: 0.0148922 +2025-03-12 02:32:19,184 Epoch 1526/2000 +2025-03-12 02:32:44,771 Current Learning Rate: 0.0006985739 +2025-03-12 02:32:44,771 Train Loss: 0.0153599, Val Loss: 0.0150670 +2025-03-12 02:32:44,771 Epoch 1527/2000 +2025-03-12 02:33:09,575 Current Learning Rate: 0.0007057572 +2025-03-12 02:33:09,575 Train Loss: 0.0154084, Val Loss: 0.0152296 +2025-03-12 02:33:09,575 Epoch 1528/2000 +2025-03-12 02:33:34,551 Current Learning Rate: 0.0007128896 +2025-03-12 02:33:34,552 Train Loss: 0.0153668, Val Loss: 0.0148952 +2025-03-12 02:33:34,552 Epoch 1529/2000 +2025-03-12 02:34:00,273 Current Learning Rate: 0.0007199696 +2025-03-12 02:34:00,274 Train Loss: 0.0153573, Val Loss: 0.0150080 +2025-03-12 02:34:00,274 Epoch 1530/2000 +2025-03-12 02:34:25,532 Current Learning Rate: 0.0007269952 +2025-03-12 02:34:25,533 Train Loss: 0.0153792, Val Loss: 0.0150574 +2025-03-12 02:34:25,533 Epoch 1531/2000 
+2025-03-12 02:34:50,441 Current Learning Rate: 0.0007339649 +2025-03-12 02:34:50,441 Train Loss: 0.0153810, Val Loss: 0.0149233 +2025-03-12 02:34:50,441 Epoch 1532/2000 +2025-03-12 02:35:15,501 Current Learning Rate: 0.0007408768 +2025-03-12 02:35:15,507 Train Loss: 0.0153560, Val Loss: 0.0148688 +2025-03-12 02:35:15,507 Epoch 1533/2000 +2025-03-12 02:35:41,321 Current Learning Rate: 0.0007477293 +2025-03-12 02:35:41,321 Train Loss: 0.0153819, Val Loss: 0.0149120 +2025-03-12 02:35:41,322 Epoch 1534/2000 +2025-03-12 02:36:06,433 Current Learning Rate: 0.0007545207 +2025-03-12 02:36:06,434 Train Loss: 0.0153783, Val Loss: 0.0148737 +2025-03-12 02:36:06,434 Epoch 1535/2000 +2025-03-12 02:36:31,634 Current Learning Rate: 0.0007612493 +2025-03-12 02:36:31,635 Train Loss: 0.0153652, Val Loss: 0.0152750 +2025-03-12 02:36:31,635 Epoch 1536/2000 +2025-03-12 02:36:56,503 Current Learning Rate: 0.0007679134 +2025-03-12 02:36:56,503 Train Loss: 0.0153782, Val Loss: 0.0149995 +2025-03-12 02:36:56,503 Epoch 1537/2000 +2025-03-12 02:37:20,833 Current Learning Rate: 0.0007745114 +2025-03-12 02:37:20,833 Train Loss: 0.0153886, Val Loss: 0.0149281 +2025-03-12 02:37:20,833 Epoch 1538/2000 +2025-03-12 02:37:45,440 Current Learning Rate: 0.0007810417 +2025-03-12 02:37:45,440 Train Loss: 0.0153946, Val Loss: 0.0149061 +2025-03-12 02:37:45,441 Epoch 1539/2000 +2025-03-12 02:38:10,340 Current Learning Rate: 0.0007875026 +2025-03-12 02:38:10,341 Train Loss: 0.0153671, Val Loss: 0.0150082 +2025-03-12 02:38:10,341 Epoch 1540/2000 +2025-03-12 02:38:35,395 Current Learning Rate: 0.0007938926 +2025-03-12 02:38:35,396 Train Loss: 0.0153556, Val Loss: 0.0149090 +2025-03-12 02:38:35,396 Epoch 1541/2000 +2025-03-12 02:39:00,234 Current Learning Rate: 0.0008002101 +2025-03-12 02:39:00,235 Train Loss: 0.0153822, Val Loss: 0.0149121 +2025-03-12 02:39:00,235 Epoch 1542/2000 +2025-03-12 02:39:25,686 Current Learning Rate: 0.0008064535 +2025-03-12 02:39:25,690 Train Loss: 0.0153818, Val Loss: 0.0148642 
+2025-03-12 02:39:25,690 Epoch 1543/2000 +2025-03-12 02:39:50,823 Current Learning Rate: 0.0008126213 +2025-03-12 02:39:50,824 Train Loss: 0.0153584, Val Loss: 0.0149937 +2025-03-12 02:39:50,824 Epoch 1544/2000 +2025-03-12 02:40:16,312 Current Learning Rate: 0.0008187120 +2025-03-12 02:40:16,313 Train Loss: 0.0154065, Val Loss: 0.0149111 +2025-03-12 02:40:16,313 Epoch 1545/2000 +2025-03-12 02:40:41,070 Current Learning Rate: 0.0008247240 +2025-03-12 02:40:41,071 Train Loss: 0.0153673, Val Loss: 0.0149487 +2025-03-12 02:40:41,071 Epoch 1546/2000 +2025-03-12 02:41:06,307 Current Learning Rate: 0.0008306559 +2025-03-12 02:41:06,308 Train Loss: 0.0153800, Val Loss: 0.0149255 +2025-03-12 02:41:06,308 Epoch 1547/2000 +2025-03-12 02:41:31,576 Current Learning Rate: 0.0008365063 +2025-03-12 02:41:31,576 Train Loss: 0.0153638, Val Loss: 0.0148824 +2025-03-12 02:41:31,577 Epoch 1548/2000 +2025-03-12 02:41:57,084 Current Learning Rate: 0.0008422736 +2025-03-12 02:41:57,085 Train Loss: 0.0153658, Val Loss: 0.0149364 +2025-03-12 02:41:57,085 Epoch 1549/2000 +2025-03-12 02:42:22,805 Current Learning Rate: 0.0008479564 +2025-03-12 02:42:22,809 Train Loss: 0.0153933, Val Loss: 0.0148637 +2025-03-12 02:42:22,809 Epoch 1550/2000 +2025-03-12 02:42:48,227 Current Learning Rate: 0.0008535534 +2025-03-12 02:42:48,228 Train Loss: 0.0154370, Val Loss: 0.0148897 +2025-03-12 02:42:48,228 Epoch 1551/2000 +2025-03-12 02:43:13,367 Current Learning Rate: 0.0008590631 +2025-03-12 02:43:13,367 Train Loss: 0.0153445, Val Loss: 0.0149501 +2025-03-12 02:43:13,368 Epoch 1552/2000 +2025-03-12 02:43:39,080 Current Learning Rate: 0.0008644843 +2025-03-12 02:43:39,081 Train Loss: 0.0153529, Val Loss: 0.0149016 +2025-03-12 02:43:39,081 Epoch 1553/2000 +2025-03-12 02:44:04,390 Current Learning Rate: 0.0008698155 +2025-03-12 02:44:04,391 Train Loss: 0.0154049, Val Loss: 0.0148692 +2025-03-12 02:44:04,391 Epoch 1554/2000 +2025-03-12 02:44:29,881 Current Learning Rate: 0.0008750555 +2025-03-12 02:44:29,882 
Train Loss: 0.0153668, Val Loss: 0.0148901 +2025-03-12 02:44:29,882 Epoch 1555/2000 +2025-03-12 02:44:54,855 Current Learning Rate: 0.0008802030 +2025-03-12 02:44:54,856 Train Loss: 0.0153636, Val Loss: 0.0149139 +2025-03-12 02:44:54,856 Epoch 1556/2000 +2025-03-12 02:45:19,975 Current Learning Rate: 0.0008852566 +2025-03-12 02:45:19,976 Train Loss: 0.0153974, Val Loss: 0.0148950 +2025-03-12 02:45:19,976 Epoch 1557/2000 +2025-03-12 02:45:45,409 Current Learning Rate: 0.0008902152 +2025-03-12 02:45:45,409 Train Loss: 0.0153601, Val Loss: 0.0150305 +2025-03-12 02:45:45,409 Epoch 1558/2000 +2025-03-12 02:46:11,335 Current Learning Rate: 0.0008950775 +2025-03-12 02:46:11,335 Train Loss: 0.0153901, Val Loss: 0.0148858 +2025-03-12 02:46:11,336 Epoch 1559/2000 +2025-03-12 02:46:36,895 Current Learning Rate: 0.0008998423 +2025-03-12 02:46:36,896 Train Loss: 0.0153880, Val Loss: 0.0148732 +2025-03-12 02:46:36,896 Epoch 1560/2000 +2025-03-12 02:47:01,985 Current Learning Rate: 0.0009045085 +2025-03-12 02:47:01,986 Train Loss: 0.0153948, Val Loss: 0.0148732 +2025-03-12 02:47:01,987 Epoch 1561/2000 +2025-03-12 02:47:27,481 Current Learning Rate: 0.0009090749 +2025-03-12 02:47:27,481 Train Loss: 0.0153767, Val Loss: 0.0150025 +2025-03-12 02:47:27,482 Epoch 1562/2000 +2025-03-12 02:47:53,245 Current Learning Rate: 0.0009135403 +2025-03-12 02:47:53,245 Train Loss: 0.0153756, Val Loss: 0.0148815 +2025-03-12 02:47:53,246 Epoch 1563/2000 +2025-03-12 02:48:19,238 Current Learning Rate: 0.0009179037 +2025-03-12 02:48:19,239 Train Loss: 0.0153734, Val Loss: 0.0148995 +2025-03-12 02:48:19,239 Epoch 1564/2000 +2025-03-12 02:48:44,446 Current Learning Rate: 0.0009221640 +2025-03-12 02:48:44,447 Train Loss: 0.0153672, Val Loss: 0.0148684 +2025-03-12 02:48:44,447 Epoch 1565/2000 +2025-03-12 02:49:09,936 Current Learning Rate: 0.0009263201 +2025-03-12 02:49:09,937 Train Loss: 0.0154116, Val Loss: 0.0148758 +2025-03-12 02:49:09,937 Epoch 1566/2000 +2025-03-12 02:49:35,825 Current Learning 
Rate: 0.0009303710 +2025-03-12 02:49:35,826 Train Loss: 0.0153647, Val Loss: 0.0148905 +2025-03-12 02:49:35,826 Epoch 1567/2000 +2025-03-12 02:50:00,921 Current Learning Rate: 0.0009343158 +2025-03-12 02:50:00,921 Train Loss: 0.0153686, Val Loss: 0.0148958 +2025-03-12 02:50:00,921 Epoch 1568/2000 +2025-03-12 02:50:26,818 Current Learning Rate: 0.0009381533 +2025-03-12 02:50:26,819 Train Loss: 0.0153728, Val Loss: 0.0148668 +2025-03-12 02:50:26,819 Epoch 1569/2000 +2025-03-12 02:50:52,574 Current Learning Rate: 0.0009418828 +2025-03-12 02:50:52,574 Train Loss: 0.0153974, Val Loss: 0.0149406 +2025-03-12 02:50:52,575 Epoch 1570/2000 +2025-03-12 02:51:17,926 Current Learning Rate: 0.0009455033 +2025-03-12 02:51:17,927 Train Loss: 0.0154012, Val Loss: 0.0149007 +2025-03-12 02:51:17,927 Epoch 1571/2000 +2025-03-12 02:51:43,546 Current Learning Rate: 0.0009490138 +2025-03-12 02:51:43,550 Train Loss: 0.0153523, Val Loss: 0.0148615 +2025-03-12 02:51:43,550 Epoch 1572/2000 +2025-03-12 02:52:09,090 Current Learning Rate: 0.0009524135 +2025-03-12 02:52:09,091 Train Loss: 0.0153625, Val Loss: 0.0148646 +2025-03-12 02:52:09,091 Epoch 1573/2000 +2025-03-12 02:52:34,789 Current Learning Rate: 0.0009557016 +2025-03-12 02:52:34,793 Train Loss: 0.0154022, Val Loss: 0.0148545 +2025-03-12 02:52:34,793 Epoch 1574/2000 +2025-03-12 02:52:59,647 Current Learning Rate: 0.0009588773 +2025-03-12 02:52:59,648 Train Loss: 0.0153570, Val Loss: 0.0149383 +2025-03-12 02:52:59,648 Epoch 1575/2000 +2025-03-12 02:53:24,810 Current Learning Rate: 0.0009619398 +2025-03-12 02:53:24,811 Train Loss: 0.0154126, Val Loss: 0.0148552 +2025-03-12 02:53:24,811 Epoch 1576/2000 +2025-03-12 02:53:49,507 Current Learning Rate: 0.0009648882 +2025-03-12 02:53:49,509 Train Loss: 0.0153500, Val Loss: 0.0151552 +2025-03-12 02:53:49,509 Epoch 1577/2000 +2025-03-12 02:54:14,519 Current Learning Rate: 0.0009677220 +2025-03-12 02:54:14,519 Train Loss: 0.0154126, Val Loss: 0.0152669 +2025-03-12 02:54:14,519 Epoch 1578/2000 
+2025-03-12 02:54:40,136 Current Learning Rate: 0.0009704404 +2025-03-12 02:54:40,137 Train Loss: 0.0153553, Val Loss: 0.0148682 +2025-03-12 02:54:40,137 Epoch 1579/2000 +2025-03-12 02:55:05,697 Current Learning Rate: 0.0009730427 +2025-03-12 02:55:05,701 Train Loss: 0.0153667, Val Loss: 0.0148540 +2025-03-12 02:55:05,701 Epoch 1580/2000 +2025-03-12 02:55:30,872 Current Learning Rate: 0.0009755283 +2025-03-12 02:55:30,873 Train Loss: 0.0153766, Val Loss: 0.0148654 +2025-03-12 02:55:30,873 Epoch 1581/2000 +2025-03-12 02:55:56,242 Current Learning Rate: 0.0009778965 +2025-03-12 02:55:56,242 Train Loss: 0.0153925, Val Loss: 0.0151292 +2025-03-12 02:55:56,243 Epoch 1582/2000 +2025-03-12 02:56:21,841 Current Learning Rate: 0.0009801468 +2025-03-12 02:56:21,842 Train Loss: 0.0153503, Val Loss: 0.0150537 +2025-03-12 02:56:21,842 Epoch 1583/2000 +2025-03-12 02:56:47,411 Current Learning Rate: 0.0009822787 +2025-03-12 02:56:47,412 Train Loss: 0.0153867, Val Loss: 0.0149647 +2025-03-12 02:56:47,412 Epoch 1584/2000 +2025-03-12 02:57:12,954 Current Learning Rate: 0.0009842916 +2025-03-12 02:57:12,954 Train Loss: 0.0153906, Val Loss: 0.0149132 +2025-03-12 02:57:12,955 Epoch 1585/2000 +2025-03-12 02:57:37,829 Current Learning Rate: 0.0009861850 +2025-03-12 02:57:37,830 Train Loss: 0.0153221, Val Loss: 0.0150997 +2025-03-12 02:57:37,830 Epoch 1586/2000 +2025-03-12 02:58:02,540 Current Learning Rate: 0.0009879584 +2025-03-12 02:58:02,541 Train Loss: 0.0154389, Val Loss: 0.0152060 +2025-03-12 02:58:02,541 Epoch 1587/2000 +2025-03-12 02:58:27,014 Current Learning Rate: 0.0009896114 +2025-03-12 02:58:27,015 Train Loss: 0.0153646, Val Loss: 0.0150330 +2025-03-12 02:58:27,015 Epoch 1588/2000 +2025-03-12 02:58:51,851 Current Learning Rate: 0.0009911436 +2025-03-12 02:58:51,852 Train Loss: 0.0153863, Val Loss: 0.0149476 +2025-03-12 02:58:51,852 Epoch 1589/2000 +2025-03-12 02:59:16,777 Current Learning Rate: 0.0009925547 +2025-03-12 02:59:16,778 Train Loss: 0.0153441, Val Loss: 0.0148889 
+2025-03-12 02:59:16,778 Epoch 1590/2000 +2025-03-12 02:59:41,889 Current Learning Rate: 0.0009938442 +2025-03-12 02:59:41,890 Train Loss: 0.0153799, Val Loss: 0.0151891 +2025-03-12 02:59:41,890 Epoch 1591/2000 +2025-03-12 03:00:07,017 Current Learning Rate: 0.0009950118 +2025-03-12 03:00:07,017 Train Loss: 0.0153750, Val Loss: 0.0153462 +2025-03-12 03:00:07,018 Epoch 1592/2000 +2025-03-12 03:00:32,028 Current Learning Rate: 0.0009960574 +2025-03-12 03:00:32,028 Train Loss: 0.0153578, Val Loss: 0.0149200 +2025-03-12 03:00:32,028 Epoch 1593/2000 +2025-03-12 03:00:57,020 Current Learning Rate: 0.0009969805 +2025-03-12 03:00:57,020 Train Loss: 0.0153652, Val Loss: 0.0149533 +2025-03-12 03:00:57,020 Epoch 1594/2000 +2025-03-12 03:01:22,357 Current Learning Rate: 0.0009977810 +2025-03-12 03:01:22,357 Train Loss: 0.0153719, Val Loss: 0.0149125 +2025-03-12 03:01:22,357 Epoch 1595/2000 +2025-03-12 03:01:47,546 Current Learning Rate: 0.0009984587 +2025-03-12 03:01:47,547 Train Loss: 0.0153623, Val Loss: 0.0149503 +2025-03-12 03:01:47,547 Epoch 1596/2000 +2025-03-12 03:02:12,445 Current Learning Rate: 0.0009990134 +2025-03-12 03:02:12,446 Train Loss: 0.0154120, Val Loss: 0.0149467 +2025-03-12 03:02:12,446 Epoch 1597/2000 +2025-03-12 03:02:38,105 Current Learning Rate: 0.0009994449 +2025-03-12 03:02:38,110 Train Loss: 0.0153491, Val Loss: 0.0148397 +2025-03-12 03:02:38,110 Epoch 1598/2000 +2025-03-12 03:03:03,076 Current Learning Rate: 0.0009997533 +2025-03-12 03:03:03,076 Train Loss: 0.0153662, Val Loss: 0.0149268 +2025-03-12 03:03:03,076 Epoch 1599/2000 +2025-03-12 03:03:28,650 Current Learning Rate: 0.0009999383 +2025-03-12 03:03:28,651 Train Loss: 0.0153676, Val Loss: 0.0148933 +2025-03-12 03:03:28,651 Epoch 1600/2000 +2025-03-12 03:03:53,846 Current Learning Rate: 0.0010000000 +2025-03-12 03:03:53,846 Train Loss: 0.0153716, Val Loss: 0.0149011 +2025-03-12 03:03:53,847 Epoch 1601/2000 +2025-03-12 03:04:18,937 Current Learning Rate: 0.0009999383 +2025-03-12 03:04:18,938 
Train Loss: 0.0153352, Val Loss: 0.0148464 +2025-03-12 03:04:18,938 Epoch 1602/2000 +2025-03-12 03:04:43,848 Current Learning Rate: 0.0009997533 +2025-03-12 03:04:43,848 Train Loss: 0.0153812, Val Loss: 0.0149254 +2025-03-12 03:04:43,849 Epoch 1603/2000 +2025-03-12 03:05:09,760 Current Learning Rate: 0.0009994449 +2025-03-12 03:05:09,760 Train Loss: 0.0153446, Val Loss: 0.0148880 +2025-03-12 03:05:09,761 Epoch 1604/2000 +2025-03-12 03:05:35,167 Current Learning Rate: 0.0009990134 +2025-03-12 03:05:35,167 Train Loss: 0.0153668, Val Loss: 0.0149747 +2025-03-12 03:05:35,168 Epoch 1605/2000 +2025-03-12 03:06:00,586 Current Learning Rate: 0.0009984587 +2025-03-12 03:06:00,587 Train Loss: 0.0153894, Val Loss: 0.0150825 +2025-03-12 03:06:00,588 Epoch 1606/2000 +2025-03-12 03:06:26,685 Current Learning Rate: 0.0009977810 +2025-03-12 03:06:26,686 Train Loss: 0.0153434, Val Loss: 0.0150745 +2025-03-12 03:06:26,686 Epoch 1607/2000 +2025-03-12 03:06:51,769 Current Learning Rate: 0.0009969805 +2025-03-12 03:06:51,770 Train Loss: 0.0153611, Val Loss: 0.0149149 +2025-03-12 03:06:51,770 Epoch 1608/2000 +2025-03-12 03:07:16,897 Current Learning Rate: 0.0009960574 +2025-03-12 03:07:16,897 Train Loss: 0.0153501, Val Loss: 0.0148460 +2025-03-12 03:07:16,898 Epoch 1609/2000 +2025-03-12 03:07:42,769 Current Learning Rate: 0.0009950118 +2025-03-12 03:07:42,769 Train Loss: 0.0153962, Val Loss: 0.0148867 +2025-03-12 03:07:42,769 Epoch 1610/2000 +2025-03-12 03:08:08,583 Current Learning Rate: 0.0009938442 +2025-03-12 03:08:08,583 Train Loss: 0.0153348, Val Loss: 0.0148420 +2025-03-12 03:08:08,583 Epoch 1611/2000 +2025-03-12 03:08:33,877 Current Learning Rate: 0.0009925547 +2025-03-12 03:08:33,877 Train Loss: 0.0153591, Val Loss: 0.0150957 +2025-03-12 03:08:33,877 Epoch 1612/2000 +2025-03-12 03:08:59,514 Current Learning Rate: 0.0009911436 +2025-03-12 03:08:59,514 Train Loss: 0.0153401, Val Loss: 0.0148941 +2025-03-12 03:08:59,514 Epoch 1613/2000 +2025-03-12 03:09:25,348 Current Learning 
Rate: 0.0009896114 +2025-03-12 03:09:25,348 Train Loss: 0.0153840, Val Loss: 0.0149159 +2025-03-12 03:09:25,348 Epoch 1614/2000 +2025-03-12 03:09:50,841 Current Learning Rate: 0.0009879584 +2025-03-12 03:09:50,842 Train Loss: 0.0153146, Val Loss: 0.0148703 +2025-03-12 03:09:50,842 Epoch 1615/2000 +2025-03-12 03:10:16,750 Current Learning Rate: 0.0009861850 +2025-03-12 03:10:16,751 Train Loss: 0.0153616, Val Loss: 0.0149685 +2025-03-12 03:10:16,751 Epoch 1616/2000 +2025-03-12 03:10:42,375 Current Learning Rate: 0.0009842916 +2025-03-12 03:10:42,375 Train Loss: 0.0153720, Val Loss: 0.0150034 +2025-03-12 03:10:42,376 Epoch 1617/2000 +2025-03-12 03:11:08,040 Current Learning Rate: 0.0009822787 +2025-03-12 03:11:08,041 Train Loss: 0.0153324, Val Loss: 0.0148468 +2025-03-12 03:11:08,041 Epoch 1618/2000 +2025-03-12 03:11:33,320 Current Learning Rate: 0.0009801468 +2025-03-12 03:11:33,324 Train Loss: 0.0153288, Val Loss: 0.0148384 +2025-03-12 03:11:33,324 Epoch 1619/2000 +2025-03-12 03:11:59,330 Current Learning Rate: 0.0009778965 +2025-03-12 03:11:59,331 Train Loss: 0.0153828, Val Loss: 0.0148412 +2025-03-12 03:11:59,331 Epoch 1620/2000 +2025-03-12 03:12:25,085 Current Learning Rate: 0.0009755283 +2025-03-12 03:12:25,086 Train Loss: 0.0153607, Val Loss: 0.0148898 +2025-03-12 03:12:25,086 Epoch 1621/2000 +2025-03-12 03:12:50,242 Current Learning Rate: 0.0009730427 +2025-03-12 03:12:50,243 Train Loss: 0.0153448, Val Loss: 0.0149009 +2025-03-12 03:12:50,243 Epoch 1622/2000 +2025-03-12 03:13:15,340 Current Learning Rate: 0.0009704404 +2025-03-12 03:13:15,341 Train Loss: 0.0153362, Val Loss: 0.0149792 +2025-03-12 03:13:15,341 Epoch 1623/2000 +2025-03-12 03:13:40,622 Current Learning Rate: 0.0009677220 +2025-03-12 03:13:40,623 Train Loss: 0.0153511, Val Loss: 0.0148785 +2025-03-12 03:13:40,623 Epoch 1624/2000 +2025-03-12 03:14:06,067 Current Learning Rate: 0.0009648882 +2025-03-12 03:14:06,067 Train Loss: 0.0153457, Val Loss: 0.0148803 +2025-03-12 03:14:06,067 Epoch 1625/2000 
+2025-03-12 03:14:31,937 Current Learning Rate: 0.0009619398 +2025-03-12 03:14:31,937 Train Loss: 0.0153711, Val Loss: 0.0149574 +2025-03-12 03:14:31,937 Epoch 1626/2000 +2025-03-12 03:14:57,160 Current Learning Rate: 0.0009588773 +2025-03-12 03:14:57,164 Train Loss: 0.0153062, Val Loss: 0.0148259 +2025-03-12 03:14:57,164 Epoch 1627/2000 +2025-03-12 03:15:22,836 Current Learning Rate: 0.0009557016 +2025-03-12 03:15:22,837 Train Loss: 0.0153182, Val Loss: 0.0151343 +2025-03-12 03:15:22,838 Epoch 1628/2000 +2025-03-12 03:15:48,169 Current Learning Rate: 0.0009524135 +2025-03-12 03:15:48,170 Train Loss: 0.0153457, Val Loss: 0.0148459 +2025-03-12 03:15:48,170 Epoch 1629/2000 +2025-03-12 03:16:13,285 Current Learning Rate: 0.0009490138 +2025-03-12 03:16:13,285 Train Loss: 0.0153299, Val Loss: 0.0149829 +2025-03-12 03:16:13,285 Epoch 1630/2000 +2025-03-12 03:16:38,218 Current Learning Rate: 0.0009455033 +2025-03-12 03:16:38,218 Train Loss: 0.0153566, Val Loss: 0.0148745 +2025-03-12 03:16:38,219 Epoch 1631/2000 +2025-03-12 03:17:03,828 Current Learning Rate: 0.0009418828 +2025-03-12 03:17:03,828 Train Loss: 0.0153155, Val Loss: 0.0148693 +2025-03-12 03:17:03,828 Epoch 1632/2000 +2025-03-12 03:17:28,805 Current Learning Rate: 0.0009381533 +2025-03-12 03:17:28,805 Train Loss: 0.0153230, Val Loss: 0.0149119 +2025-03-12 03:17:28,805 Epoch 1633/2000 +2025-03-12 03:17:54,571 Current Learning Rate: 0.0009343158 +2025-03-12 03:17:54,571 Train Loss: 0.0153337, Val Loss: 0.0148876 +2025-03-12 03:17:54,572 Epoch 1634/2000 +2025-03-12 03:18:19,976 Current Learning Rate: 0.0009303710 +2025-03-12 03:18:19,976 Train Loss: 0.0153252, Val Loss: 0.0148439 +2025-03-12 03:18:19,976 Epoch 1635/2000 +2025-03-12 03:18:45,936 Current Learning Rate: 0.0009263201 +2025-03-12 03:18:45,940 Train Loss: 0.0153160, Val Loss: 0.0148243 +2025-03-12 03:18:45,940 Epoch 1636/2000 +2025-03-12 03:19:11,490 Current Learning Rate: 0.0009221640 +2025-03-12 03:19:11,490 Train Loss: 0.0153119, Val Loss: 0.0151896 
+2025-03-12 03:19:11,491 Epoch 1637/2000 +2025-03-12 03:19:37,317 Current Learning Rate: 0.0009179037 +2025-03-12 03:19:37,317 Train Loss: 0.0153685, Val Loss: 0.0153261 +2025-03-12 03:19:37,318 Epoch 1638/2000 +2025-03-12 03:20:02,395 Current Learning Rate: 0.0009135403 +2025-03-12 03:20:02,396 Train Loss: 0.0153274, Val Loss: 0.0148520 +2025-03-12 03:20:02,396 Epoch 1639/2000 +2025-03-12 03:20:27,583 Current Learning Rate: 0.0009090749 +2025-03-12 03:20:27,584 Train Loss: 0.0152980, Val Loss: 0.0149233 +2025-03-12 03:20:27,584 Epoch 1640/2000 +2025-03-12 03:20:52,338 Current Learning Rate: 0.0009045085 +2025-03-12 03:20:52,339 Train Loss: 0.0153142, Val Loss: 0.0148419 +2025-03-12 03:20:52,339 Epoch 1641/2000 +2025-03-12 03:21:17,074 Current Learning Rate: 0.0008998423 +2025-03-12 03:21:17,074 Train Loss: 0.0153215, Val Loss: 0.0149588 +2025-03-12 03:21:17,074 Epoch 1642/2000 +2025-03-12 03:21:42,129 Current Learning Rate: 0.0008950775 +2025-03-12 03:21:42,129 Train Loss: 0.0153205, Val Loss: 0.0148300 +2025-03-12 03:21:42,129 Epoch 1643/2000 +2025-03-12 03:22:06,768 Current Learning Rate: 0.0008902152 +2025-03-12 03:22:06,771 Train Loss: 0.0153396, Val Loss: 0.0148160 +2025-03-12 03:22:06,772 Epoch 1644/2000 +2025-03-12 03:22:31,741 Current Learning Rate: 0.0008852566 +2025-03-12 03:22:31,741 Train Loss: 0.0152957, Val Loss: 0.0148216 +2025-03-12 03:22:31,742 Epoch 1645/2000 +2025-03-12 03:22:56,129 Current Learning Rate: 0.0008802030 +2025-03-12 03:22:56,129 Train Loss: 0.0153341, Val Loss: 0.0148621 +2025-03-12 03:22:56,129 Epoch 1646/2000 +2025-03-12 03:23:20,995 Current Learning Rate: 0.0008750555 +2025-03-12 03:23:20,995 Train Loss: 0.0153048, Val Loss: 0.0148319 +2025-03-12 03:23:20,996 Epoch 1647/2000 +2025-03-12 03:23:46,001 Current Learning Rate: 0.0008698155 +2025-03-12 03:23:46,002 Train Loss: 0.0153200, Val Loss: 0.0151336 +2025-03-12 03:23:46,002 Epoch 1648/2000 +2025-03-12 03:24:11,023 Current Learning Rate: 0.0008644843 +2025-03-12 03:24:11,023 
Train Loss: 0.0153036, Val Loss: 0.0148234 +2025-03-12 03:24:11,024 Epoch 1649/2000 +2025-03-12 03:24:36,427 Current Learning Rate: 0.0008590631 +2025-03-12 03:24:36,427 Train Loss: 0.0152998, Val Loss: 0.0149525 +2025-03-12 03:24:36,428 Epoch 1650/2000 +2025-03-12 03:25:01,481 Current Learning Rate: 0.0008535534 +2025-03-12 03:25:01,482 Train Loss: 0.0153255, Val Loss: 0.0149341 +2025-03-12 03:25:01,483 Epoch 1651/2000 +2025-03-12 03:25:26,892 Current Learning Rate: 0.0008479564 +2025-03-12 03:25:26,896 Train Loss: 0.0152836, Val Loss: 0.0148104 +2025-03-12 03:25:26,896 Epoch 1652/2000 +2025-03-12 03:25:51,528 Current Learning Rate: 0.0008422736 +2025-03-12 03:25:51,529 Train Loss: 0.0153025, Val Loss: 0.0149004 +2025-03-12 03:25:51,529 Epoch 1653/2000 +2025-03-12 03:26:16,781 Current Learning Rate: 0.0008365063 +2025-03-12 03:26:16,782 Train Loss: 0.0153102, Val Loss: 0.0148616 +2025-03-12 03:26:16,782 Epoch 1654/2000 +2025-03-12 03:26:41,576 Current Learning Rate: 0.0008306559 +2025-03-12 03:26:41,576 Train Loss: 0.0152864, Val Loss: 0.0150085 +2025-03-12 03:26:41,576 Epoch 1655/2000 +2025-03-12 03:27:06,417 Current Learning Rate: 0.0008247240 +2025-03-12 03:27:06,418 Train Loss: 0.0153105, Val Loss: 0.0148336 +2025-03-12 03:27:06,418 Epoch 1656/2000 +2025-03-12 03:27:31,401 Current Learning Rate: 0.0008187120 +2025-03-12 03:27:31,402 Train Loss: 0.0153047, Val Loss: 0.0148295 +2025-03-12 03:27:31,402 Epoch 1657/2000 +2025-03-12 03:27:56,855 Current Learning Rate: 0.0008126213 +2025-03-12 03:27:56,855 Train Loss: 0.0152819, Val Loss: 0.0148157 +2025-03-12 03:27:56,856 Epoch 1658/2000 +2025-03-12 03:28:21,983 Current Learning Rate: 0.0008064535 +2025-03-12 03:28:21,984 Train Loss: 0.0152887, Val Loss: 0.0151408 +2025-03-12 03:28:21,984 Epoch 1659/2000 +2025-03-12 03:28:47,043 Current Learning Rate: 0.0008002101 +2025-03-12 03:28:47,044 Train Loss: 0.0152998, Val Loss: 0.0148424 +2025-03-12 03:28:47,044 Epoch 1660/2000 +2025-03-12 03:29:12,434 Current Learning 
Rate: 0.0007938926 +2025-03-12 03:29:12,435 Train Loss: 0.0152829, Val Loss: 0.0148110 +2025-03-12 03:29:12,435 Epoch 1661/2000 +2025-03-12 03:29:36,937 Current Learning Rate: 0.0007875026 +2025-03-12 03:29:36,941 Train Loss: 0.0152955, Val Loss: 0.0148093 +2025-03-12 03:29:36,941 Epoch 1662/2000 +2025-03-12 03:30:02,377 Current Learning Rate: 0.0007810417 +2025-03-12 03:30:02,378 Train Loss: 0.0152794, Val Loss: 0.0148751 +2025-03-12 03:30:02,378 Epoch 1663/2000 +2025-03-12 03:30:28,317 Current Learning Rate: 0.0007745114 +2025-03-12 03:30:28,318 Train Loss: 0.0153004, Val Loss: 0.0148439 +2025-03-12 03:30:28,318 Epoch 1664/2000 +2025-03-12 03:30:53,293 Current Learning Rate: 0.0007679134 +2025-03-12 03:30:53,294 Train Loss: 0.0152831, Val Loss: 0.0148465 +2025-03-12 03:30:53,294 Epoch 1665/2000 +2025-03-12 03:31:18,403 Current Learning Rate: 0.0007612493 +2025-03-12 03:31:18,403 Train Loss: 0.0152774, Val Loss: 0.0148954 +2025-03-12 03:31:18,404 Epoch 1666/2000 +2025-03-12 03:31:43,702 Current Learning Rate: 0.0007545207 +2025-03-12 03:31:43,703 Train Loss: 0.0153056, Val Loss: 0.0148413 +2025-03-12 03:31:43,703 Epoch 1667/2000 +2025-03-12 03:32:08,763 Current Learning Rate: 0.0007477293 +2025-03-12 03:32:08,763 Train Loss: 0.0152691, Val Loss: 0.0148182 +2025-03-12 03:32:08,764 Epoch 1668/2000 +2025-03-12 03:32:34,044 Current Learning Rate: 0.0007408768 +2025-03-12 03:32:34,045 Train Loss: 0.0152661, Val Loss: 0.0149234 +2025-03-12 03:32:34,045 Epoch 1669/2000 +2025-03-12 03:32:59,234 Current Learning Rate: 0.0007339649 +2025-03-12 03:32:59,235 Train Loss: 0.0152931, Val Loss: 0.0148139 +2025-03-12 03:32:59,235 Epoch 1670/2000 +2025-03-12 03:33:24,624 Current Learning Rate: 0.0007269952 +2025-03-12 03:33:24,625 Train Loss: 0.0152648, Val Loss: 0.0149553 +2025-03-12 03:33:24,625 Epoch 1671/2000 +2025-03-12 03:33:49,931 Current Learning Rate: 0.0007199696 +2025-03-12 03:33:49,932 Train Loss: 0.0152881, Val Loss: 0.0148124 +2025-03-12 03:33:49,932 Epoch 1672/2000 
+2025-03-12 03:34:14,791 Current Learning Rate: 0.0007128896 +2025-03-12 03:34:14,791 Train Loss: 0.0152763, Val Loss: 0.0148601 +2025-03-12 03:34:14,791 Epoch 1673/2000 +2025-03-12 03:34:40,208 Current Learning Rate: 0.0007057572 +2025-03-12 03:34:40,208 Train Loss: 0.0152596, Val Loss: 0.0148315 +2025-03-12 03:34:40,208 Epoch 1674/2000 +2025-03-12 03:35:05,296 Current Learning Rate: 0.0006985739 +2025-03-12 03:35:05,297 Train Loss: 0.0152901, Val Loss: 0.0148355 +2025-03-12 03:35:05,297 Epoch 1675/2000 +2025-03-12 03:35:30,695 Current Learning Rate: 0.0006913417 +2025-03-12 03:35:30,695 Train Loss: 0.0152569, Val Loss: 0.0149217 +2025-03-12 03:35:30,696 Epoch 1676/2000 +2025-03-12 03:35:55,943 Current Learning Rate: 0.0006840623 +2025-03-12 03:35:55,947 Train Loss: 0.0152726, Val Loss: 0.0147995 +2025-03-12 03:35:55,947 Epoch 1677/2000 +2025-03-12 03:36:20,831 Current Learning Rate: 0.0006767374 +2025-03-12 03:36:20,831 Train Loss: 0.0152668, Val Loss: 0.0148636 +2025-03-12 03:36:20,832 Epoch 1678/2000 +2025-03-12 03:36:46,740 Current Learning Rate: 0.0006693690 +2025-03-12 03:36:46,741 Train Loss: 0.0152611, Val Loss: 0.0148523 +2025-03-12 03:36:46,741 Epoch 1679/2000 +2025-03-12 03:37:12,756 Current Learning Rate: 0.0006619587 +2025-03-12 03:37:12,757 Train Loss: 0.0152664, Val Loss: 0.0148121 +2025-03-12 03:37:12,757 Epoch 1680/2000 +2025-03-12 03:37:37,764 Current Learning Rate: 0.0006545085 +2025-03-12 03:37:37,764 Train Loss: 0.0152620, Val Loss: 0.0149155 +2025-03-12 03:37:37,765 Epoch 1681/2000 +2025-03-12 03:38:03,117 Current Learning Rate: 0.0006470202 +2025-03-12 03:38:03,118 Train Loss: 0.0152714, Val Loss: 0.0148520 +2025-03-12 03:38:03,118 Epoch 1682/2000 +2025-03-12 03:38:28,542 Current Learning Rate: 0.0006394956 +2025-03-12 03:38:28,542 Train Loss: 0.0152468, Val Loss: 0.0148033 +2025-03-12 03:38:28,542 Epoch 1683/2000 +2025-03-12 03:38:53,221 Current Learning Rate: 0.0006319365 +2025-03-12 03:38:53,222 Train Loss: 0.0152527, Val Loss: 0.0148799 
+2025-03-12 03:38:53,222 Epoch 1684/2000 +2025-03-12 03:39:18,027 Current Learning Rate: 0.0006243449 +2025-03-12 03:39:18,032 Train Loss: 0.0152782, Val Loss: 0.0147972 +2025-03-12 03:39:18,033 Epoch 1685/2000 +2025-03-12 03:39:43,515 Current Learning Rate: 0.0006167227 +2025-03-12 03:39:43,516 Train Loss: 0.0152495, Val Loss: 0.0148189 +2025-03-12 03:39:43,516 Epoch 1686/2000 +2025-03-12 03:40:08,506 Current Learning Rate: 0.0006090716 +2025-03-12 03:40:08,507 Train Loss: 0.0152717, Val Loss: 0.0149218 +2025-03-12 03:40:08,507 Epoch 1687/2000 +2025-03-12 03:40:33,970 Current Learning Rate: 0.0006013936 +2025-03-12 03:40:33,971 Train Loss: 0.0152625, Val Loss: 0.0148046 +2025-03-12 03:40:33,972 Epoch 1688/2000 +2025-03-12 03:40:58,964 Current Learning Rate: 0.0005936907 +2025-03-12 03:40:58,965 Train Loss: 0.0152377, Val Loss: 0.0148327 +2025-03-12 03:40:58,965 Epoch 1689/2000 +2025-03-12 03:41:24,081 Current Learning Rate: 0.0005859646 +2025-03-12 03:41:24,081 Train Loss: 0.0152448, Val Loss: 0.0148414 +2025-03-12 03:41:24,082 Epoch 1690/2000 +2025-03-12 03:41:48,885 Current Learning Rate: 0.0005782172 +2025-03-12 03:41:48,886 Train Loss: 0.0152505, Val Loss: 0.0147996 +2025-03-12 03:41:48,886 Epoch 1691/2000 +2025-03-12 03:42:13,783 Current Learning Rate: 0.0005704506 +2025-03-12 03:42:13,787 Train Loss: 0.0152522, Val Loss: 0.0147924 +2025-03-12 03:42:13,788 Epoch 1692/2000 +2025-03-12 03:42:39,275 Current Learning Rate: 0.0005626666 +2025-03-12 03:42:39,276 Train Loss: 0.0152418, Val Loss: 0.0148126 +2025-03-12 03:42:39,276 Epoch 1693/2000 +2025-03-12 03:43:04,682 Current Learning Rate: 0.0005548672 +2025-03-12 03:43:04,682 Train Loss: 0.0152476, Val Loss: 0.0148172 +2025-03-12 03:43:04,683 Epoch 1694/2000 +2025-03-12 03:43:30,375 Current Learning Rate: 0.0005470542 +2025-03-12 03:43:30,376 Train Loss: 0.0152424, Val Loss: 0.0148805 +2025-03-12 03:43:30,376 Epoch 1695/2000 +2025-03-12 03:43:55,193 Current Learning Rate: 0.0005392295 +2025-03-12 03:43:55,193 
Train Loss: 0.0152474, Val Loss: 0.0148037 +2025-03-12 03:43:55,194 Epoch 1696/2000 +2025-03-12 03:44:20,276 Current Learning Rate: 0.0005313953 +2025-03-12 03:44:20,276 Train Loss: 0.0152423, Val Loss: 0.0147991 +2025-03-12 03:44:20,277 Epoch 1697/2000 +2025-03-12 03:44:45,171 Current Learning Rate: 0.0005235532 +2025-03-12 03:44:45,172 Train Loss: 0.0152373, Val Loss: 0.0148435 +2025-03-12 03:44:45,172 Epoch 1698/2000 +2025-03-12 03:45:10,425 Current Learning Rate: 0.0005157054 +2025-03-12 03:45:10,425 Train Loss: 0.0152418, Val Loss: 0.0148344 +2025-03-12 03:45:10,425 Epoch 1699/2000 +2025-03-12 03:45:36,240 Current Learning Rate: 0.0005078537 +2025-03-12 03:45:36,242 Train Loss: 0.0152475, Val Loss: 0.0147924 +2025-03-12 03:45:36,242 Epoch 1700/2000 +2025-03-12 03:46:01,332 Current Learning Rate: 0.0005000000 +2025-03-12 03:46:01,336 Train Loss: 0.0152325, Val Loss: 0.0147849 +2025-03-12 03:46:01,336 Epoch 1701/2000 +2025-03-12 03:46:26,763 Current Learning Rate: 0.0004921463 +2025-03-12 03:46:26,764 Train Loss: 0.0152385, Val Loss: 0.0148007 +2025-03-12 03:46:26,764 Epoch 1702/2000 +2025-03-12 03:46:52,751 Current Learning Rate: 0.0004842946 +2025-03-12 03:46:52,752 Train Loss: 0.0152344, Val Loss: 0.0148115 +2025-03-12 03:46:52,752 Epoch 1703/2000 +2025-03-12 03:47:17,757 Current Learning Rate: 0.0004764468 +2025-03-12 03:47:17,757 Train Loss: 0.0152416, Val Loss: 0.0149557 +2025-03-12 03:47:17,758 Epoch 1704/2000 +2025-03-12 03:47:42,807 Current Learning Rate: 0.0004686047 +2025-03-12 03:47:42,808 Train Loss: 0.0152405, Val Loss: 0.0147956 +2025-03-12 03:47:42,808 Epoch 1705/2000 +2025-03-12 03:48:08,583 Current Learning Rate: 0.0004607705 +2025-03-12 03:48:08,583 Train Loss: 0.0152286, Val Loss: 0.0147933 +2025-03-12 03:48:08,584 Epoch 1706/2000 +2025-03-12 03:48:33,524 Current Learning Rate: 0.0004529458 +2025-03-12 03:48:33,524 Train Loss: 0.0152285, Val Loss: 0.0147950 +2025-03-12 03:48:33,525 Epoch 1707/2000 +2025-03-12 03:48:58,901 Current Learning 
Rate: 0.0004451328 +2025-03-12 03:48:58,905 Train Loss: 0.0152303, Val Loss: 0.0147810 +2025-03-12 03:48:58,906 Epoch 1708/2000 +2025-03-12 03:49:23,751 Current Learning Rate: 0.0004373334 +2025-03-12 03:49:23,751 Train Loss: 0.0152300, Val Loss: 0.0148093 +2025-03-12 03:49:23,751 Epoch 1709/2000 +2025-03-12 03:49:49,237 Current Learning Rate: 0.0004295494 +2025-03-12 03:49:49,237 Train Loss: 0.0152330, Val Loss: 0.0148858 +2025-03-12 03:49:49,237 Epoch 1710/2000 +2025-03-12 03:50:14,581 Current Learning Rate: 0.0004217828 +2025-03-12 03:50:14,581 Train Loss: 0.0152350, Val Loss: 0.0147960 +2025-03-12 03:50:14,581 Epoch 1711/2000 +2025-03-12 03:50:39,671 Current Learning Rate: 0.0004140354 +2025-03-12 03:50:39,671 Train Loss: 0.0152245, Val Loss: 0.0147911 +2025-03-12 03:50:39,672 Epoch 1712/2000 +2025-03-12 03:51:05,050 Current Learning Rate: 0.0004063093 +2025-03-12 03:51:05,051 Train Loss: 0.0152250, Val Loss: 0.0148012 +2025-03-12 03:51:05,051 Epoch 1713/2000 +2025-03-12 03:51:30,175 Current Learning Rate: 0.0003986064 +2025-03-12 03:51:30,176 Train Loss: 0.0152257, Val Loss: 0.0147896 +2025-03-12 03:51:30,176 Epoch 1714/2000 +2025-03-12 03:51:55,476 Current Learning Rate: 0.0003909284 +2025-03-12 03:51:55,476 Train Loss: 0.0152237, Val Loss: 0.0147974 +2025-03-12 03:51:55,476 Epoch 1715/2000 +2025-03-12 03:52:20,769 Current Learning Rate: 0.0003832773 +2025-03-12 03:52:20,770 Train Loss: 0.0152242, Val Loss: 0.0147848 +2025-03-12 03:52:20,770 Epoch 1716/2000 +2025-03-12 03:52:46,463 Current Learning Rate: 0.0003756551 +2025-03-12 03:52:46,464 Train Loss: 0.0152217, Val Loss: 0.0147903 +2025-03-12 03:52:46,464 Epoch 1717/2000 +2025-03-12 03:53:12,263 Current Learning Rate: 0.0003680635 +2025-03-12 03:53:12,263 Train Loss: 0.0152221, Val Loss: 0.0147832 +2025-03-12 03:53:12,263 Epoch 1718/2000 +2025-03-12 03:53:38,137 Current Learning Rate: 0.0003605044 +2025-03-12 03:53:38,137 Train Loss: 0.0152204, Val Loss: 0.0147839 +2025-03-12 03:53:38,138 Epoch 1719/2000 
+2025-03-12 03:54:03,777 Current Learning Rate: 0.0003529798 +2025-03-12 03:54:03,778 Train Loss: 0.0152202, Val Loss: 0.0147812 +2025-03-12 03:54:03,778 Epoch 1720/2000 +2025-03-12 03:54:29,665 Current Learning Rate: 0.0003454915 +2025-03-12 03:54:29,668 Train Loss: 0.0152193, Val Loss: 0.0147799 +2025-03-12 03:54:29,669 Epoch 1721/2000 +2025-03-12 03:54:55,937 Current Learning Rate: 0.0003380413 +2025-03-12 03:54:55,941 Train Loss: 0.0152187, Val Loss: 0.0147787 +2025-03-12 03:54:55,941 Epoch 1722/2000 +2025-03-12 03:55:21,798 Current Learning Rate: 0.0003306310 +2025-03-12 03:55:21,802 Train Loss: 0.0152181, Val Loss: 0.0147781 +2025-03-12 03:55:21,802 Epoch 1723/2000 +2025-03-12 03:55:47,196 Current Learning Rate: 0.0003232626 +2025-03-12 03:55:47,197 Train Loss: 0.0152176, Val Loss: 0.0147790 +2025-03-12 03:55:47,197 Epoch 1724/2000 +2025-03-12 03:56:12,686 Current Learning Rate: 0.0003159377 +2025-03-12 03:56:12,687 Train Loss: 0.0152174, Val Loss: 0.0147837 +2025-03-12 03:56:12,687 Epoch 1725/2000 +2025-03-12 03:56:38,087 Current Learning Rate: 0.0003086583 +2025-03-12 03:56:38,088 Train Loss: 0.0152169, Val Loss: 0.0147884 +2025-03-12 03:56:38,089 Epoch 1726/2000 +2025-03-12 03:57:03,045 Current Learning Rate: 0.0003014261 +2025-03-12 03:57:03,045 Train Loss: 0.0152156, Val Loss: 0.0147861 +2025-03-12 03:57:03,046 Epoch 1727/2000 +2025-03-12 03:57:28,637 Current Learning Rate: 0.0002942428 +2025-03-12 03:57:28,637 Train Loss: 0.0152145, Val Loss: 0.0147842 +2025-03-12 03:57:28,638 Epoch 1728/2000 +2025-03-12 03:57:54,426 Current Learning Rate: 0.0002871104 +2025-03-12 03:57:54,426 Train Loss: 0.0152138, Val Loss: 0.0147824 +2025-03-12 03:57:54,427 Epoch 1729/2000 +2025-03-12 03:58:19,667 Current Learning Rate: 0.0002800304 +2025-03-12 03:58:19,667 Train Loss: 0.0152131, Val Loss: 0.0147804 +2025-03-12 03:58:19,667 Epoch 1730/2000 +2025-03-12 03:58:45,277 Current Learning Rate: 0.0002730048 +2025-03-12 03:58:45,278 Train Loss: 0.0152124, Val Loss: 0.0147782 
+2025-03-12 03:58:45,278 Epoch 1731/2000 +2025-03-12 03:59:10,769 Current Learning Rate: 0.0002660351 +2025-03-12 03:59:10,774 Train Loss: 0.0152118, Val Loss: 0.0147760 +2025-03-12 03:59:10,774 Epoch 1732/2000 +2025-03-12 03:59:36,455 Current Learning Rate: 0.0002591232 +2025-03-12 03:59:36,459 Train Loss: 0.0152113, Val Loss: 0.0147738 +2025-03-12 03:59:36,459 Epoch 1733/2000 +2025-03-12 04:00:01,778 Current Learning Rate: 0.0002522707 +2025-03-12 04:00:01,783 Train Loss: 0.0152109, Val Loss: 0.0147722 +2025-03-12 04:00:01,783 Epoch 1734/2000 +2025-03-12 04:00:27,435 Current Learning Rate: 0.0002454793 +2025-03-12 04:00:27,439 Train Loss: 0.0152108, Val Loss: 0.0147707 +2025-03-12 04:00:27,440 Epoch 1735/2000 +2025-03-12 04:00:53,223 Current Learning Rate: 0.0002387507 +2025-03-12 04:00:53,228 Train Loss: 0.0152109, Val Loss: 0.0147702 +2025-03-12 04:00:53,228 Epoch 1736/2000 +2025-03-12 04:01:18,164 Current Learning Rate: 0.0002320866 +2025-03-12 04:01:18,165 Train Loss: 0.0152107, Val Loss: 0.0147728 +2025-03-12 04:01:18,165 Epoch 1737/2000 +2025-03-12 04:01:43,444 Current Learning Rate: 0.0002254886 +2025-03-12 04:01:43,444 Train Loss: 0.0152094, Val Loss: 0.0147712 +2025-03-12 04:01:43,444 Epoch 1738/2000 +2025-03-12 04:02:09,496 Current Learning Rate: 0.0002189583 +2025-03-12 04:02:09,499 Train Loss: 0.0152087, Val Loss: 0.0147697 +2025-03-12 04:02:09,500 Epoch 1739/2000 +2025-03-12 04:02:35,276 Current Learning Rate: 0.0002124974 +2025-03-12 04:02:35,277 Train Loss: 0.0152081, Val Loss: 0.0147698 +2025-03-12 04:02:35,277 Epoch 1740/2000 +2025-03-12 04:03:00,863 Current Learning Rate: 0.0002061074 +2025-03-12 04:03:00,863 Train Loss: 0.0152079, Val Loss: 0.0147722 +2025-03-12 04:03:00,863 Epoch 1741/2000 +2025-03-12 04:03:26,315 Current Learning Rate: 0.0001997899 +2025-03-12 04:03:26,316 Train Loss: 0.0152077, Val Loss: 0.0147704 +2025-03-12 04:03:26,316 Epoch 1742/2000 +2025-03-12 04:03:50,887 Current Learning Rate: 0.0001935465 +2025-03-12 04:03:50,890 
Train Loss: 0.0152070, Val Loss: 0.0147681 +2025-03-12 04:03:50,891 Epoch 1743/2000 +2025-03-12 04:04:16,268 Current Learning Rate: 0.0001873787 +2025-03-12 04:04:16,271 Train Loss: 0.0152066, Val Loss: 0.0147672 +2025-03-12 04:04:16,271 Epoch 1744/2000 +2025-03-12 04:04:41,708 Current Learning Rate: 0.0001812880 +2025-03-12 04:04:41,711 Train Loss: 0.0152061, Val Loss: 0.0147665 +2025-03-12 04:04:41,711 Epoch 1745/2000 +2025-03-12 04:05:06,962 Current Learning Rate: 0.0001752760 +2025-03-12 04:05:06,965 Train Loss: 0.0152055, Val Loss: 0.0147660 +2025-03-12 04:05:06,965 Epoch 1746/2000 +2025-03-12 04:05:32,319 Current Learning Rate: 0.0001693441 +2025-03-12 04:05:32,324 Train Loss: 0.0152050, Val Loss: 0.0147656 +2025-03-12 04:05:32,324 Epoch 1747/2000 +2025-03-12 04:05:57,320 Current Learning Rate: 0.0001634937 +2025-03-12 04:05:57,323 Train Loss: 0.0152046, Val Loss: 0.0147652 +2025-03-12 04:05:57,324 Epoch 1748/2000 +2025-03-12 04:06:22,918 Current Learning Rate: 0.0001577264 +2025-03-12 04:06:22,921 Train Loss: 0.0152042, Val Loss: 0.0147646 +2025-03-12 04:06:22,921 Epoch 1749/2000 +2025-03-12 04:06:47,792 Current Learning Rate: 0.0001520436 +2025-03-12 04:06:47,797 Train Loss: 0.0152039, Val Loss: 0.0147641 +2025-03-12 04:06:47,798 Epoch 1750/2000 +2025-03-12 04:07:12,920 Current Learning Rate: 0.0001464466 +2025-03-12 04:07:12,924 Train Loss: 0.0152037, Val Loss: 0.0147639 +2025-03-12 04:07:12,924 Epoch 1751/2000 +2025-03-12 04:07:38,790 Current Learning Rate: 0.0001409369 +2025-03-12 04:07:38,795 Train Loss: 0.0152036, Val Loss: 0.0147634 +2025-03-12 04:07:38,795 Epoch 1752/2000 +2025-03-12 04:08:03,695 Current Learning Rate: 0.0001355157 +2025-03-12 04:08:03,698 Train Loss: 0.0152031, Val Loss: 0.0147633 +2025-03-12 04:08:03,699 Epoch 1753/2000 +2025-03-12 04:08:28,441 Current Learning Rate: 0.0001301845 +2025-03-12 04:08:28,444 Train Loss: 0.0152027, Val Loss: 0.0147629 +2025-03-12 04:08:28,444 Epoch 1754/2000 +2025-03-12 04:08:53,777 Current Learning 
Rate: 0.0001249445 +2025-03-12 04:08:53,780 Train Loss: 0.0152024, Val Loss: 0.0147628 +2025-03-12 04:08:53,780 Epoch 1755/2000 +2025-03-12 04:09:18,969 Current Learning Rate: 0.0001197970 +2025-03-12 04:09:18,970 Train Loss: 0.0152023, Val Loss: 0.0147634 +2025-03-12 04:09:18,970 Epoch 1756/2000 +2025-03-12 04:09:44,211 Current Learning Rate: 0.0001147434 +2025-03-12 04:09:44,211 Train Loss: 0.0152021, Val Loss: 0.0147650 +2025-03-12 04:09:44,211 Epoch 1757/2000 +2025-03-12 04:10:09,526 Current Learning Rate: 0.0001097848 +2025-03-12 04:10:09,527 Train Loss: 0.0152016, Val Loss: 0.0147631 +2025-03-12 04:10:09,527 Epoch 1758/2000 +2025-03-12 04:10:35,388 Current Learning Rate: 0.0001049225 +2025-03-12 04:10:35,392 Train Loss: 0.0152009, Val Loss: 0.0147617 +2025-03-12 04:10:35,393 Epoch 1759/2000 +2025-03-12 04:11:00,453 Current Learning Rate: 0.0001001577 +2025-03-12 04:11:00,456 Train Loss: 0.0152005, Val Loss: 0.0147611 +2025-03-12 04:11:00,456 Epoch 1760/2000 +2025-03-12 04:11:25,399 Current Learning Rate: 0.0000954915 +2025-03-12 04:11:25,403 Train Loss: 0.0152002, Val Loss: 0.0147608 +2025-03-12 04:11:25,403 Epoch 1761/2000 +2025-03-12 04:11:50,563 Current Learning Rate: 0.0000909251 +2025-03-12 04:11:50,567 Train Loss: 0.0151999, Val Loss: 0.0147607 +2025-03-12 04:11:50,567 Epoch 1762/2000 +2025-03-12 04:12:15,645 Current Learning Rate: 0.0000864597 +2025-03-12 04:12:15,648 Train Loss: 0.0151996, Val Loss: 0.0147605 +2025-03-12 04:12:15,649 Epoch 1763/2000 +2025-03-12 04:12:40,398 Current Learning Rate: 0.0000820963 +2025-03-12 04:12:40,398 Train Loss: 0.0151994, Val Loss: 0.0147606 +2025-03-12 04:12:40,401 Epoch 1764/2000 +2025-03-12 04:13:05,286 Current Learning Rate: 0.0000778360 +2025-03-12 04:13:05,290 Train Loss: 0.0151992, Val Loss: 0.0147604 +2025-03-12 04:13:05,290 Epoch 1765/2000 +2025-03-12 04:13:29,802 Current Learning Rate: 0.0000736799 +2025-03-12 04:13:29,807 Train Loss: 0.0151988, Val Loss: 0.0147601 +2025-03-12 04:13:29,807 Epoch 1766/2000 
+2025-03-12 04:13:55,490 Current Learning Rate: 0.0000696290 +2025-03-12 04:13:55,494 Train Loss: 0.0151986, Val Loss: 0.0147601 +2025-03-12 04:13:55,494 Epoch 1767/2000 +2025-03-12 04:14:20,459 Current Learning Rate: 0.0000656842 +2025-03-12 04:14:20,461 Train Loss: 0.0151984, Val Loss: 0.0147599 +2025-03-12 04:14:20,462 Epoch 1768/2000 +2025-03-12 04:14:45,773 Current Learning Rate: 0.0000618467 +2025-03-12 04:14:45,776 Train Loss: 0.0151982, Val Loss: 0.0147595 +2025-03-12 04:14:45,777 Epoch 1769/2000 +2025-03-12 04:15:11,172 Current Learning Rate: 0.0000581172 +2025-03-12 04:15:11,175 Train Loss: 0.0151980, Val Loss: 0.0147589 +2025-03-12 04:15:11,175 Epoch 1770/2000 +2025-03-12 04:15:36,347 Current Learning Rate: 0.0000544967 +2025-03-12 04:15:36,350 Train Loss: 0.0151978, Val Loss: 0.0147587 +2025-03-12 04:15:36,351 Epoch 1771/2000 +2025-03-12 04:16:01,423 Current Learning Rate: 0.0000509862 +2025-03-12 04:16:01,427 Train Loss: 0.0151975, Val Loss: 0.0147586 +2025-03-12 04:16:01,427 Epoch 1772/2000 +2025-03-12 04:16:26,736 Current Learning Rate: 0.0000475865 +2025-03-12 04:16:26,740 Train Loss: 0.0151973, Val Loss: 0.0147584 +2025-03-12 04:16:26,740 Epoch 1773/2000 +2025-03-12 04:16:51,264 Current Learning Rate: 0.0000442984 +2025-03-12 04:16:51,270 Train Loss: 0.0151971, Val Loss: 0.0147583 +2025-03-12 04:16:51,271 Epoch 1774/2000 +2025-03-12 04:17:16,360 Current Learning Rate: 0.0000411227 +2025-03-12 04:17:16,364 Train Loss: 0.0151969, Val Loss: 0.0147581 +2025-03-12 04:17:16,364 Epoch 1775/2000 +2025-03-12 04:17:41,832 Current Learning Rate: 0.0000380602 +2025-03-12 04:17:41,841 Train Loss: 0.0151967, Val Loss: 0.0147581 +2025-03-12 04:17:41,841 Epoch 1776/2000 +2025-03-12 04:18:06,600 Current Learning Rate: 0.0000351118 +2025-03-12 04:18:06,600 Train Loss: 0.0151965, Val Loss: 0.0147581 +2025-03-12 04:18:06,601 Epoch 1777/2000 +2025-03-12 04:18:31,514 Current Learning Rate: 0.0000322780 +2025-03-12 04:18:31,515 Train Loss: 0.0151964, Val Loss: 0.0147581 
+2025-03-12 04:18:31,515 Epoch 1778/2000 +2025-03-12 04:18:56,874 Current Learning Rate: 0.0000295596 +2025-03-12 04:18:56,877 Train Loss: 0.0151963, Val Loss: 0.0147580 +2025-03-12 04:18:56,877 Epoch 1779/2000 +2025-03-12 04:19:22,334 Current Learning Rate: 0.0000269573 +2025-03-12 04:19:22,338 Train Loss: 0.0151961, Val Loss: 0.0147578 +2025-03-12 04:19:22,338 Epoch 1780/2000 +2025-03-12 04:19:48,352 Current Learning Rate: 0.0000244717 +2025-03-12 04:19:48,356 Train Loss: 0.0151959, Val Loss: 0.0147576 +2025-03-12 04:19:48,356 Epoch 1781/2000 +2025-03-12 04:20:14,109 Current Learning Rate: 0.0000221035 +2025-03-12 04:20:14,113 Train Loss: 0.0151958, Val Loss: 0.0147575 +2025-03-12 04:20:14,113 Epoch 1782/2000 +2025-03-12 04:20:39,398 Current Learning Rate: 0.0000198532 +2025-03-12 04:20:39,398 Train Loss: 0.0151957, Val Loss: 0.0147576 +2025-03-12 04:20:39,399 Epoch 1783/2000 +2025-03-12 04:21:05,348 Current Learning Rate: 0.0000177213 +2025-03-12 04:21:05,352 Train Loss: 0.0151956, Val Loss: 0.0147575 +2025-03-12 04:21:05,353 Epoch 1784/2000 +2025-03-12 04:21:30,944 Current Learning Rate: 0.0000157084 +2025-03-12 04:21:30,947 Train Loss: 0.0151955, Val Loss: 0.0147574 +2025-03-12 04:21:30,948 Epoch 1785/2000 +2025-03-12 04:21:56,625 Current Learning Rate: 0.0000138150 +2025-03-12 04:21:56,629 Train Loss: 0.0151954, Val Loss: 0.0147572 +2025-03-12 04:21:56,630 Epoch 1786/2000 +2025-03-12 04:22:21,949 Current Learning Rate: 0.0000120416 +2025-03-12 04:22:21,953 Train Loss: 0.0151953, Val Loss: 0.0147571 +2025-03-12 04:22:21,953 Epoch 1787/2000 +2025-03-12 04:22:47,176 Current Learning Rate: 0.0000103886 +2025-03-12 04:22:47,180 Train Loss: 0.0151952, Val Loss: 0.0147570 +2025-03-12 04:22:47,180 Epoch 1788/2000 +2025-03-12 04:23:12,366 Current Learning Rate: 0.0000088564 +2025-03-12 04:23:12,370 Train Loss: 0.0151951, Val Loss: 0.0147570 +2025-03-12 04:23:12,370 Epoch 1789/2000 +2025-03-12 04:23:37,542 Current Learning Rate: 0.0000074453 +2025-03-12 04:23:37,542 
Train Loss: 0.0151951, Val Loss: 0.0147570 +2025-03-12 04:23:37,543 Epoch 1790/2000 +2025-03-12 04:24:03,265 Current Learning Rate: 0.0000061558 +2025-03-12 04:24:03,265 Train Loss: 0.0151950, Val Loss: 0.0147570 +2025-03-12 04:24:03,265 Epoch 1791/2000 +2025-03-12 04:24:28,446 Current Learning Rate: 0.0000049882 +2025-03-12 04:24:28,450 Train Loss: 0.0151950, Val Loss: 0.0147570 +2025-03-12 04:24:28,450 Epoch 1792/2000 +2025-03-12 04:24:54,270 Current Learning Rate: 0.0000039426 +2025-03-12 04:24:54,273 Train Loss: 0.0151949, Val Loss: 0.0147570 +2025-03-12 04:24:54,273 Epoch 1793/2000 +2025-03-12 04:25:19,477 Current Learning Rate: 0.0000030195 +2025-03-12 04:25:19,481 Train Loss: 0.0151949, Val Loss: 0.0147570 +2025-03-12 04:25:19,481 Epoch 1794/2000 +2025-03-12 04:25:44,626 Current Learning Rate: 0.0000022190 +2025-03-12 04:25:44,630 Train Loss: 0.0151948, Val Loss: 0.0147569 +2025-03-12 04:25:44,630 Epoch 1795/2000 +2025-03-12 04:26:10,549 Current Learning Rate: 0.0000015413 +2025-03-12 04:26:10,552 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:26:10,552 Epoch 1796/2000 +2025-03-12 04:26:35,891 Current Learning Rate: 0.0000009866 +2025-03-12 04:26:35,896 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:26:35,896 Epoch 1797/2000 +2025-03-12 04:27:01,337 Current Learning Rate: 0.0000005551 +2025-03-12 04:27:01,343 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:27:01,343 Epoch 1798/2000 +2025-03-12 04:27:26,758 Current Learning Rate: 0.0000002467 +2025-03-12 04:27:26,761 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:27:26,761 Epoch 1799/2000 +2025-03-12 04:27:51,571 Current Learning Rate: 0.0000000617 +2025-03-12 04:27:51,575 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:27:51,575 Epoch 1800/2000 +2025-03-12 04:28:16,948 Current Learning Rate: 0.0000000000 +2025-03-12 04:28:16,948 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:28:16,949 Epoch 1801/2000 +2025-03-12 04:28:42,661 Current Learning 
Rate: 0.0000000617 +2025-03-12 04:28:42,661 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:28:42,662 Epoch 1802/2000 +2025-03-12 04:29:08,057 Current Learning Rate: 0.0000002467 +2025-03-12 04:29:08,057 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:29:08,057 Epoch 1803/2000 +2025-03-12 04:29:33,380 Current Learning Rate: 0.0000005551 +2025-03-12 04:29:33,385 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:29:33,386 Epoch 1804/2000 +2025-03-12 04:29:59,282 Current Learning Rate: 0.0000009866 +2025-03-12 04:29:59,282 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:29:59,283 Epoch 1805/2000 +2025-03-12 04:30:24,512 Current Learning Rate: 0.0000015413 +2025-03-12 04:30:24,513 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:30:24,513 Epoch 1806/2000 +2025-03-12 04:30:49,906 Current Learning Rate: 0.0000022190 +2025-03-12 04:30:49,906 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:30:49,906 Epoch 1807/2000 +2025-03-12 04:31:15,354 Current Learning Rate: 0.0000030195 +2025-03-12 04:31:15,354 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:31:15,355 Epoch 1808/2000 +2025-03-12 04:31:41,265 Current Learning Rate: 0.0000039426 +2025-03-12 04:31:41,265 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:31:41,266 Epoch 1809/2000 +2025-03-12 04:32:06,213 Current Learning Rate: 0.0000049882 +2025-03-12 04:32:06,214 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:32:06,215 Epoch 1810/2000 +2025-03-12 04:32:31,182 Current Learning Rate: 0.0000061558 +2025-03-12 04:32:31,182 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:32:31,183 Epoch 1811/2000 +2025-03-12 04:32:57,188 Current Learning Rate: 0.0000074453 +2025-03-12 04:32:57,192 Train Loss: 0.0151947, Val Loss: 0.0147567 +2025-03-12 04:32:57,192 Epoch 1812/2000 +2025-03-12 04:33:22,717 Current Learning Rate: 0.0000088564 +2025-03-12 04:33:22,720 Train Loss: 0.0151947, Val Loss: 0.0147567 +2025-03-12 04:33:22,721 Epoch 1813/2000 
+2025-03-12 04:33:48,368 Current Learning Rate: 0.0000103886 +2025-03-12 04:33:48,369 Train Loss: 0.0151948, Val Loss: 0.0147568 +2025-03-12 04:33:48,369 Epoch 1814/2000 +2025-03-12 04:34:13,439 Current Learning Rate: 0.0000120416 +2025-03-12 04:34:13,439 Train Loss: 0.0151947, Val Loss: 0.0147567 +2025-03-12 04:34:13,440 Epoch 1815/2000 +2025-03-12 04:34:39,268 Current Learning Rate: 0.0000138150 +2025-03-12 04:34:39,272 Train Loss: 0.0151946, Val Loss: 0.0147563 +2025-03-12 04:34:39,272 Epoch 1816/2000 +2025-03-12 04:35:05,202 Current Learning Rate: 0.0000157084 +2025-03-12 04:35:05,202 Train Loss: 0.0151946, Val Loss: 0.0147563 +2025-03-12 04:35:05,203 Epoch 1817/2000 +2025-03-12 04:35:30,429 Current Learning Rate: 0.0000177213 +2025-03-12 04:35:30,434 Train Loss: 0.0151945, Val Loss: 0.0147563 +2025-03-12 04:35:30,434 Epoch 1818/2000 +2025-03-12 04:35:55,236 Current Learning Rate: 0.0000198532 +2025-03-12 04:35:55,248 Train Loss: 0.0151944, Val Loss: 0.0147561 +2025-03-12 04:35:55,249 Epoch 1819/2000 +2025-03-12 04:36:21,248 Current Learning Rate: 0.0000221035 +2025-03-12 04:36:21,248 Train Loss: 0.0151944, Val Loss: 0.0147564 +2025-03-12 04:36:21,248 Epoch 1820/2000 +2025-03-12 04:36:46,921 Current Learning Rate: 0.0000244717 +2025-03-12 04:36:46,925 Train Loss: 0.0151944, Val Loss: 0.0147560 +2025-03-12 04:36:46,925 Epoch 1821/2000 +2025-03-12 04:37:11,856 Current Learning Rate: 0.0000269573 +2025-03-12 04:37:11,857 Train Loss: 0.0151943, Val Loss: 0.0147561 +2025-03-12 04:37:11,857 Epoch 1822/2000 +2025-03-12 04:37:37,717 Current Learning Rate: 0.0000295596 +2025-03-12 04:37:37,725 Train Loss: 0.0151943, Val Loss: 0.0147560 +2025-03-12 04:37:37,725 Epoch 1823/2000 +2025-03-12 04:38:03,157 Current Learning Rate: 0.0000322780 +2025-03-12 04:38:03,161 Train Loss: 0.0151942, Val Loss: 0.0147553 +2025-03-12 04:38:03,161 Epoch 1824/2000 +2025-03-12 04:38:28,538 Current Learning Rate: 0.0000351118 +2025-03-12 04:38:28,543 Train Loss: 0.0151940, Val Loss: 0.0147548 
+2025-03-12 04:38:28,543 Epoch 1825/2000 +2025-03-12 04:38:54,778 Current Learning Rate: 0.0000380602 +2025-03-12 04:38:54,797 Train Loss: 0.0151939, Val Loss: 0.0147545 +2025-03-12 04:38:54,798 Epoch 1826/2000 +2025-03-12 04:39:20,159 Current Learning Rate: 0.0000411227 +2025-03-12 04:39:20,162 Train Loss: 0.0151939, Val Loss: 0.0147543 +2025-03-12 04:39:20,163 Epoch 1827/2000 +2025-03-12 04:39:45,964 Current Learning Rate: 0.0000442984 +2025-03-12 04:39:45,965 Train Loss: 0.0151937, Val Loss: 0.0147544 +2025-03-12 04:39:45,965 Epoch 1828/2000 +2025-03-12 04:40:11,496 Current Learning Rate: 0.0000475865 +2025-03-12 04:40:11,496 Train Loss: 0.0151937, Val Loss: 0.0147548 +2025-03-12 04:40:11,497 Epoch 1829/2000 +2025-03-12 04:40:37,027 Current Learning Rate: 0.0000509862 +2025-03-12 04:40:37,028 Train Loss: 0.0151939, Val Loss: 0.0147545 +2025-03-12 04:40:37,028 Epoch 1830/2000 +2025-03-12 04:41:01,815 Current Learning Rate: 0.0000544967 +2025-03-12 04:41:01,816 Train Loss: 0.0151936, Val Loss: 0.0147547 +2025-03-12 04:41:01,816 Epoch 1831/2000 +2025-03-12 04:41:27,140 Current Learning Rate: 0.0000581172 +2025-03-12 04:41:27,141 Train Loss: 0.0151936, Val Loss: 0.0147548 +2025-03-12 04:41:27,141 Epoch 1832/2000 +2025-03-12 04:41:52,943 Current Learning Rate: 0.0000618467 +2025-03-12 04:41:52,943 Train Loss: 0.0151934, Val Loss: 0.0147551 +2025-03-12 04:41:52,944 Epoch 1833/2000 +2025-03-12 04:42:18,163 Current Learning Rate: 0.0000656842 +2025-03-12 04:42:18,164 Train Loss: 0.0151934, Val Loss: 0.0147554 +2025-03-12 04:42:18,164 Epoch 1834/2000 +2025-03-12 04:42:43,310 Current Learning Rate: 0.0000696290 +2025-03-12 04:42:43,311 Train Loss: 0.0151933, Val Loss: 0.0147557 +2025-03-12 04:42:43,311 Epoch 1835/2000 +2025-03-12 04:43:08,070 Current Learning Rate: 0.0000736799 +2025-03-12 04:43:08,070 Train Loss: 0.0151933, Val Loss: 0.0147559 +2025-03-12 04:43:08,070 Epoch 1836/2000 +2025-03-12 04:43:33,125 Current Learning Rate: 0.0000778360 +2025-03-12 04:43:33,125 
Train Loss: 0.0151932, Val Loss: 0.0147559 +2025-03-12 04:43:33,126 Epoch 1837/2000 +2025-03-12 04:43:58,183 Current Learning Rate: 0.0000820963 +2025-03-12 04:43:58,184 Train Loss: 0.0151931, Val Loss: 0.0147555 +2025-03-12 04:43:58,184 Epoch 1838/2000 +2025-03-12 04:44:23,367 Current Learning Rate: 0.0000864597 +2025-03-12 04:44:23,367 Train Loss: 0.0151929, Val Loss: 0.0147545 +2025-03-12 04:44:23,367 Epoch 1839/2000 +2025-03-12 04:44:48,653 Current Learning Rate: 0.0000909251 +2025-03-12 04:44:48,655 Train Loss: 0.0151926, Val Loss: 0.0147532 +2025-03-12 04:44:48,656 Epoch 1840/2000 +2025-03-12 04:45:14,168 Current Learning Rate: 0.0000954915 +2025-03-12 04:45:14,173 Train Loss: 0.0151927, Val Loss: 0.0147525 +2025-03-12 04:45:14,173 Epoch 1841/2000 +2025-03-12 04:45:40,207 Current Learning Rate: 0.0001001577 +2025-03-12 04:45:40,212 Train Loss: 0.0151929, Val Loss: 0.0147512 +2025-03-12 04:45:40,212 Epoch 1842/2000 +2025-03-12 04:46:06,194 Current Learning Rate: 0.0001049225 +2025-03-12 04:46:06,195 Train Loss: 0.0151923, Val Loss: 0.0147518 +2025-03-12 04:46:06,195 Epoch 1843/2000 +2025-03-12 04:46:30,976 Current Learning Rate: 0.0001097848 +2025-03-12 04:46:30,976 Train Loss: 0.0151920, Val Loss: 0.0147513 +2025-03-12 04:46:30,976 Epoch 1844/2000 +2025-03-12 04:46:56,590 Current Learning Rate: 0.0001147434 +2025-03-12 04:46:56,590 Train Loss: 0.0151917, Val Loss: 0.0147523 +2025-03-12 04:46:56,591 Epoch 1845/2000 +2025-03-12 04:47:21,623 Current Learning Rate: 0.0001197970 +2025-03-12 04:47:21,624 Train Loss: 0.0151914, Val Loss: 0.0147522 +2025-03-12 04:47:21,624 Epoch 1846/2000 +2025-03-12 04:47:47,633 Current Learning Rate: 0.0001249445 +2025-03-12 04:47:47,636 Train Loss: 0.0151913, Val Loss: 0.0147509 +2025-03-12 04:47:47,637 Epoch 1847/2000 +2025-03-12 04:48:12,918 Current Learning Rate: 0.0001301845 +2025-03-12 04:48:12,922 Train Loss: 0.0151909, Val Loss: 0.0147496 +2025-03-12 04:48:12,923 Epoch 1848/2000 +2025-03-12 04:48:38,321 Current Learning 
Rate: 0.0001355157 +2025-03-12 04:48:38,321 Train Loss: 0.0151915, Val Loss: 0.0147563 +2025-03-12 04:48:38,322 Epoch 1849/2000 +2025-03-12 04:49:03,732 Current Learning Rate: 0.0001409369 +2025-03-12 04:49:03,732 Train Loss: 0.0151919, Val Loss: 0.0147585 +2025-03-12 04:49:03,733 Epoch 1850/2000 +2025-03-12 04:49:29,433 Current Learning Rate: 0.0001464466 +2025-03-12 04:49:29,433 Train Loss: 0.0151915, Val Loss: 0.0147650 +2025-03-12 04:49:29,434 Epoch 1851/2000 +2025-03-12 04:49:54,853 Current Learning Rate: 0.0001520436 +2025-03-12 04:49:54,854 Train Loss: 0.0151924, Val Loss: 0.0147558 +2025-03-12 04:49:54,854 Epoch 1852/2000 +2025-03-12 04:50:20,628 Current Learning Rate: 0.0001577264 +2025-03-12 04:50:20,629 Train Loss: 0.0151916, Val Loss: 0.0147575 +2025-03-12 04:50:20,629 Epoch 1853/2000 +2025-03-12 04:50:46,168 Current Learning Rate: 0.0001634937 +2025-03-12 04:50:46,168 Train Loss: 0.0151910, Val Loss: 0.0147602 +2025-03-12 04:50:46,169 Epoch 1854/2000 +2025-03-12 04:51:11,569 Current Learning Rate: 0.0001693441 +2025-03-12 04:51:11,570 Train Loss: 0.0151906, Val Loss: 0.0147749 +2025-03-12 04:51:11,570 Epoch 1855/2000 +2025-03-12 04:51:36,844 Current Learning Rate: 0.0001752760 +2025-03-12 04:51:36,845 Train Loss: 0.0151960, Val Loss: 0.0147518 +2025-03-12 04:51:36,845 Epoch 1856/2000 +2025-03-12 04:52:01,289 Current Learning Rate: 0.0001812880 +2025-03-12 04:52:01,294 Train Loss: 0.0151890, Val Loss: 0.0147493 +2025-03-12 04:52:01,294 Epoch 1857/2000 +2025-03-12 04:52:26,824 Current Learning Rate: 0.0001873787 +2025-03-12 04:52:26,824 Train Loss: 0.0151908, Val Loss: 0.0147542 +2025-03-12 04:52:26,824 Epoch 1858/2000 +2025-03-12 04:52:52,080 Current Learning Rate: 0.0001935465 +2025-03-12 04:52:52,083 Train Loss: 0.0151919, Val Loss: 0.0147474 +2025-03-12 04:52:52,083 Epoch 1859/2000 +2025-03-12 04:53:16,699 Current Learning Rate: 0.0001997899 +2025-03-12 04:53:16,700 Train Loss: 0.0151906, Val Loss: 0.0147559 +2025-03-12 04:53:16,700 Epoch 1860/2000 
+2025-03-12 04:53:42,415 Current Learning Rate: 0.0002061074 +2025-03-12 04:53:42,415 Train Loss: 0.0151922, Val Loss: 0.0147481 +2025-03-12 04:53:42,416 Epoch 1861/2000 +2025-03-12 04:54:08,037 Current Learning Rate: 0.0002124974 +2025-03-12 04:54:08,037 Train Loss: 0.0151920, Val Loss: 0.0147490 +2025-03-12 04:54:08,037 Epoch 1862/2000 +2025-03-12 04:54:33,672 Current Learning Rate: 0.0002189583 +2025-03-12 04:54:33,672 Train Loss: 0.0151916, Val Loss: 0.0147526 +2025-03-12 04:54:33,672 Epoch 1863/2000 +2025-03-12 04:54:59,096 Current Learning Rate: 0.0002254886 +2025-03-12 04:54:59,096 Train Loss: 0.0151926, Val Loss: 0.0147501 +2025-03-12 04:54:59,097 Epoch 1864/2000 +2025-03-12 04:55:24,829 Current Learning Rate: 0.0002320866 +2025-03-12 04:55:24,830 Train Loss: 0.0151917, Val Loss: 0.0147540 +2025-03-12 04:55:24,831 Epoch 1865/2000 +2025-03-12 04:55:50,309 Current Learning Rate: 0.0002387507 +2025-03-12 04:55:50,309 Train Loss: 0.0151920, Val Loss: 0.0147550 +2025-03-12 04:55:50,309 Epoch 1866/2000 +2025-03-12 04:56:15,732 Current Learning Rate: 0.0002454793 +2025-03-12 04:56:15,733 Train Loss: 0.0151892, Val Loss: 0.0147590 +2025-03-12 04:56:15,733 Epoch 1867/2000 +2025-03-12 04:56:41,277 Current Learning Rate: 0.0002522707 +2025-03-12 04:56:41,281 Train Loss: 0.0152074, Val Loss: 0.0147432 +2025-03-12 04:56:41,282 Epoch 1868/2000 +2025-03-12 04:57:06,510 Current Learning Rate: 0.0002591232 +2025-03-12 04:57:06,512 Train Loss: 0.0151846, Val Loss: 0.0147521 +2025-03-12 04:57:06,512 Epoch 1869/2000 +2025-03-12 04:57:31,850 Current Learning Rate: 0.0002660351 +2025-03-12 04:57:31,854 Train Loss: 0.0151901, Val Loss: 0.0147684 +2025-03-12 04:57:31,855 Epoch 1870/2000 +2025-03-12 04:57:57,336 Current Learning Rate: 0.0002730048 +2025-03-12 04:57:57,337 Train Loss: 0.0152043, Val Loss: 0.0147451 +2025-03-12 04:57:57,337 Epoch 1871/2000 +2025-03-12 04:58:22,401 Current Learning Rate: 0.0002800304 +2025-03-12 04:58:22,402 Train Loss: 0.0151843, Val Loss: 0.0147548 
+2025-03-12 04:58:22,402 Epoch 1872/2000 +2025-03-12 04:58:47,393 Current Learning Rate: 0.0002871104 +2025-03-12 04:58:47,394 Train Loss: 0.0151983, Val Loss: 0.0147454 +2025-03-12 04:58:47,394 Epoch 1873/2000 +2025-03-12 04:59:12,493 Current Learning Rate: 0.0002942428 +2025-03-12 04:59:12,494 Train Loss: 0.0151930, Val Loss: 0.0147508 +2025-03-12 04:59:12,494 Epoch 1874/2000 +2025-03-12 04:59:38,024 Current Learning Rate: 0.0003014261 +2025-03-12 04:59:38,025 Train Loss: 0.0151915, Val Loss: 0.0147440 +2025-03-12 04:59:38,025 Epoch 1875/2000 +2025-03-12 05:00:02,895 Current Learning Rate: 0.0003086583 +2025-03-12 05:00:02,899 Train Loss: 0.0152060, Val Loss: 0.0147408 +2025-03-12 05:00:02,899 Epoch 1876/2000 +2025-03-12 05:00:28,212 Current Learning Rate: 0.0003159377 +2025-03-12 05:00:28,212 Train Loss: 0.0151856, Val Loss: 0.0147650 +2025-03-12 05:00:28,213 Epoch 1877/2000 +2025-03-12 05:00:53,236 Current Learning Rate: 0.0003232626 +2025-03-12 05:00:53,236 Train Loss: 0.0152027, Val Loss: 0.0147458 +2025-03-12 05:00:53,237 Epoch 1878/2000 +2025-03-12 05:01:18,390 Current Learning Rate: 0.0003306310 +2025-03-12 05:01:18,390 Train Loss: 0.0151918, Val Loss: 0.0147484 +2025-03-12 05:01:18,390 Epoch 1879/2000 +2025-03-12 05:01:43,587 Current Learning Rate: 0.0003380413 +2025-03-12 05:01:43,587 Train Loss: 0.0152008, Val Loss: 0.0147555 +2025-03-12 05:01:43,587 Epoch 1880/2000 +2025-03-12 05:02:09,357 Current Learning Rate: 0.0003454915 +2025-03-12 05:02:09,357 Train Loss: 0.0151919, Val Loss: 0.0147496 +2025-03-12 05:02:09,358 Epoch 1881/2000 +2025-03-12 05:02:34,666 Current Learning Rate: 0.0003529798 +2025-03-12 05:02:34,666 Train Loss: 0.0152045, Val Loss: 0.0147429 +2025-03-12 05:02:34,667 Epoch 1882/2000 +2025-03-12 05:02:59,331 Current Learning Rate: 0.0003605044 +2025-03-12 05:02:59,332 Train Loss: 0.0151926, Val Loss: 0.0147499 +2025-03-12 05:02:59,332 Epoch 1883/2000 +2025-03-12 05:03:24,881 Current Learning Rate: 0.0003680635 +2025-03-12 05:03:24,881 
Train Loss: 0.0152048, Val Loss: 0.0147424 +2025-03-12 05:03:24,882 Epoch 1884/2000 +2025-03-12 05:03:49,988 Current Learning Rate: 0.0003756551 +2025-03-12 05:03:49,988 Train Loss: 0.0151943, Val Loss: 0.0147487 +2025-03-12 05:03:49,988 Epoch 1885/2000 +2025-03-12 05:04:14,976 Current Learning Rate: 0.0003832773 +2025-03-12 05:04:14,976 Train Loss: 0.0152027, Val Loss: 0.0147587 +2025-03-12 05:04:14,977 Epoch 1886/2000 +2025-03-12 05:04:40,141 Current Learning Rate: 0.0003909284 +2025-03-12 05:04:40,142 Train Loss: 0.0151919, Val Loss: 0.0147568 +2025-03-12 05:04:40,142 Epoch 1887/2000 +2025-03-12 05:05:05,456 Current Learning Rate: 0.0003986064 +2025-03-12 05:05:05,456 Train Loss: 0.0152005, Val Loss: 0.0147595 +2025-03-12 05:05:05,457 Epoch 1888/2000 +2025-03-12 05:05:30,607 Current Learning Rate: 0.0004063093 +2025-03-12 05:05:30,611 Train Loss: 0.0152057, Val Loss: 0.0147390 +2025-03-12 05:05:30,611 Epoch 1889/2000 +2025-03-12 05:05:56,104 Current Learning Rate: 0.0004140354 +2025-03-12 05:05:56,104 Train Loss: 0.0151962, Val Loss: 0.0148222 +2025-03-12 05:05:56,105 Epoch 1890/2000 +2025-03-12 05:06:22,116 Current Learning Rate: 0.0004217828 +2025-03-12 05:06:22,116 Train Loss: 0.0152116, Val Loss: 0.0147413 +2025-03-12 05:06:22,117 Epoch 1891/2000 +2025-03-12 05:06:47,843 Current Learning Rate: 0.0004295494 +2025-03-12 05:06:47,844 Train Loss: 0.0151928, Val Loss: 0.0148345 +2025-03-12 05:06:47,844 Epoch 1892/2000 +2025-03-12 05:07:13,312 Current Learning Rate: 0.0004373334 +2025-03-12 05:07:13,313 Train Loss: 0.0152289, Val Loss: 0.0147398 +2025-03-12 05:07:13,313 Epoch 1893/2000 +2025-03-12 05:07:38,653 Current Learning Rate: 0.0004451328 +2025-03-12 05:07:38,655 Train Loss: 0.0151895, Val Loss: 0.0147376 +2025-03-12 05:07:38,656 Epoch 1894/2000 +2025-03-12 05:08:04,292 Current Learning Rate: 0.0004529458 +2025-03-12 05:08:04,293 Train Loss: 0.0151986, Val Loss: 0.0148531 +2025-03-12 05:08:04,293 Epoch 1895/2000 +2025-03-12 05:08:29,880 Current Learning 
Rate: 0.0004607705 +2025-03-12 05:08:29,880 Train Loss: 0.0152016, Val Loss: 0.0148193 +2025-03-12 05:08:29,881 Epoch 1896/2000 +2025-03-12 05:08:55,096 Current Learning Rate: 0.0004686047 +2025-03-12 05:08:55,097 Train Loss: 0.0152108, Val Loss: 0.0147636 +2025-03-12 05:08:55,097 Epoch 1897/2000 +2025-03-12 05:09:21,284 Current Learning Rate: 0.0004764468 +2025-03-12 05:09:21,284 Train Loss: 0.0151955, Val Loss: 0.0147733 +2025-03-12 05:09:21,285 Epoch 1898/2000 +2025-03-12 05:09:46,668 Current Learning Rate: 0.0004842946 +2025-03-12 05:09:46,672 Train Loss: 0.0152078, Val Loss: 0.0147350 +2025-03-12 05:09:46,672 Epoch 1899/2000 +2025-03-12 05:10:11,790 Current Learning Rate: 0.0004921463 +2025-03-12 05:10:11,790 Train Loss: 0.0152377, Val Loss: 0.0147381 +2025-03-12 05:10:11,791 Epoch 1900/2000 +2025-03-12 05:10:36,999 Current Learning Rate: 0.0005000000 +2025-03-12 05:10:37,000 Train Loss: 0.0151796, Val Loss: 0.0147416 +2025-03-12 05:10:37,000 Epoch 1901/2000 +2025-03-12 05:11:02,174 Current Learning Rate: 0.0005078537 +2025-03-12 05:11:02,175 Train Loss: 0.0152438, Val Loss: 0.0147494 +2025-03-12 05:11:02,175 Epoch 1902/2000 +2025-03-12 05:11:27,623 Current Learning Rate: 0.0005157054 +2025-03-12 05:11:27,624 Train Loss: 0.0151836, Val Loss: 0.0147367 +2025-03-12 05:11:27,624 Epoch 1903/2000 +2025-03-12 05:11:53,641 Current Learning Rate: 0.0005235532 +2025-03-12 05:11:53,642 Train Loss: 0.0152108, Val Loss: 0.0147363 +2025-03-12 05:11:53,642 Epoch 1904/2000 +2025-03-12 05:12:19,120 Current Learning Rate: 0.0005313953 +2025-03-12 05:12:19,121 Train Loss: 0.0152272, Val Loss: 0.0147487 +2025-03-12 05:12:19,121 Epoch 1905/2000 +2025-03-12 05:12:44,649 Current Learning Rate: 0.0005392295 +2025-03-12 05:12:44,649 Train Loss: 0.0151894, Val Loss: 0.0148322 +2025-03-12 05:12:44,649 Epoch 1906/2000 +2025-03-12 05:13:10,530 Current Learning Rate: 0.0005470542 +2025-03-12 05:13:10,535 Train Loss: 0.0152244, Val Loss: 0.0147342 +2025-03-12 05:13:10,536 Epoch 1907/2000 
+2025-03-12 05:13:36,278 Current Learning Rate: 0.0005548672 +2025-03-12 05:13:36,279 Train Loss: 0.0152014, Val Loss: 0.0147448 +2025-03-12 05:13:36,279 Epoch 1908/2000 +2025-03-12 05:14:01,816 Current Learning Rate: 0.0005626666 +2025-03-12 05:14:01,818 Train Loss: 0.0152323, Val Loss: 0.0149606 +2025-03-12 05:14:01,821 Epoch 1909/2000 +2025-03-12 05:14:27,209 Current Learning Rate: 0.0005704506 +2025-03-12 05:14:27,210 Train Loss: 0.0152027, Val Loss: 0.0147361 +2025-03-12 05:14:27,210 Epoch 1910/2000 +2025-03-12 05:14:52,680 Current Learning Rate: 0.0005782172 +2025-03-12 05:14:52,684 Train Loss: 0.0152081, Val Loss: 0.0147301 +2025-03-12 05:14:52,684 Epoch 1911/2000 +2025-03-12 05:15:18,430 Current Learning Rate: 0.0005859646 +2025-03-12 05:15:18,430 Train Loss: 0.0152317, Val Loss: 0.0147590 +2025-03-12 05:15:18,431 Epoch 1912/2000 +2025-03-12 05:15:44,090 Current Learning Rate: 0.0005936907 +2025-03-12 05:15:44,090 Train Loss: 0.0151992, Val Loss: 0.0147546 +2025-03-12 05:15:44,090 Epoch 1913/2000 +2025-03-12 05:16:09,603 Current Learning Rate: 0.0006013936 +2025-03-12 05:16:09,604 Train Loss: 0.0152021, Val Loss: 0.0147303 +2025-03-12 05:16:09,604 Epoch 1914/2000 +2025-03-12 05:16:35,330 Current Learning Rate: 0.0006090716 +2025-03-12 05:16:35,331 Train Loss: 0.0152747, Val Loss: 0.0147380 +2025-03-12 05:16:35,331 Epoch 1915/2000 +2025-03-12 05:17:01,362 Current Learning Rate: 0.0006167227 +2025-03-12 05:17:01,362 Train Loss: 0.0151822, Val Loss: 0.0147301 +2025-03-12 05:17:01,362 Epoch 1916/2000 +2025-03-12 05:17:26,815 Current Learning Rate: 0.0006243449 +2025-03-12 05:17:26,816 Train Loss: 0.0152251, Val Loss: 0.0147333 +2025-03-12 05:17:26,816 Epoch 1917/2000 +2025-03-12 05:17:52,778 Current Learning Rate: 0.0006319365 +2025-03-12 05:17:52,779 Train Loss: 0.0152001, Val Loss: 0.0147726 +2025-03-12 05:17:52,779 Epoch 1918/2000 +2025-03-12 05:18:18,498 Current Learning Rate: 0.0006394956 +2025-03-12 05:18:18,498 Train Loss: 0.0152454, Val Loss: 0.0147436 
+2025-03-12 05:18:18,498 Epoch 1919/2000 +2025-03-12 05:18:44,178 Current Learning Rate: 0.0006470202 +2025-03-12 05:18:44,178 Train Loss: 0.0152077, Val Loss: 0.0147562 +2025-03-12 05:18:44,178 Epoch 1920/2000 +2025-03-12 05:19:09,892 Current Learning Rate: 0.0006545085 +2025-03-12 05:19:09,893 Train Loss: 0.0152090, Val Loss: 0.0147731 +2025-03-12 05:19:09,893 Epoch 1921/2000 +2025-03-12 05:19:35,403 Current Learning Rate: 0.0006619587 +2025-03-12 05:19:35,404 Train Loss: 0.0152396, Val Loss: 0.0147325 +2025-03-12 05:19:35,404 Epoch 1922/2000 +2025-03-12 05:20:01,186 Current Learning Rate: 0.0006693690 +2025-03-12 05:20:01,187 Train Loss: 0.0152134, Val Loss: 0.0147418 +2025-03-12 05:20:01,187 Epoch 1923/2000 +2025-03-12 05:20:27,346 Current Learning Rate: 0.0006767374 +2025-03-12 05:20:27,346 Train Loss: 0.0152206, Val Loss: 0.0147309 +2025-03-12 05:20:27,347 Epoch 1924/2000 +2025-03-12 05:20:53,430 Current Learning Rate: 0.0006840623 +2025-03-12 05:20:53,430 Train Loss: 0.0152320, Val Loss: 0.0147345 +2025-03-12 05:20:53,431 Epoch 1925/2000 +2025-03-12 05:21:19,137 Current Learning Rate: 0.0006913417 +2025-03-12 05:21:19,137 Train Loss: 0.0152289, Val Loss: 0.0147726 +2025-03-12 05:21:19,137 Epoch 1926/2000 +2025-03-12 05:21:45,085 Current Learning Rate: 0.0006985739 +2025-03-12 05:21:45,086 Train Loss: 0.0152172, Val Loss: 0.0149713 +2025-03-12 05:21:45,086 Epoch 1927/2000 +2025-03-12 05:22:10,967 Current Learning Rate: 0.0007057572 +2025-03-12 05:22:10,967 Train Loss: 0.0152517, Val Loss: 0.0147517 +2025-03-12 05:22:10,967 Epoch 1928/2000 +2025-03-12 05:22:36,905 Current Learning Rate: 0.0007128896 +2025-03-12 05:22:36,906 Train Loss: 0.0151988, Val Loss: 0.0147344 +2025-03-12 05:22:36,906 Epoch 1929/2000 +2025-03-12 05:23:02,368 Current Learning Rate: 0.0007199696 +2025-03-12 05:23:02,369 Train Loss: 0.0152349, Val Loss: 0.0147642 +2025-03-12 05:23:02,369 Epoch 1930/2000 +2025-03-12 05:23:27,289 Current Learning Rate: 0.0007269952 +2025-03-12 05:23:27,289 
Train Loss: 0.0152218, Val Loss: 0.0147732 +2025-03-12 05:23:27,289 Epoch 1931/2000 +2025-03-12 05:23:52,040 Current Learning Rate: 0.0007339649 +2025-03-12 05:23:52,041 Train Loss: 0.0152165, Val Loss: 0.0148335 +2025-03-12 05:23:52,041 Epoch 1932/2000 +2025-03-12 05:24:16,775 Current Learning Rate: 0.0007408768 +2025-03-12 05:24:16,776 Train Loss: 0.0152522, Val Loss: 0.0148845 +2025-03-12 05:24:16,776 Epoch 1933/2000 +2025-03-12 05:24:41,895 Current Learning Rate: 0.0007477293 +2025-03-12 05:24:41,895 Train Loss: 0.0152257, Val Loss: 0.0148562 +2025-03-12 05:24:41,895 Epoch 1934/2000 +2025-03-12 05:25:07,066 Current Learning Rate: 0.0007545207 +2025-03-12 05:25:07,069 Train Loss: 0.0152113, Val Loss: 0.0147293 +2025-03-12 05:25:07,070 Epoch 1935/2000 +2025-03-12 05:25:32,410 Current Learning Rate: 0.0007612493 +2025-03-12 05:25:32,411 Train Loss: 0.0152432, Val Loss: 0.0147298 +2025-03-12 05:25:32,411 Epoch 1936/2000 +2025-03-12 05:25:57,803 Current Learning Rate: 0.0007679134 +2025-03-12 05:25:57,804 Train Loss: 0.0152208, Val Loss: 0.0148164 +2025-03-12 05:25:57,804 Epoch 1937/2000 +2025-03-12 05:26:23,592 Current Learning Rate: 0.0007745114 +2025-03-12 05:26:23,592 Train Loss: 0.0152381, Val Loss: 0.0148267 +2025-03-12 05:26:23,592 Epoch 1938/2000 +2025-03-12 05:26:48,844 Current Learning Rate: 0.0007810417 +2025-03-12 05:26:48,847 Train Loss: 0.0152082, Val Loss: 0.0147256 +2025-03-12 05:26:48,847 Epoch 1939/2000 +2025-03-12 05:27:14,659 Current Learning Rate: 0.0007875026 +2025-03-12 05:27:14,660 Train Loss: 0.0152747, Val Loss: 0.0147271 +2025-03-12 05:27:14,660 Epoch 1940/2000 +2025-03-12 05:27:39,403 Current Learning Rate: 0.0007938926 +2025-03-12 05:27:39,404 Train Loss: 0.0152286, Val Loss: 0.0147873 +2025-03-12 05:27:39,404 Epoch 1941/2000 +2025-03-12 05:28:04,519 Current Learning Rate: 0.0008002101 +2025-03-12 05:28:04,519 Train Loss: 0.0152145, Val Loss: 0.0147852 +2025-03-12 05:28:04,520 Epoch 1942/2000 +2025-03-12 05:28:30,156 Current Learning 
Rate: 0.0008064535 +2025-03-12 05:28:30,157 Train Loss: 0.0152637, Val Loss: 0.0147560 +2025-03-12 05:28:30,157 Epoch 1943/2000 +2025-03-12 05:28:55,718 Current Learning Rate: 0.0008126213 +2025-03-12 05:28:55,718 Train Loss: 0.0151991, Val Loss: 0.0147300 +2025-03-12 05:28:55,719 Epoch 1944/2000 +2025-03-12 05:29:21,172 Current Learning Rate: 0.0008187120 +2025-03-12 05:29:21,173 Train Loss: 0.0152453, Val Loss: 0.0147899 +2025-03-12 05:29:21,173 Epoch 1945/2000 +2025-03-12 05:29:47,055 Current Learning Rate: 0.0008247240 +2025-03-12 05:29:47,055 Train Loss: 0.0152672, Val Loss: 0.0147674 +2025-03-12 05:29:47,056 Epoch 1946/2000 +2025-03-12 05:30:12,730 Current Learning Rate: 0.0008306559 +2025-03-12 05:30:12,730 Train Loss: 0.0152118, Val Loss: 0.0147734 +2025-03-12 05:30:12,730 Epoch 1947/2000 +2025-03-12 05:30:38,394 Current Learning Rate: 0.0008365063 +2025-03-12 05:30:38,394 Train Loss: 0.0152409, Val Loss: 0.0147725 +2025-03-12 05:30:38,395 Epoch 1948/2000 +2025-03-12 05:31:04,398 Current Learning Rate: 0.0008422736 +2025-03-12 05:31:04,399 Train Loss: 0.0152198, Val Loss: 0.0148515 +2025-03-12 05:31:04,399 Epoch 1949/2000 +2025-03-12 05:31:29,980 Current Learning Rate: 0.0008479564 +2025-03-12 05:31:29,980 Train Loss: 0.0152406, Val Loss: 0.0147464 +2025-03-12 05:31:29,981 Epoch 1950/2000 +2025-03-12 05:31:55,724 Current Learning Rate: 0.0008535534 +2025-03-12 05:31:55,725 Train Loss: 0.0152590, Val Loss: 0.0148202 +2025-03-12 05:31:55,725 Epoch 1951/2000 +2025-03-12 05:32:21,353 Current Learning Rate: 0.0008590631 +2025-03-12 05:32:21,353 Train Loss: 0.0152137, Val Loss: 0.0148372 +2025-03-12 05:32:21,354 Epoch 1952/2000 +2025-03-12 05:32:47,132 Current Learning Rate: 0.0008644843 +2025-03-12 05:32:47,132 Train Loss: 0.0152430, Val Loss: 0.0147377 +2025-03-12 05:32:47,133 Epoch 1953/2000 +2025-03-12 05:33:13,000 Current Learning Rate: 0.0008698155 +2025-03-12 05:33:13,001 Train Loss: 0.0152260, Val Loss: 0.0149879 +2025-03-12 05:33:13,001 Epoch 1954/2000 
+2025-03-12 05:33:38,958 Current Learning Rate: 0.0008750555 +2025-03-12 05:33:38,959 Train Loss: 0.0152458, Val Loss: 0.0155259 +2025-03-12 05:33:38,959 Epoch 1955/2000 +2025-03-12 05:34:04,760 Current Learning Rate: 0.0008802030 +2025-03-12 05:34:04,760 Train Loss: 0.0152813, Val Loss: 0.0147342 +2025-03-12 05:34:04,761 Epoch 1956/2000 +2025-03-12 05:34:30,450 Current Learning Rate: 0.0008852566 +2025-03-12 05:34:30,450 Train Loss: 0.0152162, Val Loss: 0.0147397 +2025-03-12 05:34:30,450 Epoch 1957/2000 +2025-03-12 05:34:56,285 Current Learning Rate: 0.0008902152 +2025-03-12 05:34:56,286 Train Loss: 0.0152690, Val Loss: 0.0148376 +2025-03-12 05:34:56,286 Epoch 1958/2000 +2025-03-12 05:35:21,533 Current Learning Rate: 0.0008950775 +2025-03-12 05:35:21,534 Train Loss: 0.0152129, Val Loss: 0.0148042 +2025-03-12 05:35:21,534 Epoch 1959/2000 +2025-03-12 05:35:47,203 Current Learning Rate: 0.0008998423 +2025-03-12 05:35:47,208 Train Loss: 0.0152183, Val Loss: 0.0147161 +2025-03-12 05:35:47,208 Epoch 1960/2000 +2025-03-12 05:36:12,895 Current Learning Rate: 0.0009045085 +2025-03-12 05:36:12,896 Train Loss: 0.0152876, Val Loss: 0.0147207 +2025-03-12 05:36:12,896 Epoch 1961/2000 +2025-03-12 05:36:38,900 Current Learning Rate: 0.0009090749 +2025-03-12 05:36:38,900 Train Loss: 0.0152046, Val Loss: 0.0148399 +2025-03-12 05:36:38,901 Epoch 1962/2000 +2025-03-12 05:37:05,026 Current Learning Rate: 0.0009135403 +2025-03-12 05:37:05,026 Train Loss: 0.0152550, Val Loss: 0.0147615 +2025-03-12 05:37:05,027 Epoch 1963/2000 +2025-03-12 05:37:30,659 Current Learning Rate: 0.0009179037 +2025-03-12 05:37:30,659 Train Loss: 0.0151979, Val Loss: 0.0147755 +2025-03-12 05:37:30,660 Epoch 1964/2000 +2025-03-12 05:37:56,272 Current Learning Rate: 0.0009221640 +2025-03-12 05:37:56,273 Train Loss: 0.0153266, Val Loss: 0.0147249 +2025-03-12 05:37:56,273 Epoch 1965/2000 +2025-03-12 05:38:21,902 Current Learning Rate: 0.0009263201 +2025-03-12 05:38:21,903 Train Loss: 0.0152184, Val Loss: 0.0147255 
+2025-03-12 05:38:21,903 Epoch 1966/2000 +2025-03-12 05:38:48,054 Current Learning Rate: 0.0009303710 +2025-03-12 05:38:48,054 Train Loss: 0.0152381, Val Loss: 0.0147631 +2025-03-12 05:38:48,054 Epoch 1967/2000 +2025-03-12 05:39:14,021 Current Learning Rate: 0.0009343158 +2025-03-12 05:39:14,022 Train Loss: 0.0152287, Val Loss: 0.0147568 +2025-03-12 05:39:14,022 Epoch 1968/2000 +2025-03-12 05:39:39,285 Current Learning Rate: 0.0009381533 +2025-03-12 05:39:39,286 Train Loss: 0.0152310, Val Loss: 0.0148229 +2025-03-12 05:39:39,286 Epoch 1969/2000 +2025-03-12 05:40:05,668 Current Learning Rate: 0.0009418828 +2025-03-12 05:40:05,668 Train Loss: 0.0152460, Val Loss: 0.0147546 +2025-03-12 05:40:05,669 Epoch 1970/2000 +2025-03-12 05:40:31,223 Current Learning Rate: 0.0009455033 +2025-03-12 05:40:31,224 Train Loss: 0.0152473, Val Loss: 0.0148128 +2025-03-12 05:40:31,224 Epoch 1971/2000 +2025-03-12 05:40:56,111 Current Learning Rate: 0.0009490138 +2025-03-12 05:40:56,111 Train Loss: 0.0152210, Val Loss: 0.0148088 +2025-03-12 05:40:56,112 Epoch 1972/2000 +2025-03-12 05:41:21,364 Current Learning Rate: 0.0009524135 +2025-03-12 05:41:21,364 Train Loss: 0.0152887, Val Loss: 0.0147301 +2025-03-12 05:41:21,364 Epoch 1973/2000 +2025-03-12 05:41:46,429 Current Learning Rate: 0.0009557016 +2025-03-12 05:41:46,430 Train Loss: 0.0152215, Val Loss: 0.0147942 +2025-03-12 05:41:46,430 Epoch 1974/2000 +2025-03-12 05:42:11,754 Current Learning Rate: 0.0009588773 +2025-03-12 05:42:11,754 Train Loss: 0.0152434, Val Loss: 0.0147265 +2025-03-12 05:42:11,755 Epoch 1975/2000 +2025-03-12 05:42:37,210 Current Learning Rate: 0.0009619398 +2025-03-12 05:42:37,210 Train Loss: 0.0152217, Val Loss: 0.0147213 +2025-03-12 05:42:37,210 Epoch 1976/2000 +2025-03-12 05:43:02,145 Current Learning Rate: 0.0009648882 +2025-03-12 05:43:02,145 Train Loss: 0.0152537, Val Loss: 0.0147880 +2025-03-12 05:43:02,145 Epoch 1977/2000 +2025-03-12 05:43:27,572 Current Learning Rate: 0.0009677220 +2025-03-12 05:43:27,573 
Train Loss: 0.0152191, Val Loss: 0.0147654 +2025-03-12 05:43:27,573 Epoch 1978/2000 +2025-03-12 05:43:53,223 Current Learning Rate: 0.0009704404 +2025-03-12 05:43:53,224 Train Loss: 0.0152565, Val Loss: 0.0148722 +2025-03-12 05:43:53,224 Epoch 1979/2000 +2025-03-12 05:44:19,196 Current Learning Rate: 0.0009730427 +2025-03-12 05:44:19,200 Train Loss: 0.0152493, Val Loss: 0.0147126 +2025-03-12 05:44:19,201 Epoch 1980/2000 +2025-03-12 05:44:45,067 Current Learning Rate: 0.0009755283 +2025-03-12 05:44:45,067 Train Loss: 0.0152500, Val Loss: 0.0147399 +2025-03-12 05:44:45,068 Epoch 1981/2000 +2025-03-12 05:45:10,968 Current Learning Rate: 0.0009778965 +2025-03-12 05:45:10,969 Train Loss: 0.0152297, Val Loss: 0.0147134 +2025-03-12 05:45:10,969 Epoch 1982/2000 +2025-03-12 05:45:36,816 Current Learning Rate: 0.0009801468 +2025-03-12 05:45:36,816 Train Loss: 0.0153226, Val Loss: 0.0147501 +2025-03-12 05:45:36,816 Epoch 1983/2000 +2025-03-12 05:46:01,748 Current Learning Rate: 0.0009822787 +2025-03-12 05:46:01,749 Train Loss: 0.0151821, Val Loss: 0.0147153 +2025-03-12 05:46:01,749 Epoch 1984/2000 +2025-03-12 05:46:27,550 Current Learning Rate: 0.0009842916 +2025-03-12 05:46:27,551 Train Loss: 0.0152300, Val Loss: 0.0147393 +2025-03-12 05:46:27,551 Epoch 1985/2000 +2025-03-12 05:46:52,655 Current Learning Rate: 0.0009861850 +2025-03-12 05:46:52,656 Train Loss: 0.0152322, Val Loss: 0.0147885 +2025-03-12 05:46:52,656 Epoch 1986/2000 +2025-03-12 05:47:18,228 Current Learning Rate: 0.0009879584 +2025-03-12 05:47:18,229 Train Loss: 0.0152535, Val Loss: 0.0149040 +2025-03-12 05:47:18,229 Epoch 1987/2000 +2025-03-12 05:47:44,067 Current Learning Rate: 0.0009896114 +2025-03-12 05:47:44,067 Train Loss: 0.0152698, Val Loss: 0.0147836 +2025-03-12 05:47:44,067 Epoch 1988/2000 +2025-03-12 05:48:09,555 Current Learning Rate: 0.0009911436 +2025-03-12 05:48:09,555 Train Loss: 0.0152436, Val Loss: 0.0148183 +2025-03-12 05:48:09,556 Epoch 1989/2000 +2025-03-12 05:48:35,054 Current Learning 
Rate: 0.0009925547 +2025-03-12 05:48:35,054 Train Loss: 0.0152137, Val Loss: 0.0147658 +2025-03-12 05:48:35,054 Epoch 1990/2000 +2025-03-12 05:49:00,622 Current Learning Rate: 0.0009938442 +2025-03-12 05:49:00,623 Train Loss: 0.0152843, Val Loss: 0.0149405 +2025-03-12 05:49:00,623 Epoch 1991/2000 +2025-03-12 05:49:26,622 Current Learning Rate: 0.0009950118 +2025-03-12 05:49:26,623 Train Loss: 0.0152143, Val Loss: 0.0147259 +2025-03-12 05:49:26,623 Epoch 1992/2000 +2025-03-12 05:49:52,369 Current Learning Rate: 0.0009960574 +2025-03-12 05:49:52,370 Train Loss: 0.0152128, Val Loss: 0.0148666 +2025-03-12 05:49:52,370 Epoch 1993/2000 +2025-03-12 05:50:17,755 Current Learning Rate: 0.0009969805 +2025-03-12 05:50:17,755 Train Loss: 0.0152388, Val Loss: 0.0147676 +2025-03-12 05:50:17,756 Epoch 1994/2000 +2025-03-12 05:50:43,711 Current Learning Rate: 0.0009977810 +2025-03-12 05:50:43,712 Train Loss: 0.0152419, Val Loss: 0.0148385 +2025-03-12 05:50:43,712 Epoch 1995/2000 +2025-03-12 05:51:09,346 Current Learning Rate: 0.0009984587 +2025-03-12 05:51:09,346 Train Loss: 0.0152210, Val Loss: 0.0147252 +2025-03-12 05:51:09,347 Epoch 1996/2000 +2025-03-12 05:51:35,292 Current Learning Rate: 0.0009990134 +2025-03-12 05:51:35,293 Train Loss: 0.0152246, Val Loss: 0.0147276 +2025-03-12 05:51:35,294 Epoch 1997/2000 +2025-03-12 05:52:00,909 Current Learning Rate: 0.0009994449 +2025-03-12 05:52:00,910 Train Loss: 0.0152745, Val Loss: 0.0148017 +2025-03-12 05:52:00,910 Epoch 1998/2000 +2025-03-12 05:52:26,601 Current Learning Rate: 0.0009997533 +2025-03-12 05:52:26,602 Train Loss: 0.0151945, Val Loss: 0.0148532 +2025-03-12 05:52:26,602 Epoch 1999/2000 +2025-03-12 05:52:52,359 Current Learning Rate: 0.0009999383 +2025-03-12 05:52:52,359 Train Loss: 0.0152932, Val Loss: 0.0148309 +2025-03-12 05:52:52,359 Epoch 2000/2000 +2025-03-12 05:53:17,503 Current Learning Rate: 0.0010000000 +2025-03-12 05:53:17,507 Train Loss: 0.0152018, Val Loss: 0.0147116 +2025-03-12 05:53:23,528 Testing completed 
and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250224_inference.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250224_inference.log new file mode 100644 index 0000000000000000000000000000000000000000..ac264645f07c8a2b3584196aae3994f617af26db --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250224_inference.log @@ -0,0 +1,49 @@ +2025-02-24 16:13:59,753 加载模型失败:Error(s) in loading state_dict for Dit: + Missing key(s) in state_dict: "enc.enc.0.conv.weight", "enc.enc.0.conv.bias", "enc.enc.0.norm.weight", "enc.enc.0.norm.bias", "enc.enc.0.norm.running_mean", "enc.enc.0.norm.running_var", "enc.enc.1.conv.weight", "enc.enc.1.conv.bias", "enc.enc.1.norm.weight", "enc.enc.1.norm.bias", "enc.enc.1.norm.running_mean", "enc.enc.1.norm.running_var", "enc.enc.2.conv.weight", "enc.enc.2.conv.bias", "enc.enc.2.norm.weight", "enc.enc.2.norm.bias", "enc.enc.2.norm.running_mean", "enc.enc.2.norm.running_var", "enc.enc.3.conv.weight", "enc.enc.3.conv.bias", "enc.enc.3.norm.weight", "enc.enc.3.norm.bias", "enc.enc.3.norm.running_mean", "enc.enc.3.norm.running_var", "hid.norm.weight", "hid.norm.bias", "hid.enc.0.branch1.weight", "hid.enc.0.branch1.bias", "hid.enc.0.branch2.weight", "hid.enc.0.branch2.bias", "hid.enc.0.branch3.weight", "hid.enc.0.branch3.bias", "hid.enc.0.branch4.weight", "hid.enc.0.branch4.bias", "hid.enc.0.branch5.weight", "hid.enc.0.branch5.bias", "hid.enc.0.conv.weight", "hid.enc.0.conv.bias", "hid.enc.0.norm.weight", "hid.enc.0.norm.bias", "hid.enc.0.norm.running_mean", "hid.enc.0.norm.running_var", "hid.enc.1.branch1.weight", "hid.enc.1.branch1.bias", "hid.enc.1.branch2.weight", "hid.enc.1.branch2.bias", "hid.enc.1.branch3.weight", "hid.enc.1.branch3.bias", "hid.enc.1.branch4.weight", "hid.enc.1.branch4.bias", "hid.enc.1.branch5.weight", "hid.enc.1.branch5.bias", "hid.enc.1.conv.weight", "hid.enc.1.conv.bias", "hid.enc.1.norm.weight", "hid.enc.1.norm.bias", "hid.enc.1.norm.running_mean", 
"hid.enc.1.norm.running_var", "hid.enc.2.branch1.weight", "hid.enc.2.branch1.bias", "hid.enc.2.branch2.weight", "hid.enc.2.branch2.bias", "hid.enc.2.branch3.weight", "hid.enc.2.branch3.bias", "hid.enc.2.branch4.weight", "hid.enc.2.branch4.bias", "hid.enc.2.branch5.weight", "hid.enc.2.branch5.bias", "hid.enc.2.conv.weight", "hid.enc.2.conv.bias", "hid.enc.2.norm.weight", "hid.enc.2.norm.bias", "hid.enc.2.norm.running_mean", "hid.enc.2.norm.running_var", "hid.enc.3.branch1.weight", "hid.enc.3.branch1.bias", "hid.enc.3.branch2.weight", "hid.enc.3.branch2.bias", "hid.enc.3.branch3.weight", "hid.enc.3.branch3.bias", "hid.enc.3.branch4.weight", "hid.enc.3.branch4.bias", "hid.enc.3.branch5.weight", "hid.enc.3.branch5.bias", "hid.enc.3.conv.weight", "hid.enc.3.conv.bias", "hid.enc.3.norm.weight", "hid.enc.3.norm.bias", "hid.enc.3.norm.running_mean", "hid.enc.3.norm.running_var", "hid.enc.4.branch1.weight", "hid.enc.4.branch1.bias", "hid.enc.4.branch2.weight", "hid.enc.4.branch2.bias", "hid.enc.4.branch3.weight", "hid.enc.4.branch3.bias", "hid.enc.4.branch4.weight", "hid.enc.4.branch4.bias", "hid.enc.4.branch5.weight", "hid.enc.4.branch5.bias", "hid.enc.4.conv.weight", "hid.enc.4.conv.bias", "hid.enc.4.norm.weight", "hid.enc.4.norm.bias", "hid.enc.4.norm.running_mean", "hid.enc.4.norm.running_var", "hid.enc.5.branch1.weight", "hid.enc.5.branch1.bias", "hid.enc.5.branch2.weight", "hid.enc.5.branch2.bias", "hid.enc.5.branch3.weight", "hid.enc.5.branch3.bias", "hid.enc.5.branch4.weight", "hid.enc.5.branch4.bias", "hid.enc.5.branch5.weight", "hid.enc.5.branch5.bias", "hid.enc.5.conv.weight", "hid.enc.5.conv.bias", "hid.enc.5.norm.weight", "hid.enc.5.norm.bias", "hid.enc.5.norm.running_mean", "hid.enc.5.norm.running_var", "hid.enc.6.branch1.weight", "hid.enc.6.branch1.bias", "hid.enc.6.branch2.weight", "hid.enc.6.branch2.bias", "hid.enc.6.branch3.weight", "hid.enc.6.branch3.bias", "hid.enc.6.branch4.weight", "hid.enc.6.branch4.bias", "hid.enc.6.branch5.weight", 
"hid.enc.6.branch5.bias", "hid.enc.6.conv.weight", "hid.enc.6.conv.bias", "hid.enc.6.norm.weight", "hid.enc.6.norm.bias", "hid.enc.6.norm.running_mean", "hid.enc.6.norm.running_var", "hid.enc.7.branch1.weight", "hid.enc.7.branch1.bias", "hid.enc.7.branch2.weight", "hid.enc.7.branch2.bias", "hid.enc.7.branch3.weight", "hid.enc.7.branch3.bias", "hid.enc.7.branch4.weight", "hid.enc.7.branch4.bias", "hid.enc.7.branch5.weight", "hid.enc.7.branch5.bias", "hid.enc.7.conv.weight", "hid.enc.7.conv.bias", "hid.enc.7.norm.weight", "hid.enc.7.norm.bias", "hid.enc.7.norm.running_mean", "hid.enc.7.norm.running_var", "hid.dec.0.branch1.weight", "hid.dec.0.branch1.bias", "hid.dec.0.branch2.weight", "hid.dec.0.branch2.bias", "hid.dec.0.branch3.weight", "hid.dec.0.branch3.bias", "hid.dec.0.branch4.weight", "hid.dec.0.branch4.bias", "hid.dec.0.branch5.weight", "hid.dec.0.branch5.bias", "hid.dec.0.conv.weight", "hid.dec.0.conv.bias", "hid.dec.0.norm.weight", "hid.dec.0.norm.bias", "hid.dec.0.norm.running_mean", "hid.dec.0.norm.running_var", "hid.dec.1.branch1.weight", "hid.dec.1.branch1.bias", "hid.dec.1.branch2.weight", "hid.dec.1.branch2.bias", "hid.dec.1.branch3.weight", "hid.dec.1.branch3.bias", "hid.dec.1.branch4.weight", "hid.dec.1.branch4.bias", "hid.dec.1.branch5.weight", "hid.dec.1.branch5.bias", "hid.dec.1.conv.weight", "hid.dec.1.conv.bias", "hid.dec.1.norm.weight", "hid.dec.1.norm.bias", "hid.dec.1.norm.running_mean", "hid.dec.1.norm.running_var", "hid.dec.2.branch1.weight", "hid.dec.2.branch1.bias", "hid.dec.2.branch2.weight", "hid.dec.2.branch2.bias", "hid.dec.2.branch3.weight", "hid.dec.2.branch3.bias", "hid.dec.2.branch4.weight", "hid.dec.2.branch4.bias", "hid.dec.2.branch5.weight", "hid.dec.2.branch5.bias", "hid.dec.2.conv.weight", "hid.dec.2.conv.bias", "hid.dec.2.norm.weight", "hid.dec.2.norm.bias", "hid.dec.2.norm.running_mean", "hid.dec.2.norm.running_var", "hid.dec.3.branch1.weight", "hid.dec.3.branch1.bias", "hid.dec.3.branch2.weight", "hid.dec.3.branch2.bias", 
"hid.dec.3.branch3.weight", "hid.dec.3.branch3.bias", "hid.dec.3.branch4.weight", "hid.dec.3.branch4.bias", "hid.dec.3.branch5.weight", "hid.dec.3.branch5.bias", "hid.dec.3.conv.weight", "hid.dec.3.conv.bias", "hid.dec.3.norm.weight", "hid.dec.3.norm.bias", "hid.dec.3.norm.running_mean", "hid.dec.3.norm.running_var", "hid.dec.4.branch1.weight", "hid.dec.4.branch1.bias", "hid.dec.4.branch2.weight", "hid.dec.4.branch2.bias", "hid.dec.4.branch3.weight", "hid.dec.4.branch3.bias", "hid.dec.4.branch4.weight", "hid.dec.4.branch4.bias", "hid.dec.4.branch5.weight", "hid.dec.4.branch5.bias", "hid.dec.4.conv.weight", "hid.dec.4.conv.bias", "hid.dec.4.norm.weight", "hid.dec.4.norm.bias", "hid.dec.4.norm.running_mean", "hid.dec.4.norm.running_var", "hid.dec.5.branch1.weight", "hid.dec.5.branch1.bias", "hid.dec.5.branch2.weight", "hid.dec.5.branch2.bias", "hid.dec.5.branch3.weight", "hid.dec.5.branch3.bias", "hid.dec.5.branch4.weight", "hid.dec.5.branch4.bias", "hid.dec.5.branch5.weight", "hid.dec.5.branch5.bias", "hid.dec.5.conv.weight", "hid.dec.5.conv.bias", "hid.dec.5.norm.weight", "hid.dec.5.norm.bias", "hid.dec.5.norm.running_mean", "hid.dec.5.norm.running_var", "hid.dec.6.branch1.weight", "hid.dec.6.branch1.bias", "hid.dec.6.branch2.weight", "hid.dec.6.branch2.bias", "hid.dec.6.branch3.weight", "hid.dec.6.branch3.bias", "hid.dec.6.branch4.weight", "hid.dec.6.branch4.bias", "hid.dec.6.branch5.weight", "hid.dec.6.branch5.bias", "hid.dec.6.conv.weight", "hid.dec.6.conv.bias", "hid.dec.6.norm.weight", "hid.dec.6.norm.bias", "hid.dec.6.norm.running_mean", "hid.dec.6.norm.running_var", "hid.dec.7.branch1.weight", "hid.dec.7.branch1.bias", "hid.dec.7.branch2.weight", "hid.dec.7.branch2.bias", "hid.dec.7.branch3.weight", "hid.dec.7.branch3.bias", "hid.dec.7.branch4.weight", "hid.dec.7.branch4.bias", "hid.dec.7.branch5.weight", "hid.dec.7.branch5.bias", "hid.dec.7.conv.weight", "hid.dec.7.conv.bias", "hid.dec.7.norm.weight", "hid.dec.7.norm.bias", "hid.dec.7.norm.running_mean", 
"hid.dec.7.norm.running_var", "dit_block.pos_embed", "dit_block.x_embedder.proj.weight", "dit_block.x_embedder.proj.bias", "dit_block.t_embedder.mlp.0.weight", "dit_block.t_embedder.mlp.0.bias", "dit_block.t_embedder.mlp.2.weight", "dit_block.t_embedder.mlp.2.bias", "dit_block.blocks.0.attn.qkv.weight", "dit_block.blocks.0.attn.qkv.bias", "dit_block.blocks.0.attn.proj.weight", "dit_block.blocks.0.attn.proj.bias", "dit_block.blocks.0.mlp.fc1.weight", "dit_block.blocks.0.mlp.fc1.bias", "dit_block.blocks.0.mlp.fc2.weight", "dit_block.blocks.0.mlp.fc2.bias", "dit_block.blocks.0.adaLN_modulation.1.weight", "dit_block.blocks.0.adaLN_modulation.1.bias", "dit_block.blocks.1.attn.qkv.weight", "dit_block.blocks.1.attn.qkv.bias", "dit_block.blocks.1.attn.proj.weight", "dit_block.blocks.1.attn.proj.bias", "dit_block.blocks.1.mlp.fc1.weight", "dit_block.blocks.1.mlp.fc1.bias", "dit_block.blocks.1.mlp.fc2.weight", "dit_block.blocks.1.mlp.fc2.bias", "dit_block.blocks.1.adaLN_modulation.1.weight", "dit_block.blocks.1.adaLN_modulation.1.bias", "dit_block.blocks.2.attn.qkv.weight", "dit_block.blocks.2.attn.qkv.bias", "dit_block.blocks.2.attn.proj.weight", "dit_block.blocks.2.attn.proj.bias", "dit_block.blocks.2.mlp.fc1.weight", "dit_block.blocks.2.mlp.fc1.bias", "dit_block.blocks.2.mlp.fc2.weight", "dit_block.blocks.2.mlp.fc2.bias", "dit_block.blocks.2.adaLN_modulation.1.weight", "dit_block.blocks.2.adaLN_modulation.1.bias", "dit_block.blocks.3.attn.qkv.weight", "dit_block.blocks.3.attn.qkv.bias", "dit_block.blocks.3.attn.proj.weight", "dit_block.blocks.3.attn.proj.bias", "dit_block.blocks.3.mlp.fc1.weight", "dit_block.blocks.3.mlp.fc1.bias", "dit_block.blocks.3.mlp.fc2.weight", "dit_block.blocks.3.mlp.fc2.bias", "dit_block.blocks.3.adaLN_modulation.1.weight", "dit_block.blocks.3.adaLN_modulation.1.bias", "dit_block.blocks.4.attn.qkv.weight", "dit_block.blocks.4.attn.qkv.bias", "dit_block.blocks.4.attn.proj.weight", "dit_block.blocks.4.attn.proj.bias", 
"dit_block.blocks.4.mlp.fc1.weight", "dit_block.blocks.4.mlp.fc1.bias", "dit_block.blocks.4.mlp.fc2.weight", "dit_block.blocks.4.mlp.fc2.bias", "dit_block.blocks.4.adaLN_modulation.1.weight", "dit_block.blocks.4.adaLN_modulation.1.bias", "dit_block.blocks.5.attn.qkv.weight", "dit_block.blocks.5.attn.qkv.bias", "dit_block.blocks.5.attn.proj.weight", "dit_block.blocks.5.attn.proj.bias", "dit_block.blocks.5.mlp.fc1.weight", "dit_block.blocks.5.mlp.fc1.bias", "dit_block.blocks.5.mlp.fc2.weight", "dit_block.blocks.5.mlp.fc2.bias", "dit_block.blocks.5.adaLN_modulation.1.weight", "dit_block.blocks.5.adaLN_modulation.1.bias", "dit_block.blocks.6.attn.qkv.weight", "dit_block.blocks.6.attn.qkv.bias", "dit_block.blocks.6.attn.proj.weight", "dit_block.blocks.6.attn.proj.bias", "dit_block.blocks.6.mlp.fc1.weight", "dit_block.blocks.6.mlp.fc1.bias", "dit_block.blocks.6.mlp.fc2.weight", "dit_block.blocks.6.mlp.fc2.bias", "dit_block.blocks.6.adaLN_modulation.1.weight", "dit_block.blocks.6.adaLN_modulation.1.bias", "dit_block.blocks.7.attn.qkv.weight", "dit_block.blocks.7.attn.qkv.bias", "dit_block.blocks.7.attn.proj.weight", "dit_block.blocks.7.attn.proj.bias", "dit_block.blocks.7.mlp.fc1.weight", "dit_block.blocks.7.mlp.fc1.bias", "dit_block.blocks.7.mlp.fc2.weight", "dit_block.blocks.7.mlp.fc2.bias", "dit_block.blocks.7.adaLN_modulation.1.weight", "dit_block.blocks.7.adaLN_modulation.1.bias", "dit_block.blocks.8.attn.qkv.weight", "dit_block.blocks.8.attn.qkv.bias", "dit_block.blocks.8.attn.proj.weight", "dit_block.blocks.8.attn.proj.bias", "dit_block.blocks.8.mlp.fc1.weight", "dit_block.blocks.8.mlp.fc1.bias", "dit_block.blocks.8.mlp.fc2.weight", "dit_block.blocks.8.mlp.fc2.bias", "dit_block.blocks.8.adaLN_modulation.1.weight", "dit_block.blocks.8.adaLN_modulation.1.bias", "dit_block.blocks.9.attn.qkv.weight", "dit_block.blocks.9.attn.qkv.bias", "dit_block.blocks.9.attn.proj.weight", "dit_block.blocks.9.attn.proj.bias", "dit_block.blocks.9.mlp.fc1.weight", 
"dit_block.blocks.9.mlp.fc1.bias", "dit_block.blocks.9.mlp.fc2.weight", "dit_block.blocks.9.mlp.fc2.bias", "dit_block.blocks.9.adaLN_modulation.1.weight", "dit_block.blocks.9.adaLN_modulation.1.bias", "dit_block.blocks.10.attn.qkv.weight", "dit_block.blocks.10.attn.qkv.bias", "dit_block.blocks.10.attn.proj.weight", "dit_block.blocks.10.attn.proj.bias", "dit_block.blocks.10.mlp.fc1.weight", "dit_block.blocks.10.mlp.fc1.bias", "dit_block.blocks.10.mlp.fc2.weight", "dit_block.blocks.10.mlp.fc2.bias", "dit_block.blocks.10.adaLN_modulation.1.weight", "dit_block.blocks.10.adaLN_modulation.1.bias", "dit_block.blocks.11.attn.qkv.weight", "dit_block.blocks.11.attn.qkv.bias", "dit_block.blocks.11.attn.proj.weight", "dit_block.blocks.11.attn.proj.bias", "dit_block.blocks.11.mlp.fc1.weight", "dit_block.blocks.11.mlp.fc1.bias", "dit_block.blocks.11.mlp.fc2.weight", "dit_block.blocks.11.mlp.fc2.bias", "dit_block.blocks.11.adaLN_modulation.1.weight", "dit_block.blocks.11.adaLN_modulation.1.bias", "dit_block.final_layer.linear.weight", "dit_block.final_layer.linear.bias", "dit_block.final_layer.adaLN_modulation.1.weight", "dit_block.final_layer.adaLN_modulation.1.bias", "dec.dec.0.conv.weight", "dec.dec.0.conv.bias", "dec.dec.0.norm.weight", "dec.dec.0.norm.bias", "dec.dec.0.norm.running_mean", "dec.dec.0.norm.running_var", "dec.dec.1.conv.weight", "dec.dec.1.conv.bias", "dec.dec.1.norm.weight", "dec.dec.1.norm.bias", "dec.dec.1.norm.running_mean", "dec.dec.1.norm.running_var", "dec.dec.2.conv.weight", "dec.dec.2.conv.bias", "dec.dec.2.norm.weight", "dec.dec.2.norm.bias", "dec.dec.2.norm.running_mean", "dec.dec.2.norm.running_var", "dec.dec.3.conv.weight", "dec.dec.3.conv.bias", "dec.dec.3.norm.weight", "dec.dec.3.norm.bias", "dec.dec.3.norm.running_mean", "dec.dec.3.norm.running_var", "dec.readout.weight", "dec.readout.bias". 
+ Unexpected key(s) in state_dict: "module.enc.enc.0.conv.weight", "module.enc.enc.0.conv.bias", "module.enc.enc.0.norm.weight", "module.enc.enc.0.norm.bias", "module.enc.enc.0.norm.running_mean", "module.enc.enc.0.norm.running_var", "module.enc.enc.0.norm.num_batches_tracked", "module.enc.enc.1.conv.weight", "module.enc.enc.1.conv.bias", "module.enc.enc.1.norm.weight", "module.enc.enc.1.norm.bias", "module.enc.enc.1.norm.running_mean", "module.enc.enc.1.norm.running_var", "module.enc.enc.1.norm.num_batches_tracked", "module.enc.enc.2.conv.weight", "module.enc.enc.2.conv.bias", "module.enc.enc.2.norm.weight", "module.enc.enc.2.norm.bias", "module.enc.enc.2.norm.running_mean", "module.enc.enc.2.norm.running_var", "module.enc.enc.2.norm.num_batches_tracked", "module.enc.enc.3.conv.weight", "module.enc.enc.3.conv.bias", "module.enc.enc.3.norm.weight", "module.enc.enc.3.norm.bias", "module.enc.enc.3.norm.running_mean", "module.enc.enc.3.norm.running_var", "module.enc.enc.3.norm.num_batches_tracked", "module.hid.norm.weight", "module.hid.norm.bias", "module.hid.enc.0.branch1.weight", "module.hid.enc.0.branch1.bias", "module.hid.enc.0.branch2.weight", "module.hid.enc.0.branch2.bias", "module.hid.enc.0.branch3.weight", "module.hid.enc.0.branch3.bias", "module.hid.enc.0.branch4.weight", "module.hid.enc.0.branch4.bias", "module.hid.enc.0.branch5.weight", "module.hid.enc.0.branch5.bias", "module.hid.enc.0.conv.weight", "module.hid.enc.0.conv.bias", "module.hid.enc.0.norm.weight", "module.hid.enc.0.norm.bias", "module.hid.enc.0.norm.running_mean", "module.hid.enc.0.norm.running_var", "module.hid.enc.0.norm.num_batches_tracked", "module.hid.enc.1.branch1.weight", "module.hid.enc.1.branch1.bias", "module.hid.enc.1.branch2.weight", "module.hid.enc.1.branch2.bias", "module.hid.enc.1.branch3.weight", "module.hid.enc.1.branch3.bias", "module.hid.enc.1.branch4.weight", "module.hid.enc.1.branch4.bias", "module.hid.enc.1.branch5.weight", "module.hid.enc.1.branch5.bias", 
"module.hid.enc.1.conv.weight", "module.hid.enc.1.conv.bias", "module.hid.enc.1.norm.weight", "module.hid.enc.1.norm.bias", "module.hid.enc.1.norm.running_mean", "module.hid.enc.1.norm.running_var", "module.hid.enc.1.norm.num_batches_tracked", "module.hid.enc.2.branch1.weight", "module.hid.enc.2.branch1.bias", "module.hid.enc.2.branch2.weight", "module.hid.enc.2.branch2.bias", "module.hid.enc.2.branch3.weight", "module.hid.enc.2.branch3.bias", "module.hid.enc.2.branch4.weight", "module.hid.enc.2.branch4.bias", "module.hid.enc.2.branch5.weight", "module.hid.enc.2.branch5.bias", "module.hid.enc.2.conv.weight", "module.hid.enc.2.conv.bias", "module.hid.enc.2.norm.weight", "module.hid.enc.2.norm.bias", "module.hid.enc.2.norm.running_mean", "module.hid.enc.2.norm.running_var", "module.hid.enc.2.norm.num_batches_tracked", "module.hid.enc.3.branch1.weight", "module.hid.enc.3.branch1.bias", "module.hid.enc.3.branch2.weight", "module.hid.enc.3.branch2.bias", "module.hid.enc.3.branch3.weight", "module.hid.enc.3.branch3.bias", "module.hid.enc.3.branch4.weight", "module.hid.enc.3.branch4.bias", "module.hid.enc.3.branch5.weight", "module.hid.enc.3.branch5.bias", "module.hid.enc.3.conv.weight", "module.hid.enc.3.conv.bias", "module.hid.enc.3.norm.weight", "module.hid.enc.3.norm.bias", "module.hid.enc.3.norm.running_mean", "module.hid.enc.3.norm.running_var", "module.hid.enc.3.norm.num_batches_tracked", "module.hid.enc.4.branch1.weight", "module.hid.enc.4.branch1.bias", "module.hid.enc.4.branch2.weight", "module.hid.enc.4.branch2.bias", "module.hid.enc.4.branch3.weight", "module.hid.enc.4.branch3.bias", "module.hid.enc.4.branch4.weight", "module.hid.enc.4.branch4.bias", "module.hid.enc.4.branch5.weight", "module.hid.enc.4.branch5.bias", "module.hid.enc.4.conv.weight", "module.hid.enc.4.conv.bias", "module.hid.enc.4.norm.weight", "module.hid.enc.4.norm.bias", "module.hid.enc.4.norm.running_mean", "module.hid.enc.4.norm.running_var", "module.hid.enc.4.norm.num_batches_tracked", 
"module.hid.enc.5.branch1.weight", "module.hid.enc.5.branch1.bias", "module.hid.enc.5.branch2.weight", "module.hid.enc.5.branch2.bias", "module.hid.enc.5.branch3.weight", "module.hid.enc.5.branch3.bias", "module.hid.enc.5.branch4.weight", "module.hid.enc.5.branch4.bias", "module.hid.enc.5.branch5.weight", "module.hid.enc.5.branch5.bias", "module.hid.enc.5.conv.weight", "module.hid.enc.5.conv.bias", "module.hid.enc.5.norm.weight", "module.hid.enc.5.norm.bias", "module.hid.enc.5.norm.running_mean", "module.hid.enc.5.norm.running_var", "module.hid.enc.5.norm.num_batches_tracked", "module.hid.enc.6.branch1.weight", "module.hid.enc.6.branch1.bias", "module.hid.enc.6.branch2.weight", "module.hid.enc.6.branch2.bias", "module.hid.enc.6.branch3.weight", "module.hid.enc.6.branch3.bias", "module.hid.enc.6.branch4.weight", "module.hid.enc.6.branch4.bias", "module.hid.enc.6.branch5.weight", "module.hid.enc.6.branch5.bias", "module.hid.enc.6.conv.weight", "module.hid.enc.6.conv.bias", "module.hid.enc.6.norm.weight", "module.hid.enc.6.norm.bias", "module.hid.enc.6.norm.running_mean", "module.hid.enc.6.norm.running_var", "module.hid.enc.6.norm.num_batches_tracked", "module.hid.enc.7.branch1.weight", "module.hid.enc.7.branch1.bias", "module.hid.enc.7.branch2.weight", "module.hid.enc.7.branch2.bias", "module.hid.enc.7.branch3.weight", "module.hid.enc.7.branch3.bias", "module.hid.enc.7.branch4.weight", "module.hid.enc.7.branch4.bias", "module.hid.enc.7.branch5.weight", "module.hid.enc.7.branch5.bias", "module.hid.enc.7.conv.weight", "module.hid.enc.7.conv.bias", "module.hid.enc.7.norm.weight", "module.hid.enc.7.norm.bias", "module.hid.enc.7.norm.running_mean", "module.hid.enc.7.norm.running_var", "module.hid.enc.7.norm.num_batches_tracked", "module.hid.dec.0.branch1.weight", "module.hid.dec.0.branch1.bias", "module.hid.dec.0.branch2.weight", "module.hid.dec.0.branch2.bias", "module.hid.dec.0.branch3.weight", "module.hid.dec.0.branch3.bias", "module.hid.dec.0.branch4.weight", 
"module.hid.dec.0.branch4.bias", "module.hid.dec.0.branch5.weight", "module.hid.dec.0.branch5.bias", "module.hid.dec.0.conv.weight", "module.hid.dec.0.conv.bias", "module.hid.dec.0.norm.weight", "module.hid.dec.0.norm.bias", "module.hid.dec.0.norm.running_mean", "module.hid.dec.0.norm.running_var", "module.hid.dec.0.norm.num_batches_tracked", "module.hid.dec.1.branch1.weight", "module.hid.dec.1.branch1.bias", "module.hid.dec.1.branch2.weight", "module.hid.dec.1.branch2.bias", "module.hid.dec.1.branch3.weight", "module.hid.dec.1.branch3.bias", "module.hid.dec.1.branch4.weight", "module.hid.dec.1.branch4.bias", "module.hid.dec.1.branch5.weight", "module.hid.dec.1.branch5.bias", "module.hid.dec.1.conv.weight", "module.hid.dec.1.conv.bias", "module.hid.dec.1.norm.weight", "module.hid.dec.1.norm.bias", "module.hid.dec.1.norm.running_mean", "module.hid.dec.1.norm.running_var", "module.hid.dec.1.norm.num_batches_tracked", "module.hid.dec.2.branch1.weight", "module.hid.dec.2.branch1.bias", "module.hid.dec.2.branch2.weight", "module.hid.dec.2.branch2.bias", "module.hid.dec.2.branch3.weight", "module.hid.dec.2.branch3.bias", "module.hid.dec.2.branch4.weight", "module.hid.dec.2.branch4.bias", "module.hid.dec.2.branch5.weight", "module.hid.dec.2.branch5.bias", "module.hid.dec.2.conv.weight", "module.hid.dec.2.conv.bias", "module.hid.dec.2.norm.weight", "module.hid.dec.2.norm.bias", "module.hid.dec.2.norm.running_mean", "module.hid.dec.2.norm.running_var", "module.hid.dec.2.norm.num_batches_tracked", "module.hid.dec.3.branch1.weight", "module.hid.dec.3.branch1.bias", "module.hid.dec.3.branch2.weight", "module.hid.dec.3.branch2.bias", "module.hid.dec.3.branch3.weight", "module.hid.dec.3.branch3.bias", "module.hid.dec.3.branch4.weight", "module.hid.dec.3.branch4.bias", "module.hid.dec.3.branch5.weight", "module.hid.dec.3.branch5.bias", "module.hid.dec.3.conv.weight", "module.hid.dec.3.conv.bias", "module.hid.dec.3.norm.weight", "module.hid.dec.3.norm.bias", 
"module.hid.dec.3.norm.running_mean", "module.hid.dec.3.norm.running_var", "module.hid.dec.3.norm.num_batches_tracked", "module.hid.dec.4.branch1.weight", "module.hid.dec.4.branch1.bias", "module.hid.dec.4.branch2.weight", "module.hid.dec.4.branch2.bias", "module.hid.dec.4.branch3.weight", "module.hid.dec.4.branch3.bias", "module.hid.dec.4.branch4.weight", "module.hid.dec.4.branch4.bias", "module.hid.dec.4.branch5.weight", "module.hid.dec.4.branch5.bias", "module.hid.dec.4.conv.weight", "module.hid.dec.4.conv.bias", "module.hid.dec.4.norm.weight", "module.hid.dec.4.norm.bias", "module.hid.dec.4.norm.running_mean", "module.hid.dec.4.norm.running_var", "module.hid.dec.4.norm.num_batches_tracked", "module.hid.dec.5.branch1.weight", "module.hid.dec.5.branch1.bias", "module.hid.dec.5.branch2.weight", "module.hid.dec.5.branch2.bias", "module.hid.dec.5.branch3.weight", "module.hid.dec.5.branch3.bias", "module.hid.dec.5.branch4.weight", "module.hid.dec.5.branch4.bias", "module.hid.dec.5.branch5.weight", "module.hid.dec.5.branch5.bias", "module.hid.dec.5.conv.weight", "module.hid.dec.5.conv.bias", "module.hid.dec.5.norm.weight", "module.hid.dec.5.norm.bias", "module.hid.dec.5.norm.running_mean", "module.hid.dec.5.norm.running_var", "module.hid.dec.5.norm.num_batches_tracked", "module.hid.dec.6.branch1.weight", "module.hid.dec.6.branch1.bias", "module.hid.dec.6.branch2.weight", "module.hid.dec.6.branch2.bias", "module.hid.dec.6.branch3.weight", "module.hid.dec.6.branch3.bias", "module.hid.dec.6.branch4.weight", "module.hid.dec.6.branch4.bias", "module.hid.dec.6.branch5.weight", "module.hid.dec.6.branch5.bias", "module.hid.dec.6.conv.weight", "module.hid.dec.6.conv.bias", "module.hid.dec.6.norm.weight", "module.hid.dec.6.norm.bias", "module.hid.dec.6.norm.running_mean", "module.hid.dec.6.norm.running_var", "module.hid.dec.6.norm.num_batches_tracked", "module.hid.dec.7.branch1.weight", "module.hid.dec.7.branch1.bias", "module.hid.dec.7.branch2.weight", 
"module.hid.dec.7.branch2.bias", "module.hid.dec.7.branch3.weight", "module.hid.dec.7.branch3.bias", "module.hid.dec.7.branch4.weight", "module.hid.dec.7.branch4.bias", "module.hid.dec.7.branch5.weight", "module.hid.dec.7.branch5.bias", "module.hid.dec.7.conv.weight", "module.hid.dec.7.conv.bias", "module.hid.dec.7.norm.weight", "module.hid.dec.7.norm.bias", "module.hid.dec.7.norm.running_mean", "module.hid.dec.7.norm.running_var", "module.hid.dec.7.norm.num_batches_tracked", "module.dit_block.pos_embed", "module.dit_block.x_embedder.proj.weight", "module.dit_block.x_embedder.proj.bias", "module.dit_block.t_embedder.mlp.0.weight", "module.dit_block.t_embedder.mlp.0.bias", "module.dit_block.t_embedder.mlp.2.weight", "module.dit_block.t_embedder.mlp.2.bias", "module.dit_block.blocks.0.attn.qkv.weight", "module.dit_block.blocks.0.attn.qkv.bias", "module.dit_block.blocks.0.attn.proj.weight", "module.dit_block.blocks.0.attn.proj.bias", "module.dit_block.blocks.0.mlp.fc1.weight", "module.dit_block.blocks.0.mlp.fc1.bias", "module.dit_block.blocks.0.mlp.fc2.weight", "module.dit_block.blocks.0.mlp.fc2.bias", "module.dit_block.blocks.0.adaLN_modulation.1.weight", "module.dit_block.blocks.0.adaLN_modulation.1.bias", "module.dit_block.blocks.1.attn.qkv.weight", "module.dit_block.blocks.1.attn.qkv.bias", "module.dit_block.blocks.1.attn.proj.weight", "module.dit_block.blocks.1.attn.proj.bias", "module.dit_block.blocks.1.mlp.fc1.weight", "module.dit_block.blocks.1.mlp.fc1.bias", "module.dit_block.blocks.1.mlp.fc2.weight", "module.dit_block.blocks.1.mlp.fc2.bias", "module.dit_block.blocks.1.adaLN_modulation.1.weight", "module.dit_block.blocks.1.adaLN_modulation.1.bias", "module.dit_block.blocks.2.attn.qkv.weight", "module.dit_block.blocks.2.attn.qkv.bias", "module.dit_block.blocks.2.attn.proj.weight", "module.dit_block.blocks.2.attn.proj.bias", "module.dit_block.blocks.2.mlp.fc1.weight", "module.dit_block.blocks.2.mlp.fc1.bias", "module.dit_block.blocks.2.mlp.fc2.weight", 
"module.dit_block.blocks.2.mlp.fc2.bias", "module.dit_block.blocks.2.adaLN_modulation.1.weight", "module.dit_block.blocks.2.adaLN_modulation.1.bias", "module.dit_block.blocks.3.attn.qkv.weight", "module.dit_block.blocks.3.attn.qkv.bias", "module.dit_block.blocks.3.attn.proj.weight", "module.dit_block.blocks.3.attn.proj.bias", "module.dit_block.blocks.3.mlp.fc1.weight", "module.dit_block.blocks.3.mlp.fc1.bias", "module.dit_block.blocks.3.mlp.fc2.weight", "module.dit_block.blocks.3.mlp.fc2.bias", "module.dit_block.blocks.3.adaLN_modulation.1.weight", "module.dit_block.blocks.3.adaLN_modulation.1.bias", "module.dit_block.blocks.4.attn.qkv.weight", "module.dit_block.blocks.4.attn.qkv.bias", "module.dit_block.blocks.4.attn.proj.weight", "module.dit_block.blocks.4.attn.proj.bias", "module.dit_block.blocks.4.mlp.fc1.weight", "module.dit_block.blocks.4.mlp.fc1.bias", "module.dit_block.blocks.4.mlp.fc2.weight", "module.dit_block.blocks.4.mlp.fc2.bias", "module.dit_block.blocks.4.adaLN_modulation.1.weight", "module.dit_block.blocks.4.adaLN_modulation.1.bias", "module.dit_block.blocks.5.attn.qkv.weight", "module.dit_block.blocks.5.attn.qkv.bias", "module.dit_block.blocks.5.attn.proj.weight", "module.dit_block.blocks.5.attn.proj.bias", "module.dit_block.blocks.5.mlp.fc1.weight", "module.dit_block.blocks.5.mlp.fc1.bias", "module.dit_block.blocks.5.mlp.fc2.weight", "module.dit_block.blocks.5.mlp.fc2.bias", "module.dit_block.blocks.5.adaLN_modulation.1.weight", "module.dit_block.blocks.5.adaLN_modulation.1.bias", "module.dit_block.blocks.6.attn.qkv.weight", "module.dit_block.blocks.6.attn.qkv.bias", "module.dit_block.blocks.6.attn.proj.weight", "module.dit_block.blocks.6.attn.proj.bias", "module.dit_block.blocks.6.mlp.fc1.weight", "module.dit_block.blocks.6.mlp.fc1.bias", "module.dit_block.blocks.6.mlp.fc2.weight", "module.dit_block.blocks.6.mlp.fc2.bias", "module.dit_block.blocks.6.adaLN_modulation.1.weight", "module.dit_block.blocks.6.adaLN_modulation.1.bias", 
"module.dit_block.blocks.7.attn.qkv.weight", "module.dit_block.blocks.7.attn.qkv.bias", "module.dit_block.blocks.7.attn.proj.weight", "module.dit_block.blocks.7.attn.proj.bias", "module.dit_block.blocks.7.mlp.fc1.weight", "module.dit_block.blocks.7.mlp.fc1.bias", "module.dit_block.blocks.7.mlp.fc2.weight", "module.dit_block.blocks.7.mlp.fc2.bias", "module.dit_block.blocks.7.adaLN_modulation.1.weight", "module.dit_block.blocks.7.adaLN_modulation.1.bias", "module.dit_block.blocks.8.attn.qkv.weight", "module.dit_block.blocks.8.attn.qkv.bias", "module.dit_block.blocks.8.attn.proj.weight", "module.dit_block.blocks.8.attn.proj.bias", "module.dit_block.blocks.8.mlp.fc1.weight", "module.dit_block.blocks.8.mlp.fc1.bias", "module.dit_block.blocks.8.mlp.fc2.weight", "module.dit_block.blocks.8.mlp.fc2.bias", "module.dit_block.blocks.8.adaLN_modulation.1.weight", "module.dit_block.blocks.8.adaLN_modulation.1.bias", "module.dit_block.blocks.9.attn.qkv.weight", "module.dit_block.blocks.9.attn.qkv.bias", "module.dit_block.blocks.9.attn.proj.weight", "module.dit_block.blocks.9.attn.proj.bias", "module.dit_block.blocks.9.mlp.fc1.weight", "module.dit_block.blocks.9.mlp.fc1.bias", "module.dit_block.blocks.9.mlp.fc2.weight", "module.dit_block.blocks.9.mlp.fc2.bias", "module.dit_block.blocks.9.adaLN_modulation.1.weight", "module.dit_block.blocks.9.adaLN_modulation.1.bias", "module.dit_block.blocks.10.attn.qkv.weight", "module.dit_block.blocks.10.attn.qkv.bias", "module.dit_block.blocks.10.attn.proj.weight", "module.dit_block.blocks.10.attn.proj.bias", "module.dit_block.blocks.10.mlp.fc1.weight", "module.dit_block.blocks.10.mlp.fc1.bias", "module.dit_block.blocks.10.mlp.fc2.weight", "module.dit_block.blocks.10.mlp.fc2.bias", "module.dit_block.blocks.10.adaLN_modulation.1.weight", "module.dit_block.blocks.10.adaLN_modulation.1.bias", "module.dit_block.blocks.11.attn.qkv.weight", "module.dit_block.blocks.11.attn.qkv.bias", "module.dit_block.blocks.11.attn.proj.weight", 
"module.dit_block.blocks.11.attn.proj.bias", "module.dit_block.blocks.11.mlp.fc1.weight", "module.dit_block.blocks.11.mlp.fc1.bias", "module.dit_block.blocks.11.mlp.fc2.weight", "module.dit_block.blocks.11.mlp.fc2.bias", "module.dit_block.blocks.11.adaLN_modulation.1.weight", "module.dit_block.blocks.11.adaLN_modulation.1.bias", "module.dit_block.final_layer.linear.weight", "module.dit_block.final_layer.linear.bias", "module.dit_block.final_layer.adaLN_modulation.1.weight", "module.dit_block.final_layer.adaLN_modulation.1.bias", "module.dec.dec.0.conv.weight", "module.dec.dec.0.conv.bias", "module.dec.dec.0.norm.weight", "module.dec.dec.0.norm.bias", "module.dec.dec.0.norm.running_mean", "module.dec.dec.0.norm.running_var", "module.dec.dec.0.norm.num_batches_tracked", "module.dec.dec.1.conv.weight", "module.dec.dec.1.conv.bias", "module.dec.dec.1.norm.weight", "module.dec.dec.1.norm.bias", "module.dec.dec.1.norm.running_mean", "module.dec.dec.1.norm.running_var", "module.dec.dec.1.norm.num_batches_tracked", "module.dec.dec.2.conv.weight", "module.dec.dec.2.conv.bias", "module.dec.dec.2.norm.weight", "module.dec.dec.2.norm.bias", "module.dec.dec.2.norm.running_mean", "module.dec.dec.2.norm.running_var", "module.dec.dec.2.norm.num_batches_tracked", "module.dec.dec.3.conv.weight", "module.dec.dec.3.conv.bias", "module.dec.dec.3.norm.weight", "module.dec.dec.3.norm.bias", "module.dec.dec.3.norm.running_mean", "module.dec.dec.3.norm.running_var", "module.dec.dec.3.norm.num_batches_tracked", "module.dec.readout.weight", "module.dec.readout.bias". 
+2025-02-24 16:13:59,753 推理过程出错:Error(s) in loading state_dict for Dit: + Missing key(s) in state_dict: "enc.enc.0.conv.weight", "enc.enc.0.conv.bias", "enc.enc.0.norm.weight", "enc.enc.0.norm.bias", "enc.enc.0.norm.running_mean", "enc.enc.0.norm.running_var", "enc.enc.1.conv.weight", "enc.enc.1.conv.bias", "enc.enc.1.norm.weight", "enc.enc.1.norm.bias", "enc.enc.1.norm.running_mean", "enc.enc.1.norm.running_var", "enc.enc.2.conv.weight", "enc.enc.2.conv.bias", "enc.enc.2.norm.weight", "enc.enc.2.norm.bias", "enc.enc.2.norm.running_mean", "enc.enc.2.norm.running_var", "enc.enc.3.conv.weight", "enc.enc.3.conv.bias", "enc.enc.3.norm.weight", "enc.enc.3.norm.bias", "enc.enc.3.norm.running_mean", "enc.enc.3.norm.running_var", "hid.norm.weight", "hid.norm.bias", "hid.enc.0.branch1.weight", "hid.enc.0.branch1.bias", "hid.enc.0.branch2.weight", "hid.enc.0.branch2.bias", "hid.enc.0.branch3.weight", "hid.enc.0.branch3.bias", "hid.enc.0.branch4.weight", "hid.enc.0.branch4.bias", "hid.enc.0.branch5.weight", "hid.enc.0.branch5.bias", "hid.enc.0.conv.weight", "hid.enc.0.conv.bias", "hid.enc.0.norm.weight", "hid.enc.0.norm.bias", "hid.enc.0.norm.running_mean", "hid.enc.0.norm.running_var", "hid.enc.1.branch1.weight", "hid.enc.1.branch1.bias", "hid.enc.1.branch2.weight", "hid.enc.1.branch2.bias", "hid.enc.1.branch3.weight", "hid.enc.1.branch3.bias", "hid.enc.1.branch4.weight", "hid.enc.1.branch4.bias", "hid.enc.1.branch5.weight", "hid.enc.1.branch5.bias", "hid.enc.1.conv.weight", "hid.enc.1.conv.bias", "hid.enc.1.norm.weight", "hid.enc.1.norm.bias", "hid.enc.1.norm.running_mean", "hid.enc.1.norm.running_var", "hid.enc.2.branch1.weight", "hid.enc.2.branch1.bias", "hid.enc.2.branch2.weight", "hid.enc.2.branch2.bias", "hid.enc.2.branch3.weight", "hid.enc.2.branch3.bias", "hid.enc.2.branch4.weight", "hid.enc.2.branch4.bias", "hid.enc.2.branch5.weight", "hid.enc.2.branch5.bias", "hid.enc.2.conv.weight", "hid.enc.2.conv.bias", "hid.enc.2.norm.weight", "hid.enc.2.norm.bias", 
"hid.enc.2.norm.running_mean", "hid.enc.2.norm.running_var", "hid.enc.3.branch1.weight", "hid.enc.3.branch1.bias", "hid.enc.3.branch2.weight", "hid.enc.3.branch2.bias", "hid.enc.3.branch3.weight", "hid.enc.3.branch3.bias", "hid.enc.3.branch4.weight", "hid.enc.3.branch4.bias", "hid.enc.3.branch5.weight", "hid.enc.3.branch5.bias", "hid.enc.3.conv.weight", "hid.enc.3.conv.bias", "hid.enc.3.norm.weight", "hid.enc.3.norm.bias", "hid.enc.3.norm.running_mean", "hid.enc.3.norm.running_var", "hid.enc.4.branch1.weight", "hid.enc.4.branch1.bias", "hid.enc.4.branch2.weight", "hid.enc.4.branch2.bias", "hid.enc.4.branch3.weight", "hid.enc.4.branch3.bias", "hid.enc.4.branch4.weight", "hid.enc.4.branch4.bias", "hid.enc.4.branch5.weight", "hid.enc.4.branch5.bias", "hid.enc.4.conv.weight", "hid.enc.4.conv.bias", "hid.enc.4.norm.weight", "hid.enc.4.norm.bias", "hid.enc.4.norm.running_mean", "hid.enc.4.norm.running_var", "hid.enc.5.branch1.weight", "hid.enc.5.branch1.bias", "hid.enc.5.branch2.weight", "hid.enc.5.branch2.bias", "hid.enc.5.branch3.weight", "hid.enc.5.branch3.bias", "hid.enc.5.branch4.weight", "hid.enc.5.branch4.bias", "hid.enc.5.branch5.weight", "hid.enc.5.branch5.bias", "hid.enc.5.conv.weight", "hid.enc.5.conv.bias", "hid.enc.5.norm.weight", "hid.enc.5.norm.bias", "hid.enc.5.norm.running_mean", "hid.enc.5.norm.running_var", "hid.enc.6.branch1.weight", "hid.enc.6.branch1.bias", "hid.enc.6.branch2.weight", "hid.enc.6.branch2.bias", "hid.enc.6.branch3.weight", "hid.enc.6.branch3.bias", "hid.enc.6.branch4.weight", "hid.enc.6.branch4.bias", "hid.enc.6.branch5.weight", "hid.enc.6.branch5.bias", "hid.enc.6.conv.weight", "hid.enc.6.conv.bias", "hid.enc.6.norm.weight", "hid.enc.6.norm.bias", "hid.enc.6.norm.running_mean", "hid.enc.6.norm.running_var", "hid.enc.7.branch1.weight", "hid.enc.7.branch1.bias", "hid.enc.7.branch2.weight", "hid.enc.7.branch2.bias", "hid.enc.7.branch3.weight", "hid.enc.7.branch3.bias", "hid.enc.7.branch4.weight", "hid.enc.7.branch4.bias", 
"hid.enc.7.branch5.weight", "hid.enc.7.branch5.bias", "hid.enc.7.conv.weight", "hid.enc.7.conv.bias", "hid.enc.7.norm.weight", "hid.enc.7.norm.bias", "hid.enc.7.norm.running_mean", "hid.enc.7.norm.running_var", "hid.dec.0.branch1.weight", "hid.dec.0.branch1.bias", "hid.dec.0.branch2.weight", "hid.dec.0.branch2.bias", "hid.dec.0.branch3.weight", "hid.dec.0.branch3.bias", "hid.dec.0.branch4.weight", "hid.dec.0.branch4.bias", "hid.dec.0.branch5.weight", "hid.dec.0.branch5.bias", "hid.dec.0.conv.weight", "hid.dec.0.conv.bias", "hid.dec.0.norm.weight", "hid.dec.0.norm.bias", "hid.dec.0.norm.running_mean", "hid.dec.0.norm.running_var", "hid.dec.1.branch1.weight", "hid.dec.1.branch1.bias", "hid.dec.1.branch2.weight", "hid.dec.1.branch2.bias", "hid.dec.1.branch3.weight", "hid.dec.1.branch3.bias", "hid.dec.1.branch4.weight", "hid.dec.1.branch4.bias", "hid.dec.1.branch5.weight", "hid.dec.1.branch5.bias", "hid.dec.1.conv.weight", "hid.dec.1.conv.bias", "hid.dec.1.norm.weight", "hid.dec.1.norm.bias", "hid.dec.1.norm.running_mean", "hid.dec.1.norm.running_var", "hid.dec.2.branch1.weight", "hid.dec.2.branch1.bias", "hid.dec.2.branch2.weight", "hid.dec.2.branch2.bias", "hid.dec.2.branch3.weight", "hid.dec.2.branch3.bias", "hid.dec.2.branch4.weight", "hid.dec.2.branch4.bias", "hid.dec.2.branch5.weight", "hid.dec.2.branch5.bias", "hid.dec.2.conv.weight", "hid.dec.2.conv.bias", "hid.dec.2.norm.weight", "hid.dec.2.norm.bias", "hid.dec.2.norm.running_mean", "hid.dec.2.norm.running_var", "hid.dec.3.branch1.weight", "hid.dec.3.branch1.bias", "hid.dec.3.branch2.weight", "hid.dec.3.branch2.bias", "hid.dec.3.branch3.weight", "hid.dec.3.branch3.bias", "hid.dec.3.branch4.weight", "hid.dec.3.branch4.bias", "hid.dec.3.branch5.weight", "hid.dec.3.branch5.bias", "hid.dec.3.conv.weight", "hid.dec.3.conv.bias", "hid.dec.3.norm.weight", "hid.dec.3.norm.bias", "hid.dec.3.norm.running_mean", "hid.dec.3.norm.running_var", "hid.dec.4.branch1.weight", "hid.dec.4.branch1.bias", 
"hid.dec.4.branch2.weight", "hid.dec.4.branch2.bias", "hid.dec.4.branch3.weight", "hid.dec.4.branch3.bias", "hid.dec.4.branch4.weight", "hid.dec.4.branch4.bias", "hid.dec.4.branch5.weight", "hid.dec.4.branch5.bias", "hid.dec.4.conv.weight", "hid.dec.4.conv.bias", "hid.dec.4.norm.weight", "hid.dec.4.norm.bias", "hid.dec.4.norm.running_mean", "hid.dec.4.norm.running_var", "hid.dec.5.branch1.weight", "hid.dec.5.branch1.bias", "hid.dec.5.branch2.weight", "hid.dec.5.branch2.bias", "hid.dec.5.branch3.weight", "hid.dec.5.branch3.bias", "hid.dec.5.branch4.weight", "hid.dec.5.branch4.bias", "hid.dec.5.branch5.weight", "hid.dec.5.branch5.bias", "hid.dec.5.conv.weight", "hid.dec.5.conv.bias", "hid.dec.5.norm.weight", "hid.dec.5.norm.bias", "hid.dec.5.norm.running_mean", "hid.dec.5.norm.running_var", "hid.dec.6.branch1.weight", "hid.dec.6.branch1.bias", "hid.dec.6.branch2.weight", "hid.dec.6.branch2.bias", "hid.dec.6.branch3.weight", "hid.dec.6.branch3.bias", "hid.dec.6.branch4.weight", "hid.dec.6.branch4.bias", "hid.dec.6.branch5.weight", "hid.dec.6.branch5.bias", "hid.dec.6.conv.weight", "hid.dec.6.conv.bias", "hid.dec.6.norm.weight", "hid.dec.6.norm.bias", "hid.dec.6.norm.running_mean", "hid.dec.6.norm.running_var", "hid.dec.7.branch1.weight", "hid.dec.7.branch1.bias", "hid.dec.7.branch2.weight", "hid.dec.7.branch2.bias", "hid.dec.7.branch3.weight", "hid.dec.7.branch3.bias", "hid.dec.7.branch4.weight", "hid.dec.7.branch4.bias", "hid.dec.7.branch5.weight", "hid.dec.7.branch5.bias", "hid.dec.7.conv.weight", "hid.dec.7.conv.bias", "hid.dec.7.norm.weight", "hid.dec.7.norm.bias", "hid.dec.7.norm.running_mean", "hid.dec.7.norm.running_var", "dit_block.pos_embed", "dit_block.x_embedder.proj.weight", "dit_block.x_embedder.proj.bias", "dit_block.t_embedder.mlp.0.weight", "dit_block.t_embedder.mlp.0.bias", "dit_block.t_embedder.mlp.2.weight", "dit_block.t_embedder.mlp.2.bias", "dit_block.blocks.0.attn.qkv.weight", "dit_block.blocks.0.attn.qkv.bias", 
"dit_block.blocks.0.attn.proj.weight", "dit_block.blocks.0.attn.proj.bias", "dit_block.blocks.0.mlp.fc1.weight", "dit_block.blocks.0.mlp.fc1.bias", "dit_block.blocks.0.mlp.fc2.weight", "dit_block.blocks.0.mlp.fc2.bias", "dit_block.blocks.0.adaLN_modulation.1.weight", "dit_block.blocks.0.adaLN_modulation.1.bias", "dit_block.blocks.1.attn.qkv.weight", "dit_block.blocks.1.attn.qkv.bias", "dit_block.blocks.1.attn.proj.weight", "dit_block.blocks.1.attn.proj.bias", "dit_block.blocks.1.mlp.fc1.weight", "dit_block.blocks.1.mlp.fc1.bias", "dit_block.blocks.1.mlp.fc2.weight", "dit_block.blocks.1.mlp.fc2.bias", "dit_block.blocks.1.adaLN_modulation.1.weight", "dit_block.blocks.1.adaLN_modulation.1.bias", "dit_block.blocks.2.attn.qkv.weight", "dit_block.blocks.2.attn.qkv.bias", "dit_block.blocks.2.attn.proj.weight", "dit_block.blocks.2.attn.proj.bias", "dit_block.blocks.2.mlp.fc1.weight", "dit_block.blocks.2.mlp.fc1.bias", "dit_block.blocks.2.mlp.fc2.weight", "dit_block.blocks.2.mlp.fc2.bias", "dit_block.blocks.2.adaLN_modulation.1.weight", "dit_block.blocks.2.adaLN_modulation.1.bias", "dit_block.blocks.3.attn.qkv.weight", "dit_block.blocks.3.attn.qkv.bias", "dit_block.blocks.3.attn.proj.weight", "dit_block.blocks.3.attn.proj.bias", "dit_block.blocks.3.mlp.fc1.weight", "dit_block.blocks.3.mlp.fc1.bias", "dit_block.blocks.3.mlp.fc2.weight", "dit_block.blocks.3.mlp.fc2.bias", "dit_block.blocks.3.adaLN_modulation.1.weight", "dit_block.blocks.3.adaLN_modulation.1.bias", "dit_block.blocks.4.attn.qkv.weight", "dit_block.blocks.4.attn.qkv.bias", "dit_block.blocks.4.attn.proj.weight", "dit_block.blocks.4.attn.proj.bias", "dit_block.blocks.4.mlp.fc1.weight", "dit_block.blocks.4.mlp.fc1.bias", "dit_block.blocks.4.mlp.fc2.weight", "dit_block.blocks.4.mlp.fc2.bias", "dit_block.blocks.4.adaLN_modulation.1.weight", "dit_block.blocks.4.adaLN_modulation.1.bias", "dit_block.blocks.5.attn.qkv.weight", "dit_block.blocks.5.attn.qkv.bias", "dit_block.blocks.5.attn.proj.weight", 
"dit_block.blocks.5.attn.proj.bias", "dit_block.blocks.5.mlp.fc1.weight", "dit_block.blocks.5.mlp.fc1.bias", "dit_block.blocks.5.mlp.fc2.weight", "dit_block.blocks.5.mlp.fc2.bias", "dit_block.blocks.5.adaLN_modulation.1.weight", "dit_block.blocks.5.adaLN_modulation.1.bias", "dit_block.blocks.6.attn.qkv.weight", "dit_block.blocks.6.attn.qkv.bias", "dit_block.blocks.6.attn.proj.weight", "dit_block.blocks.6.attn.proj.bias", "dit_block.blocks.6.mlp.fc1.weight", "dit_block.blocks.6.mlp.fc1.bias", "dit_block.blocks.6.mlp.fc2.weight", "dit_block.blocks.6.mlp.fc2.bias", "dit_block.blocks.6.adaLN_modulation.1.weight", "dit_block.blocks.6.adaLN_modulation.1.bias", "dit_block.blocks.7.attn.qkv.weight", "dit_block.blocks.7.attn.qkv.bias", "dit_block.blocks.7.attn.proj.weight", "dit_block.blocks.7.attn.proj.bias", "dit_block.blocks.7.mlp.fc1.weight", "dit_block.blocks.7.mlp.fc1.bias", "dit_block.blocks.7.mlp.fc2.weight", "dit_block.blocks.7.mlp.fc2.bias", "dit_block.blocks.7.adaLN_modulation.1.weight", "dit_block.blocks.7.adaLN_modulation.1.bias", "dit_block.blocks.8.attn.qkv.weight", "dit_block.blocks.8.attn.qkv.bias", "dit_block.blocks.8.attn.proj.weight", "dit_block.blocks.8.attn.proj.bias", "dit_block.blocks.8.mlp.fc1.weight", "dit_block.blocks.8.mlp.fc1.bias", "dit_block.blocks.8.mlp.fc2.weight", "dit_block.blocks.8.mlp.fc2.bias", "dit_block.blocks.8.adaLN_modulation.1.weight", "dit_block.blocks.8.adaLN_modulation.1.bias", "dit_block.blocks.9.attn.qkv.weight", "dit_block.blocks.9.attn.qkv.bias", "dit_block.blocks.9.attn.proj.weight", "dit_block.blocks.9.attn.proj.bias", "dit_block.blocks.9.mlp.fc1.weight", "dit_block.blocks.9.mlp.fc1.bias", "dit_block.blocks.9.mlp.fc2.weight", "dit_block.blocks.9.mlp.fc2.bias", "dit_block.blocks.9.adaLN_modulation.1.weight", "dit_block.blocks.9.adaLN_modulation.1.bias", "dit_block.blocks.10.attn.qkv.weight", "dit_block.blocks.10.attn.qkv.bias", "dit_block.blocks.10.attn.proj.weight", "dit_block.blocks.10.attn.proj.bias", 
"dit_block.blocks.10.mlp.fc1.weight", "dit_block.blocks.10.mlp.fc1.bias", "dit_block.blocks.10.mlp.fc2.weight", "dit_block.blocks.10.mlp.fc2.bias", "dit_block.blocks.10.adaLN_modulation.1.weight", "dit_block.blocks.10.adaLN_modulation.1.bias", "dit_block.blocks.11.attn.qkv.weight", "dit_block.blocks.11.attn.qkv.bias", "dit_block.blocks.11.attn.proj.weight", "dit_block.blocks.11.attn.proj.bias", "dit_block.blocks.11.mlp.fc1.weight", "dit_block.blocks.11.mlp.fc1.bias", "dit_block.blocks.11.mlp.fc2.weight", "dit_block.blocks.11.mlp.fc2.bias", "dit_block.blocks.11.adaLN_modulation.1.weight", "dit_block.blocks.11.adaLN_modulation.1.bias", "dit_block.final_layer.linear.weight", "dit_block.final_layer.linear.bias", "dit_block.final_layer.adaLN_modulation.1.weight", "dit_block.final_layer.adaLN_modulation.1.bias", "dec.dec.0.conv.weight", "dec.dec.0.conv.bias", "dec.dec.0.norm.weight", "dec.dec.0.norm.bias", "dec.dec.0.norm.running_mean", "dec.dec.0.norm.running_var", "dec.dec.1.conv.weight", "dec.dec.1.conv.bias", "dec.dec.1.norm.weight", "dec.dec.1.norm.bias", "dec.dec.1.norm.running_mean", "dec.dec.1.norm.running_var", "dec.dec.2.conv.weight", "dec.dec.2.conv.bias", "dec.dec.2.norm.weight", "dec.dec.2.norm.bias", "dec.dec.2.norm.running_mean", "dec.dec.2.norm.running_var", "dec.dec.3.conv.weight", "dec.dec.3.conv.bias", "dec.dec.3.norm.weight", "dec.dec.3.norm.bias", "dec.dec.3.norm.running_mean", "dec.dec.3.norm.running_var", "dec.readout.weight", "dec.readout.bias". 
+ Unexpected key(s) in state_dict: "module.enc.enc.0.conv.weight", "module.enc.enc.0.conv.bias", "module.enc.enc.0.norm.weight", "module.enc.enc.0.norm.bias", "module.enc.enc.0.norm.running_mean", "module.enc.enc.0.norm.running_var", "module.enc.enc.0.norm.num_batches_tracked", "module.enc.enc.1.conv.weight", "module.enc.enc.1.conv.bias", "module.enc.enc.1.norm.weight", "module.enc.enc.1.norm.bias", "module.enc.enc.1.norm.running_mean", "module.enc.enc.1.norm.running_var", "module.enc.enc.1.norm.num_batches_tracked", "module.enc.enc.2.conv.weight", "module.enc.enc.2.conv.bias", "module.enc.enc.2.norm.weight", "module.enc.enc.2.norm.bias", "module.enc.enc.2.norm.running_mean", "module.enc.enc.2.norm.running_var", "module.enc.enc.2.norm.num_batches_tracked", "module.enc.enc.3.conv.weight", "module.enc.enc.3.conv.bias", "module.enc.enc.3.norm.weight", "module.enc.enc.3.norm.bias", "module.enc.enc.3.norm.running_mean", "module.enc.enc.3.norm.running_var", "module.enc.enc.3.norm.num_batches_tracked", "module.hid.norm.weight", "module.hid.norm.bias", "module.hid.enc.0.branch1.weight", "module.hid.enc.0.branch1.bias", "module.hid.enc.0.branch2.weight", "module.hid.enc.0.branch2.bias", "module.hid.enc.0.branch3.weight", "module.hid.enc.0.branch3.bias", "module.hid.enc.0.branch4.weight", "module.hid.enc.0.branch4.bias", "module.hid.enc.0.branch5.weight", "module.hid.enc.0.branch5.bias", "module.hid.enc.0.conv.weight", "module.hid.enc.0.conv.bias", "module.hid.enc.0.norm.weight", "module.hid.enc.0.norm.bias", "module.hid.enc.0.norm.running_mean", "module.hid.enc.0.norm.running_var", "module.hid.enc.0.norm.num_batches_tracked", "module.hid.enc.1.branch1.weight", "module.hid.enc.1.branch1.bias", "module.hid.enc.1.branch2.weight", "module.hid.enc.1.branch2.bias", "module.hid.enc.1.branch3.weight", "module.hid.enc.1.branch3.bias", "module.hid.enc.1.branch4.weight", "module.hid.enc.1.branch4.bias", "module.hid.enc.1.branch5.weight", "module.hid.enc.1.branch5.bias", 
"module.hid.enc.1.conv.weight", "module.hid.enc.1.conv.bias", "module.hid.enc.1.norm.weight", "module.hid.enc.1.norm.bias", "module.hid.enc.1.norm.running_mean", "module.hid.enc.1.norm.running_var", "module.hid.enc.1.norm.num_batches_tracked", "module.hid.enc.2.branch1.weight", "module.hid.enc.2.branch1.bias", "module.hid.enc.2.branch2.weight", "module.hid.enc.2.branch2.bias", "module.hid.enc.2.branch3.weight", "module.hid.enc.2.branch3.bias", "module.hid.enc.2.branch4.weight", "module.hid.enc.2.branch4.bias", "module.hid.enc.2.branch5.weight", "module.hid.enc.2.branch5.bias", "module.hid.enc.2.conv.weight", "module.hid.enc.2.conv.bias", "module.hid.enc.2.norm.weight", "module.hid.enc.2.norm.bias", "module.hid.enc.2.norm.running_mean", "module.hid.enc.2.norm.running_var", "module.hid.enc.2.norm.num_batches_tracked", "module.hid.enc.3.branch1.weight", "module.hid.enc.3.branch1.bias", "module.hid.enc.3.branch2.weight", "module.hid.enc.3.branch2.bias", "module.hid.enc.3.branch3.weight", "module.hid.enc.3.branch3.bias", "module.hid.enc.3.branch4.weight", "module.hid.enc.3.branch4.bias", "module.hid.enc.3.branch5.weight", "module.hid.enc.3.branch5.bias", "module.hid.enc.3.conv.weight", "module.hid.enc.3.conv.bias", "module.hid.enc.3.norm.weight", "module.hid.enc.3.norm.bias", "module.hid.enc.3.norm.running_mean", "module.hid.enc.3.norm.running_var", "module.hid.enc.3.norm.num_batches_tracked", "module.hid.enc.4.branch1.weight", "module.hid.enc.4.branch1.bias", "module.hid.enc.4.branch2.weight", "module.hid.enc.4.branch2.bias", "module.hid.enc.4.branch3.weight", "module.hid.enc.4.branch3.bias", "module.hid.enc.4.branch4.weight", "module.hid.enc.4.branch4.bias", "module.hid.enc.4.branch5.weight", "module.hid.enc.4.branch5.bias", "module.hid.enc.4.conv.weight", "module.hid.enc.4.conv.bias", "module.hid.enc.4.norm.weight", "module.hid.enc.4.norm.bias", "module.hid.enc.4.norm.running_mean", "module.hid.enc.4.norm.running_var", "module.hid.enc.4.norm.num_batches_tracked", 
"module.hid.enc.5.branch1.weight", "module.hid.enc.5.branch1.bias", "module.hid.enc.5.branch2.weight", "module.hid.enc.5.branch2.bias", "module.hid.enc.5.branch3.weight", "module.hid.enc.5.branch3.bias", "module.hid.enc.5.branch4.weight", "module.hid.enc.5.branch4.bias", "module.hid.enc.5.branch5.weight", "module.hid.enc.5.branch5.bias", "module.hid.enc.5.conv.weight", "module.hid.enc.5.conv.bias", "module.hid.enc.5.norm.weight", "module.hid.enc.5.norm.bias", "module.hid.enc.5.norm.running_mean", "module.hid.enc.5.norm.running_var", "module.hid.enc.5.norm.num_batches_tracked", "module.hid.enc.6.branch1.weight", "module.hid.enc.6.branch1.bias", "module.hid.enc.6.branch2.weight", "module.hid.enc.6.branch2.bias", "module.hid.enc.6.branch3.weight", "module.hid.enc.6.branch3.bias", "module.hid.enc.6.branch4.weight", "module.hid.enc.6.branch4.bias", "module.hid.enc.6.branch5.weight", "module.hid.enc.6.branch5.bias", "module.hid.enc.6.conv.weight", "module.hid.enc.6.conv.bias", "module.hid.enc.6.norm.weight", "module.hid.enc.6.norm.bias", "module.hid.enc.6.norm.running_mean", "module.hid.enc.6.norm.running_var", "module.hid.enc.6.norm.num_batches_tracked", "module.hid.enc.7.branch1.weight", "module.hid.enc.7.branch1.bias", "module.hid.enc.7.branch2.weight", "module.hid.enc.7.branch2.bias", "module.hid.enc.7.branch3.weight", "module.hid.enc.7.branch3.bias", "module.hid.enc.7.branch4.weight", "module.hid.enc.7.branch4.bias", "module.hid.enc.7.branch5.weight", "module.hid.enc.7.branch5.bias", "module.hid.enc.7.conv.weight", "module.hid.enc.7.conv.bias", "module.hid.enc.7.norm.weight", "module.hid.enc.7.norm.bias", "module.hid.enc.7.norm.running_mean", "module.hid.enc.7.norm.running_var", "module.hid.enc.7.norm.num_batches_tracked", "module.hid.dec.0.branch1.weight", "module.hid.dec.0.branch1.bias", "module.hid.dec.0.branch2.weight", "module.hid.dec.0.branch2.bias", "module.hid.dec.0.branch3.weight", "module.hid.dec.0.branch3.bias", "module.hid.dec.0.branch4.weight", 
"module.hid.dec.0.branch4.bias", "module.hid.dec.0.branch5.weight", "module.hid.dec.0.branch5.bias", "module.hid.dec.0.conv.weight", "module.hid.dec.0.conv.bias", "module.hid.dec.0.norm.weight", "module.hid.dec.0.norm.bias", "module.hid.dec.0.norm.running_mean", "module.hid.dec.0.norm.running_var", "module.hid.dec.0.norm.num_batches_tracked", "module.hid.dec.1.branch1.weight", "module.hid.dec.1.branch1.bias", "module.hid.dec.1.branch2.weight", "module.hid.dec.1.branch2.bias", "module.hid.dec.1.branch3.weight", "module.hid.dec.1.branch3.bias", "module.hid.dec.1.branch4.weight", "module.hid.dec.1.branch4.bias", "module.hid.dec.1.branch5.weight", "module.hid.dec.1.branch5.bias", "module.hid.dec.1.conv.weight", "module.hid.dec.1.conv.bias", "module.hid.dec.1.norm.weight", "module.hid.dec.1.norm.bias", "module.hid.dec.1.norm.running_mean", "module.hid.dec.1.norm.running_var", "module.hid.dec.1.norm.num_batches_tracked", "module.hid.dec.2.branch1.weight", "module.hid.dec.2.branch1.bias", "module.hid.dec.2.branch2.weight", "module.hid.dec.2.branch2.bias", "module.hid.dec.2.branch3.weight", "module.hid.dec.2.branch3.bias", "module.hid.dec.2.branch4.weight", "module.hid.dec.2.branch4.bias", "module.hid.dec.2.branch5.weight", "module.hid.dec.2.branch5.bias", "module.hid.dec.2.conv.weight", "module.hid.dec.2.conv.bias", "module.hid.dec.2.norm.weight", "module.hid.dec.2.norm.bias", "module.hid.dec.2.norm.running_mean", "module.hid.dec.2.norm.running_var", "module.hid.dec.2.norm.num_batches_tracked", "module.hid.dec.3.branch1.weight", "module.hid.dec.3.branch1.bias", "module.hid.dec.3.branch2.weight", "module.hid.dec.3.branch2.bias", "module.hid.dec.3.branch3.weight", "module.hid.dec.3.branch3.bias", "module.hid.dec.3.branch4.weight", "module.hid.dec.3.branch4.bias", "module.hid.dec.3.branch5.weight", "module.hid.dec.3.branch5.bias", "module.hid.dec.3.conv.weight", "module.hid.dec.3.conv.bias", "module.hid.dec.3.norm.weight", "module.hid.dec.3.norm.bias", 
"module.hid.dec.3.norm.running_mean", "module.hid.dec.3.norm.running_var", "module.hid.dec.3.norm.num_batches_tracked", "module.hid.dec.4.branch1.weight", "module.hid.dec.4.branch1.bias", "module.hid.dec.4.branch2.weight", "module.hid.dec.4.branch2.bias", "module.hid.dec.4.branch3.weight", "module.hid.dec.4.branch3.bias", "module.hid.dec.4.branch4.weight", "module.hid.dec.4.branch4.bias", "module.hid.dec.4.branch5.weight", "module.hid.dec.4.branch5.bias", "module.hid.dec.4.conv.weight", "module.hid.dec.4.conv.bias", "module.hid.dec.4.norm.weight", "module.hid.dec.4.norm.bias", "module.hid.dec.4.norm.running_mean", "module.hid.dec.4.norm.running_var", "module.hid.dec.4.norm.num_batches_tracked", "module.hid.dec.5.branch1.weight", "module.hid.dec.5.branch1.bias", "module.hid.dec.5.branch2.weight", "module.hid.dec.5.branch2.bias", "module.hid.dec.5.branch3.weight", "module.hid.dec.5.branch3.bias", "module.hid.dec.5.branch4.weight", "module.hid.dec.5.branch4.bias", "module.hid.dec.5.branch5.weight", "module.hid.dec.5.branch5.bias", "module.hid.dec.5.conv.weight", "module.hid.dec.5.conv.bias", "module.hid.dec.5.norm.weight", "module.hid.dec.5.norm.bias", "module.hid.dec.5.norm.running_mean", "module.hid.dec.5.norm.running_var", "module.hid.dec.5.norm.num_batches_tracked", "module.hid.dec.6.branch1.weight", "module.hid.dec.6.branch1.bias", "module.hid.dec.6.branch2.weight", "module.hid.dec.6.branch2.bias", "module.hid.dec.6.branch3.weight", "module.hid.dec.6.branch3.bias", "module.hid.dec.6.branch4.weight", "module.hid.dec.6.branch4.bias", "module.hid.dec.6.branch5.weight", "module.hid.dec.6.branch5.bias", "module.hid.dec.6.conv.weight", "module.hid.dec.6.conv.bias", "module.hid.dec.6.norm.weight", "module.hid.dec.6.norm.bias", "module.hid.dec.6.norm.running_mean", "module.hid.dec.6.norm.running_var", "module.hid.dec.6.norm.num_batches_tracked", "module.hid.dec.7.branch1.weight", "module.hid.dec.7.branch1.bias", "module.hid.dec.7.branch2.weight", 
"module.hid.dec.7.branch2.bias", "module.hid.dec.7.branch3.weight", "module.hid.dec.7.branch3.bias", "module.hid.dec.7.branch4.weight", "module.hid.dec.7.branch4.bias", "module.hid.dec.7.branch5.weight", "module.hid.dec.7.branch5.bias", "module.hid.dec.7.conv.weight", "module.hid.dec.7.conv.bias", "module.hid.dec.7.norm.weight", "module.hid.dec.7.norm.bias", "module.hid.dec.7.norm.running_mean", "module.hid.dec.7.norm.running_var", "module.hid.dec.7.norm.num_batches_tracked", "module.dit_block.pos_embed", "module.dit_block.x_embedder.proj.weight", "module.dit_block.x_embedder.proj.bias", "module.dit_block.t_embedder.mlp.0.weight", "module.dit_block.t_embedder.mlp.0.bias", "module.dit_block.t_embedder.mlp.2.weight", "module.dit_block.t_embedder.mlp.2.bias", "module.dit_block.blocks.0.attn.qkv.weight", "module.dit_block.blocks.0.attn.qkv.bias", "module.dit_block.blocks.0.attn.proj.weight", "module.dit_block.blocks.0.attn.proj.bias", "module.dit_block.blocks.0.mlp.fc1.weight", "module.dit_block.blocks.0.mlp.fc1.bias", "module.dit_block.blocks.0.mlp.fc2.weight", "module.dit_block.blocks.0.mlp.fc2.bias", "module.dit_block.blocks.0.adaLN_modulation.1.weight", "module.dit_block.blocks.0.adaLN_modulation.1.bias", "module.dit_block.blocks.1.attn.qkv.weight", "module.dit_block.blocks.1.attn.qkv.bias", "module.dit_block.blocks.1.attn.proj.weight", "module.dit_block.blocks.1.attn.proj.bias", "module.dit_block.blocks.1.mlp.fc1.weight", "module.dit_block.blocks.1.mlp.fc1.bias", "module.dit_block.blocks.1.mlp.fc2.weight", "module.dit_block.blocks.1.mlp.fc2.bias", "module.dit_block.blocks.1.adaLN_modulation.1.weight", "module.dit_block.blocks.1.adaLN_modulation.1.bias", "module.dit_block.blocks.2.attn.qkv.weight", "module.dit_block.blocks.2.attn.qkv.bias", "module.dit_block.blocks.2.attn.proj.weight", "module.dit_block.blocks.2.attn.proj.bias", "module.dit_block.blocks.2.mlp.fc1.weight", "module.dit_block.blocks.2.mlp.fc1.bias", "module.dit_block.blocks.2.mlp.fc2.weight", 
"module.dit_block.blocks.2.mlp.fc2.bias", "module.dit_block.blocks.2.adaLN_modulation.1.weight", "module.dit_block.blocks.2.adaLN_modulation.1.bias", "module.dit_block.blocks.3.attn.qkv.weight", "module.dit_block.blocks.3.attn.qkv.bias", "module.dit_block.blocks.3.attn.proj.weight", "module.dit_block.blocks.3.attn.proj.bias", "module.dit_block.blocks.3.mlp.fc1.weight", "module.dit_block.blocks.3.mlp.fc1.bias", "module.dit_block.blocks.3.mlp.fc2.weight", "module.dit_block.blocks.3.mlp.fc2.bias", "module.dit_block.blocks.3.adaLN_modulation.1.weight", "module.dit_block.blocks.3.adaLN_modulation.1.bias", "module.dit_block.blocks.4.attn.qkv.weight", "module.dit_block.blocks.4.attn.qkv.bias", "module.dit_block.blocks.4.attn.proj.weight", "module.dit_block.blocks.4.attn.proj.bias", "module.dit_block.blocks.4.mlp.fc1.weight", "module.dit_block.blocks.4.mlp.fc1.bias", "module.dit_block.blocks.4.mlp.fc2.weight", "module.dit_block.blocks.4.mlp.fc2.bias", "module.dit_block.blocks.4.adaLN_modulation.1.weight", "module.dit_block.blocks.4.adaLN_modulation.1.bias", "module.dit_block.blocks.5.attn.qkv.weight", "module.dit_block.blocks.5.attn.qkv.bias", "module.dit_block.blocks.5.attn.proj.weight", "module.dit_block.blocks.5.attn.proj.bias", "module.dit_block.blocks.5.mlp.fc1.weight", "module.dit_block.blocks.5.mlp.fc1.bias", "module.dit_block.blocks.5.mlp.fc2.weight", "module.dit_block.blocks.5.mlp.fc2.bias", "module.dit_block.blocks.5.adaLN_modulation.1.weight", "module.dit_block.blocks.5.adaLN_modulation.1.bias", "module.dit_block.blocks.6.attn.qkv.weight", "module.dit_block.blocks.6.attn.qkv.bias", "module.dit_block.blocks.6.attn.proj.weight", "module.dit_block.blocks.6.attn.proj.bias", "module.dit_block.blocks.6.mlp.fc1.weight", "module.dit_block.blocks.6.mlp.fc1.bias", "module.dit_block.blocks.6.mlp.fc2.weight", "module.dit_block.blocks.6.mlp.fc2.bias", "module.dit_block.blocks.6.adaLN_modulation.1.weight", "module.dit_block.blocks.6.adaLN_modulation.1.bias", 
"module.dit_block.blocks.7.attn.qkv.weight", "module.dit_block.blocks.7.attn.qkv.bias", "module.dit_block.blocks.7.attn.proj.weight", "module.dit_block.blocks.7.attn.proj.bias", "module.dit_block.blocks.7.mlp.fc1.weight", "module.dit_block.blocks.7.mlp.fc1.bias", "module.dit_block.blocks.7.mlp.fc2.weight", "module.dit_block.blocks.7.mlp.fc2.bias", "module.dit_block.blocks.7.adaLN_modulation.1.weight", "module.dit_block.blocks.7.adaLN_modulation.1.bias", "module.dit_block.blocks.8.attn.qkv.weight", "module.dit_block.blocks.8.attn.qkv.bias", "module.dit_block.blocks.8.attn.proj.weight", "module.dit_block.blocks.8.attn.proj.bias", "module.dit_block.blocks.8.mlp.fc1.weight", "module.dit_block.blocks.8.mlp.fc1.bias", "module.dit_block.blocks.8.mlp.fc2.weight", "module.dit_block.blocks.8.mlp.fc2.bias", "module.dit_block.blocks.8.adaLN_modulation.1.weight", "module.dit_block.blocks.8.adaLN_modulation.1.bias", "module.dit_block.blocks.9.attn.qkv.weight", "module.dit_block.blocks.9.attn.qkv.bias", "module.dit_block.blocks.9.attn.proj.weight", "module.dit_block.blocks.9.attn.proj.bias", "module.dit_block.blocks.9.mlp.fc1.weight", "module.dit_block.blocks.9.mlp.fc1.bias", "module.dit_block.blocks.9.mlp.fc2.weight", "module.dit_block.blocks.9.mlp.fc2.bias", "module.dit_block.blocks.9.adaLN_modulation.1.weight", "module.dit_block.blocks.9.adaLN_modulation.1.bias", "module.dit_block.blocks.10.attn.qkv.weight", "module.dit_block.blocks.10.attn.qkv.bias", "module.dit_block.blocks.10.attn.proj.weight", "module.dit_block.blocks.10.attn.proj.bias", "module.dit_block.blocks.10.mlp.fc1.weight", "module.dit_block.blocks.10.mlp.fc1.bias", "module.dit_block.blocks.10.mlp.fc2.weight", "module.dit_block.blocks.10.mlp.fc2.bias", "module.dit_block.blocks.10.adaLN_modulation.1.weight", "module.dit_block.blocks.10.adaLN_modulation.1.bias", "module.dit_block.blocks.11.attn.qkv.weight", "module.dit_block.blocks.11.attn.qkv.bias", "module.dit_block.blocks.11.attn.proj.weight", 
"module.dit_block.blocks.11.attn.proj.bias", "module.dit_block.blocks.11.mlp.fc1.weight", "module.dit_block.blocks.11.mlp.fc1.bias", "module.dit_block.blocks.11.mlp.fc2.weight", "module.dit_block.blocks.11.mlp.fc2.bias", "module.dit_block.blocks.11.adaLN_modulation.1.weight", "module.dit_block.blocks.11.adaLN_modulation.1.bias", "module.dit_block.final_layer.linear.weight", "module.dit_block.final_layer.linear.bias", "module.dit_block.final_layer.adaLN_modulation.1.weight", "module.dit_block.final_layer.adaLN_modulation.1.bias", "module.dec.dec.0.conv.weight", "module.dec.dec.0.conv.bias", "module.dec.dec.0.norm.weight", "module.dec.dec.0.norm.bias", "module.dec.dec.0.norm.running_mean", "module.dec.dec.0.norm.running_var", "module.dec.dec.0.norm.num_batches_tracked", "module.dec.dec.1.conv.weight", "module.dec.dec.1.conv.bias", "module.dec.dec.1.norm.weight", "module.dec.dec.1.norm.bias", "module.dec.dec.1.norm.running_mean", "module.dec.dec.1.norm.running_var", "module.dec.dec.1.norm.num_batches_tracked", "module.dec.dec.2.conv.weight", "module.dec.dec.2.conv.bias", "module.dec.dec.2.norm.weight", "module.dec.dec.2.norm.bias", "module.dec.dec.2.norm.running_mean", "module.dec.dec.2.norm.running_var", "module.dec.dec.2.norm.num_batches_tracked", "module.dec.dec.3.conv.weight", "module.dec.dec.3.conv.bias", "module.dec.dec.3.norm.weight", "module.dec.dec.3.norm.bias", "module.dec.dec.3.norm.running_mean", "module.dec.dec.3.norm.running_var", "module.dec.dec.3.norm.num_batches_tracked", "module.dec.readout.weight", "module.dec.readout.bias". +2025-02-24 16:39:06,156 加载模型失败:name 'OrderedDict' is not defined +2025-02-24 16:39:06,157 推理过程出错:模型加载错误:name 'OrderedDict' is not defined +2025-02-24 16:43:43,839 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:43:43,843 开始推理... 
+2025-02-24 16:44:55,421 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:44:55,425 开始推理... +2025-02-24 16:44:56,032 推理过程出错:need at least one array to concatenate +2025-02-24 16:45:35,939 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:45:35,948 开始推理... +2025-02-24 16:45:36,868 推理过程出错:need at least one array to concatenate +2025-02-24 16:46:21,644 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Dit_exp2_20250224_best_model.pth +2025-02-24 16:46:21,648 开始推理... +2025-02-24 16:56:24,368 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 16:56:24,372 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 16:56:24,372 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 16:56:24,372 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 16:56:24,882 输入数据范围:[-2.09, 2.09] +2025-02-24 16:56:25,407 输出数据范围:[-1.24, 1.57] +2025-02-24 16:56:25,407 推理完成! +2025-02-24 17:00:07,689 加载模型失败:Error(s) in loading state_dict for Dit: + size mismatch for dec.readout.weight: copying a param with shape torch.Size([2, 64, 1, 1]) from checkpoint, the shape in current model is torch.Size([2, 32, 1, 1]). +2025-02-24 17:00:07,689 推理过程出错:模型加载错误:Error(s) in loading state_dict for Dit: + size mismatch for dec.readout.weight: copying a param with shape torch.Size([2, 64, 1, 1]) from checkpoint, the shape in current model is torch.Size([2, 32, 1, 1]). +2025-02-24 17:06:16,993 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-24 17:06:16,998 开始推理... 
+2025-02-24 17:14:06,318 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 17:14:06,365 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:14:06,366 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:14:06,366 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:14:07,220 输入数据范围:[-2.09, 2.09] +2025-02-24 17:14:07,734 输出数据范围:[-1.92, 2.03] +2025-02-24 17:14:07,735 推理完成! +2025-02-24 17:30:52,791 缺失的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 'hid.enc.1.layers.3.conv.bias', 
'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 'hid.enc.5.layers.0.norm.bias', 
'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 'hid.dec.0.layers.2.norm.bias', 
'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 'hid.dec.4.layers.0.conv.bias', 
'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 'hid.dec.7.layers.2.conv.bias', 
'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-24 17:30:52,791 意外的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 'temporal_evolution.enc.0.block.conv2.bias', 
'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 'temporal_evolution.enc.2.block.mlp.fc1.weight', 
'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 'temporal_evolution.enc.5.block.attn.qkv.weight', 
'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 'temporal_evolution.enc.7.block.norm2.running_mean', 
'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-24 17:30:52,792 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-24 17:30:52,802 开始推理... +2025-02-24 17:39:15,084 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 17:39:15,119 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:39:15,120 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:39:15,120 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 17:39:15,577 输入数据范围:[-2.09, 2.09] +2025-02-24 17:39:16,031 输出数据范围:[-2.91, 2.99] +2025-02-24 17:39:16,032 推理完成! 
diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250224_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250224_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..93984d35e6ac6488b9ae09343530e36e1fcd9a55 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250224_training_log.log @@ -0,0 +1,203 @@ +2025-02-24 15:59:52,234 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 15:59:52,307 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 15:59:52,589 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 15:59:52,594 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 15:59:52,604 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 15:59:52,613 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-24 15:59:52,620 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 15:59:52,628 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-24 16:00:49,500 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 16:00:49,520 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 16:00:49,539 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 16:00:49,572 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 16:00:49,649 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 16:00:49,658 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-24 16:00:49,663 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-24 16:00:49,671 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 16:01:53,824 Epoch 1/2000 +2025-02-24 16:03:36,980 Current Learning Rate: 0.0099993832 +2025-02-24 16:03:37,195 Train Loss: 0.0205501, Val Loss: 0.0178727 +2025-02-24 16:03:37,196 Epoch 2/2000 +2025-02-24 16:05:17,744 Current Learning Rate: 0.0099975328 +2025-02-24 16:05:17,745 
Train Loss: 0.0185941, Val Loss: 0.0186720 +2025-02-24 16:05:17,746 Epoch 3/2000 +2025-02-24 16:06:59,162 Current Learning Rate: 0.0099944494 +2025-02-24 16:06:59,363 Train Loss: 0.0183035, Val Loss: 0.0177063 +2025-02-24 16:06:59,364 Epoch 4/2000 +2025-02-24 16:08:40,092 Current Learning Rate: 0.0099901336 +2025-02-24 16:08:40,093 Train Loss: 0.0179720, Val Loss: 0.0196525 +2025-02-24 16:08:40,093 Epoch 5/2000 +2025-02-24 16:10:20,539 Current Learning Rate: 0.0099845867 +2025-02-24 16:10:20,743 Train Loss: 0.0177616, Val Loss: 0.0173474 +2025-02-24 16:10:20,743 Epoch 6/2000 +2025-02-24 16:12:00,918 Current Learning Rate: 0.0099778098 +2025-02-24 16:12:00,919 Train Loss: 0.0178180, Val Loss: 0.0178478 +2025-02-24 16:12:00,919 Epoch 7/2000 +2025-02-24 16:13:41,202 Current Learning Rate: 0.0099698048 +2025-02-24 16:13:41,383 Train Loss: 0.0176428, Val Loss: 0.0172523 +2025-02-24 16:13:41,384 Epoch 8/2000 +2025-02-24 16:15:21,924 Current Learning Rate: 0.0099605735 +2025-02-24 16:15:21,925 Train Loss: 0.0178306, Val Loss: 0.0172721 +2025-02-24 16:15:21,925 Epoch 9/2000 +2025-02-24 16:17:03,628 Current Learning Rate: 0.0099501183 +2025-02-24 16:17:04,509 Train Loss: 0.0176807, Val Loss: 0.0171860 +2025-02-24 16:17:04,510 Epoch 10/2000 +2025-02-24 16:18:44,576 Current Learning Rate: 0.0099384417 +2025-02-24 16:18:44,761 Train Loss: 0.0175302, Val Loss: 0.0170329 +2025-02-24 16:18:44,761 Epoch 11/2000 +2025-02-24 16:20:25,594 Current Learning Rate: 0.0099255466 +2025-02-24 16:20:25,595 Train Loss: 0.0174591, Val Loss: 0.0182787 +2025-02-24 16:20:25,595 Epoch 12/2000 +2025-02-24 16:22:06,545 Current Learning Rate: 0.0099114363 +2025-02-24 16:22:06,725 Train Loss: 0.0174949, Val Loss: 0.0169655 +2025-02-24 16:22:06,725 Epoch 13/2000 +2025-02-24 16:23:47,915 Current Learning Rate: 0.0098961141 +2025-02-24 16:23:48,089 Train Loss: 0.0173617, Val Loss: 0.0169154 +2025-02-24 16:23:48,089 Epoch 14/2000 +2025-02-24 16:25:28,928 Current Learning Rate: 0.0098795838 +2025-02-24 
16:25:29,135 Train Loss: 0.0173108, Val Loss: 0.0168740 +2025-02-24 16:25:29,135 Epoch 15/2000 +2025-02-24 16:27:09,716 Current Learning Rate: 0.0098618496 +2025-02-24 16:27:09,928 Train Loss: 0.0172730, Val Loss: 0.0167979 +2025-02-24 16:27:09,928 Epoch 16/2000 +2025-02-24 16:28:50,880 Current Learning Rate: 0.0098429158 +2025-02-24 16:28:51,061 Train Loss: 0.0172399, Val Loss: 0.0167721 +2025-02-24 16:28:51,061 Epoch 17/2000 +2025-02-24 16:30:31,696 Current Learning Rate: 0.0098227871 +2025-02-24 16:30:32,033 Train Loss: 0.0172168, Val Loss: 0.0167717 +2025-02-24 16:30:32,033 Epoch 18/2000 +2025-02-24 16:32:11,515 Current Learning Rate: 0.0098014684 +2025-02-24 16:32:11,739 Train Loss: 0.0171964, Val Loss: 0.0167454 +2025-02-24 16:32:11,739 Epoch 19/2000 +2025-02-24 16:33:51,667 Current Learning Rate: 0.0097789651 +2025-02-24 16:33:51,668 Train Loss: 0.0172858, Val Loss: 0.0172174 +2025-02-24 16:33:51,668 Epoch 20/2000 +2025-02-24 16:35:31,702 Current Learning Rate: 0.0097552826 +2025-02-24 16:35:31,703 Train Loss: 0.0172961, Val Loss: 0.0169307 +2025-02-24 16:35:31,703 Epoch 21/2000 +2025-02-24 16:37:13,113 Current Learning Rate: 0.0097304268 +2025-02-24 16:37:13,113 Train Loss: 0.0172024, Val Loss: 0.0169162 +2025-02-24 16:37:13,113 Epoch 22/2000 +2025-02-24 16:38:53,792 Current Learning Rate: 0.0097044038 +2025-02-24 16:38:53,793 Train Loss: 0.0171564, Val Loss: 0.0167722 +2025-02-24 16:38:53,793 Epoch 23/2000 +2025-02-24 16:40:34,371 Current Learning Rate: 0.0096772202 +2025-02-24 16:40:34,371 Train Loss: 0.0171704, Val Loss: 0.0172323 +2025-02-24 16:40:34,372 Epoch 24/2000 +2025-02-24 16:42:15,190 Current Learning Rate: 0.0096488824 +2025-02-24 16:42:15,191 Train Loss: 0.0171777, Val Loss: 0.0170430 +2025-02-24 16:42:15,191 Epoch 25/2000 +2025-02-24 16:43:56,371 Current Learning Rate: 0.0096193977 +2025-02-24 16:43:56,371 Train Loss: 0.0171256, Val Loss: 0.0168376 +2025-02-24 16:43:56,372 Epoch 26/2000 +2025-02-24 16:45:36,570 Current Learning Rate: 
0.0095887731 +2025-02-24 16:45:36,762 Train Loss: 0.0170942, Val Loss: 0.0167428 +2025-02-24 16:45:36,762 Epoch 27/2000 +2025-02-24 16:47:17,039 Current Learning Rate: 0.0095570164 +2025-02-24 16:47:17,040 Train Loss: 0.0173439, Val Loss: 0.0168359 +2025-02-24 16:47:17,040 Epoch 28/2000 +2025-02-24 16:48:58,116 Current Learning Rate: 0.0095241353 +2025-02-24 16:48:58,117 Train Loss: 0.0172256, Val Loss: 0.0167656 +2025-02-24 16:48:58,117 Epoch 29/2000 +2025-02-24 16:50:38,923 Current Learning Rate: 0.0094901379 +2025-02-24 16:50:39,092 Train Loss: 0.0171574, Val Loss: 0.0167339 +2025-02-24 16:50:39,092 Epoch 30/2000 +2025-02-24 16:52:18,735 Current Learning Rate: 0.0094550326 +2025-02-24 16:52:18,924 Train Loss: 0.0171203, Val Loss: 0.0166975 +2025-02-24 16:52:18,924 Epoch 31/2000 +2025-02-24 16:53:59,759 Current Learning Rate: 0.0094188282 +2025-02-24 16:53:59,934 Train Loss: 0.0170932, Val Loss: 0.0166818 +2025-02-24 16:53:59,934 Epoch 32/2000 +2025-02-24 16:55:39,825 Current Learning Rate: 0.0093815334 +2025-02-24 16:55:40,001 Train Loss: 0.0170734, Val Loss: 0.0166750 +2025-02-24 16:55:40,002 Epoch 33/2000 +2025-02-24 16:57:21,249 Current Learning Rate: 0.0093431576 +2025-02-24 16:57:21,438 Train Loss: 0.0170562, Val Loss: 0.0166633 +2025-02-24 16:57:21,439 Epoch 34/2000 +2025-02-24 16:59:02,226 Current Learning Rate: 0.0093037101 +2025-02-24 16:59:03,240 Train Loss: 0.0170409, Val Loss: 0.0166528 +2025-02-24 16:59:03,240 Epoch 35/2000 +2025-02-24 17:00:43,592 Current Learning Rate: 0.0092632008 +2025-02-24 17:00:43,592 Train Loss: 0.0170294, Val Loss: 0.0166577 +2025-02-24 17:00:43,592 Epoch 36/2000 +2025-02-24 17:02:23,643 Current Learning Rate: 0.0092216396 +2025-02-24 17:02:23,645 Train Loss: 0.0170247, Val Loss: 0.0167330 +2025-02-24 17:02:23,645 Epoch 37/2000 +2025-02-24 17:04:02,786 Current Learning Rate: 0.0091790368 +2025-02-24 17:04:02,787 Train Loss: 0.0170205, Val Loss: 0.0167552 +2025-02-24 17:04:02,789 Epoch 38/2000 +2025-02-24 17:05:41,890 
Current Learning Rate: 0.0091354029 +2025-02-24 17:05:41,891 Train Loss: 0.0170234, Val Loss: 0.0167703 +2025-02-24 17:05:41,891 Epoch 39/2000 +2025-02-24 17:07:22,468 Current Learning Rate: 0.0090907486 +2025-02-24 17:07:22,672 Train Loss: 0.0170941, Val Loss: 0.0166221 +2025-02-24 17:07:22,672 Epoch 40/2000 +2025-02-24 17:09:02,546 Current Learning Rate: 0.0090450850 +2025-02-24 17:09:03,120 Train Loss: 0.0170301, Val Loss: 0.0166156 +2025-02-24 17:09:03,120 Epoch 41/2000 +2025-02-24 17:10:42,711 Current Learning Rate: 0.0089984233 +2025-02-24 17:10:42,711 Train Loss: 0.0170039, Val Loss: 0.0166200 +2025-02-24 17:10:42,711 Epoch 42/2000 +2025-02-24 17:12:22,406 Current Learning Rate: 0.0089507751 +2025-02-24 17:12:22,407 Train Loss: 0.0169875, Val Loss: 0.0166302 +2025-02-24 17:12:22,407 Epoch 43/2000 +2025-02-24 17:14:02,265 Current Learning Rate: 0.0089021520 +2025-02-24 17:14:03,161 Train Loss: 0.0169750, Val Loss: 0.0166015 +2025-02-24 17:14:03,161 Epoch 44/2000 +2025-02-24 17:15:42,695 Current Learning Rate: 0.0088525662 +2025-02-24 17:15:42,879 Train Loss: 0.0169653, Val Loss: 0.0165992 +2025-02-24 17:15:42,879 Epoch 45/2000 +2025-02-24 17:17:21,662 Current Learning Rate: 0.0088020298 +2025-02-24 17:17:21,845 Train Loss: 0.0169545, Val Loss: 0.0165674 +2025-02-24 17:17:21,846 Epoch 46/2000 +2025-02-24 17:19:01,050 Current Learning Rate: 0.0087505553 +2025-02-24 17:19:01,276 Train Loss: 0.0169450, Val Loss: 0.0165440 +2025-02-24 17:19:01,276 Epoch 47/2000 +2025-02-24 17:20:41,114 Current Learning Rate: 0.0086981555 +2025-02-24 17:20:41,302 Train Loss: 0.0169367, Val Loss: 0.0165277 +2025-02-24 17:20:41,302 Epoch 48/2000 +2025-02-24 17:22:21,190 Current Learning Rate: 0.0086448431 +2025-02-24 17:22:21,191 Train Loss: 0.0169311, Val Loss: 0.0165367 +2025-02-24 17:22:21,191 Epoch 49/2000 +2025-02-24 17:24:00,955 Current Learning Rate: 0.0085906315 +2025-02-24 17:24:01,166 Train Loss: 0.0169370, Val Loss: 0.0165190 +2025-02-24 17:24:01,166 Epoch 50/2000 
+2025-02-24 17:25:39,946 Current Learning Rate: 0.0085355339 +2025-02-24 17:25:40,141 Train Loss: 0.0169250, Val Loss: 0.0165073 +2025-02-24 17:25:40,142 Epoch 51/2000 +2025-02-24 17:27:19,878 Current Learning Rate: 0.0084795640 +2025-02-24 17:27:20,055 Train Loss: 0.0169173, Val Loss: 0.0164924 +2025-02-24 17:27:20,055 Epoch 52/2000 +2025-02-24 17:28:59,491 Current Learning Rate: 0.0084227355 +2025-02-24 17:28:59,701 Train Loss: 0.0169106, Val Loss: 0.0164760 +2025-02-24 17:28:59,701 Epoch 53/2000 +2025-02-24 17:30:39,601 Current Learning Rate: 0.0083650626 +2025-02-24 17:30:39,601 Train Loss: 0.0169291, Val Loss: 0.0164976 +2025-02-24 17:30:39,602 Epoch 54/2000 +2025-02-24 17:32:19,578 Current Learning Rate: 0.0083065593 +2025-02-24 17:32:19,579 Train Loss: 0.0169514, Val Loss: 0.0165282 +2025-02-24 17:32:19,579 Epoch 55/2000 +2025-02-24 17:33:58,683 Current Learning Rate: 0.0082472402 +2025-02-24 17:33:58,683 Train Loss: 0.0169498, Val Loss: 0.0165622 +2025-02-24 17:33:58,683 Epoch 56/2000 +2025-02-24 17:35:39,466 Current Learning Rate: 0.0081871199 +2025-02-24 17:35:39,467 Train Loss: 0.0169412, Val Loss: 0.0164797 +2025-02-24 17:35:39,467 Epoch 57/2000 +2025-02-24 17:37:21,256 Current Learning Rate: 0.0081262133 +2025-02-24 17:37:21,257 Train Loss: 0.0169189, Val Loss: 0.0164849 +2025-02-24 17:37:21,257 Epoch 58/2000 +2025-02-24 17:39:02,492 Current Learning Rate: 0.0080645353 +2025-02-24 17:39:02,493 Train Loss: 0.0169099, Val Loss: 0.0164891 +2025-02-24 17:39:02,493 Epoch 59/2000 +2025-02-24 17:40:43,849 Current Learning Rate: 0.0080021011 +2025-02-24 17:40:43,849 Train Loss: 0.0169001, Val Loss: 0.0164850 +2025-02-24 17:40:43,849 Epoch 60/2000 +2025-02-24 17:42:25,297 Current Learning Rate: 0.0079389263 +2025-02-24 17:42:25,297 Train Loss: 0.0168920, Val Loss: 0.0164776 +2025-02-24 17:42:25,298 Epoch 61/2000 +2025-02-24 17:44:07,114 Current Learning Rate: 0.0078750263 +2025-02-24 17:44:07,327 Train Loss: 0.0168846, Val Loss: 0.0164713 +2025-02-24 
17:44:07,327 Epoch 62/2000 +2025-02-24 17:45:48,869 Current Learning Rate: 0.0078104169 +2025-02-24 17:45:48,869 Train Loss: 0.0168775, Val Loss: 0.0164723 +2025-02-24 17:45:48,869 Epoch 63/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250316_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250316_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..9c28eff63ea1f8de995b3383462c6ad837bca5e8 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Dit_exp2_20250316_training_log.log @@ -0,0 +1,6005 @@ +2025-03-16 16:41:47,460 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-16 16:41:47,474 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-16 16:41:47,483 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-16 16:41:47,488 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-16 16:42:43,764 Epoch 1/2000 +2025-03-16 16:45:28,555 Current Learning Rate: 0.0009999383 +2025-03-16 16:45:28,674 Train Loss: 0.0076437, Val Loss: 0.0033789 +2025-03-16 16:45:28,674 Epoch 2/2000 +2025-03-16 16:48:12,265 Current Learning Rate: 0.0009997533 +2025-03-16 16:48:12,385 Train Loss: 0.0029846, Val Loss: 0.0027884 +2025-03-16 16:48:12,385 Epoch 3/2000 +2025-03-16 16:50:56,339 Current Learning Rate: 0.0009994449 +2025-03-16 16:50:56,482 Train Loss: 0.0025382, Val Loss: 0.0024240 +2025-03-16 16:50:56,482 Epoch 4/2000 +2025-03-16 16:53:41,374 Current Learning Rate: 0.0009990134 +2025-03-16 16:53:41,499 Train Loss: 0.0022781, Val Loss: 0.0021662 +2025-03-16 16:53:41,499 Epoch 5/2000 +2025-03-16 16:56:25,739 Current Learning Rate: 0.0009984587 +2025-03-16 16:56:25,887 Train Loss: 0.0020817, Val Loss: 0.0020263 +2025-03-16 16:56:25,888 Epoch 6/2000 +2025-03-16 16:59:10,414 Current Learning Rate: 0.0009977810 +2025-03-16 16:59:10,548 Train Loss: 0.0019107, Val Loss: 0.0018921 +2025-03-16 16:59:10,548 Epoch 7/2000 +2025-03-16 17:01:55,374 Current Learning Rate: 
0.0009969805 +2025-03-16 17:01:55,496 Train Loss: 0.0017703, Val Loss: 0.0018020 +2025-03-16 17:01:55,496 Epoch 8/2000 +2025-03-16 17:04:40,772 Current Learning Rate: 0.0009960574 +2025-03-16 17:04:40,897 Train Loss: 0.0016690, Val Loss: 0.0017583 +2025-03-16 17:04:40,897 Epoch 9/2000 +2025-03-16 17:07:25,610 Current Learning Rate: 0.0009950118 +2025-03-16 17:07:25,742 Train Loss: 0.0015776, Val Loss: 0.0016162 +2025-03-16 17:07:25,742 Epoch 10/2000 +2025-03-16 17:10:10,581 Current Learning Rate: 0.0009938442 +2025-03-16 17:10:10,745 Train Loss: 0.0014918, Val Loss: 0.0015349 +2025-03-16 17:10:10,745 Epoch 11/2000 +2025-03-16 17:12:56,377 Current Learning Rate: 0.0009925547 +2025-03-16 17:12:56,494 Train Loss: 0.0013931, Val Loss: 0.0014803 +2025-03-16 17:12:56,494 Epoch 12/2000 +2025-03-16 17:15:40,005 Current Learning Rate: 0.0009911436 +2025-03-16 17:15:40,118 Train Loss: 0.0013188, Val Loss: 0.0014167 +2025-03-16 17:15:40,119 Epoch 13/2000 +2025-03-16 17:18:24,413 Current Learning Rate: 0.0009896114 +2025-03-16 17:18:24,525 Train Loss: 0.0012686, Val Loss: 0.0013516 +2025-03-16 17:18:24,525 Epoch 14/2000 +2025-03-16 17:21:09,108 Current Learning Rate: 0.0009879584 +2025-03-16 17:21:09,227 Train Loss: 0.0012104, Val Loss: 0.0012796 +2025-03-16 17:21:09,227 Epoch 15/2000 +2025-03-16 17:23:53,541 Current Learning Rate: 0.0009861850 +2025-03-16 17:23:53,672 Train Loss: 0.0011425, Val Loss: 0.0012473 +2025-03-16 17:23:53,673 Epoch 16/2000 +2025-03-16 17:26:37,780 Current Learning Rate: 0.0009842916 +2025-03-16 17:26:37,894 Train Loss: 0.0010910, Val Loss: 0.0012308 +2025-03-16 17:26:37,894 Epoch 17/2000 +2025-03-16 17:29:22,865 Current Learning Rate: 0.0009822787 +2025-03-16 17:29:22,989 Train Loss: 0.0010624, Val Loss: 0.0012048 +2025-03-16 17:29:22,990 Epoch 18/2000 +2025-03-16 17:32:08,844 Current Learning Rate: 0.0009801468 +2025-03-16 17:32:08,958 Train Loss: 0.0010203, Val Loss: 0.0011171 +2025-03-16 17:32:08,958 Epoch 19/2000 +2025-03-16 17:34:53,082 Current 
Learning Rate: 0.0009778965 +2025-03-16 17:34:53,194 Train Loss: 0.0009816, Val Loss: 0.0010725 +2025-03-16 17:34:53,194 Epoch 20/2000 +2025-03-16 17:37:37,777 Current Learning Rate: 0.0009755283 +2025-03-16 17:37:37,887 Train Loss: 0.0009425, Val Loss: 0.0010483 +2025-03-16 17:37:37,887 Epoch 21/2000 +2025-03-16 17:40:23,051 Current Learning Rate: 0.0009730427 +2025-03-16 17:40:23,197 Train Loss: 0.0009106, Val Loss: 0.0010278 +2025-03-16 17:40:23,197 Epoch 22/2000 +2025-03-16 17:43:07,566 Current Learning Rate: 0.0009704404 +2025-03-16 17:43:07,566 Train Loss: 0.0008813, Val Loss: 0.0010279 +2025-03-16 17:43:07,566 Epoch 23/2000 +2025-03-16 17:45:52,117 Current Learning Rate: 0.0009677220 +2025-03-16 17:45:52,118 Train Loss: 0.0008598, Val Loss: 0.0010389 +2025-03-16 17:45:52,118 Epoch 24/2000 +2025-03-16 17:48:36,927 Current Learning Rate: 0.0009648882 +2025-03-16 17:48:37,043 Train Loss: 0.0008386, Val Loss: 0.0009995 +2025-03-16 17:48:37,043 Epoch 25/2000 +2025-03-16 17:51:21,601 Current Learning Rate: 0.0009619398 +2025-03-16 17:51:21,731 Train Loss: 0.0008016, Val Loss: 0.0009644 +2025-03-16 17:51:21,731 Epoch 26/2000 +2025-03-16 17:54:06,689 Current Learning Rate: 0.0009588773 +2025-03-16 17:54:06,802 Train Loss: 0.0007644, Val Loss: 0.0009166 +2025-03-16 17:54:06,802 Epoch 27/2000 +2025-03-16 17:56:51,293 Current Learning Rate: 0.0009557016 +2025-03-16 17:56:51,422 Train Loss: 0.0007359, Val Loss: 0.0008825 +2025-03-16 17:56:51,422 Epoch 28/2000 +2025-03-16 17:59:35,925 Current Learning Rate: 0.0009524135 +2025-03-16 17:59:36,042 Train Loss: 0.0007161, Val Loss: 0.0008483 +2025-03-16 17:59:36,042 Epoch 29/2000 +2025-03-16 18:02:21,267 Current Learning Rate: 0.0009490138 +2025-03-16 18:02:21,392 Train Loss: 0.0006991, Val Loss: 0.0008347 +2025-03-16 18:02:21,392 Epoch 30/2000 +2025-03-16 18:05:06,274 Current Learning Rate: 0.0009455033 +2025-03-16 18:05:06,397 Train Loss: 0.0006896, Val Loss: 0.0008288 +2025-03-16 18:05:06,397 Epoch 31/2000 +2025-03-16 
18:07:51,112 Current Learning Rate: 0.0009418828 +2025-03-16 18:07:51,224 Train Loss: 0.0006922, Val Loss: 0.0008164 +2025-03-16 18:07:51,224 Epoch 32/2000 +2025-03-16 18:10:36,372 Current Learning Rate: 0.0009381533 +2025-03-16 18:10:36,490 Train Loss: 0.0006707, Val Loss: 0.0007826 +2025-03-16 18:10:36,490 Epoch 33/2000 +2025-03-16 18:13:21,245 Current Learning Rate: 0.0009343158 +2025-03-16 18:13:21,356 Train Loss: 0.0006389, Val Loss: 0.0007613 +2025-03-16 18:13:21,356 Epoch 34/2000 +2025-03-16 18:16:06,785 Current Learning Rate: 0.0009303710 +2025-03-16 18:16:06,786 Train Loss: 0.0006185, Val Loss: 0.0007644 +2025-03-16 18:16:06,786 Epoch 35/2000 +2025-03-16 18:18:52,167 Current Learning Rate: 0.0009263201 +2025-03-16 18:18:52,168 Train Loss: 0.0006091, Val Loss: 0.0007693 +2025-03-16 18:18:52,168 Epoch 36/2000 +2025-03-16 18:21:36,819 Current Learning Rate: 0.0009221640 +2025-03-16 18:21:36,948 Train Loss: 0.0006103, Val Loss: 0.0007454 +2025-03-16 18:21:36,948 Epoch 37/2000 +2025-03-16 18:24:21,696 Current Learning Rate: 0.0009179037 +2025-03-16 18:24:21,837 Train Loss: 0.0006057, Val Loss: 0.0007344 +2025-03-16 18:24:21,837 Epoch 38/2000 +2025-03-16 18:27:06,548 Current Learning Rate: 0.0009135403 +2025-03-16 18:27:06,675 Train Loss: 0.0005981, Val Loss: 0.0007109 +2025-03-16 18:27:06,675 Epoch 39/2000 +2025-03-16 18:29:51,732 Current Learning Rate: 0.0009090749 +2025-03-16 18:29:51,900 Train Loss: 0.0005875, Val Loss: 0.0007091 +2025-03-16 18:29:51,900 Epoch 40/2000 +2025-03-16 18:32:35,932 Current Learning Rate: 0.0009045085 +2025-03-16 18:32:36,055 Train Loss: 0.0005786, Val Loss: 0.0006940 +2025-03-16 18:32:36,055 Epoch 41/2000 +2025-03-16 18:35:22,118 Current Learning Rate: 0.0008998423 +2025-03-16 18:35:22,264 Train Loss: 0.0005581, Val Loss: 0.0006662 +2025-03-16 18:35:22,264 Epoch 42/2000 +2025-03-16 18:38:07,129 Current Learning Rate: 0.0008950775 +2025-03-16 18:38:07,253 Train Loss: 0.0005466, Val Loss: 0.0006571 +2025-03-16 18:38:07,253 Epoch 
43/2000 +2025-03-16 18:40:51,438 Current Learning Rate: 0.0008902152 +2025-03-16 18:40:51,438 Train Loss: 0.0005471, Val Loss: 0.0006572 +2025-03-16 18:40:51,438 Epoch 44/2000 +2025-03-16 18:43:37,330 Current Learning Rate: 0.0008852566 +2025-03-16 18:43:37,452 Train Loss: 0.0005266, Val Loss: 0.0006254 +2025-03-16 18:43:37,452 Epoch 45/2000 +2025-03-16 18:46:21,310 Current Learning Rate: 0.0008802030 +2025-03-16 18:46:21,313 Train Loss: 0.0005168, Val Loss: 0.0006260 +2025-03-16 18:46:21,313 Epoch 46/2000 +2025-03-16 18:49:07,050 Current Learning Rate: 0.0008750555 +2025-03-16 18:49:07,180 Train Loss: 0.0005123, Val Loss: 0.0006205 +2025-03-16 18:49:07,180 Epoch 47/2000 +2025-03-16 18:51:51,596 Current Learning Rate: 0.0008698155 +2025-03-16 18:51:51,706 Train Loss: 0.0005029, Val Loss: 0.0006096 +2025-03-16 18:51:51,706 Epoch 48/2000 +2025-03-16 18:54:37,235 Current Learning Rate: 0.0008644843 +2025-03-16 18:54:37,381 Train Loss: 0.0004930, Val Loss: 0.0006035 +2025-03-16 18:54:37,381 Epoch 49/2000 +2025-03-16 18:57:22,234 Current Learning Rate: 0.0008590631 +2025-03-16 18:57:22,399 Train Loss: 0.0004809, Val Loss: 0.0005860 +2025-03-16 18:57:22,399 Epoch 50/2000 +2025-03-16 19:00:07,272 Current Learning Rate: 0.0008535534 +2025-03-16 19:00:07,401 Train Loss: 0.0004742, Val Loss: 0.0005845 +2025-03-16 19:00:07,401 Epoch 51/2000 +2025-03-16 19:02:52,586 Current Learning Rate: 0.0008479564 +2025-03-16 19:02:52,753 Train Loss: 0.0004668, Val Loss: 0.0005818 +2025-03-16 19:02:52,754 Epoch 52/2000 +2025-03-16 19:05:37,917 Current Learning Rate: 0.0008422736 +2025-03-16 19:05:38,070 Train Loss: 0.0004604, Val Loss: 0.0005751 +2025-03-16 19:05:38,071 Epoch 53/2000 +2025-03-16 19:08:23,200 Current Learning Rate: 0.0008365063 +2025-03-16 19:08:23,353 Train Loss: 0.0004519, Val Loss: 0.0005651 +2025-03-16 19:08:23,353 Epoch 54/2000 +2025-03-16 19:11:07,801 Current Learning Rate: 0.0008306559 +2025-03-16 19:11:07,936 Train Loss: 0.0004458, Val Loss: 0.0005500 +2025-03-16 
19:11:07,936 Epoch 55/2000 +2025-03-16 19:13:53,167 Current Learning Rate: 0.0008247240 +2025-03-16 19:13:53,298 Train Loss: 0.0004404, Val Loss: 0.0005463 +2025-03-16 19:13:53,299 Epoch 56/2000 +2025-03-16 19:16:38,139 Current Learning Rate: 0.0008187120 +2025-03-16 19:16:38,280 Train Loss: 0.0004342, Val Loss: 0.0005398 +2025-03-16 19:16:38,281 Epoch 57/2000 +2025-03-16 19:19:23,372 Current Learning Rate: 0.0008126213 +2025-03-16 19:19:23,499 Train Loss: 0.0004330, Val Loss: 0.0005356 +2025-03-16 19:19:23,499 Epoch 58/2000 +2025-03-16 19:22:08,067 Current Learning Rate: 0.0008064535 +2025-03-16 19:22:08,254 Train Loss: 0.0004332, Val Loss: 0.0005343 +2025-03-16 19:22:08,255 Epoch 59/2000 +2025-03-16 19:24:53,965 Current Learning Rate: 0.0008002101 +2025-03-16 19:24:54,102 Train Loss: 0.0004316, Val Loss: 0.0005318 +2025-03-16 19:24:54,103 Epoch 60/2000 +2025-03-16 19:27:39,048 Current Learning Rate: 0.0007938926 +2025-03-16 19:27:39,209 Train Loss: 0.0004313, Val Loss: 0.0005308 +2025-03-16 19:27:39,209 Epoch 61/2000 +2025-03-16 19:30:23,747 Current Learning Rate: 0.0007875026 +2025-03-16 19:30:23,867 Train Loss: 0.0004254, Val Loss: 0.0005215 +2025-03-16 19:30:23,867 Epoch 62/2000 +2025-03-16 19:33:08,452 Current Learning Rate: 0.0007810417 +2025-03-16 19:33:08,566 Train Loss: 0.0004154, Val Loss: 0.0005096 +2025-03-16 19:33:08,566 Epoch 63/2000 +2025-03-16 19:35:53,070 Current Learning Rate: 0.0007745114 +2025-03-16 19:35:53,186 Train Loss: 0.0004082, Val Loss: 0.0004961 +2025-03-16 19:35:53,186 Epoch 64/2000 +2025-03-16 19:38:37,896 Current Learning Rate: 0.0007679134 +2025-03-16 19:38:38,024 Train Loss: 0.0004086, Val Loss: 0.0004912 +2025-03-16 19:38:38,024 Epoch 65/2000 +2025-03-16 19:41:22,683 Current Learning Rate: 0.0007612493 +2025-03-16 19:41:22,802 Train Loss: 0.0004048, Val Loss: 0.0004851 +2025-03-16 19:41:22,802 Epoch 66/2000 +2025-03-16 19:44:07,198 Current Learning Rate: 0.0007545207 +2025-03-16 19:44:07,319 Train Loss: 0.0003918, Val Loss: 
0.0004720 +2025-03-16 19:44:07,320 Epoch 67/2000 +2025-03-16 19:46:53,166 Current Learning Rate: 0.0007477293 +2025-03-16 19:46:53,294 Train Loss: 0.0003699, Val Loss: 0.0004600 +2025-03-16 19:46:53,295 Epoch 68/2000 +2025-03-16 19:49:37,956 Current Learning Rate: 0.0007408768 +2025-03-16 19:49:38,079 Train Loss: 0.0003654, Val Loss: 0.0004572 +2025-03-16 19:49:38,079 Epoch 69/2000 +2025-03-16 19:52:22,719 Current Learning Rate: 0.0007339649 +2025-03-16 19:52:22,851 Train Loss: 0.0003594, Val Loss: 0.0004530 +2025-03-16 19:52:22,851 Epoch 70/2000 +2025-03-16 19:55:08,386 Current Learning Rate: 0.0007269952 +2025-03-16 19:55:08,516 Train Loss: 0.0003525, Val Loss: 0.0004507 +2025-03-16 19:55:08,516 Epoch 71/2000 +2025-03-16 19:57:53,183 Current Learning Rate: 0.0007199696 +2025-03-16 19:57:53,307 Train Loss: 0.0003487, Val Loss: 0.0004469 +2025-03-16 19:57:53,308 Epoch 72/2000 +2025-03-16 20:00:38,317 Current Learning Rate: 0.0007128896 +2025-03-16 20:00:38,464 Train Loss: 0.0003444, Val Loss: 0.0004452 +2025-03-16 20:00:38,464 Epoch 73/2000 +2025-03-16 20:03:22,548 Current Learning Rate: 0.0007057572 +2025-03-16 20:03:22,671 Train Loss: 0.0003421, Val Loss: 0.0004431 +2025-03-16 20:03:22,671 Epoch 74/2000 +2025-03-16 20:06:06,730 Current Learning Rate: 0.0006985739 +2025-03-16 20:06:06,730 Train Loss: 0.0003490, Val Loss: 0.0004456 +2025-03-16 20:06:06,730 Epoch 75/2000 +2025-03-16 20:08:51,866 Current Learning Rate: 0.0006913417 +2025-03-16 20:08:51,867 Train Loss: 0.0003522, Val Loss: 0.0004469 +2025-03-16 20:08:51,867 Epoch 76/2000 +2025-03-16 20:11:37,062 Current Learning Rate: 0.0006840623 +2025-03-16 20:11:37,180 Train Loss: 0.0003505, Val Loss: 0.0004405 +2025-03-16 20:11:37,180 Epoch 77/2000 +2025-03-16 20:14:21,875 Current Learning Rate: 0.0006767374 +2025-03-16 20:14:21,875 Train Loss: 0.0003507, Val Loss: 0.0004451 +2025-03-16 20:14:21,876 Epoch 78/2000 +2025-03-16 20:17:06,522 Current Learning Rate: 0.0006693690 +2025-03-16 20:17:06,638 Train Loss: 
0.0003497, Val Loss: 0.0004381 +2025-03-16 20:17:06,639 Epoch 79/2000 +2025-03-16 20:19:52,131 Current Learning Rate: 0.0006619587 +2025-03-16 20:19:52,131 Train Loss: 0.0003482, Val Loss: 0.0004390 +2025-03-16 20:19:52,131 Epoch 80/2000 +2025-03-16 20:22:36,862 Current Learning Rate: 0.0006545085 +2025-03-16 20:22:36,990 Train Loss: 0.0003413, Val Loss: 0.0004332 +2025-03-16 20:22:36,991 Epoch 81/2000 +2025-03-16 20:25:21,590 Current Learning Rate: 0.0006470202 +2025-03-16 20:25:21,706 Train Loss: 0.0003387, Val Loss: 0.0004288 +2025-03-16 20:25:21,706 Epoch 82/2000 +2025-03-16 20:28:06,247 Current Learning Rate: 0.0006394956 +2025-03-16 20:28:06,367 Train Loss: 0.0003363, Val Loss: 0.0004263 +2025-03-16 20:28:06,368 Epoch 83/2000 +2025-03-16 20:30:51,043 Current Learning Rate: 0.0006319365 +2025-03-16 20:30:51,165 Train Loss: 0.0003398, Val Loss: 0.0004254 +2025-03-16 20:30:51,165 Epoch 84/2000 +2025-03-16 20:33:36,273 Current Learning Rate: 0.0006243449 +2025-03-16 20:33:36,391 Train Loss: 0.0003386, Val Loss: 0.0004190 +2025-03-16 20:33:36,391 Epoch 85/2000 +2025-03-16 20:36:21,102 Current Learning Rate: 0.0006167227 +2025-03-16 20:36:21,228 Train Loss: 0.0003316, Val Loss: 0.0004116 +2025-03-16 20:36:21,228 Epoch 86/2000 +2025-03-16 20:39:05,608 Current Learning Rate: 0.0006090716 +2025-03-16 20:39:05,724 Train Loss: 0.0003259, Val Loss: 0.0004060 +2025-03-16 20:39:05,724 Epoch 87/2000 +2025-03-16 20:41:50,616 Current Learning Rate: 0.0006013936 +2025-03-16 20:41:50,747 Train Loss: 0.0003276, Val Loss: 0.0004030 +2025-03-16 20:41:50,747 Epoch 88/2000 +2025-03-16 20:44:34,856 Current Learning Rate: 0.0005936907 +2025-03-16 20:44:34,980 Train Loss: 0.0003290, Val Loss: 0.0004008 +2025-03-16 20:44:34,981 Epoch 89/2000 +2025-03-16 20:47:19,036 Current Learning Rate: 0.0005859646 +2025-03-16 20:47:19,159 Train Loss: 0.0003233, Val Loss: 0.0003979 +2025-03-16 20:47:19,160 Epoch 90/2000 +2025-03-16 20:50:04,100 Current Learning Rate: 0.0005782172 +2025-03-16 
20:50:04,220 Train Loss: 0.0003190, Val Loss: 0.0003948 +2025-03-16 20:50:04,220 Epoch 91/2000 +2025-03-16 20:52:49,013 Current Learning Rate: 0.0005704506 +2025-03-16 20:52:49,126 Train Loss: 0.0003147, Val Loss: 0.0003917 +2025-03-16 20:52:49,126 Epoch 92/2000 +2025-03-16 20:55:34,990 Current Learning Rate: 0.0005626666 +2025-03-16 20:55:35,115 Train Loss: 0.0003079, Val Loss: 0.0003879 +2025-03-16 20:55:35,115 Epoch 93/2000 +2025-03-16 20:58:19,818 Current Learning Rate: 0.0005548672 +2025-03-16 20:58:19,943 Train Loss: 0.0003037, Val Loss: 0.0003844 +2025-03-16 20:58:19,943 Epoch 94/2000 +2025-03-16 21:01:04,808 Current Learning Rate: 0.0005470542 +2025-03-16 21:01:04,926 Train Loss: 0.0003000, Val Loss: 0.0003834 +2025-03-16 21:01:04,926 Epoch 95/2000 +2025-03-16 21:03:50,201 Current Learning Rate: 0.0005392295 +2025-03-16 21:03:50,201 Train Loss: 0.0003045, Val Loss: 0.0003852 +2025-03-16 21:03:50,202 Epoch 96/2000 +2025-03-16 21:06:35,148 Current Learning Rate: 0.0005313953 +2025-03-16 21:06:35,277 Train Loss: 0.0003042, Val Loss: 0.0003822 +2025-03-16 21:06:35,278 Epoch 97/2000 +2025-03-16 21:09:19,623 Current Learning Rate: 0.0005235532 +2025-03-16 21:09:19,748 Train Loss: 0.0002995, Val Loss: 0.0003787 +2025-03-16 21:09:19,749 Epoch 98/2000 +2025-03-16 21:12:04,616 Current Learning Rate: 0.0005157054 +2025-03-16 21:12:04,763 Train Loss: 0.0002916, Val Loss: 0.0003736 +2025-03-16 21:12:04,763 Epoch 99/2000 +2025-03-16 21:14:49,261 Current Learning Rate: 0.0005078537 +2025-03-16 21:14:49,404 Train Loss: 0.0002869, Val Loss: 0.0003705 +2025-03-16 21:14:49,404 Epoch 100/2000 +2025-03-16 21:17:33,819 Current Learning Rate: 0.0005000000 +2025-03-16 21:17:33,941 Train Loss: 0.0002835, Val Loss: 0.0003693 +2025-03-16 21:17:33,941 Epoch 101/2000 +2025-03-16 21:20:18,208 Current Learning Rate: 0.0004921463 +2025-03-16 21:20:18,338 Train Loss: 0.0002821, Val Loss: 0.0003681 +2025-03-16 21:20:18,338 Epoch 102/2000 +2025-03-16 21:23:03,353 Current Learning Rate: 
0.0004842946 +2025-03-16 21:23:03,353 Train Loss: 0.0002830, Val Loss: 0.0003687 +2025-03-16 21:23:03,354 Epoch 103/2000 +2025-03-16 21:25:47,639 Current Learning Rate: 0.0004764468 +2025-03-16 21:25:47,640 Train Loss: 0.0002852, Val Loss: 0.0003693 +2025-03-16 21:25:47,640 Epoch 104/2000 +2025-03-16 21:28:32,105 Current Learning Rate: 0.0004686047 +2025-03-16 21:28:32,222 Train Loss: 0.0002829, Val Loss: 0.0003671 +2025-03-16 21:28:32,222 Epoch 105/2000 +2025-03-16 21:31:17,426 Current Learning Rate: 0.0004607705 +2025-03-16 21:31:17,593 Train Loss: 0.0002789, Val Loss: 0.0003639 +2025-03-16 21:31:17,593 Epoch 106/2000 +2025-03-16 21:34:02,197 Current Learning Rate: 0.0004529458 +2025-03-16 21:34:02,330 Train Loss: 0.0002760, Val Loss: 0.0003601 +2025-03-16 21:34:02,330 Epoch 107/2000 +2025-03-16 21:36:47,177 Current Learning Rate: 0.0004451328 +2025-03-16 21:36:47,304 Train Loss: 0.0002753, Val Loss: 0.0003588 +2025-03-16 21:36:47,304 Epoch 108/2000 +2025-03-16 21:39:31,619 Current Learning Rate: 0.0004373334 +2025-03-16 21:39:31,728 Train Loss: 0.0002738, Val Loss: 0.0003580 +2025-03-16 21:39:31,728 Epoch 109/2000 +2025-03-16 21:42:16,119 Current Learning Rate: 0.0004295494 +2025-03-16 21:42:16,119 Train Loss: 0.0002719, Val Loss: 0.0003582 +2025-03-16 21:42:16,119 Epoch 110/2000 +2025-03-16 21:45:00,783 Current Learning Rate: 0.0004217828 +2025-03-16 21:45:00,783 Train Loss: 0.0002706, Val Loss: 0.0003588 +2025-03-16 21:45:00,783 Epoch 111/2000 +2025-03-16 21:47:45,826 Current Learning Rate: 0.0004140354 +2025-03-16 21:47:45,932 Train Loss: 0.0002677, Val Loss: 0.0003556 +2025-03-16 21:47:45,933 Epoch 112/2000 +2025-03-16 21:50:30,479 Current Learning Rate: 0.0004063093 +2025-03-16 21:50:30,480 Train Loss: 0.0002645, Val Loss: 0.0003562 +2025-03-16 21:50:30,480 Epoch 113/2000 +2025-03-16 21:53:15,565 Current Learning Rate: 0.0003986064 +2025-03-16 21:53:15,565 Train Loss: 0.0002613, Val Loss: 0.0003560 +2025-03-16 21:53:15,566 Epoch 114/2000 +2025-03-16 
21:56:00,930 Current Learning Rate: 0.0003909284 +2025-03-16 21:56:01,107 Train Loss: 0.0002585, Val Loss: 0.0003533 +2025-03-16 21:56:01,107 Epoch 115/2000 +2025-03-16 21:58:46,223 Current Learning Rate: 0.0003832773 +2025-03-16 21:58:46,351 Train Loss: 0.0002566, Val Loss: 0.0003500 +2025-03-16 21:58:46,351 Epoch 116/2000 +2025-03-16 22:01:30,610 Current Learning Rate: 0.0003756551 +2025-03-16 22:01:30,737 Train Loss: 0.0002546, Val Loss: 0.0003484 +2025-03-16 22:01:30,737 Epoch 117/2000 +2025-03-16 22:04:15,208 Current Learning Rate: 0.0003680635 +2025-03-16 22:04:15,383 Train Loss: 0.0002532, Val Loss: 0.0003448 +2025-03-16 22:04:15,383 Epoch 118/2000 +2025-03-16 22:07:00,875 Current Learning Rate: 0.0003605044 +2025-03-16 22:07:01,038 Train Loss: 0.0002524, Val Loss: 0.0003434 +2025-03-16 22:07:01,038 Epoch 119/2000 +2025-03-16 22:09:45,999 Current Learning Rate: 0.0003529798 +2025-03-16 22:09:46,158 Train Loss: 0.0002521, Val Loss: 0.0003430 +2025-03-16 22:09:46,159 Epoch 120/2000 +2025-03-16 22:12:31,235 Current Learning Rate: 0.0003454915 +2025-03-16 22:12:31,363 Train Loss: 0.0002517, Val Loss: 0.0003409 +2025-03-16 22:12:31,363 Epoch 121/2000 +2025-03-16 22:15:15,761 Current Learning Rate: 0.0003380413 +2025-03-16 22:15:15,928 Train Loss: 0.0002509, Val Loss: 0.0003404 +2025-03-16 22:15:15,928 Epoch 122/2000 +2025-03-16 22:18:01,507 Current Learning Rate: 0.0003306310 +2025-03-16 22:18:01,646 Train Loss: 0.0002505, Val Loss: 0.0003383 +2025-03-16 22:18:01,646 Epoch 123/2000 +2025-03-16 22:20:46,409 Current Learning Rate: 0.0003232626 +2025-03-16 22:20:46,590 Train Loss: 0.0002497, Val Loss: 0.0003368 +2025-03-16 22:20:46,590 Epoch 124/2000 +2025-03-16 22:23:32,146 Current Learning Rate: 0.0003159377 +2025-03-16 22:23:32,257 Train Loss: 0.0002476, Val Loss: 0.0003335 +2025-03-16 22:23:32,257 Epoch 125/2000 +2025-03-16 22:26:16,545 Current Learning Rate: 0.0003086583 +2025-03-16 22:26:16,660 Train Loss: 0.0002454, Val Loss: 0.0003313 +2025-03-16 
22:26:16,661 Epoch 126/2000 +2025-03-16 22:29:01,150 Current Learning Rate: 0.0003014261 +2025-03-16 22:29:01,262 Train Loss: 0.0002437, Val Loss: 0.0003307 +2025-03-16 22:29:01,262 Epoch 127/2000 +2025-03-16 22:31:46,039 Current Learning Rate: 0.0002942428 +2025-03-16 22:31:46,155 Train Loss: 0.0002428, Val Loss: 0.0003299 +2025-03-16 22:31:46,156 Epoch 128/2000 +2025-03-16 22:34:30,392 Current Learning Rate: 0.0002871104 +2025-03-16 22:34:30,507 Train Loss: 0.0002412, Val Loss: 0.0003270 +2025-03-16 22:34:30,507 Epoch 129/2000 +2025-03-16 22:37:15,485 Current Learning Rate: 0.0002800304 +2025-03-16 22:37:15,647 Train Loss: 0.0002390, Val Loss: 0.0003251 +2025-03-16 22:37:15,647 Epoch 130/2000 +2025-03-16 22:40:00,916 Current Learning Rate: 0.0002730048 +2025-03-16 22:40:01,049 Train Loss: 0.0002372, Val Loss: 0.0003240 +2025-03-16 22:40:01,049 Epoch 131/2000 +2025-03-16 22:42:45,850 Current Learning Rate: 0.0002660351 +2025-03-16 22:42:45,980 Train Loss: 0.0002355, Val Loss: 0.0003232 +2025-03-16 22:42:45,980 Epoch 132/2000 +2025-03-16 22:45:30,375 Current Learning Rate: 0.0002591232 +2025-03-16 22:45:30,491 Train Loss: 0.0002349, Val Loss: 0.0003217 +2025-03-16 22:45:30,491 Epoch 133/2000 +2025-03-16 22:48:15,063 Current Learning Rate: 0.0002522707 +2025-03-16 22:48:15,063 Train Loss: 0.0002342, Val Loss: 0.0003219 +2025-03-16 22:48:15,063 Epoch 134/2000 +2025-03-16 22:50:59,926 Current Learning Rate: 0.0002454793 +2025-03-16 22:51:00,049 Train Loss: 0.0002320, Val Loss: 0.0003189 +2025-03-16 22:51:00,049 Epoch 135/2000 +2025-03-16 22:53:45,284 Current Learning Rate: 0.0002387507 +2025-03-16 22:53:45,408 Train Loss: 0.0002289, Val Loss: 0.0003166 +2025-03-16 22:53:45,408 Epoch 136/2000 +2025-03-16 22:56:29,742 Current Learning Rate: 0.0002320866 +2025-03-16 22:56:29,858 Train Loss: 0.0002259, Val Loss: 0.0003153 +2025-03-16 22:56:29,858 Epoch 137/2000 +2025-03-16 22:59:14,494 Current Learning Rate: 0.0002254886 +2025-03-16 22:59:14,618 Train Loss: 0.0002235, Val 
Loss: 0.0003139 +2025-03-16 22:59:14,618 Epoch 138/2000 +2025-03-16 23:02:00,302 Current Learning Rate: 0.0002189583 +2025-03-16 23:02:00,434 Train Loss: 0.0002215, Val Loss: 0.0003120 +2025-03-16 23:02:00,434 Epoch 139/2000 +2025-03-16 23:04:44,405 Current Learning Rate: 0.0002124974 +2025-03-16 23:04:44,527 Train Loss: 0.0002200, Val Loss: 0.0003105 +2025-03-16 23:04:44,528 Epoch 140/2000 +2025-03-16 23:07:29,498 Current Learning Rate: 0.0002061074 +2025-03-16 23:07:29,634 Train Loss: 0.0002189, Val Loss: 0.0003096 +2025-03-16 23:07:29,634 Epoch 141/2000 +2025-03-16 23:10:14,202 Current Learning Rate: 0.0001997899 +2025-03-16 23:10:14,340 Train Loss: 0.0002181, Val Loss: 0.0003091 +2025-03-16 23:10:14,340 Epoch 142/2000 +2025-03-16 23:12:58,560 Current Learning Rate: 0.0001935465 +2025-03-16 23:12:58,560 Train Loss: 0.0002178, Val Loss: 0.0003094 +2025-03-16 23:12:58,561 Epoch 143/2000 +2025-03-16 23:15:43,363 Current Learning Rate: 0.0001873787 +2025-03-16 23:15:43,363 Train Loss: 0.0002180, Val Loss: 0.0003094 +2025-03-16 23:15:43,363 Epoch 144/2000 +2025-03-16 23:18:28,850 Current Learning Rate: 0.0001812880 +2025-03-16 23:18:28,983 Train Loss: 0.0002190, Val Loss: 0.0003073 +2025-03-16 23:18:28,984 Epoch 145/2000 +2025-03-16 23:21:13,592 Current Learning Rate: 0.0001752760 +2025-03-16 23:21:13,593 Train Loss: 0.0002207, Val Loss: 0.0003088 +2025-03-16 23:21:13,593 Epoch 146/2000 +2025-03-16 23:23:58,571 Current Learning Rate: 0.0001693441 +2025-03-16 23:23:58,571 Train Loss: 0.0002207, Val Loss: 0.0003091 +2025-03-16 23:23:58,572 Epoch 147/2000 +2025-03-16 23:26:43,242 Current Learning Rate: 0.0001634937 +2025-03-16 23:26:43,361 Train Loss: 0.0002179, Val Loss: 0.0003051 +2025-03-16 23:26:43,361 Epoch 148/2000 +2025-03-16 23:29:28,416 Current Learning Rate: 0.0001577264 +2025-03-16 23:29:28,538 Train Loss: 0.0002150, Val Loss: 0.0003028 +2025-03-16 23:29:28,538 Epoch 149/2000 +2025-03-16 23:32:14,040 Current Learning Rate: 0.0001520436 +2025-03-16 
23:32:14,171 Train Loss: 0.0002129, Val Loss: 0.0003010 +2025-03-16 23:32:14,171 Epoch 150/2000 +2025-03-16 23:34:59,450 Current Learning Rate: 0.0001464466 +2025-03-16 23:34:59,593 Train Loss: 0.0002116, Val Loss: 0.0002997 +2025-03-16 23:34:59,594 Epoch 151/2000 +2025-03-16 23:37:44,444 Current Learning Rate: 0.0001409369 +2025-03-16 23:37:44,658 Train Loss: 0.0002105, Val Loss: 0.0002987 +2025-03-16 23:37:44,658 Epoch 152/2000 +2025-03-16 23:40:30,315 Current Learning Rate: 0.0001355157 +2025-03-16 23:40:30,443 Train Loss: 0.0002097, Val Loss: 0.0002978 +2025-03-16 23:40:30,443 Epoch 153/2000 +2025-03-16 23:43:15,333 Current Learning Rate: 0.0001301845 +2025-03-16 23:43:15,467 Train Loss: 0.0002088, Val Loss: 0.0002968 +2025-03-16 23:43:15,468 Epoch 154/2000 +2025-03-16 23:46:00,680 Current Learning Rate: 0.0001249445 +2025-03-16 23:46:00,801 Train Loss: 0.0002078, Val Loss: 0.0002959 +2025-03-16 23:46:00,801 Epoch 155/2000 +2025-03-16 23:48:45,981 Current Learning Rate: 0.0001197970 +2025-03-16 23:48:46,117 Train Loss: 0.0002070, Val Loss: 0.0002951 +2025-03-16 23:48:46,117 Epoch 156/2000 +2025-03-16 23:51:30,856 Current Learning Rate: 0.0001147434 +2025-03-16 23:51:30,983 Train Loss: 0.0002064, Val Loss: 0.0002943 +2025-03-16 23:51:30,983 Epoch 157/2000 +2025-03-16 23:54:16,910 Current Learning Rate: 0.0001097848 +2025-03-16 23:54:16,910 Train Loss: 0.0002059, Val Loss: 0.0002946 +2025-03-16 23:54:16,911 Epoch 158/2000 +2025-03-16 23:57:02,315 Current Learning Rate: 0.0001049225 +2025-03-16 23:57:02,316 Train Loss: 0.0002053, Val Loss: 0.0002946 +2025-03-16 23:57:02,316 Epoch 159/2000 +2025-03-16 23:59:46,958 Current Learning Rate: 0.0001001577 +2025-03-16 23:59:47,126 Train Loss: 0.0002047, Val Loss: 0.0002932 +2025-03-16 23:59:47,126 Epoch 160/2000 +2025-03-17 00:02:31,960 Current Learning Rate: 0.0000954915 +2025-03-17 00:02:32,094 Train Loss: 0.0002040, Val Loss: 0.0002919 +2025-03-17 00:02:32,094 Epoch 161/2000 +2025-03-17 00:05:17,484 Current Learning 
Rate: 0.0000909251 +2025-03-17 00:05:17,649 Train Loss: 0.0002033, Val Loss: 0.0002908 +2025-03-17 00:05:17,650 Epoch 162/2000 +2025-03-17 00:08:02,887 Current Learning Rate: 0.0000864597 +2025-03-17 00:08:03,019 Train Loss: 0.0002026, Val Loss: 0.0002901 +2025-03-17 00:08:03,019 Epoch 163/2000 +2025-03-17 00:10:47,489 Current Learning Rate: 0.0000820963 +2025-03-17 00:10:47,639 Train Loss: 0.0002020, Val Loss: 0.0002895 +2025-03-17 00:10:47,639 Epoch 164/2000 +2025-03-17 00:13:33,053 Current Learning Rate: 0.0000778360 +2025-03-17 00:13:33,195 Train Loss: 0.0002014, Val Loss: 0.0002888 +2025-03-17 00:13:33,195 Epoch 165/2000 +2025-03-17 00:16:18,925 Current Learning Rate: 0.0000736799 +2025-03-17 00:16:19,065 Train Loss: 0.0002009, Val Loss: 0.0002882 +2025-03-17 00:16:19,065 Epoch 166/2000 +2025-03-17 00:19:03,926 Current Learning Rate: 0.0000696290 +2025-03-17 00:19:04,107 Train Loss: 0.0002003, Val Loss: 0.0002877 +2025-03-17 00:19:04,107 Epoch 167/2000 +2025-03-17 00:21:48,858 Current Learning Rate: 0.0000656842 +2025-03-17 00:21:49,030 Train Loss: 0.0001997, Val Loss: 0.0002872 +2025-03-17 00:21:49,030 Epoch 168/2000 +2025-03-17 00:24:34,480 Current Learning Rate: 0.0000618467 +2025-03-17 00:24:34,613 Train Loss: 0.0001991, Val Loss: 0.0002867 +2025-03-17 00:24:34,613 Epoch 169/2000 +2025-03-17 00:27:18,982 Current Learning Rate: 0.0000581172 +2025-03-17 00:27:19,162 Train Loss: 0.0001986, Val Loss: 0.0002863 +2025-03-17 00:27:19,162 Epoch 170/2000 +2025-03-17 00:30:05,267 Current Learning Rate: 0.0000544967 +2025-03-17 00:30:05,453 Train Loss: 0.0001981, Val Loss: 0.0002860 +2025-03-17 00:30:05,453 Epoch 171/2000 +2025-03-17 00:32:50,874 Current Learning Rate: 0.0000509862 +2025-03-17 00:32:51,010 Train Loss: 0.0001977, Val Loss: 0.0002856 +2025-03-17 00:32:51,010 Epoch 172/2000 +2025-03-17 00:35:35,958 Current Learning Rate: 0.0000475865 +2025-03-17 00:35:36,085 Train Loss: 0.0001972, Val Loss: 0.0002852 +2025-03-17 00:35:36,086 Epoch 173/2000 +2025-03-17 
00:38:21,271 Current Learning Rate: 0.0000442984 +2025-03-17 00:38:21,459 Train Loss: 0.0001968, Val Loss: 0.0002848 +2025-03-17 00:38:21,459 Epoch 174/2000 +2025-03-17 00:41:06,320 Current Learning Rate: 0.0000411227 +2025-03-17 00:41:06,498 Train Loss: 0.0001964, Val Loss: 0.0002843 +2025-03-17 00:41:06,498 Epoch 175/2000 +2025-03-17 00:43:52,221 Current Learning Rate: 0.0000380602 +2025-03-17 00:43:52,355 Train Loss: 0.0001960, Val Loss: 0.0002838 +2025-03-17 00:43:52,356 Epoch 176/2000 +2025-03-17 00:46:36,800 Current Learning Rate: 0.0000351118 +2025-03-17 00:46:36,925 Train Loss: 0.0001956, Val Loss: 0.0002833 +2025-03-17 00:46:36,926 Epoch 177/2000 +2025-03-17 00:49:22,464 Current Learning Rate: 0.0000322780 +2025-03-17 00:49:22,592 Train Loss: 0.0001952, Val Loss: 0.0002828 +2025-03-17 00:49:22,592 Epoch 178/2000 +2025-03-17 00:52:07,992 Current Learning Rate: 0.0000295596 +2025-03-17 00:52:08,117 Train Loss: 0.0001948, Val Loss: 0.0002824 +2025-03-17 00:52:08,117 Epoch 179/2000 +2025-03-17 00:54:52,499 Current Learning Rate: 0.0000269573 +2025-03-17 00:54:52,635 Train Loss: 0.0001945, Val Loss: 0.0002821 +2025-03-17 00:54:52,635 Epoch 180/2000 +2025-03-17 00:57:37,651 Current Learning Rate: 0.0000244717 +2025-03-17 00:57:37,781 Train Loss: 0.0001941, Val Loss: 0.0002819 +2025-03-17 00:57:37,781 Epoch 181/2000 +2025-03-17 01:00:24,259 Current Learning Rate: 0.0000221035 +2025-03-17 01:00:24,411 Train Loss: 0.0001938, Val Loss: 0.0002817 +2025-03-17 01:00:24,411 Epoch 182/2000 +2025-03-17 01:03:09,297 Current Learning Rate: 0.0000198532 +2025-03-17 01:03:09,453 Train Loss: 0.0001935, Val Loss: 0.0002815 +2025-03-17 01:03:09,454 Epoch 183/2000 +2025-03-17 01:05:54,448 Current Learning Rate: 0.0000177213 +2025-03-17 01:05:54,604 Train Loss: 0.0001932, Val Loss: 0.0002812 +2025-03-17 01:05:54,604 Epoch 184/2000 +2025-03-17 01:08:39,139 Current Learning Rate: 0.0000157084 +2025-03-17 01:08:39,282 Train Loss: 0.0001930, Val Loss: 0.0002809 +2025-03-17 
01:08:39,282 Epoch 185/2000 +2025-03-17 01:11:23,724 Current Learning Rate: 0.0000138150 +2025-03-17 01:11:23,852 Train Loss: 0.0001927, Val Loss: 0.0002806 +2025-03-17 01:11:23,853 Epoch 186/2000 +2025-03-17 01:14:08,783 Current Learning Rate: 0.0000120416 +2025-03-17 01:14:08,942 Train Loss: 0.0001925, Val Loss: 0.0002804 +2025-03-17 01:14:08,942 Epoch 187/2000 +2025-03-17 01:16:54,318 Current Learning Rate: 0.0000103886 +2025-03-17 01:16:54,436 Train Loss: 0.0001923, Val Loss: 0.0002802 +2025-03-17 01:16:54,436 Epoch 188/2000 +2025-03-17 01:19:39,739 Current Learning Rate: 0.0000088564 +2025-03-17 01:19:39,863 Train Loss: 0.0001921, Val Loss: 0.0002800 +2025-03-17 01:19:39,863 Epoch 189/2000 +2025-03-17 01:22:26,810 Current Learning Rate: 0.0000074453 +2025-03-17 01:22:26,947 Train Loss: 0.0001919, Val Loss: 0.0002798 +2025-03-17 01:22:26,948 Epoch 190/2000 +2025-03-17 01:25:11,834 Current Learning Rate: 0.0000061558 +2025-03-17 01:25:11,982 Train Loss: 0.0001917, Val Loss: 0.0002797 +2025-03-17 01:25:11,983 Epoch 191/2000 +2025-03-17 01:27:57,673 Current Learning Rate: 0.0000049882 +2025-03-17 01:27:57,850 Train Loss: 0.0001916, Val Loss: 0.0002796 +2025-03-17 01:27:57,851 Epoch 192/2000 +2025-03-17 01:30:43,483 Current Learning Rate: 0.0000039426 +2025-03-17 01:30:43,621 Train Loss: 0.0001914, Val Loss: 0.0002795 +2025-03-17 01:30:43,621 Epoch 193/2000 +2025-03-17 01:33:28,652 Current Learning Rate: 0.0000030195 +2025-03-17 01:33:28,809 Train Loss: 0.0001913, Val Loss: 0.0002795 +2025-03-17 01:33:28,809 Epoch 194/2000 +2025-03-17 01:36:13,810 Current Learning Rate: 0.0000022190 +2025-03-17 01:36:13,942 Train Loss: 0.0001912, Val Loss: 0.0002795 +2025-03-17 01:36:13,943 Epoch 195/2000 +2025-03-17 01:38:58,777 Current Learning Rate: 0.0000015413 +2025-03-17 01:38:58,910 Train Loss: 0.0001911, Val Loss: 0.0002795 +2025-03-17 01:38:58,911 Epoch 196/2000 +2025-03-17 01:41:44,887 Current Learning Rate: 0.0000009866 +2025-03-17 01:41:45,018 Train Loss: 0.0001910, Val 
Loss: 0.0002794 +2025-03-17 01:41:45,018 Epoch 197/2000 +2025-03-17 01:44:29,357 Current Learning Rate: 0.0000005551 +2025-03-17 01:44:29,483 Train Loss: 0.0001909, Val Loss: 0.0002794 +2025-03-17 01:44:29,484 Epoch 198/2000 +2025-03-17 01:47:14,319 Current Learning Rate: 0.0000002467 +2025-03-17 01:47:14,435 Train Loss: 0.0001909, Val Loss: 0.0002793 +2025-03-17 01:47:14,436 Epoch 199/2000 +2025-03-17 01:49:59,535 Current Learning Rate: 0.0000000617 +2025-03-17 01:49:59,671 Train Loss: 0.0001908, Val Loss: 0.0002792 +2025-03-17 01:49:59,671 Epoch 200/2000 +2025-03-17 01:52:45,127 Current Learning Rate: 0.0000000000 +2025-03-17 01:52:45,255 Train Loss: 0.0001908, Val Loss: 0.0002792 +2025-03-17 01:52:45,256 Epoch 201/2000 +2025-03-17 01:55:30,626 Current Learning Rate: 0.0000000617 +2025-03-17 01:55:30,769 Train Loss: 0.0001908, Val Loss: 0.0002792 +2025-03-17 01:55:30,770 Epoch 202/2000 +2025-03-17 01:58:16,541 Current Learning Rate: 0.0000002467 +2025-03-17 01:58:16,682 Train Loss: 0.0001908, Val Loss: 0.0002792 +2025-03-17 01:58:16,682 Epoch 203/2000 +2025-03-17 02:01:01,388 Current Learning Rate: 0.0000005551 +2025-03-17 02:01:01,389 Train Loss: 0.0001908, Val Loss: 0.0002792 +2025-03-17 02:01:01,391 Epoch 204/2000 +2025-03-17 02:03:46,798 Current Learning Rate: 0.0000009866 +2025-03-17 02:03:46,798 Train Loss: 0.0001908, Val Loss: 0.0002793 +2025-03-17 02:03:46,799 Epoch 205/2000 +2025-03-17 02:06:31,828 Current Learning Rate: 0.0000015413 +2025-03-17 02:06:31,829 Train Loss: 0.0001909, Val Loss: 0.0002794 +2025-03-17 02:06:31,829 Epoch 206/2000 +2025-03-17 02:09:17,660 Current Learning Rate: 0.0000022190 +2025-03-17 02:09:17,661 Train Loss: 0.0001909, Val Loss: 0.0002794 +2025-03-17 02:09:17,661 Epoch 207/2000 +2025-03-17 02:12:01,934 Current Learning Rate: 0.0000030195 +2025-03-17 02:12:01,934 Train Loss: 0.0001910, Val Loss: 0.0002794 +2025-03-17 02:12:01,935 Epoch 208/2000 +2025-03-17 02:14:47,693 Current Learning Rate: 0.0000039426 +2025-03-17 
02:14:47,694 Train Loss: 0.0001911, Val Loss: 0.0002794 +2025-03-17 02:14:47,694 Epoch 209/2000 +2025-03-17 02:17:32,538 Current Learning Rate: 0.0000049882 +2025-03-17 02:17:32,538 Train Loss: 0.0001911, Val Loss: 0.0002793 +2025-03-17 02:17:32,538 Epoch 210/2000 +2025-03-17 02:20:18,091 Current Learning Rate: 0.0000061558 +2025-03-17 02:20:18,092 Train Loss: 0.0001912, Val Loss: 0.0002794 +2025-03-17 02:20:18,095 Epoch 211/2000 +2025-03-17 02:23:03,293 Current Learning Rate: 0.0000074453 +2025-03-17 02:23:03,294 Train Loss: 0.0001913, Val Loss: 0.0002794 +2025-03-17 02:23:03,294 Epoch 212/2000 +2025-03-17 02:25:48,422 Current Learning Rate: 0.0000088564 +2025-03-17 02:25:48,423 Train Loss: 0.0001914, Val Loss: 0.0002795 +2025-03-17 02:25:48,423 Epoch 213/2000 +2025-03-17 02:28:34,354 Current Learning Rate: 0.0000103886 +2025-03-17 02:28:34,355 Train Loss: 0.0001915, Val Loss: 0.0002795 +2025-03-17 02:28:34,355 Epoch 214/2000 +2025-03-17 02:31:19,434 Current Learning Rate: 0.0000120416 +2025-03-17 02:31:19,435 Train Loss: 0.0001916, Val Loss: 0.0002795 +2025-03-17 02:31:19,435 Epoch 215/2000 +2025-03-17 02:34:04,723 Current Learning Rate: 0.0000138150 +2025-03-17 02:34:04,723 Train Loss: 0.0001916, Val Loss: 0.0002796 +2025-03-17 02:34:04,723 Epoch 216/2000 +2025-03-17 02:36:49,340 Current Learning Rate: 0.0000157084 +2025-03-17 02:36:49,340 Train Loss: 0.0001917, Val Loss: 0.0002797 +2025-03-17 02:36:49,341 Epoch 217/2000 +2025-03-17 02:39:34,754 Current Learning Rate: 0.0000177213 +2025-03-17 02:39:34,755 Train Loss: 0.0001918, Val Loss: 0.0002798 +2025-03-17 02:39:34,756 Epoch 218/2000 +2025-03-17 02:42:20,675 Current Learning Rate: 0.0000198532 +2025-03-17 02:42:20,675 Train Loss: 0.0001919, Val Loss: 0.0002799 +2025-03-17 02:42:20,675 Epoch 219/2000 +2025-03-17 02:45:05,705 Current Learning Rate: 0.0000221035 +2025-03-17 02:45:05,705 Train Loss: 0.0001920, Val Loss: 0.0002799 +2025-03-17 02:45:05,706 Epoch 220/2000 +2025-03-17 02:47:51,020 Current Learning 
Rate: 0.0000244717 +2025-03-17 02:47:51,021 Train Loss: 0.0001921, Val Loss: 0.0002800 +2025-03-17 02:47:51,021 Epoch 221/2000 +2025-03-17 02:50:36,255 Current Learning Rate: 0.0000269573 +2025-03-17 02:50:36,256 Train Loss: 0.0001922, Val Loss: 0.0002801 +2025-03-17 02:50:36,256 Epoch 222/2000 +2025-03-17 02:53:20,683 Current Learning Rate: 0.0000295596 +2025-03-17 02:53:20,684 Train Loss: 0.0001923, Val Loss: 0.0002802 +2025-03-17 02:53:20,684 Epoch 223/2000 +2025-03-17 02:56:05,912 Current Learning Rate: 0.0000322780 +2025-03-17 02:56:05,913 Train Loss: 0.0001924, Val Loss: 0.0002803 +2025-03-17 02:56:05,913 Epoch 224/2000 +2025-03-17 02:58:50,925 Current Learning Rate: 0.0000351118 +2025-03-17 02:58:50,925 Train Loss: 0.0001925, Val Loss: 0.0002804 +2025-03-17 02:58:50,926 Epoch 225/2000 +2025-03-17 03:01:36,071 Current Learning Rate: 0.0000380602 +2025-03-17 03:01:36,072 Train Loss: 0.0001926, Val Loss: 0.0002804 +2025-03-17 03:01:36,072 Epoch 226/2000 +2025-03-17 03:04:20,814 Current Learning Rate: 0.0000411227 +2025-03-17 03:04:20,814 Train Loss: 0.0001928, Val Loss: 0.0002805 +2025-03-17 03:04:20,815 Epoch 227/2000 +2025-03-17 03:07:05,847 Current Learning Rate: 0.0000442984 +2025-03-17 03:07:05,847 Train Loss: 0.0001929, Val Loss: 0.0002806 +2025-03-17 03:07:05,848 Epoch 228/2000 +2025-03-17 03:09:50,197 Current Learning Rate: 0.0000475865 +2025-03-17 03:09:50,198 Train Loss: 0.0001930, Val Loss: 0.0002807 +2025-03-17 03:09:50,198 Epoch 229/2000 +2025-03-17 03:12:35,228 Current Learning Rate: 0.0000509862 +2025-03-17 03:12:35,228 Train Loss: 0.0001931, Val Loss: 0.0002808 +2025-03-17 03:12:35,228 Epoch 230/2000 +2025-03-17 03:15:19,885 Current Learning Rate: 0.0000544967 +2025-03-17 03:15:19,885 Train Loss: 0.0001932, Val Loss: 0.0002809 +2025-03-17 03:15:19,886 Epoch 231/2000 +2025-03-17 03:18:05,474 Current Learning Rate: 0.0000581172 +2025-03-17 03:18:05,474 Train Loss: 0.0001933, Val Loss: 0.0002809 +2025-03-17 03:18:05,474 Epoch 232/2000 +2025-03-17 
03:20:51,820 Current Learning Rate: 0.0000618467 +2025-03-17 03:20:51,820 Train Loss: 0.0001934, Val Loss: 0.0002810 +2025-03-17 03:20:51,820 Epoch 233/2000 +2025-03-17 03:23:37,596 Current Learning Rate: 0.0000656842 +2025-03-17 03:23:37,597 Train Loss: 0.0001936, Val Loss: 0.0002810 +2025-03-17 03:23:37,597 Epoch 234/2000 +2025-03-17 03:26:22,831 Current Learning Rate: 0.0000696290 +2025-03-17 03:26:22,831 Train Loss: 0.0001937, Val Loss: 0.0002812 +2025-03-17 03:26:22,832 Epoch 235/2000 +2025-03-17 03:29:08,955 Current Learning Rate: 0.0000736799 +2025-03-17 03:29:08,955 Train Loss: 0.0001938, Val Loss: 0.0002814 +2025-03-17 03:29:08,956 Epoch 236/2000 +2025-03-17 03:31:54,048 Current Learning Rate: 0.0000778360 +2025-03-17 03:31:54,048 Train Loss: 0.0001939, Val Loss: 0.0002816 +2025-03-17 03:31:54,049 Epoch 237/2000 +2025-03-17 03:34:39,530 Current Learning Rate: 0.0000820963 +2025-03-17 03:34:39,531 Train Loss: 0.0001940, Val Loss: 0.0002819 +2025-03-17 03:34:39,531 Epoch 238/2000 +2025-03-17 03:37:23,873 Current Learning Rate: 0.0000864597 +2025-03-17 03:37:23,873 Train Loss: 0.0001941, Val Loss: 0.0002822 +2025-03-17 03:37:23,874 Epoch 239/2000 +2025-03-17 03:40:09,240 Current Learning Rate: 0.0000909251 +2025-03-17 03:40:09,241 Train Loss: 0.0001942, Val Loss: 0.0002825 +2025-03-17 03:40:09,241 Epoch 240/2000 +2025-03-17 03:42:55,123 Current Learning Rate: 0.0000954915 +2025-03-17 03:42:55,124 Train Loss: 0.0001943, Val Loss: 0.0002826 +2025-03-17 03:42:55,124 Epoch 241/2000 +2025-03-17 03:45:39,695 Current Learning Rate: 0.0001001577 +2025-03-17 03:45:39,696 Train Loss: 0.0001944, Val Loss: 0.0002827 +2025-03-17 03:45:39,696 Epoch 242/2000 +2025-03-17 03:48:24,657 Current Learning Rate: 0.0001049225 +2025-03-17 03:48:24,657 Train Loss: 0.0001945, Val Loss: 0.0002827 +2025-03-17 03:48:24,658 Epoch 243/2000 +2025-03-17 03:51:09,367 Current Learning Rate: 0.0001097848 +2025-03-17 03:51:09,367 Train Loss: 0.0001946, Val Loss: 0.0002827 +2025-03-17 
03:51:09,368 Epoch 244/2000 +2025-03-17 03:53:54,747 Current Learning Rate: 0.0001147434 +2025-03-17 03:53:54,748 Train Loss: 0.0001947, Val Loss: 0.0002827 +2025-03-17 03:53:54,748 Epoch 245/2000 +2025-03-17 03:56:40,007 Current Learning Rate: 0.0001197970 +2025-03-17 03:56:40,008 Train Loss: 0.0001948, Val Loss: 0.0002827 +2025-03-17 03:56:40,008 Epoch 246/2000 +2025-03-17 03:59:25,185 Current Learning Rate: 0.0001249445 +2025-03-17 03:59:25,186 Train Loss: 0.0001949, Val Loss: 0.0002827 +2025-03-17 03:59:25,186 Epoch 247/2000 +2025-03-17 04:02:10,478 Current Learning Rate: 0.0001301845 +2025-03-17 04:02:10,479 Train Loss: 0.0001950, Val Loss: 0.0002830 +2025-03-17 04:02:10,479 Epoch 248/2000 +2025-03-17 04:04:55,429 Current Learning Rate: 0.0001355157 +2025-03-17 04:04:55,430 Train Loss: 0.0001953, Val Loss: 0.0002836 +2025-03-17 04:04:55,430 Epoch 249/2000 +2025-03-17 04:07:41,058 Current Learning Rate: 0.0001409369 +2025-03-17 04:07:41,059 Train Loss: 0.0001957, Val Loss: 0.0002841 +2025-03-17 04:07:41,059 Epoch 250/2000 +2025-03-17 04:10:25,927 Current Learning Rate: 0.0001464466 +2025-03-17 04:10:25,927 Train Loss: 0.0001960, Val Loss: 0.0002832 +2025-03-17 04:10:25,928 Epoch 251/2000 +2025-03-17 04:13:11,271 Current Learning Rate: 0.0001520436 +2025-03-17 04:13:11,271 Train Loss: 0.0001959, Val Loss: 0.0002832 +2025-03-17 04:13:11,271 Epoch 252/2000 +2025-03-17 04:15:56,700 Current Learning Rate: 0.0001577264 +2025-03-17 04:15:56,701 Train Loss: 0.0001959, Val Loss: 0.0002836 +2025-03-17 04:15:56,701 Epoch 253/2000 +2025-03-17 04:18:41,554 Current Learning Rate: 0.0001634937 +2025-03-17 04:18:41,554 Train Loss: 0.0001961, Val Loss: 0.0002838 +2025-03-17 04:18:41,554 Epoch 254/2000 +2025-03-17 04:21:27,507 Current Learning Rate: 0.0001693441 +2025-03-17 04:21:27,508 Train Loss: 0.0001964, Val Loss: 0.0002838 +2025-03-17 04:21:27,508 Epoch 255/2000 +2025-03-17 04:24:12,183 Current Learning Rate: 0.0001752760 +2025-03-17 04:24:12,183 Train Loss: 0.0001967, Val 
Loss: 0.0002837 +2025-03-17 04:24:12,184 Epoch 256/2000 +2025-03-17 04:26:58,311 Current Learning Rate: 0.0001812880 +2025-03-17 04:26:58,311 Train Loss: 0.0001970, Val Loss: 0.0002836 +2025-03-17 04:26:58,311 Epoch 257/2000 +2025-03-17 04:29:42,874 Current Learning Rate: 0.0001873787 +2025-03-17 04:29:42,875 Train Loss: 0.0001974, Val Loss: 0.0002839 +2025-03-17 04:29:42,875 Epoch 258/2000 +2025-03-17 04:32:27,564 Current Learning Rate: 0.0001935465 +2025-03-17 04:32:27,564 Train Loss: 0.0001976, Val Loss: 0.0002843 +2025-03-17 04:32:27,564 Epoch 259/2000 +2025-03-17 04:35:13,686 Current Learning Rate: 0.0001997899 +2025-03-17 04:35:13,687 Train Loss: 0.0001980, Val Loss: 0.0002851 +2025-03-17 04:35:13,687 Epoch 260/2000 +2025-03-17 04:37:58,738 Current Learning Rate: 0.0002061074 +2025-03-17 04:37:58,738 Train Loss: 0.0001982, Val Loss: 0.0002856 +2025-03-17 04:37:58,738 Epoch 261/2000 +2025-03-17 04:40:44,342 Current Learning Rate: 0.0002124974 +2025-03-17 04:40:44,342 Train Loss: 0.0001984, Val Loss: 0.0002857 +2025-03-17 04:40:44,342 Epoch 262/2000 +2025-03-17 04:43:29,302 Current Learning Rate: 0.0002189583 +2025-03-17 04:43:29,303 Train Loss: 0.0001987, Val Loss: 0.0002857 +2025-03-17 04:43:29,303 Epoch 263/2000 +2025-03-17 04:46:15,571 Current Learning Rate: 0.0002254886 +2025-03-17 04:46:15,571 Train Loss: 0.0001991, Val Loss: 0.0002860 +2025-03-17 04:46:15,571 Epoch 264/2000 +2025-03-17 04:49:00,855 Current Learning Rate: 0.0002320866 +2025-03-17 04:49:00,856 Train Loss: 0.0001996, Val Loss: 0.0002861 +2025-03-17 04:49:00,856 Epoch 265/2000 +2025-03-17 04:51:45,416 Current Learning Rate: 0.0002387507 +2025-03-17 04:51:45,416 Train Loss: 0.0002002, Val Loss: 0.0002865 +2025-03-17 04:51:45,417 Epoch 266/2000 +2025-03-17 04:54:30,232 Current Learning Rate: 0.0002454793 +2025-03-17 04:54:30,233 Train Loss: 0.0002004, Val Loss: 0.0002870 +2025-03-17 04:54:30,234 Epoch 267/2000 +2025-03-17 04:57:16,102 Current Learning Rate: 0.0002522707 +2025-03-17 
04:57:16,103 Train Loss: 0.0002005, Val Loss: 0.0002874 +2025-03-17 04:57:16,103 Epoch 268/2000 +2025-03-17 05:00:01,577 Current Learning Rate: 0.0002591232 +2025-03-17 05:00:01,577 Train Loss: 0.0002007, Val Loss: 0.0002884 +2025-03-17 05:00:01,578 Epoch 269/2000 +2025-03-17 05:02:46,954 Current Learning Rate: 0.0002660351 +2025-03-17 05:02:46,955 Train Loss: 0.0002010, Val Loss: 0.0002896 +2025-03-17 05:02:46,955 Epoch 270/2000 +2025-03-17 05:05:32,002 Current Learning Rate: 0.0002730048 +2025-03-17 05:05:32,003 Train Loss: 0.0002015, Val Loss: 0.0002914 +2025-03-17 05:05:32,003 Epoch 271/2000 +2025-03-17 05:08:17,661 Current Learning Rate: 0.0002800304 +2025-03-17 05:08:17,662 Train Loss: 0.0002022, Val Loss: 0.0002923 +2025-03-17 05:08:17,662 Epoch 272/2000 +2025-03-17 05:11:02,772 Current Learning Rate: 0.0002871104 +2025-03-17 05:11:02,772 Train Loss: 0.0002031, Val Loss: 0.0002936 +2025-03-17 05:11:02,773 Epoch 273/2000 +2025-03-17 05:13:47,946 Current Learning Rate: 0.0002942428 +2025-03-17 05:13:47,947 Train Loss: 0.0002044, Val Loss: 0.0002940 +2025-03-17 05:13:47,947 Epoch 274/2000 +2025-03-17 05:16:32,668 Current Learning Rate: 0.0003014261 +2025-03-17 05:16:32,669 Train Loss: 0.0002059, Val Loss: 0.0002943 +2025-03-17 05:16:32,669 Epoch 275/2000 +2025-03-17 05:19:16,917 Current Learning Rate: 0.0003086583 +2025-03-17 05:19:16,917 Train Loss: 0.0002060, Val Loss: 0.0002938 +2025-03-17 05:19:16,918 Epoch 276/2000 +2025-03-17 05:22:02,064 Current Learning Rate: 0.0003159377 +2025-03-17 05:22:02,065 Train Loss: 0.0002054, Val Loss: 0.0002935 +2025-03-17 05:22:02,065 Epoch 277/2000 +2025-03-17 05:24:47,370 Current Learning Rate: 0.0003232626 +2025-03-17 05:24:47,371 Train Loss: 0.0002049, Val Loss: 0.0002963 +2025-03-17 05:24:47,371 Epoch 278/2000 +2025-03-17 05:27:32,407 Current Learning Rate: 0.0003306310 +2025-03-17 05:27:32,407 Train Loss: 0.0002048, Val Loss: 0.0002974 +2025-03-17 05:27:32,408 Epoch 279/2000 +2025-03-17 05:30:18,858 Current Learning 
Rate: 0.0003380413 +2025-03-17 05:30:18,858 Train Loss: 0.0002050, Val Loss: 0.0002971 +2025-03-17 05:30:18,859 Epoch 280/2000 +2025-03-17 05:33:03,662 Current Learning Rate: 0.0003454915 +2025-03-17 05:33:03,662 Train Loss: 0.0002056, Val Loss: 0.0002966 +2025-03-17 05:33:03,663 Epoch 281/2000 +2025-03-17 05:35:48,400 Current Learning Rate: 0.0003529798 +2025-03-17 05:35:48,401 Train Loss: 0.0002065, Val Loss: 0.0002979 +2025-03-17 05:35:48,401 Epoch 282/2000 +2025-03-17 05:38:34,047 Current Learning Rate: 0.0003605044 +2025-03-17 05:38:34,048 Train Loss: 0.0002081, Val Loss: 0.0003002 +2025-03-17 05:38:34,048 Epoch 283/2000 +2025-03-17 05:41:18,891 Current Learning Rate: 0.0003680635 +2025-03-17 05:41:18,892 Train Loss: 0.0002107, Val Loss: 0.0003060 +2025-03-17 05:41:18,892 Epoch 284/2000 +2025-03-17 05:44:03,604 Current Learning Rate: 0.0003756551 +2025-03-17 05:44:03,604 Train Loss: 0.0002124, Val Loss: 0.0003062 +2025-03-17 05:44:03,605 Epoch 285/2000 +2025-03-17 05:46:49,480 Current Learning Rate: 0.0003832773 +2025-03-17 05:46:49,480 Train Loss: 0.0002124, Val Loss: 0.0003024 +2025-03-17 05:46:49,481 Epoch 286/2000 +2025-03-17 05:49:34,428 Current Learning Rate: 0.0003909284 +2025-03-17 05:49:34,429 Train Loss: 0.0002124, Val Loss: 0.0003048 +2025-03-17 05:49:34,429 Epoch 287/2000 +2025-03-17 05:52:19,982 Current Learning Rate: 0.0003986064 +2025-03-17 05:52:19,983 Train Loss: 0.0002126, Val Loss: 0.0003055 +2025-03-17 05:52:19,983 Epoch 288/2000 +2025-03-17 05:55:05,029 Current Learning Rate: 0.0004063093 +2025-03-17 05:55:05,029 Train Loss: 0.0002128, Val Loss: 0.0003053 +2025-03-17 05:55:05,029 Epoch 289/2000 +2025-03-17 05:57:50,300 Current Learning Rate: 0.0004140354 +2025-03-17 05:57:50,301 Train Loss: 0.0002125, Val Loss: 0.0003055 +2025-03-17 05:57:50,301 Epoch 290/2000 +2025-03-17 06:00:35,667 Current Learning Rate: 0.0004217828 +2025-03-17 06:00:35,668 Train Loss: 0.0002126, Val Loss: 0.0003082 +2025-03-17 06:00:35,668 Epoch 291/2000 +2025-03-17 
06:03:21,710 Current Learning Rate: 0.0004295494 +2025-03-17 06:03:21,713 Train Loss: 0.0002137, Val Loss: 0.0003100 +2025-03-17 06:03:21,714 Epoch 292/2000 +2025-03-17 06:06:08,196 Current Learning Rate: 0.0004373334 +2025-03-17 06:06:08,196 Train Loss: 0.0002151, Val Loss: 0.0003076 +2025-03-17 06:06:08,197 Epoch 293/2000 +2025-03-17 06:08:52,948 Current Learning Rate: 0.0004451328 +2025-03-17 06:08:52,949 Train Loss: 0.0002158, Val Loss: 0.0003088 +2025-03-17 06:08:52,950 Epoch 294/2000 +2025-03-17 06:11:38,709 Current Learning Rate: 0.0004529458 +2025-03-17 06:11:38,709 Train Loss: 0.0002170, Val Loss: 0.0003083 +2025-03-17 06:11:38,710 Epoch 295/2000 +2025-03-17 06:14:23,732 Current Learning Rate: 0.0004607705 +2025-03-17 06:14:23,732 Train Loss: 0.0002180, Val Loss: 0.0003111 +2025-03-17 06:14:23,733 Epoch 296/2000 +2025-03-17 06:17:08,634 Current Learning Rate: 0.0004686047 +2025-03-17 06:17:08,635 Train Loss: 0.0002205, Val Loss: 0.0003119 +2025-03-17 06:17:08,635 Epoch 297/2000 +2025-03-17 06:19:53,574 Current Learning Rate: 0.0004764468 +2025-03-17 06:19:53,574 Train Loss: 0.0002218, Val Loss: 0.0003154 +2025-03-17 06:19:53,575 Epoch 298/2000 +2025-03-17 06:22:37,708 Current Learning Rate: 0.0004842946 +2025-03-17 06:22:37,708 Train Loss: 0.0002210, Val Loss: 0.0003139 +2025-03-17 06:22:37,709 Epoch 299/2000 +2025-03-17 06:25:22,574 Current Learning Rate: 0.0004921463 +2025-03-17 06:25:22,574 Train Loss: 0.0002196, Val Loss: 0.0003137 +2025-03-17 06:25:22,575 Epoch 300/2000 +2025-03-17 06:28:07,924 Current Learning Rate: 0.0005000000 +2025-03-17 06:28:07,925 Train Loss: 0.0002191, Val Loss: 0.0003124 +2025-03-17 06:28:07,925 Epoch 301/2000 +2025-03-17 06:30:53,228 Current Learning Rate: 0.0005078537 +2025-03-17 06:30:53,228 Train Loss: 0.0002193, Val Loss: 0.0003124 +2025-03-17 06:30:53,229 Epoch 302/2000 +2025-03-17 06:33:38,237 Current Learning Rate: 0.0005157054 +2025-03-17 06:33:38,237 Train Loss: 0.0002207, Val Loss: 0.0003135 +2025-03-17 
06:33:38,238 Epoch 303/2000 +2025-03-17 06:36:23,052 Current Learning Rate: 0.0005235532 +2025-03-17 06:36:23,053 Train Loss: 0.0002236, Val Loss: 0.0003149 +2025-03-17 06:36:23,053 Epoch 304/2000 +2025-03-17 06:39:09,102 Current Learning Rate: 0.0005313953 +2025-03-17 06:39:09,103 Train Loss: 0.0002265, Val Loss: 0.0003154 +2025-03-17 06:39:09,103 Epoch 305/2000 +2025-03-17 06:41:54,759 Current Learning Rate: 0.0005392295 +2025-03-17 06:41:54,760 Train Loss: 0.0002274, Val Loss: 0.0003143 +2025-03-17 06:41:54,760 Epoch 306/2000 +2025-03-17 06:44:39,840 Current Learning Rate: 0.0005470542 +2025-03-17 06:44:39,840 Train Loss: 0.0002260, Val Loss: 0.0003155 +2025-03-17 06:44:39,840 Epoch 307/2000 +2025-03-17 06:47:24,989 Current Learning Rate: 0.0005548672 +2025-03-17 06:47:24,989 Train Loss: 0.0002252, Val Loss: 0.0003167 +2025-03-17 06:47:24,989 Epoch 308/2000 +2025-03-17 06:50:10,080 Current Learning Rate: 0.0005626666 +2025-03-17 06:50:10,081 Train Loss: 0.0002266, Val Loss: 0.0003192 +2025-03-17 06:50:10,081 Epoch 309/2000 +2025-03-17 06:52:55,474 Current Learning Rate: 0.0005704506 +2025-03-17 06:52:55,474 Train Loss: 0.0002294, Val Loss: 0.0003201 +2025-03-17 06:52:55,475 Epoch 310/2000 +2025-03-17 06:55:40,232 Current Learning Rate: 0.0005782172 +2025-03-17 06:55:40,233 Train Loss: 0.0002322, Val Loss: 0.0003239 +2025-03-17 06:55:40,233 Epoch 311/2000 +2025-03-17 06:58:25,443 Current Learning Rate: 0.0005859646 +2025-03-17 06:58:25,444 Train Loss: 0.0002341, Val Loss: 0.0003237 +2025-03-17 06:58:25,444 Epoch 312/2000 +2025-03-17 07:01:10,532 Current Learning Rate: 0.0005936907 +2025-03-17 07:01:10,532 Train Loss: 0.0002323, Val Loss: 0.0003227 +2025-03-17 07:01:10,533 Epoch 313/2000 +2025-03-17 07:03:55,976 Current Learning Rate: 0.0006013936 +2025-03-17 07:03:55,977 Train Loss: 0.0002295, Val Loss: 0.0003188 +2025-03-17 07:03:55,977 Epoch 314/2000 +2025-03-17 07:06:41,686 Current Learning Rate: 0.0006090716 +2025-03-17 07:06:41,687 Train Loss: 0.0002273, Val 
Loss: 0.0003210 +2025-03-17 07:06:41,687 Epoch 315/2000 +2025-03-17 07:09:27,333 Current Learning Rate: 0.0006167227 +2025-03-17 07:09:27,334 Train Loss: 0.0002278, Val Loss: 0.0003213 +2025-03-17 07:09:27,334 Epoch 316/2000 +2025-03-17 07:12:13,294 Current Learning Rate: 0.0006243449 +2025-03-17 07:12:13,294 Train Loss: 0.0002310, Val Loss: 0.0003223 +2025-03-17 07:12:13,295 Epoch 317/2000 +2025-03-17 07:14:58,610 Current Learning Rate: 0.0006319365 +2025-03-17 07:14:58,610 Train Loss: 0.0002358, Val Loss: 0.0003258 +2025-03-17 07:14:58,610 Epoch 318/2000 +2025-03-17 07:17:43,119 Current Learning Rate: 0.0006394956 +2025-03-17 07:17:43,119 Train Loss: 0.0002378, Val Loss: 0.0003278 +2025-03-17 07:17:43,119 Epoch 319/2000 +2025-03-17 07:20:28,061 Current Learning Rate: 0.0006470202 +2025-03-17 07:20:28,062 Train Loss: 0.0002371, Val Loss: 0.0003241 +2025-03-17 07:20:28,062 Epoch 320/2000 +2025-03-17 07:23:12,433 Current Learning Rate: 0.0006545085 +2025-03-17 07:23:12,433 Train Loss: 0.0002347, Val Loss: 0.0003249 +2025-03-17 07:23:12,434 Epoch 321/2000 +2025-03-17 07:25:59,322 Current Learning Rate: 0.0006619587 +2025-03-17 07:25:59,322 Train Loss: 0.0002342, Val Loss: 0.0003282 +2025-03-17 07:25:59,322 Epoch 322/2000 +2025-03-17 07:28:44,384 Current Learning Rate: 0.0006693690 +2025-03-17 07:28:44,385 Train Loss: 0.0002347, Val Loss: 0.0003274 +2025-03-17 07:28:44,385 Epoch 323/2000 +2025-03-17 07:31:29,555 Current Learning Rate: 0.0006767374 +2025-03-17 07:31:29,555 Train Loss: 0.0002368, Val Loss: 0.0003301 +2025-03-17 07:31:29,555 Epoch 324/2000 +2025-03-17 07:34:14,607 Current Learning Rate: 0.0006840623 +2025-03-17 07:34:14,608 Train Loss: 0.0002382, Val Loss: 0.0003320 +2025-03-17 07:34:14,608 Epoch 325/2000 +2025-03-17 07:36:59,827 Current Learning Rate: 0.0006913417 +2025-03-17 07:36:59,828 Train Loss: 0.0002399, Val Loss: 0.0003351 +2025-03-17 07:36:59,828 Epoch 326/2000 +2025-03-17 07:39:45,096 Current Learning Rate: 0.0006985739 +2025-03-17 
07:39:45,097 Train Loss: 0.0002405, Val Loss: 0.0003325 +2025-03-17 07:39:45,097 Epoch 327/2000 +2025-03-17 07:42:30,177 Current Learning Rate: 0.0007057572 +2025-03-17 07:42:30,177 Train Loss: 0.0002400, Val Loss: 0.0003299 +2025-03-17 07:42:30,177 Epoch 328/2000 +2025-03-17 07:45:16,581 Current Learning Rate: 0.0007128896 +2025-03-17 07:45:16,581 Train Loss: 0.0002413, Val Loss: 0.0003324 +2025-03-17 07:45:16,582 Epoch 329/2000 +2025-03-17 07:48:02,205 Current Learning Rate: 0.0007199696 +2025-03-17 07:48:02,205 Train Loss: 0.0002415, Val Loss: 0.0003315 +2025-03-17 07:48:02,206 Epoch 330/2000 +2025-03-17 07:50:46,664 Current Learning Rate: 0.0007269952 +2025-03-17 07:50:46,664 Train Loss: 0.0002418, Val Loss: 0.0003335 +2025-03-17 07:50:46,664 Epoch 331/2000 +2025-03-17 07:53:31,823 Current Learning Rate: 0.0007339649 +2025-03-17 07:53:31,823 Train Loss: 0.0002404, Val Loss: 0.0003361 +2025-03-17 07:53:31,823 Epoch 332/2000 +2025-03-17 07:56:16,400 Current Learning Rate: 0.0007408768 +2025-03-17 07:56:16,401 Train Loss: 0.0002410, Val Loss: 0.0003396 +2025-03-17 07:56:16,402 Epoch 333/2000 +2025-03-17 07:59:03,310 Current Learning Rate: 0.0007477293 +2025-03-17 07:59:03,310 Train Loss: 0.0002430, Val Loss: 0.0003395 +2025-03-17 07:59:03,310 Epoch 334/2000 +2025-03-17 08:01:48,642 Current Learning Rate: 0.0007545207 +2025-03-17 08:01:48,642 Train Loss: 0.0002445, Val Loss: 0.0003373 +2025-03-17 08:01:48,642 Epoch 335/2000 +2025-03-17 08:04:34,361 Current Learning Rate: 0.0007612493 +2025-03-17 08:04:34,362 Train Loss: 0.0002479, Val Loss: 0.0003375 +2025-03-17 08:04:34,362 Epoch 336/2000 +2025-03-17 08:07:19,212 Current Learning Rate: 0.0007679134 +2025-03-17 08:07:19,212 Train Loss: 0.0002474, Val Loss: 0.0003416 +2025-03-17 08:07:19,213 Epoch 337/2000 +2025-03-17 08:10:04,495 Current Learning Rate: 0.0007745114 +2025-03-17 08:10:04,495 Train Loss: 0.0002463, Val Loss: 0.0003379 +2025-03-17 08:10:04,496 Epoch 338/2000 +2025-03-17 08:12:49,278 Current Learning 
Rate: 0.0007810417 +2025-03-17 08:12:49,279 Train Loss: 0.0002446, Val Loss: 0.0003354 +2025-03-17 08:12:49,279 Epoch 339/2000 +2025-03-17 08:15:34,313 Current Learning Rate: 0.0007875026 +2025-03-17 08:15:34,314 Train Loss: 0.0002443, Val Loss: 0.0003363 +2025-03-17 08:15:34,314 Epoch 340/2000 +2025-03-17 08:18:19,289 Current Learning Rate: 0.0007938926 +2025-03-17 08:18:19,289 Train Loss: 0.0002446, Val Loss: 0.0003364 +2025-03-17 08:18:19,289 Epoch 341/2000 +2025-03-17 08:21:05,214 Current Learning Rate: 0.0008002101 +2025-03-17 08:21:05,215 Train Loss: 0.0002461, Val Loss: 0.0003391 +2025-03-17 08:21:05,215 Epoch 342/2000 +2025-03-17 08:23:50,678 Current Learning Rate: 0.0008064535 +2025-03-17 08:23:50,678 Train Loss: 0.0002467, Val Loss: 0.0003365 +2025-03-17 08:23:50,679 Epoch 343/2000 +2025-03-17 08:26:36,536 Current Learning Rate: 0.0008126213 +2025-03-17 08:26:36,536 Train Loss: 0.0002454, Val Loss: 0.0003378 +2025-03-17 08:26:36,537 Epoch 344/2000 +2025-03-17 08:29:22,002 Current Learning Rate: 0.0008187120 +2025-03-17 08:29:22,002 Train Loss: 0.0002471, Val Loss: 0.0003364 +2025-03-17 08:29:22,003 Epoch 345/2000 +2025-03-17 08:32:07,376 Current Learning Rate: 0.0008247240 +2025-03-17 08:32:07,377 Train Loss: 0.0002487, Val Loss: 0.0003443 +2025-03-17 08:32:07,377 Epoch 346/2000 +2025-03-17 08:34:52,986 Current Learning Rate: 0.0008306559 +2025-03-17 08:34:52,987 Train Loss: 0.0002466, Val Loss: 0.0003434 +2025-03-17 08:34:52,987 Epoch 347/2000 +2025-03-17 08:37:38,064 Current Learning Rate: 0.0008365063 +2025-03-17 08:37:38,065 Train Loss: 0.0002460, Val Loss: 0.0003408 +2025-03-17 08:37:38,065 Epoch 348/2000 +2025-03-17 08:40:23,441 Current Learning Rate: 0.0008422736 +2025-03-17 08:40:23,441 Train Loss: 0.0002481, Val Loss: 0.0003424 +2025-03-17 08:40:23,442 Epoch 349/2000 +2025-03-17 08:43:08,588 Current Learning Rate: 0.0008479564 +2025-03-17 08:43:08,588 Train Loss: 0.0002510, Val Loss: 0.0003452 +2025-03-17 08:43:08,592 Epoch 350/2000 +2025-03-17 
08:45:53,858 Current Learning Rate: 0.0008535534 +2025-03-17 08:45:53,858 Train Loss: 0.0002500, Val Loss: 0.0003468 +2025-03-17 08:45:53,859 Epoch 351/2000 +2025-03-17 08:48:38,039 Current Learning Rate: 0.0008590631 +2025-03-17 08:48:38,043 Train Loss: 0.0002484, Val Loss: 0.0003478 +2025-03-17 08:48:38,043 Epoch 352/2000 +2025-03-17 08:51:23,802 Current Learning Rate: 0.0008644843 +2025-03-17 08:51:23,803 Train Loss: 0.0002484, Val Loss: 0.0003529 +2025-03-17 08:51:23,803 Epoch 353/2000 +2025-03-17 08:54:09,014 Current Learning Rate: 0.0008698155 +2025-03-17 08:54:09,016 Train Loss: 0.0002517, Val Loss: 0.0003588 +2025-03-17 08:54:09,017 Epoch 354/2000 +2025-03-17 08:56:55,353 Current Learning Rate: 0.0008750555 +2025-03-17 08:56:55,354 Train Loss: 0.0002545, Val Loss: 0.0003510 +2025-03-17 08:56:55,354 Epoch 355/2000 +2025-03-17 08:59:40,167 Current Learning Rate: 0.0008802030 +2025-03-17 08:59:40,168 Train Loss: 0.0002514, Val Loss: 0.0003510 +2025-03-17 08:59:40,168 Epoch 356/2000 +2025-03-17 09:02:26,011 Current Learning Rate: 0.0008852566 +2025-03-17 09:02:26,011 Train Loss: 0.0002485, Val Loss: 0.0003574 +2025-03-17 09:02:26,011 Epoch 357/2000 +2025-03-17 09:05:11,446 Current Learning Rate: 0.0008902152 +2025-03-17 09:05:11,447 Train Loss: 0.0002475, Val Loss: 0.0003542 +2025-03-17 09:05:11,447 Epoch 358/2000 +2025-03-17 09:07:56,541 Current Learning Rate: 0.0008950775 +2025-03-17 09:07:56,541 Train Loss: 0.0002482, Val Loss: 0.0003553 +2025-03-17 09:07:56,542 Epoch 359/2000 +2025-03-17 09:10:41,885 Current Learning Rate: 0.0008998423 +2025-03-17 09:10:41,886 Train Loss: 0.0002497, Val Loss: 0.0003573 +2025-03-17 09:10:41,886 Epoch 360/2000 +2025-03-17 09:13:27,226 Current Learning Rate: 0.0009045085 +2025-03-17 09:13:27,227 Train Loss: 0.0002529, Val Loss: 0.0003544 +2025-03-17 09:13:27,227 Epoch 361/2000 +2025-03-17 09:16:12,660 Current Learning Rate: 0.0009090749 +2025-03-17 09:16:12,661 Train Loss: 0.0002550, Val Loss: 0.0003557 +2025-03-17 
09:16:12,661 Epoch 362/2000 +2025-03-17 09:18:58,088 Current Learning Rate: 0.0009135403 +2025-03-17 09:18:58,089 Train Loss: 0.0002524, Val Loss: 0.0003540 +2025-03-17 09:18:58,089 Epoch 363/2000 +2025-03-17 09:21:43,705 Current Learning Rate: 0.0009179037 +2025-03-17 09:21:43,706 Train Loss: 0.0002505, Val Loss: 0.0003525 +2025-03-17 09:21:43,708 Epoch 364/2000 +2025-03-17 09:24:29,355 Current Learning Rate: 0.0009221640 +2025-03-17 09:24:29,355 Train Loss: 0.0002526, Val Loss: 0.0003507 +2025-03-17 09:24:29,356 Epoch 365/2000 +2025-03-17 09:27:13,448 Current Learning Rate: 0.0009263201 +2025-03-17 09:27:13,448 Train Loss: 0.0002532, Val Loss: 0.0003455 +2025-03-17 09:27:13,448 Epoch 366/2000 +2025-03-17 09:29:58,742 Current Learning Rate: 0.0009303710 +2025-03-17 09:29:58,742 Train Loss: 0.0002497, Val Loss: 0.0003423 +2025-03-17 09:29:58,742 Epoch 367/2000 +2025-03-17 09:32:44,421 Current Learning Rate: 0.0009343158 +2025-03-17 09:32:44,422 Train Loss: 0.0002481, Val Loss: 0.0003489 +2025-03-17 09:32:44,422 Epoch 368/2000 +2025-03-17 09:35:29,838 Current Learning Rate: 0.0009381533 +2025-03-17 09:35:29,840 Train Loss: 0.0002502, Val Loss: 0.0003547 +2025-03-17 09:35:29,841 Epoch 369/2000 +2025-03-17 09:38:14,575 Current Learning Rate: 0.0009418828 +2025-03-17 09:38:14,576 Train Loss: 0.0002517, Val Loss: 0.0003468 +2025-03-17 09:38:14,576 Epoch 370/2000 +2025-03-17 09:40:59,496 Current Learning Rate: 0.0009455033 +2025-03-17 09:40:59,496 Train Loss: 0.0002518, Val Loss: 0.0003509 +2025-03-17 09:40:59,496 Epoch 371/2000 +2025-03-17 09:43:45,433 Current Learning Rate: 0.0009490138 +2025-03-17 09:43:45,433 Train Loss: 0.0002517, Val Loss: 0.0003519 +2025-03-17 09:43:45,433 Epoch 372/2000 +2025-03-17 09:46:30,790 Current Learning Rate: 0.0009524135 +2025-03-17 09:46:30,791 Train Loss: 0.0002528, Val Loss: 0.0003487 +2025-03-17 09:46:30,791 Epoch 373/2000 +2025-03-17 09:49:15,435 Current Learning Rate: 0.0009557016 +2025-03-17 09:49:15,435 Train Loss: 0.0002505, Val 
Loss: 0.0003433 +2025-03-17 09:49:15,436 Epoch 374/2000 +2025-03-17 09:52:00,657 Current Learning Rate: 0.0009588773 +2025-03-17 09:52:00,658 Train Loss: 0.0002495, Val Loss: 0.0003413 +2025-03-17 09:52:00,658 Epoch 375/2000 +2025-03-17 09:54:46,078 Current Learning Rate: 0.0009619398 +2025-03-17 09:54:46,079 Train Loss: 0.0002479, Val Loss: 0.0003366 +2025-03-17 09:54:46,079 Epoch 376/2000 +2025-03-17 09:57:32,845 Current Learning Rate: 0.0009648882 +2025-03-17 09:57:32,846 Train Loss: 0.0002484, Val Loss: 0.0003417 +2025-03-17 09:57:32,846 Epoch 377/2000 +2025-03-17 10:00:17,853 Current Learning Rate: 0.0009677220 +2025-03-17 10:00:17,853 Train Loss: 0.0002502, Val Loss: 0.0003462 +2025-03-17 10:00:17,853 Epoch 378/2000 +2025-03-17 10:03:04,571 Current Learning Rate: 0.0009704404 +2025-03-17 10:03:04,571 Train Loss: 0.0002497, Val Loss: 0.0003372 +2025-03-17 10:03:04,572 Epoch 379/2000 +2025-03-17 10:05:49,227 Current Learning Rate: 0.0009730427 +2025-03-17 10:05:49,228 Train Loss: 0.0002474, Val Loss: 0.0003373 +2025-03-17 10:05:49,228 Epoch 380/2000 +2025-03-17 10:08:34,541 Current Learning Rate: 0.0009755283 +2025-03-17 10:08:34,541 Train Loss: 0.0002461, Val Loss: 0.0003389 +2025-03-17 10:08:34,542 Epoch 381/2000 +2025-03-17 10:11:19,593 Current Learning Rate: 0.0009778965 +2025-03-17 10:11:19,594 Train Loss: 0.0002469, Val Loss: 0.0003440 +2025-03-17 10:11:19,594 Epoch 382/2000 +2025-03-17 10:14:04,826 Current Learning Rate: 0.0009801468 +2025-03-17 10:14:04,827 Train Loss: 0.0002502, Val Loss: 0.0003418 +2025-03-17 10:14:04,827 Epoch 383/2000 +2025-03-17 10:16:50,000 Current Learning Rate: 0.0009822787 +2025-03-17 10:16:50,000 Train Loss: 0.0002518, Val Loss: 0.0003443 +2025-03-17 10:16:50,001 Epoch 384/2000 +2025-03-17 10:19:35,018 Current Learning Rate: 0.0009842916 +2025-03-17 10:19:35,019 Train Loss: 0.0002486, Val Loss: 0.0003377 +2025-03-17 10:19:35,020 Epoch 385/2000 +2025-03-17 10:22:21,007 Current Learning Rate: 0.0009861850 +2025-03-17 
10:22:21,008 Train Loss: 0.0002449, Val Loss: 0.0003375 +2025-03-17 10:22:21,008 Epoch 386/2000 +2025-03-17 10:25:06,047 Current Learning Rate: 0.0009879584 +2025-03-17 10:25:06,048 Train Loss: 0.0002434, Val Loss: 0.0003374 +2025-03-17 10:25:06,048 Epoch 387/2000 +2025-03-17 10:27:50,792 Current Learning Rate: 0.0009896114 +2025-03-17 10:27:50,792 Train Loss: 0.0002459, Val Loss: 0.0003398 +2025-03-17 10:27:50,792 Epoch 388/2000 +2025-03-17 10:30:36,990 Current Learning Rate: 0.0009911436 +2025-03-17 10:30:36,990 Train Loss: 0.0002473, Val Loss: 0.0003380 +2025-03-17 10:30:36,990 Epoch 389/2000 +2025-03-17 10:33:21,720 Current Learning Rate: 0.0009925547 +2025-03-17 10:33:21,720 Train Loss: 0.0002468, Val Loss: 0.0003382 +2025-03-17 10:33:21,724 Epoch 390/2000 +2025-03-17 10:36:07,472 Current Learning Rate: 0.0009938442 +2025-03-17 10:36:07,473 Train Loss: 0.0002467, Val Loss: 0.0003446 +2025-03-17 10:36:07,473 Epoch 391/2000 +2025-03-17 10:38:53,558 Current Learning Rate: 0.0009950118 +2025-03-17 10:38:53,558 Train Loss: 0.0002484, Val Loss: 0.0003413 +2025-03-17 10:38:53,558 Epoch 392/2000 +2025-03-17 10:41:38,402 Current Learning Rate: 0.0009960574 +2025-03-17 10:41:38,403 Train Loss: 0.0002481, Val Loss: 0.0003385 +2025-03-17 10:41:38,403 Epoch 393/2000 +2025-03-17 10:44:23,559 Current Learning Rate: 0.0009969805 +2025-03-17 10:44:23,560 Train Loss: 0.0002445, Val Loss: 0.0003367 +2025-03-17 10:44:23,560 Epoch 394/2000 +2025-03-17 10:47:08,391 Current Learning Rate: 0.0009977810 +2025-03-17 10:47:08,392 Train Loss: 0.0002424, Val Loss: 0.0003333 +2025-03-17 10:47:08,392 Epoch 395/2000 +2025-03-17 10:49:54,186 Current Learning Rate: 0.0009984587 +2025-03-17 10:49:54,186 Train Loss: 0.0002426, Val Loss: 0.0003364 +2025-03-17 10:49:54,187 Epoch 396/2000 +2025-03-17 10:52:38,868 Current Learning Rate: 0.0009990134 +2025-03-17 10:52:38,868 Train Loss: 0.0002435, Val Loss: 0.0003377 +2025-03-17 10:52:38,869 Epoch 397/2000 +2025-03-17 10:55:24,818 Current Learning 
Rate: 0.0009994449 +2025-03-17 10:55:24,818 Train Loss: 0.0002439, Val Loss: 0.0003374 +2025-03-17 10:55:24,818 Epoch 398/2000 +2025-03-17 10:58:09,468 Current Learning Rate: 0.0009997533 +2025-03-17 10:58:09,468 Train Loss: 0.0002440, Val Loss: 0.0003424 +2025-03-17 10:58:09,468 Epoch 399/2000 +2025-03-17 11:00:54,839 Current Learning Rate: 0.0009999383 +2025-03-17 11:00:54,839 Train Loss: 0.0002463, Val Loss: 0.0003381 +2025-03-17 11:00:54,840 Epoch 400/2000 +2025-03-17 11:03:40,238 Current Learning Rate: 0.0010000000 +2025-03-17 11:03:40,238 Train Loss: 0.0002473, Val Loss: 0.0003389 +2025-03-17 11:03:40,238 Epoch 401/2000 +2025-03-17 11:06:25,610 Current Learning Rate: 0.0009999383 +2025-03-17 11:06:25,610 Train Loss: 0.0002445, Val Loss: 0.0003398 +2025-03-17 11:06:25,610 Epoch 402/2000 +2025-03-17 11:09:10,337 Current Learning Rate: 0.0009997533 +2025-03-17 11:09:10,338 Train Loss: 0.0002418, Val Loss: 0.0003336 +2025-03-17 11:09:10,338 Epoch 403/2000 +2025-03-17 11:11:55,620 Current Learning Rate: 0.0009994449 +2025-03-17 11:11:55,621 Train Loss: 0.0002391, Val Loss: 0.0003363 +2025-03-17 11:11:55,621 Epoch 404/2000 +2025-03-17 11:14:40,534 Current Learning Rate: 0.0009990134 +2025-03-17 11:14:40,535 Train Loss: 0.0002389, Val Loss: 0.0003336 +2025-03-17 11:14:40,535 Epoch 405/2000 +2025-03-17 11:17:25,729 Current Learning Rate: 0.0009984587 +2025-03-17 11:17:25,729 Train Loss: 0.0002386, Val Loss: 0.0003362 +2025-03-17 11:17:25,730 Epoch 406/2000 +2025-03-17 11:20:11,517 Current Learning Rate: 0.0009977810 +2025-03-17 11:20:11,518 Train Loss: 0.0002395, Val Loss: 0.0003373 +2025-03-17 11:20:11,518 Epoch 407/2000 +2025-03-17 11:22:56,188 Current Learning Rate: 0.0009969805 +2025-03-17 11:22:56,188 Train Loss: 0.0002399, Val Loss: 0.0003340 +2025-03-17 11:22:56,189 Epoch 408/2000 +2025-03-17 11:25:41,990 Current Learning Rate: 0.0009960574 +2025-03-17 11:25:41,991 Train Loss: 0.0002381, Val Loss: 0.0003296 +2025-03-17 11:25:41,992 Epoch 409/2000 +2025-03-17 
11:28:27,064 Current Learning Rate: 0.0009950118 +2025-03-17 11:28:27,064 Train Loss: 0.0002369, Val Loss: 0.0003302 +2025-03-17 11:28:27,064 Epoch 410/2000 +2025-03-17 11:31:12,546 Current Learning Rate: 0.0009938442 +2025-03-17 11:31:12,546 Train Loss: 0.0002385, Val Loss: 0.0003320 +2025-03-17 11:31:12,547 Epoch 411/2000 +2025-03-17 11:33:58,548 Current Learning Rate: 0.0009925547 +2025-03-17 11:33:58,548 Train Loss: 0.0002399, Val Loss: 0.0003318 +2025-03-17 11:33:58,549 Epoch 412/2000 +2025-03-17 11:36:43,879 Current Learning Rate: 0.0009911436 +2025-03-17 11:36:43,880 Train Loss: 0.0002394, Val Loss: 0.0003330 +2025-03-17 11:36:43,880 Epoch 413/2000 +2025-03-17 11:39:29,233 Current Learning Rate: 0.0009896114 +2025-03-17 11:39:29,233 Train Loss: 0.0002382, Val Loss: 0.0003371 +2025-03-17 11:39:29,234 Epoch 414/2000 +2025-03-17 11:42:13,848 Current Learning Rate: 0.0009879584 +2025-03-17 11:42:13,849 Train Loss: 0.0002372, Val Loss: 0.0003387 +2025-03-17 11:42:13,849 Epoch 415/2000 +2025-03-17 11:44:59,117 Current Learning Rate: 0.0009861850 +2025-03-17 11:44:59,117 Train Loss: 0.0002360, Val Loss: 0.0003385 +2025-03-17 11:44:59,118 Epoch 416/2000 +2025-03-17 11:47:43,904 Current Learning Rate: 0.0009842916 +2025-03-17 11:47:43,904 Train Loss: 0.0002357, Val Loss: 0.0003353 +2025-03-17 11:47:43,905 Epoch 417/2000 +2025-03-17 11:50:29,088 Current Learning Rate: 0.0009822787 +2025-03-17 11:50:29,089 Train Loss: 0.0002349, Val Loss: 0.0003313 +2025-03-17 11:50:29,089 Epoch 418/2000 +2025-03-17 11:53:13,999 Current Learning Rate: 0.0009801468 +2025-03-17 11:53:14,000 Train Loss: 0.0002346, Val Loss: 0.0003292 +2025-03-17 11:53:14,001 Epoch 419/2000 +2025-03-17 11:55:59,295 Current Learning Rate: 0.0009778965 +2025-03-17 11:55:59,296 Train Loss: 0.0002342, Val Loss: 0.0003254 +2025-03-17 11:55:59,296 Epoch 420/2000 +2025-03-17 11:58:44,560 Current Learning Rate: 0.0009755283 +2025-03-17 11:58:44,561 Train Loss: 0.0002340, Val Loss: 0.0003239 +2025-03-17 
11:58:44,561 Epoch 421/2000 +2025-03-17 12:01:30,228 Current Learning Rate: 0.0009730427 +2025-03-17 12:01:30,229 Train Loss: 0.0002341, Val Loss: 0.0003236 +2025-03-17 12:01:30,229 Epoch 422/2000 +2025-03-17 12:04:14,915 Current Learning Rate: 0.0009704404 +2025-03-17 12:04:14,916 Train Loss: 0.0002353, Val Loss: 0.0003255 +2025-03-17 12:04:14,916 Epoch 423/2000 +2025-03-17 12:07:00,351 Current Learning Rate: 0.0009677220 +2025-03-17 12:07:00,352 Train Loss: 0.0002354, Val Loss: 0.0003256 +2025-03-17 12:07:00,352 Epoch 424/2000 +2025-03-17 12:09:45,500 Current Learning Rate: 0.0009648882 +2025-03-17 12:09:45,501 Train Loss: 0.0002337, Val Loss: 0.0003238 +2025-03-17 12:09:45,501 Epoch 425/2000 +2025-03-17 12:12:30,381 Current Learning Rate: 0.0009619398 +2025-03-17 12:12:30,382 Train Loss: 0.0002331, Val Loss: 0.0003247 +2025-03-17 12:12:30,382 Epoch 426/2000 +2025-03-17 12:15:16,030 Current Learning Rate: 0.0009588773 +2025-03-17 12:15:16,031 Train Loss: 0.0002331, Val Loss: 0.0003245 +2025-03-17 12:15:16,031 Epoch 427/2000 +2025-03-17 12:18:00,625 Current Learning Rate: 0.0009557016 +2025-03-17 12:18:00,625 Train Loss: 0.0002339, Val Loss: 0.0003293 +2025-03-17 12:18:00,626 Epoch 428/2000 +2025-03-17 12:20:46,125 Current Learning Rate: 0.0009524135 +2025-03-17 12:20:46,126 Train Loss: 0.0002347, Val Loss: 0.0003252 +2025-03-17 12:20:46,126 Epoch 429/2000 +2025-03-17 12:23:32,283 Current Learning Rate: 0.0009490138 +2025-03-17 12:23:32,283 Train Loss: 0.0002317, Val Loss: 0.0003209 +2025-03-17 12:23:32,284 Epoch 430/2000 +2025-03-17 12:26:17,550 Current Learning Rate: 0.0009455033 +2025-03-17 12:26:17,550 Train Loss: 0.0002285, Val Loss: 0.0003175 +2025-03-17 12:26:17,551 Epoch 431/2000 +2025-03-17 12:29:03,096 Current Learning Rate: 0.0009418828 +2025-03-17 12:29:03,097 Train Loss: 0.0002254, Val Loss: 0.0003150 +2025-03-17 12:29:03,097 Epoch 432/2000 +2025-03-17 12:31:48,235 Current Learning Rate: 0.0009381533 +2025-03-17 12:31:48,236 Train Loss: 0.0002242, Val 
Loss: 0.0003167 +2025-03-17 12:31:48,236 Epoch 433/2000 +2025-03-17 12:34:33,519 Current Learning Rate: 0.0009343158 +2025-03-17 12:34:33,519 Train Loss: 0.0002234, Val Loss: 0.0003161 +2025-03-17 12:34:33,519 Epoch 434/2000 +2025-03-17 12:37:18,480 Current Learning Rate: 0.0009303710 +2025-03-17 12:37:18,480 Train Loss: 0.0002248, Val Loss: 0.0003185 +2025-03-17 12:37:18,480 Epoch 435/2000 +2025-03-17 12:40:03,516 Current Learning Rate: 0.0009263201 +2025-03-17 12:40:03,517 Train Loss: 0.0002263, Val Loss: 0.0003201 +2025-03-17 12:40:03,517 Epoch 436/2000 +2025-03-17 12:42:48,330 Current Learning Rate: 0.0009221640 +2025-03-17 12:42:48,331 Train Loss: 0.0002251, Val Loss: 0.0003260 +2025-03-17 12:42:48,331 Epoch 437/2000 +2025-03-17 12:45:33,748 Current Learning Rate: 0.0009179037 +2025-03-17 12:45:33,748 Train Loss: 0.0002231, Val Loss: 0.0003286 +2025-03-17 12:45:33,749 Epoch 438/2000 +2025-03-17 12:48:17,909 Current Learning Rate: 0.0009135403 +2025-03-17 12:48:17,910 Train Loss: 0.0002232, Val Loss: 0.0003258 +2025-03-17 12:48:17,910 Epoch 439/2000 +2025-03-17 12:51:02,910 Current Learning Rate: 0.0009090749 +2025-03-17 12:51:02,911 Train Loss: 0.0002231, Val Loss: 0.0003197 +2025-03-17 12:51:02,911 Epoch 440/2000 +2025-03-17 12:53:47,858 Current Learning Rate: 0.0009045085 +2025-03-17 12:53:47,859 Train Loss: 0.0002226, Val Loss: 0.0003224 +2025-03-17 12:53:47,859 Epoch 441/2000 +2025-03-17 12:56:33,659 Current Learning Rate: 0.0008998423 +2025-03-17 12:56:33,659 Train Loss: 0.0002210, Val Loss: 0.0003165 +2025-03-17 12:56:33,660 Epoch 442/2000 +2025-03-17 12:59:18,366 Current Learning Rate: 0.0008950775 +2025-03-17 12:59:18,367 Train Loss: 0.0002205, Val Loss: 0.0003122 +2025-03-17 12:59:18,367 Epoch 443/2000 +2025-03-17 13:02:02,836 Current Learning Rate: 0.0008902152 +2025-03-17 13:02:02,837 Train Loss: 0.0002204, Val Loss: 0.0003111 +2025-03-17 13:02:02,837 Epoch 444/2000 +2025-03-17 13:04:47,671 Current Learning Rate: 0.0008852566 +2025-03-17 
13:04:47,671 Train Loss: 0.0002197, Val Loss: 0.0003088 +2025-03-17 13:04:47,672 Epoch 445/2000 +2025-03-17 13:07:32,480 Current Learning Rate: 0.0008802030 +2025-03-17 13:07:32,481 Train Loss: 0.0002186, Val Loss: 0.0003082 +2025-03-17 13:07:32,481 Epoch 446/2000 +2025-03-17 13:10:17,649 Current Learning Rate: 0.0008750555 +2025-03-17 13:10:17,649 Train Loss: 0.0002188, Val Loss: 0.0003054 +2025-03-17 13:10:17,650 Epoch 447/2000 +2025-03-17 13:13:01,957 Current Learning Rate: 0.0008698155 +2025-03-17 13:13:01,958 Train Loss: 0.0002198, Val Loss: 0.0003039 +2025-03-17 13:13:01,958 Epoch 448/2000 +2025-03-17 13:15:46,603 Current Learning Rate: 0.0008644843 +2025-03-17 13:15:46,604 Train Loss: 0.0002190, Val Loss: 0.0003044 +2025-03-17 13:15:46,604 Epoch 449/2000 +2025-03-17 13:18:31,598 Current Learning Rate: 0.0008590631 +2025-03-17 13:18:31,598 Train Loss: 0.0002173, Val Loss: 0.0003056 +2025-03-17 13:18:31,598 Epoch 450/2000 +2025-03-17 13:21:16,617 Current Learning Rate: 0.0008535534 +2025-03-17 13:21:16,617 Train Loss: 0.0002166, Val Loss: 0.0003039 +2025-03-17 13:21:16,617 Epoch 451/2000 +2025-03-17 13:24:02,734 Current Learning Rate: 0.0008479564 +2025-03-17 13:24:02,735 Train Loss: 0.0002162, Val Loss: 0.0003031 +2025-03-17 13:24:02,735 Epoch 452/2000 +2025-03-17 13:26:48,068 Current Learning Rate: 0.0008422736 +2025-03-17 13:26:48,069 Train Loss: 0.0002164, Val Loss: 0.0003038 +2025-03-17 13:26:48,069 Epoch 453/2000 +2025-03-17 13:29:33,025 Current Learning Rate: 0.0008365063 +2025-03-17 13:29:33,025 Train Loss: 0.0002177, Val Loss: 0.0003042 +2025-03-17 13:29:33,026 Epoch 454/2000 +2025-03-17 13:32:18,320 Current Learning Rate: 0.0008306559 +2025-03-17 13:32:18,320 Train Loss: 0.0002182, Val Loss: 0.0003045 +2025-03-17 13:32:18,320 Epoch 455/2000 +2025-03-17 13:35:03,845 Current Learning Rate: 0.0008247240 +2025-03-17 13:35:03,846 Train Loss: 0.0002179, Val Loss: 0.0003032 +2025-03-17 13:35:03,846 Epoch 456/2000 +2025-03-17 13:37:48,595 Current Learning 
Rate: 0.0008187120 +2025-03-17 13:37:48,596 Train Loss: 0.0002166, Val Loss: 0.0003012 +2025-03-17 13:37:48,596 Epoch 457/2000 +2025-03-17 13:40:33,600 Current Learning Rate: 0.0008126213 +2025-03-17 13:40:33,601 Train Loss: 0.0002141, Val Loss: 0.0002996 +2025-03-17 13:40:33,601 Epoch 458/2000 +2025-03-17 13:43:18,106 Current Learning Rate: 0.0008064535 +2025-03-17 13:43:18,106 Train Loss: 0.0002116, Val Loss: 0.0002977 +2025-03-17 13:43:18,107 Epoch 459/2000 +2025-03-17 13:46:02,611 Current Learning Rate: 0.0008002101 +2025-03-17 13:46:02,613 Train Loss: 0.0002097, Val Loss: 0.0002947 +2025-03-17 13:46:02,613 Epoch 460/2000 +2025-03-17 13:48:47,855 Current Learning Rate: 0.0007938926 +2025-03-17 13:48:47,855 Train Loss: 0.0002080, Val Loss: 0.0002958 +2025-03-17 13:48:47,855 Epoch 461/2000 +2025-03-17 13:51:33,090 Current Learning Rate: 0.0007875026 +2025-03-17 13:51:33,091 Train Loss: 0.0002061, Val Loss: 0.0002929 +2025-03-17 13:51:33,091 Epoch 462/2000 +2025-03-17 13:54:18,339 Current Learning Rate: 0.0007810417 +2025-03-17 13:54:18,339 Train Loss: 0.0002052, Val Loss: 0.0002909 +2025-03-17 13:54:18,340 Epoch 463/2000 +2025-03-17 13:57:04,684 Current Learning Rate: 0.0007745114 +2025-03-17 13:57:04,685 Train Loss: 0.0002046, Val Loss: 0.0002895 +2025-03-17 13:57:04,685 Epoch 464/2000 +2025-03-17 13:59:49,628 Current Learning Rate: 0.0007679134 +2025-03-17 13:59:49,629 Train Loss: 0.0002040, Val Loss: 0.0002896 +2025-03-17 13:59:49,629 Epoch 465/2000 +2025-03-17 14:02:35,931 Current Learning Rate: 0.0007612493 +2025-03-17 14:02:35,932 Train Loss: 0.0002043, Val Loss: 0.0002903 +2025-03-17 14:02:35,932 Epoch 466/2000 +2025-03-17 14:05:21,102 Current Learning Rate: 0.0007545207 +2025-03-17 14:05:21,103 Train Loss: 0.0002053, Val Loss: 0.0002925 +2025-03-17 14:05:21,103 Epoch 467/2000 +2025-03-17 14:08:05,634 Current Learning Rate: 0.0007477293 +2025-03-17 14:08:05,634 Train Loss: 0.0002055, Val Loss: 0.0002955 +2025-03-17 14:08:05,634 Epoch 468/2000 +2025-03-17 
14:10:51,195 Current Learning Rate: 0.0007408768 +2025-03-17 14:10:51,196 Train Loss: 0.0002048, Val Loss: 0.0002919 +2025-03-17 14:10:51,196 Epoch 469/2000 +2025-03-17 14:13:35,857 Current Learning Rate: 0.0007339649 +2025-03-17 14:13:35,858 Train Loss: 0.0002035, Val Loss: 0.0002900 +2025-03-17 14:13:35,858 Epoch 470/2000 +2025-03-17 14:16:20,681 Current Learning Rate: 0.0007269952 +2025-03-17 14:16:20,682 Train Loss: 0.0002022, Val Loss: 0.0002895 +2025-03-17 14:16:20,682 Epoch 471/2000 +2025-03-17 14:19:04,715 Current Learning Rate: 0.0007199696 +2025-03-17 14:19:04,716 Train Loss: 0.0002027, Val Loss: 0.0002907 +2025-03-17 14:19:04,716 Epoch 472/2000 +2025-03-17 14:21:50,210 Current Learning Rate: 0.0007128896 +2025-03-17 14:21:50,210 Train Loss: 0.0002033, Val Loss: 0.0002903 +2025-03-17 14:21:50,211 Epoch 473/2000 +2025-03-17 14:24:34,670 Current Learning Rate: 0.0007057572 +2025-03-17 14:24:34,670 Train Loss: 0.0002023, Val Loss: 0.0002881 +2025-03-17 14:24:34,670 Epoch 474/2000 +2025-03-17 14:27:19,512 Current Learning Rate: 0.0006985739 +2025-03-17 14:27:19,513 Train Loss: 0.0002026, Val Loss: 0.0002893 +2025-03-17 14:27:19,513 Epoch 475/2000 +2025-03-17 14:30:04,352 Current Learning Rate: 0.0006913417 +2025-03-17 14:30:04,352 Train Loss: 0.0002033, Val Loss: 0.0002888 +2025-03-17 14:30:04,352 Epoch 476/2000 +2025-03-17 14:32:50,170 Current Learning Rate: 0.0006840623 +2025-03-17 14:32:50,170 Train Loss: 0.0002025, Val Loss: 0.0002874 +2025-03-17 14:32:50,170 Epoch 477/2000 +2025-03-17 14:35:34,357 Current Learning Rate: 0.0006767374 +2025-03-17 14:35:34,357 Train Loss: 0.0002021, Val Loss: 0.0002919 +2025-03-17 14:35:34,358 Epoch 478/2000 +2025-03-17 14:38:19,916 Current Learning Rate: 0.0006693690 +2025-03-17 14:38:19,916 Train Loss: 0.0001992, Val Loss: 0.0002896 +2025-03-17 14:38:19,917 Epoch 479/2000 +2025-03-17 14:41:05,611 Current Learning Rate: 0.0006619587 +2025-03-17 14:41:05,612 Train Loss: 0.0001967, Val Loss: 0.0002847 +2025-03-17 
14:41:05,612 Epoch 480/2000 +2025-03-17 14:43:51,919 Current Learning Rate: 0.0006545085 +2025-03-17 14:43:51,919 Train Loss: 0.0001953, Val Loss: 0.0002830 +2025-03-17 14:43:51,920 Epoch 481/2000 +2025-03-17 14:46:36,418 Current Learning Rate: 0.0006470202 +2025-03-17 14:46:36,418 Train Loss: 0.0001945, Val Loss: 0.0002811 +2025-03-17 14:46:36,418 Epoch 482/2000 +2025-03-17 14:49:20,720 Current Learning Rate: 0.0006394956 +2025-03-17 14:49:20,720 Train Loss: 0.0001941, Val Loss: 0.0002826 +2025-03-17 14:49:20,720 Epoch 483/2000 +2025-03-17 14:52:06,900 Current Learning Rate: 0.0006319365 +2025-03-17 14:52:06,901 Train Loss: 0.0001949, Val Loss: 0.0002827 +2025-03-17 14:52:06,901 Epoch 484/2000 +2025-03-17 14:54:52,294 Current Learning Rate: 0.0006243449 +2025-03-17 14:54:52,295 Train Loss: 0.0001957, Val Loss: 0.0002817 +2025-03-17 14:54:52,295 Epoch 485/2000 +2025-03-17 14:57:37,217 Current Learning Rate: 0.0006167227 +2025-03-17 14:57:37,218 Train Loss: 0.0001959, Val Loss: 0.0002800 +2025-03-17 14:57:37,218 Epoch 486/2000 +2025-03-17 15:00:23,138 Current Learning Rate: 0.0006090716 +2025-03-17 15:00:23,139 Train Loss: 0.0001956, Val Loss: 0.0002805 +2025-03-17 15:00:23,139 Epoch 487/2000 +2025-03-17 15:03:07,003 Current Learning Rate: 0.0006013936 +2025-03-17 15:03:07,122 Train Loss: 0.0001956, Val Loss: 0.0002781 +2025-03-17 15:03:07,122 Epoch 488/2000 +2025-03-17 15:05:52,244 Current Learning Rate: 0.0005936907 +2025-03-17 15:05:52,245 Train Loss: 0.0001945, Val Loss: 0.0002783 +2025-03-17 15:05:52,245 Epoch 489/2000 +2025-03-17 15:08:37,518 Current Learning Rate: 0.0005859646 +2025-03-17 15:08:37,519 Train Loss: 0.0001917, Val Loss: 0.0002785 +2025-03-17 15:08:37,519 Epoch 490/2000 +2025-03-17 15:11:22,918 Current Learning Rate: 0.0005782172 +2025-03-17 15:11:22,918 Train Loss: 0.0001905, Val Loss: 0.0002787 +2025-03-17 15:11:22,919 Epoch 491/2000 +2025-03-17 15:14:08,603 Current Learning Rate: 0.0005704506 +2025-03-17 15:14:08,781 Train Loss: 0.0001901, Val 
Loss: 0.0002772 +2025-03-17 15:14:08,781 Epoch 492/2000 +2025-03-17 15:16:54,869 Current Learning Rate: 0.0005626666 +2025-03-17 15:16:55,019 Train Loss: 0.0001895, Val Loss: 0.0002755 +2025-03-17 15:16:55,019 Epoch 493/2000 +2025-03-17 15:19:40,108 Current Learning Rate: 0.0005548672 +2025-03-17 15:19:40,108 Train Loss: 0.0001891, Val Loss: 0.0002772 +2025-03-17 15:19:40,108 Epoch 494/2000 +2025-03-17 15:22:25,317 Current Learning Rate: 0.0005470542 +2025-03-17 15:22:25,317 Train Loss: 0.0001891, Val Loss: 0.0002759 +2025-03-17 15:22:25,318 Epoch 495/2000 +2025-03-17 15:25:11,048 Current Learning Rate: 0.0005392295 +2025-03-17 15:25:11,195 Train Loss: 0.0001897, Val Loss: 0.0002746 +2025-03-17 15:25:11,196 Epoch 496/2000 +2025-03-17 15:27:56,461 Current Learning Rate: 0.0005313953 +2025-03-17 15:27:56,596 Train Loss: 0.0001897, Val Loss: 0.0002730 +2025-03-17 15:27:56,596 Epoch 497/2000 +2025-03-17 15:30:41,363 Current Learning Rate: 0.0005235532 +2025-03-17 15:30:41,489 Train Loss: 0.0001888, Val Loss: 0.0002724 +2025-03-17 15:30:41,490 Epoch 498/2000 +2025-03-17 15:33:26,356 Current Learning Rate: 0.0005157054 +2025-03-17 15:33:26,491 Train Loss: 0.0001872, Val Loss: 0.0002694 +2025-03-17 15:33:26,491 Epoch 499/2000 +2025-03-17 15:36:11,356 Current Learning Rate: 0.0005078537 +2025-03-17 15:36:11,357 Train Loss: 0.0001859, Val Loss: 0.0002709 +2025-03-17 15:36:11,358 Epoch 500/2000 +2025-03-17 15:38:55,830 Current Learning Rate: 0.0005000000 +2025-03-17 15:38:55,831 Train Loss: 0.0001853, Val Loss: 0.0002717 +2025-03-17 15:38:55,831 Epoch 501/2000 +2025-03-17 15:41:40,998 Current Learning Rate: 0.0004921463 +2025-03-17 15:41:40,999 Train Loss: 0.0001853, Val Loss: 0.0002713 +2025-03-17 15:41:40,999 Epoch 502/2000 +2025-03-17 15:44:25,841 Current Learning Rate: 0.0004842946 +2025-03-17 15:44:25,842 Train Loss: 0.0001847, Val Loss: 0.0002700 +2025-03-17 15:44:25,842 Epoch 503/2000 +2025-03-17 15:47:10,659 Current Learning Rate: 0.0004764468 +2025-03-17 
15:47:10,659 Train Loss: 0.0001835, Val Loss: 0.0002711 +2025-03-17 15:47:10,660 Epoch 504/2000 +2025-03-17 15:49:55,523 Current Learning Rate: 0.0004686047 +2025-03-17 15:49:55,523 Train Loss: 0.0001832, Val Loss: 0.0002728 +2025-03-17 15:49:55,523 Epoch 505/2000 +2025-03-17 15:52:40,185 Current Learning Rate: 0.0004607705 +2025-03-17 15:52:40,186 Train Loss: 0.0001825, Val Loss: 0.0002702 +2025-03-17 15:52:40,187 Epoch 506/2000 +2025-03-17 15:55:25,020 Current Learning Rate: 0.0004529458 +2025-03-17 15:55:25,168 Train Loss: 0.0001815, Val Loss: 0.0002667 +2025-03-17 15:55:25,168 Epoch 507/2000 +2025-03-17 15:58:09,150 Current Learning Rate: 0.0004451328 +2025-03-17 15:58:09,270 Train Loss: 0.0001798, Val Loss: 0.0002652 +2025-03-17 15:58:09,270 Epoch 508/2000 +2025-03-17 16:00:54,028 Current Learning Rate: 0.0004373334 +2025-03-17 16:00:54,147 Train Loss: 0.0001785, Val Loss: 0.0002641 +2025-03-17 16:00:54,147 Epoch 509/2000 +2025-03-17 16:03:39,120 Current Learning Rate: 0.0004295494 +2025-03-17 16:03:39,235 Train Loss: 0.0001786, Val Loss: 0.0002618 +2025-03-17 16:03:39,236 Epoch 510/2000 +2025-03-17 16:06:23,432 Current Learning Rate: 0.0004217828 +2025-03-17 16:06:23,546 Train Loss: 0.0001789, Val Loss: 0.0002618 +2025-03-17 16:06:23,546 Epoch 511/2000 +2025-03-17 16:09:07,775 Current Learning Rate: 0.0004140354 +2025-03-17 16:09:07,892 Train Loss: 0.0001792, Val Loss: 0.0002615 +2025-03-17 16:09:07,892 Epoch 512/2000 +2025-03-17 16:11:52,898 Current Learning Rate: 0.0004063093 +2025-03-17 16:11:52,898 Train Loss: 0.0001783, Val Loss: 0.0002622 +2025-03-17 16:11:52,899 Epoch 513/2000 +2025-03-17 16:14:37,807 Current Learning Rate: 0.0003986064 +2025-03-17 16:14:37,936 Train Loss: 0.0001768, Val Loss: 0.0002590 +2025-03-17 16:14:37,936 Epoch 514/2000 +2025-03-17 16:17:22,112 Current Learning Rate: 0.0003909284 +2025-03-17 16:17:22,241 Train Loss: 0.0001757, Val Loss: 0.0002587 +2025-03-17 16:17:22,241 Epoch 515/2000 +2025-03-17 16:20:06,933 Current Learning 
Rate: 0.0003832773 +2025-03-17 16:20:06,933 Train Loss: 0.0001750, Val Loss: 0.0002593 +2025-03-17 16:20:06,934 Epoch 516/2000 +2025-03-17 16:22:50,959 Current Learning Rate: 0.0003756551 +2025-03-17 16:22:50,960 Train Loss: 0.0001746, Val Loss: 0.0002594 +2025-03-17 16:22:50,960 Epoch 517/2000 +2025-03-17 16:25:36,195 Current Learning Rate: 0.0003680635 +2025-03-17 16:25:36,340 Train Loss: 0.0001737, Val Loss: 0.0002581 +2025-03-17 16:25:36,340 Epoch 518/2000 +2025-03-17 16:28:20,967 Current Learning Rate: 0.0003605044 +2025-03-17 16:28:21,095 Train Loss: 0.0001732, Val Loss: 0.0002571 +2025-03-17 16:28:21,096 Epoch 519/2000 +2025-03-17 16:31:05,745 Current Learning Rate: 0.0003529798 +2025-03-17 16:31:05,746 Train Loss: 0.0001731, Val Loss: 0.0002576 +2025-03-17 16:31:05,746 Epoch 520/2000 +2025-03-17 16:33:51,037 Current Learning Rate: 0.0003454915 +2025-03-17 16:33:51,037 Train Loss: 0.0001731, Val Loss: 0.0002601 +2025-03-17 16:33:51,037 Epoch 521/2000 +2025-03-17 16:36:36,048 Current Learning Rate: 0.0003380413 +2025-03-17 16:36:36,048 Train Loss: 0.0001728, Val Loss: 0.0002596 +2025-03-17 16:36:36,049 Epoch 522/2000 +2025-03-17 16:39:20,390 Current Learning Rate: 0.0003306310 +2025-03-17 16:39:20,391 Train Loss: 0.0001727, Val Loss: 0.0002608 +2025-03-17 16:39:20,391 Epoch 523/2000 +2025-03-17 16:42:05,987 Current Learning Rate: 0.0003232626 +2025-03-17 16:42:05,987 Train Loss: 0.0001724, Val Loss: 0.0002576 +2025-03-17 16:42:05,987 Epoch 524/2000 +2025-03-17 16:44:50,954 Current Learning Rate: 0.0003159377 +2025-03-17 16:44:51,084 Train Loss: 0.0001714, Val Loss: 0.0002550 +2025-03-17 16:44:51,085 Epoch 525/2000 +2025-03-17 16:47:35,898 Current Learning Rate: 0.0003086583 +2025-03-17 16:47:36,041 Train Loss: 0.0001699, Val Loss: 0.0002533 +2025-03-17 16:47:36,042 Epoch 526/2000 +2025-03-17 16:50:20,696 Current Learning Rate: 0.0003014261 +2025-03-17 16:50:20,817 Train Loss: 0.0001684, Val Loss: 0.0002525 +2025-03-17 16:50:20,818 Epoch 527/2000 +2025-03-17 
16:53:05,701 Current Learning Rate: 0.0002942428 +2025-03-17 16:53:05,838 Train Loss: 0.0001676, Val Loss: 0.0002519 +2025-03-17 16:53:05,838 Epoch 528/2000 +2025-03-17 16:55:50,315 Current Learning Rate: 0.0002871104 +2025-03-17 16:55:50,316 Train Loss: 0.0001674, Val Loss: 0.0002536 +2025-03-17 16:55:50,316 Epoch 529/2000 +2025-03-17 16:58:35,474 Current Learning Rate: 0.0002800304 +2025-03-17 16:58:35,474 Train Loss: 0.0001673, Val Loss: 0.0002536 +2025-03-17 16:58:35,474 Epoch 530/2000 +2025-03-17 17:01:20,086 Current Learning Rate: 0.0002730048 +2025-03-17 17:01:20,086 Train Loss: 0.0001668, Val Loss: 0.0002527 +2025-03-17 17:01:20,086 Epoch 531/2000 +2025-03-17 17:04:05,110 Current Learning Rate: 0.0002660351 +2025-03-17 17:04:05,230 Train Loss: 0.0001662, Val Loss: 0.0002514 +2025-03-17 17:04:05,231 Epoch 532/2000 +2025-03-17 17:06:49,182 Current Learning Rate: 0.0002591232 +2025-03-17 17:06:49,310 Train Loss: 0.0001657, Val Loss: 0.0002504 +2025-03-17 17:06:49,310 Epoch 533/2000 +2025-03-17 17:09:33,545 Current Learning Rate: 0.0002522707 +2025-03-17 17:09:33,667 Train Loss: 0.0001655, Val Loss: 0.0002494 +2025-03-17 17:09:33,668 Epoch 534/2000 +2025-03-17 17:12:18,190 Current Learning Rate: 0.0002454793 +2025-03-17 17:12:18,313 Train Loss: 0.0001654, Val Loss: 0.0002474 +2025-03-17 17:12:18,313 Epoch 535/2000 +2025-03-17 17:15:02,994 Current Learning Rate: 0.0002387507 +2025-03-17 17:15:03,129 Train Loss: 0.0001652, Val Loss: 0.0002462 +2025-03-17 17:15:03,129 Epoch 536/2000 +2025-03-17 17:17:47,403 Current Learning Rate: 0.0002320866 +2025-03-17 17:17:47,530 Train Loss: 0.0001648, Val Loss: 0.0002459 +2025-03-17 17:17:47,530 Epoch 537/2000 +2025-03-17 17:20:31,956 Current Learning Rate: 0.0002254886 +2025-03-17 17:20:32,085 Train Loss: 0.0001641, Val Loss: 0.0002449 +2025-03-17 17:20:32,085 Epoch 538/2000 +2025-03-17 17:23:16,657 Current Learning Rate: 0.0002189583 +2025-03-17 17:23:16,784 Train Loss: 0.0001632, Val Loss: 0.0002445 +2025-03-17 
17:23:16,784 Epoch 539/2000 +2025-03-17 17:26:01,234 Current Learning Rate: 0.0002124974 +2025-03-17 17:26:01,403 Train Loss: 0.0001623, Val Loss: 0.0002438 +2025-03-17 17:26:01,403 Epoch 540/2000 +2025-03-17 17:28:46,853 Current Learning Rate: 0.0002061074 +2025-03-17 17:28:46,982 Train Loss: 0.0001617, Val Loss: 0.0002431 +2025-03-17 17:28:46,982 Epoch 541/2000 +2025-03-17 17:31:31,430 Current Learning Rate: 0.0001997899 +2025-03-17 17:31:31,557 Train Loss: 0.0001613, Val Loss: 0.0002426 +2025-03-17 17:31:31,557 Epoch 542/2000 +2025-03-17 17:34:16,147 Current Learning Rate: 0.0001935465 +2025-03-17 17:34:16,276 Train Loss: 0.0001610, Val Loss: 0.0002420 +2025-03-17 17:34:16,276 Epoch 543/2000 +2025-03-17 17:37:00,768 Current Learning Rate: 0.0001873787 +2025-03-17 17:37:00,922 Train Loss: 0.0001610, Val Loss: 0.0002418 +2025-03-17 17:37:00,922 Epoch 544/2000 +2025-03-17 17:39:46,046 Current Learning Rate: 0.0001812880 +2025-03-17 17:39:46,046 Train Loss: 0.0001609, Val Loss: 0.0002420 +2025-03-17 17:39:46,047 Epoch 545/2000 +2025-03-17 17:42:30,969 Current Learning Rate: 0.0001752760 +2025-03-17 17:42:30,969 Train Loss: 0.0001608, Val Loss: 0.0002422 +2025-03-17 17:42:30,970 Epoch 546/2000 +2025-03-17 17:45:15,388 Current Learning Rate: 0.0001693441 +2025-03-17 17:45:15,388 Train Loss: 0.0001602, Val Loss: 0.0002425 +2025-03-17 17:45:15,388 Epoch 547/2000 +2025-03-17 17:48:00,430 Current Learning Rate: 0.0001634937 +2025-03-17 17:48:00,570 Train Loss: 0.0001596, Val Loss: 0.0002413 +2025-03-17 17:48:00,571 Epoch 548/2000 +2025-03-17 17:50:44,984 Current Learning Rate: 0.0001577264 +2025-03-17 17:50:45,107 Train Loss: 0.0001591, Val Loss: 0.0002411 +2025-03-17 17:50:45,107 Epoch 549/2000 +2025-03-17 17:53:30,051 Current Learning Rate: 0.0001520436 +2025-03-17 17:53:30,052 Train Loss: 0.0001585, Val Loss: 0.0002416 +2025-03-17 17:53:30,052 Epoch 550/2000 +2025-03-17 17:56:15,489 Current Learning Rate: 0.0001464466 +2025-03-17 17:56:15,489 Train Loss: 0.0001580, Val 
Loss: 0.0002415 +2025-03-17 17:56:15,490 Epoch 551/2000 +2025-03-17 17:59:00,674 Current Learning Rate: 0.0001409369 +2025-03-17 17:59:00,800 Train Loss: 0.0001576, Val Loss: 0.0002402 +2025-03-17 17:59:00,800 Epoch 552/2000 +2025-03-17 18:01:45,642 Current Learning Rate: 0.0001355157 +2025-03-17 18:01:45,766 Train Loss: 0.0001571, Val Loss: 0.0002385 +2025-03-17 18:01:45,767 Epoch 553/2000 +2025-03-17 18:04:30,668 Current Learning Rate: 0.0001301845 +2025-03-17 18:04:30,811 Train Loss: 0.0001564, Val Loss: 0.0002377 +2025-03-17 18:04:30,811 Epoch 554/2000 +2025-03-17 18:07:15,390 Current Learning Rate: 0.0001249445 +2025-03-17 18:07:15,554 Train Loss: 0.0001557, Val Loss: 0.0002374 +2025-03-17 18:07:15,554 Epoch 555/2000 +2025-03-17 18:10:00,313 Current Learning Rate: 0.0001197970 +2025-03-17 18:10:00,313 Train Loss: 0.0001553, Val Loss: 0.0002375 +2025-03-17 18:10:00,313 Epoch 556/2000 +2025-03-17 18:12:45,091 Current Learning Rate: 0.0001147434 +2025-03-17 18:12:45,091 Train Loss: 0.0001549, Val Loss: 0.0002375 +2025-03-17 18:12:45,091 Epoch 557/2000 +2025-03-17 18:15:29,891 Current Learning Rate: 0.0001097848 +2025-03-17 18:15:30,004 Train Loss: 0.0001545, Val Loss: 0.0002373 +2025-03-17 18:15:30,005 Epoch 558/2000 +2025-03-17 18:18:15,180 Current Learning Rate: 0.0001049225 +2025-03-17 18:18:15,296 Train Loss: 0.0001542, Val Loss: 0.0002366 +2025-03-17 18:18:15,296 Epoch 559/2000 +2025-03-17 18:20:59,389 Current Learning Rate: 0.0001001577 +2025-03-17 18:20:59,502 Train Loss: 0.0001540, Val Loss: 0.0002357 +2025-03-17 18:20:59,502 Epoch 560/2000 +2025-03-17 18:23:43,981 Current Learning Rate: 0.0000954915 +2025-03-17 18:23:44,091 Train Loss: 0.0001538, Val Loss: 0.0002353 +2025-03-17 18:23:44,092 Epoch 561/2000 +2025-03-17 18:26:28,789 Current Learning Rate: 0.0000909251 +2025-03-17 18:26:28,901 Train Loss: 0.0001535, Val Loss: 0.0002351 +2025-03-17 18:26:28,901 Epoch 562/2000 +2025-03-17 18:29:13,113 Current Learning Rate: 0.0000864597 +2025-03-17 
18:29:13,231 Train Loss: 0.0001532, Val Loss: 0.0002347 +2025-03-17 18:29:13,231 Epoch 563/2000 +2025-03-17 18:31:57,824 Current Learning Rate: 0.0000820963 +2025-03-17 18:31:57,940 Train Loss: 0.0001529, Val Loss: 0.0002342 +2025-03-17 18:31:57,940 Epoch 564/2000 +2025-03-17 18:34:43,290 Current Learning Rate: 0.0000778360 +2025-03-17 18:34:43,423 Train Loss: 0.0001526, Val Loss: 0.0002339 +2025-03-17 18:34:43,424 Epoch 565/2000 +2025-03-17 18:37:27,705 Current Learning Rate: 0.0000736799 +2025-03-17 18:37:27,833 Train Loss: 0.0001522, Val Loss: 0.0002337 +2025-03-17 18:37:27,833 Epoch 566/2000 +2025-03-17 18:40:12,393 Current Learning Rate: 0.0000696290 +2025-03-17 18:40:12,521 Train Loss: 0.0001519, Val Loss: 0.0002334 +2025-03-17 18:40:12,521 Epoch 567/2000 +2025-03-17 18:42:56,830 Current Learning Rate: 0.0000656842 +2025-03-17 18:42:56,954 Train Loss: 0.0001516, Val Loss: 0.0002331 +2025-03-17 18:42:56,955 Epoch 568/2000 +2025-03-17 18:45:42,024 Current Learning Rate: 0.0000618467 +2025-03-17 18:45:42,160 Train Loss: 0.0001513, Val Loss: 0.0002328 +2025-03-17 18:45:42,160 Epoch 569/2000 +2025-03-17 18:48:26,134 Current Learning Rate: 0.0000581172 +2025-03-17 18:48:26,257 Train Loss: 0.0001511, Val Loss: 0.0002326 +2025-03-17 18:48:26,258 Epoch 570/2000 +2025-03-17 18:51:10,841 Current Learning Rate: 0.0000544967 +2025-03-17 18:51:10,966 Train Loss: 0.0001509, Val Loss: 0.0002324 +2025-03-17 18:51:10,966 Epoch 571/2000 +2025-03-17 18:53:57,021 Current Learning Rate: 0.0000509862 +2025-03-17 18:53:57,152 Train Loss: 0.0001507, Val Loss: 0.0002322 +2025-03-17 18:53:57,152 Epoch 572/2000 +2025-03-17 18:56:41,206 Current Learning Rate: 0.0000475865 +2025-03-17 18:56:41,325 Train Loss: 0.0001504, Val Loss: 0.0002320 +2025-03-17 18:56:41,325 Epoch 573/2000 +2025-03-17 18:59:25,750 Current Learning Rate: 0.0000442984 +2025-03-17 18:59:25,866 Train Loss: 0.0001503, Val Loss: 0.0002318 +2025-03-17 18:59:25,866 Epoch 574/2000 +2025-03-17 19:02:10,956 Current Learning 
Rate: 0.0000411227 +2025-03-17 19:02:11,125 Train Loss: 0.0001501, Val Loss: 0.0002316 +2025-03-17 19:02:11,125 Epoch 575/2000 +2025-03-17 19:04:56,248 Current Learning Rate: 0.0000380602 +2025-03-17 19:04:56,391 Train Loss: 0.0001499, Val Loss: 0.0002313 +2025-03-17 19:04:56,391 Epoch 576/2000 +2025-03-17 19:07:41,217 Current Learning Rate: 0.0000351118 +2025-03-17 19:07:41,341 Train Loss: 0.0001497, Val Loss: 0.0002311 +2025-03-17 19:07:41,342 Epoch 577/2000 +2025-03-17 19:10:26,183 Current Learning Rate: 0.0000322780 +2025-03-17 19:10:26,310 Train Loss: 0.0001495, Val Loss: 0.0002308 +2025-03-17 19:10:26,310 Epoch 578/2000 +2025-03-17 19:13:10,787 Current Learning Rate: 0.0000295596 +2025-03-17 19:13:10,928 Train Loss: 0.0001494, Val Loss: 0.0002307 +2025-03-17 19:13:10,928 Epoch 579/2000 +2025-03-17 19:15:55,745 Current Learning Rate: 0.0000269573 +2025-03-17 19:15:55,870 Train Loss: 0.0001492, Val Loss: 0.0002306 +2025-03-17 19:15:55,870 Epoch 580/2000 +2025-03-17 19:18:40,112 Current Learning Rate: 0.0000244717 +2025-03-17 19:18:40,226 Train Loss: 0.0001491, Val Loss: 0.0002306 +2025-03-17 19:18:40,226 Epoch 581/2000 +2025-03-17 19:21:24,298 Current Learning Rate: 0.0000221035 +2025-03-17 19:21:24,412 Train Loss: 0.0001489, Val Loss: 0.0002305 +2025-03-17 19:21:24,412 Epoch 582/2000 +2025-03-17 19:24:09,326 Current Learning Rate: 0.0000198532 +2025-03-17 19:24:09,445 Train Loss: 0.0001488, Val Loss: 0.0002304 +2025-03-17 19:24:09,445 Epoch 583/2000 +2025-03-17 19:26:53,725 Current Learning Rate: 0.0000177213 +2025-03-17 19:26:53,844 Train Loss: 0.0001487, Val Loss: 0.0002302 +2025-03-17 19:26:53,844 Epoch 584/2000 +2025-03-17 19:29:39,033 Current Learning Rate: 0.0000157084 +2025-03-17 19:29:39,156 Train Loss: 0.0001485, Val Loss: 0.0002301 +2025-03-17 19:29:39,157 Epoch 585/2000 +2025-03-17 19:32:23,515 Current Learning Rate: 0.0000138150 +2025-03-17 19:32:23,627 Train Loss: 0.0001484, Val Loss: 0.0002300 +2025-03-17 19:32:23,627 Epoch 586/2000 +2025-03-17 
19:35:08,804 Current Learning Rate: 0.0000120416 +2025-03-17 19:35:08,931 Train Loss: 0.0001483, Val Loss: 0.0002299 +2025-03-17 19:35:08,931 Epoch 587/2000 +2025-03-17 19:37:53,554 Current Learning Rate: 0.0000103886 +2025-03-17 19:37:53,674 Train Loss: 0.0001482, Val Loss: 0.0002298 +2025-03-17 19:37:53,674 Epoch 588/2000 +2025-03-17 19:40:38,041 Current Learning Rate: 0.0000088564 +2025-03-17 19:40:38,164 Train Loss: 0.0001481, Val Loss: 0.0002298 +2025-03-17 19:40:38,164 Epoch 589/2000 +2025-03-17 19:43:22,855 Current Learning Rate: 0.0000074453 +2025-03-17 19:43:22,983 Train Loss: 0.0001480, Val Loss: 0.0002298 +2025-03-17 19:43:22,983 Epoch 590/2000 +2025-03-17 19:46:08,071 Current Learning Rate: 0.0000061558 +2025-03-17 19:46:08,197 Train Loss: 0.0001479, Val Loss: 0.0002297 +2025-03-17 19:46:08,197 Epoch 591/2000 +2025-03-17 19:48:53,036 Current Learning Rate: 0.0000049882 +2025-03-17 19:48:53,150 Train Loss: 0.0001479, Val Loss: 0.0002297 +2025-03-17 19:48:53,150 Epoch 592/2000 +2025-03-17 19:51:37,391 Current Learning Rate: 0.0000039426 +2025-03-17 19:51:37,505 Train Loss: 0.0001478, Val Loss: 0.0002296 +2025-03-17 19:51:37,506 Epoch 593/2000 +2025-03-17 19:54:22,304 Current Learning Rate: 0.0000030195 +2025-03-17 19:54:22,417 Train Loss: 0.0001477, Val Loss: 0.0002295 +2025-03-17 19:54:22,418 Epoch 594/2000 +2025-03-17 19:57:06,901 Current Learning Rate: 0.0000022190 +2025-03-17 19:57:07,019 Train Loss: 0.0001476, Val Loss: 0.0002294 +2025-03-17 19:57:07,019 Epoch 595/2000 +2025-03-17 19:59:51,786 Current Learning Rate: 0.0000015413 +2025-03-17 19:59:51,898 Train Loss: 0.0001476, Val Loss: 0.0002294 +2025-03-17 19:59:51,898 Epoch 596/2000 +2025-03-17 20:02:36,335 Current Learning Rate: 0.0000009866 +2025-03-17 20:02:36,459 Train Loss: 0.0001475, Val Loss: 0.0002293 +2025-03-17 20:02:36,460 Epoch 597/2000 +2025-03-17 20:05:21,597 Current Learning Rate: 0.0000005551 +2025-03-17 20:05:21,711 Train Loss: 0.0001475, Val Loss: 0.0002293 +2025-03-17 
20:05:21,711 Epoch 598/2000 +2025-03-17 20:08:06,394 Current Learning Rate: 0.0000002467 +2025-03-17 20:08:06,509 Train Loss: 0.0001475, Val Loss: 0.0002292 +2025-03-17 20:08:06,510 Epoch 599/2000 +2025-03-17 20:10:50,699 Current Learning Rate: 0.0000000617 +2025-03-17 20:10:50,809 Train Loss: 0.0001474, Val Loss: 0.0002292 +2025-03-17 20:10:50,810 Epoch 600/2000 +2025-03-17 20:13:35,435 Current Learning Rate: 0.0000000000 +2025-03-17 20:13:35,550 Train Loss: 0.0001474, Val Loss: 0.0002292 +2025-03-17 20:13:35,550 Epoch 601/2000 +2025-03-17 20:16:19,664 Current Learning Rate: 0.0000000617 +2025-03-17 20:16:19,775 Train Loss: 0.0001474, Val Loss: 0.0002292 +2025-03-17 20:16:19,775 Epoch 602/2000 +2025-03-17 20:19:03,885 Current Learning Rate: 0.0000002467 +2025-03-17 20:19:04,000 Train Loss: 0.0001474, Val Loss: 0.0002292 +2025-03-17 20:19:04,000 Epoch 603/2000 +2025-03-17 20:21:48,848 Current Learning Rate: 0.0000005551 +2025-03-17 20:21:48,848 Train Loss: 0.0001474, Val Loss: 0.0002292 +2025-03-17 20:21:48,848 Epoch 604/2000 +2025-03-17 20:24:33,822 Current Learning Rate: 0.0000009866 +2025-03-17 20:24:33,822 Train Loss: 0.0001475, Val Loss: 0.0002292 +2025-03-17 20:24:33,822 Epoch 605/2000 +2025-03-17 20:27:18,202 Current Learning Rate: 0.0000015413 +2025-03-17 20:27:18,202 Train Loss: 0.0001475, Val Loss: 0.0002293 +2025-03-17 20:27:18,202 Epoch 606/2000 +2025-03-17 20:30:02,386 Current Learning Rate: 0.0000022190 +2025-03-17 20:30:02,386 Train Loss: 0.0001475, Val Loss: 0.0002293 +2025-03-17 20:30:02,386 Epoch 607/2000 +2025-03-17 20:32:47,223 Current Learning Rate: 0.0000030195 +2025-03-17 20:32:47,224 Train Loss: 0.0001475, Val Loss: 0.0002294 +2025-03-17 20:32:47,224 Epoch 608/2000 +2025-03-17 20:35:33,074 Current Learning Rate: 0.0000039426 +2025-03-17 20:35:33,074 Train Loss: 0.0001476, Val Loss: 0.0002294 +2025-03-17 20:35:33,074 Epoch 609/2000 +2025-03-17 20:38:17,640 Current Learning Rate: 0.0000049882 +2025-03-17 20:38:17,640 Train Loss: 0.0001476, Val 
Loss: 0.0002295 +2025-03-17 20:38:17,640 Epoch 610/2000 +2025-03-17 20:41:02,143 Current Learning Rate: 0.0000061558 +2025-03-17 20:41:02,144 Train Loss: 0.0001477, Val Loss: 0.0002295 +2025-03-17 20:41:02,144 Epoch 611/2000 +2025-03-17 20:43:47,277 Current Learning Rate: 0.0000074453 +2025-03-17 20:43:47,277 Train Loss: 0.0001477, Val Loss: 0.0002296 +2025-03-17 20:43:47,277 Epoch 612/2000 +2025-03-17 20:46:32,105 Current Learning Rate: 0.0000088564 +2025-03-17 20:46:32,106 Train Loss: 0.0001478, Val Loss: 0.0002296 +2025-03-17 20:46:32,106 Epoch 613/2000 +2025-03-17 20:49:16,280 Current Learning Rate: 0.0000103886 +2025-03-17 20:49:16,280 Train Loss: 0.0001478, Val Loss: 0.0002296 +2025-03-17 20:49:16,280 Epoch 614/2000 +2025-03-17 20:52:00,741 Current Learning Rate: 0.0000120416 +2025-03-17 20:52:00,741 Train Loss: 0.0001479, Val Loss: 0.0002296 +2025-03-17 20:52:00,742 Epoch 615/2000 +2025-03-17 20:54:45,087 Current Learning Rate: 0.0000138150 +2025-03-17 20:54:45,088 Train Loss: 0.0001479, Val Loss: 0.0002297 +2025-03-17 20:54:45,088 Epoch 616/2000 +2025-03-17 20:57:29,476 Current Learning Rate: 0.0000157084 +2025-03-17 20:57:29,477 Train Loss: 0.0001480, Val Loss: 0.0002297 +2025-03-17 20:57:29,477 Epoch 617/2000 +2025-03-17 21:00:14,114 Current Learning Rate: 0.0000177213 +2025-03-17 21:00:14,115 Train Loss: 0.0001481, Val Loss: 0.0002297 +2025-03-17 21:00:14,115 Epoch 618/2000 +2025-03-17 21:02:58,689 Current Learning Rate: 0.0000198532 +2025-03-17 21:02:58,689 Train Loss: 0.0001481, Val Loss: 0.0002298 +2025-03-17 21:02:58,690 Epoch 619/2000 +2025-03-17 21:05:43,098 Current Learning Rate: 0.0000221035 +2025-03-17 21:05:43,098 Train Loss: 0.0001482, Val Loss: 0.0002298 +2025-03-17 21:05:43,098 Epoch 620/2000 +2025-03-17 21:08:27,647 Current Learning Rate: 0.0000244717 +2025-03-17 21:08:27,648 Train Loss: 0.0001483, Val Loss: 0.0002299 +2025-03-17 21:08:27,648 Epoch 621/2000 +2025-03-17 21:11:12,169 Current Learning Rate: 0.0000269573 +2025-03-17 
21:11:12,169 Train Loss: 0.0001483, Val Loss: 0.0002299 +2025-03-17 21:11:12,170 Epoch 622/2000 +2025-03-17 21:13:56,794 Current Learning Rate: 0.0000295596 +2025-03-17 21:13:56,795 Train Loss: 0.0001484, Val Loss: 0.0002300 +2025-03-17 21:13:56,795 Epoch 623/2000 +2025-03-17 21:16:41,137 Current Learning Rate: 0.0000322780 +2025-03-17 21:16:41,137 Train Loss: 0.0001485, Val Loss: 0.0002301 +2025-03-17 21:16:41,137 Epoch 624/2000 +2025-03-17 21:19:25,428 Current Learning Rate: 0.0000351118 +2025-03-17 21:19:25,429 Train Loss: 0.0001486, Val Loss: 0.0002302 +2025-03-17 21:19:25,429 Epoch 625/2000 +2025-03-17 21:22:09,825 Current Learning Rate: 0.0000380602 +2025-03-17 21:22:09,825 Train Loss: 0.0001486, Val Loss: 0.0002302 +2025-03-17 21:22:09,825 Epoch 626/2000 +2025-03-17 21:24:54,487 Current Learning Rate: 0.0000411227 +2025-03-17 21:24:54,487 Train Loss: 0.0001487, Val Loss: 0.0002303 +2025-03-17 21:24:54,488 Epoch 627/2000 +2025-03-17 21:27:38,984 Current Learning Rate: 0.0000442984 +2025-03-17 21:27:38,984 Train Loss: 0.0001488, Val Loss: 0.0002304 +2025-03-17 21:27:38,984 Epoch 628/2000 +2025-03-17 21:30:23,595 Current Learning Rate: 0.0000475865 +2025-03-17 21:30:23,595 Train Loss: 0.0001489, Val Loss: 0.0002305 +2025-03-17 21:30:23,595 Epoch 629/2000 +2025-03-17 21:33:08,281 Current Learning Rate: 0.0000509862 +2025-03-17 21:33:08,281 Train Loss: 0.0001490, Val Loss: 0.0002306 +2025-03-17 21:33:08,282 Epoch 630/2000 +2025-03-17 21:35:52,585 Current Learning Rate: 0.0000544967 +2025-03-17 21:35:52,586 Train Loss: 0.0001491, Val Loss: 0.0002308 +2025-03-17 21:35:52,586 Epoch 631/2000 +2025-03-17 21:38:37,299 Current Learning Rate: 0.0000581172 +2025-03-17 21:38:37,299 Train Loss: 0.0001492, Val Loss: 0.0002309 +2025-03-17 21:38:37,300 Epoch 632/2000 +2025-03-17 21:41:21,978 Current Learning Rate: 0.0000618467 +2025-03-17 21:41:21,979 Train Loss: 0.0001493, Val Loss: 0.0002310 +2025-03-17 21:41:21,979 Epoch 633/2000 +2025-03-17 21:44:06,581 Current Learning 
Rate: 0.0000656842 +2025-03-17 21:44:06,582 Train Loss: 0.0001494, Val Loss: 0.0002311 +2025-03-17 21:44:06,582 Epoch 634/2000 +2025-03-17 21:46:51,349 Current Learning Rate: 0.0000696290 +2025-03-17 21:46:51,350 Train Loss: 0.0001495, Val Loss: 0.0002312 +2025-03-17 21:46:51,350 Epoch 635/2000 +2025-03-17 21:49:36,041 Current Learning Rate: 0.0000736799 +2025-03-17 21:49:36,041 Train Loss: 0.0001496, Val Loss: 0.0002313 +2025-03-17 21:49:36,042 Epoch 636/2000 +2025-03-17 21:52:20,654 Current Learning Rate: 0.0000778360 +2025-03-17 21:52:20,655 Train Loss: 0.0001497, Val Loss: 0.0002315 +2025-03-17 21:52:20,655 Epoch 637/2000 +2025-03-17 21:55:05,451 Current Learning Rate: 0.0000820963 +2025-03-17 21:55:05,452 Train Loss: 0.0001498, Val Loss: 0.0002317 +2025-03-17 21:55:05,452 Epoch 638/2000 +2025-03-17 21:57:50,331 Current Learning Rate: 0.0000864597 +2025-03-17 21:57:50,331 Train Loss: 0.0001499, Val Loss: 0.0002319 +2025-03-17 21:57:50,332 Epoch 639/2000 +2025-03-17 22:00:35,498 Current Learning Rate: 0.0000909251 +2025-03-17 22:00:35,499 Train Loss: 0.0001500, Val Loss: 0.0002321 +2025-03-17 22:00:35,499 Epoch 640/2000 +2025-03-17 22:03:20,419 Current Learning Rate: 0.0000954915 +2025-03-17 22:03:20,420 Train Loss: 0.0001501, Val Loss: 0.0002323 +2025-03-17 22:03:20,420 Epoch 641/2000 +2025-03-17 22:06:05,129 Current Learning Rate: 0.0001001577 +2025-03-17 22:06:05,129 Train Loss: 0.0001503, Val Loss: 0.0002324 +2025-03-17 22:06:05,130 Epoch 642/2000 +2025-03-17 22:08:49,314 Current Learning Rate: 0.0001049225 +2025-03-17 22:08:49,315 Train Loss: 0.0001505, Val Loss: 0.0002325 +2025-03-17 22:08:49,315 Epoch 643/2000 +2025-03-17 22:11:33,781 Current Learning Rate: 0.0001097848 +2025-03-17 22:11:33,782 Train Loss: 0.0001507, Val Loss: 0.0002326 +2025-03-17 22:11:33,782 Epoch 644/2000 +2025-03-17 22:14:18,398 Current Learning Rate: 0.0001147434 +2025-03-17 22:14:18,399 Train Loss: 0.0001508, Val Loss: 0.0002327 +2025-03-17 22:14:18,399 Epoch 645/2000 +2025-03-17 
22:17:03,019 Current Learning Rate: 0.0001197970 +2025-03-17 22:17:03,019 Train Loss: 0.0001510, Val Loss: 0.0002330 +2025-03-17 22:17:03,019 Epoch 646/2000 +2025-03-17 22:19:48,258 Current Learning Rate: 0.0001249445 +2025-03-17 22:19:48,258 Train Loss: 0.0001512, Val Loss: 0.0002336 +2025-03-17 22:19:48,259 Epoch 647/2000 +2025-03-17 22:22:32,831 Current Learning Rate: 0.0001301845 +2025-03-17 22:22:32,832 Train Loss: 0.0001515, Val Loss: 0.0002335 +2025-03-17 22:22:32,832 Epoch 648/2000 +2025-03-17 22:25:17,295 Current Learning Rate: 0.0001355157 +2025-03-17 22:25:17,295 Train Loss: 0.0001516, Val Loss: 0.0002333 +2025-03-17 22:25:17,296 Epoch 649/2000 +2025-03-17 22:28:01,535 Current Learning Rate: 0.0001409369 +2025-03-17 22:28:01,536 Train Loss: 0.0001518, Val Loss: 0.0002337 +2025-03-17 22:28:01,536 Epoch 650/2000 +2025-03-17 22:30:46,161 Current Learning Rate: 0.0001464466 +2025-03-17 22:30:46,162 Train Loss: 0.0001519, Val Loss: 0.0002344 +2025-03-17 22:30:46,162 Epoch 651/2000 +2025-03-17 22:33:31,225 Current Learning Rate: 0.0001520436 +2025-03-17 22:33:31,225 Train Loss: 0.0001521, Val Loss: 0.0002342 +2025-03-17 22:33:31,225 Epoch 652/2000 +2025-03-17 22:36:16,226 Current Learning Rate: 0.0001577264 +2025-03-17 22:36:16,227 Train Loss: 0.0001524, Val Loss: 0.0002342 +2025-03-17 22:36:16,227 Epoch 653/2000 +2025-03-17 22:39:01,123 Current Learning Rate: 0.0001634937 +2025-03-17 22:39:01,123 Train Loss: 0.0001526, Val Loss: 0.0002349 +2025-03-17 22:39:01,123 Epoch 654/2000 +2025-03-17 22:41:46,108 Current Learning Rate: 0.0001693441 +2025-03-17 22:41:46,109 Train Loss: 0.0001528, Val Loss: 0.0002358 +2025-03-17 22:41:46,109 Epoch 655/2000 +2025-03-17 22:44:30,644 Current Learning Rate: 0.0001752760 +2025-03-17 22:44:30,644 Train Loss: 0.0001531, Val Loss: 0.0002353 +2025-03-17 22:44:30,645 Epoch 656/2000 +2025-03-17 22:47:15,462 Current Learning Rate: 0.0001812880 +2025-03-17 22:47:15,463 Train Loss: 0.0001534, Val Loss: 0.0002354 +2025-03-17 
22:47:15,463 Epoch 657/2000 +2025-03-17 22:49:59,934 Current Learning Rate: 0.0001873787 +2025-03-17 22:49:59,934 Train Loss: 0.0001537, Val Loss: 0.0002356 +2025-03-17 22:49:59,935 Epoch 658/2000 +2025-03-17 22:52:44,276 Current Learning Rate: 0.0001935465 +2025-03-17 22:52:44,276 Train Loss: 0.0001539, Val Loss: 0.0002359 +2025-03-17 22:52:44,276 Epoch 659/2000 +2025-03-17 22:55:29,507 Current Learning Rate: 0.0001997899 +2025-03-17 22:55:29,507 Train Loss: 0.0001542, Val Loss: 0.0002362 +2025-03-17 22:55:29,508 Epoch 660/2000 +2025-03-17 22:58:14,476 Current Learning Rate: 0.0002061074 +2025-03-17 22:58:14,477 Train Loss: 0.0001543, Val Loss: 0.0002373 +2025-03-17 22:58:14,477 Epoch 661/2000 +2025-03-17 23:00:59,399 Current Learning Rate: 0.0002124974 +2025-03-17 23:00:59,400 Train Loss: 0.0001545, Val Loss: 0.0002382 +2025-03-17 23:00:59,400 Epoch 662/2000 +2025-03-17 23:03:44,099 Current Learning Rate: 0.0002189583 +2025-03-17 23:03:44,099 Train Loss: 0.0001548, Val Loss: 0.0002386 +2025-03-17 23:03:44,100 Epoch 663/2000 +2025-03-17 23:06:28,579 Current Learning Rate: 0.0002254886 +2025-03-17 23:06:28,579 Train Loss: 0.0001552, Val Loss: 0.0002384 +2025-03-17 23:06:28,579 Epoch 664/2000 +2025-03-17 23:09:12,833 Current Learning Rate: 0.0002320866 +2025-03-17 23:09:12,834 Train Loss: 0.0001555, Val Loss: 0.0002392 +2025-03-17 23:09:12,834 Epoch 665/2000 +2025-03-17 23:11:57,271 Current Learning Rate: 0.0002387507 +2025-03-17 23:11:57,272 Train Loss: 0.0001560, Val Loss: 0.0002400 +2025-03-17 23:11:57,272 Epoch 666/2000 +2025-03-17 23:14:41,680 Current Learning Rate: 0.0002454793 +2025-03-17 23:14:41,681 Train Loss: 0.0001564, Val Loss: 0.0002397 +2025-03-17 23:14:41,681 Epoch 667/2000 +2025-03-17 23:17:26,221 Current Learning Rate: 0.0002522707 +2025-03-17 23:17:26,221 Train Loss: 0.0001567, Val Loss: 0.0002400 +2025-03-17 23:17:26,222 Epoch 668/2000 +2025-03-17 23:20:11,433 Current Learning Rate: 0.0002591232 +2025-03-17 23:20:11,434 Train Loss: 0.0001571, Val 
Loss: 0.0002418 +2025-03-17 23:20:11,434 Epoch 669/2000 +2025-03-17 23:22:55,943 Current Learning Rate: 0.0002660351 +2025-03-17 23:22:55,943 Train Loss: 0.0001578, Val Loss: 0.0002415 +2025-03-17 23:22:55,944 Epoch 670/2000 +2025-03-17 23:25:40,826 Current Learning Rate: 0.0002730048 +2025-03-17 23:25:40,829 Train Loss: 0.0001587, Val Loss: 0.0002405 +2025-03-17 23:25:40,830 Epoch 671/2000 +2025-03-17 23:28:25,444 Current Learning Rate: 0.0002800304 +2025-03-17 23:28:25,444 Train Loss: 0.0001589, Val Loss: 0.0002400 +2025-03-17 23:28:25,445 Epoch 672/2000 +2025-03-17 23:31:10,119 Current Learning Rate: 0.0002871104 +2025-03-17 23:31:10,119 Train Loss: 0.0001588, Val Loss: 0.0002406 +2025-03-17 23:31:10,120 Epoch 673/2000 +2025-03-17 23:33:54,850 Current Learning Rate: 0.0002942428 +2025-03-17 23:33:54,851 Train Loss: 0.0001589, Val Loss: 0.0002406 +2025-03-17 23:33:54,851 Epoch 674/2000 +2025-03-17 23:36:39,847 Current Learning Rate: 0.0003014261 +2025-03-17 23:36:39,847 Train Loss: 0.0001594, Val Loss: 0.0002411 +2025-03-17 23:36:39,847 Epoch 675/2000 +2025-03-17 23:39:24,534 Current Learning Rate: 0.0003086583 +2025-03-17 23:39:24,535 Train Loss: 0.0001597, Val Loss: 0.0002418 +2025-03-17 23:39:24,535 Epoch 676/2000 +2025-03-17 23:42:09,179 Current Learning Rate: 0.0003159377 +2025-03-17 23:42:09,180 Train Loss: 0.0001603, Val Loss: 0.0002439 +2025-03-17 23:42:09,180 Epoch 677/2000 +2025-03-17 23:44:54,340 Current Learning Rate: 0.0003232626 +2025-03-17 23:44:54,341 Train Loss: 0.0001612, Val Loss: 0.0002452 +2025-03-17 23:44:54,341 Epoch 678/2000 +2025-03-17 23:47:39,178 Current Learning Rate: 0.0003306310 +2025-03-17 23:47:39,178 Train Loss: 0.0001625, Val Loss: 0.0002460 +2025-03-17 23:47:39,178 Epoch 679/2000 +2025-03-17 23:50:23,601 Current Learning Rate: 0.0003380413 +2025-03-17 23:50:23,601 Train Loss: 0.0001633, Val Loss: 0.0002464 +2025-03-17 23:50:23,602 Epoch 680/2000 +2025-03-17 23:53:08,336 Current Learning Rate: 0.0003454915 +2025-03-17 
23:53:08,337 Train Loss: 0.0001628, Val Loss: 0.0002467 +2025-03-17 23:53:08,337 Epoch 681/2000 +2025-03-17 23:55:53,237 Current Learning Rate: 0.0003529798 +2025-03-17 23:55:53,237 Train Loss: 0.0001621, Val Loss: 0.0002471 +2025-03-17 23:55:53,238 Epoch 682/2000 +2025-03-17 23:58:37,973 Current Learning Rate: 0.0003605044 +2025-03-17 23:58:37,974 Train Loss: 0.0001617, Val Loss: 0.0002463 +2025-03-17 23:58:37,974 Epoch 683/2000 +2025-03-18 00:01:23,129 Current Learning Rate: 0.0003680635 +2025-03-18 00:01:23,129 Train Loss: 0.0001619, Val Loss: 0.0002484 +2025-03-18 00:01:23,129 Epoch 684/2000 +2025-03-18 00:04:07,580 Current Learning Rate: 0.0003756551 +2025-03-18 00:04:07,580 Train Loss: 0.0001627, Val Loss: 0.0002510 +2025-03-18 00:04:07,580 Epoch 685/2000 +2025-03-18 00:06:52,277 Current Learning Rate: 0.0003832773 +2025-03-18 00:06:52,278 Train Loss: 0.0001641, Val Loss: 0.0002553 +2025-03-18 00:06:52,278 Epoch 686/2000 +2025-03-18 00:09:36,798 Current Learning Rate: 0.0003909284 +2025-03-18 00:09:36,799 Train Loss: 0.0001651, Val Loss: 0.0002618 +2025-03-18 00:09:36,799 Epoch 687/2000 +2025-03-18 00:12:21,246 Current Learning Rate: 0.0003986064 +2025-03-18 00:12:21,246 Train Loss: 0.0001657, Val Loss: 0.0002619 +2025-03-18 00:12:21,246 Epoch 688/2000 +2025-03-18 00:15:06,114 Current Learning Rate: 0.0004063093 +2025-03-18 00:15:06,115 Train Loss: 0.0001658, Val Loss: 0.0002535 +2025-03-18 00:15:06,115 Epoch 689/2000 +2025-03-18 00:17:50,950 Current Learning Rate: 0.0004140354 +2025-03-18 00:17:50,950 Train Loss: 0.0001656, Val Loss: 0.0002495 +2025-03-18 00:17:50,951 Epoch 690/2000 +2025-03-18 00:20:35,774 Current Learning Rate: 0.0004217828 +2025-03-18 00:20:35,775 Train Loss: 0.0001655, Val Loss: 0.0002485 +2025-03-18 00:20:35,775 Epoch 691/2000 +2025-03-18 00:23:20,285 Current Learning Rate: 0.0004295494 +2025-03-18 00:23:20,286 Train Loss: 0.0001662, Val Loss: 0.0002504 +2025-03-18 00:23:20,286 Epoch 692/2000 +2025-03-18 00:26:04,850 Current Learning 
Rate: 0.0004373334 +2025-03-18 00:26:04,851 Train Loss: 0.0001673, Val Loss: 0.0002511 +2025-03-18 00:26:04,851 Epoch 693/2000 +2025-03-18 00:28:49,277 Current Learning Rate: 0.0004451328 +2025-03-18 00:28:49,277 Train Loss: 0.0001682, Val Loss: 0.0002515 +2025-03-18 00:28:49,277 Epoch 694/2000 +2025-03-18 00:31:33,608 Current Learning Rate: 0.0004529458 +2025-03-18 00:31:33,608 Train Loss: 0.0001684, Val Loss: 0.0002509 +2025-03-18 00:31:33,608 Epoch 695/2000 +2025-03-18 00:34:18,409 Current Learning Rate: 0.0004607705 +2025-03-18 00:34:18,410 Train Loss: 0.0001687, Val Loss: 0.0002509 +2025-03-18 00:34:18,410 Epoch 696/2000 +2025-03-18 00:37:02,902 Current Learning Rate: 0.0004686047 +2025-03-18 00:37:02,903 Train Loss: 0.0001689, Val Loss: 0.0002517 +2025-03-18 00:37:02,903 Epoch 697/2000 +2025-03-18 00:39:47,543 Current Learning Rate: 0.0004764468 +2025-03-18 00:39:47,543 Train Loss: 0.0001690, Val Loss: 0.0002518 +2025-03-18 00:39:47,543 Epoch 698/2000 +2025-03-18 00:42:32,584 Current Learning Rate: 0.0004842946 +2025-03-18 00:42:32,584 Train Loss: 0.0001698, Val Loss: 0.0002545 +2025-03-18 00:42:32,584 Epoch 699/2000 +2025-03-18 00:45:17,195 Current Learning Rate: 0.0004921463 +2025-03-18 00:45:17,195 Train Loss: 0.0001713, Val Loss: 0.0002541 +2025-03-18 00:45:17,195 Epoch 700/2000 +2025-03-18 00:48:01,613 Current Learning Rate: 0.0005000000 +2025-03-18 00:48:01,613 Train Loss: 0.0001727, Val Loss: 0.0002553 +2025-03-18 00:48:01,613 Epoch 701/2000 +2025-03-18 00:50:46,317 Current Learning Rate: 0.0005078537 +2025-03-18 00:50:46,318 Train Loss: 0.0001733, Val Loss: 0.0002573 +2025-03-18 00:50:46,318 Epoch 702/2000 +2025-03-18 00:53:30,959 Current Learning Rate: 0.0005157054 +2025-03-18 00:53:30,959 Train Loss: 0.0001731, Val Loss: 0.0002590 +2025-03-18 00:53:30,959 Epoch 703/2000 +2025-03-18 00:56:16,290 Current Learning Rate: 0.0005235532 +2025-03-18 00:56:16,291 Train Loss: 0.0001732, Val Loss: 0.0002615 +2025-03-18 00:56:16,291 Epoch 704/2000 +2025-03-18 
00:59:00,716 Current Learning Rate: 0.0005313953 +2025-03-18 00:59:00,716 Train Loss: 0.0001733, Val Loss: 0.0002616 +2025-03-18 00:59:00,716 Epoch 705/2000 +2025-03-18 01:01:45,732 Current Learning Rate: 0.0005392295 +2025-03-18 01:01:45,733 Train Loss: 0.0001730, Val Loss: 0.0002591 +2025-03-18 01:01:45,733 Epoch 706/2000 +2025-03-18 01:04:31,035 Current Learning Rate: 0.0005470542 +2025-03-18 01:04:31,036 Train Loss: 0.0001726, Val Loss: 0.0002570 +2025-03-18 01:04:31,036 Epoch 707/2000 +2025-03-18 01:07:15,308 Current Learning Rate: 0.0005548672 +2025-03-18 01:07:15,308 Train Loss: 0.0001733, Val Loss: 0.0002573 +2025-03-18 01:07:15,309 Epoch 708/2000 +2025-03-18 01:10:00,175 Current Learning Rate: 0.0005626666 +2025-03-18 01:10:00,176 Train Loss: 0.0001740, Val Loss: 0.0002554 +2025-03-18 01:10:00,176 Epoch 709/2000 +2025-03-18 01:12:44,954 Current Learning Rate: 0.0005704506 +2025-03-18 01:12:44,957 Train Loss: 0.0001749, Val Loss: 0.0002548 +2025-03-18 01:12:44,957 Epoch 710/2000 +2025-03-18 01:15:30,089 Current Learning Rate: 0.0005782172 +2025-03-18 01:15:30,090 Train Loss: 0.0001753, Val Loss: 0.0002570 +2025-03-18 01:15:30,091 Epoch 711/2000 +2025-03-18 01:18:14,853 Current Learning Rate: 0.0005859646 +2025-03-18 01:18:14,854 Train Loss: 0.0001764, Val Loss: 0.0002574 +2025-03-18 01:18:14,854 Epoch 712/2000 +2025-03-18 01:20:59,361 Current Learning Rate: 0.0005936907 +2025-03-18 01:20:59,362 Train Loss: 0.0001776, Val Loss: 0.0002605 +2025-03-18 01:20:59,362 Epoch 713/2000 +2025-03-18 01:23:44,182 Current Learning Rate: 0.0006013936 +2025-03-18 01:23:44,183 Train Loss: 0.0001786, Val Loss: 0.0002628 +2025-03-18 01:23:44,183 Epoch 714/2000 +2025-03-18 01:26:28,810 Current Learning Rate: 0.0006090716 +2025-03-18 01:26:28,811 Train Loss: 0.0001793, Val Loss: 0.0002651 +2025-03-18 01:26:28,811 Epoch 715/2000 +2025-03-18 01:29:13,970 Current Learning Rate: 0.0006167227 +2025-03-18 01:29:13,971 Train Loss: 0.0001794, Val Loss: 0.0002694 +2025-03-18 
01:29:13,971 Epoch 716/2000 +2025-03-18 01:31:58,570 Current Learning Rate: 0.0006243449 +2025-03-18 01:31:58,571 Train Loss: 0.0001798, Val Loss: 0.0002677 +2025-03-18 01:31:58,571 Epoch 717/2000 +2025-03-18 01:34:43,688 Current Learning Rate: 0.0006319365 +2025-03-18 01:34:43,689 Train Loss: 0.0001795, Val Loss: 0.0002688 +2025-03-18 01:34:43,689 Epoch 718/2000 +2025-03-18 01:37:28,716 Current Learning Rate: 0.0006394956 +2025-03-18 01:37:28,717 Train Loss: 0.0001794, Val Loss: 0.0002716 +2025-03-18 01:37:28,717 Epoch 719/2000 +2025-03-18 01:40:13,504 Current Learning Rate: 0.0006470202 +2025-03-18 01:40:13,505 Train Loss: 0.0001795, Val Loss: 0.0002714 +2025-03-18 01:40:13,505 Epoch 720/2000 +2025-03-18 01:42:58,025 Current Learning Rate: 0.0006545085 +2025-03-18 01:42:58,025 Train Loss: 0.0001802, Val Loss: 0.0002720 +2025-03-18 01:42:58,025 Epoch 721/2000 +2025-03-18 01:45:42,634 Current Learning Rate: 0.0006619587 +2025-03-18 01:45:42,634 Train Loss: 0.0001815, Val Loss: 0.0002767 +2025-03-18 01:45:42,634 Epoch 722/2000 +2025-03-18 01:48:26,773 Current Learning Rate: 0.0006693690 +2025-03-18 01:48:26,774 Train Loss: 0.0001837, Val Loss: 0.0002707 +2025-03-18 01:48:26,775 Epoch 723/2000 +2025-03-18 01:51:11,920 Current Learning Rate: 0.0006767374 +2025-03-18 01:51:11,921 Train Loss: 0.0001852, Val Loss: 0.0002658 +2025-03-18 01:51:11,921 Epoch 724/2000 +2025-03-18 01:53:57,107 Current Learning Rate: 0.0006840623 +2025-03-18 01:53:57,108 Train Loss: 0.0001837, Val Loss: 0.0002653 +2025-03-18 01:53:57,108 Epoch 725/2000 +2025-03-18 01:56:42,245 Current Learning Rate: 0.0006913417 +2025-03-18 01:56:42,246 Train Loss: 0.0001833, Val Loss: 0.0002667 +2025-03-18 01:56:42,246 Epoch 726/2000 +2025-03-18 01:59:26,982 Current Learning Rate: 0.0006985739 +2025-03-18 01:59:26,983 Train Loss: 0.0001846, Val Loss: 0.0002702 +2025-03-18 01:59:26,983 Epoch 727/2000 +2025-03-18 02:02:11,828 Current Learning Rate: 0.0007057572 +2025-03-18 02:02:11,829 Train Loss: 0.0001847, Val 
Loss: 0.0002689 +2025-03-18 02:02:11,830 Epoch 728/2000 +2025-03-18 02:04:56,660 Current Learning Rate: 0.0007128896 +2025-03-18 02:04:56,661 Train Loss: 0.0001859, Val Loss: 0.0002700 +2025-03-18 02:04:56,661 Epoch 729/2000 +2025-03-18 02:07:41,287 Current Learning Rate: 0.0007199696 +2025-03-18 02:07:41,288 Train Loss: 0.0001870, Val Loss: 0.0002688 +2025-03-18 02:07:41,288 Epoch 730/2000 +2025-03-18 02:10:25,833 Current Learning Rate: 0.0007269952 +2025-03-18 02:10:25,834 Train Loss: 0.0001867, Val Loss: 0.0002705 +2025-03-18 02:10:25,834 Epoch 731/2000 +2025-03-18 02:13:10,417 Current Learning Rate: 0.0007339649 +2025-03-18 02:13:10,418 Train Loss: 0.0001866, Val Loss: 0.0002726 +2025-03-18 02:13:10,418 Epoch 732/2000 +2025-03-18 02:15:55,662 Current Learning Rate: 0.0007408768 +2025-03-18 02:15:55,663 Train Loss: 0.0001872, Val Loss: 0.0002707 +2025-03-18 02:15:55,663 Epoch 733/2000 +2025-03-18 02:18:40,771 Current Learning Rate: 0.0007477293 +2025-03-18 02:18:40,772 Train Loss: 0.0001881, Val Loss: 0.0002735 +2025-03-18 02:18:40,772 Epoch 734/2000 +2025-03-18 02:21:26,051 Current Learning Rate: 0.0007545207 +2025-03-18 02:21:26,051 Train Loss: 0.0001884, Val Loss: 0.0002777 +2025-03-18 02:21:26,051 Epoch 735/2000 +2025-03-18 02:24:10,974 Current Learning Rate: 0.0007612493 +2025-03-18 02:24:10,975 Train Loss: 0.0001893, Val Loss: 0.0002799 +2025-03-18 02:24:10,975 Epoch 736/2000 +2025-03-18 02:26:55,843 Current Learning Rate: 0.0007679134 +2025-03-18 02:26:55,843 Train Loss: 0.0001900, Val Loss: 0.0002793 +2025-03-18 02:26:55,844 Epoch 737/2000 +2025-03-18 02:29:40,508 Current Learning Rate: 0.0007745114 +2025-03-18 02:29:40,509 Train Loss: 0.0001893, Val Loss: 0.0002816 +2025-03-18 02:29:40,509 Epoch 738/2000 +2025-03-18 02:32:25,218 Current Learning Rate: 0.0007810417 +2025-03-18 02:32:25,218 Train Loss: 0.0001890, Val Loss: 0.0002844 +2025-03-18 02:32:25,219 Epoch 739/2000 +2025-03-18 02:35:09,871 Current Learning Rate: 0.0007875026 +2025-03-18 
02:35:09,872 Train Loss: 0.0001884, Val Loss: 0.0002870 +2025-03-18 02:35:09,872 Epoch 740/2000 +2025-03-18 02:37:54,295 Current Learning Rate: 0.0007938926 +2025-03-18 02:37:54,295 Train Loss: 0.0001886, Val Loss: 0.0002825 +2025-03-18 02:37:54,296 Epoch 741/2000 +2025-03-18 02:40:39,359 Current Learning Rate: 0.0008002101 +2025-03-18 02:40:39,359 Train Loss: 0.0001896, Val Loss: 0.0002839 +2025-03-18 02:40:39,360 Epoch 742/2000 +2025-03-18 02:43:24,844 Current Learning Rate: 0.0008064535 +2025-03-18 02:43:24,845 Train Loss: 0.0001912, Val Loss: 0.0002868 +2025-03-18 02:43:24,845 Epoch 743/2000 +2025-03-18 02:46:09,783 Current Learning Rate: 0.0008126213 +2025-03-18 02:46:09,783 Train Loss: 0.0001906, Val Loss: 0.0002826 +2025-03-18 02:46:09,783 Epoch 744/2000 +2025-03-18 02:48:55,344 Current Learning Rate: 0.0008187120 +2025-03-18 02:48:55,345 Train Loss: 0.0001896, Val Loss: 0.0002755 +2025-03-18 02:48:55,345 Epoch 745/2000 +2025-03-18 02:51:40,720 Current Learning Rate: 0.0008247240 +2025-03-18 02:51:40,721 Train Loss: 0.0001895, Val Loss: 0.0002727 +2025-03-18 02:51:40,722 Epoch 746/2000 +2025-03-18 02:54:25,908 Current Learning Rate: 0.0008306559 +2025-03-18 02:54:25,908 Train Loss: 0.0001899, Val Loss: 0.0002748 +2025-03-18 02:54:25,908 Epoch 747/2000 +2025-03-18 02:57:10,701 Current Learning Rate: 0.0008365063 +2025-03-18 02:57:10,701 Train Loss: 0.0001909, Val Loss: 0.0002748 +2025-03-18 02:57:10,702 Epoch 748/2000 +2025-03-18 02:59:55,566 Current Learning Rate: 0.0008422736 +2025-03-18 02:59:55,566 Train Loss: 0.0001933, Val Loss: 0.0002760 +2025-03-18 02:59:55,566 Epoch 749/2000 +2025-03-18 03:02:40,892 Current Learning Rate: 0.0008479564 +2025-03-18 03:02:40,893 Train Loss: 0.0001928, Val Loss: 0.0002727 +2025-03-18 03:02:40,893 Epoch 750/2000 +2025-03-18 03:05:25,577 Current Learning Rate: 0.0008535534 +2025-03-18 03:05:25,577 Train Loss: 0.0001915, Val Loss: 0.0002702 +2025-03-18 03:05:25,577 Epoch 751/2000 +2025-03-18 03:08:11,170 Current Learning 
Rate: 0.0008590631 +2025-03-18 03:08:11,171 Train Loss: 0.0001916, Val Loss: 0.0002719 +2025-03-18 03:08:11,171 Epoch 752/2000 +2025-03-18 03:10:56,155 Current Learning Rate: 0.0008644843 +2025-03-18 03:10:56,156 Train Loss: 0.0001934, Val Loss: 0.0002738 +2025-03-18 03:10:56,156 Epoch 753/2000 +2025-03-18 03:13:41,163 Current Learning Rate: 0.0008698155 +2025-03-18 03:13:41,163 Train Loss: 0.0001947, Val Loss: 0.0002734 +2025-03-18 03:13:41,163 Epoch 754/2000 +2025-03-18 03:16:25,697 Current Learning Rate: 0.0008750555 +2025-03-18 03:16:25,697 Train Loss: 0.0001944, Val Loss: 0.0002744 +2025-03-18 03:16:25,697 Epoch 755/2000 +2025-03-18 03:19:09,932 Current Learning Rate: 0.0008802030 +2025-03-18 03:19:09,933 Train Loss: 0.0001942, Val Loss: 0.0002784 +2025-03-18 03:19:09,933 Epoch 756/2000 +2025-03-18 03:21:54,391 Current Learning Rate: 0.0008852566 +2025-03-18 03:21:54,391 Train Loss: 0.0001944, Val Loss: 0.0002797 +2025-03-18 03:21:54,391 Epoch 757/2000 +2025-03-18 03:24:39,101 Current Learning Rate: 0.0008902152 +2025-03-18 03:24:39,101 Train Loss: 0.0001944, Val Loss: 0.0002851 +2025-03-18 03:24:39,101 Epoch 758/2000 +2025-03-18 03:27:23,715 Current Learning Rate: 0.0008950775 +2025-03-18 03:27:23,716 Train Loss: 0.0001945, Val Loss: 0.0002860 +2025-03-18 03:27:23,716 Epoch 759/2000 +2025-03-18 03:30:08,504 Current Learning Rate: 0.0008998423 +2025-03-18 03:30:08,505 Train Loss: 0.0001936, Val Loss: 0.0002897 +2025-03-18 03:30:08,505 Epoch 760/2000 +2025-03-18 03:32:53,372 Current Learning Rate: 0.0009045085 +2025-03-18 03:32:53,373 Train Loss: 0.0001948, Val Loss: 0.0002933 +2025-03-18 03:32:53,373 Epoch 761/2000 +2025-03-18 03:35:38,004 Current Learning Rate: 0.0009090749 +2025-03-18 03:35:38,005 Train Loss: 0.0001965, Val Loss: 0.0002921 +2025-03-18 03:35:38,005 Epoch 762/2000 +2025-03-18 03:38:23,209 Current Learning Rate: 0.0009135403 +2025-03-18 03:38:23,210 Train Loss: 0.0001978, Val Loss: 0.0002922 +2025-03-18 03:38:23,210 Epoch 763/2000 +2025-03-18 
03:41:08,010 Current Learning Rate: 0.0009179037 +2025-03-18 03:41:08,011 Train Loss: 0.0001971, Val Loss: 0.0002854 +2025-03-18 03:41:08,011 Epoch 764/2000 +2025-03-18 03:43:52,798 Current Learning Rate: 0.0009221640 +2025-03-18 03:43:52,799 Train Loss: 0.0001971, Val Loss: 0.0002786 +2025-03-18 03:43:52,799 Epoch 765/2000 +2025-03-18 03:46:37,833 Current Learning Rate: 0.0009263201 +2025-03-18 03:46:37,834 Train Loss: 0.0001972, Val Loss: 0.0002778 +2025-03-18 03:46:37,834 Epoch 766/2000 +2025-03-18 03:49:22,821 Current Learning Rate: 0.0009303710 +2025-03-18 03:49:22,821 Train Loss: 0.0001980, Val Loss: 0.0002826 +2025-03-18 03:49:22,821 Epoch 767/2000 +2025-03-18 03:52:07,785 Current Learning Rate: 0.0009343158 +2025-03-18 03:52:07,786 Train Loss: 0.0002004, Val Loss: 0.0002807 +2025-03-18 03:52:07,787 Epoch 768/2000 +2025-03-18 03:54:52,186 Current Learning Rate: 0.0009381533 +2025-03-18 03:54:52,187 Train Loss: 0.0001992, Val Loss: 0.0002806 +2025-03-18 03:54:52,187 Epoch 769/2000 +2025-03-18 03:57:36,619 Current Learning Rate: 0.0009418828 +2025-03-18 03:57:36,620 Train Loss: 0.0001978, Val Loss: 0.0002850 +2025-03-18 03:57:36,621 Epoch 770/2000 +2025-03-18 04:00:21,305 Current Learning Rate: 0.0009455033 +2025-03-18 04:00:21,305 Train Loss: 0.0001972, Val Loss: 0.0002837 +2025-03-18 04:00:21,305 Epoch 771/2000 +2025-03-18 04:03:05,785 Current Learning Rate: 0.0009490138 +2025-03-18 04:03:05,785 Train Loss: 0.0001960, Val Loss: 0.0002832 +2025-03-18 04:03:05,785 Epoch 772/2000 +2025-03-18 04:05:50,647 Current Learning Rate: 0.0009524135 +2025-03-18 04:05:50,647 Train Loss: 0.0001984, Val Loss: 0.0002849 +2025-03-18 04:05:50,648 Epoch 773/2000 +2025-03-18 04:08:35,020 Current Learning Rate: 0.0009557016 +2025-03-18 04:08:35,021 Train Loss: 0.0001991, Val Loss: 0.0002811 +2025-03-18 04:08:35,021 Epoch 774/2000 +2025-03-18 04:11:19,452 Current Learning Rate: 0.0009588773 +2025-03-18 04:11:19,452 Train Loss: 0.0001977, Val Loss: 0.0002789 +2025-03-18 
04:11:19,453 Epoch 775/2000 +2025-03-18 04:14:04,019 Current Learning Rate: 0.0009619398 +2025-03-18 04:14:04,020 Train Loss: 0.0001966, Val Loss: 0.0002794 +2025-03-18 04:14:04,020 Epoch 776/2000 +2025-03-18 04:16:49,122 Current Learning Rate: 0.0009648882 +2025-03-18 04:16:49,122 Train Loss: 0.0001985, Val Loss: 0.0002814 +2025-03-18 04:16:49,124 Epoch 777/2000 +2025-03-18 04:19:33,862 Current Learning Rate: 0.0009677220 +2025-03-18 04:19:33,862 Train Loss: 0.0001996, Val Loss: 0.0002797 +2025-03-18 04:19:33,863 Epoch 778/2000 +2025-03-18 04:22:18,476 Current Learning Rate: 0.0009704404 +2025-03-18 04:22:18,477 Train Loss: 0.0001997, Val Loss: 0.0002802 +2025-03-18 04:22:18,477 Epoch 779/2000 +2025-03-18 04:25:02,769 Current Learning Rate: 0.0009730427 +2025-03-18 04:25:02,770 Train Loss: 0.0001977, Val Loss: 0.0002807 +2025-03-18 04:25:02,773 Epoch 780/2000 +2025-03-18 04:27:47,197 Current Learning Rate: 0.0009755283 +2025-03-18 04:27:47,198 Train Loss: 0.0001982, Val Loss: 0.0002814 +2025-03-18 04:27:47,198 Epoch 781/2000 +2025-03-18 04:30:31,605 Current Learning Rate: 0.0009778965 +2025-03-18 04:30:31,605 Train Loss: 0.0001989, Val Loss: 0.0002812 +2025-03-18 04:30:31,606 Epoch 782/2000 +2025-03-18 04:33:15,982 Current Learning Rate: 0.0009801468 +2025-03-18 04:33:15,982 Train Loss: 0.0001996, Val Loss: 0.0002829 +2025-03-18 04:33:15,982 Epoch 783/2000 +2025-03-18 04:36:00,694 Current Learning Rate: 0.0009822787 +2025-03-18 04:36:00,695 Train Loss: 0.0001986, Val Loss: 0.0002814 +2025-03-18 04:36:00,695 Epoch 784/2000 +2025-03-18 04:38:45,414 Current Learning Rate: 0.0009842916 +2025-03-18 04:38:45,414 Train Loss: 0.0001973, Val Loss: 0.0002811 +2025-03-18 04:38:45,415 Epoch 785/2000 +2025-03-18 04:41:30,603 Current Learning Rate: 0.0009861850 +2025-03-18 04:41:30,603 Train Loss: 0.0001967, Val Loss: 0.0002840 +2025-03-18 04:41:30,603 Epoch 786/2000 +2025-03-18 04:44:15,875 Current Learning Rate: 0.0009879584 +2025-03-18 04:44:15,876 Train Loss: 0.0001980, Val 
Loss: 0.0002807 +2025-03-18 04:44:15,876 Epoch 787/2000 +2025-03-18 04:47:00,464 Current Learning Rate: 0.0009896114 +2025-03-18 04:47:00,465 Train Loss: 0.0001969, Val Loss: 0.0002778 +2025-03-18 04:47:00,465 Epoch 788/2000 +2025-03-18 04:49:45,483 Current Learning Rate: 0.0009911436 +2025-03-18 04:49:45,483 Train Loss: 0.0001974, Val Loss: 0.0002805 +2025-03-18 04:49:45,484 Epoch 789/2000 +2025-03-18 04:52:30,726 Current Learning Rate: 0.0009925547 +2025-03-18 04:52:30,727 Train Loss: 0.0001985, Val Loss: 0.0002839 +2025-03-18 04:52:30,727 Epoch 790/2000 +2025-03-18 04:55:15,455 Current Learning Rate: 0.0009938442 +2025-03-18 04:55:15,455 Train Loss: 0.0001983, Val Loss: 0.0002826 +2025-03-18 04:55:15,455 Epoch 791/2000 +2025-03-18 04:58:00,267 Current Learning Rate: 0.0009950118 +2025-03-18 04:58:00,268 Train Loss: 0.0001968, Val Loss: 0.0002863 +2025-03-18 04:58:00,268 Epoch 792/2000 +2025-03-18 05:00:44,624 Current Learning Rate: 0.0009960574 +2025-03-18 05:00:44,624 Train Loss: 0.0001970, Val Loss: 0.0002924 +2025-03-18 05:00:44,624 Epoch 793/2000 +2025-03-18 05:03:29,284 Current Learning Rate: 0.0009969805 +2025-03-18 05:03:29,285 Train Loss: 0.0001979, Val Loss: 0.0003003 +2025-03-18 05:03:29,285 Epoch 794/2000 +2025-03-18 05:06:14,064 Current Learning Rate: 0.0009977810 +2025-03-18 05:06:14,064 Train Loss: 0.0001991, Val Loss: 0.0002931 +2025-03-18 05:06:14,065 Epoch 795/2000 +2025-03-18 05:08:59,427 Current Learning Rate: 0.0009984587 +2025-03-18 05:08:59,427 Train Loss: 0.0001986, Val Loss: 0.0002901 +2025-03-18 05:08:59,428 Epoch 796/2000 +2025-03-18 05:11:44,217 Current Learning Rate: 0.0009990134 +2025-03-18 05:11:44,218 Train Loss: 0.0001980, Val Loss: 0.0002849 +2025-03-18 05:11:44,218 Epoch 797/2000 +2025-03-18 05:14:29,453 Current Learning Rate: 0.0009994449 +2025-03-18 05:14:29,454 Train Loss: 0.0001985, Val Loss: 0.0002843 +2025-03-18 05:14:29,454 Epoch 798/2000 +2025-03-18 05:17:13,650 Current Learning Rate: 0.0009997533 +2025-03-18 
05:17:13,651 Train Loss: 0.0001976, Val Loss: 0.0002860 +2025-03-18 05:17:13,651 Epoch 799/2000 +2025-03-18 05:19:58,507 Current Learning Rate: 0.0009999383 +2025-03-18 05:19:58,507 Train Loss: 0.0001966, Val Loss: 0.0002897 +2025-03-18 05:19:58,507 Epoch 800/2000 +2025-03-18 05:22:43,292 Current Learning Rate: 0.0010000000 +2025-03-18 05:22:43,292 Train Loss: 0.0001976, Val Loss: 0.0002854 +2025-03-18 05:22:43,292 Epoch 801/2000 +2025-03-18 05:25:28,016 Current Learning Rate: 0.0009999383 +2025-03-18 05:25:28,017 Train Loss: 0.0001981, Val Loss: 0.0002819 +2025-03-18 05:25:28,017 Epoch 802/2000 +2025-03-18 05:28:12,843 Current Learning Rate: 0.0009997533 +2025-03-18 05:28:12,843 Train Loss: 0.0001974, Val Loss: 0.0002792 +2025-03-18 05:28:12,844 Epoch 803/2000 +2025-03-18 05:30:57,552 Current Learning Rate: 0.0009994449 +2025-03-18 05:30:57,552 Train Loss: 0.0001967, Val Loss: 0.0002788 +2025-03-18 05:30:57,553 Epoch 804/2000 +2025-03-18 05:33:42,283 Current Learning Rate: 0.0009990134 +2025-03-18 05:33:42,283 Train Loss: 0.0001960, Val Loss: 0.0002810 +2025-03-18 05:33:42,283 Epoch 805/2000 +2025-03-18 05:36:27,495 Current Learning Rate: 0.0009984587 +2025-03-18 05:36:27,495 Train Loss: 0.0001973, Val Loss: 0.0002817 +2025-03-18 05:36:27,495 Epoch 806/2000 +2025-03-18 05:39:12,267 Current Learning Rate: 0.0009977810 +2025-03-18 05:39:12,267 Train Loss: 0.0001977, Val Loss: 0.0002847 +2025-03-18 05:39:12,268 Epoch 807/2000 +2025-03-18 05:41:56,547 Current Learning Rate: 0.0009969805 +2025-03-18 05:41:56,548 Train Loss: 0.0001966, Val Loss: 0.0002830 +2025-03-18 05:41:56,548 Epoch 808/2000 +2025-03-18 05:44:41,982 Current Learning Rate: 0.0009960574 +2025-03-18 05:44:41,983 Train Loss: 0.0001959, Val Loss: 0.0002825 +2025-03-18 05:44:41,983 Epoch 809/2000 +2025-03-18 05:47:26,070 Current Learning Rate: 0.0009950118 +2025-03-18 05:47:26,071 Train Loss: 0.0001957, Val Loss: 0.0002837 +2025-03-18 05:47:26,071 Epoch 810/2000 +2025-03-18 05:50:10,938 Current Learning 
Rate: 0.0009938442 +2025-03-18 05:50:10,938 Train Loss: 0.0001966, Val Loss: 0.0002848 +2025-03-18 05:50:10,939 Epoch 811/2000 +2025-03-18 05:52:55,665 Current Learning Rate: 0.0009925547 +2025-03-18 05:52:55,666 Train Loss: 0.0001971, Val Loss: 0.0002900 +2025-03-18 05:52:55,666 Epoch 812/2000 +2025-03-18 05:55:40,044 Current Learning Rate: 0.0009911436 +2025-03-18 05:55:40,044 Train Loss: 0.0001975, Val Loss: 0.0002815 +2025-03-18 05:55:40,045 Epoch 813/2000 +2025-03-18 05:58:25,180 Current Learning Rate: 0.0009896114 +2025-03-18 05:58:25,180 Train Loss: 0.0001964, Val Loss: 0.0002834 +2025-03-18 05:58:25,181 Epoch 814/2000 +2025-03-18 06:01:10,016 Current Learning Rate: 0.0009879584 +2025-03-18 06:01:10,017 Train Loss: 0.0001956, Val Loss: 0.0002847 +2025-03-18 06:01:10,017 Epoch 815/2000 +2025-03-18 06:03:55,045 Current Learning Rate: 0.0009861850 +2025-03-18 06:03:55,045 Train Loss: 0.0001953, Val Loss: 0.0002837 +2025-03-18 06:03:55,046 Epoch 816/2000 +2025-03-18 06:06:40,350 Current Learning Rate: 0.0009842916 +2025-03-18 06:06:40,351 Train Loss: 0.0001947, Val Loss: 0.0002835 +2025-03-18 06:06:40,351 Epoch 817/2000 +2025-03-18 06:09:25,264 Current Learning Rate: 0.0009822787 +2025-03-18 06:09:25,264 Train Loss: 0.0001942, Val Loss: 0.0002813 +2025-03-18 06:09:25,265 Epoch 818/2000 +2025-03-18 06:12:09,917 Current Learning Rate: 0.0009801468 +2025-03-18 06:12:09,918 Train Loss: 0.0001939, Val Loss: 0.0002809 +2025-03-18 06:12:09,918 Epoch 819/2000 +2025-03-18 06:14:54,432 Current Learning Rate: 0.0009778965 +2025-03-18 06:14:54,432 Train Loss: 0.0001934, Val Loss: 0.0002814 +2025-03-18 06:14:54,433 Epoch 820/2000 +2025-03-18 06:17:39,260 Current Learning Rate: 0.0009755283 +2025-03-18 06:17:39,261 Train Loss: 0.0001923, Val Loss: 0.0002775 +2025-03-18 06:17:39,261 Epoch 821/2000 +2025-03-18 06:20:24,083 Current Learning Rate: 0.0009730427 +2025-03-18 06:20:24,084 Train Loss: 0.0001921, Val Loss: 0.0002766 +2025-03-18 06:20:24,085 Epoch 822/2000 +2025-03-18 
06:23:08,842 Current Learning Rate: 0.0009704404 +2025-03-18 06:23:08,843 Train Loss: 0.0001920, Val Loss: 0.0002756 +2025-03-18 06:23:08,843 Epoch 823/2000 +2025-03-18 06:25:54,033 Current Learning Rate: 0.0009677220 +2025-03-18 06:25:54,034 Train Loss: 0.0001917, Val Loss: 0.0002796 +2025-03-18 06:25:54,034 Epoch 824/2000 +2025-03-18 06:28:38,648 Current Learning Rate: 0.0009648882 +2025-03-18 06:28:38,648 Train Loss: 0.0001921, Val Loss: 0.0002778 +2025-03-18 06:28:38,649 Epoch 825/2000 +2025-03-18 06:31:23,499 Current Learning Rate: 0.0009619398 +2025-03-18 06:31:23,500 Train Loss: 0.0001922, Val Loss: 0.0002852 +2025-03-18 06:31:23,500 Epoch 826/2000 +2025-03-18 06:34:08,099 Current Learning Rate: 0.0009588773 +2025-03-18 06:34:08,100 Train Loss: 0.0001921, Val Loss: 0.0002854 +2025-03-18 06:34:08,100 Epoch 827/2000 +2025-03-18 06:36:52,210 Current Learning Rate: 0.0009557016 +2025-03-18 06:36:52,210 Train Loss: 0.0001931, Val Loss: 0.0002816 +2025-03-18 06:36:52,210 Epoch 828/2000 +2025-03-18 06:39:37,092 Current Learning Rate: 0.0009524135 +2025-03-18 06:39:37,092 Train Loss: 0.0001924, Val Loss: 0.0002776 +2025-03-18 06:39:37,092 Epoch 829/2000 +2025-03-18 06:42:22,038 Current Learning Rate: 0.0009490138 +2025-03-18 06:42:22,038 Train Loss: 0.0001923, Val Loss: 0.0002785 +2025-03-18 06:42:22,038 Epoch 830/2000 +2025-03-18 06:45:06,646 Current Learning Rate: 0.0009455033 +2025-03-18 06:45:06,646 Train Loss: 0.0001930, Val Loss: 0.0002770 +2025-03-18 06:45:06,646 Epoch 831/2000 +2025-03-18 06:47:51,545 Current Learning Rate: 0.0009418828 +2025-03-18 06:47:51,546 Train Loss: 0.0001933, Val Loss: 0.0002732 +2025-03-18 06:47:51,546 Epoch 832/2000 +2025-03-18 06:50:36,513 Current Learning Rate: 0.0009381533 +2025-03-18 06:50:36,513 Train Loss: 0.0001924, Val Loss: 0.0002738 +2025-03-18 06:50:36,513 Epoch 833/2000 +2025-03-18 06:53:21,154 Current Learning Rate: 0.0009343158 +2025-03-18 06:53:21,155 Train Loss: 0.0001927, Val Loss: 0.0002738 +2025-03-18 
06:53:21,155 Epoch 834/2000 +2025-03-18 06:56:05,969 Current Learning Rate: 0.0009303710 +2025-03-18 06:56:05,970 Train Loss: 0.0001927, Val Loss: 0.0002723 +2025-03-18 06:56:05,970 Epoch 835/2000 +2025-03-18 06:58:51,010 Current Learning Rate: 0.0009263201 +2025-03-18 06:58:51,011 Train Loss: 0.0001915, Val Loss: 0.0002727 +2025-03-18 06:58:51,011 Epoch 836/2000 +2025-03-18 07:01:35,753 Current Learning Rate: 0.0009221640 +2025-03-18 07:01:35,753 Train Loss: 0.0001909, Val Loss: 0.0002757 +2025-03-18 07:01:35,753 Epoch 837/2000 +2025-03-18 07:04:20,550 Current Learning Rate: 0.0009179037 +2025-03-18 07:04:20,550 Train Loss: 0.0001898, Val Loss: 0.0002761 +2025-03-18 07:04:20,550 Epoch 838/2000 +2025-03-18 07:07:05,363 Current Learning Rate: 0.0009135403 +2025-03-18 07:07:05,363 Train Loss: 0.0001895, Val Loss: 0.0002750 +2025-03-18 07:07:05,364 Epoch 839/2000 +2025-03-18 07:09:50,048 Current Learning Rate: 0.0009090749 +2025-03-18 07:09:50,049 Train Loss: 0.0001886, Val Loss: 0.0002700 +2025-03-18 07:09:50,049 Epoch 840/2000 +2025-03-18 07:12:34,528 Current Learning Rate: 0.0009045085 +2025-03-18 07:12:34,529 Train Loss: 0.0001887, Val Loss: 0.0002684 +2025-03-18 07:12:34,529 Epoch 841/2000 +2025-03-18 07:15:18,813 Current Learning Rate: 0.0008998423 +2025-03-18 07:15:18,814 Train Loss: 0.0001876, Val Loss: 0.0002656 +2025-03-18 07:15:18,814 Epoch 842/2000 +2025-03-18 07:18:03,853 Current Learning Rate: 0.0008950775 +2025-03-18 07:18:03,853 Train Loss: 0.0001853, Val Loss: 0.0002676 +2025-03-18 07:18:03,853 Epoch 843/2000 +2025-03-18 07:20:48,276 Current Learning Rate: 0.0008902152 +2025-03-18 07:20:48,280 Train Loss: 0.0001858, Val Loss: 0.0002677 +2025-03-18 07:20:48,280 Epoch 844/2000 +2025-03-18 07:23:32,987 Current Learning Rate: 0.0008852566 +2025-03-18 07:23:32,987 Train Loss: 0.0001866, Val Loss: 0.0002707 +2025-03-18 07:23:32,987 Epoch 845/2000 +2025-03-18 07:26:17,905 Current Learning Rate: 0.0008802030 +2025-03-18 07:26:17,905 Train Loss: 0.0001864, Val 
Loss: 0.0002701 +2025-03-18 07:26:17,905 Epoch 846/2000 +2025-03-18 07:29:02,232 Current Learning Rate: 0.0008750555 +2025-03-18 07:29:02,233 Train Loss: 0.0001852, Val Loss: 0.0002702 +2025-03-18 07:29:02,233 Epoch 847/2000 +2025-03-18 07:31:46,976 Current Learning Rate: 0.0008698155 +2025-03-18 07:31:46,977 Train Loss: 0.0001847, Val Loss: 0.0002689 +2025-03-18 07:31:46,977 Epoch 848/2000 +2025-03-18 07:34:32,160 Current Learning Rate: 0.0008644843 +2025-03-18 07:34:32,161 Train Loss: 0.0001846, Val Loss: 0.0002696 +2025-03-18 07:34:32,161 Epoch 849/2000 +2025-03-18 07:37:16,548 Current Learning Rate: 0.0008590631 +2025-03-18 07:37:16,548 Train Loss: 0.0001845, Val Loss: 0.0002675 +2025-03-18 07:37:16,548 Epoch 850/2000 +2025-03-18 07:40:01,143 Current Learning Rate: 0.0008535534 +2025-03-18 07:40:01,144 Train Loss: 0.0001849, Val Loss: 0.0002680 +2025-03-18 07:40:01,144 Epoch 851/2000 +2025-03-18 07:42:46,017 Current Learning Rate: 0.0008479564 +2025-03-18 07:42:46,018 Train Loss: 0.0001843, Val Loss: 0.0002652 +2025-03-18 07:42:46,018 Epoch 852/2000 +2025-03-18 07:45:30,486 Current Learning Rate: 0.0008422736 +2025-03-18 07:45:30,487 Train Loss: 0.0001841, Val Loss: 0.0002664 +2025-03-18 07:45:30,487 Epoch 853/2000 +2025-03-18 07:48:14,885 Current Learning Rate: 0.0008365063 +2025-03-18 07:48:14,886 Train Loss: 0.0001848, Val Loss: 0.0002657 +2025-03-18 07:48:14,886 Epoch 854/2000 +2025-03-18 07:50:59,859 Current Learning Rate: 0.0008306559 +2025-03-18 07:50:59,859 Train Loss: 0.0001856, Val Loss: 0.0002673 +2025-03-18 07:50:59,859 Epoch 855/2000 +2025-03-18 07:53:44,948 Current Learning Rate: 0.0008247240 +2025-03-18 07:53:44,949 Train Loss: 0.0001851, Val Loss: 0.0002680 +2025-03-18 07:53:44,949 Epoch 856/2000 +2025-03-18 07:56:30,215 Current Learning Rate: 0.0008187120 +2025-03-18 07:56:30,216 Train Loss: 0.0001834, Val Loss: 0.0002673 +2025-03-18 07:56:30,216 Epoch 857/2000 +2025-03-18 07:59:14,888 Current Learning Rate: 0.0008126213 +2025-03-18 
07:59:14,888 Train Loss: 0.0001813, Val Loss: 0.0002663 +2025-03-18 07:59:14,888 Epoch 858/2000 +2025-03-18 08:01:59,789 Current Learning Rate: 0.0008064535 +2025-03-18 08:01:59,789 Train Loss: 0.0001794, Val Loss: 0.0002669 +2025-03-18 08:01:59,790 Epoch 859/2000 +2025-03-18 08:04:44,579 Current Learning Rate: 0.0008002101 +2025-03-18 08:04:44,580 Train Loss: 0.0001783, Val Loss: 0.0002628 +2025-03-18 08:04:44,580 Epoch 860/2000 +2025-03-18 08:07:29,162 Current Learning Rate: 0.0007938926 +2025-03-18 08:07:29,163 Train Loss: 0.0001786, Val Loss: 0.0002665 +2025-03-18 08:07:29,163 Epoch 861/2000 +2025-03-18 08:10:13,490 Current Learning Rate: 0.0007875026 +2025-03-18 08:10:13,491 Train Loss: 0.0001797, Val Loss: 0.0002679 +2025-03-18 08:10:13,491 Epoch 862/2000 +2025-03-18 08:12:58,272 Current Learning Rate: 0.0007810417 +2025-03-18 08:12:58,273 Train Loss: 0.0001788, Val Loss: 0.0002611 +2025-03-18 08:12:58,273 Epoch 863/2000 +2025-03-18 08:15:42,781 Current Learning Rate: 0.0007745114 +2025-03-18 08:15:42,781 Train Loss: 0.0001785, Val Loss: 0.0002611 +2025-03-18 08:15:42,781 Epoch 864/2000 +2025-03-18 08:18:27,024 Current Learning Rate: 0.0007679134 +2025-03-18 08:18:27,025 Train Loss: 0.0001789, Val Loss: 0.0002639 +2025-03-18 08:18:27,025 Epoch 865/2000 +2025-03-18 08:21:11,550 Current Learning Rate: 0.0007612493 +2025-03-18 08:21:11,551 Train Loss: 0.0001797, Val Loss: 0.0002631 +2025-03-18 08:21:11,551 Epoch 866/2000 +2025-03-18 08:23:56,240 Current Learning Rate: 0.0007545207 +2025-03-18 08:23:56,240 Train Loss: 0.0001795, Val Loss: 0.0002618 +2025-03-18 08:23:56,240 Epoch 867/2000 +2025-03-18 08:26:41,937 Current Learning Rate: 0.0007477293 +2025-03-18 08:26:41,940 Train Loss: 0.0001798, Val Loss: 0.0002636 +2025-03-18 08:26:41,940 Epoch 868/2000 +2025-03-18 08:29:27,025 Current Learning Rate: 0.0007408768 +2025-03-18 08:29:27,026 Train Loss: 0.0001791, Val Loss: 0.0002630 +2025-03-18 08:29:27,026 Epoch 869/2000 +2025-03-18 08:32:11,937 Current Learning 
Rate: 0.0007339649 +2025-03-18 08:32:11,938 Train Loss: 0.0001779, Val Loss: 0.0002610 +2025-03-18 08:32:11,938 Epoch 870/2000 +2025-03-18 08:34:56,816 Current Learning Rate: 0.0007269952 +2025-03-18 08:34:56,816 Train Loss: 0.0001776, Val Loss: 0.0002607 +2025-03-18 08:34:56,816 Epoch 871/2000 +2025-03-18 08:37:41,392 Current Learning Rate: 0.0007199696 +2025-03-18 08:37:41,392 Train Loss: 0.0001766, Val Loss: 0.0002642 +2025-03-18 08:37:41,393 Epoch 872/2000 +2025-03-18 08:40:26,393 Current Learning Rate: 0.0007128896 +2025-03-18 08:40:26,394 Train Loss: 0.0001766, Val Loss: 0.0002587 +2025-03-18 08:40:26,394 Epoch 873/2000 +2025-03-18 08:43:10,687 Current Learning Rate: 0.0007057572 +2025-03-18 08:43:10,687 Train Loss: 0.0001752, Val Loss: 0.0002558 +2025-03-18 08:43:10,687 Epoch 874/2000 +2025-03-18 08:45:55,662 Current Learning Rate: 0.0006985739 +2025-03-18 08:45:55,663 Train Loss: 0.0001741, Val Loss: 0.0002548 +2025-03-18 08:45:55,663 Epoch 875/2000 +2025-03-18 08:48:40,481 Current Learning Rate: 0.0006913417 +2025-03-18 08:48:40,481 Train Loss: 0.0001731, Val Loss: 0.0002536 +2025-03-18 08:48:40,481 Epoch 876/2000 +2025-03-18 08:51:25,038 Current Learning Rate: 0.0006840623 +2025-03-18 08:51:25,039 Train Loss: 0.0001733, Val Loss: 0.0002550 +2025-03-18 08:51:25,040 Epoch 877/2000 +2025-03-18 08:54:09,962 Current Learning Rate: 0.0006767374 +2025-03-18 08:54:09,962 Train Loss: 0.0001732, Val Loss: 0.0002550 +2025-03-18 08:54:09,962 Epoch 878/2000 +2025-03-18 08:56:54,656 Current Learning Rate: 0.0006693690 +2025-03-18 08:56:54,657 Train Loss: 0.0001721, Val Loss: 0.0002536 +2025-03-18 08:56:54,657 Epoch 879/2000 +2025-03-18 08:59:39,703 Current Learning Rate: 0.0006619587 +2025-03-18 08:59:39,704 Train Loss: 0.0001710, Val Loss: 0.0002517 +2025-03-18 08:59:39,704 Epoch 880/2000 +2025-03-18 09:02:24,380 Current Learning Rate: 0.0006545085 +2025-03-18 09:02:24,381 Train Loss: 0.0001703, Val Loss: 0.0002503 +2025-03-18 09:02:24,381 Epoch 881/2000 +2025-03-18 
09:05:09,107 Current Learning Rate: 0.0006470202 +2025-03-18 09:05:09,107 Train Loss: 0.0001703, Val Loss: 0.0002503 +2025-03-18 09:05:09,107 Epoch 882/2000 +2025-03-18 09:07:53,769 Current Learning Rate: 0.0006394956 +2025-03-18 09:07:53,770 Train Loss: 0.0001702, Val Loss: 0.0002486 +2025-03-18 09:07:53,770 Epoch 883/2000 +2025-03-18 09:10:38,527 Current Learning Rate: 0.0006319365 +2025-03-18 09:10:38,528 Train Loss: 0.0001702, Val Loss: 0.0002490 +2025-03-18 09:10:38,528 Epoch 884/2000 +2025-03-18 09:13:23,233 Current Learning Rate: 0.0006243449 +2025-03-18 09:13:23,234 Train Loss: 0.0001704, Val Loss: 0.0002488 +2025-03-18 09:13:23,234 Epoch 885/2000 +2025-03-18 09:16:08,075 Current Learning Rate: 0.0006167227 +2025-03-18 09:16:08,076 Train Loss: 0.0001703, Val Loss: 0.0002521 +2025-03-18 09:16:08,076 Epoch 886/2000 +2025-03-18 09:18:52,697 Current Learning Rate: 0.0006090716 +2025-03-18 09:18:52,698 Train Loss: 0.0001711, Val Loss: 0.0002523 +2025-03-18 09:18:52,698 Epoch 887/2000 +2025-03-18 09:21:37,984 Current Learning Rate: 0.0006013936 +2025-03-18 09:21:37,984 Train Loss: 0.0001710, Val Loss: 0.0002516 +2025-03-18 09:21:37,984 Epoch 888/2000 +2025-03-18 09:24:22,729 Current Learning Rate: 0.0005936907 +2025-03-18 09:24:22,730 Train Loss: 0.0001702, Val Loss: 0.0002497 +2025-03-18 09:24:22,730 Epoch 889/2000 +2025-03-18 09:27:07,805 Current Learning Rate: 0.0005859646 +2025-03-18 09:27:07,806 Train Loss: 0.0001684, Val Loss: 0.0002483 +2025-03-18 09:27:07,806 Epoch 890/2000 +2025-03-18 09:29:52,064 Current Learning Rate: 0.0005782172 +2025-03-18 09:29:52,065 Train Loss: 0.0001668, Val Loss: 0.0002496 +2025-03-18 09:29:52,065 Epoch 891/2000 +2025-03-18 09:32:36,698 Current Learning Rate: 0.0005704506 +2025-03-18 09:32:36,699 Train Loss: 0.0001668, Val Loss: 0.0002499 +2025-03-18 09:32:36,699 Epoch 892/2000 +2025-03-18 09:35:21,430 Current Learning Rate: 0.0005626666 +2025-03-18 09:35:21,430 Train Loss: 0.0001664, Val Loss: 0.0002463 +2025-03-18 
09:35:21,431 Epoch 893/2000 +2025-03-18 09:38:06,153 Current Learning Rate: 0.0005548672 +2025-03-18 09:38:06,154 Train Loss: 0.0001663, Val Loss: 0.0002459 +2025-03-18 09:38:06,154 Epoch 894/2000 +2025-03-18 09:40:51,026 Current Learning Rate: 0.0005470542 +2025-03-18 09:40:51,026 Train Loss: 0.0001681, Val Loss: 0.0002468 +2025-03-18 09:40:51,026 Epoch 895/2000 +2025-03-18 09:43:35,974 Current Learning Rate: 0.0005392295 +2025-03-18 09:43:35,974 Train Loss: 0.0001680, Val Loss: 0.0002447 +2025-03-18 09:43:35,975 Epoch 896/2000 +2025-03-18 09:46:21,080 Current Learning Rate: 0.0005313953 +2025-03-18 09:46:21,080 Train Loss: 0.0001658, Val Loss: 0.0002440 +2025-03-18 09:46:21,081 Epoch 897/2000 +2025-03-18 09:49:05,999 Current Learning Rate: 0.0005235532 +2025-03-18 09:49:05,999 Train Loss: 0.0001647, Val Loss: 0.0002470 +2025-03-18 09:49:06,000 Epoch 898/2000 +2025-03-18 09:51:50,372 Current Learning Rate: 0.0005157054 +2025-03-18 09:51:50,373 Train Loss: 0.0001644, Val Loss: 0.0002457 +2025-03-18 09:51:50,373 Epoch 899/2000 +2025-03-18 09:54:35,007 Current Learning Rate: 0.0005078537 +2025-03-18 09:54:35,008 Train Loss: 0.0001638, Val Loss: 0.0002448 +2025-03-18 09:54:35,008 Epoch 900/2000 +2025-03-18 09:57:19,230 Current Learning Rate: 0.0005000000 +2025-03-18 09:57:19,231 Train Loss: 0.0001638, Val Loss: 0.0002443 +2025-03-18 09:57:19,231 Epoch 901/2000 +2025-03-18 10:00:04,052 Current Learning Rate: 0.0004921463 +2025-03-18 10:00:04,053 Train Loss: 0.0001635, Val Loss: 0.0002440 +2025-03-18 10:00:04,053 Epoch 902/2000 +2025-03-18 10:02:48,701 Current Learning Rate: 0.0004842946 +2025-03-18 10:02:48,702 Train Loss: 0.0001632, Val Loss: 0.0002438 +2025-03-18 10:02:48,702 Epoch 903/2000 +2025-03-18 10:05:33,284 Current Learning Rate: 0.0004764468 +2025-03-18 10:05:33,284 Train Loss: 0.0001628, Val Loss: 0.0002423 +2025-03-18 10:05:33,285 Epoch 904/2000 +2025-03-18 10:08:18,287 Current Learning Rate: 0.0004686047 +2025-03-18 10:08:18,287 Train Loss: 0.0001622, Val 
Loss: 0.0002440 +2025-03-18 10:08:18,288 Epoch 905/2000 +2025-03-18 10:11:03,367 Current Learning Rate: 0.0004607705 +2025-03-18 10:11:03,367 Train Loss: 0.0001617, Val Loss: 0.0002426 +2025-03-18 10:11:03,368 Epoch 906/2000 +2025-03-18 10:13:47,980 Current Learning Rate: 0.0004529458 +2025-03-18 10:13:47,980 Train Loss: 0.0001610, Val Loss: 0.0002406 +2025-03-18 10:13:47,980 Epoch 907/2000 +2025-03-18 10:16:32,612 Current Learning Rate: 0.0004451328 +2025-03-18 10:16:32,613 Train Loss: 0.0001600, Val Loss: 0.0002400 +2025-03-18 10:16:32,613 Epoch 908/2000 +2025-03-18 10:19:17,888 Current Learning Rate: 0.0004373334 +2025-03-18 10:19:17,889 Train Loss: 0.0001594, Val Loss: 0.0002393 +2025-03-18 10:19:17,889 Epoch 909/2000 +2025-03-18 10:22:02,570 Current Learning Rate: 0.0004295494 +2025-03-18 10:22:02,571 Train Loss: 0.0001590, Val Loss: 0.0002402 +2025-03-18 10:22:02,571 Epoch 910/2000 +2025-03-18 10:24:47,852 Current Learning Rate: 0.0004217828 +2025-03-18 10:24:47,853 Train Loss: 0.0001588, Val Loss: 0.0002392 +2025-03-18 10:24:47,853 Epoch 911/2000 +2025-03-18 10:27:32,596 Current Learning Rate: 0.0004140354 +2025-03-18 10:27:32,596 Train Loss: 0.0001587, Val Loss: 0.0002389 +2025-03-18 10:27:32,596 Epoch 912/2000 +2025-03-18 10:30:17,150 Current Learning Rate: 0.0004063093 +2025-03-18 10:30:17,150 Train Loss: 0.0001585, Val Loss: 0.0002405 +2025-03-18 10:30:17,153 Epoch 913/2000 +2025-03-18 10:33:01,520 Current Learning Rate: 0.0003986064 +2025-03-18 10:33:01,521 Train Loss: 0.0001583, Val Loss: 0.0002381 +2025-03-18 10:33:01,521 Epoch 914/2000 +2025-03-18 10:35:46,435 Current Learning Rate: 0.0003909284 +2025-03-18 10:35:46,436 Train Loss: 0.0001576, Val Loss: 0.0002362 +2025-03-18 10:35:46,436 Epoch 915/2000 +2025-03-18 10:38:31,111 Current Learning Rate: 0.0003832773 +2025-03-18 10:38:31,111 Train Loss: 0.0001565, Val Loss: 0.0002351 +2025-03-18 10:38:31,111 Epoch 916/2000 +2025-03-18 10:41:15,626 Current Learning Rate: 0.0003756551 +2025-03-18 
10:41:15,626 Train Loss: 0.0001566, Val Loss: 0.0002358 +2025-03-18 10:41:15,627 Epoch 917/2000 +2025-03-18 10:43:59,882 Current Learning Rate: 0.0003680635 +2025-03-18 10:43:59,882 Train Loss: 0.0001566, Val Loss: 0.0002366 +2025-03-18 10:43:59,882 Epoch 918/2000 +2025-03-18 10:46:43,878 Current Learning Rate: 0.0003605044 +2025-03-18 10:46:43,879 Train Loss: 0.0001561, Val Loss: 0.0002382 +2025-03-18 10:46:43,879 Epoch 919/2000 +2025-03-18 10:49:28,466 Current Learning Rate: 0.0003529798 +2025-03-18 10:49:28,466 Train Loss: 0.0001555, Val Loss: 0.0002376 +2025-03-18 10:49:28,467 Epoch 920/2000 +2025-03-18 10:52:13,020 Current Learning Rate: 0.0003454915 +2025-03-18 10:52:13,020 Train Loss: 0.0001549, Val Loss: 0.0002352 +2025-03-18 10:52:13,020 Epoch 921/2000 +2025-03-18 10:54:57,903 Current Learning Rate: 0.0003380413 +2025-03-18 10:54:57,904 Train Loss: 0.0001537, Val Loss: 0.0002339 +2025-03-18 10:54:57,904 Epoch 922/2000 +2025-03-18 10:57:43,020 Current Learning Rate: 0.0003306310 +2025-03-18 10:57:43,021 Train Loss: 0.0001526, Val Loss: 0.0002327 +2025-03-18 10:57:43,021 Epoch 923/2000 +2025-03-18 11:00:28,055 Current Learning Rate: 0.0003232626 +2025-03-18 11:00:28,055 Train Loss: 0.0001521, Val Loss: 0.0002326 +2025-03-18 11:00:28,055 Epoch 924/2000 +2025-03-18 11:03:12,943 Current Learning Rate: 0.0003159377 +2025-03-18 11:03:12,943 Train Loss: 0.0001518, Val Loss: 0.0002325 +2025-03-18 11:03:12,943 Epoch 925/2000 +2025-03-18 11:05:57,785 Current Learning Rate: 0.0003086583 +2025-03-18 11:05:57,785 Train Loss: 0.0001512, Val Loss: 0.0002323 +2025-03-18 11:05:57,786 Epoch 926/2000 +2025-03-18 11:08:42,540 Current Learning Rate: 0.0003014261 +2025-03-18 11:08:42,540 Train Loss: 0.0001510, Val Loss: 0.0002334 +2025-03-18 11:08:42,540 Epoch 927/2000 +2025-03-18 11:11:27,260 Current Learning Rate: 0.0002942428 +2025-03-18 11:11:27,261 Train Loss: 0.0001507, Val Loss: 0.0002335 +2025-03-18 11:11:27,261 Epoch 928/2000 +2025-03-18 11:14:11,534 Current Learning 
Rate: 0.0002871104 +2025-03-18 11:14:11,534 Train Loss: 0.0001505, Val Loss: 0.0002331 +2025-03-18 11:14:11,534 Epoch 929/2000 +2025-03-18 11:16:56,125 Current Learning Rate: 0.0002800304 +2025-03-18 11:16:56,125 Train Loss: 0.0001508, Val Loss: 0.0002323 +2025-03-18 11:16:56,125 Epoch 930/2000 +2025-03-18 11:19:40,113 Current Learning Rate: 0.0002730048 +2025-03-18 11:19:40,114 Train Loss: 0.0001506, Val Loss: 0.0002329 +2025-03-18 11:19:40,114 Epoch 931/2000 +2025-03-18 11:22:24,566 Current Learning Rate: 0.0002660351 +2025-03-18 11:22:24,567 Train Loss: 0.0001506, Val Loss: 0.0002301 +2025-03-18 11:22:24,567 Epoch 932/2000 +2025-03-18 11:25:08,991 Current Learning Rate: 0.0002591232 +2025-03-18 11:25:08,991 Train Loss: 0.0001505, Val Loss: 0.0002302 +2025-03-18 11:25:08,991 Epoch 933/2000 +2025-03-18 11:27:53,817 Current Learning Rate: 0.0002522707 +2025-03-18 11:27:53,817 Train Loss: 0.0001501, Val Loss: 0.0002307 +2025-03-18 11:27:53,818 Epoch 934/2000 +2025-03-18 11:30:38,504 Current Learning Rate: 0.0002454793 +2025-03-18 11:30:38,504 Train Loss: 0.0001492, Val Loss: 0.0002295 +2025-03-18 11:30:38,504 Epoch 935/2000 +2025-03-18 11:33:22,863 Current Learning Rate: 0.0002387507 +2025-03-18 11:33:22,990 Train Loss: 0.0001483, Val Loss: 0.0002281 +2025-03-18 11:33:22,990 Epoch 936/2000 +2025-03-18 11:36:07,599 Current Learning Rate: 0.0002320866 +2025-03-18 11:36:07,741 Train Loss: 0.0001476, Val Loss: 0.0002277 +2025-03-18 11:36:07,742 Epoch 937/2000 +2025-03-18 11:38:51,739 Current Learning Rate: 0.0002254886 +2025-03-18 11:38:51,740 Train Loss: 0.0001473, Val Loss: 0.0002290 +2025-03-18 11:38:51,740 Epoch 938/2000 +2025-03-18 11:41:36,001 Current Learning Rate: 0.0002189583 +2025-03-18 11:41:36,001 Train Loss: 0.0001472, Val Loss: 0.0002300 +2025-03-18 11:41:36,002 Epoch 939/2000 +2025-03-18 11:44:20,948 Current Learning Rate: 0.0002124974 +2025-03-18 11:44:20,948 Train Loss: 0.0001470, Val Loss: 0.0002306 +2025-03-18 11:44:20,949 Epoch 940/2000 +2025-03-18 
11:47:05,415 Current Learning Rate: 0.0002061074 +2025-03-18 11:47:05,416 Train Loss: 0.0001467, Val Loss: 0.0002309 +2025-03-18 11:47:05,416 Epoch 941/2000 +2025-03-18 11:49:50,209 Current Learning Rate: 0.0001997899 +2025-03-18 11:49:50,210 Train Loss: 0.0001463, Val Loss: 0.0002301 +2025-03-18 11:49:50,210 Epoch 942/2000 +2025-03-18 11:52:35,257 Current Learning Rate: 0.0001935465 +2025-03-18 11:52:35,257 Train Loss: 0.0001457, Val Loss: 0.0002289 +2025-03-18 11:52:35,257 Epoch 943/2000 +2025-03-18 11:55:19,858 Current Learning Rate: 0.0001873787 +2025-03-18 11:55:20,026 Train Loss: 0.0001453, Val Loss: 0.0002263 +2025-03-18 11:55:20,026 Epoch 944/2000 +2025-03-18 11:58:05,028 Current Learning Rate: 0.0001812880 +2025-03-18 11:58:05,164 Train Loss: 0.0001449, Val Loss: 0.0002253 +2025-03-18 11:58:05,164 Epoch 945/2000 +2025-03-18 12:00:49,692 Current Learning Rate: 0.0001752760 +2025-03-18 12:00:49,692 Train Loss: 0.0001446, Val Loss: 0.0002256 +2025-03-18 12:00:49,693 Epoch 946/2000 +2025-03-18 12:03:34,166 Current Learning Rate: 0.0001693441 +2025-03-18 12:03:34,166 Train Loss: 0.0001443, Val Loss: 0.0002258 +2025-03-18 12:03:34,166 Epoch 947/2000 +2025-03-18 12:06:18,425 Current Learning Rate: 0.0001634937 +2025-03-18 12:06:18,426 Train Loss: 0.0001442, Val Loss: 0.0002258 +2025-03-18 12:06:18,426 Epoch 948/2000 +2025-03-18 12:09:03,055 Current Learning Rate: 0.0001577264 +2025-03-18 12:09:03,056 Train Loss: 0.0001439, Val Loss: 0.0002261 +2025-03-18 12:09:03,056 Epoch 949/2000 +2025-03-18 12:11:47,470 Current Learning Rate: 0.0001520436 +2025-03-18 12:11:47,579 Train Loss: 0.0001436, Val Loss: 0.0002245 +2025-03-18 12:11:47,579 Epoch 950/2000 +2025-03-18 12:14:32,315 Current Learning Rate: 0.0001464466 +2025-03-18 12:14:32,472 Train Loss: 0.0001434, Val Loss: 0.0002234 +2025-03-18 12:14:32,472 Epoch 951/2000 +2025-03-18 12:17:17,452 Current Learning Rate: 0.0001409369 +2025-03-18 12:17:17,585 Train Loss: 0.0001432, Val Loss: 0.0002227 +2025-03-18 
12:17:17,585 Epoch 952/2000 +2025-03-18 12:20:02,415 Current Learning Rate: 0.0001355157 +2025-03-18 12:20:02,949 Train Loss: 0.0001430, Val Loss: 0.0002225 +2025-03-18 12:20:02,949 Epoch 953/2000 +2025-03-18 12:22:47,861 Current Learning Rate: 0.0001301845 +2025-03-18 12:22:47,996 Train Loss: 0.0001426, Val Loss: 0.0002224 +2025-03-18 12:22:47,997 Epoch 954/2000 +2025-03-18 12:25:32,817 Current Learning Rate: 0.0001249445 +2025-03-18 12:25:32,962 Train Loss: 0.0001420, Val Loss: 0.0002223 +2025-03-18 12:25:32,963 Epoch 955/2000 +2025-03-18 12:28:17,782 Current Learning Rate: 0.0001197970 +2025-03-18 12:28:17,920 Train Loss: 0.0001416, Val Loss: 0.0002221 +2025-03-18 12:28:17,920 Epoch 956/2000 +2025-03-18 12:31:02,493 Current Learning Rate: 0.0001147434 +2025-03-18 12:31:02,494 Train Loss: 0.0001413, Val Loss: 0.0002222 +2025-03-18 12:31:02,494 Epoch 957/2000 +2025-03-18 12:33:46,794 Current Learning Rate: 0.0001097848 +2025-03-18 12:33:46,795 Train Loss: 0.0001410, Val Loss: 0.0002224 +2025-03-18 12:33:46,795 Epoch 958/2000 +2025-03-18 12:36:31,421 Current Learning Rate: 0.0001049225 +2025-03-18 12:36:31,421 Train Loss: 0.0001407, Val Loss: 0.0002224 +2025-03-18 12:36:31,421 Epoch 959/2000 +2025-03-18 12:39:16,272 Current Learning Rate: 0.0001001577 +2025-03-18 12:39:16,410 Train Loss: 0.0001404, Val Loss: 0.0002221 +2025-03-18 12:39:16,410 Epoch 960/2000 +2025-03-18 12:42:01,256 Current Learning Rate: 0.0000954915 +2025-03-18 12:42:01,395 Train Loss: 0.0001402, Val Loss: 0.0002211 +2025-03-18 12:42:01,395 Epoch 961/2000 +2025-03-18 12:44:46,075 Current Learning Rate: 0.0000909251 +2025-03-18 12:44:46,198 Train Loss: 0.0001399, Val Loss: 0.0002205 +2025-03-18 12:44:46,198 Epoch 962/2000 +2025-03-18 12:47:30,883 Current Learning Rate: 0.0000864597 +2025-03-18 12:47:31,003 Train Loss: 0.0001397, Val Loss: 0.0002200 +2025-03-18 12:47:31,003 Epoch 963/2000 +2025-03-18 12:50:15,596 Current Learning Rate: 0.0000820963 +2025-03-18 12:50:15,741 Train Loss: 0.0001394, Val 
Loss: 0.0002197 +2025-03-18 12:50:15,742 Epoch 964/2000 +2025-03-18 12:53:00,687 Current Learning Rate: 0.0000778360 +2025-03-18 12:53:00,833 Train Loss: 0.0001392, Val Loss: 0.0002193 +2025-03-18 12:53:00,833 Epoch 965/2000 +2025-03-18 12:55:45,494 Current Learning Rate: 0.0000736799 +2025-03-18 12:55:45,609 Train Loss: 0.0001390, Val Loss: 0.0002191 +2025-03-18 12:55:45,609 Epoch 966/2000 +2025-03-18 12:58:30,535 Current Learning Rate: 0.0000696290 +2025-03-18 12:58:30,660 Train Loss: 0.0001388, Val Loss: 0.0002189 +2025-03-18 12:58:30,660 Epoch 967/2000 +2025-03-18 13:01:15,110 Current Learning Rate: 0.0000656842 +2025-03-18 13:01:15,238 Train Loss: 0.0001386, Val Loss: 0.0002188 +2025-03-18 13:01:15,238 Epoch 968/2000 +2025-03-18 13:04:00,123 Current Learning Rate: 0.0000618467 +2025-03-18 13:04:00,244 Train Loss: 0.0001384, Val Loss: 0.0002186 +2025-03-18 13:04:00,244 Epoch 969/2000 +2025-03-18 13:06:44,792 Current Learning Rate: 0.0000581172 +2025-03-18 13:06:44,906 Train Loss: 0.0001382, Val Loss: 0.0002185 +2025-03-18 13:06:44,906 Epoch 970/2000 +2025-03-18 13:09:29,179 Current Learning Rate: 0.0000544967 +2025-03-18 13:09:29,295 Train Loss: 0.0001380, Val Loss: 0.0002185 +2025-03-18 13:09:29,296 Epoch 971/2000 +2025-03-18 13:12:13,978 Current Learning Rate: 0.0000509862 +2025-03-18 13:12:14,109 Train Loss: 0.0001378, Val Loss: 0.0002185 +2025-03-18 13:12:14,110 Epoch 972/2000 +2025-03-18 13:14:58,395 Current Learning Rate: 0.0000475865 +2025-03-18 13:14:58,509 Train Loss: 0.0001377, Val Loss: 0.0002184 +2025-03-18 13:14:58,510 Epoch 973/2000 +2025-03-18 13:17:42,616 Current Learning Rate: 0.0000442984 +2025-03-18 13:17:42,729 Train Loss: 0.0001375, Val Loss: 0.0002183 +2025-03-18 13:17:42,730 Epoch 974/2000 +2025-03-18 13:20:27,144 Current Learning Rate: 0.0000411227 +2025-03-18 13:20:27,262 Train Loss: 0.0001374, Val Loss: 0.0002180 +2025-03-18 13:20:27,262 Epoch 975/2000 +2025-03-18 13:23:11,689 Current Learning Rate: 0.0000380602 +2025-03-18 
13:23:11,803 Train Loss: 0.0001372, Val Loss: 0.0002178 +2025-03-18 13:23:11,803 Epoch 976/2000 +2025-03-18 13:25:56,678 Current Learning Rate: 0.0000351118 +2025-03-18 13:25:56,842 Train Loss: 0.0001371, Val Loss: 0.0002175 +2025-03-18 13:25:56,843 Epoch 977/2000 +2025-03-18 13:28:41,250 Current Learning Rate: 0.0000322780 +2025-03-18 13:28:41,426 Train Loss: 0.0001369, Val Loss: 0.0002173 +2025-03-18 13:28:41,426 Epoch 978/2000 +2025-03-18 13:31:25,883 Current Learning Rate: 0.0000295596 +2025-03-18 13:31:26,012 Train Loss: 0.0001368, Val Loss: 0.0002171 +2025-03-18 13:31:26,013 Epoch 979/2000 +2025-03-18 13:34:10,371 Current Learning Rate: 0.0000269573 +2025-03-18 13:34:10,488 Train Loss: 0.0001367, Val Loss: 0.0002170 +2025-03-18 13:34:10,488 Epoch 980/2000 +2025-03-18 13:36:55,066 Current Learning Rate: 0.0000244717 +2025-03-18 13:36:55,188 Train Loss: 0.0001365, Val Loss: 0.0002169 +2025-03-18 13:36:55,188 Epoch 981/2000 +2025-03-18 13:39:39,707 Current Learning Rate: 0.0000221035 +2025-03-18 13:39:39,857 Train Loss: 0.0001364, Val Loss: 0.0002168 +2025-03-18 13:39:39,857 Epoch 982/2000 +2025-03-18 13:42:24,062 Current Learning Rate: 0.0000198532 +2025-03-18 13:42:24,240 Train Loss: 0.0001363, Val Loss: 0.0002167 +2025-03-18 13:42:24,240 Epoch 983/2000 +2025-03-18 13:45:08,709 Current Learning Rate: 0.0000177213 +2025-03-18 13:45:08,826 Train Loss: 0.0001362, Val Loss: 0.0002166 +2025-03-18 13:45:08,826 Epoch 984/2000 +2025-03-18 13:47:53,149 Current Learning Rate: 0.0000157084 +2025-03-18 13:47:53,289 Train Loss: 0.0001361, Val Loss: 0.0002165 +2025-03-18 13:47:53,289 Epoch 985/2000 +2025-03-18 13:50:37,584 Current Learning Rate: 0.0000138150 +2025-03-18 13:50:37,710 Train Loss: 0.0001360, Val Loss: 0.0002165 +2025-03-18 13:50:37,711 Epoch 986/2000 +2025-03-18 13:53:21,777 Current Learning Rate: 0.0000120416 +2025-03-18 13:53:21,902 Train Loss: 0.0001359, Val Loss: 0.0002164 +2025-03-18 13:53:21,902 Epoch 987/2000 +2025-03-18 13:56:07,041 Current Learning 
Rate: 0.0000103886 +2025-03-18 13:56:07,170 Train Loss: 0.0001359, Val Loss: 0.0002164 +2025-03-18 13:56:07,170 Epoch 988/2000 +2025-03-18 13:58:52,110 Current Learning Rate: 0.0000088564 +2025-03-18 13:58:52,250 Train Loss: 0.0001358, Val Loss: 0.0002164 +2025-03-18 13:58:52,250 Epoch 989/2000 +2025-03-18 14:01:36,465 Current Learning Rate: 0.0000074453 +2025-03-18 14:01:36,636 Train Loss: 0.0001357, Val Loss: 0.0002163 +2025-03-18 14:01:36,637 Epoch 990/2000 +2025-03-18 14:04:20,821 Current Learning Rate: 0.0000061558 +2025-03-18 14:04:20,934 Train Loss: 0.0001357, Val Loss: 0.0002162 +2025-03-18 14:04:20,934 Epoch 991/2000 +2025-03-18 14:07:05,231 Current Learning Rate: 0.0000049882 +2025-03-18 14:07:05,372 Train Loss: 0.0001356, Val Loss: 0.0002162 +2025-03-18 14:07:05,372 Epoch 992/2000 +2025-03-18 14:09:50,582 Current Learning Rate: 0.0000039426 +2025-03-18 14:09:50,711 Train Loss: 0.0001355, Val Loss: 0.0002161 +2025-03-18 14:09:50,712 Epoch 993/2000 +2025-03-18 14:12:35,411 Current Learning Rate: 0.0000030195 +2025-03-18 14:12:35,535 Train Loss: 0.0001355, Val Loss: 0.0002160 +2025-03-18 14:12:35,535 Epoch 994/2000 +2025-03-18 14:15:20,210 Current Learning Rate: 0.0000022190 +2025-03-18 14:15:20,342 Train Loss: 0.0001354, Val Loss: 0.0002159 +2025-03-18 14:15:20,342 Epoch 995/2000 +2025-03-18 14:18:05,003 Current Learning Rate: 0.0000015413 +2025-03-18 14:18:05,154 Train Loss: 0.0001354, Val Loss: 0.0002159 +2025-03-18 14:18:05,154 Epoch 996/2000 +2025-03-18 14:20:49,945 Current Learning Rate: 0.0000009866 +2025-03-18 14:20:50,094 Train Loss: 0.0001353, Val Loss: 0.0002158 +2025-03-18 14:20:50,094 Epoch 997/2000 +2025-03-18 14:23:34,690 Current Learning Rate: 0.0000005551 +2025-03-18 14:23:34,835 Train Loss: 0.0001353, Val Loss: 0.0002158 +2025-03-18 14:23:34,835 Epoch 998/2000 +2025-03-18 14:26:19,540 Current Learning Rate: 0.0000002467 +2025-03-18 14:26:19,663 Train Loss: 0.0001353, Val Loss: 0.0002157 +2025-03-18 14:26:19,663 Epoch 999/2000 +2025-03-18 
14:29:04,026 Current Learning Rate: 0.0000000617 +2025-03-18 14:29:04,167 Train Loss: 0.0001353, Val Loss: 0.0002157 +2025-03-18 14:29:04,167 Epoch 1000/2000 +2025-03-18 14:31:48,682 Current Learning Rate: 0.0000000000 +2025-03-18 14:31:48,810 Train Loss: 0.0001352, Val Loss: 0.0002157 +2025-03-18 14:31:48,810 Epoch 1001/2000 +2025-03-18 14:34:33,445 Current Learning Rate: 0.0000000617 +2025-03-18 14:34:33,446 Train Loss: 0.0001352, Val Loss: 0.0002157 +2025-03-18 14:34:33,446 Epoch 1002/2000 +2025-03-18 14:37:17,928 Current Learning Rate: 0.0000002467 +2025-03-18 14:37:18,053 Train Loss: 0.0001352, Val Loss: 0.0002157 +2025-03-18 14:37:18,053 Epoch 1003/2000 +2025-03-18 14:40:02,395 Current Learning Rate: 0.0000005551 +2025-03-18 14:40:02,396 Train Loss: 0.0001353, Val Loss: 0.0002157 +2025-03-18 14:40:02,396 Epoch 1004/2000 +2025-03-18 14:42:46,856 Current Learning Rate: 0.0000009866 +2025-03-18 14:42:46,857 Train Loss: 0.0001353, Val Loss: 0.0002157 +2025-03-18 14:42:46,857 Epoch 1005/2000 +2025-03-18 14:45:31,391 Current Learning Rate: 0.0000015413 +2025-03-18 14:45:31,392 Train Loss: 0.0001353, Val Loss: 0.0002158 +2025-03-18 14:45:31,392 Epoch 1006/2000 +2025-03-18 14:48:15,952 Current Learning Rate: 0.0000022190 +2025-03-18 14:48:15,952 Train Loss: 0.0001353, Val Loss: 0.0002158 +2025-03-18 14:48:15,953 Epoch 1007/2000 +2025-03-18 14:51:00,618 Current Learning Rate: 0.0000030195 +2025-03-18 14:51:00,619 Train Loss: 0.0001354, Val Loss: 0.0002159 +2025-03-18 14:51:00,619 Epoch 1008/2000 +2025-03-18 14:53:45,268 Current Learning Rate: 0.0000039426 +2025-03-18 14:53:45,269 Train Loss: 0.0001354, Val Loss: 0.0002159 +2025-03-18 14:53:45,269 Epoch 1009/2000 +2025-03-18 14:56:29,818 Current Learning Rate: 0.0000049882 +2025-03-18 14:56:29,818 Train Loss: 0.0001354, Val Loss: 0.0002160 +2025-03-18 14:56:29,818 Epoch 1010/2000 +2025-03-18 14:59:14,185 Current Learning Rate: 0.0000061558 +2025-03-18 14:59:14,185 Train Loss: 0.0001355, Val Loss: 0.0002160 +2025-03-18 
14:59:14,185 Epoch 1011/2000 +2025-03-18 15:01:58,716 Current Learning Rate: 0.0000074453 +2025-03-18 15:01:58,717 Train Loss: 0.0001355, Val Loss: 0.0002161 +2025-03-18 15:01:58,717 Epoch 1012/2000 +2025-03-18 15:04:43,221 Current Learning Rate: 0.0000088564 +2025-03-18 15:04:43,221 Train Loss: 0.0001355, Val Loss: 0.0002161 +2025-03-18 15:04:43,221 Epoch 1013/2000 +2025-03-18 15:07:27,801 Current Learning Rate: 0.0000103886 +2025-03-18 15:07:27,801 Train Loss: 0.0001356, Val Loss: 0.0002162 +2025-03-18 15:07:27,801 Epoch 1014/2000 +2025-03-18 15:10:12,405 Current Learning Rate: 0.0000120416 +2025-03-18 15:10:12,405 Train Loss: 0.0001356, Val Loss: 0.0002162 +2025-03-18 15:10:12,405 Epoch 1015/2000 +2025-03-18 15:12:56,829 Current Learning Rate: 0.0000138150 +2025-03-18 15:12:56,830 Train Loss: 0.0001357, Val Loss: 0.0002162 +2025-03-18 15:12:56,830 Epoch 1016/2000 +2025-03-18 15:15:41,235 Current Learning Rate: 0.0000157084 +2025-03-18 15:15:41,236 Train Loss: 0.0001357, Val Loss: 0.0002162 +2025-03-18 15:15:41,236 Epoch 1017/2000 +2025-03-18 15:18:25,926 Current Learning Rate: 0.0000177213 +2025-03-18 15:18:25,926 Train Loss: 0.0001358, Val Loss: 0.0002162 +2025-03-18 15:18:25,926 Epoch 1018/2000 +2025-03-18 15:21:10,046 Current Learning Rate: 0.0000198532 +2025-03-18 15:21:10,046 Train Loss: 0.0001358, Val Loss: 0.0002163 +2025-03-18 15:21:10,046 Epoch 1019/2000 +2025-03-18 15:23:54,391 Current Learning Rate: 0.0000221035 +2025-03-18 15:23:54,392 Train Loss: 0.0001359, Val Loss: 0.0002163 +2025-03-18 15:23:54,392 Epoch 1020/2000 +2025-03-18 15:26:38,799 Current Learning Rate: 0.0000244717 +2025-03-18 15:26:38,799 Train Loss: 0.0001359, Val Loss: 0.0002164 +2025-03-18 15:26:38,800 Epoch 1021/2000 +2025-03-18 15:29:23,832 Current Learning Rate: 0.0000269573 +2025-03-18 15:29:23,833 Train Loss: 0.0001360, Val Loss: 0.0002165 +2025-03-18 15:29:23,833 Epoch 1022/2000 +2025-03-18 15:32:08,280 Current Learning Rate: 0.0000295596 +2025-03-18 15:32:08,280 Train Loss: 
0.0001360, Val Loss: 0.0002165 +2025-03-18 15:32:08,280 Epoch 1023/2000 +2025-03-18 15:34:51,963 Current Learning Rate: 0.0000322780 +2025-03-18 15:34:51,964 Train Loss: 0.0001361, Val Loss: 0.0002166 +2025-03-18 15:34:51,964 Epoch 1024/2000 +2025-03-18 15:37:36,709 Current Learning Rate: 0.0000351118 +2025-03-18 15:37:36,710 Train Loss: 0.0001362, Val Loss: 0.0002167 +2025-03-18 15:37:36,710 Epoch 1025/2000 +2025-03-18 15:40:21,137 Current Learning Rate: 0.0000380602 +2025-03-18 15:40:21,137 Train Loss: 0.0001362, Val Loss: 0.0002168 +2025-03-18 15:40:21,138 Epoch 1026/2000 +2025-03-18 15:43:05,400 Current Learning Rate: 0.0000411227 +2025-03-18 15:43:05,400 Train Loss: 0.0001363, Val Loss: 0.0002169 +2025-03-18 15:43:05,401 Epoch 1027/2000 +2025-03-18 15:45:50,038 Current Learning Rate: 0.0000442984 +2025-03-18 15:45:50,038 Train Loss: 0.0001364, Val Loss: 0.0002169 +2025-03-18 15:45:50,038 Epoch 1028/2000 +2025-03-18 15:48:34,495 Current Learning Rate: 0.0000475865 +2025-03-18 15:48:34,496 Train Loss: 0.0001364, Val Loss: 0.0002170 +2025-03-18 15:48:34,496 Epoch 1029/2000 +2025-03-18 15:51:18,698 Current Learning Rate: 0.0000509862 +2025-03-18 15:51:18,698 Train Loss: 0.0001365, Val Loss: 0.0002171 +2025-03-18 15:51:18,699 Epoch 1030/2000 +2025-03-18 15:54:02,945 Current Learning Rate: 0.0000544967 +2025-03-18 15:54:02,946 Train Loss: 0.0001366, Val Loss: 0.0002172 +2025-03-18 15:54:02,946 Epoch 1031/2000 +2025-03-18 15:56:47,762 Current Learning Rate: 0.0000581172 +2025-03-18 15:56:47,763 Train Loss: 0.0001367, Val Loss: 0.0002173 +2025-03-18 15:56:47,763 Epoch 1032/2000 +2025-03-18 15:59:32,107 Current Learning Rate: 0.0000618467 +2025-03-18 15:59:32,108 Train Loss: 0.0001367, Val Loss: 0.0002174 +2025-03-18 15:59:32,108 Epoch 1033/2000 +2025-03-18 16:02:16,382 Current Learning Rate: 0.0000656842 +2025-03-18 16:02:16,382 Train Loss: 0.0001368, Val Loss: 0.0002174 +2025-03-18 16:02:16,383 Epoch 1034/2000 +2025-03-18 16:05:00,604 Current Learning Rate: 
0.0000696290 +2025-03-18 16:05:00,604 Train Loss: 0.0001369, Val Loss: 0.0002175 +2025-03-18 16:05:00,605 Epoch 1035/2000 +2025-03-18 16:07:45,438 Current Learning Rate: 0.0000736799 +2025-03-18 16:07:45,438 Train Loss: 0.0001370, Val Loss: 0.0002177 +2025-03-18 16:07:45,438 Epoch 1036/2000 +2025-03-18 16:10:29,543 Current Learning Rate: 0.0000778360 +2025-03-18 16:10:29,544 Train Loss: 0.0001371, Val Loss: 0.0002178 +2025-03-18 16:10:29,544 Epoch 1037/2000 +2025-03-18 16:13:13,790 Current Learning Rate: 0.0000820963 +2025-03-18 16:13:13,791 Train Loss: 0.0001372, Val Loss: 0.0002179 +2025-03-18 16:13:13,791 Epoch 1038/2000 +2025-03-18 16:15:58,719 Current Learning Rate: 0.0000864597 +2025-03-18 16:15:58,720 Train Loss: 0.0001373, Val Loss: 0.0002181 +2025-03-18 16:15:58,720 Epoch 1039/2000 +2025-03-18 16:18:43,318 Current Learning Rate: 0.0000909251 +2025-03-18 16:18:43,319 Train Loss: 0.0001375, Val Loss: 0.0002184 +2025-03-18 16:18:43,319 Epoch 1040/2000 +2025-03-18 16:21:27,845 Current Learning Rate: 0.0000954915 +2025-03-18 16:21:27,845 Train Loss: 0.0001376, Val Loss: 0.0002186 +2025-03-18 16:21:27,846 Epoch 1041/2000 +2025-03-18 16:24:11,961 Current Learning Rate: 0.0001001577 +2025-03-18 16:24:11,962 Train Loss: 0.0001378, Val Loss: 0.0002185 +2025-03-18 16:24:11,962 Epoch 1042/2000 +2025-03-18 16:26:56,101 Current Learning Rate: 0.0001049225 +2025-03-18 16:26:56,101 Train Loss: 0.0001379, Val Loss: 0.0002189 +2025-03-18 16:26:56,101 Epoch 1043/2000 +2025-03-18 16:29:40,512 Current Learning Rate: 0.0001097848 +2025-03-18 16:29:40,513 Train Loss: 0.0001381, Val Loss: 0.0002194 +2025-03-18 16:29:40,513 Epoch 1044/2000 +2025-03-18 16:32:24,861 Current Learning Rate: 0.0001147434 +2025-03-18 16:32:24,862 Train Loss: 0.0001382, Val Loss: 0.0002193 +2025-03-18 16:32:24,862 Epoch 1045/2000 +2025-03-18 16:35:09,537 Current Learning Rate: 0.0001197970 +2025-03-18 16:35:09,537 Train Loss: 0.0001384, Val Loss: 0.0002190 +2025-03-18 16:35:09,538 Epoch 1046/2000 
+2025-03-18 16:37:53,932 Current Learning Rate: 0.0001249445 +2025-03-18 16:37:53,932 Train Loss: 0.0001386, Val Loss: 0.0002194 +2025-03-18 16:37:53,932 Epoch 1047/2000 +2025-03-18 16:40:38,101 Current Learning Rate: 0.0001301845 +2025-03-18 16:40:38,102 Train Loss: 0.0001388, Val Loss: 0.0002194 +2025-03-18 16:40:38,102 Epoch 1048/2000 +2025-03-18 16:43:22,251 Current Learning Rate: 0.0001355157 +2025-03-18 16:43:22,252 Train Loss: 0.0001390, Val Loss: 0.0002196 +2025-03-18 16:43:22,252 Epoch 1049/2000 +2025-03-18 16:46:06,731 Current Learning Rate: 0.0001409369 +2025-03-18 16:46:06,732 Train Loss: 0.0001392, Val Loss: 0.0002201 +2025-03-18 16:46:06,732 Epoch 1050/2000 +2025-03-18 16:48:51,250 Current Learning Rate: 0.0001464466 +2025-03-18 16:48:51,250 Train Loss: 0.0001394, Val Loss: 0.0002207 +2025-03-18 16:48:51,251 Epoch 1051/2000 +2025-03-18 16:51:36,040 Current Learning Rate: 0.0001520436 +2025-03-18 16:51:36,040 Train Loss: 0.0001395, Val Loss: 0.0002206 +2025-03-18 16:51:36,040 Epoch 1052/2000 +2025-03-18 16:54:20,383 Current Learning Rate: 0.0001577264 +2025-03-18 16:54:20,383 Train Loss: 0.0001397, Val Loss: 0.0002209 +2025-03-18 16:54:20,383 Epoch 1053/2000 +2025-03-18 16:57:04,768 Current Learning Rate: 0.0001634937 +2025-03-18 16:57:04,769 Train Loss: 0.0001399, Val Loss: 0.0002215 +2025-03-18 16:57:04,769 Epoch 1054/2000 +2025-03-18 16:59:49,485 Current Learning Rate: 0.0001693441 +2025-03-18 16:59:49,485 Train Loss: 0.0001402, Val Loss: 0.0002214 +2025-03-18 16:59:49,486 Epoch 1055/2000 +2025-03-18 17:02:34,055 Current Learning Rate: 0.0001752760 +2025-03-18 17:02:34,056 Train Loss: 0.0001407, Val Loss: 0.0002215 +2025-03-18 17:02:34,056 Epoch 1056/2000 +2025-03-18 17:05:18,742 Current Learning Rate: 0.0001812880 +2025-03-18 17:05:18,743 Train Loss: 0.0001410, Val Loss: 0.0002215 +2025-03-18 17:05:18,743 Epoch 1057/2000 +2025-03-18 17:08:03,054 Current Learning Rate: 0.0001873787 +2025-03-18 17:08:03,055 Train Loss: 0.0001412, Val Loss: 0.0002212 
+2025-03-18 17:08:03,055 Epoch 1058/2000 +2025-03-18 17:10:47,420 Current Learning Rate: 0.0001935465 +2025-03-18 17:10:47,420 Train Loss: 0.0001413, Val Loss: 0.0002214 +2025-03-18 17:10:47,421 Epoch 1059/2000 +2025-03-18 17:13:31,868 Current Learning Rate: 0.0001997899 +2025-03-18 17:13:31,868 Train Loss: 0.0001413, Val Loss: 0.0002220 +2025-03-18 17:13:31,868 Epoch 1060/2000 +2025-03-18 17:16:16,865 Current Learning Rate: 0.0002061074 +2025-03-18 17:16:16,866 Train Loss: 0.0001415, Val Loss: 0.0002231 +2025-03-18 17:16:16,866 Epoch 1061/2000 +2025-03-18 17:19:01,238 Current Learning Rate: 0.0002124974 +2025-03-18 17:19:01,238 Train Loss: 0.0001419, Val Loss: 0.0002228 +2025-03-18 17:19:01,239 Epoch 1062/2000 +2025-03-18 17:21:45,616 Current Learning Rate: 0.0002189583 +2025-03-18 17:21:45,617 Train Loss: 0.0001424, Val Loss: 0.0002226 +2025-03-18 17:21:45,617 Epoch 1063/2000 +2025-03-18 17:24:30,255 Current Learning Rate: 0.0002254886 +2025-03-18 17:24:30,255 Train Loss: 0.0001426, Val Loss: 0.0002229 +2025-03-18 17:24:30,255 Epoch 1064/2000 +2025-03-18 17:27:14,856 Current Learning Rate: 0.0002320866 +2025-03-18 17:27:14,856 Train Loss: 0.0001429, Val Loss: 0.0002236 +2025-03-18 17:27:14,856 Epoch 1065/2000 +2025-03-18 17:29:59,906 Current Learning Rate: 0.0002387507 +2025-03-18 17:29:59,907 Train Loss: 0.0001432, Val Loss: 0.0002237 +2025-03-18 17:29:59,907 Epoch 1066/2000 +2025-03-18 17:32:44,470 Current Learning Rate: 0.0002454793 +2025-03-18 17:32:44,471 Train Loss: 0.0001433, Val Loss: 0.0002236 +2025-03-18 17:32:44,471 Epoch 1067/2000 +2025-03-18 17:35:28,871 Current Learning Rate: 0.0002522707 +2025-03-18 17:35:28,872 Train Loss: 0.0001433, Val Loss: 0.0002236 +2025-03-18 17:35:28,872 Epoch 1068/2000 +2025-03-18 17:38:13,510 Current Learning Rate: 0.0002591232 +2025-03-18 17:38:13,510 Train Loss: 0.0001436, Val Loss: 0.0002239 +2025-03-18 17:38:13,511 Epoch 1069/2000 +2025-03-18 17:40:58,131 Current Learning Rate: 0.0002660351 +2025-03-18 17:40:58,132 
Train Loss: 0.0001441, Val Loss: 0.0002242 +2025-03-18 17:40:58,132 Epoch 1070/2000 +2025-03-18 17:43:42,437 Current Learning Rate: 0.0002730048 +2025-03-18 17:43:42,438 Train Loss: 0.0001448, Val Loss: 0.0002243 +2025-03-18 17:43:42,438 Epoch 1071/2000 +2025-03-18 17:46:27,355 Current Learning Rate: 0.0002800304 +2025-03-18 17:46:27,355 Train Loss: 0.0001456, Val Loss: 0.0002252 +2025-03-18 17:46:27,355 Epoch 1072/2000 +2025-03-18 17:49:11,633 Current Learning Rate: 0.0002871104 +2025-03-18 17:49:11,634 Train Loss: 0.0001466, Val Loss: 0.0002254 +2025-03-18 17:49:11,634 Epoch 1073/2000 +2025-03-18 17:51:56,173 Current Learning Rate: 0.0002942428 +2025-03-18 17:51:56,173 Train Loss: 0.0001470, Val Loss: 0.0002256 +2025-03-18 17:51:56,174 Epoch 1074/2000 +2025-03-18 17:54:40,775 Current Learning Rate: 0.0003014261 +2025-03-18 17:54:40,776 Train Loss: 0.0001467, Val Loss: 0.0002270 +2025-03-18 17:54:40,776 Epoch 1075/2000 +2025-03-18 17:57:25,286 Current Learning Rate: 0.0003086583 +2025-03-18 17:57:25,287 Train Loss: 0.0001464, Val Loss: 0.0002282 +2025-03-18 17:57:25,287 Epoch 1076/2000 +2025-03-18 18:00:09,916 Current Learning Rate: 0.0003159377 +2025-03-18 18:00:09,916 Train Loss: 0.0001465, Val Loss: 0.0002282 +2025-03-18 18:00:09,916 Epoch 1077/2000 +2025-03-18 18:02:54,805 Current Learning Rate: 0.0003232626 +2025-03-18 18:02:54,806 Train Loss: 0.0001468, Val Loss: 0.0002282 +2025-03-18 18:02:54,806 Epoch 1078/2000 +2025-03-18 18:05:38,966 Current Learning Rate: 0.0003306310 +2025-03-18 18:05:38,967 Train Loss: 0.0001473, Val Loss: 0.0002278 +2025-03-18 18:05:38,967 Epoch 1079/2000 +2025-03-18 18:08:23,729 Current Learning Rate: 0.0003380413 +2025-03-18 18:08:23,730 Train Loss: 0.0001481, Val Loss: 0.0002289 +2025-03-18 18:08:23,730 Epoch 1080/2000 +2025-03-18 18:11:08,167 Current Learning Rate: 0.0003454915 +2025-03-18 18:11:08,167 Train Loss: 0.0001488, Val Loss: 0.0002287 +2025-03-18 18:11:08,168 Epoch 1081/2000 +2025-03-18 18:13:52,702 Current Learning 
Rate: 0.0003529798 +2025-03-18 18:13:52,703 Train Loss: 0.0001494, Val Loss: 0.0002280 +2025-03-18 18:13:52,703 Epoch 1082/2000 +2025-03-18 18:16:36,973 Current Learning Rate: 0.0003605044 +2025-03-18 18:16:36,973 Train Loss: 0.0001496, Val Loss: 0.0002305 +2025-03-18 18:16:36,973 Epoch 1083/2000 +2025-03-18 18:19:22,080 Current Learning Rate: 0.0003680635 +2025-03-18 18:19:22,080 Train Loss: 0.0001500, Val Loss: 0.0002311 +2025-03-18 18:19:22,080 Epoch 1084/2000 +2025-03-18 18:22:06,639 Current Learning Rate: 0.0003756551 +2025-03-18 18:22:06,640 Train Loss: 0.0001498, Val Loss: 0.0002317 +2025-03-18 18:22:06,640 Epoch 1085/2000 +2025-03-18 18:24:51,310 Current Learning Rate: 0.0003832773 +2025-03-18 18:24:51,311 Train Loss: 0.0001496, Val Loss: 0.0002321 +2025-03-18 18:24:51,311 Epoch 1086/2000 +2025-03-18 18:27:35,723 Current Learning Rate: 0.0003909284 +2025-03-18 18:27:35,724 Train Loss: 0.0001499, Val Loss: 0.0002315 +2025-03-18 18:27:35,724 Epoch 1087/2000 +2025-03-18 18:30:20,090 Current Learning Rate: 0.0003986064 +2025-03-18 18:30:20,090 Train Loss: 0.0001507, Val Loss: 0.0002311 +2025-03-18 18:30:20,091 Epoch 1088/2000 +2025-03-18 18:33:04,655 Current Learning Rate: 0.0004063093 +2025-03-18 18:33:04,655 Train Loss: 0.0001515, Val Loss: 0.0002336 +2025-03-18 18:33:04,656 Epoch 1089/2000 +2025-03-18 18:35:49,122 Current Learning Rate: 0.0004140354 +2025-03-18 18:35:49,123 Train Loss: 0.0001516, Val Loss: 0.0002336 +2025-03-18 18:35:49,123 Epoch 1090/2000 +2025-03-18 18:38:33,934 Current Learning Rate: 0.0004217828 +2025-03-18 18:38:33,934 Train Loss: 0.0001516, Val Loss: 0.0002326 +2025-03-18 18:38:33,934 Epoch 1091/2000 +2025-03-18 18:41:18,692 Current Learning Rate: 0.0004295494 +2025-03-18 18:41:18,693 Train Loss: 0.0001518, Val Loss: 0.0002351 +2025-03-18 18:41:18,693 Epoch 1092/2000 +2025-03-18 18:44:03,492 Current Learning Rate: 0.0004373334 +2025-03-18 18:44:03,492 Train Loss: 0.0001523, Val Loss: 0.0002402 +2025-03-18 18:44:03,493 Epoch 1093/2000 
+2025-03-18 18:46:48,126 Current Learning Rate: 0.0004451328 +2025-03-18 18:46:48,127 Train Loss: 0.0001527, Val Loss: 0.0002397 +2025-03-18 18:46:48,127 Epoch 1094/2000 +2025-03-18 18:49:32,959 Current Learning Rate: 0.0004529458 +2025-03-18 18:49:32,960 Train Loss: 0.0001531, Val Loss: 0.0002366 +2025-03-18 18:49:32,960 Epoch 1095/2000 +2025-03-18 18:52:17,704 Current Learning Rate: 0.0004607705 +2025-03-18 18:52:17,704 Train Loss: 0.0001534, Val Loss: 0.0002355 +2025-03-18 18:52:17,704 Epoch 1096/2000 +2025-03-18 18:55:02,176 Current Learning Rate: 0.0004686047 +2025-03-18 18:55:02,176 Train Loss: 0.0001540, Val Loss: 0.0002368 +2025-03-18 18:55:02,177 Epoch 1097/2000 +2025-03-18 18:57:46,603 Current Learning Rate: 0.0004764468 +2025-03-18 18:57:46,603 Train Loss: 0.0001545, Val Loss: 0.0002349 +2025-03-18 18:57:46,603 Epoch 1098/2000 +2025-03-18 19:00:31,480 Current Learning Rate: 0.0004842946 +2025-03-18 19:00:31,481 Train Loss: 0.0001549, Val Loss: 0.0002362 +2025-03-18 19:00:31,481 Epoch 1099/2000 +2025-03-18 19:03:16,441 Current Learning Rate: 0.0004921463 +2025-03-18 19:03:16,442 Train Loss: 0.0001546, Val Loss: 0.0002358 +2025-03-18 19:03:16,442 Epoch 1100/2000 +2025-03-18 19:06:00,567 Current Learning Rate: 0.0005000000 +2025-03-18 19:06:00,567 Train Loss: 0.0001547, Val Loss: 0.0002377 +2025-03-18 19:06:00,567 Epoch 1101/2000 +2025-03-18 19:08:45,040 Current Learning Rate: 0.0005078537 +2025-03-18 19:08:45,041 Train Loss: 0.0001558, Val Loss: 0.0002369 +2025-03-18 19:08:45,041 Epoch 1102/2000 +2025-03-18 19:11:29,855 Current Learning Rate: 0.0005157054 +2025-03-18 19:11:29,855 Train Loss: 0.0001562, Val Loss: 0.0002372 +2025-03-18 19:11:29,855 Epoch 1103/2000 +2025-03-18 19:14:14,473 Current Learning Rate: 0.0005235532 +2025-03-18 19:14:14,474 Train Loss: 0.0001564, Val Loss: 0.0002370 +2025-03-18 19:14:14,474 Epoch 1104/2000 +2025-03-18 19:16:58,730 Current Learning Rate: 0.0005313953 +2025-03-18 19:16:58,731 Train Loss: 0.0001564, Val Loss: 0.0002374 
+2025-03-18 19:16:58,731 Epoch 1105/2000 +2025-03-18 19:19:43,197 Current Learning Rate: 0.0005392295 +2025-03-18 19:19:43,197 Train Loss: 0.0001569, Val Loss: 0.0002359 +2025-03-18 19:19:43,197 Epoch 1106/2000 +2025-03-18 19:22:27,962 Current Learning Rate: 0.0005470542 +2025-03-18 19:22:27,962 Train Loss: 0.0001576, Val Loss: 0.0002366 +2025-03-18 19:22:27,962 Epoch 1107/2000 +2025-03-18 19:25:13,188 Current Learning Rate: 0.0005548672 +2025-03-18 19:25:13,188 Train Loss: 0.0001585, Val Loss: 0.0002395 +2025-03-18 19:25:13,188 Epoch 1108/2000 +2025-03-18 19:27:57,351 Current Learning Rate: 0.0005626666 +2025-03-18 19:27:57,352 Train Loss: 0.0001596, Val Loss: 0.0002408 +2025-03-18 19:27:57,352 Epoch 1109/2000 +2025-03-18 19:30:42,039 Current Learning Rate: 0.0005704506 +2025-03-18 19:30:42,040 Train Loss: 0.0001599, Val Loss: 0.0002407 +2025-03-18 19:30:42,040 Epoch 1110/2000 +2025-03-18 19:33:26,254 Current Learning Rate: 0.0005782172 +2025-03-18 19:33:26,255 Train Loss: 0.0001602, Val Loss: 0.0002435 +2025-03-18 19:33:26,255 Epoch 1111/2000 +2025-03-18 19:36:10,429 Current Learning Rate: 0.0005859646 +2025-03-18 19:36:10,430 Train Loss: 0.0001604, Val Loss: 0.0002456 +2025-03-18 19:36:10,430 Epoch 1112/2000 +2025-03-18 19:38:54,848 Current Learning Rate: 0.0005936907 +2025-03-18 19:38:54,849 Train Loss: 0.0001606, Val Loss: 0.0002480 +2025-03-18 19:38:54,849 Epoch 1113/2000 +2025-03-18 19:41:39,370 Current Learning Rate: 0.0006013936 +2025-03-18 19:41:39,370 Train Loss: 0.0001604, Val Loss: 0.0002497 +2025-03-18 19:41:39,370 Epoch 1114/2000 +2025-03-18 19:44:23,788 Current Learning Rate: 0.0006090716 +2025-03-18 19:44:23,788 Train Loss: 0.0001615, Val Loss: 0.0002456 +2025-03-18 19:44:23,788 Epoch 1115/2000 +2025-03-18 19:47:08,486 Current Learning Rate: 0.0006167227 +2025-03-18 19:47:08,486 Train Loss: 0.0001616, Val Loss: 0.0002421 +2025-03-18 19:47:08,486 Epoch 1116/2000 +2025-03-18 19:49:52,992 Current Learning Rate: 0.0006243449 +2025-03-18 19:49:52,992 
Train Loss: 0.0001618, Val Loss: 0.0002418 +2025-03-18 19:49:52,993 Epoch 1117/2000 +2025-03-18 19:52:37,809 Current Learning Rate: 0.0006319365 +2025-03-18 19:52:37,809 Train Loss: 0.0001627, Val Loss: 0.0002460 +2025-03-18 19:52:37,809 Epoch 1118/2000 +2025-03-18 19:55:22,270 Current Learning Rate: 0.0006394956 +2025-03-18 19:55:22,270 Train Loss: 0.0001635, Val Loss: 0.0002433 +2025-03-18 19:55:22,270 Epoch 1119/2000 +2025-03-18 19:58:07,351 Current Learning Rate: 0.0006470202 +2025-03-18 19:58:07,352 Train Loss: 0.0001631, Val Loss: 0.0002424 +2025-03-18 19:58:07,352 Epoch 1120/2000 +2025-03-18 20:00:52,440 Current Learning Rate: 0.0006545085 +2025-03-18 20:00:52,440 Train Loss: 0.0001627, Val Loss: 0.0002421 +2025-03-18 20:00:52,441 Epoch 1121/2000 +2025-03-18 20:03:37,058 Current Learning Rate: 0.0006619587 +2025-03-18 20:03:37,058 Train Loss: 0.0001641, Val Loss: 0.0002416 +2025-03-18 20:03:37,058 Epoch 1122/2000 +2025-03-18 20:06:21,412 Current Learning Rate: 0.0006693690 +2025-03-18 20:06:21,412 Train Loss: 0.0001653, Val Loss: 0.0002427 +2025-03-18 20:06:21,412 Epoch 1123/2000 +2025-03-18 20:09:06,400 Current Learning Rate: 0.0006767374 +2025-03-18 20:09:06,400 Train Loss: 0.0001655, Val Loss: 0.0002432 +2025-03-18 20:09:06,400 Epoch 1124/2000 +2025-03-18 20:11:50,981 Current Learning Rate: 0.0006840623 +2025-03-18 20:11:50,981 Train Loss: 0.0001658, Val Loss: 0.0002445 +2025-03-18 20:11:50,981 Epoch 1125/2000 +2025-03-18 20:14:35,483 Current Learning Rate: 0.0006913417 +2025-03-18 20:14:35,483 Train Loss: 0.0001665, Val Loss: 0.0002466 +2025-03-18 20:14:35,483 Epoch 1126/2000 +2025-03-18 20:17:20,428 Current Learning Rate: 0.0006985739 +2025-03-18 20:17:20,429 Train Loss: 0.0001661, Val Loss: 0.0002488 +2025-03-18 20:17:20,429 Epoch 1127/2000 +2025-03-18 20:20:04,856 Current Learning Rate: 0.0007057572 +2025-03-18 20:20:04,856 Train Loss: 0.0001661, Val Loss: 0.0002492 +2025-03-18 20:20:04,857 Epoch 1128/2000 +2025-03-18 20:22:49,305 Current Learning 
Rate: 0.0007128896 +2025-03-18 20:22:49,306 Train Loss: 0.0001667, Val Loss: 0.0002512 +2025-03-18 20:22:49,306 Epoch 1129/2000 +2025-03-18 20:25:33,793 Current Learning Rate: 0.0007199696 +2025-03-18 20:25:33,793 Train Loss: 0.0001670, Val Loss: 0.0002529 +2025-03-18 20:25:33,793 Epoch 1130/2000 +2025-03-18 20:28:18,591 Current Learning Rate: 0.0007269952 +2025-03-18 20:28:18,592 Train Loss: 0.0001677, Val Loss: 0.0002528 +2025-03-18 20:28:18,592 Epoch 1131/2000 +2025-03-18 20:31:03,200 Current Learning Rate: 0.0007339649 +2025-03-18 20:31:03,200 Train Loss: 0.0001681, Val Loss: 0.0002529 +2025-03-18 20:31:03,201 Epoch 1132/2000 +2025-03-18 20:33:47,728 Current Learning Rate: 0.0007408768 +2025-03-18 20:33:47,729 Train Loss: 0.0001679, Val Loss: 0.0002490 +2025-03-18 20:33:47,729 Epoch 1133/2000 +2025-03-18 20:36:32,671 Current Learning Rate: 0.0007477293 +2025-03-18 20:36:32,672 Train Loss: 0.0001674, Val Loss: 0.0002467 +2025-03-18 20:36:32,672 Epoch 1134/2000 +2025-03-18 20:39:17,021 Current Learning Rate: 0.0007545207 +2025-03-18 20:39:17,022 Train Loss: 0.0001678, Val Loss: 0.0002475 +2025-03-18 20:39:17,022 Epoch 1135/2000 +2025-03-18 20:42:01,806 Current Learning Rate: 0.0007612493 +2025-03-18 20:42:01,807 Train Loss: 0.0001694, Val Loss: 0.0002473 +2025-03-18 20:42:01,807 Epoch 1136/2000 +2025-03-18 20:44:46,749 Current Learning Rate: 0.0007679134 +2025-03-18 20:44:46,750 Train Loss: 0.0001704, Val Loss: 0.0002485 +2025-03-18 20:44:46,750 Epoch 1137/2000 +2025-03-18 20:47:32,077 Current Learning Rate: 0.0007745114 +2025-03-18 20:47:32,077 Train Loss: 0.0001710, Val Loss: 0.0002500 +2025-03-18 20:47:32,078 Epoch 1138/2000 +2025-03-18 20:50:16,882 Current Learning Rate: 0.0007810417 +2025-03-18 20:50:16,883 Train Loss: 0.0001717, Val Loss: 0.0002513 +2025-03-18 20:50:16,883 Epoch 1139/2000 +2025-03-18 20:53:02,050 Current Learning Rate: 0.0007875026 +2025-03-18 20:53:02,050 Train Loss: 0.0001723, Val Loss: 0.0002538 +2025-03-18 20:53:02,050 Epoch 1140/2000 
+2025-03-18 20:55:46,899 Current Learning Rate: 0.0007938926 +2025-03-18 20:55:46,899 Train Loss: 0.0001713, Val Loss: 0.0002566 +2025-03-18 20:55:46,899 Epoch 1141/2000 +2025-03-18 20:58:31,330 Current Learning Rate: 0.0008002101 +2025-03-18 20:58:31,330 Train Loss: 0.0001711, Val Loss: 0.0002566 +2025-03-18 20:58:31,330 Epoch 1142/2000 +2025-03-18 21:01:16,408 Current Learning Rate: 0.0008064535 +2025-03-18 21:01:16,409 Train Loss: 0.0001716, Val Loss: 0.0002587 +2025-03-18 21:01:16,409 Epoch 1143/2000 +2025-03-18 21:04:01,436 Current Learning Rate: 0.0008126213 +2025-03-18 21:04:01,437 Train Loss: 0.0001716, Val Loss: 0.0002574 +2025-03-18 21:04:01,437 Epoch 1144/2000 +2025-03-18 21:06:45,724 Current Learning Rate: 0.0008187120 +2025-03-18 21:06:45,725 Train Loss: 0.0001719, Val Loss: 0.0002565 +2025-03-18 21:06:45,725 Epoch 1145/2000 +2025-03-18 21:09:29,803 Current Learning Rate: 0.0008247240 +2025-03-18 21:09:29,803 Train Loss: 0.0001730, Val Loss: 0.0002556 +2025-03-18 21:09:29,803 Epoch 1146/2000 +2025-03-18 21:12:14,368 Current Learning Rate: 0.0008306559 +2025-03-18 21:12:14,369 Train Loss: 0.0001721, Val Loss: 0.0002566 +2025-03-18 21:12:14,369 Epoch 1147/2000 +2025-03-18 21:14:59,080 Current Learning Rate: 0.0008365063 +2025-03-18 21:14:59,081 Train Loss: 0.0001725, Val Loss: 0.0002601 +2025-03-18 21:14:59,081 Epoch 1148/2000 +2025-03-18 21:17:43,463 Current Learning Rate: 0.0008422736 +2025-03-18 21:17:43,463 Train Loss: 0.0001732, Val Loss: 0.0002573 +2025-03-18 21:17:43,463 Epoch 1149/2000 +2025-03-18 21:20:27,875 Current Learning Rate: 0.0008479564 +2025-03-18 21:20:27,876 Train Loss: 0.0001736, Val Loss: 0.0002592 +2025-03-18 21:20:27,876 Epoch 1150/2000 +2025-03-18 21:23:12,289 Current Learning Rate: 0.0008535534 +2025-03-18 21:23:12,290 Train Loss: 0.0001747, Val Loss: 0.0002597 +2025-03-18 21:23:12,290 Epoch 1151/2000 +2025-03-18 21:25:56,728 Current Learning Rate: 0.0008590631 +2025-03-18 21:25:56,728 Train Loss: 0.0001736, Val Loss: 0.0002575 
+2025-03-18 21:25:56,729 Epoch 1152/2000 +2025-03-18 21:28:41,619 Current Learning Rate: 0.0008644843 +2025-03-18 21:28:41,619 Train Loss: 0.0001743, Val Loss: 0.0002567 +2025-03-18 21:28:41,619 Epoch 1153/2000 +2025-03-18 21:31:26,167 Current Learning Rate: 0.0008698155 +2025-03-18 21:31:26,167 Train Loss: 0.0001755, Val Loss: 0.0002546 +2025-03-18 21:31:26,168 Epoch 1154/2000 +2025-03-18 21:34:10,667 Current Learning Rate: 0.0008750555 +2025-03-18 21:34:10,667 Train Loss: 0.0001759, Val Loss: 0.0002572 +2025-03-18 21:34:10,667 Epoch 1155/2000 +2025-03-18 21:36:54,905 Current Learning Rate: 0.0008802030 +2025-03-18 21:36:54,905 Train Loss: 0.0001744, Val Loss: 0.0002548 +2025-03-18 21:36:54,906 Epoch 1156/2000 +2025-03-18 21:39:39,183 Current Learning Rate: 0.0008852566 +2025-03-18 21:39:39,183 Train Loss: 0.0001744, Val Loss: 0.0002524 +2025-03-18 21:39:39,184 Epoch 1157/2000 +2025-03-18 21:42:23,595 Current Learning Rate: 0.0008902152 +2025-03-18 21:42:23,595 Train Loss: 0.0001746, Val Loss: 0.0002527 +2025-03-18 21:42:23,595 Epoch 1158/2000 +2025-03-18 21:45:07,637 Current Learning Rate: 0.0008950775 +2025-03-18 21:45:07,638 Train Loss: 0.0001752, Val Loss: 0.0002552 +2025-03-18 21:45:07,638 Epoch 1159/2000 +2025-03-18 21:47:51,824 Current Learning Rate: 0.0008998423 +2025-03-18 21:47:51,824 Train Loss: 0.0001758, Val Loss: 0.0002571 +2025-03-18 21:47:51,824 Epoch 1160/2000 +2025-03-18 21:50:36,086 Current Learning Rate: 0.0009045085 +2025-03-18 21:50:36,086 Train Loss: 0.0001757, Val Loss: 0.0002566 +2025-03-18 21:50:36,087 Epoch 1161/2000 +2025-03-18 21:53:21,014 Current Learning Rate: 0.0009090749 +2025-03-18 21:53:21,014 Train Loss: 0.0001756, Val Loss: 0.0002552 +2025-03-18 21:53:21,014 Epoch 1162/2000 +2025-03-18 21:56:05,321 Current Learning Rate: 0.0009135403 +2025-03-18 21:56:05,321 Train Loss: 0.0001764, Val Loss: 0.0002569 +2025-03-18 21:56:05,321 Epoch 1163/2000 +2025-03-18 21:58:49,799 Current Learning Rate: 0.0009179037 +2025-03-18 21:58:49,799 
Train Loss: 0.0001766, Val Loss: 0.0002577 +2025-03-18 21:58:49,800 Epoch 1164/2000 +2025-03-18 22:01:34,498 Current Learning Rate: 0.0009221640 +2025-03-18 22:01:34,499 Train Loss: 0.0001776, Val Loss: 0.0002566 +2025-03-18 22:01:34,499 Epoch 1165/2000 +2025-03-18 22:04:18,890 Current Learning Rate: 0.0009263201 +2025-03-18 22:04:18,890 Train Loss: 0.0001780, Val Loss: 0.0002592 +2025-03-18 22:04:18,890 Epoch 1166/2000 +2025-03-18 22:07:03,464 Current Learning Rate: 0.0009303710 +2025-03-18 22:07:03,465 Train Loss: 0.0001788, Val Loss: 0.0002589 +2025-03-18 22:07:03,465 Epoch 1167/2000 +2025-03-18 22:09:48,198 Current Learning Rate: 0.0009343158 +2025-03-18 22:09:48,198 Train Loss: 0.0001788, Val Loss: 0.0002572 +2025-03-18 22:09:48,198 Epoch 1168/2000 +2025-03-18 22:12:32,754 Current Learning Rate: 0.0009381533 +2025-03-18 22:12:32,754 Train Loss: 0.0001803, Val Loss: 0.0002576 +2025-03-18 22:12:32,754 Epoch 1169/2000 +2025-03-18 22:15:17,197 Current Learning Rate: 0.0009418828 +2025-03-18 22:15:17,197 Train Loss: 0.0001791, Val Loss: 0.0002555 +2025-03-18 22:15:17,198 Epoch 1170/2000 +2025-03-18 22:18:01,565 Current Learning Rate: 0.0009455033 +2025-03-18 22:18:01,566 Train Loss: 0.0001778, Val Loss: 0.0002574 +2025-03-18 22:18:01,566 Epoch 1171/2000 +2025-03-18 22:20:45,578 Current Learning Rate: 0.0009490138 +2025-03-18 22:20:45,578 Train Loss: 0.0001778, Val Loss: 0.0002567 +2025-03-18 22:20:45,579 Epoch 1172/2000 +2025-03-18 22:23:30,209 Current Learning Rate: 0.0009524135 +2025-03-18 22:23:30,209 Train Loss: 0.0001782, Val Loss: 0.0002626 +2025-03-18 22:23:30,209 Epoch 1173/2000 +2025-03-18 22:26:14,643 Current Learning Rate: 0.0009557016 +2025-03-18 22:26:14,643 Train Loss: 0.0001796, Val Loss: 0.0002650 +2025-03-18 22:26:14,643 Epoch 1174/2000 +2025-03-18 22:28:58,998 Current Learning Rate: 0.0009588773 +2025-03-18 22:28:58,998 Train Loss: 0.0001796, Val Loss: 0.0002661 +2025-03-18 22:28:58,998 Epoch 1175/2000 +2025-03-18 22:31:43,805 Current Learning 
Rate: 0.0009619398 +2025-03-18 22:31:43,806 Train Loss: 0.0001800, Val Loss: 0.0002683 +2025-03-18 22:31:43,806 Epoch 1176/2000 +2025-03-18 22:34:28,441 Current Learning Rate: 0.0009648882 +2025-03-18 22:34:28,441 Train Loss: 0.0001796, Val Loss: 0.0002689 +2025-03-18 22:34:28,441 Epoch 1177/2000 +2025-03-18 22:37:12,943 Current Learning Rate: 0.0009677220 +2025-03-18 22:37:12,944 Train Loss: 0.0001796, Val Loss: 0.0002647 +2025-03-18 22:37:12,944 Epoch 1178/2000 +2025-03-18 22:39:57,300 Current Learning Rate: 0.0009704404 +2025-03-18 22:39:57,301 Train Loss: 0.0001791, Val Loss: 0.0002577 +2025-03-18 22:39:57,301 Epoch 1179/2000 +2025-03-18 22:42:41,907 Current Learning Rate: 0.0009730427 +2025-03-18 22:42:41,908 Train Loss: 0.0001789, Val Loss: 0.0002590 +2025-03-18 22:42:41,908 Epoch 1180/2000 +2025-03-18 22:45:26,169 Current Learning Rate: 0.0009755283 +2025-03-18 22:45:26,169 Train Loss: 0.0001789, Val Loss: 0.0002592 +2025-03-18 22:45:26,170 Epoch 1181/2000 +2025-03-18 22:48:11,148 Current Learning Rate: 0.0009778965 +2025-03-18 22:48:11,148 Train Loss: 0.0001789, Val Loss: 0.0002624 +2025-03-18 22:48:11,149 Epoch 1182/2000 +2025-03-18 22:50:55,532 Current Learning Rate: 0.0009801468 +2025-03-18 22:50:55,533 Train Loss: 0.0001794, Val Loss: 0.0002670 +2025-03-18 22:50:55,533 Epoch 1183/2000 +2025-03-18 22:53:40,193 Current Learning Rate: 0.0009822787 +2025-03-18 22:53:40,193 Train Loss: 0.0001792, Val Loss: 0.0002645 +2025-03-18 22:53:40,193 Epoch 1184/2000 +2025-03-18 22:56:24,768 Current Learning Rate: 0.0009842916 +2025-03-18 22:56:24,769 Train Loss: 0.0001789, Val Loss: 0.0002614 +2025-03-18 22:56:24,769 Epoch 1185/2000 +2025-03-18 22:59:09,252 Current Learning Rate: 0.0009861850 +2025-03-18 22:59:09,252 Train Loss: 0.0001782, Val Loss: 0.0002579 +2025-03-18 22:59:09,253 Epoch 1186/2000 +2025-03-18 23:01:53,405 Current Learning Rate: 0.0009879584 +2025-03-18 23:01:53,405 Train Loss: 0.0001792, Val Loss: 0.0002572 +2025-03-18 23:01:53,405 Epoch 1187/2000 
+2025-03-18 23:04:37,769 Current Learning Rate: 0.0009896114 +2025-03-18 23:04:37,770 Train Loss: 0.0001797, Val Loss: 0.0002574 +2025-03-18 23:04:37,770 Epoch 1188/2000 +2025-03-18 23:07:22,111 Current Learning Rate: 0.0009911436 +2025-03-18 23:07:22,112 Train Loss: 0.0001795, Val Loss: 0.0002591 +2025-03-18 23:07:22,112 Epoch 1189/2000 +2025-03-18 23:10:06,714 Current Learning Rate: 0.0009925547 +2025-03-18 23:10:06,715 Train Loss: 0.0001802, Val Loss: 0.0002575 +2025-03-18 23:10:06,715 Epoch 1190/2000 +2025-03-18 23:12:51,245 Current Learning Rate: 0.0009938442 +2025-03-18 23:12:51,246 Train Loss: 0.0001801, Val Loss: 0.0002604 +2025-03-18 23:12:51,246 Epoch 1191/2000 +2025-03-18 23:15:36,221 Current Learning Rate: 0.0009950118 +2025-03-18 23:15:36,221 Train Loss: 0.0001796, Val Loss: 0.0002604 +2025-03-18 23:15:36,221 Epoch 1192/2000 +2025-03-18 23:18:21,339 Current Learning Rate: 0.0009960574 +2025-03-18 23:18:21,340 Train Loss: 0.0001803, Val Loss: 0.0002619 +2025-03-18 23:18:21,340 Epoch 1193/2000 +2025-03-18 23:21:06,610 Current Learning Rate: 0.0009969805 +2025-03-18 23:21:06,611 Train Loss: 0.0001801, Val Loss: 0.0002646 +2025-03-18 23:21:06,611 Epoch 1194/2000 +2025-03-18 23:23:50,997 Current Learning Rate: 0.0009977810 +2025-03-18 23:23:50,997 Train Loss: 0.0001799, Val Loss: 0.0002619 +2025-03-18 23:23:50,998 Epoch 1195/2000 +2025-03-18 23:26:35,330 Current Learning Rate: 0.0009984587 +2025-03-18 23:26:35,331 Train Loss: 0.0001788, Val Loss: 0.0002618 +2025-03-18 23:26:35,331 Epoch 1196/2000 +2025-03-18 23:29:20,210 Current Learning Rate: 0.0009990134 +2025-03-18 23:29:20,210 Train Loss: 0.0001788, Val Loss: 0.0002625 +2025-03-18 23:29:20,210 Epoch 1197/2000 +2025-03-18 23:32:04,941 Current Learning Rate: 0.0009994449 +2025-03-18 23:32:04,941 Train Loss: 0.0001787, Val Loss: 0.0002620 +2025-03-18 23:32:04,942 Epoch 1198/2000 +2025-03-18 23:34:49,818 Current Learning Rate: 0.0009997533 +2025-03-18 23:34:49,819 Train Loss: 0.0001796, Val Loss: 0.0002604 
+2025-03-18 23:34:49,819 Epoch 1199/2000 +2025-03-18 23:37:34,262 Current Learning Rate: 0.0009999383 +2025-03-18 23:37:34,263 Train Loss: 0.0001800, Val Loss: 0.0002607 +2025-03-18 23:37:34,263 Epoch 1200/2000 +2025-03-18 23:40:18,812 Current Learning Rate: 0.0010000000 +2025-03-18 23:40:18,812 Train Loss: 0.0001805, Val Loss: 0.0002614 +2025-03-18 23:40:18,813 Epoch 1201/2000 +2025-03-18 23:43:03,579 Current Learning Rate: 0.0009999383 +2025-03-18 23:43:03,579 Train Loss: 0.0001792, Val Loss: 0.0002607 +2025-03-18 23:43:03,580 Epoch 1202/2000 +2025-03-18 23:45:47,856 Current Learning Rate: 0.0009997533 +2025-03-18 23:45:47,857 Train Loss: 0.0001777, Val Loss: 0.0002621 +2025-03-18 23:45:47,857 Epoch 1203/2000 +2025-03-18 23:48:32,484 Current Learning Rate: 0.0009994449 +2025-03-18 23:48:32,484 Train Loss: 0.0001783, Val Loss: 0.0002672 +2025-03-18 23:48:32,485 Epoch 1204/2000 +2025-03-18 23:51:17,459 Current Learning Rate: 0.0009990134 +2025-03-18 23:51:17,459 Train Loss: 0.0001790, Val Loss: 0.0002679 +2025-03-18 23:51:17,460 Epoch 1205/2000 +2025-03-18 23:54:02,165 Current Learning Rate: 0.0009984587 +2025-03-18 23:54:02,165 Train Loss: 0.0001788, Val Loss: 0.0002656 +2025-03-18 23:54:02,166 Epoch 1206/2000 +2025-03-18 23:56:46,932 Current Learning Rate: 0.0009977810 +2025-03-18 23:56:46,933 Train Loss: 0.0001782, Val Loss: 0.0002604 +2025-03-18 23:56:46,933 Epoch 1207/2000 +2025-03-18 23:59:31,433 Current Learning Rate: 0.0009969805 +2025-03-18 23:59:31,434 Train Loss: 0.0001769, Val Loss: 0.0002619 +2025-03-18 23:59:31,434 Epoch 1208/2000 +2025-03-19 00:02:15,815 Current Learning Rate: 0.0009960574 +2025-03-19 00:02:15,815 Train Loss: 0.0001776, Val Loss: 0.0002662 +2025-03-19 00:02:15,816 Epoch 1209/2000 +2025-03-19 00:05:01,008 Current Learning Rate: 0.0009950118 +2025-03-19 00:05:01,009 Train Loss: 0.0001784, Val Loss: 0.0002627 +2025-03-19 00:05:01,009 Epoch 1210/2000 +2025-03-19 00:07:45,730 Current Learning Rate: 0.0009938442 +2025-03-19 00:07:45,731 
Train Loss: 0.0001791, Val Loss: 0.0002628 +2025-03-19 00:07:45,731 Epoch 1211/2000 +2025-03-19 00:10:30,364 Current Learning Rate: 0.0009925547 +2025-03-19 00:10:30,364 Train Loss: 0.0001804, Val Loss: 0.0002621 +2025-03-19 00:10:30,365 Epoch 1212/2000 +2025-03-19 00:13:15,136 Current Learning Rate: 0.0009911436 +2025-03-19 00:13:15,137 Train Loss: 0.0001805, Val Loss: 0.0002595 +2025-03-19 00:13:15,137 Epoch 1213/2000 +2025-03-19 00:15:59,846 Current Learning Rate: 0.0009896114 +2025-03-19 00:15:59,846 Train Loss: 0.0001794, Val Loss: 0.0002598 +2025-03-19 00:15:59,847 Epoch 1214/2000 +2025-03-19 00:18:44,451 Current Learning Rate: 0.0009879584 +2025-03-19 00:18:44,452 Train Loss: 0.0001793, Val Loss: 0.0002587 +2025-03-19 00:18:44,452 Epoch 1215/2000 +2025-03-19 00:21:28,873 Current Learning Rate: 0.0009861850 +2025-03-19 00:21:28,873 Train Loss: 0.0001789, Val Loss: 0.0002592 +2025-03-19 00:21:28,874 Epoch 1216/2000 +2025-03-19 00:24:13,554 Current Learning Rate: 0.0009842916 +2025-03-19 00:24:13,554 Train Loss: 0.0001768, Val Loss: 0.0002562 +2025-03-19 00:24:13,555 Epoch 1217/2000 +2025-03-19 00:26:58,301 Current Learning Rate: 0.0009822787 +2025-03-19 00:26:58,302 Train Loss: 0.0001759, Val Loss: 0.0002559 +2025-03-19 00:26:58,302 Epoch 1218/2000 +2025-03-19 00:29:42,631 Current Learning Rate: 0.0009801468 +2025-03-19 00:29:42,631 Train Loss: 0.0001759, Val Loss: 0.0002585 +2025-03-19 00:29:42,632 Epoch 1219/2000 +2025-03-19 00:32:26,977 Current Learning Rate: 0.0009778965 +2025-03-19 00:32:26,978 Train Loss: 0.0001754, Val Loss: 0.0002568 +2025-03-19 00:32:26,978 Epoch 1220/2000 +2025-03-19 00:35:11,434 Current Learning Rate: 0.0009755283 +2025-03-19 00:35:11,435 Train Loss: 0.0001747, Val Loss: 0.0002585 +2025-03-19 00:35:11,435 Epoch 1221/2000 +2025-03-19 00:37:55,833 Current Learning Rate: 0.0009730427 +2025-03-19 00:37:55,833 Train Loss: 0.0001747, Val Loss: 0.0002580 +2025-03-19 00:37:55,833 Epoch 1222/2000 +2025-03-19 00:40:40,815 Current Learning 
Rate: 0.0009704404 +2025-03-19 00:40:40,815 Train Loss: 0.0001756, Val Loss: 0.0002562 +2025-03-19 00:40:40,815 Epoch 1223/2000 +2025-03-19 00:43:25,493 Current Learning Rate: 0.0009677220 +2025-03-19 00:43:25,494 Train Loss: 0.0001761, Val Loss: 0.0002594 +2025-03-19 00:43:25,494 Epoch 1224/2000 +2025-03-19 00:46:10,029 Current Learning Rate: 0.0009648882 +2025-03-19 00:46:10,029 Train Loss: 0.0001764, Val Loss: 0.0002639 +2025-03-19 00:46:10,030 Epoch 1225/2000 +2025-03-19 00:48:54,591 Current Learning Rate: 0.0009619398 +2025-03-19 00:48:54,591 Train Loss: 0.0001766, Val Loss: 0.0002606 +2025-03-19 00:48:54,591 Epoch 1226/2000 +2025-03-19 00:51:39,306 Current Learning Rate: 0.0009588773 +2025-03-19 00:51:39,306 Train Loss: 0.0001757, Val Loss: 0.0002568 +2025-03-19 00:51:39,306 Epoch 1227/2000 +2025-03-19 00:54:23,922 Current Learning Rate: 0.0009557016 +2025-03-19 00:54:23,922 Train Loss: 0.0001756, Val Loss: 0.0002558 +2025-03-19 00:54:23,923 Epoch 1228/2000 +2025-03-19 00:57:08,547 Current Learning Rate: 0.0009524135 +2025-03-19 00:57:08,547 Train Loss: 0.0001759, Val Loss: 0.0002554 +2025-03-19 00:57:08,547 Epoch 1229/2000 +2025-03-19 00:59:53,271 Current Learning Rate: 0.0009490138 +2025-03-19 00:59:53,272 Train Loss: 0.0001766, Val Loss: 0.0002571 +2025-03-19 00:59:53,272 Epoch 1230/2000 +2025-03-19 01:02:37,761 Current Learning Rate: 0.0009455033 +2025-03-19 01:02:37,762 Train Loss: 0.0001770, Val Loss: 0.0002561 +2025-03-19 01:02:37,762 Epoch 1231/2000 +2025-03-19 01:05:22,294 Current Learning Rate: 0.0009418828 +2025-03-19 01:05:22,294 Train Loss: 0.0001757, Val Loss: 0.0002562 +2025-03-19 01:05:22,294 Epoch 1232/2000 +2025-03-19 01:08:06,877 Current Learning Rate: 0.0009381533 +2025-03-19 01:08:06,877 Train Loss: 0.0001749, Val Loss: 0.0002524 +2025-03-19 01:08:06,878 Epoch 1233/2000 +2025-03-19 01:10:52,174 Current Learning Rate: 0.0009343158 +2025-03-19 01:10:52,174 Train Loss: 0.0001737, Val Loss: 0.0002522 +2025-03-19 01:10:52,174 Epoch 1234/2000 
+2025-03-19 01:13:36,718 Current Learning Rate: 0.0009303710 +2025-03-19 01:13:36,718 Train Loss: 0.0001731, Val Loss: 0.0002508 +2025-03-19 01:13:36,718 Epoch 1235/2000 +2025-03-19 01:16:21,375 Current Learning Rate: 0.0009263201 +2025-03-19 01:16:21,375 Train Loss: 0.0001723, Val Loss: 0.0002530 +2025-03-19 01:16:21,376 Epoch 1236/2000 +2025-03-19 01:19:05,621 Current Learning Rate: 0.0009221640 +2025-03-19 01:19:05,621 Train Loss: 0.0001722, Val Loss: 0.0002540 +2025-03-19 01:19:05,622 Epoch 1237/2000 +2025-03-19 01:21:50,075 Current Learning Rate: 0.0009179037 +2025-03-19 01:21:50,076 Train Loss: 0.0001724, Val Loss: 0.0002559 +2025-03-19 01:21:50,076 Epoch 1238/2000 +2025-03-19 01:24:34,887 Current Learning Rate: 0.0009135403 +2025-03-19 01:24:34,888 Train Loss: 0.0001713, Val Loss: 0.0002543 +2025-03-19 01:24:34,888 Epoch 1239/2000 +2025-03-19 01:27:19,323 Current Learning Rate: 0.0009090749 +2025-03-19 01:27:19,324 Train Loss: 0.0001715, Val Loss: 0.0002523 +2025-03-19 01:27:19,324 Epoch 1240/2000 +2025-03-19 01:30:03,968 Current Learning Rate: 0.0009045085 +2025-03-19 01:30:03,968 Train Loss: 0.0001716, Val Loss: 0.0002515 +2025-03-19 01:30:03,968 Epoch 1241/2000 +2025-03-19 01:32:48,376 Current Learning Rate: 0.0008998423 +2025-03-19 01:32:48,377 Train Loss: 0.0001715, Val Loss: 0.0002518 +2025-03-19 01:32:48,377 Epoch 1242/2000 +2025-03-19 01:35:32,895 Current Learning Rate: 0.0008950775 +2025-03-19 01:35:32,895 Train Loss: 0.0001711, Val Loss: 0.0002518 +2025-03-19 01:35:32,895 Epoch 1243/2000 +2025-03-19 01:38:17,572 Current Learning Rate: 0.0008902152 +2025-03-19 01:38:17,573 Train Loss: 0.0001717, Val Loss: 0.0002554 +2025-03-19 01:38:17,573 Epoch 1244/2000 +2025-03-19 01:41:01,635 Current Learning Rate: 0.0008852566 +2025-03-19 01:41:01,636 Train Loss: 0.0001710, Val Loss: 0.0002544 +2025-03-19 01:41:01,636 Epoch 1245/2000 +2025-03-19 01:43:46,186 Current Learning Rate: 0.0008802030 +2025-03-19 01:43:46,186 Train Loss: 0.0001713, Val Loss: 0.0002544 
+2025-03-19 01:43:46,187 Epoch 1246/2000 +2025-03-19 01:46:30,772 Current Learning Rate: 0.0008750555 +2025-03-19 01:46:30,772 Train Loss: 0.0001719, Val Loss: 0.0002578 +2025-03-19 01:46:30,773 Epoch 1247/2000 +2025-03-19 01:49:15,842 Current Learning Rate: 0.0008698155 +2025-03-19 01:49:15,844 Train Loss: 0.0001709, Val Loss: 0.0002562 +2025-03-19 01:49:15,844 Epoch 1248/2000 +2025-03-19 01:52:00,346 Current Learning Rate: 0.0008644843 +2025-03-19 01:52:00,346 Train Loss: 0.0001700, Val Loss: 0.0002540 +2025-03-19 01:52:00,346 Epoch 1249/2000 +2025-03-19 01:54:44,978 Current Learning Rate: 0.0008590631 +2025-03-19 01:54:44,978 Train Loss: 0.0001699, Val Loss: 0.0002490 +2025-03-19 01:54:44,979 Epoch 1250/2000 +2025-03-19 01:57:29,429 Current Learning Rate: 0.0008535534 +2025-03-19 01:57:29,430 Train Loss: 0.0001692, Val Loss: 0.0002502 +2025-03-19 01:57:29,430 Epoch 1251/2000 +2025-03-19 02:00:14,157 Current Learning Rate: 0.0008479564 +2025-03-19 02:00:14,157 Train Loss: 0.0001686, Val Loss: 0.0002512 +2025-03-19 02:00:14,157 Epoch 1252/2000 +2025-03-19 02:02:58,823 Current Learning Rate: 0.0008422736 +2025-03-19 02:02:58,824 Train Loss: 0.0001679, Val Loss: 0.0002500 +2025-03-19 02:02:58,824 Epoch 1253/2000 +2025-03-19 02:05:43,264 Current Learning Rate: 0.0008365063 +2025-03-19 02:05:43,264 Train Loss: 0.0001679, Val Loss: 0.0002487 +2025-03-19 02:05:43,264 Epoch 1254/2000 +2025-03-19 02:08:27,673 Current Learning Rate: 0.0008306559 +2025-03-19 02:08:27,673 Train Loss: 0.0001676, Val Loss: 0.0002498 +2025-03-19 02:08:27,674 Epoch 1255/2000 +2025-03-19 02:11:12,054 Current Learning Rate: 0.0008247240 +2025-03-19 02:11:12,054 Train Loss: 0.0001674, Val Loss: 0.0002503 +2025-03-19 02:11:12,054 Epoch 1256/2000 +2025-03-19 02:13:56,157 Current Learning Rate: 0.0008187120 +2025-03-19 02:13:56,157 Train Loss: 0.0001667, Val Loss: 0.0002533 +2025-03-19 02:13:56,158 Epoch 1257/2000 +2025-03-19 02:16:40,961 Current Learning Rate: 0.0008126213 +2025-03-19 02:16:40,962 
Train Loss: 0.0001662, Val Loss: 0.0002530 +2025-03-19 02:16:40,962 Epoch 1258/2000 +2025-03-19 02:19:25,083 Current Learning Rate: 0.0008064535 +2025-03-19 02:19:25,084 Train Loss: 0.0001672, Val Loss: 0.0002517 +2025-03-19 02:19:25,084 Epoch 1259/2000 +2025-03-19 02:22:09,583 Current Learning Rate: 0.0008002101 +2025-03-19 02:22:09,583 Train Loss: 0.0001680, Val Loss: 0.0002525 +2025-03-19 02:22:09,583 Epoch 1260/2000 +2025-03-19 02:24:54,062 Current Learning Rate: 0.0007938926 +2025-03-19 02:24:54,063 Train Loss: 0.0001685, Val Loss: 0.0002521 +2025-03-19 02:24:54,063 Epoch 1261/2000 +2025-03-19 02:27:38,256 Current Learning Rate: 0.0007875026 +2025-03-19 02:27:38,257 Train Loss: 0.0001678, Val Loss: 0.0002489 +2025-03-19 02:27:38,257 Epoch 1262/2000 +2025-03-19 02:30:22,537 Current Learning Rate: 0.0007810417 +2025-03-19 02:30:22,538 Train Loss: 0.0001662, Val Loss: 0.0002474 +2025-03-19 02:30:22,538 Epoch 1263/2000 +2025-03-19 02:33:07,409 Current Learning Rate: 0.0007745114 +2025-03-19 02:33:07,409 Train Loss: 0.0001659, Val Loss: 0.0002473 +2025-03-19 02:33:07,410 Epoch 1264/2000 +2025-03-19 02:35:51,924 Current Learning Rate: 0.0007679134 +2025-03-19 02:35:51,924 Train Loss: 0.0001653, Val Loss: 0.0002451 +2025-03-19 02:35:51,925 Epoch 1265/2000 +2025-03-19 02:38:36,279 Current Learning Rate: 0.0007612493 +2025-03-19 02:38:36,279 Train Loss: 0.0001644, Val Loss: 0.0002454 +2025-03-19 02:38:36,279 Epoch 1266/2000 +2025-03-19 02:41:20,624 Current Learning Rate: 0.0007545207 +2025-03-19 02:41:20,624 Train Loss: 0.0001636, Val Loss: 0.0002473 +2025-03-19 02:41:20,624 Epoch 1267/2000 +2025-03-19 02:44:05,658 Current Learning Rate: 0.0007477293 +2025-03-19 02:44:05,658 Train Loss: 0.0001636, Val Loss: 0.0002494 +2025-03-19 02:44:05,658 Epoch 1268/2000 +2025-03-19 02:46:50,082 Current Learning Rate: 0.0007408768 +2025-03-19 02:46:50,082 Train Loss: 0.0001633, Val Loss: 0.0002485 +2025-03-19 02:46:50,082 Epoch 1269/2000 +2025-03-19 02:49:34,552 Current Learning 
Rate: 0.0007339649 +2025-03-19 02:49:34,553 Train Loss: 0.0001630, Val Loss: 0.0002476 +2025-03-19 02:49:34,553 Epoch 1270/2000 +2025-03-19 02:52:19,120 Current Learning Rate: 0.0007269952 +2025-03-19 02:52:19,120 Train Loss: 0.0001626, Val Loss: 0.0002510 +2025-03-19 02:52:19,120 Epoch 1271/2000 +2025-03-19 02:55:03,621 Current Learning Rate: 0.0007199696 +2025-03-19 02:55:03,621 Train Loss: 0.0001617, Val Loss: 0.0002510 +2025-03-19 02:55:03,622 Epoch 1272/2000 +2025-03-19 02:57:48,155 Current Learning Rate: 0.0007128896 +2025-03-19 02:57:48,155 Train Loss: 0.0001615, Val Loss: 0.0002478 +2025-03-19 02:57:48,155 Epoch 1273/2000 +2025-03-19 03:00:32,447 Current Learning Rate: 0.0007057572 +2025-03-19 03:00:32,447 Train Loss: 0.0001616, Val Loss: 0.0002454 +2025-03-19 03:00:32,447 Epoch 1274/2000 +2025-03-19 03:03:16,951 Current Learning Rate: 0.0006985739 +2025-03-19 03:03:16,952 Train Loss: 0.0001616, Val Loss: 0.0002431 +2025-03-19 03:03:16,952 Epoch 1275/2000 +2025-03-19 03:06:01,396 Current Learning Rate: 0.0006913417 +2025-03-19 03:06:01,397 Train Loss: 0.0001612, Val Loss: 0.0002424 +2025-03-19 03:06:01,397 Epoch 1276/2000 +2025-03-19 03:08:45,845 Current Learning Rate: 0.0006840623 +2025-03-19 03:08:45,845 Train Loss: 0.0001610, Val Loss: 0.0002412 +2025-03-19 03:08:45,845 Epoch 1277/2000 +2025-03-19 03:11:30,162 Current Learning Rate: 0.0006767374 +2025-03-19 03:11:30,164 Train Loss: 0.0001606, Val Loss: 0.0002400 +2025-03-19 03:11:30,165 Epoch 1278/2000 +2025-03-19 03:14:14,932 Current Learning Rate: 0.0006693690 +2025-03-19 03:14:14,933 Train Loss: 0.0001602, Val Loss: 0.0002408 +2025-03-19 03:14:14,933 Epoch 1279/2000 +2025-03-19 03:16:59,474 Current Learning Rate: 0.0006619587 +2025-03-19 03:16:59,474 Train Loss: 0.0001595, Val Loss: 0.0002434 +2025-03-19 03:16:59,474 Epoch 1280/2000 +2025-03-19 03:19:43,872 Current Learning Rate: 0.0006545085 +2025-03-19 03:19:43,873 Train Loss: 0.0001599, Val Loss: 0.0002453 +2025-03-19 03:19:43,873 Epoch 1281/2000 
+2025-03-19 03:22:28,360 Current Learning Rate: 0.0006470202 +2025-03-19 03:22:28,360 Train Loss: 0.0001592, Val Loss: 0.0002442 +2025-03-19 03:22:28,361 Epoch 1282/2000 +2025-03-19 03:25:13,015 Current Learning Rate: 0.0006394956 +2025-03-19 03:25:13,015 Train Loss: 0.0001581, Val Loss: 0.0002449 +2025-03-19 03:25:13,015 Epoch 1283/2000 +2025-03-19 03:27:57,808 Current Learning Rate: 0.0006319365 +2025-03-19 03:27:57,808 Train Loss: 0.0001568, Val Loss: 0.0002411 +2025-03-19 03:27:57,809 Epoch 1284/2000 +2025-03-19 03:30:42,106 Current Learning Rate: 0.0006243449 +2025-03-19 03:30:42,107 Train Loss: 0.0001557, Val Loss: 0.0002405 +2025-03-19 03:30:42,107 Epoch 1285/2000 +2025-03-19 03:33:26,823 Current Learning Rate: 0.0006167227 +2025-03-19 03:33:26,823 Train Loss: 0.0001559, Val Loss: 0.0002452 +2025-03-19 03:33:26,824 Epoch 1286/2000 +2025-03-19 03:36:11,348 Current Learning Rate: 0.0006090716 +2025-03-19 03:36:11,349 Train Loss: 0.0001568, Val Loss: 0.0002474 +2025-03-19 03:36:11,349 Epoch 1287/2000 +2025-03-19 03:38:55,836 Current Learning Rate: 0.0006013936 +2025-03-19 03:38:55,836 Train Loss: 0.0001564, Val Loss: 0.0002466 +2025-03-19 03:38:55,836 Epoch 1288/2000 +2025-03-19 03:41:40,259 Current Learning Rate: 0.0005936907 +2025-03-19 03:41:40,260 Train Loss: 0.0001563, Val Loss: 0.0002409 +2025-03-19 03:41:40,260 Epoch 1289/2000 +2025-03-19 03:44:24,859 Current Learning Rate: 0.0005859646 +2025-03-19 03:44:24,860 Train Loss: 0.0001562, Val Loss: 0.0002368 +2025-03-19 03:44:24,860 Epoch 1290/2000 +2025-03-19 03:47:09,401 Current Learning Rate: 0.0005782172 +2025-03-19 03:47:09,401 Train Loss: 0.0001560, Val Loss: 0.0002363 +2025-03-19 03:47:09,402 Epoch 1291/2000 +2025-03-19 03:49:53,867 Current Learning Rate: 0.0005704506 +2025-03-19 03:49:53,867 Train Loss: 0.0001562, Val Loss: 0.0002369 +2025-03-19 03:49:53,867 Epoch 1292/2000 +2025-03-19 03:52:38,351 Current Learning Rate: 0.0005626666 +2025-03-19 03:52:38,352 Train Loss: 0.0001565, Val Loss: 0.0002361 
+2025-03-19 03:52:38,352 Epoch 1293/2000 +2025-03-19 03:55:22,994 Current Learning Rate: 0.0005548672 +2025-03-19 03:55:22,995 Train Loss: 0.0001568, Val Loss: 0.0002363 +2025-03-19 03:55:22,995 Epoch 1294/2000 +2025-03-19 03:58:07,453 Current Learning Rate: 0.0005470542 +2025-03-19 03:58:07,453 Train Loss: 0.0001557, Val Loss: 0.0002352 +2025-03-19 03:58:07,454 Epoch 1295/2000 +2025-03-19 04:00:51,711 Current Learning Rate: 0.0005392295 +2025-03-19 04:00:51,712 Train Loss: 0.0001546, Val Loss: 0.0002352 +2025-03-19 04:00:51,712 Epoch 1296/2000 +2025-03-19 04:03:36,468 Current Learning Rate: 0.0005313953 +2025-03-19 04:03:36,469 Train Loss: 0.0001546, Val Loss: 0.0002360 +2025-03-19 04:03:36,469 Epoch 1297/2000 +2025-03-19 04:06:21,236 Current Learning Rate: 0.0005235532 +2025-03-19 04:06:21,237 Train Loss: 0.0001551, Val Loss: 0.0002358 +2025-03-19 04:06:21,237 Epoch 1298/2000 +2025-03-19 04:09:05,850 Current Learning Rate: 0.0005157054 +2025-03-19 04:09:05,850 Train Loss: 0.0001546, Val Loss: 0.0002353 +2025-03-19 04:09:05,850 Epoch 1299/2000 +2025-03-19 04:11:50,364 Current Learning Rate: 0.0005078537 +2025-03-19 04:11:50,365 Train Loss: 0.0001529, Val Loss: 0.0002352 +2025-03-19 04:11:50,365 Epoch 1300/2000 +2025-03-19 04:14:34,649 Current Learning Rate: 0.0005000000 +2025-03-19 04:14:34,650 Train Loss: 0.0001520, Val Loss: 0.0002366 +2025-03-19 04:14:34,650 Epoch 1301/2000 +2025-03-19 04:17:19,145 Current Learning Rate: 0.0004921463 +2025-03-19 04:17:19,145 Train Loss: 0.0001518, Val Loss: 0.0002374 +2025-03-19 04:17:19,145 Epoch 1302/2000 +2025-03-19 04:20:03,605 Current Learning Rate: 0.0004842946 +2025-03-19 04:20:03,606 Train Loss: 0.0001519, Val Loss: 0.0002375 +2025-03-19 04:20:03,606 Epoch 1303/2000 +2025-03-19 04:22:48,213 Current Learning Rate: 0.0004764468 +2025-03-19 04:22:48,213 Train Loss: 0.0001512, Val Loss: 0.0002350 +2025-03-19 04:22:48,213 Epoch 1304/2000 +2025-03-19 04:25:32,773 Current Learning Rate: 0.0004686047 +2025-03-19 04:25:32,774 
Train Loss: 0.0001509, Val Loss: 0.0002327 +2025-03-19 04:25:32,774 Epoch 1305/2000 +2025-03-19 04:28:17,613 Current Learning Rate: 0.0004607705 +2025-03-19 04:28:17,613 Train Loss: 0.0001507, Val Loss: 0.0002340 +2025-03-19 04:28:17,613 Epoch 1306/2000 +2025-03-19 04:31:01,897 Current Learning Rate: 0.0004529458 +2025-03-19 04:31:01,897 Train Loss: 0.0001502, Val Loss: 0.0002345 +2025-03-19 04:31:01,898 Epoch 1307/2000 +2025-03-19 04:33:46,469 Current Learning Rate: 0.0004451328 +2025-03-19 04:33:46,470 Train Loss: 0.0001503, Val Loss: 0.0002343 +2025-03-19 04:33:46,470 Epoch 1308/2000 +2025-03-19 04:36:31,096 Current Learning Rate: 0.0004373334 +2025-03-19 04:36:31,096 Train Loss: 0.0001501, Val Loss: 0.0002314 +2025-03-19 04:36:31,096 Epoch 1309/2000 +2025-03-19 04:39:15,160 Current Learning Rate: 0.0004295494 +2025-03-19 04:39:15,160 Train Loss: 0.0001502, Val Loss: 0.0002305 +2025-03-19 04:39:15,160 Epoch 1310/2000 +2025-03-19 04:42:00,016 Current Learning Rate: 0.0004217828 +2025-03-19 04:42:00,016 Train Loss: 0.0001495, Val Loss: 0.0002298 +2025-03-19 04:42:00,016 Epoch 1311/2000 +2025-03-19 04:44:44,386 Current Learning Rate: 0.0004140354 +2025-03-19 04:44:44,386 Train Loss: 0.0001490, Val Loss: 0.0002290 +2025-03-19 04:44:44,387 Epoch 1312/2000 +2025-03-19 04:47:28,811 Current Learning Rate: 0.0004063093 +2025-03-19 04:47:28,811 Train Loss: 0.0001490, Val Loss: 0.0002285 +2025-03-19 04:47:28,811 Epoch 1313/2000 +2025-03-19 04:50:13,095 Current Learning Rate: 0.0003986064 +2025-03-19 04:50:13,095 Train Loss: 0.0001488, Val Loss: 0.0002272 +2025-03-19 04:50:13,096 Epoch 1314/2000 +2025-03-19 04:52:57,829 Current Learning Rate: 0.0003909284 +2025-03-19 04:52:57,829 Train Loss: 0.0001475, Val Loss: 0.0002266 +2025-03-19 04:52:57,829 Epoch 1315/2000 +2025-03-19 04:55:42,280 Current Learning Rate: 0.0003832773 +2025-03-19 04:55:42,280 Train Loss: 0.0001469, Val Loss: 0.0002263 +2025-03-19 04:55:42,280 Epoch 1316/2000 +2025-03-19 04:58:26,696 Current Learning 
Rate: 0.0003756551 +2025-03-19 04:58:26,696 Train Loss: 0.0001465, Val Loss: 0.0002272 +2025-03-19 04:58:26,696 Epoch 1317/2000 +2025-03-19 05:01:11,335 Current Learning Rate: 0.0003680635 +2025-03-19 05:01:11,335 Train Loss: 0.0001463, Val Loss: 0.0002268 +2025-03-19 05:01:11,336 Epoch 1318/2000 +2025-03-19 05:03:55,570 Current Learning Rate: 0.0003605044 +2025-03-19 05:03:55,570 Train Loss: 0.0001466, Val Loss: 0.0002266 +2025-03-19 05:03:55,570 Epoch 1319/2000 +2025-03-19 05:06:40,589 Current Learning Rate: 0.0003529798 +2025-03-19 05:06:40,590 Train Loss: 0.0001464, Val Loss: 0.0002259 +2025-03-19 05:06:40,590 Epoch 1320/2000 +2025-03-19 05:09:24,843 Current Learning Rate: 0.0003454915 +2025-03-19 05:09:24,844 Train Loss: 0.0001457, Val Loss: 0.0002266 +2025-03-19 05:09:24,844 Epoch 1321/2000 +2025-03-19 05:12:08,839 Current Learning Rate: 0.0003380413 +2025-03-19 05:12:08,840 Train Loss: 0.0001452, Val Loss: 0.0002275 +2025-03-19 05:12:08,840 Epoch 1322/2000 +2025-03-19 05:14:54,236 Current Learning Rate: 0.0003306310 +2025-03-19 05:14:54,237 Train Loss: 0.0001450, Val Loss: 0.0002251 +2025-03-19 05:14:54,237 Epoch 1323/2000 +2025-03-19 05:17:39,007 Current Learning Rate: 0.0003232626 +2025-03-19 05:17:39,008 Train Loss: 0.0001449, Val Loss: 0.0002238 +2025-03-19 05:17:39,008 Epoch 1324/2000 +2025-03-19 05:20:23,755 Current Learning Rate: 0.0003159377 +2025-03-19 05:20:23,756 Train Loss: 0.0001448, Val Loss: 0.0002249 +2025-03-19 05:20:23,756 Epoch 1325/2000 +2025-03-19 05:23:07,794 Current Learning Rate: 0.0003086583 +2025-03-19 05:23:07,794 Train Loss: 0.0001441, Val Loss: 0.0002272 +2025-03-19 05:23:07,794 Epoch 1326/2000 +2025-03-19 05:25:51,785 Current Learning Rate: 0.0003014261 +2025-03-19 05:25:51,785 Train Loss: 0.0001433, Val Loss: 0.0002288 +2025-03-19 05:25:51,785 Epoch 1327/2000 +2025-03-19 05:28:36,304 Current Learning Rate: 0.0002942428 +2025-03-19 05:28:36,305 Train Loss: 0.0001429, Val Loss: 0.0002276 +2025-03-19 05:28:36,305 Epoch 1328/2000 
+2025-03-19 05:31:20,821 Current Learning Rate: 0.0002871104 +2025-03-19 05:31:20,821 Train Loss: 0.0001428, Val Loss: 0.0002239 +2025-03-19 05:31:20,822 Epoch 1329/2000 +2025-03-19 05:34:05,179 Current Learning Rate: 0.0002800304 +2025-03-19 05:34:05,179 Train Loss: 0.0001428, Val Loss: 0.0002230 +2025-03-19 05:34:05,179 Epoch 1330/2000 +2025-03-19 05:36:49,890 Current Learning Rate: 0.0002730048 +2025-03-19 05:36:49,890 Train Loss: 0.0001425, Val Loss: 0.0002227 +2025-03-19 05:36:49,890 Epoch 1331/2000 +2025-03-19 05:39:34,820 Current Learning Rate: 0.0002660351 +2025-03-19 05:39:34,821 Train Loss: 0.0001423, Val Loss: 0.0002226 +2025-03-19 05:39:34,821 Epoch 1332/2000 +2025-03-19 05:42:19,412 Current Learning Rate: 0.0002591232 +2025-03-19 05:42:19,413 Train Loss: 0.0001422, Val Loss: 0.0002228 +2025-03-19 05:42:19,413 Epoch 1333/2000 +2025-03-19 05:45:04,123 Current Learning Rate: 0.0002522707 +2025-03-19 05:45:04,124 Train Loss: 0.0001416, Val Loss: 0.0002234 +2025-03-19 05:45:04,124 Epoch 1334/2000 +2025-03-19 05:47:48,668 Current Learning Rate: 0.0002454793 +2025-03-19 05:47:48,668 Train Loss: 0.0001410, Val Loss: 0.0002243 +2025-03-19 05:47:48,669 Epoch 1335/2000 +2025-03-19 05:50:33,122 Current Learning Rate: 0.0002387507 +2025-03-19 05:50:33,122 Train Loss: 0.0001405, Val Loss: 0.0002237 +2025-03-19 05:50:33,122 Epoch 1336/2000 +2025-03-19 05:53:17,687 Current Learning Rate: 0.0002320866 +2025-03-19 05:53:17,687 Train Loss: 0.0001400, Val Loss: 0.0002199 +2025-03-19 05:53:17,687 Epoch 1337/2000 +2025-03-19 05:56:02,325 Current Learning Rate: 0.0002254886 +2025-03-19 05:56:02,325 Train Loss: 0.0001396, Val Loss: 0.0002203 +2025-03-19 05:56:02,326 Epoch 1338/2000 +2025-03-19 05:58:47,112 Current Learning Rate: 0.0002189583 +2025-03-19 05:58:47,113 Train Loss: 0.0001394, Val Loss: 0.0002204 +2025-03-19 05:58:47,113 Epoch 1339/2000 +2025-03-19 06:01:31,507 Current Learning Rate: 0.0002124974 +2025-03-19 06:01:31,508 Train Loss: 0.0001393, Val Loss: 0.0002202 
+2025-03-19 06:01:31,508 Epoch 1340/2000 +2025-03-19 06:04:15,929 Current Learning Rate: 0.0002061074 +2025-03-19 06:04:15,930 Train Loss: 0.0001390, Val Loss: 0.0002195 +2025-03-19 06:04:15,930 Epoch 1341/2000 +2025-03-19 06:07:00,485 Current Learning Rate: 0.0001997899 +2025-03-19 06:07:00,485 Train Loss: 0.0001388, Val Loss: 0.0002200 +2025-03-19 06:07:00,486 Epoch 1342/2000 +2025-03-19 06:09:44,988 Current Learning Rate: 0.0001935465 +2025-03-19 06:09:44,988 Train Loss: 0.0001387, Val Loss: 0.0002208 +2025-03-19 06:09:44,988 Epoch 1343/2000 +2025-03-19 06:12:29,167 Current Learning Rate: 0.0001873787 +2025-03-19 06:12:29,167 Train Loss: 0.0001385, Val Loss: 0.0002203 +2025-03-19 06:12:29,167 Epoch 1344/2000 +2025-03-19 06:15:13,833 Current Learning Rate: 0.0001812880 +2025-03-19 06:15:13,833 Train Loss: 0.0001381, Val Loss: 0.0002189 +2025-03-19 06:15:13,833 Epoch 1345/2000 +2025-03-19 06:17:58,211 Current Learning Rate: 0.0001752760 +2025-03-19 06:17:58,212 Train Loss: 0.0001376, Val Loss: 0.0002180 +2025-03-19 06:17:58,212 Epoch 1346/2000 +2025-03-19 06:20:42,407 Current Learning Rate: 0.0001693441 +2025-03-19 06:20:42,407 Train Loss: 0.0001372, Val Loss: 0.0002175 +2025-03-19 06:20:42,408 Epoch 1347/2000 +2025-03-19 06:23:26,804 Current Learning Rate: 0.0001634937 +2025-03-19 06:23:26,805 Train Loss: 0.0001368, Val Loss: 0.0002174 +2025-03-19 06:23:26,805 Epoch 1348/2000 +2025-03-19 06:26:11,016 Current Learning Rate: 0.0001577264 +2025-03-19 06:26:11,016 Train Loss: 0.0001364, Val Loss: 0.0002169 +2025-03-19 06:26:11,016 Epoch 1349/2000 +2025-03-19 06:28:55,693 Current Learning Rate: 0.0001520436 +2025-03-19 06:28:55,693 Train Loss: 0.0001360, Val Loss: 0.0002167 +2025-03-19 06:28:55,693 Epoch 1350/2000 +2025-03-19 06:31:40,258 Current Learning Rate: 0.0001464466 +2025-03-19 06:31:40,258 Train Loss: 0.0001357, Val Loss: 0.0002166 +2025-03-19 06:31:40,258 Epoch 1351/2000 +2025-03-19 06:34:24,890 Current Learning Rate: 0.0001409369 +2025-03-19 06:34:24,890 
Train Loss: 0.0001353, Val Loss: 0.0002163 +2025-03-19 06:34:24,890 Epoch 1352/2000 +2025-03-19 06:37:09,273 Current Learning Rate: 0.0001355157 +2025-03-19 06:37:09,274 Train Loss: 0.0001350, Val Loss: 0.0002164 +2025-03-19 06:37:09,274 Epoch 1353/2000 +2025-03-19 06:39:53,847 Current Learning Rate: 0.0001301845 +2025-03-19 06:39:53,847 Train Loss: 0.0001348, Val Loss: 0.0002168 +2025-03-19 06:39:53,847 Epoch 1354/2000 +2025-03-19 06:42:38,382 Current Learning Rate: 0.0001249445 +2025-03-19 06:42:38,382 Train Loss: 0.0001347, Val Loss: 0.0002169 +2025-03-19 06:42:38,383 Epoch 1355/2000 +2025-03-19 06:45:22,647 Current Learning Rate: 0.0001197970 +2025-03-19 06:45:22,648 Train Loss: 0.0001345, Val Loss: 0.0002166 +2025-03-19 06:45:22,648 Epoch 1356/2000 +2025-03-19 06:48:07,409 Current Learning Rate: 0.0001147434 +2025-03-19 06:48:07,564 Train Loss: 0.0001343, Val Loss: 0.0002156 +2025-03-19 06:48:07,564 Epoch 1357/2000 +2025-03-19 06:50:51,700 Current Learning Rate: 0.0001097848 +2025-03-19 06:50:51,815 Train Loss: 0.0001342, Val Loss: 0.0002149 +2025-03-19 06:50:51,816 Epoch 1358/2000 +2025-03-19 06:53:36,358 Current Learning Rate: 0.0001049225 +2025-03-19 06:53:36,480 Train Loss: 0.0001341, Val Loss: 0.0002146 +2025-03-19 06:53:36,480 Epoch 1359/2000 +2025-03-19 06:56:20,801 Current Learning Rate: 0.0001001577 +2025-03-19 06:56:20,914 Train Loss: 0.0001338, Val Loss: 0.0002145 +2025-03-19 06:56:20,914 Epoch 1360/2000 +2025-03-19 06:59:05,730 Current Learning Rate: 0.0000954915 +2025-03-19 06:59:05,848 Train Loss: 0.0001335, Val Loss: 0.0002145 +2025-03-19 06:59:05,848 Epoch 1361/2000 +2025-03-19 07:01:49,917 Current Learning Rate: 0.0000909251 +2025-03-19 07:01:50,039 Train Loss: 0.0001333, Val Loss: 0.0002143 +2025-03-19 07:01:50,040 Epoch 1362/2000 +2025-03-19 07:04:34,412 Current Learning Rate: 0.0000864597 +2025-03-19 07:04:34,523 Train Loss: 0.0001330, Val Loss: 0.0002141 +2025-03-19 07:04:34,524 Epoch 1363/2000 +2025-03-19 07:07:19,130 Current Learning 
Rate: 0.0000820963 +2025-03-19 07:07:19,244 Train Loss: 0.0001328, Val Loss: 0.0002139 +2025-03-19 07:07:19,245 Epoch 1364/2000 +2025-03-19 07:10:03,726 Current Learning Rate: 0.0000778360 +2025-03-19 07:10:03,846 Train Loss: 0.0001326, Val Loss: 0.0002138 +2025-03-19 07:10:03,846 Epoch 1365/2000 +2025-03-19 07:12:47,991 Current Learning Rate: 0.0000736799 +2025-03-19 07:12:48,106 Train Loss: 0.0001324, Val Loss: 0.0002136 +2025-03-19 07:12:48,106 Epoch 1366/2000 +2025-03-19 07:15:32,336 Current Learning Rate: 0.0000696290 +2025-03-19 07:15:32,454 Train Loss: 0.0001322, Val Loss: 0.0002134 +2025-03-19 07:15:32,454 Epoch 1367/2000 +2025-03-19 07:18:17,085 Current Learning Rate: 0.0000656842 +2025-03-19 07:18:17,197 Train Loss: 0.0001321, Val Loss: 0.0002132 +2025-03-19 07:18:17,197 Epoch 1368/2000 +2025-03-19 07:21:01,504 Current Learning Rate: 0.0000618467 +2025-03-19 07:21:01,693 Train Loss: 0.0001319, Val Loss: 0.0002130 +2025-03-19 07:21:01,693 Epoch 1369/2000 +2025-03-19 07:23:45,781 Current Learning Rate: 0.0000581172 +2025-03-19 07:23:45,897 Train Loss: 0.0001317, Val Loss: 0.0002129 +2025-03-19 07:23:45,898 Epoch 1370/2000 +2025-03-19 07:26:30,036 Current Learning Rate: 0.0000544967 +2025-03-19 07:26:30,159 Train Loss: 0.0001316, Val Loss: 0.0002127 +2025-03-19 07:26:30,159 Epoch 1371/2000 +2025-03-19 07:29:14,312 Current Learning Rate: 0.0000509862 +2025-03-19 07:29:14,428 Train Loss: 0.0001314, Val Loss: 0.0002126 +2025-03-19 07:29:14,428 Epoch 1372/2000 +2025-03-19 07:31:58,547 Current Learning Rate: 0.0000475865 +2025-03-19 07:31:58,656 Train Loss: 0.0001313, Val Loss: 0.0002124 +2025-03-19 07:31:58,657 Epoch 1373/2000 +2025-03-19 07:34:42,725 Current Learning Rate: 0.0000442984 +2025-03-19 07:34:42,844 Train Loss: 0.0001312, Val Loss: 0.0002122 +2025-03-19 07:34:42,844 Epoch 1374/2000 +2025-03-19 07:37:27,219 Current Learning Rate: 0.0000411227 +2025-03-19 07:37:27,336 Train Loss: 0.0001310, Val Loss: 0.0002119 +2025-03-19 07:37:27,336 Epoch 1375/2000 
+2025-03-19 07:40:11,695 Current Learning Rate: 0.0000380602 +2025-03-19 07:40:11,815 Train Loss: 0.0001309, Val Loss: 0.0002117 +2025-03-19 07:40:11,815 Epoch 1376/2000 +2025-03-19 07:42:56,102 Current Learning Rate: 0.0000351118 +2025-03-19 07:42:56,239 Train Loss: 0.0001308, Val Loss: 0.0002116 +2025-03-19 07:42:56,240 Epoch 1377/2000 +2025-03-19 07:45:40,217 Current Learning Rate: 0.0000322780 +2025-03-19 07:45:40,331 Train Loss: 0.0001307, Val Loss: 0.0002114 +2025-03-19 07:45:40,331 Epoch 1378/2000 +2025-03-19 07:48:24,564 Current Learning Rate: 0.0000295596 +2025-03-19 07:48:24,681 Train Loss: 0.0001306, Val Loss: 0.0002113 +2025-03-19 07:48:24,681 Epoch 1379/2000 +2025-03-19 07:51:09,092 Current Learning Rate: 0.0000269573 +2025-03-19 07:51:09,210 Train Loss: 0.0001304, Val Loss: 0.0002112 +2025-03-19 07:51:09,210 Epoch 1380/2000 +2025-03-19 07:53:53,706 Current Learning Rate: 0.0000244717 +2025-03-19 07:53:53,817 Train Loss: 0.0001303, Val Loss: 0.0002112 +2025-03-19 07:53:53,818 Epoch 1381/2000 +2025-03-19 07:56:38,323 Current Learning Rate: 0.0000221035 +2025-03-19 07:56:38,436 Train Loss: 0.0001302, Val Loss: 0.0002111 +2025-03-19 07:56:38,436 Epoch 1382/2000 +2025-03-19 07:59:22,777 Current Learning Rate: 0.0000198532 +2025-03-19 07:59:22,888 Train Loss: 0.0001302, Val Loss: 0.0002110 +2025-03-19 07:59:22,888 Epoch 1383/2000 +2025-03-19 08:02:07,276 Current Learning Rate: 0.0000177213 +2025-03-19 08:02:07,389 Train Loss: 0.0001301, Val Loss: 0.0002109 +2025-03-19 08:02:07,389 Epoch 1384/2000 +2025-03-19 08:04:51,467 Current Learning Rate: 0.0000157084 +2025-03-19 08:04:51,581 Train Loss: 0.0001300, Val Loss: 0.0002109 +2025-03-19 08:04:51,581 Epoch 1385/2000 +2025-03-19 08:07:35,780 Current Learning Rate: 0.0000138150 +2025-03-19 08:07:35,898 Train Loss: 0.0001299, Val Loss: 0.0002109 +2025-03-19 08:07:35,898 Epoch 1386/2000 +2025-03-19 08:10:20,057 Current Learning Rate: 0.0000120416 +2025-03-19 08:10:20,180 Train Loss: 0.0001298, Val Loss: 0.0002108 
+2025-03-19 08:10:20,180 Epoch 1387/2000 +2025-03-19 08:13:04,559 Current Learning Rate: 0.0000103886 +2025-03-19 08:13:04,691 Train Loss: 0.0001298, Val Loss: 0.0002108 +2025-03-19 08:13:04,691 Epoch 1388/2000 +2025-03-19 08:15:48,548 Current Learning Rate: 0.0000088564 +2025-03-19 08:15:48,661 Train Loss: 0.0001297, Val Loss: 0.0002107 +2025-03-19 08:15:48,661 Epoch 1389/2000 +2025-03-19 08:18:33,145 Current Learning Rate: 0.0000074453 +2025-03-19 08:18:33,268 Train Loss: 0.0001296, Val Loss: 0.0002107 +2025-03-19 08:18:33,268 Epoch 1390/2000 +2025-03-19 08:21:17,042 Current Learning Rate: 0.0000061558 +2025-03-19 08:21:17,159 Train Loss: 0.0001296, Val Loss: 0.0002106 +2025-03-19 08:21:17,159 Epoch 1391/2000 +2025-03-19 08:24:01,447 Current Learning Rate: 0.0000049882 +2025-03-19 08:24:01,566 Train Loss: 0.0001295, Val Loss: 0.0002105 +2025-03-19 08:24:01,567 Epoch 1392/2000 +2025-03-19 08:26:45,916 Current Learning Rate: 0.0000039426 +2025-03-19 08:26:46,045 Train Loss: 0.0001295, Val Loss: 0.0002104 +2025-03-19 08:26:46,046 Epoch 1393/2000 +2025-03-19 08:29:30,471 Current Learning Rate: 0.0000030195 +2025-03-19 08:29:30,621 Train Loss: 0.0001294, Val Loss: 0.0002103 +2025-03-19 08:29:30,621 Epoch 1394/2000 +2025-03-19 08:32:14,441 Current Learning Rate: 0.0000022190 +2025-03-19 08:32:14,582 Train Loss: 0.0001294, Val Loss: 0.0002102 +2025-03-19 08:32:14,582 Epoch 1395/2000 +2025-03-19 08:34:58,654 Current Learning Rate: 0.0000015413 +2025-03-19 08:34:58,778 Train Loss: 0.0001293, Val Loss: 0.0002102 +2025-03-19 08:34:58,778 Epoch 1396/2000 +2025-03-19 08:37:43,241 Current Learning Rate: 0.0000009866 +2025-03-19 08:37:43,369 Train Loss: 0.0001293, Val Loss: 0.0002101 +2025-03-19 08:37:43,369 Epoch 1397/2000 +2025-03-19 08:40:27,685 Current Learning Rate: 0.0000005551 +2025-03-19 08:40:27,831 Train Loss: 0.0001293, Val Loss: 0.0002101 +2025-03-19 08:40:27,832 Epoch 1398/2000 +2025-03-19 08:43:11,742 Current Learning Rate: 0.0000002467 +2025-03-19 08:43:11,863 
Train Loss: 0.0001293, Val Loss: 0.0002101 +2025-03-19 08:43:11,863 Epoch 1399/2000 +2025-03-19 08:45:56,766 Current Learning Rate: 0.0000000617 +2025-03-19 08:45:56,887 Train Loss: 0.0001292, Val Loss: 0.0002101 +2025-03-19 08:45:56,887 Epoch 1400/2000 +2025-03-19 08:48:41,162 Current Learning Rate: 0.0000000000 +2025-03-19 08:48:41,287 Train Loss: 0.0001292, Val Loss: 0.0002101 +2025-03-19 08:48:41,287 Epoch 1401/2000 +2025-03-19 08:51:25,556 Current Learning Rate: 0.0000000617 +2025-03-19 08:51:25,680 Train Loss: 0.0001292, Val Loss: 0.0002100 +2025-03-19 08:51:25,681 Epoch 1402/2000 +2025-03-19 08:54:09,905 Current Learning Rate: 0.0000002467 +2025-03-19 08:54:10,025 Train Loss: 0.0001292, Val Loss: 0.0002100 +2025-03-19 08:54:10,025 Epoch 1403/2000 +2025-03-19 08:56:54,623 Current Learning Rate: 0.0000005551 +2025-03-19 08:56:54,623 Train Loss: 0.0001292, Val Loss: 0.0002101 +2025-03-19 08:56:54,624 Epoch 1404/2000 +2025-03-19 08:59:39,060 Current Learning Rate: 0.0000009866 +2025-03-19 08:59:39,060 Train Loss: 0.0001293, Val Loss: 0.0002101 +2025-03-19 08:59:39,060 Epoch 1405/2000 +2025-03-19 09:02:23,415 Current Learning Rate: 0.0000015413 +2025-03-19 09:02:23,416 Train Loss: 0.0001293, Val Loss: 0.0002101 +2025-03-19 09:02:23,416 Epoch 1406/2000 +2025-03-19 09:05:07,766 Current Learning Rate: 0.0000022190 +2025-03-19 09:05:07,766 Train Loss: 0.0001293, Val Loss: 0.0002101 +2025-03-19 09:05:07,766 Epoch 1407/2000 +2025-03-19 09:07:52,299 Current Learning Rate: 0.0000030195 +2025-03-19 09:07:52,299 Train Loss: 0.0001293, Val Loss: 0.0002102 +2025-03-19 09:07:52,299 Epoch 1408/2000 +2025-03-19 09:10:36,555 Current Learning Rate: 0.0000039426 +2025-03-19 09:10:36,556 Train Loss: 0.0001294, Val Loss: 0.0002102 +2025-03-19 09:10:36,556 Epoch 1409/2000 +2025-03-19 09:13:20,836 Current Learning Rate: 0.0000049882 +2025-03-19 09:13:20,836 Train Loss: 0.0001294, Val Loss: 0.0002103 +2025-03-19 09:13:20,837 Epoch 1410/2000 +2025-03-19 09:16:04,999 Current Learning 
Rate: 0.0000061558 +2025-03-19 09:16:04,999 Train Loss: 0.0001294, Val Loss: 0.0002104 +2025-03-19 09:16:04,999 Epoch 1411/2000 +2025-03-19 09:18:49,328 Current Learning Rate: 0.0000074453 +2025-03-19 09:18:49,329 Train Loss: 0.0001294, Val Loss: 0.0002104 +2025-03-19 09:18:49,329 Epoch 1412/2000 +2025-03-19 09:21:33,597 Current Learning Rate: 0.0000088564 +2025-03-19 09:21:33,598 Train Loss: 0.0001295, Val Loss: 0.0002105 +2025-03-19 09:21:33,598 Epoch 1413/2000 +2025-03-19 09:24:18,400 Current Learning Rate: 0.0000103886 +2025-03-19 09:24:18,400 Train Loss: 0.0001295, Val Loss: 0.0002106 +2025-03-19 09:24:18,400 Epoch 1414/2000 +2025-03-19 09:27:02,310 Current Learning Rate: 0.0000120416 +2025-03-19 09:27:02,310 Train Loss: 0.0001296, Val Loss: 0.0002106 +2025-03-19 09:27:02,310 Epoch 1415/2000 +2025-03-19 09:29:46,745 Current Learning Rate: 0.0000138150 +2025-03-19 09:29:46,746 Train Loss: 0.0001296, Val Loss: 0.0002106 +2025-03-19 09:29:46,746 Epoch 1416/2000 +2025-03-19 09:32:31,104 Current Learning Rate: 0.0000157084 +2025-03-19 09:32:31,105 Train Loss: 0.0001296, Val Loss: 0.0002106 +2025-03-19 09:32:31,105 Epoch 1417/2000 +2025-03-19 09:35:15,732 Current Learning Rate: 0.0000177213 +2025-03-19 09:35:15,732 Train Loss: 0.0001297, Val Loss: 0.0002106 +2025-03-19 09:35:15,732 Epoch 1418/2000 +2025-03-19 09:38:00,315 Current Learning Rate: 0.0000198532 +2025-03-19 09:38:00,316 Train Loss: 0.0001297, Val Loss: 0.0002107 +2025-03-19 09:38:00,317 Epoch 1419/2000 +2025-03-19 09:40:44,698 Current Learning Rate: 0.0000221035 +2025-03-19 09:40:44,698 Train Loss: 0.0001298, Val Loss: 0.0002107 +2025-03-19 09:40:44,699 Epoch 1420/2000 +2025-03-19 09:43:29,150 Current Learning Rate: 0.0000244717 +2025-03-19 09:43:29,151 Train Loss: 0.0001298, Val Loss: 0.0002107 +2025-03-19 09:43:29,151 Epoch 1421/2000 +2025-03-19 09:46:13,740 Current Learning Rate: 0.0000269573 +2025-03-19 09:46:13,741 Train Loss: 0.0001299, Val Loss: 0.0002108 +2025-03-19 09:46:13,741 Epoch 1422/2000 
+2025-03-19 09:48:58,276 Current Learning Rate: 0.0000295596 +2025-03-19 09:48:58,276 Train Loss: 0.0001299, Val Loss: 0.0002109 +2025-03-19 09:48:58,277 Epoch 1423/2000 +2025-03-19 09:51:42,777 Current Learning Rate: 0.0000322780 +2025-03-19 09:51:42,777 Train Loss: 0.0001300, Val Loss: 0.0002110 +2025-03-19 09:51:42,778 Epoch 1424/2000 +2025-03-19 09:54:27,090 Current Learning Rate: 0.0000351118 +2025-03-19 09:54:27,091 Train Loss: 0.0001300, Val Loss: 0.0002111 +2025-03-19 09:54:27,091 Epoch 1425/2000 +2025-03-19 09:57:11,485 Current Learning Rate: 0.0000380602 +2025-03-19 09:57:11,485 Train Loss: 0.0001301, Val Loss: 0.0002112 +2025-03-19 09:57:11,485 Epoch 1426/2000 +2025-03-19 09:59:56,197 Current Learning Rate: 0.0000411227 +2025-03-19 09:59:56,198 Train Loss: 0.0001301, Val Loss: 0.0002113 +2025-03-19 09:59:56,198 Epoch 1427/2000 +2025-03-19 10:02:40,211 Current Learning Rate: 0.0000442984 +2025-03-19 10:02:40,212 Train Loss: 0.0001302, Val Loss: 0.0002114 +2025-03-19 10:02:40,212 Epoch 1428/2000 +2025-03-19 10:05:24,909 Current Learning Rate: 0.0000475865 +2025-03-19 10:05:24,909 Train Loss: 0.0001303, Val Loss: 0.0002114 +2025-03-19 10:05:24,909 Epoch 1429/2000 +2025-03-19 10:08:09,536 Current Learning Rate: 0.0000509862 +2025-03-19 10:08:09,536 Train Loss: 0.0001303, Val Loss: 0.0002115 +2025-03-19 10:08:09,536 Epoch 1430/2000 +2025-03-19 10:10:54,182 Current Learning Rate: 0.0000544967 +2025-03-19 10:10:54,182 Train Loss: 0.0001304, Val Loss: 0.0002116 +2025-03-19 10:10:54,182 Epoch 1431/2000 +2025-03-19 10:13:38,643 Current Learning Rate: 0.0000581172 +2025-03-19 10:13:38,644 Train Loss: 0.0001305, Val Loss: 0.0002116 +2025-03-19 10:13:38,644 Epoch 1432/2000 +2025-03-19 10:16:23,248 Current Learning Rate: 0.0000618467 +2025-03-19 10:16:23,249 Train Loss: 0.0001305, Val Loss: 0.0002117 +2025-03-19 10:16:23,249 Epoch 1433/2000 +2025-03-19 10:19:08,079 Current Learning Rate: 0.0000656842 +2025-03-19 10:19:08,079 Train Loss: 0.0001306, Val Loss: 0.0002118 
+2025-03-19 10:19:08,079 Epoch 1434/2000 +2025-03-19 10:21:52,429 Current Learning Rate: 0.0000696290 +2025-03-19 10:21:52,429 Train Loss: 0.0001307, Val Loss: 0.0002119 +2025-03-19 10:21:52,430 Epoch 1435/2000 +2025-03-19 10:24:36,734 Current Learning Rate: 0.0000736799 +2025-03-19 10:24:36,735 Train Loss: 0.0001308, Val Loss: 0.0002120 +2025-03-19 10:24:36,735 Epoch 1436/2000 +2025-03-19 10:27:21,170 Current Learning Rate: 0.0000778360 +2025-03-19 10:27:21,171 Train Loss: 0.0001308, Val Loss: 0.0002121 +2025-03-19 10:27:21,171 Epoch 1437/2000 +2025-03-19 10:30:05,114 Current Learning Rate: 0.0000820963 +2025-03-19 10:30:05,114 Train Loss: 0.0001309, Val Loss: 0.0002122 +2025-03-19 10:30:05,115 Epoch 1438/2000 +2025-03-19 10:32:49,410 Current Learning Rate: 0.0000864597 +2025-03-19 10:32:49,410 Train Loss: 0.0001310, Val Loss: 0.0002123 +2025-03-19 10:32:49,411 Epoch 1439/2000 +2025-03-19 10:35:34,223 Current Learning Rate: 0.0000909251 +2025-03-19 10:35:34,223 Train Loss: 0.0001311, Val Loss: 0.0002125 +2025-03-19 10:35:34,223 Epoch 1440/2000 +2025-03-19 10:38:18,543 Current Learning Rate: 0.0000954915 +2025-03-19 10:38:18,543 Train Loss: 0.0001313, Val Loss: 0.0002127 +2025-03-19 10:38:18,543 Epoch 1441/2000 +2025-03-19 10:41:02,633 Current Learning Rate: 0.0001001577 +2025-03-19 10:41:02,633 Train Loss: 0.0001314, Val Loss: 0.0002129 +2025-03-19 10:41:02,633 Epoch 1442/2000 +2025-03-19 10:43:46,826 Current Learning Rate: 0.0001049225 +2025-03-19 10:43:46,826 Train Loss: 0.0001314, Val Loss: 0.0002130 +2025-03-19 10:43:46,826 Epoch 1443/2000 +2025-03-19 10:46:31,348 Current Learning Rate: 0.0001097848 +2025-03-19 10:46:31,349 Train Loss: 0.0001316, Val Loss: 0.0002130 +2025-03-19 10:46:31,349 Epoch 1444/2000 +2025-03-19 10:49:15,783 Current Learning Rate: 0.0001147434 +2025-03-19 10:49:15,784 Train Loss: 0.0001318, Val Loss: 0.0002132 +2025-03-19 10:49:15,784 Epoch 1445/2000 +2025-03-19 10:52:00,509 Current Learning Rate: 0.0001197970 +2025-03-19 10:52:00,509 
Train Loss: 0.0001319, Val Loss: 0.0002134 +2025-03-19 10:52:00,510 Epoch 1446/2000 +2025-03-19 10:54:45,325 Current Learning Rate: 0.0001249445 +2025-03-19 10:54:45,325 Train Loss: 0.0001321, Val Loss: 0.0002135 +2025-03-19 10:54:45,326 Epoch 1447/2000 +2025-03-19 10:57:29,951 Current Learning Rate: 0.0001301845 +2025-03-19 10:57:29,951 Train Loss: 0.0001323, Val Loss: 0.0002136 +2025-03-19 10:57:29,952 Epoch 1448/2000 +2025-03-19 11:00:14,530 Current Learning Rate: 0.0001355157 +2025-03-19 11:00:14,530 Train Loss: 0.0001325, Val Loss: 0.0002138 +2025-03-19 11:00:14,531 Epoch 1449/2000 +2025-03-19 11:02:58,979 Current Learning Rate: 0.0001409369 +2025-03-19 11:02:58,980 Train Loss: 0.0001326, Val Loss: 0.0002140 +2025-03-19 11:02:58,980 Epoch 1450/2000 +2025-03-19 11:05:43,505 Current Learning Rate: 0.0001464466 +2025-03-19 11:05:43,506 Train Loss: 0.0001328, Val Loss: 0.0002141 +2025-03-19 11:05:43,506 Epoch 1451/2000 +2025-03-19 11:08:27,864 Current Learning Rate: 0.0001520436 +2025-03-19 11:08:27,865 Train Loss: 0.0001329, Val Loss: 0.0002140 +2025-03-19 11:08:27,865 Epoch 1452/2000 +2025-03-19 11:11:12,576 Current Learning Rate: 0.0001577264 +2025-03-19 11:11:12,576 Train Loss: 0.0001331, Val Loss: 0.0002142 +2025-03-19 11:11:12,576 Epoch 1453/2000 +2025-03-19 11:13:56,605 Current Learning Rate: 0.0001634937 +2025-03-19 11:13:56,606 Train Loss: 0.0001333, Val Loss: 0.0002145 +2025-03-19 11:13:56,606 Epoch 1454/2000 +2025-03-19 11:16:40,589 Current Learning Rate: 0.0001693441 +2025-03-19 11:16:40,589 Train Loss: 0.0001335, Val Loss: 0.0002148 +2025-03-19 11:16:40,590 Epoch 1455/2000 +2025-03-19 11:19:25,596 Current Learning Rate: 0.0001752760 +2025-03-19 11:19:25,596 Train Loss: 0.0001338, Val Loss: 0.0002151 +2025-03-19 11:19:25,596 Epoch 1456/2000 +2025-03-19 11:22:09,983 Current Learning Rate: 0.0001812880 +2025-03-19 11:22:09,983 Train Loss: 0.0001341, Val Loss: 0.0002156 +2025-03-19 11:22:09,984 Epoch 1457/2000 +2025-03-19 11:24:54,330 Current Learning 
Rate: 0.0001873787 +2025-03-19 11:24:54,330 Train Loss: 0.0001343, Val Loss: 0.0002155 +2025-03-19 11:24:54,331 Epoch 1458/2000 +2025-03-19 11:27:38,941 Current Learning Rate: 0.0001935465 +2025-03-19 11:27:38,941 Train Loss: 0.0001345, Val Loss: 0.0002154 +2025-03-19 11:27:38,941 Epoch 1459/2000 +2025-03-19 11:30:23,532 Current Learning Rate: 0.0001997899 +2025-03-19 11:30:23,533 Train Loss: 0.0001346, Val Loss: 0.0002158 +2025-03-19 11:30:23,533 Epoch 1460/2000 +2025-03-19 11:33:07,988 Current Learning Rate: 0.0002061074 +2025-03-19 11:33:07,989 Train Loss: 0.0001348, Val Loss: 0.0002165 +2025-03-19 11:33:07,989 Epoch 1461/2000 +2025-03-19 11:35:52,640 Current Learning Rate: 0.0002124974 +2025-03-19 11:35:52,641 Train Loss: 0.0001351, Val Loss: 0.0002168 +2025-03-19 11:35:52,641 Epoch 1462/2000 +2025-03-19 11:38:37,221 Current Learning Rate: 0.0002189583 +2025-03-19 11:38:37,222 Train Loss: 0.0001354, Val Loss: 0.0002167 +2025-03-19 11:38:37,222 Epoch 1463/2000 +2025-03-19 11:41:21,609 Current Learning Rate: 0.0002254886 +2025-03-19 11:41:21,609 Train Loss: 0.0001357, Val Loss: 0.0002170 +2025-03-19 11:41:21,610 Epoch 1464/2000 +2025-03-19 11:44:06,213 Current Learning Rate: 0.0002320866 +2025-03-19 11:44:06,213 Train Loss: 0.0001361, Val Loss: 0.0002175 +2025-03-19 11:44:06,213 Epoch 1465/2000 +2025-03-19 11:46:50,940 Current Learning Rate: 0.0002387507 +2025-03-19 11:46:50,941 Train Loss: 0.0001364, Val Loss: 0.0002185 +2025-03-19 11:46:50,941 Epoch 1466/2000 +2025-03-19 11:49:34,938 Current Learning Rate: 0.0002454793 +2025-03-19 11:49:34,938 Train Loss: 0.0001365, Val Loss: 0.0002200 +2025-03-19 11:49:34,938 Epoch 1467/2000 +2025-03-19 11:52:19,257 Current Learning Rate: 0.0002522707 +2025-03-19 11:52:19,258 Train Loss: 0.0001367, Val Loss: 0.0002193 +2025-03-19 11:52:19,258 Epoch 1468/2000 +2025-03-19 11:55:04,007 Current Learning Rate: 0.0002591232 +2025-03-19 11:55:04,007 Train Loss: 0.0001370, Val Loss: 0.0002182 +2025-03-19 11:55:04,008 Epoch 1469/2000 
+2025-03-19 11:57:48,706 Current Learning Rate: 0.0002660351 +2025-03-19 11:57:48,707 Train Loss: 0.0001372, Val Loss: 0.0002186 +2025-03-19 11:57:48,707 Epoch 1470/2000 +2025-03-19 12:00:33,942 Current Learning Rate: 0.0002730048 +2025-03-19 12:00:33,942 Train Loss: 0.0001375, Val Loss: 0.0002194 +2025-03-19 12:00:33,943 Epoch 1471/2000 +2025-03-19 12:03:18,396 Current Learning Rate: 0.0002800304 +2025-03-19 12:03:18,396 Train Loss: 0.0001379, Val Loss: 0.0002196 +2025-03-19 12:03:18,396 Epoch 1472/2000 +2025-03-19 12:06:02,755 Current Learning Rate: 0.0002871104 +2025-03-19 12:06:02,755 Train Loss: 0.0001383, Val Loss: 0.0002199 +2025-03-19 12:06:02,755 Epoch 1473/2000 +2025-03-19 12:08:47,273 Current Learning Rate: 0.0002942428 +2025-03-19 12:08:47,274 Train Loss: 0.0001385, Val Loss: 0.0002200 +2025-03-19 12:08:47,274 Epoch 1474/2000 +2025-03-19 12:11:31,829 Current Learning Rate: 0.0003014261 +2025-03-19 12:11:31,829 Train Loss: 0.0001387, Val Loss: 0.0002199 +2025-03-19 12:11:31,830 Epoch 1475/2000 +2025-03-19 12:14:16,206 Current Learning Rate: 0.0003086583 +2025-03-19 12:14:16,206 Train Loss: 0.0001390, Val Loss: 0.0002198 +2025-03-19 12:14:16,207 Epoch 1476/2000 +2025-03-19 12:17:00,848 Current Learning Rate: 0.0003159377 +2025-03-19 12:17:00,848 Train Loss: 0.0001395, Val Loss: 0.0002203 +2025-03-19 12:17:00,848 Epoch 1477/2000 +2025-03-19 12:19:45,198 Current Learning Rate: 0.0003232626 +2025-03-19 12:19:45,198 Train Loss: 0.0001400, Val Loss: 0.0002217 +2025-03-19 12:19:45,198 Epoch 1478/2000 +2025-03-19 12:22:29,487 Current Learning Rate: 0.0003306310 +2025-03-19 12:22:29,487 Train Loss: 0.0001407, Val Loss: 0.0002227 +2025-03-19 12:22:29,487 Epoch 1479/2000 +2025-03-19 12:25:13,875 Current Learning Rate: 0.0003380413 +2025-03-19 12:25:13,875 Train Loss: 0.0001410, Val Loss: 0.0002226 +2025-03-19 12:25:13,875 Epoch 1480/2000 +2025-03-19 12:27:58,308 Current Learning Rate: 0.0003454915 +2025-03-19 12:27:58,308 Train Loss: 0.0001406, Val Loss: 0.0002228 
+2025-03-19 12:27:58,309 Epoch 1481/2000 +2025-03-19 12:30:43,086 Current Learning Rate: 0.0003529798 +2025-03-19 12:30:43,086 Train Loss: 0.0001403, Val Loss: 0.0002210 +2025-03-19 12:30:43,087 Epoch 1482/2000 +2025-03-19 12:33:27,498 Current Learning Rate: 0.0003605044 +2025-03-19 12:33:27,498 Train Loss: 0.0001402, Val Loss: 0.0002200 +2025-03-19 12:33:27,498 Epoch 1483/2000 +2025-03-19 12:36:11,646 Current Learning Rate: 0.0003680635 +2025-03-19 12:36:11,646 Train Loss: 0.0001408, Val Loss: 0.0002217 +2025-03-19 12:36:11,646 Epoch 1484/2000 +2025-03-19 12:38:56,081 Current Learning Rate: 0.0003756551 +2025-03-19 12:38:56,081 Train Loss: 0.0001415, Val Loss: 0.0002225 +2025-03-19 12:38:56,082 Epoch 1485/2000 +2025-03-19 12:41:40,513 Current Learning Rate: 0.0003832773 +2025-03-19 12:41:40,514 Train Loss: 0.0001419, Val Loss: 0.0002233 +2025-03-19 12:41:40,514 Epoch 1486/2000 +2025-03-19 12:44:24,900 Current Learning Rate: 0.0003909284 +2025-03-19 12:44:24,901 Train Loss: 0.0001418, Val Loss: 0.0002262 +2025-03-19 12:44:24,901 Epoch 1487/2000 +2025-03-19 12:47:09,657 Current Learning Rate: 0.0003986064 +2025-03-19 12:47:09,657 Train Loss: 0.0001420, Val Loss: 0.0002301 +2025-03-19 12:47:09,657 Epoch 1488/2000 +2025-03-19 12:49:54,081 Current Learning Rate: 0.0004063093 +2025-03-19 12:49:54,082 Train Loss: 0.0001427, Val Loss: 0.0002328 +2025-03-19 12:49:54,082 Epoch 1489/2000 +2025-03-19 12:52:38,615 Current Learning Rate: 0.0004140354 +2025-03-19 12:52:38,616 Train Loss: 0.0001434, Val Loss: 0.0002286 +2025-03-19 12:52:38,616 Epoch 1490/2000 +2025-03-19 12:55:23,332 Current Learning Rate: 0.0004217828 +2025-03-19 12:55:23,332 Train Loss: 0.0001444, Val Loss: 0.0002308 +2025-03-19 12:55:23,332 Epoch 1491/2000 +2025-03-19 12:58:07,991 Current Learning Rate: 0.0004295494 +2025-03-19 12:58:07,992 Train Loss: 0.0001451, Val Loss: 0.0002366 +2025-03-19 12:58:07,992 Epoch 1492/2000 +2025-03-19 13:00:52,818 Current Learning Rate: 0.0004373334 +2025-03-19 13:00:52,820 
Train Loss: 0.0001454, Val Loss: 0.0002387 +2025-03-19 13:00:52,820 Epoch 1493/2000 +2025-03-19 13:03:37,276 Current Learning Rate: 0.0004451328 +2025-03-19 13:03:37,276 Train Loss: 0.0001458, Val Loss: 0.0002364 +2025-03-19 13:03:37,277 Epoch 1494/2000 +2025-03-19 13:06:22,031 Current Learning Rate: 0.0004529458 +2025-03-19 13:06:22,032 Train Loss: 0.0001465, Val Loss: 0.0002343 +2025-03-19 13:06:22,032 Epoch 1495/2000 +2025-03-19 13:09:06,602 Current Learning Rate: 0.0004607705 +2025-03-19 13:09:06,603 Train Loss: 0.0001470, Val Loss: 0.0002307 +2025-03-19 13:09:06,603 Epoch 1496/2000 +2025-03-19 13:11:51,111 Current Learning Rate: 0.0004686047 +2025-03-19 13:11:51,111 Train Loss: 0.0001466, Val Loss: 0.0002273 +2025-03-19 13:11:51,111 Epoch 1497/2000 +2025-03-19 13:14:35,560 Current Learning Rate: 0.0004764468 +2025-03-19 13:14:35,560 Train Loss: 0.0001460, Val Loss: 0.0002258 +2025-03-19 13:14:35,560 Epoch 1498/2000 +2025-03-19 13:17:20,064 Current Learning Rate: 0.0004842946 +2025-03-19 13:17:20,065 Train Loss: 0.0001461, Val Loss: 0.0002269 +2025-03-19 13:17:20,065 Epoch 1499/2000 +2025-03-19 13:20:04,351 Current Learning Rate: 0.0004921463 +2025-03-19 13:20:04,351 Train Loss: 0.0001463, Val Loss: 0.0002292 +2025-03-19 13:20:04,351 Epoch 1500/2000 +2025-03-19 13:22:48,760 Current Learning Rate: 0.0005000000 +2025-03-19 13:22:48,760 Train Loss: 0.0001468, Val Loss: 0.0002296 +2025-03-19 13:22:48,760 Epoch 1501/2000 +2025-03-19 13:25:33,057 Current Learning Rate: 0.0005078537 +2025-03-19 13:25:33,057 Train Loss: 0.0001474, Val Loss: 0.0002293 +2025-03-19 13:25:33,057 Epoch 1502/2000 +2025-03-19 13:28:17,776 Current Learning Rate: 0.0005157054 +2025-03-19 13:28:17,777 Train Loss: 0.0001474, Val Loss: 0.0002281 +2025-03-19 13:28:17,777 Epoch 1503/2000 +2025-03-19 13:31:02,568 Current Learning Rate: 0.0005235532 +2025-03-19 13:31:02,568 Train Loss: 0.0001473, Val Loss: 0.0002292 +2025-03-19 13:31:02,568 Epoch 1504/2000 +2025-03-19 13:33:46,817 Current Learning 
Rate: 0.0005313953 +2025-03-19 13:33:46,817 Train Loss: 0.0001475, Val Loss: 0.0002291 +2025-03-19 13:33:46,817 Epoch 1505/2000 +2025-03-19 13:36:30,871 Current Learning Rate: 0.0005392295 +2025-03-19 13:36:30,871 Train Loss: 0.0001478, Val Loss: 0.0002308 +2025-03-19 13:36:30,872 Epoch 1506/2000 +2025-03-19 13:39:15,238 Current Learning Rate: 0.0005470542 +2025-03-19 13:39:15,238 Train Loss: 0.0001482, Val Loss: 0.0002295 +2025-03-19 13:39:15,238 Epoch 1507/2000 +2025-03-19 13:41:59,449 Current Learning Rate: 0.0005548672 +2025-03-19 13:41:59,449 Train Loss: 0.0001487, Val Loss: 0.0002324 +2025-03-19 13:41:59,449 Epoch 1508/2000 +2025-03-19 13:44:43,825 Current Learning Rate: 0.0005626666 +2025-03-19 13:44:43,825 Train Loss: 0.0001492, Val Loss: 0.0002345 +2025-03-19 13:44:43,825 Epoch 1509/2000 +2025-03-19 13:47:28,470 Current Learning Rate: 0.0005704506 +2025-03-19 13:47:28,470 Train Loss: 0.0001499, Val Loss: 0.0002341 +2025-03-19 13:47:28,470 Epoch 1510/2000 +2025-03-19 13:50:12,979 Current Learning Rate: 0.0005782172 +2025-03-19 13:50:12,980 Train Loss: 0.0001506, Val Loss: 0.0002377 +2025-03-19 13:50:12,980 Epoch 1511/2000 +2025-03-19 13:52:57,312 Current Learning Rate: 0.0005859646 +2025-03-19 13:52:57,313 Train Loss: 0.0001513, Val Loss: 0.0002400 +2025-03-19 13:52:57,313 Epoch 1512/2000 +2025-03-19 13:55:41,885 Current Learning Rate: 0.0005936907 +2025-03-19 13:55:41,886 Train Loss: 0.0001519, Val Loss: 0.0002401 +2025-03-19 13:55:41,886 Epoch 1513/2000 +2025-03-19 13:58:26,251 Current Learning Rate: 0.0006013936 +2025-03-19 13:58:26,251 Train Loss: 0.0001527, Val Loss: 0.0002366 +2025-03-19 13:58:26,251 Epoch 1514/2000 +2025-03-19 14:01:11,218 Current Learning Rate: 0.0006090716 +2025-03-19 14:01:11,219 Train Loss: 0.0001527, Val Loss: 0.0002349 +2025-03-19 14:01:11,219 Epoch 1515/2000 +2025-03-19 14:03:55,244 Current Learning Rate: 0.0006167227 +2025-03-19 14:03:55,245 Train Loss: 0.0001527, Val Loss: 0.0002358 +2025-03-19 14:03:55,245 Epoch 1516/2000 
+2025-03-19 14:06:40,475 Current Learning Rate: 0.0006243449 +2025-03-19 14:06:40,476 Train Loss: 0.0001518, Val Loss: 0.0002326 +2025-03-19 14:06:40,476 Epoch 1517/2000 +2025-03-19 14:09:24,883 Current Learning Rate: 0.0006319365 +2025-03-19 14:09:24,884 Train Loss: 0.0001519, Val Loss: 0.0002323 +2025-03-19 14:09:24,884 Epoch 1518/2000 +2025-03-19 14:12:09,385 Current Learning Rate: 0.0006394956 +2025-03-19 14:12:09,385 Train Loss: 0.0001528, Val Loss: 0.0002346 +2025-03-19 14:12:09,386 Epoch 1519/2000 +2025-03-19 14:14:54,083 Current Learning Rate: 0.0006470202 +2025-03-19 14:14:54,084 Train Loss: 0.0001534, Val Loss: 0.0002398 +2025-03-19 14:14:54,084 Epoch 1520/2000 +2025-03-19 14:17:38,615 Current Learning Rate: 0.0006545085 +2025-03-19 14:17:38,616 Train Loss: 0.0001541, Val Loss: 0.0002449 +2025-03-19 14:17:38,616 Epoch 1521/2000 +2025-03-19 14:20:22,627 Current Learning Rate: 0.0006619587 +2025-03-19 14:20:22,627 Train Loss: 0.0001552, Val Loss: 0.0002430 +2025-03-19 14:20:22,627 Epoch 1522/2000 +2025-03-19 14:23:07,244 Current Learning Rate: 0.0006693690 +2025-03-19 14:23:07,244 Train Loss: 0.0001554, Val Loss: 0.0002408 +2025-03-19 14:23:07,244 Epoch 1523/2000 +2025-03-19 14:25:52,048 Current Learning Rate: 0.0006767374 +2025-03-19 14:25:52,048 Train Loss: 0.0001560, Val Loss: 0.0002433 +2025-03-19 14:25:52,049 Epoch 1524/2000 +2025-03-19 14:28:36,610 Current Learning Rate: 0.0006840623 +2025-03-19 14:28:36,610 Train Loss: 0.0001557, Val Loss: 0.0002466 +2025-03-19 14:28:36,610 Epoch 1525/2000 +2025-03-19 14:31:20,918 Current Learning Rate: 0.0006913417 +2025-03-19 14:31:20,918 Train Loss: 0.0001558, Val Loss: 0.0002412 +2025-03-19 14:31:20,919 Epoch 1526/2000 +2025-03-19 14:34:05,889 Current Learning Rate: 0.0006985739 +2025-03-19 14:34:05,889 Train Loss: 0.0001565, Val Loss: 0.0002368 +2025-03-19 14:34:05,890 Epoch 1527/2000 +2025-03-19 14:36:50,504 Current Learning Rate: 0.0007057572 +2025-03-19 14:36:50,504 Train Loss: 0.0001571, Val Loss: 0.0002357 
+2025-03-19 14:36:50,504 Epoch 1528/2000 +2025-03-19 14:39:34,913 Current Learning Rate: 0.0007128896 +2025-03-19 14:39:34,914 Train Loss: 0.0001574, Val Loss: 0.0002365 +2025-03-19 14:39:34,914 Epoch 1529/2000 +2025-03-19 14:42:19,568 Current Learning Rate: 0.0007199696 +2025-03-19 14:42:19,568 Train Loss: 0.0001580, Val Loss: 0.0002349 +2025-03-19 14:42:19,569 Epoch 1530/2000 +2025-03-19 14:45:04,031 Current Learning Rate: 0.0007269952 +2025-03-19 14:45:04,032 Train Loss: 0.0001585, Val Loss: 0.0002365 +2025-03-19 14:45:04,032 Epoch 1531/2000 +2025-03-19 14:47:48,521 Current Learning Rate: 0.0007339649 +2025-03-19 14:47:48,521 Train Loss: 0.0001588, Val Loss: 0.0002400 +2025-03-19 14:47:48,521 Epoch 1532/2000 +2025-03-19 14:50:33,086 Current Learning Rate: 0.0007408768 +2025-03-19 14:50:33,086 Train Loss: 0.0001590, Val Loss: 0.0002377 +2025-03-19 14:50:33,087 Epoch 1533/2000 +2025-03-19 14:53:17,506 Current Learning Rate: 0.0007477293 +2025-03-19 14:53:17,506 Train Loss: 0.0001591, Val Loss: 0.0002395 +2025-03-19 14:53:17,507 Epoch 1534/2000 +2025-03-19 14:56:02,264 Current Learning Rate: 0.0007545207 +2025-03-19 14:56:02,265 Train Loss: 0.0001604, Val Loss: 0.0002429 +2025-03-19 14:56:02,265 Epoch 1535/2000 +2025-03-19 14:58:46,659 Current Learning Rate: 0.0007612493 +2025-03-19 14:58:46,659 Train Loss: 0.0001607, Val Loss: 0.0002446 +2025-03-19 14:58:46,659 Epoch 1536/2000 +2025-03-19 15:01:31,154 Current Learning Rate: 0.0007679134 +2025-03-19 15:01:31,154 Train Loss: 0.0001608, Val Loss: 0.0002429 +2025-03-19 15:01:31,155 Epoch 1537/2000 +2025-03-19 15:04:15,753 Current Learning Rate: 0.0007745114 +2025-03-19 15:04:15,753 Train Loss: 0.0001606, Val Loss: 0.0002427 +2025-03-19 15:04:15,754 Epoch 1538/2000 +2025-03-19 15:07:00,284 Current Learning Rate: 0.0007810417 +2025-03-19 15:07:00,285 Train Loss: 0.0001612, Val Loss: 0.0002416 +2025-03-19 15:07:00,285 Epoch 1539/2000 +2025-03-19 15:09:44,633 Current Learning Rate: 0.0007875026 +2025-03-19 15:09:44,633 
Train Loss: 0.0001621, Val Loss: 0.0002421 +2025-03-19 15:09:44,633 Epoch 1540/2000 +2025-03-19 15:12:29,031 Current Learning Rate: 0.0007938926 +2025-03-19 15:12:29,031 Train Loss: 0.0001617, Val Loss: 0.0002433 +2025-03-19 15:12:29,032 Epoch 1541/2000 +2025-03-19 15:15:13,655 Current Learning Rate: 0.0008002101 +2025-03-19 15:15:13,656 Train Loss: 0.0001622, Val Loss: 0.0002492 +2025-03-19 15:15:13,656 Epoch 1542/2000 +2025-03-19 15:17:58,357 Current Learning Rate: 0.0008064535 +2025-03-19 15:17:58,358 Train Loss: 0.0001633, Val Loss: 0.0002476 +2025-03-19 15:17:58,358 Epoch 1543/2000 +2025-03-19 15:20:42,887 Current Learning Rate: 0.0008126213 +2025-03-19 15:20:42,888 Train Loss: 0.0001633, Val Loss: 0.0002493 +2025-03-19 15:20:42,888 Epoch 1544/2000 +2025-03-19 15:23:27,269 Current Learning Rate: 0.0008187120 +2025-03-19 15:23:27,270 Train Loss: 0.0001634, Val Loss: 0.0002501 +2025-03-19 15:23:27,270 Epoch 1545/2000 +2025-03-19 15:26:11,769 Current Learning Rate: 0.0008247240 +2025-03-19 15:26:11,769 Train Loss: 0.0001632, Val Loss: 0.0002454 +2025-03-19 15:26:11,769 Epoch 1546/2000 +2025-03-19 15:28:56,094 Current Learning Rate: 0.0008306559 +2025-03-19 15:28:56,094 Train Loss: 0.0001637, Val Loss: 0.0002445 +2025-03-19 15:28:56,094 Epoch 1547/2000 +2025-03-19 15:31:40,702 Current Learning Rate: 0.0008365063 +2025-03-19 15:31:40,702 Train Loss: 0.0001640, Val Loss: 0.0002463 +2025-03-19 15:31:40,702 Epoch 1548/2000 +2025-03-19 15:34:24,612 Current Learning Rate: 0.0008422736 +2025-03-19 15:34:24,613 Train Loss: 0.0001635, Val Loss: 0.0002471 +2025-03-19 15:34:24,613 Epoch 1549/2000 +2025-03-19 15:37:09,249 Current Learning Rate: 0.0008479564 +2025-03-19 15:37:09,250 Train Loss: 0.0001624, Val Loss: 0.0002451 +2025-03-19 15:37:09,250 Epoch 1550/2000 +2025-03-19 15:39:53,853 Current Learning Rate: 0.0008535534 +2025-03-19 15:39:53,853 Train Loss: 0.0001628, Val Loss: 0.0002457 +2025-03-19 15:39:53,854 Epoch 1551/2000 +2025-03-19 15:42:38,175 Current Learning 
Rate: 0.0008590631 +2025-03-19 15:42:38,175 Train Loss: 0.0001643, Val Loss: 0.0002461 +2025-03-19 15:42:38,175 Epoch 1552/2000 +2025-03-19 15:45:22,165 Current Learning Rate: 0.0008644843 +2025-03-19 15:45:22,165 Train Loss: 0.0001647, Val Loss: 0.0002449 +2025-03-19 15:45:22,165 Epoch 1553/2000 +2025-03-19 15:48:06,320 Current Learning Rate: 0.0008698155 +2025-03-19 15:48:06,321 Train Loss: 0.0001653, Val Loss: 0.0002445 +2025-03-19 15:48:06,321 Epoch 1554/2000 +2025-03-19 15:50:51,081 Current Learning Rate: 0.0008750555 +2025-03-19 15:50:51,081 Train Loss: 0.0001653, Val Loss: 0.0002465 +2025-03-19 15:50:51,082 Epoch 1555/2000 +2025-03-19 15:53:35,255 Current Learning Rate: 0.0008802030 +2025-03-19 15:53:35,255 Train Loss: 0.0001657, Val Loss: 0.0002497 +2025-03-19 15:53:35,255 Epoch 1556/2000 +2025-03-19 15:56:20,133 Current Learning Rate: 0.0008852566 +2025-03-19 15:56:20,134 Train Loss: 0.0001663, Val Loss: 0.0002499 +2025-03-19 15:56:20,134 Epoch 1557/2000 +2025-03-19 15:59:04,241 Current Learning Rate: 0.0008902152 +2025-03-19 15:59:04,242 Train Loss: 0.0001668, Val Loss: 0.0002492 +2025-03-19 15:59:04,242 Epoch 1558/2000 +2025-03-19 16:01:48,772 Current Learning Rate: 0.0008950775 +2025-03-19 16:01:48,773 Train Loss: 0.0001667, Val Loss: 0.0002505 +2025-03-19 16:01:48,773 Epoch 1559/2000 +2025-03-19 16:04:33,105 Current Learning Rate: 0.0008998423 +2025-03-19 16:04:33,105 Train Loss: 0.0001671, Val Loss: 0.0002494 +2025-03-19 16:04:33,106 Epoch 1560/2000 +2025-03-19 16:07:17,569 Current Learning Rate: 0.0009045085 +2025-03-19 16:07:17,569 Train Loss: 0.0001671, Val Loss: 0.0002479 +2025-03-19 16:07:17,569 Epoch 1561/2000 +2025-03-19 16:10:02,059 Current Learning Rate: 0.0009090749 +2025-03-19 16:10:02,059 Train Loss: 0.0001666, Val Loss: 0.0002453 +2025-03-19 16:10:02,059 Epoch 1562/2000 +2025-03-19 16:12:46,577 Current Learning Rate: 0.0009135403 +2025-03-19 16:12:46,578 Train Loss: 0.0001659, Val Loss: 0.0002455 +2025-03-19 16:12:46,578 Epoch 1563/2000 
+2025-03-19 16:15:30,495 Current Learning Rate: 0.0009179037 +2025-03-19 16:15:30,496 Train Loss: 0.0001668, Val Loss: 0.0002510 +2025-03-19 16:15:30,496 Epoch 1564/2000 +2025-03-19 16:18:14,991 Current Learning Rate: 0.0009221640 +2025-03-19 16:18:14,992 Train Loss: 0.0001671, Val Loss: 0.0002510 +2025-03-19 16:18:14,992 Epoch 1565/2000 +2025-03-19 16:20:58,797 Current Learning Rate: 0.0009263201 +2025-03-19 16:20:58,797 Train Loss: 0.0001681, Val Loss: 0.0002542 +2025-03-19 16:20:58,798 Epoch 1566/2000 +2025-03-19 16:23:42,930 Current Learning Rate: 0.0009303710 +2025-03-19 16:23:42,931 Train Loss: 0.0001679, Val Loss: 0.0002539 +2025-03-19 16:23:42,931 Epoch 1567/2000 +2025-03-19 16:26:27,234 Current Learning Rate: 0.0009343158 +2025-03-19 16:26:27,235 Train Loss: 0.0001675, Val Loss: 0.0002528 +2025-03-19 16:26:27,235 Epoch 1568/2000 +2025-03-19 16:29:11,340 Current Learning Rate: 0.0009381533 +2025-03-19 16:29:11,341 Train Loss: 0.0001672, Val Loss: 0.0002500 +2025-03-19 16:29:11,341 Epoch 1569/2000 +2025-03-19 16:31:55,548 Current Learning Rate: 0.0009418828 +2025-03-19 16:31:55,548 Train Loss: 0.0001687, Val Loss: 0.0002477 +2025-03-19 16:31:55,549 Epoch 1570/2000 +2025-03-19 16:34:39,949 Current Learning Rate: 0.0009455033 +2025-03-19 16:34:39,949 Train Loss: 0.0001687, Val Loss: 0.0002491 +2025-03-19 16:34:39,949 Epoch 1571/2000 +2025-03-19 16:37:23,932 Current Learning Rate: 0.0009490138 +2025-03-19 16:37:23,932 Train Loss: 0.0001691, Val Loss: 0.0002475 +2025-03-19 16:37:23,932 Epoch 1572/2000 +2025-03-19 16:40:08,533 Current Learning Rate: 0.0009524135 +2025-03-19 16:40:08,533 Train Loss: 0.0001695, Val Loss: 0.0002491 +2025-03-19 16:40:08,534 Epoch 1573/2000 +2025-03-19 16:42:52,437 Current Learning Rate: 0.0009557016 +2025-03-19 16:42:52,437 Train Loss: 0.0001701, Val Loss: 0.0002507 +2025-03-19 16:42:52,438 Epoch 1574/2000 +2025-03-19 16:45:37,240 Current Learning Rate: 0.0009588773 +2025-03-19 16:45:37,241 Train Loss: 0.0001706, Val Loss: 0.0002490 
+2025-03-19 16:45:37,241 Epoch 1575/2000 +2025-03-19 16:48:21,770 Current Learning Rate: 0.0009619398 +2025-03-19 16:48:21,770 Train Loss: 0.0001709, Val Loss: 0.0002488 +2025-03-19 16:48:21,770 Epoch 1576/2000 +2025-03-19 16:51:05,837 Current Learning Rate: 0.0009648882 +2025-03-19 16:51:05,838 Train Loss: 0.0001706, Val Loss: 0.0002498 +2025-03-19 16:51:05,838 Epoch 1577/2000 +2025-03-19 16:53:50,688 Current Learning Rate: 0.0009677220 +2025-03-19 16:53:50,688 Train Loss: 0.0001709, Val Loss: 0.0002519 +2025-03-19 16:53:50,688 Epoch 1578/2000 +2025-03-19 16:56:35,651 Current Learning Rate: 0.0009704404 +2025-03-19 16:56:35,651 Train Loss: 0.0001707, Val Loss: 0.0002511 +2025-03-19 16:56:35,651 Epoch 1579/2000 +2025-03-19 16:59:20,335 Current Learning Rate: 0.0009730427 +2025-03-19 16:59:20,335 Train Loss: 0.0001700, Val Loss: 0.0002481 +2025-03-19 16:59:20,336 Epoch 1580/2000 +2025-03-19 17:02:04,824 Current Learning Rate: 0.0009755283 +2025-03-19 17:02:04,825 Train Loss: 0.0001693, Val Loss: 0.0002498 +2025-03-19 17:02:04,825 Epoch 1581/2000 +2025-03-19 17:04:49,332 Current Learning Rate: 0.0009778965 +2025-03-19 17:04:49,333 Train Loss: 0.0001693, Val Loss: 0.0002488 +2025-03-19 17:04:49,333 Epoch 1582/2000 +2025-03-19 17:07:33,919 Current Learning Rate: 0.0009801468 +2025-03-19 17:07:33,920 Train Loss: 0.0001698, Val Loss: 0.0002479 +2025-03-19 17:07:33,920 Epoch 1583/2000 +2025-03-19 17:10:18,209 Current Learning Rate: 0.0009822787 +2025-03-19 17:10:18,210 Train Loss: 0.0001700, Val Loss: 0.0002458 +2025-03-19 17:10:18,210 Epoch 1584/2000 +2025-03-19 17:13:02,511 Current Learning Rate: 0.0009842916 +2025-03-19 17:13:02,512 Train Loss: 0.0001692, Val Loss: 0.0002465 +2025-03-19 17:13:02,512 Epoch 1585/2000 +2025-03-19 17:15:47,220 Current Learning Rate: 0.0009861850 +2025-03-19 17:15:47,221 Train Loss: 0.0001694, Val Loss: 0.0002471 +2025-03-19 17:15:47,221 Epoch 1586/2000 +2025-03-19 17:18:31,749 Current Learning Rate: 0.0009879584 +2025-03-19 17:18:31,750 
Train Loss: 0.0001696, Val Loss: 0.0002482 +2025-03-19 17:18:31,750 Epoch 1587/2000 +2025-03-19 17:21:16,357 Current Learning Rate: 0.0009896114 +2025-03-19 17:21:16,357 Train Loss: 0.0001702, Val Loss: 0.0002479 +2025-03-19 17:21:16,358 Epoch 1588/2000 +2025-03-19 17:24:00,915 Current Learning Rate: 0.0009911436 +2025-03-19 17:24:00,915 Train Loss: 0.0001706, Val Loss: 0.0002506 +2025-03-19 17:24:00,916 Epoch 1589/2000 +2025-03-19 17:26:45,381 Current Learning Rate: 0.0009925547 +2025-03-19 17:26:45,382 Train Loss: 0.0001709, Val Loss: 0.0002509 +2025-03-19 17:26:45,382 Epoch 1590/2000 +2025-03-19 17:29:29,844 Current Learning Rate: 0.0009938442 +2025-03-19 17:29:29,844 Train Loss: 0.0001705, Val Loss: 0.0002503 +2025-03-19 17:29:29,844 Epoch 1591/2000 +2025-03-19 17:32:14,230 Current Learning Rate: 0.0009950118 +2025-03-19 17:32:14,230 Train Loss: 0.0001694, Val Loss: 0.0002513 +2025-03-19 17:32:14,230 Epoch 1592/2000 +2025-03-19 17:34:58,624 Current Learning Rate: 0.0009960574 +2025-03-19 17:34:58,625 Train Loss: 0.0001688, Val Loss: 0.0002580 +2025-03-19 17:34:58,625 Epoch 1593/2000 +2025-03-19 17:37:43,314 Current Learning Rate: 0.0009969805 +2025-03-19 17:37:43,314 Train Loss: 0.0001699, Val Loss: 0.0002538 +2025-03-19 17:37:43,314 Epoch 1594/2000 +2025-03-19 17:40:27,819 Current Learning Rate: 0.0009977810 +2025-03-19 17:40:27,820 Train Loss: 0.0001702, Val Loss: 0.0002539 +2025-03-19 17:40:27,820 Epoch 1595/2000 +2025-03-19 17:43:12,251 Current Learning Rate: 0.0009984587 +2025-03-19 17:43:12,252 Train Loss: 0.0001700, Val Loss: 0.0002527 +2025-03-19 17:43:12,252 Epoch 1596/2000 +2025-03-19 17:45:56,651 Current Learning Rate: 0.0009990134 +2025-03-19 17:45:56,651 Train Loss: 0.0001703, Val Loss: 0.0002526 +2025-03-19 17:45:56,652 Epoch 1597/2000 +2025-03-19 17:48:41,293 Current Learning Rate: 0.0009994449 +2025-03-19 17:48:41,293 Train Loss: 0.0001701, Val Loss: 0.0002531 +2025-03-19 17:48:41,293 Epoch 1598/2000 +2025-03-19 17:51:25,629 Current Learning 
Rate: 0.0009997533 +2025-03-19 17:51:25,629 Train Loss: 0.0001700, Val Loss: 0.0002511 +2025-03-19 17:51:25,629 Epoch 1599/2000 +2025-03-19 17:54:09,801 Current Learning Rate: 0.0009999383 +2025-03-19 17:54:09,801 Train Loss: 0.0001705, Val Loss: 0.0002505 +2025-03-19 17:54:09,801 Epoch 1600/2000 +2025-03-19 17:56:54,727 Current Learning Rate: 0.0010000000 +2025-03-19 17:56:54,727 Train Loss: 0.0001708, Val Loss: 0.0002507 +2025-03-19 17:56:54,727 Epoch 1601/2000 +2025-03-19 17:59:39,222 Current Learning Rate: 0.0009999383 +2025-03-19 17:59:39,223 Train Loss: 0.0001699, Val Loss: 0.0002493 +2025-03-19 17:59:39,223 Epoch 1602/2000 +2025-03-19 18:02:24,262 Current Learning Rate: 0.0009997533 +2025-03-19 18:02:24,262 Train Loss: 0.0001687, Val Loss: 0.0002488 +2025-03-19 18:02:24,262 Epoch 1603/2000 +2025-03-19 18:05:08,967 Current Learning Rate: 0.0009994449 +2025-03-19 18:05:08,968 Train Loss: 0.0001690, Val Loss: 0.0002497 +2025-03-19 18:05:08,968 Epoch 1604/2000 +2025-03-19 18:07:53,284 Current Learning Rate: 0.0009990134 +2025-03-19 18:07:53,285 Train Loss: 0.0001697, Val Loss: 0.0002507 +2025-03-19 18:07:53,285 Epoch 1605/2000 +2025-03-19 18:10:38,111 Current Learning Rate: 0.0009984587 +2025-03-19 18:10:38,111 Train Loss: 0.0001706, Val Loss: 0.0002489 +2025-03-19 18:10:38,111 Epoch 1606/2000 +2025-03-19 18:13:23,067 Current Learning Rate: 0.0009977810 +2025-03-19 18:13:23,067 Train Loss: 0.0001702, Val Loss: 0.0002509 +2025-03-19 18:13:23,067 Epoch 1607/2000 +2025-03-19 18:16:07,557 Current Learning Rate: 0.0009969805 +2025-03-19 18:16:07,557 Train Loss: 0.0001695, Val Loss: 0.0002489 +2025-03-19 18:16:07,558 Epoch 1608/2000 +2025-03-19 18:18:52,117 Current Learning Rate: 0.0009960574 +2025-03-19 18:18:52,118 Train Loss: 0.0001697, Val Loss: 0.0002483 +2025-03-19 18:18:52,118 Epoch 1609/2000 +2025-03-19 18:21:36,873 Current Learning Rate: 0.0009950118 +2025-03-19 18:21:36,874 Train Loss: 0.0001698, Val Loss: 0.0002519 +2025-03-19 18:21:36,874 Epoch 1610/2000 
+2025-03-19 18:24:21,364 Current Learning Rate: 0.0009938442 +2025-03-19 18:24:21,365 Train Loss: 0.0001690, Val Loss: 0.0002551 +2025-03-19 18:24:21,365 Epoch 1611/2000 +2025-03-19 18:27:05,980 Current Learning Rate: 0.0009925547 +2025-03-19 18:27:05,981 Train Loss: 0.0001692, Val Loss: 0.0002565 +2025-03-19 18:27:05,981 Epoch 1612/2000 +2025-03-19 18:29:50,394 Current Learning Rate: 0.0009911436 +2025-03-19 18:29:50,395 Train Loss: 0.0001685, Val Loss: 0.0002588 +2025-03-19 18:29:50,395 Epoch 1613/2000 +2025-03-19 18:32:34,792 Current Learning Rate: 0.0009896114 +2025-03-19 18:32:34,792 Train Loss: 0.0001680, Val Loss: 0.0002579 +2025-03-19 18:32:34,792 Epoch 1614/2000 +2025-03-19 18:35:19,339 Current Learning Rate: 0.0009879584 +2025-03-19 18:35:19,339 Train Loss: 0.0001681, Val Loss: 0.0002530 +2025-03-19 18:35:19,340 Epoch 1615/2000 +2025-03-19 18:38:03,750 Current Learning Rate: 0.0009861850 +2025-03-19 18:38:03,750 Train Loss: 0.0001692, Val Loss: 0.0002545 +2025-03-19 18:38:03,751 Epoch 1616/2000 +2025-03-19 18:40:48,269 Current Learning Rate: 0.0009842916 +2025-03-19 18:40:48,270 Train Loss: 0.0001707, Val Loss: 0.0002524 +2025-03-19 18:40:48,270 Epoch 1617/2000 +2025-03-19 18:43:32,806 Current Learning Rate: 0.0009822787 +2025-03-19 18:43:32,807 Train Loss: 0.0001706, Val Loss: 0.0002532 +2025-03-19 18:43:32,807 Epoch 1618/2000 +2025-03-19 18:46:17,284 Current Learning Rate: 0.0009801468 +2025-03-19 18:46:17,285 Train Loss: 0.0001704, Val Loss: 0.0002536 +2025-03-19 18:46:17,285 Epoch 1619/2000 +2025-03-19 18:49:01,761 Current Learning Rate: 0.0009778965 +2025-03-19 18:49:01,762 Train Loss: 0.0001702, Val Loss: 0.0002479 +2025-03-19 18:49:01,763 Epoch 1620/2000 +2025-03-19 18:51:46,248 Current Learning Rate: 0.0009755283 +2025-03-19 18:51:46,248 Train Loss: 0.0001691, Val Loss: 0.0002466 +2025-03-19 18:51:46,248 Epoch 1621/2000 +2025-03-19 18:54:30,836 Current Learning Rate: 0.0009730427 +2025-03-19 18:54:30,836 Train Loss: 0.0001676, Val Loss: 0.0002446 
+2025-03-19 18:54:30,836 Epoch 1622/2000 +2025-03-19 18:57:14,989 Current Learning Rate: 0.0009704404 +2025-03-19 18:57:14,989 Train Loss: 0.0001671, Val Loss: 0.0002452 +2025-03-19 18:57:14,989 Epoch 1623/2000 +2025-03-19 18:59:59,668 Current Learning Rate: 0.0009677220 +2025-03-19 18:59:59,668 Train Loss: 0.0001676, Val Loss: 0.0002485 +2025-03-19 18:59:59,668 Epoch 1624/2000 +2025-03-19 19:02:44,066 Current Learning Rate: 0.0009648882 +2025-03-19 19:02:44,066 Train Loss: 0.0001681, Val Loss: 0.0002532 +2025-03-19 19:02:44,066 Epoch 1625/2000 +2025-03-19 19:05:28,676 Current Learning Rate: 0.0009619398 +2025-03-19 19:05:28,676 Train Loss: 0.0001681, Val Loss: 0.0002615 +2025-03-19 19:05:28,676 Epoch 1626/2000 +2025-03-19 19:08:13,073 Current Learning Rate: 0.0009588773 +2025-03-19 19:08:13,073 Train Loss: 0.0001681, Val Loss: 0.0002543 +2025-03-19 19:08:13,073 Epoch 1627/2000 +2025-03-19 19:10:57,373 Current Learning Rate: 0.0009557016 +2025-03-19 19:10:57,373 Train Loss: 0.0001678, Val Loss: 0.0002479 +2025-03-19 19:10:57,373 Epoch 1628/2000 +2025-03-19 19:13:41,754 Current Learning Rate: 0.0009524135 +2025-03-19 19:13:41,754 Train Loss: 0.0001673, Val Loss: 0.0002444 +2025-03-19 19:13:41,754 Epoch 1629/2000 +2025-03-19 19:16:26,094 Current Learning Rate: 0.0009490138 +2025-03-19 19:16:26,094 Train Loss: 0.0001667, Val Loss: 0.0002462 +2025-03-19 19:16:26,094 Epoch 1630/2000 +2025-03-19 19:19:10,717 Current Learning Rate: 0.0009455033 +2025-03-19 19:19:10,717 Train Loss: 0.0001661, Val Loss: 0.0002456 +2025-03-19 19:19:10,718 Epoch 1631/2000 +2025-03-19 19:21:55,120 Current Learning Rate: 0.0009418828 +2025-03-19 19:21:55,121 Train Loss: 0.0001671, Val Loss: 0.0002456 +2025-03-19 19:21:55,121 Epoch 1632/2000 +2025-03-19 19:24:39,634 Current Learning Rate: 0.0009381533 +2025-03-19 19:24:39,635 Train Loss: 0.0001673, Val Loss: 0.0002458 +2025-03-19 19:24:39,635 Epoch 1633/2000 +2025-03-19 19:27:24,016 Current Learning Rate: 0.0009343158 +2025-03-19 19:27:24,016 
Train Loss: 0.0001671, Val Loss: 0.0002486 +2025-03-19 19:27:24,017 Epoch 1634/2000 +2025-03-19 19:30:08,391 Current Learning Rate: 0.0009303710 +2025-03-19 19:30:08,391 Train Loss: 0.0001672, Val Loss: 0.0002477 +2025-03-19 19:30:08,391 Epoch 1635/2000 +2025-03-19 19:32:52,995 Current Learning Rate: 0.0009263201 +2025-03-19 19:32:52,995 Train Loss: 0.0001659, Val Loss: 0.0002476 +2025-03-19 19:32:52,996 Epoch 1636/2000 +2025-03-19 19:35:37,467 Current Learning Rate: 0.0009221640 +2025-03-19 19:35:37,467 Train Loss: 0.0001635, Val Loss: 0.0002470 +2025-03-19 19:35:37,467 Epoch 1637/2000 +2025-03-19 19:38:21,987 Current Learning Rate: 0.0009179037 +2025-03-19 19:38:21,988 Train Loss: 0.0001627, Val Loss: 0.0002516 +2025-03-19 19:38:21,988 Epoch 1638/2000 +2025-03-19 19:41:06,227 Current Learning Rate: 0.0009135403 +2025-03-19 19:41:06,227 Train Loss: 0.0001622, Val Loss: 0.0002464 +2025-03-19 19:41:06,228 Epoch 1639/2000 +2025-03-19 19:43:50,548 Current Learning Rate: 0.0009090749 +2025-03-19 19:43:50,548 Train Loss: 0.0001623, Val Loss: 0.0002459 +2025-03-19 19:43:50,549 Epoch 1640/2000 +2025-03-19 19:46:34,903 Current Learning Rate: 0.0009045085 +2025-03-19 19:46:34,904 Train Loss: 0.0001626, Val Loss: 0.0002443 +2025-03-19 19:46:34,904 Epoch 1641/2000 +2025-03-19 19:49:19,383 Current Learning Rate: 0.0008998423 +2025-03-19 19:49:19,383 Train Loss: 0.0001615, Val Loss: 0.0002440 +2025-03-19 19:49:19,383 Epoch 1642/2000 +2025-03-19 19:52:03,759 Current Learning Rate: 0.0008950775 +2025-03-19 19:52:03,760 Train Loss: 0.0001612, Val Loss: 0.0002467 +2025-03-19 19:52:03,760 Epoch 1643/2000 +2025-03-19 19:54:48,274 Current Learning Rate: 0.0008902152 +2025-03-19 19:54:48,275 Train Loss: 0.0001620, Val Loss: 0.0002510 +2025-03-19 19:54:48,275 Epoch 1644/2000 +2025-03-19 19:57:32,759 Current Learning Rate: 0.0008852566 +2025-03-19 19:57:32,760 Train Loss: 0.0001625, Val Loss: 0.0002481 +2025-03-19 19:57:32,760 Epoch 1645/2000 +2025-03-19 20:00:17,164 Current Learning 
Rate: 0.0008802030 +2025-03-19 20:00:17,165 Train Loss: 0.0001626, Val Loss: 0.0002522 +2025-03-19 20:00:17,165 Epoch 1646/2000 +2025-03-19 20:03:01,878 Current Learning Rate: 0.0008750555 +2025-03-19 20:03:01,878 Train Loss: 0.0001629, Val Loss: 0.0002551 +2025-03-19 20:03:01,878 Epoch 1647/2000 +2025-03-19 20:05:45,964 Current Learning Rate: 0.0008698155 +2025-03-19 20:05:45,965 Train Loss: 0.0001626, Val Loss: 0.0002524 +2025-03-19 20:05:45,965 Epoch 1648/2000 +2025-03-19 20:08:30,240 Current Learning Rate: 0.0008644843 +2025-03-19 20:08:30,241 Train Loss: 0.0001620, Val Loss: 0.0002519 +2025-03-19 20:08:30,241 Epoch 1649/2000 +2025-03-19 20:11:14,786 Current Learning Rate: 0.0008590631 +2025-03-19 20:11:14,787 Train Loss: 0.0001621, Val Loss: 0.0002515 +2025-03-19 20:11:14,787 Epoch 1650/2000 +2025-03-19 20:13:59,078 Current Learning Rate: 0.0008535534 +2025-03-19 20:13:59,079 Train Loss: 0.0001628, Val Loss: 0.0002476 +2025-03-19 20:13:59,079 Epoch 1651/2000 +2025-03-19 20:16:43,362 Current Learning Rate: 0.0008479564 +2025-03-19 20:16:43,363 Train Loss: 0.0001621, Val Loss: 0.0002458 +2025-03-19 20:16:43,363 Epoch 1652/2000 +2025-03-19 20:19:27,316 Current Learning Rate: 0.0008422736 +2025-03-19 20:19:27,316 Train Loss: 0.0001611, Val Loss: 0.0002449 +2025-03-19 20:19:27,317 Epoch 1653/2000 +2025-03-19 20:22:11,627 Current Learning Rate: 0.0008365063 +2025-03-19 20:22:11,628 Train Loss: 0.0001603, Val Loss: 0.0002449 +2025-03-19 20:22:11,628 Epoch 1654/2000 +2025-03-19 20:24:55,836 Current Learning Rate: 0.0008306559 +2025-03-19 20:24:55,837 Train Loss: 0.0001598, Val Loss: 0.0002436 +2025-03-19 20:24:55,837 Epoch 1655/2000 +2025-03-19 20:27:40,014 Current Learning Rate: 0.0008247240 +2025-03-19 20:27:40,014 Train Loss: 0.0001597, Val Loss: 0.0002434 +2025-03-19 20:27:40,014 Epoch 1656/2000 +2025-03-19 20:30:24,202 Current Learning Rate: 0.0008187120 +2025-03-19 20:30:24,202 Train Loss: 0.0001591, Val Loss: 0.0002448 +2025-03-19 20:30:24,203 Epoch 1657/2000 
+2025-03-19 20:33:08,440 Current Learning Rate: 0.0008126213 +2025-03-19 20:33:08,440 Train Loss: 0.0001590, Val Loss: 0.0002414 +2025-03-19 20:33:08,440 Epoch 1658/2000 +2025-03-19 20:35:52,743 Current Learning Rate: 0.0008064535 +2025-03-19 20:35:52,743 Train Loss: 0.0001595, Val Loss: 0.0002420 +2025-03-19 20:35:52,743 Epoch 1659/2000 +2025-03-19 20:38:37,234 Current Learning Rate: 0.0008002101 +2025-03-19 20:38:37,235 Train Loss: 0.0001588, Val Loss: 0.0002408 +2025-03-19 20:38:37,235 Epoch 1660/2000 +2025-03-19 20:41:21,630 Current Learning Rate: 0.0007938926 +2025-03-19 20:41:21,631 Train Loss: 0.0001590, Val Loss: 0.0002417 +2025-03-19 20:41:21,631 Epoch 1661/2000 +2025-03-19 20:44:05,673 Current Learning Rate: 0.0007875026 +2025-03-19 20:44:05,674 Train Loss: 0.0001587, Val Loss: 0.0002435 +2025-03-19 20:44:05,674 Epoch 1662/2000 +2025-03-19 20:46:49,838 Current Learning Rate: 0.0007810417 +2025-03-19 20:46:49,838 Train Loss: 0.0001587, Val Loss: 0.0002399 +2025-03-19 20:46:49,838 Epoch 1663/2000 +2025-03-19 20:49:33,840 Current Learning Rate: 0.0007745114 +2025-03-19 20:49:33,840 Train Loss: 0.0001576, Val Loss: 0.0002389 +2025-03-19 20:49:33,841 Epoch 1664/2000 +2025-03-19 20:52:18,242 Current Learning Rate: 0.0007679134 +2025-03-19 20:52:18,242 Train Loss: 0.0001567, Val Loss: 0.0002391 +2025-03-19 20:52:18,242 Epoch 1665/2000 +2025-03-19 20:55:02,759 Current Learning Rate: 0.0007612493 +2025-03-19 20:55:02,759 Train Loss: 0.0001551, Val Loss: 0.0002394 +2025-03-19 20:55:02,760 Epoch 1666/2000 +2025-03-19 20:57:47,118 Current Learning Rate: 0.0007545207 +2025-03-19 20:57:47,119 Train Loss: 0.0001550, Val Loss: 0.0002388 +2025-03-19 20:57:47,119 Epoch 1667/2000 +2025-03-19 21:00:31,340 Current Learning Rate: 0.0007477293 +2025-03-19 21:00:31,340 Train Loss: 0.0001552, Val Loss: 0.0002381 +2025-03-19 21:00:31,340 Epoch 1668/2000 +2025-03-19 21:03:15,548 Current Learning Rate: 0.0007408768 +2025-03-19 21:03:15,548 Train Loss: 0.0001555, Val Loss: 0.0002375 
+2025-03-19 21:03:15,549 Epoch 1669/2000 +2025-03-19 21:06:00,702 Current Learning Rate: 0.0007339649 +2025-03-19 21:06:00,703 Train Loss: 0.0001559, Val Loss: 0.0002382 +2025-03-19 21:06:00,703 Epoch 1670/2000 +2025-03-19 21:08:45,056 Current Learning Rate: 0.0007269952 +2025-03-19 21:08:45,056 Train Loss: 0.0001560, Val Loss: 0.0002396 +2025-03-19 21:08:45,056 Epoch 1671/2000 +2025-03-19 21:11:29,287 Current Learning Rate: 0.0007199696 +2025-03-19 21:11:29,287 Train Loss: 0.0001555, Val Loss: 0.0002379 +2025-03-19 21:11:29,287 Epoch 1672/2000 +2025-03-19 21:14:13,647 Current Learning Rate: 0.0007128896 +2025-03-19 21:14:13,648 Train Loss: 0.0001545, Val Loss: 0.0002379 +2025-03-19 21:14:13,648 Epoch 1673/2000 +2025-03-19 21:16:58,069 Current Learning Rate: 0.0007057572 +2025-03-19 21:16:58,070 Train Loss: 0.0001543, Val Loss: 0.0002382 +2025-03-19 21:16:58,070 Epoch 1674/2000 +2025-03-19 21:19:42,625 Current Learning Rate: 0.0006985739 +2025-03-19 21:19:42,625 Train Loss: 0.0001553, Val Loss: 0.0002387 +2025-03-19 21:19:42,625 Epoch 1675/2000 +2025-03-19 21:22:26,965 Current Learning Rate: 0.0006913417 +2025-03-19 21:22:26,965 Train Loss: 0.0001565, Val Loss: 0.0002413 +2025-03-19 21:22:26,965 Epoch 1676/2000 +2025-03-19 21:25:11,312 Current Learning Rate: 0.0006840623 +2025-03-19 21:25:11,312 Train Loss: 0.0001572, Val Loss: 0.0002415 +2025-03-19 21:25:11,312 Epoch 1677/2000 +2025-03-19 21:27:55,805 Current Learning Rate: 0.0006767374 +2025-03-19 21:27:55,805 Train Loss: 0.0001558, Val Loss: 0.0002392 +2025-03-19 21:27:55,805 Epoch 1678/2000 +2025-03-19 21:30:40,190 Current Learning Rate: 0.0006693690 +2025-03-19 21:30:40,190 Train Loss: 0.0001544, Val Loss: 0.0002353 +2025-03-19 21:30:40,191 Epoch 1679/2000 +2025-03-19 21:33:24,664 Current Learning Rate: 0.0006619587 +2025-03-19 21:33:24,664 Train Loss: 0.0001515, Val Loss: 0.0002352 +2025-03-19 21:33:24,664 Epoch 1680/2000 +2025-03-19 21:36:09,033 Current Learning Rate: 0.0006545085 +2025-03-19 21:36:09,033 
Train Loss: 0.0001503, Val Loss: 0.0002360 +2025-03-19 21:36:09,033 Epoch 1681/2000 +2025-03-19 21:38:53,648 Current Learning Rate: 0.0006470202 +2025-03-19 21:38:53,649 Train Loss: 0.0001504, Val Loss: 0.0002325 +2025-03-19 21:38:53,649 Epoch 1682/2000 +2025-03-19 21:41:37,870 Current Learning Rate: 0.0006394956 +2025-03-19 21:41:37,871 Train Loss: 0.0001501, Val Loss: 0.0002328 +2025-03-19 21:41:37,871 Epoch 1683/2000 +2025-03-19 21:44:22,639 Current Learning Rate: 0.0006319365 +2025-03-19 21:44:22,640 Train Loss: 0.0001506, Val Loss: 0.0002337 +2025-03-19 21:44:22,640 Epoch 1684/2000 +2025-03-19 21:47:07,101 Current Learning Rate: 0.0006243449 +2025-03-19 21:47:07,101 Train Loss: 0.0001500, Val Loss: 0.0002314 +2025-03-19 21:47:07,101 Epoch 1685/2000 +2025-03-19 21:49:51,424 Current Learning Rate: 0.0006167227 +2025-03-19 21:49:51,424 Train Loss: 0.0001499, Val Loss: 0.0002310 +2025-03-19 21:49:51,424 Epoch 1686/2000 +2025-03-19 21:52:36,074 Current Learning Rate: 0.0006090716 +2025-03-19 21:52:36,074 Train Loss: 0.0001502, Val Loss: 0.0002293 +2025-03-19 21:52:36,075 Epoch 1687/2000 +2025-03-19 21:55:20,467 Current Learning Rate: 0.0006013936 +2025-03-19 21:55:20,467 Train Loss: 0.0001507, Val Loss: 0.0002297 +2025-03-19 21:55:20,467 Epoch 1688/2000 +2025-03-19 21:58:04,582 Current Learning Rate: 0.0005936907 +2025-03-19 21:58:04,583 Train Loss: 0.0001509, Val Loss: 0.0002291 +2025-03-19 21:58:04,583 Epoch 1689/2000 +2025-03-19 22:00:48,891 Current Learning Rate: 0.0005859646 +2025-03-19 22:00:48,892 Train Loss: 0.0001512, Val Loss: 0.0002311 +2025-03-19 22:00:48,892 Epoch 1690/2000 +2025-03-19 22:03:33,633 Current Learning Rate: 0.0005782172 +2025-03-19 22:03:33,633 Train Loss: 0.0001523, Val Loss: 0.0002324 +2025-03-19 22:03:33,634 Epoch 1691/2000 +2025-03-19 22:06:17,806 Current Learning Rate: 0.0005704506 +2025-03-19 22:06:17,807 Train Loss: 0.0001509, Val Loss: 0.0002320 +2025-03-19 22:06:17,807 Epoch 1692/2000 +2025-03-19 22:09:02,171 Current Learning 
Rate: 0.0005626666 +2025-03-19 22:09:02,171 Train Loss: 0.0001502, Val Loss: 0.0002324 +2025-03-19 22:09:02,171 Epoch 1693/2000 +2025-03-19 22:11:46,485 Current Learning Rate: 0.0005548672 +2025-03-19 22:11:46,485 Train Loss: 0.0001501, Val Loss: 0.0002321 +2025-03-19 22:11:46,485 Epoch 1694/2000 +2025-03-19 22:14:30,918 Current Learning Rate: 0.0005470542 +2025-03-19 22:14:30,918 Train Loss: 0.0001493, Val Loss: 0.0002329 +2025-03-19 22:14:30,918 Epoch 1695/2000 +2025-03-19 22:17:15,067 Current Learning Rate: 0.0005392295 +2025-03-19 22:17:15,068 Train Loss: 0.0001480, Val Loss: 0.0002300 +2025-03-19 22:17:15,068 Epoch 1696/2000 +2025-03-19 22:19:59,303 Current Learning Rate: 0.0005313953 +2025-03-19 22:19:59,303 Train Loss: 0.0001477, Val Loss: 0.0002296 +2025-03-19 22:19:59,303 Epoch 1697/2000 +2025-03-19 22:22:43,671 Current Learning Rate: 0.0005235532 +2025-03-19 22:22:43,671 Train Loss: 0.0001482, Val Loss: 0.0002303 +2025-03-19 22:22:43,671 Epoch 1698/2000 +2025-03-19 22:25:27,617 Current Learning Rate: 0.0005157054 +2025-03-19 22:25:27,618 Train Loss: 0.0001476, Val Loss: 0.0002312 +2025-03-19 22:25:27,618 Epoch 1699/2000 +2025-03-19 22:28:12,047 Current Learning Rate: 0.0005078537 +2025-03-19 22:28:12,047 Train Loss: 0.0001469, Val Loss: 0.0002326 +2025-03-19 22:28:12,048 Epoch 1700/2000 +2025-03-19 22:30:56,944 Current Learning Rate: 0.0005000000 +2025-03-19 22:30:56,944 Train Loss: 0.0001459, Val Loss: 0.0002302 +2025-03-19 22:30:56,944 Epoch 1701/2000 +2025-03-19 22:33:40,974 Current Learning Rate: 0.0004921463 +2025-03-19 22:33:40,974 Train Loss: 0.0001451, Val Loss: 0.0002293 +2025-03-19 22:33:40,974 Epoch 1702/2000 +2025-03-19 22:36:25,557 Current Learning Rate: 0.0004842946 +2025-03-19 22:36:25,557 Train Loss: 0.0001447, Val Loss: 0.0002317 +2025-03-19 22:36:25,557 Epoch 1703/2000 +2025-03-19 22:39:09,838 Current Learning Rate: 0.0004764468 +2025-03-19 22:39:09,838 Train Loss: 0.0001449, Val Loss: 0.0002364 +2025-03-19 22:39:09,838 Epoch 1704/2000 
+2025-03-19 22:41:54,387 Current Learning Rate: 0.0004686047 +2025-03-19 22:41:54,387 Train Loss: 0.0001446, Val Loss: 0.0002322 +2025-03-19 22:41:54,388 Epoch 1705/2000 +2025-03-19 22:44:38,791 Current Learning Rate: 0.0004607705 +2025-03-19 22:44:38,791 Train Loss: 0.0001449, Val Loss: 0.0002263 +2025-03-19 22:44:38,791 Epoch 1706/2000 +2025-03-19 22:47:23,228 Current Learning Rate: 0.0004529458 +2025-03-19 22:47:23,228 Train Loss: 0.0001453, Val Loss: 0.0002251 +2025-03-19 22:47:23,228 Epoch 1707/2000 +2025-03-19 22:50:07,855 Current Learning Rate: 0.0004451328 +2025-03-19 22:50:07,855 Train Loss: 0.0001462, Val Loss: 0.0002267 +2025-03-19 22:50:07,856 Epoch 1708/2000 +2025-03-19 22:52:52,198 Current Learning Rate: 0.0004373334 +2025-03-19 22:52:52,198 Train Loss: 0.0001462, Val Loss: 0.0002261 +2025-03-19 22:52:52,199 Epoch 1709/2000 +2025-03-19 22:55:36,701 Current Learning Rate: 0.0004295494 +2025-03-19 22:55:36,702 Train Loss: 0.0001457, Val Loss: 0.0002266 +2025-03-19 22:55:36,702 Epoch 1710/2000 +2025-03-19 22:58:21,470 Current Learning Rate: 0.0004217828 +2025-03-19 22:58:21,471 Train Loss: 0.0001451, Val Loss: 0.0002266 +2025-03-19 22:58:21,471 Epoch 1711/2000 +2025-03-19 23:01:06,178 Current Learning Rate: 0.0004140354 +2025-03-19 23:01:06,178 Train Loss: 0.0001441, Val Loss: 0.0002247 +2025-03-19 23:01:06,179 Epoch 1712/2000 +2025-03-19 23:03:50,376 Current Learning Rate: 0.0004063093 +2025-03-19 23:03:50,376 Train Loss: 0.0001432, Val Loss: 0.0002251 +2025-03-19 23:03:50,376 Epoch 1713/2000 +2025-03-19 23:06:34,446 Current Learning Rate: 0.0003986064 +2025-03-19 23:06:34,446 Train Loss: 0.0001424, Val Loss: 0.0002254 +2025-03-19 23:06:34,447 Epoch 1714/2000 +2025-03-19 23:09:18,777 Current Learning Rate: 0.0003909284 +2025-03-19 23:09:18,778 Train Loss: 0.0001418, Val Loss: 0.0002243 +2025-03-19 23:09:18,778 Epoch 1715/2000 +2025-03-19 23:12:02,674 Current Learning Rate: 0.0003832773 +2025-03-19 23:12:02,675 Train Loss: 0.0001411, Val Loss: 0.0002228 
+2025-03-19 23:12:02,675 Epoch 1716/2000 +2025-03-19 23:14:46,968 Current Learning Rate: 0.0003756551 +2025-03-19 23:14:46,969 Train Loss: 0.0001412, Val Loss: 0.0002241 +2025-03-19 23:14:46,969 Epoch 1717/2000 +2025-03-19 23:17:30,997 Current Learning Rate: 0.0003680635 +2025-03-19 23:17:30,997 Train Loss: 0.0001415, Val Loss: 0.0002235 +2025-03-19 23:17:30,997 Epoch 1718/2000 +2025-03-19 23:20:15,055 Current Learning Rate: 0.0003605044 +2025-03-19 23:20:15,055 Train Loss: 0.0001412, Val Loss: 0.0002239 +2025-03-19 23:20:15,056 Epoch 1719/2000 +2025-03-19 23:22:59,370 Current Learning Rate: 0.0003529798 +2025-03-19 23:22:59,371 Train Loss: 0.0001410, Val Loss: 0.0002267 +2025-03-19 23:22:59,371 Epoch 1720/2000 +2025-03-19 23:25:43,615 Current Learning Rate: 0.0003454915 +2025-03-19 23:25:43,615 Train Loss: 0.0001409, Val Loss: 0.0002241 +2025-03-19 23:25:43,615 Epoch 1721/2000 +2025-03-19 23:28:28,408 Current Learning Rate: 0.0003380413 +2025-03-19 23:28:28,408 Train Loss: 0.0001406, Val Loss: 0.0002214 +2025-03-19 23:28:28,409 Epoch 1722/2000 +2025-03-19 23:31:12,872 Current Learning Rate: 0.0003306310 +2025-03-19 23:31:12,873 Train Loss: 0.0001401, Val Loss: 0.0002198 +2025-03-19 23:31:12,873 Epoch 1723/2000 +2025-03-19 23:33:56,985 Current Learning Rate: 0.0003232626 +2025-03-19 23:33:56,985 Train Loss: 0.0001398, Val Loss: 0.0002190 +2025-03-19 23:33:56,986 Epoch 1724/2000 +2025-03-19 23:36:41,317 Current Learning Rate: 0.0003159377 +2025-03-19 23:36:41,318 Train Loss: 0.0001392, Val Loss: 0.0002188 +2025-03-19 23:36:41,318 Epoch 1725/2000 +2025-03-19 23:39:25,685 Current Learning Rate: 0.0003086583 +2025-03-19 23:39:25,686 Train Loss: 0.0001389, Val Loss: 0.0002188 +2025-03-19 23:39:25,686 Epoch 1726/2000 +2025-03-19 23:42:09,957 Current Learning Rate: 0.0003014261 +2025-03-19 23:42:09,957 Train Loss: 0.0001382, Val Loss: 0.0002188 +2025-03-19 23:42:09,957 Epoch 1727/2000 +2025-03-19 23:44:54,510 Current Learning Rate: 0.0002942428 +2025-03-19 23:44:54,510 
Train Loss: 0.0001376, Val Loss: 0.0002192 +2025-03-19 23:44:54,510 Epoch 1728/2000 +2025-03-19 23:47:38,952 Current Learning Rate: 0.0002871104 +2025-03-19 23:47:38,952 Train Loss: 0.0001376, Val Loss: 0.0002195 +2025-03-19 23:47:38,953 Epoch 1729/2000 +2025-03-19 23:50:23,633 Current Learning Rate: 0.0002800304 +2025-03-19 23:50:23,634 Train Loss: 0.0001376, Val Loss: 0.0002195 +2025-03-19 23:50:23,634 Epoch 1730/2000 +2025-03-19 23:53:08,732 Current Learning Rate: 0.0002730048 +2025-03-19 23:53:08,732 Train Loss: 0.0001375, Val Loss: 0.0002191 +2025-03-19 23:53:08,732 Epoch 1731/2000 +2025-03-19 23:55:53,367 Current Learning Rate: 0.0002660351 +2025-03-19 23:55:53,368 Train Loss: 0.0001370, Val Loss: 0.0002197 +2025-03-19 23:55:53,368 Epoch 1732/2000 +2025-03-19 23:58:37,875 Current Learning Rate: 0.0002591232 +2025-03-19 23:58:37,875 Train Loss: 0.0001364, Val Loss: 0.0002190 +2025-03-19 23:58:37,875 Epoch 1733/2000 +2025-03-20 00:01:22,257 Current Learning Rate: 0.0002522707 +2025-03-20 00:01:22,257 Train Loss: 0.0001361, Val Loss: 0.0002175 +2025-03-20 00:01:22,257 Epoch 1734/2000 +2025-03-20 00:04:06,738 Current Learning Rate: 0.0002454793 +2025-03-20 00:04:06,739 Train Loss: 0.0001354, Val Loss: 0.0002169 +2025-03-20 00:04:06,739 Epoch 1735/2000 +2025-03-20 00:06:51,028 Current Learning Rate: 0.0002387507 +2025-03-20 00:06:51,029 Train Loss: 0.0001351, Val Loss: 0.0002170 +2025-03-20 00:06:51,029 Epoch 1736/2000 +2025-03-20 00:09:35,713 Current Learning Rate: 0.0002320866 +2025-03-20 00:09:35,713 Train Loss: 0.0001350, Val Loss: 0.0002177 +2025-03-20 00:09:35,713 Epoch 1737/2000 +2025-03-20 00:12:20,041 Current Learning Rate: 0.0002254886 +2025-03-20 00:12:20,041 Train Loss: 0.0001346, Val Loss: 0.0002182 +2025-03-20 00:12:20,042 Epoch 1738/2000 +2025-03-20 00:15:04,736 Current Learning Rate: 0.0002189583 +2025-03-20 00:15:04,737 Train Loss: 0.0001346, Val Loss: 0.0002176 +2025-03-20 00:15:04,737 Epoch 1739/2000 +2025-03-20 00:17:49,059 Current Learning 
Rate: 0.0002124974 +2025-03-20 00:17:49,059 Train Loss: 0.0001345, Val Loss: 0.0002168 +2025-03-20 00:17:49,059 Epoch 1740/2000 +2025-03-20 00:20:33,352 Current Learning Rate: 0.0002061074 +2025-03-20 00:20:33,353 Train Loss: 0.0001343, Val Loss: 0.0002159 +2025-03-20 00:20:33,353 Epoch 1741/2000 +2025-03-20 00:23:17,715 Current Learning Rate: 0.0001997899 +2025-03-20 00:23:17,716 Train Loss: 0.0001340, Val Loss: 0.0002158 +2025-03-20 00:23:17,716 Epoch 1742/2000 +2025-03-20 00:26:02,065 Current Learning Rate: 0.0001935465 +2025-03-20 00:26:02,065 Train Loss: 0.0001336, Val Loss: 0.0002161 +2025-03-20 00:26:02,065 Epoch 1743/2000 +2025-03-20 00:28:46,447 Current Learning Rate: 0.0001873787 +2025-03-20 00:28:46,448 Train Loss: 0.0001333, Val Loss: 0.0002161 +2025-03-20 00:28:46,448 Epoch 1744/2000 +2025-03-20 00:31:30,791 Current Learning Rate: 0.0001812880 +2025-03-20 00:31:30,791 Train Loss: 0.0001331, Val Loss: 0.0002156 +2025-03-20 00:31:30,791 Epoch 1745/2000 +2025-03-20 00:34:15,194 Current Learning Rate: 0.0001752760 +2025-03-20 00:34:15,194 Train Loss: 0.0001331, Val Loss: 0.0002155 +2025-03-20 00:34:15,194 Epoch 1746/2000 +2025-03-20 00:36:59,617 Current Learning Rate: 0.0001693441 +2025-03-20 00:36:59,617 Train Loss: 0.0001328, Val Loss: 0.0002147 +2025-03-20 00:36:59,618 Epoch 1747/2000 +2025-03-20 00:39:43,815 Current Learning Rate: 0.0001634937 +2025-03-20 00:39:43,816 Train Loss: 0.0001323, Val Loss: 0.0002138 +2025-03-20 00:39:43,816 Epoch 1748/2000 +2025-03-20 00:42:28,102 Current Learning Rate: 0.0001577264 +2025-03-20 00:42:28,102 Train Loss: 0.0001321, Val Loss: 0.0002135 +2025-03-20 00:42:28,103 Epoch 1749/2000 +2025-03-20 00:45:12,556 Current Learning Rate: 0.0001520436 +2025-03-20 00:45:12,556 Train Loss: 0.0001319, Val Loss: 0.0002137 +2025-03-20 00:45:12,556 Epoch 1750/2000 +2025-03-20 00:47:57,004 Current Learning Rate: 0.0001464466 +2025-03-20 00:47:57,004 Train Loss: 0.0001317, Val Loss: 0.0002136 +2025-03-20 00:47:57,005 Epoch 1751/2000 
+2025-03-20 00:50:41,441 Current Learning Rate: 0.0001409369 +2025-03-20 00:50:41,441 Train Loss: 0.0001316, Val Loss: 0.0002133 +2025-03-20 00:50:41,442 Epoch 1752/2000 +2025-03-20 00:53:25,379 Current Learning Rate: 0.0001355157 +2025-03-20 00:53:25,379 Train Loss: 0.0001315, Val Loss: 0.0002129 +2025-03-20 00:53:25,380 Epoch 1753/2000 +2025-03-20 00:56:09,667 Current Learning Rate: 0.0001301845 +2025-03-20 00:56:09,667 Train Loss: 0.0001313, Val Loss: 0.0002124 +2025-03-20 00:56:09,667 Epoch 1754/2000 +2025-03-20 00:58:53,748 Current Learning Rate: 0.0001249445 +2025-03-20 00:58:53,749 Train Loss: 0.0001309, Val Loss: 0.0002125 +2025-03-20 00:58:53,749 Epoch 1755/2000 +2025-03-20 01:01:38,071 Current Learning Rate: 0.0001197970 +2025-03-20 01:01:38,072 Train Loss: 0.0001305, Val Loss: 0.0002123 +2025-03-20 01:01:38,072 Epoch 1756/2000 +2025-03-20 01:04:22,458 Current Learning Rate: 0.0001147434 +2025-03-20 01:04:22,459 Train Loss: 0.0001301, Val Loss: 0.0002120 +2025-03-20 01:04:22,459 Epoch 1757/2000 +2025-03-20 01:07:06,364 Current Learning Rate: 0.0001097848 +2025-03-20 01:07:06,364 Train Loss: 0.0001298, Val Loss: 0.0002114 +2025-03-20 01:07:06,364 Epoch 1758/2000 +2025-03-20 01:09:50,780 Current Learning Rate: 0.0001049225 +2025-03-20 01:09:50,780 Train Loss: 0.0001296, Val Loss: 0.0002110 +2025-03-20 01:09:50,780 Epoch 1759/2000 +2025-03-20 01:12:34,890 Current Learning Rate: 0.0001001577 +2025-03-20 01:12:34,890 Train Loss: 0.0001293, Val Loss: 0.0002110 +2025-03-20 01:12:34,891 Epoch 1760/2000 +2025-03-20 01:15:19,344 Current Learning Rate: 0.0000954915 +2025-03-20 01:15:19,345 Train Loss: 0.0001291, Val Loss: 0.0002109 +2025-03-20 01:15:19,345 Epoch 1761/2000 +2025-03-20 01:18:04,071 Current Learning Rate: 0.0000909251 +2025-03-20 01:18:04,072 Train Loss: 0.0001290, Val Loss: 0.0002108 +2025-03-20 01:18:04,072 Epoch 1762/2000 +2025-03-20 01:20:48,121 Current Learning Rate: 0.0000864597 +2025-03-20 01:20:48,121 Train Loss: 0.0001288, Val Loss: 0.0002107 
+2025-03-20 01:20:48,121 Epoch 1763/2000 +2025-03-20 01:23:31,912 Current Learning Rate: 0.0000820963 +2025-03-20 01:23:31,913 Train Loss: 0.0001287, Val Loss: 0.0002107 +2025-03-20 01:23:31,913 Epoch 1764/2000 +2025-03-20 01:26:15,931 Current Learning Rate: 0.0000778360 +2025-03-20 01:26:15,931 Train Loss: 0.0001286, Val Loss: 0.0002106 +2025-03-20 01:26:15,931 Epoch 1765/2000 +2025-03-20 01:29:00,033 Current Learning Rate: 0.0000736799 +2025-03-20 01:29:00,034 Train Loss: 0.0001285, Val Loss: 0.0002106 +2025-03-20 01:29:00,034 Epoch 1766/2000 +2025-03-20 01:31:44,233 Current Learning Rate: 0.0000696290 +2025-03-20 01:31:44,233 Train Loss: 0.0001283, Val Loss: 0.0002104 +2025-03-20 01:31:44,233 Epoch 1767/2000 +2025-03-20 01:34:28,202 Current Learning Rate: 0.0000656842 +2025-03-20 01:34:28,203 Train Loss: 0.0001281, Val Loss: 0.0002101 +2025-03-20 01:34:28,203 Epoch 1768/2000 +2025-03-20 01:37:12,222 Current Learning Rate: 0.0000618467 +2025-03-20 01:37:12,339 Train Loss: 0.0001279, Val Loss: 0.0002097 +2025-03-20 01:37:12,339 Epoch 1769/2000 +2025-03-20 01:39:56,338 Current Learning Rate: 0.0000581172 +2025-03-20 01:39:56,458 Train Loss: 0.0001277, Val Loss: 0.0002094 +2025-03-20 01:39:56,458 Epoch 1770/2000 +2025-03-20 01:42:40,660 Current Learning Rate: 0.0000544967 +2025-03-20 01:42:40,774 Train Loss: 0.0001276, Val Loss: 0.0002092 +2025-03-20 01:42:40,774 Epoch 1771/2000 +2025-03-20 01:45:24,916 Current Learning Rate: 0.0000509862 +2025-03-20 01:45:25,032 Train Loss: 0.0001275, Val Loss: 0.0002090 +2025-03-20 01:45:25,032 Epoch 1772/2000 +2025-03-20 01:48:09,201 Current Learning Rate: 0.0000475865 +2025-03-20 01:48:09,336 Train Loss: 0.0001274, Val Loss: 0.0002088 +2025-03-20 01:48:09,336 Epoch 1773/2000 +2025-03-20 01:50:53,354 Current Learning Rate: 0.0000442984 +2025-03-20 01:50:53,474 Train Loss: 0.0001272, Val Loss: 0.0002087 +2025-03-20 01:50:53,475 Epoch 1774/2000 +2025-03-20 01:53:38,043 Current Learning Rate: 0.0000411227 +2025-03-20 01:53:38,157 
Train Loss: 0.0001271, Val Loss: 0.0002086 +2025-03-20 01:53:38,157 Epoch 1775/2000 +2025-03-20 01:56:22,137 Current Learning Rate: 0.0000380602 +2025-03-20 01:56:22,259 Train Loss: 0.0001270, Val Loss: 0.0002084 +2025-03-20 01:56:22,259 Epoch 1776/2000 +2025-03-20 01:59:07,136 Current Learning Rate: 0.0000351118 +2025-03-20 01:59:07,260 Train Loss: 0.0001269, Val Loss: 0.0002083 +2025-03-20 01:59:07,261 Epoch 1777/2000 +2025-03-20 02:01:51,463 Current Learning Rate: 0.0000322780 +2025-03-20 02:01:51,589 Train Loss: 0.0001268, Val Loss: 0.0002082 +2025-03-20 02:01:51,589 Epoch 1778/2000 +2025-03-20 02:04:35,783 Current Learning Rate: 0.0000295596 +2025-03-20 02:04:35,895 Train Loss: 0.0001267, Val Loss: 0.0002081 +2025-03-20 02:04:35,896 Epoch 1779/2000 +2025-03-20 02:07:20,308 Current Learning Rate: 0.0000269573 +2025-03-20 02:07:20,470 Train Loss: 0.0001266, Val Loss: 0.0002080 +2025-03-20 02:07:20,471 Epoch 1780/2000 +2025-03-20 02:10:04,644 Current Learning Rate: 0.0000244717 +2025-03-20 02:10:04,764 Train Loss: 0.0001265, Val Loss: 0.0002079 +2025-03-20 02:10:04,764 Epoch 1781/2000 +2025-03-20 02:12:49,007 Current Learning Rate: 0.0000221035 +2025-03-20 02:12:49,121 Train Loss: 0.0001265, Val Loss: 0.0002078 +2025-03-20 02:12:49,121 Epoch 1782/2000 +2025-03-20 02:15:33,533 Current Learning Rate: 0.0000198532 +2025-03-20 02:15:33,650 Train Loss: 0.0001264, Val Loss: 0.0002078 +2025-03-20 02:15:33,650 Epoch 1783/2000 +2025-03-20 02:18:17,892 Current Learning Rate: 0.0000177213 +2025-03-20 02:18:18,008 Train Loss: 0.0001263, Val Loss: 0.0002077 +2025-03-20 02:18:18,008 Epoch 1784/2000 +2025-03-20 02:21:02,368 Current Learning Rate: 0.0000157084 +2025-03-20 02:21:02,487 Train Loss: 0.0001262, Val Loss: 0.0002077 +2025-03-20 02:21:02,488 Epoch 1785/2000 +2025-03-20 02:23:46,924 Current Learning Rate: 0.0000138150 +2025-03-20 02:23:47,051 Train Loss: 0.0001262, Val Loss: 0.0002077 +2025-03-20 02:23:47,051 Epoch 1786/2000 +2025-03-20 02:26:31,101 Current Learning 
Rate: 0.0000120416 +2025-03-20 02:26:31,219 Train Loss: 0.0001261, Val Loss: 0.0002076 +2025-03-20 02:26:31,220 Epoch 1787/2000 +2025-03-20 02:29:15,565 Current Learning Rate: 0.0000103886 +2025-03-20 02:29:15,677 Train Loss: 0.0001260, Val Loss: 0.0002076 +2025-03-20 02:29:15,677 Epoch 1788/2000 +2025-03-20 02:32:00,107 Current Learning Rate: 0.0000088564 +2025-03-20 02:32:00,221 Train Loss: 0.0001260, Val Loss: 0.0002075 +2025-03-20 02:32:00,221 Epoch 1789/2000 +2025-03-20 02:34:44,447 Current Learning Rate: 0.0000074453 +2025-03-20 02:34:44,580 Train Loss: 0.0001259, Val Loss: 0.0002074 +2025-03-20 02:34:44,580 Epoch 1790/2000 +2025-03-20 02:37:28,865 Current Learning Rate: 0.0000061558 +2025-03-20 02:37:28,987 Train Loss: 0.0001259, Val Loss: 0.0002073 +2025-03-20 02:37:28,988 Epoch 1791/2000 +2025-03-20 02:40:13,297 Current Learning Rate: 0.0000049882 +2025-03-20 02:40:13,413 Train Loss: 0.0001258, Val Loss: 0.0002072 +2025-03-20 02:40:13,413 Epoch 1792/2000 +2025-03-20 02:42:57,844 Current Learning Rate: 0.0000039426 +2025-03-20 02:42:57,959 Train Loss: 0.0001258, Val Loss: 0.0002071 +2025-03-20 02:42:57,959 Epoch 1793/2000 +2025-03-20 02:45:41,604 Current Learning Rate: 0.0000030195 +2025-03-20 02:45:41,723 Train Loss: 0.0001257, Val Loss: 0.0002070 +2025-03-20 02:45:41,723 Epoch 1794/2000 +2025-03-20 02:48:26,576 Current Learning Rate: 0.0000022190 +2025-03-20 02:48:26,707 Train Loss: 0.0001257, Val Loss: 0.0002069 +2025-03-20 02:48:26,707 Epoch 1795/2000 +2025-03-20 02:51:10,936 Current Learning Rate: 0.0000015413 +2025-03-20 02:51:11,064 Train Loss: 0.0001256, Val Loss: 0.0002069 +2025-03-20 02:51:11,064 Epoch 1796/2000 +2025-03-20 02:53:55,507 Current Learning Rate: 0.0000009866 +2025-03-20 02:53:55,638 Train Loss: 0.0001256, Val Loss: 0.0002069 +2025-03-20 02:53:55,638 Epoch 1797/2000 +2025-03-20 02:56:40,362 Current Learning Rate: 0.0000005551 +2025-03-20 02:56:40,490 Train Loss: 0.0001256, Val Loss: 0.0002068 +2025-03-20 02:56:40,490 Epoch 1798/2000 
+2025-03-20 02:59:25,100 Current Learning Rate: 0.0000002467 +2025-03-20 02:59:25,227 Train Loss: 0.0001256, Val Loss: 0.0002068 +2025-03-20 02:59:25,227 Epoch 1799/2000 +2025-03-20 03:02:09,691 Current Learning Rate: 0.0000000617 +2025-03-20 03:02:09,820 Train Loss: 0.0001255, Val Loss: 0.0002068 +2025-03-20 03:02:09,820 Epoch 1800/2000 +2025-03-20 03:04:54,481 Current Learning Rate: 0.0000000000 +2025-03-20 03:04:54,606 Train Loss: 0.0001255, Val Loss: 0.0002068 +2025-03-20 03:04:54,606 Epoch 1801/2000 +2025-03-20 03:07:39,065 Current Learning Rate: 0.0000000617 +2025-03-20 03:07:39,193 Train Loss: 0.0001255, Val Loss: 0.0002068 +2025-03-20 03:07:39,193 Epoch 1802/2000 +2025-03-20 03:10:23,872 Current Learning Rate: 0.0000002467 +2025-03-20 03:10:24,017 Train Loss: 0.0001255, Val Loss: 0.0002068 +2025-03-20 03:10:24,017 Epoch 1803/2000 +2025-03-20 03:13:08,292 Current Learning Rate: 0.0000005551 +2025-03-20 03:13:08,293 Train Loss: 0.0001255, Val Loss: 0.0002068 +2025-03-20 03:13:08,293 Epoch 1804/2000 +2025-03-20 03:15:52,479 Current Learning Rate: 0.0000009866 +2025-03-20 03:15:52,479 Train Loss: 0.0001256, Val Loss: 0.0002068 +2025-03-20 03:15:52,480 Epoch 1805/2000 +2025-03-20 03:18:36,548 Current Learning Rate: 0.0000015413 +2025-03-20 03:18:36,548 Train Loss: 0.0001256, Val Loss: 0.0002068 +2025-03-20 03:18:36,548 Epoch 1806/2000 +2025-03-20 03:21:20,733 Current Learning Rate: 0.0000022190 +2025-03-20 03:21:20,734 Train Loss: 0.0001256, Val Loss: 0.0002069 +2025-03-20 03:21:20,734 Epoch 1807/2000 +2025-03-20 03:24:05,196 Current Learning Rate: 0.0000030195 +2025-03-20 03:24:05,196 Train Loss: 0.0001256, Val Loss: 0.0002069 +2025-03-20 03:24:05,197 Epoch 1808/2000 +2025-03-20 03:26:49,896 Current Learning Rate: 0.0000039426 +2025-03-20 03:26:49,897 Train Loss: 0.0001257, Val Loss: 0.0002069 +2025-03-20 03:26:49,897 Epoch 1809/2000 +2025-03-20 03:29:34,448 Current Learning Rate: 0.0000049882 +2025-03-20 03:29:34,449 Train Loss: 0.0001257, Val Loss: 0.0002070 
+2025-03-20 03:29:34,449 Epoch 1810/2000 +2025-03-20 03:32:18,997 Current Learning Rate: 0.0000061558 +2025-03-20 03:32:18,998 Train Loss: 0.0001257, Val Loss: 0.0002071 +2025-03-20 03:32:18,998 Epoch 1811/2000 +2025-03-20 03:35:03,689 Current Learning Rate: 0.0000074453 +2025-03-20 03:35:03,689 Train Loss: 0.0001257, Val Loss: 0.0002072 +2025-03-20 03:35:03,689 Epoch 1812/2000 +2025-03-20 03:37:48,271 Current Learning Rate: 0.0000088564 +2025-03-20 03:37:48,272 Train Loss: 0.0001258, Val Loss: 0.0002072 +2025-03-20 03:37:48,272 Epoch 1813/2000 +2025-03-20 03:40:32,802 Current Learning Rate: 0.0000103886 +2025-03-20 03:40:32,802 Train Loss: 0.0001258, Val Loss: 0.0002073 +2025-03-20 03:40:32,802 Epoch 1814/2000 +2025-03-20 03:43:17,188 Current Learning Rate: 0.0000120416 +2025-03-20 03:43:17,188 Train Loss: 0.0001258, Val Loss: 0.0002074 +2025-03-20 03:43:17,189 Epoch 1815/2000 +2025-03-20 03:46:01,605 Current Learning Rate: 0.0000138150 +2025-03-20 03:46:01,605 Train Loss: 0.0001259, Val Loss: 0.0002074 +2025-03-20 03:46:01,605 Epoch 1816/2000 +2025-03-20 03:48:46,291 Current Learning Rate: 0.0000157084 +2025-03-20 03:48:46,292 Train Loss: 0.0001259, Val Loss: 0.0002074 +2025-03-20 03:48:46,292 Epoch 1817/2000 +2025-03-20 03:51:30,601 Current Learning Rate: 0.0000177213 +2025-03-20 03:51:30,601 Train Loss: 0.0001259, Val Loss: 0.0002074 +2025-03-20 03:51:30,601 Epoch 1818/2000 +2025-03-20 03:54:15,072 Current Learning Rate: 0.0000198532 +2025-03-20 03:54:15,072 Train Loss: 0.0001260, Val Loss: 0.0002074 +2025-03-20 03:54:15,072 Epoch 1819/2000 +2025-03-20 03:56:59,742 Current Learning Rate: 0.0000221035 +2025-03-20 03:56:59,742 Train Loss: 0.0001260, Val Loss: 0.0002075 +2025-03-20 03:56:59,743 Epoch 1820/2000 +2025-03-20 03:59:43,873 Current Learning Rate: 0.0000244717 +2025-03-20 03:59:43,874 Train Loss: 0.0001261, Val Loss: 0.0002075 +2025-03-20 03:59:43,874 Epoch 1821/2000 +2025-03-20 04:02:28,591 Current Learning Rate: 0.0000269573 +2025-03-20 04:02:28,591 
Train Loss: 0.0001261, Val Loss: 0.0002076 +2025-03-20 04:02:28,591 Epoch 1822/2000 +2025-03-20 04:05:13,372 Current Learning Rate: 0.0000295596 +2025-03-20 04:05:13,373 Train Loss: 0.0001261, Val Loss: 0.0002077 +2025-03-20 04:05:13,373 Epoch 1823/2000 +2025-03-20 04:07:57,855 Current Learning Rate: 0.0000322780 +2025-03-20 04:07:57,855 Train Loss: 0.0001262, Val Loss: 0.0002078 +2025-03-20 04:07:57,856 Epoch 1824/2000 +2025-03-20 04:10:42,225 Current Learning Rate: 0.0000351118 +2025-03-20 04:10:42,225 Train Loss: 0.0001262, Val Loss: 0.0002078 +2025-03-20 04:10:42,225 Epoch 1825/2000 +2025-03-20 04:13:26,886 Current Learning Rate: 0.0000380602 +2025-03-20 04:13:26,887 Train Loss: 0.0001263, Val Loss: 0.0002079 +2025-03-20 04:13:26,887 Epoch 1826/2000 +2025-03-20 04:16:11,581 Current Learning Rate: 0.0000411227 +2025-03-20 04:16:11,582 Train Loss: 0.0001263, Val Loss: 0.0002080 +2025-03-20 04:16:11,582 Epoch 1827/2000 +2025-03-20 04:18:55,538 Current Learning Rate: 0.0000442984 +2025-03-20 04:18:55,538 Train Loss: 0.0001264, Val Loss: 0.0002081 +2025-03-20 04:18:55,538 Epoch 1828/2000 +2025-03-20 04:21:40,007 Current Learning Rate: 0.0000475865 +2025-03-20 04:21:40,008 Train Loss: 0.0001265, Val Loss: 0.0002081 +2025-03-20 04:21:40,008 Epoch 1829/2000 +2025-03-20 04:24:24,981 Current Learning Rate: 0.0000509862 +2025-03-20 04:24:24,981 Train Loss: 0.0001265, Val Loss: 0.0002082 +2025-03-20 04:24:24,982 Epoch 1830/2000 +2025-03-20 04:27:09,203 Current Learning Rate: 0.0000544967 +2025-03-20 04:27:09,203 Train Loss: 0.0001266, Val Loss: 0.0002083 +2025-03-20 04:27:09,204 Epoch 1831/2000 +2025-03-20 04:29:53,369 Current Learning Rate: 0.0000581172 +2025-03-20 04:29:53,369 Train Loss: 0.0001266, Val Loss: 0.0002084 +2025-03-20 04:29:53,370 Epoch 1832/2000 +2025-03-20 04:32:37,555 Current Learning Rate: 0.0000618467 +2025-03-20 04:32:37,556 Train Loss: 0.0001267, Val Loss: 0.0002085 +2025-03-20 04:32:37,556 Epoch 1833/2000 +2025-03-20 04:35:21,936 Current Learning 
Rate: 0.0000656842 +2025-03-20 04:35:21,937 Train Loss: 0.0001268, Val Loss: 0.0002085 +2025-03-20 04:35:21,937 Epoch 1834/2000 +2025-03-20 04:38:06,660 Current Learning Rate: 0.0000696290 +2025-03-20 04:38:06,661 Train Loss: 0.0001268, Val Loss: 0.0002086 +2025-03-20 04:38:06,661 Epoch 1835/2000 +2025-03-20 04:40:50,851 Current Learning Rate: 0.0000736799 +2025-03-20 04:40:50,852 Train Loss: 0.0001269, Val Loss: 0.0002087 +2025-03-20 04:40:50,852 Epoch 1836/2000 +2025-03-20 04:43:35,334 Current Learning Rate: 0.0000778360 +2025-03-20 04:43:35,335 Train Loss: 0.0001270, Val Loss: 0.0002088 +2025-03-20 04:43:35,335 Epoch 1837/2000 +2025-03-20 04:46:19,635 Current Learning Rate: 0.0000820963 +2025-03-20 04:46:19,635 Train Loss: 0.0001271, Val Loss: 0.0002089 +2025-03-20 04:46:19,635 Epoch 1838/2000 +2025-03-20 04:49:03,905 Current Learning Rate: 0.0000864597 +2025-03-20 04:49:03,906 Train Loss: 0.0001272, Val Loss: 0.0002091 +2025-03-20 04:49:03,906 Epoch 1839/2000 +2025-03-20 04:51:48,609 Current Learning Rate: 0.0000909251 +2025-03-20 04:51:48,609 Train Loss: 0.0001273, Val Loss: 0.0002091 +2025-03-20 04:51:48,610 Epoch 1840/2000 +2025-03-20 04:54:32,803 Current Learning Rate: 0.0000954915 +2025-03-20 04:54:32,804 Train Loss: 0.0001273, Val Loss: 0.0002093 +2025-03-20 04:54:32,804 Epoch 1841/2000 +2025-03-20 04:57:17,308 Current Learning Rate: 0.0001001577 +2025-03-20 04:57:17,308 Train Loss: 0.0001274, Val Loss: 0.0002095 +2025-03-20 04:57:17,308 Epoch 1842/2000 +2025-03-20 05:00:01,809 Current Learning Rate: 0.0001049225 +2025-03-20 05:00:01,809 Train Loss: 0.0001276, Val Loss: 0.0002096 +2025-03-20 05:00:01,810 Epoch 1843/2000 +2025-03-20 05:02:46,037 Current Learning Rate: 0.0001097848 +2025-03-20 05:02:46,038 Train Loss: 0.0001277, Val Loss: 0.0002097 +2025-03-20 05:02:46,038 Epoch 1844/2000 +2025-03-20 05:05:30,158 Current Learning Rate: 0.0001147434 +2025-03-20 05:05:30,159 Train Loss: 0.0001279, Val Loss: 0.0002099 +2025-03-20 05:05:30,159 Epoch 1845/2000 
+2025-03-20 05:08:14,476 Current Learning Rate: 0.0001197970 +2025-03-20 05:08:14,477 Train Loss: 0.0001280, Val Loss: 0.0002100 +2025-03-20 05:08:14,477 Epoch 1846/2000 +2025-03-20 05:10:59,059 Current Learning Rate: 0.0001249445 +2025-03-20 05:10:59,060 Train Loss: 0.0001282, Val Loss: 0.0002102 +2025-03-20 05:10:59,060 Epoch 1847/2000 +2025-03-20 05:13:43,329 Current Learning Rate: 0.0001301845 +2025-03-20 05:13:43,329 Train Loss: 0.0001283, Val Loss: 0.0002102 +2025-03-20 05:13:43,329 Epoch 1848/2000 +2025-03-20 05:16:27,783 Current Learning Rate: 0.0001355157 +2025-03-20 05:16:27,783 Train Loss: 0.0001284, Val Loss: 0.0002103 +2025-03-20 05:16:27,784 Epoch 1849/2000 +2025-03-20 05:19:11,852 Current Learning Rate: 0.0001409369 +2025-03-20 05:19:11,852 Train Loss: 0.0001285, Val Loss: 0.0002107 +2025-03-20 05:19:11,852 Epoch 1850/2000 +2025-03-20 05:21:56,393 Current Learning Rate: 0.0001464466 +2025-03-20 05:21:56,393 Train Loss: 0.0001287, Val Loss: 0.0002108 +2025-03-20 05:21:56,393 Epoch 1851/2000 +2025-03-20 05:24:40,617 Current Learning Rate: 0.0001520436 +2025-03-20 05:24:40,617 Train Loss: 0.0001289, Val Loss: 0.0002108 +2025-03-20 05:24:40,617 Epoch 1852/2000 +2025-03-20 05:27:25,005 Current Learning Rate: 0.0001577264 +2025-03-20 05:27:25,006 Train Loss: 0.0001291, Val Loss: 0.0002109 +2025-03-20 05:27:25,006 Epoch 1853/2000 +2025-03-20 05:30:09,600 Current Learning Rate: 0.0001634937 +2025-03-20 05:30:09,600 Train Loss: 0.0001293, Val Loss: 0.0002113 +2025-03-20 05:30:09,600 Epoch 1854/2000 +2025-03-20 05:32:53,963 Current Learning Rate: 0.0001693441 +2025-03-20 05:32:53,963 Train Loss: 0.0001295, Val Loss: 0.0002118 +2025-03-20 05:32:53,963 Epoch 1855/2000 +2025-03-20 05:35:38,306 Current Learning Rate: 0.0001752760 +2025-03-20 05:35:38,307 Train Loss: 0.0001296, Val Loss: 0.0002120 +2025-03-20 05:35:38,307 Epoch 1856/2000 +2025-03-20 05:38:22,748 Current Learning Rate: 0.0001812880 +2025-03-20 05:38:22,748 Train Loss: 0.0001298, Val Loss: 0.0002126 
+2025-03-20 05:38:22,748 Epoch 1857/2000 +2025-03-20 05:41:06,990 Current Learning Rate: 0.0001873787 +2025-03-20 05:41:06,991 Train Loss: 0.0001301, Val Loss: 0.0002132 +2025-03-20 05:41:06,991 Epoch 1858/2000 +2025-03-20 05:43:51,357 Current Learning Rate: 0.0001935465 +2025-03-20 05:43:51,357 Train Loss: 0.0001303, Val Loss: 0.0002129 +2025-03-20 05:43:51,358 Epoch 1859/2000 +2025-03-20 05:46:35,833 Current Learning Rate: 0.0001997899 +2025-03-20 05:46:35,834 Train Loss: 0.0001304, Val Loss: 0.0002128 +2025-03-20 05:46:35,834 Epoch 1860/2000 +2025-03-20 05:49:20,309 Current Learning Rate: 0.0002061074 +2025-03-20 05:49:20,309 Train Loss: 0.0001306, Val Loss: 0.0002131 +2025-03-20 05:49:20,309 Epoch 1861/2000 +2025-03-20 05:52:04,598 Current Learning Rate: 0.0002124974 +2025-03-20 05:52:04,598 Train Loss: 0.0001311, Val Loss: 0.0002136 +2025-03-20 05:52:04,599 Epoch 1862/2000 +2025-03-20 05:54:49,293 Current Learning Rate: 0.0002189583 +2025-03-20 05:54:49,293 Train Loss: 0.0001314, Val Loss: 0.0002147 +2025-03-20 05:54:49,293 Epoch 1863/2000 +2025-03-20 05:57:33,916 Current Learning Rate: 0.0002254886 +2025-03-20 05:57:33,916 Train Loss: 0.0001317, Val Loss: 0.0002148 +2025-03-20 05:57:33,917 Epoch 1864/2000 +2025-03-20 06:00:18,416 Current Learning Rate: 0.0002320866 +2025-03-20 06:00:18,416 Train Loss: 0.0001318, Val Loss: 0.0002142 +2025-03-20 06:00:18,416 Epoch 1865/2000 +2025-03-20 06:03:02,845 Current Learning Rate: 0.0002387507 +2025-03-20 06:03:02,845 Train Loss: 0.0001317, Val Loss: 0.0002147 +2025-03-20 06:03:02,845 Epoch 1866/2000 +2025-03-20 06:05:47,281 Current Learning Rate: 0.0002454793 +2025-03-20 06:05:47,281 Train Loss: 0.0001320, Val Loss: 0.0002153 +2025-03-20 06:05:47,281 Epoch 1867/2000 +2025-03-20 06:08:31,694 Current Learning Rate: 0.0002522707 +2025-03-20 06:08:31,694 Train Loss: 0.0001323, Val Loss: 0.0002153 +2025-03-20 06:08:31,695 Epoch 1868/2000 +2025-03-20 06:11:15,991 Current Learning Rate: 0.0002591232 +2025-03-20 06:11:15,992 
Train Loss: 0.0001328, Val Loss: 0.0002142 +2025-03-20 06:11:15,992 Epoch 1869/2000 +2025-03-20 06:14:00,669 Current Learning Rate: 0.0002660351 +2025-03-20 06:14:00,669 Train Loss: 0.0001333, Val Loss: 0.0002144 +2025-03-20 06:14:00,670 Epoch 1870/2000 +2025-03-20 06:16:44,833 Current Learning Rate: 0.0002730048 +2025-03-20 06:16:44,834 Train Loss: 0.0001337, Val Loss: 0.0002143 +2025-03-20 06:16:44,834 Epoch 1871/2000 +2025-03-20 06:19:28,878 Current Learning Rate: 0.0002800304 +2025-03-20 06:19:28,879 Train Loss: 0.0001342, Val Loss: 0.0002141 +2025-03-20 06:19:28,879 Epoch 1872/2000 +2025-03-20 06:22:13,840 Current Learning Rate: 0.0002871104 +2025-03-20 06:22:13,840 Train Loss: 0.0001340, Val Loss: 0.0002145 +2025-03-20 06:22:13,840 Epoch 1873/2000 +2025-03-20 06:24:57,520 Current Learning Rate: 0.0002942428 +2025-03-20 06:24:57,521 Train Loss: 0.0001341, Val Loss: 0.0002145 +2025-03-20 06:24:57,521 Epoch 1874/2000 +2025-03-20 06:27:41,975 Current Learning Rate: 0.0003014261 +2025-03-20 06:27:41,975 Train Loss: 0.0001341, Val Loss: 0.0002149 +2025-03-20 06:27:41,976 Epoch 1875/2000 +2025-03-20 06:30:26,339 Current Learning Rate: 0.0003086583 +2025-03-20 06:30:26,340 Train Loss: 0.0001340, Val Loss: 0.0002159 +2025-03-20 06:30:26,340 Epoch 1876/2000 +2025-03-20 06:33:10,646 Current Learning Rate: 0.0003159377 +2025-03-20 06:33:10,646 Train Loss: 0.0001345, Val Loss: 0.0002149 +2025-03-20 06:33:10,646 Epoch 1877/2000 +2025-03-20 06:35:55,257 Current Learning Rate: 0.0003232626 +2025-03-20 06:35:55,258 Train Loss: 0.0001348, Val Loss: 0.0002155 +2025-03-20 06:35:55,258 Epoch 1878/2000 +2025-03-20 06:38:39,841 Current Learning Rate: 0.0003306310 +2025-03-20 06:38:39,842 Train Loss: 0.0001355, Val Loss: 0.0002160 +2025-03-20 06:38:39,842 Epoch 1879/2000 +2025-03-20 06:41:23,982 Current Learning Rate: 0.0003380413 +2025-03-20 06:41:23,983 Train Loss: 0.0001358, Val Loss: 0.0002167 +2025-03-20 06:41:23,983 Epoch 1880/2000 +2025-03-20 06:44:08,461 Current Learning 
Rate: 0.0003454915 +2025-03-20 06:44:08,461 Train Loss: 0.0001359, Val Loss: 0.0002174 +2025-03-20 06:44:08,461 Epoch 1881/2000 +2025-03-20 06:46:52,767 Current Learning Rate: 0.0003529798 +2025-03-20 06:46:52,767 Train Loss: 0.0001362, Val Loss: 0.0002189 +2025-03-20 06:46:52,767 Epoch 1882/2000 +2025-03-20 06:49:37,213 Current Learning Rate: 0.0003605044 +2025-03-20 06:49:37,213 Train Loss: 0.0001364, Val Loss: 0.0002192 +2025-03-20 06:49:37,213 Epoch 1883/2000 +2025-03-20 06:52:21,611 Current Learning Rate: 0.0003680635 +2025-03-20 06:52:21,611 Train Loss: 0.0001367, Val Loss: 0.0002193 +2025-03-20 06:52:21,612 Epoch 1884/2000 +2025-03-20 06:55:05,995 Current Learning Rate: 0.0003756551 +2025-03-20 06:55:05,995 Train Loss: 0.0001372, Val Loss: 0.0002194 +2025-03-20 06:55:05,996 Epoch 1885/2000 +2025-03-20 06:57:50,577 Current Learning Rate: 0.0003832773 +2025-03-20 06:57:50,578 Train Loss: 0.0001375, Val Loss: 0.0002200 +2025-03-20 06:57:50,578 Epoch 1886/2000 +2025-03-20 07:00:35,356 Current Learning Rate: 0.0003909284 +2025-03-20 07:00:35,357 Train Loss: 0.0001372, Val Loss: 0.0002197 +2025-03-20 07:00:35,357 Epoch 1887/2000 +2025-03-20 07:03:20,087 Current Learning Rate: 0.0003986064 +2025-03-20 07:03:20,087 Train Loss: 0.0001370, Val Loss: 0.0002203 +2025-03-20 07:03:20,087 Epoch 1888/2000 +2025-03-20 07:06:04,720 Current Learning Rate: 0.0004063093 +2025-03-20 07:06:04,720 Train Loss: 0.0001368, Val Loss: 0.0002217 +2025-03-20 07:06:04,721 Epoch 1889/2000 +2025-03-20 07:08:48,848 Current Learning Rate: 0.0004140354 +2025-03-20 07:08:48,849 Train Loss: 0.0001369, Val Loss: 0.0002200 +2025-03-20 07:08:48,849 Epoch 1890/2000 +2025-03-20 07:11:33,254 Current Learning Rate: 0.0004217828 +2025-03-20 07:11:33,254 Train Loss: 0.0001375, Val Loss: 0.0002193 +2025-03-20 07:11:33,255 Epoch 1891/2000 +2025-03-20 07:14:17,618 Current Learning Rate: 0.0004295494 +2025-03-20 07:14:17,618 Train Loss: 0.0001382, Val Loss: 0.0002198 +2025-03-20 07:14:17,618 Epoch 1892/2000 
+2025-03-20 07:17:02,157 Current Learning Rate: 0.0004373334 +2025-03-20 07:17:02,158 Train Loss: 0.0001388, Val Loss: 0.0002191 +2025-03-20 07:17:02,158 Epoch 1893/2000 +2025-03-20 07:19:46,924 Current Learning Rate: 0.0004451328 +2025-03-20 07:19:46,924 Train Loss: 0.0001394, Val Loss: 0.0002203 +2025-03-20 07:19:46,924 Epoch 1894/2000 +2025-03-20 07:22:31,150 Current Learning Rate: 0.0004529458 +2025-03-20 07:22:31,151 Train Loss: 0.0001401, Val Loss: 0.0002216 +2025-03-20 07:22:31,151 Epoch 1895/2000 +2025-03-20 07:25:15,776 Current Learning Rate: 0.0004607705 +2025-03-20 07:25:15,777 Train Loss: 0.0001404, Val Loss: 0.0002210 +2025-03-20 07:25:15,777 Epoch 1896/2000 +2025-03-20 07:28:00,169 Current Learning Rate: 0.0004686047 +2025-03-20 07:28:00,169 Train Loss: 0.0001408, Val Loss: 0.0002200 +2025-03-20 07:28:00,170 Epoch 1897/2000 +2025-03-20 07:30:44,406 Current Learning Rate: 0.0004764468 +2025-03-20 07:30:44,407 Train Loss: 0.0001406, Val Loss: 0.0002201 +2025-03-20 07:30:44,407 Epoch 1898/2000 +2025-03-20 07:33:28,485 Current Learning Rate: 0.0004842946 +2025-03-20 07:33:28,485 Train Loss: 0.0001405, Val Loss: 0.0002201 +2025-03-20 07:33:28,485 Epoch 1899/2000 +2025-03-20 07:36:12,653 Current Learning Rate: 0.0004921463 +2025-03-20 07:36:12,654 Train Loss: 0.0001409, Val Loss: 0.0002215 +2025-03-20 07:36:12,654 Epoch 1900/2000 +2025-03-20 07:38:57,144 Current Learning Rate: 0.0005000000 +2025-03-20 07:38:57,144 Train Loss: 0.0001414, Val Loss: 0.0002220 +2025-03-20 07:38:57,145 Epoch 1901/2000 +2025-03-20 07:41:41,766 Current Learning Rate: 0.0005078537 +2025-03-20 07:41:41,767 Train Loss: 0.0001416, Val Loss: 0.0002240 +2025-03-20 07:41:41,767 Epoch 1902/2000 +2025-03-20 07:44:26,263 Current Learning Rate: 0.0005157054 +2025-03-20 07:44:26,263 Train Loss: 0.0001415, Val Loss: 0.0002240 +2025-03-20 07:44:26,264 Epoch 1903/2000 +2025-03-20 07:47:10,368 Current Learning Rate: 0.0005235532 +2025-03-20 07:47:10,368 Train Loss: 0.0001417, Val Loss: 0.0002227 
+2025-03-20 07:47:10,369 Epoch 1904/2000 +2025-03-20 07:49:54,884 Current Learning Rate: 0.0005313953 +2025-03-20 07:49:54,884 Train Loss: 0.0001424, Val Loss: 0.0002237 +2025-03-20 07:49:54,884 Epoch 1905/2000 +2025-03-20 07:52:39,557 Current Learning Rate: 0.0005392295 +2025-03-20 07:52:39,557 Train Loss: 0.0001434, Val Loss: 0.0002249 +2025-03-20 07:52:39,557 Epoch 1906/2000 +2025-03-20 07:55:23,921 Current Learning Rate: 0.0005470542 +2025-03-20 07:55:23,921 Train Loss: 0.0001430, Val Loss: 0.0002274 +2025-03-20 07:55:23,921 Epoch 1907/2000 +2025-03-20 07:58:08,273 Current Learning Rate: 0.0005548672 +2025-03-20 07:58:08,273 Train Loss: 0.0001429, Val Loss: 0.0002283 +2025-03-20 07:58:08,274 Epoch 1908/2000 +2025-03-20 08:00:52,681 Current Learning Rate: 0.0005626666 +2025-03-20 08:00:52,681 Train Loss: 0.0001441, Val Loss: 0.0002290 +2025-03-20 08:00:52,682 Epoch 1909/2000 +2025-03-20 08:03:37,181 Current Learning Rate: 0.0005704506 +2025-03-20 08:03:37,181 Train Loss: 0.0001451, Val Loss: 0.0002276 +2025-03-20 08:03:37,182 Epoch 1910/2000 +2025-03-20 08:06:21,894 Current Learning Rate: 0.0005782172 +2025-03-20 08:06:21,895 Train Loss: 0.0001456, Val Loss: 0.0002279 +2025-03-20 08:06:21,895 Epoch 1911/2000 +2025-03-20 08:09:06,204 Current Learning Rate: 0.0005859646 +2025-03-20 08:09:06,205 Train Loss: 0.0001461, Val Loss: 0.0002270 +2025-03-20 08:09:06,205 Epoch 1912/2000 +2025-03-20 08:11:50,514 Current Learning Rate: 0.0005936907 +2025-03-20 08:11:50,514 Train Loss: 0.0001460, Val Loss: 0.0002279 +2025-03-20 08:11:50,514 Epoch 1913/2000 +2025-03-20 08:14:35,003 Current Learning Rate: 0.0006013936 +2025-03-20 08:14:35,003 Train Loss: 0.0001457, Val Loss: 0.0002266 +2025-03-20 08:14:35,003 Epoch 1914/2000 +2025-03-20 08:17:19,438 Current Learning Rate: 0.0006090716 +2025-03-20 08:17:19,439 Train Loss: 0.0001457, Val Loss: 0.0002293 +2025-03-20 08:17:19,439 Epoch 1915/2000 +2025-03-20 08:20:04,129 Current Learning Rate: 0.0006167227 +2025-03-20 08:20:04,129 
Train Loss: 0.0001467, Val Loss: 0.0002304 +2025-03-20 08:20:04,129 Epoch 1916/2000 +2025-03-20 08:22:48,493 Current Learning Rate: 0.0006243449 +2025-03-20 08:22:48,493 Train Loss: 0.0001477, Val Loss: 0.0002323 +2025-03-20 08:22:48,493 Epoch 1917/2000 +2025-03-20 08:25:33,196 Current Learning Rate: 0.0006319365 +2025-03-20 08:25:33,196 Train Loss: 0.0001485, Val Loss: 0.0002304 +2025-03-20 08:25:33,196 Epoch 1918/2000 +2025-03-20 08:28:17,451 Current Learning Rate: 0.0006394956 +2025-03-20 08:28:17,451 Train Loss: 0.0001492, Val Loss: 0.0002293 +2025-03-20 08:28:17,451 Epoch 1919/2000 +2025-03-20 08:31:01,764 Current Learning Rate: 0.0006470202 +2025-03-20 08:31:01,765 Train Loss: 0.0001490, Val Loss: 0.0002295 +2025-03-20 08:31:01,765 Epoch 1920/2000 +2025-03-20 08:33:46,052 Current Learning Rate: 0.0006545085 +2025-03-20 08:33:46,053 Train Loss: 0.0001487, Val Loss: 0.0002319 +2025-03-20 08:33:46,054 Epoch 1921/2000 +2025-03-20 08:36:30,275 Current Learning Rate: 0.0006619587 +2025-03-20 08:36:30,276 Train Loss: 0.0001486, Val Loss: 0.0002341 +2025-03-20 08:36:30,276 Epoch 1922/2000 +2025-03-20 08:39:14,444 Current Learning Rate: 0.0006693690 +2025-03-20 08:39:14,444 Train Loss: 0.0001495, Val Loss: 0.0002386 +2025-03-20 08:39:14,444 Epoch 1923/2000 +2025-03-20 08:41:59,229 Current Learning Rate: 0.0006767374 +2025-03-20 08:41:59,229 Train Loss: 0.0001505, Val Loss: 0.0002394 +2025-03-20 08:41:59,230 Epoch 1924/2000 +2025-03-20 08:44:43,630 Current Learning Rate: 0.0006840623 +2025-03-20 08:44:43,630 Train Loss: 0.0001509, Val Loss: 0.0002364 +2025-03-20 08:44:43,631 Epoch 1925/2000 +2025-03-20 08:47:27,907 Current Learning Rate: 0.0006913417 +2025-03-20 08:47:27,907 Train Loss: 0.0001508, Val Loss: 0.0002343 +2025-03-20 08:47:27,908 Epoch 1926/2000 +2025-03-20 08:50:12,261 Current Learning Rate: 0.0006985739 +2025-03-20 08:50:12,261 Train Loss: 0.0001508, Val Loss: 0.0002321 +2025-03-20 08:50:12,262 Epoch 1927/2000 +2025-03-20 08:52:56,807 Current Learning 
Rate: 0.0007057572 +2025-03-20 08:52:56,808 Train Loss: 0.0001497, Val Loss: 0.0002300 +2025-03-20 08:52:56,808 Epoch 1928/2000 +2025-03-20 08:55:41,479 Current Learning Rate: 0.0007128896 +2025-03-20 08:55:41,480 Train Loss: 0.0001498, Val Loss: 0.0002311 +2025-03-20 08:55:41,480 Epoch 1929/2000 +2025-03-20 08:58:25,649 Current Learning Rate: 0.0007199696 +2025-03-20 08:58:25,649 Train Loss: 0.0001502, Val Loss: 0.0002321 +2025-03-20 08:58:25,649 Epoch 1930/2000 +2025-03-20 09:01:10,394 Current Learning Rate: 0.0007269952 +2025-03-20 09:01:10,394 Train Loss: 0.0001509, Val Loss: 0.0002354 +2025-03-20 09:01:10,394 Epoch 1931/2000 +2025-03-20 09:03:54,526 Current Learning Rate: 0.0007339649 +2025-03-20 09:03:54,526 Train Loss: 0.0001515, Val Loss: 0.0002347 +2025-03-20 09:03:54,526 Epoch 1932/2000 +2025-03-20 09:06:39,445 Current Learning Rate: 0.0007408768 +2025-03-20 09:06:39,445 Train Loss: 0.0001525, Val Loss: 0.0002370 +2025-03-20 09:06:39,446 Epoch 1933/2000 +2025-03-20 09:09:23,762 Current Learning Rate: 0.0007477293 +2025-03-20 09:09:23,762 Train Loss: 0.0001528, Val Loss: 0.0002405 +2025-03-20 09:09:23,763 Epoch 1934/2000 +2025-03-20 09:12:07,934 Current Learning Rate: 0.0007545207 +2025-03-20 09:12:07,935 Train Loss: 0.0001536, Val Loss: 0.0002460 +2025-03-20 09:12:07,935 Epoch 1935/2000 +2025-03-20 09:14:52,241 Current Learning Rate: 0.0007612493 +2025-03-20 09:14:52,241 Train Loss: 0.0001541, Val Loss: 0.0002428 +2025-03-20 09:14:52,241 Epoch 1936/2000 +2025-03-20 09:17:36,448 Current Learning Rate: 0.0007679134 +2025-03-20 09:17:36,449 Train Loss: 0.0001553, Val Loss: 0.0002383 +2025-03-20 09:17:36,449 Epoch 1937/2000 +2025-03-20 09:20:20,913 Current Learning Rate: 0.0007745114 +2025-03-20 09:20:20,913 Train Loss: 0.0001554, Val Loss: 0.0002429 +2025-03-20 09:20:20,913 Epoch 1938/2000 +2025-03-20 09:23:05,134 Current Learning Rate: 0.0007810417 +2025-03-20 09:23:05,134 Train Loss: 0.0001552, Val Loss: 0.0002387 +2025-03-20 09:23:05,134 Epoch 1939/2000 
+2025-03-20 09:25:49,511 Current Learning Rate: 0.0007875026 +2025-03-20 09:25:49,511 Train Loss: 0.0001544, Val Loss: 0.0002374 +2025-03-20 09:25:49,512 Epoch 1940/2000 +2025-03-20 09:28:33,782 Current Learning Rate: 0.0007938926 +2025-03-20 09:28:33,782 Train Loss: 0.0001544, Val Loss: 0.0002388 +2025-03-20 09:28:33,783 Epoch 1941/2000 +2025-03-20 09:31:18,361 Current Learning Rate: 0.0008002101 +2025-03-20 09:31:18,361 Train Loss: 0.0001546, Val Loss: 0.0002349 +2025-03-20 09:31:18,362 Epoch 1942/2000 +2025-03-20 09:34:02,907 Current Learning Rate: 0.0008064535 +2025-03-20 09:34:02,908 Train Loss: 0.0001557, Val Loss: 0.0002346 +2025-03-20 09:34:02,908 Epoch 1943/2000 +2025-03-20 09:36:47,796 Current Learning Rate: 0.0008126213 +2025-03-20 09:36:47,796 Train Loss: 0.0001560, Val Loss: 0.0002338 +2025-03-20 09:36:47,797 Epoch 1944/2000 +2025-03-20 09:39:32,332 Current Learning Rate: 0.0008187120 +2025-03-20 09:39:32,333 Train Loss: 0.0001558, Val Loss: 0.0002359 +2025-03-20 09:39:32,333 Epoch 1945/2000 +2025-03-20 09:42:16,786 Current Learning Rate: 0.0008247240 +2025-03-20 09:42:16,787 Train Loss: 0.0001561, Val Loss: 0.0002390 +2025-03-20 09:42:16,787 Epoch 1946/2000 +2025-03-20 09:45:01,098 Current Learning Rate: 0.0008306559 +2025-03-20 09:45:01,099 Train Loss: 0.0001575, Val Loss: 0.0002419 +2025-03-20 09:45:01,099 Epoch 1947/2000 +2025-03-20 09:47:45,473 Current Learning Rate: 0.0008365063 +2025-03-20 09:47:45,473 Train Loss: 0.0001582, Val Loss: 0.0002420 +2025-03-20 09:47:45,474 Epoch 1948/2000 +2025-03-20 09:50:29,974 Current Learning Rate: 0.0008422736 +2025-03-20 09:50:29,974 Train Loss: 0.0001580, Val Loss: 0.0002401 +2025-03-20 09:50:29,975 Epoch 1949/2000 +2025-03-20 09:53:14,486 Current Learning Rate: 0.0008479564 +2025-03-20 09:53:14,487 Train Loss: 0.0001582, Val Loss: 0.0002390 +2025-03-20 09:53:14,487 Epoch 1950/2000 +2025-03-20 09:55:58,937 Current Learning Rate: 0.0008535534 +2025-03-20 09:55:58,937 Train Loss: 0.0001587, Val Loss: 0.0002384 
+2025-03-20 09:55:58,938 Epoch 1951/2000 +2025-03-20 09:58:43,628 Current Learning Rate: 0.0008590631 +2025-03-20 09:58:43,628 Train Loss: 0.0001586, Val Loss: 0.0002368 +2025-03-20 09:58:43,628 Epoch 1952/2000 +2025-03-20 10:01:28,093 Current Learning Rate: 0.0008644843 +2025-03-20 10:01:28,093 Train Loss: 0.0001587, Val Loss: 0.0002359 +2025-03-20 10:01:28,093 Epoch 1953/2000 +2025-03-20 10:04:12,442 Current Learning Rate: 0.0008698155 +2025-03-20 10:04:12,442 Train Loss: 0.0001580, Val Loss: 0.0002365 +2025-03-20 10:04:12,443 Epoch 1954/2000 +2025-03-20 10:06:56,813 Current Learning Rate: 0.0008750555 +2025-03-20 10:06:56,814 Train Loss: 0.0001584, Val Loss: 0.0002387 +2025-03-20 10:06:56,814 Epoch 1955/2000 +2025-03-20 10:09:41,506 Current Learning Rate: 0.0008802030 +2025-03-20 10:09:41,507 Train Loss: 0.0001594, Val Loss: 0.0002413 +2025-03-20 10:09:41,507 Epoch 1956/2000 +2025-03-20 10:12:25,828 Current Learning Rate: 0.0008852566 +2025-03-20 10:12:25,828 Train Loss: 0.0001599, Val Loss: 0.0002381 +2025-03-20 10:12:25,829 Epoch 1957/2000 +2025-03-20 10:15:10,133 Current Learning Rate: 0.0008902152 +2025-03-20 10:15:10,134 Train Loss: 0.0001591, Val Loss: 0.0002394 +2025-03-20 10:15:10,134 Epoch 1958/2000 +2025-03-20 10:17:54,294 Current Learning Rate: 0.0008950775 +2025-03-20 10:17:54,294 Train Loss: 0.0001590, Val Loss: 0.0002465 +2025-03-20 10:17:54,294 Epoch 1959/2000 +2025-03-20 10:20:38,648 Current Learning Rate: 0.0008998423 +2025-03-20 10:20:38,648 Train Loss: 0.0001596, Val Loss: 0.0002500 +2025-03-20 10:20:38,648 Epoch 1960/2000 +2025-03-20 10:23:23,121 Current Learning Rate: 0.0009045085 +2025-03-20 10:23:23,122 Train Loss: 0.0001601, Val Loss: 0.0002402 +2025-03-20 10:23:23,122 Epoch 1961/2000 +2025-03-20 10:26:07,385 Current Learning Rate: 0.0009090749 +2025-03-20 10:26:07,386 Train Loss: 0.0001612, Val Loss: 0.0002369 +2025-03-20 10:26:07,386 Epoch 1962/2000 +2025-03-20 10:28:51,716 Current Learning Rate: 0.0009135403 +2025-03-20 10:28:51,717 
Train Loss: 0.0001624, Val Loss: 0.0002377 +2025-03-20 10:28:51,717 Epoch 1963/2000 +2025-03-20 10:31:35,957 Current Learning Rate: 0.0009179037 +2025-03-20 10:31:35,958 Train Loss: 0.0001615, Val Loss: 0.0002367 +2025-03-20 10:31:35,958 Epoch 1964/2000 +2025-03-20 10:34:20,221 Current Learning Rate: 0.0009221640 +2025-03-20 10:34:20,222 Train Loss: 0.0001606, Val Loss: 0.0002378 +2025-03-20 10:34:20,222 Epoch 1965/2000 +2025-03-20 10:37:04,482 Current Learning Rate: 0.0009263201 +2025-03-20 10:37:04,482 Train Loss: 0.0001604, Val Loss: 0.0002408 +2025-03-20 10:37:04,482 Epoch 1966/2000 +2025-03-20 10:39:48,661 Current Learning Rate: 0.0009303710 +2025-03-20 10:39:48,662 Train Loss: 0.0001605, Val Loss: 0.0002429 +2025-03-20 10:39:48,662 Epoch 1967/2000 +2025-03-20 10:42:33,352 Current Learning Rate: 0.0009343158 +2025-03-20 10:42:33,352 Train Loss: 0.0001607, Val Loss: 0.0002422 +2025-03-20 10:42:33,352 Epoch 1968/2000 +2025-03-20 10:45:17,792 Current Learning Rate: 0.0009381533 +2025-03-20 10:45:17,792 Train Loss: 0.0001610, Val Loss: 0.0002409 +2025-03-20 10:45:17,792 Epoch 1969/2000 +2025-03-20 10:48:02,080 Current Learning Rate: 0.0009418828 +2025-03-20 10:48:02,081 Train Loss: 0.0001602, Val Loss: 0.0002421 +2025-03-20 10:48:02,081 Epoch 1970/2000 +2025-03-20 10:50:46,641 Current Learning Rate: 0.0009455033 +2025-03-20 10:50:46,642 Train Loss: 0.0001602, Val Loss: 0.0002428 +2025-03-20 10:50:46,642 Epoch 1971/2000 +2025-03-20 10:53:31,199 Current Learning Rate: 0.0009490138 +2025-03-20 10:53:31,199 Train Loss: 0.0001612, Val Loss: 0.0002433 +2025-03-20 10:53:31,199 Epoch 1972/2000 +2025-03-20 10:56:15,660 Current Learning Rate: 0.0009524135 +2025-03-20 10:56:15,661 Train Loss: 0.0001615, Val Loss: 0.0002430 +2025-03-20 10:56:15,661 Epoch 1973/2000 +2025-03-20 10:58:59,874 Current Learning Rate: 0.0009557016 +2025-03-20 10:58:59,874 Train Loss: 0.0001614, Val Loss: 0.0002420 +2025-03-20 10:58:59,875 Epoch 1974/2000 +2025-03-20 11:01:44,274 Current Learning 
Rate: 0.0009588773 +2025-03-20 11:01:44,275 Train Loss: 0.0001617, Val Loss: 0.0002417 +2025-03-20 11:01:44,275 Epoch 1975/2000 +2025-03-20 11:04:29,022 Current Learning Rate: 0.0009619398 +2025-03-20 11:04:29,022 Train Loss: 0.0001617, Val Loss: 0.0002417 +2025-03-20 11:04:29,023 Epoch 1976/2000 +2025-03-20 11:07:13,427 Current Learning Rate: 0.0009648882 +2025-03-20 11:07:13,427 Train Loss: 0.0001622, Val Loss: 0.0002462 +2025-03-20 11:07:13,428 Epoch 1977/2000 +2025-03-20 11:09:57,758 Current Learning Rate: 0.0009677220 +2025-03-20 11:09:57,758 Train Loss: 0.0001623, Val Loss: 0.0002468 +2025-03-20 11:09:57,759 Epoch 1978/2000 +2025-03-20 11:12:41,595 Current Learning Rate: 0.0009704404 +2025-03-20 11:12:41,596 Train Loss: 0.0001619, Val Loss: 0.0002460 +2025-03-20 11:12:41,596 Epoch 1979/2000 +2025-03-20 11:15:25,791 Current Learning Rate: 0.0009730427 +2025-03-20 11:15:25,792 Train Loss: 0.0001620, Val Loss: 0.0002483 +2025-03-20 11:15:25,792 Epoch 1980/2000 +2025-03-20 11:18:10,514 Current Learning Rate: 0.0009755283 +2025-03-20 11:18:10,514 Train Loss: 0.0001624, Val Loss: 0.0002523 +2025-03-20 11:18:10,514 Epoch 1981/2000 +2025-03-20 11:20:54,921 Current Learning Rate: 0.0009778965 +2025-03-20 11:20:54,922 Train Loss: 0.0001621, Val Loss: 0.0002497 +2025-03-20 11:20:54,922 Epoch 1982/2000 +2025-03-20 11:23:39,383 Current Learning Rate: 0.0009801468 +2025-03-20 11:23:39,384 Train Loss: 0.0001624, Val Loss: 0.0002458 +2025-03-20 11:23:39,384 Epoch 1983/2000 +2025-03-20 11:26:23,742 Current Learning Rate: 0.0009822787 +2025-03-20 11:26:23,742 Train Loss: 0.0001625, Val Loss: 0.0002432 +2025-03-20 11:26:23,742 Epoch 1984/2000 +2025-03-20 11:29:08,125 Current Learning Rate: 0.0009842916 +2025-03-20 11:29:08,125 Train Loss: 0.0001626, Val Loss: 0.0002410 +2025-03-20 11:29:08,125 Epoch 1985/2000 +2025-03-20 11:31:52,515 Current Learning Rate: 0.0009861850 +2025-03-20 11:31:52,516 Train Loss: 0.0001629, Val Loss: 0.0002434 +2025-03-20 11:31:52,516 Epoch 1986/2000 
+2025-03-20 11:34:36,965 Current Learning Rate: 0.0009879584 +2025-03-20 11:34:36,966 Train Loss: 0.0001637, Val Loss: 0.0002417 +2025-03-20 11:34:36,966 Epoch 1987/2000 +2025-03-20 11:37:21,369 Current Learning Rate: 0.0009896114 +2025-03-20 11:37:21,369 Train Loss: 0.0001639, Val Loss: 0.0002448 +2025-03-20 11:37:21,369 Epoch 1988/2000 +2025-03-20 11:40:05,919 Current Learning Rate: 0.0009911436 +2025-03-20 11:40:05,919 Train Loss: 0.0001636, Val Loss: 0.0002441 +2025-03-20 11:40:05,919 Epoch 1989/2000 +2025-03-20 11:42:50,378 Current Learning Rate: 0.0009925547 +2025-03-20 11:42:50,378 Train Loss: 0.0001634, Val Loss: 0.0002451 +2025-03-20 11:42:50,378 Epoch 1990/2000 +2025-03-20 11:45:34,696 Current Learning Rate: 0.0009938442 +2025-03-20 11:45:34,697 Train Loss: 0.0001632, Val Loss: 0.0002457 +2025-03-20 11:45:34,697 Epoch 1991/2000 +2025-03-20 11:48:19,089 Current Learning Rate: 0.0009950118 +2025-03-20 11:48:19,089 Train Loss: 0.0001638, Val Loss: 0.0002484 +2025-03-20 11:48:19,090 Epoch 1992/2000 +2025-03-20 11:51:03,603 Current Learning Rate: 0.0009960574 +2025-03-20 11:51:03,604 Train Loss: 0.0001649, Val Loss: 0.0002464 +2025-03-20 11:51:03,604 Epoch 1993/2000 +2025-03-20 11:53:47,674 Current Learning Rate: 0.0009969805 +2025-03-20 11:53:47,674 Train Loss: 0.0001662, Val Loss: 0.0002469 +2025-03-20 11:53:47,675 Epoch 1994/2000 +2025-03-20 11:56:32,043 Current Learning Rate: 0.0009977810 +2025-03-20 11:56:32,043 Train Loss: 0.0001654, Val Loss: 0.0002448 +2025-03-20 11:56:32,043 Epoch 1995/2000 +2025-03-20 11:59:16,743 Current Learning Rate: 0.0009984587 +2025-03-20 11:59:16,743 Train Loss: 0.0001650, Val Loss: 0.0002448 +2025-03-20 11:59:16,743 Epoch 1996/2000 +2025-03-20 12:02:01,222 Current Learning Rate: 0.0009990134 +2025-03-20 12:02:01,222 Train Loss: 0.0001639, Val Loss: 0.0002421 +2025-03-20 12:02:01,222 Epoch 1997/2000 +2025-03-20 12:04:45,817 Current Learning Rate: 0.0009994449 +2025-03-20 12:04:45,817 Train Loss: 0.0001645, Val Loss: 0.0002428 
+2025-03-20 12:04:45,817 Epoch 1998/2000 +2025-03-20 12:07:30,467 Current Learning Rate: 0.0009997533 +2025-03-20 12:07:30,468 Train Loss: 0.0001646, Val Loss: 0.0002430 +2025-03-20 12:07:30,468 Epoch 1999/2000 +2025-03-20 12:10:15,243 Current Learning Rate: 0.0009999383 +2025-03-20 12:10:15,244 Train Loss: 0.0001647, Val Loss: 0.0002429 +2025-03-20 12:10:15,244 Epoch 2000/2000 +2025-03-20 12:12:59,957 Current Learning Rate: 0.0010000000 +2025-03-20 12:12:59,958 Train Loss: 0.0001651, Val Loss: 0.0002439 +2025-03-20 12:13:10,841 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Kno_exp1_20250226_inference.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Kno_exp1_20250226_inference.log new file mode 100644 index 0000000000000000000000000000000000000000..7d1a3b3e5f1a8bcec0a9922635c1a0603addf3fd --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Kno_exp1_20250226_inference.log @@ -0,0 +1,4 @@ +2025-02-26 00:37:42,946 成功加载模型权重: /jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Kno_exp1_20250226_best_model.pth +2025-02-26 00:37:42,946 开始推理... +2025-02-26 00:41:05,335 推理结果已保存至 /jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 00:41:17,048 推理任务完成! 
diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Kno_exp1_20250226_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Kno_exp1_20250226_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..04c515e4580cb8c1744b304e0338854226f0ac4a --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Kno_exp1_20250226_training_log.log @@ -0,0 +1,3064 @@ +2025-02-25 23:04:16,603 Epoch 1/2000 +2025-02-25 23:06:28,425 Epoch 1/2000 +2025-02-25 23:07:44,531 Epoch 1/2000 +2025-02-25 23:10:18,355 Epoch 1/2000 +2025-02-25 23:11:22,774 Current Learning Rate: 0.0099993832 +2025-02-25 23:11:22,780 Train Loss: 0.0107643, Val Loss: 0.0159971 +2025-02-25 23:11:22,781 Epoch 2/2000 +2025-02-25 23:12:27,556 Current Learning Rate: 0.0099975328 +2025-02-25 23:12:27,557 Train Loss: 0.0195464, Val Loss: 0.0445548 +2025-02-25 23:12:27,557 Epoch 3/2000 +2025-02-25 23:13:31,804 Current Learning Rate: 0.0099944494 +2025-02-25 23:13:31,804 Train Loss: 0.0224501, Val Loss: 0.0445962 +2025-02-25 23:13:31,804 Epoch 4/2000 +2025-02-25 23:14:34,672 Current Learning Rate: 0.0099901336 +2025-02-25 23:14:34,672 Train Loss: 0.0224774, Val Loss: 0.0445954 +2025-02-25 23:14:34,673 Epoch 5/2000 +2025-02-25 23:15:40,157 Current Learning Rate: 0.0099845867 +2025-02-25 23:15:40,157 Train Loss: 0.0224594, Val Loss: 0.0446043 +2025-02-25 23:15:40,158 Epoch 6/2000 +2025-02-25 23:16:43,030 Current Learning Rate: 0.0099778098 +2025-02-25 23:16:43,031 Train Loss: 0.0224593, Val Loss: 0.0445979 +2025-02-25 23:16:43,032 Epoch 7/2000 +2025-02-25 23:17:47,239 Current Learning Rate: 0.0099698048 +2025-02-25 23:17:47,240 Train Loss: 0.0224614, Val Loss: 0.0446165 +2025-02-25 23:17:47,240 Epoch 8/2000 +2025-02-25 23:18:52,049 Current Learning Rate: 0.0099605735 +2025-02-25 23:18:52,050 Train Loss: 0.0224673, Val Loss: 0.0446118 +2025-02-25 23:18:52,050 Epoch 9/2000 +2025-02-25 23:19:56,910 Current Learning Rate: 0.0099501183 +2025-02-25 23:19:56,911 Train Loss: 0.0224631, Val Loss: 0.0445717 
+2025-02-25 23:19:56,911 Epoch 10/2000 +2025-02-25 23:21:00,781 Current Learning Rate: 0.0099384417 +2025-02-25 23:21:00,782 Train Loss: 0.0224253, Val Loss: 0.0444539 +2025-02-25 23:21:00,783 Epoch 11/2000 +2025-02-25 23:22:05,654 Current Learning Rate: 0.0099255466 +2025-02-25 23:22:05,654 Train Loss: 0.0224254, Val Loss: 0.0445603 +2025-02-25 23:22:05,655 Epoch 12/2000 +2025-02-25 23:23:10,400 Current Learning Rate: 0.0099114363 +2025-02-25 23:23:10,400 Train Loss: 0.0224325, Val Loss: 0.0443700 +2025-02-25 23:23:10,401 Epoch 13/2000 +2025-02-25 23:24:12,581 Current Learning Rate: 0.0098961141 +2025-02-25 23:24:12,581 Train Loss: 0.0223692, Val Loss: 0.0445073 +2025-02-25 23:24:12,582 Epoch 14/2000 +2025-02-25 23:25:15,025 Current Learning Rate: 0.0098795838 +2025-02-25 23:25:15,026 Train Loss: 0.0224201, Val Loss: 0.0444357 +2025-02-25 23:25:15,026 Epoch 15/2000 +2025-02-25 23:26:17,732 Current Learning Rate: 0.0098618496 +2025-02-25 23:26:17,733 Train Loss: 0.0224022, Val Loss: 0.0444246 +2025-02-25 23:26:17,733 Epoch 16/2000 +2025-02-25 23:27:21,221 Current Learning Rate: 0.0098429158 +2025-02-25 23:27:21,222 Train Loss: 0.0224070, Val Loss: 0.0444540 +2025-02-25 23:27:21,222 Epoch 17/2000 +2025-02-25 23:28:24,197 Current Learning Rate: 0.0098227871 +2025-02-25 23:28:24,201 Train Loss: 0.0224357, Val Loss: 0.0445381 +2025-02-25 23:28:24,202 Epoch 18/2000 +2025-02-25 23:29:26,456 Current Learning Rate: 0.0098014684 +2025-02-25 23:29:26,457 Train Loss: 0.0224350, Val Loss: 0.0445079 +2025-02-25 23:29:26,457 Epoch 19/2000 +2025-02-25 23:30:29,619 Current Learning Rate: 0.0097789651 +2025-02-25 23:30:29,620 Train Loss: 0.0224390, Val Loss: 0.0445320 +2025-02-25 23:30:29,620 Epoch 20/2000 +2025-02-25 23:31:32,440 Current Learning Rate: 0.0097552826 +2025-02-25 23:31:32,441 Train Loss: 0.0224368, Val Loss: 0.0445217 +2025-02-25 23:31:32,441 Epoch 21/2000 +2025-02-25 23:32:38,240 Current Learning Rate: 0.0097304268 +2025-02-25 23:32:38,241 Train Loss: 0.0224367, Val 
Loss: 0.0445221 +2025-02-25 23:32:38,241 Epoch 22/2000 +2025-02-25 23:33:39,839 Current Learning Rate: 0.0097044038 +2025-02-25 23:33:39,839 Train Loss: 0.0224384, Val Loss: 0.0445471 +2025-02-25 23:33:39,839 Epoch 23/2000 +2025-02-25 23:34:42,935 Current Learning Rate: 0.0096772202 +2025-02-25 23:34:42,935 Train Loss: 0.0224458, Val Loss: 0.0445591 +2025-02-25 23:34:42,935 Epoch 24/2000 +2025-02-25 23:35:44,233 Current Learning Rate: 0.0096488824 +2025-02-25 23:35:44,234 Train Loss: 0.0224544, Val Loss: 0.0445776 +2025-02-25 23:35:44,234 Epoch 25/2000 +2025-02-25 23:36:46,697 Current Learning Rate: 0.0096193977 +2025-02-25 23:36:46,698 Train Loss: 0.0224540, Val Loss: 0.0445784 +2025-02-25 23:36:46,698 Epoch 26/2000 +2025-02-25 23:37:49,943 Current Learning Rate: 0.0095887731 +2025-02-25 23:37:49,944 Train Loss: 0.0224372, Val Loss: 0.0445021 +2025-02-25 23:37:49,944 Epoch 27/2000 +2025-02-25 23:38:51,900 Current Learning Rate: 0.0095570164 +2025-02-25 23:38:51,901 Train Loss: 0.0224284, Val Loss: 0.0444953 +2025-02-25 23:38:51,901 Epoch 28/2000 +2025-02-25 23:39:54,437 Current Learning Rate: 0.0095241353 +2025-02-25 23:39:54,438 Train Loss: 0.0224032, Val Loss: 0.0443980 +2025-02-25 23:39:54,438 Epoch 29/2000 +2025-02-25 23:40:55,537 Current Learning Rate: 0.0094901379 +2025-02-25 23:40:55,538 Train Loss: 0.0223987, Val Loss: 0.0443328 +2025-02-25 23:40:55,538 Epoch 30/2000 +2025-02-25 23:41:58,758 Current Learning Rate: 0.0094550326 +2025-02-25 23:41:58,758 Train Loss: 0.0223809, Val Loss: 0.0443447 +2025-02-25 23:41:58,759 Epoch 31/2000 +2025-02-25 23:43:01,163 Current Learning Rate: 0.0094188282 +2025-02-25 23:43:01,164 Train Loss: 0.0223902, Val Loss: 0.0444225 +2025-02-25 23:43:01,164 Epoch 32/2000 +2025-02-25 23:44:03,473 Current Learning Rate: 0.0093815334 +2025-02-25 23:44:03,474 Train Loss: 0.0223860, Val Loss: 0.0442495 +2025-02-25 23:44:03,474 Epoch 33/2000 +2025-02-25 23:45:06,435 Current Learning Rate: 0.0093431576 +2025-02-25 23:45:06,435 Train 
Loss: 0.0223484, Val Loss: 0.0443031 +2025-02-25 23:45:06,436 Epoch 34/2000 +2025-02-25 23:46:10,330 Current Learning Rate: 0.0093037101 +2025-02-25 23:46:10,331 Train Loss: 0.0223970, Val Loss: 0.0444186 +2025-02-25 23:46:10,331 Epoch 35/2000 +2025-02-25 23:47:15,188 Current Learning Rate: 0.0092632008 +2025-02-25 23:47:15,189 Train Loss: 0.0224083, Val Loss: 0.0445148 +2025-02-25 23:47:15,190 Epoch 36/2000 +2025-02-25 23:48:18,608 Current Learning Rate: 0.0092216396 +2025-02-25 23:48:18,609 Train Loss: 0.0224303, Val Loss: 0.0445067 +2025-02-25 23:48:18,609 Epoch 37/2000 +2025-02-25 23:49:22,088 Current Learning Rate: 0.0091790368 +2025-02-25 23:49:22,092 Train Loss: 0.0224471, Val Loss: 0.0445567 +2025-02-25 23:49:22,092 Epoch 38/2000 +2025-02-25 23:50:25,016 Current Learning Rate: 0.0091354029 +2025-02-25 23:50:25,016 Train Loss: 0.0224478, Val Loss: 0.0445624 +2025-02-25 23:50:25,017 Epoch 39/2000 +2025-02-25 23:51:28,752 Current Learning Rate: 0.0090907486 +2025-02-25 23:51:28,752 Train Loss: 0.0224464, Val Loss: 0.0445551 +2025-02-25 23:51:28,753 Epoch 40/2000 +2025-02-25 23:52:31,583 Current Learning Rate: 0.0090450850 +2025-02-25 23:52:31,583 Train Loss: 0.0224433, Val Loss: 0.0445576 +2025-02-25 23:52:31,584 Epoch 41/2000 +2025-02-25 23:53:35,893 Current Learning Rate: 0.0089984233 +2025-02-25 23:53:35,894 Train Loss: 0.0224456, Val Loss: 0.0445486 +2025-02-25 23:53:35,894 Epoch 42/2000 +2025-02-25 23:54:38,299 Current Learning Rate: 0.0089507751 +2025-02-25 23:54:38,300 Train Loss: 0.0224432, Val Loss: 0.0445532 +2025-02-25 23:54:38,300 Epoch 43/2000 +2025-02-25 23:55:40,945 Current Learning Rate: 0.0089021520 +2025-02-25 23:55:40,945 Train Loss: 0.0224471, Val Loss: 0.0445712 +2025-02-25 23:55:40,945 Epoch 44/2000 +2025-02-25 23:56:43,462 Current Learning Rate: 0.0088525662 +2025-02-25 23:56:43,463 Train Loss: 0.0224491, Val Loss: 0.0445592 +2025-02-25 23:56:43,464 Epoch 45/2000 +2025-02-25 23:57:47,966 Current Learning Rate: 0.0088020298 +2025-02-25 
23:57:47,967 Train Loss: 0.0224481, Val Loss: 0.0445716 +2025-02-25 23:57:47,967 Epoch 46/2000 +2025-02-25 23:58:51,820 Current Learning Rate: 0.0087505553 +2025-02-25 23:58:51,820 Train Loss: 0.0224481, Val Loss: 0.0445616 +2025-02-25 23:58:51,821 Epoch 47/2000 +2025-02-25 23:59:55,580 Current Learning Rate: 0.0086981555 +2025-02-25 23:59:55,581 Train Loss: 0.0224523, Val Loss: 0.0445817 +2025-02-25 23:59:55,581 Epoch 48/2000 +2025-02-26 00:00:59,793 Current Learning Rate: 0.0086448431 +2025-02-26 00:00:59,793 Train Loss: 0.0224589, Val Loss: 0.0445571 +2025-02-26 00:00:59,794 Epoch 49/2000 +2025-02-26 00:02:02,147 Current Learning Rate: 0.0085906315 +2025-02-26 00:02:02,148 Train Loss: 0.0224405, Val Loss: 0.0444769 +2025-02-26 00:02:02,148 Epoch 50/2000 +2025-02-26 00:03:03,728 Current Learning Rate: 0.0085355339 +2025-02-26 00:03:03,728 Train Loss: 0.0224057, Val Loss: 0.0444435 +2025-02-26 00:03:03,729 Epoch 51/2000 +2025-02-26 00:04:07,245 Current Learning Rate: 0.0084795640 +2025-02-26 00:04:07,246 Train Loss: 0.0224084, Val Loss: 0.0444437 +2025-02-26 00:04:07,246 Epoch 52/2000 +2025-02-26 00:05:11,514 Current Learning Rate: 0.0084227355 +2025-02-26 00:05:11,515 Train Loss: 0.0224074, Val Loss: 0.0444466 +2025-02-26 00:05:11,515 Epoch 53/2000 +2025-02-26 00:06:15,107 Current Learning Rate: 0.0083650626 +2025-02-26 00:06:15,108 Train Loss: 0.0224080, Val Loss: 0.0445447 +2025-02-26 00:06:15,108 Epoch 54/2000 +2025-02-26 00:07:17,956 Current Learning Rate: 0.0083065593 +2025-02-26 00:07:17,957 Train Loss: 0.0224408, Val Loss: 0.0445406 +2025-02-26 00:07:17,957 Epoch 55/2000 +2025-02-26 00:08:20,727 Current Learning Rate: 0.0082472402 +2025-02-26 00:08:20,728 Train Loss: 0.0224420, Val Loss: 0.0445420 +2025-02-26 00:08:20,728 Epoch 56/2000 +2025-02-26 00:09:22,801 Current Learning Rate: 0.0081871199 +2025-02-26 00:09:22,801 Train Loss: 0.0224411, Val Loss: 0.0445514 +2025-02-26 00:09:22,802 Epoch 57/2000 +2025-02-26 00:10:26,005 Current Learning Rate: 
0.0081262133 +2025-02-26 00:10:26,005 Train Loss: 0.0224438, Val Loss: 0.0445523 +2025-02-26 00:10:26,006 Epoch 58/2000 +2025-02-26 00:11:30,537 Current Learning Rate: 0.0080645353 +2025-02-26 00:11:30,537 Train Loss: 0.0224424, Val Loss: 0.0445456 +2025-02-26 00:11:30,537 Epoch 59/2000 +2025-02-26 00:12:33,808 Current Learning Rate: 0.0080021011 +2025-02-26 00:12:33,809 Train Loss: 0.0224440, Val Loss: 0.0445622 +2025-02-26 00:12:33,809 Epoch 60/2000 +2025-02-26 00:13:36,328 Current Learning Rate: 0.0079389263 +2025-02-26 00:13:36,329 Train Loss: 0.0224438, Val Loss: 0.0445589 +2025-02-26 00:13:36,329 Epoch 61/2000 +2025-02-26 00:14:40,717 Current Learning Rate: 0.0078750263 +2025-02-26 00:14:40,718 Train Loss: 0.0224439, Val Loss: 0.0445534 +2025-02-26 00:14:40,718 Epoch 62/2000 +2025-02-26 00:15:43,755 Current Learning Rate: 0.0078104169 +2025-02-26 00:15:43,755 Train Loss: 0.0224442, Val Loss: 0.0445504 +2025-02-26 00:15:43,755 Epoch 63/2000 +2025-02-26 00:16:43,958 Current Learning Rate: 0.0077451141 +2025-02-26 00:16:43,958 Train Loss: 0.0224490, Val Loss: 0.0445560 +2025-02-26 00:16:43,959 Epoch 64/2000 +2025-02-26 00:17:47,467 Current Learning Rate: 0.0076791340 +2025-02-26 00:17:47,467 Train Loss: 0.0224347, Val Loss: 0.0444168 +2025-02-26 00:17:47,468 Epoch 65/2000 +2025-02-26 00:18:49,818 Current Learning Rate: 0.0076124928 +2025-02-26 00:18:49,819 Train Loss: 0.0223985, Val Loss: 0.0444090 +2025-02-26 00:18:49,819 Epoch 66/2000 +2025-02-26 00:19:53,613 Current Learning Rate: 0.0075452071 +2025-02-26 00:19:53,613 Train Loss: 0.0223750, Val Loss: 0.0443101 +2025-02-26 00:19:53,614 Epoch 67/2000 +2025-02-26 00:20:56,472 Current Learning Rate: 0.0074772933 +2025-02-26 00:20:56,472 Train Loss: 0.0223603, Val Loss: 0.0442897 +2025-02-26 00:20:56,472 Epoch 68/2000 +2025-02-26 00:22:00,579 Current Learning Rate: 0.0074087684 +2025-02-26 00:22:00,579 Train Loss: 0.0224110, Val Loss: 0.0444666 +2025-02-26 00:22:00,580 Epoch 69/2000 +2025-02-26 00:23:04,682 
Current Learning Rate: 0.0073396491 +2025-02-26 00:23:04,683 Train Loss: 0.0224168, Val Loss: 0.0444668 +2025-02-26 00:23:04,683 Epoch 70/2000 +2025-02-26 00:24:07,270 Current Learning Rate: 0.0072699525 +2025-02-26 00:24:07,271 Train Loss: 0.0224174, Val Loss: 0.0444639 +2025-02-26 00:24:07,272 Epoch 71/2000 +2025-02-26 00:25:10,834 Current Learning Rate: 0.0071996958 +2025-02-26 00:25:10,835 Train Loss: 0.0223725, Val Loss: 0.0442604 +2025-02-26 00:25:10,835 Epoch 72/2000 +2025-02-26 00:26:14,658 Current Learning Rate: 0.0071288965 +2025-02-26 00:26:14,658 Train Loss: 0.0223478, Val Loss: 0.0442636 +2025-02-26 00:26:14,658 Epoch 73/2000 +2025-02-26 00:27:15,331 Current Learning Rate: 0.0070575718 +2025-02-26 00:27:15,331 Train Loss: 0.0223478, Val Loss: 0.0442475 +2025-02-26 00:27:15,332 Epoch 74/2000 +2025-02-26 00:28:17,947 Current Learning Rate: 0.0069857395 +2025-02-26 00:28:17,948 Train Loss: 0.0223428, Val Loss: 0.0441561 +2025-02-26 00:28:17,948 Epoch 75/2000 +2025-02-26 00:29:21,884 Current Learning Rate: 0.0069134172 +2025-02-26 00:29:21,884 Train Loss: 0.0223310, Val Loss: 0.0442091 +2025-02-26 00:29:21,885 Epoch 76/2000 +2025-02-26 00:30:23,774 Current Learning Rate: 0.0068406228 +2025-02-26 00:30:23,774 Train Loss: 0.0223332, Val Loss: 0.0442112 +2025-02-26 00:30:23,774 Epoch 77/2000 +2025-02-26 00:31:24,419 Current Learning Rate: 0.0067673742 +2025-02-26 00:31:24,420 Train Loss: 0.0222675, Val Loss: 0.0439948 +2025-02-26 00:31:24,420 Epoch 78/2000 +2025-02-26 00:32:26,776 Current Learning Rate: 0.0066936896 +2025-02-26 00:32:26,776 Train Loss: 0.0222588, Val Loss: 0.0439769 +2025-02-26 00:32:26,777 Epoch 79/2000 +2025-02-26 00:33:30,104 Current Learning Rate: 0.0066195871 +2025-02-26 00:33:30,105 Train Loss: 0.0222589, Val Loss: 0.0440126 +2025-02-26 00:33:30,105 Epoch 80/2000 +2025-02-26 00:34:32,214 Current Learning Rate: 0.0065450850 +2025-02-26 00:34:32,215 Train Loss: 0.0222709, Val Loss: 0.0440331 +2025-02-26 00:34:32,215 Epoch 81/2000 
+2025-02-26 00:35:35,405 Current Learning Rate: 0.0064702016 +2025-02-26 00:35:35,406 Train Loss: 0.0222986, Val Loss: 0.0442996 +2025-02-26 00:35:35,406 Epoch 82/2000 +2025-02-26 00:36:38,052 Current Learning Rate: 0.0063949555 +2025-02-26 00:36:38,052 Train Loss: 0.0223482, Val Loss: 0.0442293 +2025-02-26 00:36:38,053 Epoch 83/2000 +2025-02-26 00:37:41,723 Current Learning Rate: 0.0063193652 +2025-02-26 00:37:41,723 Train Loss: 0.0223330, Val Loss: 0.0440979 +2025-02-26 00:37:41,724 Epoch 84/2000 +2025-02-26 00:38:45,114 Current Learning Rate: 0.0062434494 +2025-02-26 00:38:45,115 Train Loss: 0.0223741, Val Loss: 0.0443825 +2025-02-26 00:38:45,115 Epoch 85/2000 +2025-02-26 00:39:48,090 Current Learning Rate: 0.0061672268 +2025-02-26 00:39:48,090 Train Loss: 0.0223891, Val Loss: 0.0443915 +2025-02-26 00:39:48,091 Epoch 86/2000 +2025-02-26 00:40:51,524 Current Learning Rate: 0.0060907162 +2025-02-26 00:40:51,525 Train Loss: 0.0224042, Val Loss: 0.0444629 +2025-02-26 00:40:51,525 Epoch 87/2000 +2025-02-26 00:41:53,305 Current Learning Rate: 0.0060139365 +2025-02-26 00:41:53,306 Train Loss: 0.0224162, Val Loss: 0.0444701 +2025-02-26 00:41:53,306 Epoch 88/2000 +2025-02-26 00:42:57,714 Current Learning Rate: 0.0059369066 +2025-02-26 00:42:57,715 Train Loss: 0.0224164, Val Loss: 0.0444686 +2025-02-26 00:42:57,715 Epoch 89/2000 +2025-02-26 00:44:00,218 Current Learning Rate: 0.0058596455 +2025-02-26 00:44:00,219 Train Loss: 0.0224161, Val Loss: 0.0444670 +2025-02-26 00:44:00,220 Epoch 90/2000 +2025-02-26 00:45:02,578 Current Learning Rate: 0.0057821723 +2025-02-26 00:45:02,579 Train Loss: 0.0224143, Val Loss: 0.0444435 +2025-02-26 00:45:02,579 Epoch 91/2000 +2025-02-26 00:46:03,939 Current Learning Rate: 0.0057045062 +2025-02-26 00:46:03,939 Train Loss: 0.0224085, Val Loss: 0.0444376 +2025-02-26 00:46:03,940 Epoch 92/2000 +2025-02-26 00:47:05,937 Current Learning Rate: 0.0056266662 +2025-02-26 00:47:05,937 Train Loss: 0.0224025, Val Loss: 0.0443395 +2025-02-26 
00:47:05,938 Epoch 93/2000 +2025-02-26 00:48:10,854 Current Learning Rate: 0.0055486716 +2025-02-26 00:48:10,854 Train Loss: 0.0224115, Val Loss: 0.0444513 +2025-02-26 00:48:10,855 Epoch 94/2000 +2025-02-26 00:49:13,978 Current Learning Rate: 0.0054705416 +2025-02-26 00:49:13,978 Train Loss: 0.0224109, Val Loss: 0.0444553 +2025-02-26 00:49:13,979 Epoch 95/2000 +2025-02-26 00:50:17,062 Current Learning Rate: 0.0053922955 +2025-02-26 00:50:17,062 Train Loss: 0.0223864, Val Loss: 0.0442763 +2025-02-26 00:50:17,063 Epoch 96/2000 +2025-02-26 00:51:20,707 Current Learning Rate: 0.0053139526 +2025-02-26 00:51:20,707 Train Loss: 0.0223557, Val Loss: 0.0442800 +2025-02-26 00:51:20,708 Epoch 97/2000 +2025-02-26 00:52:22,913 Current Learning Rate: 0.0052355323 +2025-02-26 00:52:22,914 Train Loss: 0.0223568, Val Loss: 0.0442965 +2025-02-26 00:52:22,914 Epoch 98/2000 +2025-02-26 00:53:26,017 Current Learning Rate: 0.0051570538 +2025-02-26 00:53:26,018 Train Loss: 0.0223568, Val Loss: 0.0442890 +2025-02-26 00:53:26,018 Epoch 99/2000 +2025-02-26 00:54:30,409 Current Learning Rate: 0.0050785366 +2025-02-26 00:54:30,410 Train Loss: 0.0223576, Val Loss: 0.0443015 +2025-02-26 00:54:30,410 Epoch 100/2000 +2025-02-26 00:55:32,071 Current Learning Rate: 0.0050000000 +2025-02-26 00:55:32,071 Train Loss: 0.0223654, Val Loss: 0.0443511 +2025-02-26 00:55:32,071 Epoch 101/2000 +2025-02-26 00:56:34,313 Current Learning Rate: 0.0049214634 +2025-02-26 00:56:34,313 Train Loss: 0.0223743, Val Loss: 0.0443012 +2025-02-26 00:56:34,314 Epoch 102/2000 +2025-02-26 00:57:35,507 Current Learning Rate: 0.0048429462 +2025-02-26 00:57:35,508 Train Loss: 0.0223530, Val Loss: 0.0442487 +2025-02-26 00:57:35,508 Epoch 103/2000 +2025-02-26 00:58:38,187 Current Learning Rate: 0.0047644677 +2025-02-26 00:58:38,187 Train Loss: 0.0223497, Val Loss: 0.0442736 +2025-02-26 00:58:38,188 Epoch 104/2000 +2025-02-26 00:59:42,723 Current Learning Rate: 0.0046860474 +2025-02-26 00:59:42,724 Train Loss: 0.0223504, Val Loss: 
0.0442747 +2025-02-26 00:59:42,724 Epoch 105/2000 +2025-02-26 01:00:45,198 Current Learning Rate: 0.0046077045 +2025-02-26 01:00:45,199 Train Loss: 0.0223382, Val Loss: 0.0442397 +2025-02-26 01:00:45,199 Epoch 106/2000 +2025-02-26 01:01:47,818 Current Learning Rate: 0.0045294584 +2025-02-26 01:01:47,818 Train Loss: 0.0223227, Val Loss: 0.0441854 +2025-02-26 01:01:47,819 Epoch 107/2000 +2025-02-26 01:02:51,705 Current Learning Rate: 0.0044513284 +2025-02-26 01:02:51,705 Train Loss: 0.0223123, Val Loss: 0.0441498 +2025-02-26 01:02:51,706 Epoch 108/2000 +2025-02-26 01:03:55,403 Current Learning Rate: 0.0043733338 +2025-02-26 01:03:55,403 Train Loss: 0.0223072, Val Loss: 0.0441487 +2025-02-26 01:03:55,403 Epoch 109/2000 +2025-02-26 01:04:59,489 Current Learning Rate: 0.0042954938 +2025-02-26 01:04:59,490 Train Loss: 0.0223083, Val Loss: 0.0441656 +2025-02-26 01:04:59,490 Epoch 110/2000 +2025-02-26 01:06:01,570 Current Learning Rate: 0.0042178277 +2025-02-26 01:06:01,571 Train Loss: 0.0222970, Val Loss: 0.0440695 +2025-02-26 01:06:01,571 Epoch 111/2000 +2025-02-26 01:07:04,517 Current Learning Rate: 0.0041403545 +2025-02-26 01:07:04,518 Train Loss: 0.0223128, Val Loss: 0.0441049 +2025-02-26 01:07:04,518 Epoch 112/2000 +2025-02-26 01:08:06,762 Current Learning Rate: 0.0040630934 +2025-02-26 01:08:06,763 Train Loss: 0.0222994, Val Loss: 0.0441125 +2025-02-26 01:08:06,763 Epoch 113/2000 +2025-02-26 01:09:11,496 Current Learning Rate: 0.0039860635 +2025-02-26 01:09:11,497 Train Loss: 0.0222993, Val Loss: 0.0441154 +2025-02-26 01:09:11,497 Epoch 114/2000 +2025-02-26 01:10:15,016 Current Learning Rate: 0.0039092838 +2025-02-26 01:10:15,017 Train Loss: 0.0223015, Val Loss: 0.0441362 +2025-02-26 01:10:15,017 Epoch 115/2000 +2025-02-26 01:11:18,565 Current Learning Rate: 0.0038327732 +2025-02-26 01:11:18,565 Train Loss: 0.0222969, Val Loss: 0.0440776 +2025-02-26 01:11:18,566 Epoch 116/2000 +2025-02-26 01:12:22,415 Current Learning Rate: 0.0037565506 +2025-02-26 01:12:22,416 
Train Loss: 0.0222785, Val Loss: 0.0440426 +2025-02-26 01:12:22,416 Epoch 117/2000 +2025-02-26 01:13:26,328 Current Learning Rate: 0.0036806348 +2025-02-26 01:13:26,329 Train Loss: 0.0222678, Val Loss: 0.0440215 +2025-02-26 01:13:26,329 Epoch 118/2000 +2025-02-26 01:14:27,536 Current Learning Rate: 0.0036050445 +2025-02-26 01:14:27,536 Train Loss: 0.0222712, Val Loss: 0.0440669 +2025-02-26 01:14:27,537 Epoch 119/2000 +2025-02-26 01:15:30,463 Current Learning Rate: 0.0035297984 +2025-02-26 01:15:30,464 Train Loss: 0.0222699, Val Loss: 0.0440234 +2025-02-26 01:15:30,464 Epoch 120/2000 +2025-02-26 01:16:34,343 Current Learning Rate: 0.0034549150 +2025-02-26 01:16:34,344 Train Loss: 0.0222852, Val Loss: 0.0440966 +2025-02-26 01:16:34,344 Epoch 121/2000 +2025-02-26 01:17:36,841 Current Learning Rate: 0.0033804129 +2025-02-26 01:17:36,841 Train Loss: 0.0223104, Val Loss: 0.0441619 +2025-02-26 01:17:36,842 Epoch 122/2000 +2025-02-26 01:18:40,105 Current Learning Rate: 0.0033063104 +2025-02-26 01:18:40,106 Train Loss: 0.0223185, Val Loss: 0.0441550 +2025-02-26 01:18:40,106 Epoch 123/2000 +2025-02-26 01:19:42,893 Current Learning Rate: 0.0032326258 +2025-02-26 01:19:42,894 Train Loss: 0.0223168, Val Loss: 0.0441622 +2025-02-26 01:19:42,894 Epoch 124/2000 +2025-02-26 01:20:45,886 Current Learning Rate: 0.0031593772 +2025-02-26 01:20:45,889 Train Loss: 0.0223014, Val Loss: 0.0440977 +2025-02-26 01:20:45,890 Epoch 125/2000 +2025-02-26 01:21:48,862 Current Learning Rate: 0.0030865828 +2025-02-26 01:21:48,864 Train Loss: 0.0222951, Val Loss: 0.0441078 +2025-02-26 01:21:48,865 Epoch 126/2000 +2025-02-26 01:22:52,375 Current Learning Rate: 0.0030142605 +2025-02-26 01:22:52,375 Train Loss: 0.0222942, Val Loss: 0.0441002 +2025-02-26 01:22:52,375 Epoch 127/2000 +2025-02-26 01:23:56,403 Current Learning Rate: 0.0029424282 +2025-02-26 01:23:56,404 Train Loss: 0.0222935, Val Loss: 0.0440856 +2025-02-26 01:23:56,404 Epoch 128/2000 +2025-02-26 01:25:00,634 Current Learning Rate: 
0.0028711035 +2025-02-26 01:25:00,635 Train Loss: 0.0222937, Val Loss: 0.0440926 +2025-02-26 01:25:00,635 Epoch 129/2000 +2025-02-26 01:26:05,252 Current Learning Rate: 0.0028003042 +2025-02-26 01:26:05,252 Train Loss: 0.0223006, Val Loss: 0.0441332 +2025-02-26 01:26:05,252 Epoch 130/2000 +2025-02-26 01:27:07,737 Current Learning Rate: 0.0027300475 +2025-02-26 01:27:07,737 Train Loss: 0.0223056, Val Loss: 0.0441191 +2025-02-26 01:27:07,738 Epoch 131/2000 +2025-02-26 01:28:12,535 Current Learning Rate: 0.0026603509 +2025-02-26 01:28:12,535 Train Loss: 0.0223055, Val Loss: 0.0441360 +2025-02-26 01:28:12,536 Epoch 132/2000 +2025-02-26 01:29:17,220 Current Learning Rate: 0.0025912316 +2025-02-26 01:29:17,221 Train Loss: 0.0223081, Val Loss: 0.0441386 +2025-02-26 01:29:17,221 Epoch 133/2000 +2025-02-26 01:30:21,776 Current Learning Rate: 0.0025227067 +2025-02-26 01:30:21,777 Train Loss: 0.0223089, Val Loss: 0.0441427 +2025-02-26 01:30:21,777 Epoch 134/2000 +2025-02-26 01:31:26,161 Current Learning Rate: 0.0024547929 +2025-02-26 01:31:26,161 Train Loss: 0.0223087, Val Loss: 0.0441384 +2025-02-26 01:31:26,161 Epoch 135/2000 +2025-02-26 01:32:29,008 Current Learning Rate: 0.0023875072 +2025-02-26 01:32:29,008 Train Loss: 0.0223042, Val Loss: 0.0441261 +2025-02-26 01:32:29,009 Epoch 136/2000 +2025-02-26 01:33:32,311 Current Learning Rate: 0.0023208660 +2025-02-26 01:33:32,312 Train Loss: 0.0223026, Val Loss: 0.0441242 +2025-02-26 01:33:32,312 Epoch 137/2000 +2025-02-26 01:34:37,319 Current Learning Rate: 0.0022548859 +2025-02-26 01:34:37,320 Train Loss: 0.0223031, Val Loss: 0.0441182 +2025-02-26 01:34:37,320 Epoch 138/2000 +2025-02-26 01:35:41,830 Current Learning Rate: 0.0021895831 +2025-02-26 01:35:41,830 Train Loss: 0.0223033, Val Loss: 0.0441289 +2025-02-26 01:35:41,831 Epoch 139/2000 +2025-02-26 01:36:45,240 Current Learning Rate: 0.0021249737 +2025-02-26 01:36:45,241 Train Loss: 0.0223025, Val Loss: 0.0441366 +2025-02-26 01:36:45,241 Epoch 140/2000 +2025-02-26 
01:37:49,234 Current Learning Rate: 0.0020610737 +2025-02-26 01:37:49,235 Train Loss: 0.0223024, Val Loss: 0.0441073 +2025-02-26 01:37:49,235 Epoch 141/2000 +2025-02-26 01:38:53,465 Current Learning Rate: 0.0019978989 +2025-02-26 01:38:53,465 Train Loss: 0.0223013, Val Loss: 0.0441352 +2025-02-26 01:38:53,466 Epoch 142/2000 +2025-02-26 01:39:57,774 Current Learning Rate: 0.0019354647 +2025-02-26 01:39:57,775 Train Loss: 0.0223032, Val Loss: 0.0441288 +2025-02-26 01:39:57,775 Epoch 143/2000 +2025-02-26 01:41:02,053 Current Learning Rate: 0.0018737867 +2025-02-26 01:41:02,054 Train Loss: 0.0223048, Val Loss: 0.0441199 +2025-02-26 01:41:02,054 Epoch 144/2000 +2025-02-26 01:42:04,798 Current Learning Rate: 0.0018128801 +2025-02-26 01:42:04,799 Train Loss: 0.0223047, Val Loss: 0.0441285 +2025-02-26 01:42:04,799 Epoch 145/2000 +2025-02-26 01:43:14,774 Current Learning Rate: 0.0017527598 +2025-02-26 01:43:14,775 Train Loss: 0.0223073, Val Loss: 0.0441502 +2025-02-26 01:43:14,775 Epoch 146/2000 +2025-02-26 01:44:18,265 Current Learning Rate: 0.0016934407 +2025-02-26 01:44:18,268 Train Loss: 0.0223061, Val Loss: 0.0441309 +2025-02-26 01:44:18,269 Epoch 147/2000 +2025-02-26 01:45:22,122 Current Learning Rate: 0.0016349374 +2025-02-26 01:45:22,122 Train Loss: 0.0223087, Val Loss: 0.0441478 +2025-02-26 01:45:22,123 Epoch 148/2000 +2025-02-26 01:46:25,958 Current Learning Rate: 0.0015772645 +2025-02-26 01:46:25,958 Train Loss: 0.0223079, Val Loss: 0.0441294 +2025-02-26 01:46:25,958 Epoch 149/2000 +2025-02-26 01:47:29,531 Current Learning Rate: 0.0015204360 +2025-02-26 01:47:29,531 Train Loss: 0.0223078, Val Loss: 0.0441438 +2025-02-26 01:47:29,531 Epoch 150/2000 +2025-02-26 01:48:32,650 Current Learning Rate: 0.0014644661 +2025-02-26 01:48:32,651 Train Loss: 0.0223073, Val Loss: 0.0441466 +2025-02-26 01:48:32,651 Epoch 151/2000 +2025-02-26 01:49:37,467 Current Learning Rate: 0.0014093685 +2025-02-26 01:49:37,468 Train Loss: 0.0223078, Val Loss: 0.0441299 +2025-02-26 
01:49:37,468 Epoch 152/2000 +2025-02-26 01:50:42,142 Current Learning Rate: 0.0013551569 +2025-02-26 01:50:42,143 Train Loss: 0.0223065, Val Loss: 0.0441418 +2025-02-26 01:50:42,143 Epoch 153/2000 +2025-02-26 01:51:45,201 Current Learning Rate: 0.0013018445 +2025-02-26 01:51:45,202 Train Loss: 0.0223081, Val Loss: 0.0441583 +2025-02-26 01:51:45,202 Epoch 154/2000 +2025-02-26 01:52:49,123 Current Learning Rate: 0.0012494447 +2025-02-26 01:52:49,124 Train Loss: 0.0223159, Val Loss: 0.0441682 +2025-02-26 01:52:49,124 Epoch 155/2000 +2025-02-26 01:53:53,018 Current Learning Rate: 0.0011979702 +2025-02-26 01:53:53,018 Train Loss: 0.0223156, Val Loss: 0.0441738 +2025-02-26 01:53:53,019 Epoch 156/2000 +2025-02-26 01:54:58,341 Current Learning Rate: 0.0011474338 +2025-02-26 01:54:58,342 Train Loss: 0.0223144, Val Loss: 0.0441578 +2025-02-26 01:54:58,342 Epoch 157/2000 +2025-02-26 01:56:02,078 Current Learning Rate: 0.0010978480 +2025-02-26 01:56:02,079 Train Loss: 0.0223146, Val Loss: 0.0441468 +2025-02-26 01:56:02,079 Epoch 158/2000 +2025-02-26 01:57:05,707 Current Learning Rate: 0.0010492249 +2025-02-26 01:57:05,708 Train Loss: 0.0223141, Val Loss: 0.0441531 +2025-02-26 01:57:05,708 Epoch 159/2000 +2025-02-26 01:58:08,495 Current Learning Rate: 0.0010015767 +2025-02-26 01:58:08,496 Train Loss: 0.0223137, Val Loss: 0.0441710 +2025-02-26 01:58:08,496 Epoch 160/2000 +2025-02-26 01:59:11,318 Current Learning Rate: 0.0009549150 +2025-02-26 01:59:11,318 Train Loss: 0.0223161, Val Loss: 0.0441620 +2025-02-26 01:59:11,319 Epoch 161/2000 +2025-02-26 02:00:15,207 Current Learning Rate: 0.0009092514 +2025-02-26 02:00:15,208 Train Loss: 0.0223149, Val Loss: 0.0441649 +2025-02-26 02:00:15,208 Epoch 162/2000 +2025-02-26 02:01:19,501 Current Learning Rate: 0.0008645971 +2025-02-26 02:01:19,501 Train Loss: 0.0223160, Val Loss: 0.0441677 +2025-02-26 02:01:19,501 Epoch 163/2000 +2025-02-26 02:02:22,895 Current Learning Rate: 0.0008209632 +2025-02-26 02:02:22,896 Train Loss: 0.0223150, Val 
Loss: 0.0441686 +2025-02-26 02:02:22,896 Epoch 164/2000 +2025-02-26 02:03:26,044 Current Learning Rate: 0.0007783604 +2025-02-26 02:03:26,044 Train Loss: 0.0223138, Val Loss: 0.0441458 +2025-02-26 02:03:26,045 Epoch 165/2000 +2025-02-26 02:04:29,001 Current Learning Rate: 0.0007367992 +2025-02-26 02:04:29,001 Train Loss: 0.0223106, Val Loss: 0.0441615 +2025-02-26 02:04:29,002 Epoch 166/2000 +2025-02-26 02:05:32,264 Current Learning Rate: 0.0006962899 +2025-02-26 02:05:32,265 Train Loss: 0.0223140, Val Loss: 0.0441625 +2025-02-26 02:05:32,266 Epoch 167/2000 +2025-02-26 02:06:36,155 Current Learning Rate: 0.0006568424 +2025-02-26 02:06:36,155 Train Loss: 0.0223150, Val Loss: 0.0441703 +2025-02-26 02:06:36,155 Epoch 168/2000 +2025-02-26 02:07:39,651 Current Learning Rate: 0.0006184666 +2025-02-26 02:07:39,651 Train Loss: 0.0223150, Val Loss: 0.0441613 +2025-02-26 02:07:39,652 Epoch 169/2000 +2025-02-26 02:08:42,911 Current Learning Rate: 0.0005811718 +2025-02-26 02:08:42,911 Train Loss: 0.0223143, Val Loss: 0.0441641 +2025-02-26 02:08:42,912 Epoch 170/2000 +2025-02-26 02:09:45,863 Current Learning Rate: 0.0005449674 +2025-02-26 02:09:45,863 Train Loss: 0.0223143, Val Loss: 0.0441568 +2025-02-26 02:09:45,864 Epoch 171/2000 +2025-02-26 02:10:49,475 Current Learning Rate: 0.0005098621 +2025-02-26 02:10:49,475 Train Loss: 0.0223138, Val Loss: 0.0441577 +2025-02-26 02:10:49,476 Epoch 172/2000 +2025-02-26 02:11:54,975 Current Learning Rate: 0.0004758647 +2025-02-26 02:11:54,976 Train Loss: 0.0223130, Val Loss: 0.0441515 +2025-02-26 02:11:54,976 Epoch 173/2000 +2025-02-26 02:12:59,254 Current Learning Rate: 0.0004429836 +2025-02-26 02:12:59,254 Train Loss: 0.0223133, Val Loss: 0.0441662 +2025-02-26 02:12:59,254 Epoch 174/2000 +2025-02-26 02:14:03,189 Current Learning Rate: 0.0004112269 +2025-02-26 02:14:03,190 Train Loss: 0.0223124, Val Loss: 0.0441545 +2025-02-26 02:14:03,190 Epoch 175/2000 +2025-02-26 02:15:08,528 Current Learning Rate: 0.0003806023 +2025-02-26 
02:15:08,529 Train Loss: 0.0223119, Val Loss: 0.0441559 +2025-02-26 02:15:08,529 Epoch 176/2000 +2025-02-26 02:16:12,157 Current Learning Rate: 0.0003511176 +2025-02-26 02:16:12,158 Train Loss: 0.0223112, Val Loss: 0.0441526 +2025-02-26 02:16:12,160 Epoch 177/2000 +2025-02-26 02:17:15,596 Current Learning Rate: 0.0003227798 +2025-02-26 02:17:15,597 Train Loss: 0.0223114, Val Loss: 0.0441482 +2025-02-26 02:17:15,598 Epoch 178/2000 +2025-02-26 02:18:19,718 Current Learning Rate: 0.0002955962 +2025-02-26 02:18:19,719 Train Loss: 0.0223092, Val Loss: 0.0441418 +2025-02-26 02:18:19,719 Epoch 179/2000 +2025-02-26 02:19:24,026 Current Learning Rate: 0.0002695732 +2025-02-26 02:19:24,027 Train Loss: 0.0223080, Val Loss: 0.0441372 +2025-02-26 02:19:24,027 Epoch 180/2000 +2025-02-26 02:20:26,685 Current Learning Rate: 0.0002447174 +2025-02-26 02:20:26,686 Train Loss: 0.0223087, Val Loss: 0.0441397 +2025-02-26 02:20:26,686 Epoch 181/2000 +2025-02-26 02:21:31,107 Current Learning Rate: 0.0002210349 +2025-02-26 02:21:31,107 Train Loss: 0.0223085, Val Loss: 0.0441464 +2025-02-26 02:21:31,108 Epoch 182/2000 +2025-02-26 02:22:35,254 Current Learning Rate: 0.0001985316 +2025-02-26 02:22:35,255 Train Loss: 0.0223082, Val Loss: 0.0441410 +2025-02-26 02:22:35,255 Epoch 183/2000 +2025-02-26 02:23:37,778 Current Learning Rate: 0.0001772129 +2025-02-26 02:23:37,779 Train Loss: 0.0223070, Val Loss: 0.0441367 +2025-02-26 02:23:37,779 Epoch 184/2000 +2025-02-26 02:24:40,746 Current Learning Rate: 0.0001570842 +2025-02-26 02:24:40,746 Train Loss: 0.0223073, Val Loss: 0.0441435 +2025-02-26 02:24:40,747 Epoch 185/2000 +2025-02-26 02:25:43,908 Current Learning Rate: 0.0001381504 +2025-02-26 02:25:43,909 Train Loss: 0.0223069, Val Loss: 0.0441443 +2025-02-26 02:25:43,909 Epoch 186/2000 +2025-02-26 02:26:47,623 Current Learning Rate: 0.0001204162 +2025-02-26 02:26:47,624 Train Loss: 0.0223065, Val Loss: 0.0441407 +2025-02-26 02:26:47,624 Epoch 187/2000 +2025-02-26 02:27:49,809 Current Learning 
Rate: 0.0001038859 +2025-02-26 02:27:49,809 Train Loss: 0.0223066, Val Loss: 0.0441448 +2025-02-26 02:27:49,809 Epoch 188/2000 +2025-02-26 02:28:52,580 Current Learning Rate: 0.0000885637 +2025-02-26 02:28:52,581 Train Loss: 0.0223062, Val Loss: 0.0441371 +2025-02-26 02:28:52,581 Epoch 189/2000 +2025-02-26 02:29:56,044 Current Learning Rate: 0.0000744534 +2025-02-26 02:29:56,044 Train Loss: 0.0223060, Val Loss: 0.0441380 +2025-02-26 02:29:56,045 Epoch 190/2000 +2025-02-26 02:30:59,478 Current Learning Rate: 0.0000615583 +2025-02-26 02:30:59,479 Train Loss: 0.0223063, Val Loss: 0.0441391 +2025-02-26 02:30:59,479 Epoch 191/2000 +2025-02-26 02:32:04,187 Current Learning Rate: 0.0000498817 +2025-02-26 02:32:04,187 Train Loss: 0.0223058, Val Loss: 0.0441356 +2025-02-26 02:32:04,188 Epoch 192/2000 +2025-02-26 02:33:08,125 Current Learning Rate: 0.0000394265 +2025-02-26 02:33:08,125 Train Loss: 0.0223056, Val Loss: 0.0441386 +2025-02-26 02:33:08,126 Epoch 193/2000 +2025-02-26 02:34:11,725 Current Learning Rate: 0.0000301952 +2025-02-26 02:34:11,725 Train Loss: 0.0223057, Val Loss: 0.0441377 +2025-02-26 02:34:11,725 Epoch 194/2000 +2025-02-26 02:35:14,577 Current Learning Rate: 0.0000221902 +2025-02-26 02:35:14,577 Train Loss: 0.0223061, Val Loss: 0.0441362 +2025-02-26 02:35:14,578 Epoch 195/2000 +2025-02-26 02:36:18,616 Current Learning Rate: 0.0000154133 +2025-02-26 02:36:18,617 Train Loss: 0.0223056, Val Loss: 0.0441353 +2025-02-26 02:36:18,617 Epoch 196/2000 +2025-02-26 02:37:21,669 Current Learning Rate: 0.0000098664 +2025-02-26 02:37:21,670 Train Loss: 0.0223051, Val Loss: 0.0441390 +2025-02-26 02:37:21,670 Epoch 197/2000 +2025-02-26 02:38:26,037 Current Learning Rate: 0.0000055506 +2025-02-26 02:38:26,038 Train Loss: 0.0223053, Val Loss: 0.0441371 +2025-02-26 02:38:26,038 Epoch 198/2000 +2025-02-26 02:39:30,620 Current Learning Rate: 0.0000024672 +2025-02-26 02:39:30,620 Train Loss: 0.0223054, Val Loss: 0.0441374 +2025-02-26 02:39:30,620 Epoch 199/2000 +2025-02-26 
02:40:34,280 Current Learning Rate: 0.0000006168 +2025-02-26 02:40:34,281 Train Loss: 0.0223049, Val Loss: 0.0441367 +2025-02-26 02:40:34,281 Epoch 200/2000 +2025-02-26 02:41:39,373 Current Learning Rate: 0.0000000000 +2025-02-26 02:41:39,374 Train Loss: 0.0223060, Val Loss: 0.0441367 +2025-02-26 02:41:39,374 Epoch 201/2000 +2025-02-26 02:42:43,464 Current Learning Rate: 0.0000006168 +2025-02-26 02:42:43,464 Train Loss: 0.0223060, Val Loss: 0.0441367 +2025-02-26 02:42:43,465 Epoch 202/2000 +2025-02-26 02:43:47,286 Current Learning Rate: 0.0000024672 +2025-02-26 02:43:47,287 Train Loss: 0.0223056, Val Loss: 0.0441368 +2025-02-26 02:43:47,288 Epoch 203/2000 +2025-02-26 02:44:50,078 Current Learning Rate: 0.0000055506 +2025-02-26 02:44:50,079 Train Loss: 0.0223059, Val Loss: 0.0441365 +2025-02-26 02:44:50,079 Epoch 204/2000 +2025-02-26 02:45:54,317 Current Learning Rate: 0.0000098664 +2025-02-26 02:45:54,318 Train Loss: 0.0223058, Val Loss: 0.0441357 +2025-02-26 02:45:54,318 Epoch 205/2000 +2025-02-26 02:46:58,655 Current Learning Rate: 0.0000154133 +2025-02-26 02:46:58,655 Train Loss: 0.0223060, Val Loss: 0.0441382 +2025-02-26 02:46:58,655 Epoch 206/2000 +2025-02-26 02:48:02,809 Current Learning Rate: 0.0000221902 +2025-02-26 02:48:02,810 Train Loss: 0.0223055, Val Loss: 0.0441368 +2025-02-26 02:48:02,810 Epoch 207/2000 +2025-02-26 02:49:06,215 Current Learning Rate: 0.0000301952 +2025-02-26 02:49:06,216 Train Loss: 0.0223058, Val Loss: 0.0441356 +2025-02-26 02:49:06,216 Epoch 208/2000 +2025-02-26 02:50:10,807 Current Learning Rate: 0.0000394265 +2025-02-26 02:50:10,807 Train Loss: 0.0223060, Val Loss: 0.0441375 +2025-02-26 02:50:10,808 Epoch 209/2000 +2025-02-26 02:51:15,022 Current Learning Rate: 0.0000498817 +2025-02-26 02:51:15,022 Train Loss: 0.0223064, Val Loss: 0.0441389 +2025-02-26 02:51:15,023 Epoch 210/2000 +2025-02-26 02:52:17,553 Current Learning Rate: 0.0000615583 +2025-02-26 02:52:17,554 Train Loss: 0.0223061, Val Loss: 0.0441388 +2025-02-26 
02:52:17,558 Epoch 211/2000 +2025-02-26 02:53:21,266 Current Learning Rate: 0.0000744534 +2025-02-26 02:53:21,267 Train Loss: 0.0223068, Val Loss: 0.0441369 +2025-02-26 02:53:21,267 Epoch 212/2000 +2025-02-26 02:54:24,706 Current Learning Rate: 0.0000885637 +2025-02-26 02:54:24,706 Train Loss: 0.0223063, Val Loss: 0.0441396 +2025-02-26 02:54:24,706 Epoch 213/2000 +2025-02-26 02:55:28,427 Current Learning Rate: 0.0001038859 +2025-02-26 02:55:28,428 Train Loss: 0.0223062, Val Loss: 0.0441376 +2025-02-26 02:55:28,428 Epoch 214/2000 +2025-02-26 02:56:31,783 Current Learning Rate: 0.0001204162 +2025-02-26 02:56:31,783 Train Loss: 0.0223060, Val Loss: 0.0441357 +2025-02-26 02:56:31,784 Epoch 215/2000 +2025-02-26 02:57:38,614 Current Learning Rate: 0.0001381504 +2025-02-26 02:57:38,619 Train Loss: 0.0223063, Val Loss: 0.0441371 +2025-02-26 02:57:38,619 Epoch 216/2000 +2025-02-26 02:58:42,121 Current Learning Rate: 0.0001570842 +2025-02-26 02:58:42,122 Train Loss: 0.0223063, Val Loss: 0.0441418 +2025-02-26 02:58:42,122 Epoch 217/2000 +2025-02-26 02:59:45,074 Current Learning Rate: 0.0001772129 +2025-02-26 02:59:45,075 Train Loss: 0.0223069, Val Loss: 0.0441404 +2025-02-26 02:59:45,075 Epoch 218/2000 +2025-02-26 03:00:46,566 Current Learning Rate: 0.0001985316 +2025-02-26 03:00:46,567 Train Loss: 0.0223060, Val Loss: 0.0441375 +2025-02-26 03:00:46,567 Epoch 219/2000 +2025-02-26 03:01:49,848 Current Learning Rate: 0.0002210349 +2025-02-26 03:01:49,848 Train Loss: 0.0223066, Val Loss: 0.0441393 +2025-02-26 03:01:49,849 Epoch 220/2000 +2025-02-26 03:02:52,508 Current Learning Rate: 0.0002447174 +2025-02-26 03:02:52,508 Train Loss: 0.0223071, Val Loss: 0.0441339 +2025-02-26 03:02:52,509 Epoch 221/2000 +2025-02-26 03:03:56,020 Current Learning Rate: 0.0002695732 +2025-02-26 03:03:56,021 Train Loss: 0.0223072, Val Loss: 0.0441400 +2025-02-26 03:03:56,021 Epoch 222/2000 +2025-02-26 03:04:58,697 Current Learning Rate: 0.0002955962 +2025-02-26 03:04:58,697 Train Loss: 0.0223071, Val 
Loss: 0.0441280 +2025-02-26 03:04:58,697 Epoch 223/2000 +2025-02-26 03:05:59,990 Current Learning Rate: 0.0003227798 +2025-02-26 03:05:59,990 Train Loss: 0.0223073, Val Loss: 0.0441411 +2025-02-26 03:05:59,991 Epoch 224/2000 +2025-02-26 03:07:03,072 Current Learning Rate: 0.0003511176 +2025-02-26 03:07:03,073 Train Loss: 0.0223069, Val Loss: 0.0441429 +2025-02-26 03:07:03,073 Epoch 225/2000 +2025-02-26 03:08:06,441 Current Learning Rate: 0.0003806023 +2025-02-26 03:08:06,441 Train Loss: 0.0223074, Val Loss: 0.0441357 +2025-02-26 03:08:06,442 Epoch 226/2000 +2025-02-26 03:09:09,985 Current Learning Rate: 0.0004112269 +2025-02-26 03:09:09,985 Train Loss: 0.0223061, Val Loss: 0.0441360 +2025-02-26 03:09:09,986 Epoch 227/2000 +2025-02-26 03:10:11,360 Current Learning Rate: 0.0004429836 +2025-02-26 03:10:11,360 Train Loss: 0.0223058, Val Loss: 0.0441416 +2025-02-26 03:10:11,360 Epoch 228/2000 +2025-02-26 03:11:13,148 Current Learning Rate: 0.0004758647 +2025-02-26 03:11:13,148 Train Loss: 0.0223064, Val Loss: 0.0441476 +2025-02-26 03:11:13,149 Epoch 229/2000 +2025-02-26 03:12:15,352 Current Learning Rate: 0.0005098621 +2025-02-26 03:12:15,353 Train Loss: 0.0223072, Val Loss: 0.0441403 +2025-02-26 03:12:15,353 Epoch 230/2000 +2025-02-26 03:13:16,929 Current Learning Rate: 0.0005449674 +2025-02-26 03:13:16,929 Train Loss: 0.0223068, Val Loss: 0.0441410 +2025-02-26 03:13:16,929 Epoch 231/2000 +2025-02-26 03:14:19,312 Current Learning Rate: 0.0005811718 +2025-02-26 03:14:19,313 Train Loss: 0.0223067, Val Loss: 0.0441483 +2025-02-26 03:14:19,313 Epoch 232/2000 +2025-02-26 03:15:20,132 Current Learning Rate: 0.0006184666 +2025-02-26 03:15:20,132 Train Loss: 0.0223075, Val Loss: 0.0441480 +2025-02-26 03:15:20,133 Epoch 233/2000 +2025-02-26 03:16:23,004 Current Learning Rate: 0.0006568424 +2025-02-26 03:16:23,005 Train Loss: 0.0223083, Val Loss: 0.0441434 +2025-02-26 03:16:23,005 Epoch 234/2000 +2025-02-26 03:17:24,119 Current Learning Rate: 0.0006962899 +2025-02-26 
03:17:24,119 Train Loss: 0.0223086, Val Loss: 0.0441396 +2025-02-26 03:17:24,120 Epoch 235/2000 +2025-02-26 03:18:27,152 Current Learning Rate: 0.0007367992 +2025-02-26 03:18:27,153 Train Loss: 0.0223086, Val Loss: 0.0441465 +2025-02-26 03:18:27,153 Epoch 236/2000 +2025-02-26 03:19:33,114 Current Learning Rate: 0.0007783604 +2025-02-26 03:19:33,114 Train Loss: 0.0223086, Val Loss: 0.0441481 +2025-02-26 03:19:33,114 Epoch 237/2000 +2025-02-26 03:20:35,288 Current Learning Rate: 0.0008209632 +2025-02-26 03:20:35,289 Train Loss: 0.0223042, Val Loss: 0.0441293 +2025-02-26 03:20:35,289 Epoch 238/2000 +2025-02-26 03:21:37,315 Current Learning Rate: 0.0008645971 +2025-02-26 03:21:37,316 Train Loss: 0.0223035, Val Loss: 0.0441316 +2025-02-26 03:21:37,316 Epoch 239/2000 +2025-02-26 03:22:37,880 Current Learning Rate: 0.0009092514 +2025-02-26 03:22:37,881 Train Loss: 0.0223030, Val Loss: 0.0441459 +2025-02-26 03:22:37,881 Epoch 240/2000 +2025-02-26 03:23:39,974 Current Learning Rate: 0.0009549150 +2025-02-26 03:23:39,975 Train Loss: 0.0223043, Val Loss: 0.0441361 +2025-02-26 03:23:39,975 Epoch 241/2000 +2025-02-26 03:24:43,142 Current Learning Rate: 0.0010015767 +2025-02-26 03:24:43,142 Train Loss: 0.0223051, Val Loss: 0.0441363 +2025-02-26 03:24:43,142 Epoch 242/2000 +2025-02-26 03:25:49,232 Current Learning Rate: 0.0010492249 +2025-02-26 03:25:49,233 Train Loss: 0.0223056, Val Loss: 0.0441411 +2025-02-26 03:25:49,233 Epoch 243/2000 +2025-02-26 03:26:51,821 Current Learning Rate: 0.0010978480 +2025-02-26 03:26:51,822 Train Loss: 0.0223051, Val Loss: 0.0441320 +2025-02-26 03:26:51,822 Epoch 244/2000 +2025-02-26 03:27:54,610 Current Learning Rate: 0.0011474338 +2025-02-26 03:27:54,611 Train Loss: 0.0223047, Val Loss: 0.0441355 +2025-02-26 03:27:54,611 Epoch 245/2000 +2025-02-26 03:28:57,971 Current Learning Rate: 0.0011979702 +2025-02-26 03:28:57,972 Train Loss: 0.0223068, Val Loss: 0.0441385 +2025-02-26 03:28:57,972 Epoch 246/2000 +2025-02-26 03:30:00,791 Current Learning 
Rate: 0.0012494447 +2025-02-26 03:30:00,791 Train Loss: 0.0223070, Val Loss: 0.0441434 +2025-02-26 03:30:00,792 Epoch 247/2000 +2025-02-26 03:31:04,845 Current Learning Rate: 0.0013018445 +2025-02-26 03:31:04,845 Train Loss: 0.0223073, Val Loss: 0.0441462 +2025-02-26 03:31:04,846 Epoch 248/2000 +2025-02-26 03:32:09,028 Current Learning Rate: 0.0013551569 +2025-02-26 03:32:09,028 Train Loss: 0.0223159, Val Loss: 0.0441638 +2025-02-26 03:32:09,029 Epoch 249/2000 +2025-02-26 03:33:12,135 Current Learning Rate: 0.0014093685 +2025-02-26 03:33:12,136 Train Loss: 0.0223172, Val Loss: 0.0441721 +2025-02-26 03:33:12,137 Epoch 250/2000 +2025-02-26 03:34:14,447 Current Learning Rate: 0.0014644661 +2025-02-26 03:34:14,448 Train Loss: 0.0223170, Val Loss: 0.0441787 +2025-02-26 03:34:14,448 Epoch 251/2000 +2025-02-26 03:35:18,586 Current Learning Rate: 0.0015204360 +2025-02-26 03:35:18,589 Train Loss: 0.0223111, Val Loss: 0.0441426 +2025-02-26 03:35:18,590 Epoch 252/2000 +2025-02-26 03:36:21,342 Current Learning Rate: 0.0015772645 +2025-02-26 03:36:21,342 Train Loss: 0.0223071, Val Loss: 0.0441433 +2025-02-26 03:36:21,343 Epoch 253/2000 +2025-02-26 03:37:22,650 Current Learning Rate: 0.0016349374 +2025-02-26 03:37:22,651 Train Loss: 0.0223077, Val Loss: 0.0441335 +2025-02-26 03:37:22,651 Epoch 254/2000 +2025-02-26 03:38:23,641 Current Learning Rate: 0.0016934407 +2025-02-26 03:38:23,642 Train Loss: 0.0223071, Val Loss: 0.0441470 +2025-02-26 03:38:23,642 Epoch 255/2000 +2025-02-26 03:39:24,552 Current Learning Rate: 0.0017527598 +2025-02-26 03:39:24,552 Train Loss: 0.0223066, Val Loss: 0.0441349 +2025-02-26 03:39:24,552 Epoch 256/2000 +2025-02-26 03:40:27,300 Current Learning Rate: 0.0018128801 +2025-02-26 03:40:27,300 Train Loss: 0.0223070, Val Loss: 0.0441408 +2025-02-26 03:40:27,300 Epoch 257/2000 +2025-02-26 03:41:32,019 Current Learning Rate: 0.0018737867 +2025-02-26 03:41:32,019 Train Loss: 0.0223057, Val Loss: 0.0441193 +2025-02-26 03:41:32,019 Epoch 258/2000 +2025-02-26 
03:42:34,728 Current Learning Rate: 0.0019354647 +2025-02-26 03:42:34,729 Train Loss: 0.0223011, Val Loss: 0.0441201 +2025-02-26 03:42:34,729 Epoch 259/2000 +2025-02-26 03:43:37,833 Current Learning Rate: 0.0019978989 +2025-02-26 03:43:37,833 Train Loss: 0.0223011, Val Loss: 0.0441123 +2025-02-26 03:43:37,834 Epoch 260/2000 +2025-02-26 03:44:41,099 Current Learning Rate: 0.0020610737 +2025-02-26 03:44:41,099 Train Loss: 0.0222997, Val Loss: 0.0441169 +2025-02-26 03:44:41,100 Epoch 261/2000 +2025-02-26 03:45:44,923 Current Learning Rate: 0.0021249737 +2025-02-26 03:45:44,924 Train Loss: 0.0223042, Val Loss: 0.0441305 +2025-02-26 03:45:44,924 Epoch 262/2000 +2025-02-26 03:46:47,758 Current Learning Rate: 0.0021895831 +2025-02-26 03:46:47,758 Train Loss: 0.0223063, Val Loss: 0.0441432 +2025-02-26 03:46:47,758 Epoch 263/2000 +2025-02-26 03:47:52,082 Current Learning Rate: 0.0022548859 +2025-02-26 03:47:52,083 Train Loss: 0.0223061, Val Loss: 0.0441215 +2025-02-26 03:47:52,083 Epoch 264/2000 +2025-02-26 03:48:55,263 Current Learning Rate: 0.0023208660 +2025-02-26 03:48:55,264 Train Loss: 0.0223051, Val Loss: 0.0441355 +2025-02-26 03:48:55,264 Epoch 265/2000 +2025-02-26 03:49:59,148 Current Learning Rate: 0.0023875072 +2025-02-26 03:49:59,149 Train Loss: 0.0223045, Val Loss: 0.0441138 +2025-02-26 03:49:59,149 Epoch 266/2000 +2025-02-26 03:51:03,916 Current Learning Rate: 0.0024547929 +2025-02-26 03:51:03,916 Train Loss: 0.0223026, Val Loss: 0.0441189 +2025-02-26 03:51:03,917 Epoch 267/2000 +2025-02-26 03:52:08,750 Current Learning Rate: 0.0025227067 +2025-02-26 03:52:08,750 Train Loss: 0.0223039, Val Loss: 0.0441116 +2025-02-26 03:52:08,750 Epoch 268/2000 +2025-02-26 03:53:12,790 Current Learning Rate: 0.0025912316 +2025-02-26 03:53:12,791 Train Loss: 0.0222916, Val Loss: 0.0440665 +2025-02-26 03:53:12,791 Epoch 269/2000 +2025-02-26 03:54:16,486 Current Learning Rate: 0.0026603509 +2025-02-26 03:54:16,486 Train Loss: 0.0222874, Val Loss: 0.0440904 +2025-02-26 
03:54:16,487 Epoch 270/2000 +2025-02-26 03:55:20,474 Current Learning Rate: 0.0027300475 +2025-02-26 03:55:20,475 Train Loss: 0.0222907, Val Loss: 0.0440882 +2025-02-26 03:55:20,475 Epoch 271/2000 +2025-02-26 03:56:23,529 Current Learning Rate: 0.0028003042 +2025-02-26 03:56:23,530 Train Loss: 0.0222876, Val Loss: 0.0440764 +2025-02-26 03:56:23,530 Epoch 272/2000 +2025-02-26 03:57:26,655 Current Learning Rate: 0.0028711035 +2025-02-26 03:57:26,655 Train Loss: 0.0222865, Val Loss: 0.0440676 +2025-02-26 03:57:26,656 Epoch 273/2000 +2025-02-26 03:58:29,570 Current Learning Rate: 0.0029424282 +2025-02-26 03:58:29,571 Train Loss: 0.0222869, Val Loss: 0.0440691 +2025-02-26 03:58:29,571 Epoch 274/2000 +2025-02-26 03:59:33,512 Current Learning Rate: 0.0030142605 +2025-02-26 03:59:33,512 Train Loss: 0.0222910, Val Loss: 0.0440814 +2025-02-26 03:59:33,513 Epoch 275/2000 +2025-02-26 04:00:37,358 Current Learning Rate: 0.0030865828 +2025-02-26 04:00:37,358 Train Loss: 0.0222899, Val Loss: 0.0440821 +2025-02-26 04:00:37,359 Epoch 276/2000 +2025-02-26 04:01:39,984 Current Learning Rate: 0.0031593772 +2025-02-26 04:01:39,985 Train Loss: 0.0222830, Val Loss: 0.0440475 +2025-02-26 04:01:39,985 Epoch 277/2000 +2025-02-26 04:02:43,569 Current Learning Rate: 0.0032326258 +2025-02-26 04:02:43,570 Train Loss: 0.0222728, Val Loss: 0.0440421 +2025-02-26 04:02:43,570 Epoch 278/2000 +2025-02-26 04:03:47,070 Current Learning Rate: 0.0033063104 +2025-02-26 04:03:47,070 Train Loss: 0.0222740, Val Loss: 0.0440409 +2025-02-26 04:03:47,071 Epoch 279/2000 +2025-02-26 04:04:50,028 Current Learning Rate: 0.0033804129 +2025-02-26 04:04:50,029 Train Loss: 0.0222742, Val Loss: 0.0440556 +2025-02-26 04:04:50,029 Epoch 280/2000 +2025-02-26 04:05:52,080 Current Learning Rate: 0.0034549150 +2025-02-26 04:05:52,081 Train Loss: 0.0222981, Val Loss: 0.0441405 +2025-02-26 04:05:52,081 Epoch 281/2000 +2025-02-26 04:06:54,245 Current Learning Rate: 0.0035297984 +2025-02-26 04:06:54,246 Train Loss: 0.0222780, Val 
Loss: 0.0440341 +2025-02-26 04:06:54,246 Epoch 282/2000 +2025-02-26 04:07:57,650 Current Learning Rate: 0.0036050445 +2025-02-26 04:07:57,651 Train Loss: 0.0222683, Val Loss: 0.0440263 +2025-02-26 04:07:57,652 Epoch 283/2000 +2025-02-26 04:09:01,972 Current Learning Rate: 0.0036806348 +2025-02-26 04:09:01,974 Train Loss: 0.0222691, Val Loss: 0.0440265 +2025-02-26 04:09:01,981 Epoch 284/2000 +2025-02-26 04:10:04,394 Current Learning Rate: 0.0037565506 +2025-02-26 04:10:04,394 Train Loss: 0.0222689, Val Loss: 0.0440374 +2025-02-26 04:10:04,395 Epoch 285/2000 +2025-02-26 04:11:07,586 Current Learning Rate: 0.0038327732 +2025-02-26 04:11:07,586 Train Loss: 0.0223006, Val Loss: 0.0441734 +2025-02-26 04:11:07,587 Epoch 286/2000 +2025-02-26 04:12:10,876 Current Learning Rate: 0.0039092838 +2025-02-26 04:12:10,877 Train Loss: 0.0223157, Val Loss: 0.0441464 +2025-02-26 04:12:10,877 Epoch 287/2000 +2025-02-26 04:13:14,861 Current Learning Rate: 0.0039860635 +2025-02-26 04:13:14,861 Train Loss: 0.0223142, Val Loss: 0.0441600 +2025-02-26 04:13:14,862 Epoch 288/2000 +2025-02-26 04:14:17,404 Current Learning Rate: 0.0040630934 +2025-02-26 04:14:17,404 Train Loss: 0.0223164, Val Loss: 0.0441596 +2025-02-26 04:14:17,405 Epoch 289/2000 +2025-02-26 04:15:19,803 Current Learning Rate: 0.0041403545 +2025-02-26 04:15:19,804 Train Loss: 0.0223158, Val Loss: 0.0441474 +2025-02-26 04:15:19,804 Epoch 290/2000 +2025-02-26 04:16:22,562 Current Learning Rate: 0.0042178277 +2025-02-26 04:16:22,563 Train Loss: 0.0223107, Val Loss: 0.0440303 +2025-02-26 04:16:22,563 Epoch 291/2000 +2025-02-26 04:17:24,864 Current Learning Rate: 0.0042954938 +2025-02-26 04:17:24,864 Train Loss: 0.0222725, Val Loss: 0.0440399 +2025-02-26 04:17:24,864 Epoch 292/2000 +2025-02-26 04:18:27,452 Current Learning Rate: 0.0043733338 +2025-02-26 04:18:27,452 Train Loss: 0.0222738, Val Loss: 0.0440321 +2025-02-26 04:18:27,453 Epoch 293/2000 +2025-02-26 04:19:30,478 Current Learning Rate: 0.0044513284 +2025-02-26 
04:19:30,479 Train Loss: 0.0222729, Val Loss: 0.0440355 +2025-02-26 04:19:30,479 Epoch 294/2000 +2025-02-26 04:20:31,086 Current Learning Rate: 0.0045294584 +2025-02-26 04:20:31,086 Train Loss: 0.0222770, Val Loss: 0.0440475 +2025-02-26 04:20:31,087 Epoch 295/2000 +2025-02-26 04:21:33,254 Current Learning Rate: 0.0046077045 +2025-02-26 04:21:33,254 Train Loss: 0.0222781, Val Loss: 0.0440669 +2025-02-26 04:21:33,255 Epoch 296/2000 +2025-02-26 04:22:34,917 Current Learning Rate: 0.0046860474 +2025-02-26 04:22:34,918 Train Loss: 0.0222797, Val Loss: 0.0440679 +2025-02-26 04:22:34,918 Epoch 297/2000 +2025-02-26 04:23:36,866 Current Learning Rate: 0.0047644677 +2025-02-26 04:23:36,867 Train Loss: 0.0222802, Val Loss: 0.0440587 +2025-02-26 04:23:36,867 Epoch 298/2000 +2025-02-26 04:24:37,840 Current Learning Rate: 0.0048429462 +2025-02-26 04:24:37,840 Train Loss: 0.0222800, Val Loss: 0.0440823 +2025-02-26 04:24:37,841 Epoch 299/2000 +2025-02-26 04:25:39,400 Current Learning Rate: 0.0049214634 +2025-02-26 04:25:39,400 Train Loss: 0.0222857, Val Loss: 0.0440783 +2025-02-26 04:25:39,401 Epoch 300/2000 +2025-02-26 04:26:40,282 Current Learning Rate: 0.0050000000 +2025-02-26 04:26:40,283 Train Loss: 0.0222872, Val Loss: 0.0441032 +2025-02-26 04:26:40,283 Epoch 301/2000 +2025-02-26 04:27:41,098 Current Learning Rate: 0.0050785366 +2025-02-26 04:27:41,099 Train Loss: 0.0222913, Val Loss: 0.0440922 +2025-02-26 04:27:41,099 Epoch 302/2000 +2025-02-26 04:28:43,328 Current Learning Rate: 0.0051570538 +2025-02-26 04:28:43,329 Train Loss: 0.0222974, Val Loss: 0.0441633 +2025-02-26 04:28:43,329 Epoch 303/2000 +2025-02-26 04:29:44,354 Current Learning Rate: 0.0052355323 +2025-02-26 04:29:44,354 Train Loss: 0.0223111, Val Loss: 0.0441618 +2025-02-26 04:29:44,354 Epoch 304/2000 +2025-02-26 04:30:46,980 Current Learning Rate: 0.0053139526 +2025-02-26 04:30:46,980 Train Loss: 0.0223068, Val Loss: 0.0441336 +2025-02-26 04:30:46,981 Epoch 305/2000 +2025-02-26 04:31:47,408 Current Learning 
Rate: 0.0053922955 +2025-02-26 04:31:47,408 Train Loss: 0.0223041, Val Loss: 0.0441236 +2025-02-26 04:31:47,409 Epoch 306/2000 +2025-02-26 04:32:50,105 Current Learning Rate: 0.0054705416 +2025-02-26 04:32:50,106 Train Loss: 0.0223096, Val Loss: 0.0441700 +2025-02-26 04:32:50,106 Epoch 307/2000 +2025-02-26 04:33:51,673 Current Learning Rate: 0.0055486716 +2025-02-26 04:33:51,674 Train Loss: 0.0223122, Val Loss: 0.0441516 +2025-02-26 04:33:51,674 Epoch 308/2000 +2025-02-26 04:34:53,987 Current Learning Rate: 0.0056266662 +2025-02-26 04:34:53,988 Train Loss: 0.0223071, Val Loss: 0.0440161 +2025-02-26 04:34:53,988 Epoch 309/2000 +2025-02-26 04:35:57,091 Current Learning Rate: 0.0057045062 +2025-02-26 04:35:57,092 Train Loss: 0.0222673, Val Loss: 0.0440375 +2025-02-26 04:35:57,092 Epoch 310/2000 +2025-02-26 04:37:01,065 Current Learning Rate: 0.0057821723 +2025-02-26 04:37:01,066 Train Loss: 0.0222932, Val Loss: 0.0441034 +2025-02-26 04:37:01,066 Epoch 311/2000 +2025-02-26 04:38:05,777 Current Learning Rate: 0.0058596455 +2025-02-26 04:38:05,778 Train Loss: 0.0222899, Val Loss: 0.0440762 +2025-02-26 04:38:05,779 Epoch 312/2000 +2025-02-26 04:39:07,361 Current Learning Rate: 0.0059369066 +2025-02-26 04:39:07,361 Train Loss: 0.0222924, Val Loss: 0.0441243 +2025-02-26 04:39:07,362 Epoch 313/2000 +2025-02-26 04:40:08,441 Current Learning Rate: 0.0060139365 +2025-02-26 04:40:08,441 Train Loss: 0.0223105, Val Loss: 0.0441938 +2025-02-26 04:40:08,445 Epoch 314/2000 +2025-02-26 04:41:09,706 Current Learning Rate: 0.0060907162 +2025-02-26 04:41:09,707 Train Loss: 0.0223259, Val Loss: 0.0441904 +2025-02-26 04:41:09,707 Epoch 315/2000 +2025-02-26 04:42:11,564 Current Learning Rate: 0.0061672268 +2025-02-26 04:42:11,564 Train Loss: 0.0223161, Val Loss: 0.0441124 +2025-02-26 04:42:11,565 Epoch 316/2000 +2025-02-26 04:43:13,041 Current Learning Rate: 0.0062434494 +2025-02-26 04:43:13,042 Train Loss: 0.0222978, Val Loss: 0.0441162 +2025-02-26 04:43:13,042 Epoch 317/2000 +2025-02-26 
04:44:15,069 Current Learning Rate: 0.0063193652 +2025-02-26 04:44:15,070 Train Loss: 0.0222999, Val Loss: 0.0441759 +2025-02-26 04:44:15,070 Epoch 318/2000 +2025-02-26 04:45:18,693 Current Learning Rate: 0.0063949555 +2025-02-26 04:45:18,693 Train Loss: 0.0223108, Val Loss: 0.0441335 +2025-02-26 04:45:18,693 Epoch 319/2000 +2025-02-26 04:46:22,379 Current Learning Rate: 0.0064702016 +2025-02-26 04:46:22,379 Train Loss: 0.0223120, Val Loss: 0.0440573 +2025-02-26 04:46:22,379 Epoch 320/2000 +2025-02-26 04:47:26,043 Current Learning Rate: 0.0065450850 +2025-02-26 04:47:26,044 Train Loss: 0.0222334, Val Loss: 0.0438829 +2025-02-26 04:47:26,044 Epoch 321/2000 +2025-02-26 04:48:26,049 Current Learning Rate: 0.0066195871 +2025-02-26 04:48:26,050 Train Loss: 0.0222975, Val Loss: 0.0441251 +2025-02-26 04:48:26,050 Epoch 322/2000 +2025-02-26 04:49:27,904 Current Learning Rate: 0.0066936896 +2025-02-26 04:49:27,904 Train Loss: 0.0222147, Val Loss: 0.0437234 +2025-02-26 04:49:27,905 Epoch 323/2000 +2025-02-26 04:50:28,890 Current Learning Rate: 0.0067673742 +2025-02-26 04:50:28,891 Train Loss: 0.0221719, Val Loss: 0.0437799 +2025-02-26 04:50:28,891 Epoch 324/2000 +2025-02-26 04:51:30,287 Current Learning Rate: 0.0068406228 +2025-02-26 04:51:30,287 Train Loss: 0.0222588, Val Loss: 0.0440978 +2025-02-26 04:51:30,288 Epoch 325/2000 +2025-02-26 04:52:31,567 Current Learning Rate: 0.0069134172 +2025-02-26 04:52:31,568 Train Loss: 0.0222956, Val Loss: 0.0441014 +2025-02-26 04:52:31,568 Epoch 326/2000 +2025-02-26 04:53:32,847 Current Learning Rate: 0.0069857395 +2025-02-26 04:53:32,848 Train Loss: 0.0222944, Val Loss: 0.0440982 +2025-02-26 04:53:32,848 Epoch 327/2000 +2025-02-26 04:54:36,521 Current Learning Rate: 0.0070575718 +2025-02-26 04:54:36,521 Train Loss: 0.0222943, Val Loss: 0.0440938 +2025-02-26 04:54:36,521 Epoch 328/2000 +2025-02-26 04:55:40,542 Current Learning Rate: 0.0071288965 +2025-02-26 04:55:40,542 Train Loss: 0.0223031, Val Loss: 0.0441685 +2025-02-26 
04:55:40,543 Epoch 329/2000 +2025-02-26 04:56:43,614 Current Learning Rate: 0.0071996958 +2025-02-26 04:56:43,614 Train Loss: 0.0222526, Val Loss: 0.0438702 +2025-02-26 04:56:43,615 Epoch 330/2000 +2025-02-26 04:57:46,225 Current Learning Rate: 0.0072699525 +2025-02-26 04:57:46,226 Train Loss: 0.0222282, Val Loss: 0.0439048 +2025-02-26 04:57:46,226 Epoch 331/2000 +2025-02-26 04:58:46,430 Current Learning Rate: 0.0073396491 +2025-02-26 04:58:46,430 Train Loss: 0.0222296, Val Loss: 0.0438830 +2025-02-26 04:58:46,430 Epoch 332/2000 +2025-02-26 04:59:52,028 Current Learning Rate: 0.0074087684 +2025-02-26 04:59:52,028 Train Loss: 0.0222571, Val Loss: 0.0440157 +2025-02-26 04:59:52,029 Epoch 333/2000 +2025-02-26 05:00:55,434 Current Learning Rate: 0.0074772933 +2025-02-26 05:00:55,435 Train Loss: 0.0222533, Val Loss: 0.0439513 +2025-02-26 05:00:55,435 Epoch 334/2000 +2025-02-26 05:01:57,422 Current Learning Rate: 0.0075452071 +2025-02-26 05:01:57,422 Train Loss: 0.0222434, Val Loss: 0.0439533 +2025-02-26 05:01:57,422 Epoch 335/2000 +2025-02-26 05:03:00,565 Current Learning Rate: 0.0076124928 +2025-02-26 05:03:00,566 Train Loss: 0.0222247, Val Loss: 0.0438683 +2025-02-26 05:03:00,566 Epoch 336/2000 +2025-02-26 05:04:02,009 Current Learning Rate: 0.0076791340 +2025-02-26 05:04:02,009 Train Loss: 0.0222173, Val Loss: 0.0438839 +2025-02-26 05:04:02,009 Epoch 337/2000 +2025-02-26 05:05:12,456 Current Learning Rate: 0.0077451141 +2025-02-26 05:05:12,457 Train Loss: 0.0222170, Val Loss: 0.0438693 +2025-02-26 05:05:12,457 Epoch 338/2000 +2025-02-26 05:06:15,672 Current Learning Rate: 0.0078104169 +2025-02-26 05:06:15,673 Train Loss: 0.0222158, Val Loss: 0.0438621 +2025-02-26 05:06:15,673 Epoch 339/2000 +2025-02-26 05:07:18,072 Current Learning Rate: 0.0078750263 +2025-02-26 05:07:18,072 Train Loss: 0.0222198, Val Loss: 0.0438766 +2025-02-26 05:07:18,072 Epoch 340/2000 +2025-02-26 05:08:22,724 Current Learning Rate: 0.0079389263 +2025-02-26 05:08:22,725 Train Loss: 0.0222839, Val 
Loss: 0.0440589 +2025-02-26 05:08:22,725 Epoch 341/2000 +2025-02-26 05:09:28,268 Current Learning Rate: 0.0080021011 +2025-02-26 05:09:28,269 Train Loss: 0.0222890, Val Loss: 0.0440683 +2025-02-26 05:09:28,269 Epoch 342/2000 +2025-02-26 05:10:32,608 Current Learning Rate: 0.0080645353 +2025-02-26 05:10:32,609 Train Loss: 0.0222880, Val Loss: 0.0440513 +2025-02-26 05:10:32,609 Epoch 343/2000 +2025-02-26 05:11:37,001 Current Learning Rate: 0.0081262133 +2025-02-26 05:11:37,002 Train Loss: 0.0222876, Val Loss: 0.0440531 +2025-02-26 05:11:37,002 Epoch 344/2000 +2025-02-26 05:12:40,735 Current Learning Rate: 0.0081871199 +2025-02-26 05:12:40,735 Train Loss: 0.0222849, Val Loss: 0.0440498 +2025-02-26 05:12:40,735 Epoch 345/2000 +2025-02-26 05:13:43,576 Current Learning Rate: 0.0082472402 +2025-02-26 05:13:43,576 Train Loss: 0.0222830, Val Loss: 0.0440513 +2025-02-26 05:13:43,576 Epoch 346/2000 +2025-02-26 05:14:47,104 Current Learning Rate: 0.0083065593 +2025-02-26 05:14:47,104 Train Loss: 0.0222841, Val Loss: 0.0440569 +2025-02-26 05:14:47,104 Epoch 347/2000 +2025-02-26 05:15:50,510 Current Learning Rate: 0.0083650626 +2025-02-26 05:15:50,510 Train Loss: 0.0222754, Val Loss: 0.0440404 +2025-02-26 05:15:50,511 Epoch 348/2000 +2025-02-26 05:16:54,002 Current Learning Rate: 0.0084227355 +2025-02-26 05:16:54,003 Train Loss: 0.0222748, Val Loss: 0.0440079 +2025-02-26 05:16:54,003 Epoch 349/2000 +2025-02-26 05:17:56,599 Current Learning Rate: 0.0084795640 +2025-02-26 05:17:56,599 Train Loss: 0.0222574, Val Loss: 0.0438635 +2025-02-26 05:17:56,600 Epoch 350/2000 +2025-02-26 05:19:00,155 Current Learning Rate: 0.0085355339 +2025-02-26 05:19:00,156 Train Loss: 0.0222268, Val Loss: 0.0440582 +2025-02-26 05:19:00,156 Epoch 351/2000 +2025-02-26 05:20:02,704 Current Learning Rate: 0.0085906315 +2025-02-26 05:20:02,705 Train Loss: 0.0223019, Val Loss: 0.0440980 +2025-02-26 05:20:02,705 Epoch 352/2000 +2025-02-26 05:21:06,710 Current Learning Rate: 0.0086448431 +2025-02-26 
05:21:06,711 Train Loss: 0.0223330, Val Loss: 0.0443159 +2025-02-26 05:21:06,711 Epoch 353/2000 +2025-02-26 05:22:07,921 Current Learning Rate: 0.0086981555 +2025-02-26 05:22:07,922 Train Loss: 0.0223671, Val Loss: 0.0443177 +2025-02-26 05:22:07,922 Epoch 354/2000 +2025-02-26 05:23:09,362 Current Learning Rate: 0.0087505553 +2025-02-26 05:23:09,362 Train Loss: 0.0223659, Val Loss: 0.0443236 +2025-02-26 05:23:09,363 Epoch 355/2000 +2025-02-26 05:24:10,538 Current Learning Rate: 0.0088020298 +2025-02-26 05:24:10,538 Train Loss: 0.0223663, Val Loss: 0.0443183 +2025-02-26 05:24:10,538 Epoch 356/2000 +2025-02-26 05:25:13,977 Current Learning Rate: 0.0088525662 +2025-02-26 05:25:13,977 Train Loss: 0.0223673, Val Loss: 0.0443137 +2025-02-26 05:25:13,977 Epoch 357/2000 +2025-02-26 05:26:14,501 Current Learning Rate: 0.0089021520 +2025-02-26 05:26:14,501 Train Loss: 0.0223700, Val Loss: 0.0443354 +2025-02-26 05:26:14,501 Epoch 358/2000 +2025-02-26 05:27:17,158 Current Learning Rate: 0.0089507751 +2025-02-26 05:27:17,159 Train Loss: 0.0223712, Val Loss: 0.0443526 +2025-02-26 05:27:17,159 Epoch 359/2000 +2025-02-26 05:28:19,976 Current Learning Rate: 0.0089984233 +2025-02-26 05:28:19,977 Train Loss: 0.0223810, Val Loss: 0.0443566 +2025-02-26 05:28:19,977 Epoch 360/2000 +2025-02-26 05:29:21,715 Current Learning Rate: 0.0090450850 +2025-02-26 05:29:21,718 Train Loss: 0.0223816, Val Loss: 0.0443470 +2025-02-26 05:29:21,719 Epoch 361/2000 +2025-02-26 05:30:25,981 Current Learning Rate: 0.0090907486 +2025-02-26 05:30:25,981 Train Loss: 0.0223800, Val Loss: 0.0442988 +2025-02-26 05:30:25,981 Epoch 362/2000 +2025-02-26 05:31:30,566 Current Learning Rate: 0.0091354029 +2025-02-26 05:31:30,566 Train Loss: 0.0223674, Val Loss: 0.0442978 +2025-02-26 05:31:30,566 Epoch 363/2000 +2025-02-26 05:32:32,966 Current Learning Rate: 0.0091790368 +2025-02-26 05:32:32,967 Train Loss: 0.0223690, Val Loss: 0.0443176 +2025-02-26 05:32:32,967 Epoch 364/2000 +2025-02-26 05:33:35,026 Current Learning 
Rate: 0.0092216396 +2025-02-26 05:33:35,026 Train Loss: 0.0223710, Val Loss: 0.0443322 +2025-02-26 05:33:35,027 Epoch 365/2000 +2025-02-26 05:34:37,643 Current Learning Rate: 0.0092632008 +2025-02-26 05:34:37,643 Train Loss: 0.0223772, Val Loss: 0.0443321 +2025-02-26 05:34:37,644 Epoch 366/2000 +2025-02-26 05:35:39,912 Current Learning Rate: 0.0093037101 +2025-02-26 05:35:39,913 Train Loss: 0.0223778, Val Loss: 0.0443337 +2025-02-26 05:35:39,913 Epoch 367/2000 +2025-02-26 05:36:42,493 Current Learning Rate: 0.0093431576 +2025-02-26 05:36:42,494 Train Loss: 0.0223781, Val Loss: 0.0443471 +2025-02-26 05:36:42,494 Epoch 368/2000 +2025-02-26 05:37:44,876 Current Learning Rate: 0.0093815334 +2025-02-26 05:37:44,876 Train Loss: 0.0223763, Val Loss: 0.0443211 +2025-02-26 05:37:44,877 Epoch 369/2000 +2025-02-26 05:38:46,005 Current Learning Rate: 0.0094188282 +2025-02-26 05:38:46,006 Train Loss: 0.0223687, Val Loss: 0.0443053 +2025-02-26 05:38:46,006 Epoch 370/2000 +2025-02-26 05:39:51,479 Current Learning Rate: 0.0094550326 +2025-02-26 05:39:51,480 Train Loss: 0.0223679, Val Loss: 0.0443231 +2025-02-26 05:39:51,480 Epoch 371/2000 +2025-02-26 05:40:54,186 Current Learning Rate: 0.0094901379 +2025-02-26 05:40:54,187 Train Loss: 0.0223667, Val Loss: 0.0442781 +2025-02-26 05:40:54,187 Epoch 372/2000 +2025-02-26 05:41:56,592 Current Learning Rate: 0.0095241353 +2025-02-26 05:41:56,593 Train Loss: 0.0223636, Val Loss: 0.0443057 +2025-02-26 05:41:56,593 Epoch 373/2000 +2025-02-26 05:42:57,497 Current Learning Rate: 0.0095570164 +2025-02-26 05:42:57,498 Train Loss: 0.0223644, Val Loss: 0.0443045 +2025-02-26 05:42:57,498 Epoch 374/2000 +2025-02-26 05:44:00,044 Current Learning Rate: 0.0095887731 +2025-02-26 05:44:00,044 Train Loss: 0.0223640, Val Loss: 0.0443144 +2025-02-26 05:44:00,044 Epoch 375/2000 +2025-02-26 05:45:01,589 Current Learning Rate: 0.0096193977 +2025-02-26 05:45:01,590 Train Loss: 0.0223653, Val Loss: 0.0443020 +2025-02-26 05:45:01,590 Epoch 376/2000 +2025-02-26 
05:46:02,808 Current Learning Rate: 0.0096488824 +2025-02-26 05:46:02,808 Train Loss: 0.0223686, Val Loss: 0.0442469 +2025-02-26 05:46:02,809 Epoch 377/2000 +2025-02-26 05:47:06,290 Current Learning Rate: 0.0096772202 +2025-02-26 05:47:06,291 Train Loss: 0.0223413, Val Loss: 0.0442398 +2025-02-26 05:47:06,291 Epoch 378/2000 +2025-02-26 05:48:10,034 Current Learning Rate: 0.0097044038 +2025-02-26 05:48:10,035 Train Loss: 0.0223436, Val Loss: 0.0442444 +2025-02-26 05:48:10,035 Epoch 379/2000 +2025-02-26 05:49:14,454 Current Learning Rate: 0.0097304268 +2025-02-26 05:49:14,455 Train Loss: 0.0223439, Val Loss: 0.0442376 +2025-02-26 05:49:14,455 Epoch 380/2000 +2025-02-26 05:50:18,449 Current Learning Rate: 0.0097552826 +2025-02-26 05:50:18,450 Train Loss: 0.0223434, Val Loss: 0.0442413 +2025-02-26 05:50:18,450 Epoch 381/2000 +2025-02-26 05:51:23,375 Current Learning Rate: 0.0097789651 +2025-02-26 05:51:23,376 Train Loss: 0.0223443, Val Loss: 0.0442383 +2025-02-26 05:51:23,376 Epoch 382/2000 +2025-02-26 05:52:26,750 Current Learning Rate: 0.0098014684 +2025-02-26 05:52:26,751 Train Loss: 0.0223494, Val Loss: 0.0442765 +2025-02-26 05:52:26,751 Epoch 383/2000 +2025-02-26 05:53:29,493 Current Learning Rate: 0.0098227871 +2025-02-26 05:53:29,493 Train Loss: 0.0223479, Val Loss: 0.0442478 +2025-02-26 05:53:29,494 Epoch 384/2000 +2025-02-26 05:54:34,179 Current Learning Rate: 0.0098429158 +2025-02-26 05:54:34,180 Train Loss: 0.0223465, Val Loss: 0.0442628 +2025-02-26 05:54:34,180 Epoch 385/2000 +2025-02-26 05:55:37,020 Current Learning Rate: 0.0098618496 +2025-02-26 05:55:37,020 Train Loss: 0.0223472, Val Loss: 0.0442489 +2025-02-26 05:55:37,020 Epoch 386/2000 +2025-02-26 05:56:38,932 Current Learning Rate: 0.0098795838 +2025-02-26 05:56:38,933 Train Loss: 0.0223462, Val Loss: 0.0442423 +2025-02-26 05:56:38,934 Epoch 387/2000 +2025-02-26 05:57:41,289 Current Learning Rate: 0.0098961141 +2025-02-26 05:57:41,289 Train Loss: 0.0223475, Val Loss: 0.0442470 +2025-02-26 
05:57:41,289 Epoch 388/2000 +2025-02-26 05:58:43,960 Current Learning Rate: 0.0099114363 +2025-02-26 05:58:43,960 Train Loss: 0.0223463, Val Loss: 0.0442365 +2025-02-26 05:58:43,961 Epoch 389/2000 +2025-02-26 05:59:45,425 Current Learning Rate: 0.0099255466 +2025-02-26 05:59:45,425 Train Loss: 0.0223376, Val Loss: 0.0442404 +2025-02-26 05:59:45,426 Epoch 390/2000 +2025-02-26 06:00:45,988 Current Learning Rate: 0.0099384417 +2025-02-26 06:00:45,988 Train Loss: 0.0223342, Val Loss: 0.0441862 +2025-02-26 06:00:45,989 Epoch 391/2000 +2025-02-26 06:01:49,880 Current Learning Rate: 0.0099501183 +2025-02-26 06:01:49,880 Train Loss: 0.0223236, Val Loss: 0.0441937 +2025-02-26 06:01:49,881 Epoch 392/2000 +2025-02-26 06:02:53,744 Current Learning Rate: 0.0099605735 +2025-02-26 06:02:53,745 Train Loss: 0.0223235, Val Loss: 0.0442758 +2025-02-26 06:02:53,745 Epoch 393/2000 +2025-02-26 06:03:54,952 Current Learning Rate: 0.0099698048 +2025-02-26 06:03:54,953 Train Loss: 0.0223327, Val Loss: 0.0441096 +2025-02-26 06:03:54,953 Epoch 394/2000 +2025-02-26 06:04:56,651 Current Learning Rate: 0.0099778098 +2025-02-26 06:04:56,652 Train Loss: 0.0223062, Val Loss: 0.0440965 +2025-02-26 06:04:56,652 Epoch 395/2000 +2025-02-26 06:05:59,377 Current Learning Rate: 0.0099845867 +2025-02-26 06:05:59,378 Train Loss: 0.0222990, Val Loss: 0.0441031 +2025-02-26 06:05:59,378 Epoch 396/2000 +2025-02-26 06:07:00,145 Current Learning Rate: 0.0099901336 +2025-02-26 06:07:00,146 Train Loss: 0.0223233, Val Loss: 0.0443810 +2025-02-26 06:07:00,146 Epoch 397/2000 +2025-02-26 06:08:05,633 Current Learning Rate: 0.0099944494 +2025-02-26 06:08:05,634 Train Loss: 0.0223994, Val Loss: 0.0444189 +2025-02-26 06:08:05,634 Epoch 398/2000 +2025-02-26 06:09:10,266 Current Learning Rate: 0.0099975328 +2025-02-26 06:09:10,267 Train Loss: 0.0223958, Val Loss: 0.0443159 +2025-02-26 06:09:10,267 Epoch 399/2000 +2025-02-26 06:10:14,017 Current Learning Rate: 0.0099993832 +2025-02-26 06:10:14,018 Train Loss: 0.0223716, Val 
Loss: 0.0443069 +2025-02-26 06:10:14,018 Epoch 400/2000 +2025-02-26 06:11:16,464 Current Learning Rate: 0.0100000000 +2025-02-26 06:11:16,465 Train Loss: 0.0223698, Val Loss: 0.0443039 +2025-02-26 06:11:16,465 Epoch 401/2000 +2025-02-26 06:12:19,621 Current Learning Rate: 0.0099993832 +2025-02-26 06:12:19,621 Train Loss: 0.0223690, Val Loss: 0.0443005 +2025-02-26 06:12:19,622 Epoch 402/2000 +2025-02-26 06:13:20,818 Current Learning Rate: 0.0099975328 +2025-02-26 06:13:20,819 Train Loss: 0.0223827, Val Loss: 0.0444726 +2025-02-26 06:13:20,819 Epoch 403/2000 +2025-02-26 06:14:23,692 Current Learning Rate: 0.0099944494 +2025-02-26 06:14:23,693 Train Loss: 0.0224338, Val Loss: 0.0445047 +2025-02-26 06:14:23,694 Epoch 404/2000 +2025-02-26 06:15:26,126 Current Learning Rate: 0.0099901336 +2025-02-26 06:15:26,127 Train Loss: 0.0224269, Val Loss: 0.0444829 +2025-02-26 06:15:26,128 Epoch 405/2000 +2025-02-26 06:16:28,165 Current Learning Rate: 0.0099845867 +2025-02-26 06:16:28,166 Train Loss: 0.0224280, Val Loss: 0.0444915 +2025-02-26 06:16:28,166 Epoch 406/2000 +2025-02-26 06:17:35,229 Current Learning Rate: 0.0099778098 +2025-02-26 06:17:35,230 Train Loss: 0.0224286, Val Loss: 0.0445025 +2025-02-26 06:17:35,230 Epoch 407/2000 +2025-02-26 06:18:38,737 Current Learning Rate: 0.0099698048 +2025-02-26 06:18:38,738 Train Loss: 0.0224295, Val Loss: 0.0444972 +2025-02-26 06:18:38,738 Epoch 408/2000 +2025-02-26 06:19:41,874 Current Learning Rate: 0.0099605735 +2025-02-26 06:19:41,875 Train Loss: 0.0224300, Val Loss: 0.0445026 +2025-02-26 06:19:41,875 Epoch 409/2000 +2025-02-26 06:20:43,360 Current Learning Rate: 0.0099501183 +2025-02-26 06:20:43,360 Train Loss: 0.0224274, Val Loss: 0.0445225 +2025-02-26 06:20:43,361 Epoch 410/2000 +2025-02-26 06:21:47,603 Current Learning Rate: 0.0099384417 +2025-02-26 06:21:47,604 Train Loss: 0.0224236, Val Loss: 0.0444444 +2025-02-26 06:21:47,604 Epoch 411/2000 +2025-02-26 06:22:49,633 Current Learning Rate: 0.0099255466 +2025-02-26 
06:22:49,634 Train Loss: 0.0224118, Val Loss: 0.0444478 +2025-02-26 06:22:49,634 Epoch 412/2000 +2025-02-26 06:23:51,994 Current Learning Rate: 0.0099114363 +2025-02-26 06:23:51,995 Train Loss: 0.0224117, Val Loss: 0.0445128 +2025-02-26 06:23:51,995 Epoch 413/2000 +2025-02-26 06:24:55,491 Current Learning Rate: 0.0098961141 +2025-02-26 06:24:55,493 Train Loss: 0.0224448, Val Loss: 0.0445325 +2025-02-26 06:24:55,493 Epoch 414/2000 +2025-02-26 06:25:57,110 Current Learning Rate: 0.0098795838 +2025-02-26 06:25:57,111 Train Loss: 0.0224493, Val Loss: 0.0445648 +2025-02-26 06:25:57,111 Epoch 415/2000 +2025-02-26 06:26:59,985 Current Learning Rate: 0.0098618496 +2025-02-26 06:26:59,986 Train Loss: 0.0224485, Val Loss: 0.0445414 +2025-02-26 06:26:59,986 Epoch 416/2000 +2025-02-26 06:28:02,577 Current Learning Rate: 0.0098429158 +2025-02-26 06:28:02,577 Train Loss: 0.0224485, Val Loss: 0.0445597 +2025-02-26 06:28:02,578 Epoch 417/2000 +2025-02-26 06:29:04,875 Current Learning Rate: 0.0098227871 +2025-02-26 06:29:04,875 Train Loss: 0.0224466, Val Loss: 0.0445378 +2025-02-26 06:29:04,875 Epoch 418/2000 +2025-02-26 06:30:07,382 Current Learning Rate: 0.0098014684 +2025-02-26 06:30:07,383 Train Loss: 0.0224411, Val Loss: 0.0445444 +2025-02-26 06:30:07,383 Epoch 419/2000 +2025-02-26 06:31:08,873 Current Learning Rate: 0.0097789651 +2025-02-26 06:31:08,874 Train Loss: 0.0224595, Val Loss: 0.0445892 +2025-02-26 06:31:08,875 Epoch 420/2000 +2025-02-26 06:32:09,978 Current Learning Rate: 0.0097552826 +2025-02-26 06:32:09,978 Train Loss: 0.0224592, Val Loss: 0.0445847 +2025-02-26 06:32:09,979 Epoch 421/2000 +2025-02-26 06:33:12,004 Current Learning Rate: 0.0097304268 +2025-02-26 06:33:12,005 Train Loss: 0.0224563, Val Loss: 0.0445757 +2025-02-26 06:33:12,005 Epoch 422/2000 +2025-02-26 06:34:14,088 Current Learning Rate: 0.0097044038 +2025-02-26 06:34:14,088 Train Loss: 0.0224555, Val Loss: 0.0445713 +2025-02-26 06:34:14,088 Epoch 423/2000 +2025-02-26 06:35:17,270 Current Learning 
Rate: 0.0096772202 +2025-02-26 06:35:17,270 Train Loss: 0.0224507, Val Loss: 0.0445584 +2025-02-26 06:35:17,270 Epoch 424/2000 +2025-02-26 06:36:20,084 Current Learning Rate: 0.0096488824 +2025-02-26 06:36:20,085 Train Loss: 0.0224490, Val Loss: 0.0445540 +2025-02-26 06:36:20,085 Epoch 425/2000 +2025-02-26 06:37:22,760 Current Learning Rate: 0.0096193977 +2025-02-26 06:37:22,760 Train Loss: 0.0224484, Val Loss: 0.0445584 +2025-02-26 06:37:22,761 Epoch 426/2000 +2025-02-26 06:38:29,063 Current Learning Rate: 0.0095887731 +2025-02-26 06:38:29,063 Train Loss: 0.0224621, Val Loss: 0.0446111 +2025-02-26 06:38:29,064 Epoch 427/2000 +2025-02-26 06:39:32,092 Current Learning Rate: 0.0095570164 +2025-02-26 06:39:32,092 Train Loss: 0.0224665, Val Loss: 0.0446288 +2025-02-26 06:39:32,092 Epoch 428/2000 +2025-02-26 06:40:35,172 Current Learning Rate: 0.0095241353 +2025-02-26 06:40:35,173 Train Loss: 0.0224662, Val Loss: 0.0446100 +2025-02-26 06:40:35,173 Epoch 429/2000 +2025-02-26 06:41:36,796 Current Learning Rate: 0.0094901379 +2025-02-26 06:41:36,796 Train Loss: 0.0224667, Val Loss: 0.0446105 +2025-02-26 06:41:36,796 Epoch 430/2000 +2025-02-26 06:42:38,941 Current Learning Rate: 0.0094550326 +2025-02-26 06:42:38,942 Train Loss: 0.0224673, Val Loss: 0.0445955 +2025-02-26 06:42:38,942 Epoch 431/2000 +2025-02-26 06:43:41,493 Current Learning Rate: 0.0094188282 +2025-02-26 06:43:41,494 Train Loss: 0.0224661, Val Loss: 0.0446145 +2025-02-26 06:43:41,494 Epoch 432/2000 +2025-02-26 06:44:44,100 Current Learning Rate: 0.0093815334 +2025-02-26 06:44:44,100 Train Loss: 0.0224646, Val Loss: 0.0446083 +2025-02-26 06:44:44,100 Epoch 433/2000 +2025-02-26 06:45:45,933 Current Learning Rate: 0.0093431576 +2025-02-26 06:45:45,933 Train Loss: 0.0224654, Val Loss: 0.0446134 +2025-02-26 06:45:45,933 Epoch 434/2000 +2025-02-26 06:46:47,976 Current Learning Rate: 0.0093037101 +2025-02-26 06:46:47,977 Train Loss: 0.0224681, Val Loss: 0.0446252 +2025-02-26 06:46:47,977 Epoch 435/2000 +2025-02-26 
06:47:50,396 Current Learning Rate: 0.0092632008 +2025-02-26 06:47:50,396 Train Loss: 0.0224702, Val Loss: 0.0446196 +2025-02-26 06:47:50,397 Epoch 436/2000 +2025-02-26 06:48:52,513 Current Learning Rate: 0.0092216396 +2025-02-26 06:48:52,514 Train Loss: 0.0224700, Val Loss: 0.0446174 +2025-02-26 06:48:52,514 Epoch 437/2000 +2025-02-26 06:49:56,878 Current Learning Rate: 0.0091790368 +2025-02-26 06:49:56,878 Train Loss: 0.0224689, Val Loss: 0.0446198 +2025-02-26 06:49:56,879 Epoch 438/2000 +2025-02-26 06:51:00,913 Current Learning Rate: 0.0091354029 +2025-02-26 06:51:00,913 Train Loss: 0.0224677, Val Loss: 0.0446093 +2025-02-26 06:51:00,913 Epoch 439/2000 +2025-02-26 06:52:02,726 Current Learning Rate: 0.0090907486 +2025-02-26 06:52:02,727 Train Loss: 0.0224658, Val Loss: 0.0446073 +2025-02-26 06:52:02,727 Epoch 440/2000 +2025-02-26 06:53:05,735 Current Learning Rate: 0.0090450850 +2025-02-26 06:53:05,736 Train Loss: 0.0224659, Val Loss: 0.0446056 +2025-02-26 06:53:05,736 Epoch 441/2000 +2025-02-26 06:54:07,731 Current Learning Rate: 0.0089984233 +2025-02-26 06:54:07,731 Train Loss: 0.0224660, Val Loss: 0.0445988 +2025-02-26 06:54:07,731 Epoch 442/2000 +2025-02-26 06:55:10,082 Current Learning Rate: 0.0089507751 +2025-02-26 06:55:10,083 Train Loss: 0.0224626, Val Loss: 0.0446132 +2025-02-26 06:55:10,083 Epoch 443/2000 +2025-02-26 06:56:12,813 Current Learning Rate: 0.0089021520 +2025-02-26 06:56:12,813 Train Loss: 0.0224672, Val Loss: 0.0446222 +2025-02-26 06:56:12,813 Epoch 444/2000 +2025-02-26 06:57:14,571 Current Learning Rate: 0.0088525662 +2025-02-26 06:57:14,572 Train Loss: 0.0224604, Val Loss: 0.0445920 +2025-02-26 06:57:14,572 Epoch 445/2000 +2025-02-26 06:58:17,461 Current Learning Rate: 0.0088020298 +2025-02-26 06:58:17,461 Train Loss: 0.0224583, Val Loss: 0.0445883 +2025-02-26 06:58:17,461 Epoch 446/2000 +2025-02-26 06:59:21,060 Current Learning Rate: 0.0087505553 +2025-02-26 06:59:21,060 Train Loss: 0.0224576, Val Loss: 0.0445929 +2025-02-26 
06:59:21,061 Epoch 447/2000 +2025-02-26 07:00:23,534 Current Learning Rate: 0.0086981555 +2025-02-26 07:00:23,535 Train Loss: 0.0224595, Val Loss: 0.0446066 +2025-02-26 07:00:23,535 Epoch 448/2000 +2025-02-26 07:01:25,291 Current Learning Rate: 0.0086448431 +2025-02-26 07:01:25,292 Train Loss: 0.0224638, Val Loss: 0.0446017 +2025-02-26 07:01:25,292 Epoch 449/2000 +2025-02-26 07:02:28,300 Current Learning Rate: 0.0085906315 +2025-02-26 07:02:28,300 Train Loss: 0.0224632, Val Loss: 0.0446058 +2025-02-26 07:02:28,300 Epoch 450/2000 +2025-02-26 07:03:30,741 Current Learning Rate: 0.0085355339 +2025-02-26 07:03:30,741 Train Loss: 0.0224614, Val Loss: 0.0445973 +2025-02-26 07:03:30,741 Epoch 451/2000 +2025-02-26 07:04:32,805 Current Learning Rate: 0.0084795640 +2025-02-26 07:04:32,806 Train Loss: 0.0224609, Val Loss: 0.0445976 +2025-02-26 07:04:32,806 Epoch 452/2000 +2025-02-26 07:05:36,804 Current Learning Rate: 0.0084227355 +2025-02-26 07:05:36,805 Train Loss: 0.0224608, Val Loss: 0.0446020 +2025-02-26 07:05:36,805 Epoch 453/2000 +2025-02-26 07:06:40,723 Current Learning Rate: 0.0083650626 +2025-02-26 07:06:40,724 Train Loss: 0.0224616, Val Loss: 0.0445974 +2025-02-26 07:06:40,724 Epoch 454/2000 +2025-02-26 07:07:44,369 Current Learning Rate: 0.0083065593 +2025-02-26 07:07:44,369 Train Loss: 0.0224671, Val Loss: 0.0446214 +2025-02-26 07:07:44,369 Epoch 455/2000 +2025-02-26 07:08:46,895 Current Learning Rate: 0.0082472402 +2025-02-26 07:08:46,895 Train Loss: 0.0224699, Val Loss: 0.0446197 +2025-02-26 07:08:46,896 Epoch 456/2000 +2025-02-26 07:09:48,444 Current Learning Rate: 0.0081871199 +2025-02-26 07:09:48,444 Train Loss: 0.0224682, Val Loss: 0.0446170 +2025-02-26 07:09:48,445 Epoch 457/2000 +2025-02-26 07:10:51,689 Current Learning Rate: 0.0081262133 +2025-02-26 07:10:51,690 Train Loss: 0.0224677, Val Loss: 0.0446124 +2025-02-26 07:10:51,690 Epoch 458/2000 +2025-02-26 07:11:53,377 Current Learning Rate: 0.0080645353 +2025-02-26 07:11:53,377 Train Loss: 0.0224657, Val 
Loss: 0.0445969 +2025-02-26 07:11:53,378 Epoch 459/2000 +2025-02-26 07:12:55,134 Current Learning Rate: 0.0080021011 +2025-02-26 07:12:55,135 Train Loss: 0.0224630, Val Loss: 0.0446028 +2025-02-26 07:12:55,135 Epoch 460/2000 +2025-02-26 07:13:58,128 Current Learning Rate: 0.0079389263 +2025-02-26 07:13:58,128 Train Loss: 0.0224594, Val Loss: 0.0445895 +2025-02-26 07:13:58,129 Epoch 461/2000 +2025-02-26 07:15:01,801 Current Learning Rate: 0.0078750263 +2025-02-26 07:15:01,801 Train Loss: 0.0224591, Val Loss: 0.0445938 +2025-02-26 07:15:01,802 Epoch 462/2000 +2025-02-26 07:16:04,667 Current Learning Rate: 0.0078104169 +2025-02-26 07:16:04,668 Train Loss: 0.0224611, Val Loss: 0.0445985 +2025-02-26 07:16:04,668 Epoch 463/2000 +2025-02-26 07:17:06,720 Current Learning Rate: 0.0077451141 +2025-02-26 07:17:06,721 Train Loss: 0.0224619, Val Loss: 0.0445983 +2025-02-26 07:17:06,721 Epoch 464/2000 +2025-02-26 07:18:09,146 Current Learning Rate: 0.0076791340 +2025-02-26 07:18:09,147 Train Loss: 0.0224591, Val Loss: 0.0445687 +2025-02-26 07:18:09,147 Epoch 465/2000 +2025-02-26 07:19:12,953 Current Learning Rate: 0.0076124928 +2025-02-26 07:19:12,954 Train Loss: 0.0224495, Val Loss: 0.0445613 +2025-02-26 07:19:12,954 Epoch 466/2000 +2025-02-26 07:20:16,095 Current Learning Rate: 0.0075452071 +2025-02-26 07:20:16,096 Train Loss: 0.0224540, Val Loss: 0.0445903 +2025-02-26 07:20:16,096 Epoch 467/2000 +2025-02-26 07:21:18,451 Current Learning Rate: 0.0074772933 +2025-02-26 07:21:18,451 Train Loss: 0.0224585, Val Loss: 0.0445894 +2025-02-26 07:21:18,452 Epoch 468/2000 +2025-02-26 07:22:21,777 Current Learning Rate: 0.0074087684 +2025-02-26 07:22:21,778 Train Loss: 0.0224601, Val Loss: 0.0445940 +2025-02-26 07:22:21,778 Epoch 469/2000 +2025-02-26 07:23:24,903 Current Learning Rate: 0.0073396491 +2025-02-26 07:23:24,904 Train Loss: 0.0224590, Val Loss: 0.0445935 +2025-02-26 07:23:24,904 Epoch 470/2000 +2025-02-26 07:24:26,665 Current Learning Rate: 0.0072699525 +2025-02-26 
07:24:26,666 Train Loss: 0.0224640, Val Loss: 0.0446100 +2025-02-26 07:24:26,666 Epoch 471/2000 +2025-02-26 07:25:28,310 Current Learning Rate: 0.0071996958 +2025-02-26 07:25:28,311 Train Loss: 0.0224648, Val Loss: 0.0446155 +2025-02-26 07:25:28,311 Epoch 472/2000 +2025-02-26 07:26:29,524 Current Learning Rate: 0.0071288965 +2025-02-26 07:26:29,524 Train Loss: 0.0224621, Val Loss: 0.0446018 +2025-02-26 07:26:29,524 Epoch 473/2000 +2025-02-26 07:27:31,789 Current Learning Rate: 0.0070575718 +2025-02-26 07:27:31,790 Train Loss: 0.0224602, Val Loss: 0.0445969 +2025-02-26 07:27:31,790 Epoch 474/2000 +2025-02-26 07:28:34,005 Current Learning Rate: 0.0069857395 +2025-02-26 07:28:34,006 Train Loss: 0.0224606, Val Loss: 0.0445993 +2025-02-26 07:28:34,006 Epoch 475/2000 +2025-02-26 07:29:34,918 Current Learning Rate: 0.0069134172 +2025-02-26 07:29:34,919 Train Loss: 0.0224606, Val Loss: 0.0445992 +2025-02-26 07:29:34,919 Epoch 476/2000 +2025-02-26 07:30:36,574 Current Learning Rate: 0.0068406228 +2025-02-26 07:30:36,574 Train Loss: 0.0224597, Val Loss: 0.0445617 +2025-02-26 07:30:36,574 Epoch 477/2000 +2025-02-26 07:31:39,167 Current Learning Rate: 0.0067673742 +2025-02-26 07:31:39,167 Train Loss: 0.0224433, Val Loss: 0.0445388 +2025-02-26 07:31:39,168 Epoch 478/2000 +2025-02-26 07:32:40,453 Current Learning Rate: 0.0066936896 +2025-02-26 07:32:40,453 Train Loss: 0.0224414, Val Loss: 0.0445467 +2025-02-26 07:32:40,454 Epoch 479/2000 +2025-02-26 07:33:41,085 Current Learning Rate: 0.0066195871 +2025-02-26 07:33:41,085 Train Loss: 0.0224423, Val Loss: 0.0445393 +2025-02-26 07:33:41,085 Epoch 480/2000 +2025-02-26 07:34:43,328 Current Learning Rate: 0.0065450850 +2025-02-26 07:34:43,328 Train Loss: 0.0224422, Val Loss: 0.0445525 +2025-02-26 07:34:43,328 Epoch 481/2000 +2025-02-26 07:35:44,509 Current Learning Rate: 0.0064702016 +2025-02-26 07:35:44,510 Train Loss: 0.0224440, Val Loss: 0.0445505 +2025-02-26 07:35:44,510 Epoch 482/2000 +2025-02-26 07:36:46,410 Current Learning 
Rate: 0.0063949555 +2025-02-26 07:36:46,410 Train Loss: 0.0224463, Val Loss: 0.0445720 +2025-02-26 07:36:46,411 Epoch 483/2000 +2025-02-26 07:37:47,873 Current Learning Rate: 0.0063193652 +2025-02-26 07:37:47,873 Train Loss: 0.0224512, Val Loss: 0.0445713 +2025-02-26 07:37:47,874 Epoch 484/2000 +2025-02-26 07:38:49,739 Current Learning Rate: 0.0062434494 +2025-02-26 07:38:49,740 Train Loss: 0.0224527, Val Loss: 0.0445745 +2025-02-26 07:38:49,740 Epoch 485/2000 +2025-02-26 07:39:51,560 Current Learning Rate: 0.0061672268 +2025-02-26 07:39:51,560 Train Loss: 0.0224522, Val Loss: 0.0445751 +2025-02-26 07:39:51,560 Epoch 486/2000 +2025-02-26 07:40:54,138 Current Learning Rate: 0.0060907162 +2025-02-26 07:40:54,139 Train Loss: 0.0224523, Val Loss: 0.0445534 +2025-02-26 07:40:54,139 Epoch 487/2000 +2025-02-26 07:41:55,402 Current Learning Rate: 0.0060139365 +2025-02-26 07:41:55,403 Train Loss: 0.0224495, Val Loss: 0.0445697 +2025-02-26 07:41:55,403 Epoch 488/2000 +2025-02-26 07:43:00,953 Current Learning Rate: 0.0059369066 +2025-02-26 07:43:00,953 Train Loss: 0.0224536, Val Loss: 0.0445934 +2025-02-26 07:43:00,954 Epoch 489/2000 +2025-02-26 07:44:03,806 Current Learning Rate: 0.0058596455 +2025-02-26 07:44:03,807 Train Loss: 0.0224594, Val Loss: 0.0446005 +2025-02-26 07:44:03,807 Epoch 490/2000 +2025-02-26 07:45:05,991 Current Learning Rate: 0.0057821723 +2025-02-26 07:45:05,992 Train Loss: 0.0224604, Val Loss: 0.0445985 +2025-02-26 07:45:05,992 Epoch 491/2000 +2025-02-26 07:46:08,862 Current Learning Rate: 0.0057045062 +2025-02-26 07:46:08,863 Train Loss: 0.0224631, Val Loss: 0.0446067 +2025-02-26 07:46:08,863 Epoch 492/2000 +2025-02-26 07:47:11,877 Current Learning Rate: 0.0056266662 +2025-02-26 07:47:11,878 Train Loss: 0.0224637, Val Loss: 0.0446198 +2025-02-26 07:47:11,878 Epoch 493/2000 +2025-02-26 07:48:15,975 Current Learning Rate: 0.0055486716 +2025-02-26 07:48:15,976 Train Loss: 0.0224631, Val Loss: 0.0446075 +2025-02-26 07:48:15,976 Epoch 494/2000 +2025-02-26 
07:49:17,556 Current Learning Rate: 0.0054705416 +2025-02-26 07:49:17,556 Train Loss: 0.0224619, Val Loss: 0.0446078 +2025-02-26 07:49:17,557 Epoch 495/2000 +2025-02-26 07:50:20,298 Current Learning Rate: 0.0053922955 +2025-02-26 07:50:20,300 Train Loss: 0.0224619, Val Loss: 0.0446049 +2025-02-26 07:50:20,301 Epoch 496/2000 +2025-02-26 07:51:24,591 Current Learning Rate: 0.0053139526 +2025-02-26 07:51:24,591 Train Loss: 0.0224605, Val Loss: 0.0446049 +2025-02-26 07:51:24,591 Epoch 497/2000 +2025-02-26 07:52:27,801 Current Learning Rate: 0.0052355323 +2025-02-26 07:52:27,802 Train Loss: 0.0224607, Val Loss: 0.0446048 +2025-02-26 07:52:27,802 Epoch 498/2000 +2025-02-26 07:53:30,517 Current Learning Rate: 0.0051570538 +2025-02-26 07:53:30,518 Train Loss: 0.0224605, Val Loss: 0.0446028 +2025-02-26 07:53:30,518 Epoch 499/2000 +2025-02-26 07:54:33,485 Current Learning Rate: 0.0050785366 +2025-02-26 07:54:33,486 Train Loss: 0.0224599, Val Loss: 0.0445980 +2025-02-26 07:54:33,486 Epoch 500/2000 +2025-02-26 07:55:38,715 Current Learning Rate: 0.0050000000 +2025-02-26 07:55:38,715 Train Loss: 0.0224633, Val Loss: 0.0446163 +2025-02-26 07:55:38,716 Epoch 501/2000 +2025-02-26 07:56:41,366 Current Learning Rate: 0.0049214634 +2025-02-26 07:56:41,366 Train Loss: 0.0224659, Val Loss: 0.0446183 +2025-02-26 07:56:41,367 Epoch 502/2000 +2025-02-26 07:57:48,606 Current Learning Rate: 0.0048429462 +2025-02-26 07:57:48,606 Train Loss: 0.0224654, Val Loss: 0.0446194 +2025-02-26 07:57:48,606 Epoch 503/2000 +2025-02-26 07:58:52,983 Current Learning Rate: 0.0047644677 +2025-02-26 07:58:52,983 Train Loss: 0.0224654, Val Loss: 0.0446157 +2025-02-26 07:58:52,984 Epoch 504/2000 +2025-02-26 07:59:55,651 Current Learning Rate: 0.0046860474 +2025-02-26 07:59:55,652 Train Loss: 0.0224643, Val Loss: 0.0446128 +2025-02-26 07:59:55,652 Epoch 505/2000 +2025-02-26 08:00:57,728 Current Learning Rate: 0.0046077045 +2025-02-26 08:00:57,728 Train Loss: 0.0224642, Val Loss: 0.0446143 +2025-02-26 
08:00:57,728 Epoch 506/2000 +2025-02-26 08:01:59,796 Current Learning Rate: 0.0045294584 +2025-02-26 08:01:59,797 Train Loss: 0.0224634, Val Loss: 0.0446153 +2025-02-26 08:01:59,798 Epoch 507/2000 +2025-02-26 08:03:02,349 Current Learning Rate: 0.0044513284 +2025-02-26 08:03:02,350 Train Loss: 0.0224626, Val Loss: 0.0446090 +2025-02-26 08:03:02,350 Epoch 508/2000 +2025-02-26 08:04:05,522 Current Learning Rate: 0.0043733338 +2025-02-26 08:04:05,523 Train Loss: 0.0224620, Val Loss: 0.0446060 +2025-02-26 08:04:05,523 Epoch 509/2000 +2025-02-26 08:05:06,864 Current Learning Rate: 0.0042954938 +2025-02-26 08:05:06,865 Train Loss: 0.0224608, Val Loss: 0.0446070 +2025-02-26 08:05:06,865 Epoch 510/2000 +2025-02-26 08:06:08,547 Current Learning Rate: 0.0042178277 +2025-02-26 08:06:08,548 Train Loss: 0.0224614, Val Loss: 0.0446034 +2025-02-26 08:06:08,548 Epoch 511/2000 +2025-02-26 08:07:10,725 Current Learning Rate: 0.0041403545 +2025-02-26 08:07:10,726 Train Loss: 0.0224625, Val Loss: 0.0446153 +2025-02-26 08:07:10,726 Epoch 512/2000 +2025-02-26 08:08:14,719 Current Learning Rate: 0.0040630934 +2025-02-26 08:08:14,720 Train Loss: 0.0224642, Val Loss: 0.0446198 +2025-02-26 08:08:14,720 Epoch 513/2000 +2025-02-26 08:09:22,308 Current Learning Rate: 0.0039860635 +2025-02-26 08:09:22,309 Train Loss: 0.0224651, Val Loss: 0.0446157 +2025-02-26 08:09:22,309 Epoch 514/2000 +2025-02-26 08:10:24,865 Current Learning Rate: 0.0039092838 +2025-02-26 08:10:24,866 Train Loss: 0.0224639, Val Loss: 0.0446157 +2025-02-26 08:10:24,866 Epoch 515/2000 +2025-02-26 08:11:26,923 Current Learning Rate: 0.0038327732 +2025-02-26 08:11:26,924 Train Loss: 0.0224634, Val Loss: 0.0446159 +2025-02-26 08:11:26,924 Epoch 516/2000 +2025-02-26 08:12:29,518 Current Learning Rate: 0.0037565506 +2025-02-26 08:12:29,519 Train Loss: 0.0224640, Val Loss: 0.0446119 +2025-02-26 08:12:29,519 Epoch 517/2000 +2025-02-26 08:13:34,628 Current Learning Rate: 0.0036806348 +2025-02-26 08:13:34,628 Train Loss: 0.0224635, Val 
Loss: 0.0446117 +2025-02-26 08:13:34,629 Epoch 518/2000 +2025-02-26 08:14:41,479 Current Learning Rate: 0.0036050445 +2025-02-26 08:14:41,480 Train Loss: 0.0224635, Val Loss: 0.0446127 +2025-02-26 08:14:41,480 Epoch 519/2000 +2025-02-26 08:15:45,759 Current Learning Rate: 0.0035297984 +2025-02-26 08:15:45,760 Train Loss: 0.0224630, Val Loss: 0.0446044 +2025-02-26 08:15:45,760 Epoch 520/2000 +2025-02-26 08:16:50,388 Current Learning Rate: 0.0034549150 +2025-02-26 08:16:50,389 Train Loss: 0.0224588, Val Loss: 0.0446045 +2025-02-26 08:16:50,389 Epoch 521/2000 +2025-02-26 08:17:57,819 Current Learning Rate: 0.0033804129 +2025-02-26 08:17:57,820 Train Loss: 0.0224589, Val Loss: 0.0445981 +2025-02-26 08:17:57,821 Epoch 522/2000 +2025-02-26 08:19:01,681 Current Learning Rate: 0.0033063104 +2025-02-26 08:19:01,682 Train Loss: 0.0224592, Val Loss: 0.0445978 +2025-02-26 08:19:01,682 Epoch 523/2000 +2025-02-26 08:20:04,278 Current Learning Rate: 0.0032326258 +2025-02-26 08:20:04,279 Train Loss: 0.0224592, Val Loss: 0.0445971 +2025-02-26 08:20:04,279 Epoch 524/2000 +2025-02-26 08:21:06,970 Current Learning Rate: 0.0031593772 +2025-02-26 08:21:06,971 Train Loss: 0.0224590, Val Loss: 0.0446030 +2025-02-26 08:21:06,971 Epoch 525/2000 +2025-02-26 08:22:09,012 Current Learning Rate: 0.0030865828 +2025-02-26 08:22:09,012 Train Loss: 0.0224597, Val Loss: 0.0445995 +2025-02-26 08:22:09,012 Epoch 526/2000 +2025-02-26 08:23:10,880 Current Learning Rate: 0.0030142605 +2025-02-26 08:23:10,881 Train Loss: 0.0224607, Val Loss: 0.0446080 +2025-02-26 08:23:10,881 Epoch 527/2000 +2025-02-26 08:24:11,948 Current Learning Rate: 0.0029424282 +2025-02-26 08:24:11,949 Train Loss: 0.0224614, Val Loss: 0.0446073 +2025-02-26 08:24:11,949 Epoch 528/2000 +2025-02-26 08:25:13,741 Current Learning Rate: 0.0028711035 +2025-02-26 08:25:13,741 Train Loss: 0.0224615, Val Loss: 0.0446082 +2025-02-26 08:25:13,741 Epoch 529/2000 +2025-02-26 08:26:16,194 Current Learning Rate: 0.0028003042 +2025-02-26 
08:26:16,195 Train Loss: 0.0224605, Val Loss: 0.0446112 +2025-02-26 08:26:16,195 Epoch 530/2000 +2025-02-26 08:27:17,571 Current Learning Rate: 0.0027300475 +2025-02-26 08:27:17,572 Train Loss: 0.0224617, Val Loss: 0.0446082 +2025-02-26 08:27:17,572 Epoch 531/2000 +2025-02-26 08:28:19,140 Current Learning Rate: 0.0026603509 +2025-02-26 08:28:19,140 Train Loss: 0.0224619, Val Loss: 0.0446070 +2025-02-26 08:28:19,140 Epoch 532/2000 +2025-02-26 08:29:21,538 Current Learning Rate: 0.0025912316 +2025-02-26 08:29:21,538 Train Loss: 0.0224618, Val Loss: 0.0446058 +2025-02-26 08:29:21,538 Epoch 533/2000 +2025-02-26 08:30:23,081 Current Learning Rate: 0.0025227067 +2025-02-26 08:30:23,082 Train Loss: 0.0224624, Val Loss: 0.0446139 +2025-02-26 08:30:23,082 Epoch 534/2000 +2025-02-26 08:31:25,035 Current Learning Rate: 0.0024547929 +2025-02-26 08:31:25,035 Train Loss: 0.0224631, Val Loss: 0.0446118 +2025-02-26 08:31:25,035 Epoch 535/2000 +2025-02-26 08:32:26,930 Current Learning Rate: 0.0023875072 +2025-02-26 08:32:26,931 Train Loss: 0.0224633, Val Loss: 0.0446114 +2025-02-26 08:32:26,931 Epoch 536/2000 +2025-02-26 08:33:28,969 Current Learning Rate: 0.0023208660 +2025-02-26 08:33:28,969 Train Loss: 0.0224630, Val Loss: 0.0446116 +2025-02-26 08:33:28,970 Epoch 537/2000 +2025-02-26 08:34:30,122 Current Learning Rate: 0.0022548859 +2025-02-26 08:34:30,123 Train Loss: 0.0224634, Val Loss: 0.0446118 +2025-02-26 08:34:30,123 Epoch 538/2000 +2025-02-26 08:35:32,005 Current Learning Rate: 0.0021895831 +2025-02-26 08:35:32,006 Train Loss: 0.0224628, Val Loss: 0.0446127 +2025-02-26 08:35:32,006 Epoch 539/2000 +2025-02-26 08:36:35,783 Current Learning Rate: 0.0021249737 +2025-02-26 08:36:35,783 Train Loss: 0.0224629, Val Loss: 0.0446127 +2025-02-26 08:36:35,784 Epoch 540/2000 +2025-02-26 08:37:37,420 Current Learning Rate: 0.0020610737 +2025-02-26 08:37:37,421 Train Loss: 0.0224635, Val Loss: 0.0446132 +2025-02-26 08:37:37,421 Epoch 541/2000 +2025-02-26 08:38:38,722 Current Learning 
Rate: 0.0019978989 +2025-02-26 08:38:38,723 Train Loss: 0.0224625, Val Loss: 0.0446113 +2025-02-26 08:38:38,723 Epoch 542/2000 +2025-02-26 08:39:41,126 Current Learning Rate: 0.0019354647 +2025-02-26 08:39:41,127 Train Loss: 0.0224626, Val Loss: 0.0446104 +2025-02-26 08:39:41,127 Epoch 543/2000 +2025-02-26 08:40:44,227 Current Learning Rate: 0.0018737867 +2025-02-26 08:40:44,228 Train Loss: 0.0224624, Val Loss: 0.0446099 +2025-02-26 08:40:44,228 Epoch 544/2000 +2025-02-26 08:41:46,068 Current Learning Rate: 0.0018128801 +2025-02-26 08:41:46,069 Train Loss: 0.0224632, Val Loss: 0.0446114 +2025-02-26 08:41:46,069 Epoch 545/2000 +2025-02-26 08:42:50,628 Current Learning Rate: 0.0017527598 +2025-02-26 08:42:50,629 Train Loss: 0.0224630, Val Loss: 0.0446118 +2025-02-26 08:42:50,630 Epoch 546/2000 +2025-02-26 08:43:53,443 Current Learning Rate: 0.0016934407 +2025-02-26 08:43:53,444 Train Loss: 0.0224626, Val Loss: 0.0446177 +2025-02-26 08:43:53,444 Epoch 547/2000 +2025-02-26 08:44:56,219 Current Learning Rate: 0.0016349374 +2025-02-26 08:44:56,220 Train Loss: 0.0224646, Val Loss: 0.0446206 +2025-02-26 08:44:56,220 Epoch 548/2000 +2025-02-26 08:45:59,920 Current Learning Rate: 0.0015772645 +2025-02-26 08:45:59,920 Train Loss: 0.0224658, Val Loss: 0.0446192 +2025-02-26 08:45:59,921 Epoch 549/2000 +2025-02-26 08:47:04,130 Current Learning Rate: 0.0015204360 +2025-02-26 08:47:04,130 Train Loss: 0.0224658, Val Loss: 0.0446198 +2025-02-26 08:47:04,131 Epoch 550/2000 +2025-02-26 08:48:08,202 Current Learning Rate: 0.0014644661 +2025-02-26 08:48:08,203 Train Loss: 0.0224655, Val Loss: 0.0446205 +2025-02-26 08:48:08,203 Epoch 551/2000 +2025-02-26 08:49:10,409 Current Learning Rate: 0.0014093685 +2025-02-26 08:49:10,409 Train Loss: 0.0224655, Val Loss: 0.0446187 +2025-02-26 08:49:10,409 Epoch 552/2000 +2025-02-26 08:50:14,062 Current Learning Rate: 0.0013551569 +2025-02-26 08:50:14,063 Train Loss: 0.0224648, Val Loss: 0.0446199 +2025-02-26 08:50:14,063 Epoch 553/2000 +2025-02-26 
08:51:17,960 Current Learning Rate: 0.0013018445 +2025-02-26 08:51:17,960 Train Loss: 0.0224643, Val Loss: 0.0446190 +2025-02-26 08:51:17,961 Epoch 554/2000 +2025-02-26 08:52:19,903 Current Learning Rate: 0.0012494447 +2025-02-26 08:52:19,904 Train Loss: 0.0224648, Val Loss: 0.0446173 +2025-02-26 08:52:19,904 Epoch 555/2000 +2025-02-26 08:53:23,876 Current Learning Rate: 0.0011979702 +2025-02-26 08:53:23,877 Train Loss: 0.0224648, Val Loss: 0.0446177 +2025-02-26 08:53:23,877 Epoch 556/2000 +2025-02-26 08:54:27,105 Current Learning Rate: 0.0011474338 +2025-02-26 08:54:27,105 Train Loss: 0.0224652, Val Loss: 0.0446204 +2025-02-26 08:54:27,105 Epoch 557/2000 +2025-02-26 08:55:29,014 Current Learning Rate: 0.0010978480 +2025-02-26 08:55:29,016 Train Loss: 0.0224657, Val Loss: 0.0446204 +2025-02-26 08:55:29,016 Epoch 558/2000 +2025-02-26 08:56:31,800 Current Learning Rate: 0.0010492249 +2025-02-26 08:56:31,801 Train Loss: 0.0224653, Val Loss: 0.0446198 +2025-02-26 08:56:31,801 Epoch 559/2000 +2025-02-26 08:57:33,983 Current Learning Rate: 0.0010015767 +2025-02-26 08:57:33,984 Train Loss: 0.0224655, Val Loss: 0.0446206 +2025-02-26 08:57:33,984 Epoch 560/2000 +2025-02-26 08:58:36,032 Current Learning Rate: 0.0009549150 +2025-02-26 08:58:36,032 Train Loss: 0.0224649, Val Loss: 0.0446204 +2025-02-26 08:58:36,033 Epoch 561/2000 +2025-02-26 08:59:38,610 Current Learning Rate: 0.0009092514 +2025-02-26 08:59:38,610 Train Loss: 0.0224654, Val Loss: 0.0446210 +2025-02-26 08:59:38,610 Epoch 562/2000 +2025-02-26 09:00:41,535 Current Learning Rate: 0.0008645971 +2025-02-26 09:00:41,536 Train Loss: 0.0224651, Val Loss: 0.0446201 +2025-02-26 09:00:41,536 Epoch 563/2000 +2025-02-26 09:01:44,057 Current Learning Rate: 0.0008209632 +2025-02-26 09:01:44,058 Train Loss: 0.0224659, Val Loss: 0.0446214 +2025-02-26 09:01:44,058 Epoch 564/2000 +2025-02-26 09:02:47,182 Current Learning Rate: 0.0007783604 +2025-02-26 09:02:47,182 Train Loss: 0.0224657, Val Loss: 0.0446227 +2025-02-26 
09:02:47,182 Epoch 565/2000 +2025-02-26 09:03:52,925 Current Learning Rate: 0.0007367992 +2025-02-26 09:03:52,926 Train Loss: 0.0224662, Val Loss: 0.0446234 +2025-02-26 09:03:52,926 Epoch 566/2000 +2025-02-26 09:04:57,100 Current Learning Rate: 0.0006962899 +2025-02-26 09:04:57,101 Train Loss: 0.0224656, Val Loss: 0.0446213 +2025-02-26 09:04:57,101 Epoch 567/2000 +2025-02-26 09:05:59,130 Current Learning Rate: 0.0006568424 +2025-02-26 09:05:59,130 Train Loss: 0.0224657, Val Loss: 0.0446213 +2025-02-26 09:05:59,131 Epoch 568/2000 +2025-02-26 09:07:03,185 Current Learning Rate: 0.0006184666 +2025-02-26 09:07:03,185 Train Loss: 0.0224654, Val Loss: 0.0446217 +2025-02-26 09:07:03,186 Epoch 569/2000 +2025-02-26 09:08:07,403 Current Learning Rate: 0.0005811718 +2025-02-26 09:08:07,403 Train Loss: 0.0224658, Val Loss: 0.0446240 +2025-02-26 09:08:07,403 Epoch 570/2000 +2025-02-26 09:09:11,497 Current Learning Rate: 0.0005449674 +2025-02-26 09:09:11,497 Train Loss: 0.0224656, Val Loss: 0.0446237 +2025-02-26 09:09:11,497 Epoch 571/2000 +2025-02-26 09:10:17,490 Current Learning Rate: 0.0005098621 +2025-02-26 09:10:17,491 Train Loss: 0.0224650, Val Loss: 0.0446226 +2025-02-26 09:10:17,491 Epoch 572/2000 +2025-02-26 09:11:20,669 Current Learning Rate: 0.0004758647 +2025-02-26 09:11:20,670 Train Loss: 0.0224657, Val Loss: 0.0446221 +2025-02-26 09:11:20,670 Epoch 573/2000 +2025-02-26 09:12:24,076 Current Learning Rate: 0.0004429836 +2025-02-26 09:12:24,077 Train Loss: 0.0224660, Val Loss: 0.0446221 +2025-02-26 09:12:24,077 Epoch 574/2000 +2025-02-26 09:13:25,871 Current Learning Rate: 0.0004112269 +2025-02-26 09:13:25,872 Train Loss: 0.0224657, Val Loss: 0.0446213 +2025-02-26 09:13:25,872 Epoch 575/2000 +2025-02-26 09:14:27,878 Current Learning Rate: 0.0003806023 +2025-02-26 09:14:27,879 Train Loss: 0.0224657, Val Loss: 0.0446228 +2025-02-26 09:14:27,879 Epoch 576/2000 +2025-02-26 09:15:30,606 Current Learning Rate: 0.0003511176 +2025-02-26 09:15:30,607 Train Loss: 0.0224653, Val 
Loss: 0.0446211 +2025-02-26 09:15:30,607 Epoch 577/2000 +2025-02-26 09:16:33,420 Current Learning Rate: 0.0003227798 +2025-02-26 09:16:33,420 Train Loss: 0.0224650, Val Loss: 0.0446196 +2025-02-26 09:16:33,421 Epoch 578/2000 +2025-02-26 09:17:38,049 Current Learning Rate: 0.0002955962 +2025-02-26 09:17:38,049 Train Loss: 0.0224655, Val Loss: 0.0446201 +2025-02-26 09:17:38,050 Epoch 579/2000 +2025-02-26 09:18:40,943 Current Learning Rate: 0.0002695732 +2025-02-26 09:18:40,943 Train Loss: 0.0224651, Val Loss: 0.0446207 +2025-02-26 09:18:40,944 Epoch 580/2000 +2025-02-26 09:19:42,152 Current Learning Rate: 0.0002447174 +2025-02-26 09:19:42,153 Train Loss: 0.0224645, Val Loss: 0.0446199 +2025-02-26 09:19:42,153 Epoch 581/2000 +2025-02-26 09:20:45,599 Current Learning Rate: 0.0002210349 +2025-02-26 09:20:45,599 Train Loss: 0.0224650, Val Loss: 0.0446208 +2025-02-26 09:20:45,599 Epoch 582/2000 +2025-02-26 09:21:47,859 Current Learning Rate: 0.0001985316 +2025-02-26 09:21:47,860 Train Loss: 0.0224649, Val Loss: 0.0446206 +2025-02-26 09:21:47,860 Epoch 583/2000 +2025-02-26 09:22:49,154 Current Learning Rate: 0.0001772129 +2025-02-26 09:22:49,155 Train Loss: 0.0224655, Val Loss: 0.0446199 +2025-02-26 09:22:49,155 Epoch 584/2000 +2025-02-26 09:23:51,228 Current Learning Rate: 0.0001570842 +2025-02-26 09:23:51,229 Train Loss: 0.0224652, Val Loss: 0.0446209 +2025-02-26 09:23:51,229 Epoch 585/2000 +2025-02-26 09:24:53,577 Current Learning Rate: 0.0001381504 +2025-02-26 09:24:53,577 Train Loss: 0.0224652, Val Loss: 0.0446214 +2025-02-26 09:24:53,577 Epoch 586/2000 +2025-02-26 09:25:54,210 Current Learning Rate: 0.0001204162 +2025-02-26 09:25:54,211 Train Loss: 0.0224657, Val Loss: 0.0446213 +2025-02-26 09:25:54,211 Epoch 587/2000 +2025-02-26 09:26:55,063 Current Learning Rate: 0.0001038859 +2025-02-26 09:26:55,064 Train Loss: 0.0224649, Val Loss: 0.0446209 +2025-02-26 09:26:55,064 Epoch 588/2000 +2025-02-26 09:27:57,270 Current Learning Rate: 0.0000885637 +2025-02-26 
09:27:57,270 Train Loss: 0.0224649, Val Loss: 0.0446213 +2025-02-26 09:27:57,270 Epoch 589/2000 +2025-02-26 09:28:59,848 Current Learning Rate: 0.0000744534 +2025-02-26 09:28:59,848 Train Loss: 0.0224657, Val Loss: 0.0446211 +2025-02-26 09:28:59,848 Epoch 590/2000 +2025-02-26 09:30:01,673 Current Learning Rate: 0.0000615583 +2025-02-26 09:30:01,673 Train Loss: 0.0224652, Val Loss: 0.0446210 +2025-02-26 09:30:01,674 Epoch 591/2000 +2025-02-26 09:31:04,033 Current Learning Rate: 0.0000498817 +2025-02-26 09:31:04,033 Train Loss: 0.0224655, Val Loss: 0.0446217 +2025-02-26 09:31:04,034 Epoch 592/2000 +2025-02-26 09:32:06,182 Current Learning Rate: 0.0000394265 +2025-02-26 09:32:06,182 Train Loss: 0.0224653, Val Loss: 0.0446210 +2025-02-26 09:32:06,182 Epoch 593/2000 +2025-02-26 09:33:08,098 Current Learning Rate: 0.0000301952 +2025-02-26 09:33:08,099 Train Loss: 0.0224654, Val Loss: 0.0446220 +2025-02-26 09:33:08,099 Epoch 594/2000 +2025-02-26 09:34:09,725 Current Learning Rate: 0.0000221902 +2025-02-26 09:34:09,725 Train Loss: 0.0224652, Val Loss: 0.0446214 +2025-02-26 09:34:09,725 Epoch 595/2000 +2025-02-26 09:35:12,132 Current Learning Rate: 0.0000154133 +2025-02-26 09:35:12,133 Train Loss: 0.0224654, Val Loss: 0.0446211 +2025-02-26 09:35:12,133 Epoch 596/2000 +2025-02-26 09:36:14,476 Current Learning Rate: 0.0000098664 +2025-02-26 09:36:14,477 Train Loss: 0.0224656, Val Loss: 0.0446212 +2025-02-26 09:36:14,477 Epoch 597/2000 +2025-02-26 09:37:16,761 Current Learning Rate: 0.0000055506 +2025-02-26 09:37:16,762 Train Loss: 0.0224652, Val Loss: 0.0446210 +2025-02-26 09:37:16,762 Epoch 598/2000 +2025-02-26 09:38:18,461 Current Learning Rate: 0.0000024672 +2025-02-26 09:38:18,462 Train Loss: 0.0224650, Val Loss: 0.0446212 +2025-02-26 09:38:18,462 Epoch 599/2000 +2025-02-26 09:39:19,239 Current Learning Rate: 0.0000006168 +2025-02-26 09:39:19,240 Train Loss: 0.0224645, Val Loss: 0.0446212 +2025-02-26 09:39:19,240 Epoch 600/2000 +2025-02-26 09:40:21,570 Current Learning 
Rate: 0.0000000000 +2025-02-26 09:40:21,571 Train Loss: 0.0224649, Val Loss: 0.0446212 +2025-02-26 09:40:21,571 Epoch 601/2000 +2025-02-26 09:41:24,442 Current Learning Rate: 0.0000006168 +2025-02-26 09:41:24,442 Train Loss: 0.0224652, Val Loss: 0.0446212 +2025-02-26 09:41:24,443 Epoch 602/2000 +2025-02-26 09:42:25,187 Current Learning Rate: 0.0000024672 +2025-02-26 09:42:25,187 Train Loss: 0.0224653, Val Loss: 0.0446212 +2025-02-26 09:42:25,188 Epoch 603/2000 +2025-02-26 09:43:27,546 Current Learning Rate: 0.0000055506 +2025-02-26 09:43:27,546 Train Loss: 0.0224659, Val Loss: 0.0446212 +2025-02-26 09:43:27,547 Epoch 604/2000 +2025-02-26 09:44:28,930 Current Learning Rate: 0.0000098664 +2025-02-26 09:44:28,931 Train Loss: 0.0224655, Val Loss: 0.0446211 +2025-02-26 09:44:28,931 Epoch 605/2000 +2025-02-26 09:45:32,809 Current Learning Rate: 0.0000154133 +2025-02-26 09:45:32,810 Train Loss: 0.0224653, Val Loss: 0.0446209 +2025-02-26 09:45:32,810 Epoch 606/2000 +2025-02-26 09:46:34,393 Current Learning Rate: 0.0000221902 +2025-02-26 09:46:34,393 Train Loss: 0.0224654, Val Loss: 0.0446211 +2025-02-26 09:46:34,393 Epoch 607/2000 +2025-02-26 09:47:36,223 Current Learning Rate: 0.0000301952 +2025-02-26 09:47:36,224 Train Loss: 0.0224648, Val Loss: 0.0446209 +2025-02-26 09:47:36,224 Epoch 608/2000 +2025-02-26 09:48:41,181 Current Learning Rate: 0.0000394265 +2025-02-26 09:48:41,181 Train Loss: 0.0224657, Val Loss: 0.0446208 +2025-02-26 09:48:41,181 Epoch 609/2000 +2025-02-26 09:49:46,124 Current Learning Rate: 0.0000498817 +2025-02-26 09:49:46,124 Train Loss: 0.0224655, Val Loss: 0.0446206 +2025-02-26 09:49:46,124 Epoch 610/2000 +2025-02-26 09:50:49,815 Current Learning Rate: 0.0000615583 +2025-02-26 09:50:49,816 Train Loss: 0.0224651, Val Loss: 0.0446212 +2025-02-26 09:50:49,816 Epoch 611/2000 +2025-02-26 09:51:53,427 Current Learning Rate: 0.0000744534 +2025-02-26 09:51:53,427 Train Loss: 0.0224656, Val Loss: 0.0446213 +2025-02-26 09:51:53,428 Epoch 612/2000 +2025-02-26 
09:52:55,299 Current Learning Rate: 0.0000885637 +2025-02-26 09:52:55,300 Train Loss: 0.0224653, Val Loss: 0.0446212 +2025-02-26 09:52:55,300 Epoch 613/2000 +2025-02-26 09:53:58,384 Current Learning Rate: 0.0001038859 +2025-02-26 09:53:58,385 Train Loss: 0.0224646, Val Loss: 0.0446206 +2025-02-26 09:53:58,385 Epoch 614/2000 +2025-02-26 09:54:59,848 Current Learning Rate: 0.0001204162 +2025-02-26 09:54:59,849 Train Loss: 0.0224655, Val Loss: 0.0446212 +2025-02-26 09:54:59,849 Epoch 615/2000 +2025-02-26 09:56:01,600 Current Learning Rate: 0.0001381504 +2025-02-26 09:56:01,601 Train Loss: 0.0224652, Val Loss: 0.0446212 +2025-02-26 09:56:01,601 Epoch 616/2000 +2025-02-26 09:57:06,195 Current Learning Rate: 0.0001570842 +2025-02-26 09:57:06,195 Train Loss: 0.0224655, Val Loss: 0.0446223 +2025-02-26 09:57:06,196 Epoch 617/2000 +2025-02-26 09:58:10,075 Current Learning Rate: 0.0001772129 +2025-02-26 09:58:10,076 Train Loss: 0.0224654, Val Loss: 0.0446213 +2025-02-26 09:58:10,076 Epoch 618/2000 +2025-02-26 09:59:13,289 Current Learning Rate: 0.0001985316 +2025-02-26 09:59:13,294 Train Loss: 0.0224654, Val Loss: 0.0446215 +2025-02-26 09:59:13,294 Epoch 619/2000 +2025-02-26 10:00:18,032 Current Learning Rate: 0.0002210349 +2025-02-26 10:00:18,033 Train Loss: 0.0224657, Val Loss: 0.0446214 +2025-02-26 10:00:18,033 Epoch 620/2000 +2025-02-26 10:01:22,360 Current Learning Rate: 0.0002447174 +2025-02-26 10:01:22,360 Train Loss: 0.0224657, Val Loss: 0.0446220 +2025-02-26 10:01:22,361 Epoch 621/2000 +2025-02-26 10:02:26,521 Current Learning Rate: 0.0002695732 +2025-02-26 10:02:26,522 Train Loss: 0.0224649, Val Loss: 0.0446214 +2025-02-26 10:02:26,522 Epoch 622/2000 +2025-02-26 10:03:28,813 Current Learning Rate: 0.0002955962 +2025-02-26 10:03:28,813 Train Loss: 0.0224651, Val Loss: 0.0446222 +2025-02-26 10:03:28,813 Epoch 623/2000 +2025-02-26 10:04:31,551 Current Learning Rate: 0.0003227798 +2025-02-26 10:04:31,551 Train Loss: 0.0224645, Val Loss: 0.0446215 +2025-02-26 
10:04:31,552 Epoch 624/2000 +2025-02-26 10:05:34,060 Current Learning Rate: 0.0003511176 +2025-02-26 10:05:34,061 Train Loss: 0.0224656, Val Loss: 0.0446211 +2025-02-26 10:05:34,061 Epoch 625/2000 +2025-02-26 10:06:37,585 Current Learning Rate: 0.0003806023 +2025-02-26 10:06:37,586 Train Loss: 0.0224651, Val Loss: 0.0446207 +2025-02-26 10:06:37,587 Epoch 626/2000 +2025-02-26 10:07:39,560 Current Learning Rate: 0.0004112269 +2025-02-26 10:07:39,560 Train Loss: 0.0224657, Val Loss: 0.0446228 +2025-02-26 10:07:39,561 Epoch 627/2000 +2025-02-26 10:08:39,646 Current Learning Rate: 0.0004429836 +2025-02-26 10:08:39,646 Train Loss: 0.0224654, Val Loss: 0.0446215 +2025-02-26 10:08:39,646 Epoch 628/2000 +2025-02-26 10:09:42,509 Current Learning Rate: 0.0004758647 +2025-02-26 10:09:42,509 Train Loss: 0.0224661, Val Loss: 0.0446223 +2025-02-26 10:09:42,509 Epoch 629/2000 +2025-02-26 10:10:43,441 Current Learning Rate: 0.0005098621 +2025-02-26 10:10:43,441 Train Loss: 0.0224655, Val Loss: 0.0446216 +2025-02-26 10:10:43,442 Epoch 630/2000 +2025-02-26 10:11:46,625 Current Learning Rate: 0.0005449674 +2025-02-26 10:11:46,626 Train Loss: 0.0224658, Val Loss: 0.0446225 +2025-02-26 10:11:46,626 Epoch 631/2000 +2025-02-26 10:12:49,017 Current Learning Rate: 0.0005811718 +2025-02-26 10:12:49,017 Train Loss: 0.0224655, Val Loss: 0.0446209 +2025-02-26 10:12:49,017 Epoch 632/2000 +2025-02-26 10:13:51,173 Current Learning Rate: 0.0006184666 +2025-02-26 10:13:51,174 Train Loss: 0.0224657, Val Loss: 0.0446257 +2025-02-26 10:13:51,174 Epoch 633/2000 +2025-02-26 10:14:52,893 Current Learning Rate: 0.0006568424 +2025-02-26 10:14:52,893 Train Loss: 0.0224660, Val Loss: 0.0446237 +2025-02-26 10:14:52,893 Epoch 634/2000 +2025-02-26 10:15:54,609 Current Learning Rate: 0.0006962899 +2025-02-26 10:15:54,610 Train Loss: 0.0224668, Val Loss: 0.0446239 +2025-02-26 10:15:54,610 Epoch 635/2000 +2025-02-26 10:16:56,211 Current Learning Rate: 0.0007367992 +2025-02-26 10:16:56,212 Train Loss: 0.0224658, Val 
Loss: 0.0446246 +2025-02-26 10:16:56,212 Epoch 636/2000 +2025-02-26 10:17:59,119 Current Learning Rate: 0.0007783604 +2025-02-26 10:17:59,119 Train Loss: 0.0224663, Val Loss: 0.0446231 +2025-02-26 10:17:59,120 Epoch 637/2000 +2025-02-26 10:19:00,952 Current Learning Rate: 0.0008209632 +2025-02-26 10:19:00,952 Train Loss: 0.0224660, Val Loss: 0.0446249 +2025-02-26 10:19:00,953 Epoch 638/2000 +2025-02-26 10:20:03,111 Current Learning Rate: 0.0008645971 +2025-02-26 10:20:03,111 Train Loss: 0.0224660, Val Loss: 0.0446238 +2025-02-26 10:20:03,112 Epoch 639/2000 +2025-02-26 10:21:05,732 Current Learning Rate: 0.0009092514 +2025-02-26 10:21:05,733 Train Loss: 0.0224657, Val Loss: 0.0446218 +2025-02-26 10:21:05,733 Epoch 640/2000 +2025-02-26 10:22:08,887 Current Learning Rate: 0.0009549150 +2025-02-26 10:22:08,888 Train Loss: 0.0224660, Val Loss: 0.0446247 +2025-02-26 10:22:08,888 Epoch 641/2000 +2025-02-26 10:23:13,978 Current Learning Rate: 0.0010015767 +2025-02-26 10:23:13,979 Train Loss: 0.0224666, Val Loss: 0.0446234 +2025-02-26 10:23:13,979 Epoch 642/2000 +2025-02-26 10:24:17,414 Current Learning Rate: 0.0010492249 +2025-02-26 10:24:17,414 Train Loss: 0.0224662, Val Loss: 0.0446237 +2025-02-26 10:24:17,414 Epoch 643/2000 +2025-02-26 10:25:20,858 Current Learning Rate: 0.0010978480 +2025-02-26 10:25:20,858 Train Loss: 0.0224659, Val Loss: 0.0446241 +2025-02-26 10:25:20,859 Epoch 644/2000 +2025-02-26 10:26:23,252 Current Learning Rate: 0.0011474338 +2025-02-26 10:26:23,253 Train Loss: 0.0224655, Val Loss: 0.0446222 +2025-02-26 10:26:23,253 Epoch 645/2000 +2025-02-26 10:27:25,658 Current Learning Rate: 0.0011979702 +2025-02-26 10:27:25,659 Train Loss: 0.0224655, Val Loss: 0.0446236 +2025-02-26 10:27:25,660 Epoch 646/2000 +2025-02-26 10:28:28,976 Current Learning Rate: 0.0012494447 +2025-02-26 10:28:28,977 Train Loss: 0.0224657, Val Loss: 0.0446228 +2025-02-26 10:28:28,977 Epoch 647/2000 +2025-02-26 10:29:31,800 Current Learning Rate: 0.0013018445 +2025-02-26 
10:29:31,801 Train Loss: 0.0224664, Val Loss: 0.0446242 +2025-02-26 10:29:31,801 Epoch 648/2000 +2025-02-26 10:30:35,599 Current Learning Rate: 0.0013551569 +2025-02-26 10:30:35,599 Train Loss: 0.0224671, Val Loss: 0.0446230 +2025-02-26 10:30:35,600 Epoch 649/2000 +2025-02-26 10:31:37,922 Current Learning Rate: 0.0014093685 +2025-02-26 10:31:37,923 Train Loss: 0.0224668, Val Loss: 0.0446235 +2025-02-26 10:31:37,923 Epoch 650/2000 +2025-02-26 10:32:41,605 Current Learning Rate: 0.0014644661 +2025-02-26 10:32:41,606 Train Loss: 0.0224663, Val Loss: 0.0446224 +2025-02-26 10:32:41,607 Epoch 651/2000 +2025-02-26 10:33:43,981 Current Learning Rate: 0.0015204360 +2025-02-26 10:33:43,981 Train Loss: 0.0224660, Val Loss: 0.0446218 +2025-02-26 10:33:43,982 Epoch 652/2000 +2025-02-26 10:34:46,462 Current Learning Rate: 0.0015772645 +2025-02-26 10:34:46,463 Train Loss: 0.0224659, Val Loss: 0.0446219 +2025-02-26 10:34:46,463 Epoch 653/2000 +2025-02-26 10:35:47,385 Current Learning Rate: 0.0016349374 +2025-02-26 10:35:47,385 Train Loss: 0.0224662, Val Loss: 0.0446231 +2025-02-26 10:35:47,386 Epoch 654/2000 +2025-02-26 10:36:50,260 Current Learning Rate: 0.0016934407 +2025-02-26 10:36:50,260 Train Loss: 0.0224658, Val Loss: 0.0446231 +2025-02-26 10:36:50,261 Epoch 655/2000 +2025-02-26 10:37:53,145 Current Learning Rate: 0.0017527598 +2025-02-26 10:37:53,146 Train Loss: 0.0224667, Val Loss: 0.0446235 +2025-02-26 10:37:53,146 Epoch 656/2000 +2025-02-26 10:38:56,163 Current Learning Rate: 0.0018128801 +2025-02-26 10:38:56,163 Train Loss: 0.0224667, Val Loss: 0.0446246 +2025-02-26 10:38:56,164 Epoch 657/2000 +2025-02-26 10:39:59,621 Current Learning Rate: 0.0018737867 +2025-02-26 10:39:59,621 Train Loss: 0.0224661, Val Loss: 0.0446242 +2025-02-26 10:39:59,622 Epoch 658/2000 +2025-02-26 10:41:00,961 Current Learning Rate: 0.0019354647 +2025-02-26 10:41:00,961 Train Loss: 0.0224665, Val Loss: 0.0446218 +2025-02-26 10:41:00,962 Epoch 659/2000 +2025-02-26 10:42:04,404 Current Learning 
Rate: 0.0019978989 +2025-02-26 10:42:04,405 Train Loss: 0.0224663, Val Loss: 0.0446234 +2025-02-26 10:42:04,406 Epoch 660/2000 +2025-02-26 10:43:08,744 Current Learning Rate: 0.0020610737 +2025-02-26 10:43:08,745 Train Loss: 0.0224663, Val Loss: 0.0446229 +2025-02-26 10:43:08,745 Epoch 661/2000 +2025-02-26 10:44:12,042 Current Learning Rate: 0.0021249737 +2025-02-26 10:44:12,042 Train Loss: 0.0224665, Val Loss: 0.0446226 +2025-02-26 10:44:12,043 Epoch 662/2000 +2025-02-26 10:45:14,983 Current Learning Rate: 0.0021895831 +2025-02-26 10:45:14,983 Train Loss: 0.0224652, Val Loss: 0.0446168 +2025-02-26 10:45:14,984 Epoch 663/2000 +2025-02-26 10:46:17,790 Current Learning Rate: 0.0022548859 +2025-02-26 10:46:17,791 Train Loss: 0.0224649, Val Loss: 0.0446141 +2025-02-26 10:46:17,791 Epoch 664/2000 +2025-02-26 10:47:23,442 Current Learning Rate: 0.0023208660 +2025-02-26 10:47:23,443 Train Loss: 0.0224642, Val Loss: 0.0446134 +2025-02-26 10:47:23,443 Epoch 665/2000 +2025-02-26 10:48:24,949 Current Learning Rate: 0.0023875072 +2025-02-26 10:48:24,950 Train Loss: 0.0224642, Val Loss: 0.0446169 +2025-02-26 10:48:24,950 Epoch 666/2000 +2025-02-26 10:49:27,358 Current Learning Rate: 0.0024547929 +2025-02-26 10:49:27,359 Train Loss: 0.0224640, Val Loss: 0.0446138 +2025-02-26 10:49:27,359 Epoch 667/2000 +2025-02-26 10:50:28,701 Current Learning Rate: 0.0025227067 +2025-02-26 10:50:28,701 Train Loss: 0.0224643, Val Loss: 0.0446129 +2025-02-26 10:50:28,701 Epoch 668/2000 +2025-02-26 10:51:29,795 Current Learning Rate: 0.0025912316 +2025-02-26 10:51:29,796 Train Loss: 0.0224642, Val Loss: 0.0446156 +2025-02-26 10:51:29,796 Epoch 669/2000 +2025-02-26 10:52:30,869 Current Learning Rate: 0.0026603509 +2025-02-26 10:52:30,870 Train Loss: 0.0224637, Val Loss: 0.0446146 +2025-02-26 10:52:30,871 Epoch 670/2000 +2025-02-26 10:53:33,398 Current Learning Rate: 0.0027300475 +2025-02-26 10:53:33,399 Train Loss: 0.0224642, Val Loss: 0.0446192 +2025-02-26 10:53:33,399 Epoch 671/2000 +2025-02-26 
10:54:35,652 Current Learning Rate: 0.0028003042 +2025-02-26 10:54:35,653 Train Loss: 0.0224631, Val Loss: 0.0446178 +2025-02-26 10:54:35,653 Epoch 672/2000 +2025-02-26 10:55:40,802 Current Learning Rate: 0.0028711035 +2025-02-26 10:55:40,803 Train Loss: 0.0224632, Val Loss: 0.0446112 +2025-02-26 10:55:40,804 Epoch 673/2000 +2025-02-26 10:56:43,050 Current Learning Rate: 0.0029424282 +2025-02-26 10:56:43,051 Train Loss: 0.0224630, Val Loss: 0.0446111 +2025-02-26 10:56:43,051 Epoch 674/2000 +2025-02-26 10:57:47,048 Current Learning Rate: 0.0030142605 +2025-02-26 10:57:47,048 Train Loss: 0.0224630, Val Loss: 0.0446126 +2025-02-26 10:57:47,049 Epoch 675/2000 +2025-02-26 10:58:50,234 Current Learning Rate: 0.0030865828 +2025-02-26 10:58:50,235 Train Loss: 0.0224630, Val Loss: 0.0446111 +2025-02-26 10:58:50,235 Epoch 676/2000 +2025-02-26 10:59:51,749 Current Learning Rate: 0.0031593772 +2025-02-26 10:59:51,749 Train Loss: 0.0224634, Val Loss: 0.0446125 +2025-02-26 10:59:51,749 Epoch 677/2000 +2025-02-26 11:00:54,049 Current Learning Rate: 0.0032326258 +2025-02-26 11:00:54,049 Train Loss: 0.0224641, Val Loss: 0.0446175 +2025-02-26 11:00:54,049 Epoch 678/2000 +2025-02-26 11:01:58,274 Current Learning Rate: 0.0033063104 +2025-02-26 11:01:58,274 Train Loss: 0.0224637, Val Loss: 0.0446151 +2025-02-26 11:01:58,275 Epoch 679/2000 +2025-02-26 11:03:00,758 Current Learning Rate: 0.0033804129 +2025-02-26 11:03:00,759 Train Loss: 0.0224642, Val Loss: 0.0446138 +2025-02-26 11:03:00,759 Epoch 680/2000 +2025-02-26 11:04:04,390 Current Learning Rate: 0.0034549150 +2025-02-26 11:04:04,390 Train Loss: 0.0224642, Val Loss: 0.0446159 +2025-02-26 11:04:04,391 Epoch 681/2000 +2025-02-26 11:05:07,751 Current Learning Rate: 0.0035297984 +2025-02-26 11:05:07,752 Train Loss: 0.0224643, Val Loss: 0.0446144 +2025-02-26 11:05:07,752 Epoch 682/2000 +2025-02-26 11:06:10,515 Current Learning Rate: 0.0036050445 +2025-02-26 11:06:10,515 Train Loss: 0.0224648, Val Loss: 0.0446146 +2025-02-26 
11:06:10,516 Epoch 683/2000 +2025-02-26 11:07:13,663 Current Learning Rate: 0.0036806348 +2025-02-26 11:07:13,663 Train Loss: 0.0224645, Val Loss: 0.0446158 +2025-02-26 11:07:13,664 Epoch 684/2000 +2025-02-26 11:08:15,634 Current Learning Rate: 0.0037565506 +2025-02-26 11:08:15,634 Train Loss: 0.0224648, Val Loss: 0.0446184 +2025-02-26 11:08:15,635 Epoch 685/2000 +2025-02-26 11:09:19,604 Current Learning Rate: 0.0038327732 +2025-02-26 11:09:19,604 Train Loss: 0.0224636, Val Loss: 0.0446098 +2025-02-26 11:09:19,605 Epoch 686/2000 +2025-02-26 11:10:24,736 Current Learning Rate: 0.0039092838 +2025-02-26 11:10:24,736 Train Loss: 0.0224610, Val Loss: 0.0446041 +2025-02-26 11:10:24,737 Epoch 687/2000 +2025-02-26 11:11:27,052 Current Learning Rate: 0.0039860635 +2025-02-26 11:11:27,052 Train Loss: 0.0224611, Val Loss: 0.0446069 +2025-02-26 11:11:27,053 Epoch 688/2000 +2025-02-26 11:12:28,761 Current Learning Rate: 0.0040630934 +2025-02-26 11:12:28,761 Train Loss: 0.0224610, Val Loss: 0.0446111 +2025-02-26 11:12:28,761 Epoch 689/2000 +2025-02-26 11:13:32,036 Current Learning Rate: 0.0041403545 +2025-02-26 11:13:32,037 Train Loss: 0.0224622, Val Loss: 0.0446113 +2025-02-26 11:13:32,037 Epoch 690/2000 +2025-02-26 11:14:33,452 Current Learning Rate: 0.0042178277 +2025-02-26 11:14:33,452 Train Loss: 0.0224621, Val Loss: 0.0446116 +2025-02-26 11:14:33,453 Epoch 691/2000 +2025-02-26 11:15:36,058 Current Learning Rate: 0.0042954938 +2025-02-26 11:15:36,059 Train Loss: 0.0224625, Val Loss: 0.0446170 +2025-02-26 11:15:36,059 Epoch 692/2000 +2025-02-26 11:16:39,105 Current Learning Rate: 0.0043733338 +2025-02-26 11:16:39,106 Train Loss: 0.0224641, Val Loss: 0.0446174 +2025-02-26 11:16:39,106 Epoch 693/2000 +2025-02-26 11:17:40,293 Current Learning Rate: 0.0044513284 +2025-02-26 11:17:40,293 Train Loss: 0.0224644, Val Loss: 0.0446116 +2025-02-26 11:17:40,293 Epoch 694/2000 +2025-02-26 11:18:43,090 Current Learning Rate: 0.0045294584 +2025-02-26 11:18:43,090 Train Loss: 0.0224635, Val 
Loss: 0.0446126 +2025-02-26 11:18:43,091 Epoch 695/2000 +2025-02-26 11:19:45,253 Current Learning Rate: 0.0046077045 +2025-02-26 11:19:45,253 Train Loss: 0.0224636, Val Loss: 0.0446179 +2025-02-26 11:19:45,253 Epoch 696/2000 +2025-02-26 11:20:47,757 Current Learning Rate: 0.0046860474 +2025-02-26 11:20:47,757 Train Loss: 0.0224648, Val Loss: 0.0446207 +2025-02-26 11:20:47,757 Epoch 697/2000 +2025-02-26 11:21:52,365 Current Learning Rate: 0.0047644677 +2025-02-26 11:21:52,365 Train Loss: 0.0224663, Val Loss: 0.0446204 +2025-02-26 11:21:52,365 Epoch 698/2000 +2025-02-26 11:22:57,945 Current Learning Rate: 0.0048429462 +2025-02-26 11:22:57,945 Train Loss: 0.0224669, Val Loss: 0.0446132 +2025-02-26 11:22:57,945 Epoch 699/2000 +2025-02-26 11:24:00,391 Current Learning Rate: 0.0049214634 +2025-02-26 11:24:00,392 Train Loss: 0.0224649, Val Loss: 0.0446137 +2025-02-26 11:24:00,392 Epoch 700/2000 +2025-02-26 11:25:04,280 Current Learning Rate: 0.0050000000 +2025-02-26 11:25:04,280 Train Loss: 0.0224650, Val Loss: 0.0446222 +2025-02-26 11:25:04,281 Epoch 701/2000 +2025-02-26 11:26:08,512 Current Learning Rate: 0.0050785366 +2025-02-26 11:26:08,513 Train Loss: 0.0224654, Val Loss: 0.0446155 +2025-02-26 11:26:08,513 Epoch 702/2000 +2025-02-26 11:27:12,481 Current Learning Rate: 0.0051570538 +2025-02-26 11:27:12,481 Train Loss: 0.0224650, Val Loss: 0.0446171 +2025-02-26 11:27:12,481 Epoch 703/2000 +2025-02-26 11:28:15,476 Current Learning Rate: 0.0052355323 +2025-02-26 11:28:15,476 Train Loss: 0.0224660, Val Loss: 0.0446180 +2025-02-26 11:28:15,477 Epoch 704/2000 +2025-02-26 11:29:16,907 Current Learning Rate: 0.0053139526 +2025-02-26 11:29:16,907 Train Loss: 0.0224660, Val Loss: 0.0446222 +2025-02-26 11:29:16,907 Epoch 705/2000 +2025-02-26 11:30:17,967 Current Learning Rate: 0.0053922955 +2025-02-26 11:30:17,967 Train Loss: 0.0224658, Val Loss: 0.0446001 +2025-02-26 11:30:17,968 Epoch 706/2000 +2025-02-26 11:31:21,665 Current Learning Rate: 0.0054705416 +2025-02-26 
11:31:21,666 Train Loss: 0.0224600, Val Loss: 0.0445990 +2025-02-26 11:31:21,667 Epoch 707/2000 +2025-02-26 11:32:25,851 Current Learning Rate: 0.0055486716 +2025-02-26 11:32:25,852 Train Loss: 0.0224619, Val Loss: 0.0446068 +2025-02-26 11:32:25,852 Epoch 708/2000 +2025-02-26 11:33:30,598 Current Learning Rate: 0.0056266662 +2025-02-26 11:33:30,598 Train Loss: 0.0224623, Val Loss: 0.0446084 +2025-02-26 11:33:30,599 Epoch 709/2000 +2025-02-26 11:34:33,960 Current Learning Rate: 0.0057045062 +2025-02-26 11:34:33,961 Train Loss: 0.0224631, Val Loss: 0.0446051 +2025-02-26 11:34:33,961 Epoch 710/2000 +2025-02-26 11:35:35,036 Current Learning Rate: 0.0057821723 +2025-02-26 11:35:35,037 Train Loss: 0.0224610, Val Loss: 0.0445967 +2025-02-26 11:35:35,037 Epoch 711/2000 +2025-02-26 11:36:36,300 Current Learning Rate: 0.0058596455 +2025-02-26 11:36:36,300 Train Loss: 0.0224603, Val Loss: 0.0446068 +2025-02-26 11:36:36,301 Epoch 712/2000 +2025-02-26 11:37:38,634 Current Learning Rate: 0.0059369066 +2025-02-26 11:37:38,635 Train Loss: 0.0224602, Val Loss: 0.0445927 +2025-02-26 11:37:38,635 Epoch 713/2000 +2025-02-26 11:38:40,191 Current Learning Rate: 0.0060139365 +2025-02-26 11:38:40,192 Train Loss: 0.0224602, Val Loss: 0.0445952 +2025-02-26 11:38:40,192 Epoch 714/2000 +2025-02-26 11:39:41,930 Current Learning Rate: 0.0060907162 +2025-02-26 11:39:41,931 Train Loss: 0.0224599, Val Loss: 0.0445930 +2025-02-26 11:39:41,931 Epoch 715/2000 +2025-02-26 11:40:44,245 Current Learning Rate: 0.0061672268 +2025-02-26 11:40:44,245 Train Loss: 0.0224598, Val Loss: 0.0446011 +2025-02-26 11:40:44,246 Epoch 716/2000 +2025-02-26 11:41:47,695 Current Learning Rate: 0.0062434494 +2025-02-26 11:41:47,696 Train Loss: 0.0224596, Val Loss: 0.0445946 +2025-02-26 11:41:47,696 Epoch 717/2000 +2025-02-26 11:42:51,591 Current Learning Rate: 0.0063193652 +2025-02-26 11:42:51,592 Train Loss: 0.0224590, Val Loss: 0.0445923 +2025-02-26 11:42:51,592 Epoch 718/2000 +2025-02-26 11:43:54,672 Current Learning 
Rate: 0.0063949555 +2025-02-26 11:43:54,673 Train Loss: 0.0224574, Val Loss: 0.0445871 +2025-02-26 11:43:54,673 Epoch 719/2000 +2025-02-26 11:44:57,143 Current Learning Rate: 0.0064702016 +2025-02-26 11:44:57,144 Train Loss: 0.0224573, Val Loss: 0.0445904 +2025-02-26 11:44:57,144 Epoch 720/2000 +2025-02-26 11:45:59,147 Current Learning Rate: 0.0065450850 +2025-02-26 11:45:59,148 Train Loss: 0.0224558, Val Loss: 0.0445768 +2025-02-26 11:45:59,148 Epoch 721/2000 +2025-02-26 11:47:02,071 Current Learning Rate: 0.0066195871 +2025-02-26 11:47:02,071 Train Loss: 0.0224522, Val Loss: 0.0445664 +2025-02-26 11:47:02,071 Epoch 722/2000 +2025-02-26 11:48:06,997 Current Learning Rate: 0.0066936896 +2025-02-26 11:48:06,998 Train Loss: 0.0224518, Val Loss: 0.0445679 +2025-02-26 11:48:06,998 Epoch 723/2000 +2025-02-26 11:49:08,315 Current Learning Rate: 0.0067673742 +2025-02-26 11:49:08,315 Train Loss: 0.0224517, Val Loss: 0.0445666 +2025-02-26 11:49:08,316 Epoch 724/2000 +2025-02-26 11:50:11,549 Current Learning Rate: 0.0068406228 +2025-02-26 11:50:11,550 Train Loss: 0.0224514, Val Loss: 0.0445675 +2025-02-26 11:50:11,550 Epoch 725/2000 +2025-02-26 11:51:14,647 Current Learning Rate: 0.0069134172 +2025-02-26 11:51:14,648 Train Loss: 0.0224524, Val Loss: 0.0445666 +2025-02-26 11:51:14,648 Epoch 726/2000 +2025-02-26 11:52:16,608 Current Learning Rate: 0.0069857395 +2025-02-26 11:52:16,608 Train Loss: 0.0224527, Val Loss: 0.0445678 +2025-02-26 11:52:16,609 Epoch 727/2000 +2025-02-26 11:53:19,327 Current Learning Rate: 0.0070575718 +2025-02-26 11:53:19,328 Train Loss: 0.0224536, Val Loss: 0.0445736 +2025-02-26 11:53:19,328 Epoch 728/2000 +2025-02-26 11:54:23,462 Current Learning Rate: 0.0071288965 +2025-02-26 11:54:23,463 Train Loss: 0.0224546, Val Loss: 0.0445755 +2025-02-26 11:54:23,463 Epoch 729/2000 +2025-02-26 11:55:26,136 Current Learning Rate: 0.0071996958 +2025-02-26 11:55:26,136 Train Loss: 0.0224551, Val Loss: 0.0445875 +2025-02-26 11:55:26,137 Epoch 730/2000 +2025-02-26 
11:56:29,010 Current Learning Rate: 0.0072699525 +2025-02-26 11:56:29,010 Train Loss: 0.0224571, Val Loss: 0.0445909 +2025-02-26 11:56:29,010 Epoch 731/2000 +2025-02-26 11:57:31,633 Current Learning Rate: 0.0073396491 +2025-02-26 11:57:31,633 Train Loss: 0.0224607, Val Loss: 0.0446116 +2025-02-26 11:57:31,634 Epoch 732/2000 +2025-02-26 11:58:33,519 Current Learning Rate: 0.0074087684 +2025-02-26 11:58:33,520 Train Loss: 0.0224638, Val Loss: 0.0446028 +2025-02-26 11:58:33,520 Epoch 733/2000 +2025-02-26 11:59:35,267 Current Learning Rate: 0.0074772933 +2025-02-26 11:59:35,268 Train Loss: 0.0224645, Val Loss: 0.0446130 +2025-02-26 11:59:35,268 Epoch 734/2000 +2025-02-26 12:00:37,605 Current Learning Rate: 0.0075452071 +2025-02-26 12:00:37,605 Train Loss: 0.0224652, Val Loss: 0.0446095 +2025-02-26 12:00:37,606 Epoch 735/2000 +2025-02-26 12:01:39,837 Current Learning Rate: 0.0076124928 +2025-02-26 12:01:39,838 Train Loss: 0.0224646, Val Loss: 0.0446141 +2025-02-26 12:01:39,838 Epoch 736/2000 +2025-02-26 12:02:41,932 Current Learning Rate: 0.0076791340 +2025-02-26 12:02:41,932 Train Loss: 0.0224651, Val Loss: 0.0446040 +2025-02-26 12:02:41,932 Epoch 737/2000 +2025-02-26 12:03:47,157 Current Learning Rate: 0.0077451141 +2025-02-26 12:03:47,157 Train Loss: 0.0224653, Val Loss: 0.0446106 +2025-02-26 12:03:47,158 Epoch 738/2000 +2025-02-26 12:04:50,143 Current Learning Rate: 0.0078104169 +2025-02-26 12:04:50,144 Train Loss: 0.0224649, Val Loss: 0.0446074 +2025-02-26 12:04:50,144 Epoch 739/2000 +2025-02-26 12:05:52,063 Current Learning Rate: 0.0078750263 +2025-02-26 12:05:52,063 Train Loss: 0.0224627, Val Loss: 0.0446064 +2025-02-26 12:05:52,064 Epoch 740/2000 +2025-02-26 12:06:55,309 Current Learning Rate: 0.0079389263 +2025-02-26 12:06:55,309 Train Loss: 0.0224647, Val Loss: 0.0446233 +2025-02-26 12:06:55,309 Epoch 741/2000 +2025-02-26 12:07:58,505 Current Learning Rate: 0.0080021011 +2025-02-26 12:07:58,506 Train Loss: 0.0224651, Val Loss: 0.0446188 +2025-02-26 
12:07:58,506 Epoch 742/2000 +2025-02-26 12:09:01,267 Current Learning Rate: 0.0080645353 +2025-02-26 12:09:01,267 Train Loss: 0.0224645, Val Loss: 0.0446086 +2025-02-26 12:09:01,268 Epoch 743/2000 +2025-02-26 12:10:03,048 Current Learning Rate: 0.0081262133 +2025-02-26 12:10:03,048 Train Loss: 0.0224649, Val Loss: 0.0446128 +2025-02-26 12:10:03,048 Epoch 744/2000 +2025-02-26 12:11:06,286 Current Learning Rate: 0.0081871199 +2025-02-26 12:11:06,287 Train Loss: 0.0224640, Val Loss: 0.0446129 +2025-02-26 12:11:06,287 Epoch 745/2000 +2025-02-26 12:12:08,072 Current Learning Rate: 0.0082472402 +2025-02-26 12:12:08,072 Train Loss: 0.0224642, Val Loss: 0.0446065 +2025-02-26 12:12:08,072 Epoch 746/2000 +2025-02-26 12:13:10,553 Current Learning Rate: 0.0083065593 +2025-02-26 12:13:10,554 Train Loss: 0.0224659, Val Loss: 0.0446135 +2025-02-26 12:13:10,554 Epoch 747/2000 +2025-02-26 12:14:11,669 Current Learning Rate: 0.0083650626 +2025-02-26 12:14:11,669 Train Loss: 0.0224661, Val Loss: 0.0446064 +2025-02-26 12:14:11,669 Epoch 748/2000 +2025-02-26 12:15:14,018 Current Learning Rate: 0.0084227355 +2025-02-26 12:15:14,018 Train Loss: 0.0224636, Val Loss: 0.0446044 +2025-02-26 12:15:14,018 Epoch 749/2000 +2025-02-26 12:16:18,523 Current Learning Rate: 0.0084795640 +2025-02-26 12:16:18,524 Train Loss: 0.0224653, Val Loss: 0.0446140 +2025-02-26 12:16:18,524 Epoch 750/2000 +2025-02-26 12:17:23,543 Current Learning Rate: 0.0085355339 +2025-02-26 12:17:23,544 Train Loss: 0.0224662, Val Loss: 0.0446104 +2025-02-26 12:17:23,544 Epoch 751/2000 +2025-02-26 12:18:26,806 Current Learning Rate: 0.0085906315 +2025-02-26 12:18:26,806 Train Loss: 0.0224655, Val Loss: 0.0446095 +2025-02-26 12:18:26,806 Epoch 752/2000 +2025-02-26 12:19:29,919 Current Learning Rate: 0.0086448431 +2025-02-26 12:19:29,919 Train Loss: 0.0224656, Val Loss: 0.0446178 +2025-02-26 12:19:29,919 Epoch 753/2000 +2025-02-26 12:20:31,688 Current Learning Rate: 0.0086981555 +2025-02-26 12:20:31,689 Train Loss: 0.0224634, Val 
Loss: 0.0445953 +2025-02-26 12:20:31,689 Epoch 754/2000 +2025-02-26 12:21:34,305 Current Learning Rate: 0.0087505553 +2025-02-26 12:21:34,305 Train Loss: 0.0224621, Val Loss: 0.0446029 +2025-02-26 12:21:34,305 Epoch 755/2000 +2025-02-26 12:22:36,355 Current Learning Rate: 0.0088020298 +2025-02-26 12:22:36,355 Train Loss: 0.0224639, Val Loss: 0.0446030 +2025-02-26 12:22:36,356 Epoch 756/2000 +2025-02-26 12:23:38,583 Current Learning Rate: 0.0088525662 +2025-02-26 12:23:38,583 Train Loss: 0.0224645, Val Loss: 0.0446037 +2025-02-26 12:23:38,583 Epoch 757/2000 +2025-02-26 12:24:40,941 Current Learning Rate: 0.0089021520 +2025-02-26 12:24:40,941 Train Loss: 0.0224646, Val Loss: 0.0446146 +2025-02-26 12:24:40,942 Epoch 758/2000 +2025-02-26 12:25:44,592 Current Learning Rate: 0.0089507751 +2025-02-26 12:25:44,593 Train Loss: 0.0224665, Val Loss: 0.0446049 +2025-02-26 12:25:44,593 Epoch 759/2000 +2025-02-26 12:26:47,570 Current Learning Rate: 0.0089984233 +2025-02-26 12:26:47,570 Train Loss: 0.0224630, Val Loss: 0.0446139 +2025-02-26 12:26:47,571 Epoch 760/2000 +2025-02-26 12:27:50,651 Current Learning Rate: 0.0090450850 +2025-02-26 12:27:50,652 Train Loss: 0.0224660, Val Loss: 0.0446197 +2025-02-26 12:27:50,652 Epoch 761/2000 +2025-02-26 12:28:54,037 Current Learning Rate: 0.0090907486 +2025-02-26 12:28:54,038 Train Loss: 0.0224649, Val Loss: 0.0446227 +2025-02-26 12:28:54,038 Epoch 762/2000 +2025-02-26 12:29:58,507 Current Learning Rate: 0.0091354029 +2025-02-26 12:29:58,507 Train Loss: 0.0224670, Val Loss: 0.0446181 +2025-02-26 12:29:58,508 Epoch 763/2000 +2025-02-26 12:31:01,824 Current Learning Rate: 0.0091790368 +2025-02-26 12:31:01,825 Train Loss: 0.0224693, Val Loss: 0.0446073 +2025-02-26 12:31:01,825 Epoch 764/2000 +2025-02-26 12:32:04,401 Current Learning Rate: 0.0092216396 +2025-02-26 12:32:04,401 Train Loss: 0.0224633, Val Loss: 0.0445928 +2025-02-26 12:32:04,401 Epoch 765/2000 +2025-02-26 12:33:07,442 Current Learning Rate: 0.0092632008 +2025-02-26 
12:33:07,442 Train Loss: 0.0224607, Val Loss: 0.0446205 +2025-02-26 12:33:07,443 Epoch 766/2000 +2025-02-26 12:34:09,080 Current Learning Rate: 0.0093037101 +2025-02-26 12:34:09,080 Train Loss: 0.0224708, Val Loss: 0.0446278 +2025-02-26 12:34:09,081 Epoch 767/2000 +2025-02-26 12:35:10,967 Current Learning Rate: 0.0093431576 +2025-02-26 12:35:10,967 Train Loss: 0.0224723, Val Loss: 0.0446281 +2025-02-26 12:35:10,968 Epoch 768/2000 +2025-02-26 12:36:13,505 Current Learning Rate: 0.0093815334 +2025-02-26 12:36:13,506 Train Loss: 0.0224717, Val Loss: 0.0446262 +2025-02-26 12:36:13,506 Epoch 769/2000 +2025-02-26 12:37:15,471 Current Learning Rate: 0.0094188282 +2025-02-26 12:37:15,472 Train Loss: 0.0224694, Val Loss: 0.0446184 +2025-02-26 12:37:15,472 Epoch 770/2000 +2025-02-26 12:38:18,075 Current Learning Rate: 0.0094550326 +2025-02-26 12:38:18,075 Train Loss: 0.0224657, Val Loss: 0.0446020 +2025-02-26 12:38:18,075 Epoch 771/2000 +2025-02-26 12:39:21,691 Current Learning Rate: 0.0094901379 +2025-02-26 12:39:21,692 Train Loss: 0.0224614, Val Loss: 0.0445873 +2025-02-26 12:39:21,692 Epoch 772/2000 +2025-02-26 12:40:24,456 Current Learning Rate: 0.0095241353 +2025-02-26 12:40:24,457 Train Loss: 0.0224600, Val Loss: 0.0445902 +2025-02-26 12:40:24,457 Epoch 773/2000 +2025-02-26 12:41:26,189 Current Learning Rate: 0.0095570164 +2025-02-26 12:41:26,190 Train Loss: 0.0224588, Val Loss: 0.0445978 +2025-02-26 12:41:26,190 Epoch 774/2000 +2025-02-26 12:42:29,785 Current Learning Rate: 0.0095887731 +2025-02-26 12:42:29,786 Train Loss: 0.0224600, Val Loss: 0.0445769 +2025-02-26 12:42:29,786 Epoch 775/2000 +2025-02-26 12:43:32,674 Current Learning Rate: 0.0096193977 +2025-02-26 12:43:32,675 Train Loss: 0.0224541, Val Loss: 0.0445688 +2025-02-26 12:43:32,675 Epoch 776/2000 +2025-02-26 12:44:35,316 Current Learning Rate: 0.0096488824 +2025-02-26 12:44:35,317 Train Loss: 0.0224565, Val Loss: 0.0446063 +2025-02-26 12:44:35,317 Epoch 777/2000 +2025-02-26 12:45:38,317 Current Learning 
Rate: 0.0096772202 +2025-02-26 12:45:38,317 Train Loss: 0.0224632, Val Loss: 0.0445993 +2025-02-26 12:45:38,317 Epoch 778/2000 +2025-02-26 12:46:40,265 Current Learning Rate: 0.0097044038 +2025-02-26 12:46:40,266 Train Loss: 0.0224631, Val Loss: 0.0445928 +2025-02-26 12:46:40,266 Epoch 779/2000 +2025-02-26 12:47:42,485 Current Learning Rate: 0.0097304268 +2025-02-26 12:47:42,485 Train Loss: 0.0224565, Val Loss: 0.0445884 +2025-02-26 12:47:42,485 Epoch 780/2000 +2025-02-26 12:48:45,610 Current Learning Rate: 0.0097552826 +2025-02-26 12:48:45,610 Train Loss: 0.0224596, Val Loss: 0.0446174 +2025-02-26 12:48:45,611 Epoch 781/2000 +2025-02-26 12:49:48,396 Current Learning Rate: 0.0097789651 +2025-02-26 12:49:48,396 Train Loss: 0.0224696, Val Loss: 0.0446219 +2025-02-26 12:49:48,397 Epoch 782/2000 +2025-02-26 12:50:49,277 Current Learning Rate: 0.0098014684 +2025-02-26 12:50:49,277 Train Loss: 0.0224727, Val Loss: 0.0446265 +2025-02-26 12:50:49,277 Epoch 783/2000 +2025-02-26 12:51:53,196 Current Learning Rate: 0.0098227871 +2025-02-26 12:51:53,197 Train Loss: 0.0224597, Val Loss: 0.0445689 +2025-02-26 12:51:53,197 Epoch 784/2000 +2025-02-26 12:52:54,011 Current Learning Rate: 0.0098429158 +2025-02-26 12:52:54,011 Train Loss: 0.0224501, Val Loss: 0.0445471 +2025-02-26 12:52:54,012 Epoch 785/2000 +2025-02-26 12:53:56,916 Current Learning Rate: 0.0098618496 +2025-02-26 12:53:56,917 Train Loss: 0.0224446, Val Loss: 0.0445390 +2025-02-26 12:53:56,917 Epoch 786/2000 +2025-02-26 12:55:00,521 Current Learning Rate: 0.0098795838 +2025-02-26 12:55:00,521 Train Loss: 0.0224462, Val Loss: 0.0445569 +2025-02-26 12:55:00,521 Epoch 787/2000 +2025-02-26 12:56:03,627 Current Learning Rate: 0.0098961141 +2025-02-26 12:56:03,627 Train Loss: 0.0224512, Val Loss: 0.0445695 +2025-02-26 12:56:03,627 Epoch 788/2000 +2025-02-26 12:57:07,075 Current Learning Rate: 0.0099114363 +2025-02-26 12:57:07,075 Train Loss: 0.0224546, Val Loss: 0.0445863 +2025-02-26 12:57:07,076 Epoch 789/2000 +2025-02-26 
12:58:10,478 Current Learning Rate: 0.0099255466 +2025-02-26 12:58:10,479 Train Loss: 0.0224601, Val Loss: 0.0445882 +2025-02-26 12:58:10,480 Epoch 790/2000 +2025-02-26 12:59:09,970 Current Learning Rate: 0.0099384417 +2025-02-26 12:59:09,970 Train Loss: 0.0224623, Val Loss: 0.0446053 +2025-02-26 12:59:09,971 Epoch 791/2000 +2025-02-26 13:00:14,142 Current Learning Rate: 0.0099501183 +2025-02-26 13:00:14,143 Train Loss: 0.0224657, Val Loss: 0.0446035 +2025-02-26 13:00:14,143 Epoch 792/2000 +2025-02-26 13:01:18,726 Current Learning Rate: 0.0099605735 +2025-02-26 13:01:18,727 Train Loss: 0.0224650, Val Loss: 0.0446060 +2025-02-26 13:01:18,727 Epoch 793/2000 +2025-02-26 13:02:20,658 Current Learning Rate: 0.0099698048 +2025-02-26 13:02:20,659 Train Loss: 0.0224672, Val Loss: 0.0446153 +2025-02-26 13:02:20,659 Epoch 794/2000 +2025-02-26 13:03:24,160 Current Learning Rate: 0.0099778098 +2025-02-26 13:03:24,160 Train Loss: 0.0224682, Val Loss: 0.0446171 +2025-02-26 13:03:24,161 Epoch 795/2000 +2025-02-26 13:04:25,507 Current Learning Rate: 0.0099845867 +2025-02-26 13:04:25,508 Train Loss: 0.0224684, Val Loss: 0.0446143 +2025-02-26 13:04:25,508 Epoch 796/2000 +2025-02-26 13:05:28,490 Current Learning Rate: 0.0099901336 +2025-02-26 13:05:28,491 Train Loss: 0.0224686, Val Loss: 0.0446161 +2025-02-26 13:05:28,491 Epoch 797/2000 +2025-02-26 13:06:30,942 Current Learning Rate: 0.0099944494 +2025-02-26 13:06:30,942 Train Loss: 0.0224690, Val Loss: 0.0446144 +2025-02-26 13:06:30,942 Epoch 798/2000 +2025-02-26 13:07:33,063 Current Learning Rate: 0.0099975328 +2025-02-26 13:07:33,064 Train Loss: 0.0224674, Val Loss: 0.0446138 +2025-02-26 13:07:33,064 Epoch 799/2000 +2025-02-26 13:08:37,964 Current Learning Rate: 0.0099993832 +2025-02-26 13:08:37,965 Train Loss: 0.0224683, Val Loss: 0.0446117 +2025-02-26 13:08:37,965 Epoch 800/2000 +2025-02-26 13:09:39,506 Current Learning Rate: 0.0100000000 +2025-02-26 13:09:39,507 Train Loss: 0.0224717, Val Loss: 0.0446335 +2025-02-26 
13:09:39,507 Epoch 801/2000 +2025-02-26 13:10:41,728 Current Learning Rate: 0.0099993832 +2025-02-26 13:10:41,729 Train Loss: 0.0224737, Val Loss: 0.0446333 +2025-02-26 13:10:41,733 Epoch 802/2000 +2025-02-26 13:11:44,014 Current Learning Rate: 0.0099975328 +2025-02-26 13:11:44,014 Train Loss: 0.0224719, Val Loss: 0.0446206 +2025-02-26 13:11:44,015 Epoch 803/2000 +2025-02-26 13:12:46,171 Current Learning Rate: 0.0099944494 +2025-02-26 13:12:46,171 Train Loss: 0.0224695, Val Loss: 0.0446195 +2025-02-26 13:12:46,171 Epoch 804/2000 +2025-02-26 13:13:50,045 Current Learning Rate: 0.0099901336 +2025-02-26 13:13:50,046 Train Loss: 0.0224700, Val Loss: 0.0446295 +2025-02-26 13:13:50,046 Epoch 805/2000 +2025-02-26 13:14:53,616 Current Learning Rate: 0.0099845867 +2025-02-26 13:14:53,617 Train Loss: 0.0224697, Val Loss: 0.0446212 +2025-02-26 13:14:53,617 Epoch 806/2000 +2025-02-26 13:15:57,847 Current Learning Rate: 0.0099778098 +2025-02-26 13:15:57,847 Train Loss: 0.0224706, Val Loss: 0.0446208 +2025-02-26 13:15:57,847 Epoch 807/2000 +2025-02-26 13:17:00,533 Current Learning Rate: 0.0099698048 +2025-02-26 13:17:00,533 Train Loss: 0.0224747, Val Loss: 0.0446444 +2025-02-26 13:17:00,534 Epoch 808/2000 +2025-02-26 13:18:03,040 Current Learning Rate: 0.0099605735 +2025-02-26 13:18:03,040 Train Loss: 0.0224742, Val Loss: 0.0446350 +2025-02-26 13:18:03,040 Epoch 809/2000 +2025-02-26 13:19:06,416 Current Learning Rate: 0.0099501183 +2025-02-26 13:19:06,417 Train Loss: 0.0224730, Val Loss: 0.0446324 +2025-02-26 13:19:06,417 Epoch 810/2000 +2025-02-26 13:20:09,830 Current Learning Rate: 0.0099384417 +2025-02-26 13:20:09,831 Train Loss: 0.0224733, Val Loss: 0.0446346 +2025-02-26 13:20:09,832 Epoch 811/2000 +2025-02-26 13:21:12,896 Current Learning Rate: 0.0099255466 +2025-02-26 13:21:12,897 Train Loss: 0.0224733, Val Loss: 0.0446339 +2025-02-26 13:21:12,897 Epoch 812/2000 +2025-02-26 13:22:15,931 Current Learning Rate: 0.0099114363 +2025-02-26 13:22:15,932 Train Loss: 0.0224723, Val 
Loss: 0.0446294 +2025-02-26 13:22:15,932 Epoch 813/2000 +2025-02-26 13:23:20,167 Current Learning Rate: 0.0098961141 +2025-02-26 13:23:20,168 Train Loss: 0.0224729, Val Loss: 0.0446314 +2025-02-26 13:23:20,168 Epoch 814/2000 +2025-02-26 13:24:26,238 Current Learning Rate: 0.0098795838 +2025-02-26 13:24:26,239 Train Loss: 0.0224726, Val Loss: 0.0446277 +2025-02-26 13:24:26,240 Epoch 815/2000 +2025-02-26 13:25:29,501 Current Learning Rate: 0.0098618496 +2025-02-26 13:25:29,501 Train Loss: 0.0224716, Val Loss: 0.0446314 +2025-02-26 13:25:29,502 Epoch 816/2000 +2025-02-26 13:26:33,794 Current Learning Rate: 0.0098429158 +2025-02-26 13:26:33,794 Train Loss: 0.0224707, Val Loss: 0.0446235 +2025-02-26 13:26:33,795 Epoch 817/2000 +2025-02-26 13:27:36,818 Current Learning Rate: 0.0098227871 +2025-02-26 13:27:36,818 Train Loss: 0.0224686, Val Loss: 0.0446190 +2025-02-26 13:27:36,818 Epoch 818/2000 +2025-02-26 13:28:40,164 Current Learning Rate: 0.0098014684 +2025-02-26 13:28:40,165 Train Loss: 0.0224679, Val Loss: 0.0446177 +2025-02-26 13:28:40,165 Epoch 819/2000 +2025-02-26 13:29:42,408 Current Learning Rate: 0.0097789651 +2025-02-26 13:29:42,408 Train Loss: 0.0224646, Val Loss: 0.0445985 +2025-02-26 13:29:42,409 Epoch 820/2000 +2025-02-26 13:30:44,153 Current Learning Rate: 0.0097552826 +2025-02-26 13:30:44,154 Train Loss: 0.0224612, Val Loss: 0.0446079 +2025-02-26 13:30:44,154 Epoch 821/2000 +2025-02-26 13:31:47,842 Current Learning Rate: 0.0097304268 +2025-02-26 13:31:47,842 Train Loss: 0.0224638, Val Loss: 0.0446005 +2025-02-26 13:31:47,842 Epoch 822/2000 +2025-02-26 13:32:50,839 Current Learning Rate: 0.0097044038 +2025-02-26 13:32:50,840 Train Loss: 0.0224607, Val Loss: 0.0445918 +2025-02-26 13:32:50,840 Epoch 823/2000 +2025-02-26 13:33:53,712 Current Learning Rate: 0.0096772202 +2025-02-26 13:33:53,712 Train Loss: 0.0224592, Val Loss: 0.0445894 +2025-02-26 13:33:53,712 Epoch 824/2000 +2025-02-26 13:34:56,255 Current Learning Rate: 0.0096488824 +2025-02-26 
13:34:56,255 Train Loss: 0.0224608, Val Loss: 0.0445941 +2025-02-26 13:34:56,256 Epoch 825/2000 +2025-02-26 13:35:58,542 Current Learning Rate: 0.0096193977 +2025-02-26 13:35:58,542 Train Loss: 0.0224604, Val Loss: 0.0445968 +2025-02-26 13:35:58,543 Epoch 826/2000 +2025-02-26 13:37:00,459 Current Learning Rate: 0.0095887731 +2025-02-26 13:37:00,459 Train Loss: 0.0224612, Val Loss: 0.0445976 +2025-02-26 13:37:00,460 Epoch 827/2000 +2025-02-26 13:38:03,334 Current Learning Rate: 0.0095570164 +2025-02-26 13:38:03,335 Train Loss: 0.0224611, Val Loss: 0.0445988 +2025-02-26 13:38:03,335 Epoch 828/2000 +2025-02-26 13:39:06,532 Current Learning Rate: 0.0095241353 +2025-02-26 13:39:06,533 Train Loss: 0.0224611, Val Loss: 0.0445963 +2025-02-26 13:39:06,533 Epoch 829/2000 +2025-02-26 13:40:09,491 Current Learning Rate: 0.0094901379 +2025-02-26 13:40:09,492 Train Loss: 0.0224592, Val Loss: 0.0446006 +2025-02-26 13:40:09,492 Epoch 830/2000 +2025-02-26 13:41:11,267 Current Learning Rate: 0.0094550326 +2025-02-26 13:41:11,267 Train Loss: 0.0224539, Val Loss: 0.0445787 +2025-02-26 13:41:11,267 Epoch 831/2000 +2025-02-26 13:42:13,303 Current Learning Rate: 0.0094188282 +2025-02-26 13:42:13,304 Train Loss: 0.0224524, Val Loss: 0.0445480 +2025-02-26 13:42:13,304 Epoch 832/2000 +2025-02-26 13:43:14,899 Current Learning Rate: 0.0093815334 +2025-02-26 13:43:14,899 Train Loss: 0.0224455, Val Loss: 0.0445514 +2025-02-26 13:43:14,899 Epoch 833/2000 +2025-02-26 13:44:17,168 Current Learning Rate: 0.0093431576 +2025-02-26 13:44:17,168 Train Loss: 0.0224462, Val Loss: 0.0445590 +2025-02-26 13:44:17,168 Epoch 834/2000 +2025-02-26 13:45:19,723 Current Learning Rate: 0.0093037101 +2025-02-26 13:45:19,723 Train Loss: 0.0224476, Val Loss: 0.0445613 +2025-02-26 13:45:19,723 Epoch 835/2000 +2025-02-26 13:46:22,703 Current Learning Rate: 0.0092632008 +2025-02-26 13:46:22,703 Train Loss: 0.0224479, Val Loss: 0.0445772 +2025-02-26 13:46:22,703 Epoch 836/2000 +2025-02-26 13:47:24,248 Current Learning 
Rate: 0.0092216396 +2025-02-26 13:47:24,249 Train Loss: 0.0224528, Val Loss: 0.0445608 +2025-02-26 13:47:24,249 Epoch 837/2000 +2025-02-26 13:48:27,219 Current Learning Rate: 0.0091790368 +2025-02-26 13:48:27,220 Train Loss: 0.0224471, Val Loss: 0.0445383 +2025-02-26 13:48:27,220 Epoch 838/2000 +2025-02-26 13:49:30,139 Current Learning Rate: 0.0091354029 +2025-02-26 13:49:30,140 Train Loss: 0.0224439, Val Loss: 0.0445562 +2025-02-26 13:49:30,140 Epoch 839/2000 +2025-02-26 13:50:31,993 Current Learning Rate: 0.0090907486 +2025-02-26 13:50:31,994 Train Loss: 0.0224494, Val Loss: 0.0445820 +2025-02-26 13:50:31,994 Epoch 840/2000 +2025-02-26 13:51:33,692 Current Learning Rate: 0.0090450850 +2025-02-26 13:51:33,693 Train Loss: 0.0224571, Val Loss: 0.0445867 +2025-02-26 13:51:33,693 Epoch 841/2000 +2025-02-26 13:52:35,612 Current Learning Rate: 0.0089984233 +2025-02-26 13:52:35,612 Train Loss: 0.0224570, Val Loss: 0.0445774 +2025-02-26 13:52:35,612 Epoch 842/2000 +2025-02-26 13:53:38,809 Current Learning Rate: 0.0089507751 +2025-02-26 13:53:38,810 Train Loss: 0.0224558, Val Loss: 0.0445800 +2025-02-26 13:53:38,812 Epoch 843/2000 +2025-02-26 13:54:41,940 Current Learning Rate: 0.0089021520 +2025-02-26 13:54:41,940 Train Loss: 0.0224562, Val Loss: 0.0445752 +2025-02-26 13:54:41,941 Epoch 844/2000 +2025-02-26 13:55:46,043 Current Learning Rate: 0.0088525662 +2025-02-26 13:55:46,043 Train Loss: 0.0224525, Val Loss: 0.0445656 +2025-02-26 13:55:46,043 Epoch 845/2000 +2025-02-26 13:56:49,317 Current Learning Rate: 0.0088020298 +2025-02-26 13:56:49,318 Train Loss: 0.0224528, Val Loss: 0.0445656 +2025-02-26 13:56:49,318 Epoch 846/2000 +2025-02-26 13:57:54,822 Current Learning Rate: 0.0087505553 +2025-02-26 13:57:54,822 Train Loss: 0.0224520, Val Loss: 0.0445636 +2025-02-26 13:57:54,823 Epoch 847/2000 +2025-02-26 13:59:00,566 Current Learning Rate: 0.0086981555 +2025-02-26 13:59:00,566 Train Loss: 0.0224448, Val Loss: 0.0445293 +2025-02-26 13:59:00,567 Epoch 848/2000 +2025-02-26 
14:00:00,990 Current Learning Rate: 0.0086448431 +2025-02-26 14:00:00,991 Train Loss: 0.0224407, Val Loss: 0.0445393 +2025-02-26 14:00:00,991 Epoch 849/2000 +2025-02-26 14:01:02,897 Current Learning Rate: 0.0085906315 +2025-02-26 14:01:02,898 Train Loss: 0.0224402, Val Loss: 0.0445258 +2025-02-26 14:01:02,898 Epoch 850/2000 +2025-02-26 14:02:05,861 Current Learning Rate: 0.0085355339 +2025-02-26 14:02:05,861 Train Loss: 0.0224374, Val Loss: 0.0445236 +2025-02-26 14:02:05,862 Epoch 851/2000 +2025-02-26 14:03:09,067 Current Learning Rate: 0.0084795640 +2025-02-26 14:03:09,068 Train Loss: 0.0224391, Val Loss: 0.0445332 +2025-02-26 14:03:09,068 Epoch 852/2000 +2025-02-26 14:04:10,744 Current Learning Rate: 0.0084227355 +2025-02-26 14:04:10,744 Train Loss: 0.0224441, Val Loss: 0.0445503 +2025-02-26 14:04:10,745 Epoch 853/2000 +2025-02-26 14:05:12,642 Current Learning Rate: 0.0083650626 +2025-02-26 14:05:12,643 Train Loss: 0.0224479, Val Loss: 0.0445610 +2025-02-26 14:05:12,643 Epoch 854/2000 +2025-02-26 14:06:14,863 Current Learning Rate: 0.0083065593 +2025-02-26 14:06:14,864 Train Loss: 0.0224488, Val Loss: 0.0445505 +2025-02-26 14:06:14,864 Epoch 855/2000 +2025-02-26 14:07:17,954 Current Learning Rate: 0.0082472402 +2025-02-26 14:07:17,954 Train Loss: 0.0224449, Val Loss: 0.0445333 +2025-02-26 14:07:17,955 Epoch 856/2000 +2025-02-26 14:08:19,542 Current Learning Rate: 0.0081871199 +2025-02-26 14:08:19,543 Train Loss: 0.0224433, Val Loss: 0.0445445 +2025-02-26 14:08:19,543 Epoch 857/2000 +2025-02-26 14:09:21,443 Current Learning Rate: 0.0081262133 +2025-02-26 14:09:21,443 Train Loss: 0.0224400, Val Loss: 0.0445289 +2025-02-26 14:09:21,443 Epoch 858/2000 +2025-02-26 14:10:25,192 Current Learning Rate: 0.0080645353 +2025-02-26 14:10:25,192 Train Loss: 0.0224380, Val Loss: 0.0445321 +2025-02-26 14:10:25,192 Epoch 859/2000 +2025-02-26 14:11:28,316 Current Learning Rate: 0.0080021011 +2025-02-26 14:11:28,317 Train Loss: 0.0224371, Val Loss: 0.0445119 +2025-02-26 
14:11:28,317 Epoch 860/2000 +2025-02-26 14:12:30,965 Current Learning Rate: 0.0079389263 +2025-02-26 14:12:30,965 Train Loss: 0.0224332, Val Loss: 0.0445102 +2025-02-26 14:12:30,966 Epoch 861/2000 +2025-02-26 14:13:33,387 Current Learning Rate: 0.0078750263 +2025-02-26 14:13:33,388 Train Loss: 0.0224412, Val Loss: 0.0445413 +2025-02-26 14:13:33,388 Epoch 862/2000 +2025-02-26 14:14:36,142 Current Learning Rate: 0.0078104169 +2025-02-26 14:14:36,142 Train Loss: 0.0224431, Val Loss: 0.0445407 +2025-02-26 14:14:36,142 Epoch 863/2000 +2025-02-26 14:15:38,490 Current Learning Rate: 0.0077451141 +2025-02-26 14:15:38,490 Train Loss: 0.0224419, Val Loss: 0.0445337 +2025-02-26 14:15:38,491 Epoch 864/2000 +2025-02-26 14:16:41,322 Current Learning Rate: 0.0076791340 +2025-02-26 14:16:41,322 Train Loss: 0.0224434, Val Loss: 0.0445519 +2025-02-26 14:16:41,323 Epoch 865/2000 +2025-02-26 14:17:45,739 Current Learning Rate: 0.0076124928 +2025-02-26 14:17:45,740 Train Loss: 0.0224481, Val Loss: 0.0445700 +2025-02-26 14:17:45,740 Epoch 866/2000 +2025-02-26 14:18:48,746 Current Learning Rate: 0.0075452071 +2025-02-26 14:18:48,746 Train Loss: 0.0224530, Val Loss: 0.0445683 +2025-02-26 14:18:48,746 Epoch 867/2000 +2025-02-26 14:19:49,743 Current Learning Rate: 0.0074772933 +2025-02-26 14:19:49,743 Train Loss: 0.0224549, Val Loss: 0.0445736 +2025-02-26 14:19:49,744 Epoch 868/2000 +2025-02-26 14:20:54,660 Current Learning Rate: 0.0074087684 +2025-02-26 14:20:54,660 Train Loss: 0.0224553, Val Loss: 0.0445704 +2025-02-26 14:20:54,660 Epoch 869/2000 +2025-02-26 14:21:56,224 Current Learning Rate: 0.0073396491 +2025-02-26 14:21:56,225 Train Loss: 0.0224554, Val Loss: 0.0445745 +2025-02-26 14:21:56,225 Epoch 870/2000 +2025-02-26 14:23:00,153 Current Learning Rate: 0.0072699525 +2025-02-26 14:23:00,154 Train Loss: 0.0224546, Val Loss: 0.0445768 +2025-02-26 14:23:00,154 Epoch 871/2000 +2025-02-26 14:24:04,106 Current Learning Rate: 0.0071996958 +2025-02-26 14:24:04,107 Train Loss: 0.0224562, Val 
Loss: 0.0445839 +2025-02-26 14:24:04,107 Epoch 872/2000 +2025-02-26 14:25:08,189 Current Learning Rate: 0.0071288965 +2025-02-26 14:25:08,189 Train Loss: 0.0224555, Val Loss: 0.0445796 +2025-02-26 14:25:08,190 Epoch 873/2000 +2025-02-26 14:26:10,432 Current Learning Rate: 0.0070575718 +2025-02-26 14:26:10,433 Train Loss: 0.0224566, Val Loss: 0.0445793 +2025-02-26 14:26:10,433 Epoch 874/2000 +2025-02-26 14:27:14,481 Current Learning Rate: 0.0069857395 +2025-02-26 14:27:14,482 Train Loss: 0.0224557, Val Loss: 0.0445773 +2025-02-26 14:27:14,482 Epoch 875/2000 +2025-02-26 14:28:19,281 Current Learning Rate: 0.0069134172 +2025-02-26 14:28:19,282 Train Loss: 0.0224551, Val Loss: 0.0445828 +2025-02-26 14:28:19,282 Epoch 876/2000 +2025-02-26 14:29:22,246 Current Learning Rate: 0.0068406228 +2025-02-26 14:29:22,247 Train Loss: 0.0224556, Val Loss: 0.0445811 +2025-02-26 14:29:22,247 Epoch 877/2000 +2025-02-26 14:30:24,991 Current Learning Rate: 0.0067673742 +2025-02-26 14:30:24,992 Train Loss: 0.0224566, Val Loss: 0.0445758 +2025-02-26 14:30:24,992 Epoch 878/2000 +2025-02-26 14:31:28,393 Current Learning Rate: 0.0066936896 +2025-02-26 14:31:28,393 Train Loss: 0.0224556, Val Loss: 0.0445940 +2025-02-26 14:31:28,394 Epoch 879/2000 +2025-02-26 14:32:29,489 Current Learning Rate: 0.0066195871 +2025-02-26 14:32:29,490 Train Loss: 0.0224597, Val Loss: 0.0445912 +2025-02-26 14:32:29,490 Epoch 880/2000 +2025-02-26 14:33:29,734 Current Learning Rate: 0.0065450850 +2025-02-26 14:33:29,735 Train Loss: 0.0224531, Val Loss: 0.0445709 +2025-02-26 14:33:29,735 Epoch 881/2000 +2025-02-26 14:34:31,893 Current Learning Rate: 0.0064702016 +2025-02-26 14:34:31,893 Train Loss: 0.0224473, Val Loss: 0.0445532 +2025-02-26 14:34:31,894 Epoch 882/2000 +2025-02-26 14:35:33,622 Current Learning Rate: 0.0063949555 +2025-02-26 14:35:33,622 Train Loss: 0.0224472, Val Loss: 0.0445567 +2025-02-26 14:35:33,623 Epoch 883/2000 +2025-02-26 14:36:37,002 Current Learning Rate: 0.0063193652 +2025-02-26 
14:36:37,002 Train Loss: 0.0224481, Val Loss: 0.0445543 +2025-02-26 14:36:37,005 Epoch 884/2000 +2025-02-26 14:37:39,745 Current Learning Rate: 0.0062434494 +2025-02-26 14:37:39,746 Train Loss: 0.0224492, Val Loss: 0.0445785 +2025-02-26 14:37:39,746 Epoch 885/2000 +2025-02-26 14:38:43,542 Current Learning Rate: 0.0061672268 +2025-02-26 14:38:43,542 Train Loss: 0.0224503, Val Loss: 0.0445785 +2025-02-26 14:38:43,543 Epoch 886/2000 +2025-02-26 14:39:46,638 Current Learning Rate: 0.0060907162 +2025-02-26 14:39:46,639 Train Loss: 0.0224522, Val Loss: 0.0445741 +2025-02-26 14:39:46,639 Epoch 887/2000 +2025-02-26 14:40:49,130 Current Learning Rate: 0.0060139365 +2025-02-26 14:40:49,131 Train Loss: 0.0224521, Val Loss: 0.0445688 +2025-02-26 14:40:49,131 Epoch 888/2000 +2025-02-26 14:41:52,842 Current Learning Rate: 0.0059369066 +2025-02-26 14:41:52,843 Train Loss: 0.0224545, Val Loss: 0.0445862 +2025-02-26 14:41:52,843 Epoch 889/2000 +2025-02-26 14:42:55,971 Current Learning Rate: 0.0058596455 +2025-02-26 14:42:55,971 Train Loss: 0.0224562, Val Loss: 0.0445843 +2025-02-26 14:42:55,971 Epoch 890/2000 +2025-02-26 14:44:00,855 Current Learning Rate: 0.0057821723 +2025-02-26 14:44:00,856 Train Loss: 0.0224549, Val Loss: 0.0445892 +2025-02-26 14:44:00,856 Epoch 891/2000 +2025-02-26 14:45:04,790 Current Learning Rate: 0.0057045062 +2025-02-26 14:45:04,792 Train Loss: 0.0224558, Val Loss: 0.0445757 +2025-02-26 14:45:04,792 Epoch 892/2000 +2025-02-26 14:46:09,352 Current Learning Rate: 0.0056266662 +2025-02-26 14:46:09,352 Train Loss: 0.0224547, Val Loss: 0.0445801 +2025-02-26 14:46:09,353 Epoch 893/2000 +2025-02-26 14:47:12,780 Current Learning Rate: 0.0055486716 +2025-02-26 14:47:12,781 Train Loss: 0.0224535, Val Loss: 0.0445793 +2025-02-26 14:47:12,781 Epoch 894/2000 +2025-02-26 14:48:16,134 Current Learning Rate: 0.0054705416 +2025-02-26 14:48:16,135 Train Loss: 0.0224504, Val Loss: 0.0445745 +2025-02-26 14:48:16,135 Epoch 895/2000 +2025-02-26 14:49:18,145 Current Learning 
Rate: 0.0053922955 +2025-02-26 14:49:18,145 Train Loss: 0.0224500, Val Loss: 0.0445664 +2025-02-26 14:49:18,146 Epoch 896/2000 +2025-02-26 14:50:22,020 Current Learning Rate: 0.0053139526 +2025-02-26 14:50:22,021 Train Loss: 0.0224517, Val Loss: 0.0445852 +2025-02-26 14:50:22,021 Epoch 897/2000 +2025-02-26 14:51:24,288 Current Learning Rate: 0.0052355323 +2025-02-26 14:51:24,289 Train Loss: 0.0224548, Val Loss: 0.0445836 +2025-02-26 14:51:24,289 Epoch 898/2000 +2025-02-26 14:52:28,112 Current Learning Rate: 0.0051570538 +2025-02-26 14:52:28,112 Train Loss: 0.0224550, Val Loss: 0.0445791 +2025-02-26 14:52:28,113 Epoch 899/2000 +2025-02-26 14:53:33,092 Current Learning Rate: 0.0050785366 +2025-02-26 14:53:33,092 Train Loss: 0.0224539, Val Loss: 0.0445985 +2025-02-26 14:53:33,093 Epoch 900/2000 +2025-02-26 14:54:36,982 Current Learning Rate: 0.0050000000 +2025-02-26 14:54:36,983 Train Loss: 0.0224592, Val Loss: 0.0445862 +2025-02-26 14:54:36,983 Epoch 901/2000 +2025-02-26 14:55:40,290 Current Learning Rate: 0.0049214634 +2025-02-26 14:55:40,290 Train Loss: 0.0224536, Val Loss: 0.0445744 +2025-02-26 14:55:40,290 Epoch 902/2000 +2025-02-26 14:56:43,014 Current Learning Rate: 0.0048429462 +2025-02-26 14:56:43,014 Train Loss: 0.0224520, Val Loss: 0.0445732 +2025-02-26 14:56:43,015 Epoch 903/2000 +2025-02-26 14:57:45,340 Current Learning Rate: 0.0047644677 +2025-02-26 14:57:45,341 Train Loss: 0.0224516, Val Loss: 0.0445756 +2025-02-26 14:57:45,341 Epoch 904/2000 +2025-02-26 14:58:50,625 Current Learning Rate: 0.0046860474 +2025-02-26 14:58:50,626 Train Loss: 0.0224525, Val Loss: 0.0445757 +2025-02-26 14:58:50,626 Epoch 905/2000 +2025-02-26 14:59:52,387 Current Learning Rate: 0.0046077045 +2025-02-26 14:59:52,388 Train Loss: 0.0224533, Val Loss: 0.0445756 +2025-02-26 14:59:52,388 Epoch 906/2000 +2025-02-26 15:00:56,919 Current Learning Rate: 0.0045294584 +2025-02-26 15:00:56,919 Train Loss: 0.0224530, Val Loss: 0.0445675 +2025-02-26 15:00:56,920 Epoch 907/2000 +2025-02-26 
15:02:01,018 Current Learning Rate: 0.0044513284 +2025-02-26 15:02:01,019 Train Loss: 0.0224501, Val Loss: 0.0445663 +2025-02-26 15:02:01,019 Epoch 908/2000 +2025-02-26 15:03:05,501 Current Learning Rate: 0.0043733338 +2025-02-26 15:03:05,502 Train Loss: 0.0224505, Val Loss: 0.0445690 +2025-02-26 15:03:05,502 Epoch 909/2000 +2025-02-26 15:04:09,282 Current Learning Rate: 0.0042954938 +2025-02-26 15:04:09,283 Train Loss: 0.0224505, Val Loss: 0.0445710 +2025-02-26 15:04:09,283 Epoch 910/2000 +2025-02-26 15:05:14,129 Current Learning Rate: 0.0042178277 +2025-02-26 15:05:14,130 Train Loss: 0.0224497, Val Loss: 0.0445698 +2025-02-26 15:05:14,130 Epoch 911/2000 +2025-02-26 15:06:16,456 Current Learning Rate: 0.0041403545 +2025-02-26 15:06:16,457 Train Loss: 0.0224497, Val Loss: 0.0445702 +2025-02-26 15:06:16,457 Epoch 912/2000 +2025-02-26 15:07:19,068 Current Learning Rate: 0.0040630934 +2025-02-26 15:07:19,068 Train Loss: 0.0224503, Val Loss: 0.0445718 +2025-02-26 15:07:19,069 Epoch 913/2000 +2025-02-26 15:08:21,153 Current Learning Rate: 0.0039860635 +2025-02-26 15:08:21,153 Train Loss: 0.0224506, Val Loss: 0.0445671 +2025-02-26 15:08:21,154 Epoch 914/2000 +2025-02-26 15:09:22,043 Current Learning Rate: 0.0039092838 +2025-02-26 15:09:22,044 Train Loss: 0.0224501, Val Loss: 0.0445752 +2025-02-26 15:09:22,044 Epoch 915/2000 +2025-02-26 15:10:27,233 Current Learning Rate: 0.0038327732 +2025-02-26 15:10:27,233 Train Loss: 0.0224514, Val Loss: 0.0445761 +2025-02-26 15:10:27,234 Epoch 916/2000 +2025-02-26 15:11:29,526 Current Learning Rate: 0.0037565506 +2025-02-26 15:11:29,527 Train Loss: 0.0224516, Val Loss: 0.0445711 +2025-02-26 15:11:29,527 Epoch 917/2000 +2025-02-26 15:12:33,400 Current Learning Rate: 0.0036806348 +2025-02-26 15:12:33,401 Train Loss: 0.0224519, Val Loss: 0.0445713 +2025-02-26 15:12:33,401 Epoch 918/2000 +2025-02-26 15:13:38,189 Current Learning Rate: 0.0036050445 +2025-02-26 15:13:38,189 Train Loss: 0.0224513, Val Loss: 0.0445789 +2025-02-26 
15:13:38,190 Epoch 919/2000 +2025-02-26 15:14:43,004 Current Learning Rate: 0.0035297984 +2025-02-26 15:14:43,005 Train Loss: 0.0224512, Val Loss: 0.0445738 +2025-02-26 15:14:43,005 Epoch 920/2000 +2025-02-26 15:15:46,319 Current Learning Rate: 0.0034549150 +2025-02-26 15:15:46,320 Train Loss: 0.0224521, Val Loss: 0.0445739 +2025-02-26 15:15:46,320 Epoch 921/2000 +2025-02-26 15:16:47,673 Current Learning Rate: 0.0033804129 +2025-02-26 15:16:47,674 Train Loss: 0.0224525, Val Loss: 0.0445730 +2025-02-26 15:16:47,674 Epoch 922/2000 +2025-02-26 15:17:50,545 Current Learning Rate: 0.0033063104 +2025-02-26 15:17:50,546 Train Loss: 0.0224517, Val Loss: 0.0445764 +2025-02-26 15:17:50,546 Epoch 923/2000 +2025-02-26 15:18:52,408 Current Learning Rate: 0.0032326258 +2025-02-26 15:18:52,408 Train Loss: 0.0224524, Val Loss: 0.0445817 +2025-02-26 15:18:52,408 Epoch 924/2000 +2025-02-26 15:19:55,677 Current Learning Rate: 0.0031593772 +2025-02-26 15:19:55,677 Train Loss: 0.0224548, Val Loss: 0.0445871 +2025-02-26 15:19:55,677 Epoch 925/2000 +2025-02-26 15:20:58,515 Current Learning Rate: 0.0030865828 +2025-02-26 15:20:58,516 Train Loss: 0.0224552, Val Loss: 0.0445833 +2025-02-26 15:20:58,516 Epoch 926/2000 +2025-02-26 15:22:00,668 Current Learning Rate: 0.0030142605 +2025-02-26 15:22:00,669 Train Loss: 0.0224544, Val Loss: 0.0445826 +2025-02-26 15:22:00,669 Epoch 927/2000 +2025-02-26 15:23:02,714 Current Learning Rate: 0.0029424282 +2025-02-26 15:23:02,714 Train Loss: 0.0224549, Val Loss: 0.0445835 +2025-02-26 15:23:02,714 Epoch 928/2000 +2025-02-26 15:24:05,072 Current Learning Rate: 0.0028711035 +2025-02-26 15:24:05,072 Train Loss: 0.0224562, Val Loss: 0.0445862 +2025-02-26 15:24:05,073 Epoch 929/2000 +2025-02-26 15:25:10,811 Current Learning Rate: 0.0028003042 +2025-02-26 15:25:10,811 Train Loss: 0.0224559, Val Loss: 0.0445901 +2025-02-26 15:25:10,811 Epoch 930/2000 +2025-02-26 15:26:13,946 Current Learning Rate: 0.0027300475 +2025-02-26 15:26:13,947 Train Loss: 0.0224561, Val 
Loss: 0.0445877 +2025-02-26 15:26:13,947 Epoch 931/2000 +2025-02-26 15:27:16,110 Current Learning Rate: 0.0026603509 +2025-02-26 15:27:16,110 Train Loss: 0.0224570, Val Loss: 0.0445925 +2025-02-26 15:27:16,111 Epoch 932/2000 +2025-02-26 15:28:18,488 Current Learning Rate: 0.0025912316 +2025-02-26 15:28:18,488 Train Loss: 0.0224581, Val Loss: 0.0445988 +2025-02-26 15:28:18,488 Epoch 933/2000 +2025-02-26 15:29:22,341 Current Learning Rate: 0.0025227067 +2025-02-26 15:29:22,342 Train Loss: 0.0224577, Val Loss: 0.0445919 +2025-02-26 15:29:22,342 Epoch 934/2000 +2025-02-26 15:30:24,293 Current Learning Rate: 0.0024547929 +2025-02-26 15:30:24,293 Train Loss: 0.0224570, Val Loss: 0.0445920 +2025-02-26 15:30:24,294 Epoch 935/2000 +2025-02-26 15:31:27,064 Current Learning Rate: 0.0023875072 +2025-02-26 15:31:27,064 Train Loss: 0.0224577, Val Loss: 0.0445901 +2025-02-26 15:31:27,064 Epoch 936/2000 +2025-02-26 15:32:29,712 Current Learning Rate: 0.0023208660 +2025-02-26 15:32:29,713 Train Loss: 0.0224571, Val Loss: 0.0445989 +2025-02-26 15:32:29,713 Epoch 937/2000 +2025-02-26 15:33:31,500 Current Learning Rate: 0.0022548859 +2025-02-26 15:33:31,501 Train Loss: 0.0224580, Val Loss: 0.0445902 +2025-02-26 15:33:31,501 Epoch 938/2000 +2025-02-26 15:34:32,843 Current Learning Rate: 0.0021895831 +2025-02-26 15:34:32,843 Train Loss: 0.0224578, Val Loss: 0.0445896 +2025-02-26 15:34:32,843 Epoch 939/2000 +2025-02-26 15:35:34,657 Current Learning Rate: 0.0021249737 +2025-02-26 15:35:34,660 Train Loss: 0.0224573, Val Loss: 0.0445929 +2025-02-26 15:35:34,660 Epoch 940/2000 +2025-02-26 15:36:37,106 Current Learning Rate: 0.0020610737 +2025-02-26 15:36:37,107 Train Loss: 0.0224575, Val Loss: 0.0445906 +2025-02-26 15:36:37,107 Epoch 941/2000 +2025-02-26 15:37:40,373 Current Learning Rate: 0.0019978989 +2025-02-26 15:37:40,374 Train Loss: 0.0224574, Val Loss: 0.0445941 +2025-02-26 15:37:40,374 Epoch 942/2000 +2025-02-26 15:38:43,936 Current Learning Rate: 0.0019354647 +2025-02-26 
15:38:43,937 Train Loss: 0.0224580, Val Loss: 0.0445914 +2025-02-26 15:38:43,937 Epoch 943/2000 +2025-02-26 15:39:48,389 Current Learning Rate: 0.0018737867 +2025-02-26 15:39:48,389 Train Loss: 0.0224573, Val Loss: 0.0445935 +2025-02-26 15:39:48,390 Epoch 944/2000 +2025-02-26 15:40:51,420 Current Learning Rate: 0.0018128801 +2025-02-26 15:40:51,421 Train Loss: 0.0224575, Val Loss: 0.0445932 +2025-02-26 15:40:51,421 Epoch 945/2000 +2025-02-26 15:41:55,499 Current Learning Rate: 0.0017527598 +2025-02-26 15:41:55,499 Train Loss: 0.0224578, Val Loss: 0.0445917 +2025-02-26 15:41:55,499 Epoch 946/2000 +2025-02-26 15:42:59,381 Current Learning Rate: 0.0016934407 +2025-02-26 15:42:59,381 Train Loss: 0.0224573, Val Loss: 0.0445902 +2025-02-26 15:42:59,381 Epoch 947/2000 +2025-02-26 15:44:02,242 Current Learning Rate: 0.0016349374 +2025-02-26 15:44:02,242 Train Loss: 0.0224572, Val Loss: 0.0445946 +2025-02-26 15:44:02,243 Epoch 948/2000 +2025-02-26 15:45:06,608 Current Learning Rate: 0.0015772645 +2025-02-26 15:45:06,608 Train Loss: 0.0224579, Val Loss: 0.0445916 +2025-02-26 15:45:06,609 Epoch 949/2000 +2025-02-26 15:46:08,428 Current Learning Rate: 0.0015204360 +2025-02-26 15:46:08,428 Train Loss: 0.0224574, Val Loss: 0.0445946 +2025-02-26 15:46:08,428 Epoch 950/2000 +2025-02-26 15:47:11,714 Current Learning Rate: 0.0014644661 +2025-02-26 15:47:11,714 Train Loss: 0.0224576, Val Loss: 0.0445906 +2025-02-26 15:47:11,715 Epoch 951/2000 +2025-02-26 15:48:14,185 Current Learning Rate: 0.0014093685 +2025-02-26 15:48:14,186 Train Loss: 0.0224570, Val Loss: 0.0445915 +2025-02-26 15:48:14,186 Epoch 952/2000 +2025-02-26 15:49:15,563 Current Learning Rate: 0.0013551569 +2025-02-26 15:49:15,563 Train Loss: 0.0224577, Val Loss: 0.0445924 +2025-02-26 15:49:15,564 Epoch 953/2000 +2025-02-26 15:50:19,293 Current Learning Rate: 0.0013018445 +2025-02-26 15:50:19,294 Train Loss: 0.0224573, Val Loss: 0.0445918 +2025-02-26 15:50:19,294 Epoch 954/2000 +2025-02-26 15:51:21,307 Current Learning 
Rate: 0.0012494447 +2025-02-26 15:51:21,307 Train Loss: 0.0224577, Val Loss: 0.0445966 +2025-02-26 15:51:21,307 Epoch 955/2000 +2025-02-26 15:52:24,320 Current Learning Rate: 0.0011979702 +2025-02-26 15:52:24,320 Train Loss: 0.0224585, Val Loss: 0.0445965 +2025-02-26 15:52:24,320 Epoch 956/2000 +2025-02-26 15:53:27,847 Current Learning Rate: 0.0011474338 +2025-02-26 15:53:27,848 Train Loss: 0.0224582, Val Loss: 0.0445945 +2025-02-26 15:53:27,848 Epoch 957/2000 +2025-02-26 15:54:30,433 Current Learning Rate: 0.0010978480 +2025-02-26 15:54:30,434 Train Loss: 0.0224586, Val Loss: 0.0445918 +2025-02-26 15:54:30,434 Epoch 958/2000 +2025-02-26 15:55:32,801 Current Learning Rate: 0.0010492249 +2025-02-26 15:55:32,801 Train Loss: 0.0224584, Val Loss: 0.0445964 +2025-02-26 15:55:32,802 Epoch 959/2000 +2025-02-26 15:56:35,428 Current Learning Rate: 0.0010015767 +2025-02-26 15:56:35,429 Train Loss: 0.0224588, Val Loss: 0.0445981 +2025-02-26 15:56:35,429 Epoch 960/2000 +2025-02-26 15:57:37,554 Current Learning Rate: 0.0009549150 +2025-02-26 15:57:37,555 Train Loss: 0.0224592, Val Loss: 0.0445982 +2025-02-26 15:57:37,555 Epoch 961/2000 +2025-02-26 15:58:39,510 Current Learning Rate: 0.0009092514 +2025-02-26 15:58:39,511 Train Loss: 0.0224592, Val Loss: 0.0445977 +2025-02-26 15:58:39,511 Epoch 962/2000 +2025-02-26 15:59:42,502 Current Learning Rate: 0.0008645971 +2025-02-26 15:59:42,503 Train Loss: 0.0224593, Val Loss: 0.0445973 +2025-02-26 15:59:42,503 Epoch 963/2000 +2025-02-26 16:00:45,909 Current Learning Rate: 0.0008209632 +2025-02-26 16:00:45,910 Train Loss: 0.0224585, Val Loss: 0.0445975 +2025-02-26 16:00:45,910 Epoch 964/2000 +2025-02-26 16:01:49,451 Current Learning Rate: 0.0007783604 +2025-02-26 16:01:49,451 Train Loss: 0.0224586, Val Loss: 0.0445965 +2025-02-26 16:01:49,451 Epoch 965/2000 +2025-02-26 16:02:51,417 Current Learning Rate: 0.0007367992 +2025-02-26 16:02:51,418 Train Loss: 0.0224586, Val Loss: 0.0446008 +2025-02-26 16:02:51,418 Epoch 966/2000 +2025-02-26 
16:03:53,847 Current Learning Rate: 0.0006962899 +2025-02-26 16:03:53,848 Train Loss: 0.0224591, Val Loss: 0.0445982 +2025-02-26 16:03:53,848 Epoch 967/2000 +2025-02-26 16:04:56,704 Current Learning Rate: 0.0006568424 +2025-02-26 16:04:56,705 Train Loss: 0.0224589, Val Loss: 0.0445983 +2025-02-26 16:04:56,705 Epoch 968/2000 +2025-02-26 16:06:00,054 Current Learning Rate: 0.0006184666 +2025-02-26 16:06:00,055 Train Loss: 0.0224592, Val Loss: 0.0446004 +2025-02-26 16:06:00,055 Epoch 969/2000 +2025-02-26 16:07:03,373 Current Learning Rate: 0.0005811718 +2025-02-26 16:07:03,374 Train Loss: 0.0224589, Val Loss: 0.0446009 +2025-02-26 16:07:03,374 Epoch 970/2000 +2025-02-26 16:08:05,057 Current Learning Rate: 0.0005449674 +2025-02-26 16:08:05,057 Train Loss: 0.0224596, Val Loss: 0.0445975 +2025-02-26 16:08:05,058 Epoch 971/2000 +2025-02-26 16:09:07,458 Current Learning Rate: 0.0005098621 +2025-02-26 16:09:07,458 Train Loss: 0.0224595, Val Loss: 0.0445987 +2025-02-26 16:09:07,458 Epoch 972/2000 +2025-02-26 16:10:09,627 Current Learning Rate: 0.0004758647 +2025-02-26 16:10:09,627 Train Loss: 0.0224597, Val Loss: 0.0445994 +2025-02-26 16:10:09,627 Epoch 973/2000 +2025-02-26 16:11:11,206 Current Learning Rate: 0.0004429836 +2025-02-26 16:11:11,207 Train Loss: 0.0224592, Val Loss: 0.0445980 +2025-02-26 16:11:11,207 Epoch 974/2000 +2025-02-26 16:12:13,254 Current Learning Rate: 0.0004112269 +2025-02-26 16:12:13,255 Train Loss: 0.0224588, Val Loss: 0.0445983 +2025-02-26 16:12:13,255 Epoch 975/2000 +2025-02-26 16:13:16,433 Current Learning Rate: 0.0003806023 +2025-02-26 16:13:16,434 Train Loss: 0.0224594, Val Loss: 0.0445986 +2025-02-26 16:13:16,434 Epoch 976/2000 +2025-02-26 16:14:18,278 Current Learning Rate: 0.0003511176 +2025-02-26 16:14:18,279 Train Loss: 0.0224589, Val Loss: 0.0445996 +2025-02-26 16:14:18,279 Epoch 977/2000 +2025-02-26 16:15:21,140 Current Learning Rate: 0.0003227798 +2025-02-26 16:15:21,141 Train Loss: 0.0224592, Val Loss: 0.0445983 +2025-02-26 
16:15:21,141 Epoch 978/2000 +2025-02-26 16:16:23,704 Current Learning Rate: 0.0002955962 +2025-02-26 16:16:23,705 Train Loss: 0.0224593, Val Loss: 0.0446004 +2025-02-26 16:16:23,705 Epoch 979/2000 +2025-02-26 16:17:26,139 Current Learning Rate: 0.0002695732 +2025-02-26 16:17:26,140 Train Loss: 0.0224594, Val Loss: 0.0445991 +2025-02-26 16:17:26,140 Epoch 980/2000 +2025-02-26 16:18:29,563 Current Learning Rate: 0.0002447174 +2025-02-26 16:18:29,564 Train Loss: 0.0224596, Val Loss: 0.0445991 +2025-02-26 16:18:29,564 Epoch 981/2000 +2025-02-26 16:19:31,622 Current Learning Rate: 0.0002210349 +2025-02-26 16:19:31,622 Train Loss: 0.0224597, Val Loss: 0.0445997 +2025-02-26 16:19:31,622 Epoch 982/2000 +2025-02-26 16:20:34,345 Current Learning Rate: 0.0001985316 +2025-02-26 16:20:34,345 Train Loss: 0.0224587, Val Loss: 0.0446008 +2025-02-26 16:20:34,346 Epoch 983/2000 +2025-02-26 16:21:35,824 Current Learning Rate: 0.0001772129 +2025-02-26 16:21:35,824 Train Loss: 0.0224590, Val Loss: 0.0445988 +2025-02-26 16:21:35,825 Epoch 984/2000 +2025-02-26 16:22:39,498 Current Learning Rate: 0.0001570842 +2025-02-26 16:22:39,498 Train Loss: 0.0224592, Val Loss: 0.0445985 +2025-02-26 16:22:39,498 Epoch 985/2000 +2025-02-26 16:23:41,732 Current Learning Rate: 0.0001381504 +2025-02-26 16:23:41,732 Train Loss: 0.0224591, Val Loss: 0.0445998 +2025-02-26 16:23:41,732 Epoch 986/2000 +2025-02-26 16:24:44,951 Current Learning Rate: 0.0001204162 +2025-02-26 16:24:44,951 Train Loss: 0.0224591, Val Loss: 0.0445983 +2025-02-26 16:24:44,951 Epoch 987/2000 +2025-02-26 16:25:46,907 Current Learning Rate: 0.0001038859 +2025-02-26 16:25:46,907 Train Loss: 0.0224585, Val Loss: 0.0445993 +2025-02-26 16:25:46,907 Epoch 988/2000 +2025-02-26 16:26:48,707 Current Learning Rate: 0.0000885637 +2025-02-26 16:26:48,708 Train Loss: 0.0224596, Val Loss: 0.0445994 +2025-02-26 16:26:48,708 Epoch 989/2000 +2025-02-26 16:27:53,176 Current Learning Rate: 0.0000744534 +2025-02-26 16:27:53,176 Train Loss: 0.0224589, Val 
Loss: 0.0445991 +2025-02-26 16:27:53,176 Epoch 990/2000 +2025-02-26 16:28:54,506 Current Learning Rate: 0.0000615583 +2025-02-26 16:28:54,506 Train Loss: 0.0224593, Val Loss: 0.0445985 +2025-02-26 16:28:54,507 Epoch 991/2000 +2025-02-26 16:29:58,674 Current Learning Rate: 0.0000498817 +2025-02-26 16:29:58,674 Train Loss: 0.0224597, Val Loss: 0.0445984 +2025-02-26 16:29:58,675 Epoch 992/2000 +2025-02-26 16:31:02,309 Current Learning Rate: 0.0000394265 +2025-02-26 16:31:02,309 Train Loss: 0.0224589, Val Loss: 0.0445989 +2025-02-26 16:31:02,309 Epoch 993/2000 +2025-02-26 16:32:05,034 Current Learning Rate: 0.0000301952 +2025-02-26 16:32:05,035 Train Loss: 0.0224590, Val Loss: 0.0445991 +2025-02-26 16:32:05,035 Epoch 994/2000 +2025-02-26 16:33:07,470 Current Learning Rate: 0.0000221902 +2025-02-26 16:33:07,471 Train Loss: 0.0224585, Val Loss: 0.0445990 +2025-02-26 16:33:07,471 Epoch 995/2000 +2025-02-26 16:34:09,453 Current Learning Rate: 0.0000154133 +2025-02-26 16:34:09,453 Train Loss: 0.0224586, Val Loss: 0.0445987 +2025-02-26 16:34:09,454 Epoch 996/2000 +2025-02-26 16:35:12,809 Current Learning Rate: 0.0000098664 +2025-02-26 16:35:12,809 Train Loss: 0.0224594, Val Loss: 0.0445989 +2025-02-26 16:35:12,810 Epoch 997/2000 +2025-02-26 16:36:15,843 Current Learning Rate: 0.0000055506 +2025-02-26 16:36:15,844 Train Loss: 0.0224585, Val Loss: 0.0445989 +2025-02-26 16:36:15,844 Epoch 998/2000 +2025-02-26 16:37:20,287 Current Learning Rate: 0.0000024672 +2025-02-26 16:37:20,287 Train Loss: 0.0224593, Val Loss: 0.0445990 +2025-02-26 16:37:20,287 Epoch 999/2000 +2025-02-26 16:38:22,861 Current Learning Rate: 0.0000006168 +2025-02-26 16:38:22,862 Train Loss: 0.0224592, Val Loss: 0.0445989 +2025-02-26 16:38:22,862 Epoch 1000/2000 +2025-02-26 16:39:26,351 Current Learning Rate: 0.0000000000 +2025-02-26 16:39:26,351 Train Loss: 0.0224594, Val Loss: 0.0445990 +2025-02-26 16:39:26,352 Epoch 1001/2000 +2025-02-26 16:40:28,547 Current Learning Rate: 0.0000006168 +2025-02-26 
16:40:28,547 Train Loss: 0.0224589, Val Loss: 0.0445990 +2025-02-26 16:40:28,547 Epoch 1002/2000 +2025-02-26 16:41:32,043 Current Learning Rate: 0.0000024672 +2025-02-26 16:41:32,043 Train Loss: 0.0224594, Val Loss: 0.0445989 +2025-02-26 16:41:32,043 Epoch 1003/2000 +2025-02-26 16:42:35,081 Current Learning Rate: 0.0000055506 +2025-02-26 16:42:35,081 Train Loss: 0.0224594, Val Loss: 0.0445989 +2025-02-26 16:42:35,082 Epoch 1004/2000 +2025-02-26 16:43:37,863 Current Learning Rate: 0.0000098664 +2025-02-26 16:43:37,863 Train Loss: 0.0224583, Val Loss: 0.0445988 +2025-02-26 16:43:37,863 Epoch 1005/2000 +2025-02-26 16:44:39,260 Current Learning Rate: 0.0000154133 +2025-02-26 16:44:39,260 Train Loss: 0.0224596, Val Loss: 0.0445988 +2025-02-26 16:44:39,260 Epoch 1006/2000 +2025-02-26 16:45:44,469 Current Learning Rate: 0.0000221902 +2025-02-26 16:45:44,469 Train Loss: 0.0224591, Val Loss: 0.0445987 +2025-02-26 16:45:44,469 Epoch 1007/2000 +2025-02-26 16:46:48,814 Current Learning Rate: 0.0000301952 +2025-02-26 16:46:48,814 Train Loss: 0.0224592, Val Loss: 0.0445991 +2025-02-26 16:46:48,814 Epoch 1008/2000 +2025-02-26 16:47:50,731 Current Learning Rate: 0.0000394265 +2025-02-26 16:47:50,731 Train Loss: 0.0224590, Val Loss: 0.0445987 +2025-02-26 16:47:50,732 Epoch 1009/2000 +2025-02-26 16:48:52,770 Current Learning Rate: 0.0000498817 +2025-02-26 16:48:52,771 Train Loss: 0.0224589, Val Loss: 0.0445990 +2025-02-26 16:48:52,773 Epoch 1010/2000 +2025-02-26 16:49:54,777 Current Learning Rate: 0.0000615583 +2025-02-26 16:49:54,778 Train Loss: 0.0224587, Val Loss: 0.0445990 +2025-02-26 16:49:54,778 Epoch 1011/2000 +2025-02-26 16:50:57,344 Current Learning Rate: 0.0000744534 +2025-02-26 16:50:57,344 Train Loss: 0.0224596, Val Loss: 0.0445987 +2025-02-26 16:50:57,344 Epoch 1012/2000 +2025-02-26 16:51:59,592 Current Learning Rate: 0.0000885637 +2025-02-26 16:51:59,593 Train Loss: 0.0224585, Val Loss: 0.0445986 +2025-02-26 16:51:59,593 Epoch 1013/2000 +2025-02-26 16:53:02,195 Current 
Learning Rate: 0.0001038859 +2025-02-26 16:53:02,195 Train Loss: 0.0224592, Val Loss: 0.0445983 +2025-02-26 16:53:02,195 Epoch 1014/2000 +2025-02-26 16:54:04,911 Current Learning Rate: 0.0001204162 +2025-02-26 16:54:04,912 Train Loss: 0.0224595, Val Loss: 0.0445986 +2025-02-26 16:54:04,912 Epoch 1015/2000 +2025-02-26 16:55:07,193 Current Learning Rate: 0.0001381504 +2025-02-26 16:55:07,193 Train Loss: 0.0224593, Val Loss: 0.0445986 +2025-02-26 16:55:07,193 Epoch 1016/2000 +2025-02-26 16:56:08,616 Current Learning Rate: 0.0001570842 +2025-02-26 16:56:08,617 Train Loss: 0.0224593, Val Loss: 0.0445989 +2025-02-26 16:56:08,617 Epoch 1017/2000 +2025-02-26 16:57:10,687 Current Learning Rate: 0.0001772129 +2025-02-26 16:57:10,687 Train Loss: 0.0224598, Val Loss: 0.0445984 +2025-02-26 16:57:10,687 Epoch 1018/2000 +2025-02-26 16:58:13,347 Current Learning Rate: 0.0001985316 +2025-02-26 16:58:13,347 Train Loss: 0.0224592, Val Loss: 0.0445978 +2025-02-26 16:58:13,348 Epoch 1019/2000 +2025-02-26 16:59:16,867 Current Learning Rate: 0.0002210349 +2025-02-26 16:59:16,868 Train Loss: 0.0224591, Val Loss: 0.0445996 +2025-02-26 16:59:16,868 Epoch 1020/2000 +2025-02-26 17:00:19,691 Current Learning Rate: 0.0002447174 +2025-02-26 17:00:19,693 Train Loss: 0.0224590, Val Loss: 0.0445989 +2025-02-26 17:00:19,693 Epoch 1021/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp1_20250224_inference.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp1_20250224_inference.log new file mode 100644 index 0000000000000000000000000000000000000000..c939751ed185ae57535323e4c29e12fc58d26b2b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp1_20250224_inference.log @@ -0,0 +1,18 @@ +2025-02-24 18:00:42,704 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-24 18:00:42,708 开始推理... 
+2025-02-24 18:09:56,678 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 18:09:56,682 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 18:09:56,682 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 18:09:56,682 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 18:09:57,450 输入数据范围:[-2.09, 2.09] +2025-02-24 18:09:58,188 输出数据范围:[-2.05, 2.18] +2025-02-24 18:09:58,188 推理完成! +2025-02-24 20:12:53,532 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-24 20:12:53,536 开始推理... +2025-02-24 20:22:08,245 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-24 20:22:08,250 输入数据形状:(822, 10, 2, 256, 256) +2025-02-24 20:22:08,250 目标数据形状:(822, 10, 2, 256, 256) +2025-02-24 20:22:08,250 输出数据形状:(822, 10, 2, 256, 256) +2025-02-24 20:22:09,005 输入数据范围:[-2.09, 2.09] +2025-02-24 20:22:09,754 输出数据范围:[-2.08, 2.09] +2025-02-24 20:22:09,755 推理完成! diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp1_20250224_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp1_20250224_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..e6736a7021b2a8d8dcd3dcadbd6b4a6dd5c3798a --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp1_20250224_training_log.log @@ -0,0 +1,1432 @@ +2025-02-24 16:22:15,927 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 16:22:15,977 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 16:22:15,995 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 16:22:16,018 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 16:22:16,022 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 16:22:16,024 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 16:23:27,380 Epoch 1/2000 +2025-02-24 16:28:31,405 Current Learning Rate: 0.0099993832 +2025-02-24 16:28:31,456 Train Loss: 0.0219006, Val Loss: 
0.0069225 +2025-02-24 16:28:31,456 Epoch 2/2000 +2025-02-24 16:33:36,817 Current Learning Rate: 0.0099975328 +2025-02-24 16:33:36,881 Train Loss: 0.0042377, Val Loss: 0.0033775 +2025-02-24 16:33:36,882 Epoch 3/2000 +2025-02-24 16:38:43,548 Current Learning Rate: 0.0099944494 +2025-02-24 16:38:43,610 Train Loss: 0.0031064, Val Loss: 0.0031769 +2025-02-24 16:38:43,610 Epoch 4/2000 +2025-02-24 16:43:49,679 Current Learning Rate: 0.0099901336 +2025-02-24 16:43:49,742 Train Loss: 0.0029768, Val Loss: 0.0029244 +2025-02-24 16:43:49,742 Epoch 5/2000 +2025-02-24 16:48:54,921 Current Learning Rate: 0.0099845867 +2025-02-24 16:48:54,987 Train Loss: 0.0025615, Val Loss: 0.0026449 +2025-02-24 16:48:54,987 Epoch 6/2000 +2025-02-24 16:54:00,340 Current Learning Rate: 0.0099778098 +2025-02-24 16:54:00,340 Train Loss: 0.0029487, Val Loss: 0.0028431 +2025-02-24 16:54:00,340 Epoch 7/2000 +2025-02-24 16:59:06,510 Current Learning Rate: 0.0099698048 +2025-02-24 16:59:06,605 Train Loss: 0.0024534, Val Loss: 0.0024732 +2025-02-24 16:59:06,605 Epoch 8/2000 +2025-02-24 17:04:12,383 Current Learning Rate: 0.0099605735 +2025-02-24 17:04:12,452 Train Loss: 0.0022332, Val Loss: 0.0024169 +2025-02-24 17:04:12,453 Epoch 9/2000 +2025-02-24 17:09:16,682 Current Learning Rate: 0.0099501183 +2025-02-24 17:09:16,735 Train Loss: 0.0021389, Val Loss: 0.0022395 +2025-02-24 17:09:16,735 Epoch 10/2000 +2025-02-24 17:14:22,023 Current Learning Rate: 0.0099384417 +2025-02-24 17:14:22,076 Train Loss: 0.0020420, Val Loss: 0.0021020 +2025-02-24 17:14:22,077 Epoch 11/2000 +2025-02-24 17:19:27,032 Current Learning Rate: 0.0099255466 +2025-02-24 17:19:27,097 Train Loss: 0.0019862, Val Loss: 0.0020404 +2025-02-24 17:19:27,098 Epoch 12/2000 +2025-02-24 17:24:33,108 Current Learning Rate: 0.0099114363 +2025-02-24 17:24:33,175 Train Loss: 0.0019081, Val Loss: 0.0019160 +2025-02-24 17:24:33,175 Epoch 13/2000 +2025-02-24 17:29:37,865 Current Learning Rate: 0.0098961141 +2025-02-24 17:29:37,866 Train Loss: 0.0019024, 
Val Loss: 0.0019181 +2025-02-24 17:29:37,866 Epoch 14/2000 +2025-02-24 17:35:04,316 Current Learning Rate: 0.0098795838 +2025-02-24 17:35:04,380 Train Loss: 0.0017997, Val Loss: 0.0018444 +2025-02-24 17:35:04,380 Epoch 15/2000 +2025-02-24 17:40:14,138 Current Learning Rate: 0.0098618496 +2025-02-24 17:40:14,191 Train Loss: 0.0017440, Val Loss: 0.0017538 +2025-02-24 17:40:14,191 Epoch 16/2000 +2025-02-24 17:48:38,847 Current Learning Rate: 0.0098429158 +2025-02-24 17:48:38,922 Train Loss: 0.0017036, Val Loss: 0.0017417 +2025-02-24 17:48:38,922 Epoch 17/2000 +2025-02-24 17:54:03,310 Current Learning Rate: 0.0098227871 +2025-02-24 17:54:05,065 Train Loss: 0.0016424, Val Loss: 0.0016729 +2025-02-24 17:54:05,065 Epoch 18/2000 +2025-02-24 18:02:43,758 Current Learning Rate: 0.0098014684 +2025-02-24 18:02:43,759 Train Loss: 0.0016250, Val Loss: 0.0016796 +2025-02-24 18:02:43,759 Epoch 19/2000 +2025-02-24 18:07:47,848 Current Learning Rate: 0.0097789651 +2025-02-24 18:07:47,906 Train Loss: 0.0015522, Val Loss: 0.0016468 +2025-02-24 18:07:47,907 Epoch 20/2000 +2025-02-24 18:16:12,209 Current Learning Rate: 0.0097552826 +2025-02-24 18:16:12,279 Train Loss: 0.0015154, Val Loss: 0.0015886 +2025-02-24 18:16:12,279 Epoch 21/2000 +2025-02-24 18:22:02,868 Current Learning Rate: 0.0097304268 +2025-02-24 18:22:04,457 Train Loss: 0.0014773, Val Loss: 0.0015820 +2025-02-24 18:22:04,458 Epoch 22/2000 +2025-02-24 18:27:08,130 Current Learning Rate: 0.0097044038 +2025-02-24 18:27:08,194 Train Loss: 0.0015940, Val Loss: 0.0015402 +2025-02-24 18:27:08,195 Epoch 23/2000 +2025-02-24 18:32:12,372 Current Learning Rate: 0.0096772202 +2025-02-24 18:32:12,429 Train Loss: 0.0014236, Val Loss: 0.0015194 +2025-02-24 18:32:12,430 Epoch 24/2000 +2025-02-24 18:37:17,027 Current Learning Rate: 0.0096488824 +2025-02-24 18:37:17,084 Train Loss: 0.0013903, Val Loss: 0.0014901 +2025-02-24 18:37:17,084 Epoch 25/2000 +2025-02-24 18:42:22,253 Current Learning Rate: 0.0096193977 +2025-02-24 18:42:22,317 Train 
Loss: 0.0013663, Val Loss: 0.0014732 +2025-02-24 18:42:22,317 Epoch 26/2000 +2025-02-24 18:47:27,293 Current Learning Rate: 0.0095887731 +2025-02-24 18:47:27,350 Train Loss: 0.0013755, Val Loss: 0.0014309 +2025-02-24 18:47:27,350 Epoch 27/2000 +2025-02-24 18:52:32,339 Current Learning Rate: 0.0095570164 +2025-02-24 18:52:32,397 Train Loss: 0.0013176, Val Loss: 0.0014165 +2025-02-24 18:52:32,397 Epoch 28/2000 +2025-02-24 18:57:37,834 Current Learning Rate: 0.0095241353 +2025-02-24 18:57:37,834 Train Loss: 0.0012976, Val Loss: 0.0014497 +2025-02-24 18:57:37,834 Epoch 29/2000 +2025-02-24 19:02:42,443 Current Learning Rate: 0.0094901379 +2025-02-24 19:02:42,526 Train Loss: 0.0012757, Val Loss: 0.0013847 +2025-02-24 19:02:42,526 Epoch 30/2000 +2025-02-24 19:07:47,968 Current Learning Rate: 0.0094550326 +2025-02-24 19:07:48,026 Train Loss: 0.0012505, Val Loss: 0.0013743 +2025-02-24 19:07:48,027 Epoch 31/2000 +2025-02-24 19:12:54,107 Current Learning Rate: 0.0094188282 +2025-02-24 19:12:54,107 Train Loss: 0.0013141, Val Loss: 0.0014200 +2025-02-24 19:12:54,107 Epoch 32/2000 +2025-02-24 19:17:59,312 Current Learning Rate: 0.0093815334 +2025-02-24 19:17:59,367 Train Loss: 0.0012207, Val Loss: 0.0013702 +2025-02-24 19:17:59,367 Epoch 33/2000 +2025-02-24 19:23:04,648 Current Learning Rate: 0.0093431576 +2025-02-24 19:23:04,946 Train Loss: 0.0011929, Val Loss: 0.0013128 +2025-02-24 19:23:04,946 Epoch 34/2000 +2025-02-24 19:28:10,025 Current Learning Rate: 0.0093037101 +2025-02-24 19:28:10,089 Train Loss: 0.0011704, Val Loss: 0.0013056 +2025-02-24 19:28:10,089 Epoch 35/2000 +2025-02-24 19:33:15,221 Current Learning Rate: 0.0092632008 +2025-02-24 19:33:15,281 Train Loss: 0.0011569, Val Loss: 0.0012951 +2025-02-24 19:33:15,281 Epoch 36/2000 +2025-02-24 19:38:19,566 Current Learning Rate: 0.0092216396 +2025-02-24 19:38:19,630 Train Loss: 0.0011477, Val Loss: 0.0012914 +2025-02-24 19:38:19,630 Epoch 37/2000 +2025-02-24 19:43:24,128 Current Learning Rate: 0.0091790368 +2025-02-24 
19:43:24,128 Train Loss: 0.0011395, Val Loss: 0.0013517 +2025-02-24 19:43:24,128 Epoch 38/2000 +2025-02-24 19:48:30,455 Current Learning Rate: 0.0091354029 +2025-02-24 19:48:30,515 Train Loss: 0.0012095, Val Loss: 0.0012652 +2025-02-24 19:48:30,515 Epoch 39/2000 +2025-02-24 19:53:36,857 Current Learning Rate: 0.0090907486 +2025-02-24 19:53:36,923 Train Loss: 0.0011026, Val Loss: 0.0012358 +2025-02-24 19:53:36,923 Epoch 40/2000 +2025-02-24 19:58:42,682 Current Learning Rate: 0.0090450850 +2025-02-24 19:58:42,760 Train Loss: 0.0010813, Val Loss: 0.0012147 +2025-02-24 19:58:42,760 Epoch 41/2000 +2025-02-24 20:03:48,201 Current Learning Rate: 0.0089984233 +2025-02-24 20:03:48,261 Train Loss: 0.0010622, Val Loss: 0.0012036 +2025-02-24 20:03:48,261 Epoch 42/2000 +2025-02-24 20:08:55,087 Current Learning Rate: 0.0089507751 +2025-02-24 20:08:55,158 Train Loss: 0.0010497, Val Loss: 0.0011976 +2025-02-24 20:08:55,158 Epoch 43/2000 +2025-02-24 20:14:00,130 Current Learning Rate: 0.0089021520 +2025-02-24 20:14:00,193 Train Loss: 0.0010410, Val Loss: 0.0011912 +2025-02-24 20:14:00,194 Epoch 44/2000 +2025-02-24 20:19:04,952 Current Learning Rate: 0.0088525662 +2025-02-24 20:19:04,953 Train Loss: 0.0010625, Val Loss: 0.0012223 +2025-02-24 20:19:04,953 Epoch 45/2000 +2025-02-24 20:24:10,736 Current Learning Rate: 0.0088020298 +2025-02-24 20:24:10,809 Train Loss: 0.0010263, Val Loss: 0.0011601 +2025-02-24 20:24:10,811 Epoch 46/2000 +2025-02-24 20:29:16,557 Current Learning Rate: 0.0087505553 +2025-02-24 20:29:16,626 Train Loss: 0.0010138, Val Loss: 0.0011454 +2025-02-24 20:29:16,627 Epoch 47/2000 +2025-02-24 20:34:22,847 Current Learning Rate: 0.0086981555 +2025-02-24 20:34:22,847 Train Loss: 0.0010530, Val Loss: 0.0011727 +2025-02-24 20:34:22,847 Epoch 48/2000 +2025-02-24 20:39:29,260 Current Learning Rate: 0.0086448431 +2025-02-24 20:39:29,319 Train Loss: 0.0010026, Val Loss: 0.0011379 +2025-02-24 20:39:29,319 Epoch 49/2000 +2025-02-24 20:44:35,284 Current Learning Rate: 
0.0085906315 +2025-02-24 20:44:35,364 Train Loss: 0.0009755, Val Loss: 0.0011105 +2025-02-24 20:44:35,364 Epoch 50/2000 +2025-02-24 20:49:40,594 Current Learning Rate: 0.0085355339 +2025-02-24 20:49:40,654 Train Loss: 0.0009610, Val Loss: 0.0010918 +2025-02-24 20:49:40,655 Epoch 51/2000 +2025-02-24 20:54:45,550 Current Learning Rate: 0.0084795640 +2025-02-24 20:54:45,614 Train Loss: 0.0009483, Val Loss: 0.0010876 +2025-02-24 20:54:45,615 Epoch 52/2000 +2025-02-24 20:59:50,848 Current Learning Rate: 0.0084227355 +2025-02-24 20:59:50,903 Train Loss: 0.0009375, Val Loss: 0.0010803 +2025-02-24 20:59:50,903 Epoch 53/2000 +2025-02-24 21:04:55,869 Current Learning Rate: 0.0083650626 +2025-02-24 21:04:55,943 Train Loss: 0.0009282, Val Loss: 0.0010775 +2025-02-24 21:04:55,943 Epoch 54/2000 +2025-02-24 21:10:00,999 Current Learning Rate: 0.0083065593 +2025-02-24 21:10:01,058 Train Loss: 0.0009249, Val Loss: 0.0010669 +2025-02-24 21:10:01,058 Epoch 55/2000 +2025-02-24 21:15:06,237 Current Learning Rate: 0.0082472402 +2025-02-24 21:15:06,238 Train Loss: 0.0009233, Val Loss: 0.0010795 +2025-02-24 21:15:06,238 Epoch 56/2000 +2025-02-24 21:20:11,958 Current Learning Rate: 0.0081871199 +2025-02-24 21:20:12,021 Train Loss: 0.0009207, Val Loss: 0.0010579 +2025-02-24 21:20:12,021 Epoch 57/2000 +2025-02-24 21:25:18,260 Current Learning Rate: 0.0081262133 +2025-02-24 21:25:18,261 Train Loss: 0.0009505, Val Loss: 0.0010594 +2025-02-24 21:25:18,261 Epoch 58/2000 +2025-02-24 21:30:23,943 Current Learning Rate: 0.0080645353 +2025-02-24 21:30:23,998 Train Loss: 0.0009072, Val Loss: 0.0010465 +2025-02-24 21:30:23,998 Epoch 59/2000 +2025-02-24 21:35:29,306 Current Learning Rate: 0.0080021011 +2025-02-24 21:35:29,363 Train Loss: 0.0008878, Val Loss: 0.0010366 +2025-02-24 21:35:29,364 Epoch 60/2000 +2025-02-24 21:40:34,606 Current Learning Rate: 0.0079389263 +2025-02-24 21:40:34,658 Train Loss: 0.0008695, Val Loss: 0.0010209 +2025-02-24 21:40:34,659 Epoch 61/2000 +2025-02-24 21:45:39,990 
Current Learning Rate: 0.0078750263 +2025-02-24 21:45:40,048 Train Loss: 0.0008593, Val Loss: 0.0010130 +2025-02-24 21:45:40,048 Epoch 62/2000 +2025-02-24 21:50:45,728 Current Learning Rate: 0.0078104169 +2025-02-24 21:50:45,811 Train Loss: 0.0008533, Val Loss: 0.0010058 +2025-02-24 21:50:45,811 Epoch 63/2000 +2025-02-24 21:55:51,182 Current Learning Rate: 0.0077451141 +2025-02-24 21:55:51,234 Train Loss: 0.0008512, Val Loss: 0.0010009 +2025-02-24 21:55:51,235 Epoch 64/2000 +2025-02-24 22:00:57,012 Current Learning Rate: 0.0076791340 +2025-02-24 22:00:57,012 Train Loss: 0.0008796, Val Loss: 0.0013906 +2025-02-24 22:00:57,013 Epoch 65/2000 +2025-02-24 22:06:01,851 Current Learning Rate: 0.0076124928 +2025-02-24 22:06:01,852 Train Loss: 0.0008711, Val Loss: 0.0010037 +2025-02-24 22:06:01,852 Epoch 66/2000 +2025-02-24 22:11:07,167 Current Learning Rate: 0.0075452071 +2025-02-24 22:11:07,167 Train Loss: 0.0008401, Val Loss: 0.0010026 +2025-02-24 22:11:07,167 Epoch 67/2000 +2025-02-24 22:16:11,746 Current Learning Rate: 0.0074772933 +2025-02-24 22:16:11,804 Train Loss: 0.0008362, Val Loss: 0.0009964 +2025-02-24 22:16:11,805 Epoch 68/2000 +2025-02-24 22:21:15,801 Current Learning Rate: 0.0074087684 +2025-02-24 22:21:15,866 Train Loss: 0.0008307, Val Loss: 0.0009897 +2025-02-24 22:21:15,867 Epoch 69/2000 +2025-02-24 22:26:19,957 Current Learning Rate: 0.0073396491 +2025-02-24 22:26:20,008 Train Loss: 0.0008219, Val Loss: 0.0009823 +2025-02-24 22:26:20,008 Epoch 70/2000 +2025-02-24 22:31:24,979 Current Learning Rate: 0.0072699525 +2025-02-24 22:31:25,041 Train Loss: 0.0008071, Val Loss: 0.0009654 +2025-02-24 22:31:25,042 Epoch 71/2000 +2025-02-24 22:36:29,659 Current Learning Rate: 0.0071996958 +2025-02-24 22:36:29,715 Train Loss: 0.0007985, Val Loss: 0.0009583 +2025-02-24 22:36:29,716 Epoch 72/2000 +2025-02-24 22:41:34,923 Current Learning Rate: 0.0071288965 +2025-02-24 22:41:34,982 Train Loss: 0.0007928, Val Loss: 0.0009536 +2025-02-24 22:41:34,982 Epoch 73/2000 
+2025-02-24 22:46:39,952 Current Learning Rate: 0.0070575718 +2025-02-24 22:46:40,006 Train Loss: 0.0007888, Val Loss: 0.0009536 +2025-02-24 22:46:40,007 Epoch 74/2000 +2025-02-24 22:51:45,399 Current Learning Rate: 0.0069857395 +2025-02-24 22:51:45,399 Train Loss: 0.0007892, Val Loss: 0.0009659 +2025-02-24 22:51:45,400 Epoch 75/2000 +2025-02-24 22:56:50,287 Current Learning Rate: 0.0069134172 +2025-02-24 22:56:50,339 Train Loss: 0.0007879, Val Loss: 0.0009508 +2025-02-24 22:56:50,339 Epoch 76/2000 +2025-02-24 23:01:55,173 Current Learning Rate: 0.0068406228 +2025-02-24 23:01:55,174 Train Loss: 0.0007834, Val Loss: 0.0009604 +2025-02-24 23:01:55,174 Epoch 77/2000 +2025-02-24 23:06:59,862 Current Learning Rate: 0.0067673742 +2025-02-24 23:06:59,862 Train Loss: 0.0008116, Val Loss: 0.0010532 +2025-02-24 23:06:59,863 Epoch 78/2000 +2025-02-24 23:12:04,739 Current Learning Rate: 0.0066936896 +2025-02-24 23:12:04,808 Train Loss: 0.0008011, Val Loss: 0.0009420 +2025-02-24 23:12:04,808 Epoch 79/2000 +2025-02-24 23:17:10,210 Current Learning Rate: 0.0066195871 +2025-02-24 23:17:10,270 Train Loss: 0.0007750, Val Loss: 0.0009363 +2025-02-24 23:17:10,270 Epoch 80/2000 +2025-02-24 23:22:14,993 Current Learning Rate: 0.0065450850 +2025-02-24 23:22:15,052 Train Loss: 0.0007616, Val Loss: 0.0009246 +2025-02-24 23:22:15,052 Epoch 81/2000 +2025-02-24 23:27:19,983 Current Learning Rate: 0.0064702016 +2025-02-24 23:27:20,050 Train Loss: 0.0007488, Val Loss: 0.0009083 +2025-02-24 23:27:20,050 Epoch 82/2000 +2025-02-24 23:32:25,371 Current Learning Rate: 0.0063949555 +2025-02-24 23:32:25,428 Train Loss: 0.0007423, Val Loss: 0.0009001 +2025-02-24 23:32:25,428 Epoch 83/2000 +2025-02-24 23:37:29,943 Current Learning Rate: 0.0063193652 +2025-02-24 23:37:30,092 Train Loss: 0.0007379, Val Loss: 0.0008931 +2025-02-24 23:37:30,092 Epoch 84/2000 +2025-02-24 23:42:34,864 Current Learning Rate: 0.0062434494 +2025-02-24 23:42:34,864 Train Loss: 0.0007346, Val Loss: 0.0008947 +2025-02-24 
23:42:34,864 Epoch 85/2000 +2025-02-24 23:47:39,851 Current Learning Rate: 0.0061672268 +2025-02-24 23:47:39,852 Train Loss: 0.0007349, Val Loss: 0.0008986 +2025-02-24 23:47:39,852 Epoch 86/2000 +2025-02-24 23:52:45,004 Current Learning Rate: 0.0060907162 +2025-02-24 23:52:45,005 Train Loss: 0.0007334, Val Loss: 0.0009059 +2025-02-24 23:52:45,005 Epoch 87/2000 +2025-02-24 23:57:50,461 Current Learning Rate: 0.0060139365 +2025-02-24 23:57:50,461 Train Loss: 0.0007310, Val Loss: 0.0009001 +2025-02-24 23:57:50,462 Epoch 88/2000 +2025-02-25 00:02:55,672 Current Learning Rate: 0.0059369066 +2025-02-25 00:02:55,727 Train Loss: 0.0007323, Val Loss: 0.0008906 +2025-02-25 00:02:55,728 Epoch 89/2000 +2025-02-25 00:08:02,220 Current Learning Rate: 0.0058596455 +2025-02-25 00:08:02,221 Train Loss: 0.0007313, Val Loss: 0.0008926 +2025-02-25 00:08:02,221 Epoch 90/2000 +2025-02-25 00:13:07,458 Current Learning Rate: 0.0057821723 +2025-02-25 00:13:07,525 Train Loss: 0.0007255, Val Loss: 0.0008833 +2025-02-25 00:13:07,526 Epoch 91/2000 +2025-02-25 00:18:12,456 Current Learning Rate: 0.0057045062 +2025-02-25 00:18:12,631 Train Loss: 0.0007171, Val Loss: 0.0008724 +2025-02-25 00:18:12,631 Epoch 92/2000 +2025-02-25 00:23:17,285 Current Learning Rate: 0.0056266662 +2025-02-25 00:23:17,285 Train Loss: 0.0007076, Val Loss: 0.0008813 +2025-02-25 00:23:17,285 Epoch 93/2000 +2025-02-25 00:28:22,060 Current Learning Rate: 0.0055486716 +2025-02-25 00:28:22,117 Train Loss: 0.0007008, Val Loss: 0.0008616 +2025-02-25 00:28:22,117 Epoch 94/2000 +2025-02-25 00:33:28,113 Current Learning Rate: 0.0054705416 +2025-02-25 00:33:28,176 Train Loss: 0.0006961, Val Loss: 0.0008613 +2025-02-25 00:33:28,176 Epoch 95/2000 +2025-02-25 00:38:33,709 Current Learning Rate: 0.0053922955 +2025-02-25 00:38:33,709 Train Loss: 0.0006937, Val Loss: 0.0008745 +2025-02-25 00:38:33,710 Epoch 96/2000 +2025-02-25 00:43:39,902 Current Learning Rate: 0.0053139526 +2025-02-25 00:43:39,960 Train Loss: 0.0006931, Val Loss: 
0.0008569 +2025-02-25 00:43:39,961 Epoch 97/2000 +2025-02-25 00:48:46,015 Current Learning Rate: 0.0052355323 +2025-02-25 00:48:46,083 Train Loss: 0.0006933, Val Loss: 0.0008546 +2025-02-25 00:48:46,083 Epoch 98/2000 +2025-02-25 00:53:51,171 Current Learning Rate: 0.0051570538 +2025-02-25 00:53:51,228 Train Loss: 0.0006960, Val Loss: 0.0008536 +2025-02-25 00:53:51,228 Epoch 99/2000 +2025-02-25 00:58:56,321 Current Learning Rate: 0.0050785366 +2025-02-25 00:58:56,386 Train Loss: 0.0006997, Val Loss: 0.0008528 +2025-02-25 00:58:56,386 Epoch 100/2000 +2025-02-25 01:04:02,596 Current Learning Rate: 0.0050000000 +2025-02-25 01:04:06,262 Train Loss: 0.0006968, Val Loss: 0.0008492 +2025-02-25 01:04:06,263 Epoch 101/2000 +2025-02-25 01:09:11,120 Current Learning Rate: 0.0049214634 +2025-02-25 01:09:11,184 Train Loss: 0.0006866, Val Loss: 0.0008345 +2025-02-25 01:09:11,184 Epoch 102/2000 +2025-02-25 01:14:16,655 Current Learning Rate: 0.0048429462 +2025-02-25 01:14:16,711 Train Loss: 0.0006751, Val Loss: 0.0008283 +2025-02-25 01:14:16,711 Epoch 103/2000 +2025-02-25 01:19:21,843 Current Learning Rate: 0.0047644677 +2025-02-25 01:19:21,913 Train Loss: 0.0006671, Val Loss: 0.0008256 +2025-02-25 01:19:21,913 Epoch 104/2000 +2025-02-25 01:24:27,706 Current Learning Rate: 0.0046860474 +2025-02-25 01:24:27,706 Train Loss: 0.0006623, Val Loss: 0.0008259 +2025-02-25 01:24:27,707 Epoch 105/2000 +2025-02-25 01:29:34,154 Current Learning Rate: 0.0046077045 +2025-02-25 01:29:34,154 Train Loss: 0.0006592, Val Loss: 0.0008260 +2025-02-25 01:29:34,155 Epoch 106/2000 +2025-02-25 01:34:40,839 Current Learning Rate: 0.0045294584 +2025-02-25 01:34:40,916 Train Loss: 0.0006568, Val Loss: 0.0008223 +2025-02-25 01:34:40,916 Epoch 107/2000 +2025-02-25 01:39:47,618 Current Learning Rate: 0.0044513284 +2025-02-25 01:39:47,619 Train Loss: 0.0006570, Val Loss: 0.0008245 +2025-02-25 01:39:47,619 Epoch 108/2000 +2025-02-25 01:44:53,702 Current Learning Rate: 0.0043733338 +2025-02-25 01:44:53,765 Train 
Loss: 0.0006581, Val Loss: 0.0008159 +2025-02-25 01:44:53,765 Epoch 109/2000 +2025-02-25 01:49:59,724 Current Learning Rate: 0.0042954938 +2025-02-25 01:49:59,725 Train Loss: 0.0006584, Val Loss: 0.0008177 +2025-02-25 01:49:59,725 Epoch 110/2000 +2025-02-25 01:55:06,315 Current Learning Rate: 0.0042178277 +2025-02-25 01:55:06,372 Train Loss: 0.0006586, Val Loss: 0.0008147 +2025-02-25 01:55:06,372 Epoch 111/2000 +2025-02-25 02:00:11,633 Current Learning Rate: 0.0041403545 +2025-02-25 02:00:11,690 Train Loss: 0.0006572, Val Loss: 0.0008100 +2025-02-25 02:00:11,690 Epoch 112/2000 +2025-02-25 02:05:17,119 Current Learning Rate: 0.0040630934 +2025-02-25 02:05:17,337 Train Loss: 0.0006538, Val Loss: 0.0008048 +2025-02-25 02:05:17,338 Epoch 113/2000 +2025-02-25 02:10:23,422 Current Learning Rate: 0.0039860635 +2025-02-25 02:10:23,494 Train Loss: 0.0006486, Val Loss: 0.0007998 +2025-02-25 02:10:23,494 Epoch 114/2000 +2025-02-25 02:15:29,413 Current Learning Rate: 0.0039092838 +2025-02-25 02:15:29,473 Train Loss: 0.0006426, Val Loss: 0.0007954 +2025-02-25 02:15:29,473 Epoch 115/2000 +2025-02-25 02:20:35,524 Current Learning Rate: 0.0038327732 +2025-02-25 02:20:35,583 Train Loss: 0.0006375, Val Loss: 0.0007939 +2025-02-25 02:20:35,584 Epoch 116/2000 +2025-02-25 02:25:41,221 Current Learning Rate: 0.0037565506 +2025-02-25 02:25:41,287 Train Loss: 0.0006333, Val Loss: 0.0007926 +2025-02-25 02:25:41,288 Epoch 117/2000 +2025-02-25 02:30:47,220 Current Learning Rate: 0.0036806348 +2025-02-25 02:30:47,281 Train Loss: 0.0006304, Val Loss: 0.0007906 +2025-02-25 02:30:47,281 Epoch 118/2000 +2025-02-25 02:35:52,689 Current Learning Rate: 0.0036050445 +2025-02-25 02:35:52,761 Train Loss: 0.0006285, Val Loss: 0.0007860 +2025-02-25 02:35:52,761 Epoch 119/2000 +2025-02-25 02:40:58,776 Current Learning Rate: 0.0035297984 +2025-02-25 02:40:58,907 Train Loss: 0.0006283, Val Loss: 0.0007851 +2025-02-25 02:40:58,907 Epoch 120/2000 +2025-02-25 02:46:03,769 Current Learning Rate: 0.0034549150 
+2025-02-25 02:46:04,243 Train Loss: 0.0006296, Val Loss: 0.0007841 +2025-02-25 02:46:04,244 Epoch 121/2000 +2025-02-25 02:51:09,203 Current Learning Rate: 0.0033804129 +2025-02-25 02:51:09,203 Train Loss: 0.0006329, Val Loss: 0.0007877 +2025-02-25 02:51:09,203 Epoch 122/2000 +2025-02-25 02:56:14,848 Current Learning Rate: 0.0033063104 +2025-02-25 02:56:14,916 Train Loss: 0.0006340, Val Loss: 0.0007840 +2025-02-25 02:56:14,916 Epoch 123/2000 +2025-02-25 03:01:20,488 Current Learning Rate: 0.0032326258 +2025-02-25 03:01:20,543 Train Loss: 0.0006272, Val Loss: 0.0007773 +2025-02-25 03:01:20,543 Epoch 124/2000 +2025-02-25 03:06:24,894 Current Learning Rate: 0.0031593772 +2025-02-25 03:06:24,956 Train Loss: 0.0006200, Val Loss: 0.0007725 +2025-02-25 03:06:24,956 Epoch 125/2000 +2025-02-25 03:11:30,295 Current Learning Rate: 0.0030865828 +2025-02-25 03:11:30,355 Train Loss: 0.0006140, Val Loss: 0.0007692 +2025-02-25 03:11:30,355 Epoch 126/2000 +2025-02-25 03:16:35,863 Current Learning Rate: 0.0030142605 +2025-02-25 03:16:35,927 Train Loss: 0.0006099, Val Loss: 0.0007660 +2025-02-25 03:16:35,928 Epoch 127/2000 +2025-02-25 03:21:41,776 Current Learning Rate: 0.0029424282 +2025-02-25 03:21:41,864 Train Loss: 0.0006068, Val Loss: 0.0007634 +2025-02-25 03:21:41,864 Epoch 128/2000 +2025-02-25 03:26:47,454 Current Learning Rate: 0.0028711035 +2025-02-25 03:26:47,511 Train Loss: 0.0006044, Val Loss: 0.0007612 +2025-02-25 03:26:47,512 Epoch 129/2000 +2025-02-25 03:31:52,479 Current Learning Rate: 0.0028003042 +2025-02-25 03:31:52,536 Train Loss: 0.0006026, Val Loss: 0.0007600 +2025-02-25 03:31:52,537 Epoch 130/2000 +2025-02-25 03:36:57,834 Current Learning Rate: 0.0027300475 +2025-02-25 03:36:57,925 Train Loss: 0.0006018, Val Loss: 0.0007597 +2025-02-25 03:36:57,925 Epoch 131/2000 +2025-02-25 03:42:03,366 Current Learning Rate: 0.0026603509 +2025-02-25 03:42:04,128 Train Loss: 0.0006029, Val Loss: 0.0007589 +2025-02-25 03:42:04,129 Epoch 132/2000 +2025-02-25 03:47:10,508 Current 
Learning Rate: 0.0025912316 +2025-02-25 03:47:10,508 Train Loss: 0.0006039, Val Loss: 0.0007650 +2025-02-25 03:47:10,509 Epoch 133/2000 +2025-02-25 03:52:16,200 Current Learning Rate: 0.0025227067 +2025-02-25 03:52:16,201 Train Loss: 0.0006044, Val Loss: 0.0007670 +2025-02-25 03:52:16,201 Epoch 134/2000 +2025-02-25 03:57:22,534 Current Learning Rate: 0.0024547929 +2025-02-25 03:57:22,535 Train Loss: 0.0006022, Val Loss: 0.0007629 +2025-02-25 03:57:22,535 Epoch 135/2000 +2025-02-25 04:02:27,775 Current Learning Rate: 0.0023875072 +2025-02-25 04:02:27,776 Train Loss: 0.0005988, Val Loss: 0.0007603 +2025-02-25 04:02:27,776 Epoch 136/2000 +2025-02-25 04:07:32,398 Current Learning Rate: 0.0023208660 +2025-02-25 04:07:32,456 Train Loss: 0.0005954, Val Loss: 0.0007573 +2025-02-25 04:07:32,456 Epoch 137/2000 +2025-02-25 04:12:37,505 Current Learning Rate: 0.0022548859 +2025-02-25 04:12:37,569 Train Loss: 0.0005925, Val Loss: 0.0007539 +2025-02-25 04:12:37,569 Epoch 138/2000 +2025-02-25 04:17:42,476 Current Learning Rate: 0.0021895831 +2025-02-25 04:17:42,629 Train Loss: 0.0005900, Val Loss: 0.0007516 +2025-02-25 04:17:42,629 Epoch 139/2000 +2025-02-25 04:22:47,845 Current Learning Rate: 0.0021249737 +2025-02-25 04:22:47,900 Train Loss: 0.0005881, Val Loss: 0.0007513 +2025-02-25 04:22:47,900 Epoch 140/2000 +2025-02-25 04:27:53,453 Current Learning Rate: 0.0020610737 +2025-02-25 04:27:53,454 Train Loss: 0.0005869, Val Loss: 0.0007524 +2025-02-25 04:27:53,454 Epoch 141/2000 +2025-02-25 04:32:59,701 Current Learning Rate: 0.0019978989 +2025-02-25 04:32:59,752 Train Loss: 0.0005865, Val Loss: 0.0007481 +2025-02-25 04:32:59,752 Epoch 142/2000 +2025-02-25 04:38:05,052 Current Learning Rate: 0.0019354647 +2025-02-25 04:38:05,107 Train Loss: 0.0005871, Val Loss: 0.0007451 +2025-02-25 04:38:05,108 Epoch 143/2000 +2025-02-25 04:43:09,652 Current Learning Rate: 0.0018737867 +2025-02-25 04:43:09,723 Train Loss: 0.0005856, Val Loss: 0.0007448 +2025-02-25 04:43:09,723 Epoch 144/2000 
+2025-02-25 04:48:14,548 Current Learning Rate: 0.0018128801 +2025-02-25 04:48:14,602 Train Loss: 0.0005828, Val Loss: 0.0007436 +2025-02-25 04:48:14,602 Epoch 145/2000 +2025-02-25 04:53:19,267 Current Learning Rate: 0.0017527598 +2025-02-25 04:53:19,321 Train Loss: 0.0005801, Val Loss: 0.0007411 +2025-02-25 04:53:19,322 Epoch 146/2000 +2025-02-25 04:58:24,489 Current Learning Rate: 0.0016934407 +2025-02-25 04:58:24,545 Train Loss: 0.0005777, Val Loss: 0.0007388 +2025-02-25 04:58:24,545 Epoch 147/2000 +2025-02-25 05:03:28,439 Current Learning Rate: 0.0016349374 +2025-02-25 05:03:28,498 Train Loss: 0.0005756, Val Loss: 0.0007366 +2025-02-25 05:03:28,498 Epoch 148/2000 +2025-02-25 05:08:33,781 Current Learning Rate: 0.0015772645 +2025-02-25 05:08:33,848 Train Loss: 0.0005740, Val Loss: 0.0007345 +2025-02-25 05:08:33,848 Epoch 149/2000 +2025-02-25 05:13:38,582 Current Learning Rate: 0.0015204360 +2025-02-25 05:13:38,638 Train Loss: 0.0005728, Val Loss: 0.0007327 +2025-02-25 05:13:38,638 Epoch 150/2000 +2025-02-25 05:18:44,455 Current Learning Rate: 0.0014644661 +2025-02-25 05:18:44,512 Train Loss: 0.0005720, Val Loss: 0.0007310 +2025-02-25 05:18:44,512 Epoch 151/2000 +2025-02-25 05:23:50,728 Current Learning Rate: 0.0014093685 +2025-02-25 05:23:50,805 Train Loss: 0.0005714, Val Loss: 0.0007283 +2025-02-25 05:23:50,806 Epoch 152/2000 +2025-02-25 05:28:56,722 Current Learning Rate: 0.0013551569 +2025-02-25 05:28:56,787 Train Loss: 0.0005705, Val Loss: 0.0007264 +2025-02-25 05:28:56,787 Epoch 153/2000 +2025-02-25 05:34:02,031 Current Learning Rate: 0.0013018445 +2025-02-25 05:34:02,109 Train Loss: 0.0005689, Val Loss: 0.0007241 +2025-02-25 05:34:02,109 Epoch 154/2000 +2025-02-25 05:39:07,470 Current Learning Rate: 0.0012494447 +2025-02-25 05:39:07,530 Train Loss: 0.0005672, Val Loss: 0.0007218 +2025-02-25 05:39:07,531 Epoch 155/2000 +2025-02-25 05:44:14,395 Current Learning Rate: 0.0011979702 +2025-02-25 05:44:14,460 Train Loss: 0.0005657, Val Loss: 0.0007204 +2025-02-25 
05:44:14,460 Epoch 156/2000 +2025-02-25 05:49:19,666 Current Learning Rate: 0.0011474338 +2025-02-25 05:49:19,724 Train Loss: 0.0005643, Val Loss: 0.0007199 +2025-02-25 05:49:19,724 Epoch 157/2000 +2025-02-25 05:54:25,831 Current Learning Rate: 0.0010978480 +2025-02-25 05:54:25,891 Train Loss: 0.0005630, Val Loss: 0.0007197 +2025-02-25 05:54:25,891 Epoch 158/2000 +2025-02-25 05:59:30,649 Current Learning Rate: 0.0010492249 +2025-02-25 05:59:30,706 Train Loss: 0.0005617, Val Loss: 0.0007186 +2025-02-25 05:59:30,707 Epoch 159/2000 +2025-02-25 06:04:35,043 Current Learning Rate: 0.0010015767 +2025-02-25 06:04:35,100 Train Loss: 0.0005604, Val Loss: 0.0007177 +2025-02-25 06:04:35,100 Epoch 160/2000 +2025-02-25 06:09:39,945 Current Learning Rate: 0.0009549150 +2025-02-25 06:09:40,007 Train Loss: 0.0005592, Val Loss: 0.0007171 +2025-02-25 06:09:40,007 Epoch 161/2000 +2025-02-25 06:14:44,757 Current Learning Rate: 0.0009092514 +2025-02-25 06:14:44,812 Train Loss: 0.0005581, Val Loss: 0.0007164 +2025-02-25 06:14:44,813 Epoch 162/2000 +2025-02-25 06:19:49,165 Current Learning Rate: 0.0008645971 +2025-02-25 06:19:49,230 Train Loss: 0.0005570, Val Loss: 0.0007158 +2025-02-25 06:19:49,230 Epoch 163/2000 +2025-02-25 06:24:54,323 Current Learning Rate: 0.0008209632 +2025-02-25 06:24:54,386 Train Loss: 0.0005561, Val Loss: 0.0007147 +2025-02-25 06:24:54,387 Epoch 164/2000 +2025-02-25 06:30:00,982 Current Learning Rate: 0.0007783604 +2025-02-25 06:30:01,039 Train Loss: 0.0005551, Val Loss: 0.0007130 +2025-02-25 06:30:01,039 Epoch 165/2000 +2025-02-25 06:35:06,679 Current Learning Rate: 0.0007367992 +2025-02-25 06:35:06,749 Train Loss: 0.0005540, Val Loss: 0.0007105 +2025-02-25 06:35:06,749 Epoch 166/2000 +2025-02-25 06:40:12,089 Current Learning Rate: 0.0006962899 +2025-02-25 06:40:12,149 Train Loss: 0.0005529, Val Loss: 0.0007085 +2025-02-25 06:40:12,150 Epoch 167/2000 +2025-02-25 06:45:18,481 Current Learning Rate: 0.0006568424 +2025-02-25 06:45:18,552 Train Loss: 0.0005518, Val 
Loss: 0.0007069 +2025-02-25 06:45:18,553 Epoch 168/2000 +2025-02-25 06:50:24,337 Current Learning Rate: 0.0006184666 +2025-02-25 06:50:24,392 Train Loss: 0.0005508, Val Loss: 0.0007055 +2025-02-25 06:50:24,393 Epoch 169/2000 +2025-02-25 06:55:29,783 Current Learning Rate: 0.0005811718 +2025-02-25 06:55:29,850 Train Loss: 0.0005499, Val Loss: 0.0007043 +2025-02-25 06:55:29,850 Epoch 170/2000 +2025-02-25 07:00:35,993 Current Learning Rate: 0.0005449674 +2025-02-25 07:00:36,050 Train Loss: 0.0005490, Val Loss: 0.0007037 +2025-02-25 07:00:36,051 Epoch 171/2000 +2025-02-25 07:05:41,739 Current Learning Rate: 0.0005098621 +2025-02-25 07:05:41,799 Train Loss: 0.0005482, Val Loss: 0.0007032 +2025-02-25 07:05:41,799 Epoch 172/2000 +2025-02-25 07:10:47,510 Current Learning Rate: 0.0004758647 +2025-02-25 07:10:47,567 Train Loss: 0.0005473, Val Loss: 0.0007025 +2025-02-25 07:10:47,567 Epoch 173/2000 +2025-02-25 07:15:53,007 Current Learning Rate: 0.0004429836 +2025-02-25 07:15:53,081 Train Loss: 0.0005465, Val Loss: 0.0007012 +2025-02-25 07:15:53,081 Epoch 174/2000 +2025-02-25 07:20:58,411 Current Learning Rate: 0.0004112269 +2025-02-25 07:20:58,464 Train Loss: 0.0005457, Val Loss: 0.0006999 +2025-02-25 07:20:58,464 Epoch 175/2000 +2025-02-25 07:26:03,259 Current Learning Rate: 0.0003806023 +2025-02-25 07:26:03,316 Train Loss: 0.0005449, Val Loss: 0.0006990 +2025-02-25 07:26:03,317 Epoch 176/2000 +2025-02-25 07:31:08,177 Current Learning Rate: 0.0003511176 +2025-02-25 07:31:08,231 Train Loss: 0.0005442, Val Loss: 0.0006983 +2025-02-25 07:31:08,231 Epoch 177/2000 +2025-02-25 07:36:12,810 Current Learning Rate: 0.0003227798 +2025-02-25 07:36:12,870 Train Loss: 0.0005435, Val Loss: 0.0006978 +2025-02-25 07:36:12,870 Epoch 178/2000 +2025-02-25 07:41:17,367 Current Learning Rate: 0.0002955962 +2025-02-25 07:41:17,426 Train Loss: 0.0005429, Val Loss: 0.0006973 +2025-02-25 07:41:17,427 Epoch 179/2000 +2025-02-25 07:46:22,463 Current Learning Rate: 0.0002695732 +2025-02-25 
07:46:22,523 Train Loss: 0.0005423, Val Loss: 0.0006968 +2025-02-25 07:46:22,523 Epoch 180/2000 +2025-02-25 07:51:26,827 Current Learning Rate: 0.0002447174 +2025-02-25 07:51:26,883 Train Loss: 0.0005417, Val Loss: 0.0006964 +2025-02-25 07:51:26,883 Epoch 181/2000 +2025-02-25 07:56:31,691 Current Learning Rate: 0.0002210349 +2025-02-25 07:56:31,750 Train Loss: 0.0005411, Val Loss: 0.0006960 +2025-02-25 07:56:31,750 Epoch 182/2000 +2025-02-25 08:01:37,124 Current Learning Rate: 0.0001985316 +2025-02-25 08:01:37,182 Train Loss: 0.0005406, Val Loss: 0.0006958 +2025-02-25 08:01:37,183 Epoch 183/2000 +2025-02-25 08:06:41,537 Current Learning Rate: 0.0001772129 +2025-02-25 08:06:41,594 Train Loss: 0.0005401, Val Loss: 0.0006956 +2025-02-25 08:06:41,594 Epoch 184/2000 +2025-02-25 08:11:46,492 Current Learning Rate: 0.0001570842 +2025-02-25 08:11:46,551 Train Loss: 0.0005397, Val Loss: 0.0006952 +2025-02-25 08:11:46,551 Epoch 185/2000 +2025-02-25 08:16:51,662 Current Learning Rate: 0.0001381504 +2025-02-25 08:16:51,722 Train Loss: 0.0005392, Val Loss: 0.0006947 +2025-02-25 08:16:51,722 Epoch 186/2000 +2025-02-25 08:21:57,333 Current Learning Rate: 0.0001204162 +2025-02-25 08:21:57,413 Train Loss: 0.0005388, Val Loss: 0.0006943 +2025-02-25 08:21:57,413 Epoch 187/2000 +2025-02-25 08:27:02,948 Current Learning Rate: 0.0001038859 +2025-02-25 08:27:03,009 Train Loss: 0.0005384, Val Loss: 0.0006942 +2025-02-25 08:27:03,009 Epoch 188/2000 +2025-02-25 08:32:08,815 Current Learning Rate: 0.0000885637 +2025-02-25 08:32:08,815 Train Loss: 0.0005380, Val Loss: 0.0006943 +2025-02-25 08:32:08,816 Epoch 189/2000 +2025-02-25 08:37:14,825 Current Learning Rate: 0.0000744534 +2025-02-25 08:37:14,896 Train Loss: 0.0005377, Val Loss: 0.0006941 +2025-02-25 08:37:14,896 Epoch 190/2000 +2025-02-25 08:42:20,675 Current Learning Rate: 0.0000615583 +2025-02-25 08:42:20,739 Train Loss: 0.0005374, Val Loss: 0.0006937 +2025-02-25 08:42:20,739 Epoch 191/2000 +2025-02-25 08:47:25,262 Current Learning 
Rate: 0.0000498817 +2025-02-25 08:47:25,324 Train Loss: 0.0005371, Val Loss: 0.0006932 +2025-02-25 08:47:25,325 Epoch 192/2000 +2025-02-25 08:52:30,429 Current Learning Rate: 0.0000394265 +2025-02-25 08:52:30,491 Train Loss: 0.0005369, Val Loss: 0.0006928 +2025-02-25 08:52:30,491 Epoch 193/2000 +2025-02-25 08:57:35,196 Current Learning Rate: 0.0000301952 +2025-02-25 08:57:35,258 Train Loss: 0.0005367, Val Loss: 0.0006925 +2025-02-25 08:57:35,258 Epoch 194/2000 +2025-02-25 09:02:39,965 Current Learning Rate: 0.0000221902 +2025-02-25 09:02:40,021 Train Loss: 0.0005365, Val Loss: 0.0006923 +2025-02-25 09:02:40,021 Epoch 195/2000 +2025-02-25 09:07:44,508 Current Learning Rate: 0.0000154133 +2025-02-25 09:07:44,566 Train Loss: 0.0005363, Val Loss: 0.0006921 +2025-02-25 09:07:44,566 Epoch 196/2000 +2025-02-25 09:12:49,743 Current Learning Rate: 0.0000098664 +2025-02-25 09:12:49,798 Train Loss: 0.0005361, Val Loss: 0.0006920 +2025-02-25 09:12:49,798 Epoch 197/2000 +2025-02-25 09:17:54,664 Current Learning Rate: 0.0000055506 +2025-02-25 09:17:54,725 Train Loss: 0.0005360, Val Loss: 0.0006919 +2025-02-25 09:17:54,726 Epoch 198/2000 +2025-02-25 09:22:59,659 Current Learning Rate: 0.0000024672 +2025-02-25 09:22:59,719 Train Loss: 0.0005359, Val Loss: 0.0006919 +2025-02-25 09:22:59,719 Epoch 199/2000 +2025-02-25 09:28:04,930 Current Learning Rate: 0.0000006168 +2025-02-25 09:28:04,991 Train Loss: 0.0005358, Val Loss: 0.0006918 +2025-02-25 09:28:04,991 Epoch 200/2000 +2025-02-25 09:33:11,531 Current Learning Rate: 0.0000000000 +2025-02-25 09:33:11,594 Train Loss: 0.0005358, Val Loss: 0.0006918 +2025-02-25 09:33:11,594 Epoch 201/2000 +2025-02-25 09:41:44,975 Current Learning Rate: 0.0000006168 +2025-02-25 09:41:44,975 Train Loss: 0.0005357, Val Loss: 0.0006918 +2025-02-25 09:41:44,976 Epoch 202/2000 +2025-02-25 09:48:01,168 Current Learning Rate: 0.0000024672 +2025-02-25 09:48:01,238 Train Loss: 0.0005357, Val Loss: 0.0006917 +2025-02-25 09:48:01,238 Epoch 203/2000 +2025-02-25 
09:53:29,042 Current Learning Rate: 0.0000055506 +2025-02-25 09:53:29,043 Train Loss: 0.0005358, Val Loss: 0.0006918 +2025-02-25 09:53:29,043 Epoch 204/2000 +2025-02-25 10:02:47,841 Current Learning Rate: 0.0000098664 +2025-02-25 10:02:47,842 Train Loss: 0.0005359, Val Loss: 0.0006918 +2025-02-25 10:02:47,842 Epoch 205/2000 +2025-02-25 10:12:02,839 Current Learning Rate: 0.0000154133 +2025-02-25 10:12:02,839 Train Loss: 0.0005359, Val Loss: 0.0006919 +2025-02-25 10:12:02,840 Epoch 206/2000 +2025-02-25 10:22:13,938 Current Learning Rate: 0.0000221902 +2025-02-25 10:22:13,939 Train Loss: 0.0005360, Val Loss: 0.0006920 +2025-02-25 10:22:13,939 Epoch 207/2000 +2025-02-25 10:29:22,721 Current Learning Rate: 0.0000301952 +2025-02-25 10:29:22,721 Train Loss: 0.0005361, Val Loss: 0.0006921 +2025-02-25 10:29:22,721 Epoch 208/2000 +2025-02-25 10:39:26,862 Current Learning Rate: 0.0000394265 +2025-02-25 10:39:26,863 Train Loss: 0.0005362, Val Loss: 0.0006922 +2025-02-25 10:39:26,863 Epoch 209/2000 +2025-02-25 10:49:42,338 Current Learning Rate: 0.0000498817 +2025-02-25 10:49:42,338 Train Loss: 0.0005363, Val Loss: 0.0006923 +2025-02-25 10:49:42,339 Epoch 210/2000 +2025-02-25 11:00:03,401 Current Learning Rate: 0.0000615583 +2025-02-25 11:00:03,401 Train Loss: 0.0005365, Val Loss: 0.0006925 +2025-02-25 11:00:03,401 Epoch 211/2000 +2025-02-25 11:10:19,520 Current Learning Rate: 0.0000744534 +2025-02-25 11:10:19,521 Train Loss: 0.0005366, Val Loss: 0.0006929 +2025-02-25 11:10:19,521 Epoch 212/2000 +2025-02-25 11:20:35,681 Current Learning Rate: 0.0000885637 +2025-02-25 11:20:35,681 Train Loss: 0.0005367, Val Loss: 0.0006932 +2025-02-25 11:20:35,682 Epoch 213/2000 +2025-02-25 11:30:49,725 Current Learning Rate: 0.0001038859 +2025-02-25 11:30:49,726 Train Loss: 0.0005368, Val Loss: 0.0006933 +2025-02-25 11:30:49,726 Epoch 214/2000 +2025-02-25 11:40:55,549 Current Learning Rate: 0.0001204162 +2025-02-25 11:40:55,550 Train Loss: 0.0005369, Val Loss: 0.0006932 +2025-02-25 
11:40:55,550 Epoch 215/2000 +2025-02-25 11:50:59,950 Current Learning Rate: 0.0001381504 +2025-02-25 11:50:59,951 Train Loss: 0.0005371, Val Loss: 0.0006930 +2025-02-25 11:50:59,951 Epoch 216/2000 +2025-02-25 12:00:57,045 Current Learning Rate: 0.0001570842 +2025-02-25 12:00:57,045 Train Loss: 0.0005372, Val Loss: 0.0006930 +2025-02-25 12:00:57,045 Epoch 217/2000 +2025-02-25 12:11:00,822 Current Learning Rate: 0.0001772129 +2025-02-25 12:11:00,823 Train Loss: 0.0005373, Val Loss: 0.0006932 +2025-02-25 12:11:00,823 Epoch 218/2000 +2025-02-25 12:21:13,979 Current Learning Rate: 0.0001985316 +2025-02-25 12:21:13,979 Train Loss: 0.0005374, Val Loss: 0.0006933 +2025-02-25 12:21:13,980 Epoch 219/2000 +2025-02-25 12:31:04,962 Current Learning Rate: 0.0002210349 +2025-02-25 12:31:04,962 Train Loss: 0.0005375, Val Loss: 0.0006933 +2025-02-25 12:31:04,962 Epoch 220/2000 +2025-02-25 12:41:04,361 Current Learning Rate: 0.0002447174 +2025-02-25 12:41:04,361 Train Loss: 0.0005376, Val Loss: 0.0006932 +2025-02-25 12:41:04,362 Epoch 221/2000 +2025-02-25 12:51:14,675 Current Learning Rate: 0.0002695732 +2025-02-25 12:51:14,675 Train Loss: 0.0005377, Val Loss: 0.0006932 +2025-02-25 12:51:14,675 Epoch 222/2000 +2025-02-25 13:01:11,064 Current Learning Rate: 0.0002955962 +2025-02-25 13:01:11,065 Train Loss: 0.0005378, Val Loss: 0.0006933 +2025-02-25 13:01:11,065 Epoch 223/2000 +2025-02-25 13:11:02,817 Current Learning Rate: 0.0003227798 +2025-02-25 13:11:02,817 Train Loss: 0.0005379, Val Loss: 0.0006934 +2025-02-25 13:11:02,818 Epoch 224/2000 +2025-02-25 13:21:10,493 Current Learning Rate: 0.0003511176 +2025-02-25 13:21:10,493 Train Loss: 0.0005379, Val Loss: 0.0006933 +2025-02-25 13:21:10,494 Epoch 225/2000 +2025-02-25 13:31:15,852 Current Learning Rate: 0.0003806023 +2025-02-25 13:31:15,853 Train Loss: 0.0005380, Val Loss: 0.0006933 +2025-02-25 13:31:15,853 Epoch 226/2000 +2025-02-25 13:41:24,109 Current Learning Rate: 0.0004112269 +2025-02-25 13:41:24,110 Train Loss: 0.0005381, Val 
Loss: 0.0006933 +2025-02-25 13:41:24,110 Epoch 227/2000 +2025-02-25 13:51:31,384 Current Learning Rate: 0.0004429836 +2025-02-25 13:51:31,384 Train Loss: 0.0005382, Val Loss: 0.0006935 +2025-02-25 13:51:31,385 Epoch 228/2000 +2025-02-25 14:01:17,015 Current Learning Rate: 0.0004758647 +2025-02-25 14:01:17,015 Train Loss: 0.0005382, Val Loss: 0.0006936 +2025-02-25 14:01:17,015 Epoch 229/2000 +2025-02-25 14:11:18,696 Current Learning Rate: 0.0005098621 +2025-02-25 14:11:18,696 Train Loss: 0.0005383, Val Loss: 0.0006938 +2025-02-25 14:11:18,696 Epoch 230/2000 +2025-02-25 14:21:07,584 Current Learning Rate: 0.0005449674 +2025-02-25 14:21:07,584 Train Loss: 0.0005384, Val Loss: 0.0006941 +2025-02-25 14:21:07,584 Epoch 231/2000 +2025-02-25 14:31:01,121 Current Learning Rate: 0.0005811718 +2025-02-25 14:31:01,121 Train Loss: 0.0005384, Val Loss: 0.0006943 +2025-02-25 14:31:01,122 Epoch 232/2000 +2025-02-25 14:40:57,577 Current Learning Rate: 0.0006184666 +2025-02-25 14:40:57,577 Train Loss: 0.0005385, Val Loss: 0.0006945 +2025-02-25 14:40:57,578 Epoch 233/2000 +2025-02-25 14:50:57,873 Current Learning Rate: 0.0006568424 +2025-02-25 14:50:57,874 Train Loss: 0.0005386, Val Loss: 0.0006946 +2025-02-25 14:50:57,874 Epoch 234/2000 +2025-02-25 15:00:59,902 Current Learning Rate: 0.0006962899 +2025-02-25 15:00:59,903 Train Loss: 0.0005386, Val Loss: 0.0006947 +2025-02-25 15:00:59,903 Epoch 235/2000 +2025-02-25 15:11:01,763 Current Learning Rate: 0.0007367992 +2025-02-25 15:11:01,764 Train Loss: 0.0005386, Val Loss: 0.0006946 +2025-02-25 15:11:01,764 Epoch 236/2000 +2025-02-25 15:21:00,561 Current Learning Rate: 0.0007783604 +2025-02-25 15:21:00,562 Train Loss: 0.0005387, Val Loss: 0.0006945 +2025-02-25 15:21:00,562 Epoch 237/2000 +2025-02-25 15:30:59,138 Current Learning Rate: 0.0008209632 +2025-02-25 15:30:59,139 Train Loss: 0.0005387, Val Loss: 0.0006944 +2025-02-25 15:30:59,139 Epoch 238/2000 +2025-02-25 15:40:36,090 Current Learning Rate: 0.0008645971 +2025-02-25 
15:40:36,091 Train Loss: 0.0005388, Val Loss: 0.0006944 +2025-02-25 15:40:36,091 Epoch 239/2000 +2025-02-25 15:50:36,246 Current Learning Rate: 0.0009092514 +2025-02-25 15:50:36,246 Train Loss: 0.0005388, Val Loss: 0.0006946 +2025-02-25 15:50:36,246 Epoch 240/2000 +2025-02-25 16:00:08,769 Current Learning Rate: 0.0009549150 +2025-02-25 16:00:08,771 Train Loss: 0.0005388, Val Loss: 0.0006948 +2025-02-25 16:00:08,772 Epoch 241/2000 +2025-02-25 16:10:16,178 Current Learning Rate: 0.0010015767 +2025-02-25 16:10:16,179 Train Loss: 0.0005388, Val Loss: 0.0006949 +2025-02-25 16:10:16,179 Epoch 242/2000 +2025-02-25 16:20:28,495 Current Learning Rate: 0.0010492249 +2025-02-25 16:20:28,495 Train Loss: 0.0005388, Val Loss: 0.0006949 +2025-02-25 16:20:28,495 Epoch 243/2000 +2025-02-25 16:30:33,644 Current Learning Rate: 0.0010978480 +2025-02-25 16:30:33,645 Train Loss: 0.0005389, Val Loss: 0.0006948 +2025-02-25 16:30:33,645 Epoch 244/2000 +2025-02-25 16:40:29,615 Current Learning Rate: 0.0011474338 +2025-02-25 16:40:29,615 Train Loss: 0.0005389, Val Loss: 0.0006950 +2025-02-25 16:40:29,616 Epoch 245/2000 +2025-02-25 16:50:28,551 Current Learning Rate: 0.0011979702 +2025-02-25 16:50:28,552 Train Loss: 0.0005389, Val Loss: 0.0006947 +2025-02-25 16:50:28,552 Epoch 246/2000 +2025-02-25 17:00:07,080 Current Learning Rate: 0.0012494447 +2025-02-25 17:00:07,081 Train Loss: 0.0005389, Val Loss: 0.0006943 +2025-02-25 17:00:07,081 Epoch 247/2000 +2025-02-25 17:10:17,143 Current Learning Rate: 0.0013018445 +2025-02-25 17:10:17,144 Train Loss: 0.0005389, Val Loss: 0.0006939 +2025-02-25 17:10:17,144 Epoch 248/2000 +2025-02-25 17:20:21,425 Current Learning Rate: 0.0013551569 +2025-02-25 17:20:21,426 Train Loss: 0.0005389, Val Loss: 0.0006936 +2025-02-25 17:20:21,426 Epoch 249/2000 +2025-02-25 17:30:26,224 Current Learning Rate: 0.0014093685 +2025-02-25 17:30:26,224 Train Loss: 0.0005389, Val Loss: 0.0006934 +2025-02-25 17:30:26,224 Epoch 250/2000 +2025-02-25 17:40:17,853 Current Learning 
Rate: 0.0014644661 +2025-02-25 17:40:17,854 Train Loss: 0.0005389, Val Loss: 0.0006933 +2025-02-25 17:40:17,854 Epoch 251/2000 +2025-02-25 17:50:17,042 Current Learning Rate: 0.0015204360 +2025-02-25 17:50:17,043 Train Loss: 0.0005389, Val Loss: 0.0006933 +2025-02-25 17:50:17,044 Epoch 252/2000 +2025-02-25 18:00:11,649 Current Learning Rate: 0.0015772645 +2025-02-25 18:00:11,650 Train Loss: 0.0005389, Val Loss: 0.0006937 +2025-02-25 18:00:11,650 Epoch 253/2000 +2025-02-25 18:10:11,082 Current Learning Rate: 0.0016349374 +2025-02-25 18:10:11,083 Train Loss: 0.0005390, Val Loss: 0.0006940 +2025-02-25 18:10:11,083 Epoch 254/2000 +2025-02-25 18:19:18,649 Current Learning Rate: 0.0016934407 +2025-02-25 18:19:18,650 Train Loss: 0.0005390, Val Loss: 0.0006942 +2025-02-25 18:19:18,652 Epoch 255/2000 +2025-02-25 18:29:04,936 Current Learning Rate: 0.0017527598 +2025-02-25 18:29:04,936 Train Loss: 0.0005391, Val Loss: 0.0006944 +2025-02-25 18:29:04,937 Epoch 256/2000 +2025-02-25 18:38:21,031 Current Learning Rate: 0.0018128801 +2025-02-25 18:38:21,032 Train Loss: 0.0005391, Val Loss: 0.0006941 +2025-02-25 18:38:21,032 Epoch 257/2000 +2025-02-25 18:47:23,982 Current Learning Rate: 0.0018737867 +2025-02-25 18:47:23,982 Train Loss: 0.0005393, Val Loss: 0.0006943 +2025-02-25 18:47:23,983 Epoch 258/2000 +2025-02-25 18:57:19,923 Current Learning Rate: 0.0019354647 +2025-02-25 18:57:19,924 Train Loss: 0.0005395, Val Loss: 0.0006947 +2025-02-25 18:57:19,924 Epoch 259/2000 +2025-02-25 19:06:40,541 Current Learning Rate: 0.0019978989 +2025-02-25 19:06:40,541 Train Loss: 0.0005396, Val Loss: 0.0006942 +2025-02-25 19:06:40,542 Epoch 260/2000 +2025-02-25 19:15:54,803 Current Learning Rate: 0.0020610737 +2025-02-25 19:15:54,804 Train Loss: 0.0005399, Val Loss: 0.0006941 +2025-02-25 19:15:54,805 Epoch 261/2000 +2025-02-25 19:25:07,058 Current Learning Rate: 0.0021249737 +2025-02-25 19:25:07,059 Train Loss: 0.0005401, Val Loss: 0.0006935 +2025-02-25 19:25:07,059 Epoch 262/2000 +2025-02-25 
19:34:06,826 Current Learning Rate: 0.0021895831 +2025-02-25 19:34:06,827 Train Loss: 0.0005400, Val Loss: 0.0006935 +2025-02-25 19:34:06,827 Epoch 263/2000 +2025-02-25 19:43:32,418 Current Learning Rate: 0.0022548859 +2025-02-25 19:43:32,418 Train Loss: 0.0005400, Val Loss: 0.0006937 +2025-02-25 19:43:32,419 Epoch 264/2000 +2025-02-25 19:53:20,024 Current Learning Rate: 0.0023208660 +2025-02-25 19:53:20,025 Train Loss: 0.0005399, Val Loss: 0.0006943 +2025-02-25 19:53:20,025 Epoch 265/2000 +2025-02-25 20:03:04,058 Current Learning Rate: 0.0023875072 +2025-02-25 20:03:04,060 Train Loss: 0.0005398, Val Loss: 0.0006949 +2025-02-25 20:03:04,060 Epoch 266/2000 +2025-02-25 20:13:02,516 Current Learning Rate: 0.0024547929 +2025-02-25 20:13:02,517 Train Loss: 0.0005396, Val Loss: 0.0006955 +2025-02-25 20:13:02,517 Epoch 267/2000 +2025-02-25 20:22:40,988 Current Learning Rate: 0.0025227067 +2025-02-25 20:22:40,988 Train Loss: 0.0005396, Val Loss: 0.0006957 +2025-02-25 20:22:40,988 Epoch 268/2000 +2025-02-25 20:32:36,122 Current Learning Rate: 0.0025912316 +2025-02-25 20:32:36,123 Train Loss: 0.0005395, Val Loss: 0.0006966 +2025-02-25 20:32:36,123 Epoch 269/2000 +2025-02-25 20:41:59,104 Current Learning Rate: 0.0026603509 +2025-02-25 20:41:59,104 Train Loss: 0.0005395, Val Loss: 0.0006959 +2025-02-25 20:41:59,105 Epoch 270/2000 +2025-02-25 20:51:56,159 Current Learning Rate: 0.0027300475 +2025-02-25 20:51:56,160 Train Loss: 0.0005396, Val Loss: 0.0006952 +2025-02-25 20:51:56,160 Epoch 271/2000 +2025-02-25 21:02:05,225 Current Learning Rate: 0.0028003042 +2025-02-25 21:02:05,225 Train Loss: 0.0005397, Val Loss: 0.0006948 +2025-02-25 21:02:05,225 Epoch 272/2000 +2025-02-25 21:12:04,874 Current Learning Rate: 0.0028711035 +2025-02-25 21:12:04,874 Train Loss: 0.0005401, Val Loss: 0.0006960 +2025-02-25 21:12:04,874 Epoch 273/2000 +2025-02-25 21:22:11,870 Current Learning Rate: 0.0029424282 +2025-02-25 21:22:11,870 Train Loss: 0.0005425, Val Loss: 0.0006937 +2025-02-25 
21:22:11,870 Epoch 274/2000 +2025-02-25 21:32:22,662 Current Learning Rate: 0.0030142605 +2025-02-25 21:32:22,662 Train Loss: 0.0005435, Val Loss: 0.0006939 +2025-02-25 21:32:22,662 Epoch 275/2000 +2025-02-25 21:42:19,738 Current Learning Rate: 0.0030865828 +2025-02-25 21:42:19,739 Train Loss: 0.0005436, Val Loss: 0.0006944 +2025-02-25 21:42:19,739 Epoch 276/2000 +2025-02-25 21:51:42,565 Current Learning Rate: 0.0031593772 +2025-02-25 21:51:42,565 Train Loss: 0.0005430, Val Loss: 0.0006936 +2025-02-25 21:51:42,566 Epoch 277/2000 +2025-02-25 22:00:58,262 Current Learning Rate: 0.0032326258 +2025-02-25 22:00:58,263 Train Loss: 0.0005431, Val Loss: 0.0006950 +2025-02-25 22:00:58,263 Epoch 278/2000 +2025-02-25 22:10:09,066 Current Learning Rate: 0.0033063104 +2025-02-25 22:10:09,066 Train Loss: 0.0005438, Val Loss: 0.0006954 +2025-02-25 22:10:09,066 Epoch 279/2000 +2025-02-25 22:20:04,938 Current Learning Rate: 0.0033804129 +2025-02-25 22:20:04,938 Train Loss: 0.0005427, Val Loss: 0.0006947 +2025-02-25 22:20:04,938 Epoch 280/2000 +2025-02-25 22:30:00,236 Current Learning Rate: 0.0034549150 +2025-02-25 22:30:00,236 Train Loss: 0.0005404, Val Loss: 0.0006932 +2025-02-25 22:30:00,237 Epoch 281/2000 +2025-02-25 22:39:38,000 Current Learning Rate: 0.0035297984 +2025-02-25 22:39:38,000 Train Loss: 0.0005399, Val Loss: 0.0006951 +2025-02-25 22:39:38,000 Epoch 282/2000 +2025-02-25 22:49:12,064 Current Learning Rate: 0.0036050445 +2025-02-25 22:49:12,065 Train Loss: 0.0005402, Val Loss: 0.0007001 +2025-02-25 22:49:12,070 Epoch 283/2000 +2025-02-25 22:58:27,821 Current Learning Rate: 0.0036806348 +2025-02-25 22:58:27,822 Train Loss: 0.0005415, Val Loss: 0.0006988 +2025-02-25 22:58:27,822 Epoch 284/2000 +2025-02-25 23:07:57,702 Current Learning Rate: 0.0037565506 +2025-02-25 23:07:57,702 Train Loss: 0.0005433, Val Loss: 0.0007030 +2025-02-25 23:07:57,703 Epoch 285/2000 +2025-02-25 23:17:11,400 Current Learning Rate: 0.0038327732 +2025-02-25 23:17:11,401 Train Loss: 0.0005445, Val 
Loss: 0.0007052 +2025-02-25 23:17:11,401 Epoch 286/2000 +2025-02-25 23:26:25,394 Current Learning Rate: 0.0039092838 +2025-02-25 23:26:25,395 Train Loss: 0.0005450, Val Loss: 0.0006999 +2025-02-25 23:26:25,395 Epoch 287/2000 +2025-02-25 23:35:43,531 Current Learning Rate: 0.0039860635 +2025-02-25 23:35:43,532 Train Loss: 0.0005440, Val Loss: 0.0006988 +2025-02-25 23:35:43,533 Epoch 288/2000 +2025-02-25 23:45:03,682 Current Learning Rate: 0.0040630934 +2025-02-25 23:45:03,683 Train Loss: 0.0005431, Val Loss: 0.0006985 +2025-02-25 23:45:03,683 Epoch 289/2000 +2025-02-25 23:54:26,668 Current Learning Rate: 0.0041403545 +2025-02-25 23:54:26,669 Train Loss: 0.0005436, Val Loss: 0.0006997 +2025-02-25 23:54:26,669 Epoch 290/2000 +2025-02-26 00:03:44,004 Current Learning Rate: 0.0042178277 +2025-02-26 00:03:44,004 Train Loss: 0.0005454, Val Loss: 0.0007032 +2025-02-26 00:03:44,004 Epoch 291/2000 +2025-02-26 00:13:22,444 Current Learning Rate: 0.0042954938 +2025-02-26 00:13:22,445 Train Loss: 0.0005477, Val Loss: 0.0007006 +2025-02-26 00:13:22,445 Epoch 292/2000 +2025-02-26 00:22:56,714 Current Learning Rate: 0.0043733338 +2025-02-26 00:22:56,715 Train Loss: 0.0005466, Val Loss: 0.0007033 +2025-02-26 00:22:56,715 Epoch 293/2000 +2025-02-26 00:32:23,455 Current Learning Rate: 0.0044513284 +2025-02-26 00:32:23,456 Train Loss: 0.0005430, Val Loss: 0.0007012 +2025-02-26 00:32:23,456 Epoch 294/2000 +2025-02-26 00:41:54,969 Current Learning Rate: 0.0045294584 +2025-02-26 00:41:54,970 Train Loss: 0.0005414, Val Loss: 0.0007025 +2025-02-26 00:41:54,971 Epoch 295/2000 +2025-02-26 00:51:24,795 Current Learning Rate: 0.0046077045 +2025-02-26 00:51:24,796 Train Loss: 0.0005422, Val Loss: 0.0007055 +2025-02-26 00:51:24,796 Epoch 296/2000 +2025-02-26 01:00:40,544 Current Learning Rate: 0.0046860474 +2025-02-26 01:00:40,544 Train Loss: 0.0005438, Val Loss: 0.0007003 +2025-02-26 01:00:40,545 Epoch 297/2000 +2025-02-26 01:10:02,252 Current Learning Rate: 0.0047644677 +2025-02-26 
01:10:02,253 Train Loss: 0.0005464, Val Loss: 0.0007090 +2025-02-26 01:10:02,253 Epoch 298/2000 +2025-02-26 01:19:32,114 Current Learning Rate: 0.0048429462 +2025-02-26 01:19:32,115 Train Loss: 0.0005495, Val Loss: 0.0007086 +2025-02-26 01:19:32,115 Epoch 299/2000 +2025-02-26 01:29:02,451 Current Learning Rate: 0.0049214634 +2025-02-26 01:29:02,452 Train Loss: 0.0005497, Val Loss: 0.0007147 +2025-02-26 01:29:02,452 Epoch 300/2000 +2025-02-26 01:38:18,512 Current Learning Rate: 0.0050000000 +2025-02-26 01:38:18,513 Train Loss: 0.0005449, Val Loss: 0.0007045 +2025-02-26 01:38:18,513 Epoch 301/2000 +2025-02-26 01:47:43,768 Current Learning Rate: 0.0050785366 +2025-02-26 01:47:43,768 Train Loss: 0.0005425, Val Loss: 0.0007122 +2025-02-26 01:47:43,769 Epoch 302/2000 +2025-02-26 01:57:00,422 Current Learning Rate: 0.0051570538 +2025-02-26 01:57:00,422 Train Loss: 0.0005440, Val Loss: 0.0007197 +2025-02-26 01:57:00,422 Epoch 303/2000 +2025-02-26 02:06:29,002 Current Learning Rate: 0.0052355323 +2025-02-26 02:06:29,003 Train Loss: 0.0005465, Val Loss: 0.0007093 +2025-02-26 02:06:29,003 Epoch 304/2000 +2025-02-26 02:15:47,798 Current Learning Rate: 0.0053139526 +2025-02-26 02:15:47,798 Train Loss: 0.0005486, Val Loss: 0.0007087 +2025-02-26 02:15:47,799 Epoch 305/2000 +2025-02-26 02:24:58,676 Current Learning Rate: 0.0053922955 +2025-02-26 02:24:58,676 Train Loss: 0.0005508, Val Loss: 0.0007035 +2025-02-26 02:24:58,677 Epoch 306/2000 +2025-02-26 02:34:09,916 Current Learning Rate: 0.0054705416 +2025-02-26 02:34:09,917 Train Loss: 0.0005553, Val Loss: 0.0007202 +2025-02-26 02:34:09,917 Epoch 307/2000 +2025-02-26 02:43:25,842 Current Learning Rate: 0.0055486716 +2025-02-26 02:43:25,843 Train Loss: 0.0005521, Val Loss: 0.0007076 +2025-02-26 02:43:25,843 Epoch 308/2000 +2025-02-26 02:48:35,438 Current Learning Rate: 0.0056266662 +2025-02-26 02:48:35,438 Train Loss: 0.0005452, Val Loss: 0.0007047 +2025-02-26 02:48:35,439 Epoch 309/2000 +2025-02-26 02:53:43,305 Current Learning 
Rate: 0.0057045062 +2025-02-26 02:53:43,306 Train Loss: 0.0005436, Val Loss: 0.0007041 +2025-02-26 02:53:43,306 Epoch 310/2000 +2025-02-26 02:58:52,781 Current Learning Rate: 0.0057821723 +2025-02-26 02:58:52,782 Train Loss: 0.0005464, Val Loss: 0.0007041 +2025-02-26 02:58:52,782 Epoch 311/2000 +2025-02-26 03:04:04,104 Current Learning Rate: 0.0058596455 +2025-02-26 03:04:04,105 Train Loss: 0.0005492, Val Loss: 0.0007061 +2025-02-26 03:04:04,105 Epoch 312/2000 +2025-02-26 03:09:10,523 Current Learning Rate: 0.0059369066 +2025-02-26 03:09:10,523 Train Loss: 0.0005508, Val Loss: 0.0007060 +2025-02-26 03:09:10,523 Epoch 313/2000 +2025-02-26 03:14:18,616 Current Learning Rate: 0.0060139365 +2025-02-26 03:14:18,617 Train Loss: 0.0005518, Val Loss: 0.0007084 +2025-02-26 03:14:18,617 Epoch 314/2000 +2025-02-26 03:19:29,456 Current Learning Rate: 0.0060907162 +2025-02-26 03:19:29,456 Train Loss: 0.0005561, Val Loss: 0.0007242 +2025-02-26 03:19:29,457 Epoch 315/2000 +2025-02-26 03:24:38,545 Current Learning Rate: 0.0061672268 +2025-02-26 03:24:38,546 Train Loss: 0.0005587, Val Loss: 0.0007057 +2025-02-26 03:24:38,546 Epoch 316/2000 +2025-02-26 03:29:47,145 Current Learning Rate: 0.0062434494 +2025-02-26 03:29:47,146 Train Loss: 0.0005544, Val Loss: 0.0006981 +2025-02-26 03:29:47,146 Epoch 317/2000 +2025-02-26 03:34:55,284 Current Learning Rate: 0.0063193652 +2025-02-26 03:34:55,285 Train Loss: 0.0005453, Val Loss: 0.0007050 +2025-02-26 03:34:55,285 Epoch 318/2000 +2025-02-26 03:40:03,622 Current Learning Rate: 0.0063949555 +2025-02-26 03:40:03,623 Train Loss: 0.0005432, Val Loss: 0.0007009 +2025-02-26 03:40:03,623 Epoch 319/2000 +2025-02-26 03:45:11,781 Current Learning Rate: 0.0064702016 +2025-02-26 03:45:11,782 Train Loss: 0.0005465, Val Loss: 0.0007066 +2025-02-26 03:45:11,782 Epoch 320/2000 +2025-02-26 03:50:21,340 Current Learning Rate: 0.0065450850 +2025-02-26 03:50:21,340 Train Loss: 0.0005504, Val Loss: 0.0007076 +2025-02-26 03:50:21,341 Epoch 321/2000 +2025-02-26 
03:55:29,260 Current Learning Rate: 0.0066195871 +2025-02-26 03:55:29,260 Train Loss: 0.0005530, Val Loss: 0.0007129 +2025-02-26 03:55:29,261 Epoch 322/2000 +2025-02-26 04:00:37,465 Current Learning Rate: 0.0066936896 +2025-02-26 04:00:37,465 Train Loss: 0.0005517, Val Loss: 0.0007049 +2025-02-26 04:00:37,465 Epoch 323/2000 +2025-02-26 04:05:45,203 Current Learning Rate: 0.0067673742 +2025-02-26 04:05:45,205 Train Loss: 0.0005508, Val Loss: 0.0007066 +2025-02-26 04:05:45,208 Epoch 324/2000 +2025-02-26 04:10:54,287 Current Learning Rate: 0.0068406228 +2025-02-26 04:10:54,288 Train Loss: 0.0005498, Val Loss: 0.0007052 +2025-02-26 04:10:54,288 Epoch 325/2000 +2025-02-26 04:16:01,901 Current Learning Rate: 0.0069134172 +2025-02-26 04:16:01,902 Train Loss: 0.0005467, Val Loss: 0.0007089 +2025-02-26 04:16:01,902 Epoch 326/2000 +2025-02-26 04:21:13,088 Current Learning Rate: 0.0069857395 +2025-02-26 04:21:13,089 Train Loss: 0.0005470, Val Loss: 0.0007300 +2025-02-26 04:21:13,089 Epoch 327/2000 +2025-02-26 04:26:22,263 Current Learning Rate: 0.0070575718 +2025-02-26 04:26:22,264 Train Loss: 0.0005533, Val Loss: 0.0007129 +2025-02-26 04:26:22,265 Epoch 328/2000 +2025-02-26 04:31:30,125 Current Learning Rate: 0.0071288965 +2025-02-26 04:31:30,125 Train Loss: 0.0005576, Val Loss: 0.0007110 +2025-02-26 04:31:30,126 Epoch 329/2000 +2025-02-26 04:36:39,912 Current Learning Rate: 0.0071996958 +2025-02-26 04:36:39,913 Train Loss: 0.0005579, Val Loss: 0.0007481 +2025-02-26 04:36:39,913 Epoch 330/2000 +2025-02-26 04:41:48,270 Current Learning Rate: 0.0072699525 +2025-02-26 04:41:48,271 Train Loss: 0.0005511, Val Loss: 0.0007299 +2025-02-26 04:41:48,271 Epoch 331/2000 +2025-02-26 04:46:58,667 Current Learning Rate: 0.0073396491 +2025-02-26 04:46:58,668 Train Loss: 0.0005421, Val Loss: 0.0007091 +2025-02-26 04:46:58,668 Epoch 332/2000 +2025-02-26 04:52:06,939 Current Learning Rate: 0.0074087684 +2025-02-26 04:52:06,940 Train Loss: 0.0005421, Val Loss: 0.0007054 +2025-02-26 
04:52:06,940 Epoch 333/2000 +2025-02-26 04:57:16,120 Current Learning Rate: 0.0074772933 +2025-02-26 04:57:16,121 Train Loss: 0.0005480, Val Loss: 0.0006991 +2025-02-26 04:57:16,121 Epoch 334/2000 +2025-02-26 05:02:25,235 Current Learning Rate: 0.0075452071 +2025-02-26 05:02:25,236 Train Loss: 0.0005498, Val Loss: 0.0007087 +2025-02-26 05:02:25,236 Epoch 335/2000 +2025-02-26 05:07:34,035 Current Learning Rate: 0.0076124928 +2025-02-26 05:07:34,035 Train Loss: 0.0005482, Val Loss: 0.0007033 +2025-02-26 05:07:34,035 Epoch 336/2000 +2025-02-26 05:12:41,177 Current Learning Rate: 0.0076791340 +2025-02-26 05:12:41,177 Train Loss: 0.0005510, Val Loss: 0.0007072 +2025-02-26 05:12:41,177 Epoch 337/2000 +2025-02-26 05:17:50,336 Current Learning Rate: 0.0077451141 +2025-02-26 05:17:50,337 Train Loss: 0.0005554, Val Loss: 0.0007172 +2025-02-26 05:17:50,337 Epoch 338/2000 +2025-02-26 05:22:58,867 Current Learning Rate: 0.0078104169 +2025-02-26 05:22:58,867 Train Loss: 0.0005547, Val Loss: 0.0007031 +2025-02-26 05:22:58,867 Epoch 339/2000 +2025-02-26 05:28:07,803 Current Learning Rate: 0.0078750263 +2025-02-26 05:28:07,804 Train Loss: 0.0005554, Val Loss: 0.0006968 +2025-02-26 05:28:07,804 Epoch 340/2000 +2025-02-26 05:33:17,719 Current Learning Rate: 0.0079389263 +2025-02-26 05:33:17,720 Train Loss: 0.0005531, Val Loss: 0.0007022 +2025-02-26 05:33:17,720 Epoch 341/2000 +2025-02-26 05:38:26,791 Current Learning Rate: 0.0080021011 +2025-02-26 05:38:26,791 Train Loss: 0.0005494, Val Loss: 0.0006981 +2025-02-26 05:38:26,792 Epoch 342/2000 +2025-02-26 05:43:37,414 Current Learning Rate: 0.0080645353 +2025-02-26 05:43:37,415 Train Loss: 0.0005458, Val Loss: 0.0006945 +2025-02-26 05:43:37,415 Epoch 343/2000 +2025-02-26 05:48:44,499 Current Learning Rate: 0.0081262133 +2025-02-26 05:48:44,500 Train Loss: 0.0005450, Val Loss: 0.0006969 +2025-02-26 05:48:44,500 Epoch 344/2000 +2025-02-26 05:53:54,030 Current Learning Rate: 0.0081871199 +2025-02-26 05:53:54,030 Train Loss: 0.0005476, Val 
Loss: 0.0006984 +2025-02-26 05:53:54,030 Epoch 345/2000 +2025-02-26 05:59:03,086 Current Learning Rate: 0.0082472402 +2025-02-26 05:59:03,086 Train Loss: 0.0005505, Val Loss: 0.0006989 +2025-02-26 05:59:03,087 Epoch 346/2000 +2025-02-26 06:04:12,605 Current Learning Rate: 0.0083065593 +2025-02-26 06:04:12,606 Train Loss: 0.0005499, Val Loss: 0.0006956 +2025-02-26 06:04:12,606 Epoch 347/2000 +2025-02-26 06:09:21,547 Current Learning Rate: 0.0083650626 +2025-02-26 06:09:21,547 Train Loss: 0.0005520, Val Loss: 0.0007259 +2025-02-26 06:09:21,548 Epoch 348/2000 +2025-02-26 06:14:29,183 Current Learning Rate: 0.0084227355 +2025-02-26 06:14:29,184 Train Loss: 0.0005537, Val Loss: 0.0007024 +2025-02-26 06:14:29,184 Epoch 349/2000 +2025-02-26 06:19:36,250 Current Learning Rate: 0.0084795640 +2025-02-26 06:19:36,251 Train Loss: 0.0005500, Val Loss: 0.0007016 +2025-02-26 06:19:36,252 Epoch 350/2000 +2025-02-26 06:24:44,302 Current Learning Rate: 0.0085355339 +2025-02-26 06:24:44,369 Train Loss: 0.0005416, Val Loss: 0.0006855 +2025-02-26 06:24:44,370 Epoch 351/2000 +2025-02-26 06:29:52,756 Current Learning Rate: 0.0085906315 +2025-02-26 06:29:52,757 Train Loss: 0.0005357, Val Loss: 0.0007150 +2025-02-26 06:29:52,757 Epoch 352/2000 +2025-02-26 06:35:01,891 Current Learning Rate: 0.0086448431 +2025-02-26 06:35:01,891 Train Loss: 0.0005378, Val Loss: 0.0006922 +2025-02-26 06:35:01,891 Epoch 353/2000 +2025-02-26 06:40:11,482 Current Learning Rate: 0.0086981555 +2025-02-26 06:40:11,483 Train Loss: 0.0005426, Val Loss: 0.0006933 +2025-02-26 06:40:11,483 Epoch 354/2000 +2025-02-26 06:45:21,755 Current Learning Rate: 0.0087505553 +2025-02-26 06:45:21,756 Train Loss: 0.0005408, Val Loss: 0.0006922 +2025-02-26 06:45:21,756 Epoch 355/2000 +2025-02-26 06:50:31,485 Current Learning Rate: 0.0088020298 +2025-02-26 06:50:31,486 Train Loss: 0.0005460, Val Loss: 0.0007217 +2025-02-26 06:50:31,486 Epoch 356/2000 +2025-02-26 06:55:40,651 Current Learning Rate: 0.0088525662 +2025-02-26 
06:55:40,652 Train Loss: 0.0005484, Val Loss: 0.0007136 +2025-02-26 06:55:40,652 Epoch 357/2000 +2025-02-26 07:00:50,738 Current Learning Rate: 0.0089021520 +2025-02-26 07:00:50,739 Train Loss: 0.0005501, Val Loss: 0.0006890 +2025-02-26 07:00:50,739 Epoch 358/2000 +2025-02-26 07:06:01,927 Current Learning Rate: 0.0089507751 +2025-02-26 07:06:01,927 Train Loss: 0.0007577, Val Loss: 0.0007122 +2025-02-26 07:06:01,927 Epoch 359/2000 +2025-02-26 07:11:08,953 Current Learning Rate: 0.0089984233 +2025-02-26 07:11:08,953 Train Loss: 0.0005669, Val Loss: 0.0006898 +2025-02-26 07:11:08,954 Epoch 360/2000 +2025-02-26 07:16:18,473 Current Learning Rate: 0.0090450850 +2025-02-26 07:16:18,539 Train Loss: 0.0005387, Val Loss: 0.0006816 +2025-02-26 07:16:18,540 Epoch 361/2000 +2025-02-26 07:21:28,068 Current Learning Rate: 0.0090907486 +2025-02-26 07:21:28,149 Train Loss: 0.0005339, Val Loss: 0.0006780 +2025-02-26 07:21:28,150 Epoch 362/2000 +2025-02-26 07:26:34,721 Current Learning Rate: 0.0091354029 +2025-02-26 07:26:34,721 Train Loss: 0.0005354, Val Loss: 0.0006836 +2025-02-26 07:26:34,722 Epoch 363/2000 +2025-02-26 07:31:44,809 Current Learning Rate: 0.0091790368 +2025-02-26 07:31:44,810 Train Loss: 0.0005404, Val Loss: 0.0006818 +2025-02-26 07:31:44,810 Epoch 364/2000 +2025-02-26 07:36:54,014 Current Learning Rate: 0.0092216396 +2025-02-26 07:36:54,015 Train Loss: 0.0005401, Val Loss: 0.0006884 +2025-02-26 07:36:54,015 Epoch 365/2000 +2025-02-26 07:42:03,702 Current Learning Rate: 0.0092632008 +2025-02-26 07:42:03,702 Train Loss: 0.0005405, Val Loss: 0.0007169 +2025-02-26 07:42:03,703 Epoch 366/2000 +2025-02-26 07:47:12,022 Current Learning Rate: 0.0093037101 +2025-02-26 07:47:12,023 Train Loss: 0.0005425, Val Loss: 0.0006928 +2025-02-26 07:47:12,023 Epoch 367/2000 +2025-02-26 07:52:19,262 Current Learning Rate: 0.0093431576 +2025-02-26 07:52:19,346 Train Loss: 0.0005414, Val Loss: 0.0006775 +2025-02-26 07:52:19,346 Epoch 368/2000 +2025-02-26 07:57:31,000 Current Learning 
Rate: 0.0093815334 +2025-02-26 07:57:31,000 Train Loss: 0.0005347, Val Loss: 0.0006792 +2025-02-26 07:57:31,000 Epoch 369/2000 +2025-02-26 08:02:40,410 Current Learning Rate: 0.0094188282 +2025-02-26 08:02:40,479 Train Loss: 0.0005247, Val Loss: 0.0006753 +2025-02-26 08:02:40,479 Epoch 370/2000 +2025-02-26 08:07:49,012 Current Learning Rate: 0.0094550326 +2025-02-26 08:07:49,013 Train Loss: 0.0005250, Val Loss: 0.0006755 +2025-02-26 08:07:49,013 Epoch 371/2000 +2025-02-26 08:12:58,923 Current Learning Rate: 0.0094901379 +2025-02-26 08:12:58,924 Train Loss: 0.0006798, Val Loss: 0.0006809 +2025-02-26 08:12:58,924 Epoch 372/2000 +2025-02-26 08:18:07,592 Current Learning Rate: 0.0095241353 +2025-02-26 08:18:07,697 Train Loss: 0.0005292, Val Loss: 0.0006638 +2025-02-26 08:18:07,697 Epoch 373/2000 +2025-02-26 08:23:18,279 Current Learning Rate: 0.0095570164 +2025-02-26 08:23:18,279 Train Loss: 0.0005175, Val Loss: 0.0006728 +2025-02-26 08:23:18,280 Epoch 374/2000 +2025-02-26 08:28:30,003 Current Learning Rate: 0.0095887731 +2025-02-26 08:28:30,003 Train Loss: 0.0005207, Val Loss: 0.0006741 +2025-02-26 08:28:30,004 Epoch 375/2000 +2025-02-26 08:33:38,103 Current Learning Rate: 0.0096193977 +2025-02-26 08:33:38,104 Train Loss: 0.0005241, Val Loss: 0.0006674 +2025-02-26 08:33:38,104 Epoch 376/2000 +2025-02-26 08:38:51,099 Current Learning Rate: 0.0096488824 +2025-02-26 08:38:51,099 Train Loss: 0.0005306, Val Loss: 0.0006886 +2025-02-26 08:38:51,100 Epoch 377/2000 +2025-02-26 08:44:02,458 Current Learning Rate: 0.0096772202 +2025-02-26 08:44:02,458 Train Loss: 0.0005307, Val Loss: 0.0006839 +2025-02-26 08:44:02,459 Epoch 378/2000 +2025-02-26 08:49:11,191 Current Learning Rate: 0.0097044038 +2025-02-26 08:49:11,192 Train Loss: 0.0005312, Val Loss: 0.0006792 +2025-02-26 08:49:11,192 Epoch 379/2000 +2025-02-26 08:54:22,156 Current Learning Rate: 0.0097304268 +2025-02-26 08:54:22,157 Train Loss: 0.0005302, Val Loss: 0.0006694 +2025-02-26 08:54:22,157 Epoch 380/2000 +2025-02-26 
08:59:30,669 Current Learning Rate: 0.0097552826 +2025-02-26 08:59:30,727 Train Loss: 0.0005216, Val Loss: 0.0006637 +2025-02-26 08:59:30,728 Epoch 381/2000 +2025-02-26 09:04:40,792 Current Learning Rate: 0.0097789651 +2025-02-26 09:04:40,899 Train Loss: 0.0005130, Val Loss: 0.0006573 +2025-02-26 09:04:40,899 Epoch 382/2000 +2025-02-26 09:09:51,864 Current Learning Rate: 0.0098014684 +2025-02-26 09:09:51,865 Train Loss: 0.0005140, Val Loss: 0.0006762 +2025-02-26 09:09:51,865 Epoch 383/2000 +2025-02-26 09:15:03,849 Current Learning Rate: 0.0098227871 +2025-02-26 09:15:03,850 Train Loss: 0.0005164, Val Loss: 0.0006644 +2025-02-26 09:15:03,850 Epoch 384/2000 +2025-02-26 09:20:10,615 Current Learning Rate: 0.0098429158 +2025-02-26 09:20:10,616 Train Loss: 0.0005997, Val Loss: 0.0006710 +2025-02-26 09:20:10,616 Epoch 385/2000 +2025-02-26 09:25:20,608 Current Learning Rate: 0.0098618496 +2025-02-26 09:25:20,609 Train Loss: 0.0005252, Val Loss: 0.0006625 +2025-02-26 09:25:20,609 Epoch 386/2000 +2025-02-26 09:30:29,562 Current Learning Rate: 0.0098795838 +2025-02-26 09:30:29,619 Train Loss: 0.0005101, Val Loss: 0.0006564 +2025-02-26 09:30:29,619 Epoch 387/2000 +2025-02-26 09:35:36,709 Current Learning Rate: 0.0098961141 +2025-02-26 09:35:36,791 Train Loss: 0.0005029, Val Loss: 0.0006499 +2025-02-26 09:35:36,791 Epoch 388/2000 +2025-02-26 09:40:48,125 Current Learning Rate: 0.0099114363 +2025-02-26 09:40:48,126 Train Loss: 0.0005062, Val Loss: 0.0006690 +2025-02-26 09:40:48,126 Epoch 389/2000 +2025-02-26 09:45:58,316 Current Learning Rate: 0.0099255466 +2025-02-26 09:45:58,317 Train Loss: 0.0005112, Val Loss: 0.0006602 +2025-02-26 09:45:58,317 Epoch 390/2000 +2025-02-26 09:51:09,858 Current Learning Rate: 0.0099384417 +2025-02-26 09:51:09,859 Train Loss: 0.0005134, Val Loss: 0.0006611 +2025-02-26 09:51:09,859 Epoch 391/2000 +2025-02-26 09:56:22,616 Current Learning Rate: 0.0099501183 +2025-02-26 09:56:22,617 Train Loss: 0.0005176, Val Loss: 0.0006759 +2025-02-26 
09:56:22,618 Epoch 392/2000 +2025-02-26 10:01:32,535 Current Learning Rate: 0.0099605735 +2025-02-26 10:01:32,536 Train Loss: 0.0005183, Val Loss: 0.0006709 +2025-02-26 10:01:32,539 Epoch 393/2000 +2025-02-26 10:06:41,088 Current Learning Rate: 0.0099698048 +2025-02-26 10:06:41,089 Train Loss: 0.0005169, Val Loss: 0.0006606 +2025-02-26 10:06:41,089 Epoch 394/2000 +2025-02-26 10:11:47,916 Current Learning Rate: 0.0099778098 +2025-02-26 10:11:47,916 Train Loss: 0.0005176, Val Loss: 0.0006681 +2025-02-26 10:11:47,916 Epoch 395/2000 +2025-02-26 10:16:55,809 Current Learning Rate: 0.0099845867 +2025-02-26 10:16:55,810 Train Loss: 0.0005250, Val Loss: 0.0006657 +2025-02-26 10:16:55,810 Epoch 396/2000 +2025-02-26 10:22:01,964 Current Learning Rate: 0.0099901336 +2025-02-26 10:22:01,965 Train Loss: 0.0005296, Val Loss: 0.0006587 +2025-02-26 10:22:01,965 Epoch 397/2000 +2025-02-26 10:27:08,746 Current Learning Rate: 0.0099944494 +2025-02-26 10:27:08,746 Train Loss: 0.0005217, Val Loss: 0.0006516 +2025-02-26 10:27:08,746 Epoch 398/2000 +2025-02-26 10:32:14,545 Current Learning Rate: 0.0099975328 +2025-02-26 10:32:14,684 Train Loss: 0.0005097, Val Loss: 0.0006488 +2025-02-26 10:32:14,684 Epoch 399/2000 +2025-02-26 10:37:22,171 Current Learning Rate: 0.0099993832 +2025-02-26 10:37:22,247 Train Loss: 0.0004929, Val Loss: 0.0006443 +2025-02-26 10:37:22,248 Epoch 400/2000 +2025-02-26 10:42:29,099 Current Learning Rate: 0.0100000000 +2025-02-26 10:42:29,161 Train Loss: 0.0004895, Val Loss: 0.0006379 +2025-02-26 10:42:29,161 Epoch 401/2000 +2025-02-26 10:47:39,994 Current Learning Rate: 0.0099993832 +2025-02-26 10:47:39,994 Train Loss: 0.0004932, Val Loss: 0.0006428 +2025-02-26 10:47:39,995 Epoch 402/2000 +2025-02-26 10:52:51,842 Current Learning Rate: 0.0099975328 +2025-02-26 10:52:51,842 Train Loss: 0.0004956, Val Loss: 0.0006512 +2025-02-26 10:52:51,843 Epoch 403/2000 +2025-02-26 10:57:58,957 Current Learning Rate: 0.0099944494 +2025-02-26 10:57:58,959 Train Loss: 0.0004978, Val 
Loss: 0.0006468 +2025-02-26 10:57:58,959 Epoch 404/2000 +2025-02-26 11:03:11,017 Current Learning Rate: 0.0099901336 +2025-02-26 11:03:11,018 Train Loss: 0.0005000, Val Loss: 0.0006875 +2025-02-26 11:03:11,018 Epoch 405/2000 +2025-02-26 11:08:19,594 Current Learning Rate: 0.0099845867 +2025-02-26 11:08:19,595 Train Loss: 0.0006802, Val Loss: 0.0007148 +2025-02-26 11:08:19,595 Epoch 406/2000 +2025-02-26 11:13:30,456 Current Learning Rate: 0.0099778098 +2025-02-26 11:13:30,456 Train Loss: 0.0005337, Val Loss: 0.0006453 +2025-02-26 11:13:30,456 Epoch 407/2000 +2025-02-26 11:18:39,601 Current Learning Rate: 0.0099698048 +2025-02-26 11:18:39,660 Train Loss: 0.0004918, Val Loss: 0.0006349 +2025-02-26 11:18:39,661 Epoch 408/2000 +2025-02-26 11:23:44,491 Current Learning Rate: 0.0099605735 +2025-02-26 11:23:44,556 Train Loss: 0.0004841, Val Loss: 0.0006346 +2025-02-26 11:23:44,556 Epoch 409/2000 +2025-02-26 11:28:51,677 Current Learning Rate: 0.0099501183 +2025-02-26 11:28:51,677 Train Loss: 0.0004850, Val Loss: 0.0006426 +2025-02-26 11:28:51,678 Epoch 410/2000 +2025-02-26 11:34:00,792 Current Learning Rate: 0.0099384417 +2025-02-26 11:34:00,792 Train Loss: 0.0004882, Val Loss: 0.0006488 +2025-02-26 11:34:00,793 Epoch 411/2000 +2025-02-26 11:39:09,658 Current Learning Rate: 0.0099255466 +2025-02-26 11:39:09,658 Train Loss: 0.0004906, Val Loss: 0.0006565 +2025-02-26 11:39:09,659 Epoch 412/2000 +2025-02-26 11:44:20,900 Current Learning Rate: 0.0099114363 +2025-02-26 11:44:20,900 Train Loss: 0.0004931, Val Loss: 0.0006476 +2025-02-26 11:44:20,900 Epoch 413/2000 +2025-02-26 11:49:24,932 Current Learning Rate: 0.0098961141 +2025-02-26 11:49:24,932 Train Loss: 0.0004939, Val Loss: 0.0006385 +2025-02-26 11:49:24,933 Epoch 414/2000 +2025-02-26 11:54:29,718 Current Learning Rate: 0.0098795838 +2025-02-26 11:54:29,779 Train Loss: 0.0004946, Val Loss: 0.0006305 +2025-02-26 11:54:29,779 Epoch 415/2000 +2025-02-26 11:59:35,036 Current Learning Rate: 0.0098618496 +2025-02-26 
11:59:35,115 Train Loss: 0.0004964, Val Loss: 0.0006279 +2025-02-26 11:59:35,115 Epoch 416/2000 +2025-02-26 12:04:45,561 Current Learning Rate: 0.0098429158 +2025-02-26 12:04:45,562 Train Loss: 0.0004973, Val Loss: 0.0006422 +2025-02-26 12:04:45,562 Epoch 417/2000 +2025-02-26 12:09:52,832 Current Learning Rate: 0.0098227871 +2025-02-26 12:09:52,833 Train Loss: 0.0004952, Val Loss: 0.0006415 +2025-02-26 12:09:52,833 Epoch 418/2000 +2025-02-26 12:15:03,394 Current Learning Rate: 0.0098014684 +2025-02-26 12:15:03,395 Train Loss: 0.0004836, Val Loss: 0.0006290 +2025-02-26 12:15:03,395 Epoch 419/2000 +2025-02-26 12:20:15,154 Current Learning Rate: 0.0097789651 +2025-02-26 12:20:15,242 Train Loss: 0.0004711, Val Loss: 0.0006232 +2025-02-26 12:20:15,242 Epoch 420/2000 +2025-02-26 12:25:23,944 Current Learning Rate: 0.0097552826 +2025-02-26 12:25:24,004 Train Loss: 0.0004685, Val Loss: 0.0006208 +2025-02-26 12:25:24,005 Epoch 421/2000 +2025-02-26 12:30:31,996 Current Learning Rate: 0.0097304268 +2025-02-26 12:30:31,998 Train Loss: 0.0004713, Val Loss: 0.0006284 +2025-02-26 12:30:31,999 Epoch 422/2000 +2025-02-26 12:35:43,213 Current Learning Rate: 0.0097044038 +2025-02-26 12:35:43,214 Train Loss: 0.0004748, Val Loss: 0.0006270 +2025-02-26 12:35:43,214 Epoch 423/2000 +2025-02-26 12:40:48,858 Current Learning Rate: 0.0096772202 +2025-02-26 12:40:48,859 Train Loss: 0.0004745, Val Loss: 0.0006352 +2025-02-26 12:40:48,859 Epoch 424/2000 +2025-02-26 12:45:58,116 Current Learning Rate: 0.0096488824 +2025-02-26 12:45:58,116 Train Loss: 0.0004761, Val Loss: 0.0006361 +2025-02-26 12:45:58,117 Epoch 425/2000 +2025-02-26 12:51:05,112 Current Learning Rate: 0.0096193977 +2025-02-26 12:51:05,112 Train Loss: 0.0004778, Val Loss: 0.0006287 +2025-02-26 12:51:05,113 Epoch 426/2000 +2025-02-26 12:56:11,778 Current Learning Rate: 0.0095887731 +2025-02-26 12:56:11,779 Train Loss: 0.0004772, Val Loss: 0.0006226 +2025-02-26 12:56:11,779 Epoch 427/2000 +2025-02-26 13:01:23,429 Current Learning 
Rate: 0.0095570164 +2025-02-26 13:01:23,430 Train Loss: 0.0004788, Val Loss: 0.0006296 +2025-02-26 13:01:23,430 Epoch 428/2000 +2025-02-26 13:06:34,911 Current Learning Rate: 0.0095241353 +2025-02-26 13:06:34,911 Train Loss: 0.0004869, Val Loss: 0.0006429 +2025-02-26 13:06:34,911 Epoch 429/2000 +2025-02-26 13:11:42,956 Current Learning Rate: 0.0094901379 +2025-02-26 13:11:42,957 Train Loss: 0.0004931, Val Loss: 0.0006444 +2025-02-26 13:11:42,957 Epoch 430/2000 +2025-02-26 13:16:49,220 Current Learning Rate: 0.0094550326 +2025-02-26 13:16:49,221 Train Loss: 0.0004927, Val Loss: 0.0006309 +2025-02-26 13:16:49,221 Epoch 431/2000 +2025-02-26 13:21:55,389 Current Learning Rate: 0.0094188282 +2025-02-26 13:21:55,389 Train Loss: 0.0004845, Val Loss: 0.0006245 +2025-02-26 13:21:55,390 Epoch 432/2000 +2025-02-26 13:27:00,572 Current Learning Rate: 0.0093815334 +2025-02-26 13:27:00,636 Train Loss: 0.0004676, Val Loss: 0.0006085 +2025-02-26 13:27:00,637 Epoch 433/2000 +2025-02-26 13:32:06,242 Current Learning Rate: 0.0093431576 +2025-02-26 13:32:06,242 Train Loss: 0.0004554, Val Loss: 0.0006086 +2025-02-26 13:32:06,242 Epoch 434/2000 +2025-02-26 13:37:11,794 Current Learning Rate: 0.0093037101 +2025-02-26 13:37:11,794 Train Loss: 0.0004522, Val Loss: 0.0006097 +2025-02-26 13:37:11,795 Epoch 435/2000 +2025-02-26 13:42:19,867 Current Learning Rate: 0.0092632008 +2025-02-26 13:42:19,945 Train Loss: 0.0004538, Val Loss: 0.0006020 +2025-02-26 13:42:19,945 Epoch 436/2000 +2025-02-26 13:47:26,538 Current Learning Rate: 0.0092216396 +2025-02-26 13:47:26,539 Train Loss: 0.0004559, Val Loss: 0.0006040 +2025-02-26 13:47:26,539 Epoch 437/2000 +2025-02-26 13:52:33,274 Current Learning Rate: 0.0091790368 +2025-02-26 13:52:33,274 Train Loss: 0.0004580, Val Loss: 0.0006174 +2025-02-26 13:52:33,275 Epoch 438/2000 +2025-02-26 13:57:38,572 Current Learning Rate: 0.0091354029 +2025-02-26 13:57:38,573 Train Loss: 0.0004576, Val Loss: 0.0006067 +2025-02-26 13:57:38,573 Epoch 439/2000 +2025-02-26 
14:02:45,092 Current Learning Rate: 0.0090907486 +2025-02-26 14:02:45,092 Train Loss: 0.0004580, Val Loss: 0.0006049 +2025-02-26 14:02:45,093 Epoch 440/2000 +2025-02-26 14:07:51,738 Current Learning Rate: 0.0090450850 +2025-02-26 14:07:51,738 Train Loss: 0.0004591, Val Loss: 0.0006103 +2025-02-26 14:07:51,738 Epoch 441/2000 +2025-02-26 14:12:59,788 Current Learning Rate: 0.0089984233 +2025-02-26 14:12:59,788 Train Loss: 0.0004611, Val Loss: 0.0006048 +2025-02-26 14:12:59,788 Epoch 442/2000 +2025-02-26 14:18:10,260 Current Learning Rate: 0.0089507751 +2025-02-26 14:18:10,261 Train Loss: 0.0004636, Val Loss: 0.0006068 +2025-02-26 14:18:10,261 Epoch 443/2000 +2025-02-26 14:23:15,185 Current Learning Rate: 0.0089021520 +2025-02-26 14:23:15,185 Train Loss: 0.0004665, Val Loss: 0.0006099 +2025-02-26 14:23:15,186 Epoch 444/2000 +2025-02-26 14:28:23,030 Current Learning Rate: 0.0088525662 +2025-02-26 14:28:23,031 Train Loss: 0.0004726, Val Loss: 0.0006181 +2025-02-26 14:28:23,031 Epoch 445/2000 +2025-02-26 14:33:32,834 Current Learning Rate: 0.0088020298 +2025-02-26 14:33:32,835 Train Loss: 0.0004795, Val Loss: 0.0006117 +2025-02-26 14:33:32,835 Epoch 446/2000 +2025-02-26 14:38:38,775 Current Learning Rate: 0.0087505553 +2025-02-26 14:38:38,775 Train Loss: 0.0004676, Val Loss: 0.0006022 +2025-02-26 14:38:38,776 Epoch 447/2000 +2025-02-26 14:43:43,488 Current Learning Rate: 0.0086981555 +2025-02-26 14:43:43,547 Train Loss: 0.0004488, Val Loss: 0.0005895 +2025-02-26 14:43:43,548 Epoch 448/2000 +2025-02-26 14:48:51,949 Current Learning Rate: 0.0086448431 +2025-02-26 14:48:52,022 Train Loss: 0.0004377, Val Loss: 0.0005890 +2025-02-26 14:48:52,022 Epoch 449/2000 +2025-02-26 14:53:57,694 Current Learning Rate: 0.0085906315 +2025-02-26 14:53:57,751 Train Loss: 0.0004348, Val Loss: 0.0005890 +2025-02-26 14:53:57,751 Epoch 450/2000 +2025-02-26 14:59:06,852 Current Learning Rate: 0.0085355339 +2025-02-26 14:59:06,916 Train Loss: 0.0004358, Val Loss: 0.0005866 +2025-02-26 
14:59:06,916 Epoch 451/2000 +2025-02-26 15:04:12,264 Current Learning Rate: 0.0084795640 +2025-02-26 15:04:12,264 Train Loss: 0.0004364, Val Loss: 0.0005901 +2025-02-26 15:04:12,265 Epoch 452/2000 +2025-02-26 15:09:22,355 Current Learning Rate: 0.0084227355 +2025-02-26 15:09:22,355 Train Loss: 0.0004377, Val Loss: 0.0006011 +2025-02-26 15:09:22,355 Epoch 453/2000 +2025-02-26 15:14:28,565 Current Learning Rate: 0.0083650626 +2025-02-26 15:14:28,629 Train Loss: 0.0004396, Val Loss: 0.0005856 +2025-02-26 15:14:28,630 Epoch 454/2000 +2025-02-26 15:19:37,809 Current Learning Rate: 0.0083065593 +2025-02-26 15:19:37,914 Train Loss: 0.0004389, Val Loss: 0.0005807 +2025-02-26 15:19:37,914 Epoch 455/2000 +2025-02-26 15:24:42,802 Current Learning Rate: 0.0082472402 +2025-02-26 15:24:42,803 Train Loss: 0.0004391, Val Loss: 0.0005826 +2025-02-26 15:24:42,803 Epoch 456/2000 +2025-02-26 15:29:48,097 Current Learning Rate: 0.0081871199 +2025-02-26 15:29:48,098 Train Loss: 0.0004421, Val Loss: 0.0005935 +2025-02-26 15:29:48,098 Epoch 457/2000 +2025-02-26 15:34:53,119 Current Learning Rate: 0.0081262133 +2025-02-26 15:34:53,120 Train Loss: 0.0004440, Val Loss: 0.0005887 +2025-02-26 15:34:53,120 Epoch 458/2000 +2025-02-26 15:40:01,853 Current Learning Rate: 0.0080645353 +2025-02-26 15:40:01,853 Train Loss: 0.0004430, Val Loss: 0.0005896 +2025-02-26 15:40:01,854 Epoch 459/2000 +2025-02-26 15:45:12,163 Current Learning Rate: 0.0080021011 +2025-02-26 15:45:12,163 Train Loss: 0.0004472, Val Loss: 0.0005984 +2025-02-26 15:45:12,163 Epoch 460/2000 +2025-02-26 15:50:21,856 Current Learning Rate: 0.0079389263 +2025-02-26 15:50:21,857 Train Loss: 0.0004534, Val Loss: 0.0005917 +2025-02-26 15:50:21,857 Epoch 461/2000 +2025-02-26 15:55:27,028 Current Learning Rate: 0.0078750263 +2025-02-26 15:55:27,029 Train Loss: 0.0004541, Val Loss: 0.0006031 +2025-02-26 15:55:27,030 Epoch 462/2000 +2025-02-26 16:00:36,848 Current Learning Rate: 0.0078104169 +2025-02-26 16:00:36,849 Train Loss: 0.0004498, Val 
Loss: 0.0006000 +2025-02-26 16:00:36,849 Epoch 463/2000 +2025-02-26 16:05:41,619 Current Learning Rate: 0.0077451141 +2025-02-26 16:05:41,620 Train Loss: 0.0004402, Val Loss: 0.0005874 +2025-02-26 16:05:41,620 Epoch 464/2000 +2025-02-26 16:10:47,202 Current Learning Rate: 0.0076791340 +2025-02-26 16:10:47,274 Train Loss: 0.0004271, Val Loss: 0.0005762 +2025-02-26 16:10:47,274 Epoch 465/2000 +2025-02-26 16:15:56,290 Current Learning Rate: 0.0076124928 +2025-02-26 16:15:56,351 Train Loss: 0.0004201, Val Loss: 0.0005714 +2025-02-26 16:15:56,351 Epoch 466/2000 +2025-02-26 16:21:05,723 Current Learning Rate: 0.0075452071 +2025-02-26 16:21:05,796 Train Loss: 0.0004173, Val Loss: 0.0005708 +2025-02-26 16:21:05,797 Epoch 467/2000 +2025-02-26 16:26:15,997 Current Learning Rate: 0.0074772933 +2025-02-26 16:26:16,063 Train Loss: 0.0004174, Val Loss: 0.0005677 +2025-02-26 16:26:16,063 Epoch 468/2000 +2025-02-26 16:31:23,099 Current Learning Rate: 0.0074087684 +2025-02-26 16:31:23,099 Train Loss: 0.0004184, Val Loss: 0.0005703 +2025-02-26 16:31:23,099 Epoch 469/2000 +2025-02-26 16:36:28,555 Current Learning Rate: 0.0073396491 +2025-02-26 16:36:28,616 Train Loss: 0.0004197, Val Loss: 0.0005665 +2025-02-26 16:36:28,616 Epoch 470/2000 +2025-02-26 16:41:37,148 Current Learning Rate: 0.0072699525 +2025-02-26 16:41:37,205 Train Loss: 0.0004211, Val Loss: 0.0005660 +2025-02-26 16:41:37,205 Epoch 471/2000 +2025-02-26 16:46:45,392 Current Learning Rate: 0.0071996958 +2025-02-26 16:46:45,392 Train Loss: 0.0004217, Val Loss: 0.0005675 +2025-02-26 16:46:45,392 Epoch 472/2000 +2025-02-26 16:51:51,023 Current Learning Rate: 0.0071288965 +2025-02-26 16:51:51,023 Train Loss: 0.0004218, Val Loss: 0.0005779 +2025-02-26 16:51:51,023 Epoch 473/2000 +2025-02-26 16:56:56,687 Current Learning Rate: 0.0070575718 +2025-02-26 16:56:56,687 Train Loss: 0.0004239, Val Loss: 0.0005783 +2025-02-26 16:56:56,688 Epoch 474/2000 +2025-03-01 20:58:27,941 Added key: store_based_barrier_key:1 to store for rank: 0 
+2025-03-01 20:59:16,361 Loading best model from checkpoint. +2025-03-01 21:00:09,745 Testing completed and best model saved. +2025-03-01 22:32:21,642 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-01 22:33:00,444 Loading best model from checkpoint. +2025-03-01 22:34:06,101 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp_128_20250324_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp_128_20250324_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..4f3baa8d5920d8342fa646e9148ad05e10770a3b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Simvp_exp_128_20250324_training_log.log @@ -0,0 +1,6009 @@ +2025-03-24 15:39:09,628 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-24 15:39:09,736 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-24 15:39:09,798 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-24 15:39:09,812 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-24 15:39:09,824 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-24 15:39:09,835 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-24 15:39:09,841 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-24 15:39:09,845 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-24 15:40:09,695 Epoch 1/2000 +2025-03-24 15:42:44,497 Current Learning Rate: 0.0009999383 +2025-03-24 15:42:44,550 Train Loss: 0.0197788, Val Loss: 0.0081102 +2025-03-24 15:42:44,550 Epoch 2/2000 +2025-03-24 15:45:17,693 Current Learning Rate: 0.0009997533 +2025-03-24 15:45:17,747 Train Loss: 0.0055511, Val Loss: 0.0042628 +2025-03-24 15:45:17,748 Epoch 3/2000 +2025-03-24 15:47:51,521 Current Learning Rate: 0.0009994449 +2025-03-24 15:47:51,589 Train Loss: 0.0039316, Val Loss: 0.0035975 +2025-03-24 15:47:51,589 Epoch 4/2000 +2025-03-24 15:50:25,387 Current Learning Rate: 0.0009990134 
+2025-03-24 15:50:25,444 Train Loss: 0.0033328, Val Loss: 0.0030391 +2025-03-24 15:50:25,445 Epoch 5/2000 +2025-03-24 15:52:59,284 Current Learning Rate: 0.0009984587 +2025-03-24 15:52:59,358 Train Loss: 0.0029624, Val Loss: 0.0027794 +2025-03-24 15:52:59,359 Epoch 6/2000 +2025-03-24 15:55:34,048 Current Learning Rate: 0.0009977810 +2025-03-24 15:55:34,107 Train Loss: 0.0026678, Val Loss: 0.0025757 +2025-03-24 15:55:34,108 Epoch 7/2000 +2025-03-24 15:58:08,231 Current Learning Rate: 0.0009969805 +2025-03-24 15:58:08,295 Train Loss: 0.0024445, Val Loss: 0.0023652 +2025-03-24 15:58:08,295 Epoch 8/2000 +2025-03-24 16:00:42,743 Current Learning Rate: 0.0009960574 +2025-03-24 16:00:42,796 Train Loss: 0.0022773, Val Loss: 0.0022226 +2025-03-24 16:00:42,797 Epoch 9/2000 +2025-03-24 16:03:16,740 Current Learning Rate: 0.0009950118 +2025-03-24 16:03:16,791 Train Loss: 0.0021338, Val Loss: 0.0022133 +2025-03-24 16:03:16,792 Epoch 10/2000 +2025-03-24 16:05:51,159 Current Learning Rate: 0.0009938442 +2025-03-24 16:05:51,212 Train Loss: 0.0020019, Val Loss: 0.0021602 +2025-03-24 16:05:51,212 Epoch 11/2000 +2025-03-24 16:08:24,821 Current Learning Rate: 0.0009925547 +2025-03-24 16:08:24,879 Train Loss: 0.0019038, Val Loss: 0.0019061 +2025-03-24 16:08:24,879 Epoch 12/2000 +2025-03-24 16:10:58,445 Current Learning Rate: 0.0009911436 +2025-03-24 16:10:58,506 Train Loss: 0.0018069, Val Loss: 0.0018409 +2025-03-24 16:10:58,506 Epoch 13/2000 +2025-03-24 16:13:31,995 Current Learning Rate: 0.0009896114 +2025-03-24 16:13:31,995 Train Loss: 0.0017332, Val Loss: 0.0019299 +2025-03-24 16:13:31,995 Epoch 14/2000 +2025-03-24 16:16:06,062 Current Learning Rate: 0.0009879584 +2025-03-24 16:16:06,116 Train Loss: 0.0016469, Val Loss: 0.0016867 +2025-03-24 16:16:06,117 Epoch 15/2000 +2025-03-24 16:18:40,085 Current Learning Rate: 0.0009861850 +2025-03-24 16:18:40,085 Train Loss: 0.0015834, Val Loss: 0.0016918 +2025-03-24 16:18:40,086 Epoch 16/2000 +2025-03-24 16:21:14,365 Current Learning Rate: 
0.0009842916 +2025-03-24 16:21:14,429 Train Loss: 0.0015435, Val Loss: 0.0016286 +2025-03-24 16:21:14,429 Epoch 17/2000 +2025-03-24 16:23:48,223 Current Learning Rate: 0.0009822787 +2025-03-24 16:23:48,280 Train Loss: 0.0015103, Val Loss: 0.0015408 +2025-03-24 16:23:48,280 Epoch 18/2000 +2025-03-24 16:26:22,244 Current Learning Rate: 0.0009801468 +2025-03-24 16:26:22,302 Train Loss: 0.0014532, Val Loss: 0.0014740 +2025-03-24 16:26:22,302 Epoch 19/2000 +2025-03-24 16:28:56,645 Current Learning Rate: 0.0009778965 +2025-03-24 16:28:56,701 Train Loss: 0.0013774, Val Loss: 0.0014381 +2025-03-24 16:28:56,701 Epoch 20/2000 +2025-03-24 16:31:30,597 Current Learning Rate: 0.0009755283 +2025-03-24 16:31:30,597 Train Loss: 0.0013505, Val Loss: 0.0014482 +2025-03-24 16:31:30,597 Epoch 21/2000 +2025-03-24 16:34:04,963 Current Learning Rate: 0.0009730427 +2025-03-24 16:34:05,020 Train Loss: 0.0013151, Val Loss: 0.0014082 +2025-03-24 16:34:05,020 Epoch 22/2000 +2025-03-24 16:36:38,316 Current Learning Rate: 0.0009704404 +2025-03-24 16:36:38,369 Train Loss: 0.0012945, Val Loss: 0.0013546 +2025-03-24 16:36:38,370 Epoch 23/2000 +2025-03-24 16:39:11,580 Current Learning Rate: 0.0009677220 +2025-03-24 16:39:11,641 Train Loss: 0.0012554, Val Loss: 0.0013331 +2025-03-24 16:39:11,642 Epoch 24/2000 +2025-03-24 16:41:45,744 Current Learning Rate: 0.0009648882 +2025-03-24 16:41:45,797 Train Loss: 0.0012167, Val Loss: 0.0012768 +2025-03-24 16:41:45,798 Epoch 25/2000 +2025-03-24 16:44:19,234 Current Learning Rate: 0.0009619398 +2025-03-24 16:44:19,235 Train Loss: 0.0011661, Val Loss: 0.0013058 +2025-03-24 16:44:19,235 Epoch 26/2000 +2025-03-24 16:46:53,514 Current Learning Rate: 0.0009588773 +2025-03-24 16:46:53,570 Train Loss: 0.0011541, Val Loss: 0.0012394 +2025-03-24 16:46:53,570 Epoch 27/2000 +2025-03-24 16:49:27,830 Current Learning Rate: 0.0009557016 +2025-03-24 16:49:27,889 Train Loss: 0.0011225, Val Loss: 0.0012058 +2025-03-24 16:49:27,889 Epoch 28/2000 +2025-03-24 16:52:02,566 
Current Learning Rate: 0.0009524135 +2025-03-24 16:52:02,566 Train Loss: 0.0011068, Val Loss: 0.0012072 +2025-03-24 16:52:02,566 Epoch 29/2000 +2025-03-24 16:54:37,003 Current Learning Rate: 0.0009490138 +2025-03-24 16:54:37,057 Train Loss: 0.0010989, Val Loss: 0.0011673 +2025-03-24 16:54:37,057 Epoch 30/2000 +2025-03-24 16:57:10,624 Current Learning Rate: 0.0009455033 +2025-03-24 16:57:10,625 Train Loss: 0.0010718, Val Loss: 0.0011735 +2025-03-24 16:57:10,625 Epoch 31/2000 +2025-03-24 16:59:44,010 Current Learning Rate: 0.0009418828 +2025-03-24 16:59:44,066 Train Loss: 0.0010387, Val Loss: 0.0011111 +2025-03-24 16:59:44,066 Epoch 32/2000 +2025-03-24 17:02:17,560 Current Learning Rate: 0.0009381533 +2025-03-24 17:02:17,611 Train Loss: 0.0010035, Val Loss: 0.0011024 +2025-03-24 17:02:17,612 Epoch 33/2000 +2025-03-24 17:04:51,426 Current Learning Rate: 0.0009343158 +2025-03-24 17:04:51,426 Train Loss: 0.0009941, Val Loss: 0.0011232 +2025-03-24 17:04:51,427 Epoch 34/2000 +2025-03-24 17:07:26,585 Current Learning Rate: 0.0009303710 +2025-03-24 17:07:26,639 Train Loss: 0.0009906, Val Loss: 0.0010782 +2025-03-24 17:07:26,639 Epoch 35/2000 +2025-03-24 17:10:00,334 Current Learning Rate: 0.0009263201 +2025-03-24 17:10:00,335 Train Loss: 0.0009938, Val Loss: 0.0010842 +2025-03-24 17:10:00,335 Epoch 36/2000 +2025-03-24 17:12:34,013 Current Learning Rate: 0.0009221640 +2025-03-24 17:12:34,079 Train Loss: 0.0009653, Val Loss: 0.0010500 +2025-03-24 17:12:34,079 Epoch 37/2000 +2025-03-24 17:15:06,873 Current Learning Rate: 0.0009179037 +2025-03-24 17:15:06,936 Train Loss: 0.0009383, Val Loss: 0.0010207 +2025-03-24 17:15:06,936 Epoch 38/2000 +2025-03-24 17:17:39,441 Current Learning Rate: 0.0009135403 +2025-03-24 17:17:39,500 Train Loss: 0.0009074, Val Loss: 0.0009979 +2025-03-24 17:17:39,500 Epoch 39/2000 +2025-03-24 17:20:12,945 Current Learning Rate: 0.0009090749 +2025-03-24 17:20:12,946 Train Loss: 0.0009087, Val Loss: 0.0010132 +2025-03-24 17:20:12,946 Epoch 40/2000 
+2025-03-24 17:22:46,033 Current Learning Rate: 0.0009045085 +2025-03-24 17:22:46,034 Train Loss: 0.0009012, Val Loss: 0.0010041 +2025-03-24 17:22:46,034 Epoch 41/2000 +2025-03-24 17:25:19,798 Current Learning Rate: 0.0008998423 +2025-03-24 17:25:19,903 Train Loss: 0.0008789, Val Loss: 0.0009823 +2025-03-24 17:25:19,903 Epoch 42/2000 +2025-03-24 17:27:54,191 Current Learning Rate: 0.0008950775 +2025-03-24 17:27:54,191 Train Loss: 0.0008790, Val Loss: 0.0009967 +2025-03-24 17:27:54,192 Epoch 43/2000 +2025-03-24 17:30:28,315 Current Learning Rate: 0.0008902152 +2025-03-24 17:30:28,370 Train Loss: 0.0008776, Val Loss: 0.0009592 +2025-03-24 17:30:28,371 Epoch 44/2000 +2025-03-24 17:33:02,355 Current Learning Rate: 0.0008852566 +2025-03-24 17:33:02,414 Train Loss: 0.0008719, Val Loss: 0.0009584 +2025-03-24 17:33:02,414 Epoch 45/2000 +2025-03-24 17:35:37,689 Current Learning Rate: 0.0008802030 +2025-03-24 17:35:37,690 Train Loss: 0.0008718, Val Loss: 0.0009617 +2025-03-24 17:35:37,690 Epoch 46/2000 +2025-03-24 17:38:10,871 Current Learning Rate: 0.0008750555 +2025-03-24 17:38:10,926 Train Loss: 0.0008566, Val Loss: 0.0009489 +2025-03-24 17:38:10,926 Epoch 47/2000 +2025-03-24 17:40:42,826 Current Learning Rate: 0.0008698155 +2025-03-24 17:40:42,968 Train Loss: 0.0008280, Val Loss: 0.0009113 +2025-03-24 17:40:42,968 Epoch 48/2000 +2025-03-24 17:43:15,400 Current Learning Rate: 0.0008644843 +2025-03-24 17:43:15,452 Train Loss: 0.0008106, Val Loss: 0.0009085 +2025-03-24 17:43:15,452 Epoch 49/2000 +2025-03-24 17:45:48,385 Current Learning Rate: 0.0008590631 +2025-03-24 17:45:48,386 Train Loss: 0.0008068, Val Loss: 0.0009289 +2025-03-24 17:45:48,386 Epoch 50/2000 +2025-03-24 17:48:20,733 Current Learning Rate: 0.0008535534 +2025-03-24 17:48:20,733 Train Loss: 0.0008070, Val Loss: 0.0009351 +2025-03-24 17:48:20,734 Epoch 51/2000 +2025-03-24 17:50:54,263 Current Learning Rate: 0.0008479564 +2025-03-24 17:50:54,317 Train Loss: 0.0008026, Val Loss: 0.0009024 +2025-03-24 
17:50:54,317 Epoch 52/2000 +2025-03-24 17:53:27,220 Current Learning Rate: 0.0008422736 +2025-03-24 17:53:27,276 Train Loss: 0.0007976, Val Loss: 0.0009005 +2025-03-24 17:53:27,276 Epoch 53/2000 +2025-03-24 17:56:00,282 Current Learning Rate: 0.0008365063 +2025-03-24 17:56:00,334 Train Loss: 0.0007908, Val Loss: 0.0008925 +2025-03-24 17:56:00,334 Epoch 54/2000 +2025-03-24 17:58:33,287 Current Learning Rate: 0.0008306559 +2025-03-24 17:58:33,336 Train Loss: 0.0007846, Val Loss: 0.0008894 +2025-03-24 17:58:33,336 Epoch 55/2000 +2025-03-24 18:01:05,970 Current Learning Rate: 0.0008247240 +2025-03-24 18:01:06,023 Train Loss: 0.0007722, Val Loss: 0.0008741 +2025-03-24 18:01:06,023 Epoch 56/2000 +2025-03-24 18:03:38,575 Current Learning Rate: 0.0008187120 +2025-03-24 18:03:38,642 Train Loss: 0.0007569, Val Loss: 0.0008643 +2025-03-24 18:03:38,642 Epoch 57/2000 +2025-03-24 18:06:10,755 Current Learning Rate: 0.0008126213 +2025-03-24 18:06:10,828 Train Loss: 0.0007503, Val Loss: 0.0008542 +2025-03-24 18:06:10,828 Epoch 58/2000 +2025-03-24 18:08:43,753 Current Learning Rate: 0.0008064535 +2025-03-24 18:08:43,814 Train Loss: 0.0007467, Val Loss: 0.0008458 +2025-03-24 18:08:43,814 Epoch 59/2000 +2025-03-24 18:11:17,982 Current Learning Rate: 0.0008002101 +2025-03-24 18:11:18,174 Train Loss: 0.0007511, Val Loss: 0.0008451 +2025-03-24 18:11:18,174 Epoch 60/2000 +2025-03-24 18:13:52,226 Current Learning Rate: 0.0007938926 +2025-03-24 18:13:52,281 Train Loss: 0.0007487, Val Loss: 0.0008404 +2025-03-24 18:13:52,282 Epoch 61/2000 +2025-03-24 18:16:25,275 Current Learning Rate: 0.0007875026 +2025-03-24 18:16:25,275 Train Loss: 0.0007561, Val Loss: 0.0008885 +2025-03-24 18:16:25,275 Epoch 62/2000 +2025-03-24 18:18:58,344 Current Learning Rate: 0.0007810417 +2025-03-24 18:18:58,398 Train Loss: 0.0007500, Val Loss: 0.0008367 +2025-03-24 18:18:58,398 Epoch 63/2000 +2025-03-24 18:21:30,430 Current Learning Rate: 0.0007745114 +2025-03-24 18:21:30,504 Train Loss: 0.0007279, Val Loss: 
0.0008333 +2025-03-24 18:21:30,505 Epoch 64/2000 +2025-03-24 18:24:03,236 Current Learning Rate: 0.0007679134 +2025-03-24 18:24:03,292 Train Loss: 0.0007030, Val Loss: 0.0007972 +2025-03-24 18:24:03,292 Epoch 65/2000 +2025-03-24 18:26:37,229 Current Learning Rate: 0.0007612493 +2025-03-24 18:26:37,229 Train Loss: 0.0006938, Val Loss: 0.0007984 +2025-03-24 18:26:37,229 Epoch 66/2000 +2025-03-24 18:29:11,954 Current Learning Rate: 0.0007545207 +2025-03-24 18:29:11,954 Train Loss: 0.0006939, Val Loss: 0.0008072 +2025-03-24 18:29:11,954 Epoch 67/2000 +2025-03-24 18:31:45,295 Current Learning Rate: 0.0007477293 +2025-03-24 18:31:45,374 Train Loss: 0.0006957, Val Loss: 0.0007892 +2025-03-24 18:31:45,375 Epoch 68/2000 +2025-03-24 18:34:18,616 Current Learning Rate: 0.0007408768 +2025-03-24 18:34:18,675 Train Loss: 0.0006896, Val Loss: 0.0007812 +2025-03-24 18:34:18,675 Epoch 69/2000 +2025-03-24 18:36:52,578 Current Learning Rate: 0.0007339649 +2025-03-24 18:36:52,579 Train Loss: 0.0006841, Val Loss: 0.0007843 +2025-03-24 18:36:52,579 Epoch 70/2000 +2025-03-24 18:39:26,902 Current Learning Rate: 0.0007269952 +2025-03-24 18:39:26,962 Train Loss: 0.0006786, Val Loss: 0.0007693 +2025-03-24 18:39:26,962 Epoch 71/2000 +2025-03-24 18:42:00,876 Current Learning Rate: 0.0007199696 +2025-03-24 18:42:00,877 Train Loss: 0.0006746, Val Loss: 0.0007773 +2025-03-24 18:42:00,877 Epoch 72/2000 +2025-03-24 18:44:34,755 Current Learning Rate: 0.0007128896 +2025-03-24 18:44:34,756 Train Loss: 0.0006809, Val Loss: 0.0007918 +2025-03-24 18:44:34,756 Epoch 73/2000 +2025-03-24 18:47:08,574 Current Learning Rate: 0.0007057572 +2025-03-24 18:47:08,574 Train Loss: 0.0006835, Val Loss: 0.0008038 +2025-03-24 18:47:08,574 Epoch 74/2000 +2025-03-24 18:49:42,065 Current Learning Rate: 0.0006985739 +2025-03-24 18:49:42,125 Train Loss: 0.0006805, Val Loss: 0.0007691 +2025-03-24 18:49:42,125 Epoch 75/2000 +2025-03-24 18:52:16,423 Current Learning Rate: 0.0006913417 +2025-03-24 18:52:16,482 Train Loss: 
0.0006684, Val Loss: 0.0007539 +2025-03-24 18:52:16,483 Epoch 76/2000 +2025-03-24 18:54:49,827 Current Learning Rate: 0.0006840623 +2025-03-24 18:54:49,878 Train Loss: 0.0006494, Val Loss: 0.0007427 +2025-03-24 18:54:49,878 Epoch 77/2000 +2025-03-24 18:57:23,471 Current Learning Rate: 0.0006767374 +2025-03-24 18:57:23,525 Train Loss: 0.0006390, Val Loss: 0.0007388 +2025-03-24 18:57:23,525 Epoch 78/2000 +2025-03-24 18:59:56,618 Current Learning Rate: 0.0006693690 +2025-03-24 18:59:56,618 Train Loss: 0.0006373, Val Loss: 0.0007416 +2025-03-24 18:59:56,619 Epoch 79/2000 +2025-03-24 19:02:29,986 Current Learning Rate: 0.0006619587 +2025-03-24 19:02:30,052 Train Loss: 0.0006391, Val Loss: 0.0007311 +2025-03-24 19:02:30,052 Epoch 80/2000 +2025-03-24 19:05:04,098 Current Learning Rate: 0.0006545085 +2025-03-24 19:05:04,099 Train Loss: 0.0006367, Val Loss: 0.0007824 +2025-03-24 19:05:04,099 Epoch 81/2000 +2025-03-24 19:07:38,186 Current Learning Rate: 0.0006470202 +2025-03-24 19:07:38,187 Train Loss: 0.0006400, Val Loss: 0.0007371 +2025-03-24 19:07:38,188 Epoch 82/2000 +2025-03-24 19:10:12,080 Current Learning Rate: 0.0006394956 +2025-03-24 19:10:12,080 Train Loss: 0.0006416, Val Loss: 0.0007346 +2025-03-24 19:10:12,081 Epoch 83/2000 +2025-03-24 19:12:44,743 Current Learning Rate: 0.0006319365 +2025-03-24 19:12:44,803 Train Loss: 0.0006403, Val Loss: 0.0007275 +2025-03-24 19:12:44,804 Epoch 84/2000 +2025-03-24 19:15:18,144 Current Learning Rate: 0.0006243449 +2025-03-24 19:15:18,198 Train Loss: 0.0006351, Val Loss: 0.0007234 +2025-03-24 19:15:18,198 Epoch 85/2000 +2025-03-24 19:17:52,214 Current Learning Rate: 0.0006167227 +2025-03-24 19:17:52,294 Train Loss: 0.0006240, Val Loss: 0.0007210 +2025-03-24 19:17:52,294 Epoch 86/2000 +2025-03-24 19:20:26,162 Current Learning Rate: 0.0006090716 +2025-03-24 19:20:26,219 Train Loss: 0.0006127, Val Loss: 0.0007086 +2025-03-24 19:20:26,219 Epoch 87/2000 +2025-03-24 19:22:59,886 Current Learning Rate: 0.0006013936 +2025-03-24 
19:22:59,941 Train Loss: 0.0006052, Val Loss: 0.0006970 +2025-03-24 19:22:59,941 Epoch 88/2000 +2025-03-24 19:25:32,745 Current Learning Rate: 0.0005936907 +2025-03-24 19:25:32,806 Train Loss: 0.0006037, Val Loss: 0.0006966 +2025-03-24 19:25:32,806 Epoch 89/2000 +2025-03-24 19:28:06,300 Current Learning Rate: 0.0005859646 +2025-03-24 19:28:06,300 Train Loss: 0.0006032, Val Loss: 0.0007170 +2025-03-24 19:28:06,300 Epoch 90/2000 +2025-03-24 19:30:39,733 Current Learning Rate: 0.0005782172 +2025-03-24 19:30:39,733 Train Loss: 0.0006040, Val Loss: 0.0007170 +2025-03-24 19:30:39,733 Epoch 91/2000 +2025-03-24 19:33:13,425 Current Learning Rate: 0.0005704506 +2025-03-24 19:33:13,426 Train Loss: 0.0006045, Val Loss: 0.0006994 +2025-03-24 19:33:13,426 Epoch 92/2000 +2025-03-24 19:35:46,375 Current Learning Rate: 0.0005626666 +2025-03-24 19:35:46,438 Train Loss: 0.0006066, Val Loss: 0.0006943 +2025-03-24 19:35:46,439 Epoch 93/2000 +2025-03-24 19:38:19,102 Current Learning Rate: 0.0005548672 +2025-03-24 19:38:19,174 Train Loss: 0.0006052, Val Loss: 0.0006914 +2025-03-24 19:38:19,174 Epoch 94/2000 +2025-03-24 19:40:53,276 Current Learning Rate: 0.0005470542 +2025-03-24 19:40:53,277 Train Loss: 0.0006076, Val Loss: 0.0006948 +2025-03-24 19:40:53,277 Epoch 95/2000 +2025-03-24 19:43:26,964 Current Learning Rate: 0.0005392295 +2025-03-24 19:43:27,025 Train Loss: 0.0006057, Val Loss: 0.0006858 +2025-03-24 19:43:27,025 Epoch 96/2000 +2025-03-24 19:45:59,985 Current Learning Rate: 0.0005313953 +2025-03-24 19:46:00,065 Train Loss: 0.0005951, Val Loss: 0.0006791 +2025-03-24 19:46:00,065 Epoch 97/2000 +2025-03-24 19:48:34,486 Current Learning Rate: 0.0005235532 +2025-03-24 19:48:34,548 Train Loss: 0.0005814, Val Loss: 0.0006770 +2025-03-24 19:48:34,548 Epoch 98/2000 +2025-03-24 19:51:08,977 Current Learning Rate: 0.0005157054 +2025-03-24 19:51:09,048 Train Loss: 0.0005736, Val Loss: 0.0006640 +2025-03-24 19:51:09,048 Epoch 99/2000 +2025-03-24 19:53:43,110 Current Learning Rate: 
0.0005078537 +2025-03-24 19:53:43,175 Train Loss: 0.0005701, Val Loss: 0.0006616 +2025-03-24 19:53:43,175 Epoch 100/2000 +2025-03-24 19:56:17,459 Current Learning Rate: 0.0005000000 +2025-03-24 19:56:17,460 Train Loss: 0.0005700, Val Loss: 0.0006777 +2025-03-24 19:56:17,460 Epoch 101/2000 +2025-03-24 19:58:51,131 Current Learning Rate: 0.0004921463 +2025-03-24 19:58:51,131 Train Loss: 0.0005701, Val Loss: 0.0006619 +2025-03-24 19:58:51,132 Epoch 102/2000 +2025-03-24 20:01:25,023 Current Learning Rate: 0.0004842946 +2025-03-24 20:01:25,023 Train Loss: 0.0005709, Val Loss: 0.0006677 +2025-03-24 20:01:25,023 Epoch 103/2000 +2025-03-24 20:03:58,671 Current Learning Rate: 0.0004764468 +2025-03-24 20:03:58,671 Train Loss: 0.0005713, Val Loss: 0.0006622 +2025-03-24 20:03:58,671 Epoch 104/2000 +2025-03-24 20:06:32,095 Current Learning Rate: 0.0004686047 +2025-03-24 20:06:32,096 Train Loss: 0.0005694, Val Loss: 0.0006650 +2025-03-24 20:06:32,096 Epoch 105/2000 +2025-03-24 20:09:05,138 Current Learning Rate: 0.0004607705 +2025-03-24 20:09:05,207 Train Loss: 0.0005680, Val Loss: 0.0006602 +2025-03-24 20:09:05,208 Epoch 106/2000 +2025-03-24 20:11:38,277 Current Learning Rate: 0.0004529458 +2025-03-24 20:11:38,278 Train Loss: 0.0005707, Val Loss: 0.0006622 +2025-03-24 20:11:38,278 Epoch 107/2000 +2025-03-24 20:14:11,994 Current Learning Rate: 0.0004451328 +2025-03-24 20:14:11,994 Train Loss: 0.0005725, Val Loss: 0.0006625 +2025-03-24 20:14:11,995 Epoch 108/2000 +2025-03-24 20:16:44,972 Current Learning Rate: 0.0004373334 +2025-03-24 20:16:45,028 Train Loss: 0.0005676, Val Loss: 0.0006585 +2025-03-24 20:16:45,028 Epoch 109/2000 +2025-03-24 20:19:18,865 Current Learning Rate: 0.0004295494 +2025-03-24 20:19:18,960 Train Loss: 0.0005596, Val Loss: 0.0006502 +2025-03-24 20:19:18,960 Epoch 110/2000 +2025-03-24 20:21:52,057 Current Learning Rate: 0.0004217828 +2025-03-24 20:21:52,113 Train Loss: 0.0005519, Val Loss: 0.0006435 +2025-03-24 20:21:52,113 Epoch 111/2000 +2025-03-24 
20:24:24,809 Current Learning Rate: 0.0004140354 +2025-03-24 20:24:24,867 Train Loss: 0.0005466, Val Loss: 0.0006418 +2025-03-24 20:24:24,867 Epoch 112/2000 +2025-03-24 20:26:58,265 Current Learning Rate: 0.0004063093 +2025-03-24 20:26:58,327 Train Loss: 0.0005435, Val Loss: 0.0006386 +2025-03-24 20:26:58,327 Epoch 113/2000 +2025-03-24 20:29:32,974 Current Learning Rate: 0.0003986064 +2025-03-24 20:29:33,033 Train Loss: 0.0005426, Val Loss: 0.0006374 +2025-03-24 20:29:33,033 Epoch 114/2000 +2025-03-24 20:32:07,281 Current Learning Rate: 0.0003909284 +2025-03-24 20:32:07,281 Train Loss: 0.0005432, Val Loss: 0.0006407 +2025-03-24 20:32:07,281 Epoch 115/2000 +2025-03-24 20:34:41,351 Current Learning Rate: 0.0003832773 +2025-03-24 20:34:41,416 Train Loss: 0.0005442, Val Loss: 0.0006369 +2025-03-24 20:34:41,416 Epoch 116/2000 +2025-03-24 20:37:15,536 Current Learning Rate: 0.0003756551 +2025-03-24 20:37:15,536 Train Loss: 0.0005442, Val Loss: 0.0006378 +2025-03-24 20:37:15,536 Epoch 117/2000 +2025-03-24 20:39:48,804 Current Learning Rate: 0.0003680635 +2025-03-24 20:39:48,805 Train Loss: 0.0005449, Val Loss: 0.0006447 +2025-03-24 20:39:48,805 Epoch 118/2000 +2025-03-24 20:42:22,076 Current Learning Rate: 0.0003605044 +2025-03-24 20:42:22,077 Train Loss: 0.0005468, Val Loss: 0.0006409 +2025-03-24 20:42:22,077 Epoch 119/2000 +2025-03-24 20:44:55,808 Current Learning Rate: 0.0003529798 +2025-03-24 20:44:55,808 Train Loss: 0.0005456, Val Loss: 0.0006376 +2025-03-24 20:44:55,808 Epoch 120/2000 +2025-03-24 20:47:29,288 Current Learning Rate: 0.0003454915 +2025-03-24 20:47:29,347 Train Loss: 0.0005419, Val Loss: 0.0006354 +2025-03-24 20:47:29,347 Epoch 121/2000 +2025-03-24 20:50:02,950 Current Learning Rate: 0.0003380413 +2025-03-24 20:50:03,005 Train Loss: 0.0005362, Val Loss: 0.0006315 +2025-03-24 20:50:03,005 Epoch 122/2000 +2025-03-24 20:52:36,421 Current Learning Rate: 0.0003306310 +2025-03-24 20:52:36,476 Train Loss: 0.0005299, Val Loss: 0.0006238 +2025-03-24 
20:52:36,476 Epoch 123/2000 +2025-03-24 20:55:10,621 Current Learning Rate: 0.0003232626 +2025-03-24 20:55:10,677 Train Loss: 0.0005257, Val Loss: 0.0006182 +2025-03-24 20:55:10,678 Epoch 124/2000 +2025-03-24 20:57:45,363 Current Learning Rate: 0.0003159377 +2025-03-24 20:57:45,415 Train Loss: 0.0005229, Val Loss: 0.0006160 +2025-03-24 20:57:45,415 Epoch 125/2000 +2025-03-24 21:00:19,801 Current Learning Rate: 0.0003086583 +2025-03-24 21:00:19,852 Train Loss: 0.0005211, Val Loss: 0.0006146 +2025-03-24 21:00:19,852 Epoch 126/2000 +2025-03-24 21:02:53,877 Current Learning Rate: 0.0003014261 +2025-03-24 21:02:53,928 Train Loss: 0.0005210, Val Loss: 0.0006141 +2025-03-24 21:02:53,928 Epoch 127/2000 +2025-03-24 21:05:27,700 Current Learning Rate: 0.0002942428 +2025-03-24 21:05:27,700 Train Loss: 0.0005219, Val Loss: 0.0006191 +2025-03-24 21:05:27,700 Epoch 128/2000 +2025-03-24 21:08:01,555 Current Learning Rate: 0.0002871104 +2025-03-24 21:08:01,555 Train Loss: 0.0005224, Val Loss: 0.0006192 +2025-03-24 21:08:01,556 Epoch 129/2000 +2025-03-24 21:10:34,839 Current Learning Rate: 0.0002800304 +2025-03-24 21:10:34,840 Train Loss: 0.0005235, Val Loss: 0.0006185 +2025-03-24 21:10:34,840 Epoch 130/2000 +2025-03-24 21:13:07,909 Current Learning Rate: 0.0002730048 +2025-03-24 21:13:07,909 Train Loss: 0.0005242, Val Loss: 0.0006176 +2025-03-24 21:13:07,909 Epoch 131/2000 +2025-03-24 21:15:40,743 Current Learning Rate: 0.0002660351 +2025-03-24 21:15:40,744 Train Loss: 0.0005233, Val Loss: 0.0006159 +2025-03-24 21:15:40,744 Epoch 132/2000 +2025-03-24 21:18:14,189 Current Learning Rate: 0.0002591232 +2025-03-24 21:18:14,246 Train Loss: 0.0005206, Val Loss: 0.0006122 +2025-03-24 21:18:14,246 Epoch 133/2000 +2025-03-24 21:20:48,325 Current Learning Rate: 0.0002522707 +2025-03-24 21:20:48,381 Train Loss: 0.0005165, Val Loss: 0.0006080 +2025-03-24 21:20:48,381 Epoch 134/2000 +2025-03-24 21:23:21,735 Current Learning Rate: 0.0002454793 +2025-03-24 21:23:21,797 Train Loss: 0.0005124, Val 
Loss: 0.0006044 +2025-03-24 21:23:21,798 Epoch 135/2000 +2025-03-24 21:25:54,858 Current Learning Rate: 0.0002387507 +2025-03-24 21:25:54,927 Train Loss: 0.0005090, Val Loss: 0.0006020 +2025-03-24 21:25:54,927 Epoch 136/2000 +2025-03-24 21:28:27,273 Current Learning Rate: 0.0002320866 +2025-03-24 21:28:27,329 Train Loss: 0.0005067, Val Loss: 0.0005996 +2025-03-24 21:28:27,329 Epoch 137/2000 +2025-03-24 21:31:00,134 Current Learning Rate: 0.0002254886 +2025-03-24 21:31:00,192 Train Loss: 0.0005050, Val Loss: 0.0005979 +2025-03-24 21:31:00,192 Epoch 138/2000 +2025-03-24 21:33:33,059 Current Learning Rate: 0.0002189583 +2025-03-24 21:33:33,116 Train Loss: 0.0005039, Val Loss: 0.0005962 +2025-03-24 21:33:33,116 Epoch 139/2000 +2025-03-24 21:36:06,645 Current Learning Rate: 0.0002124974 +2025-03-24 21:36:06,646 Train Loss: 0.0005043, Val Loss: 0.0005974 +2025-03-24 21:36:06,646 Epoch 140/2000 +2025-03-24 21:38:39,484 Current Learning Rate: 0.0002061074 +2025-03-24 21:38:39,484 Train Loss: 0.0005050, Val Loss: 0.0005991 +2025-03-24 21:38:39,485 Epoch 141/2000 +2025-03-24 21:41:13,525 Current Learning Rate: 0.0001997899 +2025-03-24 21:41:13,526 Train Loss: 0.0005060, Val Loss: 0.0005980 +2025-03-24 21:41:13,526 Epoch 142/2000 +2025-03-24 21:43:47,636 Current Learning Rate: 0.0001935465 +2025-03-24 21:43:47,698 Train Loss: 0.0005052, Val Loss: 0.0005962 +2025-03-24 21:43:47,698 Epoch 143/2000 +2025-03-24 21:46:20,881 Current Learning Rate: 0.0001873787 +2025-03-24 21:46:20,940 Train Loss: 0.0005033, Val Loss: 0.0005938 +2025-03-24 21:46:20,940 Epoch 144/2000 +2025-03-24 21:48:55,910 Current Learning Rate: 0.0001812880 +2025-03-24 21:48:55,967 Train Loss: 0.0005011, Val Loss: 0.0005917 +2025-03-24 21:48:55,969 Epoch 145/2000 +2025-03-24 21:51:28,805 Current Learning Rate: 0.0001752760 +2025-03-24 21:51:28,884 Train Loss: 0.0004990, Val Loss: 0.0005896 +2025-03-24 21:51:28,885 Epoch 146/2000 +2025-03-24 21:54:03,191 Current Learning Rate: 0.0001693441 +2025-03-24 
21:54:03,254 Train Loss: 0.0004970, Val Loss: 0.0005877 +2025-03-24 21:54:03,254 Epoch 147/2000 +2025-03-24 21:56:36,534 Current Learning Rate: 0.0001634937 +2025-03-24 21:56:36,597 Train Loss: 0.0004954, Val Loss: 0.0005864 +2025-03-24 21:56:36,597 Epoch 148/2000 +2025-03-24 21:59:10,291 Current Learning Rate: 0.0001577264 +2025-03-24 21:59:10,357 Train Loss: 0.0004941, Val Loss: 0.0005854 +2025-03-24 21:59:10,357 Epoch 149/2000 +2025-03-24 22:01:44,573 Current Learning Rate: 0.0001520436 +2025-03-24 22:01:44,631 Train Loss: 0.0004934, Val Loss: 0.0005847 +2025-03-24 22:01:44,631 Epoch 150/2000 +2025-03-24 22:04:18,436 Current Learning Rate: 0.0001464466 +2025-03-24 22:04:18,436 Train Loss: 0.0004935, Val Loss: 0.0005848 +2025-03-24 22:04:18,436 Epoch 151/2000 +2025-03-24 22:06:51,349 Current Learning Rate: 0.0001409369 +2025-03-24 22:06:51,403 Train Loss: 0.0004930, Val Loss: 0.0005832 +2025-03-24 22:06:51,403 Epoch 152/2000 +2025-03-24 22:09:24,685 Current Learning Rate: 0.0001355157 +2025-03-24 22:09:24,740 Train Loss: 0.0004918, Val Loss: 0.0005806 +2025-03-24 22:09:24,741 Epoch 153/2000 +2025-03-24 22:11:58,833 Current Learning Rate: 0.0001301845 +2025-03-24 22:11:58,891 Train Loss: 0.0004902, Val Loss: 0.0005786 +2025-03-24 22:11:58,892 Epoch 154/2000 +2025-03-24 22:14:33,144 Current Learning Rate: 0.0001249445 +2025-03-24 22:14:33,200 Train Loss: 0.0004888, Val Loss: 0.0005773 +2025-03-24 22:14:33,201 Epoch 155/2000 +2025-03-24 22:17:06,662 Current Learning Rate: 0.0001197970 +2025-03-24 22:17:06,723 Train Loss: 0.0004874, Val Loss: 0.0005763 +2025-03-24 22:17:06,723 Epoch 156/2000 +2025-03-24 22:19:40,791 Current Learning Rate: 0.0001147434 +2025-03-24 22:19:40,846 Train Loss: 0.0004861, Val Loss: 0.0005755 +2025-03-24 22:19:40,846 Epoch 157/2000 +2025-03-24 22:22:15,192 Current Learning Rate: 0.0001097848 +2025-03-24 22:22:15,249 Train Loss: 0.0004851, Val Loss: 0.0005749 +2025-03-24 22:22:15,249 Epoch 158/2000 +2025-03-24 22:24:48,098 Current Learning 
Rate: 0.0001049225 +2025-03-24 22:24:48,098 Train Loss: 0.0004844, Val Loss: 0.0005759 +2025-03-24 22:24:48,099 Epoch 159/2000 +2025-03-24 22:27:21,477 Current Learning Rate: 0.0001001577 +2025-03-24 22:27:21,546 Train Loss: 0.0004837, Val Loss: 0.0005736 +2025-03-24 22:27:21,546 Epoch 160/2000 +2025-03-24 22:29:55,297 Current Learning Rate: 0.0000954915 +2025-03-24 22:29:55,353 Train Loss: 0.0004827, Val Loss: 0.0005708 +2025-03-24 22:29:55,353 Epoch 161/2000 +2025-03-24 22:32:28,735 Current Learning Rate: 0.0000909251 +2025-03-24 22:32:28,809 Train Loss: 0.0004817, Val Loss: 0.0005700 +2025-03-24 22:32:28,809 Epoch 162/2000 +2025-03-24 22:35:03,114 Current Learning Rate: 0.0000864597 +2025-03-24 22:35:03,174 Train Loss: 0.0004806, Val Loss: 0.0005695 +2025-03-24 22:35:03,174 Epoch 163/2000 +2025-03-24 22:37:36,570 Current Learning Rate: 0.0000820963 +2025-03-24 22:37:36,642 Train Loss: 0.0004797, Val Loss: 0.0005689 +2025-03-24 22:37:36,646 Epoch 164/2000 +2025-03-24 22:40:09,574 Current Learning Rate: 0.0000778360 +2025-03-24 22:40:09,636 Train Loss: 0.0004789, Val Loss: 0.0005682 +2025-03-24 22:40:09,636 Epoch 165/2000 +2025-03-24 22:42:42,181 Current Learning Rate: 0.0000736799 +2025-03-24 22:42:42,238 Train Loss: 0.0004781, Val Loss: 0.0005676 +2025-03-24 22:42:42,238 Epoch 166/2000 +2025-03-24 22:45:15,393 Current Learning Rate: 0.0000696290 +2025-03-24 22:45:15,446 Train Loss: 0.0004774, Val Loss: 0.0005671 +2025-03-24 22:45:15,446 Epoch 167/2000 +2025-03-24 22:47:48,371 Current Learning Rate: 0.0000656842 +2025-03-24 22:47:48,468 Train Loss: 0.0004765, Val Loss: 0.0005668 +2025-03-24 22:47:48,468 Epoch 168/2000 +2025-03-24 22:50:21,509 Current Learning Rate: 0.0000618467 +2025-03-24 22:50:21,572 Train Loss: 0.0004756, Val Loss: 0.0005659 +2025-03-24 22:50:21,572 Epoch 169/2000 +2025-03-24 22:52:54,686 Current Learning Rate: 0.0000581172 +2025-03-24 22:52:54,745 Train Loss: 0.0004748, Val Loss: 0.0005650 +2025-03-24 22:52:54,745 Epoch 170/2000 +2025-03-24 
22:55:27,806 Current Learning Rate: 0.0000544967 +2025-03-24 22:55:27,859 Train Loss: 0.0004740, Val Loss: 0.0005642 +2025-03-24 22:55:27,860 Epoch 171/2000 +2025-03-24 22:58:01,625 Current Learning Rate: 0.0000509862 +2025-03-24 22:58:01,693 Train Loss: 0.0004733, Val Loss: 0.0005631 +2025-03-24 22:58:01,693 Epoch 172/2000 +2025-03-24 23:00:35,058 Current Learning Rate: 0.0000475865 +2025-03-24 23:00:35,124 Train Loss: 0.0004726, Val Loss: 0.0005622 +2025-03-24 23:00:35,125 Epoch 173/2000 +2025-03-24 23:03:07,164 Current Learning Rate: 0.0000442984 +2025-03-24 23:03:07,224 Train Loss: 0.0004719, Val Loss: 0.0005616 +2025-03-24 23:03:07,225 Epoch 174/2000 +2025-03-24 23:05:40,861 Current Learning Rate: 0.0000411227 +2025-03-24 23:05:40,918 Train Loss: 0.0004712, Val Loss: 0.0005611 +2025-03-24 23:05:40,919 Epoch 175/2000 +2025-03-24 23:08:14,265 Current Learning Rate: 0.0000380602 +2025-03-24 23:08:14,327 Train Loss: 0.0004706, Val Loss: 0.0005605 +2025-03-24 23:08:14,327 Epoch 176/2000 +2025-03-24 23:10:48,642 Current Learning Rate: 0.0000351118 +2025-03-24 23:10:48,693 Train Loss: 0.0004700, Val Loss: 0.0005600 +2025-03-24 23:10:48,694 Epoch 177/2000 +2025-03-24 23:13:22,459 Current Learning Rate: 0.0000322780 +2025-03-24 23:13:22,511 Train Loss: 0.0004694, Val Loss: 0.0005595 +2025-03-24 23:13:22,511 Epoch 178/2000 +2025-03-24 23:15:56,518 Current Learning Rate: 0.0000295596 +2025-03-24 23:15:56,575 Train Loss: 0.0004688, Val Loss: 0.0005591 +2025-03-24 23:15:56,575 Epoch 179/2000 +2025-03-24 23:18:30,367 Current Learning Rate: 0.0000269573 +2025-03-24 23:18:30,427 Train Loss: 0.0004682, Val Loss: 0.0005587 +2025-03-24 23:18:30,428 Epoch 180/2000 +2025-03-24 23:21:05,409 Current Learning Rate: 0.0000244717 +2025-03-24 23:21:05,463 Train Loss: 0.0004677, Val Loss: 0.0005582 +2025-03-24 23:21:05,464 Epoch 181/2000 +2025-03-24 23:23:39,197 Current Learning Rate: 0.0000221035 +2025-03-24 23:23:39,248 Train Loss: 0.0004672, Val Loss: 0.0005577 +2025-03-24 
23:23:39,248 Epoch 182/2000 +2025-03-24 23:26:13,525 Current Learning Rate: 0.0000198532 +2025-03-24 23:26:13,601 Train Loss: 0.0004668, Val Loss: 0.0005574 +2025-03-24 23:26:13,602 Epoch 183/2000 +2025-03-24 23:28:47,010 Current Learning Rate: 0.0000177213 +2025-03-24 23:28:47,068 Train Loss: 0.0004664, Val Loss: 0.0005571 +2025-03-24 23:28:47,068 Epoch 184/2000 +2025-03-24 23:31:21,125 Current Learning Rate: 0.0000157084 +2025-03-24 23:31:21,195 Train Loss: 0.0004659, Val Loss: 0.0005567 +2025-03-24 23:31:21,196 Epoch 185/2000 +2025-03-24 23:33:54,598 Current Learning Rate: 0.0000138150 +2025-03-24 23:33:54,657 Train Loss: 0.0004655, Val Loss: 0.0005564 +2025-03-24 23:33:54,657 Epoch 186/2000 +2025-03-24 23:36:28,721 Current Learning Rate: 0.0000120416 +2025-03-24 23:36:28,800 Train Loss: 0.0004651, Val Loss: 0.0005561 +2025-03-24 23:36:28,801 Epoch 187/2000 +2025-03-24 23:39:01,371 Current Learning Rate: 0.0000103886 +2025-03-24 23:39:01,435 Train Loss: 0.0004648, Val Loss: 0.0005557 +2025-03-24 23:39:01,435 Epoch 188/2000 +2025-03-24 23:41:35,066 Current Learning Rate: 0.0000088564 +2025-03-24 23:41:35,127 Train Loss: 0.0004645, Val Loss: 0.0005553 +2025-03-24 23:41:35,128 Epoch 189/2000 +2025-03-24 23:44:08,888 Current Learning Rate: 0.0000074453 +2025-03-24 23:44:08,945 Train Loss: 0.0004641, Val Loss: 0.0005550 +2025-03-24 23:44:08,945 Epoch 190/2000 +2025-03-24 23:46:43,134 Current Learning Rate: 0.0000061558 +2025-03-24 23:46:43,196 Train Loss: 0.0004638, Val Loss: 0.0005547 +2025-03-24 23:46:43,196 Epoch 191/2000 +2025-03-24 23:49:16,580 Current Learning Rate: 0.0000049882 +2025-03-24 23:49:16,658 Train Loss: 0.0004636, Val Loss: 0.0005545 +2025-03-24 23:49:16,658 Epoch 192/2000 +2025-03-24 23:51:50,251 Current Learning Rate: 0.0000039426 +2025-03-24 23:51:50,314 Train Loss: 0.0004633, Val Loss: 0.0005543 +2025-03-24 23:51:50,314 Epoch 193/2000 +2025-03-24 23:54:23,941 Current Learning Rate: 0.0000030195 +2025-03-24 23:54:24,014 Train Loss: 0.0004631, Val 
Loss: 0.0005542 +2025-03-24 23:54:24,015 Epoch 194/2000 +2025-03-24 23:56:57,926 Current Learning Rate: 0.0000022190 +2025-03-24 23:56:57,978 Train Loss: 0.0004629, Val Loss: 0.0005541 +2025-03-24 23:56:57,979 Epoch 195/2000 +2025-03-24 23:59:30,377 Current Learning Rate: 0.0000015413 +2025-03-24 23:59:30,434 Train Loss: 0.0004627, Val Loss: 0.0005540 +2025-03-24 23:59:30,434 Epoch 196/2000 +2025-03-25 00:02:03,551 Current Learning Rate: 0.0000009866 +2025-03-25 00:02:03,609 Train Loss: 0.0004626, Val Loss: 0.0005539 +2025-03-25 00:02:03,610 Epoch 197/2000 +2025-03-25 00:04:37,107 Current Learning Rate: 0.0000005551 +2025-03-25 00:04:37,179 Train Loss: 0.0004624, Val Loss: 0.0005539 +2025-03-25 00:04:37,179 Epoch 198/2000 +2025-03-25 00:07:10,908 Current Learning Rate: 0.0000002467 +2025-03-25 00:07:10,967 Train Loss: 0.0004623, Val Loss: 0.0005538 +2025-03-25 00:07:10,967 Epoch 199/2000 +2025-03-25 00:09:44,575 Current Learning Rate: 0.0000000617 +2025-03-25 00:09:44,643 Train Loss: 0.0004623, Val Loss: 0.0005538 +2025-03-25 00:09:44,644 Epoch 200/2000 +2025-03-25 00:12:16,742 Current Learning Rate: 0.0000000000 +2025-03-25 00:12:16,813 Train Loss: 0.0004622, Val Loss: 0.0005538 +2025-03-25 00:12:16,813 Epoch 201/2000 +2025-03-25 00:14:49,925 Current Learning Rate: 0.0000000617 +2025-03-25 00:14:49,926 Train Loss: 0.0004622, Val Loss: 0.0005538 +2025-03-25 00:14:49,926 Epoch 202/2000 +2025-03-25 00:17:23,453 Current Learning Rate: 0.0000002467 +2025-03-25 00:17:23,512 Train Loss: 0.0004622, Val Loss: 0.0005538 +2025-03-25 00:17:23,512 Epoch 203/2000 +2025-03-25 00:19:57,397 Current Learning Rate: 0.0000005551 +2025-03-25 00:19:57,397 Train Loss: 0.0004623, Val Loss: 0.0005538 +2025-03-25 00:19:57,397 Epoch 204/2000 +2025-03-25 00:22:31,224 Current Learning Rate: 0.0000009866 +2025-03-25 00:22:31,225 Train Loss: 0.0004623, Val Loss: 0.0005538 +2025-03-25 00:22:31,225 Epoch 205/2000 +2025-03-25 00:25:05,768 Current Learning Rate: 0.0000015413 +2025-03-25 
00:25:05,769 Train Loss: 0.0004624, Val Loss: 0.0005538 +2025-03-25 00:25:05,769 Epoch 206/2000 +2025-03-25 00:27:39,536 Current Learning Rate: 0.0000022190 +2025-03-25 00:27:39,536 Train Loss: 0.0004625, Val Loss: 0.0005539 +2025-03-25 00:27:39,536 Epoch 207/2000 +2025-03-25 00:30:12,879 Current Learning Rate: 0.0000030195 +2025-03-25 00:30:12,880 Train Loss: 0.0004626, Val Loss: 0.0005540 +2025-03-25 00:30:12,880 Epoch 208/2000 +2025-03-25 00:32:46,575 Current Learning Rate: 0.0000039426 +2025-03-25 00:32:46,575 Train Loss: 0.0004627, Val Loss: 0.0005540 +2025-03-25 00:32:46,576 Epoch 209/2000 +2025-03-25 00:35:20,179 Current Learning Rate: 0.0000049882 +2025-03-25 00:35:20,179 Train Loss: 0.0004629, Val Loss: 0.0005540 +2025-03-25 00:35:20,180 Epoch 210/2000 +2025-03-25 00:37:53,702 Current Learning Rate: 0.0000061558 +2025-03-25 00:37:53,703 Train Loss: 0.0004630, Val Loss: 0.0005542 +2025-03-25 00:37:53,703 Epoch 211/2000 +2025-03-25 00:40:26,787 Current Learning Rate: 0.0000074453 +2025-03-25 00:40:26,788 Train Loss: 0.0004632, Val Loss: 0.0005543 +2025-03-25 00:40:26,788 Epoch 212/2000 +2025-03-25 00:43:00,129 Current Learning Rate: 0.0000088564 +2025-03-25 00:43:00,129 Train Loss: 0.0004633, Val Loss: 0.0005544 +2025-03-25 00:43:00,129 Epoch 213/2000 +2025-03-25 00:45:33,252 Current Learning Rate: 0.0000103886 +2025-03-25 00:45:33,254 Train Loss: 0.0004635, Val Loss: 0.0005546 +2025-03-25 00:45:33,255 Epoch 214/2000 +2025-03-25 00:48:06,105 Current Learning Rate: 0.0000120416 +2025-03-25 00:48:06,105 Train Loss: 0.0004637, Val Loss: 0.0005548 +2025-03-25 00:48:06,106 Epoch 215/2000 +2025-03-25 00:50:39,445 Current Learning Rate: 0.0000138150 +2025-03-25 00:50:39,445 Train Loss: 0.0004638, Val Loss: 0.0005549 +2025-03-25 00:50:39,446 Epoch 216/2000 +2025-03-25 00:53:12,016 Current Learning Rate: 0.0000157084 +2025-03-25 00:53:12,016 Train Loss: 0.0004640, Val Loss: 0.0005551 +2025-03-25 00:53:12,017 Epoch 217/2000 +2025-03-25 00:55:45,134 Current Learning 
Rate: 0.0000177213 +2025-03-25 00:55:45,134 Train Loss: 0.0004642, Val Loss: 0.0005552 +2025-03-25 00:55:45,135 Epoch 218/2000 +2025-03-25 00:58:19,750 Current Learning Rate: 0.0000198532 +2025-03-25 00:58:19,750 Train Loss: 0.0004643, Val Loss: 0.0005553 +2025-03-25 00:58:19,751 Epoch 219/2000 +2025-03-25 01:00:52,621 Current Learning Rate: 0.0000221035 +2025-03-25 01:00:52,622 Train Loss: 0.0004645, Val Loss: 0.0005555 +2025-03-25 01:00:52,622 Epoch 220/2000 +2025-03-25 01:03:25,813 Current Learning Rate: 0.0000244717 +2025-03-25 01:03:25,813 Train Loss: 0.0004647, Val Loss: 0.0005556 +2025-03-25 01:03:25,814 Epoch 221/2000 +2025-03-25 01:05:58,443 Current Learning Rate: 0.0000269573 +2025-03-25 01:05:58,443 Train Loss: 0.0004649, Val Loss: 0.0005557 +2025-03-25 01:05:58,444 Epoch 222/2000 +2025-03-25 01:08:31,059 Current Learning Rate: 0.0000295596 +2025-03-25 01:08:31,059 Train Loss: 0.0004650, Val Loss: 0.0005557 +2025-03-25 01:08:31,060 Epoch 223/2000 +2025-03-25 01:11:03,570 Current Learning Rate: 0.0000322780 +2025-03-25 01:11:03,570 Train Loss: 0.0004652, Val Loss: 0.0005557 +2025-03-25 01:11:03,571 Epoch 224/2000 +2025-03-25 01:13:36,196 Current Learning Rate: 0.0000351118 +2025-03-25 01:13:36,197 Train Loss: 0.0004654, Val Loss: 0.0005559 +2025-03-25 01:13:36,197 Epoch 225/2000 +2025-03-25 01:16:08,165 Current Learning Rate: 0.0000380602 +2025-03-25 01:16:08,166 Train Loss: 0.0004656, Val Loss: 0.0005561 +2025-03-25 01:16:08,166 Epoch 226/2000 +2025-03-25 01:18:41,534 Current Learning Rate: 0.0000411227 +2025-03-25 01:18:41,534 Train Loss: 0.0004658, Val Loss: 0.0005563 +2025-03-25 01:18:41,534 Epoch 227/2000 +2025-03-25 01:21:15,256 Current Learning Rate: 0.0000442984 +2025-03-25 01:21:15,257 Train Loss: 0.0004660, Val Loss: 0.0005564 +2025-03-25 01:21:15,257 Epoch 228/2000 +2025-03-25 01:23:48,950 Current Learning Rate: 0.0000475865 +2025-03-25 01:23:48,950 Train Loss: 0.0004662, Val Loss: 0.0005565 +2025-03-25 01:23:48,951 Epoch 229/2000 +2025-03-25 
01:26:22,898 Current Learning Rate: 0.0000509862 +2025-03-25 01:26:22,899 Train Loss: 0.0004664, Val Loss: 0.0005566 +2025-03-25 01:26:22,899 Epoch 230/2000 +2025-03-25 01:28:56,884 Current Learning Rate: 0.0000544967 +2025-03-25 01:28:56,884 Train Loss: 0.0004667, Val Loss: 0.0005565 +2025-03-25 01:28:56,884 Epoch 231/2000 +2025-03-25 01:31:31,045 Current Learning Rate: 0.0000581172 +2025-03-25 01:31:31,045 Train Loss: 0.0004669, Val Loss: 0.0005564 +2025-03-25 01:31:31,046 Epoch 232/2000 +2025-03-25 01:34:04,784 Current Learning Rate: 0.0000618467 +2025-03-25 01:34:04,785 Train Loss: 0.0004671, Val Loss: 0.0005565 +2025-03-25 01:34:04,785 Epoch 233/2000 +2025-03-25 01:36:38,207 Current Learning Rate: 0.0000656842 +2025-03-25 01:36:38,207 Train Loss: 0.0004673, Val Loss: 0.0005567 +2025-03-25 01:36:38,208 Epoch 234/2000 +2025-03-25 01:39:12,566 Current Learning Rate: 0.0000696290 +2025-03-25 01:39:12,567 Train Loss: 0.0004676, Val Loss: 0.0005569 +2025-03-25 01:39:12,567 Epoch 235/2000 +2025-03-25 01:41:46,244 Current Learning Rate: 0.0000736799 +2025-03-25 01:41:46,245 Train Loss: 0.0004678, Val Loss: 0.0005572 +2025-03-25 01:41:46,245 Epoch 236/2000 +2025-03-25 01:44:18,618 Current Learning Rate: 0.0000778360 +2025-03-25 01:44:18,619 Train Loss: 0.0004680, Val Loss: 0.0005574 +2025-03-25 01:44:18,619 Epoch 237/2000 +2025-03-25 01:46:51,769 Current Learning Rate: 0.0000820963 +2025-03-25 01:46:51,769 Train Loss: 0.0004682, Val Loss: 0.0005575 +2025-03-25 01:46:51,769 Epoch 238/2000 +2025-03-25 01:49:24,644 Current Learning Rate: 0.0000864597 +2025-03-25 01:49:24,644 Train Loss: 0.0004684, Val Loss: 0.0005576 +2025-03-25 01:49:24,645 Epoch 239/2000 +2025-03-25 01:51:57,747 Current Learning Rate: 0.0000909251 +2025-03-25 01:51:57,747 Train Loss: 0.0004686, Val Loss: 0.0005578 +2025-03-25 01:51:57,747 Epoch 240/2000 +2025-03-25 01:54:31,315 Current Learning Rate: 0.0000954915 +2025-03-25 01:54:31,316 Train Loss: 0.0004688, Val Loss: 0.0005579 +2025-03-25 
01:54:31,316 Epoch 241/2000 +2025-03-25 01:57:04,378 Current Learning Rate: 0.0001001577 +2025-03-25 01:57:04,379 Train Loss: 0.0004691, Val Loss: 0.0005581 +2025-03-25 01:57:04,379 Epoch 242/2000 +2025-03-25 01:59:37,117 Current Learning Rate: 0.0001049225 +2025-03-25 01:59:37,117 Train Loss: 0.0004693, Val Loss: 0.0005583 +2025-03-25 01:59:37,117 Epoch 243/2000 +2025-03-25 02:02:10,106 Current Learning Rate: 0.0001097848 +2025-03-25 02:02:10,106 Train Loss: 0.0004695, Val Loss: 0.0005585 +2025-03-25 02:02:10,107 Epoch 244/2000 +2025-03-25 02:04:42,360 Current Learning Rate: 0.0001147434 +2025-03-25 02:04:42,360 Train Loss: 0.0004698, Val Loss: 0.0005589 +2025-03-25 02:04:42,361 Epoch 245/2000 +2025-03-25 02:07:15,379 Current Learning Rate: 0.0001197970 +2025-03-25 02:07:15,379 Train Loss: 0.0004701, Val Loss: 0.0005592 +2025-03-25 02:07:15,379 Epoch 246/2000 +2025-03-25 02:09:49,132 Current Learning Rate: 0.0001249445 +2025-03-25 02:09:49,133 Train Loss: 0.0004704, Val Loss: 0.0005596 +2025-03-25 02:09:49,133 Epoch 247/2000 +2025-03-25 02:12:23,001 Current Learning Rate: 0.0001301845 +2025-03-25 02:12:23,001 Train Loss: 0.0004710, Val Loss: 0.0005600 +2025-03-25 02:12:23,001 Epoch 248/2000 +2025-03-25 02:14:57,393 Current Learning Rate: 0.0001355157 +2025-03-25 02:14:57,393 Train Loss: 0.0004717, Val Loss: 0.0005615 +2025-03-25 02:14:57,393 Epoch 249/2000 +2025-03-25 02:17:32,112 Current Learning Rate: 0.0001409369 +2025-03-25 02:17:32,112 Train Loss: 0.0004723, Val Loss: 0.0005615 +2025-03-25 02:17:32,112 Epoch 250/2000 +2025-03-25 02:20:06,343 Current Learning Rate: 0.0001464466 +2025-03-25 02:20:06,343 Train Loss: 0.0004725, Val Loss: 0.0005617 +2025-03-25 02:20:06,343 Epoch 251/2000 +2025-03-25 02:22:40,628 Current Learning Rate: 0.0001520436 +2025-03-25 02:22:40,628 Train Loss: 0.0004718, Val Loss: 0.0005606 +2025-03-25 02:22:40,629 Epoch 252/2000 +2025-03-25 02:25:14,415 Current Learning Rate: 0.0001577264 +2025-03-25 02:25:14,415 Train Loss: 0.0004716, Val 
Loss: 0.0005610 +2025-03-25 02:25:14,416 Epoch 253/2000 +2025-03-25 02:27:47,165 Current Learning Rate: 0.0001634937 +2025-03-25 02:27:47,165 Train Loss: 0.0004720, Val Loss: 0.0005613 +2025-03-25 02:27:47,165 Epoch 254/2000 +2025-03-25 02:30:20,030 Current Learning Rate: 0.0001693441 +2025-03-25 02:30:20,031 Train Loss: 0.0004724, Val Loss: 0.0005619 +2025-03-25 02:30:20,031 Epoch 255/2000 +2025-03-25 02:32:53,907 Current Learning Rate: 0.0001752760 +2025-03-25 02:32:53,907 Train Loss: 0.0004730, Val Loss: 0.0005630 +2025-03-25 02:32:53,907 Epoch 256/2000 +2025-03-25 02:35:27,448 Current Learning Rate: 0.0001812880 +2025-03-25 02:35:27,448 Train Loss: 0.0004742, Val Loss: 0.0005641 +2025-03-25 02:35:27,449 Epoch 257/2000 +2025-03-25 02:38:00,945 Current Learning Rate: 0.0001873787 +2025-03-25 02:38:00,946 Train Loss: 0.0004748, Val Loss: 0.0005640 +2025-03-25 02:38:00,946 Epoch 258/2000 +2025-03-25 02:40:34,205 Current Learning Rate: 0.0001935465 +2025-03-25 02:40:34,205 Train Loss: 0.0004752, Val Loss: 0.0005642 +2025-03-25 02:40:34,205 Epoch 259/2000 +2025-03-25 02:43:07,577 Current Learning Rate: 0.0001997899 +2025-03-25 02:43:07,577 Train Loss: 0.0004752, Val Loss: 0.0005641 +2025-03-25 02:43:07,577 Epoch 260/2000 +2025-03-25 02:45:41,649 Current Learning Rate: 0.0002061074 +2025-03-25 02:45:41,649 Train Loss: 0.0004742, Val Loss: 0.0005629 +2025-03-25 02:45:41,649 Epoch 261/2000 +2025-03-25 02:48:15,445 Current Learning Rate: 0.0002124974 +2025-03-25 02:48:15,446 Train Loss: 0.0004743, Val Loss: 0.0005620 +2025-03-25 02:48:15,446 Epoch 262/2000 +2025-03-25 02:50:49,613 Current Learning Rate: 0.0002189583 +2025-03-25 02:50:49,614 Train Loss: 0.0004748, Val Loss: 0.0005620 +2025-03-25 02:50:49,614 Epoch 263/2000 +2025-03-25 02:53:24,231 Current Learning Rate: 0.0002254886 +2025-03-25 02:53:24,231 Train Loss: 0.0004755, Val Loss: 0.0005637 +2025-03-25 02:53:24,231 Epoch 264/2000 +2025-03-25 02:55:56,999 Current Learning Rate: 0.0002320866 +2025-03-25 
02:55:57,000 Train Loss: 0.0004771, Val Loss: 0.0005639 +2025-03-25 02:55:57,000 Epoch 265/2000 +2025-03-25 02:58:29,659 Current Learning Rate: 0.0002387507 +2025-03-25 02:58:29,659 Train Loss: 0.0004774, Val Loss: 0.0005649 +2025-03-25 02:58:29,660 Epoch 266/2000 +2025-03-25 03:01:02,295 Current Learning Rate: 0.0002454793 +2025-03-25 03:01:02,295 Train Loss: 0.0004777, Val Loss: 0.0005651 +2025-03-25 03:01:02,295 Epoch 267/2000 +2025-03-25 03:03:34,768 Current Learning Rate: 0.0002522707 +2025-03-25 03:03:34,768 Train Loss: 0.0004787, Val Loss: 0.0005643 +2025-03-25 03:03:34,768 Epoch 268/2000 +2025-03-25 03:06:07,441 Current Learning Rate: 0.0002591232 +2025-03-25 03:06:07,442 Train Loss: 0.0004792, Val Loss: 0.0005656 +2025-03-25 03:06:07,442 Epoch 269/2000 +2025-03-25 03:08:41,337 Current Learning Rate: 0.0002660351 +2025-03-25 03:08:41,337 Train Loss: 0.0004781, Val Loss: 0.0005636 +2025-03-25 03:08:41,337 Epoch 270/2000 +2025-03-25 03:11:14,648 Current Learning Rate: 0.0002730048 +2025-03-25 03:11:14,649 Train Loss: 0.0004770, Val Loss: 0.0005639 +2025-03-25 03:11:14,649 Epoch 271/2000 +2025-03-25 03:13:48,673 Current Learning Rate: 0.0002800304 +2025-03-25 03:13:48,674 Train Loss: 0.0004776, Val Loss: 0.0005649 +2025-03-25 03:13:48,674 Epoch 272/2000 +2025-03-25 03:16:22,610 Current Learning Rate: 0.0002871104 +2025-03-25 03:16:22,611 Train Loss: 0.0004787, Val Loss: 0.0005662 +2025-03-25 03:16:22,611 Epoch 273/2000 +2025-03-25 03:18:56,883 Current Learning Rate: 0.0002942428 +2025-03-25 03:18:56,884 Train Loss: 0.0004809, Val Loss: 0.0005675 +2025-03-25 03:18:56,884 Epoch 274/2000 +2025-03-25 03:21:29,038 Current Learning Rate: 0.0003014261 +2025-03-25 03:21:29,038 Train Loss: 0.0004818, Val Loss: 0.0005672 +2025-03-25 03:21:29,038 Epoch 275/2000 +2025-03-25 03:24:01,733 Current Learning Rate: 0.0003086583 +2025-03-25 03:24:01,734 Train Loss: 0.0004824, Val Loss: 0.0005677 +2025-03-25 03:24:01,734 Epoch 276/2000 +2025-03-25 03:26:34,329 Current Learning 
Rate: 0.0003159377 +2025-03-25 03:26:34,329 Train Loss: 0.0004818, Val Loss: 0.0005673 +2025-03-25 03:26:34,329 Epoch 277/2000 +2025-03-25 03:29:08,492 Current Learning Rate: 0.0003232626 +2025-03-25 03:29:08,492 Train Loss: 0.0004798, Val Loss: 0.0005668 +2025-03-25 03:29:08,492 Epoch 278/2000 +2025-03-25 03:31:42,122 Current Learning Rate: 0.0003306310 +2025-03-25 03:31:42,123 Train Loss: 0.0004803, Val Loss: 0.0005679 +2025-03-25 03:31:42,123 Epoch 279/2000 +2025-03-25 03:34:14,816 Current Learning Rate: 0.0003380413 +2025-03-25 03:34:14,817 Train Loss: 0.0004821, Val Loss: 0.0005706 +2025-03-25 03:34:14,817 Epoch 280/2000 +2025-03-25 03:36:48,207 Current Learning Rate: 0.0003454915 +2025-03-25 03:36:48,207 Train Loss: 0.0004841, Val Loss: 0.0005694 +2025-03-25 03:36:48,207 Epoch 281/2000 +2025-03-25 03:39:22,208 Current Learning Rate: 0.0003529798 +2025-03-25 03:39:22,209 Train Loss: 0.0004849, Val Loss: 0.0005705 +2025-03-25 03:39:22,209 Epoch 282/2000 +2025-03-25 03:41:55,873 Current Learning Rate: 0.0003605044 +2025-03-25 03:41:55,873 Train Loss: 0.0004858, Val Loss: 0.0005743 +2025-03-25 03:41:55,873 Epoch 283/2000 +2025-03-25 03:44:29,074 Current Learning Rate: 0.0003680635 +2025-03-25 03:44:29,074 Train Loss: 0.0004869, Val Loss: 0.0005695 +2025-03-25 03:44:29,074 Epoch 284/2000 +2025-03-25 03:47:01,577 Current Learning Rate: 0.0003756551 +2025-03-25 03:47:01,578 Train Loss: 0.0004882, Val Loss: 0.0005714 +2025-03-25 03:47:01,578 Epoch 285/2000 +2025-03-25 03:49:34,670 Current Learning Rate: 0.0003832773 +2025-03-25 03:49:34,671 Train Loss: 0.0004878, Val Loss: 0.0005708 +2025-03-25 03:49:34,671 Epoch 286/2000 +2025-03-25 03:52:07,419 Current Learning Rate: 0.0003909284 +2025-03-25 03:52:07,419 Train Loss: 0.0004835, Val Loss: 0.0005703 +2025-03-25 03:52:07,420 Epoch 287/2000 +2025-03-25 03:54:40,861 Current Learning Rate: 0.0003986064 +2025-03-25 03:54:40,861 Train Loss: 0.0004843, Val Loss: 0.0005738 +2025-03-25 03:54:40,861 Epoch 288/2000 +2025-03-25 
03:57:14,527 Current Learning Rate: 0.0004063093 +2025-03-25 03:57:14,528 Train Loss: 0.0004865, Val Loss: 0.0005758 +2025-03-25 03:57:14,528 Epoch 289/2000 +2025-03-25 03:59:47,747 Current Learning Rate: 0.0004140354 +2025-03-25 03:59:47,748 Train Loss: 0.0004896, Val Loss: 0.0005764 +2025-03-25 03:59:47,752 Epoch 290/2000 +2025-03-25 04:02:20,763 Current Learning Rate: 0.0004217828 +2025-03-25 04:02:20,764 Train Loss: 0.0004895, Val Loss: 0.0005739 +2025-03-25 04:02:20,764 Epoch 291/2000 +2025-03-25 04:04:53,260 Current Learning Rate: 0.0004295494 +2025-03-25 04:04:53,260 Train Loss: 0.0004873, Val Loss: 0.0005751 +2025-03-25 04:04:53,261 Epoch 292/2000 +2025-03-25 04:07:26,070 Current Learning Rate: 0.0004373334 +2025-03-25 04:07:26,070 Train Loss: 0.0004899, Val Loss: 0.0005739 +2025-03-25 04:07:26,071 Epoch 293/2000 +2025-03-25 04:09:58,900 Current Learning Rate: 0.0004451328 +2025-03-25 04:09:58,900 Train Loss: 0.0004924, Val Loss: 0.0005792 +2025-03-25 04:09:58,900 Epoch 294/2000 +2025-03-25 04:12:32,238 Current Learning Rate: 0.0004529458 +2025-03-25 04:12:32,238 Train Loss: 0.0004960, Val Loss: 0.0005791 +2025-03-25 04:12:32,239 Epoch 295/2000 +2025-03-25 04:15:05,765 Current Learning Rate: 0.0004607705 +2025-03-25 04:15:05,766 Train Loss: 0.0004952, Val Loss: 0.0005757 +2025-03-25 04:15:05,766 Epoch 296/2000 +2025-03-25 04:17:39,826 Current Learning Rate: 0.0004686047 +2025-03-25 04:17:39,826 Train Loss: 0.0004898, Val Loss: 0.0005736 +2025-03-25 04:17:39,826 Epoch 297/2000 +2025-03-25 04:20:13,805 Current Learning Rate: 0.0004764468 +2025-03-25 04:20:13,805 Train Loss: 0.0004899, Val Loss: 0.0005796 +2025-03-25 04:20:13,806 Epoch 298/2000 +2025-03-25 04:22:46,496 Current Learning Rate: 0.0004842946 +2025-03-25 04:22:46,497 Train Loss: 0.0004923, Val Loss: 0.0005825 +2025-03-25 04:22:46,497 Epoch 299/2000 +2025-03-25 04:25:20,456 Current Learning Rate: 0.0004921463 +2025-03-25 04:25:20,456 Train Loss: 0.0004944, Val Loss: 0.0005833 +2025-03-25 
04:25:20,457 Epoch 300/2000 +2025-03-25 04:27:53,973 Current Learning Rate: 0.0005000000 +2025-03-25 04:27:53,973 Train Loss: 0.0004967, Val Loss: 0.0005844 +2025-03-25 04:27:53,974 Epoch 301/2000 +2025-03-25 04:30:27,839 Current Learning Rate: 0.0005078537 +2025-03-25 04:30:27,840 Train Loss: 0.0004983, Val Loss: 0.0005811 +2025-03-25 04:30:27,840 Epoch 302/2000 +2025-03-25 04:33:01,359 Current Learning Rate: 0.0005157054 +2025-03-25 04:33:01,359 Train Loss: 0.0005002, Val Loss: 0.0005799 +2025-03-25 04:33:01,360 Epoch 303/2000 +2025-03-25 04:35:35,412 Current Learning Rate: 0.0005235532 +2025-03-25 04:35:35,412 Train Loss: 0.0004968, Val Loss: 0.0005775 +2025-03-25 04:35:35,412 Epoch 304/2000 +2025-03-25 04:38:09,286 Current Learning Rate: 0.0005313953 +2025-03-25 04:38:09,286 Train Loss: 0.0004925, Val Loss: 0.0005803 +2025-03-25 04:38:09,287 Epoch 305/2000 +2025-03-25 04:40:43,844 Current Learning Rate: 0.0005392295 +2025-03-25 04:40:43,845 Train Loss: 0.0004962, Val Loss: 0.0005831 +2025-03-25 04:40:43,845 Epoch 306/2000 +2025-03-25 04:43:18,131 Current Learning Rate: 0.0005470542 +2025-03-25 04:43:18,132 Train Loss: 0.0004989, Val Loss: 0.0005844 +2025-03-25 04:43:18,132 Epoch 307/2000 +2025-03-25 04:45:51,681 Current Learning Rate: 0.0005548672 +2025-03-25 04:45:51,682 Train Loss: 0.0005011, Val Loss: 0.0005880 +2025-03-25 04:45:51,682 Epoch 308/2000 +2025-03-25 04:48:25,032 Current Learning Rate: 0.0005626666 +2025-03-25 04:48:25,033 Train Loss: 0.0005020, Val Loss: 0.0005845 +2025-03-25 04:48:25,033 Epoch 309/2000 +2025-03-25 04:50:57,577 Current Learning Rate: 0.0005704506 +2025-03-25 04:50:57,578 Train Loss: 0.0005013, Val Loss: 0.0005833 +2025-03-25 04:50:57,578 Epoch 310/2000 +2025-03-25 04:53:31,610 Current Learning Rate: 0.0005782172 +2025-03-25 04:53:31,611 Train Loss: 0.0005025, Val Loss: 0.0005840 +2025-03-25 04:53:31,611 Epoch 311/2000 +2025-03-25 04:56:05,222 Current Learning Rate: 0.0005859646 +2025-03-25 04:56:05,223 Train Loss: 0.0005020, Val 
Loss: 0.0005819 +2025-03-25 04:56:05,223 Epoch 312/2000 +2025-03-25 04:58:38,969 Current Learning Rate: 0.0005936907 +2025-03-25 04:58:38,969 Train Loss: 0.0004980, Val Loss: 0.0005895 +2025-03-25 04:58:38,969 Epoch 313/2000 +2025-03-25 05:01:12,794 Current Learning Rate: 0.0006013936 +2025-03-25 05:01:12,795 Train Loss: 0.0005018, Val Loss: 0.0005951 +2025-03-25 05:01:12,795 Epoch 314/2000 +2025-03-25 05:03:45,816 Current Learning Rate: 0.0006090716 +2025-03-25 05:03:45,816 Train Loss: 0.0005049, Val Loss: 0.0005883 +2025-03-25 05:03:45,817 Epoch 315/2000 +2025-03-25 05:06:20,460 Current Learning Rate: 0.0006167227 +2025-03-25 05:06:20,460 Train Loss: 0.0005059, Val Loss: 0.0005934 +2025-03-25 05:06:20,461 Epoch 316/2000 +2025-03-25 05:08:54,119 Current Learning Rate: 0.0006243449 +2025-03-25 05:08:54,120 Train Loss: 0.0005091, Val Loss: 0.0005967 +2025-03-25 05:08:54,120 Epoch 317/2000 +2025-03-25 05:11:27,383 Current Learning Rate: 0.0006319365 +2025-03-25 05:11:27,384 Train Loss: 0.0005115, Val Loss: 0.0005903 +2025-03-25 05:11:27,384 Epoch 318/2000 +2025-03-25 05:14:01,077 Current Learning Rate: 0.0006394956 +2025-03-25 05:14:01,077 Train Loss: 0.0005132, Val Loss: 0.0005946 +2025-03-25 05:14:01,077 Epoch 319/2000 +2025-03-25 05:16:34,847 Current Learning Rate: 0.0006470202 +2025-03-25 05:16:34,847 Train Loss: 0.0005098, Val Loss: 0.0005866 +2025-03-25 05:16:34,847 Epoch 320/2000 +2025-03-25 05:19:08,388 Current Learning Rate: 0.0006545085 +2025-03-25 05:19:08,388 Train Loss: 0.0005017, Val Loss: 0.0005965 +2025-03-25 05:19:08,389 Epoch 321/2000 +2025-03-25 05:21:42,672 Current Learning Rate: 0.0006619587 +2025-03-25 05:21:42,673 Train Loss: 0.0005067, Val Loss: 0.0006083 +2025-03-25 05:21:42,673 Epoch 322/2000 +2025-03-25 05:24:15,936 Current Learning Rate: 0.0006693690 +2025-03-25 05:24:15,937 Train Loss: 0.0005086, Val Loss: 0.0005943 +2025-03-25 05:24:15,937 Epoch 323/2000 +2025-03-25 05:26:48,422 Current Learning Rate: 0.0006767374 +2025-03-25 
05:26:48,422 Train Loss: 0.0005107, Val Loss: 0.0005956 +2025-03-25 05:26:48,422 Epoch 324/2000 +2025-03-25 05:29:21,221 Current Learning Rate: 0.0006840623 +2025-03-25 05:29:21,222 Train Loss: 0.0005104, Val Loss: 0.0005960 +2025-03-25 05:29:21,222 Epoch 325/2000 +2025-03-25 05:31:55,507 Current Learning Rate: 0.0006913417 +2025-03-25 05:31:55,508 Train Loss: 0.0005048, Val Loss: 0.0006047 +2025-03-25 05:31:55,508 Epoch 326/2000 +2025-03-25 05:34:29,436 Current Learning Rate: 0.0006985739 +2025-03-25 05:34:29,437 Train Loss: 0.0005085, Val Loss: 0.0005982 +2025-03-25 05:34:29,437 Epoch 327/2000 +2025-03-25 05:37:02,641 Current Learning Rate: 0.0007057572 +2025-03-25 05:37:02,642 Train Loss: 0.0005118, Val Loss: 0.0006090 +2025-03-25 05:37:02,642 Epoch 328/2000 +2025-03-25 05:39:35,543 Current Learning Rate: 0.0007128896 +2025-03-25 05:39:35,543 Train Loss: 0.0005148, Val Loss: 0.0005959 +2025-03-25 05:39:35,544 Epoch 329/2000 +2025-03-25 05:42:08,716 Current Learning Rate: 0.0007199696 +2025-03-25 05:42:08,717 Train Loss: 0.0005185, Val Loss: 0.0005965 +2025-03-25 05:42:08,717 Epoch 330/2000 +2025-03-25 05:44:41,506 Current Learning Rate: 0.0007269952 +2025-03-25 05:44:41,507 Train Loss: 0.0005187, Val Loss: 0.0005970 +2025-03-25 05:44:41,507 Epoch 331/2000 +2025-03-25 05:47:13,743 Current Learning Rate: 0.0007339649 +2025-03-25 05:47:13,744 Train Loss: 0.0005073, Val Loss: 0.0006031 +2025-03-25 05:47:13,744 Epoch 332/2000 +2025-03-25 05:49:46,052 Current Learning Rate: 0.0007408768 +2025-03-25 05:49:46,052 Train Loss: 0.0005402, Val Loss: 0.0005930 +2025-03-25 05:49:46,053 Epoch 333/2000 +2025-03-25 05:52:19,195 Current Learning Rate: 0.0007477293 +2025-03-25 05:52:19,195 Train Loss: 0.0005096, Val Loss: 0.0005939 +2025-03-25 05:52:19,196 Epoch 334/2000 +2025-03-25 05:54:52,843 Current Learning Rate: 0.0007545207 +2025-03-25 05:54:52,844 Train Loss: 0.0005045, Val Loss: 0.0005935 +2025-03-25 05:54:52,844 Epoch 335/2000 +2025-03-25 05:57:25,566 Current Learning 
Rate: 0.0007612493 +2025-03-25 05:57:25,566 Train Loss: 0.0005120, Val Loss: 0.0006056 +2025-03-25 05:57:25,566 Epoch 336/2000 +2025-03-25 05:59:58,885 Current Learning Rate: 0.0007679134 +2025-03-25 05:59:58,885 Train Loss: 0.0005164, Val Loss: 0.0006023 +2025-03-25 05:59:58,885 Epoch 337/2000 +2025-03-25 06:02:31,139 Current Learning Rate: 0.0007745114 +2025-03-25 06:02:31,140 Train Loss: 0.0005155, Val Loss: 0.0006003 +2025-03-25 06:02:31,140 Epoch 338/2000 +2025-03-25 06:05:03,777 Current Learning Rate: 0.0007810417 +2025-03-25 06:05:03,777 Train Loss: 0.0005145, Val Loss: 0.0005948 +2025-03-25 06:05:03,778 Epoch 339/2000 +2025-03-25 06:07:37,439 Current Learning Rate: 0.0007875026 +2025-03-25 06:07:37,440 Train Loss: 0.0005159, Val Loss: 0.0005984 +2025-03-25 06:07:37,440 Epoch 340/2000 +2025-03-25 06:10:11,809 Current Learning Rate: 0.0007938926 +2025-03-25 06:10:11,809 Train Loss: 0.0005101, Val Loss: 0.0005944 +2025-03-25 06:10:11,809 Epoch 341/2000 +2025-03-25 06:12:46,093 Current Learning Rate: 0.0008002101 +2025-03-25 06:12:46,093 Train Loss: 0.0005143, Val Loss: 0.0006044 +2025-03-25 06:12:46,093 Epoch 342/2000 +2025-03-25 06:15:19,776 Current Learning Rate: 0.0008064535 +2025-03-25 06:15:19,776 Train Loss: 0.0009532, Val Loss: 0.0009502 +2025-03-25 06:15:19,776 Epoch 343/2000 +2025-03-25 06:17:53,811 Current Learning Rate: 0.0008126213 +2025-03-25 06:17:53,812 Train Loss: 0.0007693, Val Loss: 0.0007691 +2025-03-25 06:17:53,812 Epoch 344/2000 +2025-03-25 06:20:27,217 Current Learning Rate: 0.0008187120 +2025-03-25 06:20:27,217 Train Loss: 0.0006509, Val Loss: 0.0007045 +2025-03-25 06:20:27,217 Epoch 345/2000 +2025-03-25 06:23:01,000 Current Learning Rate: 0.0008247240 +2025-03-25 06:23:01,001 Train Loss: 0.0006020, Val Loss: 0.0006534 +2025-03-25 06:23:01,001 Epoch 346/2000 +2025-03-25 06:25:34,854 Current Learning Rate: 0.0008306559 +2025-03-25 06:25:34,855 Train Loss: 0.0005828, Val Loss: 0.0006419 +2025-03-25 06:25:34,855 Epoch 347/2000 +2025-03-25 
06:28:08,558 Current Learning Rate: 0.0008365063 +2025-03-25 06:28:08,558 Train Loss: 0.0005689, Val Loss: 0.0006378 +2025-03-25 06:28:08,559 Epoch 348/2000 +2025-03-25 06:30:42,058 Current Learning Rate: 0.0008422736 +2025-03-25 06:30:42,059 Train Loss: 0.0005620, Val Loss: 0.0006344 +2025-03-25 06:30:42,059 Epoch 349/2000 +2025-03-25 06:33:14,838 Current Learning Rate: 0.0008479564 +2025-03-25 06:33:14,838 Train Loss: 0.0005656, Val Loss: 0.0006272 +2025-03-25 06:33:14,838 Epoch 350/2000 +2025-03-25 06:35:47,812 Current Learning Rate: 0.0008535534 +2025-03-25 06:35:47,813 Train Loss: 0.0005570, Val Loss: 0.0006389 +2025-03-25 06:35:47,813 Epoch 351/2000 +2025-03-25 06:38:20,263 Current Learning Rate: 0.0008590631 +2025-03-25 06:38:20,268 Train Loss: 0.0005398, Val Loss: 0.0006148 +2025-03-25 06:38:20,269 Epoch 352/2000 +2025-03-25 06:40:53,881 Current Learning Rate: 0.0008644843 +2025-03-25 06:40:53,882 Train Loss: 0.0005399, Val Loss: 0.0006267 +2025-03-25 06:40:53,882 Epoch 353/2000 +2025-03-25 06:43:27,596 Current Learning Rate: 0.0008698155 +2025-03-25 06:43:27,596 Train Loss: 0.0005389, Val Loss: 0.0006199 +2025-03-25 06:43:27,596 Epoch 354/2000 +2025-03-25 06:46:02,011 Current Learning Rate: 0.0008750555 +2025-03-25 06:46:02,011 Train Loss: 0.0005378, Val Loss: 0.0006099 +2025-03-25 06:46:02,012 Epoch 355/2000 +2025-03-25 06:48:36,086 Current Learning Rate: 0.0008802030 +2025-03-25 06:48:36,087 Train Loss: 0.0005302, Val Loss: 0.0006134 +2025-03-25 06:48:36,087 Epoch 356/2000 +2025-03-25 06:51:09,832 Current Learning Rate: 0.0008852566 +2025-03-25 06:51:09,832 Train Loss: 0.0005281, Val Loss: 0.0006184 +2025-03-25 06:51:09,833 Epoch 357/2000 +2025-03-25 06:53:42,638 Current Learning Rate: 0.0008902152 +2025-03-25 06:53:42,639 Train Loss: 0.0005298, Val Loss: 0.0006139 +2025-03-25 06:53:42,639 Epoch 358/2000 +2025-03-25 06:56:15,845 Current Learning Rate: 0.0008950775 +2025-03-25 06:56:15,845 Train Loss: 0.0005324, Val Loss: 0.0006230 +2025-03-25 
06:56:15,846 Epoch 359/2000 +2025-03-25 06:58:49,764 Current Learning Rate: 0.0008998423 +2025-03-25 06:58:49,765 Train Loss: 0.0005350, Val Loss: 0.0006162 +2025-03-25 06:58:49,765 Epoch 360/2000 +2025-03-25 07:01:22,902 Current Learning Rate: 0.0009045085 +2025-03-25 07:01:22,902 Train Loss: 0.0005306, Val Loss: 0.0006092 +2025-03-25 07:01:22,902 Epoch 361/2000 +2025-03-25 07:03:55,779 Current Learning Rate: 0.0009090749 +2025-03-25 07:03:55,780 Train Loss: 0.0005139, Val Loss: 0.0006046 +2025-03-25 07:03:55,780 Epoch 362/2000 +2025-03-25 07:06:28,719 Current Learning Rate: 0.0009135403 +2025-03-25 07:06:28,719 Train Loss: 0.0005183, Val Loss: 0.0006093 +2025-03-25 07:06:28,720 Epoch 363/2000 +2025-03-25 07:09:01,672 Current Learning Rate: 0.0009179037 +2025-03-25 07:09:01,673 Train Loss: 0.0005202, Val Loss: 0.0006160 +2025-03-25 07:09:01,673 Epoch 364/2000 +2025-03-25 07:11:34,001 Current Learning Rate: 0.0009221640 +2025-03-25 07:11:34,001 Train Loss: 0.0005223, Val Loss: 0.0006100 +2025-03-25 07:11:34,002 Epoch 365/2000 +2025-03-25 07:14:07,200 Current Learning Rate: 0.0009263201 +2025-03-25 07:14:07,200 Train Loss: 0.0005226, Val Loss: 0.0006001 +2025-03-25 07:14:07,200 Epoch 366/2000 +2025-03-25 07:16:39,793 Current Learning Rate: 0.0009303710 +2025-03-25 07:16:39,794 Train Loss: 0.0005247, Val Loss: 0.0006010 +2025-03-25 07:16:39,794 Epoch 367/2000 +2025-03-25 07:19:12,346 Current Learning Rate: 0.0009343158 +2025-03-25 07:19:12,346 Train Loss: 0.0005227, Val Loss: 0.0006001 +2025-03-25 07:19:12,346 Epoch 368/2000 +2025-03-25 07:21:45,674 Current Learning Rate: 0.0009381533 +2025-03-25 07:21:45,675 Train Loss: 0.0005094, Val Loss: 0.0005908 +2025-03-25 07:21:45,675 Epoch 369/2000 +2025-03-25 07:24:18,711 Current Learning Rate: 0.0009418828 +2025-03-25 07:24:18,711 Train Loss: 0.0008533, Val Loss: 0.0008039 +2025-03-25 07:24:18,711 Epoch 370/2000 +2025-03-25 07:26:52,322 Current Learning Rate: 0.0009455033 +2025-03-25 07:26:52,322 Train Loss: 0.0006561, Val 
Loss: 0.0006702 +2025-03-25 07:26:52,322 Epoch 371/2000 +2025-03-25 07:29:25,811 Current Learning Rate: 0.0009490138 +2025-03-25 07:29:25,811 Train Loss: 0.0005675, Val Loss: 0.0006217 +2025-03-25 07:29:25,811 Epoch 372/2000 +2025-03-25 07:31:58,291 Current Learning Rate: 0.0009524135 +2025-03-25 07:31:58,292 Train Loss: 0.0005380, Val Loss: 0.0006055 +2025-03-25 07:31:58,292 Epoch 373/2000 +2025-03-25 07:34:32,541 Current Learning Rate: 0.0009557016 +2025-03-25 07:34:32,542 Train Loss: 0.0005246, Val Loss: 0.0006212 +2025-03-25 07:34:32,542 Epoch 374/2000 +2025-03-25 07:37:06,535 Current Learning Rate: 0.0009588773 +2025-03-25 07:37:06,535 Train Loss: 0.0005225, Val Loss: 0.0006128 +2025-03-25 07:37:06,535 Epoch 375/2000 +2025-03-25 07:39:40,580 Current Learning Rate: 0.0009619398 +2025-03-25 07:39:40,580 Train Loss: 0.0005221, Val Loss: 0.0005992 +2025-03-25 07:39:40,581 Epoch 376/2000 +2025-03-25 07:42:14,627 Current Learning Rate: 0.0009648882 +2025-03-25 07:42:14,627 Train Loss: 0.0005171, Val Loss: 0.0005964 +2025-03-25 07:42:14,628 Epoch 377/2000 +2025-03-25 07:44:48,082 Current Learning Rate: 0.0009677220 +2025-03-25 07:44:48,083 Train Loss: 0.0005093, Val Loss: 0.0006024 +2025-03-25 07:44:48,083 Epoch 378/2000 +2025-03-25 07:47:21,749 Current Learning Rate: 0.0009704404 +2025-03-25 07:47:21,750 Train Loss: 0.0005099, Val Loss: 0.0006009 +2025-03-25 07:47:21,750 Epoch 379/2000 +2025-03-25 07:49:55,777 Current Learning Rate: 0.0009730427 +2025-03-25 07:49:55,777 Train Loss: 0.0005122, Val Loss: 0.0006143 +2025-03-25 07:49:55,777 Epoch 380/2000 +2025-03-25 07:52:29,954 Current Learning Rate: 0.0009755283 +2025-03-25 07:52:29,955 Train Loss: 0.0005138, Val Loss: 0.0005995 +2025-03-25 07:52:29,955 Epoch 381/2000 +2025-03-25 07:55:03,628 Current Learning Rate: 0.0009778965 +2025-03-25 07:55:03,629 Train Loss: 0.0005165, Val Loss: 0.0005876 +2025-03-25 07:55:03,629 Epoch 382/2000 +2025-03-25 07:57:37,648 Current Learning Rate: 0.0009801468 +2025-03-25 
07:57:37,649 Train Loss: 0.0005215, Val Loss: 0.0005916 +2025-03-25 07:57:37,649 Epoch 383/2000 +2025-03-25 08:00:11,512 Current Learning Rate: 0.0009822787 +2025-03-25 08:00:11,513 Train Loss: 0.0005180, Val Loss: 0.0005805 +2025-03-25 08:00:11,513 Epoch 384/2000 +2025-03-25 08:02:44,371 Current Learning Rate: 0.0009842916 +2025-03-25 08:02:44,371 Train Loss: 0.0005009, Val Loss: 0.0005752 +2025-03-25 08:02:44,372 Epoch 385/2000 +2025-03-25 08:05:17,147 Current Learning Rate: 0.0009861850 +2025-03-25 08:05:17,148 Train Loss: 0.0004977, Val Loss: 0.0005829 +2025-03-25 08:05:17,148 Epoch 386/2000 +2025-03-25 08:07:50,557 Current Learning Rate: 0.0009879584 +2025-03-25 08:07:50,557 Train Loss: 0.0005016, Val Loss: 0.0005923 +2025-03-25 08:07:50,558 Epoch 387/2000 +2025-03-25 08:10:24,487 Current Learning Rate: 0.0009896114 +2025-03-25 08:10:24,487 Train Loss: 0.0005048, Val Loss: 0.0005894 +2025-03-25 08:10:24,487 Epoch 388/2000 +2025-03-25 08:12:58,737 Current Learning Rate: 0.0009911436 +2025-03-25 08:12:58,737 Train Loss: 0.0005009, Val Loss: 0.0005856 +2025-03-25 08:12:58,738 Epoch 389/2000 +2025-03-25 08:15:32,225 Current Learning Rate: 0.0009925547 +2025-03-25 08:15:32,226 Train Loss: 0.0005043, Val Loss: 0.0005944 +2025-03-25 08:15:32,226 Epoch 390/2000 +2025-03-25 08:18:05,506 Current Learning Rate: 0.0009938442 +2025-03-25 08:18:05,507 Train Loss: 0.0005088, Val Loss: 0.0005822 +2025-03-25 08:18:05,507 Epoch 391/2000 +2025-03-25 08:20:38,141 Current Learning Rate: 0.0009950118 +2025-03-25 08:20:38,141 Train Loss: 0.0005013, Val Loss: 0.0005788 +2025-03-25 08:20:38,141 Epoch 392/2000 +2025-03-25 08:23:11,566 Current Learning Rate: 0.0009960574 +2025-03-25 08:23:11,566 Train Loss: 0.0004951, Val Loss: 0.0005815 +2025-03-25 08:23:11,566 Epoch 393/2000 +2025-03-25 08:25:45,612 Current Learning Rate: 0.0009969805 +2025-03-25 08:25:45,613 Train Loss: 0.0004961, Val Loss: 0.0005796 +2025-03-25 08:25:45,613 Epoch 394/2000 +2025-03-25 08:28:19,276 Current Learning 
Rate: 0.0009977810 +2025-03-25 08:28:19,276 Train Loss: 0.0004985, Val Loss: 0.0005926 +2025-03-25 08:28:19,276 Epoch 395/2000 +2025-03-25 08:30:51,485 Current Learning Rate: 0.0009984587 +2025-03-25 08:30:51,486 Train Loss: 0.0005033, Val Loss: 0.0006081 +2025-03-25 08:30:51,486 Epoch 396/2000 +2025-03-25 08:33:24,078 Current Learning Rate: 0.0009990134 +2025-03-25 08:33:24,079 Train Loss: 0.0005033, Val Loss: 0.0005963 +2025-03-25 08:33:24,079 Epoch 397/2000 +2025-03-25 08:35:57,868 Current Learning Rate: 0.0009994449 +2025-03-25 08:35:57,868 Train Loss: 0.0004978, Val Loss: 0.0005816 +2025-03-25 08:35:57,868 Epoch 398/2000 +2025-03-25 08:38:31,083 Current Learning Rate: 0.0009997533 +2025-03-25 08:38:31,084 Train Loss: 0.0004893, Val Loss: 0.0005776 +2025-03-25 08:38:31,084 Epoch 399/2000 +2025-03-25 08:41:04,855 Current Learning Rate: 0.0009999383 +2025-03-25 08:41:04,856 Train Loss: 0.0004919, Val Loss: 0.0005909 +2025-03-25 08:41:04,856 Epoch 400/2000 +2025-03-25 08:43:39,185 Current Learning Rate: 0.0010000000 +2025-03-25 08:43:39,185 Train Loss: 0.0004935, Val Loss: 0.0005972 +2025-03-25 08:43:39,185 Epoch 401/2000 +2025-03-25 08:46:12,993 Current Learning Rate: 0.0009999383 +2025-03-25 08:46:12,994 Train Loss: 0.0004949, Val Loss: 0.0006064 +2025-03-25 08:46:12,994 Epoch 402/2000 +2025-03-25 08:48:47,272 Current Learning Rate: 0.0009997533 +2025-03-25 08:48:47,272 Train Loss: 0.0004980, Val Loss: 0.0005822 +2025-03-25 08:48:47,272 Epoch 403/2000 +2025-03-25 08:51:22,289 Current Learning Rate: 0.0009994449 +2025-03-25 08:51:22,290 Train Loss: 0.0005002, Val Loss: 0.0005872 +2025-03-25 08:51:22,290 Epoch 404/2000 +2025-03-25 08:53:55,836 Current Learning Rate: 0.0009990134 +2025-03-25 08:53:55,837 Train Loss: 0.0005080, Val Loss: 0.0005799 +2025-03-25 08:53:55,837 Epoch 405/2000 +2025-03-25 08:56:29,721 Current Learning Rate: 0.0009984587 +2025-03-25 08:56:29,721 Train Loss: 0.0005082, Val Loss: 0.0005815 +2025-03-25 08:56:29,721 Epoch 406/2000 +2025-03-25 
08:59:03,404 Current Learning Rate: 0.0009977810 +2025-03-25 08:59:03,404 Train Loss: 0.0005044, Val Loss: 0.0005838 +2025-03-25 08:59:03,404 Epoch 407/2000 +2025-03-25 09:01:36,748 Current Learning Rate: 0.0009969805 +2025-03-25 09:01:36,748 Train Loss: 0.0004946, Val Loss: 0.0005653 +2025-03-25 09:01:36,749 Epoch 408/2000 +2025-03-25 09:04:10,488 Current Learning Rate: 0.0009960574 +2025-03-25 09:04:10,488 Train Loss: 0.0004747, Val Loss: 0.0005584 +2025-03-25 09:04:10,488 Epoch 409/2000 +2025-03-25 09:06:44,479 Current Learning Rate: 0.0009950118 +2025-03-25 09:06:44,480 Train Loss: 0.0004774, Val Loss: 0.0005639 +2025-03-25 09:06:44,480 Epoch 410/2000 +2025-03-25 09:09:17,522 Current Learning Rate: 0.0009938442 +2025-03-25 09:09:17,522 Train Loss: 0.0004807, Val Loss: 0.0005760 +2025-03-25 09:09:17,522 Epoch 411/2000 +2025-03-25 09:11:50,511 Current Learning Rate: 0.0009925547 +2025-03-25 09:11:50,511 Train Loss: 0.0004835, Val Loss: 0.0005687 +2025-03-25 09:11:50,512 Epoch 412/2000 +2025-03-25 09:14:24,394 Current Learning Rate: 0.0009911436 +2025-03-25 09:14:24,395 Train Loss: 0.0004836, Val Loss: 0.0005620 +2025-03-25 09:14:24,395 Epoch 413/2000 +2025-03-25 09:16:58,364 Current Learning Rate: 0.0009896114 +2025-03-25 09:16:58,364 Train Loss: 0.0004834, Val Loss: 0.0005639 +2025-03-25 09:16:58,364 Epoch 414/2000 +2025-03-25 09:19:31,166 Current Learning Rate: 0.0009879584 +2025-03-25 09:19:31,167 Train Loss: 0.0004782, Val Loss: 0.0005565 +2025-03-25 09:19:31,167 Epoch 415/2000 +2025-03-25 09:22:04,172 Current Learning Rate: 0.0009861850 +2025-03-25 09:22:04,172 Train Loss: 0.0004745, Val Loss: 0.0005659 +2025-03-25 09:22:04,173 Epoch 416/2000 +2025-03-25 09:24:37,590 Current Learning Rate: 0.0009842916 +2025-03-25 09:24:37,591 Train Loss: 0.0004809, Val Loss: 0.0005645 +2025-03-25 09:24:37,591 Epoch 417/2000 +2025-03-25 09:27:11,773 Current Learning Rate: 0.0009822787 +2025-03-25 09:27:11,774 Train Loss: 0.0004868, Val Loss: 0.0005739 +2025-03-25 
09:27:11,774 Epoch 418/2000 +2025-03-25 09:29:45,415 Current Learning Rate: 0.0009801468 +2025-03-25 09:29:45,415 Train Loss: 0.0004867, Val Loss: 0.0005623 +2025-03-25 09:29:45,416 Epoch 419/2000 +2025-03-25 09:32:17,941 Current Learning Rate: 0.0009778965 +2025-03-25 09:32:17,941 Train Loss: 0.0004892, Val Loss: 0.0005665 +2025-03-25 09:32:17,941 Epoch 420/2000 +2025-03-25 09:34:51,657 Current Learning Rate: 0.0009755283 +2025-03-25 09:34:51,657 Train Loss: 0.0004877, Val Loss: 0.0005665 +2025-03-25 09:34:51,658 Epoch 421/2000 +2025-03-25 09:37:24,375 Current Learning Rate: 0.0009730427 +2025-03-25 09:37:24,376 Train Loss: 0.0004756, Val Loss: 0.0005551 +2025-03-25 09:37:24,376 Epoch 422/2000 +2025-03-25 09:39:57,898 Current Learning Rate: 0.0009704404 +2025-03-25 09:39:57,953 Train Loss: 0.0004625, Val Loss: 0.0005536 +2025-03-25 09:39:57,953 Epoch 423/2000 +2025-03-25 09:42:31,294 Current Learning Rate: 0.0009677220 +2025-03-25 09:42:31,294 Train Loss: 0.0004661, Val Loss: 0.0005554 +2025-03-25 09:42:31,295 Epoch 424/2000 +2025-03-25 09:45:04,916 Current Learning Rate: 0.0009648882 +2025-03-25 09:45:04,920 Train Loss: 0.0004686, Val Loss: 0.0005603 +2025-03-25 09:45:04,921 Epoch 425/2000 +2025-03-25 09:47:38,817 Current Learning Rate: 0.0009619398 +2025-03-25 09:47:38,887 Train Loss: 0.0004704, Val Loss: 0.0005529 +2025-03-25 09:47:38,887 Epoch 426/2000 +2025-03-25 09:50:12,780 Current Learning Rate: 0.0009588773 +2025-03-25 09:50:12,780 Train Loss: 0.0004694, Val Loss: 0.0005684 +2025-03-25 09:50:12,780 Epoch 427/2000 +2025-03-25 09:52:45,716 Current Learning Rate: 0.0009557016 +2025-03-25 09:52:45,716 Train Loss: 0.0004719, Val Loss: 0.0005566 +2025-03-25 09:52:45,717 Epoch 428/2000 +2025-03-25 09:55:18,346 Current Learning Rate: 0.0009524135 +2025-03-25 09:55:18,346 Train Loss: 0.0004718, Val Loss: 0.0005562 +2025-03-25 09:55:18,346 Epoch 429/2000 +2025-03-25 09:57:51,644 Current Learning Rate: 0.0009490138 +2025-03-25 09:57:51,709 Train Loss: 0.0004664, Val 
Loss: 0.0005456 +2025-03-25 09:57:51,709 Epoch 430/2000 +2025-03-25 10:00:23,280 Current Learning Rate: 0.0009455033 +2025-03-25 10:00:23,345 Train Loss: 0.0004582, Val Loss: 0.0005445 +2025-03-25 10:00:23,346 Epoch 431/2000 +2025-03-25 10:02:56,526 Current Learning Rate: 0.0009418828 +2025-03-25 10:02:56,527 Train Loss: 0.0004589, Val Loss: 0.0005469 +2025-03-25 10:02:56,527 Epoch 432/2000 +2025-03-25 10:05:30,351 Current Learning Rate: 0.0009381533 +2025-03-25 10:05:30,351 Train Loss: 0.0004624, Val Loss: 0.0005481 +2025-03-25 10:05:30,351 Epoch 433/2000 +2025-03-25 10:08:03,600 Current Learning Rate: 0.0009343158 +2025-03-25 10:08:03,600 Train Loss: 0.0004634, Val Loss: 0.0005472 +2025-03-25 10:08:03,600 Epoch 434/2000 +2025-03-25 10:10:35,850 Current Learning Rate: 0.0009303710 +2025-03-25 10:10:35,851 Train Loss: 0.0004622, Val Loss: 0.0005464 +2025-03-25 10:10:35,852 Epoch 435/2000 +2025-03-25 10:13:08,831 Current Learning Rate: 0.0009263201 +2025-03-25 10:13:08,831 Train Loss: 0.0004659, Val Loss: 0.0005496 +2025-03-25 10:13:08,832 Epoch 436/2000 +2025-03-25 10:15:41,316 Current Learning Rate: 0.0009221640 +2025-03-25 10:15:41,317 Train Loss: 0.0004705, Val Loss: 0.0005601 +2025-03-25 10:15:41,317 Epoch 437/2000 +2025-03-25 10:18:13,959 Current Learning Rate: 0.0009179037 +2025-03-25 10:18:13,960 Train Loss: 0.0004758, Val Loss: 0.0005573 +2025-03-25 10:18:13,960 Epoch 438/2000 +2025-03-25 10:20:47,828 Current Learning Rate: 0.0009135403 +2025-03-25 10:20:47,828 Train Loss: 0.0004772, Val Loss: 0.0005562 +2025-03-25 10:20:47,828 Epoch 439/2000 +2025-03-25 10:23:21,097 Current Learning Rate: 0.0009090749 +2025-03-25 10:23:21,097 Train Loss: 0.0004709, Val Loss: 0.0005490 +2025-03-25 10:23:21,097 Epoch 440/2000 +2025-03-25 10:25:54,003 Current Learning Rate: 0.0009045085 +2025-03-25 10:25:54,063 Train Loss: 0.0004571, Val Loss: 0.0005343 +2025-03-25 10:25:54,064 Epoch 441/2000 +2025-03-25 10:28:26,872 Current Learning Rate: 0.0008998423 +2025-03-25 
10:28:26,928 Train Loss: 0.0004457, Val Loss: 0.0005323 +2025-03-25 10:28:26,929 Epoch 442/2000 +2025-03-25 10:31:00,108 Current Learning Rate: 0.0008950775 +2025-03-25 10:31:00,108 Train Loss: 0.0004471, Val Loss: 0.0005332 +2025-03-25 10:31:00,108 Epoch 443/2000 +2025-03-25 10:33:33,629 Current Learning Rate: 0.0008902152 +2025-03-25 10:33:33,630 Train Loss: 0.0004488, Val Loss: 0.0005326 +2025-03-25 10:33:33,630 Epoch 444/2000 +2025-03-25 10:36:06,875 Current Learning Rate: 0.0008852566 +2025-03-25 10:36:06,936 Train Loss: 0.0004517, Val Loss: 0.0005319 +2025-03-25 10:36:06,937 Epoch 445/2000 +2025-03-25 10:38:41,662 Current Learning Rate: 0.0008802030 +2025-03-25 10:38:41,663 Train Loss: 0.0004501, Val Loss: 0.0005320 +2025-03-25 10:38:41,663 Epoch 446/2000 +2025-03-25 10:41:15,521 Current Learning Rate: 0.0008750555 +2025-03-25 10:41:15,578 Train Loss: 0.0004461, Val Loss: 0.0005315 +2025-03-25 10:41:15,578 Epoch 447/2000 +2025-03-25 10:43:49,833 Current Learning Rate: 0.0008698155 +2025-03-25 10:43:49,834 Train Loss: 0.0004447, Val Loss: 0.0005320 +2025-03-25 10:43:49,834 Epoch 448/2000 +2025-03-25 10:46:23,061 Current Learning Rate: 0.0008644843 +2025-03-25 10:46:23,061 Train Loss: 0.0004500, Val Loss: 0.0005342 +2025-03-25 10:46:23,062 Epoch 449/2000 +2025-03-25 10:48:57,186 Current Learning Rate: 0.0008590631 +2025-03-25 10:48:57,186 Train Loss: 0.0004555, Val Loss: 0.0005419 +2025-03-25 10:48:57,186 Epoch 450/2000 +2025-03-25 10:51:31,606 Current Learning Rate: 0.0008535534 +2025-03-25 10:51:31,606 Train Loss: 0.0004565, Val Loss: 0.0005378 +2025-03-25 10:51:31,606 Epoch 451/2000 +2025-03-25 10:54:04,833 Current Learning Rate: 0.0008479564 +2025-03-25 10:54:04,834 Train Loss: 0.0004573, Val Loss: 0.0005443 +2025-03-25 10:54:04,834 Epoch 452/2000 +2025-03-25 10:56:37,502 Current Learning Rate: 0.0008422736 +2025-03-25 10:56:37,502 Train Loss: 0.0004607, Val Loss: 0.0005474 +2025-03-25 10:56:37,502 Epoch 453/2000 +2025-03-25 10:59:11,854 Current Learning 
Rate: 0.0008365063 +2025-03-25 10:59:11,855 Train Loss: 0.0004582, Val Loss: 0.0005438 +2025-03-25 10:59:11,855 Epoch 454/2000 +2025-03-25 11:01:46,265 Current Learning Rate: 0.0008306559 +2025-03-25 11:01:46,316 Train Loss: 0.0004460, Val Loss: 0.0005276 +2025-03-25 11:01:46,324 Epoch 455/2000 +2025-03-25 11:04:19,329 Current Learning Rate: 0.0008247240 +2025-03-25 11:04:19,385 Train Loss: 0.0004335, Val Loss: 0.0005188 +2025-03-25 11:04:19,385 Epoch 456/2000 +2025-03-25 11:06:52,663 Current Learning Rate: 0.0008187120 +2025-03-25 11:06:52,663 Train Loss: 0.0004305, Val Loss: 0.0005277 +2025-03-25 11:06:52,664 Epoch 457/2000 +2025-03-25 11:09:25,367 Current Learning Rate: 0.0008126213 +2025-03-25 11:09:25,431 Train Loss: 0.0004318, Val Loss: 0.0005185 +2025-03-25 11:09:25,431 Epoch 458/2000 +2025-03-25 11:11:57,860 Current Learning Rate: 0.0008064535 +2025-03-25 11:11:57,922 Train Loss: 0.0004338, Val Loss: 0.0005174 +2025-03-25 11:11:57,922 Epoch 459/2000 +2025-03-25 11:14:30,571 Current Learning Rate: 0.0008002101 +2025-03-25 11:14:30,571 Train Loss: 0.0004349, Val Loss: 0.0005187 +2025-03-25 11:14:30,571 Epoch 460/2000 +2025-03-25 11:17:03,036 Current Learning Rate: 0.0007938926 +2025-03-25 11:17:03,036 Train Loss: 0.0004358, Val Loss: 0.0005180 +2025-03-25 11:17:03,036 Epoch 461/2000 +2025-03-25 11:19:36,134 Current Learning Rate: 0.0007875026 +2025-03-25 11:19:36,134 Train Loss: 0.0004362, Val Loss: 0.0005237 +2025-03-25 11:19:36,134 Epoch 462/2000 +2025-03-25 11:22:09,673 Current Learning Rate: 0.0007810417 +2025-03-25 11:22:09,674 Train Loss: 0.0004377, Val Loss: 0.0005264 +2025-03-25 11:22:09,674 Epoch 463/2000 +2025-03-25 11:24:43,931 Current Learning Rate: 0.0007745114 +2025-03-25 11:24:43,931 Train Loss: 0.0004395, Val Loss: 0.0005297 +2025-03-25 11:24:43,931 Epoch 464/2000 +2025-03-25 11:27:17,914 Current Learning Rate: 0.0007679134 +2025-03-25 11:27:17,915 Train Loss: 0.0004420, Val Loss: 0.0005339 +2025-03-25 11:27:17,915 Epoch 465/2000 +2025-03-25 
11:29:51,891 Current Learning Rate: 0.0007612493 +2025-03-25 11:29:51,891 Train Loss: 0.0004427, Val Loss: 0.0005290 +2025-03-25 11:29:51,891 Epoch 466/2000 +2025-03-25 11:32:25,848 Current Learning Rate: 0.0007545207 +2025-03-25 11:32:25,848 Train Loss: 0.0004386, Val Loss: 0.0005250 +2025-03-25 11:32:25,849 Epoch 467/2000 +2025-03-25 11:34:59,132 Current Learning Rate: 0.0007477293 +2025-03-25 11:34:59,188 Train Loss: 0.0004292, Val Loss: 0.0005138 +2025-03-25 11:34:59,189 Epoch 468/2000 +2025-03-25 11:37:32,906 Current Learning Rate: 0.0007408768 +2025-03-25 11:37:32,963 Train Loss: 0.0004213, Val Loss: 0.0005076 +2025-03-25 11:37:32,963 Epoch 469/2000 +2025-03-25 11:40:06,536 Current Learning Rate: 0.0007339649 +2025-03-25 11:40:06,594 Train Loss: 0.0004182, Val Loss: 0.0005054 +2025-03-25 11:40:06,594 Epoch 470/2000 +2025-03-25 11:42:39,931 Current Learning Rate: 0.0007269952 +2025-03-25 11:42:39,931 Train Loss: 0.0004186, Val Loss: 0.0005058 +2025-03-25 11:42:39,932 Epoch 471/2000 +2025-03-25 11:45:13,521 Current Learning Rate: 0.0007199696 +2025-03-25 11:45:13,590 Train Loss: 0.0004202, Val Loss: 0.0005052 +2025-03-25 11:45:13,590 Epoch 472/2000 +2025-03-25 11:47:47,446 Current Learning Rate: 0.0007128896 +2025-03-25 11:47:47,503 Train Loss: 0.0004204, Val Loss: 0.0005043 +2025-03-25 11:47:47,503 Epoch 473/2000 +2025-03-25 11:50:21,746 Current Learning Rate: 0.0007057572 +2025-03-25 11:50:21,746 Train Loss: 0.0004210, Val Loss: 0.0005072 +2025-03-25 11:50:21,746 Epoch 474/2000 +2025-03-25 11:52:54,539 Current Learning Rate: 0.0006985739 +2025-03-25 11:52:54,539 Train Loss: 0.0004234, Val Loss: 0.0005110 +2025-03-25 11:52:54,540 Epoch 475/2000 +2025-03-25 11:55:28,402 Current Learning Rate: 0.0006913417 +2025-03-25 11:55:28,402 Train Loss: 0.0004249, Val Loss: 0.0005142 +2025-03-25 11:55:28,402 Epoch 476/2000 +2025-03-25 11:58:01,638 Current Learning Rate: 0.0006840623 +2025-03-25 11:58:01,638 Train Loss: 0.0004256, Val Loss: 0.0005159 +2025-03-25 
11:58:01,638 Epoch 477/2000 +2025-03-25 12:00:34,277 Current Learning Rate: 0.0006767374 +2025-03-25 12:00:34,277 Train Loss: 0.0004251, Val Loss: 0.0005101 +2025-03-25 12:00:34,277 Epoch 478/2000 +2025-03-25 12:03:07,627 Current Learning Rate: 0.0006693690 +2025-03-25 12:03:07,627 Train Loss: 0.0004232, Val Loss: 0.0005069 +2025-03-25 12:03:07,628 Epoch 479/2000 +2025-03-25 12:05:41,586 Current Learning Rate: 0.0006619587 +2025-03-25 12:05:41,644 Train Loss: 0.0004195, Val Loss: 0.0005033 +2025-03-25 12:05:41,644 Epoch 480/2000 +2025-03-25 12:08:15,560 Current Learning Rate: 0.0006545085 +2025-03-25 12:08:15,615 Train Loss: 0.0004146, Val Loss: 0.0004999 +2025-03-25 12:08:15,615 Epoch 481/2000 +2025-03-25 12:10:49,292 Current Learning Rate: 0.0006470202 +2025-03-25 12:10:49,367 Train Loss: 0.0004106, Val Loss: 0.0004961 +2025-03-25 12:10:49,367 Epoch 482/2000 +2025-03-25 12:13:23,991 Current Learning Rate: 0.0006394956 +2025-03-25 12:13:24,049 Train Loss: 0.0004091, Val Loss: 0.0004945 +2025-03-25 12:13:24,050 Epoch 483/2000 +2025-03-25 12:15:58,563 Current Learning Rate: 0.0006319365 +2025-03-25 12:15:58,652 Train Loss: 0.0004092, Val Loss: 0.0004941 +2025-03-25 12:15:58,652 Epoch 484/2000 +2025-03-25 12:18:32,200 Current Learning Rate: 0.0006243449 +2025-03-25 12:18:32,201 Train Loss: 0.0004102, Val Loss: 0.0004989 +2025-03-25 12:18:32,201 Epoch 485/2000 +2025-03-25 12:21:06,560 Current Learning Rate: 0.0006167227 +2025-03-25 12:21:06,561 Train Loss: 0.0004130, Val Loss: 0.0004990 +2025-03-25 12:21:06,561 Epoch 486/2000 +2025-03-25 12:23:40,385 Current Learning Rate: 0.0006090716 +2025-03-25 12:23:40,386 Train Loss: 0.0004158, Val Loss: 0.0004980 +2025-03-25 12:23:40,386 Epoch 487/2000 +2025-03-25 12:26:14,495 Current Learning Rate: 0.0006013936 +2025-03-25 12:26:14,496 Train Loss: 0.0004214, Val Loss: 0.0004988 +2025-03-25 12:26:14,496 Epoch 488/2000 +2025-03-25 12:28:47,260 Current Learning Rate: 0.0005936907 +2025-03-25 12:28:47,320 Train Loss: 0.0004226, Val 
Loss: 0.0004940 +2025-03-25 12:28:47,320 Epoch 489/2000 +2025-03-25 12:31:20,716 Current Learning Rate: 0.0005859646 +2025-03-25 12:31:20,771 Train Loss: 0.0004165, Val Loss: 0.0004898 +2025-03-25 12:31:20,772 Epoch 490/2000 +2025-03-25 12:33:55,019 Current Learning Rate: 0.0005782172 +2025-03-25 12:33:55,088 Train Loss: 0.0004082, Val Loss: 0.0004848 +2025-03-25 12:33:55,088 Epoch 491/2000 +2025-03-25 12:36:28,338 Current Learning Rate: 0.0005704506 +2025-03-25 12:36:28,416 Train Loss: 0.0004002, Val Loss: 0.0004813 +2025-03-25 12:36:28,416 Epoch 492/2000 +2025-03-25 12:39:02,801 Current Learning Rate: 0.0005626666 +2025-03-25 12:39:02,873 Train Loss: 0.0003959, Val Loss: 0.0004793 +2025-03-25 12:39:02,874 Epoch 493/2000 +2025-03-25 12:41:36,441 Current Learning Rate: 0.0005548672 +2025-03-25 12:41:36,515 Train Loss: 0.0003941, Val Loss: 0.0004789 +2025-03-25 12:41:36,516 Epoch 494/2000 +2025-03-25 12:44:10,153 Current Learning Rate: 0.0005470542 +2025-03-25 12:44:10,153 Train Loss: 0.0003934, Val Loss: 0.0004800 +2025-03-25 12:44:10,153 Epoch 495/2000 +2025-03-25 12:46:44,297 Current Learning Rate: 0.0005392295 +2025-03-25 12:46:44,355 Train Loss: 0.0003945, Val Loss: 0.0004785 +2025-03-25 12:46:44,355 Epoch 496/2000 +2025-03-25 12:49:18,577 Current Learning Rate: 0.0005313953 +2025-03-25 12:49:18,577 Train Loss: 0.0003957, Val Loss: 0.0004801 +2025-03-25 12:49:18,577 Epoch 497/2000 +2025-03-25 12:51:52,056 Current Learning Rate: 0.0005235532 +2025-03-25 12:51:52,057 Train Loss: 0.0003969, Val Loss: 0.0004798 +2025-03-25 12:51:52,057 Epoch 498/2000 +2025-03-25 12:54:26,266 Current Learning Rate: 0.0005157054 +2025-03-25 12:54:26,266 Train Loss: 0.0003979, Val Loss: 0.0004829 +2025-03-25 12:54:26,267 Epoch 499/2000 +2025-03-25 12:56:59,608 Current Learning Rate: 0.0005078537 +2025-03-25 12:56:59,609 Train Loss: 0.0003988, Val Loss: 0.0004826 +2025-03-25 12:56:59,609 Epoch 500/2000 +2025-03-25 12:59:32,467 Current Learning Rate: 0.0005000000 +2025-03-25 
12:59:32,467 Train Loss: 0.0004007, Val Loss: 0.0004832 +2025-03-25 12:59:32,467 Epoch 501/2000 +2025-03-25 13:02:04,801 Current Learning Rate: 0.0004921463 +2025-03-25 13:02:04,801 Train Loss: 0.0004016, Val Loss: 0.0004851 +2025-03-25 13:02:04,801 Epoch 502/2000 +2025-03-25 13:04:38,318 Current Learning Rate: 0.0004842946 +2025-03-25 13:04:38,318 Train Loss: 0.0004011, Val Loss: 0.0004832 +2025-03-25 13:04:38,318 Epoch 503/2000 +2025-03-25 13:07:13,048 Current Learning Rate: 0.0004764468 +2025-03-25 13:07:13,048 Train Loss: 0.0003987, Val Loss: 0.0004790 +2025-03-25 13:07:13,048 Epoch 504/2000 +2025-03-25 13:09:46,317 Current Learning Rate: 0.0004686047 +2025-03-25 13:09:46,390 Train Loss: 0.0003943, Val Loss: 0.0004757 +2025-03-25 13:09:46,390 Epoch 505/2000 +2025-03-25 13:12:19,929 Current Learning Rate: 0.0004607705 +2025-03-25 13:12:20,001 Train Loss: 0.0003894, Val Loss: 0.0004725 +2025-03-25 13:12:20,001 Epoch 506/2000 +2025-03-25 13:14:53,803 Current Learning Rate: 0.0004529458 +2025-03-25 13:14:53,868 Train Loss: 0.0003856, Val Loss: 0.0004695 +2025-03-25 13:14:53,868 Epoch 507/2000 +2025-03-25 13:17:27,401 Current Learning Rate: 0.0004451328 +2025-03-25 13:17:27,469 Train Loss: 0.0003832, Val Loss: 0.0004680 +2025-03-25 13:17:27,470 Epoch 508/2000 +2025-03-25 13:20:01,837 Current Learning Rate: 0.0004373334 +2025-03-25 13:20:01,894 Train Loss: 0.0003819, Val Loss: 0.0004673 +2025-03-25 13:20:01,894 Epoch 509/2000 +2025-03-25 13:22:35,757 Current Learning Rate: 0.0004295494 +2025-03-25 13:22:35,757 Train Loss: 0.0003816, Val Loss: 0.0004674 +2025-03-25 13:22:35,758 Epoch 510/2000 +2025-03-25 13:25:08,803 Current Learning Rate: 0.0004217828 +2025-03-25 13:25:08,804 Train Loss: 0.0003834, Val Loss: 0.0004676 +2025-03-25 13:25:08,804 Epoch 511/2000 +2025-03-25 13:27:42,005 Current Learning Rate: 0.0004140354 +2025-03-25 13:27:42,005 Train Loss: 0.0003842, Val Loss: 0.0004715 +2025-03-25 13:27:42,005 Epoch 512/2000 +2025-03-25 13:30:15,451 Current Learning 
Rate: 0.0004063093 +2025-03-25 13:30:15,453 Train Loss: 0.0003852, Val Loss: 0.0004730 +2025-03-25 13:30:15,453 Epoch 513/2000 +2025-03-25 13:32:48,813 Current Learning Rate: 0.0003986064 +2025-03-25 13:32:48,814 Train Loss: 0.0003864, Val Loss: 0.0004751 +2025-03-25 13:32:48,814 Epoch 514/2000 +2025-03-25 13:35:21,274 Current Learning Rate: 0.0003909284 +2025-03-25 13:35:21,275 Train Loss: 0.0003882, Val Loss: 0.0004734 +2025-03-25 13:35:21,275 Epoch 515/2000 +2025-03-25 13:37:54,121 Current Learning Rate: 0.0003832773 +2025-03-25 13:37:54,122 Train Loss: 0.0003890, Val Loss: 0.0004711 +2025-03-25 13:37:54,122 Epoch 516/2000 +2025-03-25 13:40:26,643 Current Learning Rate: 0.0003756551 +2025-03-25 13:40:26,699 Train Loss: 0.0003874, Val Loss: 0.0004670 +2025-03-25 13:40:26,700 Epoch 517/2000 +2025-03-25 13:42:59,912 Current Learning Rate: 0.0003680635 +2025-03-25 13:43:00,001 Train Loss: 0.0003835, Val Loss: 0.0004648 +2025-03-25 13:43:00,002 Epoch 518/2000 +2025-03-25 13:45:32,615 Current Learning Rate: 0.0003605044 +2025-03-25 13:45:32,678 Train Loss: 0.0003792, Val Loss: 0.0004617 +2025-03-25 13:45:32,679 Epoch 519/2000 +2025-03-25 13:48:05,976 Current Learning Rate: 0.0003529798 +2025-03-25 13:48:06,054 Train Loss: 0.0003760, Val Loss: 0.0004592 +2025-03-25 13:48:06,054 Epoch 520/2000 +2025-03-25 13:50:40,235 Current Learning Rate: 0.0003454915 +2025-03-25 13:50:40,291 Train Loss: 0.0003739, Val Loss: 0.0004571 +2025-03-25 13:50:40,291 Epoch 521/2000 +2025-03-25 13:53:14,615 Current Learning Rate: 0.0003380413 +2025-03-25 13:53:14,691 Train Loss: 0.0003724, Val Loss: 0.0004556 +2025-03-25 13:53:14,692 Epoch 522/2000 +2025-03-25 13:55:49,234 Current Learning Rate: 0.0003306310 +2025-03-25 13:55:49,290 Train Loss: 0.0003714, Val Loss: 0.0004550 +2025-03-25 13:55:49,290 Epoch 523/2000 +2025-03-25 13:58:23,372 Current Learning Rate: 0.0003232626 +2025-03-25 13:58:23,373 Train Loss: 0.0003710, Val Loss: 0.0004551 +2025-03-25 13:58:23,373 Epoch 524/2000 +2025-03-25 
14:00:57,497 Current Learning Rate: 0.0003159377 +2025-03-25 14:00:57,497 Train Loss: 0.0003719, Val Loss: 0.0004594 +2025-03-25 14:00:57,498 Epoch 525/2000 +2025-03-25 14:03:31,908 Current Learning Rate: 0.0003086583 +2025-03-25 14:03:31,908 Train Loss: 0.0003731, Val Loss: 0.0004612 +2025-03-25 14:03:31,908 Epoch 526/2000 +2025-03-25 14:06:05,406 Current Learning Rate: 0.0003014261 +2025-03-25 14:06:05,407 Train Loss: 0.0003738, Val Loss: 0.0004561 +2025-03-25 14:06:05,407 Epoch 527/2000 +2025-03-25 14:08:38,655 Current Learning Rate: 0.0002942428 +2025-03-25 14:08:38,655 Train Loss: 0.0003745, Val Loss: 0.0004577 +2025-03-25 14:08:38,655 Epoch 528/2000 +2025-03-25 14:11:11,618 Current Learning Rate: 0.0002871104 +2025-03-25 14:11:11,619 Train Loss: 0.0003754, Val Loss: 0.0004570 +2025-03-25 14:11:11,619 Epoch 529/2000 +2025-03-25 14:13:45,365 Current Learning Rate: 0.0002800304 +2025-03-25 14:13:45,422 Train Loss: 0.0003747, Val Loss: 0.0004547 +2025-03-25 14:13:45,422 Epoch 530/2000 +2025-03-25 14:16:19,422 Current Learning Rate: 0.0002730048 +2025-03-25 14:16:19,477 Train Loss: 0.0003725, Val Loss: 0.0004527 +2025-03-25 14:16:19,478 Epoch 531/2000 +2025-03-25 14:18:53,296 Current Learning Rate: 0.0002660351 +2025-03-25 14:18:53,355 Train Loss: 0.0003701, Val Loss: 0.0004508 +2025-03-25 14:18:53,355 Epoch 532/2000 +2025-03-25 14:21:26,830 Current Learning Rate: 0.0002591232 +2025-03-25 14:21:26,892 Train Loss: 0.0003680, Val Loss: 0.0004494 +2025-03-25 14:21:26,893 Epoch 533/2000 +2025-03-25 14:24:00,805 Current Learning Rate: 0.0002522707 +2025-03-25 14:24:00,863 Train Loss: 0.0003664, Val Loss: 0.0004482 +2025-03-25 14:24:00,863 Epoch 534/2000 +2025-03-25 14:26:34,275 Current Learning Rate: 0.0002454793 +2025-03-25 14:26:34,342 Train Loss: 0.0003652, Val Loss: 0.0004471 +2025-03-25 14:26:34,342 Epoch 535/2000 +2025-03-25 14:29:08,039 Current Learning Rate: 0.0002387507 +2025-03-25 14:29:08,125 Train Loss: 0.0003643, Val Loss: 0.0004466 +2025-03-25 
14:29:08,125 Epoch 536/2000 +2025-03-25 14:31:42,306 Current Learning Rate: 0.0002320866 +2025-03-25 14:31:42,368 Train Loss: 0.0003639, Val Loss: 0.0004465 +2025-03-25 14:31:42,369 Epoch 537/2000 +2025-03-25 14:34:15,939 Current Learning Rate: 0.0002254886 +2025-03-25 14:34:15,940 Train Loss: 0.0003646, Val Loss: 0.0004478 +2025-03-25 14:34:15,940 Epoch 538/2000 +2025-03-25 14:36:50,258 Current Learning Rate: 0.0002189583 +2025-03-25 14:36:50,258 Train Loss: 0.0003655, Val Loss: 0.0004489 +2025-03-25 14:36:50,259 Epoch 539/2000 +2025-03-25 14:39:24,364 Current Learning Rate: 0.0002124974 +2025-03-25 14:39:24,365 Train Loss: 0.0003660, Val Loss: 0.0004473 +2025-03-25 14:39:24,365 Epoch 540/2000 +2025-03-25 14:41:58,093 Current Learning Rate: 0.0002061074 +2025-03-25 14:41:58,148 Train Loss: 0.0003655, Val Loss: 0.0004457 +2025-03-25 14:41:58,149 Epoch 541/2000 +2025-03-25 14:44:31,008 Current Learning Rate: 0.0001997899 +2025-03-25 14:44:31,061 Train Loss: 0.0003643, Val Loss: 0.0004445 +2025-03-25 14:44:31,061 Epoch 542/2000 +2025-03-25 14:47:03,818 Current Learning Rate: 0.0001935465 +2025-03-25 14:47:03,883 Train Loss: 0.0003631, Val Loss: 0.0004434 +2025-03-25 14:47:03,883 Epoch 543/2000 +2025-03-25 14:49:36,476 Current Learning Rate: 0.0001873787 +2025-03-25 14:49:36,532 Train Loss: 0.0003619, Val Loss: 0.0004425 +2025-03-25 14:49:36,532 Epoch 544/2000 +2025-03-25 14:52:10,419 Current Learning Rate: 0.0001812880 +2025-03-25 14:52:10,483 Train Loss: 0.0003609, Val Loss: 0.0004418 +2025-03-25 14:52:10,483 Epoch 545/2000 +2025-03-25 14:54:44,347 Current Learning Rate: 0.0001752760 +2025-03-25 14:54:44,425 Train Loss: 0.0003601, Val Loss: 0.0004412 +2025-03-25 14:54:44,426 Epoch 546/2000 +2025-03-25 14:57:18,372 Current Learning Rate: 0.0001693441 +2025-03-25 14:57:18,432 Train Loss: 0.0003595, Val Loss: 0.0004401 +2025-03-25 14:57:18,433 Epoch 547/2000 +2025-03-25 14:59:53,044 Current Learning Rate: 0.0001634937 +2025-03-25 14:59:53,107 Train Loss: 0.0003594, Val 
Loss: 0.0004392 +2025-03-25 14:59:53,107 Epoch 548/2000 +2025-03-25 15:02:26,486 Current Learning Rate: 0.0001577264 +2025-03-25 15:02:26,541 Train Loss: 0.0003593, Val Loss: 0.0004384 +2025-03-25 15:02:26,542 Epoch 549/2000 +2025-03-25 15:05:00,464 Current Learning Rate: 0.0001520436 +2025-03-25 15:05:00,529 Train Loss: 0.0003586, Val Loss: 0.0004371 +2025-03-25 15:05:00,529 Epoch 550/2000 +2025-03-25 15:07:34,776 Current Learning Rate: 0.0001464466 +2025-03-25 15:07:34,829 Train Loss: 0.0003577, Val Loss: 0.0004362 +2025-03-25 15:07:34,830 Epoch 551/2000 +2025-03-25 15:10:09,262 Current Learning Rate: 0.0001409369 +2025-03-25 15:10:09,329 Train Loss: 0.0003567, Val Loss: 0.0004355 +2025-03-25 15:10:09,330 Epoch 552/2000 +2025-03-25 15:12:42,654 Current Learning Rate: 0.0001355157 +2025-03-25 15:12:42,715 Train Loss: 0.0003558, Val Loss: 0.0004349 +2025-03-25 15:12:42,716 Epoch 553/2000 +2025-03-25 15:15:15,067 Current Learning Rate: 0.0001301845 +2025-03-25 15:15:15,144 Train Loss: 0.0003551, Val Loss: 0.0004342 +2025-03-25 15:15:15,145 Epoch 554/2000 +2025-03-25 15:17:49,098 Current Learning Rate: 0.0001249445 +2025-03-25 15:17:49,166 Train Loss: 0.0003546, Val Loss: 0.0004336 +2025-03-25 15:17:49,166 Epoch 555/2000 +2025-03-25 15:20:22,559 Current Learning Rate: 0.0001197970 +2025-03-25 15:20:22,632 Train Loss: 0.0003541, Val Loss: 0.0004332 +2025-03-25 15:20:22,632 Epoch 556/2000 +2025-03-25 15:22:56,088 Current Learning Rate: 0.0001147434 +2025-03-25 15:22:56,161 Train Loss: 0.0003536, Val Loss: 0.0004329 +2025-03-25 15:22:56,162 Epoch 557/2000 +2025-03-25 15:25:30,418 Current Learning Rate: 0.0001097848 +2025-03-25 15:25:30,473 Train Loss: 0.0003533, Val Loss: 0.0004325 +2025-03-25 15:25:30,473 Epoch 558/2000 +2025-03-25 15:28:04,365 Current Learning Rate: 0.0001049225 +2025-03-25 15:28:04,426 Train Loss: 0.0003529, Val Loss: 0.0004322 +2025-03-25 15:28:04,426 Epoch 559/2000 +2025-03-25 15:30:36,920 Current Learning Rate: 0.0001001577 +2025-03-25 
15:30:36,977 Train Loss: 0.0003524, Val Loss: 0.0004322 +2025-03-25 15:30:36,977 Epoch 560/2000 +2025-03-25 15:33:09,597 Current Learning Rate: 0.0000954915 +2025-03-25 15:33:09,696 Train Loss: 0.0003518, Val Loss: 0.0004320 +2025-03-25 15:33:09,697 Epoch 561/2000 +2025-03-25 15:35:42,685 Current Learning Rate: 0.0000909251 +2025-03-25 15:35:42,753 Train Loss: 0.0003512, Val Loss: 0.0004318 +2025-03-25 15:35:42,753 Epoch 562/2000 +2025-03-25 15:38:15,591 Current Learning Rate: 0.0000864597 +2025-03-25 15:38:15,649 Train Loss: 0.0003507, Val Loss: 0.0004315 +2025-03-25 15:38:15,649 Epoch 563/2000 +2025-03-25 15:40:49,615 Current Learning Rate: 0.0000820963 +2025-03-25 15:40:49,679 Train Loss: 0.0003502, Val Loss: 0.0004312 +2025-03-25 15:40:49,679 Epoch 564/2000 +2025-03-25 15:43:22,738 Current Learning Rate: 0.0000778360 +2025-03-25 15:43:22,802 Train Loss: 0.0003497, Val Loss: 0.0004306 +2025-03-25 15:43:22,802 Epoch 565/2000 +2025-03-25 15:45:56,655 Current Learning Rate: 0.0000736799 +2025-03-25 15:45:56,735 Train Loss: 0.0003493, Val Loss: 0.0004297 +2025-03-25 15:45:56,735 Epoch 566/2000 +2025-03-25 15:48:30,137 Current Learning Rate: 0.0000696290 +2025-03-25 15:48:30,201 Train Loss: 0.0003488, Val Loss: 0.0004287 +2025-03-25 15:48:30,201 Epoch 567/2000 +2025-03-25 15:51:03,318 Current Learning Rate: 0.0000656842 +2025-03-25 15:51:03,380 Train Loss: 0.0003484, Val Loss: 0.0004282 +2025-03-25 15:51:03,380 Epoch 568/2000 +2025-03-25 15:53:36,866 Current Learning Rate: 0.0000618467 +2025-03-25 15:53:36,936 Train Loss: 0.0003480, Val Loss: 0.0004280 +2025-03-25 15:53:36,936 Epoch 569/2000 +2025-03-25 15:56:11,284 Current Learning Rate: 0.0000581172 +2025-03-25 15:56:11,359 Train Loss: 0.0003476, Val Loss: 0.0004277 +2025-03-25 15:56:11,359 Epoch 570/2000 +2025-03-25 15:58:45,534 Current Learning Rate: 0.0000544967 +2025-03-25 15:58:45,609 Train Loss: 0.0003472, Val Loss: 0.0004272 +2025-03-25 15:58:45,609 Epoch 571/2000 +2025-03-25 16:01:19,510 Current Learning 
Rate: 0.0000509862 +2025-03-25 16:01:19,579 Train Loss: 0.0003468, Val Loss: 0.0004267 +2025-03-25 16:01:19,579 Epoch 572/2000 +2025-03-25 16:03:52,743 Current Learning Rate: 0.0000475865 +2025-03-25 16:03:52,804 Train Loss: 0.0003464, Val Loss: 0.0004262 +2025-03-25 16:03:52,804 Epoch 573/2000 +2025-03-25 16:06:26,936 Current Learning Rate: 0.0000442984 +2025-03-25 16:06:26,993 Train Loss: 0.0003460, Val Loss: 0.0004258 +2025-03-25 16:06:26,994 Epoch 574/2000 +2025-03-25 16:09:00,502 Current Learning Rate: 0.0000411227 +2025-03-25 16:09:00,556 Train Loss: 0.0003457, Val Loss: 0.0004254 +2025-03-25 16:09:00,557 Epoch 575/2000 +2025-03-25 16:11:33,647 Current Learning Rate: 0.0000380602 +2025-03-25 16:11:33,713 Train Loss: 0.0003453, Val Loss: 0.0004251 +2025-03-25 16:11:33,713 Epoch 576/2000 +2025-03-25 16:14:07,008 Current Learning Rate: 0.0000351118 +2025-03-25 16:14:07,072 Train Loss: 0.0003450, Val Loss: 0.0004249 +2025-03-25 16:14:07,072 Epoch 577/2000 +2025-03-25 16:16:40,340 Current Learning Rate: 0.0000322780 +2025-03-25 16:16:40,409 Train Loss: 0.0003447, Val Loss: 0.0004247 +2025-03-25 16:16:40,409 Epoch 578/2000 +2025-03-25 16:19:13,042 Current Learning Rate: 0.0000295596 +2025-03-25 16:19:13,109 Train Loss: 0.0003444, Val Loss: 0.0004244 +2025-03-25 16:19:13,110 Epoch 579/2000 +2025-03-25 16:21:45,757 Current Learning Rate: 0.0000269573 +2025-03-25 16:21:45,856 Train Loss: 0.0003441, Val Loss: 0.0004242 +2025-03-25 16:21:45,856 Epoch 580/2000 +2025-03-25 16:24:19,299 Current Learning Rate: 0.0000244717 +2025-03-25 16:24:19,352 Train Loss: 0.0003438, Val Loss: 0.0004240 +2025-03-25 16:24:19,353 Epoch 581/2000 +2025-03-25 16:26:53,185 Current Learning Rate: 0.0000221035 +2025-03-25 16:26:53,241 Train Loss: 0.0003436, Val Loss: 0.0004237 +2025-03-25 16:26:53,242 Epoch 582/2000 +2025-03-25 16:29:26,205 Current Learning Rate: 0.0000198532 +2025-03-25 16:29:26,281 Train Loss: 0.0003433, Val Loss: 0.0004235 +2025-03-25 16:29:26,282 Epoch 583/2000 +2025-03-25 
16:31:58,703 Current Learning Rate: 0.0000177213 +2025-03-25 16:31:58,773 Train Loss: 0.0003431, Val Loss: 0.0004233 +2025-03-25 16:31:58,773 Epoch 584/2000 +2025-03-25 16:34:31,486 Current Learning Rate: 0.0000157084 +2025-03-25 16:34:31,568 Train Loss: 0.0003428, Val Loss: 0.0004230 +2025-03-25 16:34:31,568 Epoch 585/2000 +2025-03-25 16:37:04,187 Current Learning Rate: 0.0000138150 +2025-03-25 16:37:04,241 Train Loss: 0.0003426, Val Loss: 0.0004227 +2025-03-25 16:37:04,241 Epoch 586/2000 +2025-03-25 16:39:36,498 Current Learning Rate: 0.0000120416 +2025-03-25 16:39:36,557 Train Loss: 0.0003424, Val Loss: 0.0004224 +2025-03-25 16:39:36,557 Epoch 587/2000 +2025-03-25 16:42:09,597 Current Learning Rate: 0.0000103886 +2025-03-25 16:42:09,658 Train Loss: 0.0003422, Val Loss: 0.0004222 +2025-03-25 16:42:09,659 Epoch 588/2000 +2025-03-25 16:44:43,799 Current Learning Rate: 0.0000088564 +2025-03-25 16:44:43,863 Train Loss: 0.0003420, Val Loss: 0.0004220 +2025-03-25 16:44:43,863 Epoch 589/2000 +2025-03-25 16:47:17,842 Current Learning Rate: 0.0000074453 +2025-03-25 16:47:17,903 Train Loss: 0.0003418, Val Loss: 0.0004219 +2025-03-25 16:47:17,903 Epoch 590/2000 +2025-03-25 16:49:52,173 Current Learning Rate: 0.0000061558 +2025-03-25 16:49:52,247 Train Loss: 0.0003417, Val Loss: 0.0004217 +2025-03-25 16:49:52,247 Epoch 591/2000 +2025-03-25 16:52:26,897 Current Learning Rate: 0.0000049882 +2025-03-25 16:52:26,969 Train Loss: 0.0003415, Val Loss: 0.0004216 +2025-03-25 16:52:26,969 Epoch 592/2000 +2025-03-25 16:55:00,818 Current Learning Rate: 0.0000039426 +2025-03-25 16:55:00,895 Train Loss: 0.0003414, Val Loss: 0.0004215 +2025-03-25 16:55:00,895 Epoch 593/2000 +2025-03-25 16:57:35,416 Current Learning Rate: 0.0000030195 +2025-03-25 16:57:35,476 Train Loss: 0.0003413, Val Loss: 0.0004214 +2025-03-25 16:57:35,476 Epoch 594/2000 +2025-03-25 17:00:08,926 Current Learning Rate: 0.0000022190 +2025-03-25 17:00:08,994 Train Loss: 0.0003412, Val Loss: 0.0004214 +2025-03-25 
17:00:08,994 Epoch 595/2000 +2025-03-25 17:02:43,225 Current Learning Rate: 0.0000015413 +2025-03-25 17:02:43,289 Train Loss: 0.0003411, Val Loss: 0.0004213 +2025-03-25 17:02:43,289 Epoch 596/2000 +2025-03-25 17:05:16,919 Current Learning Rate: 0.0000009866 +2025-03-25 17:05:16,992 Train Loss: 0.0003410, Val Loss: 0.0004213 +2025-03-25 17:05:16,992 Epoch 597/2000 +2025-03-25 17:07:50,265 Current Learning Rate: 0.0000005551 +2025-03-25 17:07:50,323 Train Loss: 0.0003409, Val Loss: 0.0004213 +2025-03-25 17:07:50,323 Epoch 598/2000 +2025-03-25 17:10:24,047 Current Learning Rate: 0.0000002467 +2025-03-25 17:10:24,112 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:10:24,112 Epoch 599/2000 +2025-03-25 17:12:57,126 Current Learning Rate: 0.0000000617 +2025-03-25 17:12:57,201 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:12:57,201 Epoch 600/2000 +2025-03-25 17:15:30,590 Current Learning Rate: 0.0000000000 +2025-03-25 17:15:30,649 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:15:30,650 Epoch 601/2000 +2025-03-25 17:18:03,595 Current Learning Rate: 0.0000000617 +2025-03-25 17:18:03,596 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:18:03,596 Epoch 602/2000 +2025-03-25 17:20:36,673 Current Learning Rate: 0.0000002467 +2025-03-25 17:20:36,725 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:20:36,725 Epoch 603/2000 +2025-03-25 17:23:10,247 Current Learning Rate: 0.0000005551 +2025-03-25 17:23:10,248 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:23:10,248 Epoch 604/2000 +2025-03-25 17:25:44,032 Current Learning Rate: 0.0000009866 +2025-03-25 17:25:44,034 Train Loss: 0.0003408, Val Loss: 0.0004213 +2025-03-25 17:25:44,035 Epoch 605/2000 +2025-03-25 17:28:16,905 Current Learning Rate: 0.0000015413 +2025-03-25 17:28:16,906 Train Loss: 0.0003409, Val Loss: 0.0004213 +2025-03-25 17:28:16,906 Epoch 606/2000 +2025-03-25 17:30:50,249 Current Learning Rate: 0.0000022190 +2025-03-25 17:30:50,249 Train Loss: 0.0003409, Val 
Loss: 0.0004213 +2025-03-25 17:30:50,250 Epoch 607/2000 +2025-03-25 17:33:23,181 Current Learning Rate: 0.0000030195 +2025-03-25 17:33:23,182 Train Loss: 0.0003410, Val Loss: 0.0004213 +2025-03-25 17:33:23,182 Epoch 608/2000 +2025-03-25 17:35:56,629 Current Learning Rate: 0.0000039426 +2025-03-25 17:35:56,630 Train Loss: 0.0003411, Val Loss: 0.0004213 +2025-03-25 17:35:56,630 Epoch 609/2000 +2025-03-25 17:38:29,367 Current Learning Rate: 0.0000049882 +2025-03-25 17:38:29,368 Train Loss: 0.0003411, Val Loss: 0.0004213 +2025-03-25 17:38:29,368 Epoch 610/2000 +2025-03-25 17:41:01,683 Current Learning Rate: 0.0000061558 +2025-03-25 17:41:01,683 Train Loss: 0.0003412, Val Loss: 0.0004214 +2025-03-25 17:41:01,683 Epoch 611/2000 +2025-03-25 17:43:33,773 Current Learning Rate: 0.0000074453 +2025-03-25 17:43:33,774 Train Loss: 0.0003413, Val Loss: 0.0004215 +2025-03-25 17:43:33,774 Epoch 612/2000 +2025-03-25 17:46:07,648 Current Learning Rate: 0.0000088564 +2025-03-25 17:46:07,649 Train Loss: 0.0003414, Val Loss: 0.0004216 +2025-03-25 17:46:07,649 Epoch 613/2000 +2025-03-25 17:48:41,717 Current Learning Rate: 0.0000103886 +2025-03-25 17:48:41,717 Train Loss: 0.0003415, Val Loss: 0.0004217 +2025-03-25 17:48:41,718 Epoch 614/2000 +2025-03-25 17:51:14,762 Current Learning Rate: 0.0000120416 +2025-03-25 17:51:14,763 Train Loss: 0.0003416, Val Loss: 0.0004218 +2025-03-25 17:51:14,763 Epoch 615/2000 +2025-03-25 17:53:48,915 Current Learning Rate: 0.0000138150 +2025-03-25 17:53:48,916 Train Loss: 0.0003417, Val Loss: 0.0004219 +2025-03-25 17:53:48,916 Epoch 616/2000 +2025-03-25 17:56:22,597 Current Learning Rate: 0.0000157084 +2025-03-25 17:56:22,597 Train Loss: 0.0003418, Val Loss: 0.0004221 +2025-03-25 17:56:22,597 Epoch 617/2000 +2025-03-25 17:58:57,053 Current Learning Rate: 0.0000177213 +2025-03-25 17:58:57,054 Train Loss: 0.0003419, Val Loss: 0.0004222 +2025-03-25 17:58:57,054 Epoch 618/2000 +2025-03-25 18:01:31,970 Current Learning Rate: 0.0000198532 +2025-03-25 
18:01:31,970 Train Loss: 0.0003420, Val Loss: 0.0004224 +2025-03-25 18:01:31,971 Epoch 619/2000 +2025-03-25 18:04:05,280 Current Learning Rate: 0.0000221035 +2025-03-25 18:04:05,281 Train Loss: 0.0003421, Val Loss: 0.0004225 +2025-03-25 18:04:05,281 Epoch 620/2000 +2025-03-25 18:06:39,235 Current Learning Rate: 0.0000244717 +2025-03-25 18:06:39,236 Train Loss: 0.0003422, Val Loss: 0.0004226 +2025-03-25 18:06:39,236 Epoch 621/2000 +2025-03-25 18:09:12,684 Current Learning Rate: 0.0000269573 +2025-03-25 18:09:12,685 Train Loss: 0.0003424, Val Loss: 0.0004227 +2025-03-25 18:09:12,685 Epoch 622/2000 +2025-03-25 18:11:46,891 Current Learning Rate: 0.0000295596 +2025-03-25 18:11:46,892 Train Loss: 0.0003425, Val Loss: 0.0004228 +2025-03-25 18:11:46,892 Epoch 623/2000 +2025-03-25 18:14:20,054 Current Learning Rate: 0.0000322780 +2025-03-25 18:14:20,054 Train Loss: 0.0003426, Val Loss: 0.0004229 +2025-03-25 18:14:20,055 Epoch 624/2000 +2025-03-25 18:16:52,778 Current Learning Rate: 0.0000351118 +2025-03-25 18:16:52,779 Train Loss: 0.0003427, Val Loss: 0.0004230 +2025-03-25 18:16:52,779 Epoch 625/2000 +2025-03-25 18:19:25,618 Current Learning Rate: 0.0000380602 +2025-03-25 18:19:25,618 Train Loss: 0.0003428, Val Loss: 0.0004231 +2025-03-25 18:19:25,618 Epoch 626/2000 +2025-03-25 18:22:00,008 Current Learning Rate: 0.0000411227 +2025-03-25 18:22:00,008 Train Loss: 0.0003430, Val Loss: 0.0004232 +2025-03-25 18:22:00,008 Epoch 627/2000 +2025-03-25 18:24:33,833 Current Learning Rate: 0.0000442984 +2025-03-25 18:24:33,834 Train Loss: 0.0003431, Val Loss: 0.0004234 +2025-03-25 18:24:33,834 Epoch 628/2000 +2025-03-25 18:27:08,170 Current Learning Rate: 0.0000475865 +2025-03-25 18:27:08,171 Train Loss: 0.0003432, Val Loss: 0.0004236 +2025-03-25 18:27:08,171 Epoch 629/2000 +2025-03-25 18:29:42,801 Current Learning Rate: 0.0000509862 +2025-03-25 18:29:42,802 Train Loss: 0.0003434, Val Loss: 0.0004237 +2025-03-25 18:29:42,802 Epoch 630/2000 +2025-03-25 18:32:16,869 Current Learning 
Rate: 0.0000544967 +2025-03-25 18:32:16,869 Train Loss: 0.0003435, Val Loss: 0.0004238 +2025-03-25 18:32:16,869 Epoch 631/2000 +2025-03-25 18:34:51,063 Current Learning Rate: 0.0000581172 +2025-03-25 18:34:51,064 Train Loss: 0.0003437, Val Loss: 0.0004239 +2025-03-25 18:34:51,064 Epoch 632/2000 +2025-03-25 18:37:25,220 Current Learning Rate: 0.0000618467 +2025-03-25 18:37:25,220 Train Loss: 0.0003438, Val Loss: 0.0004239 +2025-03-25 18:37:25,220 Epoch 633/2000 +2025-03-25 18:39:58,120 Current Learning Rate: 0.0000656842 +2025-03-25 18:39:58,120 Train Loss: 0.0003440, Val Loss: 0.0004240 +2025-03-25 18:39:58,120 Epoch 634/2000 +2025-03-25 18:42:30,946 Current Learning Rate: 0.0000696290 +2025-03-25 18:42:30,947 Train Loss: 0.0003441, Val Loss: 0.0004241 +2025-03-25 18:42:30,947 Epoch 635/2000 +2025-03-25 18:45:04,147 Current Learning Rate: 0.0000736799 +2025-03-25 18:45:04,148 Train Loss: 0.0003443, Val Loss: 0.0004242 +2025-03-25 18:45:04,148 Epoch 636/2000 +2025-03-25 18:47:37,379 Current Learning Rate: 0.0000778360 +2025-03-25 18:47:37,379 Train Loss: 0.0003444, Val Loss: 0.0004243 +2025-03-25 18:47:37,379 Epoch 637/2000 +2025-03-25 18:50:11,329 Current Learning Rate: 0.0000820963 +2025-03-25 18:50:11,330 Train Loss: 0.0003446, Val Loss: 0.0004245 +2025-03-25 18:50:11,330 Epoch 638/2000 +2025-03-25 18:52:44,621 Current Learning Rate: 0.0000864597 +2025-03-25 18:52:44,622 Train Loss: 0.0003448, Val Loss: 0.0004247 +2025-03-25 18:52:44,622 Epoch 639/2000 +2025-03-25 18:55:18,740 Current Learning Rate: 0.0000909251 +2025-03-25 18:55:18,740 Train Loss: 0.0003449, Val Loss: 0.0004248 +2025-03-25 18:55:18,741 Epoch 640/2000 +2025-03-25 18:57:52,908 Current Learning Rate: 0.0000954915 +2025-03-25 18:57:52,908 Train Loss: 0.0003451, Val Loss: 0.0004250 +2025-03-25 18:57:52,908 Epoch 641/2000 +2025-03-25 19:00:27,295 Current Learning Rate: 0.0001001577 +2025-03-25 19:00:27,295 Train Loss: 0.0003452, Val Loss: 0.0004252 +2025-03-25 19:00:27,295 Epoch 642/2000 +2025-03-25 
19:03:01,416 Current Learning Rate: 0.0001049225 +2025-03-25 19:03:01,417 Train Loss: 0.0003454, Val Loss: 0.0004255 +2025-03-25 19:03:01,417 Epoch 643/2000 +2025-03-25 19:05:35,455 Current Learning Rate: 0.0001097848 +2025-03-25 19:05:35,456 Train Loss: 0.0003456, Val Loss: 0.0004257 +2025-03-25 19:05:35,456 Epoch 644/2000 +2025-03-25 19:08:09,664 Current Learning Rate: 0.0001147434 +2025-03-25 19:08:09,665 Train Loss: 0.0003458, Val Loss: 0.0004259 +2025-03-25 19:08:09,665 Epoch 645/2000 +2025-03-25 19:10:43,666 Current Learning Rate: 0.0001197970 +2025-03-25 19:10:43,666 Train Loss: 0.0003460, Val Loss: 0.0004262 +2025-03-25 19:10:43,667 Epoch 646/2000 +2025-03-25 19:13:17,575 Current Learning Rate: 0.0001249445 +2025-03-25 19:13:17,575 Train Loss: 0.0003462, Val Loss: 0.0004264 +2025-03-25 19:13:17,575 Epoch 647/2000 +2025-03-25 19:15:51,046 Current Learning Rate: 0.0001301845 +2025-03-25 19:15:51,047 Train Loss: 0.0003464, Val Loss: 0.0004267 +2025-03-25 19:15:51,047 Epoch 648/2000 +2025-03-25 19:18:23,737 Current Learning Rate: 0.0001355157 +2025-03-25 19:18:23,738 Train Loss: 0.0003466, Val Loss: 0.0004270 +2025-03-25 19:18:23,738 Epoch 649/2000 +2025-03-25 19:20:56,433 Current Learning Rate: 0.0001409369 +2025-03-25 19:20:56,434 Train Loss: 0.0003468, Val Loss: 0.0004272 +2025-03-25 19:20:56,434 Epoch 650/2000 +2025-03-25 19:23:30,967 Current Learning Rate: 0.0001464466 +2025-03-25 19:23:30,968 Train Loss: 0.0003470, Val Loss: 0.0004275 +2025-03-25 19:23:30,968 Epoch 651/2000 +2025-03-25 19:26:04,911 Current Learning Rate: 0.0001520436 +2025-03-25 19:26:04,912 Train Loss: 0.0003472, Val Loss: 0.0004278 +2025-03-25 19:26:04,912 Epoch 652/2000 +2025-03-25 19:28:37,997 Current Learning Rate: 0.0001577264 +2025-03-25 19:28:37,998 Train Loss: 0.0003475, Val Loss: 0.0004280 +2025-03-25 19:28:37,998 Epoch 653/2000 +2025-03-25 19:31:10,379 Current Learning Rate: 0.0001634937 +2025-03-25 19:31:10,380 Train Loss: 0.0003477, Val Loss: 0.0004283 +2025-03-25 
19:31:10,380 Epoch 654/2000 +2025-03-25 19:33:44,463 Current Learning Rate: 0.0001693441 +2025-03-25 19:33:44,463 Train Loss: 0.0003479, Val Loss: 0.0004286 +2025-03-25 19:33:44,463 Epoch 655/2000 +2025-03-25 19:36:17,092 Current Learning Rate: 0.0001752760 +2025-03-25 19:36:17,093 Train Loss: 0.0003482, Val Loss: 0.0004289 +2025-03-25 19:36:17,093 Epoch 656/2000 +2025-03-25 19:38:50,411 Current Learning Rate: 0.0001812880 +2025-03-25 19:38:50,412 Train Loss: 0.0003484, Val Loss: 0.0004294 +2025-03-25 19:38:50,412 Epoch 657/2000 +2025-03-25 19:41:24,151 Current Learning Rate: 0.0001873787 +2025-03-25 19:41:24,152 Train Loss: 0.0003487, Val Loss: 0.0004298 +2025-03-25 19:41:24,152 Epoch 658/2000 +2025-03-25 19:43:58,421 Current Learning Rate: 0.0001935465 +2025-03-25 19:43:58,422 Train Loss: 0.0003490, Val Loss: 0.0004303 +2025-03-25 19:43:58,422 Epoch 659/2000 +2025-03-25 19:46:32,503 Current Learning Rate: 0.0001997899 +2025-03-25 19:46:32,503 Train Loss: 0.0003495, Val Loss: 0.0004312 +2025-03-25 19:46:32,503 Epoch 660/2000 +2025-03-25 19:49:05,880 Current Learning Rate: 0.0002061074 +2025-03-25 19:49:05,880 Train Loss: 0.0003503, Val Loss: 0.0004322 +2025-03-25 19:49:05,881 Epoch 661/2000 +2025-03-25 19:51:39,850 Current Learning Rate: 0.0002124974 +2025-03-25 19:51:39,850 Train Loss: 0.0003510, Val Loss: 0.0004312 +2025-03-25 19:51:39,851 Epoch 662/2000 +2025-03-25 19:54:14,171 Current Learning Rate: 0.0002189583 +2025-03-25 19:54:14,172 Train Loss: 0.0003513, Val Loss: 0.0004323 +2025-03-25 19:54:14,172 Epoch 663/2000 +2025-03-25 19:56:48,292 Current Learning Rate: 0.0002254886 +2025-03-25 19:56:48,293 Train Loss: 0.0003514, Val Loss: 0.0004336 +2025-03-25 19:56:48,293 Epoch 664/2000 +2025-03-25 19:59:21,116 Current Learning Rate: 0.0002320866 +2025-03-25 19:59:21,116 Train Loss: 0.0003511, Val Loss: 0.0004334 +2025-03-25 19:59:21,116 Epoch 665/2000 +2025-03-25 20:01:54,595 Current Learning Rate: 0.0002387507 +2025-03-25 20:01:54,595 Train Loss: 0.0003506, Val 
Loss: 0.0004321 +2025-03-25 20:01:54,595 Epoch 666/2000 +2025-03-25 20:04:28,540 Current Learning Rate: 0.0002454793 +2025-03-25 20:04:28,541 Train Loss: 0.0003508, Val Loss: 0.0004319 +2025-03-25 20:04:28,541 Epoch 667/2000 +2025-03-25 20:07:02,954 Current Learning Rate: 0.0002522707 +2025-03-25 20:07:02,954 Train Loss: 0.0003512, Val Loss: 0.0004317 +2025-03-25 20:07:02,954 Epoch 668/2000 +2025-03-25 20:09:36,487 Current Learning Rate: 0.0002591232 +2025-03-25 20:09:36,488 Train Loss: 0.0003516, Val Loss: 0.0004320 +2025-03-25 20:09:36,488 Epoch 669/2000 +2025-03-25 20:12:09,854 Current Learning Rate: 0.0002660351 +2025-03-25 20:12:09,855 Train Loss: 0.0003521, Val Loss: 0.0004323 +2025-03-25 20:12:09,855 Epoch 670/2000 +2025-03-25 20:14:42,581 Current Learning Rate: 0.0002730048 +2025-03-25 20:14:42,582 Train Loss: 0.0003532, Val Loss: 0.0004341 +2025-03-25 20:14:42,582 Epoch 671/2000 +2025-03-25 20:17:17,000 Current Learning Rate: 0.0002800304 +2025-03-25 20:17:17,000 Train Loss: 0.0003542, Val Loss: 0.0004350 +2025-03-25 20:17:17,000 Epoch 672/2000 +2025-03-25 20:19:49,434 Current Learning Rate: 0.0002871104 +2025-03-25 20:19:49,435 Train Loss: 0.0003547, Val Loss: 0.0004350 +2025-03-25 20:19:49,435 Epoch 673/2000 +2025-03-25 20:22:21,921 Current Learning Rate: 0.0002942428 +2025-03-25 20:22:21,921 Train Loss: 0.0003549, Val Loss: 0.0004353 +2025-03-25 20:22:21,921 Epoch 674/2000 +2025-03-25 20:24:54,246 Current Learning Rate: 0.0003014261 +2025-03-25 20:24:54,247 Train Loss: 0.0003545, Val Loss: 0.0004348 +2025-03-25 20:24:54,247 Epoch 675/2000 +2025-03-25 20:27:27,479 Current Learning Rate: 0.0003086583 +2025-03-25 20:27:27,480 Train Loss: 0.0003537, Val Loss: 0.0004345 +2025-03-25 20:27:27,480 Epoch 676/2000 +2025-03-25 20:30:00,013 Current Learning Rate: 0.0003159377 +2025-03-25 20:30:00,013 Train Loss: 0.0003539, Val Loss: 0.0004353 +2025-03-25 20:30:00,014 Epoch 677/2000 +2025-03-25 20:32:33,675 Current Learning Rate: 0.0003232626 +2025-03-25 
20:32:33,675 Train Loss: 0.0003545, Val Loss: 0.0004360 +2025-03-25 20:32:33,676 Epoch 678/2000 +2025-03-25 20:35:06,727 Current Learning Rate: 0.0003306310 +2025-03-25 20:35:06,728 Train Loss: 0.0003552, Val Loss: 0.0004367 +2025-03-25 20:35:06,728 Epoch 679/2000 +2025-03-25 20:37:40,317 Current Learning Rate: 0.0003380413 +2025-03-25 20:37:40,318 Train Loss: 0.0003566, Val Loss: 0.0004372 +2025-03-25 20:37:40,318 Epoch 680/2000 +2025-03-25 20:40:13,767 Current Learning Rate: 0.0003454915 +2025-03-25 20:40:13,768 Train Loss: 0.0003577, Val Loss: 0.0004385 +2025-03-25 20:40:13,768 Epoch 681/2000 +2025-03-25 20:42:46,837 Current Learning Rate: 0.0003529798 +2025-03-25 20:42:46,838 Train Loss: 0.0003581, Val Loss: 0.0004386 +2025-03-25 20:42:46,838 Epoch 682/2000 +2025-03-25 20:45:21,020 Current Learning Rate: 0.0003605044 +2025-03-25 20:45:21,021 Train Loss: 0.0003583, Val Loss: 0.0004382 +2025-03-25 20:45:21,021 Epoch 683/2000 +2025-03-25 20:47:54,991 Current Learning Rate: 0.0003680635 +2025-03-25 20:47:54,992 Train Loss: 0.0003586, Val Loss: 0.0004398 +2025-03-25 20:47:54,992 Epoch 684/2000 +2025-03-25 20:50:29,065 Current Learning Rate: 0.0003756551 +2025-03-25 20:50:29,066 Train Loss: 0.0003583, Val Loss: 0.0004393 +2025-03-25 20:50:29,066 Epoch 685/2000 +2025-03-25 20:53:02,169 Current Learning Rate: 0.0003832773 +2025-03-25 20:53:02,169 Train Loss: 0.0003571, Val Loss: 0.0004386 +2025-03-25 20:53:02,170 Epoch 686/2000 +2025-03-25 20:55:35,776 Current Learning Rate: 0.0003909284 +2025-03-25 20:55:35,777 Train Loss: 0.0003576, Val Loss: 0.0004392 +2025-03-25 20:55:35,777 Epoch 687/2000 +2025-03-25 20:58:10,303 Current Learning Rate: 0.0003986064 +2025-03-25 20:58:10,303 Train Loss: 0.0003586, Val Loss: 0.0004404 +2025-03-25 20:58:10,304 Epoch 688/2000 +2025-03-25 21:00:44,476 Current Learning Rate: 0.0004063093 +2025-03-25 21:00:44,477 Train Loss: 0.0003603, Val Loss: 0.0004418 +2025-03-25 21:00:44,477 Epoch 689/2000 +2025-03-25 21:03:17,560 Current Learning 
Rate: 0.0004140354 +2025-03-25 21:03:17,561 Train Loss: 0.0003616, Val Loss: 0.0004426 +2025-03-25 21:03:17,561 Epoch 690/2000 +2025-03-25 21:05:52,037 Current Learning Rate: 0.0004217828 +2025-03-25 21:05:52,040 Train Loss: 0.0003619, Val Loss: 0.0004430 +2025-03-25 21:05:52,042 Epoch 691/2000 +2025-03-25 21:08:24,708 Current Learning Rate: 0.0004295494 +2025-03-25 21:08:24,708 Train Loss: 0.0003629, Val Loss: 0.0004465 +2025-03-25 21:08:24,709 Epoch 692/2000 +2025-03-25 21:10:56,934 Current Learning Rate: 0.0004373334 +2025-03-25 21:10:56,934 Train Loss: 0.0003636, Val Loss: 0.0004442 +2025-03-25 21:10:56,934 Epoch 693/2000 +2025-03-25 21:13:28,979 Current Learning Rate: 0.0004451328 +2025-03-25 21:13:28,980 Train Loss: 0.0003646, Val Loss: 0.0004441 +2025-03-25 21:13:28,981 Epoch 694/2000 +2025-03-25 21:16:02,285 Current Learning Rate: 0.0004529458 +2025-03-25 21:16:02,285 Train Loss: 0.0003644, Val Loss: 0.0004432 +2025-03-25 21:16:02,286 Epoch 695/2000 +2025-03-25 21:18:36,139 Current Learning Rate: 0.0004607705 +2025-03-25 21:18:36,140 Train Loss: 0.0003618, Val Loss: 0.0004424 +2025-03-25 21:18:36,140 Epoch 696/2000 +2025-03-25 21:21:09,341 Current Learning Rate: 0.0004686047 +2025-03-25 21:21:09,342 Train Loss: 0.0003617, Val Loss: 0.0004440 +2025-03-25 21:21:09,342 Epoch 697/2000 +2025-03-25 21:23:42,639 Current Learning Rate: 0.0004764468 +2025-03-25 21:23:42,640 Train Loss: 0.0003635, Val Loss: 0.0004455 +2025-03-25 21:23:42,640 Epoch 698/2000 +2025-03-25 21:26:16,680 Current Learning Rate: 0.0004842946 +2025-03-25 21:26:16,681 Train Loss: 0.0003652, Val Loss: 0.0004459 +2025-03-25 21:26:16,681 Epoch 699/2000 +2025-03-25 21:28:50,875 Current Learning Rate: 0.0004921463 +2025-03-25 21:28:50,876 Train Loss: 0.0003662, Val Loss: 0.0004470 +2025-03-25 21:28:50,876 Epoch 700/2000 +2025-03-25 21:31:24,808 Current Learning Rate: 0.0005000000 +2025-03-25 21:31:24,808 Train Loss: 0.0003674, Val Loss: 0.0004484 +2025-03-25 21:31:24,809 Epoch 701/2000 +2025-03-25 
21:33:58,868 Current Learning Rate: 0.0005078537 +2025-03-25 21:33:58,868 Train Loss: 0.0003681, Val Loss: 0.0004495 +2025-03-25 21:33:58,868 Epoch 702/2000 +2025-03-25 21:36:33,198 Current Learning Rate: 0.0005157054 +2025-03-25 21:36:33,198 Train Loss: 0.0003680, Val Loss: 0.0004526 +2025-03-25 21:36:33,198 Epoch 703/2000 +2025-03-25 21:39:05,883 Current Learning Rate: 0.0005235532 +2025-03-25 21:39:05,883 Train Loss: 0.0003684, Val Loss: 0.0004511 +2025-03-25 21:39:05,883 Epoch 704/2000 +2025-03-25 21:41:38,562 Current Learning Rate: 0.0005313953 +2025-03-25 21:41:38,562 Train Loss: 0.0003701, Val Loss: 0.0004512 +2025-03-25 21:41:38,562 Epoch 705/2000 +2025-03-25 21:44:11,337 Current Learning Rate: 0.0005392295 +2025-03-25 21:44:11,337 Train Loss: 0.0003715, Val Loss: 0.0004523 +2025-03-25 21:44:11,337 Epoch 706/2000 +2025-03-25 21:46:45,289 Current Learning Rate: 0.0005470542 +2025-03-25 21:46:45,289 Train Loss: 0.0003718, Val Loss: 0.0004521 +2025-03-25 21:46:45,290 Epoch 707/2000 +2025-03-25 21:49:18,342 Current Learning Rate: 0.0005548672 +2025-03-25 21:49:18,343 Train Loss: 0.0003677, Val Loss: 0.0004487 +2025-03-25 21:49:18,343 Epoch 708/2000 +2025-03-25 21:51:51,368 Current Learning Rate: 0.0005626666 +2025-03-25 21:51:51,369 Train Loss: 0.0003678, Val Loss: 0.0004500 +2025-03-25 21:51:51,369 Epoch 709/2000 +2025-03-25 21:54:25,321 Current Learning Rate: 0.0005704506 +2025-03-25 21:54:25,321 Train Loss: 0.0003703, Val Loss: 0.0004528 +2025-03-25 21:54:25,322 Epoch 710/2000 +2025-03-25 21:56:59,799 Current Learning Rate: 0.0005782172 +2025-03-25 21:56:59,800 Train Loss: 0.0003721, Val Loss: 0.0004555 +2025-03-25 21:56:59,800 Epoch 711/2000 +2025-03-25 21:59:34,640 Current Learning Rate: 0.0005859646 +2025-03-25 21:59:34,641 Train Loss: 0.0003733, Val Loss: 0.0004533 +2025-03-25 21:59:34,641 Epoch 712/2000 +2025-03-25 22:02:08,972 Current Learning Rate: 0.0005936907 +2025-03-25 22:02:08,973 Train Loss: 0.0003753, Val Loss: 0.0004545 +2025-03-25 
22:02:08,973 Epoch 713/2000 +2025-03-25 22:04:43,368 Current Learning Rate: 0.0006013936 +2025-03-25 22:04:43,369 Train Loss: 0.0003747, Val Loss: 0.0004535 +2025-03-25 22:04:43,369 Epoch 714/2000 +2025-03-25 22:07:17,432 Current Learning Rate: 0.0006090716 +2025-03-25 22:07:17,433 Train Loss: 0.0003721, Val Loss: 0.0004545 +2025-03-25 22:07:17,433 Epoch 715/2000 +2025-03-25 22:09:50,913 Current Learning Rate: 0.0006167227 +2025-03-25 22:09:50,913 Train Loss: 0.0003731, Val Loss: 0.0004579 +2025-03-25 22:09:50,913 Epoch 716/2000 +2025-03-25 22:12:23,508 Current Learning Rate: 0.0006243449 +2025-03-25 22:12:23,508 Train Loss: 0.0003752, Val Loss: 0.0004595 +2025-03-25 22:12:23,508 Epoch 717/2000 +2025-03-25 22:14:57,096 Current Learning Rate: 0.0006319365 +2025-03-25 22:14:57,097 Train Loss: 0.0003777, Val Loss: 0.0004575 +2025-03-25 22:14:57,097 Epoch 718/2000 +2025-03-25 22:17:30,891 Current Learning Rate: 0.0006394956 +2025-03-25 22:17:30,892 Train Loss: 0.0003802, Val Loss: 0.0004588 +2025-03-25 22:17:30,892 Epoch 719/2000 +2025-03-25 22:20:05,129 Current Learning Rate: 0.0006470202 +2025-03-25 22:20:05,129 Train Loss: 0.0003818, Val Loss: 0.0004676 +2025-03-25 22:20:05,130 Epoch 720/2000 +2025-03-25 22:22:39,430 Current Learning Rate: 0.0006545085 +2025-03-25 22:22:39,430 Train Loss: 0.0003818, Val Loss: 0.0004627 +2025-03-25 22:22:39,430 Epoch 721/2000 +2025-03-25 22:25:13,631 Current Learning Rate: 0.0006619587 +2025-03-25 22:25:13,631 Train Loss: 0.0003769, Val Loss: 0.0004566 +2025-03-25 22:25:13,632 Epoch 722/2000 +2025-03-25 22:27:48,324 Current Learning Rate: 0.0006693690 +2025-03-25 22:27:48,324 Train Loss: 0.0003753, Val Loss: 0.0004587 +2025-03-25 22:27:48,325 Epoch 723/2000 +2025-03-25 22:30:22,192 Current Learning Rate: 0.0006767374 +2025-03-25 22:30:22,192 Train Loss: 0.0003783, Val Loss: 0.0004588 +2025-03-25 22:30:22,192 Epoch 724/2000 +2025-03-25 22:32:56,220 Current Learning Rate: 0.0006840623 +2025-03-25 22:32:56,221 Train Loss: 0.0003808, Val 
Loss: 0.0004614 +2025-03-25 22:32:56,221 Epoch 725/2000 +2025-03-25 22:35:29,433 Current Learning Rate: 0.0006913417 +2025-03-25 22:35:29,433 Train Loss: 0.0003825, Val Loss: 0.0004624 +2025-03-25 22:35:29,434 Epoch 726/2000 +2025-03-25 22:38:02,640 Current Learning Rate: 0.0006985739 +2025-03-25 22:38:02,641 Train Loss: 0.0003829, Val Loss: 0.0004620 +2025-03-25 22:38:02,641 Epoch 727/2000 +2025-03-25 22:40:34,668 Current Learning Rate: 0.0007057572 +2025-03-25 22:40:34,668 Train Loss: 0.0003852, Val Loss: 0.0004638 +2025-03-25 22:40:34,668 Epoch 728/2000 +2025-03-25 22:43:07,534 Current Learning Rate: 0.0007128896 +2025-03-25 22:43:07,534 Train Loss: 0.0003803, Val Loss: 0.0004596 +2025-03-25 22:43:07,534 Epoch 729/2000 +2025-03-25 22:45:40,972 Current Learning Rate: 0.0007199696 +2025-03-25 22:45:40,972 Train Loss: 0.0003790, Val Loss: 0.0004660 +2025-03-25 22:45:40,973 Epoch 730/2000 +2025-03-25 22:48:14,559 Current Learning Rate: 0.0007269952 +2025-03-25 22:48:14,560 Train Loss: 0.0003830, Val Loss: 0.0004669 +2025-03-25 22:48:14,560 Epoch 731/2000 +2025-03-25 22:50:48,467 Current Learning Rate: 0.0007339649 +2025-03-25 22:50:48,468 Train Loss: 0.0003860, Val Loss: 0.0004656 +2025-03-25 22:50:48,468 Epoch 732/2000 +2025-03-25 22:53:22,162 Current Learning Rate: 0.0007408768 +2025-03-25 22:53:22,163 Train Loss: 0.0003868, Val Loss: 0.0004689 +2025-03-25 22:53:22,163 Epoch 733/2000 +2025-03-25 22:55:54,894 Current Learning Rate: 0.0007477293 +2025-03-25 22:55:54,895 Train Loss: 0.0003840, Val Loss: 0.0004649 +2025-03-25 22:55:54,895 Epoch 734/2000 +2025-03-25 22:58:28,646 Current Learning Rate: 0.0007545207 +2025-03-25 22:58:28,647 Train Loss: 0.0003828, Val Loss: 0.0004658 +2025-03-25 22:58:28,647 Epoch 735/2000 +2025-03-25 23:01:03,359 Current Learning Rate: 0.0007612493 +2025-03-25 23:01:03,359 Train Loss: 0.0004238, Val Loss: 0.0004723 +2025-03-25 23:01:03,359 Epoch 736/2000 +2025-03-25 23:03:36,751 Current Learning Rate: 0.0007679134 +2025-03-25 
23:03:36,751 Train Loss: 0.0003857, Val Loss: 0.0004650 +2025-03-25 23:03:36,751 Epoch 737/2000 +2025-03-25 23:06:09,363 Current Learning Rate: 0.0007745114 +2025-03-25 23:06:09,364 Train Loss: 0.0003811, Val Loss: 0.0004661 +2025-03-25 23:06:09,364 Epoch 738/2000 +2025-03-25 23:08:43,083 Current Learning Rate: 0.0007810417 +2025-03-25 23:08:43,083 Train Loss: 0.0003842, Val Loss: 0.0004675 +2025-03-25 23:08:43,084 Epoch 739/2000 +2025-03-25 23:11:16,648 Current Learning Rate: 0.0007875026 +2025-03-25 23:11:16,649 Train Loss: 0.0003891, Val Loss: 0.0004714 +2025-03-25 23:11:16,649 Epoch 740/2000 +2025-03-25 23:13:50,871 Current Learning Rate: 0.0007938926 +2025-03-25 23:13:50,871 Train Loss: 0.0003906, Val Loss: 0.0004729 +2025-03-25 23:13:50,872 Epoch 741/2000 +2025-03-25 23:16:24,510 Current Learning Rate: 0.0008002101 +2025-03-25 23:16:24,510 Train Loss: 0.0003919, Val Loss: 0.0004786 +2025-03-25 23:16:24,510 Epoch 742/2000 +2025-03-25 23:18:57,288 Current Learning Rate: 0.0008064535 +2025-03-25 23:18:57,288 Train Loss: 0.0003936, Val Loss: 0.0004806 +2025-03-25 23:18:57,288 Epoch 743/2000 +2025-03-25 23:21:29,910 Current Learning Rate: 0.0008126213 +2025-03-25 23:21:29,910 Train Loss: 0.0003927, Val Loss: 0.0004779 +2025-03-25 23:21:29,910 Epoch 744/2000 +2025-03-25 23:24:03,440 Current Learning Rate: 0.0008187120 +2025-03-25 23:24:03,441 Train Loss: 0.0003871, Val Loss: 0.0004680 +2025-03-25 23:24:03,441 Epoch 745/2000 +2025-03-25 23:26:36,549 Current Learning Rate: 0.0008247240 +2025-03-25 23:26:36,549 Train Loss: 0.0003872, Val Loss: 0.0004712 +2025-03-25 23:26:36,549 Epoch 746/2000 +2025-03-25 23:29:10,116 Current Learning Rate: 0.0008306559 +2025-03-25 23:29:10,116 Train Loss: 0.0003919, Val Loss: 0.0004762 +2025-03-25 23:29:10,116 Epoch 747/2000 +2025-03-25 23:31:43,774 Current Learning Rate: 0.0008365063 +2025-03-25 23:31:43,774 Train Loss: 0.0003936, Val Loss: 0.0004736 +2025-03-25 23:31:43,774 Epoch 748/2000 +2025-03-25 23:34:17,654 Current Learning 
Rate: 0.0008422736 +2025-03-25 23:34:17,654 Train Loss: 0.0003954, Val Loss: 0.0004913 +2025-03-25 23:34:17,654 Epoch 749/2000 +2025-03-25 23:36:52,088 Current Learning Rate: 0.0008479564 +2025-03-25 23:36:52,089 Train Loss: 0.0003954, Val Loss: 0.0004784 +2025-03-25 23:36:52,089 Epoch 750/2000 +2025-03-25 23:39:26,818 Current Learning Rate: 0.0008535534 +2025-03-25 23:39:26,818 Train Loss: 0.0003976, Val Loss: 0.0004818 +2025-03-25 23:39:26,818 Epoch 751/2000 +2025-03-25 23:42:00,994 Current Learning Rate: 0.0008590631 +2025-03-25 23:42:00,994 Train Loss: 0.0004031, Val Loss: 0.0004824 +2025-03-25 23:42:00,994 Epoch 752/2000 +2025-03-25 23:44:35,638 Current Learning Rate: 0.0008644843 +2025-03-25 23:44:35,638 Train Loss: 0.0003970, Val Loss: 0.0004740 +2025-03-25 23:44:35,639 Epoch 753/2000 +2025-03-25 23:47:10,274 Current Learning Rate: 0.0008698155 +2025-03-25 23:47:10,274 Train Loss: 0.0003888, Val Loss: 0.0004716 +2025-03-25 23:47:10,274 Epoch 754/2000 +2025-03-25 23:49:44,450 Current Learning Rate: 0.0008750555 +2025-03-25 23:49:44,451 Train Loss: 0.0003911, Val Loss: 0.0004774 +2025-03-25 23:49:44,451 Epoch 755/2000 +2025-03-25 23:52:18,199 Current Learning Rate: 0.0008802030 +2025-03-25 23:52:18,200 Train Loss: 0.0003950, Val Loss: 0.0004770 +2025-03-25 23:52:18,200 Epoch 756/2000 +2025-03-25 23:54:52,829 Current Learning Rate: 0.0008852566 +2025-03-25 23:54:52,829 Train Loss: 0.0003973, Val Loss: 0.0004781 +2025-03-25 23:54:52,829 Epoch 757/2000 +2025-03-25 23:57:26,272 Current Learning Rate: 0.0008902152 +2025-03-25 23:57:26,272 Train Loss: 0.0003981, Val Loss: 0.0004779 +2025-03-25 23:57:26,272 Epoch 758/2000 +2025-03-25 23:59:59,546 Current Learning Rate: 0.0008950775 +2025-03-25 23:59:59,547 Train Loss: 0.0003942, Val Loss: 0.0004732 +2025-03-25 23:59:59,547 Epoch 759/2000 +2025-03-26 00:02:33,117 Current Learning Rate: 0.0008998423 +2025-03-26 00:02:33,118 Train Loss: 0.0003938, Val Loss: 0.0004739 +2025-03-26 00:02:33,118 Epoch 760/2000 +2025-03-26 
00:05:06,111 Current Learning Rate: 0.0009045085 +2025-03-26 00:05:06,112 Train Loss: 0.0004387, Val Loss: 0.0004858 +2025-03-26 00:05:06,112 Epoch 761/2000 +2025-03-26 00:07:39,260 Current Learning Rate: 0.0009090749 +2025-03-26 00:07:39,260 Train Loss: 0.0003998, Val Loss: 0.0004719 +2025-03-26 00:07:39,260 Epoch 762/2000 +2025-03-26 00:10:13,384 Current Learning Rate: 0.0009135403 +2025-03-26 00:10:13,384 Train Loss: 0.0003899, Val Loss: 0.0004698 +2025-03-26 00:10:13,384 Epoch 763/2000 +2025-03-26 00:12:46,728 Current Learning Rate: 0.0009179037 +2025-03-26 00:12:46,729 Train Loss: 0.0003920, Val Loss: 0.0004766 +2025-03-26 00:12:46,729 Epoch 764/2000 +2025-03-26 00:15:20,087 Current Learning Rate: 0.0009221640 +2025-03-26 00:15:20,087 Train Loss: 0.0003971, Val Loss: 0.0004748 +2025-03-26 00:15:20,087 Epoch 765/2000 +2025-03-26 00:17:54,366 Current Learning Rate: 0.0009263201 +2025-03-26 00:17:54,367 Train Loss: 0.0003995, Val Loss: 0.0004803 +2025-03-26 00:17:54,367 Epoch 766/2000 +2025-03-26 00:20:28,265 Current Learning Rate: 0.0009303710 +2025-03-26 00:20:28,265 Train Loss: 0.0004000, Val Loss: 0.0004772 +2025-03-26 00:20:28,266 Epoch 767/2000 +2025-03-26 00:23:01,834 Current Learning Rate: 0.0009343158 +2025-03-26 00:23:01,835 Train Loss: 0.0004014, Val Loss: 0.0004839 +2025-03-26 00:23:01,835 Epoch 768/2000 +2025-03-26 00:25:35,655 Current Learning Rate: 0.0009381533 +2025-03-26 00:25:35,658 Train Loss: 0.0004034, Val Loss: 0.0004848 +2025-03-26 00:25:35,658 Epoch 769/2000 +2025-03-26 00:28:10,098 Current Learning Rate: 0.0009418828 +2025-03-26 00:28:10,098 Train Loss: 0.0004010, Val Loss: 0.0004786 +2025-03-26 00:28:10,098 Epoch 770/2000 +2025-03-26 00:30:42,697 Current Learning Rate: 0.0009455033 +2025-03-26 00:30:42,697 Train Loss: 0.0003938, Val Loss: 0.0004749 +2025-03-26 00:30:42,697 Epoch 771/2000 +2025-03-26 00:33:15,876 Current Learning Rate: 0.0009490138 +2025-03-26 00:33:15,876 Train Loss: 0.0003966, Val Loss: 0.0004767 +2025-03-26 
00:33:15,876 Epoch 772/2000 +2025-03-26 00:35:48,470 Current Learning Rate: 0.0009524135 +2025-03-26 00:35:48,471 Train Loss: 0.0003984, Val Loss: 0.0004792 +2025-03-26 00:35:48,471 Epoch 773/2000 +2025-03-26 00:38:21,678 Current Learning Rate: 0.0009557016 +2025-03-26 00:38:21,678 Train Loss: 0.0004013, Val Loss: 0.0004836 +2025-03-26 00:38:21,678 Epoch 774/2000 +2025-03-26 00:40:54,769 Current Learning Rate: 0.0009588773 +2025-03-26 00:40:54,770 Train Loss: 0.0004028, Val Loss: 0.0004855 +2025-03-26 00:40:54,770 Epoch 775/2000 +2025-03-26 00:43:28,437 Current Learning Rate: 0.0009619398 +2025-03-26 00:43:28,437 Train Loss: 0.0003972, Val Loss: 0.0004790 +2025-03-26 00:43:28,437 Epoch 776/2000 +2025-03-26 00:46:02,313 Current Learning Rate: 0.0009648882 +2025-03-26 00:46:02,314 Train Loss: 0.0003951, Val Loss: 0.0004817 +2025-03-26 00:46:02,314 Epoch 777/2000 +2025-03-26 00:48:36,110 Current Learning Rate: 0.0009677220 +2025-03-26 00:48:36,110 Train Loss: 0.0004000, Val Loss: 0.0004885 +2025-03-26 00:48:36,110 Epoch 778/2000 +2025-03-26 00:51:09,952 Current Learning Rate: 0.0009704404 +2025-03-26 00:51:09,952 Train Loss: 0.0004019, Val Loss: 0.0004939 +2025-03-26 00:51:09,952 Epoch 779/2000 +2025-03-26 00:53:44,045 Current Learning Rate: 0.0009730427 +2025-03-26 00:53:44,046 Train Loss: 0.0004048, Val Loss: 0.0004909 +2025-03-26 00:53:44,046 Epoch 780/2000 +2025-03-26 00:56:18,268 Current Learning Rate: 0.0009755283 +2025-03-26 00:56:18,269 Train Loss: 0.0004079, Val Loss: 0.0004826 +2025-03-26 00:56:18,269 Epoch 781/2000 +2025-03-26 00:58:52,771 Current Learning Rate: 0.0009778965 +2025-03-26 00:58:52,771 Train Loss: 0.0004061, Val Loss: 0.0004832 +2025-03-26 00:58:52,771 Epoch 782/2000 +2025-03-26 01:01:26,204 Current Learning Rate: 0.0009801468 +2025-03-26 01:01:26,204 Train Loss: 0.0004041, Val Loss: 0.0004843 +2025-03-26 01:01:26,204 Epoch 783/2000 +2025-03-26 01:03:59,485 Current Learning Rate: 0.0009822787 +2025-03-26 01:03:59,485 Train Loss: 0.0003995, Val 
Loss: 0.0004841 +2025-03-26 01:03:59,486 Epoch 784/2000 +2025-03-26 01:06:32,621 Current Learning Rate: 0.0009842916 +2025-03-26 01:06:32,622 Train Loss: 0.0003984, Val Loss: 0.0004864 +2025-03-26 01:06:32,622 Epoch 785/2000 +2025-03-26 01:09:05,900 Current Learning Rate: 0.0009861850 +2025-03-26 01:09:05,900 Train Loss: 0.0003987, Val Loss: 0.0004857 +2025-03-26 01:09:05,901 Epoch 786/2000 +2025-03-26 01:11:38,740 Current Learning Rate: 0.0009879584 +2025-03-26 01:11:38,741 Train Loss: 0.0004015, Val Loss: 0.0004817 +2025-03-26 01:11:38,741 Epoch 787/2000 +2025-03-26 01:14:11,169 Current Learning Rate: 0.0009896114 +2025-03-26 01:14:11,169 Train Loss: 0.0004053, Val Loss: 0.0004910 +2025-03-26 01:14:11,170 Epoch 788/2000 +2025-03-26 01:16:44,088 Current Learning Rate: 0.0009911436 +2025-03-26 01:16:44,089 Train Loss: 0.0004094, Val Loss: 0.0004903 +2025-03-26 01:16:44,089 Epoch 789/2000 +2025-03-26 01:19:16,725 Current Learning Rate: 0.0009925547 +2025-03-26 01:19:16,725 Train Loss: 0.0004102, Val Loss: 0.0004864 +2025-03-26 01:19:16,726 Epoch 790/2000 +2025-03-26 01:21:49,981 Current Learning Rate: 0.0009938442 +2025-03-26 01:21:49,982 Train Loss: 0.0004078, Val Loss: 0.0004839 +2025-03-26 01:21:49,982 Epoch 791/2000 +2025-03-26 01:24:22,502 Current Learning Rate: 0.0009950118 +2025-03-26 01:24:22,503 Train Loss: 0.0003978, Val Loss: 0.0004749 +2025-03-26 01:24:22,503 Epoch 792/2000 +2025-03-26 01:26:55,164 Current Learning Rate: 0.0009960574 +2025-03-26 01:26:55,164 Train Loss: 0.0003920, Val Loss: 0.0004735 +2025-03-26 01:26:55,164 Epoch 793/2000 +2025-03-26 01:29:28,863 Current Learning Rate: 0.0009969805 +2025-03-26 01:29:28,864 Train Loss: 0.0003963, Val Loss: 0.0004827 +2025-03-26 01:29:28,864 Epoch 794/2000 +2025-03-26 01:32:01,881 Current Learning Rate: 0.0009977810 +2025-03-26 01:32:01,881 Train Loss: 0.0003988, Val Loss: 0.0004823 +2025-03-26 01:32:01,882 Epoch 795/2000 +2025-03-26 01:34:34,613 Current Learning Rate: 0.0009984587 +2025-03-26 
01:34:34,613 Train Loss: 0.0004004, Val Loss: 0.0004818 +2025-03-26 01:34:34,614 Epoch 796/2000 +2025-03-26 01:37:08,215 Current Learning Rate: 0.0009990134 +2025-03-26 01:37:08,215 Train Loss: 0.0003998, Val Loss: 0.0004768 +2025-03-26 01:37:08,215 Epoch 797/2000 +2025-03-26 01:39:41,019 Current Learning Rate: 0.0009994449 +2025-03-26 01:39:41,019 Train Loss: 0.0003949, Val Loss: 0.0004760 +2025-03-26 01:39:41,019 Epoch 798/2000 +2025-03-26 01:42:13,359 Current Learning Rate: 0.0009997533 +2025-03-26 01:42:13,360 Train Loss: 0.0003956, Val Loss: 0.0004769 +2025-03-26 01:42:13,360 Epoch 799/2000 +2025-03-26 01:44:46,323 Current Learning Rate: 0.0009999383 +2025-03-26 01:44:46,323 Train Loss: 0.0003992, Val Loss: 0.0004788 +2025-03-26 01:44:46,324 Epoch 800/2000 +2025-03-26 01:47:20,040 Current Learning Rate: 0.0010000000 +2025-03-26 01:47:20,040 Train Loss: 0.0004022, Val Loss: 0.0004781 +2025-03-26 01:47:20,040 Epoch 801/2000 +2025-03-26 01:49:52,815 Current Learning Rate: 0.0009999383 +2025-03-26 01:49:52,815 Train Loss: 0.0004038, Val Loss: 0.0004808 +2025-03-26 01:49:52,815 Epoch 802/2000 +2025-03-26 01:52:25,675 Current Learning Rate: 0.0009997533 +2025-03-26 01:52:25,676 Train Loss: 0.0003986, Val Loss: 0.0004774 +2025-03-26 01:52:25,676 Epoch 803/2000 +2025-03-26 01:55:00,331 Current Learning Rate: 0.0009994449 +2025-03-26 01:55:00,331 Train Loss: 0.0003912, Val Loss: 0.0004716 +2025-03-26 01:55:00,331 Epoch 804/2000 +2025-03-26 01:57:33,904 Current Learning Rate: 0.0009990134 +2025-03-26 01:57:33,904 Train Loss: 0.0003930, Val Loss: 0.0004746 +2025-03-26 01:57:33,905 Epoch 805/2000 +2025-03-26 02:00:08,247 Current Learning Rate: 0.0009984587 +2025-03-26 02:00:08,247 Train Loss: 0.0003955, Val Loss: 0.0004798 +2025-03-26 02:00:08,248 Epoch 806/2000 +2025-03-26 02:02:41,066 Current Learning Rate: 0.0009977810 +2025-03-26 02:02:41,067 Train Loss: 0.0003970, Val Loss: 0.0004776 +2025-03-26 02:02:41,067 Epoch 807/2000 +2025-03-26 02:05:14,882 Current Learning 
Rate: 0.0009969805 +2025-03-26 02:05:14,883 Train Loss: 0.0003975, Val Loss: 0.0004809 +2025-03-26 02:05:14,883 Epoch 808/2000 +2025-03-26 02:07:48,732 Current Learning Rate: 0.0009960574 +2025-03-26 02:07:48,732 Train Loss: 0.0004011, Val Loss: 0.0004806 +2025-03-26 02:07:48,732 Epoch 809/2000 +2025-03-26 02:10:22,311 Current Learning Rate: 0.0009950118 +2025-03-26 02:10:22,312 Train Loss: 0.0004044, Val Loss: 0.0004854 +2025-03-26 02:10:22,312 Epoch 810/2000 +2025-03-26 02:12:55,918 Current Learning Rate: 0.0009938442 +2025-03-26 02:12:55,919 Train Loss: 0.0004051, Val Loss: 0.0004829 +2025-03-26 02:12:55,919 Epoch 811/2000 +2025-03-26 02:15:29,997 Current Learning Rate: 0.0009925547 +2025-03-26 02:15:29,998 Train Loss: 0.0004066, Val Loss: 0.0004813 +2025-03-26 02:15:29,998 Epoch 812/2000 +2025-03-26 02:18:03,670 Current Learning Rate: 0.0009911436 +2025-03-26 02:18:03,671 Train Loss: 0.0004070, Val Loss: 0.0004883 +2025-03-26 02:18:03,671 Epoch 813/2000 +2025-03-26 02:20:38,458 Current Learning Rate: 0.0009896114 +2025-03-26 02:20:38,460 Train Loss: 0.0003969, Val Loss: 0.0004752 +2025-03-26 02:20:38,460 Epoch 814/2000 +2025-03-26 02:23:12,119 Current Learning Rate: 0.0009879584 +2025-03-26 02:23:12,120 Train Loss: 0.0003859, Val Loss: 0.0004665 +2025-03-26 02:23:12,121 Epoch 815/2000 +2025-03-26 02:25:45,060 Current Learning Rate: 0.0009861850 +2025-03-26 02:25:45,060 Train Loss: 0.0003860, Val Loss: 0.0004685 +2025-03-26 02:25:45,060 Epoch 816/2000 +2025-03-26 02:28:17,806 Current Learning Rate: 0.0009842916 +2025-03-26 02:28:17,806 Train Loss: 0.0003912, Val Loss: 0.0004738 +2025-03-26 02:28:17,806 Epoch 817/2000 +2025-03-26 02:30:50,891 Current Learning Rate: 0.0009822787 +2025-03-26 02:30:50,892 Train Loss: 0.0003922, Val Loss: 0.0004716 +2025-03-26 02:30:50,892 Epoch 818/2000 +2025-03-26 02:33:23,117 Current Learning Rate: 0.0009801468 +2025-03-26 02:33:23,117 Train Loss: 0.0003927, Val Loss: 0.0004721 +2025-03-26 02:33:23,117 Epoch 819/2000 +2025-03-26 
02:35:56,332 Current Learning Rate: 0.0009778965 +2025-03-26 02:35:56,333 Train Loss: 0.0003942, Val Loss: 0.0004743 +2025-03-26 02:35:56,333 Epoch 820/2000 +2025-03-26 02:38:30,735 Current Learning Rate: 0.0009755283 +2025-03-26 02:38:30,736 Train Loss: 0.0003949, Val Loss: 0.0004834 +2025-03-26 02:38:30,736 Epoch 821/2000 +2025-03-26 02:41:04,413 Current Learning Rate: 0.0009730427 +2025-03-26 02:41:04,413 Train Loss: 0.0003899, Val Loss: 0.0004677 +2025-03-26 02:41:04,414 Epoch 822/2000 +2025-03-26 02:43:37,416 Current Learning Rate: 0.0009704404 +2025-03-26 02:43:37,417 Train Loss: 0.0003841, Val Loss: 0.0004674 +2025-03-26 02:43:37,417 Epoch 823/2000 +2025-03-26 02:46:11,049 Current Learning Rate: 0.0009677220 +2025-03-26 02:46:11,050 Train Loss: 0.0003868, Val Loss: 0.0004678 +2025-03-26 02:46:11,050 Epoch 824/2000 +2025-03-26 02:48:43,866 Current Learning Rate: 0.0009648882 +2025-03-26 02:48:43,867 Train Loss: 0.0003874, Val Loss: 0.0004686 +2025-03-26 02:48:43,867 Epoch 825/2000 +2025-03-26 02:51:16,778 Current Learning Rate: 0.0009619398 +2025-03-26 02:51:16,779 Train Loss: 0.0003892, Val Loss: 0.0004774 +2025-03-26 02:51:16,779 Epoch 826/2000 +2025-03-26 02:53:49,427 Current Learning Rate: 0.0009588773 +2025-03-26 02:53:49,427 Train Loss: 0.0003912, Val Loss: 0.0004764 +2025-03-26 02:53:49,427 Epoch 827/2000 +2025-03-26 02:56:22,325 Current Learning Rate: 0.0009557016 +2025-03-26 02:56:22,326 Train Loss: 0.0003912, Val Loss: 0.0004750 +2025-03-26 02:56:22,326 Epoch 828/2000 +2025-03-26 02:58:55,829 Current Learning Rate: 0.0009524135 +2025-03-26 02:58:55,829 Train Loss: 0.0003940, Val Loss: 0.0004708 +2025-03-26 02:58:55,829 Epoch 829/2000 +2025-03-26 03:01:30,056 Current Learning Rate: 0.0009490138 +2025-03-26 03:01:30,056 Train Loss: 0.0003981, Val Loss: 0.0004853 +2025-03-26 03:01:30,057 Epoch 830/2000 +2025-03-26 03:04:03,912 Current Learning Rate: 0.0009455033 +2025-03-26 03:04:03,912 Train Loss: 0.0003989, Val Loss: 0.0004859 +2025-03-26 
03:04:03,913 Epoch 831/2000 +2025-03-26 03:06:36,633 Current Learning Rate: 0.0009418828 +2025-03-26 03:06:36,634 Train Loss: 0.0003981, Val Loss: 0.0004752 +2025-03-26 03:06:36,634 Epoch 832/2000 +2025-03-26 03:09:09,951 Current Learning Rate: 0.0009381533 +2025-03-26 03:09:09,952 Train Loss: 0.0003937, Val Loss: 0.0004696 +2025-03-26 03:09:09,952 Epoch 833/2000 +2025-03-26 03:11:42,769 Current Learning Rate: 0.0009343158 +2025-03-26 03:11:42,769 Train Loss: 0.0003822, Val Loss: 0.0004608 +2025-03-26 03:11:42,769 Epoch 834/2000 +2025-03-26 03:14:15,932 Current Learning Rate: 0.0009303710 +2025-03-26 03:14:15,932 Train Loss: 0.0003778, Val Loss: 0.0004619 +2025-03-26 03:14:15,932 Epoch 835/2000 +2025-03-26 03:16:49,045 Current Learning Rate: 0.0009263201 +2025-03-26 03:16:49,046 Train Loss: 0.0003791, Val Loss: 0.0004624 +2025-03-26 03:16:49,046 Epoch 836/2000 +2025-03-26 03:19:23,582 Current Learning Rate: 0.0009221640 +2025-03-26 03:19:23,583 Train Loss: 0.0003816, Val Loss: 0.0004641 +2025-03-26 03:19:23,583 Epoch 837/2000 +2025-03-26 03:21:57,612 Current Learning Rate: 0.0009179037 +2025-03-26 03:21:57,613 Train Loss: 0.0003822, Val Loss: 0.0004683 +2025-03-26 03:21:57,613 Epoch 838/2000 +2025-03-26 03:24:31,306 Current Learning Rate: 0.0009135403 +2025-03-26 03:24:31,306 Train Loss: 0.0003831, Val Loss: 0.0004645 +2025-03-26 03:24:31,306 Epoch 839/2000 +2025-03-26 03:27:05,992 Current Learning Rate: 0.0009090749 +2025-03-26 03:27:05,992 Train Loss: 0.0003845, Val Loss: 0.0004621 +2025-03-26 03:27:05,992 Epoch 840/2000 +2025-03-26 03:29:39,599 Current Learning Rate: 0.0009045085 +2025-03-26 03:29:39,600 Train Loss: 0.0003843, Val Loss: 0.0004619 +2025-03-26 03:29:39,600 Epoch 841/2000 +2025-03-26 03:32:13,193 Current Learning Rate: 0.0008998423 +2025-03-26 03:32:13,193 Train Loss: 0.0003799, Val Loss: 0.0004604 +2025-03-26 03:32:13,194 Epoch 842/2000 +2025-03-26 03:34:47,042 Current Learning Rate: 0.0008950775 +2025-03-26 03:34:47,043 Train Loss: 0.0003754, Val 
Loss: 0.0004627 +2025-03-26 03:34:47,043 Epoch 843/2000 +2025-03-26 03:37:20,195 Current Learning Rate: 0.0008902152 +2025-03-26 03:37:20,196 Train Loss: 0.0003760, Val Loss: 0.0004586 +2025-03-26 03:37:20,196 Epoch 844/2000 +2025-03-26 03:39:53,634 Current Learning Rate: 0.0008852566 +2025-03-26 03:39:53,635 Train Loss: 0.0003779, Val Loss: 0.0004639 +2025-03-26 03:39:53,635 Epoch 845/2000 +2025-03-26 03:42:27,697 Current Learning Rate: 0.0008802030 +2025-03-26 03:42:27,697 Train Loss: 0.0003791, Val Loss: 0.0004609 +2025-03-26 03:42:27,697 Epoch 846/2000 +2025-03-26 03:45:01,175 Current Learning Rate: 0.0008750555 +2025-03-26 03:45:01,176 Train Loss: 0.0003803, Val Loss: 0.0004620 +2025-03-26 03:45:01,176 Epoch 847/2000 +2025-03-26 03:47:35,383 Current Learning Rate: 0.0008698155 +2025-03-26 03:47:35,383 Train Loss: 0.0003846, Val Loss: 0.0004693 +2025-03-26 03:47:35,383 Epoch 848/2000 +2025-03-26 03:50:09,793 Current Learning Rate: 0.0008644843 +2025-03-26 03:50:09,793 Train Loss: 0.0003884, Val Loss: 0.0004751 +2025-03-26 03:50:09,793 Epoch 849/2000 +2025-03-26 03:52:44,124 Current Learning Rate: 0.0008590631 +2025-03-26 03:52:44,125 Train Loss: 0.0003892, Val Loss: 0.0004693 +2025-03-26 03:52:44,125 Epoch 850/2000 +2025-03-26 03:55:17,955 Current Learning Rate: 0.0008535534 +2025-03-26 03:55:17,956 Train Loss: 0.0003913, Val Loss: 0.0004691 +2025-03-26 03:55:17,956 Epoch 851/2000 +2025-03-26 03:57:51,989 Current Learning Rate: 0.0008479564 +2025-03-26 03:57:51,990 Train Loss: 0.0003903, Val Loss: 0.0004666 +2025-03-26 03:57:51,990 Epoch 852/2000 +2025-03-26 04:00:25,334 Current Learning Rate: 0.0008422736 +2025-03-26 04:00:25,334 Train Loss: 0.0003833, Val Loss: 0.0004604 +2025-03-26 04:00:25,335 Epoch 853/2000 +2025-03-26 04:02:58,296 Current Learning Rate: 0.0008365063 +2025-03-26 04:02:58,296 Train Loss: 0.0003731, Val Loss: 0.0004534 +2025-03-26 04:02:58,296 Epoch 854/2000 +2025-03-26 04:05:31,932 Current Learning Rate: 0.0008306559 +2025-03-26 
04:05:31,932 Train Loss: 0.0003686, Val Loss: 0.0004527 +2025-03-26 04:05:31,933 Epoch 855/2000 +2025-03-26 04:08:06,423 Current Learning Rate: 0.0008247240 +2025-03-26 04:08:06,424 Train Loss: 0.0003686, Val Loss: 0.0004529 +2025-03-26 04:08:06,424 Epoch 856/2000 +2025-03-26 04:10:39,954 Current Learning Rate: 0.0008187120 +2025-03-26 04:10:39,955 Train Loss: 0.0003688, Val Loss: 0.0004547 +2025-03-26 04:10:39,955 Epoch 857/2000 +2025-03-26 04:13:13,727 Current Learning Rate: 0.0008126213 +2025-03-26 04:13:13,727 Train Loss: 0.0003703, Val Loss: 0.0004546 +2025-03-26 04:13:13,727 Epoch 858/2000 +2025-03-26 04:15:46,317 Current Learning Rate: 0.0008064535 +2025-03-26 04:15:46,318 Train Loss: 0.0003726, Val Loss: 0.0004535 +2025-03-26 04:15:46,318 Epoch 859/2000 +2025-03-26 04:18:19,248 Current Learning Rate: 0.0008002101 +2025-03-26 04:18:19,248 Train Loss: 0.0003714, Val Loss: 0.0004546 +2025-03-26 04:18:19,249 Epoch 860/2000 +2025-03-26 04:20:51,571 Current Learning Rate: 0.0007938926 +2025-03-26 04:20:51,572 Train Loss: 0.0003720, Val Loss: 0.0004570 +2025-03-26 04:20:51,572 Epoch 861/2000 +2025-03-26 04:23:24,384 Current Learning Rate: 0.0007875026 +2025-03-26 04:23:24,385 Train Loss: 0.0003739, Val Loss: 0.0004582 +2025-03-26 04:23:24,385 Epoch 862/2000 +2025-03-26 04:25:57,320 Current Learning Rate: 0.0007810417 +2025-03-26 04:25:57,320 Train Loss: 0.0003767, Val Loss: 0.0004578 +2025-03-26 04:25:57,321 Epoch 863/2000 +2025-03-26 04:28:31,001 Current Learning Rate: 0.0007745114 +2025-03-26 04:28:31,001 Train Loss: 0.0003770, Val Loss: 0.0004595 +2025-03-26 04:28:31,001 Epoch 864/2000 +2025-03-26 04:31:03,372 Current Learning Rate: 0.0007679134 +2025-03-26 04:31:03,373 Train Loss: 0.0003774, Val Loss: 0.0004589 +2025-03-26 04:31:03,373 Epoch 865/2000 +2025-03-26 04:33:36,271 Current Learning Rate: 0.0007612493 +2025-03-26 04:33:36,272 Train Loss: 0.0003802, Val Loss: 0.0004611 +2025-03-26 04:33:36,272 Epoch 866/2000 +2025-03-26 04:36:09,490 Current Learning 
Rate: 0.0007545207 +2025-03-26 04:36:09,491 Train Loss: 0.0003817, Val Loss: 0.0004579 +2025-03-26 04:36:09,492 Epoch 867/2000 +2025-03-26 04:38:42,483 Current Learning Rate: 0.0007477293 +2025-03-26 04:38:42,483 Train Loss: 0.0003804, Val Loss: 0.0004594 +2025-03-26 04:38:42,483 Epoch 868/2000 +2025-03-26 04:41:15,153 Current Learning Rate: 0.0007408768 +2025-03-26 04:41:15,153 Train Loss: 0.0003750, Val Loss: 0.0004526 +2025-03-26 04:41:15,153 Epoch 869/2000 +2025-03-26 04:43:49,468 Current Learning Rate: 0.0007339649 +2025-03-26 04:43:49,468 Train Loss: 0.0003652, Val Loss: 0.0004455 +2025-03-26 04:43:49,469 Epoch 870/2000 +2025-03-26 04:46:22,754 Current Learning Rate: 0.0007269952 +2025-03-26 04:46:22,755 Train Loss: 0.0003595, Val Loss: 0.0004442 +2025-03-26 04:46:22,756 Epoch 871/2000 +2025-03-26 04:48:55,710 Current Learning Rate: 0.0007199696 +2025-03-26 04:48:55,711 Train Loss: 0.0003584, Val Loss: 0.0004431 +2025-03-26 04:48:55,711 Epoch 872/2000 +2025-03-26 04:51:28,636 Current Learning Rate: 0.0007128896 +2025-03-26 04:51:28,637 Train Loss: 0.0003588, Val Loss: 0.0004428 +2025-03-26 04:51:28,637 Epoch 873/2000 +2025-03-26 04:54:02,020 Current Learning Rate: 0.0007057572 +2025-03-26 04:54:02,021 Train Loss: 0.0003598, Val Loss: 0.0004443 +2025-03-26 04:54:02,021 Epoch 874/2000 +2025-03-26 04:56:35,218 Current Learning Rate: 0.0006985739 +2025-03-26 04:56:35,219 Train Loss: 0.0003615, Val Loss: 0.0004442 +2025-03-26 04:56:35,219 Epoch 875/2000 +2025-03-26 04:59:08,876 Current Learning Rate: 0.0006913417 +2025-03-26 04:59:08,877 Train Loss: 0.0003623, Val Loss: 0.0004485 +2025-03-26 04:59:08,877 Epoch 876/2000 +2025-03-26 05:01:42,726 Current Learning Rate: 0.0006840623 +2025-03-26 05:01:42,727 Train Loss: 0.0003624, Val Loss: 0.0004484 +2025-03-26 05:01:42,727 Epoch 877/2000 +2025-03-26 05:04:15,963 Current Learning Rate: 0.0006767374 +2025-03-26 05:04:15,964 Train Loss: 0.0003636, Val Loss: 0.0004490 +2025-03-26 05:04:15,964 Epoch 878/2000 +2025-03-26 
05:06:49,012 Current Learning Rate: 0.0006693690 +2025-03-26 05:06:49,013 Train Loss: 0.0003642, Val Loss: 0.0004451 +2025-03-26 05:06:49,013 Epoch 879/2000 +2025-03-26 05:09:21,927 Current Learning Rate: 0.0006619587 +2025-03-26 05:09:21,928 Train Loss: 0.0003639, Val Loss: 0.0004468 +2025-03-26 05:09:21,928 Epoch 880/2000 +2025-03-26 05:11:55,632 Current Learning Rate: 0.0006545085 +2025-03-26 05:11:55,633 Train Loss: 0.0003655, Val Loss: 0.0004487 +2025-03-26 05:11:55,633 Epoch 881/2000 +2025-03-26 05:14:28,510 Current Learning Rate: 0.0006470202 +2025-03-26 05:14:28,510 Train Loss: 0.0003683, Val Loss: 0.0004525 +2025-03-26 05:14:28,510 Epoch 882/2000 +2025-03-26 05:17:01,151 Current Learning Rate: 0.0006394956 +2025-03-26 05:17:01,151 Train Loss: 0.0003693, Val Loss: 0.0004515 +2025-03-26 05:17:01,152 Epoch 883/2000 +2025-03-26 05:19:33,571 Current Learning Rate: 0.0006319365 +2025-03-26 05:19:33,572 Train Loss: 0.0003674, Val Loss: 0.0004490 +2025-03-26 05:19:33,572 Epoch 884/2000 +2025-03-26 05:22:06,626 Current Learning Rate: 0.0006243449 +2025-03-26 05:22:06,626 Train Loss: 0.0003621, Val Loss: 0.0004425 +2025-03-26 05:22:06,627 Epoch 885/2000 +2025-03-26 05:24:39,131 Current Learning Rate: 0.0006167227 +2025-03-26 05:24:39,131 Train Loss: 0.0003560, Val Loss: 0.0004376 +2025-03-26 05:24:39,131 Epoch 886/2000 +2025-03-26 05:27:13,490 Current Learning Rate: 0.0006090716 +2025-03-26 05:27:13,491 Train Loss: 0.0003520, Val Loss: 0.0004358 +2025-03-26 05:27:13,491 Epoch 887/2000 +2025-03-26 05:29:46,842 Current Learning Rate: 0.0006013936 +2025-03-26 05:29:46,843 Train Loss: 0.0003506, Val Loss: 0.0004349 +2025-03-26 05:29:46,843 Epoch 888/2000 +2025-03-26 05:32:20,053 Current Learning Rate: 0.0005936907 +2025-03-26 05:32:20,053 Train Loss: 0.0003501, Val Loss: 0.0004344 +2025-03-26 05:32:20,053 Epoch 889/2000 +2025-03-26 05:34:53,227 Current Learning Rate: 0.0005859646 +2025-03-26 05:34:53,228 Train Loss: 0.0003511, Val Loss: 0.0004361 +2025-03-26 
05:34:53,228 Epoch 890/2000 +2025-03-26 05:37:26,656 Current Learning Rate: 0.0005782172 +2025-03-26 05:37:26,656 Train Loss: 0.0003523, Val Loss: 0.0004385 +2025-03-26 05:37:26,657 Epoch 891/2000 +2025-03-26 05:40:00,823 Current Learning Rate: 0.0005704506 +2025-03-26 05:40:00,824 Train Loss: 0.0003527, Val Loss: 0.0004366 +2025-03-26 05:40:00,824 Epoch 892/2000 +2025-03-26 05:42:34,929 Current Learning Rate: 0.0005626666 +2025-03-26 05:42:34,930 Train Loss: 0.0003541, Val Loss: 0.0004356 +2025-03-26 05:42:34,930 Epoch 893/2000 +2025-03-26 05:45:08,662 Current Learning Rate: 0.0005548672 +2025-03-26 05:45:08,662 Train Loss: 0.0003549, Val Loss: 0.0004388 +2025-03-26 05:45:08,663 Epoch 894/2000 +2025-03-26 05:47:41,845 Current Learning Rate: 0.0005470542 +2025-03-26 05:47:41,846 Train Loss: 0.0003562, Val Loss: 0.0004439 +2025-03-26 05:47:41,846 Epoch 895/2000 +2025-03-26 05:50:14,445 Current Learning Rate: 0.0005392295 +2025-03-26 05:50:14,446 Train Loss: 0.0003579, Val Loss: 0.0004430 +2025-03-26 05:50:14,446 Epoch 896/2000 +2025-03-26 05:52:47,227 Current Learning Rate: 0.0005313953 +2025-03-26 05:52:47,228 Train Loss: 0.0003604, Val Loss: 0.0004431 +2025-03-26 05:52:47,228 Epoch 897/2000 +2025-03-26 05:55:19,959 Current Learning Rate: 0.0005235532 +2025-03-26 05:55:19,959 Train Loss: 0.0003609, Val Loss: 0.0004383 +2025-03-26 05:55:19,960 Epoch 898/2000 +2025-03-26 05:57:53,309 Current Learning Rate: 0.0005157054 +2025-03-26 05:57:53,309 Train Loss: 0.0003586, Val Loss: 0.0004350 +2025-03-26 05:57:53,310 Epoch 899/2000 +2025-03-26 06:00:26,712 Current Learning Rate: 0.0005078537 +2025-03-26 06:00:26,712 Train Loss: 0.0003541, Val Loss: 0.0004313 +2025-03-26 06:00:26,712 Epoch 900/2000 +2025-03-26 06:03:00,377 Current Learning Rate: 0.0005000000 +2025-03-26 06:03:00,378 Train Loss: 0.0003490, Val Loss: 0.0004286 +2025-03-26 06:03:00,378 Epoch 901/2000 +2025-03-26 06:05:34,054 Current Learning Rate: 0.0004921463 +2025-03-26 06:05:34,055 Train Loss: 0.0003454, Val 
Loss: 0.0004271 +2025-03-26 06:05:34,055 Epoch 902/2000 +2025-03-26 06:08:07,873 Current Learning Rate: 0.0004842946 +2025-03-26 06:08:07,874 Train Loss: 0.0003435, Val Loss: 0.0004262 +2025-03-26 06:08:07,874 Epoch 903/2000 +2025-03-26 06:10:41,785 Current Learning Rate: 0.0004764468 +2025-03-26 06:10:41,786 Train Loss: 0.0003425, Val Loss: 0.0004261 +2025-03-26 06:10:41,786 Epoch 904/2000 +2025-03-26 06:13:16,134 Current Learning Rate: 0.0004686047 +2025-03-26 06:13:16,135 Train Loss: 0.0003422, Val Loss: 0.0004264 +2025-03-26 06:13:16,135 Epoch 905/2000 +2025-03-26 06:15:50,167 Current Learning Rate: 0.0004607705 +2025-03-26 06:15:50,167 Train Loss: 0.0003431, Val Loss: 0.0004273 +2025-03-26 06:15:50,167 Epoch 906/2000 +2025-03-26 06:18:24,279 Current Learning Rate: 0.0004529458 +2025-03-26 06:18:24,279 Train Loss: 0.0003444, Val Loss: 0.0004272 +2025-03-26 06:18:24,280 Epoch 907/2000 +2025-03-26 06:20:59,114 Current Learning Rate: 0.0004451328 +2025-03-26 06:20:59,115 Train Loss: 0.0003453, Val Loss: 0.0004300 +2025-03-26 06:20:59,115 Epoch 908/2000 +2025-03-26 06:23:33,194 Current Learning Rate: 0.0004373334 +2025-03-26 06:23:33,194 Train Loss: 0.0003464, Val Loss: 0.0004317 +2025-03-26 06:23:33,194 Epoch 909/2000 +2025-03-26 06:26:07,078 Current Learning Rate: 0.0004295494 +2025-03-26 06:26:07,079 Train Loss: 0.0003473, Val Loss: 0.0004320 +2025-03-26 06:26:07,079 Epoch 910/2000 +2025-03-26 06:28:41,394 Current Learning Rate: 0.0004217828 +2025-03-26 06:28:41,395 Train Loss: 0.0003489, Val Loss: 0.0004313 +2025-03-26 06:28:41,395 Epoch 911/2000 +2025-03-26 06:31:15,523 Current Learning Rate: 0.0004140354 +2025-03-26 06:31:15,523 Train Loss: 0.0003498, Val Loss: 0.0004340 +2025-03-26 06:31:15,523 Epoch 912/2000 +2025-03-26 06:33:49,562 Current Learning Rate: 0.0004063093 +2025-03-26 06:33:49,562 Train Loss: 0.0003492, Val Loss: 0.0004312 +2025-03-26 06:33:49,562 Epoch 913/2000 +2025-03-26 06:36:23,882 Current Learning Rate: 0.0003986064 +2025-03-26 
06:36:23,882 Train Loss: 0.0003471, Val Loss: 0.0004275 +2025-03-26 06:36:23,882 Epoch 914/2000 +2025-03-26 06:38:58,747 Current Learning Rate: 0.0003909284 +2025-03-26 06:38:58,747 Train Loss: 0.0003442, Val Loss: 0.0004247 +2025-03-26 06:38:58,747 Epoch 915/2000 +2025-03-26 06:41:33,408 Current Learning Rate: 0.0003832773 +2025-03-26 06:41:33,408 Train Loss: 0.0003413, Val Loss: 0.0004222 +2025-03-26 06:41:33,408 Epoch 916/2000 +2025-03-26 06:44:07,753 Current Learning Rate: 0.0003756551 +2025-03-26 06:44:07,807 Train Loss: 0.0003391, Val Loss: 0.0004206 +2025-03-26 06:44:07,808 Epoch 917/2000 +2025-03-26 06:46:42,054 Current Learning Rate: 0.0003680635 +2025-03-26 06:46:42,109 Train Loss: 0.0003376, Val Loss: 0.0004195 +2025-03-26 06:46:42,109 Epoch 918/2000 +2025-03-26 06:49:16,786 Current Learning Rate: 0.0003605044 +2025-03-26 06:49:16,850 Train Loss: 0.0003366, Val Loss: 0.0004188 +2025-03-26 06:49:16,850 Epoch 919/2000 +2025-03-26 06:51:50,845 Current Learning Rate: 0.0003529798 +2025-03-26 06:51:50,895 Train Loss: 0.0003361, Val Loss: 0.0004188 +2025-03-26 06:51:50,895 Epoch 920/2000 +2025-03-26 06:54:24,834 Current Learning Rate: 0.0003454915 +2025-03-26 06:54:24,834 Train Loss: 0.0003361, Val Loss: 0.0004199 +2025-03-26 06:54:24,834 Epoch 921/2000 +2025-03-26 06:56:59,243 Current Learning Rate: 0.0003380413 +2025-03-26 06:56:59,243 Train Loss: 0.0003374, Val Loss: 0.0004213 +2025-03-26 06:56:59,244 Epoch 922/2000 +2025-03-26 06:59:33,134 Current Learning Rate: 0.0003306310 +2025-03-26 06:59:33,135 Train Loss: 0.0003383, Val Loss: 0.0004218 +2025-03-26 06:59:33,135 Epoch 923/2000 +2025-03-26 07:02:07,512 Current Learning Rate: 0.0003232626 +2025-03-26 07:02:07,512 Train Loss: 0.0003392, Val Loss: 0.0004243 +2025-03-26 07:02:07,512 Epoch 924/2000 +2025-03-26 07:04:41,339 Current Learning Rate: 0.0003159377 +2025-03-26 07:04:41,340 Train Loss: 0.0003408, Val Loss: 0.0004226 +2025-03-26 07:04:41,340 Epoch 925/2000 +2025-03-26 07:07:15,386 Current Learning 
Rate: 0.0003086583 +2025-03-26 07:07:15,386 Train Loss: 0.0003411, Val Loss: 0.0004214 +2025-03-26 07:07:15,386 Epoch 926/2000 +2025-03-26 07:09:48,233 Current Learning Rate: 0.0003014261 +2025-03-26 07:09:48,234 Train Loss: 0.0003396, Val Loss: 0.0004196 +2025-03-26 07:09:48,234 Epoch 927/2000 +2025-03-26 07:12:21,057 Current Learning Rate: 0.0002942428 +2025-03-26 07:12:21,133 Train Loss: 0.0003376, Val Loss: 0.0004180 +2025-03-26 07:12:21,133 Epoch 928/2000 +2025-03-26 07:14:53,764 Current Learning Rate: 0.0002871104 +2025-03-26 07:14:53,818 Train Loss: 0.0003358, Val Loss: 0.0004163 +2025-03-26 07:14:53,818 Epoch 929/2000 +2025-03-26 07:17:28,017 Current Learning Rate: 0.0002800304 +2025-03-26 07:17:28,083 Train Loss: 0.0003342, Val Loss: 0.0004150 +2025-03-26 07:17:28,083 Epoch 930/2000 +2025-03-26 07:20:01,852 Current Learning Rate: 0.0002730048 +2025-03-26 07:20:01,956 Train Loss: 0.0003330, Val Loss: 0.0004141 +2025-03-26 07:20:01,956 Epoch 931/2000 +2025-03-26 07:22:34,634 Current Learning Rate: 0.0002660351 +2025-03-26 07:22:34,700 Train Loss: 0.0003321, Val Loss: 0.0004135 +2025-03-26 07:22:34,700 Epoch 932/2000 +2025-03-26 07:25:07,619 Current Learning Rate: 0.0002591232 +2025-03-26 07:25:07,679 Train Loss: 0.0003316, Val Loss: 0.0004129 +2025-03-26 07:25:07,679 Epoch 933/2000 +2025-03-26 07:27:40,442 Current Learning Rate: 0.0002522707 +2025-03-26 07:27:40,442 Train Loss: 0.0003317, Val Loss: 0.0004146 +2025-03-26 07:27:40,442 Epoch 934/2000 +2025-03-26 07:30:13,269 Current Learning Rate: 0.0002454793 +2025-03-26 07:30:13,270 Train Loss: 0.0003325, Val Loss: 0.0004152 +2025-03-26 07:30:13,270 Epoch 935/2000 +2025-03-26 07:32:46,433 Current Learning Rate: 0.0002387507 +2025-03-26 07:32:46,433 Train Loss: 0.0003332, Val Loss: 0.0004140 +2025-03-26 07:32:46,433 Epoch 936/2000 +2025-03-26 07:35:19,284 Current Learning Rate: 0.0002320866 +2025-03-26 07:35:19,336 Train Loss: 0.0003337, Val Loss: 0.0004117 +2025-03-26 07:35:19,336 Epoch 937/2000 +2025-03-26 
07:37:52,197 Current Learning Rate: 0.0002254886 +2025-03-26 07:37:52,263 Train Loss: 0.0003335, Val Loss: 0.0004113 +2025-03-26 07:37:52,263 Epoch 938/2000 +2025-03-26 07:40:24,688 Current Learning Rate: 0.0002189583 +2025-03-26 07:40:24,757 Train Loss: 0.0003324, Val Loss: 0.0004108 +2025-03-26 07:40:24,757 Epoch 939/2000 +2025-03-26 07:42:58,088 Current Learning Rate: 0.0002124974 +2025-03-26 07:42:58,167 Train Loss: 0.0003312, Val Loss: 0.0004099 +2025-03-26 07:42:58,167 Epoch 940/2000 +2025-03-26 07:45:31,319 Current Learning Rate: 0.0002061074 +2025-03-26 07:45:31,370 Train Loss: 0.0003301, Val Loss: 0.0004089 +2025-03-26 07:45:31,370 Epoch 941/2000 +2025-03-26 07:48:04,393 Current Learning Rate: 0.0001997899 +2025-03-26 07:48:04,450 Train Loss: 0.0003292, Val Loss: 0.0004081 +2025-03-26 07:48:04,451 Epoch 942/2000 +2025-03-26 07:50:36,880 Current Learning Rate: 0.0001935465 +2025-03-26 07:50:36,943 Train Loss: 0.0003285, Val Loss: 0.0004075 +2025-03-26 07:50:36,944 Epoch 943/2000 +2025-03-26 07:53:09,810 Current Learning Rate: 0.0001873787 +2025-03-26 07:53:09,869 Train Loss: 0.0003280, Val Loss: 0.0004071 +2025-03-26 07:53:09,869 Epoch 944/2000 +2025-03-26 07:55:43,161 Current Learning Rate: 0.0001812880 +2025-03-26 07:55:43,212 Train Loss: 0.0003277, Val Loss: 0.0004071 +2025-03-26 07:55:43,212 Epoch 945/2000 +2025-03-26 07:58:15,638 Current Learning Rate: 0.0001752760 +2025-03-26 07:58:15,638 Train Loss: 0.0003277, Val Loss: 0.0004076 +2025-03-26 07:58:15,638 Epoch 946/2000 +2025-03-26 08:00:48,494 Current Learning Rate: 0.0001693441 +2025-03-26 08:00:48,558 Train Loss: 0.0003283, Val Loss: 0.0004071 +2025-03-26 08:00:48,558 Epoch 947/2000 +2025-03-26 08:03:21,716 Current Learning Rate: 0.0001634937 +2025-03-26 08:03:21,775 Train Loss: 0.0003281, Val Loss: 0.0004064 +2025-03-26 08:03:21,775 Epoch 948/2000 +2025-03-26 08:05:54,991 Current Learning Rate: 0.0001577264 +2025-03-26 08:05:55,046 Train Loss: 0.0003272, Val Loss: 0.0004060 +2025-03-26 
08:05:55,046 Epoch 949/2000 +2025-03-26 08:08:28,376 Current Learning Rate: 0.0001520436 +2025-03-26 08:08:28,438 Train Loss: 0.0003263, Val Loss: 0.0004054 +2025-03-26 08:08:28,439 Epoch 950/2000 +2025-03-26 08:11:02,367 Current Learning Rate: 0.0001464466 +2025-03-26 08:11:02,435 Train Loss: 0.0003256, Val Loss: 0.0004048 +2025-03-26 08:11:02,435 Epoch 951/2000 +2025-03-26 08:13:37,015 Current Learning Rate: 0.0001409369 +2025-03-26 08:13:37,075 Train Loss: 0.0003250, Val Loss: 0.0004043 +2025-03-26 08:13:37,075 Epoch 952/2000 +2025-03-26 08:16:10,238 Current Learning Rate: 0.0001355157 +2025-03-26 08:16:10,299 Train Loss: 0.0003246, Val Loss: 0.0004040 +2025-03-26 08:16:10,299 Epoch 953/2000 +2025-03-26 08:18:44,275 Current Learning Rate: 0.0001301845 +2025-03-26 08:18:44,329 Train Loss: 0.0003243, Val Loss: 0.0004038 +2025-03-26 08:18:44,330 Epoch 954/2000 +2025-03-26 08:21:16,569 Current Learning Rate: 0.0001249445 +2025-03-26 08:21:16,624 Train Loss: 0.0003241, Val Loss: 0.0004035 +2025-03-26 08:21:16,624 Epoch 955/2000 +2025-03-26 08:23:49,096 Current Learning Rate: 0.0001197970 +2025-03-26 08:23:49,166 Train Loss: 0.0003239, Val Loss: 0.0004029 +2025-03-26 08:23:49,167 Epoch 956/2000 +2025-03-26 08:26:22,563 Current Learning Rate: 0.0001147434 +2025-03-26 08:26:22,640 Train Loss: 0.0003235, Val Loss: 0.0004027 +2025-03-26 08:26:22,640 Epoch 957/2000 +2025-03-26 08:28:55,891 Current Learning Rate: 0.0001097848 +2025-03-26 08:28:55,892 Train Loss: 0.0003230, Val Loss: 0.0004028 +2025-03-26 08:28:55,892 Epoch 958/2000 +2025-03-26 08:31:29,987 Current Learning Rate: 0.0001049225 +2025-03-26 08:31:29,988 Train Loss: 0.0003225, Val Loss: 0.0004028 +2025-03-26 08:31:29,988 Epoch 959/2000 +2025-03-26 08:34:03,081 Current Learning Rate: 0.0001001577 +2025-03-26 08:34:03,082 Train Loss: 0.0003221, Val Loss: 0.0004029 +2025-03-26 08:34:03,082 Epoch 960/2000 +2025-03-26 08:36:36,476 Current Learning Rate: 0.0000954915 +2025-03-26 08:36:36,529 Train Loss: 0.0003217, Val 
Loss: 0.0004027 +2025-03-26 08:36:36,529 Epoch 961/2000 +2025-03-26 08:39:10,706 Current Learning Rate: 0.0000909251 +2025-03-26 08:39:10,768 Train Loss: 0.0003214, Val Loss: 0.0004024 +2025-03-26 08:39:10,769 Epoch 962/2000 +2025-03-26 08:41:44,473 Current Learning Rate: 0.0000864597 +2025-03-26 08:41:44,535 Train Loss: 0.0003210, Val Loss: 0.0004017 +2025-03-26 08:41:44,535 Epoch 963/2000 +2025-03-26 08:44:18,308 Current Learning Rate: 0.0000820963 +2025-03-26 08:44:18,382 Train Loss: 0.0003207, Val Loss: 0.0004008 +2025-03-26 08:44:18,383 Epoch 964/2000 +2025-03-26 08:46:52,295 Current Learning Rate: 0.0000778360 +2025-03-26 08:46:52,357 Train Loss: 0.0003203, Val Loss: 0.0004001 +2025-03-26 08:46:52,357 Epoch 965/2000 +2025-03-26 08:49:26,177 Current Learning Rate: 0.0000736799 +2025-03-26 08:49:26,233 Train Loss: 0.0003200, Val Loss: 0.0003998 +2025-03-26 08:49:26,233 Epoch 966/2000 +2025-03-26 08:51:59,918 Current Learning Rate: 0.0000696290 +2025-03-26 08:51:59,980 Train Loss: 0.0003196, Val Loss: 0.0003995 +2025-03-26 08:51:59,981 Epoch 967/2000 +2025-03-26 08:54:33,945 Current Learning Rate: 0.0000656842 +2025-03-26 08:54:33,999 Train Loss: 0.0003193, Val Loss: 0.0003993 +2025-03-26 08:54:33,999 Epoch 968/2000 +2025-03-26 08:57:07,336 Current Learning Rate: 0.0000618467 +2025-03-26 08:57:07,401 Train Loss: 0.0003190, Val Loss: 0.0003990 +2025-03-26 08:57:07,401 Epoch 969/2000 +2025-03-26 08:59:41,650 Current Learning Rate: 0.0000581172 +2025-03-26 08:59:41,709 Train Loss: 0.0003187, Val Loss: 0.0003989 +2025-03-26 08:59:41,710 Epoch 970/2000 +2025-03-26 09:02:15,603 Current Learning Rate: 0.0000544967 +2025-03-26 09:02:15,657 Train Loss: 0.0003185, Val Loss: 0.0003987 +2025-03-26 09:02:15,657 Epoch 971/2000 +2025-03-26 09:04:49,481 Current Learning Rate: 0.0000509862 +2025-03-26 09:04:49,541 Train Loss: 0.0003182, Val Loss: 0.0003984 +2025-03-26 09:04:49,541 Epoch 972/2000 +2025-03-26 09:07:23,651 Current Learning Rate: 0.0000475865 +2025-03-26 
09:07:23,739 Train Loss: 0.0003179, Val Loss: 0.0003980 +2025-03-26 09:07:23,740 Epoch 973/2000 +2025-03-26 09:09:57,431 Current Learning Rate: 0.0000442984 +2025-03-26 09:09:57,494 Train Loss: 0.0003176, Val Loss: 0.0003977 +2025-03-26 09:09:57,494 Epoch 974/2000 +2025-03-26 09:12:31,100 Current Learning Rate: 0.0000411227 +2025-03-26 09:12:31,156 Train Loss: 0.0003174, Val Loss: 0.0003975 +2025-03-26 09:12:31,167 Epoch 975/2000 +2025-03-26 09:15:05,053 Current Learning Rate: 0.0000380602 +2025-03-26 09:15:05,111 Train Loss: 0.0003171, Val Loss: 0.0003972 +2025-03-26 09:15:05,112 Epoch 976/2000 +2025-03-26 09:17:39,408 Current Learning Rate: 0.0000351118 +2025-03-26 09:17:39,464 Train Loss: 0.0003169, Val Loss: 0.0003970 +2025-03-26 09:17:39,464 Epoch 977/2000 +2025-03-26 09:20:13,489 Current Learning Rate: 0.0000322780 +2025-03-26 09:20:13,544 Train Loss: 0.0003167, Val Loss: 0.0003968 +2025-03-26 09:20:13,544 Epoch 978/2000 +2025-03-26 09:22:47,272 Current Learning Rate: 0.0000295596 +2025-03-26 09:22:47,337 Train Loss: 0.0003165, Val Loss: 0.0003966 +2025-03-26 09:22:47,337 Epoch 979/2000 +2025-03-26 09:25:21,197 Current Learning Rate: 0.0000269573 +2025-03-26 09:25:21,255 Train Loss: 0.0003162, Val Loss: 0.0003964 +2025-03-26 09:25:21,255 Epoch 980/2000 +2025-03-26 09:27:55,052 Current Learning Rate: 0.0000244717 +2025-03-26 09:27:55,106 Train Loss: 0.0003160, Val Loss: 0.0003962 +2025-03-26 09:27:55,107 Epoch 981/2000 +2025-03-26 09:30:28,254 Current Learning Rate: 0.0000221035 +2025-03-26 09:30:28,314 Train Loss: 0.0003158, Val Loss: 0.0003960 +2025-03-26 09:30:28,315 Epoch 982/2000 +2025-03-26 09:33:02,870 Current Learning Rate: 0.0000198532 +2025-03-26 09:33:02,927 Train Loss: 0.0003156, Val Loss: 0.0003959 +2025-03-26 09:33:02,927 Epoch 983/2000 +2025-03-26 09:35:36,615 Current Learning Rate: 0.0000177213 +2025-03-26 09:35:36,674 Train Loss: 0.0003154, Val Loss: 0.0003957 +2025-03-26 09:35:36,674 Epoch 984/2000 +2025-03-26 09:38:10,926 Current Learning 
Rate: 0.0000157084 +2025-03-26 09:38:10,983 Train Loss: 0.0003153, Val Loss: 0.0003955 +2025-03-26 09:38:10,983 Epoch 985/2000 +2025-03-26 09:40:43,300 Current Learning Rate: 0.0000138150 +2025-03-26 09:40:43,362 Train Loss: 0.0003151, Val Loss: 0.0003953 +2025-03-26 09:40:43,362 Epoch 986/2000 +2025-03-26 09:43:17,160 Current Learning Rate: 0.0000120416 +2025-03-26 09:43:17,221 Train Loss: 0.0003149, Val Loss: 0.0003952 +2025-03-26 09:43:17,222 Epoch 987/2000 +2025-03-26 09:45:50,643 Current Learning Rate: 0.0000103886 +2025-03-26 09:45:50,702 Train Loss: 0.0003148, Val Loss: 0.0003951 +2025-03-26 09:45:50,702 Epoch 988/2000 +2025-03-26 09:48:25,089 Current Learning Rate: 0.0000088564 +2025-03-26 09:48:25,148 Train Loss: 0.0003146, Val Loss: 0.0003950 +2025-03-26 09:48:25,148 Epoch 989/2000 +2025-03-26 09:50:58,752 Current Learning Rate: 0.0000074453 +2025-03-26 09:50:58,809 Train Loss: 0.0003145, Val Loss: 0.0003949 +2025-03-26 09:50:58,809 Epoch 990/2000 +2025-03-26 09:53:32,412 Current Learning Rate: 0.0000061558 +2025-03-26 09:53:32,467 Train Loss: 0.0003144, Val Loss: 0.0003947 +2025-03-26 09:53:32,467 Epoch 991/2000 +2025-03-26 09:56:05,459 Current Learning Rate: 0.0000049882 +2025-03-26 09:56:05,518 Train Loss: 0.0003143, Val Loss: 0.0003946 +2025-03-26 09:56:05,518 Epoch 992/2000 +2025-03-26 09:58:38,942 Current Learning Rate: 0.0000039426 +2025-03-26 09:58:39,023 Train Loss: 0.0003142, Val Loss: 0.0003945 +2025-03-26 09:58:39,023 Epoch 993/2000 +2025-03-26 10:01:11,523 Current Learning Rate: 0.0000030195 +2025-03-26 10:01:11,579 Train Loss: 0.0003141, Val Loss: 0.0003945 +2025-03-26 10:01:11,579 Epoch 994/2000 +2025-03-26 10:03:44,902 Current Learning Rate: 0.0000022190 +2025-03-26 10:03:44,953 Train Loss: 0.0003140, Val Loss: 0.0003944 +2025-03-26 10:03:44,953 Epoch 995/2000 +2025-03-26 10:06:19,093 Current Learning Rate: 0.0000015413 +2025-03-26 10:06:19,149 Train Loss: 0.0003139, Val Loss: 0.0003944 +2025-03-26 10:06:19,149 Epoch 996/2000 +2025-03-26 
10:08:52,782 Current Learning Rate: 0.0000009866 +2025-03-26 10:08:52,857 Train Loss: 0.0003138, Val Loss: 0.0003943 +2025-03-26 10:08:52,857 Epoch 997/2000 +2025-03-26 10:11:26,806 Current Learning Rate: 0.0000005551 +2025-03-26 10:11:26,877 Train Loss: 0.0003138, Val Loss: 0.0003943 +2025-03-26 10:11:26,877 Epoch 998/2000 +2025-03-26 10:14:00,974 Current Learning Rate: 0.0000002467 +2025-03-26 10:14:01,026 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:14:01,026 Epoch 999/2000 +2025-03-26 10:16:34,846 Current Learning Rate: 0.0000000617 +2025-03-26 10:16:34,909 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:16:34,909 Epoch 1000/2000 +2025-03-26 10:19:08,530 Current Learning Rate: 0.0000000000 +2025-03-26 10:19:08,583 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:19:08,583 Epoch 1001/2000 +2025-03-26 10:21:41,853 Current Learning Rate: 0.0000000617 +2025-03-26 10:21:41,853 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:21:41,853 Epoch 1002/2000 +2025-03-26 10:24:16,035 Current Learning Rate: 0.0000002467 +2025-03-26 10:24:16,091 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:24:16,092 Epoch 1003/2000 +2025-03-26 10:26:50,410 Current Learning Rate: 0.0000005551 +2025-03-26 10:26:50,411 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:26:50,411 Epoch 1004/2000 +2025-03-26 10:29:24,075 Current Learning Rate: 0.0000009866 +2025-03-26 10:29:24,076 Train Loss: 0.0003137, Val Loss: 0.0003943 +2025-03-26 10:29:24,076 Epoch 1005/2000 +2025-03-26 10:31:58,179 Current Learning Rate: 0.0000015413 +2025-03-26 10:31:58,180 Train Loss: 0.0003138, Val Loss: 0.0003943 +2025-03-26 10:31:58,180 Epoch 1006/2000 +2025-03-26 10:34:31,960 Current Learning Rate: 0.0000022190 +2025-03-26 10:34:31,961 Train Loss: 0.0003138, Val Loss: 0.0003943 +2025-03-26 10:34:31,961 Epoch 1007/2000 +2025-03-26 10:37:06,083 Current Learning Rate: 0.0000030195 +2025-03-26 10:37:06,083 Train Loss: 0.0003139, Val Loss: 0.0003944 +2025-03-26 
10:37:06,083 Epoch 1008/2000 +2025-03-26 10:39:40,015 Current Learning Rate: 0.0000039426 +2025-03-26 10:39:40,015 Train Loss: 0.0003139, Val Loss: 0.0003944 +2025-03-26 10:39:40,015 Epoch 1009/2000 +2025-03-26 10:42:14,407 Current Learning Rate: 0.0000049882 +2025-03-26 10:42:14,407 Train Loss: 0.0003140, Val Loss: 0.0003944 +2025-03-26 10:42:14,408 Epoch 1010/2000 +2025-03-26 10:44:48,411 Current Learning Rate: 0.0000061558 +2025-03-26 10:44:48,411 Train Loss: 0.0003140, Val Loss: 0.0003945 +2025-03-26 10:44:48,412 Epoch 1011/2000 +2025-03-26 10:47:22,661 Current Learning Rate: 0.0000074453 +2025-03-26 10:47:22,661 Train Loss: 0.0003141, Val Loss: 0.0003945 +2025-03-26 10:47:22,662 Epoch 1012/2000 +2025-03-26 10:49:56,395 Current Learning Rate: 0.0000088564 +2025-03-26 10:49:56,396 Train Loss: 0.0003142, Val Loss: 0.0003946 +2025-03-26 10:49:56,396 Epoch 1013/2000 +2025-03-26 10:52:30,555 Current Learning Rate: 0.0000103886 +2025-03-26 10:52:30,555 Train Loss: 0.0003143, Val Loss: 0.0003948 +2025-03-26 10:52:30,555 Epoch 1014/2000 +2025-03-26 10:55:04,977 Current Learning Rate: 0.0000120416 +2025-03-26 10:55:04,977 Train Loss: 0.0003144, Val Loss: 0.0003948 +2025-03-26 10:55:04,977 Epoch 1015/2000 +2025-03-26 10:57:39,179 Current Learning Rate: 0.0000138150 +2025-03-26 10:57:39,179 Train Loss: 0.0003144, Val Loss: 0.0003949 +2025-03-26 10:57:39,180 Epoch 1016/2000 +2025-03-26 11:00:13,595 Current Learning Rate: 0.0000157084 +2025-03-26 11:00:13,596 Train Loss: 0.0003145, Val Loss: 0.0003950 +2025-03-26 11:00:13,596 Epoch 1017/2000 +2025-03-26 11:02:47,366 Current Learning Rate: 0.0000177213 +2025-03-26 11:02:47,367 Train Loss: 0.0003146, Val Loss: 0.0003951 +2025-03-26 11:02:47,367 Epoch 1018/2000 +2025-03-26 11:05:21,529 Current Learning Rate: 0.0000198532 +2025-03-26 11:05:21,530 Train Loss: 0.0003147, Val Loss: 0.0003952 +2025-03-26 11:05:21,530 Epoch 1019/2000 +2025-03-26 11:07:55,852 Current Learning Rate: 0.0000221035 +2025-03-26 11:07:55,853 Train Loss: 
0.0003148, Val Loss: 0.0003953 +2025-03-26 11:07:55,853 Epoch 1020/2000 +2025-03-26 11:10:29,888 Current Learning Rate: 0.0000244717 +2025-03-26 11:10:29,888 Train Loss: 0.0003149, Val Loss: 0.0003954 +2025-03-26 11:10:29,889 Epoch 1021/2000 +2025-03-26 11:13:03,266 Current Learning Rate: 0.0000269573 +2025-03-26 11:13:03,266 Train Loss: 0.0003150, Val Loss: 0.0003955 +2025-03-26 11:13:03,266 Epoch 1022/2000 +2025-03-26 11:15:37,053 Current Learning Rate: 0.0000295596 +2025-03-26 11:15:37,053 Train Loss: 0.0003151, Val Loss: 0.0003957 +2025-03-26 11:15:37,054 Epoch 1023/2000 +2025-03-26 11:18:10,896 Current Learning Rate: 0.0000322780 +2025-03-26 11:18:10,896 Train Loss: 0.0003152, Val Loss: 0.0003958 +2025-03-26 11:18:10,896 Epoch 1024/2000 +2025-03-26 11:20:44,867 Current Learning Rate: 0.0000351118 +2025-03-26 11:20:44,868 Train Loss: 0.0003153, Val Loss: 0.0003959 +2025-03-26 11:20:44,868 Epoch 1025/2000 +2025-03-26 11:23:18,960 Current Learning Rate: 0.0000380602 +2025-03-26 11:23:18,961 Train Loss: 0.0003154, Val Loss: 0.0003960 +2025-03-26 11:23:18,961 Epoch 1026/2000 +2025-03-26 11:25:53,553 Current Learning Rate: 0.0000411227 +2025-03-26 11:25:53,553 Train Loss: 0.0003155, Val Loss: 0.0003961 +2025-03-26 11:25:53,554 Epoch 1027/2000 +2025-03-26 11:28:27,695 Current Learning Rate: 0.0000442984 +2025-03-26 11:28:27,695 Train Loss: 0.0003156, Val Loss: 0.0003962 +2025-03-26 11:28:27,695 Epoch 1028/2000 +2025-03-26 11:31:01,598 Current Learning Rate: 0.0000475865 +2025-03-26 11:31:01,598 Train Loss: 0.0003157, Val Loss: 0.0003964 +2025-03-26 11:31:01,598 Epoch 1029/2000 +2025-03-26 11:33:35,542 Current Learning Rate: 0.0000509862 +2025-03-26 11:33:35,543 Train Loss: 0.0003159, Val Loss: 0.0003965 +2025-03-26 11:33:35,543 Epoch 1030/2000 +2025-03-26 11:36:09,122 Current Learning Rate: 0.0000544967 +2025-03-26 11:36:09,122 Train Loss: 0.0003160, Val Loss: 0.0003965 +2025-03-26 11:36:09,122 Epoch 1031/2000 +2025-03-26 11:38:42,863 Current Learning Rate: 
0.0000581172 +2025-03-26 11:38:42,863 Train Loss: 0.0003161, Val Loss: 0.0003966 +2025-03-26 11:38:42,863 Epoch 1032/2000 +2025-03-26 11:41:16,720 Current Learning Rate: 0.0000618467 +2025-03-26 11:41:16,720 Train Loss: 0.0003163, Val Loss: 0.0003968 +2025-03-26 11:41:16,720 Epoch 1033/2000 +2025-03-26 11:43:50,468 Current Learning Rate: 0.0000656842 +2025-03-26 11:43:50,468 Train Loss: 0.0003164, Val Loss: 0.0003969 +2025-03-26 11:43:50,468 Epoch 1034/2000 +2025-03-26 11:46:24,318 Current Learning Rate: 0.0000696290 +2025-03-26 11:46:24,319 Train Loss: 0.0003165, Val Loss: 0.0003969 +2025-03-26 11:46:24,319 Epoch 1035/2000 +2025-03-26 11:48:57,333 Current Learning Rate: 0.0000736799 +2025-03-26 11:48:57,333 Train Loss: 0.0003167, Val Loss: 0.0003970 +2025-03-26 11:48:57,333 Epoch 1036/2000 +2025-03-26 11:51:30,792 Current Learning Rate: 0.0000778360 +2025-03-26 11:51:30,792 Train Loss: 0.0003168, Val Loss: 0.0003971 +2025-03-26 11:51:30,793 Epoch 1037/2000 +2025-03-26 11:54:04,557 Current Learning Rate: 0.0000820963 +2025-03-26 11:54:04,558 Train Loss: 0.0003170, Val Loss: 0.0003972 +2025-03-26 11:54:04,558 Epoch 1038/2000 +2025-03-26 11:56:38,064 Current Learning Rate: 0.0000864597 +2025-03-26 11:56:38,064 Train Loss: 0.0003171, Val Loss: 0.0003973 +2025-03-26 11:56:38,064 Epoch 1039/2000 +2025-03-26 11:59:11,650 Current Learning Rate: 0.0000909251 +2025-03-26 11:59:11,651 Train Loss: 0.0003173, Val Loss: 0.0003975 +2025-03-26 11:59:11,651 Epoch 1040/2000 +2025-03-26 12:01:45,713 Current Learning Rate: 0.0000954915 +2025-03-26 12:01:45,713 Train Loss: 0.0003174, Val Loss: 0.0003976 +2025-03-26 12:01:45,713 Epoch 1041/2000 +2025-03-26 12:04:19,274 Current Learning Rate: 0.0001001577 +2025-03-26 12:04:19,274 Train Loss: 0.0003176, Val Loss: 0.0003977 +2025-03-26 12:04:19,274 Epoch 1042/2000 +2025-03-26 12:06:53,288 Current Learning Rate: 0.0001049225 +2025-03-26 12:06:53,288 Train Loss: 0.0003177, Val Loss: 0.0003979 +2025-03-26 12:06:53,289 Epoch 1043/2000 
+2025-03-26 12:09:26,851 Current Learning Rate: 0.0001097848 +2025-03-26 12:09:26,851 Train Loss: 0.0003179, Val Loss: 0.0003981 +2025-03-26 12:09:26,851 Epoch 1044/2000 +2025-03-26 12:12:00,399 Current Learning Rate: 0.0001147434 +2025-03-26 12:12:00,399 Train Loss: 0.0003180, Val Loss: 0.0003982 +2025-03-26 12:12:00,400 Epoch 1045/2000 +2025-03-26 12:14:34,544 Current Learning Rate: 0.0001197970 +2025-03-26 12:14:34,545 Train Loss: 0.0003182, Val Loss: 0.0003985 +2025-03-26 12:14:34,545 Epoch 1046/2000 +2025-03-26 12:17:08,486 Current Learning Rate: 0.0001249445 +2025-03-26 12:17:08,486 Train Loss: 0.0003184, Val Loss: 0.0003987 +2025-03-26 12:17:08,487 Epoch 1047/2000 +2025-03-26 12:19:42,799 Current Learning Rate: 0.0001301845 +2025-03-26 12:19:42,799 Train Loss: 0.0003186, Val Loss: 0.0003989 +2025-03-26 12:19:42,800 Epoch 1048/2000 +2025-03-26 12:22:16,521 Current Learning Rate: 0.0001355157 +2025-03-26 12:22:16,522 Train Loss: 0.0003188, Val Loss: 0.0003992 +2025-03-26 12:22:16,522 Epoch 1049/2000 +2025-03-26 12:24:49,122 Current Learning Rate: 0.0001409369 +2025-03-26 12:24:49,122 Train Loss: 0.0003189, Val Loss: 0.0003994 +2025-03-26 12:24:49,122 Epoch 1050/2000 +2025-03-26 12:27:22,430 Current Learning Rate: 0.0001464466 +2025-03-26 12:27:22,431 Train Loss: 0.0003191, Val Loss: 0.0003996 +2025-03-26 12:27:22,431 Epoch 1051/2000 +2025-03-26 12:29:55,830 Current Learning Rate: 0.0001520436 +2025-03-26 12:29:55,831 Train Loss: 0.0003193, Val Loss: 0.0003999 +2025-03-26 12:29:55,831 Epoch 1052/2000 +2025-03-26 12:32:29,733 Current Learning Rate: 0.0001577264 +2025-03-26 12:32:29,734 Train Loss: 0.0003195, Val Loss: 0.0004002 +2025-03-26 12:32:29,734 Epoch 1053/2000 +2025-03-26 12:35:03,506 Current Learning Rate: 0.0001634937 +2025-03-26 12:35:03,506 Train Loss: 0.0003197, Val Loss: 0.0004005 +2025-03-26 12:35:03,506 Epoch 1054/2000 +2025-03-26 12:37:37,281 Current Learning Rate: 0.0001693441 +2025-03-26 12:37:37,282 Train Loss: 0.0003199, Val Loss: 0.0004007 
+2025-03-26 12:37:37,282 Epoch 1055/2000 +2025-03-26 12:40:10,937 Current Learning Rate: 0.0001752760 +2025-03-26 12:40:10,938 Train Loss: 0.0003201, Val Loss: 0.0004009 +2025-03-26 12:40:10,938 Epoch 1056/2000 +2025-03-26 12:42:43,785 Current Learning Rate: 0.0001812880 +2025-03-26 12:42:43,785 Train Loss: 0.0003203, Val Loss: 0.0004011 +2025-03-26 12:42:43,785 Epoch 1057/2000 +2025-03-26 12:45:16,929 Current Learning Rate: 0.0001873787 +2025-03-26 12:45:16,929 Train Loss: 0.0003206, Val Loss: 0.0004014 +2025-03-26 12:45:16,929 Epoch 1058/2000 +2025-03-26 12:47:50,041 Current Learning Rate: 0.0001935465 +2025-03-26 12:47:50,042 Train Loss: 0.0003208, Val Loss: 0.0004017 +2025-03-26 12:47:50,042 Epoch 1059/2000 +2025-03-26 12:50:22,626 Current Learning Rate: 0.0001997899 +2025-03-26 12:50:22,626 Train Loss: 0.0003210, Val Loss: 0.0004022 +2025-03-26 12:50:22,626 Epoch 1060/2000 +2025-03-26 12:52:55,057 Current Learning Rate: 0.0002061074 +2025-03-26 12:52:55,058 Train Loss: 0.0003214, Val Loss: 0.0004027 +2025-03-26 12:52:55,058 Epoch 1061/2000 +2025-03-26 12:55:28,679 Current Learning Rate: 0.0002124974 +2025-03-26 12:55:28,680 Train Loss: 0.0003219, Val Loss: 0.0004039 +2025-03-26 12:55:28,680 Epoch 1062/2000 +2025-03-26 12:58:03,050 Current Learning Rate: 0.0002189583 +2025-03-26 12:58:03,051 Train Loss: 0.0003227, Val Loss: 0.0004057 +2025-03-26 12:58:03,051 Epoch 1063/2000 +2025-03-26 13:00:36,947 Current Learning Rate: 0.0002254886 +2025-03-26 13:00:36,948 Train Loss: 0.0003229, Val Loss: 0.0004060 +2025-03-26 13:00:36,948 Epoch 1064/2000 +2025-03-26 13:03:10,590 Current Learning Rate: 0.0002320866 +2025-03-26 13:03:10,591 Train Loss: 0.0003230, Val Loss: 0.0004055 +2025-03-26 13:03:10,591 Epoch 1065/2000 +2025-03-26 13:05:43,029 Current Learning Rate: 0.0002387507 +2025-03-26 13:05:43,030 Train Loss: 0.0003227, Val Loss: 0.0004052 +2025-03-26 13:05:43,030 Epoch 1066/2000 +2025-03-26 13:08:16,444 Current Learning Rate: 0.0002454793 +2025-03-26 13:08:16,444 
Train Loss: 0.0003225, Val Loss: 0.0004050 +2025-03-26 13:08:16,445 Epoch 1067/2000 +2025-03-26 13:10:49,266 Current Learning Rate: 0.0002522707 +2025-03-26 13:10:49,267 Train Loss: 0.0003228, Val Loss: 0.0004050 +2025-03-26 13:10:49,267 Epoch 1068/2000 +2025-03-26 13:13:21,668 Current Learning Rate: 0.0002591232 +2025-03-26 13:13:21,668 Train Loss: 0.0003230, Val Loss: 0.0004050 +2025-03-26 13:13:21,669 Epoch 1069/2000 +2025-03-26 13:15:55,734 Current Learning Rate: 0.0002660351 +2025-03-26 13:15:55,735 Train Loss: 0.0003233, Val Loss: 0.0004051 +2025-03-26 13:15:55,735 Epoch 1070/2000 +2025-03-26 13:18:30,139 Current Learning Rate: 0.0002730048 +2025-03-26 13:18:30,139 Train Loss: 0.0003236, Val Loss: 0.0004053 +2025-03-26 13:18:30,140 Epoch 1071/2000 +2025-03-26 13:21:04,053 Current Learning Rate: 0.0002800304 +2025-03-26 13:21:04,054 Train Loss: 0.0003241, Val Loss: 0.0004060 +2025-03-26 13:21:04,054 Epoch 1072/2000 +2025-03-26 13:23:37,954 Current Learning Rate: 0.0002871104 +2025-03-26 13:23:37,955 Train Loss: 0.0003250, Val Loss: 0.0004074 +2025-03-26 13:23:37,955 Epoch 1073/2000 +2025-03-26 13:26:11,243 Current Learning Rate: 0.0002942428 +2025-03-26 13:26:11,243 Train Loss: 0.0003259, Val Loss: 0.0004073 +2025-03-26 13:26:11,244 Epoch 1074/2000 +2025-03-26 13:28:44,106 Current Learning Rate: 0.0003014261 +2025-03-26 13:28:44,106 Train Loss: 0.0003262, Val Loss: 0.0004082 +2025-03-26 13:28:44,107 Epoch 1075/2000 +2025-03-26 13:31:16,720 Current Learning Rate: 0.0003086583 +2025-03-26 13:31:16,721 Train Loss: 0.0003263, Val Loss: 0.0004091 +2025-03-26 13:31:16,721 Epoch 1076/2000 +2025-03-26 13:33:51,314 Current Learning Rate: 0.0003159377 +2025-03-26 13:33:51,315 Train Loss: 0.0003261, Val Loss: 0.0004082 +2025-03-26 13:33:51,315 Epoch 1077/2000 +2025-03-26 13:36:25,038 Current Learning Rate: 0.0003232626 +2025-03-26 13:36:25,039 Train Loss: 0.0003255, Val Loss: 0.0004068 +2025-03-26 13:36:25,039 Epoch 1078/2000 +2025-03-26 13:38:58,994 Current Learning 
Rate: 0.0003306310 +2025-03-26 13:38:58,994 Train Loss: 0.0003255, Val Loss: 0.0004074 +2025-03-26 13:38:58,995 Epoch 1079/2000 +2025-03-26 13:41:32,268 Current Learning Rate: 0.0003380413 +2025-03-26 13:41:32,268 Train Loss: 0.0003259, Val Loss: 0.0004079 +2025-03-26 13:41:32,268 Epoch 1080/2000 +2025-03-26 13:44:04,952 Current Learning Rate: 0.0003454915 +2025-03-26 13:44:04,953 Train Loss: 0.0003264, Val Loss: 0.0004083 +2025-03-26 13:44:04,953 Epoch 1081/2000 +2025-03-26 13:46:38,636 Current Learning Rate: 0.0003529798 +2025-03-26 13:46:38,637 Train Loss: 0.0003271, Val Loss: 0.0004096 +2025-03-26 13:46:38,637 Epoch 1082/2000 +2025-03-26 13:49:12,584 Current Learning Rate: 0.0003605044 +2025-03-26 13:49:12,584 Train Loss: 0.0003284, Val Loss: 0.0004105 +2025-03-26 13:49:12,585 Epoch 1083/2000 +2025-03-26 13:51:46,541 Current Learning Rate: 0.0003680635 +2025-03-26 13:51:46,542 Train Loss: 0.0003290, Val Loss: 0.0004097 +2025-03-26 13:51:46,542 Epoch 1084/2000 +2025-03-26 13:54:20,825 Current Learning Rate: 0.0003756551 +2025-03-26 13:54:20,826 Train Loss: 0.0003296, Val Loss: 0.0004097 +2025-03-26 13:54:20,826 Epoch 1085/2000 +2025-03-26 13:56:55,296 Current Learning Rate: 0.0003832773 +2025-03-26 13:56:55,296 Train Loss: 0.0003301, Val Loss: 0.0004113 +2025-03-26 13:56:55,296 Epoch 1086/2000 +2025-03-26 13:59:29,725 Current Learning Rate: 0.0003909284 +2025-03-26 13:59:29,725 Train Loss: 0.0003300, Val Loss: 0.0004112 +2025-03-26 13:59:29,725 Epoch 1087/2000 +2025-03-26 14:02:04,541 Current Learning Rate: 0.0003986064 +2025-03-26 14:02:04,542 Train Loss: 0.0003293, Val Loss: 0.0004106 +2025-03-26 14:02:04,542 Epoch 1088/2000 +2025-03-26 14:04:39,581 Current Learning Rate: 0.0004063093 +2025-03-26 14:04:39,582 Train Loss: 0.0003285, Val Loss: 0.0004105 +2025-03-26 14:04:39,582 Epoch 1089/2000 +2025-03-26 14:07:13,842 Current Learning Rate: 0.0004140354 +2025-03-26 14:07:13,843 Train Loss: 0.0003288, Val Loss: 0.0004115 +2025-03-26 14:07:13,844 Epoch 1090/2000 
+2025-03-26 14:09:47,688 Current Learning Rate: 0.0004217828 +2025-03-26 14:09:47,689 Train Loss: 0.0003295, Val Loss: 0.0004121 +2025-03-26 14:09:47,689 Epoch 1091/2000 +2025-03-26 14:12:22,911 Current Learning Rate: 0.0004295494 +2025-03-26 14:12:22,911 Train Loss: 0.0003309, Val Loss: 0.0004137 +2025-03-26 14:12:22,911 Epoch 1092/2000 +2025-03-26 14:14:57,218 Current Learning Rate: 0.0004373334 +2025-03-26 14:14:57,218 Train Loss: 0.0003319, Val Loss: 0.0004131 +2025-03-26 14:14:57,219 Epoch 1093/2000 +2025-03-26 14:17:31,321 Current Learning Rate: 0.0004451328 +2025-03-26 14:17:31,321 Train Loss: 0.0003326, Val Loss: 0.0004143 +2025-03-26 14:17:31,325 Epoch 1094/2000 +2025-03-26 14:20:05,309 Current Learning Rate: 0.0004529458 +2025-03-26 14:20:05,310 Train Loss: 0.0003334, Val Loss: 0.0004145 +2025-03-26 14:20:05,310 Epoch 1095/2000 +2025-03-26 14:22:40,098 Current Learning Rate: 0.0004607705 +2025-03-26 14:22:40,099 Train Loss: 0.0003334, Val Loss: 0.0004142 +2025-03-26 14:22:40,100 Epoch 1096/2000 +2025-03-26 14:25:14,569 Current Learning Rate: 0.0004686047 +2025-03-26 14:25:14,569 Train Loss: 0.0003336, Val Loss: 0.0004144 +2025-03-26 14:25:14,570 Epoch 1097/2000 +2025-03-26 14:27:48,374 Current Learning Rate: 0.0004764468 +2025-03-26 14:27:48,374 Train Loss: 0.0003337, Val Loss: 0.0004144 +2025-03-26 14:27:48,374 Epoch 1098/2000 +2025-03-26 14:30:22,616 Current Learning Rate: 0.0004842946 +2025-03-26 14:30:22,616 Train Loss: 0.0003329, Val Loss: 0.0004146 +2025-03-26 14:30:22,616 Epoch 1099/2000 +2025-03-26 14:32:56,109 Current Learning Rate: 0.0004921463 +2025-03-26 14:32:56,110 Train Loss: 0.0003321, Val Loss: 0.0004158 +2025-03-26 14:32:56,110 Epoch 1100/2000 +2025-03-26 14:35:29,109 Current Learning Rate: 0.0005000000 +2025-03-26 14:35:29,110 Train Loss: 0.0003330, Val Loss: 0.0004170 +2025-03-26 14:35:29,110 Epoch 1101/2000 +2025-03-26 14:38:03,027 Current Learning Rate: 0.0005078537 +2025-03-26 14:38:03,027 Train Loss: 0.0003340, Val Loss: 0.0004187 
+2025-03-26 14:38:03,027 Epoch 1102/2000 +2025-03-26 14:40:36,481 Current Learning Rate: 0.0005157054 +2025-03-26 14:40:36,481 Train Loss: 0.0003361, Val Loss: 0.0004184 +2025-03-26 14:40:36,482 Epoch 1103/2000 +2025-03-26 14:43:10,570 Current Learning Rate: 0.0005235532 +2025-03-26 14:43:10,570 Train Loss: 0.0003368, Val Loss: 0.0004189 +2025-03-26 14:43:10,570 Epoch 1104/2000 +2025-03-26 14:45:44,982 Current Learning Rate: 0.0005313953 +2025-03-26 14:45:44,983 Train Loss: 0.0003374, Val Loss: 0.0004189 +2025-03-26 14:45:44,983 Epoch 1105/2000 +2025-03-26 14:48:19,412 Current Learning Rate: 0.0005392295 +2025-03-26 14:48:19,413 Train Loss: 0.0003379, Val Loss: 0.0004200 +2025-03-26 14:48:19,414 Epoch 1106/2000 +2025-03-26 14:50:54,721 Current Learning Rate: 0.0005470542 +2025-03-26 14:50:54,722 Train Loss: 0.0003385, Val Loss: 0.0004217 +2025-03-26 14:50:54,722 Epoch 1107/2000 +2025-03-26 14:53:28,472 Current Learning Rate: 0.0005548672 +2025-03-26 14:53:28,472 Train Loss: 0.0003384, Val Loss: 0.0004220 +2025-03-26 14:53:28,473 Epoch 1108/2000 +2025-03-26 14:56:03,275 Current Learning Rate: 0.0005626666 +2025-03-26 14:56:03,275 Train Loss: 0.0003385, Val Loss: 0.0004203 +2025-03-26 14:56:03,276 Epoch 1109/2000 +2025-03-26 14:58:38,030 Current Learning Rate: 0.0005704506 +2025-03-26 14:58:38,030 Train Loss: 0.0003381, Val Loss: 0.0004207 +2025-03-26 14:58:38,031 Epoch 1110/2000 +2025-03-26 15:01:12,658 Current Learning Rate: 0.0005782172 +2025-03-26 15:01:12,658 Train Loss: 0.0003367, Val Loss: 0.0004207 +2025-03-26 15:01:12,658 Epoch 1111/2000 +2025-03-26 15:03:47,370 Current Learning Rate: 0.0005859646 +2025-03-26 15:03:47,370 Train Loss: 0.0003369, Val Loss: 0.0004208 +2025-03-26 15:03:47,370 Epoch 1112/2000 +2025-03-26 15:06:21,273 Current Learning Rate: 0.0005936907 +2025-03-26 15:06:21,274 Train Loss: 0.0003386, Val Loss: 0.0004234 +2025-03-26 15:06:21,274 Epoch 1113/2000 +2025-03-26 15:08:55,954 Current Learning Rate: 0.0006013936 +2025-03-26 15:08:55,955 
Train Loss: 0.0003407, Val Loss: 0.0004270 +2025-03-26 15:08:55,955 Epoch 1114/2000 +2025-03-26 15:11:29,867 Current Learning Rate: 0.0006090716 +2025-03-26 15:11:29,868 Train Loss: 0.0003417, Val Loss: 0.0004244 +2025-03-26 15:11:29,870 Epoch 1115/2000 +2025-03-26 15:14:05,349 Current Learning Rate: 0.0006167227 +2025-03-26 15:14:05,349 Train Loss: 0.0003424, Val Loss: 0.0004258 +2025-03-26 15:14:05,350 Epoch 1116/2000 +2025-03-26 15:16:39,583 Current Learning Rate: 0.0006243449 +2025-03-26 15:16:39,584 Train Loss: 0.0003425, Val Loss: 0.0004281 +2025-03-26 15:16:39,585 Epoch 1117/2000 +2025-03-26 15:19:13,034 Current Learning Rate: 0.0006319365 +2025-03-26 15:19:13,035 Train Loss: 0.0003432, Val Loss: 0.0004309 +2025-03-26 15:19:13,035 Epoch 1118/2000 +2025-03-26 15:21:47,950 Current Learning Rate: 0.0006394956 +2025-03-26 15:21:47,951 Train Loss: 0.0003449, Val Loss: 0.0004256 +2025-03-26 15:21:47,951 Epoch 1119/2000 +2025-03-26 15:24:22,448 Current Learning Rate: 0.0006470202 +2025-03-26 15:24:22,449 Train Loss: 0.0003449, Val Loss: 0.0004275 +2025-03-26 15:24:22,449 Epoch 1120/2000 +2025-03-26 15:26:56,181 Current Learning Rate: 0.0006545085 +2025-03-26 15:26:56,181 Train Loss: 0.0003434, Val Loss: 0.0004255 +2025-03-26 15:26:56,181 Epoch 1121/2000 +2025-03-26 15:29:30,001 Current Learning Rate: 0.0006619587 +2025-03-26 15:29:30,001 Train Loss: 0.0003410, Val Loss: 0.0004257 +2025-03-26 15:29:30,002 Epoch 1122/2000 +2025-03-26 15:32:04,846 Current Learning Rate: 0.0006693690 +2025-03-26 15:32:04,847 Train Loss: 0.0003431, Val Loss: 0.0004279 +2025-03-26 15:32:04,847 Epoch 1123/2000 +2025-03-26 15:34:38,162 Current Learning Rate: 0.0006767374 +2025-03-26 15:34:38,163 Train Loss: 0.0003443, Val Loss: 0.0004276 +2025-03-26 15:34:38,163 Epoch 1124/2000 +2025-03-26 15:37:12,711 Current Learning Rate: 0.0006840623 +2025-03-26 15:37:12,712 Train Loss: 0.0003458, Val Loss: 0.0004334 +2025-03-26 15:37:12,713 Epoch 1125/2000 +2025-03-26 15:39:46,973 Current Learning 
Rate: 0.0006913417 +2025-03-26 15:39:46,973 Train Loss: 0.0003470, Val Loss: 0.0004304 +2025-03-26 15:39:46,973 Epoch 1126/2000 +2025-03-26 15:42:20,899 Current Learning Rate: 0.0006985739 +2025-03-26 15:42:20,900 Train Loss: 0.0003485, Val Loss: 0.0004360 +2025-03-26 15:42:20,900 Epoch 1127/2000 +2025-03-26 15:44:56,178 Current Learning Rate: 0.0007057572 +2025-03-26 15:44:56,179 Train Loss: 0.0003496, Val Loss: 0.0004311 +2025-03-26 15:44:56,181 Epoch 1128/2000 +2025-03-26 15:47:30,536 Current Learning Rate: 0.0007128896 +2025-03-26 15:47:30,537 Train Loss: 0.0003492, Val Loss: 0.0004310 +2025-03-26 15:47:30,537 Epoch 1129/2000 +2025-03-26 15:50:05,421 Current Learning Rate: 0.0007199696 +2025-03-26 15:50:05,421 Train Loss: 0.0003462, Val Loss: 0.0004273 +2025-03-26 15:50:05,422 Epoch 1130/2000 +2025-03-26 15:52:40,520 Current Learning Rate: 0.0007269952 +2025-03-26 15:52:40,521 Train Loss: 0.0003447, Val Loss: 0.0004291 +2025-03-26 15:52:40,521 Epoch 1131/2000 +2025-03-26 15:55:15,309 Current Learning Rate: 0.0007339649 +2025-03-26 15:55:15,309 Train Loss: 0.0003470, Val Loss: 0.0004293 +2025-03-26 15:55:15,310 Epoch 1132/2000 +2025-03-26 15:57:49,711 Current Learning Rate: 0.0007408768 +2025-03-26 15:57:49,711 Train Loss: 0.0003488, Val Loss: 0.0004310 +2025-03-26 15:57:49,711 Epoch 1133/2000 +2025-03-26 16:00:24,436 Current Learning Rate: 0.0007477293 +2025-03-26 16:00:24,437 Train Loss: 0.0003501, Val Loss: 0.0004359 +2025-03-26 16:00:24,437 Epoch 1134/2000 +2025-03-26 16:02:59,198 Current Learning Rate: 0.0007545207 +2025-03-26 16:02:59,198 Train Loss: 0.0003518, Val Loss: 0.0004329 +2025-03-26 16:02:59,198 Epoch 1135/2000 +2025-03-26 16:05:34,153 Current Learning Rate: 0.0007612493 +2025-03-26 16:05:34,154 Train Loss: 0.0003527, Val Loss: 0.0004346 +2025-03-26 16:05:34,154 Epoch 1136/2000 +2025-03-26 16:08:09,301 Current Learning Rate: 0.0007679134 +2025-03-26 16:08:09,302 Train Loss: 0.0003524, Val Loss: 0.0004352 +2025-03-26 16:08:09,302 Epoch 1137/2000 
+2025-03-26 16:10:43,870 Current Learning Rate: 0.0007745114 +2025-03-26 16:10:43,871 Train Loss: 0.0003516, Val Loss: 0.0004330 +2025-03-26 16:10:43,871 Epoch 1138/2000 +2025-03-26 16:13:18,373 Current Learning Rate: 0.0007810417 +2025-03-26 16:13:18,374 Train Loss: 0.0003533, Val Loss: 0.0004394 +2025-03-26 16:13:18,374 Epoch 1139/2000 +2025-03-26 16:15:52,797 Current Learning Rate: 0.0007875026 +2025-03-26 16:15:52,798 Train Loss: 0.0003551, Val Loss: 0.0004370 +2025-03-26 16:15:52,798 Epoch 1140/2000 +2025-03-26 16:18:27,909 Current Learning Rate: 0.0007938926 +2025-03-26 16:18:27,909 Train Loss: 0.0003546, Val Loss: 0.0004360 +2025-03-26 16:18:27,910 Epoch 1141/2000 +2025-03-26 16:21:02,078 Current Learning Rate: 0.0008002101 +2025-03-26 16:21:02,079 Train Loss: 0.0003510, Val Loss: 0.0004330 +2025-03-26 16:21:02,079 Epoch 1142/2000 +2025-03-26 16:23:36,774 Current Learning Rate: 0.0008064535 +2025-03-26 16:23:36,775 Train Loss: 0.0003508, Val Loss: 0.0004352 +2025-03-26 16:23:36,775 Epoch 1143/2000 +2025-03-26 16:26:10,878 Current Learning Rate: 0.0008126213 +2025-03-26 16:26:10,879 Train Loss: 0.0003537, Val Loss: 0.0004362 +2025-03-26 16:26:10,879 Epoch 1144/2000 +2025-03-26 16:28:44,393 Current Learning Rate: 0.0008187120 +2025-03-26 16:28:44,394 Train Loss: 0.0003551, Val Loss: 0.0004395 +2025-03-26 16:28:44,395 Epoch 1145/2000 +2025-03-26 16:31:19,177 Current Learning Rate: 0.0008247240 +2025-03-26 16:31:19,178 Train Loss: 0.0003559, Val Loss: 0.0004375 +2025-03-26 16:31:19,178 Epoch 1146/2000 +2025-03-26 16:33:52,540 Current Learning Rate: 0.0008306559 +2025-03-26 16:33:52,541 Train Loss: 0.0003579, Val Loss: 0.0004395 +2025-03-26 16:33:52,541 Epoch 1147/2000 +2025-03-26 16:36:28,066 Current Learning Rate: 0.0008365063 +2025-03-26 16:36:28,067 Train Loss: 0.0003572, Val Loss: 0.0004355 +2025-03-26 16:36:28,067 Epoch 1148/2000 +2025-03-26 16:39:03,283 Current Learning Rate: 0.0008422736 +2025-03-26 16:39:03,284 Train Loss: 0.0003530, Val Loss: 0.0004352 
+2025-03-26 16:39:03,284 Epoch 1149/2000 +2025-03-26 16:41:37,974 Current Learning Rate: 0.0008479564 +2025-03-26 16:41:37,974 Train Loss: 0.0003543, Val Loss: 0.0004378 +2025-03-26 16:41:37,974 Epoch 1150/2000 +2025-03-26 16:44:12,502 Current Learning Rate: 0.0008535534 +2025-03-26 16:44:12,503 Train Loss: 0.0003563, Val Loss: 0.0004407 +2025-03-26 16:44:12,504 Epoch 1151/2000 +2025-03-26 16:46:46,817 Current Learning Rate: 0.0008590631 +2025-03-26 16:46:46,818 Train Loss: 0.0003574, Val Loss: 0.0004535 +2025-03-26 16:46:46,818 Epoch 1152/2000 +2025-03-26 16:49:20,081 Current Learning Rate: 0.0008644843 +2025-03-26 16:49:20,082 Train Loss: 0.0003600, Val Loss: 0.0004521 +2025-03-26 16:49:20,082 Epoch 1153/2000 +2025-03-26 16:51:53,659 Current Learning Rate: 0.0008698155 +2025-03-26 16:51:53,660 Train Loss: 0.0003614, Val Loss: 0.0004474 +2025-03-26 16:51:53,660 Epoch 1154/2000 +2025-03-26 16:54:28,110 Current Learning Rate: 0.0008750555 +2025-03-26 16:54:28,110 Train Loss: 0.0003617, Val Loss: 0.0004472 +2025-03-26 16:54:28,111 Epoch 1155/2000 +2025-03-26 16:57:02,485 Current Learning Rate: 0.0008802030 +2025-03-26 16:57:02,486 Train Loss: 0.0003639, Val Loss: 0.0004443 +2025-03-26 16:57:02,486 Epoch 1156/2000 +2025-03-26 16:59:35,964 Current Learning Rate: 0.0008852566 +2025-03-26 16:59:35,965 Train Loss: 0.0003638, Val Loss: 0.0004466 +2025-03-26 16:59:35,965 Epoch 1157/2000 +2025-03-26 17:02:09,257 Current Learning Rate: 0.0008902152 +2025-03-26 17:02:09,258 Train Loss: 0.0003619, Val Loss: 0.0004444 +2025-03-26 17:02:09,258 Epoch 1158/2000 +2025-03-26 17:04:43,234 Current Learning Rate: 0.0008950775 +2025-03-26 17:04:43,235 Train Loss: 0.0003558, Val Loss: 0.0004375 +2025-03-26 17:04:43,235 Epoch 1159/2000 +2025-03-26 17:07:16,434 Current Learning Rate: 0.0008998423 +2025-03-26 17:07:16,435 Train Loss: 0.0003564, Val Loss: 0.0004401 +2025-03-26 17:07:16,435 Epoch 1160/2000 +2025-03-26 17:09:49,564 Current Learning Rate: 0.0009045085 +2025-03-26 17:09:49,564 
Train Loss: 0.0003593, Val Loss: 0.0004414 +2025-03-26 17:09:49,564 Epoch 1161/2000 +2025-03-26 17:12:22,618 Current Learning Rate: 0.0009090749 +2025-03-26 17:12:22,618 Train Loss: 0.0003613, Val Loss: 0.0004415 +2025-03-26 17:12:22,619 Epoch 1162/2000 +2025-03-26 17:14:56,667 Current Learning Rate: 0.0009135403 +2025-03-26 17:14:56,667 Train Loss: 0.0003620, Val Loss: 0.0004428 +2025-03-26 17:14:56,668 Epoch 1163/2000 +2025-03-26 17:17:30,267 Current Learning Rate: 0.0009179037 +2025-03-26 17:17:30,268 Train Loss: 0.0003614, Val Loss: 0.0004413 +2025-03-26 17:17:30,268 Epoch 1164/2000 +2025-03-26 17:20:04,689 Current Learning Rate: 0.0009221640 +2025-03-26 17:20:04,689 Train Loss: 0.0003632, Val Loss: 0.0004453 +2025-03-26 17:20:04,690 Epoch 1165/2000 +2025-03-26 17:22:39,669 Current Learning Rate: 0.0009263201 +2025-03-26 17:22:39,670 Train Loss: 0.0003656, Val Loss: 0.0004466 +2025-03-26 17:22:39,670 Epoch 1166/2000 +2025-03-26 17:25:12,700 Current Learning Rate: 0.0009303710 +2025-03-26 17:25:12,701 Train Loss: 0.0003640, Val Loss: 0.0004459 +2025-03-26 17:25:12,701 Epoch 1167/2000 +2025-03-26 17:27:46,597 Current Learning Rate: 0.0009343158 +2025-03-26 17:27:46,598 Train Loss: 0.0003596, Val Loss: 0.0004407 +2025-03-26 17:27:46,598 Epoch 1168/2000 +2025-03-26 17:30:19,456 Current Learning Rate: 0.0009381533 +2025-03-26 17:30:19,457 Train Loss: 0.0003588, Val Loss: 0.0004434 +2025-03-26 17:30:19,457 Epoch 1169/2000 +2025-03-26 17:32:53,834 Current Learning Rate: 0.0009418828 +2025-03-26 17:32:53,834 Train Loss: 0.0003619, Val Loss: 0.0004436 +2025-03-26 17:32:53,834 Epoch 1170/2000 +2025-03-26 17:35:28,504 Current Learning Rate: 0.0009455033 +2025-03-26 17:35:28,505 Train Loss: 0.0003627, Val Loss: 0.0004422 +2025-03-26 17:35:28,505 Epoch 1171/2000 +2025-03-26 17:38:02,914 Current Learning Rate: 0.0009490138 +2025-03-26 17:38:02,915 Train Loss: 0.0003651, Val Loss: 0.0004423 +2025-03-26 17:38:02,915 Epoch 1172/2000 +2025-03-26 17:40:36,013 Current Learning 
Rate: 0.0009524135 +2025-03-26 17:40:36,014 Train Loss: 0.0003826, Val Loss: 0.0004499 +2025-03-26 17:40:36,014 Epoch 1173/2000 +2025-03-26 17:43:10,378 Current Learning Rate: 0.0009557016 +2025-03-26 17:43:10,379 Train Loss: 0.0003642, Val Loss: 0.0004432 +2025-03-26 17:43:10,379 Epoch 1174/2000 +2025-03-26 17:45:44,289 Current Learning Rate: 0.0009588773 +2025-03-26 17:45:44,290 Train Loss: 0.0003571, Val Loss: 0.0004414 +2025-03-26 17:45:44,290 Epoch 1175/2000 +2025-03-26 17:48:17,524 Current Learning Rate: 0.0009619398 +2025-03-26 17:48:17,525 Train Loss: 0.0003599, Val Loss: 0.0004428 +2025-03-26 17:48:17,525 Epoch 1176/2000 +2025-03-26 17:50:52,278 Current Learning Rate: 0.0009648882 +2025-03-26 17:50:52,278 Train Loss: 0.0003631, Val Loss: 0.0004455 +2025-03-26 17:50:52,279 Epoch 1177/2000 +2025-03-26 17:53:28,438 Current Learning Rate: 0.0009677220 +2025-03-26 17:53:28,439 Train Loss: 0.0003641, Val Loss: 0.0004454 +2025-03-26 17:53:28,439 Epoch 1178/2000 +2025-03-26 17:56:03,208 Current Learning Rate: 0.0009704404 +2025-03-26 17:56:03,209 Train Loss: 0.0003665, Val Loss: 0.0004467 +2025-03-26 17:56:03,209 Epoch 1179/2000 +2025-03-26 17:58:37,399 Current Learning Rate: 0.0009730427 +2025-03-26 17:58:37,399 Train Loss: 0.0003668, Val Loss: 0.0004500 +2025-03-26 17:58:37,400 Epoch 1180/2000 +2025-03-26 18:01:12,398 Current Learning Rate: 0.0009755283 +2025-03-26 18:01:12,399 Train Loss: 0.0003639, Val Loss: 0.0004443 +2025-03-26 18:01:12,399 Epoch 1181/2000 +2025-03-26 18:03:47,435 Current Learning Rate: 0.0009778965 +2025-03-26 18:03:47,436 Train Loss: 0.0003607, Val Loss: 0.0004469 +2025-03-26 18:03:47,436 Epoch 1182/2000 +2025-03-26 18:06:21,894 Current Learning Rate: 0.0009801468 +2025-03-26 18:06:21,894 Train Loss: 0.0003635, Val Loss: 0.0004514 +2025-03-26 18:06:21,894 Epoch 1183/2000 +2025-03-26 18:08:56,569 Current Learning Rate: 0.0009822787 +2025-03-26 18:08:56,570 Train Loss: 0.0003660, Val Loss: 0.0004465 +2025-03-26 18:08:56,570 Epoch 1184/2000 
+2025-03-26 18:11:31,194 Current Learning Rate: 0.0009842916 +2025-03-26 18:11:31,195 Train Loss: 0.0003680, Val Loss: 0.0004476 +2025-03-26 18:11:31,195 Epoch 1185/2000 +2025-03-26 18:14:06,135 Current Learning Rate: 0.0009861850 +2025-03-26 18:14:06,136 Train Loss: 0.0003699, Val Loss: 0.0004567 +2025-03-26 18:14:06,136 Epoch 1186/2000 +2025-03-26 18:16:38,929 Current Learning Rate: 0.0009879584 +2025-03-26 18:16:38,929 Train Loss: 0.0003675, Val Loss: 0.0004483 +2025-03-26 18:16:38,930 Epoch 1187/2000 +2025-03-26 18:19:13,037 Current Learning Rate: 0.0009896114 +2025-03-26 18:19:13,038 Train Loss: 0.0003620, Val Loss: 0.0004458 +2025-03-26 18:19:13,038 Epoch 1188/2000 +2025-03-26 18:21:46,202 Current Learning Rate: 0.0009911436 +2025-03-26 18:21:46,203 Train Loss: 0.0003617, Val Loss: 0.0004437 +2025-03-26 18:21:46,203 Epoch 1189/2000 +2025-03-26 18:24:19,122 Current Learning Rate: 0.0009925547 +2025-03-26 18:24:19,123 Train Loss: 0.0003637, Val Loss: 0.0004441 +2025-03-26 18:24:19,123 Epoch 1190/2000 +2025-03-26 18:26:52,172 Current Learning Rate: 0.0009938442 +2025-03-26 18:26:52,173 Train Loss: 0.0003649, Val Loss: 0.0004459 +2025-03-26 18:26:52,173 Epoch 1191/2000 +2025-03-26 18:29:27,082 Current Learning Rate: 0.0009950118 +2025-03-26 18:29:27,083 Train Loss: 0.0003667, Val Loss: 0.0004525 +2025-03-26 18:29:27,083 Epoch 1192/2000 +2025-03-26 18:32:02,032 Current Learning Rate: 0.0009960574 +2025-03-26 18:32:02,033 Train Loss: 0.0003683, Val Loss: 0.0004600 +2025-03-26 18:32:02,033 Epoch 1193/2000 +2025-03-26 18:34:36,144 Current Learning Rate: 0.0009969805 +2025-03-26 18:34:36,144 Train Loss: 0.0003710, Val Loss: 0.0004713 +2025-03-26 18:34:36,144 Epoch 1194/2000 +2025-03-26 18:37:08,065 Current Learning Rate: 0.0009977810 +2025-03-26 18:37:08,066 Train Loss: 0.0003738, Val Loss: 0.0004552 +2025-03-26 18:37:08,066 Epoch 1195/2000 +2025-03-26 18:39:42,565 Current Learning Rate: 0.0009984587 +2025-03-26 18:39:42,565 Train Loss: 0.0003756, Val Loss: 0.0004495 
+2025-03-26 18:39:42,565 Epoch 1196/2000 +2025-03-26 18:42:16,526 Current Learning Rate: 0.0009990134 +2025-03-26 18:42:16,526 Train Loss: 0.0003746, Val Loss: 0.0004484 +2025-03-26 18:42:16,527 Epoch 1197/2000 +2025-03-26 18:44:52,017 Current Learning Rate: 0.0009994449 +2025-03-26 18:44:52,018 Train Loss: 0.0003651, Val Loss: 0.0004455 +2025-03-26 18:44:52,018 Epoch 1198/2000 +2025-03-26 18:47:26,382 Current Learning Rate: 0.0009997533 +2025-03-26 18:47:26,383 Train Loss: 0.0003581, Val Loss: 0.0004455 +2025-03-26 18:47:26,383 Epoch 1199/2000 +2025-03-26 18:50:01,215 Current Learning Rate: 0.0009999383 +2025-03-26 18:50:01,215 Train Loss: 0.0003609, Val Loss: 0.0004447 +2025-03-26 18:50:01,215 Epoch 1200/2000 +2025-03-26 18:52:35,818 Current Learning Rate: 0.0010000000 +2025-03-26 18:52:35,819 Train Loss: 0.0003631, Val Loss: 0.0004434 +2025-03-26 18:52:35,819 Epoch 1201/2000 +2025-03-26 18:55:11,030 Current Learning Rate: 0.0009999383 +2025-03-26 18:55:11,030 Train Loss: 0.0003652, Val Loss: 0.0004472 +2025-03-26 18:55:11,031 Epoch 1202/2000 +2025-03-26 18:57:44,375 Current Learning Rate: 0.0009997533 +2025-03-26 18:57:44,376 Train Loss: 0.0003648, Val Loss: 0.0004505 +2025-03-26 18:57:44,376 Epoch 1203/2000 +2025-03-26 19:00:18,671 Current Learning Rate: 0.0009994449 +2025-03-26 19:00:18,672 Train Loss: 0.0003659, Val Loss: 0.0004547 +2025-03-26 19:00:18,672 Epoch 1204/2000 +2025-03-26 19:02:53,320 Current Learning Rate: 0.0009990134 +2025-03-26 19:02:53,321 Train Loss: 0.0003674, Val Loss: 0.0004496 +2025-03-26 19:02:53,321 Epoch 1205/2000 +2025-03-26 19:05:28,169 Current Learning Rate: 0.0009984587 +2025-03-26 19:05:28,170 Train Loss: 0.0003671, Val Loss: 0.0004553 +2025-03-26 19:05:28,170 Epoch 1206/2000 +2025-03-26 19:08:02,563 Current Learning Rate: 0.0009977810 +2025-03-26 19:08:02,564 Train Loss: 0.0003677, Val Loss: 0.0004469 +2025-03-26 19:08:02,564 Epoch 1207/2000 +2025-03-26 19:10:36,936 Current Learning Rate: 0.0009969805 +2025-03-26 19:10:36,938 
Train Loss: 0.0003676, Val Loss: 0.0004526 +2025-03-26 19:10:36,938 Epoch 1208/2000 +2025-03-26 19:13:11,591 Current Learning Rate: 0.0009960574 +2025-03-26 19:13:11,592 Train Loss: 0.0003665, Val Loss: 0.0004476 +2025-03-26 19:13:11,592 Epoch 1209/2000 +2025-03-26 19:15:45,964 Current Learning Rate: 0.0009950118 +2025-03-26 19:15:45,964 Train Loss: 0.0003615, Val Loss: 0.0004432 +2025-03-26 19:15:45,964 Epoch 1210/2000 +2025-03-26 19:18:20,713 Current Learning Rate: 0.0009938442 +2025-03-26 19:18:20,713 Train Loss: 0.0003588, Val Loss: 0.0004463 +2025-03-26 19:18:20,714 Epoch 1211/2000 +2025-03-26 19:20:54,695 Current Learning Rate: 0.0009925547 +2025-03-26 19:20:54,698 Train Loss: 0.0003613, Val Loss: 0.0004416 +2025-03-26 19:20:54,698 Epoch 1212/2000 +2025-03-26 19:23:29,870 Current Learning Rate: 0.0009911436 +2025-03-26 19:23:29,871 Train Loss: 0.0003627, Val Loss: 0.0004457 +2025-03-26 19:23:29,871 Epoch 1213/2000 +2025-03-26 19:26:04,896 Current Learning Rate: 0.0009896114 +2025-03-26 19:26:04,896 Train Loss: 0.0003641, Val Loss: 0.0004510 +2025-03-26 19:26:04,896 Epoch 1214/2000 +2025-03-26 19:28:38,468 Current Learning Rate: 0.0009879584 +2025-03-26 19:28:38,468 Train Loss: 0.0003641, Val Loss: 0.0004471 +2025-03-26 19:28:38,469 Epoch 1215/2000 +2025-03-26 19:31:11,977 Current Learning Rate: 0.0009861850 +2025-03-26 19:31:11,978 Train Loss: 0.0003637, Val Loss: 0.0004501 +2025-03-26 19:31:11,978 Epoch 1216/2000 +2025-03-26 19:33:46,506 Current Learning Rate: 0.0009842916 +2025-03-26 19:33:46,507 Train Loss: 0.0003659, Val Loss: 0.0004511 +2025-03-26 19:33:46,507 Epoch 1217/2000 +2025-03-26 19:36:22,169 Current Learning Rate: 0.0009822787 +2025-03-26 19:36:22,169 Train Loss: 0.0003677, Val Loss: 0.0004452 +2025-03-26 19:36:22,169 Epoch 1218/2000 +2025-03-26 19:38:56,055 Current Learning Rate: 0.0009801468 +2025-03-26 19:38:56,056 Train Loss: 0.0003662, Val Loss: 0.0004480 +2025-03-26 19:38:56,056 Epoch 1219/2000 +2025-03-26 19:41:30,881 Current Learning 
Rate: 0.0009778965 +2025-03-26 19:41:30,882 Train Loss: 0.0003624, Val Loss: 0.0004453 +2025-03-26 19:41:30,882 Epoch 1220/2000 +2025-03-26 19:44:05,722 Current Learning Rate: 0.0009755283 +2025-03-26 19:44:05,723 Train Loss: 0.0003581, Val Loss: 0.0004426 +2025-03-26 19:44:05,723 Epoch 1221/2000 +2025-03-26 19:46:39,842 Current Learning Rate: 0.0009730427 +2025-03-26 19:46:39,843 Train Loss: 0.0003579, Val Loss: 0.0004434 +2025-03-26 19:46:39,843 Epoch 1222/2000 +2025-03-26 19:49:14,345 Current Learning Rate: 0.0009704404 +2025-03-26 19:49:14,346 Train Loss: 0.0003595, Val Loss: 0.0004450 +2025-03-26 19:49:14,346 Epoch 1223/2000 +2025-03-26 19:51:48,947 Current Learning Rate: 0.0009677220 +2025-03-26 19:51:48,948 Train Loss: 0.0003600, Val Loss: 0.0004438 +2025-03-26 19:51:48,948 Epoch 1224/2000 +2025-03-26 19:54:23,175 Current Learning Rate: 0.0009648882 +2025-03-26 19:54:23,176 Train Loss: 0.0003609, Val Loss: 0.0004488 +2025-03-26 19:54:23,176 Epoch 1225/2000 +2025-03-26 19:56:57,684 Current Learning Rate: 0.0009619398 +2025-03-26 19:56:57,684 Train Loss: 0.0003625, Val Loss: 0.0004457 +2025-03-26 19:56:57,685 Epoch 1226/2000 +2025-03-26 19:59:31,044 Current Learning Rate: 0.0009588773 +2025-03-26 19:59:31,045 Train Loss: 0.0003635, Val Loss: 0.0004487 +2025-03-26 19:59:31,046 Epoch 1227/2000 +2025-03-26 20:02:05,316 Current Learning Rate: 0.0009557016 +2025-03-26 20:02:05,317 Train Loss: 0.0003652, Val Loss: 0.0004545 +2025-03-26 20:02:05,317 Epoch 1228/2000 +2025-03-26 20:04:40,419 Current Learning Rate: 0.0009524135 +2025-03-26 20:04:40,420 Train Loss: 0.0003702, Val Loss: 0.0004517 +2025-03-26 20:04:40,420 Epoch 1229/2000 +2025-03-26 20:07:14,399 Current Learning Rate: 0.0009490138 +2025-03-26 20:07:14,399 Train Loss: 0.0003741, Val Loss: 0.0004483 +2025-03-26 20:07:14,399 Epoch 1230/2000 +2025-03-26 20:09:48,992 Current Learning Rate: 0.0009455033 +2025-03-26 20:09:48,993 Train Loss: 0.0003714, Val Loss: 0.0004471 +2025-03-26 20:09:48,993 Epoch 1231/2000 
+2025-03-26 20:12:24,624 Current Learning Rate: 0.0009418828 +2025-03-26 20:12:24,624 Train Loss: 0.0003642, Val Loss: 0.0004390 +2025-03-26 20:12:24,624 Epoch 1232/2000 +2025-03-26 20:14:58,743 Current Learning Rate: 0.0009381533 +2025-03-26 20:14:58,743 Train Loss: 0.0003545, Val Loss: 0.0004355 +2025-03-26 20:14:58,744 Epoch 1233/2000 +2025-03-26 20:17:33,697 Current Learning Rate: 0.0009343158 +2025-03-26 20:17:33,697 Train Loss: 0.0003520, Val Loss: 0.0004378 +2025-03-26 20:17:33,698 Epoch 1234/2000 +2025-03-26 20:20:08,625 Current Learning Rate: 0.0009303710 +2025-03-26 20:20:08,626 Train Loss: 0.0003535, Val Loss: 0.0004380 +2025-03-26 20:20:08,626 Epoch 1235/2000 +2025-03-26 20:22:43,204 Current Learning Rate: 0.0009263201 +2025-03-26 20:22:43,204 Train Loss: 0.0003548, Val Loss: 0.0004407 +2025-03-26 20:22:43,204 Epoch 1236/2000 +2025-03-26 20:25:17,900 Current Learning Rate: 0.0009221640 +2025-03-26 20:25:17,901 Train Loss: 0.0003545, Val Loss: 0.0004424 +2025-03-26 20:25:17,901 Epoch 1237/2000 +2025-03-26 20:27:52,622 Current Learning Rate: 0.0009179037 +2025-03-26 20:27:52,623 Train Loss: 0.0003538, Val Loss: 0.0004377 +2025-03-26 20:27:52,623 Epoch 1238/2000 +2025-03-26 20:30:26,973 Current Learning Rate: 0.0009135403 +2025-03-26 20:30:26,974 Train Loss: 0.0003573, Val Loss: 0.0004413 +2025-03-26 20:30:26,974 Epoch 1239/2000 +2025-03-26 20:33:01,122 Current Learning Rate: 0.0009090749 +2025-03-26 20:33:01,123 Train Loss: 0.0003589, Val Loss: 0.0004428 +2025-03-26 20:33:01,123 Epoch 1240/2000 +2025-03-26 20:35:35,572 Current Learning Rate: 0.0009045085 +2025-03-26 20:35:35,572 Train Loss: 0.0003575, Val Loss: 0.0004405 +2025-03-26 20:35:35,573 Epoch 1241/2000 +2025-03-26 20:38:10,603 Current Learning Rate: 0.0008998423 +2025-03-26 20:38:10,603 Train Loss: 0.0003580, Val Loss: 0.0004433 +2025-03-26 20:38:10,603 Epoch 1242/2000 +2025-03-26 20:40:45,215 Current Learning Rate: 0.0008950775 +2025-03-26 20:40:45,215 Train Loss: 0.0003594, Val Loss: 0.0004390 
+2025-03-26 20:40:45,216 Epoch 1243/2000 +2025-03-26 20:43:18,799 Current Learning Rate: 0.0008902152 +2025-03-26 20:43:18,800 Train Loss: 0.0003613, Val Loss: 0.0004419 +2025-03-26 20:43:18,800 Epoch 1244/2000 +2025-03-26 20:45:54,048 Current Learning Rate: 0.0008852566 +2025-03-26 20:45:54,048 Train Loss: 0.0003634, Val Loss: 0.0004459 +2025-03-26 20:45:54,048 Epoch 1245/2000 +2025-03-26 20:48:28,116 Current Learning Rate: 0.0008802030 +2025-03-26 20:48:28,117 Train Loss: 0.0003648, Val Loss: 0.0004483 +2025-03-26 20:48:28,117 Epoch 1246/2000 +2025-03-26 20:51:02,507 Current Learning Rate: 0.0008750555 +2025-03-26 20:51:02,507 Train Loss: 0.0003660, Val Loss: 0.0004418 +2025-03-26 20:51:02,508 Epoch 1247/2000 +2025-03-26 20:53:35,646 Current Learning Rate: 0.0008698155 +2025-03-26 20:53:35,646 Train Loss: 0.0003637, Val Loss: 0.0004405 +2025-03-26 20:53:35,647 Epoch 1248/2000 +2025-03-26 20:56:09,747 Current Learning Rate: 0.0008644843 +2025-03-26 20:56:09,748 Train Loss: 0.0003575, Val Loss: 0.0004342 +2025-03-26 20:56:09,748 Epoch 1249/2000 +2025-03-26 20:58:43,661 Current Learning Rate: 0.0008590631 +2025-03-26 20:58:43,661 Train Loss: 0.0003484, Val Loss: 0.0004323 +2025-03-26 20:58:43,662 Epoch 1250/2000 +2025-03-26 21:01:18,048 Current Learning Rate: 0.0008535534 +2025-03-26 21:01:18,049 Train Loss: 0.0003457, Val Loss: 0.0004302 +2025-03-26 21:01:18,049 Epoch 1251/2000 +2025-03-26 21:03:53,325 Current Learning Rate: 0.0008479564 +2025-03-26 21:03:53,326 Train Loss: 0.0003465, Val Loss: 0.0004298 +2025-03-26 21:03:53,326 Epoch 1252/2000 +2025-03-26 21:06:27,507 Current Learning Rate: 0.0008422736 +2025-03-26 21:06:27,508 Train Loss: 0.0003478, Val Loss: 0.0004313 +2025-03-26 21:06:27,508 Epoch 1253/2000 +2025-03-26 21:09:02,757 Current Learning Rate: 0.0008365063 +2025-03-26 21:09:02,758 Train Loss: 0.0003487, Val Loss: 0.0004344 +2025-03-26 21:09:02,758 Epoch 1254/2000 +2025-03-26 21:11:35,218 Current Learning Rate: 0.0008306559 +2025-03-26 21:11:35,219 
Train Loss: 0.0003499, Val Loss: 0.0004353 +2025-03-26 21:11:35,219 Epoch 1255/2000 +2025-03-26 21:14:08,215 Current Learning Rate: 0.0008247240 +2025-03-26 21:14:08,215 Train Loss: 0.0003499, Val Loss: 0.0004351 +2025-03-26 21:14:08,216 Epoch 1256/2000 +2025-03-26 21:16:42,097 Current Learning Rate: 0.0008187120 +2025-03-26 21:16:42,097 Train Loss: 0.0003503, Val Loss: 0.0004338 +2025-03-26 21:16:42,098 Epoch 1257/2000 +2025-03-26 21:19:14,473 Current Learning Rate: 0.0008126213 +2025-03-26 21:19:14,474 Train Loss: 0.0003508, Val Loss: 0.0004368 +2025-03-26 21:19:14,474 Epoch 1258/2000 +2025-03-26 21:21:47,542 Current Learning Rate: 0.0008064535 +2025-03-26 21:21:47,543 Train Loss: 0.0003523, Val Loss: 0.0004357 +2025-03-26 21:21:47,543 Epoch 1259/2000 +2025-03-26 21:24:20,454 Current Learning Rate: 0.0008002101 +2025-03-26 21:24:20,454 Train Loss: 0.0003536, Val Loss: 0.0004355 +2025-03-26 21:24:20,455 Epoch 1260/2000 +2025-03-26 21:26:53,226 Current Learning Rate: 0.0007938926 +2025-03-26 21:26:53,227 Train Loss: 0.0003532, Val Loss: 0.0004366 +2025-03-26 21:26:53,227 Epoch 1261/2000 +2025-03-26 21:29:28,272 Current Learning Rate: 0.0007875026 +2025-03-26 21:29:28,272 Train Loss: 0.0003531, Val Loss: 0.0004362 +2025-03-26 21:29:28,272 Epoch 1262/2000 +2025-03-26 21:32:00,991 Current Learning Rate: 0.0007810417 +2025-03-26 21:32:00,992 Train Loss: 0.0003532, Val Loss: 0.0004371 +2025-03-26 21:32:00,993 Epoch 1263/2000 +2025-03-26 21:34:35,953 Current Learning Rate: 0.0007745114 +2025-03-26 21:34:35,954 Train Loss: 0.0003523, Val Loss: 0.0004342 +2025-03-26 21:34:35,954 Epoch 1264/2000 +2025-03-26 21:37:10,105 Current Learning Rate: 0.0007679134 +2025-03-26 21:37:10,106 Train Loss: 0.0003485, Val Loss: 0.0004298 +2025-03-26 21:37:10,106 Epoch 1265/2000 +2025-03-26 21:39:45,253 Current Learning Rate: 0.0007612493 +2025-03-26 21:39:45,254 Train Loss: 0.0003441, Val Loss: 0.0004286 +2025-03-26 21:39:45,254 Epoch 1266/2000 +2025-03-26 21:42:20,161 Current Learning 
Rate: 0.0007545207 +2025-03-26 21:42:20,162 Train Loss: 0.0003419, Val Loss: 0.0004283 +2025-03-26 21:42:20,162 Epoch 1267/2000 +2025-03-26 21:44:54,884 Current Learning Rate: 0.0007477293 +2025-03-26 21:44:54,885 Train Loss: 0.0003415, Val Loss: 0.0004282 +2025-03-26 21:44:54,885 Epoch 1268/2000 +2025-03-26 21:47:30,018 Current Learning Rate: 0.0007408768 +2025-03-26 21:47:30,019 Train Loss: 0.0003415, Val Loss: 0.0004271 +2025-03-26 21:47:30,019 Epoch 1269/2000 +2025-03-26 21:50:05,127 Current Learning Rate: 0.0007339649 +2025-03-26 21:50:05,128 Train Loss: 0.0003431, Val Loss: 0.0004318 +2025-03-26 21:50:05,128 Epoch 1270/2000 +2025-03-26 21:52:38,478 Current Learning Rate: 0.0007269952 +2025-03-26 21:52:38,479 Train Loss: 0.0003442, Val Loss: 0.0004293 +2025-03-26 21:52:38,479 Epoch 1271/2000 +2025-03-26 21:55:13,102 Current Learning Rate: 0.0007199696 +2025-03-26 21:55:13,103 Train Loss: 0.0003450, Val Loss: 0.0004292 +2025-03-26 21:55:13,103 Epoch 1272/2000 +2025-03-26 21:57:47,005 Current Learning Rate: 0.0007128896 +2025-03-26 21:57:47,005 Train Loss: 0.0003466, Val Loss: 0.0004318 +2025-03-26 21:57:47,005 Epoch 1273/2000 +2025-03-26 22:00:21,854 Current Learning Rate: 0.0007057572 +2025-03-26 22:00:21,855 Train Loss: 0.0003478, Val Loss: 0.0004315 +2025-03-26 22:00:21,855 Epoch 1274/2000 +2025-03-26 22:02:56,219 Current Learning Rate: 0.0006985739 +2025-03-26 22:02:56,220 Train Loss: 0.0003524, Val Loss: 0.0004346 +2025-03-26 22:02:56,220 Epoch 1275/2000 +2025-03-26 22:05:30,799 Current Learning Rate: 0.0006913417 +2025-03-26 22:05:30,799 Train Loss: 0.0003555, Val Loss: 0.0004386 +2025-03-26 22:05:30,799 Epoch 1276/2000 +2025-03-26 22:08:05,607 Current Learning Rate: 0.0006840623 +2025-03-26 22:08:05,609 Train Loss: 0.0003532, Val Loss: 0.0004322 +2025-03-26 22:08:05,609 Epoch 1277/2000 +2025-03-26 22:10:40,490 Current Learning Rate: 0.0006767374 +2025-03-26 22:10:40,491 Train Loss: 0.0003485, Val Loss: 0.0004285 +2025-03-26 22:10:40,491 Epoch 1278/2000 
+2025-03-26 22:13:14,880 Current Learning Rate: 0.0006693690 +2025-03-26 22:13:14,881 Train Loss: 0.0003420, Val Loss: 0.0004226 +2025-03-26 22:13:14,881 Epoch 1279/2000 +2025-03-26 22:15:49,608 Current Learning Rate: 0.0006619587 +2025-03-26 22:15:49,609 Train Loss: 0.0003371, Val Loss: 0.0004202 +2025-03-26 22:15:49,609 Epoch 1280/2000 +2025-03-26 22:18:24,498 Current Learning Rate: 0.0006545085 +2025-03-26 22:18:24,498 Train Loss: 0.0003348, Val Loss: 0.0004207 +2025-03-26 22:18:24,498 Epoch 1281/2000 +2025-03-26 22:20:58,673 Current Learning Rate: 0.0006470202 +2025-03-26 22:20:58,673 Train Loss: 0.0003343, Val Loss: 0.0004207 +2025-03-26 22:20:58,673 Epoch 1282/2000 +2025-03-26 22:23:32,468 Current Learning Rate: 0.0006394956 +2025-03-26 22:23:32,468 Train Loss: 0.0003348, Val Loss: 0.0004204 +2025-03-26 22:23:32,468 Epoch 1283/2000 +2025-03-26 22:26:08,125 Current Learning Rate: 0.0006319365 +2025-03-26 22:26:08,125 Train Loss: 0.0003358, Val Loss: 0.0004211 +2025-03-26 22:26:08,125 Epoch 1284/2000 +2025-03-26 22:28:42,210 Current Learning Rate: 0.0006243449 +2025-03-26 22:28:42,211 Train Loss: 0.0003367, Val Loss: 0.0004219 +2025-03-26 22:28:42,211 Epoch 1285/2000 +2025-03-26 22:31:17,674 Current Learning Rate: 0.0006167227 +2025-03-26 22:31:17,674 Train Loss: 0.0003376, Val Loss: 0.0004216 +2025-03-26 22:31:17,674 Epoch 1286/2000 +2025-03-26 22:33:52,538 Current Learning Rate: 0.0006090716 +2025-03-26 22:33:52,538 Train Loss: 0.0003383, Val Loss: 0.0004227 +2025-03-26 22:33:52,539 Epoch 1287/2000 +2025-03-26 22:36:28,534 Current Learning Rate: 0.0006013936 +2025-03-26 22:36:28,534 Train Loss: 0.0003402, Val Loss: 0.0004231 +2025-03-26 22:36:28,534 Epoch 1288/2000 +2025-03-26 22:39:02,814 Current Learning Rate: 0.0005936907 +2025-03-26 22:39:02,814 Train Loss: 0.0003417, Val Loss: 0.0004310 +2025-03-26 22:39:02,814 Epoch 1289/2000 +2025-03-26 22:41:37,178 Current Learning Rate: 0.0005859646 +2025-03-26 22:41:37,178 Train Loss: 0.0003428, Val Loss: 0.0004260 
+2025-03-26 22:41:37,178 Epoch 1290/2000 +2025-03-26 22:44:12,099 Current Learning Rate: 0.0005782172 +2025-03-26 22:44:12,099 Train Loss: 0.0003443, Val Loss: 0.0004252 +2025-03-26 22:44:12,099 Epoch 1291/2000 +2025-03-26 22:46:47,683 Current Learning Rate: 0.0005704506 +2025-03-26 22:46:47,684 Train Loss: 0.0003436, Val Loss: 0.0004235 +2025-03-26 22:46:47,684 Epoch 1292/2000 +2025-03-26 22:49:21,726 Current Learning Rate: 0.0005626666 +2025-03-26 22:49:21,727 Train Loss: 0.0003408, Val Loss: 0.0004222 +2025-03-26 22:49:21,727 Epoch 1293/2000 +2025-03-26 22:51:56,928 Current Learning Rate: 0.0005548672 +2025-03-26 22:51:56,928 Train Loss: 0.0003367, Val Loss: 0.0004190 +2025-03-26 22:51:56,928 Epoch 1294/2000 +2025-03-26 22:54:31,247 Current Learning Rate: 0.0005470542 +2025-03-26 22:54:31,248 Train Loss: 0.0003328, Val Loss: 0.0004164 +2025-03-26 22:54:31,248 Epoch 1295/2000 +2025-03-26 22:57:05,494 Current Learning Rate: 0.0005392295 +2025-03-26 22:57:05,494 Train Loss: 0.0003304, Val Loss: 0.0004152 +2025-03-26 22:57:05,495 Epoch 1296/2000 +2025-03-26 22:59:40,222 Current Learning Rate: 0.0005313953 +2025-03-26 22:59:40,223 Train Loss: 0.0003291, Val Loss: 0.0004143 +2025-03-26 22:59:40,223 Epoch 1297/2000 +2025-03-26 23:02:15,477 Current Learning Rate: 0.0005235532 +2025-03-26 23:02:15,477 Train Loss: 0.0003285, Val Loss: 0.0004140 +2025-03-26 23:02:15,477 Epoch 1298/2000 +2025-03-26 23:04:49,628 Current Learning Rate: 0.0005157054 +2025-03-26 23:04:49,628 Train Loss: 0.0003288, Val Loss: 0.0004142 +2025-03-26 23:04:49,629 Epoch 1299/2000 +2025-03-26 23:07:24,860 Current Learning Rate: 0.0005078537 +2025-03-26 23:07:24,861 Train Loss: 0.0003298, Val Loss: 0.0004150 +2025-03-26 23:07:24,861 Epoch 1300/2000 +2025-03-26 23:09:59,378 Current Learning Rate: 0.0005000000 +2025-03-26 23:09:59,379 Train Loss: 0.0003312, Val Loss: 0.0004161 +2025-03-26 23:09:59,379 Epoch 1301/2000 +2025-03-26 23:12:33,568 Current Learning Rate: 0.0004921463 +2025-03-26 23:12:33,568 
Train Loss: 0.0003325, Val Loss: 0.0004171 +2025-03-26 23:12:33,568 Epoch 1302/2000 +2025-03-26 23:15:06,188 Current Learning Rate: 0.0004842946 +2025-03-26 23:15:06,189 Train Loss: 0.0003334, Val Loss: 0.0004208 +2025-03-26 23:15:06,189 Epoch 1303/2000 +2025-03-26 23:17:40,894 Current Learning Rate: 0.0004764468 +2025-03-26 23:17:40,895 Train Loss: 0.0003349, Val Loss: 0.0004208 +2025-03-26 23:17:40,895 Epoch 1304/2000 +2025-03-26 23:20:15,332 Current Learning Rate: 0.0004686047 +2025-03-26 23:20:15,333 Train Loss: 0.0003365, Val Loss: 0.0004239 +2025-03-26 23:20:15,333 Epoch 1305/2000 +2025-03-26 23:22:49,890 Current Learning Rate: 0.0004607705 +2025-03-26 23:22:49,891 Train Loss: 0.0003368, Val Loss: 0.0004251 +2025-03-26 23:22:49,891 Epoch 1306/2000 +2025-03-26 23:25:23,719 Current Learning Rate: 0.0004529458 +2025-03-26 23:25:23,720 Train Loss: 0.0003351, Val Loss: 0.0004217 +2025-03-26 23:25:23,720 Epoch 1307/2000 +2025-03-26 23:27:58,764 Current Learning Rate: 0.0004451328 +2025-03-26 23:27:58,765 Train Loss: 0.0003321, Val Loss: 0.0004171 +2025-03-26 23:27:58,765 Epoch 1308/2000 +2025-03-26 23:30:33,330 Current Learning Rate: 0.0004373334 +2025-03-26 23:30:33,330 Train Loss: 0.0003291, Val Loss: 0.0004140 +2025-03-26 23:30:33,330 Epoch 1309/2000 +2025-03-26 23:33:07,947 Current Learning Rate: 0.0004295494 +2025-03-26 23:33:07,948 Train Loss: 0.0003268, Val Loss: 0.0004112 +2025-03-26 23:33:07,948 Epoch 1310/2000 +2025-03-26 23:35:42,036 Current Learning Rate: 0.0004217828 +2025-03-26 23:35:42,036 Train Loss: 0.0003252, Val Loss: 0.0004097 +2025-03-26 23:35:42,036 Epoch 1311/2000 +2025-03-26 23:38:17,028 Current Learning Rate: 0.0004140354 +2025-03-26 23:38:17,028 Train Loss: 0.0003242, Val Loss: 0.0004088 +2025-03-26 23:38:17,029 Epoch 1312/2000 +2025-03-26 23:40:51,031 Current Learning Rate: 0.0004063093 +2025-03-26 23:40:51,031 Train Loss: 0.0003236, Val Loss: 0.0004083 +2025-03-26 23:40:51,032 Epoch 1313/2000 +2025-03-26 23:43:24,211 Current Learning 
Rate: 0.0003986064 +2025-03-26 23:43:24,212 Train Loss: 0.0003233, Val Loss: 0.0004081 +2025-03-26 23:43:24,212 Epoch 1314/2000 +2025-03-26 23:45:59,311 Current Learning Rate: 0.0003909284 +2025-03-26 23:45:59,312 Train Loss: 0.0003240, Val Loss: 0.0004105 +2025-03-26 23:45:59,312 Epoch 1315/2000 +2025-03-26 23:48:32,755 Current Learning Rate: 0.0003832773 +2025-03-26 23:48:32,755 Train Loss: 0.0003255, Val Loss: 0.0004109 +2025-03-26 23:48:32,756 Epoch 1316/2000 +2025-03-26 23:51:07,904 Current Learning Rate: 0.0003756551 +2025-03-26 23:51:07,905 Train Loss: 0.0003265, Val Loss: 0.0004134 +2025-03-26 23:51:07,905 Epoch 1317/2000 +2025-03-26 23:53:42,694 Current Learning Rate: 0.0003680635 +2025-03-26 23:53:42,694 Train Loss: 0.0003278, Val Loss: 0.0004165 +2025-03-26 23:53:42,695 Epoch 1318/2000 +2025-03-26 23:56:17,174 Current Learning Rate: 0.0003605044 +2025-03-26 23:56:17,174 Train Loss: 0.0003292, Val Loss: 0.0004143 +2025-03-26 23:56:17,174 Epoch 1319/2000 +2025-03-26 23:58:51,515 Current Learning Rate: 0.0003529798 +2025-03-26 23:58:51,515 Train Loss: 0.0003295, Val Loss: 0.0004111 +2025-03-26 23:58:51,516 Epoch 1320/2000 +2025-03-27 00:01:26,090 Current Learning Rate: 0.0003454915 +2025-03-27 00:01:26,091 Train Loss: 0.0003276, Val Loss: 0.0004088 +2025-03-27 00:01:26,091 Epoch 1321/2000 +2025-03-27 00:04:00,106 Current Learning Rate: 0.0003380413 +2025-03-27 00:04:00,107 Train Loss: 0.0003254, Val Loss: 0.0004074 +2025-03-27 00:04:00,107 Epoch 1322/2000 +2025-03-27 00:06:34,391 Current Learning Rate: 0.0003306310 +2025-03-27 00:06:34,391 Train Loss: 0.0003235, Val Loss: 0.0004062 +2025-03-27 00:06:34,391 Epoch 1323/2000 +2025-03-27 00:09:08,774 Current Learning Rate: 0.0003232626 +2025-03-27 00:09:08,775 Train Loss: 0.0003219, Val Loss: 0.0004054 +2025-03-27 00:09:08,775 Epoch 1324/2000 +2025-03-27 00:11:43,042 Current Learning Rate: 0.0003159377 +2025-03-27 00:11:43,043 Train Loss: 0.0003208, Val Loss: 0.0004049 +2025-03-27 00:11:43,043 Epoch 1325/2000 
+2025-03-27 00:14:17,952 Current Learning Rate: 0.0003086583 +2025-03-27 00:14:17,952 Train Loss: 0.0003201, Val Loss: 0.0004045 +2025-03-27 00:14:17,952 Epoch 1326/2000 +2025-03-27 00:16:51,688 Current Learning Rate: 0.0003014261 +2025-03-27 00:16:51,688 Train Loss: 0.0003196, Val Loss: 0.0004047 +2025-03-27 00:16:51,689 Epoch 1327/2000 +2025-03-27 00:19:26,119 Current Learning Rate: 0.0002942428 +2025-03-27 00:19:26,119 Train Loss: 0.0003197, Val Loss: 0.0004042 +2025-03-27 00:19:26,119 Epoch 1328/2000 +2025-03-27 00:22:00,380 Current Learning Rate: 0.0002871104 +2025-03-27 00:22:00,381 Train Loss: 0.0003207, Val Loss: 0.0004054 +2025-03-27 00:22:00,381 Epoch 1329/2000 +2025-03-27 00:24:35,646 Current Learning Rate: 0.0002800304 +2025-03-27 00:24:35,646 Train Loss: 0.0003216, Val Loss: 0.0004045 +2025-03-27 00:24:35,647 Epoch 1330/2000 +2025-03-27 00:27:10,507 Current Learning Rate: 0.0002730048 +2025-03-27 00:27:10,507 Train Loss: 0.0003224, Val Loss: 0.0004023 +2025-03-27 00:27:10,507 Epoch 1331/2000 +2025-03-27 00:29:44,980 Current Learning Rate: 0.0002660351 +2025-03-27 00:29:44,980 Train Loss: 0.0003226, Val Loss: 0.0004006 +2025-03-27 00:29:44,981 Epoch 1332/2000 +2025-03-27 00:32:19,399 Current Learning Rate: 0.0002591232 +2025-03-27 00:32:19,400 Train Loss: 0.0003216, Val Loss: 0.0003997 +2025-03-27 00:32:19,400 Epoch 1333/2000 +2025-03-27 00:34:53,435 Current Learning Rate: 0.0002522707 +2025-03-27 00:34:53,436 Train Loss: 0.0003203, Val Loss: 0.0003991 +2025-03-27 00:34:53,437 Epoch 1334/2000 +2025-03-27 00:37:28,081 Current Learning Rate: 0.0002454793 +2025-03-27 00:37:28,081 Train Loss: 0.0003192, Val Loss: 0.0003985 +2025-03-27 00:37:28,081 Epoch 1335/2000 +2025-03-27 00:40:02,866 Current Learning Rate: 0.0002387507 +2025-03-27 00:40:02,867 Train Loss: 0.0003182, Val Loss: 0.0003980 +2025-03-27 00:40:02,867 Epoch 1336/2000 +2025-03-27 00:42:36,716 Current Learning Rate: 0.0002320866 +2025-03-27 00:42:36,716 Train Loss: 0.0003174, Val Loss: 0.0003976 
+2025-03-27 00:42:36,717 Epoch 1337/2000 +2025-03-27 00:45:11,412 Current Learning Rate: 0.0002254886 +2025-03-27 00:45:11,412 Train Loss: 0.0003167, Val Loss: 0.0003972 +2025-03-27 00:45:11,413 Epoch 1338/2000 +2025-03-27 00:47:44,788 Current Learning Rate: 0.0002189583 +2025-03-27 00:47:44,789 Train Loss: 0.0003163, Val Loss: 0.0003971 +2025-03-27 00:47:44,789 Epoch 1339/2000 +2025-03-27 00:50:17,200 Current Learning Rate: 0.0002124974 +2025-03-27 00:50:17,200 Train Loss: 0.0003160, Val Loss: 0.0003971 +2025-03-27 00:50:17,201 Epoch 1340/2000 +2025-03-27 00:52:50,532 Current Learning Rate: 0.0002061074 +2025-03-27 00:52:50,532 Train Loss: 0.0003161, Val Loss: 0.0003973 +2025-03-27 00:52:50,533 Epoch 1341/2000 +2025-03-27 00:55:24,325 Current Learning Rate: 0.0001997899 +2025-03-27 00:55:24,325 Train Loss: 0.0003168, Val Loss: 0.0003988 +2025-03-27 00:55:24,326 Epoch 1342/2000 +2025-03-27 00:57:58,963 Current Learning Rate: 0.0001935465 +2025-03-27 00:57:58,963 Train Loss: 0.0003172, Val Loss: 0.0003971 +2025-03-27 00:57:58,963 Epoch 1343/2000 +2025-03-27 01:00:32,853 Current Learning Rate: 0.0001873787 +2025-03-27 01:00:32,854 Train Loss: 0.0003166, Val Loss: 0.0003969 +2025-03-27 01:00:32,854 Epoch 1344/2000 +2025-03-27 01:03:07,637 Current Learning Rate: 0.0001812880 +2025-03-27 01:03:07,637 Train Loss: 0.0003157, Val Loss: 0.0003964 +2025-03-27 01:03:07,638 Epoch 1345/2000 +2025-03-27 01:05:40,991 Current Learning Rate: 0.0001752760 +2025-03-27 01:05:40,992 Train Loss: 0.0003149, Val Loss: 0.0003958 +2025-03-27 01:05:40,992 Epoch 1346/2000 +2025-03-27 01:08:14,231 Current Learning Rate: 0.0001693441 +2025-03-27 01:08:14,232 Train Loss: 0.0003142, Val Loss: 0.0003953 +2025-03-27 01:08:14,232 Epoch 1347/2000 +2025-03-27 01:10:47,013 Current Learning Rate: 0.0001634937 +2025-03-27 01:10:47,013 Train Loss: 0.0003137, Val Loss: 0.0003949 +2025-03-27 01:10:47,013 Epoch 1348/2000 +2025-03-27 01:13:19,974 Current Learning Rate: 0.0001577264 +2025-03-27 01:13:19,975 
Train Loss: 0.0003133, Val Loss: 0.0003947 +2025-03-27 01:13:19,975 Epoch 1349/2000 +2025-03-27 01:15:54,773 Current Learning Rate: 0.0001520436 +2025-03-27 01:15:54,773 Train Loss: 0.0003131, Val Loss: 0.0003946 +2025-03-27 01:15:54,774 Epoch 1350/2000 +2025-03-27 01:18:28,871 Current Learning Rate: 0.0001464466 +2025-03-27 01:18:28,871 Train Loss: 0.0003130, Val Loss: 0.0003946 +2025-03-27 01:18:28,871 Epoch 1351/2000 +2025-03-27 01:21:03,030 Current Learning Rate: 0.0001409369 +2025-03-27 01:21:03,096 Train Loss: 0.0003129, Val Loss: 0.0003942 +2025-03-27 01:21:03,096 Epoch 1352/2000 +2025-03-27 01:23:37,938 Current Learning Rate: 0.0001355157 +2025-03-27 01:23:37,998 Train Loss: 0.0003126, Val Loss: 0.0003937 +2025-03-27 01:23:37,998 Epoch 1353/2000 +2025-03-27 01:26:13,071 Current Learning Rate: 0.0001301845 +2025-03-27 01:26:13,131 Train Loss: 0.0003121, Val Loss: 0.0003933 +2025-03-27 01:26:13,132 Epoch 1354/2000 +2025-03-27 01:28:47,874 Current Learning Rate: 0.0001249445 +2025-03-27 01:28:47,927 Train Loss: 0.0003116, Val Loss: 0.0003930 +2025-03-27 01:28:47,927 Epoch 1355/2000 +2025-03-27 01:31:21,780 Current Learning Rate: 0.0001197970 +2025-03-27 01:31:21,840 Train Loss: 0.0003112, Val Loss: 0.0003928 +2025-03-27 01:31:21,840 Epoch 1356/2000 +2025-03-27 01:33:56,644 Current Learning Rate: 0.0001147434 +2025-03-27 01:33:56,645 Train Loss: 0.0003108, Val Loss: 0.0003928 +2025-03-27 01:33:56,645 Epoch 1357/2000 +2025-03-27 01:36:31,602 Current Learning Rate: 0.0001097848 +2025-03-27 01:36:31,603 Train Loss: 0.0003105, Val Loss: 0.0003931 +2025-03-27 01:36:31,603 Epoch 1358/2000 +2025-03-27 01:39:06,890 Current Learning Rate: 0.0001049225 +2025-03-27 01:39:06,891 Train Loss: 0.0003103, Val Loss: 0.0003932 +2025-03-27 01:39:06,891 Epoch 1359/2000 +2025-03-27 01:41:41,217 Current Learning Rate: 0.0001001577 +2025-03-27 01:41:41,278 Train Loss: 0.0003100, Val Loss: 0.0003927 +2025-03-27 01:41:41,278 Epoch 1360/2000 +2025-03-27 01:44:15,638 Current Learning 
Rate: 0.0000954915 +2025-03-27 01:44:15,700 Train Loss: 0.0003097, Val Loss: 0.0003919 +2025-03-27 01:44:15,700 Epoch 1361/2000 +2025-03-27 01:46:50,534 Current Learning Rate: 0.0000909251 +2025-03-27 01:46:50,592 Train Loss: 0.0003093, Val Loss: 0.0003911 +2025-03-27 01:46:50,592 Epoch 1362/2000 +2025-03-27 01:49:25,024 Current Learning Rate: 0.0000864597 +2025-03-27 01:49:25,113 Train Loss: 0.0003090, Val Loss: 0.0003906 +2025-03-27 01:49:25,114 Epoch 1363/2000 +2025-03-27 01:51:59,586 Current Learning Rate: 0.0000820963 +2025-03-27 01:51:59,647 Train Loss: 0.0003087, Val Loss: 0.0003902 +2025-03-27 01:51:59,647 Epoch 1364/2000 +2025-03-27 01:54:32,725 Current Learning Rate: 0.0000778360 +2025-03-27 01:54:32,786 Train Loss: 0.0003084, Val Loss: 0.0003900 +2025-03-27 01:54:32,786 Epoch 1365/2000 +2025-03-27 01:57:05,941 Current Learning Rate: 0.0000736799 +2025-03-27 01:57:06,007 Train Loss: 0.0003082, Val Loss: 0.0003897 +2025-03-27 01:57:06,008 Epoch 1366/2000 +2025-03-27 01:59:40,910 Current Learning Rate: 0.0000696290 +2025-03-27 01:59:40,972 Train Loss: 0.0003079, Val Loss: 0.0003894 +2025-03-27 01:59:40,972 Epoch 1367/2000 +2025-03-27 02:02:15,629 Current Learning Rate: 0.0000656842 +2025-03-27 02:02:15,685 Train Loss: 0.0003076, Val Loss: 0.0003892 +2025-03-27 02:02:15,685 Epoch 1368/2000 +2025-03-27 02:04:51,342 Current Learning Rate: 0.0000618467 +2025-03-27 02:04:51,417 Train Loss: 0.0003074, Val Loss: 0.0003888 +2025-03-27 02:04:51,417 Epoch 1369/2000 +2025-03-27 02:07:25,684 Current Learning Rate: 0.0000581172 +2025-03-27 02:07:25,740 Train Loss: 0.0003071, Val Loss: 0.0003884 +2025-03-27 02:07:25,741 Epoch 1370/2000 +2025-03-27 02:10:01,079 Current Learning Rate: 0.0000544967 +2025-03-27 02:10:01,131 Train Loss: 0.0003069, Val Loss: 0.0003881 +2025-03-27 02:10:01,131 Epoch 1371/2000 +2025-03-27 02:12:35,410 Current Learning Rate: 0.0000509862 +2025-03-27 02:12:35,472 Train Loss: 0.0003066, Val Loss: 0.0003878 +2025-03-27 02:12:35,473 Epoch 1372/2000 
+2025-03-27 02:15:10,565 Current Learning Rate: 0.0000475865 +2025-03-27 02:15:10,622 Train Loss: 0.0003064, Val Loss: 0.0003876 +2025-03-27 02:15:10,622 Epoch 1373/2000 +2025-03-27 02:17:45,287 Current Learning Rate: 0.0000442984 +2025-03-27 02:17:45,342 Train Loss: 0.0003062, Val Loss: 0.0003873 +2025-03-27 02:17:45,342 Epoch 1374/2000 +2025-03-27 02:20:19,843 Current Learning Rate: 0.0000411227 +2025-03-27 02:20:19,898 Train Loss: 0.0003060, Val Loss: 0.0003872 +2025-03-27 02:20:19,898 Epoch 1375/2000 +2025-03-27 02:22:52,458 Current Learning Rate: 0.0000380602 +2025-03-27 02:22:52,528 Train Loss: 0.0003058, Val Loss: 0.0003870 +2025-03-27 02:22:52,528 Epoch 1376/2000 +2025-03-27 02:25:25,557 Current Learning Rate: 0.0000351118 +2025-03-27 02:25:25,650 Train Loss: 0.0003056, Val Loss: 0.0003868 +2025-03-27 02:25:25,650 Epoch 1377/2000 +2025-03-27 02:27:58,790 Current Learning Rate: 0.0000322780 +2025-03-27 02:27:58,852 Train Loss: 0.0003054, Val Loss: 0.0003866 +2025-03-27 02:27:58,852 Epoch 1378/2000 +2025-03-27 02:30:33,081 Current Learning Rate: 0.0000295596 +2025-03-27 02:30:33,139 Train Loss: 0.0003052, Val Loss: 0.0003865 +2025-03-27 02:30:33,139 Epoch 1379/2000 +2025-03-27 02:33:07,208 Current Learning Rate: 0.0000269573 +2025-03-27 02:33:07,287 Train Loss: 0.0003050, Val Loss: 0.0003863 +2025-03-27 02:33:07,287 Epoch 1380/2000 +2025-03-27 02:35:40,460 Current Learning Rate: 0.0000244717 +2025-03-27 02:35:40,519 Train Loss: 0.0003048, Val Loss: 0.0003861 +2025-03-27 02:35:40,520 Epoch 1381/2000 +2025-03-27 02:38:15,530 Current Learning Rate: 0.0000221035 +2025-03-27 02:38:15,589 Train Loss: 0.0003046, Val Loss: 0.0003860 +2025-03-27 02:38:15,589 Epoch 1382/2000 +2025-03-27 02:40:51,353 Current Learning Rate: 0.0000198532 +2025-03-27 02:40:51,408 Train Loss: 0.0003045, Val Loss: 0.0003858 +2025-03-27 02:40:51,409 Epoch 1383/2000 +2025-03-27 02:43:25,835 Current Learning Rate: 0.0000177213 +2025-03-27 02:43:25,892 Train Loss: 0.0003043, Val Loss: 0.0003856 
+2025-03-27 02:43:25,892 Epoch 1384/2000 +2025-03-27 02:45:59,576 Current Learning Rate: 0.0000157084 +2025-03-27 02:45:59,651 Train Loss: 0.0003041, Val Loss: 0.0003855 +2025-03-27 02:45:59,652 Epoch 1385/2000 +2025-03-27 02:48:33,641 Current Learning Rate: 0.0000138150 +2025-03-27 02:48:33,705 Train Loss: 0.0003040, Val Loss: 0.0003853 +2025-03-27 02:48:33,706 Epoch 1386/2000 +2025-03-27 02:51:08,474 Current Learning Rate: 0.0000120416 +2025-03-27 02:51:08,542 Train Loss: 0.0003039, Val Loss: 0.0003853 +2025-03-27 02:51:08,542 Epoch 1387/2000 +2025-03-27 02:53:43,436 Current Learning Rate: 0.0000103886 +2025-03-27 02:53:43,490 Train Loss: 0.0003037, Val Loss: 0.0003852 +2025-03-27 02:53:43,490 Epoch 1388/2000 +2025-03-27 02:56:17,723 Current Learning Rate: 0.0000088564 +2025-03-27 02:56:17,781 Train Loss: 0.0003036, Val Loss: 0.0003851 +2025-03-27 02:56:17,781 Epoch 1389/2000 +2025-03-27 02:58:52,739 Current Learning Rate: 0.0000074453 +2025-03-27 02:58:52,811 Train Loss: 0.0003035, Val Loss: 0.0003849 +2025-03-27 02:58:52,811 Epoch 1390/2000 +2025-03-27 03:01:27,005 Current Learning Rate: 0.0000061558 +2025-03-27 03:01:27,061 Train Loss: 0.0003034, Val Loss: 0.0003848 +2025-03-27 03:01:27,062 Epoch 1391/2000 +2025-03-27 03:04:01,488 Current Learning Rate: 0.0000049882 +2025-03-27 03:04:01,539 Train Loss: 0.0003033, Val Loss: 0.0003847 +2025-03-27 03:04:01,539 Epoch 1392/2000 +2025-03-27 03:06:35,885 Current Learning Rate: 0.0000039426 +2025-03-27 03:06:35,946 Train Loss: 0.0003032, Val Loss: 0.0003846 +2025-03-27 03:06:35,946 Epoch 1393/2000 +2025-03-27 03:09:10,436 Current Learning Rate: 0.0000030195 +2025-03-27 03:09:10,490 Train Loss: 0.0003031, Val Loss: 0.0003846 +2025-03-27 03:09:10,490 Epoch 1394/2000 +2025-03-27 03:11:43,848 Current Learning Rate: 0.0000022190 +2025-03-27 03:11:43,905 Train Loss: 0.0003030, Val Loss: 0.0003845 +2025-03-27 03:11:43,905 Epoch 1395/2000 +2025-03-27 03:14:18,162 Current Learning Rate: 0.0000015413 +2025-03-27 03:14:18,253 
Train Loss: 0.0003030, Val Loss: 0.0003845 +2025-03-27 03:14:18,253 Epoch 1396/2000 +2025-03-27 03:16:50,766 Current Learning Rate: 0.0000009866 +2025-03-27 03:16:50,821 Train Loss: 0.0003029, Val Loss: 0.0003845 +2025-03-27 03:16:50,821 Epoch 1397/2000 +2025-03-27 03:19:23,892 Current Learning Rate: 0.0000005551 +2025-03-27 03:19:23,945 Train Loss: 0.0003029, Val Loss: 0.0003845 +2025-03-27 03:19:23,945 Epoch 1398/2000 +2025-03-27 03:21:57,428 Current Learning Rate: 0.0000002467 +2025-03-27 03:21:57,484 Train Loss: 0.0003028, Val Loss: 0.0003845 +2025-03-27 03:21:57,484 Epoch 1399/2000 +2025-03-27 03:24:30,484 Current Learning Rate: 0.0000000617 +2025-03-27 03:24:30,546 Train Loss: 0.0003028, Val Loss: 0.0003845 +2025-03-27 03:24:30,546 Epoch 1400/2000 +2025-03-27 03:27:06,055 Current Learning Rate: 0.0000000000 +2025-03-27 03:27:06,124 Train Loss: 0.0003028, Val Loss: 0.0003845 +2025-03-27 03:27:06,125 Epoch 1401/2000 +2025-03-27 03:29:38,993 Current Learning Rate: 0.0000000617 +2025-03-27 03:29:38,994 Train Loss: 0.0003027, Val Loss: 0.0003845 +2025-03-27 03:29:38,994 Epoch 1402/2000 +2025-03-27 03:32:12,178 Current Learning Rate: 0.0000002467 +2025-03-27 03:32:12,235 Train Loss: 0.0003028, Val Loss: 0.0003844 +2025-03-27 03:32:12,235 Epoch 1403/2000 +2025-03-27 03:34:46,576 Current Learning Rate: 0.0000005551 +2025-03-27 03:34:46,576 Train Loss: 0.0003028, Val Loss: 0.0003845 +2025-03-27 03:34:46,577 Epoch 1404/2000 +2025-03-27 03:37:21,557 Current Learning Rate: 0.0000009866 +2025-03-27 03:37:21,557 Train Loss: 0.0003028, Val Loss: 0.0003845 +2025-03-27 03:37:21,558 Epoch 1405/2000 +2025-03-27 03:39:56,708 Current Learning Rate: 0.0000015413 +2025-03-27 03:39:56,709 Train Loss: 0.0003028, Val Loss: 0.0003845 +2025-03-27 03:39:56,709 Epoch 1406/2000 +2025-03-27 03:42:29,271 Current Learning Rate: 0.0000022190 +2025-03-27 03:42:29,272 Train Loss: 0.0003029, Val Loss: 0.0003845 +2025-03-27 03:42:29,272 Epoch 1407/2000 +2025-03-27 03:45:03,978 Current Learning 
Rate: 0.0000030195 +2025-03-27 03:45:03,978 Train Loss: 0.0003029, Val Loss: 0.0003845 +2025-03-27 03:45:03,979 Epoch 1408/2000 +2025-03-27 03:47:37,597 Current Learning Rate: 0.0000039426 +2025-03-27 03:47:37,597 Train Loss: 0.0003030, Val Loss: 0.0003845 +2025-03-27 03:47:37,597 Epoch 1409/2000 +2025-03-27 03:50:12,211 Current Learning Rate: 0.0000049882 +2025-03-27 03:50:12,212 Train Loss: 0.0003030, Val Loss: 0.0003846 +2025-03-27 03:50:12,212 Epoch 1410/2000 +2025-03-27 03:52:44,335 Current Learning Rate: 0.0000061558 +2025-03-27 03:52:44,336 Train Loss: 0.0003031, Val Loss: 0.0003846 +2025-03-27 03:52:44,336 Epoch 1411/2000 +2025-03-27 03:55:17,307 Current Learning Rate: 0.0000074453 +2025-03-27 03:55:17,307 Train Loss: 0.0003032, Val Loss: 0.0003847 +2025-03-27 03:55:17,308 Epoch 1412/2000 +2025-03-27 03:57:50,013 Current Learning Rate: 0.0000088564 +2025-03-27 03:57:50,013 Train Loss: 0.0003032, Val Loss: 0.0003847 +2025-03-27 03:57:50,013 Epoch 1413/2000 +2025-03-27 04:00:23,823 Current Learning Rate: 0.0000103886 +2025-03-27 04:00:23,824 Train Loss: 0.0003033, Val Loss: 0.0003849 +2025-03-27 04:00:23,824 Epoch 1414/2000 +2025-03-27 04:02:58,189 Current Learning Rate: 0.0000120416 +2025-03-27 04:02:58,190 Train Loss: 0.0003034, Val Loss: 0.0003850 +2025-03-27 04:02:58,190 Epoch 1415/2000 +2025-03-27 04:05:33,357 Current Learning Rate: 0.0000138150 +2025-03-27 04:05:33,357 Train Loss: 0.0003034, Val Loss: 0.0003850 +2025-03-27 04:05:33,357 Epoch 1416/2000 +2025-03-27 04:08:08,353 Current Learning Rate: 0.0000157084 +2025-03-27 04:08:08,353 Train Loss: 0.0003035, Val Loss: 0.0003851 +2025-03-27 04:08:08,353 Epoch 1417/2000 +2025-03-27 04:10:42,875 Current Learning Rate: 0.0000177213 +2025-03-27 04:10:42,875 Train Loss: 0.0003036, Val Loss: 0.0003851 +2025-03-27 04:10:42,875 Epoch 1418/2000 +2025-03-27 04:13:16,645 Current Learning Rate: 0.0000198532 +2025-03-27 04:13:16,646 Train Loss: 0.0003037, Val Loss: 0.0003852 +2025-03-27 04:13:16,646 Epoch 1419/2000 
+2025-03-27 04:15:51,387 Current Learning Rate: 0.0000221035 +2025-03-27 04:15:51,388 Train Loss: 0.0003038, Val Loss: 0.0003853 +2025-03-27 04:15:51,388 Epoch 1420/2000 +2025-03-27 04:18:25,641 Current Learning Rate: 0.0000244717 +2025-03-27 04:18:25,641 Train Loss: 0.0003039, Val Loss: 0.0003854 +2025-03-27 04:18:25,642 Epoch 1421/2000 +2025-03-27 04:20:59,930 Current Learning Rate: 0.0000269573 +2025-03-27 04:20:59,930 Train Loss: 0.0003040, Val Loss: 0.0003855 +2025-03-27 04:20:59,931 Epoch 1422/2000 +2025-03-27 04:23:34,489 Current Learning Rate: 0.0000295596 +2025-03-27 04:23:34,490 Train Loss: 0.0003040, Val Loss: 0.0003856 +2025-03-27 04:23:34,490 Epoch 1423/2000 +2025-03-27 04:26:09,085 Current Learning Rate: 0.0000322780 +2025-03-27 04:26:09,085 Train Loss: 0.0003041, Val Loss: 0.0003858 +2025-03-27 04:26:09,086 Epoch 1424/2000 +2025-03-27 04:28:43,384 Current Learning Rate: 0.0000351118 +2025-03-27 04:28:43,385 Train Loss: 0.0003042, Val Loss: 0.0003859 +2025-03-27 04:28:43,385 Epoch 1425/2000 +2025-03-27 04:31:17,368 Current Learning Rate: 0.0000380602 +2025-03-27 04:31:17,368 Train Loss: 0.0003043, Val Loss: 0.0003860 +2025-03-27 04:31:17,368 Epoch 1426/2000 +2025-03-27 04:33:51,095 Current Learning Rate: 0.0000411227 +2025-03-27 04:33:51,096 Train Loss: 0.0003044, Val Loss: 0.0003861 +2025-03-27 04:33:51,096 Epoch 1427/2000 +2025-03-27 04:36:25,681 Current Learning Rate: 0.0000442984 +2025-03-27 04:36:25,682 Train Loss: 0.0003045, Val Loss: 0.0003862 +2025-03-27 04:36:25,682 Epoch 1428/2000 +2025-03-27 04:39:01,174 Current Learning Rate: 0.0000475865 +2025-03-27 04:39:01,174 Train Loss: 0.0003046, Val Loss: 0.0003864 +2025-03-27 04:39:01,174 Epoch 1429/2000 +2025-03-27 04:41:35,611 Current Learning Rate: 0.0000509862 +2025-03-27 04:41:35,612 Train Loss: 0.0003047, Val Loss: 0.0003865 +2025-03-27 04:41:35,612 Epoch 1430/2000 +2025-03-27 04:44:09,825 Current Learning Rate: 0.0000544967 +2025-03-27 04:44:09,826 Train Loss: 0.0003048, Val Loss: 0.0003866 
+2025-03-27 04:44:09,826 Epoch 1431/2000 +2025-03-27 04:46:43,871 Current Learning Rate: 0.0000581172 +2025-03-27 04:46:43,872 Train Loss: 0.0003049, Val Loss: 0.0003868 +2025-03-27 04:46:43,872 Epoch 1432/2000 +2025-03-27 04:49:18,260 Current Learning Rate: 0.0000618467 +2025-03-27 04:49:18,260 Train Loss: 0.0003051, Val Loss: 0.0003869 +2025-03-27 04:49:18,260 Epoch 1433/2000 +2025-03-27 04:51:53,552 Current Learning Rate: 0.0000656842 +2025-03-27 04:51:53,553 Train Loss: 0.0003052, Val Loss: 0.0003870 +2025-03-27 04:51:53,553 Epoch 1434/2000 +2025-03-27 04:54:27,555 Current Learning Rate: 0.0000696290 +2025-03-27 04:54:27,556 Train Loss: 0.0003053, Val Loss: 0.0003872 +2025-03-27 04:54:27,556 Epoch 1435/2000 +2025-03-27 04:57:02,572 Current Learning Rate: 0.0000736799 +2025-03-27 04:57:02,572 Train Loss: 0.0003054, Val Loss: 0.0003873 +2025-03-27 04:57:02,573 Epoch 1436/2000 +2025-03-27 04:59:36,401 Current Learning Rate: 0.0000778360 +2025-03-27 04:59:36,401 Train Loss: 0.0003056, Val Loss: 0.0003874 +2025-03-27 04:59:36,401 Epoch 1437/2000 +2025-03-27 05:02:10,775 Current Learning Rate: 0.0000820963 +2025-03-27 05:02:10,776 Train Loss: 0.0003057, Val Loss: 0.0003874 +2025-03-27 05:02:10,776 Epoch 1438/2000 +2025-03-27 05:04:45,661 Current Learning Rate: 0.0000864597 +2025-03-27 05:04:45,661 Train Loss: 0.0003058, Val Loss: 0.0003876 +2025-03-27 05:04:45,661 Epoch 1439/2000 +2025-03-27 05:07:18,247 Current Learning Rate: 0.0000909251 +2025-03-27 05:07:18,248 Train Loss: 0.0003060, Val Loss: 0.0003877 +2025-03-27 05:07:18,248 Epoch 1440/2000 +2025-03-27 05:09:52,370 Current Learning Rate: 0.0000954915 +2025-03-27 05:09:52,371 Train Loss: 0.0003061, Val Loss: 0.0003878 +2025-03-27 05:09:52,371 Epoch 1441/2000 +2025-03-27 05:12:26,520 Current Learning Rate: 0.0001001577 +2025-03-27 05:12:26,520 Train Loss: 0.0003062, Val Loss: 0.0003879 +2025-03-27 05:12:26,520 Epoch 1442/2000 +2025-03-27 05:15:00,727 Current Learning Rate: 0.0001049225 +2025-03-27 05:15:00,728 
Train Loss: 0.0003064, Val Loss: 0.0003880 +2025-03-27 05:15:00,728 Epoch 1443/2000 +2025-03-27 05:17:35,213 Current Learning Rate: 0.0001097848 +2025-03-27 05:17:35,214 Train Loss: 0.0003065, Val Loss: 0.0003881 +2025-03-27 05:17:35,214 Epoch 1444/2000 +2025-03-27 05:20:09,784 Current Learning Rate: 0.0001147434 +2025-03-27 05:20:09,784 Train Loss: 0.0003067, Val Loss: 0.0003883 +2025-03-27 05:20:09,784 Epoch 1445/2000 +2025-03-27 05:22:44,002 Current Learning Rate: 0.0001197970 +2025-03-27 05:22:44,002 Train Loss: 0.0003068, Val Loss: 0.0003885 +2025-03-27 05:22:44,002 Epoch 1446/2000 +2025-03-27 05:25:18,099 Current Learning Rate: 0.0001249445 +2025-03-27 05:25:18,099 Train Loss: 0.0003070, Val Loss: 0.0003886 +2025-03-27 05:25:18,100 Epoch 1447/2000 +2025-03-27 05:27:52,504 Current Learning Rate: 0.0001301845 +2025-03-27 05:27:52,504 Train Loss: 0.0003071, Val Loss: 0.0003888 +2025-03-27 05:27:52,504 Epoch 1448/2000 +2025-03-27 05:30:27,130 Current Learning Rate: 0.0001355157 +2025-03-27 05:30:27,130 Train Loss: 0.0003073, Val Loss: 0.0003889 +2025-03-27 05:30:27,131 Epoch 1449/2000 +2025-03-27 05:33:00,276 Current Learning Rate: 0.0001409369 +2025-03-27 05:33:00,276 Train Loss: 0.0003074, Val Loss: 0.0003891 +2025-03-27 05:33:00,276 Epoch 1450/2000 +2025-03-27 05:35:34,843 Current Learning Rate: 0.0001464466 +2025-03-27 05:35:34,844 Train Loss: 0.0003076, Val Loss: 0.0003893 +2025-03-27 05:35:34,844 Epoch 1451/2000 +2025-03-27 05:38:09,211 Current Learning Rate: 0.0001520436 +2025-03-27 05:38:09,212 Train Loss: 0.0003078, Val Loss: 0.0003895 +2025-03-27 05:38:09,212 Epoch 1452/2000 +2025-03-27 05:40:42,592 Current Learning Rate: 0.0001577264 +2025-03-27 05:40:42,592 Train Loss: 0.0003080, Val Loss: 0.0003897 +2025-03-27 05:40:42,592 Epoch 1453/2000 +2025-03-27 05:43:15,298 Current Learning Rate: 0.0001634937 +2025-03-27 05:43:15,298 Train Loss: 0.0003081, Val Loss: 0.0003900 +2025-03-27 05:43:15,298 Epoch 1454/2000 +2025-03-27 05:45:49,967 Current Learning 
Rate: 0.0001693441 +2025-03-27 05:45:49,968 Train Loss: 0.0003083, Val Loss: 0.0003902 +2025-03-27 05:45:49,968 Epoch 1455/2000 +2025-03-27 05:48:24,266 Current Learning Rate: 0.0001752760 +2025-03-27 05:48:24,266 Train Loss: 0.0003085, Val Loss: 0.0003904 +2025-03-27 05:48:24,266 Epoch 1456/2000 +2025-03-27 05:50:58,734 Current Learning Rate: 0.0001812880 +2025-03-27 05:50:58,735 Train Loss: 0.0003087, Val Loss: 0.0003906 +2025-03-27 05:50:58,735 Epoch 1457/2000 +2025-03-27 05:53:32,152 Current Learning Rate: 0.0001873787 +2025-03-27 05:53:32,152 Train Loss: 0.0003089, Val Loss: 0.0003908 +2025-03-27 05:53:32,152 Epoch 1458/2000 +2025-03-27 05:56:06,582 Current Learning Rate: 0.0001935465 +2025-03-27 05:56:06,583 Train Loss: 0.0003091, Val Loss: 0.0003910 +2025-03-27 05:56:06,583 Epoch 1459/2000 +2025-03-27 05:58:40,808 Current Learning Rate: 0.0001997899 +2025-03-27 05:58:40,809 Train Loss: 0.0003093, Val Loss: 0.0003913 +2025-03-27 05:58:40,809 Epoch 1460/2000 +2025-03-27 06:01:15,235 Current Learning Rate: 0.0002061074 +2025-03-27 06:01:15,236 Train Loss: 0.0003095, Val Loss: 0.0003917 +2025-03-27 06:01:15,236 Epoch 1461/2000 +2025-03-27 06:03:49,792 Current Learning Rate: 0.0002124974 +2025-03-27 06:03:49,793 Train Loss: 0.0003097, Val Loss: 0.0003923 +2025-03-27 06:03:49,793 Epoch 1462/2000 +2025-03-27 06:06:22,564 Current Learning Rate: 0.0002189583 +2025-03-27 06:06:22,565 Train Loss: 0.0003100, Val Loss: 0.0003927 +2025-03-27 06:06:22,565 Epoch 1463/2000 +2025-03-27 06:08:57,129 Current Learning Rate: 0.0002254886 +2025-03-27 06:08:57,129 Train Loss: 0.0003104, Val Loss: 0.0003930 +2025-03-27 06:08:57,130 Epoch 1464/2000 +2025-03-27 06:11:30,634 Current Learning Rate: 0.0002320866 +2025-03-27 06:11:30,635 Train Loss: 0.0003110, Val Loss: 0.0003944 +2025-03-27 06:11:30,635 Epoch 1465/2000 +2025-03-27 06:14:04,409 Current Learning Rate: 0.0002387507 +2025-03-27 06:14:04,409 Train Loss: 0.0003114, Val Loss: 0.0003948 +2025-03-27 06:14:04,409 Epoch 1466/2000 
+2025-03-27 06:16:38,862 Current Learning Rate: 0.0002454793 +2025-03-27 06:16:38,862 Train Loss: 0.0003117, Val Loss: 0.0003940 +2025-03-27 06:16:38,863 Epoch 1467/2000 +2025-03-27 06:19:13,268 Current Learning Rate: 0.0002522707 +2025-03-27 06:19:13,268 Train Loss: 0.0003119, Val Loss: 0.0003941 +2025-03-27 06:19:13,269 Epoch 1468/2000 +2025-03-27 06:21:46,930 Current Learning Rate: 0.0002591232 +2025-03-27 06:21:46,930 Train Loss: 0.0003116, Val Loss: 0.0003944 +2025-03-27 06:21:46,930 Epoch 1469/2000 +2025-03-27 06:24:21,066 Current Learning Rate: 0.0002660351 +2025-03-27 06:24:21,067 Train Loss: 0.0003113, Val Loss: 0.0003946 +2025-03-27 06:24:21,067 Epoch 1470/2000 +2025-03-27 06:26:55,832 Current Learning Rate: 0.0002730048 +2025-03-27 06:26:55,833 Train Loss: 0.0003115, Val Loss: 0.0003945 +2025-03-27 06:26:55,833 Epoch 1471/2000 +2025-03-27 06:29:31,186 Current Learning Rate: 0.0002800304 +2025-03-27 06:29:31,186 Train Loss: 0.0003117, Val Loss: 0.0003945 +2025-03-27 06:29:31,187 Epoch 1472/2000 +2025-03-27 06:32:04,800 Current Learning Rate: 0.0002871104 +2025-03-27 06:32:04,801 Train Loss: 0.0003120, Val Loss: 0.0003945 +2025-03-27 06:32:04,801 Epoch 1473/2000 +2025-03-27 06:34:38,363 Current Learning Rate: 0.0002942428 +2025-03-27 06:34:38,364 Train Loss: 0.0003122, Val Loss: 0.0003947 +2025-03-27 06:34:38,364 Epoch 1474/2000 +2025-03-27 06:37:12,325 Current Learning Rate: 0.0003014261 +2025-03-27 06:37:12,325 Train Loss: 0.0003125, Val Loss: 0.0003949 +2025-03-27 06:37:12,325 Epoch 1475/2000 +2025-03-27 06:39:47,194 Current Learning Rate: 0.0003086583 +2025-03-27 06:39:47,195 Train Loss: 0.0003131, Val Loss: 0.0003954 +2025-03-27 06:39:47,195 Epoch 1476/2000 +2025-03-27 06:42:21,867 Current Learning Rate: 0.0003159377 +2025-03-27 06:42:21,868 Train Loss: 0.0003140, Val Loss: 0.0003971 +2025-03-27 06:42:21,868 Epoch 1477/2000 +2025-03-27 06:44:56,566 Current Learning Rate: 0.0003232626 +2025-03-27 06:44:56,567 Train Loss: 0.0003149, Val Loss: 0.0003984 
+2025-03-27 06:44:56,567 Epoch 1478/2000 +2025-03-27 06:47:31,202 Current Learning Rate: 0.0003306310 +2025-03-27 06:47:31,202 Train Loss: 0.0003152, Val Loss: 0.0004001 +2025-03-27 06:47:31,203 Epoch 1479/2000 +2025-03-27 06:50:06,586 Current Learning Rate: 0.0003380413 +2025-03-27 06:50:06,586 Train Loss: 0.0003153, Val Loss: 0.0003987 +2025-03-27 06:50:06,586 Epoch 1480/2000 +2025-03-27 06:52:42,188 Current Learning Rate: 0.0003454915 +2025-03-27 06:52:42,188 Train Loss: 0.0003152, Val Loss: 0.0003982 +2025-03-27 06:52:42,188 Epoch 1481/2000 +2025-03-27 06:55:15,339 Current Learning Rate: 0.0003529798 +2025-03-27 06:55:15,339 Train Loss: 0.0003145, Val Loss: 0.0003971 +2025-03-27 06:55:15,340 Epoch 1482/2000 +2025-03-27 06:57:48,385 Current Learning Rate: 0.0003605044 +2025-03-27 06:57:48,386 Train Loss: 0.0003142, Val Loss: 0.0003969 +2025-03-27 06:57:48,386 Epoch 1483/2000 +2025-03-27 07:00:22,007 Current Learning Rate: 0.0003680635 +2025-03-27 07:00:22,007 Train Loss: 0.0003145, Val Loss: 0.0003971 +2025-03-27 07:00:22,008 Epoch 1484/2000 +2025-03-27 07:02:55,860 Current Learning Rate: 0.0003756551 +2025-03-27 07:02:55,861 Train Loss: 0.0003148, Val Loss: 0.0003977 +2025-03-27 07:02:55,861 Epoch 1485/2000 +2025-03-27 07:05:30,729 Current Learning Rate: 0.0003832773 +2025-03-27 07:05:30,730 Train Loss: 0.0003152, Val Loss: 0.0003983 +2025-03-27 07:05:30,730 Epoch 1486/2000 +2025-03-27 07:08:05,252 Current Learning Rate: 0.0003909284 +2025-03-27 07:08:05,253 Train Loss: 0.0003161, Val Loss: 0.0003992 +2025-03-27 07:08:05,253 Epoch 1487/2000 +2025-03-27 07:10:39,469 Current Learning Rate: 0.0003986064 +2025-03-27 07:10:39,469 Train Loss: 0.0003172, Val Loss: 0.0003996 +2025-03-27 07:10:39,470 Epoch 1488/2000 +2025-03-27 07:13:13,772 Current Learning Rate: 0.0004063093 +2025-03-27 07:13:13,773 Train Loss: 0.0003177, Val Loss: 0.0003999 +2025-03-27 07:13:13,773 Epoch 1489/2000 +2025-03-27 07:15:47,928 Current Learning Rate: 0.0004140354 +2025-03-27 07:15:47,929 
Train Loss: 0.0003181, Val Loss: 0.0004009 +2025-03-27 07:15:47,929 Epoch 1490/2000 +2025-03-27 07:18:22,639 Current Learning Rate: 0.0004217828 +2025-03-27 07:18:22,640 Train Loss: 0.0003187, Val Loss: 0.0004014 +2025-03-27 07:18:22,640 Epoch 1491/2000 +2025-03-27 07:20:56,679 Current Learning Rate: 0.0004295494 +2025-03-27 07:20:56,679 Train Loss: 0.0003189, Val Loss: 0.0004001 +2025-03-27 07:20:56,679 Epoch 1492/2000 +2025-03-27 07:23:31,745 Current Learning Rate: 0.0004373334 +2025-03-27 07:23:31,745 Train Loss: 0.0003187, Val Loss: 0.0003999 +2025-03-27 07:23:31,745 Epoch 1493/2000 +2025-03-27 07:26:05,847 Current Learning Rate: 0.0004451328 +2025-03-27 07:26:05,848 Train Loss: 0.0003176, Val Loss: 0.0003996 +2025-03-27 07:26:05,848 Epoch 1494/2000 +2025-03-27 07:28:40,563 Current Learning Rate: 0.0004529458 +2025-03-27 07:28:40,563 Train Loss: 0.0003172, Val Loss: 0.0004004 +2025-03-27 07:28:40,563 Epoch 1495/2000 +2025-03-27 07:31:14,460 Current Learning Rate: 0.0004607705 +2025-03-27 07:31:14,461 Train Loss: 0.0003178, Val Loss: 0.0004016 +2025-03-27 07:31:14,461 Epoch 1496/2000 +2025-03-27 07:33:49,290 Current Learning Rate: 0.0004686047 +2025-03-27 07:33:49,290 Train Loss: 0.0003185, Val Loss: 0.0004020 +2025-03-27 07:33:49,290 Epoch 1497/2000 +2025-03-27 07:36:22,877 Current Learning Rate: 0.0004764468 +2025-03-27 07:36:22,878 Train Loss: 0.0003198, Val Loss: 0.0004035 +2025-03-27 07:36:22,878 Epoch 1498/2000 +2025-03-27 07:38:57,524 Current Learning Rate: 0.0004842946 +2025-03-27 07:38:57,525 Train Loss: 0.0003208, Val Loss: 0.0004029 +2025-03-27 07:38:57,525 Epoch 1499/2000 +2025-03-27 07:41:32,174 Current Learning Rate: 0.0004921463 +2025-03-27 07:41:32,174 Train Loss: 0.0003212, Val Loss: 0.0004032 +2025-03-27 07:41:32,174 Epoch 1500/2000 +2025-03-27 07:44:07,255 Current Learning Rate: 0.0005000000 +2025-03-27 07:44:07,256 Train Loss: 0.0003216, Val Loss: 0.0004032 +2025-03-27 07:44:07,256 Epoch 1501/2000 +2025-03-27 07:46:40,958 Current Learning 
Rate: 0.0005078537 +2025-03-27 07:46:40,958 Train Loss: 0.0003221, Val Loss: 0.0004035 +2025-03-27 07:46:40,959 Epoch 1502/2000 +2025-03-27 07:49:16,862 Current Learning Rate: 0.0005157054 +2025-03-27 07:49:16,862 Train Loss: 0.0003226, Val Loss: 0.0004047 +2025-03-27 07:49:16,862 Epoch 1503/2000 +2025-03-27 07:51:50,471 Current Learning Rate: 0.0005235532 +2025-03-27 07:51:50,472 Train Loss: 0.0003233, Val Loss: 0.0004063 +2025-03-27 07:51:50,472 Epoch 1504/2000 +2025-03-27 07:54:25,666 Current Learning Rate: 0.0005313953 +2025-03-27 07:54:25,667 Train Loss: 0.0003237, Val Loss: 0.0004078 +2025-03-27 07:54:25,667 Epoch 1505/2000 +2025-03-27 07:56:59,936 Current Learning Rate: 0.0005392295 +2025-03-27 07:56:59,936 Train Loss: 0.0003230, Val Loss: 0.0004049 +2025-03-27 07:56:59,936 Epoch 1506/2000 +2025-03-27 07:59:34,651 Current Learning Rate: 0.0005470542 +2025-03-27 07:59:34,651 Train Loss: 0.0003211, Val Loss: 0.0004042 +2025-03-27 07:59:34,652 Epoch 1507/2000 +2025-03-27 08:02:10,273 Current Learning Rate: 0.0005548672 +2025-03-27 08:02:10,274 Train Loss: 0.0003213, Val Loss: 0.0004059 +2025-03-27 08:02:10,274 Epoch 1508/2000 +2025-03-27 08:04:44,606 Current Learning Rate: 0.0005626666 +2025-03-27 08:04:44,606 Train Loss: 0.0003222, Val Loss: 0.0004077 +2025-03-27 08:04:44,607 Epoch 1509/2000 +2025-03-27 08:07:19,046 Current Learning Rate: 0.0005704506 +2025-03-27 08:07:19,047 Train Loss: 0.0003237, Val Loss: 0.0004073 +2025-03-27 08:07:19,047 Epoch 1510/2000 +2025-03-27 08:09:52,912 Current Learning Rate: 0.0005782172 +2025-03-27 08:09:52,913 Train Loss: 0.0003249, Val Loss: 0.0004078 +2025-03-27 08:09:52,913 Epoch 1511/2000 +2025-03-27 08:12:27,661 Current Learning Rate: 0.0005859646 +2025-03-27 08:12:27,661 Train Loss: 0.0003254, Val Loss: 0.0004080 +2025-03-27 08:12:27,662 Epoch 1512/2000 +2025-03-27 08:15:02,227 Current Learning Rate: 0.0005936907 +2025-03-27 08:15:02,227 Train Loss: 0.0003260, Val Loss: 0.0004081 +2025-03-27 08:15:02,228 Epoch 1513/2000 
+2025-03-27 08:17:36,803 Current Learning Rate: 0.0006013936 +2025-03-27 08:17:36,803 Train Loss: 0.0003270, Val Loss: 0.0004092 +2025-03-27 08:17:36,803 Epoch 1514/2000 +2025-03-27 08:20:11,869 Current Learning Rate: 0.0006090716 +2025-03-27 08:20:11,869 Train Loss: 0.0003272, Val Loss: 0.0004101 +2025-03-27 08:20:11,870 Epoch 1515/2000 +2025-03-27 08:22:47,038 Current Learning Rate: 0.0006167227 +2025-03-27 08:22:47,039 Train Loss: 0.0003274, Val Loss: 0.0004090 +2025-03-27 08:22:47,039 Epoch 1516/2000 +2025-03-27 08:25:22,190 Current Learning Rate: 0.0006243449 +2025-03-27 08:25:22,190 Train Loss: 0.0003265, Val Loss: 0.0004084 +2025-03-27 08:25:22,190 Epoch 1517/2000 +2025-03-27 08:27:57,508 Current Learning Rate: 0.0006319365 +2025-03-27 08:27:57,509 Train Loss: 0.0003245, Val Loss: 0.0004088 +2025-03-27 08:27:57,509 Epoch 1518/2000 +2025-03-27 08:30:33,006 Current Learning Rate: 0.0006394956 +2025-03-27 08:30:33,006 Train Loss: 0.0003252, Val Loss: 0.0004094 +2025-03-27 08:30:33,007 Epoch 1519/2000 +2025-03-27 08:33:06,595 Current Learning Rate: 0.0006470202 +2025-03-27 08:33:06,596 Train Loss: 0.0003268, Val Loss: 0.0004108 +2025-03-27 08:33:06,596 Epoch 1520/2000 +2025-03-27 08:35:40,013 Current Learning Rate: 0.0006545085 +2025-03-27 08:35:40,013 Train Loss: 0.0003281, Val Loss: 0.0004131 +2025-03-27 08:35:40,013 Epoch 1521/2000 +2025-03-27 08:38:13,388 Current Learning Rate: 0.0006619587 +2025-03-27 08:38:13,389 Train Loss: 0.0003294, Val Loss: 0.0004133 +2025-03-27 08:38:13,389 Epoch 1522/2000 +2025-03-27 08:40:48,335 Current Learning Rate: 0.0006693690 +2025-03-27 08:40:48,336 Train Loss: 0.0003297, Val Loss: 0.0004149 +2025-03-27 08:40:48,337 Epoch 1523/2000 +2025-03-27 08:43:21,382 Current Learning Rate: 0.0006767374 +2025-03-27 08:43:21,382 Train Loss: 0.0003307, Val Loss: 0.0004141 +2025-03-27 08:43:21,383 Epoch 1524/2000 +2025-03-27 08:45:55,957 Current Learning Rate: 0.0006840623 +2025-03-27 08:45:55,957 Train Loss: 0.0003309, Val Loss: 0.0004157 
+2025-03-27 08:45:55,958 Epoch 1525/2000 +2025-03-27 08:48:30,426 Current Learning Rate: 0.0006913417 +2025-03-27 08:48:30,427 Train Loss: 0.0003322, Val Loss: 0.0004144 +2025-03-27 08:48:30,427 Epoch 1526/2000 +2025-03-27 08:51:04,936 Current Learning Rate: 0.0006985739 +2025-03-27 08:51:04,936 Train Loss: 0.0003328, Val Loss: 0.0004209 +2025-03-27 08:51:04,936 Epoch 1527/2000 +2025-03-27 08:53:39,994 Current Learning Rate: 0.0007057572 +2025-03-27 08:53:39,994 Train Loss: 0.0003322, Val Loss: 0.0004183 +2025-03-27 08:53:39,995 Epoch 1528/2000 +2025-03-27 08:56:14,678 Current Learning Rate: 0.0007128896 +2025-03-27 08:56:14,679 Train Loss: 0.0003295, Val Loss: 0.0004135 +2025-03-27 08:56:14,679 Epoch 1529/2000 +2025-03-27 08:58:48,702 Current Learning Rate: 0.0007199696 +2025-03-27 08:58:48,703 Train Loss: 0.0003286, Val Loss: 0.0004125 +2025-03-27 08:58:48,703 Epoch 1530/2000 +2025-03-27 09:01:22,075 Current Learning Rate: 0.0007269952 +2025-03-27 09:01:22,075 Train Loss: 0.0003303, Val Loss: 0.0004135 +2025-03-27 09:01:22,076 Epoch 1531/2000 +2025-03-27 09:03:57,337 Current Learning Rate: 0.0007339649 +2025-03-27 09:03:57,338 Train Loss: 0.0003316, Val Loss: 0.0004162 +2025-03-27 09:03:57,338 Epoch 1532/2000 +2025-03-27 09:06:32,180 Current Learning Rate: 0.0007408768 +2025-03-27 09:06:32,181 Train Loss: 0.0003329, Val Loss: 0.0004185 +2025-03-27 09:06:32,181 Epoch 1533/2000 +2025-03-27 09:09:06,598 Current Learning Rate: 0.0007477293 +2025-03-27 09:09:06,599 Train Loss: 0.0003344, Val Loss: 0.0004207 +2025-03-27 09:09:06,599 Epoch 1534/2000 +2025-03-27 09:11:41,357 Current Learning Rate: 0.0007545207 +2025-03-27 09:11:41,357 Train Loss: 0.0003347, Val Loss: 0.0004220 +2025-03-27 09:11:41,357 Epoch 1535/2000 +2025-03-27 09:14:16,133 Current Learning Rate: 0.0007612493 +2025-03-27 09:14:16,134 Train Loss: 0.0003352, Val Loss: 0.0004207 +2025-03-27 09:14:16,134 Epoch 1536/2000 +2025-03-27 09:16:51,605 Current Learning Rate: 0.0007679134 +2025-03-27 09:16:51,606 
Train Loss: 0.0003350, Val Loss: 0.0004187 +2025-03-27 09:16:51,606 Epoch 1537/2000 +2025-03-27 09:19:24,379 Current Learning Rate: 0.0007745114 +2025-03-27 09:19:24,379 Train Loss: 0.0003327, Val Loss: 0.0004156 +2025-03-27 09:19:24,379 Epoch 1538/2000 +2025-03-27 09:21:59,040 Current Learning Rate: 0.0007810417 +2025-03-27 09:21:59,041 Train Loss: 0.0003322, Val Loss: 0.0004156 +2025-03-27 09:21:59,041 Epoch 1539/2000 +2025-03-27 09:24:33,387 Current Learning Rate: 0.0007875026 +2025-03-27 09:24:33,387 Train Loss: 0.0003344, Val Loss: 0.0004168 +2025-03-27 09:24:33,388 Epoch 1540/2000 +2025-03-27 09:27:08,032 Current Learning Rate: 0.0007938926 +2025-03-27 09:27:08,033 Train Loss: 0.0003363, Val Loss: 0.0004231 +2025-03-27 09:27:08,033 Epoch 1541/2000 +2025-03-27 09:29:43,673 Current Learning Rate: 0.0008002101 +2025-03-27 09:29:43,674 Train Loss: 0.0003370, Val Loss: 0.0004220 +2025-03-27 09:29:43,674 Epoch 1542/2000 +2025-03-27 09:32:16,733 Current Learning Rate: 0.0008064535 +2025-03-27 09:32:16,733 Train Loss: 0.0003380, Val Loss: 0.0004269 +2025-03-27 09:32:16,733 Epoch 1543/2000 +2025-03-27 09:34:50,848 Current Learning Rate: 0.0008126213 +2025-03-27 09:34:50,849 Train Loss: 0.0003392, Val Loss: 0.0004240 +2025-03-27 09:34:50,849 Epoch 1544/2000 +2025-03-27 09:37:23,827 Current Learning Rate: 0.0008187120 +2025-03-27 09:37:23,828 Train Loss: 0.0003386, Val Loss: 0.0004227 +2025-03-27 09:37:23,828 Epoch 1545/2000 +2025-03-27 09:39:59,050 Current Learning Rate: 0.0008247240 +2025-03-27 09:39:59,051 Train Loss: 0.0003406, Val Loss: 0.0004236 +2025-03-27 09:39:59,051 Epoch 1546/2000 +2025-03-27 09:42:32,031 Current Learning Rate: 0.0008306559 +2025-03-27 09:42:32,032 Train Loss: 0.0003432, Val Loss: 0.0004215 +2025-03-27 09:42:32,032 Epoch 1547/2000 +2025-03-27 09:45:06,228 Current Learning Rate: 0.0008365063 +2025-03-27 09:45:06,229 Train Loss: 0.0003412, Val Loss: 0.0004207 +2025-03-27 09:45:06,229 Epoch 1548/2000 +2025-03-27 09:47:40,443 Current Learning 
Rate: 0.0008422736 +2025-03-27 09:47:40,444 Train Loss: 0.0003353, Val Loss: 0.0004183 +2025-03-27 09:47:40,444 Epoch 1549/2000 +2025-03-27 09:50:14,733 Current Learning Rate: 0.0008479564 +2025-03-27 09:50:14,734 Train Loss: 0.0003353, Val Loss: 0.0004189 +2025-03-27 09:50:14,734 Epoch 1550/2000 +2025-03-27 09:52:47,797 Current Learning Rate: 0.0008535534 +2025-03-27 09:52:47,797 Train Loss: 0.0003381, Val Loss: 0.0004212 +2025-03-27 09:52:47,797 Epoch 1551/2000 +2025-03-27 09:55:20,547 Current Learning Rate: 0.0008590631 +2025-03-27 09:55:20,548 Train Loss: 0.0003396, Val Loss: 0.0004219 +2025-03-27 09:55:20,548 Epoch 1552/2000 +2025-03-27 09:57:53,518 Current Learning Rate: 0.0008644843 +2025-03-27 09:57:53,518 Train Loss: 0.0003403, Val Loss: 0.0004295 +2025-03-27 09:57:53,518 Epoch 1553/2000 +2025-03-27 10:00:26,867 Current Learning Rate: 0.0008698155 +2025-03-27 10:00:26,867 Train Loss: 0.0003402, Val Loss: 0.0004398 +2025-03-27 10:00:26,867 Epoch 1554/2000 +2025-03-27 10:03:00,257 Current Learning Rate: 0.0008750555 +2025-03-27 10:03:00,257 Train Loss: 0.0003430, Val Loss: 0.0004328 +2025-03-27 10:03:00,257 Epoch 1555/2000 +2025-03-27 10:05:35,563 Current Learning Rate: 0.0008802030 +2025-03-27 10:05:35,564 Train Loss: 0.0003421, Val Loss: 0.0004257 +2025-03-27 10:05:35,564 Epoch 1556/2000 +2025-03-27 10:08:10,701 Current Learning Rate: 0.0008852566 +2025-03-27 10:08:10,701 Train Loss: 0.0003379, Val Loss: 0.0004238 +2025-03-27 10:08:10,701 Epoch 1557/2000 +2025-03-27 10:10:42,997 Current Learning Rate: 0.0008902152 +2025-03-27 10:10:42,998 Train Loss: 0.0003388, Val Loss: 0.0004226 +2025-03-27 10:10:42,998 Epoch 1558/2000 +2025-03-27 10:13:15,895 Current Learning Rate: 0.0008950775 +2025-03-27 10:13:15,896 Train Loss: 0.0003414, Val Loss: 0.0004264 +2025-03-27 10:13:15,896 Epoch 1559/2000 +2025-03-27 10:15:51,116 Current Learning Rate: 0.0008998423 +2025-03-27 10:15:51,117 Train Loss: 0.0003424, Val Loss: 0.0004251 +2025-03-27 10:15:51,117 Epoch 1560/2000 
+2025-03-27 10:18:26,197 Current Learning Rate: 0.0009045085 +2025-03-27 10:18:26,197 Train Loss: 0.0003436, Val Loss: 0.0004262 +2025-03-27 10:18:26,198 Epoch 1561/2000 +2025-03-27 10:21:00,247 Current Learning Rate: 0.0009090749 +2025-03-27 10:21:00,247 Train Loss: 0.0003442, Val Loss: 0.0004358 +2025-03-27 10:21:00,247 Epoch 1562/2000 +2025-03-27 10:23:34,857 Current Learning Rate: 0.0009135403 +2025-03-27 10:23:34,858 Train Loss: 0.0003434, Val Loss: 0.0004355 +2025-03-27 10:23:34,858 Epoch 1563/2000 +2025-03-27 10:26:09,951 Current Learning Rate: 0.0009179037 +2025-03-27 10:26:09,951 Train Loss: 0.0003434, Val Loss: 0.0004294 +2025-03-27 10:26:09,952 Epoch 1564/2000 +2025-03-27 10:28:45,327 Current Learning Rate: 0.0009221640 +2025-03-27 10:28:45,328 Train Loss: 0.0003477, Val Loss: 0.0004303 +2025-03-27 10:28:45,328 Epoch 1565/2000 +2025-03-27 10:31:18,565 Current Learning Rate: 0.0009263201 +2025-03-27 10:31:18,566 Train Loss: 0.0003479, Val Loss: 0.0004339 +2025-03-27 10:31:18,566 Epoch 1566/2000 +2025-03-27 10:33:52,814 Current Learning Rate: 0.0009303710 +2025-03-27 10:33:52,815 Train Loss: 0.0003489, Val Loss: 0.0004324 +2025-03-27 10:33:52,815 Epoch 1567/2000 +2025-03-27 10:36:26,787 Current Learning Rate: 0.0009343158 +2025-03-27 10:36:26,787 Train Loss: 0.0003489, Val Loss: 0.0004275 +2025-03-27 10:36:26,787 Epoch 1568/2000 +2025-03-27 10:39:00,456 Current Learning Rate: 0.0009381533 +2025-03-27 10:39:00,456 Train Loss: 0.0003427, Val Loss: 0.0004236 +2025-03-27 10:39:00,456 Epoch 1569/2000 +2025-03-27 10:41:35,505 Current Learning Rate: 0.0009418828 +2025-03-27 10:41:35,505 Train Loss: 0.0003395, Val Loss: 0.0004248 +2025-03-27 10:41:35,506 Epoch 1570/2000 +2025-03-27 10:44:09,667 Current Learning Rate: 0.0009455033 +2025-03-27 10:44:09,668 Train Loss: 0.0003425, Val Loss: 0.0004247 +2025-03-27 10:44:09,668 Epoch 1571/2000 +2025-03-27 10:46:43,131 Current Learning Rate: 0.0009490138 +2025-03-27 10:46:43,131 Train Loss: 0.0003444, Val Loss: 0.0004255 
+2025-03-27 10:46:43,132 Epoch 1572/2000 +2025-03-27 10:49:16,943 Current Learning Rate: 0.0009524135 +2025-03-27 10:49:16,943 Train Loss: 0.0003454, Val Loss: 0.0004281 +2025-03-27 10:49:16,944 Epoch 1573/2000 +2025-03-27 10:51:51,272 Current Learning Rate: 0.0009557016 +2025-03-27 10:51:51,272 Train Loss: 0.0003465, Val Loss: 0.0004276 +2025-03-27 10:51:51,272 Epoch 1574/2000 +2025-03-27 10:54:25,739 Current Learning Rate: 0.0009588773 +2025-03-27 10:54:25,740 Train Loss: 0.0003484, Val Loss: 0.0004307 +2025-03-27 10:54:25,740 Epoch 1575/2000 +2025-03-27 10:56:59,663 Current Learning Rate: 0.0009619398 +2025-03-27 10:56:59,664 Train Loss: 0.0003451, Val Loss: 0.0004256 +2025-03-27 10:56:59,664 Epoch 1576/2000 +2025-03-27 10:59:34,407 Current Learning Rate: 0.0009648882 +2025-03-27 10:59:34,408 Train Loss: 0.0003414, Val Loss: 0.0004309 +2025-03-27 10:59:34,408 Epoch 1577/2000 +2025-03-27 11:02:09,334 Current Learning Rate: 0.0009677220 +2025-03-27 11:02:09,334 Train Loss: 0.0003449, Val Loss: 0.0004283 +2025-03-27 11:02:09,335 Epoch 1578/2000 +2025-03-27 11:04:43,285 Current Learning Rate: 0.0009704404 +2025-03-27 11:04:43,285 Train Loss: 0.0003458, Val Loss: 0.0004277 +2025-03-27 11:04:43,286 Epoch 1579/2000 +2025-03-27 11:07:17,336 Current Learning Rate: 0.0009730427 +2025-03-27 11:07:17,337 Train Loss: 0.0003467, Val Loss: 0.0004287 +2025-03-27 11:07:17,337 Epoch 1580/2000 +2025-03-27 11:09:50,517 Current Learning Rate: 0.0009755283 +2025-03-27 11:09:50,517 Train Loss: 0.0003468, Val Loss: 0.0004318 +2025-03-27 11:09:50,518 Epoch 1581/2000 +2025-03-27 11:12:23,972 Current Learning Rate: 0.0009778965 +2025-03-27 11:12:23,972 Train Loss: 0.0003500, Val Loss: 0.0004438 +2025-03-27 11:12:23,973 Epoch 1582/2000 +2025-03-27 11:14:57,276 Current Learning Rate: 0.0009801468 +2025-03-27 11:14:57,277 Train Loss: 0.0003502, Val Loss: 0.0004390 +2025-03-27 11:14:57,277 Epoch 1583/2000 +2025-03-27 11:17:29,963 Current Learning Rate: 0.0009822787 +2025-03-27 11:17:29,963 
Train Loss: 0.0003473, Val Loss: 0.0004336 +2025-03-27 11:17:29,964 Epoch 1584/2000 +2025-03-27 11:20:04,464 Current Learning Rate: 0.0009842916 +2025-03-27 11:20:04,465 Train Loss: 0.0003427, Val Loss: 0.0004321 +2025-03-27 11:20:04,465 Epoch 1585/2000 +2025-03-27 11:22:38,826 Current Learning Rate: 0.0009861850 +2025-03-27 11:22:38,827 Train Loss: 0.0003441, Val Loss: 0.0004283 +2025-03-27 11:22:38,827 Epoch 1586/2000 +2025-03-27 11:25:13,507 Current Learning Rate: 0.0009879584 +2025-03-27 11:25:13,507 Train Loss: 0.0003457, Val Loss: 0.0004304 +2025-03-27 11:25:13,508 Epoch 1587/2000 +2025-03-27 11:27:48,082 Current Learning Rate: 0.0009896114 +2025-03-27 11:27:48,083 Train Loss: 0.0003484, Val Loss: 0.0004297 +2025-03-27 11:27:48,083 Epoch 1588/2000 +2025-03-27 11:30:23,250 Current Learning Rate: 0.0009911436 +2025-03-27 11:30:23,250 Train Loss: 0.0003484, Val Loss: 0.0004324 +2025-03-27 11:30:23,251 Epoch 1589/2000 +2025-03-27 11:32:58,338 Current Learning Rate: 0.0009925547 +2025-03-27 11:32:58,339 Train Loss: 0.0003495, Val Loss: 0.0004353 +2025-03-27 11:32:58,339 Epoch 1590/2000 +2025-03-27 11:35:32,765 Current Learning Rate: 0.0009938442 +2025-03-27 11:35:32,766 Train Loss: 0.0003519, Val Loss: 0.0004377 +2025-03-27 11:35:32,766 Epoch 1591/2000 +2025-03-27 11:38:06,750 Current Learning Rate: 0.0009950118 +2025-03-27 11:38:06,751 Train Loss: 0.0003540, Val Loss: 0.0004380 +2025-03-27 11:38:06,751 Epoch 1592/2000 +2025-03-27 11:40:41,604 Current Learning Rate: 0.0009960574 +2025-03-27 11:40:41,605 Train Loss: 0.0003542, Val Loss: 0.0004316 +2025-03-27 11:40:41,605 Epoch 1593/2000 +2025-03-27 11:43:16,602 Current Learning Rate: 0.0009969805 +2025-03-27 11:43:16,602 Train Loss: 0.0003460, Val Loss: 0.0004264 +2025-03-27 11:43:16,603 Epoch 1594/2000 +2025-03-27 11:45:51,355 Current Learning Rate: 0.0009977810 +2025-03-27 11:45:51,355 Train Loss: 0.0003420, Val Loss: 0.0004283 +2025-03-27 11:45:51,355 Epoch 1595/2000 +2025-03-27 11:48:24,439 Current Learning 
Rate: 0.0009984587 +2025-03-27 11:48:24,440 Train Loss: 0.0003440, Val Loss: 0.0004278 +2025-03-27 11:48:24,440 Epoch 1596/2000 +2025-03-27 11:50:57,859 Current Learning Rate: 0.0009990134 +2025-03-27 11:50:57,860 Train Loss: 0.0003460, Val Loss: 0.0004270 +2025-03-27 11:50:57,860 Epoch 1597/2000 +2025-03-27 11:53:32,711 Current Learning Rate: 0.0009994449 +2025-03-27 11:53:32,712 Train Loss: 0.0003477, Val Loss: 0.0004350 +2025-03-27 11:53:32,712 Epoch 1598/2000 +2025-03-27 11:56:07,945 Current Learning Rate: 0.0009997533 +2025-03-27 11:56:07,945 Train Loss: 0.0003487, Val Loss: 0.0004349 +2025-03-27 11:56:07,945 Epoch 1599/2000 +2025-03-27 11:58:42,097 Current Learning Rate: 0.0009999383 +2025-03-27 11:58:42,097 Train Loss: 0.0003480, Val Loss: 0.0004442 +2025-03-27 11:58:42,097 Epoch 1600/2000 +2025-03-27 12:01:17,166 Current Learning Rate: 0.0010000000 +2025-03-27 12:01:17,167 Train Loss: 0.0003497, Val Loss: 0.0004277 +2025-03-27 12:01:17,167 Epoch 1601/2000 +2025-03-27 12:03:51,269 Current Learning Rate: 0.0009999383 +2025-03-27 12:03:51,270 Train Loss: 0.0003478, Val Loss: 0.0004293 +2025-03-27 12:03:51,270 Epoch 1602/2000 +2025-03-27 12:06:24,632 Current Learning Rate: 0.0009997533 +2025-03-27 12:06:24,633 Train Loss: 0.0003431, Val Loss: 0.0004262 +2025-03-27 12:06:24,634 Epoch 1603/2000 +2025-03-27 12:09:00,120 Current Learning Rate: 0.0009994449 +2025-03-27 12:09:00,121 Train Loss: 0.0003433, Val Loss: 0.0004275 +2025-03-27 12:09:00,121 Epoch 1604/2000 +2025-03-27 12:11:34,318 Current Learning Rate: 0.0009990134 +2025-03-27 12:11:34,319 Train Loss: 0.0003457, Val Loss: 0.0004271 +2025-03-27 12:11:34,319 Epoch 1605/2000 +2025-03-27 12:14:08,973 Current Learning Rate: 0.0009984587 +2025-03-27 12:14:08,974 Train Loss: 0.0003473, Val Loss: 0.0004315 +2025-03-27 12:14:08,974 Epoch 1606/2000 +2025-03-27 12:16:44,201 Current Learning Rate: 0.0009977810 +2025-03-27 12:16:44,202 Train Loss: 0.0003485, Val Loss: 0.0004319 +2025-03-27 12:16:44,202 Epoch 1607/2000 
+2025-03-27 12:19:18,464 Current Learning Rate: 0.0009969805 +2025-03-27 12:19:18,464 Train Loss: 0.0003488, Val Loss: 0.0004337 +2025-03-27 12:19:18,464 Epoch 1608/2000 +2025-03-27 12:21:52,572 Current Learning Rate: 0.0009960574 +2025-03-27 12:21:52,573 Train Loss: 0.0003498, Val Loss: 0.0004372 +2025-03-27 12:21:52,573 Epoch 1609/2000 +2025-03-27 12:24:26,664 Current Learning Rate: 0.0009950118 +2025-03-27 12:24:26,665 Train Loss: 0.0003527, Val Loss: 0.0004443 +2025-03-27 12:24:26,665 Epoch 1610/2000 +2025-03-27 12:27:00,596 Current Learning Rate: 0.0009938442 +2025-03-27 12:27:00,597 Train Loss: 0.0003544, Val Loss: 0.0004333 +2025-03-27 12:27:00,597 Epoch 1611/2000 +2025-03-27 12:29:34,939 Current Learning Rate: 0.0009925547 +2025-03-27 12:29:34,939 Train Loss: 0.0003530, Val Loss: 0.0004315 +2025-03-27 12:29:34,939 Epoch 1612/2000 +2025-03-27 12:32:09,225 Current Learning Rate: 0.0009911436 +2025-03-27 12:32:09,225 Train Loss: 0.0003473, Val Loss: 0.0004270 +2025-03-27 12:32:09,225 Epoch 1613/2000 +2025-03-27 12:34:43,937 Current Learning Rate: 0.0009896114 +2025-03-27 12:34:43,937 Train Loss: 0.0003412, Val Loss: 0.0004259 +2025-03-27 12:34:43,938 Epoch 1614/2000 +2025-03-27 12:37:18,078 Current Learning Rate: 0.0009879584 +2025-03-27 12:37:18,078 Train Loss: 0.0003426, Val Loss: 0.0004250 +2025-03-27 12:37:18,078 Epoch 1615/2000 +2025-03-27 12:39:52,656 Current Learning Rate: 0.0009861850 +2025-03-27 12:39:52,657 Train Loss: 0.0003438, Val Loss: 0.0004258 +2025-03-27 12:39:52,657 Epoch 1616/2000 +2025-03-27 12:42:27,084 Current Learning Rate: 0.0009842916 +2025-03-27 12:42:27,085 Train Loss: 0.0003447, Val Loss: 0.0004291 +2025-03-27 12:42:27,085 Epoch 1617/2000 +2025-03-27 12:45:02,148 Current Learning Rate: 0.0009822787 +2025-03-27 12:45:02,149 Train Loss: 0.0003458, Val Loss: 0.0004415 +2025-03-27 12:45:02,149 Epoch 1618/2000 +2025-03-27 12:47:36,484 Current Learning Rate: 0.0009801468 +2025-03-27 12:47:36,484 Train Loss: 0.0003469, Val Loss: 0.0004312 
+2025-03-27 12:47:36,484 Epoch 1619/2000 +2025-03-27 12:50:12,182 Current Learning Rate: 0.0009778965 +2025-03-27 12:50:12,182 Train Loss: 0.0003482, Val Loss: 0.0004332 +2025-03-27 12:50:12,183 Epoch 1620/2000 +2025-03-27 12:52:45,755 Current Learning Rate: 0.0009755283 +2025-03-27 12:52:45,755 Train Loss: 0.0003486, Val Loss: 0.0004374 +2025-03-27 12:52:45,756 Epoch 1621/2000 +2025-03-27 12:55:20,635 Current Learning Rate: 0.0009730427 +2025-03-27 12:55:20,635 Train Loss: 0.0003506, Val Loss: 0.0004315 +2025-03-27 12:55:20,635 Epoch 1622/2000 +2025-03-27 12:57:55,387 Current Learning Rate: 0.0009704404 +2025-03-27 12:57:55,387 Train Loss: 0.0003532, Val Loss: 0.0004317 +2025-03-27 12:57:55,388 Epoch 1623/2000 +2025-03-27 13:00:30,147 Current Learning Rate: 0.0009677220 +2025-03-27 13:00:30,148 Train Loss: 0.0003533, Val Loss: 0.0004321 +2025-03-27 13:00:30,148 Epoch 1624/2000 +2025-03-27 13:03:03,937 Current Learning Rate: 0.0009648882 +2025-03-27 13:03:03,937 Train Loss: 0.0003492, Val Loss: 0.0004290 +2025-03-27 13:03:03,937 Epoch 1625/2000 +2025-03-27 13:05:37,102 Current Learning Rate: 0.0009619398 +2025-03-27 13:05:37,103 Train Loss: 0.0003415, Val Loss: 0.0004246 +2025-03-27 13:05:37,103 Epoch 1626/2000 +2025-03-27 13:08:12,309 Current Learning Rate: 0.0009588773 +2025-03-27 13:08:12,310 Train Loss: 0.0003393, Val Loss: 0.0004242 +2025-03-27 13:08:12,310 Epoch 1627/2000 +2025-03-27 13:10:45,962 Current Learning Rate: 0.0009557016 +2025-03-27 13:10:45,962 Train Loss: 0.0003407, Val Loss: 0.0004269 +2025-03-27 13:10:45,962 Epoch 1628/2000 +2025-03-27 13:13:20,284 Current Learning Rate: 0.0009524135 +2025-03-27 13:13:20,285 Train Loss: 0.0003421, Val Loss: 0.0004281 +2025-03-27 13:13:20,285 Epoch 1629/2000 +2025-03-27 13:15:55,126 Current Learning Rate: 0.0009490138 +2025-03-27 13:15:55,126 Train Loss: 0.0003424, Val Loss: 0.0004284 +2025-03-27 13:15:55,126 Epoch 1630/2000 +2025-03-27 13:18:29,718 Current Learning Rate: 0.0009455033 +2025-03-27 13:18:29,719 
Train Loss: 0.0003432, Val Loss: 0.0004291 +2025-03-27 13:18:29,719 Epoch 1631/2000 +2025-03-27 13:21:04,872 Current Learning Rate: 0.0009418828 +2025-03-27 13:21:04,872 Train Loss: 0.0003439, Val Loss: 0.0004300 +2025-03-27 13:21:04,872 Epoch 1632/2000 +2025-03-27 13:23:39,498 Current Learning Rate: 0.0009381533 +2025-03-27 13:23:39,499 Train Loss: 0.0003451, Val Loss: 0.0004323 +2025-03-27 13:23:39,499 Epoch 1633/2000 +2025-03-27 13:26:12,562 Current Learning Rate: 0.0009343158 +2025-03-27 13:26:12,563 Train Loss: 0.0003462, Val Loss: 0.0004318 +2025-03-27 13:26:12,563 Epoch 1634/2000 +2025-03-27 13:28:45,581 Current Learning Rate: 0.0009303710 +2025-03-27 13:28:45,581 Train Loss: 0.0003455, Val Loss: 0.0004301 +2025-03-27 13:28:45,582 Epoch 1635/2000 +2025-03-27 13:31:18,756 Current Learning Rate: 0.0009263201 +2025-03-27 13:31:18,757 Train Loss: 0.0003462, Val Loss: 0.0004313 +2025-03-27 13:31:18,757 Epoch 1636/2000 +2025-03-27 13:33:53,222 Current Learning Rate: 0.0009221640 +2025-03-27 13:33:53,223 Train Loss: 0.0003490, Val Loss: 0.0004296 +2025-03-27 13:33:53,223 Epoch 1637/2000 +2025-03-27 13:36:28,525 Current Learning Rate: 0.0009179037 +2025-03-27 13:36:28,526 Train Loss: 0.0003530, Val Loss: 0.0004331 +2025-03-27 13:36:28,526 Epoch 1638/2000 +2025-03-27 13:39:03,002 Current Learning Rate: 0.0009135403 +2025-03-27 13:39:03,002 Train Loss: 0.0003528, Val Loss: 0.0004327 +2025-03-27 13:39:03,002 Epoch 1639/2000 +2025-03-27 13:41:37,717 Current Learning Rate: 0.0009090749 +2025-03-27 13:41:37,718 Train Loss: 0.0003476, Val Loss: 0.0004254 +2025-03-27 13:41:37,718 Epoch 1640/2000 +2025-03-27 13:44:12,950 Current Learning Rate: 0.0009045085 +2025-03-27 13:44:12,951 Train Loss: 0.0003390, Val Loss: 0.0004212 +2025-03-27 13:44:12,951 Epoch 1641/2000 +2025-03-27 13:46:48,140 Current Learning Rate: 0.0008998423 +2025-03-27 13:46:48,140 Train Loss: 0.0003354, Val Loss: 0.0004198 +2025-03-27 13:46:48,141 Epoch 1642/2000 +2025-03-27 13:49:23,065 Current Learning 
Rate: 0.0008950775 +2025-03-27 13:49:23,065 Train Loss: 0.0003358, Val Loss: 0.0004219 +2025-03-27 13:49:23,066 Epoch 1643/2000 +2025-03-27 13:51:55,926 Current Learning Rate: 0.0008902152 +2025-03-27 13:51:55,926 Train Loss: 0.0003361, Val Loss: 0.0004228 +2025-03-27 13:51:55,926 Epoch 1644/2000 +2025-03-27 13:54:28,993 Current Learning Rate: 0.0008852566 +2025-03-27 13:54:28,993 Train Loss: 0.0003375, Val Loss: 0.0004220 +2025-03-27 13:54:28,994 Epoch 1645/2000 +2025-03-27 13:57:01,929 Current Learning Rate: 0.0008802030 +2025-03-27 13:57:01,931 Train Loss: 0.0003385, Val Loss: 0.0004248 +2025-03-27 13:57:01,931 Epoch 1646/2000 +2025-03-27 13:59:36,475 Current Learning Rate: 0.0008750555 +2025-03-27 13:59:36,475 Train Loss: 0.0003390, Val Loss: 0.0004230 +2025-03-27 13:59:36,475 Epoch 1647/2000 +2025-03-27 14:02:10,699 Current Learning Rate: 0.0008698155 +2025-03-27 14:02:10,700 Train Loss: 0.0003392, Val Loss: 0.0004261 +2025-03-27 14:02:10,700 Epoch 1648/2000 +2025-03-27 14:04:44,545 Current Learning Rate: 0.0008644843 +2025-03-27 14:04:44,546 Train Loss: 0.0003399, Val Loss: 0.0004226 +2025-03-27 14:04:44,546 Epoch 1649/2000 +2025-03-27 14:07:19,406 Current Learning Rate: 0.0008590631 +2025-03-27 14:07:19,407 Train Loss: 0.0003401, Val Loss: 0.0004243 +2025-03-27 14:07:19,407 Epoch 1650/2000 +2025-03-27 14:09:53,895 Current Learning Rate: 0.0008535534 +2025-03-27 14:09:53,896 Train Loss: 0.0003411, Val Loss: 0.0004225 +2025-03-27 14:09:53,896 Epoch 1651/2000 +2025-03-27 14:12:28,374 Current Learning Rate: 0.0008479564 +2025-03-27 14:12:28,374 Train Loss: 0.0003431, Val Loss: 0.0004277 +2025-03-27 14:12:28,375 Epoch 1652/2000 +2025-03-27 14:15:02,868 Current Learning Rate: 0.0008422736 +2025-03-27 14:15:02,868 Train Loss: 0.0003439, Val Loss: 0.0004262 +2025-03-27 14:15:02,868 Epoch 1653/2000 +2025-03-27 14:17:36,972 Current Learning Rate: 0.0008365063 +2025-03-27 14:17:36,973 Train Loss: 0.0003460, Val Loss: 0.0004358 +2025-03-27 14:17:36,973 Epoch 1654/2000 
+2025-03-27 14:20:11,776 Current Learning Rate: 0.0008306559 +2025-03-27 14:20:11,777 Train Loss: 0.0003475, Val Loss: 0.0004333 +2025-03-27 14:20:11,777 Epoch 1655/2000 +2025-03-27 14:22:45,812 Current Learning Rate: 0.0008247240 +2025-03-27 14:22:45,812 Train Loss: 0.0003449, Val Loss: 0.0004285 +2025-03-27 14:22:45,813 Epoch 1656/2000 +2025-03-27 14:25:21,307 Current Learning Rate: 0.0008187120 +2025-03-27 14:25:21,307 Train Loss: 0.0003386, Val Loss: 0.0004204 +2025-03-27 14:25:21,307 Epoch 1657/2000 +2025-03-27 14:27:55,727 Current Learning Rate: 0.0008126213 +2025-03-27 14:27:55,728 Train Loss: 0.0003324, Val Loss: 0.0004168 +2025-03-27 14:27:55,728 Epoch 1658/2000 +2025-03-27 14:30:29,458 Current Learning Rate: 0.0008064535 +2025-03-27 14:30:29,459 Train Loss: 0.0003304, Val Loss: 0.0004157 +2025-03-27 14:30:29,459 Epoch 1659/2000 +2025-03-27 14:33:03,648 Current Learning Rate: 0.0008002101 +2025-03-27 14:33:03,648 Train Loss: 0.0003307, Val Loss: 0.0004166 +2025-03-27 14:33:03,648 Epoch 1660/2000 +2025-03-27 14:35:38,728 Current Learning Rate: 0.0007938926 +2025-03-27 14:35:38,728 Train Loss: 0.0003312, Val Loss: 0.0004185 +2025-03-27 14:35:38,728 Epoch 1661/2000 +2025-03-27 14:38:12,758 Current Learning Rate: 0.0007875026 +2025-03-27 14:38:12,758 Train Loss: 0.0003321, Val Loss: 0.0004191 +2025-03-27 14:38:12,759 Epoch 1662/2000 +2025-03-27 14:40:46,074 Current Learning Rate: 0.0007810417 +2025-03-27 14:40:46,075 Train Loss: 0.0003331, Val Loss: 0.0004182 +2025-03-27 14:40:46,075 Epoch 1663/2000 +2025-03-27 14:43:19,078 Current Learning Rate: 0.0007745114 +2025-03-27 14:43:19,079 Train Loss: 0.0003334, Val Loss: 0.0004187 +2025-03-27 14:43:19,079 Epoch 1664/2000 +2025-03-27 14:45:53,645 Current Learning Rate: 0.0007679134 +2025-03-27 14:45:53,646 Train Loss: 0.0003336, Val Loss: 0.0004186 +2025-03-27 14:45:53,646 Epoch 1665/2000 +2025-03-27 14:48:26,154 Current Learning Rate: 0.0007612493 +2025-03-27 14:48:26,155 Train Loss: 0.0003345, Val Loss: 0.0004195 
+2025-03-27 14:48:26,155 Epoch 1666/2000 +2025-03-27 14:50:59,508 Current Learning Rate: 0.0007545207 +2025-03-27 14:50:59,509 Train Loss: 0.0003355, Val Loss: 0.0004207 +2025-03-27 14:50:59,509 Epoch 1667/2000 +2025-03-27 14:53:32,890 Current Learning Rate: 0.0007477293 +2025-03-27 14:53:32,891 Train Loss: 0.0003368, Val Loss: 0.0004229 +2025-03-27 14:53:32,891 Epoch 1668/2000 +2025-03-27 14:56:08,543 Current Learning Rate: 0.0007408768 +2025-03-27 14:56:08,543 Train Loss: 0.0003382, Val Loss: 0.0004223 +2025-03-27 14:56:08,543 Epoch 1669/2000 +2025-03-27 14:58:42,019 Current Learning Rate: 0.0007339649 +2025-03-27 14:58:42,020 Train Loss: 0.0003394, Val Loss: 0.0004294 +2025-03-27 14:58:42,020 Epoch 1670/2000 +2025-03-27 15:01:15,930 Current Learning Rate: 0.0007269952 +2025-03-27 15:01:15,931 Train Loss: 0.0003409, Val Loss: 0.0004282 +2025-03-27 15:01:15,931 Epoch 1671/2000 +2025-03-27 15:03:50,678 Current Learning Rate: 0.0007199696 +2025-03-27 15:03:50,679 Train Loss: 0.0003421, Val Loss: 0.0004199 +2025-03-27 15:03:50,679 Epoch 1672/2000 +2025-03-27 15:06:24,336 Current Learning Rate: 0.0007128896 +2025-03-27 15:06:24,337 Train Loss: 0.0003402, Val Loss: 0.0004182 +2025-03-27 15:06:24,338 Epoch 1673/2000 +2025-03-27 15:08:58,839 Current Learning Rate: 0.0007057572 +2025-03-27 15:08:58,840 Train Loss: 0.0003351, Val Loss: 0.0004147 +2025-03-27 15:08:58,840 Epoch 1674/2000 +2025-03-27 15:11:33,132 Current Learning Rate: 0.0006985739 +2025-03-27 15:11:33,132 Train Loss: 0.0003286, Val Loss: 0.0004106 +2025-03-27 15:11:33,133 Epoch 1675/2000 +2025-03-27 15:14:09,201 Current Learning Rate: 0.0006913417 +2025-03-27 15:14:09,201 Train Loss: 0.0003254, Val Loss: 0.0004098 +2025-03-27 15:14:09,201 Epoch 1676/2000 +2025-03-27 15:16:42,661 Current Learning Rate: 0.0006840623 +2025-03-27 15:16:42,662 Train Loss: 0.0003244, Val Loss: 0.0004107 +2025-03-27 15:16:42,663 Epoch 1677/2000 +2025-03-27 15:19:18,433 Current Learning Rate: 0.0006767374 +2025-03-27 15:19:18,434 
Train Loss: 0.0003242, Val Loss: 0.0004100 +2025-03-27 15:19:18,434 Epoch 1678/2000 +2025-03-27 15:21:51,495 Current Learning Rate: 0.0006693690 +2025-03-27 15:21:51,495 Train Loss: 0.0003250, Val Loss: 0.0004121 +2025-03-27 15:21:51,496 Epoch 1679/2000 +2025-03-27 15:24:23,392 Current Learning Rate: 0.0006619587 +2025-03-27 15:24:23,392 Train Loss: 0.0003260, Val Loss: 0.0004110 +2025-03-27 15:24:23,393 Epoch 1680/2000 +2025-03-27 15:26:57,948 Current Learning Rate: 0.0006545085 +2025-03-27 15:26:57,948 Train Loss: 0.0003267, Val Loss: 0.0004119 +2025-03-27 15:26:57,949 Epoch 1681/2000 +2025-03-27 15:29:32,509 Current Learning Rate: 0.0006470202 +2025-03-27 15:29:32,510 Train Loss: 0.0003273, Val Loss: 0.0004119 +2025-03-27 15:29:32,510 Epoch 1682/2000 +2025-03-27 15:32:06,397 Current Learning Rate: 0.0006394956 +2025-03-27 15:32:06,398 Train Loss: 0.0003279, Val Loss: 0.0004118 +2025-03-27 15:32:06,398 Epoch 1683/2000 +2025-03-27 15:34:40,460 Current Learning Rate: 0.0006319365 +2025-03-27 15:34:40,461 Train Loss: 0.0003289, Val Loss: 0.0004150 +2025-03-27 15:34:40,461 Epoch 1684/2000 +2025-03-27 15:37:14,311 Current Learning Rate: 0.0006243449 +2025-03-27 15:37:14,311 Train Loss: 0.0003296, Val Loss: 0.0004148 +2025-03-27 15:37:14,312 Epoch 1685/2000 +2025-03-27 15:39:48,819 Current Learning Rate: 0.0006167227 +2025-03-27 15:39:48,819 Train Loss: 0.0003295, Val Loss: 0.0004212 +2025-03-27 15:39:48,820 Epoch 1686/2000 +2025-03-27 15:42:23,119 Current Learning Rate: 0.0006090716 +2025-03-27 15:42:23,119 Train Loss: 0.0003300, Val Loss: 0.0004147 +2025-03-27 15:42:23,120 Epoch 1687/2000 +2025-03-27 15:44:57,834 Current Learning Rate: 0.0006013936 +2025-03-27 15:44:57,834 Train Loss: 0.0003323, Val Loss: 0.0004138 +2025-03-27 15:44:57,835 Epoch 1688/2000 +2025-03-27 15:47:30,755 Current Learning Rate: 0.0005936907 +2025-03-27 15:47:30,756 Train Loss: 0.0003337, Val Loss: 0.0004148 +2025-03-27 15:47:30,756 Epoch 1689/2000 +2025-03-27 15:50:04,163 Current Learning 
Rate: 0.0005859646 +2025-03-27 15:50:04,163 Train Loss: 0.0003320, Val Loss: 0.0004128 +2025-03-27 15:50:04,164 Epoch 1690/2000 +2025-03-27 15:52:37,102 Current Learning Rate: 0.0005782172 +2025-03-27 15:52:37,103 Train Loss: 0.0003285, Val Loss: 0.0004106 +2025-03-27 15:52:37,103 Epoch 1691/2000 +2025-03-27 15:55:10,111 Current Learning Rate: 0.0005704506 +2025-03-27 15:55:10,111 Train Loss: 0.0003245, Val Loss: 0.0004082 +2025-03-27 15:55:10,111 Epoch 1692/2000 +2025-03-27 15:57:43,685 Current Learning Rate: 0.0005626666 +2025-03-27 15:57:43,685 Train Loss: 0.0003218, Val Loss: 0.0004058 +2025-03-27 15:57:43,686 Epoch 1693/2000 +2025-03-27 16:00:18,531 Current Learning Rate: 0.0005548672 +2025-03-27 16:00:18,532 Train Loss: 0.0003202, Val Loss: 0.0004049 +2025-03-27 16:00:18,532 Epoch 1694/2000 +2025-03-27 16:02:52,234 Current Learning Rate: 0.0005470542 +2025-03-27 16:02:52,234 Train Loss: 0.0003194, Val Loss: 0.0004051 +2025-03-27 16:02:52,234 Epoch 1695/2000 +2025-03-27 16:05:26,262 Current Learning Rate: 0.0005392295 +2025-03-27 16:05:26,263 Train Loss: 0.0003192, Val Loss: 0.0004048 +2025-03-27 16:05:26,263 Epoch 1696/2000 +2025-03-27 16:08:01,107 Current Learning Rate: 0.0005313953 +2025-03-27 16:08:01,108 Train Loss: 0.0003201, Val Loss: 0.0004060 +2025-03-27 16:08:01,108 Epoch 1697/2000 +2025-03-27 16:10:34,383 Current Learning Rate: 0.0005235532 +2025-03-27 16:10:34,384 Train Loss: 0.0003213, Val Loss: 0.0004062 +2025-03-27 16:10:34,384 Epoch 1698/2000 +2025-03-27 16:13:09,283 Current Learning Rate: 0.0005157054 +2025-03-27 16:13:09,283 Train Loss: 0.0003223, Val Loss: 0.0004084 +2025-03-27 16:13:09,283 Epoch 1699/2000 +2025-03-27 16:15:43,077 Current Learning Rate: 0.0005078537 +2025-03-27 16:15:43,078 Train Loss: 0.0003230, Val Loss: 0.0004106 +2025-03-27 16:15:43,078 Epoch 1700/2000 +2025-03-27 16:18:17,374 Current Learning Rate: 0.0005000000 +2025-03-27 16:18:17,375 Train Loss: 0.0003237, Val Loss: 0.0004091 +2025-03-27 16:18:17,375 Epoch 1701/2000 
+2025-03-27 16:20:49,952 Current Learning Rate: 0.0004921463 +2025-03-27 16:20:49,952 Train Loss: 0.0003254, Val Loss: 0.0004107 +2025-03-27 16:20:49,952 Epoch 1702/2000 +2025-03-27 16:23:25,529 Current Learning Rate: 0.0004842946 +2025-03-27 16:23:25,530 Train Loss: 0.0003272, Val Loss: 0.0004131 +2025-03-27 16:23:25,530 Epoch 1703/2000 +2025-03-27 16:25:59,726 Current Learning Rate: 0.0004764468 +2025-03-27 16:25:59,727 Train Loss: 0.0003275, Val Loss: 0.0004143 +2025-03-27 16:25:59,727 Epoch 1704/2000 +2025-03-27 16:28:34,867 Current Learning Rate: 0.0004686047 +2025-03-27 16:28:34,867 Train Loss: 0.0003260, Val Loss: 0.0004123 +2025-03-27 16:28:34,868 Epoch 1705/2000 +2025-03-27 16:31:09,883 Current Learning Rate: 0.0004607705 +2025-03-27 16:31:09,884 Train Loss: 0.0003235, Val Loss: 0.0004085 +2025-03-27 16:31:09,884 Epoch 1706/2000 +2025-03-27 16:33:44,461 Current Learning Rate: 0.0004529458 +2025-03-27 16:33:44,462 Train Loss: 0.0003206, Val Loss: 0.0004043 +2025-03-27 16:33:44,462 Epoch 1707/2000 +2025-03-27 16:36:18,657 Current Learning Rate: 0.0004451328 +2025-03-27 16:36:18,658 Train Loss: 0.0003181, Val Loss: 0.0004013 +2025-03-27 16:36:18,658 Epoch 1708/2000 +2025-03-27 16:38:52,592 Current Learning Rate: 0.0004373334 +2025-03-27 16:38:52,593 Train Loss: 0.0003166, Val Loss: 0.0004000 +2025-03-27 16:38:52,593 Epoch 1709/2000 +2025-03-27 16:41:27,460 Current Learning Rate: 0.0004295494 +2025-03-27 16:41:27,460 Train Loss: 0.0003156, Val Loss: 0.0003991 +2025-03-27 16:41:27,461 Epoch 1710/2000 +2025-03-27 16:44:00,928 Current Learning Rate: 0.0004217828 +2025-03-27 16:44:00,928 Train Loss: 0.0003150, Val Loss: 0.0003986 +2025-03-27 16:44:00,928 Epoch 1711/2000 +2025-03-27 16:46:34,975 Current Learning Rate: 0.0004140354 +2025-03-27 16:46:34,976 Train Loss: 0.0003147, Val Loss: 0.0003985 +2025-03-27 16:46:34,976 Epoch 1712/2000 +2025-03-27 16:49:09,095 Current Learning Rate: 0.0004063093 +2025-03-27 16:49:09,096 Train Loss: 0.0003149, Val Loss: 0.0003997 
+2025-03-27 16:49:09,096 Epoch 1713/2000 +2025-03-27 16:51:43,006 Current Learning Rate: 0.0003986064 +2025-03-27 16:51:43,007 Train Loss: 0.0003162, Val Loss: 0.0004000 +2025-03-27 16:51:43,007 Epoch 1714/2000 +2025-03-27 16:54:17,112 Current Learning Rate: 0.0003909284 +2025-03-27 16:54:17,112 Train Loss: 0.0003174, Val Loss: 0.0004043 +2025-03-27 16:54:17,113 Epoch 1715/2000 +2025-03-27 16:56:51,417 Current Learning Rate: 0.0003832773 +2025-03-27 16:56:51,418 Train Loss: 0.0003181, Val Loss: 0.0004063 +2025-03-27 16:56:51,418 Epoch 1716/2000 +2025-03-27 16:59:26,118 Current Learning Rate: 0.0003756551 +2025-03-27 16:59:26,119 Train Loss: 0.0003194, Val Loss: 0.0004082 +2025-03-27 16:59:26,119 Epoch 1717/2000 +2025-03-27 17:02:00,701 Current Learning Rate: 0.0003680635 +2025-03-27 17:02:00,701 Train Loss: 0.0003206, Val Loss: 0.0004056 +2025-03-27 17:02:00,702 Epoch 1718/2000 +2025-03-27 17:04:35,271 Current Learning Rate: 0.0003605044 +2025-03-27 17:04:35,272 Train Loss: 0.0003197, Val Loss: 0.0004034 +2025-03-27 17:04:35,272 Epoch 1719/2000 +2025-03-27 17:07:10,284 Current Learning Rate: 0.0003529798 +2025-03-27 17:07:10,285 Train Loss: 0.0003180, Val Loss: 0.0004011 +2025-03-27 17:07:10,286 Epoch 1720/2000 +2025-03-27 17:09:46,609 Current Learning Rate: 0.0003454915 +2025-03-27 17:09:46,610 Train Loss: 0.0003164, Val Loss: 0.0003996 +2025-03-27 17:09:46,610 Epoch 1721/2000 +2025-03-27 17:12:18,818 Current Learning Rate: 0.0003380413 +2025-03-27 17:12:18,818 Train Loss: 0.0003148, Val Loss: 0.0003988 +2025-03-27 17:12:18,819 Epoch 1722/2000 +2025-03-27 17:14:52,951 Current Learning Rate: 0.0003306310 +2025-03-27 17:14:52,952 Train Loss: 0.0003135, Val Loss: 0.0003979 +2025-03-27 17:14:52,952 Epoch 1723/2000 +2025-03-27 17:17:27,583 Current Learning Rate: 0.0003232626 +2025-03-27 17:17:27,583 Train Loss: 0.0003126, Val Loss: 0.0003973 +2025-03-27 17:17:27,583 Epoch 1724/2000 +2025-03-27 17:20:02,297 Current Learning Rate: 0.0003159377 +2025-03-27 17:20:02,297 
Train Loss: 0.0003119, Val Loss: 0.0003969 +2025-03-27 17:20:02,298 Epoch 1725/2000 +2025-03-27 17:22:34,918 Current Learning Rate: 0.0003086583 +2025-03-27 17:22:34,918 Train Loss: 0.0003116, Val Loss: 0.0003969 +2025-03-27 17:22:34,918 Epoch 1726/2000 +2025-03-27 17:25:08,638 Current Learning Rate: 0.0003014261 +2025-03-27 17:25:08,639 Train Loss: 0.0003116, Val Loss: 0.0003971 +2025-03-27 17:25:08,639 Epoch 1727/2000 +2025-03-27 17:27:42,977 Current Learning Rate: 0.0002942428 +2025-03-27 17:27:42,978 Train Loss: 0.0003125, Val Loss: 0.0003975 +2025-03-27 17:27:42,978 Epoch 1728/2000 +2025-03-27 17:30:17,339 Current Learning Rate: 0.0002871104 +2025-03-27 17:30:17,340 Train Loss: 0.0003136, Val Loss: 0.0003960 +2025-03-27 17:30:17,340 Epoch 1729/2000 +2025-03-27 17:32:51,121 Current Learning Rate: 0.0002800304 +2025-03-27 17:32:51,121 Train Loss: 0.0003145, Val Loss: 0.0003945 +2025-03-27 17:32:51,121 Epoch 1730/2000 +2025-03-27 17:35:25,631 Current Learning Rate: 0.0002730048 +2025-03-27 17:35:25,632 Train Loss: 0.0003146, Val Loss: 0.0003947 +2025-03-27 17:35:25,632 Epoch 1731/2000 +2025-03-27 17:38:00,553 Current Learning Rate: 0.0002660351 +2025-03-27 17:38:00,553 Train Loss: 0.0003135, Val Loss: 0.0003938 +2025-03-27 17:38:00,553 Epoch 1732/2000 +2025-03-27 17:40:34,264 Current Learning Rate: 0.0002591232 +2025-03-27 17:40:34,265 Train Loss: 0.0003124, Val Loss: 0.0003927 +2025-03-27 17:40:34,265 Epoch 1733/2000 +2025-03-27 17:43:08,285 Current Learning Rate: 0.0002522707 +2025-03-27 17:43:08,285 Train Loss: 0.0003113, Val Loss: 0.0003917 +2025-03-27 17:43:08,285 Epoch 1734/2000 +2025-03-27 17:45:43,192 Current Learning Rate: 0.0002454793 +2025-03-27 17:45:43,193 Train Loss: 0.0003104, Val Loss: 0.0003908 +2025-03-27 17:45:43,193 Epoch 1735/2000 +2025-03-27 17:48:16,049 Current Learning Rate: 0.0002387507 +2025-03-27 17:48:16,050 Train Loss: 0.0003096, Val Loss: 0.0003901 +2025-03-27 17:48:16,050 Epoch 1736/2000 +2025-03-27 17:50:49,029 Current Learning 
Rate: 0.0002320866 +2025-03-27 17:50:49,029 Train Loss: 0.0003090, Val Loss: 0.0003897 +2025-03-27 17:50:49,029 Epoch 1737/2000 +2025-03-27 17:53:22,598 Current Learning Rate: 0.0002254886 +2025-03-27 17:53:22,598 Train Loss: 0.0003086, Val Loss: 0.0003895 +2025-03-27 17:53:22,598 Epoch 1738/2000 +2025-03-27 17:55:57,220 Current Learning Rate: 0.0002189583 +2025-03-27 17:55:57,220 Train Loss: 0.0003083, Val Loss: 0.0003895 +2025-03-27 17:55:57,221 Epoch 1739/2000 +2025-03-27 17:58:31,727 Current Learning Rate: 0.0002124974 +2025-03-27 17:58:31,728 Train Loss: 0.0003082, Val Loss: 0.0003897 +2025-03-27 17:58:31,728 Epoch 1740/2000 +2025-03-27 18:01:06,117 Current Learning Rate: 0.0002061074 +2025-03-27 18:01:06,118 Train Loss: 0.0003086, Val Loss: 0.0003914 +2025-03-27 18:01:06,118 Epoch 1741/2000 +2025-03-27 18:03:40,723 Current Learning Rate: 0.0001997899 +2025-03-27 18:03:40,723 Train Loss: 0.0003093, Val Loss: 0.0003905 +2025-03-27 18:03:40,723 Epoch 1742/2000 +2025-03-27 18:06:15,609 Current Learning Rate: 0.0001935465 +2025-03-27 18:06:15,616 Train Loss: 0.0003092, Val Loss: 0.0003904 +2025-03-27 18:06:15,617 Epoch 1743/2000 +2025-03-27 18:08:49,750 Current Learning Rate: 0.0001873787 +2025-03-27 18:08:49,750 Train Loss: 0.0003084, Val Loss: 0.0003897 +2025-03-27 18:08:49,751 Epoch 1744/2000 +2025-03-27 18:11:24,594 Current Learning Rate: 0.0001812880 +2025-03-27 18:11:24,595 Train Loss: 0.0003076, Val Loss: 0.0003890 +2025-03-27 18:11:24,595 Epoch 1745/2000 +2025-03-27 18:13:58,455 Current Learning Rate: 0.0001752760 +2025-03-27 18:13:58,456 Train Loss: 0.0003070, Val Loss: 0.0003886 +2025-03-27 18:13:58,456 Epoch 1746/2000 +2025-03-27 18:16:32,878 Current Learning Rate: 0.0001693441 +2025-03-27 18:16:32,879 Train Loss: 0.0003065, Val Loss: 0.0003882 +2025-03-27 18:16:32,879 Epoch 1747/2000 +2025-03-27 18:19:07,284 Current Learning Rate: 0.0001634937 +2025-03-27 18:19:07,285 Train Loss: 0.0003062, Val Loss: 0.0003880 +2025-03-27 18:19:07,285 Epoch 1748/2000 
+2025-03-27 18:21:41,139 Current Learning Rate: 0.0001577264 +2025-03-27 18:21:41,139 Train Loss: 0.0003059, Val Loss: 0.0003879 +2025-03-27 18:21:41,139 Epoch 1749/2000 +2025-03-27 18:24:14,418 Current Learning Rate: 0.0001520436 +2025-03-27 18:24:14,419 Train Loss: 0.0003058, Val Loss: 0.0003877 +2025-03-27 18:24:14,419 Epoch 1750/2000 +2025-03-27 18:26:47,750 Current Learning Rate: 0.0001464466 +2025-03-27 18:26:47,751 Train Loss: 0.0003057, Val Loss: 0.0003869 +2025-03-27 18:26:47,751 Epoch 1751/2000 +2025-03-27 18:29:22,277 Current Learning Rate: 0.0001409369 +2025-03-27 18:29:22,277 Train Loss: 0.0003055, Val Loss: 0.0003864 +2025-03-27 18:29:22,278 Epoch 1752/2000 +2025-03-27 18:31:56,559 Current Learning Rate: 0.0001355157 +2025-03-27 18:31:56,560 Train Loss: 0.0003050, Val Loss: 0.0003864 +2025-03-27 18:31:56,560 Epoch 1753/2000 +2025-03-27 18:34:30,581 Current Learning Rate: 0.0001301845 +2025-03-27 18:34:30,582 Train Loss: 0.0003046, Val Loss: 0.0003863 +2025-03-27 18:34:30,583 Epoch 1754/2000 +2025-03-27 18:37:04,968 Current Learning Rate: 0.0001249445 +2025-03-27 18:37:04,969 Train Loss: 0.0003042, Val Loss: 0.0003860 +2025-03-27 18:37:04,969 Epoch 1755/2000 +2025-03-27 18:39:39,438 Current Learning Rate: 0.0001197970 +2025-03-27 18:39:39,438 Train Loss: 0.0003038, Val Loss: 0.0003858 +2025-03-27 18:39:39,440 Epoch 1756/2000 +2025-03-27 18:42:14,167 Current Learning Rate: 0.0001147434 +2025-03-27 18:42:14,168 Train Loss: 0.0003036, Val Loss: 0.0003856 +2025-03-27 18:42:14,168 Epoch 1757/2000 +2025-03-27 18:44:47,929 Current Learning Rate: 0.0001097848 +2025-03-27 18:44:47,930 Train Loss: 0.0003033, Val Loss: 0.0003855 +2025-03-27 18:44:47,930 Epoch 1758/2000 +2025-03-27 18:47:23,284 Current Learning Rate: 0.0001049225 +2025-03-27 18:47:23,284 Train Loss: 0.0003031, Val Loss: 0.0003851 +2025-03-27 18:47:23,285 Epoch 1759/2000 +2025-03-27 18:49:56,327 Current Learning Rate: 0.0001001577 +2025-03-27 18:49:56,380 Train Loss: 0.0003028, Val Loss: 0.0003844 
+2025-03-27 18:49:56,381 Epoch 1760/2000 +2025-03-27 18:52:30,739 Current Learning Rate: 0.0000954915 +2025-03-27 18:52:30,796 Train Loss: 0.0003025, Val Loss: 0.0003839 +2025-03-27 18:52:30,797 Epoch 1761/2000 +2025-03-27 18:55:03,846 Current Learning Rate: 0.0000909251 +2025-03-27 18:55:03,921 Train Loss: 0.0003022, Val Loss: 0.0003837 +2025-03-27 18:55:03,921 Epoch 1762/2000 +2025-03-27 18:57:37,435 Current Learning Rate: 0.0000864597 +2025-03-27 18:57:37,499 Train Loss: 0.0003019, Val Loss: 0.0003836 +2025-03-27 18:57:37,499 Epoch 1763/2000 +2025-03-27 19:00:09,770 Current Learning Rate: 0.0000820963 +2025-03-27 19:00:09,829 Train Loss: 0.0003017, Val Loss: 0.0003834 +2025-03-27 19:00:09,829 Epoch 1764/2000 +2025-03-27 19:02:43,477 Current Learning Rate: 0.0000778360 +2025-03-27 19:02:43,544 Train Loss: 0.0003014, Val Loss: 0.0003832 +2025-03-27 19:02:43,545 Epoch 1765/2000 +2025-03-27 19:05:19,519 Current Learning Rate: 0.0000736799 +2025-03-27 19:05:19,594 Train Loss: 0.0003012, Val Loss: 0.0003830 +2025-03-27 19:05:19,594 Epoch 1766/2000 +2025-03-27 19:07:52,993 Current Learning Rate: 0.0000696290 +2025-03-27 19:07:53,048 Train Loss: 0.0003009, Val Loss: 0.0003827 +2025-03-27 19:07:53,048 Epoch 1767/2000 +2025-03-27 19:10:27,418 Current Learning Rate: 0.0000656842 +2025-03-27 19:10:27,478 Train Loss: 0.0003007, Val Loss: 0.0003823 +2025-03-27 19:10:27,478 Epoch 1768/2000 +2025-03-27 19:13:01,880 Current Learning Rate: 0.0000618467 +2025-03-27 19:13:01,960 Train Loss: 0.0003005, Val Loss: 0.0003819 +2025-03-27 19:13:01,961 Epoch 1769/2000 +2025-03-27 19:15:35,050 Current Learning Rate: 0.0000581172 +2025-03-27 19:15:35,108 Train Loss: 0.0003002, Val Loss: 0.0003816 +2025-03-27 19:15:35,108 Epoch 1770/2000 +2025-03-27 19:18:09,414 Current Learning Rate: 0.0000544967 +2025-03-27 19:18:09,514 Train Loss: 0.0003000, Val Loss: 0.0003813 +2025-03-27 19:18:09,515 Epoch 1771/2000 +2025-03-27 19:20:42,008 Current Learning Rate: 0.0000509862 +2025-03-27 19:20:42,079 
Train Loss: 0.0002998, Val Loss: 0.0003811 +2025-03-27 19:20:42,080 Epoch 1772/2000 +2025-03-27 19:23:16,490 Current Learning Rate: 0.0000475865 +2025-03-27 19:23:16,558 Train Loss: 0.0002996, Val Loss: 0.0003810 +2025-03-27 19:23:16,558 Epoch 1773/2000 +2025-03-27 19:25:50,526 Current Learning Rate: 0.0000442984 +2025-03-27 19:25:50,587 Train Loss: 0.0002994, Val Loss: 0.0003809 +2025-03-27 19:25:50,588 Epoch 1774/2000 +2025-03-27 19:28:24,597 Current Learning Rate: 0.0000411227 +2025-03-27 19:28:24,660 Train Loss: 0.0002992, Val Loss: 0.0003808 +2025-03-27 19:28:24,661 Epoch 1775/2000 +2025-03-27 19:30:58,321 Current Learning Rate: 0.0000380602 +2025-03-27 19:30:58,389 Train Loss: 0.0002990, Val Loss: 0.0003806 +2025-03-27 19:30:58,389 Epoch 1776/2000 +2025-03-27 19:33:32,059 Current Learning Rate: 0.0000351118 +2025-03-27 19:33:32,136 Train Loss: 0.0002988, Val Loss: 0.0003805 +2025-03-27 19:33:32,137 Epoch 1777/2000 +2025-03-27 19:36:05,894 Current Learning Rate: 0.0000322780 +2025-03-27 19:36:05,968 Train Loss: 0.0002986, Val Loss: 0.0003803 +2025-03-27 19:36:05,968 Epoch 1778/2000 +2025-03-27 19:38:40,154 Current Learning Rate: 0.0000295596 +2025-03-27 19:38:40,221 Train Loss: 0.0002985, Val Loss: 0.0003801 +2025-03-27 19:38:40,221 Epoch 1779/2000 +2025-03-27 19:41:14,599 Current Learning Rate: 0.0000269573 +2025-03-27 19:41:14,673 Train Loss: 0.0002983, Val Loss: 0.0003799 +2025-03-27 19:41:14,673 Epoch 1780/2000 +2025-03-27 19:43:47,812 Current Learning Rate: 0.0000244717 +2025-03-27 19:43:47,870 Train Loss: 0.0002981, Val Loss: 0.0003797 +2025-03-27 19:43:47,871 Epoch 1781/2000 +2025-03-27 19:46:22,340 Current Learning Rate: 0.0000221035 +2025-03-27 19:46:22,403 Train Loss: 0.0002980, Val Loss: 0.0003795 +2025-03-27 19:46:22,403 Epoch 1782/2000 +2025-03-27 19:48:56,974 Current Learning Rate: 0.0000198532 +2025-03-27 19:48:57,039 Train Loss: 0.0002978, Val Loss: 0.0003792 +2025-03-27 19:48:57,039 Epoch 1783/2000 +2025-03-27 19:51:31,277 Current Learning 
Rate: 0.0000177213 +2025-03-27 19:51:31,329 Train Loss: 0.0002977, Val Loss: 0.0003791 +2025-03-27 19:51:31,329 Epoch 1784/2000 +2025-03-27 19:54:05,371 Current Learning Rate: 0.0000157084 +2025-03-27 19:54:05,449 Train Loss: 0.0002976, Val Loss: 0.0003790 +2025-03-27 19:54:05,449 Epoch 1785/2000 +2025-03-27 19:56:39,489 Current Learning Rate: 0.0000138150 +2025-03-27 19:56:39,543 Train Loss: 0.0002974, Val Loss: 0.0003789 +2025-03-27 19:56:39,544 Epoch 1786/2000 +2025-03-27 19:59:14,207 Current Learning Rate: 0.0000120416 +2025-03-27 19:59:14,264 Train Loss: 0.0002973, Val Loss: 0.0003789 +2025-03-27 19:59:14,264 Epoch 1787/2000 +2025-03-27 20:01:48,183 Current Learning Rate: 0.0000103886 +2025-03-27 20:01:48,246 Train Loss: 0.0002972, Val Loss: 0.0003788 +2025-03-27 20:01:48,246 Epoch 1788/2000 +2025-03-27 20:04:22,149 Current Learning Rate: 0.0000088564 +2025-03-27 20:04:22,206 Train Loss: 0.0002971, Val Loss: 0.0003786 +2025-03-27 20:04:22,206 Epoch 1789/2000 +2025-03-27 20:06:54,838 Current Learning Rate: 0.0000074453 +2025-03-27 20:06:54,901 Train Loss: 0.0002970, Val Loss: 0.0003785 +2025-03-27 20:06:54,902 Epoch 1790/2000 +2025-03-27 20:09:29,902 Current Learning Rate: 0.0000061558 +2025-03-27 20:09:29,967 Train Loss: 0.0002969, Val Loss: 0.0003784 +2025-03-27 20:09:29,967 Epoch 1791/2000 +2025-03-27 20:12:03,907 Current Learning Rate: 0.0000049882 +2025-03-27 20:12:03,984 Train Loss: 0.0002968, Val Loss: 0.0003783 +2025-03-27 20:12:03,984 Epoch 1792/2000 +2025-03-27 20:14:38,688 Current Learning Rate: 0.0000039426 +2025-03-27 20:14:38,742 Train Loss: 0.0002967, Val Loss: 0.0003783 +2025-03-27 20:14:38,742 Epoch 1793/2000 +2025-03-27 20:17:12,649 Current Learning Rate: 0.0000030195 +2025-03-27 20:17:12,747 Train Loss: 0.0002966, Val Loss: 0.0003782 +2025-03-27 20:17:12,748 Epoch 1794/2000 +2025-03-27 20:19:47,163 Current Learning Rate: 0.0000022190 +2025-03-27 20:19:47,219 Train Loss: 0.0002966, Val Loss: 0.0003782 +2025-03-27 20:19:47,219 Epoch 1795/2000 
+2025-03-27 20:22:20,481 Current Learning Rate: 0.0000015413 +2025-03-27 20:22:20,551 Train Loss: 0.0002965, Val Loss: 0.0003781 +2025-03-27 20:22:20,552 Epoch 1796/2000 +2025-03-27 20:24:54,794 Current Learning Rate: 0.0000009866 +2025-03-27 20:24:54,850 Train Loss: 0.0002964, Val Loss: 0.0003781 +2025-03-27 20:24:54,850 Epoch 1797/2000 +2025-03-27 20:27:29,417 Current Learning Rate: 0.0000005551 +2025-03-27 20:27:29,491 Train Loss: 0.0002964, Val Loss: 0.0003781 +2025-03-27 20:27:29,491 Epoch 1798/2000 +2025-03-27 20:30:03,951 Current Learning Rate: 0.0000002467 +2025-03-27 20:30:04,020 Train Loss: 0.0002964, Val Loss: 0.0003781 +2025-03-27 20:30:04,020 Epoch 1799/2000 +2025-03-27 20:32:38,176 Current Learning Rate: 0.0000000617 +2025-03-27 20:32:38,230 Train Loss: 0.0002963, Val Loss: 0.0003781 +2025-03-27 20:32:38,230 Epoch 1800/2000 +2025-03-27 20:35:12,648 Current Learning Rate: 0.0000000000 +2025-03-27 20:35:12,710 Train Loss: 0.0002963, Val Loss: 0.0003781 +2025-03-27 20:35:12,710 Epoch 1801/2000 +2025-03-27 20:37:45,584 Current Learning Rate: 0.0000000617 +2025-03-27 20:37:45,584 Train Loss: 0.0002963, Val Loss: 0.0003781 +2025-03-27 20:37:45,584 Epoch 1802/2000 +2025-03-27 20:40:19,920 Current Learning Rate: 0.0000002467 +2025-03-27 20:40:19,984 Train Loss: 0.0002963, Val Loss: 0.0003781 +2025-03-27 20:40:19,985 Epoch 1803/2000 +2025-03-27 20:42:54,533 Current Learning Rate: 0.0000005551 +2025-03-27 20:42:54,534 Train Loss: 0.0002963, Val Loss: 0.0003781 +2025-03-27 20:42:54,534 Epoch 1804/2000 +2025-03-27 20:45:28,678 Current Learning Rate: 0.0000009866 +2025-03-27 20:45:28,679 Train Loss: 0.0002964, Val Loss: 0.0003781 +2025-03-27 20:45:28,679 Epoch 1805/2000 +2025-03-27 20:48:03,097 Current Learning Rate: 0.0000015413 +2025-03-27 20:48:03,097 Train Loss: 0.0002964, Val Loss: 0.0003781 +2025-03-27 20:48:03,097 Epoch 1806/2000 +2025-03-27 20:50:37,810 Current Learning Rate: 0.0000022190 +2025-03-27 20:50:37,810 Train Loss: 0.0002964, Val Loss: 0.0003781 
+2025-03-27 20:50:37,810 Epoch 1807/2000 +2025-03-27 20:53:12,867 Current Learning Rate: 0.0000030195 +2025-03-27 20:53:12,867 Train Loss: 0.0002965, Val Loss: 0.0003781 +2025-03-27 20:53:12,867 Epoch 1808/2000 +2025-03-27 20:55:46,919 Current Learning Rate: 0.0000039426 +2025-03-27 20:55:46,919 Train Loss: 0.0002965, Val Loss: 0.0003782 +2025-03-27 20:55:46,920 Epoch 1809/2000 +2025-03-27 20:58:21,569 Current Learning Rate: 0.0000049882 +2025-03-27 20:58:21,570 Train Loss: 0.0002966, Val Loss: 0.0003782 +2025-03-27 20:58:21,570 Epoch 1810/2000 +2025-03-27 21:00:55,930 Current Learning Rate: 0.0000061558 +2025-03-27 21:00:55,930 Train Loss: 0.0002966, Val Loss: 0.0003782 +2025-03-27 21:00:55,930 Epoch 1811/2000 +2025-03-27 21:03:30,464 Current Learning Rate: 0.0000074453 +2025-03-27 21:03:30,464 Train Loss: 0.0002967, Val Loss: 0.0003783 +2025-03-27 21:03:30,464 Epoch 1812/2000 +2025-03-27 21:06:04,434 Current Learning Rate: 0.0000088564 +2025-03-27 21:06:04,434 Train Loss: 0.0002967, Val Loss: 0.0003783 +2025-03-27 21:06:04,434 Epoch 1813/2000 +2025-03-27 21:08:38,905 Current Learning Rate: 0.0000103886 +2025-03-27 21:08:38,906 Train Loss: 0.0002968, Val Loss: 0.0003784 +2025-03-27 21:08:38,906 Epoch 1814/2000 +2025-03-27 21:11:12,942 Current Learning Rate: 0.0000120416 +2025-03-27 21:11:12,943 Train Loss: 0.0002969, Val Loss: 0.0003786 +2025-03-27 21:11:12,943 Epoch 1815/2000 +2025-03-27 21:13:46,628 Current Learning Rate: 0.0000138150 +2025-03-27 21:13:46,629 Train Loss: 0.0002969, Val Loss: 0.0003786 +2025-03-27 21:13:46,629 Epoch 1816/2000 +2025-03-27 21:16:21,224 Current Learning Rate: 0.0000157084 +2025-03-27 21:16:21,225 Train Loss: 0.0002970, Val Loss: 0.0003787 +2025-03-27 21:16:21,225 Epoch 1817/2000 +2025-03-27 21:18:55,101 Current Learning Rate: 0.0000177213 +2025-03-27 21:18:55,101 Train Loss: 0.0002971, Val Loss: 0.0003787 +2025-03-27 21:18:55,102 Epoch 1818/2000 +2025-03-27 21:21:30,073 Current Learning Rate: 0.0000198532 +2025-03-27 21:21:30,074 
Train Loss: 0.0002972, Val Loss: 0.0003788 +2025-03-27 21:21:30,074 Epoch 1819/2000 +2025-03-27 21:24:04,056 Current Learning Rate: 0.0000221035 +2025-03-27 21:24:04,056 Train Loss: 0.0002972, Val Loss: 0.0003789 +2025-03-27 21:24:04,056 Epoch 1820/2000 +2025-03-27 21:26:38,869 Current Learning Rate: 0.0000244717 +2025-03-27 21:26:38,870 Train Loss: 0.0002973, Val Loss: 0.0003790 +2025-03-27 21:26:38,870 Epoch 1821/2000 +2025-03-27 21:29:12,604 Current Learning Rate: 0.0000269573 +2025-03-27 21:29:12,605 Train Loss: 0.0002974, Val Loss: 0.0003792 +2025-03-27 21:29:12,605 Epoch 1822/2000 +2025-03-27 21:31:46,465 Current Learning Rate: 0.0000295596 +2025-03-27 21:31:46,466 Train Loss: 0.0002975, Val Loss: 0.0003793 +2025-03-27 21:31:46,466 Epoch 1823/2000 +2025-03-27 21:34:20,953 Current Learning Rate: 0.0000322780 +2025-03-27 21:34:20,953 Train Loss: 0.0002976, Val Loss: 0.0003794 +2025-03-27 21:34:20,954 Epoch 1824/2000 +2025-03-27 21:36:55,180 Current Learning Rate: 0.0000351118 +2025-03-27 21:36:55,181 Train Loss: 0.0002977, Val Loss: 0.0003795 +2025-03-27 21:36:55,181 Epoch 1825/2000 +2025-03-27 21:39:30,041 Current Learning Rate: 0.0000380602 +2025-03-27 21:39:30,041 Train Loss: 0.0002978, Val Loss: 0.0003796 +2025-03-27 21:39:30,042 Epoch 1826/2000 +2025-03-27 21:42:04,346 Current Learning Rate: 0.0000411227 +2025-03-27 21:42:04,347 Train Loss: 0.0002978, Val Loss: 0.0003797 +2025-03-27 21:42:04,347 Epoch 1827/2000 +2025-03-27 21:44:39,584 Current Learning Rate: 0.0000442984 +2025-03-27 21:44:39,584 Train Loss: 0.0002979, Val Loss: 0.0003798 +2025-03-27 21:44:39,584 Epoch 1828/2000 +2025-03-27 21:47:14,196 Current Learning Rate: 0.0000475865 +2025-03-27 21:47:14,197 Train Loss: 0.0002980, Val Loss: 0.0003799 +2025-03-27 21:47:14,197 Epoch 1829/2000 +2025-03-27 21:49:49,172 Current Learning Rate: 0.0000509862 +2025-03-27 21:49:49,172 Train Loss: 0.0002981, Val Loss: 0.0003800 +2025-03-27 21:49:49,173 Epoch 1830/2000 +2025-03-27 21:52:24,412 Current Learning 
Rate: 0.0000544967 +2025-03-27 21:52:24,413 Train Loss: 0.0002982, Val Loss: 0.0003801 +2025-03-27 21:52:24,413 Epoch 1831/2000 +2025-03-27 21:54:58,765 Current Learning Rate: 0.0000581172 +2025-03-27 21:54:58,766 Train Loss: 0.0002983, Val Loss: 0.0003802 +2025-03-27 21:54:58,766 Epoch 1832/2000 +2025-03-27 21:57:33,504 Current Learning Rate: 0.0000618467 +2025-03-27 21:57:33,504 Train Loss: 0.0002984, Val Loss: 0.0003803 +2025-03-27 21:57:33,504 Epoch 1833/2000 +2025-03-27 22:00:06,457 Current Learning Rate: 0.0000656842 +2025-03-27 22:00:06,458 Train Loss: 0.0002985, Val Loss: 0.0003804 +2025-03-27 22:00:06,458 Epoch 1834/2000 +2025-03-27 22:02:40,796 Current Learning Rate: 0.0000696290 +2025-03-27 22:02:40,796 Train Loss: 0.0002987, Val Loss: 0.0003806 +2025-03-27 22:02:40,797 Epoch 1835/2000 +2025-03-27 22:05:15,507 Current Learning Rate: 0.0000736799 +2025-03-27 22:05:15,507 Train Loss: 0.0002988, Val Loss: 0.0003807 +2025-03-27 22:05:15,507 Epoch 1836/2000 +2025-03-27 22:07:49,112 Current Learning Rate: 0.0000778360 +2025-03-27 22:07:49,112 Train Loss: 0.0002989, Val Loss: 0.0003809 +2025-03-27 22:07:49,112 Epoch 1837/2000 +2025-03-27 22:10:23,244 Current Learning Rate: 0.0000820963 +2025-03-27 22:10:23,245 Train Loss: 0.0002990, Val Loss: 0.0003810 +2025-03-27 22:10:23,245 Epoch 1838/2000 +2025-03-27 22:12:58,174 Current Learning Rate: 0.0000864597 +2025-03-27 22:12:58,174 Train Loss: 0.0002991, Val Loss: 0.0003811 +2025-03-27 22:12:58,175 Epoch 1839/2000 +2025-03-27 22:15:32,481 Current Learning Rate: 0.0000909251 +2025-03-27 22:15:32,482 Train Loss: 0.0002992, Val Loss: 0.0003812 +2025-03-27 22:15:32,482 Epoch 1840/2000 +2025-03-27 22:18:05,919 Current Learning Rate: 0.0000954915 +2025-03-27 22:18:05,920 Train Loss: 0.0002994, Val Loss: 0.0003813 +2025-03-27 22:18:05,920 Epoch 1841/2000 +2025-03-27 22:20:38,881 Current Learning Rate: 0.0001001577 +2025-03-27 22:20:38,881 Train Loss: 0.0002995, Val Loss: 0.0003815 +2025-03-27 22:20:38,881 Epoch 1842/2000 
+2025-03-27 22:23:11,904 Current Learning Rate: 0.0001049225 +2025-03-27 22:23:11,904 Train Loss: 0.0002996, Val Loss: 0.0003816 +2025-03-27 22:23:11,904 Epoch 1843/2000 +2025-03-27 22:25:44,890 Current Learning Rate: 0.0001097848 +2025-03-27 22:25:44,891 Train Loss: 0.0002998, Val Loss: 0.0003818 +2025-03-27 22:25:44,891 Epoch 1844/2000 +2025-03-27 22:28:19,203 Current Learning Rate: 0.0001147434 +2025-03-27 22:28:19,203 Train Loss: 0.0002999, Val Loss: 0.0003819 +2025-03-27 22:28:19,204 Epoch 1845/2000 +2025-03-27 22:30:53,728 Current Learning Rate: 0.0001197970 +2025-03-27 22:30:53,729 Train Loss: 0.0003000, Val Loss: 0.0003821 +2025-03-27 22:30:53,729 Epoch 1846/2000 +2025-03-27 22:33:26,538 Current Learning Rate: 0.0001249445 +2025-03-27 22:33:26,538 Train Loss: 0.0003002, Val Loss: 0.0003822 +2025-03-27 22:33:26,539 Epoch 1847/2000 +2025-03-27 22:36:01,871 Current Learning Rate: 0.0001301845 +2025-03-27 22:36:01,872 Train Loss: 0.0003003, Val Loss: 0.0003824 +2025-03-27 22:36:01,872 Epoch 1848/2000 +2025-03-27 22:38:35,968 Current Learning Rate: 0.0001355157 +2025-03-27 22:38:35,968 Train Loss: 0.0003005, Val Loss: 0.0003825 +2025-03-27 22:38:35,969 Epoch 1849/2000 +2025-03-27 22:41:08,893 Current Learning Rate: 0.0001409369 +2025-03-27 22:41:08,893 Train Loss: 0.0003006, Val Loss: 0.0003825 +2025-03-27 22:41:08,894 Epoch 1850/2000 +2025-03-27 22:43:43,365 Current Learning Rate: 0.0001464466 +2025-03-27 22:43:43,366 Train Loss: 0.0003008, Val Loss: 0.0003826 +2025-03-27 22:43:43,366 Epoch 1851/2000 +2025-03-27 22:46:17,968 Current Learning Rate: 0.0001520436 +2025-03-27 22:46:17,968 Train Loss: 0.0003009, Val Loss: 0.0003828 +2025-03-27 22:46:17,968 Epoch 1852/2000 +2025-03-27 22:48:52,307 Current Learning Rate: 0.0001577264 +2025-03-27 22:48:52,308 Train Loss: 0.0003011, Val Loss: 0.0003829 +2025-03-27 22:48:52,308 Epoch 1853/2000 +2025-03-27 22:51:26,549 Current Learning Rate: 0.0001634937 +2025-03-27 22:51:26,549 Train Loss: 0.0003012, Val Loss: 0.0003831 
+2025-03-27 22:51:26,550 Epoch 1854/2000 +2025-03-27 22:54:00,910 Current Learning Rate: 0.0001693441 +2025-03-27 22:54:00,910 Train Loss: 0.0003014, Val Loss: 0.0003833 +2025-03-27 22:54:00,911 Epoch 1855/2000 +2025-03-27 22:56:36,186 Current Learning Rate: 0.0001752760 +2025-03-27 22:56:36,186 Train Loss: 0.0003016, Val Loss: 0.0003836 +2025-03-27 22:56:36,187 Epoch 1856/2000 +2025-03-27 22:59:10,556 Current Learning Rate: 0.0001812880 +2025-03-27 22:59:10,556 Train Loss: 0.0003017, Val Loss: 0.0003838 +2025-03-27 22:59:10,557 Epoch 1857/2000 +2025-03-27 23:01:46,049 Current Learning Rate: 0.0001873787 +2025-03-27 23:01:46,049 Train Loss: 0.0003019, Val Loss: 0.0003840 +2025-03-27 23:01:46,049 Epoch 1858/2000 +2025-03-27 23:04:18,713 Current Learning Rate: 0.0001935465 +2025-03-27 23:04:18,713 Train Loss: 0.0003021, Val Loss: 0.0003843 +2025-03-27 23:04:18,713 Epoch 1859/2000 +2025-03-27 23:06:53,815 Current Learning Rate: 0.0001997899 +2025-03-27 23:06:53,816 Train Loss: 0.0003022, Val Loss: 0.0003847 +2025-03-27 23:06:53,816 Epoch 1860/2000 +2025-03-27 23:09:27,638 Current Learning Rate: 0.0002061074 +2025-03-27 23:09:27,639 Train Loss: 0.0003024, Val Loss: 0.0003851 +2025-03-27 23:09:27,639 Epoch 1861/2000 +2025-03-27 23:12:01,937 Current Learning Rate: 0.0002124974 +2025-03-27 23:12:01,937 Train Loss: 0.0003026, Val Loss: 0.0003854 +2025-03-27 23:12:01,938 Epoch 1862/2000 +2025-03-27 23:14:37,044 Current Learning Rate: 0.0002189583 +2025-03-27 23:14:37,045 Train Loss: 0.0003028, Val Loss: 0.0003858 +2025-03-27 23:14:37,045 Epoch 1863/2000 +2025-03-27 23:17:11,316 Current Learning Rate: 0.0002254886 +2025-03-27 23:17:11,317 Train Loss: 0.0003030, Val Loss: 0.0003865 +2025-03-27 23:17:11,317 Epoch 1864/2000 +2025-03-27 23:19:45,778 Current Learning Rate: 0.0002320866 +2025-03-27 23:19:45,779 Train Loss: 0.0003033, Val Loss: 0.0003869 +2025-03-27 23:19:45,779 Epoch 1865/2000 +2025-03-27 23:22:21,099 Current Learning Rate: 0.0002387507 +2025-03-27 23:22:21,099 
Train Loss: 0.0003038, Val Loss: 0.0003876 +2025-03-27 23:22:21,100 Epoch 1866/2000 +2025-03-27 23:24:54,419 Current Learning Rate: 0.0002454793 +2025-03-27 23:24:54,419 Train Loss: 0.0003044, Val Loss: 0.0003875 +2025-03-27 23:24:54,420 Epoch 1867/2000 +2025-03-27 23:27:28,179 Current Learning Rate: 0.0002522707 +2025-03-27 23:27:28,179 Train Loss: 0.0003047, Val Loss: 0.0003871 +2025-03-27 23:27:28,179 Epoch 1868/2000 +2025-03-27 23:30:01,291 Current Learning Rate: 0.0002591232 +2025-03-27 23:30:01,292 Train Loss: 0.0003049, Val Loss: 0.0003877 +2025-03-27 23:30:01,292 Epoch 1869/2000 +2025-03-27 23:32:36,256 Current Learning Rate: 0.0002660351 +2025-03-27 23:32:36,257 Train Loss: 0.0003048, Val Loss: 0.0003880 +2025-03-27 23:32:36,257 Epoch 1870/2000 +2025-03-27 23:35:10,369 Current Learning Rate: 0.0002730048 +2025-03-27 23:35:10,370 Train Loss: 0.0003044, Val Loss: 0.0003880 +2025-03-27 23:35:10,370 Epoch 1871/2000 +2025-03-27 23:37:43,884 Current Learning Rate: 0.0002800304 +2025-03-27 23:37:43,884 Train Loss: 0.0003044, Val Loss: 0.0003876 +2025-03-27 23:37:43,884 Epoch 1872/2000 +2025-03-27 23:40:17,370 Current Learning Rate: 0.0002871104 +2025-03-27 23:40:17,371 Train Loss: 0.0003046, Val Loss: 0.0003875 +2025-03-27 23:40:17,371 Epoch 1873/2000 +2025-03-27 23:42:51,485 Current Learning Rate: 0.0002942428 +2025-03-27 23:42:51,485 Train Loss: 0.0003048, Val Loss: 0.0003875 +2025-03-27 23:42:51,485 Epoch 1874/2000 +2025-03-27 23:45:25,308 Current Learning Rate: 0.0003014261 +2025-03-27 23:45:25,308 Train Loss: 0.0003051, Val Loss: 0.0003876 +2025-03-27 23:45:25,308 Epoch 1875/2000 +2025-03-27 23:47:59,494 Current Learning Rate: 0.0003086583 +2025-03-27 23:47:59,495 Train Loss: 0.0003053, Val Loss: 0.0003877 +2025-03-27 23:47:59,495 Epoch 1876/2000 +2025-03-27 23:50:33,936 Current Learning Rate: 0.0003159377 +2025-03-27 23:50:33,937 Train Loss: 0.0003056, Val Loss: 0.0003880 +2025-03-27 23:50:33,937 Epoch 1877/2000 +2025-03-27 23:53:08,358 Current Learning 
Rate: 0.0003232626 +2025-03-27 23:53:08,359 Train Loss: 0.0003060, Val Loss: 0.0003887 +2025-03-27 23:53:08,359 Epoch 1878/2000 +2025-03-27 23:55:42,716 Current Learning Rate: 0.0003306310 +2025-03-27 23:55:42,716 Train Loss: 0.0003069, Val Loss: 0.0003897 +2025-03-27 23:55:42,717 Epoch 1879/2000 +2025-03-27 23:58:17,411 Current Learning Rate: 0.0003380413 +2025-03-27 23:58:17,411 Train Loss: 0.0003078, Val Loss: 0.0003936 +2025-03-27 23:58:17,412 Epoch 1880/2000 +2025-03-28 00:00:51,776 Current Learning Rate: 0.0003454915 +2025-03-28 00:00:51,776 Train Loss: 0.0003081, Val Loss: 0.0003926 +2025-03-28 00:00:51,777 Epoch 1881/2000 +2025-03-28 00:03:26,771 Current Learning Rate: 0.0003529798 +2025-03-28 00:03:26,771 Train Loss: 0.0003083, Val Loss: 0.0003922 +2025-03-28 00:03:26,772 Epoch 1882/2000 +2025-03-28 00:06:00,353 Current Learning Rate: 0.0003605044 +2025-03-28 00:06:00,354 Train Loss: 0.0003082, Val Loss: 0.0003920 +2025-03-28 00:06:00,354 Epoch 1883/2000 +2025-03-28 00:08:35,107 Current Learning Rate: 0.0003680635 +2025-03-28 00:08:35,107 Train Loss: 0.0003077, Val Loss: 0.0003914 +2025-03-28 00:08:35,107 Epoch 1884/2000 +2025-03-28 00:11:09,510 Current Learning Rate: 0.0003756551 +2025-03-28 00:11:09,511 Train Loss: 0.0003072, Val Loss: 0.0003903 +2025-03-28 00:11:09,511 Epoch 1885/2000 +2025-03-28 00:13:45,281 Current Learning Rate: 0.0003832773 +2025-03-28 00:13:45,282 Train Loss: 0.0003073, Val Loss: 0.0003900 +2025-03-28 00:13:45,282 Epoch 1886/2000 +2025-03-28 00:16:19,114 Current Learning Rate: 0.0003909284 +2025-03-28 00:16:19,114 Train Loss: 0.0003076, Val Loss: 0.0003902 +2025-03-28 00:16:19,115 Epoch 1887/2000 +2025-03-28 00:18:52,933 Current Learning Rate: 0.0003986064 +2025-03-28 00:18:52,934 Train Loss: 0.0003080, Val Loss: 0.0003906 +2025-03-28 00:18:52,934 Epoch 1888/2000 +2025-03-28 00:21:28,589 Current Learning Rate: 0.0004063093 +2025-03-28 00:21:28,589 Train Loss: 0.0003086, Val Loss: 0.0003913 +2025-03-28 00:21:28,589 Epoch 1889/2000 
+2025-03-28 00:24:02,845 Current Learning Rate: 0.0004140354 +2025-03-28 00:24:02,846 Train Loss: 0.0003098, Val Loss: 0.0003923 +2025-03-28 00:24:02,846 Epoch 1890/2000 +2025-03-28 00:26:37,233 Current Learning Rate: 0.0004217828 +2025-03-28 00:26:37,233 Train Loss: 0.0003102, Val Loss: 0.0003937 +2025-03-28 00:26:37,233 Epoch 1891/2000 +2025-03-28 00:29:11,304 Current Learning Rate: 0.0004295494 +2025-03-28 00:29:11,305 Train Loss: 0.0003107, Val Loss: 0.0003931 +2025-03-28 00:29:11,305 Epoch 1892/2000 +2025-03-28 00:31:45,446 Current Learning Rate: 0.0004373334 +2025-03-28 00:31:45,447 Train Loss: 0.0003110, Val Loss: 0.0003934 +2025-03-28 00:31:45,447 Epoch 1893/2000 +2025-03-28 00:34:20,293 Current Learning Rate: 0.0004451328 +2025-03-28 00:34:20,294 Train Loss: 0.0003113, Val Loss: 0.0003986 +2025-03-28 00:34:20,294 Epoch 1894/2000 +2025-03-28 00:36:52,897 Current Learning Rate: 0.0004529458 +2025-03-28 00:36:52,898 Train Loss: 0.0003114, Val Loss: 0.0003984 +2025-03-28 00:36:52,898 Epoch 1895/2000 +2025-03-28 00:39:25,835 Current Learning Rate: 0.0004607705 +2025-03-28 00:39:25,836 Train Loss: 0.0003108, Val Loss: 0.0003942 +2025-03-28 00:39:25,836 Epoch 1896/2000 +2025-03-28 00:41:59,851 Current Learning Rate: 0.0004686047 +2025-03-28 00:41:59,851 Train Loss: 0.0003099, Val Loss: 0.0003927 +2025-03-28 00:41:59,851 Epoch 1897/2000 +2025-03-28 00:44:33,776 Current Learning Rate: 0.0004764468 +2025-03-28 00:44:33,777 Train Loss: 0.0003101, Val Loss: 0.0003934 +2025-03-28 00:44:33,777 Epoch 1898/2000 +2025-03-28 00:47:07,626 Current Learning Rate: 0.0004842946 +2025-03-28 00:47:07,627 Train Loss: 0.0003106, Val Loss: 0.0003941 +2025-03-28 00:47:07,627 Epoch 1899/2000 +2025-03-28 00:49:41,341 Current Learning Rate: 0.0004921463 +2025-03-28 00:49:41,341 Train Loss: 0.0003117, Val Loss: 0.0003947 +2025-03-28 00:49:41,341 Epoch 1900/2000 +2025-03-28 00:52:15,437 Current Learning Rate: 0.0005000000 +2025-03-28 00:52:15,437 Train Loss: 0.0003127, Val Loss: 0.0003947 
+2025-03-28 00:52:15,438 Epoch 1901/2000 +2025-03-28 00:54:50,633 Current Learning Rate: 0.0005078537 +2025-03-28 00:54:50,634 Train Loss: 0.0003136, Val Loss: 0.0003955 +2025-03-28 00:54:50,634 Epoch 1902/2000 +2025-03-28 00:57:24,373 Current Learning Rate: 0.0005157054 +2025-03-28 00:57:24,373 Train Loss: 0.0003139, Val Loss: 0.0003953 +2025-03-28 00:57:24,373 Epoch 1903/2000 +2025-03-28 00:59:59,084 Current Learning Rate: 0.0005235532 +2025-03-28 00:59:59,084 Train Loss: 0.0003142, Val Loss: 0.0003954 +2025-03-28 00:59:59,084 Epoch 1904/2000 +2025-03-28 01:02:34,076 Current Learning Rate: 0.0005313953 +2025-03-28 01:02:34,077 Train Loss: 0.0003145, Val Loss: 0.0003970 +2025-03-28 01:02:34,077 Epoch 1905/2000 +2025-03-28 01:05:07,647 Current Learning Rate: 0.0005392295 +2025-03-28 01:05:07,648 Train Loss: 0.0003149, Val Loss: 0.0003970 +2025-03-28 01:05:07,648 Epoch 1906/2000 +2025-03-28 01:07:41,586 Current Learning Rate: 0.0005470542 +2025-03-28 01:07:41,587 Train Loss: 0.0003157, Val Loss: 0.0003987 +2025-03-28 01:07:41,588 Epoch 1907/2000 +2025-03-28 01:10:16,452 Current Learning Rate: 0.0005548672 +2025-03-28 01:10:16,453 Train Loss: 0.0003163, Val Loss: 0.0003986 +2025-03-28 01:10:16,453 Epoch 1908/2000 +2025-03-28 01:12:50,728 Current Learning Rate: 0.0005626666 +2025-03-28 01:12:50,729 Train Loss: 0.0003155, Val Loss: 0.0003977 +2025-03-28 01:12:50,729 Epoch 1909/2000 +2025-03-28 01:15:25,331 Current Learning Rate: 0.0005704506 +2025-03-28 01:15:25,331 Train Loss: 0.0003135, Val Loss: 0.0003964 +2025-03-28 01:15:25,332 Epoch 1910/2000 +2025-03-28 01:17:58,931 Current Learning Rate: 0.0005782172 +2025-03-28 01:17:58,931 Train Loss: 0.0003134, Val Loss: 0.0003974 +2025-03-28 01:17:58,932 Epoch 1911/2000 +2025-03-28 01:20:33,355 Current Learning Rate: 0.0005859646 +2025-03-28 01:20:33,355 Train Loss: 0.0003143, Val Loss: 0.0003988 +2025-03-28 01:20:33,355 Epoch 1912/2000 +2025-03-28 01:23:07,289 Current Learning Rate: 0.0005936907 +2025-03-28 01:23:07,290 
Train Loss: 0.0003156, Val Loss: 0.0003984 +2025-03-28 01:23:07,290 Epoch 1913/2000 +2025-03-28 01:25:40,289 Current Learning Rate: 0.0006013936 +2025-03-28 01:25:40,289 Train Loss: 0.0003166, Val Loss: 0.0003994 +2025-03-28 01:25:40,290 Epoch 1914/2000 +2025-03-28 01:28:13,175 Current Learning Rate: 0.0006090716 +2025-03-28 01:28:13,175 Train Loss: 0.0003173, Val Loss: 0.0003989 +2025-03-28 01:28:13,175 Epoch 1915/2000 +2025-03-28 01:30:46,908 Current Learning Rate: 0.0006167227 +2025-03-28 01:30:46,908 Train Loss: 0.0003180, Val Loss: 0.0004000 +2025-03-28 01:30:46,908 Epoch 1916/2000 +2025-03-28 01:33:20,954 Current Learning Rate: 0.0006243449 +2025-03-28 01:33:20,954 Train Loss: 0.0003185, Val Loss: 0.0004020 +2025-03-28 01:33:20,954 Epoch 1917/2000 +2025-03-28 01:35:55,819 Current Learning Rate: 0.0006319365 +2025-03-28 01:35:55,819 Train Loss: 0.0003191, Val Loss: 0.0004019 +2025-03-28 01:35:55,819 Epoch 1918/2000 +2025-03-28 01:38:31,365 Current Learning Rate: 0.0006394956 +2025-03-28 01:38:31,365 Train Loss: 0.0003196, Val Loss: 0.0004008 +2025-03-28 01:38:31,366 Epoch 1919/2000 +2025-03-28 01:41:05,440 Current Learning Rate: 0.0006470202 +2025-03-28 01:41:05,440 Train Loss: 0.0003190, Val Loss: 0.0004003 +2025-03-28 01:41:05,440 Epoch 1920/2000 +2025-03-28 01:43:40,734 Current Learning Rate: 0.0006545085 +2025-03-28 01:43:40,735 Train Loss: 0.0003170, Val Loss: 0.0004002 +2025-03-28 01:43:40,735 Epoch 1921/2000 +2025-03-28 01:46:14,476 Current Learning Rate: 0.0006619587 +2025-03-28 01:46:14,477 Train Loss: 0.0003168, Val Loss: 0.0004009 +2025-03-28 01:46:14,477 Epoch 1922/2000 +2025-03-28 01:48:47,653 Current Learning Rate: 0.0006693690 +2025-03-28 01:48:47,654 Train Loss: 0.0003177, Val Loss: 0.0004039 +2025-03-28 01:48:47,655 Epoch 1923/2000 +2025-03-28 01:51:21,016 Current Learning Rate: 0.0006767374 +2025-03-28 01:51:21,017 Train Loss: 0.0003191, Val Loss: 0.0004039 +2025-03-28 01:51:21,017 Epoch 1924/2000 +2025-03-28 01:53:55,187 Current Learning 
Rate: 0.0006840623 +2025-03-28 01:53:55,188 Train Loss: 0.0003201, Val Loss: 0.0004031 +2025-03-28 01:53:55,188 Epoch 1925/2000 +2025-03-28 01:56:29,421 Current Learning Rate: 0.0006913417 +2025-03-28 01:56:29,421 Train Loss: 0.0003211, Val Loss: 0.0004041 +2025-03-28 01:56:29,422 Epoch 1926/2000 +2025-03-28 01:59:04,002 Current Learning Rate: 0.0006985739 +2025-03-28 01:59:04,003 Train Loss: 0.0003215, Val Loss: 0.0004034 +2025-03-28 01:59:04,003 Epoch 1927/2000 +2025-03-28 02:01:38,335 Current Learning Rate: 0.0007057572 +2025-03-28 02:01:38,335 Train Loss: 0.0003220, Val Loss: 0.0004056 +2025-03-28 02:01:38,335 Epoch 1928/2000 +2025-03-28 02:04:13,141 Current Learning Rate: 0.0007128896 +2025-03-28 02:04:13,142 Train Loss: 0.0003229, Val Loss: 0.0004052 +2025-03-28 02:04:13,142 Epoch 1929/2000 +2025-03-28 02:06:48,355 Current Learning Rate: 0.0007199696 +2025-03-28 02:06:48,356 Train Loss: 0.0003234, Val Loss: 0.0004077 +2025-03-28 02:06:48,356 Epoch 1930/2000 +2025-03-28 02:09:23,407 Current Learning Rate: 0.0007269952 +2025-03-28 02:09:23,408 Train Loss: 0.0003235, Val Loss: 0.0004049 +2025-03-28 02:09:23,408 Epoch 1931/2000 +2025-03-28 02:11:56,741 Current Learning Rate: 0.0007339649 +2025-03-28 02:11:56,742 Train Loss: 0.0003231, Val Loss: 0.0004046 +2025-03-28 02:11:56,742 Epoch 1932/2000 +2025-03-28 02:14:32,205 Current Learning Rate: 0.0007408768 +2025-03-28 02:14:32,206 Train Loss: 0.0003211, Val Loss: 0.0004043 +2025-03-28 02:14:32,206 Epoch 1933/2000 +2025-03-28 02:17:06,679 Current Learning Rate: 0.0007477293 +2025-03-28 02:17:06,679 Train Loss: 0.0003203, Val Loss: 0.0004043 +2025-03-28 02:17:06,680 Epoch 1934/2000 +2025-03-28 02:19:40,390 Current Learning Rate: 0.0007545207 +2025-03-28 02:19:40,390 Train Loss: 0.0003218, Val Loss: 0.0004056 +2025-03-28 02:19:40,390 Epoch 1935/2000 +2025-03-28 02:22:14,730 Current Learning Rate: 0.0007612493 +2025-03-28 02:22:14,731 Train Loss: 0.0003229, Val Loss: 0.0004067 +2025-03-28 02:22:14,731 Epoch 1936/2000 
+2025-03-28 02:24:49,042 Current Learning Rate: 0.0007679134 +2025-03-28 02:24:49,042 Train Loss: 0.0003240, Val Loss: 0.0004071 +2025-03-28 02:24:49,042 Epoch 1937/2000 +2025-03-28 02:27:23,953 Current Learning Rate: 0.0007745114 +2025-03-28 02:27:23,953 Train Loss: 0.0003254, Val Loss: 0.0004083 +2025-03-28 02:27:23,954 Epoch 1938/2000 +2025-03-28 02:29:57,648 Current Learning Rate: 0.0007810417 +2025-03-28 02:29:57,649 Train Loss: 0.0003251, Val Loss: 0.0004094 +2025-03-28 02:29:57,649 Epoch 1939/2000 +2025-03-28 02:32:31,444 Current Learning Rate: 0.0007875026 +2025-03-28 02:32:31,445 Train Loss: 0.0003259, Val Loss: 0.0004085 +2025-03-28 02:32:31,445 Epoch 1940/2000 +2025-03-28 02:35:05,123 Current Learning Rate: 0.0007938926 +2025-03-28 02:35:05,123 Train Loss: 0.0003272, Val Loss: 0.0004108 +2025-03-28 02:35:05,123 Epoch 1941/2000 +2025-03-28 02:37:40,300 Current Learning Rate: 0.0008002101 +2025-03-28 02:37:40,300 Train Loss: 0.0003271, Val Loss: 0.0004095 +2025-03-28 02:37:40,300 Epoch 1942/2000 +2025-03-28 02:40:12,433 Current Learning Rate: 0.0008064535 +2025-03-28 02:40:12,434 Train Loss: 0.0003274, Val Loss: 0.0004106 +2025-03-28 02:40:12,434 Epoch 1943/2000 +2025-03-28 02:42:45,721 Current Learning Rate: 0.0008126213 +2025-03-28 02:42:45,722 Train Loss: 0.0003279, Val Loss: 0.0004088 +2025-03-28 02:42:45,722 Epoch 1944/2000 +2025-03-28 02:45:18,413 Current Learning Rate: 0.0008187120 +2025-03-28 02:45:18,413 Train Loss: 0.0003275, Val Loss: 0.0004102 +2025-03-28 02:45:18,413 Epoch 1945/2000 +2025-03-28 02:47:53,221 Current Learning Rate: 0.0008247240 +2025-03-28 02:47:53,222 Train Loss: 0.0003245, Val Loss: 0.0004084 +2025-03-28 02:47:53,222 Epoch 1946/2000 +2025-03-28 02:50:27,220 Current Learning Rate: 0.0008306559 +2025-03-28 02:50:27,221 Train Loss: 0.0003246, Val Loss: 0.0004112 +2025-03-28 02:50:27,221 Epoch 1947/2000 +2025-03-28 02:53:00,666 Current Learning Rate: 0.0008365063 +2025-03-28 02:53:00,666 Train Loss: 0.0003264, Val Loss: 0.0004104 
+2025-03-28 02:53:00,667 Epoch 1948/2000 +2025-03-28 02:55:34,327 Current Learning Rate: 0.0008422736 +2025-03-28 02:55:34,327 Train Loss: 0.0003281, Val Loss: 0.0004115 +2025-03-28 02:55:34,328 Epoch 1949/2000 +2025-03-28 02:58:08,507 Current Learning Rate: 0.0008479564 +2025-03-28 02:58:08,507 Train Loss: 0.0003283, Val Loss: 0.0004117 +2025-03-28 02:58:08,508 Epoch 1950/2000 +2025-03-28 03:00:43,182 Current Learning Rate: 0.0008535534 +2025-03-28 03:00:43,183 Train Loss: 0.0003294, Val Loss: 0.0004125 +2025-03-28 03:00:43,183 Epoch 1951/2000 +2025-03-28 03:03:17,758 Current Learning Rate: 0.0008590631 +2025-03-28 03:03:17,758 Train Loss: 0.0003299, Val Loss: 0.0004119 +2025-03-28 03:03:17,759 Epoch 1952/2000 +2025-03-28 03:05:51,965 Current Learning Rate: 0.0008644843 +2025-03-28 03:05:51,965 Train Loss: 0.0003278, Val Loss: 0.0004096 +2025-03-28 03:05:51,966 Epoch 1953/2000 +2025-03-28 03:08:25,376 Current Learning Rate: 0.0008698155 +2025-03-28 03:08:25,377 Train Loss: 0.0003264, Val Loss: 0.0004122 +2025-03-28 03:08:25,377 Epoch 1954/2000 +2025-03-28 03:10:58,083 Current Learning Rate: 0.0008750555 +2025-03-28 03:10:58,084 Train Loss: 0.0003278, Val Loss: 0.0004129 +2025-03-28 03:10:58,084 Epoch 1955/2000 +2025-03-28 03:13:31,441 Current Learning Rate: 0.0008802030 +2025-03-28 03:13:31,442 Train Loss: 0.0003297, Val Loss: 0.0004141 +2025-03-28 03:13:31,442 Epoch 1956/2000 +2025-03-28 03:16:04,380 Current Learning Rate: 0.0008852566 +2025-03-28 03:16:04,380 Train Loss: 0.0003310, Val Loss: 0.0004135 +2025-03-28 03:16:04,381 Epoch 1957/2000 +2025-03-28 03:18:39,113 Current Learning Rate: 0.0008902152 +2025-03-28 03:18:39,114 Train Loss: 0.0003321, Val Loss: 0.0004136 +2025-03-28 03:18:39,114 Epoch 1958/2000 +2025-03-28 03:21:13,778 Current Learning Rate: 0.0008950775 +2025-03-28 03:21:13,778 Train Loss: 0.0003317, Val Loss: 0.0004139 +2025-03-28 03:21:13,779 Epoch 1959/2000 +2025-03-28 03:23:47,216 Current Learning Rate: 0.0008998423 +2025-03-28 03:23:47,216 
Train Loss: 0.0003306, Val Loss: 0.0004140 +2025-03-28 03:23:47,217 Epoch 1960/2000 +2025-03-28 03:26:21,032 Current Learning Rate: 0.0009045085 +2025-03-28 03:26:21,033 Train Loss: 0.0003320, Val Loss: 0.0004153 +2025-03-28 03:26:21,033 Epoch 1961/2000 +2025-03-28 03:28:54,308 Current Learning Rate: 0.0009090749 +2025-03-28 03:28:54,309 Train Loss: 0.0003350, Val Loss: 0.0004318 +2025-03-28 03:28:54,309 Epoch 1962/2000 +2025-03-28 03:31:28,946 Current Learning Rate: 0.0009135403 +2025-03-28 03:31:28,946 Train Loss: 0.0003370, Val Loss: 0.0004165 +2025-03-28 03:31:28,946 Epoch 1963/2000 +2025-03-28 03:34:03,760 Current Learning Rate: 0.0009179037 +2025-03-28 03:34:03,760 Train Loss: 0.0003354, Val Loss: 0.0004216 +2025-03-28 03:34:03,760 Epoch 1964/2000 +2025-03-28 03:36:37,820 Current Learning Rate: 0.0009221640 +2025-03-28 03:36:37,820 Train Loss: 0.0003347, Val Loss: 0.0004143 +2025-03-28 03:36:37,820 Epoch 1965/2000 +2025-03-28 03:39:11,677 Current Learning Rate: 0.0009263201 +2025-03-28 03:39:11,677 Train Loss: 0.0003317, Val Loss: 0.0004126 +2025-03-28 03:39:11,677 Epoch 1966/2000 +2025-03-28 03:41:44,125 Current Learning Rate: 0.0009303710 +2025-03-28 03:41:44,126 Train Loss: 0.0003283, Val Loss: 0.0004136 +2025-03-28 03:41:44,126 Epoch 1967/2000 +2025-03-28 03:44:16,689 Current Learning Rate: 0.0009343158 +2025-03-28 03:44:16,689 Train Loss: 0.0003302, Val Loss: 0.0004131 +2025-03-28 03:44:16,689 Epoch 1968/2000 +2025-03-28 03:46:50,474 Current Learning Rate: 0.0009381533 +2025-03-28 03:46:50,474 Train Loss: 0.0003320, Val Loss: 0.0004146 +2025-03-28 03:46:50,475 Epoch 1969/2000 +2025-03-28 03:49:24,826 Current Learning Rate: 0.0009418828 +2025-03-28 03:49:24,827 Train Loss: 0.0003333, Val Loss: 0.0004154 +2025-03-28 03:49:24,827 Epoch 1970/2000 +2025-03-28 03:51:59,881 Current Learning Rate: 0.0009455033 +2025-03-28 03:51:59,882 Train Loss: 0.0003339, Val Loss: 0.0004208 +2025-03-28 03:51:59,882 Epoch 1971/2000 +2025-03-28 03:54:33,933 Current Learning 
Rate: 0.0009490138 +2025-03-28 03:54:33,934 Train Loss: 0.0003344, Val Loss: 0.0004168 +2025-03-28 03:54:33,934 Epoch 1972/2000 +2025-03-28 03:57:08,393 Current Learning Rate: 0.0009524135 +2025-03-28 03:57:08,394 Train Loss: 0.0003362, Val Loss: 0.0004176 +2025-03-28 03:57:08,394 Epoch 1973/2000 +2025-03-28 03:59:41,660 Current Learning Rate: 0.0009557016 +2025-03-28 03:59:41,661 Train Loss: 0.0003351, Val Loss: 0.0004190 +2025-03-28 03:59:41,661 Epoch 1974/2000 +2025-03-28 04:02:15,637 Current Learning Rate: 0.0009588773 +2025-03-28 04:02:15,637 Train Loss: 0.0003303, Val Loss: 0.0004131 +2025-03-28 04:02:15,638 Epoch 1975/2000 +2025-03-28 04:04:49,207 Current Learning Rate: 0.0009619398 +2025-03-28 04:04:49,207 Train Loss: 0.0003309, Val Loss: 0.0004159 +2025-03-28 04:04:49,207 Epoch 1976/2000 +2025-03-28 04:07:24,059 Current Learning Rate: 0.0009648882 +2025-03-28 04:07:24,060 Train Loss: 0.0003336, Val Loss: 0.0004163 +2025-03-28 04:07:24,060 Epoch 1977/2000 +2025-03-28 04:09:58,140 Current Learning Rate: 0.0009677220 +2025-03-28 04:09:58,141 Train Loss: 0.0003347, Val Loss: 0.0004166 +2025-03-28 04:09:58,141 Epoch 1978/2000 +2025-03-28 04:12:32,053 Current Learning Rate: 0.0009704404 +2025-03-28 04:12:32,054 Train Loss: 0.0003353, Val Loss: 0.0004193 +2025-03-28 04:12:32,054 Epoch 1979/2000 +2025-03-28 04:15:06,243 Current Learning Rate: 0.0009730427 +2025-03-28 04:15:06,244 Train Loss: 0.0003347, Val Loss: 0.0004176 +2025-03-28 04:15:06,244 Epoch 1980/2000 +2025-03-28 04:17:41,069 Current Learning Rate: 0.0009755283 +2025-03-28 04:17:41,070 Train Loss: 0.0003363, Val Loss: 0.0004184 +2025-03-28 04:17:41,070 Epoch 1981/2000 +2025-03-28 04:20:14,823 Current Learning Rate: 0.0009778965 +2025-03-28 04:20:14,823 Train Loss: 0.0003382, Val Loss: 0.0004220 +2025-03-28 04:20:14,823 Epoch 1982/2000 +2025-03-28 04:22:49,419 Current Learning Rate: 0.0009801468 +2025-03-28 04:22:49,419 Train Loss: 0.0003401, Val Loss: 0.0004191 +2025-03-28 04:22:49,420 Epoch 1983/2000 
+2025-03-28 04:25:23,597 Current Learning Rate: 0.0009822787 +2025-03-28 04:25:23,598 Train Loss: 0.0003391, Val Loss: 0.0004203 +2025-03-28 04:25:23,598 Epoch 1984/2000 +2025-03-28 04:27:58,338 Current Learning Rate: 0.0009842916 +2025-03-28 04:27:58,339 Train Loss: 0.0003359, Val Loss: 0.0004177 +2025-03-28 04:27:58,339 Epoch 1985/2000 +2025-03-28 04:30:33,238 Current Learning Rate: 0.0009861850 +2025-03-28 04:30:33,238 Train Loss: 0.0003308, Val Loss: 0.0004142 +2025-03-28 04:30:33,238 Epoch 1986/2000 +2025-03-28 04:33:07,451 Current Learning Rate: 0.0009879584 +2025-03-28 04:33:07,452 Train Loss: 0.0003323, Val Loss: 0.0004175 +2025-03-28 04:33:07,452 Epoch 1987/2000 +2025-03-28 04:35:41,117 Current Learning Rate: 0.0009896114 +2025-03-28 04:35:41,118 Train Loss: 0.0003341, Val Loss: 0.0004188 +2025-03-28 04:35:41,118 Epoch 1988/2000 +2025-03-28 04:38:15,438 Current Learning Rate: 0.0009911436 +2025-03-28 04:38:15,438 Train Loss: 0.0003355, Val Loss: 0.0004203 +2025-03-28 04:38:15,438 Epoch 1989/2000 +2025-03-28 04:40:49,838 Current Learning Rate: 0.0009925547 +2025-03-28 04:40:49,839 Train Loss: 0.0003370, Val Loss: 0.0004190 +2025-03-28 04:40:49,839 Epoch 1990/2000 +2025-03-28 04:43:23,982 Current Learning Rate: 0.0009938442 +2025-03-28 04:43:23,983 Train Loss: 0.0003373, Val Loss: 0.0004176 +2025-03-28 04:43:23,983 Epoch 1991/2000 +2025-03-28 04:45:58,133 Current Learning Rate: 0.0009950118 +2025-03-28 04:45:58,133 Train Loss: 0.0003367, Val Loss: 0.0004236 +2025-03-28 04:45:58,134 Epoch 1992/2000 +2025-03-28 04:48:33,532 Current Learning Rate: 0.0009960574 +2025-03-28 04:48:33,533 Train Loss: 0.0003378, Val Loss: 0.0004207 +2025-03-28 04:48:33,533 Epoch 1993/2000 +2025-03-28 04:51:08,805 Current Learning Rate: 0.0009969805 +2025-03-28 04:51:08,806 Train Loss: 0.0003378, Val Loss: 0.0004208 +2025-03-28 04:51:08,806 Epoch 1994/2000 +2025-03-28 04:53:41,103 Current Learning Rate: 0.0009977810 +2025-03-28 04:53:41,103 Train Loss: 0.0003387, Val Loss: 0.0004228 
+2025-03-28 04:53:41,104 Epoch 1995/2000 +2025-03-28 04:56:15,262 Current Learning Rate: 0.0009984587 +2025-03-28 04:56:15,263 Train Loss: 0.0003386, Val Loss: 0.0004220 +2025-03-28 04:56:15,263 Epoch 1996/2000 +2025-03-28 04:58:49,324 Current Learning Rate: 0.0009990134 +2025-03-28 04:58:49,324 Train Loss: 0.0003378, Val Loss: 0.0004199 +2025-03-28 04:58:49,325 Epoch 1997/2000 +2025-03-28 05:01:23,921 Current Learning Rate: 0.0009994449 +2025-03-28 05:01:23,921 Train Loss: 0.0003333, Val Loss: 0.0004164 +2025-03-28 05:01:23,922 Epoch 1998/2000 +2025-03-28 05:03:58,342 Current Learning Rate: 0.0009997533 +2025-03-28 05:03:58,342 Train Loss: 0.0003324, Val Loss: 0.0004177 +2025-03-28 05:03:58,343 Epoch 1999/2000 +2025-03-28 05:06:33,520 Current Learning Rate: 0.0009999383 +2025-03-28 05:06:33,520 Train Loss: 0.0003343, Val Loss: 0.0004183 +2025-03-28 05:06:33,520 Epoch 2000/2000 +2025-03-28 05:09:07,278 Current Learning Rate: 0.0010000000 +2025-03-28 05:09:07,279 Train Loss: 0.0003352, Val Loss: 0.0004193 +2025-03-28 05:09:10,833 Testing completed and best model saved. 
diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_K_uv_20250218_exp1_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_K_uv_20250218_exp1_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..3e0d2bfb946ebc6e30a3503105a63c899017e062 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_K_uv_20250218_exp1_training_log.log @@ -0,0 +1,78 @@ +2025-02-18 11:17:53,278 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:17:53,439 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:17:53,586 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:17:53,709 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:17:53,741 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:17:53,802 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:17:53,813 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:17:53,817 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:20:47,228 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:20:47,325 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:20:47,346 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:20:47,379 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:20:47,407 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:20:47,433 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:20:47,447 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:20:47,449 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:21:21,680 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:21:21,722 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:21:21,801 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 
11:21:21,807 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:21:21,812 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:21:22,527 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:21:22,564 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:21:22,571 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:21:46,526 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:21:46,547 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:21:46,563 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:21:47,711 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:21:47,742 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:21:47,797 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:21:47,803 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:21:47,805 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:22:11,648 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:22:11,905 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:22:12,036 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:22:12,080 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:22:12,109 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:22:12,171 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:22:12,182 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:22:12,194 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:34:43,514 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:34:43,522 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:34:43,546 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 
11:34:43,607 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:34:43,618 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:34:43,630 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:34:43,663 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:34:43,676 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:35:40,272 Epoch 1/2000 +2025-02-18 11:35:44,883 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,883 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,888 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:35:44,925 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 11:36:23,345 Current Learning Rate: 0.0099993832 +2025-02-18 11:36:24,693 Train Loss: 1.3628420, Val Loss: 0.1698331 +2025-02-18 11:36:24,694 Epoch 2/2000 +2025-02-18 11:37:06,152 Current Learning Rate: 0.0099975328 +2025-02-18 11:37:07,935 Train Loss: 0.0687663, Val Loss: 0.0357662 +2025-02-18 11:37:07,935 Epoch 3/2000 +2025-02-18 11:37:49,856 Current Learning Rate: 0.0099944494 +2025-02-18 11:37:51,612 Train Loss: 0.0274687, Val Loss: 0.0195614 +2025-02-18 11:37:51,612 Epoch 4/2000 +2025-02-18 11:38:32,803 Current Learning Rate: 0.0099901336 +2025-02-18 11:38:34,030 Train Loss: 0.0202074, Val Loss: 0.0180349 +2025-02-18 11:38:34,031 Epoch 5/2000 +2025-02-18 11:39:15,841 Current Learning Rate: 0.0099845867 +2025-02-18 11:39:17,499 Train Loss: 0.0193941, Val Loss: 0.0177117 +2025-02-18 11:39:17,505 Epoch 6/2000 +2025-02-18 11:39:59,506 Current Learning Rate: 0.0099778098 +2025-02-18 11:40:01,383 Train Loss: 0.0191047, Val Loss: 0.0175423 +2025-02-18 11:40:01,384 Epoch 7/2000 +2025-02-18 11:40:43,129 Current Learning Rate: 0.0099698048 +2025-02-18 11:40:44,922 Train Loss: 0.0189317, Val Loss: 0.0174137 +2025-02-18 11:40:44,922 Epoch 8/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_K_uv_20250218_exp2_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_K_uv_20250218_exp2_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..03d83973669c4de993721db567e4dc11c6cd6028 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_K_uv_20250218_exp2_training_log.log @@ -0,0 +1,649 @@ +2025-02-18 11:42:14,518 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:42:14,526 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:42:14,568 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:42:14,637 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:42:14,686 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 
11:42:14,702 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:42:14,714 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:42:14,718 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:42:52,388 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 11:42:52,451 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 11:42:52,545 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 11:42:52,578 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 11:42:52,589 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 11:42:52,607 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 11:42:52,621 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 11:42:52,629 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 11:43:15,702 Epoch 1/2000 +2025-02-18 11:43:20,492 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,492 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. +2025-02-18 11:43:20,493 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 11:43:59,250 Current Learning Rate: 0.0099993832 +2025-02-18 11:44:00,759 Train Loss: 1.3628420, Val Loss: 0.1698331 +2025-02-18 11:44:00,762 Epoch 2/2000 +2025-02-18 11:44:42,481 Current Learning Rate: 0.0099975328 +2025-02-18 11:44:43,991 Train Loss: 0.0687663, Val Loss: 0.0357662 +2025-02-18 11:44:43,991 Epoch 3/2000 +2025-02-18 11:45:26,064 Current Learning Rate: 0.0099944494 +2025-02-18 11:45:27,993 Train Loss: 0.0274687, Val Loss: 0.0195614 +2025-02-18 11:45:27,994 Epoch 4/2000 +2025-02-18 11:46:09,808 Current Learning Rate: 0.0099901336 +2025-02-18 11:46:11,881 Train Loss: 0.0202074, Val Loss: 0.0180349 +2025-02-18 11:46:11,881 Epoch 5/2000 +2025-02-18 11:46:53,799 Current Learning Rate: 0.0099845867 +2025-02-18 11:46:55,553 Train Loss: 0.0193941, Val Loss: 0.0177117 +2025-02-18 11:46:55,557 Epoch 6/2000 +2025-02-18 11:47:37,638 Current Learning Rate: 0.0099778098 +2025-02-18 11:47:39,763 Train Loss: 0.0191047, Val Loss: 0.0175423 +2025-02-18 11:47:39,763 Epoch 7/2000 +2025-02-18 11:48:21,731 Current Learning Rate: 0.0099698048 +2025-02-18 11:48:23,718 Train Loss: 0.0189317, Val Loss: 0.0174137 +2025-02-18 11:48:23,719 Epoch 8/2000 +2025-02-18 11:49:05,878 Current Learning Rate: 0.0099605735 +2025-02-18 11:49:07,409 Train Loss: 0.0187991, Val Loss: 0.0173049 +2025-02-18 11:49:07,410 Epoch 9/2000 +2025-02-18 11:49:49,527 Current Learning Rate: 0.0099501183 +2025-02-18 11:49:51,296 Train Loss: 0.0186761, Val Loss: 0.0172068 +2025-02-18 11:49:51,297 Epoch 10/2000 +2025-02-18 11:50:33,097 Current Learning Rate: 0.0099384417 +2025-02-18 11:50:34,968 Train Loss: 0.0185759, Val Loss: 0.0171160 +2025-02-18 11:50:34,968 Epoch 11/2000 +2025-02-18 11:51:16,768 Current Learning Rate: 0.0099255466 +2025-02-18 11:51:18,470 Train Loss: 0.0184690, Val Loss: 0.0170284 +2025-02-18 11:51:18,471 Epoch 12/2000 +2025-02-18 11:52:00,007 Current Learning Rate: 0.0099114363 +2025-02-18 11:52:01,345 Train Loss: 0.0183741, Val Loss: 0.0169453 +2025-02-18 11:52:01,346 Epoch 
13/2000 +2025-02-18 11:52:43,115 Current Learning Rate: 0.0098961141 +2025-02-18 11:52:44,937 Train Loss: 0.0182787, Val Loss: 0.0168658 +2025-02-18 11:52:44,937 Epoch 14/2000 +2025-02-18 11:53:26,758 Current Learning Rate: 0.0098795838 +2025-02-18 11:53:28,848 Train Loss: 0.0181902, Val Loss: 0.0167882 +2025-02-18 11:53:28,849 Epoch 15/2000 +2025-02-18 11:54:09,794 Current Learning Rate: 0.0098618496 +2025-02-18 11:54:11,095 Train Loss: 0.0181102, Val Loss: 0.0167148 +2025-02-18 11:54:11,096 Epoch 16/2000 +2025-02-18 11:54:52,819 Current Learning Rate: 0.0098429158 +2025-02-18 11:54:54,236 Train Loss: 0.0180223, Val Loss: 0.0166435 +2025-02-18 11:54:54,237 Epoch 17/2000 +2025-02-18 11:55:36,300 Current Learning Rate: 0.0098227871 +2025-02-18 11:55:38,214 Train Loss: 0.0179441, Val Loss: 0.0165739 +2025-02-18 11:55:38,214 Epoch 18/2000 +2025-02-18 11:56:20,053 Current Learning Rate: 0.0098014684 +2025-02-18 11:56:22,204 Train Loss: 0.0178550, Val Loss: 0.0164465 +2025-02-18 11:56:22,204 Epoch 19/2000 +2025-02-18 11:57:03,548 Current Learning Rate: 0.0097789651 +2025-02-18 11:57:05,324 Train Loss: 0.0176387, Val Loss: 0.0162635 +2025-02-18 11:57:05,324 Epoch 20/2000 +2025-02-18 11:57:46,483 Current Learning Rate: 0.0097552826 +2025-02-18 11:57:48,315 Train Loss: 0.0174251, Val Loss: 0.0161026 +2025-02-18 11:57:48,316 Epoch 21/2000 +2025-02-18 11:58:29,538 Current Learning Rate: 0.0097304268 +2025-02-18 11:58:31,217 Train Loss: 0.0172205, Val Loss: 0.0159079 +2025-02-18 11:58:31,218 Epoch 22/2000 +2025-02-18 11:59:12,500 Current Learning Rate: 0.0097044038 +2025-02-18 11:59:13,681 Train Loss: 0.0169694, Val Loss: 0.0156002 +2025-02-18 11:59:13,683 Epoch 23/2000 +2025-02-18 11:59:55,418 Current Learning Rate: 0.0096772202 +2025-02-18 11:59:56,993 Train Loss: 0.0165015, Val Loss: 0.0151188 +2025-02-18 11:59:56,993 Epoch 24/2000 +2025-02-18 12:00:38,790 Current Learning Rate: 0.0096488824 +2025-02-18 12:00:39,918 Train Loss: 0.0158210, Val Loss: 0.0143949 +2025-02-18 
12:00:39,919 Epoch 25/2000 +2025-02-18 12:01:22,034 Current Learning Rate: 0.0096193977 +2025-02-18 12:01:23,482 Train Loss: 0.0150978, Val Loss: 0.0137453 +2025-02-18 12:01:23,484 Epoch 26/2000 +2025-02-18 12:02:05,491 Current Learning Rate: 0.0095887731 +2025-02-18 12:02:07,069 Train Loss: 0.0146063, Val Loss: 0.0133372 +2025-02-18 12:02:07,069 Epoch 27/2000 +2025-02-18 12:02:48,291 Current Learning Rate: 0.0095570164 +2025-02-18 12:02:49,392 Train Loss: 0.0141579, Val Loss: 0.0129903 +2025-02-18 12:02:49,392 Epoch 28/2000 +2025-02-18 12:03:31,428 Current Learning Rate: 0.0095241353 +2025-02-18 12:03:33,011 Train Loss: 0.0138481, Val Loss: 0.0127644 +2025-02-18 12:03:33,016 Epoch 29/2000 +2025-02-18 12:04:15,259 Current Learning Rate: 0.0094901379 +2025-02-18 12:04:17,239 Train Loss: 0.0135901, Val Loss: 0.0124894 +2025-02-18 12:04:17,239 Epoch 30/2000 +2025-02-18 12:04:59,403 Current Learning Rate: 0.0094550326 +2025-02-18 12:05:01,238 Train Loss: 0.0133189, Val Loss: 0.0121462 +2025-02-18 12:05:01,239 Epoch 31/2000 +2025-02-18 12:05:43,059 Current Learning Rate: 0.0094188282 +2025-02-18 12:05:44,431 Train Loss: 0.0128272, Val Loss: 0.0116701 +2025-02-18 12:05:44,431 Epoch 32/2000 +2025-02-18 12:06:25,938 Current Learning Rate: 0.0093815334 +2025-02-18 12:06:27,933 Train Loss: 0.0122802, Val Loss: 0.0112547 +2025-02-18 12:06:27,934 Epoch 33/2000 +2025-02-18 12:07:10,039 Current Learning Rate: 0.0093431576 +2025-02-18 12:07:11,798 Train Loss: 0.0117762, Val Loss: 0.0106204 +2025-02-18 12:07:11,799 Epoch 34/2000 +2025-02-18 12:07:53,462 Current Learning Rate: 0.0093037101 +2025-02-18 12:07:55,177 Train Loss: 0.0112313, Val Loss: 0.0101279 +2025-02-18 12:07:55,178 Epoch 35/2000 +2025-02-18 12:08:37,066 Current Learning Rate: 0.0092632008 +2025-02-18 12:08:39,137 Train Loss: 0.0108187, Val Loss: 0.0096379 +2025-02-18 12:08:39,138 Epoch 36/2000 +2025-02-18 12:09:21,129 Current Learning Rate: 0.0092216396 +2025-02-18 12:09:23,012 Train Loss: 0.0102309, Val Loss: 
0.0092521 +2025-02-18 12:09:23,012 Epoch 37/2000 +2025-02-18 12:10:04,966 Current Learning Rate: 0.0091790368 +2025-02-18 12:10:06,776 Train Loss: 0.0098315, Val Loss: 0.0088337 +2025-02-18 12:10:06,776 Epoch 38/2000 +2025-02-18 12:10:48,909 Current Learning Rate: 0.0091354029 +2025-02-18 12:10:50,456 Train Loss: 0.0093248, Val Loss: 0.0083890 +2025-02-18 12:10:50,458 Epoch 39/2000 +2025-02-18 12:11:32,518 Current Learning Rate: 0.0090907486 +2025-02-18 12:11:34,189 Train Loss: 0.0087695, Val Loss: 0.0080588 +2025-02-18 12:11:34,189 Epoch 40/2000 +2025-02-18 12:12:16,194 Current Learning Rate: 0.0090450850 +2025-02-18 12:12:17,986 Train Loss: 0.0081769, Val Loss: 0.0073063 +2025-02-18 12:12:17,987 Epoch 41/2000 +2025-02-18 12:12:59,838 Current Learning Rate: 0.0089984233 +2025-02-18 12:13:01,261 Train Loss: 0.0076267, Val Loss: 0.0069956 +2025-02-18 12:13:01,261 Epoch 42/2000 +2025-02-18 12:13:42,414 Current Learning Rate: 0.0089507751 +2025-02-18 12:13:42,415 Train Loss: 0.0076356, Val Loss: 0.0090246 +2025-02-18 12:13:42,415 Epoch 43/2000 +2025-02-18 12:14:25,024 Current Learning Rate: 0.0089021520 +2025-02-18 12:14:27,186 Train Loss: 0.0074178, Val Loss: 0.0064407 +2025-02-18 12:14:27,186 Epoch 44/2000 +2025-02-18 12:15:08,764 Current Learning Rate: 0.0088525662 +2025-02-18 12:15:09,977 Train Loss: 0.0068171, Val Loss: 0.0060696 +2025-02-18 12:15:09,977 Epoch 45/2000 +2025-02-18 12:15:52,174 Current Learning Rate: 0.0088020298 +2025-02-18 12:15:53,426 Train Loss: 0.0066029, Val Loss: 0.0059524 +2025-02-18 12:15:53,426 Epoch 46/2000 +2025-02-18 12:16:35,338 Current Learning Rate: 0.0087505553 +2025-02-18 12:16:37,448 Train Loss: 0.0065815, Val Loss: 0.0057815 +2025-02-18 12:16:37,449 Epoch 47/2000 +2025-02-18 12:17:18,719 Current Learning Rate: 0.0086981555 +2025-02-18 12:17:18,720 Train Loss: 0.0065231, Val Loss: 0.0061614 +2025-02-18 12:17:18,720 Epoch 48/2000 +2025-02-18 12:18:01,487 Current Learning Rate: 0.0086448431 +2025-02-18 12:18:03,649 Train Loss: 
0.0062545, Val Loss: 0.0056254 +2025-02-18 12:18:03,650 Epoch 49/2000 +2025-02-18 12:18:45,446 Current Learning Rate: 0.0085906315 +2025-02-18 12:18:47,196 Train Loss: 0.0062356, Val Loss: 0.0054859 +2025-02-18 12:18:47,196 Epoch 50/2000 +2025-02-18 12:19:28,575 Current Learning Rate: 0.0085355339 +2025-02-18 12:19:30,542 Train Loss: 0.0059626, Val Loss: 0.0053286 +2025-02-18 12:19:30,542 Epoch 51/2000 +2025-02-18 12:20:12,724 Current Learning Rate: 0.0084795640 +2025-02-18 12:20:14,653 Train Loss: 0.0060748, Val Loss: 0.0053129 +2025-02-18 12:20:14,653 Epoch 52/2000 +2025-02-18 12:20:56,323 Current Learning Rate: 0.0084227355 +2025-02-18 12:20:58,470 Train Loss: 0.0054856, Val Loss: 0.0050178 +2025-02-18 12:20:58,471 Epoch 53/2000 +2025-02-18 12:21:39,824 Current Learning Rate: 0.0083650626 +2025-02-18 12:21:41,319 Train Loss: 0.0056379, Val Loss: 0.0049119 +2025-02-18 12:21:41,319 Epoch 54/2000 +2025-02-18 12:22:23,175 Current Learning Rate: 0.0083065593 +2025-02-18 12:22:23,176 Train Loss: 0.0057683, Val Loss: 0.0053156 +2025-02-18 12:22:23,176 Epoch 55/2000 +2025-02-18 12:23:05,869 Current Learning Rate: 0.0082472402 +2025-02-18 12:23:07,795 Train Loss: 0.0053413, Val Loss: 0.0048144 +2025-02-18 12:23:07,795 Epoch 56/2000 +2025-02-18 12:23:49,611 Current Learning Rate: 0.0081871199 +2025-02-18 12:23:51,366 Train Loss: 0.0051032, Val Loss: 0.0047444 +2025-02-18 12:23:51,367 Epoch 57/2000 +2025-02-18 12:24:32,519 Current Learning Rate: 0.0081262133 +2025-02-18 12:24:34,487 Train Loss: 0.0049701, Val Loss: 0.0045479 +2025-02-18 12:24:34,488 Epoch 58/2000 +2025-02-18 12:25:16,503 Current Learning Rate: 0.0080645353 +2025-02-18 12:25:16,504 Train Loss: 0.0047992, Val Loss: 0.0050299 +2025-02-18 12:25:16,504 Epoch 59/2000 +2025-02-18 12:25:58,811 Current Learning Rate: 0.0080021011 +2025-02-18 12:26:00,720 Train Loss: 0.0049871, Val Loss: 0.0042919 +2025-02-18 12:26:00,720 Epoch 60/2000 +2025-02-18 12:26:42,328 Current Learning Rate: 0.0079389263 +2025-02-18 
12:26:42,329 Train Loss: 0.0046179, Val Loss: 0.0044405 +2025-02-18 12:26:42,329 Epoch 61/2000 +2025-02-18 12:27:25,127 Current Learning Rate: 0.0078750263 +2025-02-18 12:27:25,128 Train Loss: 0.0049569, Val Loss: 0.0044025 +2025-02-18 12:27:25,128 Epoch 62/2000 +2025-02-18 12:28:07,952 Current Learning Rate: 0.0078104169 +2025-02-18 12:28:07,952 Train Loss: 0.0047013, Val Loss: 0.0046409 +2025-02-18 12:28:07,953 Epoch 63/2000 +2025-02-18 12:28:50,796 Current Learning Rate: 0.0077451141 +2025-02-18 12:28:52,687 Train Loss: 0.0044193, Val Loss: 0.0039515 +2025-02-18 12:28:52,687 Epoch 64/2000 +2025-02-18 12:29:33,860 Current Learning Rate: 0.0076791340 +2025-02-18 12:29:33,861 Train Loss: 0.0046267, Val Loss: 0.0039819 +2025-02-18 12:29:33,861 Epoch 65/2000 +2025-02-18 12:30:16,348 Current Learning Rate: 0.0076124928 +2025-02-18 12:30:17,908 Train Loss: 0.0042735, Val Loss: 0.0039007 +2025-02-18 12:30:17,909 Epoch 66/2000 +2025-02-18 12:30:59,781 Current Learning Rate: 0.0075452071 +2025-02-18 12:31:01,427 Train Loss: 0.0040702, Val Loss: 0.0036998 +2025-02-18 12:31:01,428 Epoch 67/2000 +2025-02-18 12:31:42,939 Current Learning Rate: 0.0074772933 +2025-02-18 12:31:42,940 Train Loss: 0.0041402, Val Loss: 0.0043896 +2025-02-18 12:31:42,940 Epoch 68/2000 +2025-02-18 12:32:25,289 Current Learning Rate: 0.0074087684 +2025-02-18 12:32:25,289 Train Loss: 0.0040681, Val Loss: 0.0037126 +2025-02-18 12:32:25,290 Epoch 69/2000 +2025-02-18 12:33:07,948 Current Learning Rate: 0.0073396491 +2025-02-18 12:33:09,164 Train Loss: 0.0041826, Val Loss: 0.0035981 +2025-02-18 12:33:09,165 Epoch 70/2000 +2025-02-18 12:33:51,447 Current Learning Rate: 0.0072699525 +2025-02-18 12:33:53,320 Train Loss: 0.0039610, Val Loss: 0.0035859 +2025-02-18 12:33:53,321 Epoch 71/2000 +2025-02-18 12:34:34,910 Current Learning Rate: 0.0071996958 +2025-02-18 12:34:34,911 Train Loss: 0.0040461, Val Loss: 0.0038279 +2025-02-18 12:34:34,911 Epoch 72/2000 +2025-02-18 12:35:17,593 Current Learning Rate: 
0.0071288965 +2025-02-18 12:35:18,958 Train Loss: 0.0038196, Val Loss: 0.0034883 +2025-02-18 12:35:18,958 Epoch 73/2000 +2025-02-18 12:36:00,463 Current Learning Rate: 0.0070575718 +2025-02-18 12:36:00,464 Train Loss: 0.0037394, Val Loss: 0.0035490 +2025-02-18 12:36:00,465 Epoch 74/2000 +2025-02-18 12:36:43,416 Current Learning Rate: 0.0069857395 +2025-02-18 12:36:43,416 Train Loss: 0.0039330, Val Loss: 0.0036724 +2025-02-18 12:36:43,417 Epoch 75/2000 +2025-02-18 12:37:25,961 Current Learning Rate: 0.0069134172 +2025-02-18 12:37:25,962 Train Loss: 0.0036523, Val Loss: 0.0037484 +2025-02-18 12:37:25,962 Epoch 76/2000 +2025-02-18 12:38:08,559 Current Learning Rate: 0.0068406228 +2025-02-18 12:38:10,194 Train Loss: 0.0038443, Val Loss: 0.0034668 +2025-02-18 12:38:10,195 Epoch 77/2000 +2025-02-18 12:38:52,348 Current Learning Rate: 0.0067673742 +2025-02-18 12:38:54,215 Train Loss: 0.0038872, Val Loss: 0.0033315 +2025-02-18 12:38:54,215 Epoch 78/2000 +2025-02-18 12:39:36,357 Current Learning Rate: 0.0066936896 +2025-02-18 12:39:38,490 Train Loss: 0.0038132, Val Loss: 0.0032982 +2025-02-18 12:39:38,490 Epoch 79/2000 +2025-02-18 12:40:20,492 Current Learning Rate: 0.0066195871 +2025-02-18 12:40:22,024 Train Loss: 0.0035646, Val Loss: 0.0031638 +2025-02-18 12:40:22,025 Epoch 80/2000 +2025-02-18 12:41:04,384 Current Learning Rate: 0.0065450850 +2025-02-18 12:41:04,384 Train Loss: 0.0034006, Val Loss: 0.0031946 +2025-02-18 12:41:04,384 Epoch 81/2000 +2025-02-18 12:41:46,486 Current Learning Rate: 0.0064702016 +2025-02-18 12:41:48,643 Train Loss: 0.0033588, Val Loss: 0.0030893 +2025-02-18 12:41:48,644 Epoch 82/2000 +2025-02-18 12:42:29,850 Current Learning Rate: 0.0063949555 +2025-02-18 12:42:30,923 Train Loss: 0.0033879, Val Loss: 0.0030405 +2025-02-18 12:42:30,923 Epoch 83/2000 +2025-02-18 12:43:13,233 Current Learning Rate: 0.0063193652 +2025-02-18 12:43:14,913 Train Loss: 0.0031879, Val Loss: 0.0029718 +2025-02-18 12:43:14,928 Epoch 84/2000 +2025-02-18 12:43:57,251 
Current Learning Rate: 0.0062434494 +2025-02-18 12:43:57,252 Train Loss: 0.0032266, Val Loss: 0.0031460 +2025-02-18 12:43:57,252 Epoch 85/2000 +2025-02-18 12:44:39,349 Current Learning Rate: 0.0061672268 +2025-02-18 12:44:41,258 Train Loss: 0.0034299, Val Loss: 0.0029372 +2025-02-18 12:44:41,259 Epoch 86/2000 +2025-02-18 12:45:22,666 Current Learning Rate: 0.0060907162 +2025-02-18 12:45:22,668 Train Loss: 0.0031645, Val Loss: 0.0029600 +2025-02-18 12:45:22,668 Epoch 87/2000 +2025-02-18 12:46:05,573 Current Learning Rate: 0.0060139365 +2025-02-18 12:46:05,574 Train Loss: 0.0034704, Val Loss: 0.0032086 +2025-02-18 12:46:05,574 Epoch 88/2000 +2025-02-18 12:46:48,414 Current Learning Rate: 0.0059369066 +2025-02-18 12:46:49,978 Train Loss: 0.0032061, Val Loss: 0.0029067 +2025-02-18 12:46:49,978 Epoch 89/2000 +2025-02-18 12:47:31,437 Current Learning Rate: 0.0058596455 +2025-02-18 12:47:32,844 Train Loss: 0.0030000, Val Loss: 0.0028417 +2025-02-18 12:47:32,857 Epoch 90/2000 +2025-02-18 12:48:15,087 Current Learning Rate: 0.0057821723 +2025-02-18 12:48:16,952 Train Loss: 0.0029832, Val Loss: 0.0028316 +2025-02-18 12:48:16,953 Epoch 91/2000 +2025-02-18 12:48:59,088 Current Learning Rate: 0.0057045062 +2025-02-18 12:48:59,090 Train Loss: 0.0031305, Val Loss: 0.0028579 +2025-02-18 12:48:59,090 Epoch 92/2000 +2025-02-18 12:49:41,859 Current Learning Rate: 0.0056266662 +2025-02-18 12:49:41,860 Train Loss: 0.0030638, Val Loss: 0.0029005 +2025-02-18 12:49:41,860 Epoch 93/2000 +2025-02-18 12:50:24,399 Current Learning Rate: 0.0055486716 +2025-02-18 12:50:26,199 Train Loss: 0.0031297, Val Loss: 0.0028207 +2025-02-18 12:50:26,200 Epoch 94/2000 +2025-02-18 12:51:08,305 Current Learning Rate: 0.0054705416 +2025-02-18 12:51:08,306 Train Loss: 0.0031764, Val Loss: 0.0029541 +2025-02-18 12:51:08,306 Epoch 95/2000 +2025-02-18 12:51:50,998 Current Learning Rate: 0.0053922955 +2025-02-18 12:51:50,999 Train Loss: 0.0028410, Val Loss: 0.0028266 +2025-02-18 12:51:50,999 Epoch 96/2000 
+2025-02-18 12:52:33,609 Current Learning Rate: 0.0053139526 +2025-02-18 12:52:33,610 Train Loss: 0.0031975, Val Loss: 0.0028658 +2025-02-18 12:52:33,610 Epoch 97/2000 +2025-02-18 12:53:16,181 Current Learning Rate: 0.0052355323 +2025-02-18 12:53:18,204 Train Loss: 0.0029171, Val Loss: 0.0027451 +2025-02-18 12:53:18,205 Epoch 98/2000 +2025-02-18 12:54:00,168 Current Learning Rate: 0.0051570538 +2025-02-18 12:54:02,214 Train Loss: 0.0030150, Val Loss: 0.0027333 +2025-02-18 12:54:02,214 Epoch 99/2000 +2025-02-18 12:54:44,237 Current Learning Rate: 0.0050785366 +2025-02-18 12:54:45,999 Train Loss: 0.0029330, Val Loss: 0.0027265 +2025-02-18 12:54:45,999 Epoch 100/2000 +2025-02-18 12:55:27,425 Current Learning Rate: 0.0050000000 +2025-02-18 12:55:28,907 Train Loss: 0.0028290, Val Loss: 0.0026971 +2025-02-18 12:55:28,907 Epoch 101/2000 +2025-02-18 12:56:10,907 Current Learning Rate: 0.0049214634 +2025-02-18 12:56:12,102 Train Loss: 0.0028858, Val Loss: 0.0026960 +2025-02-18 12:56:12,103 Epoch 102/2000 +2025-02-18 12:56:53,651 Current Learning Rate: 0.0048429462 +2025-02-18 12:56:54,713 Train Loss: 0.0029532, Val Loss: 0.0026186 +2025-02-18 12:56:54,713 Epoch 103/2000 +2025-02-18 12:57:36,563 Current Learning Rate: 0.0047644677 +2025-02-18 12:57:36,564 Train Loss: 0.0027671, Val Loss: 0.0026594 +2025-02-18 12:57:36,564 Epoch 104/2000 +2025-02-18 12:58:19,587 Current Learning Rate: 0.0046860474 +2025-02-18 12:58:19,587 Train Loss: 0.0029455, Val Loss: 0.0027470 +2025-02-18 12:58:19,587 Epoch 105/2000 +2025-02-18 12:59:01,917 Current Learning Rate: 0.0046077045 +2025-02-18 12:59:01,918 Train Loss: 0.0029949, Val Loss: 0.0027208 +2025-02-18 12:59:01,918 Epoch 106/2000 +2025-02-18 12:59:44,410 Current Learning Rate: 0.0045294584 +2025-02-18 12:59:44,410 Train Loss: 0.0031252, Val Loss: 0.0027911 +2025-02-18 12:59:44,411 Epoch 107/2000 +2025-02-18 13:00:26,626 Current Learning Rate: 0.0044513284 +2025-02-18 13:00:28,412 Train Loss: 0.0027322, Val Loss: 0.0025305 +2025-02-18 
13:00:28,412 Epoch 108/2000 +2025-02-18 13:01:10,166 Current Learning Rate: 0.0043733338 +2025-02-18 13:01:10,167 Train Loss: 0.0027422, Val Loss: 0.0025760 +2025-02-18 13:01:10,167 Epoch 109/2000 +2025-02-18 13:01:51,747 Current Learning Rate: 0.0042954938 +2025-02-18 13:01:51,747 Train Loss: 0.0030014, Val Loss: 0.0025514 +2025-02-18 13:01:51,748 Epoch 110/2000 +2025-02-18 13:02:34,351 Current Learning Rate: 0.0042178277 +2025-02-18 13:02:34,351 Train Loss: 0.0030148, Val Loss: 0.0026140 +2025-02-18 13:02:34,352 Epoch 111/2000 +2025-02-18 13:03:16,218 Current Learning Rate: 0.0041403545 +2025-02-18 13:03:18,552 Train Loss: 0.0026457, Val Loss: 0.0025104 +2025-02-18 13:03:18,553 Epoch 112/2000 +2025-02-18 13:04:00,673 Current Learning Rate: 0.0040630934 +2025-02-18 13:04:02,737 Train Loss: 0.0026000, Val Loss: 0.0024824 +2025-02-18 13:04:02,738 Epoch 113/2000 +2025-02-18 13:04:44,450 Current Learning Rate: 0.0039860635 +2025-02-18 13:04:46,560 Train Loss: 0.0027027, Val Loss: 0.0024382 +2025-02-18 13:04:46,560 Epoch 114/2000 +2025-02-18 13:05:27,799 Current Learning Rate: 0.0039092838 +2025-02-18 13:05:27,800 Train Loss: 0.0026419, Val Loss: 0.0024624 +2025-02-18 13:05:27,800 Epoch 115/2000 +2025-02-18 13:06:10,862 Current Learning Rate: 0.0038327732 +2025-02-18 13:06:10,863 Train Loss: 0.0031512, Val Loss: 0.0024630 +2025-02-18 13:06:10,863 Epoch 116/2000 +2025-02-18 13:06:53,102 Current Learning Rate: 0.0037565506 +2025-02-18 13:06:53,103 Train Loss: 0.0027621, Val Loss: 0.0024817 +2025-02-18 13:06:53,104 Epoch 117/2000 +2025-02-18 13:07:35,289 Current Learning Rate: 0.0036806348 +2025-02-18 13:07:35,289 Train Loss: 0.0027294, Val Loss: 0.0025535 +2025-02-18 13:07:35,289 Epoch 118/2000 +2025-02-18 13:08:17,845 Current Learning Rate: 0.0036050445 +2025-02-18 13:08:17,845 Train Loss: 0.0025651, Val Loss: 0.0025561 +2025-02-18 13:08:17,846 Epoch 119/2000 +2025-02-18 13:09:00,242 Current Learning Rate: 0.0035297984 +2025-02-18 13:09:00,242 Train Loss: 0.0031162, Val 
Loss: 0.0025939 +2025-02-18 13:09:00,243 Epoch 120/2000 +2025-02-18 13:09:42,791 Current Learning Rate: 0.0034549150 +2025-02-18 13:09:42,791 Train Loss: 0.0027383, Val Loss: 0.0025477 +2025-02-18 13:09:42,792 Epoch 121/2000 +2025-02-18 13:10:25,578 Current Learning Rate: 0.0033804129 +2025-02-18 13:10:27,319 Train Loss: 0.0027425, Val Loss: 0.0023956 +2025-02-18 13:10:27,320 Epoch 122/2000 +2025-02-18 13:11:08,656 Current Learning Rate: 0.0033063104 +2025-02-18 13:11:08,656 Train Loss: 0.0026657, Val Loss: 0.0024754 +2025-02-18 13:11:08,656 Epoch 123/2000 +2025-02-18 13:11:51,834 Current Learning Rate: 0.0032326258 +2025-02-18 13:11:53,759 Train Loss: 0.0024242, Val Loss: 0.0023507 +2025-02-18 13:11:53,760 Epoch 124/2000 +2025-02-18 13:12:35,916 Current Learning Rate: 0.0031593772 +2025-02-18 13:12:35,917 Train Loss: 0.0025713, Val Loss: 0.0023812 +2025-02-18 13:12:35,917 Epoch 125/2000 +2025-02-18 13:13:18,844 Current Learning Rate: 0.0030865828 +2025-02-18 13:13:21,089 Train Loss: 0.0025928, Val Loss: 0.0023480 +2025-02-18 13:13:21,089 Epoch 126/2000 +2025-02-18 13:14:02,360 Current Learning Rate: 0.0030142605 +2025-02-18 13:14:04,286 Train Loss: 0.0024050, Val Loss: 0.0023392 +2025-02-18 13:14:04,286 Epoch 127/2000 +2025-02-18 13:14:46,606 Current Learning Rate: 0.0029424282 +2025-02-18 13:14:48,684 Train Loss: 0.0025779, Val Loss: 0.0023248 +2025-02-18 13:14:48,685 Epoch 128/2000 +2025-02-18 13:15:30,433 Current Learning Rate: 0.0028711035 +2025-02-18 13:15:30,434 Train Loss: 0.0025542, Val Loss: 0.0024302 +2025-02-18 13:15:30,435 Epoch 129/2000 +2025-02-18 13:16:13,088 Current Learning Rate: 0.0028003042 +2025-02-18 13:16:13,089 Train Loss: 0.0027851, Val Loss: 0.0023806 +2025-02-18 13:16:13,089 Epoch 130/2000 +2025-02-18 13:16:55,222 Current Learning Rate: 0.0027300475 +2025-02-18 13:16:55,223 Train Loss: 0.0024214, Val Loss: 0.0023312 +2025-02-18 13:16:55,224 Epoch 131/2000 +2025-02-18 13:17:38,039 Current Learning Rate: 0.0026603509 +2025-02-18 
13:17:39,458 Train Loss: 0.0025682, Val Loss: 0.0022957 +2025-02-18 13:17:39,459 Epoch 132/2000 +2025-02-18 13:18:20,980 Current Learning Rate: 0.0025912316 +2025-02-18 13:18:22,217 Train Loss: 0.0023948, Val Loss: 0.0022712 +2025-02-18 13:18:22,217 Epoch 133/2000 +2025-02-18 13:19:03,940 Current Learning Rate: 0.0025227067 +2025-02-18 13:19:03,941 Train Loss: 0.0026540, Val Loss: 0.0023643 +2025-02-18 13:19:03,941 Epoch 134/2000 +2025-02-18 13:19:47,228 Current Learning Rate: 0.0024547929 +2025-02-18 13:19:47,228 Train Loss: 0.0026912, Val Loss: 0.0022917 +2025-02-18 13:19:47,229 Epoch 135/2000 +2025-02-18 13:20:30,279 Current Learning Rate: 0.0023875072 +2025-02-18 13:20:32,202 Train Loss: 0.0023403, Val Loss: 0.0022606 +2025-02-18 13:20:32,202 Epoch 136/2000 +2025-02-18 13:21:13,548 Current Learning Rate: 0.0023208660 +2025-02-18 13:21:13,549 Train Loss: 0.0025837, Val Loss: 0.0022768 +2025-02-18 13:21:13,549 Epoch 137/2000 +2025-02-18 13:21:56,736 Current Learning Rate: 0.0022548859 +2025-02-18 13:21:58,097 Train Loss: 0.0022579, Val Loss: 0.0022366 +2025-02-18 13:21:58,097 Epoch 138/2000 +2025-02-18 13:22:40,345 Current Learning Rate: 0.0021895831 +2025-02-18 13:22:41,717 Train Loss: 0.0023904, Val Loss: 0.0022284 +2025-02-18 13:22:41,717 Epoch 139/2000 +2025-02-18 13:23:23,732 Current Learning Rate: 0.0021249737 +2025-02-18 13:23:23,733 Train Loss: 0.0024008, Val Loss: 0.0022358 +2025-02-18 13:23:23,733 Epoch 140/2000 +2025-02-18 13:24:07,254 Current Learning Rate: 0.0020610737 +2025-02-18 13:24:09,092 Train Loss: 0.0022246, Val Loss: 0.0022110 +2025-02-18 13:24:09,092 Epoch 141/2000 +2025-02-18 13:24:51,508 Current Learning Rate: 0.0019978989 +2025-02-18 13:24:51,509 Train Loss: 0.0023086, Val Loss: 0.0022171 +2025-02-18 13:24:51,509 Epoch 142/2000 +2025-02-18 13:25:34,071 Current Learning Rate: 0.0019354647 +2025-02-18 13:25:35,999 Train Loss: 0.0022637, Val Loss: 0.0021975 +2025-02-18 13:25:35,999 Epoch 143/2000 +2025-02-18 13:26:17,828 Current Learning 
Rate: 0.0018737867 +2025-02-18 13:26:17,829 Train Loss: 0.0024424, Val Loss: 0.0022221 +2025-02-18 13:26:17,829 Epoch 144/2000 +2025-02-18 13:27:00,090 Current Learning Rate: 0.0018128801 +2025-02-18 13:27:00,090 Train Loss: 0.0022830, Val Loss: 0.0022219 +2025-02-18 13:27:00,091 Epoch 145/2000 +2025-02-18 13:27:43,268 Current Learning Rate: 0.0017527598 +2025-02-18 13:27:43,268 Train Loss: 0.0025422, Val Loss: 0.0022204 +2025-02-18 13:27:43,268 Epoch 146/2000 +2025-02-18 13:28:25,985 Current Learning Rate: 0.0016934407 +2025-02-18 13:28:27,579 Train Loss: 0.0022766, Val Loss: 0.0021824 +2025-02-18 13:28:27,580 Epoch 147/2000 +2025-02-18 13:29:09,208 Current Learning Rate: 0.0016349374 +2025-02-18 13:29:09,209 Train Loss: 0.0023299, Val Loss: 0.0022020 +2025-02-18 13:29:09,210 Epoch 148/2000 +2025-02-18 13:29:51,369 Current Learning Rate: 0.0015772645 +2025-02-18 13:29:51,369 Train Loss: 0.0023008, Val Loss: 0.0022081 +2025-02-18 13:29:51,370 Epoch 149/2000 +2025-02-18 13:30:34,048 Current Learning Rate: 0.0015204360 +2025-02-18 13:30:35,580 Train Loss: 0.0022495, Val Loss: 0.0021472 +2025-02-18 13:30:35,580 Epoch 150/2000 +2025-02-18 13:31:17,736 Current Learning Rate: 0.0014644661 +2025-02-18 13:31:19,639 Train Loss: 0.0022816, Val Loss: 0.0021302 +2025-02-18 13:31:19,639 Epoch 151/2000 +2025-02-18 13:32:01,716 Current Learning Rate: 0.0014093685 +2025-02-18 13:32:01,721 Train Loss: 0.0025645, Val Loss: 0.0021428 +2025-02-18 13:32:01,722 Epoch 152/2000 +2025-02-18 13:32:44,805 Current Learning Rate: 0.0013551569 +2025-02-18 13:32:44,805 Train Loss: 0.0024435, Val Loss: 0.0021654 +2025-02-18 13:32:44,806 Epoch 153/2000 +2025-02-18 13:33:27,199 Current Learning Rate: 0.0013018445 +2025-02-18 13:33:29,106 Train Loss: 0.0025198, Val Loss: 0.0021174 +2025-02-18 13:33:29,108 Epoch 154/2000 +2025-02-18 13:34:11,534 Current Learning Rate: 0.0012494447 +2025-02-18 13:34:13,156 Train Loss: 0.0021932, Val Loss: 0.0020996 +2025-02-18 13:34:13,156 Epoch 155/2000 +2025-02-18 
13:34:54,403 Current Learning Rate: 0.0011979702 +2025-02-18 13:34:54,404 Train Loss: 0.0021102, Val Loss: 0.0021050 +2025-02-18 13:34:54,404 Epoch 156/2000 +2025-02-18 13:35:36,836 Current Learning Rate: 0.0011474338 +2025-02-18 13:35:38,400 Train Loss: 0.0021750, Val Loss: 0.0020915 +2025-02-18 13:35:38,400 Epoch 157/2000 +2025-02-18 13:36:19,949 Current Learning Rate: 0.0010978480 +2025-02-18 13:36:19,950 Train Loss: 0.0021748, Val Loss: 0.0021046 +2025-02-18 13:36:19,950 Epoch 158/2000 +2025-02-18 13:37:01,890 Current Learning Rate: 0.0010492249 +2025-02-18 13:37:01,891 Train Loss: 0.0022929, Val Loss: 0.0020975 +2025-02-18 13:37:01,891 Epoch 159/2000 +2025-02-18 13:37:44,328 Current Learning Rate: 0.0010015767 +2025-02-18 13:37:44,329 Train Loss: 0.0022445, Val Loss: 0.0020939 +2025-02-18 13:37:44,330 Epoch 160/2000 +2025-02-18 13:38:26,477 Current Learning Rate: 0.0009549150 +2025-02-18 13:38:27,756 Train Loss: 0.0023150, Val Loss: 0.0020744 +2025-02-18 13:38:27,757 Epoch 161/2000 +2025-02-18 13:39:09,579 Current Learning Rate: 0.0009092514 +2025-02-18 13:39:11,054 Train Loss: 0.0023384, Val Loss: 0.0020734 +2025-02-18 13:39:11,054 Epoch 162/2000 +2025-02-18 13:39:53,041 Current Learning Rate: 0.0008645971 +2025-02-18 13:39:53,042 Train Loss: 0.0024140, Val Loss: 0.0020823 +2025-02-18 13:39:53,042 Epoch 163/2000 +2025-02-18 13:40:35,431 Current Learning Rate: 0.0008209632 +2025-02-18 13:40:35,432 Train Loss: 0.0024373, Val Loss: 0.0020741 +2025-02-18 13:40:35,432 Epoch 164/2000 +2025-02-18 13:41:18,399 Current Learning Rate: 0.0007783604 +2025-02-18 13:41:19,717 Train Loss: 0.0022746, Val Loss: 0.0020577 +2025-02-18 13:41:19,717 Epoch 165/2000 +2025-02-18 13:42:01,952 Current Learning Rate: 0.0007367992 +2025-02-18 13:42:01,953 Train Loss: 0.0022988, Val Loss: 0.0020613 +2025-02-18 13:42:01,953 Epoch 166/2000 +2025-02-18 13:42:44,119 Current Learning Rate: 0.0006962899 +2025-02-18 13:42:45,422 Train Loss: 0.0023153, Val Loss: 0.0020529 +2025-02-18 
13:42:45,423 Epoch 167/2000 +2025-02-18 13:43:27,273 Current Learning Rate: 0.0006568424 +2025-02-18 13:43:28,747 Train Loss: 0.0021537, Val Loss: 0.0020437 +2025-02-18 13:43:28,748 Epoch 168/2000 +2025-02-18 13:44:10,016 Current Learning Rate: 0.0006184666 +2025-02-18 13:44:11,047 Train Loss: 0.0020853, Val Loss: 0.0020344 +2025-02-18 13:44:11,048 Epoch 169/2000 +2025-02-18 13:44:53,571 Current Learning Rate: 0.0005811718 +2025-02-18 13:44:55,499 Train Loss: 0.0022062, Val Loss: 0.0020296 +2025-02-18 13:44:55,499 Epoch 170/2000 +2025-02-18 13:45:37,794 Current Learning Rate: 0.0005449674 +2025-02-18 13:45:40,026 Train Loss: 0.0020276, Val Loss: 0.0020274 +2025-02-18 13:45:40,026 Epoch 171/2000 +2025-02-18 13:46:21,995 Current Learning Rate: 0.0005098621 +2025-02-18 13:46:24,016 Train Loss: 0.0022592, Val Loss: 0.0020274 +2025-02-18 13:46:24,016 Epoch 172/2000 +2025-02-18 13:47:05,363 Current Learning Rate: 0.0004758647 +2025-02-18 13:47:05,364 Train Loss: 0.0022123, Val Loss: 0.0020308 +2025-02-18 13:47:05,364 Epoch 173/2000 +2025-02-18 13:47:48,734 Current Learning Rate: 0.0004429836 +2025-02-18 13:47:50,812 Train Loss: 0.0021227, Val Loss: 0.0020174 +2025-02-18 13:47:50,812 Epoch 174/2000 +2025-02-18 13:48:32,077 Current Learning Rate: 0.0004112269 +2025-02-18 13:48:33,831 Train Loss: 0.0021057, Val Loss: 0.0020160 +2025-02-18 13:48:33,832 Epoch 175/2000 +2025-02-18 13:49:15,057 Current Learning Rate: 0.0003806023 +2025-02-18 13:49:15,058 Train Loss: 0.0022247, Val Loss: 0.0020161 +2025-02-18 13:49:15,058 Epoch 176/2000 +2025-02-18 13:49:57,453 Current Learning Rate: 0.0003511176 +2025-02-18 13:49:59,525 Train Loss: 0.0020290, Val Loss: 0.0020113 +2025-02-18 13:49:59,525 Epoch 177/2000 +2025-02-18 13:50:40,854 Current Learning Rate: 0.0003227798 +2025-02-18 13:50:43,034 Train Loss: 0.0020590, Val Loss: 0.0020087 +2025-02-18 13:50:43,034 Epoch 178/2000 +2025-02-18 13:51:24,539 Current Learning Rate: 0.0002955962 +2025-02-18 13:51:24,540 Train Loss: 0.0021380, Val 
Loss: 0.0020131 +2025-02-18 13:51:24,540 Epoch 179/2000 +2025-02-18 13:52:07,550 Current Learning Rate: 0.0002695732 +2025-02-18 13:52:09,062 Train Loss: 0.0021452, Val Loss: 0.0020065 +2025-02-18 13:52:09,062 Epoch 180/2000 +2025-02-18 13:52:51,219 Current Learning Rate: 0.0002447174 +2025-02-18 13:52:51,220 Train Loss: 0.0022274, Val Loss: 0.0020067 +2025-02-18 13:52:51,221 Epoch 181/2000 +2025-02-18 13:53:33,391 Current Learning Rate: 0.0002210349 +2025-02-18 13:53:34,471 Train Loss: 0.0021063, Val Loss: 0.0020031 +2025-02-18 13:53:34,472 Epoch 182/2000 +2025-02-18 13:54:16,342 Current Learning Rate: 0.0001985316 +2025-02-18 13:54:16,343 Train Loss: 0.0021554, Val Loss: 0.0020034 +2025-02-18 13:54:16,343 Epoch 183/2000 +2025-02-18 13:54:58,990 Current Learning Rate: 0.0001772129 +2025-02-18 13:55:00,726 Train Loss: 0.0024092, Val Loss: 0.0020027 +2025-02-18 13:55:00,726 Epoch 184/2000 +2025-02-18 13:55:43,043 Current Learning Rate: 0.0001570842 +2025-02-18 13:55:45,124 Train Loss: 0.0020361, Val Loss: 0.0019969 +2025-02-18 13:55:45,125 Epoch 185/2000 +2025-02-18 13:56:26,341 Current Learning Rate: 0.0001381504 +2025-02-18 13:56:27,682 Train Loss: 0.0021787, Val Loss: 0.0019946 +2025-02-18 13:56:27,682 Epoch 186/2000 +2025-02-18 13:57:09,808 Current Learning Rate: 0.0001204162 +2025-02-18 13:57:11,437 Train Loss: 0.0019766, Val Loss: 0.0019902 +2025-02-18 13:57:11,438 Epoch 187/2000 +2025-02-18 13:57:53,413 Current Learning Rate: 0.0001038859 +2025-02-18 13:57:55,321 Train Loss: 0.0022846, Val Loss: 0.0019884 +2025-02-18 13:57:55,329 Epoch 188/2000 +2025-02-18 13:58:36,557 Current Learning Rate: 0.0000885637 +2025-02-18 13:58:38,631 Train Loss: 0.0020690, Val Loss: 0.0019866 +2025-02-18 13:58:38,632 Epoch 189/2000 +2025-02-18 13:59:19,993 Current Learning Rate: 0.0000744534 +2025-02-18 13:59:21,660 Train Loss: 0.0021456, Val Loss: 0.0019859 +2025-02-18 13:59:21,681 Epoch 190/2000 +2025-02-18 14:00:03,799 Current Learning Rate: 0.0000615583 +2025-02-18 
14:00:03,800 Train Loss: 0.0020096, Val Loss: 0.0019868 +2025-02-18 14:00:03,800 Epoch 191/2000 +2025-02-18 14:00:46,181 Current Learning Rate: 0.0000498817 +2025-02-18 14:00:46,182 Train Loss: 0.0021014, Val Loss: 0.0019870 +2025-02-18 14:00:46,182 Epoch 192/2000 +2025-02-18 14:01:28,810 Current Learning Rate: 0.0000394265 +2025-02-18 14:01:30,201 Train Loss: 0.0021133, Val Loss: 0.0019855 +2025-02-18 14:01:30,202 Epoch 193/2000 +2025-02-18 14:02:12,523 Current Learning Rate: 0.0000301952 +2025-02-18 14:02:12,524 Train Loss: 0.0020557, Val Loss: 0.0019860 +2025-02-18 14:02:12,524 Epoch 194/2000 +2025-02-18 14:02:55,286 Current Learning Rate: 0.0000221902 +2025-02-18 14:02:57,126 Train Loss: 0.0020902, Val Loss: 0.0019851 +2025-02-18 14:02:57,134 Epoch 195/2000 +2025-02-18 14:03:38,649 Current Learning Rate: 0.0000154133 +2025-02-18 14:03:40,356 Train Loss: 0.0020577, Val Loss: 0.0019844 +2025-02-18 14:03:40,356 Epoch 196/2000 +2025-02-18 14:04:22,463 Current Learning Rate: 0.0000098664 +2025-02-18 14:04:24,579 Train Loss: 0.0021702, Val Loss: 0.0019841 +2025-02-18 14:04:24,579 Epoch 197/2000 +2025-02-18 14:05:06,792 Current Learning Rate: 0.0000055506 +2025-02-18 14:05:08,844 Train Loss: 0.0021440, Val Loss: 0.0019836 +2025-02-18 14:05:08,845 Epoch 198/2000 +2025-02-18 14:05:50,477 Current Learning Rate: 0.0000024672 +2025-02-18 14:05:52,297 Train Loss: 0.0022741, Val Loss: 0.0019835 +2025-02-18 14:05:52,297 Epoch 199/2000 +2025-02-18 14:06:33,747 Current Learning Rate: 0.0000006168 +2025-02-18 14:06:34,853 Train Loss: 0.0020641, Val Loss: 0.0019835 +2025-02-18 14:06:34,861 Epoch 200/2000 +2025-02-18 14:07:16,856 Current Learning Rate: 0.0000000000 +2025-02-18 14:07:18,951 Train Loss: 0.0020595, Val Loss: 0.0019833 +2025-02-18 14:07:18,952 Epoch 201/2000 +2025-02-18 14:08:00,517 Current Learning Rate: 0.0000006168 +2025-02-18 14:08:00,518 Train Loss: 0.0020695, Val Loss: 0.0019834 +2025-02-18 14:08:00,518 Epoch 202/2000 +2025-02-18 14:08:42,480 Current Learning 
Rate: 0.0000024672 +2025-02-18 14:08:42,481 Train Loss: 0.0020921, Val Loss: 0.0019834 +2025-02-18 14:08:42,481 Epoch 203/2000 +2025-02-18 14:09:24,745 Current Learning Rate: 0.0000055506 +2025-02-18 14:09:24,745 Train Loss: 0.0021943, Val Loss: 0.0019834 +2025-02-18 14:09:24,746 Epoch 204/2000 +2025-02-18 14:10:06,317 Current Learning Rate: 0.0000098664 +2025-02-18 14:10:06,318 Train Loss: 0.0020710, Val Loss: 0.0019834 +2025-02-18 14:10:06,318 Epoch 205/2000 +2025-02-18 14:10:48,856 Current Learning Rate: 0.0000154133 +2025-02-18 14:10:48,856 Train Loss: 0.0020787, Val Loss: 0.0019834 +2025-02-18 14:10:48,857 Epoch 206/2000 +2025-02-18 14:11:31,517 Current Learning Rate: 0.0000221902 +2025-02-18 14:11:31,517 Train Loss: 0.0021324, Val Loss: 0.0019837 +2025-02-18 14:11:31,518 Epoch 207/2000 +2025-02-18 14:12:13,640 Current Learning Rate: 0.0000301952 +2025-02-18 14:12:13,641 Train Loss: 0.0020701, Val Loss: 0.0019840 +2025-02-18 14:12:13,641 Epoch 208/2000 +2025-02-18 14:12:55,941 Current Learning Rate: 0.0000394265 +2025-02-18 14:12:55,942 Train Loss: 0.0023016, Val Loss: 0.0019862 +2025-02-18 14:12:55,942 Epoch 209/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_128_20250322_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_128_20250322_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..387546d5b7c87eb28a1001794db074d9942d6081 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_128_20250322_training_log.log @@ -0,0 +1,6017 @@ +2025-03-22 16:11:51,223 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-22 16:11:51,359 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-22 16:11:51,374 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-22 16:11:51,398 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-22 16:11:51,423 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-22 16:11:51,430 Added key: 
store_based_barrier_key:1 to store for rank: 7 +2025-03-22 16:11:51,435 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-22 16:11:51,437 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-22 16:13:10,822 Epoch 1/2000 +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:13:13,213 Reducer buckets have been rebuilt in this iteration. +2025-03-22 16:17:51,691 Current Learning Rate: 0.0009999383 +2025-03-22 16:17:52,577 Train Loss: 0.0428830, Val Loss: 0.0084876 +2025-03-22 16:17:52,577 Epoch 2/2000 +2025-03-22 16:22:31,865 Current Learning Rate: 0.0009997533 +2025-03-22 16:22:32,755 Train Loss: 0.0059191, Val Loss: 0.0046103 +2025-03-22 16:22:32,756 Epoch 3/2000 +2025-03-22 16:27:12,262 Current Learning Rate: 0.0009994449 +2025-03-22 16:27:13,141 Train Loss: 0.0042016, Val Loss: 0.0041436 +2025-03-22 16:27:13,141 Epoch 4/2000 +2025-03-22 16:31:52,717 Current Learning Rate: 0.0009990134 +2025-03-22 16:31:53,632 Train Loss: 0.0035474, Val Loss: 0.0032568 +2025-03-22 16:31:53,632 Epoch 5/2000 +2025-03-22 16:36:32,903 Current Learning Rate: 0.0009984587 +2025-03-22 16:36:33,834 Train Loss: 0.0032337, Val Loss: 0.0029892 +2025-03-22 16:36:33,834 Epoch 6/2000 +2025-03-22 16:41:13,118 Current Learning Rate: 0.0009977810 +2025-03-22 16:41:14,063 Train Loss: 0.0029551, Val Loss: 0.0026622 +2025-03-22 16:41:14,063 Epoch 7/2000 +2025-03-22 16:45:53,602 Current Learning Rate: 0.0009969805 +2025-03-22 16:45:54,482 Train Loss: 0.0027615, Val Loss: 
0.0024908 +2025-03-22 16:45:54,483 Epoch 8/2000 +2025-03-22 16:50:34,289 Current Learning Rate: 0.0009960574 +2025-03-22 16:50:35,231 Train Loss: 0.0025527, Val Loss: 0.0023548 +2025-03-22 16:50:35,231 Epoch 9/2000 +2025-03-22 16:55:14,859 Current Learning Rate: 0.0009950118 +2025-03-22 16:55:15,709 Train Loss: 0.0023068, Val Loss: 0.0022181 +2025-03-22 16:55:15,709 Epoch 10/2000 +2025-03-22 16:59:55,477 Current Learning Rate: 0.0009938442 +2025-03-22 16:59:56,386 Train Loss: 0.0021072, Val Loss: 0.0019685 +2025-03-22 16:59:56,386 Epoch 11/2000 +2025-03-22 17:04:35,840 Current Learning Rate: 0.0009925547 +2025-03-22 17:04:36,698 Train Loss: 0.0019348, Val Loss: 0.0018727 +2025-03-22 17:04:36,698 Epoch 12/2000 +2025-03-22 17:09:16,060 Current Learning Rate: 0.0009911436 +2025-03-22 17:09:16,908 Train Loss: 0.0018161, Val Loss: 0.0017598 +2025-03-22 17:09:16,908 Epoch 13/2000 +2025-03-22 17:13:56,297 Current Learning Rate: 0.0009896114 +2025-03-22 17:13:56,298 Train Loss: 0.0016777, Val Loss: 0.0029946 +2025-03-22 17:13:56,298 Epoch 14/2000 +2025-03-22 17:18:36,430 Current Learning Rate: 0.0009879584 +2025-03-22 17:18:37,370 Train Loss: 0.0015565, Val Loss: 0.0014408 +2025-03-22 17:18:37,370 Epoch 15/2000 +2025-03-22 17:23:16,839 Current Learning Rate: 0.0009861850 +2025-03-22 17:23:17,674 Train Loss: 0.0014213, Val Loss: 0.0014264 +2025-03-22 17:23:17,675 Epoch 16/2000 +2025-03-22 17:27:57,198 Current Learning Rate: 0.0009842916 +2025-03-22 17:27:57,199 Train Loss: 0.0014164, Val Loss: 0.0015448 +2025-03-22 17:27:57,199 Epoch 17/2000 +2025-03-22 17:32:37,151 Current Learning Rate: 0.0009822787 +2025-03-22 17:32:37,971 Train Loss: 0.0013040, Val Loss: 0.0011767 +2025-03-22 17:32:37,971 Epoch 18/2000 +2025-03-22 17:37:17,561 Current Learning Rate: 0.0009801468 +2025-03-22 17:37:18,441 Train Loss: 0.0011527, Val Loss: 0.0010864 +2025-03-22 17:37:18,442 Epoch 19/2000 +2025-03-22 17:41:58,022 Current Learning Rate: 0.0009778965 +2025-03-22 17:41:58,847 Train Loss: 
0.0010863, Val Loss: 0.0010519 +2025-03-22 17:41:58,847 Epoch 20/2000 +2025-03-22 17:46:38,321 Current Learning Rate: 0.0009755283 +2025-03-22 17:46:39,196 Train Loss: 0.0009699, Val Loss: 0.0010392 +2025-03-22 17:46:39,197 Epoch 21/2000 +2025-03-22 17:51:18,338 Current Learning Rate: 0.0009730427 +2025-03-22 17:51:19,167 Train Loss: 0.0009597, Val Loss: 0.0009895 +2025-03-22 17:51:19,167 Epoch 22/2000 +2025-03-22 17:55:58,832 Current Learning Rate: 0.0009704404 +2025-03-22 17:55:59,738 Train Loss: 0.0009997, Val Loss: 0.0009613 +2025-03-22 17:55:59,738 Epoch 23/2000 +2025-03-22 18:00:39,672 Current Learning Rate: 0.0009677220 +2025-03-22 18:00:40,555 Train Loss: 0.0008857, Val Loss: 0.0009601 +2025-03-22 18:00:40,556 Epoch 24/2000 +2025-03-22 18:05:20,310 Current Learning Rate: 0.0009648882 +2025-03-22 18:05:21,134 Train Loss: 0.0007643, Val Loss: 0.0008107 +2025-03-22 18:05:21,135 Epoch 25/2000 +2025-03-22 18:10:01,300 Current Learning Rate: 0.0009619398 +2025-03-22 18:10:02,169 Train Loss: 0.0008164, Val Loss: 0.0007745 +2025-03-22 18:10:02,170 Epoch 26/2000 +2025-03-22 18:14:41,904 Current Learning Rate: 0.0009588773 +2025-03-22 18:14:42,735 Train Loss: 0.0007035, Val Loss: 0.0007424 +2025-03-22 18:14:42,735 Epoch 27/2000 +2025-03-22 18:19:22,322 Current Learning Rate: 0.0009557016 +2025-03-22 18:19:22,323 Train Loss: 0.0008220, Val Loss: 0.0007886 +2025-03-22 18:19:22,324 Epoch 28/2000 +2025-03-22 18:24:02,306 Current Learning Rate: 0.0009524135 +2025-03-22 18:24:03,349 Train Loss: 0.0007881, Val Loss: 0.0007089 +2025-03-22 18:24:03,350 Epoch 29/2000 +2025-03-22 18:28:42,902 Current Learning Rate: 0.0009490138 +2025-03-22 18:28:43,750 Train Loss: 0.0005957, Val Loss: 0.0006124 +2025-03-22 18:28:43,750 Epoch 30/2000 +2025-03-22 18:33:23,183 Current Learning Rate: 0.0009455033 +2025-03-22 18:33:23,184 Train Loss: 0.0006137, Val Loss: 0.0006844 +2025-03-22 18:33:23,184 Epoch 31/2000 +2025-03-22 18:38:03,535 Current Learning Rate: 0.0009418828 +2025-03-22 
18:38:04,353 Train Loss: 0.0006041, Val Loss: 0.0005650 +2025-03-22 18:38:04,353 Epoch 32/2000 +2025-03-22 18:42:43,880 Current Learning Rate: 0.0009381533 +2025-03-22 18:42:43,881 Train Loss: 0.0006000, Val Loss: 0.0006061 +2025-03-22 18:42:43,881 Epoch 33/2000 +2025-03-22 18:47:24,069 Current Learning Rate: 0.0009343158 +2025-03-22 18:47:24,939 Train Loss: 0.0005860, Val Loss: 0.0005312 +2025-03-22 18:47:24,939 Epoch 34/2000 +2025-03-22 18:52:04,406 Current Learning Rate: 0.0009303710 +2025-03-22 18:52:05,245 Train Loss: 0.0005056, Val Loss: 0.0004950 +2025-03-22 18:52:05,245 Epoch 35/2000 +2025-03-22 18:56:45,053 Current Learning Rate: 0.0009263201 +2025-03-22 18:56:45,054 Train Loss: 0.0005524, Val Loss: 0.0005032 +2025-03-22 18:56:45,054 Epoch 36/2000 +2025-03-22 19:01:25,233 Current Learning Rate: 0.0009221640 +2025-03-22 19:01:25,233 Train Loss: 0.0005533, Val Loss: 0.0006124 +2025-03-22 19:01:25,233 Epoch 37/2000 +2025-03-22 19:06:05,230 Current Learning Rate: 0.0009179037 +2025-03-22 19:06:05,231 Train Loss: 0.0005607, Val Loss: 0.0005925 +2025-03-22 19:06:05,231 Epoch 38/2000 +2025-03-22 19:10:45,354 Current Learning Rate: 0.0009135403 +2025-03-22 19:10:46,246 Train Loss: 0.0005035, Val Loss: 0.0004843 +2025-03-22 19:10:46,247 Epoch 39/2000 +2025-03-22 19:15:25,902 Current Learning Rate: 0.0009090749 +2025-03-22 19:15:26,753 Train Loss: 0.0004622, Val Loss: 0.0004472 +2025-03-22 19:15:26,753 Epoch 40/2000 +2025-03-22 19:20:06,279 Current Learning Rate: 0.0009045085 +2025-03-22 19:20:06,280 Train Loss: 0.0004162, Val Loss: 0.0004911 +2025-03-22 19:20:06,280 Epoch 41/2000 +2025-03-22 19:24:46,491 Current Learning Rate: 0.0008998423 +2025-03-22 19:24:47,392 Train Loss: 0.0005121, Val Loss: 0.0004378 +2025-03-22 19:24:47,393 Epoch 42/2000 +2025-03-22 19:29:26,909 Current Learning Rate: 0.0008950775 +2025-03-22 19:29:26,909 Train Loss: 0.0004563, Val Loss: 0.0005958 +2025-03-22 19:29:26,910 Epoch 43/2000 +2025-03-22 19:34:07,019 Current Learning Rate: 
0.0008902152 +2025-03-22 19:34:07,020 Train Loss: 0.0004708, Val Loss: 0.0006100 +2025-03-22 19:34:07,020 Epoch 44/2000 +2025-03-22 19:38:47,047 Current Learning Rate: 0.0008852566 +2025-03-22 19:38:47,047 Train Loss: 0.0004310, Val Loss: 0.0005002 +2025-03-22 19:38:47,048 Epoch 45/2000 +2025-03-22 19:43:27,439 Current Learning Rate: 0.0008802030 +2025-03-22 19:43:28,386 Train Loss: 0.0004326, Val Loss: 0.0004041 +2025-03-22 19:43:28,386 Epoch 46/2000 +2025-03-22 19:48:07,914 Current Learning Rate: 0.0008750555 +2025-03-22 19:48:07,915 Train Loss: 0.0003915, Val Loss: 0.0004211 +2025-03-22 19:48:07,915 Epoch 47/2000 +2025-03-22 19:52:48,511 Current Learning Rate: 0.0008698155 +2025-03-22 19:52:49,372 Train Loss: 0.0003852, Val Loss: 0.0003772 +2025-03-22 19:52:49,373 Epoch 48/2000 +2025-03-22 19:57:28,678 Current Learning Rate: 0.0008644843 +2025-03-22 19:57:28,678 Train Loss: 0.0003641, Val Loss: 0.0004900 +2025-03-22 19:57:28,679 Epoch 49/2000 +2025-03-22 20:02:08,887 Current Learning Rate: 0.0008590631 +2025-03-22 20:02:09,824 Train Loss: 0.0003904, Val Loss: 0.0003688 +2025-03-22 20:02:09,825 Epoch 50/2000 +2025-03-22 20:06:49,611 Current Learning Rate: 0.0008535534 +2025-03-22 20:06:49,612 Train Loss: 0.0003794, Val Loss: 0.0003809 +2025-03-22 20:06:49,613 Epoch 51/2000 +2025-03-22 20:11:30,142 Current Learning Rate: 0.0008479564 +2025-03-22 20:11:31,027 Train Loss: 0.0003567, Val Loss: 0.0003456 +2025-03-22 20:11:31,028 Epoch 52/2000 +2025-03-22 20:16:10,396 Current Learning Rate: 0.0008422736 +2025-03-22 20:16:10,396 Train Loss: 0.0003651, Val Loss: 0.0003534 +2025-03-22 20:16:10,397 Epoch 53/2000 +2025-03-22 20:20:50,713 Current Learning Rate: 0.0008365063 +2025-03-22 20:20:50,714 Train Loss: 0.0006455, Val Loss: 0.0004989 +2025-03-22 20:20:50,714 Epoch 54/2000 +2025-03-22 20:25:30,992 Current Learning Rate: 0.0008306559 +2025-03-22 20:25:31,884 Train Loss: 0.0003847, Val Loss: 0.0003355 +2025-03-22 20:25:31,884 Epoch 55/2000 +2025-03-22 20:30:11,462 
Current Learning Rate: 0.0008247240 +2025-03-22 20:30:12,296 Train Loss: 0.0003844, Val Loss: 0.0003295 +2025-03-22 20:30:12,296 Epoch 56/2000 +2025-03-22 20:34:51,986 Current Learning Rate: 0.0008187120 +2025-03-22 20:34:52,835 Train Loss: 0.0003071, Val Loss: 0.0002840 +2025-03-22 20:34:52,836 Epoch 57/2000 +2025-03-22 20:39:32,613 Current Learning Rate: 0.0008126213 +2025-03-22 20:39:33,546 Train Loss: 0.0002948, Val Loss: 0.0002745 +2025-03-22 20:39:33,547 Epoch 58/2000 +2025-03-22 20:44:13,223 Current Learning Rate: 0.0008064535 +2025-03-22 20:44:13,224 Train Loss: 0.0003203, Val Loss: 0.0002873 +2025-03-22 20:44:13,224 Epoch 59/2000 +2025-03-22 20:48:53,054 Current Learning Rate: 0.0008002101 +2025-03-22 20:48:53,054 Train Loss: 0.0003164, Val Loss: 0.0002892 +2025-03-22 20:48:53,055 Epoch 60/2000 +2025-03-22 20:53:33,333 Current Learning Rate: 0.0007938926 +2025-03-22 20:53:33,333 Train Loss: 0.0003267, Val Loss: 0.0002993 +2025-03-22 20:53:33,333 Epoch 61/2000 +2025-03-22 20:58:13,714 Current Learning Rate: 0.0007875026 +2025-03-22 20:58:13,714 Train Loss: 0.0002798, Val Loss: 0.0002904 +2025-03-22 20:58:13,714 Epoch 62/2000 +2025-03-22 21:02:53,363 Current Learning Rate: 0.0007810417 +2025-03-22 21:02:54,196 Train Loss: 0.0002693, Val Loss: 0.0002709 +2025-03-22 21:02:54,196 Epoch 63/2000 +2025-03-22 21:07:33,607 Current Learning Rate: 0.0007745114 +2025-03-22 21:07:33,608 Train Loss: 0.0002991, Val Loss: 0.0002782 +2025-03-22 21:07:33,608 Epoch 64/2000 +2025-03-22 21:12:13,362 Current Learning Rate: 0.0007679134 +2025-03-22 21:12:13,362 Train Loss: 0.0003387, Val Loss: 0.0002818 +2025-03-22 21:12:13,363 Epoch 65/2000 +2025-03-22 21:16:53,091 Current Learning Rate: 0.0007612493 +2025-03-22 21:16:53,091 Train Loss: 0.0003413, Val Loss: 0.0002734 +2025-03-22 21:16:53,091 Epoch 66/2000 +2025-03-22 21:21:33,237 Current Learning Rate: 0.0007545207 +2025-03-22 21:21:33,238 Train Loss: 0.0002584, Val Loss: 0.0002786 +2025-03-22 21:21:33,238 Epoch 67/2000 
+2025-03-22 21:26:13,016 Current Learning Rate: 0.0007477293 +2025-03-22 21:26:13,016 Train Loss: 0.0003094, Val Loss: 0.0002848 +2025-03-22 21:26:13,017 Epoch 68/2000 +2025-03-22 21:30:53,023 Current Learning Rate: 0.0007408768 +2025-03-22 21:30:53,024 Train Loss: 0.0002597, Val Loss: 0.0002746 +2025-03-22 21:30:53,024 Epoch 69/2000 +2025-03-22 21:35:32,890 Current Learning Rate: 0.0007339649 +2025-03-22 21:35:32,890 Train Loss: 0.0002934, Val Loss: 0.0002760 +2025-03-22 21:35:32,890 Epoch 70/2000 +2025-03-22 21:40:13,180 Current Learning Rate: 0.0007269952 +2025-03-22 21:40:14,073 Train Loss: 0.0002637, Val Loss: 0.0002705 +2025-03-22 21:40:14,074 Epoch 71/2000 +2025-03-22 21:44:53,334 Current Learning Rate: 0.0007199696 +2025-03-22 21:44:53,335 Train Loss: 0.0002712, Val Loss: 0.0002844 +2025-03-22 21:44:53,335 Epoch 72/2000 +2025-03-22 21:49:33,539 Current Learning Rate: 0.0007128896 +2025-03-22 21:49:34,375 Train Loss: 0.0002562, Val Loss: 0.0002603 +2025-03-22 21:49:34,376 Epoch 73/2000 +2025-03-22 21:54:14,130 Current Learning Rate: 0.0007057572 +2025-03-22 21:54:15,031 Train Loss: 0.0002597, Val Loss: 0.0002592 +2025-03-22 21:54:15,031 Epoch 74/2000 +2025-03-22 21:58:54,717 Current Learning Rate: 0.0006985739 +2025-03-22 21:58:54,718 Train Loss: 0.0002634, Val Loss: 0.0002659 +2025-03-22 21:58:54,718 Epoch 75/2000 +2025-03-22 22:03:34,711 Current Learning Rate: 0.0006913417 +2025-03-22 22:03:35,563 Train Loss: 0.0002661, Val Loss: 0.0002519 +2025-03-22 22:03:35,564 Epoch 76/2000 +2025-03-22 22:08:14,800 Current Learning Rate: 0.0006840623 +2025-03-22 22:08:15,661 Train Loss: 0.0002584, Val Loss: 0.0002350 +2025-03-22 22:08:15,661 Epoch 77/2000 +2025-03-22 22:12:55,063 Current Learning Rate: 0.0006767374 +2025-03-22 22:12:55,064 Train Loss: 0.0002387, Val Loss: 0.0002480 +2025-03-22 22:12:55,064 Epoch 78/2000 +2025-03-22 22:17:35,468 Current Learning Rate: 0.0006693690 +2025-03-22 22:17:36,334 Train Loss: 0.0002499, Val Loss: 0.0002196 +2025-03-22 
22:17:36,334 Epoch 79/2000 +2025-03-22 22:22:16,081 Current Learning Rate: 0.0006619587 +2025-03-22 22:22:16,948 Train Loss: 0.0002211, Val Loss: 0.0002133 +2025-03-22 22:22:16,949 Epoch 80/2000 +2025-03-22 22:26:56,342 Current Learning Rate: 0.0006545085 +2025-03-22 22:26:56,343 Train Loss: 0.0002169, Val Loss: 0.0002357 +2025-03-22 22:26:56,343 Epoch 81/2000 +2025-03-22 22:31:36,452 Current Learning Rate: 0.0006470202 +2025-03-22 22:31:36,452 Train Loss: 0.0002013, Val Loss: 0.0002235 +2025-03-22 22:31:36,453 Epoch 82/2000 +2025-03-22 22:36:16,403 Current Learning Rate: 0.0006394956 +2025-03-22 22:36:16,403 Train Loss: 0.0002400, Val Loss: 0.0002236 +2025-03-22 22:36:16,404 Epoch 83/2000 +2025-03-22 22:40:56,522 Current Learning Rate: 0.0006319365 +2025-03-22 22:40:57,421 Train Loss: 0.0002330, Val Loss: 0.0002105 +2025-03-22 22:40:57,421 Epoch 84/2000 +2025-03-22 22:45:36,871 Current Learning Rate: 0.0006243449 +2025-03-22 22:45:36,872 Train Loss: 0.0002367, Val Loss: 0.0002137 +2025-03-22 22:45:36,872 Epoch 85/2000 +2025-03-22 22:50:16,802 Current Learning Rate: 0.0006167227 +2025-03-22 22:50:16,803 Train Loss: 0.0002264, Val Loss: 0.0002112 +2025-03-22 22:50:16,803 Epoch 86/2000 +2025-03-22 22:54:57,221 Current Learning Rate: 0.0006090716 +2025-03-22 22:54:57,221 Train Loss: 0.0002424, Val Loss: 0.0002109 +2025-03-22 22:54:57,222 Epoch 87/2000 +2025-03-22 22:59:37,491 Current Learning Rate: 0.0006013936 +2025-03-22 22:59:38,397 Train Loss: 0.0002017, Val Loss: 0.0002095 +2025-03-22 22:59:38,397 Epoch 88/2000 +2025-03-22 23:04:18,100 Current Learning Rate: 0.0005936907 +2025-03-22 23:04:18,101 Train Loss: 0.0001991, Val Loss: 0.0002133 +2025-03-22 23:04:18,101 Epoch 89/2000 +2025-03-22 23:08:58,058 Current Learning Rate: 0.0005859646 +2025-03-22 23:08:58,059 Train Loss: 0.0002184, Val Loss: 0.0002326 +2025-03-22 23:08:58,059 Epoch 90/2000 +2025-03-22 23:13:37,867 Current Learning Rate: 0.0005782172 +2025-03-22 23:13:37,868 Train Loss: 0.0002670, Val Loss: 
0.0002356 +2025-03-22 23:13:37,868 Epoch 91/2000 +2025-03-22 23:18:17,645 Current Learning Rate: 0.0005704506 +2025-03-22 23:18:18,577 Train Loss: 0.0002424, Val Loss: 0.0001979 +2025-03-22 23:18:18,577 Epoch 92/2000 +2025-03-22 23:22:57,951 Current Learning Rate: 0.0005626666 +2025-03-22 23:22:58,877 Train Loss: 0.0001865, Val Loss: 0.0001920 +2025-03-22 23:22:58,878 Epoch 93/2000 +2025-03-22 23:27:38,419 Current Learning Rate: 0.0005548672 +2025-03-22 23:27:38,420 Train Loss: 0.0001873, Val Loss: 0.0001965 +2025-03-22 23:27:38,420 Epoch 94/2000 +2025-03-22 23:32:18,252 Current Learning Rate: 0.0005470542 +2025-03-22 23:32:18,253 Train Loss: 0.0002439, Val Loss: 0.0002118 +2025-03-22 23:32:18,253 Epoch 95/2000 +2025-03-22 23:36:58,214 Current Learning Rate: 0.0005392295 +2025-03-22 23:36:58,214 Train Loss: 0.0002317, Val Loss: 0.0001959 +2025-03-22 23:36:58,215 Epoch 96/2000 +2025-03-22 23:41:38,379 Current Learning Rate: 0.0005313953 +2025-03-22 23:41:38,380 Train Loss: 0.0001984, Val Loss: 0.0002081 +2025-03-22 23:41:38,380 Epoch 97/2000 +2025-03-22 23:46:18,264 Current Learning Rate: 0.0005235532 +2025-03-22 23:46:18,264 Train Loss: 0.0002133, Val Loss: 0.0002437 +2025-03-22 23:46:18,264 Epoch 98/2000 +2025-03-22 23:50:58,186 Current Learning Rate: 0.0005157054 +2025-03-22 23:50:59,074 Train Loss: 0.0002068, Val Loss: 0.0001799 +2025-03-22 23:50:59,075 Epoch 99/2000 +2025-03-22 23:55:38,691 Current Learning Rate: 0.0005078537 +2025-03-22 23:55:38,692 Train Loss: 0.0001824, Val Loss: 0.0001916 +2025-03-22 23:55:38,692 Epoch 100/2000 +2025-03-23 00:00:19,242 Current Learning Rate: 0.0005000000 +2025-03-23 00:00:20,180 Train Loss: 0.0001799, Val Loss: 0.0001749 +2025-03-23 00:00:20,180 Epoch 101/2000 +2025-03-23 00:05:00,294 Current Learning Rate: 0.0004921463 +2025-03-23 00:05:00,295 Train Loss: 0.0001925, Val Loss: 0.0001761 +2025-03-23 00:05:00,295 Epoch 102/2000 +2025-03-23 00:09:40,226 Current Learning Rate: 0.0004842946 +2025-03-23 00:09:40,226 Train Loss: 
0.0002137, Val Loss: 0.0001774 +2025-03-23 00:09:40,227 Epoch 103/2000 +2025-03-23 00:14:20,150 Current Learning Rate: 0.0004764468 +2025-03-23 00:14:20,151 Train Loss: 0.0001595, Val Loss: 0.0001869 +2025-03-23 00:14:20,152 Epoch 104/2000 +2025-03-23 00:19:00,240 Current Learning Rate: 0.0004686047 +2025-03-23 00:19:00,240 Train Loss: 0.0001713, Val Loss: 0.0002201 +2025-03-23 00:19:00,241 Epoch 105/2000 +2025-03-23 00:23:40,537 Current Learning Rate: 0.0004607705 +2025-03-23 00:23:40,537 Train Loss: 0.0001868, Val Loss: 0.0001773 +2025-03-23 00:23:40,537 Epoch 106/2000 +2025-03-23 00:28:20,574 Current Learning Rate: 0.0004529458 +2025-03-23 00:28:20,575 Train Loss: 0.0001959, Val Loss: 0.0001932 +2025-03-23 00:28:20,575 Epoch 107/2000 +2025-03-23 00:33:00,935 Current Learning Rate: 0.0004451328 +2025-03-23 00:33:01,874 Train Loss: 0.0001861, Val Loss: 0.0001644 +2025-03-23 00:33:01,874 Epoch 108/2000 +2025-03-23 00:37:41,566 Current Learning Rate: 0.0004373334 +2025-03-23 00:37:41,567 Train Loss: 0.0001844, Val Loss: 0.0001721 +2025-03-23 00:37:41,567 Epoch 109/2000 +2025-03-23 00:42:21,826 Current Learning Rate: 0.0004295494 +2025-03-23 00:42:21,826 Train Loss: 0.0001835, Val Loss: 0.0001751 +2025-03-23 00:42:21,827 Epoch 110/2000 +2025-03-23 00:47:01,697 Current Learning Rate: 0.0004217828 +2025-03-23 00:47:01,697 Train Loss: 0.0001707, Val Loss: 0.0001718 +2025-03-23 00:47:01,698 Epoch 111/2000 +2025-03-23 00:51:41,706 Current Learning Rate: 0.0004140354 +2025-03-23 00:51:42,754 Train Loss: 0.0001503, Val Loss: 0.0001626 +2025-03-23 00:51:42,755 Epoch 112/2000 +2025-03-23 00:56:22,401 Current Learning Rate: 0.0004063093 +2025-03-23 00:56:22,401 Train Loss: 0.0001850, Val Loss: 0.0001687 +2025-03-23 00:56:22,401 Epoch 113/2000 +2025-03-23 01:01:02,576 Current Learning Rate: 0.0003986064 +2025-03-23 01:01:02,576 Train Loss: 0.0002110, Val Loss: 0.0001707 +2025-03-23 01:01:02,577 Epoch 114/2000 +2025-03-23 01:05:43,257 Current Learning Rate: 0.0003909284 
+2025-03-23 01:05:44,229 Train Loss: 0.0001417, Val Loss: 0.0001608 +2025-03-23 01:05:44,229 Epoch 115/2000 +2025-03-23 01:10:23,652 Current Learning Rate: 0.0003832773 +2025-03-23 01:10:23,653 Train Loss: 0.0002092, Val Loss: 0.0001644 +2025-03-23 01:10:23,653 Epoch 116/2000 +2025-03-23 01:15:04,252 Current Learning Rate: 0.0003756551 +2025-03-23 01:15:04,252 Train Loss: 0.0002126, Val Loss: 0.0001768 +2025-03-23 01:15:04,253 Epoch 117/2000 +2025-03-23 01:19:44,600 Current Learning Rate: 0.0003680635 +2025-03-23 01:19:45,549 Train Loss: 0.0001742, Val Loss: 0.0001514 +2025-03-23 01:19:45,549 Epoch 118/2000 +2025-03-23 01:24:25,547 Current Learning Rate: 0.0003605044 +2025-03-23 01:24:25,548 Train Loss: 0.0001749, Val Loss: 0.0001526 +2025-03-23 01:24:25,548 Epoch 119/2000 +2025-03-23 01:29:06,255 Current Learning Rate: 0.0003529798 +2025-03-23 01:29:06,255 Train Loss: 0.0001562, Val Loss: 0.0001796 +2025-03-23 01:29:06,256 Epoch 120/2000 +2025-03-23 01:33:46,560 Current Learning Rate: 0.0003454915 +2025-03-23 01:33:46,560 Train Loss: 0.0001547, Val Loss: 0.0001578 +2025-03-23 01:33:46,561 Epoch 121/2000 +2025-03-23 01:38:26,782 Current Learning Rate: 0.0003380413 +2025-03-23 01:38:26,783 Train Loss: 0.0002068, Val Loss: 0.0001514 +2025-03-23 01:38:26,783 Epoch 122/2000 +2025-03-23 01:43:06,858 Current Learning Rate: 0.0003306310 +2025-03-23 01:43:06,859 Train Loss: 0.0001686, Val Loss: 0.0001574 +2025-03-23 01:43:06,859 Epoch 123/2000 +2025-03-23 01:47:47,061 Current Learning Rate: 0.0003232626 +2025-03-23 01:47:48,005 Train Loss: 0.0001608, Val Loss: 0.0001510 +2025-03-23 01:47:48,006 Epoch 124/2000 +2025-03-23 01:52:27,950 Current Learning Rate: 0.0003159377 +2025-03-23 01:52:27,951 Train Loss: 0.0001914, Val Loss: 0.0001555 +2025-03-23 01:52:27,951 Epoch 125/2000 +2025-03-23 01:57:08,111 Current Learning Rate: 0.0003086583 +2025-03-23 01:57:09,138 Train Loss: 0.0001391, Val Loss: 0.0001448 +2025-03-23 01:57:09,138 Epoch 126/2000 +2025-03-23 02:01:48,749 Current 
Learning Rate: 0.0003014261 +2025-03-23 02:01:48,750 Train Loss: 0.0001464, Val Loss: 0.0001504 +2025-03-23 02:01:48,750 Epoch 127/2000 +2025-03-23 02:06:29,269 Current Learning Rate: 0.0002942428 +2025-03-23 02:06:29,270 Train Loss: 0.0001602, Val Loss: 0.0001467 +2025-03-23 02:06:29,270 Epoch 128/2000 +2025-03-23 02:11:09,781 Current Learning Rate: 0.0002871104 +2025-03-23 02:11:10,775 Train Loss: 0.0001603, Val Loss: 0.0001430 +2025-03-23 02:11:10,776 Epoch 129/2000 +2025-03-23 02:15:50,575 Current Learning Rate: 0.0002800304 +2025-03-23 02:15:51,605 Train Loss: 0.0001376, Val Loss: 0.0001405 +2025-03-23 02:15:51,605 Epoch 130/2000 +2025-03-23 02:20:31,412 Current Learning Rate: 0.0002730048 +2025-03-23 02:20:31,412 Train Loss: 0.0001603, Val Loss: 0.0001521 +2025-03-23 02:20:31,413 Epoch 131/2000 +2025-03-23 02:25:11,750 Current Learning Rate: 0.0002660351 +2025-03-23 02:25:12,639 Train Loss: 0.0001381, Val Loss: 0.0001371 +2025-03-23 02:25:12,640 Epoch 132/2000 +2025-03-23 02:29:52,219 Current Learning Rate: 0.0002591232 +2025-03-23 02:29:52,220 Train Loss: 0.0001613, Val Loss: 0.0001476 +2025-03-23 02:29:52,220 Epoch 133/2000 +2025-03-23 02:34:32,590 Current Learning Rate: 0.0002522707 +2025-03-23 02:34:32,591 Train Loss: 0.0001376, Val Loss: 0.0001378 +2025-03-23 02:34:32,591 Epoch 134/2000 +2025-03-23 02:39:12,789 Current Learning Rate: 0.0002454793 +2025-03-23 02:39:12,789 Train Loss: 0.0001521, Val Loss: 0.0001372 +2025-03-23 02:39:12,790 Epoch 135/2000 +2025-03-23 02:43:53,172 Current Learning Rate: 0.0002387507 +2025-03-23 02:43:53,172 Train Loss: 0.0001290, Val Loss: 0.0001385 +2025-03-23 02:43:53,173 Epoch 136/2000 +2025-03-23 02:48:33,509 Current Learning Rate: 0.0002320866 +2025-03-23 02:48:34,398 Train Loss: 0.0001433, Val Loss: 0.0001365 +2025-03-23 02:48:34,398 Epoch 137/2000 +2025-03-23 02:53:13,956 Current Learning Rate: 0.0002254886 +2025-03-23 02:53:13,956 Train Loss: 0.0001533, Val Loss: 0.0001643 +2025-03-23 02:53:13,956 Epoch 138/2000 
+2025-03-23 02:57:54,147 Current Learning Rate: 0.0002189583 +2025-03-23 02:57:55,007 Train Loss: 0.0001354, Val Loss: 0.0001333 +2025-03-23 02:57:55,008 Epoch 139/2000 +2025-03-23 03:02:34,634 Current Learning Rate: 0.0002124974 +2025-03-23 03:02:34,635 Train Loss: 0.0001528, Val Loss: 0.0001411 +2025-03-23 03:02:34,635 Epoch 140/2000 +2025-03-23 03:07:14,622 Current Learning Rate: 0.0002061074 +2025-03-23 03:07:14,622 Train Loss: 0.0001310, Val Loss: 0.0001484 +2025-03-23 03:07:14,622 Epoch 141/2000 +2025-03-23 03:11:54,703 Current Learning Rate: 0.0001997899 +2025-03-23 03:11:54,703 Train Loss: 0.0001324, Val Loss: 0.0001366 +2025-03-23 03:11:54,704 Epoch 142/2000 +2025-03-23 03:16:34,800 Current Learning Rate: 0.0001935465 +2025-03-23 03:16:34,800 Train Loss: 0.0001413, Val Loss: 0.0001388 +2025-03-23 03:16:34,800 Epoch 143/2000 +2025-03-23 03:21:14,495 Current Learning Rate: 0.0001873787 +2025-03-23 03:21:15,356 Train Loss: 0.0001324, Val Loss: 0.0001303 +2025-03-23 03:21:15,356 Epoch 144/2000 +2025-03-23 03:25:54,985 Current Learning Rate: 0.0001812880 +2025-03-23 03:25:54,986 Train Loss: 0.0001369, Val Loss: 0.0001309 +2025-03-23 03:25:54,986 Epoch 145/2000 +2025-03-23 03:30:34,795 Current Learning Rate: 0.0001752760 +2025-03-23 03:30:34,796 Train Loss: 0.0001623, Val Loss: 0.0001378 +2025-03-23 03:30:34,796 Epoch 146/2000 +2025-03-23 03:35:14,439 Current Learning Rate: 0.0001693441 +2025-03-23 03:35:14,440 Train Loss: 0.0001319, Val Loss: 0.0001359 +2025-03-23 03:35:14,440 Epoch 147/2000 +2025-03-23 03:39:54,236 Current Learning Rate: 0.0001634937 +2025-03-23 03:39:54,237 Train Loss: 0.0001200, Val Loss: 0.0001536 +2025-03-23 03:39:54,237 Epoch 148/2000 +2025-03-23 03:44:33,965 Current Learning Rate: 0.0001577264 +2025-03-23 03:44:34,911 Train Loss: 0.0001043, Val Loss: 0.0001260 +2025-03-23 03:44:34,911 Epoch 149/2000 +2025-03-23 03:49:14,390 Current Learning Rate: 0.0001520436 +2025-03-23 03:49:14,391 Train Loss: 0.0001288, Val Loss: 0.0001288 +2025-03-23 
03:49:14,391 Epoch 150/2000 +2025-03-23 03:53:54,480 Current Learning Rate: 0.0001464466 +2025-03-23 03:53:54,480 Train Loss: 0.0001398, Val Loss: 0.0001291 +2025-03-23 03:53:54,480 Epoch 151/2000 +2025-03-23 03:58:34,386 Current Learning Rate: 0.0001409369 +2025-03-23 03:58:34,386 Train Loss: 0.0001303, Val Loss: 0.0001299 +2025-03-23 03:58:34,387 Epoch 152/2000 +2025-03-23 04:03:14,428 Current Learning Rate: 0.0001355157 +2025-03-23 04:03:15,301 Train Loss: 0.0001275, Val Loss: 0.0001253 +2025-03-23 04:03:15,302 Epoch 153/2000 +2025-03-23 04:07:54,682 Current Learning Rate: 0.0001301845 +2025-03-23 04:07:54,683 Train Loss: 0.0001365, Val Loss: 0.0001283 +2025-03-23 04:07:54,683 Epoch 154/2000 +2025-03-23 04:12:34,896 Current Learning Rate: 0.0001249445 +2025-03-23 04:12:34,897 Train Loss: 0.0001230, Val Loss: 0.0001288 +2025-03-23 04:12:34,897 Epoch 155/2000 +2025-03-23 04:17:14,839 Current Learning Rate: 0.0001197970 +2025-03-23 04:17:14,840 Train Loss: 0.0001294, Val Loss: 0.0001285 +2025-03-23 04:17:14,840 Epoch 156/2000 +2025-03-23 04:21:54,659 Current Learning Rate: 0.0001147434 +2025-03-23 04:21:55,522 Train Loss: 0.0001391, Val Loss: 0.0001233 +2025-03-23 04:21:55,522 Epoch 157/2000 +2025-03-23 04:26:34,943 Current Learning Rate: 0.0001097848 +2025-03-23 04:26:34,944 Train Loss: 0.0001310, Val Loss: 0.0001235 +2025-03-23 04:26:34,944 Epoch 158/2000 +2025-03-23 04:31:14,918 Current Learning Rate: 0.0001049225 +2025-03-23 04:31:14,919 Train Loss: 0.0001229, Val Loss: 0.0001237 +2025-03-23 04:31:14,919 Epoch 159/2000 +2025-03-23 04:35:54,691 Current Learning Rate: 0.0001001577 +2025-03-23 04:35:54,691 Train Loss: 0.0001243, Val Loss: 0.0001233 +2025-03-23 04:35:54,691 Epoch 160/2000 +2025-03-23 04:40:34,094 Current Learning Rate: 0.0000954915 +2025-03-23 04:40:34,910 Train Loss: 0.0001385, Val Loss: 0.0001215 +2025-03-23 04:40:34,911 Epoch 161/2000 +2025-03-23 04:45:13,915 Current Learning Rate: 0.0000909251 +2025-03-23 04:45:14,751 Train Loss: 0.0001330, Val 
Loss: 0.0001214 +2025-03-23 04:45:14,751 Epoch 162/2000 +2025-03-23 04:49:54,275 Current Learning Rate: 0.0000864597 +2025-03-23 04:49:55,136 Train Loss: 0.0001343, Val Loss: 0.0001209 +2025-03-23 04:49:55,137 Epoch 163/2000 +2025-03-23 04:54:34,398 Current Learning Rate: 0.0000820963 +2025-03-23 04:54:34,398 Train Loss: 0.0001219, Val Loss: 0.0001212 +2025-03-23 04:54:34,399 Epoch 164/2000 +2025-03-23 04:59:14,655 Current Learning Rate: 0.0000778360 +2025-03-23 04:59:14,656 Train Loss: 0.0001249, Val Loss: 0.0001225 +2025-03-23 04:59:14,656 Epoch 165/2000 +2025-03-23 05:03:54,969 Current Learning Rate: 0.0000736799 +2025-03-23 05:03:54,969 Train Loss: 0.0001176, Val Loss: 0.0001230 +2025-03-23 05:03:54,969 Epoch 166/2000 +2025-03-23 05:08:34,813 Current Learning Rate: 0.0000696290 +2025-03-23 05:08:35,651 Train Loss: 0.0001324, Val Loss: 0.0001199 +2025-03-23 05:08:35,651 Epoch 167/2000 +2025-03-23 05:13:14,687 Current Learning Rate: 0.0000656842 +2025-03-23 05:13:15,546 Train Loss: 0.0001138, Val Loss: 0.0001195 +2025-03-23 05:13:15,547 Epoch 168/2000 +2025-03-23 05:17:54,585 Current Learning Rate: 0.0000618467 +2025-03-23 05:17:54,586 Train Loss: 0.0001277, Val Loss: 0.0001219 +2025-03-23 05:17:54,586 Epoch 169/2000 +2025-03-23 05:22:34,443 Current Learning Rate: 0.0000581172 +2025-03-23 05:22:35,278 Train Loss: 0.0001190, Val Loss: 0.0001190 +2025-03-23 05:22:35,279 Epoch 170/2000 +2025-03-23 05:27:14,379 Current Learning Rate: 0.0000544967 +2025-03-23 05:27:15,218 Train Loss: 0.0001147, Val Loss: 0.0001184 +2025-03-23 05:27:15,218 Epoch 171/2000 +2025-03-23 05:31:54,528 Current Learning Rate: 0.0000509862 +2025-03-23 05:31:55,382 Train Loss: 0.0001219, Val Loss: 0.0001182 +2025-03-23 05:31:55,382 Epoch 172/2000 +2025-03-23 05:36:34,686 Current Learning Rate: 0.0000475865 +2025-03-23 05:36:35,521 Train Loss: 0.0001153, Val Loss: 0.0001179 +2025-03-23 05:36:35,521 Epoch 173/2000 +2025-03-23 05:41:15,021 Current Learning Rate: 0.0000442984 +2025-03-23 
05:41:15,022 Train Loss: 0.0001220, Val Loss: 0.0001188 +2025-03-23 05:41:15,022 Epoch 174/2000 +2025-03-23 05:45:54,804 Current Learning Rate: 0.0000411227 +2025-03-23 05:45:55,666 Train Loss: 0.0001155, Val Loss: 0.0001175 +2025-03-23 05:45:55,667 Epoch 175/2000 +2025-03-23 05:50:35,132 Current Learning Rate: 0.0000380602 +2025-03-23 05:50:35,968 Train Loss: 0.0001187, Val Loss: 0.0001173 +2025-03-23 05:50:35,968 Epoch 176/2000 +2025-03-23 05:55:15,402 Current Learning Rate: 0.0000351118 +2025-03-23 05:55:15,403 Train Loss: 0.0001445, Val Loss: 0.0001175 +2025-03-23 05:55:15,403 Epoch 177/2000 +2025-03-23 05:59:55,409 Current Learning Rate: 0.0000322780 +2025-03-23 05:59:56,244 Train Loss: 0.0001116, Val Loss: 0.0001165 +2025-03-23 05:59:56,244 Epoch 178/2000 +2025-03-23 06:04:35,618 Current Learning Rate: 0.0000295596 +2025-03-23 06:04:36,443 Train Loss: 0.0001108, Val Loss: 0.0001165 +2025-03-23 06:04:36,443 Epoch 179/2000 +2025-03-23 06:09:16,029 Current Learning Rate: 0.0000269573 +2025-03-23 06:09:16,029 Train Loss: 0.0001331, Val Loss: 0.0001170 +2025-03-23 06:09:16,030 Epoch 180/2000 +2025-03-23 06:13:55,928 Current Learning Rate: 0.0000244717 +2025-03-23 06:13:56,760 Train Loss: 0.0001300, Val Loss: 0.0001162 +2025-03-23 06:13:56,760 Epoch 181/2000 +2025-03-23 06:18:36,065 Current Learning Rate: 0.0000221035 +2025-03-23 06:18:36,955 Train Loss: 0.0001233, Val Loss: 0.0001161 +2025-03-23 06:18:36,956 Epoch 182/2000 +2025-03-23 06:23:16,266 Current Learning Rate: 0.0000198532 +2025-03-23 06:23:16,267 Train Loss: 0.0001241, Val Loss: 0.0001164 +2025-03-23 06:23:16,267 Epoch 183/2000 +2025-03-23 06:27:56,403 Current Learning Rate: 0.0000177213 +2025-03-23 06:27:57,265 Train Loss: 0.0001080, Val Loss: 0.0001160 +2025-03-23 06:27:57,266 Epoch 184/2000 +2025-03-23 06:32:36,473 Current Learning Rate: 0.0000157084 +2025-03-23 06:32:37,309 Train Loss: 0.0001166, Val Loss: 0.0001157 +2025-03-23 06:32:37,309 Epoch 185/2000 +2025-03-23 06:37:17,060 Current Learning 
Rate: 0.0000138150 +2025-03-23 06:37:17,911 Train Loss: 0.0001094, Val Loss: 0.0001154 +2025-03-23 06:37:17,911 Epoch 186/2000 +2025-03-23 06:41:57,287 Current Learning Rate: 0.0000120416 +2025-03-23 06:41:57,287 Train Loss: 0.0001192, Val Loss: 0.0001155 +2025-03-23 06:41:57,288 Epoch 187/2000 +2025-03-23 06:46:37,248 Current Learning Rate: 0.0000103886 +2025-03-23 06:46:38,081 Train Loss: 0.0001227, Val Loss: 0.0001153 +2025-03-23 06:46:38,081 Epoch 188/2000 +2025-03-23 06:51:17,497 Current Learning Rate: 0.0000088564 +2025-03-23 06:51:18,333 Train Loss: 0.0001234, Val Loss: 0.0001152 +2025-03-23 06:51:18,333 Epoch 189/2000 +2025-03-23 06:55:58,027 Current Learning Rate: 0.0000074453 +2025-03-23 06:55:58,836 Train Loss: 0.0001083, Val Loss: 0.0001151 +2025-03-23 06:55:58,836 Epoch 190/2000 +2025-03-23 07:00:38,036 Current Learning Rate: 0.0000061558 +2025-03-23 07:00:38,909 Train Loss: 0.0001168, Val Loss: 0.0001151 +2025-03-23 07:00:38,910 Epoch 191/2000 +2025-03-23 07:05:18,187 Current Learning Rate: 0.0000049882 +2025-03-23 07:05:19,037 Train Loss: 0.0000970, Val Loss: 0.0001150 +2025-03-23 07:05:19,037 Epoch 192/2000 +2025-03-23 07:09:58,106 Current Learning Rate: 0.0000039426 +2025-03-23 07:09:58,911 Train Loss: 0.0001124, Val Loss: 0.0001150 +2025-03-23 07:09:58,911 Epoch 193/2000 +2025-03-23 07:14:37,896 Current Learning Rate: 0.0000030195 +2025-03-23 07:14:38,733 Train Loss: 0.0001066, Val Loss: 0.0001149 +2025-03-23 07:14:38,733 Epoch 194/2000 +2025-03-23 07:19:17,806 Current Learning Rate: 0.0000022190 +2025-03-23 07:19:18,673 Train Loss: 0.0001331, Val Loss: 0.0001149 +2025-03-23 07:19:18,674 Epoch 195/2000 +2025-03-23 07:23:57,985 Current Learning Rate: 0.0000015413 +2025-03-23 07:23:58,818 Train Loss: 0.0001056, Val Loss: 0.0001149 +2025-03-23 07:23:58,818 Epoch 196/2000 +2025-03-23 07:28:38,015 Current Learning Rate: 0.0000009866 +2025-03-23 07:28:38,852 Train Loss: 0.0001027, Val Loss: 0.0001149 +2025-03-23 07:28:38,852 Epoch 197/2000 +2025-03-23 
07:33:18,087 Current Learning Rate: 0.0000005551 +2025-03-23 07:33:18,908 Train Loss: 0.0001222, Val Loss: 0.0001149 +2025-03-23 07:33:18,908 Epoch 198/2000 +2025-03-23 07:37:58,453 Current Learning Rate: 0.0000002467 +2025-03-23 07:37:58,454 Train Loss: 0.0001126, Val Loss: 0.0001149 +2025-03-23 07:37:58,454 Epoch 199/2000 +2025-03-23 07:42:38,350 Current Learning Rate: 0.0000000617 +2025-03-23 07:42:39,150 Train Loss: 0.0001286, Val Loss: 0.0001148 +2025-03-23 07:42:39,151 Epoch 200/2000 +2025-03-23 07:47:18,686 Current Learning Rate: 0.0000000000 +2025-03-23 07:47:18,687 Train Loss: 0.0001202, Val Loss: 0.0001149 +2025-03-23 07:47:18,687 Epoch 201/2000 +2025-03-23 07:51:58,520 Current Learning Rate: 0.0000000617 +2025-03-23 07:51:58,521 Train Loss: 0.0001168, Val Loss: 0.0001149 +2025-03-23 07:51:58,522 Epoch 202/2000 +2025-03-23 07:56:38,738 Current Learning Rate: 0.0000002467 +2025-03-23 07:56:38,738 Train Loss: 0.0001204, Val Loss: 0.0001149 +2025-03-23 07:56:38,738 Epoch 203/2000 +2025-03-23 08:01:18,788 Current Learning Rate: 0.0000005551 +2025-03-23 08:01:18,788 Train Loss: 0.0001110, Val Loss: 0.0001148 +2025-03-23 08:01:18,789 Epoch 204/2000 +2025-03-23 08:05:58,858 Current Learning Rate: 0.0000009866 +2025-03-23 08:05:59,757 Train Loss: 0.0001155, Val Loss: 0.0001148 +2025-03-23 08:05:59,757 Epoch 205/2000 +2025-03-23 08:10:39,076 Current Learning Rate: 0.0000015413 +2025-03-23 08:10:39,994 Train Loss: 0.0001235, Val Loss: 0.0001148 +2025-03-23 08:10:39,995 Epoch 206/2000 +2025-03-23 08:15:19,328 Current Learning Rate: 0.0000022190 +2025-03-23 08:15:19,329 Train Loss: 0.0001063, Val Loss: 0.0001149 +2025-03-23 08:15:19,329 Epoch 207/2000 +2025-03-23 08:19:59,209 Current Learning Rate: 0.0000030195 +2025-03-23 08:19:59,210 Train Loss: 0.0001099, Val Loss: 0.0001148 +2025-03-23 08:19:59,210 Epoch 208/2000 +2025-03-23 08:24:39,187 Current Learning Rate: 0.0000039426 +2025-03-23 08:24:39,187 Train Loss: 0.0001081, Val Loss: 0.0001149 +2025-03-23 
08:24:39,187 Epoch 209/2000 +2025-03-23 08:29:18,947 Current Learning Rate: 0.0000049882 +2025-03-23 08:29:18,947 Train Loss: 0.0001069, Val Loss: 0.0001149 +2025-03-23 08:29:18,947 Epoch 210/2000 +2025-03-23 08:33:58,653 Current Learning Rate: 0.0000061558 +2025-03-23 08:33:58,654 Train Loss: 0.0001061, Val Loss: 0.0001149 +2025-03-23 08:33:58,654 Epoch 211/2000 +2025-03-23 08:38:38,365 Current Learning Rate: 0.0000074453 +2025-03-23 08:38:38,365 Train Loss: 0.0001301, Val Loss: 0.0001149 +2025-03-23 08:38:38,366 Epoch 212/2000 +2025-03-23 08:43:17,876 Current Learning Rate: 0.0000088564 +2025-03-23 08:43:17,877 Train Loss: 0.0001281, Val Loss: 0.0001149 +2025-03-23 08:43:17,877 Epoch 213/2000 +2025-03-23 08:47:57,635 Current Learning Rate: 0.0000103886 +2025-03-23 08:47:57,635 Train Loss: 0.0001135, Val Loss: 0.0001149 +2025-03-23 08:47:57,635 Epoch 214/2000 +2025-03-23 08:52:37,544 Current Learning Rate: 0.0000120416 +2025-03-23 08:52:37,545 Train Loss: 0.0001048, Val Loss: 0.0001148 +2025-03-23 08:52:37,545 Epoch 215/2000 +2025-03-23 08:57:17,646 Current Learning Rate: 0.0000138150 +2025-03-23 08:57:17,646 Train Loss: 0.0001238, Val Loss: 0.0001149 +2025-03-23 08:57:17,647 Epoch 216/2000 +2025-03-23 09:01:57,595 Current Learning Rate: 0.0000157084 +2025-03-23 09:01:57,596 Train Loss: 0.0001116, Val Loss: 0.0001150 +2025-03-23 09:01:57,596 Epoch 217/2000 +2025-03-23 09:06:37,671 Current Learning Rate: 0.0000177213 +2025-03-23 09:06:37,672 Train Loss: 0.0001161, Val Loss: 0.0001149 +2025-03-23 09:06:37,672 Epoch 218/2000 +2025-03-23 09:11:17,727 Current Learning Rate: 0.0000198532 +2025-03-23 09:11:17,728 Train Loss: 0.0001230, Val Loss: 0.0001149 +2025-03-23 09:11:17,728 Epoch 219/2000 +2025-03-23 09:15:57,885 Current Learning Rate: 0.0000221035 +2025-03-23 09:15:57,885 Train Loss: 0.0001106, Val Loss: 0.0001149 +2025-03-23 09:15:57,885 Epoch 220/2000 +2025-03-23 09:20:37,536 Current Learning Rate: 0.0000244717 +2025-03-23 09:20:37,536 Train Loss: 0.0001138, Val 
Loss: 0.0001149 +2025-03-23 09:20:37,536 Epoch 221/2000 +2025-03-23 09:25:17,580 Current Learning Rate: 0.0000269573 +2025-03-23 09:25:17,580 Train Loss: 0.0001265, Val Loss: 0.0001151 +2025-03-23 09:25:17,581 Epoch 222/2000 +2025-03-23 09:29:57,364 Current Learning Rate: 0.0000295596 +2025-03-23 09:29:57,365 Train Loss: 0.0000965, Val Loss: 0.0001150 +2025-03-23 09:29:57,365 Epoch 223/2000 +2025-03-23 09:34:37,301 Current Learning Rate: 0.0000322780 +2025-03-23 09:34:37,301 Train Loss: 0.0001335, Val Loss: 0.0001153 +2025-03-23 09:34:37,302 Epoch 224/2000 +2025-03-23 09:39:17,292 Current Learning Rate: 0.0000351118 +2025-03-23 09:39:17,292 Train Loss: 0.0001190, Val Loss: 0.0001150 +2025-03-23 09:39:17,293 Epoch 225/2000 +2025-03-23 09:43:57,271 Current Learning Rate: 0.0000380602 +2025-03-23 09:43:57,272 Train Loss: 0.0001175, Val Loss: 0.0001154 +2025-03-23 09:43:57,272 Epoch 226/2000 +2025-03-23 09:48:37,479 Current Learning Rate: 0.0000411227 +2025-03-23 09:48:37,481 Train Loss: 0.0001065, Val Loss: 0.0001154 +2025-03-23 09:48:37,482 Epoch 227/2000 +2025-03-23 09:53:17,486 Current Learning Rate: 0.0000442984 +2025-03-23 09:53:17,487 Train Loss: 0.0001025, Val Loss: 0.0001154 +2025-03-23 09:53:17,487 Epoch 228/2000 +2025-03-23 09:57:57,554 Current Learning Rate: 0.0000475865 +2025-03-23 09:57:57,554 Train Loss: 0.0001054, Val Loss: 0.0001151 +2025-03-23 09:57:57,555 Epoch 229/2000 +2025-03-23 10:02:37,561 Current Learning Rate: 0.0000509862 +2025-03-23 10:02:37,562 Train Loss: 0.0001083, Val Loss: 0.0001153 +2025-03-23 10:02:37,562 Epoch 230/2000 +2025-03-23 10:07:17,303 Current Learning Rate: 0.0000544967 +2025-03-23 10:07:17,303 Train Loss: 0.0001084, Val Loss: 0.0001150 +2025-03-23 10:07:17,303 Epoch 231/2000 +2025-03-23 10:11:56,976 Current Learning Rate: 0.0000581172 +2025-03-23 10:11:56,977 Train Loss: 0.0001063, Val Loss: 0.0001159 +2025-03-23 10:11:56,977 Epoch 232/2000 +2025-03-23 10:16:36,658 Current Learning Rate: 0.0000618467 +2025-03-23 
10:16:36,659 Train Loss: 0.0001173, Val Loss: 0.0001154 +2025-03-23 10:16:36,659 Epoch 233/2000 +2025-03-23 10:21:16,399 Current Learning Rate: 0.0000656842 +2025-03-23 10:21:16,399 Train Loss: 0.0001224, Val Loss: 0.0001170 +2025-03-23 10:21:16,400 Epoch 234/2000 +2025-03-23 10:25:56,379 Current Learning Rate: 0.0000696290 +2025-03-23 10:25:56,379 Train Loss: 0.0001232, Val Loss: 0.0001157 +2025-03-23 10:25:56,380 Epoch 235/2000 +2025-03-23 10:30:36,361 Current Learning Rate: 0.0000736799 +2025-03-23 10:30:36,361 Train Loss: 0.0001059, Val Loss: 0.0001155 +2025-03-23 10:30:36,361 Epoch 236/2000 +2025-03-23 10:35:16,280 Current Learning Rate: 0.0000778360 +2025-03-23 10:35:16,281 Train Loss: 0.0000995, Val Loss: 0.0001155 +2025-03-23 10:35:16,281 Epoch 237/2000 +2025-03-23 10:39:55,964 Current Learning Rate: 0.0000820963 +2025-03-23 10:39:55,965 Train Loss: 0.0001203, Val Loss: 0.0001160 +2025-03-23 10:39:55,965 Epoch 238/2000 +2025-03-23 10:44:35,643 Current Learning Rate: 0.0000864597 +2025-03-23 10:44:35,643 Train Loss: 0.0001277, Val Loss: 0.0001166 +2025-03-23 10:44:35,644 Epoch 239/2000 +2025-03-23 10:49:15,508 Current Learning Rate: 0.0000909251 +2025-03-23 10:49:15,508 Train Loss: 0.0001310, Val Loss: 0.0001166 +2025-03-23 10:49:15,508 Epoch 240/2000 +2025-03-23 10:53:55,450 Current Learning Rate: 0.0000954915 +2025-03-23 10:53:55,450 Train Loss: 0.0001239, Val Loss: 0.0001167 +2025-03-23 10:53:55,451 Epoch 241/2000 +2025-03-23 10:58:35,154 Current Learning Rate: 0.0001001577 +2025-03-23 10:58:35,154 Train Loss: 0.0001083, Val Loss: 0.0001175 +2025-03-23 10:58:35,155 Epoch 242/2000 +2025-03-23 11:03:15,193 Current Learning Rate: 0.0001049225 +2025-03-23 11:03:15,193 Train Loss: 0.0001272, Val Loss: 0.0001199 +2025-03-23 11:03:15,193 Epoch 243/2000 +2025-03-23 11:07:54,915 Current Learning Rate: 0.0001097848 +2025-03-23 11:07:54,915 Train Loss: 0.0001165, Val Loss: 0.0001153 +2025-03-23 11:07:54,915 Epoch 244/2000 +2025-03-23 11:12:34,392 Current Learning 
Rate: 0.0001147434 +2025-03-23 11:12:34,393 Train Loss: 0.0001207, Val Loss: 0.0001186 +2025-03-23 11:12:34,393 Epoch 245/2000 +2025-03-23 11:17:14,148 Current Learning Rate: 0.0001197970 +2025-03-23 11:17:14,148 Train Loss: 0.0001133, Val Loss: 0.0001159 +2025-03-23 11:17:14,148 Epoch 246/2000 +2025-03-23 11:21:54,239 Current Learning Rate: 0.0001249445 +2025-03-23 11:21:54,240 Train Loss: 0.0001040, Val Loss: 0.0001155 +2025-03-23 11:21:54,240 Epoch 247/2000 +2025-03-23 11:26:34,028 Current Learning Rate: 0.0001301845 +2025-03-23 11:26:34,028 Train Loss: 0.0001313, Val Loss: 0.0001209 +2025-03-23 11:26:34,029 Epoch 248/2000 +2025-03-23 11:31:13,801 Current Learning Rate: 0.0001355157 +2025-03-23 11:31:13,801 Train Loss: 0.0001115, Val Loss: 0.0001151 +2025-03-23 11:31:13,802 Epoch 249/2000 +2025-03-23 11:35:53,599 Current Learning Rate: 0.0001409369 +2025-03-23 11:35:53,599 Train Loss: 0.0001367, Val Loss: 0.0001179 +2025-03-23 11:35:53,599 Epoch 250/2000 +2025-03-23 11:40:33,358 Current Learning Rate: 0.0001464466 +2025-03-23 11:40:33,359 Train Loss: 0.0001259, Val Loss: 0.0001176 +2025-03-23 11:40:33,359 Epoch 251/2000 +2025-03-23 11:45:13,196 Current Learning Rate: 0.0001520436 +2025-03-23 11:45:13,196 Train Loss: 0.0001192, Val Loss: 0.0001150 +2025-03-23 11:45:13,196 Epoch 252/2000 +2025-03-23 11:49:53,248 Current Learning Rate: 0.0001577264 +2025-03-23 11:49:53,248 Train Loss: 0.0001212, Val Loss: 0.0001194 +2025-03-23 11:49:53,249 Epoch 253/2000 +2025-03-23 11:54:32,972 Current Learning Rate: 0.0001634937 +2025-03-23 11:54:32,973 Train Loss: 0.0001246, Val Loss: 0.0001175 +2025-03-23 11:54:32,973 Epoch 254/2000 +2025-03-23 11:59:12,808 Current Learning Rate: 0.0001693441 +2025-03-23 11:59:12,809 Train Loss: 0.0001454, Val Loss: 0.0001201 +2025-03-23 11:59:12,809 Epoch 255/2000 +2025-03-23 12:03:52,813 Current Learning Rate: 0.0001752760 +2025-03-23 12:03:52,814 Train Loss: 0.0001177, Val Loss: 0.0001190 +2025-03-23 12:03:52,814 Epoch 256/2000 +2025-03-23 
12:08:32,551 Current Learning Rate: 0.0001812880 +2025-03-23 12:08:32,551 Train Loss: 0.0001250, Val Loss: 0.0001168 +2025-03-23 12:08:32,551 Epoch 257/2000 +2025-03-23 12:13:12,151 Current Learning Rate: 0.0001873787 +2025-03-23 12:13:12,151 Train Loss: 0.0001065, Val Loss: 0.0001150 +2025-03-23 12:13:12,151 Epoch 258/2000 +2025-03-23 12:17:52,154 Current Learning Rate: 0.0001935465 +2025-03-23 12:17:52,154 Train Loss: 0.0001137, Val Loss: 0.0001213 +2025-03-23 12:17:52,155 Epoch 259/2000 +2025-03-23 12:22:32,318 Current Learning Rate: 0.0001997899 +2025-03-23 12:22:32,318 Train Loss: 0.0001165, Val Loss: 0.0001156 +2025-03-23 12:22:32,319 Epoch 260/2000 +2025-03-23 12:27:12,567 Current Learning Rate: 0.0002061074 +2025-03-23 12:27:12,567 Train Loss: 0.0001320, Val Loss: 0.0001150 +2025-03-23 12:27:12,567 Epoch 261/2000 +2025-03-23 12:31:51,863 Current Learning Rate: 0.0002124974 +2025-03-23 12:31:51,863 Train Loss: 0.0001190, Val Loss: 0.0001149 +2025-03-23 12:31:51,863 Epoch 262/2000 +2025-03-23 12:36:31,831 Current Learning Rate: 0.0002189583 +2025-03-23 12:36:31,831 Train Loss: 0.0001271, Val Loss: 0.0001397 +2025-03-23 12:36:31,832 Epoch 263/2000 +2025-03-23 12:41:11,799 Current Learning Rate: 0.0002254886 +2025-03-23 12:41:11,799 Train Loss: 0.0001294, Val Loss: 0.0001256 +2025-03-23 12:41:11,802 Epoch 264/2000 +2025-03-23 12:45:51,731 Current Learning Rate: 0.0002320866 +2025-03-23 12:45:51,732 Train Loss: 0.0001212, Val Loss: 0.0001187 +2025-03-23 12:45:51,732 Epoch 265/2000 +2025-03-23 12:50:31,671 Current Learning Rate: 0.0002387507 +2025-03-23 12:50:31,671 Train Loss: 0.0001289, Val Loss: 0.0001545 +2025-03-23 12:50:31,671 Epoch 266/2000 +2025-03-23 12:55:11,901 Current Learning Rate: 0.0002454793 +2025-03-23 12:55:11,901 Train Loss: 0.0001362, Val Loss: 0.0001167 +2025-03-23 12:55:11,901 Epoch 267/2000 +2025-03-23 12:59:51,601 Current Learning Rate: 0.0002522707 +2025-03-23 12:59:51,601 Train Loss: 0.0001169, Val Loss: 0.0001344 +2025-03-23 
12:59:51,602 Epoch 268/2000 +2025-03-23 13:04:31,235 Current Learning Rate: 0.0002591232 +2025-03-23 13:04:31,235 Train Loss: 0.0001249, Val Loss: 0.0001258 +2025-03-23 13:04:31,235 Epoch 269/2000 +2025-03-23 13:09:11,150 Current Learning Rate: 0.0002660351 +2025-03-23 13:09:11,150 Train Loss: 0.0001234, Val Loss: 0.0001193 +2025-03-23 13:09:11,150 Epoch 270/2000 +2025-03-23 13:13:51,136 Current Learning Rate: 0.0002730048 +2025-03-23 13:13:51,136 Train Loss: 0.0001288, Val Loss: 0.0001170 +2025-03-23 13:13:51,137 Epoch 271/2000 +2025-03-23 13:18:30,952 Current Learning Rate: 0.0002800304 +2025-03-23 13:18:30,954 Train Loss: 0.0001517, Val Loss: 0.0001261 +2025-03-23 13:18:30,958 Epoch 272/2000 +2025-03-23 13:23:10,491 Current Learning Rate: 0.0002871104 +2025-03-23 13:23:10,492 Train Loss: 0.0001392, Val Loss: 0.0001168 +2025-03-23 13:23:10,492 Epoch 273/2000 +2025-03-23 13:27:50,239 Current Learning Rate: 0.0002942428 +2025-03-23 13:27:50,239 Train Loss: 0.0001430, Val Loss: 0.0001279 +2025-03-23 13:27:50,239 Epoch 274/2000 +2025-03-23 13:32:30,099 Current Learning Rate: 0.0003014261 +2025-03-23 13:32:30,100 Train Loss: 0.0001329, Val Loss: 0.0001310 +2025-03-23 13:32:30,100 Epoch 275/2000 +2025-03-23 13:37:09,887 Current Learning Rate: 0.0003086583 +2025-03-23 13:37:09,888 Train Loss: 0.0001173, Val Loss: 0.0001184 +2025-03-23 13:37:09,888 Epoch 276/2000 +2025-03-23 13:41:49,987 Current Learning Rate: 0.0003159377 +2025-03-23 13:41:49,987 Train Loss: 0.0001541, Val Loss: 0.0001339 +2025-03-23 13:41:49,988 Epoch 277/2000 +2025-03-23 13:46:30,210 Current Learning Rate: 0.0003232626 +2025-03-23 13:46:30,210 Train Loss: 0.0001549, Val Loss: 0.0001369 +2025-03-23 13:46:30,211 Epoch 278/2000 +2025-03-23 13:51:10,051 Current Learning Rate: 0.0003306310 +2025-03-23 13:51:10,052 Train Loss: 0.0001152, Val Loss: 0.0001210 +2025-03-23 13:51:10,052 Epoch 279/2000 +2025-03-23 13:55:50,136 Current Learning Rate: 0.0003380413 +2025-03-23 13:55:50,137 Train Loss: 0.0001261, Val 
Loss: 0.0001271 +2025-03-23 13:55:50,137 Epoch 280/2000 +2025-03-23 14:00:30,154 Current Learning Rate: 0.0003454915 +2025-03-23 14:00:30,154 Train Loss: 0.0001170, Val Loss: 0.0001216 +2025-03-23 14:00:30,155 Epoch 281/2000 +2025-03-23 14:05:10,001 Current Learning Rate: 0.0003529798 +2025-03-23 14:05:10,001 Train Loss: 0.0001503, Val Loss: 0.0001237 +2025-03-23 14:05:10,001 Epoch 282/2000 +2025-03-23 14:09:49,682 Current Learning Rate: 0.0003605044 +2025-03-23 14:09:49,682 Train Loss: 0.0001240, Val Loss: 0.0001183 +2025-03-23 14:09:49,682 Epoch 283/2000 +2025-03-23 14:14:29,641 Current Learning Rate: 0.0003680635 +2025-03-23 14:14:29,641 Train Loss: 0.0001366, Val Loss: 0.0001270 +2025-03-23 14:14:29,641 Epoch 284/2000 +2025-03-23 14:19:09,319 Current Learning Rate: 0.0003756551 +2025-03-23 14:19:09,320 Train Loss: 0.0001339, Val Loss: 0.0001699 +2025-03-23 14:19:09,320 Epoch 285/2000 +2025-03-23 14:23:49,052 Current Learning Rate: 0.0003832773 +2025-03-23 14:23:49,053 Train Loss: 0.0001252, Val Loss: 0.0001254 +2025-03-23 14:23:49,053 Epoch 286/2000 +2025-03-23 14:28:28,573 Current Learning Rate: 0.0003909284 +2025-03-23 14:28:28,574 Train Loss: 0.0001603, Val Loss: 0.0001413 +2025-03-23 14:28:28,574 Epoch 287/2000 +2025-03-23 14:33:08,260 Current Learning Rate: 0.0003986064 +2025-03-23 14:33:08,261 Train Loss: 0.0001203, Val Loss: 0.0001190 +2025-03-23 14:33:08,261 Epoch 288/2000 +2025-03-23 14:37:48,218 Current Learning Rate: 0.0004063093 +2025-03-23 14:37:48,218 Train Loss: 0.0001499, Val Loss: 0.0001412 +2025-03-23 14:37:48,219 Epoch 289/2000 +2025-03-23 14:42:28,239 Current Learning Rate: 0.0004140354 +2025-03-23 14:42:28,239 Train Loss: 0.0001187, Val Loss: 0.0001250 +2025-03-23 14:42:28,239 Epoch 290/2000 +2025-03-23 14:47:08,036 Current Learning Rate: 0.0004217828 +2025-03-23 14:47:08,037 Train Loss: 0.0001338, Val Loss: 0.0001203 +2025-03-23 14:47:08,037 Epoch 291/2000 +2025-03-23 14:51:47,814 Current Learning Rate: 0.0004295494 +2025-03-23 
14:51:47,814 Train Loss: 0.0001631, Val Loss: 0.0001286 +2025-03-23 14:51:47,815 Epoch 292/2000 +2025-03-23 14:56:27,719 Current Learning Rate: 0.0004373334 +2025-03-23 14:56:27,719 Train Loss: 0.0001425, Val Loss: 0.0001211 +2025-03-23 14:56:27,720 Epoch 293/2000 +2025-03-23 15:01:07,623 Current Learning Rate: 0.0004451328 +2025-03-23 15:01:07,624 Train Loss: 0.0001428, Val Loss: 0.0001734 +2025-03-23 15:01:07,624 Epoch 294/2000 +2025-03-23 15:05:47,593 Current Learning Rate: 0.0004529458 +2025-03-23 15:05:47,594 Train Loss: 0.0001317, Val Loss: 0.0001284 +2025-03-23 15:05:47,594 Epoch 295/2000 +2025-03-23 15:10:27,328 Current Learning Rate: 0.0004607705 +2025-03-23 15:10:27,328 Train Loss: 0.0001545, Val Loss: 0.0001286 +2025-03-23 15:10:27,329 Epoch 296/2000 +2025-03-23 15:15:07,134 Current Learning Rate: 0.0004686047 +2025-03-23 15:15:07,134 Train Loss: 0.0001376, Val Loss: 0.0001895 +2025-03-23 15:15:07,135 Epoch 297/2000 +2025-03-23 15:19:46,840 Current Learning Rate: 0.0004764468 +2025-03-23 15:19:46,840 Train Loss: 0.0001697, Val Loss: 0.0001429 +2025-03-23 15:19:46,841 Epoch 298/2000 +2025-03-23 15:24:26,879 Current Learning Rate: 0.0004842946 +2025-03-23 15:24:26,880 Train Loss: 0.0001610, Val Loss: 0.0001560 +2025-03-23 15:24:26,880 Epoch 299/2000 +2025-03-23 15:29:06,721 Current Learning Rate: 0.0004921463 +2025-03-23 15:29:06,722 Train Loss: 0.0001412, Val Loss: 0.0001266 +2025-03-23 15:29:06,722 Epoch 300/2000 +2025-03-23 15:33:46,872 Current Learning Rate: 0.0005000000 +2025-03-23 15:33:46,872 Train Loss: 0.0001546, Val Loss: 0.0001672 +2025-03-23 15:33:46,872 Epoch 301/2000 +2025-03-23 15:38:26,677 Current Learning Rate: 0.0005078537 +2025-03-23 15:38:26,677 Train Loss: 0.0001546, Val Loss: 0.0001382 +2025-03-23 15:38:26,678 Epoch 302/2000 +2025-03-23 15:43:06,460 Current Learning Rate: 0.0005157054 +2025-03-23 15:43:06,460 Train Loss: 0.0001729, Val Loss: 0.0001391 +2025-03-23 15:43:06,460 Epoch 303/2000 +2025-03-23 15:47:46,390 Current Learning 
Rate: 0.0005235532 +2025-03-23 15:47:46,390 Train Loss: 0.0001607, Val Loss: 0.0001511 +2025-03-23 15:47:46,391 Epoch 304/2000 +2025-03-23 15:52:26,311 Current Learning Rate: 0.0005313953 +2025-03-23 15:52:26,312 Train Loss: 0.0001930, Val Loss: 0.0001456 +2025-03-23 15:52:26,312 Epoch 305/2000 +2025-03-23 15:57:06,698 Current Learning Rate: 0.0005392295 +2025-03-23 15:57:06,698 Train Loss: 0.0001750, Val Loss: 0.0001716 +2025-03-23 15:57:06,699 Epoch 306/2000 +2025-03-23 16:01:46,843 Current Learning Rate: 0.0005470542 +2025-03-23 16:01:46,844 Train Loss: 0.0001264, Val Loss: 0.0001317 +2025-03-23 16:01:46,844 Epoch 307/2000 +2025-03-23 16:06:26,569 Current Learning Rate: 0.0005548672 +2025-03-23 16:06:26,569 Train Loss: 0.0001634, Val Loss: 0.0001355 +2025-03-23 16:06:26,570 Epoch 308/2000 +2025-03-23 16:11:06,522 Current Learning Rate: 0.0005626666 +2025-03-23 16:11:06,522 Train Loss: 0.0001803, Val Loss: 0.0001604 +2025-03-23 16:11:06,523 Epoch 309/2000 +2025-03-23 16:15:46,233 Current Learning Rate: 0.0005704506 +2025-03-23 16:15:46,234 Train Loss: 0.0001507, Val Loss: 0.0001334 +2025-03-23 16:15:46,234 Epoch 310/2000 +2025-03-23 16:20:25,704 Current Learning Rate: 0.0005782172 +2025-03-23 16:20:25,705 Train Loss: 0.0001832, Val Loss: 0.0001609 +2025-03-23 16:20:25,705 Epoch 311/2000 +2025-03-23 16:25:05,115 Current Learning Rate: 0.0005859646 +2025-03-23 16:25:05,116 Train Loss: 0.0001794, Val Loss: 0.0001470 +2025-03-23 16:25:05,116 Epoch 312/2000 +2025-03-23 16:29:44,839 Current Learning Rate: 0.0005936907 +2025-03-23 16:29:44,840 Train Loss: 0.0001425, Val Loss: 0.0001803 +2025-03-23 16:29:44,840 Epoch 313/2000 +2025-03-23 16:34:24,564 Current Learning Rate: 0.0006013936 +2025-03-23 16:34:24,564 Train Loss: 0.0001538, Val Loss: 0.0001359 +2025-03-23 16:34:24,564 Epoch 314/2000 +2025-03-23 16:39:04,347 Current Learning Rate: 0.0006090716 +2025-03-23 16:39:04,347 Train Loss: 0.0001658, Val Loss: 0.0001355 +2025-03-23 16:39:04,347 Epoch 315/2000 +2025-03-23 
16:43:43,781 Current Learning Rate: 0.0006167227 +2025-03-23 16:43:43,781 Train Loss: 0.0002005, Val Loss: 0.0001670 +2025-03-23 16:43:43,782 Epoch 316/2000 +2025-03-23 16:48:23,274 Current Learning Rate: 0.0006243449 +2025-03-23 16:48:23,275 Train Loss: 0.0001463, Val Loss: 0.0001363 +2025-03-23 16:48:23,275 Epoch 317/2000 +2025-03-23 16:53:02,697 Current Learning Rate: 0.0006319365 +2025-03-23 16:53:02,698 Train Loss: 0.0001653, Val Loss: 0.0001369 +2025-03-23 16:53:02,698 Epoch 318/2000 +2025-03-23 16:57:42,307 Current Learning Rate: 0.0006394956 +2025-03-23 16:57:42,307 Train Loss: 0.0001421, Val Loss: 0.0001322 +2025-03-23 16:57:42,307 Epoch 319/2000 +2025-03-23 17:02:22,006 Current Learning Rate: 0.0006470202 +2025-03-23 17:02:22,007 Train Loss: 0.0001345, Val Loss: 0.0001898 +2025-03-23 17:02:22,007 Epoch 320/2000 +2025-03-23 17:07:01,691 Current Learning Rate: 0.0006545085 +2025-03-23 17:07:01,691 Train Loss: 0.0001632, Val Loss: 0.0001452 +2025-03-23 17:07:01,691 Epoch 321/2000 +2025-03-23 17:11:41,090 Current Learning Rate: 0.0006619587 +2025-03-23 17:11:41,090 Train Loss: 0.0001878, Val Loss: 0.0001994 +2025-03-23 17:11:41,090 Epoch 322/2000 +2025-03-23 17:16:21,070 Current Learning Rate: 0.0006693690 +2025-03-23 17:16:21,070 Train Loss: 0.0001729, Val Loss: 0.0001891 +2025-03-23 17:16:21,070 Epoch 323/2000 +2025-03-23 17:21:01,286 Current Learning Rate: 0.0006767374 +2025-03-23 17:21:01,286 Train Loss: 0.0001617, Val Loss: 0.0001400 +2025-03-23 17:21:01,287 Epoch 324/2000 +2025-03-23 17:25:41,082 Current Learning Rate: 0.0006840623 +2025-03-23 17:25:41,083 Train Loss: 0.0001753, Val Loss: 0.0001810 +2025-03-23 17:25:41,083 Epoch 325/2000 +2025-03-23 17:30:21,385 Current Learning Rate: 0.0006913417 +2025-03-23 17:30:21,385 Train Loss: 0.0001596, Val Loss: 0.0001380 +2025-03-23 17:30:21,386 Epoch 326/2000 +2025-03-23 17:35:01,047 Current Learning Rate: 0.0006985739 +2025-03-23 17:35:01,048 Train Loss: 0.0001343, Val Loss: 0.0001318 +2025-03-23 
17:35:01,048 Epoch 327/2000 +2025-03-23 17:39:40,956 Current Learning Rate: 0.0007057572 +2025-03-23 17:39:40,956 Train Loss: 0.0001801, Val Loss: 0.0001375 +2025-03-23 17:39:40,956 Epoch 328/2000 +2025-03-23 17:44:20,946 Current Learning Rate: 0.0007128896 +2025-03-23 17:44:20,947 Train Loss: 0.0001843, Val Loss: 0.0001660 +2025-03-23 17:44:20,947 Epoch 329/2000 +2025-03-23 17:49:01,379 Current Learning Rate: 0.0007199696 +2025-03-23 17:49:01,380 Train Loss: 0.0001702, Val Loss: 0.0001885 +2025-03-23 17:49:01,380 Epoch 330/2000 +2025-03-23 17:53:42,029 Current Learning Rate: 0.0007269952 +2025-03-23 17:53:42,030 Train Loss: 0.0001645, Val Loss: 0.0001623 +2025-03-23 17:53:42,030 Epoch 331/2000 +2025-03-23 17:58:21,888 Current Learning Rate: 0.0007339649 +2025-03-23 17:58:21,889 Train Loss: 0.0001574, Val Loss: 0.0001460 +2025-03-23 17:58:21,889 Epoch 332/2000 +2025-03-23 18:03:01,750 Current Learning Rate: 0.0007408768 +2025-03-23 18:03:01,751 Train Loss: 0.0001799, Val Loss: 0.0001573 +2025-03-23 18:03:01,751 Epoch 333/2000 +2025-03-23 18:07:41,775 Current Learning Rate: 0.0007477293 +2025-03-23 18:07:41,775 Train Loss: 0.0001676, Val Loss: 0.0001363 +2025-03-23 18:07:41,775 Epoch 334/2000 +2025-03-23 18:12:21,796 Current Learning Rate: 0.0007545207 +2025-03-23 18:12:21,796 Train Loss: 0.0001861, Val Loss: 0.0001525 +2025-03-23 18:12:21,797 Epoch 335/2000 +2025-03-23 18:17:01,867 Current Learning Rate: 0.0007612493 +2025-03-23 18:17:01,867 Train Loss: 0.0001653, Val Loss: 0.0001341 +2025-03-23 18:17:01,868 Epoch 336/2000 +2025-03-23 18:21:42,160 Current Learning Rate: 0.0007679134 +2025-03-23 18:21:42,160 Train Loss: 0.0001625, Val Loss: 0.0001632 +2025-03-23 18:21:42,160 Epoch 337/2000 +2025-03-23 18:26:22,119 Current Learning Rate: 0.0007745114 +2025-03-23 18:26:22,119 Train Loss: 0.0001673, Val Loss: 0.0001432 +2025-03-23 18:26:22,120 Epoch 338/2000 +2025-03-23 18:31:02,117 Current Learning Rate: 0.0007810417 +2025-03-23 18:31:02,118 Train Loss: 0.0001614, Val 
Loss: 0.0001934 +2025-03-23 18:31:02,118 Epoch 339/2000 +2025-03-23 18:35:41,912 Current Learning Rate: 0.0007875026 +2025-03-23 18:35:41,912 Train Loss: 0.0001557, Val Loss: 0.0001467 +2025-03-23 18:35:41,913 Epoch 340/2000 +2025-03-23 18:40:21,456 Current Learning Rate: 0.0007938926 +2025-03-23 18:40:21,456 Train Loss: 0.0001773, Val Loss: 0.0001494 +2025-03-23 18:40:21,457 Epoch 341/2000 +2025-03-23 18:45:01,164 Current Learning Rate: 0.0008002101 +2025-03-23 18:45:01,165 Train Loss: 0.0001628, Val Loss: 0.0001342 +2025-03-23 18:45:01,165 Epoch 342/2000 +2025-03-23 18:49:41,701 Current Learning Rate: 0.0008064535 +2025-03-23 18:49:41,701 Train Loss: 0.0001529, Val Loss: 0.0001371 +2025-03-23 18:49:41,701 Epoch 343/2000 +2025-03-23 18:54:22,063 Current Learning Rate: 0.0008126213 +2025-03-23 18:54:22,063 Train Loss: 0.0001689, Val Loss: 0.0001678 +2025-03-23 18:54:22,064 Epoch 344/2000 +2025-03-23 18:59:02,495 Current Learning Rate: 0.0008187120 +2025-03-23 18:59:02,495 Train Loss: 0.0001879, Val Loss: 0.0002475 +2025-03-23 18:59:02,496 Epoch 345/2000 +2025-03-23 19:03:42,747 Current Learning Rate: 0.0008247240 +2025-03-23 19:03:42,747 Train Loss: 0.0002213, Val Loss: 0.0002284 +2025-03-23 19:03:42,748 Epoch 346/2000 +2025-03-23 19:08:22,455 Current Learning Rate: 0.0008306559 +2025-03-23 19:08:22,455 Train Loss: 0.0001520, Val Loss: 0.0001469 +2025-03-23 19:08:22,456 Epoch 347/2000 +2025-03-23 19:13:02,491 Current Learning Rate: 0.0008365063 +2025-03-23 19:13:02,492 Train Loss: 0.0001402, Val Loss: 0.0001444 +2025-03-23 19:13:02,492 Epoch 348/2000 +2025-03-23 19:17:42,671 Current Learning Rate: 0.0008422736 +2025-03-23 19:17:42,672 Train Loss: 0.0001591, Val Loss: 0.0001358 +2025-03-23 19:17:42,672 Epoch 349/2000 +2025-03-23 19:22:22,542 Current Learning Rate: 0.0008479564 +2025-03-23 19:22:22,542 Train Loss: 0.0001777, Val Loss: 0.0001489 +2025-03-23 19:22:22,542 Epoch 350/2000 +2025-03-23 19:27:02,799 Current Learning Rate: 0.0008535534 +2025-03-23 
19:27:02,799 Train Loss: 0.0001848, Val Loss: 0.0001313 +2025-03-23 19:27:02,800 Epoch 351/2000 +2025-03-23 19:31:43,146 Current Learning Rate: 0.0008590631 +2025-03-23 19:31:43,147 Train Loss: 0.0001882, Val Loss: 0.0001502 +2025-03-23 19:31:43,147 Epoch 352/2000 +2025-03-23 19:36:23,432 Current Learning Rate: 0.0008644843 +2025-03-23 19:36:23,433 Train Loss: 0.0001534, Val Loss: 0.0001372 +2025-03-23 19:36:23,433 Epoch 353/2000 +2025-03-23 19:41:03,186 Current Learning Rate: 0.0008698155 +2025-03-23 19:41:03,186 Train Loss: 0.0002339, Val Loss: 0.0001990 +2025-03-23 19:41:03,186 Epoch 354/2000 +2025-03-23 19:45:42,845 Current Learning Rate: 0.0008750555 +2025-03-23 19:45:42,845 Train Loss: 0.0001716, Val Loss: 0.0001583 +2025-03-23 19:45:42,845 Epoch 355/2000 +2025-03-23 19:50:22,942 Current Learning Rate: 0.0008802030 +2025-03-23 19:50:22,943 Train Loss: 0.0001841, Val Loss: 0.0001419 +2025-03-23 19:50:22,943 Epoch 356/2000 +2025-03-23 19:55:03,435 Current Learning Rate: 0.0008852566 +2025-03-23 19:55:03,436 Train Loss: 0.0001466, Val Loss: 0.0001370 +2025-03-23 19:55:03,436 Epoch 357/2000 +2025-03-23 19:59:43,473 Current Learning Rate: 0.0008902152 +2025-03-23 19:59:43,476 Train Loss: 0.0001678, Val Loss: 0.0001578 +2025-03-23 19:59:43,476 Epoch 358/2000 +2025-03-23 20:04:23,508 Current Learning Rate: 0.0008950775 +2025-03-23 20:04:23,508 Train Loss: 0.0002125, Val Loss: 0.0002278 +2025-03-23 20:04:23,508 Epoch 359/2000 +2025-03-23 20:09:03,849 Current Learning Rate: 0.0008998423 +2025-03-23 20:09:03,849 Train Loss: 0.0002034, Val Loss: 0.0001769 +2025-03-23 20:09:03,849 Epoch 360/2000 +2025-03-23 20:13:44,102 Current Learning Rate: 0.0009045085 +2025-03-23 20:13:44,102 Train Loss: 0.0001715, Val Loss: 0.0001467 +2025-03-23 20:13:44,103 Epoch 361/2000 +2025-03-23 20:18:24,306 Current Learning Rate: 0.0009090749 +2025-03-23 20:18:24,307 Train Loss: 0.0001627, Val Loss: 0.0001419 +2025-03-23 20:18:24,307 Epoch 362/2000 +2025-03-23 20:23:04,929 Current Learning 
Rate: 0.0009135403 +2025-03-23 20:23:04,930 Train Loss: 0.0002212, Val Loss: 0.0001645 +2025-03-23 20:23:04,930 Epoch 363/2000 +2025-03-23 20:27:45,913 Current Learning Rate: 0.0009179037 +2025-03-23 20:27:45,913 Train Loss: 0.0001823, Val Loss: 0.0001396 +2025-03-23 20:27:45,913 Epoch 364/2000 +2025-03-23 20:32:27,029 Current Learning Rate: 0.0009221640 +2025-03-23 20:32:27,029 Train Loss: 0.0001874, Val Loss: 0.0001942 +2025-03-23 20:32:27,030 Epoch 365/2000 +2025-03-23 20:37:07,532 Current Learning Rate: 0.0009263201 +2025-03-23 20:37:07,532 Train Loss: 0.0001695, Val Loss: 0.0001526 +2025-03-23 20:37:07,532 Epoch 366/2000 +2025-03-23 20:41:48,078 Current Learning Rate: 0.0009303710 +2025-03-23 20:41:48,078 Train Loss: 0.0001610, Val Loss: 0.0001463 +2025-03-23 20:41:48,078 Epoch 367/2000 +2025-03-23 20:46:28,296 Current Learning Rate: 0.0009343158 +2025-03-23 20:46:28,296 Train Loss: 0.0001736, Val Loss: 0.0001779 +2025-03-23 20:46:28,296 Epoch 368/2000 +2025-03-23 20:51:08,936 Current Learning Rate: 0.0009381533 +2025-03-23 20:51:08,936 Train Loss: 0.0001641, Val Loss: 0.0001669 +2025-03-23 20:51:08,937 Epoch 369/2000 +2025-03-23 20:55:49,544 Current Learning Rate: 0.0009418828 +2025-03-23 20:55:49,545 Train Loss: 0.0001705, Val Loss: 0.0001537 +2025-03-23 20:55:49,545 Epoch 370/2000 +2025-03-23 21:00:30,110 Current Learning Rate: 0.0009455033 +2025-03-23 21:00:30,111 Train Loss: 0.0001810, Val Loss: 0.0001746 +2025-03-23 21:00:30,111 Epoch 371/2000 +2025-03-23 21:05:11,485 Current Learning Rate: 0.0009490138 +2025-03-23 21:05:11,485 Train Loss: 0.0001715, Val Loss: 0.0001502 +2025-03-23 21:05:11,485 Epoch 372/2000 +2025-03-23 21:09:51,843 Current Learning Rate: 0.0009524135 +2025-03-23 21:09:51,844 Train Loss: 0.0001668, Val Loss: 0.0001644 +2025-03-23 21:09:51,844 Epoch 373/2000 +2025-03-23 21:14:32,658 Current Learning Rate: 0.0009557016 +2025-03-23 21:14:32,658 Train Loss: 0.0001467, Val Loss: 0.0001468 +2025-03-23 21:14:32,659 Epoch 374/2000 +2025-03-23 
21:19:14,093 Current Learning Rate: 0.0009588773 +2025-03-23 21:19:14,094 Train Loss: 0.0001438, Val Loss: 0.0001874 +2025-03-23 21:19:14,094 Epoch 375/2000 +2025-03-23 21:23:53,821 Current Learning Rate: 0.0009619398 +2025-03-23 21:23:53,822 Train Loss: 0.0001656, Val Loss: 0.0001546 +2025-03-23 21:23:53,822 Epoch 376/2000 +2025-03-23 21:28:33,710 Current Learning Rate: 0.0009648882 +2025-03-23 21:28:33,710 Train Loss: 0.0001424, Val Loss: 0.0001346 +2025-03-23 21:28:33,711 Epoch 377/2000 +2025-03-23 21:33:14,498 Current Learning Rate: 0.0009677220 +2025-03-23 21:33:14,498 Train Loss: 0.0001601, Val Loss: 0.0001406 +2025-03-23 21:33:14,499 Epoch 378/2000 +2025-03-23 21:37:54,998 Current Learning Rate: 0.0009704404 +2025-03-23 21:37:54,998 Train Loss: 0.0001717, Val Loss: 0.0001425 +2025-03-23 21:37:54,999 Epoch 379/2000 +2025-03-23 21:42:35,811 Current Learning Rate: 0.0009730427 +2025-03-23 21:42:35,811 Train Loss: 0.0001588, Val Loss: 0.0001486 +2025-03-23 21:42:35,812 Epoch 380/2000 +2025-03-23 21:47:16,521 Current Learning Rate: 0.0009755283 +2025-03-23 21:47:16,522 Train Loss: 0.0001649, Val Loss: 0.0001580 +2025-03-23 21:47:16,522 Epoch 381/2000 +2025-03-23 21:51:57,078 Current Learning Rate: 0.0009778965 +2025-03-23 21:51:57,078 Train Loss: 0.0001749, Val Loss: 0.0001474 +2025-03-23 21:51:57,079 Epoch 382/2000 +2025-03-23 21:56:37,854 Current Learning Rate: 0.0009801468 +2025-03-23 21:56:37,855 Train Loss: 0.0001700, Val Loss: 0.0001441 +2025-03-23 21:56:37,855 Epoch 383/2000 +2025-03-23 22:01:17,951 Current Learning Rate: 0.0009822787 +2025-03-23 22:01:17,951 Train Loss: 0.0001447, Val Loss: 0.0001685 +2025-03-23 22:01:17,952 Epoch 384/2000 +2025-03-23 22:05:58,313 Current Learning Rate: 0.0009842916 +2025-03-23 22:05:58,314 Train Loss: 0.0002207, Val Loss: 0.0001795 +2025-03-23 22:05:58,314 Epoch 385/2000 +2025-03-23 22:10:38,410 Current Learning Rate: 0.0009861850 +2025-03-23 22:10:38,411 Train Loss: 0.0001765, Val Loss: 0.0001654 +2025-03-23 
22:10:38,411 Epoch 386/2000 +2025-03-23 22:15:18,809 Current Learning Rate: 0.0009879584 +2025-03-23 22:15:18,810 Train Loss: 0.0001685, Val Loss: 0.0001429 +2025-03-23 22:15:18,810 Epoch 387/2000 +2025-03-23 22:19:59,585 Current Learning Rate: 0.0009896114 +2025-03-23 22:19:59,585 Train Loss: 0.0001461, Val Loss: 0.0001303 +2025-03-23 22:19:59,585 Epoch 388/2000 +2025-03-23 22:24:40,047 Current Learning Rate: 0.0009911436 +2025-03-23 22:24:40,048 Train Loss: 0.0001450, Val Loss: 0.0001385 +2025-03-23 22:24:40,048 Epoch 389/2000 +2025-03-23 22:29:20,436 Current Learning Rate: 0.0009925547 +2025-03-23 22:29:20,436 Train Loss: 0.0001501, Val Loss: 0.0002103 +2025-03-23 22:29:20,437 Epoch 390/2000 +2025-03-23 22:34:01,303 Current Learning Rate: 0.0009938442 +2025-03-23 22:34:01,304 Train Loss: 0.0002077, Val Loss: 0.0001814 +2025-03-23 22:34:01,304 Epoch 391/2000 +2025-03-23 22:38:41,690 Current Learning Rate: 0.0009950118 +2025-03-23 22:38:41,691 Train Loss: 0.0001451, Val Loss: 0.0001456 +2025-03-23 22:38:41,691 Epoch 392/2000 +2025-03-23 22:43:22,884 Current Learning Rate: 0.0009960574 +2025-03-23 22:43:22,885 Train Loss: 0.0001469, Val Loss: 0.0001468 +2025-03-23 22:43:22,885 Epoch 393/2000 +2025-03-23 22:48:03,540 Current Learning Rate: 0.0009969805 +2025-03-23 22:48:03,541 Train Loss: 0.0001584, Val Loss: 0.0001617 +2025-03-23 22:48:03,541 Epoch 394/2000 +2025-03-23 22:52:44,381 Current Learning Rate: 0.0009977810 +2025-03-23 22:52:44,382 Train Loss: 0.0001891, Val Loss: 0.0001764 +2025-03-23 22:52:44,382 Epoch 395/2000 +2025-03-23 22:57:25,223 Current Learning Rate: 0.0009984587 +2025-03-23 22:57:25,223 Train Loss: 0.0001597, Val Loss: 0.0001497 +2025-03-23 22:57:25,223 Epoch 396/2000 +2025-03-23 23:02:06,295 Current Learning Rate: 0.0009990134 +2025-03-23 23:02:06,296 Train Loss: 0.0001606, Val Loss: 0.0001557 +2025-03-23 23:02:06,297 Epoch 397/2000 +2025-03-23 23:06:47,270 Current Learning Rate: 0.0009994449 +2025-03-23 23:06:47,270 Train Loss: 0.0001902, Val 
Loss: 0.0001732 +2025-03-23 23:06:47,270 Epoch 398/2000 +2025-03-23 23:11:27,796 Current Learning Rate: 0.0009997533 +2025-03-23 23:11:27,801 Train Loss: 0.0001608, Val Loss: 0.0001318 +2025-03-23 23:11:27,801 Epoch 399/2000 +2025-03-23 23:16:08,321 Current Learning Rate: 0.0009999383 +2025-03-23 23:16:08,322 Train Loss: 0.0001421, Val Loss: 0.0001424 +2025-03-23 23:16:08,322 Epoch 400/2000 +2025-03-23 23:20:48,983 Current Learning Rate: 0.0010000000 +2025-03-23 23:20:48,984 Train Loss: 0.0001382, Val Loss: 0.0001607 +2025-03-23 23:20:48,984 Epoch 401/2000 +2025-03-23 23:25:30,043 Current Learning Rate: 0.0009999383 +2025-03-23 23:25:30,043 Train Loss: 0.0001454, Val Loss: 0.0001848 +2025-03-23 23:25:30,044 Epoch 402/2000 +2025-03-23 23:30:10,429 Current Learning Rate: 0.0009997533 +2025-03-23 23:30:10,429 Train Loss: 0.0001601, Val Loss: 0.0001454 +2025-03-23 23:30:10,430 Epoch 403/2000 +2025-03-23 23:34:50,394 Current Learning Rate: 0.0009994449 +2025-03-23 23:34:50,394 Train Loss: 0.0001678, Val Loss: 0.0001657 +2025-03-23 23:34:50,394 Epoch 404/2000 +2025-03-23 23:39:31,086 Current Learning Rate: 0.0009990134 +2025-03-23 23:39:31,086 Train Loss: 0.0001794, Val Loss: 0.0001574 +2025-03-23 23:39:31,086 Epoch 405/2000 +2025-03-23 23:44:12,038 Current Learning Rate: 0.0009984587 +2025-03-23 23:44:12,039 Train Loss: 0.0001267, Val Loss: 0.0001264 +2025-03-23 23:44:12,039 Epoch 406/2000 +2025-03-23 23:48:52,943 Current Learning Rate: 0.0009977810 +2025-03-23 23:48:52,944 Train Loss: 0.0001400, Val Loss: 0.0001343 +2025-03-23 23:48:52,944 Epoch 407/2000 +2025-03-23 23:53:33,441 Current Learning Rate: 0.0009969805 +2025-03-23 23:53:33,442 Train Loss: 0.0001816, Val Loss: 0.0001665 +2025-03-23 23:53:33,442 Epoch 408/2000 +2025-03-23 23:58:14,776 Current Learning Rate: 0.0009960574 +2025-03-23 23:58:14,777 Train Loss: 0.0001352, Val Loss: 0.0001226 +2025-03-23 23:58:14,777 Epoch 409/2000 +2025-03-24 00:02:54,867 Current Learning Rate: 0.0009950118 +2025-03-24 
00:02:54,867 Train Loss: 0.0001386, Val Loss: 0.0001191 +2025-03-24 00:02:54,867 Epoch 410/2000 +2025-03-24 00:07:34,899 Current Learning Rate: 0.0009938442 +2025-03-24 00:07:34,899 Train Loss: 0.0001359, Val Loss: 0.0001403 +2025-03-24 00:07:34,899 Epoch 411/2000 +2025-03-24 00:12:14,800 Current Learning Rate: 0.0009925547 +2025-03-24 00:12:14,801 Train Loss: 0.0001461, Val Loss: 0.0001181 +2025-03-24 00:12:14,801 Epoch 412/2000 +2025-03-24 00:16:55,248 Current Learning Rate: 0.0009911436 +2025-03-24 00:16:55,248 Train Loss: 0.0001445, Val Loss: 0.0001233 +2025-03-24 00:16:55,248 Epoch 413/2000 +2025-03-24 00:21:35,795 Current Learning Rate: 0.0009896114 +2025-03-24 00:21:35,796 Train Loss: 0.0001460, Val Loss: 0.0001359 +2025-03-24 00:21:35,796 Epoch 414/2000 +2025-03-24 00:26:16,780 Current Learning Rate: 0.0009879584 +2025-03-24 00:26:16,781 Train Loss: 0.0001724, Val Loss: 0.0001909 +2025-03-24 00:26:16,781 Epoch 415/2000 +2025-03-24 00:30:57,744 Current Learning Rate: 0.0009861850 +2025-03-24 00:30:57,744 Train Loss: 0.0001721, Val Loss: 0.0001383 +2025-03-24 00:30:57,745 Epoch 416/2000 +2025-03-24 00:35:38,944 Current Learning Rate: 0.0009842916 +2025-03-24 00:35:38,945 Train Loss: 0.0001451, Val Loss: 0.0001247 +2025-03-24 00:35:38,945 Epoch 417/2000 +2025-03-24 00:40:20,119 Current Learning Rate: 0.0009822787 +2025-03-24 00:40:20,119 Train Loss: 0.0001803, Val Loss: 0.0001765 +2025-03-24 00:40:20,120 Epoch 418/2000 +2025-03-24 00:45:00,686 Current Learning Rate: 0.0009801468 +2025-03-24 00:45:00,686 Train Loss: 0.0001828, Val Loss: 0.0001325 +2025-03-24 00:45:00,687 Epoch 419/2000 +2025-03-24 00:49:41,355 Current Learning Rate: 0.0009778965 +2025-03-24 00:49:41,355 Train Loss: 0.0001223, Val Loss: 0.0001224 +2025-03-24 00:49:41,356 Epoch 420/2000 +2025-03-24 00:54:22,104 Current Learning Rate: 0.0009755283 +2025-03-24 00:54:22,105 Train Loss: 0.0001374, Val Loss: 0.0001229 +2025-03-24 00:54:22,105 Epoch 421/2000 +2025-03-24 00:59:02,953 Current Learning 
Rate: 0.0009730427 +2025-03-24 00:59:02,954 Train Loss: 0.0001328, Val Loss: 0.0001304 +2025-03-24 00:59:02,954 Epoch 422/2000 +2025-03-24 01:03:43,998 Current Learning Rate: 0.0009704404 +2025-03-24 01:03:43,999 Train Loss: 0.0001401, Val Loss: 0.0001325 +2025-03-24 01:03:43,999 Epoch 423/2000 +2025-03-24 01:08:25,386 Current Learning Rate: 0.0009677220 +2025-03-24 01:08:25,387 Train Loss: 0.0001409, Val Loss: 0.0001333 +2025-03-24 01:08:25,388 Epoch 424/2000 +2025-03-24 01:13:05,692 Current Learning Rate: 0.0009648882 +2025-03-24 01:13:06,497 Train Loss: 0.0001194, Val Loss: 0.0001135 +2025-03-24 01:13:06,498 Epoch 425/2000 +2025-03-24 01:17:46,249 Current Learning Rate: 0.0009619398 +2025-03-24 01:17:46,250 Train Loss: 0.0001490, Val Loss: 0.0001255 +2025-03-24 01:17:46,250 Epoch 426/2000 +2025-03-24 01:22:27,021 Current Learning Rate: 0.0009588773 +2025-03-24 01:22:27,021 Train Loss: 0.0001257, Val Loss: 0.0001173 +2025-03-24 01:22:27,022 Epoch 427/2000 +2025-03-24 01:27:07,425 Current Learning Rate: 0.0009557016 +2025-03-24 01:27:07,426 Train Loss: 0.0001845, Val Loss: 0.0001297 +2025-03-24 01:27:07,426 Epoch 428/2000 +2025-03-24 01:31:47,582 Current Learning Rate: 0.0009524135 +2025-03-24 01:31:47,582 Train Loss: 0.0001381, Val Loss: 0.0001304 +2025-03-24 01:31:47,582 Epoch 429/2000 +2025-03-24 01:36:28,246 Current Learning Rate: 0.0009490138 +2025-03-24 01:36:28,247 Train Loss: 0.0001577, Val Loss: 0.0001213 +2025-03-24 01:36:28,247 Epoch 430/2000 +2025-03-24 01:41:08,840 Current Learning Rate: 0.0009455033 +2025-03-24 01:41:08,840 Train Loss: 0.0001277, Val Loss: 0.0001240 +2025-03-24 01:41:08,841 Epoch 431/2000 +2025-03-24 01:45:49,931 Current Learning Rate: 0.0009418828 +2025-03-24 01:45:49,932 Train Loss: 0.0001398, Val Loss: 0.0001360 +2025-03-24 01:45:49,932 Epoch 432/2000 +2025-03-24 01:50:31,170 Current Learning Rate: 0.0009381533 +2025-03-24 01:50:31,170 Train Loss: 0.0001215, Val Loss: 0.0001203 +2025-03-24 01:50:31,171 Epoch 433/2000 +2025-03-24 
01:55:11,664 Current Learning Rate: 0.0009343158 +2025-03-24 01:55:11,665 Train Loss: 0.0001344, Val Loss: 0.0001562 +2025-03-24 01:55:11,665 Epoch 434/2000 +2025-03-24 01:59:52,071 Current Learning Rate: 0.0009303710 +2025-03-24 01:59:52,072 Train Loss: 0.0001375, Val Loss: 0.0001389 +2025-03-24 01:59:52,072 Epoch 435/2000 +2025-03-24 02:04:32,505 Current Learning Rate: 0.0009263201 +2025-03-24 02:04:32,506 Train Loss: 0.0001459, Val Loss: 0.0001448 +2025-03-24 02:04:32,506 Epoch 436/2000 +2025-03-24 02:09:13,090 Current Learning Rate: 0.0009221640 +2025-03-24 02:09:13,091 Train Loss: 0.0001613, Val Loss: 0.0001678 +2025-03-24 02:09:13,091 Epoch 437/2000 +2025-03-24 02:13:53,524 Current Learning Rate: 0.0009179037 +2025-03-24 02:13:53,524 Train Loss: 0.0001258, Val Loss: 0.0001211 +2025-03-24 02:13:53,525 Epoch 438/2000 +2025-03-24 02:18:34,106 Current Learning Rate: 0.0009135403 +2025-03-24 02:18:34,107 Train Loss: 0.0001187, Val Loss: 0.0001297 +2025-03-24 02:18:34,108 Epoch 439/2000 +2025-03-24 02:23:14,404 Current Learning Rate: 0.0009090749 +2025-03-24 02:23:14,404 Train Loss: 0.0001251, Val Loss: 0.0001261 +2025-03-24 02:23:14,405 Epoch 440/2000 +2025-03-24 02:27:55,183 Current Learning Rate: 0.0009045085 +2025-03-24 02:27:55,183 Train Loss: 0.0002170, Val Loss: 0.0001436 +2025-03-24 02:27:55,184 Epoch 441/2000 +2025-03-24 02:32:36,202 Current Learning Rate: 0.0008998423 +2025-03-24 02:32:36,203 Train Loss: 0.0001681, Val Loss: 0.0001166 +2025-03-24 02:32:36,204 Epoch 442/2000 +2025-03-24 02:37:16,718 Current Learning Rate: 0.0008950775 +2025-03-24 02:37:17,667 Train Loss: 0.0001281, Val Loss: 0.0001078 +2025-03-24 02:37:17,668 Epoch 443/2000 +2025-03-24 02:41:57,859 Current Learning Rate: 0.0008902152 +2025-03-24 02:41:57,859 Train Loss: 0.0001145, Val Loss: 0.0001112 +2025-03-24 02:41:57,860 Epoch 444/2000 +2025-03-24 02:46:38,252 Current Learning Rate: 0.0008852566 +2025-03-24 02:46:38,252 Train Loss: 0.0001373, Val Loss: 0.0001156 +2025-03-24 
02:46:38,253 Epoch 445/2000 +2025-03-24 02:51:18,859 Current Learning Rate: 0.0008802030 +2025-03-24 02:51:18,860 Train Loss: 0.0001231, Val Loss: 0.0001098 +2025-03-24 02:51:18,860 Epoch 446/2000 +2025-03-24 02:55:59,214 Current Learning Rate: 0.0008750555 +2025-03-24 02:55:59,214 Train Loss: 0.0001245, Val Loss: 0.0001107 +2025-03-24 02:55:59,214 Epoch 447/2000 +2025-03-24 03:00:40,236 Current Learning Rate: 0.0008698155 +2025-03-24 03:00:41,192 Train Loss: 0.0001263, Val Loss: 0.0001025 +2025-03-24 03:00:41,192 Epoch 448/2000 +2025-03-24 03:05:21,429 Current Learning Rate: 0.0008644843 +2025-03-24 03:05:21,429 Train Loss: 0.0001484, Val Loss: 0.0001141 +2025-03-24 03:05:21,430 Epoch 449/2000 +2025-03-24 03:10:02,089 Current Learning Rate: 0.0008590631 +2025-03-24 03:10:02,090 Train Loss: 0.0001241, Val Loss: 0.0001293 +2025-03-24 03:10:02,090 Epoch 450/2000 +2025-03-24 03:14:43,124 Current Learning Rate: 0.0008535534 +2025-03-24 03:14:43,124 Train Loss: 0.0001370, Val Loss: 0.0001209 +2025-03-24 03:14:43,124 Epoch 451/2000 +2025-03-24 03:19:23,262 Current Learning Rate: 0.0008479564 +2025-03-24 03:19:23,262 Train Loss: 0.0001193, Val Loss: 0.0001076 +2025-03-24 03:19:23,263 Epoch 452/2000 +2025-03-24 03:24:03,608 Current Learning Rate: 0.0008422736 +2025-03-24 03:24:03,608 Train Loss: 0.0001075, Val Loss: 0.0001131 +2025-03-24 03:24:03,608 Epoch 453/2000 +2025-03-24 03:28:44,783 Current Learning Rate: 0.0008365063 +2025-03-24 03:28:44,783 Train Loss: 0.0001241, Val Loss: 0.0001042 +2025-03-24 03:28:44,784 Epoch 454/2000 +2025-03-24 03:33:25,642 Current Learning Rate: 0.0008306559 +2025-03-24 03:33:25,642 Train Loss: 0.0001162, Val Loss: 0.0001266 +2025-03-24 03:33:25,642 Epoch 455/2000 +2025-03-24 03:38:06,058 Current Learning Rate: 0.0008247240 +2025-03-24 03:38:06,058 Train Loss: 0.0001504, Val Loss: 0.0001724 +2025-03-24 03:38:06,058 Epoch 456/2000 +2025-03-24 03:42:46,758 Current Learning Rate: 0.0008187120 +2025-03-24 03:42:46,759 Train Loss: 0.0001336, Val 
Loss: 0.0001186 +2025-03-24 03:42:46,759 Epoch 457/2000 +2025-03-24 03:47:27,434 Current Learning Rate: 0.0008126213 +2025-03-24 03:47:27,434 Train Loss: 0.0001205, Val Loss: 0.0001134 +2025-03-24 03:47:27,434 Epoch 458/2000 +2025-03-24 03:52:08,426 Current Learning Rate: 0.0008064535 +2025-03-24 03:52:08,427 Train Loss: 0.0001099, Val Loss: 0.0001082 +2025-03-24 03:52:08,427 Epoch 459/2000 +2025-03-24 03:56:49,113 Current Learning Rate: 0.0008002101 +2025-03-24 03:56:49,113 Train Loss: 0.0001122, Val Loss: 0.0001119 +2025-03-24 03:56:49,113 Epoch 460/2000 +2025-03-24 04:01:29,713 Current Learning Rate: 0.0007938926 +2025-03-24 04:01:29,713 Train Loss: 0.0001139, Val Loss: 0.0001199 +2025-03-24 04:01:29,713 Epoch 461/2000 +2025-03-24 04:06:10,240 Current Learning Rate: 0.0007875026 +2025-03-24 04:06:10,240 Train Loss: 0.0001258, Val Loss: 0.0001212 +2025-03-24 04:06:10,240 Epoch 462/2000 +2025-03-24 04:10:51,187 Current Learning Rate: 0.0007810417 +2025-03-24 04:10:51,188 Train Loss: 0.0001054, Val Loss: 0.0001060 +2025-03-24 04:10:51,188 Epoch 463/2000 +2025-03-24 04:15:31,386 Current Learning Rate: 0.0007745114 +2025-03-24 04:15:31,387 Train Loss: 0.0001086, Val Loss: 0.0001041 +2025-03-24 04:15:31,387 Epoch 464/2000 +2025-03-24 04:20:11,746 Current Learning Rate: 0.0007679134 +2025-03-24 04:20:11,746 Train Loss: 0.0001105, Val Loss: 0.0001077 +2025-03-24 04:20:11,746 Epoch 465/2000 +2025-03-24 04:24:52,713 Current Learning Rate: 0.0007612493 +2025-03-24 04:24:52,713 Train Loss: 0.0001188, Val Loss: 0.0001052 +2025-03-24 04:24:52,714 Epoch 466/2000 +2025-03-24 04:29:32,886 Current Learning Rate: 0.0007545207 +2025-03-24 04:29:32,887 Train Loss: 0.0001046, Val Loss: 0.0001071 +2025-03-24 04:29:32,887 Epoch 467/2000 +2025-03-24 04:34:13,050 Current Learning Rate: 0.0007477293 +2025-03-24 04:34:13,051 Train Loss: 0.0001505, Val Loss: 0.0001097 +2025-03-24 04:34:13,051 Epoch 468/2000 +2025-03-24 04:38:52,849 Current Learning Rate: 0.0007408768 +2025-03-24 
04:38:52,849 Train Loss: 0.0001156, Val Loss: 0.0001206 +2025-03-24 04:38:52,849 Epoch 469/2000 +2025-03-24 04:43:33,329 Current Learning Rate: 0.0007339649 +2025-03-24 04:43:34,182 Train Loss: 0.0000934, Val Loss: 0.0001022 +2025-03-24 04:43:34,182 Epoch 470/2000 +2025-03-24 04:48:13,888 Current Learning Rate: 0.0007269952 +2025-03-24 04:48:13,889 Train Loss: 0.0001272, Val Loss: 0.0001265 +2025-03-24 04:48:13,889 Epoch 471/2000 +2025-03-24 04:52:54,074 Current Learning Rate: 0.0007199696 +2025-03-24 04:52:54,075 Train Loss: 0.0001328, Val Loss: 0.0001091 +2025-03-24 04:52:54,075 Epoch 472/2000 +2025-03-24 04:57:34,406 Current Learning Rate: 0.0007128896 +2025-03-24 04:57:34,406 Train Loss: 0.0001109, Val Loss: 0.0001125 +2025-03-24 04:57:34,407 Epoch 473/2000 +2025-03-24 05:02:14,769 Current Learning Rate: 0.0007057572 +2025-03-24 05:02:15,601 Train Loss: 0.0000912, Val Loss: 0.0000979 +2025-03-24 05:02:15,601 Epoch 474/2000 +2025-03-24 05:06:55,358 Current Learning Rate: 0.0006985739 +2025-03-24 05:06:55,359 Train Loss: 0.0000936, Val Loss: 0.0001051 +2025-03-24 05:06:55,359 Epoch 475/2000 +2025-03-24 05:11:35,802 Current Learning Rate: 0.0006913417 +2025-03-24 05:11:35,802 Train Loss: 0.0001030, Val Loss: 0.0000991 +2025-03-24 05:11:35,802 Epoch 476/2000 +2025-03-24 05:16:15,905 Current Learning Rate: 0.0006840623 +2025-03-24 05:16:16,770 Train Loss: 0.0001147, Val Loss: 0.0000973 +2025-03-24 05:16:16,770 Epoch 477/2000 +2025-03-24 05:20:56,496 Current Learning Rate: 0.0006767374 +2025-03-24 05:20:56,497 Train Loss: 0.0001057, Val Loss: 0.0000975 +2025-03-24 05:20:56,497 Epoch 478/2000 +2025-03-24 05:25:36,747 Current Learning Rate: 0.0006693690 +2025-03-24 05:25:37,679 Train Loss: 0.0001024, Val Loss: 0.0000952 +2025-03-24 05:25:37,680 Epoch 479/2000 +2025-03-24 05:30:18,043 Current Learning Rate: 0.0006619587 +2025-03-24 05:30:18,044 Train Loss: 0.0001176, Val Loss: 0.0001028 +2025-03-24 05:30:18,045 Epoch 480/2000 +2025-03-24 05:34:58,754 Current Learning 
Rate: 0.0006545085 +2025-03-24 05:34:58,754 Train Loss: 0.0001166, Val Loss: 0.0001075 +2025-03-24 05:34:58,755 Epoch 481/2000 +2025-03-24 05:39:39,022 Current Learning Rate: 0.0006470202 +2025-03-24 05:39:39,023 Train Loss: 0.0001099, Val Loss: 0.0001107 +2025-03-24 05:39:39,023 Epoch 482/2000 +2025-03-24 05:44:19,863 Current Learning Rate: 0.0006394956 +2025-03-24 05:44:19,864 Train Loss: 0.0001098, Val Loss: 0.0001016 +2025-03-24 05:44:19,864 Epoch 483/2000 +2025-03-24 05:49:00,644 Current Learning Rate: 0.0006319365 +2025-03-24 05:49:00,644 Train Loss: 0.0000906, Val Loss: 0.0000979 +2025-03-24 05:49:00,644 Epoch 484/2000 +2025-03-24 05:53:40,963 Current Learning Rate: 0.0006243449 +2025-03-24 05:53:41,869 Train Loss: 0.0000921, Val Loss: 0.0000917 +2025-03-24 05:53:41,870 Epoch 485/2000 +2025-03-24 05:58:21,811 Current Learning Rate: 0.0006167227 +2025-03-24 05:58:21,813 Train Loss: 0.0001097, Val Loss: 0.0001015 +2025-03-24 05:58:21,813 Epoch 486/2000 +2025-03-24 06:03:02,719 Current Learning Rate: 0.0006090716 +2025-03-24 06:03:02,720 Train Loss: 0.0001057, Val Loss: 0.0000952 +2025-03-24 06:03:02,721 Epoch 487/2000 +2025-03-24 06:07:43,667 Current Learning Rate: 0.0006013936 +2025-03-24 06:07:43,668 Train Loss: 0.0001010, Val Loss: 0.0001058 +2025-03-24 06:07:43,668 Epoch 488/2000 +2025-03-24 06:12:24,001 Current Learning Rate: 0.0005936907 +2025-03-24 06:12:24,002 Train Loss: 0.0001077, Val Loss: 0.0001175 +2025-03-24 06:12:24,002 Epoch 489/2000 +2025-03-24 06:17:03,856 Current Learning Rate: 0.0005859646 +2025-03-24 06:17:03,857 Train Loss: 0.0001099, Val Loss: 0.0001130 +2025-03-24 06:17:03,857 Epoch 490/2000 +2025-03-24 06:21:43,543 Current Learning Rate: 0.0005782172 +2025-03-24 06:21:43,544 Train Loss: 0.0001078, Val Loss: 0.0000945 +2025-03-24 06:21:43,545 Epoch 491/2000 +2025-03-24 06:26:23,921 Current Learning Rate: 0.0005704506 +2025-03-24 06:26:24,864 Train Loss: 0.0001065, Val Loss: 0.0000917 +2025-03-24 06:26:24,864 Epoch 492/2000 +2025-03-24 
06:31:05,425 Current Learning Rate: 0.0005626666 +2025-03-24 06:31:05,426 Train Loss: 0.0001133, Val Loss: 0.0000984 +2025-03-24 06:31:05,427 Epoch 493/2000 +2025-03-24 06:35:46,084 Current Learning Rate: 0.0005548672 +2025-03-24 06:35:46,084 Train Loss: 0.0001034, Val Loss: 0.0000921 +2025-03-24 06:35:46,085 Epoch 494/2000 +2025-03-24 06:40:26,481 Current Learning Rate: 0.0005470542 +2025-03-24 06:40:26,482 Train Loss: 0.0000886, Val Loss: 0.0000919 +2025-03-24 06:40:26,482 Epoch 495/2000 +2025-03-24 06:45:06,888 Current Learning Rate: 0.0005392295 +2025-03-24 06:45:06,889 Train Loss: 0.0001039, Val Loss: 0.0001059 +2025-03-24 06:45:06,889 Epoch 496/2000 +2025-03-24 06:49:47,620 Current Learning Rate: 0.0005313953 +2025-03-24 06:49:47,621 Train Loss: 0.0000932, Val Loss: 0.0000942 +2025-03-24 06:49:47,621 Epoch 497/2000 +2025-03-24 06:54:28,545 Current Learning Rate: 0.0005235532 +2025-03-24 06:54:29,623 Train Loss: 0.0000908, Val Loss: 0.0000890 +2025-03-24 06:54:29,623 Epoch 498/2000 +2025-03-24 06:59:09,598 Current Learning Rate: 0.0005157054 +2025-03-24 06:59:09,599 Train Loss: 0.0001120, Val Loss: 0.0000924 +2025-03-24 06:59:09,599 Epoch 499/2000 +2025-03-24 07:03:50,135 Current Learning Rate: 0.0005078537 +2025-03-24 07:03:50,136 Train Loss: 0.0000972, Val Loss: 0.0000910 +2025-03-24 07:03:50,136 Epoch 500/2000 +2025-03-24 07:08:30,656 Current Learning Rate: 0.0005000000 +2025-03-24 07:08:30,656 Train Loss: 0.0000927, Val Loss: 0.0000995 +2025-03-24 07:08:30,657 Epoch 501/2000 +2025-03-24 07:13:11,361 Current Learning Rate: 0.0004921463 +2025-03-24 07:13:11,362 Train Loss: 0.0001049, Val Loss: 0.0000931 +2025-03-24 07:13:11,362 Epoch 502/2000 +2025-03-24 07:17:51,860 Current Learning Rate: 0.0004842946 +2025-03-24 07:17:51,860 Train Loss: 0.0001004, Val Loss: 0.0001008 +2025-03-24 07:17:51,861 Epoch 503/2000 +2025-03-24 07:22:32,181 Current Learning Rate: 0.0004764468 +2025-03-24 07:22:33,076 Train Loss: 0.0000905, Val Loss: 0.0000867 +2025-03-24 
07:22:33,077 Epoch 504/2000 +2025-03-24 07:27:12,879 Current Learning Rate: 0.0004686047 +2025-03-24 07:27:14,001 Train Loss: 0.0000916, Val Loss: 0.0000851 +2025-03-24 07:27:14,001 Epoch 505/2000 +2025-03-24 07:31:53,792 Current Learning Rate: 0.0004607705 +2025-03-24 07:31:53,793 Train Loss: 0.0000991, Val Loss: 0.0000906 +2025-03-24 07:31:53,794 Epoch 506/2000 +2025-03-24 07:36:34,040 Current Learning Rate: 0.0004529458 +2025-03-24 07:36:35,109 Train Loss: 0.0000780, Val Loss: 0.0000831 +2025-03-24 07:36:35,110 Epoch 507/2000 +2025-03-24 07:41:15,189 Current Learning Rate: 0.0004451328 +2025-03-24 07:41:15,190 Train Loss: 0.0000870, Val Loss: 0.0000859 +2025-03-24 07:41:15,190 Epoch 508/2000 +2025-03-24 07:45:55,204 Current Learning Rate: 0.0004373334 +2025-03-24 07:45:55,205 Train Loss: 0.0000837, Val Loss: 0.0000849 +2025-03-24 07:45:55,205 Epoch 509/2000 +2025-03-24 07:50:35,703 Current Learning Rate: 0.0004295494 +2025-03-24 07:50:35,703 Train Loss: 0.0000849, Val Loss: 0.0000842 +2025-03-24 07:50:35,703 Epoch 510/2000 +2025-03-24 07:55:15,852 Current Learning Rate: 0.0004217828 +2025-03-24 07:55:15,853 Train Loss: 0.0000777, Val Loss: 0.0000840 +2025-03-24 07:55:15,853 Epoch 511/2000 +2025-03-24 07:59:56,050 Current Learning Rate: 0.0004140354 +2025-03-24 07:59:56,051 Train Loss: 0.0000919, Val Loss: 0.0000865 +2025-03-24 07:59:56,051 Epoch 512/2000 +2025-03-24 08:04:36,808 Current Learning Rate: 0.0004063093 +2025-03-24 08:04:36,809 Train Loss: 0.0000864, Val Loss: 0.0000907 +2025-03-24 08:04:36,809 Epoch 513/2000 +2025-03-24 08:09:17,606 Current Learning Rate: 0.0003986064 +2025-03-24 08:09:17,606 Train Loss: 0.0000938, Val Loss: 0.0000931 +2025-03-24 08:09:17,607 Epoch 514/2000 +2025-03-24 08:13:57,948 Current Learning Rate: 0.0003909284 +2025-03-24 08:13:57,948 Train Loss: 0.0000823, Val Loss: 0.0000843 +2025-03-24 08:13:57,948 Epoch 515/2000 +2025-03-24 08:18:38,469 Current Learning Rate: 0.0003832773 +2025-03-24 08:18:39,334 Train Loss: 0.0000900, Val 
Loss: 0.0000831 +2025-03-24 08:18:39,335 Epoch 516/2000 +2025-03-24 08:23:18,955 Current Learning Rate: 0.0003756551 +2025-03-24 08:23:18,956 Train Loss: 0.0000894, Val Loss: 0.0000837 +2025-03-24 08:23:18,956 Epoch 517/2000 +2025-03-24 08:27:59,121 Current Learning Rate: 0.0003680635 +2025-03-24 08:27:59,121 Train Loss: 0.0000874, Val Loss: 0.0000832 +2025-03-24 08:27:59,122 Epoch 518/2000 +2025-03-24 08:32:39,876 Current Learning Rate: 0.0003605044 +2025-03-24 08:32:39,876 Train Loss: 0.0000873, Val Loss: 0.0000846 +2025-03-24 08:32:39,876 Epoch 519/2000 +2025-03-24 08:37:20,280 Current Learning Rate: 0.0003529798 +2025-03-24 08:37:21,333 Train Loss: 0.0000774, Val Loss: 0.0000811 +2025-03-24 08:37:21,334 Epoch 520/2000 +2025-03-24 08:42:01,204 Current Learning Rate: 0.0003454915 +2025-03-24 08:42:01,205 Train Loss: 0.0000961, Val Loss: 0.0000829 +2025-03-24 08:42:01,205 Epoch 521/2000 +2025-03-24 08:46:41,633 Current Learning Rate: 0.0003380413 +2025-03-24 08:46:41,633 Train Loss: 0.0000868, Val Loss: 0.0000860 +2025-03-24 08:46:41,633 Epoch 522/2000 +2025-03-24 08:51:21,826 Current Learning Rate: 0.0003306310 +2025-03-24 08:51:21,827 Train Loss: 0.0000786, Val Loss: 0.0000836 +2025-03-24 08:51:21,827 Epoch 523/2000 +2025-03-24 08:56:02,910 Current Learning Rate: 0.0003232626 +2025-03-24 08:56:02,911 Train Loss: 0.0000790, Val Loss: 0.0000879 +2025-03-24 08:56:02,912 Epoch 524/2000 +2025-03-24 09:00:43,312 Current Learning Rate: 0.0003159377 +2025-03-24 09:00:43,313 Train Loss: 0.0000702, Val Loss: 0.0000822 +2025-03-24 09:00:43,313 Epoch 525/2000 +2025-03-24 09:05:23,388 Current Learning Rate: 0.0003086583 +2025-03-24 09:05:23,389 Train Loss: 0.0000853, Val Loss: 0.0000815 +2025-03-24 09:05:23,389 Epoch 526/2000 +2025-03-24 09:10:03,614 Current Learning Rate: 0.0003014261 +2025-03-24 09:10:04,499 Train Loss: 0.0000764, Val Loss: 0.0000793 +2025-03-24 09:10:04,499 Epoch 527/2000 +2025-03-24 09:14:44,365 Current Learning Rate: 0.0002942428 +2025-03-24 
09:14:44,366 Train Loss: 0.0000785, Val Loss: 0.0000813 +2025-03-24 09:14:44,366 Epoch 528/2000 +2025-03-24 09:19:24,714 Current Learning Rate: 0.0002871104 +2025-03-24 09:19:24,715 Train Loss: 0.0000802, Val Loss: 0.0000805 +2025-03-24 09:19:24,715 Epoch 529/2000 +2025-03-24 09:24:05,313 Current Learning Rate: 0.0002800304 +2025-03-24 09:24:05,314 Train Loss: 0.0000770, Val Loss: 0.0000800 +2025-03-24 09:24:05,314 Epoch 530/2000 +2025-03-24 09:28:45,882 Current Learning Rate: 0.0002730048 +2025-03-24 09:28:45,883 Train Loss: 0.0000871, Val Loss: 0.0000812 +2025-03-24 09:28:45,883 Epoch 531/2000 +2025-03-24 09:33:27,386 Current Learning Rate: 0.0002660351 +2025-03-24 09:33:27,386 Train Loss: 0.0000851, Val Loss: 0.0000805 +2025-03-24 09:33:27,386 Epoch 532/2000 +2025-03-24 09:38:07,643 Current Learning Rate: 0.0002591232 +2025-03-24 09:38:07,644 Train Loss: 0.0000866, Val Loss: 0.0000816 +2025-03-24 09:38:07,644 Epoch 533/2000 +2025-03-24 09:42:48,345 Current Learning Rate: 0.0002522707 +2025-03-24 09:42:48,346 Train Loss: 0.0000815, Val Loss: 0.0000818 +2025-03-24 09:42:48,346 Epoch 534/2000 +2025-03-24 09:47:28,587 Current Learning Rate: 0.0002454793 +2025-03-24 09:47:29,486 Train Loss: 0.0000742, Val Loss: 0.0000791 +2025-03-24 09:47:29,486 Epoch 535/2000 +2025-03-24 09:52:09,721 Current Learning Rate: 0.0002387507 +2025-03-24 09:52:09,722 Train Loss: 0.0000778, Val Loss: 0.0000798 +2025-03-24 09:52:09,722 Epoch 536/2000 +2025-03-24 09:56:50,093 Current Learning Rate: 0.0002320866 +2025-03-24 09:56:51,044 Train Loss: 0.0000737, Val Loss: 0.0000783 +2025-03-24 09:56:51,044 Epoch 537/2000 +2025-03-24 10:01:31,101 Current Learning Rate: 0.0002254886 +2025-03-24 10:01:31,102 Train Loss: 0.0000896, Val Loss: 0.0000794 +2025-03-24 10:01:31,102 Epoch 538/2000 +2025-03-24 10:06:11,550 Current Learning Rate: 0.0002189583 +2025-03-24 10:06:12,449 Train Loss: 0.0000734, Val Loss: 0.0000771 +2025-03-24 10:06:12,449 Epoch 539/2000 +2025-03-24 10:10:52,462 Current Learning 
Rate: 0.0002124974 +2025-03-24 10:10:52,462 Train Loss: 0.0000761, Val Loss: 0.0000773 +2025-03-24 10:10:52,463 Epoch 540/2000 +2025-03-24 10:15:33,635 Current Learning Rate: 0.0002061074 +2025-03-24 10:15:33,635 Train Loss: 0.0000769, Val Loss: 0.0000783 +2025-03-24 10:15:33,635 Epoch 541/2000 +2025-03-24 10:20:14,394 Current Learning Rate: 0.0001997899 +2025-03-24 10:20:14,395 Train Loss: 0.0000759, Val Loss: 0.0000782 +2025-03-24 10:20:14,395 Epoch 542/2000 +2025-03-24 10:24:55,024 Current Learning Rate: 0.0001935465 +2025-03-24 10:24:55,024 Train Loss: 0.0000778, Val Loss: 0.0000774 +2025-03-24 10:24:55,024 Epoch 543/2000 +2025-03-24 10:29:35,169 Current Learning Rate: 0.0001873787 +2025-03-24 10:29:36,045 Train Loss: 0.0000739, Val Loss: 0.0000766 +2025-03-24 10:29:36,045 Epoch 544/2000 +2025-03-24 10:34:15,965 Current Learning Rate: 0.0001812880 +2025-03-24 10:34:17,006 Train Loss: 0.0000736, Val Loss: 0.0000762 +2025-03-24 10:34:17,007 Epoch 545/2000 +2025-03-24 10:38:57,020 Current Learning Rate: 0.0001752760 +2025-03-24 10:38:57,021 Train Loss: 0.0000739, Val Loss: 0.0000772 +2025-03-24 10:38:57,021 Epoch 546/2000 +2025-03-24 10:43:38,440 Current Learning Rate: 0.0001693441 +2025-03-24 10:43:38,441 Train Loss: 0.0000745, Val Loss: 0.0000778 +2025-03-24 10:43:38,441 Epoch 547/2000 +2025-03-24 10:48:18,695 Current Learning Rate: 0.0001634937 +2025-03-24 10:48:19,529 Train Loss: 0.0000627, Val Loss: 0.0000757 +2025-03-24 10:48:19,529 Epoch 548/2000 +2025-03-24 10:52:58,750 Current Learning Rate: 0.0001577264 +2025-03-24 10:52:58,750 Train Loss: 0.0000711, Val Loss: 0.0000762 +2025-03-24 10:52:58,750 Epoch 549/2000 +2025-03-24 10:57:38,956 Current Learning Rate: 0.0001520436 +2025-03-24 10:57:38,956 Train Loss: 0.0000788, Val Loss: 0.0000765 +2025-03-24 10:57:38,957 Epoch 550/2000 +2025-03-24 11:02:19,356 Current Learning Rate: 0.0001464466 +2025-03-24 11:02:19,356 Train Loss: 0.0000945, Val Loss: 0.0000758 +2025-03-24 11:02:19,356 Epoch 551/2000 +2025-03-24 
11:06:59,735 Current Learning Rate: 0.0001409369 +2025-03-24 11:07:00,554 Train Loss: 0.0000771, Val Loss: 0.0000755 +2025-03-24 11:07:00,554 Epoch 552/2000 +2025-03-24 11:11:40,027 Current Learning Rate: 0.0001355157 +2025-03-24 11:11:40,960 Train Loss: 0.0000800, Val Loss: 0.0000749 +2025-03-24 11:11:40,960 Epoch 553/2000 +2025-03-24 11:16:20,996 Current Learning Rate: 0.0001301845 +2025-03-24 11:16:22,065 Train Loss: 0.0000655, Val Loss: 0.0000748 +2025-03-24 11:16:22,065 Epoch 554/2000 +2025-03-24 11:21:02,252 Current Learning Rate: 0.0001249445 +2025-03-24 11:21:02,253 Train Loss: 0.0000745, Val Loss: 0.0000751 +2025-03-24 11:21:02,253 Epoch 555/2000 +2025-03-24 11:25:42,836 Current Learning Rate: 0.0001197970 +2025-03-24 11:25:42,837 Train Loss: 0.0000714, Val Loss: 0.0000754 +2025-03-24 11:25:42,837 Epoch 556/2000 +2025-03-24 11:30:23,070 Current Learning Rate: 0.0001147434 +2025-03-24 11:30:24,027 Train Loss: 0.0000744, Val Loss: 0.0000748 +2025-03-24 11:30:24,027 Epoch 557/2000 +2025-03-24 11:35:03,564 Current Learning Rate: 0.0001097848 +2025-03-24 11:35:04,476 Train Loss: 0.0000755, Val Loss: 0.0000744 +2025-03-24 11:35:04,476 Epoch 558/2000 +2025-03-24 11:39:44,863 Current Learning Rate: 0.0001049225 +2025-03-24 11:39:44,864 Train Loss: 0.0000748, Val Loss: 0.0000748 +2025-03-24 11:39:44,864 Epoch 559/2000 +2025-03-24 11:44:25,864 Current Learning Rate: 0.0001001577 +2025-03-24 11:44:26,814 Train Loss: 0.0000670, Val Loss: 0.0000741 +2025-03-24 11:44:26,815 Epoch 560/2000 +2025-03-24 11:49:06,898 Current Learning Rate: 0.0000954915 +2025-03-24 11:49:06,899 Train Loss: 0.0000719, Val Loss: 0.0000745 +2025-03-24 11:49:06,900 Epoch 561/2000 +2025-03-24 11:53:47,226 Current Learning Rate: 0.0000909251 +2025-03-24 11:53:47,227 Train Loss: 0.0000677, Val Loss: 0.0000741 +2025-03-24 11:53:47,227 Epoch 562/2000 +2025-03-24 11:58:28,259 Current Learning Rate: 0.0000864597 +2025-03-24 11:58:29,290 Train Loss: 0.0000796, Val Loss: 0.0000739 +2025-03-24 
11:58:29,290 Epoch 563/2000 +2025-03-24 12:03:09,391 Current Learning Rate: 0.0000820963 +2025-03-24 12:03:09,392 Train Loss: 0.0000731, Val Loss: 0.0000743 +2025-03-24 12:03:09,392 Epoch 564/2000 +2025-03-24 12:07:50,033 Current Learning Rate: 0.0000778360 +2025-03-24 12:07:50,034 Train Loss: 0.0000767, Val Loss: 0.0000743 +2025-03-24 12:07:50,034 Epoch 565/2000 +2025-03-24 12:12:31,019 Current Learning Rate: 0.0000736799 +2025-03-24 12:12:31,936 Train Loss: 0.0000756, Val Loss: 0.0000735 +2025-03-24 12:12:31,937 Epoch 566/2000 +2025-03-24 12:17:11,868 Current Learning Rate: 0.0000696290 +2025-03-24 12:17:12,892 Train Loss: 0.0000667, Val Loss: 0.0000733 +2025-03-24 12:17:12,893 Epoch 567/2000 +2025-03-24 12:21:52,946 Current Learning Rate: 0.0000656842 +2025-03-24 12:21:53,884 Train Loss: 0.0000642, Val Loss: 0.0000732 +2025-03-24 12:21:53,885 Epoch 568/2000 +2025-03-24 12:26:33,506 Current Learning Rate: 0.0000618467 +2025-03-24 12:26:34,529 Train Loss: 0.0000718, Val Loss: 0.0000731 +2025-03-24 12:26:34,530 Epoch 569/2000 +2025-03-24 12:31:14,531 Current Learning Rate: 0.0000581172 +2025-03-24 12:31:14,532 Train Loss: 0.0000635, Val Loss: 0.0000732 +2025-03-24 12:31:14,533 Epoch 570/2000 +2025-03-24 12:35:55,114 Current Learning Rate: 0.0000544967 +2025-03-24 12:35:55,114 Train Loss: 0.0000677, Val Loss: 0.0000732 +2025-03-24 12:35:55,114 Epoch 571/2000 +2025-03-24 12:40:36,067 Current Learning Rate: 0.0000509862 +2025-03-24 12:40:36,986 Train Loss: 0.0000672, Val Loss: 0.0000728 +2025-03-24 12:40:36,986 Epoch 572/2000 +2025-03-24 12:45:16,907 Current Learning Rate: 0.0000475865 +2025-03-24 12:45:16,908 Train Loss: 0.0000712, Val Loss: 0.0000732 +2025-03-24 12:45:16,908 Epoch 573/2000 +2025-03-24 12:49:57,139 Current Learning Rate: 0.0000442984 +2025-03-24 12:49:58,022 Train Loss: 0.0000744, Val Loss: 0.0000728 +2025-03-24 12:49:58,022 Epoch 574/2000 +2025-03-24 12:54:37,904 Current Learning Rate: 0.0000411227 +2025-03-24 12:54:38,770 Train Loss: 0.0000653, Val 
Loss: 0.0000727 +2025-03-24 12:54:38,770 Epoch 575/2000 +2025-03-24 12:59:18,327 Current Learning Rate: 0.0000380602 +2025-03-24 12:59:19,192 Train Loss: 0.0000677, Val Loss: 0.0000727 +2025-03-24 12:59:19,193 Epoch 576/2000 +2025-03-24 13:03:58,862 Current Learning Rate: 0.0000351118 +2025-03-24 13:03:59,795 Train Loss: 0.0000610, Val Loss: 0.0000725 +2025-03-24 13:03:59,795 Epoch 577/2000 +2025-03-24 13:08:39,269 Current Learning Rate: 0.0000322780 +2025-03-24 13:08:40,111 Train Loss: 0.0000621, Val Loss: 0.0000725 +2025-03-24 13:08:40,111 Epoch 578/2000 +2025-03-24 13:13:19,681 Current Learning Rate: 0.0000295596 +2025-03-24 13:13:19,681 Train Loss: 0.0000775, Val Loss: 0.0000725 +2025-03-24 13:13:19,681 Epoch 579/2000 +2025-03-24 13:17:59,906 Current Learning Rate: 0.0000269573 +2025-03-24 13:18:00,780 Train Loss: 0.0000761, Val Loss: 0.0000724 +2025-03-24 13:18:00,780 Epoch 580/2000 +2025-03-24 13:22:40,245 Current Learning Rate: 0.0000244717 +2025-03-24 13:22:40,246 Train Loss: 0.0000770, Val Loss: 0.0000725 +2025-03-24 13:22:40,247 Epoch 581/2000 +2025-03-24 13:27:20,428 Current Learning Rate: 0.0000221035 +2025-03-24 13:27:21,325 Train Loss: 0.0000635, Val Loss: 0.0000723 +2025-03-24 13:27:21,326 Epoch 582/2000 +2025-03-24 13:32:01,278 Current Learning Rate: 0.0000198532 +2025-03-24 13:32:01,282 Train Loss: 0.0000733, Val Loss: 0.0000724 +2025-03-24 13:32:01,282 Epoch 583/2000 +2025-03-24 13:36:42,171 Current Learning Rate: 0.0000177213 +2025-03-24 13:36:43,148 Train Loss: 0.0000770, Val Loss: 0.0000723 +2025-03-24 13:36:43,148 Epoch 584/2000 +2025-03-24 13:41:23,738 Current Learning Rate: 0.0000157084 +2025-03-24 13:41:24,767 Train Loss: 0.0000749, Val Loss: 0.0000723 +2025-03-24 13:41:24,767 Epoch 585/2000 +2025-03-24 13:46:04,776 Current Learning Rate: 0.0000138150 +2025-03-24 13:46:05,779 Train Loss: 0.0000759, Val Loss: 0.0000722 +2025-03-24 13:46:05,780 Epoch 586/2000 +2025-03-24 13:50:45,632 Current Learning Rate: 0.0000120416 +2025-03-24 
13:50:46,526 Train Loss: 0.0000633, Val Loss: 0.0000722 +2025-03-24 13:50:46,527 Epoch 587/2000 +2025-03-24 13:55:26,519 Current Learning Rate: 0.0000103886 +2025-03-24 13:55:26,520 Train Loss: 0.0000677, Val Loss: 0.0000722 +2025-03-24 13:55:26,520 Epoch 588/2000 +2025-03-24 14:00:07,540 Current Learning Rate: 0.0000088564 +2025-03-24 14:00:08,557 Train Loss: 0.0000708, Val Loss: 0.0000721 +2025-03-24 14:00:08,557 Epoch 589/2000 +2025-03-24 14:04:49,277 Current Learning Rate: 0.0000074453 +2025-03-24 14:04:50,294 Train Loss: 0.0000727, Val Loss: 0.0000721 +2025-03-24 14:04:50,295 Epoch 590/2000 +2025-03-24 14:09:30,227 Current Learning Rate: 0.0000061558 +2025-03-24 14:09:31,093 Train Loss: 0.0000667, Val Loss: 0.0000721 +2025-03-24 14:09:31,094 Epoch 591/2000 +2025-03-24 14:14:10,779 Current Learning Rate: 0.0000049882 +2025-03-24 14:14:11,805 Train Loss: 0.0000737, Val Loss: 0.0000721 +2025-03-24 14:14:11,805 Epoch 592/2000 +2025-03-24 14:18:51,635 Current Learning Rate: 0.0000039426 +2025-03-24 14:18:52,686 Train Loss: 0.0000587, Val Loss: 0.0000720 +2025-03-24 14:18:52,687 Epoch 593/2000 +2025-03-24 14:23:32,894 Current Learning Rate: 0.0000030195 +2025-03-24 14:23:32,894 Train Loss: 0.0000748, Val Loss: 0.0000720 +2025-03-24 14:23:32,895 Epoch 594/2000 +2025-03-24 14:28:13,964 Current Learning Rate: 0.0000022190 +2025-03-24 14:28:14,967 Train Loss: 0.0000612, Val Loss: 0.0000720 +2025-03-24 14:28:14,967 Epoch 595/2000 +2025-03-24 14:32:54,920 Current Learning Rate: 0.0000015413 +2025-03-24 14:32:55,853 Train Loss: 0.0000838, Val Loss: 0.0000720 +2025-03-24 14:32:55,853 Epoch 596/2000 +2025-03-24 14:37:35,723 Current Learning Rate: 0.0000009866 +2025-03-24 14:37:35,724 Train Loss: 0.0000759, Val Loss: 0.0000720 +2025-03-24 14:37:35,724 Epoch 597/2000 +2025-03-24 14:42:16,403 Current Learning Rate: 0.0000005551 +2025-03-24 14:42:17,468 Train Loss: 0.0000760, Val Loss: 0.0000720 +2025-03-24 14:42:17,468 Epoch 598/2000 +2025-03-24 14:46:57,213 Current Learning 
Rate: 0.0000002467 +2025-03-24 14:46:58,286 Train Loss: 0.0000692, Val Loss: 0.0000720 +2025-03-24 14:46:58,286 Epoch 599/2000 +2025-03-24 14:51:38,094 Current Learning Rate: 0.0000000617 +2025-03-24 14:51:38,094 Train Loss: 0.0000677, Val Loss: 0.0000720 +2025-03-24 14:51:38,095 Epoch 600/2000 +2025-03-24 14:56:18,893 Current Learning Rate: 0.0000000000 +2025-03-24 14:56:18,894 Train Loss: 0.0000747, Val Loss: 0.0000720 +2025-03-24 14:56:18,894 Epoch 601/2000 +2025-03-24 15:00:59,699 Current Learning Rate: 0.0000000617 +2025-03-24 15:00:59,700 Train Loss: 0.0000825, Val Loss: 0.0000721 +2025-03-24 15:00:59,700 Epoch 602/2000 +2025-03-24 15:05:39,915 Current Learning Rate: 0.0000002467 +2025-03-24 15:05:39,915 Train Loss: 0.0000682, Val Loss: 0.0000720 +2025-03-24 15:05:39,915 Epoch 603/2000 +2025-03-24 15:10:19,965 Current Learning Rate: 0.0000005551 +2025-03-24 15:10:19,965 Train Loss: 0.0000710, Val Loss: 0.0000720 +2025-03-24 15:10:19,965 Epoch 604/2000 +2025-03-24 15:15:00,082 Current Learning Rate: 0.0000009866 +2025-03-24 15:15:00,082 Train Loss: 0.0000749, Val Loss: 0.0000720 +2025-03-24 15:15:00,082 Epoch 605/2000 +2025-03-24 15:19:40,387 Current Learning Rate: 0.0000015413 +2025-03-24 15:19:40,388 Train Loss: 0.0000739, Val Loss: 0.0000722 +2025-03-24 15:19:40,388 Epoch 606/2000 +2025-03-24 15:24:21,152 Current Learning Rate: 0.0000022190 +2025-03-24 15:24:21,152 Train Loss: 0.0000769, Val Loss: 0.0000720 +2025-03-24 15:24:21,153 Epoch 607/2000 +2025-03-24 15:29:01,654 Current Learning Rate: 0.0000030195 +2025-03-24 15:29:01,654 Train Loss: 0.0000710, Val Loss: 0.0000720 +2025-03-24 15:29:01,655 Epoch 608/2000 +2025-03-24 15:33:42,271 Current Learning Rate: 0.0000039426 +2025-03-24 15:33:42,272 Train Loss: 0.0000836, Val Loss: 0.0000720 +2025-03-24 15:33:42,273 Epoch 609/2000 +2025-03-24 15:38:23,366 Current Learning Rate: 0.0000049882 +2025-03-24 15:38:23,367 Train Loss: 0.0000685, Val Loss: 0.0000720 +2025-03-24 15:38:23,368 Epoch 610/2000 +2025-03-24 
15:43:03,741 Current Learning Rate: 0.0000061558 +2025-03-24 15:43:03,742 Train Loss: 0.0000678, Val Loss: 0.0000720 +2025-03-24 15:43:03,742 Epoch 611/2000 +2025-03-24 15:47:44,237 Current Learning Rate: 0.0000074453 +2025-03-24 15:47:44,238 Train Loss: 0.0000838, Val Loss: 0.0000720 +2025-03-24 15:47:44,238 Epoch 612/2000 +2025-03-24 15:52:25,260 Current Learning Rate: 0.0000088564 +2025-03-24 15:52:25,260 Train Loss: 0.0000708, Val Loss: 0.0000720 +2025-03-24 15:52:25,260 Epoch 613/2000 +2025-03-24 15:57:05,803 Current Learning Rate: 0.0000103886 +2025-03-24 15:57:05,803 Train Loss: 0.0000768, Val Loss: 0.0000720 +2025-03-24 15:57:05,803 Epoch 614/2000 +2025-03-24 16:01:46,262 Current Learning Rate: 0.0000120416 +2025-03-24 16:01:46,262 Train Loss: 0.0000689, Val Loss: 0.0000720 +2025-03-24 16:01:46,263 Epoch 615/2000 +2025-03-24 16:06:26,577 Current Learning Rate: 0.0000138150 +2025-03-24 16:06:26,578 Train Loss: 0.0000690, Val Loss: 0.0000721 +2025-03-24 16:06:26,578 Epoch 616/2000 +2025-03-24 16:11:07,064 Current Learning Rate: 0.0000157084 +2025-03-24 16:11:07,064 Train Loss: 0.0000710, Val Loss: 0.0000721 +2025-03-24 16:11:07,064 Epoch 617/2000 +2025-03-24 16:15:47,813 Current Learning Rate: 0.0000177213 +2025-03-24 16:15:47,814 Train Loss: 0.0000798, Val Loss: 0.0000721 +2025-03-24 16:15:47,814 Epoch 618/2000 +2025-03-24 16:20:28,607 Current Learning Rate: 0.0000198532 +2025-03-24 16:20:28,607 Train Loss: 0.0000681, Val Loss: 0.0000721 +2025-03-24 16:20:28,607 Epoch 619/2000 +2025-03-24 16:25:09,199 Current Learning Rate: 0.0000221035 +2025-03-24 16:25:09,199 Train Loss: 0.0000726, Val Loss: 0.0000721 +2025-03-24 16:25:09,200 Epoch 620/2000 +2025-03-24 16:29:50,218 Current Learning Rate: 0.0000244717 +2025-03-24 16:29:50,219 Train Loss: 0.0000753, Val Loss: 0.0000722 +2025-03-24 16:29:50,219 Epoch 621/2000 +2025-03-24 16:34:30,359 Current Learning Rate: 0.0000269573 +2025-03-24 16:34:30,360 Train Loss: 0.0000873, Val Loss: 0.0000722 +2025-03-24 
16:34:30,360 Epoch 622/2000 +2025-03-24 16:39:10,655 Current Learning Rate: 0.0000295596 +2025-03-24 16:39:10,656 Train Loss: 0.0000768, Val Loss: 0.0000726 +2025-03-24 16:39:10,656 Epoch 623/2000 +2025-03-24 16:43:51,287 Current Learning Rate: 0.0000322780 +2025-03-24 16:43:51,287 Train Loss: 0.0000744, Val Loss: 0.0000723 +2025-03-24 16:43:51,288 Epoch 624/2000 +2025-03-24 16:48:32,361 Current Learning Rate: 0.0000351118 +2025-03-24 16:48:32,361 Train Loss: 0.0000690, Val Loss: 0.0000722 +2025-03-24 16:48:32,362 Epoch 625/2000 +2025-03-24 16:53:13,314 Current Learning Rate: 0.0000380602 +2025-03-24 16:53:13,314 Train Loss: 0.0000627, Val Loss: 0.0000723 +2025-03-24 16:53:13,315 Epoch 626/2000 +2025-03-24 16:57:54,276 Current Learning Rate: 0.0000411227 +2025-03-24 16:57:54,277 Train Loss: 0.0000601, Val Loss: 0.0000722 +2025-03-24 16:57:54,277 Epoch 627/2000 +2025-03-24 17:02:34,727 Current Learning Rate: 0.0000442984 +2025-03-24 17:02:34,727 Train Loss: 0.0000749, Val Loss: 0.0000727 +2025-03-24 17:02:34,728 Epoch 628/2000 +2025-03-24 17:07:15,055 Current Learning Rate: 0.0000475865 +2025-03-24 17:07:15,056 Train Loss: 0.0000709, Val Loss: 0.0000723 +2025-03-24 17:07:15,056 Epoch 629/2000 +2025-03-24 17:11:55,552 Current Learning Rate: 0.0000509862 +2025-03-24 17:11:55,552 Train Loss: 0.0000688, Val Loss: 0.0000722 +2025-03-24 17:11:55,552 Epoch 630/2000 +2025-03-24 17:16:36,081 Current Learning Rate: 0.0000544967 +2025-03-24 17:16:36,081 Train Loss: 0.0000652, Val Loss: 0.0000723 +2025-03-24 17:16:36,082 Epoch 631/2000 +2025-03-24 17:21:16,342 Current Learning Rate: 0.0000581172 +2025-03-24 17:21:16,343 Train Loss: 0.0000716, Val Loss: 0.0000723 +2025-03-24 17:21:16,343 Epoch 632/2000 +2025-03-24 17:25:56,963 Current Learning Rate: 0.0000618467 +2025-03-24 17:25:56,963 Train Loss: 0.0000680, Val Loss: 0.0000726 +2025-03-24 17:25:56,963 Epoch 633/2000 +2025-03-24 17:30:37,848 Current Learning Rate: 0.0000656842 +2025-03-24 17:30:37,849 Train Loss: 0.0000627, Val 
Loss: 0.0000724 +2025-03-24 17:30:37,849 Epoch 634/2000 +2025-03-24 17:35:18,377 Current Learning Rate: 0.0000696290 +2025-03-24 17:35:18,378 Train Loss: 0.0000684, Val Loss: 0.0000728 +2025-03-24 17:35:18,378 Epoch 635/2000 +2025-03-24 17:39:58,801 Current Learning Rate: 0.0000736799 +2025-03-24 17:39:58,802 Train Loss: 0.0000780, Val Loss: 0.0000728 +2025-03-24 17:39:58,802 Epoch 636/2000 +2025-03-24 17:44:39,285 Current Learning Rate: 0.0000778360 +2025-03-24 17:44:39,285 Train Loss: 0.0000701, Val Loss: 0.0000727 +2025-03-24 17:44:39,285 Epoch 637/2000 +2025-03-24 17:49:19,812 Current Learning Rate: 0.0000820963 +2025-03-24 17:49:19,812 Train Loss: 0.0000674, Val Loss: 0.0000724 +2025-03-24 17:49:19,812 Epoch 638/2000 +2025-03-24 17:54:00,328 Current Learning Rate: 0.0000864597 +2025-03-24 17:54:00,328 Train Loss: 0.0000753, Val Loss: 0.0000729 +2025-03-24 17:54:00,328 Epoch 639/2000 +2025-03-24 17:58:40,732 Current Learning Rate: 0.0000909251 +2025-03-24 17:58:40,733 Train Loss: 0.0000781, Val Loss: 0.0000728 +2025-03-24 17:58:40,733 Epoch 640/2000 +2025-03-24 18:03:20,833 Current Learning Rate: 0.0000954915 +2025-03-24 18:03:20,834 Train Loss: 0.0000663, Val Loss: 0.0000726 +2025-03-24 18:03:20,834 Epoch 641/2000 +2025-03-24 18:08:01,150 Current Learning Rate: 0.0001001577 +2025-03-24 18:08:01,151 Train Loss: 0.0000671, Val Loss: 0.0000734 +2025-03-24 18:08:01,151 Epoch 642/2000 +2025-03-24 18:12:42,056 Current Learning Rate: 0.0001049225 +2025-03-24 18:12:42,057 Train Loss: 0.0000766, Val Loss: 0.0000740 +2025-03-24 18:12:42,057 Epoch 643/2000 +2025-03-24 18:17:22,629 Current Learning Rate: 0.0001097848 +2025-03-24 18:17:22,630 Train Loss: 0.0000675, Val Loss: 0.0000727 +2025-03-24 18:17:22,630 Epoch 644/2000 +2025-03-24 18:22:03,454 Current Learning Rate: 0.0001147434 +2025-03-24 18:22:03,454 Train Loss: 0.0000723, Val Loss: 0.0000737 +2025-03-24 18:22:03,455 Epoch 645/2000 +2025-03-24 18:26:44,593 Current Learning Rate: 0.0001197970 +2025-03-24 
18:26:44,594 Train Loss: 0.0000645, Val Loss: 0.0000735 +2025-03-24 18:26:44,594 Epoch 646/2000 +2025-03-24 18:31:24,784 Current Learning Rate: 0.0001249445 +2025-03-24 18:31:24,785 Train Loss: 0.0000615, Val Loss: 0.0000730 +2025-03-24 18:31:24,785 Epoch 647/2000 +2025-03-24 18:36:06,387 Current Learning Rate: 0.0001301845 +2025-03-24 18:36:06,388 Train Loss: 0.0000670, Val Loss: 0.0000733 +2025-03-24 18:36:06,388 Epoch 648/2000 +2025-03-24 18:40:47,011 Current Learning Rate: 0.0001355157 +2025-03-24 18:40:47,011 Train Loss: 0.0000671, Val Loss: 0.0000737 +2025-03-24 18:40:47,011 Epoch 649/2000 +2025-03-24 18:45:27,454 Current Learning Rate: 0.0001409369 +2025-03-24 18:45:27,454 Train Loss: 0.0000719, Val Loss: 0.0000728 +2025-03-24 18:45:27,455 Epoch 650/2000 +2025-03-24 18:50:08,262 Current Learning Rate: 0.0001464466 +2025-03-24 18:50:08,262 Train Loss: 0.0000844, Val Loss: 0.0000728 +2025-03-24 18:50:08,262 Epoch 651/2000 +2025-03-24 18:54:49,011 Current Learning Rate: 0.0001520436 +2025-03-24 18:54:49,011 Train Loss: 0.0000629, Val Loss: 0.0000749 +2025-03-24 18:54:49,011 Epoch 652/2000 +2025-03-24 18:59:29,183 Current Learning Rate: 0.0001577264 +2025-03-24 18:59:29,184 Train Loss: 0.0000772, Val Loss: 0.0000738 +2025-03-24 18:59:29,184 Epoch 653/2000 +2025-03-24 19:04:09,730 Current Learning Rate: 0.0001634937 +2025-03-24 19:04:09,730 Train Loss: 0.0000779, Val Loss: 0.0000745 +2025-03-24 19:04:09,731 Epoch 654/2000 +2025-03-24 19:08:50,664 Current Learning Rate: 0.0001693441 +2025-03-24 19:08:50,665 Train Loss: 0.0000723, Val Loss: 0.0000756 +2025-03-24 19:08:50,665 Epoch 655/2000 +2025-03-24 19:13:31,336 Current Learning Rate: 0.0001752760 +2025-03-24 19:13:31,336 Train Loss: 0.0000731, Val Loss: 0.0000735 +2025-03-24 19:13:31,336 Epoch 656/2000 +2025-03-24 19:18:11,950 Current Learning Rate: 0.0001812880 +2025-03-24 19:18:11,950 Train Loss: 0.0000689, Val Loss: 0.0000736 +2025-03-24 19:18:11,951 Epoch 657/2000 +2025-03-24 19:22:52,161 Current Learning 
Rate: 0.0001873787 +2025-03-24 19:22:52,161 Train Loss: 0.0000672, Val Loss: 0.0000733 +2025-03-24 19:22:52,162 Epoch 658/2000 +2025-03-24 19:27:32,779 Current Learning Rate: 0.0001935465 +2025-03-24 19:27:32,780 Train Loss: 0.0000684, Val Loss: 0.0000730 +2025-03-24 19:27:32,780 Epoch 659/2000 +2025-03-24 19:32:13,307 Current Learning Rate: 0.0001997899 +2025-03-24 19:32:13,308 Train Loss: 0.0000645, Val Loss: 0.0000759 +2025-03-24 19:32:13,308 Epoch 660/2000 +2025-03-24 19:36:53,923 Current Learning Rate: 0.0002061074 +2025-03-24 19:36:53,925 Train Loss: 0.0000802, Val Loss: 0.0000739 +2025-03-24 19:36:53,925 Epoch 661/2000 +2025-03-24 19:41:34,266 Current Learning Rate: 0.0002124974 +2025-03-24 19:41:34,266 Train Loss: 0.0000785, Val Loss: 0.0000734 +2025-03-24 19:41:34,266 Epoch 662/2000 +2025-03-24 19:46:15,026 Current Learning Rate: 0.0002189583 +2025-03-24 19:46:15,027 Train Loss: 0.0000860, Val Loss: 0.0000769 +2025-03-24 19:46:15,027 Epoch 663/2000 +2025-03-24 19:50:55,604 Current Learning Rate: 0.0002254886 +2025-03-24 19:50:55,604 Train Loss: 0.0000735, Val Loss: 0.0000751 +2025-03-24 19:50:55,605 Epoch 664/2000 +2025-03-24 19:55:35,757 Current Learning Rate: 0.0002320866 +2025-03-24 19:55:35,758 Train Loss: 0.0000773, Val Loss: 0.0000735 +2025-03-24 19:55:35,758 Epoch 665/2000 +2025-03-24 20:00:16,646 Current Learning Rate: 0.0002387507 +2025-03-24 20:00:16,646 Train Loss: 0.0000812, Val Loss: 0.0000749 +2025-03-24 20:00:16,647 Epoch 666/2000 +2025-03-24 20:04:56,995 Current Learning Rate: 0.0002454793 +2025-03-24 20:04:56,995 Train Loss: 0.0000820, Val Loss: 0.0000767 +2025-03-24 20:04:56,995 Epoch 667/2000 +2025-03-24 20:09:37,364 Current Learning Rate: 0.0002522707 +2025-03-24 20:09:37,364 Train Loss: 0.0000890, Val Loss: 0.0000761 +2025-03-24 20:09:37,364 Epoch 668/2000 +2025-03-24 20:14:18,386 Current Learning Rate: 0.0002591232 +2025-03-24 20:14:18,386 Train Loss: 0.0000752, Val Loss: 0.0000744 +2025-03-24 20:14:18,387 Epoch 669/2000 +2025-03-24 
20:18:59,808 Current Learning Rate: 0.0002660351 +2025-03-24 20:18:59,809 Train Loss: 0.0000726, Val Loss: 0.0000773 +2025-03-24 20:18:59,809 Epoch 670/2000 +2025-03-24 20:23:40,594 Current Learning Rate: 0.0002730048 +2025-03-24 20:23:40,597 Train Loss: 0.0000620, Val Loss: 0.0000737 +2025-03-24 20:23:40,597 Epoch 671/2000 +2025-03-24 20:28:21,982 Current Learning Rate: 0.0002800304 +2025-03-24 20:28:21,982 Train Loss: 0.0000834, Val Loss: 0.0000806 +2025-03-24 20:28:21,983 Epoch 672/2000 +2025-03-24 20:33:02,444 Current Learning Rate: 0.0002871104 +2025-03-24 20:33:02,445 Train Loss: 0.0000766, Val Loss: 0.0000743 +2025-03-24 20:33:02,445 Epoch 673/2000 +2025-03-24 20:37:42,767 Current Learning Rate: 0.0002942428 +2025-03-24 20:37:42,768 Train Loss: 0.0000684, Val Loss: 0.0000738 +2025-03-24 20:37:42,768 Epoch 674/2000 +2025-03-24 20:42:23,337 Current Learning Rate: 0.0003014261 +2025-03-24 20:42:23,338 Train Loss: 0.0000763, Val Loss: 0.0000748 +2025-03-24 20:42:23,338 Epoch 675/2000 +2025-03-24 20:47:03,869 Current Learning Rate: 0.0003086583 +2025-03-24 20:47:03,869 Train Loss: 0.0000690, Val Loss: 0.0000781 +2025-03-24 20:47:03,870 Epoch 676/2000 +2025-03-24 20:51:43,546 Current Learning Rate: 0.0003159377 +2025-03-24 20:51:43,546 Train Loss: 0.0000777, Val Loss: 0.0000934 +2025-03-24 20:51:43,546 Epoch 677/2000 +2025-03-24 20:56:23,532 Current Learning Rate: 0.0003232626 +2025-03-24 20:56:23,533 Train Loss: 0.0000627, Val Loss: 0.0000747 +2025-03-24 20:56:23,533 Epoch 678/2000 +2025-03-24 21:01:03,944 Current Learning Rate: 0.0003306310 +2025-03-24 21:01:03,945 Train Loss: 0.0000825, Val Loss: 0.0000770 +2025-03-24 21:01:03,945 Epoch 679/2000 +2025-03-24 21:05:44,768 Current Learning Rate: 0.0003380413 +2025-03-24 21:05:44,768 Train Loss: 0.0000772, Val Loss: 0.0000777 +2025-03-24 21:05:44,768 Epoch 680/2000 +2025-03-24 21:10:24,959 Current Learning Rate: 0.0003454915 +2025-03-24 21:10:24,960 Train Loss: 0.0000752, Val Loss: 0.0000761 +2025-03-24 
21:10:24,960 Epoch 681/2000 +2025-03-24 21:15:05,297 Current Learning Rate: 0.0003529798 +2025-03-24 21:15:05,298 Train Loss: 0.0000819, Val Loss: 0.0000778 +2025-03-24 21:15:05,298 Epoch 682/2000 +2025-03-24 21:19:45,469 Current Learning Rate: 0.0003605044 +2025-03-24 21:19:45,470 Train Loss: 0.0000763, Val Loss: 0.0000764 +2025-03-24 21:19:45,470 Epoch 683/2000 +2025-03-24 21:24:25,428 Current Learning Rate: 0.0003680635 +2025-03-24 21:24:25,428 Train Loss: 0.0000779, Val Loss: 0.0000783 +2025-03-24 21:24:25,428 Epoch 684/2000 +2025-03-24 21:29:05,184 Current Learning Rate: 0.0003756551 +2025-03-24 21:29:05,185 Train Loss: 0.0000734, Val Loss: 0.0000774 +2025-03-24 21:29:05,185 Epoch 685/2000 +2025-03-24 21:33:45,200 Current Learning Rate: 0.0003832773 +2025-03-24 21:33:45,201 Train Loss: 0.0000796, Val Loss: 0.0000779 +2025-03-24 21:33:45,201 Epoch 686/2000 +2025-03-24 21:38:25,253 Current Learning Rate: 0.0003909284 +2025-03-24 21:38:25,254 Train Loss: 0.0000671, Val Loss: 0.0000744 +2025-03-24 21:38:25,254 Epoch 687/2000 +2025-03-24 21:43:05,741 Current Learning Rate: 0.0003986064 +2025-03-24 21:43:05,742 Train Loss: 0.0000849, Val Loss: 0.0000775 +2025-03-24 21:43:05,742 Epoch 688/2000 +2025-03-24 21:47:45,730 Current Learning Rate: 0.0004063093 +2025-03-24 21:47:45,730 Train Loss: 0.0000947, Val Loss: 0.0000798 +2025-03-24 21:47:45,730 Epoch 689/2000 +2025-03-24 21:52:25,212 Current Learning Rate: 0.0004140354 +2025-03-24 21:52:25,212 Train Loss: 0.0000764, Val Loss: 0.0000777 +2025-03-24 21:52:25,213 Epoch 690/2000 +2025-03-24 21:57:05,222 Current Learning Rate: 0.0004217828 +2025-03-24 21:57:05,223 Train Loss: 0.0000973, Val Loss: 0.0000808 +2025-03-24 21:57:05,223 Epoch 691/2000 +2025-03-24 22:01:45,331 Current Learning Rate: 0.0004295494 +2025-03-24 22:01:45,331 Train Loss: 0.0000823, Val Loss: 0.0000848 +2025-03-24 22:01:45,331 Epoch 692/2000 +2025-03-24 22:06:25,307 Current Learning Rate: 0.0004373334 +2025-03-24 22:06:25,307 Train Loss: 0.0000749, Val 
Loss: 0.0000763 +2025-03-24 22:06:25,308 Epoch 693/2000 +2025-03-24 22:11:04,866 Current Learning Rate: 0.0004451328 +2025-03-24 22:11:04,866 Train Loss: 0.0000763, Val Loss: 0.0000788 +2025-03-24 22:11:04,866 Epoch 694/2000 +2025-03-24 22:15:44,733 Current Learning Rate: 0.0004529458 +2025-03-24 22:15:44,733 Train Loss: 0.0000803, Val Loss: 0.0000808 +2025-03-24 22:15:44,733 Epoch 695/2000 +2025-03-24 22:20:24,220 Current Learning Rate: 0.0004607705 +2025-03-24 22:20:24,220 Train Loss: 0.0000764, Val Loss: 0.0000755 +2025-03-24 22:20:24,221 Epoch 696/2000 +2025-03-24 22:25:03,629 Current Learning Rate: 0.0004686047 +2025-03-24 22:25:03,629 Train Loss: 0.0000795, Val Loss: 0.0000780 +2025-03-24 22:25:03,629 Epoch 697/2000 +2025-03-24 22:29:43,338 Current Learning Rate: 0.0004764468 +2025-03-24 22:29:43,339 Train Loss: 0.0000887, Val Loss: 0.0000857 +2025-03-24 22:29:43,339 Epoch 698/2000 +2025-03-24 22:34:23,462 Current Learning Rate: 0.0004842946 +2025-03-24 22:34:23,462 Train Loss: 0.0000997, Val Loss: 0.0000810 +2025-03-24 22:34:23,462 Epoch 699/2000 +2025-03-24 22:39:03,284 Current Learning Rate: 0.0004921463 +2025-03-24 22:39:03,285 Train Loss: 0.0000972, Val Loss: 0.0000866 +2025-03-24 22:39:03,285 Epoch 700/2000 +2025-03-24 22:43:43,235 Current Learning Rate: 0.0005000000 +2025-03-24 22:43:43,235 Train Loss: 0.0000786, Val Loss: 0.0000786 +2025-03-24 22:43:43,236 Epoch 701/2000 +2025-03-24 22:48:23,006 Current Learning Rate: 0.0005078537 +2025-03-24 22:48:23,006 Train Loss: 0.0000822, Val Loss: 0.0000810 +2025-03-24 22:48:23,007 Epoch 702/2000 +2025-03-24 22:53:03,026 Current Learning Rate: 0.0005157054 +2025-03-24 22:53:03,027 Train Loss: 0.0000821, Val Loss: 0.0000830 +2025-03-24 22:53:03,027 Epoch 703/2000 +2025-03-24 22:57:43,101 Current Learning Rate: 0.0005235532 +2025-03-24 22:57:43,101 Train Loss: 0.0001004, Val Loss: 0.0000818 +2025-03-24 22:57:43,101 Epoch 704/2000 +2025-03-24 23:02:23,010 Current Learning Rate: 0.0005313953 +2025-03-24 
23:02:23,011 Train Loss: 0.0000817, Val Loss: 0.0000855 +2025-03-24 23:02:23,011 Epoch 705/2000 +2025-03-24 23:07:02,736 Current Learning Rate: 0.0005392295 +2025-03-24 23:07:02,737 Train Loss: 0.0000749, Val Loss: 0.0000779 +2025-03-24 23:07:02,737 Epoch 706/2000 +2025-03-24 23:11:42,706 Current Learning Rate: 0.0005470542 +2025-03-24 23:11:42,706 Train Loss: 0.0001092, Val Loss: 0.0000833 +2025-03-24 23:11:42,707 Epoch 707/2000 +2025-03-24 23:16:22,416 Current Learning Rate: 0.0005548672 +2025-03-24 23:16:22,416 Train Loss: 0.0000957, Val Loss: 0.0000888 +2025-03-24 23:16:22,416 Epoch 708/2000 +2025-03-24 23:21:02,251 Current Learning Rate: 0.0005626666 +2025-03-24 23:21:02,251 Train Loss: 0.0000843, Val Loss: 0.0000791 +2025-03-24 23:21:02,251 Epoch 709/2000 +2025-03-24 23:25:42,782 Current Learning Rate: 0.0005704506 +2025-03-24 23:25:42,783 Train Loss: 0.0000996, Val Loss: 0.0000803 +2025-03-24 23:25:42,783 Epoch 710/2000 +2025-03-24 23:30:23,591 Current Learning Rate: 0.0005782172 +2025-03-24 23:30:23,592 Train Loss: 0.0001021, Val Loss: 0.0000875 +2025-03-24 23:30:23,592 Epoch 711/2000 +2025-03-24 23:35:03,753 Current Learning Rate: 0.0005859646 +2025-03-24 23:35:03,754 Train Loss: 0.0000852, Val Loss: 0.0000763 +2025-03-24 23:35:03,754 Epoch 712/2000 +2025-03-24 23:39:43,404 Current Learning Rate: 0.0005936907 +2025-03-24 23:39:43,405 Train Loss: 0.0000876, Val Loss: 0.0000926 +2025-03-24 23:39:43,405 Epoch 713/2000 +2025-03-24 23:44:23,422 Current Learning Rate: 0.0006013936 +2025-03-24 23:44:23,423 Train Loss: 0.0000848, Val Loss: 0.0000875 +2025-03-24 23:44:23,423 Epoch 714/2000 +2025-03-24 23:49:04,002 Current Learning Rate: 0.0006090716 +2025-03-24 23:49:04,003 Train Loss: 0.0000731, Val Loss: 0.0000791 +2025-03-24 23:49:04,003 Epoch 715/2000 +2025-03-24 23:53:44,525 Current Learning Rate: 0.0006167227 +2025-03-24 23:53:44,525 Train Loss: 0.0000922, Val Loss: 0.0000799 +2025-03-24 23:53:44,525 Epoch 716/2000 +2025-03-24 23:58:25,099 Current Learning 
Rate: 0.0006243449 +2025-03-24 23:58:25,100 Train Loss: 0.0000838, Val Loss: 0.0000829 +2025-03-24 23:58:25,100 Epoch 717/2000 +2025-03-25 00:03:05,509 Current Learning Rate: 0.0006319365 +2025-03-25 00:03:05,510 Train Loss: 0.0000822, Val Loss: 0.0000797 +2025-03-25 00:03:05,510 Epoch 718/2000 +2025-03-25 00:07:46,304 Current Learning Rate: 0.0006394956 +2025-03-25 00:07:46,305 Train Loss: 0.0000991, Val Loss: 0.0001035 +2025-03-25 00:07:46,305 Epoch 719/2000 +2025-03-25 00:12:27,177 Current Learning Rate: 0.0006470202 +2025-03-25 00:12:27,178 Train Loss: 0.0000958, Val Loss: 0.0000833 +2025-03-25 00:12:27,178 Epoch 720/2000 +2025-03-25 00:17:07,422 Current Learning Rate: 0.0006545085 +2025-03-25 00:17:07,422 Train Loss: 0.0001105, Val Loss: 0.0001170 +2025-03-25 00:17:07,422 Epoch 721/2000 +2025-03-25 00:21:47,295 Current Learning Rate: 0.0006619587 +2025-03-25 00:21:47,295 Train Loss: 0.0001190, Val Loss: 0.0000828 +2025-03-25 00:21:47,295 Epoch 722/2000 +2025-03-25 00:26:27,739 Current Learning Rate: 0.0006693690 +2025-03-25 00:26:27,740 Train Loss: 0.0000831, Val Loss: 0.0000828 +2025-03-25 00:26:27,740 Epoch 723/2000 +2025-03-25 00:31:07,660 Current Learning Rate: 0.0006767374 +2025-03-25 00:31:07,661 Train Loss: 0.0000855, Val Loss: 0.0000992 +2025-03-25 00:31:07,661 Epoch 724/2000 +2025-03-25 00:35:47,530 Current Learning Rate: 0.0006840623 +2025-03-25 00:35:47,531 Train Loss: 0.0000923, Val Loss: 0.0000868 +2025-03-25 00:35:47,531 Epoch 725/2000 +2025-03-25 00:40:28,335 Current Learning Rate: 0.0006913417 +2025-03-25 00:40:28,335 Train Loss: 0.0001025, Val Loss: 0.0000806 +2025-03-25 00:40:28,336 Epoch 726/2000 +2025-03-25 00:45:08,685 Current Learning Rate: 0.0006985739 +2025-03-25 00:45:08,686 Train Loss: 0.0000933, Val Loss: 0.0000881 +2025-03-25 00:45:08,686 Epoch 727/2000 +2025-03-25 00:49:48,975 Current Learning Rate: 0.0007057572 +2025-03-25 00:49:48,976 Train Loss: 0.0000891, Val Loss: 0.0000957 +2025-03-25 00:49:48,976 Epoch 728/2000 +2025-03-25 
00:54:29,892 Current Learning Rate: 0.0007128896 +2025-03-25 00:54:29,893 Train Loss: 0.0001061, Val Loss: 0.0000964 +2025-03-25 00:54:29,894 Epoch 729/2000 +2025-03-25 00:59:10,769 Current Learning Rate: 0.0007199696 +2025-03-25 00:59:10,770 Train Loss: 0.0000917, Val Loss: 0.0000854 +2025-03-25 00:59:10,770 Epoch 730/2000 +2025-03-25 01:03:50,579 Current Learning Rate: 0.0007269952 +2025-03-25 01:03:50,579 Train Loss: 0.0000818, Val Loss: 0.0000822 +2025-03-25 01:03:50,580 Epoch 731/2000 +2025-03-25 01:08:30,691 Current Learning Rate: 0.0007339649 +2025-03-25 01:08:30,692 Train Loss: 0.0000899, Val Loss: 0.0000922 +2025-03-25 01:08:30,692 Epoch 732/2000 +2025-03-25 01:13:11,876 Current Learning Rate: 0.0007408768 +2025-03-25 01:13:11,877 Train Loss: 0.0001151, Val Loss: 0.0000948 +2025-03-25 01:13:11,877 Epoch 733/2000 +2025-03-25 01:17:52,599 Current Learning Rate: 0.0007477293 +2025-03-25 01:17:52,599 Train Loss: 0.0000984, Val Loss: 0.0000820 +2025-03-25 01:17:52,600 Epoch 734/2000 +2025-03-25 01:22:33,626 Current Learning Rate: 0.0007545207 +2025-03-25 01:22:33,627 Train Loss: 0.0000891, Val Loss: 0.0001074 +2025-03-25 01:22:33,627 Epoch 735/2000 +2025-03-25 01:27:14,164 Current Learning Rate: 0.0007612493 +2025-03-25 01:27:14,165 Train Loss: 0.0000975, Val Loss: 0.0000836 +2025-03-25 01:27:14,165 Epoch 736/2000 +2025-03-25 01:31:54,250 Current Learning Rate: 0.0007679134 +2025-03-25 01:31:54,251 Train Loss: 0.0000876, Val Loss: 0.0001134 +2025-03-25 01:31:54,251 Epoch 737/2000 +2025-03-25 01:36:35,028 Current Learning Rate: 0.0007745114 +2025-03-25 01:36:35,028 Train Loss: 0.0000972, Val Loss: 0.0000994 +2025-03-25 01:36:35,029 Epoch 738/2000 +2025-03-25 01:41:15,501 Current Learning Rate: 0.0007810417 +2025-03-25 01:41:15,502 Train Loss: 0.0001118, Val Loss: 0.0000860 +2025-03-25 01:41:15,502 Epoch 739/2000 +2025-03-25 01:45:55,888 Current Learning Rate: 0.0007875026 +2025-03-25 01:45:55,889 Train Loss: 0.0000872, Val Loss: 0.0001099 +2025-03-25 
01:45:55,889 Epoch 740/2000 +2025-03-25 01:50:36,035 Current Learning Rate: 0.0007938926 +2025-03-25 01:50:36,035 Train Loss: 0.0000954, Val Loss: 0.0000870 +2025-03-25 01:50:36,035 Epoch 741/2000 +2025-03-25 01:55:16,835 Current Learning Rate: 0.0008002101 +2025-03-25 01:55:16,835 Train Loss: 0.0001112, Val Loss: 0.0000851 +2025-03-25 01:55:16,836 Epoch 742/2000 +2025-03-25 01:59:57,645 Current Learning Rate: 0.0008064535 +2025-03-25 01:59:57,646 Train Loss: 0.0001026, Val Loss: 0.0000882 +2025-03-25 01:59:57,646 Epoch 743/2000 +2025-03-25 02:04:37,465 Current Learning Rate: 0.0008126213 +2025-03-25 02:04:37,469 Train Loss: 0.0001337, Val Loss: 0.0000996 +2025-03-25 02:04:37,469 Epoch 744/2000 +2025-03-25 02:09:18,321 Current Learning Rate: 0.0008187120 +2025-03-25 02:09:18,321 Train Loss: 0.0001018, Val Loss: 0.0000896 +2025-03-25 02:09:18,321 Epoch 745/2000 +2025-03-25 02:13:58,781 Current Learning Rate: 0.0008247240 +2025-03-25 02:13:58,782 Train Loss: 0.0000975, Val Loss: 0.0001011 +2025-03-25 02:13:58,782 Epoch 746/2000 +2025-03-25 02:18:38,989 Current Learning Rate: 0.0008306559 +2025-03-25 02:18:38,989 Train Loss: 0.0001248, Val Loss: 0.0001083 +2025-03-25 02:18:38,989 Epoch 747/2000 +2025-03-25 02:23:19,483 Current Learning Rate: 0.0008365063 +2025-03-25 02:23:19,484 Train Loss: 0.0000911, Val Loss: 0.0001147 +2025-03-25 02:23:19,484 Epoch 748/2000 +2025-03-25 02:27:59,596 Current Learning Rate: 0.0008422736 +2025-03-25 02:27:59,597 Train Loss: 0.0001370, Val Loss: 0.0001123 +2025-03-25 02:27:59,597 Epoch 749/2000 +2025-03-25 02:32:40,353 Current Learning Rate: 0.0008479564 +2025-03-25 02:32:40,354 Train Loss: 0.0001296, Val Loss: 0.0000930 +2025-03-25 02:32:40,354 Epoch 750/2000 +2025-03-25 02:37:20,983 Current Learning Rate: 0.0008535534 +2025-03-25 02:37:20,984 Train Loss: 0.0001136, Val Loss: 0.0001212 +2025-03-25 02:37:20,984 Epoch 751/2000 +2025-03-25 02:42:01,895 Current Learning Rate: 0.0008590631 +2025-03-25 02:42:01,895 Train Loss: 0.0000931, Val 
Loss: 0.0000963 +2025-03-25 02:42:01,896 Epoch 752/2000 +2025-03-25 02:46:42,220 Current Learning Rate: 0.0008644843 +2025-03-25 02:46:42,220 Train Loss: 0.0001085, Val Loss: 0.0000927 +2025-03-25 02:46:42,221 Epoch 753/2000 +2025-03-25 02:51:22,161 Current Learning Rate: 0.0008698155 +2025-03-25 02:51:22,162 Train Loss: 0.0001086, Val Loss: 0.0000926 +2025-03-25 02:51:22,162 Epoch 754/2000 +2025-03-25 02:56:02,284 Current Learning Rate: 0.0008750555 +2025-03-25 02:56:02,285 Train Loss: 0.0000932, Val Loss: 0.0001697 +2025-03-25 02:56:02,285 Epoch 755/2000 +2025-03-25 03:00:43,438 Current Learning Rate: 0.0008802030 +2025-03-25 03:00:43,439 Train Loss: 0.0001251, Val Loss: 0.0000937 +2025-03-25 03:00:43,439 Epoch 756/2000 +2025-03-25 03:05:24,576 Current Learning Rate: 0.0008852566 +2025-03-25 03:05:24,577 Train Loss: 0.0001044, Val Loss: 0.0000914 +2025-03-25 03:05:24,577 Epoch 757/2000 +2025-03-25 03:10:05,179 Current Learning Rate: 0.0008902152 +2025-03-25 03:10:05,179 Train Loss: 0.0000988, Val Loss: 0.0001084 +2025-03-25 03:10:05,179 Epoch 758/2000 +2025-03-25 03:14:45,432 Current Learning Rate: 0.0008950775 +2025-03-25 03:14:45,432 Train Loss: 0.0001115, Val Loss: 0.0000958 +2025-03-25 03:14:45,433 Epoch 759/2000 +2025-03-25 03:19:26,237 Current Learning Rate: 0.0008998423 +2025-03-25 03:19:26,238 Train Loss: 0.0001047, Val Loss: 0.0001090 +2025-03-25 03:19:26,238 Epoch 760/2000 +2025-03-25 03:24:06,902 Current Learning Rate: 0.0009045085 +2025-03-25 03:24:06,903 Train Loss: 0.0000906, Val Loss: 0.0000866 +2025-03-25 03:24:06,903 Epoch 761/2000 +2025-03-25 03:28:47,595 Current Learning Rate: 0.0009090749 +2025-03-25 03:28:47,595 Train Loss: 0.0001100, Val Loss: 0.0001407 +2025-03-25 03:28:47,596 Epoch 762/2000 +2025-03-25 03:33:27,962 Current Learning Rate: 0.0009135403 +2025-03-25 03:33:27,962 Train Loss: 0.0000998, Val Loss: 0.0000882 +2025-03-25 03:33:27,963 Epoch 763/2000 +2025-03-25 03:38:08,018 Current Learning Rate: 0.0009179037 +2025-03-25 
03:38:08,019 Train Loss: 0.0001074, Val Loss: 0.0001004 +2025-03-25 03:38:08,019 Epoch 764/2000 +2025-03-25 03:42:48,814 Current Learning Rate: 0.0009221640 +2025-03-25 03:42:48,815 Train Loss: 0.0001055, Val Loss: 0.0000867 +2025-03-25 03:42:48,815 Epoch 765/2000 +2025-03-25 03:47:29,294 Current Learning Rate: 0.0009263201 +2025-03-25 03:47:29,295 Train Loss: 0.0000993, Val Loss: 0.0000878 +2025-03-25 03:47:29,295 Epoch 766/2000 +2025-03-25 03:52:09,964 Current Learning Rate: 0.0009303710 +2025-03-25 03:52:09,965 Train Loss: 0.0001908, Val Loss: 0.0001333 +2025-03-25 03:52:09,965 Epoch 767/2000 +2025-03-25 03:56:50,868 Current Learning Rate: 0.0009343158 +2025-03-25 03:56:50,868 Train Loss: 0.0001243, Val Loss: 0.0000904 +2025-03-25 03:56:50,868 Epoch 768/2000 +2025-03-25 04:01:31,068 Current Learning Rate: 0.0009381533 +2025-03-25 04:01:31,069 Train Loss: 0.0000888, Val Loss: 0.0000843 +2025-03-25 04:01:31,069 Epoch 769/2000 +2025-03-25 04:06:12,019 Current Learning Rate: 0.0009418828 +2025-03-25 04:06:12,020 Train Loss: 0.0000795, Val Loss: 0.0000960 +2025-03-25 04:06:12,020 Epoch 770/2000 +2025-03-25 04:10:52,073 Current Learning Rate: 0.0009455033 +2025-03-25 04:10:52,074 Train Loss: 0.0001207, Val Loss: 0.0000900 +2025-03-25 04:10:52,074 Epoch 771/2000 +2025-03-25 04:15:33,175 Current Learning Rate: 0.0009490138 +2025-03-25 04:15:33,176 Train Loss: 0.0001218, Val Loss: 0.0001014 +2025-03-25 04:15:33,176 Epoch 772/2000 +2025-03-25 04:20:13,789 Current Learning Rate: 0.0009524135 +2025-03-25 04:20:13,789 Train Loss: 0.0001041, Val Loss: 0.0001083 +2025-03-25 04:20:13,790 Epoch 773/2000 +2025-03-25 04:24:54,751 Current Learning Rate: 0.0009557016 +2025-03-25 04:24:54,751 Train Loss: 0.0001091, Val Loss: 0.0000913 +2025-03-25 04:24:54,751 Epoch 774/2000 +2025-03-25 04:29:34,932 Current Learning Rate: 0.0009588773 +2025-03-25 04:29:34,932 Train Loss: 0.0001231, Val Loss: 0.0000961 +2025-03-25 04:29:34,932 Epoch 775/2000 +2025-03-25 04:34:15,816 Current Learning 
Rate: 0.0009619398 +2025-03-25 04:34:15,816 Train Loss: 0.0001031, Val Loss: 0.0000948 +2025-03-25 04:34:15,816 Epoch 776/2000 +2025-03-25 04:38:55,948 Current Learning Rate: 0.0009648882 +2025-03-25 04:38:55,949 Train Loss: 0.0001090, Val Loss: 0.0000921 +2025-03-25 04:38:55,949 Epoch 777/2000 +2025-03-25 04:43:36,644 Current Learning Rate: 0.0009677220 +2025-03-25 04:43:36,644 Train Loss: 0.0001522, Val Loss: 0.0000913 +2025-03-25 04:43:36,645 Epoch 778/2000 +2025-03-25 04:48:17,705 Current Learning Rate: 0.0009704404 +2025-03-25 04:48:17,706 Train Loss: 0.0001079, Val Loss: 0.0000872 +2025-03-25 04:48:17,706 Epoch 779/2000 +2025-03-25 04:52:57,982 Current Learning Rate: 0.0009730427 +2025-03-25 04:52:57,982 Train Loss: 0.0001025, Val Loss: 0.0000960 +2025-03-25 04:52:57,982 Epoch 780/2000 +2025-03-25 04:57:38,066 Current Learning Rate: 0.0009755283 +2025-03-25 04:57:38,067 Train Loss: 0.0001190, Val Loss: 0.0001139 +2025-03-25 04:57:38,067 Epoch 781/2000 +2025-03-25 05:02:18,882 Current Learning Rate: 0.0009778965 +2025-03-25 05:02:18,882 Train Loss: 0.0000946, Val Loss: 0.0000972 +2025-03-25 05:02:18,882 Epoch 782/2000 +2025-03-25 05:06:59,078 Current Learning Rate: 0.0009801468 +2025-03-25 05:06:59,078 Train Loss: 0.0001142, Val Loss: 0.0001131 +2025-03-25 05:06:59,078 Epoch 783/2000 +2025-03-25 05:11:38,855 Current Learning Rate: 0.0009822787 +2025-03-25 05:11:38,856 Train Loss: 0.0000918, Val Loss: 0.0001004 +2025-03-25 05:11:38,856 Epoch 784/2000 +2025-03-25 05:16:19,751 Current Learning Rate: 0.0009842916 +2025-03-25 05:16:19,751 Train Loss: 0.0000904, Val Loss: 0.0000889 +2025-03-25 05:16:19,752 Epoch 785/2000 +2025-03-25 05:20:59,597 Current Learning Rate: 0.0009861850 +2025-03-25 05:20:59,597 Train Loss: 0.0001185, Val Loss: 0.0000988 +2025-03-25 05:20:59,598 Epoch 786/2000 +2025-03-25 05:25:39,865 Current Learning Rate: 0.0009879584 +2025-03-25 05:25:39,866 Train Loss: 0.0001028, Val Loss: 0.0000920 +2025-03-25 05:25:39,866 Epoch 787/2000 +2025-03-25 
05:30:20,058 Current Learning Rate: 0.0009896114 +2025-03-25 05:30:20,060 Train Loss: 0.0001160, Val Loss: 0.0000997 +2025-03-25 05:30:20,061 Epoch 788/2000 +2025-03-25 05:34:59,938 Current Learning Rate: 0.0009911436 +2025-03-25 05:34:59,938 Train Loss: 0.0001144, Val Loss: 0.0000957 +2025-03-25 05:34:59,939 Epoch 789/2000 +2025-03-25 05:39:41,029 Current Learning Rate: 0.0009925547 +2025-03-25 05:39:41,029 Train Loss: 0.0001068, Val Loss: 0.0001131 +2025-03-25 05:39:41,029 Epoch 790/2000 +2025-03-25 05:44:21,364 Current Learning Rate: 0.0009938442 +2025-03-25 05:44:21,365 Train Loss: 0.0001218, Val Loss: 0.0000939 +2025-03-25 05:44:21,365 Epoch 791/2000 +2025-03-25 05:49:01,732 Current Learning Rate: 0.0009950118 +2025-03-25 05:49:01,732 Train Loss: 0.0001055, Val Loss: 0.0000975 +2025-03-25 05:49:01,733 Epoch 792/2000 +2025-03-25 05:53:41,794 Current Learning Rate: 0.0009960574 +2025-03-25 05:53:41,794 Train Loss: 0.0001530, Val Loss: 0.0001445 +2025-03-25 05:53:41,795 Epoch 793/2000 +2025-03-25 05:58:22,903 Current Learning Rate: 0.0009969805 +2025-03-25 05:58:22,903 Train Loss: 0.0001047, Val Loss: 0.0000927 +2025-03-25 05:58:22,904 Epoch 794/2000 +2025-03-25 06:03:03,064 Current Learning Rate: 0.0009977810 +2025-03-25 06:03:03,065 Train Loss: 0.0000861, Val Loss: 0.0001107 +2025-03-25 06:03:03,065 Epoch 795/2000 +2025-03-25 06:07:43,313 Current Learning Rate: 0.0009984587 +2025-03-25 06:07:43,313 Train Loss: 0.0001003, Val Loss: 0.0000953 +2025-03-25 06:07:43,314 Epoch 796/2000 +2025-03-25 06:12:23,865 Current Learning Rate: 0.0009990134 +2025-03-25 06:12:23,866 Train Loss: 0.0001040, Val Loss: 0.0001217 +2025-03-25 06:12:23,866 Epoch 797/2000 +2025-03-25 06:17:04,106 Current Learning Rate: 0.0009994449 +2025-03-25 06:17:04,107 Train Loss: 0.0001150, Val Loss: 0.0000953 +2025-03-25 06:17:04,107 Epoch 798/2000 +2025-03-25 06:21:44,846 Current Learning Rate: 0.0009997533 +2025-03-25 06:21:44,846 Train Loss: 0.0001078, Val Loss: 0.0001056 +2025-03-25 
06:21:44,847 Epoch 799/2000 +2025-03-25 06:26:25,713 Current Learning Rate: 0.0009999383 +2025-03-25 06:26:25,714 Train Loss: 0.0001123, Val Loss: 0.0001001 +2025-03-25 06:26:25,714 Epoch 800/2000 +2025-03-25 06:31:06,569 Current Learning Rate: 0.0010000000 +2025-03-25 06:31:06,569 Train Loss: 0.0000950, Val Loss: 0.0000938 +2025-03-25 06:31:06,570 Epoch 801/2000 +2025-03-25 06:35:47,581 Current Learning Rate: 0.0009999383 +2025-03-25 06:35:47,581 Train Loss: 0.0001382, Val Loss: 0.0000977 +2025-03-25 06:35:47,581 Epoch 802/2000 +2025-03-25 06:40:28,401 Current Learning Rate: 0.0009997533 +2025-03-25 06:40:28,401 Train Loss: 0.0001053, Val Loss: 0.0000946 +2025-03-25 06:40:28,401 Epoch 803/2000 +2025-03-25 06:45:08,902 Current Learning Rate: 0.0009994449 +2025-03-25 06:45:08,902 Train Loss: 0.0001059, Val Loss: 0.0001407 +2025-03-25 06:45:08,902 Epoch 804/2000 +2025-03-25 06:49:50,225 Current Learning Rate: 0.0009990134 +2025-03-25 06:49:50,226 Train Loss: 0.0001015, Val Loss: 0.0000961 +2025-03-25 06:49:50,226 Epoch 805/2000 +2025-03-25 06:54:30,773 Current Learning Rate: 0.0009984587 +2025-03-25 06:54:30,773 Train Loss: 0.0001095, Val Loss: 0.0000962 +2025-03-25 06:54:30,774 Epoch 806/2000 +2025-03-25 06:59:11,278 Current Learning Rate: 0.0009977810 +2025-03-25 06:59:11,278 Train Loss: 0.0000863, Val Loss: 0.0000857 +2025-03-25 06:59:11,279 Epoch 807/2000 +2025-03-25 07:03:51,469 Current Learning Rate: 0.0009969805 +2025-03-25 07:03:51,469 Train Loss: 0.0001099, Val Loss: 0.0001040 +2025-03-25 07:03:51,469 Epoch 808/2000 +2025-03-25 07:08:32,307 Current Learning Rate: 0.0009960574 +2025-03-25 07:08:32,307 Train Loss: 0.0001063, Val Loss: 0.0001006 +2025-03-25 07:08:32,308 Epoch 809/2000 +2025-03-25 07:13:12,692 Current Learning Rate: 0.0009950118 +2025-03-25 07:13:12,693 Train Loss: 0.0001080, Val Loss: 0.0000972 +2025-03-25 07:13:12,693 Epoch 810/2000 +2025-03-25 07:17:52,979 Current Learning Rate: 0.0009938442 +2025-03-25 07:17:52,979 Train Loss: 0.0001191, Val 
Loss: 0.0000935 +2025-03-25 07:17:52,980 Epoch 811/2000 +2025-03-25 07:22:32,695 Current Learning Rate: 0.0009925547 +2025-03-25 07:22:32,696 Train Loss: 0.0001023, Val Loss: 0.0001972 +2025-03-25 07:22:32,696 Epoch 812/2000 +2025-03-25 07:27:13,216 Current Learning Rate: 0.0009911436 +2025-03-25 07:27:13,217 Train Loss: 0.0001134, Val Loss: 0.0001060 +2025-03-25 07:27:13,217 Epoch 813/2000 +2025-03-25 07:31:52,820 Current Learning Rate: 0.0009896114 +2025-03-25 07:31:52,821 Train Loss: 0.0001001, Val Loss: 0.0001034 +2025-03-25 07:31:52,821 Epoch 814/2000 +2025-03-25 07:36:33,392 Current Learning Rate: 0.0009879584 +2025-03-25 07:36:33,392 Train Loss: 0.0000983, Val Loss: 0.0000958 +2025-03-25 07:36:33,393 Epoch 815/2000 +2025-03-25 07:41:13,908 Current Learning Rate: 0.0009861850 +2025-03-25 07:41:13,909 Train Loss: 0.0001105, Val Loss: 0.0000911 +2025-03-25 07:41:13,909 Epoch 816/2000 +2025-03-25 07:45:54,868 Current Learning Rate: 0.0009842916 +2025-03-25 07:45:54,868 Train Loss: 0.0000957, Val Loss: 0.0000946 +2025-03-25 07:45:54,868 Epoch 817/2000 +2025-03-25 07:50:35,607 Current Learning Rate: 0.0009822787 +2025-03-25 07:50:35,608 Train Loss: 0.0001059, Val Loss: 0.0001002 +2025-03-25 07:50:35,608 Epoch 818/2000 +2025-03-25 07:55:15,955 Current Learning Rate: 0.0009801468 +2025-03-25 07:55:15,955 Train Loss: 0.0000825, Val Loss: 0.0000850 +2025-03-25 07:55:15,956 Epoch 819/2000 +2025-03-25 07:59:56,765 Current Learning Rate: 0.0009778965 +2025-03-25 07:59:56,765 Train Loss: 0.0000902, Val Loss: 0.0001090 +2025-03-25 07:59:56,766 Epoch 820/2000 +2025-03-25 08:04:37,476 Current Learning Rate: 0.0009755283 +2025-03-25 08:04:37,476 Train Loss: 0.0001007, Val Loss: 0.0001135 +2025-03-25 08:04:37,477 Epoch 821/2000 +2025-03-25 08:09:17,943 Current Learning Rate: 0.0009730427 +2025-03-25 08:09:17,944 Train Loss: 0.0001103, Val Loss: 0.0000911 +2025-03-25 08:09:17,944 Epoch 822/2000 +2025-03-25 08:13:58,340 Current Learning Rate: 0.0009704404 +2025-03-25 
08:13:58,341 Train Loss: 0.0000945, Val Loss: 0.0000945 +2025-03-25 08:13:58,341 Epoch 823/2000 +2025-03-25 08:18:38,897 Current Learning Rate: 0.0009677220 +2025-03-25 08:18:38,897 Train Loss: 0.0000993, Val Loss: 0.0000945 +2025-03-25 08:18:38,897 Epoch 824/2000 +2025-03-25 08:23:19,522 Current Learning Rate: 0.0009648882 +2025-03-25 08:23:19,523 Train Loss: 0.0000979, Val Loss: 0.0000902 +2025-03-25 08:23:19,523 Epoch 825/2000 +2025-03-25 08:28:00,248 Current Learning Rate: 0.0009619398 +2025-03-25 08:28:00,249 Train Loss: 0.0000997, Val Loss: 0.0000879 +2025-03-25 08:28:00,249 Epoch 826/2000 +2025-03-25 08:32:41,004 Current Learning Rate: 0.0009588773 +2025-03-25 08:32:41,005 Train Loss: 0.0000935, Val Loss: 0.0001040 +2025-03-25 08:32:41,005 Epoch 827/2000 +2025-03-25 08:37:21,907 Current Learning Rate: 0.0009557016 +2025-03-25 08:37:21,907 Train Loss: 0.0000937, Val Loss: 0.0000924 +2025-03-25 08:37:21,907 Epoch 828/2000 +2025-03-25 08:42:02,230 Current Learning Rate: 0.0009524135 +2025-03-25 08:42:02,230 Train Loss: 0.0001043, Val Loss: 0.0001084 +2025-03-25 08:42:02,231 Epoch 829/2000 +2025-03-25 08:46:42,685 Current Learning Rate: 0.0009490138 +2025-03-25 08:46:42,686 Train Loss: 0.0001082, Val Loss: 0.0000944 +2025-03-25 08:46:42,686 Epoch 830/2000 +2025-03-25 08:51:23,012 Current Learning Rate: 0.0009455033 +2025-03-25 08:51:23,013 Train Loss: 0.0000720, Val Loss: 0.0000901 +2025-03-25 08:51:23,013 Epoch 831/2000 +2025-03-25 08:56:04,089 Current Learning Rate: 0.0009418828 +2025-03-25 08:56:04,089 Train Loss: 0.0000981, Val Loss: 0.0000884 +2025-03-25 08:56:04,090 Epoch 832/2000 +2025-03-25 09:00:44,352 Current Learning Rate: 0.0009381533 +2025-03-25 09:00:44,352 Train Loss: 0.0000941, Val Loss: 0.0000948 +2025-03-25 09:00:44,353 Epoch 833/2000 +2025-03-25 09:05:24,433 Current Learning Rate: 0.0009343158 +2025-03-25 09:05:24,433 Train Loss: 0.0001076, Val Loss: 0.0001104 +2025-03-25 09:05:24,433 Epoch 834/2000 +2025-03-25 09:10:04,112 Current Learning 
Rate: 0.0009303710 +2025-03-25 09:10:04,112 Train Loss: 0.0001180, Val Loss: 0.0000888 +2025-03-25 09:10:04,113 Epoch 835/2000 +2025-03-25 09:14:44,292 Current Learning Rate: 0.0009263201 +2025-03-25 09:14:44,293 Train Loss: 0.0000908, Val Loss: 0.0000907 +2025-03-25 09:14:44,294 Epoch 836/2000 +2025-03-25 09:19:24,381 Current Learning Rate: 0.0009221640 +2025-03-25 09:19:24,381 Train Loss: 0.0000892, Val Loss: 0.0000961 +2025-03-25 09:19:24,382 Epoch 837/2000 +2025-03-25 09:24:04,890 Current Learning Rate: 0.0009179037 +2025-03-25 09:24:04,891 Train Loss: 0.0000709, Val Loss: 0.0000890 +2025-03-25 09:24:04,891 Epoch 838/2000 +2025-03-25 09:28:46,205 Current Learning Rate: 0.0009135403 +2025-03-25 09:28:46,205 Train Loss: 0.0000927, Val Loss: 0.0000923 +2025-03-25 09:28:46,206 Epoch 839/2000 +2025-03-25 09:33:26,281 Current Learning Rate: 0.0009090749 +2025-03-25 09:33:26,281 Train Loss: 0.0000925, Val Loss: 0.0000896 +2025-03-25 09:33:26,281 Epoch 840/2000 +2025-03-25 09:38:07,215 Current Learning Rate: 0.0009045085 +2025-03-25 09:38:07,216 Train Loss: 0.0000773, Val Loss: 0.0000859 +2025-03-25 09:38:07,216 Epoch 841/2000 +2025-03-25 09:42:47,618 Current Learning Rate: 0.0008998423 +2025-03-25 09:42:47,619 Train Loss: 0.0001085, Val Loss: 0.0001131 +2025-03-25 09:42:47,619 Epoch 842/2000 +2025-03-25 09:47:28,155 Current Learning Rate: 0.0008950775 +2025-03-25 09:47:28,156 Train Loss: 0.0001050, Val Loss: 0.0000940 +2025-03-25 09:47:28,156 Epoch 843/2000 +2025-03-25 09:52:08,134 Current Learning Rate: 0.0008902152 +2025-03-25 09:52:08,135 Train Loss: 0.0000810, Val Loss: 0.0000964 +2025-03-25 09:52:08,135 Epoch 844/2000 +2025-03-25 09:56:48,577 Current Learning Rate: 0.0008852566 +2025-03-25 09:56:48,578 Train Loss: 0.0000927, Val Loss: 0.0000973 +2025-03-25 09:56:48,578 Epoch 845/2000 +2025-03-25 10:01:29,354 Current Learning Rate: 0.0008802030 +2025-03-25 10:01:29,355 Train Loss: 0.0000779, Val Loss: 0.0000838 +2025-03-25 10:01:29,355 Epoch 846/2000 +2025-03-25 
10:06:09,635 Current Learning Rate: 0.0008750555 +2025-03-25 10:06:09,635 Train Loss: 0.0000973, Val Loss: 0.0000913 +2025-03-25 10:06:09,636 Epoch 847/2000 +2025-03-25 10:10:49,275 Current Learning Rate: 0.0008698155 +2025-03-25 10:10:49,276 Train Loss: 0.0000889, Val Loss: 0.0000845 +2025-03-25 10:10:49,276 Epoch 848/2000 +2025-03-25 10:15:29,426 Current Learning Rate: 0.0008644843 +2025-03-25 10:15:29,427 Train Loss: 0.0000999, Val Loss: 0.0000843 +2025-03-25 10:15:29,427 Epoch 849/2000 +2025-03-25 10:20:09,813 Current Learning Rate: 0.0008590631 +2025-03-25 10:20:09,813 Train Loss: 0.0000942, Val Loss: 0.0000922 +2025-03-25 10:20:09,814 Epoch 850/2000 +2025-03-25 10:24:49,080 Current Learning Rate: 0.0008535534 +2025-03-25 10:24:49,080 Train Loss: 0.0001020, Val Loss: 0.0000856 +2025-03-25 10:24:49,080 Epoch 851/2000 +2025-03-25 10:29:29,766 Current Learning Rate: 0.0008479564 +2025-03-25 10:29:29,767 Train Loss: 0.0000727, Val Loss: 0.0000780 +2025-03-25 10:29:29,767 Epoch 852/2000 +2025-03-25 10:34:10,593 Current Learning Rate: 0.0008422736 +2025-03-25 10:34:10,594 Train Loss: 0.0000784, Val Loss: 0.0000791 +2025-03-25 10:34:10,594 Epoch 853/2000 +2025-03-25 10:38:51,991 Current Learning Rate: 0.0008365063 +2025-03-25 10:38:51,991 Train Loss: 0.0000722, Val Loss: 0.0000832 +2025-03-25 10:38:51,991 Epoch 854/2000 +2025-03-25 10:43:32,441 Current Learning Rate: 0.0008306559 +2025-03-25 10:43:32,442 Train Loss: 0.0000766, Val Loss: 0.0000792 +2025-03-25 10:43:32,442 Epoch 855/2000 +2025-03-25 10:48:12,968 Current Learning Rate: 0.0008247240 +2025-03-25 10:48:12,968 Train Loss: 0.0000841, Val Loss: 0.0000846 +2025-03-25 10:48:12,968 Epoch 856/2000 +2025-03-25 10:52:53,018 Current Learning Rate: 0.0008187120 +2025-03-25 10:52:53,019 Train Loss: 0.0000855, Val Loss: 0.0000893 +2025-03-25 10:52:53,019 Epoch 857/2000 +2025-03-25 10:57:34,517 Current Learning Rate: 0.0008126213 +2025-03-25 10:57:34,518 Train Loss: 0.0000804, Val Loss: 0.0000894 +2025-03-25 
10:57:34,518 Epoch 858/2000 +2025-03-25 11:02:15,909 Current Learning Rate: 0.0008064535 +2025-03-25 11:02:15,909 Train Loss: 0.0000757, Val Loss: 0.0000900 +2025-03-25 11:02:15,910 Epoch 859/2000 +2025-03-25 11:06:57,191 Current Learning Rate: 0.0008002101 +2025-03-25 11:06:57,192 Train Loss: 0.0000955, Val Loss: 0.0001602 +2025-03-25 11:06:57,192 Epoch 860/2000 +2025-03-25 11:11:37,778 Current Learning Rate: 0.0007938926 +2025-03-25 11:11:37,779 Train Loss: 0.0000829, Val Loss: 0.0000821 +2025-03-25 11:11:37,779 Epoch 861/2000 +2025-03-25 11:16:18,631 Current Learning Rate: 0.0007875026 +2025-03-25 11:16:18,631 Train Loss: 0.0000871, Val Loss: 0.0000823 +2025-03-25 11:16:18,632 Epoch 862/2000 +2025-03-25 11:20:59,743 Current Learning Rate: 0.0007810417 +2025-03-25 11:20:59,744 Train Loss: 0.0000920, Val Loss: 0.0000974 +2025-03-25 11:20:59,744 Epoch 863/2000 +2025-03-25 11:25:39,662 Current Learning Rate: 0.0007745114 +2025-03-25 11:25:39,663 Train Loss: 0.0000684, Val Loss: 0.0000798 +2025-03-25 11:25:39,663 Epoch 864/2000 +2025-03-25 11:30:20,766 Current Learning Rate: 0.0007679134 +2025-03-25 11:30:20,766 Train Loss: 0.0001129, Val Loss: 0.0000908 +2025-03-25 11:30:20,767 Epoch 865/2000 +2025-03-25 11:35:00,931 Current Learning Rate: 0.0007612493 +2025-03-25 11:35:00,932 Train Loss: 0.0000877, Val Loss: 0.0000827 +2025-03-25 11:35:00,932 Epoch 866/2000 +2025-03-25 11:39:41,017 Current Learning Rate: 0.0007545207 +2025-03-25 11:39:41,017 Train Loss: 0.0000829, Val Loss: 0.0000783 +2025-03-25 11:39:41,018 Epoch 867/2000 +2025-03-25 11:44:21,868 Current Learning Rate: 0.0007477293 +2025-03-25 11:44:21,869 Train Loss: 0.0000707, Val Loss: 0.0000841 +2025-03-25 11:44:21,869 Epoch 868/2000 +2025-03-25 11:49:02,634 Current Learning Rate: 0.0007408768 +2025-03-25 11:49:02,634 Train Loss: 0.0000877, Val Loss: 0.0000781 +2025-03-25 11:49:02,634 Epoch 869/2000 +2025-03-25 11:53:43,438 Current Learning Rate: 0.0007339649 +2025-03-25 11:53:43,438 Train Loss: 0.0000907, Val 
Loss: 0.0000830 +2025-03-25 11:53:43,439 Epoch 870/2000 +2025-03-25 11:58:23,475 Current Learning Rate: 0.0007269952 +2025-03-25 11:58:23,476 Train Loss: 0.0001018, Val Loss: 0.0000964 +2025-03-25 11:58:23,476 Epoch 871/2000 +2025-03-25 12:03:03,751 Current Learning Rate: 0.0007199696 +2025-03-25 12:03:03,751 Train Loss: 0.0000762, Val Loss: 0.0000790 +2025-03-25 12:03:03,752 Epoch 872/2000 +2025-03-25 12:07:43,960 Current Learning Rate: 0.0007128896 +2025-03-25 12:07:43,961 Train Loss: 0.0000872, Val Loss: 0.0000798 +2025-03-25 12:07:43,962 Epoch 873/2000 +2025-03-25 12:12:24,976 Current Learning Rate: 0.0007057572 +2025-03-25 12:12:24,976 Train Loss: 0.0000589, Val Loss: 0.0000741 +2025-03-25 12:12:24,977 Epoch 874/2000 +2025-03-25 12:17:05,381 Current Learning Rate: 0.0006985739 +2025-03-25 12:17:05,382 Train Loss: 0.0000752, Val Loss: 0.0000884 +2025-03-25 12:17:05,382 Epoch 875/2000 +2025-03-25 12:21:46,844 Current Learning Rate: 0.0006913417 +2025-03-25 12:21:46,845 Train Loss: 0.0000844, Val Loss: 0.0000813 +2025-03-25 12:21:46,845 Epoch 876/2000 +2025-03-25 12:26:28,272 Current Learning Rate: 0.0006840623 +2025-03-25 12:26:28,273 Train Loss: 0.0000957, Val Loss: 0.0000858 +2025-03-25 12:26:28,273 Epoch 877/2000 +2025-03-25 12:31:08,535 Current Learning Rate: 0.0006767374 +2025-03-25 12:31:08,535 Train Loss: 0.0000970, Val Loss: 0.0000796 +2025-03-25 12:31:08,535 Epoch 878/2000 +2025-03-25 12:35:48,837 Current Learning Rate: 0.0006693690 +2025-03-25 12:35:48,838 Train Loss: 0.0000777, Val Loss: 0.0000787 +2025-03-25 12:35:48,838 Epoch 879/2000 +2025-03-25 12:40:29,702 Current Learning Rate: 0.0006619587 +2025-03-25 12:40:29,703 Train Loss: 0.0000830, Val Loss: 0.0000875 +2025-03-25 12:40:29,704 Epoch 880/2000 +2025-03-25 12:45:10,432 Current Learning Rate: 0.0006545085 +2025-03-25 12:45:10,433 Train Loss: 0.0000798, Val Loss: 0.0000788 +2025-03-25 12:45:10,433 Epoch 881/2000 +2025-03-25 12:49:50,711 Current Learning Rate: 0.0006470202 +2025-03-25 
12:49:50,712 Train Loss: 0.0000703, Val Loss: 0.0000774 +2025-03-25 12:49:50,712 Epoch 882/2000 +2025-03-25 12:54:31,662 Current Learning Rate: 0.0006394956 +2025-03-25 12:54:31,662 Train Loss: 0.0001111, Val Loss: 0.0000758 +2025-03-25 12:54:31,663 Epoch 883/2000 +2025-03-25 12:59:12,698 Current Learning Rate: 0.0006319365 +2025-03-25 12:59:12,699 Train Loss: 0.0000709, Val Loss: 0.0000803 +2025-03-25 12:59:12,699 Epoch 884/2000 +2025-03-25 13:03:53,795 Current Learning Rate: 0.0006243449 +2025-03-25 13:03:53,795 Train Loss: 0.0000753, Val Loss: 0.0000764 +2025-03-25 13:03:53,795 Epoch 885/2000 +2025-03-25 13:08:35,042 Current Learning Rate: 0.0006167227 +2025-03-25 13:08:35,042 Train Loss: 0.0000711, Val Loss: 0.0000762 +2025-03-25 13:08:35,042 Epoch 886/2000 +2025-03-25 13:13:15,427 Current Learning Rate: 0.0006090716 +2025-03-25 13:13:15,428 Train Loss: 0.0000736, Val Loss: 0.0000760 +2025-03-25 13:13:15,428 Epoch 887/2000 +2025-03-25 13:17:55,885 Current Learning Rate: 0.0006013936 +2025-03-25 13:17:55,885 Train Loss: 0.0000781, Val Loss: 0.0000747 +2025-03-25 13:17:55,885 Epoch 888/2000 +2025-03-25 13:22:37,576 Current Learning Rate: 0.0005936907 +2025-03-25 13:22:38,717 Train Loss: 0.0000649, Val Loss: 0.0000711 +2025-03-25 13:22:38,718 Epoch 889/2000 +2025-03-25 13:27:19,301 Current Learning Rate: 0.0005859646 +2025-03-25 13:27:19,302 Train Loss: 0.0000858, Val Loss: 0.0000786 +2025-03-25 13:27:19,302 Epoch 890/2000 +2025-03-25 13:31:59,446 Current Learning Rate: 0.0005782172 +2025-03-25 13:31:59,447 Train Loss: 0.0001114, Val Loss: 0.0000862 +2025-03-25 13:31:59,447 Epoch 891/2000 +2025-03-25 13:36:39,967 Current Learning Rate: 0.0005704506 +2025-03-25 13:36:39,967 Train Loss: 0.0000716, Val Loss: 0.0000724 +2025-03-25 13:36:39,968 Epoch 892/2000 +2025-03-25 13:41:20,468 Current Learning Rate: 0.0005626666 +2025-03-25 13:41:20,469 Train Loss: 0.0000701, Val Loss: 0.0000923 +2025-03-25 13:41:20,469 Epoch 893/2000 +2025-03-25 13:46:00,906 Current Learning 
Rate: 0.0005548672 +2025-03-25 13:46:00,906 Train Loss: 0.0000704, Val Loss: 0.0000717 +2025-03-25 13:46:00,907 Epoch 894/2000 +2025-03-25 13:50:41,858 Current Learning Rate: 0.0005470542 +2025-03-25 13:50:41,859 Train Loss: 0.0000774, Val Loss: 0.0000712 +2025-03-25 13:50:41,859 Epoch 895/2000 +2025-03-25 13:55:23,241 Current Learning Rate: 0.0005392295 +2025-03-25 13:55:24,201 Train Loss: 0.0000682, Val Loss: 0.0000694 +2025-03-25 13:55:24,201 Epoch 896/2000 +2025-03-25 14:00:03,995 Current Learning Rate: 0.0005313953 +2025-03-25 14:00:03,996 Train Loss: 0.0000778, Val Loss: 0.0000720 +2025-03-25 14:00:03,996 Epoch 897/2000 +2025-03-25 14:04:45,045 Current Learning Rate: 0.0005235532 +2025-03-25 14:04:45,046 Train Loss: 0.0000548, Val Loss: 0.0000728 +2025-03-25 14:04:45,046 Epoch 898/2000 +2025-03-25 14:09:26,238 Current Learning Rate: 0.0005157054 +2025-03-25 14:09:26,239 Train Loss: 0.0001225, Val Loss: 0.0000738 +2025-03-25 14:09:26,239 Epoch 899/2000 +2025-03-25 14:14:07,335 Current Learning Rate: 0.0005078537 +2025-03-25 14:14:07,335 Train Loss: 0.0000755, Val Loss: 0.0000721 +2025-03-25 14:14:07,336 Epoch 900/2000 +2025-03-25 14:18:47,753 Current Learning Rate: 0.0005000000 +2025-03-25 14:18:47,754 Train Loss: 0.0000634, Val Loss: 0.0000717 +2025-03-25 14:18:47,754 Epoch 901/2000 +2025-03-25 14:23:28,526 Current Learning Rate: 0.0004921463 +2025-03-25 14:23:28,527 Train Loss: 0.0000711, Val Loss: 0.0000774 +2025-03-25 14:23:28,527 Epoch 902/2000 +2025-03-25 14:28:10,364 Current Learning Rate: 0.0004842946 +2025-03-25 14:28:10,364 Train Loss: 0.0000760, Val Loss: 0.0000707 +2025-03-25 14:28:10,365 Epoch 903/2000 +2025-03-25 14:32:51,044 Current Learning Rate: 0.0004764468 +2025-03-25 14:32:51,906 Train Loss: 0.0000593, Val Loss: 0.0000684 +2025-03-25 14:32:51,906 Epoch 904/2000 +2025-03-25 14:37:31,120 Current Learning Rate: 0.0004686047 +2025-03-25 14:37:31,998 Train Loss: 0.0000595, Val Loss: 0.0000679 +2025-03-25 14:37:31,998 Epoch 905/2000 +2025-03-25 
14:42:12,523 Current Learning Rate: 0.0004607705 +2025-03-25 14:42:12,524 Train Loss: 0.0000695, Val Loss: 0.0000709 +2025-03-25 14:42:12,524 Epoch 906/2000 +2025-03-25 14:46:53,063 Current Learning Rate: 0.0004529458 +2025-03-25 14:46:53,064 Train Loss: 0.0000836, Val Loss: 0.0000725 +2025-03-25 14:46:53,064 Epoch 907/2000 +2025-03-25 14:51:34,416 Current Learning Rate: 0.0004451328 +2025-03-25 14:51:34,417 Train Loss: 0.0000620, Val Loss: 0.0000687 +2025-03-25 14:51:34,417 Epoch 908/2000 +2025-03-25 14:56:15,976 Current Learning Rate: 0.0004373334 +2025-03-25 14:56:15,976 Train Loss: 0.0000554, Val Loss: 0.0000699 +2025-03-25 14:56:15,977 Epoch 909/2000 +2025-03-25 15:00:56,783 Current Learning Rate: 0.0004295494 +2025-03-25 15:00:56,784 Train Loss: 0.0000677, Val Loss: 0.0000681 +2025-03-25 15:00:56,784 Epoch 910/2000 +2025-03-25 15:05:37,565 Current Learning Rate: 0.0004217828 +2025-03-25 15:05:37,565 Train Loss: 0.0000675, Val Loss: 0.0000722 +2025-03-25 15:05:37,565 Epoch 911/2000 +2025-03-25 15:10:18,440 Current Learning Rate: 0.0004140354 +2025-03-25 15:10:18,440 Train Loss: 0.0000657, Val Loss: 0.0000709 +2025-03-25 15:10:18,440 Epoch 912/2000 +2025-03-25 15:14:59,563 Current Learning Rate: 0.0004063093 +2025-03-25 15:14:59,564 Train Loss: 0.0000715, Val Loss: 0.0000686 +2025-03-25 15:14:59,564 Epoch 913/2000 +2025-03-25 15:19:40,151 Current Learning Rate: 0.0003986064 +2025-03-25 15:19:40,151 Train Loss: 0.0000832, Val Loss: 0.0000697 +2025-03-25 15:19:40,151 Epoch 914/2000 +2025-03-25 15:24:20,747 Current Learning Rate: 0.0003909284 +2025-03-25 15:24:20,748 Train Loss: 0.0000648, Val Loss: 0.0000684 +2025-03-25 15:24:20,748 Epoch 915/2000 +2025-03-25 15:29:01,434 Current Learning Rate: 0.0003832773 +2025-03-25 15:29:02,380 Train Loss: 0.0000615, Val Loss: 0.0000672 +2025-03-25 15:29:02,381 Epoch 916/2000 +2025-03-25 15:33:42,753 Current Learning Rate: 0.0003756551 +2025-03-25 15:33:42,754 Train Loss: 0.0000672, Val Loss: 0.0000702 +2025-03-25 
15:33:42,754 Epoch 917/2000 +2025-03-25 15:38:23,507 Current Learning Rate: 0.0003680635 +2025-03-25 15:38:23,509 Train Loss: 0.0000706, Val Loss: 0.0000694 +2025-03-25 15:38:23,510 Epoch 918/2000 +2025-03-25 15:43:04,764 Current Learning Rate: 0.0003605044 +2025-03-25 15:43:04,765 Train Loss: 0.0000725, Val Loss: 0.0000687 +2025-03-25 15:43:04,765 Epoch 919/2000 +2025-03-25 15:47:45,727 Current Learning Rate: 0.0003529798 +2025-03-25 15:47:45,728 Train Loss: 0.0000661, Val Loss: 0.0000714 +2025-03-25 15:47:45,728 Epoch 920/2000 +2025-03-25 15:52:26,771 Current Learning Rate: 0.0003454915 +2025-03-25 15:52:27,971 Train Loss: 0.0000594, Val Loss: 0.0000664 +2025-03-25 15:52:27,972 Epoch 921/2000 +2025-03-25 15:57:08,355 Current Learning Rate: 0.0003380413 +2025-03-25 15:57:08,356 Train Loss: 0.0000594, Val Loss: 0.0000676 +2025-03-25 15:57:08,356 Epoch 922/2000 +2025-03-25 16:01:50,455 Current Learning Rate: 0.0003306310 +2025-03-25 16:01:50,455 Train Loss: 0.0000611, Val Loss: 0.0000666 +2025-03-25 16:01:50,456 Epoch 923/2000 +2025-03-25 16:06:32,447 Current Learning Rate: 0.0003232626 +2025-03-25 16:06:32,451 Train Loss: 0.0000624, Val Loss: 0.0000666 +2025-03-25 16:06:32,454 Epoch 924/2000 +2025-03-25 16:11:12,962 Current Learning Rate: 0.0003159377 +2025-03-25 16:11:12,963 Train Loss: 0.0000638, Val Loss: 0.0000686 +2025-03-25 16:11:12,963 Epoch 925/2000 +2025-03-25 16:15:54,104 Current Learning Rate: 0.0003086583 +2025-03-25 16:15:55,001 Train Loss: 0.0000549, Val Loss: 0.0000658 +2025-03-25 16:15:55,001 Epoch 926/2000 +2025-03-25 16:20:35,391 Current Learning Rate: 0.0003014261 +2025-03-25 16:20:35,392 Train Loss: 0.0000707, Val Loss: 0.0000662 +2025-03-25 16:20:35,392 Epoch 927/2000 +2025-03-25 16:25:15,784 Current Learning Rate: 0.0002942428 +2025-03-25 16:25:15,784 Train Loss: 0.0000581, Val Loss: 0.0000667 +2025-03-25 16:25:15,785 Epoch 928/2000 +2025-03-25 16:29:56,495 Current Learning Rate: 0.0002871104 +2025-03-25 16:29:56,495 Train Loss: 0.0000553, Val 
Loss: 0.0000678 +2025-03-25 16:29:56,495 Epoch 929/2000 +2025-03-25 16:34:37,849 Current Learning Rate: 0.0002800304 +2025-03-25 16:34:37,850 Train Loss: 0.0000688, Val Loss: 0.0000670 +2025-03-25 16:34:37,850 Epoch 930/2000 +2025-03-25 16:39:18,622 Current Learning Rate: 0.0002730048 +2025-03-25 16:39:18,623 Train Loss: 0.0000607, Val Loss: 0.0000684 +2025-03-25 16:39:18,623 Epoch 931/2000 +2025-03-25 16:43:59,440 Current Learning Rate: 0.0002660351 +2025-03-25 16:43:59,440 Train Loss: 0.0000719, Val Loss: 0.0000666 +2025-03-25 16:43:59,441 Epoch 932/2000 +2025-03-25 16:48:40,146 Current Learning Rate: 0.0002591232 +2025-03-25 16:48:41,143 Train Loss: 0.0000635, Val Loss: 0.0000654 +2025-03-25 16:48:41,143 Epoch 933/2000 +2025-03-25 16:53:20,940 Current Learning Rate: 0.0002522707 +2025-03-25 16:53:20,941 Train Loss: 0.0000549, Val Loss: 0.0000658 +2025-03-25 16:53:20,941 Epoch 934/2000 +2025-03-25 16:58:01,307 Current Learning Rate: 0.0002454793 +2025-03-25 16:58:02,382 Train Loss: 0.0000618, Val Loss: 0.0000650 +2025-03-25 16:58:02,383 Epoch 935/2000 +2025-03-25 17:02:42,846 Current Learning Rate: 0.0002387507 +2025-03-25 17:02:43,817 Train Loss: 0.0000566, Val Loss: 0.0000650 +2025-03-25 17:02:43,818 Epoch 936/2000 +2025-03-25 17:07:24,068 Current Learning Rate: 0.0002320866 +2025-03-25 17:07:25,108 Train Loss: 0.0000508, Val Loss: 0.0000647 +2025-03-25 17:07:25,109 Epoch 937/2000 +2025-03-25 17:12:04,980 Current Learning Rate: 0.0002254886 +2025-03-25 17:12:06,295 Train Loss: 0.0000589, Val Loss: 0.0000644 +2025-03-25 17:12:06,296 Epoch 938/2000 +2025-03-25 17:16:46,840 Current Learning Rate: 0.0002189583 +2025-03-25 17:16:47,766 Train Loss: 0.0000549, Val Loss: 0.0000643 +2025-03-25 17:16:47,766 Epoch 939/2000 +2025-03-25 17:21:27,778 Current Learning Rate: 0.0002124974 +2025-03-25 17:21:27,779 Train Loss: 0.0000618, Val Loss: 0.0000643 +2025-03-25 17:21:27,779 Epoch 940/2000 +2025-03-25 17:26:09,157 Current Learning Rate: 0.0002061074 +2025-03-25 
17:26:09,157 Train Loss: 0.0000597, Val Loss: 0.0000647 +2025-03-25 17:26:09,157 Epoch 941/2000 +2025-03-25 17:30:50,008 Current Learning Rate: 0.0001997899 +2025-03-25 17:30:51,284 Train Loss: 0.0000597, Val Loss: 0.0000638 +2025-03-25 17:30:51,284 Epoch 942/2000 +2025-03-25 17:35:31,861 Current Learning Rate: 0.0001935465 +2025-03-25 17:35:31,862 Train Loss: 0.0000529, Val Loss: 0.0000638 +2025-03-25 17:35:31,862 Epoch 943/2000 +2025-03-25 17:40:12,127 Current Learning Rate: 0.0001873787 +2025-03-25 17:40:12,127 Train Loss: 0.0000616, Val Loss: 0.0000658 +2025-03-25 17:40:12,130 Epoch 944/2000 +2025-03-25 17:44:52,938 Current Learning Rate: 0.0001812880 +2025-03-25 17:44:52,938 Train Loss: 0.0000556, Val Loss: 0.0000641 +2025-03-25 17:44:52,939 Epoch 945/2000 +2025-03-25 17:49:34,004 Current Learning Rate: 0.0001752760 +2025-03-25 17:49:34,005 Train Loss: 0.0000550, Val Loss: 0.0000647 +2025-03-25 17:49:34,005 Epoch 946/2000 +2025-03-25 17:54:14,939 Current Learning Rate: 0.0001693441 +2025-03-25 17:54:14,940 Train Loss: 0.0000581, Val Loss: 0.0000641 +2025-03-25 17:54:14,940 Epoch 947/2000 +2025-03-25 17:58:56,231 Current Learning Rate: 0.0001634937 +2025-03-25 17:58:56,231 Train Loss: 0.0000532, Val Loss: 0.0000644 +2025-03-25 17:58:56,232 Epoch 948/2000 +2025-03-25 18:03:37,568 Current Learning Rate: 0.0001577264 +2025-03-25 18:03:37,569 Train Loss: 0.0000627, Val Loss: 0.0000657 +2025-03-25 18:03:37,569 Epoch 949/2000 +2025-03-25 18:08:18,776 Current Learning Rate: 0.0001520436 +2025-03-25 18:08:18,777 Train Loss: 0.0000490, Val Loss: 0.0000640 +2025-03-25 18:08:18,777 Epoch 950/2000 +2025-03-25 18:12:59,488 Current Learning Rate: 0.0001464466 +2025-03-25 18:13:00,365 Train Loss: 0.0000588, Val Loss: 0.0000636 +2025-03-25 18:13:00,365 Epoch 951/2000 +2025-03-25 18:17:40,613 Current Learning Rate: 0.0001409369 +2025-03-25 18:17:41,591 Train Loss: 0.0000489, Val Loss: 0.0000630 +2025-03-25 18:17:41,591 Epoch 952/2000 +2025-03-25 18:22:21,546 Current Learning 
Rate: 0.0001355157 +2025-03-25 18:22:22,494 Train Loss: 0.0000532, Val Loss: 0.0000630 +2025-03-25 18:22:22,494 Epoch 953/2000 +2025-03-25 18:27:02,524 Current Learning Rate: 0.0001301845 +2025-03-25 18:27:02,525 Train Loss: 0.0000542, Val Loss: 0.0000633 +2025-03-25 18:27:02,525 Epoch 954/2000 +2025-03-25 18:31:43,897 Current Learning Rate: 0.0001249445 +2025-03-25 18:31:43,897 Train Loss: 0.0000700, Val Loss: 0.0000637 +2025-03-25 18:31:43,898 Epoch 955/2000 +2025-03-25 18:36:24,787 Current Learning Rate: 0.0001197970 +2025-03-25 18:36:24,787 Train Loss: 0.0000608, Val Loss: 0.0000633 +2025-03-25 18:36:24,788 Epoch 956/2000 +2025-03-25 18:41:05,502 Current Learning Rate: 0.0001147434 +2025-03-25 18:41:06,480 Train Loss: 0.0000568, Val Loss: 0.0000625 +2025-03-25 18:41:06,480 Epoch 957/2000 +2025-03-25 18:45:47,071 Current Learning Rate: 0.0001097848 +2025-03-25 18:45:47,074 Train Loss: 0.0000507, Val Loss: 0.0000628 +2025-03-25 18:45:47,074 Epoch 958/2000 +2025-03-25 18:50:28,183 Current Learning Rate: 0.0001049225 +2025-03-25 18:50:28,184 Train Loss: 0.0000506, Val Loss: 0.0000626 +2025-03-25 18:50:28,184 Epoch 959/2000 +2025-03-25 18:55:09,794 Current Learning Rate: 0.0001001577 +2025-03-25 18:55:09,794 Train Loss: 0.0000574, Val Loss: 0.0000634 +2025-03-25 18:55:09,795 Epoch 960/2000 +2025-03-25 18:59:50,874 Current Learning Rate: 0.0000954915 +2025-03-25 18:59:50,874 Train Loss: 0.0000571, Val Loss: 0.0000626 +2025-03-25 18:59:50,874 Epoch 961/2000 +2025-03-25 19:04:31,089 Current Learning Rate: 0.0000909251 +2025-03-25 19:04:32,200 Train Loss: 0.0000467, Val Loss: 0.0000620 +2025-03-25 19:04:32,200 Epoch 962/2000 +2025-03-25 19:09:12,147 Current Learning Rate: 0.0000864597 +2025-03-25 19:09:13,186 Train Loss: 0.0000539, Val Loss: 0.0000619 +2025-03-25 19:09:13,186 Epoch 963/2000 +2025-03-25 19:13:53,147 Current Learning Rate: 0.0000820963 +2025-03-25 19:13:53,148 Train Loss: 0.0000708, Val Loss: 0.0000621 +2025-03-25 19:13:53,148 Epoch 964/2000 +2025-03-25 
19:18:33,707 Current Learning Rate: 0.0000778360 +2025-03-25 19:18:34,789 Train Loss: 0.0000540, Val Loss: 0.0000619 +2025-03-25 19:18:34,790 Epoch 965/2000 +2025-03-25 19:23:14,658 Current Learning Rate: 0.0000736799 +2025-03-25 19:23:15,579 Train Loss: 0.0000546, Val Loss: 0.0000618 +2025-03-25 19:23:15,579 Epoch 966/2000 +2025-03-25 19:27:55,420 Current Learning Rate: 0.0000696290 +2025-03-25 19:27:56,557 Train Loss: 0.0000542, Val Loss: 0.0000617 +2025-03-25 19:27:56,558 Epoch 967/2000 +2025-03-25 19:32:36,677 Current Learning Rate: 0.0000656842 +2025-03-25 19:32:37,700 Train Loss: 0.0000596, Val Loss: 0.0000616 +2025-03-25 19:32:37,700 Epoch 968/2000 +2025-03-25 19:37:17,803 Current Learning Rate: 0.0000618467 +2025-03-25 19:37:18,773 Train Loss: 0.0000535, Val Loss: 0.0000616 +2025-03-25 19:37:18,773 Epoch 969/2000 +2025-03-25 19:41:58,825 Current Learning Rate: 0.0000581172 +2025-03-25 19:41:59,794 Train Loss: 0.0000561, Val Loss: 0.0000615 +2025-03-25 19:41:59,794 Epoch 970/2000 +2025-03-25 19:46:39,940 Current Learning Rate: 0.0000544967 +2025-03-25 19:46:39,941 Train Loss: 0.0000559, Val Loss: 0.0000617 +2025-03-25 19:46:39,941 Epoch 971/2000 +2025-03-25 19:51:19,951 Current Learning Rate: 0.0000509862 +2025-03-25 19:51:19,952 Train Loss: 0.0000507, Val Loss: 0.0000615 +2025-03-25 19:51:19,952 Epoch 972/2000 +2025-03-25 19:56:01,374 Current Learning Rate: 0.0000475865 +2025-03-25 19:56:02,655 Train Loss: 0.0000488, Val Loss: 0.0000613 +2025-03-25 19:56:02,655 Epoch 973/2000 +2025-03-25 20:00:42,435 Current Learning Rate: 0.0000442984 +2025-03-25 20:00:43,414 Train Loss: 0.0000545, Val Loss: 0.0000613 +2025-03-25 20:00:43,414 Epoch 974/2000 +2025-03-25 20:05:23,675 Current Learning Rate: 0.0000411227 +2025-03-25 20:05:24,657 Train Loss: 0.0000584, Val Loss: 0.0000613 +2025-03-25 20:05:24,658 Epoch 975/2000 +2025-03-25 20:10:04,492 Current Learning Rate: 0.0000380602 +2025-03-25 20:10:05,502 Train Loss: 0.0000531, Val Loss: 0.0000613 +2025-03-25 
20:10:05,502 Epoch 976/2000 +2025-03-25 20:14:46,420 Current Learning Rate: 0.0000351118 +2025-03-25 20:14:47,369 Train Loss: 0.0000585, Val Loss: 0.0000612 +2025-03-25 20:14:47,370 Epoch 977/2000 +2025-03-25 20:19:27,321 Current Learning Rate: 0.0000322780 +2025-03-25 20:19:27,322 Train Loss: 0.0000560, Val Loss: 0.0000612 +2025-03-25 20:19:27,322 Epoch 978/2000 +2025-03-25 20:24:09,050 Current Learning Rate: 0.0000295596 +2025-03-25 20:24:10,008 Train Loss: 0.0000569, Val Loss: 0.0000612 +2025-03-25 20:24:10,009 Epoch 979/2000 +2025-03-25 20:28:50,092 Current Learning Rate: 0.0000269573 +2025-03-25 20:28:50,952 Train Loss: 0.0000571, Val Loss: 0.0000611 +2025-03-25 20:28:50,952 Epoch 980/2000 +2025-03-25 20:33:31,375 Current Learning Rate: 0.0000244717 +2025-03-25 20:33:31,376 Train Loss: 0.0000524, Val Loss: 0.0000612 +2025-03-25 20:33:31,377 Epoch 981/2000 +2025-03-25 20:38:12,551 Current Learning Rate: 0.0000221035 +2025-03-25 20:38:13,590 Train Loss: 0.0000502, Val Loss: 0.0000611 +2025-03-25 20:38:13,592 Epoch 982/2000 +2025-03-25 20:42:53,556 Current Learning Rate: 0.0000198532 +2025-03-25 20:42:54,490 Train Loss: 0.0000516, Val Loss: 0.0000611 +2025-03-25 20:42:54,490 Epoch 983/2000 +2025-03-25 20:47:34,272 Current Learning Rate: 0.0000177213 +2025-03-25 20:47:35,167 Train Loss: 0.0000590, Val Loss: 0.0000611 +2025-03-25 20:47:35,167 Epoch 984/2000 +2025-03-25 20:52:14,585 Current Learning Rate: 0.0000157084 +2025-03-25 20:52:15,435 Train Loss: 0.0000503, Val Loss: 0.0000610 +2025-03-25 20:52:15,435 Epoch 985/2000 +2025-03-25 20:56:54,907 Current Learning Rate: 0.0000138150 +2025-03-25 20:56:55,911 Train Loss: 0.0000564, Val Loss: 0.0000610 +2025-03-25 20:56:55,911 Epoch 986/2000 +2025-03-25 21:01:35,414 Current Learning Rate: 0.0000120416 +2025-03-25 21:01:35,415 Train Loss: 0.0000586, Val Loss: 0.0000611 +2025-03-25 21:01:35,415 Epoch 987/2000 +2025-03-25 21:06:16,027 Current Learning Rate: 0.0000103886 +2025-03-25 21:06:16,027 Train Loss: 0.0000621, Val 
Loss: 0.0000610 +2025-03-25 21:06:16,028 Epoch 988/2000 +2025-03-25 21:10:56,756 Current Learning Rate: 0.0000088564 +2025-03-25 21:10:57,736 Train Loss: 0.0000550, Val Loss: 0.0000610 +2025-03-25 21:10:57,736 Epoch 989/2000 +2025-03-25 21:15:37,746 Current Learning Rate: 0.0000074453 +2025-03-25 21:15:38,749 Train Loss: 0.0000616, Val Loss: 0.0000609 +2025-03-25 21:15:38,749 Epoch 990/2000 +2025-03-25 21:20:18,839 Current Learning Rate: 0.0000061558 +2025-03-25 21:20:19,864 Train Loss: 0.0000617, Val Loss: 0.0000609 +2025-03-25 21:20:19,864 Epoch 991/2000 +2025-03-25 21:24:59,850 Current Learning Rate: 0.0000049882 +2025-03-25 21:24:59,851 Train Loss: 0.0000574, Val Loss: 0.0000609 +2025-03-25 21:24:59,851 Epoch 992/2000 +2025-03-25 21:29:41,249 Current Learning Rate: 0.0000039426 +2025-03-25 21:29:41,249 Train Loss: 0.0000660, Val Loss: 0.0000609 +2025-03-25 21:29:41,249 Epoch 993/2000 +2025-03-25 21:34:22,220 Current Learning Rate: 0.0000030195 +2025-03-25 21:34:22,220 Train Loss: 0.0000545, Val Loss: 0.0000610 +2025-03-25 21:34:22,220 Epoch 994/2000 +2025-03-25 21:39:03,407 Current Learning Rate: 0.0000022190 +2025-03-25 21:39:04,434 Train Loss: 0.0000652, Val Loss: 0.0000609 +2025-03-25 21:39:04,434 Epoch 995/2000 +2025-03-25 21:43:44,562 Current Learning Rate: 0.0000015413 +2025-03-25 21:43:44,563 Train Loss: 0.0000651, Val Loss: 0.0000609 +2025-03-25 21:43:44,563 Epoch 996/2000 +2025-03-25 21:48:25,063 Current Learning Rate: 0.0000009866 +2025-03-25 21:48:25,064 Train Loss: 0.0000592, Val Loss: 0.0000609 +2025-03-25 21:48:25,064 Epoch 997/2000 +2025-03-25 21:53:05,802 Current Learning Rate: 0.0000005551 +2025-03-25 21:53:05,802 Train Loss: 0.0000628, Val Loss: 0.0000609 +2025-03-25 21:53:05,803 Epoch 998/2000 +2025-03-25 21:57:46,335 Current Learning Rate: 0.0000002467 +2025-03-25 21:57:46,335 Train Loss: 0.0000606, Val Loss: 0.0000609 +2025-03-25 21:57:46,336 Epoch 999/2000 +2025-03-25 22:02:27,154 Current Learning Rate: 0.0000000617 +2025-03-25 
22:02:27,154 Train Loss: 0.0000530, Val Loss: 0.0000609 +2025-03-25 22:02:27,155 Epoch 1000/2000 +2025-03-25 22:07:07,448 Current Learning Rate: 0.0000000000 +2025-03-25 22:07:07,449 Train Loss: 0.0000518, Val Loss: 0.0000609 +2025-03-25 22:07:07,449 Epoch 1001/2000 +2025-03-25 22:11:48,288 Current Learning Rate: 0.0000000617 +2025-03-25 22:11:49,259 Train Loss: 0.0000671, Val Loss: 0.0000609 +2025-03-25 22:11:49,259 Epoch 1002/2000 +2025-03-25 22:16:29,566 Current Learning Rate: 0.0000002467 +2025-03-25 22:16:29,567 Train Loss: 0.0000614, Val Loss: 0.0000609 +2025-03-25 22:16:29,567 Epoch 1003/2000 +2025-03-25 22:21:10,737 Current Learning Rate: 0.0000005551 +2025-03-25 22:21:11,893 Train Loss: 0.0000549, Val Loss: 0.0000608 +2025-03-25 22:21:11,894 Epoch 1004/2000 +2025-03-25 22:25:52,281 Current Learning Rate: 0.0000009866 +2025-03-25 22:25:52,281 Train Loss: 0.0000578, Val Loss: 0.0000609 +2025-03-25 22:25:52,282 Epoch 1005/2000 +2025-03-25 22:30:33,472 Current Learning Rate: 0.0000015413 +2025-03-25 22:30:33,474 Train Loss: 0.0000638, Val Loss: 0.0000609 +2025-03-25 22:30:33,476 Epoch 1006/2000 +2025-03-25 22:35:14,221 Current Learning Rate: 0.0000022190 +2025-03-25 22:35:14,221 Train Loss: 0.0000545, Val Loss: 0.0000609 +2025-03-25 22:35:14,222 Epoch 1007/2000 +2025-03-25 22:39:54,980 Current Learning Rate: 0.0000030195 +2025-03-25 22:39:54,980 Train Loss: 0.0000551, Val Loss: 0.0000608 +2025-03-25 22:39:54,980 Epoch 1008/2000 +2025-03-25 22:44:35,450 Current Learning Rate: 0.0000039426 +2025-03-25 22:44:35,450 Train Loss: 0.0000629, Val Loss: 0.0000609 +2025-03-25 22:44:35,451 Epoch 1009/2000 +2025-03-25 22:49:16,320 Current Learning Rate: 0.0000049882 +2025-03-25 22:49:16,322 Train Loss: 0.0000606, Val Loss: 0.0000609 +2025-03-25 22:49:16,323 Epoch 1010/2000 +2025-03-25 22:53:57,407 Current Learning Rate: 0.0000061558 +2025-03-25 22:53:57,407 Train Loss: 0.0000625, Val Loss: 0.0000609 +2025-03-25 22:53:57,407 Epoch 1011/2000 +2025-03-25 22:58:38,285 Current 
Learning Rate: 0.0000074453 +2025-03-25 22:58:38,285 Train Loss: 0.0000571, Val Loss: 0.0000609 +2025-03-25 22:58:38,286 Epoch 1012/2000 +2025-03-25 23:03:19,273 Current Learning Rate: 0.0000088564 +2025-03-25 23:03:19,274 Train Loss: 0.0000547, Val Loss: 0.0000609 +2025-03-25 23:03:19,274 Epoch 1013/2000 +2025-03-25 23:08:00,353 Current Learning Rate: 0.0000103886 +2025-03-25 23:08:00,354 Train Loss: 0.0000552, Val Loss: 0.0000609 +2025-03-25 23:08:00,354 Epoch 1014/2000 +2025-03-25 23:12:41,502 Current Learning Rate: 0.0000120416 +2025-03-25 23:12:41,502 Train Loss: 0.0000567, Val Loss: 0.0000609 +2025-03-25 23:12:41,502 Epoch 1015/2000 +2025-03-25 23:17:22,807 Current Learning Rate: 0.0000138150 +2025-03-25 23:17:22,807 Train Loss: 0.0000511, Val Loss: 0.0000609 +2025-03-25 23:17:22,808 Epoch 1016/2000 +2025-03-25 23:22:03,421 Current Learning Rate: 0.0000157084 +2025-03-25 23:22:03,422 Train Loss: 0.0000582, Val Loss: 0.0000609 +2025-03-25 23:22:03,422 Epoch 1017/2000 +2025-03-25 23:26:44,611 Current Learning Rate: 0.0000177213 +2025-03-25 23:26:44,611 Train Loss: 0.0000624, Val Loss: 0.0000609 +2025-03-25 23:26:44,611 Epoch 1018/2000 +2025-03-25 23:31:25,708 Current Learning Rate: 0.0000198532 +2025-03-25 23:31:25,709 Train Loss: 0.0000494, Val Loss: 0.0000609 +2025-03-25 23:31:25,709 Epoch 1019/2000 +2025-03-25 23:36:06,655 Current Learning Rate: 0.0000221035 +2025-03-25 23:36:06,655 Train Loss: 0.0000579, Val Loss: 0.0000610 +2025-03-25 23:36:06,655 Epoch 1020/2000 +2025-03-25 23:40:47,782 Current Learning Rate: 0.0000244717 +2025-03-25 23:40:47,783 Train Loss: 0.0000578, Val Loss: 0.0000609 +2025-03-25 23:40:47,783 Epoch 1021/2000 +2025-03-25 23:45:28,492 Current Learning Rate: 0.0000269573 +2025-03-25 23:45:28,493 Train Loss: 0.0000491, Val Loss: 0.0000610 +2025-03-25 23:45:28,493 Epoch 1022/2000 +2025-03-25 23:50:09,048 Current Learning Rate: 0.0000295596 +2025-03-25 23:50:09,049 Train Loss: 0.0000602, Val Loss: 0.0000610 +2025-03-25 23:50:09,049 Epoch 
1023/2000 +2025-03-25 23:54:49,782 Current Learning Rate: 0.0000322780 +2025-03-25 23:54:49,783 Train Loss: 0.0000502, Val Loss: 0.0000610 +2025-03-25 23:54:49,783 Epoch 1024/2000 +2025-03-25 23:59:29,989 Current Learning Rate: 0.0000351118 +2025-03-25 23:59:29,989 Train Loss: 0.0000507, Val Loss: 0.0000610 +2025-03-25 23:59:29,989 Epoch 1025/2000 +2025-03-26 00:04:10,769 Current Learning Rate: 0.0000380602 +2025-03-26 00:04:10,770 Train Loss: 0.0000551, Val Loss: 0.0000611 +2025-03-26 00:04:10,770 Epoch 1026/2000 +2025-03-26 00:08:51,911 Current Learning Rate: 0.0000411227 +2025-03-26 00:08:51,911 Train Loss: 0.0000536, Val Loss: 0.0000611 +2025-03-26 00:08:51,912 Epoch 1027/2000 +2025-03-26 00:13:32,283 Current Learning Rate: 0.0000442984 +2025-03-26 00:13:32,284 Train Loss: 0.0000634, Val Loss: 0.0000611 +2025-03-26 00:13:32,284 Epoch 1028/2000 +2025-03-26 00:18:13,452 Current Learning Rate: 0.0000475865 +2025-03-26 00:18:13,452 Train Loss: 0.0000466, Val Loss: 0.0000611 +2025-03-26 00:18:13,453 Epoch 1029/2000 +2025-03-26 00:22:54,105 Current Learning Rate: 0.0000509862 +2025-03-26 00:22:54,106 Train Loss: 0.0000660, Val Loss: 0.0000621 +2025-03-26 00:22:54,106 Epoch 1030/2000 +2025-03-26 00:27:34,917 Current Learning Rate: 0.0000544967 +2025-03-26 00:27:34,917 Train Loss: 0.0000559, Val Loss: 0.0000612 +2025-03-26 00:27:34,917 Epoch 1031/2000 +2025-03-26 00:32:15,611 Current Learning Rate: 0.0000581172 +2025-03-26 00:32:15,612 Train Loss: 0.0000559, Val Loss: 0.0000611 +2025-03-26 00:32:15,612 Epoch 1032/2000 +2025-03-26 00:36:56,107 Current Learning Rate: 0.0000618467 +2025-03-26 00:36:56,108 Train Loss: 0.0000623, Val Loss: 0.0000612 +2025-03-26 00:36:56,108 Epoch 1033/2000 +2025-03-26 00:41:36,943 Current Learning Rate: 0.0000656842 +2025-03-26 00:41:36,944 Train Loss: 0.0000576, Val Loss: 0.0000611 +2025-03-26 00:41:36,944 Epoch 1034/2000 +2025-03-26 00:46:17,859 Current Learning Rate: 0.0000696290 +2025-03-26 00:46:17,859 Train Loss: 0.0000590, Val Loss: 
0.0000615 +2025-03-26 00:46:17,860 Epoch 1035/2000 +2025-03-26 00:50:58,577 Current Learning Rate: 0.0000736799 +2025-03-26 00:50:58,577 Train Loss: 0.0000578, Val Loss: 0.0000613 +2025-03-26 00:50:58,577 Epoch 1036/2000 +2025-03-26 00:55:39,219 Current Learning Rate: 0.0000778360 +2025-03-26 00:55:39,220 Train Loss: 0.0000588, Val Loss: 0.0000616 +2025-03-26 00:55:39,220 Epoch 1037/2000 +2025-03-26 01:00:20,232 Current Learning Rate: 0.0000820963 +2025-03-26 01:00:20,232 Train Loss: 0.0000557, Val Loss: 0.0000613 +2025-03-26 01:00:20,232 Epoch 1038/2000 +2025-03-26 01:05:01,086 Current Learning Rate: 0.0000864597 +2025-03-26 01:05:01,087 Train Loss: 0.0000484, Val Loss: 0.0000615 +2025-03-26 01:05:01,087 Epoch 1039/2000 +2025-03-26 01:09:40,976 Current Learning Rate: 0.0000909251 +2025-03-26 01:09:40,977 Train Loss: 0.0000562, Val Loss: 0.0000616 +2025-03-26 01:09:40,977 Epoch 1040/2000 +2025-03-26 01:14:22,259 Current Learning Rate: 0.0000954915 +2025-03-26 01:14:22,260 Train Loss: 0.0000504, Val Loss: 0.0000613 +2025-03-26 01:14:22,260 Epoch 1041/2000 +2025-03-26 01:19:02,784 Current Learning Rate: 0.0001001577 +2025-03-26 01:19:02,785 Train Loss: 0.0000580, Val Loss: 0.0000618 +2025-03-26 01:19:02,785 Epoch 1042/2000 +2025-03-26 01:23:43,331 Current Learning Rate: 0.0001049225 +2025-03-26 01:23:43,331 Train Loss: 0.0000542, Val Loss: 0.0000616 +2025-03-26 01:23:43,331 Epoch 1043/2000 +2025-03-26 01:28:24,184 Current Learning Rate: 0.0001097848 +2025-03-26 01:28:24,184 Train Loss: 0.0000493, Val Loss: 0.0000618 +2025-03-26 01:28:24,184 Epoch 1044/2000 +2025-03-26 01:33:05,771 Current Learning Rate: 0.0001147434 +2025-03-26 01:33:05,771 Train Loss: 0.0000585, Val Loss: 0.0000623 +2025-03-26 01:33:05,771 Epoch 1045/2000 +2025-03-26 01:37:45,944 Current Learning Rate: 0.0001197970 +2025-03-26 01:37:45,945 Train Loss: 0.0000568, Val Loss: 0.0000620 +2025-03-26 01:37:45,945 Epoch 1046/2000 +2025-03-26 01:42:26,809 Current Learning Rate: 0.0001249445 +2025-03-26 
01:42:26,810 Train Loss: 0.0000522, Val Loss: 0.0000615 +2025-03-26 01:42:26,810 Epoch 1047/2000 +2025-03-26 01:47:07,206 Current Learning Rate: 0.0001301845 +2025-03-26 01:47:07,206 Train Loss: 0.0000571, Val Loss: 0.0000620 +2025-03-26 01:47:07,207 Epoch 1048/2000 +2025-03-26 01:51:47,635 Current Learning Rate: 0.0001355157 +2025-03-26 01:51:47,636 Train Loss: 0.0000505, Val Loss: 0.0000618 +2025-03-26 01:51:47,636 Epoch 1049/2000 +2025-03-26 01:56:28,749 Current Learning Rate: 0.0001409369 +2025-03-26 01:56:28,750 Train Loss: 0.0000567, Val Loss: 0.0000618 +2025-03-26 01:56:28,750 Epoch 1050/2000 +2025-03-26 02:01:10,052 Current Learning Rate: 0.0001464466 +2025-03-26 02:01:10,053 Train Loss: 0.0000515, Val Loss: 0.0000620 +2025-03-26 02:01:10,053 Epoch 1051/2000 +2025-03-26 02:05:50,907 Current Learning Rate: 0.0001520436 +2025-03-26 02:05:50,907 Train Loss: 0.0000450, Val Loss: 0.0000615 +2025-03-26 02:05:50,908 Epoch 1052/2000 +2025-03-26 02:10:31,638 Current Learning Rate: 0.0001577264 +2025-03-26 02:10:31,638 Train Loss: 0.0000521, Val Loss: 0.0000618 +2025-03-26 02:10:31,638 Epoch 1053/2000 +2025-03-26 02:15:12,288 Current Learning Rate: 0.0001634937 +2025-03-26 02:15:12,289 Train Loss: 0.0000585, Val Loss: 0.0000619 +2025-03-26 02:15:12,289 Epoch 1054/2000 +2025-03-26 02:19:53,433 Current Learning Rate: 0.0001693441 +2025-03-26 02:19:53,433 Train Loss: 0.0000638, Val Loss: 0.0000619 +2025-03-26 02:19:53,433 Epoch 1055/2000 +2025-03-26 02:24:33,912 Current Learning Rate: 0.0001752760 +2025-03-26 02:24:33,913 Train Loss: 0.0000624, Val Loss: 0.0000626 +2025-03-26 02:24:33,913 Epoch 1056/2000 +2025-03-26 02:29:13,935 Current Learning Rate: 0.0001812880 +2025-03-26 02:29:13,936 Train Loss: 0.0000636, Val Loss: 0.0000627 +2025-03-26 02:29:13,937 Epoch 1057/2000 +2025-03-26 02:33:53,803 Current Learning Rate: 0.0001873787 +2025-03-26 02:33:53,804 Train Loss: 0.0000588, Val Loss: 0.0000619 +2025-03-26 02:33:53,804 Epoch 1058/2000 +2025-03-26 02:38:34,874 Current 
Learning Rate: 0.0001935465 +2025-03-26 02:38:34,875 Train Loss: 0.0000566, Val Loss: 0.0000619 +2025-03-26 02:38:34,875 Epoch 1059/2000 +2025-03-26 02:43:15,409 Current Learning Rate: 0.0001997899 +2025-03-26 02:43:15,409 Train Loss: 0.0000477, Val Loss: 0.0000618 +2025-03-26 02:43:15,410 Epoch 1060/2000 +2025-03-26 02:47:56,468 Current Learning Rate: 0.0002061074 +2025-03-26 02:47:56,468 Train Loss: 0.0000668, Val Loss: 0.0000621 +2025-03-26 02:47:56,468 Epoch 1061/2000 +2025-03-26 02:52:36,436 Current Learning Rate: 0.0002124974 +2025-03-26 02:52:36,436 Train Loss: 0.0000558, Val Loss: 0.0000634 +2025-03-26 02:52:36,437 Epoch 1062/2000 +2025-03-26 02:57:17,694 Current Learning Rate: 0.0002189583 +2025-03-26 02:57:17,695 Train Loss: 0.0000569, Val Loss: 0.0000619 +2025-03-26 02:57:17,695 Epoch 1063/2000 +2025-03-26 03:01:58,217 Current Learning Rate: 0.0002254886 +2025-03-26 03:01:58,218 Train Loss: 0.0000639, Val Loss: 0.0000707 +2025-03-26 03:01:58,218 Epoch 1064/2000 +2025-03-26 03:06:38,481 Current Learning Rate: 0.0002320866 +2025-03-26 03:06:38,482 Train Loss: 0.0000580, Val Loss: 0.0000628 +2025-03-26 03:06:38,482 Epoch 1065/2000 +2025-03-26 03:11:18,864 Current Learning Rate: 0.0002387507 +2025-03-26 03:11:18,864 Train Loss: 0.0000576, Val Loss: 0.0000639 +2025-03-26 03:11:18,864 Epoch 1066/2000 +2025-03-26 03:15:59,251 Current Learning Rate: 0.0002454793 +2025-03-26 03:15:59,252 Train Loss: 0.0000578, Val Loss: 0.0000631 +2025-03-26 03:15:59,252 Epoch 1067/2000 +2025-03-26 03:20:39,497 Current Learning Rate: 0.0002522707 +2025-03-26 03:20:39,497 Train Loss: 0.0000628, Val Loss: 0.0000650 +2025-03-26 03:20:39,497 Epoch 1068/2000 +2025-03-26 03:25:20,102 Current Learning Rate: 0.0002591232 +2025-03-26 03:25:20,103 Train Loss: 0.0000568, Val Loss: 0.0000624 +2025-03-26 03:25:20,103 Epoch 1069/2000 +2025-03-26 03:30:00,251 Current Learning Rate: 0.0002660351 +2025-03-26 03:30:00,251 Train Loss: 0.0000717, Val Loss: 0.0000631 +2025-03-26 03:30:00,251 Epoch 
1070/2000 +2025-03-26 03:34:41,199 Current Learning Rate: 0.0002730048 +2025-03-26 03:34:41,199 Train Loss: 0.0000599, Val Loss: 0.0000661 +2025-03-26 03:34:41,199 Epoch 1071/2000 +2025-03-26 03:39:21,413 Current Learning Rate: 0.0002800304 +2025-03-26 03:39:21,413 Train Loss: 0.0000612, Val Loss: 0.0000644 +2025-03-26 03:39:21,413 Epoch 1072/2000 +2025-03-26 03:44:01,887 Current Learning Rate: 0.0002871104 +2025-03-26 03:44:01,887 Train Loss: 0.0000615, Val Loss: 0.0000662 +2025-03-26 03:44:01,888 Epoch 1073/2000 +2025-03-26 03:48:42,639 Current Learning Rate: 0.0002942428 +2025-03-26 03:48:42,640 Train Loss: 0.0000550, Val Loss: 0.0000636 +2025-03-26 03:48:42,640 Epoch 1074/2000 +2025-03-26 03:53:23,679 Current Learning Rate: 0.0003014261 +2025-03-26 03:53:23,679 Train Loss: 0.0000482, Val Loss: 0.0000620 +2025-03-26 03:53:23,680 Epoch 1075/2000 +2025-03-26 03:58:04,460 Current Learning Rate: 0.0003086583 +2025-03-26 03:58:04,461 Train Loss: 0.0000674, Val Loss: 0.0000626 +2025-03-26 03:58:04,461 Epoch 1076/2000 +2025-03-26 04:02:45,770 Current Learning Rate: 0.0003159377 +2025-03-26 04:02:45,770 Train Loss: 0.0000568, Val Loss: 0.0000647 +2025-03-26 04:02:45,770 Epoch 1077/2000 +2025-03-26 04:07:26,257 Current Learning Rate: 0.0003232626 +2025-03-26 04:07:26,258 Train Loss: 0.0000544, Val Loss: 0.0000641 +2025-03-26 04:07:26,258 Epoch 1078/2000 +2025-03-26 04:12:06,931 Current Learning Rate: 0.0003306310 +2025-03-26 04:12:06,931 Train Loss: 0.0000559, Val Loss: 0.0000708 +2025-03-26 04:12:06,932 Epoch 1079/2000 +2025-03-26 04:16:47,536 Current Learning Rate: 0.0003380413 +2025-03-26 04:16:47,537 Train Loss: 0.0000613, Val Loss: 0.0000640 +2025-03-26 04:16:47,537 Epoch 1080/2000 +2025-03-26 04:21:28,417 Current Learning Rate: 0.0003454915 +2025-03-26 04:21:28,418 Train Loss: 0.0000697, Val Loss: 0.0000694 +2025-03-26 04:21:28,418 Epoch 1081/2000 +2025-03-26 04:26:09,432 Current Learning Rate: 0.0003529798 +2025-03-26 04:26:09,433 Train Loss: 0.0000573, Val Loss: 
0.0000640 +2025-03-26 04:26:09,433 Epoch 1082/2000 +2025-03-26 04:30:49,613 Current Learning Rate: 0.0003605044 +2025-03-26 04:30:49,614 Train Loss: 0.0000625, Val Loss: 0.0000637 +2025-03-26 04:30:49,614 Epoch 1083/2000 +2025-03-26 04:35:29,911 Current Learning Rate: 0.0003680635 +2025-03-26 04:35:29,912 Train Loss: 0.0000585, Val Loss: 0.0000628 +2025-03-26 04:35:29,912 Epoch 1084/2000 +2025-03-26 04:40:10,366 Current Learning Rate: 0.0003756551 +2025-03-26 04:40:10,367 Train Loss: 0.0000671, Val Loss: 0.0000683 +2025-03-26 04:40:10,367 Epoch 1085/2000 +2025-03-26 04:44:51,353 Current Learning Rate: 0.0003832773 +2025-03-26 04:44:51,353 Train Loss: 0.0000625, Val Loss: 0.0000634 +2025-03-26 04:44:51,353 Epoch 1086/2000 +2025-03-26 04:49:31,958 Current Learning Rate: 0.0003909284 +2025-03-26 04:49:31,959 Train Loss: 0.0000601, Val Loss: 0.0000663 +2025-03-26 04:49:31,959 Epoch 1087/2000 +2025-03-26 04:54:12,150 Current Learning Rate: 0.0003986064 +2025-03-26 04:54:12,150 Train Loss: 0.0000713, Val Loss: 0.0000658 +2025-03-26 04:54:12,151 Epoch 1088/2000 +2025-03-26 04:58:53,408 Current Learning Rate: 0.0004063093 +2025-03-26 04:58:53,408 Train Loss: 0.0000691, Val Loss: 0.0000651 +2025-03-26 04:58:53,408 Epoch 1089/2000 +2025-03-26 05:03:34,092 Current Learning Rate: 0.0004140354 +2025-03-26 05:03:34,092 Train Loss: 0.0000583, Val Loss: 0.0000714 +2025-03-26 05:03:34,093 Epoch 1090/2000 +2025-03-26 05:08:15,012 Current Learning Rate: 0.0004217828 +2025-03-26 05:08:15,013 Train Loss: 0.0000583, Val Loss: 0.0000632 +2025-03-26 05:08:15,013 Epoch 1091/2000 +2025-03-26 05:12:55,553 Current Learning Rate: 0.0004295494 +2025-03-26 05:12:55,554 Train Loss: 0.0000634, Val Loss: 0.0000634 +2025-03-26 05:12:55,557 Epoch 1092/2000 +2025-03-26 05:17:36,134 Current Learning Rate: 0.0004373334 +2025-03-26 05:17:36,134 Train Loss: 0.0000615, Val Loss: 0.0000661 +2025-03-26 05:17:36,135 Epoch 1093/2000 +2025-03-26 05:22:16,604 Current Learning Rate: 0.0004451328 +2025-03-26 
05:22:16,605 Train Loss: 0.0000635, Val Loss: 0.0000646 +2025-03-26 05:22:16,605 Epoch 1094/2000 +2025-03-26 05:26:56,110 Current Learning Rate: 0.0004529458 +2025-03-26 05:26:56,111 Train Loss: 0.0000574, Val Loss: 0.0000676 +2025-03-26 05:26:56,111 Epoch 1095/2000 +2025-03-26 05:31:35,733 Current Learning Rate: 0.0004607705 +2025-03-26 05:31:35,733 Train Loss: 0.0000625, Val Loss: 0.0000651 +2025-03-26 05:31:35,734 Epoch 1096/2000 +2025-03-26 05:36:16,110 Current Learning Rate: 0.0004686047 +2025-03-26 05:36:16,110 Train Loss: 0.0000742, Val Loss: 0.0000801 +2025-03-26 05:36:16,111 Epoch 1097/2000 +2025-03-26 05:40:56,961 Current Learning Rate: 0.0004764468 +2025-03-26 05:40:56,962 Train Loss: 0.0000651, Val Loss: 0.0000646 +2025-03-26 05:40:56,962 Epoch 1098/2000 +2025-03-26 05:45:37,564 Current Learning Rate: 0.0004842946 +2025-03-26 05:45:37,564 Train Loss: 0.0000575, Val Loss: 0.0000631 +2025-03-26 05:45:37,565 Epoch 1099/2000 +2025-03-26 05:50:18,161 Current Learning Rate: 0.0004921463 +2025-03-26 05:50:18,161 Train Loss: 0.0000570, Val Loss: 0.0000704 +2025-03-26 05:50:18,162 Epoch 1100/2000 +2025-03-26 05:54:58,554 Current Learning Rate: 0.0005000000 +2025-03-26 05:54:58,555 Train Loss: 0.0000745, Val Loss: 0.0000722 +2025-03-26 05:54:58,555 Epoch 1101/2000 +2025-03-26 05:59:39,248 Current Learning Rate: 0.0005078537 +2025-03-26 05:59:39,249 Train Loss: 0.0000558, Val Loss: 0.0000702 +2025-03-26 05:59:39,249 Epoch 1102/2000 +2025-03-26 06:04:19,855 Current Learning Rate: 0.0005157054 +2025-03-26 06:04:19,856 Train Loss: 0.0000615, Val Loss: 0.0000666 +2025-03-26 06:04:19,856 Epoch 1103/2000 +2025-03-26 06:09:00,088 Current Learning Rate: 0.0005235532 +2025-03-26 06:09:00,089 Train Loss: 0.0000554, Val Loss: 0.0000657 +2025-03-26 06:09:00,089 Epoch 1104/2000 +2025-03-26 06:13:40,989 Current Learning Rate: 0.0005313953 +2025-03-26 06:13:40,989 Train Loss: 0.0000697, Val Loss: 0.0000665 +2025-03-26 06:13:40,989 Epoch 1105/2000 +2025-03-26 06:18:21,171 Current 
Learning Rate: 0.0005392295 +2025-03-26 06:18:21,171 Train Loss: 0.0000698, Val Loss: 0.0000691 +2025-03-26 06:18:21,171 Epoch 1106/2000 +2025-03-26 06:23:01,845 Current Learning Rate: 0.0005470542 +2025-03-26 06:23:01,846 Train Loss: 0.0000704, Val Loss: 0.0000752 +2025-03-26 06:23:01,846 Epoch 1107/2000 +2025-03-26 06:27:41,889 Current Learning Rate: 0.0005548672 +2025-03-26 06:27:41,889 Train Loss: 0.0000622, Val Loss: 0.0000668 +2025-03-26 06:27:41,889 Epoch 1108/2000 +2025-03-26 06:32:22,327 Current Learning Rate: 0.0005626666 +2025-03-26 06:32:22,328 Train Loss: 0.0000645, Val Loss: 0.0000695 +2025-03-26 06:32:22,328 Epoch 1109/2000 +2025-03-26 06:37:02,898 Current Learning Rate: 0.0005704506 +2025-03-26 06:37:02,899 Train Loss: 0.0000528, Val Loss: 0.0000759 +2025-03-26 06:37:02,899 Epoch 1110/2000 +2025-03-26 06:41:43,686 Current Learning Rate: 0.0005782172 +2025-03-26 06:41:43,687 Train Loss: 0.0000698, Val Loss: 0.0000773 +2025-03-26 06:41:43,687 Epoch 1111/2000 +2025-03-26 06:46:25,194 Current Learning Rate: 0.0005859646 +2025-03-26 06:46:25,194 Train Loss: 0.0000737, Val Loss: 0.0000731 +2025-03-26 06:46:25,194 Epoch 1112/2000 +2025-03-26 06:51:05,912 Current Learning Rate: 0.0005936907 +2025-03-26 06:51:05,913 Train Loss: 0.0000730, Val Loss: 0.0000679 +2025-03-26 06:51:05,913 Epoch 1113/2000 +2025-03-26 06:55:46,262 Current Learning Rate: 0.0006013936 +2025-03-26 06:55:46,262 Train Loss: 0.0000599, Val Loss: 0.0000663 +2025-03-26 06:55:46,262 Epoch 1114/2000 +2025-03-26 07:00:26,864 Current Learning Rate: 0.0006090716 +2025-03-26 07:00:26,864 Train Loss: 0.0000663, Val Loss: 0.0000679 +2025-03-26 07:00:26,865 Epoch 1115/2000 +2025-03-26 07:05:07,196 Current Learning Rate: 0.0006167227 +2025-03-26 07:05:07,196 Train Loss: 0.0000683, Val Loss: 0.0000690 +2025-03-26 07:05:07,196 Epoch 1116/2000 +2025-03-26 07:09:47,859 Current Learning Rate: 0.0006243449 +2025-03-26 07:09:47,861 Train Loss: 0.0000665, Val Loss: 0.0000687 +2025-03-26 07:09:47,862 Epoch 
1117/2000 +2025-03-26 07:14:27,837 Current Learning Rate: 0.0006319365 +2025-03-26 07:14:27,838 Train Loss: 0.0000763, Val Loss: 0.0000806 +2025-03-26 07:14:27,838 Epoch 1118/2000 +2025-03-26 07:19:08,305 Current Learning Rate: 0.0006394956 +2025-03-26 07:19:08,306 Train Loss: 0.0000686, Val Loss: 0.0000696 +2025-03-26 07:19:08,306 Epoch 1119/2000 +2025-03-26 07:23:48,009 Current Learning Rate: 0.0006470202 +2025-03-26 07:23:48,009 Train Loss: 0.0000701, Val Loss: 0.0000707 +2025-03-26 07:23:48,009 Epoch 1120/2000 +2025-03-26 07:28:28,635 Current Learning Rate: 0.0006545085 +2025-03-26 07:28:28,636 Train Loss: 0.0000623, Val Loss: 0.0000721 +2025-03-26 07:28:28,636 Epoch 1121/2000 +2025-03-26 07:33:09,099 Current Learning Rate: 0.0006619587 +2025-03-26 07:33:09,100 Train Loss: 0.0000708, Val Loss: 0.0000692 +2025-03-26 07:33:09,101 Epoch 1122/2000 +2025-03-26 07:37:49,656 Current Learning Rate: 0.0006693690 +2025-03-26 07:37:49,656 Train Loss: 0.0000661, Val Loss: 0.0000825 +2025-03-26 07:37:49,657 Epoch 1123/2000 +2025-03-26 07:42:29,783 Current Learning Rate: 0.0006767374 +2025-03-26 07:42:29,784 Train Loss: 0.0000728, Val Loss: 0.0001023 +2025-03-26 07:42:29,784 Epoch 1124/2000 +2025-03-26 07:47:10,433 Current Learning Rate: 0.0006840623 +2025-03-26 07:47:10,433 Train Loss: 0.0000676, Val Loss: 0.0000717 +2025-03-26 07:47:10,434 Epoch 1125/2000 +2025-03-26 07:51:50,941 Current Learning Rate: 0.0006913417 +2025-03-26 07:51:50,942 Train Loss: 0.0000800, Val Loss: 0.0000716 +2025-03-26 07:51:50,942 Epoch 1126/2000 +2025-03-26 07:56:31,336 Current Learning Rate: 0.0006985739 +2025-03-26 07:56:31,336 Train Loss: 0.0000701, Val Loss: 0.0000756 +2025-03-26 07:56:31,336 Epoch 1127/2000 +2025-03-26 08:01:11,694 Current Learning Rate: 0.0007057572 +2025-03-26 08:01:11,694 Train Loss: 0.0000689, Val Loss: 0.0000678 +2025-03-26 08:01:11,695 Epoch 1128/2000 +2025-03-26 08:05:52,302 Current Learning Rate: 0.0007128896 +2025-03-26 08:05:52,302 Train Loss: 0.0000802, Val Loss: 
0.0000727 +2025-03-26 08:05:52,302 Epoch 1129/2000 +2025-03-26 08:10:32,793 Current Learning Rate: 0.0007199696 +2025-03-26 08:10:32,794 Train Loss: 0.0000685, Val Loss: 0.0000724 +2025-03-26 08:10:32,794 Epoch 1130/2000 +2025-03-26 08:15:13,313 Current Learning Rate: 0.0007269952 +2025-03-26 08:15:13,313 Train Loss: 0.0000592, Val Loss: 0.0000859 +2025-03-26 08:15:13,314 Epoch 1131/2000 +2025-03-26 08:19:53,213 Current Learning Rate: 0.0007339649 +2025-03-26 08:19:53,214 Train Loss: 0.0000651, Val Loss: 0.0000693 +2025-03-26 08:19:53,214 Epoch 1132/2000 +2025-03-26 08:24:34,110 Current Learning Rate: 0.0007408768 +2025-03-26 08:24:34,111 Train Loss: 0.0000868, Val Loss: 0.0000992 +2025-03-26 08:24:34,111 Epoch 1133/2000 +2025-03-26 08:29:14,638 Current Learning Rate: 0.0007477293 +2025-03-26 08:29:14,638 Train Loss: 0.0000894, Val Loss: 0.0000712 +2025-03-26 08:29:14,639 Epoch 1134/2000 +2025-03-26 08:33:54,773 Current Learning Rate: 0.0007545207 +2025-03-26 08:33:54,773 Train Loss: 0.0000814, Val Loss: 0.0000857 +2025-03-26 08:33:54,773 Epoch 1135/2000 +2025-03-26 08:38:34,926 Current Learning Rate: 0.0007612493 +2025-03-26 08:38:34,927 Train Loss: 0.0000738, Val Loss: 0.0000764 +2025-03-26 08:38:34,927 Epoch 1136/2000 +2025-03-26 08:43:15,312 Current Learning Rate: 0.0007679134 +2025-03-26 08:43:15,312 Train Loss: 0.0000673, Val Loss: 0.0000747 +2025-03-26 08:43:15,312 Epoch 1137/2000 +2025-03-26 08:47:55,504 Current Learning Rate: 0.0007745114 +2025-03-26 08:47:55,504 Train Loss: 0.0000819, Val Loss: 0.0000714 +2025-03-26 08:47:55,504 Epoch 1138/2000 +2025-03-26 08:52:36,398 Current Learning Rate: 0.0007810417 +2025-03-26 08:52:36,399 Train Loss: 0.0000746, Val Loss: 0.0000787 +2025-03-26 08:52:36,399 Epoch 1139/2000 +2025-03-26 08:57:16,907 Current Learning Rate: 0.0007875026 +2025-03-26 08:57:16,908 Train Loss: 0.0000786, Val Loss: 0.0000717 +2025-03-26 08:57:16,908 Epoch 1140/2000 +2025-03-26 09:01:57,508 Current Learning Rate: 0.0007938926 +2025-03-26 
09:01:57,509 Train Loss: 0.0000678, Val Loss: 0.0000753 +2025-03-26 09:01:57,509 Epoch 1141/2000 +2025-03-26 09:06:38,286 Current Learning Rate: 0.0008002101 +2025-03-26 09:06:38,287 Train Loss: 0.0000722, Val Loss: 0.0000687 +2025-03-26 09:06:38,287 Epoch 1142/2000 +2025-03-26 09:11:18,528 Current Learning Rate: 0.0008064535 +2025-03-26 09:11:18,529 Train Loss: 0.0000619, Val Loss: 0.0000674 +2025-03-26 09:11:18,529 Epoch 1143/2000 +2025-03-26 09:15:58,155 Current Learning Rate: 0.0008126213 +2025-03-26 09:15:58,155 Train Loss: 0.0001007, Val Loss: 0.0000832 +2025-03-26 09:15:58,155 Epoch 1144/2000 +2025-03-26 09:20:38,355 Current Learning Rate: 0.0008187120 +2025-03-26 09:20:38,356 Train Loss: 0.0000597, Val Loss: 0.0000695 +2025-03-26 09:20:38,356 Epoch 1145/2000 +2025-03-26 09:25:18,897 Current Learning Rate: 0.0008247240 +2025-03-26 09:25:18,897 Train Loss: 0.0000849, Val Loss: 0.0000767 +2025-03-26 09:25:18,898 Epoch 1146/2000 +2025-03-26 09:29:58,927 Current Learning Rate: 0.0008306559 +2025-03-26 09:29:58,927 Train Loss: 0.0000620, Val Loss: 0.0000770 +2025-03-26 09:29:58,927 Epoch 1147/2000 +2025-03-26 09:34:39,217 Current Learning Rate: 0.0008365063 +2025-03-26 09:34:39,217 Train Loss: 0.0000763, Val Loss: 0.0000786 +2025-03-26 09:34:39,218 Epoch 1148/2000 +2025-03-26 09:39:19,166 Current Learning Rate: 0.0008422736 +2025-03-26 09:39:19,166 Train Loss: 0.0000720, Val Loss: 0.0000821 +2025-03-26 09:39:19,167 Epoch 1149/2000 +2025-03-26 09:43:59,878 Current Learning Rate: 0.0008479564 +2025-03-26 09:43:59,878 Train Loss: 0.0000773, Val Loss: 0.0000783 +2025-03-26 09:43:59,878 Epoch 1150/2000 +2025-03-26 09:48:40,430 Current Learning Rate: 0.0008535534 +2025-03-26 09:48:40,430 Train Loss: 0.0000911, Val Loss: 0.0000850 +2025-03-26 09:48:40,431 Epoch 1151/2000 +2025-03-26 09:53:21,866 Current Learning Rate: 0.0008590631 +2025-03-26 09:53:21,866 Train Loss: 0.0000814, Val Loss: 0.0000760 +2025-03-26 09:53:21,866 Epoch 1152/2000 +2025-03-26 09:58:02,264 Current 
Learning Rate: 0.0008644843 +2025-03-26 09:58:02,265 Train Loss: 0.0000726, Val Loss: 0.0000866 +2025-03-26 09:58:02,265 Epoch 1153/2000 +2025-03-26 10:02:42,915 Current Learning Rate: 0.0008698155 +2025-03-26 10:02:42,916 Train Loss: 0.0000737, Val Loss: 0.0000752 +2025-03-26 10:02:42,916 Epoch 1154/2000 +2025-03-26 10:07:22,464 Current Learning Rate: 0.0008750555 +2025-03-26 10:07:22,464 Train Loss: 0.0000927, Val Loss: 0.0000820 +2025-03-26 10:07:22,464 Epoch 1155/2000 +2025-03-26 10:12:02,756 Current Learning Rate: 0.0008802030 +2025-03-26 10:12:02,756 Train Loss: 0.0000735, Val Loss: 0.0000850 +2025-03-26 10:12:02,757 Epoch 1156/2000 +2025-03-26 10:16:43,211 Current Learning Rate: 0.0008852566 +2025-03-26 10:16:43,212 Train Loss: 0.0000662, Val Loss: 0.0000863 +2025-03-26 10:16:43,212 Epoch 1157/2000 +2025-03-26 10:21:23,523 Current Learning Rate: 0.0008902152 +2025-03-26 10:21:23,523 Train Loss: 0.0001004, Val Loss: 0.0000889 +2025-03-26 10:21:23,523 Epoch 1158/2000 +2025-03-26 10:26:04,169 Current Learning Rate: 0.0008950775 +2025-03-26 10:26:04,170 Train Loss: 0.0000892, Val Loss: 0.0000736 +2025-03-26 10:26:04,170 Epoch 1159/2000 +2025-03-26 10:30:44,828 Current Learning Rate: 0.0008998423 +2025-03-26 10:30:44,829 Train Loss: 0.0000760, Val Loss: 0.0000737 +2025-03-26 10:30:44,829 Epoch 1160/2000 +2025-03-26 10:35:25,205 Current Learning Rate: 0.0009045085 +2025-03-26 10:35:25,205 Train Loss: 0.0000903, Val Loss: 0.0000862 +2025-03-26 10:35:25,206 Epoch 1161/2000 +2025-03-26 10:40:05,850 Current Learning Rate: 0.0009090749 +2025-03-26 10:40:05,851 Train Loss: 0.0000913, Val Loss: 0.0000792 +2025-03-26 10:40:05,851 Epoch 1162/2000 +2025-03-26 10:44:46,183 Current Learning Rate: 0.0009135403 +2025-03-26 10:44:46,184 Train Loss: 0.0000778, Val Loss: 0.0000781 +2025-03-26 10:44:46,184 Epoch 1163/2000 +2025-03-26 10:49:27,287 Current Learning Rate: 0.0009179037 +2025-03-26 10:49:27,288 Train Loss: 0.0000749, Val Loss: 0.0000813 +2025-03-26 10:49:27,288 Epoch 
1164/2000 +2025-03-26 10:54:07,775 Current Learning Rate: 0.0009221640 +2025-03-26 10:54:07,775 Train Loss: 0.0001060, Val Loss: 0.0000817 +2025-03-26 10:54:07,776 Epoch 1165/2000 +2025-03-26 10:58:48,344 Current Learning Rate: 0.0009263201 +2025-03-26 10:58:48,344 Train Loss: 0.0000785, Val Loss: 0.0001063 +2025-03-26 10:58:48,345 Epoch 1166/2000 +2025-03-26 11:03:28,671 Current Learning Rate: 0.0009303710 +2025-03-26 11:03:28,671 Train Loss: 0.0000938, Val Loss: 0.0000750 +2025-03-26 11:03:28,672 Epoch 1167/2000 +2025-03-26 11:08:08,543 Current Learning Rate: 0.0009343158 +2025-03-26 11:08:08,543 Train Loss: 0.0000796, Val Loss: 0.0000805 +2025-03-26 11:08:08,543 Epoch 1168/2000 +2025-03-26 11:12:48,507 Current Learning Rate: 0.0009381533 +2025-03-26 11:12:48,507 Train Loss: 0.0000736, Val Loss: 0.0000824 +2025-03-26 11:12:48,508 Epoch 1169/2000 +2025-03-26 11:17:29,214 Current Learning Rate: 0.0009418828 +2025-03-26 11:17:29,214 Train Loss: 0.0000788, Val Loss: 0.0000772 +2025-03-26 11:17:29,215 Epoch 1170/2000 +2025-03-26 11:22:10,341 Current Learning Rate: 0.0009455033 +2025-03-26 11:22:10,341 Train Loss: 0.0001282, Val Loss: 0.0000822 +2025-03-26 11:22:10,341 Epoch 1171/2000 +2025-03-26 11:26:51,021 Current Learning Rate: 0.0009490138 +2025-03-26 11:26:51,022 Train Loss: 0.0000860, Val Loss: 0.0000851 +2025-03-26 11:26:51,022 Epoch 1172/2000 +2025-03-26 11:31:31,961 Current Learning Rate: 0.0009524135 +2025-03-26 11:31:31,962 Train Loss: 0.0000819, Val Loss: 0.0000810 +2025-03-26 11:31:31,962 Epoch 1173/2000 +2025-03-26 11:36:12,692 Current Learning Rate: 0.0009557016 +2025-03-26 11:36:12,693 Train Loss: 0.0000797, Val Loss: 0.0000886 +2025-03-26 11:36:12,693 Epoch 1174/2000 +2025-03-26 11:40:53,775 Current Learning Rate: 0.0009588773 +2025-03-26 11:40:53,775 Train Loss: 0.0000783, Val Loss: 0.0000797 +2025-03-26 11:40:53,775 Epoch 1175/2000 +2025-03-26 11:45:34,213 Current Learning Rate: 0.0009619398 +2025-03-26 11:45:34,214 Train Loss: 0.0000632, Val Loss: 
0.0000805 +2025-03-26 11:45:34,214 Epoch 1176/2000 +2025-03-26 11:50:15,042 Current Learning Rate: 0.0009648882 +2025-03-26 11:50:15,042 Train Loss: 0.0000887, Val Loss: 0.0000758 +2025-03-26 11:50:15,043 Epoch 1177/2000 +2025-03-26 11:54:55,870 Current Learning Rate: 0.0009677220 +2025-03-26 11:54:55,870 Train Loss: 0.0000781, Val Loss: 0.0000775 +2025-03-26 11:54:55,871 Epoch 1178/2000 +2025-03-26 11:59:36,277 Current Learning Rate: 0.0009704404 +2025-03-26 11:59:36,278 Train Loss: 0.0000794, Val Loss: 0.0000748 +2025-03-26 11:59:36,278 Epoch 1179/2000 +2025-03-26 12:04:16,731 Current Learning Rate: 0.0009730427 +2025-03-26 12:04:16,731 Train Loss: 0.0000756, Val Loss: 0.0000718 +2025-03-26 12:04:16,731 Epoch 1180/2000 +2025-03-26 12:08:57,348 Current Learning Rate: 0.0009755283 +2025-03-26 12:08:57,348 Train Loss: 0.0000845, Val Loss: 0.0000786 +2025-03-26 12:08:57,348 Epoch 1181/2000 +2025-03-26 12:13:37,442 Current Learning Rate: 0.0009778965 +2025-03-26 12:13:37,442 Train Loss: 0.0000796, Val Loss: 0.0000953 +2025-03-26 12:13:37,443 Epoch 1182/2000 +2025-03-26 12:18:18,318 Current Learning Rate: 0.0009801468 +2025-03-26 12:18:18,319 Train Loss: 0.0000979, Val Loss: 0.0000894 +2025-03-26 12:18:18,319 Epoch 1183/2000 +2025-03-26 12:22:59,591 Current Learning Rate: 0.0009822787 +2025-03-26 12:22:59,591 Train Loss: 0.0000996, Val Loss: 0.0000925 +2025-03-26 12:22:59,592 Epoch 1184/2000 +2025-03-26 12:27:40,073 Current Learning Rate: 0.0009842916 +2025-03-26 12:27:40,074 Train Loss: 0.0000640, Val Loss: 0.0000728 +2025-03-26 12:27:40,074 Epoch 1185/2000 +2025-03-26 12:32:20,997 Current Learning Rate: 0.0009861850 +2025-03-26 12:32:20,997 Train Loss: 0.0001055, Val Loss: 0.0000905 +2025-03-26 12:32:20,998 Epoch 1186/2000 +2025-03-26 12:37:01,759 Current Learning Rate: 0.0009879584 +2025-03-26 12:37:01,759 Train Loss: 0.0000856, Val Loss: 0.0000796 +2025-03-26 12:37:01,759 Epoch 1187/2000 +2025-03-26 12:41:42,416 Current Learning Rate: 0.0009896114 +2025-03-26 
12:41:42,417 Train Loss: 0.0000762, Val Loss: 0.0000794 +2025-03-26 12:41:42,417 Epoch 1188/2000 +2025-03-26 12:46:22,385 Current Learning Rate: 0.0009911436 +2025-03-26 12:46:22,385 Train Loss: 0.0000886, Val Loss: 0.0000893 +2025-03-26 12:46:22,386 Epoch 1189/2000 +2025-03-26 12:51:02,954 Current Learning Rate: 0.0009925547 +2025-03-26 12:51:02,955 Train Loss: 0.0000780, Val Loss: 0.0000852 +2025-03-26 12:51:02,955 Epoch 1190/2000 +2025-03-26 12:55:43,260 Current Learning Rate: 0.0009938442 +2025-03-26 12:55:43,260 Train Loss: 0.0000885, Val Loss: 0.0000931 +2025-03-26 12:55:43,261 Epoch 1191/2000 +2025-03-26 13:00:23,839 Current Learning Rate: 0.0009950118 +2025-03-26 13:00:23,840 Train Loss: 0.0000827, Val Loss: 0.0000795 +2025-03-26 13:00:23,840 Epoch 1192/2000 +2025-03-26 13:05:04,276 Current Learning Rate: 0.0009960574 +2025-03-26 13:05:04,276 Train Loss: 0.0000799, Val Loss: 0.0000757 +2025-03-26 13:05:04,276 Epoch 1193/2000 +2025-03-26 13:09:44,447 Current Learning Rate: 0.0009969805 +2025-03-26 13:09:44,447 Train Loss: 0.0000822, Val Loss: 0.0000853 +2025-03-26 13:09:44,447 Epoch 1194/2000 +2025-03-26 13:14:25,771 Current Learning Rate: 0.0009977810 +2025-03-26 13:14:25,772 Train Loss: 0.0000949, Val Loss: 0.0001013 +2025-03-26 13:14:25,772 Epoch 1195/2000 +2025-03-26 13:19:06,532 Current Learning Rate: 0.0009984587 +2025-03-26 13:19:06,532 Train Loss: 0.0001219, Val Loss: 0.0001030 +2025-03-26 13:19:06,533 Epoch 1196/2000 +2025-03-26 13:23:47,334 Current Learning Rate: 0.0009990134 +2025-03-26 13:23:47,334 Train Loss: 0.0001119, Val Loss: 0.0001022 +2025-03-26 13:23:47,335 Epoch 1197/2000 +2025-03-26 13:28:28,295 Current Learning Rate: 0.0009994449 +2025-03-26 13:28:28,296 Train Loss: 0.0000814, Val Loss: 0.0000739 +2025-03-26 13:28:28,296 Epoch 1198/2000 +2025-03-26 13:33:08,400 Current Learning Rate: 0.0009997533 +2025-03-26 13:33:08,400 Train Loss: 0.0000845, Val Loss: 0.0000926 +2025-03-26 13:33:08,401 Epoch 1199/2000 +2025-03-26 13:37:48,611 Current 
Learning Rate: 0.0009999383 +2025-03-26 13:37:48,611 Train Loss: 0.0000899, Val Loss: 0.0000796 +2025-03-26 13:37:48,611 Epoch 1200/2000 +2025-03-26 13:42:29,573 Current Learning Rate: 0.0010000000 +2025-03-26 13:42:29,574 Train Loss: 0.0001473, Val Loss: 0.0000922 +2025-03-26 13:42:29,574 Epoch 1201/2000 +2025-03-26 13:47:10,545 Current Learning Rate: 0.0009999383 +2025-03-26 13:47:10,546 Train Loss: 0.0000967, Val Loss: 0.0001131 +2025-03-26 13:47:10,546 Epoch 1202/2000 +2025-03-26 13:51:51,478 Current Learning Rate: 0.0009997533 +2025-03-26 13:51:51,478 Train Loss: 0.0000693, Val Loss: 0.0000805 +2025-03-26 13:51:51,478 Epoch 1203/2000 +2025-03-26 13:56:32,025 Current Learning Rate: 0.0009994449 +2025-03-26 13:56:32,026 Train Loss: 0.0000767, Val Loss: 0.0000752 +2025-03-26 13:56:32,026 Epoch 1204/2000 +2025-03-26 14:01:12,323 Current Learning Rate: 0.0009990134 +2025-03-26 14:01:12,323 Train Loss: 0.0001197, Val Loss: 0.0000839 +2025-03-26 14:01:12,323 Epoch 1205/2000 +2025-03-26 14:05:52,806 Current Learning Rate: 0.0009984587 +2025-03-26 14:05:52,806 Train Loss: 0.0000905, Val Loss: 0.0000781 +2025-03-26 14:05:52,807 Epoch 1206/2000 +2025-03-26 14:10:33,712 Current Learning Rate: 0.0009977810 +2025-03-26 14:10:33,713 Train Loss: 0.0000868, Val Loss: 0.0000786 +2025-03-26 14:10:33,713 Epoch 1207/2000 +2025-03-26 14:15:14,526 Current Learning Rate: 0.0009969805 +2025-03-26 14:15:14,527 Train Loss: 0.0000737, Val Loss: 0.0000934 +2025-03-26 14:15:14,527 Epoch 1208/2000 +2025-03-26 14:19:55,347 Current Learning Rate: 0.0009960574 +2025-03-26 14:19:55,348 Train Loss: 0.0000812, Val Loss: 0.0000789 +2025-03-26 14:19:55,348 Epoch 1209/2000 +2025-03-26 14:24:36,132 Current Learning Rate: 0.0009950118 +2025-03-26 14:24:36,132 Train Loss: 0.0000722, Val Loss: 0.0000733 +2025-03-26 14:24:36,133 Epoch 1210/2000 +2025-03-26 14:29:16,410 Current Learning Rate: 0.0009938442 +2025-03-26 14:29:16,410 Train Loss: 0.0000975, Val Loss: 0.0000751 +2025-03-26 14:29:16,414 Epoch 
1211/2000 +2025-03-26 14:33:56,208 Current Learning Rate: 0.0009925547 +2025-03-26 14:33:56,209 Train Loss: 0.0000894, Val Loss: 0.0000824 +2025-03-26 14:33:56,209 Epoch 1212/2000 +2025-03-26 14:38:37,112 Current Learning Rate: 0.0009911436 +2025-03-26 14:38:37,117 Train Loss: 0.0000891, Val Loss: 0.0000853 +2025-03-26 14:38:37,118 Epoch 1213/2000 +2025-03-26 14:43:17,255 Current Learning Rate: 0.0009896114 +2025-03-26 14:43:17,255 Train Loss: 0.0000758, Val Loss: 0.0000985 +2025-03-26 14:43:17,255 Epoch 1214/2000 +2025-03-26 14:47:57,513 Current Learning Rate: 0.0009879584 +2025-03-26 14:47:57,514 Train Loss: 0.0000909, Val Loss: 0.0000814 +2025-03-26 14:47:57,514 Epoch 1215/2000 +2025-03-26 14:52:38,573 Current Learning Rate: 0.0009861850 +2025-03-26 14:52:38,574 Train Loss: 0.0000893, Val Loss: 0.0000822 +2025-03-26 14:52:38,574 Epoch 1216/2000 +2025-03-26 14:57:18,927 Current Learning Rate: 0.0009842916 +2025-03-26 14:57:18,927 Train Loss: 0.0000851, Val Loss: 0.0000874 +2025-03-26 14:57:18,928 Epoch 1217/2000 +2025-03-26 15:01:59,438 Current Learning Rate: 0.0009822787 +2025-03-26 15:01:59,438 Train Loss: 0.0000780, Val Loss: 0.0000793 +2025-03-26 15:01:59,439 Epoch 1218/2000 +2025-03-26 15:06:40,508 Current Learning Rate: 0.0009801468 +2025-03-26 15:06:40,508 Train Loss: 0.0001157, Val Loss: 0.0000898 +2025-03-26 15:06:40,509 Epoch 1219/2000 +2025-03-26 15:11:21,634 Current Learning Rate: 0.0009778965 +2025-03-26 15:11:21,635 Train Loss: 0.0000673, Val Loss: 0.0000732 +2025-03-26 15:11:21,635 Epoch 1220/2000 +2025-03-26 15:16:02,105 Current Learning Rate: 0.0009755283 +2025-03-26 15:16:02,106 Train Loss: 0.0000780, Val Loss: 0.0000879 +2025-03-26 15:16:02,107 Epoch 1221/2000 +2025-03-26 15:20:42,940 Current Learning Rate: 0.0009730427 +2025-03-26 15:20:42,940 Train Loss: 0.0000813, Val Loss: 0.0001035 +2025-03-26 15:20:42,941 Epoch 1222/2000 +2025-03-26 15:25:23,667 Current Learning Rate: 0.0009704404 +2025-03-26 15:25:23,667 Train Loss: 0.0000866, Val Loss: 
0.0000777 +2025-03-26 15:25:23,668 Epoch 1223/2000 +2025-03-26 15:30:03,793 Current Learning Rate: 0.0009677220 +2025-03-26 15:30:03,794 Train Loss: 0.0000743, Val Loss: 0.0000909 +2025-03-26 15:30:03,794 Epoch 1224/2000 +2025-03-26 15:34:44,346 Current Learning Rate: 0.0009648882 +2025-03-26 15:34:44,346 Train Loss: 0.0000902, Val Loss: 0.0000893 +2025-03-26 15:34:44,346 Epoch 1225/2000 +2025-03-26 15:39:24,961 Current Learning Rate: 0.0009619398 +2025-03-26 15:39:24,962 Train Loss: 0.0000803, Val Loss: 0.0000785 +2025-03-26 15:39:24,962 Epoch 1226/2000 +2025-03-26 15:44:05,521 Current Learning Rate: 0.0009588773 +2025-03-26 15:44:05,521 Train Loss: 0.0000830, Val Loss: 0.0000811 +2025-03-26 15:44:05,521 Epoch 1227/2000 +2025-03-26 15:48:45,900 Current Learning Rate: 0.0009557016 +2025-03-26 15:48:45,901 Train Loss: 0.0000866, Val Loss: 0.0001006 +2025-03-26 15:48:45,901 Epoch 1228/2000 +2025-03-26 15:53:26,307 Current Learning Rate: 0.0009524135 +2025-03-26 15:53:26,308 Train Loss: 0.0000993, Val Loss: 0.0000787 +2025-03-26 15:53:26,308 Epoch 1229/2000 +2025-03-26 15:58:07,126 Current Learning Rate: 0.0009490138 +2025-03-26 15:58:07,127 Train Loss: 0.0000784, Val Loss: 0.0000782 +2025-03-26 15:58:07,127 Epoch 1230/2000 +2025-03-26 16:02:47,371 Current Learning Rate: 0.0009455033 +2025-03-26 16:02:47,372 Train Loss: 0.0000936, Val Loss: 0.0001226 +2025-03-26 16:02:47,372 Epoch 1231/2000 +2025-03-26 16:07:27,877 Current Learning Rate: 0.0009418828 +2025-03-26 16:07:27,877 Train Loss: 0.0000824, Val Loss: 0.0000778 +2025-03-26 16:07:27,878 Epoch 1232/2000 +2025-03-26 16:12:08,149 Current Learning Rate: 0.0009381533 +2025-03-26 16:12:08,149 Train Loss: 0.0000700, Val Loss: 0.0000706 +2025-03-26 16:12:08,150 Epoch 1233/2000 +2025-03-26 16:16:48,561 Current Learning Rate: 0.0009343158 +2025-03-26 16:16:48,562 Train Loss: 0.0000755, Val Loss: 0.0001012 +2025-03-26 16:16:48,562 Epoch 1234/2000 +2025-03-26 16:21:29,184 Current Learning Rate: 0.0009303710 +2025-03-26 
16:21:29,184 Train Loss: 0.0000726, Val Loss: 0.0000902 +2025-03-26 16:21:29,184 Epoch 1235/2000 +2025-03-26 16:26:10,076 Current Learning Rate: 0.0009263201 +2025-03-26 16:26:10,076 Train Loss: 0.0000734, Val Loss: 0.0000787 +2025-03-26 16:26:10,077 Epoch 1236/2000 +2025-03-26 16:30:50,410 Current Learning Rate: 0.0009221640 +2025-03-26 16:30:50,410 Train Loss: 0.0000818, Val Loss: 0.0000742 +2025-03-26 16:30:50,410 Epoch 1237/2000 +2025-03-26 16:35:30,451 Current Learning Rate: 0.0009179037 +2025-03-26 16:35:30,451 Train Loss: 0.0000713, Val Loss: 0.0000754 +2025-03-26 16:35:30,452 Epoch 1238/2000 +2025-03-26 16:40:10,900 Current Learning Rate: 0.0009135403 +2025-03-26 16:40:10,901 Train Loss: 0.0000679, Val Loss: 0.0000722 +2025-03-26 16:40:10,901 Epoch 1239/2000 +2025-03-26 16:44:50,915 Current Learning Rate: 0.0009090749 +2025-03-26 16:44:50,916 Train Loss: 0.0000967, Val Loss: 0.0000860 +2025-03-26 16:44:50,916 Epoch 1240/2000 +2025-03-26 16:49:31,332 Current Learning Rate: 0.0009045085 +2025-03-26 16:49:31,333 Train Loss: 0.0000664, Val Loss: 0.0000758 +2025-03-26 16:49:31,333 Epoch 1241/2000 +2025-03-26 16:54:12,161 Current Learning Rate: 0.0008998423 +2025-03-26 16:54:12,162 Train Loss: 0.0000776, Val Loss: 0.0001027 +2025-03-26 16:54:12,162 Epoch 1242/2000 +2025-03-26 16:58:52,454 Current Learning Rate: 0.0008950775 +2025-03-26 16:58:52,455 Train Loss: 0.0000683, Val Loss: 0.0000713 +2025-03-26 16:58:52,461 Epoch 1243/2000 +2025-03-26 17:03:32,779 Current Learning Rate: 0.0008902152 +2025-03-26 17:03:32,780 Train Loss: 0.0000728, Val Loss: 0.0000808 +2025-03-26 17:03:32,780 Epoch 1244/2000 +2025-03-26 17:08:14,097 Current Learning Rate: 0.0008852566 +2025-03-26 17:08:14,098 Train Loss: 0.0000800, Val Loss: 0.0000805 +2025-03-26 17:08:14,098 Epoch 1245/2000 +2025-03-26 17:12:54,413 Current Learning Rate: 0.0008802030 +2025-03-26 17:12:54,414 Train Loss: 0.0000824, Val Loss: 0.0000760 +2025-03-26 17:12:54,414 Epoch 1246/2000 +2025-03-26 17:17:34,311 Current 
Learning Rate: 0.0008750555 +2025-03-26 17:17:34,311 Train Loss: 0.0000578, Val Loss: 0.0000669 +2025-03-26 17:17:34,311 Epoch 1247/2000 +2025-03-26 17:22:15,076 Current Learning Rate: 0.0008698155 +2025-03-26 17:22:15,077 Train Loss: 0.0000730, Val Loss: 0.0000721 +2025-03-26 17:22:15,077 Epoch 1248/2000 +2025-03-26 17:26:56,066 Current Learning Rate: 0.0008644843 +2025-03-26 17:26:56,066 Train Loss: 0.0000750, Val Loss: 0.0000702 +2025-03-26 17:26:56,067 Epoch 1249/2000 +2025-03-26 17:31:36,082 Current Learning Rate: 0.0008590631 +2025-03-26 17:31:36,083 Train Loss: 0.0000729, Val Loss: 0.0000785 +2025-03-26 17:31:36,083 Epoch 1250/2000 +2025-03-26 17:36:16,429 Current Learning Rate: 0.0008535534 +2025-03-26 17:36:16,430 Train Loss: 0.0000772, Val Loss: 0.0000741 +2025-03-26 17:36:16,430 Epoch 1251/2000 +2025-03-26 17:40:56,863 Current Learning Rate: 0.0008479564 +2025-03-26 17:40:56,863 Train Loss: 0.0000704, Val Loss: 0.0000707 +2025-03-26 17:40:56,863 Epoch 1252/2000 +2025-03-26 17:45:37,142 Current Learning Rate: 0.0008422736 +2025-03-26 17:45:37,143 Train Loss: 0.0000769, Val Loss: 0.0000741 +2025-03-26 17:45:37,143 Epoch 1253/2000 +2025-03-26 17:50:17,125 Current Learning Rate: 0.0008365063 +2025-03-26 17:50:17,126 Train Loss: 0.0000879, Val Loss: 0.0000721 +2025-03-26 17:50:17,126 Epoch 1254/2000 +2025-03-26 17:54:57,637 Current Learning Rate: 0.0008306559 +2025-03-26 17:54:57,638 Train Loss: 0.0000802, Val Loss: 0.0000735 +2025-03-26 17:54:57,639 Epoch 1255/2000 +2025-03-26 17:59:38,133 Current Learning Rate: 0.0008247240 +2025-03-26 17:59:38,134 Train Loss: 0.0000654, Val Loss: 0.0000740 +2025-03-26 17:59:38,134 Epoch 1256/2000 +2025-03-26 18:04:18,689 Current Learning Rate: 0.0008187120 +2025-03-26 18:04:18,689 Train Loss: 0.0000721, Val Loss: 0.0000699 +2025-03-26 18:04:18,690 Epoch 1257/2000 +2025-03-26 18:08:58,875 Current Learning Rate: 0.0008126213 +2025-03-26 18:08:58,878 Train Loss: 0.0000776, Val Loss: 0.0000796 +2025-03-26 18:08:58,878 Epoch 
1258/2000 +2025-03-26 18:13:39,341 Current Learning Rate: 0.0008064535 +2025-03-26 18:13:39,342 Train Loss: 0.0000613, Val Loss: 0.0000753 +2025-03-26 18:13:39,342 Epoch 1259/2000 +2025-03-26 18:18:19,834 Current Learning Rate: 0.0008002101 +2025-03-26 18:18:19,835 Train Loss: 0.0000795, Val Loss: 0.0001387 +2025-03-26 18:18:19,835 Epoch 1260/2000 +2025-03-26 18:23:00,165 Current Learning Rate: 0.0007938926 +2025-03-26 18:23:00,165 Train Loss: 0.0000836, Val Loss: 0.0000782 +2025-03-26 18:23:00,165 Epoch 1261/2000 +2025-03-26 18:27:40,417 Current Learning Rate: 0.0007875026 +2025-03-26 18:27:40,417 Train Loss: 0.0000604, Val Loss: 0.0000699 +2025-03-26 18:27:40,417 Epoch 1262/2000 +2025-03-26 18:32:21,100 Current Learning Rate: 0.0007810417 +2025-03-26 18:32:21,101 Train Loss: 0.0000611, Val Loss: 0.0000672 +2025-03-26 18:32:21,101 Epoch 1263/2000 +2025-03-26 18:37:01,224 Current Learning Rate: 0.0007745114 +2025-03-26 18:37:01,224 Train Loss: 0.0000615, Val Loss: 0.0000684 +2025-03-26 18:37:01,225 Epoch 1264/2000 +2025-03-26 18:41:42,113 Current Learning Rate: 0.0007679134 +2025-03-26 18:41:42,115 Train Loss: 0.0000734, Val Loss: 0.0000692 +2025-03-26 18:41:42,115 Epoch 1265/2000 +2025-03-26 18:46:23,042 Current Learning Rate: 0.0007612493 +2025-03-26 18:46:23,043 Train Loss: 0.0000629, Val Loss: 0.0000700 +2025-03-26 18:46:23,043 Epoch 1266/2000 +2025-03-26 18:51:03,631 Current Learning Rate: 0.0007545207 +2025-03-26 18:51:03,631 Train Loss: 0.0000645, Val Loss: 0.0000648 +2025-03-26 18:51:03,632 Epoch 1267/2000 +2025-03-26 18:55:44,173 Current Learning Rate: 0.0007477293 +2025-03-26 18:55:44,174 Train Loss: 0.0000603, Val Loss: 0.0000742 +2025-03-26 18:55:44,174 Epoch 1268/2000 +2025-03-26 19:00:24,780 Current Learning Rate: 0.0007408768 +2025-03-26 19:00:24,781 Train Loss: 0.0000792, Val Loss: 0.0000674 +2025-03-26 19:00:24,781 Epoch 1269/2000 +2025-03-26 19:05:04,893 Current Learning Rate: 0.0007339649 +2025-03-26 19:05:04,894 Train Loss: 0.0000660, Val Loss: 
0.0000695 +2025-03-26 19:05:04,894 Epoch 1270/2000 +2025-03-26 19:09:45,326 Current Learning Rate: 0.0007269952 +2025-03-26 19:09:45,327 Train Loss: 0.0000731, Val Loss: 0.0000699 +2025-03-26 19:09:45,327 Epoch 1271/2000 +2025-03-26 19:14:25,497 Current Learning Rate: 0.0007199696 +2025-03-26 19:14:25,498 Train Loss: 0.0000662, Val Loss: 0.0000797 +2025-03-26 19:14:25,498 Epoch 1272/2000 +2025-03-26 19:19:06,159 Current Learning Rate: 0.0007128896 +2025-03-26 19:19:06,159 Train Loss: 0.0000759, Val Loss: 0.0000706 +2025-03-26 19:19:06,159 Epoch 1273/2000 +2025-03-26 19:23:46,599 Current Learning Rate: 0.0007057572 +2025-03-26 19:23:46,600 Train Loss: 0.0000673, Val Loss: 0.0000706 +2025-03-26 19:23:46,600 Epoch 1274/2000 +2025-03-26 19:28:26,893 Current Learning Rate: 0.0006985739 +2025-03-26 19:28:26,894 Train Loss: 0.0000721, Val Loss: 0.0000795 +2025-03-26 19:28:26,894 Epoch 1275/2000 +2025-03-26 19:33:07,497 Current Learning Rate: 0.0006913417 +2025-03-26 19:33:07,498 Train Loss: 0.0000795, Val Loss: 0.0000720 +2025-03-26 19:33:07,498 Epoch 1276/2000 +2025-03-26 19:37:47,717 Current Learning Rate: 0.0006840623 +2025-03-26 19:37:47,717 Train Loss: 0.0000647, Val Loss: 0.0000672 +2025-03-26 19:37:47,717 Epoch 1277/2000 +2025-03-26 19:42:28,056 Current Learning Rate: 0.0006767374 +2025-03-26 19:42:28,056 Train Loss: 0.0000736, Val Loss: 0.0000705 +2025-03-26 19:42:28,057 Epoch 1278/2000 +2025-03-26 19:47:08,540 Current Learning Rate: 0.0006693690 +2025-03-26 19:47:08,541 Train Loss: 0.0000650, Val Loss: 0.0000673 +2025-03-26 19:47:08,541 Epoch 1279/2000 +2025-03-26 19:51:49,112 Current Learning Rate: 0.0006619587 +2025-03-26 19:51:49,112 Train Loss: 0.0000605, Val Loss: 0.0000660 +2025-03-26 19:51:49,113 Epoch 1280/2000 +2025-03-26 19:56:29,406 Current Learning Rate: 0.0006545085 +2025-03-26 19:56:29,407 Train Loss: 0.0000556, Val Loss: 0.0000664 +2025-03-26 19:56:29,407 Epoch 1281/2000 +2025-03-26 20:01:10,217 Current Learning Rate: 0.0006470202 +2025-03-26 
20:01:10,218 Train Loss: 0.0000674, Val Loss: 0.0000672 +2025-03-26 20:01:10,218 Epoch 1282/2000 +2025-03-26 20:05:50,149 Current Learning Rate: 0.0006394956 +2025-03-26 20:05:50,149 Train Loss: 0.0000700, Val Loss: 0.0000750 +2025-03-26 20:05:50,149 Epoch 1283/2000 +2025-03-26 20:10:30,691 Current Learning Rate: 0.0006319365 +2025-03-26 20:10:30,691 Train Loss: 0.0000610, Val Loss: 0.0000680 +2025-03-26 20:10:30,692 Epoch 1284/2000 +2025-03-26 20:15:11,260 Current Learning Rate: 0.0006243449 +2025-03-26 20:15:11,261 Train Loss: 0.0000628, Val Loss: 0.0000685 +2025-03-26 20:15:11,261 Epoch 1285/2000 +2025-03-26 20:19:51,210 Current Learning Rate: 0.0006167227 +2025-03-26 20:19:51,211 Train Loss: 0.0000583, Val Loss: 0.0000638 +2025-03-26 20:19:51,211 Epoch 1286/2000 +2025-03-26 20:24:31,567 Current Learning Rate: 0.0006090716 +2025-03-26 20:24:31,567 Train Loss: 0.0000627, Val Loss: 0.0000664 +2025-03-26 20:24:31,567 Epoch 1287/2000 +2025-03-26 20:29:12,301 Current Learning Rate: 0.0006013936 +2025-03-26 20:29:12,302 Train Loss: 0.0000568, Val Loss: 0.0000689 +2025-03-26 20:29:12,302 Epoch 1288/2000 +2025-03-26 20:33:52,577 Current Learning Rate: 0.0005936907 +2025-03-26 20:33:52,578 Train Loss: 0.0000722, Val Loss: 0.0000666 +2025-03-26 20:33:52,578 Epoch 1289/2000 +2025-03-26 20:38:33,120 Current Learning Rate: 0.0005859646 +2025-03-26 20:38:33,121 Train Loss: 0.0000567, Val Loss: 0.0000666 +2025-03-26 20:38:33,121 Epoch 1290/2000 +2025-03-26 20:43:13,550 Current Learning Rate: 0.0005782172 +2025-03-26 20:43:13,551 Train Loss: 0.0000560, Val Loss: 0.0000624 +2025-03-26 20:43:13,552 Epoch 1291/2000 +2025-03-26 20:47:53,434 Current Learning Rate: 0.0005704506 +2025-03-26 20:47:53,434 Train Loss: 0.0000531, Val Loss: 0.0000643 +2025-03-26 20:47:53,434 Epoch 1292/2000 +2025-03-26 20:52:34,104 Current Learning Rate: 0.0005626666 +2025-03-26 20:52:34,105 Train Loss: 0.0000581, Val Loss: 0.0000660 +2025-03-26 20:52:34,105 Epoch 1293/2000 +2025-03-26 20:57:14,570 Current 
Learning Rate: 0.0005548672 +2025-03-26 20:57:14,570 Train Loss: 0.0000666, Val Loss: 0.0000647 +2025-03-26 20:57:14,570 Epoch 1294/2000 +2025-03-26 21:01:55,203 Current Learning Rate: 0.0005470542 +2025-03-26 21:01:55,203 Train Loss: 0.0000610, Val Loss: 0.0000628 +2025-03-26 21:01:55,203 Epoch 1295/2000 +2025-03-26 21:06:35,561 Current Learning Rate: 0.0005392295 +2025-03-26 21:06:35,562 Train Loss: 0.0000549, Val Loss: 0.0000719 +2025-03-26 21:06:35,562 Epoch 1296/2000 +2025-03-26 21:11:16,214 Current Learning Rate: 0.0005313953 +2025-03-26 21:11:16,215 Train Loss: 0.0000730, Val Loss: 0.0000676 +2025-03-26 21:11:16,215 Epoch 1297/2000 +2025-03-26 21:15:56,796 Current Learning Rate: 0.0005235532 +2025-03-26 21:15:56,797 Train Loss: 0.0000597, Val Loss: 0.0000651 +2025-03-26 21:15:56,797 Epoch 1298/2000 +2025-03-26 21:20:36,969 Current Learning Rate: 0.0005157054 +2025-03-26 21:20:36,969 Train Loss: 0.0000596, Val Loss: 0.0000637 +2025-03-26 21:20:36,970 Epoch 1299/2000 +2025-03-26 21:25:17,690 Current Learning Rate: 0.0005078537 +2025-03-26 21:25:17,690 Train Loss: 0.0000601, Val Loss: 0.0000663 +2025-03-26 21:25:17,691 Epoch 1300/2000 +2025-03-26 21:29:58,022 Current Learning Rate: 0.0005000000 +2025-03-26 21:29:58,023 Train Loss: 0.0000573, Val Loss: 0.0000678 +2025-03-26 21:29:58,023 Epoch 1301/2000 +2025-03-26 21:34:38,135 Current Learning Rate: 0.0004921463 +2025-03-26 21:34:38,136 Train Loss: 0.0000637, Val Loss: 0.0000656 +2025-03-26 21:34:38,137 Epoch 1302/2000 +2025-03-26 21:39:18,536 Current Learning Rate: 0.0004842946 +2025-03-26 21:39:18,537 Train Loss: 0.0000566, Val Loss: 0.0000623 +2025-03-26 21:39:18,537 Epoch 1303/2000 +2025-03-26 21:43:58,872 Current Learning Rate: 0.0004764468 +2025-03-26 21:43:58,873 Train Loss: 0.0000551, Val Loss: 0.0000624 +2025-03-26 21:43:58,873 Epoch 1304/2000 +2025-03-26 21:48:39,323 Current Learning Rate: 0.0004686047 +2025-03-26 21:48:39,323 Train Loss: 0.0000574, Val Loss: 0.0000662 +2025-03-26 21:48:39,324 Epoch 
1305/2000 +2025-03-26 21:53:19,538 Current Learning Rate: 0.0004607705 +2025-03-26 21:53:19,539 Train Loss: 0.0000645, Val Loss: 0.0000670 +2025-03-26 21:53:19,539 Epoch 1306/2000 +2025-03-26 21:58:00,210 Current Learning Rate: 0.0004529458 +2025-03-26 21:58:00,210 Train Loss: 0.0000606, Val Loss: 0.0000639 +2025-03-26 21:58:00,210 Epoch 1307/2000 +2025-03-26 22:02:40,605 Current Learning Rate: 0.0004451328 +2025-03-26 22:02:40,605 Train Loss: 0.0000621, Val Loss: 0.0000638 +2025-03-26 22:02:40,606 Epoch 1308/2000 +2025-03-26 22:07:21,530 Current Learning Rate: 0.0004373334 +2025-03-26 22:07:21,531 Train Loss: 0.0000552, Val Loss: 0.0000612 +2025-03-26 22:07:21,531 Epoch 1309/2000 +2025-03-26 22:12:02,376 Current Learning Rate: 0.0004295494 +2025-03-26 22:12:03,577 Train Loss: 0.0000544, Val Loss: 0.0000604 +2025-03-26 22:12:03,577 Epoch 1310/2000 +2025-03-26 22:16:43,411 Current Learning Rate: 0.0004217828 +2025-03-26 22:16:43,412 Train Loss: 0.0000655, Val Loss: 0.0000621 +2025-03-26 22:16:43,412 Epoch 1311/2000 +2025-03-26 22:21:23,684 Current Learning Rate: 0.0004140354 +2025-03-26 22:21:23,685 Train Loss: 0.0000504, Val Loss: 0.0000626 +2025-03-26 22:21:23,685 Epoch 1312/2000 +2025-03-26 22:26:04,613 Current Learning Rate: 0.0004063093 +2025-03-26 22:26:05,486 Train Loss: 0.0000619, Val Loss: 0.0000604 +2025-03-26 22:26:05,487 Epoch 1313/2000 +2025-03-26 22:30:45,066 Current Learning Rate: 0.0003986064 +2025-03-26 22:30:45,067 Train Loss: 0.0000667, Val Loss: 0.0000621 +2025-03-26 22:30:45,067 Epoch 1314/2000 +2025-03-26 22:35:25,262 Current Learning Rate: 0.0003909284 +2025-03-26 22:35:25,263 Train Loss: 0.0000524, Val Loss: 0.0000605 +2025-03-26 22:35:25,263 Epoch 1315/2000 +2025-03-26 22:40:05,179 Current Learning Rate: 0.0003832773 +2025-03-26 22:40:05,179 Train Loss: 0.0000549, Val Loss: 0.0000613 +2025-03-26 22:40:05,180 Epoch 1316/2000 +2025-03-26 22:44:45,875 Current Learning Rate: 0.0003756551 +2025-03-26 22:44:46,819 Train Loss: 0.0000551, Val Loss: 
0.0000603 +2025-03-26 22:44:46,819 Epoch 1317/2000 +2025-03-26 22:49:26,542 Current Learning Rate: 0.0003680635 +2025-03-26 22:49:26,543 Train Loss: 0.0000527, Val Loss: 0.0000603 +2025-03-26 22:49:26,544 Epoch 1318/2000 +2025-03-26 22:54:07,031 Current Learning Rate: 0.0003605044 +2025-03-26 22:54:07,032 Train Loss: 0.0000574, Val Loss: 0.0000606 +2025-03-26 22:54:07,032 Epoch 1319/2000 +2025-03-26 22:58:47,617 Current Learning Rate: 0.0003529798 +2025-03-26 22:58:47,617 Train Loss: 0.0000607, Val Loss: 0.0000623 +2025-03-26 22:58:47,618 Epoch 1320/2000 +2025-03-26 23:03:28,206 Current Learning Rate: 0.0003454915 +2025-03-26 23:03:28,207 Train Loss: 0.0000573, Val Loss: 0.0000604 +2025-03-26 23:03:28,207 Epoch 1321/2000 +2025-03-26 23:08:08,528 Current Learning Rate: 0.0003380413 +2025-03-26 23:08:08,529 Train Loss: 0.0000533, Val Loss: 0.0000615 +2025-03-26 23:08:08,529 Epoch 1322/2000 +2025-03-26 23:12:48,905 Current Learning Rate: 0.0003306310 +2025-03-26 23:12:49,798 Train Loss: 0.0000530, Val Loss: 0.0000597 +2025-03-26 23:12:49,799 Epoch 1323/2000 +2025-03-26 23:17:29,140 Current Learning Rate: 0.0003232626 +2025-03-26 23:17:29,141 Train Loss: 0.0000579, Val Loss: 0.0000600 +2025-03-26 23:17:29,141 Epoch 1324/2000 +2025-03-26 23:22:09,295 Current Learning Rate: 0.0003159377 +2025-03-26 23:22:09,296 Train Loss: 0.0000636, Val Loss: 0.0000602 +2025-03-26 23:22:09,296 Epoch 1325/2000 +2025-03-26 23:26:50,183 Current Learning Rate: 0.0003086583 +2025-03-26 23:26:50,184 Train Loss: 0.0000491, Val Loss: 0.0000598 +2025-03-26 23:26:50,184 Epoch 1326/2000 +2025-03-26 23:31:30,795 Current Learning Rate: 0.0003014261 +2025-03-26 23:31:31,656 Train Loss: 0.0000388, Val Loss: 0.0000590 +2025-03-26 23:31:31,657 Epoch 1327/2000 +2025-03-26 23:36:11,424 Current Learning Rate: 0.0002942428 +2025-03-26 23:36:11,425 Train Loss: 0.0000656, Val Loss: 0.0000605 +2025-03-26 23:36:11,425 Epoch 1328/2000 +2025-03-26 23:40:51,215 Current Learning Rate: 0.0002871104 +2025-03-26 
23:40:51,215 Train Loss: 0.0000577, Val Loss: 0.0000632 +2025-03-26 23:40:51,216 Epoch 1329/2000 +2025-03-26 23:45:31,414 Current Learning Rate: 0.0002800304 +2025-03-26 23:45:31,415 Train Loss: 0.0000511, Val Loss: 0.0000595 +2025-03-26 23:45:31,415 Epoch 1330/2000 +2025-03-26 23:50:11,814 Current Learning Rate: 0.0002730048 +2025-03-26 23:50:11,815 Train Loss: 0.0000526, Val Loss: 0.0000595 +2025-03-26 23:50:11,815 Epoch 1331/2000 +2025-03-26 23:54:52,592 Current Learning Rate: 0.0002660351 +2025-03-26 23:54:53,489 Train Loss: 0.0000493, Val Loss: 0.0000589 +2025-03-26 23:54:53,489 Epoch 1332/2000 +2025-03-26 23:59:32,896 Current Learning Rate: 0.0002591232 +2025-03-26 23:59:33,826 Train Loss: 0.0000487, Val Loss: 0.0000588 +2025-03-26 23:59:33,826 Epoch 1333/2000 +2025-03-27 00:04:13,526 Current Learning Rate: 0.0002522707 +2025-03-27 00:04:13,527 Train Loss: 0.0000516, Val Loss: 0.0000590 +2025-03-27 00:04:13,527 Epoch 1334/2000 +2025-03-27 00:08:53,804 Current Learning Rate: 0.0002454793 +2025-03-27 00:08:54,860 Train Loss: 0.0000559, Val Loss: 0.0000586 +2025-03-27 00:08:54,860 Epoch 1335/2000 +2025-03-27 00:13:34,710 Current Learning Rate: 0.0002387507 +2025-03-27 00:13:35,787 Train Loss: 0.0000527, Val Loss: 0.0000582 +2025-03-27 00:13:35,787 Epoch 1336/2000 +2025-03-27 00:18:15,495 Current Learning Rate: 0.0002320866 +2025-03-27 00:18:15,496 Train Loss: 0.0000536, Val Loss: 0.0000583 +2025-03-27 00:18:15,496 Epoch 1337/2000 +2025-03-27 00:22:55,295 Current Learning Rate: 0.0002254886 +2025-03-27 00:22:55,296 Train Loss: 0.0000502, Val Loss: 0.0000591 +2025-03-27 00:22:55,296 Epoch 1338/2000 +2025-03-27 00:27:35,683 Current Learning Rate: 0.0002189583 +2025-03-27 00:27:35,684 Train Loss: 0.0000584, Val Loss: 0.0000582 +2025-03-27 00:27:35,684 Epoch 1339/2000 +2025-03-27 00:32:16,226 Current Learning Rate: 0.0002124974 +2025-03-27 00:32:16,227 Train Loss: 0.0000480, Val Loss: 0.0000587 +2025-03-27 00:32:16,227 Epoch 1340/2000 +2025-03-27 00:36:56,939 Current 
Learning Rate: 0.0002061074 +2025-03-27 00:36:57,858 Train Loss: 0.0000432, Val Loss: 0.0000580 +2025-03-27 00:36:57,858 Epoch 1341/2000 +2025-03-27 00:41:37,421 Current Learning Rate: 0.0001997899 +2025-03-27 00:41:38,310 Train Loss: 0.0000443, Val Loss: 0.0000576 +2025-03-27 00:41:38,311 Epoch 1342/2000 +2025-03-27 00:46:17,848 Current Learning Rate: 0.0001935465 +2025-03-27 00:46:17,849 Train Loss: 0.0000418, Val Loss: 0.0000587 +2025-03-27 00:46:17,849 Epoch 1343/2000 +2025-03-27 00:50:58,015 Current Learning Rate: 0.0001873787 +2025-03-27 00:50:58,016 Train Loss: 0.0000496, Val Loss: 0.0000577 +2025-03-27 00:50:58,016 Epoch 1344/2000 +2025-03-27 00:55:38,418 Current Learning Rate: 0.0001812880 +2025-03-27 00:55:38,419 Train Loss: 0.0000454, Val Loss: 0.0000607 +2025-03-27 00:55:38,419 Epoch 1345/2000 +2025-03-27 01:00:18,913 Current Learning Rate: 0.0001752760 +2025-03-27 01:00:18,914 Train Loss: 0.0000459, Val Loss: 0.0000578 +2025-03-27 01:00:18,914 Epoch 1346/2000 +2025-03-27 01:04:59,327 Current Learning Rate: 0.0001693441 +2025-03-27 01:04:59,327 Train Loss: 0.0000571, Val Loss: 0.0000578 +2025-03-27 01:04:59,328 Epoch 1347/2000 +2025-03-27 01:09:39,559 Current Learning Rate: 0.0001634937 +2025-03-27 01:09:40,689 Train Loss: 0.0000515, Val Loss: 0.0000576 +2025-03-27 01:09:40,689 Epoch 1348/2000 +2025-03-27 01:14:20,232 Current Learning Rate: 0.0001577264 +2025-03-27 01:14:21,143 Train Loss: 0.0000522, Val Loss: 0.0000574 +2025-03-27 01:14:21,143 Epoch 1349/2000 +2025-03-27 01:19:00,640 Current Learning Rate: 0.0001520436 +2025-03-27 01:19:00,641 Train Loss: 0.0000503, Val Loss: 0.0000574 +2025-03-27 01:19:00,641 Epoch 1350/2000 +2025-03-27 01:23:40,992 Current Learning Rate: 0.0001464466 +2025-03-27 01:23:41,928 Train Loss: 0.0000461, Val Loss: 0.0000573 +2025-03-27 01:23:41,929 Epoch 1351/2000 +2025-03-27 01:28:21,734 Current Learning Rate: 0.0001409369 +2025-03-27 01:28:21,734 Train Loss: 0.0000550, Val Loss: 0.0000575 +2025-03-27 01:28:21,734 Epoch 
1352/2000 +2025-03-27 01:33:01,214 Current Learning Rate: 0.0001355157 +2025-03-27 01:33:02,171 Train Loss: 0.0000449, Val Loss: 0.0000573 +2025-03-27 01:33:02,171 Epoch 1353/2000 +2025-03-27 01:37:41,395 Current Learning Rate: 0.0001301845 +2025-03-27 01:37:42,490 Train Loss: 0.0000469, Val Loss: 0.0000572 +2025-03-27 01:37:42,490 Epoch 1354/2000 +2025-03-27 01:42:22,173 Current Learning Rate: 0.0001249445 +2025-03-27 01:42:23,138 Train Loss: 0.0000476, Val Loss: 0.0000570 +2025-03-27 01:42:23,139 Epoch 1355/2000 +2025-03-27 01:47:02,744 Current Learning Rate: 0.0001197970 +2025-03-27 01:47:02,745 Train Loss: 0.0000487, Val Loss: 0.0000574 +2025-03-27 01:47:02,745 Epoch 1356/2000 +2025-03-27 01:51:43,020 Current Learning Rate: 0.0001147434 +2025-03-27 01:51:44,085 Train Loss: 0.0000483, Val Loss: 0.0000567 +2025-03-27 01:51:44,086 Epoch 1357/2000 +2025-03-27 01:56:23,779 Current Learning Rate: 0.0001097848 +2025-03-27 01:56:24,636 Train Loss: 0.0000500, Val Loss: 0.0000566 +2025-03-27 01:56:24,636 Epoch 1358/2000 +2025-03-27 02:01:04,554 Current Learning Rate: 0.0001049225 +2025-03-27 02:01:04,555 Train Loss: 0.0000493, Val Loss: 0.0000570 +2025-03-27 02:01:04,555 Epoch 1359/2000 +2025-03-27 02:05:45,532 Current Learning Rate: 0.0001001577 +2025-03-27 02:05:46,403 Train Loss: 0.0000544, Val Loss: 0.0000566 +2025-03-27 02:05:46,404 Epoch 1360/2000 +2025-03-27 02:10:25,807 Current Learning Rate: 0.0000954915 +2025-03-27 02:10:25,808 Train Loss: 0.0000590, Val Loss: 0.0000569 +2025-03-27 02:10:25,808 Epoch 1361/2000 +2025-03-27 02:15:05,959 Current Learning Rate: 0.0000909251 +2025-03-27 02:15:05,960 Train Loss: 0.0000520, Val Loss: 0.0000568 +2025-03-27 02:15:05,960 Epoch 1362/2000 +2025-03-27 02:19:45,781 Current Learning Rate: 0.0000864597 +2025-03-27 02:19:46,627 Train Loss: 0.0000457, Val Loss: 0.0000563 +2025-03-27 02:19:46,628 Epoch 1363/2000 +2025-03-27 02:24:26,447 Current Learning Rate: 0.0000820963 +2025-03-27 02:24:26,448 Train Loss: 0.0000440, Val Loss: 
0.0000564 +2025-03-27 02:24:26,448 Epoch 1364/2000 +2025-03-27 02:29:07,195 Current Learning Rate: 0.0000778360 +2025-03-27 02:29:08,118 Train Loss: 0.0000455, Val Loss: 0.0000562 +2025-03-27 02:29:08,119 Epoch 1365/2000 +2025-03-27 02:33:47,239 Current Learning Rate: 0.0000736799 +2025-03-27 02:33:47,240 Train Loss: 0.0000472, Val Loss: 0.0000563 +2025-03-27 02:33:47,241 Epoch 1366/2000 +2025-03-27 02:38:27,839 Current Learning Rate: 0.0000696290 +2025-03-27 02:38:28,918 Train Loss: 0.0000517, Val Loss: 0.0000562 +2025-03-27 02:38:28,919 Epoch 1367/2000 +2025-03-27 02:43:08,491 Current Learning Rate: 0.0000656842 +2025-03-27 02:43:08,491 Train Loss: 0.0000499, Val Loss: 0.0000563 +2025-03-27 02:43:08,492 Epoch 1368/2000 +2025-03-27 02:47:48,848 Current Learning Rate: 0.0000618467 +2025-03-27 02:47:48,848 Train Loss: 0.0000515, Val Loss: 0.0000562 +2025-03-27 02:47:48,848 Epoch 1369/2000 +2025-03-27 02:52:29,584 Current Learning Rate: 0.0000581172 +2025-03-27 02:52:30,461 Train Loss: 0.0000609, Val Loss: 0.0000561 +2025-03-27 02:52:30,461 Epoch 1370/2000 +2025-03-27 02:57:10,075 Current Learning Rate: 0.0000544967 +2025-03-27 02:57:10,999 Train Loss: 0.0000418, Val Loss: 0.0000559 +2025-03-27 02:57:10,999 Epoch 1371/2000 +2025-03-27 03:01:50,970 Current Learning Rate: 0.0000509862 +2025-03-27 03:01:50,971 Train Loss: 0.0000473, Val Loss: 0.0000561 +2025-03-27 03:01:50,971 Epoch 1372/2000 +2025-03-27 03:06:31,698 Current Learning Rate: 0.0000475865 +2025-03-27 03:06:32,605 Train Loss: 0.0000408, Val Loss: 0.0000559 +2025-03-27 03:06:32,606 Epoch 1373/2000 +2025-03-27 03:11:12,590 Current Learning Rate: 0.0000442984 +2025-03-27 03:11:12,591 Train Loss: 0.0000492, Val Loss: 0.0000559 +2025-03-27 03:11:12,591 Epoch 1374/2000 +2025-03-27 03:15:52,436 Current Learning Rate: 0.0000411227 +2025-03-27 03:15:53,290 Train Loss: 0.0000537, Val Loss: 0.0000558 +2025-03-27 03:15:53,291 Epoch 1375/2000 +2025-03-27 03:20:32,812 Current Learning Rate: 0.0000380602 +2025-03-27 
03:20:32,813 Train Loss: 0.0000415, Val Loss: 0.0000559 +2025-03-27 03:20:32,814 Epoch 1376/2000 +2025-03-27 03:25:13,034 Current Learning Rate: 0.0000351118 +2025-03-27 03:25:13,924 Train Loss: 0.0000490, Val Loss: 0.0000558 +2025-03-27 03:25:13,924 Epoch 1377/2000 +2025-03-27 03:29:53,300 Current Learning Rate: 0.0000322780 +2025-03-27 03:29:53,301 Train Loss: 0.0000473, Val Loss: 0.0000558 +2025-03-27 03:29:53,301 Epoch 1378/2000 +2025-03-27 03:34:33,881 Current Learning Rate: 0.0000295596 +2025-03-27 03:34:34,929 Train Loss: 0.0000474, Val Loss: 0.0000558 +2025-03-27 03:34:34,929 Epoch 1379/2000 +2025-03-27 03:39:14,694 Current Learning Rate: 0.0000269573 +2025-03-27 03:39:14,694 Train Loss: 0.0000502, Val Loss: 0.0000558 +2025-03-27 03:39:14,695 Epoch 1380/2000 +2025-03-27 03:43:54,723 Current Learning Rate: 0.0000244717 +2025-03-27 03:43:55,580 Train Loss: 0.0000420, Val Loss: 0.0000557 +2025-03-27 03:43:55,581 Epoch 1381/2000 +2025-03-27 03:48:35,437 Current Learning Rate: 0.0000221035 +2025-03-27 03:48:35,437 Train Loss: 0.0000506, Val Loss: 0.0000557 +2025-03-27 03:48:35,438 Epoch 1382/2000 +2025-03-27 03:53:16,640 Current Learning Rate: 0.0000198532 +2025-03-27 03:53:17,560 Train Loss: 0.0000443, Val Loss: 0.0000556 +2025-03-27 03:53:17,560 Epoch 1383/2000 +2025-03-27 03:57:57,525 Current Learning Rate: 0.0000177213 +2025-03-27 03:57:58,614 Train Loss: 0.0000602, Val Loss: 0.0000556 +2025-03-27 03:57:58,614 Epoch 1384/2000 +2025-03-27 04:02:39,066 Current Learning Rate: 0.0000157084 +2025-03-27 04:02:39,067 Train Loss: 0.0000557, Val Loss: 0.0000556 +2025-03-27 04:02:39,067 Epoch 1385/2000 +2025-03-27 04:07:19,200 Current Learning Rate: 0.0000138150 +2025-03-27 04:07:20,091 Train Loss: 0.0000510, Val Loss: 0.0000556 +2025-03-27 04:07:20,092 Epoch 1386/2000 +2025-03-27 04:11:59,836 Current Learning Rate: 0.0000120416 +2025-03-27 04:12:00,757 Train Loss: 0.0000449, Val Loss: 0.0000555 +2025-03-27 04:12:00,757 Epoch 1387/2000 +2025-03-27 04:16:40,683 Current 
Learning Rate: 0.0000103886 +2025-03-27 04:16:41,592 Train Loss: 0.0000472, Val Loss: 0.0000555 +2025-03-27 04:16:41,592 Epoch 1388/2000 +2025-03-27 04:21:21,500 Current Learning Rate: 0.0000088564 +2025-03-27 04:21:22,420 Train Loss: 0.0000573, Val Loss: 0.0000555 +2025-03-27 04:21:22,420 Epoch 1389/2000 +2025-03-27 04:26:02,159 Current Learning Rate: 0.0000074453 +2025-03-27 04:26:03,110 Train Loss: 0.0000525, Val Loss: 0.0000555 +2025-03-27 04:26:03,110 Epoch 1390/2000 +2025-03-27 04:30:42,827 Current Learning Rate: 0.0000061558 +2025-03-27 04:30:43,744 Train Loss: 0.0000501, Val Loss: 0.0000555 +2025-03-27 04:30:43,744 Epoch 1391/2000 +2025-03-27 04:35:24,107 Current Learning Rate: 0.0000049882 +2025-03-27 04:35:24,994 Train Loss: 0.0000429, Val Loss: 0.0000555 +2025-03-27 04:35:24,994 Epoch 1392/2000 +2025-03-27 04:40:04,518 Current Learning Rate: 0.0000039426 +2025-03-27 04:40:05,449 Train Loss: 0.0000507, Val Loss: 0.0000555 +2025-03-27 04:40:05,449 Epoch 1393/2000 +2025-03-27 04:44:45,174 Current Learning Rate: 0.0000030195 +2025-03-27 04:44:46,135 Train Loss: 0.0000472, Val Loss: 0.0000555 +2025-03-27 04:44:46,135 Epoch 1394/2000 +2025-03-27 04:49:25,551 Current Learning Rate: 0.0000022190 +2025-03-27 04:49:25,552 Train Loss: 0.0000485, Val Loss: 0.0000555 +2025-03-27 04:49:25,552 Epoch 1395/2000 +2025-03-27 04:54:05,855 Current Learning Rate: 0.0000015413 +2025-03-27 04:54:05,856 Train Loss: 0.0000498, Val Loss: 0.0000555 +2025-03-27 04:54:05,856 Epoch 1396/2000 +2025-03-27 04:58:46,190 Current Learning Rate: 0.0000009866 +2025-03-27 04:58:46,191 Train Loss: 0.0000441, Val Loss: 0.0000555 +2025-03-27 04:58:46,191 Epoch 1397/2000 +2025-03-27 05:03:26,489 Current Learning Rate: 0.0000005551 +2025-03-27 05:03:27,424 Train Loss: 0.0000441, Val Loss: 0.0000554 +2025-03-27 05:03:27,424 Epoch 1398/2000 +2025-03-27 05:08:07,100 Current Learning Rate: 0.0000002467 +2025-03-27 05:08:08,077 Train Loss: 0.0000460, Val Loss: 0.0000554 +2025-03-27 05:08:08,078 Epoch 
1399/2000 +2025-03-27 05:12:47,918 Current Learning Rate: 0.0000000617 +2025-03-27 05:12:47,919 Train Loss: 0.0000487, Val Loss: 0.0000555 +2025-03-27 05:12:47,919 Epoch 1400/2000 +2025-03-27 05:17:28,004 Current Learning Rate: 0.0000000000 +2025-03-27 05:17:28,005 Train Loss: 0.0000408, Val Loss: 0.0000554 +2025-03-27 05:17:28,005 Epoch 1401/2000 +2025-03-27 05:22:08,924 Current Learning Rate: 0.0000000617 +2025-03-27 05:22:09,789 Train Loss: 0.0000470, Val Loss: 0.0000554 +2025-03-27 05:22:09,789 Epoch 1402/2000 +2025-03-27 05:26:49,480 Current Learning Rate: 0.0000002467 +2025-03-27 05:26:49,481 Train Loss: 0.0000523, Val Loss: 0.0000554 +2025-03-27 05:26:49,481 Epoch 1403/2000 +2025-03-27 05:31:30,497 Current Learning Rate: 0.0000005551 +2025-03-27 05:31:30,498 Train Loss: 0.0000448, Val Loss: 0.0000555 +2025-03-27 05:31:30,498 Epoch 1404/2000 +2025-03-27 05:36:10,885 Current Learning Rate: 0.0000009866 +2025-03-27 05:36:10,886 Train Loss: 0.0000467, Val Loss: 0.0000555 +2025-03-27 05:36:10,886 Epoch 1405/2000 +2025-03-27 05:40:51,340 Current Learning Rate: 0.0000015413 +2025-03-27 05:40:51,341 Train Loss: 0.0000519, Val Loss: 0.0000555 +2025-03-27 05:40:51,341 Epoch 1406/2000 +2025-03-27 05:45:32,029 Current Learning Rate: 0.0000022190 +2025-03-27 05:45:32,030 Train Loss: 0.0000538, Val Loss: 0.0000554 +2025-03-27 05:45:32,030 Epoch 1407/2000 +2025-03-27 05:50:12,478 Current Learning Rate: 0.0000030195 +2025-03-27 05:50:12,479 Train Loss: 0.0000550, Val Loss: 0.0000554 +2025-03-27 05:50:12,479 Epoch 1408/2000 +2025-03-27 05:54:53,769 Current Learning Rate: 0.0000039426 +2025-03-27 05:54:53,769 Train Loss: 0.0000446, Val Loss: 0.0000555 +2025-03-27 05:54:53,770 Epoch 1409/2000 +2025-03-27 05:59:34,199 Current Learning Rate: 0.0000049882 +2025-03-27 05:59:34,199 Train Loss: 0.0000468, Val Loss: 0.0000555 +2025-03-27 05:59:34,199 Epoch 1410/2000 +2025-03-27 06:04:14,493 Current Learning Rate: 0.0000061558 +2025-03-27 06:04:14,494 Train Loss: 0.0000537, Val Loss: 
0.0000554 +2025-03-27 06:04:14,494 Epoch 1411/2000 +2025-03-27 06:08:55,040 Current Learning Rate: 0.0000074453 +2025-03-27 06:08:55,041 Train Loss: 0.0000442, Val Loss: 0.0000555 +2025-03-27 06:08:55,041 Epoch 1412/2000 +2025-03-27 06:13:35,620 Current Learning Rate: 0.0000088564 +2025-03-27 06:13:35,621 Train Loss: 0.0000453, Val Loss: 0.0000555 +2025-03-27 06:13:35,621 Epoch 1413/2000 +2025-03-27 06:18:15,638 Current Learning Rate: 0.0000103886 +2025-03-27 06:18:15,638 Train Loss: 0.0000454, Val Loss: 0.0000555 +2025-03-27 06:18:15,638 Epoch 1414/2000 +2025-03-27 06:22:56,287 Current Learning Rate: 0.0000120416 +2025-03-27 06:22:56,288 Train Loss: 0.0000556, Val Loss: 0.0000555 +2025-03-27 06:22:56,288 Epoch 1415/2000 +2025-03-27 06:27:36,205 Current Learning Rate: 0.0000138150 +2025-03-27 06:27:36,206 Train Loss: 0.0000476, Val Loss: 0.0000555 +2025-03-27 06:27:36,206 Epoch 1416/2000 +2025-03-27 06:32:17,072 Current Learning Rate: 0.0000157084 +2025-03-27 06:32:17,073 Train Loss: 0.0000631, Val Loss: 0.0000555 +2025-03-27 06:32:17,073 Epoch 1417/2000 +2025-03-27 06:36:57,394 Current Learning Rate: 0.0000177213 +2025-03-27 06:36:57,395 Train Loss: 0.0000522, Val Loss: 0.0000555 +2025-03-27 06:36:57,395 Epoch 1418/2000 +2025-03-27 06:41:37,826 Current Learning Rate: 0.0000198532 +2025-03-27 06:41:37,827 Train Loss: 0.0000467, Val Loss: 0.0000555 +2025-03-27 06:41:37,827 Epoch 1419/2000 +2025-03-27 06:46:18,050 Current Learning Rate: 0.0000221035 +2025-03-27 06:46:18,051 Train Loss: 0.0000547, Val Loss: 0.0000556 +2025-03-27 06:46:18,051 Epoch 1420/2000 +2025-03-27 06:50:58,711 Current Learning Rate: 0.0000244717 +2025-03-27 06:50:58,711 Train Loss: 0.0000453, Val Loss: 0.0000556 +2025-03-27 06:50:58,712 Epoch 1421/2000 +2025-03-27 06:55:39,332 Current Learning Rate: 0.0000269573 +2025-03-27 06:55:39,333 Train Loss: 0.0000458, Val Loss: 0.0000556 +2025-03-27 06:55:39,333 Epoch 1422/2000 +2025-03-27 07:00:19,633 Current Learning Rate: 0.0000295596 +2025-03-27 
07:00:19,633 Train Loss: 0.0000418, Val Loss: 0.0000555 +2025-03-27 07:00:19,633 Epoch 1423/2000 +2025-03-27 07:05:00,438 Current Learning Rate: 0.0000322780 +2025-03-27 07:05:00,439 Train Loss: 0.0000412, Val Loss: 0.0000556 +2025-03-27 07:05:00,439 Epoch 1424/2000 +2025-03-27 07:09:41,948 Current Learning Rate: 0.0000351118 +2025-03-27 07:09:41,948 Train Loss: 0.0000408, Val Loss: 0.0000556 +2025-03-27 07:09:41,949 Epoch 1425/2000 +2025-03-27 07:14:22,220 Current Learning Rate: 0.0000380602 +2025-03-27 07:14:22,221 Train Loss: 0.0000488, Val Loss: 0.0000557 +2025-03-27 07:14:22,221 Epoch 1426/2000 +2025-03-27 07:19:02,978 Current Learning Rate: 0.0000411227 +2025-03-27 07:19:02,979 Train Loss: 0.0000409, Val Loss: 0.0000557 +2025-03-27 07:19:02,979 Epoch 1427/2000 +2025-03-27 07:23:43,790 Current Learning Rate: 0.0000442984 +2025-03-27 07:23:43,791 Train Loss: 0.0000522, Val Loss: 0.0000557 +2025-03-27 07:23:43,791 Epoch 1428/2000 +2025-03-27 07:28:24,435 Current Learning Rate: 0.0000475865 +2025-03-27 07:28:24,435 Train Loss: 0.0000461, Val Loss: 0.0000556 +2025-03-27 07:28:24,435 Epoch 1429/2000 +2025-03-27 07:33:04,679 Current Learning Rate: 0.0000509862 +2025-03-27 07:33:04,679 Train Loss: 0.0000404, Val Loss: 0.0000556 +2025-03-27 07:33:04,680 Epoch 1430/2000 +2025-03-27 07:37:44,842 Current Learning Rate: 0.0000544967 +2025-03-27 07:37:44,842 Train Loss: 0.0000472, Val Loss: 0.0000557 +2025-03-27 07:37:44,843 Epoch 1431/2000 +2025-03-27 07:42:25,378 Current Learning Rate: 0.0000581172 +2025-03-27 07:42:25,378 Train Loss: 0.0000391, Val Loss: 0.0000557 +2025-03-27 07:42:25,379 Epoch 1432/2000 +2025-03-27 07:47:06,038 Current Learning Rate: 0.0000618467 +2025-03-27 07:47:06,039 Train Loss: 0.0000518, Val Loss: 0.0000558 +2025-03-27 07:47:06,040 Epoch 1433/2000 +2025-03-27 07:51:46,824 Current Learning Rate: 0.0000656842 +2025-03-27 07:51:46,824 Train Loss: 0.0000541, Val Loss: 0.0000558 +2025-03-27 07:51:46,825 Epoch 1434/2000 +2025-03-27 07:56:27,359 Current 
Learning Rate: 0.0000696290 +2025-03-27 07:56:27,360 Train Loss: 0.0000510, Val Loss: 0.0000560 +2025-03-27 07:56:27,360 Epoch 1435/2000 +2025-03-27 08:01:08,090 Current Learning Rate: 0.0000736799 +2025-03-27 08:01:08,091 Train Loss: 0.0000444, Val Loss: 0.0000560 +2025-03-27 08:01:08,091 Epoch 1436/2000 +2025-03-27 08:05:48,540 Current Learning Rate: 0.0000778360 +2025-03-27 08:05:48,541 Train Loss: 0.0000432, Val Loss: 0.0000558 +2025-03-27 08:05:48,541 Epoch 1437/2000 +2025-03-27 08:10:28,957 Current Learning Rate: 0.0000820963 +2025-03-27 08:10:28,957 Train Loss: 0.0000483, Val Loss: 0.0000558 +2025-03-27 08:10:28,957 Epoch 1438/2000 +2025-03-27 08:15:08,807 Current Learning Rate: 0.0000864597 +2025-03-27 08:15:08,807 Train Loss: 0.0000574, Val Loss: 0.0000559 +2025-03-27 08:15:08,808 Epoch 1439/2000 +2025-03-27 08:19:49,280 Current Learning Rate: 0.0000909251 +2025-03-27 08:19:49,281 Train Loss: 0.0000469, Val Loss: 0.0000559 +2025-03-27 08:19:49,281 Epoch 1440/2000 +2025-03-27 08:24:29,950 Current Learning Rate: 0.0000954915 +2025-03-27 08:24:29,950 Train Loss: 0.0000500, Val Loss: 0.0000561 +2025-03-27 08:24:29,950 Epoch 1441/2000 +2025-03-27 08:29:10,855 Current Learning Rate: 0.0001001577 +2025-03-27 08:29:10,856 Train Loss: 0.0000577, Val Loss: 0.0000563 +2025-03-27 08:29:10,856 Epoch 1442/2000 +2025-03-27 08:33:51,451 Current Learning Rate: 0.0001049225 +2025-03-27 08:33:51,451 Train Loss: 0.0000525, Val Loss: 0.0000562 +2025-03-27 08:33:51,452 Epoch 1443/2000 +2025-03-27 08:38:32,656 Current Learning Rate: 0.0001097848 +2025-03-27 08:38:32,656 Train Loss: 0.0000482, Val Loss: 0.0000563 +2025-03-27 08:38:32,657 Epoch 1444/2000 +2025-03-27 08:43:12,768 Current Learning Rate: 0.0001147434 +2025-03-27 08:43:12,768 Train Loss: 0.0000435, Val Loss: 0.0000562 +2025-03-27 08:43:12,769 Epoch 1445/2000 +2025-03-27 08:47:53,365 Current Learning Rate: 0.0001197970 +2025-03-27 08:47:53,366 Train Loss: 0.0000480, Val Loss: 0.0000564 +2025-03-27 08:47:53,366 Epoch 
1446/2000 +2025-03-27 08:52:34,450 Current Learning Rate: 0.0001249445 +2025-03-27 08:52:34,451 Train Loss: 0.0000514, Val Loss: 0.0000562 +2025-03-27 08:52:34,451 Epoch 1447/2000 +2025-03-27 08:57:14,916 Current Learning Rate: 0.0001301845 +2025-03-27 08:57:14,917 Train Loss: 0.0000493, Val Loss: 0.0000569 +2025-03-27 08:57:14,917 Epoch 1448/2000 +2025-03-27 09:01:55,321 Current Learning Rate: 0.0001355157 +2025-03-27 09:01:55,321 Train Loss: 0.0000553, Val Loss: 0.0000567 +2025-03-27 09:01:55,322 Epoch 1449/2000 +2025-03-27 09:06:35,798 Current Learning Rate: 0.0001409369 +2025-03-27 09:06:35,799 Train Loss: 0.0000515, Val Loss: 0.0000563 +2025-03-27 09:06:35,799 Epoch 1450/2000 +2025-03-27 09:11:16,633 Current Learning Rate: 0.0001464466 +2025-03-27 09:11:16,633 Train Loss: 0.0000425, Val Loss: 0.0000563 +2025-03-27 09:11:16,633 Epoch 1451/2000 +2025-03-27 09:15:58,105 Current Learning Rate: 0.0001520436 +2025-03-27 09:15:58,105 Train Loss: 0.0000530, Val Loss: 0.0000566 +2025-03-27 09:15:58,106 Epoch 1452/2000 +2025-03-27 09:20:38,444 Current Learning Rate: 0.0001577264 +2025-03-27 09:20:38,444 Train Loss: 0.0000475, Val Loss: 0.0000568 +2025-03-27 09:20:38,444 Epoch 1453/2000 +2025-03-27 09:25:19,725 Current Learning Rate: 0.0001634937 +2025-03-27 09:25:19,726 Train Loss: 0.0000484, Val Loss: 0.0000563 +2025-03-27 09:25:19,726 Epoch 1454/2000 +2025-03-27 09:30:00,094 Current Learning Rate: 0.0001693441 +2025-03-27 09:30:00,094 Train Loss: 0.0000552, Val Loss: 0.0000571 +2025-03-27 09:30:00,095 Epoch 1455/2000 +2025-03-27 09:34:40,312 Current Learning Rate: 0.0001752760 +2025-03-27 09:34:40,313 Train Loss: 0.0000482, Val Loss: 0.0000567 +2025-03-27 09:34:40,313 Epoch 1456/2000 +2025-03-27 09:39:20,163 Current Learning Rate: 0.0001812880 +2025-03-27 09:39:20,163 Train Loss: 0.0000518, Val Loss: 0.0000564 +2025-03-27 09:39:20,164 Epoch 1457/2000 +2025-03-27 09:44:00,777 Current Learning Rate: 0.0001873787 +2025-03-27 09:44:00,777 Train Loss: 0.0000517, Val Loss: 
0.0000571 +2025-03-27 09:44:00,777 Epoch 1458/2000 +2025-03-27 09:48:41,020 Current Learning Rate: 0.0001935465 +2025-03-27 09:48:41,020 Train Loss: 0.0000599, Val Loss: 0.0000607 +2025-03-27 09:48:41,020 Epoch 1459/2000 +2025-03-27 09:53:21,919 Current Learning Rate: 0.0001997899 +2025-03-27 09:53:21,919 Train Loss: 0.0000451, Val Loss: 0.0000573 +2025-03-27 09:53:21,920 Epoch 1460/2000 +2025-03-27 09:58:02,657 Current Learning Rate: 0.0002061074 +2025-03-27 09:58:02,658 Train Loss: 0.0000473, Val Loss: 0.0000573 +2025-03-27 09:58:02,658 Epoch 1461/2000 +2025-03-27 10:02:43,470 Current Learning Rate: 0.0002124974 +2025-03-27 10:02:43,471 Train Loss: 0.0000534, Val Loss: 0.0000573 +2025-03-27 10:02:43,471 Epoch 1462/2000 +2025-03-27 10:07:24,684 Current Learning Rate: 0.0002189583 +2025-03-27 10:07:24,685 Train Loss: 0.0000470, Val Loss: 0.0000572 +2025-03-27 10:07:24,685 Epoch 1463/2000 +2025-03-27 10:12:05,482 Current Learning Rate: 0.0002254886 +2025-03-27 10:12:05,483 Train Loss: 0.0000471, Val Loss: 0.0000567 +2025-03-27 10:12:05,483 Epoch 1464/2000 +2025-03-27 10:16:45,676 Current Learning Rate: 0.0002320866 +2025-03-27 10:16:45,677 Train Loss: 0.0000395, Val Loss: 0.0000564 +2025-03-27 10:16:45,677 Epoch 1465/2000 +2025-03-27 10:21:26,532 Current Learning Rate: 0.0002387507 +2025-03-27 10:21:26,532 Train Loss: 0.0000489, Val Loss: 0.0000568 +2025-03-27 10:21:26,533 Epoch 1466/2000 +2025-03-27 10:26:07,785 Current Learning Rate: 0.0002454793 +2025-03-27 10:26:07,786 Train Loss: 0.0000556, Val Loss: 0.0000576 +2025-03-27 10:26:07,786 Epoch 1467/2000 +2025-03-27 10:30:48,241 Current Learning Rate: 0.0002522707 +2025-03-27 10:30:48,241 Train Loss: 0.0000521, Val Loss: 0.0000577 +2025-03-27 10:30:48,241 Epoch 1468/2000 +2025-03-27 10:35:28,933 Current Learning Rate: 0.0002591232 +2025-03-27 10:35:28,933 Train Loss: 0.0000510, Val Loss: 0.0000568 +2025-03-27 10:35:28,933 Epoch 1469/2000 +2025-03-27 10:40:10,022 Current Learning Rate: 0.0002660351 +2025-03-27 
10:40:10,022 Train Loss: 0.0000532, Val Loss: 0.0000570 +2025-03-27 10:40:10,022 Epoch 1470/2000 +2025-03-27 10:44:49,953 Current Learning Rate: 0.0002730048 +2025-03-27 10:44:49,954 Train Loss: 0.0000500, Val Loss: 0.0000631 +2025-03-27 10:44:49,954 Epoch 1471/2000 +2025-03-27 10:49:30,632 Current Learning Rate: 0.0002800304 +2025-03-27 10:49:30,633 Train Loss: 0.0000575, Val Loss: 0.0000649 +2025-03-27 10:49:30,633 Epoch 1472/2000 +2025-03-27 10:54:11,065 Current Learning Rate: 0.0002871104 +2025-03-27 10:54:11,066 Train Loss: 0.0000471, Val Loss: 0.0000587 +2025-03-27 10:54:11,066 Epoch 1473/2000 +2025-03-27 10:58:52,075 Current Learning Rate: 0.0002942428 +2025-03-27 10:58:52,075 Train Loss: 0.0000458, Val Loss: 0.0000571 +2025-03-27 10:58:52,076 Epoch 1474/2000 +2025-03-27 11:03:32,859 Current Learning Rate: 0.0003014261 +2025-03-27 11:03:32,860 Train Loss: 0.0000487, Val Loss: 0.0000571 +2025-03-27 11:03:32,861 Epoch 1475/2000 +2025-03-27 11:08:12,990 Current Learning Rate: 0.0003086583 +2025-03-27 11:08:12,991 Train Loss: 0.0000487, Val Loss: 0.0000584 +2025-03-27 11:08:12,991 Epoch 1476/2000 +2025-03-27 11:12:53,730 Current Learning Rate: 0.0003159377 +2025-03-27 11:12:53,730 Train Loss: 0.0000419, Val Loss: 0.0000581 +2025-03-27 11:12:53,730 Epoch 1477/2000 +2025-03-27 11:17:34,183 Current Learning Rate: 0.0003232626 +2025-03-27 11:17:34,184 Train Loss: 0.0000471, Val Loss: 0.0000578 +2025-03-27 11:17:34,184 Epoch 1478/2000 +2025-03-27 11:22:14,850 Current Learning Rate: 0.0003306310 +2025-03-27 11:22:14,850 Train Loss: 0.0000467, Val Loss: 0.0000569 +2025-03-27 11:22:14,851 Epoch 1479/2000 +2025-03-27 11:26:55,748 Current Learning Rate: 0.0003380413 +2025-03-27 11:26:55,748 Train Loss: 0.0000570, Val Loss: 0.0000583 +2025-03-27 11:26:55,749 Epoch 1480/2000 +2025-03-27 11:31:36,683 Current Learning Rate: 0.0003454915 +2025-03-27 11:31:36,684 Train Loss: 0.0000535, Val Loss: 0.0000604 +2025-03-27 11:31:36,684 Epoch 1481/2000 +2025-03-27 11:36:17,819 Current 
Learning Rate: 0.0003529798 +2025-03-27 11:36:17,820 Train Loss: 0.0000521, Val Loss: 0.0000591 +2025-03-27 11:36:17,820 Epoch 1482/2000 +2025-03-27 11:40:58,380 Current Learning Rate: 0.0003605044 +2025-03-27 11:40:58,381 Train Loss: 0.0000436, Val Loss: 0.0000569 +2025-03-27 11:40:58,381 Epoch 1483/2000 +2025-03-27 11:45:39,083 Current Learning Rate: 0.0003680635 +2025-03-27 11:45:39,084 Train Loss: 0.0000493, Val Loss: 0.0000579 +2025-03-27 11:45:39,084 Epoch 1484/2000 +2025-03-27 11:50:19,679 Current Learning Rate: 0.0003756551 +2025-03-27 11:50:19,679 Train Loss: 0.0000532, Val Loss: 0.0000575 +2025-03-27 11:50:19,680 Epoch 1485/2000 +2025-03-27 11:55:00,805 Current Learning Rate: 0.0003832773 +2025-03-27 11:55:00,806 Train Loss: 0.0000471, Val Loss: 0.0000581 +2025-03-27 11:55:00,806 Epoch 1486/2000 +2025-03-27 11:59:41,410 Current Learning Rate: 0.0003909284 +2025-03-27 11:59:41,411 Train Loss: 0.0000630, Val Loss: 0.0000581 +2025-03-27 11:59:41,411 Epoch 1487/2000 +2025-03-27 12:04:22,312 Current Learning Rate: 0.0003986064 +2025-03-27 12:04:22,312 Train Loss: 0.0000514, Val Loss: 0.0000588 +2025-03-27 12:04:22,313 Epoch 1488/2000 +2025-03-27 12:09:02,463 Current Learning Rate: 0.0004063093 +2025-03-27 12:09:02,464 Train Loss: 0.0000443, Val Loss: 0.0000573 +2025-03-27 12:09:02,464 Epoch 1489/2000 +2025-03-27 12:13:42,745 Current Learning Rate: 0.0004140354 +2025-03-27 12:13:42,746 Train Loss: 0.0000564, Val Loss: 0.0000584 +2025-03-27 12:13:42,746 Epoch 1490/2000 +2025-03-27 12:18:23,678 Current Learning Rate: 0.0004217828 +2025-03-27 12:18:23,678 Train Loss: 0.0000533, Val Loss: 0.0000600 +2025-03-27 12:18:23,678 Epoch 1491/2000 +2025-03-27 12:23:04,192 Current Learning Rate: 0.0004295494 +2025-03-27 12:23:04,193 Train Loss: 0.0000514, Val Loss: 0.0000575 +2025-03-27 12:23:04,193 Epoch 1492/2000 +2025-03-27 12:27:44,682 Current Learning Rate: 0.0004373334 +2025-03-27 12:27:44,682 Train Loss: 0.0000469, Val Loss: 0.0000608 +2025-03-27 12:27:44,682 Epoch 
1493/2000 +2025-03-27 12:32:24,834 Current Learning Rate: 0.0004451328 +2025-03-27 12:32:24,835 Train Loss: 0.0000539, Val Loss: 0.0000600 +2025-03-27 12:32:24,835 Epoch 1494/2000 +2025-03-27 12:37:05,385 Current Learning Rate: 0.0004529458 +2025-03-27 12:37:05,385 Train Loss: 0.0000460, Val Loss: 0.0000579 +2025-03-27 12:37:05,385 Epoch 1495/2000 +2025-03-27 12:41:45,475 Current Learning Rate: 0.0004607705 +2025-03-27 12:41:45,475 Train Loss: 0.0000619, Val Loss: 0.0000595 +2025-03-27 12:41:45,475 Epoch 1496/2000 +2025-03-27 12:46:25,888 Current Learning Rate: 0.0004686047 +2025-03-27 12:46:25,889 Train Loss: 0.0000580, Val Loss: 0.0000599 +2025-03-27 12:46:25,889 Epoch 1497/2000 +2025-03-27 12:51:05,974 Current Learning Rate: 0.0004764468 +2025-03-27 12:51:05,974 Train Loss: 0.0000517, Val Loss: 0.0000594 +2025-03-27 12:51:05,975 Epoch 1498/2000 +2025-03-27 12:55:46,283 Current Learning Rate: 0.0004842946 +2025-03-27 12:55:46,283 Train Loss: 0.0000457, Val Loss: 0.0000573 +2025-03-27 12:55:46,284 Epoch 1499/2000 +2025-03-27 13:00:27,527 Current Learning Rate: 0.0004921463 +2025-03-27 13:00:27,528 Train Loss: 0.0000530, Val Loss: 0.0000581 +2025-03-27 13:00:27,528 Epoch 1500/2000 +2025-03-27 13:05:08,625 Current Learning Rate: 0.0005000000 +2025-03-27 13:05:08,625 Train Loss: 0.0000563, Val Loss: 0.0000635 +2025-03-27 13:05:08,626 Epoch 1501/2000 +2025-03-27 13:09:48,517 Current Learning Rate: 0.0005078537 +2025-03-27 13:09:48,518 Train Loss: 0.0000569, Val Loss: 0.0000598 +2025-03-27 13:09:48,518 Epoch 1502/2000 +2025-03-27 13:14:29,265 Current Learning Rate: 0.0005157054 +2025-03-27 13:14:29,265 Train Loss: 0.0000594, Val Loss: 0.0000731 +2025-03-27 13:14:29,266 Epoch 1503/2000 +2025-03-27 13:19:09,742 Current Learning Rate: 0.0005235532 +2025-03-27 13:19:09,742 Train Loss: 0.0000572, Val Loss: 0.0000603 +2025-03-27 13:19:09,743 Epoch 1504/2000 +2025-03-27 13:23:50,069 Current Learning Rate: 0.0005313953 +2025-03-27 13:23:50,069 Train Loss: 0.0000520, Val Loss: 
0.0000584 +2025-03-27 13:23:50,069 Epoch 1505/2000 +2025-03-27 13:28:30,831 Current Learning Rate: 0.0005392295 +2025-03-27 13:28:30,831 Train Loss: 0.0000555, Val Loss: 0.0000620 +2025-03-27 13:28:30,832 Epoch 1506/2000 +2025-03-27 13:33:11,128 Current Learning Rate: 0.0005470542 +2025-03-27 13:33:11,129 Train Loss: 0.0000558, Val Loss: 0.0000605 +2025-03-27 13:33:11,129 Epoch 1507/2000 +2025-03-27 13:37:51,820 Current Learning Rate: 0.0005548672 +2025-03-27 13:37:51,820 Train Loss: 0.0000500, Val Loss: 0.0000603 +2025-03-27 13:37:51,821 Epoch 1508/2000 +2025-03-27 13:42:32,239 Current Learning Rate: 0.0005626666 +2025-03-27 13:42:32,240 Train Loss: 0.0000646, Val Loss: 0.0000622 +2025-03-27 13:42:32,240 Epoch 1509/2000 +2025-03-27 13:47:13,377 Current Learning Rate: 0.0005704506 +2025-03-27 13:47:13,377 Train Loss: 0.0000645, Val Loss: 0.0000608 +2025-03-27 13:47:13,377 Epoch 1510/2000 +2025-03-27 13:51:53,936 Current Learning Rate: 0.0005782172 +2025-03-27 13:51:53,937 Train Loss: 0.0000548, Val Loss: 0.0000590 +2025-03-27 13:51:53,937 Epoch 1511/2000 +2025-03-27 13:56:34,737 Current Learning Rate: 0.0005859646 +2025-03-27 13:56:34,738 Train Loss: 0.0000564, Val Loss: 0.0000592 +2025-03-27 13:56:34,738 Epoch 1512/2000 +2025-03-27 14:01:16,368 Current Learning Rate: 0.0005936907 +2025-03-27 14:01:16,369 Train Loss: 0.0000540, Val Loss: 0.0000594 +2025-03-27 14:01:16,369 Epoch 1513/2000 +2025-03-27 14:05:57,689 Current Learning Rate: 0.0006013936 +2025-03-27 14:05:57,689 Train Loss: 0.0000466, Val Loss: 0.0000583 +2025-03-27 14:05:57,690 Epoch 1514/2000 +2025-03-27 14:10:38,303 Current Learning Rate: 0.0006090716 +2025-03-27 14:10:38,304 Train Loss: 0.0000490, Val Loss: 0.0000599 +2025-03-27 14:10:38,305 Epoch 1515/2000 +2025-03-27 14:15:19,552 Current Learning Rate: 0.0006167227 +2025-03-27 14:15:19,553 Train Loss: 0.0000596, Val Loss: 0.0000600 +2025-03-27 14:15:19,553 Epoch 1516/2000 +2025-03-27 14:20:00,215 Current Learning Rate: 0.0006243449 +2025-03-27 
14:20:00,215 Train Loss: 0.0000655, Val Loss: 0.0000619 +2025-03-27 14:20:00,216 Epoch 1517/2000 +2025-03-27 14:24:40,513 Current Learning Rate: 0.0006319365 +2025-03-27 14:24:40,513 Train Loss: 0.0000523, Val Loss: 0.0000622 +2025-03-27 14:24:40,514 Epoch 1518/2000 +2025-03-27 14:29:21,359 Current Learning Rate: 0.0006394956 +2025-03-27 14:29:21,359 Train Loss: 0.0000639, Val Loss: 0.0000630 +2025-03-27 14:29:21,360 Epoch 1519/2000 +2025-03-27 14:34:02,208 Current Learning Rate: 0.0006470202 +2025-03-27 14:34:02,208 Train Loss: 0.0000589, Val Loss: 0.0000631 +2025-03-27 14:34:02,209 Epoch 1520/2000 +2025-03-27 14:38:43,630 Current Learning Rate: 0.0006545085 +2025-03-27 14:38:43,631 Train Loss: 0.0000561, Val Loss: 0.0000603 +2025-03-27 14:38:43,631 Epoch 1521/2000 +2025-03-27 14:43:23,849 Current Learning Rate: 0.0006619587 +2025-03-27 14:43:23,849 Train Loss: 0.0000570, Val Loss: 0.0000658 +2025-03-27 14:43:23,850 Epoch 1522/2000 +2025-03-27 14:48:04,380 Current Learning Rate: 0.0006693690 +2025-03-27 14:48:04,381 Train Loss: 0.0000686, Val Loss: 0.0000660 +2025-03-27 14:48:04,381 Epoch 1523/2000 +2025-03-27 14:52:44,677 Current Learning Rate: 0.0006767374 +2025-03-27 14:52:44,678 Train Loss: 0.0000521, Val Loss: 0.0000630 +2025-03-27 14:52:44,678 Epoch 1524/2000 +2025-03-27 14:57:25,705 Current Learning Rate: 0.0006840623 +2025-03-27 14:57:25,706 Train Loss: 0.0000651, Val Loss: 0.0000616 +2025-03-27 14:57:25,706 Epoch 1525/2000 +2025-03-27 15:02:05,953 Current Learning Rate: 0.0006913417 +2025-03-27 15:02:05,954 Train Loss: 0.0000604, Val Loss: 0.0000619 +2025-03-27 15:02:05,954 Epoch 1526/2000 +2025-03-27 15:06:46,823 Current Learning Rate: 0.0006985739 +2025-03-27 15:06:46,823 Train Loss: 0.0000546, Val Loss: 0.0000666 +2025-03-27 15:06:46,824 Epoch 1527/2000 +2025-03-27 15:11:27,316 Current Learning Rate: 0.0007057572 +2025-03-27 15:11:27,317 Train Loss: 0.0000701, Val Loss: 0.0000661 +2025-03-27 15:11:27,317 Epoch 1528/2000 +2025-03-27 15:16:07,582 Current 
Learning Rate: 0.0007128896 +2025-03-27 15:16:07,582 Train Loss: 0.0000556, Val Loss: 0.0000642 +2025-03-27 15:16:07,582 Epoch 1529/2000 +2025-03-27 15:20:48,273 Current Learning Rate: 0.0007199696 +2025-03-27 15:20:48,274 Train Loss: 0.0000607, Val Loss: 0.0000694 +2025-03-27 15:20:48,274 Epoch 1530/2000 +2025-03-27 15:25:28,423 Current Learning Rate: 0.0007269952 +2025-03-27 15:25:28,424 Train Loss: 0.0000680, Val Loss: 0.0000670 +2025-03-27 15:25:28,424 Epoch 1531/2000 +2025-03-27 15:30:09,068 Current Learning Rate: 0.0007339649 +2025-03-27 15:30:09,069 Train Loss: 0.0000765, Val Loss: 0.0000697 +2025-03-27 15:30:09,069 Epoch 1532/2000 +2025-03-27 15:34:49,517 Current Learning Rate: 0.0007408768 +2025-03-27 15:34:49,518 Train Loss: 0.0000857, Val Loss: 0.0000701 +2025-03-27 15:34:49,518 Epoch 1533/2000 +2025-03-27 15:39:30,396 Current Learning Rate: 0.0007477293 +2025-03-27 15:39:30,397 Train Loss: 0.0000715, Val Loss: 0.0000674 +2025-03-27 15:39:30,397 Epoch 1534/2000 +2025-03-27 15:44:10,739 Current Learning Rate: 0.0007545207 +2025-03-27 15:44:10,740 Train Loss: 0.0000617, Val Loss: 0.0000642 +2025-03-27 15:44:10,740 Epoch 1535/2000 +2025-03-27 15:48:51,165 Current Learning Rate: 0.0007612493 +2025-03-27 15:48:51,165 Train Loss: 0.0000568, Val Loss: 0.0000629 +2025-03-27 15:48:51,165 Epoch 1536/2000 +2025-03-27 15:53:31,869 Current Learning Rate: 0.0007679134 +2025-03-27 15:53:31,870 Train Loss: 0.0000700, Val Loss: 0.0000676 +2025-03-27 15:53:31,870 Epoch 1537/2000 +2025-03-27 15:58:12,615 Current Learning Rate: 0.0007745114 +2025-03-27 15:58:12,615 Train Loss: 0.0000849, Val Loss: 0.0000863 +2025-03-27 15:58:12,616 Epoch 1538/2000 +2025-03-27 16:02:53,826 Current Learning Rate: 0.0007810417 +2025-03-27 16:02:53,827 Train Loss: 0.0000665, Val Loss: 0.0000647 +2025-03-27 16:02:53,827 Epoch 1539/2000 +2025-03-27 16:07:34,207 Current Learning Rate: 0.0007875026 +2025-03-27 16:07:34,208 Train Loss: 0.0000577, Val Loss: 0.0000761 +2025-03-27 16:07:34,208 Epoch 
1540/2000 +2025-03-27 16:12:14,606 Current Learning Rate: 0.0007938926 +2025-03-27 16:12:14,606 Train Loss: 0.0000593, Val Loss: 0.0000696 +2025-03-27 16:12:14,607 Epoch 1541/2000 +2025-03-27 16:16:55,428 Current Learning Rate: 0.0008002101 +2025-03-27 16:16:55,428 Train Loss: 0.0000688, Val Loss: 0.0000629 +2025-03-27 16:16:55,428 Epoch 1542/2000 +2025-03-27 16:21:35,736 Current Learning Rate: 0.0008064535 +2025-03-27 16:21:35,736 Train Loss: 0.0000765, Val Loss: 0.0000779 +2025-03-27 16:21:35,736 Epoch 1543/2000 +2025-03-27 16:26:15,564 Current Learning Rate: 0.0008126213 +2025-03-27 16:26:15,565 Train Loss: 0.0001020, Val Loss: 0.0000874 +2025-03-27 16:26:15,565 Epoch 1544/2000 +2025-03-27 16:30:55,873 Current Learning Rate: 0.0008187120 +2025-03-27 16:30:55,873 Train Loss: 0.0000646, Val Loss: 0.0000628 +2025-03-27 16:30:55,874 Epoch 1545/2000 +2025-03-27 16:35:36,618 Current Learning Rate: 0.0008247240 +2025-03-27 16:35:36,619 Train Loss: 0.0000665, Val Loss: 0.0000704 +2025-03-27 16:35:36,619 Epoch 1546/2000 +2025-03-27 16:40:17,232 Current Learning Rate: 0.0008306559 +2025-03-27 16:40:17,233 Train Loss: 0.0000609, Val Loss: 0.0000635 +2025-03-27 16:40:17,233 Epoch 1547/2000 +2025-03-27 16:44:57,031 Current Learning Rate: 0.0008365063 +2025-03-27 16:44:57,032 Train Loss: 0.0000691, Val Loss: 0.0000726 +2025-03-27 16:44:57,032 Epoch 1548/2000 +2025-03-27 16:49:37,192 Current Learning Rate: 0.0008422736 +2025-03-27 16:49:37,192 Train Loss: 0.0000625, Val Loss: 0.0000721 +2025-03-27 16:49:37,193 Epoch 1549/2000 +2025-03-27 16:54:17,521 Current Learning Rate: 0.0008479564 +2025-03-27 16:54:17,521 Train Loss: 0.0000611, Val Loss: 0.0000636 +2025-03-27 16:54:17,521 Epoch 1550/2000 +2025-03-27 16:58:58,253 Current Learning Rate: 0.0008535534 +2025-03-27 16:58:58,254 Train Loss: 0.0000721, Val Loss: 0.0000735 +2025-03-27 16:58:58,254 Epoch 1551/2000 +2025-03-27 17:03:39,639 Current Learning Rate: 0.0008590631 +2025-03-27 17:03:39,640 Train Loss: 0.0000652, Val Loss: 
0.0000700 +2025-03-27 17:03:39,640 Epoch 1552/2000 +2025-03-27 17:08:19,312 Current Learning Rate: 0.0008644843 +2025-03-27 17:08:19,313 Train Loss: 0.0000733, Val Loss: 0.0000726 +2025-03-27 17:08:19,313 Epoch 1553/2000 +2025-03-27 17:12:59,571 Current Learning Rate: 0.0008698155 +2025-03-27 17:12:59,571 Train Loss: 0.0000791, Val Loss: 0.0000705 +2025-03-27 17:12:59,572 Epoch 1554/2000 +2025-03-27 17:17:39,674 Current Learning Rate: 0.0008750555 +2025-03-27 17:17:39,674 Train Loss: 0.0001065, Val Loss: 0.0000690 +2025-03-27 17:17:39,674 Epoch 1555/2000 +2025-03-27 17:22:20,266 Current Learning Rate: 0.0008802030 +2025-03-27 17:22:20,267 Train Loss: 0.0000536, Val Loss: 0.0000650 +2025-03-27 17:22:20,267 Epoch 1556/2000 +2025-03-27 17:27:00,075 Current Learning Rate: 0.0008852566 +2025-03-27 17:27:00,075 Train Loss: 0.0000556, Val Loss: 0.0000664 +2025-03-27 17:27:00,076 Epoch 1557/2000 +2025-03-27 17:31:40,620 Current Learning Rate: 0.0008902152 +2025-03-27 17:31:40,621 Train Loss: 0.0000673, Val Loss: 0.0000635 +2025-03-27 17:31:40,621 Epoch 1558/2000 +2025-03-27 17:36:20,804 Current Learning Rate: 0.0008950775 +2025-03-27 17:36:20,804 Train Loss: 0.0000632, Val Loss: 0.0000648 +2025-03-27 17:36:20,804 Epoch 1559/2000 +2025-03-27 17:41:01,387 Current Learning Rate: 0.0008998423 +2025-03-27 17:41:01,387 Train Loss: 0.0000690, Val Loss: 0.0000663 +2025-03-27 17:41:01,388 Epoch 1560/2000 +2025-03-27 17:45:41,215 Current Learning Rate: 0.0009045085 +2025-03-27 17:45:41,215 Train Loss: 0.0000531, Val Loss: 0.0000621 +2025-03-27 17:45:41,215 Epoch 1561/2000 +2025-03-27 17:50:22,112 Current Learning Rate: 0.0009090749 +2025-03-27 17:50:22,113 Train Loss: 0.0000676, Val Loss: 0.0000653 +2025-03-27 17:50:22,113 Epoch 1562/2000 +2025-03-27 17:55:02,436 Current Learning Rate: 0.0009135403 +2025-03-27 17:55:02,436 Train Loss: 0.0000585, Val Loss: 0.0001058 +2025-03-27 17:55:02,436 Epoch 1563/2000 +2025-03-27 17:59:42,683 Current Learning Rate: 0.0009179037 +2025-03-27 
17:59:42,683 Train Loss: 0.0000891, Val Loss: 0.0000846 +2025-03-27 17:59:42,683 Epoch 1564/2000 +2025-03-27 18:04:23,083 Current Learning Rate: 0.0009221640 +2025-03-27 18:04:23,084 Train Loss: 0.0000706, Val Loss: 0.0000744 +2025-03-27 18:04:23,084 Epoch 1565/2000 +2025-03-27 18:09:03,043 Current Learning Rate: 0.0009263201 +2025-03-27 18:09:03,043 Train Loss: 0.0000646, Val Loss: 0.0000721 +2025-03-27 18:09:03,044 Epoch 1566/2000 +2025-03-27 18:13:43,387 Current Learning Rate: 0.0009303710 +2025-03-27 18:13:43,387 Train Loss: 0.0000611, Val Loss: 0.0000836 +2025-03-27 18:13:43,388 Epoch 1567/2000 +2025-03-27 18:18:23,976 Current Learning Rate: 0.0009343158 +2025-03-27 18:18:23,977 Train Loss: 0.0000601, Val Loss: 0.0000667 +2025-03-27 18:18:23,977 Epoch 1568/2000 +2025-03-27 18:23:04,697 Current Learning Rate: 0.0009381533 +2025-03-27 18:23:04,698 Train Loss: 0.0000733, Val Loss: 0.0000882 +2025-03-27 18:23:04,698 Epoch 1569/2000 +2025-03-27 18:27:45,315 Current Learning Rate: 0.0009418828 +2025-03-27 18:27:45,316 Train Loss: 0.0000710, Val Loss: 0.0000874 +2025-03-27 18:27:45,316 Epoch 1570/2000 +2025-03-27 18:32:25,170 Current Learning Rate: 0.0009455033 +2025-03-27 18:32:25,171 Train Loss: 0.0000797, Val Loss: 0.0000874 +2025-03-27 18:32:25,171 Epoch 1571/2000 +2025-03-27 18:37:05,390 Current Learning Rate: 0.0009490138 +2025-03-27 18:37:05,391 Train Loss: 0.0000737, Val Loss: 0.0000700 +2025-03-27 18:37:05,391 Epoch 1572/2000 +2025-03-27 18:41:45,549 Current Learning Rate: 0.0009524135 +2025-03-27 18:41:45,549 Train Loss: 0.0000694, Val Loss: 0.0000672 +2025-03-27 18:41:45,550 Epoch 1573/2000 +2025-03-27 18:46:25,739 Current Learning Rate: 0.0009557016 +2025-03-27 18:46:25,740 Train Loss: 0.0000806, Val Loss: 0.0000802 +2025-03-27 18:46:25,740 Epoch 1574/2000 +2025-03-27 18:51:06,755 Current Learning Rate: 0.0009588773 +2025-03-27 18:51:06,755 Train Loss: 0.0000743, Val Loss: 0.0000722 +2025-03-27 18:51:06,756 Epoch 1575/2000 +2025-03-27 18:55:47,461 Current 
Learning Rate: 0.0009619398 +2025-03-27 18:55:47,461 Train Loss: 0.0000703, Val Loss: 0.0000780 +2025-03-27 18:55:47,462 Epoch 1576/2000 +2025-03-27 19:00:27,713 Current Learning Rate: 0.0009648882 +2025-03-27 19:00:27,713 Train Loss: 0.0000702, Val Loss: 0.0000684 +2025-03-27 19:00:27,713 Epoch 1577/2000 +2025-03-27 19:05:07,875 Current Learning Rate: 0.0009677220 +2025-03-27 19:05:07,876 Train Loss: 0.0000643, Val Loss: 0.0000671 +2025-03-27 19:05:07,876 Epoch 1578/2000 +2025-03-27 19:09:48,547 Current Learning Rate: 0.0009704404 +2025-03-27 19:09:48,547 Train Loss: 0.0000647, Val Loss: 0.0000686 +2025-03-27 19:09:48,548 Epoch 1579/2000 +2025-03-27 19:14:28,892 Current Learning Rate: 0.0009730427 +2025-03-27 19:14:28,893 Train Loss: 0.0000672, Val Loss: 0.0000990 +2025-03-27 19:14:28,893 Epoch 1580/2000 +2025-03-27 19:19:09,320 Current Learning Rate: 0.0009755283 +2025-03-27 19:19:09,320 Train Loss: 0.0000705, Val Loss: 0.0000767 +2025-03-27 19:19:09,320 Epoch 1581/2000 +2025-03-27 19:23:49,938 Current Learning Rate: 0.0009778965 +2025-03-27 19:23:49,938 Train Loss: 0.0000793, Val Loss: 0.0000763 +2025-03-27 19:23:49,939 Epoch 1582/2000 +2025-03-27 19:28:30,520 Current Learning Rate: 0.0009801468 +2025-03-27 19:28:30,521 Train Loss: 0.0000684, Val Loss: 0.0000662 +2025-03-27 19:28:30,521 Epoch 1583/2000 +2025-03-27 19:33:10,154 Current Learning Rate: 0.0009822787 +2025-03-27 19:33:10,154 Train Loss: 0.0000750, Val Loss: 0.0000709 +2025-03-27 19:33:10,155 Epoch 1584/2000 +2025-03-27 19:37:50,729 Current Learning Rate: 0.0009842916 +2025-03-27 19:37:50,730 Train Loss: 0.0000675, Val Loss: 0.0000767 +2025-03-27 19:37:50,730 Epoch 1585/2000 +2025-03-27 19:42:31,198 Current Learning Rate: 0.0009861850 +2025-03-27 19:42:31,199 Train Loss: 0.0000713, Val Loss: 0.0000786 +2025-03-27 19:42:31,199 Epoch 1586/2000 +2025-03-27 19:47:11,936 Current Learning Rate: 0.0009879584 +2025-03-27 19:47:11,937 Train Loss: 0.0001478, Val Loss: 0.0000782 +2025-03-27 19:47:11,937 Epoch 
1587/2000 +2025-03-27 19:51:52,705 Current Learning Rate: 0.0009896114 +2025-03-27 19:51:52,705 Train Loss: 0.0000753, Val Loss: 0.0000683 +2025-03-27 19:51:52,706 Epoch 1588/2000 +2025-03-27 19:56:33,705 Current Learning Rate: 0.0009911436 +2025-03-27 19:56:33,705 Train Loss: 0.0000627, Val Loss: 0.0000654 +2025-03-27 19:56:33,706 Epoch 1589/2000 +2025-03-27 20:01:13,583 Current Learning Rate: 0.0009925547 +2025-03-27 20:01:13,583 Train Loss: 0.0000669, Val Loss: 0.0000738 +2025-03-27 20:01:13,583 Epoch 1590/2000 +2025-03-27 20:05:54,281 Current Learning Rate: 0.0009938442 +2025-03-27 20:05:54,282 Train Loss: 0.0000780, Val Loss: 0.0000661 +2025-03-27 20:05:54,282 Epoch 1591/2000 +2025-03-27 20:10:34,532 Current Learning Rate: 0.0009950118 +2025-03-27 20:10:34,533 Train Loss: 0.0000749, Val Loss: 0.0000693 +2025-03-27 20:10:34,533 Epoch 1592/2000 +2025-03-27 20:15:14,777 Current Learning Rate: 0.0009960574 +2025-03-27 20:15:14,777 Train Loss: 0.0000631, Val Loss: 0.0000732 +2025-03-27 20:15:14,777 Epoch 1593/2000 +2025-03-27 20:19:55,396 Current Learning Rate: 0.0009969805 +2025-03-27 20:19:55,396 Train Loss: 0.0000726, Val Loss: 0.0000683 +2025-03-27 20:19:55,397 Epoch 1594/2000 +2025-03-27 20:24:36,171 Current Learning Rate: 0.0009977810 +2025-03-27 20:24:36,172 Train Loss: 0.0000732, Val Loss: 0.0000680 +2025-03-27 20:24:36,172 Epoch 1595/2000 +2025-03-27 20:29:16,923 Current Learning Rate: 0.0009984587 +2025-03-27 20:29:16,924 Train Loss: 0.0000709, Val Loss: 0.0000750 +2025-03-27 20:29:16,924 Epoch 1596/2000 +2025-03-27 20:33:57,716 Current Learning Rate: 0.0009990134 +2025-03-27 20:33:57,716 Train Loss: 0.0000732, Val Loss: 0.0000723 +2025-03-27 20:33:57,716 Epoch 1597/2000 +2025-03-27 20:38:37,723 Current Learning Rate: 0.0009994449 +2025-03-27 20:38:37,723 Train Loss: 0.0000655, Val Loss: 0.0000688 +2025-03-27 20:38:37,724 Epoch 1598/2000 +2025-03-27 20:43:18,222 Current Learning Rate: 0.0009997533 +2025-03-27 20:43:18,222 Train Loss: 0.0000758, Val Loss: 
0.0000813 +2025-03-27 20:43:18,222 Epoch 1599/2000 +2025-03-27 20:47:58,592 Current Learning Rate: 0.0009999383 +2025-03-27 20:47:58,595 Train Loss: 0.0000778, Val Loss: 0.0000693 +2025-03-27 20:47:58,595 Epoch 1600/2000 +2025-03-27 20:52:39,210 Current Learning Rate: 0.0010000000 +2025-03-27 20:52:39,211 Train Loss: 0.0000841, Val Loss: 0.0000717 +2025-03-27 20:52:39,211 Epoch 1601/2000 +2025-03-27 20:57:19,814 Current Learning Rate: 0.0009999383 +2025-03-27 20:57:19,815 Train Loss: 0.0000635, Val Loss: 0.0000805 +2025-03-27 20:57:19,815 Epoch 1602/2000 +2025-03-27 21:02:00,372 Current Learning Rate: 0.0009997533 +2025-03-27 21:02:00,373 Train Loss: 0.0000659, Val Loss: 0.0000695 +2025-03-27 21:02:00,374 Epoch 1603/2000 +2025-03-27 21:06:40,622 Current Learning Rate: 0.0009994449 +2025-03-27 21:06:40,623 Train Loss: 0.0000723, Val Loss: 0.0000763 +2025-03-27 21:06:40,623 Epoch 1604/2000 +2025-03-27 21:11:21,439 Current Learning Rate: 0.0009990134 +2025-03-27 21:11:21,440 Train Loss: 0.0000647, Val Loss: 0.0000712 +2025-03-27 21:11:21,440 Epoch 1605/2000 +2025-03-27 21:16:02,070 Current Learning Rate: 0.0009984587 +2025-03-27 21:16:02,071 Train Loss: 0.0000725, Val Loss: 0.0000784 +2025-03-27 21:16:02,071 Epoch 1606/2000 +2025-03-27 21:20:42,866 Current Learning Rate: 0.0009977810 +2025-03-27 21:20:42,866 Train Loss: 0.0000700, Val Loss: 0.0000684 +2025-03-27 21:20:42,867 Epoch 1607/2000 +2025-03-27 21:25:23,548 Current Learning Rate: 0.0009969805 +2025-03-27 21:25:23,549 Train Loss: 0.0001133, Val Loss: 0.0000801 +2025-03-27 21:25:23,549 Epoch 1608/2000 +2025-03-27 21:30:03,781 Current Learning Rate: 0.0009960574 +2025-03-27 21:30:03,782 Train Loss: 0.0000708, Val Loss: 0.0000824 +2025-03-27 21:30:03,782 Epoch 1609/2000 +2025-03-27 21:34:44,800 Current Learning Rate: 0.0009950118 +2025-03-27 21:34:44,800 Train Loss: 0.0000632, Val Loss: 0.0000697 +2025-03-27 21:34:44,801 Epoch 1610/2000 +2025-03-27 21:39:25,648 Current Learning Rate: 0.0009938442 +2025-03-27 
21:39:25,649 Train Loss: 0.0000749, Val Loss: 0.0000679 +2025-03-27 21:39:25,649 Epoch 1611/2000 +2025-03-27 21:44:06,093 Current Learning Rate: 0.0009925547 +2025-03-27 21:44:06,094 Train Loss: 0.0000708, Val Loss: 0.0000677 +2025-03-27 21:44:06,094 Epoch 1612/2000 +2025-03-27 21:48:47,066 Current Learning Rate: 0.0009911436 +2025-03-27 21:48:47,067 Train Loss: 0.0000801, Val Loss: 0.0001036 +2025-03-27 21:48:47,067 Epoch 1613/2000 +2025-03-27 21:53:27,384 Current Learning Rate: 0.0009896114 +2025-03-27 21:53:27,384 Train Loss: 0.0000710, Val Loss: 0.0000678 +2025-03-27 21:53:27,384 Epoch 1614/2000 +2025-03-27 21:58:07,941 Current Learning Rate: 0.0009879584 +2025-03-27 21:58:07,941 Train Loss: 0.0000883, Val Loss: 0.0003274 +2025-03-27 21:58:07,942 Epoch 1615/2000 +2025-03-27 22:02:48,498 Current Learning Rate: 0.0009861850 +2025-03-27 22:02:48,498 Train Loss: 0.0001006, Val Loss: 0.0000719 +2025-03-27 22:02:48,498 Epoch 1616/2000 +2025-03-27 22:07:28,870 Current Learning Rate: 0.0009842916 +2025-03-27 22:07:28,871 Train Loss: 0.0000862, Val Loss: 0.0001282 +2025-03-27 22:07:28,871 Epoch 1617/2000 +2025-03-27 22:12:10,006 Current Learning Rate: 0.0009822787 +2025-03-27 22:12:10,006 Train Loss: 0.0000764, Val Loss: 0.0000859 +2025-03-27 22:12:10,006 Epoch 1618/2000 +2025-03-27 22:16:50,708 Current Learning Rate: 0.0009801468 +2025-03-27 22:16:50,708 Train Loss: 0.0000722, Val Loss: 0.0000748 +2025-03-27 22:16:50,708 Epoch 1619/2000 +2025-03-27 22:21:31,626 Current Learning Rate: 0.0009778965 +2025-03-27 22:21:31,627 Train Loss: 0.0000704, Val Loss: 0.0000845 +2025-03-27 22:21:31,627 Epoch 1620/2000 +2025-03-27 22:26:11,956 Current Learning Rate: 0.0009755283 +2025-03-27 22:26:11,956 Train Loss: 0.0000591, Val Loss: 0.0000714 +2025-03-27 22:26:11,956 Epoch 1621/2000 +2025-03-27 22:30:52,286 Current Learning Rate: 0.0009730427 +2025-03-27 22:30:52,286 Train Loss: 0.0000679, Val Loss: 0.0000666 +2025-03-27 22:30:52,286 Epoch 1622/2000 +2025-03-27 22:35:32,056 Current 
Learning Rate: 0.0009704404 +2025-03-27 22:35:32,056 Train Loss: 0.0000662, Val Loss: 0.0000680 +2025-03-27 22:35:32,056 Epoch 1623/2000 +2025-03-27 22:40:12,313 Current Learning Rate: 0.0009677220 +2025-03-27 22:40:12,314 Train Loss: 0.0000745, Val Loss: 0.0000763 +2025-03-27 22:40:12,314 Epoch 1624/2000 +2025-03-27 22:44:52,444 Current Learning Rate: 0.0009648882 +2025-03-27 22:44:52,445 Train Loss: 0.0000652, Val Loss: 0.0000669 +2025-03-27 22:44:52,445 Epoch 1625/2000 +2025-03-27 22:49:33,054 Current Learning Rate: 0.0009619398 +2025-03-27 22:49:33,055 Train Loss: 0.0000574, Val Loss: 0.0000677 +2025-03-27 22:49:33,056 Epoch 1626/2000 +2025-03-27 22:54:13,626 Current Learning Rate: 0.0009588773 +2025-03-27 22:54:13,627 Train Loss: 0.0000771, Val Loss: 0.0000689 +2025-03-27 22:54:13,627 Epoch 1627/2000 +2025-03-27 22:58:54,167 Current Learning Rate: 0.0009557016 +2025-03-27 22:58:54,168 Train Loss: 0.0000747, Val Loss: 0.0000669 +2025-03-27 22:58:54,168 Epoch 1628/2000 +2025-03-27 23:03:34,469 Current Learning Rate: 0.0009524135 +2025-03-27 23:03:34,469 Train Loss: 0.0000778, Val Loss: 0.0000724 +2025-03-27 23:03:34,470 Epoch 1629/2000 +2025-03-27 23:08:14,823 Current Learning Rate: 0.0009490138 +2025-03-27 23:08:14,823 Train Loss: 0.0000754, Val Loss: 0.0000749 +2025-03-27 23:08:14,823 Epoch 1630/2000 +2025-03-27 23:12:55,254 Current Learning Rate: 0.0009455033 +2025-03-27 23:12:55,254 Train Loss: 0.0000685, Val Loss: 0.0000715 +2025-03-27 23:12:55,254 Epoch 1631/2000 +2025-03-27 23:17:36,357 Current Learning Rate: 0.0009418828 +2025-03-27 23:17:36,357 Train Loss: 0.0000583, Val Loss: 0.0000641 +2025-03-27 23:17:36,357 Epoch 1632/2000 +2025-03-27 23:22:16,592 Current Learning Rate: 0.0009381533 +2025-03-27 23:22:16,593 Train Loss: 0.0000662, Val Loss: 0.0000676 +2025-03-27 23:22:16,593 Epoch 1633/2000 +2025-03-27 23:26:56,526 Current Learning Rate: 0.0009343158 +2025-03-27 23:26:56,526 Train Loss: 0.0000691, Val Loss: 0.0000637 +2025-03-27 23:26:56,526 Epoch 
1634/2000 +2025-03-27 23:31:36,885 Current Learning Rate: 0.0009303710 +2025-03-27 23:31:36,886 Train Loss: 0.0000546, Val Loss: 0.0000635 +2025-03-27 23:31:36,886 Epoch 1635/2000 +2025-03-27 23:36:17,231 Current Learning Rate: 0.0009263201 +2025-03-27 23:36:17,232 Train Loss: 0.0000689, Val Loss: 0.0000656 +2025-03-27 23:36:17,232 Epoch 1636/2000 +2025-03-27 23:40:57,423 Current Learning Rate: 0.0009221640 +2025-03-27 23:40:57,423 Train Loss: 0.0000742, Val Loss: 0.0000726 +2025-03-27 23:40:57,423 Epoch 1637/2000 +2025-03-27 23:45:37,862 Current Learning Rate: 0.0009179037 +2025-03-27 23:45:37,862 Train Loss: 0.0000669, Val Loss: 0.0000673 +2025-03-27 23:45:37,862 Epoch 1638/2000 +2025-03-27 23:50:18,523 Current Learning Rate: 0.0009135403 +2025-03-27 23:50:18,524 Train Loss: 0.0000879, Val Loss: 0.0000700 +2025-03-27 23:50:18,524 Epoch 1639/2000 +2025-03-27 23:54:58,561 Current Learning Rate: 0.0009090749 +2025-03-27 23:54:58,561 Train Loss: 0.0000643, Val Loss: 0.0000755 +2025-03-27 23:54:58,562 Epoch 1640/2000 +2025-03-27 23:59:39,362 Current Learning Rate: 0.0009045085 +2025-03-27 23:59:39,362 Train Loss: 0.0000714, Val Loss: 0.0000648 +2025-03-27 23:59:39,362 Epoch 1641/2000 +2025-03-28 00:04:20,433 Current Learning Rate: 0.0008998423 +2025-03-28 00:04:20,433 Train Loss: 0.0000592, Val Loss: 0.0000675 +2025-03-28 00:04:20,434 Epoch 1642/2000 +2025-03-28 00:09:00,714 Current Learning Rate: 0.0008950775 +2025-03-28 00:09:00,715 Train Loss: 0.0000587, Val Loss: 0.0000631 +2025-03-28 00:09:00,715 Epoch 1643/2000 +2025-03-28 00:13:41,205 Current Learning Rate: 0.0008902152 +2025-03-28 00:13:41,206 Train Loss: 0.0000747, Val Loss: 0.0000680 +2025-03-28 00:13:41,206 Epoch 1644/2000 +2025-03-28 00:18:21,942 Current Learning Rate: 0.0008852566 +2025-03-28 00:18:21,942 Train Loss: 0.0000526, Val Loss: 0.0000607 +2025-03-28 00:18:21,943 Epoch 1645/2000 +2025-03-28 00:23:02,195 Current Learning Rate: 0.0008802030 +2025-03-28 00:23:02,195 Train Loss: 0.0000659, Val Loss: 
0.0000690 +2025-03-28 00:23:02,196 Epoch 1646/2000 +2025-03-28 00:27:42,545 Current Learning Rate: 0.0008750555 +2025-03-28 00:27:42,545 Train Loss: 0.0000579, Val Loss: 0.0000724 +2025-03-28 00:27:42,546 Epoch 1647/2000 +2025-03-28 00:32:23,193 Current Learning Rate: 0.0008698155 +2025-03-28 00:32:23,194 Train Loss: 0.0000636, Val Loss: 0.0000683 +2025-03-28 00:32:23,194 Epoch 1648/2000 +2025-03-28 00:37:03,157 Current Learning Rate: 0.0008644843 +2025-03-28 00:37:03,157 Train Loss: 0.0000634, Val Loss: 0.0000631 +2025-03-28 00:37:03,158 Epoch 1649/2000 +2025-03-28 00:41:43,622 Current Learning Rate: 0.0008590631 +2025-03-28 00:41:43,623 Train Loss: 0.0000609, Val Loss: 0.0000638 +2025-03-28 00:41:43,623 Epoch 1650/2000 +2025-03-28 00:46:24,350 Current Learning Rate: 0.0008535534 +2025-03-28 00:46:24,351 Train Loss: 0.0000541, Val Loss: 0.0000644 +2025-03-28 00:46:24,351 Epoch 1651/2000 +2025-03-28 00:51:05,504 Current Learning Rate: 0.0008479564 +2025-03-28 00:51:05,505 Train Loss: 0.0000529, Val Loss: 0.0000686 +2025-03-28 00:51:05,505 Epoch 1652/2000 +2025-03-28 00:55:46,525 Current Learning Rate: 0.0008422736 +2025-03-28 00:55:46,526 Train Loss: 0.0000557, Val Loss: 0.0000721 +2025-03-28 00:55:46,526 Epoch 1653/2000 +2025-03-28 01:00:27,337 Current Learning Rate: 0.0008365063 +2025-03-28 01:00:27,338 Train Loss: 0.0000518, Val Loss: 0.0000701 +2025-03-28 01:00:27,338 Epoch 1654/2000 +2025-03-28 01:05:07,574 Current Learning Rate: 0.0008306559 +2025-03-28 01:05:07,574 Train Loss: 0.0000626, Val Loss: 0.0000672 +2025-03-28 01:05:07,575 Epoch 1655/2000 +2025-03-28 01:09:48,545 Current Learning Rate: 0.0008247240 +2025-03-28 01:09:48,545 Train Loss: 0.0000690, Val Loss: 0.0000673 +2025-03-28 01:09:48,545 Epoch 1656/2000 +2025-03-28 01:14:29,462 Current Learning Rate: 0.0008187120 +2025-03-28 01:14:29,463 Train Loss: 0.0000666, Val Loss: 0.0000707 +2025-03-28 01:14:29,463 Epoch 1657/2000 +2025-03-28 01:19:10,024 Current Learning Rate: 0.0008126213 +2025-03-28 
01:19:10,024 Train Loss: 0.0000676, Val Loss: 0.0000758 +2025-03-28 01:19:10,024 Epoch 1658/2000 +2025-03-28 01:23:51,234 Current Learning Rate: 0.0008064535 +2025-03-28 01:23:51,235 Train Loss: 0.0000602, Val Loss: 0.0000651 +2025-03-28 01:23:51,235 Epoch 1659/2000 +2025-03-28 01:28:31,714 Current Learning Rate: 0.0008002101 +2025-03-28 01:28:31,714 Train Loss: 0.0000536, Val Loss: 0.0000787 +2025-03-28 01:28:31,714 Epoch 1660/2000 +2025-03-28 01:33:12,501 Current Learning Rate: 0.0007938926 +2025-03-28 01:33:12,502 Train Loss: 0.0000534, Val Loss: 0.0000644 +2025-03-28 01:33:12,502 Epoch 1661/2000 +2025-03-28 01:37:53,024 Current Learning Rate: 0.0007875026 +2025-03-28 01:37:53,025 Train Loss: 0.0000488, Val Loss: 0.0000603 +2025-03-28 01:37:53,025 Epoch 1662/2000 +2025-03-28 01:42:33,556 Current Learning Rate: 0.0007810417 +2025-03-28 01:42:33,556 Train Loss: 0.0000670, Val Loss: 0.0000992 +2025-03-28 01:42:33,556 Epoch 1663/2000 +2025-03-28 01:47:13,741 Current Learning Rate: 0.0007745114 +2025-03-28 01:47:13,742 Train Loss: 0.0000614, Val Loss: 0.0000664 +2025-03-28 01:47:13,742 Epoch 1664/2000 +2025-03-28 01:51:54,596 Current Learning Rate: 0.0007679134 +2025-03-28 01:51:54,597 Train Loss: 0.0000709, Val Loss: 0.0000651 +2025-03-28 01:51:54,597 Epoch 1665/2000 +2025-03-28 01:56:35,542 Current Learning Rate: 0.0007612493 +2025-03-28 01:56:35,543 Train Loss: 0.0000643, Val Loss: 0.0000656 +2025-03-28 01:56:35,544 Epoch 1666/2000 +2025-03-28 02:01:16,058 Current Learning Rate: 0.0007545207 +2025-03-28 02:01:16,059 Train Loss: 0.0000902, Val Loss: 0.0001208 +2025-03-28 02:01:16,059 Epoch 1667/2000 +2025-03-28 02:05:56,254 Current Learning Rate: 0.0007477293 +2025-03-28 02:05:56,254 Train Loss: 0.0000962, Val Loss: 0.0000668 +2025-03-28 02:05:56,254 Epoch 1668/2000 +2025-03-28 02:10:36,682 Current Learning Rate: 0.0007408768 +2025-03-28 02:10:36,683 Train Loss: 0.0000730, Val Loss: 0.0000703 +2025-03-28 02:10:36,683 Epoch 1669/2000 +2025-03-28 02:15:16,786 Current 
Learning Rate: 0.0007339649 +2025-03-28 02:15:16,787 Train Loss: 0.0000533, Val Loss: 0.0000631 +2025-03-28 02:15:16,787 Epoch 1670/2000 +2025-03-28 02:19:57,417 Current Learning Rate: 0.0007269952 +2025-03-28 02:19:57,420 Train Loss: 0.0000570, Val Loss: 0.0000620 +2025-03-28 02:19:57,421 Epoch 1671/2000 +2025-03-28 02:24:37,716 Current Learning Rate: 0.0007199696 +2025-03-28 02:24:37,717 Train Loss: 0.0000658, Val Loss: 0.0000670 +2025-03-28 02:24:37,717 Epoch 1672/2000 +2025-03-28 02:29:17,675 Current Learning Rate: 0.0007128896 +2025-03-28 02:29:17,676 Train Loss: 0.0000533, Val Loss: 0.0000594 +2025-03-28 02:29:17,676 Epoch 1673/2000 +2025-03-28 02:33:58,521 Current Learning Rate: 0.0007057572 +2025-03-28 02:33:58,521 Train Loss: 0.0000653, Val Loss: 0.0000602 +2025-03-28 02:33:58,521 Epoch 1674/2000 +2025-03-28 02:38:38,940 Current Learning Rate: 0.0006985739 +2025-03-28 02:38:38,941 Train Loss: 0.0000523, Val Loss: 0.0000595 +2025-03-28 02:38:38,941 Epoch 1675/2000 +2025-03-28 02:43:19,349 Current Learning Rate: 0.0006913417 +2025-03-28 02:43:19,349 Train Loss: 0.0000605, Val Loss: 0.0000602 +2025-03-28 02:43:19,349 Epoch 1676/2000 +2025-03-28 02:48:00,091 Current Learning Rate: 0.0006840623 +2025-03-28 02:48:00,091 Train Loss: 0.0000492, Val Loss: 0.0000584 +2025-03-28 02:48:00,092 Epoch 1677/2000 +2025-03-28 02:52:40,555 Current Learning Rate: 0.0006767374 +2025-03-28 02:52:40,555 Train Loss: 0.0000527, Val Loss: 0.0000587 +2025-03-28 02:52:40,555 Epoch 1678/2000 +2025-03-28 02:57:21,240 Current Learning Rate: 0.0006693690 +2025-03-28 02:57:21,241 Train Loss: 0.0000450, Val Loss: 0.0000727 +2025-03-28 02:57:21,241 Epoch 1679/2000 +2025-03-28 03:02:01,620 Current Learning Rate: 0.0006619587 +2025-03-28 03:02:01,621 Train Loss: 0.0000552, Val Loss: 0.0000649 +2025-03-28 03:02:01,621 Epoch 1680/2000 +2025-03-28 03:06:42,083 Current Learning Rate: 0.0006545085 +2025-03-28 03:06:42,084 Train Loss: 0.0000592, Val Loss: 0.0000585 +2025-03-28 03:06:42,084 Epoch 
1681/2000 +2025-03-28 03:11:22,241 Current Learning Rate: 0.0006470202 +2025-03-28 03:11:22,241 Train Loss: 0.0000551, Val Loss: 0.0000585 +2025-03-28 03:11:22,242 Epoch 1682/2000 +2025-03-28 03:16:02,706 Current Learning Rate: 0.0006394956 +2025-03-28 03:16:02,707 Train Loss: 0.0000704, Val Loss: 0.0001223 +2025-03-28 03:16:02,707 Epoch 1683/2000 +2025-03-28 03:20:42,801 Current Learning Rate: 0.0006319365 +2025-03-28 03:20:42,801 Train Loss: 0.0000749, Val Loss: 0.0000658 +2025-03-28 03:20:42,801 Epoch 1684/2000 +2025-03-28 03:25:22,282 Current Learning Rate: 0.0006243449 +2025-03-28 03:25:22,283 Train Loss: 0.0000539, Val Loss: 0.0000641 +2025-03-28 03:25:22,283 Epoch 1685/2000 +2025-03-28 03:30:03,240 Current Learning Rate: 0.0006167227 +2025-03-28 03:30:03,241 Train Loss: 0.0000493, Val Loss: 0.0000604 +2025-03-28 03:30:03,241 Epoch 1686/2000 +2025-03-28 03:34:43,578 Current Learning Rate: 0.0006090716 +2025-03-28 03:34:43,578 Train Loss: 0.0000509, Val Loss: 0.0000665 +2025-03-28 03:34:43,578 Epoch 1687/2000 +2025-03-28 03:39:23,560 Current Learning Rate: 0.0006013936 +2025-03-28 03:39:23,561 Train Loss: 0.0000577, Val Loss: 0.0000609 +2025-03-28 03:39:23,561 Epoch 1688/2000 +2025-03-28 03:44:03,469 Current Learning Rate: 0.0005936907 +2025-03-28 03:44:03,470 Train Loss: 0.0000509, Val Loss: 0.0000593 +2025-03-28 03:44:03,470 Epoch 1689/2000 +2025-03-28 03:48:44,040 Current Learning Rate: 0.0005859646 +2025-03-28 03:48:44,040 Train Loss: 0.0000536, Val Loss: 0.0000606 +2025-03-28 03:48:44,041 Epoch 1690/2000 +2025-03-28 03:53:24,045 Current Learning Rate: 0.0005782172 +2025-03-28 03:53:24,045 Train Loss: 0.0000549, Val Loss: 0.0000579 +2025-03-28 03:53:24,046 Epoch 1691/2000 +2025-03-28 03:58:05,237 Current Learning Rate: 0.0005704506 +2025-03-28 03:58:05,237 Train Loss: 0.0000591, Val Loss: 0.0000599 +2025-03-28 03:58:05,237 Epoch 1692/2000 +2025-03-28 04:02:45,664 Current Learning Rate: 0.0005626666 +2025-03-28 04:02:45,665 Train Loss: 0.0000437, Val Loss: 
0.0000585 +2025-03-28 04:02:45,665 Epoch 1693/2000 +2025-03-28 04:07:26,287 Current Learning Rate: 0.0005548672 +2025-03-28 04:07:26,288 Train Loss: 0.0000436, Val Loss: 0.0000571 +2025-03-28 04:07:26,288 Epoch 1694/2000 +2025-03-28 04:12:07,011 Current Learning Rate: 0.0005470542 +2025-03-28 04:12:07,011 Train Loss: 0.0000536, Val Loss: 0.0000627 +2025-03-28 04:12:07,011 Epoch 1695/2000 +2025-03-28 04:16:47,391 Current Learning Rate: 0.0005392295 +2025-03-28 04:16:47,391 Train Loss: 0.0000444, Val Loss: 0.0000604 +2025-03-28 04:16:47,391 Epoch 1696/2000 +2025-03-28 04:21:27,755 Current Learning Rate: 0.0005313953 +2025-03-28 04:21:27,756 Train Loss: 0.0000500, Val Loss: 0.0000589 +2025-03-28 04:21:27,756 Epoch 1697/2000 +2025-03-28 04:26:08,226 Current Learning Rate: 0.0005235532 +2025-03-28 04:26:08,226 Train Loss: 0.0000548, Val Loss: 0.0000600 +2025-03-28 04:26:08,227 Epoch 1698/2000 +2025-03-28 04:30:48,502 Current Learning Rate: 0.0005157054 +2025-03-28 04:30:48,503 Train Loss: 0.0000466, Val Loss: 0.0000619 +2025-03-28 04:30:48,503 Epoch 1699/2000 +2025-03-28 04:35:29,596 Current Learning Rate: 0.0005078537 +2025-03-28 04:35:29,596 Train Loss: 0.0000518, Val Loss: 0.0000597 +2025-03-28 04:35:29,596 Epoch 1700/2000 +2025-03-28 04:40:09,564 Current Learning Rate: 0.0005000000 +2025-03-28 04:40:09,564 Train Loss: 0.0000457, Val Loss: 0.0000564 +2025-03-28 04:40:09,564 Epoch 1701/2000 +2025-03-28 04:44:49,952 Current Learning Rate: 0.0004921463 +2025-03-28 04:44:49,952 Train Loss: 0.0000537, Val Loss: 0.0000603 +2025-03-28 04:44:49,952 Epoch 1702/2000 +2025-03-28 04:49:30,379 Current Learning Rate: 0.0004842946 +2025-03-28 04:49:30,380 Train Loss: 0.0000420, Val Loss: 0.0000568 +2025-03-28 04:49:30,380 Epoch 1703/2000 +2025-03-28 04:54:10,984 Current Learning Rate: 0.0004764468 +2025-03-28 04:54:10,984 Train Loss: 0.0000515, Val Loss: 0.0000609 +2025-03-28 04:54:10,985 Epoch 1704/2000 +2025-03-28 04:58:50,540 Current Learning Rate: 0.0004686047 +2025-03-28 
04:58:50,540 Train Loss: 0.0000463, Val Loss: 0.0000617 +2025-03-28 04:58:50,540 Epoch 1705/2000 +2025-03-28 05:03:30,961 Current Learning Rate: 0.0004607705 +2025-03-28 05:03:30,961 Train Loss: 0.0000436, Val Loss: 0.0000582 +2025-03-28 05:03:30,961 Epoch 1706/2000 +2025-03-28 05:08:11,705 Current Learning Rate: 0.0004529458 +2025-03-28 05:08:11,706 Train Loss: 0.0000485, Val Loss: 0.0000570 +2025-03-28 05:08:11,706 Epoch 1707/2000 +2025-03-28 05:12:52,429 Current Learning Rate: 0.0004451328 +2025-03-28 05:12:52,430 Train Loss: 0.0000546, Val Loss: 0.0000600 +2025-03-28 05:12:52,430 Epoch 1708/2000 +2025-03-28 05:17:33,165 Current Learning Rate: 0.0004373334 +2025-03-28 05:17:33,165 Train Loss: 0.0000505, Val Loss: 0.0000588 +2025-03-28 05:17:33,166 Epoch 1709/2000 +2025-03-28 05:22:13,954 Current Learning Rate: 0.0004295494 +2025-03-28 05:22:13,954 Train Loss: 0.0000476, Val Loss: 0.0000567 +2025-03-28 05:22:13,954 Epoch 1710/2000 +2025-03-28 05:26:54,574 Current Learning Rate: 0.0004217828 +2025-03-28 05:26:54,574 Train Loss: 0.0000471, Val Loss: 0.0000567 +2025-03-28 05:26:54,575 Epoch 1711/2000 +2025-03-28 05:31:34,861 Current Learning Rate: 0.0004140354 +2025-03-28 05:31:34,861 Train Loss: 0.0000519, Val Loss: 0.0000565 +2025-03-28 05:31:34,861 Epoch 1712/2000 +2025-03-28 05:36:15,646 Current Learning Rate: 0.0004063093 +2025-03-28 05:36:15,646 Train Loss: 0.0000486, Val Loss: 0.0000564 +2025-03-28 05:36:15,646 Epoch 1713/2000 +2025-03-28 05:40:56,359 Current Learning Rate: 0.0003986064 +2025-03-28 05:40:56,360 Train Loss: 0.0000496, Val Loss: 0.0000589 +2025-03-28 05:40:56,360 Epoch 1714/2000 +2025-03-28 05:45:36,335 Current Learning Rate: 0.0003909284 +2025-03-28 05:45:36,335 Train Loss: 0.0000385, Val Loss: 0.0000559 +2025-03-28 05:45:36,335 Epoch 1715/2000 +2025-03-28 05:50:16,482 Current Learning Rate: 0.0003832773 +2025-03-28 05:50:16,482 Train Loss: 0.0000462, Val Loss: 0.0000567 +2025-03-28 05:50:16,482 Epoch 1716/2000 +2025-03-28 05:54:56,863 Current 
Learning Rate: 0.0003756551 +2025-03-28 05:54:56,864 Train Loss: 0.0000472, Val Loss: 0.0000566 +2025-03-28 05:54:56,864 Epoch 1717/2000 +2025-03-28 05:59:37,615 Current Learning Rate: 0.0003680635 +2025-03-28 05:59:37,616 Train Loss: 0.0000489, Val Loss: 0.0000558 +2025-03-28 05:59:37,616 Epoch 1718/2000 +2025-03-28 06:04:17,776 Current Learning Rate: 0.0003605044 +2025-03-28 06:04:17,776 Train Loss: 0.0000508, Val Loss: 0.0000588 +2025-03-28 06:04:17,776 Epoch 1719/2000 +2025-03-28 06:08:57,977 Current Learning Rate: 0.0003529798 +2025-03-28 06:08:57,978 Train Loss: 0.0000441, Val Loss: 0.0000560 +2025-03-28 06:08:57,978 Epoch 1720/2000 +2025-03-28 06:13:38,554 Current Learning Rate: 0.0003454915 +2025-03-28 06:13:38,554 Train Loss: 0.0000548, Val Loss: 0.0000573 +2025-03-28 06:13:38,555 Epoch 1721/2000 +2025-03-28 06:18:19,181 Current Learning Rate: 0.0003380413 +2025-03-28 06:18:19,182 Train Loss: 0.0000567, Val Loss: 0.0000583 +2025-03-28 06:18:19,182 Epoch 1722/2000 +2025-03-28 06:22:59,876 Current Learning Rate: 0.0003306310 +2025-03-28 06:22:59,877 Train Loss: 0.0000455, Val Loss: 0.0000558 +2025-03-28 06:22:59,877 Epoch 1723/2000 +2025-03-28 06:27:40,180 Current Learning Rate: 0.0003232626 +2025-03-28 06:27:40,181 Train Loss: 0.0000575, Val Loss: 0.0000574 +2025-03-28 06:27:40,181 Epoch 1724/2000 +2025-03-28 06:32:20,665 Current Learning Rate: 0.0003159377 +2025-03-28 06:32:20,666 Train Loss: 0.0000486, Val Loss: 0.0000561 +2025-03-28 06:32:20,666 Epoch 1725/2000 +2025-03-28 06:37:00,790 Current Learning Rate: 0.0003086583 +2025-03-28 06:37:00,791 Train Loss: 0.0000474, Val Loss: 0.0000560 +2025-03-28 06:37:00,791 Epoch 1726/2000 +2025-03-28 06:41:41,856 Current Learning Rate: 0.0003014261 +2025-03-28 06:41:41,856 Train Loss: 0.0000394, Val Loss: 0.0000565 +2025-03-28 06:41:41,856 Epoch 1727/2000 +2025-03-28 06:46:22,552 Current Learning Rate: 0.0002942428 +2025-03-28 06:46:22,552 Train Loss: 0.0000449, Val Loss: 0.0000556 +2025-03-28 06:46:22,552 Epoch 
1728/2000 +2025-03-28 06:51:03,382 Current Learning Rate: 0.0002871104 +2025-03-28 06:51:03,382 Train Loss: 0.0000543, Val Loss: 0.0000569 +2025-03-28 06:51:03,383 Epoch 1729/2000 +2025-03-28 06:55:43,694 Current Learning Rate: 0.0002800304 +2025-03-28 06:55:43,695 Train Loss: 0.0000455, Val Loss: 0.0000556 +2025-03-28 06:55:43,695 Epoch 1730/2000 +2025-03-28 07:00:24,171 Current Learning Rate: 0.0002730048 +2025-03-28 07:00:24,171 Train Loss: 0.0000466, Val Loss: 0.0000560 +2025-03-28 07:00:24,171 Epoch 1731/2000 +2025-03-28 07:05:04,886 Current Learning Rate: 0.0002660351 +2025-03-28 07:05:04,886 Train Loss: 0.0000487, Val Loss: 0.0000571 +2025-03-28 07:05:04,886 Epoch 1732/2000 +2025-03-28 07:09:45,093 Current Learning Rate: 0.0002591232 +2025-03-28 07:09:45,093 Train Loss: 0.0000525, Val Loss: 0.0000555 +2025-03-28 07:09:45,093 Epoch 1733/2000 +2025-03-28 07:14:25,456 Current Learning Rate: 0.0002522707 +2025-03-28 07:14:26,329 Train Loss: 0.0000416, Val Loss: 0.0000548 +2025-03-28 07:14:26,330 Epoch 1734/2000 +2025-03-28 07:19:05,692 Current Learning Rate: 0.0002454793 +2025-03-28 07:19:05,693 Train Loss: 0.0000476, Val Loss: 0.0000568 +2025-03-28 07:19:05,693 Epoch 1735/2000 +2025-03-28 07:23:46,114 Current Learning Rate: 0.0002387507 +2025-03-28 07:23:46,114 Train Loss: 0.0000529, Val Loss: 0.0000565 +2025-03-28 07:23:46,115 Epoch 1736/2000 +2025-03-28 07:28:26,390 Current Learning Rate: 0.0002320866 +2025-03-28 07:28:27,230 Train Loss: 0.0000439, Val Loss: 0.0000547 +2025-03-28 07:28:27,230 Epoch 1737/2000 +2025-03-28 07:33:07,342 Current Learning Rate: 0.0002254886 +2025-03-28 07:33:07,343 Train Loss: 0.0000447, Val Loss: 0.0000550 +2025-03-28 07:33:07,343 Epoch 1738/2000 +2025-03-28 07:37:47,897 Current Learning Rate: 0.0002189583 +2025-03-28 07:37:48,736 Train Loss: 0.0000438, Val Loss: 0.0000542 +2025-03-28 07:37:48,736 Epoch 1739/2000 +2025-03-28 07:42:28,482 Current Learning Rate: 0.0002124974 +2025-03-28 07:42:28,483 Train Loss: 0.0000455, Val Loss: 
0.0000547 +2025-03-28 07:42:28,483 Epoch 1740/2000 +2025-03-28 07:47:09,287 Current Learning Rate: 0.0002061074 +2025-03-28 07:47:09,287 Train Loss: 0.0000507, Val Loss: 0.0000549 +2025-03-28 07:47:09,288 Epoch 1741/2000 +2025-03-28 07:51:49,515 Current Learning Rate: 0.0001997899 +2025-03-28 07:51:49,515 Train Loss: 0.0000439, Val Loss: 0.0000549 +2025-03-28 07:51:49,516 Epoch 1742/2000 +2025-03-28 07:56:29,990 Current Learning Rate: 0.0001935465 +2025-03-28 07:56:29,990 Train Loss: 0.0000436, Val Loss: 0.0000548 +2025-03-28 07:56:29,990 Epoch 1743/2000 +2025-03-28 08:01:10,267 Current Learning Rate: 0.0001873787 +2025-03-28 08:01:10,268 Train Loss: 0.0000426, Val Loss: 0.0000551 +2025-03-28 08:01:10,268 Epoch 1744/2000 +2025-03-28 08:05:50,961 Current Learning Rate: 0.0001812880 +2025-03-28 08:05:50,961 Train Loss: 0.0000521, Val Loss: 0.0000545 +2025-03-28 08:05:50,961 Epoch 1745/2000 +2025-03-28 08:10:31,753 Current Learning Rate: 0.0001752760 +2025-03-28 08:10:31,753 Train Loss: 0.0000432, Val Loss: 0.0000543 +2025-03-28 08:10:31,753 Epoch 1746/2000 +2025-03-28 08:15:12,630 Current Learning Rate: 0.0001693441 +2025-03-28 08:15:12,630 Train Loss: 0.0000503, Val Loss: 0.0000549 +2025-03-28 08:15:12,631 Epoch 1747/2000 +2025-03-28 08:19:53,080 Current Learning Rate: 0.0001634937 +2025-03-28 08:19:53,081 Train Loss: 0.0000453, Val Loss: 0.0000543 +2025-03-28 08:19:53,081 Epoch 1748/2000 +2025-03-28 08:24:33,545 Current Learning Rate: 0.0001577264 +2025-03-28 08:24:33,545 Train Loss: 0.0000440, Val Loss: 0.0000549 +2025-03-28 08:24:33,545 Epoch 1749/2000 +2025-03-28 08:29:13,666 Current Learning Rate: 0.0001520436 +2025-03-28 08:29:13,666 Train Loss: 0.0000536, Val Loss: 0.0000553 +2025-03-28 08:29:13,667 Epoch 1750/2000 +2025-03-28 08:33:53,969 Current Learning Rate: 0.0001464466 +2025-03-28 08:33:54,950 Train Loss: 0.0000459, Val Loss: 0.0000539 +2025-03-28 08:33:54,951 Epoch 1751/2000 +2025-03-28 08:38:34,776 Current Learning Rate: 0.0001409369 +2025-03-28 
08:38:35,644 Train Loss: 0.0000415, Val Loss: 0.0000539 +2025-03-28 08:38:35,644 Epoch 1752/2000 +2025-03-28 08:43:15,104 Current Learning Rate: 0.0001355157 +2025-03-28 08:43:16,105 Train Loss: 0.0000418, Val Loss: 0.0000538 +2025-03-28 08:43:16,105 Epoch 1753/2000 +2025-03-28 08:47:55,553 Current Learning Rate: 0.0001301845 +2025-03-28 08:47:56,375 Train Loss: 0.0000440, Val Loss: 0.0000538 +2025-03-28 08:47:56,375 Epoch 1754/2000 +2025-03-28 08:52:36,144 Current Learning Rate: 0.0001249445 +2025-03-28 08:52:37,024 Train Loss: 0.0000428, Val Loss: 0.0000535 +2025-03-28 08:52:37,025 Epoch 1755/2000 +2025-03-28 08:57:17,027 Current Learning Rate: 0.0001197970 +2025-03-28 08:57:17,850 Train Loss: 0.0000399, Val Loss: 0.0000533 +2025-03-28 08:57:17,850 Epoch 1756/2000 +2025-03-28 09:01:57,119 Current Learning Rate: 0.0001147434 +2025-03-28 09:01:57,119 Train Loss: 0.0000467, Val Loss: 0.0000535 +2025-03-28 09:01:57,120 Epoch 1757/2000 +2025-03-28 09:06:36,996 Current Learning Rate: 0.0001097848 +2025-03-28 09:06:36,996 Train Loss: 0.0000398, Val Loss: 0.0000535 +2025-03-28 09:06:36,996 Epoch 1758/2000 +2025-03-28 09:11:17,243 Current Learning Rate: 0.0001049225 +2025-03-28 09:11:17,243 Train Loss: 0.0000471, Val Loss: 0.0000534 +2025-03-28 09:11:17,244 Epoch 1759/2000 +2025-03-28 09:15:57,128 Current Learning Rate: 0.0001001577 +2025-03-28 09:15:57,128 Train Loss: 0.0000473, Val Loss: 0.0000536 +2025-03-28 09:15:57,129 Epoch 1760/2000 +2025-03-28 09:20:37,700 Current Learning Rate: 0.0000954915 +2025-03-28 09:20:38,574 Train Loss: 0.0000400, Val Loss: 0.0000532 +2025-03-28 09:20:38,575 Epoch 1761/2000 +2025-03-28 09:25:18,456 Current Learning Rate: 0.0000909251 +2025-03-28 09:25:19,337 Train Loss: 0.0000384, Val Loss: 0.0000531 +2025-03-28 09:25:19,338 Epoch 1762/2000 +2025-03-28 09:29:58,716 Current Learning Rate: 0.0000864597 +2025-03-28 09:29:59,587 Train Loss: 0.0000458, Val Loss: 0.0000530 +2025-03-28 09:29:59,588 Epoch 1763/2000 +2025-03-28 09:34:39,091 Current 
Learning Rate: 0.0000820963 +2025-03-28 09:34:39,092 Train Loss: 0.0000429, Val Loss: 0.0000530 +2025-03-28 09:34:39,092 Epoch 1764/2000 +2025-03-28 09:39:19,410 Current Learning Rate: 0.0000778360 +2025-03-28 09:39:20,270 Train Loss: 0.0000414, Val Loss: 0.0000529 +2025-03-28 09:39:20,270 Epoch 1765/2000 +2025-03-28 09:43:59,827 Current Learning Rate: 0.0000736799 +2025-03-28 09:44:00,777 Train Loss: 0.0000434, Val Loss: 0.0000528 +2025-03-28 09:44:00,777 Epoch 1766/2000 +2025-03-28 09:48:40,688 Current Learning Rate: 0.0000696290 +2025-03-28 09:48:40,690 Train Loss: 0.0000457, Val Loss: 0.0000528 +2025-03-28 09:48:40,690 Epoch 1767/2000 +2025-03-28 09:53:21,127 Current Learning Rate: 0.0000656842 +2025-03-28 09:53:21,986 Train Loss: 0.0000407, Val Loss: 0.0000527 +2025-03-28 09:53:21,986 Epoch 1768/2000 +2025-03-28 09:58:01,445 Current Learning Rate: 0.0000618467 +2025-03-28 09:58:01,446 Train Loss: 0.0000488, Val Loss: 0.0000528 +2025-03-28 09:58:01,446 Epoch 1769/2000 +2025-03-28 10:02:42,254 Current Learning Rate: 0.0000581172 +2025-03-28 10:02:42,254 Train Loss: 0.0000444, Val Loss: 0.0000528 +2025-03-28 10:02:42,254 Epoch 1770/2000 +2025-03-28 10:07:22,944 Current Learning Rate: 0.0000544967 +2025-03-28 10:07:23,783 Train Loss: 0.0000365, Val Loss: 0.0000526 +2025-03-28 10:07:23,783 Epoch 1771/2000 +2025-03-28 10:12:03,512 Current Learning Rate: 0.0000509862 +2025-03-28 10:12:03,513 Train Loss: 0.0000492, Val Loss: 0.0000528 +2025-03-28 10:12:03,513 Epoch 1772/2000 +2025-03-28 10:16:43,556 Current Learning Rate: 0.0000475865 +2025-03-28 10:16:43,557 Train Loss: 0.0000504, Val Loss: 0.0000527 +2025-03-28 10:16:43,557 Epoch 1773/2000 +2025-03-28 10:21:23,790 Current Learning Rate: 0.0000442984 +2025-03-28 10:21:24,646 Train Loss: 0.0000460, Val Loss: 0.0000525 +2025-03-28 10:21:24,646 Epoch 1774/2000 +2025-03-28 10:26:04,073 Current Learning Rate: 0.0000411227 +2025-03-28 10:26:04,989 Train Loss: 0.0000424, Val Loss: 0.0000525 +2025-03-28 10:26:04,990 Epoch 
1775/2000 +2025-03-28 10:30:44,728 Current Learning Rate: 0.0000380602 +2025-03-28 10:30:44,729 Train Loss: 0.0000444, Val Loss: 0.0000525 +2025-03-28 10:30:44,729 Epoch 1776/2000 +2025-03-28 10:35:24,669 Current Learning Rate: 0.0000351118 +2025-03-28 10:35:24,669 Train Loss: 0.0000445, Val Loss: 0.0000526 +2025-03-28 10:35:24,670 Epoch 1777/2000 +2025-03-28 10:40:04,661 Current Learning Rate: 0.0000322780 +2025-03-28 10:40:05,539 Train Loss: 0.0000345, Val Loss: 0.0000525 +2025-03-28 10:40:05,539 Epoch 1778/2000 +2025-03-28 10:44:45,174 Current Learning Rate: 0.0000295596 +2025-03-28 10:44:45,174 Train Loss: 0.0000497, Val Loss: 0.0000529 +2025-03-28 10:44:45,174 Epoch 1779/2000 +2025-03-28 10:49:25,266 Current Learning Rate: 0.0000269573 +2025-03-28 10:49:26,093 Train Loss: 0.0000372, Val Loss: 0.0000524 +2025-03-28 10:49:26,094 Epoch 1780/2000 +2025-03-28 10:54:05,512 Current Learning Rate: 0.0000244717 +2025-03-28 10:54:06,563 Train Loss: 0.0000415, Val Loss: 0.0000524 +2025-03-28 10:54:06,564 Epoch 1781/2000 +2025-03-28 10:58:45,925 Current Learning Rate: 0.0000221035 +2025-03-28 10:58:45,926 Train Loss: 0.0000452, Val Loss: 0.0000524 +2025-03-28 10:58:45,926 Epoch 1782/2000 +2025-03-28 11:03:26,517 Current Learning Rate: 0.0000198532 +2025-03-28 11:03:27,428 Train Loss: 0.0000466, Val Loss: 0.0000524 +2025-03-28 11:03:27,428 Epoch 1783/2000 +2025-03-28 11:08:07,040 Current Learning Rate: 0.0000177213 +2025-03-28 11:08:07,977 Train Loss: 0.0000483, Val Loss: 0.0000524 +2025-03-28 11:08:07,977 Epoch 1784/2000 +2025-03-28 11:12:47,273 Current Learning Rate: 0.0000157084 +2025-03-28 11:12:48,100 Train Loss: 0.0000327, Val Loss: 0.0000523 +2025-03-28 11:12:48,100 Epoch 1785/2000 +2025-03-28 11:17:27,559 Current Learning Rate: 0.0000138150 +2025-03-28 11:17:28,409 Train Loss: 0.0000491, Val Loss: 0.0000523 +2025-03-28 11:17:28,409 Epoch 1786/2000 +2025-03-28 11:22:07,736 Current Learning Rate: 0.0000120416 +2025-03-28 11:22:08,591 Train Loss: 0.0000420, Val Loss: 
0.0000523 +2025-03-28 11:22:08,591 Epoch 1787/2000 +2025-03-28 11:26:48,149 Current Learning Rate: 0.0000103886 +2025-03-28 11:26:49,051 Train Loss: 0.0000388, Val Loss: 0.0000523 +2025-03-28 11:26:49,051 Epoch 1788/2000 +2025-03-28 11:31:29,099 Current Learning Rate: 0.0000088564 +2025-03-28 11:31:30,062 Train Loss: 0.0000493, Val Loss: 0.0000523 +2025-03-28 11:31:30,063 Epoch 1789/2000 +2025-03-28 11:36:10,031 Current Learning Rate: 0.0000074453 +2025-03-28 11:36:10,947 Train Loss: 0.0000433, Val Loss: 0.0000523 +2025-03-28 11:36:10,948 Epoch 1790/2000 +2025-03-28 11:40:50,195 Current Learning Rate: 0.0000061558 +2025-03-28 11:40:51,245 Train Loss: 0.0000484, Val Loss: 0.0000522 +2025-03-28 11:40:51,245 Epoch 1791/2000 +2025-03-28 11:45:30,907 Current Learning Rate: 0.0000049882 +2025-03-28 11:45:30,908 Train Loss: 0.0000461, Val Loss: 0.0000522 +2025-03-28 11:45:30,908 Epoch 1792/2000 +2025-03-28 11:50:11,112 Current Learning Rate: 0.0000039426 +2025-03-28 11:50:11,112 Train Loss: 0.0000493, Val Loss: 0.0000522 +2025-03-28 11:50:11,114 Epoch 1793/2000 +2025-03-28 11:54:52,039 Current Learning Rate: 0.0000030195 +2025-03-28 11:54:52,914 Train Loss: 0.0000416, Val Loss: 0.0000522 +2025-03-28 11:54:52,915 Epoch 1794/2000 +2025-03-28 11:59:32,505 Current Learning Rate: 0.0000022190 +2025-03-28 11:59:33,610 Train Loss: 0.0000458, Val Loss: 0.0000522 +2025-03-28 11:59:33,611 Epoch 1795/2000 +2025-03-28 12:04:13,385 Current Learning Rate: 0.0000015413 +2025-03-28 12:04:13,386 Train Loss: 0.0000381, Val Loss: 0.0000522 +2025-03-28 12:04:13,386 Epoch 1796/2000 +2025-03-28 12:08:53,886 Current Learning Rate: 0.0000009866 +2025-03-28 12:08:54,899 Train Loss: 0.0000376, Val Loss: 0.0000522 +2025-03-28 12:08:54,900 Epoch 1797/2000 +2025-03-28 12:13:34,560 Current Learning Rate: 0.0000005551 +2025-03-28 12:13:34,561 Train Loss: 0.0000407, Val Loss: 0.0000522 +2025-03-28 12:13:34,561 Epoch 1798/2000 +2025-03-28 12:18:14,936 Current Learning Rate: 0.0000002467 +2025-03-28 
12:18:14,937 Train Loss: 0.0000408, Val Loss: 0.0000522 +2025-03-28 12:18:14,937 Epoch 1799/2000 +2025-03-28 12:22:54,894 Current Learning Rate: 0.0000000617 +2025-03-28 12:22:54,895 Train Loss: 0.0000469, Val Loss: 0.0000522 +2025-03-28 12:22:54,895 Epoch 1800/2000 +2025-03-28 12:27:35,814 Current Learning Rate: 0.0000000000 +2025-03-28 12:27:35,814 Train Loss: 0.0000386, Val Loss: 0.0000522 +2025-03-28 12:27:35,815 Epoch 1801/2000 +2025-03-28 12:32:15,393 Current Learning Rate: 0.0000000617 +2025-03-28 12:32:16,286 Train Loss: 0.0000420, Val Loss: 0.0000522 +2025-03-28 12:32:16,287 Epoch 1802/2000 +2025-03-28 12:36:55,499 Current Learning Rate: 0.0000002467 +2025-03-28 12:36:55,500 Train Loss: 0.0000405, Val Loss: 0.0000522 +2025-03-28 12:36:55,500 Epoch 1803/2000 +2025-03-28 12:41:35,870 Current Learning Rate: 0.0000005551 +2025-03-28 12:41:35,871 Train Loss: 0.0000461, Val Loss: 0.0000522 +2025-03-28 12:41:35,871 Epoch 1804/2000 +2025-03-28 12:46:16,489 Current Learning Rate: 0.0000009866 +2025-03-28 12:46:16,490 Train Loss: 0.0000427, Val Loss: 0.0000522 +2025-03-28 12:46:16,490 Epoch 1805/2000 +2025-03-28 12:50:56,989 Current Learning Rate: 0.0000015413 +2025-03-28 12:50:56,989 Train Loss: 0.0000436, Val Loss: 0.0000522 +2025-03-28 12:50:56,990 Epoch 1806/2000 +2025-03-28 12:55:37,906 Current Learning Rate: 0.0000022190 +2025-03-28 12:55:37,906 Train Loss: 0.0000383, Val Loss: 0.0000522 +2025-03-28 12:55:37,907 Epoch 1807/2000 +2025-03-28 13:00:17,861 Current Learning Rate: 0.0000030195 +2025-03-28 13:00:17,861 Train Loss: 0.0000554, Val Loss: 0.0000522 +2025-03-28 13:00:17,861 Epoch 1808/2000 +2025-03-28 13:04:58,540 Current Learning Rate: 0.0000039426 +2025-03-28 13:04:59,541 Train Loss: 0.0000462, Val Loss: 0.0000522 +2025-03-28 13:04:59,541 Epoch 1809/2000 +2025-03-28 13:09:39,350 Current Learning Rate: 0.0000049882 +2025-03-28 13:09:39,351 Train Loss: 0.0000423, Val Loss: 0.0000522 +2025-03-28 13:09:39,351 Epoch 1810/2000 +2025-03-28 13:14:19,695 Current 
Learning Rate: 0.0000061558 +2025-03-28 13:14:19,696 Train Loss: 0.0000440, Val Loss: 0.0000522 +2025-03-28 13:14:19,696 Epoch 1811/2000 +2025-03-28 13:18:59,806 Current Learning Rate: 0.0000074453 +2025-03-28 13:18:59,807 Train Loss: 0.0000420, Val Loss: 0.0000522 +2025-03-28 13:18:59,807 Epoch 1812/2000 +2025-03-28 13:23:39,704 Current Learning Rate: 0.0000088564 +2025-03-28 13:23:39,704 Train Loss: 0.0000377, Val Loss: 0.0000522 +2025-03-28 13:23:39,705 Epoch 1813/2000 +2025-03-28 13:28:20,509 Current Learning Rate: 0.0000103886 +2025-03-28 13:28:20,510 Train Loss: 0.0000399, Val Loss: 0.0000522 +2025-03-28 13:28:20,510 Epoch 1814/2000 +2025-03-28 13:33:01,284 Current Learning Rate: 0.0000120416 +2025-03-28 13:33:01,285 Train Loss: 0.0000383, Val Loss: 0.0000522 +2025-03-28 13:33:01,285 Epoch 1815/2000 +2025-03-28 13:37:41,428 Current Learning Rate: 0.0000138150 +2025-03-28 13:37:41,428 Train Loss: 0.0000368, Val Loss: 0.0000522 +2025-03-28 13:37:41,428 Epoch 1816/2000 +2025-03-28 13:42:21,418 Current Learning Rate: 0.0000157084 +2025-03-28 13:42:21,418 Train Loss: 0.0000442, Val Loss: 0.0000523 +2025-03-28 13:42:21,419 Epoch 1817/2000 +2025-03-28 13:47:02,103 Current Learning Rate: 0.0000177213 +2025-03-28 13:47:02,104 Train Loss: 0.0000414, Val Loss: 0.0000523 +2025-03-28 13:47:02,104 Epoch 1818/2000 +2025-03-28 13:51:42,755 Current Learning Rate: 0.0000198532 +2025-03-28 13:51:42,756 Train Loss: 0.0000420, Val Loss: 0.0000523 +2025-03-28 13:51:42,756 Epoch 1819/2000 +2025-03-28 13:56:23,415 Current Learning Rate: 0.0000221035 +2025-03-28 13:56:23,416 Train Loss: 0.0000410, Val Loss: 0.0000523 +2025-03-28 13:56:23,416 Epoch 1820/2000 +2025-03-28 14:01:03,762 Current Learning Rate: 0.0000244717 +2025-03-28 14:01:03,762 Train Loss: 0.0000475, Val Loss: 0.0000523 +2025-03-28 14:01:03,762 Epoch 1821/2000 +2025-03-28 14:05:44,480 Current Learning Rate: 0.0000269573 +2025-03-28 14:05:44,481 Train Loss: 0.0000408, Val Loss: 0.0000523 +2025-03-28 14:05:44,481 Epoch 
1822/2000 +2025-03-28 14:10:24,865 Current Learning Rate: 0.0000295596 +2025-03-28 14:10:24,866 Train Loss: 0.0000470, Val Loss: 0.0000523 +2025-03-28 14:10:24,866 Epoch 1823/2000 +2025-03-28 14:15:05,916 Current Learning Rate: 0.0000322780 +2025-03-28 14:15:05,916 Train Loss: 0.0000410, Val Loss: 0.0000524 +2025-03-28 14:15:05,917 Epoch 1824/2000 +2025-03-28 14:19:45,932 Current Learning Rate: 0.0000351118 +2025-03-28 14:19:45,932 Train Loss: 0.0000384, Val Loss: 0.0000524 +2025-03-28 14:19:45,933 Epoch 1825/2000 +2025-03-28 14:24:26,600 Current Learning Rate: 0.0000380602 +2025-03-28 14:24:26,601 Train Loss: 0.0000496, Val Loss: 0.0000524 +2025-03-28 14:24:26,601 Epoch 1826/2000 +2025-03-28 14:29:07,426 Current Learning Rate: 0.0000411227 +2025-03-28 14:29:07,427 Train Loss: 0.0000428, Val Loss: 0.0000525 +2025-03-28 14:29:07,427 Epoch 1827/2000 +2025-03-28 14:33:47,873 Current Learning Rate: 0.0000442984 +2025-03-28 14:33:47,874 Train Loss: 0.0000499, Val Loss: 0.0000524 +2025-03-28 14:33:47,874 Epoch 1828/2000 +2025-03-28 14:38:28,598 Current Learning Rate: 0.0000475865 +2025-03-28 14:38:28,599 Train Loss: 0.0000356, Val Loss: 0.0000523 +2025-03-28 14:38:28,599 Epoch 1829/2000 +2025-03-28 14:43:09,195 Current Learning Rate: 0.0000509862 +2025-03-28 14:43:09,196 Train Loss: 0.0000480, Val Loss: 0.0000526 +2025-03-28 14:43:09,196 Epoch 1830/2000 +2025-03-28 14:47:49,648 Current Learning Rate: 0.0000544967 +2025-03-28 14:47:49,649 Train Loss: 0.0000423, Val Loss: 0.0000525 +2025-03-28 14:47:49,649 Epoch 1831/2000 +2025-03-28 14:52:29,928 Current Learning Rate: 0.0000581172 +2025-03-28 14:52:29,928 Train Loss: 0.0000390, Val Loss: 0.0000527 +2025-03-28 14:52:29,929 Epoch 1832/2000 +2025-03-28 14:57:10,729 Current Learning Rate: 0.0000618467 +2025-03-28 14:57:10,730 Train Loss: 0.0000427, Val Loss: 0.0000525 +2025-03-28 14:57:10,730 Epoch 1833/2000 +2025-03-28 15:01:50,768 Current Learning Rate: 0.0000656842 +2025-03-28 15:01:50,768 Train Loss: 0.0000444, Val Loss: 
0.0000526 +2025-03-28 15:01:50,768 Epoch 1834/2000 +2025-03-28 15:06:30,923 Current Learning Rate: 0.0000696290 +2025-03-28 15:06:30,923 Train Loss: 0.0000326, Val Loss: 0.0000524 +2025-03-28 15:06:30,923 Epoch 1835/2000 +2025-03-28 15:11:11,118 Current Learning Rate: 0.0000736799 +2025-03-28 15:11:11,119 Train Loss: 0.0000444, Val Loss: 0.0000527 +2025-03-28 15:11:11,119 Epoch 1836/2000 +2025-03-28 15:15:51,558 Current Learning Rate: 0.0000778360 +2025-03-28 15:15:51,559 Train Loss: 0.0000413, Val Loss: 0.0000528 +2025-03-28 15:15:51,559 Epoch 1837/2000 +2025-03-28 15:20:31,689 Current Learning Rate: 0.0000820963 +2025-03-28 15:20:31,689 Train Loss: 0.0000482, Val Loss: 0.0000527 +2025-03-28 15:20:31,689 Epoch 1838/2000 +2025-03-28 15:25:12,221 Current Learning Rate: 0.0000864597 +2025-03-28 15:25:12,222 Train Loss: 0.0000396, Val Loss: 0.0000525 +2025-03-28 15:25:12,222 Epoch 1839/2000 +2025-03-28 15:29:52,784 Current Learning Rate: 0.0000909251 +2025-03-28 15:29:52,785 Train Loss: 0.0000414, Val Loss: 0.0000526 +2025-03-28 15:29:52,785 Epoch 1840/2000 +2025-03-28 15:34:32,608 Current Learning Rate: 0.0000954915 +2025-03-28 15:34:32,608 Train Loss: 0.0000428, Val Loss: 0.0000526 +2025-03-28 15:34:32,609 Epoch 1841/2000 +2025-03-28 15:39:13,052 Current Learning Rate: 0.0001001577 +2025-03-28 15:39:13,052 Train Loss: 0.0000367, Val Loss: 0.0000531 +2025-03-28 15:39:13,053 Epoch 1842/2000 +2025-03-28 15:43:53,213 Current Learning Rate: 0.0001049225 +2025-03-28 15:43:53,214 Train Loss: 0.0000413, Val Loss: 0.0000529 +2025-03-28 15:43:53,214 Epoch 1843/2000 +2025-03-28 15:48:33,542 Current Learning Rate: 0.0001097848 +2025-03-28 15:48:33,542 Train Loss: 0.0000396, Val Loss: 0.0000530 +2025-03-28 15:48:33,542 Epoch 1844/2000 +2025-03-28 15:53:13,390 Current Learning Rate: 0.0001147434 +2025-03-28 15:53:13,390 Train Loss: 0.0000453, Val Loss: 0.0000528 +2025-03-28 15:53:13,391 Epoch 1845/2000 +2025-03-28 15:57:54,324 Current Learning Rate: 0.0001197970 +2025-03-28 
15:57:54,324 Train Loss: 0.0000470, Val Loss: 0.0000528 +2025-03-28 15:57:54,324 Epoch 1846/2000 +2025-03-28 16:02:35,428 Current Learning Rate: 0.0001249445 +2025-03-28 16:02:35,429 Train Loss: 0.0000454, Val Loss: 0.0000529 +2025-03-28 16:02:35,429 Epoch 1847/2000 +2025-03-28 16:07:15,816 Current Learning Rate: 0.0001301845 +2025-03-28 16:07:15,817 Train Loss: 0.0000393, Val Loss: 0.0000533 +2025-03-28 16:07:15,817 Epoch 1848/2000 +2025-03-28 16:11:55,989 Current Learning Rate: 0.0001355157 +2025-03-28 16:11:55,990 Train Loss: 0.0000460, Val Loss: 0.0000533 +2025-03-28 16:11:55,990 Epoch 1849/2000 +2025-03-28 16:16:36,212 Current Learning Rate: 0.0001409369 +2025-03-28 16:16:36,212 Train Loss: 0.0000473, Val Loss: 0.0000533 +2025-03-28 16:16:36,212 Epoch 1850/2000 +2025-03-28 16:21:16,493 Current Learning Rate: 0.0001464466 +2025-03-28 16:21:16,494 Train Loss: 0.0000394, Val Loss: 0.0000529 +2025-03-28 16:21:16,494 Epoch 1851/2000 +2025-03-28 16:25:56,682 Current Learning Rate: 0.0001520436 +2025-03-28 16:25:56,683 Train Loss: 0.0000401, Val Loss: 0.0000534 +2025-03-28 16:25:56,683 Epoch 1852/2000 +2025-03-28 16:30:37,567 Current Learning Rate: 0.0001577264 +2025-03-28 16:30:37,570 Train Loss: 0.0000417, Val Loss: 0.0000533 +2025-03-28 16:30:37,570 Epoch 1853/2000 +2025-03-28 16:35:17,671 Current Learning Rate: 0.0001634937 +2025-03-28 16:35:17,671 Train Loss: 0.0000539, Val Loss: 0.0000533 +2025-03-28 16:35:17,672 Epoch 1854/2000 +2025-03-28 16:39:58,196 Current Learning Rate: 0.0001693441 +2025-03-28 16:39:58,197 Train Loss: 0.0000429, Val Loss: 0.0000537 +2025-03-28 16:39:58,197 Epoch 1855/2000 +2025-03-28 16:44:38,122 Current Learning Rate: 0.0001752760 +2025-03-28 16:44:38,123 Train Loss: 0.0000405, Val Loss: 0.0000533 +2025-03-28 16:44:38,123 Epoch 1856/2000 +2025-03-28 16:49:18,386 Current Learning Rate: 0.0001812880 +2025-03-28 16:49:18,386 Train Loss: 0.0000494, Val Loss: 0.0000532 +2025-03-28 16:49:18,387 Epoch 1857/2000 +2025-03-28 16:53:59,119 Current 
Learning Rate: 0.0001873787 +2025-03-28 16:53:59,119 Train Loss: 0.0000420, Val Loss: 0.0000545 +2025-03-28 16:53:59,119 Epoch 1858/2000 +2025-03-28 16:58:39,974 Current Learning Rate: 0.0001935465 +2025-03-28 16:58:39,974 Train Loss: 0.0000391, Val Loss: 0.0000547 +2025-03-28 16:58:39,975 Epoch 1859/2000 +2025-03-28 17:03:20,188 Current Learning Rate: 0.0001997899 +2025-03-28 17:03:20,189 Train Loss: 0.0000456, Val Loss: 0.0000532 +2025-03-28 17:03:20,189 Epoch 1860/2000 +2025-03-28 17:08:00,048 Current Learning Rate: 0.0002061074 +2025-03-28 17:08:00,049 Train Loss: 0.0000415, Val Loss: 0.0000531 +2025-03-28 17:08:00,049 Epoch 1861/2000 +2025-03-28 17:12:40,003 Current Learning Rate: 0.0002124974 +2025-03-28 17:12:40,003 Train Loss: 0.0000386, Val Loss: 0.0000532 +2025-03-28 17:12:40,003 Epoch 1862/2000 +2025-03-28 17:17:20,117 Current Learning Rate: 0.0002189583 +2025-03-28 17:17:20,118 Train Loss: 0.0000378, Val Loss: 0.0000534 +2025-03-28 17:17:20,118 Epoch 1863/2000 +2025-03-28 17:22:00,587 Current Learning Rate: 0.0002254886 +2025-03-28 17:22:00,588 Train Loss: 0.0000485, Val Loss: 0.0000534 +2025-03-28 17:22:00,588 Epoch 1864/2000 +2025-03-28 17:26:41,668 Current Learning Rate: 0.0002320866 +2025-03-28 17:26:41,668 Train Loss: 0.0000465, Val Loss: 0.0000545 +2025-03-28 17:26:41,668 Epoch 1865/2000 +2025-03-28 17:31:21,922 Current Learning Rate: 0.0002387507 +2025-03-28 17:31:21,922 Train Loss: 0.0000430, Val Loss: 0.0000551 +2025-03-28 17:31:21,923 Epoch 1866/2000 +2025-03-28 17:36:02,518 Current Learning Rate: 0.0002454793 +2025-03-28 17:36:02,518 Train Loss: 0.0000405, Val Loss: 0.0000534 +2025-03-28 17:36:02,518 Epoch 1867/2000 +2025-03-28 17:40:42,994 Current Learning Rate: 0.0002522707 +2025-03-28 17:40:42,997 Train Loss: 0.0000430, Val Loss: 0.0000546 +2025-03-28 17:40:42,998 Epoch 1868/2000 +2025-03-28 17:45:23,029 Current Learning Rate: 0.0002591232 +2025-03-28 17:45:23,029 Train Loss: 0.0000464, Val Loss: 0.0000549 +2025-03-28 17:45:23,030 Epoch 
1869/2000 +2025-03-28 17:50:03,654 Current Learning Rate: 0.0002660351 +2025-03-28 17:50:03,654 Train Loss: 0.0000417, Val Loss: 0.0000552 +2025-03-28 17:50:03,654 Epoch 1870/2000 +2025-03-28 17:54:43,348 Current Learning Rate: 0.0002730048 +2025-03-28 17:54:43,349 Train Loss: 0.0000394, Val Loss: 0.0000544 +2025-03-28 17:54:43,349 Epoch 1871/2000 +2025-03-28 17:59:23,649 Current Learning Rate: 0.0002800304 +2025-03-28 17:59:23,650 Train Loss: 0.0000479, Val Loss: 0.0000554 +2025-03-28 17:59:23,650 Epoch 1872/2000 +2025-03-28 18:04:03,670 Current Learning Rate: 0.0002871104 +2025-03-28 18:04:03,670 Train Loss: 0.0000430, Val Loss: 0.0000535 +2025-03-28 18:04:03,671 Epoch 1873/2000 +2025-03-28 18:08:44,956 Current Learning Rate: 0.0002942428 +2025-03-28 18:08:44,956 Train Loss: 0.0000546, Val Loss: 0.0000547 +2025-03-28 18:08:44,956 Epoch 1874/2000 +2025-03-28 18:13:24,872 Current Learning Rate: 0.0003014261 +2025-03-28 18:13:24,873 Train Loss: 0.0000433, Val Loss: 0.0000536 +2025-03-28 18:13:24,873 Epoch 1875/2000 +2025-03-28 18:18:04,674 Current Learning Rate: 0.0003086583 +2025-03-28 18:18:04,674 Train Loss: 0.0000364, Val Loss: 0.0000534 +2025-03-28 18:18:04,675 Epoch 1876/2000 +2025-03-28 18:22:44,553 Current Learning Rate: 0.0003159377 +2025-03-28 18:22:44,554 Train Loss: 0.0000433, Val Loss: 0.0000584 +2025-03-28 18:22:44,554 Epoch 1877/2000 +2025-03-28 18:27:24,227 Current Learning Rate: 0.0003232626 +2025-03-28 18:27:24,228 Train Loss: 0.0000484, Val Loss: 0.0000538 +2025-03-28 18:27:24,228 Epoch 1878/2000 +2025-03-28 18:32:05,015 Current Learning Rate: 0.0003306310 +2025-03-28 18:32:05,015 Train Loss: 0.0000535, Val Loss: 0.0000553 +2025-03-28 18:32:05,016 Epoch 1879/2000 +2025-03-28 18:36:45,218 Current Learning Rate: 0.0003380413 +2025-03-28 18:36:45,219 Train Loss: 0.0000517, Val Loss: 0.0000556 +2025-03-28 18:36:45,219 Epoch 1880/2000 +2025-03-28 18:41:25,678 Current Learning Rate: 0.0003454915 +2025-03-28 18:41:25,679 Train Loss: 0.0000419, Val Loss: 
0.0000553 +2025-03-28 18:41:25,679 Epoch 1881/2000 +2025-03-28 18:46:05,075 Current Learning Rate: 0.0003529798 +2025-03-28 18:46:05,075 Train Loss: 0.0000500, Val Loss: 0.0000542 +2025-03-28 18:46:05,075 Epoch 1882/2000 +2025-03-28 18:50:45,343 Current Learning Rate: 0.0003605044 +2025-03-28 18:50:45,344 Train Loss: 0.0000496, Val Loss: 0.0000544 +2025-03-28 18:50:45,344 Epoch 1883/2000 +2025-03-28 18:55:25,360 Current Learning Rate: 0.0003680635 +2025-03-28 18:55:25,360 Train Loss: 0.0000411, Val Loss: 0.0000544 +2025-03-28 18:55:25,360 Epoch 1884/2000 +2025-03-28 19:00:05,599 Current Learning Rate: 0.0003756551 +2025-03-28 19:00:05,599 Train Loss: 0.0000433, Val Loss: 0.0000540 +2025-03-28 19:00:05,599 Epoch 1885/2000 +2025-03-28 19:04:45,892 Current Learning Rate: 0.0003832773 +2025-03-28 19:04:45,892 Train Loss: 0.0000402, Val Loss: 0.0000540 +2025-03-28 19:04:45,892 Epoch 1886/2000 +2025-03-28 19:09:25,988 Current Learning Rate: 0.0003909284 +2025-03-28 19:09:25,989 Train Loss: 0.0000392, Val Loss: 0.0000536 +2025-03-28 19:09:25,989 Epoch 1887/2000 +2025-03-28 19:14:06,151 Current Learning Rate: 0.0003986064 +2025-03-28 19:14:06,151 Train Loss: 0.0000514, Val Loss: 0.0000540 +2025-03-28 19:14:06,151 Epoch 1888/2000 +2025-03-28 19:18:46,077 Current Learning Rate: 0.0004063093 +2025-03-28 19:18:46,077 Train Loss: 0.0000428, Val Loss: 0.0000553 +2025-03-28 19:18:46,077 Epoch 1889/2000 +2025-03-28 19:23:26,897 Current Learning Rate: 0.0004140354 +2025-03-28 19:23:26,897 Train Loss: 0.0000418, Val Loss: 0.0000542 +2025-03-28 19:23:26,898 Epoch 1890/2000 +2025-03-28 19:28:06,601 Current Learning Rate: 0.0004217828 +2025-03-28 19:28:06,602 Train Loss: 0.0000534, Val Loss: 0.0000552 +2025-03-28 19:28:06,602 Epoch 1891/2000 +2025-03-28 19:32:46,850 Current Learning Rate: 0.0004295494 +2025-03-28 19:32:46,850 Train Loss: 0.0000492, Val Loss: 0.0000544 +2025-03-28 19:32:46,851 Epoch 1892/2000 +2025-03-28 19:37:26,675 Current Learning Rate: 0.0004373334 +2025-03-28 
19:37:26,676 Train Loss: 0.0000493, Val Loss: 0.0000568 +2025-03-28 19:37:26,676 Epoch 1893/2000 +2025-03-28 19:42:06,563 Current Learning Rate: 0.0004451328 +2025-03-28 19:42:06,564 Train Loss: 0.0000482, Val Loss: 0.0000538 +2025-03-28 19:42:06,564 Epoch 1894/2000 +2025-03-28 19:46:46,499 Current Learning Rate: 0.0004529458 +2025-03-28 19:46:46,499 Train Loss: 0.0000391, Val Loss: 0.0000541 +2025-03-28 19:46:46,499 Epoch 1895/2000 +2025-03-28 19:51:26,593 Current Learning Rate: 0.0004607705 +2025-03-28 19:51:26,594 Train Loss: 0.0000490, Val Loss: 0.0000564 +2025-03-28 19:51:26,594 Epoch 1896/2000 +2025-03-28 19:56:06,741 Current Learning Rate: 0.0004686047 +2025-03-28 19:56:06,742 Train Loss: 0.0000503, Val Loss: 0.0000575 +2025-03-28 19:56:06,742 Epoch 1897/2000 +2025-03-28 20:00:46,637 Current Learning Rate: 0.0004764468 +2025-03-28 20:00:46,638 Train Loss: 0.0000377, Val Loss: 0.0000555 +2025-03-28 20:00:46,638 Epoch 1898/2000 +2025-03-28 20:05:26,688 Current Learning Rate: 0.0004842946 +2025-03-28 20:05:26,688 Train Loss: 0.0000482, Val Loss: 0.0000563 +2025-03-28 20:05:26,689 Epoch 1899/2000 +2025-03-28 20:10:06,828 Current Learning Rate: 0.0004921463 +2025-03-28 20:10:06,828 Train Loss: 0.0000435, Val Loss: 0.0000557 +2025-03-28 20:10:06,828 Epoch 1900/2000 +2025-03-28 20:14:46,799 Current Learning Rate: 0.0005000000 +2025-03-28 20:14:46,799 Train Loss: 0.0000486, Val Loss: 0.0000585 +2025-03-28 20:14:46,800 Epoch 1901/2000 +2025-03-28 20:19:27,515 Current Learning Rate: 0.0005078537 +2025-03-28 20:19:27,516 Train Loss: 0.0000503, Val Loss: 0.0000551 +2025-03-28 20:19:27,516 Epoch 1902/2000 +2025-03-28 20:24:07,502 Current Learning Rate: 0.0005157054 +2025-03-28 20:24:07,502 Train Loss: 0.0000486, Val Loss: 0.0000548 +2025-03-28 20:24:07,502 Epoch 1903/2000 +2025-03-28 20:28:47,963 Current Learning Rate: 0.0005235532 +2025-03-28 20:28:47,963 Train Loss: 0.0000513, Val Loss: 0.0000554 +2025-03-28 20:28:47,964 Epoch 1904/2000 +2025-03-28 20:33:28,117 Current 
Learning Rate: 0.0005313953 +2025-03-28 20:33:28,117 Train Loss: 0.0000437, Val Loss: 0.0000557 +2025-03-28 20:33:28,118 Epoch 1905/2000 +2025-03-28 20:38:07,988 Current Learning Rate: 0.0005392295 +2025-03-28 20:38:07,988 Train Loss: 0.0000468, Val Loss: 0.0000583 +2025-03-28 20:38:07,989 Epoch 1906/2000 +2025-03-28 20:42:47,956 Current Learning Rate: 0.0005470542 +2025-03-28 20:42:47,957 Train Loss: 0.0000452, Val Loss: 0.0000553 +2025-03-28 20:42:47,957 Epoch 1907/2000 +2025-03-28 20:47:27,868 Current Learning Rate: 0.0005548672 +2025-03-28 20:47:27,869 Train Loss: 0.0000507, Val Loss: 0.0000606 +2025-03-28 20:47:27,869 Epoch 1908/2000 +2025-03-28 20:52:07,885 Current Learning Rate: 0.0005626666 +2025-03-28 20:52:07,886 Train Loss: 0.0000479, Val Loss: 0.0000566 +2025-03-28 20:52:07,886 Epoch 1909/2000 +2025-03-28 20:56:48,874 Current Learning Rate: 0.0005704506 +2025-03-28 20:56:48,874 Train Loss: 0.0000430, Val Loss: 0.0000555 +2025-03-28 20:56:48,874 Epoch 1910/2000 +2025-03-28 21:01:29,671 Current Learning Rate: 0.0005782172 +2025-03-28 21:01:29,672 Train Loss: 0.0000526, Val Loss: 0.0000587 +2025-03-28 21:01:29,672 Epoch 1911/2000 +2025-03-28 21:06:09,985 Current Learning Rate: 0.0005859646 +2025-03-28 21:06:09,986 Train Loss: 0.0000461, Val Loss: 0.0000595 +2025-03-28 21:06:09,986 Epoch 1912/2000 +2025-03-28 21:10:50,908 Current Learning Rate: 0.0005936907 +2025-03-28 21:10:50,909 Train Loss: 0.0000484, Val Loss: 0.0000568 +2025-03-28 21:10:50,909 Epoch 1913/2000 +2025-03-28 21:15:31,439 Current Learning Rate: 0.0006013936 +2025-03-28 21:15:31,440 Train Loss: 0.0000538, Val Loss: 0.0000598 +2025-03-28 21:15:31,440 Epoch 1914/2000 +2025-03-28 21:20:11,895 Current Learning Rate: 0.0006090716 +2025-03-28 21:20:11,896 Train Loss: 0.0000580, Val Loss: 0.0000597 +2025-03-28 21:20:11,896 Epoch 1915/2000 +2025-03-28 21:24:52,673 Current Learning Rate: 0.0006167227 +2025-03-28 21:24:52,674 Train Loss: 0.0000462, Val Loss: 0.0000570 +2025-03-28 21:24:52,674 Epoch 
1916/2000 +2025-03-28 21:29:32,838 Current Learning Rate: 0.0006243449 +2025-03-28 21:29:32,838 Train Loss: 0.0000551, Val Loss: 0.0000607 +2025-03-28 21:29:32,839 Epoch 1917/2000 +2025-03-28 21:34:13,264 Current Learning Rate: 0.0006319365 +2025-03-28 21:34:13,265 Train Loss: 0.0000459, Val Loss: 0.0000564 +2025-03-28 21:34:13,265 Epoch 1918/2000 +2025-03-28 21:38:53,590 Current Learning Rate: 0.0006394956 +2025-03-28 21:38:53,590 Train Loss: 0.0000460, Val Loss: 0.0000575 +2025-03-28 21:38:53,591 Epoch 1919/2000 +2025-03-28 21:43:33,720 Current Learning Rate: 0.0006470202 +2025-03-28 21:43:33,721 Train Loss: 0.0000554, Val Loss: 0.0000586 +2025-03-28 21:43:33,721 Epoch 1920/2000 +2025-03-28 21:48:13,783 Current Learning Rate: 0.0006545085 +2025-03-28 21:48:13,784 Train Loss: 0.0000491, Val Loss: 0.0000624 +2025-03-28 21:48:13,784 Epoch 1921/2000 +2025-03-28 21:52:53,498 Current Learning Rate: 0.0006619587 +2025-03-28 21:52:53,499 Train Loss: 0.0000471, Val Loss: 0.0000588 +2025-03-28 21:52:53,499 Epoch 1922/2000 +2025-03-28 21:57:33,887 Current Learning Rate: 0.0006693690 +2025-03-28 21:57:33,888 Train Loss: 0.0000563, Val Loss: 0.0000569 +2025-03-28 21:57:33,888 Epoch 1923/2000 +2025-03-28 22:02:14,608 Current Learning Rate: 0.0006767374 +2025-03-28 22:02:14,609 Train Loss: 0.0000577, Val Loss: 0.0000622 +2025-03-28 22:02:14,609 Epoch 1924/2000 +2025-03-28 22:06:55,168 Current Learning Rate: 0.0006840623 +2025-03-28 22:06:55,169 Train Loss: 0.0000540, Val Loss: 0.0000602 +2025-03-28 22:06:55,169 Epoch 1925/2000 +2025-03-28 22:11:35,535 Current Learning Rate: 0.0006913417 +2025-03-28 22:11:35,535 Train Loss: 0.0000487, Val Loss: 0.0000557 +2025-03-28 22:11:35,535 Epoch 1926/2000 +2025-03-28 22:16:15,354 Current Learning Rate: 0.0006985739 +2025-03-28 22:16:15,354 Train Loss: 0.0000498, Val Loss: 0.0000580 +2025-03-28 22:16:15,354 Epoch 1927/2000 +2025-03-28 22:20:55,277 Current Learning Rate: 0.0007057572 +2025-03-28 22:20:55,277 Train Loss: 0.0000460, Val Loss: 
0.0000584 +2025-03-28 22:20:55,277 Epoch 1928/2000 +2025-03-28 22:25:35,289 Current Learning Rate: 0.0007128896 +2025-03-28 22:25:35,290 Train Loss: 0.0000590, Val Loss: 0.0000706 +2025-03-28 22:25:35,290 Epoch 1929/2000 +2025-03-28 22:30:15,444 Current Learning Rate: 0.0007199696 +2025-03-28 22:30:15,445 Train Loss: 0.0000577, Val Loss: 0.0000567 +2025-03-28 22:30:15,445 Epoch 1930/2000 +2025-03-28 22:34:55,585 Current Learning Rate: 0.0007269952 +2025-03-28 22:34:55,586 Train Loss: 0.0000573, Val Loss: 0.0000607 +2025-03-28 22:34:55,586 Epoch 1931/2000 +2025-03-28 22:39:35,069 Current Learning Rate: 0.0007339649 +2025-03-28 22:39:35,070 Train Loss: 0.0000475, Val Loss: 0.0000605 +2025-03-28 22:39:35,070 Epoch 1932/2000 +2025-03-28 22:44:14,952 Current Learning Rate: 0.0007408768 +2025-03-28 22:44:14,953 Train Loss: 0.0000575, Val Loss: 0.0000580 +2025-03-28 22:44:14,953 Epoch 1933/2000 +2025-03-28 22:48:55,233 Current Learning Rate: 0.0007477293 +2025-03-28 22:48:55,234 Train Loss: 0.0000578, Val Loss: 0.0000572 +2025-03-28 22:48:55,234 Epoch 1934/2000 +2025-03-28 22:53:35,805 Current Learning Rate: 0.0007545207 +2025-03-28 22:53:35,806 Train Loss: 0.0000558, Val Loss: 0.0000592 +2025-03-28 22:53:35,806 Epoch 1935/2000 +2025-03-28 22:58:16,020 Current Learning Rate: 0.0007612493 +2025-03-28 22:58:16,021 Train Loss: 0.0000597, Val Loss: 0.0000672 +2025-03-28 22:58:16,021 Epoch 1936/2000 +2025-03-28 23:02:56,067 Current Learning Rate: 0.0007679134 +2025-03-28 23:02:56,068 Train Loss: 0.0000542, Val Loss: 0.0000599 +2025-03-28 23:02:56,068 Epoch 1937/2000 +2025-03-28 23:07:36,802 Current Learning Rate: 0.0007745114 +2025-03-28 23:07:36,802 Train Loss: 0.0000496, Val Loss: 0.0000631 +2025-03-28 23:07:36,802 Epoch 1938/2000 +2025-03-28 23:12:17,003 Current Learning Rate: 0.0007810417 +2025-03-28 23:12:17,004 Train Loss: 0.0000501, Val Loss: 0.0000613 +2025-03-28 23:12:17,004 Epoch 1939/2000 +2025-03-28 23:16:58,011 Current Learning Rate: 0.0007875026 +2025-03-28 
23:16:58,012 Train Loss: 0.0000541, Val Loss: 0.0000607 +2025-03-28 23:16:58,012 Epoch 1940/2000 +2025-03-28 23:21:38,104 Current Learning Rate: 0.0007938926 +2025-03-28 23:21:38,105 Train Loss: 0.0000550, Val Loss: 0.0000674 +2025-03-28 23:21:38,105 Epoch 1941/2000 +2025-03-28 23:26:17,763 Current Learning Rate: 0.0008002101 +2025-03-28 23:26:17,774 Train Loss: 0.0000603, Val Loss: 0.0000665 +2025-03-28 23:26:17,774 Epoch 1942/2000 +2025-03-28 23:30:57,664 Current Learning Rate: 0.0008064535 +2025-03-28 23:30:57,665 Train Loss: 0.0000526, Val Loss: 0.0000598 +2025-03-28 23:30:57,665 Epoch 1943/2000 +2025-03-28 23:35:38,173 Current Learning Rate: 0.0008126213 +2025-03-28 23:35:38,173 Train Loss: 0.0000566, Val Loss: 0.0000626 +2025-03-28 23:35:38,174 Epoch 1944/2000 +2025-03-28 23:40:18,514 Current Learning Rate: 0.0008187120 +2025-03-28 23:40:18,515 Train Loss: 0.0000614, Val Loss: 0.0000660 +2025-03-28 23:40:18,515 Epoch 1945/2000 +2025-03-28 23:44:58,436 Current Learning Rate: 0.0008247240 +2025-03-28 23:44:58,437 Train Loss: 0.0000735, Val Loss: 0.0000630 +2025-03-28 23:44:58,437 Epoch 1946/2000 +2025-03-28 23:49:39,136 Current Learning Rate: 0.0008306559 +2025-03-28 23:49:39,137 Train Loss: 0.0000490, Val Loss: 0.0000594 +2025-03-28 23:49:39,137 Epoch 1947/2000 +2025-03-28 23:54:19,103 Current Learning Rate: 0.0008365063 +2025-03-28 23:54:19,104 Train Loss: 0.0000545, Val Loss: 0.0000703 +2025-03-28 23:54:19,104 Epoch 1948/2000 +2025-03-28 23:58:59,823 Current Learning Rate: 0.0008422736 +2025-03-28 23:58:59,824 Train Loss: 0.0000509, Val Loss: 0.0000594 +2025-03-28 23:58:59,824 Epoch 1949/2000 +2025-03-29 00:03:40,521 Current Learning Rate: 0.0008479564 +2025-03-29 00:03:40,522 Train Loss: 0.0000631, Val Loss: 0.0000645 +2025-03-29 00:03:40,522 Epoch 1950/2000 +2025-03-29 00:08:20,816 Current Learning Rate: 0.0008535534 +2025-03-29 00:08:20,817 Train Loss: 0.0000617, Val Loss: 0.0000659 +2025-03-29 00:08:20,817 Epoch 1951/2000 +2025-03-29 00:13:01,928 Current 
Learning Rate: 0.0008590631 +2025-03-29 00:13:01,928 Train Loss: 0.0000535, Val Loss: 0.0000702 +2025-03-29 00:13:01,929 Epoch 1952/2000 +2025-03-29 00:17:42,603 Current Learning Rate: 0.0008644843 +2025-03-29 00:17:42,604 Train Loss: 0.0000748, Val Loss: 0.0000772 +2025-03-29 00:17:42,605 Epoch 1953/2000 +2025-03-29 00:22:24,045 Current Learning Rate: 0.0008698155 +2025-03-29 00:22:24,045 Train Loss: 0.0000642, Val Loss: 0.0000635 +2025-03-29 00:22:24,046 Epoch 1954/2000 +2025-03-29 00:27:04,933 Current Learning Rate: 0.0008750555 +2025-03-29 00:27:04,933 Train Loss: 0.0000659, Val Loss: 0.0000652 +2025-03-29 00:27:04,933 Epoch 1955/2000 +2025-03-29 00:31:45,128 Current Learning Rate: 0.0008802030 +2025-03-29 00:31:45,129 Train Loss: 0.0000545, Val Loss: 0.0000584 +2025-03-29 00:31:45,129 Epoch 1956/2000 +2025-03-29 00:36:26,638 Current Learning Rate: 0.0008852566 +2025-03-29 00:36:26,638 Train Loss: 0.0000501, Val Loss: 0.0000620 +2025-03-29 00:36:26,639 Epoch 1957/2000 +2025-03-29 00:41:07,625 Current Learning Rate: 0.0008902152 +2025-03-29 00:41:07,626 Train Loss: 0.0000675, Val Loss: 0.0000707 +2025-03-29 00:41:07,626 Epoch 1958/2000 +2025-03-29 00:45:47,811 Current Learning Rate: 0.0008950775 +2025-03-29 00:45:47,811 Train Loss: 0.0000648, Val Loss: 0.0000680 +2025-03-29 00:45:47,812 Epoch 1959/2000 +2025-03-29 00:50:28,448 Current Learning Rate: 0.0008998423 +2025-03-29 00:50:28,449 Train Loss: 0.0000775, Val Loss: 0.0000704 +2025-03-29 00:50:28,449 Epoch 1960/2000 +2025-03-29 00:55:08,815 Current Learning Rate: 0.0009045085 +2025-03-29 00:55:08,816 Train Loss: 0.0000622, Val Loss: 0.0000629 +2025-03-29 00:55:08,816 Epoch 1961/2000 +2025-03-29 00:59:49,260 Current Learning Rate: 0.0009090749 +2025-03-29 00:59:49,260 Train Loss: 0.0000527, Val Loss: 0.0000619 +2025-03-29 00:59:49,261 Epoch 1962/2000 +2025-03-29 01:04:29,633 Current Learning Rate: 0.0009135403 +2025-03-29 01:04:29,633 Train Loss: 0.0000557, Val Loss: 0.0000610 +2025-03-29 01:04:29,634 Epoch 
1963/2000 +2025-03-29 01:09:10,106 Current Learning Rate: 0.0009179037 +2025-03-29 01:09:10,107 Train Loss: 0.0000585, Val Loss: 0.0000695 +2025-03-29 01:09:10,108 Epoch 1964/2000 +2025-03-29 01:13:51,077 Current Learning Rate: 0.0009221640 +2025-03-29 01:13:51,078 Train Loss: 0.0000683, Val Loss: 0.0000644 +2025-03-29 01:13:51,079 Epoch 1965/2000 +2025-03-29 01:18:31,521 Current Learning Rate: 0.0009263201 +2025-03-29 01:18:31,522 Train Loss: 0.0000623, Val Loss: 0.0000654 +2025-03-29 01:18:31,522 Epoch 1966/2000 +2025-03-29 01:23:11,793 Current Learning Rate: 0.0009303710 +2025-03-29 01:23:11,793 Train Loss: 0.0000644, Val Loss: 0.0000701 +2025-03-29 01:23:11,794 Epoch 1967/2000 +2025-03-29 01:27:52,459 Current Learning Rate: 0.0009343158 +2025-03-29 01:27:52,460 Train Loss: 0.0000645, Val Loss: 0.0000629 +2025-03-29 01:27:52,460 Epoch 1968/2000 +2025-03-29 01:32:32,863 Current Learning Rate: 0.0009381533 +2025-03-29 01:32:32,863 Train Loss: 0.0000672, Val Loss: 0.0000615 +2025-03-29 01:32:32,863 Epoch 1969/2000 +2025-03-29 01:37:13,920 Current Learning Rate: 0.0009418828 +2025-03-29 01:37:13,920 Train Loss: 0.0000584, Val Loss: 0.0000619 +2025-03-29 01:37:13,921 Epoch 1970/2000 +2025-03-29 01:41:54,564 Current Learning Rate: 0.0009455033 +2025-03-29 01:41:54,564 Train Loss: 0.0000480, Val Loss: 0.0000680 +2025-03-29 01:41:54,564 Epoch 1971/2000 +2025-03-29 01:46:34,777 Current Learning Rate: 0.0009490138 +2025-03-29 01:46:34,778 Train Loss: 0.0000619, Val Loss: 0.0000748 +2025-03-29 01:46:34,778 Epoch 1972/2000 +2025-03-29 01:51:16,477 Current Learning Rate: 0.0009524135 +2025-03-29 01:51:16,478 Train Loss: 0.0000551, Val Loss: 0.0000658 +2025-03-29 01:51:16,478 Epoch 1973/2000 +2025-03-29 01:55:56,504 Current Learning Rate: 0.0009557016 +2025-03-29 01:55:56,504 Train Loss: 0.0000632, Val Loss: 0.0000659 +2025-03-29 01:55:56,504 Epoch 1974/2000 +2025-03-29 02:00:37,318 Current Learning Rate: 0.0009588773 +2025-03-29 02:00:37,318 Train Loss: 0.0000645, Val Loss: 
0.0000665 +2025-03-29 02:00:37,318 Epoch 1975/2000 +2025-03-29 02:05:18,057 Current Learning Rate: 0.0009619398 +2025-03-29 02:05:18,057 Train Loss: 0.0000499, Val Loss: 0.0000660 +2025-03-29 02:05:18,058 Epoch 1976/2000 +2025-03-29 02:09:58,333 Current Learning Rate: 0.0009648882 +2025-03-29 02:09:58,334 Train Loss: 0.0000642, Val Loss: 0.0000793 +2025-03-29 02:09:58,334 Epoch 1977/2000 +2025-03-29 02:14:38,772 Current Learning Rate: 0.0009677220 +2025-03-29 02:14:38,772 Train Loss: 0.0000665, Val Loss: 0.0000787 +2025-03-29 02:14:38,773 Epoch 1978/2000 +2025-03-29 02:19:19,700 Current Learning Rate: 0.0009704404 +2025-03-29 02:19:19,700 Train Loss: 0.0000643, Val Loss: 0.0000715 +2025-03-29 02:19:19,700 Epoch 1979/2000 +2025-03-29 02:24:00,643 Current Learning Rate: 0.0009730427 +2025-03-29 02:24:00,644 Train Loss: 0.0000787, Val Loss: 0.0000737 +2025-03-29 02:24:00,644 Epoch 1980/2000 +2025-03-29 02:28:41,302 Current Learning Rate: 0.0009755283 +2025-03-29 02:28:41,303 Train Loss: 0.0000542, Val Loss: 0.0000664 +2025-03-29 02:28:41,303 Epoch 1981/2000 +2025-03-29 02:33:21,588 Current Learning Rate: 0.0009778965 +2025-03-29 02:33:21,589 Train Loss: 0.0000594, Val Loss: 0.0000717 +2025-03-29 02:33:21,589 Epoch 1982/2000 +2025-03-29 02:38:02,769 Current Learning Rate: 0.0009801468 +2025-03-29 02:38:02,770 Train Loss: 0.0000708, Val Loss: 0.0000667 +2025-03-29 02:38:02,770 Epoch 1983/2000 +2025-03-29 02:42:43,806 Current Learning Rate: 0.0009822787 +2025-03-29 02:42:43,806 Train Loss: 0.0000761, Val Loss: 0.0000669 +2025-03-29 02:42:43,806 Epoch 1984/2000 +2025-03-29 02:47:24,822 Current Learning Rate: 0.0009842916 +2025-03-29 02:47:24,823 Train Loss: 0.0000604, Val Loss: 0.0000750 +2025-03-29 02:47:24,823 Epoch 1985/2000 +2025-03-29 02:52:05,725 Current Learning Rate: 0.0009861850 +2025-03-29 02:52:05,725 Train Loss: 0.0000771, Val Loss: 0.0000766 +2025-03-29 02:52:05,726 Epoch 1986/2000 +2025-03-29 02:56:46,506 Current Learning Rate: 0.0009879584 +2025-03-29 
02:56:46,506 Train Loss: 0.0000609, Val Loss: 0.0000647 +2025-03-29 02:56:46,507 Epoch 1987/2000 +2025-03-29 03:01:27,796 Current Learning Rate: 0.0009896114 +2025-03-29 03:01:27,797 Train Loss: 0.0000671, Val Loss: 0.0000666 +2025-03-29 03:01:27,797 Epoch 1988/2000 +2025-03-29 03:06:08,262 Current Learning Rate: 0.0009911436 +2025-03-29 03:06:08,263 Train Loss: 0.0000550, Val Loss: 0.0000703 +2025-03-29 03:06:08,263 Epoch 1989/2000 +2025-03-29 03:10:49,195 Current Learning Rate: 0.0009925547 +2025-03-29 03:10:49,195 Train Loss: 0.0000693, Val Loss: 0.0000837 +2025-03-29 03:10:49,196 Epoch 1990/2000 +2025-03-29 03:15:30,102 Current Learning Rate: 0.0009938442 +2025-03-29 03:15:30,103 Train Loss: 0.0000582, Val Loss: 0.0000653 +2025-03-29 03:15:30,103 Epoch 1991/2000 +2025-03-29 03:20:10,933 Current Learning Rate: 0.0009950118 +2025-03-29 03:20:10,934 Train Loss: 0.0000540, Val Loss: 0.0000677 +2025-03-29 03:20:10,934 Epoch 1992/2000 +2025-03-29 03:24:51,293 Current Learning Rate: 0.0009960574 +2025-03-29 03:24:51,294 Train Loss: 0.0000693, Val Loss: 0.0000653 +2025-03-29 03:24:51,294 Epoch 1993/2000 +2025-03-29 03:29:31,764 Current Learning Rate: 0.0009969805 +2025-03-29 03:29:31,765 Train Loss: 0.0000589, Val Loss: 0.0000615 +2025-03-29 03:29:31,765 Epoch 1994/2000 +2025-03-29 03:34:12,284 Current Learning Rate: 0.0009977810 +2025-03-29 03:34:12,284 Train Loss: 0.0000583, Val Loss: 0.0000624 +2025-03-29 03:34:12,285 Epoch 1995/2000 +2025-03-29 03:38:52,551 Current Learning Rate: 0.0009984587 +2025-03-29 03:38:52,552 Train Loss: 0.0000665, Val Loss: 0.0000645 +2025-03-29 03:38:52,552 Epoch 1996/2000 +2025-03-29 03:43:33,461 Current Learning Rate: 0.0009990134 +2025-03-29 03:43:33,461 Train Loss: 0.0000489, Val Loss: 0.0000686 +2025-03-29 03:43:33,462 Epoch 1997/2000 +2025-03-29 03:48:14,374 Current Learning Rate: 0.0009994449 +2025-03-29 03:48:14,375 Train Loss: 0.0000646, Val Loss: 0.0000774 +2025-03-29 03:48:14,376 Epoch 1998/2000 +2025-03-29 03:52:55,330 Current 
Learning Rate: 0.0009997533 +2025-03-29 03:52:55,330 Train Loss: 0.0000613, Val Loss: 0.0000642 +2025-03-29 03:52:55,331 Epoch 1999/2000 +2025-03-29 03:57:36,381 Current Learning Rate: 0.0009999383 +2025-03-29 03:57:36,382 Train Loss: 0.0000722, Val Loss: 0.0000657 +2025-03-29 03:57:36,382 Epoch 2000/2000 +2025-03-29 04:02:17,182 Current Learning Rate: 0.0010000000 +2025-03-29 04:02:17,182 Train Loss: 0.0000605, Val Loss: 0.0000662 +2025-03-29 04:02:26,372 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250221_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250221_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..508b66cc3b56448eb663ff3b851975be72e4da8f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250221_training_log.log @@ -0,0 +1,31 @@ +2025-02-21 22:57:52,738 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-21 22:57:52,789 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-21 22:57:52,818 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-21 22:57:52,844 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-21 22:57:52,876 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-21 22:57:52,895 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-21 22:57:52,899 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-21 22:57:52,901 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-21 22:58:25,645 Epoch 1/2000 +2025-02-21 22:58:27,868 Reducer buckets have been rebuilt in this iteration. +2025-02-21 22:58:27,868 Reducer buckets have been rebuilt in this iteration. +2025-02-21 22:58:27,868 Reducer buckets have been rebuilt in this iteration. +2025-02-21 22:58:27,870 Reducer buckets have been rebuilt in this iteration. +2025-02-21 22:58:27,870 Reducer buckets have been rebuilt in this iteration. 
+2025-02-21 22:58:27,870 Reducer buckets have been rebuilt in this iteration. +2025-02-21 22:58:27,870 Reducer buckets have been rebuilt in this iteration. +2025-02-21 22:58:27,870 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:09:53,915 Current Learning Rate: 0.0009999383 +2025-02-21 23:09:54,598 Train Loss: 251412670963189856.0000000, Val Loss: 252710721119013376.0000000 +2025-02-21 23:09:54,598 Epoch 2/2000 +2025-02-21 23:21:20,546 Current Learning Rate: 0.0009997533 +2025-02-21 23:21:21,288 Train Loss: 251412241705967168.0000000, Val Loss: 252710133857416864.0000000 +2025-02-21 23:21:21,289 Epoch 3/2000 +2025-02-21 23:32:14,355 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-21 23:32:14,433 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-21 23:32:14,470 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-21 23:32:14,491 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-21 23:32:14,536 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-21 23:32:14,550 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-21 23:32:14,582 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-21 23:32:14,589 Added key: store_based_barrier_key:1 to store for rank: 0 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250222_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250222_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..34393d378c2ba9f90beecdd6a6f8c76922d9b199 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250222_training_log.log @@ -0,0 +1,23 @@ +2025-02-21 23:33:39,872 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-21 23:33:39,930 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-21 23:33:40,019 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-21 23:33:40,036 Added key: store_based_barrier_key:1 to 
store for rank: 6 +2025-02-21 23:33:40,067 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-21 23:33:40,077 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-21 23:33:40,114 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-21 23:33:40,131 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-21 23:34:29,176 Epoch 1/2000 +2025-02-21 23:34:31,455 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. +2025-02-21 23:34:31,461 Reducer buckets have been rebuilt in this iteration. 
+2025-02-21 23:45:56,745 Current Learning Rate: 0.0009999383 +2025-02-21 23:45:57,491 Train Loss: 0.0191960, Val Loss: 0.0003263 +2025-02-21 23:45:57,491 Epoch 2/2000 +2025-02-21 23:57:23,478 Current Learning Rate: 0.0009997533 +2025-02-21 23:57:24,289 Train Loss: 0.0002348, Val Loss: 0.0000897 +2025-02-21 23:57:24,290 Epoch 3/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_2step_finetune_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_2step_finetune_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..c6599613195b223937e60ae3129ca4734ac31311 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_2step_finetune_training_log.log @@ -0,0 +1,93 @@ +2025-02-27 01:29:00,335 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 01:29:00,369 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 01:29:00,384 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 01:29:00,552 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 01:29:00,568 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 01:29:00,610 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 01:29:00,631 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 01:29:00,635 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 01:29:42,899 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 01:29:43,071 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 01:29:43,106 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 01:29:43,112 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 01:29:43,355 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 01:29:43,394 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 01:29:43,423 Added key: store_based_barrier_key:1 to store for 
rank: 0 +2025-02-27 01:29:43,430 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 01:30:47,739 Loading pretrained model from /jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-27 01:30:49,752 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", 
"temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", 
"temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", 
"temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", 
"atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", 
"module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", 
"module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", 
"module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", 
"module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", 
"module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:30:57,785 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", 
"temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", 
"temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", 
"temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", 
"atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", 
"module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", 
"module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", 
"module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", 
"module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", 
"module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:31:00,690 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", 
"temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", 
"temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", 
"temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", 
"temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", 
"module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", 
"module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", 
"module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", 
"module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:31:01,602 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", 
"temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", 
"temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", 
"temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", 
"temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". 
+2025-02-27 01:31:02,828 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", 
"temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", 
"temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", 
"temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", 
"atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", 
"module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", 
"module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", 
"module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", 
"module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:31:06,769 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", 
"temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", 
"temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", 
"temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", 
"atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", 
"module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", 
"module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", 
"module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", 
"module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", 
"module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:31:08,137 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", 
"temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", 
"temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", 
"temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", 
"atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", 
"module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", 
"module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", 
"module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", 
"module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", 
"module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:31:10,427 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", 
"temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", 
"temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", 
"temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", 
"temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", 
"module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", 
"module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", 
"module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", 
"module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:31:43,414 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 01:31:43,488 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 01:31:43,587 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 01:31:43,605 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 01:31:43,705 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 01:31:43,757 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 01:31:43,764 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 01:31:43,771 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 01:32:57,523 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", 
"atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", 
"temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", 
"temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", 
"temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". 
+2025-02-27 01:32:58,469 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", 
"temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", 
"temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", 
"temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", 
"atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", 
"module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", 
"module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", 
"module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", 
"module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:33:00,695 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", 
"temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", 
"temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", 
"temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", 
"atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", 
"module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", 
"module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", 
"module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", 
"module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", 
"module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:33:10,008 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", 
"temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", 
"temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", 
"temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", 
"atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", 
"module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", 
"module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", 
"module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", 
"module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", 
"module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:33:11,795 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", 
"temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", 
"temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", 
"temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", 
"temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", 
"module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", 
"module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", 
"module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", 
"module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:33:13,356 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", 
"temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", 
"temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", 
"temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", 
"temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". 
+2025-02-27 01:33:16,885 Loading pretrained model from /jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-27 01:33:17,264 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", 
"temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", 
"temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", 
"temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", 
"atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", 
"module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", 
"module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", 
"module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", 
"module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:33:17,302 Error loading pretrained model: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", 
"temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", 
"temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", 
"temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", 
"atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", 
"module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", 
"module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", 
"module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", 
"module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", 
"module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-27 01:40:09,065 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 01:40:09,103 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 01:40:09,180 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 01:40:09,193 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 01:40:09,242 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 01:40:09,265 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 01:40:09,292 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 01:40:09,308 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 01:40:41,660 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 01:40:41,710 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 01:40:41,794 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 01:40:41,812 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 01:40:41,819 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 01:40:41,838 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 01:40:41,847 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 01:40:41,851 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 01:41:43,498 Error loading pretrained model: 
Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", 
"temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", 
"temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", 
"temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", 
"atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", 
"module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", 
"module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", 
"module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", 
"module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", 
"module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_inference.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_inference.log new file mode 100644 index 0000000000000000000000000000000000000000..6a4e3dadd32ca7e6c2c370904b21b50a4979f3bb --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_inference.log @@ -0,0 +1,189 @@ +2025-02-24 17:43:57,849 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-24 17:43:57,869 开始推理... +2025-02-25 00:15:14,613 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 00:15:14,630 开始单批次推理... +2025-02-25 00:15:24,673 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 00:15:24,673 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 00:15:24,673 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:15:24,673 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:15:24,675 输入数据范围:[-1.72, 1.93] +2025-02-25 00:15:24,677 输出数据范围:[-1.66, 1.90] +2025-02-25 00:15:24,681 单批次推理完成! 
+2025-02-25 00:23:52,236 缺失的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 
'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 
'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 
'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 
'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 00:23:52,237 意外的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 
'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 
'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 
'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 
'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 
'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 00:23:52,237 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 00:23:52,262 开始单批次推理... +2025-02-25 00:24:05,732 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 00:24:05,737 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 00:24:05,737 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:24:05,737 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:24:05,739 输入数据范围:[-1.72, 1.93] +2025-02-25 00:24:05,740 输出数据范围:[-3.44, 2.91] +2025-02-25 00:24:05,744 单批次推理完成! +2025-02-25 00:24:57,827 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 00:24:57,856 开始单批次推理... 
+2025-02-25 00:25:00,397 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 00:25:00,401 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 00:25:00,401 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:25:00,402 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 00:25:00,404 输入数据范围:[-1.72, 1.93] +2025-02-25 00:25:00,406 输出数据范围:[-1.66, 1.99] +2025-02-25 00:25:00,410 单批次推理完成! +2025-02-25 09:34:00,385 缺失的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 
'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 
'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 
'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 
'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 
'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 09:34:00,386 意外的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 
'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 
'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 
'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 
'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 09:34:00,386 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 09:34:00,407 开始单批次推理... +2025-02-25 09:34:02,599 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 09:34:02,603 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 09:34:02,604 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:34:02,604 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:34:02,606 输入数据范围:[-1.72, 1.93] +2025-02-25 09:34:02,607 输出数据范围:[-2.29, 2.43] +2025-02-25 09:34:02,611 单批次推理完成! 
+2025-02-25 09:35:09,583 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 09:35:09,608 开始单批次推理... +2025-02-25 09:35:19,024 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 09:35:19,025 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 09:35:19,025 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:35:19,025 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 09:35:19,026 输入数据范围:[-1.72, 1.93] +2025-02-25 09:35:19,028 输出数据范围:[-1.61, 1.90] +2025-02-25 09:35:19,032 单批次推理完成! +2025-02-25 10:37:36,163 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 10:37:36,183 开始单批次推理... +2025-02-25 10:37:45,889 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 10:37:45,889 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 10:37:45,889 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 10:37:45,889 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 10:37:45,891 输入数据范围:[-1.72, 1.93] +2025-02-25 10:37:45,893 输出数据范围:[-1.64, 1.90] +2025-02-25 10:37:45,896 单批次推理完成! +2025-02-25 14:02:32,148 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 14:02:32,165 开始单批次推理... +2025-02-25 14:02:42,169 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 14:02:42,169 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 14:02:42,170 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:02:42,170 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:02:42,173 输入数据范围:[-1.72, 1.93] +2025-02-25 14:02:42,175 输出数据范围:[-1.64, 1.86] +2025-02-25 14:02:42,180 单批次推理完成! 
+2025-02-25 14:19:43,204 缺失的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 
'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 
'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 
'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 
'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 14:19:43,205 意外的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 
'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 
'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 
'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 
'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 
'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 14:19:43,205 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 14:19:43,229 开始单批次推理... +2025-02-25 14:19:52,934 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 14:19:52,938 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 14:19:52,938 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:19:52,938 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:19:52,940 输入数据范围:[-1.72, 1.93] +2025-02-25 14:19:52,943 输出数据范围:[-3.44, 2.91] +2025-02-25 14:19:52,948 单批次推理完成! +2025-02-25 14:21:09,528 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Simvp_exp1_20250224_best_model.pth +2025-02-25 14:21:09,560 开始单批次推理... 
+2025-02-25 14:21:11,731 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 14:21:11,736 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 14:21:11,736 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:21:11,736 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 14:21:11,738 输入数据范围:[-1.72, 1.93] +2025-02-25 14:21:11,740 输出数据范围:[-1.63, 1.90] +2025-02-25 14:21:11,744 单批次推理完成! +2025-02-25 17:17:16,495 缺失的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 
'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 
'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 
'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 
'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 
'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 17:17:16,496 意外的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 
'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 
'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 
'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 
'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 17:17:16,496 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 17:17:16,515 开始单批次推理... +2025-02-25 17:17:18,505 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 17:17:18,511 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 17:17:18,512 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:17:18,512 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:17:18,514 输入数据范围:[-1.72, 1.93] +2025-02-25 17:17:18,516 输出数据范围:[-2.29, 2.43] +2025-02-25 17:17:18,520 单批次推理完成! 
+2025-02-25 17:18:40,686 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 17:18:40,700 开始单批次推理... +2025-02-25 17:18:50,365 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 17:18:50,372 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 17:18:50,372 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:18:50,372 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 17:18:50,373 输入数据范围:[-1.72, 1.93] +2025-02-25 17:18:50,376 输出数据范围:[-1.64, 1.89] +2025-02-25 17:18:50,380 单批次推理完成! +2025-02-25 20:13:17,009 缺失的权重键:['enc.enc.0.conv.conv.weight', 'enc.enc.0.conv.conv.bias', 'enc.enc.0.conv.norm.weight', 'enc.enc.0.conv.norm.bias', 'enc.enc.1.conv.conv.weight', 'enc.enc.1.conv.conv.bias', 'enc.enc.1.conv.norm.weight', 'enc.enc.1.conv.norm.bias', 'enc.enc.2.conv.conv.weight', 'enc.enc.2.conv.conv.bias', 'enc.enc.2.conv.norm.weight', 'enc.enc.2.conv.norm.bias', 'enc.enc.3.conv.conv.weight', 'enc.enc.3.conv.conv.bias', 'enc.enc.3.conv.norm.weight', 'enc.enc.3.conv.norm.bias', 'hid.enc.0.conv1.weight', 'hid.enc.0.conv1.bias', 'hid.enc.0.layers.0.conv.weight', 'hid.enc.0.layers.0.conv.bias', 'hid.enc.0.layers.0.norm.weight', 'hid.enc.0.layers.0.norm.bias', 'hid.enc.0.layers.1.conv.weight', 'hid.enc.0.layers.1.conv.bias', 'hid.enc.0.layers.1.norm.weight', 'hid.enc.0.layers.1.norm.bias', 'hid.enc.0.layers.2.conv.weight', 'hid.enc.0.layers.2.conv.bias', 'hid.enc.0.layers.2.norm.weight', 'hid.enc.0.layers.2.norm.bias', 'hid.enc.0.layers.3.conv.weight', 'hid.enc.0.layers.3.conv.bias', 'hid.enc.0.layers.3.norm.weight', 'hid.enc.0.layers.3.norm.bias', 'hid.enc.1.conv1.weight', 'hid.enc.1.conv1.bias', 'hid.enc.1.layers.0.conv.weight', 'hid.enc.1.layers.0.conv.bias', 'hid.enc.1.layers.0.norm.weight', 'hid.enc.1.layers.0.norm.bias', 'hid.enc.1.layers.1.conv.weight', 'hid.enc.1.layers.1.conv.bias', 'hid.enc.1.layers.1.norm.weight', 
'hid.enc.1.layers.1.norm.bias', 'hid.enc.1.layers.2.conv.weight', 'hid.enc.1.layers.2.conv.bias', 'hid.enc.1.layers.2.norm.weight', 'hid.enc.1.layers.2.norm.bias', 'hid.enc.1.layers.3.conv.weight', 'hid.enc.1.layers.3.conv.bias', 'hid.enc.1.layers.3.norm.weight', 'hid.enc.1.layers.3.norm.bias', 'hid.enc.2.conv1.weight', 'hid.enc.2.conv1.bias', 'hid.enc.2.layers.0.conv.weight', 'hid.enc.2.layers.0.conv.bias', 'hid.enc.2.layers.0.norm.weight', 'hid.enc.2.layers.0.norm.bias', 'hid.enc.2.layers.1.conv.weight', 'hid.enc.2.layers.1.conv.bias', 'hid.enc.2.layers.1.norm.weight', 'hid.enc.2.layers.1.norm.bias', 'hid.enc.2.layers.2.conv.weight', 'hid.enc.2.layers.2.conv.bias', 'hid.enc.2.layers.2.norm.weight', 'hid.enc.2.layers.2.norm.bias', 'hid.enc.2.layers.3.conv.weight', 'hid.enc.2.layers.3.conv.bias', 'hid.enc.2.layers.3.norm.weight', 'hid.enc.2.layers.3.norm.bias', 'hid.enc.3.conv1.weight', 'hid.enc.3.conv1.bias', 'hid.enc.3.layers.0.conv.weight', 'hid.enc.3.layers.0.conv.bias', 'hid.enc.3.layers.0.norm.weight', 'hid.enc.3.layers.0.norm.bias', 'hid.enc.3.layers.1.conv.weight', 'hid.enc.3.layers.1.conv.bias', 'hid.enc.3.layers.1.norm.weight', 'hid.enc.3.layers.1.norm.bias', 'hid.enc.3.layers.2.conv.weight', 'hid.enc.3.layers.2.conv.bias', 'hid.enc.3.layers.2.norm.weight', 'hid.enc.3.layers.2.norm.bias', 'hid.enc.3.layers.3.conv.weight', 'hid.enc.3.layers.3.conv.bias', 'hid.enc.3.layers.3.norm.weight', 'hid.enc.3.layers.3.norm.bias', 'hid.enc.4.conv1.weight', 'hid.enc.4.conv1.bias', 'hid.enc.4.layers.0.conv.weight', 'hid.enc.4.layers.0.conv.bias', 'hid.enc.4.layers.0.norm.weight', 'hid.enc.4.layers.0.norm.bias', 'hid.enc.4.layers.1.conv.weight', 'hid.enc.4.layers.1.conv.bias', 'hid.enc.4.layers.1.norm.weight', 'hid.enc.4.layers.1.norm.bias', 'hid.enc.4.layers.2.conv.weight', 'hid.enc.4.layers.2.conv.bias', 'hid.enc.4.layers.2.norm.weight', 'hid.enc.4.layers.2.norm.bias', 'hid.enc.4.layers.3.conv.weight', 'hid.enc.4.layers.3.conv.bias', 'hid.enc.4.layers.3.norm.weight', 
'hid.enc.4.layers.3.norm.bias', 'hid.enc.5.conv1.weight', 'hid.enc.5.conv1.bias', 'hid.enc.5.layers.0.conv.weight', 'hid.enc.5.layers.0.conv.bias', 'hid.enc.5.layers.0.norm.weight', 'hid.enc.5.layers.0.norm.bias', 'hid.enc.5.layers.1.conv.weight', 'hid.enc.5.layers.1.conv.bias', 'hid.enc.5.layers.1.norm.weight', 'hid.enc.5.layers.1.norm.bias', 'hid.enc.5.layers.2.conv.weight', 'hid.enc.5.layers.2.conv.bias', 'hid.enc.5.layers.2.norm.weight', 'hid.enc.5.layers.2.norm.bias', 'hid.enc.5.layers.3.conv.weight', 'hid.enc.5.layers.3.conv.bias', 'hid.enc.5.layers.3.norm.weight', 'hid.enc.5.layers.3.norm.bias', 'hid.enc.6.conv1.weight', 'hid.enc.6.conv1.bias', 'hid.enc.6.layers.0.conv.weight', 'hid.enc.6.layers.0.conv.bias', 'hid.enc.6.layers.0.norm.weight', 'hid.enc.6.layers.0.norm.bias', 'hid.enc.6.layers.1.conv.weight', 'hid.enc.6.layers.1.conv.bias', 'hid.enc.6.layers.1.norm.weight', 'hid.enc.6.layers.1.norm.bias', 'hid.enc.6.layers.2.conv.weight', 'hid.enc.6.layers.2.conv.bias', 'hid.enc.6.layers.2.norm.weight', 'hid.enc.6.layers.2.norm.bias', 'hid.enc.6.layers.3.conv.weight', 'hid.enc.6.layers.3.conv.bias', 'hid.enc.6.layers.3.norm.weight', 'hid.enc.6.layers.3.norm.bias', 'hid.enc.7.conv1.weight', 'hid.enc.7.conv1.bias', 'hid.enc.7.layers.0.conv.weight', 'hid.enc.7.layers.0.conv.bias', 'hid.enc.7.layers.0.norm.weight', 'hid.enc.7.layers.0.norm.bias', 'hid.enc.7.layers.1.conv.weight', 'hid.enc.7.layers.1.conv.bias', 'hid.enc.7.layers.1.norm.weight', 'hid.enc.7.layers.1.norm.bias', 'hid.enc.7.layers.2.conv.weight', 'hid.enc.7.layers.2.conv.bias', 'hid.enc.7.layers.2.norm.weight', 'hid.enc.7.layers.2.norm.bias', 'hid.enc.7.layers.3.conv.weight', 'hid.enc.7.layers.3.conv.bias', 'hid.enc.7.layers.3.norm.weight', 'hid.enc.7.layers.3.norm.bias', 'hid.dec.0.conv1.weight', 'hid.dec.0.conv1.bias', 'hid.dec.0.layers.0.conv.weight', 'hid.dec.0.layers.0.conv.bias', 'hid.dec.0.layers.0.norm.weight', 'hid.dec.0.layers.0.norm.bias', 'hid.dec.0.layers.1.conv.weight', 
'hid.dec.0.layers.1.conv.bias', 'hid.dec.0.layers.1.norm.weight', 'hid.dec.0.layers.1.norm.bias', 'hid.dec.0.layers.2.conv.weight', 'hid.dec.0.layers.2.conv.bias', 'hid.dec.0.layers.2.norm.weight', 'hid.dec.0.layers.2.norm.bias', 'hid.dec.0.layers.3.conv.weight', 'hid.dec.0.layers.3.conv.bias', 'hid.dec.0.layers.3.norm.weight', 'hid.dec.0.layers.3.norm.bias', 'hid.dec.1.conv1.weight', 'hid.dec.1.conv1.bias', 'hid.dec.1.layers.0.conv.weight', 'hid.dec.1.layers.0.conv.bias', 'hid.dec.1.layers.0.norm.weight', 'hid.dec.1.layers.0.norm.bias', 'hid.dec.1.layers.1.conv.weight', 'hid.dec.1.layers.1.conv.bias', 'hid.dec.1.layers.1.norm.weight', 'hid.dec.1.layers.1.norm.bias', 'hid.dec.1.layers.2.conv.weight', 'hid.dec.1.layers.2.conv.bias', 'hid.dec.1.layers.2.norm.weight', 'hid.dec.1.layers.2.norm.bias', 'hid.dec.1.layers.3.conv.weight', 'hid.dec.1.layers.3.conv.bias', 'hid.dec.1.layers.3.norm.weight', 'hid.dec.1.layers.3.norm.bias', 'hid.dec.2.conv1.weight', 'hid.dec.2.conv1.bias', 'hid.dec.2.layers.0.conv.weight', 'hid.dec.2.layers.0.conv.bias', 'hid.dec.2.layers.0.norm.weight', 'hid.dec.2.layers.0.norm.bias', 'hid.dec.2.layers.1.conv.weight', 'hid.dec.2.layers.1.conv.bias', 'hid.dec.2.layers.1.norm.weight', 'hid.dec.2.layers.1.norm.bias', 'hid.dec.2.layers.2.conv.weight', 'hid.dec.2.layers.2.conv.bias', 'hid.dec.2.layers.2.norm.weight', 'hid.dec.2.layers.2.norm.bias', 'hid.dec.2.layers.3.conv.weight', 'hid.dec.2.layers.3.conv.bias', 'hid.dec.2.layers.3.norm.weight', 'hid.dec.2.layers.3.norm.bias', 'hid.dec.3.conv1.weight', 'hid.dec.3.conv1.bias', 'hid.dec.3.layers.0.conv.weight', 'hid.dec.3.layers.0.conv.bias', 'hid.dec.3.layers.0.norm.weight', 'hid.dec.3.layers.0.norm.bias', 'hid.dec.3.layers.1.conv.weight', 'hid.dec.3.layers.1.conv.bias', 'hid.dec.3.layers.1.norm.weight', 'hid.dec.3.layers.1.norm.bias', 'hid.dec.3.layers.2.conv.weight', 'hid.dec.3.layers.2.conv.bias', 'hid.dec.3.layers.2.norm.weight', 'hid.dec.3.layers.2.norm.bias', 'hid.dec.3.layers.3.conv.weight', 
'hid.dec.3.layers.3.conv.bias', 'hid.dec.3.layers.3.norm.weight', 'hid.dec.3.layers.3.norm.bias', 'hid.dec.4.conv1.weight', 'hid.dec.4.conv1.bias', 'hid.dec.4.layers.0.conv.weight', 'hid.dec.4.layers.0.conv.bias', 'hid.dec.4.layers.0.norm.weight', 'hid.dec.4.layers.0.norm.bias', 'hid.dec.4.layers.1.conv.weight', 'hid.dec.4.layers.1.conv.bias', 'hid.dec.4.layers.1.norm.weight', 'hid.dec.4.layers.1.norm.bias', 'hid.dec.4.layers.2.conv.weight', 'hid.dec.4.layers.2.conv.bias', 'hid.dec.4.layers.2.norm.weight', 'hid.dec.4.layers.2.norm.bias', 'hid.dec.4.layers.3.conv.weight', 'hid.dec.4.layers.3.conv.bias', 'hid.dec.4.layers.3.norm.weight', 'hid.dec.4.layers.3.norm.bias', 'hid.dec.5.conv1.weight', 'hid.dec.5.conv1.bias', 'hid.dec.5.layers.0.conv.weight', 'hid.dec.5.layers.0.conv.bias', 'hid.dec.5.layers.0.norm.weight', 'hid.dec.5.layers.0.norm.bias', 'hid.dec.5.layers.1.conv.weight', 'hid.dec.5.layers.1.conv.bias', 'hid.dec.5.layers.1.norm.weight', 'hid.dec.5.layers.1.norm.bias', 'hid.dec.5.layers.2.conv.weight', 'hid.dec.5.layers.2.conv.bias', 'hid.dec.5.layers.2.norm.weight', 'hid.dec.5.layers.2.norm.bias', 'hid.dec.5.layers.3.conv.weight', 'hid.dec.5.layers.3.conv.bias', 'hid.dec.5.layers.3.norm.weight', 'hid.dec.5.layers.3.norm.bias', 'hid.dec.6.conv1.weight', 'hid.dec.6.conv1.bias', 'hid.dec.6.layers.0.conv.weight', 'hid.dec.6.layers.0.conv.bias', 'hid.dec.6.layers.0.norm.weight', 'hid.dec.6.layers.0.norm.bias', 'hid.dec.6.layers.1.conv.weight', 'hid.dec.6.layers.1.conv.bias', 'hid.dec.6.layers.1.norm.weight', 'hid.dec.6.layers.1.norm.bias', 'hid.dec.6.layers.2.conv.weight', 'hid.dec.6.layers.2.conv.bias', 'hid.dec.6.layers.2.norm.weight', 'hid.dec.6.layers.2.norm.bias', 'hid.dec.6.layers.3.conv.weight', 'hid.dec.6.layers.3.conv.bias', 'hid.dec.6.layers.3.norm.weight', 'hid.dec.6.layers.3.norm.bias', 'hid.dec.7.conv1.weight', 'hid.dec.7.conv1.bias', 'hid.dec.7.layers.0.conv.weight', 'hid.dec.7.layers.0.conv.bias', 'hid.dec.7.layers.0.norm.weight', 
'hid.dec.7.layers.0.norm.bias', 'hid.dec.7.layers.1.conv.weight', 'hid.dec.7.layers.1.conv.bias', 'hid.dec.7.layers.1.norm.weight', 'hid.dec.7.layers.1.norm.bias', 'hid.dec.7.layers.2.conv.weight', 'hid.dec.7.layers.2.conv.bias', 'hid.dec.7.layers.2.norm.weight', 'hid.dec.7.layers.2.norm.bias', 'hid.dec.7.layers.3.conv.weight', 'hid.dec.7.layers.3.conv.bias', 'hid.dec.7.layers.3.norm.weight', 'hid.dec.7.layers.3.norm.bias', 'dec.dec.0.conv.conv.weight', 'dec.dec.0.conv.conv.bias', 'dec.dec.0.conv.norm.weight', 'dec.dec.0.conv.norm.bias', 'dec.dec.1.conv.conv.weight', 'dec.dec.1.conv.conv.bias', 'dec.dec.1.conv.norm.weight', 'dec.dec.1.conv.norm.bias', 'dec.dec.2.conv.conv.weight', 'dec.dec.2.conv.conv.bias', 'dec.dec.2.conv.norm.weight', 'dec.dec.2.conv.norm.bias', 'dec.dec.3.conv.conv.weight', 'dec.dec.3.conv.conv.bias', 'dec.dec.3.conv.norm.weight', 'dec.dec.3.conv.norm.bias', 'dec.readout.weight', 'dec.readout.bias'] +2025-02-25 20:13:17,010 意外的权重键:['atmospheric_encoder.enc.0.conv.conv.weight', 'atmospheric_encoder.enc.0.conv.conv.bias', 'atmospheric_encoder.enc.0.conv.norm.weight', 'atmospheric_encoder.enc.0.conv.norm.bias', 'atmospheric_encoder.enc.1.conv.conv.weight', 'atmospheric_encoder.enc.1.conv.conv.bias', 'atmospheric_encoder.enc.1.conv.norm.weight', 'atmospheric_encoder.enc.1.conv.norm.bias', 'atmospheric_encoder.enc.2.conv.conv.weight', 'atmospheric_encoder.enc.2.conv.conv.bias', 'atmospheric_encoder.enc.2.conv.norm.weight', 'atmospheric_encoder.enc.2.conv.norm.bias', 'atmospheric_encoder.enc.3.conv.conv.weight', 'atmospheric_encoder.enc.3.conv.conv.bias', 'atmospheric_encoder.enc.3.conv.norm.weight', 'atmospheric_encoder.enc.3.conv.norm.bias', 'temporal_evolution.enc.0.block.pos_embed.weight', 'temporal_evolution.enc.0.block.pos_embed.bias', 'temporal_evolution.enc.0.block.norm1.weight', 'temporal_evolution.enc.0.block.norm1.bias', 'temporal_evolution.enc.0.block.norm1.running_mean', 'temporal_evolution.enc.0.block.norm1.running_var', 
'temporal_evolution.enc.0.block.norm1.num_batches_tracked', 'temporal_evolution.enc.0.block.conv1.weight', 'temporal_evolution.enc.0.block.conv1.bias', 'temporal_evolution.enc.0.block.conv2.weight', 'temporal_evolution.enc.0.block.conv2.bias', 'temporal_evolution.enc.0.block.attn.weight', 'temporal_evolution.enc.0.block.attn.bias', 'temporal_evolution.enc.0.block.norm2.weight', 'temporal_evolution.enc.0.block.norm2.bias', 'temporal_evolution.enc.0.block.norm2.running_mean', 'temporal_evolution.enc.0.block.norm2.running_var', 'temporal_evolution.enc.0.block.norm2.num_batches_tracked', 'temporal_evolution.enc.0.block.mlp.fc1.weight', 'temporal_evolution.enc.0.block.mlp.fc1.bias', 'temporal_evolution.enc.0.block.mlp.fc2.weight', 'temporal_evolution.enc.0.block.mlp.fc2.bias', 'temporal_evolution.enc.0.reduction.weight', 'temporal_evolution.enc.0.reduction.bias', 'temporal_evolution.enc.1.block.gamma_1', 'temporal_evolution.enc.1.block.gamma_2', 'temporal_evolution.enc.1.block.pos_embed.weight', 'temporal_evolution.enc.1.block.pos_embed.bias', 'temporal_evolution.enc.1.block.norm1.weight', 'temporal_evolution.enc.1.block.norm1.bias', 'temporal_evolution.enc.1.block.attn.qkv.weight', 'temporal_evolution.enc.1.block.attn.qkv.bias', 'temporal_evolution.enc.1.block.attn.proj.weight', 'temporal_evolution.enc.1.block.attn.proj.bias', 'temporal_evolution.enc.1.block.norm2.weight', 'temporal_evolution.enc.1.block.norm2.bias', 'temporal_evolution.enc.1.block.mlp.fc1.weight', 'temporal_evolution.enc.1.block.mlp.fc1.bias', 'temporal_evolution.enc.1.block.mlp.fc2.weight', 'temporal_evolution.enc.1.block.mlp.fc2.bias', 'temporal_evolution.enc.2.block.gamma_1', 'temporal_evolution.enc.2.block.gamma_2', 'temporal_evolution.enc.2.block.pos_embed.weight', 'temporal_evolution.enc.2.block.pos_embed.bias', 'temporal_evolution.enc.2.block.norm1.weight', 'temporal_evolution.enc.2.block.norm1.bias', 'temporal_evolution.enc.2.block.attn.qkv.weight', 
'temporal_evolution.enc.2.block.attn.qkv.bias', 'temporal_evolution.enc.2.block.attn.proj.weight', 'temporal_evolution.enc.2.block.attn.proj.bias', 'temporal_evolution.enc.2.block.norm2.weight', 'temporal_evolution.enc.2.block.norm2.bias', 'temporal_evolution.enc.2.block.mlp.fc1.weight', 'temporal_evolution.enc.2.block.mlp.fc1.bias', 'temporal_evolution.enc.2.block.mlp.fc2.weight', 'temporal_evolution.enc.2.block.mlp.fc2.bias', 'temporal_evolution.enc.3.block.gamma_1', 'temporal_evolution.enc.3.block.gamma_2', 'temporal_evolution.enc.3.block.pos_embed.weight', 'temporal_evolution.enc.3.block.pos_embed.bias', 'temporal_evolution.enc.3.block.norm1.weight', 'temporal_evolution.enc.3.block.norm1.bias', 'temporal_evolution.enc.3.block.attn.qkv.weight', 'temporal_evolution.enc.3.block.attn.qkv.bias', 'temporal_evolution.enc.3.block.attn.proj.weight', 'temporal_evolution.enc.3.block.attn.proj.bias', 'temporal_evolution.enc.3.block.norm2.weight', 'temporal_evolution.enc.3.block.norm2.bias', 'temporal_evolution.enc.3.block.mlp.fc1.weight', 'temporal_evolution.enc.3.block.mlp.fc1.bias', 'temporal_evolution.enc.3.block.mlp.fc2.weight', 'temporal_evolution.enc.3.block.mlp.fc2.bias', 'temporal_evolution.enc.4.block.gamma_1', 'temporal_evolution.enc.4.block.gamma_2', 'temporal_evolution.enc.4.block.pos_embed.weight', 'temporal_evolution.enc.4.block.pos_embed.bias', 'temporal_evolution.enc.4.block.norm1.weight', 'temporal_evolution.enc.4.block.norm1.bias', 'temporal_evolution.enc.4.block.attn.qkv.weight', 'temporal_evolution.enc.4.block.attn.qkv.bias', 'temporal_evolution.enc.4.block.attn.proj.weight', 'temporal_evolution.enc.4.block.attn.proj.bias', 'temporal_evolution.enc.4.block.norm2.weight', 'temporal_evolution.enc.4.block.norm2.bias', 'temporal_evolution.enc.4.block.mlp.fc1.weight', 'temporal_evolution.enc.4.block.mlp.fc1.bias', 'temporal_evolution.enc.4.block.mlp.fc2.weight', 'temporal_evolution.enc.4.block.mlp.fc2.bias', 'temporal_evolution.enc.5.block.gamma_1', 
'temporal_evolution.enc.5.block.gamma_2', 'temporal_evolution.enc.5.block.pos_embed.weight', 'temporal_evolution.enc.5.block.pos_embed.bias', 'temporal_evolution.enc.5.block.norm1.weight', 'temporal_evolution.enc.5.block.norm1.bias', 'temporal_evolution.enc.5.block.attn.qkv.weight', 'temporal_evolution.enc.5.block.attn.qkv.bias', 'temporal_evolution.enc.5.block.attn.proj.weight', 'temporal_evolution.enc.5.block.attn.proj.bias', 'temporal_evolution.enc.5.block.norm2.weight', 'temporal_evolution.enc.5.block.norm2.bias', 'temporal_evolution.enc.5.block.mlp.fc1.weight', 'temporal_evolution.enc.5.block.mlp.fc1.bias', 'temporal_evolution.enc.5.block.mlp.fc2.weight', 'temporal_evolution.enc.5.block.mlp.fc2.bias', 'temporal_evolution.enc.6.block.gamma_1', 'temporal_evolution.enc.6.block.gamma_2', 'temporal_evolution.enc.6.block.pos_embed.weight', 'temporal_evolution.enc.6.block.pos_embed.bias', 'temporal_evolution.enc.6.block.norm1.weight', 'temporal_evolution.enc.6.block.norm1.bias', 'temporal_evolution.enc.6.block.attn.qkv.weight', 'temporal_evolution.enc.6.block.attn.qkv.bias', 'temporal_evolution.enc.6.block.attn.proj.weight', 'temporal_evolution.enc.6.block.attn.proj.bias', 'temporal_evolution.enc.6.block.norm2.weight', 'temporal_evolution.enc.6.block.norm2.bias', 'temporal_evolution.enc.6.block.mlp.fc1.weight', 'temporal_evolution.enc.6.block.mlp.fc1.bias', 'temporal_evolution.enc.6.block.mlp.fc2.weight', 'temporal_evolution.enc.6.block.mlp.fc2.bias', 'temporal_evolution.enc.7.block.pos_embed.weight', 'temporal_evolution.enc.7.block.pos_embed.bias', 'temporal_evolution.enc.7.block.norm1.weight', 'temporal_evolution.enc.7.block.norm1.bias', 'temporal_evolution.enc.7.block.norm1.running_mean', 'temporal_evolution.enc.7.block.norm1.running_var', 'temporal_evolution.enc.7.block.norm1.num_batches_tracked', 'temporal_evolution.enc.7.block.conv1.weight', 'temporal_evolution.enc.7.block.conv1.bias', 'temporal_evolution.enc.7.block.conv2.weight', 
'temporal_evolution.enc.7.block.conv2.bias', 'temporal_evolution.enc.7.block.attn.weight', 'temporal_evolution.enc.7.block.attn.bias', 'temporal_evolution.enc.7.block.norm2.weight', 'temporal_evolution.enc.7.block.norm2.bias', 'temporal_evolution.enc.7.block.norm2.running_mean', 'temporal_evolution.enc.7.block.norm2.running_var', 'temporal_evolution.enc.7.block.norm2.num_batches_tracked', 'temporal_evolution.enc.7.block.mlp.fc1.weight', 'temporal_evolution.enc.7.block.mlp.fc1.bias', 'temporal_evolution.enc.7.block.mlp.fc2.weight', 'temporal_evolution.enc.7.block.mlp.fc2.bias', 'temporal_evolution.enc.7.reduction.weight', 'temporal_evolution.enc.7.reduction.bias', 'atmospheric_decoder.dec.0.conv.conv.weight', 'atmospheric_decoder.dec.0.conv.conv.bias', 'atmospheric_decoder.dec.0.conv.norm.weight', 'atmospheric_decoder.dec.0.conv.norm.bias', 'atmospheric_decoder.dec.1.conv.conv.weight', 'atmospheric_decoder.dec.1.conv.conv.bias', 'atmospheric_decoder.dec.1.conv.norm.weight', 'atmospheric_decoder.dec.1.conv.norm.bias', 'atmospheric_decoder.dec.2.conv.conv.weight', 'atmospheric_decoder.dec.2.conv.conv.bias', 'atmospheric_decoder.dec.2.conv.norm.weight', 'atmospheric_decoder.dec.2.conv.norm.bias', 'atmospheric_decoder.dec.3.conv.conv.weight', 'atmospheric_decoder.dec.3.conv.conv.bias', 'atmospheric_decoder.dec.3.conv.norm.weight', 'atmospheric_decoder.dec.3.conv.norm.bias', 'atmospheric_decoder.readout.weight', 'atmospheric_decoder.readout.bias'] +2025-02-25 20:13:17,010 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 20:13:17,031 开始推理... +2025-02-25 20:14:38,524 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 20:14:38,539 开始单批次推理... 
+2025-02-25 20:14:48,432 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 20:14:48,434 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 20:14:48,435 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 20:14:48,435 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 20:14:48,436 输入数据范围:[-1.72, 1.93] +2025-02-25 20:14:48,437 输出数据范围:[-1.67, 1.90] +2025-02-25 20:14:48,440 单批次推理完成! +2025-02-25 22:11:27,158 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-25 22:11:27,192 开始单批次推理... +2025-02-25 22:11:37,145 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-25 22:11:37,148 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-25 22:11:37,148 目标数据形状:(2, 10, 2, 256, 256) +2025-02-25 22:11:37,148 输出数据形状:(2, 10, 2, 256, 256) +2025-02-25 22:11:37,150 输入数据范围:[-1.72, 1.93] +2025-02-25 22:11:37,152 输出数据范围:[-1.64, 1.90] +2025-02-25 22:11:37,156 单批次推理完成! +2025-02-26 00:34:30,831 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 00:34:30,864 开始单批次推理... +2025-02-26 00:34:40,977 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 00:34:40,977 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 00:34:40,978 目标数据形状:(2, 10, 2, 256, 256) +2025-02-26 00:34:40,978 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 00:34:40,979 输入数据范围:[-1.72, 1.93] +2025-02-26 00:34:40,981 输出数据范围:[-1.64, 1.92] +2025-02-26 00:34:40,984 单批次推理完成! +2025-02-26 09:46:25,052 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 09:46:25,104 开始单批次推理... 
+2025-02-26 09:46:36,309 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 09:46:36,314 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 09:46:36,314 目标数据形状:(2, 10, 2, 256, 256) +2025-02-26 09:46:36,315 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 09:46:36,316 输入数据范围:[-1.72, 1.93] +2025-02-26 09:46:36,318 输出数据范围:[-1.65, 1.88] +2025-02-26 09:46:36,322 单批次推理完成! +2025-02-26 10:49:37,885 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 10:49:37,909 开始单批次推理... +2025-02-26 10:49:47,961 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 10:49:47,962 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 10:49:47,962 目标数据形状:(2, 10, 2, 256, 256) +2025-02-26 10:49:47,962 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 10:49:47,963 输入数据范围:[-1.72, 1.93] +2025-02-26 10:49:47,965 输出数据范围:[-1.64, 1.89] +2025-02-26 10:49:47,967 单批次推理完成! +2025-02-26 10:50:13,814 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 10:50:13,855 开始单批次推理... +2025-02-26 10:50:24,204 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 10:50:24,208 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 10:50:24,209 目标数据形状:(2, 60, 2, 256, 256) +2025-02-26 10:50:24,209 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 10:50:24,210 输入数据范围:[-1.91, 2.13] +2025-02-26 10:50:24,212 输出数据范围:[-1.77, 2.09] +2025-02-26 10:50:24,215 单批次推理完成! +2025-02-26 10:54:07,390 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 10:54:07,410 开始推理... 
+2025-02-26 10:54:40,214 结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 10:54:40,224 最终输出形状: (1, 60, 2, 256, 256) +2025-02-26 10:54:40,225 预测完成,总步数: 60 +2025-02-26 10:54:40,225 推理完成! +2025-02-26 11:19:12,505 开始滚动推理... +2025-02-26 11:19:14,403 推理过程出错: 'Triton' object has no attribute 'output_length' +2025-02-26 11:19:56,555 开始滚动推理... +2025-02-26 11:21:03,144 预测完成 | 输入形状: torch.Size([2, 10, 2, 256, 256]) | 预测形状: (2, 60, 2, 256, 256) | 目标形状: (2, 60, 2, 256, 256) +2025-02-26 11:21:03,147 输入范围: [-1.91, 2.13] +2025-02-26 11:21:03,169 预测范围: [-1.77, 2.08] +2025-02-26 11:21:03,200 推理流程顺利完成! +2025-02-26 11:25:54,992 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 11:25:55,016 开始单批次推理... +2025-02-26 11:26:05,909 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 11:26:05,909 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 11:26:05,910 目标数据形状:(2, 10, 2, 256, 256) +2025-02-26 11:26:05,910 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 11:26:05,911 输入数据范围:[-1.72, 1.93] +2025-02-26 11:26:05,913 输出数据范围:[-1.63, 1.89] +2025-02-26 11:26:05,915 单批次推理完成! diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_inference_new.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_inference_new.log new file mode 100644 index 0000000000000000000000000000000000000000..e82103b38b34fb4e1e8a63291f8daa62f4619a50 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_inference_new.log @@ -0,0 +1,9 @@ +2025-02-26 15:00:39,982 成功加载模型权重:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/Kuro_Triton_exp1_20250224_best_model.pth +2025-02-26 15:00:40,002 开始单批次推理... 
+2025-02-26 15:00:50,376 单批次结果已保存至:/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results +2025-02-26 15:00:50,378 输入数据形状:(2, 10, 2, 256, 256) (batch_size, seq_len, channels, height, width) +2025-02-26 15:00:50,378 目标数据形状:(2, 10, 2, 256, 256) +2025-02-26 15:00:50,379 输出数据形状:(2, 10, 2, 256, 256) +2025-02-26 15:00:50,380 输入数据范围:[-1.72, 1.93] +2025-02-26 15:00:50,382 输出数据范围:[-1.64, 1.89] +2025-02-26 15:00:50,385 单批次推理完成! diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..2f9d88d00cabf5dfb188f9ee8556e45fc76cb913 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_20250224_training_log.log @@ -0,0 +1,938 @@ +2025-02-24 16:11:07,025 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-24 16:11:07,038 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-24 16:11:07,080 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-24 16:11:07,121 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-24 16:11:07,158 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-24 16:11:07,177 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-24 16:11:07,185 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-24 16:11:07,191 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-24 16:12:13,158 Epoch 1/2000 +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. 
+2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:12:16,907 Reducer buckets have been rebuilt in this iteration. +2025-02-24 16:27:16,028 Current Learning Rate: 0.0009999383 +2025-02-24 16:27:16,817 Train Loss: 0.0406381, Val Loss: 0.0053257 +2025-02-24 16:27:16,818 Epoch 2/2000 +2025-02-24 16:42:18,281 Current Learning Rate: 0.0009997533 +2025-02-24 16:42:19,303 Train Loss: 0.0047488, Val Loss: 0.0037061 +2025-02-24 16:42:19,304 Epoch 3/2000 +2025-02-24 16:57:22,683 Current Learning Rate: 0.0009994449 +2025-02-24 16:57:23,533 Train Loss: 0.0036298, Val Loss: 0.0033693 +2025-02-24 16:57:23,533 Epoch 4/2000 +2025-02-24 17:12:25,186 Current Learning Rate: 0.0009990134 +2025-02-24 17:12:26,059 Train Loss: 0.0032028, Val Loss: 0.0030099 +2025-02-24 17:12:26,059 Epoch 5/2000 +2025-02-24 17:27:27,267 Current Learning Rate: 0.0009984587 +2025-02-24 17:27:28,176 Train Loss: 0.0029359, Val Loss: 0.0027050 +2025-02-24 17:27:28,177 Epoch 6/2000 +2025-02-24 17:42:29,880 Current Learning Rate: 0.0009977810 +2025-02-24 17:42:30,846 Train Loss: 0.0027535, Val Loss: 0.0025139 +2025-02-24 17:42:30,846 Epoch 7/2000 +2025-02-24 17:57:33,515 Current Learning Rate: 0.0009969805 +2025-02-24 17:57:34,512 Train Loss: 0.0026977, Val Loss: 0.0023625 +2025-02-24 17:57:34,513 Epoch 8/2000 +2025-02-24 18:12:37,864 Current Learning Rate: 0.0009960574 +2025-02-24 18:12:37,865 Train Loss: 0.0025400, Val Loss: 0.0024198 +2025-02-24 18:12:37,865 Epoch 9/2000 +2025-02-24 18:27:40,015 Current Learning Rate: 0.0009950118 +2025-02-24 18:27:40,015 Train Loss: 0.0024398, Val Loss: 0.0027710 +2025-02-24 18:27:40,015 Epoch 10/2000 +2025-02-24 18:42:43,127 Current Learning Rate: 0.0009938442 +2025-02-24 18:42:44,154 Train Loss: 0.0023841, Val Loss: 0.0021985 +2025-02-24 18:42:44,154 Epoch 11/2000 +2025-02-24 18:57:46,279 Current Learning Rate: 0.0009925547 +2025-02-24 18:57:46,280 Train Loss: 0.0022101, Val Loss: 0.0022231 +2025-02-24 18:57:46,280 Epoch 12/2000 
+2025-02-24 19:12:48,317 Current Learning Rate: 0.0009911436 +2025-02-24 19:12:49,429 Train Loss: 0.0021464, Val Loss: 0.0019511 +2025-02-24 19:12:49,430 Epoch 13/2000 +2025-02-24 19:27:52,189 Current Learning Rate: 0.0009896114 +2025-02-24 19:27:53,126 Train Loss: 0.0019859, Val Loss: 0.0018656 +2025-02-24 19:27:53,127 Epoch 14/2000 +2025-02-24 19:42:56,220 Current Learning Rate: 0.0009879584 +2025-02-24 19:42:57,122 Train Loss: 0.0018646, Val Loss: 0.0017206 +2025-02-24 19:42:57,123 Epoch 15/2000 +2025-02-24 19:57:59,289 Current Learning Rate: 0.0009861850 +2025-02-24 19:58:00,235 Train Loss: 0.0017463, Val Loss: 0.0016954 +2025-02-24 19:58:00,235 Epoch 16/2000 +2025-02-24 20:13:02,848 Current Learning Rate: 0.0009842916 +2025-02-24 20:13:02,848 Train Loss: 0.0016704, Val Loss: 0.0018268 +2025-02-24 20:13:02,849 Epoch 17/2000 +2025-02-24 20:28:06,151 Current Learning Rate: 0.0009822787 +2025-02-24 20:28:07,067 Train Loss: 0.0015416, Val Loss: 0.0014259 +2025-02-24 20:28:07,067 Epoch 18/2000 +2025-02-24 20:43:10,826 Current Learning Rate: 0.0009801468 +2025-02-24 20:43:11,797 Train Loss: 0.0014084, Val Loss: 0.0013179 +2025-02-24 20:43:11,797 Epoch 19/2000 +2025-02-24 20:58:13,971 Current Learning Rate: 0.0009778965 +2025-02-24 20:58:15,020 Train Loss: 0.0013198, Val Loss: 0.0013071 +2025-02-24 20:58:15,020 Epoch 20/2000 +2025-02-24 21:13:17,915 Current Learning Rate: 0.0009755283 +2025-02-24 21:13:18,833 Train Loss: 0.0012210, Val Loss: 0.0011555 +2025-02-24 21:13:18,834 Epoch 21/2000 +2025-02-24 21:28:21,985 Current Learning Rate: 0.0009730427 +2025-02-24 21:28:22,937 Train Loss: 0.0011413, Val Loss: 0.0011252 +2025-02-24 21:28:22,937 Epoch 22/2000 +2025-02-24 21:43:26,165 Current Learning Rate: 0.0009704404 +2025-02-24 21:43:26,166 Train Loss: 0.0011451, Val Loss: 0.0011662 +2025-02-24 21:43:26,166 Epoch 23/2000 +2025-02-24 21:58:30,148 Current Learning Rate: 0.0009677220 +2025-02-24 21:58:31,140 Train Loss: 0.0010685, Val Loss: 0.0010649 +2025-02-24 
21:58:31,141 Epoch 24/2000 +2025-02-24 22:13:34,687 Current Learning Rate: 0.0009648882 +2025-02-24 22:13:35,624 Train Loss: 0.0009016, Val Loss: 0.0009312 +2025-02-24 22:13:35,624 Epoch 25/2000 +2025-02-24 22:28:38,293 Current Learning Rate: 0.0009619398 +2025-02-24 22:28:39,202 Train Loss: 0.0009121, Val Loss: 0.0008724 +2025-02-24 22:28:39,202 Epoch 26/2000 +2025-02-24 22:43:42,880 Current Learning Rate: 0.0009588773 +2025-02-24 22:43:42,881 Train Loss: 0.0008411, Val Loss: 0.0008799 +2025-02-24 22:43:42,881 Epoch 27/2000 +2025-02-24 22:58:46,431 Current Learning Rate: 0.0009557016 +2025-02-24 22:58:47,413 Train Loss: 0.0008557, Val Loss: 0.0008101 +2025-02-24 22:58:47,414 Epoch 28/2000 +2025-02-24 23:13:49,935 Current Learning Rate: 0.0009524135 +2025-02-24 23:13:50,803 Train Loss: 0.0008248, Val Loss: 0.0008042 +2025-02-24 23:13:50,803 Epoch 29/2000 +2025-02-24 23:28:53,290 Current Learning Rate: 0.0009490138 +2025-02-24 23:28:54,256 Train Loss: 0.0006904, Val Loss: 0.0007522 +2025-02-24 23:28:54,256 Epoch 30/2000 +2025-02-24 23:43:56,574 Current Learning Rate: 0.0009455033 +2025-02-24 23:43:56,575 Train Loss: 0.0007436, Val Loss: 0.0011877 +2025-02-24 23:43:56,575 Epoch 31/2000 +2025-02-24 23:59:00,575 Current Learning Rate: 0.0009418828 +2025-02-24 23:59:01,486 Train Loss: 0.0007554, Val Loss: 0.0006824 +2025-02-24 23:59:01,486 Epoch 32/2000 +2025-02-25 00:14:04,480 Current Learning Rate: 0.0009381533 +2025-02-25 00:14:06,360 Train Loss: 0.0006869, Val Loss: 0.0006696 +2025-02-25 00:14:06,361 Epoch 33/2000 +2025-02-25 00:29:09,347 Current Learning Rate: 0.0009343158 +2025-02-25 00:29:10,169 Train Loss: 0.0006142, Val Loss: 0.0006497 +2025-02-25 00:29:10,169 Epoch 34/2000 +2025-02-25 00:44:13,071 Current Learning Rate: 0.0009303710 +2025-02-25 00:44:14,012 Train Loss: 0.0005384, Val Loss: 0.0006241 +2025-02-25 00:44:14,012 Epoch 35/2000 +2025-02-25 00:59:16,622 Current Learning Rate: 0.0009263201 +2025-02-25 00:59:16,623 Train Loss: 0.0005865, Val Loss: 
0.0006459 +2025-02-25 00:59:16,623 Epoch 36/2000 +2025-02-25 01:14:19,789 Current Learning Rate: 0.0009221640 +2025-02-25 01:14:20,884 Train Loss: 0.0006411, Val Loss: 0.0006062 +2025-02-25 01:14:20,884 Epoch 37/2000 +2025-02-25 01:29:24,210 Current Learning Rate: 0.0009179037 +2025-02-25 01:29:24,211 Train Loss: 0.0005853, Val Loss: 0.0006425 +2025-02-25 01:29:24,211 Epoch 38/2000 +2025-02-25 01:44:27,395 Current Learning Rate: 0.0009135403 +2025-02-25 01:44:28,509 Train Loss: 0.0005547, Val Loss: 0.0005610 +2025-02-25 01:44:28,510 Epoch 39/2000 +2025-02-25 01:59:31,667 Current Learning Rate: 0.0009090749 +2025-02-25 01:59:32,575 Train Loss: 0.0004919, Val Loss: 0.0005452 +2025-02-25 01:59:32,576 Epoch 40/2000 +2025-02-25 02:14:36,419 Current Learning Rate: 0.0009045085 +2025-02-25 02:14:36,420 Train Loss: 0.0004709, Val Loss: 0.0006160 +2025-02-25 02:14:36,420 Epoch 41/2000 +2025-02-25 02:29:39,939 Current Learning Rate: 0.0008998423 +2025-02-25 02:29:40,881 Train Loss: 0.0005403, Val Loss: 0.0005210 +2025-02-25 02:29:40,882 Epoch 42/2000 +2025-02-25 02:44:43,949 Current Learning Rate: 0.0008950775 +2025-02-25 02:44:43,950 Train Loss: 0.0004746, Val Loss: 0.0005861 +2025-02-25 02:44:43,950 Epoch 43/2000 +2025-02-25 02:59:47,680 Current Learning Rate: 0.0008902152 +2025-02-25 02:59:47,680 Train Loss: 0.0005121, Val Loss: 0.0005827 +2025-02-25 02:59:47,680 Epoch 44/2000 +2025-02-25 03:14:51,351 Current Learning Rate: 0.0008852566 +2025-02-25 03:14:51,351 Train Loss: 0.0004441, Val Loss: 0.0005960 +2025-02-25 03:14:51,351 Epoch 45/2000 +2025-02-25 03:29:55,318 Current Learning Rate: 0.0008802030 +2025-02-25 03:29:56,290 Train Loss: 0.0004549, Val Loss: 0.0004676 +2025-02-25 03:29:56,291 Epoch 46/2000 +2025-02-25 03:44:58,575 Current Learning Rate: 0.0008750555 +2025-02-25 03:44:58,576 Train Loss: 0.0004177, Val Loss: 0.0004849 +2025-02-25 03:44:58,576 Epoch 47/2000 +2025-02-25 04:00:01,937 Current Learning Rate: 0.0008698155 +2025-02-25 04:00:04,796 Train Loss: 
0.0004078, Val Loss: 0.0004234 +2025-02-25 04:00:04,796 Epoch 48/2000 +2025-02-25 04:15:07,817 Current Learning Rate: 0.0008644843 +2025-02-25 04:15:07,818 Train Loss: 0.0003959, Val Loss: 0.0004665 +2025-02-25 04:15:07,819 Epoch 49/2000 +2025-02-25 04:30:11,693 Current Learning Rate: 0.0008590631 +2025-02-25 04:30:12,713 Train Loss: 0.0004122, Val Loss: 0.0004154 +2025-02-25 04:30:12,713 Epoch 50/2000 +2025-02-25 04:45:15,538 Current Learning Rate: 0.0008535534 +2025-02-25 04:45:16,632 Train Loss: 0.0004019, Val Loss: 0.0004076 +2025-02-25 04:45:16,632 Epoch 51/2000 +2025-02-25 05:00:20,380 Current Learning Rate: 0.0008479564 +2025-02-25 05:00:21,280 Train Loss: 0.0003756, Val Loss: 0.0003701 +2025-02-25 05:00:21,281 Epoch 52/2000 +2025-02-25 05:15:23,837 Current Learning Rate: 0.0008422736 +2025-02-25 05:15:23,841 Train Loss: 0.0003748, Val Loss: 0.0004063 +2025-02-25 05:15:23,841 Epoch 53/2000 +2025-02-25 05:30:27,716 Current Learning Rate: 0.0008365063 +2025-02-25 05:30:27,716 Train Loss: 0.0005704, Val Loss: 0.0004781 +2025-02-25 05:30:27,717 Epoch 54/2000 +2025-02-25 05:45:31,146 Current Learning Rate: 0.0008306559 +2025-02-25 05:45:31,146 Train Loss: 0.0003712, Val Loss: 0.0003772 +2025-02-25 05:45:31,146 Epoch 55/2000 +2025-02-25 06:00:34,966 Current Learning Rate: 0.0008247240 +2025-02-25 06:00:34,967 Train Loss: 0.0004134, Val Loss: 0.0004025 +2025-02-25 06:00:34,967 Epoch 56/2000 +2025-02-25 06:15:39,445 Current Learning Rate: 0.0008187120 +2025-02-25 06:15:40,545 Train Loss: 0.0003266, Val Loss: 0.0003262 +2025-02-25 06:15:40,545 Epoch 57/2000 +2025-02-25 06:30:44,292 Current Learning Rate: 0.0008126213 +2025-02-25 06:30:45,302 Train Loss: 0.0003011, Val Loss: 0.0003144 +2025-02-25 06:30:45,302 Epoch 58/2000 +2025-02-25 06:45:48,633 Current Learning Rate: 0.0008064535 +2025-02-25 06:45:48,634 Train Loss: 0.0003419, Val Loss: 0.0003240 +2025-02-25 06:45:48,635 Epoch 59/2000 +2025-02-25 07:00:52,035 Current Learning Rate: 0.0008002101 +2025-02-25 
07:00:52,036 Train Loss: 0.0003142, Val Loss: 0.0003275 +2025-02-25 07:00:52,036 Epoch 60/2000 +2025-02-25 07:15:54,621 Current Learning Rate: 0.0007938926 +2025-02-25 07:15:54,621 Train Loss: 0.0003328, Val Loss: 0.0003441 +2025-02-25 07:15:54,621 Epoch 61/2000 +2025-02-25 07:30:57,200 Current Learning Rate: 0.0007875026 +2025-02-25 07:30:57,200 Train Loss: 0.0003144, Val Loss: 0.0003336 +2025-02-25 07:30:57,200 Epoch 62/2000 +2025-02-25 07:46:00,913 Current Learning Rate: 0.0007810417 +2025-02-25 07:46:00,913 Train Loss: 0.0002909, Val Loss: 0.0003155 +2025-02-25 07:46:00,914 Epoch 63/2000 +2025-02-25 08:01:04,932 Current Learning Rate: 0.0007745114 +2025-02-25 08:01:06,008 Train Loss: 0.0003106, Val Loss: 0.0003073 +2025-02-25 08:01:06,008 Epoch 64/2000 +2025-02-25 08:16:09,420 Current Learning Rate: 0.0007679134 +2025-02-25 08:16:09,421 Train Loss: 0.0003480, Val Loss: 0.0003098 +2025-02-25 08:16:09,421 Epoch 65/2000 +2025-02-25 08:31:13,486 Current Learning Rate: 0.0007612493 +2025-02-25 08:31:14,500 Train Loss: 0.0003508, Val Loss: 0.0002983 +2025-02-25 08:31:14,500 Epoch 66/2000 +2025-02-25 08:46:17,285 Current Learning Rate: 0.0007545207 +2025-02-25 08:46:18,296 Train Loss: 0.0002690, Val Loss: 0.0002935 +2025-02-25 08:46:18,297 Epoch 67/2000 +2025-02-25 09:01:21,350 Current Learning Rate: 0.0007477293 +2025-02-25 09:01:21,352 Train Loss: 0.0003194, Val Loss: 0.0003067 +2025-02-25 09:01:21,352 Epoch 68/2000 +2025-02-25 09:16:24,402 Current Learning Rate: 0.0007408768 +2025-02-25 09:16:25,564 Train Loss: 0.0002590, Val Loss: 0.0002914 +2025-02-25 09:16:25,564 Epoch 69/2000 +2025-02-25 09:31:28,130 Current Learning Rate: 0.0007339649 +2025-02-25 09:31:28,131 Train Loss: 0.0002838, Val Loss: 0.0003011 +2025-02-25 09:31:28,132 Epoch 70/2000 +2025-02-25 09:46:31,264 Current Learning Rate: 0.0007269952 +2025-02-25 09:46:31,265 Train Loss: 0.0002724, Val Loss: 0.0003282 +2025-02-25 09:46:31,265 Epoch 71/2000 +2025-02-25 10:01:35,150 Current Learning Rate: 
0.0007199696 +2025-02-25 10:01:35,151 Train Loss: 0.0002907, Val Loss: 0.0002919 +2025-02-25 10:01:35,151 Epoch 72/2000 +2025-02-25 10:16:38,037 Current Learning Rate: 0.0007128896 +2025-02-25 10:16:38,037 Train Loss: 0.0002937, Val Loss: 0.0003012 +2025-02-25 10:16:38,037 Epoch 73/2000 +2025-02-25 10:31:41,420 Current Learning Rate: 0.0007057572 +2025-02-25 10:31:42,335 Train Loss: 0.0002739, Val Loss: 0.0002700 +2025-02-25 10:31:42,336 Epoch 74/2000 +2025-02-25 10:46:44,862 Current Learning Rate: 0.0006985739 +2025-02-25 10:46:45,831 Train Loss: 0.0002685, Val Loss: 0.0002608 +2025-02-25 10:46:45,831 Epoch 75/2000 +2025-02-25 11:01:48,402 Current Learning Rate: 0.0006913417 +2025-02-25 11:01:48,402 Train Loss: 0.0002618, Val Loss: 0.0002696 +2025-02-25 11:01:48,402 Epoch 76/2000 +2025-02-25 11:16:51,147 Current Learning Rate: 0.0006840623 +2025-02-25 11:16:52,042 Train Loss: 0.0002518, Val Loss: 0.0002458 +2025-02-25 11:16:52,042 Epoch 77/2000 +2025-02-25 11:31:54,389 Current Learning Rate: 0.0006767374 +2025-02-25 11:31:54,389 Train Loss: 0.0002349, Val Loss: 0.0002580 +2025-02-25 11:31:54,390 Epoch 78/2000 +2025-02-25 11:46:57,106 Current Learning Rate: 0.0006693690 +2025-02-25 11:46:57,107 Train Loss: 0.0002533, Val Loss: 0.0002504 +2025-02-25 11:46:57,107 Epoch 79/2000 +2025-02-25 12:01:59,996 Current Learning Rate: 0.0006619587 +2025-02-25 12:01:59,997 Train Loss: 0.0002345, Val Loss: 0.0002552 +2025-02-25 12:01:59,997 Epoch 80/2000 +2025-02-25 12:17:03,130 Current Learning Rate: 0.0006545085 +2025-02-25 12:17:03,131 Train Loss: 0.0002340, Val Loss: 0.0002564 +2025-02-25 12:17:03,131 Epoch 81/2000 +2025-02-25 12:32:05,871 Current Learning Rate: 0.0006470202 +2025-02-25 12:32:05,872 Train Loss: 0.0002187, Val Loss: 0.0002478 +2025-02-25 12:32:05,872 Epoch 82/2000 +2025-02-25 12:47:09,285 Current Learning Rate: 0.0006394956 +2025-02-25 12:47:09,285 Train Loss: 0.0002616, Val Loss: 0.0002532 +2025-02-25 12:47:09,286 Epoch 83/2000 +2025-02-25 13:02:12,213 
Current Learning Rate: 0.0006319365 +2025-02-25 13:02:13,017 Train Loss: 0.0002400, Val Loss: 0.0002432 +2025-02-25 13:02:13,018 Epoch 84/2000 +2025-02-25 13:17:15,104 Current Learning Rate: 0.0006243449 +2025-02-25 13:17:15,989 Train Loss: 0.0002385, Val Loss: 0.0002424 +2025-02-25 13:17:15,989 Epoch 85/2000 +2025-02-25 13:32:18,146 Current Learning Rate: 0.0006167227 +2025-02-25 13:32:19,058 Train Loss: 0.0002356, Val Loss: 0.0002413 +2025-02-25 13:32:19,059 Epoch 86/2000 +2025-02-25 13:47:21,459 Current Learning Rate: 0.0006090716 +2025-02-25 13:47:21,460 Train Loss: 0.0002399, Val Loss: 0.0002569 +2025-02-25 13:47:21,460 Epoch 87/2000 +2025-02-25 14:02:25,066 Current Learning Rate: 0.0006013936 +2025-02-25 14:02:25,067 Train Loss: 0.0002128, Val Loss: 0.0002451 +2025-02-25 14:02:25,067 Epoch 88/2000 +2025-02-25 14:17:27,765 Current Learning Rate: 0.0005936907 +2025-02-25 14:17:28,687 Train Loss: 0.0002162, Val Loss: 0.0002375 +2025-02-25 14:17:28,688 Epoch 89/2000 +2025-02-25 14:32:31,720 Current Learning Rate: 0.0005859646 +2025-02-25 14:32:32,732 Train Loss: 0.0002342, Val Loss: 0.0002355 +2025-02-25 14:32:32,732 Epoch 90/2000 +2025-02-25 14:47:35,041 Current Learning Rate: 0.0005782172 +2025-02-25 14:47:35,953 Train Loss: 0.0002678, Val Loss: 0.0002264 +2025-02-25 14:47:35,954 Epoch 91/2000 +2025-02-25 15:02:38,966 Current Learning Rate: 0.0005704506 +2025-02-25 15:02:39,919 Train Loss: 0.0002453, Val Loss: 0.0002140 +2025-02-25 15:02:39,920 Epoch 92/2000 +2025-02-25 15:17:43,009 Current Learning Rate: 0.0005626666 +2025-02-25 15:17:44,040 Train Loss: 0.0001895, Val Loss: 0.0002100 +2025-02-25 15:17:44,040 Epoch 93/2000 +2025-02-25 15:32:46,791 Current Learning Rate: 0.0005548672 +2025-02-25 15:32:47,661 Train Loss: 0.0001881, Val Loss: 0.0002013 +2025-02-25 15:32:47,661 Epoch 94/2000 +2025-02-25 15:47:50,748 Current Learning Rate: 0.0005470542 +2025-02-25 15:47:50,749 Train Loss: 0.0002285, Val Loss: 0.0002250 +2025-02-25 15:47:50,749 Epoch 95/2000 
+2025-02-25 16:02:54,055 Current Learning Rate: 0.0005392295 +2025-02-25 16:02:54,055 Train Loss: 0.0002251, Val Loss: 0.0002040 +2025-02-25 16:02:54,055 Epoch 96/2000 +2025-02-25 16:17:59,050 Current Learning Rate: 0.0005313953 +2025-02-25 16:17:59,052 Train Loss: 0.0001951, Val Loss: 0.0002039 +2025-02-25 16:17:59,053 Epoch 97/2000 +2025-02-25 16:33:02,606 Current Learning Rate: 0.0005235532 +2025-02-25 16:33:02,606 Train Loss: 0.0002096, Val Loss: 0.0002188 +2025-02-25 16:33:02,607 Epoch 98/2000 +2025-02-25 16:48:06,201 Current Learning Rate: 0.0005157054 +2025-02-25 16:48:07,131 Train Loss: 0.0002015, Val Loss: 0.0001987 +2025-02-25 16:48:07,131 Epoch 99/2000 +2025-02-25 17:03:10,862 Current Learning Rate: 0.0005078537 +2025-02-25 17:03:10,863 Train Loss: 0.0001924, Val Loss: 0.0002114 +2025-02-25 17:03:10,863 Epoch 100/2000 +2025-02-25 17:18:13,997 Current Learning Rate: 0.0005000000 +2025-02-25 17:18:13,997 Train Loss: 0.0001947, Val Loss: 0.0001999 +2025-02-25 17:18:13,998 Epoch 101/2000 +2025-02-25 17:33:16,414 Current Learning Rate: 0.0004921463 +2025-02-25 17:33:16,414 Train Loss: 0.0002028, Val Loss: 0.0002025 +2025-02-25 17:33:16,414 Epoch 102/2000 +2025-02-25 17:48:20,832 Current Learning Rate: 0.0004842946 +2025-02-25 17:48:20,833 Train Loss: 0.0002183, Val Loss: 0.0002091 +2025-02-25 17:48:20,833 Epoch 103/2000 +2025-02-25 18:03:24,432 Current Learning Rate: 0.0004764468 +2025-02-25 18:03:24,433 Train Loss: 0.0001786, Val Loss: 0.0002148 +2025-02-25 18:03:24,433 Epoch 104/2000 +2025-02-25 18:18:27,755 Current Learning Rate: 0.0004686047 +2025-02-25 18:18:27,756 Train Loss: 0.0001798, Val Loss: 0.0002055 +2025-02-25 18:18:27,756 Epoch 105/2000 +2025-02-25 18:33:31,423 Current Learning Rate: 0.0004607705 +2025-02-25 18:33:32,394 Train Loss: 0.0001941, Val Loss: 0.0001975 +2025-02-25 18:33:32,394 Epoch 106/2000 +2025-02-25 18:48:35,960 Current Learning Rate: 0.0004529458 +2025-02-25 18:48:35,961 Train Loss: 0.0002056, Val Loss: 0.0002399 +2025-02-25 
18:48:35,962 Epoch 107/2000 +2025-02-25 19:03:38,991 Current Learning Rate: 0.0004451328 +2025-02-25 19:03:38,992 Train Loss: 0.0001906, Val Loss: 0.0002040 +2025-02-25 19:03:38,992 Epoch 108/2000 +2025-02-25 19:18:41,657 Current Learning Rate: 0.0004373334 +2025-02-25 19:18:42,527 Train Loss: 0.0001806, Val Loss: 0.0001967 +2025-02-25 19:18:42,528 Epoch 109/2000 +2025-02-25 19:33:45,767 Current Learning Rate: 0.0004295494 +2025-02-25 19:33:45,767 Train Loss: 0.0001806, Val Loss: 0.0001990 +2025-02-25 19:33:45,767 Epoch 110/2000 +2025-02-25 19:48:50,020 Current Learning Rate: 0.0004217828 +2025-02-25 19:48:50,021 Train Loss: 0.0001675, Val Loss: 0.0001971 +2025-02-25 19:48:50,021 Epoch 111/2000 +2025-02-25 20:03:52,846 Current Learning Rate: 0.0004140354 +2025-02-25 20:03:53,767 Train Loss: 0.0001470, Val Loss: 0.0001785 +2025-02-25 20:03:53,767 Epoch 112/2000 +2025-02-25 20:18:56,116 Current Learning Rate: 0.0004063093 +2025-02-25 20:18:56,117 Train Loss: 0.0001739, Val Loss: 0.0001800 +2025-02-25 20:18:56,117 Epoch 113/2000 +2025-02-25 20:33:58,736 Current Learning Rate: 0.0003986064 +2025-02-25 20:33:58,736 Train Loss: 0.0002093, Val Loss: 0.0001841 +2025-02-25 20:33:58,736 Epoch 114/2000 +2025-02-25 20:49:01,269 Current Learning Rate: 0.0003909284 +2025-02-25 20:49:03,853 Train Loss: 0.0001446, Val Loss: 0.0001755 +2025-02-25 20:49:03,854 Epoch 115/2000 +2025-02-25 21:04:06,164 Current Learning Rate: 0.0003832773 +2025-02-25 21:04:06,165 Train Loss: 0.0002054, Val Loss: 0.0001839 +2025-02-25 21:04:06,165 Epoch 116/2000 +2025-02-25 21:19:09,503 Current Learning Rate: 0.0003756551 +2025-02-25 21:19:09,503 Train Loss: 0.0002168, Val Loss: 0.0002063 +2025-02-25 21:19:09,503 Epoch 117/2000 +2025-02-25 21:34:12,561 Current Learning Rate: 0.0003680635 +2025-02-25 21:34:13,329 Train Loss: 0.0001846, Val Loss: 0.0001725 +2025-02-25 21:34:13,329 Epoch 118/2000 +2025-02-25 21:49:15,429 Current Learning Rate: 0.0003605044 +2025-02-25 21:49:16,177 Train Loss: 0.0001766, Val 
Loss: 0.0001693 +2025-02-25 21:49:16,177 Epoch 119/2000 +2025-02-25 22:04:18,936 Current Learning Rate: 0.0003529798 +2025-02-25 22:04:18,936 Train Loss: 0.0001543, Val Loss: 0.0001781 +2025-02-25 22:04:18,937 Epoch 120/2000 +2025-02-25 22:19:22,010 Current Learning Rate: 0.0003454915 +2025-02-25 22:19:22,770 Train Loss: 0.0001525, Val Loss: 0.0001688 +2025-02-25 22:19:22,771 Epoch 121/2000 +2025-02-25 22:34:25,377 Current Learning Rate: 0.0003380413 +2025-02-25 22:34:26,126 Train Loss: 0.0001948, Val Loss: 0.0001664 +2025-02-25 22:34:26,127 Epoch 122/2000 +2025-02-25 22:49:28,199 Current Learning Rate: 0.0003306310 +2025-02-25 22:49:28,200 Train Loss: 0.0001663, Val Loss: 0.0001676 +2025-02-25 22:49:28,200 Epoch 123/2000 +2025-02-25 23:04:31,654 Current Learning Rate: 0.0003232626 +2025-02-25 23:04:32,432 Train Loss: 0.0001568, Val Loss: 0.0001639 +2025-02-25 23:04:32,432 Epoch 124/2000 +2025-02-25 23:19:34,796 Current Learning Rate: 0.0003159377 +2025-02-25 23:19:34,797 Train Loss: 0.0001820, Val Loss: 0.0001678 +2025-02-25 23:19:34,797 Epoch 125/2000 +2025-02-25 23:34:37,793 Current Learning Rate: 0.0003086583 +2025-02-25 23:34:38,657 Train Loss: 0.0001385, Val Loss: 0.0001612 +2025-02-25 23:34:38,657 Epoch 126/2000 +2025-02-25 23:49:41,020 Current Learning Rate: 0.0003014261 +2025-02-25 23:49:41,021 Train Loss: 0.0001461, Val Loss: 0.0001638 +2025-02-25 23:49:41,021 Epoch 127/2000 +2025-02-26 00:04:44,363 Current Learning Rate: 0.0002942428 +2025-02-26 00:04:44,363 Train Loss: 0.0001603, Val Loss: 0.0001642 +2025-02-26 00:04:44,363 Epoch 128/2000 +2025-02-26 00:19:47,628 Current Learning Rate: 0.0002871104 +2025-02-26 00:19:47,628 Train Loss: 0.0001603, Val Loss: 0.0001628 +2025-02-26 00:19:47,628 Epoch 129/2000 +2025-02-26 00:34:50,322 Current Learning Rate: 0.0002800304 +2025-02-26 00:34:50,323 Train Loss: 0.0001403, Val Loss: 0.0001614 +2025-02-26 00:34:50,323 Epoch 130/2000 +2025-02-26 00:49:53,781 Current Learning Rate: 0.0002730048 +2025-02-26 
00:49:53,782 Train Loss: 0.0001583, Val Loss: 0.0001651 +2025-02-26 00:49:53,782 Epoch 131/2000 +2025-02-26 01:04:56,705 Current Learning Rate: 0.0002660351 +2025-02-26 01:04:57,442 Train Loss: 0.0001401, Val Loss: 0.0001569 +2025-02-26 01:04:57,442 Epoch 132/2000 +2025-02-26 01:20:00,262 Current Learning Rate: 0.0002591232 +2025-02-26 01:20:00,262 Train Loss: 0.0001626, Val Loss: 0.0001687 +2025-02-26 01:20:00,263 Epoch 133/2000 +2025-02-26 01:35:03,206 Current Learning Rate: 0.0002522707 +2025-02-26 01:35:03,206 Train Loss: 0.0001407, Val Loss: 0.0001578 +2025-02-26 01:35:03,206 Epoch 134/2000 +2025-02-26 01:50:06,740 Current Learning Rate: 0.0002454793 +2025-02-26 01:50:06,741 Train Loss: 0.0001549, Val Loss: 0.0001586 +2025-02-26 01:50:06,741 Epoch 135/2000 +2025-02-26 02:05:09,590 Current Learning Rate: 0.0002387507 +2025-02-26 02:05:09,590 Train Loss: 0.0001322, Val Loss: 0.0001576 +2025-02-26 02:05:09,591 Epoch 136/2000 +2025-02-26 02:20:12,392 Current Learning Rate: 0.0002320866 +2025-02-26 02:20:13,203 Train Loss: 0.0001440, Val Loss: 0.0001562 +2025-02-26 02:20:13,203 Epoch 137/2000 +2025-02-26 02:35:15,294 Current Learning Rate: 0.0002254886 +2025-02-26 02:35:15,295 Train Loss: 0.0001528, Val Loss: 0.0001667 +2025-02-26 02:35:15,295 Epoch 138/2000 +2025-02-26 02:50:18,038 Current Learning Rate: 0.0002189583 +2025-02-26 02:50:18,891 Train Loss: 0.0001358, Val Loss: 0.0001525 +2025-02-26 02:50:18,891 Epoch 139/2000 +2025-02-26 03:05:22,429 Current Learning Rate: 0.0002124974 +2025-02-26 03:05:22,430 Train Loss: 0.0001528, Val Loss: 0.0001608 +2025-02-26 03:05:22,430 Epoch 140/2000 +2025-02-26 03:20:25,745 Current Learning Rate: 0.0002061074 +2025-02-26 03:20:25,746 Train Loss: 0.0001313, Val Loss: 0.0001563 +2025-02-26 03:20:25,746 Epoch 141/2000 +2025-02-26 03:35:29,194 Current Learning Rate: 0.0001997899 +2025-02-26 03:35:30,049 Train Loss: 0.0001324, Val Loss: 0.0001501 +2025-02-26 03:35:30,049 Epoch 142/2000 +2025-02-26 03:50:32,947 Current Learning 
Rate: 0.0001935465 +2025-02-26 03:50:32,948 Train Loss: 0.0001395, Val Loss: 0.0001551 +2025-02-26 03:50:32,948 Epoch 143/2000 +2025-02-26 04:05:35,584 Current Learning Rate: 0.0001873787 +2025-02-26 04:05:36,442 Train Loss: 0.0001363, Val Loss: 0.0001461 +2025-02-26 04:05:36,442 Epoch 144/2000 +2025-02-26 04:20:39,099 Current Learning Rate: 0.0001812880 +2025-02-26 04:20:39,100 Train Loss: 0.0001362, Val Loss: 0.0001465 +2025-02-26 04:20:39,100 Epoch 145/2000 +2025-02-26 04:35:41,757 Current Learning Rate: 0.0001752760 +2025-02-26 04:35:41,757 Train Loss: 0.0001643, Val Loss: 0.0001544 +2025-02-26 04:35:41,757 Epoch 146/2000 +2025-02-26 04:50:44,724 Current Learning Rate: 0.0001693441 +2025-02-26 04:50:44,724 Train Loss: 0.0001328, Val Loss: 0.0001556 +2025-02-26 04:50:44,725 Epoch 147/2000 +2025-02-26 05:05:47,749 Current Learning Rate: 0.0001634937 +2025-02-26 05:05:47,750 Train Loss: 0.0001218, Val Loss: 0.0001580 +2025-02-26 05:05:47,750 Epoch 148/2000 +2025-02-26 05:20:50,392 Current Learning Rate: 0.0001577264 +2025-02-26 05:20:51,242 Train Loss: 0.0001057, Val Loss: 0.0001442 +2025-02-26 05:20:51,242 Epoch 149/2000 +2025-02-26 05:35:54,519 Current Learning Rate: 0.0001520436 +2025-02-26 05:35:54,520 Train Loss: 0.0001296, Val Loss: 0.0001466 +2025-02-26 05:35:54,520 Epoch 150/2000 +2025-02-26 05:50:57,400 Current Learning Rate: 0.0001464466 +2025-02-26 05:50:57,400 Train Loss: 0.0001403, Val Loss: 0.0001475 +2025-02-26 05:50:57,400 Epoch 151/2000 +2025-02-26 06:06:00,950 Current Learning Rate: 0.0001409369 +2025-02-26 06:06:01,794 Train Loss: 0.0001305, Val Loss: 0.0001440 +2025-02-26 06:06:01,795 Epoch 152/2000 +2025-02-26 06:21:03,812 Current Learning Rate: 0.0001355157 +2025-02-26 06:21:04,758 Train Loss: 0.0001285, Val Loss: 0.0001414 +2025-02-26 06:21:04,758 Epoch 153/2000 +2025-02-26 06:36:06,875 Current Learning Rate: 0.0001301845 +2025-02-26 06:36:06,876 Train Loss: 0.0001381, Val Loss: 0.0001449 +2025-02-26 06:36:06,876 Epoch 154/2000 +2025-02-26 
06:51:09,343 Current Learning Rate: 0.0001249445 +2025-02-26 06:51:09,343 Train Loss: 0.0001238, Val Loss: 0.0001418 +2025-02-26 06:51:09,344 Epoch 155/2000 +2025-02-26 07:06:12,322 Current Learning Rate: 0.0001197970 +2025-02-26 07:06:13,245 Train Loss: 0.0001294, Val Loss: 0.0001414 +2025-02-26 07:06:13,245 Epoch 156/2000 +2025-02-26 07:21:16,214 Current Learning Rate: 0.0001147434 +2025-02-26 07:21:17,145 Train Loss: 0.0001394, Val Loss: 0.0001393 +2025-02-26 07:21:17,146 Epoch 157/2000 +2025-02-26 07:36:20,501 Current Learning Rate: 0.0001097848 +2025-02-26 07:36:20,502 Train Loss: 0.0001324, Val Loss: 0.0001396 +2025-02-26 07:36:20,502 Epoch 158/2000 +2025-02-26 07:51:23,004 Current Learning Rate: 0.0001049225 +2025-02-26 07:51:23,004 Train Loss: 0.0001241, Val Loss: 0.0001410 +2025-02-26 07:51:23,004 Epoch 159/2000 +2025-02-26 08:06:25,464 Current Learning Rate: 0.0001001577 +2025-02-26 08:06:25,465 Train Loss: 0.0001252, Val Loss: 0.0001400 +2025-02-26 08:06:25,465 Epoch 160/2000 +2025-02-26 08:21:29,079 Current Learning Rate: 0.0000954915 +2025-02-26 08:21:29,936 Train Loss: 0.0001419, Val Loss: 0.0001385 +2025-02-26 08:21:29,936 Epoch 161/2000 +2025-02-26 08:36:32,243 Current Learning Rate: 0.0000909251 +2025-02-26 08:36:33,158 Train Loss: 0.0001335, Val Loss: 0.0001376 +2025-02-26 08:36:33,158 Epoch 162/2000 +2025-02-26 08:51:35,787 Current Learning Rate: 0.0000864597 +2025-02-26 08:51:36,655 Train Loss: 0.0001353, Val Loss: 0.0001374 +2025-02-26 08:51:36,655 Epoch 163/2000 +2025-02-26 09:06:39,090 Current Learning Rate: 0.0000820963 +2025-02-26 09:06:39,948 Train Loss: 0.0001217, Val Loss: 0.0001374 +2025-02-26 09:06:39,948 Epoch 164/2000 +2025-02-26 09:21:43,008 Current Learning Rate: 0.0000778360 +2025-02-26 09:21:43,009 Train Loss: 0.0001269, Val Loss: 0.0001380 +2025-02-26 09:21:43,010 Epoch 165/2000 +2025-02-26 09:36:45,997 Current Learning Rate: 0.0000736799 +2025-02-26 09:36:45,997 Train Loss: 0.0001192, Val Loss: 0.0001391 +2025-02-26 
09:36:45,997 Epoch 166/2000 +2025-02-26 09:51:48,465 Current Learning Rate: 0.0000696290 +2025-02-26 09:51:49,269 Train Loss: 0.0001330, Val Loss: 0.0001354 +2025-02-26 09:51:49,269 Epoch 167/2000 +2025-02-26 10:06:52,869 Current Learning Rate: 0.0000656842 +2025-02-26 10:06:53,769 Train Loss: 0.0001153, Val Loss: 0.0001352 +2025-02-26 10:06:53,769 Epoch 168/2000 +2025-02-26 10:21:56,208 Current Learning Rate: 0.0000618467 +2025-02-26 10:21:56,208 Train Loss: 0.0001294, Val Loss: 0.0001372 +2025-02-26 10:21:56,209 Epoch 169/2000 +2025-02-26 10:36:58,943 Current Learning Rate: 0.0000581172 +2025-02-26 10:36:59,781 Train Loss: 0.0001191, Val Loss: 0.0001345 +2025-02-26 10:36:59,781 Epoch 170/2000 +2025-02-26 10:52:02,082 Current Learning Rate: 0.0000544967 +2025-02-26 10:52:03,528 Train Loss: 0.0001146, Val Loss: 0.0001343 +2025-02-26 10:52:03,529 Epoch 171/2000 +2025-02-26 11:07:06,166 Current Learning Rate: 0.0000509862 +2025-02-26 11:07:06,167 Train Loss: 0.0001237, Val Loss: 0.0001343 +2025-02-26 11:07:06,167 Epoch 172/2000 +2025-02-26 11:22:09,864 Current Learning Rate: 0.0000475865 +2025-02-26 11:22:09,864 Train Loss: 0.0001171, Val Loss: 0.0001349 +2025-02-26 11:22:09,865 Epoch 173/2000 +2025-02-26 11:37:12,952 Current Learning Rate: 0.0000442984 +2025-02-26 11:37:12,952 Train Loss: 0.0001234, Val Loss: 0.0001352 +2025-02-26 11:37:12,953 Epoch 174/2000 +2025-02-26 11:52:16,865 Current Learning Rate: 0.0000411227 +2025-02-26 11:52:17,795 Train Loss: 0.0001172, Val Loss: 0.0001338 +2025-02-26 11:52:17,795 Epoch 175/2000 +2025-02-26 12:07:21,226 Current Learning Rate: 0.0000380602 +2025-02-26 12:07:22,060 Train Loss: 0.0001205, Val Loss: 0.0001335 +2025-02-26 12:07:22,060 Epoch 176/2000 +2025-02-26 12:22:24,897 Current Learning Rate: 0.0000351118 +2025-02-26 12:22:24,899 Train Loss: 0.0001470, Val Loss: 0.0001340 +2025-02-26 12:22:24,899 Epoch 177/2000 +2025-02-26 12:37:27,732 Current Learning Rate: 0.0000322780 +2025-02-26 12:37:28,564 Train Loss: 0.0001123, Val 
Loss: 0.0001330 +2025-02-26 12:37:28,565 Epoch 178/2000 +2025-02-26 12:52:31,567 Current Learning Rate: 0.0000295596 +2025-02-26 12:52:31,568 Train Loss: 0.0001130, Val Loss: 0.0001330 +2025-02-26 12:52:31,568 Epoch 179/2000 +2025-02-26 13:07:35,722 Current Learning Rate: 0.0000269573 +2025-02-26 13:07:35,723 Train Loss: 0.0001342, Val Loss: 0.0001334 +2025-02-26 13:07:35,723 Epoch 180/2000 +2025-02-26 13:22:38,587 Current Learning Rate: 0.0000244717 +2025-02-26 13:22:39,540 Train Loss: 0.0001320, Val Loss: 0.0001326 +2025-02-26 13:22:39,545 Epoch 181/2000 +2025-02-26 13:37:42,509 Current Learning Rate: 0.0000221035 +2025-02-26 13:37:43,447 Train Loss: 0.0001246, Val Loss: 0.0001326 +2025-02-26 13:37:43,447 Epoch 182/2000 +2025-02-26 13:52:46,364 Current Learning Rate: 0.0000198532 +2025-02-26 13:52:46,366 Train Loss: 0.0001256, Val Loss: 0.0001330 +2025-02-26 13:52:46,366 Epoch 183/2000 +2025-02-26 14:07:49,211 Current Learning Rate: 0.0000177213 +2025-02-26 14:07:50,119 Train Loss: 0.0001099, Val Loss: 0.0001324 +2025-02-26 14:07:50,119 Epoch 184/2000 +2025-02-26 14:22:53,008 Current Learning Rate: 0.0000157084 +2025-02-26 14:22:53,009 Train Loss: 0.0001182, Val Loss: 0.0001325 +2025-02-26 14:22:53,010 Epoch 185/2000 +2025-02-26 14:37:56,894 Current Learning Rate: 0.0000138150 +2025-02-26 14:37:57,763 Train Loss: 0.0001106, Val Loss: 0.0001319 +2025-02-26 14:37:57,764 Epoch 186/2000 +2025-02-26 14:53:00,217 Current Learning Rate: 0.0000120416 +2025-02-26 14:53:00,218 Train Loss: 0.0001201, Val Loss: 0.0001323 +2025-02-26 14:53:00,219 Epoch 187/2000 +2025-02-26 15:08:03,536 Current Learning Rate: 0.0000103886 +2025-02-26 15:08:04,417 Train Loss: 0.0001238, Val Loss: 0.0001318 +2025-02-26 15:08:04,418 Epoch 188/2000 +2025-02-26 15:23:07,546 Current Learning Rate: 0.0000088564 +2025-02-26 15:23:08,505 Train Loss: 0.0001243, Val Loss: 0.0001317 +2025-02-26 15:23:08,505 Epoch 189/2000 +2025-02-26 15:38:11,730 Current Learning Rate: 0.0000074453 +2025-02-26 
15:38:11,731 Train Loss: 0.0001103, Val Loss: 0.0001318 +2025-02-26 15:38:11,732 Epoch 190/2000 +2025-02-26 15:53:14,555 Current Learning Rate: 0.0000061558 +2025-02-26 15:53:14,555 Train Loss: 0.0001181, Val Loss: 0.0001319 +2025-02-26 15:53:14,556 Epoch 191/2000 +2025-02-26 16:08:17,803 Current Learning Rate: 0.0000049882 +2025-02-26 16:08:18,688 Train Loss: 0.0000986, Val Loss: 0.0001317 +2025-02-26 16:08:18,689 Epoch 192/2000 +2025-02-26 16:23:21,826 Current Learning Rate: 0.0000039426 +2025-02-26 16:23:22,695 Train Loss: 0.0001135, Val Loss: 0.0001315 +2025-02-26 16:23:22,695 Epoch 193/2000 +2025-02-26 16:38:25,031 Current Learning Rate: 0.0000030195 +2025-02-26 16:38:25,903 Train Loss: 0.0001078, Val Loss: 0.0001315 +2025-02-26 16:38:25,904 Epoch 194/2000 +2025-02-26 16:53:28,657 Current Learning Rate: 0.0000022190 +2025-02-26 16:53:28,658 Train Loss: 0.0001342, Val Loss: 0.0001315 +2025-02-26 16:53:28,658 Epoch 195/2000 +2025-02-26 17:08:32,752 Current Learning Rate: 0.0000015413 +2025-02-26 17:08:33,688 Train Loss: 0.0001054, Val Loss: 0.0001314 +2025-02-26 17:08:33,689 Epoch 196/2000 +2025-02-26 17:23:36,888 Current Learning Rate: 0.0000009866 +2025-02-26 17:23:37,744 Train Loss: 0.0001050, Val Loss: 0.0001314 +2025-02-26 17:23:37,744 Epoch 197/2000 +2025-02-26 17:38:40,602 Current Learning Rate: 0.0000005551 +2025-02-26 17:38:40,602 Train Loss: 0.0001236, Val Loss: 0.0001314 +2025-02-26 17:38:40,602 Epoch 198/2000 +2025-02-26 17:53:43,745 Current Learning Rate: 0.0000002467 +2025-02-26 17:53:43,748 Train Loss: 0.0001136, Val Loss: 0.0001315 +2025-02-26 17:53:43,748 Epoch 199/2000 +2025-02-26 18:08:47,237 Current Learning Rate: 0.0000000617 +2025-02-26 18:08:47,238 Train Loss: 0.0001300, Val Loss: 0.0001314 +2025-02-26 18:08:47,238 Epoch 200/2000 +2025-02-26 18:23:50,389 Current Learning Rate: 0.0000000000 +2025-02-26 18:23:50,389 Train Loss: 0.0001223, Val Loss: 0.0001314 +2025-02-26 18:23:50,389 Epoch 201/2000 +2025-02-26 18:38:53,661 Current Learning 
Rate: 0.0000000617 +2025-02-26 18:38:54,584 Train Loss: 0.0001189, Val Loss: 0.0001313 +2025-02-26 18:38:54,585 Epoch 202/2000 +2025-02-26 18:53:58,223 Current Learning Rate: 0.0000002467 +2025-02-26 18:53:59,159 Train Loss: 0.0001218, Val Loss: 0.0001313 +2025-02-26 18:53:59,159 Epoch 203/2000 +2025-02-26 19:09:03,810 Current Learning Rate: 0.0000005551 +2025-02-26 19:09:04,753 Train Loss: 0.0001125, Val Loss: 0.0001313 +2025-02-26 19:09:04,753 Epoch 204/2000 +2025-02-26 19:24:07,519 Current Learning Rate: 0.0000009866 +2025-02-26 19:24:07,520 Train Loss: 0.0001168, Val Loss: 0.0001314 +2025-02-26 19:24:07,520 Epoch 205/2000 +2025-02-26 19:39:10,339 Current Learning Rate: 0.0000015413 +2025-02-26 19:39:10,339 Train Loss: 0.0001267, Val Loss: 0.0001314 +2025-02-26 19:39:10,339 Epoch 206/2000 +2025-02-26 19:54:13,213 Current Learning Rate: 0.0000022190 +2025-02-26 19:54:13,214 Train Loss: 0.0001077, Val Loss: 0.0001314 +2025-02-26 19:54:13,214 Epoch 207/2000 +2025-02-26 20:09:17,196 Current Learning Rate: 0.0000030195 +2025-02-26 20:09:17,197 Train Loss: 0.0001119, Val Loss: 0.0001313 +2025-02-26 20:09:17,197 Epoch 208/2000 +2025-02-26 20:24:20,157 Current Learning Rate: 0.0000039426 +2025-02-26 20:24:20,158 Train Loss: 0.0001094, Val Loss: 0.0001314 +2025-02-26 20:24:20,158 Epoch 209/2000 +2025-02-26 20:39:23,276 Current Learning Rate: 0.0000049882 +2025-02-26 20:39:23,277 Train Loss: 0.0001084, Val Loss: 0.0001315 +2025-02-26 20:39:23,277 Epoch 210/2000 +2025-02-26 20:54:26,514 Current Learning Rate: 0.0000061558 +2025-02-26 20:54:26,515 Train Loss: 0.0001076, Val Loss: 0.0001315 +2025-02-26 20:54:26,515 Epoch 211/2000 +2025-02-26 21:09:29,910 Current Learning Rate: 0.0000074453 +2025-02-26 21:09:29,911 Train Loss: 0.0001316, Val Loss: 0.0001318 +2025-02-26 21:09:29,911 Epoch 212/2000 +2025-02-26 21:24:32,745 Current Learning Rate: 0.0000088564 +2025-02-26 21:24:32,745 Train Loss: 0.0001302, Val Loss: 0.0001317 +2025-02-26 21:24:32,746 Epoch 213/2000 +2025-02-26 
21:39:39,211 Current Learning Rate: 0.0000103886 +2025-02-26 21:39:39,211 Train Loss: 0.0001150, Val Loss: 0.0001316 +2025-02-26 21:39:39,212 Epoch 214/2000 +2025-02-26 21:54:43,274 Current Learning Rate: 0.0000120416 +2025-02-26 21:54:43,274 Train Loss: 0.0001056, Val Loss: 0.0001314 +2025-02-26 21:54:43,275 Epoch 215/2000 +2025-02-26 22:09:46,383 Current Learning Rate: 0.0000138150 +2025-02-26 22:09:46,383 Train Loss: 0.0001266, Val Loss: 0.0001315 +2025-02-26 22:09:46,383 Epoch 216/2000 +2025-02-26 22:24:50,671 Current Learning Rate: 0.0000157084 +2025-02-26 22:24:50,672 Train Loss: 0.0001133, Val Loss: 0.0001316 +2025-02-26 22:24:50,672 Epoch 217/2000 +2025-02-26 22:39:54,065 Current Learning Rate: 0.0000177213 +2025-02-26 22:39:54,065 Train Loss: 0.0001164, Val Loss: 0.0001315 +2025-02-26 22:39:54,065 Epoch 218/2000 +2025-02-26 22:54:57,914 Current Learning Rate: 0.0000198532 +2025-02-26 22:54:57,915 Train Loss: 0.0001242, Val Loss: 0.0001315 +2025-02-26 22:54:57,916 Epoch 219/2000 +2025-02-26 23:10:02,613 Current Learning Rate: 0.0000221035 +2025-02-26 23:10:02,613 Train Loss: 0.0001131, Val Loss: 0.0001315 +2025-02-26 23:10:02,613 Epoch 220/2000 +2025-02-26 23:25:05,545 Current Learning Rate: 0.0000244717 +2025-02-26 23:25:05,546 Train Loss: 0.0001155, Val Loss: 0.0001316 +2025-02-26 23:25:05,546 Epoch 221/2000 +2025-02-26 23:40:08,698 Current Learning Rate: 0.0000269573 +2025-02-26 23:40:08,699 Train Loss: 0.0001290, Val Loss: 0.0001316 +2025-02-26 23:40:08,699 Epoch 222/2000 +2025-02-26 23:55:11,517 Current Learning Rate: 0.0000295596 +2025-02-26 23:55:11,517 Train Loss: 0.0000971, Val Loss: 0.0001316 +2025-02-26 23:55:11,518 Epoch 223/2000 +2025-02-27 00:10:16,125 Current Learning Rate: 0.0000322780 +2025-02-27 00:10:16,125 Train Loss: 0.0001354, Val Loss: 0.0001318 +2025-02-27 00:10:16,126 Epoch 224/2000 +2025-02-27 00:25:20,094 Current Learning Rate: 0.0000351118 +2025-02-27 00:25:20,094 Train Loss: 0.0001217, Val Loss: 0.0001315 +2025-02-27 
00:25:20,094 Epoch 225/2000 +2025-02-27 00:40:22,239 Current Learning Rate: 0.0000380602 +2025-02-27 00:40:22,239 Train Loss: 0.0001183, Val Loss: 0.0001320 +2025-02-27 00:40:22,240 Epoch 226/2000 +2025-02-27 00:55:25,038 Current Learning Rate: 0.0000411227 +2025-02-27 00:55:25,038 Train Loss: 0.0001081, Val Loss: 0.0001317 +2025-02-27 00:55:25,039 Epoch 227/2000 +2025-02-27 01:10:27,836 Current Learning Rate: 0.0000442984 +2025-02-27 01:10:27,837 Train Loss: 0.0001040, Val Loss: 0.0001317 +2025-02-27 01:10:27,837 Epoch 228/2000 +2025-02-27 01:25:30,659 Current Learning Rate: 0.0000475865 +2025-02-27 01:25:30,660 Train Loss: 0.0001060, Val Loss: 0.0001314 +2025-02-27 01:25:30,660 Epoch 229/2000 +2025-02-27 01:40:32,607 Current Learning Rate: 0.0000509862 +2025-02-27 01:40:32,607 Train Loss: 0.0001090, Val Loss: 0.0001316 +2025-02-27 01:40:32,607 Epoch 230/2000 +2025-02-27 01:55:35,686 Current Learning Rate: 0.0000544967 +2025-02-27 01:55:35,686 Train Loss: 0.0001090, Val Loss: 0.0001315 +2025-02-27 01:55:35,686 Epoch 231/2000 +2025-02-27 02:10:39,291 Current Learning Rate: 0.0000581172 +2025-02-27 02:10:39,293 Train Loss: 0.0001057, Val Loss: 0.0001320 +2025-02-27 02:10:39,293 Epoch 232/2000 +2025-02-27 02:25:41,795 Current Learning Rate: 0.0000618467 +2025-02-27 02:25:41,795 Train Loss: 0.0001204, Val Loss: 0.0001317 +2025-02-27 02:25:41,796 Epoch 233/2000 +2025-02-27 02:40:44,566 Current Learning Rate: 0.0000656842 +2025-02-27 02:40:44,567 Train Loss: 0.0001237, Val Loss: 0.0001340 +2025-02-27 02:40:44,567 Epoch 234/2000 +2025-02-27 02:55:47,331 Current Learning Rate: 0.0000696290 +2025-02-27 02:55:47,332 Train Loss: 0.0001249, Val Loss: 0.0001319 +2025-02-27 02:55:47,332 Epoch 235/2000 +2025-02-27 03:10:49,837 Current Learning Rate: 0.0000736799 +2025-02-27 03:10:49,837 Train Loss: 0.0001076, Val Loss: 0.0001318 +2025-02-27 03:10:49,837 Epoch 236/2000 +2025-02-27 03:25:53,111 Current Learning Rate: 0.0000778360 +2025-02-27 03:25:53,111 Train Loss: 0.0001014, Val 
Loss: 0.0001318 +2025-02-27 03:25:53,112 Epoch 237/2000 +2025-02-27 03:40:54,991 Current Learning Rate: 0.0000820963 +2025-02-27 03:40:54,992 Train Loss: 0.0001204, Val Loss: 0.0001326 +2025-02-27 03:40:54,992 Epoch 238/2000 +2025-02-27 03:55:57,428 Current Learning Rate: 0.0000864597 +2025-02-27 03:55:57,428 Train Loss: 0.0001292, Val Loss: 0.0001323 +2025-02-27 03:55:57,428 Epoch 239/2000 +2025-02-27 04:11:01,401 Current Learning Rate: 0.0000909251 +2025-02-27 04:11:01,401 Train Loss: 0.0001331, Val Loss: 0.0001328 +2025-02-27 04:11:01,402 Epoch 240/2000 +2025-02-27 04:26:03,985 Current Learning Rate: 0.0000954915 +2025-02-27 04:26:03,986 Train Loss: 0.0001264, Val Loss: 0.0001329 +2025-02-27 04:26:03,986 Epoch 241/2000 +2025-02-27 04:41:06,543 Current Learning Rate: 0.0001001577 +2025-02-27 04:41:06,544 Train Loss: 0.0001080, Val Loss: 0.0001335 +2025-02-27 04:41:06,544 Epoch 242/2000 +2025-02-27 04:56:09,604 Current Learning Rate: 0.0001049225 +2025-02-27 04:56:09,604 Train Loss: 0.0001296, Val Loss: 0.0001346 +2025-02-27 04:56:09,605 Epoch 243/2000 +2025-02-27 05:11:13,177 Current Learning Rate: 0.0001097848 +2025-02-27 05:11:13,177 Train Loss: 0.0001171, Val Loss: 0.0001318 +2025-02-27 05:11:13,178 Epoch 244/2000 +2025-02-27 05:26:15,938 Current Learning Rate: 0.0001147434 +2025-02-27 05:26:15,938 Train Loss: 0.0001198, Val Loss: 0.0001332 +2025-02-27 05:26:15,938 Epoch 245/2000 +2025-02-27 05:41:18,762 Current Learning Rate: 0.0001197970 +2025-02-27 05:41:18,762 Train Loss: 0.0001143, Val Loss: 0.0001324 +2025-02-27 05:41:18,762 Epoch 246/2000 +2025-02-27 05:56:22,400 Current Learning Rate: 0.0001249445 +2025-02-27 05:56:22,401 Train Loss: 0.0001046, Val Loss: 0.0001320 +2025-02-27 05:56:22,401 Epoch 247/2000 +2025-02-27 06:11:24,997 Current Learning Rate: 0.0001301845 +2025-02-27 06:11:24,997 Train Loss: 0.0001314, Val Loss: 0.0001352 +2025-02-27 06:11:24,997 Epoch 248/2000 +2025-02-27 06:26:27,669 Current Learning Rate: 0.0001355157 +2025-02-27 
06:26:27,669 Train Loss: 0.0001103, Val Loss: 0.0001313 +2025-02-27 06:26:27,670 Epoch 249/2000 +2025-02-27 06:41:30,258 Current Learning Rate: 0.0001409369 +2025-02-27 06:41:30,258 Train Loss: 0.0001403, Val Loss: 0.0001331 +2025-02-27 06:41:30,258 Epoch 250/2000 +2025-02-27 06:56:33,068 Current Learning Rate: 0.0001464466 +2025-02-27 06:56:33,068 Train Loss: 0.0001261, Val Loss: 0.0001333 +2025-02-27 06:56:33,068 Epoch 251/2000 +2025-02-27 07:11:35,732 Current Learning Rate: 0.0001520436 +2025-02-27 07:11:36,584 Train Loss: 0.0001162, Val Loss: 0.0001312 +2025-02-27 07:11:36,584 Epoch 252/2000 +2025-02-27 07:26:39,694 Current Learning Rate: 0.0001577264 +2025-02-27 07:26:39,695 Train Loss: 0.0001201, Val Loss: 0.0001348 +2025-02-27 07:26:39,695 Epoch 253/2000 +2025-02-27 07:41:42,037 Current Learning Rate: 0.0001634937 +2025-02-27 07:41:42,038 Train Loss: 0.0001229, Val Loss: 0.0001329 +2025-02-27 07:41:42,038 Epoch 254/2000 +2025-02-27 07:56:44,790 Current Learning Rate: 0.0001693441 +2025-02-27 07:56:44,791 Train Loss: 0.0001422, Val Loss: 0.0001334 +2025-02-27 07:56:44,791 Epoch 255/2000 +2025-02-27 08:11:47,741 Current Learning Rate: 0.0001752760 +2025-02-27 08:11:47,742 Train Loss: 0.0001174, Val Loss: 0.0001365 +2025-02-27 08:11:47,742 Epoch 256/2000 +2025-02-27 08:26:50,235 Current Learning Rate: 0.0001812880 +2025-02-27 08:26:50,235 Train Loss: 0.0001229, Val Loss: 0.0001323 +2025-02-27 08:26:50,236 Epoch 257/2000 +2025-02-27 08:41:54,224 Current Learning Rate: 0.0001873787 +2025-02-27 08:41:54,225 Train Loss: 0.0001053, Val Loss: 0.0001316 +2025-02-27 08:41:54,225 Epoch 258/2000 +2025-02-27 08:56:57,018 Current Learning Rate: 0.0001935465 +2025-02-27 08:56:57,019 Train Loss: 0.0001139, Val Loss: 0.0001333 +2025-02-27 08:56:57,019 Epoch 259/2000 +2025-02-27 09:12:00,829 Current Learning Rate: 0.0001997899 +2025-02-27 09:12:00,829 Train Loss: 0.0001159, Val Loss: 0.0001313 +2025-02-27 09:12:00,830 Epoch 260/2000 +2025-02-27 09:27:03,550 Current Learning 
Rate: 0.0002061074 +2025-02-27 09:27:03,551 Train Loss: 0.0001287, Val Loss: 0.0001317 +2025-02-27 09:27:03,551 Epoch 261/2000 +2025-02-27 09:42:06,571 Current Learning Rate: 0.0002124974 +2025-02-27 09:42:06,571 Train Loss: 0.0001176, Val Loss: 0.0001314 +2025-02-27 09:42:06,571 Epoch 262/2000 +2025-02-27 09:57:09,324 Current Learning Rate: 0.0002189583 +2025-02-27 09:57:09,324 Train Loss: 0.0001262, Val Loss: 0.0001407 +2025-02-27 09:57:09,324 Epoch 263/2000 +2025-02-27 10:12:11,736 Current Learning Rate: 0.0002254886 +2025-02-27 10:12:11,737 Train Loss: 0.0001265, Val Loss: 0.0001454 +2025-02-27 10:12:11,737 Epoch 264/2000 +2025-02-27 10:27:15,267 Current Learning Rate: 0.0002320866 +2025-02-27 10:27:15,267 Train Loss: 0.0001205, Val Loss: 0.0001345 +2025-02-27 10:27:15,267 Epoch 265/2000 +2025-02-27 10:42:18,336 Current Learning Rate: 0.0002387507 +2025-02-27 10:42:18,337 Train Loss: 0.0001247, Val Loss: 0.0001421 +2025-02-27 10:42:18,337 Epoch 266/2000 +2025-02-27 10:57:21,156 Current Learning Rate: 0.0002454793 +2025-02-27 10:57:21,157 Train Loss: 0.0001377, Val Loss: 0.0001345 +2025-02-27 10:57:21,157 Epoch 267/2000 +2025-02-27 11:12:24,034 Current Learning Rate: 0.0002522707 +2025-02-27 11:12:24,034 Train Loss: 0.0001154, Val Loss: 0.0001381 +2025-02-27 11:12:24,034 Epoch 268/2000 +2025-02-27 11:27:27,369 Current Learning Rate: 0.0002591232 +2025-02-27 11:27:27,369 Train Loss: 0.0001191, Val Loss: 0.0001387 +2025-02-27 11:27:27,369 Epoch 269/2000 +2025-02-27 11:42:30,882 Current Learning Rate: 0.0002660351 +2025-02-27 11:42:30,882 Train Loss: 0.0001209, Val Loss: 0.0001334 +2025-02-27 11:42:30,883 Epoch 270/2000 +2025-02-27 11:57:34,284 Current Learning Rate: 0.0002730048 +2025-02-27 11:57:34,284 Train Loss: 0.0001306, Val Loss: 0.0001322 +2025-02-27 11:57:34,284 Epoch 271/2000 +2025-02-27 12:12:37,587 Current Learning Rate: 0.0002800304 +2025-02-27 12:12:37,588 Train Loss: 0.0001657, Val Loss: 0.0001433 +2025-02-27 12:12:37,588 Epoch 272/2000 +2025-02-27 
12:27:40,782 Current Learning Rate: 0.0002871104 +2025-02-27 12:27:40,782 Train Loss: 0.0001331, Val Loss: 0.0001353 +2025-02-27 12:27:40,783 Epoch 273/2000 +2025-02-27 12:42:44,280 Current Learning Rate: 0.0002942428 +2025-02-27 12:42:44,281 Train Loss: 0.0001476, Val Loss: 0.0001604 +2025-02-27 12:42:44,281 Epoch 274/2000 +2025-02-27 12:57:47,501 Current Learning Rate: 0.0003014261 +2025-02-27 12:57:47,502 Train Loss: 0.0001318, Val Loss: 0.0001415 +2025-02-27 12:57:47,502 Epoch 275/2000 +2025-02-27 13:12:50,147 Current Learning Rate: 0.0003086583 +2025-02-27 13:12:50,147 Train Loss: 0.0001140, Val Loss: 0.0001337 +2025-02-27 13:12:50,147 Epoch 276/2000 +2025-02-27 13:27:52,779 Current Learning Rate: 0.0003159377 +2025-02-27 13:27:52,779 Train Loss: 0.0001510, Val Loss: 0.0001521 +2025-02-27 13:27:52,779 Epoch 277/2000 +2025-02-27 13:42:56,794 Current Learning Rate: 0.0003232626 +2025-02-27 13:42:56,795 Train Loss: 0.0001491, Val Loss: 0.0001541 +2025-02-27 13:42:56,795 Epoch 278/2000 +2025-02-27 13:58:00,228 Current Learning Rate: 0.0003306310 +2025-02-27 13:58:00,229 Train Loss: 0.0001157, Val Loss: 0.0001385 +2025-02-27 13:58:00,229 Epoch 279/2000 +2025-02-27 14:13:03,161 Current Learning Rate: 0.0003380413 +2025-02-27 14:13:03,161 Train Loss: 0.0001222, Val Loss: 0.0001423 +2025-02-27 14:13:03,161 Epoch 280/2000 +2025-02-27 14:28:07,171 Current Learning Rate: 0.0003454915 +2025-02-27 14:28:07,171 Train Loss: 0.0001196, Val Loss: 0.0001387 +2025-02-27 14:28:07,171 Epoch 281/2000 +2025-02-27 14:43:11,403 Current Learning Rate: 0.0003529798 +2025-02-27 14:43:11,403 Train Loss: 0.0001595, Val Loss: 0.0001399 +2025-02-27 14:43:11,403 Epoch 282/2000 +2025-02-27 14:58:14,979 Current Learning Rate: 0.0003605044 +2025-02-27 14:58:14,980 Train Loss: 0.0001215, Val Loss: 0.0001348 +2025-02-27 14:58:14,980 Epoch 283/2000 +2025-02-27 15:13:18,084 Current Learning Rate: 0.0003680635 +2025-02-27 15:13:18,085 Train Loss: 0.0001353, Val Loss: 0.0001416 +2025-02-27 
15:13:18,085 Epoch 284/2000 +2025-02-27 15:28:21,999 Current Learning Rate: 0.0003756551 +2025-02-27 15:28:21,999 Train Loss: 0.0001235, Val Loss: 0.0001543 +2025-02-27 15:28:22,000 Epoch 285/2000 +2025-02-27 15:43:25,003 Current Learning Rate: 0.0003832773 +2025-02-27 15:43:25,003 Train Loss: 0.0001204, Val Loss: 0.0001522 +2025-02-27 15:43:25,003 Epoch 286/2000 +2025-02-27 15:58:28,628 Current Learning Rate: 0.0003909284 +2025-02-27 15:58:28,629 Train Loss: 0.0001570, Val Loss: 0.0001703 +2025-02-27 15:58:28,629 Epoch 287/2000 +2025-02-27 16:13:32,044 Current Learning Rate: 0.0003986064 +2025-02-27 16:13:32,045 Train Loss: 0.0001267, Val Loss: 0.0001384 +2025-02-27 16:13:32,045 Epoch 288/2000 +2025-02-27 16:28:35,531 Current Learning Rate: 0.0004063093 +2025-02-27 16:28:35,532 Train Loss: 0.0001498, Val Loss: 0.0001506 +2025-02-27 16:28:35,532 Epoch 289/2000 +2025-02-27 16:43:39,376 Current Learning Rate: 0.0004140354 +2025-02-27 16:43:39,376 Train Loss: 0.0001196, Val Loss: 0.0001419 +2025-02-27 16:43:39,376 Epoch 290/2000 +2025-02-27 16:58:42,664 Current Learning Rate: 0.0004217828 +2025-02-27 16:58:42,664 Train Loss: 0.0001333, Val Loss: 0.0001425 +2025-02-27 16:58:42,665 Epoch 291/2000 +2025-02-27 17:13:47,253 Current Learning Rate: 0.0004295494 +2025-02-27 17:13:47,253 Train Loss: 0.0001548, Val Loss: 0.0001493 +2025-02-27 17:13:47,254 Epoch 292/2000 +2025-02-27 17:28:50,101 Current Learning Rate: 0.0004373334 +2025-02-27 17:28:50,102 Train Loss: 0.0001495, Val Loss: 0.0001432 +2025-02-27 17:28:50,102 Epoch 293/2000 +2025-02-27 17:43:53,258 Current Learning Rate: 0.0004451328 +2025-02-27 17:43:53,258 Train Loss: 0.0001432, Val Loss: 0.0001589 +2025-02-27 17:43:53,259 Epoch 294/2000 +2025-02-27 17:58:56,742 Current Learning Rate: 0.0004529458 +2025-02-27 17:58:56,743 Train Loss: 0.0001250, Val Loss: 0.0001421 +2025-02-27 17:58:56,743 Epoch 295/2000 +2025-02-27 18:14:00,023 Current Learning Rate: 0.0004607705 +2025-02-27 18:14:00,023 Train Loss: 0.0001608, Val 
Loss: 0.0001542 +2025-02-27 18:14:00,024 Epoch 296/2000 +2025-02-27 18:29:03,394 Current Learning Rate: 0.0004686047 +2025-02-27 18:29:03,394 Train Loss: 0.0001463, Val Loss: 0.0001644 +2025-02-27 18:29:03,397 Epoch 297/2000 +2025-02-27 18:44:06,864 Current Learning Rate: 0.0004764468 +2025-02-27 18:44:06,865 Train Loss: 0.0001613, Val Loss: 0.0001482 +2025-02-27 18:44:06,865 Epoch 298/2000 +2025-02-27 18:59:09,949 Current Learning Rate: 0.0004842946 +2025-02-27 18:59:09,950 Train Loss: 0.0001474, Val Loss: 0.0001603 +2025-02-27 18:59:09,950 Epoch 299/2000 +2025-02-27 19:14:12,955 Current Learning Rate: 0.0004921463 +2025-02-27 19:14:12,956 Train Loss: 0.0001619, Val Loss: 0.0001543 +2025-02-27 19:14:12,956 Epoch 300/2000 +2025-02-27 19:29:15,939 Current Learning Rate: 0.0005000000 +2025-02-27 19:29:15,939 Train Loss: 0.0001659, Val Loss: 0.0001737 +2025-02-27 19:29:15,940 Epoch 301/2000 +2025-02-27 19:44:20,023 Current Learning Rate: 0.0005078537 +2025-02-27 19:44:20,023 Train Loss: 0.0001540, Val Loss: 0.0001577 +2025-02-27 19:44:20,024 Epoch 302/2000 +2025-02-27 19:59:24,439 Current Learning Rate: 0.0005157054 +2025-02-27 19:59:24,440 Train Loss: 0.0001767, Val Loss: 0.0001527 +2025-02-27 19:59:24,440 Epoch 303/2000 +2025-02-27 20:14:27,866 Current Learning Rate: 0.0005235532 +2025-02-27 20:14:27,867 Train Loss: 0.0001547, Val Loss: 0.0001607 +2025-02-27 20:14:27,867 Epoch 304/2000 +2025-02-27 20:29:31,234 Current Learning Rate: 0.0005313953 +2025-02-27 20:29:31,234 Train Loss: 0.0001848, Val Loss: 0.0001495 +2025-02-27 20:29:31,235 Epoch 305/2000 +2025-02-27 20:44:34,292 Current Learning Rate: 0.0005392295 +2025-02-27 20:44:34,293 Train Loss: 0.0001771, Val Loss: 0.0001666 +2025-02-27 20:44:34,293 Epoch 306/2000 +2025-03-01 21:01:11,136 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-01 21:01:54,517 Loading best model from checkpoint. +2025-03-01 21:01:59,587 Error loading model checkpoint during testing: CUDA out of memory. 
Tried to allocate 12.50 GiB (GPU 0; 39.59 GiB total capacity; 18.45 GiB already allocated; 1.41 GiB free; 36.28 GiB reserved in total by PyTorch) +2025-03-01 21:02:21,017 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-01 21:03:04,677 Loading best model from checkpoint. +2025-03-01 21:06:55,192 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_64_20250323_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_64_20250323_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..deb5bfed3b8ba6e2430acb79013d25f0239c93c3 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp1_64_20250323_training_log.log @@ -0,0 +1,2357 @@ +2025-03-23 14:01:50,685 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-23 14:01:50,724 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-23 14:01:50,732 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-23 14:01:50,751 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-23 14:01:50,792 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-23 14:01:50,797 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-23 14:01:50,806 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-23 14:01:50,808 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-23 14:02:57,255 Epoch 1/2000 +2025-03-23 14:02:59,259 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,260 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,260 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,269 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,269 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,270 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:02:59,270 Reducer buckets have been rebuilt in this iteration. 
+2025-03-23 14:02:59,270 Reducer buckets have been rebuilt in this iteration. +2025-03-23 14:04:43,796 Current Learning Rate: 0.0009999383 +2025-03-23 14:04:44,618 Train Loss: 0.0458504, Val Loss: 0.0152245 +2025-03-23 14:04:44,618 Epoch 2/2000 +2025-03-23 14:06:30,020 Current Learning Rate: 0.0009997533 +2025-03-23 14:06:30,882 Train Loss: 0.0148973, Val Loss: 0.0139330 +2025-03-23 14:06:30,883 Epoch 3/2000 +2025-03-23 14:08:16,691 Current Learning Rate: 0.0009994449 +2025-03-23 14:08:17,582 Train Loss: 0.0120852, Val Loss: 0.0085270 +2025-03-23 14:08:17,583 Epoch 4/2000 +2025-03-23 14:10:03,566 Current Learning Rate: 0.0009990134 +2025-03-23 14:10:04,403 Train Loss: 0.0070812, Val Loss: 0.0059078 +2025-03-23 14:10:04,404 Epoch 5/2000 +2025-03-23 14:11:50,240 Current Learning Rate: 0.0009984587 +2025-03-23 14:11:51,117 Train Loss: 0.0055814, Val Loss: 0.0049097 +2025-03-23 14:11:51,118 Epoch 6/2000 +2025-03-23 14:13:37,323 Current Learning Rate: 0.0009977810 +2025-03-23 14:13:38,146 Train Loss: 0.0047863, Val Loss: 0.0043249 +2025-03-23 14:13:38,146 Epoch 7/2000 +2025-03-23 14:15:24,349 Current Learning Rate: 0.0009969805 +2025-03-23 14:15:25,174 Train Loss: 0.0042014, Val Loss: 0.0038307 +2025-03-23 14:15:25,174 Epoch 8/2000 +2025-03-23 14:17:11,901 Current Learning Rate: 0.0009960574 +2025-03-23 14:17:12,743 Train Loss: 0.0037845, Val Loss: 0.0037744 +2025-03-23 14:17:12,743 Epoch 9/2000 +2025-03-23 14:18:59,252 Current Learning Rate: 0.0009950118 +2025-03-23 14:18:59,252 Train Loss: 0.0034883, Val Loss: 0.0038579 +2025-03-23 14:18:59,253 Epoch 10/2000 +2025-03-23 14:20:45,718 Current Learning Rate: 0.0009938442 +2025-03-23 14:20:46,619 Train Loss: 0.0032466, Val Loss: 0.0030537 +2025-03-23 14:20:46,619 Epoch 11/2000 +2025-03-23 14:22:32,570 Current Learning Rate: 0.0009925547 +2025-03-23 14:22:33,524 Train Loss: 0.0030135, Val Loss: 0.0028341 +2025-03-23 14:22:33,524 Epoch 12/2000 +2025-03-23 14:24:20,043 Current Learning Rate: 0.0009911436 +2025-03-23 
14:24:20,911 Train Loss: 0.0027891, Val Loss: 0.0026320 +2025-03-23 14:24:20,911 Epoch 13/2000 +2025-03-23 14:26:07,638 Current Learning Rate: 0.0009896114 +2025-03-23 14:26:08,472 Train Loss: 0.0025856, Val Loss: 0.0024929 +2025-03-23 14:26:08,472 Epoch 14/2000 +2025-03-23 14:27:54,975 Current Learning Rate: 0.0009879584 +2025-03-23 14:27:55,861 Train Loss: 0.0023875, Val Loss: 0.0022572 +2025-03-23 14:27:55,861 Epoch 15/2000 +2025-03-23 14:29:42,922 Current Learning Rate: 0.0009861850 +2025-03-23 14:29:43,808 Train Loss: 0.0021858, Val Loss: 0.0021389 +2025-03-23 14:29:43,808 Epoch 16/2000 +2025-03-23 14:31:30,270 Current Learning Rate: 0.0009842916 +2025-03-23 14:31:31,140 Train Loss: 0.0020350, Val Loss: 0.0020107 +2025-03-23 14:31:31,141 Epoch 17/2000 +2025-03-23 14:33:17,585 Current Learning Rate: 0.0009822787 +2025-03-23 14:33:18,478 Train Loss: 0.0018338, Val Loss: 0.0017023 +2025-03-23 14:33:18,478 Epoch 18/2000 +2025-03-23 14:35:04,809 Current Learning Rate: 0.0009801468 +2025-03-23 14:35:05,676 Train Loss: 0.0016798, Val Loss: 0.0015693 +2025-03-23 14:35:05,676 Epoch 19/2000 +2025-03-23 14:36:51,993 Current Learning Rate: 0.0009778965 +2025-03-23 14:36:52,858 Train Loss: 0.0015461, Val Loss: 0.0015507 +2025-03-23 14:36:52,858 Epoch 20/2000 +2025-03-23 14:38:39,231 Current Learning Rate: 0.0009755283 +2025-03-23 14:38:40,100 Train Loss: 0.0014080, Val Loss: 0.0013611 +2025-03-23 14:38:40,100 Epoch 21/2000 +2025-03-23 14:40:26,179 Current Learning Rate: 0.0009730427 +2025-03-23 14:40:27,037 Train Loss: 0.0012863, Val Loss: 0.0012412 +2025-03-23 14:40:27,037 Epoch 22/2000 +2025-03-23 14:42:13,383 Current Learning Rate: 0.0009704404 +2025-03-23 14:42:14,265 Train Loss: 0.0012300, Val Loss: 0.0012034 +2025-03-23 14:42:14,265 Epoch 23/2000 +2025-03-23 14:44:00,848 Current Learning Rate: 0.0009677220 +2025-03-23 14:44:01,863 Train Loss: 0.0011427, Val Loss: 0.0011505 +2025-03-23 14:44:01,864 Epoch 24/2000 +2025-03-23 14:45:48,273 Current Learning Rate: 
0.0009648882 +2025-03-23 14:45:49,221 Train Loss: 0.0009912, Val Loss: 0.0010506 +2025-03-23 14:45:49,221 Epoch 25/2000 +2025-03-23 14:47:35,728 Current Learning Rate: 0.0009619398 +2025-03-23 14:47:36,629 Train Loss: 0.0009824, Val Loss: 0.0010170 +2025-03-23 14:47:36,629 Epoch 26/2000 +2025-03-23 14:49:22,313 Current Learning Rate: 0.0009588773 +2025-03-23 14:49:23,239 Train Loss: 0.0008817, Val Loss: 0.0009833 +2025-03-23 14:49:23,239 Epoch 27/2000 +2025-03-23 14:51:09,094 Current Learning Rate: 0.0009557016 +2025-03-23 14:51:09,972 Train Loss: 0.0009348, Val Loss: 0.0009520 +2025-03-23 14:51:09,973 Epoch 28/2000 +2025-03-23 14:52:56,109 Current Learning Rate: 0.0009524135 +2025-03-23 14:52:56,994 Train Loss: 0.0008819, Val Loss: 0.0008262 +2025-03-23 14:52:56,995 Epoch 29/2000 +2025-03-23 14:54:43,339 Current Learning Rate: 0.0009490138 +2025-03-23 14:54:44,283 Train Loss: 0.0007084, Val Loss: 0.0007473 +2025-03-23 14:54:44,283 Epoch 30/2000 +2025-03-23 14:56:30,191 Current Learning Rate: 0.0009455033 +2025-03-23 14:56:30,192 Train Loss: 0.0007202, Val Loss: 0.0007829 +2025-03-23 14:56:30,193 Epoch 31/2000 +2025-03-23 14:58:16,933 Current Learning Rate: 0.0009418828 +2025-03-23 14:58:17,829 Train Loss: 0.0007690, Val Loss: 0.0007118 +2025-03-23 14:58:17,829 Epoch 32/2000 +2025-03-23 15:00:04,153 Current Learning Rate: 0.0009381533 +2025-03-23 15:00:05,053 Train Loss: 0.0006823, Val Loss: 0.0006897 +2025-03-23 15:00:05,053 Epoch 33/2000 +2025-03-23 15:01:51,159 Current Learning Rate: 0.0009343158 +2025-03-23 15:01:52,033 Train Loss: 0.0006648, Val Loss: 0.0006427 +2025-03-23 15:01:52,033 Epoch 34/2000 +2025-03-23 15:03:38,323 Current Learning Rate: 0.0009303710 +2025-03-23 15:03:39,185 Train Loss: 0.0005647, Val Loss: 0.0006063 +2025-03-23 15:03:39,185 Epoch 35/2000 +2025-03-23 15:05:25,251 Current Learning Rate: 0.0009263201 +2025-03-23 15:05:25,251 Train Loss: 0.0006281, Val Loss: 0.0006134 +2025-03-23 15:05:25,252 Epoch 36/2000 +2025-03-23 15:07:11,684 
Current Learning Rate: 0.0009221640 +2025-03-23 15:07:11,685 Train Loss: 0.0006243, Val Loss: 0.0006244 +2025-03-23 15:07:11,685 Epoch 37/2000 +2025-03-23 15:08:58,574 Current Learning Rate: 0.0009179037 +2025-03-23 15:08:58,575 Train Loss: 0.0006147, Val Loss: 0.0006647 +2025-03-23 15:08:58,575 Epoch 38/2000 +2025-03-23 15:10:45,097 Current Learning Rate: 0.0009135403 +2025-03-23 15:10:45,959 Train Loss: 0.0005690, Val Loss: 0.0005520 +2025-03-23 15:10:45,959 Epoch 39/2000 +2025-03-23 15:12:31,669 Current Learning Rate: 0.0009090749 +2025-03-23 15:12:32,625 Train Loss: 0.0005424, Val Loss: 0.0005511 +2025-03-23 15:12:32,625 Epoch 40/2000 +2025-03-23 15:14:19,228 Current Learning Rate: 0.0009045085 +2025-03-23 15:14:19,228 Train Loss: 0.0004682, Val Loss: 0.0005550 +2025-03-23 15:14:19,229 Epoch 41/2000 +2025-03-23 15:16:05,839 Current Learning Rate: 0.0008998423 +2025-03-23 15:16:06,692 Train Loss: 0.0005461, Val Loss: 0.0005075 +2025-03-23 15:16:06,692 Epoch 42/2000 +2025-03-23 15:17:52,453 Current Learning Rate: 0.0008950775 +2025-03-23 15:17:52,454 Train Loss: 0.0004912, Val Loss: 0.0005107 +2025-03-23 15:17:52,454 Epoch 43/2000 +2025-03-23 15:19:39,074 Current Learning Rate: 0.0008902152 +2025-03-23 15:19:39,075 Train Loss: 0.0005176, Val Loss: 0.0005524 +2025-03-23 15:19:39,075 Epoch 44/2000 +2025-03-23 15:21:25,676 Current Learning Rate: 0.0008852566 +2025-03-23 15:21:25,677 Train Loss: 0.0004632, Val Loss: 0.0006566 +2025-03-23 15:21:25,678 Epoch 45/2000 +2025-03-23 15:23:12,617 Current Learning Rate: 0.0008802030 +2025-03-23 15:23:13,509 Train Loss: 0.0004703, Val Loss: 0.0004638 +2025-03-23 15:23:13,509 Epoch 46/2000 +2025-03-23 15:24:59,014 Current Learning Rate: 0.0008750555 +2025-03-23 15:24:59,015 Train Loss: 0.0004248, Val Loss: 0.0005088 +2025-03-23 15:24:59,016 Epoch 47/2000 +2025-03-23 15:26:45,321 Current Learning Rate: 0.0008698155 +2025-03-23 15:26:46,167 Train Loss: 0.0004295, Val Loss: 0.0004294 +2025-03-23 15:26:46,167 Epoch 48/2000 
+2025-03-23 15:28:32,497 Current Learning Rate: 0.0008644843 +2025-03-23 15:28:32,498 Train Loss: 0.0004214, Val Loss: 0.0005097 +2025-03-23 15:28:32,499 Epoch 49/2000 +2025-03-23 15:30:18,708 Current Learning Rate: 0.0008590631 +2025-03-23 15:30:19,557 Train Loss: 0.0004325, Val Loss: 0.0004260 +2025-03-23 15:30:19,558 Epoch 50/2000 +2025-03-23 15:32:05,634 Current Learning Rate: 0.0008535534 +2025-03-23 15:32:06,547 Train Loss: 0.0004121, Val Loss: 0.0004258 +2025-03-23 15:32:06,547 Epoch 51/2000 +2025-03-23 15:33:52,556 Current Learning Rate: 0.0008479564 +2025-03-23 15:33:53,435 Train Loss: 0.0003827, Val Loss: 0.0003868 +2025-03-23 15:33:53,435 Epoch 52/2000 +2025-03-23 15:35:39,743 Current Learning Rate: 0.0008422736 +2025-03-23 15:35:39,743 Train Loss: 0.0003851, Val Loss: 0.0003947 +2025-03-23 15:35:39,743 Epoch 53/2000 +2025-03-23 15:37:26,766 Current Learning Rate: 0.0008365063 +2025-03-23 15:37:26,767 Train Loss: 0.0005760, Val Loss: 0.0004832 +2025-03-23 15:37:26,767 Epoch 54/2000 +2025-03-23 15:39:13,183 Current Learning Rate: 0.0008306559 +2025-03-23 15:39:14,082 Train Loss: 0.0003941, Val Loss: 0.0003675 +2025-03-23 15:39:14,082 Epoch 55/2000 +2025-03-23 15:41:00,468 Current Learning Rate: 0.0008247240 +2025-03-23 15:41:01,323 Train Loss: 0.0004171, Val Loss: 0.0003511 +2025-03-23 15:41:01,323 Epoch 56/2000 +2025-03-23 15:42:47,751 Current Learning Rate: 0.0008187120 +2025-03-23 15:42:48,603 Train Loss: 0.0003384, Val Loss: 0.0003205 +2025-03-23 15:42:48,604 Epoch 57/2000 +2025-03-23 15:44:35,143 Current Learning Rate: 0.0008126213 +2025-03-23 15:44:35,987 Train Loss: 0.0003059, Val Loss: 0.0003051 +2025-03-23 15:44:35,988 Epoch 58/2000 +2025-03-23 15:46:22,451 Current Learning Rate: 0.0008064535 +2025-03-23 15:46:22,452 Train Loss: 0.0003343, Val Loss: 0.0003221 +2025-03-23 15:46:22,452 Epoch 59/2000 +2025-03-23 15:48:08,988 Current Learning Rate: 0.0008002101 +2025-03-23 15:48:08,988 Train Loss: 0.0003248, Val Loss: 0.0003230 +2025-03-23 
15:48:08,989 Epoch 60/2000 +2025-03-23 15:49:56,126 Current Learning Rate: 0.0007938926 +2025-03-23 15:49:56,126 Train Loss: 0.0003380, Val Loss: 0.0003266 +2025-03-23 15:49:56,127 Epoch 61/2000 +2025-03-23 15:51:43,112 Current Learning Rate: 0.0007875026 +2025-03-23 15:51:43,113 Train Loss: 0.0003078, Val Loss: 0.0003088 +2025-03-23 15:51:43,113 Epoch 62/2000 +2025-03-23 15:53:29,377 Current Learning Rate: 0.0007810417 +2025-03-23 15:53:29,377 Train Loss: 0.0003177, Val Loss: 0.0003161 +2025-03-23 15:53:29,377 Epoch 63/2000 +2025-03-23 15:55:16,949 Current Learning Rate: 0.0007745114 +2025-03-23 15:55:17,796 Train Loss: 0.0003261, Val Loss: 0.0002933 +2025-03-23 15:55:17,796 Epoch 64/2000 +2025-03-23 15:57:04,694 Current Learning Rate: 0.0007679134 +2025-03-23 15:57:04,695 Train Loss: 0.0003292, Val Loss: 0.0003019 +2025-03-23 15:57:04,696 Epoch 65/2000 +2025-03-23 15:58:51,554 Current Learning Rate: 0.0007612493 +2025-03-23 15:58:52,420 Train Loss: 0.0003480, Val Loss: 0.0002889 +2025-03-23 15:58:52,420 Epoch 66/2000 +2025-03-23 16:00:38,846 Current Learning Rate: 0.0007545207 +2025-03-23 16:00:38,847 Train Loss: 0.0002708, Val Loss: 0.0002894 +2025-03-23 16:00:38,847 Epoch 67/2000 +2025-03-23 16:02:25,488 Current Learning Rate: 0.0007477293 +2025-03-23 16:02:25,489 Train Loss: 0.0003276, Val Loss: 0.0002953 +2025-03-23 16:02:25,489 Epoch 68/2000 +2025-03-23 16:04:12,496 Current Learning Rate: 0.0007408768 +2025-03-23 16:04:13,588 Train Loss: 0.0002764, Val Loss: 0.0002802 +2025-03-23 16:04:13,588 Epoch 69/2000 +2025-03-23 16:05:59,687 Current Learning Rate: 0.0007339649 +2025-03-23 16:05:59,687 Train Loss: 0.0002865, Val Loss: 0.0002988 +2025-03-23 16:05:59,688 Epoch 70/2000 +2025-03-23 16:07:45,746 Current Learning Rate: 0.0007269952 +2025-03-23 16:07:46,575 Train Loss: 0.0002713, Val Loss: 0.0002685 +2025-03-23 16:07:46,575 Epoch 71/2000 +2025-03-23 16:09:32,493 Current Learning Rate: 0.0007199696 +2025-03-23 16:09:32,493 Train Loss: 0.0002836, Val Loss: 
0.0002974 +2025-03-23 16:09:32,494 Epoch 72/2000 +2025-03-23 16:11:20,078 Current Learning Rate: 0.0007128896 +2025-03-23 16:11:20,079 Train Loss: 0.0002832, Val Loss: 0.0002818 +2025-03-23 16:11:20,079 Epoch 73/2000 +2025-03-23 16:13:06,997 Current Learning Rate: 0.0007057572 +2025-03-23 16:13:06,998 Train Loss: 0.0002823, Val Loss: 0.0002734 +2025-03-23 16:13:06,998 Epoch 74/2000 +2025-03-23 16:14:53,918 Current Learning Rate: 0.0006985739 +2025-03-23 16:14:54,767 Train Loss: 0.0002763, Val Loss: 0.0002674 +2025-03-23 16:14:54,768 Epoch 75/2000 +2025-03-23 16:16:41,292 Current Learning Rate: 0.0006913417 +2025-03-23 16:16:41,293 Train Loss: 0.0002740, Val Loss: 0.0002789 +2025-03-23 16:16:41,293 Epoch 76/2000 +2025-03-23 16:18:28,005 Current Learning Rate: 0.0006840623 +2025-03-23 16:18:28,831 Train Loss: 0.0002684, Val Loss: 0.0002620 +2025-03-23 16:18:28,831 Epoch 77/2000 +2025-03-23 16:20:15,320 Current Learning Rate: 0.0006767374 +2025-03-23 16:20:16,148 Train Loss: 0.0002421, Val Loss: 0.0002606 +2025-03-23 16:20:16,148 Epoch 78/2000 +2025-03-23 16:22:02,066 Current Learning Rate: 0.0006693690 +2025-03-23 16:22:02,983 Train Loss: 0.0002550, Val Loss: 0.0002493 +2025-03-23 16:22:02,983 Epoch 79/2000 +2025-03-23 16:23:48,935 Current Learning Rate: 0.0006619587 +2025-03-23 16:23:49,758 Train Loss: 0.0002301, Val Loss: 0.0002396 +2025-03-23 16:23:49,759 Epoch 80/2000 +2025-03-23 16:25:36,168 Current Learning Rate: 0.0006545085 +2025-03-23 16:25:36,169 Train Loss: 0.0002305, Val Loss: 0.0002479 +2025-03-23 16:25:36,169 Epoch 81/2000 +2025-03-23 16:27:23,099 Current Learning Rate: 0.0006470202 +2025-03-23 16:27:23,950 Train Loss: 0.0002128, Val Loss: 0.0002363 +2025-03-23 16:27:23,950 Epoch 82/2000 +2025-03-23 16:29:10,107 Current Learning Rate: 0.0006394956 +2025-03-23 16:29:10,107 Train Loss: 0.0002447, Val Loss: 0.0002398 +2025-03-23 16:29:10,108 Epoch 83/2000 +2025-03-23 16:30:57,196 Current Learning Rate: 0.0006319365 +2025-03-23 16:30:58,015 Train Loss: 
0.0002323, Val Loss: 0.0002253 +2025-03-23 16:30:58,015 Epoch 84/2000 +2025-03-23 16:32:44,659 Current Learning Rate: 0.0006243449 +2025-03-23 16:32:44,660 Train Loss: 0.0002386, Val Loss: 0.0002277 +2025-03-23 16:32:44,660 Epoch 85/2000 +2025-03-23 16:34:30,910 Current Learning Rate: 0.0006167227 +2025-03-23 16:34:30,910 Train Loss: 0.0002364, Val Loss: 0.0002413 +2025-03-23 16:34:30,910 Epoch 86/2000 +2025-03-23 16:36:17,350 Current Learning Rate: 0.0006090716 +2025-03-23 16:36:17,350 Train Loss: 0.0002531, Val Loss: 0.0002504 +2025-03-23 16:36:17,350 Epoch 87/2000 +2025-03-23 16:38:03,957 Current Learning Rate: 0.0006013936 +2025-03-23 16:38:03,965 Train Loss: 0.0002237, Val Loss: 0.0002487 +2025-03-23 16:38:03,965 Epoch 88/2000 +2025-03-23 16:39:50,647 Current Learning Rate: 0.0005936907 +2025-03-23 16:39:50,647 Train Loss: 0.0002236, Val Loss: 0.0002571 +2025-03-23 16:39:50,648 Epoch 89/2000 +2025-03-23 16:41:36,896 Current Learning Rate: 0.0005859646 +2025-03-23 16:41:36,896 Train Loss: 0.0002287, Val Loss: 0.0002396 +2025-03-23 16:41:36,896 Epoch 90/2000 +2025-03-23 16:43:22,933 Current Learning Rate: 0.0005782172 +2025-03-23 16:43:22,933 Train Loss: 0.0002566, Val Loss: 0.0002254 +2025-03-23 16:43:22,933 Epoch 91/2000 +2025-03-23 16:45:10,177 Current Learning Rate: 0.0005704506 +2025-03-23 16:45:11,071 Train Loss: 0.0002471, Val Loss: 0.0002157 +2025-03-23 16:45:11,071 Epoch 92/2000 +2025-03-23 16:46:57,697 Current Learning Rate: 0.0005626666 +2025-03-23 16:46:58,538 Train Loss: 0.0001896, Val Loss: 0.0002065 +2025-03-23 16:46:58,538 Epoch 93/2000 +2025-03-23 16:48:44,997 Current Learning Rate: 0.0005548672 +2025-03-23 16:48:45,876 Train Loss: 0.0001890, Val Loss: 0.0001987 +2025-03-23 16:48:45,876 Epoch 94/2000 +2025-03-23 16:50:31,998 Current Learning Rate: 0.0005470542 +2025-03-23 16:50:31,999 Train Loss: 0.0002285, Val Loss: 0.0002224 +2025-03-23 16:50:31,999 Epoch 95/2000 +2025-03-23 16:52:18,680 Current Learning Rate: 0.0005392295 +2025-03-23 
16:52:18,680 Train Loss: 0.0002289, Val Loss: 0.0002076 +2025-03-23 16:52:18,680 Epoch 96/2000 +2025-03-23 16:54:05,896 Current Learning Rate: 0.0005313953 +2025-03-23 16:54:05,897 Train Loss: 0.0001966, Val Loss: 0.0002072 +2025-03-23 16:54:05,897 Epoch 97/2000 +2025-03-23 16:55:52,927 Current Learning Rate: 0.0005235532 +2025-03-23 16:55:52,927 Train Loss: 0.0002122, Val Loss: 0.0002088 +2025-03-23 16:55:52,928 Epoch 98/2000 +2025-03-23 16:57:39,571 Current Learning Rate: 0.0005157054 +2025-03-23 16:57:40,391 Train Loss: 0.0002052, Val Loss: 0.0001919 +2025-03-23 16:57:40,391 Epoch 99/2000 +2025-03-23 16:59:26,938 Current Learning Rate: 0.0005078537 +2025-03-23 16:59:26,938 Train Loss: 0.0001868, Val Loss: 0.0002111 +2025-03-23 16:59:26,938 Epoch 100/2000 +2025-03-23 17:01:14,038 Current Learning Rate: 0.0005000000 +2025-03-23 17:01:14,039 Train Loss: 0.0001862, Val Loss: 0.0001928 +2025-03-23 17:01:14,039 Epoch 101/2000 +2025-03-23 17:03:00,908 Current Learning Rate: 0.0004921463 +2025-03-23 17:03:01,789 Train Loss: 0.0001959, Val Loss: 0.0001863 +2025-03-23 17:03:01,789 Epoch 102/2000 +2025-03-23 17:04:47,751 Current Learning Rate: 0.0004842946 +2025-03-23 17:04:47,752 Train Loss: 0.0002104, Val Loss: 0.0001954 +2025-03-23 17:04:47,752 Epoch 103/2000 +2025-03-23 17:06:34,185 Current Learning Rate: 0.0004764468 +2025-03-23 17:06:34,186 Train Loss: 0.0001722, Val Loss: 0.0001946 +2025-03-23 17:06:34,186 Epoch 104/2000 +2025-03-23 17:08:21,011 Current Learning Rate: 0.0004686047 +2025-03-23 17:08:21,011 Train Loss: 0.0001816, Val Loss: 0.0002174 +2025-03-23 17:08:21,011 Epoch 105/2000 +2025-03-23 17:10:07,468 Current Learning Rate: 0.0004607705 +2025-03-23 17:10:07,468 Train Loss: 0.0001957, Val Loss: 0.0001954 +2025-03-23 17:10:07,469 Epoch 106/2000 +2025-03-23 17:11:53,839 Current Learning Rate: 0.0004529458 +2025-03-23 17:11:53,839 Train Loss: 0.0001948, Val Loss: 0.0002181 +2025-03-23 17:11:53,840 Epoch 107/2000 +2025-03-23 17:13:40,830 Current Learning Rate: 
0.0004451328 +2025-03-23 17:13:40,830 Train Loss: 0.0001943, Val Loss: 0.0001903 +2025-03-23 17:13:40,830 Epoch 108/2000 +2025-03-23 17:15:27,768 Current Learning Rate: 0.0004373334 +2025-03-23 17:15:28,657 Train Loss: 0.0001824, Val Loss: 0.0001846 +2025-03-23 17:15:28,657 Epoch 109/2000 +2025-03-23 17:17:14,865 Current Learning Rate: 0.0004295494 +2025-03-23 17:17:14,866 Train Loss: 0.0001819, Val Loss: 0.0001908 +2025-03-23 17:17:14,866 Epoch 110/2000 +2025-03-23 17:19:01,838 Current Learning Rate: 0.0004217828 +2025-03-23 17:19:02,743 Train Loss: 0.0001670, Val Loss: 0.0001757 +2025-03-23 17:19:02,744 Epoch 111/2000 +2025-03-23 17:20:49,585 Current Learning Rate: 0.0004140354 +2025-03-23 17:20:50,471 Train Loss: 0.0001486, Val Loss: 0.0001712 +2025-03-23 17:20:50,471 Epoch 112/2000 +2025-03-23 17:22:37,247 Current Learning Rate: 0.0004063093 +2025-03-23 17:22:37,247 Train Loss: 0.0001747, Val Loss: 0.0001845 +2025-03-23 17:22:37,248 Epoch 113/2000 +2025-03-23 17:24:23,827 Current Learning Rate: 0.0003986064 +2025-03-23 17:24:23,827 Train Loss: 0.0002078, Val Loss: 0.0001819 +2025-03-23 17:24:23,827 Epoch 114/2000 +2025-03-23 17:26:10,647 Current Learning Rate: 0.0003909284 +2025-03-23 17:26:10,648 Train Loss: 0.0001457, Val Loss: 0.0001729 +2025-03-23 17:26:10,648 Epoch 115/2000 +2025-03-23 17:27:57,843 Current Learning Rate: 0.0003832773 +2025-03-23 17:27:57,843 Train Loss: 0.0002005, Val Loss: 0.0001827 +2025-03-23 17:27:57,844 Epoch 116/2000 +2025-03-23 17:29:44,455 Current Learning Rate: 0.0003756551 +2025-03-23 17:29:44,455 Train Loss: 0.0002056, Val Loss: 0.0001953 +2025-03-23 17:29:44,456 Epoch 117/2000 +2025-03-23 17:31:31,121 Current Learning Rate: 0.0003680635 +2025-03-23 17:31:32,091 Train Loss: 0.0001765, Val Loss: 0.0001661 +2025-03-23 17:31:32,092 Epoch 118/2000 +2025-03-23 17:33:18,491 Current Learning Rate: 0.0003605044 +2025-03-23 17:33:18,492 Train Loss: 0.0001752, Val Loss: 0.0001684 +2025-03-23 17:33:18,492 Epoch 119/2000 +2025-03-23 
17:35:05,208 Current Learning Rate: 0.0003529798 +2025-03-23 17:35:05,209 Train Loss: 0.0001576, Val Loss: 0.0001714 +2025-03-23 17:35:05,209 Epoch 120/2000 +2025-03-23 17:36:51,988 Current Learning Rate: 0.0003454915 +2025-03-23 17:36:51,989 Train Loss: 0.0001545, Val Loss: 0.0001676 +2025-03-23 17:36:51,989 Epoch 121/2000 +2025-03-23 17:38:38,737 Current Learning Rate: 0.0003380413 +2025-03-23 17:38:38,738 Train Loss: 0.0002098, Val Loss: 0.0001686 +2025-03-23 17:38:38,738 Epoch 122/2000 +2025-03-23 17:40:25,475 Current Learning Rate: 0.0003306310 +2025-03-23 17:40:25,475 Train Loss: 0.0001741, Val Loss: 0.0001685 +2025-03-23 17:40:25,475 Epoch 123/2000 +2025-03-23 17:42:11,799 Current Learning Rate: 0.0003232626 +2025-03-23 17:42:12,743 Train Loss: 0.0001588, Val Loss: 0.0001614 +2025-03-23 17:42:12,743 Epoch 124/2000 +2025-03-23 17:43:58,937 Current Learning Rate: 0.0003159377 +2025-03-23 17:43:59,867 Train Loss: 0.0001791, Val Loss: 0.0001603 +2025-03-23 17:43:59,867 Epoch 125/2000 +2025-03-23 17:45:46,294 Current Learning Rate: 0.0003086583 +2025-03-23 17:45:47,161 Train Loss: 0.0001409, Val Loss: 0.0001547 +2025-03-23 17:45:47,162 Epoch 126/2000 +2025-03-23 17:47:33,656 Current Learning Rate: 0.0003014261 +2025-03-23 17:47:33,657 Train Loss: 0.0001467, Val Loss: 0.0001584 +2025-03-23 17:47:33,657 Epoch 127/2000 +2025-03-23 17:49:20,289 Current Learning Rate: 0.0002942428 +2025-03-23 17:49:20,290 Train Loss: 0.0001607, Val Loss: 0.0001597 +2025-03-23 17:49:20,290 Epoch 128/2000 +2025-03-23 17:51:07,110 Current Learning Rate: 0.0002871104 +2025-03-23 17:51:07,931 Train Loss: 0.0001545, Val Loss: 0.0001525 +2025-03-23 17:51:07,931 Epoch 129/2000 +2025-03-23 17:52:54,349 Current Learning Rate: 0.0002800304 +2025-03-23 17:52:55,172 Train Loss: 0.0001398, Val Loss: 0.0001518 +2025-03-23 17:52:55,172 Epoch 130/2000 +2025-03-23 17:54:41,447 Current Learning Rate: 0.0002730048 +2025-03-23 17:54:42,266 Train Loss: 0.0001547, Val Loss: 0.0001511 +2025-03-23 
17:54:42,266 Epoch 131/2000 +2025-03-23 17:56:28,846 Current Learning Rate: 0.0002660351 +2025-03-23 17:56:29,655 Train Loss: 0.0001371, Val Loss: 0.0001474 +2025-03-23 17:56:29,655 Epoch 132/2000 +2025-03-23 17:58:16,356 Current Learning Rate: 0.0002591232 +2025-03-23 17:58:16,357 Train Loss: 0.0001594, Val Loss: 0.0001566 +2025-03-23 17:58:16,357 Epoch 133/2000 +2025-03-23 18:00:03,333 Current Learning Rate: 0.0002522707 +2025-03-23 18:00:03,334 Train Loss: 0.0001416, Val Loss: 0.0001526 +2025-03-23 18:00:03,334 Epoch 134/2000 +2025-03-23 18:01:50,340 Current Learning Rate: 0.0002454793 +2025-03-23 18:01:50,341 Train Loss: 0.0001544, Val Loss: 0.0001539 +2025-03-23 18:01:50,341 Epoch 135/2000 +2025-03-23 18:03:37,202 Current Learning Rate: 0.0002387507 +2025-03-23 18:03:37,202 Train Loss: 0.0001323, Val Loss: 0.0001539 +2025-03-23 18:03:37,203 Epoch 136/2000 +2025-03-23 18:05:24,499 Current Learning Rate: 0.0002320866 +2025-03-23 18:05:24,499 Train Loss: 0.0001439, Val Loss: 0.0001478 +2025-03-23 18:05:24,500 Epoch 137/2000 +2025-03-23 18:07:11,517 Current Learning Rate: 0.0002254886 +2025-03-23 18:07:11,518 Train Loss: 0.0001538, Val Loss: 0.0001536 +2025-03-23 18:07:11,518 Epoch 138/2000 +2025-03-23 18:08:57,837 Current Learning Rate: 0.0002189583 +2025-03-23 18:08:58,647 Train Loss: 0.0001347, Val Loss: 0.0001441 +2025-03-23 18:08:58,647 Epoch 139/2000 +2025-03-23 18:10:44,281 Current Learning Rate: 0.0002124974 +2025-03-23 18:10:44,281 Train Loss: 0.0001537, Val Loss: 0.0001477 +2025-03-23 18:10:44,281 Epoch 140/2000 +2025-03-23 18:12:31,066 Current Learning Rate: 0.0002061074 +2025-03-23 18:12:31,067 Train Loss: 0.0001304, Val Loss: 0.0001449 +2025-03-23 18:12:31,067 Epoch 141/2000 +2025-03-23 18:14:17,900 Current Learning Rate: 0.0001997899 +2025-03-23 18:14:18,782 Train Loss: 0.0001323, Val Loss: 0.0001409 +2025-03-23 18:14:18,782 Epoch 142/2000 +2025-03-23 18:16:04,517 Current Learning Rate: 0.0001935465 +2025-03-23 18:16:04,517 Train Loss: 0.0001410, Val 
Loss: 0.0001439 +2025-03-23 18:16:04,517 Epoch 143/2000 +2025-03-23 18:17:51,148 Current Learning Rate: 0.0001873787 +2025-03-23 18:17:52,069 Train Loss: 0.0001330, Val Loss: 0.0001381 +2025-03-23 18:17:52,069 Epoch 144/2000 +2025-03-23 18:19:38,364 Current Learning Rate: 0.0001812880 +2025-03-23 18:19:38,364 Train Loss: 0.0001369, Val Loss: 0.0001393 +2025-03-23 18:19:38,365 Epoch 145/2000 +2025-03-23 18:21:24,573 Current Learning Rate: 0.0001752760 +2025-03-23 18:21:24,573 Train Loss: 0.0001585, Val Loss: 0.0001448 +2025-03-23 18:21:24,574 Epoch 146/2000 +2025-03-23 18:23:11,109 Current Learning Rate: 0.0001693441 +2025-03-23 18:23:11,110 Train Loss: 0.0001326, Val Loss: 0.0001433 +2025-03-23 18:23:11,110 Epoch 147/2000 +2025-03-23 18:24:57,666 Current Learning Rate: 0.0001634937 +2025-03-23 18:24:57,666 Train Loss: 0.0001219, Val Loss: 0.0001427 +2025-03-23 18:24:57,666 Epoch 148/2000 +2025-03-23 18:26:44,372 Current Learning Rate: 0.0001577264 +2025-03-23 18:26:45,260 Train Loss: 0.0001059, Val Loss: 0.0001343 +2025-03-23 18:26:45,261 Epoch 149/2000 +2025-03-23 18:28:30,998 Current Learning Rate: 0.0001520436 +2025-03-23 18:28:30,998 Train Loss: 0.0001305, Val Loss: 0.0001376 +2025-03-23 18:28:30,998 Epoch 150/2000 +2025-03-23 18:30:17,094 Current Learning Rate: 0.0001464466 +2025-03-23 18:30:17,094 Train Loss: 0.0001390, Val Loss: 0.0001368 +2025-03-23 18:30:17,095 Epoch 151/2000 +2025-03-23 18:32:04,034 Current Learning Rate: 0.0001409369 +2025-03-23 18:32:04,035 Train Loss: 0.0001314, Val Loss: 0.0001372 +2025-03-23 18:32:04,035 Epoch 152/2000 +2025-03-23 18:33:51,537 Current Learning Rate: 0.0001355157 +2025-03-23 18:33:52,407 Train Loss: 0.0001277, Val Loss: 0.0001340 +2025-03-23 18:33:52,408 Epoch 153/2000 +2025-03-23 18:35:39,033 Current Learning Rate: 0.0001301845 +2025-03-23 18:35:39,033 Train Loss: 0.0001370, Val Loss: 0.0001352 +2025-03-23 18:35:39,034 Epoch 154/2000 +2025-03-23 18:37:25,921 Current Learning Rate: 0.0001249445 +2025-03-23 
18:37:25,921 Train Loss: 0.0001238, Val Loss: 0.0001345 +2025-03-23 18:37:25,921 Epoch 155/2000 +2025-03-23 18:39:12,788 Current Learning Rate: 0.0001197970 +2025-03-23 18:39:12,789 Train Loss: 0.0001296, Val Loss: 0.0001353 +2025-03-23 18:39:12,789 Epoch 156/2000 +2025-03-23 18:40:59,849 Current Learning Rate: 0.0001147434 +2025-03-23 18:41:00,740 Train Loss: 0.0001392, Val Loss: 0.0001302 +2025-03-23 18:41:00,741 Epoch 157/2000 +2025-03-23 18:42:47,376 Current Learning Rate: 0.0001097848 +2025-03-23 18:42:47,376 Train Loss: 0.0001307, Val Loss: 0.0001305 +2025-03-23 18:42:47,377 Epoch 158/2000 +2025-03-23 18:44:33,956 Current Learning Rate: 0.0001049225 +2025-03-23 18:44:33,957 Train Loss: 0.0001228, Val Loss: 0.0001311 +2025-03-23 18:44:33,957 Epoch 159/2000 +2025-03-23 18:46:20,931 Current Learning Rate: 0.0001001577 +2025-03-23 18:46:20,931 Train Loss: 0.0001260, Val Loss: 0.0001304 +2025-03-23 18:46:20,932 Epoch 160/2000 +2025-03-23 18:48:08,220 Current Learning Rate: 0.0000954915 +2025-03-23 18:48:09,067 Train Loss: 0.0001372, Val Loss: 0.0001289 +2025-03-23 18:48:09,067 Epoch 161/2000 +2025-03-23 18:49:55,640 Current Learning Rate: 0.0000909251 +2025-03-23 18:49:56,489 Train Loss: 0.0001337, Val Loss: 0.0001287 +2025-03-23 18:49:56,490 Epoch 162/2000 +2025-03-23 18:51:42,802 Current Learning Rate: 0.0000864597 +2025-03-23 18:51:43,619 Train Loss: 0.0001335, Val Loss: 0.0001287 +2025-03-23 18:51:43,620 Epoch 163/2000 +2025-03-23 18:53:30,170 Current Learning Rate: 0.0000820963 +2025-03-23 18:53:30,170 Train Loss: 0.0001258, Val Loss: 0.0001288 +2025-03-23 18:53:30,171 Epoch 164/2000 +2025-03-23 18:55:17,498 Current Learning Rate: 0.0000778360 +2025-03-23 18:55:18,322 Train Loss: 0.0001251, Val Loss: 0.0001286 +2025-03-23 18:55:18,323 Epoch 165/2000 +2025-03-23 18:57:04,869 Current Learning Rate: 0.0000736799 +2025-03-23 18:57:04,870 Train Loss: 0.0001185, Val Loss: 0.0001305 +2025-03-23 18:57:04,870 Epoch 166/2000 +2025-03-23 18:58:51,113 Current Learning 
Rate: 0.0000696290 +2025-03-23 18:58:51,971 Train Loss: 0.0001333, Val Loss: 0.0001273 +2025-03-23 18:58:51,972 Epoch 167/2000 +2025-03-23 19:00:38,148 Current Learning Rate: 0.0000656842 +2025-03-23 19:00:38,998 Train Loss: 0.0001147, Val Loss: 0.0001267 +2025-03-23 19:00:38,999 Epoch 168/2000 +2025-03-23 19:02:25,678 Current Learning Rate: 0.0000618467 +2025-03-23 19:02:25,679 Train Loss: 0.0001273, Val Loss: 0.0001284 +2025-03-23 19:02:25,680 Epoch 169/2000 +2025-03-23 19:04:12,446 Current Learning Rate: 0.0000581172 +2025-03-23 19:04:13,307 Train Loss: 0.0001202, Val Loss: 0.0001260 +2025-03-23 19:04:13,308 Epoch 170/2000 +2025-03-23 19:05:59,592 Current Learning Rate: 0.0000544967 +2025-03-23 19:06:00,421 Train Loss: 0.0001169, Val Loss: 0.0001253 +2025-03-23 19:06:00,421 Epoch 171/2000 +2025-03-23 19:07:47,007 Current Learning Rate: 0.0000509862 +2025-03-23 19:07:47,849 Train Loss: 0.0001231, Val Loss: 0.0001253 +2025-03-23 19:07:47,850 Epoch 172/2000 +2025-03-23 19:09:34,276 Current Learning Rate: 0.0000475865 +2025-03-23 19:09:35,148 Train Loss: 0.0001173, Val Loss: 0.0001252 +2025-03-23 19:09:35,148 Epoch 173/2000 +2025-03-23 19:11:21,633 Current Learning Rate: 0.0000442984 +2025-03-23 19:11:21,634 Train Loss: 0.0001227, Val Loss: 0.0001268 +2025-03-23 19:11:21,634 Epoch 174/2000 +2025-03-23 19:13:08,414 Current Learning Rate: 0.0000411227 +2025-03-23 19:13:09,241 Train Loss: 0.0001157, Val Loss: 0.0001246 +2025-03-23 19:13:09,241 Epoch 175/2000 +2025-03-23 19:14:55,455 Current Learning Rate: 0.0000380602 +2025-03-23 19:14:56,357 Train Loss: 0.0001198, Val Loss: 0.0001244 +2025-03-23 19:14:56,358 Epoch 176/2000 +2025-03-23 19:16:42,679 Current Learning Rate: 0.0000351118 +2025-03-23 19:16:42,680 Train Loss: 0.0001446, Val Loss: 0.0001251 +2025-03-23 19:16:42,680 Epoch 177/2000 +2025-03-23 19:18:29,750 Current Learning Rate: 0.0000322780 +2025-03-23 19:18:30,612 Train Loss: 0.0001127, Val Loss: 0.0001235 +2025-03-23 19:18:30,612 Epoch 178/2000 +2025-03-23 
19:20:16,858 Current Learning Rate: 0.0000295596 +2025-03-23 19:20:16,859 Train Loss: 0.0001118, Val Loss: 0.0001236 +2025-03-23 19:20:16,859 Epoch 179/2000 +2025-03-23 19:22:03,563 Current Learning Rate: 0.0000269573 +2025-03-23 19:22:03,564 Train Loss: 0.0001332, Val Loss: 0.0001242 +2025-03-23 19:22:03,564 Epoch 180/2000 +2025-03-23 19:23:50,535 Current Learning Rate: 0.0000244717 +2025-03-23 19:23:51,386 Train Loss: 0.0001301, Val Loss: 0.0001233 +2025-03-23 19:23:51,386 Epoch 181/2000 +2025-03-23 19:25:37,628 Current Learning Rate: 0.0000221035 +2025-03-23 19:25:38,476 Train Loss: 0.0001235, Val Loss: 0.0001233 +2025-03-23 19:25:38,476 Epoch 182/2000 +2025-03-23 19:27:24,981 Current Learning Rate: 0.0000198532 +2025-03-23 19:27:24,982 Train Loss: 0.0001245, Val Loss: 0.0001236 +2025-03-23 19:27:24,983 Epoch 183/2000 +2025-03-23 19:29:11,644 Current Learning Rate: 0.0000177213 +2025-03-23 19:29:12,516 Train Loss: 0.0001092, Val Loss: 0.0001230 +2025-03-23 19:29:12,516 Epoch 184/2000 +2025-03-23 19:30:58,833 Current Learning Rate: 0.0000157084 +2025-03-23 19:30:59,690 Train Loss: 0.0001175, Val Loss: 0.0001229 +2025-03-23 19:30:59,690 Epoch 185/2000 +2025-03-23 19:32:45,732 Current Learning Rate: 0.0000138150 +2025-03-23 19:32:46,567 Train Loss: 0.0001110, Val Loss: 0.0001225 +2025-03-23 19:32:46,567 Epoch 186/2000 +2025-03-23 19:34:32,584 Current Learning Rate: 0.0000120416 +2025-03-23 19:34:33,459 Train Loss: 0.0001196, Val Loss: 0.0001225 +2025-03-23 19:34:33,459 Epoch 187/2000 +2025-03-23 19:36:19,341 Current Learning Rate: 0.0000103886 +2025-03-23 19:36:20,254 Train Loss: 0.0001228, Val Loss: 0.0001223 +2025-03-23 19:36:20,254 Epoch 188/2000 +2025-03-23 19:38:06,164 Current Learning Rate: 0.0000088564 +2025-03-23 19:38:06,999 Train Loss: 0.0001239, Val Loss: 0.0001223 +2025-03-23 19:38:06,999 Epoch 189/2000 +2025-03-23 19:39:52,594 Current Learning Rate: 0.0000074453 +2025-03-23 19:39:53,417 Train Loss: 0.0001089, Val Loss: 0.0001222 +2025-03-23 
19:39:53,418 Epoch 190/2000 +2025-03-23 19:41:39,329 Current Learning Rate: 0.0000061558 +2025-03-23 19:41:40,206 Train Loss: 0.0001175, Val Loss: 0.0001222 +2025-03-23 19:41:40,206 Epoch 191/2000 +2025-03-23 19:43:26,406 Current Learning Rate: 0.0000049882 +2025-03-23 19:43:27,264 Train Loss: 0.0000982, Val Loss: 0.0001220 +2025-03-23 19:43:27,264 Epoch 192/2000 +2025-03-23 19:45:13,447 Current Learning Rate: 0.0000039426 +2025-03-23 19:45:14,287 Train Loss: 0.0001131, Val Loss: 0.0001220 +2025-03-23 19:45:14,287 Epoch 193/2000 +2025-03-23 19:47:00,406 Current Learning Rate: 0.0000030195 +2025-03-23 19:47:01,260 Train Loss: 0.0001078, Val Loss: 0.0001219 +2025-03-23 19:47:01,261 Epoch 194/2000 +2025-03-23 19:48:47,469 Current Learning Rate: 0.0000022190 +2025-03-23 19:48:48,308 Train Loss: 0.0001343, Val Loss: 0.0001219 +2025-03-23 19:48:48,308 Epoch 195/2000 +2025-03-23 19:50:34,714 Current Learning Rate: 0.0000015413 +2025-03-23 19:50:35,590 Train Loss: 0.0001081, Val Loss: 0.0001218 +2025-03-23 19:50:35,590 Epoch 196/2000 +2025-03-23 19:52:22,351 Current Learning Rate: 0.0000009866 +2025-03-23 19:52:23,224 Train Loss: 0.0001036, Val Loss: 0.0001218 +2025-03-23 19:52:23,224 Epoch 197/2000 +2025-03-23 19:54:09,681 Current Learning Rate: 0.0000005551 +2025-03-23 19:54:10,521 Train Loss: 0.0001225, Val Loss: 0.0001218 +2025-03-23 19:54:10,521 Epoch 198/2000 +2025-03-23 19:55:56,769 Current Learning Rate: 0.0000002467 +2025-03-23 19:55:56,770 Train Loss: 0.0001139, Val Loss: 0.0001220 +2025-03-23 19:55:56,770 Epoch 199/2000 +2025-03-23 19:57:43,521 Current Learning Rate: 0.0000000617 +2025-03-23 19:57:44,355 Train Loss: 0.0001307, Val Loss: 0.0001218 +2025-03-23 19:57:44,355 Epoch 200/2000 +2025-03-23 19:59:30,576 Current Learning Rate: 0.0000000000 +2025-03-23 19:59:30,577 Train Loss: 0.0001204, Val Loss: 0.0001219 +2025-03-23 19:59:30,577 Epoch 201/2000 +2025-03-23 20:01:17,455 Current Learning Rate: 0.0000000617 +2025-03-23 20:01:18,302 Train Loss: 0.0001177, Val 
Loss: 0.0001218 +2025-03-23 20:01:18,302 Epoch 202/2000 +2025-03-23 20:03:04,298 Current Learning Rate: 0.0000002467 +2025-03-23 20:03:04,299 Train Loss: 0.0001206, Val Loss: 0.0001218 +2025-03-23 20:03:04,299 Epoch 203/2000 +2025-03-23 20:04:51,789 Current Learning Rate: 0.0000005551 +2025-03-23 20:04:52,614 Train Loss: 0.0001123, Val Loss: 0.0001218 +2025-03-23 20:04:52,614 Epoch 204/2000 +2025-03-23 20:06:38,877 Current Learning Rate: 0.0000009866 +2025-03-23 20:06:38,877 Train Loss: 0.0001156, Val Loss: 0.0001218 +2025-03-23 20:06:38,877 Epoch 205/2000 +2025-03-23 20:08:25,204 Current Learning Rate: 0.0000015413 +2025-03-23 20:08:25,205 Train Loss: 0.0001229, Val Loss: 0.0001218 +2025-03-23 20:08:25,205 Epoch 206/2000 +2025-03-23 20:10:11,451 Current Learning Rate: 0.0000022190 +2025-03-23 20:10:12,318 Train Loss: 0.0001071, Val Loss: 0.0001218 +2025-03-23 20:10:12,318 Epoch 207/2000 +2025-03-23 20:11:58,196 Current Learning Rate: 0.0000030195 +2025-03-23 20:11:58,197 Train Loss: 0.0001106, Val Loss: 0.0001218 +2025-03-23 20:11:58,197 Epoch 208/2000 +2025-03-23 20:13:45,164 Current Learning Rate: 0.0000039426 +2025-03-23 20:13:45,165 Train Loss: 0.0001090, Val Loss: 0.0001218 +2025-03-23 20:13:45,165 Epoch 209/2000 +2025-03-23 20:15:32,836 Current Learning Rate: 0.0000049882 +2025-03-23 20:15:32,837 Train Loss: 0.0001080, Val Loss: 0.0001218 +2025-03-23 20:15:32,837 Epoch 210/2000 +2025-03-23 20:17:19,478 Current Learning Rate: 0.0000061558 +2025-03-23 20:17:19,479 Train Loss: 0.0001067, Val Loss: 0.0001219 +2025-03-23 20:17:19,479 Epoch 211/2000 +2025-03-23 20:19:06,296 Current Learning Rate: 0.0000074453 +2025-03-23 20:19:06,297 Train Loss: 0.0001303, Val Loss: 0.0001220 +2025-03-23 20:19:06,297 Epoch 212/2000 +2025-03-23 20:20:53,453 Current Learning Rate: 0.0000088564 +2025-03-23 20:20:53,453 Train Loss: 0.0001280, Val Loss: 0.0001219 +2025-03-23 20:20:53,454 Epoch 213/2000 +2025-03-23 20:22:39,811 Current Learning Rate: 0.0000103886 +2025-03-23 
20:22:39,812 Train Loss: 0.0001137, Val Loss: 0.0001219 +2025-03-23 20:22:39,812 Epoch 214/2000 +2025-03-23 20:24:26,086 Current Learning Rate: 0.0000120416 +2025-03-23 20:24:26,087 Train Loss: 0.0001062, Val Loss: 0.0001219 +2025-03-23 20:24:26,087 Epoch 215/2000 +2025-03-23 20:26:12,615 Current Learning Rate: 0.0000138150 +2025-03-23 20:26:12,616 Train Loss: 0.0001233, Val Loss: 0.0001219 +2025-03-23 20:26:12,616 Epoch 216/2000 +2025-03-23 20:27:59,450 Current Learning Rate: 0.0000157084 +2025-03-23 20:27:59,451 Train Loss: 0.0001123, Val Loss: 0.0001220 +2025-03-23 20:27:59,451 Epoch 217/2000 +2025-03-23 20:29:45,900 Current Learning Rate: 0.0000177213 +2025-03-23 20:29:45,901 Train Loss: 0.0001188, Val Loss: 0.0001219 +2025-03-23 20:29:45,901 Epoch 218/2000 +2025-03-23 20:31:32,222 Current Learning Rate: 0.0000198532 +2025-03-23 20:31:32,223 Train Loss: 0.0001238, Val Loss: 0.0001220 +2025-03-23 20:31:32,223 Epoch 219/2000 +2025-03-23 20:33:18,857 Current Learning Rate: 0.0000221035 +2025-03-23 20:33:18,858 Train Loss: 0.0001115, Val Loss: 0.0001219 +2025-03-23 20:33:18,858 Epoch 220/2000 +2025-03-23 20:35:05,495 Current Learning Rate: 0.0000244717 +2025-03-23 20:35:05,495 Train Loss: 0.0001146, Val Loss: 0.0001219 +2025-03-23 20:35:05,496 Epoch 221/2000 +2025-03-23 20:36:52,008 Current Learning Rate: 0.0000269573 +2025-03-23 20:36:52,008 Train Loss: 0.0001262, Val Loss: 0.0001221 +2025-03-23 20:36:52,008 Epoch 222/2000 +2025-03-23 20:38:38,541 Current Learning Rate: 0.0000295596 +2025-03-23 20:38:38,542 Train Loss: 0.0000980, Val Loss: 0.0001219 +2025-03-23 20:38:38,542 Epoch 223/2000 +2025-03-23 20:40:25,265 Current Learning Rate: 0.0000322780 +2025-03-23 20:40:25,266 Train Loss: 0.0001343, Val Loss: 0.0001225 +2025-03-23 20:40:25,266 Epoch 224/2000 +2025-03-23 20:42:11,561 Current Learning Rate: 0.0000351118 +2025-03-23 20:42:11,561 Train Loss: 0.0001193, Val Loss: 0.0001220 +2025-03-23 20:42:11,561 Epoch 225/2000 +2025-03-23 20:43:58,556 Current Learning 
Rate: 0.0000380602 +2025-03-23 20:43:58,557 Train Loss: 0.0001185, Val Loss: 0.0001226 +2025-03-23 20:43:58,557 Epoch 226/2000 +2025-03-23 20:45:44,831 Current Learning Rate: 0.0000411227 +2025-03-23 20:45:44,831 Train Loss: 0.0001074, Val Loss: 0.0001225 +2025-03-23 20:45:44,832 Epoch 227/2000 +2025-03-23 20:47:31,358 Current Learning Rate: 0.0000442984 +2025-03-23 20:47:31,358 Train Loss: 0.0001040, Val Loss: 0.0001223 +2025-03-23 20:47:31,359 Epoch 228/2000 +2025-03-23 20:49:18,978 Current Learning Rate: 0.0000475865 +2025-03-23 20:49:18,979 Train Loss: 0.0001083, Val Loss: 0.0001220 +2025-03-23 20:49:18,979 Epoch 229/2000 +2025-03-23 20:51:05,510 Current Learning Rate: 0.0000509862 +2025-03-23 20:51:05,511 Train Loss: 0.0001098, Val Loss: 0.0001222 +2025-03-23 20:51:05,511 Epoch 230/2000 +2025-03-23 20:52:51,544 Current Learning Rate: 0.0000544967 +2025-03-23 20:52:51,544 Train Loss: 0.0001089, Val Loss: 0.0001220 +2025-03-23 20:52:51,545 Epoch 231/2000 +2025-03-23 20:54:38,264 Current Learning Rate: 0.0000581172 +2025-03-23 20:54:38,265 Train Loss: 0.0001072, Val Loss: 0.0001226 +2025-03-23 20:54:38,265 Epoch 232/2000 +2025-03-23 20:56:25,268 Current Learning Rate: 0.0000618467 +2025-03-23 20:56:25,268 Train Loss: 0.0001175, Val Loss: 0.0001224 +2025-03-23 20:56:25,268 Epoch 233/2000 +2025-03-23 20:58:11,342 Current Learning Rate: 0.0000656842 +2025-03-23 20:58:11,343 Train Loss: 0.0001229, Val Loss: 0.0001237 +2025-03-23 20:58:11,343 Epoch 234/2000 +2025-03-23 20:59:57,968 Current Learning Rate: 0.0000696290 +2025-03-23 20:59:57,968 Train Loss: 0.0001226, Val Loss: 0.0001228 +2025-03-23 20:59:57,969 Epoch 235/2000 +2025-03-23 21:01:44,957 Current Learning Rate: 0.0000736799 +2025-03-23 21:01:44,958 Train Loss: 0.0001069, Val Loss: 0.0001225 +2025-03-23 21:01:44,958 Epoch 236/2000 +2025-03-23 21:03:32,254 Current Learning Rate: 0.0000778360 +2025-03-23 21:03:32,255 Train Loss: 0.0001002, Val Loss: 0.0001225 +2025-03-23 21:03:32,255 Epoch 237/2000 +2025-03-23 
21:05:18,856 Current Learning Rate: 0.0000820963 +2025-03-23 21:05:18,856 Train Loss: 0.0001216, Val Loss: 0.0001229 +2025-03-23 21:05:18,857 Epoch 238/2000 +2025-03-23 21:07:05,648 Current Learning Rate: 0.0000864597 +2025-03-23 21:07:05,648 Train Loss: 0.0001268, Val Loss: 0.0001233 +2025-03-23 21:07:05,648 Epoch 239/2000 +2025-03-23 21:08:52,114 Current Learning Rate: 0.0000909251 +2025-03-23 21:08:52,115 Train Loss: 0.0001308, Val Loss: 0.0001238 +2025-03-23 21:08:52,115 Epoch 240/2000 +2025-03-23 21:10:39,640 Current Learning Rate: 0.0000954915 +2025-03-23 21:10:39,641 Train Loss: 0.0001239, Val Loss: 0.0001235 +2025-03-23 21:10:39,641 Epoch 241/2000 +2025-03-23 21:12:26,526 Current Learning Rate: 0.0001001577 +2025-03-23 21:12:26,526 Train Loss: 0.0001080, Val Loss: 0.0001235 +2025-03-23 21:12:26,527 Epoch 242/2000 +2025-03-23 21:14:13,348 Current Learning Rate: 0.0001049225 +2025-03-23 21:14:13,365 Train Loss: 0.0001269, Val Loss: 0.0001257 +2025-03-23 21:14:13,366 Epoch 243/2000 +2025-03-23 21:15:59,851 Current Learning Rate: 0.0001097848 +2025-03-23 21:15:59,851 Train Loss: 0.0001180, Val Loss: 0.0001225 +2025-03-23 21:15:59,852 Epoch 244/2000 +2025-03-23 21:17:45,983 Current Learning Rate: 0.0001147434 +2025-03-23 21:17:45,984 Train Loss: 0.0001200, Val Loss: 0.0001242 +2025-03-23 21:17:45,984 Epoch 245/2000 +2025-03-23 21:19:32,663 Current Learning Rate: 0.0001197970 +2025-03-23 21:19:32,664 Train Loss: 0.0001132, Val Loss: 0.0001233 +2025-03-23 21:19:32,664 Epoch 246/2000 +2025-03-23 21:21:19,103 Current Learning Rate: 0.0001249445 +2025-03-23 21:21:19,103 Train Loss: 0.0001042, Val Loss: 0.0001227 +2025-03-23 21:21:19,104 Epoch 247/2000 +2025-03-23 21:23:06,210 Current Learning Rate: 0.0001301845 +2025-03-23 21:23:06,211 Train Loss: 0.0001307, Val Loss: 0.0001284 +2025-03-23 21:23:06,211 Epoch 248/2000 +2025-03-23 21:24:53,358 Current Learning Rate: 0.0001355157 +2025-03-23 21:24:53,358 Train Loss: 0.0001108, Val Loss: 0.0001222 +2025-03-23 
21:24:53,358 Epoch 249/2000 +2025-03-23 21:26:40,226 Current Learning Rate: 0.0001409369 +2025-03-23 21:26:40,227 Train Loss: 0.0001356, Val Loss: 0.0001248 +2025-03-23 21:26:40,227 Epoch 250/2000 +2025-03-23 21:28:27,153 Current Learning Rate: 0.0001464466 +2025-03-23 21:28:27,154 Train Loss: 0.0001251, Val Loss: 0.0001242 +2025-03-23 21:28:27,154 Epoch 251/2000 +2025-03-23 21:30:13,990 Current Learning Rate: 0.0001520436 +2025-03-23 21:30:13,991 Train Loss: 0.0001186, Val Loss: 0.0001223 +2025-03-23 21:30:13,991 Epoch 252/2000 +2025-03-23 21:32:01,048 Current Learning Rate: 0.0001577264 +2025-03-23 21:32:01,049 Train Loss: 0.0001196, Val Loss: 0.0001260 +2025-03-23 21:32:01,049 Epoch 253/2000 +2025-03-23 21:33:47,618 Current Learning Rate: 0.0001634937 +2025-03-23 21:33:47,618 Train Loss: 0.0001195, Val Loss: 0.0001248 +2025-03-23 21:33:47,618 Epoch 254/2000 +2025-03-23 21:35:34,265 Current Learning Rate: 0.0001693441 +2025-03-23 21:35:34,265 Train Loss: 0.0001431, Val Loss: 0.0001267 +2025-03-23 21:35:34,266 Epoch 255/2000 +2025-03-23 21:37:20,995 Current Learning Rate: 0.0001752760 +2025-03-23 21:37:20,995 Train Loss: 0.0001168, Val Loss: 0.0001264 +2025-03-23 21:37:20,995 Epoch 256/2000 +2025-03-23 21:39:07,801 Current Learning Rate: 0.0001812880 +2025-03-23 21:39:07,801 Train Loss: 0.0001226, Val Loss: 0.0001230 +2025-03-23 21:39:07,801 Epoch 257/2000 +2025-03-23 21:40:54,335 Current Learning Rate: 0.0001873787 +2025-03-23 21:40:54,335 Train Loss: 0.0001056, Val Loss: 0.0001220 +2025-03-23 21:40:54,336 Epoch 258/2000 +2025-03-23 21:42:40,888 Current Learning Rate: 0.0001935465 +2025-03-23 21:42:40,888 Train Loss: 0.0001131, Val Loss: 0.0001250 +2025-03-23 21:42:40,888 Epoch 259/2000 +2025-03-23 21:44:27,739 Current Learning Rate: 0.0001997899 +2025-03-23 21:44:27,740 Train Loss: 0.0001173, Val Loss: 0.0001228 +2025-03-23 21:44:27,740 Epoch 260/2000 +2025-03-23 21:46:14,435 Current Learning Rate: 0.0002061074 +2025-03-23 21:46:14,458 Train Loss: 0.0001267, Val 
Loss: 0.0001222 +2025-03-23 21:46:14,459 Epoch 261/2000 +2025-03-23 21:48:00,051 Current Learning Rate: 0.0002124974 +2025-03-23 21:48:00,051 Train Loss: 0.0001178, Val Loss: 0.0001230 +2025-03-23 21:48:00,052 Epoch 262/2000 +2025-03-23 21:49:46,536 Current Learning Rate: 0.0002189583 +2025-03-23 21:49:46,537 Train Loss: 0.0001242, Val Loss: 0.0001279 +2025-03-23 21:49:46,537 Epoch 263/2000 +2025-03-23 21:51:33,098 Current Learning Rate: 0.0002254886 +2025-03-23 21:51:33,098 Train Loss: 0.0001269, Val Loss: 0.0001347 +2025-03-23 21:51:33,099 Epoch 264/2000 +2025-03-23 21:53:19,946 Current Learning Rate: 0.0002320866 +2025-03-23 21:53:19,947 Train Loss: 0.0001179, Val Loss: 0.0001241 +2025-03-23 21:53:19,947 Epoch 265/2000 +2025-03-23 21:55:06,170 Current Learning Rate: 0.0002387507 +2025-03-23 21:55:06,170 Train Loss: 0.0001292, Val Loss: 0.0001419 +2025-03-23 21:55:06,171 Epoch 266/2000 +2025-03-23 21:56:52,709 Current Learning Rate: 0.0002454793 +2025-03-23 21:56:52,710 Train Loss: 0.0001342, Val Loss: 0.0001249 +2025-03-23 21:56:52,710 Epoch 267/2000 +2025-03-23 21:58:39,878 Current Learning Rate: 0.0002522707 +2025-03-23 21:58:39,879 Train Loss: 0.0001164, Val Loss: 0.0001333 +2025-03-23 21:58:39,879 Epoch 268/2000 +2025-03-23 22:00:26,738 Current Learning Rate: 0.0002591232 +2025-03-23 22:00:26,738 Train Loss: 0.0001179, Val Loss: 0.0001283 +2025-03-23 22:00:26,739 Epoch 269/2000 +2025-03-23 22:02:13,632 Current Learning Rate: 0.0002660351 +2025-03-23 22:02:13,633 Train Loss: 0.0001219, Val Loss: 0.0001262 +2025-03-23 22:02:13,633 Epoch 270/2000 +2025-03-23 22:04:00,660 Current Learning Rate: 0.0002730048 +2025-03-23 22:04:00,660 Train Loss: 0.0001282, Val Loss: 0.0001234 +2025-03-23 22:04:00,660 Epoch 271/2000 +2025-03-23 22:05:47,642 Current Learning Rate: 0.0002800304 +2025-03-23 22:05:47,642 Train Loss: 0.0001362, Val Loss: 0.0001299 +2025-03-23 22:05:47,643 Epoch 272/2000 +2025-03-23 22:07:34,758 Current Learning Rate: 0.0002871104 +2025-03-23 
22:07:34,758 Train Loss: 0.0001279, Val Loss: 0.0001244 +2025-03-23 22:07:34,759 Epoch 273/2000 +2025-03-23 22:09:21,815 Current Learning Rate: 0.0002942428 +2025-03-23 22:09:21,816 Train Loss: 0.0001406, Val Loss: 0.0001352 +2025-03-23 22:09:21,816 Epoch 274/2000 +2025-03-23 22:11:08,651 Current Learning Rate: 0.0003014261 +2025-03-23 22:11:08,652 Train Loss: 0.0001277, Val Loss: 0.0001331 +2025-03-23 22:11:08,652 Epoch 275/2000 +2025-03-23 22:12:55,620 Current Learning Rate: 0.0003086583 +2025-03-23 22:12:55,620 Train Loss: 0.0001127, Val Loss: 0.0001253 +2025-03-23 22:12:55,620 Epoch 276/2000 +2025-03-23 22:14:42,542 Current Learning Rate: 0.0003159377 +2025-03-23 22:14:42,543 Train Loss: 0.0001468, Val Loss: 0.0001381 +2025-03-23 22:14:42,543 Epoch 277/2000 +2025-03-23 22:16:29,049 Current Learning Rate: 0.0003232626 +2025-03-23 22:16:29,049 Train Loss: 0.0001462, Val Loss: 0.0001461 +2025-03-23 22:16:29,050 Epoch 278/2000 +2025-03-23 22:18:15,521 Current Learning Rate: 0.0003306310 +2025-03-23 22:18:15,521 Train Loss: 0.0001149, Val Loss: 0.0001312 +2025-03-23 22:18:15,521 Epoch 279/2000 +2025-03-23 22:20:02,193 Current Learning Rate: 0.0003380413 +2025-03-23 22:20:02,194 Train Loss: 0.0001250, Val Loss: 0.0001382 +2025-03-23 22:20:02,194 Epoch 280/2000 +2025-03-23 22:21:48,889 Current Learning Rate: 0.0003454915 +2025-03-23 22:21:48,890 Train Loss: 0.0001195, Val Loss: 0.0001314 +2025-03-23 22:21:48,890 Epoch 281/2000 +2025-03-23 22:23:35,438 Current Learning Rate: 0.0003529798 +2025-03-23 22:23:35,439 Train Loss: 0.0001400, Val Loss: 0.0001370 +2025-03-23 22:23:35,439 Epoch 282/2000 +2025-03-23 22:25:21,773 Current Learning Rate: 0.0003605044 +2025-03-23 22:25:21,774 Train Loss: 0.0001255, Val Loss: 0.0001305 +2025-03-23 22:25:21,774 Epoch 283/2000 +2025-03-23 22:27:08,425 Current Learning Rate: 0.0003680635 +2025-03-23 22:27:08,426 Train Loss: 0.0001360, Val Loss: 0.0001387 +2025-03-23 22:27:08,426 Epoch 284/2000 +2025-03-23 22:28:55,153 Current Learning 
Rate: 0.0003756551 +2025-03-23 22:28:55,154 Train Loss: 0.0001233, Val Loss: 0.0001405 +2025-03-23 22:28:55,154 Epoch 285/2000 +2025-03-23 22:30:41,558 Current Learning Rate: 0.0003832773 +2025-03-23 22:30:41,559 Train Loss: 0.0001229, Val Loss: 0.0001362 +2025-03-23 22:30:41,559 Epoch 286/2000 +2025-03-23 22:32:27,896 Current Learning Rate: 0.0003909284 +2025-03-23 22:32:27,896 Train Loss: 0.0001487, Val Loss: 0.0001464 +2025-03-23 22:32:27,896 Epoch 287/2000 +2025-03-23 22:34:14,816 Current Learning Rate: 0.0003986064 +2025-03-23 22:34:14,816 Train Loss: 0.0001219, Val Loss: 0.0001285 +2025-03-23 22:34:14,817 Epoch 288/2000 +2025-03-23 22:36:01,382 Current Learning Rate: 0.0004063093 +2025-03-23 22:36:01,382 Train Loss: 0.0001392, Val Loss: 0.0001444 +2025-03-23 22:36:01,382 Epoch 289/2000 +2025-03-23 22:37:48,522 Current Learning Rate: 0.0004140354 +2025-03-23 22:37:48,522 Train Loss: 0.0001203, Val Loss: 0.0001366 +2025-03-23 22:37:48,523 Epoch 290/2000 +2025-03-23 22:39:34,873 Current Learning Rate: 0.0004217828 +2025-03-23 22:39:34,874 Train Loss: 0.0001368, Val Loss: 0.0001329 +2025-03-23 22:39:34,874 Epoch 291/2000 +2025-03-23 22:41:21,525 Current Learning Rate: 0.0004295494 +2025-03-23 22:41:21,526 Train Loss: 0.0001503, Val Loss: 0.0001386 +2025-03-23 22:41:21,526 Epoch 292/2000 +2025-03-23 22:43:08,375 Current Learning Rate: 0.0004373334 +2025-03-23 22:43:08,376 Train Loss: 0.0001438, Val Loss: 0.0001334 +2025-03-23 22:43:08,376 Epoch 293/2000 +2025-03-23 22:44:54,997 Current Learning Rate: 0.0004451328 +2025-03-23 22:44:54,998 Train Loss: 0.0001409, Val Loss: 0.0001724 +2025-03-23 22:44:54,998 Epoch 294/2000 +2025-03-23 22:46:41,547 Current Learning Rate: 0.0004529458 +2025-03-23 22:46:41,547 Train Loss: 0.0001259, Val Loss: 0.0001370 +2025-03-23 22:46:41,547 Epoch 295/2000 +2025-03-23 22:48:28,219 Current Learning Rate: 0.0004607705 +2025-03-23 22:48:28,219 Train Loss: 0.0001552, Val Loss: 0.0001398 +2025-03-23 22:48:28,220 Epoch 296/2000 +2025-03-23 
22:50:14,995 Current Learning Rate: 0.0004686047 +2025-03-23 22:50:14,996 Train Loss: 0.0001416, Val Loss: 0.0001737 +2025-03-23 22:50:14,996 Epoch 297/2000 +2025-03-23 22:52:01,426 Current Learning Rate: 0.0004764468 +2025-03-23 22:52:01,427 Train Loss: 0.0001581, Val Loss: 0.0001485 +2025-03-23 22:52:01,427 Epoch 298/2000 +2025-03-23 22:53:47,851 Current Learning Rate: 0.0004842946 +2025-03-23 22:53:47,851 Train Loss: 0.0001458, Val Loss: 0.0001535 +2025-03-23 22:53:47,851 Epoch 299/2000 +2025-03-23 22:55:34,177 Current Learning Rate: 0.0004921463 +2025-03-23 22:55:34,177 Train Loss: 0.0001449, Val Loss: 0.0001341 +2025-03-23 22:55:34,177 Epoch 300/2000 +2025-03-23 22:57:20,606 Current Learning Rate: 0.0005000000 +2025-03-23 22:57:20,607 Train Loss: 0.0001462, Val Loss: 0.0001464 +2025-03-23 22:57:20,607 Epoch 301/2000 +2025-03-23 22:59:07,123 Current Learning Rate: 0.0005078537 +2025-03-23 22:59:07,123 Train Loss: 0.0001537, Val Loss: 0.0001433 +2025-03-23 22:59:07,123 Epoch 302/2000 +2025-03-23 23:00:53,633 Current Learning Rate: 0.0005157054 +2025-03-23 23:00:53,633 Train Loss: 0.0001784, Val Loss: 0.0001502 +2025-03-23 23:00:53,633 Epoch 303/2000 +2025-03-23 23:02:39,892 Current Learning Rate: 0.0005235532 +2025-03-23 23:02:39,892 Train Loss: 0.0001510, Val Loss: 0.0001507 +2025-03-23 23:02:39,892 Epoch 304/2000 +2025-03-23 23:04:26,582 Current Learning Rate: 0.0005313953 +2025-03-23 23:04:26,586 Train Loss: 0.0001818, Val Loss: 0.0001509 +2025-03-23 23:04:26,586 Epoch 305/2000 +2025-03-23 23:06:13,639 Current Learning Rate: 0.0005392295 +2025-03-23 23:06:13,640 Train Loss: 0.0001654, Val Loss: 0.0001541 +2025-03-23 23:06:13,640 Epoch 306/2000 +2025-03-23 23:08:00,296 Current Learning Rate: 0.0005470542 +2025-03-23 23:08:00,297 Train Loss: 0.0001306, Val Loss: 0.0001410 +2025-03-23 23:08:00,297 Epoch 307/2000 +2025-03-23 23:09:47,095 Current Learning Rate: 0.0005548672 +2025-03-23 23:09:47,096 Train Loss: 0.0001618, Val Loss: 0.0001458 +2025-03-23 
23:09:47,096 Epoch 308/2000 +2025-03-23 23:11:34,166 Current Learning Rate: 0.0005626666 +2025-03-23 23:11:34,167 Train Loss: 0.0001675, Val Loss: 0.0001701 +2025-03-23 23:11:34,167 Epoch 309/2000 +2025-03-23 23:13:20,697 Current Learning Rate: 0.0005704506 +2025-03-23 23:13:20,697 Train Loss: 0.0001571, Val Loss: 0.0001512 +2025-03-23 23:13:20,697 Epoch 310/2000 +2025-03-23 23:15:07,132 Current Learning Rate: 0.0005782172 +2025-03-23 23:15:07,133 Train Loss: 0.0001768, Val Loss: 0.0001660 +2025-03-23 23:15:07,133 Epoch 311/2000 +2025-03-23 23:16:53,900 Current Learning Rate: 0.0005859646 +2025-03-23 23:16:53,901 Train Loss: 0.0001760, Val Loss: 0.0001557 +2025-03-23 23:16:53,901 Epoch 312/2000 +2025-03-23 23:18:40,848 Current Learning Rate: 0.0005936907 +2025-03-23 23:18:40,848 Train Loss: 0.0001369, Val Loss: 0.0001628 +2025-03-23 23:18:40,849 Epoch 313/2000 +2025-03-23 23:20:27,822 Current Learning Rate: 0.0006013936 +2025-03-23 23:20:27,822 Train Loss: 0.0001440, Val Loss: 0.0001430 +2025-03-23 23:20:27,822 Epoch 314/2000 +2025-03-23 23:22:14,564 Current Learning Rate: 0.0006090716 +2025-03-23 23:22:14,565 Train Loss: 0.0001673, Val Loss: 0.0001509 +2025-03-23 23:22:14,565 Epoch 315/2000 +2025-03-23 23:24:01,962 Current Learning Rate: 0.0006167227 +2025-03-23 23:24:01,963 Train Loss: 0.0001909, Val Loss: 0.0001766 +2025-03-23 23:24:01,963 Epoch 316/2000 +2025-03-23 23:25:48,657 Current Learning Rate: 0.0006243449 +2025-03-23 23:25:48,657 Train Loss: 0.0001501, Val Loss: 0.0001583 +2025-03-23 23:25:48,657 Epoch 317/2000 +2025-03-23 23:27:35,197 Current Learning Rate: 0.0006319365 +2025-03-23 23:27:35,197 Train Loss: 0.0001722, Val Loss: 0.0001526 +2025-03-23 23:27:35,198 Epoch 318/2000 +2025-03-23 23:29:21,952 Current Learning Rate: 0.0006394956 +2025-03-23 23:29:21,953 Train Loss: 0.0001460, Val Loss: 0.0001429 +2025-03-23 23:29:21,953 Epoch 319/2000 +2025-03-23 23:31:09,395 Current Learning Rate: 0.0006470202 +2025-03-23 23:31:09,396 Train Loss: 0.0001308, Val 
Loss: 0.0001731 +2025-03-23 23:31:09,396 Epoch 320/2000 +2025-03-23 23:32:56,495 Current Learning Rate: 0.0006545085 +2025-03-23 23:32:56,495 Train Loss: 0.0001532, Val Loss: 0.0001522 +2025-03-23 23:32:56,496 Epoch 321/2000 +2025-03-23 23:34:43,389 Current Learning Rate: 0.0006619587 +2025-03-23 23:34:43,390 Train Loss: 0.0001881, Val Loss: 0.0002288 +2025-03-23 23:34:43,390 Epoch 322/2000 +2025-03-23 23:36:30,007 Current Learning Rate: 0.0006693690 +2025-03-23 23:36:30,008 Train Loss: 0.0001702, Val Loss: 0.0001724 +2025-03-23 23:36:30,008 Epoch 323/2000 +2025-03-23 23:38:17,029 Current Learning Rate: 0.0006767374 +2025-03-23 23:38:17,030 Train Loss: 0.0001549, Val Loss: 0.0001587 +2025-03-23 23:38:17,030 Epoch 324/2000 +2025-03-23 23:40:04,116 Current Learning Rate: 0.0006840623 +2025-03-23 23:40:04,116 Train Loss: 0.0001890, Val Loss: 0.0001895 +2025-03-23 23:40:04,116 Epoch 325/2000 +2025-03-23 23:41:50,739 Current Learning Rate: 0.0006913417 +2025-03-23 23:41:50,740 Train Loss: 0.0001551, Val Loss: 0.0001506 +2025-03-23 23:41:50,740 Epoch 326/2000 +2025-03-23 23:43:37,349 Current Learning Rate: 0.0006985739 +2025-03-23 23:43:37,349 Train Loss: 0.0001395, Val Loss: 0.0001521 +2025-03-23 23:43:37,349 Epoch 327/2000 +2025-03-23 23:45:24,141 Current Learning Rate: 0.0007057572 +2025-03-23 23:45:24,142 Train Loss: 0.0001776, Val Loss: 0.0001522 +2025-03-23 23:45:24,142 Epoch 328/2000 +2025-03-23 23:47:10,864 Current Learning Rate: 0.0007128896 +2025-03-23 23:47:10,865 Train Loss: 0.0001784, Val Loss: 0.0001615 +2025-03-23 23:47:10,865 Epoch 329/2000 +2025-03-23 23:48:57,653 Current Learning Rate: 0.0007199696 +2025-03-23 23:48:57,653 Train Loss: 0.0001807, Val Loss: 0.0001924 +2025-03-23 23:48:57,653 Epoch 330/2000 +2025-03-23 23:50:43,880 Current Learning Rate: 0.0007269952 +2025-03-23 23:50:43,881 Train Loss: 0.0001651, Val Loss: 0.0001809 +2025-03-23 23:50:43,881 Epoch 331/2000 +2025-03-23 23:52:30,289 Current Learning Rate: 0.0007339649 +2025-03-23 
23:52:30,290 Train Loss: 0.0001562, Val Loss: 0.0001646 +2025-03-23 23:52:30,290 Epoch 332/2000 +2025-03-23 23:54:16,880 Current Learning Rate: 0.0007408768 +2025-03-23 23:54:16,880 Train Loss: 0.0001758, Val Loss: 0.0001656 +2025-03-23 23:54:16,880 Epoch 333/2000 +2025-03-23 23:56:03,513 Current Learning Rate: 0.0007477293 +2025-03-23 23:56:03,513 Train Loss: 0.0001635, Val Loss: 0.0001461 +2025-03-23 23:56:03,514 Epoch 334/2000 +2025-03-23 23:57:50,086 Current Learning Rate: 0.0007545207 +2025-03-23 23:57:50,087 Train Loss: 0.0001893, Val Loss: 0.0001703 +2025-03-23 23:57:50,087 Epoch 335/2000 +2025-03-23 23:59:36,513 Current Learning Rate: 0.0007612493 +2025-03-23 23:59:36,513 Train Loss: 0.0002081, Val Loss: 0.0001772 +2025-03-23 23:59:36,513 Epoch 336/2000 +2025-03-24 00:01:23,034 Current Learning Rate: 0.0007679134 +2025-03-24 00:01:23,034 Train Loss: 0.0001675, Val Loss: 0.0001653 +2025-03-24 00:01:23,035 Epoch 337/2000 +2025-03-24 00:03:09,658 Current Learning Rate: 0.0007745114 +2025-03-24 00:03:09,659 Train Loss: 0.0001579, Val Loss: 0.0001599 +2025-03-24 00:03:09,659 Epoch 338/2000 +2025-03-24 00:04:56,027 Current Learning Rate: 0.0007810417 +2025-03-24 00:04:56,028 Train Loss: 0.0001606, Val Loss: 0.0001798 +2025-03-24 00:04:56,028 Epoch 339/2000 +2025-03-24 00:06:42,310 Current Learning Rate: 0.0007875026 +2025-03-24 00:06:42,311 Train Loss: 0.0001476, Val Loss: 0.0001562 +2025-03-24 00:06:42,311 Epoch 340/2000 +2025-03-24 00:08:29,435 Current Learning Rate: 0.0007938926 +2025-03-24 00:08:29,435 Train Loss: 0.0001811, Val Loss: 0.0001817 +2025-03-24 00:08:29,436 Epoch 341/2000 +2025-03-24 00:10:16,172 Current Learning Rate: 0.0008002101 +2025-03-24 00:10:16,173 Train Loss: 0.0001671, Val Loss: 0.0001537 +2025-03-24 00:10:16,173 Epoch 342/2000 +2025-03-24 00:12:02,817 Current Learning Rate: 0.0008064535 +2025-03-24 00:12:02,818 Train Loss: 0.0001648, Val Loss: 0.0001603 +2025-03-24 00:12:02,818 Epoch 343/2000 +2025-03-24 00:13:49,904 Current Learning 
Rate: 0.0008126213 +2025-03-24 00:13:49,904 Train Loss: 0.0001751, Val Loss: 0.0001787 +2025-03-24 00:13:49,905 Epoch 344/2000 +2025-03-24 00:15:36,748 Current Learning Rate: 0.0008187120 +2025-03-24 00:15:36,748 Train Loss: 0.0001806, Val Loss: 0.0002005 +2025-03-24 00:15:36,748 Epoch 345/2000 +2025-03-24 00:17:23,326 Current Learning Rate: 0.0008247240 +2025-03-24 00:17:23,326 Train Loss: 0.0002110, Val Loss: 0.0002137 +2025-03-24 00:17:23,327 Epoch 346/2000 +2025-03-24 00:19:09,571 Current Learning Rate: 0.0008306559 +2025-03-24 00:19:09,571 Train Loss: 0.0001618, Val Loss: 0.0001685 +2025-03-24 00:19:09,571 Epoch 347/2000 +2025-03-24 00:20:56,130 Current Learning Rate: 0.0008365063 +2025-03-24 00:20:56,130 Train Loss: 0.0001556, Val Loss: 0.0001615 +2025-03-24 00:20:56,130 Epoch 348/2000 +2025-03-24 00:22:43,551 Current Learning Rate: 0.0008422736 +2025-03-24 00:22:43,551 Train Loss: 0.0001739, Val Loss: 0.0001681 +2025-03-24 00:22:43,552 Epoch 349/2000 +2025-03-24 00:24:30,105 Current Learning Rate: 0.0008479564 +2025-03-24 00:24:30,106 Train Loss: 0.0001688, Val Loss: 0.0001599 +2025-03-24 00:24:30,106 Epoch 350/2000 +2025-03-24 00:26:17,096 Current Learning Rate: 0.0008535534 +2025-03-24 00:26:17,096 Train Loss: 0.0001796, Val Loss: 0.0001490 +2025-03-24 00:26:17,096 Epoch 351/2000 +2025-03-24 00:28:03,552 Current Learning Rate: 0.0008590631 +2025-03-24 00:28:03,553 Train Loss: 0.0001767, Val Loss: 0.0001736 +2025-03-24 00:28:03,553 Epoch 352/2000 +2025-03-24 00:29:50,459 Current Learning Rate: 0.0008644843 +2025-03-24 00:29:50,460 Train Loss: 0.0001587, Val Loss: 0.0001590 +2025-03-24 00:29:50,460 Epoch 353/2000 +2025-03-24 00:31:37,794 Current Learning Rate: 0.0008698155 +2025-03-24 00:31:37,794 Train Loss: 0.0002079, Val Loss: 0.0001793 +2025-03-24 00:31:37,795 Epoch 354/2000 +2025-03-24 00:33:24,208 Current Learning Rate: 0.0008750555 +2025-03-24 00:33:24,209 Train Loss: 0.0001708, Val Loss: 0.0001718 +2025-03-24 00:33:24,209 Epoch 355/2000 +2025-03-24 
00:35:10,381 Current Learning Rate: 0.0008802030 +2025-03-24 00:35:10,382 Train Loss: 0.0001758, Val Loss: 0.0001504 +2025-03-24 00:35:10,382 Epoch 356/2000 +2025-03-24 00:36:56,854 Current Learning Rate: 0.0008852566 +2025-03-24 00:36:56,855 Train Loss: 0.0001580, Val Loss: 0.0001488 +2025-03-24 00:36:56,855 Epoch 357/2000 +2025-03-24 00:38:44,113 Current Learning Rate: 0.0008902152 +2025-03-24 00:38:44,113 Train Loss: 0.0001656, Val Loss: 0.0001674 +2025-03-24 00:38:44,113 Epoch 358/2000 +2025-03-24 00:40:31,044 Current Learning Rate: 0.0008950775 +2025-03-24 00:40:31,045 Train Loss: 0.0002041, Val Loss: 0.0002422 +2025-03-24 00:40:31,045 Epoch 359/2000 +2025-03-24 00:42:17,989 Current Learning Rate: 0.0008998423 +2025-03-24 00:42:17,990 Train Loss: 0.0002174, Val Loss: 0.0001877 +2025-03-24 00:42:17,990 Epoch 360/2000 +2025-03-24 00:44:04,929 Current Learning Rate: 0.0009045085 +2025-03-24 00:44:04,930 Train Loss: 0.0001771, Val Loss: 0.0001539 +2025-03-24 00:44:04,930 Epoch 361/2000 +2025-03-24 00:45:51,215 Current Learning Rate: 0.0009090749 +2025-03-24 00:45:51,216 Train Loss: 0.0001748, Val Loss: 0.0001459 +2025-03-24 00:45:51,216 Epoch 362/2000 +2025-03-24 00:47:37,533 Current Learning Rate: 0.0009135403 +2025-03-24 00:47:37,534 Train Loss: 0.0002154, Val Loss: 0.0001821 +2025-03-24 00:47:37,534 Epoch 363/2000 +2025-03-24 00:49:23,994 Current Learning Rate: 0.0009179037 +2025-03-24 00:49:23,994 Train Loss: 0.0001687, Val Loss: 0.0001485 +2025-03-24 00:49:23,994 Epoch 364/2000 +2025-03-24 00:51:10,371 Current Learning Rate: 0.0009221640 +2025-03-24 00:51:10,372 Train Loss: 0.0001814, Val Loss: 0.0001945 +2025-03-24 00:51:10,372 Epoch 365/2000 +2025-03-24 00:52:56,578 Current Learning Rate: 0.0009263201 +2025-03-24 00:52:56,578 Train Loss: 0.0001699, Val Loss: 0.0001533 +2025-03-24 00:52:56,578 Epoch 366/2000 +2025-03-24 00:54:43,014 Current Learning Rate: 0.0009303710 +2025-03-24 00:54:43,014 Train Loss: 0.0001586, Val Loss: 0.0001508 +2025-03-24 
00:54:43,014 Epoch 367/2000 +2025-03-24 00:56:30,010 Current Learning Rate: 0.0009343158 +2025-03-24 00:56:30,010 Train Loss: 0.0001643, Val Loss: 0.0001781 +2025-03-24 00:56:30,011 Epoch 368/2000 +2025-03-24 00:58:16,547 Current Learning Rate: 0.0009381533 +2025-03-24 00:58:16,547 Train Loss: 0.0001670, Val Loss: 0.0001796 +2025-03-24 00:58:16,548 Epoch 369/2000 +2025-03-24 01:00:02,878 Current Learning Rate: 0.0009418828 +2025-03-24 01:00:02,879 Train Loss: 0.0001737, Val Loss: 0.0001583 +2025-03-24 01:00:02,879 Epoch 370/2000 +2025-03-24 01:01:49,275 Current Learning Rate: 0.0009455033 +2025-03-24 01:01:49,275 Train Loss: 0.0001760, Val Loss: 0.0001679 +2025-03-24 01:01:49,276 Epoch 371/2000 +2025-03-24 01:03:35,735 Current Learning Rate: 0.0009490138 +2025-03-24 01:03:35,735 Train Loss: 0.0001676, Val Loss: 0.0001619 +2025-03-24 01:03:35,735 Epoch 372/2000 +2025-03-24 01:05:22,694 Current Learning Rate: 0.0009524135 +2025-03-24 01:05:22,695 Train Loss: 0.0001564, Val Loss: 0.0001966 +2025-03-24 01:05:22,695 Epoch 373/2000 +2025-03-24 01:07:08,938 Current Learning Rate: 0.0009557016 +2025-03-24 01:07:08,939 Train Loss: 0.0001431, Val Loss: 0.0001535 +2025-03-24 01:07:08,939 Epoch 374/2000 +2025-03-24 01:08:55,160 Current Learning Rate: 0.0009588773 +2025-03-24 01:08:55,161 Train Loss: 0.0001572, Val Loss: 0.0002023 +2025-03-24 01:08:55,161 Epoch 375/2000 +2025-03-24 01:10:41,844 Current Learning Rate: 0.0009619398 +2025-03-24 01:10:41,845 Train Loss: 0.0001707, Val Loss: 0.0001565 +2025-03-24 01:10:41,845 Epoch 376/2000 +2025-03-24 01:12:28,655 Current Learning Rate: 0.0009648882 +2025-03-24 01:12:28,655 Train Loss: 0.0001458, Val Loss: 0.0001440 +2025-03-24 01:12:28,655 Epoch 377/2000 +2025-03-24 01:14:15,251 Current Learning Rate: 0.0009677220 +2025-03-24 01:14:15,252 Train Loss: 0.0001604, Val Loss: 0.0001535 +2025-03-24 01:14:15,252 Epoch 378/2000 +2025-03-24 01:16:01,918 Current Learning Rate: 0.0009704404 +2025-03-24 01:16:01,919 Train Loss: 0.0001705, Val 
Loss: 0.0001559 +2025-03-24 01:16:01,919 Epoch 379/2000 +2025-03-24 01:17:48,679 Current Learning Rate: 0.0009730427 +2025-03-24 01:17:48,680 Train Loss: 0.0001672, Val Loss: 0.0001638 +2025-03-24 01:17:48,680 Epoch 380/2000 +2025-03-24 01:19:35,210 Current Learning Rate: 0.0009755283 +2025-03-24 01:19:35,211 Train Loss: 0.0001691, Val Loss: 0.0001708 +2025-03-24 01:19:35,211 Epoch 381/2000 +2025-03-24 01:21:22,110 Current Learning Rate: 0.0009778965 +2025-03-24 01:21:22,110 Train Loss: 0.0001676, Val Loss: 0.0001614 +2025-03-24 01:21:22,110 Epoch 382/2000 +2025-03-24 01:23:09,043 Current Learning Rate: 0.0009801468 +2025-03-24 01:23:09,043 Train Loss: 0.0001729, Val Loss: 0.0001733 +2025-03-24 01:23:09,043 Epoch 383/2000 +2025-03-24 01:24:56,089 Current Learning Rate: 0.0009822787 +2025-03-24 01:24:56,090 Train Loss: 0.0001551, Val Loss: 0.0002965 +2025-03-24 01:24:56,090 Epoch 384/2000 +2025-03-24 01:26:43,051 Current Learning Rate: 0.0009842916 +2025-03-24 01:26:43,052 Train Loss: 0.0002391, Val Loss: 0.0001695 +2025-03-24 01:26:43,052 Epoch 385/2000 +2025-03-24 01:28:29,590 Current Learning Rate: 0.0009861850 +2025-03-24 01:28:29,591 Train Loss: 0.0001628, Val Loss: 0.0001597 +2025-03-24 01:28:29,591 Epoch 386/2000 +2025-03-24 01:30:15,739 Current Learning Rate: 0.0009879584 +2025-03-24 01:30:15,740 Train Loss: 0.0001655, Val Loss: 0.0001486 +2025-03-24 01:30:15,740 Epoch 387/2000 +2025-03-24 01:32:02,315 Current Learning Rate: 0.0009896114 +2025-03-24 01:32:02,316 Train Loss: 0.0001392, Val Loss: 0.0001379 +2025-03-24 01:32:02,316 Epoch 388/2000 +2025-03-24 01:33:49,171 Current Learning Rate: 0.0009911436 +2025-03-24 01:33:49,171 Train Loss: 0.0001587, Val Loss: 0.0001819 +2025-03-24 01:33:49,171 Epoch 389/2000 +2025-03-24 01:35:35,755 Current Learning Rate: 0.0009925547 +2025-03-24 01:35:35,755 Train Loss: 0.0001607, Val Loss: 0.0002392 +2025-03-24 01:35:35,755 Epoch 390/2000 +2025-03-24 01:37:21,774 Current Learning Rate: 0.0009938442 +2025-03-24 
01:37:21,774 Train Loss: 0.0001953, Val Loss: 0.0001639 +2025-03-24 01:37:21,775 Epoch 391/2000 +2025-03-24 01:39:08,626 Current Learning Rate: 0.0009950118 +2025-03-24 01:39:08,626 Train Loss: 0.0001405, Val Loss: 0.0001414 +2025-03-24 01:39:08,626 Epoch 392/2000 +2025-03-24 01:40:55,593 Current Learning Rate: 0.0009960574 +2025-03-24 01:40:55,593 Train Loss: 0.0001421, Val Loss: 0.0001593 +2025-03-24 01:40:55,594 Epoch 393/2000 +2025-03-24 01:42:42,172 Current Learning Rate: 0.0009969805 +2025-03-24 01:42:42,172 Train Loss: 0.0001672, Val Loss: 0.0001830 +2025-03-24 01:42:42,172 Epoch 394/2000 +2025-03-24 01:44:28,563 Current Learning Rate: 0.0009977810 +2025-03-24 01:44:28,564 Train Loss: 0.0001932, Val Loss: 0.0001796 +2025-03-24 01:44:28,564 Epoch 395/2000 +2025-03-24 01:46:15,376 Current Learning Rate: 0.0009984587 +2025-03-24 01:46:15,377 Train Loss: 0.0001452, Val Loss: 0.0001681 +2025-03-24 01:46:15,377 Epoch 396/2000 +2025-03-24 01:48:01,813 Current Learning Rate: 0.0009990134 +2025-03-24 01:48:01,814 Train Loss: 0.0001970, Val Loss: 0.0003645 +2025-03-24 01:48:01,814 Epoch 397/2000 +2025-03-24 01:49:47,871 Current Learning Rate: 0.0009994449 +2025-03-24 01:49:47,871 Train Loss: 0.0002073, Val Loss: 0.0001623 +2025-03-24 01:49:47,871 Epoch 398/2000 +2025-03-24 01:51:34,118 Current Learning Rate: 0.0009997533 +2025-03-24 01:51:34,119 Train Loss: 0.0001561, Val Loss: 0.0001370 +2025-03-24 01:51:34,119 Epoch 399/2000 +2025-03-24 01:53:20,337 Current Learning Rate: 0.0009999383 +2025-03-24 01:53:20,337 Train Loss: 0.0001357, Val Loss: 0.0001382 +2025-03-24 01:53:20,337 Epoch 400/2000 +2025-03-24 01:55:06,898 Current Learning Rate: 0.0010000000 +2025-03-24 01:55:06,898 Train Loss: 0.0001402, Val Loss: 0.0001596 +2025-03-24 01:55:06,898 Epoch 401/2000 +2025-03-24 01:56:54,434 Current Learning Rate: 0.0009999383 +2025-03-24 01:56:54,434 Train Loss: 0.0001473, Val Loss: 0.0001627 +2025-03-24 01:56:54,434 Epoch 402/2000 +2025-03-24 01:58:41,237 Current Learning 
Rate: 0.0009997533 +2025-03-24 01:58:41,238 Train Loss: 0.0001609, Val Loss: 0.0001732 +2025-03-24 01:58:41,238 Epoch 403/2000 +2025-03-24 02:00:28,109 Current Learning Rate: 0.0009994449 +2025-03-24 02:00:28,110 Train Loss: 0.0001554, Val Loss: 0.0001600 +2025-03-24 02:00:28,110 Epoch 404/2000 +2025-03-24 02:02:15,071 Current Learning Rate: 0.0009990134 +2025-03-24 02:02:15,072 Train Loss: 0.0001633, Val Loss: 0.0001766 +2025-03-24 02:02:15,072 Epoch 405/2000 +2025-03-24 02:04:01,642 Current Learning Rate: 0.0009984587 +2025-03-24 02:04:01,642 Train Loss: 0.0001355, Val Loss: 0.0001406 +2025-03-24 02:04:01,642 Epoch 406/2000 +2025-03-24 02:05:48,083 Current Learning Rate: 0.0009977810 +2025-03-24 02:05:48,083 Train Loss: 0.0001425, Val Loss: 0.0001453 +2025-03-24 02:05:48,084 Epoch 407/2000 +2025-03-24 02:07:34,914 Current Learning Rate: 0.0009969805 +2025-03-24 02:07:34,915 Train Loss: 0.0001681, Val Loss: 0.0001551 +2025-03-24 02:07:34,915 Epoch 408/2000 +2025-03-24 02:09:21,373 Current Learning Rate: 0.0009960574 +2025-03-24 02:09:21,373 Train Loss: 0.0001438, Val Loss: 0.0001403 +2025-03-24 02:09:21,374 Epoch 409/2000 +2025-03-24 02:11:08,355 Current Learning Rate: 0.0009950118 +2025-03-24 02:11:08,355 Train Loss: 0.0001427, Val Loss: 0.0001368 +2025-03-24 02:11:08,355 Epoch 410/2000 +2025-03-24 02:12:54,410 Current Learning Rate: 0.0009938442 +2025-03-24 02:12:54,411 Train Loss: 0.0001397, Val Loss: 0.0001382 +2025-03-24 02:12:54,411 Epoch 411/2000 +2025-03-24 02:14:41,476 Current Learning Rate: 0.0009925547 +2025-03-24 02:14:41,476 Train Loss: 0.0001386, Val Loss: 0.0001426 +2025-03-24 02:14:41,476 Epoch 412/2000 +2025-03-24 02:16:28,260 Current Learning Rate: 0.0009911436 +2025-03-24 02:16:28,260 Train Loss: 0.0001490, Val Loss: 0.0001451 +2025-03-24 02:16:28,261 Epoch 413/2000 +2025-03-24 02:18:15,470 Current Learning Rate: 0.0009896114 +2025-03-24 02:18:15,471 Train Loss: 0.0001466, Val Loss: 0.0001438 +2025-03-24 02:18:15,472 Epoch 414/2000 +2025-03-24 
02:20:02,278 Current Learning Rate: 0.0009879584 +2025-03-24 02:20:02,278 Train Loss: 0.0001712, Val Loss: 0.0001831 +2025-03-24 02:20:02,278 Epoch 415/2000 +2025-03-24 02:21:49,342 Current Learning Rate: 0.0009861850 +2025-03-24 02:21:49,343 Train Loss: 0.0001605, Val Loss: 0.0001482 +2025-03-24 02:21:49,343 Epoch 416/2000 +2025-03-24 02:23:36,701 Current Learning Rate: 0.0009842916 +2025-03-24 02:23:36,702 Train Loss: 0.0001472, Val Loss: 0.0001336 +2025-03-24 02:23:36,702 Epoch 417/2000 +2025-03-24 02:25:23,550 Current Learning Rate: 0.0009822787 +2025-03-24 02:25:23,551 Train Loss: 0.0001681, Val Loss: 0.0001726 +2025-03-24 02:25:23,551 Epoch 418/2000 +2025-03-24 02:27:10,613 Current Learning Rate: 0.0009801468 +2025-03-24 02:27:10,613 Train Loss: 0.0001785, Val Loss: 0.0001376 +2025-03-24 02:27:10,613 Epoch 419/2000 +2025-03-24 02:28:57,355 Current Learning Rate: 0.0009778965 +2025-03-24 02:28:57,355 Train Loss: 0.0001242, Val Loss: 0.0001268 +2025-03-24 02:28:57,355 Epoch 420/2000 +2025-03-24 02:30:44,331 Current Learning Rate: 0.0009755283 +2025-03-24 02:30:44,332 Train Loss: 0.0001309, Val Loss: 0.0001281 +2025-03-24 02:30:44,332 Epoch 421/2000 +2025-03-24 02:32:31,452 Current Learning Rate: 0.0009730427 +2025-03-24 02:32:31,453 Train Loss: 0.0001273, Val Loss: 0.0001401 +2025-03-24 02:32:31,453 Epoch 422/2000 +2025-03-24 02:34:17,850 Current Learning Rate: 0.0009704404 +2025-03-24 02:34:17,850 Train Loss: 0.0001339, Val Loss: 0.0001362 +2025-03-24 02:34:17,851 Epoch 423/2000 +2025-03-24 02:36:04,322 Current Learning Rate: 0.0009677220 +2025-03-24 02:36:04,322 Train Loss: 0.0001462, Val Loss: 0.0001498 +2025-03-24 02:36:04,322 Epoch 424/2000 +2025-03-24 02:37:51,074 Current Learning Rate: 0.0009648882 +2025-03-24 02:37:51,074 Train Loss: 0.0001218, Val Loss: 0.0001241 +2025-03-24 02:37:51,075 Epoch 425/2000 +2025-03-24 02:39:37,307 Current Learning Rate: 0.0009619398 +2025-03-24 02:39:37,308 Train Loss: 0.0001469, Val Loss: 0.0001334 +2025-03-24 
02:39:37,308 Epoch 426/2000 +2025-03-24 02:41:23,725 Current Learning Rate: 0.0009588773 +2025-03-24 02:41:23,725 Train Loss: 0.0001280, Val Loss: 0.0001236 +2025-03-24 02:41:23,725 Epoch 427/2000 +2025-03-24 02:43:10,662 Current Learning Rate: 0.0009557016 +2025-03-24 02:43:10,662 Train Loss: 0.0001871, Val Loss: 0.0001282 +2025-03-24 02:43:10,662 Epoch 428/2000 +2025-03-24 02:44:57,275 Current Learning Rate: 0.0009524135 +2025-03-24 02:44:57,275 Train Loss: 0.0001329, Val Loss: 0.0001241 +2025-03-24 02:44:57,275 Epoch 429/2000 +2025-03-24 02:46:44,059 Current Learning Rate: 0.0009490138 +2025-03-24 02:46:44,881 Train Loss: 0.0001478, Val Loss: 0.0001208 +2025-03-24 02:46:44,881 Epoch 430/2000 +2025-03-24 02:48:30,933 Current Learning Rate: 0.0009455033 +2025-03-24 02:48:30,934 Train Loss: 0.0001240, Val Loss: 0.0001400 +2025-03-24 02:48:30,934 Epoch 431/2000 +2025-03-24 02:50:17,536 Current Learning Rate: 0.0009418828 +2025-03-24 02:50:17,536 Train Loss: 0.0001391, Val Loss: 0.0001728 +2025-03-24 02:50:17,537 Epoch 432/2000 +2025-03-24 02:52:04,752 Current Learning Rate: 0.0009381533 +2025-03-24 02:52:04,752 Train Loss: 0.0001277, Val Loss: 0.0001259 +2025-03-24 02:52:04,753 Epoch 433/2000 +2025-03-24 02:53:51,542 Current Learning Rate: 0.0009343158 +2025-03-24 02:53:51,543 Train Loss: 0.0001359, Val Loss: 0.0001603 +2025-03-24 02:53:51,543 Epoch 434/2000 +2025-03-24 02:55:38,145 Current Learning Rate: 0.0009303710 +2025-03-24 02:55:38,145 Train Loss: 0.0001373, Val Loss: 0.0001441 +2025-03-24 02:55:38,146 Epoch 435/2000 +2025-03-24 02:57:25,089 Current Learning Rate: 0.0009263201 +2025-03-24 02:57:25,089 Train Loss: 0.0001411, Val Loss: 0.0001523 +2025-03-24 02:57:25,090 Epoch 436/2000 +2025-03-24 02:59:12,233 Current Learning Rate: 0.0009221640 +2025-03-24 02:59:12,233 Train Loss: 0.0001524, Val Loss: 0.0002032 +2025-03-24 02:59:12,234 Epoch 437/2000 +2025-03-24 03:00:59,020 Current Learning Rate: 0.0009179037 +2025-03-24 03:00:59,021 Train Loss: 0.0001303, Val 
Loss: 0.0001283 +2025-03-24 03:00:59,021 Epoch 438/2000 +2025-03-24 03:02:45,914 Current Learning Rate: 0.0009135403 +2025-03-24 03:02:45,914 Train Loss: 0.0001167, Val Loss: 0.0001325 +2025-03-24 03:02:45,914 Epoch 439/2000 +2025-03-24 03:04:32,872 Current Learning Rate: 0.0009090749 +2025-03-24 03:04:32,873 Train Loss: 0.0001246, Val Loss: 0.0001317 +2025-03-24 03:04:32,873 Epoch 440/2000 +2025-03-24 03:06:19,328 Current Learning Rate: 0.0009045085 +2025-03-24 03:06:19,329 Train Loss: 0.0001784, Val Loss: 0.0001331 +2025-03-24 03:06:19,329 Epoch 441/2000 +2025-03-24 03:08:05,792 Current Learning Rate: 0.0008998423 +2025-03-24 03:08:05,792 Train Loss: 0.0001466, Val Loss: 0.0001256 +2025-03-24 03:08:05,792 Epoch 442/2000 +2025-03-24 03:09:51,736 Current Learning Rate: 0.0008950775 +2025-03-24 03:09:52,644 Train Loss: 0.0001253, Val Loss: 0.0001172 +2025-03-24 03:09:52,644 Epoch 443/2000 +2025-03-24 03:11:38,888 Current Learning Rate: 0.0008902152 +2025-03-24 03:11:39,761 Train Loss: 0.0001108, Val Loss: 0.0001159 +2025-03-24 03:11:39,762 Epoch 444/2000 +2025-03-24 03:13:25,970 Current Learning Rate: 0.0008852566 +2025-03-24 03:13:25,971 Train Loss: 0.0001317, Val Loss: 0.0001261 +2025-03-24 03:13:25,971 Epoch 445/2000 +2025-03-24 03:15:12,701 Current Learning Rate: 0.0008802030 +2025-03-24 03:15:12,702 Train Loss: 0.0001293, Val Loss: 0.0001239 +2025-03-24 03:15:12,702 Epoch 446/2000 +2025-03-24 03:16:58,663 Current Learning Rate: 0.0008750555 +2025-03-24 03:16:58,663 Train Loss: 0.0001388, Val Loss: 0.0001283 +2025-03-24 03:16:58,663 Epoch 447/2000 +2025-03-24 03:18:45,208 Current Learning Rate: 0.0008698155 +2025-03-24 03:18:45,208 Train Loss: 0.0001344, Val Loss: 0.0001252 +2025-03-24 03:18:45,208 Epoch 448/2000 +2025-03-24 03:20:32,268 Current Learning Rate: 0.0008644843 +2025-03-24 03:20:32,268 Train Loss: 0.0001389, Val Loss: 0.0001414 +2025-03-24 03:20:32,269 Epoch 449/2000 +2025-03-24 03:22:18,950 Current Learning Rate: 0.0008590631 +2025-03-24 
03:22:18,950 Train Loss: 0.0001279, Val Loss: 0.0001327 +2025-03-24 03:22:18,951 Epoch 450/2000 +2025-03-24 03:24:05,570 Current Learning Rate: 0.0008535534 +2025-03-24 03:24:05,570 Train Loss: 0.0001416, Val Loss: 0.0001481 +2025-03-24 03:24:05,571 Epoch 451/2000 +2025-03-24 03:25:52,271 Current Learning Rate: 0.0008479564 +2025-03-24 03:25:52,272 Train Loss: 0.0001238, Val Loss: 0.0001183 +2025-03-24 03:25:52,272 Epoch 452/2000 +2025-03-24 03:27:38,785 Current Learning Rate: 0.0008422736 +2025-03-24 03:27:38,786 Train Loss: 0.0001064, Val Loss: 0.0001169 +2025-03-24 03:27:38,786 Epoch 453/2000 +2025-03-24 03:29:25,421 Current Learning Rate: 0.0008365063 +2025-03-24 03:29:26,215 Train Loss: 0.0001201, Val Loss: 0.0001094 +2025-03-24 03:29:26,215 Epoch 454/2000 +2025-03-24 03:31:12,006 Current Learning Rate: 0.0008306559 +2025-03-24 03:31:12,007 Train Loss: 0.0001155, Val Loss: 0.0001303 +2025-03-24 03:31:12,008 Epoch 455/2000 +2025-03-24 03:32:58,905 Current Learning Rate: 0.0008247240 +2025-03-24 03:32:58,905 Train Loss: 0.0001450, Val Loss: 0.0001806 +2025-03-24 03:32:58,906 Epoch 456/2000 +2025-03-24 03:34:46,003 Current Learning Rate: 0.0008187120 +2025-03-24 03:34:46,003 Train Loss: 0.0001277, Val Loss: 0.0001171 +2025-03-24 03:34:46,003 Epoch 457/2000 +2025-03-24 03:36:32,958 Current Learning Rate: 0.0008126213 +2025-03-24 03:36:32,958 Train Loss: 0.0001113, Val Loss: 0.0001114 +2025-03-24 03:36:32,958 Epoch 458/2000 +2025-03-24 03:38:18,896 Current Learning Rate: 0.0008064535 +2025-03-24 03:38:18,897 Train Loss: 0.0001024, Val Loss: 0.0001106 +2025-03-24 03:38:18,897 Epoch 459/2000 +2025-03-24 03:40:05,514 Current Learning Rate: 0.0008002101 +2025-03-24 03:40:05,514 Train Loss: 0.0001083, Val Loss: 0.0001158 +2025-03-24 03:40:05,514 Epoch 460/2000 +2025-03-24 03:41:52,494 Current Learning Rate: 0.0007938926 +2025-03-24 03:41:52,495 Train Loss: 0.0001108, Val Loss: 0.0001288 +2025-03-24 03:41:52,495 Epoch 461/2000 +2025-03-24 03:43:39,788 Current Learning 
Rate: 0.0007875026 +2025-03-24 03:43:39,789 Train Loss: 0.0001223, Val Loss: 0.0001172 +2025-03-24 03:43:39,789 Epoch 462/2000 +2025-03-24 03:45:25,754 Current Learning Rate: 0.0007810417 +2025-03-24 03:45:26,570 Train Loss: 0.0000973, Val Loss: 0.0001090 +2025-03-24 03:45:26,570 Epoch 463/2000 +2025-03-24 03:47:12,560 Current Learning Rate: 0.0007745114 +2025-03-24 03:47:12,561 Train Loss: 0.0001083, Val Loss: 0.0001136 +2025-03-24 03:47:12,561 Epoch 464/2000 +2025-03-24 03:48:59,340 Current Learning Rate: 0.0007679134 +2025-03-24 03:48:59,341 Train Loss: 0.0001100, Val Loss: 0.0001159 +2025-03-24 03:48:59,341 Epoch 465/2000 +2025-03-24 03:50:45,947 Current Learning Rate: 0.0007612493 +2025-03-24 03:50:46,804 Train Loss: 0.0001215, Val Loss: 0.0001079 +2025-03-24 03:50:46,805 Epoch 466/2000 +2025-03-24 03:52:32,438 Current Learning Rate: 0.0007545207 +2025-03-24 03:52:33,292 Train Loss: 0.0001005, Val Loss: 0.0001072 +2025-03-24 03:52:33,292 Epoch 467/2000 +2025-03-24 03:54:19,368 Current Learning Rate: 0.0007477293 +2025-03-24 03:54:19,369 Train Loss: 0.0001410, Val Loss: 0.0001130 +2025-03-24 03:54:19,369 Epoch 468/2000 +2025-03-24 03:56:06,113 Current Learning Rate: 0.0007408768 +2025-03-24 03:56:06,114 Train Loss: 0.0001153, Val Loss: 0.0001188 +2025-03-24 03:56:06,114 Epoch 469/2000 +2025-03-24 03:57:52,629 Current Learning Rate: 0.0007339649 +2025-03-24 03:57:53,527 Train Loss: 0.0000931, Val Loss: 0.0001054 +2025-03-24 03:57:53,527 Epoch 470/2000 +2025-03-24 03:59:39,635 Current Learning Rate: 0.0007269952 +2025-03-24 03:59:39,636 Train Loss: 0.0001234, Val Loss: 0.0001369 +2025-03-24 03:59:39,636 Epoch 471/2000 +2025-03-24 04:01:26,351 Current Learning Rate: 0.0007199696 +2025-03-24 04:01:26,351 Train Loss: 0.0001231, Val Loss: 0.0001141 +2025-03-24 04:01:26,352 Epoch 472/2000 +2025-03-24 04:03:12,409 Current Learning Rate: 0.0007128896 +2025-03-24 04:03:12,410 Train Loss: 0.0001076, Val Loss: 0.0001184 +2025-03-24 04:03:12,410 Epoch 473/2000 +2025-03-24 
04:04:59,197 Current Learning Rate: 0.0007057572 +2025-03-24 04:04:59,197 Train Loss: 0.0000919, Val Loss: 0.0001062 +2025-03-24 04:04:59,197 Epoch 474/2000 +2025-03-24 04:06:46,390 Current Learning Rate: 0.0006985739 +2025-03-24 04:06:46,391 Train Loss: 0.0000941, Val Loss: 0.0001058 +2025-03-24 04:06:46,391 Epoch 475/2000 +2025-03-24 04:08:32,697 Current Learning Rate: 0.0006913417 +2025-03-24 04:08:32,698 Train Loss: 0.0001019, Val Loss: 0.0001069 +2025-03-24 04:08:32,698 Epoch 476/2000 +2025-03-24 04:10:19,439 Current Learning Rate: 0.0006840623 +2025-03-24 04:10:19,440 Train Loss: 0.0001110, Val Loss: 0.0001060 +2025-03-24 04:10:19,440 Epoch 477/2000 +2025-03-24 04:12:05,810 Current Learning Rate: 0.0006767374 +2025-03-24 04:12:06,745 Train Loss: 0.0001041, Val Loss: 0.0001022 +2025-03-24 04:12:06,745 Epoch 478/2000 +2025-03-24 04:13:53,019 Current Learning Rate: 0.0006693690 +2025-03-24 04:13:53,867 Train Loss: 0.0000999, Val Loss: 0.0000995 +2025-03-24 04:13:53,867 Epoch 479/2000 +2025-03-24 04:15:40,356 Current Learning Rate: 0.0006619587 +2025-03-24 04:15:40,357 Train Loss: 0.0001147, Val Loss: 0.0001105 +2025-03-24 04:15:40,357 Epoch 480/2000 +2025-03-24 04:17:27,635 Current Learning Rate: 0.0006545085 +2025-03-24 04:17:27,635 Train Loss: 0.0001138, Val Loss: 0.0001124 +2025-03-24 04:17:27,636 Epoch 481/2000 +2025-03-24 04:19:14,476 Current Learning Rate: 0.0006470202 +2025-03-24 04:19:14,476 Train Loss: 0.0001092, Val Loss: 0.0001164 +2025-03-24 04:19:14,476 Epoch 482/2000 +2025-03-24 04:21:01,236 Current Learning Rate: 0.0006394956 +2025-03-24 04:21:01,237 Train Loss: 0.0001062, Val Loss: 0.0001102 +2025-03-24 04:21:01,237 Epoch 483/2000 +2025-03-24 04:22:47,992 Current Learning Rate: 0.0006319365 +2025-03-24 04:22:47,993 Train Loss: 0.0000881, Val Loss: 0.0001064 +2025-03-24 04:22:47,993 Epoch 484/2000 +2025-03-24 04:24:34,878 Current Learning Rate: 0.0006243449 +2025-03-24 04:24:34,878 Train Loss: 0.0000932, Val Loss: 0.0001025 +2025-03-24 
04:24:34,878 Epoch 485/2000 +2025-03-24 04:26:21,953 Current Learning Rate: 0.0006167227 +2025-03-24 04:26:21,953 Train Loss: 0.0001095, Val Loss: 0.0001091 +2025-03-24 04:26:21,954 Epoch 486/2000 +2025-03-24 04:28:08,708 Current Learning Rate: 0.0006090716 +2025-03-24 04:28:08,709 Train Loss: 0.0001058, Val Loss: 0.0001116 +2025-03-24 04:28:08,709 Epoch 487/2000 +2025-03-24 04:29:55,660 Current Learning Rate: 0.0006013936 +2025-03-24 04:29:55,660 Train Loss: 0.0001014, Val Loss: 0.0001104 +2025-03-24 04:29:55,660 Epoch 488/2000 +2025-03-24 04:31:42,905 Current Learning Rate: 0.0005936907 +2025-03-24 04:31:42,906 Train Loss: 0.0001075, Val Loss: 0.0001158 +2025-03-24 04:31:42,906 Epoch 489/2000 +2025-03-24 04:33:29,896 Current Learning Rate: 0.0005859646 +2025-03-24 04:33:29,896 Train Loss: 0.0001094, Val Loss: 0.0001167 +2025-03-24 04:33:29,897 Epoch 490/2000 +2025-03-24 04:35:16,611 Current Learning Rate: 0.0005782172 +2025-03-24 04:35:16,612 Train Loss: 0.0001043, Val Loss: 0.0001022 +2025-03-24 04:35:16,613 Epoch 491/2000 +2025-03-24 04:37:03,156 Current Learning Rate: 0.0005704506 +2025-03-24 04:37:04,097 Train Loss: 0.0001051, Val Loss: 0.0000992 +2025-03-24 04:37:04,098 Epoch 492/2000 +2025-03-24 04:38:49,865 Current Learning Rate: 0.0005626666 +2025-03-24 04:38:49,866 Train Loss: 0.0001138, Val Loss: 0.0001020 +2025-03-24 04:38:49,867 Epoch 493/2000 +2025-03-24 04:40:36,139 Current Learning Rate: 0.0005548672 +2025-03-24 04:40:37,012 Train Loss: 0.0001005, Val Loss: 0.0000965 +2025-03-24 04:40:37,012 Epoch 494/2000 +2025-03-24 04:42:23,573 Current Learning Rate: 0.0005470542 +2025-03-24 04:42:23,574 Train Loss: 0.0000869, Val Loss: 0.0000967 +2025-03-24 04:42:23,574 Epoch 495/2000 +2025-03-24 04:44:09,950 Current Learning Rate: 0.0005392295 +2025-03-24 04:44:09,951 Train Loss: 0.0001014, Val Loss: 0.0001138 +2025-03-24 04:44:09,951 Epoch 496/2000 +2025-03-24 04:45:56,558 Current Learning Rate: 0.0005313953 +2025-03-24 04:45:56,559 Train Loss: 0.0000887, Val 
Loss: 0.0000990 +2025-03-24 04:45:56,559 Epoch 497/2000 +2025-03-24 04:47:43,310 Current Learning Rate: 0.0005235532 +2025-03-24 04:47:44,214 Train Loss: 0.0000880, Val Loss: 0.0000935 +2025-03-24 04:47:44,214 Epoch 498/2000 +2025-03-24 04:49:30,480 Current Learning Rate: 0.0005157054 +2025-03-24 04:49:30,480 Train Loss: 0.0001148, Val Loss: 0.0000996 +2025-03-24 04:49:30,481 Epoch 499/2000 +2025-03-24 04:51:17,255 Current Learning Rate: 0.0005078537 +2025-03-24 04:51:17,255 Train Loss: 0.0000966, Val Loss: 0.0000975 +2025-03-24 04:51:17,256 Epoch 500/2000 +2025-03-24 04:53:03,538 Current Learning Rate: 0.0005000000 +2025-03-24 04:53:03,538 Train Loss: 0.0000940, Val Loss: 0.0001020 +2025-03-24 04:53:03,539 Epoch 501/2000 +2025-03-24 04:54:50,364 Current Learning Rate: 0.0004921463 +2025-03-24 04:54:50,365 Train Loss: 0.0001055, Val Loss: 0.0000985 +2025-03-24 04:54:50,365 Epoch 502/2000 +2025-03-24 04:56:36,785 Current Learning Rate: 0.0004842946 +2025-03-24 04:56:36,785 Train Loss: 0.0000979, Val Loss: 0.0000976 +2025-03-24 04:56:36,785 Epoch 503/2000 +2025-03-24 04:58:23,128 Current Learning Rate: 0.0004764468 +2025-03-24 04:58:23,128 Train Loss: 0.0000874, Val Loss: 0.0000939 +2025-03-24 04:58:23,128 Epoch 504/2000 +2025-03-24 05:00:09,937 Current Learning Rate: 0.0004686047 +2025-03-24 05:00:10,801 Train Loss: 0.0000903, Val Loss: 0.0000915 +2025-03-24 05:00:10,802 Epoch 505/2000 +2025-03-24 05:01:57,434 Current Learning Rate: 0.0004607705 +2025-03-24 05:01:57,435 Train Loss: 0.0000966, Val Loss: 0.0000964 +2025-03-24 05:01:57,435 Epoch 506/2000 +2025-03-24 05:03:44,182 Current Learning Rate: 0.0004529458 +2025-03-24 05:03:45,064 Train Loss: 0.0000791, Val Loss: 0.0000896 +2025-03-24 05:03:45,064 Epoch 507/2000 +2025-03-24 05:05:31,308 Current Learning Rate: 0.0004451328 +2025-03-24 05:05:31,309 Train Loss: 0.0000851, Val Loss: 0.0000912 +2025-03-24 05:05:31,309 Epoch 508/2000 +2025-03-24 05:07:18,364 Current Learning Rate: 0.0004373334 +2025-03-24 
05:07:18,365 Train Loss: 0.0000820, Val Loss: 0.0000909 +2025-03-24 05:07:18,365 Epoch 509/2000 +2025-03-24 05:09:05,340 Current Learning Rate: 0.0004295494 +2025-03-24 05:09:06,285 Train Loss: 0.0000823, Val Loss: 0.0000891 +2025-03-24 05:09:06,286 Epoch 510/2000 +2025-03-24 05:10:52,333 Current Learning Rate: 0.0004217828 +2025-03-24 05:10:52,334 Train Loss: 0.0000774, Val Loss: 0.0000901 +2025-03-24 05:10:52,334 Epoch 511/2000 +2025-03-24 05:12:39,043 Current Learning Rate: 0.0004140354 +2025-03-24 05:12:39,043 Train Loss: 0.0000924, Val Loss: 0.0000948 +2025-03-24 05:12:39,043 Epoch 512/2000 +2025-03-24 05:14:26,020 Current Learning Rate: 0.0004063093 +2025-03-24 05:14:26,020 Train Loss: 0.0000884, Val Loss: 0.0000952 +2025-03-24 05:14:26,021 Epoch 513/2000 +2025-03-24 05:16:12,408 Current Learning Rate: 0.0003986064 +2025-03-24 05:16:12,408 Train Loss: 0.0000936, Val Loss: 0.0000967 +2025-03-24 05:16:12,408 Epoch 514/2000 +2025-03-24 05:17:58,557 Current Learning Rate: 0.0003909284 +2025-03-24 05:17:58,558 Train Loss: 0.0000842, Val Loss: 0.0000908 +2025-03-24 05:17:58,558 Epoch 515/2000 +2025-03-24 05:19:44,715 Current Learning Rate: 0.0003832773 +2025-03-24 05:19:45,573 Train Loss: 0.0000896, Val Loss: 0.0000876 +2025-03-24 05:19:45,573 Epoch 516/2000 +2025-03-24 05:21:31,792 Current Learning Rate: 0.0003756551 +2025-03-24 05:21:32,703 Train Loss: 0.0000882, Val Loss: 0.0000872 +2025-03-24 05:21:32,704 Epoch 517/2000 +2025-03-24 05:23:18,603 Current Learning Rate: 0.0003680635 +2025-03-24 05:23:18,604 Train Loss: 0.0000847, Val Loss: 0.0000872 +2025-03-24 05:23:18,604 Epoch 518/2000 +2025-03-24 05:25:04,866 Current Learning Rate: 0.0003605044 +2025-03-24 05:25:04,867 Train Loss: 0.0000879, Val Loss: 0.0000903 +2025-03-24 05:25:04,867 Epoch 519/2000 +2025-03-24 05:26:51,065 Current Learning Rate: 0.0003529798 +2025-03-24 05:26:51,880 Train Loss: 0.0000786, Val Loss: 0.0000859 +2025-03-24 05:26:51,881 Epoch 520/2000 +2025-03-24 05:28:37,824 Current Learning 
Rate: 0.0003454915 +2025-03-24 05:28:37,825 Train Loss: 0.0000949, Val Loss: 0.0000878 +2025-03-24 05:28:37,825 Epoch 521/2000 +2025-03-24 05:30:24,456 Current Learning Rate: 0.0003380413 +2025-03-24 05:30:24,456 Train Loss: 0.0000862, Val Loss: 0.0000895 +2025-03-24 05:30:24,456 Epoch 522/2000 +2025-03-24 05:32:10,978 Current Learning Rate: 0.0003306310 +2025-03-24 05:32:10,979 Train Loss: 0.0000785, Val Loss: 0.0000890 +2025-03-24 05:32:10,979 Epoch 523/2000 +2025-03-24 05:33:57,504 Current Learning Rate: 0.0003232626 +2025-03-24 05:33:57,505 Train Loss: 0.0000768, Val Loss: 0.0000900 +2025-03-24 05:33:57,505 Epoch 524/2000 +2025-03-24 05:35:43,647 Current Learning Rate: 0.0003159377 +2025-03-24 05:35:44,470 Train Loss: 0.0000696, Val Loss: 0.0000859 +2025-03-24 05:35:44,470 Epoch 525/2000 +2025-03-24 05:37:30,756 Current Learning Rate: 0.0003086583 +2025-03-24 05:37:31,651 Train Loss: 0.0000846, Val Loss: 0.0000854 +2025-03-24 05:37:31,651 Epoch 526/2000 +2025-03-24 05:39:17,530 Current Learning Rate: 0.0003014261 +2025-03-24 05:39:18,337 Train Loss: 0.0000770, Val Loss: 0.0000837 +2025-03-24 05:39:18,338 Epoch 527/2000 +2025-03-24 05:41:04,142 Current Learning Rate: 0.0002942428 +2025-03-24 05:41:04,143 Train Loss: 0.0000797, Val Loss: 0.0000859 +2025-03-24 05:41:04,143 Epoch 528/2000 +2025-03-24 05:42:50,853 Current Learning Rate: 0.0002871104 +2025-03-24 05:42:50,854 Train Loss: 0.0000796, Val Loss: 0.0000851 +2025-03-24 05:42:50,854 Epoch 529/2000 +2025-03-24 05:44:37,324 Current Learning Rate: 0.0002800304 +2025-03-24 05:44:38,155 Train Loss: 0.0000750, Val Loss: 0.0000833 +2025-03-24 05:44:38,155 Epoch 530/2000 +2025-03-24 05:46:23,836 Current Learning Rate: 0.0002730048 +2025-03-24 05:46:23,836 Train Loss: 0.0000844, Val Loss: 0.0000847 +2025-03-24 05:46:23,837 Epoch 531/2000 +2025-03-24 05:48:10,713 Current Learning Rate: 0.0002660351 +2025-03-24 05:48:10,713 Train Loss: 0.0000839, Val Loss: 0.0000842 +2025-03-24 05:48:10,714 Epoch 532/2000 +2025-03-24 
05:49:58,235 Current Learning Rate: 0.0002591232 +2025-03-24 05:49:58,235 Train Loss: 0.0000861, Val Loss: 0.0000865 +2025-03-24 05:49:58,236 Epoch 533/2000 +2025-03-24 05:51:45,353 Current Learning Rate: 0.0002522707 +2025-03-24 05:51:45,353 Train Loss: 0.0000815, Val Loss: 0.0000867 +2025-03-24 05:51:45,353 Epoch 534/2000 +2025-03-24 05:53:32,131 Current Learning Rate: 0.0002454793 +2025-03-24 05:53:32,131 Train Loss: 0.0000751, Val Loss: 0.0000841 +2025-03-24 05:53:32,131 Epoch 535/2000 +2025-03-24 05:55:18,802 Current Learning Rate: 0.0002387507 +2025-03-24 05:55:18,802 Train Loss: 0.0000781, Val Loss: 0.0000843 +2025-03-24 05:55:18,802 Epoch 536/2000 +2025-03-24 05:57:06,051 Current Learning Rate: 0.0002320866 +2025-03-24 05:57:06,939 Train Loss: 0.0000738, Val Loss: 0.0000825 +2025-03-24 05:57:06,939 Epoch 537/2000 +2025-03-24 05:58:53,293 Current Learning Rate: 0.0002254886 +2025-03-24 05:58:53,294 Train Loss: 0.0000877, Val Loss: 0.0000836 +2025-03-24 05:58:53,294 Epoch 538/2000 +2025-03-24 06:00:39,783 Current Learning Rate: 0.0002189583 +2025-03-24 06:00:40,641 Train Loss: 0.0000723, Val Loss: 0.0000813 +2025-03-24 06:00:40,641 Epoch 539/2000 +2025-03-24 06:02:27,172 Current Learning Rate: 0.0002124974 +2025-03-24 06:02:27,173 Train Loss: 0.0000750, Val Loss: 0.0000814 +2025-03-24 06:02:27,173 Epoch 540/2000 +2025-03-24 06:04:14,331 Current Learning Rate: 0.0002061074 +2025-03-24 06:04:14,332 Train Loss: 0.0000755, Val Loss: 0.0000826 +2025-03-24 06:04:14,332 Epoch 541/2000 +2025-03-24 06:06:00,264 Current Learning Rate: 0.0001997899 +2025-03-24 06:06:00,265 Train Loss: 0.0000751, Val Loss: 0.0000817 +2025-03-24 06:06:00,265 Epoch 542/2000 +2025-03-24 06:07:46,463 Current Learning Rate: 0.0001935465 +2025-03-24 06:07:47,377 Train Loss: 0.0000769, Val Loss: 0.0000810 +2025-03-24 06:07:47,377 Epoch 543/2000 +2025-03-24 06:09:33,285 Current Learning Rate: 0.0001873787 +2025-03-24 06:09:34,183 Train Loss: 0.0000737, Val Loss: 0.0000807 +2025-03-24 
06:09:34,183 Epoch 544/2000 +2025-03-24 06:11:20,510 Current Learning Rate: 0.0001812880 +2025-03-24 06:11:21,469 Train Loss: 0.0000732, Val Loss: 0.0000798 +2025-03-24 06:11:21,470 Epoch 545/2000 +2025-03-24 06:13:07,788 Current Learning Rate: 0.0001752760 +2025-03-24 06:13:07,789 Train Loss: 0.0000742, Val Loss: 0.0000809 +2025-03-24 06:13:07,789 Epoch 546/2000 +2025-03-24 06:14:54,195 Current Learning Rate: 0.0001693441 +2025-03-24 06:14:54,195 Train Loss: 0.0000753, Val Loss: 0.0000808 +2025-03-24 06:14:54,195 Epoch 547/2000 +2025-03-24 06:16:41,152 Current Learning Rate: 0.0001634937 +2025-03-24 06:16:41,973 Train Loss: 0.0000629, Val Loss: 0.0000797 +2025-03-24 06:16:41,973 Epoch 548/2000 +2025-03-24 06:18:28,743 Current Learning Rate: 0.0001577264 +2025-03-24 06:18:28,744 Train Loss: 0.0000699, Val Loss: 0.0000804 +2025-03-24 06:18:28,744 Epoch 549/2000 +2025-03-24 06:20:15,931 Current Learning Rate: 0.0001520436 +2025-03-24 06:20:15,931 Train Loss: 0.0000772, Val Loss: 0.0000806 +2025-03-24 06:20:15,931 Epoch 550/2000 +2025-03-24 06:22:02,449 Current Learning Rate: 0.0001464466 +2025-03-24 06:22:03,334 Train Loss: 0.0000944, Val Loss: 0.0000797 +2025-03-24 06:22:03,334 Epoch 551/2000 +2025-03-24 06:23:49,271 Current Learning Rate: 0.0001409369 +2025-03-24 06:23:50,102 Train Loss: 0.0000759, Val Loss: 0.0000791 +2025-03-24 06:23:50,102 Epoch 552/2000 +2025-03-24 06:25:36,237 Current Learning Rate: 0.0001355157 +2025-03-24 06:25:37,138 Train Loss: 0.0000783, Val Loss: 0.0000786 +2025-03-24 06:25:37,139 Epoch 553/2000 +2025-03-24 06:27:23,592 Current Learning Rate: 0.0001301845 +2025-03-24 06:27:24,451 Train Loss: 0.0000649, Val Loss: 0.0000784 +2025-03-24 06:27:24,451 Epoch 554/2000 +2025-03-24 06:29:10,894 Current Learning Rate: 0.0001249445 +2025-03-24 06:29:10,894 Train Loss: 0.0000739, Val Loss: 0.0000788 +2025-03-24 06:29:10,895 Epoch 555/2000 +2025-03-24 06:30:57,538 Current Learning Rate: 0.0001197970 +2025-03-24 06:30:57,538 Train Loss: 0.0000708, Val 
Loss: 0.0000790 +2025-03-24 06:30:57,539 Epoch 556/2000 +2025-03-24 06:32:44,854 Current Learning Rate: 0.0001147434 +2025-03-24 06:32:45,707 Train Loss: 0.0000736, Val Loss: 0.0000784 +2025-03-24 06:32:45,707 Epoch 557/2000 +2025-03-24 06:34:32,258 Current Learning Rate: 0.0001097848 +2025-03-24 06:34:33,133 Train Loss: 0.0000744, Val Loss: 0.0000778 +2025-03-24 06:34:33,133 Epoch 558/2000 +2025-03-24 06:36:19,445 Current Learning Rate: 0.0001049225 +2025-03-24 06:36:19,446 Train Loss: 0.0000746, Val Loss: 0.0000783 +2025-03-24 06:36:19,446 Epoch 559/2000 +2025-03-24 06:38:06,288 Current Learning Rate: 0.0001001577 +2025-03-24 06:38:07,138 Train Loss: 0.0000661, Val Loss: 0.0000775 +2025-03-24 06:38:07,139 Epoch 560/2000 +2025-03-24 06:39:53,787 Current Learning Rate: 0.0000954915 +2025-03-24 06:39:53,788 Train Loss: 0.0000713, Val Loss: 0.0000779 +2025-03-24 06:39:53,788 Epoch 561/2000 +2025-03-24 06:41:40,459 Current Learning Rate: 0.0000909251 +2025-03-24 06:41:40,459 Train Loss: 0.0000669, Val Loss: 0.0000776 +2025-03-24 06:41:40,459 Epoch 562/2000 +2025-03-24 06:43:27,121 Current Learning Rate: 0.0000864597 +2025-03-24 06:43:27,121 Train Loss: 0.0000780, Val Loss: 0.0000776 +2025-03-24 06:43:27,121 Epoch 563/2000 +2025-03-24 06:45:14,106 Current Learning Rate: 0.0000820963 +2025-03-24 06:45:14,107 Train Loss: 0.0000729, Val Loss: 0.0000777 +2025-03-24 06:45:14,107 Epoch 564/2000 +2025-03-24 06:47:01,034 Current Learning Rate: 0.0000778360 +2025-03-24 06:47:01,034 Train Loss: 0.0000769, Val Loss: 0.0000781 +2025-03-24 06:47:01,035 Epoch 565/2000 +2025-03-24 06:48:47,492 Current Learning Rate: 0.0000736799 +2025-03-24 06:48:48,355 Train Loss: 0.0000755, Val Loss: 0.0000770 +2025-03-24 06:48:48,355 Epoch 566/2000 +2025-03-24 06:50:34,546 Current Learning Rate: 0.0000696290 +2025-03-24 06:50:35,363 Train Loss: 0.0000663, Val Loss: 0.0000769 +2025-03-24 06:50:35,364 Epoch 567/2000 +2025-03-24 06:52:21,892 Current Learning Rate: 0.0000656842 +2025-03-24 
06:52:22,698 Train Loss: 0.0000641, Val Loss: 0.0000768 +2025-03-24 06:52:22,698 Epoch 568/2000 +2025-03-24 06:54:09,255 Current Learning Rate: 0.0000618467 +2025-03-24 06:54:10,143 Train Loss: 0.0000709, Val Loss: 0.0000766 +2025-03-24 06:54:10,144 Epoch 569/2000 +2025-03-24 06:55:56,444 Current Learning Rate: 0.0000581172 +2025-03-24 06:55:56,445 Train Loss: 0.0000635, Val Loss: 0.0000768 +2025-03-24 06:55:56,445 Epoch 570/2000 +2025-03-24 06:57:43,336 Current Learning Rate: 0.0000544967 +2025-03-24 06:57:44,166 Train Loss: 0.0000671, Val Loss: 0.0000766 +2025-03-24 06:57:44,166 Epoch 571/2000 +2025-03-24 06:59:30,179 Current Learning Rate: 0.0000509862 +2025-03-24 06:59:31,068 Train Loss: 0.0000668, Val Loss: 0.0000763 +2025-03-24 06:59:31,069 Epoch 572/2000 +2025-03-24 07:01:17,305 Current Learning Rate: 0.0000475865 +2025-03-24 07:01:17,306 Train Loss: 0.0000712, Val Loss: 0.0000766 +2025-03-24 07:01:17,306 Epoch 573/2000 +2025-03-24 07:03:04,203 Current Learning Rate: 0.0000442984 +2025-03-24 07:03:05,116 Train Loss: 0.0000749, Val Loss: 0.0000763 +2025-03-24 07:03:05,116 Epoch 574/2000 +2025-03-24 07:04:50,894 Current Learning Rate: 0.0000411227 +2025-03-24 07:04:51,777 Train Loss: 0.0000643, Val Loss: 0.0000762 +2025-03-24 07:04:51,777 Epoch 575/2000 +2025-03-24 07:06:37,873 Current Learning Rate: 0.0000380602 +2025-03-24 07:06:38,743 Train Loss: 0.0000671, Val Loss: 0.0000762 +2025-03-24 07:06:38,743 Epoch 576/2000 +2025-03-24 07:08:24,904 Current Learning Rate: 0.0000351118 +2025-03-24 07:08:25,791 Train Loss: 0.0000605, Val Loss: 0.0000761 +2025-03-24 07:08:25,791 Epoch 577/2000 +2025-03-24 07:10:12,056 Current Learning Rate: 0.0000322780 +2025-03-24 07:10:12,057 Train Loss: 0.0000620, Val Loss: 0.0000761 +2025-03-24 07:10:12,057 Epoch 578/2000 +2025-03-24 07:11:58,658 Current Learning Rate: 0.0000295596 +2025-03-24 07:11:59,520 Train Loss: 0.0000756, Val Loss: 0.0000760 +2025-03-24 07:11:59,520 Epoch 579/2000 +2025-03-24 07:13:45,439 Current Learning 
Rate: 0.0000269573 +2025-03-24 07:13:46,293 Train Loss: 0.0000753, Val Loss: 0.0000759 +2025-03-24 07:13:46,294 Epoch 580/2000 +2025-03-24 07:15:32,975 Current Learning Rate: 0.0000244717 +2025-03-24 07:15:32,976 Train Loss: 0.0000774, Val Loss: 0.0000760 +2025-03-24 07:15:32,976 Epoch 581/2000 +2025-03-24 07:17:20,027 Current Learning Rate: 0.0000221035 +2025-03-24 07:17:20,890 Train Loss: 0.0000636, Val Loss: 0.0000759 +2025-03-24 07:17:20,890 Epoch 582/2000 +2025-03-24 07:19:07,194 Current Learning Rate: 0.0000198532 +2025-03-24 07:19:07,195 Train Loss: 0.0000723, Val Loss: 0.0000759 +2025-03-24 07:19:07,195 Epoch 583/2000 +2025-03-24 07:20:53,847 Current Learning Rate: 0.0000177213 +2025-03-24 07:20:53,848 Train Loss: 0.0000753, Val Loss: 0.0000759 +2025-03-24 07:20:53,848 Epoch 584/2000 +2025-03-24 07:22:40,390 Current Learning Rate: 0.0000157084 +2025-03-24 07:22:41,339 Train Loss: 0.0000744, Val Loss: 0.0000758 +2025-03-24 07:22:41,339 Epoch 585/2000 +2025-03-24 07:24:27,783 Current Learning Rate: 0.0000138150 +2025-03-24 07:24:28,667 Train Loss: 0.0000749, Val Loss: 0.0000757 +2025-03-24 07:24:28,667 Epoch 586/2000 +2025-03-24 07:26:14,603 Current Learning Rate: 0.0000120416 +2025-03-24 07:26:15,465 Train Loss: 0.0000637, Val Loss: 0.0000757 +2025-03-24 07:26:15,465 Epoch 587/2000 +2025-03-24 07:28:01,120 Current Learning Rate: 0.0000103886 +2025-03-24 07:28:01,950 Train Loss: 0.0000669, Val Loss: 0.0000756 +2025-03-24 07:28:01,951 Epoch 588/2000 +2025-03-24 07:29:47,937 Current Learning Rate: 0.0000088564 +2025-03-24 07:29:48,857 Train Loss: 0.0000696, Val Loss: 0.0000756 +2025-03-24 07:29:48,858 Epoch 589/2000 +2025-03-24 07:31:34,463 Current Learning Rate: 0.0000074453 +2025-03-24 07:31:35,318 Train Loss: 0.0000715, Val Loss: 0.0000756 +2025-03-24 07:31:35,318 Epoch 590/2000 +2025-03-24 07:33:21,293 Current Learning Rate: 0.0000061558 +2025-03-24 07:33:21,293 Train Loss: 0.0000663, Val Loss: 0.0000757 +2025-03-24 07:33:21,294 Epoch 591/2000 +2025-03-24 
07:35:08,429 Current Learning Rate: 0.0000049882 +2025-03-24 07:35:09,331 Train Loss: 0.0000728, Val Loss: 0.0000756 +2025-03-24 07:35:09,331 Epoch 592/2000 +2025-03-24 07:36:55,872 Current Learning Rate: 0.0000039426 +2025-03-24 07:36:56,722 Train Loss: 0.0000595, Val Loss: 0.0000755 +2025-03-24 07:36:56,723 Epoch 593/2000 +2025-03-24 07:38:43,201 Current Learning Rate: 0.0000030195 +2025-03-24 07:38:43,202 Train Loss: 0.0000739, Val Loss: 0.0000755 +2025-03-24 07:38:43,202 Epoch 594/2000 +2025-03-24 07:40:30,146 Current Learning Rate: 0.0000022190 +2025-03-24 07:40:30,979 Train Loss: 0.0000609, Val Loss: 0.0000755 +2025-03-24 07:40:30,979 Epoch 595/2000 +2025-03-24 07:42:17,433 Current Learning Rate: 0.0000015413 +2025-03-24 07:42:18,296 Train Loss: 0.0000827, Val Loss: 0.0000755 +2025-03-24 07:42:18,297 Epoch 596/2000 +2025-03-24 07:44:04,762 Current Learning Rate: 0.0000009866 +2025-03-24 07:44:04,763 Train Loss: 0.0000743, Val Loss: 0.0000755 +2025-03-24 07:44:04,764 Epoch 597/2000 +2025-03-24 07:45:51,373 Current Learning Rate: 0.0000005551 +2025-03-24 07:45:51,373 Train Loss: 0.0000749, Val Loss: 0.0000756 +2025-03-24 07:45:51,374 Epoch 598/2000 +2025-03-24 07:47:38,027 Current Learning Rate: 0.0000002467 +2025-03-24 07:47:38,028 Train Loss: 0.0000691, Val Loss: 0.0000755 +2025-03-24 07:47:38,028 Epoch 599/2000 +2025-03-24 07:49:25,028 Current Learning Rate: 0.0000000617 +2025-03-24 07:49:25,846 Train Loss: 0.0000682, Val Loss: 0.0000755 +2025-03-24 07:49:25,846 Epoch 600/2000 +2025-03-24 07:51:12,286 Current Learning Rate: 0.0000000000 +2025-03-24 07:51:13,186 Train Loss: 0.0000727, Val Loss: 0.0000755 +2025-03-24 07:51:13,186 Epoch 601/2000 +2025-03-24 07:52:59,787 Current Learning Rate: 0.0000000617 +2025-03-24 07:52:59,788 Train Loss: 0.0000810, Val Loss: 0.0000757 +2025-03-24 07:52:59,788 Epoch 602/2000 +2025-03-24 07:54:45,969 Current Learning Rate: 0.0000002467 +2025-03-24 07:54:45,971 Train Loss: 0.0000671, Val Loss: 0.0000755 +2025-03-24 
07:54:45,974 Epoch 603/2000 +2025-03-24 07:56:32,950 Current Learning Rate: 0.0000005551 +2025-03-24 07:56:33,826 Train Loss: 0.0000702, Val Loss: 0.0000755 +2025-03-24 07:56:33,826 Epoch 604/2000 +2025-03-24 07:58:20,229 Current Learning Rate: 0.0000009866 +2025-03-24 07:58:21,136 Train Loss: 0.0000763, Val Loss: 0.0000755 +2025-03-24 07:58:21,136 Epoch 605/2000 +2025-03-24 08:00:07,192 Current Learning Rate: 0.0000015413 +2025-03-24 08:00:07,193 Train Loss: 0.0000741, Val Loss: 0.0000759 +2025-03-24 08:00:07,193 Epoch 606/2000 +2025-03-24 08:01:54,144 Current Learning Rate: 0.0000022190 +2025-03-24 08:01:54,144 Train Loss: 0.0000757, Val Loss: 0.0000755 +2025-03-24 08:01:54,144 Epoch 607/2000 +2025-03-24 08:03:41,267 Current Learning Rate: 0.0000030195 +2025-03-24 08:03:41,267 Train Loss: 0.0000703, Val Loss: 0.0000755 +2025-03-24 08:03:41,267 Epoch 608/2000 +2025-03-24 08:05:27,917 Current Learning Rate: 0.0000039426 +2025-03-24 08:05:27,918 Train Loss: 0.0000847, Val Loss: 0.0000755 +2025-03-24 08:05:27,918 Epoch 609/2000 +2025-03-24 08:07:14,408 Current Learning Rate: 0.0000049882 +2025-03-24 08:07:14,408 Train Loss: 0.0000686, Val Loss: 0.0000755 +2025-03-24 08:07:14,408 Epoch 610/2000 +2025-03-24 08:09:00,714 Current Learning Rate: 0.0000061558 +2025-03-24 08:09:00,714 Train Loss: 0.0000671, Val Loss: 0.0000755 +2025-03-24 08:09:00,714 Epoch 611/2000 +2025-03-24 08:10:47,882 Current Learning Rate: 0.0000074453 +2025-03-24 08:10:47,882 Train Loss: 0.0000825, Val Loss: 0.0000755 +2025-03-24 08:10:47,882 Epoch 612/2000 +2025-03-24 08:12:34,958 Current Learning Rate: 0.0000088564 +2025-03-24 08:12:34,958 Train Loss: 0.0000710, Val Loss: 0.0000755 +2025-03-24 08:12:34,958 Epoch 613/2000 +2025-03-24 08:14:22,012 Current Learning Rate: 0.0000103886 +2025-03-24 08:14:22,013 Train Loss: 0.0000755, Val Loss: 0.0000755 +2025-03-24 08:14:22,013 Epoch 614/2000 +2025-03-24 08:16:09,170 Current Learning Rate: 0.0000120416 +2025-03-24 08:16:09,171 Train Loss: 0.0000695, Val 
Loss: 0.0000755 +2025-03-24 08:16:09,171 Epoch 615/2000 +2025-03-24 08:17:56,322 Current Learning Rate: 0.0000138150 +2025-03-24 08:17:56,323 Train Loss: 0.0000685, Val Loss: 0.0000756 +2025-03-24 08:17:56,323 Epoch 616/2000 +2025-03-24 08:19:43,665 Current Learning Rate: 0.0000157084 +2025-03-24 08:19:43,666 Train Loss: 0.0000710, Val Loss: 0.0000755 +2025-03-24 08:19:43,666 Epoch 617/2000 +2025-03-24 08:21:30,561 Current Learning Rate: 0.0000177213 +2025-03-24 08:21:30,562 Train Loss: 0.0000806, Val Loss: 0.0000756 +2025-03-24 08:21:30,562 Epoch 618/2000 +2025-03-24 08:23:17,437 Current Learning Rate: 0.0000198532 +2025-03-24 08:23:17,437 Train Loss: 0.0000673, Val Loss: 0.0000757 +2025-03-24 08:23:17,437 Epoch 619/2000 +2025-03-24 08:25:04,351 Current Learning Rate: 0.0000221035 +2025-03-24 08:25:04,351 Train Loss: 0.0000712, Val Loss: 0.0000756 +2025-03-24 08:25:04,352 Epoch 620/2000 +2025-03-24 08:26:51,608 Current Learning Rate: 0.0000244717 +2025-03-24 08:26:51,611 Train Loss: 0.0000744, Val Loss: 0.0000758 +2025-03-24 08:26:51,611 Epoch 621/2000 +2025-03-24 08:28:38,359 Current Learning Rate: 0.0000269573 +2025-03-24 08:28:38,360 Train Loss: 0.0000857, Val Loss: 0.0000759 +2025-03-24 08:28:38,360 Epoch 622/2000 +2025-03-24 08:30:25,196 Current Learning Rate: 0.0000295596 +2025-03-24 08:30:25,197 Train Loss: 0.0000765, Val Loss: 0.0000761 +2025-03-24 08:30:25,197 Epoch 623/2000 +2025-03-24 08:32:11,842 Current Learning Rate: 0.0000322780 +2025-03-24 08:32:11,843 Train Loss: 0.0000735, Val Loss: 0.0000758 +2025-03-24 08:32:11,843 Epoch 624/2000 +2025-03-24 08:33:58,446 Current Learning Rate: 0.0000351118 +2025-03-24 08:33:58,446 Train Loss: 0.0000690, Val Loss: 0.0000757 +2025-03-24 08:33:58,447 Epoch 625/2000 +2025-03-24 08:35:44,930 Current Learning Rate: 0.0000380602 +2025-03-24 08:35:44,931 Train Loss: 0.0000623, Val Loss: 0.0000760 +2025-03-24 08:35:44,931 Epoch 626/2000 +2025-03-24 08:37:31,463 Current Learning Rate: 0.0000411227 +2025-03-24 
08:37:31,464 Train Loss: 0.0000596, Val Loss: 0.0000757 +2025-03-24 08:37:31,464 Epoch 627/2000 +2025-03-24 08:39:18,004 Current Learning Rate: 0.0000442984 +2025-03-24 08:39:18,005 Train Loss: 0.0000739, Val Loss: 0.0000760 +2025-03-24 08:39:18,005 Epoch 628/2000 +2025-03-24 08:41:04,755 Current Learning Rate: 0.0000475865 +2025-03-24 08:41:04,756 Train Loss: 0.0000703, Val Loss: 0.0000757 +2025-03-24 08:41:04,756 Epoch 629/2000 +2025-03-24 08:42:50,999 Current Learning Rate: 0.0000509862 +2025-03-24 08:42:50,999 Train Loss: 0.0000680, Val Loss: 0.0000757 +2025-03-24 08:42:50,999 Epoch 630/2000 +2025-03-24 08:44:37,754 Current Learning Rate: 0.0000544967 +2025-03-24 08:44:37,754 Train Loss: 0.0000650, Val Loss: 0.0000759 +2025-03-24 08:44:37,754 Epoch 631/2000 +2025-03-24 08:46:24,478 Current Learning Rate: 0.0000581172 +2025-03-24 08:46:24,479 Train Loss: 0.0000720, Val Loss: 0.0000759 +2025-03-24 08:46:24,479 Epoch 632/2000 +2025-03-24 08:48:11,375 Current Learning Rate: 0.0000618467 +2025-03-24 08:48:11,375 Train Loss: 0.0000667, Val Loss: 0.0000761 +2025-03-24 08:48:11,376 Epoch 633/2000 +2025-03-24 08:49:57,586 Current Learning Rate: 0.0000656842 +2025-03-24 08:49:57,586 Train Loss: 0.0000621, Val Loss: 0.0000759 +2025-03-24 08:49:57,587 Epoch 634/2000 +2025-03-24 08:51:44,234 Current Learning Rate: 0.0000696290 +2025-03-24 08:51:44,235 Train Loss: 0.0000676, Val Loss: 0.0000763 +2025-03-24 08:51:44,235 Epoch 635/2000 +2025-03-24 08:53:31,106 Current Learning Rate: 0.0000736799 +2025-03-24 08:53:31,106 Train Loss: 0.0000762, Val Loss: 0.0000763 +2025-03-24 08:53:31,106 Epoch 636/2000 +2025-03-24 08:55:18,525 Current Learning Rate: 0.0000778360 +2025-03-24 08:55:18,526 Train Loss: 0.0000695, Val Loss: 0.0000762 +2025-03-24 08:55:18,526 Epoch 637/2000 +2025-03-24 08:57:05,546 Current Learning Rate: 0.0000820963 +2025-03-24 08:57:05,547 Train Loss: 0.0000665, Val Loss: 0.0000759 +2025-03-24 08:57:05,547 Epoch 638/2000 +2025-03-24 08:58:52,380 Current Learning 
Rate: 0.0000864597 +2025-03-24 08:58:52,380 Train Loss: 0.0000742, Val Loss: 0.0000765 +2025-03-24 08:58:52,380 Epoch 639/2000 +2025-03-24 09:00:39,257 Current Learning Rate: 0.0000909251 +2025-03-24 09:00:39,258 Train Loss: 0.0000765, Val Loss: 0.0000764 +2025-03-24 09:00:39,258 Epoch 640/2000 +2025-03-24 09:02:26,387 Current Learning Rate: 0.0000954915 +2025-03-24 09:02:26,388 Train Loss: 0.0000670, Val Loss: 0.0000763 +2025-03-24 09:02:26,388 Epoch 641/2000 +2025-03-24 09:04:13,167 Current Learning Rate: 0.0001001577 +2025-03-24 09:04:13,167 Train Loss: 0.0000671, Val Loss: 0.0000768 +2025-03-24 09:04:13,167 Epoch 642/2000 +2025-03-24 09:06:00,080 Current Learning Rate: 0.0001049225 +2025-03-24 09:06:00,080 Train Loss: 0.0000749, Val Loss: 0.0000777 +2025-03-24 09:06:00,081 Epoch 643/2000 +2025-03-24 09:07:47,385 Current Learning Rate: 0.0001097848 +2025-03-24 09:07:47,385 Train Loss: 0.0000684, Val Loss: 0.0000764 +2025-03-24 09:07:47,386 Epoch 644/2000 +2025-03-24 09:09:34,201 Current Learning Rate: 0.0001147434 +2025-03-24 09:09:34,202 Train Loss: 0.0000716, Val Loss: 0.0000774 +2025-03-24 09:09:34,202 Epoch 645/2000 +2025-03-24 09:11:20,934 Current Learning Rate: 0.0001197970 +2025-03-24 09:11:20,934 Train Loss: 0.0000639, Val Loss: 0.0000772 +2025-03-24 09:11:20,934 Epoch 646/2000 +2025-03-24 09:13:07,152 Current Learning Rate: 0.0001249445 +2025-03-24 09:13:07,153 Train Loss: 0.0000614, Val Loss: 0.0000766 +2025-03-24 09:13:07,153 Epoch 647/2000 +2025-03-24 09:14:53,381 Current Learning Rate: 0.0001301845 +2025-03-24 09:14:53,382 Train Loss: 0.0000671, Val Loss: 0.0000771 +2025-03-24 09:14:53,382 Epoch 648/2000 +2025-03-24 09:16:40,092 Current Learning Rate: 0.0001355157 +2025-03-24 09:16:40,092 Train Loss: 0.0000665, Val Loss: 0.0000772 +2025-03-24 09:16:40,093 Epoch 649/2000 +2025-03-24 09:18:26,634 Current Learning Rate: 0.0001409369 +2025-03-24 09:18:26,635 Train Loss: 0.0000715, Val Loss: 0.0000766 +2025-03-24 09:18:26,635 Epoch 650/2000 +2025-03-24 
09:20:12,994 Current Learning Rate: 0.0001464466 +2025-03-24 09:20:12,994 Train Loss: 0.0000833, Val Loss: 0.0000765 +2025-03-24 09:20:12,994 Epoch 651/2000 +2025-03-24 09:21:59,909 Current Learning Rate: 0.0001520436 +2025-03-24 09:21:59,909 Train Loss: 0.0000624, Val Loss: 0.0000785 +2025-03-24 09:21:59,909 Epoch 652/2000 +2025-03-24 09:23:46,403 Current Learning Rate: 0.0001577264 +2025-03-24 09:23:46,404 Train Loss: 0.0000760, Val Loss: 0.0000778 +2025-03-24 09:23:46,404 Epoch 653/2000 +2025-03-24 09:25:33,287 Current Learning Rate: 0.0001634937 +2025-03-24 09:25:33,287 Train Loss: 0.0000770, Val Loss: 0.0000784 +2025-03-24 09:25:33,288 Epoch 654/2000 +2025-03-24 09:27:20,290 Current Learning Rate: 0.0001693441 +2025-03-24 09:27:20,290 Train Loss: 0.0000717, Val Loss: 0.0000799 +2025-03-24 09:27:20,290 Epoch 655/2000 +2025-03-24 09:29:07,001 Current Learning Rate: 0.0001752760 +2025-03-24 09:29:07,001 Train Loss: 0.0000729, Val Loss: 0.0000776 +2025-03-24 09:29:07,002 Epoch 656/2000 +2025-03-24 09:30:53,582 Current Learning Rate: 0.0001812880 +2025-03-24 09:30:53,583 Train Loss: 0.0000680, Val Loss: 0.0000773 +2025-03-24 09:30:53,584 Epoch 657/2000 +2025-03-24 09:32:39,754 Current Learning Rate: 0.0001873787 +2025-03-24 09:32:39,755 Train Loss: 0.0000662, Val Loss: 0.0000772 +2025-03-24 09:32:39,755 Epoch 658/2000 +2025-03-24 09:34:25,961 Current Learning Rate: 0.0001935465 +2025-03-24 09:34:25,962 Train Loss: 0.0000694, Val Loss: 0.0000768 +2025-03-24 09:34:25,962 Epoch 659/2000 +2025-03-24 09:36:12,860 Current Learning Rate: 0.0001997899 +2025-03-24 09:36:12,860 Train Loss: 0.0000641, Val Loss: 0.0000792 +2025-03-24 09:36:12,860 Epoch 660/2000 +2025-03-24 09:37:59,893 Current Learning Rate: 0.0002061074 +2025-03-24 09:37:59,893 Train Loss: 0.0000778, Val Loss: 0.0000775 +2025-03-24 09:37:59,893 Epoch 661/2000 +2025-03-24 09:39:46,385 Current Learning Rate: 0.0002124974 +2025-03-24 09:39:46,386 Train Loss: 0.0000771, Val Loss: 0.0000775 +2025-03-24 
09:39:46,386 Epoch 662/2000 +2025-03-24 09:41:32,510 Current Learning Rate: 0.0002189583 +2025-03-24 09:41:32,511 Train Loss: 0.0000846, Val Loss: 0.0000797 +2025-03-24 09:41:32,511 Epoch 663/2000 +2025-03-24 09:43:19,448 Current Learning Rate: 0.0002254886 +2025-03-24 09:43:19,448 Train Loss: 0.0000718, Val Loss: 0.0000793 +2025-03-24 09:43:19,448 Epoch 664/2000 +2025-03-24 09:45:06,140 Current Learning Rate: 0.0002320866 +2025-03-24 09:45:06,140 Train Loss: 0.0000756, Val Loss: 0.0000775 +2025-03-24 09:45:06,141 Epoch 665/2000 +2025-03-24 09:46:53,195 Current Learning Rate: 0.0002387507 +2025-03-24 09:46:53,195 Train Loss: 0.0000797, Val Loss: 0.0000790 +2025-03-24 09:46:53,196 Epoch 666/2000 +2025-03-24 09:48:39,477 Current Learning Rate: 0.0002454793 +2025-03-24 09:48:39,478 Train Loss: 0.0000802, Val Loss: 0.0000808 +2025-03-24 09:48:39,478 Epoch 667/2000 +2025-03-24 09:50:25,872 Current Learning Rate: 0.0002522707 +2025-03-24 09:50:25,872 Train Loss: 0.0000857, Val Loss: 0.0000799 +2025-03-24 09:50:25,872 Epoch 668/2000 +2025-03-24 09:52:12,360 Current Learning Rate: 0.0002591232 +2025-03-24 09:52:12,360 Train Loss: 0.0000739, Val Loss: 0.0000786 +2025-03-24 09:52:12,360 Epoch 669/2000 +2025-03-24 09:53:59,509 Current Learning Rate: 0.0002660351 +2025-03-24 09:53:59,510 Train Loss: 0.0000711, Val Loss: 0.0000810 +2025-03-24 09:53:59,510 Epoch 670/2000 +2025-03-24 09:55:45,908 Current Learning Rate: 0.0002730048 +2025-03-24 09:55:45,909 Train Loss: 0.0000618, Val Loss: 0.0000775 +2025-03-24 09:55:45,909 Epoch 671/2000 +2025-03-24 09:57:32,254 Current Learning Rate: 0.0002800304 +2025-03-24 09:57:32,254 Train Loss: 0.0000792, Val Loss: 0.0000822 +2025-03-24 09:57:32,255 Epoch 672/2000 +2025-03-24 09:59:19,325 Current Learning Rate: 0.0002871104 +2025-03-24 09:59:19,325 Train Loss: 0.0000755, Val Loss: 0.0000779 +2025-03-24 09:59:19,326 Epoch 673/2000 +2025-03-24 10:01:05,996 Current Learning Rate: 0.0002942428 +2025-03-24 10:01:05,996 Train Loss: 0.0000679, Val 
Loss: 0.0000780 +2025-03-24 10:01:05,996 Epoch 674/2000 +2025-03-24 10:02:51,998 Current Learning Rate: 0.0003014261 +2025-03-24 10:02:51,998 Train Loss: 0.0000758, Val Loss: 0.0000795 +2025-03-24 10:02:51,999 Epoch 675/2000 +2025-03-24 10:04:39,478 Current Learning Rate: 0.0003086583 +2025-03-24 10:04:39,479 Train Loss: 0.0000687, Val Loss: 0.0000813 +2025-03-24 10:04:39,479 Epoch 676/2000 +2025-03-24 10:06:25,854 Current Learning Rate: 0.0003159377 +2025-03-24 10:06:25,855 Train Loss: 0.0000752, Val Loss: 0.0000848 +2025-03-24 10:06:25,855 Epoch 677/2000 +2025-03-24 10:08:12,539 Current Learning Rate: 0.0003232626 +2025-03-24 10:08:12,539 Train Loss: 0.0000607, Val Loss: 0.0000784 +2025-03-24 10:08:12,539 Epoch 678/2000 +2025-03-24 10:09:59,205 Current Learning Rate: 0.0003306310 +2025-03-24 10:09:59,205 Train Loss: 0.0000781, Val Loss: 0.0000812 +2025-03-24 10:09:59,205 Epoch 679/2000 +2025-03-24 10:11:45,522 Current Learning Rate: 0.0003380413 +2025-03-24 10:11:45,523 Train Loss: 0.0000763, Val Loss: 0.0000817 +2025-03-24 10:11:45,523 Epoch 680/2000 +2025-03-24 10:13:32,276 Current Learning Rate: 0.0003454915 +2025-03-24 10:13:32,280 Train Loss: 0.0000722, Val Loss: 0.0000803 +2025-03-24 10:13:32,280 Epoch 681/2000 +2025-03-24 10:15:19,248 Current Learning Rate: 0.0003529798 +2025-03-24 10:15:19,248 Train Loss: 0.0000797, Val Loss: 0.0000827 +2025-03-24 10:15:19,249 Epoch 682/2000 +2025-03-24 10:17:05,697 Current Learning Rate: 0.0003605044 +2025-03-24 10:17:05,698 Train Loss: 0.0000755, Val Loss: 0.0000811 +2025-03-24 10:17:05,699 Epoch 683/2000 +2025-03-24 10:18:52,686 Current Learning Rate: 0.0003680635 +2025-03-24 10:18:52,686 Train Loss: 0.0000748, Val Loss: 0.0000811 +2025-03-24 10:18:52,687 Epoch 684/2000 +2025-03-24 10:20:39,825 Current Learning Rate: 0.0003756551 +2025-03-24 10:20:39,825 Train Loss: 0.0000716, Val Loss: 0.0000808 +2025-03-24 10:20:39,825 Epoch 685/2000 +2025-03-24 10:22:26,815 Current Learning Rate: 0.0003832773 +2025-03-24 
10:22:26,816 Train Loss: 0.0000764, Val Loss: 0.0000822 +2025-03-24 10:22:26,816 Epoch 686/2000 +2025-03-24 10:24:13,206 Current Learning Rate: 0.0003909284 +2025-03-24 10:24:13,206 Train Loss: 0.0000661, Val Loss: 0.0000787 +2025-03-24 10:24:13,207 Epoch 687/2000 +2025-03-24 10:26:00,035 Current Learning Rate: 0.0003986064 +2025-03-24 10:26:00,036 Train Loss: 0.0000836, Val Loss: 0.0000841 +2025-03-24 10:26:00,036 Epoch 688/2000 +2025-03-24 10:27:47,021 Current Learning Rate: 0.0004063093 +2025-03-24 10:27:47,022 Train Loss: 0.0000931, Val Loss: 0.0000823 +2025-03-24 10:27:47,022 Epoch 689/2000 +2025-03-24 10:29:33,909 Current Learning Rate: 0.0004140354 +2025-03-24 10:29:33,910 Train Loss: 0.0000735, Val Loss: 0.0000831 +2025-03-24 10:29:33,910 Epoch 690/2000 +2025-03-24 10:31:20,182 Current Learning Rate: 0.0004217828 +2025-03-24 10:31:20,183 Train Loss: 0.0000931, Val Loss: 0.0000860 +2025-03-24 10:31:20,183 Epoch 691/2000 +2025-03-24 10:33:07,326 Current Learning Rate: 0.0004295494 +2025-03-24 10:33:07,327 Train Loss: 0.0000822, Val Loss: 0.0000892 +2025-03-24 10:33:07,327 Epoch 692/2000 +2025-03-24 10:34:53,612 Current Learning Rate: 0.0004373334 +2025-03-24 10:34:53,612 Train Loss: 0.0000703, Val Loss: 0.0000797 +2025-03-24 10:34:53,612 Epoch 693/2000 +2025-03-24 10:36:40,086 Current Learning Rate: 0.0004451328 +2025-03-24 10:36:40,087 Train Loss: 0.0000773, Val Loss: 0.0000830 +2025-03-24 10:36:40,087 Epoch 694/2000 +2025-03-24 10:38:25,765 Current Learning Rate: 0.0004529458 +2025-03-24 10:38:25,766 Train Loss: 0.0000786, Val Loss: 0.0000831 +2025-03-24 10:38:25,766 Epoch 695/2000 +2025-03-24 10:40:12,849 Current Learning Rate: 0.0004607705 +2025-03-24 10:40:12,849 Train Loss: 0.0000743, Val Loss: 0.0000791 +2025-03-24 10:40:12,850 Epoch 696/2000 +2025-03-24 10:41:59,626 Current Learning Rate: 0.0004686047 +2025-03-24 10:41:59,627 Train Loss: 0.0000777, Val Loss: 0.0000835 +2025-03-24 10:41:59,627 Epoch 697/2000 +2025-03-24 10:43:46,849 Current Learning 
Rate: 0.0004764468 +2025-03-24 10:43:46,849 Train Loss: 0.0000839, Val Loss: 0.0000892 +2025-03-24 10:43:46,850 Epoch 698/2000 +2025-03-24 10:45:34,092 Current Learning Rate: 0.0004842946 +2025-03-24 10:45:34,104 Train Loss: 0.0000957, Val Loss: 0.0000866 +2025-03-24 10:45:34,104 Epoch 699/2000 +2025-03-24 10:47:20,757 Current Learning Rate: 0.0004921463 +2025-03-24 10:47:20,758 Train Loss: 0.0000948, Val Loss: 0.0000980 +2025-03-24 10:47:20,758 Epoch 700/2000 +2025-03-24 10:49:07,269 Current Learning Rate: 0.0005000000 +2025-03-24 10:49:07,269 Train Loss: 0.0000781, Val Loss: 0.0000821 +2025-03-24 10:49:07,269 Epoch 701/2000 +2025-03-24 10:50:53,628 Current Learning Rate: 0.0005078537 +2025-03-24 10:50:53,628 Train Loss: 0.0000828, Val Loss: 0.0000891 +2025-03-24 10:50:53,628 Epoch 702/2000 +2025-03-24 10:52:40,044 Current Learning Rate: 0.0005157054 +2025-03-24 10:52:40,045 Train Loss: 0.0000769, Val Loss: 0.0000866 +2025-03-24 10:52:40,045 Epoch 703/2000 +2025-03-24 10:54:26,588 Current Learning Rate: 0.0005235532 +2025-03-24 10:54:26,588 Train Loss: 0.0000961, Val Loss: 0.0000858 +2025-03-24 10:54:26,588 Epoch 704/2000 +2025-03-24 10:56:13,519 Current Learning Rate: 0.0005313953 +2025-03-24 10:56:13,519 Train Loss: 0.0000779, Val Loss: 0.0000870 +2025-03-24 10:56:13,519 Epoch 705/2000 +2025-03-24 10:58:00,641 Current Learning Rate: 0.0005392295 +2025-03-24 10:58:00,642 Train Loss: 0.0000744, Val Loss: 0.0000828 +2025-03-24 10:58:00,642 Epoch 706/2000 +2025-03-24 10:59:47,157 Current Learning Rate: 0.0005470542 +2025-03-24 10:59:47,157 Train Loss: 0.0000821, Val Loss: 0.0000857 +2025-03-24 10:59:47,158 Epoch 707/2000 +2025-03-24 11:01:33,653 Current Learning Rate: 0.0005548672 +2025-03-24 11:01:33,654 Train Loss: 0.0000970, Val Loss: 0.0000907 +2025-03-24 11:01:33,654 Epoch 708/2000 +2025-03-24 11:03:20,549 Current Learning Rate: 0.0005626666 +2025-03-24 11:03:20,549 Train Loss: 0.0000828, Val Loss: 0.0000845 +2025-03-24 11:03:20,549 Epoch 709/2000 +2025-03-24 
11:05:07,793 Current Learning Rate: 0.0005704506 +2025-03-24 11:05:07,794 Train Loss: 0.0000983, Val Loss: 0.0000871 +2025-03-24 11:05:07,794 Epoch 710/2000 +2025-03-24 11:06:54,628 Current Learning Rate: 0.0005782172 +2025-03-24 11:06:54,629 Train Loss: 0.0000968, Val Loss: 0.0000907 +2025-03-24 11:06:54,629 Epoch 711/2000 +2025-03-24 11:08:41,117 Current Learning Rate: 0.0005859646 +2025-03-24 11:08:41,117 Train Loss: 0.0000856, Val Loss: 0.0000827 +2025-03-24 11:08:41,118 Epoch 712/2000 +2025-03-24 11:10:27,894 Current Learning Rate: 0.0005936907 +2025-03-24 11:10:27,894 Train Loss: 0.0000777, Val Loss: 0.0000943 +2025-03-24 11:10:27,895 Epoch 713/2000 +2025-03-24 11:12:14,431 Current Learning Rate: 0.0006013936 +2025-03-24 11:12:14,431 Train Loss: 0.0000828, Val Loss: 0.0000946 +2025-03-24 11:12:14,432 Epoch 714/2000 +2025-03-24 11:14:00,499 Current Learning Rate: 0.0006090716 +2025-03-24 11:14:00,499 Train Loss: 0.0000729, Val Loss: 0.0000865 +2025-03-24 11:14:00,499 Epoch 715/2000 +2025-03-24 11:15:46,866 Current Learning Rate: 0.0006167227 +2025-03-24 11:15:46,867 Train Loss: 0.0000914, Val Loss: 0.0000861 +2025-03-24 11:15:46,867 Epoch 716/2000 +2025-03-24 11:17:33,497 Current Learning Rate: 0.0006243449 +2025-03-24 11:17:33,498 Train Loss: 0.0000799, Val Loss: 0.0000880 +2025-03-24 11:17:33,498 Epoch 717/2000 +2025-03-24 11:19:20,239 Current Learning Rate: 0.0006319365 +2025-03-24 11:19:20,240 Train Loss: 0.0000792, Val Loss: 0.0000864 +2025-03-24 11:19:20,240 Epoch 718/2000 +2025-03-24 11:21:07,141 Current Learning Rate: 0.0006394956 +2025-03-24 11:21:07,141 Train Loss: 0.0000968, Val Loss: 0.0000994 +2025-03-24 11:21:07,142 Epoch 719/2000 +2025-03-24 11:22:54,128 Current Learning Rate: 0.0006470202 +2025-03-24 11:22:54,129 Train Loss: 0.0000948, Val Loss: 0.0000917 +2025-03-24 11:22:54,129 Epoch 720/2000 +2025-03-24 11:24:41,114 Current Learning Rate: 0.0006545085 +2025-03-24 11:24:41,114 Train Loss: 0.0001015, Val Loss: 0.0001089 +2025-03-24 
11:24:41,114 Epoch 721/2000 +2025-03-24 11:26:28,137 Current Learning Rate: 0.0006619587 +2025-03-24 11:26:28,138 Train Loss: 0.0001171, Val Loss: 0.0000895 +2025-03-24 11:26:28,138 Epoch 722/2000 +2025-03-24 11:28:14,789 Current Learning Rate: 0.0006693690 +2025-03-24 11:28:14,789 Train Loss: 0.0000821, Val Loss: 0.0000869 +2025-03-24 11:28:14,790 Epoch 723/2000 +2025-03-24 11:30:01,703 Current Learning Rate: 0.0006767374 +2025-03-24 11:30:01,704 Train Loss: 0.0000791, Val Loss: 0.0000965 +2025-03-24 11:30:01,704 Epoch 724/2000 +2025-03-24 11:31:48,887 Current Learning Rate: 0.0006840623 +2025-03-24 11:31:48,888 Train Loss: 0.0000915, Val Loss: 0.0000960 +2025-03-24 11:31:48,888 Epoch 725/2000 +2025-03-24 11:33:35,294 Current Learning Rate: 0.0006913417 +2025-03-24 11:33:35,294 Train Loss: 0.0000982, Val Loss: 0.0000853 +2025-03-24 11:33:35,295 Epoch 726/2000 +2025-03-24 11:35:21,801 Current Learning Rate: 0.0006985739 +2025-03-24 11:35:21,802 Train Loss: 0.0000833, Val Loss: 0.0000940 +2025-03-24 11:35:21,802 Epoch 727/2000 +2025-03-24 11:37:08,678 Current Learning Rate: 0.0007057572 +2025-03-24 11:37:08,679 Train Loss: 0.0000934, Val Loss: 0.0000997 +2025-03-24 11:37:08,680 Epoch 728/2000 +2025-03-24 11:38:56,020 Current Learning Rate: 0.0007128896 +2025-03-24 11:38:56,020 Train Loss: 0.0001021, Val Loss: 0.0000985 +2025-03-24 11:38:56,020 Epoch 729/2000 +2025-03-24 11:40:42,547 Current Learning Rate: 0.0007199696 +2025-03-24 11:40:42,547 Train Loss: 0.0000918, Val Loss: 0.0000905 +2025-03-24 11:40:42,548 Epoch 730/2000 +2025-03-24 11:42:28,847 Current Learning Rate: 0.0007269952 +2025-03-24 11:42:28,848 Train Loss: 0.0000806, Val Loss: 0.0000860 +2025-03-24 11:42:28,848 Epoch 731/2000 +2025-03-24 11:44:15,164 Current Learning Rate: 0.0007339649 +2025-03-24 11:44:15,165 Train Loss: 0.0000906, Val Loss: 0.0001000 +2025-03-24 11:44:15,165 Epoch 732/2000 +2025-03-24 11:46:02,452 Current Learning Rate: 0.0007408768 +2025-03-24 11:46:02,453 Train Loss: 0.0000874, Val 
Loss: 0.0000925 +2025-03-24 11:46:02,453 Epoch 733/2000 +2025-03-24 11:47:49,365 Current Learning Rate: 0.0007477293 +2025-03-24 11:47:49,366 Train Loss: 0.0000891, Val Loss: 0.0000892 +2025-03-24 11:47:49,366 Epoch 734/2000 +2025-03-24 11:49:35,971 Current Learning Rate: 0.0007545207 +2025-03-24 11:49:35,972 Train Loss: 0.0000858, Val Loss: 0.0001051 +2025-03-24 11:49:35,972 Epoch 735/2000 +2025-03-24 11:51:22,897 Current Learning Rate: 0.0007612493 +2025-03-24 11:51:22,897 Train Loss: 0.0001025, Val Loss: 0.0000908 +2025-03-24 11:51:22,897 Epoch 736/2000 +2025-03-24 11:53:09,844 Current Learning Rate: 0.0007679134 +2025-03-24 11:53:09,845 Train Loss: 0.0000885, Val Loss: 0.0001011 +2025-03-24 11:53:09,846 Epoch 737/2000 +2025-03-24 11:54:56,680 Current Learning Rate: 0.0007745114 +2025-03-24 11:54:56,681 Train Loss: 0.0000974, Val Loss: 0.0000956 +2025-03-24 11:54:56,682 Epoch 738/2000 +2025-03-24 11:56:43,595 Current Learning Rate: 0.0007810417 +2025-03-24 11:56:43,595 Train Loss: 0.0001087, Val Loss: 0.0000929 +2025-03-24 11:56:43,596 Epoch 739/2000 +2025-03-24 11:58:30,258 Current Learning Rate: 0.0007875026 +2025-03-24 11:58:30,259 Train Loss: 0.0000905, Val Loss: 0.0001169 +2025-03-24 11:58:30,259 Epoch 740/2000 +2025-03-24 12:00:17,093 Current Learning Rate: 0.0007938926 +2025-03-24 12:00:17,094 Train Loss: 0.0000953, Val Loss: 0.0000955 +2025-03-24 12:00:17,094 Epoch 741/2000 +2025-03-24 12:02:03,515 Current Learning Rate: 0.0008002101 +2025-03-24 12:02:03,516 Train Loss: 0.0001053, Val Loss: 0.0000943 +2025-03-24 12:02:03,516 Epoch 742/2000 +2025-03-24 12:03:49,566 Current Learning Rate: 0.0008064535 +2025-03-24 12:03:49,566 Train Loss: 0.0001040, Val Loss: 0.0000974 +2025-03-24 12:03:49,566 Epoch 743/2000 +2025-03-24 12:05:35,950 Current Learning Rate: 0.0008126213 +2025-03-24 12:05:35,950 Train Loss: 0.0001203, Val Loss: 0.0001067 +2025-03-24 12:05:35,951 Epoch 744/2000 +2025-03-24 12:07:22,555 Current Learning Rate: 0.0008187120 +2025-03-24 
12:07:22,555 Train Loss: 0.0001015, Val Loss: 0.0001025 +2025-03-24 12:07:22,556 Epoch 745/2000 +2025-03-24 12:09:08,851 Current Learning Rate: 0.0008247240 +2025-03-24 12:09:08,851 Train Loss: 0.0000949, Val Loss: 0.0001045 +2025-03-24 12:09:08,852 Epoch 746/2000 +2025-03-24 12:10:55,816 Current Learning Rate: 0.0008306559 +2025-03-24 12:10:55,817 Train Loss: 0.0001343, Val Loss: 0.0001092 +2025-03-24 12:10:55,817 Epoch 747/2000 +2025-03-24 12:12:42,952 Current Learning Rate: 0.0008365063 +2025-03-24 12:12:42,953 Train Loss: 0.0000916, Val Loss: 0.0001053 +2025-03-24 12:12:42,953 Epoch 748/2000 +2025-03-24 12:14:29,468 Current Learning Rate: 0.0008422736 +2025-03-24 12:14:29,469 Train Loss: 0.0001204, Val Loss: 0.0001092 +2025-03-24 12:14:29,469 Epoch 749/2000 +2025-03-24 12:16:16,039 Current Learning Rate: 0.0008479564 +2025-03-24 12:16:16,039 Train Loss: 0.0001275, Val Loss: 0.0001041 +2025-03-24 12:16:16,039 Epoch 750/2000 +2025-03-24 12:18:02,772 Current Learning Rate: 0.0008535534 +2025-03-24 12:18:02,773 Train Loss: 0.0001139, Val Loss: 0.0001325 +2025-03-24 12:18:02,773 Epoch 751/2000 +2025-03-24 12:19:49,806 Current Learning Rate: 0.0008590631 +2025-03-24 12:19:49,806 Train Loss: 0.0000943, Val Loss: 0.0001034 +2025-03-24 12:19:49,806 Epoch 752/2000 +2025-03-24 12:21:36,971 Current Learning Rate: 0.0008644843 +2025-03-24 12:21:36,971 Train Loss: 0.0001086, Val Loss: 0.0001040 +2025-03-24 12:21:36,972 Epoch 753/2000 +2025-03-24 12:23:23,352 Current Learning Rate: 0.0008698155 +2025-03-24 12:23:23,353 Train Loss: 0.0001076, Val Loss: 0.0000999 +2025-03-24 12:23:23,353 Epoch 754/2000 +2025-03-24 12:25:10,387 Current Learning Rate: 0.0008750555 +2025-03-24 12:25:10,387 Train Loss: 0.0000955, Val Loss: 0.0001838 +2025-03-24 12:25:10,388 Epoch 755/2000 +2025-03-24 12:26:56,963 Current Learning Rate: 0.0008802030 +2025-03-24 12:26:56,963 Train Loss: 0.0001190, Val Loss: 0.0001074 +2025-03-24 12:26:56,963 Epoch 756/2000 +2025-03-24 12:28:43,780 Current Learning 
Rate: 0.0008852566 +2025-03-24 12:28:43,780 Train Loss: 0.0001044, Val Loss: 0.0000961 +2025-03-24 12:28:43,781 Epoch 757/2000 +2025-03-24 12:30:29,933 Current Learning Rate: 0.0008902152 +2025-03-24 12:30:29,933 Train Loss: 0.0000954, Val Loss: 0.0001127 +2025-03-24 12:30:29,933 Epoch 758/2000 +2025-03-24 12:32:16,621 Current Learning Rate: 0.0008950775 +2025-03-24 12:32:16,621 Train Loss: 0.0001172, Val Loss: 0.0001135 +2025-03-24 12:32:16,622 Epoch 759/2000 +2025-03-24 12:34:03,546 Current Learning Rate: 0.0008998423 +2025-03-24 12:34:03,547 Train Loss: 0.0001000, Val Loss: 0.0001082 +2025-03-24 12:34:03,547 Epoch 760/2000 +2025-03-24 12:35:50,597 Current Learning Rate: 0.0009045085 +2025-03-24 12:35:50,598 Train Loss: 0.0000889, Val Loss: 0.0000945 +2025-03-24 12:35:50,598 Epoch 761/2000 +2025-03-24 12:37:36,851 Current Learning Rate: 0.0009090749 +2025-03-24 12:37:36,852 Train Loss: 0.0001035, Val Loss: 0.0001252 +2025-03-24 12:37:36,852 Epoch 762/2000 +2025-03-24 12:39:23,240 Current Learning Rate: 0.0009135403 +2025-03-24 12:39:23,241 Train Loss: 0.0000932, Val Loss: 0.0000996 +2025-03-24 12:39:23,241 Epoch 763/2000 +2025-03-24 12:41:09,499 Current Learning Rate: 0.0009179037 +2025-03-24 12:41:09,500 Train Loss: 0.0001107, Val Loss: 0.0001116 +2025-03-24 12:41:09,500 Epoch 764/2000 +2025-03-24 12:42:55,994 Current Learning Rate: 0.0009221640 +2025-03-24 12:42:55,994 Train Loss: 0.0001098, Val Loss: 0.0000953 +2025-03-24 12:42:55,994 Epoch 765/2000 +2025-03-24 12:44:42,874 Current Learning Rate: 0.0009263201 +2025-03-24 12:44:42,875 Train Loss: 0.0001028, Val Loss: 0.0000985 +2025-03-24 12:44:42,876 Epoch 766/2000 +2025-03-24 12:46:29,999 Current Learning Rate: 0.0009303710 +2025-03-24 12:46:30,000 Train Loss: 0.0001983, Val Loss: 0.0001433 +2025-03-24 12:46:30,001 Epoch 767/2000 +2025-03-24 12:48:16,929 Current Learning Rate: 0.0009343158 +2025-03-24 12:48:16,929 Train Loss: 0.0001216, Val Loss: 0.0000951 +2025-03-24 12:48:16,930 Epoch 768/2000 +2025-03-24 
12:50:04,011 Current Learning Rate: 0.0009381533 +2025-03-24 12:50:04,011 Train Loss: 0.0000881, Val Loss: 0.0000892 +2025-03-24 12:50:04,012 Epoch 769/2000 +2025-03-24 12:51:51,106 Current Learning Rate: 0.0009418828 +2025-03-24 12:51:51,107 Train Loss: 0.0000781, Val Loss: 0.0001018 +2025-03-24 12:51:51,107 Epoch 770/2000 +2025-03-24 12:53:38,006 Current Learning Rate: 0.0009455033 +2025-03-24 12:53:38,007 Train Loss: 0.0001176, Val Loss: 0.0001002 +2025-03-24 12:53:38,007 Epoch 771/2000 +2025-03-24 12:55:25,214 Current Learning Rate: 0.0009490138 +2025-03-24 12:55:25,215 Train Loss: 0.0001257, Val Loss: 0.0001157 +2025-03-24 12:55:25,215 Epoch 772/2000 +2025-03-24 12:57:11,915 Current Learning Rate: 0.0009524135 +2025-03-24 12:57:11,916 Train Loss: 0.0001013, Val Loss: 0.0001109 +2025-03-24 12:57:11,916 Epoch 773/2000 +2025-03-24 12:58:58,622 Current Learning Rate: 0.0009557016 +2025-03-24 12:58:58,623 Train Loss: 0.0001113, Val Loss: 0.0001007 +2025-03-24 12:58:58,623 Epoch 774/2000 +2025-03-24 13:00:45,556 Current Learning Rate: 0.0009588773 +2025-03-24 13:00:45,557 Train Loss: 0.0001180, Val Loss: 0.0001018 +2025-03-24 13:00:45,557 Epoch 775/2000 +2025-03-24 13:02:32,398 Current Learning Rate: 0.0009619398 +2025-03-24 13:02:32,398 Train Loss: 0.0001064, Val Loss: 0.0001055 +2025-03-24 13:02:32,399 Epoch 776/2000 +2025-03-24 13:04:19,167 Current Learning Rate: 0.0009648882 +2025-03-24 13:04:19,167 Train Loss: 0.0001074, Val Loss: 0.0000995 +2025-03-24 13:04:19,167 Epoch 777/2000 +2025-03-24 13:06:05,579 Current Learning Rate: 0.0009677220 +2025-03-24 13:06:05,580 Train Loss: 0.0001521, Val Loss: 0.0001020 +2025-03-24 13:06:05,580 Epoch 778/2000 +2025-03-24 13:07:52,425 Current Learning Rate: 0.0009704404 +2025-03-24 13:07:52,426 Train Loss: 0.0001092, Val Loss: 0.0000940 +2025-03-24 13:07:52,426 Epoch 779/2000 +2025-03-24 13:09:38,912 Current Learning Rate: 0.0009730427 +2025-03-24 13:09:38,913 Train Loss: 0.0000969, Val Loss: 0.0001023 +2025-03-24 
13:09:38,913 Epoch 780/2000 +2025-03-24 13:11:26,069 Current Learning Rate: 0.0009755283 +2025-03-24 13:11:26,070 Train Loss: 0.0001156, Val Loss: 0.0001101 +2025-03-24 13:11:26,070 Epoch 781/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp2_20241107_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp2_20241107_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..587621331a7d0db51468a9726a1661ad862841ca --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp2_20241107_training_log.log @@ -0,0 +1,35 @@ +2024-11-07 22:13:57,616 Added key: store_based_barrier_key:1 to store for rank: 2 +2024-11-07 22:13:57,712 Added key: store_based_barrier_key:1 to store for rank: 5 +2024-11-07 22:13:57,717 Added key: store_based_barrier_key:1 to store for rank: 1 +2024-11-07 22:13:57,754 Added key: store_based_barrier_key:1 to store for rank: 4 +2024-11-07 22:13:57,803 Added key: store_based_barrier_key:1 to store for rank: 7 +2024-11-07 22:13:57,819 Added key: store_based_barrier_key:1 to store for rank: 6 +2024-11-07 22:13:57,824 Added key: store_based_barrier_key:1 to store for rank: 3 +2024-11-07 22:13:57,826 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-07 22:15:08,339 Epoch 1/500 +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,063 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:15:14,064 Reducer buckets have been rebuilt in this iteration. 
+2024-11-07 22:15:52,061 Current Learning Rate: 0.0009999901 +2024-11-07 22:15:52,995 Train Loss: 0.7143657, Val Loss: 0.0286528 +2024-11-07 22:15:52,995 Epoch 2/500 +2024-11-07 22:16:35,814 Current Learning Rate: 0.0009999605 +2024-11-07 22:16:36,763 Train Loss: 0.0185759, Val Loss: 0.0147644 +2024-11-07 22:16:36,764 Epoch 3/500 +2024-11-07 22:17:19,440 Current Learning Rate: 0.0009999112 +2024-11-07 22:17:20,379 Train Loss: 0.0151753, Val Loss: 0.0132926 +2024-11-07 22:17:20,379 Epoch 4/500 +2024-11-07 22:18:03,294 Current Learning Rate: 0.0009998421 +2024-11-07 22:18:04,405 Train Loss: 0.0144290, Val Loss: 0.0130041 +2024-11-07 22:18:04,406 Epoch 5/500 +2024-11-07 22:18:47,143 Current Learning Rate: 0.0009997533 +2024-11-07 22:18:48,118 Train Loss: 0.0141721, Val Loss: 0.0128710 +2024-11-07 22:18:48,119 Epoch 6/500 +2024-11-07 22:19:30,797 Current Learning Rate: 0.0009996447 +2024-11-07 22:19:31,759 Train Loss: 0.0140451, Val Loss: 0.0127878 +2024-11-07 22:19:31,760 Epoch 7/500 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp3_20241107_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp3_20241107_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..454d530c3b338d5a0a2825dd27103ca9dcea9fa0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp3_20241107_training_log.log @@ -0,0 +1,1517 @@ +2024-11-07 22:19:54,475 Added key: store_based_barrier_key:1 to store for rank: 7 +2024-11-07 22:19:54,634 Added key: store_based_barrier_key:1 to store for rank: 1 +2024-11-07 22:19:54,656 Added key: store_based_barrier_key:1 to store for rank: 4 +2024-11-07 22:19:54,666 Added key: store_based_barrier_key:1 to store for rank: 5 +2024-11-07 22:19:54,670 Added key: store_based_barrier_key:1 to store for rank: 6 +2024-11-07 22:19:54,703 Added key: store_based_barrier_key:1 to store for rank: 3 +2024-11-07 22:19:54,736 Added key: store_based_barrier_key:1 to store for rank: 2 +2024-11-07 22:19:54,745 Added key: 
store_based_barrier_key:1 to store for rank: 0 +2024-11-07 22:20:20,484 Epoch 1/500 +2024-11-07 22:20:25,602 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,602 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,603 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,603 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,603 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,603 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,603 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:20:25,603 Reducer buckets have been rebuilt in this iteration. +2024-11-07 22:21:03,830 Current Learning Rate: 0.0099999013 +2024-11-07 22:21:04,614 Train Loss: 1.2662283, Val Loss: 0.0961503 +2024-11-07 22:21:04,614 Epoch 2/500 +2024-11-07 22:21:45,929 Current Learning Rate: 0.0099996052 +2024-11-07 22:21:46,822 Train Loss: 0.0540553, Val Loss: 0.0281026 +2024-11-07 22:21:46,822 Epoch 3/500 +2024-11-07 22:22:28,866 Current Learning Rate: 0.0099991118 +2024-11-07 22:22:29,890 Train Loss: 0.0226790, Val Loss: 0.0175729 +2024-11-07 22:22:29,890 Epoch 4/500 +2024-11-07 22:23:11,971 Current Learning Rate: 0.0099984209 +2024-11-07 22:23:12,960 Train Loss: 0.0183915, Val Loss: 0.0152085 +2024-11-07 22:23:12,960 Epoch 5/500 +2024-11-07 22:23:54,813 Current Learning Rate: 0.0099975328 +2024-11-07 22:23:55,938 Train Loss: 0.0157156, Val Loss: 0.0134228 +2024-11-07 22:23:55,938 Epoch 6/500 +2024-11-07 22:24:37,460 Current Learning Rate: 0.0099964474 +2024-11-07 22:24:38,392 Train Loss: 0.0145322, Val Loss: 0.0131006 +2024-11-07 22:24:38,392 Epoch 7/500 +2024-11-07 22:25:20,176 Current Learning Rate: 0.0099951647 +2024-11-07 22:25:21,225 Train Loss: 0.0142447, Val Loss: 0.0129222 +2024-11-07 22:25:21,225 Epoch 8/500 +2024-11-07 22:26:03,953 Current Learning Rate: 0.0099936848 +2024-11-07 22:26:04,862 Train Loss: 0.0140850, Val Loss: 0.0128162 
+2024-11-07 22:26:04,863 Epoch 9/500 +2024-11-07 22:26:47,446 Current Learning Rate: 0.0099920078 +2024-11-07 22:26:48,352 Train Loss: 0.0140031, Val Loss: 0.0127465 +2024-11-07 22:26:48,352 Epoch 10/500 +2024-11-07 22:27:31,005 Current Learning Rate: 0.0099901336 +2024-11-07 22:27:32,010 Train Loss: 0.0139315, Val Loss: 0.0126935 +2024-11-07 22:27:32,010 Epoch 11/500 +2024-11-07 22:28:15,007 Current Learning Rate: 0.0099880625 +2024-11-07 22:28:16,040 Train Loss: 0.0138531, Val Loss: 0.0126385 +2024-11-07 22:28:16,040 Epoch 12/500 +2024-11-07 22:28:59,303 Current Learning Rate: 0.0099857945 +2024-11-07 22:29:00,270 Train Loss: 0.0137969, Val Loss: 0.0126028 +2024-11-07 22:29:00,270 Epoch 13/500 +2024-11-07 22:29:42,845 Current Learning Rate: 0.0099833296 +2024-11-07 22:29:43,796 Train Loss: 0.0137499, Val Loss: 0.0125524 +2024-11-07 22:29:43,797 Epoch 14/500 +2024-11-07 22:30:26,263 Current Learning Rate: 0.0099806680 +2024-11-07 22:30:27,198 Train Loss: 0.0137007, Val Loss: 0.0125120 +2024-11-07 22:30:27,198 Epoch 15/500 +2024-11-07 22:31:09,734 Current Learning Rate: 0.0099778098 +2024-11-07 22:31:10,696 Train Loss: 0.0136692, Val Loss: 0.0124751 +2024-11-07 22:31:10,696 Epoch 16/500 +2024-11-07 22:31:53,584 Current Learning Rate: 0.0099747551 +2024-11-07 22:31:54,506 Train Loss: 0.0136214, Val Loss: 0.0124572 +2024-11-07 22:31:54,506 Epoch 17/500 +2024-11-07 22:32:37,299 Current Learning Rate: 0.0099715040 +2024-11-07 22:32:38,205 Train Loss: 0.0135864, Val Loss: 0.0124100 +2024-11-07 22:32:38,206 Epoch 18/500 +2024-11-07 22:33:20,727 Current Learning Rate: 0.0099680566 +2024-11-07 22:33:21,650 Train Loss: 0.0135543, Val Loss: 0.0123793 +2024-11-07 22:33:21,650 Epoch 19/500 +2024-11-07 22:34:04,208 Current Learning Rate: 0.0099644130 +2024-11-07 22:34:05,165 Train Loss: 0.0135091, Val Loss: 0.0123453 +2024-11-07 22:34:05,166 Epoch 20/500 +2024-11-07 22:34:47,727 Current Learning Rate: 0.0099605735 +2024-11-07 22:34:48,707 Train Loss: 0.0135106, Val Loss: 
0.0123306 +2024-11-07 22:34:48,707 Epoch 21/500 +2024-11-07 22:35:31,477 Current Learning Rate: 0.0099565382 +2024-11-07 22:35:32,516 Train Loss: 0.0134545, Val Loss: 0.0122849 +2024-11-07 22:35:32,516 Epoch 22/500 +2024-11-07 22:36:15,957 Current Learning Rate: 0.0099523071 +2024-11-07 22:36:16,858 Train Loss: 0.0133962, Val Loss: 0.0122456 +2024-11-07 22:36:16,858 Epoch 23/500 +2024-11-07 22:36:59,754 Current Learning Rate: 0.0099478806 +2024-11-07 22:37:00,715 Train Loss: 0.0133583, Val Loss: 0.0122117 +2024-11-07 22:37:00,715 Epoch 24/500 +2024-11-07 22:37:43,392 Current Learning Rate: 0.0099432587 +2024-11-07 22:37:44,257 Train Loss: 0.0133090, Val Loss: 0.0121706 +2024-11-07 22:37:44,257 Epoch 25/500 +2024-11-07 22:38:26,754 Current Learning Rate: 0.0099384417 +2024-11-07 22:38:27,736 Train Loss: 0.0132701, Val Loss: 0.0121345 +2024-11-07 22:38:27,736 Epoch 26/500 +2024-11-07 22:39:10,473 Current Learning Rate: 0.0099334297 +2024-11-07 22:39:11,459 Train Loss: 0.0132252, Val Loss: 0.0120947 +2024-11-07 22:39:11,460 Epoch 27/500 +2024-11-07 22:39:54,434 Current Learning Rate: 0.0099282230 +2024-11-07 22:39:55,363 Train Loss: 0.0131743, Val Loss: 0.0120464 +2024-11-07 22:39:55,363 Epoch 28/500 +2024-11-07 22:40:37,858 Current Learning Rate: 0.0099228217 +2024-11-07 22:40:38,798 Train Loss: 0.0131305, Val Loss: 0.0120216 +2024-11-07 22:40:38,799 Epoch 29/500 +2024-11-07 22:41:21,332 Current Learning Rate: 0.0099172260 +2024-11-07 22:41:22,378 Train Loss: 0.0130951, Val Loss: 0.0119825 +2024-11-07 22:41:22,379 Epoch 30/500 +2024-11-07 22:42:05,202 Current Learning Rate: 0.0099114363 +2024-11-07 22:42:06,252 Train Loss: 0.0130516, Val Loss: 0.0119473 +2024-11-07 22:42:06,253 Epoch 31/500 +2024-11-07 22:42:48,065 Current Learning Rate: 0.0099054526 +2024-11-07 22:42:48,956 Train Loss: 0.0130103, Val Loss: 0.0119053 +2024-11-07 22:42:48,956 Epoch 32/500 +2024-11-07 22:43:30,683 Current Learning Rate: 0.0098992753 +2024-11-07 22:43:31,510 Train Loss: 0.0129652, Val 
Loss: 0.0118650 +2024-11-07 22:43:31,510 Epoch 33/500 +2024-11-07 22:44:14,169 Current Learning Rate: 0.0098929045 +2024-11-07 22:44:14,998 Train Loss: 0.0129213, Val Loss: 0.0118285 +2024-11-07 22:44:14,998 Epoch 34/500 +2024-11-07 22:44:56,545 Current Learning Rate: 0.0098863406 +2024-11-07 22:44:57,402 Train Loss: 0.0128869, Val Loss: 0.0118071 +2024-11-07 22:44:57,402 Epoch 35/500 +2024-11-07 22:45:39,235 Current Learning Rate: 0.0098795838 +2024-11-07 22:45:40,087 Train Loss: 0.0128680, Val Loss: 0.0117558 +2024-11-07 22:45:40,087 Epoch 36/500 +2024-11-07 22:46:22,135 Current Learning Rate: 0.0098726344 +2024-11-07 22:46:22,135 Train Loss: 0.0128403, Val Loss: 0.0117609 +2024-11-07 22:46:22,136 Epoch 37/500 +2024-11-07 22:47:05,416 Current Learning Rate: 0.0098654926 +2024-11-07 22:47:06,287 Train Loss: 0.0127943, Val Loss: 0.0117057 +2024-11-07 22:47:06,287 Epoch 38/500 +2024-11-07 22:47:47,634 Current Learning Rate: 0.0098581587 +2024-11-07 22:47:48,534 Train Loss: 0.0127491, Val Loss: 0.0116686 +2024-11-07 22:47:48,535 Epoch 39/500 +2024-11-07 22:48:30,442 Current Learning Rate: 0.0098506330 +2024-11-07 22:48:31,322 Train Loss: 0.0126882, Val Loss: 0.0116176 +2024-11-07 22:48:31,322 Epoch 40/500 +2024-11-07 22:49:13,401 Current Learning Rate: 0.0098429158 +2024-11-07 22:49:14,295 Train Loss: 0.0126285, Val Loss: 0.0115891 +2024-11-07 22:49:14,295 Epoch 41/500 +2024-11-07 22:49:55,995 Current Learning Rate: 0.0098350074 +2024-11-07 22:49:56,855 Train Loss: 0.0125765, Val Loss: 0.0115646 +2024-11-07 22:49:56,856 Epoch 42/500 +2024-11-07 22:50:39,742 Current Learning Rate: 0.0098269082 +2024-11-07 22:50:40,617 Train Loss: 0.0125321, Val Loss: 0.0115016 +2024-11-07 22:50:40,618 Epoch 43/500 +2024-11-07 22:51:22,759 Current Learning Rate: 0.0098186184 +2024-11-07 22:51:23,574 Train Loss: 0.0124958, Val Loss: 0.0114938 +2024-11-07 22:51:23,574 Epoch 44/500 +2024-11-07 22:52:05,747 Current Learning Rate: 0.0098101384 +2024-11-07 22:52:06,620 Train Loss: 0.0124629, 
Val Loss: 0.0114405 +2024-11-07 22:52:06,620 Epoch 45/500 +2024-11-07 22:52:48,425 Current Learning Rate: 0.0098014684 +2024-11-07 22:52:49,275 Train Loss: 0.0124407, Val Loss: 0.0114370 +2024-11-07 22:52:49,275 Epoch 46/500 +2024-11-07 22:53:31,303 Current Learning Rate: 0.0097926089 +2024-11-07 22:53:32,124 Train Loss: 0.0124412, Val Loss: 0.0113975 +2024-11-07 22:53:32,124 Epoch 47/500 +2024-11-07 22:54:13,751 Current Learning Rate: 0.0097835603 +2024-11-07 22:54:13,752 Train Loss: 0.0124600, Val Loss: 0.0114263 +2024-11-07 22:54:13,752 Epoch 48/500 +2024-11-07 22:54:56,968 Current Learning Rate: 0.0097743227 +2024-11-07 22:54:57,837 Train Loss: 0.0124140, Val Loss: 0.0113821 +2024-11-07 22:54:57,838 Epoch 49/500 +2024-11-07 22:55:40,107 Current Learning Rate: 0.0097648967 +2024-11-07 22:55:40,108 Train Loss: 0.0124548, Val Loss: 0.0114872 +2024-11-07 22:55:40,108 Epoch 50/500 +2024-11-07 22:56:22,887 Current Learning Rate: 0.0097552826 +2024-11-07 22:56:23,678 Train Loss: 0.0124228, Val Loss: 0.0113649 +2024-11-07 22:56:23,679 Epoch 51/500 +2024-11-07 22:57:06,012 Current Learning Rate: 0.0097454807 +2024-11-07 22:57:06,946 Train Loss: 0.0123534, Val Loss: 0.0113432 +2024-11-07 22:57:06,947 Epoch 52/500 +2024-11-07 22:57:48,659 Current Learning Rate: 0.0097354915 +2024-11-07 22:57:49,474 Train Loss: 0.0123351, Val Loss: 0.0113261 +2024-11-07 22:57:49,474 Epoch 53/500 +2024-11-07 22:58:31,602 Current Learning Rate: 0.0097253154 +2024-11-07 22:58:31,603 Train Loss: 0.0123520, Val Loss: 0.0113274 +2024-11-07 22:58:31,603 Epoch 54/500 +2024-11-07 22:59:14,604 Current Learning Rate: 0.0097149527 +2024-11-07 22:59:14,604 Train Loss: 0.0123213, Val Loss: 0.0113277 +2024-11-07 22:59:14,604 Epoch 55/500 +2024-11-07 22:59:57,404 Current Learning Rate: 0.0097044038 +2024-11-07 22:59:57,404 Train Loss: 0.0123168, Val Loss: 0.0113346 +2024-11-07 22:59:57,405 Epoch 56/500 +2024-11-07 23:00:40,534 Current Learning Rate: 0.0096936693 +2024-11-07 23:00:41,424 Train Loss: 
0.0122913, Val Loss: 0.0112935 +2024-11-07 23:00:41,425 Epoch 57/500 +2024-11-07 23:01:23,188 Current Learning Rate: 0.0096827494 +2024-11-07 23:01:23,189 Train Loss: 0.0122762, Val Loss: 0.0113010 +2024-11-07 23:01:23,189 Epoch 58/500 +2024-11-07 23:02:05,894 Current Learning Rate: 0.0096716447 +2024-11-07 23:02:05,895 Train Loss: 0.0122585, Val Loss: 0.0113474 +2024-11-07 23:02:05,895 Epoch 59/500 +2024-11-07 23:02:48,631 Current Learning Rate: 0.0096603556 +2024-11-07 23:02:48,631 Train Loss: 0.0123177, Val Loss: 0.0112963 +2024-11-07 23:02:48,631 Epoch 60/500 +2024-11-07 23:03:32,309 Current Learning Rate: 0.0096488824 +2024-11-07 23:03:33,254 Train Loss: 0.0122490, Val Loss: 0.0112400 +2024-11-07 23:03:33,254 Epoch 61/500 +2024-11-07 23:04:16,450 Current Learning Rate: 0.0096372258 +2024-11-07 23:04:17,397 Train Loss: 0.0122190, Val Loss: 0.0112330 +2024-11-07 23:04:17,397 Epoch 62/500 +2024-11-07 23:05:00,005 Current Learning Rate: 0.0096253860 +2024-11-07 23:05:00,006 Train Loss: 0.0122347, Val Loss: 0.0112331 +2024-11-07 23:05:00,006 Epoch 63/500 +2024-11-07 23:05:42,539 Current Learning Rate: 0.0096133637 +2024-11-07 23:05:43,460 Train Loss: 0.0122209, Val Loss: 0.0112023 +2024-11-07 23:05:43,461 Epoch 64/500 +2024-11-07 23:06:25,988 Current Learning Rate: 0.0096011592 +2024-11-07 23:06:26,946 Train Loss: 0.0122057, Val Loss: 0.0111989 +2024-11-07 23:06:26,946 Epoch 65/500 +2024-11-07 23:07:10,037 Current Learning Rate: 0.0095887731 +2024-11-07 23:07:10,939 Train Loss: 0.0121947, Val Loss: 0.0111890 +2024-11-07 23:07:10,939 Epoch 66/500 +2024-11-07 23:07:53,574 Current Learning Rate: 0.0095762059 +2024-11-07 23:07:53,575 Train Loss: 0.0121866, Val Loss: 0.0112065 +2024-11-07 23:07:53,575 Epoch 67/500 +2024-11-07 23:08:36,935 Current Learning Rate: 0.0095634579 +2024-11-07 23:08:37,800 Train Loss: 0.0121710, Val Loss: 0.0111340 +2024-11-07 23:08:37,801 Epoch 68/500 +2024-11-07 23:09:20,222 Current Learning Rate: 0.0095505299 +2024-11-07 23:09:21,119 Train 
Loss: 0.0121109, Val Loss: 0.0110944 +2024-11-07 23:09:21,119 Epoch 69/500 +2024-11-07 23:10:03,479 Current Learning Rate: 0.0095374221 +2024-11-07 23:10:04,404 Train Loss: 0.0120731, Val Loss: 0.0110829 +2024-11-07 23:10:04,404 Epoch 70/500 +2024-11-07 23:10:46,795 Current Learning Rate: 0.0095241353 +2024-11-07 23:10:47,764 Train Loss: 0.0120599, Val Loss: 0.0110678 +2024-11-07 23:10:47,764 Epoch 71/500 +2024-11-07 23:11:29,672 Current Learning Rate: 0.0095106698 +2024-11-07 23:11:30,541 Train Loss: 0.0119886, Val Loss: 0.0109996 +2024-11-07 23:11:30,541 Epoch 72/500 +2024-11-07 23:12:12,935 Current Learning Rate: 0.0094970263 +2024-11-07 23:12:13,831 Train Loss: 0.0119338, Val Loss: 0.0108796 +2024-11-07 23:12:13,832 Epoch 73/500 +2024-11-07 23:12:55,664 Current Learning Rate: 0.0094832052 +2024-11-07 23:12:56,470 Train Loss: 0.0117798, Val Loss: 0.0107755 +2024-11-07 23:12:56,470 Epoch 74/500 +2024-11-07 23:13:38,630 Current Learning Rate: 0.0094692071 +2024-11-07 23:13:39,479 Train Loss: 0.0117550, Val Loss: 0.0105169 +2024-11-07 23:13:39,479 Epoch 75/500 +2024-11-07 23:14:21,627 Current Learning Rate: 0.0094550326 +2024-11-07 23:14:22,419 Train Loss: 0.0113729, Val Loss: 0.0104467 +2024-11-07 23:14:22,419 Epoch 76/500 +2024-11-07 23:15:04,125 Current Learning Rate: 0.0094406822 +2024-11-07 23:15:05,082 Train Loss: 0.0112202, Val Loss: 0.0101288 +2024-11-07 23:15:05,082 Epoch 77/500 +2024-11-07 23:15:48,041 Current Learning Rate: 0.0094261566 +2024-11-07 23:15:48,042 Train Loss: 0.0112488, Val Loss: 0.0111536 +2024-11-07 23:15:48,043 Epoch 78/500 +2024-11-07 23:16:30,985 Current Learning Rate: 0.0094114561 +2024-11-07 23:16:31,900 Train Loss: 0.0111576, Val Loss: 0.0098483 +2024-11-07 23:16:31,901 Epoch 79/500 +2024-11-07 23:17:14,779 Current Learning Rate: 0.0093965816 +2024-11-07 23:17:18,746 Train Loss: 0.0105467, Val Loss: 0.0096502 +2024-11-07 23:17:18,746 Epoch 80/500 +2024-11-07 23:18:01,676 Current Learning Rate: 0.0093815334 +2024-11-07 23:18:04,768 
Train Loss: 0.0103667, Val Loss: 0.0095031 +2024-11-07 23:18:04,768 Epoch 81/500 +2024-11-07 23:18:47,242 Current Learning Rate: 0.0093663123 +2024-11-07 23:18:48,101 Train Loss: 0.0103201, Val Loss: 0.0093548 +2024-11-07 23:18:48,101 Epoch 82/500 +2024-11-07 23:19:30,540 Current Learning Rate: 0.0093509188 +2024-11-07 23:19:30,541 Train Loss: 0.0101477, Val Loss: 0.0096098 +2024-11-07 23:19:30,541 Epoch 83/500 +2024-11-07 23:20:12,703 Current Learning Rate: 0.0093353535 +2024-11-07 23:20:13,653 Train Loss: 0.0102413, Val Loss: 0.0092060 +2024-11-07 23:20:13,654 Epoch 84/500 +2024-11-07 23:20:56,538 Current Learning Rate: 0.0093196171 +2024-11-07 23:20:57,444 Train Loss: 0.0098635, Val Loss: 0.0091342 +2024-11-07 23:20:57,444 Epoch 85/500 +2024-11-07 23:21:40,154 Current Learning Rate: 0.0093037101 +2024-11-07 23:21:41,046 Train Loss: 0.0099973, Val Loss: 0.0090344 +2024-11-07 23:21:41,046 Epoch 86/500 +2024-11-07 23:22:23,649 Current Learning Rate: 0.0092876333 +2024-11-07 23:22:23,650 Train Loss: 0.0098351, Val Loss: 0.0091489 +2024-11-07 23:22:23,650 Epoch 87/500 +2024-11-07 23:23:06,237 Current Learning Rate: 0.0092713872 +2024-11-07 23:23:07,177 Train Loss: 0.0098475, Val Loss: 0.0090277 +2024-11-07 23:23:07,178 Epoch 88/500 +2024-11-07 23:23:50,263 Current Learning Rate: 0.0092549724 +2024-11-07 23:23:51,122 Train Loss: 0.0097378, Val Loss: 0.0089525 +2024-11-07 23:23:51,123 Epoch 89/500 +2024-11-07 23:24:33,725 Current Learning Rate: 0.0092383897 +2024-11-07 23:24:34,591 Train Loss: 0.0095828, Val Loss: 0.0088342 +2024-11-07 23:24:34,592 Epoch 90/500 +2024-11-07 23:25:15,895 Current Learning Rate: 0.0092216396 +2024-11-07 23:25:15,896 Train Loss: 0.0096245, Val Loss: 0.0088893 +2024-11-07 23:25:15,896 Epoch 91/500 +2024-11-07 23:25:58,636 Current Learning Rate: 0.0092047229 +2024-11-07 23:25:59,450 Train Loss: 0.0094644, Val Loss: 0.0086671 +2024-11-07 23:25:59,450 Epoch 92/500 +2024-11-07 23:26:41,827 Current Learning Rate: 0.0091876402 +2024-11-07 
23:26:41,828 Train Loss: 0.0097283, Val Loss: 0.0088217 +2024-11-07 23:26:41,829 Epoch 93/500 +2024-11-07 23:27:24,495 Current Learning Rate: 0.0091703922 +2024-11-07 23:27:24,495 Train Loss: 0.0094455, Val Loss: 0.0088597 +2024-11-07 23:27:24,495 Epoch 94/500 +2024-11-07 23:28:07,063 Current Learning Rate: 0.0091529795 +2024-11-07 23:28:08,140 Train Loss: 0.0093411, Val Loss: 0.0085087 +2024-11-07 23:28:08,140 Epoch 95/500 +2024-11-07 23:28:50,568 Current Learning Rate: 0.0091354029 +2024-11-07 23:28:51,479 Train Loss: 0.0090551, Val Loss: 0.0083278 +2024-11-07 23:28:51,479 Epoch 96/500 +2024-11-07 23:29:34,164 Current Learning Rate: 0.0091176630 +2024-11-07 23:29:35,022 Train Loss: 0.0090625, Val Loss: 0.0081996 +2024-11-07 23:29:35,022 Epoch 97/500 +2024-11-07 23:30:17,334 Current Learning Rate: 0.0090997605 +2024-11-07 23:30:18,253 Train Loss: 0.0089111, Val Loss: 0.0080680 +2024-11-07 23:30:18,253 Epoch 98/500 +2024-11-07 23:31:00,914 Current Learning Rate: 0.0090816963 +2024-11-07 23:31:02,759 Train Loss: 0.0087597, Val Loss: 0.0080317 +2024-11-07 23:31:02,760 Epoch 99/500 +2024-11-07 23:31:44,977 Current Learning Rate: 0.0090634708 +2024-11-07 23:31:45,855 Train Loss: 0.0085938, Val Loss: 0.0079159 +2024-11-07 23:31:45,855 Epoch 100/500 +2024-11-07 23:32:28,014 Current Learning Rate: 0.0090450850 +2024-11-07 23:32:28,937 Train Loss: 0.0085626, Val Loss: 0.0078214 +2024-11-07 23:32:28,938 Epoch 101/500 +2024-11-07 23:33:11,897 Current Learning Rate: 0.0090265394 +2024-11-07 23:33:12,855 Train Loss: 0.0084213, Val Loss: 0.0076927 +2024-11-07 23:33:12,855 Epoch 102/500 +2024-11-07 23:33:55,304 Current Learning Rate: 0.0090078349 +2024-11-07 23:33:55,305 Train Loss: 0.0083415, Val Loss: 0.0079115 +2024-11-07 23:33:55,306 Epoch 103/500 +2024-11-07 23:34:38,038 Current Learning Rate: 0.0089889722 +2024-11-07 23:34:38,041 Train Loss: 0.0084495, Val Loss: 0.0081668 +2024-11-07 23:34:38,042 Epoch 104/500 +2024-11-07 23:35:21,108 Current Learning Rate: 0.0089699520 
+2024-11-07 23:35:21,109 Train Loss: 0.0084807, Val Loss: 0.0079219 +2024-11-07 23:35:21,109 Epoch 105/500 +2024-11-07 23:36:04,113 Current Learning Rate: 0.0089507751 +2024-11-07 23:36:05,059 Train Loss: 0.0082787, Val Loss: 0.0075868 +2024-11-07 23:36:05,059 Epoch 106/500 +2024-11-07 23:36:47,654 Current Learning Rate: 0.0089314422 +2024-11-07 23:36:48,595 Train Loss: 0.0082018, Val Loss: 0.0074978 +2024-11-07 23:36:48,595 Epoch 107/500 +2024-11-07 23:37:31,110 Current Learning Rate: 0.0089119541 +2024-11-07 23:37:32,059 Train Loss: 0.0080878, Val Loss: 0.0074788 +2024-11-07 23:37:32,060 Epoch 108/500 +2024-11-07 23:38:14,750 Current Learning Rate: 0.0088923115 +2024-11-07 23:38:15,640 Train Loss: 0.0080311, Val Loss: 0.0074572 +2024-11-07 23:38:15,640 Epoch 109/500 +2024-11-07 23:38:58,264 Current Learning Rate: 0.0088725153 +2024-11-07 23:38:59,125 Train Loss: 0.0080315, Val Loss: 0.0074001 +2024-11-07 23:38:59,126 Epoch 110/500 +2024-11-07 23:39:41,625 Current Learning Rate: 0.0088525662 +2024-11-07 23:39:42,572 Train Loss: 0.0082375, Val Loss: 0.0073667 +2024-11-07 23:39:42,572 Epoch 111/500 +2024-11-07 23:40:25,610 Current Learning Rate: 0.0088324650 +2024-11-07 23:40:26,627 Train Loss: 0.0079146, Val Loss: 0.0073540 +2024-11-07 23:40:26,628 Epoch 112/500 +2024-11-07 23:41:09,480 Current Learning Rate: 0.0088122126 +2024-11-07 23:41:10,517 Train Loss: 0.0079099, Val Loss: 0.0071813 +2024-11-07 23:41:10,517 Epoch 113/500 +2024-11-07 23:41:53,648 Current Learning Rate: 0.0087918096 +2024-11-07 23:41:53,650 Train Loss: 0.0080728, Val Loss: 0.0075504 +2024-11-07 23:41:53,650 Epoch 114/500 +2024-11-07 23:42:36,547 Current Learning Rate: 0.0087712569 +2024-11-07 23:42:36,548 Train Loss: 0.0080282, Val Loss: 0.0074407 +2024-11-07 23:42:36,548 Epoch 115/500 +2024-11-07 23:43:19,178 Current Learning Rate: 0.0087505553 +2024-11-07 23:43:19,179 Train Loss: 0.0087424, Val Loss: 0.0090356 +2024-11-07 23:43:19,180 Epoch 116/500 +2024-11-07 23:44:01,731 Current Learning 
Rate: 0.0087297057 +2024-11-07 23:44:02,913 Train Loss: 0.0084912, Val Loss: 0.0077046 +2024-11-07 23:44:02,915 Epoch 117/500 +2024-11-07 23:44:45,520 Current Learning Rate: 0.0087087089 +2024-11-07 23:44:45,521 Train Loss: 0.0082660, Val Loss: 0.0079776 +2024-11-07 23:44:45,522 Epoch 118/500 +2024-11-07 23:45:28,337 Current Learning Rate: 0.0086875656 +2024-11-07 23:45:28,338 Train Loss: 0.0081005, Val Loss: 0.0074286 +2024-11-07 23:45:28,338 Epoch 119/500 +2024-11-07 23:46:11,223 Current Learning Rate: 0.0086662767 +2024-11-07 23:46:11,224 Train Loss: 0.0081271, Val Loss: 0.0073211 +2024-11-07 23:46:11,224 Epoch 120/500 +2024-11-07 23:46:53,991 Current Learning Rate: 0.0086448431 +2024-11-07 23:46:53,993 Train Loss: 0.0077879, Val Loss: 0.0072905 +2024-11-07 23:46:53,993 Epoch 121/500 +2024-11-07 23:47:37,028 Current Learning Rate: 0.0086232657 +2024-11-07 23:47:37,030 Train Loss: 0.0077787, Val Loss: 0.0072158 +2024-11-07 23:47:37,030 Epoch 122/500 +2024-11-07 23:48:20,070 Current Learning Rate: 0.0086015451 +2024-11-07 23:48:21,017 Train Loss: 0.0077486, Val Loss: 0.0071664 +2024-11-07 23:48:21,017 Epoch 123/500 +2024-11-07 23:49:04,061 Current Learning Rate: 0.0085796824 +2024-11-07 23:49:04,996 Train Loss: 0.0075843, Val Loss: 0.0071159 +2024-11-07 23:49:04,996 Epoch 124/500 +2024-11-07 23:49:47,546 Current Learning Rate: 0.0085576784 +2024-11-07 23:49:48,541 Train Loss: 0.0078122, Val Loss: 0.0070644 +2024-11-07 23:49:48,541 Epoch 125/500 +2024-11-07 23:50:31,315 Current Learning Rate: 0.0085355339 +2024-11-07 23:50:31,316 Train Loss: 0.0076326, Val Loss: 0.0076103 +2024-11-07 23:50:31,316 Epoch 126/500 +2024-11-07 23:51:14,031 Current Learning Rate: 0.0085132498 +2024-11-07 23:51:14,970 Train Loss: 0.0075467, Val Loss: 0.0069888 +2024-11-07 23:51:14,970 Epoch 127/500 +2024-11-07 23:51:57,629 Current Learning Rate: 0.0084908271 +2024-11-07 23:51:57,630 Train Loss: 0.0078883, Val Loss: 0.0070635 +2024-11-07 23:51:57,630 Epoch 128/500 +2024-11-07 23:52:40,466 
Current Learning Rate: 0.0084682665 +2024-11-07 23:52:40,468 Train Loss: 0.0077054, Val Loss: 0.0070023 +2024-11-07 23:52:40,468 Epoch 129/500 +2024-11-07 23:53:23,308 Current Learning Rate: 0.0084455690 +2024-11-07 23:53:23,309 Train Loss: 0.0080053, Val Loss: 0.0074959 +2024-11-07 23:53:23,310 Epoch 130/500 +2024-11-07 23:54:06,174 Current Learning Rate: 0.0084227355 +2024-11-07 23:54:06,174 Train Loss: 0.0076480, Val Loss: 0.0070868 +2024-11-07 23:54:06,175 Epoch 131/500 +2024-11-07 23:54:48,779 Current Learning Rate: 0.0083997669 +2024-11-07 23:54:48,785 Train Loss: 0.0078695, Val Loss: 0.0071509 +2024-11-07 23:54:48,785 Epoch 132/500 +2024-11-07 23:55:31,650 Current Learning Rate: 0.0083766640 +2024-11-07 23:55:32,570 Train Loss: 0.0074656, Val Loss: 0.0068894 +2024-11-07 23:55:32,570 Epoch 133/500 +2024-11-07 23:56:14,476 Current Learning Rate: 0.0083534279 +2024-11-07 23:56:14,476 Train Loss: 0.0074357, Val Loss: 0.0071089 +2024-11-07 23:56:14,477 Epoch 134/500 +2024-11-07 23:56:57,691 Current Learning Rate: 0.0083300593 +2024-11-07 23:56:58,550 Train Loss: 0.0077327, Val Loss: 0.0068289 +2024-11-07 23:56:58,550 Epoch 135/500 +2024-11-07 23:57:40,645 Current Learning Rate: 0.0083065593 +2024-11-07 23:57:41,459 Train Loss: 0.0072730, Val Loss: 0.0067916 +2024-11-07 23:57:41,460 Epoch 136/500 +2024-11-07 23:58:23,470 Current Learning Rate: 0.0082829288 +2024-11-07 23:58:23,471 Train Loss: 0.0074184, Val Loss: 0.0068378 +2024-11-07 23:58:23,472 Epoch 137/500 +2024-11-07 23:59:06,151 Current Learning Rate: 0.0082591686 +2024-11-07 23:59:07,042 Train Loss: 0.0071494, Val Loss: 0.0067802 +2024-11-07 23:59:07,042 Epoch 138/500 +2024-11-07 23:59:48,695 Current Learning Rate: 0.0082352798 +2024-11-07 23:59:48,696 Train Loss: 0.0077151, Val Loss: 0.0071614 +2024-11-07 23:59:48,697 Epoch 139/500 +2024-11-08 00:00:31,407 Current Learning Rate: 0.0082112633 +2024-11-08 00:00:31,407 Train Loss: 0.0080143, Val Loss: 0.0074311 +2024-11-08 00:00:31,407 Epoch 140/500 
+2024-11-08 00:01:14,322 Current Learning Rate: 0.0081871199 +2024-11-08 00:01:15,206 Train Loss: 0.0073650, Val Loss: 0.0066044 +2024-11-08 00:01:15,207 Epoch 141/500 +2024-11-08 00:01:56,719 Current Learning Rate: 0.0081628508 +2024-11-08 00:01:57,588 Train Loss: 0.0071499, Val Loss: 0.0066006 +2024-11-08 00:01:57,588 Epoch 142/500 +2024-11-08 00:02:39,777 Current Learning Rate: 0.0081384568 +2024-11-08 00:02:40,689 Train Loss: 0.0071652, Val Loss: 0.0065080 +2024-11-08 00:02:40,689 Epoch 143/500 +2024-11-08 00:03:22,842 Current Learning Rate: 0.0081139389 +2024-11-08 00:03:22,843 Train Loss: 0.0074271, Val Loss: 0.0081458 +2024-11-08 00:03:22,843 Epoch 144/500 +2024-11-08 00:04:05,831 Current Learning Rate: 0.0080892981 +2024-11-08 00:04:05,831 Train Loss: 0.0072859, Val Loss: 0.0066612 +2024-11-08 00:04:05,832 Epoch 145/500 +2024-11-08 00:04:48,743 Current Learning Rate: 0.0080645353 +2024-11-08 00:04:48,744 Train Loss: 0.0074111, Val Loss: 0.0066496 +2024-11-08 00:04:48,744 Epoch 146/500 +2024-11-08 00:05:31,624 Current Learning Rate: 0.0080396515 +2024-11-08 00:05:32,476 Train Loss: 0.0069171, Val Loss: 0.0064089 +2024-11-08 00:05:32,477 Epoch 147/500 +2024-11-08 00:06:14,650 Current Learning Rate: 0.0080146477 +2024-11-08 00:06:14,651 Train Loss: 0.0069974, Val Loss: 0.0070677 +2024-11-08 00:06:14,651 Epoch 148/500 +2024-11-08 00:06:57,289 Current Learning Rate: 0.0079895249 +2024-11-08 00:06:57,289 Train Loss: 0.0069063, Val Loss: 0.0067604 +2024-11-08 00:06:57,290 Epoch 149/500 +2024-11-08 00:07:40,250 Current Learning Rate: 0.0079642841 +2024-11-08 00:07:40,252 Train Loss: 0.0067736, Val Loss: 0.0064420 +2024-11-08 00:07:40,252 Epoch 150/500 +2024-11-08 00:08:23,396 Current Learning Rate: 0.0079389263 +2024-11-08 00:08:24,321 Train Loss: 0.0067033, Val Loss: 0.0061829 +2024-11-08 00:08:24,321 Epoch 151/500 +2024-11-08 00:09:07,074 Current Learning Rate: 0.0079134524 +2024-11-08 00:09:07,075 Train Loss: 0.0069070, Val Loss: 0.0064371 +2024-11-08 
00:09:07,075 Epoch 152/500 +2024-11-08 00:09:49,089 Current Learning Rate: 0.0078878635 +2024-11-08 00:09:49,989 Train Loss: 0.0067236, Val Loss: 0.0061063 +2024-11-08 00:09:49,990 Epoch 153/500 +2024-11-08 00:10:31,498 Current Learning Rate: 0.0078621606 +2024-11-08 00:10:32,367 Train Loss: 0.0065974, Val Loss: 0.0060072 +2024-11-08 00:10:32,367 Epoch 154/500 +2024-11-08 00:11:13,916 Current Learning Rate: 0.0078363447 +2024-11-08 00:11:13,917 Train Loss: 0.0064580, Val Loss: 0.0064116 +2024-11-08 00:11:13,917 Epoch 155/500 +2024-11-08 00:11:56,040 Current Learning Rate: 0.0078104169 +2024-11-08 00:11:56,894 Train Loss: 0.0063879, Val Loss: 0.0058803 +2024-11-08 00:11:56,894 Epoch 156/500 +2024-11-08 00:12:38,747 Current Learning Rate: 0.0077843781 +2024-11-08 00:12:38,748 Train Loss: 0.0063071, Val Loss: 0.0058921 +2024-11-08 00:12:38,748 Epoch 157/500 +2024-11-08 00:13:21,424 Current Learning Rate: 0.0077582294 +2024-11-08 00:13:21,425 Train Loss: 0.0067627, Val Loss: 0.0062161 +2024-11-08 00:13:21,425 Epoch 158/500 +2024-11-08 00:14:03,960 Current Learning Rate: 0.0077319717 +2024-11-08 00:14:04,827 Train Loss: 0.0062842, Val Loss: 0.0058715 +2024-11-08 00:14:04,828 Epoch 159/500 +2024-11-08 00:14:46,831 Current Learning Rate: 0.0077056063 +2024-11-08 00:14:47,657 Train Loss: 0.0062131, Val Loss: 0.0058053 +2024-11-08 00:14:47,657 Epoch 160/500 +2024-11-08 00:15:29,611 Current Learning Rate: 0.0076791340 +2024-11-08 00:15:30,520 Train Loss: 0.0061689, Val Loss: 0.0055723 +2024-11-08 00:15:30,520 Epoch 161/500 +2024-11-08 00:16:13,339 Current Learning Rate: 0.0076525559 +2024-11-08 00:16:13,340 Train Loss: 0.0060606, Val Loss: 0.0056133 +2024-11-08 00:16:13,340 Epoch 162/500 +2024-11-08 00:16:56,203 Current Learning Rate: 0.0076258731 +2024-11-08 00:16:56,204 Train Loss: 0.0061301, Val Loss: 0.0057822 +2024-11-08 00:16:56,204 Epoch 163/500 +2024-11-08 00:17:38,685 Current Learning Rate: 0.0075990867 +2024-11-08 00:17:38,685 Train Loss: 0.0060600, Val Loss: 
0.0056386 +2024-11-08 00:17:38,686 Epoch 164/500 +2024-11-08 00:18:21,277 Current Learning Rate: 0.0075721977 +2024-11-08 00:18:22,205 Train Loss: 0.0058362, Val Loss: 0.0053526 +2024-11-08 00:18:22,205 Epoch 165/500 +2024-11-08 00:19:03,781 Current Learning Rate: 0.0075452071 +2024-11-08 00:19:03,782 Train Loss: 0.0058426, Val Loss: 0.0054727 +2024-11-08 00:19:03,782 Epoch 166/500 +2024-11-08 00:19:46,461 Current Learning Rate: 0.0075181160 +2024-11-08 00:19:46,461 Train Loss: 0.0059891, Val Loss: 0.0058453 +2024-11-08 00:19:46,461 Epoch 167/500 +2024-11-08 00:20:29,278 Current Learning Rate: 0.0074909255 +2024-11-08 00:20:30,252 Train Loss: 0.0059390, Val Loss: 0.0053253 +2024-11-08 00:20:30,252 Epoch 168/500 +2024-11-08 00:21:12,399 Current Learning Rate: 0.0074636367 +2024-11-08 00:21:13,338 Train Loss: 0.0056589, Val Loss: 0.0052413 +2024-11-08 00:21:13,339 Epoch 169/500 +2024-11-08 00:21:54,822 Current Learning Rate: 0.0074362506 +2024-11-08 00:21:54,822 Train Loss: 0.0056802, Val Loss: 0.0052561 +2024-11-08 00:21:54,822 Epoch 170/500 +2024-11-08 00:22:38,600 Current Learning Rate: 0.0074087684 +2024-11-08 00:22:38,600 Train Loss: 0.0056563, Val Loss: 0.0056353 +2024-11-08 00:22:38,600 Epoch 171/500 +2024-11-08 00:23:21,301 Current Learning Rate: 0.0073811910 +2024-11-08 00:23:22,203 Train Loss: 0.0056567, Val Loss: 0.0051781 +2024-11-08 00:23:22,204 Epoch 172/500 +2024-11-08 00:24:04,180 Current Learning Rate: 0.0073535197 +2024-11-08 00:24:05,238 Train Loss: 0.0054826, Val Loss: 0.0051680 +2024-11-08 00:24:05,238 Epoch 173/500 +2024-11-08 00:24:47,051 Current Learning Rate: 0.0073257554 +2024-11-08 00:24:48,010 Train Loss: 0.0054443, Val Loss: 0.0050798 +2024-11-08 00:24:48,011 Epoch 174/500 +2024-11-08 00:25:29,932 Current Learning Rate: 0.0072978993 +2024-11-08 00:25:29,933 Train Loss: 0.0054496, Val Loss: 0.0051667 +2024-11-08 00:25:29,933 Epoch 175/500 +2024-11-08 00:26:12,970 Current Learning Rate: 0.0072699525 +2024-11-08 00:26:12,971 Train Loss: 
0.0053565, Val Loss: 0.0053172 +2024-11-08 00:26:12,971 Epoch 176/500 +2024-11-08 00:26:55,505 Current Learning Rate: 0.0072419161 +2024-11-08 00:26:56,373 Train Loss: 0.0054158, Val Loss: 0.0050369 +2024-11-08 00:26:56,374 Epoch 177/500 +2024-11-08 00:27:38,124 Current Learning Rate: 0.0072137912 +2024-11-08 00:27:38,992 Train Loss: 0.0053009, Val Loss: 0.0049588 +2024-11-08 00:27:38,992 Epoch 178/500 +2024-11-08 00:28:20,718 Current Learning Rate: 0.0071855788 +2024-11-08 00:28:21,612 Train Loss: 0.0052910, Val Loss: 0.0049281 +2024-11-08 00:28:21,612 Epoch 179/500 +2024-11-08 00:29:03,385 Current Learning Rate: 0.0071572802 +2024-11-08 00:29:04,289 Train Loss: 0.0054851, Val Loss: 0.0049177 +2024-11-08 00:29:04,289 Epoch 180/500 +2024-11-08 00:29:46,269 Current Learning Rate: 0.0071288965 +2024-11-08 00:29:47,214 Train Loss: 0.0052598, Val Loss: 0.0048433 +2024-11-08 00:29:47,215 Epoch 181/500 +2024-11-08 00:30:29,207 Current Learning Rate: 0.0071004286 +2024-11-08 00:30:30,147 Train Loss: 0.0051566, Val Loss: 0.0048192 +2024-11-08 00:30:30,148 Epoch 182/500 +2024-11-08 00:31:12,169 Current Learning Rate: 0.0070718779 +2024-11-08 00:31:12,170 Train Loss: 0.0052951, Val Loss: 0.0049161 +2024-11-08 00:31:12,170 Epoch 183/500 +2024-11-08 00:31:55,776 Current Learning Rate: 0.0070432454 +2024-11-08 00:31:55,777 Train Loss: 0.0052649, Val Loss: 0.0049453 +2024-11-08 00:31:55,777 Epoch 184/500 +2024-11-08 00:32:38,561 Current Learning Rate: 0.0070145322 +2024-11-08 00:32:38,563 Train Loss: 0.0050289, Val Loss: 0.0048511 +2024-11-08 00:32:38,563 Epoch 185/500 +2024-11-08 00:33:21,504 Current Learning Rate: 0.0069857395 +2024-11-08 00:33:21,505 Train Loss: 0.0050443, Val Loss: 0.0048278 +2024-11-08 00:33:21,506 Epoch 186/500 +2024-11-08 00:34:04,501 Current Learning Rate: 0.0069568683 +2024-11-08 00:34:05,409 Train Loss: 0.0049701, Val Loss: 0.0047274 +2024-11-08 00:34:05,410 Epoch 187/500 +2024-11-08 00:34:47,942 Current Learning Rate: 0.0069279200 +2024-11-08 
00:34:48,800 Train Loss: 0.0049878, Val Loss: 0.0046626 +2024-11-08 00:34:48,800 Epoch 188/500 +2024-11-08 00:35:30,223 Current Learning Rate: 0.0068988955 +2024-11-08 00:35:31,131 Train Loss: 0.0049107, Val Loss: 0.0045812 +2024-11-08 00:35:31,131 Epoch 189/500 +2024-11-08 00:36:12,896 Current Learning Rate: 0.0068697960 +2024-11-08 00:36:12,897 Train Loss: 0.0050526, Val Loss: 0.0047874 +2024-11-08 00:36:12,897 Epoch 190/500 +2024-11-08 00:36:55,387 Current Learning Rate: 0.0068406228 +2024-11-08 00:36:55,387 Train Loss: 0.0047581, Val Loss: 0.0046086 +2024-11-08 00:36:55,387 Epoch 191/500 +2024-11-08 00:37:37,879 Current Learning Rate: 0.0068113768 +2024-11-08 00:37:37,879 Train Loss: 0.0049161, Val Loss: 0.0046687 +2024-11-08 00:37:37,879 Epoch 192/500 +2024-11-08 00:38:21,034 Current Learning Rate: 0.0067820594 +2024-11-08 00:38:21,035 Train Loss: 0.0048083, Val Loss: 0.0046054 +2024-11-08 00:38:21,035 Epoch 193/500 +2024-11-08 00:39:04,276 Current Learning Rate: 0.0067526716 +2024-11-08 00:39:05,223 Train Loss: 0.0047706, Val Loss: 0.0045204 +2024-11-08 00:39:05,223 Epoch 194/500 +2024-11-08 00:39:47,144 Current Learning Rate: 0.0067232146 +2024-11-08 00:39:48,072 Train Loss: 0.0048379, Val Loss: 0.0044465 +2024-11-08 00:39:48,072 Epoch 195/500 +2024-11-08 00:40:29,931 Current Learning Rate: 0.0066936896 +2024-11-08 00:40:29,932 Train Loss: 0.0047138, Val Loss: 0.0044838 +2024-11-08 00:40:29,932 Epoch 196/500 +2024-11-08 00:41:13,073 Current Learning Rate: 0.0066640977 +2024-11-08 00:41:13,073 Train Loss: 0.0046545, Val Loss: 0.0046020 +2024-11-08 00:41:13,074 Epoch 197/500 +2024-11-08 00:41:55,789 Current Learning Rate: 0.0066344401 +2024-11-08 00:41:55,789 Train Loss: 0.0046251, Val Loss: 0.0044483 +2024-11-08 00:41:55,789 Epoch 198/500 +2024-11-08 00:42:38,579 Current Learning Rate: 0.0066047180 +2024-11-08 00:42:39,513 Train Loss: 0.0046265, Val Loss: 0.0044011 +2024-11-08 00:42:39,513 Epoch 199/500 +2024-11-08 00:43:21,974 Current Learning Rate: 
0.0065749326 +2024-11-08 00:43:21,975 Train Loss: 0.0045786, Val Loss: 0.0044053 +2024-11-08 00:43:21,975 Epoch 200/500 +2024-11-08 00:44:04,304 Current Learning Rate: 0.0065450850 +2024-11-08 00:44:04,305 Train Loss: 0.0044677, Val Loss: 0.0044895 +2024-11-08 00:44:04,305 Epoch 201/500 +2024-11-08 00:44:47,147 Current Learning Rate: 0.0065151763 +2024-11-08 00:44:47,148 Train Loss: 0.0049675, Val Loss: 0.0044941 +2024-11-08 00:44:47,148 Epoch 202/500 +2024-11-08 00:45:30,077 Current Learning Rate: 0.0064852079 +2024-11-08 00:45:31,003 Train Loss: 0.0046224, Val Loss: 0.0043385 +2024-11-08 00:45:31,004 Epoch 203/500 +2024-11-08 00:46:12,810 Current Learning Rate: 0.0064551808 +2024-11-08 00:46:12,811 Train Loss: 0.0046912, Val Loss: 0.0044550 +2024-11-08 00:46:12,812 Epoch 204/500 +2024-11-08 00:46:56,135 Current Learning Rate: 0.0064250963 +2024-11-08 00:46:57,110 Train Loss: 0.0044573, Val Loss: 0.0042256 +2024-11-08 00:46:57,111 Epoch 205/500 +2024-11-08 00:47:39,864 Current Learning Rate: 0.0063949555 +2024-11-08 00:47:40,831 Train Loss: 0.0043434, Val Loss: 0.0041385 +2024-11-08 00:47:40,831 Epoch 206/500 +2024-11-08 00:48:23,421 Current Learning Rate: 0.0063647597 +2024-11-08 00:48:24,359 Train Loss: 0.0043148, Val Loss: 0.0041067 +2024-11-08 00:48:24,359 Epoch 207/500 +2024-11-08 00:49:07,156 Current Learning Rate: 0.0063345099 +2024-11-08 00:49:07,156 Train Loss: 0.0043762, Val Loss: 0.0044643 +2024-11-08 00:49:07,157 Epoch 208/500 +2024-11-08 00:49:49,924 Current Learning Rate: 0.0063042075 +2024-11-08 00:49:49,925 Train Loss: 0.0043904, Val Loss: 0.0041528 +2024-11-08 00:49:49,925 Epoch 209/500 +2024-11-08 00:50:32,995 Current Learning Rate: 0.0062738536 +2024-11-08 00:50:32,996 Train Loss: 0.0043048, Val Loss: 0.0041957 +2024-11-08 00:50:32,996 Epoch 210/500 +2024-11-08 00:51:15,955 Current Learning Rate: 0.0062434494 +2024-11-08 00:51:16,882 Train Loss: 0.0042058, Val Loss: 0.0039898 +2024-11-08 00:51:16,882 Epoch 211/500 +2024-11-08 00:51:59,865 
Current Learning Rate: 0.0062129962 +2024-11-08 00:52:00,765 Train Loss: 0.0041541, Val Loss: 0.0039565 +2024-11-08 00:52:00,765 Epoch 212/500 +2024-11-08 00:52:43,845 Current Learning Rate: 0.0061824950 +2024-11-08 00:52:43,846 Train Loss: 0.0041081, Val Loss: 0.0039698 +2024-11-08 00:52:43,847 Epoch 213/500 +2024-11-08 00:53:26,315 Current Learning Rate: 0.0061519471 +2024-11-08 00:53:26,315 Train Loss: 0.0040916, Val Loss: 0.0039911 +2024-11-08 00:53:26,315 Epoch 214/500 +2024-11-08 00:54:09,131 Current Learning Rate: 0.0061213538 +2024-11-08 00:54:10,102 Train Loss: 0.0040633, Val Loss: 0.0039556 +2024-11-08 00:54:10,102 Epoch 215/500 +2024-11-08 00:54:51,944 Current Learning Rate: 0.0060907162 +2024-11-08 00:54:51,945 Train Loss: 0.0041073, Val Loss: 0.0040040 +2024-11-08 00:54:51,945 Epoch 216/500 +2024-11-08 00:55:35,033 Current Learning Rate: 0.0060600355 +2024-11-08 00:55:35,941 Train Loss: 0.0040960, Val Loss: 0.0039461 +2024-11-08 00:55:35,942 Epoch 217/500 +2024-11-08 00:56:17,739 Current Learning Rate: 0.0060293130 +2024-11-08 00:56:18,659 Train Loss: 0.0039874, Val Loss: 0.0038406 +2024-11-08 00:56:18,660 Epoch 218/500 +2024-11-08 00:57:00,475 Current Learning Rate: 0.0059985499 +2024-11-08 00:57:00,476 Train Loss: 0.0039897, Val Loss: 0.0038800 +2024-11-08 00:57:00,476 Epoch 219/500 +2024-11-08 00:57:43,726 Current Learning Rate: 0.0059677473 +2024-11-08 00:57:43,727 Train Loss: 0.0042972, Val Loss: 0.0042721 +2024-11-08 00:57:43,727 Epoch 220/500 +2024-11-08 00:58:26,828 Current Learning Rate: 0.0059369066 +2024-11-08 00:58:26,830 Train Loss: 0.0041985, Val Loss: 0.0043838 +2024-11-08 00:58:26,830 Epoch 221/500 +2024-11-08 00:59:10,609 Current Learning Rate: 0.0059060288 +2024-11-08 00:59:10,609 Train Loss: 0.0040493, Val Loss: 0.0039829 +2024-11-08 00:59:10,609 Epoch 222/500 +2024-11-08 00:59:53,489 Current Learning Rate: 0.0058751153 +2024-11-08 00:59:54,448 Train Loss: 0.0038319, Val Loss: 0.0038077 +2024-11-08 00:59:54,449 Epoch 223/500 
+2024-11-08 01:00:36,917 Current Learning Rate: 0.0058441672 +2024-11-08 01:00:36,918 Train Loss: 0.0040734, Val Loss: 0.0038241 +2024-11-08 01:00:36,918 Epoch 224/500 +2024-11-08 01:01:20,220 Current Learning Rate: 0.0058131858 +2024-11-08 01:01:21,164 Train Loss: 0.0038879, Val Loss: 0.0037257 +2024-11-08 01:01:21,164 Epoch 225/500 +2024-11-08 01:02:03,897 Current Learning Rate: 0.0057821723 +2024-11-08 01:02:04,857 Train Loss: 0.0037249, Val Loss: 0.0036284 +2024-11-08 01:02:04,857 Epoch 226/500 +2024-11-08 01:02:47,554 Current Learning Rate: 0.0057511279 +2024-11-08 01:02:47,556 Train Loss: 0.0037179, Val Loss: 0.0037187 +2024-11-08 01:02:47,556 Epoch 227/500 +2024-11-08 01:03:30,651 Current Learning Rate: 0.0057200539 +2024-11-08 01:03:30,652 Train Loss: 0.0037466, Val Loss: 0.0036335 +2024-11-08 01:03:30,652 Epoch 228/500 +2024-11-08 01:04:12,876 Current Learning Rate: 0.0056889515 +2024-11-08 01:04:12,876 Train Loss: 0.0037025, Val Loss: 0.0036742 +2024-11-08 01:04:12,876 Epoch 229/500 +2024-11-08 01:04:55,874 Current Learning Rate: 0.0056578218 +2024-11-08 01:04:56,793 Train Loss: 0.0037014, Val Loss: 0.0035937 +2024-11-08 01:04:56,793 Epoch 230/500 +2024-11-08 01:05:38,645 Current Learning Rate: 0.0056266662 +2024-11-08 01:05:38,646 Train Loss: 0.0036929, Val Loss: 0.0037805 +2024-11-08 01:05:38,646 Epoch 231/500 +2024-11-08 01:06:21,313 Current Learning Rate: 0.0055954858 +2024-11-08 01:06:22,248 Train Loss: 0.0036820, Val Loss: 0.0035636 +2024-11-08 01:06:22,248 Epoch 232/500 +2024-11-08 01:07:04,249 Current Learning Rate: 0.0055642819 +2024-11-08 01:07:04,250 Train Loss: 0.0036421, Val Loss: 0.0035730 +2024-11-08 01:07:04,250 Epoch 233/500 +2024-11-08 01:07:46,680 Current Learning Rate: 0.0055330558 +2024-11-08 01:07:47,591 Train Loss: 0.0035876, Val Loss: 0.0035221 +2024-11-08 01:07:47,592 Epoch 234/500 +2024-11-08 01:08:28,836 Current Learning Rate: 0.0055018086 +2024-11-08 01:08:29,751 Train Loss: 0.0035359, Val Loss: 0.0034464 +2024-11-08 
01:08:29,752 Epoch 235/500 +2024-11-08 01:09:12,370 Current Learning Rate: 0.0054705416 +2024-11-08 01:09:12,371 Train Loss: 0.0035811, Val Loss: 0.0035625 +2024-11-08 01:09:12,371 Epoch 236/500 +2024-11-08 01:09:55,451 Current Learning Rate: 0.0054392560 +2024-11-08 01:09:56,382 Train Loss: 0.0035540, Val Loss: 0.0034231 +2024-11-08 01:09:56,382 Epoch 237/500 +2024-11-08 01:10:38,942 Current Learning Rate: 0.0054079531 +2024-11-08 01:10:38,943 Train Loss: 0.0036245, Val Loss: 0.0037741 +2024-11-08 01:10:38,943 Epoch 238/500 +2024-11-08 01:11:21,877 Current Learning Rate: 0.0053766340 +2024-11-08 01:11:21,879 Train Loss: 0.0036562, Val Loss: 0.0034597 +2024-11-08 01:11:21,879 Epoch 239/500 +2024-11-08 01:12:04,831 Current Learning Rate: 0.0053453001 +2024-11-08 01:12:04,832 Train Loss: 0.0033883, Val Loss: 0.0034372 +2024-11-08 01:12:04,833 Epoch 240/500 +2024-11-08 01:12:47,752 Current Learning Rate: 0.0053139526 +2024-11-08 01:12:47,753 Train Loss: 0.0034333, Val Loss: 0.0035303 +2024-11-08 01:12:47,753 Epoch 241/500 +2024-11-08 01:13:30,666 Current Learning Rate: 0.0052825927 +2024-11-08 01:13:31,643 Train Loss: 0.0034895, Val Loss: 0.0034085 +2024-11-08 01:13:31,643 Epoch 242/500 +2024-11-08 01:14:14,307 Current Learning Rate: 0.0052512216 +2024-11-08 01:14:14,308 Train Loss: 0.0033472, Val Loss: 0.0034433 +2024-11-08 01:14:14,308 Epoch 243/500 +2024-11-08 01:14:57,568 Current Learning Rate: 0.0052198406 +2024-11-08 01:14:58,524 Train Loss: 0.0033367, Val Loss: 0.0033383 +2024-11-08 01:14:58,525 Epoch 244/500 +2024-11-08 01:15:41,259 Current Learning Rate: 0.0051884509 +2024-11-08 01:15:42,226 Train Loss: 0.0032853, Val Loss: 0.0032650 +2024-11-08 01:15:42,226 Epoch 245/500 +2024-11-08 01:16:24,114 Current Learning Rate: 0.0051570538 +2024-11-08 01:16:24,115 Train Loss: 0.0033493, Val Loss: 0.0032697 +2024-11-08 01:16:24,115 Epoch 246/500 +2024-11-08 01:17:07,834 Current Learning Rate: 0.0051256505 +2024-11-08 01:17:07,834 Train Loss: 0.0033674, Val Loss: 
0.0033697 +2024-11-08 01:17:07,834 Epoch 247/500 +2024-11-08 01:17:50,717 Current Learning Rate: 0.0050942422 +2024-11-08 01:17:51,649 Train Loss: 0.0033547, Val Loss: 0.0032349 +2024-11-08 01:17:51,649 Epoch 248/500 +2024-11-08 01:18:33,709 Current Learning Rate: 0.0050628302 +2024-11-08 01:18:33,710 Train Loss: 0.0033681, Val Loss: 0.0032384 +2024-11-08 01:18:33,711 Epoch 249/500 +2024-11-08 01:19:16,551 Current Learning Rate: 0.0050314157 +2024-11-08 01:19:16,553 Train Loss: 0.0032533, Val Loss: 0.0033039 +2024-11-08 01:19:16,553 Epoch 250/500 +2024-11-08 01:19:59,261 Current Learning Rate: 0.0050000000 +2024-11-08 01:20:00,221 Train Loss: 0.0032788, Val Loss: 0.0032263 +2024-11-08 01:20:00,227 Epoch 251/500 +2024-11-08 01:20:42,237 Current Learning Rate: 0.0049685843 +2024-11-08 01:20:42,238 Train Loss: 0.0033295, Val Loss: 0.0033819 +2024-11-08 01:20:42,239 Epoch 252/500 +2024-11-08 01:21:24,811 Current Learning Rate: 0.0049371698 +2024-11-08 01:21:24,811 Train Loss: 0.0033042, Val Loss: 0.0032855 +2024-11-08 01:21:24,811 Epoch 253/500 +2024-11-08 01:22:07,851 Current Learning Rate: 0.0049057578 +2024-11-08 01:22:08,809 Train Loss: 0.0031218, Val Loss: 0.0031175 +2024-11-08 01:22:08,810 Epoch 254/500 +2024-11-08 01:22:50,893 Current Learning Rate: 0.0048743495 +2024-11-08 01:22:50,894 Train Loss: 0.0031184, Val Loss: 0.0031945 +2024-11-08 01:22:50,894 Epoch 255/500 +2024-11-08 01:23:33,866 Current Learning Rate: 0.0048429462 +2024-11-08 01:23:34,794 Train Loss: 0.0031108, Val Loss: 0.0030865 +2024-11-08 01:23:34,794 Epoch 256/500 +2024-11-08 01:24:16,734 Current Learning Rate: 0.0048115491 +2024-11-08 01:24:16,735 Train Loss: 0.0036382, Val Loss: 0.0034433 +2024-11-08 01:24:16,735 Epoch 257/500 +2024-11-08 01:24:59,993 Current Learning Rate: 0.0047801594 +2024-11-08 01:24:59,993 Train Loss: 0.0032260, Val Loss: 0.0032745 +2024-11-08 01:24:59,994 Epoch 258/500 +2024-11-08 01:25:43,279 Current Learning Rate: 0.0047487784 +2024-11-08 01:25:43,285 Train Loss: 
0.0031652, Val Loss: 0.0033219 +2024-11-08 01:25:43,285 Epoch 259/500 +2024-11-08 01:26:25,688 Current Learning Rate: 0.0047174073 +2024-11-08 01:26:25,690 Train Loss: 0.0030733, Val Loss: 0.0032064 +2024-11-08 01:26:25,690 Epoch 260/500 +2024-11-08 01:27:08,444 Current Learning Rate: 0.0046860474 +2024-11-08 01:27:08,445 Train Loss: 0.0030425, Val Loss: 0.0031776 +2024-11-08 01:27:08,445 Epoch 261/500 +2024-11-08 01:27:51,541 Current Learning Rate: 0.0046546999 +2024-11-08 01:27:51,542 Train Loss: 0.0031654, Val Loss: 0.0032469 +2024-11-08 01:27:51,542 Epoch 262/500 +2024-11-08 01:28:34,582 Current Learning Rate: 0.0046233660 +2024-11-08 01:28:34,583 Train Loss: 0.0033401, Val Loss: 0.0034137 +2024-11-08 01:28:34,583 Epoch 263/500 +2024-11-08 01:29:17,565 Current Learning Rate: 0.0045920469 +2024-11-08 01:29:17,566 Train Loss: 0.0031818, Val Loss: 0.0031580 +2024-11-08 01:29:17,567 Epoch 264/500 +2024-11-08 01:30:00,771 Current Learning Rate: 0.0045607440 +2024-11-08 01:30:01,726 Train Loss: 0.0030530, Val Loss: 0.0030074 +2024-11-08 01:30:01,727 Epoch 265/500 +2024-11-08 01:30:44,152 Current Learning Rate: 0.0045294584 +2024-11-08 01:30:44,153 Train Loss: 0.0030204, Val Loss: 0.0031026 +2024-11-08 01:30:44,154 Epoch 266/500 +2024-11-08 01:31:27,336 Current Learning Rate: 0.0044981914 +2024-11-08 01:31:27,337 Train Loss: 0.0030143, Val Loss: 0.0030419 +2024-11-08 01:31:27,337 Epoch 267/500 +2024-11-08 01:32:10,115 Current Learning Rate: 0.0044669442 +2024-11-08 01:32:10,116 Train Loss: 0.0030092, Val Loss: 0.0030110 +2024-11-08 01:32:10,116 Epoch 268/500 +2024-11-08 01:32:52,995 Current Learning Rate: 0.0044357181 +2024-11-08 01:32:52,995 Train Loss: 0.0030129, Val Loss: 0.0030147 +2024-11-08 01:32:52,996 Epoch 269/500 +2024-11-08 01:33:36,069 Current Learning Rate: 0.0044045142 +2024-11-08 01:33:36,990 Train Loss: 0.0028804, Val Loss: 0.0029191 +2024-11-08 01:33:36,990 Epoch 270/500 +2024-11-08 01:34:18,944 Current Learning Rate: 0.0043733338 +2024-11-08 
01:34:19,868 Train Loss: 0.0028698, Val Loss: 0.0028887 +2024-11-08 01:34:19,868 Epoch 271/500 +2024-11-08 01:35:02,025 Current Learning Rate: 0.0043421782 +2024-11-08 01:35:04,212 Train Loss: 0.0028867, Val Loss: 0.0028803 +2024-11-08 01:35:04,212 Epoch 272/500 +2024-11-08 01:35:45,810 Current Learning Rate: 0.0043110485 +2024-11-08 01:35:45,811 Train Loss: 0.0029417, Val Loss: 0.0029033 +2024-11-08 01:35:45,811 Epoch 273/500 +2024-11-08 01:36:29,178 Current Learning Rate: 0.0042799461 +2024-11-08 01:36:30,092 Train Loss: 0.0028996, Val Loss: 0.0028719 +2024-11-08 01:36:30,092 Epoch 274/500 +2024-11-08 01:37:12,740 Current Learning Rate: 0.0042488721 +2024-11-08 01:37:12,741 Train Loss: 0.0028168, Val Loss: 0.0028749 +2024-11-08 01:37:12,741 Epoch 275/500 +2024-11-08 01:37:55,621 Current Learning Rate: 0.0042178277 +2024-11-08 01:37:55,621 Train Loss: 0.0028263, Val Loss: 0.0028951 +2024-11-08 01:37:55,621 Epoch 276/500 +2024-11-08 01:38:38,214 Current Learning Rate: 0.0041868142 +2024-11-08 01:38:38,215 Train Loss: 0.0028179, Val Loss: 0.0029661 +2024-11-08 01:38:38,215 Epoch 277/500 +2024-11-08 01:39:21,529 Current Learning Rate: 0.0041558328 +2024-11-08 01:39:21,529 Train Loss: 0.0028070, Val Loss: 0.0029726 +2024-11-08 01:39:21,530 Epoch 278/500 +2024-11-08 01:40:04,572 Current Learning Rate: 0.0041248847 +2024-11-08 01:40:05,517 Train Loss: 0.0027948, Val Loss: 0.0028514 +2024-11-08 01:40:05,517 Epoch 279/500 +2024-11-08 01:40:48,068 Current Learning Rate: 0.0040939712 +2024-11-08 01:40:48,976 Train Loss: 0.0028953, Val Loss: 0.0028033 +2024-11-08 01:40:48,976 Epoch 280/500 +2024-11-08 01:41:31,579 Current Learning Rate: 0.0040630934 +2024-11-08 01:41:31,580 Train Loss: 0.0028294, Val Loss: 0.0028310 +2024-11-08 01:41:31,580 Epoch 281/500 +2024-11-08 01:42:14,632 Current Learning Rate: 0.0040322527 +2024-11-08 01:42:14,633 Train Loss: 0.0027343, Val Loss: 0.0028069 +2024-11-08 01:42:14,633 Epoch 282/500 +2024-11-08 01:42:57,495 Current Learning Rate: 
0.0040014501 +2024-11-08 01:42:57,495 Train Loss: 0.0028285, Val Loss: 0.0028769 +2024-11-08 01:42:57,495 Epoch 283/500 +2024-11-08 01:43:40,285 Current Learning Rate: 0.0039706870 +2024-11-08 01:43:41,207 Train Loss: 0.0027319, Val Loss: 0.0027515 +2024-11-08 01:43:41,207 Epoch 284/500 +2024-11-08 01:44:23,877 Current Learning Rate: 0.0039399645 +2024-11-08 01:44:24,772 Train Loss: 0.0027280, Val Loss: 0.0027314 +2024-11-08 01:44:24,773 Epoch 285/500 +2024-11-08 01:45:07,588 Current Learning Rate: 0.0039092838 +2024-11-08 01:45:08,495 Train Loss: 0.0026949, Val Loss: 0.0027240 +2024-11-08 01:45:08,495 Epoch 286/500 +2024-11-08 01:45:51,233 Current Learning Rate: 0.0038786462 +2024-11-08 01:45:51,234 Train Loss: 0.0026612, Val Loss: 0.0027309 +2024-11-08 01:45:51,234 Epoch 287/500 +2024-11-08 01:46:34,229 Current Learning Rate: 0.0038480529 +2024-11-08 01:46:34,230 Train Loss: 0.0027216, Val Loss: 0.0029261 +2024-11-08 01:46:34,230 Epoch 288/500 +2024-11-08 01:47:17,435 Current Learning Rate: 0.0038175050 +2024-11-08 01:47:18,358 Train Loss: 0.0026921, Val Loss: 0.0026978 +2024-11-08 01:47:18,359 Epoch 289/500 +2024-11-08 01:48:01,053 Current Learning Rate: 0.0037870038 +2024-11-08 01:48:01,054 Train Loss: 0.0027452, Val Loss: 0.0027795 +2024-11-08 01:48:01,054 Epoch 290/500 +2024-11-08 01:48:44,164 Current Learning Rate: 0.0037565506 +2024-11-08 01:48:45,051 Train Loss: 0.0026693, Val Loss: 0.0026841 +2024-11-08 01:48:45,051 Epoch 291/500 +2024-11-08 01:49:27,840 Current Learning Rate: 0.0037261464 +2024-11-08 01:49:27,841 Train Loss: 0.0026630, Val Loss: 0.0026941 +2024-11-08 01:49:27,842 Epoch 292/500 +2024-11-08 01:50:10,773 Current Learning Rate: 0.0036957925 +2024-11-08 01:50:11,720 Train Loss: 0.0026150, Val Loss: 0.0026761 +2024-11-08 01:50:11,720 Epoch 293/500 +2024-11-08 01:50:54,462 Current Learning Rate: 0.0036654901 +2024-11-08 01:50:54,463 Train Loss: 0.0026499, Val Loss: 0.0026927 +2024-11-08 01:50:54,463 Epoch 294/500 +2024-11-08 01:51:36,886 
Current Learning Rate: 0.0036352403 +2024-11-08 01:51:37,836 Train Loss: 0.0026793, Val Loss: 0.0026594 +2024-11-08 01:51:37,836 Epoch 295/500 +2024-11-08 01:52:19,763 Current Learning Rate: 0.0036050445 +2024-11-08 01:52:19,764 Train Loss: 0.0026303, Val Loss: 0.0027209 +2024-11-08 01:52:19,764 Epoch 296/500 +2024-11-08 01:53:02,695 Current Learning Rate: 0.0035749037 +2024-11-08 01:53:02,697 Train Loss: 0.0026265, Val Loss: 0.0026800 +2024-11-08 01:53:02,698 Epoch 297/500 +2024-11-08 01:53:45,551 Current Learning Rate: 0.0035448192 +2024-11-08 01:53:45,553 Train Loss: 0.0026187, Val Loss: 0.0027020 +2024-11-08 01:53:45,553 Epoch 298/500 +2024-11-08 01:54:28,358 Current Learning Rate: 0.0035147921 +2024-11-08 01:54:29,285 Train Loss: 0.0025764, Val Loss: 0.0026144 +2024-11-08 01:54:29,286 Epoch 299/500 +2024-11-08 01:55:11,450 Current Learning Rate: 0.0034848237 +2024-11-08 01:55:11,451 Train Loss: 0.0026466, Val Loss: 0.0026284 +2024-11-08 01:55:11,451 Epoch 300/500 +2024-11-08 01:55:54,248 Current Learning Rate: 0.0034549150 +2024-11-08 01:55:54,248 Train Loss: 0.0025195, Val Loss: 0.0026194 +2024-11-08 01:55:54,249 Epoch 301/500 +2024-11-08 01:56:37,326 Current Learning Rate: 0.0034250674 +2024-11-08 01:56:38,258 Train Loss: 0.0025307, Val Loss: 0.0026074 +2024-11-08 01:56:38,259 Epoch 302/500 +2024-11-08 01:57:20,813 Current Learning Rate: 0.0033952820 +2024-11-08 01:57:21,790 Train Loss: 0.0025387, Val Loss: 0.0025979 +2024-11-08 01:57:21,790 Epoch 303/500 +2024-11-08 01:58:04,607 Current Learning Rate: 0.0033655599 +2024-11-08 01:58:05,564 Train Loss: 0.0025362, Val Loss: 0.0025861 +2024-11-08 01:58:05,564 Epoch 304/500 +2024-11-08 01:58:48,077 Current Learning Rate: 0.0033359023 +2024-11-08 01:58:48,983 Train Loss: 0.0025322, Val Loss: 0.0025666 +2024-11-08 01:58:48,983 Epoch 305/500 +2024-11-08 01:59:31,723 Current Learning Rate: 0.0033063104 +2024-11-08 01:59:31,724 Train Loss: 0.0025310, Val Loss: 0.0026316 +2024-11-08 01:59:31,724 Epoch 306/500 
+2024-11-08 02:00:14,565 Current Learning Rate: 0.0032767854 +2024-11-08 02:00:14,566 Train Loss: 0.0025621, Val Loss: 0.0025807 +2024-11-08 02:00:14,566 Epoch 307/500 +2024-11-08 02:00:57,956 Current Learning Rate: 0.0032473284 +2024-11-08 02:00:57,957 Train Loss: 0.0025565, Val Loss: 0.0026114 +2024-11-08 02:00:57,958 Epoch 308/500 +2024-11-08 02:01:41,146 Current Learning Rate: 0.0032179406 +2024-11-08 02:01:42,129 Train Loss: 0.0024896, Val Loss: 0.0025449 +2024-11-08 02:01:42,129 Epoch 309/500 +2024-11-08 02:02:24,549 Current Learning Rate: 0.0031886232 +2024-11-08 02:02:24,550 Train Loss: 0.0024695, Val Loss: 0.0025863 +2024-11-08 02:02:24,550 Epoch 310/500 +2024-11-08 02:03:07,221 Current Learning Rate: 0.0031593772 +2024-11-08 02:03:07,222 Train Loss: 0.0024845, Val Loss: 0.0025474 +2024-11-08 02:03:07,222 Epoch 311/500 +2024-11-08 02:03:50,070 Current Learning Rate: 0.0031302040 +2024-11-08 02:03:50,071 Train Loss: 0.0024813, Val Loss: 0.0025720 +2024-11-08 02:03:50,071 Epoch 312/500 +2024-11-08 02:04:32,775 Current Learning Rate: 0.0031011045 +2024-11-08 02:04:32,776 Train Loss: 0.0024523, Val Loss: 0.0025813 +2024-11-08 02:04:32,776 Epoch 313/500 +2024-11-08 02:05:15,634 Current Learning Rate: 0.0030720800 +2024-11-08 02:05:16,548 Train Loss: 0.0024558, Val Loss: 0.0025365 +2024-11-08 02:05:16,548 Epoch 314/500 +2024-11-08 02:05:58,411 Current Learning Rate: 0.0030431317 +2024-11-08 02:05:59,318 Train Loss: 0.0024416, Val Loss: 0.0024905 +2024-11-08 02:05:59,318 Epoch 315/500 +2024-11-08 02:06:41,350 Current Learning Rate: 0.0030142605 +2024-11-08 02:06:42,336 Train Loss: 0.0024359, Val Loss: 0.0024858 +2024-11-08 02:06:42,336 Epoch 316/500 +2024-11-08 02:07:24,288 Current Learning Rate: 0.0029854678 +2024-11-08 02:07:24,290 Train Loss: 0.0024819, Val Loss: 0.0025097 +2024-11-08 02:07:24,290 Epoch 317/500 +2024-11-08 02:08:07,873 Current Learning Rate: 0.0029567546 +2024-11-08 02:08:08,850 Train Loss: 0.0023871, Val Loss: 0.0024762 +2024-11-08 
02:08:08,851 Epoch 318/500 +2024-11-08 02:08:52,062 Current Learning Rate: 0.0029281221 +2024-11-08 02:08:52,063 Train Loss: 0.0023623, Val Loss: 0.0024808 +2024-11-08 02:08:52,063 Epoch 319/500 +2024-11-08 02:09:34,073 Current Learning Rate: 0.0028995714 +2024-11-08 02:09:34,987 Train Loss: 0.0024143, Val Loss: 0.0024632 +2024-11-08 02:09:34,987 Epoch 320/500 +2024-11-08 02:10:17,181 Current Learning Rate: 0.0028711035 +2024-11-08 02:10:18,131 Train Loss: 0.0024126, Val Loss: 0.0024595 +2024-11-08 02:10:18,131 Epoch 321/500 +2024-11-08 02:10:59,663 Current Learning Rate: 0.0028427198 +2024-11-08 02:10:59,664 Train Loss: 0.0023715, Val Loss: 0.0024972 +2024-11-08 02:10:59,664 Epoch 322/500 +2024-11-08 02:11:42,702 Current Learning Rate: 0.0028144212 +2024-11-08 02:11:43,607 Train Loss: 0.0024014, Val Loss: 0.0024508 +2024-11-08 02:11:43,607 Epoch 323/500 +2024-11-08 02:12:25,305 Current Learning Rate: 0.0027862088 +2024-11-08 02:12:26,174 Train Loss: 0.0023354, Val Loss: 0.0024501 +2024-11-08 02:12:26,174 Epoch 324/500 +2024-11-08 02:13:07,870 Current Learning Rate: 0.0027580839 +2024-11-08 02:13:07,871 Train Loss: 0.0024019, Val Loss: 0.0024532 +2024-11-08 02:13:07,871 Epoch 325/500 +2024-11-08 02:13:51,212 Current Learning Rate: 0.0027300475 +2024-11-08 02:13:51,213 Train Loss: 0.0024504, Val Loss: 0.0025366 +2024-11-08 02:13:51,213 Epoch 326/500 +2024-11-08 02:14:33,632 Current Learning Rate: 0.0027021007 +2024-11-08 02:14:33,646 Train Loss: 0.0024147, Val Loss: 0.0025193 +2024-11-08 02:14:33,647 Epoch 327/500 +2024-11-08 02:15:16,497 Current Learning Rate: 0.0026742446 +2024-11-08 02:15:16,499 Train Loss: 0.0024114, Val Loss: 0.0025262 +2024-11-08 02:15:16,499 Epoch 328/500 +2024-11-08 02:15:59,557 Current Learning Rate: 0.0026464803 +2024-11-08 02:15:59,559 Train Loss: 0.0023630, Val Loss: 0.0024666 +2024-11-08 02:15:59,559 Epoch 329/500 +2024-11-08 02:16:43,123 Current Learning Rate: 0.0026188090 +2024-11-08 02:16:43,124 Train Loss: 0.0023734, Val Loss: 
0.0024769 +2024-11-08 02:16:43,124 Epoch 330/500 +2024-11-08 02:17:25,723 Current Learning Rate: 0.0025912316 +2024-11-08 02:17:26,606 Train Loss: 0.0023692, Val Loss: 0.0024375 +2024-11-08 02:17:26,607 Epoch 331/500 +2024-11-08 02:18:08,684 Current Learning Rate: 0.0025637494 +2024-11-08 02:18:09,654 Train Loss: 0.0024236, Val Loss: 0.0024031 +2024-11-08 02:18:09,654 Epoch 332/500 +2024-11-08 02:18:52,444 Current Learning Rate: 0.0025363633 +2024-11-08 02:18:52,445 Train Loss: 0.0023295, Val Loss: 0.0024684 +2024-11-08 02:18:52,445 Epoch 333/500 +2024-11-08 02:19:35,503 Current Learning Rate: 0.0025090745 +2024-11-08 02:19:35,503 Train Loss: 0.0023807, Val Loss: 0.0025123 +2024-11-08 02:19:35,503 Epoch 334/500 +2024-11-08 02:20:18,342 Current Learning Rate: 0.0024818840 +2024-11-08 02:20:18,343 Train Loss: 0.0023584, Val Loss: 0.0024697 +2024-11-08 02:20:18,343 Epoch 335/500 +2024-11-08 02:21:01,720 Current Learning Rate: 0.0024547929 +2024-11-08 02:21:03,845 Train Loss: 0.0024140, Val Loss: 0.0023819 +2024-11-08 02:21:03,846 Epoch 336/500 +2024-11-08 02:21:46,677 Current Learning Rate: 0.0024278023 +2024-11-08 02:21:46,678 Train Loss: 0.0023144, Val Loss: 0.0023926 +2024-11-08 02:21:46,679 Epoch 337/500 +2024-11-08 02:22:29,085 Current Learning Rate: 0.0024009133 +2024-11-08 02:22:29,981 Train Loss: 0.0022992, Val Loss: 0.0023373 +2024-11-08 02:22:29,982 Epoch 338/500 +2024-11-08 02:23:12,452 Current Learning Rate: 0.0023741269 +2024-11-08 02:23:12,453 Train Loss: 0.0022943, Val Loss: 0.0023511 +2024-11-08 02:23:12,453 Epoch 339/500 +2024-11-08 02:23:55,185 Current Learning Rate: 0.0023474441 +2024-11-08 02:23:56,104 Train Loss: 0.0022536, Val Loss: 0.0023330 +2024-11-08 02:23:56,105 Epoch 340/500 +2024-11-08 02:24:38,069 Current Learning Rate: 0.0023208660 +2024-11-08 02:24:38,966 Train Loss: 0.0022210, Val Loss: 0.0023247 +2024-11-08 02:24:38,966 Epoch 341/500 +2024-11-08 02:25:20,850 Current Learning Rate: 0.0022943937 +2024-11-08 02:25:21,800 Train Loss: 
0.0022269, Val Loss: 0.0023149 +2024-11-08 02:25:21,800 Epoch 342/500 +2024-11-08 02:26:03,607 Current Learning Rate: 0.0022680283 +2024-11-08 02:26:03,608 Train Loss: 0.0022980, Val Loss: 0.0023241 +2024-11-08 02:26:03,608 Epoch 343/500 +2024-11-08 02:26:46,863 Current Learning Rate: 0.0022417706 +2024-11-08 02:26:47,849 Train Loss: 0.0022748, Val Loss: 0.0023089 +2024-11-08 02:26:47,850 Epoch 344/500 +2024-11-08 02:27:30,008 Current Learning Rate: 0.0022156219 +2024-11-08 02:27:30,009 Train Loss: 0.0022347, Val Loss: 0.0023171 +2024-11-08 02:27:30,009 Epoch 345/500 +2024-11-08 02:28:13,050 Current Learning Rate: 0.0021895831 +2024-11-08 02:28:13,926 Train Loss: 0.0022281, Val Loss: 0.0022987 +2024-11-08 02:28:13,926 Epoch 346/500 +2024-11-08 02:28:55,969 Current Learning Rate: 0.0021636553 +2024-11-08 02:28:55,971 Train Loss: 0.0022425, Val Loss: 0.0023086 +2024-11-08 02:28:55,971 Epoch 347/500 +2024-11-08 02:29:38,756 Current Learning Rate: 0.0021378394 +2024-11-08 02:29:39,650 Train Loss: 0.0022256, Val Loss: 0.0022867 +2024-11-08 02:29:39,651 Epoch 348/500 +2024-11-08 02:30:21,836 Current Learning Rate: 0.0021121365 +2024-11-08 02:30:22,757 Train Loss: 0.0021581, Val Loss: 0.0022750 +2024-11-08 02:30:22,757 Epoch 349/500 +2024-11-08 02:31:04,848 Current Learning Rate: 0.0020865476 +2024-11-08 02:31:05,725 Train Loss: 0.0021814, Val Loss: 0.0022734 +2024-11-08 02:31:05,725 Epoch 350/500 +2024-11-08 02:31:47,933 Current Learning Rate: 0.0020610737 +2024-11-08 02:31:47,934 Train Loss: 0.0021778, Val Loss: 0.0022743 +2024-11-08 02:31:47,934 Epoch 351/500 +2024-11-08 02:32:30,711 Current Learning Rate: 0.0020357159 +2024-11-08 02:32:31,587 Train Loss: 0.0022169, Val Loss: 0.0022684 +2024-11-08 02:32:31,588 Epoch 352/500 +2024-11-08 02:33:14,013 Current Learning Rate: 0.0020104751 +2024-11-08 02:33:14,014 Train Loss: 0.0021586, Val Loss: 0.0022750 +2024-11-08 02:33:14,014 Epoch 353/500 +2024-11-08 02:33:57,305 Current Learning Rate: 0.0019853523 +2024-11-08 
02:33:57,305 Train Loss: 0.0022150, Val Loss: 0.0022854 +2024-11-08 02:33:57,306 Epoch 354/500 +2024-11-08 02:34:40,097 Current Learning Rate: 0.0019603485 +2024-11-08 02:34:40,098 Train Loss: 0.0021774, Val Loss: 0.0022775 +2024-11-08 02:34:40,099 Epoch 355/500 +2024-11-08 02:35:23,131 Current Learning Rate: 0.0019354647 +2024-11-08 02:35:23,132 Train Loss: 0.0021493, Val Loss: 0.0022707 +2024-11-08 02:35:23,133 Epoch 356/500 +2024-11-08 02:36:06,200 Current Learning Rate: 0.0019107019 +2024-11-08 02:36:06,201 Train Loss: 0.0021664, Val Loss: 0.0022725 +2024-11-08 02:36:06,201 Epoch 357/500 +2024-11-08 02:36:49,437 Current Learning Rate: 0.0018860611 +2024-11-08 02:36:49,438 Train Loss: 0.0021720, Val Loss: 0.0022720 +2024-11-08 02:36:49,438 Epoch 358/500 +2024-11-08 02:37:31,708 Current Learning Rate: 0.0018615432 +2024-11-08 02:37:31,709 Train Loss: 0.0021683, Val Loss: 0.0022714 +2024-11-08 02:37:31,710 Epoch 359/500 +2024-11-08 02:38:14,718 Current Learning Rate: 0.0018371492 +2024-11-08 02:38:15,642 Train Loss: 0.0021925, Val Loss: 0.0022616 +2024-11-08 02:38:15,642 Epoch 360/500 +2024-11-08 02:38:57,573 Current Learning Rate: 0.0018128801 +2024-11-08 02:38:57,574 Train Loss: 0.0021806, Val Loss: 0.0022657 +2024-11-08 02:38:57,575 Epoch 361/500 +2024-11-08 02:39:41,029 Current Learning Rate: 0.0017887367 +2024-11-08 02:39:41,968 Train Loss: 0.0021662, Val Loss: 0.0022527 +2024-11-08 02:39:41,968 Epoch 362/500 +2024-11-08 02:40:23,904 Current Learning Rate: 0.0017647202 +2024-11-08 02:40:24,804 Train Loss: 0.0021331, Val Loss: 0.0022443 +2024-11-08 02:40:24,804 Epoch 363/500 +2024-11-08 02:41:06,935 Current Learning Rate: 0.0017408314 +2024-11-08 02:41:07,808 Train Loss: 0.0021321, Val Loss: 0.0022400 +2024-11-08 02:41:07,809 Epoch 364/500 +2024-11-08 02:41:50,038 Current Learning Rate: 0.0017170712 +2024-11-08 02:41:51,009 Train Loss: 0.0021359, Val Loss: 0.0022395 +2024-11-08 02:41:51,009 Epoch 365/500 +2024-11-08 02:42:34,106 Current Learning Rate: 
0.0016934407 +2024-11-08 02:42:35,098 Train Loss: 0.0021738, Val Loss: 0.0022383 +2024-11-08 02:42:35,098 Epoch 366/500 +2024-11-08 02:43:18,448 Current Learning Rate: 0.0016699407 +2024-11-08 02:43:19,426 Train Loss: 0.0020829, Val Loss: 0.0022075 +2024-11-08 02:43:19,426 Epoch 367/500 +2024-11-08 02:44:02,608 Current Learning Rate: 0.0016465721 +2024-11-08 02:44:03,575 Train Loss: 0.0020564, Val Loss: 0.0021997 +2024-11-08 02:44:03,576 Epoch 368/500 +2024-11-08 02:44:45,433 Current Learning Rate: 0.0016233360 +2024-11-08 02:44:45,433 Train Loss: 0.0020949, Val Loss: 0.0022048 +2024-11-08 02:44:45,434 Epoch 369/500 +2024-11-08 02:45:28,592 Current Learning Rate: 0.0016002331 +2024-11-08 02:45:29,457 Train Loss: 0.0020485, Val Loss: 0.0021979 +2024-11-08 02:45:29,458 Epoch 370/500 +2024-11-08 02:46:11,374 Current Learning Rate: 0.0015772645 +2024-11-08 02:46:12,259 Train Loss: 0.0020595, Val Loss: 0.0021909 +2024-11-08 02:46:12,259 Epoch 371/500 +2024-11-08 02:46:54,846 Current Learning Rate: 0.0015544310 +2024-11-08 02:46:54,847 Train Loss: 0.0021024, Val Loss: 0.0021933 +2024-11-08 02:46:54,848 Epoch 372/500 +2024-11-08 02:47:37,139 Current Learning Rate: 0.0015317335 +2024-11-08 02:47:37,139 Train Loss: 0.0021224, Val Loss: 0.0021939 +2024-11-08 02:47:37,139 Epoch 373/500 +2024-11-08 02:48:20,440 Current Learning Rate: 0.0015091729 +2024-11-08 02:48:20,440 Train Loss: 0.0020834, Val Loss: 0.0021939 +2024-11-08 02:48:20,441 Epoch 374/500 +2024-11-08 02:49:03,116 Current Learning Rate: 0.0014867502 +2024-11-08 02:49:04,043 Train Loss: 0.0020996, Val Loss: 0.0021828 +2024-11-08 02:49:04,043 Epoch 375/500 +2024-11-08 02:49:46,051 Current Learning Rate: 0.0014644661 +2024-11-08 02:49:46,904 Train Loss: 0.0020614, Val Loss: 0.0021774 +2024-11-08 02:49:46,904 Epoch 376/500 +2024-11-08 02:50:30,146 Current Learning Rate: 0.0014423216 +2024-11-08 02:50:31,067 Train Loss: 0.0020614, Val Loss: 0.0021703 +2024-11-08 02:50:31,067 Epoch 377/500 +2024-11-08 02:51:13,879 
Current Learning Rate: 0.0014203176 +2024-11-08 02:51:14,800 Train Loss: 0.0020286, Val Loss: 0.0021628 +2024-11-08 02:51:14,801 Epoch 378/500 +2024-11-08 02:51:57,277 Current Learning Rate: 0.0013984549 +2024-11-08 02:51:57,278 Train Loss: 0.0020562, Val Loss: 0.0021656 +2024-11-08 02:51:57,278 Epoch 379/500 +2024-11-08 02:52:39,972 Current Learning Rate: 0.0013767343 +2024-11-08 02:52:39,972 Train Loss: 0.0021161, Val Loss: 0.0021687 +2024-11-08 02:52:39,972 Epoch 380/500 +2024-11-08 02:53:23,275 Current Learning Rate: 0.0013551569 +2024-11-08 02:53:24,138 Train Loss: 0.0020256, Val Loss: 0.0021570 +2024-11-08 02:53:24,139 Epoch 381/500 +2024-11-08 02:54:06,426 Current Learning Rate: 0.0013337233 +2024-11-08 02:54:06,427 Train Loss: 0.0020641, Val Loss: 0.0021587 +2024-11-08 02:54:06,427 Epoch 382/500 +2024-11-08 02:54:49,601 Current Learning Rate: 0.0013124344 +2024-11-08 02:54:49,601 Train Loss: 0.0020250, Val Loss: 0.0021629 +2024-11-08 02:54:49,602 Epoch 383/500 +2024-11-08 02:55:33,939 Current Learning Rate: 0.0012912911 +2024-11-08 02:55:34,865 Train Loss: 0.0020530, Val Loss: 0.0021536 +2024-11-08 02:55:34,866 Epoch 384/500 +2024-11-08 02:56:17,929 Current Learning Rate: 0.0012702943 +2024-11-08 02:56:17,930 Train Loss: 0.0020520, Val Loss: 0.0021547 +2024-11-08 02:56:17,930 Epoch 385/500 +2024-11-08 02:57:00,183 Current Learning Rate: 0.0012494447 +2024-11-08 02:57:00,184 Train Loss: 0.0020536, Val Loss: 0.0021547 +2024-11-08 02:57:00,184 Epoch 386/500 +2024-11-08 02:57:43,369 Current Learning Rate: 0.0012287431 +2024-11-08 02:57:44,251 Train Loss: 0.0020622, Val Loss: 0.0021447 +2024-11-08 02:57:44,251 Epoch 387/500 +2024-11-08 02:58:26,402 Current Learning Rate: 0.0012081904 +2024-11-08 02:58:26,403 Train Loss: 0.0020455, Val Loss: 0.0021549 +2024-11-08 02:58:26,422 Epoch 388/500 +2024-11-08 02:59:09,085 Current Learning Rate: 0.0011877874 +2024-11-08 02:59:09,959 Train Loss: 0.0020113, Val Loss: 0.0021388 +2024-11-08 02:59:09,960 Epoch 389/500 
+2024-11-08 02:59:52,738 Current Learning Rate: 0.0011675350 +2024-11-08 02:59:53,661 Train Loss: 0.0020213, Val Loss: 0.0021299 +2024-11-08 02:59:53,661 Epoch 390/500 +2024-11-08 03:00:36,552 Current Learning Rate: 0.0011474338 +2024-11-08 03:00:37,494 Train Loss: 0.0020310, Val Loss: 0.0021253 +2024-11-08 03:00:37,495 Epoch 391/500 +2024-11-08 03:01:20,232 Current Learning Rate: 0.0011274847 +2024-11-08 03:01:21,168 Train Loss: 0.0019776, Val Loss: 0.0021215 +2024-11-08 03:01:21,169 Epoch 392/500 +2024-11-08 03:02:03,896 Current Learning Rate: 0.0011076885 +2024-11-08 03:02:03,897 Train Loss: 0.0020572, Val Loss: 0.0021241 +2024-11-08 03:02:03,897 Epoch 393/500 +2024-11-08 03:02:47,308 Current Learning Rate: 0.0010880459 +2024-11-08 03:02:47,308 Train Loss: 0.0019873, Val Loss: 0.0021226 +2024-11-08 03:02:47,309 Epoch 394/500 +2024-11-08 03:03:29,724 Current Learning Rate: 0.0010685578 +2024-11-08 03:03:29,724 Train Loss: 0.0019853, Val Loss: 0.0021358 +2024-11-08 03:03:29,724 Epoch 395/500 +2024-11-08 03:04:12,557 Current Learning Rate: 0.0010492249 +2024-11-08 03:04:12,558 Train Loss: 0.0019712, Val Loss: 0.0021481 +2024-11-08 03:04:12,558 Epoch 396/500 +2024-11-08 03:04:55,815 Current Learning Rate: 0.0010300480 +2024-11-08 03:04:55,816 Train Loss: 0.0019875, Val Loss: 0.0021515 +2024-11-08 03:04:55,816 Epoch 397/500 +2024-11-08 03:05:38,584 Current Learning Rate: 0.0010110278 +2024-11-08 03:05:38,585 Train Loss: 0.0020178, Val Loss: 0.0021356 +2024-11-08 03:05:38,585 Epoch 398/500 +2024-11-08 03:06:21,488 Current Learning Rate: 0.0009921651 +2024-11-08 03:06:22,463 Train Loss: 0.0019730, Val Loss: 0.0021026 +2024-11-08 03:06:22,464 Epoch 399/500 +2024-11-08 03:07:04,627 Current Learning Rate: 0.0009734606 +2024-11-08 03:07:05,599 Train Loss: 0.0020401, Val Loss: 0.0020996 +2024-11-08 03:07:05,600 Epoch 400/500 +2024-11-08 03:07:47,667 Current Learning Rate: 0.0009549150 +2024-11-08 03:07:48,576 Train Loss: 0.0019790, Val Loss: 0.0020945 +2024-11-08 
03:07:48,576 Epoch 401/500 +2024-11-08 03:08:30,487 Current Learning Rate: 0.0009365292 +2024-11-08 03:08:31,382 Train Loss: 0.0019772, Val Loss: 0.0020820 +2024-11-08 03:08:31,382 Epoch 402/500 +2024-11-08 03:09:13,527 Current Learning Rate: 0.0009183037 +2024-11-08 03:09:14,378 Train Loss: 0.0019532, Val Loss: 0.0020794 +2024-11-08 03:09:14,378 Epoch 403/500 +2024-11-08 03:09:57,173 Current Learning Rate: 0.0009002395 +2024-11-08 03:09:57,174 Train Loss: 0.0019340, Val Loss: 0.0020794 +2024-11-08 03:09:57,174 Epoch 404/500 +2024-11-08 03:10:39,934 Current Learning Rate: 0.0008823370 +2024-11-08 03:10:40,957 Train Loss: 0.0019301, Val Loss: 0.0020769 +2024-11-08 03:10:40,957 Epoch 405/500 +2024-11-08 03:11:23,636 Current Learning Rate: 0.0008645971 +2024-11-08 03:11:23,636 Train Loss: 0.0019756, Val Loss: 0.0020795 +2024-11-08 03:11:23,637 Epoch 406/500 +2024-11-08 03:12:06,832 Current Learning Rate: 0.0008470205 +2024-11-08 03:12:07,811 Train Loss: 0.0019526, Val Loss: 0.0020760 +2024-11-08 03:12:07,812 Epoch 407/500 +2024-11-08 03:12:50,900 Current Learning Rate: 0.0008296078 +2024-11-08 03:12:51,856 Train Loss: 0.0019354, Val Loss: 0.0020713 +2024-11-08 03:12:51,857 Epoch 408/500 +2024-11-08 03:13:34,738 Current Learning Rate: 0.0008123598 +2024-11-08 03:13:35,662 Train Loss: 0.0019533, Val Loss: 0.0020689 +2024-11-08 03:13:35,662 Epoch 409/500 +2024-11-08 03:14:18,350 Current Learning Rate: 0.0007952771 +2024-11-08 03:14:18,351 Train Loss: 0.0019848, Val Loss: 0.0020693 +2024-11-08 03:14:18,351 Epoch 410/500 +2024-11-08 03:15:01,488 Current Learning Rate: 0.0007783604 +2024-11-08 03:15:03,738 Train Loss: 0.0019504, Val Loss: 0.0020687 +2024-11-08 03:15:03,739 Epoch 411/500 +2024-11-08 03:15:46,475 Current Learning Rate: 0.0007616103 +2024-11-08 03:15:47,446 Train Loss: 0.0019496, Val Loss: 0.0020602 +2024-11-08 03:15:47,446 Epoch 412/500 +2024-11-08 03:16:30,411 Current Learning Rate: 0.0007450276 +2024-11-08 03:16:31,316 Train Loss: 0.0019875, Val Loss: 
0.0020539 +2024-11-08 03:16:31,316 Epoch 413/500 +2024-11-08 03:17:13,837 Current Learning Rate: 0.0007286128 +2024-11-08 03:17:14,791 Train Loss: 0.0019368, Val Loss: 0.0020493 +2024-11-08 03:17:14,791 Epoch 414/500 +2024-11-08 03:17:57,186 Current Learning Rate: 0.0007123667 +2024-11-08 03:17:57,187 Train Loss: 0.0019335, Val Loss: 0.0020808 +2024-11-08 03:17:57,187 Epoch 415/500 +2024-11-08 03:18:39,372 Current Learning Rate: 0.0006962899 +2024-11-08 03:18:39,373 Train Loss: 0.0019303, Val Loss: 0.0020682 +2024-11-08 03:18:39,373 Epoch 416/500 +2024-11-08 03:19:22,089 Current Learning Rate: 0.0006803829 +2024-11-08 03:19:22,089 Train Loss: 0.0019483, Val Loss: 0.0020531 +2024-11-08 03:19:22,089 Epoch 417/500 +2024-11-08 03:20:05,227 Current Learning Rate: 0.0006646465 +2024-11-08 03:20:05,228 Train Loss: 0.0019274, Val Loss: 0.0020536 +2024-11-08 03:20:05,228 Epoch 418/500 +2024-11-08 03:20:47,826 Current Learning Rate: 0.0006490812 +2024-11-08 03:20:47,827 Train Loss: 0.0019011, Val Loss: 0.0020503 +2024-11-08 03:20:47,827 Epoch 419/500 +2024-11-08 03:21:30,741 Current Learning Rate: 0.0006336877 +2024-11-08 03:21:31,661 Train Loss: 0.0019319, Val Loss: 0.0020479 +2024-11-08 03:21:31,661 Epoch 420/500 +2024-11-08 03:22:13,843 Current Learning Rate: 0.0006184666 +2024-11-08 03:22:13,844 Train Loss: 0.0019217, Val Loss: 0.0020517 +2024-11-08 03:22:13,844 Epoch 421/500 +2024-11-08 03:22:57,318 Current Learning Rate: 0.0006034184 +2024-11-08 03:22:57,319 Train Loss: 0.0019180, Val Loss: 0.0020485 +2024-11-08 03:22:57,319 Epoch 422/500 +2024-11-08 03:23:40,104 Current Learning Rate: 0.0005885439 +2024-11-08 03:23:41,007 Train Loss: 0.0019535, Val Loss: 0.0020471 +2024-11-08 03:23:41,008 Epoch 423/500 +2024-11-08 03:24:22,919 Current Learning Rate: 0.0005738434 +2024-11-08 03:24:23,845 Train Loss: 0.0019003, Val Loss: 0.0020424 +2024-11-08 03:24:23,845 Epoch 424/500 +2024-11-08 03:25:06,074 Current Learning Rate: 0.0005593178 +2024-11-08 03:25:07,020 Train Loss: 
0.0019005, Val Loss: 0.0020348 +2024-11-08 03:25:07,020 Epoch 425/500 +2024-11-08 03:25:49,349 Current Learning Rate: 0.0005449674 +2024-11-08 03:25:50,294 Train Loss: 0.0019160, Val Loss: 0.0020267 +2024-11-08 03:25:50,295 Epoch 426/500 +2024-11-08 03:26:32,618 Current Learning Rate: 0.0005307929 +2024-11-08 03:26:33,504 Train Loss: 0.0019771, Val Loss: 0.0020207 +2024-11-08 03:26:33,504 Epoch 427/500 +2024-11-08 03:27:15,877 Current Learning Rate: 0.0005167948 +2024-11-08 03:27:16,795 Train Loss: 0.0018834, Val Loss: 0.0020195 +2024-11-08 03:27:16,796 Epoch 428/500 +2024-11-08 03:27:58,566 Current Learning Rate: 0.0005029737 +2024-11-08 03:27:59,470 Train Loss: 0.0019240, Val Loss: 0.0020185 +2024-11-08 03:27:59,470 Epoch 429/500 +2024-11-08 03:28:42,288 Current Learning Rate: 0.0004893302 +2024-11-08 03:28:42,289 Train Loss: 0.0018934, Val Loss: 0.0020203 +2024-11-08 03:28:42,289 Epoch 430/500 +2024-11-08 03:29:24,353 Current Learning Rate: 0.0004758647 +2024-11-08 03:29:24,354 Train Loss: 0.0018826, Val Loss: 0.0020192 +2024-11-08 03:29:24,354 Epoch 431/500 +2024-11-08 03:30:07,498 Current Learning Rate: 0.0004625779 +2024-11-08 03:30:08,467 Train Loss: 0.0019006, Val Loss: 0.0020175 +2024-11-08 03:30:08,467 Epoch 432/500 +2024-11-08 03:30:50,700 Current Learning Rate: 0.0004494701 +2024-11-08 03:30:51,656 Train Loss: 0.0019101, Val Loss: 0.0020132 +2024-11-08 03:30:51,657 Epoch 433/500 +2024-11-08 03:31:33,800 Current Learning Rate: 0.0004365421 +2024-11-08 03:31:33,801 Train Loss: 0.0018850, Val Loss: 0.0020157 +2024-11-08 03:31:33,801 Epoch 434/500 +2024-11-08 03:32:17,676 Current Learning Rate: 0.0004237941 +2024-11-08 03:32:17,677 Train Loss: 0.0018760, Val Loss: 0.0020144 +2024-11-08 03:32:17,677 Epoch 435/500 +2024-11-08 03:32:59,763 Current Learning Rate: 0.0004112269 +2024-11-08 03:33:00,770 Train Loss: 0.0018896, Val Loss: 0.0020070 +2024-11-08 03:33:00,770 Epoch 436/500 +2024-11-08 03:33:42,959 Current Learning Rate: 0.0003988408 +2024-11-08 
03:33:43,899 Train Loss: 0.0018631, Val Loss: 0.0020029 +2024-11-08 03:33:43,899 Epoch 437/500 +2024-11-08 03:34:26,017 Current Learning Rate: 0.0003866363 +2024-11-08 03:34:26,017 Train Loss: 0.0018556, Val Loss: 0.0020055 +2024-11-08 03:34:26,018 Epoch 438/500 +2024-11-08 03:35:09,398 Current Learning Rate: 0.0003746140 +2024-11-08 03:35:10,319 Train Loss: 0.0018958, Val Loss: 0.0020002 +2024-11-08 03:35:10,319 Epoch 439/500 +2024-11-08 03:35:52,319 Current Learning Rate: 0.0003627742 +2024-11-08 03:35:53,234 Train Loss: 0.0018885, Val Loss: 0.0019998 +2024-11-08 03:35:53,234 Epoch 440/500 +2024-11-08 03:36:35,330 Current Learning Rate: 0.0003511176 +2024-11-08 03:36:36,209 Train Loss: 0.0019299, Val Loss: 0.0019981 +2024-11-08 03:36:36,210 Epoch 441/500 +2024-11-08 03:37:18,256 Current Learning Rate: 0.0003396444 +2024-11-08 03:37:19,196 Train Loss: 0.0018409, Val Loss: 0.0019967 +2024-11-08 03:37:19,196 Epoch 442/500 +2024-11-08 03:38:01,515 Current Learning Rate: 0.0003283553 +2024-11-08 03:38:03,349 Train Loss: 0.0018589, Val Loss: 0.0019958 +2024-11-08 03:38:03,349 Epoch 443/500 +2024-11-08 03:38:44,801 Current Learning Rate: 0.0003172506 +2024-11-08 03:38:44,802 Train Loss: 0.0019458, Val Loss: 0.0019973 +2024-11-08 03:38:44,802 Epoch 444/500 +2024-11-08 03:39:28,311 Current Learning Rate: 0.0003063307 +2024-11-08 03:39:28,312 Train Loss: 0.0018902, Val Loss: 0.0019973 +2024-11-08 03:39:28,313 Epoch 445/500 +2024-11-08 03:40:11,675 Current Learning Rate: 0.0002955962 +2024-11-08 03:40:12,655 Train Loss: 0.0018916, Val Loss: 0.0019955 +2024-11-08 03:40:12,655 Epoch 446/500 +2024-11-08 03:40:55,442 Current Learning Rate: 0.0002850473 +2024-11-08 03:40:56,338 Train Loss: 0.0018387, Val Loss: 0.0019915 +2024-11-08 03:40:56,338 Epoch 447/500 +2024-11-08 03:41:38,955 Current Learning Rate: 0.0002746846 +2024-11-08 03:41:39,968 Train Loss: 0.0018810, Val Loss: 0.0019874 +2024-11-08 03:41:39,968 Epoch 448/500 +2024-11-08 03:42:22,937 Current Learning Rate: 
0.0002645085 +2024-11-08 03:42:23,866 Train Loss: 0.0018351, Val Loss: 0.0019846 +2024-11-08 03:42:23,866 Epoch 449/500 +2024-11-08 03:43:06,840 Current Learning Rate: 0.0002545193 +2024-11-08 03:43:07,847 Train Loss: 0.0018554, Val Loss: 0.0019829 +2024-11-08 03:43:07,847 Epoch 450/500 +2024-11-08 03:43:50,727 Current Learning Rate: 0.0002447174 +2024-11-08 03:43:51,675 Train Loss: 0.0018819, Val Loss: 0.0019817 +2024-11-08 03:43:51,676 Epoch 451/500 +2024-11-08 03:44:34,288 Current Learning Rate: 0.0002351033 +2024-11-08 03:44:35,248 Train Loss: 0.0018689, Val Loss: 0.0019815 +2024-11-08 03:44:35,249 Epoch 452/500 +2024-11-08 03:45:17,275 Current Learning Rate: 0.0002256773 +2024-11-08 03:45:18,324 Train Loss: 0.0018431, Val Loss: 0.0019808 +2024-11-08 03:45:18,324 Epoch 453/500 +2024-11-08 03:46:00,256 Current Learning Rate: 0.0002164397 +2024-11-08 03:46:01,226 Train Loss: 0.0018769, Val Loss: 0.0019800 +2024-11-08 03:46:01,227 Epoch 454/500 +2024-11-08 03:46:42,684 Current Learning Rate: 0.0002073911 +2024-11-08 03:46:43,619 Train Loss: 0.0018639, Val Loss: 0.0019786 +2024-11-08 03:46:43,619 Epoch 455/500 +2024-11-08 03:47:25,569 Current Learning Rate: 0.0001985316 +2024-11-08 03:47:26,517 Train Loss: 0.0018774, Val Loss: 0.0019774 +2024-11-08 03:47:26,518 Epoch 456/500 +2024-11-08 03:48:08,458 Current Learning Rate: 0.0001898616 +2024-11-08 03:48:09,385 Train Loss: 0.0018268, Val Loss: 0.0019770 +2024-11-08 03:48:09,386 Epoch 457/500 +2024-11-08 03:48:51,326 Current Learning Rate: 0.0001813816 +2024-11-08 03:48:52,325 Train Loss: 0.0018571, Val Loss: 0.0019761 +2024-11-08 03:48:52,325 Epoch 458/500 +2024-11-08 03:49:34,083 Current Learning Rate: 0.0001730918 +2024-11-08 03:49:34,995 Train Loss: 0.0018650, Val Loss: 0.0019755 +2024-11-08 03:49:34,996 Epoch 459/500 +2024-11-08 03:50:16,989 Current Learning Rate: 0.0001649926 +2024-11-08 03:50:17,908 Train Loss: 0.0018698, Val Loss: 0.0019748 +2024-11-08 03:50:17,908 Epoch 460/500 +2024-11-08 03:51:00,619 
Current Learning Rate: 0.0001570842 +2024-11-08 03:51:01,571 Train Loss: 0.0018219, Val Loss: 0.0019742 +2024-11-08 03:51:01,571 Epoch 461/500 +2024-11-08 03:51:44,743 Current Learning Rate: 0.0001493670 +2024-11-08 03:51:45,679 Train Loss: 0.0018631, Val Loss: 0.0019731 +2024-11-08 03:51:45,680 Epoch 462/500 +2024-11-08 03:52:28,532 Current Learning Rate: 0.0001418413 +2024-11-08 03:52:29,498 Train Loss: 0.0018991, Val Loss: 0.0019726 +2024-11-08 03:52:29,499 Epoch 463/500 +2024-11-08 03:53:12,147 Current Learning Rate: 0.0001345074 +2024-11-08 03:53:13,114 Train Loss: 0.0018186, Val Loss: 0.0019716 +2024-11-08 03:53:13,114 Epoch 464/500 +2024-11-08 03:53:55,970 Current Learning Rate: 0.0001273656 +2024-11-08 03:53:56,939 Train Loss: 0.0018497, Val Loss: 0.0019712 +2024-11-08 03:53:56,939 Epoch 465/500 +2024-11-08 03:54:39,746 Current Learning Rate: 0.0001204162 +2024-11-08 03:54:40,683 Train Loss: 0.0018605, Val Loss: 0.0019703 +2024-11-08 03:54:40,684 Epoch 466/500 +2024-11-08 03:55:23,624 Current Learning Rate: 0.0001136594 +2024-11-08 03:55:24,569 Train Loss: 0.0018498, Val Loss: 0.0019701 +2024-11-08 03:55:24,569 Epoch 467/500 +2024-11-08 03:56:07,199 Current Learning Rate: 0.0001070955 +2024-11-08 03:56:08,181 Train Loss: 0.0018141, Val Loss: 0.0019691 +2024-11-08 03:56:08,181 Epoch 468/500 +2024-11-08 03:56:50,231 Current Learning Rate: 0.0001007247 +2024-11-08 03:56:51,207 Train Loss: 0.0018464, Val Loss: 0.0019678 +2024-11-08 03:56:51,208 Epoch 469/500 +2024-11-08 03:57:33,279 Current Learning Rate: 0.0000945474 +2024-11-08 03:57:34,191 Train Loss: 0.0018417, Val Loss: 0.0019672 +2024-11-08 03:57:34,192 Epoch 470/500 +2024-11-08 03:58:16,243 Current Learning Rate: 0.0000885637 +2024-11-08 03:58:17,178 Train Loss: 0.0018638, Val Loss: 0.0019668 +2024-11-08 03:58:17,178 Epoch 471/500 +2024-11-08 03:58:59,171 Current Learning Rate: 0.0000827740 +2024-11-08 03:59:00,153 Train Loss: 0.0019116, Val Loss: 0.0019658 +2024-11-08 03:59:00,153 Epoch 472/500 
+2024-11-08 03:59:41,962 Current Learning Rate: 0.0000771783 +2024-11-08 03:59:42,868 Train Loss: 0.0018586, Val Loss: 0.0019651 +2024-11-08 03:59:42,868 Epoch 473/500 +2024-11-08 04:00:25,196 Current Learning Rate: 0.0000717770 +2024-11-08 04:00:26,115 Train Loss: 0.0018395, Val Loss: 0.0019648 +2024-11-08 04:00:26,115 Epoch 474/500 +2024-11-08 04:01:08,225 Current Learning Rate: 0.0000665703 +2024-11-08 04:01:09,137 Train Loss: 0.0018328, Val Loss: 0.0019645 +2024-11-08 04:01:09,137 Epoch 475/500 +2024-11-08 04:01:50,815 Current Learning Rate: 0.0000615583 +2024-11-08 04:01:51,752 Train Loss: 0.0018439, Val Loss: 0.0019642 +2024-11-08 04:01:51,753 Epoch 476/500 +2024-11-08 04:02:34,150 Current Learning Rate: 0.0000567413 +2024-11-08 04:02:35,163 Train Loss: 0.0018266, Val Loss: 0.0019640 +2024-11-08 04:02:35,164 Epoch 477/500 +2024-11-08 04:03:16,852 Current Learning Rate: 0.0000521194 +2024-11-08 04:03:17,986 Train Loss: 0.0018249, Val Loss: 0.0019637 +2024-11-08 04:03:17,986 Epoch 478/500 +2024-11-08 04:03:59,608 Current Learning Rate: 0.0000476929 +2024-11-08 04:04:00,554 Train Loss: 0.0018796, Val Loss: 0.0019632 +2024-11-08 04:04:00,554 Epoch 479/500 +2024-11-08 04:04:42,542 Current Learning Rate: 0.0000434618 +2024-11-08 04:04:43,521 Train Loss: 0.0018416, Val Loss: 0.0019632 +2024-11-08 04:04:43,521 Epoch 480/500 +2024-11-08 04:05:25,332 Current Learning Rate: 0.0000394265 +2024-11-08 04:05:26,186 Train Loss: 0.0018411, Val Loss: 0.0019629 +2024-11-08 04:05:26,186 Epoch 481/500 +2024-11-08 04:06:08,179 Current Learning Rate: 0.0000355870 +2024-11-08 04:06:08,180 Train Loss: 0.0018423, Val Loss: 0.0019630 +2024-11-08 04:06:08,180 Epoch 482/500 +2024-11-08 04:06:51,291 Current Learning Rate: 0.0000319434 +2024-11-08 04:06:51,301 Train Loss: 0.0018067, Val Loss: 0.0019629 +2024-11-08 04:06:51,302 Epoch 483/500 +2024-11-08 04:07:34,259 Current Learning Rate: 0.0000284960 +2024-11-08 04:07:35,153 Train Loss: 0.0018860, Val Loss: 0.0019625 +2024-11-08 
04:07:35,153 Epoch 484/500 +2024-11-08 04:08:17,998 Current Learning Rate: 0.0000252449 +2024-11-08 04:08:18,947 Train Loss: 0.0018050, Val Loss: 0.0019624 +2024-11-08 04:08:18,947 Epoch 485/500 +2024-11-08 04:09:01,776 Current Learning Rate: 0.0000221902 +2024-11-08 04:09:03,880 Train Loss: 0.0018236, Val Loss: 0.0019620 +2024-11-08 04:09:03,880 Epoch 486/500 +2024-11-08 04:09:46,970 Current Learning Rate: 0.0000193320 +2024-11-08 04:09:47,962 Train Loss: 0.0018714, Val Loss: 0.0019616 +2024-11-08 04:09:47,962 Epoch 487/500 +2024-11-08 04:10:30,856 Current Learning Rate: 0.0000166704 +2024-11-08 04:10:30,857 Train Loss: 0.0018238, Val Loss: 0.0019616 +2024-11-08 04:10:30,857 Epoch 488/500 +2024-11-08 04:11:13,917 Current Learning Rate: 0.0000142055 +2024-11-08 04:11:14,833 Train Loss: 0.0018733, Val Loss: 0.0019611 +2024-11-08 04:11:14,833 Epoch 489/500 +2024-11-08 04:11:57,549 Current Learning Rate: 0.0000119375 +2024-11-08 04:11:57,550 Train Loss: 0.0018520, Val Loss: 0.0019612 +2024-11-08 04:11:57,550 Epoch 490/500 +2024-11-08 04:12:40,503 Current Learning Rate: 0.0000098664 +2024-11-08 04:12:40,504 Train Loss: 0.0018408, Val Loss: 0.0019614 +2024-11-08 04:12:40,504 Epoch 491/500 +2024-11-08 04:13:23,407 Current Learning Rate: 0.0000079922 +2024-11-08 04:13:23,409 Train Loss: 0.0018850, Val Loss: 0.0019613 +2024-11-08 04:13:23,409 Epoch 492/500 +2024-11-08 04:14:06,452 Current Learning Rate: 0.0000063152 +2024-11-08 04:14:06,560 Train Loss: 0.0019089, Val Loss: 0.0019613 +2024-11-08 04:14:06,562 Epoch 493/500 +2024-11-08 04:14:49,648 Current Learning Rate: 0.0000048353 +2024-11-08 04:14:49,649 Train Loss: 0.0018883, Val Loss: 0.0019612 +2024-11-08 04:14:49,650 Epoch 494/500 +2024-11-08 04:15:32,659 Current Learning Rate: 0.0000035526 +2024-11-08 04:15:33,593 Train Loss: 0.0018896, Val Loss: 0.0019611 +2024-11-08 04:15:33,593 Epoch 495/500 +2024-11-08 04:16:15,611 Current Learning Rate: 0.0000024672 +2024-11-08 04:16:16,561 Train Loss: 0.0018893, Val Loss: 
0.0019611 +2024-11-08 04:16:16,561 Epoch 496/500 +2024-11-08 04:16:59,002 Current Learning Rate: 0.0000015791 +2024-11-08 04:16:59,003 Train Loss: 0.0018042, Val Loss: 0.0019611 +2024-11-08 04:16:59,003 Epoch 497/500 +2024-11-08 04:17:42,334 Current Learning Rate: 0.0000008882 +2024-11-08 04:17:43,242 Train Loss: 0.0018666, Val Loss: 0.0019610 +2024-11-08 04:17:43,242 Epoch 498/500 +2024-11-08 04:18:25,783 Current Learning Rate: 0.0000003948 +2024-11-08 04:18:26,680 Train Loss: 0.0018384, Val Loss: 0.0019610 +2024-11-08 04:18:26,680 Epoch 499/500 +2024-11-08 04:19:08,371 Current Learning Rate: 0.0000000987 +2024-11-08 04:19:08,372 Train Loss: 0.0018463, Val Loss: 0.0019610 +2024-11-08 04:19:08,372 Epoch 500/500 +2024-11-08 04:19:51,863 Current Learning Rate: 0.0000000000 +2024-11-08 04:19:51,864 Train Loss: 0.0018044, Val Loss: 0.0019611 +2024-11-08 04:19:57,579 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp3_20241111_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp3_20241111_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..d301561c21960b101afdcb3369c96f4db25628f1 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_exp3_20241111_training_log.log @@ -0,0 +1,6070 @@ +2024-11-11 13:57:17,741 Epoch 1/2000 +2024-11-11 13:57:33,467 Current Learning Rate: 0.0099993832 +2024-11-11 13:57:34,075 Train Loss: 1.2661774, Val Loss: 0.0945691 +2024-11-11 13:57:34,075 Epoch 2/2000 +2024-11-11 13:57:48,763 Current Learning Rate: 0.0099975328 +2024-11-11 13:57:49,623 Train Loss: 0.0537641, Val Loss: 0.0279424 +2024-11-11 13:57:49,623 Epoch 3/2000 +2024-11-11 13:58:04,524 Current Learning Rate: 0.0099944494 +2024-11-11 13:58:05,625 Train Loss: 0.0225878, Val Loss: 0.0174877 +2024-11-11 13:58:05,626 Epoch 4/2000 +2024-11-11 13:58:20,987 Current Learning Rate: 0.0099901336 +2024-11-11 13:58:21,766 Train Loss: 0.0182771, Val Loss: 0.0150993 +2024-11-11 13:58:21,766 Epoch 
5/2000 +2024-11-11 13:58:37,352 Current Learning Rate: 0.0099845867 +2024-11-11 13:58:38,354 Train Loss: 0.0156225, Val Loss: 0.0133836 +2024-11-11 13:58:38,355 Epoch 6/2000 +2024-11-11 13:58:54,652 Current Learning Rate: 0.0099778098 +2024-11-11 13:58:55,447 Train Loss: 0.0145146, Val Loss: 0.0130899 +2024-11-11 13:58:55,447 Epoch 7/2000 +2024-11-11 13:59:10,803 Current Learning Rate: 0.0099698048 +2024-11-11 13:59:11,606 Train Loss: 0.0142418, Val Loss: 0.0129172 +2024-11-11 13:59:11,606 Epoch 8/2000 +2024-11-11 13:59:26,594 Current Learning Rate: 0.0099605735 +2024-11-11 13:59:27,429 Train Loss: 0.0140829, Val Loss: 0.0128121 +2024-11-11 13:59:27,429 Epoch 9/2000 +2024-11-11 13:59:42,661 Current Learning Rate: 0.0099501183 +2024-11-11 13:59:43,689 Train Loss: 0.0139972, Val Loss: 0.0127396 +2024-11-11 13:59:43,689 Epoch 10/2000 +2024-11-11 13:59:59,735 Current Learning Rate: 0.0099384417 +2024-11-11 14:00:00,530 Train Loss: 0.0139306, Val Loss: 0.0126877 +2024-11-11 14:00:00,531 Epoch 11/2000 +2024-11-11 14:00:15,580 Current Learning Rate: 0.0099255466 +2024-11-11 14:00:16,603 Train Loss: 0.0138500, Val Loss: 0.0126377 +2024-11-11 14:00:16,604 Epoch 12/2000 +2024-11-11 14:00:32,449 Current Learning Rate: 0.0099114363 +2024-11-11 14:00:33,472 Train Loss: 0.0137952, Val Loss: 0.0126092 +2024-11-11 14:00:33,473 Epoch 13/2000 +2024-11-11 14:00:49,904 Current Learning Rate: 0.0098961141 +2024-11-11 14:00:50,822 Train Loss: 0.0137525, Val Loss: 0.0125482 +2024-11-11 14:00:50,822 Epoch 14/2000 +2024-11-11 14:01:06,299 Current Learning Rate: 0.0098795838 +2024-11-11 14:01:07,340 Train Loss: 0.0136998, Val Loss: 0.0125104 +2024-11-11 14:01:07,341 Epoch 15/2000 +2024-11-11 14:01:23,530 Current Learning Rate: 0.0098618496 +2024-11-11 14:01:24,335 Train Loss: 0.0136709, Val Loss: 0.0124729 +2024-11-11 14:01:24,336 Epoch 16/2000 +2024-11-11 14:01:39,180 Current Learning Rate: 0.0098429158 +2024-11-11 14:01:39,977 Train Loss: 0.0136169, Val Loss: 0.0124519 +2024-11-11 
14:01:39,978 Epoch 17/2000 +2024-11-11 14:01:55,280 Current Learning Rate: 0.0098227871 +2024-11-11 14:01:56,264 Train Loss: 0.0135782, Val Loss: 0.0124041 +2024-11-11 14:01:56,264 Epoch 18/2000 +2024-11-11 14:02:12,422 Current Learning Rate: 0.0098014684 +2024-11-11 14:02:13,438 Train Loss: 0.0135803, Val Loss: 0.0123766 +2024-11-11 14:02:13,438 Epoch 19/2000 +2024-11-11 14:02:29,142 Current Learning Rate: 0.0097789651 +2024-11-11 14:02:30,239 Train Loss: 0.0135045, Val Loss: 0.0123399 +2024-11-11 14:02:30,239 Epoch 20/2000 +2024-11-11 14:02:45,969 Current Learning Rate: 0.0097552826 +2024-11-11 14:02:46,963 Train Loss: 0.0134864, Val Loss: 0.0123273 +2024-11-11 14:02:46,963 Epoch 21/2000 +2024-11-11 14:03:03,191 Current Learning Rate: 0.0097304268 +2024-11-11 14:03:04,677 Train Loss: 0.0135118, Val Loss: 0.0122758 +2024-11-11 14:03:04,677 Epoch 22/2000 +2024-11-11 14:03:20,274 Current Learning Rate: 0.0097044038 +2024-11-11 14:03:21,242 Train Loss: 0.0133713, Val Loss: 0.0122122 +2024-11-11 14:03:21,242 Epoch 23/2000 +2024-11-11 14:03:36,192 Current Learning Rate: 0.0096772202 +2024-11-11 14:03:36,969 Train Loss: 0.0133401, Val Loss: 0.0121755 +2024-11-11 14:03:36,969 Epoch 24/2000 +2024-11-11 14:03:52,247 Current Learning Rate: 0.0096488824 +2024-11-11 14:03:53,264 Train Loss: 0.0132870, Val Loss: 0.0121372 +2024-11-11 14:03:53,264 Epoch 25/2000 +2024-11-11 14:04:08,343 Current Learning Rate: 0.0096193977 +2024-11-11 14:04:09,135 Train Loss: 0.0132457, Val Loss: 0.0121040 +2024-11-11 14:04:09,135 Epoch 26/2000 +2024-11-11 14:04:23,673 Current Learning Rate: 0.0095887731 +2024-11-11 14:04:24,427 Train Loss: 0.0131983, Val Loss: 0.0120748 +2024-11-11 14:04:24,427 Epoch 27/2000 +2024-11-11 14:04:39,002 Current Learning Rate: 0.0095570164 +2024-11-11 14:04:39,773 Train Loss: 0.0131626, Val Loss: 0.0120470 +2024-11-11 14:04:39,773 Epoch 28/2000 +2024-11-11 14:04:54,297 Current Learning Rate: 0.0095241353 +2024-11-11 14:04:55,000 Train Loss: 0.0131318, Val Loss: 
0.0120103 +2024-11-11 14:04:55,000 Epoch 29/2000 +2024-11-11 14:05:09,700 Current Learning Rate: 0.0094901379 +2024-11-11 14:05:10,421 Train Loss: 0.0130881, Val Loss: 0.0119711 +2024-11-11 14:05:10,421 Epoch 30/2000 +2024-11-11 14:05:25,041 Current Learning Rate: 0.0094550326 +2024-11-11 14:05:25,922 Train Loss: 0.0130381, Val Loss: 0.0119285 +2024-11-11 14:05:25,922 Epoch 31/2000 +2024-11-11 14:05:41,262 Current Learning Rate: 0.0094188282 +2024-11-11 14:05:42,297 Train Loss: 0.0129911, Val Loss: 0.0118835 +2024-11-11 14:05:42,297 Epoch 32/2000 +2024-11-11 14:05:57,406 Current Learning Rate: 0.0093815334 +2024-11-11 14:05:58,205 Train Loss: 0.0129408, Val Loss: 0.0118445 +2024-11-11 14:05:58,205 Epoch 33/2000 +2024-11-11 14:06:12,583 Current Learning Rate: 0.0093431576 +2024-11-11 14:06:13,444 Train Loss: 0.0128994, Val Loss: 0.0118045 +2024-11-11 14:06:13,444 Epoch 34/2000 +2024-11-11 14:06:27,706 Current Learning Rate: 0.0093037101 +2024-11-11 14:06:28,491 Train Loss: 0.0128525, Val Loss: 0.0117625 +2024-11-11 14:06:28,492 Epoch 35/2000 +2024-11-11 14:06:43,050 Current Learning Rate: 0.0092632008 +2024-11-11 14:06:43,806 Train Loss: 0.0128143, Val Loss: 0.0117353 +2024-11-11 14:06:43,807 Epoch 36/2000 +2024-11-11 14:06:58,363 Current Learning Rate: 0.0092216396 +2024-11-11 14:06:59,142 Train Loss: 0.0127623, Val Loss: 0.0116933 +2024-11-11 14:06:59,142 Epoch 37/2000 +2024-11-11 14:07:14,647 Current Learning Rate: 0.0091790368 +2024-11-11 14:07:15,449 Train Loss: 0.0127313, Val Loss: 0.0116767 +2024-11-11 14:07:15,449 Epoch 38/2000 +2024-11-11 14:07:31,353 Current Learning Rate: 0.0091354029 +2024-11-11 14:07:32,129 Train Loss: 0.0126899, Val Loss: 0.0116313 +2024-11-11 14:07:32,130 Epoch 39/2000 +2024-11-11 14:07:47,116 Current Learning Rate: 0.0090907486 +2024-11-11 14:07:47,900 Train Loss: 0.0126563, Val Loss: 0.0115961 +2024-11-11 14:07:47,900 Epoch 40/2000 +2024-11-11 14:08:04,085 Current Learning Rate: 0.0090450850 +2024-11-11 14:08:04,878 Train Loss: 
0.0126185, Val Loss: 0.0115656 +2024-11-11 14:08:04,878 Epoch 41/2000 +2024-11-11 14:08:19,508 Current Learning Rate: 0.0089984233 +2024-11-11 14:08:20,279 Train Loss: 0.0125796, Val Loss: 0.0115548 +2024-11-11 14:08:20,279 Epoch 42/2000 +2024-11-11 14:08:35,722 Current Learning Rate: 0.0089507751 +2024-11-11 14:08:36,552 Train Loss: 0.0125514, Val Loss: 0.0115522 +2024-11-11 14:08:36,552 Epoch 43/2000 +2024-11-11 14:08:52,783 Current Learning Rate: 0.0089021520 +2024-11-11 14:08:53,590 Train Loss: 0.0125182, Val Loss: 0.0115146 +2024-11-11 14:08:53,591 Epoch 44/2000 +2024-11-11 14:09:09,053 Current Learning Rate: 0.0088525662 +2024-11-11 14:09:09,853 Train Loss: 0.0124835, Val Loss: 0.0114822 +2024-11-11 14:09:09,853 Epoch 45/2000 +2024-11-11 14:09:24,865 Current Learning Rate: 0.0088020298 +2024-11-11 14:09:25,694 Train Loss: 0.0124573, Val Loss: 0.0114429 +2024-11-11 14:09:25,694 Epoch 46/2000 +2024-11-11 14:09:39,949 Current Learning Rate: 0.0087505553 +2024-11-11 14:09:40,802 Train Loss: 0.0124457, Val Loss: 0.0114221 +2024-11-11 14:09:40,803 Epoch 47/2000 +2024-11-11 14:09:55,217 Current Learning Rate: 0.0086981555 +2024-11-11 14:09:56,011 Train Loss: 0.0124224, Val Loss: 0.0114076 +2024-11-11 14:09:56,012 Epoch 48/2000 +2024-11-11 14:10:10,537 Current Learning Rate: 0.0086448431 +2024-11-11 14:10:11,397 Train Loss: 0.0124149, Val Loss: 0.0113851 +2024-11-11 14:10:11,397 Epoch 49/2000 +2024-11-11 14:10:26,503 Current Learning Rate: 0.0085906315 +2024-11-11 14:10:26,504 Train Loss: 0.0124183, Val Loss: 0.0114720 +2024-11-11 14:10:26,504 Epoch 50/2000 +2024-11-11 14:10:41,894 Current Learning Rate: 0.0085355339 +2024-11-11 14:10:41,895 Train Loss: 0.0123957, Val Loss: 0.0113915 +2024-11-11 14:10:41,895 Epoch 51/2000 +2024-11-11 14:10:57,760 Current Learning Rate: 0.0084795640 +2024-11-11 14:10:58,530 Train Loss: 0.0123648, Val Loss: 0.0113490 +2024-11-11 14:10:58,531 Epoch 52/2000 +2024-11-11 14:11:13,324 Current Learning Rate: 0.0084227355 +2024-11-11 
14:11:13,325 Train Loss: 0.0123474, Val Loss: 0.0113556 +2024-11-11 14:11:13,326 Epoch 53/2000 +2024-11-11 14:11:28,539 Current Learning Rate: 0.0083650626 +2024-11-11 14:11:29,242 Train Loss: 0.0123528, Val Loss: 0.0113295 +2024-11-11 14:11:29,242 Epoch 54/2000 +2024-11-11 14:11:43,858 Current Learning Rate: 0.0083065593 +2024-11-11 14:11:43,859 Train Loss: 0.0123243, Val Loss: 0.0113394 +2024-11-11 14:11:43,859 Epoch 55/2000 +2024-11-11 14:11:59,784 Current Learning Rate: 0.0082472402 +2024-11-11 14:12:00,507 Train Loss: 0.0122991, Val Loss: 0.0112948 +2024-11-11 14:12:00,507 Epoch 56/2000 +2024-11-11 14:12:15,124 Current Learning Rate: 0.0081871199 +2024-11-11 14:12:15,125 Train Loss: 0.0122957, Val Loss: 0.0113359 +2024-11-11 14:12:15,125 Epoch 57/2000 +2024-11-11 14:12:30,952 Current Learning Rate: 0.0081262133 +2024-11-11 14:12:31,721 Train Loss: 0.0122802, Val Loss: 0.0112878 +2024-11-11 14:12:31,721 Epoch 58/2000 +2024-11-11 14:12:46,994 Current Learning Rate: 0.0080645353 +2024-11-11 14:12:46,995 Train Loss: 0.0122734, Val Loss: 0.0112881 +2024-11-11 14:12:46,995 Epoch 59/2000 +2024-11-11 14:13:02,764 Current Learning Rate: 0.0080021011 +2024-11-11 14:13:02,764 Train Loss: 0.0122800, Val Loss: 0.0112881 +2024-11-11 14:13:02,765 Epoch 60/2000 +2024-11-11 14:13:18,621 Current Learning Rate: 0.0079389263 +2024-11-11 14:13:19,638 Train Loss: 0.0122700, Val Loss: 0.0112763 +2024-11-11 14:13:19,638 Epoch 61/2000 +2024-11-11 14:13:35,644 Current Learning Rate: 0.0078750263 +2024-11-11 14:13:36,634 Train Loss: 0.0122608, Val Loss: 0.0112667 +2024-11-11 14:13:36,634 Epoch 62/2000 +2024-11-11 14:13:52,697 Current Learning Rate: 0.0078104169 +2024-11-11 14:13:53,451 Train Loss: 0.0122487, Val Loss: 0.0112643 +2024-11-11 14:13:53,451 Epoch 63/2000 +2024-11-11 14:14:08,402 Current Learning Rate: 0.0077451141 +2024-11-11 14:14:09,416 Train Loss: 0.0122393, Val Loss: 0.0112629 +2024-11-11 14:14:09,417 Epoch 64/2000 +2024-11-11 14:14:25,244 Current Learning Rate: 
0.0076791340 +2024-11-11 14:14:26,298 Train Loss: 0.0122336, Val Loss: 0.0112508 +2024-11-11 14:14:26,299 Epoch 65/2000 +2024-11-11 14:14:41,517 Current Learning Rate: 0.0076124928 +2024-11-11 14:14:42,293 Train Loss: 0.0122102, Val Loss: 0.0112134 +2024-11-11 14:14:42,293 Epoch 66/2000 +2024-11-11 14:14:58,575 Current Learning Rate: 0.0075452071 +2024-11-11 14:14:59,539 Train Loss: 0.0122081, Val Loss: 0.0112017 +2024-11-11 14:14:59,540 Epoch 67/2000 +2024-11-11 14:15:15,566 Current Learning Rate: 0.0074772933 +2024-11-11 14:15:16,590 Train Loss: 0.0121974, Val Loss: 0.0111912 +2024-11-11 14:15:16,591 Epoch 68/2000 +2024-11-11 14:15:32,850 Current Learning Rate: 0.0074087684 +2024-11-11 14:15:32,850 Train Loss: 0.0121820, Val Loss: 0.0111961 +2024-11-11 14:15:32,851 Epoch 69/2000 +2024-11-11 14:15:48,748 Current Learning Rate: 0.0073396491 +2024-11-11 14:15:49,551 Train Loss: 0.0121818, Val Loss: 0.0111584 +2024-11-11 14:15:49,552 Epoch 70/2000 +2024-11-11 14:16:04,629 Current Learning Rate: 0.0072699525 +2024-11-11 14:16:05,721 Train Loss: 0.0121623, Val Loss: 0.0111324 +2024-11-11 14:16:05,721 Epoch 71/2000 +2024-11-11 14:16:22,081 Current Learning Rate: 0.0071996958 +2024-11-11 14:16:22,831 Train Loss: 0.0121434, Val Loss: 0.0111188 +2024-11-11 14:16:22,832 Epoch 72/2000 +2024-11-11 14:16:37,019 Current Learning Rate: 0.0071288965 +2024-11-11 14:16:37,901 Train Loss: 0.0121036, Val Loss: 0.0111138 +2024-11-11 14:16:37,902 Epoch 73/2000 +2024-11-11 14:16:53,012 Current Learning Rate: 0.0070575718 +2024-11-11 14:16:53,013 Train Loss: 0.0120830, Val Loss: 0.0111231 +2024-11-11 14:16:53,014 Epoch 74/2000 +2024-11-11 14:17:08,410 Current Learning Rate: 0.0069857395 +2024-11-11 14:17:09,336 Train Loss: 0.0120593, Val Loss: 0.0110455 +2024-11-11 14:17:09,336 Epoch 75/2000 +2024-11-11 14:17:25,214 Current Learning Rate: 0.0069134172 +2024-11-11 14:17:26,247 Train Loss: 0.0120199, Val Loss: 0.0110349 +2024-11-11 14:17:26,248 Epoch 76/2000 +2024-11-11 14:17:42,284 
Current Learning Rate: 0.0068406228 +2024-11-11 14:17:43,352 Train Loss: 0.0120269, Val Loss: 0.0109805 +2024-11-11 14:17:43,353 Epoch 77/2000 +2024-11-11 14:17:58,616 Current Learning Rate: 0.0067673742 +2024-11-11 14:17:59,448 Train Loss: 0.0118804, Val Loss: 0.0108571 +2024-11-11 14:17:59,448 Epoch 78/2000 +2024-11-11 14:18:14,848 Current Learning Rate: 0.0066936896 +2024-11-11 14:18:14,849 Train Loss: 0.0118254, Val Loss: 0.0109454 +2024-11-11 14:18:14,849 Epoch 79/2000 +2024-11-11 14:18:31,465 Current Learning Rate: 0.0066195871 +2024-11-11 14:18:32,467 Train Loss: 0.0116624, Val Loss: 0.0105468 +2024-11-11 14:18:32,467 Epoch 80/2000 +2024-11-11 14:18:48,726 Current Learning Rate: 0.0065450850 +2024-11-11 14:18:49,776 Train Loss: 0.0116413, Val Loss: 0.0104802 +2024-11-11 14:18:49,776 Epoch 81/2000 +2024-11-11 14:19:05,544 Current Learning Rate: 0.0064702016 +2024-11-11 14:19:06,615 Train Loss: 0.0113220, Val Loss: 0.0103361 +2024-11-11 14:19:06,617 Epoch 82/2000 +2024-11-11 14:19:22,882 Current Learning Rate: 0.0063949555 +2024-11-11 14:19:23,886 Train Loss: 0.0111988, Val Loss: 0.0101918 +2024-11-11 14:19:23,887 Epoch 83/2000 +2024-11-11 14:19:40,105 Current Learning Rate: 0.0063193652 +2024-11-11 14:19:41,141 Train Loss: 0.0110398, Val Loss: 0.0101036 +2024-11-11 14:19:41,142 Epoch 84/2000 +2024-11-11 14:19:57,519 Current Learning Rate: 0.0062434494 +2024-11-11 14:19:58,597 Train Loss: 0.0109725, Val Loss: 0.0100524 +2024-11-11 14:19:58,597 Epoch 85/2000 +2024-11-11 14:20:14,789 Current Learning Rate: 0.0061672268 +2024-11-11 14:20:15,557 Train Loss: 0.0109243, Val Loss: 0.0099134 +2024-11-11 14:20:15,557 Epoch 86/2000 +2024-11-11 14:20:30,737 Current Learning Rate: 0.0060907162 +2024-11-11 14:20:31,628 Train Loss: 0.0107117, Val Loss: 0.0098298 +2024-11-11 14:20:31,628 Epoch 87/2000 +2024-11-11 14:20:46,658 Current Learning Rate: 0.0060139365 +2024-11-11 14:20:46,659 Train Loss: 0.0107305, Val Loss: 0.0098862 +2024-11-11 14:20:46,659 Epoch 88/2000 
+2024-11-11 14:21:02,896 Current Learning Rate: 0.0059369066 +2024-11-11 14:21:04,661 Train Loss: 0.0105596, Val Loss: 0.0096487 +2024-11-11 14:21:04,661 Epoch 89/2000 +2024-11-11 14:21:20,799 Current Learning Rate: 0.0058596455 +2024-11-11 14:21:21,613 Train Loss: 0.0103846, Val Loss: 0.0095496 +2024-11-11 14:21:21,613 Epoch 90/2000 +2024-11-11 14:21:36,590 Current Learning Rate: 0.0057821723 +2024-11-11 14:21:37,478 Train Loss: 0.0102785, Val Loss: 0.0094894 +2024-11-11 14:21:37,479 Epoch 91/2000 +2024-11-11 14:21:52,472 Current Learning Rate: 0.0057045062 +2024-11-11 14:21:53,255 Train Loss: 0.0101886, Val Loss: 0.0093482 +2024-11-11 14:21:53,255 Epoch 92/2000 +2024-11-11 14:22:08,255 Current Learning Rate: 0.0056266662 +2024-11-11 14:22:08,256 Train Loss: 0.0100830, Val Loss: 0.0093750 +2024-11-11 14:22:08,256 Epoch 93/2000 +2024-11-11 14:22:24,411 Current Learning Rate: 0.0055486716 +2024-11-11 14:22:25,170 Train Loss: 0.0100160, Val Loss: 0.0091938 +2024-11-11 14:22:25,170 Epoch 94/2000 +2024-11-11 14:22:39,379 Current Learning Rate: 0.0054705416 +2024-11-11 14:22:40,156 Train Loss: 0.0099185, Val Loss: 0.0091012 +2024-11-11 14:22:40,157 Epoch 95/2000 +2024-11-11 14:22:55,352 Current Learning Rate: 0.0053922955 +2024-11-11 14:22:56,117 Train Loss: 0.0097893, Val Loss: 0.0090778 +2024-11-11 14:22:56,118 Epoch 96/2000 +2024-11-11 14:23:11,657 Current Learning Rate: 0.0053139526 +2024-11-11 14:23:12,400 Train Loss: 0.0097754, Val Loss: 0.0089586 +2024-11-11 14:23:12,401 Epoch 97/2000 +2024-11-11 14:23:28,012 Current Learning Rate: 0.0052355323 +2024-11-11 14:23:28,750 Train Loss: 0.0096276, Val Loss: 0.0089473 +2024-11-11 14:23:28,750 Epoch 98/2000 +2024-11-11 14:23:43,761 Current Learning Rate: 0.0051570538 +2024-11-11 14:23:43,762 Train Loss: 0.0096329, Val Loss: 0.0091781 +2024-11-11 14:23:43,762 Epoch 99/2000 +2024-11-11 14:23:59,405 Current Learning Rate: 0.0050785366 +2024-11-11 14:24:00,170 Train Loss: 0.0095311, Val Loss: 0.0087892 +2024-11-11 
14:24:00,170 Epoch 100/2000 +2024-11-11 14:24:15,090 Current Learning Rate: 0.0050000000 +2024-11-11 14:24:15,921 Train Loss: 0.0094008, Val Loss: 0.0086809 +2024-11-11 14:24:15,922 Epoch 101/2000 +2024-11-11 14:24:32,953 Current Learning Rate: 0.0049214634 +2024-11-11 14:24:32,953 Train Loss: 0.0094966, Val Loss: 0.0090536 +2024-11-11 14:24:32,953 Epoch 102/2000 +2024-11-11 14:24:48,689 Current Learning Rate: 0.0048429462 +2024-11-11 14:24:48,690 Train Loss: 0.0098488, Val Loss: 0.0090301 +2024-11-11 14:24:48,691 Epoch 103/2000 +2024-11-11 14:25:03,879 Current Learning Rate: 0.0047644677 +2024-11-11 14:25:03,880 Train Loss: 0.0094386, Val Loss: 0.0102315 +2024-11-11 14:25:03,881 Epoch 104/2000 +2024-11-11 14:25:19,963 Current Learning Rate: 0.0046860474 +2024-11-11 14:25:19,964 Train Loss: 0.0097765, Val Loss: 0.0087173 +2024-11-11 14:25:19,964 Epoch 105/2000 +2024-11-11 14:25:35,667 Current Learning Rate: 0.0046077045 +2024-11-11 14:25:35,667 Train Loss: 0.0093596, Val Loss: 0.0088081 +2024-11-11 14:25:35,667 Epoch 106/2000 +2024-11-11 14:25:51,694 Current Learning Rate: 0.0045294584 +2024-11-11 14:25:52,495 Train Loss: 0.0093826, Val Loss: 0.0085501 +2024-11-11 14:25:52,495 Epoch 107/2000 +2024-11-11 14:26:07,785 Current Learning Rate: 0.0044513284 +2024-11-11 14:26:07,786 Train Loss: 0.0092222, Val Loss: 0.0085908 +2024-11-11 14:26:07,786 Epoch 108/2000 +2024-11-11 14:26:23,748 Current Learning Rate: 0.0043733338 +2024-11-11 14:26:24,524 Train Loss: 0.0091933, Val Loss: 0.0084863 +2024-11-11 14:26:24,525 Epoch 109/2000 +2024-11-11 14:26:39,085 Current Learning Rate: 0.0042954938 +2024-11-11 14:26:39,086 Train Loss: 0.0093985, Val Loss: 0.0086324 +2024-11-11 14:26:39,086 Epoch 110/2000 +2024-11-11 14:26:55,275 Current Learning Rate: 0.0042178277 +2024-11-11 14:26:55,275 Train Loss: 0.0093343, Val Loss: 0.0085354 +2024-11-11 14:26:55,276 Epoch 111/2000 +2024-11-11 14:27:11,247 Current Learning Rate: 0.0041403545 +2024-11-11 14:27:12,026 Train Loss: 0.0090339, Val 
Loss: 0.0084420 +2024-11-11 14:27:12,026 Epoch 112/2000 +2024-11-11 14:27:26,618 Current Learning Rate: 0.0040630934 +2024-11-11 14:27:27,462 Train Loss: 0.0089710, Val Loss: 0.0083739 +2024-11-11 14:27:27,462 Epoch 113/2000 +2024-11-11 14:27:42,084 Current Learning Rate: 0.0039860635 +2024-11-11 14:27:42,084 Train Loss: 0.0090989, Val Loss: 0.0084235 +2024-11-11 14:27:42,085 Epoch 114/2000 +2024-11-11 14:27:58,247 Current Learning Rate: 0.0039092838 +2024-11-11 14:27:58,248 Train Loss: 0.0089871, Val Loss: 0.0084957 +2024-11-11 14:27:58,248 Epoch 115/2000 +2024-11-11 14:28:14,890 Current Learning Rate: 0.0038327732 +2024-11-11 14:28:14,891 Train Loss: 0.0095247, Val Loss: 0.0091416 +2024-11-11 14:28:14,891 Epoch 116/2000 +2024-11-11 14:28:31,490 Current Learning Rate: 0.0037565506 +2024-11-11 14:28:32,222 Train Loss: 0.0090694, Val Loss: 0.0082615 +2024-11-11 14:28:32,222 Epoch 117/2000 +2024-11-11 14:28:46,636 Current Learning Rate: 0.0036806348 +2024-11-11 14:28:46,637 Train Loss: 0.0089576, Val Loss: 0.0082832 +2024-11-11 14:28:46,637 Epoch 118/2000 +2024-11-11 14:29:01,774 Current Learning Rate: 0.0036050445 +2024-11-11 14:29:03,468 Train Loss: 0.0088039, Val Loss: 0.0080960 +2024-11-11 14:29:03,468 Epoch 119/2000 +2024-11-11 14:29:17,745 Current Learning Rate: 0.0035297984 +2024-11-11 14:29:17,746 Train Loss: 0.0088573, Val Loss: 0.0081461 +2024-11-11 14:29:17,746 Epoch 120/2000 +2024-11-11 14:29:33,047 Current Learning Rate: 0.0034549150 +2024-11-11 14:29:33,837 Train Loss: 0.0085194, Val Loss: 0.0079936 +2024-11-11 14:29:33,837 Epoch 121/2000 +2024-11-11 14:29:48,437 Current Learning Rate: 0.0033804129 +2024-11-11 14:29:49,164 Train Loss: 0.0084638, Val Loss: 0.0079802 +2024-11-11 14:29:49,165 Epoch 122/2000 +2024-11-11 14:30:04,560 Current Learning Rate: 0.0033063104 +2024-11-11 14:30:05,343 Train Loss: 0.0083644, Val Loss: 0.0077298 +2024-11-11 14:30:05,344 Epoch 123/2000 +2024-11-11 14:30:19,769 Current Learning Rate: 0.0032326258 +2024-11-11 
14:30:20,557 Train Loss: 0.0081522, Val Loss: 0.0076117 +2024-11-11 14:30:20,557 Epoch 124/2000 +2024-11-11 14:30:35,764 Current Learning Rate: 0.0031593772 +2024-11-11 14:30:35,765 Train Loss: 0.0081840, Val Loss: 0.0076353 +2024-11-11 14:30:35,765 Epoch 125/2000 +2024-11-11 14:30:51,376 Current Learning Rate: 0.0030865828 +2024-11-11 14:30:52,227 Train Loss: 0.0081295, Val Loss: 0.0076054 +2024-11-11 14:30:52,227 Epoch 126/2000 +2024-11-11 14:31:06,569 Current Learning Rate: 0.0030142605 +2024-11-11 14:31:07,344 Train Loss: 0.0079633, Val Loss: 0.0075449 +2024-11-11 14:31:07,344 Epoch 127/2000 +2024-11-11 14:31:22,482 Current Learning Rate: 0.0029424282 +2024-11-11 14:31:23,522 Train Loss: 0.0079932, Val Loss: 0.0074044 +2024-11-11 14:31:23,522 Epoch 128/2000 +2024-11-11 14:31:39,151 Current Learning Rate: 0.0028711035 +2024-11-11 14:31:39,152 Train Loss: 0.0079451, Val Loss: 0.0074685 +2024-11-11 14:31:39,152 Epoch 129/2000 +2024-11-11 14:31:53,855 Current Learning Rate: 0.0028003042 +2024-11-11 14:31:54,643 Train Loss: 0.0080458, Val Loss: 0.0073241 +2024-11-11 14:31:54,643 Epoch 130/2000 +2024-11-11 14:32:09,356 Current Learning Rate: 0.0027300475 +2024-11-11 14:32:09,357 Train Loss: 0.0077950, Val Loss: 0.0073365 +2024-11-11 14:32:09,357 Epoch 131/2000 +2024-11-11 14:32:24,716 Current Learning Rate: 0.0026603509 +2024-11-11 14:32:24,717 Train Loss: 0.0078814, Val Loss: 0.0075878 +2024-11-11 14:32:24,717 Epoch 132/2000 +2024-11-11 14:32:39,992 Current Learning Rate: 0.0025912316 +2024-11-11 14:32:39,993 Train Loss: 0.0077670, Val Loss: 0.0075323 +2024-11-11 14:32:39,993 Epoch 133/2000 +2024-11-11 14:32:55,396 Current Learning Rate: 0.0025227067 +2024-11-11 14:32:56,191 Train Loss: 0.0079082, Val Loss: 0.0073139 +2024-11-11 14:32:56,191 Epoch 134/2000 +2024-11-11 14:33:11,173 Current Learning Rate: 0.0024547929 +2024-11-11 14:33:11,982 Train Loss: 0.0078441, Val Loss: 0.0071975 +2024-11-11 14:33:11,982 Epoch 135/2000 +2024-11-11 14:33:26,885 Current Learning 
Rate: 0.0023875072 +2024-11-11 14:33:27,631 Train Loss: 0.0076429, Val Loss: 0.0071971 +2024-11-11 14:33:27,632 Epoch 136/2000 +2024-11-11 14:33:42,905 Current Learning Rate: 0.0023208660 +2024-11-11 14:33:42,906 Train Loss: 0.0077887, Val Loss: 0.0072025 +2024-11-11 14:33:42,906 Epoch 137/2000 +2024-11-11 14:33:59,633 Current Learning Rate: 0.0022548859 +2024-11-11 14:33:59,634 Train Loss: 0.0075687, Val Loss: 0.0072376 +2024-11-11 14:33:59,634 Epoch 138/2000 +2024-11-11 14:34:15,815 Current Learning Rate: 0.0021895831 +2024-11-11 14:34:16,588 Train Loss: 0.0076570, Val Loss: 0.0071839 +2024-11-11 14:34:16,588 Epoch 139/2000 +2024-11-11 14:34:31,610 Current Learning Rate: 0.0021249737 +2024-11-11 14:34:32,318 Train Loss: 0.0076112, Val Loss: 0.0070536 +2024-11-11 14:34:32,319 Epoch 140/2000 +2024-11-11 14:34:48,361 Current Learning Rate: 0.0020610737 +2024-11-11 14:34:48,362 Train Loss: 0.0074787, Val Loss: 0.0071142 +2024-11-11 14:34:48,362 Epoch 141/2000 +2024-11-11 14:35:04,661 Current Learning Rate: 0.0019978989 +2024-11-11 14:35:05,481 Train Loss: 0.0075355, Val Loss: 0.0070531 +2024-11-11 14:35:05,481 Epoch 142/2000 +2024-11-11 14:35:20,933 Current Learning Rate: 0.0019354647 +2024-11-11 14:35:21,667 Train Loss: 0.0074945, Val Loss: 0.0070093 +2024-11-11 14:35:21,667 Epoch 143/2000 +2024-11-11 14:35:37,283 Current Learning Rate: 0.0018737867 +2024-11-11 14:35:38,050 Train Loss: 0.0075831, Val Loss: 0.0069973 +2024-11-11 14:35:38,050 Epoch 144/2000 +2024-11-11 14:35:52,508 Current Learning Rate: 0.0018128801 +2024-11-11 14:35:52,509 Train Loss: 0.0074640, Val Loss: 0.0070031 +2024-11-11 14:35:52,509 Epoch 145/2000 +2024-11-11 14:36:07,813 Current Learning Rate: 0.0017527598 +2024-11-11 14:36:07,814 Train Loss: 0.0075974, Val Loss: 0.0070424 +2024-11-11 14:36:07,814 Epoch 146/2000 +2024-11-11 14:36:23,208 Current Learning Rate: 0.0016934407 +2024-11-11 14:36:24,031 Train Loss: 0.0074360, Val Loss: 0.0069656 +2024-11-11 14:36:24,032 Epoch 147/2000 +2024-11-11 
14:36:38,977 Current Learning Rate: 0.0016349374 +2024-11-11 14:36:40,023 Train Loss: 0.0074785, Val Loss: 0.0069548 +2024-11-11 14:36:40,023 Epoch 148/2000 +2024-11-11 14:36:55,988 Current Learning Rate: 0.0015772645 +2024-11-11 14:36:55,989 Train Loss: 0.0074445, Val Loss: 0.0069636 +2024-11-11 14:36:55,989 Epoch 149/2000 +2024-11-11 14:37:12,244 Current Learning Rate: 0.0015204360 +2024-11-11 14:37:13,285 Train Loss: 0.0073779, Val Loss: 0.0069269 +2024-11-11 14:37:13,286 Epoch 150/2000 +2024-11-11 14:37:29,307 Current Learning Rate: 0.0014644661 +2024-11-11 14:37:30,181 Train Loss: 0.0074302, Val Loss: 0.0069077 +2024-11-11 14:37:30,182 Epoch 151/2000 +2024-11-11 14:37:45,669 Current Learning Rate: 0.0014093685 +2024-11-11 14:37:45,670 Train Loss: 0.0075630, Val Loss: 0.0069273 +2024-11-11 14:37:45,670 Epoch 152/2000 +2024-11-11 14:38:01,698 Current Learning Rate: 0.0013551569 +2024-11-11 14:38:01,699 Train Loss: 0.0074667, Val Loss: 0.0069243 +2024-11-11 14:38:01,699 Epoch 153/2000 +2024-11-11 14:38:17,812 Current Learning Rate: 0.0013018445 +2024-11-11 14:38:17,812 Train Loss: 0.0074766, Val Loss: 0.0069102 +2024-11-11 14:38:17,813 Epoch 154/2000 +2024-11-11 14:38:33,435 Current Learning Rate: 0.0012494447 +2024-11-11 14:38:33,436 Train Loss: 0.0073373, Val Loss: 0.0070224 +2024-11-11 14:38:33,436 Epoch 155/2000 +2024-11-11 14:38:48,699 Current Learning Rate: 0.0011979702 +2024-11-11 14:38:49,458 Train Loss: 0.0072681, Val Loss: 0.0068770 +2024-11-11 14:38:49,458 Epoch 156/2000 +2024-11-11 14:39:03,830 Current Learning Rate: 0.0011474338 +2024-11-11 14:39:03,831 Train Loss: 0.0073033, Val Loss: 0.0068844 +2024-11-11 14:39:03,831 Epoch 157/2000 +2024-11-11 14:39:19,716 Current Learning Rate: 0.0010978480 +2024-11-11 14:39:19,716 Train Loss: 0.0072875, Val Loss: 0.0068927 +2024-11-11 14:39:19,717 Epoch 158/2000 +2024-11-11 14:39:35,281 Current Learning Rate: 0.0010492249 +2024-11-11 14:39:36,151 Train Loss: 0.0073433, Val Loss: 0.0068395 +2024-11-11 
14:39:36,152 Epoch 159/2000 +2024-11-11 14:39:51,357 Current Learning Rate: 0.0010015767 +2024-11-11 14:39:52,371 Train Loss: 0.0073250, Val Loss: 0.0068180 +2024-11-11 14:39:52,371 Epoch 160/2000 +2024-11-11 14:40:08,373 Current Learning Rate: 0.0009549150 +2024-11-11 14:40:09,421 Train Loss: 0.0073589, Val Loss: 0.0068086 +2024-11-11 14:40:09,422 Epoch 161/2000 +2024-11-11 14:40:25,682 Current Learning Rate: 0.0009092514 +2024-11-11 14:40:25,683 Train Loss: 0.0073513, Val Loss: 0.0068151 +2024-11-11 14:40:25,683 Epoch 162/2000 +2024-11-11 14:40:42,024 Current Learning Rate: 0.0008645971 +2024-11-11 14:40:42,024 Train Loss: 0.0073903, Val Loss: 0.0068148 +2024-11-11 14:40:42,025 Epoch 163/2000 +2024-11-11 14:40:57,038 Current Learning Rate: 0.0008209632 +2024-11-11 14:40:57,782 Train Loss: 0.0073884, Val Loss: 0.0068027 +2024-11-11 14:40:57,782 Epoch 164/2000 +2024-11-11 14:41:12,557 Current Learning Rate: 0.0007783604 +2024-11-11 14:41:13,381 Train Loss: 0.0073094, Val Loss: 0.0067945 +2024-11-11 14:41:13,381 Epoch 165/2000 +2024-11-11 14:41:27,996 Current Learning Rate: 0.0007367992 +2024-11-11 14:41:28,698 Train Loss: 0.0073065, Val Loss: 0.0067943 +2024-11-11 14:41:28,698 Epoch 166/2000 +2024-11-11 14:41:43,623 Current Learning Rate: 0.0006962899 +2024-11-11 14:41:44,318 Train Loss: 0.0073264, Val Loss: 0.0067916 +2024-11-11 14:41:44,319 Epoch 167/2000 +2024-11-11 14:41:59,347 Current Learning Rate: 0.0006568424 +2024-11-11 14:42:00,015 Train Loss: 0.0072393, Val Loss: 0.0067788 +2024-11-11 14:42:00,015 Epoch 168/2000 +2024-11-11 14:42:15,282 Current Learning Rate: 0.0006184666 +2024-11-11 14:42:16,149 Train Loss: 0.0072001, Val Loss: 0.0067757 +2024-11-11 14:42:16,149 Epoch 169/2000 +2024-11-11 14:42:31,300 Current Learning Rate: 0.0005811718 +2024-11-11 14:42:32,027 Train Loss: 0.0072500, Val Loss: 0.0067664 +2024-11-11 14:42:32,027 Epoch 170/2000 +2024-11-11 14:42:46,442 Current Learning Rate: 0.0005449674 +2024-11-11 14:42:47,150 Train Loss: 0.0071376, Val 
Loss: 0.0067461 +2024-11-11 14:42:47,151 Epoch 171/2000 +2024-11-11 14:43:02,596 Current Learning Rate: 0.0005098621 +2024-11-11 14:43:03,505 Train Loss: 0.0072696, Val Loss: 0.0067427 +2024-11-11 14:43:03,506 Epoch 172/2000 +2024-11-11 14:43:19,383 Current Learning Rate: 0.0004758647 +2024-11-11 14:43:19,384 Train Loss: 0.0072316, Val Loss: 0.0067536 +2024-11-11 14:43:19,385 Epoch 173/2000 +2024-11-11 14:43:36,058 Current Learning Rate: 0.0004429836 +2024-11-11 14:43:36,791 Train Loss: 0.0071943, Val Loss: 0.0067361 +2024-11-11 14:43:36,791 Epoch 174/2000 +2024-11-11 14:43:51,310 Current Learning Rate: 0.0004112269 +2024-11-11 14:43:52,133 Train Loss: 0.0071758, Val Loss: 0.0067280 +2024-11-11 14:43:52,133 Epoch 175/2000 +2024-11-11 14:44:07,358 Current Learning Rate: 0.0003806023 +2024-11-11 14:44:07,359 Train Loss: 0.0072222, Val Loss: 0.0067455 +2024-11-11 14:44:07,359 Epoch 176/2000 +2024-11-11 14:44:22,137 Current Learning Rate: 0.0003511176 +2024-11-11 14:44:23,001 Train Loss: 0.0071256, Val Loss: 0.0067152 +2024-11-11 14:44:23,001 Epoch 177/2000 +2024-11-11 14:44:37,471 Current Learning Rate: 0.0003227798 +2024-11-11 14:44:37,471 Train Loss: 0.0071485, Val Loss: 0.0067262 +2024-11-11 14:44:37,472 Epoch 178/2000 +2024-11-11 14:44:52,901 Current Learning Rate: 0.0002955962 +2024-11-11 14:44:53,647 Train Loss: 0.0071857, Val Loss: 0.0067099 +2024-11-11 14:44:53,647 Epoch 179/2000 +2024-11-11 14:45:08,428 Current Learning Rate: 0.0002695732 +2024-11-11 14:45:08,429 Train Loss: 0.0072040, Val Loss: 0.0067122 +2024-11-11 14:45:08,430 Epoch 180/2000 +2024-11-11 14:45:24,485 Current Learning Rate: 0.0002447174 +2024-11-11 14:45:24,486 Train Loss: 0.0072393, Val Loss: 0.0067132 +2024-11-11 14:45:24,486 Epoch 181/2000 +2024-11-11 14:45:40,812 Current Learning Rate: 0.0002210349 +2024-11-11 14:45:41,560 Train Loss: 0.0071451, Val Loss: 0.0066998 +2024-11-11 14:45:41,561 Epoch 182/2000 +2024-11-11 14:45:56,967 Current Learning Rate: 0.0001985316 +2024-11-11 
14:45:56,969 Train Loss: 0.0071755, Val Loss: 0.0067005 +2024-11-11 14:45:56,969 Epoch 183/2000 +2024-11-11 14:46:12,765 Current Learning Rate: 0.0001772129 +2024-11-11 14:46:12,766 Train Loss: 0.0073248, Val Loss: 0.0067023 +2024-11-11 14:46:12,766 Epoch 184/2000 +2024-11-11 14:46:28,837 Current Learning Rate: 0.0001570842 +2024-11-11 14:46:29,586 Train Loss: 0.0071232, Val Loss: 0.0066986 +2024-11-11 14:46:29,587 Epoch 185/2000 +2024-11-11 14:46:44,200 Current Learning Rate: 0.0001381504 +2024-11-11 14:46:44,200 Train Loss: 0.0071744, Val Loss: 0.0067003 +2024-11-11 14:46:44,200 Epoch 186/2000 +2024-11-11 14:47:00,589 Current Learning Rate: 0.0001204162 +2024-11-11 14:47:00,590 Train Loss: 0.0070693, Val Loss: 0.0067045 +2024-11-11 14:47:00,590 Epoch 187/2000 +2024-11-11 14:47:16,675 Current Learning Rate: 0.0001038859 +2024-11-11 14:47:17,469 Train Loss: 0.0072742, Val Loss: 0.0066943 +2024-11-11 14:47:17,469 Epoch 188/2000 +2024-11-11 14:47:33,364 Current Learning Rate: 0.0000885637 +2024-11-11 14:47:34,337 Train Loss: 0.0071269, Val Loss: 0.0066895 +2024-11-11 14:47:34,338 Epoch 189/2000 +2024-11-11 14:47:50,191 Current Learning Rate: 0.0000744534 +2024-11-11 14:47:51,126 Train Loss: 0.0071737, Val Loss: 0.0066882 +2024-11-11 14:47:51,127 Epoch 190/2000 +2024-11-11 14:48:06,026 Current Learning Rate: 0.0000615583 +2024-11-11 14:48:06,820 Train Loss: 0.0070884, Val Loss: 0.0066866 +2024-11-11 14:48:06,820 Epoch 191/2000 +2024-11-11 14:48:21,312 Current Learning Rate: 0.0000498817 +2024-11-11 14:48:21,313 Train Loss: 0.0071572, Val Loss: 0.0066868 +2024-11-11 14:48:21,313 Epoch 192/2000 +2024-11-11 14:48:37,373 Current Learning Rate: 0.0000394265 +2024-11-11 14:48:38,177 Train Loss: 0.0071594, Val Loss: 0.0066853 +2024-11-11 14:48:38,178 Epoch 193/2000 +2024-11-11 14:48:52,743 Current Learning Rate: 0.0000301952 +2024-11-11 14:48:52,744 Train Loss: 0.0071081, Val Loss: 0.0066857 +2024-11-11 14:48:52,744 Epoch 194/2000 +2024-11-11 14:49:08,362 Current Learning 
Rate: 0.0000221902 +2024-11-11 14:49:09,116 Train Loss: 0.0071270, Val Loss: 0.0066851 +2024-11-11 14:49:09,117 Epoch 195/2000 +2024-11-11 14:49:23,562 Current Learning Rate: 0.0000154133 +2024-11-11 14:49:24,293 Train Loss: 0.0071012, Val Loss: 0.0066851 +2024-11-11 14:49:24,293 Epoch 196/2000 +2024-11-11 14:49:38,864 Current Learning Rate: 0.0000098664 +2024-11-11 14:49:38,865 Train Loss: 0.0071697, Val Loss: 0.0066872 +2024-11-11 14:49:38,865 Epoch 197/2000 +2024-11-11 14:49:54,603 Current Learning Rate: 0.0000055506 +2024-11-11 14:49:54,604 Train Loss: 0.0071689, Val Loss: 0.0066890 +2024-11-11 14:49:54,604 Epoch 198/2000 +2024-11-11 14:50:11,226 Current Learning Rate: 0.0000024672 +2024-11-11 14:50:11,227 Train Loss: 0.0072255, Val Loss: 0.0066871 +2024-11-11 14:50:11,227 Epoch 199/2000 +2024-11-11 14:50:27,207 Current Learning Rate: 0.0000006168 +2024-11-11 14:50:27,208 Train Loss: 0.0071076, Val Loss: 0.0066866 +2024-11-11 14:50:27,209 Epoch 200/2000 +2024-11-11 14:50:42,650 Current Learning Rate: 0.0000000000 +2024-11-11 14:50:42,651 Train Loss: 0.0071233, Val Loss: 0.0066863 +2024-11-11 14:50:42,651 Epoch 201/2000 +2024-11-11 14:50:59,131 Current Learning Rate: 0.0000006168 +2024-11-11 14:50:59,132 Train Loss: 0.0071115, Val Loss: 0.0066865 +2024-11-11 14:50:59,132 Epoch 202/2000 +2024-11-11 14:51:15,454 Current Learning Rate: 0.0000024672 +2024-11-11 14:51:15,455 Train Loss: 0.0071187, Val Loss: 0.0066864 +2024-11-11 14:51:15,456 Epoch 203/2000 +2024-11-11 14:51:30,824 Current Learning Rate: 0.0000055506 +2024-11-11 14:51:30,825 Train Loss: 0.0071984, Val Loss: 0.0066861 +2024-11-11 14:51:30,825 Epoch 204/2000 +2024-11-11 14:51:46,130 Current Learning Rate: 0.0000098664 +2024-11-11 14:51:46,131 Train Loss: 0.0071159, Val Loss: 0.0066870 +2024-11-11 14:51:46,131 Epoch 205/2000 +2024-11-11 14:52:01,915 Current Learning Rate: 0.0000154133 +2024-11-11 14:52:01,916 Train Loss: 0.0071094, Val Loss: 0.0066877 +2024-11-11 14:52:01,916 Epoch 206/2000 +2024-11-11 
14:52:18,069 Current Learning Rate: 0.0000221902 +2024-11-11 14:52:18,069 Train Loss: 0.0071672, Val Loss: 0.0066869 +2024-11-11 14:52:18,070 Epoch 207/2000 +2024-11-11 14:52:34,326 Current Learning Rate: 0.0000301952 +2024-11-11 14:52:35,349 Train Loss: 0.0071063, Val Loss: 0.0066836 +2024-11-11 14:52:35,349 Epoch 208/2000 +2024-11-11 14:52:50,950 Current Learning Rate: 0.0000394265 +2024-11-11 14:52:50,951 Train Loss: 0.0072819, Val Loss: 0.0066849 +2024-11-11 14:52:50,951 Epoch 209/2000 +2024-11-11 14:53:06,823 Current Learning Rate: 0.0000498817 +2024-11-11 14:53:06,824 Train Loss: 0.0071576, Val Loss: 0.0066842 +2024-11-11 14:53:06,824 Epoch 210/2000 +2024-11-11 14:53:22,350 Current Learning Rate: 0.0000615583 +2024-11-11 14:53:23,380 Train Loss: 0.0071084, Val Loss: 0.0066829 +2024-11-11 14:53:23,380 Epoch 211/2000 +2024-11-11 14:53:38,583 Current Learning Rate: 0.0000744534 +2024-11-11 14:53:39,588 Train Loss: 0.0071460, Val Loss: 0.0066817 +2024-11-11 14:53:39,589 Epoch 212/2000 +2024-11-11 14:53:56,036 Current Learning Rate: 0.0000885637 +2024-11-11 14:53:57,106 Train Loss: 0.0070951, Val Loss: 0.0066803 +2024-11-11 14:53:57,106 Epoch 213/2000 +2024-11-11 14:54:13,344 Current Learning Rate: 0.0001038859 +2024-11-11 14:54:13,345 Train Loss: 0.0071032, Val Loss: 0.0066816 +2024-11-11 14:54:13,345 Epoch 214/2000 +2024-11-11 14:54:29,228 Current Learning Rate: 0.0001204162 +2024-11-11 14:54:29,229 Train Loss: 0.0071267, Val Loss: 0.0066882 +2024-11-11 14:54:29,229 Epoch 215/2000 +2024-11-11 14:54:44,483 Current Learning Rate: 0.0001381504 +2024-11-11 14:54:44,484 Train Loss: 0.0073151, Val Loss: 0.0066887 +2024-11-11 14:54:44,484 Epoch 216/2000 +2024-11-11 14:55:00,812 Current Learning Rate: 0.0001570842 +2024-11-11 14:55:00,812 Train Loss: 0.0072554, Val Loss: 0.0066824 +2024-11-11 14:55:00,812 Epoch 217/2000 +2024-11-11 14:55:16,058 Current Learning Rate: 0.0001772129 +2024-11-11 14:55:16,059 Train Loss: 0.0071964, Val Loss: 0.0066823 +2024-11-11 
14:55:16,059 Epoch 218/2000 +2024-11-11 14:55:33,319 Current Learning Rate: 0.0001985316 +2024-11-11 14:55:34,352 Train Loss: 0.0072060, Val Loss: 0.0066800 +2024-11-11 14:55:34,352 Epoch 219/2000 +2024-11-11 14:55:50,242 Current Learning Rate: 0.0002210349 +2024-11-11 14:55:50,243 Train Loss: 0.0074376, Val Loss: 0.0066992 +2024-11-11 14:55:50,243 Epoch 220/2000 +2024-11-11 14:56:06,937 Current Learning Rate: 0.0002447174 +2024-11-11 14:56:06,937 Train Loss: 0.0073687, Val Loss: 0.0066983 +2024-11-11 14:56:06,937 Epoch 221/2000 +2024-11-11 14:56:21,670 Current Learning Rate: 0.0002695732 +2024-11-11 14:56:21,671 Train Loss: 0.0071589, Val Loss: 0.0066803 +2024-11-11 14:56:21,671 Epoch 222/2000 +2024-11-11 14:56:37,122 Current Learning Rate: 0.0002955962 +2024-11-11 14:56:37,929 Train Loss: 0.0070669, Val Loss: 0.0066722 +2024-11-11 14:56:37,930 Epoch 223/2000 +2024-11-11 14:56:52,638 Current Learning Rate: 0.0003227798 +2024-11-11 14:56:52,639 Train Loss: 0.0072259, Val Loss: 0.0066722 +2024-11-11 14:56:52,639 Epoch 224/2000 +2024-11-11 14:57:08,152 Current Learning Rate: 0.0003511176 +2024-11-11 14:57:08,153 Train Loss: 0.0071376, Val Loss: 0.0066735 +2024-11-11 14:57:08,153 Epoch 225/2000 +2024-11-11 14:57:24,526 Current Learning Rate: 0.0003806023 +2024-11-11 14:57:25,578 Train Loss: 0.0070889, Val Loss: 0.0066610 +2024-11-11 14:57:25,579 Epoch 226/2000 +2024-11-11 14:57:41,353 Current Learning Rate: 0.0004112269 +2024-11-11 14:57:42,354 Train Loss: 0.0070849, Val Loss: 0.0066532 +2024-11-11 14:57:42,354 Epoch 227/2000 +2024-11-11 14:57:57,520 Current Learning Rate: 0.0004429836 +2024-11-11 14:57:57,521 Train Loss: 0.0072774, Val Loss: 0.0066535 +2024-11-11 14:57:57,521 Epoch 228/2000 +2024-11-11 14:58:13,445 Current Learning Rate: 0.0004758647 +2024-11-11 14:58:13,446 Train Loss: 0.0071172, Val Loss: 0.0068272 +2024-11-11 14:58:13,446 Epoch 229/2000 +2024-11-11 14:58:30,203 Current Learning Rate: 0.0005098621 +2024-11-11 14:58:30,203 Train Loss: 0.0070752, Val 
Loss: 0.0066571 +2024-11-11 14:58:30,204 Epoch 230/2000 +2024-11-11 14:58:46,594 Current Learning Rate: 0.0005449674 +2024-11-11 14:58:46,595 Train Loss: 0.0072198, Val Loss: 0.0067566 +2024-11-11 14:58:46,595 Epoch 231/2000 +2024-11-11 14:59:03,017 Current Learning Rate: 0.0005811718 +2024-11-11 14:59:03,018 Train Loss: 0.0071745, Val Loss: 0.0068617 +2024-11-11 14:59:03,018 Epoch 232/2000 +2024-11-11 14:59:18,849 Current Learning Rate: 0.0006184666 +2024-11-11 14:59:18,850 Train Loss: 0.0071195, Val Loss: 0.0066977 +2024-11-11 14:59:18,850 Epoch 233/2000 +2024-11-11 14:59:35,116 Current Learning Rate: 0.0006568424 +2024-11-11 14:59:35,117 Train Loss: 0.0071271, Val Loss: 0.0066595 +2024-11-11 14:59:35,117 Epoch 234/2000 +2024-11-11 14:59:50,317 Current Learning Rate: 0.0006962899 +2024-11-11 14:59:51,113 Train Loss: 0.0071020, Val Loss: 0.0066361 +2024-11-11 14:59:51,113 Epoch 235/2000 +2024-11-11 15:00:07,488 Current Learning Rate: 0.0007367992 +2024-11-11 15:00:07,489 Train Loss: 0.0070847, Val Loss: 0.0066725 +2024-11-11 15:00:07,489 Epoch 236/2000 +2024-11-11 15:00:23,002 Current Learning Rate: 0.0007783604 +2024-11-11 15:00:23,791 Train Loss: 0.0070665, Val Loss: 0.0065985 +2024-11-11 15:00:23,792 Epoch 237/2000 +2024-11-11 15:00:38,908 Current Learning Rate: 0.0008209632 +2024-11-11 15:00:38,908 Train Loss: 0.0072264, Val Loss: 0.0066265 +2024-11-11 15:00:38,909 Epoch 238/2000 +2024-11-11 15:00:54,586 Current Learning Rate: 0.0008645971 +2024-11-11 15:00:54,587 Train Loss: 0.0072261, Val Loss: 0.0066852 +2024-11-11 15:00:54,587 Epoch 239/2000 +2024-11-11 15:01:10,337 Current Learning Rate: 0.0009092514 +2024-11-11 15:01:11,348 Train Loss: 0.0070137, Val Loss: 0.0065943 +2024-11-11 15:01:11,349 Epoch 240/2000 +2024-11-11 15:01:27,526 Current Learning Rate: 0.0009549150 +2024-11-11 15:01:27,527 Train Loss: 0.0071517, Val Loss: 0.0066673 +2024-11-11 15:01:27,528 Epoch 241/2000 +2024-11-11 15:01:43,314 Current Learning Rate: 0.0010015767 +2024-11-11 
15:01:43,315 Train Loss: 0.0071173, Val Loss: 0.0066433 +2024-11-11 15:01:43,315 Epoch 242/2000 +2024-11-11 15:01:58,615 Current Learning Rate: 0.0010492249 +2024-11-11 15:01:59,421 Train Loss: 0.0070518, Val Loss: 0.0065926 +2024-11-11 15:01:59,421 Epoch 243/2000 +2024-11-11 15:02:14,143 Current Learning Rate: 0.0010978480 +2024-11-11 15:02:14,143 Train Loss: 0.0070750, Val Loss: 0.0068506 +2024-11-11 15:02:14,144 Epoch 244/2000 +2024-11-11 15:02:31,550 Current Learning Rate: 0.0011474338 +2024-11-11 15:02:31,551 Train Loss: 0.0070213, Val Loss: 0.0067021 +2024-11-11 15:02:31,551 Epoch 245/2000 +2024-11-11 15:02:48,857 Current Learning Rate: 0.0011979702 +2024-11-11 15:02:49,666 Train Loss: 0.0070417, Val Loss: 0.0065648 +2024-11-11 15:02:49,666 Epoch 246/2000 +2024-11-11 15:03:04,849 Current Learning Rate: 0.0012494447 +2024-11-11 15:03:04,850 Train Loss: 0.0071130, Val Loss: 0.0067087 +2024-11-11 15:03:04,851 Epoch 247/2000 +2024-11-11 15:03:20,549 Current Learning Rate: 0.0013018445 +2024-11-11 15:03:20,550 Train Loss: 0.0069857, Val Loss: 0.0067104 +2024-11-11 15:03:20,550 Epoch 248/2000 +2024-11-11 15:03:36,330 Current Learning Rate: 0.0013551569 +2024-11-11 15:03:36,331 Train Loss: 0.0074562, Val Loss: 0.0067111 +2024-11-11 15:03:36,331 Epoch 249/2000 +2024-11-11 15:03:52,210 Current Learning Rate: 0.0014093685 +2024-11-11 15:03:52,210 Train Loss: 0.0072090, Val Loss: 0.0068691 +2024-11-11 15:03:52,211 Epoch 250/2000 +2024-11-11 15:04:08,081 Current Learning Rate: 0.0014644661 +2024-11-11 15:04:08,947 Train Loss: 0.0070478, Val Loss: 0.0065327 +2024-11-11 15:04:08,947 Epoch 251/2000 +2024-11-11 15:04:24,339 Current Learning Rate: 0.0015204360 +2024-11-11 15:04:24,340 Train Loss: 0.0070886, Val Loss: 0.0065896 +2024-11-11 15:04:24,340 Epoch 252/2000 +2024-11-11 15:04:40,240 Current Learning Rate: 0.0015772645 +2024-11-11 15:04:40,240 Train Loss: 0.0071351, Val Loss: 0.0065516 +2024-11-11 15:04:40,240 Epoch 253/2000 +2024-11-11 15:04:56,623 Current Learning 
Rate: 0.0016349374 +2024-11-11 15:04:57,379 Train Loss: 0.0068435, Val Loss: 0.0064097 +2024-11-11 15:04:57,379 Epoch 254/2000 +2024-11-11 15:05:12,359 Current Learning Rate: 0.0016934407 +2024-11-11 15:05:12,360 Train Loss: 0.0068755, Val Loss: 0.0077757 +2024-11-11 15:05:12,360 Epoch 255/2000 +2024-11-11 15:05:28,933 Current Learning Rate: 0.0017527598 +2024-11-11 15:05:28,933 Train Loss: 0.0070848, Val Loss: 0.0064744 +2024-11-11 15:05:28,934 Epoch 256/2000 +2024-11-11 15:05:43,919 Current Learning Rate: 0.0018128801 +2024-11-11 15:05:43,920 Train Loss: 0.0069190, Val Loss: 0.0065420 +2024-11-11 15:05:43,920 Epoch 257/2000 +2024-11-11 15:05:59,080 Current Learning Rate: 0.0018737867 +2024-11-11 15:05:59,781 Train Loss: 0.0067664, Val Loss: 0.0063515 +2024-11-11 15:05:59,781 Epoch 258/2000 +2024-11-11 15:06:15,350 Current Learning Rate: 0.0019354647 +2024-11-11 15:06:16,171 Train Loss: 0.0069395, Val Loss: 0.0062760 +2024-11-11 15:06:16,172 Epoch 259/2000 +2024-11-11 15:06:32,221 Current Learning Rate: 0.0019978989 +2024-11-11 15:06:32,221 Train Loss: 0.0067314, Val Loss: 0.0064107 +2024-11-11 15:06:32,222 Epoch 260/2000 +2024-11-11 15:06:49,390 Current Learning Rate: 0.0020610737 +2024-11-11 15:06:49,391 Train Loss: 0.0069326, Val Loss: 0.0065176 +2024-11-11 15:06:49,391 Epoch 261/2000 +2024-11-11 15:07:05,060 Current Learning Rate: 0.0021249737 +2024-11-11 15:07:05,060 Train Loss: 0.0069830, Val Loss: 0.0064641 +2024-11-11 15:07:05,061 Epoch 262/2000 +2024-11-11 15:07:20,826 Current Learning Rate: 0.0021895831 +2024-11-11 15:07:21,915 Train Loss: 0.0068408, Val Loss: 0.0062663 +2024-11-11 15:07:21,915 Epoch 263/2000 +2024-11-11 15:07:38,287 Current Learning Rate: 0.0022548859 +2024-11-11 15:07:38,288 Train Loss: 0.0067675, Val Loss: 0.0064039 +2024-11-11 15:07:38,288 Epoch 264/2000 +2024-11-11 15:07:54,780 Current Learning Rate: 0.0023208660 +2024-11-11 15:07:55,593 Train Loss: 0.0069052, Val Loss: 0.0062365 +2024-11-11 15:07:55,593 Epoch 265/2000 +2024-11-11 
15:08:10,051 Current Learning Rate: 0.0023875072 +2024-11-11 15:08:10,927 Train Loss: 0.0067346, Val Loss: 0.0061322 +2024-11-11 15:08:10,927 Epoch 266/2000 +2024-11-11 15:08:26,223 Current Learning Rate: 0.0024547929 +2024-11-11 15:08:26,224 Train Loss: 0.0069941, Val Loss: 0.0063703 +2024-11-11 15:08:26,224 Epoch 267/2000 +2024-11-11 15:08:41,711 Current Learning Rate: 0.0025227067 +2024-11-11 15:08:41,712 Train Loss: 0.0069256, Val Loss: 0.0064368 +2024-11-11 15:08:41,712 Epoch 268/2000 +2024-11-11 15:08:58,433 Current Learning Rate: 0.0025912316 +2024-11-11 15:08:58,434 Train Loss: 0.0069142, Val Loss: 0.0061911 +2024-11-11 15:08:58,434 Epoch 269/2000 +2024-11-11 15:09:14,540 Current Learning Rate: 0.0026603509 +2024-11-11 15:09:14,541 Train Loss: 0.0067452, Val Loss: 0.0063971 +2024-11-11 15:09:14,541 Epoch 270/2000 +2024-11-11 15:09:31,423 Current Learning Rate: 0.0027300475 +2024-11-11 15:09:31,423 Train Loss: 0.0066415, Val Loss: 0.0061838 +2024-11-11 15:09:31,423 Epoch 271/2000 +2024-11-11 15:09:46,756 Current Learning Rate: 0.0028003042 +2024-11-11 15:09:47,536 Train Loss: 0.0068366, Val Loss: 0.0059799 +2024-11-11 15:09:47,536 Epoch 272/2000 +2024-11-11 15:10:02,691 Current Learning Rate: 0.0028711035 +2024-11-11 15:10:02,692 Train Loss: 0.0064761, Val Loss: 0.0064109 +2024-11-11 15:10:02,692 Epoch 273/2000 +2024-11-11 15:10:18,359 Current Learning Rate: 0.0029424282 +2024-11-11 15:10:18,359 Train Loss: 0.0079866, Val Loss: 0.0064800 +2024-11-11 15:10:18,359 Epoch 274/2000 +2024-11-11 15:10:33,310 Current Learning Rate: 0.0030142605 +2024-11-11 15:10:34,040 Train Loss: 0.0064719, Val Loss: 0.0059216 +2024-11-11 15:10:34,040 Epoch 275/2000 +2024-11-11 15:10:48,702 Current Learning Rate: 0.0030865828 +2024-11-11 15:10:49,468 Train Loss: 0.0063961, Val Loss: 0.0059214 +2024-11-11 15:10:49,469 Epoch 276/2000 +2024-11-11 15:11:03,948 Current Learning Rate: 0.0031593772 +2024-11-11 15:11:03,949 Train Loss: 0.0064374, Val Loss: 0.0063072 +2024-11-11 
15:11:03,949 Epoch 277/2000 +2024-11-11 15:11:20,020 Current Learning Rate: 0.0032326258 +2024-11-11 15:11:20,020 Train Loss: 0.0063913, Val Loss: 0.0059689 +2024-11-11 15:11:20,020 Epoch 278/2000 +2024-11-11 15:11:35,884 Current Learning Rate: 0.0033063104 +2024-11-11 15:11:36,678 Train Loss: 0.0063676, Val Loss: 0.0058734 +2024-11-11 15:11:36,678 Epoch 279/2000 +2024-11-11 15:11:51,198 Current Learning Rate: 0.0033804129 +2024-11-11 15:11:51,199 Train Loss: 0.0070263, Val Loss: 0.0060165 +2024-11-11 15:11:51,199 Epoch 280/2000 +2024-11-11 15:12:06,757 Current Learning Rate: 0.0034549150 +2024-11-11 15:12:07,563 Train Loss: 0.0064468, Val Loss: 0.0058134 +2024-11-11 15:12:07,563 Epoch 281/2000 +2024-11-11 15:12:22,441 Current Learning Rate: 0.0035297984 +2024-11-11 15:12:22,442 Train Loss: 0.0063803, Val Loss: 0.0059726 +2024-11-11 15:12:22,442 Epoch 282/2000 +2024-11-11 15:12:38,403 Current Learning Rate: 0.0036050445 +2024-11-11 15:12:38,404 Train Loss: 0.0065073, Val Loss: 0.0061817 +2024-11-11 15:12:38,404 Epoch 283/2000 +2024-11-11 15:12:54,608 Current Learning Rate: 0.0036806348 +2024-11-11 15:12:54,608 Train Loss: 0.0064057, Val Loss: 0.0058602 +2024-11-11 15:12:54,609 Epoch 284/2000 +2024-11-11 15:13:09,226 Current Learning Rate: 0.0037565506 +2024-11-11 15:13:10,089 Train Loss: 0.0066021, Val Loss: 0.0057578 +2024-11-11 15:13:10,089 Epoch 285/2000 +2024-11-11 15:13:24,732 Current Learning Rate: 0.0038327732 +2024-11-11 15:13:25,544 Train Loss: 0.0062451, Val Loss: 0.0056805 +2024-11-11 15:13:25,544 Epoch 286/2000 +2024-11-11 15:13:40,205 Current Learning Rate: 0.0039092838 +2024-11-11 15:13:40,983 Train Loss: 0.0062877, Val Loss: 0.0055990 +2024-11-11 15:13:40,984 Epoch 287/2000 +2024-11-11 15:13:55,568 Current Learning Rate: 0.0039860635 +2024-11-11 15:13:55,569 Train Loss: 0.0062811, Val Loss: 0.0060695 +2024-11-11 15:13:55,569 Epoch 288/2000 +2024-11-11 15:14:11,163 Current Learning Rate: 0.0040630934 +2024-11-11 15:14:11,164 Train Loss: 0.0060915, Val 
Loss: 0.0056859 +2024-11-11 15:14:11,164 Epoch 289/2000 +2024-11-11 15:14:27,326 Current Learning Rate: 0.0041403545 +2024-11-11 15:14:27,327 Train Loss: 0.0065595, Val Loss: 0.0059594 +2024-11-11 15:14:27,327 Epoch 290/2000 +2024-11-11 15:14:43,423 Current Learning Rate: 0.0042178277 +2024-11-11 15:14:43,423 Train Loss: 0.0061185, Val Loss: 0.0057626 +2024-11-11 15:14:43,423 Epoch 291/2000 +2024-11-11 15:14:58,230 Current Learning Rate: 0.0042954938 +2024-11-11 15:14:58,230 Train Loss: 0.0063215, Val Loss: 0.0056026 +2024-11-11 15:14:58,230 Epoch 292/2000 +2024-11-11 15:15:14,010 Current Learning Rate: 0.0043733338 +2024-11-11 15:15:14,831 Train Loss: 0.0060687, Val Loss: 0.0055891 +2024-11-11 15:15:14,832 Epoch 293/2000 +2024-11-11 15:15:29,437 Current Learning Rate: 0.0044513284 +2024-11-11 15:15:29,438 Train Loss: 0.0068635, Val Loss: 0.0066914 +2024-11-11 15:15:29,438 Epoch 294/2000 +2024-11-11 15:15:45,421 Current Learning Rate: 0.0045294584 +2024-11-11 15:15:45,422 Train Loss: 0.0064227, Val Loss: 0.0056219 +2024-11-11 15:15:45,422 Epoch 295/2000 +2024-11-11 15:16:01,130 Current Learning Rate: 0.0046077045 +2024-11-11 15:16:01,883 Train Loss: 0.0058091, Val Loss: 0.0054769 +2024-11-11 15:16:01,883 Epoch 296/2000 +2024-11-11 15:16:17,237 Current Learning Rate: 0.0046860474 +2024-11-11 15:16:18,313 Train Loss: 0.0060037, Val Loss: 0.0054160 +2024-11-11 15:16:18,313 Epoch 297/2000 +2024-11-11 15:16:34,185 Current Learning Rate: 0.0047644677 +2024-11-11 15:16:34,186 Train Loss: 0.0059141, Val Loss: 0.0054338 +2024-11-11 15:16:34,187 Epoch 298/2000 +2024-11-11 15:16:48,856 Current Learning Rate: 0.0048429462 +2024-11-11 15:16:48,857 Train Loss: 0.0058849, Val Loss: 0.0055657 +2024-11-11 15:16:48,857 Epoch 299/2000 +2024-11-11 15:17:04,508 Current Learning Rate: 0.0049214634 +2024-11-11 15:17:04,509 Train Loss: 0.0061243, Val Loss: 0.0054230 +2024-11-11 15:17:04,509 Epoch 300/2000 +2024-11-11 15:17:19,857 Current Learning Rate: 0.0050000000 +2024-11-11 
15:17:19,858 Train Loss: 0.0058888, Val Loss: 0.0054293 +2024-11-11 15:17:19,858 Epoch 301/2000 +2024-11-11 15:17:35,862 Current Learning Rate: 0.0050785366 +2024-11-11 15:17:35,863 Train Loss: 0.0057903, Val Loss: 0.0056493 +2024-11-11 15:17:35,863 Epoch 302/2000 +2024-11-11 15:17:51,352 Current Learning Rate: 0.0051570538 +2024-11-11 15:17:52,087 Train Loss: 0.0056770, Val Loss: 0.0053373 +2024-11-11 15:17:52,088 Epoch 303/2000 +2024-11-11 15:18:06,363 Current Learning Rate: 0.0052355323 +2024-11-11 15:18:06,364 Train Loss: 0.0060834, Val Loss: 0.0056789 +2024-11-11 15:18:06,364 Epoch 304/2000 +2024-11-11 15:18:21,697 Current Learning Rate: 0.0053139526 +2024-11-11 15:18:21,698 Train Loss: 0.0057881, Val Loss: 0.0055695 +2024-11-11 15:18:21,698 Epoch 305/2000 +2024-11-11 15:18:37,500 Current Learning Rate: 0.0053922955 +2024-11-11 15:18:38,558 Train Loss: 0.0057892, Val Loss: 0.0052579 +2024-11-11 15:18:38,558 Epoch 306/2000 +2024-11-11 15:18:54,840 Current Learning Rate: 0.0054705416 +2024-11-11 15:18:54,841 Train Loss: 0.0056334, Val Loss: 0.0060894 +2024-11-11 15:18:54,841 Epoch 307/2000 +2024-11-11 15:19:11,104 Current Learning Rate: 0.0055486716 +2024-11-11 15:19:11,105 Train Loss: 0.0059123, Val Loss: 0.0059840 +2024-11-11 15:19:11,105 Epoch 308/2000 +2024-11-11 15:19:26,922 Current Learning Rate: 0.0056266662 +2024-11-11 15:19:26,922 Train Loss: 0.0056471, Val Loss: 0.0054598 +2024-11-11 15:19:26,923 Epoch 309/2000 +2024-11-11 15:19:41,710 Current Learning Rate: 0.0057045062 +2024-11-11 15:19:41,710 Train Loss: 0.0057048, Val Loss: 0.0055944 +2024-11-11 15:19:41,710 Epoch 310/2000 +2024-11-11 15:19:58,069 Current Learning Rate: 0.0057821723 +2024-11-11 15:19:58,070 Train Loss: 0.0056177, Val Loss: 0.0061200 +2024-11-11 15:19:58,070 Epoch 311/2000 +2024-11-11 15:20:13,596 Current Learning Rate: 0.0058596455 +2024-11-11 15:20:14,350 Train Loss: 0.0056118, Val Loss: 0.0051161 +2024-11-11 15:20:14,350 Epoch 312/2000 +2024-11-11 15:20:29,284 Current Learning 
Rate: 0.0059369066 +2024-11-11 15:20:29,285 Train Loss: 0.0054531, Val Loss: 0.0070531 +2024-11-11 15:20:29,286 Epoch 313/2000 +2024-11-11 15:20:44,765 Current Learning Rate: 0.0060139365 +2024-11-11 15:20:44,765 Train Loss: 0.0058016, Val Loss: 0.0055967 +2024-11-11 15:20:44,765 Epoch 314/2000 +2024-11-11 15:21:00,523 Current Learning Rate: 0.0060907162 +2024-11-11 15:21:01,227 Train Loss: 0.0054130, Val Loss: 0.0049143 +2024-11-11 15:21:01,227 Epoch 315/2000 +2024-11-11 15:21:15,949 Current Learning Rate: 0.0061672268 +2024-11-11 15:21:15,949 Train Loss: 0.0056265, Val Loss: 0.0052043 +2024-11-11 15:21:15,949 Epoch 316/2000 +2024-11-11 15:21:31,215 Current Learning Rate: 0.0062434494 +2024-11-11 15:21:31,215 Train Loss: 0.0054177, Val Loss: 0.0049175 +2024-11-11 15:21:31,215 Epoch 317/2000 +2024-11-11 15:21:47,057 Current Learning Rate: 0.0063193652 +2024-11-11 15:21:47,060 Train Loss: 0.0052038, Val Loss: 0.0055301 +2024-11-11 15:21:47,066 Epoch 318/2000 +2024-11-11 15:22:03,473 Current Learning Rate: 0.0063949555 +2024-11-11 15:22:03,474 Train Loss: 0.0052530, Val Loss: 0.0049315 +2024-11-11 15:22:03,474 Epoch 319/2000 +2024-11-11 15:22:19,409 Current Learning Rate: 0.0064702016 +2024-11-11 15:22:19,409 Train Loss: 0.0053585, Val Loss: 0.0050218 +2024-11-11 15:22:19,409 Epoch 320/2000 +2024-11-11 15:22:35,355 Current Learning Rate: 0.0065450850 +2024-11-11 15:22:35,356 Train Loss: 0.0053360, Val Loss: 0.0052287 +2024-11-11 15:22:35,357 Epoch 321/2000 +2024-11-11 15:22:52,864 Current Learning Rate: 0.0066195871 +2024-11-11 15:22:53,914 Train Loss: 0.0052233, Val Loss: 0.0048462 +2024-11-11 15:22:53,915 Epoch 322/2000 +2024-11-11 15:23:10,018 Current Learning Rate: 0.0066936896 +2024-11-11 15:23:10,019 Train Loss: 0.0053973, Val Loss: 0.0054552 +2024-11-11 15:23:10,019 Epoch 323/2000 +2024-11-11 15:23:25,870 Current Learning Rate: 0.0067673742 +2024-11-11 15:23:26,659 Train Loss: 0.0050311, Val Loss: 0.0045520 +2024-11-11 15:23:26,660 Epoch 324/2000 +2024-11-11 
15:23:42,273 Current Learning Rate: 0.0068406228 +2024-11-11 15:23:42,274 Train Loss: 0.0051359, Val Loss: 0.0046777 +2024-11-11 15:23:42,275 Epoch 325/2000 +2024-11-11 15:23:58,454 Current Learning Rate: 0.0069134172 +2024-11-11 15:23:58,454 Train Loss: 0.0051051, Val Loss: 0.0048157 +2024-11-11 15:23:58,454 Epoch 326/2000 +2024-11-11 15:24:13,276 Current Learning Rate: 0.0069857395 +2024-11-11 15:24:13,935 Train Loss: 0.0048852, Val Loss: 0.0045308 +2024-11-11 15:24:13,935 Epoch 327/2000 +2024-11-11 15:24:29,332 Current Learning Rate: 0.0070575718 +2024-11-11 15:24:30,019 Train Loss: 0.0048601, Val Loss: 0.0044837 +2024-11-11 15:24:30,019 Epoch 328/2000 +2024-11-11 15:24:44,791 Current Learning Rate: 0.0071288965 +2024-11-11 15:24:44,791 Train Loss: 0.0047553, Val Loss: 0.0045518 +2024-11-11 15:24:44,791 Epoch 329/2000 +2024-11-11 15:25:01,267 Current Learning Rate: 0.0071996958 +2024-11-11 15:25:03,762 Train Loss: 0.0048570, Val Loss: 0.0044149 +2024-11-11 15:25:03,762 Epoch 330/2000 +2024-11-11 15:25:19,017 Current Learning Rate: 0.0072699525 +2024-11-11 15:25:19,018 Train Loss: 0.0048096, Val Loss: 0.0045326 +2024-11-11 15:25:19,018 Epoch 331/2000 +2024-11-11 15:25:35,225 Current Learning Rate: 0.0073396491 +2024-11-11 15:25:35,230 Train Loss: 0.0048643, Val Loss: 0.0046675 +2024-11-11 15:25:35,231 Epoch 332/2000 +2024-11-11 15:25:51,301 Current Learning Rate: 0.0074087684 +2024-11-11 15:25:52,133 Train Loss: 0.0046107, Val Loss: 0.0042613 +2024-11-11 15:25:52,134 Epoch 333/2000 +2024-11-11 15:26:07,117 Current Learning Rate: 0.0074772933 +2024-11-11 15:26:07,118 Train Loss: 0.0049250, Val Loss: 0.0052920 +2024-11-11 15:26:07,118 Epoch 334/2000 +2024-11-11 15:26:22,818 Current Learning Rate: 0.0075452071 +2024-11-11 15:26:22,819 Train Loss: 0.0047885, Val Loss: 0.0043423 +2024-11-11 15:26:22,820 Epoch 335/2000 +2024-11-11 15:26:39,199 Current Learning Rate: 0.0076124928 +2024-11-11 15:26:39,200 Train Loss: 0.0046367, Val Loss: 0.0043337 +2024-11-11 
15:26:39,200 Epoch 336/2000 +2024-11-11 15:26:54,778 Current Learning Rate: 0.0076791340 +2024-11-11 15:26:55,840 Train Loss: 0.0043341, Val Loss: 0.0041157 +2024-11-11 15:26:55,840 Epoch 337/2000 +2024-11-11 15:27:11,824 Current Learning Rate: 0.0077451141 +2024-11-11 15:27:11,825 Train Loss: 0.0045052, Val Loss: 0.0042326 +2024-11-11 15:27:11,826 Epoch 338/2000 +2024-11-11 15:27:27,594 Current Learning Rate: 0.0078104169 +2024-11-11 15:27:28,597 Train Loss: 0.0045867, Val Loss: 0.0040947 +2024-11-11 15:27:28,597 Epoch 339/2000 +2024-11-11 15:27:43,955 Current Learning Rate: 0.0078750263 +2024-11-11 15:27:43,956 Train Loss: 0.0042196, Val Loss: 0.0041421 +2024-11-11 15:27:43,956 Epoch 340/2000 +2024-11-11 15:27:59,980 Current Learning Rate: 0.0079389263 +2024-11-11 15:28:00,772 Train Loss: 0.0041555, Val Loss: 0.0039471 +2024-11-11 15:28:00,773 Epoch 341/2000 +2024-11-11 15:28:15,631 Current Learning Rate: 0.0080021011 +2024-11-11 15:28:15,631 Train Loss: 0.0041709, Val Loss: 0.0042655 +2024-11-11 15:28:15,631 Epoch 342/2000 +2024-11-11 15:28:31,013 Current Learning Rate: 0.0080645353 +2024-11-11 15:28:31,014 Train Loss: 0.0045411, Val Loss: 0.0039864 +2024-11-11 15:28:31,014 Epoch 343/2000 +2024-11-11 15:28:46,956 Current Learning Rate: 0.0081262133 +2024-11-11 15:28:46,956 Train Loss: 0.0043177, Val Loss: 0.0039841 +2024-11-11 15:28:46,956 Epoch 344/2000 +2024-11-11 15:29:02,963 Current Learning Rate: 0.0081871199 +2024-11-11 15:29:02,964 Train Loss: 0.0041448, Val Loss: 0.0041350 +2024-11-11 15:29:02,964 Epoch 345/2000 +2024-11-11 15:29:19,665 Current Learning Rate: 0.0082472402 +2024-11-11 15:29:20,507 Train Loss: 0.0041028, Val Loss: 0.0038796 +2024-11-11 15:29:20,508 Epoch 346/2000 +2024-11-11 15:29:36,213 Current Learning Rate: 0.0083065593 +2024-11-11 15:29:36,213 Train Loss: 0.0041301, Val Loss: 0.0040616 +2024-11-11 15:29:36,214 Epoch 347/2000 +2024-11-11 15:29:52,477 Current Learning Rate: 0.0083650626 +2024-11-11 15:29:53,221 Train Loss: 0.0041402, Val 
Loss: 0.0038527 +2024-11-11 15:29:53,221 Epoch 348/2000 +2024-11-11 15:30:07,524 Current Learning Rate: 0.0084227355 +2024-11-11 15:30:07,524 Train Loss: 0.0040083, Val Loss: 0.0039554 +2024-11-11 15:30:07,525 Epoch 349/2000 +2024-11-11 15:30:23,061 Current Learning Rate: 0.0084795640 +2024-11-11 15:30:23,062 Train Loss: 0.0041215, Val Loss: 0.0038532 +2024-11-11 15:30:23,062 Epoch 350/2000 +2024-11-11 15:30:40,449 Current Learning Rate: 0.0085355339 +2024-11-11 15:30:40,450 Train Loss: 0.0039445, Val Loss: 0.0039694 +2024-11-11 15:30:40,450 Epoch 351/2000 +2024-11-11 15:30:56,072 Current Learning Rate: 0.0085906315 +2024-11-11 15:30:56,872 Train Loss: 0.0041709, Val Loss: 0.0038109 +2024-11-11 15:30:56,872 Epoch 352/2000 +2024-11-11 15:31:12,185 Current Learning Rate: 0.0086448431 +2024-11-11 15:31:12,994 Train Loss: 0.0038993, Val Loss: 0.0037419 +2024-11-11 15:31:12,995 Epoch 353/2000 +2024-11-11 15:31:28,227 Current Learning Rate: 0.0086981555 +2024-11-11 15:31:28,228 Train Loss: 0.0042222, Val Loss: 0.0046172 +2024-11-11 15:31:28,228 Epoch 354/2000 +2024-11-11 15:31:45,014 Current Learning Rate: 0.0087505553 +2024-11-11 15:31:45,015 Train Loss: 0.0041261, Val Loss: 0.0038533 +2024-11-11 15:31:45,015 Epoch 355/2000 +2024-11-11 15:32:00,327 Current Learning Rate: 0.0088020298 +2024-11-11 15:32:01,366 Train Loss: 0.0037649, Val Loss: 0.0036230 +2024-11-11 15:32:01,367 Epoch 356/2000 +2024-11-11 15:32:16,636 Current Learning Rate: 0.0088525662 +2024-11-11 15:32:16,637 Train Loss: 0.0037887, Val Loss: 0.0039364 +2024-11-11 15:32:16,637 Epoch 357/2000 +2024-11-11 15:32:32,839 Current Learning Rate: 0.0089021520 +2024-11-11 15:32:33,660 Train Loss: 0.0037912, Val Loss: 0.0035895 +2024-11-11 15:32:33,660 Epoch 358/2000 +2024-11-11 15:32:48,544 Current Learning Rate: 0.0089507751 +2024-11-11 15:32:49,341 Train Loss: 0.0037223, Val Loss: 0.0034912 +2024-11-11 15:32:49,342 Epoch 359/2000 +2024-11-11 15:33:04,195 Current Learning Rate: 0.0089984233 +2024-11-11 
15:33:05,024 Train Loss: 0.0037459, Val Loss: 0.0034155 +2024-11-11 15:33:05,025 Epoch 360/2000 +2024-11-11 15:33:19,243 Current Learning Rate: 0.0090450850 +2024-11-11 15:33:19,243 Train Loss: 0.0038109, Val Loss: 0.0038005 +2024-11-11 15:33:19,244 Epoch 361/2000 +2024-11-11 15:33:35,441 Current Learning Rate: 0.0090907486 +2024-11-11 15:33:35,442 Train Loss: 0.0037431, Val Loss: 0.0034637 +2024-11-11 15:33:35,442 Epoch 362/2000 +2024-11-11 15:33:52,118 Current Learning Rate: 0.0091354029 +2024-11-11 15:33:52,119 Train Loss: 0.0036233, Val Loss: 0.0037645 +2024-11-11 15:33:52,119 Epoch 363/2000 +2024-11-11 15:34:08,131 Current Learning Rate: 0.0091790368 +2024-11-11 15:34:08,132 Train Loss: 0.0035652, Val Loss: 0.0034323 +2024-11-11 15:34:08,132 Epoch 364/2000 +2024-11-11 15:34:24,458 Current Learning Rate: 0.0092216396 +2024-11-11 15:34:25,437 Train Loss: 0.0034464, Val Loss: 0.0032751 +2024-11-11 15:34:25,438 Epoch 365/2000 +2024-11-11 15:34:40,604 Current Learning Rate: 0.0092632008 +2024-11-11 15:34:40,606 Train Loss: 0.0036464, Val Loss: 0.0034118 +2024-11-11 15:34:40,606 Epoch 366/2000 +2024-11-11 15:34:56,395 Current Learning Rate: 0.0093037101 +2024-11-11 15:34:56,395 Train Loss: 0.0034116, Val Loss: 0.0041605 +2024-11-11 15:34:56,396 Epoch 367/2000 +2024-11-11 15:35:12,169 Current Learning Rate: 0.0093431576 +2024-11-11 15:35:13,266 Train Loss: 0.0033963, Val Loss: 0.0032637 +2024-11-11 15:35:13,267 Epoch 368/2000 +2024-11-11 15:35:28,503 Current Learning Rate: 0.0093815334 +2024-11-11 15:35:28,504 Train Loss: 0.0034644, Val Loss: 0.0032756 +2024-11-11 15:35:28,504 Epoch 369/2000 +2024-11-11 15:35:44,770 Current Learning Rate: 0.0094188282 +2024-11-11 15:35:44,771 Train Loss: 0.0032996, Val Loss: 0.0033540 +2024-11-11 15:35:44,771 Epoch 370/2000 +2024-11-11 15:36:01,298 Current Learning Rate: 0.0094550326 +2024-11-11 15:36:01,298 Train Loss: 0.0033698, Val Loss: 0.0033233 +2024-11-11 15:36:01,298 Epoch 371/2000 +2024-11-11 15:36:16,099 Current Learning 
Rate: 0.0094901379 +2024-11-11 15:36:16,099 Train Loss: 0.0035610, Val Loss: 0.0034147 +2024-11-11 15:36:16,099 Epoch 372/2000 +2024-11-11 15:36:31,398 Current Learning Rate: 0.0095241353 +2024-11-11 15:36:32,187 Train Loss: 0.0035433, Val Loss: 0.0032449 +2024-11-11 15:36:32,187 Epoch 373/2000 +2024-11-11 15:36:48,188 Current Learning Rate: 0.0095570164 +2024-11-11 15:36:48,967 Train Loss: 0.0033498, Val Loss: 0.0032401 +2024-11-11 15:36:48,967 Epoch 374/2000 +2024-11-11 15:37:04,010 Current Learning Rate: 0.0095887731 +2024-11-11 15:37:04,829 Train Loss: 0.0033526, Val Loss: 0.0030753 +2024-11-11 15:37:04,829 Epoch 375/2000 +2024-11-11 15:37:20,099 Current Learning Rate: 0.0096193977 +2024-11-11 15:37:20,100 Train Loss: 0.0032628, Val Loss: 0.0031094 +2024-11-11 15:37:20,100 Epoch 376/2000 +2024-11-11 15:37:35,793 Current Learning Rate: 0.0096488824 +2024-11-11 15:37:36,589 Train Loss: 0.0031918, Val Loss: 0.0030707 +2024-11-11 15:37:36,590 Epoch 377/2000 +2024-11-11 15:37:51,872 Current Learning Rate: 0.0096772202 +2024-11-11 15:37:51,873 Train Loss: 0.0030032, Val Loss: 0.0031136 +2024-11-11 15:37:51,873 Epoch 378/2000 +2024-11-11 15:38:07,306 Current Learning Rate: 0.0097044038 +2024-11-11 15:38:07,306 Train Loss: 0.0031911, Val Loss: 0.0030854 +2024-11-11 15:38:07,307 Epoch 379/2000 +2024-11-11 15:38:22,613 Current Learning Rate: 0.0097304268 +2024-11-11 15:38:22,613 Train Loss: 0.0033952, Val Loss: 0.0031591 +2024-11-11 15:38:22,614 Epoch 380/2000 +2024-11-11 15:38:38,057 Current Learning Rate: 0.0097552826 +2024-11-11 15:38:38,832 Train Loss: 0.0030825, Val Loss: 0.0030423 +2024-11-11 15:38:38,833 Epoch 381/2000 +2024-11-11 15:38:53,509 Current Learning Rate: 0.0097789651 +2024-11-11 15:38:53,510 Train Loss: 0.0032867, Val Loss: 0.0033022 +2024-11-11 15:38:53,510 Epoch 382/2000 +2024-11-11 15:39:09,111 Current Learning Rate: 0.0098014684 +2024-11-11 15:39:09,867 Train Loss: 0.0031884, Val Loss: 0.0029888 +2024-11-11 15:39:09,868 Epoch 383/2000 +2024-11-11 
15:39:24,444 Current Learning Rate: 0.0098227871 +2024-11-11 15:39:24,445 Train Loss: 0.0031045, Val Loss: 0.0031278 +2024-11-11 15:39:24,445 Epoch 384/2000 +2024-11-11 15:39:40,576 Current Learning Rate: 0.0098429158 +2024-11-11 15:39:40,577 Train Loss: 0.0031473, Val Loss: 0.0030145 +2024-11-11 15:39:40,577 Epoch 385/2000 +2024-11-11 15:39:56,625 Current Learning Rate: 0.0098618496 +2024-11-11 15:39:57,505 Train Loss: 0.0030393, Val Loss: 0.0029522 +2024-11-11 15:39:57,505 Epoch 386/2000 +2024-11-11 15:40:13,526 Current Learning Rate: 0.0098795838 +2024-11-11 15:40:14,284 Train Loss: 0.0031005, Val Loss: 0.0029025 +2024-11-11 15:40:14,284 Epoch 387/2000 +2024-11-11 15:40:29,570 Current Learning Rate: 0.0098961141 +2024-11-11 15:40:29,571 Train Loss: 0.0029602, Val Loss: 0.0030039 +2024-11-11 15:40:29,571 Epoch 388/2000 +2024-11-11 15:40:46,762 Current Learning Rate: 0.0099114363 +2024-11-11 15:40:47,634 Train Loss: 0.0029413, Val Loss: 0.0028920 +2024-11-11 15:40:47,634 Epoch 389/2000 +2024-11-11 15:41:03,492 Current Learning Rate: 0.0099255466 +2024-11-11 15:41:03,493 Train Loss: 0.0029506, Val Loss: 0.0029535 +2024-11-11 15:41:03,493 Epoch 390/2000 +2024-11-11 15:41:19,282 Current Learning Rate: 0.0099384417 +2024-11-11 15:41:19,992 Train Loss: 0.0029540, Val Loss: 0.0028542 +2024-11-11 15:41:19,992 Epoch 391/2000 +2024-11-11 15:41:34,271 Current Learning Rate: 0.0099501183 +2024-11-11 15:41:35,082 Train Loss: 0.0027288, Val Loss: 0.0027654 +2024-11-11 15:41:35,082 Epoch 392/2000 +2024-11-11 15:41:50,359 Current Learning Rate: 0.0099605735 +2024-11-11 15:41:50,360 Train Loss: 0.0029981, Val Loss: 0.0027909 +2024-11-11 15:41:50,360 Epoch 393/2000 +2024-11-11 15:42:06,693 Current Learning Rate: 0.0099698048 +2024-11-11 15:42:06,693 Train Loss: 0.0028139, Val Loss: 0.0027993 +2024-11-11 15:42:06,693 Epoch 394/2000 +2024-11-11 15:42:22,873 Current Learning Rate: 0.0099778098 +2024-11-11 15:42:23,586 Train Loss: 0.0028165, Val Loss: 0.0027495 +2024-11-11 
15:42:23,586 Epoch 395/2000 +2024-11-11 15:42:38,952 Current Learning Rate: 0.0099845867 +2024-11-11 15:42:38,953 Train Loss: 0.0027342, Val Loss: 0.0029531 +2024-11-11 15:42:38,953 Epoch 396/2000 +2024-11-11 15:42:55,143 Current Learning Rate: 0.0099901336 +2024-11-11 15:42:55,144 Train Loss: 0.0028151, Val Loss: 0.0028677 +2024-11-11 15:42:55,144 Epoch 397/2000 +2024-11-11 15:43:11,346 Current Learning Rate: 0.0099944494 +2024-11-11 15:43:11,347 Train Loss: 0.0028728, Val Loss: 0.0028663 +2024-11-11 15:43:11,347 Epoch 398/2000 +2024-11-11 15:43:27,473 Current Learning Rate: 0.0099975328 +2024-11-11 15:43:28,501 Train Loss: 0.0027439, Val Loss: 0.0026992 +2024-11-11 15:43:28,502 Epoch 399/2000 +2024-11-11 15:43:43,849 Current Learning Rate: 0.0099993832 +2024-11-11 15:43:43,850 Train Loss: 0.0029023, Val Loss: 0.0027593 +2024-11-11 15:43:43,850 Epoch 400/2000 +2024-11-11 15:44:00,709 Current Learning Rate: 0.0100000000 +2024-11-11 15:44:00,710 Train Loss: 0.0027569, Val Loss: 0.0027231 +2024-11-11 15:44:00,710 Epoch 401/2000 +2024-11-11 15:44:17,753 Current Learning Rate: 0.0099993832 +2024-11-11 15:44:18,592 Train Loss: 0.0027272, Val Loss: 0.0026476 +2024-11-11 15:44:18,593 Epoch 402/2000 +2024-11-11 15:44:33,953 Current Learning Rate: 0.0099975328 +2024-11-11 15:44:33,954 Train Loss: 0.0026140, Val Loss: 0.0026716 +2024-11-11 15:44:33,955 Epoch 403/2000 +2024-11-11 15:44:51,240 Current Learning Rate: 0.0099944494 +2024-11-11 15:44:52,250 Train Loss: 0.0025904, Val Loss: 0.0026408 +2024-11-11 15:44:52,251 Epoch 404/2000 +2024-11-11 15:45:07,899 Current Learning Rate: 0.0099901336 +2024-11-11 15:45:07,900 Train Loss: 0.0025123, Val Loss: 0.0027146 +2024-11-11 15:45:07,900 Epoch 405/2000 +2024-11-11 15:45:24,531 Current Learning Rate: 0.0099845867 +2024-11-11 15:45:24,532 Train Loss: 0.0027804, Val Loss: 0.0026874 +2024-11-11 15:45:24,532 Epoch 406/2000 +2024-11-11 15:45:39,425 Current Learning Rate: 0.0099778098 +2024-11-11 15:45:40,239 Train Loss: 0.0026509, Val 
Loss: 0.0026173 +2024-11-11 15:45:40,239 Epoch 407/2000 +2024-11-11 15:45:56,723 Current Learning Rate: 0.0099698048 +2024-11-11 15:45:57,423 Train Loss: 0.0026102, Val Loss: 0.0025745 +2024-11-11 15:45:57,423 Epoch 408/2000 +2024-11-11 15:46:11,774 Current Learning Rate: 0.0099605735 +2024-11-11 15:46:11,775 Train Loss: 0.0026633, Val Loss: 0.0026304 +2024-11-11 15:46:11,775 Epoch 409/2000 +2024-11-11 15:46:27,259 Current Learning Rate: 0.0099501183 +2024-11-11 15:46:28,016 Train Loss: 0.0027500, Val Loss: 0.0025415 +2024-11-11 15:46:28,017 Epoch 410/2000 +2024-11-11 15:46:42,588 Current Learning Rate: 0.0099384417 +2024-11-11 15:46:42,589 Train Loss: 0.0025426, Val Loss: 0.0025611 +2024-11-11 15:46:42,589 Epoch 411/2000 +2024-11-11 15:46:58,088 Current Learning Rate: 0.0099255466 +2024-11-11 15:46:58,089 Train Loss: 0.0025220, Val Loss: 0.0025420 +2024-11-11 15:46:58,089 Epoch 412/2000 +2024-11-11 15:47:13,169 Current Learning Rate: 0.0099114363 +2024-11-11 15:47:13,169 Train Loss: 0.0027022, Val Loss: 0.0026206 +2024-11-11 15:47:13,169 Epoch 413/2000 +2024-11-11 15:47:29,396 Current Learning Rate: 0.0098961141 +2024-11-11 15:47:30,431 Train Loss: 0.0024756, Val Loss: 0.0024991 +2024-11-11 15:47:30,432 Epoch 414/2000 +2024-11-11 15:47:46,902 Current Learning Rate: 0.0098795838 +2024-11-11 15:47:47,977 Train Loss: 0.0023801, Val Loss: 0.0024905 +2024-11-11 15:47:47,978 Epoch 415/2000 +2024-11-11 15:48:04,159 Current Learning Rate: 0.0098618496 +2024-11-11 15:48:04,162 Train Loss: 0.0023671, Val Loss: 0.0025034 +2024-11-11 15:48:04,162 Epoch 416/2000 +2024-11-11 15:48:19,640 Current Learning Rate: 0.0098429158 +2024-11-11 15:48:19,641 Train Loss: 0.0026003, Val Loss: 0.0025255 +2024-11-11 15:48:19,641 Epoch 417/2000 +2024-11-11 15:48:34,829 Current Learning Rate: 0.0098227871 +2024-11-11 15:48:35,666 Train Loss: 0.0024705, Val Loss: 0.0024665 +2024-11-11 15:48:35,666 Epoch 418/2000 +2024-11-11 15:48:50,252 Current Learning Rate: 0.0098014684 +2024-11-11 
15:48:50,998 Train Loss: 0.0023990, Val Loss: 0.0024348 +2024-11-11 15:48:50,998 Epoch 419/2000 +2024-11-11 15:49:05,824 Current Learning Rate: 0.0097789651 +2024-11-11 15:49:06,610 Train Loss: 0.0024215, Val Loss: 0.0023730 +2024-11-11 15:49:06,610 Epoch 420/2000 +2024-11-11 15:49:21,374 Current Learning Rate: 0.0097552826 +2024-11-11 15:49:21,375 Train Loss: 0.0023986, Val Loss: 0.0025001 +2024-11-11 15:49:21,375 Epoch 421/2000 +2024-11-11 15:49:36,450 Current Learning Rate: 0.0097304268 +2024-11-11 15:49:36,450 Train Loss: 0.0024147, Val Loss: 0.0024293 +2024-11-11 15:49:36,451 Epoch 422/2000 +2024-11-11 15:49:52,287 Current Learning Rate: 0.0097044038 +2024-11-11 15:49:52,288 Train Loss: 0.0026629, Val Loss: 0.0026045 +2024-11-11 15:49:52,288 Epoch 423/2000 +2024-11-11 15:50:08,273 Current Learning Rate: 0.0096772202 +2024-11-11 15:50:08,274 Train Loss: 0.0024435, Val Loss: 0.0026035 +2024-11-11 15:50:08,274 Epoch 424/2000 +2024-11-11 15:50:24,311 Current Learning Rate: 0.0096488824 +2024-11-11 15:50:24,312 Train Loss: 0.0023706, Val Loss: 0.0023908 +2024-11-11 15:50:24,313 Epoch 425/2000 +2024-11-11 15:50:39,933 Current Learning Rate: 0.0096193977 +2024-11-11 15:50:39,934 Train Loss: 0.0023457, Val Loss: 0.0023849 +2024-11-11 15:50:39,934 Epoch 426/2000 +2024-11-11 15:50:55,838 Current Learning Rate: 0.0095887731 +2024-11-11 15:50:55,839 Train Loss: 0.0025702, Val Loss: 0.0023846 +2024-11-11 15:50:55,840 Epoch 427/2000 +2024-11-11 15:51:11,786 Current Learning Rate: 0.0095570164 +2024-11-11 15:51:12,601 Train Loss: 0.0022596, Val Loss: 0.0022983 +2024-11-11 15:51:12,601 Epoch 428/2000 +2024-11-11 15:51:27,757 Current Learning Rate: 0.0095241353 +2024-11-11 15:51:27,758 Train Loss: 0.0024092, Val Loss: 0.0023213 +2024-11-11 15:51:27,759 Epoch 429/2000 +2024-11-11 15:51:44,023 Current Learning Rate: 0.0094901379 +2024-11-11 15:51:44,024 Train Loss: 0.0023035, Val Loss: 0.0023651 +2024-11-11 15:51:44,024 Epoch 430/2000 +2024-11-11 15:51:59,926 Current Learning 
Rate: 0.0094550326 +2024-11-11 15:51:59,927 Train Loss: 0.0022316, Val Loss: 0.0023187 +2024-11-11 15:51:59,927 Epoch 431/2000 +2024-11-11 15:52:16,328 Current Learning Rate: 0.0094188282 +2024-11-11 15:52:17,345 Train Loss: 0.0022601, Val Loss: 0.0022593 +2024-11-11 15:52:17,345 Epoch 432/2000 +2024-11-11 15:52:33,331 Current Learning Rate: 0.0093815334 +2024-11-11 15:52:33,332 Train Loss: 0.0023575, Val Loss: 0.0023146 +2024-11-11 15:52:33,332 Epoch 433/2000 +2024-11-11 15:52:49,105 Current Learning Rate: 0.0093431576 +2024-11-11 15:52:49,106 Train Loss: 0.0022776, Val Loss: 0.0023683 +2024-11-11 15:52:49,107 Epoch 434/2000 +2024-11-11 15:53:04,540 Current Learning Rate: 0.0093037101 +2024-11-11 15:53:04,540 Train Loss: 0.0021982, Val Loss: 0.0023392 +2024-11-11 15:53:04,541 Epoch 435/2000 +2024-11-11 15:53:19,733 Current Learning Rate: 0.0092632008 +2024-11-11 15:53:19,734 Train Loss: 0.0022392, Val Loss: 0.0023363 +2024-11-11 15:53:19,734 Epoch 436/2000 +2024-11-11 15:53:35,784 Current Learning Rate: 0.0092216396 +2024-11-11 15:53:36,549 Train Loss: 0.0021519, Val Loss: 0.0022499 +2024-11-11 15:53:36,550 Epoch 437/2000 +2024-11-11 15:53:51,659 Current Learning Rate: 0.0091790368 +2024-11-11 15:53:52,652 Train Loss: 0.0021430, Val Loss: 0.0022377 +2024-11-11 15:53:52,652 Epoch 438/2000 +2024-11-11 15:54:08,774 Current Learning Rate: 0.0091354029 +2024-11-11 15:54:08,775 Train Loss: 0.0023445, Val Loss: 0.0023493 +2024-11-11 15:54:08,775 Epoch 439/2000 +2024-11-11 15:54:24,482 Current Learning Rate: 0.0090907486 +2024-11-11 15:54:25,521 Train Loss: 0.0023121, Val Loss: 0.0022053 +2024-11-11 15:54:25,521 Epoch 440/2000 +2024-11-11 15:54:41,306 Current Learning Rate: 0.0090450850 +2024-11-11 15:54:42,079 Train Loss: 0.0023536, Val Loss: 0.0021599 +2024-11-11 15:54:42,079 Epoch 441/2000 +2024-11-11 15:54:57,043 Current Learning Rate: 0.0089984233 +2024-11-11 15:54:57,956 Train Loss: 0.0019961, Val Loss: 0.0020880 +2024-11-11 15:54:57,957 Epoch 442/2000 +2024-11-11 
15:55:13,785 Current Learning Rate: 0.0089507751 +2024-11-11 15:55:13,786 Train Loss: 0.0020018, Val Loss: 0.0021359 +2024-11-11 15:55:13,786 Epoch 443/2000 +2024-11-11 15:55:30,641 Current Learning Rate: 0.0089021520 +2024-11-11 15:55:30,642 Train Loss: 0.0023603, Val Loss: 0.0021651 +2024-11-11 15:55:30,643 Epoch 444/2000 +2024-11-11 15:55:45,897 Current Learning Rate: 0.0088525662 +2024-11-11 15:55:45,898 Train Loss: 0.0021467, Val Loss: 0.0021570 +2024-11-11 15:55:45,898 Epoch 445/2000 +2024-11-11 15:56:02,691 Current Learning Rate: 0.0088020298 +2024-11-11 15:56:02,692 Train Loss: 0.0021638, Val Loss: 0.0022031 +2024-11-11 15:56:02,692 Epoch 446/2000 +2024-11-11 15:56:18,638 Current Learning Rate: 0.0087505553 +2024-11-11 15:56:18,639 Train Loss: 0.0019990, Val Loss: 0.0021428 +2024-11-11 15:56:18,639 Epoch 447/2000 +2024-11-11 15:56:34,779 Current Learning Rate: 0.0086981555 +2024-11-11 15:56:34,780 Train Loss: 0.0021922, Val Loss: 0.0022111 +2024-11-11 15:56:34,780 Epoch 448/2000 +2024-11-11 15:56:51,500 Current Learning Rate: 0.0086448431 +2024-11-11 15:56:51,501 Train Loss: 0.0019944, Val Loss: 0.0020895 +2024-11-11 15:56:51,501 Epoch 449/2000 +2024-11-11 15:57:08,394 Current Learning Rate: 0.0085906315 +2024-11-11 15:57:08,395 Train Loss: 0.0020744, Val Loss: 0.0021320 +2024-11-11 15:57:08,395 Epoch 450/2000 +2024-11-11 15:57:24,634 Current Learning Rate: 0.0085355339 +2024-11-11 15:57:24,635 Train Loss: 0.0021858, Val Loss: 0.0022044 +2024-11-11 15:57:24,636 Epoch 451/2000 +2024-11-11 15:57:40,688 Current Learning Rate: 0.0084795640 +2024-11-11 15:57:41,618 Train Loss: 0.0021739, Val Loss: 0.0020811 +2024-11-11 15:57:41,618 Epoch 452/2000 +2024-11-11 15:57:57,554 Current Learning Rate: 0.0084227355 +2024-11-11 15:57:58,361 Train Loss: 0.0020215, Val Loss: 0.0020513 +2024-11-11 15:57:58,361 Epoch 453/2000 +2024-11-11 15:58:13,493 Current Learning Rate: 0.0083650626 +2024-11-11 15:58:13,494 Train Loss: 0.0021197, Val Loss: 0.0020636 +2024-11-11 
15:58:13,494 Epoch 454/2000 +2024-11-11 15:58:29,027 Current Learning Rate: 0.0083065593 +2024-11-11 15:58:29,027 Train Loss: 0.0020417, Val Loss: 0.0020677 +2024-11-11 15:58:29,028 Epoch 455/2000 +2024-11-11 15:58:44,414 Current Learning Rate: 0.0082472402 +2024-11-11 15:58:44,415 Train Loss: 0.0020983, Val Loss: 0.0021948 +2024-11-11 15:58:44,415 Epoch 456/2000 +2024-11-11 15:59:00,094 Current Learning Rate: 0.0081871199 +2024-11-11 15:59:00,095 Train Loss: 0.0019864, Val Loss: 0.0020734 +2024-11-11 15:59:00,095 Epoch 457/2000 +2024-11-11 15:59:15,407 Current Learning Rate: 0.0081262133 +2024-11-11 15:59:15,407 Train Loss: 0.0020466, Val Loss: 0.0020746 +2024-11-11 15:59:15,407 Epoch 458/2000 +2024-11-11 15:59:30,863 Current Learning Rate: 0.0080645353 +2024-11-11 15:59:30,864 Train Loss: 0.0020444, Val Loss: 0.0021021 +2024-11-11 15:59:30,864 Epoch 459/2000 +2024-11-11 15:59:46,313 Current Learning Rate: 0.0080021011 +2024-11-11 15:59:46,313 Train Loss: 0.0021171, Val Loss: 0.0024517 +2024-11-11 15:59:46,313 Epoch 460/2000 +2024-11-11 16:00:01,747 Current Learning Rate: 0.0079389263 +2024-11-11 16:00:04,353 Train Loss: 0.0019752, Val Loss: 0.0020446 +2024-11-11 16:00:04,354 Epoch 461/2000 +2024-11-11 16:00:18,705 Current Learning Rate: 0.0078750263 +2024-11-11 16:00:19,504 Train Loss: 0.0020459, Val Loss: 0.0020055 +2024-11-11 16:00:19,505 Epoch 462/2000 +2024-11-11 16:00:34,124 Current Learning Rate: 0.0078104169 +2024-11-11 16:00:34,913 Train Loss: 0.0021494, Val Loss: 0.0020054 +2024-11-11 16:00:34,913 Epoch 463/2000 +2024-11-11 16:00:49,508 Current Learning Rate: 0.0077451141 +2024-11-11 16:00:50,383 Train Loss: 0.0018689, Val Loss: 0.0019831 +2024-11-11 16:00:50,383 Epoch 464/2000 +2024-11-11 16:01:05,465 Current Learning Rate: 0.0076791340 +2024-11-11 16:01:06,264 Train Loss: 0.0019285, Val Loss: 0.0019549 +2024-11-11 16:01:06,264 Epoch 465/2000 +2024-11-11 16:01:21,254 Current Learning Rate: 0.0076124928 +2024-11-11 16:01:21,254 Train Loss: 0.0019481, Val 
Loss: 0.0021419 +2024-11-11 16:01:21,254 Epoch 466/2000 +2024-11-11 16:01:36,962 Current Learning Rate: 0.0075452071 +2024-11-11 16:01:36,963 Train Loss: 0.0019473, Val Loss: 0.0019738 +2024-11-11 16:01:36,963 Epoch 467/2000 +2024-11-11 16:01:52,832 Current Learning Rate: 0.0074772933 +2024-11-11 16:01:52,833 Train Loss: 0.0017876, Val Loss: 0.0019702 +2024-11-11 16:01:52,833 Epoch 468/2000 +2024-11-11 16:02:09,078 Current Learning Rate: 0.0074087684 +2024-11-11 16:02:09,079 Train Loss: 0.0018833, Val Loss: 0.0020495 +2024-11-11 16:02:09,079 Epoch 469/2000 +2024-11-11 16:02:25,453 Current Learning Rate: 0.0073396491 +2024-11-11 16:02:26,538 Train Loss: 0.0019062, Val Loss: 0.0019353 +2024-11-11 16:02:26,539 Epoch 470/2000 +2024-11-11 16:02:42,158 Current Learning Rate: 0.0072699525 +2024-11-11 16:02:42,158 Train Loss: 0.0019746, Val Loss: 0.0020509 +2024-11-11 16:02:42,159 Epoch 471/2000 +2024-11-11 16:02:58,381 Current Learning Rate: 0.0071996958 +2024-11-11 16:02:59,207 Train Loss: 0.0021009, Val Loss: 0.0019330 +2024-11-11 16:02:59,208 Epoch 472/2000 +2024-11-11 16:03:14,271 Current Learning Rate: 0.0071288965 +2024-11-11 16:03:14,272 Train Loss: 0.0019569, Val Loss: 0.0019589 +2024-11-11 16:03:14,273 Epoch 473/2000 +2024-11-11 16:03:31,044 Current Learning Rate: 0.0070575718 +2024-11-11 16:03:31,044 Train Loss: 0.0019271, Val Loss: 0.0019746 +2024-11-11 16:03:31,045 Epoch 474/2000 +2024-11-11 16:03:46,930 Current Learning Rate: 0.0069857395 +2024-11-11 16:03:47,804 Train Loss: 0.0018458, Val Loss: 0.0019274 +2024-11-11 16:03:47,804 Epoch 475/2000 +2024-11-11 16:04:03,052 Current Learning Rate: 0.0069134172 +2024-11-11 16:04:04,410 Train Loss: 0.0018403, Val Loss: 0.0018845 +2024-11-11 16:04:04,410 Epoch 476/2000 +2024-11-11 16:04:19,964 Current Learning Rate: 0.0068406228 +2024-11-11 16:04:19,965 Train Loss: 0.0017707, Val Loss: 0.0018862 +2024-11-11 16:04:19,965 Epoch 477/2000 +2024-11-11 16:04:35,230 Current Learning Rate: 0.0067673742 +2024-11-11 
16:04:36,004 Train Loss: 0.0017440, Val Loss: 0.0018785 +2024-11-11 16:04:36,004 Epoch 478/2000 +2024-11-11 16:04:51,096 Current Learning Rate: 0.0066936896 +2024-11-11 16:04:51,097 Train Loss: 0.0019223, Val Loss: 0.0019294 +2024-11-11 16:04:51,098 Epoch 479/2000 +2024-11-11 16:05:06,516 Current Learning Rate: 0.0066195871 +2024-11-11 16:05:07,498 Train Loss: 0.0018153, Val Loss: 0.0018524 +2024-11-11 16:05:07,498 Epoch 480/2000 +2024-11-11 16:05:23,821 Current Learning Rate: 0.0065450850 +2024-11-11 16:05:23,822 Train Loss: 0.0018126, Val Loss: 0.0018859 +2024-11-11 16:05:23,822 Epoch 481/2000 +2024-11-11 16:05:40,373 Current Learning Rate: 0.0064702016 +2024-11-11 16:05:40,373 Train Loss: 0.0017999, Val Loss: 0.0018556 +2024-11-11 16:05:40,373 Epoch 482/2000 +2024-11-11 16:05:56,273 Current Learning Rate: 0.0063949555 +2024-11-11 16:05:57,308 Train Loss: 0.0016655, Val Loss: 0.0018388 +2024-11-11 16:05:57,308 Epoch 483/2000 +2024-11-11 16:06:12,362 Current Learning Rate: 0.0063193652 +2024-11-11 16:06:12,363 Train Loss: 0.0019307, Val Loss: 0.0019162 +2024-11-11 16:06:12,363 Epoch 484/2000 +2024-11-11 16:06:27,663 Current Learning Rate: 0.0062434494 +2024-11-11 16:06:28,418 Train Loss: 0.0017061, Val Loss: 0.0018284 +2024-11-11 16:06:28,419 Epoch 485/2000 +2024-11-11 16:06:43,503 Current Learning Rate: 0.0061672268 +2024-11-11 16:06:43,504 Train Loss: 0.0017286, Val Loss: 0.0018455 +2024-11-11 16:06:43,504 Epoch 486/2000 +2024-11-11 16:06:59,204 Current Learning Rate: 0.0060907162 +2024-11-11 16:06:59,205 Train Loss: 0.0019324, Val Loss: 0.0019866 +2024-11-11 16:06:59,205 Epoch 487/2000 +2024-11-11 16:07:14,745 Current Learning Rate: 0.0060139365 +2024-11-11 16:07:15,568 Train Loss: 0.0017491, Val Loss: 0.0018149 +2024-11-11 16:07:15,568 Epoch 488/2000 +2024-11-11 16:07:29,955 Current Learning Rate: 0.0059369066 +2024-11-11 16:07:29,957 Train Loss: 0.0019003, Val Loss: 0.0019061 +2024-11-11 16:07:29,957 Epoch 489/2000 +2024-11-11 16:07:45,685 Current Learning 
Rate: 0.0058596455 +2024-11-11 16:07:45,685 Train Loss: 0.0018472, Val Loss: 0.0018604 +2024-11-11 16:07:45,685 Epoch 490/2000 +2024-11-11 16:08:01,170 Current Learning Rate: 0.0057821723 +2024-11-11 16:08:01,867 Train Loss: 0.0017701, Val Loss: 0.0017785 +2024-11-11 16:08:01,868 Epoch 491/2000 +2024-11-11 16:08:16,907 Current Learning Rate: 0.0057045062 +2024-11-11 16:08:16,908 Train Loss: 0.0018862, Val Loss: 0.0018500 +2024-11-11 16:08:16,908 Epoch 492/2000 +2024-11-11 16:08:32,207 Current Learning Rate: 0.0056266662 +2024-11-11 16:08:32,207 Train Loss: 0.0019619, Val Loss: 0.0018138 +2024-11-11 16:08:32,207 Epoch 493/2000 +2024-11-11 16:08:47,512 Current Learning Rate: 0.0055486716 +2024-11-11 16:08:47,513 Train Loss: 0.0018586, Val Loss: 0.0018043 +2024-11-11 16:08:47,513 Epoch 494/2000 +2024-11-11 16:09:03,379 Current Learning Rate: 0.0054705416 +2024-11-11 16:09:03,379 Train Loss: 0.0018664, Val Loss: 0.0018559 +2024-11-11 16:09:03,380 Epoch 495/2000 +2024-11-11 16:09:19,344 Current Learning Rate: 0.0053922955 +2024-11-11 16:09:20,380 Train Loss: 0.0019069, Val Loss: 0.0017716 +2024-11-11 16:09:20,380 Epoch 496/2000 +2024-11-11 16:09:36,318 Current Learning Rate: 0.0053139526 +2024-11-11 16:09:37,269 Train Loss: 0.0015875, Val Loss: 0.0017484 +2024-11-11 16:09:37,269 Epoch 497/2000 +2024-11-11 16:09:52,623 Current Learning Rate: 0.0052355323 +2024-11-11 16:09:52,624 Train Loss: 0.0017934, Val Loss: 0.0018129 +2024-11-11 16:09:52,624 Epoch 498/2000 +2024-11-11 16:10:08,125 Current Learning Rate: 0.0051570538 +2024-11-11 16:10:08,125 Train Loss: 0.0017281, Val Loss: 0.0017944 +2024-11-11 16:10:08,125 Epoch 499/2000 +2024-11-11 16:10:23,982 Current Learning Rate: 0.0050785366 +2024-11-11 16:10:23,982 Train Loss: 0.0017292, Val Loss: 0.0017568 +2024-11-11 16:10:23,983 Epoch 500/2000 +2024-11-11 16:10:40,212 Current Learning Rate: 0.0050000000 +2024-11-11 16:10:41,256 Train Loss: 0.0015728, Val Loss: 0.0017277 +2024-11-11 16:10:41,256 Epoch 501/2000 +2024-11-11 
16:10:57,505 Current Learning Rate: 0.0049214634 +2024-11-11 16:10:57,506 Train Loss: 0.0016176, Val Loss: 0.0017323 +2024-11-11 16:10:57,507 Epoch 502/2000 +2024-11-11 16:11:13,288 Current Learning Rate: 0.0048429462 +2024-11-11 16:11:13,289 Train Loss: 0.0016708, Val Loss: 0.0017368 +2024-11-11 16:11:13,289 Epoch 503/2000 +2024-11-11 16:11:28,709 Current Learning Rate: 0.0047644677 +2024-11-11 16:11:29,408 Train Loss: 0.0017153, Val Loss: 0.0017276 +2024-11-11 16:11:29,408 Epoch 504/2000 +2024-11-11 16:11:43,653 Current Learning Rate: 0.0046860474 +2024-11-11 16:11:44,407 Train Loss: 0.0016124, Val Loss: 0.0017100 +2024-11-11 16:11:44,408 Epoch 505/2000 +2024-11-11 16:11:59,632 Current Learning Rate: 0.0046077045 +2024-11-11 16:12:00,691 Train Loss: 0.0015919, Val Loss: 0.0017059 +2024-11-11 16:12:00,692 Epoch 506/2000 +2024-11-11 16:12:15,848 Current Learning Rate: 0.0045294584 +2024-11-11 16:12:15,848 Train Loss: 0.0017435, Val Loss: 0.0017152 +2024-11-11 16:12:15,849 Epoch 507/2000 +2024-11-11 16:12:31,233 Current Learning Rate: 0.0044513284 +2024-11-11 16:12:31,233 Train Loss: 0.0015542, Val Loss: 0.0017474 +2024-11-11 16:12:31,233 Epoch 508/2000 +2024-11-11 16:12:47,634 Current Learning Rate: 0.0043733338 +2024-11-11 16:12:47,634 Train Loss: 0.0017828, Val Loss: 0.0017657 +2024-11-11 16:12:47,634 Epoch 509/2000 +2024-11-11 16:13:03,377 Current Learning Rate: 0.0042954938 +2024-11-11 16:13:05,795 Train Loss: 0.0016038, Val Loss: 0.0016854 +2024-11-11 16:13:05,795 Epoch 510/2000 +2024-11-11 16:13:20,775 Current Learning Rate: 0.0042178277 +2024-11-11 16:13:21,841 Train Loss: 0.0016788, Val Loss: 0.0016779 +2024-11-11 16:13:21,842 Epoch 511/2000 +2024-11-11 16:13:37,672 Current Learning Rate: 0.0041403545 +2024-11-11 16:13:38,760 Train Loss: 0.0016675, Val Loss: 0.0016651 +2024-11-11 16:13:38,760 Epoch 512/2000 +2024-11-11 16:13:54,901 Current Learning Rate: 0.0040630934 +2024-11-11 16:13:54,902 Train Loss: 0.0017742, Val Loss: 0.0016794 +2024-11-11 
16:13:54,902 Epoch 513/2000 +2024-11-11 16:14:10,716 Current Learning Rate: 0.0039860635 +2024-11-11 16:14:11,598 Train Loss: 0.0015026, Val Loss: 0.0016478 +2024-11-11 16:14:11,598 Epoch 514/2000 +2024-11-11 16:14:26,196 Current Learning Rate: 0.0039092838 +2024-11-11 16:14:26,197 Train Loss: 0.0016885, Val Loss: 0.0017014 +2024-11-11 16:14:26,197 Epoch 515/2000 +2024-11-11 16:14:41,627 Current Learning Rate: 0.0038327732 +2024-11-11 16:14:41,627 Train Loss: 0.0017836, Val Loss: 0.0018343 +2024-11-11 16:14:41,627 Epoch 516/2000 +2024-11-11 16:14:57,435 Current Learning Rate: 0.0037565506 +2024-11-11 16:14:57,435 Train Loss: 0.0015580, Val Loss: 0.0016799 +2024-11-11 16:14:57,435 Epoch 517/2000 +2024-11-11 16:15:12,908 Current Learning Rate: 0.0036806348 +2024-11-11 16:15:12,908 Train Loss: 0.0015340, Val Loss: 0.0016495 +2024-11-11 16:15:12,908 Epoch 518/2000 +2024-11-11 16:15:28,391 Current Learning Rate: 0.0036050445 +2024-11-11 16:15:28,392 Train Loss: 0.0015838, Val Loss: 0.0016501 +2024-11-11 16:15:28,392 Epoch 519/2000 +2024-11-11 16:15:44,020 Current Learning Rate: 0.0035297984 +2024-11-11 16:15:44,787 Train Loss: 0.0015158, Val Loss: 0.0016460 +2024-11-11 16:15:44,788 Epoch 520/2000 +2024-11-11 16:15:59,636 Current Learning Rate: 0.0034549150 +2024-11-11 16:15:59,637 Train Loss: 0.0016864, Val Loss: 0.0016732 +2024-11-11 16:15:59,637 Epoch 521/2000 +2024-11-11 16:16:16,119 Current Learning Rate: 0.0033804129 +2024-11-11 16:16:16,119 Train Loss: 0.0015563, Val Loss: 0.0016919 +2024-11-11 16:16:16,120 Epoch 522/2000 +2024-11-11 16:16:31,722 Current Learning Rate: 0.0033063104 +2024-11-11 16:16:31,722 Train Loss: 0.0015584, Val Loss: 0.0016514 +2024-11-11 16:16:31,722 Epoch 523/2000 +2024-11-11 16:16:48,232 Current Learning Rate: 0.0032326258 +2024-11-11 16:16:48,233 Train Loss: 0.0014985, Val Loss: 0.0016575 +2024-11-11 16:16:48,233 Epoch 524/2000 +2024-11-11 16:17:04,498 Current Learning Rate: 0.0031593772 +2024-11-11 16:17:04,498 Train Loss: 0.0015001, Val 
Loss: 0.0016723 +2024-11-11 16:17:04,498 Epoch 525/2000 +2024-11-11 16:17:19,873 Current Learning Rate: 0.0030865828 +2024-11-11 16:17:20,731 Train Loss: 0.0015969, Val Loss: 0.0016347 +2024-11-11 16:17:20,731 Epoch 526/2000 +2024-11-11 16:17:35,853 Current Learning Rate: 0.0030142605 +2024-11-11 16:17:36,882 Train Loss: 0.0015379, Val Loss: 0.0016232 +2024-11-11 16:17:36,883 Epoch 527/2000 +2024-11-11 16:17:52,338 Current Learning Rate: 0.0029424282 +2024-11-11 16:17:53,220 Train Loss: 0.0014947, Val Loss: 0.0016079 +2024-11-11 16:17:53,220 Epoch 528/2000 +2024-11-11 16:18:08,765 Current Learning Rate: 0.0028711035 +2024-11-11 16:18:08,767 Train Loss: 0.0017007, Val Loss: 0.0016654 +2024-11-11 16:18:08,767 Epoch 529/2000 +2024-11-11 16:18:25,173 Current Learning Rate: 0.0028003042 +2024-11-11 16:18:25,174 Train Loss: 0.0015099, Val Loss: 0.0016081 +2024-11-11 16:18:25,174 Epoch 530/2000 +2024-11-11 16:18:40,740 Current Learning Rate: 0.0027300475 +2024-11-11 16:18:41,493 Train Loss: 0.0015892, Val Loss: 0.0015988 +2024-11-11 16:18:41,493 Epoch 531/2000 +2024-11-11 16:18:56,898 Current Learning Rate: 0.0026603509 +2024-11-11 16:18:57,683 Train Loss: 0.0014729, Val Loss: 0.0015968 +2024-11-11 16:18:57,684 Epoch 532/2000 +2024-11-11 16:19:13,202 Current Learning Rate: 0.0025912316 +2024-11-11 16:19:13,203 Train Loss: 0.0015314, Val Loss: 0.0015976 +2024-11-11 16:19:13,203 Epoch 533/2000 +2024-11-11 16:19:29,360 Current Learning Rate: 0.0025227067 +2024-11-11 16:19:30,177 Train Loss: 0.0014599, Val Loss: 0.0015746 +2024-11-11 16:19:30,177 Epoch 534/2000 +2024-11-11 16:19:45,423 Current Learning Rate: 0.0024547929 +2024-11-11 16:19:45,424 Train Loss: 0.0015706, Val Loss: 0.0015865 +2024-11-11 16:19:45,424 Epoch 535/2000 +2024-11-11 16:20:00,908 Current Learning Rate: 0.0023875072 +2024-11-11 16:20:00,909 Train Loss: 0.0015245, Val Loss: 0.0015782 +2024-11-11 16:20:00,909 Epoch 536/2000 +2024-11-11 16:20:16,538 Current Learning Rate: 0.0023208660 +2024-11-11 
16:20:17,281 Train Loss: 0.0014032, Val Loss: 0.0015686 +2024-11-11 16:20:17,281 Epoch 537/2000 +2024-11-11 16:20:32,061 Current Learning Rate: 0.0022548859 +2024-11-11 16:20:32,062 Train Loss: 0.0015010, Val Loss: 0.0015706 +2024-11-11 16:20:32,062 Epoch 538/2000 +2024-11-11 16:20:47,831 Current Learning Rate: 0.0021895831 +2024-11-11 16:20:47,832 Train Loss: 0.0014061, Val Loss: 0.0015709 +2024-11-11 16:20:47,832 Epoch 539/2000 +2024-11-11 16:21:04,394 Current Learning Rate: 0.0021249737 +2024-11-11 16:21:04,394 Train Loss: 0.0014642, Val Loss: 0.0015716 +2024-11-11 16:21:04,395 Epoch 540/2000 +2024-11-11 16:21:20,644 Current Learning Rate: 0.0020610737 +2024-11-11 16:21:20,644 Train Loss: 0.0015021, Val Loss: 0.0016078 +2024-11-11 16:21:20,645 Epoch 541/2000 +2024-11-11 16:21:36,011 Current Learning Rate: 0.0019978989 +2024-11-11 16:21:36,011 Train Loss: 0.0014069, Val Loss: 0.0015928 +2024-11-11 16:21:36,012 Epoch 542/2000 +2024-11-11 16:21:52,204 Current Learning Rate: 0.0019354647 +2024-11-11 16:21:52,936 Train Loss: 0.0015883, Val Loss: 0.0015588 +2024-11-11 16:21:52,936 Epoch 543/2000 +2024-11-11 16:22:07,769 Current Learning Rate: 0.0018737867 +2024-11-11 16:22:07,770 Train Loss: 0.0014828, Val Loss: 0.0015647 +2024-11-11 16:22:07,770 Epoch 544/2000 +2024-11-11 16:22:24,511 Current Learning Rate: 0.0018128801 +2024-11-11 16:22:24,511 Train Loss: 0.0014293, Val Loss: 0.0015666 +2024-11-11 16:22:24,511 Epoch 545/2000 +2024-11-11 16:22:40,551 Current Learning Rate: 0.0017527598 +2024-11-11 16:22:41,294 Train Loss: 0.0013610, Val Loss: 0.0015508 +2024-11-11 16:22:41,294 Epoch 546/2000 +2024-11-11 16:22:57,430 Current Learning Rate: 0.0016934407 +2024-11-11 16:22:58,195 Train Loss: 0.0016783, Val Loss: 0.0015387 +2024-11-11 16:22:58,195 Epoch 547/2000 +2024-11-11 16:23:13,081 Current Learning Rate: 0.0016349374 +2024-11-11 16:23:13,082 Train Loss: 0.0014395, Val Loss: 0.0015399 +2024-11-11 16:23:13,082 Epoch 548/2000 +2024-11-11 16:23:29,144 Current Learning 
Rate: 0.0015772645 +2024-11-11 16:23:29,144 Train Loss: 0.0015553, Val Loss: 0.0015414 +2024-11-11 16:23:29,144 Epoch 549/2000 +2024-11-11 16:23:45,321 Current Learning Rate: 0.0015204360 +2024-11-11 16:23:46,052 Train Loss: 0.0015024, Val Loss: 0.0015371 +2024-11-11 16:23:46,052 Epoch 550/2000 +2024-11-11 16:24:01,416 Current Learning Rate: 0.0014644661 +2024-11-11 16:24:05,218 Train Loss: 0.0014474, Val Loss: 0.0015369 +2024-11-11 16:24:05,219 Epoch 551/2000 +2024-11-11 16:24:19,288 Current Learning Rate: 0.0014093685 +2024-11-11 16:24:19,289 Train Loss: 0.0013989, Val Loss: 0.0015375 +2024-11-11 16:24:19,290 Epoch 552/2000 +2024-11-11 16:24:34,434 Current Learning Rate: 0.0013551569 +2024-11-11 16:24:35,144 Train Loss: 0.0014447, Val Loss: 0.0015220 +2024-11-11 16:24:35,145 Epoch 553/2000 +2024-11-11 16:24:49,936 Current Learning Rate: 0.0013018445 +2024-11-11 16:24:50,707 Train Loss: 0.0014534, Val Loss: 0.0015164 +2024-11-11 16:24:50,707 Epoch 554/2000 +2024-11-11 16:25:05,563 Current Learning Rate: 0.0012494447 +2024-11-11 16:25:06,451 Train Loss: 0.0013869, Val Loss: 0.0015128 +2024-11-11 16:25:06,451 Epoch 555/2000 +2024-11-11 16:25:21,363 Current Learning Rate: 0.0011979702 +2024-11-11 16:25:22,177 Train Loss: 0.0013326, Val Loss: 0.0015067 +2024-11-11 16:25:22,177 Epoch 556/2000 +2024-11-11 16:25:37,131 Current Learning Rate: 0.0011474338 +2024-11-11 16:25:37,132 Train Loss: 0.0013669, Val Loss: 0.0015067 +2024-11-11 16:25:37,132 Epoch 557/2000 +2024-11-11 16:25:52,878 Current Learning Rate: 0.0010978480 +2024-11-11 16:25:53,583 Train Loss: 0.0013728, Val Loss: 0.0015037 +2024-11-11 16:25:53,583 Epoch 558/2000 +2024-11-11 16:26:08,387 Current Learning Rate: 0.0010492249 +2024-11-11 16:26:09,163 Train Loss: 0.0013686, Val Loss: 0.0015006 +2024-11-11 16:26:09,163 Epoch 559/2000 +2024-11-11 16:26:25,106 Current Learning Rate: 0.0010015767 +2024-11-11 16:26:26,082 Train Loss: 0.0014711, Val Loss: 0.0014985 +2024-11-11 16:26:26,082 Epoch 560/2000 +2024-11-11 
16:26:41,589 Current Learning Rate: 0.0009549150 +2024-11-11 16:26:42,643 Train Loss: 0.0015281, Val Loss: 0.0014979 +2024-11-11 16:26:42,644 Epoch 561/2000 +2024-11-11 16:26:58,540 Current Learning Rate: 0.0009092514 +2024-11-11 16:26:59,323 Train Loss: 0.0013669, Val Loss: 0.0014935 +2024-11-11 16:26:59,323 Epoch 562/2000 +2024-11-11 16:27:14,382 Current Learning Rate: 0.0008645971 +2024-11-11 16:27:15,265 Train Loss: 0.0015915, Val Loss: 0.0014933 +2024-11-11 16:27:15,266 Epoch 563/2000 +2024-11-11 16:27:30,403 Current Learning Rate: 0.0008209632 +2024-11-11 16:27:31,160 Train Loss: 0.0014926, Val Loss: 0.0014908 +2024-11-11 16:27:31,160 Epoch 564/2000 +2024-11-11 16:27:45,602 Current Learning Rate: 0.0007783604 +2024-11-11 16:27:46,381 Train Loss: 0.0013564, Val Loss: 0.0014878 +2024-11-11 16:27:46,381 Epoch 565/2000 +2024-11-11 16:28:01,003 Current Learning Rate: 0.0007367992 +2024-11-11 16:28:01,776 Train Loss: 0.0015443, Val Loss: 0.0014873 +2024-11-11 16:28:01,776 Epoch 566/2000 +2024-11-11 16:28:17,254 Current Learning Rate: 0.0006962899 +2024-11-11 16:28:17,979 Train Loss: 0.0014868, Val Loss: 0.0014851 +2024-11-11 16:28:17,980 Epoch 567/2000 +2024-11-11 16:28:32,957 Current Learning Rate: 0.0006568424 +2024-11-11 16:28:33,715 Train Loss: 0.0013553, Val Loss: 0.0014838 +2024-11-11 16:28:33,715 Epoch 568/2000 +2024-11-11 16:28:49,021 Current Learning Rate: 0.0006184666 +2024-11-11 16:28:49,021 Train Loss: 0.0015204, Val Loss: 0.0014845 +2024-11-11 16:28:49,022 Epoch 569/2000 +2024-11-11 16:29:06,017 Current Learning Rate: 0.0005811718 +2024-11-11 16:29:07,093 Train Loss: 0.0014230, Val Loss: 0.0014805 +2024-11-11 16:29:07,094 Epoch 570/2000 +2024-11-11 16:29:21,762 Current Learning Rate: 0.0005449674 +2024-11-11 16:29:22,454 Train Loss: 0.0013558, Val Loss: 0.0014793 +2024-11-11 16:29:22,454 Epoch 571/2000 +2024-11-11 16:29:38,196 Current Learning Rate: 0.0005098621 +2024-11-11 16:29:38,978 Train Loss: 0.0013530, Val Loss: 0.0014775 +2024-11-11 
16:29:38,978 Epoch 572/2000 +2024-11-11 16:29:54,993 Current Learning Rate: 0.0004758647 +2024-11-11 16:29:55,770 Train Loss: 0.0013935, Val Loss: 0.0014764 +2024-11-11 16:29:55,770 Epoch 573/2000 +2024-11-11 16:30:10,327 Current Learning Rate: 0.0004429836 +2024-11-11 16:30:11,045 Train Loss: 0.0014663, Val Loss: 0.0014754 +2024-11-11 16:30:11,045 Epoch 574/2000 +2024-11-11 16:30:26,011 Current Learning Rate: 0.0004112269 +2024-11-11 16:30:26,699 Train Loss: 0.0013592, Val Loss: 0.0014747 +2024-11-11 16:30:26,699 Epoch 575/2000 +2024-11-11 16:30:41,293 Current Learning Rate: 0.0003806023 +2024-11-11 16:30:42,035 Train Loss: 0.0013511, Val Loss: 0.0014742 +2024-11-11 16:30:42,035 Epoch 576/2000 +2024-11-11 16:30:57,009 Current Learning Rate: 0.0003511176 +2024-11-11 16:30:57,711 Train Loss: 0.0014025, Val Loss: 0.0014733 +2024-11-11 16:30:57,711 Epoch 577/2000 +2024-11-11 16:31:12,475 Current Learning Rate: 0.0003227798 +2024-11-11 16:31:13,269 Train Loss: 0.0014400, Val Loss: 0.0014729 +2024-11-11 16:31:13,270 Epoch 578/2000 +2024-11-11 16:31:29,072 Current Learning Rate: 0.0002955962 +2024-11-11 16:31:29,903 Train Loss: 0.0014251, Val Loss: 0.0014718 +2024-11-11 16:31:29,904 Epoch 579/2000 +2024-11-11 16:31:45,246 Current Learning Rate: 0.0002695732 +2024-11-11 16:31:46,282 Train Loss: 0.0015810, Val Loss: 0.0014708 +2024-11-11 16:31:46,283 Epoch 580/2000 +2024-11-11 16:32:01,929 Current Learning Rate: 0.0002447174 +2024-11-11 16:32:05,203 Train Loss: 0.0013351, Val Loss: 0.0014699 +2024-11-11 16:32:05,203 Epoch 581/2000 +2024-11-11 16:32:20,290 Current Learning Rate: 0.0002210349 +2024-11-11 16:32:20,291 Train Loss: 0.0014548, Val Loss: 0.0014701 +2024-11-11 16:32:20,291 Epoch 582/2000 +2024-11-11 16:32:35,617 Current Learning Rate: 0.0001985316 +2024-11-11 16:32:36,381 Train Loss: 0.0013962, Val Loss: 0.0014694 +2024-11-11 16:32:36,381 Epoch 583/2000 +2024-11-11 16:32:51,372 Current Learning Rate: 0.0001772129 +2024-11-11 16:32:51,372 Train Loss: 0.0015118, Val 
Loss: 0.0014702 +2024-11-11 16:32:51,373 Epoch 584/2000 +2024-11-11 16:33:07,100 Current Learning Rate: 0.0001570842 +2024-11-11 16:33:07,101 Train Loss: 0.0015893, Val Loss: 0.0014707 +2024-11-11 16:33:07,101 Epoch 585/2000 +2024-11-11 16:33:22,612 Current Learning Rate: 0.0001381504 +2024-11-11 16:33:22,612 Train Loss: 0.0013931, Val Loss: 0.0014706 +2024-11-11 16:33:22,612 Epoch 586/2000 +2024-11-11 16:33:38,755 Current Learning Rate: 0.0001204162 +2024-11-11 16:33:39,479 Train Loss: 0.0013333, Val Loss: 0.0014691 +2024-11-11 16:33:39,480 Epoch 587/2000 +2024-11-11 16:33:54,944 Current Learning Rate: 0.0001038859 +2024-11-11 16:33:55,726 Train Loss: 0.0014591, Val Loss: 0.0014679 +2024-11-11 16:33:55,727 Epoch 588/2000 +2024-11-11 16:34:10,394 Current Learning Rate: 0.0000885637 +2024-11-11 16:34:11,158 Train Loss: 0.0012854, Val Loss: 0.0014673 +2024-11-11 16:34:11,158 Epoch 589/2000 +2024-11-11 16:34:26,034 Current Learning Rate: 0.0000744534 +2024-11-11 16:34:26,780 Train Loss: 0.0013903, Val Loss: 0.0014670 +2024-11-11 16:34:26,781 Epoch 590/2000 +2024-11-11 16:34:41,404 Current Learning Rate: 0.0000615583 +2024-11-11 16:34:42,162 Train Loss: 0.0015155, Val Loss: 0.0014668 +2024-11-11 16:34:42,163 Epoch 591/2000 +2024-11-11 16:34:56,787 Current Learning Rate: 0.0000498817 +2024-11-11 16:34:57,567 Train Loss: 0.0014036, Val Loss: 0.0014665 +2024-11-11 16:34:57,568 Epoch 592/2000 +2024-11-11 16:35:11,852 Current Learning Rate: 0.0000394265 +2024-11-11 16:35:11,853 Train Loss: 0.0014947, Val Loss: 0.0014666 +2024-11-11 16:35:11,853 Epoch 593/2000 +2024-11-11 16:35:26,980 Current Learning Rate: 0.0000301952 +2024-11-11 16:35:27,731 Train Loss: 0.0013346, Val Loss: 0.0014663 +2024-11-11 16:35:27,731 Epoch 594/2000 +2024-11-11 16:35:42,223 Current Learning Rate: 0.0000221902 +2024-11-11 16:35:42,927 Train Loss: 0.0014882, Val Loss: 0.0014663 +2024-11-11 16:35:42,927 Epoch 595/2000 +2024-11-11 16:35:58,503 Current Learning Rate: 0.0000154133 +2024-11-11 
16:35:59,237 Train Loss: 0.0014868, Val Loss: 0.0014662 +2024-11-11 16:35:59,237 Epoch 596/2000 +2024-11-11 16:36:15,399 Current Learning Rate: 0.0000098664 +2024-11-11 16:36:16,405 Train Loss: 0.0013868, Val Loss: 0.0014660 +2024-11-11 16:36:16,405 Epoch 597/2000 +2024-11-11 16:36:31,701 Current Learning Rate: 0.0000055506 +2024-11-11 16:36:31,702 Train Loss: 0.0013281, Val Loss: 0.0014660 +2024-11-11 16:36:31,702 Epoch 598/2000 +2024-11-11 16:36:47,029 Current Learning Rate: 0.0000024672 +2024-11-11 16:36:47,030 Train Loss: 0.0013248, Val Loss: 0.0014660 +2024-11-11 16:36:47,030 Epoch 599/2000 +2024-11-11 16:37:03,169 Current Learning Rate: 0.0000006168 +2024-11-11 16:37:05,082 Train Loss: 0.0013347, Val Loss: 0.0014660 +2024-11-11 16:37:05,083 Epoch 600/2000 +2024-11-11 16:37:19,616 Current Learning Rate: 0.0000000000 +2024-11-11 16:37:19,617 Train Loss: 0.0013388, Val Loss: 0.0014660 +2024-11-11 16:37:19,617 Epoch 601/2000 +2024-11-11 16:37:35,108 Current Learning Rate: 0.0000006168 +2024-11-11 16:37:35,996 Train Loss: 0.0015185, Val Loss: 0.0014659 +2024-11-11 16:37:35,997 Epoch 602/2000 +2024-11-11 16:37:51,057 Current Learning Rate: 0.0000024672 +2024-11-11 16:37:51,830 Train Loss: 0.0014399, Val Loss: 0.0014658 +2024-11-11 16:37:51,830 Epoch 603/2000 +2024-11-11 16:38:07,352 Current Learning Rate: 0.0000055506 +2024-11-11 16:38:07,353 Train Loss: 0.0012863, Val Loss: 0.0014660 +2024-11-11 16:38:07,353 Epoch 604/2000 +2024-11-11 16:38:23,196 Current Learning Rate: 0.0000098664 +2024-11-11 16:38:23,196 Train Loss: 0.0013989, Val Loss: 0.0014660 +2024-11-11 16:38:23,197 Epoch 605/2000 +2024-11-11 16:38:38,614 Current Learning Rate: 0.0000154133 +2024-11-11 16:38:38,614 Train Loss: 0.0013243, Val Loss: 0.0014660 +2024-11-11 16:38:38,614 Epoch 606/2000 +2024-11-11 16:38:53,839 Current Learning Rate: 0.0000221902 +2024-11-11 16:38:53,840 Train Loss: 0.0013791, Val Loss: 0.0014659 +2024-11-11 16:38:53,840 Epoch 607/2000 +2024-11-11 16:39:09,180 Current Learning 
Rate: 0.0000301952 +2024-11-11 16:39:09,181 Train Loss: 0.0012837, Val Loss: 0.0014660 +2024-11-11 16:39:09,181 Epoch 608/2000 +2024-11-11 16:39:25,202 Current Learning Rate: 0.0000394265 +2024-11-11 16:39:25,202 Train Loss: 0.0013361, Val Loss: 0.0014661 +2024-11-11 16:39:25,202 Epoch 609/2000 +2024-11-11 16:39:40,942 Current Learning Rate: 0.0000498817 +2024-11-11 16:39:40,943 Train Loss: 0.0014310, Val Loss: 0.0014659 +2024-11-11 16:39:40,943 Epoch 610/2000 +2024-11-11 16:39:56,618 Current Learning Rate: 0.0000615583 +2024-11-11 16:39:56,618 Train Loss: 0.0013358, Val Loss: 0.0014660 +2024-11-11 16:39:56,618 Epoch 611/2000 +2024-11-11 16:40:12,312 Current Learning Rate: 0.0000744534 +2024-11-11 16:40:13,033 Train Loss: 0.0014237, Val Loss: 0.0014657 +2024-11-11 16:40:13,033 Epoch 612/2000 +2024-11-11 16:40:27,999 Current Learning Rate: 0.0000885637 +2024-11-11 16:40:28,000 Train Loss: 0.0014800, Val Loss: 0.0014659 +2024-11-11 16:40:28,000 Epoch 613/2000 +2024-11-11 16:40:43,907 Current Learning Rate: 0.0001038859 +2024-11-11 16:40:43,908 Train Loss: 0.0013293, Val Loss: 0.0014664 +2024-11-11 16:40:43,908 Epoch 614/2000 +2024-11-11 16:41:00,248 Current Learning Rate: 0.0001204162 +2024-11-11 16:41:00,248 Train Loss: 0.0014150, Val Loss: 0.0014674 +2024-11-11 16:41:00,249 Epoch 615/2000 +2024-11-11 16:41:16,347 Current Learning Rate: 0.0001381504 +2024-11-11 16:41:16,347 Train Loss: 0.0012844, Val Loss: 0.0014670 +2024-11-11 16:41:16,347 Epoch 616/2000 +2024-11-11 16:41:32,143 Current Learning Rate: 0.0001570842 +2024-11-11 16:41:32,143 Train Loss: 0.0013916, Val Loss: 0.0014657 +2024-11-11 16:41:32,144 Epoch 617/2000 +2024-11-11 16:41:48,156 Current Learning Rate: 0.0001772129 +2024-11-11 16:41:48,888 Train Loss: 0.0014848, Val Loss: 0.0014650 +2024-11-11 16:41:48,888 Epoch 618/2000 +2024-11-11 16:42:03,341 Current Learning Rate: 0.0001985316 +2024-11-11 16:42:03,342 Train Loss: 0.0013309, Val Loss: 0.0014655 +2024-11-11 16:42:03,342 Epoch 619/2000 +2024-11-11 
16:42:18,910 Current Learning Rate: 0.0002210349 +2024-11-11 16:42:18,911 Train Loss: 0.0013759, Val Loss: 0.0014656 +2024-11-11 16:42:18,911 Epoch 620/2000 +2024-11-11 16:42:34,685 Current Learning Rate: 0.0002447174 +2024-11-11 16:42:34,685 Train Loss: 0.0013422, Val Loss: 0.0014651 +2024-11-11 16:42:34,685 Epoch 621/2000 +2024-11-11 16:42:49,700 Current Learning Rate: 0.0002695732 +2024-11-11 16:42:49,701 Train Loss: 0.0013875, Val Loss: 0.0014653 +2024-11-11 16:42:49,701 Epoch 622/2000 +2024-11-11 16:43:05,760 Current Learning Rate: 0.0002955962 +2024-11-11 16:43:06,424 Train Loss: 0.0013393, Val Loss: 0.0014647 +2024-11-11 16:43:06,425 Epoch 623/2000 +2024-11-11 16:43:21,992 Current Learning Rate: 0.0003227798 +2024-11-11 16:43:21,992 Train Loss: 0.0013753, Val Loss: 0.0014654 +2024-11-11 16:43:21,993 Epoch 624/2000 +2024-11-11 16:43:37,681 Current Learning Rate: 0.0003511176 +2024-11-11 16:43:38,352 Train Loss: 0.0012840, Val Loss: 0.0014643 +2024-11-11 16:43:38,352 Epoch 625/2000 +2024-11-11 16:43:53,361 Current Learning Rate: 0.0003806023 +2024-11-11 16:43:53,362 Train Loss: 0.0014439, Val Loss: 0.0014646 +2024-11-11 16:43:53,362 Epoch 626/2000 +2024-11-11 16:44:09,429 Current Learning Rate: 0.0004112269 +2024-11-11 16:44:10,150 Train Loss: 0.0013898, Val Loss: 0.0014640 +2024-11-11 16:44:10,150 Epoch 627/2000 +2024-11-11 16:44:25,270 Current Learning Rate: 0.0004429836 +2024-11-11 16:44:25,271 Train Loss: 0.0014299, Val Loss: 0.0014642 +2024-11-11 16:44:25,271 Epoch 628/2000 +2024-11-11 16:44:41,604 Current Learning Rate: 0.0004758647 +2024-11-11 16:44:41,605 Train Loss: 0.0014359, Val Loss: 0.0014646 +2024-11-11 16:44:41,605 Epoch 629/2000 +2024-11-11 16:44:58,060 Current Learning Rate: 0.0005098621 +2024-11-11 16:44:58,060 Train Loss: 0.0013414, Val Loss: 0.0014663 +2024-11-11 16:44:58,061 Epoch 630/2000 +2024-11-11 16:45:14,057 Current Learning Rate: 0.0005449674 +2024-11-11 16:45:14,057 Train Loss: 0.0012910, Val Loss: 0.0014674 +2024-11-11 
16:45:14,058 Epoch 631/2000 +2024-11-11 16:45:29,900 Current Learning Rate: 0.0005811718 +2024-11-11 16:45:29,901 Train Loss: 0.0014344, Val Loss: 0.0014734 +2024-11-11 16:45:29,901 Epoch 632/2000 +2024-11-11 16:45:45,480 Current Learning Rate: 0.0006184666 +2024-11-11 16:45:45,480 Train Loss: 0.0013955, Val Loss: 0.0014720 +2024-11-11 16:45:45,481 Epoch 633/2000 +2024-11-11 16:46:02,147 Current Learning Rate: 0.0006568424 +2024-11-11 16:46:02,148 Train Loss: 0.0012893, Val Loss: 0.0014700 +2024-11-11 16:46:02,148 Epoch 634/2000 +2024-11-11 16:46:17,450 Current Learning Rate: 0.0006962899 +2024-11-11 16:46:17,451 Train Loss: 0.0014054, Val Loss: 0.0014676 +2024-11-11 16:46:17,451 Epoch 635/2000 +2024-11-11 16:46:33,174 Current Learning Rate: 0.0007367992 +2024-11-11 16:46:33,175 Train Loss: 0.0014313, Val Loss: 0.0014680 +2024-11-11 16:46:33,175 Epoch 636/2000 +2024-11-11 16:46:49,428 Current Learning Rate: 0.0007783604 +2024-11-11 16:46:49,428 Train Loss: 0.0013911, Val Loss: 0.0014703 +2024-11-11 16:46:49,428 Epoch 637/2000 +2024-11-11 16:47:04,960 Current Learning Rate: 0.0008209632 +2024-11-11 16:47:05,715 Train Loss: 0.0013219, Val Loss: 0.0014604 +2024-11-11 16:47:05,716 Epoch 638/2000 +2024-11-11 16:47:21,127 Current Learning Rate: 0.0008645971 +2024-11-11 16:47:21,872 Train Loss: 0.0012867, Val Loss: 0.0014600 +2024-11-11 16:47:21,872 Epoch 639/2000 +2024-11-11 16:47:37,088 Current Learning Rate: 0.0009092514 +2024-11-11 16:47:37,089 Train Loss: 0.0015195, Val Loss: 0.0014849 +2024-11-11 16:47:37,089 Epoch 640/2000 +2024-11-11 16:47:53,161 Current Learning Rate: 0.0009549150 +2024-11-11 16:47:53,161 Train Loss: 0.0014029, Val Loss: 0.0014726 +2024-11-11 16:47:53,161 Epoch 641/2000 +2024-11-11 16:48:09,991 Current Learning Rate: 0.0010015767 +2024-11-11 16:48:10,764 Train Loss: 0.0012848, Val Loss: 0.0014594 +2024-11-11 16:48:10,764 Epoch 642/2000 +2024-11-11 16:48:25,904 Current Learning Rate: 0.0010492249 +2024-11-11 16:48:25,905 Train Loss: 0.0014437, Val 
Loss: 0.0014886 +2024-11-11 16:48:25,905 Epoch 643/2000 +2024-11-11 16:48:42,463 Current Learning Rate: 0.0010978480 +2024-11-11 16:48:42,464 Train Loss: 0.0014407, Val Loss: 0.0014649 +2024-11-11 16:48:42,464 Epoch 644/2000 +2024-11-11 16:48:57,818 Current Learning Rate: 0.0011474338 +2024-11-11 16:48:57,819 Train Loss: 0.0013760, Val Loss: 0.0014609 +2024-11-11 16:48:57,819 Epoch 645/2000 +2024-11-11 16:49:13,324 Current Learning Rate: 0.0011979702 +2024-11-11 16:49:13,325 Train Loss: 0.0013695, Val Loss: 0.0015007 +2024-11-11 16:49:13,325 Epoch 646/2000 +2024-11-11 16:49:28,425 Current Learning Rate: 0.0012494447 +2024-11-11 16:49:28,426 Train Loss: 0.0013328, Val Loss: 0.0014894 +2024-11-11 16:49:28,426 Epoch 647/2000 +2024-11-11 16:49:43,877 Current Learning Rate: 0.0013018445 +2024-11-11 16:49:43,878 Train Loss: 0.0013234, Val Loss: 0.0015706 +2024-11-11 16:49:43,878 Epoch 648/2000 +2024-11-11 16:49:59,402 Current Learning Rate: 0.0013551569 +2024-11-11 16:50:00,113 Train Loss: 0.0012977, Val Loss: 0.0014579 +2024-11-11 16:50:00,113 Epoch 649/2000 +2024-11-11 16:50:14,718 Current Learning Rate: 0.0014093685 +2024-11-11 16:50:14,719 Train Loss: 0.0013465, Val Loss: 0.0014898 +2024-11-11 16:50:14,720 Epoch 650/2000 +2024-11-11 16:50:30,729 Current Learning Rate: 0.0014644661 +2024-11-11 16:50:30,729 Train Loss: 0.0013614, Val Loss: 0.0014663 +2024-11-11 16:50:30,730 Epoch 651/2000 +2024-11-11 16:50:47,357 Current Learning Rate: 0.0015204360 +2024-11-11 16:50:47,357 Train Loss: 0.0014855, Val Loss: 0.0015023 +2024-11-11 16:50:47,358 Epoch 652/2000 +2024-11-11 16:51:03,667 Current Learning Rate: 0.0015772645 +2024-11-11 16:51:03,667 Train Loss: 0.0014394, Val Loss: 0.0015347 +2024-11-11 16:51:03,667 Epoch 653/2000 +2024-11-11 16:51:19,923 Current Learning Rate: 0.0016349374 +2024-11-11 16:51:19,923 Train Loss: 0.0018136, Val Loss: 0.0015625 +2024-11-11 16:51:19,924 Epoch 654/2000 +2024-11-11 16:51:35,867 Current Learning Rate: 0.0016934407 +2024-11-11 
16:51:35,867 Train Loss: 0.0014872, Val Loss: 0.0015376 +2024-11-11 16:51:35,867 Epoch 655/2000 +2024-11-11 16:51:51,460 Current Learning Rate: 0.0017527598 +2024-11-11 16:51:51,461 Train Loss: 0.0014274, Val Loss: 0.0014770 +2024-11-11 16:51:51,461 Epoch 656/2000 +2024-11-11 16:52:08,309 Current Learning Rate: 0.0018128801 +2024-11-11 16:52:08,310 Train Loss: 0.0013513, Val Loss: 0.0014603 +2024-11-11 16:52:08,310 Epoch 657/2000 +2024-11-11 16:52:23,949 Current Learning Rate: 0.0018737867 +2024-11-11 16:52:24,645 Train Loss: 0.0013941, Val Loss: 0.0014547 +2024-11-11 16:52:24,645 Epoch 658/2000 +2024-11-11 16:52:40,023 Current Learning Rate: 0.0019354647 +2024-11-11 16:52:40,789 Train Loss: 0.0014043, Val Loss: 0.0014433 +2024-11-11 16:52:40,790 Epoch 659/2000 +2024-11-11 16:52:56,441 Current Learning Rate: 0.0019978989 +2024-11-11 16:52:56,442 Train Loss: 0.0014911, Val Loss: 0.0014573 +2024-11-11 16:52:56,442 Epoch 660/2000 +2024-11-11 16:53:13,614 Current Learning Rate: 0.0020610737 +2024-11-11 16:53:13,615 Train Loss: 0.0013329, Val Loss: 0.0014559 +2024-11-11 16:53:13,615 Epoch 661/2000 +2024-11-11 16:53:29,847 Current Learning Rate: 0.0021249737 +2024-11-11 16:53:29,849 Train Loss: 0.0015343, Val Loss: 0.0014612 +2024-11-11 16:53:29,849 Epoch 662/2000 +2024-11-11 16:53:45,481 Current Learning Rate: 0.0021895831 +2024-11-11 16:53:45,482 Train Loss: 0.0014109, Val Loss: 0.0014702 +2024-11-11 16:53:45,482 Epoch 663/2000 +2024-11-11 16:54:01,231 Current Learning Rate: 0.0022548859 +2024-11-11 16:54:01,232 Train Loss: 0.0014110, Val Loss: 0.0014773 +2024-11-11 16:54:01,232 Epoch 664/2000 +2024-11-11 16:54:17,349 Current Learning Rate: 0.0023208660 +2024-11-11 16:54:17,350 Train Loss: 0.0013513, Val Loss: 0.0014659 +2024-11-11 16:54:17,350 Epoch 665/2000 +2024-11-11 16:54:33,349 Current Learning Rate: 0.0023875072 +2024-11-11 16:54:33,350 Train Loss: 0.0013578, Val Loss: 0.0014904 +2024-11-11 16:54:33,350 Epoch 666/2000 +2024-11-11 16:54:49,275 Current Learning 
Rate: 0.0024547929 +2024-11-11 16:54:49,275 Train Loss: 0.0013009, Val Loss: 0.0014890 +2024-11-11 16:54:49,276 Epoch 667/2000 +2024-11-11 16:55:04,513 Current Learning Rate: 0.0025227067 +2024-11-11 16:55:04,514 Train Loss: 0.0016047, Val Loss: 0.0015879 +2024-11-11 16:55:04,514 Epoch 668/2000 +2024-11-11 16:55:19,894 Current Learning Rate: 0.0025912316 +2024-11-11 16:55:19,895 Train Loss: 0.0015072, Val Loss: 0.0016243 +2024-11-11 16:55:19,895 Epoch 669/2000 +2024-11-11 16:55:35,822 Current Learning Rate: 0.0026603509 +2024-11-11 16:55:35,823 Train Loss: 0.0016115, Val Loss: 0.0015441 +2024-11-11 16:55:35,823 Epoch 670/2000 +2024-11-11 16:55:52,155 Current Learning Rate: 0.0027300475 +2024-11-11 16:55:52,156 Train Loss: 0.0013965, Val Loss: 0.0014989 +2024-11-11 16:55:52,156 Epoch 671/2000 +2024-11-11 16:56:08,921 Current Learning Rate: 0.0028003042 +2024-11-11 16:56:08,922 Train Loss: 0.0014858, Val Loss: 0.0015511 +2024-11-11 16:56:08,922 Epoch 672/2000 +2024-11-11 16:56:24,914 Current Learning Rate: 0.0028711035 +2024-11-11 16:56:24,915 Train Loss: 0.0014088, Val Loss: 0.0014852 +2024-11-11 16:56:24,915 Epoch 673/2000 +2024-11-11 16:56:41,031 Current Learning Rate: 0.0029424282 +2024-11-11 16:56:41,031 Train Loss: 0.0013549, Val Loss: 0.0014781 +2024-11-11 16:56:41,032 Epoch 674/2000 +2024-11-11 16:56:56,604 Current Learning Rate: 0.0030142605 +2024-11-11 16:56:56,605 Train Loss: 0.0013579, Val Loss: 0.0015252 +2024-11-11 16:56:56,605 Epoch 675/2000 +2024-11-11 16:57:12,803 Current Learning Rate: 0.0030865828 +2024-11-11 16:57:12,805 Train Loss: 0.0015570, Val Loss: 0.0015282 +2024-11-11 16:57:12,805 Epoch 676/2000 +2024-11-11 16:57:28,459 Current Learning Rate: 0.0031593772 +2024-11-11 16:57:28,459 Train Loss: 0.0014531, Val Loss: 0.0014883 +2024-11-11 16:57:28,459 Epoch 677/2000 +2024-11-11 16:57:43,221 Current Learning Rate: 0.0032326258 +2024-11-11 16:57:43,221 Train Loss: 0.0013234, Val Loss: 0.0015019 +2024-11-11 16:57:43,221 Epoch 678/2000 +2024-11-11 
16:57:58,533 Current Learning Rate: 0.0033063104 +2024-11-11 16:57:58,534 Train Loss: 0.0013389, Val Loss: 0.0014453 +2024-11-11 16:57:58,534 Epoch 679/2000 +2024-11-11 16:58:14,635 Current Learning Rate: 0.0033804129 +2024-11-11 16:58:15,303 Train Loss: 0.0012721, Val Loss: 0.0014410 +2024-11-11 16:58:15,303 Epoch 680/2000 +2024-11-11 16:58:30,687 Current Learning Rate: 0.0034549150 +2024-11-11 16:58:31,340 Train Loss: 0.0012667, Val Loss: 0.0014372 +2024-11-11 16:58:31,340 Epoch 681/2000 +2024-11-11 16:58:45,805 Current Learning Rate: 0.0035297984 +2024-11-11 16:58:45,806 Train Loss: 0.0013364, Val Loss: 0.0014814 +2024-11-11 16:58:45,806 Epoch 682/2000 +2024-11-11 16:59:01,372 Current Learning Rate: 0.0036050445 +2024-11-11 16:59:01,372 Train Loss: 0.0014830, Val Loss: 0.0014760 +2024-11-11 16:59:01,373 Epoch 683/2000 +2024-11-11 16:59:16,758 Current Learning Rate: 0.0036806348 +2024-11-11 16:59:16,758 Train Loss: 0.0013517, Val Loss: 0.0015127 +2024-11-11 16:59:16,758 Epoch 684/2000 +2024-11-11 16:59:32,008 Current Learning Rate: 0.0037565506 +2024-11-11 16:59:32,008 Train Loss: 0.0015798, Val Loss: 0.0015486 +2024-11-11 16:59:32,009 Epoch 685/2000 +2024-11-11 16:59:47,634 Current Learning Rate: 0.0038327732 +2024-11-11 16:59:47,634 Train Loss: 0.0013792, Val Loss: 0.0016443 +2024-11-11 16:59:47,635 Epoch 686/2000 +2024-11-11 17:00:03,985 Current Learning Rate: 0.0039092838 +2024-11-11 17:00:03,985 Train Loss: 0.0015946, Val Loss: 0.0015486 +2024-11-11 17:00:03,985 Epoch 687/2000 +2024-11-11 17:00:20,674 Current Learning Rate: 0.0039860635 +2024-11-11 17:00:20,675 Train Loss: 0.0013978, Val Loss: 0.0014972 +2024-11-11 17:00:20,675 Epoch 688/2000 +2024-11-11 17:00:36,709 Current Learning Rate: 0.0040630934 +2024-11-11 17:00:36,709 Train Loss: 0.0014477, Val Loss: 0.0014903 +2024-11-11 17:00:36,710 Epoch 689/2000 +2024-11-11 17:00:51,531 Current Learning Rate: 0.0041403545 +2024-11-11 17:00:51,532 Train Loss: 0.0014564, Val Loss: 0.0015632 +2024-11-11 
17:00:51,532 Epoch 690/2000 +2024-11-11 17:01:06,923 Current Learning Rate: 0.0042178277 +2024-11-11 17:01:06,924 Train Loss: 0.0015276, Val Loss: 0.0015127 +2024-11-11 17:01:06,924 Epoch 691/2000 +2024-11-11 17:01:22,501 Current Learning Rate: 0.0042954938 +2024-11-11 17:01:22,502 Train Loss: 0.0014424, Val Loss: 0.0015113 +2024-11-11 17:01:22,502 Epoch 692/2000 +2024-11-11 17:01:37,948 Current Learning Rate: 0.0043733338 +2024-11-11 17:01:37,948 Train Loss: 0.0013432, Val Loss: 0.0015069 +2024-11-11 17:01:37,949 Epoch 693/2000 +2024-11-11 17:01:53,419 Current Learning Rate: 0.0044513284 +2024-11-11 17:01:53,420 Train Loss: 0.0016064, Val Loss: 0.0016084 +2024-11-11 17:01:53,420 Epoch 694/2000 +2024-11-11 17:02:09,692 Current Learning Rate: 0.0045294584 +2024-11-11 17:02:09,692 Train Loss: 0.0016979, Val Loss: 0.0015626 +2024-11-11 17:02:09,693 Epoch 695/2000 +2024-11-11 17:02:25,502 Current Learning Rate: 0.0046077045 +2024-11-11 17:02:25,503 Train Loss: 0.0013709, Val Loss: 0.0015908 +2024-11-11 17:02:25,503 Epoch 696/2000 +2024-11-11 17:02:42,140 Current Learning Rate: 0.0046860474 +2024-11-11 17:02:42,140 Train Loss: 0.0013405, Val Loss: 0.0014735 +2024-11-11 17:02:42,141 Epoch 697/2000 +2024-11-11 17:02:58,552 Current Learning Rate: 0.0047644677 +2024-11-11 17:02:58,553 Train Loss: 0.0015411, Val Loss: 0.0015687 +2024-11-11 17:02:58,553 Epoch 698/2000 +2024-11-11 17:03:14,633 Current Learning Rate: 0.0048429462 +2024-11-11 17:03:14,634 Train Loss: 0.0014065, Val Loss: 0.0015166 +2024-11-11 17:03:14,634 Epoch 699/2000 +2024-11-11 17:03:31,389 Current Learning Rate: 0.0049214634 +2024-11-11 17:03:31,389 Train Loss: 0.0019209, Val Loss: 0.0015936 +2024-11-11 17:03:31,389 Epoch 700/2000 +2024-11-11 17:03:47,452 Current Learning Rate: 0.0050000000 +2024-11-11 17:03:47,452 Train Loss: 0.0014235, Val Loss: 0.0015003 +2024-11-11 17:03:47,453 Epoch 701/2000 +2024-11-11 17:04:03,088 Current Learning Rate: 0.0050785366 +2024-11-11 17:04:03,089 Train Loss: 0.0013756, Val 
Loss: 0.0015143 +2024-11-11 17:04:03,090 Epoch 702/2000 +2024-11-11 17:04:18,856 Current Learning Rate: 0.0051570538 +2024-11-11 17:04:18,856 Train Loss: 0.0013229, Val Loss: 0.0015112 +2024-11-11 17:04:18,857 Epoch 703/2000 +2024-11-11 17:04:35,025 Current Learning Rate: 0.0052355323 +2024-11-11 17:04:35,026 Train Loss: 0.0014407, Val Loss: 0.0015284 +2024-11-11 17:04:35,026 Epoch 704/2000 +2024-11-11 17:04:51,159 Current Learning Rate: 0.0053139526 +2024-11-11 17:04:51,159 Train Loss: 0.0017120, Val Loss: 0.0016282 +2024-11-11 17:04:51,159 Epoch 705/2000 +2024-11-11 17:05:07,342 Current Learning Rate: 0.0053922955 +2024-11-11 17:05:07,342 Train Loss: 0.0015467, Val Loss: 0.0015771 +2024-11-11 17:05:07,343 Epoch 706/2000 +2024-11-11 17:05:23,397 Current Learning Rate: 0.0054705416 +2024-11-11 17:05:23,398 Train Loss: 0.0013901, Val Loss: 0.0014990 +2024-11-11 17:05:23,398 Epoch 707/2000 +2024-11-11 17:05:39,743 Current Learning Rate: 0.0055486716 +2024-11-11 17:05:39,744 Train Loss: 0.0015504, Val Loss: 0.0014934 +2024-11-11 17:05:39,744 Epoch 708/2000 +2024-11-11 17:05:55,815 Current Learning Rate: 0.0056266662 +2024-11-11 17:05:55,815 Train Loss: 0.0014991, Val Loss: 0.0014600 +2024-11-11 17:05:55,815 Epoch 709/2000 +2024-11-11 17:06:11,530 Current Learning Rate: 0.0057045062 +2024-11-11 17:06:11,530 Train Loss: 0.0013958, Val Loss: 0.0014611 +2024-11-11 17:06:11,530 Epoch 710/2000 +2024-11-11 17:06:28,017 Current Learning Rate: 0.0057821723 +2024-11-11 17:06:28,018 Train Loss: 0.0012802, Val Loss: 0.0015189 +2024-11-11 17:06:28,018 Epoch 711/2000 +2024-11-11 17:06:44,563 Current Learning Rate: 0.0058596455 +2024-11-11 17:06:44,564 Train Loss: 0.0013394, Val Loss: 0.0014677 +2024-11-11 17:06:44,564 Epoch 712/2000 +2024-11-11 17:07:00,097 Current Learning Rate: 0.0059369066 +2024-11-11 17:07:00,097 Train Loss: 0.0014255, Val Loss: 0.0014887 +2024-11-11 17:07:00,097 Epoch 713/2000 +2024-11-11 17:07:15,474 Current Learning Rate: 0.0060139365 +2024-11-11 
17:07:15,475 Train Loss: 0.0014388, Val Loss: 0.0015676 +2024-11-11 17:07:15,475 Epoch 714/2000 +2024-11-11 17:07:30,828 Current Learning Rate: 0.0060907162 +2024-11-11 17:07:30,828 Train Loss: 0.0017099, Val Loss: 0.0017168 +2024-11-11 17:07:30,829 Epoch 715/2000 +2024-11-11 17:07:46,216 Current Learning Rate: 0.0061672268 +2024-11-11 17:07:46,216 Train Loss: 0.0017264, Val Loss: 0.0015712 +2024-11-11 17:07:46,216 Epoch 716/2000 +2024-11-11 17:08:02,523 Current Learning Rate: 0.0062434494 +2024-11-11 17:08:02,524 Train Loss: 0.0014189, Val Loss: 0.0014860 +2024-11-11 17:08:02,524 Epoch 717/2000 +2024-11-11 17:08:18,619 Current Learning Rate: 0.0063193652 +2024-11-11 17:08:18,620 Train Loss: 0.0014357, Val Loss: 0.0015216 +2024-11-11 17:08:18,620 Epoch 718/2000 +2024-11-11 17:08:35,352 Current Learning Rate: 0.0063949555 +2024-11-11 17:08:35,353 Train Loss: 0.0013072, Val Loss: 0.0014546 +2024-11-11 17:08:35,353 Epoch 719/2000 +2024-11-11 17:08:51,000 Current Learning Rate: 0.0064702016 +2024-11-11 17:08:51,000 Train Loss: 0.0014518, Val Loss: 0.0014949 +2024-11-11 17:08:51,001 Epoch 720/2000 +2024-11-11 17:09:06,208 Current Learning Rate: 0.0065450850 +2024-11-11 17:09:06,208 Train Loss: 0.0014047, Val Loss: 0.0015171 +2024-11-11 17:09:06,209 Epoch 721/2000 +2024-11-11 17:09:21,760 Current Learning Rate: 0.0066195871 +2024-11-11 17:09:21,761 Train Loss: 0.0014210, Val Loss: 0.0014804 +2024-11-11 17:09:21,761 Epoch 722/2000 +2024-11-11 17:09:37,125 Current Learning Rate: 0.0066936896 +2024-11-11 17:09:37,125 Train Loss: 0.0014459, Val Loss: 0.0018787 +2024-11-11 17:09:37,125 Epoch 723/2000 +2024-11-11 17:09:52,220 Current Learning Rate: 0.0067673742 +2024-11-11 17:09:52,220 Train Loss: 0.0013806, Val Loss: 0.0015081 +2024-11-11 17:09:52,221 Epoch 724/2000 +2024-11-11 17:10:07,619 Current Learning Rate: 0.0068406228 +2024-11-11 17:10:07,619 Train Loss: 0.0014868, Val Loss: 0.0015729 +2024-11-11 17:10:07,620 Epoch 725/2000 +2024-11-11 17:10:23,573 Current Learning 
Rate: 0.0069134172 +2024-11-11 17:10:23,574 Train Loss: 0.0016074, Val Loss: 0.0017101 +2024-11-11 17:10:23,574 Epoch 726/2000 +2024-11-11 17:10:39,384 Current Learning Rate: 0.0069857395 +2024-11-11 17:10:39,385 Train Loss: 0.0013524, Val Loss: 0.0014477 +2024-11-11 17:10:39,385 Epoch 727/2000 +2024-11-11 17:10:55,280 Current Learning Rate: 0.0070575718 +2024-11-11 17:10:55,281 Train Loss: 0.0013677, Val Loss: 0.0014720 +2024-11-11 17:10:55,281 Epoch 728/2000 +2024-11-11 17:11:10,835 Current Learning Rate: 0.0071288965 +2024-11-11 17:11:10,836 Train Loss: 0.0014198, Val Loss: 0.0015157 +2024-11-11 17:11:10,836 Epoch 729/2000 +2024-11-11 17:11:27,227 Current Learning Rate: 0.0071996958 +2024-11-11 17:11:27,228 Train Loss: 0.0012735, Val Loss: 0.0014381 +2024-11-11 17:11:27,228 Epoch 730/2000 +2024-11-11 17:11:43,332 Current Learning Rate: 0.0072699525 +2024-11-11 17:11:43,332 Train Loss: 0.0015931, Val Loss: 0.0018424 +2024-11-11 17:11:43,333 Epoch 731/2000 +2024-11-11 17:12:00,490 Current Learning Rate: 0.0073396491 +2024-11-11 17:12:00,491 Train Loss: 0.0016338, Val Loss: 0.0015356 +2024-11-11 17:12:00,491 Epoch 732/2000 +2024-11-11 17:12:16,637 Current Learning Rate: 0.0074087684 +2024-11-11 17:12:16,637 Train Loss: 0.0014040, Val Loss: 0.0014609 +2024-11-11 17:12:16,638 Epoch 733/2000 +2024-11-11 17:12:31,840 Current Learning Rate: 0.0074772933 +2024-11-11 17:12:32,671 Train Loss: 0.0012633, Val Loss: 0.0014138 +2024-11-11 17:12:32,671 Epoch 734/2000 +2024-11-11 17:12:46,926 Current Learning Rate: 0.0075452071 +2024-11-11 17:12:46,927 Train Loss: 0.0013944, Val Loss: 0.0015161 +2024-11-11 17:12:46,927 Epoch 735/2000 +2024-11-11 17:13:02,279 Current Learning Rate: 0.0076124928 +2024-11-11 17:13:02,280 Train Loss: 0.0014223, Val Loss: 0.0015125 +2024-11-11 17:13:02,280 Epoch 736/2000 +2024-11-11 17:13:17,803 Current Learning Rate: 0.0076791340 +2024-11-11 17:13:17,804 Train Loss: 0.0013919, Val Loss: 0.0015442 +2024-11-11 17:13:17,804 Epoch 737/2000 +2024-11-11 
17:13:32,973 Current Learning Rate: 0.0077451141 +2024-11-11 17:13:32,973 Train Loss: 0.0013839, Val Loss: 0.0015243 +2024-11-11 17:13:32,974 Epoch 738/2000 +2024-11-11 17:13:48,400 Current Learning Rate: 0.0078104169 +2024-11-11 17:13:48,400 Train Loss: 0.0014401, Val Loss: 0.0015702 +2024-11-11 17:13:48,400 Epoch 739/2000 +2024-11-11 17:14:03,439 Current Learning Rate: 0.0078750263 +2024-11-11 17:14:03,439 Train Loss: 0.0018107, Val Loss: 0.0018858 +2024-11-11 17:14:03,439 Epoch 740/2000 +2024-11-11 17:14:18,950 Current Learning Rate: 0.0079389263 +2024-11-11 17:14:18,950 Train Loss: 0.0016465, Val Loss: 0.0016108 +2024-11-11 17:14:18,950 Epoch 741/2000 +2024-11-11 17:14:35,116 Current Learning Rate: 0.0080021011 +2024-11-11 17:14:35,117 Train Loss: 0.0014754, Val Loss: 0.0014901 +2024-11-11 17:14:35,117 Epoch 742/2000 +2024-11-11 17:14:51,098 Current Learning Rate: 0.0080645353 +2024-11-11 17:14:51,098 Train Loss: 0.0013448, Val Loss: 0.0014771 +2024-11-11 17:14:51,098 Epoch 743/2000 +2024-11-11 17:15:06,433 Current Learning Rate: 0.0081262133 +2024-11-11 17:15:06,434 Train Loss: 0.0014345, Val Loss: 0.0014970 +2024-11-11 17:15:06,434 Epoch 744/2000 +2024-11-11 17:15:21,907 Current Learning Rate: 0.0081871199 +2024-11-11 17:15:21,907 Train Loss: 0.0014178, Val Loss: 0.0014923 +2024-11-11 17:15:21,908 Epoch 745/2000 +2024-11-11 17:15:38,443 Current Learning Rate: 0.0082472402 +2024-11-11 17:15:38,444 Train Loss: 0.0013038, Val Loss: 0.0014537 +2024-11-11 17:15:38,444 Epoch 746/2000 +2024-11-11 17:15:54,349 Current Learning Rate: 0.0083065593 +2024-11-11 17:15:54,350 Train Loss: 0.0015127, Val Loss: 0.0015143 +2024-11-11 17:15:54,350 Epoch 747/2000 +2024-11-11 17:16:10,352 Current Learning Rate: 0.0083650626 +2024-11-11 17:16:10,352 Train Loss: 0.0015203, Val Loss: 0.0014412 +2024-11-11 17:16:10,353 Epoch 748/2000 +2024-11-11 17:16:26,800 Current Learning Rate: 0.0084227355 +2024-11-11 17:16:26,801 Train Loss: 0.0014944, Val Loss: 0.0015220 +2024-11-11 
17:16:26,801 Epoch 749/2000 +2024-11-11 17:16:42,648 Current Learning Rate: 0.0084795640 +2024-11-11 17:16:43,437 Train Loss: 0.0012655, Val Loss: 0.0013929 +2024-11-11 17:16:43,437 Epoch 750/2000 +2024-11-11 17:16:59,023 Current Learning Rate: 0.0085355339 +2024-11-11 17:16:59,024 Train Loss: 0.0014169, Val Loss: 0.0014294 +2024-11-11 17:16:59,024 Epoch 751/2000 +2024-11-11 17:17:15,535 Current Learning Rate: 0.0085906315 +2024-11-11 17:17:15,535 Train Loss: 0.0014581, Val Loss: 0.0014705 +2024-11-11 17:17:15,535 Epoch 752/2000 +2024-11-11 17:17:31,363 Current Learning Rate: 0.0086448431 +2024-11-11 17:17:32,141 Train Loss: 0.0012912, Val Loss: 0.0013847 +2024-11-11 17:17:32,141 Epoch 753/2000 +2024-11-11 17:17:47,246 Current Learning Rate: 0.0086981555 +2024-11-11 17:17:47,247 Train Loss: 0.0013669, Val Loss: 0.0014633 +2024-11-11 17:17:47,247 Epoch 754/2000 +2024-11-11 17:18:03,462 Current Learning Rate: 0.0087505553 +2024-11-11 17:18:03,463 Train Loss: 0.0013582, Val Loss: 0.0014893 +2024-11-11 17:18:03,463 Epoch 755/2000 +2024-11-11 17:18:20,208 Current Learning Rate: 0.0088020298 +2024-11-11 17:18:20,209 Train Loss: 0.0012477, Val Loss: 0.0014051 +2024-11-11 17:18:20,210 Epoch 756/2000 +2024-11-11 17:18:37,412 Current Learning Rate: 0.0088525662 +2024-11-11 17:18:37,413 Train Loss: 0.0012354, Val Loss: 0.0014067 +2024-11-11 17:18:37,413 Epoch 757/2000 +2024-11-11 17:18:54,361 Current Learning Rate: 0.0089021520 +2024-11-11 17:18:54,362 Train Loss: 0.0012827, Val Loss: 0.0014827 +2024-11-11 17:18:54,362 Epoch 758/2000 +2024-11-11 17:19:11,100 Current Learning Rate: 0.0089507751 +2024-11-11 17:19:11,101 Train Loss: 0.0013223, Val Loss: 0.0014795 +2024-11-11 17:19:11,101 Epoch 759/2000 +2024-11-11 17:19:26,911 Current Learning Rate: 0.0089984233 +2024-11-11 17:19:26,912 Train Loss: 0.0014247, Val Loss: 0.0017839 +2024-11-11 17:19:26,912 Epoch 760/2000 +2024-11-11 17:19:42,660 Current Learning Rate: 0.0090450850 +2024-11-11 17:19:42,661 Train Loss: 0.0013827, Val 
Loss: 0.0014344 +2024-11-11 17:19:42,661 Epoch 761/2000 +2024-11-11 17:19:59,085 Current Learning Rate: 0.0090907486 +2024-11-11 17:19:59,086 Train Loss: 0.0014266, Val Loss: 0.0014995 +2024-11-11 17:19:59,087 Epoch 762/2000 +2024-11-11 17:20:14,751 Current Learning Rate: 0.0091354029 +2024-11-11 17:20:14,752 Train Loss: 0.0014604, Val Loss: 0.0015419 +2024-11-11 17:20:14,752 Epoch 763/2000 +2024-11-11 17:20:30,831 Current Learning Rate: 0.0091790368 +2024-11-11 17:20:30,832 Train Loss: 0.0014246, Val Loss: 0.0015470 +2024-11-11 17:20:30,832 Epoch 764/2000 +2024-11-11 17:20:46,613 Current Learning Rate: 0.0092216396 +2024-11-11 17:20:46,615 Train Loss: 0.0013135, Val Loss: 0.0014740 +2024-11-11 17:20:46,615 Epoch 765/2000 +2024-11-11 17:21:03,003 Current Learning Rate: 0.0092632008 +2024-11-11 17:21:03,004 Train Loss: 0.0014266, Val Loss: 0.0015145 +2024-11-11 17:21:03,004 Epoch 766/2000 +2024-11-11 17:21:19,068 Current Learning Rate: 0.0093037101 +2024-11-11 17:21:19,068 Train Loss: 0.0013968, Val Loss: 0.0014449 +2024-11-11 17:21:19,069 Epoch 767/2000 +2024-11-11 17:21:35,054 Current Learning Rate: 0.0093431576 +2024-11-11 17:21:35,054 Train Loss: 0.0013752, Val Loss: 0.0014473 +2024-11-11 17:21:35,055 Epoch 768/2000 +2024-11-11 17:21:50,172 Current Learning Rate: 0.0093815334 +2024-11-11 17:21:50,172 Train Loss: 0.0013089, Val Loss: 0.0014230 +2024-11-11 17:21:50,173 Epoch 769/2000 +2024-11-11 17:22:05,512 Current Learning Rate: 0.0094188282 +2024-11-11 17:22:05,514 Train Loss: 0.0013418, Val Loss: 0.0014052 +2024-11-11 17:22:05,514 Epoch 770/2000 +2024-11-11 17:22:20,416 Current Learning Rate: 0.0094550326 +2024-11-11 17:22:20,417 Train Loss: 0.0013460, Val Loss: 0.0014729 +2024-11-11 17:22:20,417 Epoch 771/2000 +2024-11-11 17:22:36,114 Current Learning Rate: 0.0094901379 +2024-11-11 17:22:36,115 Train Loss: 0.0013318, Val Loss: 0.0014820 +2024-11-11 17:22:36,115 Epoch 772/2000 +2024-11-11 17:22:51,343 Current Learning Rate: 0.0095241353 +2024-11-11 
17:22:52,057 Train Loss: 0.0011896, Val Loss: 0.0013285 +2024-11-11 17:22:52,057 Epoch 773/2000 +2024-11-11 17:23:07,033 Current Learning Rate: 0.0095570164 +2024-11-11 17:23:07,034 Train Loss: 0.0013304, Val Loss: 0.0013801 +2024-11-11 17:23:07,034 Epoch 774/2000 +2024-11-11 17:23:22,702 Current Learning Rate: 0.0095887731 +2024-11-11 17:23:22,703 Train Loss: 0.0013142, Val Loss: 0.0014194 +2024-11-11 17:23:22,703 Epoch 775/2000 +2024-11-11 17:23:38,813 Current Learning Rate: 0.0096193977 +2024-11-11 17:23:38,813 Train Loss: 0.0013468, Val Loss: 0.0014064 +2024-11-11 17:23:38,813 Epoch 776/2000 +2024-11-11 17:23:55,001 Current Learning Rate: 0.0096488824 +2024-11-11 17:23:55,001 Train Loss: 0.0012973, Val Loss: 0.0014213 +2024-11-11 17:23:55,002 Epoch 777/2000 +2024-11-11 17:24:10,834 Current Learning Rate: 0.0096772202 +2024-11-11 17:24:11,587 Train Loss: 0.0011907, Val Loss: 0.0013259 +2024-11-11 17:24:11,587 Epoch 778/2000 +2024-11-11 17:24:25,919 Current Learning Rate: 0.0097044038 +2024-11-11 17:24:25,920 Train Loss: 0.0013381, Val Loss: 0.0014263 +2024-11-11 17:24:25,920 Epoch 779/2000 +2024-11-11 17:24:42,361 Current Learning Rate: 0.0097304268 +2024-11-11 17:24:42,362 Train Loss: 0.0013340, Val Loss: 0.0013705 +2024-11-11 17:24:42,362 Epoch 780/2000 +2024-11-11 17:24:58,263 Current Learning Rate: 0.0097552826 +2024-11-11 17:24:58,264 Train Loss: 0.0013152, Val Loss: 0.0013503 +2024-11-11 17:24:58,265 Epoch 781/2000 +2024-11-11 17:25:14,052 Current Learning Rate: 0.0097789651 +2024-11-11 17:25:14,053 Train Loss: 0.0012261, Val Loss: 0.0014349 +2024-11-11 17:25:14,054 Epoch 782/2000 +2024-11-11 17:25:30,706 Current Learning Rate: 0.0098014684 +2024-11-11 17:25:30,707 Train Loss: 0.0013467, Val Loss: 0.0014464 +2024-11-11 17:25:30,707 Epoch 783/2000 +2024-11-11 17:25:47,583 Current Learning Rate: 0.0098227871 +2024-11-11 17:25:47,584 Train Loss: 0.0012841, Val Loss: 0.0014398 +2024-11-11 17:25:47,584 Epoch 784/2000 +2024-11-11 17:26:02,644 Current Learning 
Rate: 0.0098429158 +2024-11-11 17:26:02,645 Train Loss: 0.0012904, Val Loss: 0.0013725 +2024-11-11 17:26:02,645 Epoch 785/2000 +2024-11-11 17:26:19,072 Current Learning Rate: 0.0098618496 +2024-11-11 17:26:19,073 Train Loss: 0.0013397, Val Loss: 0.0013750 +2024-11-11 17:26:19,073 Epoch 786/2000 +2024-11-11 17:26:34,064 Current Learning Rate: 0.0098795838 +2024-11-11 17:26:34,065 Train Loss: 0.0013598, Val Loss: 0.0013267 +2024-11-11 17:26:34,065 Epoch 787/2000 +2024-11-11 17:26:49,310 Current Learning Rate: 0.0098961141 +2024-11-11 17:26:49,310 Train Loss: 0.0013358, Val Loss: 0.0014850 +2024-11-11 17:26:49,311 Epoch 788/2000 +2024-11-11 17:27:04,770 Current Learning Rate: 0.0099114363 +2024-11-11 17:27:04,771 Train Loss: 0.0014228, Val Loss: 0.0015173 +2024-11-11 17:27:04,771 Epoch 789/2000 +2024-11-11 17:27:20,899 Current Learning Rate: 0.0099255466 +2024-11-11 17:27:20,899 Train Loss: 0.0014229, Val Loss: 0.0014432 +2024-11-11 17:27:20,899 Epoch 790/2000 +2024-11-11 17:27:37,233 Current Learning Rate: 0.0099384417 +2024-11-11 17:27:37,234 Train Loss: 0.0012249, Val Loss: 0.0013717 +2024-11-11 17:27:37,234 Epoch 791/2000 +2024-11-11 17:27:53,191 Current Learning Rate: 0.0099501183 +2024-11-11 17:27:53,192 Train Loss: 0.0011699, Val Loss: 0.0013676 +2024-11-11 17:27:53,192 Epoch 792/2000 +2024-11-11 17:28:09,828 Current Learning Rate: 0.0099605735 +2024-11-11 17:28:09,828 Train Loss: 0.0012559, Val Loss: 0.0013603 +2024-11-11 17:28:09,828 Epoch 793/2000 +2024-11-11 17:28:26,718 Current Learning Rate: 0.0099698048 +2024-11-11 17:28:26,719 Train Loss: 0.0012179, Val Loss: 0.0013536 +2024-11-11 17:28:26,720 Epoch 794/2000 +2024-11-11 17:28:42,520 Current Learning Rate: 0.0099778098 +2024-11-11 17:28:42,521 Train Loss: 0.0013702, Val Loss: 0.0013276 +2024-11-11 17:28:42,521 Epoch 795/2000 +2024-11-11 17:28:58,514 Current Learning Rate: 0.0099845867 +2024-11-11 17:28:59,554 Train Loss: 0.0011509, Val Loss: 0.0012980 +2024-11-11 17:28:59,554 Epoch 796/2000 +2024-11-11 
17:29:14,723 Current Learning Rate: 0.0099901336 +2024-11-11 17:29:14,724 Train Loss: 0.0012321, Val Loss: 0.0013927 +2024-11-11 17:29:14,725 Epoch 797/2000 +2024-11-11 17:29:30,497 Current Learning Rate: 0.0099944494 +2024-11-11 17:29:30,497 Train Loss: 0.0012585, Val Loss: 0.0013159 +2024-11-11 17:29:30,498 Epoch 798/2000 +2024-11-11 17:29:46,559 Current Learning Rate: 0.0099975328 +2024-11-11 17:29:46,559 Train Loss: 0.0011738, Val Loss: 0.0013762 +2024-11-11 17:29:46,560 Epoch 799/2000 +2024-11-11 17:30:01,241 Current Learning Rate: 0.0099993832 +2024-11-11 17:30:01,242 Train Loss: 0.0011849, Val Loss: 0.0013323 +2024-11-11 17:30:01,242 Epoch 800/2000 +2024-11-11 17:30:16,359 Current Learning Rate: 0.0100000000 +2024-11-11 17:30:16,359 Train Loss: 0.0012342, Val Loss: 0.0013504 +2024-11-11 17:30:16,360 Epoch 801/2000 +2024-11-11 17:30:31,872 Current Learning Rate: 0.0099993832 +2024-11-11 17:30:31,873 Train Loss: 0.0012720, Val Loss: 0.0013579 +2024-11-11 17:30:31,873 Epoch 802/2000 +2024-11-11 17:30:47,663 Current Learning Rate: 0.0099975328 +2024-11-11 17:30:47,663 Train Loss: 0.0012628, Val Loss: 0.0013589 +2024-11-11 17:30:47,664 Epoch 803/2000 +2024-11-11 17:31:03,484 Current Learning Rate: 0.0099944494 +2024-11-11 17:31:03,484 Train Loss: 0.0013250, Val Loss: 0.0014446 +2024-11-11 17:31:03,485 Epoch 804/2000 +2024-11-11 17:31:19,722 Current Learning Rate: 0.0099901336 +2024-11-11 17:31:19,722 Train Loss: 0.0012140, Val Loss: 0.0013398 +2024-11-11 17:31:19,723 Epoch 805/2000 +2024-11-11 17:31:35,267 Current Learning Rate: 0.0099845867 +2024-11-11 17:31:35,268 Train Loss: 0.0013890, Val Loss: 0.0015312 +2024-11-11 17:31:35,268 Epoch 806/2000 +2024-11-11 17:31:51,629 Current Learning Rate: 0.0099778098 +2024-11-11 17:31:51,630 Train Loss: 0.0015518, Val Loss: 0.0015830 +2024-11-11 17:31:51,630 Epoch 807/2000 +2024-11-11 17:32:06,988 Current Learning Rate: 0.0099698048 +2024-11-11 17:32:06,989 Train Loss: 0.0013848, Val Loss: 0.0013396 +2024-11-11 
17:32:06,989 Epoch 808/2000 +2024-11-11 17:32:22,774 Current Learning Rate: 0.0099605735 +2024-11-11 17:32:22,774 Train Loss: 0.0013203, Val Loss: 0.0013689 +2024-11-11 17:32:22,774 Epoch 809/2000 +2024-11-11 17:32:39,129 Current Learning Rate: 0.0099501183 +2024-11-11 17:32:39,130 Train Loss: 0.0011904, Val Loss: 0.0014270 +2024-11-11 17:32:39,130 Epoch 810/2000 +2024-11-11 17:32:54,516 Current Learning Rate: 0.0099384417 +2024-11-11 17:32:54,516 Train Loss: 0.0012725, Val Loss: 0.0013571 +2024-11-11 17:32:54,516 Epoch 811/2000 +2024-11-11 17:33:10,881 Current Learning Rate: 0.0099255466 +2024-11-11 17:33:10,882 Train Loss: 0.0011830, Val Loss: 0.0013175 +2024-11-11 17:33:10,882 Epoch 812/2000 +2024-11-11 17:33:26,183 Current Learning Rate: 0.0099114363 +2024-11-11 17:33:26,184 Train Loss: 0.0011857, Val Loss: 0.0013092 +2024-11-11 17:33:26,184 Epoch 813/2000 +2024-11-11 17:33:41,410 Current Learning Rate: 0.0098961141 +2024-11-11 17:33:42,181 Train Loss: 0.0011688, Val Loss: 0.0012712 +2024-11-11 17:33:42,181 Epoch 814/2000 +2024-11-11 17:33:57,011 Current Learning Rate: 0.0098795838 +2024-11-11 17:33:57,012 Train Loss: 0.0013154, Val Loss: 0.0013275 +2024-11-11 17:33:57,012 Epoch 815/2000 +2024-11-11 17:34:12,243 Current Learning Rate: 0.0098618496 +2024-11-11 17:34:12,244 Train Loss: 0.0012388, Val Loss: 0.0013311 +2024-11-11 17:34:12,244 Epoch 816/2000 +2024-11-11 17:34:27,673 Current Learning Rate: 0.0098429158 +2024-11-11 17:34:27,674 Train Loss: 0.0010880, Val Loss: 0.0012854 +2024-11-11 17:34:27,674 Epoch 817/2000 +2024-11-11 17:34:43,558 Current Learning Rate: 0.0098227871 +2024-11-11 17:34:43,558 Train Loss: 0.0014446, Val Loss: 0.0013267 +2024-11-11 17:34:43,559 Epoch 818/2000 +2024-11-11 17:34:59,665 Current Learning Rate: 0.0098014684 +2024-11-11 17:34:59,665 Train Loss: 0.0012407, Val Loss: 0.0012922 +2024-11-11 17:34:59,666 Epoch 819/2000 +2024-11-11 17:35:15,079 Current Learning Rate: 0.0097789651 +2024-11-11 17:35:15,080 Train Loss: 0.0012259, Val 
Loss: 0.0012754 +2024-11-11 17:35:15,080 Epoch 820/2000 +2024-11-11 17:35:30,349 Current Learning Rate: 0.0097552826 +2024-11-11 17:35:31,163 Train Loss: 0.0011163, Val Loss: 0.0012624 +2024-11-11 17:35:31,163 Epoch 821/2000 +2024-11-11 17:35:45,653 Current Learning Rate: 0.0097304268 +2024-11-11 17:35:45,653 Train Loss: 0.0012854, Val Loss: 0.0014429 +2024-11-11 17:35:45,654 Epoch 822/2000 +2024-11-11 17:36:00,763 Current Learning Rate: 0.0097044038 +2024-11-11 17:36:00,764 Train Loss: 0.0012318, Val Loss: 0.0013586 +2024-11-11 17:36:00,764 Epoch 823/2000 +2024-11-11 17:36:16,369 Current Learning Rate: 0.0096772202 +2024-11-11 17:36:16,369 Train Loss: 0.0012693, Val Loss: 0.0013378 +2024-11-11 17:36:16,370 Epoch 824/2000 +2024-11-11 17:36:32,042 Current Learning Rate: 0.0096488824 +2024-11-11 17:36:32,042 Train Loss: 0.0012024, Val Loss: 0.0012881 +2024-11-11 17:36:32,042 Epoch 825/2000 +2024-11-11 17:36:48,104 Current Learning Rate: 0.0096193977 +2024-11-11 17:36:48,779 Train Loss: 0.0012591, Val Loss: 0.0012407 +2024-11-11 17:36:48,779 Epoch 826/2000 +2024-11-11 17:37:04,131 Current Learning Rate: 0.0095887731 +2024-11-11 17:37:04,839 Train Loss: 0.0010302, Val Loss: 0.0012064 +2024-11-11 17:37:04,839 Epoch 827/2000 +2024-11-11 17:37:19,939 Current Learning Rate: 0.0095570164 +2024-11-11 17:37:20,693 Train Loss: 0.0011161, Val Loss: 0.0011938 +2024-11-11 17:37:20,693 Epoch 828/2000 +2024-11-11 17:37:35,742 Current Learning Rate: 0.0095241353 +2024-11-11 17:37:35,743 Train Loss: 0.0012396, Val Loss: 0.0013392 +2024-11-11 17:37:35,743 Epoch 829/2000 +2024-11-11 17:37:51,809 Current Learning Rate: 0.0094901379 +2024-11-11 17:37:51,810 Train Loss: 0.0012579, Val Loss: 0.0012809 +2024-11-11 17:37:51,810 Epoch 830/2000 +2024-11-11 17:38:08,141 Current Learning Rate: 0.0094550326 +2024-11-11 17:38:08,141 Train Loss: 0.0010387, Val Loss: 0.0011989 +2024-11-11 17:38:08,142 Epoch 831/2000 +2024-11-11 17:38:24,454 Current Learning Rate: 0.0094188282 +2024-11-11 
17:38:24,454 Train Loss: 0.0013556, Val Loss: 0.0013783 +2024-11-11 17:38:24,455 Epoch 832/2000 +2024-11-11 17:38:41,277 Current Learning Rate: 0.0093815334 +2024-11-11 17:38:41,277 Train Loss: 0.0012077, Val Loss: 0.0012947 +2024-11-11 17:38:41,277 Epoch 833/2000 +2024-11-11 17:38:57,796 Current Learning Rate: 0.0093431576 +2024-11-11 17:38:57,796 Train Loss: 0.0013527, Val Loss: 0.0012552 +2024-11-11 17:38:57,796 Epoch 834/2000 +2024-11-11 17:39:13,801 Current Learning Rate: 0.0093037101 +2024-11-11 17:39:13,802 Train Loss: 0.0011967, Val Loss: 0.0011970 +2024-11-11 17:39:13,802 Epoch 835/2000 +2024-11-11 17:39:30,820 Current Learning Rate: 0.0092632008 +2024-11-11 17:39:31,854 Train Loss: 0.0010825, Val Loss: 0.0011737 +2024-11-11 17:39:31,854 Epoch 836/2000 +2024-11-11 17:39:47,179 Current Learning Rate: 0.0092216396 +2024-11-11 17:39:48,045 Train Loss: 0.0010552, Val Loss: 0.0011468 +2024-11-11 17:39:48,046 Epoch 837/2000 +2024-11-11 17:40:03,981 Current Learning Rate: 0.0091790368 +2024-11-11 17:40:04,817 Train Loss: 0.0010035, Val Loss: 0.0011338 +2024-11-11 17:40:04,817 Epoch 838/2000 +2024-11-11 17:40:20,042 Current Learning Rate: 0.0091354029 +2024-11-11 17:40:20,043 Train Loss: 0.0010370, Val Loss: 0.0012440 +2024-11-11 17:40:20,044 Epoch 839/2000 +2024-11-11 17:40:36,161 Current Learning Rate: 0.0090907486 +2024-11-11 17:40:36,162 Train Loss: 0.0010335, Val Loss: 0.0011683 +2024-11-11 17:40:36,162 Epoch 840/2000 +2024-11-11 17:40:52,126 Current Learning Rate: 0.0090450850 +2024-11-11 17:40:52,127 Train Loss: 0.0010770, Val Loss: 0.0011526 +2024-11-11 17:40:52,127 Epoch 841/2000 +2024-11-11 17:41:07,702 Current Learning Rate: 0.0089984233 +2024-11-11 17:41:07,703 Train Loss: 0.0010399, Val Loss: 0.0011815 +2024-11-11 17:41:07,703 Epoch 842/2000 +2024-11-11 17:41:23,659 Current Learning Rate: 0.0089507751 +2024-11-11 17:41:23,659 Train Loss: 0.0011041, Val Loss: 0.0011996 +2024-11-11 17:41:23,659 Epoch 843/2000 +2024-11-11 17:41:39,361 Current Learning 
Rate: 0.0089021520 +2024-11-11 17:41:39,362 Train Loss: 0.0010959, Val Loss: 0.0011954 +2024-11-11 17:41:39,362 Epoch 844/2000 +2024-11-11 17:41:54,416 Current Learning Rate: 0.0088525662 +2024-11-11 17:41:54,417 Train Loss: 0.0010951, Val Loss: 0.0011798 +2024-11-11 17:41:54,417 Epoch 845/2000 +2024-11-11 17:42:09,721 Current Learning Rate: 0.0088020298 +2024-11-11 17:42:09,721 Train Loss: 0.0010424, Val Loss: 0.0012155 +2024-11-11 17:42:09,722 Epoch 846/2000 +2024-11-11 17:42:25,807 Current Learning Rate: 0.0087505553 +2024-11-11 17:42:25,808 Train Loss: 0.0009852, Val Loss: 0.0011684 +2024-11-11 17:42:25,808 Epoch 847/2000 +2024-11-11 17:42:41,697 Current Learning Rate: 0.0086981555 +2024-11-11 17:42:41,697 Train Loss: 0.0011122, Val Loss: 0.0011711 +2024-11-11 17:42:41,698 Epoch 848/2000 +2024-11-11 17:42:57,606 Current Learning Rate: 0.0086448431 +2024-11-11 17:42:57,607 Train Loss: 0.0009678, Val Loss: 0.0011346 +2024-11-11 17:42:57,607 Epoch 849/2000 +2024-11-11 17:43:13,178 Current Learning Rate: 0.0085906315 +2024-11-11 17:43:13,179 Train Loss: 0.0010650, Val Loss: 0.0011588 +2024-11-11 17:43:13,179 Epoch 850/2000 +2024-11-11 17:43:28,776 Current Learning Rate: 0.0085355339 +2024-11-11 17:43:28,777 Train Loss: 0.0011443, Val Loss: 0.0011940 +2024-11-11 17:43:28,777 Epoch 851/2000 +2024-11-11 17:43:44,447 Current Learning Rate: 0.0084795640 +2024-11-11 17:43:44,448 Train Loss: 0.0011059, Val Loss: 0.0011838 +2024-11-11 17:43:44,448 Epoch 852/2000 +2024-11-11 17:44:00,672 Current Learning Rate: 0.0084227355 +2024-11-11 17:44:00,672 Train Loss: 0.0010949, Val Loss: 0.0011581 +2024-11-11 17:44:00,673 Epoch 853/2000 +2024-11-11 17:44:16,009 Current Learning Rate: 0.0083650626 +2024-11-11 17:44:16,010 Train Loss: 0.0010914, Val Loss: 0.0011674 +2024-11-11 17:44:16,010 Epoch 854/2000 +2024-11-11 17:44:31,467 Current Learning Rate: 0.0083065593 +2024-11-11 17:44:31,468 Train Loss: 0.0010065, Val Loss: 0.0011374 +2024-11-11 17:44:31,468 Epoch 855/2000 +2024-11-11 
17:44:47,646 Current Learning Rate: 0.0082472402 +2024-11-11 17:44:48,469 Train Loss: 0.0010591, Val Loss: 0.0011211 +2024-11-11 17:44:48,470 Epoch 856/2000 +2024-11-11 17:45:04,162 Current Learning Rate: 0.0081871199 +2024-11-11 17:45:04,163 Train Loss: 0.0011274, Val Loss: 0.0011416 +2024-11-11 17:45:04,163 Epoch 857/2000 +2024-11-11 17:45:20,103 Current Learning Rate: 0.0081262133 +2024-11-11 17:45:20,104 Train Loss: 0.0010855, Val Loss: 0.0011784 +2024-11-11 17:45:20,104 Epoch 858/2000 +2024-11-11 17:45:35,618 Current Learning Rate: 0.0080645353 +2024-11-11 17:45:35,618 Train Loss: 0.0010290, Val Loss: 0.0012133 +2024-11-11 17:45:35,619 Epoch 859/2000 +2024-11-11 17:45:51,041 Current Learning Rate: 0.0080021011 +2024-11-11 17:45:51,041 Train Loss: 0.0012244, Val Loss: 0.0013354 +2024-11-11 17:45:51,042 Epoch 860/2000 +2024-11-11 17:46:07,061 Current Learning Rate: 0.0079389263 +2024-11-11 17:46:07,062 Train Loss: 0.0010804, Val Loss: 0.0011281 +2024-11-11 17:46:07,062 Epoch 861/2000 +2024-11-11 17:46:23,099 Current Learning Rate: 0.0078750263 +2024-11-11 17:46:24,133 Train Loss: 0.0009456, Val Loss: 0.0011056 +2024-11-11 17:46:24,134 Epoch 862/2000 +2024-11-11 17:46:39,370 Current Learning Rate: 0.0078104169 +2024-11-11 17:46:39,371 Train Loss: 0.0009509, Val Loss: 0.0011377 +2024-11-11 17:46:39,371 Epoch 863/2000 +2024-11-11 17:46:54,790 Current Learning Rate: 0.0077451141 +2024-11-11 17:46:55,573 Train Loss: 0.0009931, Val Loss: 0.0010930 +2024-11-11 17:46:55,573 Epoch 864/2000 +2024-11-11 17:47:10,014 Current Learning Rate: 0.0076791340 +2024-11-11 17:47:10,015 Train Loss: 0.0010525, Val Loss: 0.0011675 +2024-11-11 17:47:10,015 Epoch 865/2000 +2024-11-11 17:47:25,041 Current Learning Rate: 0.0076124928 +2024-11-11 17:47:25,042 Train Loss: 0.0011514, Val Loss: 0.0012510 +2024-11-11 17:47:25,042 Epoch 866/2000 +2024-11-11 17:47:39,866 Current Learning Rate: 0.0075452071 +2024-11-11 17:47:39,866 Train Loss: 0.0011009, Val Loss: 0.0012376 +2024-11-11 
17:47:39,867 Epoch 867/2000 +2024-11-11 17:47:55,423 Current Learning Rate: 0.0074772933 +2024-11-11 17:47:55,424 Train Loss: 0.0010050, Val Loss: 0.0011109 +2024-11-11 17:47:55,424 Epoch 868/2000 +2024-11-11 17:48:10,209 Current Learning Rate: 0.0074087684 +2024-11-11 17:48:10,209 Train Loss: 0.0010858, Val Loss: 0.0011313 +2024-11-11 17:48:10,209 Epoch 869/2000 +2024-11-11 17:48:25,503 Current Learning Rate: 0.0073396491 +2024-11-11 17:48:26,213 Train Loss: 0.0009453, Val Loss: 0.0010767 +2024-11-11 17:48:26,213 Epoch 870/2000 +2024-11-11 17:48:41,576 Current Learning Rate: 0.0072699525 +2024-11-11 17:48:41,577 Train Loss: 0.0009463, Val Loss: 0.0011096 +2024-11-11 17:48:41,577 Epoch 871/2000 +2024-11-11 17:48:56,868 Current Learning Rate: 0.0071996958 +2024-11-11 17:48:57,665 Train Loss: 0.0008880, Val Loss: 0.0010729 +2024-11-11 17:48:57,665 Epoch 872/2000 +2024-11-11 17:49:12,267 Current Learning Rate: 0.0071288965 +2024-11-11 17:49:13,042 Train Loss: 0.0009114, Val Loss: 0.0010709 +2024-11-11 17:49:13,043 Epoch 873/2000 +2024-11-11 17:49:27,643 Current Learning Rate: 0.0070575718 +2024-11-11 17:49:28,404 Train Loss: 0.0009138, Val Loss: 0.0010572 +2024-11-11 17:49:28,405 Epoch 874/2000 +2024-11-11 17:49:43,480 Current Learning Rate: 0.0069857395 +2024-11-11 17:49:43,481 Train Loss: 0.0009576, Val Loss: 0.0010831 +2024-11-11 17:49:43,481 Epoch 875/2000 +2024-11-11 17:49:58,731 Current Learning Rate: 0.0069134172 +2024-11-11 17:49:58,731 Train Loss: 0.0011312, Val Loss: 0.0011140 +2024-11-11 17:49:58,732 Epoch 876/2000 +2024-11-11 17:50:14,029 Current Learning Rate: 0.0068406228 +2024-11-11 17:50:14,029 Train Loss: 0.0009896, Val Loss: 0.0011163 +2024-11-11 17:50:14,029 Epoch 877/2000 +2024-11-11 17:50:31,767 Current Learning Rate: 0.0067673742 +2024-11-11 17:50:31,767 Train Loss: 0.0009746, Val Loss: 0.0010994 +2024-11-11 17:50:31,767 Epoch 878/2000 +2024-11-11 17:50:47,210 Current Learning Rate: 0.0066936896 +2024-11-11 17:50:47,210 Train Loss: 0.0010055, Val 
Loss: 0.0011662 +2024-11-11 17:50:47,211 Epoch 879/2000 +2024-11-11 17:51:02,431 Current Learning Rate: 0.0066195871 +2024-11-11 17:51:02,432 Train Loss: 0.0010542, Val Loss: 0.0011160 +2024-11-11 17:51:02,432 Epoch 880/2000 +2024-11-11 17:51:18,395 Current Learning Rate: 0.0065450850 +2024-11-11 17:51:18,395 Train Loss: 0.0009923, Val Loss: 0.0011051 +2024-11-11 17:51:18,395 Epoch 881/2000 +2024-11-11 17:51:34,914 Current Learning Rate: 0.0064702016 +2024-11-11 17:51:34,914 Train Loss: 0.0008751, Val Loss: 0.0010801 +2024-11-11 17:51:34,914 Epoch 882/2000 +2024-11-11 17:51:50,837 Current Learning Rate: 0.0063949555 +2024-11-11 17:51:50,838 Train Loss: 0.0009270, Val Loss: 0.0010854 +2024-11-11 17:51:50,838 Epoch 883/2000 +2024-11-11 17:52:06,233 Current Learning Rate: 0.0063193652 +2024-11-11 17:52:06,234 Train Loss: 0.0009508, Val Loss: 0.0011039 +2024-11-11 17:52:06,234 Epoch 884/2000 +2024-11-11 17:52:21,901 Current Learning Rate: 0.0062434494 +2024-11-11 17:52:21,901 Train Loss: 0.0010031, Val Loss: 0.0010686 +2024-11-11 17:52:21,902 Epoch 885/2000 +2024-11-11 17:52:37,634 Current Learning Rate: 0.0061672268 +2024-11-11 17:52:37,635 Train Loss: 0.0010492, Val Loss: 0.0010652 +2024-11-11 17:52:37,635 Epoch 886/2000 +2024-11-11 17:52:53,867 Current Learning Rate: 0.0060907162 +2024-11-11 17:52:54,606 Train Loss: 0.0009808, Val Loss: 0.0010377 +2024-11-11 17:52:54,607 Epoch 887/2000 +2024-11-11 17:53:10,104 Current Learning Rate: 0.0060139365 +2024-11-11 17:53:10,911 Train Loss: 0.0009252, Val Loss: 0.0010329 +2024-11-11 17:53:10,911 Epoch 888/2000 +2024-11-11 17:53:25,175 Current Learning Rate: 0.0059369066 +2024-11-11 17:53:25,176 Train Loss: 0.0009400, Val Loss: 0.0010599 +2024-11-11 17:53:25,176 Epoch 889/2000 +2024-11-11 17:53:41,353 Current Learning Rate: 0.0058596455 +2024-11-11 17:53:41,353 Train Loss: 0.0010006, Val Loss: 0.0011004 +2024-11-11 17:53:41,354 Epoch 890/2000 +2024-11-11 17:53:57,976 Current Learning Rate: 0.0057821723 +2024-11-11 
17:53:57,976 Train Loss: 0.0008872, Val Loss: 0.0010590 +2024-11-11 17:53:57,976 Epoch 891/2000 +2024-11-11 17:54:13,476 Current Learning Rate: 0.0057045062 +2024-11-11 17:54:14,595 Train Loss: 0.0009031, Val Loss: 0.0010181 +2024-11-11 17:54:14,596 Epoch 892/2000 +2024-11-11 17:54:30,074 Current Learning Rate: 0.0056266662 +2024-11-11 17:54:30,818 Train Loss: 0.0008798, Val Loss: 0.0010091 +2024-11-11 17:54:30,818 Epoch 893/2000 +2024-11-11 17:54:45,569 Current Learning Rate: 0.0055486716 +2024-11-11 17:54:45,570 Train Loss: 0.0008149, Val Loss: 0.0010137 +2024-11-11 17:54:45,570 Epoch 894/2000 +2024-11-11 17:55:01,152 Current Learning Rate: 0.0054705416 +2024-11-11 17:55:01,152 Train Loss: 0.0009229, Val Loss: 0.0010990 +2024-11-11 17:55:01,152 Epoch 895/2000 +2024-11-11 17:55:16,684 Current Learning Rate: 0.0053922955 +2024-11-11 17:55:16,685 Train Loss: 0.0008864, Val Loss: 0.0010769 +2024-11-11 17:55:16,685 Epoch 896/2000 +2024-11-11 17:55:32,393 Current Learning Rate: 0.0053139526 +2024-11-11 17:55:32,394 Train Loss: 0.0008386, Val Loss: 0.0010794 +2024-11-11 17:55:32,394 Epoch 897/2000 +2024-11-11 17:55:48,947 Current Learning Rate: 0.0052355323 +2024-11-11 17:55:48,948 Train Loss: 0.0009225, Val Loss: 0.0010566 +2024-11-11 17:55:48,948 Epoch 898/2000 +2024-11-11 17:56:05,724 Current Learning Rate: 0.0051570538 +2024-11-11 17:56:05,725 Train Loss: 0.0009125, Val Loss: 0.0010158 +2024-11-11 17:56:05,725 Epoch 899/2000 +2024-11-11 17:56:22,070 Current Learning Rate: 0.0050785366 +2024-11-11 17:56:22,070 Train Loss: 0.0010058, Val Loss: 0.0010308 +2024-11-11 17:56:22,071 Epoch 900/2000 +2024-11-11 17:56:38,467 Current Learning Rate: 0.0050000000 +2024-11-11 17:56:38,467 Train Loss: 0.0010355, Val Loss: 0.0010150 +2024-11-11 17:56:38,468 Epoch 901/2000 +2024-11-11 17:56:54,282 Current Learning Rate: 0.0049214634 +2024-11-11 17:56:54,282 Train Loss: 0.0009709, Val Loss: 0.0010411 +2024-11-11 17:56:54,282 Epoch 902/2000 +2024-11-11 17:57:10,396 Current Learning 
Rate: 0.0048429462 +2024-11-11 17:57:11,133 Train Loss: 0.0008987, Val Loss: 0.0009861 +2024-11-11 17:57:11,133 Epoch 903/2000 +2024-11-11 17:57:26,603 Current Learning Rate: 0.0047644677 +2024-11-11 17:57:27,392 Train Loss: 0.0008029, Val Loss: 0.0009636 +2024-11-11 17:57:27,393 Epoch 904/2000 +2024-11-11 17:57:42,786 Current Learning Rate: 0.0046860474 +2024-11-11 17:57:42,786 Train Loss: 0.0009061, Val Loss: 0.0009678 +2024-11-11 17:57:42,787 Epoch 905/2000 +2024-11-11 17:57:58,751 Current Learning Rate: 0.0046077045 +2024-11-11 17:57:58,751 Train Loss: 0.0008727, Val Loss: 0.0009731 +2024-11-11 17:57:58,751 Epoch 906/2000 +2024-11-11 17:58:14,351 Current Learning Rate: 0.0045294584 +2024-11-11 17:58:14,352 Train Loss: 0.0008415, Val Loss: 0.0010212 +2024-11-11 17:58:14,352 Epoch 907/2000 +2024-11-11 17:58:29,628 Current Learning Rate: 0.0044513284 +2024-11-11 17:58:29,628 Train Loss: 0.0008876, Val Loss: 0.0009804 +2024-11-11 17:58:29,628 Epoch 908/2000 +2024-11-11 17:58:45,530 Current Learning Rate: 0.0043733338 +2024-11-11 17:58:45,531 Train Loss: 0.0009508, Val Loss: 0.0009787 +2024-11-11 17:58:45,531 Epoch 909/2000 +2024-11-11 17:59:01,382 Current Learning Rate: 0.0042954938 +2024-11-11 17:59:04,223 Train Loss: 0.0009103, Val Loss: 0.0009574 +2024-11-11 17:59:04,223 Epoch 910/2000 +2024-11-11 17:59:19,515 Current Learning Rate: 0.0042178277 +2024-11-11 17:59:19,516 Train Loss: 0.0008181, Val Loss: 0.0009654 +2024-11-11 17:59:19,516 Epoch 911/2000 +2024-11-11 17:59:35,741 Current Learning Rate: 0.0041403545 +2024-11-11 17:59:35,741 Train Loss: 0.0009086, Val Loss: 0.0009607 +2024-11-11 17:59:35,762 Epoch 912/2000 +2024-11-11 17:59:50,502 Current Learning Rate: 0.0040630934 +2024-11-11 17:59:51,211 Train Loss: 0.0008478, Val Loss: 0.0009561 +2024-11-11 17:59:51,211 Epoch 913/2000 +2024-11-11 18:00:06,437 Current Learning Rate: 0.0039860635 +2024-11-11 18:00:06,438 Train Loss: 0.0008787, Val Loss: 0.0009568 +2024-11-11 18:00:06,438 Epoch 914/2000 +2024-11-11 
18:00:22,238 Current Learning Rate: 0.0039092838 +2024-11-11 18:00:23,027 Train Loss: 0.0008378, Val Loss: 0.0009512 +2024-11-11 18:00:23,027 Epoch 915/2000 +2024-11-11 18:00:39,011 Current Learning Rate: 0.0038327732 +2024-11-11 18:00:39,882 Train Loss: 0.0008433, Val Loss: 0.0009452 +2024-11-11 18:00:39,882 Epoch 916/2000 +2024-11-11 18:00:55,021 Current Learning Rate: 0.0037565506 +2024-11-11 18:00:55,869 Train Loss: 0.0008674, Val Loss: 0.0009419 +2024-11-11 18:00:55,870 Epoch 917/2000 +2024-11-11 18:01:10,901 Current Learning Rate: 0.0036806348 +2024-11-11 18:01:12,011 Train Loss: 0.0007995, Val Loss: 0.0009365 +2024-11-11 18:01:12,012 Epoch 918/2000 +2024-11-11 18:01:28,396 Current Learning Rate: 0.0036050445 +2024-11-11 18:01:29,410 Train Loss: 0.0008046, Val Loss: 0.0009340 +2024-11-11 18:01:29,410 Epoch 919/2000 +2024-11-11 18:01:45,619 Current Learning Rate: 0.0035297984 +2024-11-11 18:01:45,619 Train Loss: 0.0009378, Val Loss: 0.0009402 +2024-11-11 18:01:45,620 Epoch 920/2000 +2024-11-11 18:02:01,212 Current Learning Rate: 0.0034549150 +2024-11-11 18:02:01,213 Train Loss: 0.0008981, Val Loss: 0.0009435 +2024-11-11 18:02:01,213 Epoch 921/2000 +2024-11-11 18:02:18,387 Current Learning Rate: 0.0033804129 +2024-11-11 18:02:18,388 Train Loss: 0.0009236, Val Loss: 0.0009714 +2024-11-11 18:02:18,388 Epoch 922/2000 +2024-11-11 18:02:34,038 Current Learning Rate: 0.0033063104 +2024-11-11 18:02:34,039 Train Loss: 0.0007968, Val Loss: 0.0010329 +2024-11-11 18:02:34,040 Epoch 923/2000 +2024-11-11 18:02:49,900 Current Learning Rate: 0.0032326258 +2024-11-11 18:02:49,901 Train Loss: 0.0008474, Val Loss: 0.0009551 +2024-11-11 18:02:49,901 Epoch 924/2000 +2024-11-11 18:03:05,886 Current Learning Rate: 0.0031593772 +2024-11-11 18:03:05,887 Train Loss: 0.0008605, Val Loss: 0.0009423 +2024-11-11 18:03:05,887 Epoch 925/2000 +2024-11-11 18:03:22,163 Current Learning Rate: 0.0030865828 +2024-11-11 18:03:22,164 Train Loss: 0.0007879, Val Loss: 0.0009484 +2024-11-11 
18:03:22,164 Epoch 926/2000 +2024-11-11 18:03:37,523 Current Learning Rate: 0.0030142605 +2024-11-11 18:03:37,523 Train Loss: 0.0007850, Val Loss: 0.0009422 +2024-11-11 18:03:37,523 Epoch 927/2000 +2024-11-11 18:03:53,902 Current Learning Rate: 0.0029424282 +2024-11-11 18:03:53,902 Train Loss: 0.0008499, Val Loss: 0.0009353 +2024-11-11 18:03:53,903 Epoch 928/2000 +2024-11-11 18:04:09,285 Current Learning Rate: 0.0028711035 +2024-11-11 18:04:10,081 Train Loss: 0.0007770, Val Loss: 0.0009291 +2024-11-11 18:04:10,082 Epoch 929/2000 +2024-11-11 18:04:25,290 Current Learning Rate: 0.0028003042 +2024-11-11 18:04:26,011 Train Loss: 0.0007953, Val Loss: 0.0009265 +2024-11-11 18:04:26,011 Epoch 930/2000 +2024-11-11 18:04:41,158 Current Learning Rate: 0.0027300475 +2024-11-11 18:04:41,159 Train Loss: 0.0008035, Val Loss: 0.0009290 +2024-11-11 18:04:41,159 Epoch 931/2000 +2024-11-11 18:04:56,901 Current Learning Rate: 0.0026603509 +2024-11-11 18:04:57,656 Train Loss: 0.0008269, Val Loss: 0.0009218 +2024-11-11 18:04:57,656 Epoch 932/2000 +2024-11-11 18:05:12,502 Current Learning Rate: 0.0025912316 +2024-11-11 18:05:13,290 Train Loss: 0.0008099, Val Loss: 0.0009216 +2024-11-11 18:05:13,290 Epoch 933/2000 +2024-11-11 18:05:27,932 Current Learning Rate: 0.0025227067 +2024-11-11 18:05:28,759 Train Loss: 0.0007953, Val Loss: 0.0009200 +2024-11-11 18:05:28,759 Epoch 934/2000 +2024-11-11 18:05:44,188 Current Learning Rate: 0.0024547929 +2024-11-11 18:05:44,189 Train Loss: 0.0008011, Val Loss: 0.0009216 +2024-11-11 18:05:44,189 Epoch 935/2000 +2024-11-11 18:06:00,530 Current Learning Rate: 0.0023875072 +2024-11-11 18:06:01,545 Train Loss: 0.0007263, Val Loss: 0.0009178 +2024-11-11 18:06:01,545 Epoch 936/2000 +2024-11-11 18:06:16,635 Current Learning Rate: 0.0023208660 +2024-11-11 18:06:17,452 Train Loss: 0.0007329, Val Loss: 0.0009140 +2024-11-11 18:06:17,452 Epoch 937/2000 +2024-11-11 18:06:32,328 Current Learning Rate: 0.0022548859 +2024-11-11 18:06:33,096 Train Loss: 0.0007991, Val 
Loss: 0.0009119 +2024-11-11 18:06:33,096 Epoch 938/2000 +2024-11-11 18:06:49,252 Current Learning Rate: 0.0021895831 +2024-11-11 18:06:49,985 Train Loss: 0.0007170, Val Loss: 0.0009097 +2024-11-11 18:06:49,986 Epoch 939/2000 +2024-11-11 18:07:04,742 Current Learning Rate: 0.0021249737 +2024-11-11 18:07:04,743 Train Loss: 0.0008002, Val Loss: 0.0009106 +2024-11-11 18:07:04,743 Epoch 940/2000 +2024-11-11 18:07:20,276 Current Learning Rate: 0.0020610737 +2024-11-11 18:07:21,109 Train Loss: 0.0007906, Val Loss: 0.0009069 +2024-11-11 18:07:21,109 Epoch 941/2000 +2024-11-11 18:07:36,298 Current Learning Rate: 0.0019978989 +2024-11-11 18:07:36,299 Train Loss: 0.0008600, Val Loss: 0.0009077 +2024-11-11 18:07:36,300 Epoch 942/2000 +2024-11-11 18:07:51,999 Current Learning Rate: 0.0019354647 +2024-11-11 18:07:52,859 Train Loss: 0.0008623, Val Loss: 0.0009002 +2024-11-11 18:07:52,859 Epoch 943/2000 +2024-11-11 18:08:08,792 Current Learning Rate: 0.0018737867 +2024-11-11 18:08:09,871 Train Loss: 0.0007317, Val Loss: 0.0008945 +2024-11-11 18:08:09,871 Epoch 944/2000 +2024-11-11 18:08:25,556 Current Learning Rate: 0.0018128801 +2024-11-11 18:08:26,498 Train Loss: 0.0007197, Val Loss: 0.0008931 +2024-11-11 18:08:26,499 Epoch 945/2000 +2024-11-11 18:08:42,506 Current Learning Rate: 0.0017527598 +2024-11-11 18:08:43,408 Train Loss: 0.0008533, Val Loss: 0.0008858 +2024-11-11 18:08:43,408 Epoch 946/2000 +2024-11-11 18:08:59,082 Current Learning Rate: 0.0016934407 +2024-11-11 18:08:59,864 Train Loss: 0.0007981, Val Loss: 0.0008846 +2024-11-11 18:08:59,864 Epoch 947/2000 +2024-11-11 18:09:14,839 Current Learning Rate: 0.0016349374 +2024-11-11 18:09:15,885 Train Loss: 0.0007385, Val Loss: 0.0008841 +2024-11-11 18:09:15,886 Epoch 948/2000 +2024-11-11 18:09:32,184 Current Learning Rate: 0.0015772645 +2024-11-11 18:09:33,217 Train Loss: 0.0007402, Val Loss: 0.0008811 +2024-11-11 18:09:33,217 Epoch 949/2000 +2024-11-11 18:09:48,867 Current Learning Rate: 0.0015204360 +2024-11-11 
18:09:49,828 Train Loss: 0.0007388, Val Loss: 0.0008791 +2024-11-11 18:09:49,828 Epoch 950/2000 +2024-11-11 18:10:05,394 Current Learning Rate: 0.0014644661 +2024-11-11 18:10:06,428 Train Loss: 0.0007365, Val Loss: 0.0008788 +2024-11-11 18:10:06,428 Epoch 951/2000 +2024-11-11 18:10:22,291 Current Learning Rate: 0.0014093685 +2024-11-11 18:10:23,337 Train Loss: 0.0008304, Val Loss: 0.0008780 +2024-11-11 18:10:23,337 Epoch 952/2000 +2024-11-11 18:10:39,622 Current Learning Rate: 0.0013551569 +2024-11-11 18:10:40,525 Train Loss: 0.0007347, Val Loss: 0.0008769 +2024-11-11 18:10:40,526 Epoch 953/2000 +2024-11-11 18:10:56,240 Current Learning Rate: 0.0013018445 +2024-11-11 18:10:57,262 Train Loss: 0.0007638, Val Loss: 0.0008759 +2024-11-11 18:10:57,262 Epoch 954/2000 +2024-11-11 18:11:13,149 Current Learning Rate: 0.0012494447 +2024-11-11 18:11:13,942 Train Loss: 0.0006930, Val Loss: 0.0008750 +2024-11-11 18:11:13,943 Epoch 955/2000 +2024-11-11 18:11:29,040 Current Learning Rate: 0.0011979702 +2024-11-11 18:11:30,024 Train Loss: 0.0007504, Val Loss: 0.0008742 +2024-11-11 18:11:30,024 Epoch 956/2000 +2024-11-11 18:11:46,069 Current Learning Rate: 0.0011474338 +2024-11-11 18:11:46,819 Train Loss: 0.0007186, Val Loss: 0.0008726 +2024-11-11 18:11:46,820 Epoch 957/2000 +2024-11-11 18:12:02,281 Current Learning Rate: 0.0010978480 +2024-11-11 18:12:04,771 Train Loss: 0.0008264, Val Loss: 0.0008722 +2024-11-11 18:12:04,771 Epoch 958/2000 +2024-11-11 18:12:20,546 Current Learning Rate: 0.0010492249 +2024-11-11 18:12:21,276 Train Loss: 0.0008291, Val Loss: 0.0008717 +2024-11-11 18:12:21,276 Epoch 959/2000 +2024-11-11 18:12:36,206 Current Learning Rate: 0.0010015767 +2024-11-11 18:12:37,073 Train Loss: 0.0007533, Val Loss: 0.0008700 +2024-11-11 18:12:37,073 Epoch 960/2000 +2024-11-11 18:12:51,389 Current Learning Rate: 0.0009549150 +2024-11-11 18:12:51,389 Train Loss: 0.0008044, Val Loss: 0.0008701 +2024-11-11 18:12:51,390 Epoch 961/2000 +2024-11-11 18:13:06,881 Current Learning 
Rate: 0.0009092514 +2024-11-11 18:13:06,881 Train Loss: 0.0009497, Val Loss: 0.0008702 +2024-11-11 18:13:06,882 Epoch 962/2000 +2024-11-11 18:13:22,443 Current Learning Rate: 0.0008645971 +2024-11-11 18:13:23,260 Train Loss: 0.0007587, Val Loss: 0.0008681 +2024-11-11 18:13:23,261 Epoch 963/2000 +2024-11-11 18:13:38,024 Current Learning Rate: 0.0008209632 +2024-11-11 18:13:38,840 Train Loss: 0.0007858, Val Loss: 0.0008676 +2024-11-11 18:13:38,840 Epoch 964/2000 +2024-11-11 18:13:53,426 Current Learning Rate: 0.0007783604 +2024-11-11 18:13:54,195 Train Loss: 0.0007155, Val Loss: 0.0008664 +2024-11-11 18:13:54,195 Epoch 965/2000 +2024-11-11 18:14:09,379 Current Learning Rate: 0.0007367992 +2024-11-11 18:14:10,408 Train Loss: 0.0008063, Val Loss: 0.0008662 +2024-11-11 18:14:10,409 Epoch 966/2000 +2024-11-11 18:14:26,624 Current Learning Rate: 0.0006962899 +2024-11-11 18:14:27,655 Train Loss: 0.0007275, Val Loss: 0.0008653 +2024-11-11 18:14:27,655 Epoch 967/2000 +2024-11-11 18:14:43,990 Current Learning Rate: 0.0006568424 +2024-11-11 18:14:45,042 Train Loss: 0.0007194, Val Loss: 0.0008642 +2024-11-11 18:14:45,043 Epoch 968/2000 +2024-11-11 18:15:00,679 Current Learning Rate: 0.0006184666 +2024-11-11 18:15:01,587 Train Loss: 0.0006976, Val Loss: 0.0008634 +2024-11-11 18:15:01,588 Epoch 969/2000 +2024-11-11 18:15:16,646 Current Learning Rate: 0.0005811718 +2024-11-11 18:15:17,453 Train Loss: 0.0007452, Val Loss: 0.0008625 +2024-11-11 18:15:17,454 Epoch 970/2000 +2024-11-11 18:15:31,720 Current Learning Rate: 0.0005449674 +2024-11-11 18:15:32,505 Train Loss: 0.0007519, Val Loss: 0.0008623 +2024-11-11 18:15:32,505 Epoch 971/2000 +2024-11-11 18:15:47,098 Current Learning Rate: 0.0005098621 +2024-11-11 18:15:47,849 Train Loss: 0.0006804, Val Loss: 0.0008617 +2024-11-11 18:15:47,850 Epoch 972/2000 +2024-11-11 18:16:02,872 Current Learning Rate: 0.0004758647 +2024-11-11 18:16:05,217 Train Loss: 0.0007153, Val Loss: 0.0008612 +2024-11-11 18:16:05,217 Epoch 973/2000 +2024-11-11 
18:16:19,452 Current Learning Rate: 0.0004429836 +2024-11-11 18:16:20,279 Train Loss: 0.0006899, Val Loss: 0.0008603 +2024-11-11 18:16:20,280 Epoch 974/2000 +2024-11-11 18:16:34,776 Current Learning Rate: 0.0004112269 +2024-11-11 18:16:35,585 Train Loss: 0.0008203, Val Loss: 0.0008598 +2024-11-11 18:16:35,585 Epoch 975/2000 +2024-11-11 18:16:50,092 Current Learning Rate: 0.0003806023 +2024-11-11 18:16:50,869 Train Loss: 0.0007900, Val Loss: 0.0008593 +2024-11-11 18:16:50,870 Epoch 976/2000 +2024-11-11 18:17:05,491 Current Learning Rate: 0.0003511176 +2024-11-11 18:17:06,261 Train Loss: 0.0007146, Val Loss: 0.0008588 +2024-11-11 18:17:06,261 Epoch 977/2000 +2024-11-11 18:17:20,973 Current Learning Rate: 0.0003227798 +2024-11-11 18:17:21,749 Train Loss: 0.0007156, Val Loss: 0.0008587 +2024-11-11 18:17:21,749 Epoch 978/2000 +2024-11-11 18:17:36,300 Current Learning Rate: 0.0002955962 +2024-11-11 18:17:36,301 Train Loss: 0.0008374, Val Loss: 0.0008588 +2024-11-11 18:17:36,301 Epoch 979/2000 +2024-11-11 18:17:51,839 Current Learning Rate: 0.0002695732 +2024-11-11 18:17:52,660 Train Loss: 0.0008155, Val Loss: 0.0008584 +2024-11-11 18:17:52,661 Epoch 980/2000 +2024-11-11 18:18:07,838 Current Learning Rate: 0.0002447174 +2024-11-11 18:18:08,876 Train Loss: 0.0007135, Val Loss: 0.0008578 +2024-11-11 18:18:08,877 Epoch 981/2000 +2024-11-11 18:18:25,298 Current Learning Rate: 0.0002210349 +2024-11-11 18:18:26,311 Train Loss: 0.0007606, Val Loss: 0.0008577 +2024-11-11 18:18:26,311 Epoch 982/2000 +2024-11-11 18:18:42,014 Current Learning Rate: 0.0001985316 +2024-11-11 18:18:43,048 Train Loss: 0.0007418, Val Loss: 0.0008576 +2024-11-11 18:18:43,048 Epoch 983/2000 +2024-11-11 18:18:58,550 Current Learning Rate: 0.0001772129 +2024-11-11 18:18:58,551 Train Loss: 0.0007499, Val Loss: 0.0008576 +2024-11-11 18:18:58,551 Epoch 984/2000 +2024-11-11 18:19:14,874 Current Learning Rate: 0.0001570842 +2024-11-11 18:19:15,859 Train Loss: 0.0007082, Val Loss: 0.0008571 +2024-11-11 
18:19:15,860 Epoch 985/2000 +2024-11-11 18:19:31,580 Current Learning Rate: 0.0001381504 +2024-11-11 18:19:32,311 Train Loss: 0.0007549, Val Loss: 0.0008567 +2024-11-11 18:19:32,311 Epoch 986/2000 +2024-11-11 18:19:46,666 Current Learning Rate: 0.0001204162 +2024-11-11 18:19:47,473 Train Loss: 0.0007097, Val Loss: 0.0008566 +2024-11-11 18:19:47,473 Epoch 987/2000 +2024-11-11 18:20:02,133 Current Learning Rate: 0.0001038859 +2024-11-11 18:20:04,571 Train Loss: 0.0007165, Val Loss: 0.0008563 +2024-11-11 18:20:04,572 Epoch 988/2000 +2024-11-11 18:20:18,843 Current Learning Rate: 0.0000885637 +2024-11-11 18:20:18,843 Train Loss: 0.0007841, Val Loss: 0.0008564 +2024-11-11 18:20:18,843 Epoch 989/2000 +2024-11-11 18:20:34,100 Current Learning Rate: 0.0000744534 +2024-11-11 18:20:34,857 Train Loss: 0.0007812, Val Loss: 0.0008561 +2024-11-11 18:20:34,857 Epoch 990/2000 +2024-11-11 18:20:50,019 Current Learning Rate: 0.0000615583 +2024-11-11 18:20:50,019 Train Loss: 0.0007407, Val Loss: 0.0008561 +2024-11-11 18:20:50,019 Epoch 991/2000 +2024-11-11 18:21:05,671 Current Learning Rate: 0.0000498817 +2024-11-11 18:21:06,509 Train Loss: 0.0007416, Val Loss: 0.0008561 +2024-11-11 18:21:06,509 Epoch 992/2000 +2024-11-11 18:21:21,475 Current Learning Rate: 0.0000394265 +2024-11-11 18:21:22,260 Train Loss: 0.0007115, Val Loss: 0.0008559 +2024-11-11 18:21:22,261 Epoch 993/2000 +2024-11-11 18:21:36,963 Current Learning Rate: 0.0000301952 +2024-11-11 18:21:37,822 Train Loss: 0.0007434, Val Loss: 0.0008559 +2024-11-11 18:21:37,822 Epoch 994/2000 +2024-11-11 18:21:52,460 Current Learning Rate: 0.0000221902 +2024-11-11 18:21:52,460 Train Loss: 0.0007455, Val Loss: 0.0008559 +2024-11-11 18:21:52,461 Epoch 995/2000 +2024-11-11 18:22:07,811 Current Learning Rate: 0.0000154133 +2024-11-11 18:22:08,572 Train Loss: 0.0008028, Val Loss: 0.0008559 +2024-11-11 18:22:08,573 Epoch 996/2000 +2024-11-11 18:22:23,237 Current Learning Rate: 0.0000098664 +2024-11-11 18:22:24,004 Train Loss: 0.0008328, Val 
Loss: 0.0008558 +2024-11-11 18:22:24,004 Epoch 997/2000 +2024-11-11 18:22:39,122 Current Learning Rate: 0.0000055506 +2024-11-11 18:22:39,124 Train Loss: 0.0008706, Val Loss: 0.0008559 +2024-11-11 18:22:39,124 Epoch 998/2000 +2024-11-11 18:22:55,385 Current Learning Rate: 0.0000024672 +2024-11-11 18:22:55,386 Train Loss: 0.0008158, Val Loss: 0.0008558 +2024-11-11 18:22:55,386 Epoch 999/2000 +2024-11-11 18:23:11,204 Current Learning Rate: 0.0000006168 +2024-11-11 18:23:11,941 Train Loss: 0.0007491, Val Loss: 0.0008558 +2024-11-11 18:23:11,941 Epoch 1000/2000 +2024-11-11 18:23:26,214 Current Learning Rate: 0.0000000000 +2024-11-11 18:23:26,991 Train Loss: 0.0007738, Val Loss: 0.0008558 +2024-11-11 18:23:26,992 Epoch 1001/2000 +2024-11-11 18:23:41,600 Current Learning Rate: 0.0000006168 +2024-11-11 18:23:41,601 Train Loss: 0.0007041, Val Loss: 0.0008558 +2024-11-11 18:23:41,601 Epoch 1002/2000 +2024-11-11 18:23:57,062 Current Learning Rate: 0.0000024672 +2024-11-11 18:23:57,062 Train Loss: 0.0007447, Val Loss: 0.0008558 +2024-11-11 18:23:57,062 Epoch 1003/2000 +2024-11-11 18:24:12,435 Current Learning Rate: 0.0000055506 +2024-11-11 18:24:13,188 Train Loss: 0.0007138, Val Loss: 0.0008558 +2024-11-11 18:24:13,189 Epoch 1004/2000 +2024-11-11 18:24:27,879 Current Learning Rate: 0.0000098664 +2024-11-11 18:24:28,752 Train Loss: 0.0007444, Val Loss: 0.0008558 +2024-11-11 18:24:28,752 Epoch 1005/2000 +2024-11-11 18:24:44,092 Current Learning Rate: 0.0000154133 +2024-11-11 18:24:44,824 Train Loss: 0.0007624, Val Loss: 0.0008557 +2024-11-11 18:24:44,824 Epoch 1006/2000 +2024-11-11 18:25:00,032 Current Learning Rate: 0.0000221902 +2024-11-11 18:25:00,033 Train Loss: 0.0007101, Val Loss: 0.0008558 +2024-11-11 18:25:00,033 Epoch 1007/2000 +2024-11-11 18:25:15,982 Current Learning Rate: 0.0000301952 +2024-11-11 18:25:15,983 Train Loss: 0.0006978, Val Loss: 0.0008558 +2024-11-11 18:25:15,984 Epoch 1008/2000 +2024-11-11 18:25:32,062 Current Learning Rate: 0.0000394265 +2024-11-11 
18:25:32,062 Train Loss: 0.0008121, Val Loss: 0.0008559 +2024-11-11 18:25:32,063 Epoch 1009/2000 +2024-11-11 18:25:47,703 Current Learning Rate: 0.0000498817 +2024-11-11 18:25:47,704 Train Loss: 0.0007845, Val Loss: 0.0008559 +2024-11-11 18:25:47,704 Epoch 1010/2000 +2024-11-11 18:26:03,148 Current Learning Rate: 0.0000615583 +2024-11-11 18:26:03,148 Train Loss: 0.0007137, Val Loss: 0.0008558 +2024-11-11 18:26:03,148 Epoch 1011/2000 +2024-11-11 18:26:18,618 Current Learning Rate: 0.0000744534 +2024-11-11 18:26:18,618 Train Loss: 0.0007413, Val Loss: 0.0008558 +2024-11-11 18:26:18,627 Epoch 1012/2000 +2024-11-11 18:26:34,565 Current Learning Rate: 0.0000885637 +2024-11-11 18:26:34,566 Train Loss: 0.0007160, Val Loss: 0.0008558 +2024-11-11 18:26:34,566 Epoch 1013/2000 +2024-11-11 18:26:50,242 Current Learning Rate: 0.0001038859 +2024-11-11 18:26:50,242 Train Loss: 0.0007124, Val Loss: 0.0008558 +2024-11-11 18:26:50,243 Epoch 1014/2000 +2024-11-11 18:27:06,393 Current Learning Rate: 0.0001204162 +2024-11-11 18:27:06,393 Train Loss: 0.0007834, Val Loss: 0.0008560 +2024-11-11 18:27:06,393 Epoch 1015/2000 +2024-11-11 18:27:21,665 Current Learning Rate: 0.0001381504 +2024-11-11 18:27:21,665 Train Loss: 0.0007124, Val Loss: 0.0008559 +2024-11-11 18:27:21,665 Epoch 1016/2000 +2024-11-11 18:27:37,500 Current Learning Rate: 0.0001570842 +2024-11-11 18:27:37,501 Train Loss: 0.0008102, Val Loss: 0.0008561 +2024-11-11 18:27:37,501 Epoch 1017/2000 +2024-11-11 18:27:53,433 Current Learning Rate: 0.0001772129 +2024-11-11 18:27:53,433 Train Loss: 0.0007366, Val Loss: 0.0008566 +2024-11-11 18:27:53,434 Epoch 1018/2000 +2024-11-11 18:28:09,970 Current Learning Rate: 0.0001985316 +2024-11-11 18:28:09,971 Train Loss: 0.0007153, Val Loss: 0.0008562 +2024-11-11 18:28:09,971 Epoch 1019/2000 +2024-11-11 18:28:25,920 Current Learning Rate: 0.0002210349 +2024-11-11 18:28:25,921 Train Loss: 0.0006716, Val Loss: 0.0008560 +2024-11-11 18:28:25,921 Epoch 1020/2000 +2024-11-11 18:28:42,797 Current 
Learning Rate: 0.0002447174 +2024-11-11 18:28:42,797 Train Loss: 0.0007545, Val Loss: 0.0008561 +2024-11-11 18:28:42,798 Epoch 1021/2000 +2024-11-11 18:28:59,024 Current Learning Rate: 0.0002695732 +2024-11-11 18:28:59,024 Train Loss: 0.0008152, Val Loss: 0.0008583 +2024-11-11 18:28:59,025 Epoch 1022/2000 +2024-11-11 18:29:15,344 Current Learning Rate: 0.0002955962 +2024-11-11 18:29:15,345 Train Loss: 0.0007431, Val Loss: 0.0008562 +2024-11-11 18:29:15,345 Epoch 1023/2000 +2024-11-11 18:29:30,976 Current Learning Rate: 0.0003227798 +2024-11-11 18:29:30,978 Train Loss: 0.0008182, Val Loss: 0.0008566 +2024-11-11 18:29:30,978 Epoch 1024/2000 +2024-11-11 18:29:46,644 Current Learning Rate: 0.0003511176 +2024-11-11 18:29:46,645 Train Loss: 0.0007122, Val Loss: 0.0008565 +2024-11-11 18:29:46,645 Epoch 1025/2000 +2024-11-11 18:30:02,054 Current Learning Rate: 0.0003806023 +2024-11-11 18:30:02,054 Train Loss: 0.0007820, Val Loss: 0.0008568 +2024-11-11 18:30:02,055 Epoch 1026/2000 +2024-11-11 18:30:17,443 Current Learning Rate: 0.0004112269 +2024-11-11 18:30:17,444 Train Loss: 0.0008278, Val Loss: 0.0008565 +2024-11-11 18:30:17,444 Epoch 1027/2000 +2024-11-11 18:30:33,751 Current Learning Rate: 0.0004429836 +2024-11-11 18:30:33,751 Train Loss: 0.0007511, Val Loss: 0.0008568 +2024-11-11 18:30:33,752 Epoch 1028/2000 +2024-11-11 18:30:49,862 Current Learning Rate: 0.0004758647 +2024-11-11 18:30:49,863 Train Loss: 0.0007972, Val Loss: 0.0008570 +2024-11-11 18:30:49,863 Epoch 1029/2000 +2024-11-11 18:31:05,444 Current Learning Rate: 0.0005098621 +2024-11-11 18:31:05,444 Train Loss: 0.0008241, Val Loss: 0.0008577 +2024-11-11 18:31:05,445 Epoch 1030/2000 +2024-11-11 18:31:21,490 Current Learning Rate: 0.0005449674 +2024-11-11 18:31:21,491 Train Loss: 0.0007440, Val Loss: 0.0008575 +2024-11-11 18:31:21,491 Epoch 1031/2000 +2024-11-11 18:31:37,003 Current Learning Rate: 0.0005811718 +2024-11-11 18:31:37,004 Train Loss: 0.0007159, Val Loss: 0.0008574 +2024-11-11 18:31:37,004 Epoch 
1032/2000 +2024-11-11 18:31:53,886 Current Learning Rate: 0.0006184666 +2024-11-11 18:31:53,886 Train Loss: 0.0007748, Val Loss: 0.0008571 +2024-11-11 18:31:53,887 Epoch 1033/2000 +2024-11-11 18:32:10,155 Current Learning Rate: 0.0006568424 +2024-11-11 18:32:10,155 Train Loss: 0.0006881, Val Loss: 0.0008574 +2024-11-11 18:32:10,155 Epoch 1034/2000 +2024-11-11 18:32:26,939 Current Learning Rate: 0.0006962899 +2024-11-11 18:32:26,940 Train Loss: 0.0007358, Val Loss: 0.0008586 +2024-11-11 18:32:26,940 Epoch 1035/2000 +2024-11-11 18:32:42,501 Current Learning Rate: 0.0007367992 +2024-11-11 18:32:42,502 Train Loss: 0.0007446, Val Loss: 0.0008596 +2024-11-11 18:32:42,502 Epoch 1036/2000 +2024-11-11 18:32:58,778 Current Learning Rate: 0.0007783604 +2024-11-11 18:32:58,778 Train Loss: 0.0007498, Val Loss: 0.0008631 +2024-11-11 18:32:58,778 Epoch 1037/2000 +2024-11-11 18:33:14,752 Current Learning Rate: 0.0008209632 +2024-11-11 18:33:14,753 Train Loss: 0.0006730, Val Loss: 0.0008626 +2024-11-11 18:33:14,753 Epoch 1038/2000 +2024-11-11 18:33:30,112 Current Learning Rate: 0.0008645971 +2024-11-11 18:33:30,113 Train Loss: 0.0007084, Val Loss: 0.0008636 +2024-11-11 18:33:30,113 Epoch 1039/2000 +2024-11-11 18:33:45,602 Current Learning Rate: 0.0009092514 +2024-11-11 18:33:45,602 Train Loss: 0.0007876, Val Loss: 0.0008646 +2024-11-11 18:33:45,603 Epoch 1040/2000 +2024-11-11 18:34:02,321 Current Learning Rate: 0.0009549150 +2024-11-11 18:34:02,322 Train Loss: 0.0007497, Val Loss: 0.0008628 +2024-11-11 18:34:02,323 Epoch 1041/2000 +2024-11-11 18:34:18,006 Current Learning Rate: 0.0010015767 +2024-11-11 18:34:18,007 Train Loss: 0.0007301, Val Loss: 0.0008607 +2024-11-11 18:34:18,007 Epoch 1042/2000 +2024-11-11 18:34:35,079 Current Learning Rate: 0.0010492249 +2024-11-11 18:34:35,079 Train Loss: 0.0007282, Val Loss: 0.0008592 +2024-11-11 18:34:35,080 Epoch 1043/2000 +2024-11-11 18:34:51,134 Current Learning Rate: 0.0010978480 +2024-11-11 18:34:52,226 Train Loss: 0.0007161, Val Loss: 
0.0008554 +2024-11-11 18:34:52,227 Epoch 1044/2000 +2024-11-11 18:35:08,413 Current Learning Rate: 0.0011474338 +2024-11-11 18:35:08,414 Train Loss: 0.0007101, Val Loss: 0.0008654 +2024-11-11 18:35:08,415 Epoch 1045/2000 +2024-11-11 18:35:23,793 Current Learning Rate: 0.0011979702 +2024-11-11 18:35:23,794 Train Loss: 0.0008156, Val Loss: 0.0008671 +2024-11-11 18:35:23,794 Epoch 1046/2000 +2024-11-11 18:35:39,908 Current Learning Rate: 0.0012494447 +2024-11-11 18:35:39,909 Train Loss: 0.0006861, Val Loss: 0.0008554 +2024-11-11 18:35:39,909 Epoch 1047/2000 +2024-11-11 18:35:55,655 Current Learning Rate: 0.0013018445 +2024-11-11 18:35:55,656 Train Loss: 0.0007644, Val Loss: 0.0008688 +2024-11-11 18:35:55,656 Epoch 1048/2000 +2024-11-11 18:36:11,428 Current Learning Rate: 0.0013551569 +2024-11-11 18:36:11,429 Train Loss: 0.0007607, Val Loss: 0.0008591 +2024-11-11 18:36:11,429 Epoch 1049/2000 +2024-11-11 18:36:28,192 Current Learning Rate: 0.0014093685 +2024-11-11 18:36:28,192 Train Loss: 0.0007149, Val Loss: 0.0008560 +2024-11-11 18:36:28,193 Epoch 1050/2000 +2024-11-11 18:36:44,685 Current Learning Rate: 0.0014644661 +2024-11-11 18:36:44,686 Train Loss: 0.0008344, Val Loss: 0.0008616 +2024-11-11 18:36:44,686 Epoch 1051/2000 +2024-11-11 18:37:00,530 Current Learning Rate: 0.0015204360 +2024-11-11 18:37:00,531 Train Loss: 0.0006756, Val Loss: 0.0008582 +2024-11-11 18:37:00,531 Epoch 1052/2000 +2024-11-11 18:37:17,346 Current Learning Rate: 0.0015772645 +2024-11-11 18:37:17,346 Train Loss: 0.0007516, Val Loss: 0.0008673 +2024-11-11 18:37:17,347 Epoch 1053/2000 +2024-11-11 18:37:32,817 Current Learning Rate: 0.0016349374 +2024-11-11 18:37:32,818 Train Loss: 0.0006806, Val Loss: 0.0008589 +2024-11-11 18:37:32,818 Epoch 1054/2000 +2024-11-11 18:37:48,166 Current Learning Rate: 0.0016934407 +2024-11-11 18:37:48,167 Train Loss: 0.0006719, Val Loss: 0.0008565 +2024-11-11 18:37:48,167 Epoch 1055/2000 +2024-11-11 18:38:03,475 Current Learning Rate: 0.0017527598 +2024-11-11 
18:38:03,476 Train Loss: 0.0007959, Val Loss: 0.0008839 +2024-11-11 18:38:03,476 Epoch 1056/2000 +2024-11-11 18:38:19,173 Current Learning Rate: 0.0018128801 +2024-11-11 18:38:19,173 Train Loss: 0.0008522, Val Loss: 0.0009142 +2024-11-11 18:38:19,174 Epoch 1057/2000 +2024-11-11 18:38:34,421 Current Learning Rate: 0.0018737867 +2024-11-11 18:38:34,421 Train Loss: 0.0007066, Val Loss: 0.0008673 +2024-11-11 18:38:34,422 Epoch 1058/2000 +2024-11-11 18:38:50,048 Current Learning Rate: 0.0019354647 +2024-11-11 18:38:50,049 Train Loss: 0.0008207, Val Loss: 0.0009349 +2024-11-11 18:38:50,049 Epoch 1059/2000 +2024-11-11 18:39:05,891 Current Learning Rate: 0.0019978989 +2024-11-11 18:39:05,892 Train Loss: 0.0007879, Val Loss: 0.0008692 +2024-11-11 18:39:05,892 Epoch 1060/2000 +2024-11-11 18:39:21,306 Current Learning Rate: 0.0020610737 +2024-11-11 18:39:21,306 Train Loss: 0.0007605, Val Loss: 0.0008579 +2024-11-11 18:39:21,307 Epoch 1061/2000 +2024-11-11 18:39:36,695 Current Learning Rate: 0.0021249737 +2024-11-11 18:39:37,468 Train Loss: 0.0007083, Val Loss: 0.0008546 +2024-11-11 18:39:37,468 Epoch 1062/2000 +2024-11-11 18:39:52,103 Current Learning Rate: 0.0021895831 +2024-11-11 18:39:52,103 Train Loss: 0.0007383, Val Loss: 0.0008613 +2024-11-11 18:39:52,104 Epoch 1063/2000 +2024-11-11 18:40:08,285 Current Learning Rate: 0.0022548859 +2024-11-11 18:40:09,076 Train Loss: 0.0007759, Val Loss: 0.0008530 +2024-11-11 18:40:09,076 Epoch 1064/2000 +2024-11-11 18:40:24,478 Current Learning Rate: 0.0023208660 +2024-11-11 18:40:24,479 Train Loss: 0.0008380, Val Loss: 0.0009085 +2024-11-11 18:40:24,479 Epoch 1065/2000 +2024-11-11 18:40:39,403 Current Learning Rate: 0.0023875072 +2024-11-11 18:40:39,404 Train Loss: 0.0007227, Val Loss: 0.0008622 +2024-11-11 18:40:39,404 Epoch 1066/2000 +2024-11-11 18:40:54,462 Current Learning Rate: 0.0024547929 +2024-11-11 18:40:54,462 Train Loss: 0.0007269, Val Loss: 0.0008761 +2024-11-11 18:40:54,462 Epoch 1067/2000 +2024-11-11 18:41:09,654 Current 
Learning Rate: 0.0025227067 +2024-11-11 18:41:09,654 Train Loss: 0.0007267, Val Loss: 0.0008554 +2024-11-11 18:41:09,655 Epoch 1068/2000 +2024-11-11 18:41:25,103 Current Learning Rate: 0.0025912316 +2024-11-11 18:41:25,104 Train Loss: 0.0007440, Val Loss: 0.0008720 +2024-11-11 18:41:25,104 Epoch 1069/2000 +2024-11-11 18:41:40,784 Current Learning Rate: 0.0026603509 +2024-11-11 18:41:40,785 Train Loss: 0.0007707, Val Loss: 0.0008668 +2024-11-11 18:41:40,785 Epoch 1070/2000 +2024-11-11 18:41:56,935 Current Learning Rate: 0.0027300475 +2024-11-11 18:41:56,935 Train Loss: 0.0008534, Val Loss: 0.0008801 +2024-11-11 18:41:56,936 Epoch 1071/2000 +2024-11-11 18:42:13,057 Current Learning Rate: 0.0028003042 +2024-11-11 18:42:13,059 Train Loss: 0.0007330, Val Loss: 0.0008630 +2024-11-11 18:42:13,059 Epoch 1072/2000 +2024-11-11 18:42:28,888 Current Learning Rate: 0.0028711035 +2024-11-11 18:42:28,889 Train Loss: 0.0006813, Val Loss: 0.0008599 +2024-11-11 18:42:28,889 Epoch 1073/2000 +2024-11-11 18:42:44,922 Current Learning Rate: 0.0029424282 +2024-11-11 18:42:44,923 Train Loss: 0.0007425, Val Loss: 0.0009174 +2024-11-11 18:42:44,923 Epoch 1074/2000 +2024-11-11 18:43:00,446 Current Learning Rate: 0.0030142605 +2024-11-11 18:43:00,447 Train Loss: 0.0007606, Val Loss: 0.0008840 +2024-11-11 18:43:00,447 Epoch 1075/2000 +2024-11-11 18:43:15,730 Current Learning Rate: 0.0030865828 +2024-11-11 18:43:15,730 Train Loss: 0.0008498, Val Loss: 0.0008971 +2024-11-11 18:43:15,730 Epoch 1076/2000 +2024-11-11 18:43:31,459 Current Learning Rate: 0.0031593772 +2024-11-11 18:43:31,460 Train Loss: 0.0008042, Val Loss: 0.0009118 +2024-11-11 18:43:31,460 Epoch 1077/2000 +2024-11-11 18:43:47,950 Current Learning Rate: 0.0032326258 +2024-11-11 18:43:47,951 Train Loss: 0.0007869, Val Loss: 0.0008806 +2024-11-11 18:43:47,951 Epoch 1078/2000 +2024-11-11 18:44:02,884 Current Learning Rate: 0.0033063104 +2024-11-11 18:44:02,885 Train Loss: 0.0007446, Val Loss: 0.0008635 +2024-11-11 18:44:02,885 Epoch 
1079/2000 +2024-11-11 18:44:18,644 Current Learning Rate: 0.0033804129 +2024-11-11 18:44:18,644 Train Loss: 0.0008346, Val Loss: 0.0008997 +2024-11-11 18:44:18,645 Epoch 1080/2000 +2024-11-11 18:44:35,632 Current Learning Rate: 0.0034549150 +2024-11-11 18:44:35,633 Train Loss: 0.0007886, Val Loss: 0.0008844 +2024-11-11 18:44:35,633 Epoch 1081/2000 +2024-11-11 18:44:51,473 Current Learning Rate: 0.0035297984 +2024-11-11 18:44:51,474 Train Loss: 0.0007828, Val Loss: 0.0009006 +2024-11-11 18:44:51,474 Epoch 1082/2000 +2024-11-11 18:45:06,811 Current Learning Rate: 0.0036050445 +2024-11-11 18:45:06,812 Train Loss: 0.0007277, Val Loss: 0.0008892 +2024-11-11 18:45:06,812 Epoch 1083/2000 +2024-11-11 18:45:23,103 Current Learning Rate: 0.0036806348 +2024-11-11 18:45:23,104 Train Loss: 0.0007981, Val Loss: 0.0008976 +2024-11-11 18:45:23,104 Epoch 1084/2000 +2024-11-11 18:45:38,385 Current Learning Rate: 0.0037565506 +2024-11-11 18:45:38,385 Train Loss: 0.0007050, Val Loss: 0.0008793 +2024-11-11 18:45:38,385 Epoch 1085/2000 +2024-11-11 18:45:54,414 Current Learning Rate: 0.0038327732 +2024-11-11 18:45:54,415 Train Loss: 0.0009147, Val Loss: 0.0009100 +2024-11-11 18:45:54,415 Epoch 1086/2000 +2024-11-11 18:46:10,519 Current Learning Rate: 0.0039092838 +2024-11-11 18:46:10,520 Train Loss: 0.0008636, Val Loss: 0.0009302 +2024-11-11 18:46:10,520 Epoch 1087/2000 +2024-11-11 18:46:26,233 Current Learning Rate: 0.0039860635 +2024-11-11 18:46:26,233 Train Loss: 0.0009090, Val Loss: 0.0009573 +2024-11-11 18:46:26,233 Epoch 1088/2000 +2024-11-11 18:46:41,469 Current Learning Rate: 0.0040630934 +2024-11-11 18:46:41,470 Train Loss: 0.0007566, Val Loss: 0.0009119 +2024-11-11 18:46:41,470 Epoch 1089/2000 +2024-11-11 18:46:56,869 Current Learning Rate: 0.0041403545 +2024-11-11 18:46:56,870 Train Loss: 0.0008079, Val Loss: 0.0009209 +2024-11-11 18:46:56,870 Epoch 1090/2000 +2024-11-11 18:47:12,678 Current Learning Rate: 0.0042178277 +2024-11-11 18:47:12,679 Train Loss: 0.0007778, Val Loss: 
0.0009014 +2024-11-11 18:47:12,679 Epoch 1091/2000 +2024-11-11 18:47:28,858 Current Learning Rate: 0.0042954938 +2024-11-11 18:47:28,859 Train Loss: 0.0007078, Val Loss: 0.0008732 +2024-11-11 18:47:28,859 Epoch 1092/2000 +2024-11-11 18:47:45,369 Current Learning Rate: 0.0043733338 +2024-11-11 18:47:45,370 Train Loss: 0.0008591, Val Loss: 0.0009461 +2024-11-11 18:47:45,370 Epoch 1093/2000 +2024-11-11 18:48:01,773 Current Learning Rate: 0.0044513284 +2024-11-11 18:48:01,773 Train Loss: 0.0007430, Val Loss: 0.0009078 +2024-11-11 18:48:01,774 Epoch 1094/2000 +2024-11-11 18:48:16,770 Current Learning Rate: 0.0045294584 +2024-11-11 18:48:16,771 Train Loss: 0.0008409, Val Loss: 0.0009163 +2024-11-11 18:48:16,771 Epoch 1095/2000 +2024-11-11 18:48:32,376 Current Learning Rate: 0.0046077045 +2024-11-11 18:48:32,377 Train Loss: 0.0007834, Val Loss: 0.0009657 +2024-11-11 18:48:32,377 Epoch 1096/2000 +2024-11-11 18:48:47,756 Current Learning Rate: 0.0046860474 +2024-11-11 18:48:47,756 Train Loss: 0.0007785, Val Loss: 0.0009072 +2024-11-11 18:48:47,756 Epoch 1097/2000 +2024-11-11 18:49:03,674 Current Learning Rate: 0.0047644677 +2024-11-11 18:49:03,675 Train Loss: 0.0008473, Val Loss: 0.0009193 +2024-11-11 18:49:03,675 Epoch 1098/2000 +2024-11-11 18:49:19,285 Current Learning Rate: 0.0048429462 +2024-11-11 18:49:19,286 Train Loss: 0.0009198, Val Loss: 0.0009047 +2024-11-11 18:49:19,286 Epoch 1099/2000 +2024-11-11 18:49:35,804 Current Learning Rate: 0.0049214634 +2024-11-11 18:49:35,805 Train Loss: 0.0007513, Val Loss: 0.0008817 +2024-11-11 18:49:35,805 Epoch 1100/2000 +2024-11-11 18:49:51,202 Current Learning Rate: 0.0050000000 +2024-11-11 18:49:51,203 Train Loss: 0.0006984, Val Loss: 0.0008786 +2024-11-11 18:49:51,203 Epoch 1101/2000 +2024-11-11 18:50:06,773 Current Learning Rate: 0.0050785366 +2024-11-11 18:50:06,774 Train Loss: 0.0007898, Val Loss: 0.0009168 +2024-11-11 18:50:06,774 Epoch 1102/2000 +2024-11-11 18:50:23,111 Current Learning Rate: 0.0051570538 +2024-11-11 
18:50:23,112 Train Loss: 0.0007377, Val Loss: 0.0009072 +2024-11-11 18:50:23,112 Epoch 1103/2000 +2024-11-11 18:50:38,981 Current Learning Rate: 0.0052355323 +2024-11-11 18:50:38,981 Train Loss: 0.0007992, Val Loss: 0.0009590 +2024-11-11 18:50:38,982 Epoch 1104/2000 +2024-11-11 18:50:54,636 Current Learning Rate: 0.0053139526 +2024-11-11 18:50:54,637 Train Loss: 0.0009170, Val Loss: 0.0009772 +2024-11-11 18:50:54,637 Epoch 1105/2000 +2024-11-11 18:51:11,338 Current Learning Rate: 0.0053922955 +2024-11-11 18:51:11,338 Train Loss: 0.0008388, Val Loss: 0.0009675 +2024-11-11 18:51:11,339 Epoch 1106/2000 +2024-11-11 18:51:27,485 Current Learning Rate: 0.0054705416 +2024-11-11 18:51:27,487 Train Loss: 0.0008355, Val Loss: 0.0010288 +2024-11-11 18:51:27,487 Epoch 1107/2000 +2024-11-11 18:51:43,126 Current Learning Rate: 0.0055486716 +2024-11-11 18:51:43,126 Train Loss: 0.0007842, Val Loss: 0.0009114 +2024-11-11 18:51:43,127 Epoch 1108/2000 +2024-11-11 18:51:59,493 Current Learning Rate: 0.0056266662 +2024-11-11 18:51:59,495 Train Loss: 0.0008256, Val Loss: 0.0009426 +2024-11-11 18:51:59,495 Epoch 1109/2000 +2024-11-11 18:52:15,288 Current Learning Rate: 0.0057045062 +2024-11-11 18:52:15,289 Train Loss: 0.0009038, Val Loss: 0.0009367 +2024-11-11 18:52:15,289 Epoch 1110/2000 +2024-11-11 18:52:30,620 Current Learning Rate: 0.0057821723 +2024-11-11 18:52:30,621 Train Loss: 0.0008269, Val Loss: 0.0009090 +2024-11-11 18:52:30,621 Epoch 1111/2000 +2024-11-11 18:52:46,896 Current Learning Rate: 0.0058596455 +2024-11-11 18:52:46,897 Train Loss: 0.0009088, Val Loss: 0.0009546 +2024-11-11 18:52:46,897 Epoch 1112/2000 +2024-11-11 18:53:02,681 Current Learning Rate: 0.0059369066 +2024-11-11 18:53:02,681 Train Loss: 0.0008433, Val Loss: 0.0009075 +2024-11-11 18:53:02,682 Epoch 1113/2000 +2024-11-11 18:53:18,567 Current Learning Rate: 0.0060139365 +2024-11-11 18:53:18,567 Train Loss: 0.0009822, Val Loss: 0.0009441 +2024-11-11 18:53:18,567 Epoch 1114/2000 +2024-11-11 18:53:33,757 Current 
Learning Rate: 0.0060907162 +2024-11-11 18:53:33,758 Train Loss: 0.0009162, Val Loss: 0.0009002 +2024-11-11 18:53:33,758 Epoch 1115/2000 +2024-11-11 18:53:49,097 Current Learning Rate: 0.0061672268 +2024-11-11 18:53:49,098 Train Loss: 0.0007641, Val Loss: 0.0009208 +2024-11-11 18:53:49,098 Epoch 1116/2000 +2024-11-11 18:54:04,371 Current Learning Rate: 0.0062434494 +2024-11-11 18:54:04,372 Train Loss: 0.0008616, Val Loss: 0.0009157 +2024-11-11 18:54:04,372 Epoch 1117/2000 +2024-11-11 18:54:20,901 Current Learning Rate: 0.0063193652 +2024-11-11 18:54:20,902 Train Loss: 0.0007720, Val Loss: 0.0009131 +2024-11-11 18:54:20,902 Epoch 1118/2000 +2024-11-11 18:54:36,520 Current Learning Rate: 0.0063949555 +2024-11-11 18:54:36,521 Train Loss: 0.0008748, Val Loss: 0.0009098 +2024-11-11 18:54:36,522 Epoch 1119/2000 +2024-11-11 18:54:52,529 Current Learning Rate: 0.0064702016 +2024-11-11 18:54:52,529 Train Loss: 0.0008747, Val Loss: 0.0009104 +2024-11-11 18:54:52,529 Epoch 1120/2000 +2024-11-11 18:55:08,008 Current Learning Rate: 0.0065450850 +2024-11-11 18:55:08,009 Train Loss: 0.0007746, Val Loss: 0.0009134 +2024-11-11 18:55:08,009 Epoch 1121/2000 +2024-11-11 18:55:24,190 Current Learning Rate: 0.0066195871 +2024-11-11 18:55:24,191 Train Loss: 0.0008900, Val Loss: 0.0009446 +2024-11-11 18:55:24,191 Epoch 1122/2000 +2024-11-11 18:55:41,093 Current Learning Rate: 0.0066936896 +2024-11-11 18:55:41,093 Train Loss: 0.0008111, Val Loss: 0.0009460 +2024-11-11 18:55:41,094 Epoch 1123/2000 +2024-11-11 18:55:58,069 Current Learning Rate: 0.0067673742 +2024-11-11 18:55:58,069 Train Loss: 0.0010924, Val Loss: 0.0012449 +2024-11-11 18:55:58,070 Epoch 1124/2000 +2024-11-11 18:56:14,415 Current Learning Rate: 0.0068406228 +2024-11-11 18:56:14,416 Train Loss: 0.0009590, Val Loss: 0.0009485 +2024-11-11 18:56:14,416 Epoch 1125/2000 +2024-11-11 18:56:30,273 Current Learning Rate: 0.0069134172 +2024-11-11 18:56:30,274 Train Loss: 0.0009170, Val Loss: 0.0009529 +2024-11-11 18:56:30,274 Epoch 
1126/2000 +2024-11-11 18:56:46,177 Current Learning Rate: 0.0069857395 +2024-11-11 18:56:46,178 Train Loss: 0.0008428, Val Loss: 0.0009891 +2024-11-11 18:56:46,178 Epoch 1127/2000 +2024-11-11 18:57:02,205 Current Learning Rate: 0.0070575718 +2024-11-11 18:57:02,205 Train Loss: 0.0008743, Val Loss: 0.0009457 +2024-11-11 18:57:02,206 Epoch 1128/2000 +2024-11-11 18:57:18,351 Current Learning Rate: 0.0071288965 +2024-11-11 18:57:18,352 Train Loss: 0.0007929, Val Loss: 0.0009199 +2024-11-11 18:57:18,352 Epoch 1129/2000 +2024-11-11 18:57:34,626 Current Learning Rate: 0.0071996958 +2024-11-11 18:57:34,627 Train Loss: 0.0008838, Val Loss: 0.0009955 +2024-11-11 18:57:34,627 Epoch 1130/2000 +2024-11-11 18:57:50,691 Current Learning Rate: 0.0072699525 +2024-11-11 18:57:50,691 Train Loss: 0.0008128, Val Loss: 0.0009871 +2024-11-11 18:57:50,692 Epoch 1131/2000 +2024-11-11 18:58:07,237 Current Learning Rate: 0.0073396491 +2024-11-11 18:58:07,237 Train Loss: 0.0008704, Val Loss: 0.0010355 +2024-11-11 18:58:07,238 Epoch 1132/2000 +2024-11-11 18:58:22,551 Current Learning Rate: 0.0074087684 +2024-11-11 18:58:22,552 Train Loss: 0.0008751, Val Loss: 0.0009685 +2024-11-11 18:58:22,552 Epoch 1133/2000 +2024-11-11 18:58:37,894 Current Learning Rate: 0.0074772933 +2024-11-11 18:58:37,894 Train Loss: 0.0008425, Val Loss: 0.0009452 +2024-11-11 18:58:37,895 Epoch 1134/2000 +2024-11-11 18:58:53,987 Current Learning Rate: 0.0075452071 +2024-11-11 18:58:53,988 Train Loss: 0.0007711, Val Loss: 0.0009159 +2024-11-11 18:58:53,988 Epoch 1135/2000 +2024-11-11 18:59:09,818 Current Learning Rate: 0.0076124928 +2024-11-11 18:59:09,818 Train Loss: 0.0007821, Val Loss: 0.0009073 +2024-11-11 18:59:09,818 Epoch 1136/2000 +2024-11-11 18:59:26,595 Current Learning Rate: 0.0076791340 +2024-11-11 18:59:26,595 Train Loss: 0.0009624, Val Loss: 0.0010466 +2024-11-11 18:59:26,596 Epoch 1137/2000 +2024-11-11 18:59:42,702 Current Learning Rate: 0.0077451141 +2024-11-11 18:59:42,703 Train Loss: 0.0008611, Val Loss: 
0.0009538 +2024-11-11 18:59:42,703 Epoch 1138/2000 +2024-11-11 18:59:58,795 Current Learning Rate: 0.0078104169 +2024-11-11 18:59:58,795 Train Loss: 0.0009359, Val Loss: 0.0010198 +2024-11-11 18:59:58,796 Epoch 1139/2000 +2024-11-11 19:00:13,498 Current Learning Rate: 0.0078750263 +2024-11-11 19:00:13,498 Train Loss: 0.0008688, Val Loss: 0.0009391 +2024-11-11 19:00:13,498 Epoch 1140/2000 +2024-11-11 19:00:28,891 Current Learning Rate: 0.0079389263 +2024-11-11 19:00:28,892 Train Loss: 0.0008664, Val Loss: 0.0009756 +2024-11-11 19:00:28,892 Epoch 1141/2000 +2024-11-11 19:00:44,292 Current Learning Rate: 0.0080021011 +2024-11-11 19:00:44,293 Train Loss: 0.0008583, Val Loss: 0.0009709 +2024-11-11 19:00:44,293 Epoch 1142/2000 +2024-11-11 19:01:00,106 Current Learning Rate: 0.0080645353 +2024-11-11 19:01:00,106 Train Loss: 0.0008195, Val Loss: 0.0009350 +2024-11-11 19:01:00,106 Epoch 1143/2000 +2024-11-11 19:01:15,206 Current Learning Rate: 0.0081262133 +2024-11-11 19:01:15,206 Train Loss: 0.0009231, Val Loss: 0.0010300 +2024-11-11 19:01:15,206 Epoch 1144/2000 +2024-11-11 19:01:30,241 Current Learning Rate: 0.0081871199 +2024-11-11 19:01:30,242 Train Loss: 0.0009033, Val Loss: 0.0010105 +2024-11-11 19:01:30,242 Epoch 1145/2000 +2024-11-11 19:01:46,298 Current Learning Rate: 0.0082472402 +2024-11-11 19:01:46,319 Train Loss: 0.0009219, Val Loss: 0.0009805 +2024-11-11 19:01:46,319 Epoch 1146/2000 +2024-11-11 19:02:02,591 Current Learning Rate: 0.0083065593 +2024-11-11 19:02:02,592 Train Loss: 0.0010280, Val Loss: 0.0010458 +2024-11-11 19:02:02,592 Epoch 1147/2000 +2024-11-11 19:02:18,516 Current Learning Rate: 0.0083650626 +2024-11-11 19:02:18,517 Train Loss: 0.0008359, Val Loss: 0.0009765 +2024-11-11 19:02:18,517 Epoch 1148/2000 +2024-11-11 19:02:34,040 Current Learning Rate: 0.0084227355 +2024-11-11 19:02:34,040 Train Loss: 0.0008897, Val Loss: 0.0010902 +2024-11-11 19:02:34,041 Epoch 1149/2000 +2024-11-11 19:02:48,957 Current Learning Rate: 0.0084795640 +2024-11-11 
19:02:48,957 Train Loss: 0.0009626, Val Loss: 0.0010651 +2024-11-11 19:02:48,957 Epoch 1150/2000 +2024-11-11 19:03:04,906 Current Learning Rate: 0.0085355339 +2024-11-11 19:03:04,906 Train Loss: 0.0009077, Val Loss: 0.0009267 +2024-11-11 19:03:04,907 Epoch 1151/2000 +2024-11-11 19:03:18,450 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-11 19:03:20,329 Current Learning Rate: 0.0085906315 +2024-11-11 19:03:20,331 Train Loss: 0.0008699, Val Loss: 0.0009190 +2024-11-11 19:03:20,332 Epoch 1152/2000 +2024-11-11 19:03:36,464 Current Learning Rate: 0.0086448431 +2024-11-11 19:03:36,465 Train Loss: 0.0008264, Val Loss: 0.0009337 +2024-11-11 19:03:36,466 Epoch 1153/2000 +2024-11-11 19:03:42,658 Loading best model from checkpoint. +2024-11-11 19:03:59,935 Testing completed and best model saved. +-11-11 19:03:51,275 Train Loss: 0.0007436, Val Loss: 0.0009308 +2024-11-11 19:03:51,275 Epoch 1154/2000 +2024-11-11 19:04:06,794 Current Learning Rate: 0.0087505553 +2024-11-11 19:04:06,794 Train Loss: 0.0006984, Val Loss: 0.0008624 +2024-11-11 19:04:06,794 Epoch 1155/2000 +2024-11-11 19:04:22,123 Current Learning Rate: 0.0088020298 +2024-11-11 19:04:22,123 Train Loss: 0.0007567, Val Loss: 0.0008781 +2024-11-11 19:04:22,124 Epoch 1156/2000 +2024-11-11 19:04:37,362 Current Learning Rate: 0.0088525662 +2024-11-11 19:04:37,363 Train Loss: 0.0008130, Val Loss: 0.0008850 +2024-11-11 19:04:37,363 Epoch 1157/2000 +2024-11-11 19:04:53,117 Current Learning Rate: 0.0089021520 +2024-11-11 19:04:53,118 Train Loss: 0.0007347, Val Loss: 0.0009162 +2024-11-11 19:04:53,118 Epoch 1158/2000 +2024-11-11 19:05:09,548 Current Learning Rate: 0.0089507751 +2024-11-11 19:05:09,548 Train Loss: 0.0008315, Val Loss: 0.0009582 +2024-11-11 19:05:09,549 Epoch 1159/2000 +2024-11-11 19:05:26,131 Current Learning Rate: 0.0089984233 +2024-11-11 19:05:26,131 Train Loss: 0.0008935, Val Loss: 0.0009455 +2024-11-11 19:05:26,132 Epoch 1160/2000 +2024-11-11 19:05:42,270 Current Learning Rate: 
0.0090450850 +2024-11-11 19:05:42,271 Train Loss: 0.0007443, Val Loss: 0.0009121 +2024-11-11 19:05:42,271 Epoch 1161/2000 +2024-11-11 19:05:57,768 Current Learning Rate: 0.0090907486 +2024-11-11 19:05:57,769 Train Loss: 0.0007306, Val Loss: 0.0009244 +2024-11-11 19:05:57,769 Epoch 1162/2000 +2024-11-11 19:06:13,568 Current Learning Rate: 0.0091354029 +2024-11-11 19:06:13,569 Train Loss: 0.0009027, Val Loss: 0.0009766 +2024-11-11 19:06:13,569 Epoch 1163/2000 +2024-11-11 19:06:28,837 Current Learning Rate: 0.0091790368 +2024-11-11 19:06:28,837 Train Loss: 0.0008806, Val Loss: 0.0009236 +2024-11-11 19:06:28,837 Epoch 1164/2000 +2024-11-11 19:06:44,143 Current Learning Rate: 0.0092216396 +2024-11-11 19:06:44,144 Train Loss: 0.0008355, Val Loss: 0.0009116 +2024-11-11 19:06:44,144 Epoch 1165/2000 +2024-11-11 19:07:01,271 Current Learning Rate: 0.0092632008 +2024-11-11 19:07:01,272 Train Loss: 0.0008376, Val Loss: 0.0009840 +2024-11-11 19:07:01,272 Epoch 1166/2000 +2024-11-11 19:07:17,404 Current Learning Rate: 0.0093037101 +2024-11-11 19:07:17,404 Train Loss: 0.0009626, Val Loss: 0.0010315 +2024-11-11 19:07:17,404 Epoch 1167/2000 +2024-11-11 19:07:33,394 Current Learning Rate: 0.0093431576 +2024-11-11 19:07:33,395 Train Loss: 0.0009344, Val Loss: 0.0009433 +2024-11-11 19:07:33,395 Epoch 1168/2000 +2024-11-11 19:07:49,114 Current Learning Rate: 0.0093815334 +2024-11-11 19:07:49,115 Train Loss: 0.0009765, Val Loss: 0.0009319 +2024-11-11 19:07:49,115 Epoch 1169/2000 +2024-11-11 19:08:04,723 Current Learning Rate: 0.0094188282 +2024-11-11 19:08:04,723 Train Loss: 0.0008037, Val Loss: 0.0008954 +2024-11-11 19:08:04,723 Epoch 1170/2000 +2024-11-11 19:08:20,121 Current Learning Rate: 0.0094550326 +2024-11-11 19:08:20,121 Train Loss: 0.0007491, Val Loss: 0.0008791 +2024-11-11 19:08:20,121 Epoch 1171/2000 +2024-11-11 19:08:35,509 Current Learning Rate: 0.0094901379 +2024-11-11 19:08:35,510 Train Loss: 0.0007986, Val Loss: 0.0009121 +2024-11-11 19:08:35,510 Epoch 1172/2000 
+2024-11-11 19:08:50,960 Current Learning Rate: 0.0095241353 +2024-11-11 19:08:50,961 Train Loss: 0.0007640, Val Loss: 0.0008937 +2024-11-11 19:08:50,961 Epoch 1173/2000 +2024-11-11 19:09:07,198 Current Learning Rate: 0.0095570164 +2024-11-11 19:09:07,198 Train Loss: 0.0008310, Val Loss: 0.0008824 +2024-11-11 19:09:07,198 Epoch 1174/2000 +2024-11-11 19:09:23,042 Current Learning Rate: 0.0095887731 +2024-11-11 19:09:23,042 Train Loss: 0.0008596, Val Loss: 0.0009071 +2024-11-11 19:09:23,042 Epoch 1175/2000 +2024-11-11 19:09:38,905 Current Learning Rate: 0.0096193977 +2024-11-11 19:09:38,905 Train Loss: 0.0007866, Val Loss: 0.0009351 +2024-11-11 19:09:38,906 Epoch 1176/2000 +2024-11-11 19:09:55,190 Current Learning Rate: 0.0096488824 +2024-11-11 19:09:55,190 Train Loss: 0.0007205, Val Loss: 0.0009014 +2024-11-11 19:09:55,190 Epoch 1177/2000 +2024-11-11 19:10:11,353 Current Learning Rate: 0.0096772202 +2024-11-11 19:10:11,353 Train Loss: 0.0007054, Val Loss: 0.0008608 +2024-11-11 19:10:11,353 Epoch 1178/2000 +2024-11-11 19:10:26,492 Current Learning Rate: 0.0097044038 +2024-11-11 19:10:26,492 Train Loss: 0.0007777, Val Loss: 0.0009524 +2024-11-11 19:10:26,493 Epoch 1179/2000 +2024-11-11 19:10:42,047 Current Learning Rate: 0.0097304268 +2024-11-11 19:10:42,048 Train Loss: 0.0009247, Val Loss: 0.0010017 +2024-11-11 19:10:42,048 Epoch 1180/2000 +2024-11-11 19:10:57,387 Current Learning Rate: 0.0097552826 +2024-11-11 19:10:57,388 Train Loss: 0.0009114, Val Loss: 0.0010670 +2024-11-11 19:10:57,388 Epoch 1181/2000 +2024-11-11 19:11:13,262 Current Learning Rate: 0.0097789651 +2024-11-11 19:11:13,262 Train Loss: 0.0009571, Val Loss: 0.0010167 +2024-11-11 19:11:13,263 Epoch 1182/2000 +2024-11-11 19:11:29,838 Current Learning Rate: 0.0098014684 +2024-11-11 19:11:29,839 Train Loss: 0.0008427, Val Loss: 0.0009216 +2024-11-11 19:11:29,839 Epoch 1183/2000 +2024-11-11 19:11:45,016 Current Learning Rate: 0.0098227871 +2024-11-11 19:11:45,017 Train Loss: 0.0007869, Val Loss: 0.0009104 
+2024-11-11 19:11:45,017 Epoch 1184/2000 +2024-11-11 19:12:00,408 Current Learning Rate: 0.0098429158 +2024-11-11 19:12:00,409 Train Loss: 0.0007923, Val Loss: 0.0009151 +2024-11-11 19:12:00,409 Epoch 1185/2000 +2024-11-11 19:12:15,984 Current Learning Rate: 0.0098618496 +2024-11-11 19:12:15,984 Train Loss: 0.0007653, Val Loss: 0.0009307 +2024-11-11 19:12:15,984 Epoch 1186/2000 +2024-11-11 19:12:32,713 Current Learning Rate: 0.0098795838 +2024-11-11 19:12:32,714 Train Loss: 0.0007941, Val Loss: 0.0009135 +2024-11-11 19:12:32,714 Epoch 1187/2000 +2024-11-11 19:12:49,129 Current Learning Rate: 0.0098961141 +2024-11-11 19:12:49,130 Train Loss: 0.0007957, Val Loss: 0.0008860 +2024-11-11 19:12:49,130 Epoch 1188/2000 +2024-11-11 19:13:05,500 Current Learning Rate: 0.0099114363 +2024-11-11 19:13:05,501 Train Loss: 0.0007098, Val Loss: 0.0008635 +2024-11-11 19:13:05,501 Epoch 1189/2000 +2024-11-11 19:13:21,816 Current Learning Rate: 0.0099255466 +2024-11-11 19:13:21,816 Train Loss: 0.0008032, Val Loss: 0.0009301 +2024-11-11 19:13:21,816 Epoch 1190/2000 +2024-11-11 19:13:38,502 Current Learning Rate: 0.0099384417 +2024-11-11 19:13:38,502 Train Loss: 0.0008252, Val Loss: 0.0009136 +2024-11-11 19:13:38,503 Epoch 1191/2000 +2024-11-11 19:13:53,770 Current Learning Rate: 0.0099501183 +2024-11-11 19:13:53,771 Train Loss: 0.0007597, Val Loss: 0.0009133 +2024-11-11 19:13:53,771 Epoch 1192/2000 +2024-11-11 19:14:09,597 Current Learning Rate: 0.0099605735 +2024-11-11 19:14:09,598 Train Loss: 0.0008328, Val Loss: 0.0008811 +2024-11-11 19:14:09,598 Epoch 1193/2000 +2024-11-11 19:14:25,733 Current Learning Rate: 0.0099698048 +2024-11-11 19:14:25,734 Train Loss: 0.0007931, Val Loss: 0.0009434 +2024-11-11 19:14:25,734 Epoch 1194/2000 +2024-11-11 19:14:40,484 Current Learning Rate: 0.0099778098 +2024-11-11 19:14:40,485 Train Loss: 0.0009715, Val Loss: 0.0009576 +2024-11-11 19:14:40,485 Epoch 1195/2000 +2024-11-11 19:14:55,865 Current Learning Rate: 0.0099845867 +2024-11-11 19:14:55,865 
Train Loss: 0.0007851, Val Loss: 0.0009919 +2024-11-11 19:14:55,865 Epoch 1196/2000 +2024-11-11 19:15:12,166 Current Learning Rate: 0.0099901336 +2024-11-11 19:15:12,167 Train Loss: 0.0008322, Val Loss: 0.0009438 +2024-11-11 19:15:12,167 Epoch 1197/2000 +2024-11-11 19:15:27,591 Current Learning Rate: 0.0099944494 +2024-11-11 19:15:27,591 Train Loss: 0.0008722, Val Loss: 0.0009891 +2024-11-11 19:15:27,591 Epoch 1198/2000 +2024-11-11 19:15:43,066 Current Learning Rate: 0.0099975328 +2024-11-11 19:15:43,066 Train Loss: 0.0007786, Val Loss: 0.0008811 +2024-11-11 19:15:43,067 Epoch 1199/2000 +2024-11-11 19:15:58,443 Current Learning Rate: 0.0099993832 +2024-11-11 19:15:58,443 Train Loss: 0.0008825, Val Loss: 0.0009694 +2024-11-11 19:15:58,443 Epoch 1200/2000 +2024-11-11 19:16:15,059 Current Learning Rate: 0.0100000000 +2024-11-11 19:16:15,061 Train Loss: 0.0009325, Val Loss: 0.0009888 +2024-11-11 19:16:15,061 Epoch 1201/2000 +2024-11-11 19:16:31,458 Current Learning Rate: 0.0099993832 +2024-11-11 19:16:31,459 Train Loss: 0.0009915, Val Loss: 0.0009818 +2024-11-11 19:16:31,459 Epoch 1202/2000 +2024-11-11 19:16:47,431 Current Learning Rate: 0.0099975328 +2024-11-11 19:16:47,432 Train Loss: 0.0007678, Val Loss: 0.0008826 +2024-11-11 19:16:47,432 Epoch 1203/2000 +2024-11-11 19:17:02,996 Current Learning Rate: 0.0099944494 +2024-11-11 19:17:02,997 Train Loss: 0.0009000, Val Loss: 0.0008945 +2024-11-11 19:17:02,997 Epoch 1204/2000 +2024-11-11 19:17:19,426 Current Learning Rate: 0.0099901336 +2024-11-11 19:17:19,426 Train Loss: 0.0008233, Val Loss: 0.0008658 +2024-11-11 19:17:19,426 Epoch 1205/2000 +2024-11-11 19:17:35,972 Current Learning Rate: 0.0099845867 +2024-11-11 19:17:35,972 Train Loss: 0.0007527, Val Loss: 0.0008549 +2024-11-11 19:17:35,972 Epoch 1206/2000 +2024-11-11 19:17:50,680 Current Learning Rate: 0.0099778098 +2024-11-11 19:17:51,480 Train Loss: 0.0007904, Val Loss: 0.0008346 +2024-11-11 19:17:51,481 Epoch 1207/2000 +2024-11-11 19:18:06,040 Current Learning 
Rate: 0.0099698048 +2024-11-11 19:18:06,801 Train Loss: 0.0007418, Val Loss: 0.0008254 +2024-11-11 19:18:06,801 Epoch 1208/2000 +2024-11-11 19:18:21,420 Current Learning Rate: 0.0099605735 +2024-11-11 19:18:21,421 Train Loss: 0.0007113, Val Loss: 0.0009579 +2024-11-11 19:18:21,421 Epoch 1209/2000 +2024-11-11 19:18:36,840 Current Learning Rate: 0.0099501183 +2024-11-11 19:18:36,841 Train Loss: 0.0008529, Val Loss: 0.0009929 +2024-11-11 19:18:36,841 Epoch 1210/2000 +2024-11-11 19:18:53,220 Current Learning Rate: 0.0099384417 +2024-11-11 19:18:53,221 Train Loss: 0.0007548, Val Loss: 0.0008843 +2024-11-11 19:18:53,221 Epoch 1211/2000 +2024-11-11 19:19:09,386 Current Learning Rate: 0.0099255466 +2024-11-11 19:19:09,387 Train Loss: 0.0008613, Val Loss: 0.0009586 +2024-11-11 19:19:09,387 Epoch 1212/2000 +2024-11-11 19:19:25,803 Current Learning Rate: 0.0099114363 +2024-11-11 19:19:25,803 Train Loss: 0.0008580, Val Loss: 0.0010224 +2024-11-11 19:19:25,804 Epoch 1213/2000 +2024-11-11 19:19:42,245 Current Learning Rate: 0.0098961141 +2024-11-11 19:19:42,245 Train Loss: 0.0007980, Val Loss: 0.0008683 +2024-11-11 19:19:42,246 Epoch 1214/2000 +2024-11-11 19:19:58,284 Current Learning Rate: 0.0098795838 +2024-11-11 19:19:58,285 Train Loss: 0.0008451, Val Loss: 0.0009139 +2024-11-11 19:19:58,285 Epoch 1215/2000 +2024-11-11 19:20:14,202 Current Learning Rate: 0.0098618496 +2024-11-11 19:20:14,202 Train Loss: 0.0007909, Val Loss: 0.0008778 +2024-11-11 19:20:14,203 Epoch 1216/2000 +2024-11-11 19:20:30,648 Current Learning Rate: 0.0098429158 +2024-11-11 19:20:30,649 Train Loss: 0.0007579, Val Loss: 0.0008329 +2024-11-11 19:20:30,649 Epoch 1217/2000 +2024-11-11 19:20:46,337 Current Learning Rate: 0.0098227871 +2024-11-11 19:20:46,338 Train Loss: 0.0007602, Val Loss: 0.0008391 +2024-11-11 19:20:46,338 Epoch 1218/2000 +2024-11-11 19:21:02,049 Current Learning Rate: 0.0098014684 +2024-11-11 19:21:02,049 Train Loss: 0.0007357, Val Loss: 0.0008317 +2024-11-11 19:21:02,049 Epoch 1219/2000 
+2024-11-11 19:21:17,302 Current Learning Rate: 0.0097789651 +2024-11-11 19:21:17,303 Train Loss: 0.0006970, Val Loss: 0.0008263 +2024-11-11 19:21:17,303 Epoch 1220/2000 +2024-11-11 19:21:32,763 Current Learning Rate: 0.0097552826 +2024-11-11 19:21:33,486 Train Loss: 0.0007238, Val Loss: 0.0008217 +2024-11-11 19:21:33,486 Epoch 1221/2000 +2024-11-11 19:21:48,531 Current Learning Rate: 0.0097304268 +2024-11-11 19:21:48,532 Train Loss: 0.0008221, Val Loss: 0.0008921 +2024-11-11 19:21:48,532 Epoch 1222/2000 +2024-11-11 19:22:04,214 Current Learning Rate: 0.0097044038 +2024-11-11 19:22:04,214 Train Loss: 0.0007832, Val Loss: 0.0008790 +2024-11-11 19:22:04,214 Epoch 1223/2000 +2024-11-11 19:22:19,294 Current Learning Rate: 0.0096772202 +2024-11-11 19:22:19,294 Train Loss: 0.0007623, Val Loss: 0.0008899 +2024-11-11 19:22:19,295 Epoch 1224/2000 +2024-11-11 19:22:35,499 Current Learning Rate: 0.0096488824 +2024-11-11 19:22:35,499 Train Loss: 0.0007388, Val Loss: 0.0008893 +2024-11-11 19:22:35,499 Epoch 1225/2000 +2024-11-11 19:22:50,695 Current Learning Rate: 0.0096193977 +2024-11-11 19:22:50,695 Train Loss: 0.0007008, Val Loss: 0.0008764 +2024-11-11 19:22:50,696 Epoch 1226/2000 +2024-11-11 19:23:06,849 Current Learning Rate: 0.0095887731 +2024-11-11 19:23:06,849 Train Loss: 0.0007780, Val Loss: 0.0009302 +2024-11-11 19:23:06,849 Epoch 1227/2000 +2024-11-11 19:23:22,766 Current Learning Rate: 0.0095570164 +2024-11-11 19:23:22,767 Train Loss: 0.0007999, Val Loss: 0.0008586 +2024-11-11 19:23:22,767 Epoch 1228/2000 +2024-11-11 19:23:39,711 Current Learning Rate: 0.0095241353 +2024-11-11 19:23:39,712 Train Loss: 0.0007903, Val Loss: 0.0009007 +2024-11-11 19:23:39,712 Epoch 1229/2000 +2024-11-11 19:23:56,029 Current Learning Rate: 0.0094901379 +2024-11-11 19:23:56,029 Train Loss: 0.0008943, Val Loss: 0.0008876 +2024-11-11 19:23:56,030 Epoch 1230/2000 +2024-11-11 19:24:11,968 Current Learning Rate: 0.0094550326 +2024-11-11 19:24:11,969 Train Loss: 0.0007999, Val Loss: 0.0008620 
+2024-11-11 19:24:11,970 Epoch 1231/2000 +2024-11-11 19:24:29,057 Current Learning Rate: 0.0094188282 +2024-11-11 19:24:29,058 Train Loss: 0.0007607, Val Loss: 0.0008616 +2024-11-11 19:24:29,058 Epoch 1232/2000 +2024-11-11 19:24:44,139 Current Learning Rate: 0.0093815334 +2024-11-11 19:24:44,953 Train Loss: 0.0006732, Val Loss: 0.0007991 +2024-11-11 19:24:44,953 Epoch 1233/2000 +2024-11-11 19:25:00,751 Current Learning Rate: 0.0093431576 +2024-11-11 19:25:00,753 Train Loss: 0.0007684, Val Loss: 0.0008466 +2024-11-11 19:25:00,754 Epoch 1234/2000 +2024-11-11 19:25:16,710 Current Learning Rate: 0.0093037101 +2024-11-11 19:25:16,711 Train Loss: 0.0008696, Val Loss: 0.0008539 +2024-11-11 19:25:16,712 Epoch 1235/2000 +2024-11-11 19:25:33,031 Current Learning Rate: 0.0092632008 +2024-11-11 19:25:33,033 Train Loss: 0.0006910, Val Loss: 0.0008334 +2024-11-11 19:25:33,033 Epoch 1236/2000 +2024-11-11 19:25:48,841 Current Learning Rate: 0.0092216396 +2024-11-11 19:25:48,841 Train Loss: 0.0007120, Val Loss: 0.0008217 +2024-11-11 19:25:48,841 Epoch 1237/2000 +2024-11-11 19:26:04,302 Current Learning Rate: 0.0091790368 +2024-11-11 19:26:04,303 Train Loss: 0.0007135, Val Loss: 0.0008230 +2024-11-11 19:26:04,304 Epoch 1238/2000 +2024-11-11 19:26:19,513 Current Learning Rate: 0.0091354029 +2024-11-11 19:26:19,514 Train Loss: 0.0008573, Val Loss: 0.0008267 +2024-11-11 19:26:19,514 Epoch 1239/2000 +2024-11-11 19:26:36,588 Current Learning Rate: 0.0090907486 +2024-11-11 19:26:36,589 Train Loss: 0.0006955, Val Loss: 0.0008639 +2024-11-11 19:26:36,589 Epoch 1240/2000 +2024-11-11 19:26:51,627 Current Learning Rate: 0.0090450850 +2024-11-11 19:26:51,627 Train Loss: 0.0008323, Val Loss: 0.0009813 +2024-11-11 19:26:51,628 Epoch 1241/2000 +2024-11-11 19:27:07,013 Current Learning Rate: 0.0089984233 +2024-11-11 19:27:07,013 Train Loss: 0.0008396, Val Loss: 0.0009004 +2024-11-11 19:27:07,013 Epoch 1242/2000 +2024-11-11 19:27:22,295 Current Learning Rate: 0.0089507751 +2024-11-11 19:27:22,295 
Train Loss: 0.0007598, Val Loss: 0.0008683 +2024-11-11 19:27:22,295 Epoch 1243/2000 +2024-11-11 19:27:37,602 Current Learning Rate: 0.0089021520 +2024-11-11 19:27:37,603 Train Loss: 0.0007631, Val Loss: 0.0008305 +2024-11-11 19:27:37,603 Epoch 1244/2000 +2024-11-11 19:27:53,458 Current Learning Rate: 0.0088525662 +2024-11-11 19:27:53,459 Train Loss: 0.0007240, Val Loss: 0.0008004 +2024-11-11 19:27:53,459 Epoch 1245/2000 +2024-11-11 19:28:09,388 Current Learning Rate: 0.0088020298 +2024-11-11 19:28:09,389 Train Loss: 0.0007155, Val Loss: 0.0007998 +2024-11-11 19:28:09,389 Epoch 1246/2000 +2024-11-11 19:28:24,273 Current Learning Rate: 0.0087505553 +2024-11-11 19:28:24,273 Train Loss: 0.0006628, Val Loss: 0.0008059 +2024-11-11 19:28:24,273 Epoch 1247/2000 +2024-11-11 19:28:39,724 Current Learning Rate: 0.0086981555 +2024-11-11 19:28:39,725 Train Loss: 0.0007403, Val Loss: 0.0008217 +2024-11-11 19:28:39,725 Epoch 1248/2000 +2024-11-11 19:28:54,997 Current Learning Rate: 0.0086448431 +2024-11-11 19:28:54,998 Train Loss: 0.0007295, Val Loss: 0.0008898 +2024-11-11 19:28:54,998 Epoch 1249/2000 +2024-11-11 19:29:11,772 Current Learning Rate: 0.0085906315 +2024-11-11 19:29:11,772 Train Loss: 0.0007997, Val Loss: 0.0008358 +2024-11-11 19:29:11,772 Epoch 1250/2000 +2024-11-11 19:29:27,488 Current Learning Rate: 0.0085355339 +2024-11-11 19:29:27,489 Train Loss: 0.0006966, Val Loss: 0.0008217 +2024-11-11 19:29:27,489 Epoch 1251/2000 +2024-11-11 19:29:43,157 Current Learning Rate: 0.0084795640 +2024-11-11 19:29:44,157 Train Loss: 0.0006985, Val Loss: 0.0007988 +2024-11-11 19:29:44,157 Epoch 1252/2000 +2024-11-11 19:29:59,832 Current Learning Rate: 0.0084227355 +2024-11-11 19:30:00,861 Train Loss: 0.0007554, Val Loss: 0.0007808 +2024-11-11 19:30:00,861 Epoch 1253/2000 +2024-11-11 19:30:17,425 Current Learning Rate: 0.0083650626 +2024-11-11 19:30:18,514 Train Loss: 0.0006460, Val Loss: 0.0007802 +2024-11-11 19:30:18,514 Epoch 1254/2000 +2024-11-11 19:30:33,545 Current Learning 
Rate: 0.0083065593 +2024-11-11 19:30:33,546 Train Loss: 0.0006969, Val Loss: 0.0008002 +2024-11-11 19:30:33,546 Epoch 1255/2000 +2024-11-11 19:30:50,268 Current Learning Rate: 0.0082472402 +2024-11-11 19:30:50,269 Train Loss: 0.0007126, Val Loss: 0.0008224 +2024-11-11 19:30:50,269 Epoch 1256/2000 +2024-11-11 19:31:06,405 Current Learning Rate: 0.0081871199 +2024-11-11 19:31:06,406 Train Loss: 0.0007588, Val Loss: 0.0008356 +2024-11-11 19:31:06,406 Epoch 1257/2000 +2024-11-11 19:31:22,763 Current Learning Rate: 0.0081262133 +2024-11-11 19:31:22,763 Train Loss: 0.0006239, Val Loss: 0.0008300 +2024-11-11 19:31:22,763 Epoch 1258/2000 +2024-11-11 19:31:37,510 Current Learning Rate: 0.0080645353 +2024-11-11 19:31:37,511 Train Loss: 0.0007517, Val Loss: 0.0008245 +2024-11-11 19:31:37,511 Epoch 1259/2000 +2024-11-11 19:31:53,515 Current Learning Rate: 0.0080021011 +2024-11-11 19:31:54,295 Train Loss: 0.0006759, Val Loss: 0.0007783 +2024-11-11 19:31:54,295 Epoch 1260/2000 +2024-11-11 19:32:09,350 Current Learning Rate: 0.0079389263 +2024-11-11 19:32:10,081 Train Loss: 0.0007044, Val Loss: 0.0007597 +2024-11-11 19:32:10,082 Epoch 1261/2000 +2024-11-11 19:32:25,353 Current Learning Rate: 0.0078750263 +2024-11-11 19:32:25,353 Train Loss: 0.0007259, Val Loss: 0.0007679 +2024-11-11 19:32:25,354 Epoch 1262/2000 +2024-11-11 19:32:40,724 Current Learning Rate: 0.0078104169 +2024-11-11 19:32:40,725 Train Loss: 0.0007029, Val Loss: 0.0007838 +2024-11-11 19:32:40,725 Epoch 1263/2000 +2024-11-11 19:32:57,374 Current Learning Rate: 0.0077451141 +2024-11-11 19:32:57,375 Train Loss: 0.0006776, Val Loss: 0.0008143 +2024-11-11 19:32:57,375 Epoch 1264/2000 +2024-11-11 19:33:12,321 Current Learning Rate: 0.0076791340 +2024-11-11 19:33:12,322 Train Loss: 0.0006801, Val Loss: 0.0008060 +2024-11-11 19:33:12,322 Epoch 1265/2000 +2024-11-11 19:33:27,753 Current Learning Rate: 0.0076124928 +2024-11-11 19:33:27,754 Train Loss: 0.0007322, Val Loss: 0.0009160 +2024-11-11 19:33:27,754 Epoch 1266/2000 
+2024-11-11 19:33:43,418 Current Learning Rate: 0.0075452071 +2024-11-11 19:33:43,418 Train Loss: 0.0007673, Val Loss: 0.0008446 +2024-11-11 19:33:43,418 Epoch 1267/2000 +2024-11-11 19:33:58,983 Current Learning Rate: 0.0074772933 +2024-11-11 19:33:58,984 Train Loss: 0.0007103, Val Loss: 0.0007735 +2024-11-11 19:33:58,984 Epoch 1268/2000 +2024-11-11 19:34:14,391 Current Learning Rate: 0.0074087684 +2024-11-11 19:34:14,391 Train Loss: 0.0006179, Val Loss: 0.0007601 +2024-11-11 19:34:14,391 Epoch 1269/2000 +2024-11-11 19:34:30,470 Current Learning Rate: 0.0073396491 +2024-11-11 19:34:30,471 Train Loss: 0.0006432, Val Loss: 0.0007638 +2024-11-11 19:34:30,471 Epoch 1270/2000 +2024-11-11 19:34:45,792 Current Learning Rate: 0.0072699525 +2024-11-11 19:34:45,792 Train Loss: 0.0007119, Val Loss: 0.0007776 +2024-11-11 19:34:45,793 Epoch 1271/2000 +2024-11-11 19:35:01,007 Current Learning Rate: 0.0071996958 +2024-11-11 19:35:01,008 Train Loss: 0.0005824, Val Loss: 0.0007780 +2024-11-11 19:35:01,008 Epoch 1272/2000 +2024-11-11 19:35:16,516 Current Learning Rate: 0.0071288965 +2024-11-11 19:35:16,516 Train Loss: 0.0006603, Val Loss: 0.0007763 +2024-11-11 19:35:16,516 Epoch 1273/2000 +2024-11-11 19:35:32,010 Current Learning Rate: 0.0070575718 +2024-11-11 19:35:32,817 Train Loss: 0.0006173, Val Loss: 0.0007556 +2024-11-11 19:35:32,817 Epoch 1274/2000 +2024-11-11 19:35:47,430 Current Learning Rate: 0.0069857395 +2024-11-11 19:35:48,212 Train Loss: 0.0005924, Val Loss: 0.0007387 +2024-11-11 19:35:48,212 Epoch 1275/2000 +2024-11-11 19:36:03,384 Current Learning Rate: 0.0069134172 +2024-11-11 19:36:03,384 Train Loss: 0.0006143, Val Loss: 0.0007536 +2024-11-11 19:36:03,385 Epoch 1276/2000 +2024-11-11 19:36:18,807 Current Learning Rate: 0.0068406228 +2024-11-11 19:36:18,808 Train Loss: 0.0006034, Val Loss: 0.0007466 +2024-11-11 19:36:18,809 Epoch 1277/2000 +2024-11-11 19:36:34,356 Current Learning Rate: 0.0067673742 +2024-11-11 19:36:34,356 Train Loss: 0.0006757, Val Loss: 0.0007473 
+2024-11-11 19:36:34,357 Epoch 1278/2000 +2024-11-11 19:36:49,397 Current Learning Rate: 0.0066936896 +2024-11-11 19:36:49,397 Train Loss: 0.0005976, Val Loss: 0.0007534 +2024-11-11 19:36:49,398 Epoch 1279/2000 +2024-11-11 19:37:04,483 Current Learning Rate: 0.0066195871 +2024-11-11 19:37:04,484 Train Loss: 0.0006511, Val Loss: 0.0007671 +2024-11-11 19:37:04,484 Epoch 1280/2000 +2024-11-11 19:37:19,936 Current Learning Rate: 0.0065450850 +2024-11-11 19:37:19,937 Train Loss: 0.0005711, Val Loss: 0.0007763 +2024-11-11 19:37:19,937 Epoch 1281/2000 +2024-11-11 19:37:35,185 Current Learning Rate: 0.0064702016 +2024-11-11 19:37:35,185 Train Loss: 0.0006569, Val Loss: 0.0008083 +2024-11-11 19:37:35,185 Epoch 1282/2000 +2024-11-11 19:37:50,662 Current Learning Rate: 0.0063949555 +2024-11-11 19:37:50,663 Train Loss: 0.0006829, Val Loss: 0.0008200 +2024-11-11 19:37:50,664 Epoch 1283/2000 +2024-11-11 19:38:05,685 Current Learning Rate: 0.0063193652 +2024-11-11 19:38:05,686 Train Loss: 0.0006795, Val Loss: 0.0007918 +2024-11-11 19:38:05,687 Epoch 1284/2000 +2024-11-11 19:38:21,127 Current Learning Rate: 0.0062434494 +2024-11-11 19:38:21,128 Train Loss: 0.0007056, Val Loss: 0.0008107 +2024-11-11 19:38:21,128 Epoch 1285/2000 +2024-11-11 19:38:36,436 Current Learning Rate: 0.0061672268 +2024-11-11 19:38:36,437 Train Loss: 0.0007121, Val Loss: 0.0007828 +2024-11-11 19:38:36,438 Epoch 1286/2000 +2024-11-11 19:38:51,528 Current Learning Rate: 0.0060907162 +2024-11-11 19:38:51,528 Train Loss: 0.0007112, Val Loss: 0.0007594 +2024-11-11 19:38:51,529 Epoch 1287/2000 +2024-11-11 19:39:07,152 Current Learning Rate: 0.0060139365 +2024-11-11 19:39:07,153 Train Loss: 0.0006527, Val Loss: 0.0007456 +2024-11-11 19:39:07,153 Epoch 1288/2000 +2024-11-11 19:39:22,864 Current Learning Rate: 0.0059369066 +2024-11-11 19:39:22,864 Train Loss: 0.0006117, Val Loss: 0.0007583 +2024-11-11 19:39:22,865 Epoch 1289/2000 +2024-11-11 19:39:38,441 Current Learning Rate: 0.0058596455 +2024-11-11 19:39:38,441 
Train Loss: 0.0007125, Val Loss: 0.0007986 +2024-11-11 19:39:38,442 Epoch 1290/2000 +2024-11-11 19:39:54,221 Current Learning Rate: 0.0057821723 +2024-11-11 19:39:54,221 Train Loss: 0.0006777, Val Loss: 0.0007551 +2024-11-11 19:39:54,222 Epoch 1291/2000 +2024-11-11 19:40:10,229 Current Learning Rate: 0.0057045062 +2024-11-11 19:40:10,230 Train Loss: 0.0006606, Val Loss: 0.0007435 +2024-11-11 19:40:10,231 Epoch 1292/2000 +2024-11-11 19:40:25,814 Current Learning Rate: 0.0056266662 +2024-11-11 19:40:25,815 Train Loss: 0.0006028, Val Loss: 0.0007567 +2024-11-11 19:40:25,815 Epoch 1293/2000 +2024-11-11 19:40:42,175 Current Learning Rate: 0.0055486716 +2024-11-11 19:40:42,927 Train Loss: 0.0006782, Val Loss: 0.0007165 +2024-11-11 19:40:42,927 Epoch 1294/2000 +2024-11-11 19:40:58,217 Current Learning Rate: 0.0054705416 +2024-11-11 19:40:59,109 Train Loss: 0.0006235, Val Loss: 0.0007030 +2024-11-11 19:40:59,109 Epoch 1295/2000 +2024-11-11 19:41:14,177 Current Learning Rate: 0.0053922955 +2024-11-11 19:41:14,178 Train Loss: 0.0006007, Val Loss: 0.0007068 +2024-11-11 19:41:14,179 Epoch 1296/2000 +2024-11-11 19:41:29,127 Current Learning Rate: 0.0053139526 +2024-11-11 19:41:29,127 Train Loss: 0.0006074, Val Loss: 0.0007046 +2024-11-11 19:41:29,128 Epoch 1297/2000 +2024-11-11 19:41:44,751 Current Learning Rate: 0.0052355323 +2024-11-11 19:41:45,497 Train Loss: 0.0005509, Val Loss: 0.0006939 +2024-11-11 19:41:45,497 Epoch 1298/2000 +2024-11-11 19:42:00,626 Current Learning Rate: 0.0051570538 +2024-11-11 19:42:01,625 Train Loss: 0.0006324, Val Loss: 0.0006921 +2024-11-11 19:42:01,625 Epoch 1299/2000 +2024-11-11 19:42:17,627 Current Learning Rate: 0.0050785366 +2024-11-11 19:42:17,628 Train Loss: 0.0006506, Val Loss: 0.0006980 +2024-11-11 19:42:17,628 Epoch 1300/2000 +2024-11-11 19:42:33,534 Current Learning Rate: 0.0050000000 +2024-11-11 19:42:33,535 Train Loss: 0.0007355, Val Loss: 0.0007156 +2024-11-11 19:42:33,535 Epoch 1301/2000 +2024-11-11 19:42:48,820 Current Learning 
Rate: 0.0049214634 +2024-11-11 19:42:49,835 Train Loss: 0.0006352, Val Loss: 0.0006887 +2024-11-11 19:42:49,836 Epoch 1302/2000 +2024-11-11 19:43:05,428 Current Learning Rate: 0.0048429462 +2024-11-11 19:43:06,263 Train Loss: 0.0006389, Val Loss: 0.0006805 +2024-11-11 19:43:06,264 Epoch 1303/2000 +2024-11-11 19:43:21,545 Current Learning Rate: 0.0047644677 +2024-11-11 19:43:22,491 Train Loss: 0.0005725, Val Loss: 0.0006790 +2024-11-11 19:43:22,491 Epoch 1304/2000 +2024-11-11 19:43:37,963 Current Learning Rate: 0.0046860474 +2024-11-11 19:43:37,964 Train Loss: 0.0005542, Val Loss: 0.0006816 +2024-11-11 19:43:37,964 Epoch 1305/2000 +2024-11-11 19:43:53,766 Current Learning Rate: 0.0046077045 +2024-11-11 19:43:53,767 Train Loss: 0.0006077, Val Loss: 0.0006828 +2024-11-11 19:43:53,767 Epoch 1306/2000 +2024-11-11 19:44:09,461 Current Learning Rate: 0.0045294584 +2024-11-11 19:44:09,462 Train Loss: 0.0005358, Val Loss: 0.0006855 +2024-11-11 19:44:09,462 Epoch 1307/2000 +2024-11-11 19:44:24,723 Current Learning Rate: 0.0044513284 +2024-11-11 19:44:24,723 Train Loss: 0.0005936, Val Loss: 0.0006877 +2024-11-11 19:44:24,723 Epoch 1308/2000 +2024-11-11 19:44:40,655 Current Learning Rate: 0.0043733338 +2024-11-11 19:44:40,656 Train Loss: 0.0005518, Val Loss: 0.0006815 +2024-11-11 19:44:40,656 Epoch 1309/2000 +2024-11-11 19:44:56,368 Current Learning Rate: 0.0042954938 +2024-11-11 19:44:57,079 Train Loss: 0.0005299, Val Loss: 0.0006740 +2024-11-11 19:44:57,079 Epoch 1310/2000 +2024-11-11 19:45:12,025 Current Learning Rate: 0.0042178277 +2024-11-11 19:45:12,026 Train Loss: 0.0006295, Val Loss: 0.0006814 +2024-11-11 19:45:12,026 Epoch 1311/2000 +2024-11-11 19:45:27,415 Current Learning Rate: 0.0041403545 +2024-11-11 19:45:27,416 Train Loss: 0.0005916, Val Loss: 0.0006889 +2024-11-11 19:45:27,416 Epoch 1312/2000 +2024-11-11 19:45:43,210 Current Learning Rate: 0.0040630934 +2024-11-11 19:45:44,034 Train Loss: 0.0006301, Val Loss: 0.0006719 +2024-11-11 19:45:44,035 Epoch 1313/2000 
+2024-11-11 19:45:59,092 Current Learning Rate: 0.0039860635 +2024-11-11 19:45:59,093 Train Loss: 0.0005829, Val Loss: 0.0006777 +2024-11-11 19:45:59,093 Epoch 1314/2000 +2024-11-11 19:46:15,211 Current Learning Rate: 0.0039092838 +2024-11-11 19:46:15,212 Train Loss: 0.0005170, Val Loss: 0.0007158 +2024-11-11 19:46:15,212 Epoch 1315/2000 +2024-11-11 19:46:30,250 Current Learning Rate: 0.0038327732 +2024-11-11 19:46:30,251 Train Loss: 0.0005662, Val Loss: 0.0007079 +2024-11-11 19:46:30,251 Epoch 1316/2000 +2024-11-11 19:46:45,529 Current Learning Rate: 0.0037565506 +2024-11-11 19:46:45,529 Train Loss: 0.0005735, Val Loss: 0.0006768 +2024-11-11 19:46:45,529 Epoch 1317/2000 +2024-11-11 19:47:01,289 Current Learning Rate: 0.0036806348 +2024-11-11 19:47:01,290 Train Loss: 0.0005399, Val Loss: 0.0007005 +2024-11-11 19:47:01,291 Epoch 1318/2000 +2024-11-11 19:47:16,945 Current Learning Rate: 0.0036050445 +2024-11-11 19:47:16,946 Train Loss: 0.0005213, Val Loss: 0.0007417 +2024-11-11 19:47:16,946 Epoch 1319/2000 +2024-11-11 19:47:33,087 Current Learning Rate: 0.0035297984 +2024-11-11 19:47:33,088 Train Loss: 0.0006110, Val Loss: 0.0006880 +2024-11-11 19:47:33,088 Epoch 1320/2000 +2024-11-11 19:47:49,882 Current Learning Rate: 0.0034549150 +2024-11-11 19:47:49,882 Train Loss: 0.0005038, Val Loss: 0.0006751 +2024-11-11 19:47:49,882 Epoch 1321/2000 +2024-11-11 19:48:05,855 Current Learning Rate: 0.0033804129 +2024-11-11 19:48:05,856 Train Loss: 0.0005429, Val Loss: 0.0006789 +2024-11-11 19:48:05,856 Epoch 1322/2000 +2024-11-11 19:48:21,447 Current Learning Rate: 0.0033063104 +2024-11-11 19:48:21,447 Train Loss: 0.0005773, Val Loss: 0.0006816 +2024-11-11 19:48:21,447 Epoch 1323/2000 +2024-11-11 19:48:37,225 Current Learning Rate: 0.0032326258 +2024-11-11 19:48:37,226 Train Loss: 0.0005571, Val Loss: 0.0006779 +2024-11-11 19:48:37,226 Epoch 1324/2000 +2024-11-11 19:48:52,382 Current Learning Rate: 0.0031593772 +2024-11-11 19:48:53,203 Train Loss: 0.0005491, Val Loss: 0.0006707 
+2024-11-11 19:48:53,203 Epoch 1325/2000 +2024-11-11 19:49:07,763 Current Learning Rate: 0.0030865828 +2024-11-11 19:49:08,458 Train Loss: 0.0005470, Val Loss: 0.0006639 +2024-11-11 19:49:08,458 Epoch 1326/2000 +2024-11-11 19:49:23,171 Current Learning Rate: 0.0030142605 +2024-11-11 19:49:24,006 Train Loss: 0.0005413, Val Loss: 0.0006619 +2024-11-11 19:49:24,006 Epoch 1327/2000 +2024-11-11 19:49:38,500 Current Learning Rate: 0.0029424282 +2024-11-11 19:49:39,297 Train Loss: 0.0005771, Val Loss: 0.0006574 +2024-11-11 19:49:39,297 Epoch 1328/2000 +2024-11-11 19:49:53,818 Current Learning Rate: 0.0028711035 +2024-11-11 19:49:54,593 Train Loss: 0.0005383, Val Loss: 0.0006545 +2024-11-11 19:49:54,593 Epoch 1329/2000 +2024-11-11 19:50:09,038 Current Learning Rate: 0.0028003042 +2024-11-11 19:50:09,768 Train Loss: 0.0005376, Val Loss: 0.0006539 +2024-11-11 19:50:09,768 Epoch 1330/2000 +2024-11-11 19:50:24,475 Current Learning Rate: 0.0027300475 +2024-11-11 19:50:25,189 Train Loss: 0.0005080, Val Loss: 0.0006516 +2024-11-11 19:50:25,190 Epoch 1331/2000 +2024-11-11 19:50:40,046 Current Learning Rate: 0.0026603509 +2024-11-11 19:50:41,117 Train Loss: 0.0005167, Val Loss: 0.0006495 +2024-11-11 19:50:41,117 Epoch 1332/2000 +2024-11-11 19:50:56,357 Current Learning Rate: 0.0025912316 +2024-11-11 19:50:56,358 Train Loss: 0.0005841, Val Loss: 0.0006509 +2024-11-11 19:50:56,359 Epoch 1333/2000 +2024-11-11 19:51:13,217 Current Learning Rate: 0.0025227067 +2024-11-11 19:51:13,218 Train Loss: 0.0005029, Val Loss: 0.0006523 +2024-11-11 19:51:13,218 Epoch 1334/2000 +2024-11-11 19:51:28,576 Current Learning Rate: 0.0024547929 +2024-11-11 19:51:28,576 Train Loss: 0.0005073, Val Loss: 0.0006524 +2024-11-11 19:51:28,577 Epoch 1335/2000 +2024-11-11 19:51:44,982 Current Learning Rate: 0.0023875072 +2024-11-11 19:51:44,983 Train Loss: 0.0004979, Val Loss: 0.0006558 +2024-11-11 19:51:44,983 Epoch 1336/2000 +2024-11-11 19:52:01,889 Current Learning Rate: 0.0023208660 +2024-11-11 19:52:04,546 
Train Loss: 0.0005542, Val Loss: 0.0006464 +2024-11-11 19:52:04,546 Epoch 1337/2000 +2024-11-11 19:52:19,465 Current Learning Rate: 0.0022548859 +2024-11-11 19:52:20,550 Train Loss: 0.0005587, Val Loss: 0.0006404 +2024-11-11 19:52:20,550 Epoch 1338/2000 +2024-11-11 19:52:36,007 Current Learning Rate: 0.0021895831 +2024-11-11 19:52:36,770 Train Loss: 0.0005024, Val Loss: 0.0006403 +2024-11-11 19:52:36,771 Epoch 1339/2000 +2024-11-11 19:52:51,850 Current Learning Rate: 0.0021249737 +2024-11-11 19:52:52,825 Train Loss: 0.0005034, Val Loss: 0.0006401 +2024-11-11 19:52:52,826 Epoch 1340/2000 +2024-11-11 19:53:08,283 Current Learning Rate: 0.0020610737 +2024-11-11 19:53:09,365 Train Loss: 0.0004939, Val Loss: 0.0006363 +2024-11-11 19:53:09,365 Epoch 1341/2000 +2024-11-11 19:53:24,958 Current Learning Rate: 0.0019978989 +2024-11-11 19:53:25,737 Train Loss: 0.0004918, Val Loss: 0.0006351 +2024-11-11 19:53:25,738 Epoch 1342/2000 +2024-11-11 19:53:40,705 Current Learning Rate: 0.0019354647 +2024-11-11 19:53:41,502 Train Loss: 0.0005189, Val Loss: 0.0006343 +2024-11-11 19:53:41,502 Epoch 1343/2000 +2024-11-11 19:53:56,547 Current Learning Rate: 0.0018737867 +2024-11-11 19:53:57,385 Train Loss: 0.0005233, Val Loss: 0.0006332 +2024-11-11 19:53:57,385 Epoch 1344/2000 +2024-11-11 19:54:12,439 Current Learning Rate: 0.0018128801 +2024-11-11 19:54:13,530 Train Loss: 0.0005569, Val Loss: 0.0006325 +2024-11-11 19:54:13,530 Epoch 1345/2000 +2024-11-11 19:54:29,457 Current Learning Rate: 0.0017527598 +2024-11-11 19:54:30,228 Train Loss: 0.0004619, Val Loss: 0.0006304 +2024-11-11 19:54:30,229 Epoch 1346/2000 +2024-11-11 19:54:44,678 Current Learning Rate: 0.0016934407 +2024-11-11 19:54:45,428 Train Loss: 0.0004917, Val Loss: 0.0006289 +2024-11-11 19:54:45,429 Epoch 1347/2000 +2024-11-11 19:55:01,199 Current Learning Rate: 0.0016349374 +2024-11-11 19:55:01,200 Train Loss: 0.0005784, Val Loss: 0.0006300 +2024-11-11 19:55:01,200 Epoch 1348/2000 +2024-11-11 19:55:16,580 Current Learning 
Rate: 0.0015772645 +2024-11-11 19:55:17,277 Train Loss: 0.0005184, Val Loss: 0.0006288 +2024-11-11 19:55:17,277 Epoch 1349/2000 +2024-11-11 19:55:32,510 Current Learning Rate: 0.0015204360 +2024-11-11 19:55:33,224 Train Loss: 0.0005124, Val Loss: 0.0006275 +2024-11-11 19:55:33,224 Epoch 1350/2000 +2024-11-11 19:55:48,242 Current Learning Rate: 0.0014644661 +2024-11-11 19:55:48,968 Train Loss: 0.0005034, Val Loss: 0.0006267 +2024-11-11 19:55:48,968 Epoch 1351/2000 +2024-11-11 19:56:04,962 Current Learning Rate: 0.0014093685 +2024-11-11 19:56:04,963 Train Loss: 0.0006049, Val Loss: 0.0006274 +2024-11-11 19:56:04,963 Epoch 1352/2000 +2024-11-11 19:56:21,231 Current Learning Rate: 0.0013551569 +2024-11-11 19:56:22,015 Train Loss: 0.0005960, Val Loss: 0.0006250 +2024-11-11 19:56:22,016 Epoch 1353/2000 +2024-11-11 19:56:36,749 Current Learning Rate: 0.0013018445 +2024-11-11 19:56:37,543 Train Loss: 0.0004573, Val Loss: 0.0006241 +2024-11-11 19:56:37,544 Epoch 1354/2000 +2024-11-11 19:56:52,284 Current Learning Rate: 0.0012494447 +2024-11-11 19:56:53,035 Train Loss: 0.0004875, Val Loss: 0.0006239 +2024-11-11 19:56:53,035 Epoch 1355/2000 +2024-11-11 19:57:08,426 Current Learning Rate: 0.0011979702 +2024-11-11 19:57:09,263 Train Loss: 0.0004818, Val Loss: 0.0006237 +2024-11-11 19:57:09,264 Epoch 1356/2000 +2024-11-11 19:57:23,711 Current Learning Rate: 0.0011474338 +2024-11-11 19:57:24,519 Train Loss: 0.0004881, Val Loss: 0.0006232 +2024-11-11 19:57:24,520 Epoch 1357/2000 +2024-11-11 19:57:39,097 Current Learning Rate: 0.0010978480 +2024-11-11 19:57:39,965 Train Loss: 0.0004620, Val Loss: 0.0006225 +2024-11-11 19:57:39,966 Epoch 1358/2000 +2024-11-11 19:57:54,537 Current Learning Rate: 0.0010492249 +2024-11-11 19:57:54,538 Train Loss: 0.0005744, Val Loss: 0.0006229 +2024-11-11 19:57:54,538 Epoch 1359/2000 +2024-11-11 19:58:10,844 Current Learning Rate: 0.0010015767 +2024-11-11 19:58:11,585 Train Loss: 0.0004765, Val Loss: 0.0006223 +2024-11-11 19:58:11,585 Epoch 1360/2000 
+2024-11-11 19:58:26,382 Current Learning Rate: 0.0009549150 +2024-11-11 19:58:27,213 Train Loss: 0.0005791, Val Loss: 0.0006222 +2024-11-11 19:58:27,213 Epoch 1361/2000 +2024-11-11 19:58:43,103 Current Learning Rate: 0.0009092514 +2024-11-11 19:58:43,998 Train Loss: 0.0004861, Val Loss: 0.0006207 +2024-11-11 19:58:43,998 Epoch 1362/2000 +2024-11-11 19:58:59,476 Current Learning Rate: 0.0008645971 +2024-11-11 19:59:00,494 Train Loss: 0.0005046, Val Loss: 0.0006202 +2024-11-11 19:59:00,495 Epoch 1363/2000 +2024-11-11 19:59:16,049 Current Learning Rate: 0.0008209632 +2024-11-11 19:59:16,786 Train Loss: 0.0005233, Val Loss: 0.0006189 +2024-11-11 19:59:16,786 Epoch 1364/2000 +2024-11-11 19:59:31,459 Current Learning Rate: 0.0007783604 +2024-11-11 19:59:31,460 Train Loss: 0.0005546, Val Loss: 0.0006190 +2024-11-11 19:59:31,461 Epoch 1365/2000 +2024-11-11 19:59:47,761 Current Learning Rate: 0.0007367992 +2024-11-11 19:59:48,511 Train Loss: 0.0004817, Val Loss: 0.0006182 +2024-11-11 19:59:48,512 Epoch 1366/2000 +2024-11-11 20:00:04,295 Current Learning Rate: 0.0006962899 +2024-11-11 20:00:05,153 Train Loss: 0.0005376, Val Loss: 0.0006179 +2024-11-11 20:00:05,153 Epoch 1367/2000 +2024-11-11 20:00:20,427 Current Learning Rate: 0.0006568424 +2024-11-11 20:00:21,418 Train Loss: 0.0005212, Val Loss: 0.0006174 +2024-11-11 20:00:21,418 Epoch 1368/2000 +2024-11-11 20:00:37,084 Current Learning Rate: 0.0006184666 +2024-11-11 20:00:38,080 Train Loss: 0.0005217, Val Loss: 0.0006170 +2024-11-11 20:00:38,080 Epoch 1369/2000 +2024-11-11 20:00:54,110 Current Learning Rate: 0.0005811718 +2024-11-11 20:00:55,156 Train Loss: 0.0004690, Val Loss: 0.0006166 +2024-11-11 20:00:55,157 Epoch 1370/2000 +2024-11-11 20:01:10,897 Current Learning Rate: 0.0005449674 +2024-11-11 20:01:11,730 Train Loss: 0.0004932, Val Loss: 0.0006164 +2024-11-11 20:01:11,730 Epoch 1371/2000 +2024-11-11 20:01:27,625 Current Learning Rate: 0.0005098621 +2024-11-11 20:01:28,422 Train Loss: 0.0005213, Val Loss: 0.0006162 
+2024-11-11 20:01:28,423 Epoch 1372/2000 +2024-11-11 20:01:43,386 Current Learning Rate: 0.0004758647 +2024-11-11 20:01:44,315 Train Loss: 0.0004857, Val Loss: 0.0006157 +2024-11-11 20:01:44,315 Epoch 1373/2000 +2024-11-11 20:02:00,508 Current Learning Rate: 0.0004429836 +2024-11-11 20:02:00,509 Train Loss: 0.0005455, Val Loss: 0.0006157 +2024-11-11 20:02:00,509 Epoch 1374/2000 +2024-11-11 20:02:16,044 Current Learning Rate: 0.0004112269 +2024-11-11 20:02:16,879 Train Loss: 0.0004801, Val Loss: 0.0006153 +2024-11-11 20:02:16,880 Epoch 1375/2000 +2024-11-11 20:02:32,406 Current Learning Rate: 0.0003806023 +2024-11-11 20:02:32,407 Train Loss: 0.0005578, Val Loss: 0.0006154 +2024-11-11 20:02:32,408 Epoch 1376/2000 +2024-11-11 20:02:48,159 Current Learning Rate: 0.0003511176 +2024-11-11 20:02:48,957 Train Loss: 0.0004987, Val Loss: 0.0006150 +2024-11-11 20:02:48,957 Epoch 1377/2000 +2024-11-11 20:03:04,027 Current Learning Rate: 0.0003227798 +2024-11-11 20:03:04,849 Train Loss: 0.0004501, Val Loss: 0.0006145 +2024-11-11 20:03:04,849 Epoch 1378/2000 +2024-11-11 20:03:19,727 Current Learning Rate: 0.0002955962 +2024-11-11 20:03:20,514 Train Loss: 0.0005302, Val Loss: 0.0006143 +2024-11-11 20:03:20,515 Epoch 1379/2000 +2024-11-11 20:03:35,539 Current Learning Rate: 0.0002695732 +2024-11-11 20:03:35,540 Train Loss: 0.0005470, Val Loss: 0.0006144 +2024-11-11 20:03:35,540 Epoch 1380/2000 +2024-11-11 20:03:52,709 Current Learning Rate: 0.0002447174 +2024-11-11 20:03:53,559 Train Loss: 0.0004807, Val Loss: 0.0006140 +2024-11-11 20:03:53,559 Epoch 1381/2000 +2024-11-11 20:04:08,275 Current Learning Rate: 0.0002210349 +2024-11-11 20:04:09,009 Train Loss: 0.0004747, Val Loss: 0.0006140 +2024-11-11 20:04:09,010 Epoch 1382/2000 +2024-11-11 20:04:25,007 Current Learning Rate: 0.0001985316 +2024-11-11 20:04:25,809 Train Loss: 0.0004527, Val Loss: 0.0006137 +2024-11-11 20:04:25,810 Epoch 1383/2000 +2024-11-11 20:04:41,164 Current Learning Rate: 0.0001772129 +2024-11-11 20:04:42,001 
Train Loss: 0.0004655, Val Loss: 0.0006135 +2024-11-11 20:04:42,001 Epoch 1384/2000 +2024-11-11 20:04:57,280 Current Learning Rate: 0.0001570842 +2024-11-11 20:04:58,004 Train Loss: 0.0005277, Val Loss: 0.0006132 +2024-11-11 20:04:58,004 Epoch 1385/2000 +2024-11-11 20:05:13,753 Current Learning Rate: 0.0001381504 +2024-11-11 20:05:14,449 Train Loss: 0.0005367, Val Loss: 0.0006131 +2024-11-11 20:05:14,450 Epoch 1386/2000 +2024-11-11 20:05:30,359 Current Learning Rate: 0.0001204162 +2024-11-11 20:05:31,109 Train Loss: 0.0004735, Val Loss: 0.0006130 +2024-11-11 20:05:31,109 Epoch 1387/2000 +2024-11-11 20:05:46,185 Current Learning Rate: 0.0001038859 +2024-11-11 20:05:46,905 Train Loss: 0.0005416, Val Loss: 0.0006130 +2024-11-11 20:05:46,905 Epoch 1388/2000 +2024-11-11 20:06:01,720 Current Learning Rate: 0.0000885637 +2024-11-11 20:06:04,291 Train Loss: 0.0005546, Val Loss: 0.0006130 +2024-11-11 20:06:04,291 Epoch 1389/2000 +2024-11-11 20:06:18,474 Current Learning Rate: 0.0000744534 +2024-11-11 20:06:19,201 Train Loss: 0.0004698, Val Loss: 0.0006130 +2024-11-11 20:06:19,201 Epoch 1390/2000 +2024-11-11 20:06:34,181 Current Learning Rate: 0.0000615583 +2024-11-11 20:06:34,780 Train Loss: 0.0005111, Val Loss: 0.0006129 +2024-11-11 20:06:34,780 Epoch 1391/2000 +2024-11-11 20:06:49,845 Current Learning Rate: 0.0000498817 +2024-11-11 20:06:49,846 Train Loss: 0.0005220, Val Loss: 0.0006129 +2024-11-11 20:06:49,846 Epoch 1392/2000 +2024-11-11 20:07:05,676 Current Learning Rate: 0.0000394265 +2024-11-11 20:07:06,525 Train Loss: 0.0005255, Val Loss: 0.0006129 +2024-11-11 20:07:06,526 Epoch 1393/2000 +2024-11-11 20:07:21,837 Current Learning Rate: 0.0000301952 +2024-11-11 20:07:21,838 Train Loss: 0.0004672, Val Loss: 0.0006129 +2024-11-11 20:07:21,839 Epoch 1394/2000 +2024-11-11 20:07:37,788 Current Learning Rate: 0.0000221902 +2024-11-11 20:07:38,536 Train Loss: 0.0004949, Val Loss: 0.0006128 +2024-11-11 20:07:38,537 Epoch 1395/2000 +2024-11-11 20:07:53,896 Current Learning 
Rate: 0.0000154133 +2024-11-11 20:07:53,897 Train Loss: 0.0005009, Val Loss: 0.0006128 +2024-11-11 20:07:53,897 Epoch 1396/2000 +2024-11-11 20:08:10,114 Current Learning Rate: 0.0000098664 +2024-11-11 20:08:10,896 Train Loss: 0.0004732, Val Loss: 0.0006128 +2024-11-11 20:08:10,896 Epoch 1397/2000 +2024-11-11 20:08:25,236 Current Learning Rate: 0.0000055506 +2024-11-11 20:08:25,237 Train Loss: 0.0005249, Val Loss: 0.0006128 +2024-11-11 20:08:25,237 Epoch 1398/2000 +2024-11-11 20:08:41,681 Current Learning Rate: 0.0000024672 +2024-11-11 20:08:42,420 Train Loss: 0.0004442, Val Loss: 0.0006128 +2024-11-11 20:08:42,420 Epoch 1399/2000 +2024-11-11 20:08:57,838 Current Learning Rate: 0.0000006168 +2024-11-11 20:08:58,618 Train Loss: 0.0004666, Val Loss: 0.0006128 +2024-11-11 20:08:58,618 Epoch 1400/2000 +2024-11-11 20:09:14,324 Current Learning Rate: 0.0000000000 +2024-11-11 20:09:14,325 Train Loss: 0.0005253, Val Loss: 0.0006129 +2024-11-11 20:09:14,325 Epoch 1401/2000 +2024-11-11 20:09:31,446 Current Learning Rate: 0.0000006168 +2024-11-11 20:09:31,447 Train Loss: 0.0005096, Val Loss: 0.0006128 +2024-11-11 20:09:31,447 Epoch 1402/2000 +2024-11-11 20:09:48,001 Current Learning Rate: 0.0000024672 +2024-11-11 20:09:48,002 Train Loss: 0.0004710, Val Loss: 0.0006128 +2024-11-11 20:09:48,003 Epoch 1403/2000 +2024-11-11 20:10:05,157 Current Learning Rate: 0.0000055506 +2024-11-11 20:10:05,158 Train Loss: 0.0004903, Val Loss: 0.0006128 +2024-11-11 20:10:05,158 Epoch 1404/2000 +2024-11-11 20:10:21,151 Current Learning Rate: 0.0000098664 +2024-11-11 20:10:21,152 Train Loss: 0.0004696, Val Loss: 0.0006128 +2024-11-11 20:10:21,152 Epoch 1405/2000 +2024-11-11 20:10:37,562 Current Learning Rate: 0.0000154133 +2024-11-11 20:10:37,563 Train Loss: 0.0005107, Val Loss: 0.0006128 +2024-11-11 20:10:37,563 Epoch 1406/2000 +2024-11-11 20:10:53,670 Current Learning Rate: 0.0000221902 +2024-11-11 20:10:54,735 Train Loss: 0.0004813, Val Loss: 0.0006128 +2024-11-11 20:10:54,736 Epoch 1407/2000 
+2024-11-11 20:11:10,847 Current Learning Rate: 0.0000301952 +2024-11-11 20:11:10,848 Train Loss: 0.0005241, Val Loss: 0.0006128 +2024-11-11 20:11:10,848 Epoch 1408/2000 +2024-11-11 20:11:27,126 Current Learning Rate: 0.0000394265 +2024-11-11 20:11:27,127 Train Loss: 0.0004872, Val Loss: 0.0006128 +2024-11-11 20:11:27,127 Epoch 1409/2000 +2024-11-11 20:11:42,746 Current Learning Rate: 0.0000498817 +2024-11-11 20:11:42,747 Train Loss: 0.0004862, Val Loss: 0.0006128 +2024-11-11 20:11:42,747 Epoch 1410/2000 +2024-11-11 20:11:58,066 Current Learning Rate: 0.0000615583 +2024-11-11 20:11:58,066 Train Loss: 0.0004675, Val Loss: 0.0006128 +2024-11-11 20:11:58,066 Epoch 1411/2000 +2024-11-11 20:12:13,442 Current Learning Rate: 0.0000744534 +2024-11-11 20:12:13,443 Train Loss: 0.0004975, Val Loss: 0.0006129 +2024-11-11 20:12:13,443 Epoch 1412/2000 +2024-11-11 20:12:28,781 Current Learning Rate: 0.0000885637 +2024-11-11 20:12:29,566 Train Loss: 0.0004550, Val Loss: 0.0006127 +2024-11-11 20:12:29,567 Epoch 1413/2000 +2024-11-11 20:12:44,260 Current Learning Rate: 0.0001038859 +2024-11-11 20:12:44,261 Train Loss: 0.0005011, Val Loss: 0.0006128 +2024-11-11 20:12:44,261 Epoch 1414/2000 +2024-11-11 20:12:59,904 Current Learning Rate: 0.0001204162 +2024-11-11 20:12:59,904 Train Loss: 0.0005198, Val Loss: 0.0006129 +2024-11-11 20:12:59,904 Epoch 1415/2000 +2024-11-11 20:13:15,146 Current Learning Rate: 0.0001381504 +2024-11-11 20:13:15,147 Train Loss: 0.0004447, Val Loss: 0.0006128 +2024-11-11 20:13:15,147 Epoch 1416/2000 +2024-11-11 20:13:30,686 Current Learning Rate: 0.0001570842 +2024-11-11 20:13:30,687 Train Loss: 0.0004449, Val Loss: 0.0006128 +2024-11-11 20:13:30,687 Epoch 1417/2000 +2024-11-11 20:13:46,706 Current Learning Rate: 0.0001772129 +2024-11-11 20:13:46,707 Train Loss: 0.0005148, Val Loss: 0.0006130 +2024-11-11 20:13:46,707 Epoch 1418/2000 +2024-11-11 20:14:03,613 Current Learning Rate: 0.0001985316 +2024-11-11 20:14:03,613 Train Loss: 0.0004517, Val Loss: 0.0006130 
+2024-11-11 20:14:03,614 Epoch 1419/2000 +2024-11-11 20:14:19,892 Current Learning Rate: 0.0002210349 +2024-11-11 20:14:19,892 Train Loss: 0.0004902, Val Loss: 0.0006130 +2024-11-11 20:14:19,893 Epoch 1420/2000 +2024-11-11 20:14:36,221 Current Learning Rate: 0.0002447174 +2024-11-11 20:14:36,222 Train Loss: 0.0004996, Val Loss: 0.0006131 +2024-11-11 20:14:36,222 Epoch 1421/2000 +2024-11-11 20:14:52,817 Current Learning Rate: 0.0002695732 +2024-11-11 20:14:52,817 Train Loss: 0.0005237, Val Loss: 0.0006131 +2024-11-11 20:14:52,818 Epoch 1422/2000 +2024-11-11 20:15:08,077 Current Learning Rate: 0.0002955962 +2024-11-11 20:15:08,077 Train Loss: 0.0004482, Val Loss: 0.0006132 +2024-11-11 20:15:08,078 Epoch 1423/2000 +2024-11-11 20:15:24,080 Current Learning Rate: 0.0003227798 +2024-11-11 20:15:24,080 Train Loss: 0.0004680, Val Loss: 0.0006132 +2024-11-11 20:15:24,080 Epoch 1424/2000 +2024-11-11 20:15:40,211 Current Learning Rate: 0.0003511176 +2024-11-11 20:15:40,211 Train Loss: 0.0005434, Val Loss: 0.0006135 +2024-11-11 20:15:40,211 Epoch 1425/2000 +2024-11-11 20:15:56,512 Current Learning Rate: 0.0003806023 +2024-11-11 20:15:56,512 Train Loss: 0.0005129, Val Loss: 0.0006135 +2024-11-11 20:15:56,513 Epoch 1426/2000 +2024-11-11 20:16:12,728 Current Learning Rate: 0.0004112269 +2024-11-11 20:16:12,728 Train Loss: 0.0005002, Val Loss: 0.0006136 +2024-11-11 20:16:12,728 Epoch 1427/2000 +2024-11-11 20:16:28,624 Current Learning Rate: 0.0004429836 +2024-11-11 20:16:28,624 Train Loss: 0.0005272, Val Loss: 0.0006138 +2024-11-11 20:16:28,624 Epoch 1428/2000 +2024-11-11 20:16:44,393 Current Learning Rate: 0.0004758647 +2024-11-11 20:16:44,394 Train Loss: 0.0004807, Val Loss: 0.0006142 +2024-11-11 20:16:44,394 Epoch 1429/2000 +2024-11-11 20:16:59,941 Current Learning Rate: 0.0005098621 +2024-11-11 20:16:59,941 Train Loss: 0.0004729, Val Loss: 0.0006140 +2024-11-11 20:16:59,941 Epoch 1430/2000 +2024-11-11 20:17:15,698 Current Learning Rate: 0.0005449674 +2024-11-11 20:17:15,699 
Train Loss: 0.0004769, Val Loss: 0.0006139 +2024-11-11 20:17:15,699 Epoch 1431/2000 +2024-11-11 20:17:31,241 Current Learning Rate: 0.0005811718 +2024-11-11 20:17:31,242 Train Loss: 0.0005230, Val Loss: 0.0006142 +2024-11-11 20:17:31,242 Epoch 1432/2000 +2024-11-11 20:17:46,743 Current Learning Rate: 0.0006184666 +2024-11-11 20:17:46,743 Train Loss: 0.0005331, Val Loss: 0.0006143 +2024-11-11 20:17:46,743 Epoch 1433/2000 +2024-11-11 20:18:02,340 Current Learning Rate: 0.0006568424 +2024-11-11 20:18:02,340 Train Loss: 0.0004917, Val Loss: 0.0006143 +2024-11-11 20:18:02,340 Epoch 1434/2000 +2024-11-11 20:18:17,752 Current Learning Rate: 0.0006962899 +2024-11-11 20:18:17,753 Train Loss: 0.0004734, Val Loss: 0.0006141 +2024-11-11 20:18:17,753 Epoch 1435/2000 +2024-11-11 20:18:33,356 Current Learning Rate: 0.0007367992 +2024-11-11 20:18:33,356 Train Loss: 0.0004983, Val Loss: 0.0006142 +2024-11-11 20:18:33,357 Epoch 1436/2000 +2024-11-11 20:18:49,180 Current Learning Rate: 0.0007783604 +2024-11-11 20:18:49,181 Train Loss: 0.0005069, Val Loss: 0.0006153 +2024-11-11 20:18:49,181 Epoch 1437/2000 +2024-11-11 20:19:04,667 Current Learning Rate: 0.0008209632 +2024-11-11 20:19:04,668 Train Loss: 0.0004598, Val Loss: 0.0006165 +2024-11-11 20:19:04,668 Epoch 1438/2000 +2024-11-11 20:19:21,372 Current Learning Rate: 0.0008645971 +2024-11-11 20:19:21,373 Train Loss: 0.0005561, Val Loss: 0.0006181 +2024-11-11 20:19:21,374 Epoch 1439/2000 +2024-11-11 20:19:37,910 Current Learning Rate: 0.0009092514 +2024-11-11 20:19:37,911 Train Loss: 0.0004470, Val Loss: 0.0006189 +2024-11-11 20:19:37,911 Epoch 1440/2000 +2024-11-11 20:19:53,435 Current Learning Rate: 0.0009549150 +2024-11-11 20:19:53,436 Train Loss: 0.0004760, Val Loss: 0.0006184 +2024-11-11 20:19:53,436 Epoch 1441/2000 +2024-11-11 20:20:09,808 Current Learning Rate: 0.0010015767 +2024-11-11 20:20:09,809 Train Loss: 0.0005692, Val Loss: 0.0006224 +2024-11-11 20:20:09,809 Epoch 1442/2000 +2024-11-11 20:20:25,028 Current Learning 
Rate: 0.0010492249 +2024-11-11 20:20:25,029 Train Loss: 0.0004826, Val Loss: 0.0006208 +2024-11-11 20:20:25,029 Epoch 1443/2000 +2024-11-11 20:20:40,772 Current Learning Rate: 0.0010978480 +2024-11-11 20:20:40,773 Train Loss: 0.0005237, Val Loss: 0.0006214 +2024-11-11 20:20:40,773 Epoch 1444/2000 +2024-11-11 20:20:56,856 Current Learning Rate: 0.0011474338 +2024-11-11 20:20:56,856 Train Loss: 0.0005004, Val Loss: 0.0006202 +2024-11-11 20:20:56,857 Epoch 1445/2000 +2024-11-11 20:21:12,767 Current Learning Rate: 0.0011979702 +2024-11-11 20:21:12,768 Train Loss: 0.0005248, Val Loss: 0.0006198 +2024-11-11 20:21:12,769 Epoch 1446/2000 +2024-11-11 20:21:28,844 Current Learning Rate: 0.0012494447 +2024-11-11 20:21:28,844 Train Loss: 0.0005265, Val Loss: 0.0006207 +2024-11-11 20:21:28,844 Epoch 1447/2000 +2024-11-11 20:21:44,071 Current Learning Rate: 0.0013018445 +2024-11-11 20:21:44,071 Train Loss: 0.0004981, Val Loss: 0.0006168 +2024-11-11 20:21:44,071 Epoch 1448/2000 +2024-11-11 20:21:59,511 Current Learning Rate: 0.0013551569 +2024-11-11 20:21:59,512 Train Loss: 0.0004470, Val Loss: 0.0006150 +2024-11-11 20:21:59,512 Epoch 1449/2000 +2024-11-11 20:22:15,216 Current Learning Rate: 0.0014093685 +2024-11-11 20:22:15,217 Train Loss: 0.0004914, Val Loss: 0.0006155 +2024-11-11 20:22:15,217 Epoch 1450/2000 +2024-11-11 20:22:30,529 Current Learning Rate: 0.0014644661 +2024-11-11 20:22:30,529 Train Loss: 0.0005055, Val Loss: 0.0006161 +2024-11-11 20:22:30,529 Epoch 1451/2000 +2024-11-11 20:22:46,394 Current Learning Rate: 0.0015204360 +2024-11-11 20:22:46,394 Train Loss: 0.0005414, Val Loss: 0.0006216 +2024-11-11 20:22:46,395 Epoch 1452/2000 +2024-11-11 20:23:02,337 Current Learning Rate: 0.0015772645 +2024-11-11 20:23:02,338 Train Loss: 0.0004972, Val Loss: 0.0006162 +2024-11-11 20:23:02,338 Epoch 1453/2000 +2024-11-11 20:23:18,306 Current Learning Rate: 0.0016349374 +2024-11-11 20:23:18,306 Train Loss: 0.0004740, Val Loss: 0.0006138 +2024-11-11 20:23:18,306 Epoch 1454/2000 
+2024-11-11 20:23:33,983 Current Learning Rate: 0.0016934407 +2024-11-11 20:23:33,983 Train Loss: 0.0004649, Val Loss: 0.0006135 +2024-11-11 20:23:33,984 Epoch 1455/2000 +2024-11-11 20:23:50,081 Current Learning Rate: 0.0017527598 +2024-11-11 20:23:50,082 Train Loss: 0.0005251, Val Loss: 0.0006147 +2024-11-11 20:23:50,082 Epoch 1456/2000 +2024-11-11 20:24:05,610 Current Learning Rate: 0.0018128801 +2024-11-11 20:24:05,611 Train Loss: 0.0004668, Val Loss: 0.0006150 +2024-11-11 20:24:05,611 Epoch 1457/2000 +2024-11-11 20:24:21,303 Current Learning Rate: 0.0018737867 +2024-11-11 20:24:21,303 Train Loss: 0.0004783, Val Loss: 0.0006162 +2024-11-11 20:24:21,304 Epoch 1458/2000 +2024-11-11 20:24:37,834 Current Learning Rate: 0.0019354647 +2024-11-11 20:24:37,834 Train Loss: 0.0004964, Val Loss: 0.0006145 +2024-11-11 20:24:37,835 Epoch 1459/2000 +2024-11-11 20:24:53,164 Current Learning Rate: 0.0019978989 +2024-11-11 20:24:53,164 Train Loss: 0.0005357, Val Loss: 0.0006147 +2024-11-11 20:24:53,165 Epoch 1460/2000 +2024-11-11 20:25:09,191 Current Learning Rate: 0.0020610737 +2024-11-11 20:25:09,192 Train Loss: 0.0004967, Val Loss: 0.0006198 +2024-11-11 20:25:09,192 Epoch 1461/2000 +2024-11-11 20:25:24,449 Current Learning Rate: 0.0021249737 +2024-11-11 20:25:24,450 Train Loss: 0.0004676, Val Loss: 0.0006166 +2024-11-11 20:25:24,450 Epoch 1462/2000 +2024-11-11 20:25:40,590 Current Learning Rate: 0.0021895831 +2024-11-11 20:25:40,590 Train Loss: 0.0005227, Val Loss: 0.0006166 +2024-11-11 20:25:40,590 Epoch 1463/2000 +2024-11-11 20:25:55,846 Current Learning Rate: 0.0022548859 +2024-11-11 20:25:55,846 Train Loss: 0.0004782, Val Loss: 0.0006176 +2024-11-11 20:25:55,847 Epoch 1464/2000 +2024-11-11 20:26:11,954 Current Learning Rate: 0.0023208660 +2024-11-11 20:26:11,955 Train Loss: 0.0004753, Val Loss: 0.0006173 +2024-11-11 20:26:11,955 Epoch 1465/2000 +2024-11-11 20:26:27,909 Current Learning Rate: 0.0023875072 +2024-11-11 20:26:27,909 Train Loss: 0.0005405, Val Loss: 0.0006557 
+2024-11-11 20:26:27,909 Epoch 1466/2000 +2024-11-11 20:26:43,062 Current Learning Rate: 0.0024547929 +2024-11-11 20:26:43,062 Train Loss: 0.0004659, Val Loss: 0.0006196 +2024-11-11 20:26:43,062 Epoch 1467/2000 +2024-11-11 20:26:59,145 Current Learning Rate: 0.0025227067 +2024-11-11 20:26:59,146 Train Loss: 0.0005794, Val Loss: 0.0006680 +2024-11-11 20:26:59,146 Epoch 1468/2000 +2024-11-11 20:27:15,435 Current Learning Rate: 0.0025912316 +2024-11-11 20:27:15,435 Train Loss: 0.0005062, Val Loss: 0.0006347 +2024-11-11 20:27:15,435 Epoch 1469/2000 +2024-11-11 20:27:31,261 Current Learning Rate: 0.0026603509 +2024-11-11 20:27:31,262 Train Loss: 0.0004669, Val Loss: 0.0006229 +2024-11-11 20:27:31,262 Epoch 1470/2000 +2024-11-11 20:27:47,063 Current Learning Rate: 0.0027300475 +2024-11-11 20:27:47,064 Train Loss: 0.0005142, Val Loss: 0.0006265 +2024-11-11 20:27:47,064 Epoch 1471/2000 +2024-11-11 20:28:02,366 Current Learning Rate: 0.0028003042 +2024-11-11 20:28:02,367 Train Loss: 0.0005021, Val Loss: 0.0006402 +2024-11-11 20:28:02,367 Epoch 1472/2000 +2024-11-11 20:28:18,150 Current Learning Rate: 0.0028711035 +2024-11-11 20:28:18,150 Train Loss: 0.0004934, Val Loss: 0.0006229 +2024-11-11 20:28:18,150 Epoch 1473/2000 +2024-11-11 20:28:33,929 Current Learning Rate: 0.0029424282 +2024-11-11 20:28:33,929 Train Loss: 0.0004801, Val Loss: 0.0006213 +2024-11-11 20:28:33,930 Epoch 1474/2000 +2024-11-11 20:28:49,658 Current Learning Rate: 0.0030142605 +2024-11-11 20:28:49,659 Train Loss: 0.0005335, Val Loss: 0.0006207 +2024-11-11 20:28:49,659 Epoch 1475/2000 +2024-11-11 20:29:05,508 Current Learning Rate: 0.0030865828 +2024-11-11 20:29:05,509 Train Loss: 0.0005787, Val Loss: 0.0007002 +2024-11-11 20:29:05,509 Epoch 1476/2000 +2024-11-11 20:29:21,614 Current Learning Rate: 0.0031593772 +2024-11-11 20:29:21,614 Train Loss: 0.0005273, Val Loss: 0.0006420 +2024-11-11 20:29:21,615 Epoch 1477/2000 +2024-11-11 20:29:37,422 Current Learning Rate: 0.0032326258 +2024-11-11 20:29:37,422 
Train Loss: 0.0005064, Val Loss: 0.0006591 +2024-11-11 20:29:37,422 Epoch 1478/2000 +2024-11-11 20:29:54,498 Current Learning Rate: 0.0033063104 +2024-11-11 20:29:54,498 Train Loss: 0.0005649, Val Loss: 0.0006485 +2024-11-11 20:29:54,499 Epoch 1479/2000 +2024-11-11 20:30:10,689 Current Learning Rate: 0.0033804129 +2024-11-11 20:30:10,690 Train Loss: 0.0005134, Val Loss: 0.0006413 +2024-11-11 20:30:10,690 Epoch 1480/2000 +2024-11-11 20:30:26,778 Current Learning Rate: 0.0034549150 +2024-11-11 20:30:26,778 Train Loss: 0.0006216, Val Loss: 0.0006586 +2024-11-11 20:30:26,778 Epoch 1481/2000 +2024-11-11 20:30:42,425 Current Learning Rate: 0.0035297984 +2024-11-11 20:30:42,425 Train Loss: 0.0005311, Val Loss: 0.0006958 +2024-11-11 20:30:42,425 Epoch 1482/2000 +2024-11-11 20:30:57,934 Current Learning Rate: 0.0036050445 +2024-11-11 20:30:57,935 Train Loss: 0.0005394, Val Loss: 0.0006506 +2024-11-11 20:30:57,935 Epoch 1483/2000 +2024-11-11 20:31:14,981 Current Learning Rate: 0.0036806348 +2024-11-11 20:31:14,981 Train Loss: 0.0005059, Val Loss: 0.0006382 +2024-11-11 20:31:14,982 Epoch 1484/2000 +2024-11-11 20:31:30,773 Current Learning Rate: 0.0037565506 +2024-11-11 20:31:30,773 Train Loss: 0.0005005, Val Loss: 0.0006381 +2024-11-11 20:31:30,773 Epoch 1485/2000 +2024-11-11 20:31:46,635 Current Learning Rate: 0.0038327732 +2024-11-11 20:31:46,635 Train Loss: 0.0005609, Val Loss: 0.0006412 +2024-11-11 20:31:46,636 Epoch 1486/2000 +2024-11-11 20:32:03,296 Current Learning Rate: 0.0039092838 +2024-11-11 20:32:03,297 Train Loss: 0.0006931, Val Loss: 0.0006978 +2024-11-11 20:32:03,297 Epoch 1487/2000 +2024-11-11 20:32:20,023 Current Learning Rate: 0.0039860635 +2024-11-11 20:32:20,023 Train Loss: 0.0005532, Val Loss: 0.0006877 +2024-11-11 20:32:20,023 Epoch 1488/2000 +2024-11-11 20:32:35,457 Current Learning Rate: 0.0040630934 +2024-11-11 20:32:35,457 Train Loss: 0.0005985, Val Loss: 0.0006451 +2024-11-11 20:32:35,458 Epoch 1489/2000 +2024-11-11 20:32:51,293 Current Learning 
Rate: 0.0041403545 +2024-11-11 20:32:51,293 Train Loss: 0.0005420, Val Loss: 0.0006564 +2024-11-11 20:32:51,293 Epoch 1490/2000 +2024-11-11 20:33:08,048 Current Learning Rate: 0.0042178277 +2024-11-11 20:33:08,048 Train Loss: 0.0006146, Val Loss: 0.0006611 +2024-11-11 20:33:08,048 Epoch 1491/2000 +2024-11-11 20:33:24,755 Current Learning Rate: 0.0042954938 +2024-11-11 20:33:24,756 Train Loss: 0.0005250, Val Loss: 0.0006514 +2024-11-11 20:33:24,756 Epoch 1492/2000 +2024-11-11 20:33:41,304 Current Learning Rate: 0.0043733338 +2024-11-11 20:33:41,305 Train Loss: 0.0004744, Val Loss: 0.0006579 +2024-11-11 20:33:41,305 Epoch 1493/2000 +2024-11-11 20:33:58,195 Current Learning Rate: 0.0044513284 +2024-11-11 20:33:58,196 Train Loss: 0.0005280, Val Loss: 0.0006560 +2024-11-11 20:33:58,196 Epoch 1494/2000 +2024-11-11 20:34:13,641 Current Learning Rate: 0.0045294584 +2024-11-11 20:34:13,641 Train Loss: 0.0004900, Val Loss: 0.0006516 +2024-11-11 20:34:13,641 Epoch 1495/2000 +2024-11-11 20:34:30,156 Current Learning Rate: 0.0046077045 +2024-11-11 20:34:30,157 Train Loss: 0.0007456, Val Loss: 0.0007136 +2024-11-11 20:34:30,157 Epoch 1496/2000 +2024-11-11 20:34:46,606 Current Learning Rate: 0.0046860474 +2024-11-11 20:34:46,606 Train Loss: 0.0005671, Val Loss: 0.0006747 +2024-11-11 20:34:46,606 Epoch 1497/2000 +2024-11-11 20:35:02,765 Current Learning Rate: 0.0047644677 +2024-11-11 20:35:02,765 Train Loss: 0.0006108, Val Loss: 0.0007261 +2024-11-11 20:35:02,765 Epoch 1498/2000 +2024-11-11 20:35:19,650 Current Learning Rate: 0.0048429462 +2024-11-11 20:35:19,650 Train Loss: 0.0006097, Val Loss: 0.0006788 +2024-11-11 20:35:19,651 Epoch 1499/2000 +2024-11-11 20:35:36,023 Current Learning Rate: 0.0049214634 +2024-11-11 20:35:36,023 Train Loss: 0.0005545, Val Loss: 0.0006714 +2024-11-11 20:35:36,023 Epoch 1500/2000 +2024-11-11 20:35:52,077 Current Learning Rate: 0.0050000000 +2024-11-11 20:35:52,077 Train Loss: 0.0005661, Val Loss: 0.0006809 +2024-11-11 20:35:52,077 Epoch 1501/2000 
+2024-11-11 20:36:08,282 Current Learning Rate: 0.0050785366 +2024-11-11 20:36:08,282 Train Loss: 0.0005054, Val Loss: 0.0006400 +2024-11-11 20:36:08,282 Epoch 1502/2000 +2024-11-11 20:36:24,742 Current Learning Rate: 0.0051570538 +2024-11-11 20:36:24,742 Train Loss: 0.0005751, Val Loss: 0.0006711 +2024-11-11 20:36:24,742 Epoch 1503/2000 +2024-11-11 20:36:39,922 Current Learning Rate: 0.0052355323 +2024-11-11 20:36:39,923 Train Loss: 0.0005303, Val Loss: 0.0006609 +2024-11-11 20:36:39,923 Epoch 1504/2000 +2024-11-11 20:36:55,760 Current Learning Rate: 0.0053139526 +2024-11-11 20:36:55,760 Train Loss: 0.0005221, Val Loss: 0.0006698 +2024-11-11 20:36:55,761 Epoch 1505/2000 +2024-11-11 20:37:11,953 Current Learning Rate: 0.0053922955 +2024-11-11 20:37:11,954 Train Loss: 0.0005451, Val Loss: 0.0007014 +2024-11-11 20:37:11,954 Epoch 1506/2000 +2024-11-11 20:37:27,969 Current Learning Rate: 0.0054705416 +2024-11-11 20:37:27,970 Train Loss: 0.0006498, Val Loss: 0.0007386 +2024-11-11 20:37:27,970 Epoch 1507/2000 +2024-11-11 20:37:43,495 Current Learning Rate: 0.0055486716 +2024-11-11 20:37:43,495 Train Loss: 0.0005905, Val Loss: 0.0006920 +2024-11-11 20:37:43,495 Epoch 1508/2000 +2024-11-11 20:38:00,272 Current Learning Rate: 0.0056266662 +2024-11-11 20:38:00,272 Train Loss: 0.0005553, Val Loss: 0.0006715 +2024-11-11 20:38:00,272 Epoch 1509/2000 +2024-11-11 20:38:16,144 Current Learning Rate: 0.0057045062 +2024-11-11 20:38:16,144 Train Loss: 0.0005249, Val Loss: 0.0006635 +2024-11-11 20:38:16,144 Epoch 1510/2000 +2024-11-11 20:38:32,382 Current Learning Rate: 0.0057821723 +2024-11-11 20:38:32,382 Train Loss: 0.0006379, Val Loss: 0.0007383 +2024-11-11 20:38:32,382 Epoch 1511/2000 +2024-11-11 20:38:47,553 Current Learning Rate: 0.0058596455 +2024-11-11 20:38:47,553 Train Loss: 0.0006241, Val Loss: 0.0007068 +2024-11-11 20:38:47,554 Epoch 1512/2000 +2024-11-11 20:39:03,014 Current Learning Rate: 0.0059369066 +2024-11-11 20:39:03,015 Train Loss: 0.0005958, Val Loss: 0.0006902 
+2024-11-11 20:39:03,015 Epoch 1513/2000 +2024-11-11 20:39:19,176 Current Learning Rate: 0.0060139365 +2024-11-11 20:39:19,176 Train Loss: 0.0005927, Val Loss: 0.0006851 +2024-11-11 20:39:19,176 Epoch 1514/2000 +2024-11-11 20:39:35,681 Current Learning Rate: 0.0060907162 +2024-11-11 20:39:35,681 Train Loss: 0.0005849, Val Loss: 0.0006862 +2024-11-11 20:39:35,682 Epoch 1515/2000 +2024-11-11 20:39:51,533 Current Learning Rate: 0.0061672268 +2024-11-11 20:39:51,534 Train Loss: 0.0005991, Val Loss: 0.0007278 +2024-11-11 20:39:51,534 Epoch 1516/2000 +2024-11-11 20:40:07,384 Current Learning Rate: 0.0062434494 +2024-11-11 20:40:07,385 Train Loss: 0.0006466, Val Loss: 0.0007101 +2024-11-11 20:40:07,385 Epoch 1517/2000 +2024-11-11 20:40:24,452 Current Learning Rate: 0.0063193652 +2024-11-11 20:40:24,452 Train Loss: 0.0005909, Val Loss: 0.0007273 +2024-11-11 20:40:24,453 Epoch 1518/2000 +2024-11-11 20:40:40,274 Current Learning Rate: 0.0063949555 +2024-11-11 20:40:40,274 Train Loss: 0.0006418, Val Loss: 0.0007695 +2024-11-11 20:40:40,274 Epoch 1519/2000 +2024-11-11 20:40:55,638 Current Learning Rate: 0.0064702016 +2024-11-11 20:40:55,638 Train Loss: 0.0006079, Val Loss: 0.0007664 +2024-11-11 20:40:55,638 Epoch 1520/2000 +2024-11-11 20:41:10,975 Current Learning Rate: 0.0065450850 +2024-11-11 20:41:10,975 Train Loss: 0.0006324, Val Loss: 0.0007451 +2024-11-11 20:41:10,976 Epoch 1521/2000 +2024-11-11 20:41:26,362 Current Learning Rate: 0.0066195871 +2024-11-11 20:41:26,362 Train Loss: 0.0005829, Val Loss: 0.0007283 +2024-11-11 20:41:26,362 Epoch 1522/2000 +2024-11-11 20:41:41,693 Current Learning Rate: 0.0066936896 +2024-11-11 20:41:41,694 Train Loss: 0.0005885, Val Loss: 0.0007237 +2024-11-11 20:41:41,694 Epoch 1523/2000 +2024-11-11 20:41:57,294 Current Learning Rate: 0.0067673742 +2024-11-11 20:41:57,295 Train Loss: 0.0006465, Val Loss: 0.0007127 +2024-11-11 20:41:57,295 Epoch 1524/2000 +2024-11-11 20:42:13,255 Current Learning Rate: 0.0068406228 +2024-11-11 20:42:13,256 
Train Loss: 0.0005241, Val Loss: 0.0007018 +2024-11-11 20:42:13,256 Epoch 1525/2000 +2024-11-11 20:42:29,226 Current Learning Rate: 0.0069134172 +2024-11-11 20:42:29,226 Train Loss: 0.0005400, Val Loss: 0.0006822 +2024-11-11 20:42:29,227 Epoch 1526/2000 +2024-11-11 20:42:45,445 Current Learning Rate: 0.0069857395 +2024-11-11 20:42:45,445 Train Loss: 0.0006432, Val Loss: 0.0006552 +2024-11-11 20:42:45,445 Epoch 1527/2000 +2024-11-11 20:43:02,102 Current Learning Rate: 0.0070575718 +2024-11-11 20:43:02,102 Train Loss: 0.0005796, Val Loss: 0.0007324 +2024-11-11 20:43:02,102 Epoch 1528/2000 +2024-11-11 20:43:17,896 Current Learning Rate: 0.0071288965 +2024-11-11 20:43:17,896 Train Loss: 0.0005811, Val Loss: 0.0007003 +2024-11-11 20:43:17,897 Epoch 1529/2000 +2024-11-11 20:43:33,769 Current Learning Rate: 0.0071996958 +2024-11-11 20:43:33,770 Train Loss: 0.0005254, Val Loss: 0.0006856 +2024-11-11 20:43:33,771 Epoch 1530/2000 +2024-11-11 20:43:50,678 Current Learning Rate: 0.0072699525 +2024-11-11 20:43:50,679 Train Loss: 0.0007053, Val Loss: 0.0007410 +2024-11-11 20:43:50,680 Epoch 1531/2000 +2024-11-11 20:44:06,548 Current Learning Rate: 0.0073396491 +2024-11-11 20:44:06,549 Train Loss: 0.0007030, Val Loss: 0.0006901 +2024-11-11 20:44:06,549 Epoch 1532/2000 +2024-11-11 20:44:22,515 Current Learning Rate: 0.0074087684 +2024-11-11 20:44:22,516 Train Loss: 0.0005726, Val Loss: 0.0006696 +2024-11-11 20:44:22,516 Epoch 1533/2000 +2024-11-11 20:44:38,820 Current Learning Rate: 0.0074772933 +2024-11-11 20:44:38,821 Train Loss: 0.0005782, Val Loss: 0.0006637 +2024-11-11 20:44:38,821 Epoch 1534/2000 +2024-11-11 20:44:54,761 Current Learning Rate: 0.0075452071 +2024-11-11 20:44:54,761 Train Loss: 0.0005654, Val Loss: 0.0006796 +2024-11-11 20:44:54,762 Epoch 1535/2000 +2024-11-11 20:45:10,162 Current Learning Rate: 0.0076124928 +2024-11-11 20:45:10,163 Train Loss: 0.0005819, Val Loss: 0.0007103 +2024-11-11 20:45:10,163 Epoch 1536/2000 +2024-11-11 20:45:25,934 Current Learning 
Rate: 0.0076791340 +2024-11-11 20:45:25,934 Train Loss: 0.0006324, Val Loss: 0.0007561 +2024-11-11 20:45:25,934 Epoch 1537/2000 +2024-11-11 20:45:41,402 Current Learning Rate: 0.0077451141 +2024-11-11 20:45:41,402 Train Loss: 0.0005841, Val Loss: 0.0007257 +2024-11-11 20:45:41,403 Epoch 1538/2000 +2024-11-11 20:45:56,820 Current Learning Rate: 0.0078104169 +2024-11-11 20:45:56,821 Train Loss: 0.0006216, Val Loss: 0.0007181 +2024-11-11 20:45:56,821 Epoch 1539/2000 +2024-11-11 20:46:12,328 Current Learning Rate: 0.0078750263 +2024-11-11 20:46:12,328 Train Loss: 0.0006122, Val Loss: 0.0006882 +2024-11-11 20:46:12,329 Epoch 1540/2000 +2024-11-11 20:46:28,022 Current Learning Rate: 0.0079389263 +2024-11-11 20:46:28,022 Train Loss: 0.0006561, Val Loss: 0.0007055 +2024-11-11 20:46:28,023 Epoch 1541/2000 +2024-11-11 20:46:44,403 Current Learning Rate: 0.0080021011 +2024-11-11 20:46:44,404 Train Loss: 0.0006346, Val Loss: 0.0007708 +2024-11-11 20:46:44,404 Epoch 1542/2000 +2024-11-11 20:47:00,499 Current Learning Rate: 0.0080645353 +2024-11-11 20:47:00,499 Train Loss: 0.0007184, Val Loss: 0.0007942 +2024-11-11 20:47:00,500 Epoch 1543/2000 +2024-11-11 20:47:16,735 Current Learning Rate: 0.0081262133 +2024-11-11 20:47:16,736 Train Loss: 0.0006839, Val Loss: 0.0008166 +2024-11-11 20:47:16,736 Epoch 1544/2000 +2024-11-11 20:47:32,587 Current Learning Rate: 0.0081871199 +2024-11-11 20:47:32,588 Train Loss: 0.0006820, Val Loss: 0.0007634 +2024-11-11 20:47:32,588 Epoch 1545/2000 +2024-11-11 20:47:48,165 Current Learning Rate: 0.0082472402 +2024-11-11 20:47:48,165 Train Loss: 0.0006341, Val Loss: 0.0007432 +2024-11-11 20:47:48,165 Epoch 1546/2000 +2024-11-11 20:48:03,330 Current Learning Rate: 0.0083065593 +2024-11-11 20:48:03,330 Train Loss: 0.0006256, Val Loss: 0.0007161 +2024-11-11 20:48:03,331 Epoch 1547/2000 +2024-11-11 20:48:18,658 Current Learning Rate: 0.0083650626 +2024-11-11 20:48:18,659 Train Loss: 0.0006356, Val Loss: 0.0006923 +2024-11-11 20:48:18,659 Epoch 1548/2000 
+2024-11-11 20:48:34,173 Current Learning Rate: 0.0084227355 +2024-11-11 20:48:34,174 Train Loss: 0.0005454, Val Loss: 0.0006823 +2024-11-11 20:48:34,174 Epoch 1549/2000 +2024-11-11 20:48:49,519 Current Learning Rate: 0.0084795640 +2024-11-11 20:48:49,520 Train Loss: 0.0006010, Val Loss: 0.0006703 +2024-11-11 20:48:49,520 Epoch 1550/2000 +2024-11-11 20:49:05,189 Current Learning Rate: 0.0085355339 +2024-11-11 20:49:05,190 Train Loss: 0.0006627, Val Loss: 0.0006890 +2024-11-11 20:49:05,190 Epoch 1551/2000 +2024-11-11 20:49:20,460 Current Learning Rate: 0.0085906315 +2024-11-11 20:49:20,460 Train Loss: 0.0007621, Val Loss: 0.0007254 +2024-11-11 20:49:20,460 Epoch 1552/2000 +2024-11-11 20:49:35,724 Current Learning Rate: 0.0086448431 +2024-11-11 20:49:35,724 Train Loss: 0.0005560, Val Loss: 0.0007352 +2024-11-11 20:49:35,725 Epoch 1553/2000 +2024-11-11 20:49:51,051 Current Learning Rate: 0.0086981555 +2024-11-11 20:49:51,052 Train Loss: 0.0006073, Val Loss: 0.0007517 +2024-11-11 20:49:51,052 Epoch 1554/2000 +2024-11-11 20:50:07,115 Current Learning Rate: 0.0087505553 +2024-11-11 20:50:07,115 Train Loss: 0.0006429, Val Loss: 0.0007569 +2024-11-11 20:50:07,115 Epoch 1555/2000 +2024-11-11 20:50:22,644 Current Learning Rate: 0.0088020298 +2024-11-11 20:50:22,645 Train Loss: 0.0007582, Val Loss: 0.0007841 +2024-11-11 20:50:22,645 Epoch 1556/2000 +2024-11-11 20:50:38,129 Current Learning Rate: 0.0088525662 +2024-11-11 20:50:38,130 Train Loss: 0.0006544, Val Loss: 0.0007788 +2024-11-11 20:50:38,130 Epoch 1557/2000 +2024-11-11 20:50:53,981 Current Learning Rate: 0.0089021520 +2024-11-11 20:50:53,982 Train Loss: 0.0006456, Val Loss: 0.0007563 +2024-11-11 20:50:53,982 Epoch 1558/2000 +2024-11-11 20:51:10,423 Current Learning Rate: 0.0089507751 +2024-11-11 20:51:10,423 Train Loss: 0.0006523, Val Loss: 0.0007980 +2024-11-11 20:51:10,424 Epoch 1559/2000 +2024-11-11 20:51:26,694 Current Learning Rate: 0.0089984233 +2024-11-11 20:51:26,695 Train Loss: 0.0006832, Val Loss: 0.0007425 
+2024-11-11 20:51:26,695 Epoch 1560/2000 +2024-11-11 20:51:42,747 Current Learning Rate: 0.0090450850 +2024-11-11 20:51:42,748 Train Loss: 0.0005677, Val Loss: 0.0007043 +2024-11-11 20:51:42,748 Epoch 1561/2000 +2024-11-11 20:51:59,257 Current Learning Rate: 0.0090907486 +2024-11-11 20:51:59,257 Train Loss: 0.0006249, Val Loss: 0.0007395 +2024-11-11 20:51:59,258 Epoch 1562/2000 +2024-11-11 20:52:15,419 Current Learning Rate: 0.0091354029 +2024-11-11 20:52:15,420 Train Loss: 0.0005915, Val Loss: 0.0007268 +2024-11-11 20:52:15,420 Epoch 1563/2000 +2024-11-11 20:52:31,647 Current Learning Rate: 0.0091790368 +2024-11-11 20:52:31,647 Train Loss: 0.0006009, Val Loss: 0.0007376 +2024-11-11 20:52:31,648 Epoch 1564/2000 +2024-11-11 20:52:47,545 Current Learning Rate: 0.0092216396 +2024-11-11 20:52:47,545 Train Loss: 0.0006693, Val Loss: 0.0007563 +2024-11-11 20:52:47,546 Epoch 1565/2000 +2024-11-11 20:53:02,887 Current Learning Rate: 0.0092632008 +2024-11-11 20:53:02,888 Train Loss: 0.0005953, Val Loss: 0.0007969 +2024-11-11 20:53:02,889 Epoch 1566/2000 +2024-11-11 20:53:18,419 Current Learning Rate: 0.0093037101 +2024-11-11 20:53:18,419 Train Loss: 0.0006203, Val Loss: 0.0008209 +2024-11-11 20:53:18,419 Epoch 1567/2000 +2024-11-11 20:53:34,229 Current Learning Rate: 0.0093431576 +2024-11-11 20:53:34,230 Train Loss: 0.0006143, Val Loss: 0.0007918 +2024-11-11 20:53:34,230 Epoch 1568/2000 +2024-11-11 20:53:50,097 Current Learning Rate: 0.0093815334 +2024-11-11 20:53:50,098 Train Loss: 0.0006855, Val Loss: 0.0007661 +2024-11-11 20:53:50,098 Epoch 1569/2000 +2024-11-11 20:54:07,549 Current Learning Rate: 0.0094188282 +2024-11-11 20:54:07,550 Train Loss: 0.0006081, Val Loss: 0.0006879 +2024-11-11 20:54:07,550 Epoch 1570/2000 +2024-11-11 20:54:23,029 Current Learning Rate: 0.0094550326 +2024-11-11 20:54:23,030 Train Loss: 0.0005917, Val Loss: 0.0007156 +2024-11-11 20:54:23,030 Epoch 1571/2000 +2024-11-11 20:54:38,360 Current Learning Rate: 0.0094901379 +2024-11-11 20:54:38,360 
Train Loss: 0.0005541, Val Loss: 0.0006825 +2024-11-11 20:54:38,360 Epoch 1572/2000 +2024-11-11 20:54:53,711 Current Learning Rate: 0.0095241353 +2024-11-11 20:54:53,711 Train Loss: 0.0005301, Val Loss: 0.0006911 +2024-11-11 20:54:53,711 Epoch 1573/2000 +2024-11-11 20:55:09,628 Current Learning Rate: 0.0095570164 +2024-11-11 20:55:09,628 Train Loss: 0.0005563, Val Loss: 0.0006888 +2024-11-11 20:55:09,628 Epoch 1574/2000 +2024-11-11 20:55:24,931 Current Learning Rate: 0.0095887731 +2024-11-11 20:55:24,931 Train Loss: 0.0006267, Val Loss: 0.0007232 +2024-11-11 20:55:24,932 Epoch 1575/2000 +2024-11-11 20:55:40,676 Current Learning Rate: 0.0096193977 +2024-11-11 20:55:40,677 Train Loss: 0.0005328, Val Loss: 0.0006615 +2024-11-11 20:55:40,677 Epoch 1576/2000 +2024-11-11 20:55:56,121 Current Learning Rate: 0.0096488824 +2024-11-11 20:55:56,121 Train Loss: 0.0005385, Val Loss: 0.0007085 +2024-11-11 20:55:56,122 Epoch 1577/2000 +2024-11-11 20:56:11,419 Current Learning Rate: 0.0096772202 +2024-11-11 20:56:11,420 Train Loss: 0.0006376, Val Loss: 0.0007590 +2024-11-11 20:56:11,420 Epoch 1578/2000 +2024-11-11 20:56:26,934 Current Learning Rate: 0.0097044038 +2024-11-11 20:56:26,934 Train Loss: 0.0006362, Val Loss: 0.0007474 +2024-11-11 20:56:26,934 Epoch 1579/2000 +2024-11-11 20:56:42,217 Current Learning Rate: 0.0097304268 +2024-11-11 20:56:42,218 Train Loss: 0.0006159, Val Loss: 0.0007392 +2024-11-11 20:56:42,218 Epoch 1580/2000 +2024-11-11 20:56:57,672 Current Learning Rate: 0.0097552826 +2024-11-11 20:56:57,672 Train Loss: 0.0006625, Val Loss: 0.0007923 +2024-11-11 20:56:57,673 Epoch 1581/2000 +2024-11-11 20:57:13,147 Current Learning Rate: 0.0097789651 +2024-11-11 20:57:13,147 Train Loss: 0.0005825, Val Loss: 0.0007274 +2024-11-11 20:57:13,147 Epoch 1582/2000 +2024-11-11 20:57:28,576 Current Learning Rate: 0.0098014684 +2024-11-11 20:57:28,577 Train Loss: 0.0010995, Val Loss: 0.0010468 +2024-11-11 20:57:28,577 Epoch 1583/2000 +2024-11-11 20:57:44,072 Current Learning 
Rate: 0.0098227871 +2024-11-11 20:57:44,073 Train Loss: 0.0007570, Val Loss: 0.0007988 +2024-11-11 20:57:44,073 Epoch 1584/2000 +2024-11-11 20:57:59,278 Current Learning Rate: 0.0098429158 +2024-11-11 20:57:59,279 Train Loss: 0.0006140, Val Loss: 0.0007082 +2024-11-11 20:57:59,279 Epoch 1585/2000 +2024-11-11 20:58:14,221 Current Learning Rate: 0.0098618496 +2024-11-11 20:58:14,221 Train Loss: 0.0006296, Val Loss: 0.0007013 +2024-11-11 20:58:14,221 Epoch 1586/2000 +2024-11-11 20:58:29,757 Current Learning Rate: 0.0098795838 +2024-11-11 20:58:29,758 Train Loss: 0.0006007, Val Loss: 0.0007067 +2024-11-11 20:58:29,758 Epoch 1587/2000 +2024-11-11 20:58:45,046 Current Learning Rate: 0.0098961141 +2024-11-11 20:58:45,047 Train Loss: 0.0006515, Val Loss: 0.0007251 +2024-11-11 20:58:45,047 Epoch 1588/2000 +2024-11-11 20:59:00,279 Current Learning Rate: 0.0099114363 +2024-11-11 20:59:00,280 Train Loss: 0.0006884, Val Loss: 0.0007127 +2024-11-11 20:59:00,280 Epoch 1589/2000 +2024-11-11 20:59:15,591 Current Learning Rate: 0.0099255466 +2024-11-11 20:59:15,591 Train Loss: 0.0006105, Val Loss: 0.0006956 +2024-11-11 20:59:15,591 Epoch 1590/2000 +2024-11-11 20:59:31,057 Current Learning Rate: 0.0099384417 +2024-11-11 20:59:31,057 Train Loss: 0.0005697, Val Loss: 0.0006622 +2024-11-11 20:59:31,058 Epoch 1591/2000 +2024-11-11 20:59:46,225 Current Learning Rate: 0.0099501183 +2024-11-11 20:59:46,226 Train Loss: 0.0005302, Val Loss: 0.0006668 +2024-11-11 20:59:46,226 Epoch 1592/2000 +2024-11-11 21:00:01,386 Current Learning Rate: 0.0099605735 +2024-11-11 21:00:01,386 Train Loss: 0.0006113, Val Loss: 0.0007438 +2024-11-11 21:00:01,386 Epoch 1593/2000 +2024-11-11 21:00:17,057 Current Learning Rate: 0.0099698048 +2024-11-11 21:00:17,058 Train Loss: 0.0005776, Val Loss: 0.0006927 +2024-11-11 21:00:17,059 Epoch 1594/2000 +2024-11-11 21:00:32,223 Current Learning Rate: 0.0099778098 +2024-11-11 21:00:32,223 Train Loss: 0.0005939, Val Loss: 0.0007548 +2024-11-11 21:00:32,223 Epoch 1595/2000 
+2024-11-11 21:00:47,543 Current Learning Rate: 0.0099845867 +2024-11-11 21:00:47,543 Train Loss: 0.0006328, Val Loss: 0.0007164 +2024-11-11 21:00:47,544 Epoch 1596/2000 +2024-11-11 21:01:03,076 Current Learning Rate: 0.0099901336 +2024-11-11 21:01:03,077 Train Loss: 0.0006429, Val Loss: 0.0007135 +2024-11-11 21:01:03,077 Epoch 1597/2000 +2024-11-11 21:01:19,029 Current Learning Rate: 0.0099944494 +2024-11-11 21:01:19,029 Train Loss: 0.0005700, Val Loss: 0.0007071 +2024-11-11 21:01:19,030 Epoch 1598/2000 +2024-11-11 21:01:34,774 Current Learning Rate: 0.0099975328 +2024-11-11 21:01:34,775 Train Loss: 0.0005241, Val Loss: 0.0006831 +2024-11-11 21:01:34,775 Epoch 1599/2000 +2024-11-11 21:01:51,215 Current Learning Rate: 0.0099993832 +2024-11-11 21:01:51,215 Train Loss: 0.0005710, Val Loss: 0.0006871 +2024-11-11 21:01:51,216 Epoch 1600/2000 +2024-11-11 21:02:07,583 Current Learning Rate: 0.0100000000 +2024-11-11 21:02:07,583 Train Loss: 0.0006290, Val Loss: 0.0007006 +2024-11-11 21:02:07,584 Epoch 1601/2000 +2024-11-11 21:02:24,025 Current Learning Rate: 0.0099993832 +2024-11-11 21:02:24,026 Train Loss: 0.0005653, Val Loss: 0.0006867 +2024-11-11 21:02:24,026 Epoch 1602/2000 +2024-11-11 21:02:39,275 Current Learning Rate: 0.0099975328 +2024-11-11 21:02:39,276 Train Loss: 0.0006054, Val Loss: 0.0007082 +2024-11-11 21:02:39,276 Epoch 1603/2000 +2024-11-11 21:02:55,958 Current Learning Rate: 0.0099944494 +2024-11-11 21:02:55,959 Train Loss: 0.0005532, Val Loss: 0.0007211 +2024-11-11 21:02:55,959 Epoch 1604/2000 +2024-11-11 21:03:11,381 Current Learning Rate: 0.0099901336 +2024-11-11 21:03:11,382 Train Loss: 0.0006236, Val Loss: 0.0007576 +2024-11-11 21:03:11,382 Epoch 1605/2000 +2024-11-11 21:03:27,585 Current Learning Rate: 0.0099845867 +2024-11-11 21:03:27,586 Train Loss: 0.0006244, Val Loss: 0.0007181 +2024-11-11 21:03:27,587 Epoch 1606/2000 +2024-11-11 21:03:42,922 Current Learning Rate: 0.0099778098 +2024-11-11 21:03:42,922 Train Loss: 0.0005689, Val Loss: 0.0006912 
+2024-11-11 21:03:42,922 Epoch 1607/2000 +2024-11-11 21:03:57,985 Current Learning Rate: 0.0099698048 +2024-11-11 21:03:57,986 Train Loss: 0.0006124, Val Loss: 0.0007400 +2024-11-11 21:03:57,986 Epoch 1608/2000 +2024-11-11 21:04:13,229 Current Learning Rate: 0.0099605735 +2024-11-11 21:04:13,230 Train Loss: 0.0006312, Val Loss: 0.0007457 +2024-11-11 21:04:13,230 Epoch 1609/2000 +2024-11-11 21:04:29,189 Current Learning Rate: 0.0099501183 +2024-11-11 21:04:29,189 Train Loss: 0.0006641, Val Loss: 0.0007831 +2024-11-11 21:04:29,190 Epoch 1610/2000 +2024-11-11 21:04:45,613 Current Learning Rate: 0.0099384417 +2024-11-11 21:04:45,615 Train Loss: 0.0006055, Val Loss: 0.0007235 +2024-11-11 21:04:45,615 Epoch 1611/2000 +2024-11-11 21:05:01,489 Current Learning Rate: 0.0099255466 +2024-11-11 21:05:01,490 Train Loss: 0.0006120, Val Loss: 0.0006749 +2024-11-11 21:05:01,490 Epoch 1612/2000 +2024-11-11 21:05:17,316 Current Learning Rate: 0.0099114363 +2024-11-11 21:05:17,317 Train Loss: 0.0006593, Val Loss: 0.0006665 +2024-11-11 21:05:17,317 Epoch 1613/2000 +2024-11-11 21:05:34,608 Current Learning Rate: 0.0098961141 +2024-11-11 21:05:34,609 Train Loss: 0.0005506, Val Loss: 0.0006879 +2024-11-11 21:05:34,609 Epoch 1614/2000 +2024-11-11 21:05:50,551 Current Learning Rate: 0.0098795838 +2024-11-11 21:05:50,552 Train Loss: 0.0005703, Val Loss: 0.0006667 +2024-11-11 21:05:50,552 Epoch 1615/2000 +2024-11-11 21:06:07,113 Current Learning Rate: 0.0098618496 +2024-11-11 21:06:07,113 Train Loss: 0.0005136, Val Loss: 0.0006420 +2024-11-11 21:06:07,113 Epoch 1616/2000 +2024-11-11 21:06:23,347 Current Learning Rate: 0.0098429158 +2024-11-11 21:06:23,347 Train Loss: 0.0005996, Val Loss: 0.0006467 +2024-11-11 21:06:23,348 Epoch 1617/2000 +2024-11-11 21:06:38,694 Current Learning Rate: 0.0098227871 +2024-11-11 21:06:38,696 Train Loss: 0.0005619, Val Loss: 0.0006564 +2024-11-11 21:06:38,696 Epoch 1618/2000 +2024-11-11 21:06:54,865 Current Learning Rate: 0.0098014684 +2024-11-11 21:06:54,866 
Train Loss: 0.0005644, Val Loss: 0.0006659 +2024-11-11 21:06:54,866 Epoch 1619/2000 +2024-11-11 21:07:10,971 Current Learning Rate: 0.0097789651 +2024-11-11 21:07:10,973 Train Loss: 0.0006016, Val Loss: 0.0006553 +2024-11-11 21:07:10,973 Epoch 1620/2000 +2024-11-11 21:07:26,910 Current Learning Rate: 0.0097552826 +2024-11-11 21:07:26,910 Train Loss: 0.0004954, Val Loss: 0.0006867 +2024-11-11 21:07:26,911 Epoch 1621/2000 +2024-11-11 21:07:42,858 Current Learning Rate: 0.0097304268 +2024-11-11 21:07:42,858 Train Loss: 0.0005692, Val Loss: 0.0007243 +2024-11-11 21:07:42,859 Epoch 1622/2000 +2024-11-11 21:07:58,421 Current Learning Rate: 0.0097044038 +2024-11-11 21:07:58,421 Train Loss: 0.0006182, Val Loss: 0.0006944 +2024-11-11 21:07:58,422 Epoch 1623/2000 +2024-11-11 21:08:14,521 Current Learning Rate: 0.0096772202 +2024-11-11 21:08:14,522 Train Loss: 0.0005968, Val Loss: 0.0007559 +2024-11-11 21:08:14,522 Epoch 1624/2000 +2024-11-11 21:08:30,611 Current Learning Rate: 0.0096488824 +2024-11-11 21:08:30,613 Train Loss: 0.0005986, Val Loss: 0.0006993 +2024-11-11 21:08:30,613 Epoch 1625/2000 +2024-11-11 21:08:46,495 Current Learning Rate: 0.0096193977 +2024-11-11 21:08:46,496 Train Loss: 0.0005167, Val Loss: 0.0006882 +2024-11-11 21:08:46,496 Epoch 1626/2000 +2024-11-11 21:09:01,803 Current Learning Rate: 0.0095887731 +2024-11-11 21:09:01,803 Train Loss: 0.0005384, Val Loss: 0.0006748 +2024-11-11 21:09:01,804 Epoch 1627/2000 +2024-11-11 21:09:17,477 Current Learning Rate: 0.0095570164 +2024-11-11 21:09:17,477 Train Loss: 0.0006096, Val Loss: 0.0006921 +2024-11-11 21:09:17,478 Epoch 1628/2000 +2024-11-11 21:09:33,161 Current Learning Rate: 0.0095241353 +2024-11-11 21:09:33,161 Train Loss: 0.0006100, Val Loss: 0.0007079 +2024-11-11 21:09:33,162 Epoch 1629/2000 +2024-11-11 21:09:48,659 Current Learning Rate: 0.0094901379 +2024-11-11 21:09:48,659 Train Loss: 0.0006276, Val Loss: 0.0006840 +2024-11-11 21:09:48,660 Epoch 1630/2000 +2024-11-11 21:10:04,413 Current Learning 
Rate: 0.0094550326 +2024-11-11 21:10:04,413 Train Loss: 0.0006200, Val Loss: 0.0006968 +2024-11-11 21:10:04,413 Epoch 1631/2000 +2024-11-11 21:10:20,284 Current Learning Rate: 0.0094188282 +2024-11-11 21:10:20,285 Train Loss: 0.0005760, Val Loss: 0.0007054 +2024-11-11 21:10:20,285 Epoch 1632/2000 +2024-11-11 21:10:35,692 Current Learning Rate: 0.0093815334 +2024-11-11 21:10:35,693 Train Loss: 0.0005433, Val Loss: 0.0006638 +2024-11-11 21:10:35,693 Epoch 1633/2000 +2024-11-11 21:10:51,424 Current Learning Rate: 0.0093431576 +2024-11-11 21:10:51,425 Train Loss: 0.0005208, Val Loss: 0.0007129 +2024-11-11 21:10:51,425 Epoch 1634/2000 +2024-11-11 21:11:06,879 Current Learning Rate: 0.0093037101 +2024-11-11 21:11:06,879 Train Loss: 0.0005983, Val Loss: 0.0007182 +2024-11-11 21:11:06,880 Epoch 1635/2000 +2024-11-11 21:11:23,115 Current Learning Rate: 0.0092632008 +2024-11-11 21:11:23,116 Train Loss: 0.0006129, Val Loss: 0.0006546 +2024-11-11 21:11:23,116 Epoch 1636/2000 +2024-11-11 21:11:38,929 Current Learning Rate: 0.0092216396 +2024-11-11 21:11:38,929 Train Loss: 0.0006309, Val Loss: 0.0007002 +2024-11-11 21:11:38,930 Epoch 1637/2000 +2024-11-11 21:11:54,777 Current Learning Rate: 0.0091790368 +2024-11-11 21:11:54,778 Train Loss: 0.0005877, Val Loss: 0.0007419 +2024-11-11 21:11:54,778 Epoch 1638/2000 +2024-11-11 21:12:11,117 Current Learning Rate: 0.0091354029 +2024-11-11 21:12:11,118 Train Loss: 0.0006475, Val Loss: 0.0006683 +2024-11-11 21:12:11,118 Epoch 1639/2000 +2024-11-11 21:12:27,165 Current Learning Rate: 0.0090907486 +2024-11-11 21:12:27,166 Train Loss: 0.0005489, Val Loss: 0.0006673 +2024-11-11 21:12:27,166 Epoch 1640/2000 +2024-11-11 21:12:43,178 Current Learning Rate: 0.0090450850 +2024-11-11 21:12:43,179 Train Loss: 0.0007216, Val Loss: 0.0008442 +2024-11-11 21:12:43,179 Epoch 1641/2000 +2024-11-11 21:12:59,330 Current Learning Rate: 0.0089984233 +2024-11-11 21:12:59,332 Train Loss: 0.0006222, Val Loss: 0.0006745 +2024-11-11 21:12:59,332 Epoch 1642/2000 
+2024-11-11 21:13:15,731 Current Learning Rate: 0.0089507751 +2024-11-11 21:13:15,731 Train Loss: 0.0005324, Val Loss: 0.0007027 +2024-11-11 21:13:15,732 Epoch 1643/2000 +2024-11-11 21:13:32,312 Current Learning Rate: 0.0089021520 +2024-11-11 21:13:32,312 Train Loss: 0.0006039, Val Loss: 0.0007143 +2024-11-11 21:13:32,313 Epoch 1644/2000 +2024-11-11 21:13:49,583 Current Learning Rate: 0.0088525662 +2024-11-11 21:13:49,584 Train Loss: 0.0005603, Val Loss: 0.0006621 +2024-11-11 21:13:49,584 Epoch 1645/2000 +2024-11-11 21:14:05,625 Current Learning Rate: 0.0088020298 +2024-11-11 21:14:05,625 Train Loss: 0.0006123, Val Loss: 0.0006480 +2024-11-11 21:14:05,625 Epoch 1646/2000 +2024-11-11 21:14:21,815 Current Learning Rate: 0.0087505553 +2024-11-11 21:14:21,816 Train Loss: 0.0005118, Val Loss: 0.0006217 +2024-11-11 21:14:21,816 Epoch 1647/2000 +2024-11-11 21:14:38,370 Current Learning Rate: 0.0086981555 +2024-11-11 21:14:38,371 Train Loss: 0.0005529, Val Loss: 0.0006320 +2024-11-11 21:14:38,372 Epoch 1648/2000 +2024-11-11 21:14:54,991 Current Learning Rate: 0.0086448431 +2024-11-11 21:14:54,992 Train Loss: 0.0005973, Val Loss: 0.0006550 +2024-11-11 21:14:54,992 Epoch 1649/2000 +2024-11-11 21:15:10,277 Current Learning Rate: 0.0085906315 +2024-11-11 21:15:10,277 Train Loss: 0.0005381, Val Loss: 0.0006692 +2024-11-11 21:15:10,277 Epoch 1650/2000 +2024-11-11 21:15:26,085 Current Learning Rate: 0.0085355339 +2024-11-11 21:15:26,086 Train Loss: 0.0005312, Val Loss: 0.0006560 +2024-11-11 21:15:26,086 Epoch 1651/2000 +2024-11-11 21:15:41,190 Current Learning Rate: 0.0084795640 +2024-11-11 21:15:41,190 Train Loss: 0.0005639, Val Loss: 0.0006777 +2024-11-11 21:15:41,190 Epoch 1652/2000 +2024-11-11 21:15:56,598 Current Learning Rate: 0.0084227355 +2024-11-11 21:15:56,599 Train Loss: 0.0005813, Val Loss: 0.0007803 +2024-11-11 21:15:56,599 Epoch 1653/2000 +2024-11-11 21:16:12,226 Current Learning Rate: 0.0083650626 +2024-11-11 21:16:12,226 Train Loss: 0.0005547, Val Loss: 0.0006887 
+2024-11-11 21:16:12,226 Epoch 1654/2000 +2024-11-11 21:16:27,413 Current Learning Rate: 0.0083065593 +2024-11-11 21:16:27,414 Train Loss: 0.0005443, Val Loss: 0.0006525 +2024-11-11 21:16:27,414 Epoch 1655/2000 +2024-11-11 21:16:43,153 Current Learning Rate: 0.0082472402 +2024-11-11 21:16:43,153 Train Loss: 0.0005685, Val Loss: 0.0006408 +2024-11-11 21:16:43,153 Epoch 1656/2000 +2024-11-11 21:16:58,433 Current Learning Rate: 0.0081871199 +2024-11-11 21:16:58,434 Train Loss: 0.0005863, Val Loss: 0.0006451 +2024-11-11 21:16:58,434 Epoch 1657/2000 +2024-11-11 21:17:14,151 Current Learning Rate: 0.0081262133 +2024-11-11 21:17:14,151 Train Loss: 0.0005508, Val Loss: 0.0006583 +2024-11-11 21:17:14,151 Epoch 1658/2000 +2024-11-11 21:17:29,690 Current Learning Rate: 0.0080645353 +2024-11-11 21:17:29,690 Train Loss: 0.0004973, Val Loss: 0.0006194 +2024-11-11 21:17:29,690 Epoch 1659/2000 +2024-11-11 21:17:45,728 Current Learning Rate: 0.0080021011 +2024-11-11 21:17:46,492 Train Loss: 0.0004798, Val Loss: 0.0005947 +2024-11-11 21:17:46,493 Epoch 1660/2000 +2024-11-11 21:18:01,198 Current Learning Rate: 0.0079389263 +2024-11-11 21:18:02,052 Train Loss: 0.0005035, Val Loss: 0.0005858 +2024-11-11 21:18:02,053 Epoch 1661/2000 +2024-11-11 21:18:16,728 Current Learning Rate: 0.0078750263 +2024-11-11 21:18:16,729 Train Loss: 0.0005292, Val Loss: 0.0005920 +2024-11-11 21:18:16,729 Epoch 1662/2000 +2024-11-11 21:18:32,072 Current Learning Rate: 0.0078104169 +2024-11-11 21:18:32,073 Train Loss: 0.0004721, Val Loss: 0.0005989 +2024-11-11 21:18:32,073 Epoch 1663/2000 +2024-11-11 21:18:47,577 Current Learning Rate: 0.0077451141 +2024-11-11 21:18:47,577 Train Loss: 0.0004615, Val Loss: 0.0006117 +2024-11-11 21:18:47,577 Epoch 1664/2000 +2024-11-11 21:19:03,624 Current Learning Rate: 0.0076791340 +2024-11-11 21:19:03,624 Train Loss: 0.0004617, Val Loss: 0.0006160 +2024-11-11 21:19:03,624 Epoch 1665/2000 +2024-11-11 21:19:19,559 Current Learning Rate: 0.0076124928 +2024-11-11 21:19:19,560 
Train Loss: 0.0004836, Val Loss: 0.0006143 +2024-11-11 21:19:19,560 Epoch 1666/2000 +2024-11-11 21:19:35,949 Current Learning Rate: 0.0075452071 +2024-11-11 21:19:35,950 Train Loss: 0.0004806, Val Loss: 0.0006009 +2024-11-11 21:19:35,950 Epoch 1667/2000 +2024-11-11 21:19:50,396 Current Learning Rate: 0.0074772933 +2024-11-11 21:19:50,397 Train Loss: 0.0004721, Val Loss: 0.0006422 +2024-11-11 21:19:50,397 Epoch 1668/2000 +2024-11-11 21:20:05,722 Current Learning Rate: 0.0074087684 +2024-11-11 21:20:05,723 Train Loss: 0.0005263, Val Loss: 0.0006798 +2024-11-11 21:20:05,733 Epoch 1669/2000 +2024-11-11 21:20:21,494 Current Learning Rate: 0.0073396491 +2024-11-11 21:20:21,494 Train Loss: 0.0006208, Val Loss: 0.0006382 +2024-11-11 21:20:21,495 Epoch 1670/2000 +2024-11-11 21:20:37,203 Current Learning Rate: 0.0072699525 +2024-11-11 21:20:37,204 Train Loss: 0.0005495, Val Loss: 0.0006000 +2024-11-11 21:20:37,204 Epoch 1671/2000 +2024-11-11 21:20:52,738 Current Learning Rate: 0.0071996958 +2024-11-11 21:20:52,738 Train Loss: 0.0004826, Val Loss: 0.0006239 +2024-11-11 21:20:52,738 Epoch 1672/2000 +2024-11-11 21:21:08,673 Current Learning Rate: 0.0071288965 +2024-11-11 21:21:08,674 Train Loss: 0.0005262, Val Loss: 0.0006366 +2024-11-11 21:21:08,674 Epoch 1673/2000 +2024-11-11 21:21:24,328 Current Learning Rate: 0.0070575718 +2024-11-11 21:21:24,329 Train Loss: 0.0005894, Val Loss: 0.0005975 +2024-11-11 21:21:24,329 Epoch 1674/2000 +2024-11-11 21:21:40,397 Current Learning Rate: 0.0069857395 +2024-11-11 21:21:40,399 Train Loss: 0.0004935, Val Loss: 0.0005900 +2024-11-11 21:21:40,399 Epoch 1675/2000 +2024-11-11 21:21:56,131 Current Learning Rate: 0.0069134172 +2024-11-11 21:21:57,193 Train Loss: 0.0004529, Val Loss: 0.0005800 +2024-11-11 21:21:57,194 Epoch 1676/2000 +2024-11-11 21:22:12,837 Current Learning Rate: 0.0068406228 +2024-11-11 21:22:13,807 Train Loss: 0.0005233, Val Loss: 0.0005721 +2024-11-11 21:22:13,808 Epoch 1677/2000 +2024-11-11 21:22:29,606 Current Learning 
Rate: 0.0067673742 +2024-11-11 21:22:30,593 Train Loss: 0.0004106, Val Loss: 0.0005661 +2024-11-11 21:22:30,594 Epoch 1678/2000 +2024-11-11 21:22:46,452 Current Learning Rate: 0.0066936896 +2024-11-11 21:22:46,452 Train Loss: 0.0004143, Val Loss: 0.0005719 +2024-11-11 21:22:46,452 Epoch 1679/2000 +2024-11-11 21:23:02,868 Current Learning Rate: 0.0066195871 +2024-11-11 21:23:02,868 Train Loss: 0.0004331, Val Loss: 0.0005670 +2024-11-11 21:23:02,869 Epoch 1680/2000 +2024-11-11 21:23:19,416 Current Learning Rate: 0.0065450850 +2024-11-11 21:23:19,416 Train Loss: 0.0004751, Val Loss: 0.0005682 +2024-11-11 21:23:19,416 Epoch 1681/2000 +2024-11-11 21:23:34,856 Current Learning Rate: 0.0064702016 +2024-11-11 21:23:35,622 Train Loss: 0.0004536, Val Loss: 0.0005595 +2024-11-11 21:23:35,622 Epoch 1682/2000 +2024-11-11 21:23:51,120 Current Learning Rate: 0.0063949555 +2024-11-11 21:23:51,121 Train Loss: 0.0004756, Val Loss: 0.0005645 +2024-11-11 21:23:51,122 Epoch 1683/2000 +2024-11-11 21:24:07,872 Current Learning Rate: 0.0063193652 +2024-11-11 21:24:07,873 Train Loss: 0.0003916, Val Loss: 0.0005648 +2024-11-11 21:24:07,873 Epoch 1684/2000 +2024-11-11 21:24:23,939 Current Learning Rate: 0.0062434494 +2024-11-11 21:24:23,940 Train Loss: 0.0004938, Val Loss: 0.0005678 +2024-11-11 21:24:23,940 Epoch 1685/2000 +2024-11-11 21:24:39,544 Current Learning Rate: 0.0061672268 +2024-11-11 21:24:39,544 Train Loss: 0.0004542, Val Loss: 0.0005658 +2024-11-11 21:24:39,544 Epoch 1686/2000 +2024-11-11 21:24:55,368 Current Learning Rate: 0.0060907162 +2024-11-11 21:24:55,368 Train Loss: 0.0004392, Val Loss: 0.0005781 +2024-11-11 21:24:55,369 Epoch 1687/2000 +2024-11-11 21:25:10,854 Current Learning Rate: 0.0060139365 +2024-11-11 21:25:10,854 Train Loss: 0.0004555, Val Loss: 0.0005663 +2024-11-11 21:25:10,855 Epoch 1688/2000 +2024-11-11 21:25:27,116 Current Learning Rate: 0.0059369066 +2024-11-11 21:25:27,943 Train Loss: 0.0004500, Val Loss: 0.0005566 +2024-11-11 21:25:27,944 Epoch 1689/2000 
+2024-11-11 21:25:42,949 Current Learning Rate: 0.0058596455 +2024-11-11 21:25:44,031 Train Loss: 0.0004700, Val Loss: 0.0005511 +2024-11-11 21:25:44,032 Epoch 1690/2000 +2024-11-11 21:26:00,235 Current Learning Rate: 0.0057821723 +2024-11-11 21:26:00,235 Train Loss: 0.0004602, Val Loss: 0.0005725 +2024-11-11 21:26:00,236 Epoch 1691/2000 +2024-11-11 21:26:15,990 Current Learning Rate: 0.0057045062 +2024-11-11 21:26:15,990 Train Loss: 0.0004776, Val Loss: 0.0005732 +2024-11-11 21:26:15,990 Epoch 1692/2000 +2024-11-11 21:26:31,978 Current Learning Rate: 0.0056266662 +2024-11-11 21:26:32,948 Train Loss: 0.0004919, Val Loss: 0.0005486 +2024-11-11 21:26:32,948 Epoch 1693/2000 +2024-11-11 21:26:48,656 Current Learning Rate: 0.0055486716 +2024-11-11 21:26:48,657 Train Loss: 0.0004299, Val Loss: 0.0005550 +2024-11-11 21:26:48,658 Epoch 1694/2000 +2024-11-11 21:27:04,898 Current Learning Rate: 0.0054705416 +2024-11-11 21:27:04,899 Train Loss: 0.0004087, Val Loss: 0.0005548 +2024-11-11 21:27:04,899 Epoch 1695/2000 +2024-11-11 21:27:20,389 Current Learning Rate: 0.0053922955 +2024-11-11 21:27:21,409 Train Loss: 0.0003815, Val Loss: 0.0005389 +2024-11-11 21:27:21,409 Epoch 1696/2000 +2024-11-11 21:27:37,184 Current Learning Rate: 0.0053139526 +2024-11-11 21:27:37,185 Train Loss: 0.0003787, Val Loss: 0.0005402 +2024-11-11 21:27:37,185 Epoch 1697/2000 +2024-11-11 21:27:53,586 Current Learning Rate: 0.0052355323 +2024-11-11 21:27:53,587 Train Loss: 0.0004286, Val Loss: 0.0005417 +2024-11-11 21:27:53,587 Epoch 1698/2000 +2024-11-11 21:28:09,170 Current Learning Rate: 0.0051570538 +2024-11-11 21:28:09,172 Train Loss: 0.0004803, Val Loss: 0.0005458 +2024-11-11 21:28:09,173 Epoch 1699/2000 +2024-11-11 21:28:25,196 Current Learning Rate: 0.0050785366 +2024-11-11 21:28:25,197 Train Loss: 0.0004299, Val Loss: 0.0005413 +2024-11-11 21:28:25,197 Epoch 1700/2000 +2024-11-11 21:28:41,941 Current Learning Rate: 0.0050000000 +2024-11-11 21:28:42,658 Train Loss: 0.0004231, Val Loss: 0.0005336 
+2024-11-11 21:28:42,658 Epoch 1701/2000 +2024-11-11 21:28:57,427 Current Learning Rate: 0.0049214634 +2024-11-11 21:28:57,427 Train Loss: 0.0004457, Val Loss: 0.0005372 +2024-11-11 21:28:57,427 Epoch 1702/2000 +2024-11-11 21:29:13,586 Current Learning Rate: 0.0048429462 +2024-11-11 21:29:13,587 Train Loss: 0.0004917, Val Loss: 0.0005428 +2024-11-11 21:29:13,587 Epoch 1703/2000 +2024-11-11 21:29:29,746 Current Learning Rate: 0.0047644677 +2024-11-11 21:29:29,747 Train Loss: 0.0004636, Val Loss: 0.0005574 +2024-11-11 21:29:29,747 Epoch 1704/2000 +2024-11-11 21:29:45,894 Current Learning Rate: 0.0046860474 +2024-11-11 21:29:45,895 Train Loss: 0.0004433, Val Loss: 0.0005445 +2024-11-11 21:29:45,895 Epoch 1705/2000 +2024-11-11 21:30:01,533 Current Learning Rate: 0.0046077045 +2024-11-11 21:30:03,920 Train Loss: 0.0004468, Val Loss: 0.0005330 +2024-11-11 21:30:03,920 Epoch 1706/2000 +2024-11-11 21:30:18,254 Current Learning Rate: 0.0045294584 +2024-11-11 21:30:18,999 Train Loss: 0.0004184, Val Loss: 0.0005270 +2024-11-11 21:30:18,999 Epoch 1707/2000 +2024-11-11 21:30:34,360 Current Learning Rate: 0.0044513284 +2024-11-11 21:30:35,097 Train Loss: 0.0004106, Val Loss: 0.0005222 +2024-11-11 21:30:35,098 Epoch 1708/2000 +2024-11-11 21:30:49,745 Current Learning Rate: 0.0043733338 +2024-11-11 21:30:50,488 Train Loss: 0.0004124, Val Loss: 0.0005208 +2024-11-11 21:30:50,488 Epoch 1709/2000 +2024-11-11 21:31:05,230 Current Learning Rate: 0.0042954938 +2024-11-11 21:31:05,231 Train Loss: 0.0004559, Val Loss: 0.0005259 +2024-11-11 21:31:05,232 Epoch 1710/2000 +2024-11-11 21:31:20,579 Current Learning Rate: 0.0042178277 +2024-11-11 21:31:20,579 Train Loss: 0.0004848, Val Loss: 0.0005336 +2024-11-11 21:31:20,579 Epoch 1711/2000 +2024-11-11 21:31:36,045 Current Learning Rate: 0.0041403545 +2024-11-11 21:31:36,045 Train Loss: 0.0004692, Val Loss: 0.0005393 +2024-11-11 21:31:36,045 Epoch 1712/2000 +2024-11-11 21:31:51,229 Current Learning Rate: 0.0040630934 +2024-11-11 21:31:51,229 
Train Loss: 0.0003951, Val Loss: 0.0005302 +2024-11-11 21:31:51,229 Epoch 1713/2000 +2024-11-11 21:32:06,839 Current Learning Rate: 0.0039860635 +2024-11-11 21:32:06,839 Train Loss: 0.0003930, Val Loss: 0.0005283 +2024-11-11 21:32:06,839 Epoch 1714/2000 +2024-11-11 21:32:22,754 Current Learning Rate: 0.0039092838 +2024-11-11 21:32:22,754 Train Loss: 0.0003887, Val Loss: 0.0005323 +2024-11-11 21:32:22,754 Epoch 1715/2000 +2024-11-11 21:32:38,545 Current Learning Rate: 0.0038327732 +2024-11-11 21:32:38,546 Train Loss: 0.0004875, Val Loss: 0.0005406 +2024-11-11 21:32:38,546 Epoch 1716/2000 +2024-11-11 21:32:53,734 Current Learning Rate: 0.0037565506 +2024-11-11 21:32:53,735 Train Loss: 0.0004535, Val Loss: 0.0005704 +2024-11-11 21:32:53,735 Epoch 1717/2000 +2024-11-11 21:33:09,333 Current Learning Rate: 0.0036806348 +2024-11-11 21:33:09,334 Train Loss: 0.0004627, Val Loss: 0.0005671 +2024-11-11 21:33:09,334 Epoch 1718/2000 +2024-11-11 21:33:25,199 Current Learning Rate: 0.0036050445 +2024-11-11 21:33:25,200 Train Loss: 0.0004753, Val Loss: 0.0005282 +2024-11-11 21:33:25,200 Epoch 1719/2000 +2024-11-11 21:33:40,572 Current Learning Rate: 0.0035297984 +2024-11-11 21:33:41,378 Train Loss: 0.0003659, Val Loss: 0.0005147 +2024-11-11 21:33:41,378 Epoch 1720/2000 +2024-11-11 21:33:55,968 Current Learning Rate: 0.0034549150 +2024-11-11 21:33:55,968 Train Loss: 0.0003476, Val Loss: 0.0005149 +2024-11-11 21:33:55,969 Epoch 1721/2000 +2024-11-11 21:34:11,759 Current Learning Rate: 0.0033804129 +2024-11-11 21:34:12,517 Train Loss: 0.0003638, Val Loss: 0.0005122 +2024-11-11 21:34:12,517 Epoch 1722/2000 +2024-11-11 21:34:27,394 Current Learning Rate: 0.0033063104 +2024-11-11 21:34:28,209 Train Loss: 0.0003518, Val Loss: 0.0005106 +2024-11-11 21:34:28,209 Epoch 1723/2000 +2024-11-11 21:34:42,940 Current Learning Rate: 0.0032326258 +2024-11-11 21:34:43,808 Train Loss: 0.0003611, Val Loss: 0.0005086 +2024-11-11 21:34:43,808 Epoch 1724/2000 +2024-11-11 21:34:58,215 Current Learning 
Rate: 0.0031593772 +2024-11-11 21:34:58,971 Train Loss: 0.0004347, Val Loss: 0.0005074 +2024-11-11 21:34:58,972 Epoch 1725/2000 +2024-11-11 21:35:13,471 Current Learning Rate: 0.0030865828 +2024-11-11 21:35:14,299 Train Loss: 0.0003929, Val Loss: 0.0005071 +2024-11-11 21:35:14,300 Epoch 1726/2000 +2024-11-11 21:35:29,438 Current Learning Rate: 0.0030142605 +2024-11-11 21:35:29,439 Train Loss: 0.0004435, Val Loss: 0.0005078 +2024-11-11 21:35:29,440 Epoch 1727/2000 +2024-11-11 21:35:45,711 Current Learning Rate: 0.0029424282 +2024-11-11 21:35:45,712 Train Loss: 0.0004152, Val Loss: 0.0005074 +2024-11-11 21:35:45,712 Epoch 1728/2000 +2024-11-11 21:36:01,409 Current Learning Rate: 0.0028711035 +2024-11-11 21:36:01,410 Train Loss: 0.0004438, Val Loss: 0.0005084 +2024-11-11 21:36:01,410 Epoch 1729/2000 +2024-11-11 21:36:16,408 Current Learning Rate: 0.0028003042 +2024-11-11 21:36:16,409 Train Loss: 0.0004133, Val Loss: 0.0005072 +2024-11-11 21:36:16,409 Epoch 1730/2000 +2024-11-11 21:36:32,347 Current Learning Rate: 0.0027300475 +2024-11-11 21:36:33,145 Train Loss: 0.0004135, Val Loss: 0.0005039 +2024-11-11 21:36:33,145 Epoch 1731/2000 +2024-11-11 21:36:48,090 Current Learning Rate: 0.0026603509 +2024-11-11 21:36:48,771 Train Loss: 0.0003828, Val Loss: 0.0005030 +2024-11-11 21:36:48,771 Epoch 1732/2000 +2024-11-11 21:37:04,353 Current Learning Rate: 0.0025912316 +2024-11-11 21:37:04,354 Train Loss: 0.0004278, Val Loss: 0.0005036 +2024-11-11 21:37:04,354 Epoch 1733/2000 +2024-11-11 21:37:20,914 Current Learning Rate: 0.0025227067 +2024-11-11 21:37:20,915 Train Loss: 0.0003527, Val Loss: 0.0005042 +2024-11-11 21:37:20,916 Epoch 1734/2000 +2024-11-11 21:37:37,279 Current Learning Rate: 0.0024547929 +2024-11-11 21:37:37,280 Train Loss: 0.0004140, Val Loss: 0.0005063 +2024-11-11 21:37:37,280 Epoch 1735/2000 +2024-11-11 21:37:53,847 Current Learning Rate: 0.0023875072 +2024-11-11 21:37:54,817 Train Loss: 0.0004297, Val Loss: 0.0005015 +2024-11-11 21:37:54,818 Epoch 1736/2000 
+2024-11-11 21:38:11,178 Current Learning Rate: 0.0023208660 +2024-11-11 21:38:11,891 Train Loss: 0.0004593, Val Loss: 0.0004956 +2024-11-11 21:38:11,891 Epoch 1737/2000 +2024-11-11 21:38:26,214 Current Learning Rate: 0.0022548859 +2024-11-11 21:38:27,181 Train Loss: 0.0004213, Val Loss: 0.0004920 +2024-11-11 21:38:27,182 Epoch 1738/2000 +2024-11-11 21:38:43,553 Current Learning Rate: 0.0021895831 +2024-11-11 21:38:43,554 Train Loss: 0.0003847, Val Loss: 0.0004928 +2024-11-11 21:38:43,554 Epoch 1739/2000 +2024-11-11 21:38:59,299 Current Learning Rate: 0.0021249737 +2024-11-11 21:39:00,058 Train Loss: 0.0003771, Val Loss: 0.0004876 +2024-11-11 21:39:00,058 Epoch 1740/2000 +2024-11-11 21:39:14,941 Current Learning Rate: 0.0020610737 +2024-11-11 21:39:15,759 Train Loss: 0.0003475, Val Loss: 0.0004875 +2024-11-11 21:39:15,759 Epoch 1741/2000 +2024-11-11 21:39:30,817 Current Learning Rate: 0.0019978989 +2024-11-11 21:39:31,683 Train Loss: 0.0003471, Val Loss: 0.0004858 +2024-11-11 21:39:31,684 Epoch 1742/2000 +2024-11-11 21:39:47,134 Current Learning Rate: 0.0019354647 +2024-11-11 21:39:48,120 Train Loss: 0.0003708, Val Loss: 0.0004855 +2024-11-11 21:39:48,121 Epoch 1743/2000 +2024-11-11 21:40:04,333 Current Learning Rate: 0.0018737867 +2024-11-11 21:40:04,334 Train Loss: 0.0003876, Val Loss: 0.0004855 +2024-11-11 21:40:04,335 Epoch 1744/2000 +2024-11-11 21:40:19,883 Current Learning Rate: 0.0018128801 +2024-11-11 21:40:20,653 Train Loss: 0.0003427, Val Loss: 0.0004844 +2024-11-11 21:40:20,654 Epoch 1745/2000 +2024-11-11 21:40:35,474 Current Learning Rate: 0.0017527598 +2024-11-11 21:40:36,222 Train Loss: 0.0004239, Val Loss: 0.0004835 +2024-11-11 21:40:36,222 Epoch 1746/2000 +2024-11-11 21:40:51,038 Current Learning Rate: 0.0016934407 +2024-11-11 21:40:51,771 Train Loss: 0.0003626, Val Loss: 0.0004820 +2024-11-11 21:40:51,772 Epoch 1747/2000 +2024-11-11 21:41:06,709 Current Learning Rate: 0.0016349374 +2024-11-11 21:41:07,441 Train Loss: 0.0003723, Val Loss: 0.0004810 
+2024-11-11 21:41:07,441 Epoch 1748/2000 +2024-11-11 21:41:22,329 Current Learning Rate: 0.0015772645 +2024-11-11 21:41:23,205 Train Loss: 0.0003645, Val Loss: 0.0004809 +2024-11-11 21:41:23,206 Epoch 1749/2000 +2024-11-11 21:41:38,018 Current Learning Rate: 0.0015204360 +2024-11-11 21:41:38,019 Train Loss: 0.0003917, Val Loss: 0.0004810 +2024-11-11 21:41:38,019 Epoch 1750/2000 +2024-11-11 21:41:53,584 Current Learning Rate: 0.0014644661 +2024-11-11 21:41:53,584 Train Loss: 0.0003951, Val Loss: 0.0004822 +2024-11-11 21:41:53,585 Epoch 1751/2000 +2024-11-11 21:42:09,970 Current Learning Rate: 0.0014093685 +2024-11-11 21:42:11,020 Train Loss: 0.0003900, Val Loss: 0.0004800 +2024-11-11 21:42:11,020 Epoch 1752/2000 +2024-11-11 21:42:26,546 Current Learning Rate: 0.0013551569 +2024-11-11 21:42:27,473 Train Loss: 0.0003723, Val Loss: 0.0004783 +2024-11-11 21:42:27,474 Epoch 1753/2000 +2024-11-11 21:42:43,426 Current Learning Rate: 0.0013018445 +2024-11-11 21:42:44,194 Train Loss: 0.0003705, Val Loss: 0.0004778 +2024-11-11 21:42:44,194 Epoch 1754/2000 +2024-11-11 21:42:58,864 Current Learning Rate: 0.0012494447 +2024-11-11 21:42:59,617 Train Loss: 0.0004015, Val Loss: 0.0004778 +2024-11-11 21:42:59,617 Epoch 1755/2000 +2024-11-11 21:43:15,679 Current Learning Rate: 0.0011979702 +2024-11-11 21:43:16,399 Train Loss: 0.0003422, Val Loss: 0.0004773 +2024-11-11 21:43:16,399 Epoch 1756/2000 +2024-11-11 21:43:31,117 Current Learning Rate: 0.0011474338 +2024-11-11 21:43:31,118 Train Loss: 0.0003694, Val Loss: 0.0004775 +2024-11-11 21:43:31,119 Epoch 1757/2000 +2024-11-11 21:43:46,465 Current Learning Rate: 0.0010978480 +2024-11-11 21:43:47,262 Train Loss: 0.0003533, Val Loss: 0.0004769 +2024-11-11 21:43:47,262 Epoch 1758/2000 +2024-11-11 21:44:01,659 Current Learning Rate: 0.0010492249 +2024-11-11 21:44:03,839 Train Loss: 0.0003803, Val Loss: 0.0004760 +2024-11-11 21:44:03,839 Epoch 1759/2000 +2024-11-11 21:44:18,102 Current Learning Rate: 0.0010015767 +2024-11-11 21:44:18,876 
Train Loss: 0.0003702, Val Loss: 0.0004746 +2024-11-11 21:44:18,876 Epoch 1760/2000 +2024-11-11 21:44:33,367 Current Learning Rate: 0.0009549150 +2024-11-11 21:44:34,092 Train Loss: 0.0003760, Val Loss: 0.0004743 +2024-11-11 21:44:34,092 Epoch 1761/2000 +2024-11-11 21:44:48,542 Current Learning Rate: 0.0009092514 +2024-11-11 21:44:48,543 Train Loss: 0.0005291, Val Loss: 0.0004754 +2024-11-11 21:44:48,543 Epoch 1762/2000 +2024-11-11 21:45:04,214 Current Learning Rate: 0.0008645971 +2024-11-11 21:45:05,031 Train Loss: 0.0003119, Val Loss: 0.0004739 +2024-11-11 21:45:05,032 Epoch 1763/2000 +2024-11-11 21:45:20,482 Current Learning Rate: 0.0008209632 +2024-11-11 21:45:21,257 Train Loss: 0.0003379, Val Loss: 0.0004735 +2024-11-11 21:45:21,258 Epoch 1764/2000 +2024-11-11 21:45:36,295 Current Learning Rate: 0.0007783604 +2024-11-11 21:45:37,410 Train Loss: 0.0003394, Val Loss: 0.0004732 +2024-11-11 21:45:37,410 Epoch 1765/2000 +2024-11-11 21:45:53,458 Current Learning Rate: 0.0007367992 +2024-11-11 21:45:54,230 Train Loss: 0.0003866, Val Loss: 0.0004730 +2024-11-11 21:45:54,230 Epoch 1766/2000 +2024-11-11 21:46:08,733 Current Learning Rate: 0.0006962899 +2024-11-11 21:46:09,634 Train Loss: 0.0003961, Val Loss: 0.0004728 +2024-11-11 21:46:09,635 Epoch 1767/2000 +2024-11-11 21:46:25,404 Current Learning Rate: 0.0006568424 +2024-11-11 21:46:26,165 Train Loss: 0.0003480, Val Loss: 0.0004724 +2024-11-11 21:46:26,165 Epoch 1768/2000 +2024-11-11 21:46:41,317 Current Learning Rate: 0.0006184666 +2024-11-11 21:46:42,122 Train Loss: 0.0003090, Val Loss: 0.0004720 +2024-11-11 21:46:42,122 Epoch 1769/2000 +2024-11-11 21:46:57,320 Current Learning Rate: 0.0005811718 +2024-11-11 21:46:57,321 Train Loss: 0.0004402, Val Loss: 0.0004724 +2024-11-11 21:46:57,321 Epoch 1770/2000 +2024-11-11 21:47:12,797 Current Learning Rate: 0.0005449674 +2024-11-11 21:47:12,797 Train Loss: 0.0004016, Val Loss: 0.0004722 +2024-11-11 21:47:12,798 Epoch 1771/2000 +2024-11-11 21:47:29,463 Current Learning 
Rate: 0.0005098621 +2024-11-11 21:47:30,420 Train Loss: 0.0003784, Val Loss: 0.0004716 +2024-11-11 21:47:30,420 Epoch 1772/2000 +2024-11-11 21:47:37,319 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-11 21:47:45,803 Current Learning Rate: 0.0004758647 +2024-11-11 21:47:46,777 Train Loss: 0.0003402, Val Loss: 0.0004713 +2024-11-11 21:47:46,782 Epoch 1773/2000 +2024-11-11 21:47:59,503 Loading best model from checkpoint. +2024-11-11 21:48:01,961 Current Learning Rate: 0.0004429836 +2024-11-11 21:48:04,354 Train Loss: 0.0003396, Val Loss: 0.0004709 +2024-11-11 21:48:04,357 Epoch 1774/2000 +2024-11-11 21:48:17,190 Testing completed and best model saved. +2024-11-11 21:48:19,262 Current Learning Rate: 0.0004112269 +2024-11-11 21:48:20,444 Train Loss: 0.0003725, Val Loss: 0.0004708 +2024-11-11 21:48:20,445 Epoch 1775/2000 +2024-11-11 21:48:35,956 Current Learning Rate: 0.0003806023 +2024-11-11 21:48:37,258 Train Loss: 0.0003322, Val Loss: 0.0004707 +2024-11-11 21:48:37,259 Epoch 1776/2000 +2024-11-11 21:48:53,900 Current Learning Rate: 0.0003511176 +2024-11-11 21:48:54,819 Train Loss: 0.0003374, Val Loss: 0.0004704 +2024-11-11 21:48:54,819 Epoch 1777/2000 +2024-11-11 21:49:09,992 Current Learning Rate: 0.0003227798 +2024-11-11 21:49:10,980 Train Loss: 0.0003443, Val Loss: 0.0004703 +2024-11-11 21:49:10,980 Epoch 1778/2000 +2024-11-11 21:49:26,425 Current Learning Rate: 0.0002955962 +2024-11-11 21:49:27,579 Train Loss: 0.0003375, Val Loss: 0.0004701 +2024-11-11 21:49:27,580 Epoch 1779/2000 +2024-11-11 21:49:44,290 Current Learning Rate: 0.0002695732 +2024-11-11 21:49:45,502 Train Loss: 0.0003066, Val Loss: 0.0004700 +2024-11-11 21:49:45,502 Epoch 1780/2000 +2024-11-11 21:50:01,038 Current Learning Rate: 0.0002447174 +2024-11-11 21:50:03,590 Train Loss: 0.0003402, Val Loss: 0.0004699 +2024-11-11 21:50:03,590 Epoch 1781/2000 +2024-11-11 21:50:18,791 Current Learning Rate: 0.0002210349 +2024-11-11 21:50:18,792 Train Loss: 0.0004048, Val Loss: 0.0004700 
+2024-11-11 21:50:18,792 Epoch 1782/2000 +2024-11-11 21:50:34,836 Current Learning Rate: 0.0001985316 +2024-11-11 21:50:35,663 Train Loss: 0.0004596, Val Loss: 0.0004697 +2024-11-11 21:50:35,663 Epoch 1783/2000 +2024-11-11 21:50:50,740 Current Learning Rate: 0.0001772129 +2024-11-11 21:50:51,674 Train Loss: 0.0003570, Val Loss: 0.0004697 +2024-11-11 21:50:51,675 Epoch 1784/2000 +2024-11-11 21:51:07,094 Current Learning Rate: 0.0001570842 +2024-11-11 21:51:08,197 Train Loss: 0.0003369, Val Loss: 0.0004694 +2024-11-11 21:51:08,198 Epoch 1785/2000 +2024-11-11 21:51:24,013 Current Learning Rate: 0.0001381504 +2024-11-11 21:51:25,004 Train Loss: 0.0003301, Val Loss: 0.0004694 +2024-11-11 21:51:25,004 Epoch 1786/2000 +2024-11-11 21:51:41,129 Current Learning Rate: 0.0001204162 +2024-11-11 21:51:42,124 Train Loss: 0.0003317, Val Loss: 0.0004694 +2024-11-11 21:51:42,125 Epoch 1787/2000 +2024-11-11 21:51:58,458 Current Learning Rate: 0.0001038859 +2024-11-11 21:51:59,342 Train Loss: 0.0003683, Val Loss: 0.0004693 +2024-11-11 21:51:59,343 Epoch 1788/2000 +2024-11-11 21:52:14,208 Current Learning Rate: 0.0000885637 +2024-11-11 21:52:14,209 Train Loss: 0.0003520, Val Loss: 0.0004693 +2024-11-11 21:52:14,209 Epoch 1789/2000 +2024-11-11 21:52:30,902 Current Learning Rate: 0.0000744534 +2024-11-11 21:52:31,986 Train Loss: 0.0003830, Val Loss: 0.0004693 +2024-11-11 21:52:31,986 Epoch 1790/2000 +2024-11-11 21:52:47,176 Current Learning Rate: 0.0000615583 +2024-11-11 21:52:48,119 Train Loss: 0.0003614, Val Loss: 0.0004692 +2024-11-11 21:52:48,120 Epoch 1791/2000 +2024-11-11 21:53:03,705 Current Learning Rate: 0.0000498817 +2024-11-11 21:53:03,705 Train Loss: 0.0003523, Val Loss: 0.0004692 +2024-11-11 21:53:03,706 Epoch 1792/2000 +2024-11-11 21:53:20,146 Current Learning Rate: 0.0000394265 +2024-11-11 21:53:21,130 Train Loss: 0.0003641, Val Loss: 0.0004692 +2024-11-11 21:53:21,131 Epoch 1793/2000 +2024-11-11 21:53:36,524 Current Learning Rate: 0.0000301952 +2024-11-11 21:53:36,525 
Train Loss: 0.0003091, Val Loss: 0.0004692 +2024-11-11 21:53:36,525 Epoch 1794/2000 +2024-11-11 21:53:52,783 Current Learning Rate: 0.0000221902 +2024-11-11 21:53:52,784 Train Loss: 0.0003335, Val Loss: 0.0004692 +2024-11-11 21:53:52,784 Epoch 1795/2000 +2024-11-11 21:54:09,994 Current Learning Rate: 0.0000154133 +2024-11-11 21:54:09,995 Train Loss: 0.0003923, Val Loss: 0.0004692 +2024-11-11 21:54:09,996 Epoch 1796/2000 +2024-11-11 21:54:26,341 Current Learning Rate: 0.0000098664 +2024-11-11 21:54:27,269 Train Loss: 0.0003742, Val Loss: 0.0004692 +2024-11-11 21:54:27,269 Epoch 1797/2000 +2024-11-11 21:54:42,567 Current Learning Rate: 0.0000055506 +2024-11-11 21:54:42,567 Train Loss: 0.0003479, Val Loss: 0.0004692 +2024-11-11 21:54:42,568 Epoch 1798/2000 +2024-11-11 21:54:58,721 Current Learning Rate: 0.0000024672 +2024-11-11 21:54:59,630 Train Loss: 0.0003606, Val Loss: 0.0004691 +2024-11-11 21:54:59,631 Epoch 1799/2000 +2024-11-11 21:55:15,011 Current Learning Rate: 0.0000006168 +2024-11-11 21:55:15,012 Train Loss: 0.0004004, Val Loss: 0.0004692 +2024-11-11 21:55:15,013 Epoch 1800/2000 +2024-11-11 21:55:31,238 Current Learning Rate: 0.0000000000 +2024-11-11 21:55:31,239 Train Loss: 0.0003904, Val Loss: 0.0004692 +2024-11-11 21:55:31,239 Epoch 1801/2000 +2024-11-11 21:55:47,692 Current Learning Rate: 0.0000006168 +2024-11-11 21:55:47,693 Train Loss: 0.0003716, Val Loss: 0.0004691 +2024-11-11 21:55:47,693 Epoch 1802/2000 +2024-11-11 21:56:03,571 Current Learning Rate: 0.0000024672 +2024-11-11 21:56:03,572 Train Loss: 0.0004168, Val Loss: 0.0004692 +2024-11-11 21:56:03,572 Epoch 1803/2000 +2024-11-11 21:56:19,536 Current Learning Rate: 0.0000055506 +2024-11-11 21:56:19,536 Train Loss: 0.0003723, Val Loss: 0.0004692 +2024-11-11 21:56:19,537 Epoch 1804/2000 +2024-11-11 21:56:36,065 Current Learning Rate: 0.0000098664 +2024-11-11 21:56:36,066 Train Loss: 0.0003215, Val Loss: 0.0004691 +2024-11-11 21:56:36,066 Epoch 1805/2000 +2024-11-11 21:56:52,175 Current Learning 
Rate: 0.0000154133 +2024-11-11 21:56:53,096 Train Loss: 0.0003321, Val Loss: 0.0004691 +2024-11-11 21:56:53,097 Epoch 1806/2000 +2024-11-11 21:57:08,682 Current Learning Rate: 0.0000221902 +2024-11-11 21:57:09,561 Train Loss: 0.0003177, Val Loss: 0.0004691 +2024-11-11 21:57:09,561 Epoch 1807/2000 +2024-11-11 21:57:24,711 Current Learning Rate: 0.0000301952 +2024-11-11 21:57:24,712 Train Loss: 0.0003665, Val Loss: 0.0004692 +2024-11-11 21:57:24,712 Epoch 1808/2000 +2024-11-11 21:57:41,111 Current Learning Rate: 0.0000394265 +2024-11-11 21:57:41,111 Train Loss: 0.0003340, Val Loss: 0.0004692 +2024-11-11 21:57:41,112 Epoch 1809/2000 +2024-11-11 21:57:58,362 Current Learning Rate: 0.0000498817 +2024-11-11 21:57:58,363 Train Loss: 0.0003851, Val Loss: 0.0004692 +2024-11-11 21:57:58,363 Epoch 1810/2000 +2024-11-11 21:58:13,926 Current Learning Rate: 0.0000615583 +2024-11-11 21:58:14,822 Train Loss: 0.0003246, Val Loss: 0.0004691 +2024-11-11 21:58:14,822 Epoch 1811/2000 +2024-11-11 21:58:29,888 Current Learning Rate: 0.0000744534 +2024-11-11 21:58:30,771 Train Loss: 0.0003821, Val Loss: 0.0004691 +2024-11-11 21:58:30,771 Epoch 1812/2000 +2024-11-11 21:58:46,192 Current Learning Rate: 0.0000885637 +2024-11-11 21:58:46,193 Train Loss: 0.0004197, Val Loss: 0.0004692 +2024-11-11 21:58:46,194 Epoch 1813/2000 +2024-11-11 21:59:02,290 Current Learning Rate: 0.0001038859 +2024-11-11 21:59:02,291 Train Loss: 0.0003423, Val Loss: 0.0004692 +2024-11-11 21:59:02,291 Epoch 1814/2000 +2024-11-11 21:59:19,378 Current Learning Rate: 0.0001204162 +2024-11-11 21:59:19,379 Train Loss: 0.0003315, Val Loss: 0.0004692 +2024-11-11 21:59:19,379 Epoch 1815/2000 +2024-11-11 21:59:35,674 Current Learning Rate: 0.0001381504 +2024-11-11 21:59:35,675 Train Loss: 0.0003277, Val Loss: 0.0004692 +2024-11-11 21:59:35,675 Epoch 1816/2000 +2024-11-11 21:59:51,596 Current Learning Rate: 0.0001570842 +2024-11-11 21:59:51,597 Train Loss: 0.0004323, Val Loss: 0.0004693 +2024-11-11 21:59:51,597 Epoch 1817/2000 
+2024-11-11 22:00:07,077 Current Learning Rate: 0.0001772129 +2024-11-11 22:00:07,078 Train Loss: 0.0003093, Val Loss: 0.0004693 +2024-11-11 22:00:07,078 Epoch 1818/2000 +2024-11-11 22:00:23,129 Current Learning Rate: 0.0001985316 +2024-11-11 22:00:23,129 Train Loss: 0.0003487, Val Loss: 0.0004693 +2024-11-11 22:00:23,130 Epoch 1819/2000 +2024-11-11 22:00:39,661 Current Learning Rate: 0.0002210349 +2024-11-11 22:00:39,662 Train Loss: 0.0003095, Val Loss: 0.0004694 +2024-11-11 22:00:39,663 Epoch 1820/2000 +2024-11-11 22:00:55,428 Current Learning Rate: 0.0002447174 +2024-11-11 22:00:55,428 Train Loss: 0.0003606, Val Loss: 0.0004694 +2024-11-11 22:00:55,428 Epoch 1821/2000 +2024-11-11 22:01:12,829 Current Learning Rate: 0.0002695732 +2024-11-11 22:01:12,830 Train Loss: 0.0003425, Val Loss: 0.0004694 +2024-11-11 22:01:12,830 Epoch 1822/2000 +2024-11-11 22:01:28,743 Current Learning Rate: 0.0002955962 +2024-11-11 22:01:28,744 Train Loss: 0.0003432, Val Loss: 0.0004694 +2024-11-11 22:01:28,744 Epoch 1823/2000 +2024-11-11 22:01:45,759 Current Learning Rate: 0.0003227798 +2024-11-11 22:01:45,760 Train Loss: 0.0003854, Val Loss: 0.0004697 +2024-11-11 22:01:45,760 Epoch 1824/2000 +2024-11-11 22:02:01,192 Current Learning Rate: 0.0003511176 +2024-11-11 22:02:01,194 Train Loss: 0.0003785, Val Loss: 0.0004697 +2024-11-11 22:02:01,195 Epoch 1825/2000 +2024-11-11 22:02:16,697 Current Learning Rate: 0.0003806023 +2024-11-11 22:02:16,698 Train Loss: 0.0003138, Val Loss: 0.0004695 +2024-11-11 22:02:16,698 Epoch 1826/2000 +2024-11-11 22:02:33,139 Current Learning Rate: 0.0004112269 +2024-11-11 22:02:33,140 Train Loss: 0.0003309, Val Loss: 0.0004696 +2024-11-11 22:02:33,140 Epoch 1827/2000 +2024-11-11 22:02:49,142 Current Learning Rate: 0.0004429836 +2024-11-11 22:02:49,142 Train Loss: 0.0003257, Val Loss: 0.0004697 +2024-11-11 22:02:49,142 Epoch 1828/2000 +2024-11-11 22:03:05,205 Current Learning Rate: 0.0004758647 +2024-11-11 22:03:05,205 Train Loss: 0.0003437, Val Loss: 0.0004696 
+2024-11-11 22:03:05,205 Epoch 1829/2000 +2024-11-11 22:03:21,199 Current Learning Rate: 0.0005098621 +2024-11-11 22:03:21,199 Train Loss: 0.0003767, Val Loss: 0.0004696 +2024-11-11 22:03:21,200 Epoch 1830/2000 +2024-11-11 22:03:37,582 Current Learning Rate: 0.0005449674 +2024-11-11 22:03:37,583 Train Loss: 0.0003757, Val Loss: 0.0004699 +2024-11-11 22:03:37,584 Epoch 1831/2000 +2024-11-11 22:03:53,689 Current Learning Rate: 0.0005811718 +2024-11-11 22:03:53,689 Train Loss: 0.0003557, Val Loss: 0.0004697 +2024-11-11 22:03:53,689 Epoch 1832/2000 +2024-11-11 22:04:10,567 Current Learning Rate: 0.0006184666 +2024-11-11 22:04:10,569 Train Loss: 0.0003385, Val Loss: 0.0004698 +2024-11-11 22:04:10,572 Epoch 1833/2000 +2024-11-11 22:04:26,485 Current Learning Rate: 0.0006568424 +2024-11-11 22:04:26,485 Train Loss: 0.0003319, Val Loss: 0.0004700 +2024-11-11 22:04:26,486 Epoch 1834/2000 +2024-11-11 22:04:42,683 Current Learning Rate: 0.0006962899 +2024-11-11 22:04:42,683 Train Loss: 0.0003140, Val Loss: 0.0004696 +2024-11-11 22:04:42,683 Epoch 1835/2000 +2024-11-11 22:04:43,134 Added key: store_based_barrier_key:1 to store for rank: 0 +2024-11-11 22:04:59,269 Current Learning Rate: 0.0007367992 +2024-11-11 22:04:59,271 Train Loss: 0.0003429, Val Loss: 0.0004696 +2024-11-11 22:04:59,272 Epoch 1836/2000 +2024-11-11 22:05:17,200 Testing completed and best model saved. 
+2024-11-11 22:05:16,182 Train Loss: 0.0004232, Val Loss: 0.0004709 +2024-11-11 22:05:16,183 Epoch 1837/2000 +2024-11-11 22:05:32,656 Current Learning Rate: 0.0008209632 +2024-11-11 22:05:32,656 Train Loss: 0.0003067, Val Loss: 0.0004707 +2024-11-11 22:05:32,657 Epoch 1838/2000 +2024-11-11 22:05:48,734 Current Learning Rate: 0.0008645971 +2024-11-11 22:05:48,734 Train Loss: 0.0003915, Val Loss: 0.0004725 +2024-11-11 22:05:48,734 Epoch 1839/2000 +2024-11-11 22:06:04,855 Current Learning Rate: 0.0009092514 +2024-11-11 22:06:04,856 Train Loss: 0.0003546, Val Loss: 0.0004736 +2024-11-11 22:06:04,856 Epoch 1840/2000 +2024-11-11 22:06:20,951 Current Learning Rate: 0.0009549150 +2024-11-11 22:06:20,952 Train Loss: 0.0003297, Val Loss: 0.0004727 +2024-11-11 22:06:20,952 Epoch 1841/2000 +2024-11-11 22:06:38,754 Current Learning Rate: 0.0010015767 +2024-11-11 22:06:38,755 Train Loss: 0.0003348, Val Loss: 0.0004729 +2024-11-11 22:06:38,755 Epoch 1842/2000 +2024-11-11 22:06:54,006 Current Learning Rate: 0.0010492249 +2024-11-11 22:06:54,006 Train Loss: 0.0003834, Val Loss: 0.0004737 +2024-11-11 22:06:54,006 Epoch 1843/2000 +2024-11-11 22:07:10,579 Current Learning Rate: 0.0010978480 +2024-11-11 22:07:10,580 Train Loss: 0.0004457, Val Loss: 0.0004777 +2024-11-11 22:07:10,580 Epoch 1844/2000 +2024-11-11 22:07:27,270 Current Learning Rate: 0.0011474338 +2024-11-11 22:07:27,271 Train Loss: 0.0004340, Val Loss: 0.0004838 +2024-11-11 22:07:27,271 Epoch 1845/2000 +2024-11-11 22:07:43,160 Current Learning Rate: 0.0011979702 +2024-11-11 22:07:43,164 Train Loss: 0.0003392, Val Loss: 0.0004817 +2024-11-11 22:07:43,165 Epoch 1846/2000 +2024-11-11 22:08:00,247 Current Learning Rate: 0.0012494447 +2024-11-11 22:08:00,248 Train Loss: 0.0003844, Val Loss: 0.0004774 +2024-11-11 22:08:00,248 Epoch 1847/2000 +2024-11-11 22:08:15,871 Current Learning Rate: 0.0013018445 +2024-11-11 22:08:15,871 Train Loss: 0.0003830, Val Loss: 0.0004773 +2024-11-11 22:08:15,871 Epoch 1848/2000 +2024-11-11 22:08:31,385 
Current Learning Rate: 0.0013551569 +2024-11-11 22:08:31,386 Train Loss: 0.0003410, Val Loss: 0.0004737 +2024-11-11 22:08:31,386 Epoch 1849/2000 +2024-11-11 22:08:47,358 Current Learning Rate: 0.0014093685 +2024-11-11 22:08:47,359 Train Loss: 0.0003742, Val Loss: 0.0004718 +2024-11-11 22:08:47,359 Epoch 1850/2000 +2024-11-11 22:09:02,864 Current Learning Rate: 0.0014644661 +2024-11-11 22:09:02,865 Train Loss: 0.0003540, Val Loss: 0.0004720 +2024-11-11 22:09:02,865 Epoch 1851/2000 +2024-11-11 22:09:18,445 Current Learning Rate: 0.0015204360 +2024-11-11 22:09:18,446 Train Loss: 0.0003742, Val Loss: 0.0004732 +2024-11-11 22:09:18,446 Epoch 1852/2000 +2024-11-11 22:09:33,902 Current Learning Rate: 0.0015772645 +2024-11-11 22:09:33,903 Train Loss: 0.0003416, Val Loss: 0.0004722 +2024-11-11 22:09:33,903 Epoch 1853/2000 +2024-11-11 22:09:50,443 Current Learning Rate: 0.0016349374 +2024-11-11 22:09:50,444 Train Loss: 0.0003718, Val Loss: 0.0004713 +2024-11-11 22:09:50,444 Epoch 1854/2000 +2024-11-11 22:10:05,969 Current Learning Rate: 0.0016934407 +2024-11-11 22:10:05,970 Train Loss: 0.0003528, Val Loss: 0.0004708 +2024-11-11 22:10:05,970 Epoch 1855/2000 +2024-11-11 22:10:21,652 Current Learning Rate: 0.0017527598 +2024-11-11 22:10:21,653 Train Loss: 0.0004123, Val Loss: 0.0004723 +2024-11-11 22:10:21,653 Epoch 1856/2000 +2024-11-11 22:10:38,704 Current Learning Rate: 0.0018128801 +2024-11-11 22:10:38,705 Train Loss: 0.0003573, Val Loss: 0.0004761 +2024-11-11 22:10:38,705 Epoch 1857/2000 +2024-11-11 22:10:54,696 Current Learning Rate: 0.0018737867 +2024-11-11 22:10:54,698 Train Loss: 0.0003185, Val Loss: 0.0004699 +2024-11-11 22:10:54,699 Epoch 1858/2000 +2024-11-11 22:11:11,148 Current Learning Rate: 0.0019354647 +2024-11-11 22:11:11,972 Train Loss: 0.0003121, Val Loss: 0.0004688 +2024-11-11 22:11:11,973 Epoch 1859/2000 +2024-11-11 22:11:27,466 Current Learning Rate: 0.0019978989 +2024-11-11 22:11:27,467 Train Loss: 0.0003598, Val Loss: 0.0004757 +2024-11-11 22:11:27,468 
Epoch 1860/2000 +2024-11-11 22:11:43,767 Current Learning Rate: 0.0020610737 +2024-11-11 22:11:43,768 Train Loss: 0.0003094, Val Loss: 0.0004693 +2024-11-11 22:11:43,768 Epoch 1861/2000 +2024-11-11 22:11:59,732 Current Learning Rate: 0.0021249737 +2024-11-11 22:11:59,732 Train Loss: 0.0004457, Val Loss: 0.0005119 +2024-11-11 22:11:59,732 Epoch 1862/2000 +2024-11-11 22:12:15,595 Current Learning Rate: 0.0021895831 +2024-11-11 22:12:15,596 Train Loss: 0.0003953, Val Loss: 0.0004799 +2024-11-11 22:12:15,596 Epoch 1863/2000 +2024-11-11 22:12:32,638 Current Learning Rate: 0.0022548859 +2024-11-11 22:12:32,639 Train Loss: 0.0003458, Val Loss: 0.0004753 +2024-11-11 22:12:32,639 Epoch 1864/2000 +2024-11-11 22:12:48,851 Current Learning Rate: 0.0023208660 +2024-11-11 22:12:48,852 Train Loss: 0.0003620, Val Loss: 0.0004777 +2024-11-11 22:12:48,852 Epoch 1865/2000 +2024-11-11 22:13:04,936 Current Learning Rate: 0.0023875072 +2024-11-11 22:13:04,936 Train Loss: 0.0003907, Val Loss: 0.0004786 +2024-11-11 22:13:04,937 Epoch 1866/2000 +2024-11-11 22:13:22,147 Current Learning Rate: 0.0024547929 +2024-11-11 22:13:22,147 Train Loss: 0.0003682, Val Loss: 0.0004740 +2024-11-11 22:13:22,148 Epoch 1867/2000 +2024-11-11 22:13:37,714 Current Learning Rate: 0.0025227067 +2024-11-11 22:13:37,715 Train Loss: 0.0004295, Val Loss: 0.0004764 +2024-11-11 22:13:37,715 Epoch 1868/2000 +2024-11-11 22:13:54,690 Current Learning Rate: 0.0025912316 +2024-11-11 22:13:54,691 Train Loss: 0.0003818, Val Loss: 0.0004751 +2024-11-11 22:13:54,691 Epoch 1869/2000 +2024-11-11 22:14:11,115 Current Learning Rate: 0.0026603509 +2024-11-11 22:14:11,115 Train Loss: 0.0003718, Val Loss: 0.0004788 +2024-11-11 22:14:11,115 Epoch 1870/2000 +2024-11-11 22:14:27,353 Current Learning Rate: 0.0027300475 +2024-11-11 22:14:27,354 Train Loss: 0.0003444, Val Loss: 0.0004801 +2024-11-11 22:14:27,354 Epoch 1871/2000 +2024-11-11 22:14:43,008 Current Learning Rate: 0.0028003042 +2024-11-11 22:14:43,009 Train Loss: 0.0004120, Val 
Loss: 0.0004983 +2024-11-11 22:14:43,009 Epoch 1872/2000 +2024-11-11 22:14:59,398 Current Learning Rate: 0.0028711035 +2024-11-11 22:14:59,399 Train Loss: 0.0003376, Val Loss: 0.0004895 +2024-11-11 22:14:59,399 Epoch 1873/2000 +2024-11-11 22:15:15,467 Current Learning Rate: 0.0029424282 +2024-11-11 22:15:15,468 Train Loss: 0.0004411, Val Loss: 0.0005521 +2024-11-11 22:15:15,468 Epoch 1874/2000 +2024-11-11 22:15:31,189 Current Learning Rate: 0.0030142605 +2024-11-11 22:15:31,190 Train Loss: 0.0003611, Val Loss: 0.0004960 +2024-11-11 22:15:31,190 Epoch 1875/2000 +2024-11-11 22:15:47,235 Current Learning Rate: 0.0030865828 +2024-11-11 22:15:47,236 Train Loss: 0.0003937, Val Loss: 0.0004952 +2024-11-11 22:15:47,236 Epoch 1876/2000 +2024-11-11 22:16:03,671 Current Learning Rate: 0.0031593772 +2024-11-11 22:16:03,672 Train Loss: 0.0004007, Val Loss: 0.0005017 +2024-11-11 22:16:03,672 Epoch 1877/2000 +2024-11-11 22:16:19,734 Current Learning Rate: 0.0032326258 +2024-11-11 22:16:19,735 Train Loss: 0.0003837, Val Loss: 0.0005004 +2024-11-11 22:16:19,736 Epoch 1878/2000 +2024-11-11 22:16:35,863 Current Learning Rate: 0.0033063104 +2024-11-11 22:16:35,864 Train Loss: 0.0003563, Val Loss: 0.0004938 +2024-11-11 22:16:35,864 Epoch 1879/2000 +2024-11-11 22:16:51,733 Current Learning Rate: 0.0033804129 +2024-11-11 22:16:51,734 Train Loss: 0.0003751, Val Loss: 0.0004927 +2024-11-11 22:16:51,734 Epoch 1880/2000 +2024-11-11 22:17:07,406 Current Learning Rate: 0.0034549150 +2024-11-11 22:17:07,406 Train Loss: 0.0003335, Val Loss: 0.0005016 +2024-11-11 22:17:07,406 Epoch 1881/2000 +2024-11-11 22:17:23,536 Current Learning Rate: 0.0035297984 +2024-11-11 22:17:23,537 Train Loss: 0.0003847, Val Loss: 0.0004856 +2024-11-11 22:17:23,538 Epoch 1882/2000 +2024-11-11 22:17:40,039 Current Learning Rate: 0.0036050445 +2024-11-11 22:17:40,040 Train Loss: 0.0003872, Val Loss: 0.0005008 +2024-11-11 22:17:40,040 Epoch 1883/2000 +2024-11-11 22:17:56,607 Current Learning Rate: 0.0036806348 +2024-11-11 
22:17:56,608 Train Loss: 0.0004442, Val Loss: 0.0004979 +2024-11-11 22:17:56,608 Epoch 1884/2000 +2024-11-11 22:18:12,168 Current Learning Rate: 0.0037565506 +2024-11-11 22:18:12,170 Train Loss: 0.0004071, Val Loss: 0.0004930 +2024-11-11 22:18:12,170 Epoch 1885/2000 +2024-11-11 22:18:28,927 Current Learning Rate: 0.0038327732 +2024-11-11 22:18:28,927 Train Loss: 0.0003319, Val Loss: 0.0004986 +2024-11-11 22:18:28,928 Epoch 1886/2000 +2024-11-11 22:18:44,511 Current Learning Rate: 0.0039092838 +2024-11-11 22:18:44,511 Train Loss: 0.0003312, Val Loss: 0.0005034 +2024-11-11 22:18:44,511 Epoch 1887/2000 +2024-11-11 22:19:00,428 Current Learning Rate: 0.0039860635 +2024-11-11 22:19:00,429 Train Loss: 0.0004128, Val Loss: 0.0005134 +2024-11-11 22:19:00,429 Epoch 1888/2000 +2024-11-11 22:19:16,218 Current Learning Rate: 0.0040630934 +2024-11-11 22:19:16,218 Train Loss: 0.0003791, Val Loss: 0.0005249 +2024-11-11 22:19:16,219 Epoch 1889/2000 +2024-11-11 22:19:32,392 Current Learning Rate: 0.0041403545 +2024-11-11 22:19:32,393 Train Loss: 0.0003771, Val Loss: 0.0005103 +2024-11-11 22:19:32,393 Epoch 1890/2000 +2024-11-11 22:19:48,686 Current Learning Rate: 0.0042178277 +2024-11-11 22:19:48,687 Train Loss: 0.0003920, Val Loss: 0.0005140 +2024-11-11 22:19:48,687 Epoch 1891/2000 +2024-11-11 22:20:05,551 Current Learning Rate: 0.0042954938 +2024-11-11 22:20:05,552 Train Loss: 0.0004391, Val Loss: 0.0005231 +2024-11-11 22:20:05,552 Epoch 1892/2000 +2024-11-11 22:20:21,539 Current Learning Rate: 0.0043733338 +2024-11-11 22:20:21,540 Train Loss: 0.0003930, Val Loss: 0.0005018 +2024-11-11 22:20:21,540 Epoch 1893/2000 +2024-11-11 22:20:38,421 Current Learning Rate: 0.0044513284 +2024-11-11 22:20:38,421 Train Loss: 0.0003513, Val Loss: 0.0004932 +2024-11-11 22:20:38,421 Epoch 1894/2000 +2024-11-11 22:20:55,353 Current Learning Rate: 0.0045294584 +2024-11-11 22:20:55,354 Train Loss: 0.0004034, Val Loss: 0.0005197 +2024-11-11 22:20:55,355 Epoch 1895/2000 +2024-11-11 22:21:11,844 Current 
Learning Rate: 0.0046077045 +2024-11-11 22:21:11,844 Train Loss: 0.0004092, Val Loss: 0.0005259 +2024-11-11 22:21:11,845 Epoch 1896/2000 +2024-11-11 22:21:28,285 Current Learning Rate: 0.0046860474 +2024-11-11 22:21:28,286 Train Loss: 0.0003873, Val Loss: 0.0005355 +2024-11-11 22:21:28,305 Epoch 1897/2000 +2024-11-11 22:21:44,025 Current Learning Rate: 0.0047644677 +2024-11-11 22:21:44,025 Train Loss: 0.0005417, Val Loss: 0.0005593 +2024-11-11 22:21:44,026 Epoch 1898/2000 +2024-11-11 22:22:00,455 Current Learning Rate: 0.0048429462 +2024-11-11 22:22:00,456 Train Loss: 0.0004578, Val Loss: 0.0005557 +2024-11-11 22:22:00,456 Epoch 1899/2000 +2024-11-11 22:22:16,620 Current Learning Rate: 0.0049214634 +2024-11-11 22:22:16,621 Train Loss: 0.0004109, Val Loss: 0.0005379 +2024-11-11 22:22:16,621 Epoch 1900/2000 +2024-11-11 22:22:32,720 Current Learning Rate: 0.0050000000 +2024-11-11 22:22:32,721 Train Loss: 0.0004444, Val Loss: 0.0005574 +2024-11-11 22:22:32,721 Epoch 1901/2000 +2024-11-11 22:22:48,745 Current Learning Rate: 0.0050785366 +2024-11-11 22:22:48,745 Train Loss: 0.0004269, Val Loss: 0.0005297 +2024-11-11 22:22:48,747 Epoch 1902/2000 +2024-11-11 22:23:05,199 Current Learning Rate: 0.0051570538 +2024-11-11 22:23:05,199 Train Loss: 0.0003823, Val Loss: 0.0005118 +2024-11-11 22:23:05,199 Epoch 1903/2000 +2024-11-11 22:23:21,125 Current Learning Rate: 0.0052355323 +2024-11-11 22:23:21,125 Train Loss: 0.0004586, Val Loss: 0.0005273 +2024-11-11 22:23:21,125 Epoch 1904/2000 +2024-11-11 22:23:38,268 Current Learning Rate: 0.0053139526 +2024-11-11 22:23:38,268 Train Loss: 0.0004171, Val Loss: 0.0005362 +2024-11-11 22:23:38,269 Epoch 1905/2000 +2024-11-11 22:23:55,405 Current Learning Rate: 0.0053922955 +2024-11-11 22:23:55,406 Train Loss: 0.0004146, Val Loss: 0.0005299 +2024-11-11 22:23:55,406 Epoch 1906/2000 +2024-11-11 22:24:11,620 Current Learning Rate: 0.0054705416 +2024-11-11 22:24:11,621 Train Loss: 0.0003814, Val Loss: 0.0005246 +2024-11-11 22:24:11,621 Epoch 
1907/2000 +2024-11-11 22:24:28,148 Current Learning Rate: 0.0055486716 +2024-11-11 22:24:28,149 Train Loss: 0.0003620, Val Loss: 0.0005353 +2024-11-11 22:24:28,149 Epoch 1908/2000 +2024-11-11 22:24:44,726 Current Learning Rate: 0.0056266662 +2024-11-11 22:24:44,726 Train Loss: 0.0003887, Val Loss: 0.0005592 +2024-11-11 22:24:44,727 Epoch 1909/2000 +2024-11-11 22:25:00,627 Current Learning Rate: 0.0057045062 +2024-11-11 22:25:00,628 Train Loss: 0.0004154, Val Loss: 0.0005705 +2024-11-11 22:25:00,628 Epoch 1910/2000 +2024-11-11 22:25:16,816 Current Learning Rate: 0.0057821723 +2024-11-11 22:25:16,817 Train Loss: 0.0004081, Val Loss: 0.0005572 +2024-11-11 22:25:16,817 Epoch 1911/2000 +2024-11-11 22:25:32,575 Current Learning Rate: 0.0058596455 +2024-11-11 22:25:32,576 Train Loss: 0.0005201, Val Loss: 0.0006403 +2024-11-11 22:25:32,577 Epoch 1912/2000 +2024-11-11 22:25:48,941 Current Learning Rate: 0.0059369066 +2024-11-11 22:25:48,942 Train Loss: 0.0005510, Val Loss: 0.0005839 +2024-11-11 22:25:48,942 Epoch 1913/2000 +2024-11-11 22:26:04,790 Current Learning Rate: 0.0060139365 +2024-11-11 22:26:04,791 Train Loss: 0.0004719, Val Loss: 0.0005980 +2024-11-11 22:26:04,791 Epoch 1914/2000 +2024-11-11 22:26:21,203 Current Learning Rate: 0.0060907162 +2024-11-11 22:26:21,204 Train Loss: 0.0004550, Val Loss: 0.0005598 +2024-11-11 22:26:21,204 Epoch 1915/2000 +2024-11-11 22:26:36,281 Current Learning Rate: 0.0061672268 +2024-11-11 22:26:36,281 Train Loss: 0.0004500, Val Loss: 0.0005986 +2024-11-11 22:26:36,282 Epoch 1916/2000 +2024-11-11 22:26:51,823 Current Learning Rate: 0.0062434494 +2024-11-11 22:26:51,824 Train Loss: 0.0004075, Val Loss: 0.0005628 +2024-11-11 22:26:51,824 Epoch 1917/2000 +2024-11-11 22:27:07,607 Current Learning Rate: 0.0063193652 +2024-11-11 22:27:07,608 Train Loss: 0.0004483, Val Loss: 0.0005511 +2024-11-11 22:27:07,608 Epoch 1918/2000 +2024-11-11 22:27:24,771 Current Learning Rate: 0.0063949555 +2024-11-11 22:27:24,772 Train Loss: 0.0004663, Val Loss: 
0.0005478 +2024-11-11 22:27:24,772 Epoch 1919/2000 +2024-11-11 22:27:39,734 Current Learning Rate: 0.0064702016 +2024-11-11 22:27:39,735 Train Loss: 0.0003983, Val Loss: 0.0005294 +2024-11-11 22:27:39,735 Epoch 1920/2000 +2024-11-11 22:27:55,598 Current Learning Rate: 0.0065450850 +2024-11-11 22:27:55,598 Train Loss: 0.0004561, Val Loss: 0.0005404 +2024-11-11 22:27:55,599 Epoch 1921/2000 +2024-11-11 22:28:11,431 Current Learning Rate: 0.0066195871 +2024-11-11 22:28:11,432 Train Loss: 0.0003968, Val Loss: 0.0005200 +2024-11-11 22:28:11,432 Epoch 1922/2000 +2024-11-11 22:28:27,754 Current Learning Rate: 0.0066936896 +2024-11-11 22:28:27,754 Train Loss: 0.0003605, Val Loss: 0.0005241 +2024-11-11 22:28:27,755 Epoch 1923/2000 +2024-11-11 22:28:44,247 Current Learning Rate: 0.0067673742 +2024-11-11 22:28:44,247 Train Loss: 0.0004936, Val Loss: 0.0005612 +2024-11-11 22:28:44,248 Epoch 1924/2000 +2024-11-11 22:29:00,145 Current Learning Rate: 0.0068406228 +2024-11-11 22:29:00,146 Train Loss: 0.0004738, Val Loss: 0.0005509 +2024-11-11 22:29:00,146 Epoch 1925/2000 +2024-11-11 22:29:15,838 Current Learning Rate: 0.0069134172 +2024-11-11 22:29:15,839 Train Loss: 0.0004473, Val Loss: 0.0005633 +2024-11-11 22:29:15,839 Epoch 1926/2000 +2024-11-11 22:29:31,757 Current Learning Rate: 0.0069857395 +2024-11-11 22:29:31,757 Train Loss: 0.0005060, Val Loss: 0.0005654 +2024-11-11 22:29:31,757 Epoch 1927/2000 +2024-11-11 22:29:47,388 Current Learning Rate: 0.0070575718 +2024-11-11 22:29:47,389 Train Loss: 0.0003975, Val Loss: 0.0005398 +2024-11-11 22:29:47,389 Epoch 1928/2000 +2024-11-11 22:30:03,786 Current Learning Rate: 0.0071288965 +2024-11-11 22:30:03,787 Train Loss: 0.0004850, Val Loss: 0.0005724 +2024-11-11 22:30:03,787 Epoch 1929/2000 +2024-11-11 22:30:19,730 Current Learning Rate: 0.0071996958 +2024-11-11 22:30:19,731 Train Loss: 0.0004567, Val Loss: 0.0005773 +2024-11-11 22:30:19,731 Epoch 1930/2000 +2024-11-11 22:30:35,592 Current Learning Rate: 0.0072699525 +2024-11-11 
22:30:35,592 Train Loss: 0.0004131, Val Loss: 0.0005631 +2024-11-11 22:30:35,592 Epoch 1931/2000 +2024-11-11 22:30:51,100 Current Learning Rate: 0.0073396491 +2024-11-11 22:30:51,101 Train Loss: 0.0004963, Val Loss: 0.0005700 +2024-11-11 22:30:51,101 Epoch 1932/2000 +2024-11-11 22:31:06,570 Current Learning Rate: 0.0074087684 +2024-11-11 22:31:06,571 Train Loss: 0.0004965, Val Loss: 0.0006470 +2024-11-11 22:31:06,571 Epoch 1933/2000 +2024-11-11 22:31:22,792 Current Learning Rate: 0.0074772933 +2024-11-11 22:31:22,792 Train Loss: 0.0005311, Val Loss: 0.0006102 +2024-11-11 22:31:22,793 Epoch 1934/2000 +2024-11-11 22:31:38,520 Current Learning Rate: 0.0075452071 +2024-11-11 22:31:38,520 Train Loss: 0.0004443, Val Loss: 0.0005798 +2024-11-11 22:31:38,521 Epoch 1935/2000 +2024-11-11 22:31:53,616 Current Learning Rate: 0.0076124928 +2024-11-11 22:31:53,616 Train Loss: 0.0004478, Val Loss: 0.0005784 +2024-11-11 22:31:53,617 Epoch 1936/2000 +2024-11-11 22:32:09,026 Current Learning Rate: 0.0076791340 +2024-11-11 22:32:09,026 Train Loss: 0.0004659, Val Loss: 0.0006153 +2024-11-11 22:32:09,027 Epoch 1937/2000 +2024-11-11 22:32:24,609 Current Learning Rate: 0.0077451141 +2024-11-11 22:32:24,610 Train Loss: 0.0004430, Val Loss: 0.0006007 +2024-11-11 22:32:24,610 Epoch 1938/2000 +2024-11-11 22:32:40,230 Current Learning Rate: 0.0078104169 +2024-11-11 22:32:40,231 Train Loss: 0.0005379, Val Loss: 0.0006273 +2024-11-11 22:32:40,232 Epoch 1939/2000 +2024-11-11 22:32:55,734 Current Learning Rate: 0.0078750263 +2024-11-11 22:32:55,734 Train Loss: 0.0005183, Val Loss: 0.0006318 +2024-11-11 22:32:55,734 Epoch 1940/2000 +2024-11-11 22:33:11,066 Current Learning Rate: 0.0079389263 +2024-11-11 22:33:11,066 Train Loss: 0.0005867, Val Loss: 0.0006767 +2024-11-11 22:33:11,066 Epoch 1941/2000 +2024-11-11 22:33:26,922 Current Learning Rate: 0.0080021011 +2024-11-11 22:33:26,923 Train Loss: 0.0004844, Val Loss: 0.0006373 +2024-11-11 22:33:26,924 Epoch 1942/2000 +2024-11-11 22:33:42,301 Current 
Learning Rate: 0.0080645353 +2024-11-11 22:33:42,302 Train Loss: 0.0005559, Val Loss: 0.0005938 +2024-11-11 22:33:42,302 Epoch 1943/2000 +2024-11-11 22:33:57,589 Current Learning Rate: 0.0081262133 +2024-11-11 22:33:57,589 Train Loss: 0.0005101, Val Loss: 0.0006782 +2024-11-11 22:33:57,590 Epoch 1944/2000 +2024-11-11 22:34:13,075 Current Learning Rate: 0.0081871199 +2024-11-11 22:34:13,076 Train Loss: 0.0005106, Val Loss: 0.0005803 +2024-11-11 22:34:13,076 Epoch 1945/2000 +2024-11-11 22:34:29,641 Current Learning Rate: 0.0082472402 +2024-11-11 22:34:29,642 Train Loss: 0.0004256, Val Loss: 0.0005619 +2024-11-11 22:34:29,642 Epoch 1946/2000 +2024-11-11 22:34:46,539 Current Learning Rate: 0.0083065593 +2024-11-11 22:34:46,539 Train Loss: 0.0005152, Val Loss: 0.0005588 +2024-11-11 22:34:46,539 Epoch 1947/2000 +2024-11-11 22:35:03,225 Current Learning Rate: 0.0083650626 +2024-11-11 22:35:03,226 Train Loss: 0.0003819, Val Loss: 0.0005585 +2024-11-11 22:35:03,227 Epoch 1948/2000 +2024-11-11 22:35:19,521 Current Learning Rate: 0.0084227355 +2024-11-11 22:35:19,522 Train Loss: 0.0004611, Val Loss: 0.0005604 +2024-11-11 22:35:19,522 Epoch 1949/2000 +2024-11-11 22:35:35,988 Current Learning Rate: 0.0084795640 +2024-11-11 22:35:35,989 Train Loss: 0.0005769, Val Loss: 0.0006435 +2024-11-11 22:35:35,989 Epoch 1950/2000 +2024-11-11 22:35:51,984 Current Learning Rate: 0.0085355339 +2024-11-11 22:35:51,985 Train Loss: 0.0004643, Val Loss: 0.0005785 +2024-11-11 22:35:51,985 Epoch 1951/2000 +2024-11-11 22:36:07,018 Current Learning Rate: 0.0085906315 +2024-11-11 22:36:07,019 Train Loss: 0.0004961, Val Loss: 0.0005766 +2024-11-11 22:36:07,019 Epoch 1952/2000 +2024-11-11 22:36:23,026 Current Learning Rate: 0.0086448431 +2024-11-11 22:36:23,026 Train Loss: 0.0005167, Val Loss: 0.0005771 +2024-11-11 22:36:23,026 Epoch 1953/2000 +2024-11-11 22:36:38,473 Current Learning Rate: 0.0086981555 +2024-11-11 22:36:38,473 Train Loss: 0.0004278, Val Loss: 0.0005576 +2024-11-11 22:36:38,474 Epoch 
1954/2000 +2024-11-11 22:36:54,301 Current Learning Rate: 0.0087505553 +2024-11-11 22:36:54,305 Train Loss: 0.0004320, Val Loss: 0.0005682 +2024-11-11 22:36:54,306 Epoch 1955/2000 +2024-11-11 22:37:10,387 Current Learning Rate: 0.0088020298 +2024-11-11 22:37:10,387 Train Loss: 0.0003972, Val Loss: 0.0005400 +2024-11-11 22:37:10,387 Epoch 1956/2000 +2024-11-11 22:37:26,164 Current Learning Rate: 0.0088525662 +2024-11-11 22:37:26,165 Train Loss: 0.0005276, Val Loss: 0.0005955 +2024-11-11 22:37:26,165 Epoch 1957/2000 +2024-11-11 22:37:42,188 Current Learning Rate: 0.0089021520 +2024-11-11 22:37:42,189 Train Loss: 0.0004871, Val Loss: 0.0005588 +2024-11-11 22:37:42,189 Epoch 1958/2000 +2024-11-11 22:37:57,877 Current Learning Rate: 0.0089507751 +2024-11-11 22:37:57,878 Train Loss: 0.0004784, Val Loss: 0.0006114 +2024-11-11 22:37:57,878 Epoch 1959/2000 +2024-11-11 22:38:14,224 Current Learning Rate: 0.0089984233 +2024-11-11 22:38:14,225 Train Loss: 0.0004367, Val Loss: 0.0005510 +2024-11-11 22:38:14,225 Epoch 1960/2000 +2024-11-11 22:38:30,170 Current Learning Rate: 0.0090450850 +2024-11-11 22:38:30,170 Train Loss: 0.0005190, Val Loss: 0.0005609 +2024-11-11 22:38:30,171 Epoch 1961/2000 +2024-11-11 22:38:46,174 Current Learning Rate: 0.0090907486 +2024-11-11 22:38:46,174 Train Loss: 0.0004511, Val Loss: 0.0006288 +2024-11-11 22:38:46,175 Epoch 1962/2000 +2024-11-11 22:39:02,107 Current Learning Rate: 0.0091354029 +2024-11-11 22:39:02,108 Train Loss: 0.0004677, Val Loss: 0.0005959 +2024-11-11 22:39:02,109 Epoch 1963/2000 +2024-11-11 22:39:18,754 Current Learning Rate: 0.0091790368 +2024-11-11 22:39:18,755 Train Loss: 0.0005031, Val Loss: 0.0006509 +2024-11-11 22:39:18,755 Epoch 1964/2000 +2024-11-11 22:39:35,031 Current Learning Rate: 0.0092216396 +2024-11-11 22:39:35,031 Train Loss: 0.0005291, Val Loss: 0.0006005 +2024-11-11 22:39:35,031 Epoch 1965/2000 +2024-11-11 22:39:50,174 Current Learning Rate: 0.0092632008 +2024-11-11 22:39:50,175 Train Loss: 0.0004980, Val Loss: 
0.0006615 +2024-11-11 22:39:50,175 Epoch 1966/2000 +2024-11-11 22:40:05,947 Current Learning Rate: 0.0093037101 +2024-11-11 22:40:05,947 Train Loss: 0.0005937, Val Loss: 0.0006493 +2024-11-11 22:40:05,948 Epoch 1967/2000 +2024-11-11 22:40:21,471 Current Learning Rate: 0.0093431576 +2024-11-11 22:40:21,472 Train Loss: 0.0004971, Val Loss: 0.0005757 +2024-11-11 22:40:21,472 Epoch 1968/2000 +2024-11-11 22:40:37,415 Current Learning Rate: 0.0093815334 +2024-11-11 22:40:37,416 Train Loss: 0.0004579, Val Loss: 0.0005759 +2024-11-11 22:40:37,416 Epoch 1969/2000 +2024-11-11 22:40:54,488 Current Learning Rate: 0.0094188282 +2024-11-11 22:40:54,489 Train Loss: 0.0005135, Val Loss: 0.0005550 +2024-11-11 22:40:54,489 Epoch 1970/2000 +2024-11-11 22:41:10,743 Current Learning Rate: 0.0094550326 +2024-11-11 22:41:10,743 Train Loss: 0.0004032, Val Loss: 0.0005354 +2024-11-11 22:41:10,744 Epoch 1971/2000 +2024-11-11 22:41:27,207 Current Learning Rate: 0.0094901379 +2024-11-11 22:41:27,209 Train Loss: 0.0004060, Val Loss: 0.0005371 +2024-11-11 22:41:27,209 Epoch 1972/2000 +2024-11-11 22:41:43,846 Current Learning Rate: 0.0095241353 +2024-11-11 22:41:43,847 Train Loss: 0.0004552, Val Loss: 0.0005439 +2024-11-11 22:41:43,848 Epoch 1973/2000 +2024-11-11 22:41:59,302 Current Learning Rate: 0.0095570164 +2024-11-11 22:41:59,302 Train Loss: 0.0004786, Val Loss: 0.0006519 +2024-11-11 22:41:59,303 Epoch 1974/2000 +2024-11-11 22:42:16,111 Current Learning Rate: 0.0095887731 +2024-11-11 22:42:16,112 Train Loss: 0.0005079, Val Loss: 0.0005804 +2024-11-11 22:42:16,112 Epoch 1975/2000 +2024-11-11 22:42:32,181 Current Learning Rate: 0.0096193977 +2024-11-11 22:42:32,182 Train Loss: 0.0004874, Val Loss: 0.0006093 +2024-11-11 22:42:32,182 Epoch 1976/2000 +2024-11-11 22:42:49,302 Current Learning Rate: 0.0096488824 +2024-11-11 22:42:49,303 Train Loss: 0.0004719, Val Loss: 0.0005910 +2024-11-11 22:42:49,304 Epoch 1977/2000 +2024-11-11 22:43:06,203 Current Learning Rate: 0.0096772202 +2024-11-11 
22:43:06,203 Train Loss: 0.0005030, Val Loss: 0.0006389 +2024-11-11 22:43:06,204 Epoch 1978/2000 +2024-11-11 22:43:21,586 Current Learning Rate: 0.0097044038 +2024-11-11 22:43:21,587 Train Loss: 0.0005297, Val Loss: 0.0006394 +2024-11-11 22:43:21,587 Epoch 1979/2000 +2024-11-11 22:43:37,738 Current Learning Rate: 0.0097304268 +2024-11-11 22:43:37,739 Train Loss: 0.0005388, Val Loss: 0.0006585 +2024-11-11 22:43:37,739 Epoch 1980/2000 +2024-11-11 22:43:53,676 Current Learning Rate: 0.0097552826 +2024-11-11 22:43:53,676 Train Loss: 0.0005008, Val Loss: 0.0006466 +2024-11-11 22:43:53,677 Epoch 1981/2000 +2024-11-11 22:44:09,510 Current Learning Rate: 0.0097789651 +2024-11-11 22:44:09,510 Train Loss: 0.0005345, Val Loss: 0.0006037 +2024-11-11 22:44:09,510 Epoch 1982/2000 +2024-11-11 22:44:25,307 Current Learning Rate: 0.0098014684 +2024-11-11 22:44:25,308 Train Loss: 0.0005008, Val Loss: 0.0006197 +2024-11-11 22:44:25,308 Epoch 1983/2000 +2024-11-11 22:44:41,161 Current Learning Rate: 0.0098227871 +2024-11-11 22:44:41,162 Train Loss: 0.0005541, Val Loss: 0.0005858 +2024-11-11 22:44:41,162 Epoch 1984/2000 +2024-11-11 22:44:56,974 Current Learning Rate: 0.0098429158 +2024-11-11 22:44:56,974 Train Loss: 0.0004540, Val Loss: 0.0005872 +2024-11-11 22:44:56,975 Epoch 1985/2000 +2024-11-11 22:45:12,528 Current Learning Rate: 0.0098618496 +2024-11-11 22:45:12,528 Train Loss: 0.0005192, Val Loss: 0.0006465 +2024-11-11 22:45:12,529 Epoch 1986/2000 +2024-11-11 22:45:28,548 Current Learning Rate: 0.0098795838 +2024-11-11 22:45:28,548 Train Loss: 0.0005380, Val Loss: 0.0006143 +2024-11-11 22:45:28,549 Epoch 1987/2000 +2024-11-11 22:45:45,004 Current Learning Rate: 0.0098961141 +2024-11-11 22:45:45,005 Train Loss: 0.0004710, Val Loss: 0.0005924 +2024-11-11 22:45:45,005 Epoch 1988/2000 +2024-11-11 22:46:00,953 Current Learning Rate: 0.0099114363 +2024-11-11 22:46:00,954 Train Loss: 0.0005370, Val Loss: 0.0006376 +2024-11-11 22:46:00,954 Epoch 1989/2000 +2024-11-11 22:46:16,911 Current 
Learning Rate: 0.0099255466 +2024-11-11 22:46:16,911 Train Loss: 0.0004813, Val Loss: 0.0006095 +2024-11-11 22:46:16,911 Epoch 1990/2000 +2024-11-11 22:46:32,954 Current Learning Rate: 0.0099384417 +2024-11-11 22:46:32,955 Train Loss: 0.0005072, Val Loss: 0.0006012 +2024-11-11 22:46:32,955 Epoch 1991/2000 +2024-11-11 22:46:48,743 Current Learning Rate: 0.0099501183 +2024-11-11 22:46:48,744 Train Loss: 0.0004263, Val Loss: 0.0006099 +2024-11-11 22:46:48,744 Epoch 1992/2000 +2024-11-11 22:47:04,812 Current Learning Rate: 0.0099605735 +2024-11-11 22:47:04,812 Train Loss: 0.0004198, Val Loss: 0.0006060 +2024-11-11 22:47:04,813 Epoch 1993/2000 +2024-11-11 22:47:21,632 Current Learning Rate: 0.0099698048 +2024-11-11 22:47:21,633 Train Loss: 0.0005051, Val Loss: 0.0005728 +2024-11-11 22:47:21,633 Epoch 1994/2000 +2024-11-11 22:47:37,650 Current Learning Rate: 0.0099778098 +2024-11-11 22:47:37,650 Train Loss: 0.0004463, Val Loss: 0.0005757 +2024-11-11 22:47:37,651 Epoch 1995/2000 +2024-11-11 22:47:53,167 Current Learning Rate: 0.0099845867 +2024-11-11 22:47:53,167 Train Loss: 0.0004818, Val Loss: 0.0005728 +2024-11-11 22:47:53,168 Epoch 1996/2000 +2024-11-11 22:48:09,423 Current Learning Rate: 0.0099901336 +2024-11-11 22:48:09,423 Train Loss: 0.0004305, Val Loss: 0.0005917 +2024-11-11 22:48:09,424 Epoch 1997/2000 +2024-11-11 22:48:26,012 Current Learning Rate: 0.0099944494 +2024-11-11 22:48:26,013 Train Loss: 0.0004970, Val Loss: 0.0005895 +2024-11-11 22:48:26,013 Epoch 1998/2000 +2024-11-11 22:48:41,468 Current Learning Rate: 0.0099975328 +2024-11-11 22:48:41,468 Train Loss: 0.0004721, Val Loss: 0.0005880 +2024-11-11 22:48:41,469 Epoch 1999/2000 +2024-11-11 22:48:57,740 Current Learning Rate: 0.0099993832 +2024-11-11 22:48:57,742 Train Loss: 0.0004842, Val Loss: 0.0006773 +2024-11-11 22:48:57,743 Epoch 2000/2000 +2024-11-11 22:49:13,391 Current Learning Rate: 0.0100000000 +2024-11-11 22:49:13,391 Train Loss: 0.0005090, Val Loss: 0.0006428 +2024-11-11 22:49:16,988 Testing 
completed and best model saved. +2025-02-17 17:19:16,665 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-17 17:19:36,860 Loading best model from checkpoint. +2025-02-17 17:19:54,119 Testing completed and best model saved. +2025-02-17 18:21:25,906 Animation.save using +2025-02-17 18:22:38,427 Animation.save using +2025-02-17 18:23:09,626 Animation.save using +2025-02-17 18:39:34,079 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-17 18:39:48,459 Loading best model from checkpoint. +2025-02-17 18:39:54,512 Error loading model checkpoint during testing: The size of tensor a (40) must match the size of tensor b (10) at non-singleton dimension 1 +2025-02-17 18:42:21,244 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-17 18:42:34,481 Loading best model from checkpoint. +2025-02-17 18:43:27,442 Testing completed and best model saved. +2025-02-17 18:47:40,178 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-17 18:47:53,382 Loading best model from checkpoint. +2025-02-17 18:48:46,845 Testing completed and best model saved. +2025-02-17 18:52:52,187 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-17 18:53:05,667 Loading best model from checkpoint. +2025-02-17 18:53:59,552 Testing completed and best model saved. +2025-02-17 20:30:48,677 Loading best model from checkpoint. 
+2025-02-17 20:30:49,613 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", 
"temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", 
"temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", 
"temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", 
"atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", 
"module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", 
"module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", 
"module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", 
"module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 20:31:20,940 Loading best model from checkpoint. +2025-02-17 20:31:21,192 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", 
"temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", 
"temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", 
"temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", 
"atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", 
"module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", 
"module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", 
"module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", 
"module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", 
"module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 20:45:29,895 Loading best model from checkpoint. +2025-02-17 20:45:30,122 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", 
"temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", 
"temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", 
"temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", 
"temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", 
"module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", 
"module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", 
"module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", 
"module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", 
"module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 20:48:07,523 Loading best model from checkpoint. +2025-02-17 20:48:07,797 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", 
"temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", 
"temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", 
"temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", 
"temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 21:06:07,942 Loading best model from checkpoint. 
+2025-02-17 21:06:08,422 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", 
"temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", 
"temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", 
"temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", 
"atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", 
"module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", 
"module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", 
"module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", 
"module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 21:10:17,504 Loading best model from checkpoint. +2025-02-17 21:10:17,753 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", 
"temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", 
"temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", 
"temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", 
"atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", 
"module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", 
"module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", 
"module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", 
"module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", 
"module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 21:11:47,434 Loading best model from checkpoint. +2025-02-17 21:11:47,665 Error loading model checkpoint: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", 
"temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", 
"temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", 
"temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", 
"temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", 
"module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", 
"module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", 
"module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", 
"module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", 
"module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 21:14:02,706 Loading best model from checkpoint. +2025-02-17 21:14:02,939 Error loading model checkpoint directly: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", 
"temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", 
"temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", 
"temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", 
"temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 21:14:02,939 Attempting to fix the state_dict by removing "module." prefix. +2025-02-17 21:14:02,944 Model loaded successfully after fixing state_dict. 
+2025-02-17 21:17:11,275 Loading best model from checkpoint. +2025-02-17 21:17:11,560 Error loading model checkpoint directly: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", 
"temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", 
"temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", 
"temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", 
"atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", 
"module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", 
"module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", 
"module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-17 21:17:11,561 Attempting to fix the state_dict by removing "module." prefix. +2025-02-17 21:17:11,567 Model loaded successfully after fixing state_dict. +2025-02-17 21:34:14,323 Animation.save using +2025-02-17 21:34:27,244 Animation.save using +2025-02-17 21:35:40,218 Animation.save using +2025-02-17 21:39:32,910 Animation.save using +2025-02-17 21:40:20,243 Animation.save using diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_multi_finetune_20250227_exp1_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_multi_finetune_20250227_exp1_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..b8a8c5831d8cbff30be3425eb15f6b3b0a3abd55 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_multi_finetune_20250227_exp1_training_log.log @@ -0,0 +1,12 @@ +2025-02-27 11:54:37,779 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:54:37,830 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:54:37,895 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:54:37,943 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:54:37,960 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:54:37,967 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:54:37,972 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:54:37,974 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:55:54,624 Error loading model checkpoint: Error(s) in loading state_dict for Triton_finetune: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", 
"atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.gamma_1", "temporal_evolution.enc.0.block.gamma_2", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.attn.qkv.weight", "temporal_evolution.enc.0.block.attn.qkv.bias", "temporal_evolution.enc.0.block.attn.proj.weight", "temporal_evolution.enc.0.block.attn.proj.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", 
"temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", 
"temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", 
"temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.gamma_1", "temporal_evolution.enc.7.block.gamma_2", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.attn.qkv.weight", "temporal_evolution.enc.7.block.attn.qkv.bias", "temporal_evolution.enc.7.block.attn.proj.weight", "temporal_evolution.enc.7.block.attn.proj.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". 
+2025-02-27 11:56:01,581 Epoch 1/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_multi_finetune_20250227_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_multi_finetune_20250227_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..d1d50b66413a5801c8626951e67d19d84487fc27 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Triton_multi_finetune_20250227_training_log.log @@ -0,0 +1,62 @@ +2025-02-27 11:29:27,737 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:29:27,806 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:29:27,856 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:29:27,899 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:29:27,910 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:29:27,921 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:29:27,927 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:29:27,937 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:30:51,766 Epoch 1/2000 +2025-02-27 11:30:55,827 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,827 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,827 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:30:55,828 Reducer buckets have been rebuilt in this iteration. 
+2025-02-27 11:32:44,457 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:32:44,466 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:32:44,572 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:32:44,577 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:32:44,601 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:32:44,634 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:32:44,651 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:32:44,653 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:34:28,036 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:34:28,247 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:34:28,276 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:34:28,291 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:34:28,297 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:34:28,375 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:34:28,378 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:34:28,379 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:35:51,524 Epoch 1/2000 +2025-02-27 11:35:56,067 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,067 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,067 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:35:56,068 Reducer buckets have been rebuilt in this iteration. 
+2025-02-27 11:36:34,354 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-27 11:36:34,418 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-27 11:36:34,502 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-27 11:36:34,508 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-27 11:36:34,514 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-27 11:36:34,551 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-27 11:36:34,563 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 11:36:34,566 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-27 11:38:03,555 Epoch 1/2000 +2025-02-27 11:38:07,539 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,539 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. +2025-02-27 11:38:07,540 Reducer buckets have been rebuilt in this iteration. 
+2025-02-27 11:46:31,113 Current Learning Rate: 0.0000099994 +2025-02-27 11:46:32,241 Train Loss: 0.0180559, Val Loss: 0.0122653 +2025-02-27 11:46:32,241 Epoch 2/2000 diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_U_net_exp2_20250226_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_U_net_exp2_20250226_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..e87f67649cc488822d8e3f9af59af9fc6987d4d3 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_U_net_exp2_20250226_training_log.log @@ -0,0 +1,6033 @@ +2025-02-26 22:56:58,294 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-26 22:56:58,574 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-26 22:56:58,579 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-26 22:56:58,594 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-26 22:56:58,596 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-26 22:56:59,149 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-26 22:56:59,156 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-26 22:56:59,158 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-26 22:58:08,430 Epoch 1/2000 +2025-02-26 22:58:11,692 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,692 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. +2025-02-26 22:58:11,701 Reducer buckets have been rebuilt in this iteration. 
+2025-02-26 22:58:23,075 Current Learning Rate: 0.0099993832 +2025-02-26 22:58:23,286 Train Loss: 51.6283541, Val Loss: 25.5396109 +2025-02-26 22:58:23,286 Epoch 2/2000 +2025-02-26 22:58:37,420 Current Learning Rate: 0.0099975328 +2025-02-26 22:58:37,626 Train Loss: 1.2227646, Val Loss: 0.0883306 +2025-02-26 22:58:37,626 Epoch 3/2000 +2025-02-26 22:58:52,101 Current Learning Rate: 0.0099944494 +2025-02-26 22:58:52,310 Train Loss: 0.0641188, Val Loss: 0.0479623 +2025-02-26 22:58:52,310 Epoch 4/2000 +2025-02-26 22:59:07,548 Current Learning Rate: 0.0099901336 +2025-02-26 22:59:07,759 Train Loss: 0.0425325, Val Loss: 0.0358310 +2025-02-26 22:59:07,759 Epoch 5/2000 +2025-02-26 22:59:22,517 Current Learning Rate: 0.0099845867 +2025-02-26 22:59:22,721 Train Loss: 0.0333341, Val Loss: 0.0291384 +2025-02-26 22:59:22,721 Epoch 6/2000 +2025-02-26 22:59:38,156 Current Learning Rate: 0.0099778098 +2025-02-26 22:59:38,362 Train Loss: 0.0276849, Val Loss: 0.0247624 +2025-02-26 22:59:38,363 Epoch 7/2000 +2025-02-26 22:59:52,268 Current Learning Rate: 0.0099698048 +2025-02-26 22:59:52,475 Train Loss: 0.0238586, Val Loss: 0.0216011 +2025-02-26 22:59:52,475 Epoch 8/2000 +2025-02-26 23:00:07,243 Current Learning Rate: 0.0099605735 +2025-02-26 23:00:07,453 Train Loss: 0.0210399, Val Loss: 0.0192004 +2025-02-26 23:00:07,454 Epoch 9/2000 +2025-02-26 23:00:21,660 Current Learning Rate: 0.0099501183 +2025-02-26 23:00:21,865 Train Loss: 0.0188313, Val Loss: 0.0173217 +2025-02-26 23:00:21,865 Epoch 10/2000 +2025-02-26 23:00:37,039 Current Learning Rate: 0.0099384417 +2025-02-26 23:00:37,239 Train Loss: 0.0173443, Val Loss: 0.0157970 +2025-02-26 23:00:37,239 Epoch 11/2000 +2025-02-26 23:00:53,106 Current Learning Rate: 0.0099255466 +2025-02-26 23:00:53,298 Train Loss: 0.0156870, Val Loss: 0.0145480 +2025-02-26 23:00:53,299 Epoch 12/2000 +2025-02-26 23:01:08,347 Current Learning Rate: 0.0099114363 +2025-02-26 23:01:08,592 Train Loss: 0.0145054, Val Loss: 0.0135218 +2025-02-26 23:01:08,592 
Epoch 13/2000 +2025-02-26 23:01:22,995 Current Learning Rate: 0.0098961141 +2025-02-26 23:01:23,206 Train Loss: 0.0135298, Val Loss: 0.0126529 +2025-02-26 23:01:23,206 Epoch 14/2000 +2025-02-26 23:01:38,196 Current Learning Rate: 0.0098795838 +2025-02-26 23:01:38,395 Train Loss: 0.0126993, Val Loss: 0.0119084 +2025-02-26 23:01:38,395 Epoch 15/2000 +2025-02-26 23:01:52,688 Current Learning Rate: 0.0098618496 +2025-02-26 23:01:52,912 Train Loss: 0.0119911, Val Loss: 0.0112693 +2025-02-26 23:01:52,912 Epoch 16/2000 +2025-02-26 23:02:06,774 Current Learning Rate: 0.0098429158 +2025-02-26 23:02:06,977 Train Loss: 0.0113760, Val Loss: 0.0107182 +2025-02-26 23:02:06,977 Epoch 17/2000 +2025-02-26 23:02:21,716 Current Learning Rate: 0.0098227871 +2025-02-26 23:02:21,983 Train Loss: 0.0108404, Val Loss: 0.0102318 +2025-02-26 23:02:21,983 Epoch 18/2000 +2025-02-26 23:02:35,817 Current Learning Rate: 0.0098014684 +2025-02-26 23:02:36,077 Train Loss: 0.0103701, Val Loss: 0.0098028 +2025-02-26 23:02:36,077 Epoch 19/2000 +2025-02-26 23:02:50,839 Current Learning Rate: 0.0097789651 +2025-02-26 23:02:51,065 Train Loss: 0.0099559, Val Loss: 0.0094205 +2025-02-26 23:02:51,065 Epoch 20/2000 +2025-02-26 23:03:05,711 Current Learning Rate: 0.0097552826 +2025-02-26 23:03:06,020 Train Loss: 0.0095860, Val Loss: 0.0090860 +2025-02-26 23:03:06,020 Epoch 21/2000 +2025-02-26 23:03:21,091 Current Learning Rate: 0.0097304268 +2025-02-26 23:03:21,326 Train Loss: 0.0092569, Val Loss: 0.0087874 +2025-02-26 23:03:21,327 Epoch 22/2000 +2025-02-26 23:03:35,955 Current Learning Rate: 0.0097044038 +2025-02-26 23:03:36,176 Train Loss: 0.0089598, Val Loss: 0.0085110 +2025-02-26 23:03:36,176 Epoch 23/2000 +2025-02-26 23:03:51,274 Current Learning Rate: 0.0096772202 +2025-02-26 23:03:51,497 Train Loss: 0.0086921, Val Loss: 0.0082654 +2025-02-26 23:03:51,497 Epoch 24/2000 +2025-02-26 23:04:07,446 Current Learning Rate: 0.0096488824 +2025-02-26 23:04:07,689 Train Loss: 0.0084496, Val Loss: 0.0080437 
+2025-02-26 23:04:07,690 Epoch 25/2000 +2025-02-26 23:04:22,938 Current Learning Rate: 0.0096193977 +2025-02-26 23:04:22,938 Train Loss: 0.0094187, Val Loss: 0.0081274 +2025-02-26 23:04:22,938 Epoch 26/2000 +2025-02-26 23:04:38,681 Current Learning Rate: 0.0095887731 +2025-02-26 23:04:38,941 Train Loss: 0.0080624, Val Loss: 0.0076466 +2025-02-26 23:04:38,942 Epoch 27/2000 +2025-02-26 23:04:53,899 Current Learning Rate: 0.0095570164 +2025-02-26 23:04:54,130 Train Loss: 0.0078389, Val Loss: 0.0075092 +2025-02-26 23:04:54,130 Epoch 28/2000 +2025-02-26 23:05:10,362 Current Learning Rate: 0.0095241353 +2025-02-26 23:05:10,607 Train Loss: 0.0091723, Val Loss: 0.0073110 +2025-02-26 23:05:10,608 Epoch 29/2000 +2025-02-26 23:05:27,134 Current Learning Rate: 0.0094901379 +2025-02-26 23:05:27,359 Train Loss: 0.0075023, Val Loss: 0.0071609 +2025-02-26 23:05:27,359 Epoch 30/2000 +2025-02-26 23:05:42,740 Current Learning Rate: 0.0094550326 +2025-02-26 23:05:42,992 Train Loss: 0.0073511, Val Loss: 0.0070388 +2025-02-26 23:05:42,992 Epoch 31/2000 +2025-02-26 23:05:58,822 Current Learning Rate: 0.0094188282 +2025-02-26 23:05:58,823 Train Loss: 0.0083515, Val Loss: 0.0072086 +2025-02-26 23:05:58,823 Epoch 32/2000 +2025-02-26 23:06:15,360 Current Learning Rate: 0.0093815334 +2025-02-26 23:06:15,609 Train Loss: 0.0071335, Val Loss: 0.0067967 +2025-02-26 23:06:15,609 Epoch 33/2000 +2025-02-26 23:06:31,009 Current Learning Rate: 0.0093431576 +2025-02-26 23:06:31,009 Train Loss: 0.0070707, Val Loss: 0.0079217 +2025-02-26 23:06:31,009 Epoch 34/2000 +2025-02-26 23:06:47,434 Current Learning Rate: 0.0093037101 +2025-02-26 23:06:47,672 Train Loss: 0.0075967, Val Loss: 0.0066028 +2025-02-26 23:06:47,673 Epoch 35/2000 +2025-02-26 23:07:03,409 Current Learning Rate: 0.0092632008 +2025-02-26 23:07:03,410 Train Loss: 0.0071289, Val Loss: 0.0116526 +2025-02-26 23:07:03,410 Epoch 36/2000 +2025-02-26 23:07:18,351 Current Learning Rate: 0.0092216396 +2025-02-26 23:07:18,615 Train Loss: 0.0073541, Val 
Loss: 0.0064187 +2025-02-26 23:07:18,615 Epoch 37/2000 +2025-02-26 23:07:34,238 Current Learning Rate: 0.0091790368 +2025-02-26 23:07:34,475 Train Loss: 0.0066015, Val Loss: 0.0063422 +2025-02-26 23:07:34,476 Epoch 38/2000 +2025-02-26 23:07:49,853 Current Learning Rate: 0.0091354029 +2025-02-26 23:07:49,853 Train Loss: 0.0084744, Val Loss: 0.0070993 +2025-02-26 23:07:49,853 Epoch 39/2000 +2025-02-26 23:08:06,216 Current Learning Rate: 0.0090907486 +2025-02-26 23:08:06,488 Train Loss: 0.0065403, Val Loss: 0.0061936 +2025-02-26 23:08:06,488 Epoch 40/2000 +2025-02-26 23:08:22,096 Current Learning Rate: 0.0090450850 +2025-02-26 23:08:22,329 Train Loss: 0.0063671, Val Loss: 0.0061212 +2025-02-26 23:08:22,330 Epoch 41/2000 +2025-02-26 23:08:38,163 Current Learning Rate: 0.0089984233 +2025-02-26 23:08:38,404 Train Loss: 0.0062936, Val Loss: 0.0060547 +2025-02-26 23:08:38,404 Epoch 42/2000 +2025-02-26 23:08:54,720 Current Learning Rate: 0.0089507751 +2025-02-26 23:08:54,971 Train Loss: 0.0062259, Val Loss: 0.0059951 +2025-02-26 23:08:54,971 Epoch 43/2000 +2025-02-26 23:09:10,970 Current Learning Rate: 0.0089021520 +2025-02-26 23:09:10,970 Train Loss: 0.0086134, Val Loss: 0.0117356 +2025-02-26 23:09:10,970 Epoch 44/2000 +2025-02-26 23:09:27,421 Current Learning Rate: 0.0088525662 +2025-02-26 23:09:27,632 Train Loss: 0.0066158, Val Loss: 0.0058908 +2025-02-26 23:09:27,632 Epoch 45/2000 +2025-02-26 23:09:43,206 Current Learning Rate: 0.0088020298 +2025-02-26 23:09:43,429 Train Loss: 0.0060444, Val Loss: 0.0058211 +2025-02-26 23:09:43,430 Epoch 46/2000 +2025-02-26 23:09:58,572 Current Learning Rate: 0.0087505553 +2025-02-26 23:09:58,776 Train Loss: 0.0059841, Val Loss: 0.0057644 +2025-02-26 23:09:58,777 Epoch 47/2000 +2025-02-26 23:10:14,723 Current Learning Rate: 0.0086981555 +2025-02-26 23:10:14,941 Train Loss: 0.0059250, Val Loss: 0.0057106 +2025-02-26 23:10:14,941 Epoch 48/2000 +2025-02-26 23:10:29,729 Current Learning Rate: 0.0086448431 +2025-02-26 23:10:29,962 Train 
Loss: 0.0058711, Val Loss: 0.0056596 +2025-02-26 23:10:29,962 Epoch 49/2000 +2025-02-26 23:10:45,571 Current Learning Rate: 0.0085906315 +2025-02-26 23:10:45,806 Train Loss: 0.0058180, Val Loss: 0.0056109 +2025-02-26 23:10:45,806 Epoch 50/2000 +2025-02-26 23:11:01,551 Current Learning Rate: 0.0085355339 +2025-02-26 23:11:02,191 Train Loss: 0.0057686, Val Loss: 0.0055696 +2025-02-26 23:11:02,192 Epoch 51/2000 +2025-02-26 23:11:18,746 Current Learning Rate: 0.0084795640 +2025-02-26 23:11:18,747 Train Loss: 0.0070810, Val Loss: 0.0082433 +2025-02-26 23:11:18,747 Epoch 52/2000 +2025-02-26 23:11:36,789 Current Learning Rate: 0.0084227355 +2025-02-26 23:11:37,006 Train Loss: 0.0059533, Val Loss: 0.0054797 +2025-02-26 23:11:37,006 Epoch 53/2000 +2025-02-26 23:11:54,333 Current Learning Rate: 0.0083650626 +2025-02-26 23:11:54,615 Train Loss: 0.0056273, Val Loss: 0.0054333 +2025-02-26 23:11:54,616 Epoch 54/2000 +2025-02-26 23:12:12,270 Current Learning Rate: 0.0083065593 +2025-02-26 23:12:12,517 Train Loss: 0.0055809, Val Loss: 0.0053911 +2025-02-26 23:12:12,518 Epoch 55/2000 +2025-02-26 23:12:29,921 Current Learning Rate: 0.0082472402 +2025-02-26 23:12:30,168 Train Loss: 0.0055378, Val Loss: 0.0053523 +2025-02-26 23:12:30,168 Epoch 56/2000 +2025-02-26 23:12:47,372 Current Learning Rate: 0.0081871199 +2025-02-26 23:12:47,600 Train Loss: 0.0054967, Val Loss: 0.0053129 +2025-02-26 23:12:47,600 Epoch 57/2000 +2025-02-26 23:13:04,737 Current Learning Rate: 0.0081262133 +2025-02-26 23:13:04,738 Train Loss: 0.0073732, Val Loss: 0.0066412 +2025-02-26 23:13:04,738 Epoch 58/2000 +2025-02-26 23:13:22,876 Current Learning Rate: 0.0080645353 +2025-02-26 23:13:23,103 Train Loss: 0.0055424, Val Loss: 0.0052437 +2025-02-26 23:13:23,103 Epoch 59/2000 +2025-02-26 23:13:40,155 Current Learning Rate: 0.0080021011 +2025-02-26 23:13:40,420 Train Loss: 0.0053773, Val Loss: 0.0052033 +2025-02-26 23:13:40,421 Epoch 60/2000 +2025-02-26 23:13:58,737 Current Learning Rate: 0.0079389263 +2025-02-26 
23:13:58,962 Train Loss: 0.0053377, Val Loss: 0.0051669 +2025-02-26 23:13:58,962 Epoch 61/2000 +2025-02-26 23:14:16,562 Current Learning Rate: 0.0078750263 +2025-02-26 23:14:16,807 Train Loss: 0.0053015, Val Loss: 0.0051323 +2025-02-26 23:14:16,807 Epoch 62/2000 +2025-02-26 23:14:32,254 Current Learning Rate: 0.0078104169 +2025-02-26 23:14:32,487 Train Loss: 0.0052657, Val Loss: 0.0051002 +2025-02-26 23:14:32,487 Epoch 63/2000 +2025-02-26 23:14:48,172 Current Learning Rate: 0.0077451141 +2025-02-26 23:14:48,431 Train Loss: 0.0052306, Val Loss: 0.0050721 +2025-02-26 23:14:48,431 Epoch 64/2000 +2025-02-26 23:15:04,308 Current Learning Rate: 0.0076791340 +2025-02-26 23:15:04,308 Train Loss: 0.0052019, Val Loss: 0.0051178 +2025-02-26 23:15:04,308 Epoch 65/2000 +2025-02-26 23:15:20,109 Current Learning Rate: 0.0076124928 +2025-02-26 23:15:20,110 Train Loss: 0.0067107, Val Loss: 0.0173421 +2025-02-26 23:15:20,110 Epoch 66/2000 +2025-02-26 23:15:36,381 Current Learning Rate: 0.0075452071 +2025-02-26 23:15:36,615 Train Loss: 0.0060447, Val Loss: 0.0049854 +2025-02-26 23:15:36,616 Epoch 67/2000 +2025-02-26 23:15:52,633 Current Learning Rate: 0.0074772933 +2025-02-26 23:15:52,861 Train Loss: 0.0051053, Val Loss: 0.0049575 +2025-02-26 23:15:52,861 Epoch 68/2000 +2025-02-26 23:16:07,476 Current Learning Rate: 0.0074087684 +2025-02-26 23:16:07,476 Train Loss: 0.0050772, Val Loss: 0.0049791 +2025-02-26 23:16:07,477 Epoch 69/2000 +2025-02-26 23:16:23,469 Current Learning Rate: 0.0073396491 +2025-02-26 23:16:23,681 Train Loss: 0.0050969, Val Loss: 0.0049000 +2025-02-26 23:16:23,681 Epoch 70/2000 +2025-02-26 23:16:39,679 Current Learning Rate: 0.0072699525 +2025-02-26 23:16:39,680 Train Loss: 0.0050878, Val Loss: 0.0049612 +2025-02-26 23:16:39,680 Epoch 71/2000 +2025-02-26 23:16:56,082 Current Learning Rate: 0.0071996958 +2025-02-26 23:16:56,292 Train Loss: 0.0050263, Val Loss: 0.0048577 +2025-02-26 23:16:56,292 Epoch 72/2000 +2025-02-26 23:17:12,417 Current Learning Rate: 
0.0071288965 +2025-02-26 23:17:12,418 Train Loss: 0.0050535, Val Loss: 0.0049237 +2025-02-26 23:17:12,418 Epoch 73/2000 +2025-02-26 23:17:28,036 Current Learning Rate: 0.0070575718 +2025-02-26 23:17:28,261 Train Loss: 0.0049490, Val Loss: 0.0047865 +2025-02-26 23:17:28,261 Epoch 74/2000 +2025-02-26 23:17:43,890 Current Learning Rate: 0.0069857395 +2025-02-26 23:17:44,121 Train Loss: 0.0050867, Val Loss: 0.0047669 +2025-02-26 23:17:44,122 Epoch 75/2000 +2025-02-26 23:18:00,004 Current Learning Rate: 0.0069134172 +2025-02-26 23:18:00,232 Train Loss: 0.0048752, Val Loss: 0.0047543 +2025-02-26 23:18:00,232 Epoch 76/2000 +2025-02-26 23:18:16,198 Current Learning Rate: 0.0068406228 +2025-02-26 23:18:16,199 Train Loss: 0.0049354, Val Loss: 0.0055453 +2025-02-26 23:18:16,199 Epoch 77/2000 +2025-02-26 23:18:32,333 Current Learning Rate: 0.0067673742 +2025-02-26 23:18:32,548 Train Loss: 0.0049460, Val Loss: 0.0047490 +2025-02-26 23:18:32,548 Epoch 78/2000 +2025-02-26 23:18:48,540 Current Learning Rate: 0.0066936896 +2025-02-26 23:18:48,541 Train Loss: 0.0055781, Val Loss: 0.0050197 +2025-02-26 23:18:48,541 Epoch 79/2000 +2025-02-26 23:19:04,473 Current Learning Rate: 0.0066195871 +2025-02-26 23:19:04,697 Train Loss: 0.0048560, Val Loss: 0.0046541 +2025-02-26 23:19:04,697 Epoch 80/2000 +2025-02-26 23:19:20,505 Current Learning Rate: 0.0065450850 +2025-02-26 23:19:20,710 Train Loss: 0.0047515, Val Loss: 0.0046329 +2025-02-26 23:19:20,710 Epoch 81/2000 +2025-02-26 23:19:35,697 Current Learning Rate: 0.0064702016 +2025-02-26 23:19:35,962 Train Loss: 0.0047336, Val Loss: 0.0046113 +2025-02-26 23:19:35,962 Epoch 82/2000 +2025-02-26 23:19:50,565 Current Learning Rate: 0.0063949555 +2025-02-26 23:19:50,565 Train Loss: 0.0051102, Val Loss: 0.0130879 +2025-02-26 23:19:50,565 Epoch 83/2000 +2025-02-26 23:20:05,985 Current Learning Rate: 0.0063193652 +2025-02-26 23:20:06,188 Train Loss: 0.0054864, Val Loss: 0.0045766 +2025-02-26 23:20:06,189 Epoch 84/2000 +2025-02-26 23:20:21,725 
Current Learning Rate: 0.0062434494 +2025-02-26 23:20:21,725 Train Loss: 0.0046705, Val Loss: 0.0045803 +2025-02-26 23:20:21,725 Epoch 85/2000 +2025-02-26 23:20:37,103 Current Learning Rate: 0.0061672268 +2025-02-26 23:20:37,303 Train Loss: 0.0046643, Val Loss: 0.0045636 +2025-02-26 23:20:37,304 Epoch 86/2000 +2025-02-26 23:20:52,136 Current Learning Rate: 0.0060907162 +2025-02-26 23:20:52,368 Train Loss: 0.0047257, Val Loss: 0.0045249 +2025-02-26 23:20:52,369 Epoch 87/2000 +2025-02-26 23:21:07,460 Current Learning Rate: 0.0060139365 +2025-02-26 23:21:07,696 Train Loss: 0.0046464, Val Loss: 0.0045025 +2025-02-26 23:21:07,696 Epoch 88/2000 +2025-02-26 23:21:23,258 Current Learning Rate: 0.0059369066 +2025-02-26 23:21:23,258 Train Loss: 0.0047437, Val Loss: 0.0046241 +2025-02-26 23:21:23,259 Epoch 89/2000 +2025-02-26 23:21:38,578 Current Learning Rate: 0.0058596455 +2025-02-26 23:21:38,578 Train Loss: 0.0045910, Val Loss: 0.0045073 +2025-02-26 23:21:38,579 Epoch 90/2000 +2025-02-26 23:21:53,344 Current Learning Rate: 0.0057821723 +2025-02-26 23:21:53,344 Train Loss: 0.0046708, Val Loss: 0.0045936 +2025-02-26 23:21:53,345 Epoch 91/2000 +2025-02-26 23:22:09,317 Current Learning Rate: 0.0057045062 +2025-02-26 23:22:09,584 Train Loss: 0.0045733, Val Loss: 0.0044925 +2025-02-26 23:22:09,584 Epoch 92/2000 +2025-02-26 23:22:24,573 Current Learning Rate: 0.0056266662 +2025-02-26 23:22:24,574 Train Loss: 0.0047777, Val Loss: 0.0051093 +2025-02-26 23:22:24,574 Epoch 93/2000 +2025-02-26 23:22:39,411 Current Learning Rate: 0.0055486716 +2025-02-26 23:22:39,623 Train Loss: 0.0046334, Val Loss: 0.0044223 +2025-02-26 23:22:39,624 Epoch 94/2000 +2025-02-26 23:22:53,759 Current Learning Rate: 0.0054705416 +2025-02-26 23:22:53,760 Train Loss: 0.0046518, Val Loss: 0.0073283 +2025-02-26 23:22:53,760 Epoch 95/2000 +2025-02-26 23:23:09,170 Current Learning Rate: 0.0053922955 +2025-02-26 23:23:09,402 Train Loss: 0.0048158, Val Loss: 0.0043945 +2025-02-26 23:23:09,402 Epoch 96/2000 
+2025-02-26 23:23:24,163 Current Learning Rate: 0.0053139526 +2025-02-26 23:23:24,164 Train Loss: 0.0044761, Val Loss: 0.0044242 +2025-02-26 23:23:24,164 Epoch 97/2000 +2025-02-26 23:23:38,994 Current Learning Rate: 0.0052355323 +2025-02-26 23:23:39,226 Train Loss: 0.0046370, Val Loss: 0.0043719 +2025-02-26 23:23:39,226 Epoch 98/2000 +2025-02-26 23:23:53,588 Current Learning Rate: 0.0051570538 +2025-02-26 23:23:53,806 Train Loss: 0.0044642, Val Loss: 0.0043568 +2025-02-26 23:23:53,806 Epoch 99/2000 +2025-02-26 23:24:08,872 Current Learning Rate: 0.0050785366 +2025-02-26 23:24:08,872 Train Loss: 0.0044310, Val Loss: 0.0043796 +2025-02-26 23:24:08,873 Epoch 100/2000 +2025-02-26 23:24:23,491 Current Learning Rate: 0.0050000000 +2025-02-26 23:24:23,491 Train Loss: 0.0046603, Val Loss: 0.0048232 +2025-02-26 23:24:23,491 Epoch 101/2000 +2025-02-26 23:24:39,009 Current Learning Rate: 0.0049214634 +2025-02-26 23:24:39,211 Train Loss: 0.0044722, Val Loss: 0.0043156 +2025-02-26 23:24:39,212 Epoch 102/2000 +2025-02-26 23:24:54,478 Current Learning Rate: 0.0048429462 +2025-02-26 23:24:54,479 Train Loss: 0.0043985, Val Loss: 0.0044727 +2025-02-26 23:24:54,479 Epoch 103/2000 +2025-02-26 23:25:10,183 Current Learning Rate: 0.0047644677 +2025-02-26 23:25:10,184 Train Loss: 0.0044554, Val Loss: 0.0051151 +2025-02-26 23:25:10,184 Epoch 104/2000 +2025-02-26 23:25:25,660 Current Learning Rate: 0.0046860474 +2025-02-26 23:25:25,882 Train Loss: 0.0046577, Val Loss: 0.0042916 +2025-02-26 23:25:25,882 Epoch 105/2000 +2025-02-26 23:25:41,043 Current Learning Rate: 0.0046077045 +2025-02-26 23:25:41,044 Train Loss: 0.0044656, Val Loss: 0.0048602 +2025-02-26 23:25:41,044 Epoch 106/2000 +2025-02-26 23:25:55,922 Current Learning Rate: 0.0045294584 +2025-02-26 23:25:56,133 Train Loss: 0.0044009, Val Loss: 0.0042697 +2025-02-26 23:25:56,133 Epoch 107/2000 +2025-02-26 23:26:11,123 Current Learning Rate: 0.0044513284 +2025-02-26 23:26:11,124 Train Loss: 0.0043399, Val Loss: 0.0042729 +2025-02-26 
23:26:11,124 Epoch 108/2000 +2025-02-26 23:26:27,072 Current Learning Rate: 0.0043733338 +2025-02-26 23:26:27,073 Train Loss: 0.0044818, Val Loss: 0.0045844 +2025-02-26 23:26:27,073 Epoch 109/2000 +2025-02-26 23:26:42,354 Current Learning Rate: 0.0042954938 +2025-02-26 23:26:42,547 Train Loss: 0.0043424, Val Loss: 0.0042396 +2025-02-26 23:26:42,547 Epoch 110/2000 +2025-02-26 23:26:58,367 Current Learning Rate: 0.0042178277 +2025-02-26 23:26:58,368 Train Loss: 0.0043828, Val Loss: 0.0055151 +2025-02-26 23:26:58,368 Epoch 111/2000 +2025-02-26 23:27:13,686 Current Learning Rate: 0.0041403545 +2025-02-26 23:27:13,916 Train Loss: 0.0044522, Val Loss: 0.0042320 +2025-02-26 23:27:13,916 Epoch 112/2000 +2025-02-26 23:27:29,036 Current Learning Rate: 0.0040630934 +2025-02-26 23:27:29,037 Train Loss: 0.0043691, Val Loss: 0.0042377 +2025-02-26 23:27:29,037 Epoch 113/2000 +2025-02-26 23:27:44,083 Current Learning Rate: 0.0039860635 +2025-02-26 23:27:44,322 Train Loss: 0.0042860, Val Loss: 0.0042242 +2025-02-26 23:27:44,322 Epoch 114/2000 +2025-02-26 23:28:00,027 Current Learning Rate: 0.0039092838 +2025-02-26 23:28:00,028 Train Loss: 0.0043278, Val Loss: 0.0042345 +2025-02-26 23:28:00,028 Epoch 115/2000 +2025-02-26 23:28:15,649 Current Learning Rate: 0.0038327732 +2025-02-26 23:28:15,649 Train Loss: 0.0042767, Val Loss: 0.0042336 +2025-02-26 23:28:15,650 Epoch 116/2000 +2025-02-26 23:28:30,819 Current Learning Rate: 0.0037565506 +2025-02-26 23:28:30,819 Train Loss: 0.0044107, Val Loss: 0.0052645 +2025-02-26 23:28:30,820 Epoch 117/2000 +2025-02-26 23:28:46,462 Current Learning Rate: 0.0036806348 +2025-02-26 23:28:46,673 Train Loss: 0.0043608, Val Loss: 0.0041763 +2025-02-26 23:28:46,673 Epoch 118/2000 +2025-02-26 23:29:01,874 Current Learning Rate: 0.0036050445 +2025-02-26 23:29:01,874 Train Loss: 0.0042353, Val Loss: 0.0041855 +2025-02-26 23:29:01,875 Epoch 119/2000 +2025-02-26 23:29:17,483 Current Learning Rate: 0.0035297984 +2025-02-26 23:29:17,483 Train Loss: 0.0042912, Val 
Loss: 0.0042248 +2025-02-26 23:29:17,484 Epoch 120/2000 +2025-02-26 23:29:32,831 Current Learning Rate: 0.0034549150 +2025-02-26 23:29:33,042 Train Loss: 0.0042283, Val Loss: 0.0041757 +2025-02-26 23:29:33,042 Epoch 121/2000 +2025-02-26 23:29:48,305 Current Learning Rate: 0.0033804129 +2025-02-26 23:29:48,305 Train Loss: 0.0042442, Val Loss: 0.0041836 +2025-02-26 23:29:48,305 Epoch 122/2000 +2025-02-26 23:30:03,635 Current Learning Rate: 0.0033063104 +2025-02-26 23:30:03,635 Train Loss: 0.0042356, Val Loss: 0.0042356 +2025-02-26 23:30:03,636 Epoch 123/2000 +2025-02-26 23:30:19,262 Current Learning Rate: 0.0032326258 +2025-02-26 23:30:19,263 Train Loss: 0.0042884, Val Loss: 0.0043799 +2025-02-26 23:30:19,263 Epoch 124/2000 +2025-02-26 23:30:34,495 Current Learning Rate: 0.0031593772 +2025-02-26 23:30:34,742 Train Loss: 0.0042149, Val Loss: 0.0041511 +2025-02-26 23:30:34,742 Epoch 125/2000 +2025-02-26 23:30:50,127 Current Learning Rate: 0.0030865828 +2025-02-26 23:30:50,128 Train Loss: 0.0042119, Val Loss: 0.0042517 +2025-02-26 23:30:50,128 Epoch 126/2000 +2025-02-26 23:31:05,032 Current Learning Rate: 0.0030142605 +2025-02-26 23:31:05,033 Train Loss: 0.0042302, Val Loss: 0.0042144 +2025-02-26 23:31:05,033 Epoch 127/2000 +2025-02-26 23:31:20,265 Current Learning Rate: 0.0029424282 +2025-02-26 23:31:20,501 Train Loss: 0.0041984, Val Loss: 0.0041250 +2025-02-26 23:31:20,501 Epoch 128/2000 +2025-02-26 23:31:35,613 Current Learning Rate: 0.0028711035 +2025-02-26 23:31:35,613 Train Loss: 0.0041947, Val Loss: 0.0041512 +2025-02-26 23:31:35,613 Epoch 129/2000 +2025-02-26 23:31:50,934 Current Learning Rate: 0.0028003042 +2025-02-26 23:31:50,935 Train Loss: 0.0041934, Val Loss: 0.0042383 +2025-02-26 23:31:50,935 Epoch 130/2000 +2025-02-26 23:32:06,257 Current Learning Rate: 0.0027300475 +2025-02-26 23:32:06,500 Train Loss: 0.0041995, Val Loss: 0.0041176 +2025-02-26 23:32:06,500 Epoch 131/2000 +2025-02-26 23:32:21,051 Current Learning Rate: 0.0026603509 +2025-02-26 
23:32:21,051 Train Loss: 0.0041559, Val Loss: 0.0041251 +2025-02-26 23:32:21,051 Epoch 132/2000 +2025-02-26 23:32:36,010 Current Learning Rate: 0.0025912316 +2025-02-26 23:32:36,010 Train Loss: 0.0041727, Val Loss: 0.0041189 +2025-02-26 23:32:36,011 Epoch 133/2000 +2025-02-26 23:32:51,079 Current Learning Rate: 0.0025227067 +2025-02-26 23:32:51,281 Train Loss: 0.0041483, Val Loss: 0.0040987 +2025-02-26 23:32:51,282 Epoch 134/2000 +2025-02-26 23:33:06,708 Current Learning Rate: 0.0024547929 +2025-02-26 23:33:06,708 Train Loss: 0.0041674, Val Loss: 0.0041424 +2025-02-26 23:33:06,709 Epoch 135/2000 +2025-02-26 23:33:21,713 Current Learning Rate: 0.0023875072 +2025-02-26 23:33:21,939 Train Loss: 0.0041338, Val Loss: 0.0040881 +2025-02-26 23:33:21,939 Epoch 136/2000 +2025-02-26 23:33:37,179 Current Learning Rate: 0.0023208660 +2025-02-26 23:33:37,180 Train Loss: 0.0041424, Val Loss: 0.0040949 +2025-02-26 23:33:37,180 Epoch 137/2000 +2025-02-26 23:33:52,915 Current Learning Rate: 0.0022548859 +2025-02-26 23:33:53,129 Train Loss: 0.0041216, Val Loss: 0.0040699 +2025-02-26 23:33:53,130 Epoch 138/2000 +2025-02-26 23:34:08,843 Current Learning Rate: 0.0021895831 +2025-02-26 23:34:08,844 Train Loss: 0.0041390, Val Loss: 0.0040863 +2025-02-26 23:34:08,844 Epoch 139/2000 +2025-02-26 23:34:24,390 Current Learning Rate: 0.0021249737 +2025-02-26 23:34:24,391 Train Loss: 0.0041093, Val Loss: 0.0040930 +2025-02-26 23:34:24,391 Epoch 140/2000 +2025-02-26 23:34:39,670 Current Learning Rate: 0.0020610737 +2025-02-26 23:34:39,895 Train Loss: 0.0041072, Val Loss: 0.0040656 +2025-02-26 23:34:39,896 Epoch 141/2000 +2025-02-26 23:34:55,464 Current Learning Rate: 0.0019978989 +2025-02-26 23:34:55,464 Train Loss: 0.0041074, Val Loss: 0.0040737 +2025-02-26 23:34:55,464 Epoch 142/2000 +2025-02-26 23:35:10,991 Current Learning Rate: 0.0019354647 +2025-02-26 23:35:11,203 Train Loss: 0.0040963, Val Loss: 0.0040332 +2025-02-26 23:35:11,203 Epoch 143/2000 +2025-02-26 23:35:26,458 Current Learning 
Rate: 0.0018737867 +2025-02-26 23:35:26,458 Train Loss: 0.0041026, Val Loss: 0.0040888 +2025-02-26 23:35:26,458 Epoch 144/2000 +2025-02-26 23:35:42,230 Current Learning Rate: 0.0018128801 +2025-02-26 23:35:42,231 Train Loss: 0.0040867, Val Loss: 0.0040625 +2025-02-26 23:35:42,231 Epoch 145/2000 +2025-02-26 23:35:58,181 Current Learning Rate: 0.0017527598 +2025-02-26 23:35:58,182 Train Loss: 0.0040793, Val Loss: 0.0040481 +2025-02-26 23:35:58,182 Epoch 146/2000 +2025-02-26 23:36:14,568 Current Learning Rate: 0.0016934407 +2025-02-26 23:36:14,569 Train Loss: 0.0040767, Val Loss: 0.0040515 +2025-02-26 23:36:14,569 Epoch 147/2000 +2025-02-26 23:36:30,992 Current Learning Rate: 0.0016349374 +2025-02-26 23:36:30,993 Train Loss: 0.0040736, Val Loss: 0.0040399 +2025-02-26 23:36:30,993 Epoch 148/2000 +2025-02-26 23:36:46,843 Current Learning Rate: 0.0015772645 +2025-02-26 23:36:47,064 Train Loss: 0.0040668, Val Loss: 0.0040294 +2025-02-26 23:36:47,065 Epoch 149/2000 +2025-02-26 23:37:02,903 Current Learning Rate: 0.0015204360 +2025-02-26 23:37:03,125 Train Loss: 0.0040633, Val Loss: 0.0040167 +2025-02-26 23:37:03,125 Epoch 150/2000 +2025-02-26 23:37:18,914 Current Learning Rate: 0.0014644661 +2025-02-26 23:37:18,915 Train Loss: 0.0040596, Val Loss: 0.0040297 +2025-02-26 23:37:18,915 Epoch 151/2000 +2025-02-26 23:37:34,358 Current Learning Rate: 0.0014093685 +2025-02-26 23:37:34,359 Train Loss: 0.0040571, Val Loss: 0.0040279 +2025-02-26 23:37:34,359 Epoch 152/2000 +2025-02-26 23:37:50,213 Current Learning Rate: 0.0013551569 +2025-02-26 23:37:50,438 Train Loss: 0.0040493, Val Loss: 0.0040043 +2025-02-26 23:37:50,438 Epoch 153/2000 +2025-02-26 23:38:05,940 Current Learning Rate: 0.0013018445 +2025-02-26 23:38:06,190 Train Loss: 0.0040445, Val Loss: 0.0039963 +2025-02-26 23:38:06,190 Epoch 154/2000 +2025-02-26 23:38:21,039 Current Learning Rate: 0.0012494447 +2025-02-26 23:38:21,244 Train Loss: 0.0040418, Val Loss: 0.0039942 +2025-02-26 23:38:21,244 Epoch 155/2000 +2025-02-26 
23:38:35,927 Current Learning Rate: 0.0011979702 +2025-02-26 23:38:36,160 Train Loss: 0.0040376, Val Loss: 0.0039826 +2025-02-26 23:38:36,161 Epoch 156/2000 +2025-02-26 23:38:51,223 Current Learning Rate: 0.0011474338 +2025-02-26 23:38:51,444 Train Loss: 0.0040341, Val Loss: 0.0039817 +2025-02-26 23:38:51,444 Epoch 157/2000 +2025-02-26 23:39:06,731 Current Learning Rate: 0.0010978480 +2025-02-26 23:39:06,942 Train Loss: 0.0040305, Val Loss: 0.0039753 +2025-02-26 23:39:06,943 Epoch 158/2000 +2025-02-26 23:39:20,879 Current Learning Rate: 0.0010492249 +2025-02-26 23:39:21,084 Train Loss: 0.0040276, Val Loss: 0.0039706 +2025-02-26 23:39:21,084 Epoch 159/2000 +2025-02-26 23:39:36,144 Current Learning Rate: 0.0010015767 +2025-02-26 23:39:36,360 Train Loss: 0.0040246, Val Loss: 0.0039705 +2025-02-26 23:39:36,360 Epoch 160/2000 +2025-02-26 23:39:50,724 Current Learning Rate: 0.0009549150 +2025-02-26 23:39:50,947 Train Loss: 0.0040220, Val Loss: 0.0039664 +2025-02-26 23:39:50,947 Epoch 161/2000 +2025-02-26 23:40:06,228 Current Learning Rate: 0.0009092514 +2025-02-26 23:40:06,437 Train Loss: 0.0040183, Val Loss: 0.0039635 +2025-02-26 23:40:06,438 Epoch 162/2000 +2025-02-26 23:40:21,131 Current Learning Rate: 0.0008645971 +2025-02-26 23:40:21,330 Train Loss: 0.0040149, Val Loss: 0.0039600 +2025-02-26 23:40:21,331 Epoch 163/2000 +2025-02-26 23:40:36,677 Current Learning Rate: 0.0008209632 +2025-02-26 23:40:36,885 Train Loss: 0.0040122, Val Loss: 0.0039575 +2025-02-26 23:40:36,886 Epoch 164/2000 +2025-02-26 23:40:51,565 Current Learning Rate: 0.0007783604 +2025-02-26 23:40:51,761 Train Loss: 0.0040098, Val Loss: 0.0039554 +2025-02-26 23:40:51,761 Epoch 165/2000 +2025-02-26 23:41:07,031 Current Learning Rate: 0.0007367992 +2025-02-26 23:41:07,310 Train Loss: 0.0040075, Val Loss: 0.0039538 +2025-02-26 23:41:07,311 Epoch 166/2000 +2025-02-26 23:41:22,316 Current Learning Rate: 0.0006962899 +2025-02-26 23:41:22,523 Train Loss: 0.0040052, Val Loss: 0.0039513 +2025-02-26 
23:41:22,523 Epoch 167/2000 +2025-02-26 23:41:37,643 Current Learning Rate: 0.0006568424 +2025-02-26 23:41:37,845 Train Loss: 0.0040032, Val Loss: 0.0039486 +2025-02-26 23:41:37,845 Epoch 168/2000 +2025-02-26 23:41:52,760 Current Learning Rate: 0.0006184666 +2025-02-26 23:41:53,006 Train Loss: 0.0040009, Val Loss: 0.0039469 +2025-02-26 23:41:53,006 Epoch 169/2000 +2025-02-26 23:42:08,065 Current Learning Rate: 0.0005811718 +2025-02-26 23:42:08,450 Train Loss: 0.0039991, Val Loss: 0.0039449 +2025-02-26 23:42:08,450 Epoch 170/2000 +2025-02-26 23:42:22,531 Current Learning Rate: 0.0005449674 +2025-02-26 23:42:22,779 Train Loss: 0.0039967, Val Loss: 0.0039433 +2025-02-26 23:42:22,780 Epoch 171/2000 +2025-02-26 23:42:37,146 Current Learning Rate: 0.0005098621 +2025-02-26 23:42:37,362 Train Loss: 0.0039953, Val Loss: 0.0039417 +2025-02-26 23:42:37,362 Epoch 172/2000 +2025-02-26 23:42:51,830 Current Learning Rate: 0.0004758647 +2025-02-26 23:42:52,023 Train Loss: 0.0039939, Val Loss: 0.0039402 +2025-02-26 23:42:52,023 Epoch 173/2000 +2025-02-26 23:43:06,704 Current Learning Rate: 0.0004429836 +2025-02-26 23:43:06,915 Train Loss: 0.0039917, Val Loss: 0.0039393 +2025-02-26 23:43:06,915 Epoch 174/2000 +2025-02-26 23:43:21,528 Current Learning Rate: 0.0004112269 +2025-02-26 23:43:21,760 Train Loss: 0.0039902, Val Loss: 0.0039385 +2025-02-26 23:43:21,760 Epoch 175/2000 +2025-02-26 23:43:36,261 Current Learning Rate: 0.0003806023 +2025-02-26 23:43:36,504 Train Loss: 0.0039886, Val Loss: 0.0039369 +2025-02-26 23:43:36,504 Epoch 176/2000 +2025-02-26 23:43:50,968 Current Learning Rate: 0.0003511176 +2025-02-26 23:43:51,176 Train Loss: 0.0039871, Val Loss: 0.0039349 +2025-02-26 23:43:51,176 Epoch 177/2000 +2025-02-26 23:44:05,902 Current Learning Rate: 0.0003227798 +2025-02-26 23:44:06,107 Train Loss: 0.0039857, Val Loss: 0.0039330 +2025-02-26 23:44:06,107 Epoch 178/2000 +2025-02-26 23:44:20,311 Current Learning Rate: 0.0002955962 +2025-02-26 23:44:20,511 Train Loss: 0.0039844, Val 
Loss: 0.0039320 +2025-02-26 23:44:20,511 Epoch 179/2000 +2025-02-26 23:44:34,797 Current Learning Rate: 0.0002695732 +2025-02-26 23:44:34,989 Train Loss: 0.0039834, Val Loss: 0.0039308 +2025-02-26 23:44:34,990 Epoch 180/2000 +2025-02-26 23:44:49,246 Current Learning Rate: 0.0002447174 +2025-02-26 23:44:49,459 Train Loss: 0.0039822, Val Loss: 0.0039298 +2025-02-26 23:44:49,460 Epoch 181/2000 +2025-02-26 23:45:04,170 Current Learning Rate: 0.0002210349 +2025-02-26 23:45:04,391 Train Loss: 0.0039812, Val Loss: 0.0039290 +2025-02-26 23:45:04,391 Epoch 182/2000 +2025-02-26 23:45:19,313 Current Learning Rate: 0.0001985316 +2025-02-26 23:45:19,516 Train Loss: 0.0039800, Val Loss: 0.0039283 +2025-02-26 23:45:19,516 Epoch 183/2000 +2025-02-26 23:45:34,331 Current Learning Rate: 0.0001772129 +2025-02-26 23:45:34,523 Train Loss: 0.0039793, Val Loss: 0.0039272 +2025-02-26 23:45:34,524 Epoch 184/2000 +2025-02-26 23:45:49,864 Current Learning Rate: 0.0001570842 +2025-02-26 23:45:50,069 Train Loss: 0.0039783, Val Loss: 0.0039265 +2025-02-26 23:45:50,069 Epoch 185/2000 +2025-02-26 23:46:05,372 Current Learning Rate: 0.0001381504 +2025-02-26 23:46:05,595 Train Loss: 0.0039777, Val Loss: 0.0039260 +2025-02-26 23:46:05,595 Epoch 186/2000 +2025-02-26 23:46:19,933 Current Learning Rate: 0.0001204162 +2025-02-26 23:46:20,137 Train Loss: 0.0039771, Val Loss: 0.0039257 +2025-02-26 23:46:20,137 Epoch 187/2000 +2025-02-26 23:46:34,979 Current Learning Rate: 0.0001038859 +2025-02-26 23:46:35,181 Train Loss: 0.0039764, Val Loss: 0.0039246 +2025-02-26 23:46:35,182 Epoch 188/2000 +2025-02-26 23:46:50,123 Current Learning Rate: 0.0000885637 +2025-02-26 23:46:50,495 Train Loss: 0.0039762, Val Loss: 0.0039242 +2025-02-26 23:46:50,496 Epoch 189/2000 +2025-02-26 23:47:05,477 Current Learning Rate: 0.0000744534 +2025-02-26 23:47:05,673 Train Loss: 0.0039754, Val Loss: 0.0039238 +2025-02-26 23:47:05,673 Epoch 190/2000 +2025-02-26 23:47:21,881 Current Learning Rate: 0.0000615583 +2025-02-26 
23:47:22,079 Train Loss: 0.0039747, Val Loss: 0.0039234 +2025-02-26 23:47:22,079 Epoch 191/2000 +2025-02-26 23:47:36,932 Current Learning Rate: 0.0000498817 +2025-02-26 23:47:37,138 Train Loss: 0.0039747, Val Loss: 0.0039230 +2025-02-26 23:47:37,138 Epoch 192/2000 +2025-02-26 23:47:51,858 Current Learning Rate: 0.0000394265 +2025-02-26 23:47:52,062 Train Loss: 0.0039744, Val Loss: 0.0039227 +2025-02-26 23:47:52,063 Epoch 193/2000 +2025-02-26 23:48:07,390 Current Learning Rate: 0.0000301952 +2025-02-26 23:48:07,597 Train Loss: 0.0039742, Val Loss: 0.0039226 +2025-02-26 23:48:07,598 Epoch 194/2000 +2025-02-26 23:48:23,593 Current Learning Rate: 0.0000221902 +2025-02-26 23:48:23,818 Train Loss: 0.0039738, Val Loss: 0.0039224 +2025-02-26 23:48:23,818 Epoch 195/2000 +2025-02-26 23:48:38,695 Current Learning Rate: 0.0000154133 +2025-02-26 23:48:38,917 Train Loss: 0.0039739, Val Loss: 0.0039223 +2025-02-26 23:48:38,917 Epoch 196/2000 +2025-02-26 23:48:54,700 Current Learning Rate: 0.0000098664 +2025-02-26 23:48:54,927 Train Loss: 0.0039738, Val Loss: 0.0039221 +2025-02-26 23:48:54,927 Epoch 197/2000 +2025-02-26 23:49:10,594 Current Learning Rate: 0.0000055506 +2025-02-26 23:49:10,781 Train Loss: 0.0039739, Val Loss: 0.0039220 +2025-02-26 23:49:10,781 Epoch 198/2000 +2025-02-26 23:49:25,706 Current Learning Rate: 0.0000024672 +2025-02-26 23:49:25,922 Train Loss: 0.0039736, Val Loss: 0.0039219 +2025-02-26 23:49:25,922 Epoch 199/2000 +2025-02-26 23:49:41,205 Current Learning Rate: 0.0000006168 +2025-02-26 23:49:41,206 Train Loss: 0.0039735, Val Loss: 0.0039219 +2025-02-26 23:49:41,206 Epoch 200/2000 +2025-02-26 23:49:56,287 Current Learning Rate: 0.0000000000 +2025-02-26 23:49:56,288 Train Loss: 0.0039736, Val Loss: 0.0039220 +2025-02-26 23:49:56,288 Epoch 201/2000 +2025-02-26 23:50:12,391 Current Learning Rate: 0.0000006168 +2025-02-26 23:50:12,391 Train Loss: 0.0039737, Val Loss: 0.0039219 +2025-02-26 23:50:12,392 Epoch 202/2000 +2025-02-26 23:50:27,885 Current Learning 
Rate: 0.0000024672 +2025-02-26 23:50:27,885 Train Loss: 0.0039736, Val Loss: 0.0039219 +2025-02-26 23:50:27,885 Epoch 203/2000 +2025-02-26 23:50:43,683 Current Learning Rate: 0.0000055506 +2025-02-26 23:50:43,683 Train Loss: 0.0039733, Val Loss: 0.0039220 +2025-02-26 23:50:43,683 Epoch 204/2000 +2025-02-26 23:50:58,910 Current Learning Rate: 0.0000098664 +2025-02-26 23:50:59,121 Train Loss: 0.0039735, Val Loss: 0.0039219 +2025-02-26 23:50:59,121 Epoch 205/2000 +2025-02-26 23:51:14,242 Current Learning Rate: 0.0000154133 +2025-02-26 23:51:14,434 Train Loss: 0.0039734, Val Loss: 0.0039218 +2025-02-26 23:51:14,434 Epoch 206/2000 +2025-02-26 23:51:29,943 Current Learning Rate: 0.0000221902 +2025-02-26 23:51:30,165 Train Loss: 0.0039735, Val Loss: 0.0039218 +2025-02-26 23:51:30,166 Epoch 207/2000 +2025-02-26 23:51:44,737 Current Learning Rate: 0.0000301952 +2025-02-26 23:51:44,942 Train Loss: 0.0039733, Val Loss: 0.0039217 +2025-02-26 23:51:44,942 Epoch 208/2000 +2025-02-26 23:52:00,706 Current Learning Rate: 0.0000394265 +2025-02-26 23:52:01,012 Train Loss: 0.0039730, Val Loss: 0.0039214 +2025-02-26 23:52:01,012 Epoch 209/2000 +2025-02-26 23:52:16,154 Current Learning Rate: 0.0000498817 +2025-02-26 23:52:16,400 Train Loss: 0.0039728, Val Loss: 0.0039212 +2025-02-26 23:52:16,400 Epoch 210/2000 +2025-02-26 23:52:31,283 Current Learning Rate: 0.0000615583 +2025-02-26 23:52:31,663 Train Loss: 0.0039726, Val Loss: 0.0039209 +2025-02-26 23:52:31,663 Epoch 211/2000 +2025-02-26 23:52:46,773 Current Learning Rate: 0.0000744534 +2025-02-26 23:52:46,773 Train Loss: 0.0039720, Val Loss: 0.0039209 +2025-02-26 23:52:46,774 Epoch 212/2000 +2025-02-26 23:53:01,956 Current Learning Rate: 0.0000885637 +2025-02-26 23:53:03,734 Train Loss: 0.0039715, Val Loss: 0.0039198 +2025-02-26 23:53:03,734 Epoch 213/2000 +2025-02-26 23:53:18,599 Current Learning Rate: 0.0001038859 +2025-02-26 23:53:18,821 Train Loss: 0.0039710, Val Loss: 0.0039193 +2025-02-26 23:53:18,822 Epoch 214/2000 +2025-02-26 
23:53:33,582 Current Learning Rate: 0.0001204162 +2025-02-26 23:53:33,823 Train Loss: 0.0039700, Val Loss: 0.0039183 +2025-02-26 23:53:33,823 Epoch 215/2000 +2025-02-26 23:53:48,991 Current Learning Rate: 0.0001381504 +2025-02-26 23:53:49,218 Train Loss: 0.0039691, Val Loss: 0.0039179 +2025-02-26 23:53:49,219 Epoch 216/2000 +2025-02-26 23:54:04,769 Current Learning Rate: 0.0001570842 +2025-02-26 23:54:04,990 Train Loss: 0.0039685, Val Loss: 0.0039163 +2025-02-26 23:54:04,990 Epoch 217/2000 +2025-02-26 23:54:19,989 Current Learning Rate: 0.0001772129 +2025-02-26 23:54:20,275 Train Loss: 0.0039670, Val Loss: 0.0039148 +2025-02-26 23:54:20,276 Epoch 218/2000 +2025-02-26 23:54:35,876 Current Learning Rate: 0.0001985316 +2025-02-26 23:54:35,877 Train Loss: 0.0039812, Val Loss: 0.0039150 +2025-02-26 23:54:35,877 Epoch 219/2000 +2025-02-26 23:54:51,452 Current Learning Rate: 0.0002210349 +2025-02-26 23:54:51,453 Train Loss: 0.0039692, Val Loss: 0.0039214 +2025-02-26 23:54:51,453 Epoch 220/2000 +2025-02-26 23:55:06,959 Current Learning Rate: 0.0002447174 +2025-02-26 23:55:07,173 Train Loss: 0.0041357, Val Loss: 0.0039121 +2025-02-26 23:55:07,174 Epoch 221/2000 +2025-02-26 23:55:22,453 Current Learning Rate: 0.0002695732 +2025-02-26 23:55:22,752 Train Loss: 0.0039614, Val Loss: 0.0039087 +2025-02-26 23:55:22,752 Epoch 222/2000 +2025-02-26 23:55:37,709 Current Learning Rate: 0.0002955962 +2025-02-26 23:55:37,710 Train Loss: 0.0040746, Val Loss: 0.0039091 +2025-02-26 23:55:37,710 Epoch 223/2000 +2025-02-26 23:55:53,009 Current Learning Rate: 0.0003227798 +2025-02-26 23:55:53,009 Train Loss: 0.0039579, Val Loss: 0.0039108 +2025-02-26 23:55:53,009 Epoch 224/2000 +2025-02-26 23:56:08,024 Current Learning Rate: 0.0003511176 +2025-02-26 23:56:08,025 Train Loss: 0.0044362, Val Loss: 0.0039166 +2025-02-26 23:56:08,025 Epoch 225/2000 +2025-02-26 23:56:23,325 Current Learning Rate: 0.0003806023 +2025-02-26 23:56:23,668 Train Loss: 0.0039619, Val Loss: 0.0039064 +2025-02-26 
23:56:23,668 Epoch 226/2000 +2025-02-26 23:56:38,868 Current Learning Rate: 0.0004112269 +2025-02-26 23:56:38,868 Train Loss: 0.0039975, Val Loss: 0.0039483 +2025-02-26 23:56:38,868 Epoch 227/2000 +2025-02-26 23:56:54,423 Current Learning Rate: 0.0004429836 +2025-02-26 23:56:54,626 Train Loss: 0.0040184, Val Loss: 0.0039008 +2025-02-26 23:56:54,626 Epoch 228/2000 +2025-02-26 23:57:10,753 Current Learning Rate: 0.0004758647 +2025-02-26 23:57:10,998 Train Loss: 0.0040626, Val Loss: 0.0038950 +2025-02-26 23:57:10,998 Epoch 229/2000 +2025-02-26 23:57:26,516 Current Learning Rate: 0.0005098621 +2025-02-26 23:57:26,516 Train Loss: 0.0040096, Val Loss: 0.0039316 +2025-02-26 23:57:26,517 Epoch 230/2000 +2025-02-26 23:57:42,715 Current Learning Rate: 0.0005449674 +2025-02-26 23:57:42,965 Train Loss: 0.0040534, Val Loss: 0.0038937 +2025-02-26 23:57:42,965 Epoch 231/2000 +2025-02-26 23:57:58,258 Current Learning Rate: 0.0005811718 +2025-02-26 23:57:58,258 Train Loss: 0.0040601, Val Loss: 0.0038967 +2025-02-26 23:57:58,258 Epoch 232/2000 +2025-02-26 23:58:14,246 Current Learning Rate: 0.0006184666 +2025-02-26 23:58:14,247 Train Loss: 0.0040397, Val Loss: 0.0038938 +2025-02-26 23:58:14,247 Epoch 233/2000 +2025-02-26 23:58:31,387 Current Learning Rate: 0.0006568424 +2025-02-26 23:58:31,388 Train Loss: 0.0041919, Val Loss: 0.0039044 +2025-02-26 23:58:31,388 Epoch 234/2000 +2025-02-26 23:58:48,175 Current Learning Rate: 0.0006962899 +2025-02-26 23:58:48,176 Train Loss: 0.0039508, Val Loss: 0.0041042 +2025-02-26 23:58:48,176 Epoch 235/2000 +2025-02-26 23:59:04,830 Current Learning Rate: 0.0007367992 +2025-02-26 23:59:05,068 Train Loss: 0.0045991, Val Loss: 0.0038917 +2025-02-26 23:59:05,068 Epoch 236/2000 +2025-02-26 23:59:21,336 Current Learning Rate: 0.0007783604 +2025-02-26 23:59:21,558 Train Loss: 0.0039289, Val Loss: 0.0038716 +2025-02-26 23:59:21,559 Epoch 237/2000 +2025-02-26 23:59:37,680 Current Learning Rate: 0.0008209632 +2025-02-26 23:59:37,681 Train Loss: 0.0039685, Val 
Loss: 0.0038821 +2025-02-26 23:59:37,681 Epoch 238/2000 +2025-02-26 23:59:54,312 Current Learning Rate: 0.0008645971 +2025-02-26 23:59:54,313 Train Loss: 0.0039456, Val Loss: 0.0038923 +2025-02-26 23:59:54,313 Epoch 239/2000 +2025-02-27 00:00:11,231 Current Learning Rate: 0.0009092514 +2025-02-27 00:00:11,232 Train Loss: 0.0040346, Val Loss: 0.0039010 +2025-02-27 00:00:11,232 Epoch 240/2000 +2025-02-27 00:00:28,705 Current Learning Rate: 0.0009549150 +2025-02-27 00:00:28,973 Train Loss: 0.0040069, Val Loss: 0.0038616 +2025-02-27 00:00:28,973 Epoch 241/2000 +2025-02-27 00:00:45,819 Current Learning Rate: 0.0010015767 +2025-02-27 00:00:45,819 Train Loss: 0.0040074, Val Loss: 0.0039186 +2025-02-27 00:00:45,819 Epoch 242/2000 +2025-02-27 00:01:02,595 Current Learning Rate: 0.0010492249 +2025-02-27 00:01:02,852 Train Loss: 0.0039158, Val Loss: 0.0038531 +2025-02-27 00:01:02,852 Epoch 243/2000 +2025-02-27 00:01:19,518 Current Learning Rate: 0.0010978480 +2025-02-27 00:01:19,519 Train Loss: 0.0039864, Val Loss: 0.0039181 +2025-02-27 00:01:19,519 Epoch 244/2000 +2025-02-27 00:01:35,721 Current Learning Rate: 0.0011474338 +2025-02-27 00:01:35,940 Train Loss: 0.0039439, Val Loss: 0.0038478 +2025-02-27 00:01:35,941 Epoch 245/2000 +2025-02-27 00:01:52,244 Current Learning Rate: 0.0011979702 +2025-02-27 00:01:52,550 Train Loss: 0.0039204, Val Loss: 0.0038430 +2025-02-27 00:01:52,550 Epoch 246/2000 +2025-02-27 00:02:09,030 Current Learning Rate: 0.0012494447 +2025-02-27 00:02:09,031 Train Loss: 7.4882148, Val Loss: 0.2311147 +2025-02-27 00:02:09,031 Epoch 247/2000 +2025-02-27 00:02:26,268 Current Learning Rate: 0.0013018445 +2025-02-27 00:02:26,269 Train Loss: 0.0760398, Val Loss: 0.0379350 +2025-02-27 00:02:26,269 Epoch 248/2000 +2025-02-27 00:02:43,838 Current Learning Rate: 0.0013551569 +2025-02-27 00:02:43,838 Train Loss: 0.0322420, Val Loss: 0.0264095 +2025-02-27 00:02:43,838 Epoch 249/2000 +2025-02-27 00:03:01,538 Current Learning Rate: 0.0014093685 +2025-02-27 
00:03:01,538 Train Loss: 0.0240367, Val Loss: 0.0208316 +2025-02-27 00:03:01,538 Epoch 250/2000 +2025-02-27 00:03:19,763 Current Learning Rate: 0.0014644661 +2025-02-27 00:03:19,763 Train Loss: 0.0195346, Val Loss: 0.0173975 +2025-02-27 00:03:19,763 Epoch 251/2000 +2025-02-27 00:03:36,364 Current Learning Rate: 0.0015204360 +2025-02-27 00:03:36,364 Train Loss: 0.0166332, Val Loss: 0.0150732 +2025-02-27 00:03:36,364 Epoch 252/2000 +2025-02-27 00:03:54,423 Current Learning Rate: 0.0015772645 +2025-02-27 00:03:54,424 Train Loss: 0.0145907, Val Loss: 0.0133829 +2025-02-27 00:03:54,424 Epoch 253/2000 +2025-02-27 00:04:11,043 Current Learning Rate: 0.0016349374 +2025-02-27 00:04:11,044 Train Loss: 0.0130615, Val Loss: 0.0120901 +2025-02-27 00:04:11,044 Epoch 254/2000 +2025-02-27 00:04:29,008 Current Learning Rate: 0.0016934407 +2025-02-27 00:04:29,009 Train Loss: 0.0118759, Val Loss: 0.0110670 +2025-02-27 00:04:29,009 Epoch 255/2000 +2025-02-27 00:04:45,888 Current Learning Rate: 0.0017527598 +2025-02-27 00:04:45,889 Train Loss: 0.0109225, Val Loss: 0.0102329 +2025-02-27 00:04:45,889 Epoch 256/2000 +2025-02-27 00:05:02,855 Current Learning Rate: 0.0018128801 +2025-02-27 00:05:02,855 Train Loss: 0.0101384, Val Loss: 0.0095348 +2025-02-27 00:05:02,856 Epoch 257/2000 +2025-02-27 00:05:19,446 Current Learning Rate: 0.0018737867 +2025-02-27 00:05:19,446 Train Loss: 0.0094843, Val Loss: 0.0089509 +2025-02-27 00:05:19,447 Epoch 258/2000 +2025-02-27 00:05:35,840 Current Learning Rate: 0.0019354647 +2025-02-27 00:05:35,840 Train Loss: 0.0089262, Val Loss: 0.0084543 +2025-02-27 00:05:35,840 Epoch 259/2000 +2025-02-27 00:05:53,819 Current Learning Rate: 0.0019978989 +2025-02-27 00:05:53,819 Train Loss: 0.0084483, Val Loss: 0.0080235 +2025-02-27 00:05:53,819 Epoch 260/2000 +2025-02-27 00:06:11,304 Current Learning Rate: 0.0020610737 +2025-02-27 00:06:11,304 Train Loss: 0.0080348, Val Loss: 0.0076507 +2025-02-27 00:06:11,305 Epoch 261/2000 +2025-02-27 00:06:27,660 Current Learning 
Rate: 0.0021249737 +2025-02-27 00:06:27,660 Train Loss: 0.0076693, Val Loss: 0.0073185 +2025-02-27 00:06:27,660 Epoch 262/2000 +2025-02-27 00:06:43,463 Current Learning Rate: 0.0021895831 +2025-02-27 00:06:43,463 Train Loss: 0.0073539, Val Loss: 0.0070368 +2025-02-27 00:06:43,464 Epoch 263/2000 +2025-02-27 00:06:58,821 Current Learning Rate: 0.0022548859 +2025-02-27 00:06:58,822 Train Loss: 0.0070730, Val Loss: 0.0067815 +2025-02-27 00:06:58,822 Epoch 264/2000 +2025-02-27 00:07:15,042 Current Learning Rate: 0.0023208660 +2025-02-27 00:07:15,042 Train Loss: 0.0068241, Val Loss: 0.0065553 +2025-02-27 00:07:15,043 Epoch 265/2000 +2025-02-27 00:07:30,892 Current Learning Rate: 0.0023875072 +2025-02-27 00:07:30,892 Train Loss: 0.0066024, Val Loss: 0.0063535 +2025-02-27 00:07:30,892 Epoch 266/2000 +2025-02-27 00:07:46,653 Current Learning Rate: 0.0024547929 +2025-02-27 00:07:46,654 Train Loss: 0.0064048, Val Loss: 0.0061710 +2025-02-27 00:07:46,654 Epoch 267/2000 +2025-02-27 00:08:02,211 Current Learning Rate: 0.0025227067 +2025-02-27 00:08:02,212 Train Loss: 0.0062252, Val Loss: 0.0060050 +2025-02-27 00:08:02,212 Epoch 268/2000 +2025-02-27 00:08:18,623 Current Learning Rate: 0.0025912316 +2025-02-27 00:08:18,623 Train Loss: 0.0060628, Val Loss: 0.0058580 +2025-02-27 00:08:18,623 Epoch 269/2000 +2025-02-27 00:08:34,711 Current Learning Rate: 0.0026603509 +2025-02-27 00:08:34,712 Train Loss: 0.0059162, Val Loss: 0.0057245 +2025-02-27 00:08:34,713 Epoch 270/2000 +2025-02-27 00:08:51,077 Current Learning Rate: 0.0027300475 +2025-02-27 00:08:51,078 Train Loss: 0.0057823, Val Loss: 0.0055998 +2025-02-27 00:08:51,078 Epoch 271/2000 +2025-02-27 00:09:06,791 Current Learning Rate: 0.0028003042 +2025-02-27 00:09:06,792 Train Loss: 0.0056594, Val Loss: 0.0054896 +2025-02-27 00:09:06,792 Epoch 272/2000 +2025-02-27 00:09:22,566 Current Learning Rate: 0.0028711035 +2025-02-27 00:09:22,566 Train Loss: 0.0055468, Val Loss: 0.0053848 +2025-02-27 00:09:22,567 Epoch 273/2000 +2025-02-27 
00:09:37,675 Current Learning Rate: 0.0029424282 +2025-02-27 00:09:37,676 Train Loss: 0.0054435, Val Loss: 0.0052875 +2025-02-27 00:09:37,676 Epoch 274/2000 +2025-02-27 00:09:54,146 Current Learning Rate: 0.0030142605 +2025-02-27 00:09:54,147 Train Loss: 0.0053486, Val Loss: 0.0051999 +2025-02-27 00:09:54,148 Epoch 275/2000 +2025-02-27 00:10:09,682 Current Learning Rate: 0.0030865828 +2025-02-27 00:10:09,683 Train Loss: 0.0052612, Val Loss: 0.0051209 +2025-02-27 00:10:09,683 Epoch 276/2000 +2025-02-27 00:10:25,962 Current Learning Rate: 0.0031593772 +2025-02-27 00:10:25,962 Train Loss: 0.0051798, Val Loss: 0.0050435 +2025-02-27 00:10:25,963 Epoch 277/2000 +2025-02-27 00:10:42,480 Current Learning Rate: 0.0032326258 +2025-02-27 00:10:42,480 Train Loss: 0.0051036, Val Loss: 0.0049720 +2025-02-27 00:10:42,480 Epoch 278/2000 +2025-02-27 00:10:58,245 Current Learning Rate: 0.0033063104 +2025-02-27 00:10:58,246 Train Loss: 0.0050330, Val Loss: 0.0049101 +2025-02-27 00:10:58,246 Epoch 279/2000 +2025-02-27 00:11:14,383 Current Learning Rate: 0.0033804129 +2025-02-27 00:11:14,384 Train Loss: 0.0050017, Val Loss: 0.0053700 +2025-02-27 00:11:14,384 Epoch 280/2000 +2025-02-27 00:11:30,102 Current Learning Rate: 0.0034549150 +2025-02-27 00:11:30,102 Train Loss: 0.0249143, Val Loss: 0.0054002 +2025-02-27 00:11:30,102 Epoch 281/2000 +2025-02-27 00:11:46,237 Current Learning Rate: 0.0035297984 +2025-02-27 00:11:46,238 Train Loss: 0.0049927, Val Loss: 0.0047644 +2025-02-27 00:11:46,238 Epoch 282/2000 +2025-02-27 00:12:01,294 Current Learning Rate: 0.0036050445 +2025-02-27 00:12:01,294 Train Loss: 0.0047924, Val Loss: 0.0046679 +2025-02-27 00:12:01,295 Epoch 283/2000 +2025-02-27 00:12:16,499 Current Learning Rate: 0.0036806348 +2025-02-27 00:12:16,499 Train Loss: 0.0047135, Val Loss: 0.0046068 +2025-02-27 00:12:16,499 Epoch 284/2000 +2025-02-27 00:12:32,196 Current Learning Rate: 0.0037565506 +2025-02-27 00:12:32,197 Train Loss: 0.0046564, Val Loss: 0.0045572 +2025-02-27 
00:12:32,197 Epoch 285/2000 +2025-02-27 00:12:47,300 Current Learning Rate: 0.0038327732 +2025-02-27 00:12:47,301 Train Loss: 0.0046093, Val Loss: 0.0045163 +2025-02-27 00:12:47,301 Epoch 286/2000 +2025-02-27 00:13:02,404 Current Learning Rate: 0.0039092838 +2025-02-27 00:13:02,404 Train Loss: 0.0045694, Val Loss: 0.0044800 +2025-02-27 00:13:02,404 Epoch 287/2000 +2025-02-27 00:13:18,532 Current Learning Rate: 0.0039860635 +2025-02-27 00:13:18,532 Train Loss: 0.0045326, Val Loss: 0.0044478 +2025-02-27 00:13:18,532 Epoch 288/2000 +2025-02-27 00:13:34,261 Current Learning Rate: 0.0040630934 +2025-02-27 00:13:34,262 Train Loss: 0.0044997, Val Loss: 0.0044170 +2025-02-27 00:13:34,262 Epoch 289/2000 +2025-02-27 00:13:49,857 Current Learning Rate: 0.0041403545 +2025-02-27 00:13:49,857 Train Loss: 0.0044696, Val Loss: 0.0043880 +2025-02-27 00:13:49,858 Epoch 290/2000 +2025-02-27 00:14:04,788 Current Learning Rate: 0.0042178277 +2025-02-27 00:14:04,788 Train Loss: 0.0044403, Val Loss: 0.0043616 +2025-02-27 00:14:04,788 Epoch 291/2000 +2025-02-27 00:14:19,752 Current Learning Rate: 0.0042954938 +2025-02-27 00:14:19,753 Train Loss: 0.0044140, Val Loss: 0.0043361 +2025-02-27 00:14:19,753 Epoch 292/2000 +2025-02-27 00:14:35,053 Current Learning Rate: 0.0043733338 +2025-02-27 00:14:35,054 Train Loss: 0.0043904, Val Loss: 0.0043308 +2025-02-27 00:14:35,054 Epoch 293/2000 +2025-02-27 00:14:50,789 Current Learning Rate: 0.0044513284 +2025-02-27 00:14:50,789 Train Loss: 0.0065932, Val Loss: 0.0043609 +2025-02-27 00:14:50,789 Epoch 294/2000 +2025-02-27 00:15:06,085 Current Learning Rate: 0.0045294584 +2025-02-27 00:15:06,085 Train Loss: 0.0043662, Val Loss: 0.0042753 +2025-02-27 00:15:06,086 Epoch 295/2000 +2025-02-27 00:15:21,269 Current Learning Rate: 0.0046077045 +2025-02-27 00:15:21,270 Train Loss: 0.0043234, Val Loss: 0.0042496 +2025-02-27 00:15:21,270 Epoch 296/2000 +2025-02-27 00:15:36,372 Current Learning Rate: 0.0046860474 +2025-02-27 00:15:36,372 Train Loss: 0.0043014, Val 
Loss: 0.0042382 +2025-02-27 00:15:36,372 Epoch 297/2000 +2025-02-27 00:15:52,281 Current Learning Rate: 0.0047644677 +2025-02-27 00:15:52,282 Train Loss: 0.0054425, Val Loss: 0.0244873 +2025-02-27 00:15:52,282 Epoch 298/2000 +2025-02-27 00:16:07,365 Current Learning Rate: 0.0048429462 +2025-02-27 00:16:07,365 Train Loss: 0.0054852, Val Loss: 0.0042289 +2025-02-27 00:16:07,366 Epoch 299/2000 +2025-02-27 00:16:22,600 Current Learning Rate: 0.0049214634 +2025-02-27 00:16:22,600 Train Loss: 0.0042616, Val Loss: 0.0041858 +2025-02-27 00:16:22,600 Epoch 300/2000 +2025-02-27 00:16:38,477 Current Learning Rate: 0.0050000000 +2025-02-27 00:16:38,477 Train Loss: 0.0042309, Val Loss: 0.0041634 +2025-02-27 00:16:38,477 Epoch 301/2000 +2025-02-27 00:16:53,482 Current Learning Rate: 0.0050785366 +2025-02-27 00:16:53,482 Train Loss: 0.0042103, Val Loss: 0.0041448 +2025-02-27 00:16:53,482 Epoch 302/2000 +2025-02-27 00:17:09,209 Current Learning Rate: 0.0051570538 +2025-02-27 00:17:09,209 Train Loss: 0.0041957, Val Loss: 0.0041610 +2025-02-27 00:17:09,209 Epoch 303/2000 +2025-02-27 00:17:24,358 Current Learning Rate: 0.0052355323 +2025-02-27 00:17:24,358 Train Loss: 0.0056791, Val Loss: 0.0041603 +2025-02-27 00:17:24,359 Epoch 304/2000 +2025-02-27 00:17:38,954 Current Learning Rate: 0.0053139526 +2025-02-27 00:17:38,955 Train Loss: 0.0041859, Val Loss: 0.0041093 +2025-02-27 00:17:38,955 Epoch 305/2000 +2025-02-27 00:17:53,801 Current Learning Rate: 0.0053922955 +2025-02-27 00:17:53,801 Train Loss: 0.0041557, Val Loss: 0.0040908 +2025-02-27 00:17:53,801 Epoch 306/2000 +2025-02-27 00:18:09,646 Current Learning Rate: 0.0054705416 +2025-02-27 00:18:09,647 Train Loss: 0.0041396, Val Loss: 0.0040775 +2025-02-27 00:18:09,647 Epoch 307/2000 +2025-02-27 00:18:24,506 Current Learning Rate: 0.0055486716 +2025-02-27 00:18:24,507 Train Loss: 0.0041374, Val Loss: 0.0041645 +2025-02-27 00:18:24,507 Epoch 308/2000 +2025-02-27 00:18:40,059 Current Learning Rate: 0.0056266662 +2025-02-27 
00:18:40,059 Train Loss: 0.0055520, Val Loss: 0.0040820 +2025-02-27 00:18:40,060 Epoch 309/2000 +2025-02-27 00:18:55,421 Current Learning Rate: 0.0057045062 +2025-02-27 00:18:55,421 Train Loss: 0.0041176, Val Loss: 0.0040563 +2025-02-27 00:18:55,421 Epoch 310/2000 +2025-02-27 00:19:10,631 Current Learning Rate: 0.0057821723 +2025-02-27 00:19:10,631 Train Loss: 0.0041036, Val Loss: 0.0040432 +2025-02-27 00:19:10,631 Epoch 311/2000 +2025-02-27 00:19:25,906 Current Learning Rate: 0.0058596455 +2025-02-27 00:19:25,907 Train Loss: 0.0040885, Val Loss: 0.0040301 +2025-02-27 00:19:25,907 Epoch 312/2000 +2025-02-27 00:19:40,928 Current Learning Rate: 0.0059369066 +2025-02-27 00:19:40,929 Train Loss: 0.0047728, Val Loss: 0.0044470 +2025-02-27 00:19:40,929 Epoch 313/2000 +2025-02-27 00:19:55,824 Current Learning Rate: 0.0060139365 +2025-02-27 00:19:55,824 Train Loss: 0.0041916, Val Loss: 0.0040167 +2025-02-27 00:19:55,824 Epoch 314/2000 +2025-02-27 00:20:10,658 Current Learning Rate: 0.0060907162 +2025-02-27 00:20:10,658 Train Loss: 0.0040616, Val Loss: 0.0040019 +2025-02-27 00:20:10,659 Epoch 315/2000 +2025-02-27 00:20:26,046 Current Learning Rate: 0.0061672268 +2025-02-27 00:20:26,046 Train Loss: 0.0040490, Val Loss: 0.0039913 +2025-02-27 00:20:26,047 Epoch 316/2000 +2025-02-27 00:20:41,147 Current Learning Rate: 0.0062434494 +2025-02-27 00:20:41,147 Train Loss: 0.0040426, Val Loss: 0.0039842 +2025-02-27 00:20:41,147 Epoch 317/2000 +2025-02-27 00:20:57,083 Current Learning Rate: 0.0063193652 +2025-02-27 00:20:57,084 Train Loss: 0.0046963, Val Loss: 0.0040013 +2025-02-27 00:20:57,084 Epoch 318/2000 +2025-02-27 00:21:12,842 Current Learning Rate: 0.0063949555 +2025-02-27 00:21:12,843 Train Loss: 0.0040326, Val Loss: 0.0039695 +2025-02-27 00:21:12,843 Epoch 319/2000 +2025-02-27 00:21:28,142 Current Learning Rate: 0.0064702016 +2025-02-27 00:21:28,143 Train Loss: 0.0040169, Val Loss: 0.0039603 +2025-02-27 00:21:28,143 Epoch 320/2000 +2025-02-27 00:21:43,760 Current Learning 
Rate: 0.0065450850 +2025-02-27 00:21:43,760 Train Loss: 0.0040093, Val Loss: 0.0039569 +2025-02-27 00:21:43,760 Epoch 321/2000 +2025-02-27 00:21:59,908 Current Learning Rate: 0.0066195871 +2025-02-27 00:21:59,909 Train Loss: 0.0041130, Val Loss: 0.0039506 +2025-02-27 00:21:59,909 Epoch 322/2000 +2025-02-27 00:22:16,070 Current Learning Rate: 0.0066936896 +2025-02-27 00:22:16,070 Train Loss: 0.0040011, Val Loss: 0.0039480 +2025-02-27 00:22:16,071 Epoch 323/2000 +2025-02-27 00:22:31,818 Current Learning Rate: 0.0067673742 +2025-02-27 00:22:31,819 Train Loss: 0.0046609, Val Loss: 0.0039492 +2025-02-27 00:22:31,819 Epoch 324/2000 +2025-02-27 00:22:47,401 Current Learning Rate: 0.0068406228 +2025-02-27 00:22:47,401 Train Loss: 0.0039924, Val Loss: 0.0039286 +2025-02-27 00:22:47,401 Epoch 325/2000 +2025-02-27 00:23:03,205 Current Learning Rate: 0.0069134172 +2025-02-27 00:23:03,206 Train Loss: 0.0039779, Val Loss: 0.0039293 +2025-02-27 00:23:03,206 Epoch 326/2000 +2025-02-27 00:23:19,319 Current Learning Rate: 0.0069857395 +2025-02-27 00:23:19,319 Train Loss: 0.0040672, Val Loss: 0.0039339 +2025-02-27 00:23:19,320 Epoch 327/2000 +2025-02-27 00:23:34,726 Current Learning Rate: 0.0070575718 +2025-02-27 00:23:34,726 Train Loss: 0.0039685, Val Loss: 0.0039193 +2025-02-27 00:23:34,727 Epoch 328/2000 +2025-02-27 00:23:50,515 Current Learning Rate: 0.0071288965 +2025-02-27 00:23:50,515 Train Loss: 0.0041364, Val Loss: 0.0039138 +2025-02-27 00:23:50,515 Epoch 329/2000 +2025-02-27 00:24:06,399 Current Learning Rate: 0.0071996958 +2025-02-27 00:24:06,399 Train Loss: 0.0039545, Val Loss: 0.0039061 +2025-02-27 00:24:06,400 Epoch 330/2000 +2025-02-27 00:24:21,720 Current Learning Rate: 0.0072699525 +2025-02-27 00:24:21,721 Train Loss: 0.0040319, Val Loss: 0.0039481 +2025-02-27 00:24:21,721 Epoch 331/2000 +2025-02-27 00:24:37,338 Current Learning Rate: 0.0073396491 +2025-02-27 00:24:37,339 Train Loss: 0.0039674, Val Loss: 0.0041066 +2025-02-27 00:24:37,339 Epoch 332/2000 +2025-02-27 
00:24:52,530 Current Learning Rate: 0.0074087684 +2025-02-27 00:24:52,531 Train Loss: 0.0040453, Val Loss: 0.0039216 +2025-02-27 00:24:52,531 Epoch 333/2000 +2025-02-27 00:25:08,595 Current Learning Rate: 0.0074772933 +2025-02-27 00:25:08,596 Train Loss: 0.0042682, Val Loss: 0.0039440 +2025-02-27 00:25:08,596 Epoch 334/2000 +2025-02-27 00:25:24,124 Current Learning Rate: 0.0075452071 +2025-02-27 00:25:24,124 Train Loss: 0.0039464, Val Loss: 0.0038768 +2025-02-27 00:25:24,125 Epoch 335/2000 +2025-02-27 00:25:40,553 Current Learning Rate: 0.0076124928 +2025-02-27 00:25:40,553 Train Loss: 0.0040198, Val Loss: 0.0038725 +2025-02-27 00:25:40,553 Epoch 336/2000 +2025-02-27 00:25:55,782 Current Learning Rate: 0.0076791340 +2025-02-27 00:25:55,783 Train Loss: 0.0039149, Val Loss: 0.0038639 +2025-02-27 00:25:55,783 Epoch 337/2000 +2025-02-27 00:26:11,855 Current Learning Rate: 0.0077451141 +2025-02-27 00:26:11,855 Train Loss: 0.0048437, Val Loss: 0.0044514 +2025-02-27 00:26:11,856 Epoch 338/2000 +2025-02-27 00:26:27,202 Current Learning Rate: 0.0078104169 +2025-02-27 00:26:27,202 Train Loss: 518981441.9665785, Val Loss: 3681394.9391727 +2025-02-27 00:26:27,202 Epoch 339/2000 +2025-02-27 00:26:42,877 Current Learning Rate: 0.0078750263 +2025-02-27 00:26:42,878 Train Loss: 716405.9042180, Val Loss: 289223.5523114 +2025-02-27 00:26:42,878 Epoch 340/2000 +2025-02-27 00:26:59,323 Current Learning Rate: 0.0079389263 +2025-02-27 00:26:59,323 Train Loss: 236672.5192866, Val Loss: 171750.1885645 +2025-02-27 00:26:59,324 Epoch 341/2000 +2025-02-27 00:27:16,010 Current Learning Rate: 0.0080021011 +2025-02-27 00:27:16,011 Train Loss: 156013.4851217, Val Loss: 123025.2980535 +2025-02-27 00:27:16,012 Epoch 342/2000 +2025-02-27 00:27:32,263 Current Learning Rate: 0.0080645353 +2025-02-27 00:27:32,263 Train Loss: 119394.0066126, Val Loss: 96691.0523114 +2025-02-27 00:27:32,264 Epoch 343/2000 +2025-02-27 00:27:47,908 Current Learning Rate: 0.0081262133 +2025-02-27 00:27:47,908 Train Loss: 
96692.0959824, Val Loss: 79766.8978102 +2025-02-27 00:27:47,908 Epoch 344/2000 +2025-02-27 00:28:03,459 Current Learning Rate: 0.0081871199 +2025-02-27 00:28:03,460 Train Loss: 81023.2164863, Val Loss: 67741.7944039 +2025-02-27 00:28:03,460 Epoch 345/2000 +2025-02-27 00:28:19,534 Current Learning Rate: 0.0082472402 +2025-02-27 00:28:19,534 Train Loss: 70544.4182071, Val Loss: 59259.4685219 +2025-02-27 00:28:19,534 Epoch 346/2000 +2025-02-27 00:28:35,942 Current Learning Rate: 0.0083065593 +2025-02-27 00:28:35,943 Train Loss: 61953.8133078, Val Loss: 52428.9575730 +2025-02-27 00:28:35,943 Epoch 347/2000 +2025-02-27 00:28:53,028 Current Learning Rate: 0.0083650626 +2025-02-27 00:28:53,029 Train Loss: 55397.0346158, Val Loss: 47233.0368005 +2025-02-27 00:28:53,029 Epoch 348/2000 +2025-02-27 00:29:09,139 Current Learning Rate: 0.0084227355 +2025-02-27 00:29:09,140 Train Loss: 50347.2271065, Val Loss: 42598.9788625 +2025-02-27 00:29:09,140 Epoch 349/2000 +2025-02-27 00:29:25,595 Current Learning Rate: 0.0084795640 +2025-02-27 00:29:25,595 Train Loss: 45783.5727257, Val Loss: 39045.8333333 +2025-02-27 00:29:25,596 Epoch 350/2000 +2025-02-27 00:29:41,213 Current Learning Rate: 0.0085355339 +2025-02-27 00:29:41,214 Train Loss: 41963.3972673, Val Loss: 35861.0340633 +2025-02-27 00:29:41,214 Epoch 351/2000 +2025-02-27 00:29:56,410 Current Learning Rate: 0.0085906315 +2025-02-27 00:29:56,410 Train Loss: 38976.6247746, Val Loss: 32978.2903741 +2025-02-27 00:29:56,410 Epoch 352/2000 +2025-02-27 00:30:11,988 Current Learning Rate: 0.0086448431 +2025-02-27 00:30:11,989 Train Loss: 35885.9500551, Val Loss: 30639.5479775 +2025-02-27 00:30:11,989 Epoch 353/2000 +2025-02-27 00:30:27,641 Current Learning Rate: 0.0086981555 +2025-02-27 00:30:27,641 Train Loss: 33399.3390880, Val Loss: 28571.1621807 +2025-02-27 00:30:27,641 Epoch 354/2000 +2025-02-27 00:30:42,782 Current Learning Rate: 0.0087505553 +2025-02-27 00:30:42,782 Train Loss: 31166.1496218, Val Loss: 26572.5471411 +2025-02-27 
00:30:42,782 Epoch 355/2000 +2025-02-27 00:30:58,700 Current Learning Rate: 0.0088020298 +2025-02-27 00:30:58,701 Train Loss: 29301.1388513, Val Loss: 24857.5083637 +2025-02-27 00:30:58,701 Epoch 356/2000 +2025-02-27 00:31:14,978 Current Learning Rate: 0.0088525662 +2025-02-27 00:31:14,978 Train Loss: 27414.6377805, Val Loss: 23376.9076946 +2025-02-27 00:31:14,978 Epoch 357/2000 +2025-02-27 00:31:30,687 Current Learning Rate: 0.0089021520 +2025-02-27 00:31:30,687 Train Loss: 25692.7345081, Val Loss: 22011.7179897 +2025-02-27 00:31:30,687 Epoch 358/2000 +2025-02-27 00:31:46,654 Current Learning Rate: 0.0089507751 +2025-02-27 00:31:46,654 Train Loss: 24274.6266344, Val Loss: 20569.2100061 +2025-02-27 00:31:46,654 Epoch 359/2000 +2025-02-27 00:32:02,840 Current Learning Rate: 0.0089984233 +2025-02-27 00:32:02,841 Train Loss: 22725.8402214, Val Loss: 19619.4810675 +2025-02-27 00:32:02,841 Epoch 360/2000 +2025-02-27 00:32:19,070 Current Learning Rate: 0.0090450850 +2025-02-27 00:32:19,070 Train Loss: 21516.8136960, Val Loss: 18235.0771746 +2025-02-27 00:32:19,071 Epoch 361/2000 +2025-02-27 00:32:34,886 Current Learning Rate: 0.0090907486 +2025-02-27 00:32:34,887 Train Loss: 20303.1449128, Val Loss: 17242.1996655 +2025-02-27 00:32:34,887 Epoch 362/2000 +2025-02-27 00:32:50,692 Current Learning Rate: 0.0091354029 +2025-02-27 00:32:50,692 Train Loss: 19275.2230488, Val Loss: 16235.5080976 +2025-02-27 00:32:50,692 Epoch 363/2000 +2025-02-27 00:33:06,762 Current Learning Rate: 0.0091790368 +2025-02-27 00:33:06,762 Train Loss: 18271.4811580, Val Loss: 15320.4729319 +2025-02-27 00:33:06,762 Epoch 364/2000 +2025-02-27 00:33:23,007 Current Learning Rate: 0.0092216396 +2025-02-27 00:33:23,008 Train Loss: 17311.2081392, Val Loss: 14452.3494526 +2025-02-27 00:33:23,008 Epoch 365/2000 +2025-02-27 00:33:38,701 Current Learning Rate: 0.0092632008 +2025-02-27 00:33:38,702 Train Loss: 16310.7037590, Val Loss: 13758.9526308 +2025-02-27 00:33:38,702 Epoch 366/2000 +2025-02-27 00:33:54,562 
Current Learning Rate: 0.0093037101 +2025-02-27 00:33:54,562 Train Loss: 15587.1873278, Val Loss: 13014.7175335 +2025-02-27 00:33:54,563 Epoch 367/2000 +2025-02-27 00:34:10,471 Current Learning Rate: 0.0093431576 +2025-02-27 00:34:10,472 Train Loss: 14820.1204476, Val Loss: 12618.7019085 +2025-02-27 00:34:10,472 Epoch 368/2000 +2025-02-27 00:34:26,338 Current Learning Rate: 0.0093815334 +2025-02-27 00:34:26,338 Train Loss: 14037.9452992, Val Loss: 11735.0570255 +2025-02-27 00:34:26,338 Epoch 369/2000 +2025-02-27 00:34:42,390 Current Learning Rate: 0.0094188282 +2025-02-27 00:34:42,390 Train Loss: 13264.0966743, Val Loss: 11197.8119297 +2025-02-27 00:34:42,390 Epoch 370/2000 +2025-02-27 00:34:58,397 Current Learning Rate: 0.0094550326 +2025-02-27 00:34:58,398 Train Loss: 12719.3951320, Val Loss: 10551.6271290 +2025-02-27 00:34:58,398 Epoch 371/2000 +2025-02-27 00:35:14,087 Current Learning Rate: 0.0094901379 +2025-02-27 00:35:14,087 Train Loss: 12028.9959986, Val Loss: 9931.0692290 +2025-02-27 00:35:14,087 Epoch 372/2000 +2025-02-27 00:35:30,114 Current Learning Rate: 0.0095241353 +2025-02-27 00:35:30,115 Train Loss: 11347.3808048, Val Loss: 9329.8351962 +2025-02-27 00:35:30,115 Epoch 373/2000 +2025-02-27 00:35:46,164 Current Learning Rate: 0.0095570164 +2025-02-27 00:35:46,164 Train Loss: 10793.0709598, Val Loss: 8892.8104091 +2025-02-27 00:35:46,164 Epoch 374/2000 +2025-02-27 00:36:01,841 Current Learning Rate: 0.0095887731 +2025-02-27 00:36:01,841 Train Loss: 10181.9044622, Val Loss: 8352.3298738 +2025-02-27 00:36:01,842 Epoch 375/2000 +2025-02-27 00:36:18,041 Current Learning Rate: 0.0096193977 +2025-02-27 00:36:18,041 Train Loss: 9712.7192760, Val Loss: 7951.8634998 +2025-02-27 00:36:18,042 Epoch 376/2000 +2025-02-27 00:36:34,030 Current Learning Rate: 0.0096488824 +2025-02-27 00:36:34,030 Train Loss: 9393.4154268, Val Loss: 7965.9182064 +2025-02-27 00:36:34,031 Epoch 377/2000 +2025-02-27 00:36:50,009 Current Learning Rate: 0.0096772202 +2025-02-27 00:36:50,010 
Train Loss: 8869.0217272, Val Loss: 7220.5738671 +2025-02-27 00:36:50,010 Epoch 378/2000 +2025-02-27 00:37:06,000 Current Learning Rate: 0.0097044038 +2025-02-27 00:37:06,001 Train Loss: 8418.1976458, Val Loss: 7096.3328771 +2025-02-27 00:37:06,001 Epoch 379/2000 +2025-02-27 00:37:22,059 Current Learning Rate: 0.0097304268 +2025-02-27 00:37:22,059 Train Loss: 8097.3236258, Val Loss: 6618.8394351 +2025-02-27 00:37:22,059 Epoch 380/2000 +2025-02-27 00:37:38,184 Current Learning Rate: 0.0097552826 +2025-02-27 00:37:38,184 Train Loss: 7774.1563815, Val Loss: 6312.0202251 +2025-02-27 00:37:38,185 Epoch 381/2000 +2025-02-27 00:37:53,915 Current Learning Rate: 0.0097789651 +2025-02-27 00:37:53,915 Train Loss: 7405.1218096, Val Loss: 6069.5370476 +2025-02-27 00:37:53,916 Epoch 382/2000 +2025-02-27 00:38:09,407 Current Learning Rate: 0.0098014684 +2025-02-27 00:38:09,408 Train Loss: 7106.7981509, Val Loss: 5811.5106068 +2025-02-27 00:38:09,408 Epoch 383/2000 +2025-02-27 00:38:24,911 Current Learning Rate: 0.0098227871 +2025-02-27 00:38:24,912 Train Loss: 6826.6029205, Val Loss: 5556.0950806 +2025-02-27 00:38:24,912 Epoch 384/2000 +2025-02-27 00:38:41,254 Current Learning Rate: 0.0098429158 +2025-02-27 00:38:41,254 Train Loss: 6685.1701514, Val Loss: 5792.2469016 +2025-02-27 00:38:41,254 Epoch 385/2000 +2025-02-27 00:38:57,243 Current Learning Rate: 0.0098618496 +2025-02-27 00:38:57,244 Train Loss: 6342.7596965, Val Loss: 5103.4099377 +2025-02-27 00:38:57,244 Epoch 386/2000 +2025-02-27 00:39:13,223 Current Learning Rate: 0.0098795838 +2025-02-27 00:39:13,223 Train Loss: 6145.3745632, Val Loss: 4977.3172331 +2025-02-27 00:39:13,223 Epoch 387/2000 +2025-02-27 00:39:29,341 Current Learning Rate: 0.0098961141 +2025-02-27 00:39:29,342 Train Loss: 5824.9408642, Val Loss: 4741.4804783 +2025-02-27 00:39:29,342 Epoch 388/2000 +2025-02-27 00:39:45,518 Current Learning Rate: 0.0099114363 +2025-02-27 00:39:45,519 Train Loss: 5661.2729968, Val Loss: 4553.3935903 +2025-02-27 00:39:45,519 
Epoch 389/2000 +2025-02-27 00:40:01,638 Current Learning Rate: 0.0099255466 +2025-02-27 00:40:01,638 Train Loss: 5429.7119057, Val Loss: 4388.2103482 +2025-02-27 00:40:01,639 Epoch 390/2000 +2025-02-27 00:40:17,761 Current Learning Rate: 0.0099384417 +2025-02-27 00:40:17,761 Train Loss: 5271.8003817, Val Loss: 4277.3721677 +2025-02-27 00:40:17,761 Epoch 391/2000 +2025-02-27 00:40:34,094 Current Learning Rate: 0.0099501183 +2025-02-27 00:40:34,094 Train Loss: 5117.2419629, Val Loss: 4070.6962819 +2025-02-27 00:40:34,094 Epoch 392/2000 +2025-02-27 00:40:50,890 Current Learning Rate: 0.0099605735 +2025-02-27 00:40:50,891 Train Loss: 4917.8396610, Val Loss: 4114.8813393 +2025-02-27 00:40:50,891 Epoch 393/2000 +2025-02-27 00:41:07,496 Current Learning Rate: 0.0099698048 +2025-02-27 00:41:07,497 Train Loss: 4763.6347341, Val Loss: 3778.9769142 +2025-02-27 00:41:07,497 Epoch 394/2000 +2025-02-27 00:41:24,212 Current Learning Rate: 0.0099778098 +2025-02-27 00:41:24,213 Train Loss: 4515.3520830, Val Loss: 3655.3373061 +2025-02-27 00:41:24,213 Epoch 395/2000 +2025-02-27 00:41:40,213 Current Learning Rate: 0.0099845867 +2025-02-27 00:41:40,213 Train Loss: 4459.1024744, Val Loss: 3497.0059212 +2025-02-27 00:41:40,214 Epoch 396/2000 +2025-02-27 00:41:56,146 Current Learning Rate: 0.0099901336 +2025-02-27 00:41:56,147 Train Loss: 4245.6366902, Val Loss: 3425.6892488 +2025-02-27 00:41:56,147 Epoch 397/2000 +2025-02-27 00:42:12,617 Current Learning Rate: 0.0099944494 +2025-02-27 00:42:12,617 Train Loss: 4195.1679715, Val Loss: 3264.3666363 +2025-02-27 00:42:12,618 Epoch 398/2000 +2025-02-27 00:42:28,497 Current Learning Rate: 0.0099975328 +2025-02-27 00:42:28,497 Train Loss: 3994.5041712, Val Loss: 3415.5826110 +2025-02-27 00:42:28,497 Epoch 399/2000 +2025-02-27 00:42:44,463 Current Learning Rate: 0.0099993832 +2025-02-27 00:42:44,463 Train Loss: 3966.2864012, Val Loss: 3717.6063051 +2025-02-27 00:42:44,463 Epoch 400/2000 +2025-02-27 00:43:00,840 Current Learning Rate: 
0.0100000000 +2025-02-27 00:43:00,840 Train Loss: 3887.7800829, Val Loss: 4728.1413473 +2025-02-27 00:43:00,841 Epoch 401/2000 +2025-02-27 00:43:16,196 Current Learning Rate: 0.0099993832 +2025-02-27 00:43:16,197 Train Loss: 3914.4062899, Val Loss: 3009.6188317 +2025-02-27 00:43:16,197 Epoch 402/2000 +2025-02-27 00:43:31,691 Current Learning Rate: 0.0099975328 +2025-02-27 00:43:31,691 Train Loss: 4527.2207233, Val Loss: 4180.1644712 +2025-02-27 00:43:31,691 Epoch 403/2000 +2025-02-27 00:43:47,369 Current Learning Rate: 0.0099944494 +2025-02-27 00:43:47,370 Train Loss: 9224.2054943, Val Loss: 4701.3609147 +2025-02-27 00:43:47,370 Epoch 404/2000 +2025-02-27 00:44:02,501 Current Learning Rate: 0.0099901336 +2025-02-27 00:44:02,502 Train Loss: 3807.8757373, Val Loss: 3561.7136652 +2025-02-27 00:44:02,502 Epoch 405/2000 +2025-02-27 00:44:17,452 Current Learning Rate: 0.0099845867 +2025-02-27 00:44:17,452 Train Loss: 6617.3206279, Val Loss: 2468.0314781 +2025-02-27 00:44:17,452 Epoch 406/2000 +2025-02-27 00:44:32,183 Current Learning Rate: 0.0099778098 +2025-02-27 00:44:32,184 Train Loss: 5807.8668924, Val Loss: 8076.9555961 +2025-02-27 00:44:32,184 Epoch 407/2000 +2025-02-27 00:44:46,589 Current Learning Rate: 0.0099698048 +2025-02-27 00:44:46,590 Train Loss: 7659.3958091, Val Loss: 17592.2361618 +2025-02-27 00:44:46,590 Epoch 408/2000 +2025-02-27 00:45:01,791 Current Learning Rate: 0.0099605735 +2025-02-27 00:45:01,791 Train Loss: 6860.1261381, Val Loss: 5435.9570788 +2025-02-27 00:45:01,792 Epoch 409/2000 +2025-02-27 00:45:17,584 Current Learning Rate: 0.0099501183 +2025-02-27 00:45:17,585 Train Loss: 3779.3818490, Val Loss: 3909.3401669 +2025-02-27 00:45:17,585 Epoch 410/2000 +2025-02-27 00:45:34,081 Current Learning Rate: 0.0099384417 +2025-02-27 00:45:34,081 Train Loss: 14582.6601833, Val Loss: 52006.9426703 +2025-02-27 00:45:34,081 Epoch 411/2000 +2025-02-27 00:45:49,738 Current Learning Rate: 0.0099255466 +2025-02-27 00:45:49,739 Train Loss: 8136.9336153, Val 
Loss: 2043.8631007 +2025-02-27 00:45:49,739 Epoch 412/2000 +2025-02-27 00:46:06,595 Current Learning Rate: 0.0099114363 +2025-02-27 00:46:06,596 Train Loss: 2572.4173821, Val Loss: 2005.8022306 +2025-02-27 00:46:06,597 Epoch 413/2000 +2025-02-27 00:46:22,621 Current Learning Rate: 0.0098961141 +2025-02-27 00:46:22,622 Train Loss: 2541.0862357, Val Loss: 2571.1478767 +2025-02-27 00:46:22,622 Epoch 414/2000 +2025-02-27 00:46:38,771 Current Learning Rate: 0.0098795838 +2025-02-27 00:46:38,772 Train Loss: 3548.9557894, Val Loss: 6678.1356448 +2025-02-27 00:46:38,773 Epoch 415/2000 +2025-02-27 00:46:54,871 Current Learning Rate: 0.0098618496 +2025-02-27 00:46:54,871 Train Loss: 6603.7539685, Val Loss: 4187.5095043 +2025-02-27 00:46:54,871 Epoch 416/2000 +2025-02-27 00:47:10,117 Current Learning Rate: 0.0098429158 +2025-02-27 00:47:10,117 Train Loss: 7168.0834836, Val Loss: 6603.6365192 +2025-02-27 00:47:10,118 Epoch 417/2000 +2025-02-27 00:47:26,175 Current Learning Rate: 0.0098227871 +2025-02-27 00:47:26,175 Train Loss: 2845.3647399, Val Loss: 1978.1862312 +2025-02-27 00:47:26,176 Epoch 418/2000 +2025-02-27 00:47:43,152 Current Learning Rate: 0.0098014684 +2025-02-27 00:47:43,152 Train Loss: 9555.5980190, Val Loss: 4868.2013192 +2025-02-27 00:47:43,153 Epoch 419/2000 +2025-02-27 00:47:59,062 Current Learning Rate: 0.0097789651 +2025-02-27 00:47:59,063 Train Loss: 2653.1766685, Val Loss: 2602.0740192 +2025-02-27 00:47:59,063 Epoch 420/2000 +2025-02-27 00:48:16,208 Current Learning Rate: 0.0097552826 +2025-02-27 00:48:16,208 Train Loss: 3377.9827512, Val Loss: 2323.9811911 +2025-02-27 00:48:16,208 Epoch 421/2000 +2025-02-27 00:48:32,870 Current Learning Rate: 0.0097304268 +2025-02-27 00:48:32,870 Train Loss: 7619.9260959, Val Loss: 1768.1906507 +2025-02-27 00:48:32,871 Epoch 422/2000 +2025-02-27 00:48:48,969 Current Learning Rate: 0.0097044038 +2025-02-27 00:48:48,969 Train Loss: 4657.2433170, Val Loss: 14824.0598388 +2025-02-27 00:48:48,969 Epoch 423/2000 +2025-02-27 
00:49:04,982 Current Learning Rate: 0.0096772202 +2025-02-27 00:49:04,983 Train Loss: 4460.7341879, Val Loss: 14616.4438869 +2025-02-27 00:49:04,983 Epoch 424/2000 +2025-02-27 00:49:20,964 Current Learning Rate: 0.0096488824 +2025-02-27 00:49:20,964 Train Loss: 5445.8252614, Val Loss: 5480.3967077 +2025-02-27 00:49:20,965 Epoch 425/2000 +2025-02-27 00:49:37,141 Current Learning Rate: 0.0096193977 +2025-02-27 00:49:37,142 Train Loss: 4015.4110173, Val Loss: 11559.4253726 +2025-02-27 00:49:37,142 Epoch 426/2000 +2025-02-27 00:49:53,734 Current Learning Rate: 0.0095887731 +2025-02-27 00:49:53,735 Train Loss: 5575.7586093, Val Loss: 1333.5194980 +2025-02-27 00:49:53,735 Epoch 427/2000 +2025-02-27 00:50:10,384 Current Learning Rate: 0.0095570164 +2025-02-27 00:50:10,384 Train Loss: 3222.1877818, Val Loss: 1284.6189268 +2025-02-27 00:50:10,385 Epoch 428/2000 +2025-02-27 00:50:27,046 Current Learning Rate: 0.0095241353 +2025-02-27 00:50:27,046 Train Loss: 9326.5236430, Val Loss: 2340.6163036 +2025-02-27 00:50:27,047 Epoch 429/2000 +2025-02-27 00:50:43,026 Current Learning Rate: 0.0094901379 +2025-02-27 00:50:43,026 Train Loss: 1754.6691487, Val Loss: 1258.0244687 +2025-02-27 00:50:43,026 Epoch 430/2000 +2025-02-27 00:50:59,578 Current Learning Rate: 0.0094550326 +2025-02-27 00:50:59,579 Train Loss: 3838.8840300, Val Loss: 2157.4342305 +2025-02-27 00:50:59,579 Epoch 431/2000 +2025-02-27 00:51:15,826 Current Learning Rate: 0.0094188282 +2025-02-27 00:51:15,826 Train Loss: 3081.0131652, Val Loss: 1123.5347761 +2025-02-27 00:51:15,826 Epoch 432/2000 +2025-02-27 00:51:32,954 Current Learning Rate: 0.0093815334 +2025-02-27 00:51:32,955 Train Loss: 7222.3845893, Val Loss: 1815.4505731 +2025-02-27 00:51:32,955 Epoch 433/2000 +2025-02-27 00:51:49,177 Current Learning Rate: 0.0093431576 +2025-02-27 00:51:49,178 Train Loss: 1623.7078300, Val Loss: 1552.3447717 +2025-02-27 00:51:49,178 Epoch 434/2000 +2025-02-27 00:52:06,334 Current Learning Rate: 0.0093037101 +2025-02-27 
00:52:06,335 Train Loss: 13978.2548404, Val Loss: 2657.6548719 +2025-02-27 00:52:06,335 Epoch 435/2000 +2025-02-27 00:52:24,331 Current Learning Rate: 0.0092632008 +2025-02-27 00:52:24,331 Train Loss: 2420.3854355, Val Loss: 989.9985506 +2025-02-27 00:52:24,331 Epoch 436/2000 +2025-02-27 00:52:41,826 Current Learning Rate: 0.0092216396 +2025-02-27 00:52:41,827 Train Loss: 1245.2197241, Val Loss: 1051.5906564 +2025-02-27 00:52:41,827 Epoch 437/2000 +2025-02-27 00:52:58,326 Current Learning Rate: 0.0091790368 +2025-02-27 00:52:58,326 Train Loss: 1185.4333801, Val Loss: 926.6319761 +2025-02-27 00:52:58,326 Epoch 438/2000 +2025-02-27 00:53:14,619 Current Learning Rate: 0.0091354029 +2025-02-27 00:53:14,619 Train Loss: 1783.1070098, Val Loss: 1446.5680125 +2025-02-27 00:53:14,620 Epoch 439/2000 +2025-02-27 00:53:31,543 Current Learning Rate: 0.0090907486 +2025-02-27 00:53:31,543 Train Loss: 6625.3760123, Val Loss: 2546.6709436 +2025-02-27 00:53:31,544 Epoch 440/2000 +2025-02-27 00:53:47,674 Current Learning Rate: 0.0090450850 +2025-02-27 00:53:47,675 Train Loss: 1862.0777925, Val Loss: 856.8066287 +2025-02-27 00:53:47,675 Epoch 441/2000 +2025-02-27 00:54:05,606 Current Learning Rate: 0.0089984233 +2025-02-27 00:54:05,607 Train Loss: 1148.8547586, Val Loss: 879.5675681 +2025-02-27 00:54:05,607 Epoch 442/2000 +2025-02-27 00:54:23,309 Current Learning Rate: 0.0089507751 +2025-02-27 00:54:23,332 Train Loss: 4612.3412521, Val Loss: 778.8838794 +2025-02-27 00:54:23,337 Epoch 443/2000 +2025-02-27 00:54:38,696 Current Learning Rate: 0.0089021520 +2025-02-27 00:54:38,696 Train Loss: 1500.8413916, Val Loss: 771.0358097 +2025-02-27 00:54:38,696 Epoch 444/2000 +2025-02-27 00:54:54,771 Current Learning Rate: 0.0088525662 +2025-02-27 00:54:54,772 Train Loss: 10450.7119044, Val Loss: 6294.4086451 +2025-02-27 00:54:54,772 Epoch 445/2000 +2025-02-27 00:55:11,210 Current Learning Rate: 0.0088020298 +2025-02-27 00:55:11,211 Train Loss: 1447.5571212, Val Loss: 794.5643011 +2025-02-27 
00:55:11,211 Epoch 446/2000 +2025-02-27 00:55:28,089 Current Learning Rate: 0.0087505553 +2025-02-27 00:55:28,090 Train Loss: 938.2156204, Val Loss: 763.8069067 +2025-02-27 00:55:28,090 Epoch 447/2000 +2025-02-27 00:55:45,830 Current Learning Rate: 0.0086981555 +2025-02-27 00:55:45,832 Train Loss: 924.5552957, Val Loss: 674.1475750 +2025-02-27 00:55:45,832 Epoch 448/2000 +2025-02-27 00:56:04,250 Current Learning Rate: 0.0086448431 +2025-02-27 00:56:04,251 Train Loss: 1082.0874946, Val Loss: 680.2298248 +2025-02-27 00:56:04,252 Epoch 449/2000 +2025-02-27 00:56:24,026 Current Learning Rate: 0.0085906315 +2025-02-27 00:56:24,027 Train Loss: 6343.1659323, Val Loss: 777.1927511 +2025-02-27 00:56:24,027 Epoch 450/2000 +2025-02-27 00:56:44,501 Current Learning Rate: 0.0085355339 +2025-02-27 00:56:44,501 Train Loss: 828.2059566, Val Loss: 777.8039818 +2025-02-27 00:56:44,501 Epoch 451/2000 +2025-02-27 00:57:04,214 Current Learning Rate: 0.0084795640 +2025-02-27 00:57:04,215 Train Loss: 956.8984987, Val Loss: 2803.9027334 +2025-02-27 00:57:04,215 Epoch 452/2000 +2025-02-27 00:57:25,547 Current Learning Rate: 0.0084227355 +2025-02-27 00:57:25,547 Train Loss: 3838.0380682, Val Loss: 582.0410275 +2025-02-27 00:57:25,548 Epoch 453/2000 +2025-02-27 00:57:43,895 Current Learning Rate: 0.0083650626 +2025-02-27 00:57:43,895 Train Loss: 877.3504894, Val Loss: 1982.6445122 +2025-02-27 00:57:43,895 Epoch 454/2000 +2025-02-27 00:58:01,418 Current Learning Rate: 0.0083065593 +2025-02-27 00:58:01,419 Train Loss: 6011.3490084, Val Loss: 3008.3106657 +2025-02-27 00:58:01,419 Epoch 455/2000 +2025-02-27 00:58:20,064 Current Learning Rate: 0.0082472402 +2025-02-27 00:58:20,065 Train Loss: 975.9520139, Val Loss: 531.8113334 +2025-02-27 00:58:20,065 Epoch 456/2000 +2025-02-27 00:58:37,857 Current Learning Rate: 0.0081871199 +2025-02-27 00:58:37,857 Train Loss: 762.2287796, Val Loss: 644.6721981 +2025-02-27 00:58:37,860 Epoch 457/2000 +2025-02-27 00:58:55,893 Current Learning Rate: 0.0081262133 
+2025-02-27 00:58:55,894 Train Loss: 952.6100401, Val Loss: 3598.0866978 +2025-02-27 00:58:55,894 Epoch 458/2000 +2025-02-27 00:59:13,499 Current Learning Rate: 0.0080645353 +2025-02-27 00:59:13,500 Train Loss: 3405.0572093, Val Loss: 489.8910396 +2025-02-27 00:59:13,500 Epoch 459/2000 +2025-02-27 00:59:29,595 Current Learning Rate: 0.0080021011 +2025-02-27 00:59:29,595 Train Loss: 1528.1701395, Val Loss: 6138.5245590 +2025-02-27 00:59:29,595 Epoch 460/2000 +2025-02-27 00:59:46,324 Current Learning Rate: 0.0079389263 +2025-02-27 00:59:46,325 Train Loss: 1517.0204541, Val Loss: 535.2359788 +2025-02-27 00:59:46,325 Epoch 461/2000 +2025-02-27 01:00:03,913 Current Learning Rate: 0.0078750263 +2025-02-27 01:00:03,913 Train Loss: 2645.9121511, Val Loss: 757.2247686 +2025-02-27 01:00:03,914 Epoch 462/2000 +2025-02-27 01:00:22,465 Current Learning Rate: 0.0078104169 +2025-02-27 01:00:22,466 Train Loss: 1725.6602917, Val Loss: 864.4101658 +2025-02-27 01:00:22,467 Epoch 463/2000 +2025-02-27 01:00:40,878 Current Learning Rate: 0.0077451141 +2025-02-27 01:00:40,879 Train Loss: 1630.3948997, Val Loss: 10458.5074513 +2025-02-27 01:00:40,879 Epoch 464/2000 +2025-02-27 01:00:59,793 Current Learning Rate: 0.0076791340 +2025-02-27 01:00:59,793 Train Loss: 2075.8475615, Val Loss: 455.0515606 +2025-02-27 01:00:59,794 Epoch 465/2000 +2025-02-27 01:01:17,259 Current Learning Rate: 0.0076124928 +2025-02-27 01:01:17,260 Train Loss: 550.7725606, Val Loss: 1105.2372263 +2025-02-27 01:01:17,260 Epoch 466/2000 +2025-02-27 01:01:34,613 Current Learning Rate: 0.0075452071 +2025-02-27 01:01:34,614 Train Loss: 3385.6935335, Val Loss: 566.6910332 +2025-02-27 01:01:34,614 Epoch 467/2000 +2025-02-27 01:01:51,988 Current Learning Rate: 0.0074772933 +2025-02-27 01:01:51,989 Train Loss: 532.6383830, Val Loss: 1107.6203714 +2025-02-27 01:01:51,989 Epoch 468/2000 +2025-02-27 01:02:09,334 Current Learning Rate: 0.0074087684 +2025-02-27 01:02:09,334 Train Loss: 2645.4477580, Val Loss: 1104.8566520 
+2025-02-27 01:02:09,335 Epoch 469/2000 +2025-02-27 01:02:25,389 Current Learning Rate: 0.0073396491 +2025-02-27 01:02:25,390 Train Loss: 660.2441308, Val Loss: 662.4573378 +2025-02-27 01:02:25,390 Epoch 470/2000 +2025-02-27 01:02:42,204 Current Learning Rate: 0.0072699525 +2025-02-27 01:02:42,205 Train Loss: 1890.1354062, Val Loss: 806.5849182 +2025-02-27 01:02:42,205 Epoch 471/2000 +2025-02-27 01:02:58,806 Current Learning Rate: 0.0071996958 +2025-02-27 01:02:58,806 Train Loss: 675.7175235, Val Loss: 1477.0676275 +2025-02-27 01:02:58,807 Epoch 472/2000 +2025-02-27 01:03:15,149 Current Learning Rate: 0.0071288965 +2025-02-27 01:03:15,150 Train Loss: 1715.0143574, Val Loss: 364.9633848 +2025-02-27 01:03:15,150 Epoch 473/2000 +2025-02-27 01:03:31,499 Current Learning Rate: 0.0070575718 +2025-02-27 01:03:31,500 Train Loss: 2225.6480991, Val Loss: 2333.6140891 +2025-02-27 01:03:31,500 Epoch 474/2000 +2025-02-27 01:03:46,547 Current Learning Rate: 0.0069857395 +2025-02-27 01:03:46,548 Train Loss: 632.2059853, Val Loss: 328.4242000 +2025-02-27 01:03:46,548 Epoch 475/2000 +2025-02-27 01:04:02,226 Current Learning Rate: 0.0069134172 +2025-02-27 01:04:02,227 Train Loss: 3779.0283174, Val Loss: 2082.4360363 +2025-02-27 01:04:02,227 Epoch 476/2000 +2025-02-27 01:04:17,776 Current Learning Rate: 0.0068406228 +2025-02-27 01:04:17,777 Train Loss: 851.6816047, Val Loss: 316.0799688 +2025-02-27 01:04:17,777 Epoch 477/2000 +2025-02-27 01:04:33,397 Current Learning Rate: 0.0067673742 +2025-02-27 01:04:33,397 Train Loss: 393.5365236, Val Loss: 335.3325362 +2025-02-27 01:04:33,397 Epoch 478/2000 +2025-02-27 01:04:49,787 Current Learning Rate: 0.0066936896 +2025-02-27 01:04:49,787 Train Loss: 388.1706179, Val Loss: 303.2427578 +2025-02-27 01:04:49,788 Epoch 479/2000 +2025-02-27 01:05:04,955 Current Learning Rate: 0.0066195871 +2025-02-27 01:05:04,955 Train Loss: 1068.7227979, Val Loss: 3569.3498137 +2025-02-27 01:05:04,955 Epoch 480/2000 +2025-02-27 01:05:21,208 Current Learning Rate: 
0.0065450850 +2025-02-27 01:05:21,208 Train Loss: 943.8455076, Val Loss: 919.5504724 +2025-02-27 01:05:21,208 Epoch 481/2000 +2025-02-27 01:05:36,499 Current Learning Rate: 0.0064702016 +2025-02-27 01:05:36,499 Train Loss: 1527.8049999, Val Loss: 9924.3411648 +2025-02-27 01:05:36,499 Epoch 482/2000 +2025-02-27 01:05:52,588 Current Learning Rate: 0.0063949555 +2025-02-27 01:05:52,588 Train Loss: 1498.1174686, Val Loss: 282.6791196 +2025-02-27 01:05:52,588 Epoch 483/2000 +2025-02-27 01:06:08,547 Current Learning Rate: 0.0063193652 +2025-02-27 01:06:08,548 Train Loss: 357.1593684, Val Loss: 360.7357602 +2025-02-27 01:06:08,548 Epoch 484/2000 +2025-02-27 01:06:24,725 Current Learning Rate: 0.0062434494 +2025-02-27 01:06:24,726 Train Loss: 886.1113856, Val Loss: 9261.0998327 +2025-02-27 01:06:24,726 Epoch 485/2000 +2025-02-27 01:06:40,089 Current Learning Rate: 0.0061672268 +2025-02-27 01:06:40,089 Train Loss: 2038.7938524, Val Loss: 279.1367817 +2025-02-27 01:06:40,090 Epoch 486/2000 +2025-02-27 01:06:56,092 Current Learning Rate: 0.0060907162 +2025-02-27 01:06:56,092 Train Loss: 318.1203466, Val Loss: 242.1648205 +2025-02-27 01:06:56,093 Epoch 487/2000 +2025-02-27 01:07:11,515 Current Learning Rate: 0.0060139365 +2025-02-27 01:07:11,516 Train Loss: 374.5906368, Val Loss: 890.2878246 +2025-02-27 01:07:11,516 Epoch 488/2000 +2025-02-27 01:07:27,614 Current Learning Rate: 0.0059369066 +2025-02-27 01:07:27,615 Train Loss: 1438.3590854, Val Loss: 485.2848664 +2025-02-27 01:07:27,615 Epoch 489/2000 +2025-02-27 01:07:43,023 Current Learning Rate: 0.0058596455 +2025-02-27 01:07:43,024 Train Loss: 347.9299272, Val Loss: 343.7839837 +2025-02-27 01:07:43,024 Epoch 490/2000 +2025-02-27 01:07:59,138 Current Learning Rate: 0.0057821723 +2025-02-27 01:07:59,138 Train Loss: 3426.5908493, Val Loss: 2926.0422844 +2025-02-27 01:07:59,138 Epoch 491/2000 +2025-02-27 01:08:14,394 Current Learning Rate: 0.0057045062 +2025-02-27 01:08:14,394 Train Loss: 524.2754505, Val Loss: 245.8558139 
+2025-02-27 01:08:14,394 Epoch 492/2000 +2025-02-27 01:08:29,790 Current Learning Rate: 0.0056266662 +2025-02-27 01:08:29,790 Train Loss: 278.9703454, Val Loss: 213.6330709 +2025-02-27 01:08:29,790 Epoch 493/2000 +2025-02-27 01:08:45,052 Current Learning Rate: 0.0055486716 +2025-02-27 01:08:45,052 Train Loss: 259.1106163, Val Loss: 217.8448947 +2025-02-27 01:08:45,053 Epoch 494/2000 +2025-02-27 01:09:00,856 Current Learning Rate: 0.0054705416 +2025-02-27 01:09:00,856 Train Loss: 276.7396033, Val Loss: 280.6395118 +2025-02-27 01:09:00,857 Epoch 495/2000 +2025-02-27 01:09:16,409 Current Learning Rate: 0.0053922955 +2025-02-27 01:09:16,414 Train Loss: 292.5108316, Val Loss: 227.5632954 +2025-02-27 01:09:16,414 Epoch 496/2000 +2025-02-27 01:09:31,611 Current Learning Rate: 0.0053139526 +2025-02-27 01:09:31,612 Train Loss: 1722.7395464, Val Loss: 1989.8730944 +2025-02-27 01:09:31,612 Epoch 497/2000 +2025-02-27 01:09:49,099 Current Learning Rate: 0.0052355323 +2025-02-27 01:09:49,100 Train Loss: 449.7026367, Val Loss: 219.0219020 +2025-02-27 01:09:49,100 Epoch 498/2000 +2025-02-27 01:10:07,888 Current Learning Rate: 0.0051570538 +2025-02-27 01:10:07,889 Train Loss: 248.3519265, Val Loss: 195.9075943 +2025-02-27 01:10:07,889 Epoch 499/2000 +2025-02-27 01:10:25,844 Current Learning Rate: 0.0050785366 +2025-02-27 01:10:25,845 Train Loss: 295.9033776, Val Loss: 260.5542467 +2025-02-27 01:10:25,845 Epoch 500/2000 +2025-02-27 01:10:43,469 Current Learning Rate: 0.0050000000 +2025-02-27 01:10:43,469 Train Loss: 1523.0093185, Val Loss: 222.7712575 +2025-02-27 01:10:43,470 Epoch 501/2000 +2025-02-27 01:11:01,527 Current Learning Rate: 0.0049214634 +2025-02-27 01:11:01,527 Train Loss: 250.3751381, Val Loss: 172.1525689 +2025-02-27 01:11:01,527 Epoch 502/2000 +2025-02-27 01:11:19,365 Current Learning Rate: 0.0048429462 +2025-02-27 01:11:19,366 Train Loss: 217.4826394, Val Loss: 168.9912893 +2025-02-27 01:11:19,366 Epoch 503/2000 +2025-02-27 01:11:37,248 Current Learning Rate: 
0.0047644677 +2025-02-27 01:11:37,248 Train Loss: 949.1629472, Val Loss: 694.0935908 +2025-02-27 01:11:37,249 Epoch 504/2000 +2025-02-27 01:11:55,158 Current Learning Rate: 0.0046860474 +2025-02-27 01:11:55,158 Train Loss: 299.9423883, Val Loss: 159.7845593 +2025-02-27 01:11:55,159 Epoch 505/2000 +2025-02-27 01:12:13,741 Current Learning Rate: 0.0046077045 +2025-02-27 01:12:13,741 Train Loss: 224.2664706, Val Loss: 1007.1313655 +2025-02-27 01:12:13,741 Epoch 506/2000 +2025-02-27 01:12:31,042 Current Learning Rate: 0.0045294584 +2025-02-27 01:12:31,042 Train Loss: 661.4476380, Val Loss: 2007.6351268 +2025-02-27 01:12:31,042 Epoch 507/2000 +2025-02-27 01:12:47,490 Current Learning Rate: 0.0044513284 +2025-02-27 01:12:47,490 Train Loss: 383.9772210, Val Loss: 551.2281521 +2025-02-27 01:12:47,490 Epoch 508/2000 +2025-02-27 01:13:04,761 Current Learning Rate: 0.0043733338 +2025-02-27 01:13:04,761 Train Loss: 468.2038119, Val Loss: 4550.3816910 +2025-02-27 01:13:04,762 Epoch 509/2000 +2025-02-27 01:13:21,334 Current Learning Rate: 0.0042954938 +2025-02-27 01:13:21,334 Train Loss: 926.2242470, Val Loss: 143.7447014 +2025-02-27 01:13:21,334 Epoch 510/2000 +2025-02-27 01:13:36,728 Current Learning Rate: 0.0042178277 +2025-02-27 01:13:36,728 Train Loss: 178.8618649, Val Loss: 181.8688193 +2025-02-27 01:13:36,728 Epoch 511/2000 +2025-02-27 01:13:52,508 Current Learning Rate: 0.0041403545 +2025-02-27 01:13:52,509 Train Loss: 203.8183388, Val Loss: 887.2064491 +2025-02-27 01:13:52,509 Epoch 512/2000 +2025-02-27 01:14:09,220 Current Learning Rate: 0.0040630934 +2025-02-27 01:14:09,221 Train Loss: 435.9273980, Val Loss: 1245.0245923 +2025-02-27 01:14:09,221 Epoch 513/2000 +2025-02-27 01:14:25,476 Current Learning Rate: 0.0039860635 +2025-02-27 01:14:25,477 Train Loss: 362.3056082, Val Loss: 138.3131368 +2025-02-27 01:14:25,477 Epoch 514/2000 +2025-02-27 01:14:40,973 Current Learning Rate: 0.0039092838 +2025-02-27 01:14:40,973 Train Loss: 1028.5893021, Val Loss: 1663.2776146 
+2025-02-27 01:14:40,973 Epoch 515/2000 +2025-02-27 01:14:56,740 Current Learning Rate: 0.0038327732 +2025-02-27 01:14:56,741 Train Loss: 299.6718749, Val Loss: 148.8328296 +2025-02-27 01:14:56,742 Epoch 516/2000 +2025-02-27 01:15:12,076 Current Learning Rate: 0.0037565506 +2025-02-27 01:15:12,077 Train Loss: 160.6193034, Val Loss: 136.7400811 +2025-02-27 01:15:12,077 Epoch 517/2000 +2025-02-27 01:15:28,053 Current Learning Rate: 0.0036806348 +2025-02-27 01:15:28,064 Train Loss: 184.0173255, Val Loss: 472.2013109 +2025-02-27 01:15:28,065 Epoch 518/2000 +2025-02-27 01:15:44,042 Current Learning Rate: 0.0036050445 +2025-02-27 01:15:44,043 Train Loss: 209.3348786, Val Loss: 227.4523421 +2025-02-27 01:15:44,043 Epoch 519/2000 +2025-02-27 01:15:59,981 Current Learning Rate: 0.0035297984 +2025-02-27 01:15:59,982 Train Loss: 427.5245190, Val Loss: 801.0076295 +2025-02-27 01:15:59,982 Epoch 520/2000 +2025-02-27 01:16:16,410 Current Learning Rate: 0.0034549150 +2025-02-27 01:16:16,410 Train Loss: 296.6322837, Val Loss: 301.0708804 +2025-02-27 01:16:16,410 Epoch 521/2000 +2025-02-27 01:16:32,686 Current Learning Rate: 0.0033804129 +2025-02-27 01:16:32,686 Train Loss: 237.0952823, Val Loss: 795.5489422 +2025-02-27 01:16:32,687 Epoch 522/2000 +2025-02-27 01:16:49,359 Current Learning Rate: 0.0033063104 +2025-02-27 01:16:49,359 Train Loss: 347.0963528, Val Loss: 251.1882669 +2025-02-27 01:16:49,360 Epoch 523/2000 +2025-02-27 01:17:06,471 Current Learning Rate: 0.0032326258 +2025-02-27 01:17:06,472 Train Loss: 171.5503374, Val Loss: 156.3928906 +2025-02-27 01:17:06,472 Epoch 524/2000 +2025-02-27 01:17:24,072 Current Learning Rate: 0.0031593772 +2025-02-27 01:17:24,072 Train Loss: 409.1241481, Val Loss: 374.3527006 +2025-02-27 01:17:24,072 Epoch 525/2000 +2025-02-27 01:17:40,352 Current Learning Rate: 0.0030865828 +2025-02-27 01:17:40,352 Train Loss: 183.3092735, Val Loss: 629.0677511 +2025-02-27 01:17:40,353 Epoch 526/2000 +2025-02-27 01:17:56,915 Current Learning Rate: 
0.0030142605 +2025-02-27 01:17:56,916 Train Loss: 212.6020987, Val Loss: 122.2049878 +2025-02-27 01:17:56,916 Epoch 527/2000 +2025-02-27 01:18:13,168 Current Learning Rate: 0.0029424282 +2025-02-27 01:18:13,169 Train Loss: 184.3374506, Val Loss: 201.4156889 +2025-02-27 01:18:13,169 Epoch 528/2000 +2025-02-27 01:18:29,275 Current Learning Rate: 0.0028711035 +2025-02-27 01:18:29,276 Train Loss: 208.1436128, Val Loss: 554.4096311 +2025-02-27 01:18:29,276 Epoch 529/2000 +2025-02-27 01:18:45,474 Current Learning Rate: 0.0028003042 +2025-02-27 01:18:45,474 Train Loss: 200.7157759, Val Loss: 300.2890898 +2025-02-27 01:18:45,475 Epoch 530/2000 +2025-02-27 01:19:01,859 Current Learning Rate: 0.0027300475 +2025-02-27 01:19:01,860 Train Loss: 215.0513528, Val Loss: 106.7242031 +2025-02-27 01:19:01,860 Epoch 531/2000 +2025-02-27 01:19:18,805 Current Learning Rate: 0.0026603509 +2025-02-27 01:19:18,806 Train Loss: 180.7479761, Val Loss: 521.2236791 +2025-02-27 01:19:18,806 Epoch 532/2000 +2025-02-27 01:19:34,702 Current Learning Rate: 0.0025912316 +2025-02-27 01:19:34,703 Train Loss: 194.6259451, Val Loss: 225.3123426 +2025-02-27 01:19:34,703 Epoch 533/2000 +2025-02-27 01:19:50,552 Current Learning Rate: 0.0025227067 +2025-02-27 01:19:50,553 Train Loss: 164.0708375, Val Loss: 115.4596660 +2025-02-27 01:19:50,553 Epoch 534/2000 +2025-02-27 01:20:06,227 Current Learning Rate: 0.0024547929 +2025-02-27 01:20:06,228 Train Loss: 161.6834269, Val Loss: 93.8995139 +2025-02-27 01:20:06,228 Epoch 535/2000 +2025-02-27 01:20:22,460 Current Learning Rate: 0.0023875072 +2025-02-27 01:20:22,461 Train Loss: 128.7525359, Val Loss: 86.3452148 +2025-02-27 01:20:22,461 Epoch 536/2000 +2025-02-27 01:20:38,385 Current Learning Rate: 0.0023208660 +2025-02-27 01:20:38,386 Train Loss: 169.8807027, Val Loss: 156.0216216 +2025-02-27 01:20:38,386 Epoch 537/2000 +2025-02-27 01:20:53,286 Current Learning Rate: 0.0022548859 +2025-02-27 01:20:53,287 Train Loss: 144.3553808, Val Loss: 117.6959050 +2025-02-27 
01:20:53,287 Epoch 538/2000 +2025-02-27 01:21:08,378 Current Learning Rate: 0.0021895831 +2025-02-27 01:21:08,378 Train Loss: 126.8885480, Val Loss: 176.9919974 +2025-02-27 01:21:08,379 Epoch 539/2000 +2025-02-27 01:21:23,813 Current Learning Rate: 0.0021249737 +2025-02-27 01:21:23,814 Train Loss: 126.6657319, Val Loss: 141.7396564 +2025-02-27 01:21:23,814 Epoch 540/2000 +2025-02-27 01:21:39,754 Current Learning Rate: 0.0020610737 +2025-02-27 01:21:39,755 Train Loss: 120.0145774, Val Loss: 262.4023675 +2025-02-27 01:21:39,755 Epoch 541/2000 +2025-02-27 01:21:55,059 Current Learning Rate: 0.0019978989 +2025-02-27 01:21:55,059 Train Loss: 116.2269319, Val Loss: 149.5147423 +2025-02-27 01:21:55,060 Epoch 542/2000 +2025-02-27 01:22:09,751 Current Learning Rate: 0.0019354647 +2025-02-27 01:22:09,751 Train Loss: 111.5824312, Val Loss: 87.5043363 +2025-02-27 01:22:09,751 Epoch 543/2000 +2025-02-27 01:22:25,125 Current Learning Rate: 0.0018737867 +2025-02-27 01:22:25,126 Train Loss: 121.8250515, Val Loss: 96.8787081 +2025-02-27 01:22:25,126 Epoch 544/2000 +2025-02-27 01:22:40,858 Current Learning Rate: 0.0018128801 +2025-02-27 01:22:40,858 Train Loss: 99.5927357, Val Loss: 79.6099839 +2025-02-27 01:22:40,859 Epoch 545/2000 +2025-02-27 01:22:56,762 Current Learning Rate: 0.0017527598 +2025-02-27 01:22:56,763 Train Loss: 101.1575651, Val Loss: 81.6805132 +2025-02-27 01:22:56,763 Epoch 546/2000 +2025-02-27 01:23:12,323 Current Learning Rate: 0.0016934407 +2025-02-27 01:23:12,324 Train Loss: 97.9827700, Val Loss: 76.7324005 +2025-02-27 01:23:12,324 Epoch 547/2000 +2025-02-27 01:23:28,446 Current Learning Rate: 0.0016349374 +2025-02-27 01:23:28,447 Train Loss: 97.2461108, Val Loss: 91.0777621 +2025-02-27 01:23:28,447 Epoch 548/2000 +2025-02-27 01:23:43,832 Current Learning Rate: 0.0015772645 +2025-02-27 01:23:43,832 Train Loss: 95.8458761, Val Loss: 77.6495213 +2025-02-27 01:23:43,832 Epoch 549/2000 +2025-02-27 01:23:59,604 Current Learning Rate: 0.0015204360 +2025-02-27 
01:23:59,612 Train Loss: 90.1668386, Val Loss: 69.7510077 +2025-02-27 01:23:59,613 Epoch 550/2000 +2025-02-27 01:24:15,833 Current Learning Rate: 0.0014644661 +2025-02-27 01:24:15,833 Train Loss: 89.3180312, Val Loss: 74.9766463 +2025-02-27 01:24:15,834 Epoch 551/2000 +2025-02-27 01:24:31,430 Current Learning Rate: 0.0014093685 +2025-02-27 01:24:31,430 Train Loss: 86.3037693, Val Loss: 68.2816450 +2025-02-27 01:24:31,430 Epoch 552/2000 +2025-02-27 01:24:48,326 Current Learning Rate: 0.0013551569 +2025-02-27 01:24:48,326 Train Loss: 85.3991801, Val Loss: 69.4548165 +2025-02-27 01:24:48,327 Epoch 553/2000 +2025-02-27 01:25:03,791 Current Learning Rate: 0.0013018445 +2025-02-27 01:25:03,792 Train Loss: 85.8305601, Val Loss: 91.6347413 +2025-02-27 01:25:03,792 Epoch 554/2000 +2025-02-27 01:25:19,270 Current Learning Rate: 0.0012494447 +2025-02-27 01:25:19,270 Train Loss: 86.0510507, Val Loss: 72.0134340 +2025-02-27 01:25:19,271 Epoch 555/2000 +2025-02-27 01:25:34,999 Current Learning Rate: 0.0011979702 +2025-02-27 01:25:35,000 Train Loss: 84.2558383, Val Loss: 71.7623047 +2025-02-27 01:25:35,000 Epoch 556/2000 +2025-02-27 01:25:50,799 Current Learning Rate: 0.0011474338 +2025-02-27 01:25:50,799 Train Loss: 81.3191024, Val Loss: 64.4000274 +2025-02-27 01:25:50,799 Epoch 557/2000 +2025-02-27 01:26:06,042 Current Learning Rate: 0.0010978480 +2025-02-27 01:26:06,043 Train Loss: 81.1410372, Val Loss: 64.4546063 +2025-02-27 01:26:06,043 Epoch 558/2000 +2025-02-27 01:26:21,314 Current Learning Rate: 0.0010492249 +2025-02-27 01:26:21,315 Train Loss: 79.5880051, Val Loss: 63.5927914 +2025-02-27 01:26:21,315 Epoch 559/2000 +2025-02-27 01:26:37,076 Current Learning Rate: 0.0010015767 +2025-02-27 01:26:37,076 Train Loss: 79.3258739, Val Loss: 62.5329337 +2025-02-27 01:26:37,077 Epoch 560/2000 +2025-02-27 01:26:53,325 Current Learning Rate: 0.0009549150 +2025-02-27 01:26:53,325 Train Loss: 77.7917932, Val Loss: 62.1690892 +2025-02-27 01:26:53,325 Epoch 561/2000 +2025-02-27 
01:27:10,025 Current Learning Rate: 0.0009092514 +2025-02-27 01:27:10,025 Train Loss: 77.5998577, Val Loss: 61.4659768 +2025-02-27 01:27:10,026 Epoch 562/2000 +2025-02-27 01:27:25,807 Current Learning Rate: 0.0008645971 +2025-02-27 01:27:25,807 Train Loss: 76.7075670, Val Loss: 61.7074726 +2025-02-27 01:27:25,807 Epoch 563/2000 +2025-02-27 01:27:41,776 Current Learning Rate: 0.0008209632 +2025-02-27 01:27:41,776 Train Loss: 76.6446790, Val Loss: 60.6937247 +2025-02-27 01:27:41,776 Epoch 564/2000 +2025-02-27 01:27:58,036 Current Learning Rate: 0.0007783604 +2025-02-27 01:27:58,037 Train Loss: 75.0296708, Val Loss: 60.3754709 +2025-02-27 01:27:58,037 Epoch 565/2000 +2025-02-27 01:28:14,564 Current Learning Rate: 0.0007367992 +2025-02-27 01:28:14,565 Train Loss: 74.5701513, Val Loss: 59.1330060 +2025-02-27 01:28:14,565 Epoch 566/2000 +2025-02-27 01:28:30,215 Current Learning Rate: 0.0006962899 +2025-02-27 01:28:30,216 Train Loss: 73.6413530, Val Loss: 58.8939123 +2025-02-27 01:28:30,216 Epoch 567/2000 +2025-02-27 01:28:45,986 Current Learning Rate: 0.0006568424 +2025-02-27 01:28:45,987 Train Loss: 73.1713330, Val Loss: 59.7082104 +2025-02-27 01:28:45,987 Epoch 568/2000 +2025-02-27 01:29:02,152 Current Learning Rate: 0.0006184666 +2025-02-27 01:29:02,152 Train Loss: 72.9418890, Val Loss: 58.6900304 +2025-02-27 01:29:02,153 Epoch 569/2000 +2025-02-27 01:29:18,474 Current Learning Rate: 0.0005811718 +2025-02-27 01:29:18,475 Train Loss: 71.3618990, Val Loss: 57.9371602 +2025-02-27 01:29:18,476 Epoch 570/2000 +2025-02-27 01:29:34,904 Current Learning Rate: 0.0005449674 +2025-02-27 01:29:34,904 Train Loss: 71.3668134, Val Loss: 57.0683552 +2025-02-27 01:29:34,904 Epoch 571/2000 +2025-02-27 01:29:50,449 Current Learning Rate: 0.0005098621 +2025-02-27 01:29:50,450 Train Loss: 71.7083166, Val Loss: 57.0805741 +2025-02-27 01:29:50,450 Epoch 572/2000 +2025-02-27 01:30:07,381 Current Learning Rate: 0.0004758647 +2025-02-27 01:30:07,381 Train Loss: 70.5060729, Val Loss: 56.2464329 
+2025-02-27 01:30:07,381 Epoch 573/2000 +2025-02-27 01:30:22,596 Current Learning Rate: 0.0004429836 +2025-02-27 01:30:22,597 Train Loss: 69.9370605, Val Loss: 56.7031430 +2025-02-27 01:30:22,597 Epoch 574/2000 +2025-02-27 01:30:38,341 Current Learning Rate: 0.0004112269 +2025-02-27 01:30:38,341 Train Loss: 69.3171435, Val Loss: 56.7351159 +2025-02-27 01:30:38,342 Epoch 575/2000 +2025-02-27 01:30:54,252 Current Learning Rate: 0.0003806023 +2025-02-27 01:30:54,256 Train Loss: 69.0628253, Val Loss: 55.9796343 +2025-02-27 01:30:54,257 Epoch 576/2000 +2025-02-27 01:31:11,232 Current Learning Rate: 0.0003511176 +2025-02-27 01:31:11,233 Train Loss: 68.2718580, Val Loss: 54.9122623 +2025-02-27 01:31:11,233 Epoch 577/2000 +2025-02-27 01:31:27,396 Current Learning Rate: 0.0003227798 +2025-02-27 01:31:27,396 Train Loss: 67.9746306, Val Loss: 54.6086496 +2025-02-27 01:31:27,396 Epoch 578/2000 +2025-02-27 01:31:43,444 Current Learning Rate: 0.0002955962 +2025-02-27 01:31:43,445 Train Loss: 67.7682929, Val Loss: 54.8671164 +2025-02-27 01:31:43,445 Epoch 579/2000 +2025-02-27 01:31:59,510 Current Learning Rate: 0.0002695732 +2025-02-27 01:31:59,511 Train Loss: 67.3766922, Val Loss: 54.4103336 +2025-02-27 01:31:59,511 Epoch 580/2000 +2025-02-27 01:32:15,365 Current Learning Rate: 0.0002447174 +2025-02-27 01:32:15,366 Train Loss: 67.0656714, Val Loss: 54.0608954 +2025-02-27 01:32:15,366 Epoch 581/2000 +2025-02-27 01:32:31,439 Current Learning Rate: 0.0002210349 +2025-02-27 01:32:31,439 Train Loss: 66.9882179, Val Loss: 53.8631727 +2025-02-27 01:32:31,439 Epoch 582/2000 +2025-02-27 01:32:48,019 Current Learning Rate: 0.0001985316 +2025-02-27 01:32:48,019 Train Loss: 66.7454922, Val Loss: 53.7233227 +2025-02-27 01:32:48,019 Epoch 583/2000 +2025-02-27 01:33:04,520 Current Learning Rate: 0.0001772129 +2025-02-27 01:33:04,521 Train Loss: 66.4775873, Val Loss: 53.3969332 +2025-02-27 01:33:04,521 Epoch 584/2000 +2025-02-27 01:33:21,548 Current Learning Rate: 0.0001570842 +2025-02-27 
01:33:21,548 Train Loss: 66.0039779, Val Loss: 53.3049557 +2025-02-27 01:33:21,549 Epoch 585/2000 +2025-02-27 01:33:37,571 Current Learning Rate: 0.0001381504 +2025-02-27 01:33:37,571 Train Loss: 65.8744960, Val Loss: 53.3027400 +2025-02-27 01:33:37,572 Epoch 586/2000 +2025-02-27 01:33:53,428 Current Learning Rate: 0.0001204162 +2025-02-27 01:33:53,429 Train Loss: 65.9403056, Val Loss: 53.1060940 +2025-02-27 01:33:53,429 Epoch 587/2000 +2025-02-27 01:34:09,827 Current Learning Rate: 0.0001038859 +2025-02-27 01:34:09,827 Train Loss: 65.7874733, Val Loss: 52.9842991 +2025-02-27 01:34:09,828 Epoch 588/2000 +2025-02-27 01:34:26,010 Current Learning Rate: 0.0000885637 +2025-02-27 01:34:26,011 Train Loss: 65.4528588, Val Loss: 52.7197696 +2025-02-27 01:34:26,011 Epoch 589/2000 +2025-02-27 01:34:43,056 Current Learning Rate: 0.0000744534 +2025-02-27 01:34:43,057 Train Loss: 65.4618741, Val Loss: 52.6600985 +2025-02-27 01:34:43,058 Epoch 590/2000 +2025-02-27 01:35:00,210 Current Learning Rate: 0.0000615583 +2025-02-27 01:35:00,210 Train Loss: 65.4070427, Val Loss: 52.5455499 +2025-02-27 01:35:00,211 Epoch 591/2000 +2025-02-27 01:35:17,855 Current Learning Rate: 0.0000498817 +2025-02-27 01:35:17,855 Train Loss: 65.0433201, Val Loss: 52.4777110 +2025-02-27 01:35:17,856 Epoch 592/2000 +2025-02-27 01:35:34,050 Current Learning Rate: 0.0000394265 +2025-02-27 01:35:34,050 Train Loss: 65.1587795, Val Loss: 52.5919247 +2025-02-27 01:35:34,050 Epoch 593/2000 +2025-02-27 01:35:50,633 Current Learning Rate: 0.0000301952 +2025-02-27 01:35:50,633 Train Loss: 65.2659855, Val Loss: 52.4678964 +2025-02-27 01:35:50,634 Epoch 594/2000 +2025-02-27 01:36:07,315 Current Learning Rate: 0.0000221902 +2025-02-27 01:36:07,316 Train Loss: 64.7297393, Val Loss: 52.3767446 +2025-02-27 01:36:07,316 Epoch 595/2000 +2025-02-27 01:36:24,217 Current Learning Rate: 0.0000154133 +2025-02-27 01:36:24,217 Train Loss: 64.9740531, Val Loss: 52.3587140 +2025-02-27 01:36:24,218 Epoch 596/2000 +2025-02-27 
01:36:41,106 Current Learning Rate: 0.0000098664 +2025-02-27 01:36:41,106 Train Loss: 64.8840507, Val Loss: 52.2966512 +2025-02-27 01:36:41,107 Epoch 597/2000 +2025-02-27 01:36:57,648 Current Learning Rate: 0.0000055506 +2025-02-27 01:36:57,649 Train Loss: 64.7698882, Val Loss: 52.3064688 +2025-02-27 01:36:57,649 Epoch 598/2000 +2025-02-27 01:37:13,662 Current Learning Rate: 0.0000024672 +2025-02-27 01:37:13,663 Train Loss: 65.2681758, Val Loss: 52.2912924 +2025-02-27 01:37:13,663 Epoch 599/2000 +2025-02-27 01:37:29,409 Current Learning Rate: 0.0000006168 +2025-02-27 01:37:29,410 Train Loss: 64.9790157, Val Loss: 52.1402089 +2025-02-27 01:37:29,410 Epoch 600/2000 +2025-02-27 01:37:45,648 Current Learning Rate: 0.0000000000 +2025-02-27 01:37:45,648 Train Loss: 65.2907633, Val Loss: 52.4918404 +2025-02-27 01:37:45,648 Epoch 601/2000 +2025-02-27 01:38:01,912 Current Learning Rate: 0.0000006168 +2025-02-27 01:38:01,912 Train Loss: 65.0913558, Val Loss: 52.3987099 +2025-02-27 01:38:01,913 Epoch 602/2000 +2025-02-27 01:38:17,859 Current Learning Rate: 0.0000024672 +2025-02-27 01:38:17,862 Train Loss: 65.0152954, Val Loss: 52.1893608 +2025-02-27 01:38:17,863 Epoch 603/2000 +2025-02-27 01:38:34,108 Current Learning Rate: 0.0000055506 +2025-02-27 01:38:34,108 Train Loss: 64.8387549, Val Loss: 52.3717163 +2025-02-27 01:38:34,109 Epoch 604/2000 +2025-02-27 01:38:50,078 Current Learning Rate: 0.0000098664 +2025-02-27 01:38:50,078 Train Loss: 64.3510572, Val Loss: 52.3643883 +2025-02-27 01:38:50,078 Epoch 605/2000 +2025-02-27 01:39:06,088 Current Learning Rate: 0.0000154133 +2025-02-27 01:39:06,088 Train Loss: 64.9319892, Val Loss: 52.2407617 +2025-02-27 01:39:06,088 Epoch 606/2000 +2025-02-27 01:39:21,523 Current Learning Rate: 0.0000221902 +2025-02-27 01:39:21,524 Train Loss: 65.0204072, Val Loss: 52.3152521 +2025-02-27 01:39:21,524 Epoch 607/2000 +2025-02-27 01:39:37,685 Current Learning Rate: 0.0000301952 +2025-02-27 01:39:37,686 Train Loss: 65.1349082, Val Loss: 52.0958493 
+2025-02-27 01:39:37,686 Epoch 608/2000 +2025-02-27 01:39:54,267 Current Learning Rate: 0.0000394265 +2025-02-27 01:39:54,267 Train Loss: 64.7982920, Val Loss: 52.3972597 +2025-02-27 01:39:54,267 Epoch 609/2000 +2025-02-27 01:40:10,815 Current Learning Rate: 0.0000498817 +2025-02-27 01:40:10,816 Train Loss: 65.0413054, Val Loss: 52.2177369 +2025-02-27 01:40:10,816 Epoch 610/2000 +2025-02-27 01:40:27,638 Current Learning Rate: 0.0000615583 +2025-02-27 01:40:27,639 Train Loss: 64.3068904, Val Loss: 52.0957750 +2025-02-27 01:40:27,639 Epoch 611/2000 +2025-02-27 01:40:44,349 Current Learning Rate: 0.0000744534 +2025-02-27 01:40:44,349 Train Loss: 64.5976859, Val Loss: 52.1400240 +2025-02-27 01:40:44,350 Epoch 612/2000 +2025-02-27 01:41:00,938 Current Learning Rate: 0.0000885637 +2025-02-27 01:41:00,939 Train Loss: 64.5452498, Val Loss: 52.1812680 +2025-02-27 01:41:00,939 Epoch 613/2000 +2025-02-27 01:41:17,630 Current Learning Rate: 0.0001038859 +2025-02-27 01:41:17,631 Train Loss: 64.3132318, Val Loss: 51.6703533 +2025-02-27 01:41:17,631 Epoch 614/2000 +2025-02-27 01:41:33,792 Current Learning Rate: 0.0001204162 +2025-02-27 01:41:33,803 Train Loss: 64.1325826, Val Loss: 51.4759103 +2025-02-27 01:41:33,804 Epoch 615/2000 +2025-02-27 01:41:48,559 Current Learning Rate: 0.0001381504 +2025-02-27 01:41:48,560 Train Loss: 63.8082608, Val Loss: 51.3064694 +2025-02-27 01:41:48,560 Epoch 616/2000 +2025-02-27 01:42:04,741 Current Learning Rate: 0.0001570842 +2025-02-27 01:42:04,742 Train Loss: 63.8338406, Val Loss: 51.6738609 +2025-02-27 01:42:04,742 Epoch 617/2000 +2025-02-27 01:42:21,265 Current Learning Rate: 0.0001772129 +2025-02-27 01:42:21,266 Train Loss: 63.3608413, Val Loss: 55.3268848 +2025-02-27 01:42:21,266 Epoch 618/2000 +2025-02-27 01:42:36,516 Current Learning Rate: 0.0001985316 +2025-02-27 01:42:36,516 Train Loss: 62.9725156, Val Loss: 50.5554162 +2025-02-27 01:42:36,517 Epoch 619/2000 +2025-02-27 01:42:52,617 Current Learning Rate: 0.0002210349 +2025-02-27 
01:42:52,618 Train Loss: 68.9031783, Val Loss: 57.0321811 +2025-02-27 01:42:52,618 Epoch 620/2000 +2025-02-27 01:43:08,366 Current Learning Rate: 0.0002447174 +2025-02-27 01:43:08,366 Train Loss: 140.3350284, Val Loss: 49.7428022 +2025-02-27 01:43:08,366 Epoch 621/2000 +2025-02-27 01:43:24,487 Current Learning Rate: 0.0002695732 +2025-02-27 01:43:24,487 Train Loss: 62.8532925, Val Loss: 52.8818499 +2025-02-27 01:43:24,488 Epoch 622/2000 +2025-02-27 01:43:41,408 Current Learning Rate: 0.0002955962 +2025-02-27 01:43:41,409 Train Loss: 63.8667139, Val Loss: 49.5766477 +2025-02-27 01:43:41,409 Epoch 623/2000 +2025-02-27 01:43:57,175 Current Learning Rate: 0.0003227798 +2025-02-27 01:43:57,176 Train Loss: 144.8477463, Val Loss: 48.3589436 +2025-02-27 01:43:57,176 Epoch 624/2000 +2025-02-27 01:44:13,407 Current Learning Rate: 0.0003511176 +2025-02-27 01:44:13,408 Train Loss: 60.4476144, Val Loss: 46.9624666 +2025-02-27 01:44:13,409 Epoch 625/2000 +2025-02-27 01:44:30,441 Current Learning Rate: 0.0003806023 +2025-02-27 01:44:30,442 Train Loss: 252.0562600, Val Loss: 52.7364563 +2025-02-27 01:44:30,442 Epoch 626/2000 +2025-02-27 01:44:47,512 Current Learning Rate: 0.0004112269 +2025-02-27 01:44:47,513 Train Loss: 59.0781818, Val Loss: 49.0402504 +2025-02-27 01:44:47,513 Epoch 627/2000 +2025-02-27 01:45:05,076 Current Learning Rate: 0.0004429836 +2025-02-27 01:45:05,077 Train Loss: 63.1764179, Val Loss: 56.1193016 +2025-02-27 01:45:05,078 Epoch 628/2000 +2025-02-27 01:45:21,216 Current Learning Rate: 0.0004758647 +2025-02-27 01:45:21,216 Train Loss: 418.0493369, Val Loss: 47.7579589 +2025-02-27 01:45:21,217 Epoch 629/2000 +2025-02-27 01:45:38,023 Current Learning Rate: 0.0005098621 +2025-02-27 01:45:38,023 Train Loss: 57.8814520, Val Loss: 44.0902361 +2025-02-27 01:45:38,024 Epoch 630/2000 +2025-02-27 01:45:54,527 Current Learning Rate: 0.0005449674 +2025-02-27 01:45:54,528 Train Loss: 55.5019911, Val Loss: 43.0669899 +2025-02-27 01:45:54,528 Epoch 631/2000 +2025-02-27 
01:46:10,625 Current Learning Rate: 0.0005811718 +2025-02-27 01:46:10,626 Train Loss: 241.6495985, Val Loss: 50.4165479 +2025-02-27 01:46:10,626 Epoch 632/2000 +2025-02-27 01:46:27,945 Current Learning Rate: 0.0006184666 +2025-02-27 01:46:27,946 Train Loss: 96.8648329, Val Loss: 164.0164430 +2025-02-27 01:46:27,946 Epoch 633/2000 +2025-02-27 01:46:43,761 Current Learning Rate: 0.0006568424 +2025-02-27 01:46:43,761 Train Loss: 481.3101882, Val Loss: 43.1344374 +2025-02-27 01:46:43,761 Epoch 634/2000 +2025-02-27 01:47:00,356 Current Learning Rate: 0.0006962899 +2025-02-27 01:47:00,356 Train Loss: 52.6290277, Val Loss: 40.0269022 +2025-02-27 01:47:00,357 Epoch 635/2000 +2025-02-27 01:47:17,503 Current Learning Rate: 0.0007367992 +2025-02-27 01:47:17,504 Train Loss: 134.6886079, Val Loss: 161.4379051 +2025-02-27 01:47:17,504 Epoch 636/2000 +2025-02-27 01:47:34,658 Current Learning Rate: 0.0007783604 +2025-02-27 01:47:34,659 Train Loss: 461.7324731, Val Loss: 46.3308085 +2025-02-27 01:47:34,659 Epoch 637/2000 +2025-02-27 01:47:51,076 Current Learning Rate: 0.0008209632 +2025-02-27 01:47:51,076 Train Loss: 51.8465229, Val Loss: 41.1131209 +2025-02-27 01:47:51,076 Epoch 638/2000 +2025-02-27 01:48:07,932 Current Learning Rate: 0.0008645971 +2025-02-27 01:48:07,932 Train Loss: 371.9164831, Val Loss: 49.4205254 +2025-02-27 01:48:07,933 Epoch 639/2000 +2025-02-27 01:48:23,438 Current Learning Rate: 0.0009092514 +2025-02-27 01:48:23,439 Train Loss: 71.3567726, Val Loss: 41.9936369 +2025-02-27 01:48:23,439 Epoch 640/2000 +2025-02-27 01:48:39,530 Current Learning Rate: 0.0009549150 +2025-02-27 01:48:39,531 Train Loss: 630.1893774, Val Loss: 39.4563921 +2025-02-27 01:48:39,531 Epoch 641/2000 +2025-02-27 01:48:55,759 Current Learning Rate: 0.0010015767 +2025-02-27 01:48:55,761 Train Loss: 49.1564812, Val Loss: 35.9776822 +2025-02-27 01:48:55,761 Epoch 642/2000 +2025-02-27 01:49:12,936 Current Learning Rate: 0.0010492249 +2025-02-27 01:49:12,937 Train Loss: 44.7401469, Val Loss: 
48.7602535 +2025-02-27 01:49:12,937 Epoch 643/2000 +2025-02-27 01:49:31,045 Current Learning Rate: 0.0010978480 +2025-02-27 01:49:31,046 Train Loss: 1116.1681118, Val Loss: 51.3607550 +2025-02-27 01:49:31,046 Epoch 644/2000 +2025-02-27 01:49:48,965 Current Learning Rate: 0.0011474338 +2025-02-27 01:49:48,965 Train Loss: 51.0846910, Val Loss: 33.9736047 +2025-02-27 01:49:48,966 Epoch 645/2000 +2025-02-27 01:50:07,099 Current Learning Rate: 0.0011979702 +2025-02-27 01:50:07,099 Train Loss: 41.5713126, Val Loss: 32.3570329 +2025-02-27 01:50:07,100 Epoch 646/2000 +2025-02-27 01:50:25,179 Current Learning Rate: 0.0012494447 +2025-02-27 01:50:25,180 Train Loss: 38.8940129, Val Loss: 31.8020452 +2025-02-27 01:50:25,180 Epoch 647/2000 +2025-02-27 01:50:43,009 Current Learning Rate: 0.0013018445 +2025-02-27 01:50:43,013 Train Loss: 42.6102140, Val Loss: 28.7339682 +2025-02-27 01:50:43,013 Epoch 648/2000 +2025-02-27 01:50:59,586 Current Learning Rate: 0.0013551569 +2025-02-27 01:50:59,587 Train Loss: 1152.9012482, Val Loss: 37.9957584 +2025-02-27 01:50:59,587 Epoch 649/2000 +2025-02-27 01:51:15,927 Current Learning Rate: 0.0014093685 +2025-02-27 01:51:15,928 Train Loss: 45.1179845, Val Loss: 32.4013258 +2025-02-27 01:51:15,928 Epoch 650/2000 +2025-02-27 01:51:31,500 Current Learning Rate: 0.0014644661 +2025-02-27 01:51:31,500 Train Loss: 37.2604492, Val Loss: 28.5599858 +2025-02-27 01:51:31,501 Epoch 651/2000 +2025-02-27 01:51:47,363 Current Learning Rate: 0.0015204360 +2025-02-27 01:51:47,364 Train Loss: 34.3094425, Val Loss: 26.2660150 +2025-02-27 01:51:47,364 Epoch 652/2000 +2025-02-27 01:52:03,463 Current Learning Rate: 0.0015772645 +2025-02-27 01:52:03,463 Train Loss: 33.5404319, Val Loss: 25.7084849 +2025-02-27 01:52:03,463 Epoch 653/2000 +2025-02-27 01:52:19,301 Current Learning Rate: 0.0016349374 +2025-02-27 01:52:19,302 Train Loss: 38.1440197, Val Loss: 128.2554162 +2025-02-27 01:52:19,302 Epoch 654/2000 +2025-02-27 01:52:35,090 Current Learning Rate: 0.0016934407 
+2025-02-27 01:52:35,091 Train Loss: 795.5730514, Val Loss: 47.2986404 +2025-02-27 01:52:35,091 Epoch 655/2000 +2025-02-27 01:52:50,913 Current Learning Rate: 0.0017527598 +2025-02-27 01:52:50,914 Train Loss: 35.0967053, Val Loss: 24.4442667 +2025-02-27 01:52:50,914 Epoch 656/2000 +2025-02-27 01:53:07,776 Current Learning Rate: 0.0018128801 +2025-02-27 01:53:07,777 Train Loss: 29.3395860, Val Loss: 23.4132920 +2025-02-27 01:53:07,777 Epoch 657/2000 +2025-02-27 01:53:23,451 Current Learning Rate: 0.0018737867 +2025-02-27 01:53:23,452 Train Loss: 27.9805365, Val Loss: 21.1901080 +2025-02-27 01:53:23,452 Epoch 658/2000 +2025-02-27 01:53:39,215 Current Learning Rate: 0.0019354647 +2025-02-27 01:53:39,215 Train Loss: 26.7577600, Val Loss: 22.1695949 +2025-02-27 01:53:39,216 Epoch 659/2000 +2025-02-27 01:53:54,966 Current Learning Rate: 0.0019978989 +2025-02-27 01:53:54,966 Train Loss: 158.3820128, Val Loss: 23.9270372 +2025-02-27 01:53:54,966 Epoch 660/2000 +2025-02-27 01:54:10,052 Current Learning Rate: 0.0020610737 +2025-02-27 01:54:10,052 Train Loss: 26.4080975, Val Loss: 18.5612432 +2025-02-27 01:54:10,053 Epoch 661/2000 +2025-02-27 01:54:24,864 Current Learning Rate: 0.0021249737 +2025-02-27 01:54:24,864 Train Loss: 764.0096581, Val Loss: 35.8149597 +2025-02-27 01:54:24,865 Epoch 662/2000 +2025-02-27 01:54:40,570 Current Learning Rate: 0.0021895831 +2025-02-27 01:54:40,571 Train Loss: 28.2839461, Val Loss: 18.9364461 +2025-02-27 01:54:40,571 Epoch 663/2000 +2025-02-27 01:54:56,616 Current Learning Rate: 0.0022548859 +2025-02-27 01:54:56,616 Train Loss: 22.3605815, Val Loss: 17.0499196 +2025-02-27 01:54:56,617 Epoch 664/2000 +2025-02-27 01:55:12,035 Current Learning Rate: 0.0023208660 +2025-02-27 01:55:12,035 Train Loss: 20.3159306, Val Loss: 15.6619213 +2025-02-27 01:55:12,035 Epoch 665/2000 +2025-02-27 01:55:27,777 Current Learning Rate: 0.0023875072 +2025-02-27 01:55:27,777 Train Loss: 19.3082922, Val Loss: 14.7679985 +2025-02-27 01:55:27,778 Epoch 666/2000 
+2025-02-27 01:55:42,841 Current Learning Rate: 0.0024547929 +2025-02-27 01:55:42,842 Train Loss: 18.4482102, Val Loss: 13.9346707 +2025-02-27 01:55:42,848 Epoch 667/2000 +2025-02-27 01:55:58,269 Current Learning Rate: 0.0025227067 +2025-02-27 01:55:58,270 Train Loss: 43.3745038, Val Loss: 264.2076508 +2025-02-27 01:55:58,270 Epoch 668/2000 +2025-02-27 01:56:13,600 Current Learning Rate: 0.0025912316 +2025-02-27 01:56:13,601 Train Loss: 163.0822832, Val Loss: 14.5027808 +2025-02-27 01:56:13,601 Epoch 669/2000 +2025-02-27 01:56:28,825 Current Learning Rate: 0.0026603509 +2025-02-27 01:56:28,826 Train Loss: 17.3950756, Val Loss: 12.4574250 +2025-02-27 01:56:28,826 Epoch 670/2000 +2025-02-27 01:56:45,093 Current Learning Rate: 0.0027300475 +2025-02-27 01:56:45,094 Train Loss: 16.0260545, Val Loss: 18.5162951 +2025-02-27 01:56:45,094 Epoch 671/2000 +2025-02-27 01:57:01,452 Current Learning Rate: 0.0028003042 +2025-02-27 01:57:01,452 Train Loss: 518.0580858, Val Loss: 21.9831057 +2025-02-27 01:57:01,453 Epoch 672/2000 +2025-02-27 01:57:17,171 Current Learning Rate: 0.0028711035 +2025-02-27 01:57:17,172 Train Loss: 17.6609222, Val Loss: 11.7831844 +2025-02-27 01:57:17,172 Epoch 673/2000 +2025-02-27 01:57:32,967 Current Learning Rate: 0.0029424282 +2025-02-27 01:57:32,968 Train Loss: 14.0208923, Val Loss: 10.5617512 +2025-02-27 01:57:32,968 Epoch 674/2000 +2025-02-27 01:57:48,991 Current Learning Rate: 0.0030142605 +2025-02-27 01:57:48,992 Train Loss: 12.7413932, Val Loss: 9.6224589 +2025-02-27 01:57:48,992 Epoch 675/2000 +2025-02-27 01:58:06,222 Current Learning Rate: 0.0030865828 +2025-02-27 01:58:06,222 Train Loss: 12.0321444, Val Loss: 10.2623249 +2025-02-27 01:58:06,223 Epoch 676/2000 +2025-02-27 01:58:24,290 Current Learning Rate: 0.0031593772 +2025-02-27 01:58:24,291 Train Loss: 11.2074607, Val Loss: 8.5160898 +2025-02-27 01:58:24,292 Epoch 677/2000 +2025-02-27 01:58:42,171 Current Learning Rate: 0.0032326258 +2025-02-27 01:58:42,171 Train Loss: 10.4823267, Val 
Loss: 8.1106437 +2025-02-27 01:58:42,172 Epoch 678/2000 +2025-02-27 01:58:59,319 Current Learning Rate: 0.0033063104 +2025-02-27 01:58:59,320 Train Loss: 16.3371565, Val Loss: 47.4178661 +2025-02-27 01:58:59,320 Epoch 679/2000 +2025-02-27 01:59:17,046 Current Learning Rate: 0.0033804129 +2025-02-27 01:59:17,046 Train Loss: 119.8226408, Val Loss: 8.9012794 +2025-02-27 01:59:17,047 Epoch 680/2000 +2025-02-27 01:59:32,914 Current Learning Rate: 0.0034549150 +2025-02-27 01:59:32,914 Train Loss: 9.9802298, Val Loss: 7.1346443 +2025-02-27 01:59:32,914 Epoch 681/2000 +2025-02-27 01:59:48,817 Current Learning Rate: 0.0035297984 +2025-02-27 01:59:48,818 Train Loss: 9.0465227, Val Loss: 6.5011085 +2025-02-27 01:59:48,818 Epoch 682/2000 +2025-02-27 02:00:04,876 Current Learning Rate: 0.0036050445 +2025-02-27 02:00:04,876 Train Loss: 29.9076254, Val Loss: 32.3764446 +2025-02-27 02:00:04,876 Epoch 683/2000 +2025-02-27 02:00:21,107 Current Learning Rate: 0.0036806348 +2025-02-27 02:00:21,108 Train Loss: 11.1414281, Val Loss: 6.0443555 +2025-02-27 02:00:21,108 Epoch 684/2000 +2025-02-27 02:00:36,619 Current Learning Rate: 0.0037565506 +2025-02-27 02:00:36,620 Train Loss: 196.0058740, Val Loss: 11.9957382 +2025-02-27 02:00:36,620 Epoch 685/2000 +2025-02-27 02:00:52,535 Current Learning Rate: 0.0038327732 +2025-02-27 02:00:52,536 Train Loss: 10.0785948, Val Loss: 6.0778641 +2025-02-27 02:00:52,536 Epoch 686/2000 +2025-02-27 02:01:08,508 Current Learning Rate: 0.0039092838 +2025-02-27 02:01:08,508 Train Loss: 7.2936094, Val Loss: 5.2223672 +2025-02-27 02:01:08,509 Epoch 687/2000 +2025-02-27 02:01:24,750 Current Learning Rate: 0.0039860635 +2025-02-27 02:01:24,750 Train Loss: 6.9000064, Val Loss: 5.0271710 +2025-02-27 02:01:24,750 Epoch 688/2000 +2025-02-27 02:01:40,404 Current Learning Rate: 0.0040630934 +2025-02-27 02:01:40,405 Train Loss: 6.4823131, Val Loss: 4.5881554 +2025-02-27 02:01:40,405 Epoch 689/2000 +2025-02-27 02:01:55,545 Current Learning Rate: 0.0041403545 +2025-02-27 
02:01:55,546 Train Loss: 5.9726424, Val Loss: 4.5400221 +2025-02-27 02:01:55,546 Epoch 690/2000 +2025-02-27 02:02:10,994 Current Learning Rate: 0.0042178277 +2025-02-27 02:02:10,995 Train Loss: 29.3915050, Val Loss: 4.5690924 +2025-02-27 02:02:10,995 Epoch 691/2000 +2025-02-27 02:02:27,162 Current Learning Rate: 0.0042954938 +2025-02-27 02:02:27,162 Train Loss: 5.8244144, Val Loss: 4.9208631 +2025-02-27 02:02:27,163 Epoch 692/2000 +2025-02-27 02:02:42,510 Current Learning Rate: 0.0043733338 +2025-02-27 02:02:42,511 Train Loss: 42.0132956, Val Loss: 4.0569966 +2025-02-27 02:02:42,511 Epoch 693/2000 +2025-02-27 02:02:57,466 Current Learning Rate: 0.0044513284 +2025-02-27 02:02:57,466 Train Loss: 5.2485305, Val Loss: 3.7868568 +2025-02-27 02:02:57,467 Epoch 694/2000 +2025-02-27 02:03:13,106 Current Learning Rate: 0.0045294584 +2025-02-27 02:03:13,106 Train Loss: 28.2105308, Val Loss: 6.8231780 +2025-02-27 02:03:13,106 Epoch 695/2000 +2025-02-27 02:03:28,615 Current Learning Rate: 0.0046077045 +2025-02-27 02:03:28,616 Train Loss: 5.5224806, Val Loss: 3.6594531 +2025-02-27 02:03:28,616 Epoch 696/2000 +2025-02-27 02:03:44,471 Current Learning Rate: 0.0046860474 +2025-02-27 02:03:44,472 Train Loss: 20.4798078, Val Loss: 9.5011425 +2025-02-27 02:03:44,472 Epoch 697/2000 +2025-02-27 02:04:01,469 Current Learning Rate: 0.0047644677 +2025-02-27 02:04:01,469 Train Loss: 5.4309526, Val Loss: 3.7643663 +2025-02-27 02:04:01,469 Epoch 698/2000 +2025-02-27 02:04:18,022 Current Learning Rate: 0.0048429462 +2025-02-27 02:04:18,023 Train Loss: 30.1329420, Val Loss: 3.0023546 +2025-02-27 02:04:18,023 Epoch 699/2000 +2025-02-27 02:04:34,542 Current Learning Rate: 0.0049214634 +2025-02-27 02:04:34,543 Train Loss: 8.4971211, Val Loss: 114.8396958 +2025-02-27 02:04:34,543 Epoch 700/2000 +2025-02-27 02:04:52,117 Current Learning Rate: 0.0050000000 +2025-02-27 02:04:52,117 Train Loss: 101938408.3515987, Val Loss: 2472348.9051095 +2025-02-27 02:04:52,117 Epoch 701/2000 +2025-02-27 
02:05:08,678 Current Learning Rate: 0.0050785366 +2025-02-27 02:05:08,678 Train Loss: 413417.2577898, Val Loss: 88772.6642336 +2025-02-27 02:05:08,679 Epoch 702/2000 +2025-02-27 02:05:24,915 Current Learning Rate: 0.0051570538 +2025-02-27 02:05:24,915 Train Loss: 72794.6630473, Val Loss: 55109.6707725 +2025-02-27 02:05:24,915 Epoch 703/2000 +2025-02-27 02:05:40,967 Current Learning Rate: 0.0052355323 +2025-02-27 02:05:40,967 Train Loss: 49589.9128970, Val Loss: 40759.8471715 +2025-02-27 02:05:40,968 Epoch 704/2000 +2025-02-27 02:05:57,644 Current Learning Rate: 0.0053139526 +2025-02-27 02:05:57,644 Train Loss: 37969.8183424, Val Loss: 32516.7369982 +2025-02-27 02:05:57,644 Epoch 705/2000 +2025-02-27 02:06:14,080 Current Learning Rate: 0.0053922955 +2025-02-27 02:06:14,081 Train Loss: 31016.3261134, Val Loss: 27037.8375912 +2025-02-27 02:06:14,081 Epoch 706/2000 +2025-02-27 02:06:29,180 Current Learning Rate: 0.0054705416 +2025-02-27 02:06:29,180 Train Loss: 26284.0285292, Val Loss: 23391.2271898 +2025-02-27 02:06:29,180 Epoch 707/2000 +2025-02-27 02:06:44,298 Current Learning Rate: 0.0055486716 +2025-02-27 02:06:44,298 Train Loss: 22849.8562268, Val Loss: 20569.9783303 +2025-02-27 02:06:44,299 Epoch 708/2000 +2025-02-27 02:06:59,073 Current Learning Rate: 0.0056266662 +2025-02-27 02:06:59,073 Train Loss: 20376.4504433, Val Loss: 18443.0219738 +2025-02-27 02:06:59,074 Epoch 709/2000 +2025-02-27 02:07:14,161 Current Learning Rate: 0.0057045062 +2025-02-27 02:07:14,161 Train Loss: 18401.3566714, Val Loss: 16800.4647582 +2025-02-27 02:07:14,161 Epoch 710/2000 +2025-02-27 02:07:29,318 Current Learning Rate: 0.0057821723 +2025-02-27 02:07:29,319 Train Loss: 16744.0811761, Val Loss: 15426.8577023 +2025-02-27 02:07:29,319 Epoch 711/2000 +2025-02-27 02:07:45,341 Current Learning Rate: 0.0058596455 +2025-02-27 02:07:45,341 Train Loss: 15401.3711521, Val Loss: 14265.7675639 +2025-02-27 02:07:45,342 Epoch 712/2000 +2025-02-27 02:08:01,011 Current Learning Rate: 0.0059369066 
+2025-02-27 02:08:01,012 Train Loss: 14263.8265830, Val Loss: 13198.7783227 +2025-02-27 02:08:01,012 Epoch 713/2000 +2025-02-27 02:08:16,144 Current Learning Rate: 0.0060139365 +2025-02-27 02:08:16,145 Train Loss: 13292.6537452, Val Loss: 12352.3612378 +2025-02-27 02:08:16,145 Epoch 714/2000 +2025-02-27 02:08:31,816 Current Learning Rate: 0.0060907162 +2025-02-27 02:08:31,816 Train Loss: 12429.5666454, Val Loss: 11514.3424954 +2025-02-27 02:08:31,816 Epoch 715/2000 +2025-02-27 02:08:46,802 Current Learning Rate: 0.0061672268 +2025-02-27 02:08:46,802 Train Loss: 11639.7063953, Val Loss: 10788.1873479 +2025-02-27 02:08:46,802 Epoch 716/2000 +2025-02-27 02:09:01,945 Current Learning Rate: 0.0062434494 +2025-02-27 02:09:01,945 Train Loss: 10892.8427606, Val Loss: 10125.8939705 +2025-02-27 02:09:01,946 Epoch 717/2000 +2025-02-27 02:09:17,020 Current Learning Rate: 0.0063193652 +2025-02-27 02:09:17,020 Train Loss: 10237.8510889, Val Loss: 9534.5012165 +2025-02-27 02:09:17,020 Epoch 718/2000 +2025-02-27 02:09:31,860 Current Learning Rate: 0.0063949555 +2025-02-27 02:09:31,860 Train Loss: 9641.1903085, Val Loss: 8938.4846411 +2025-02-27 02:09:31,861 Epoch 719/2000 +2025-02-27 02:09:47,227 Current Learning Rate: 0.0064702016 +2025-02-27 02:09:47,227 Train Loss: 9086.1330904, Val Loss: 8459.3323259 +2025-02-27 02:09:47,228 Epoch 720/2000 +2025-02-27 02:10:03,034 Current Learning Rate: 0.0065450850 +2025-02-27 02:10:03,035 Train Loss: 8551.0874490, Val Loss: 7970.5822308 +2025-02-27 02:10:03,035 Epoch 721/2000 +2025-02-27 02:10:19,426 Current Learning Rate: 0.0066195871 +2025-02-27 02:10:19,426 Train Loss: 8056.8829683, Val Loss: 7511.2738557 +2025-02-27 02:10:19,427 Epoch 722/2000 +2025-02-27 02:10:35,220 Current Learning Rate: 0.0066936896 +2025-02-27 02:10:35,221 Train Loss: 7636.2200212, Val Loss: 7081.2511405 +2025-02-27 02:10:35,221 Epoch 723/2000 +2025-02-27 02:10:50,594 Current Learning Rate: 0.0067673742 +2025-02-27 02:10:50,595 Train Loss: 7182.2689595, Val Loss: 
6723.2169062 +2025-02-27 02:10:50,595 Epoch 724/2000 +2025-02-27 02:11:06,465 Current Learning Rate: 0.0068406228 +2025-02-27 02:11:06,465 Train Loss: 6819.4092213, Val Loss: 6332.5916210 +2025-02-27 02:11:06,465 Epoch 725/2000 +2025-02-27 02:11:22,134 Current Learning Rate: 0.0069134172 +2025-02-27 02:11:22,135 Train Loss: 6449.2683755, Val Loss: 6005.1071130 +2025-02-27 02:11:22,135 Epoch 726/2000 +2025-02-27 02:11:37,898 Current Learning Rate: 0.0069857395 +2025-02-27 02:11:37,899 Train Loss: 6099.3467041, Val Loss: 5675.9629714 +2025-02-27 02:11:37,899 Epoch 727/2000 +2025-02-27 02:11:53,706 Current Learning Rate: 0.0070575718 +2025-02-27 02:11:53,707 Train Loss: 5777.7775586, Val Loss: 5376.2797483 +2025-02-27 02:11:53,707 Epoch 728/2000 +2025-02-27 02:12:09,781 Current Learning Rate: 0.0071288965 +2025-02-27 02:12:09,782 Train Loss: 5470.9707037, Val Loss: 5082.1844586 +2025-02-27 02:12:09,782 Epoch 729/2000 +2025-02-27 02:12:25,576 Current Learning Rate: 0.0071996958 +2025-02-27 02:12:25,576 Train Loss: 5174.3883065, Val Loss: 4809.5578619 +2025-02-27 02:12:25,576 Epoch 730/2000 +2025-02-27 02:12:41,149 Current Learning Rate: 0.0072699525 +2025-02-27 02:12:41,149 Train Loss: 4910.3872044, Val Loss: 4569.6272430 +2025-02-27 02:12:41,149 Epoch 731/2000 +2025-02-27 02:12:56,947 Current Learning Rate: 0.0073396491 +2025-02-27 02:12:56,947 Train Loss: 4653.2627461, Val Loss: 4325.6252851 +2025-02-27 02:12:56,948 Epoch 732/2000 +2025-02-27 02:13:12,590 Current Learning Rate: 0.0074087684 +2025-02-27 02:13:12,591 Train Loss: 4416.0190769, Val Loss: 4110.5616066 +2025-02-27 02:13:12,591 Epoch 733/2000 +2025-02-27 02:13:28,174 Current Learning Rate: 0.0074772933 +2025-02-27 02:13:28,174 Train Loss: 4190.1336367, Val Loss: 3897.2468541 +2025-02-27 02:13:28,174 Epoch 734/2000 +2025-02-27 02:13:43,301 Current Learning Rate: 0.0075452071 +2025-02-27 02:13:43,302 Train Loss: 3988.1273091, Val Loss: 3683.2667085 +2025-02-27 02:13:43,302 Epoch 735/2000 +2025-02-27 
02:13:58,715 Current Learning Rate: 0.0076124928 +2025-02-27 02:13:58,715 Train Loss: 3777.8259161, Val Loss: 3494.6437804 +2025-02-27 02:13:58,716 Epoch 736/2000 +2025-02-27 02:14:14,871 Current Learning Rate: 0.0076791340 +2025-02-27 02:14:14,872 Train Loss: 3590.2406369, Val Loss: 3321.9454266 +2025-02-27 02:14:14,872 Epoch 737/2000 +2025-02-27 02:14:29,584 Current Learning Rate: 0.0077451141 +2025-02-27 02:14:29,585 Train Loss: 3407.8875567, Val Loss: 3160.6849243 +2025-02-27 02:14:29,585 Epoch 738/2000 +2025-02-27 02:14:44,486 Current Learning Rate: 0.0078104169 +2025-02-27 02:14:44,486 Train Loss: 3227.9459254, Val Loss: 2997.7036287 +2025-02-27 02:14:44,486 Epoch 739/2000 +2025-02-27 02:14:59,110 Current Learning Rate: 0.0078750263 +2025-02-27 02:14:59,111 Train Loss: 3068.6187663, Val Loss: 2838.4978045 +2025-02-27 02:14:59,111 Epoch 740/2000 +2025-02-27 02:15:15,076 Current Learning Rate: 0.0079389263 +2025-02-27 02:15:15,077 Train Loss: 2912.1741308, Val Loss: 2696.4017830 +2025-02-27 02:15:15,077 Epoch 741/2000 +2025-02-27 02:15:30,754 Current Learning Rate: 0.0080021011 +2025-02-27 02:15:30,754 Train Loss: 2766.1759765, Val Loss: 2564.1190979 +2025-02-27 02:15:30,755 Epoch 742/2000 +2025-02-27 02:15:45,875 Current Learning Rate: 0.0080645353 +2025-02-27 02:15:45,875 Train Loss: 2651.4177680, Val Loss: 2491.6759333 +2025-02-27 02:15:45,876 Epoch 743/2000 +2025-02-27 02:16:01,470 Current Learning Rate: 0.0081262133 +2025-02-27 02:16:01,470 Train Loss: 2537.5039685, Val Loss: 2322.8769389 +2025-02-27 02:16:01,471 Epoch 744/2000 +2025-02-27 02:16:17,475 Current Learning Rate: 0.0081871199 +2025-02-27 02:16:17,475 Train Loss: 2411.9778791, Val Loss: 2328.3597076 +2025-02-27 02:16:17,475 Epoch 745/2000 +2025-02-27 02:16:33,772 Current Learning Rate: 0.0082472402 +2025-02-27 02:16:33,772 Train Loss: 12339.6112105, Val Loss: 2174.4579722 +2025-02-27 02:16:33,772 Epoch 746/2000 +2025-02-27 02:16:49,088 Current Learning Rate: 0.0083065593 +2025-02-27 02:16:49,088 
Train Loss: 2822.8156133, Val Loss: 1959.8819571 +2025-02-27 02:16:49,089 Epoch 747/2000 +2025-02-27 02:17:04,324 Current Learning Rate: 0.0083650626 +2025-02-27 02:17:04,325 Train Loss: 2051.4132105, Val Loss: 1897.0269968 +2025-02-27 02:17:04,325 Epoch 748/2000 +2025-02-27 02:17:19,938 Current Learning Rate: 0.0084227355 +2025-02-27 02:17:19,938 Train Loss: 19033.2136293, Val Loss: 3404.4533626 +2025-02-27 02:17:19,939 Epoch 749/2000 +2025-02-27 02:17:35,479 Current Learning Rate: 0.0084795640 +2025-02-27 02:17:35,479 Train Loss: 1960.2422474, Val Loss: 1655.7369364 +2025-02-27 02:17:35,479 Epoch 750/2000 +2025-02-27 02:17:51,624 Current Learning Rate: 0.0085355339 +2025-02-27 02:17:51,625 Train Loss: 1703.1490144, Val Loss: 1565.0388962 +2025-02-27 02:17:51,625 Epoch 751/2000 +2025-02-27 02:18:06,387 Current Learning Rate: 0.0085906315 +2025-02-27 02:18:06,387 Train Loss: 1635.6396333, Val Loss: 1599.6283360 +2025-02-27 02:18:06,388 Epoch 752/2000 +2025-02-27 02:18:22,046 Current Learning Rate: 0.0086448431 +2025-02-27 02:18:22,047 Train Loss: 21905.2266963, Val Loss: 1725.6031402 +2025-02-27 02:18:22,047 Epoch 753/2000 +2025-02-27 02:18:37,236 Current Learning Rate: 0.0086981555 +2025-02-27 02:18:37,236 Train Loss: 2553.0831607, Val Loss: 1326.2656820 +2025-02-27 02:18:37,237 Epoch 754/2000 +2025-02-27 02:18:52,985 Current Learning Rate: 0.0087505553 +2025-02-27 02:18:52,985 Train Loss: 1357.9010061, Val Loss: 1253.0748175 +2025-02-27 02:18:52,985 Epoch 755/2000 +2025-02-27 02:19:08,318 Current Learning Rate: 0.0088020298 +2025-02-27 02:19:08,318 Train Loss: 1303.7872250, Val Loss: 1217.2775053 +2025-02-27 02:19:08,318 Epoch 756/2000 +2025-02-27 02:19:24,044 Current Learning Rate: 0.0088525662 +2025-02-27 02:19:24,045 Train Loss: 1248.6811700, Val Loss: 1237.6549669 +2025-02-27 02:19:24,046 Epoch 757/2000 +2025-02-27 02:19:39,809 Current Learning Rate: 0.0089021520 +2025-02-27 02:19:39,810 Train Loss: 9016.0444923, Val Loss: 11831.5529957 +2025-02-27 
02:19:39,810 Epoch 758/2000 +2025-02-27 02:19:54,780 Current Learning Rate: 0.0089507751 +2025-02-27 02:19:54,781 Train Loss: 3093.8117364, Val Loss: 1114.0278332 +2025-02-27 02:19:54,781 Epoch 759/2000 +2025-02-27 02:20:10,513 Current Learning Rate: 0.0089984233 +2025-02-27 02:20:10,513 Train Loss: 1101.5675310, Val Loss: 1131.0926332 +2025-02-27 02:20:10,513 Epoch 760/2000 +2025-02-27 02:20:26,425 Current Learning Rate: 0.0090450850 +2025-02-27 02:20:26,426 Train Loss: 15712.4172118, Val Loss: 4547.6972134 +2025-02-27 02:20:26,426 Epoch 761/2000 +2025-02-27 02:20:42,356 Current Learning Rate: 0.0090907486 +2025-02-27 02:20:42,357 Train Loss: 1498.7661596, Val Loss: 898.6236171 +2025-02-27 02:20:42,357 Epoch 762/2000 +2025-02-27 02:20:58,380 Current Learning Rate: 0.0091354029 +2025-02-27 02:20:58,381 Train Loss: 942.6254444, Val Loss: 857.9658750 +2025-02-27 02:20:58,381 Epoch 763/2000 +2025-02-27 02:21:12,967 Current Learning Rate: 0.0091790368 +2025-02-27 02:21:12,968 Train Loss: 918.0281040, Val Loss: 964.3633098 +2025-02-27 02:21:12,968 Epoch 764/2000 +2025-02-27 02:21:27,937 Current Learning Rate: 0.0092216396 +2025-02-27 02:21:27,937 Train Loss: 1833.0814925, Val Loss: 16052.9879486 +2025-02-27 02:21:27,937 Epoch 765/2000 +2025-02-27 02:21:43,581 Current Learning Rate: 0.0092632008 +2025-02-27 02:21:43,582 Train Loss: 12654.9881014, Val Loss: 870.1646494 +2025-02-27 02:21:43,582 Epoch 766/2000 +2025-02-27 02:21:58,897 Current Learning Rate: 0.0093037101 +2025-02-27 02:21:58,897 Train Loss: 794.3806925, Val Loss: 727.2145277 +2025-02-27 02:21:58,898 Epoch 767/2000 +2025-02-27 02:22:14,595 Current Learning Rate: 0.0093431576 +2025-02-27 02:22:14,596 Train Loss: 764.5417113, Val Loss: 692.6416728 +2025-02-27 02:22:14,596 Epoch 768/2000 +2025-02-27 02:22:29,629 Current Learning Rate: 0.0093815334 +2025-02-27 02:22:29,630 Train Loss: 758.4646306, Val Loss: 702.3507237 +2025-02-27 02:22:29,630 Epoch 769/2000 +2025-02-27 02:22:45,510 Current Learning Rate: 
0.0094188282 +2025-02-27 02:22:45,510 Train Loss: 23063.9234448, Val Loss: 1778.0076937 +2025-02-27 02:22:45,511 Epoch 770/2000 +2025-02-27 02:23:00,682 Current Learning Rate: 0.0094550326 +2025-02-27 02:23:00,682 Train Loss: 831.8599910, Val Loss: 610.6372130 +2025-02-27 02:23:00,683 Epoch 771/2000 +2025-02-27 02:23:16,277 Current Learning Rate: 0.0094901379 +2025-02-27 02:23:16,277 Train Loss: 640.4549239, Val Loss: 581.5315636 +2025-02-27 02:23:16,278 Epoch 772/2000 +2025-02-27 02:23:31,628 Current Learning Rate: 0.0095241353 +2025-02-27 02:23:31,629 Train Loss: 655.0951789, Val Loss: 561.7796485 +2025-02-27 02:23:31,629 Epoch 773/2000 +2025-02-27 02:23:46,716 Current Learning Rate: 0.0095570164 +2025-02-27 02:23:46,716 Train Loss: 598.5519391, Val Loss: 542.2379023 +2025-02-27 02:23:46,717 Epoch 774/2000 +2025-02-27 02:24:02,458 Current Learning Rate: 0.0095887731 +2025-02-27 02:24:02,458 Train Loss: 581.1594804, Val Loss: 524.2845610 +2025-02-27 02:24:02,458 Epoch 775/2000 +2025-02-27 02:24:18,643 Current Learning Rate: 0.0096193977 +2025-02-27 02:24:18,643 Train Loss: 557.0260003, Val Loss: 536.2854568 +2025-02-27 02:24:18,643 Epoch 776/2000 +2025-02-27 02:24:34,658 Current Learning Rate: 0.0096488824 +2025-02-27 02:24:34,663 Train Loss: 3744.0026764, Val Loss: 7741.4357132 +2025-02-27 02:24:34,663 Epoch 777/2000 +2025-02-27 02:24:50,854 Current Learning Rate: 0.0096772202 +2025-02-27 02:24:50,854 Train Loss: 1088.5987615, Val Loss: 519.1544121 +2025-02-27 02:24:50,854 Epoch 778/2000 +2025-02-27 02:25:06,144 Current Learning Rate: 0.0097044038 +2025-02-27 02:25:06,145 Train Loss: 13608.7805458, Val Loss: 3305.1230801 +2025-02-27 02:25:06,145 Epoch 779/2000 +2025-02-27 02:25:22,925 Current Learning Rate: 0.0097304268 +2025-02-27 02:25:22,925 Train Loss: 1065.4748793, Val Loss: 443.0378638 +2025-02-27 02:25:22,926 Epoch 780/2000 +2025-02-27 02:25:39,065 Current Learning Rate: 0.0097552826 +2025-02-27 02:25:39,066 Train Loss: 498.9978435, Val Loss: 424.2570640 
+2025-02-27 02:25:39,066 Epoch 781/2000 +2025-02-27 02:25:55,998 Current Learning Rate: 0.0097789651 +2025-02-27 02:25:55,998 Train Loss: 454.1239649, Val Loss: 405.5040880 +2025-02-27 02:25:55,999 Epoch 782/2000 +2025-02-27 02:26:12,326 Current Learning Rate: 0.0098014684 +2025-02-27 02:26:12,326 Train Loss: 457.3408465, Val Loss: 392.8886350 +2025-02-27 02:26:12,327 Epoch 783/2000 +2025-02-27 02:26:29,223 Current Learning Rate: 0.0098227871 +2025-02-27 02:26:29,224 Train Loss: 445.9021125, Val Loss: 381.4321610 +2025-02-27 02:26:29,224 Epoch 784/2000 +2025-02-27 02:26:44,692 Current Learning Rate: 0.0098429158 +2025-02-27 02:26:44,692 Train Loss: 11371.1119291, Val Loss: 819.4691278 +2025-02-27 02:26:44,693 Epoch 785/2000 +2025-02-27 02:27:01,268 Current Learning Rate: 0.0098618496 +2025-02-27 02:27:01,268 Train Loss: 532.7761668, Val Loss: 365.3592372 +2025-02-27 02:27:01,269 Epoch 786/2000 +2025-02-27 02:27:17,674 Current Learning Rate: 0.0098795838 +2025-02-27 02:27:17,675 Train Loss: 379.9734412, Val Loss: 347.6125601 +2025-02-27 02:27:17,675 Epoch 787/2000 +2025-02-27 02:27:35,133 Current Learning Rate: 0.0098961141 +2025-02-27 02:27:35,133 Train Loss: 370.9302959, Val Loss: 332.8675118 +2025-02-27 02:27:35,133 Epoch 788/2000 +2025-02-27 02:27:51,053 Current Learning Rate: 0.0099114363 +2025-02-27 02:27:51,053 Train Loss: 357.5339292, Val Loss: 324.0313914 +2025-02-27 02:27:51,053 Epoch 789/2000 +2025-02-27 02:28:06,480 Current Learning Rate: 0.0099255466 +2025-02-27 02:28:06,480 Train Loss: 7784.1670747, Val Loss: 3991.8917275 +2025-02-27 02:28:06,481 Epoch 790/2000 +2025-02-27 02:28:21,648 Current Learning Rate: 0.0099384417 +2025-02-27 02:28:21,648 Train Loss: 751.0878658, Val Loss: 309.3574171 +2025-02-27 02:28:21,648 Epoch 791/2000 +2025-02-27 02:28:36,690 Current Learning Rate: 0.0099501183 +2025-02-27 02:28:36,690 Train Loss: 324.8726599, Val Loss: 294.2007691 +2025-02-27 02:28:36,691 Epoch 792/2000 +2025-02-27 02:28:52,661 Current Learning Rate: 
0.0099605735 +2025-02-27 02:28:52,661 Train Loss: 313.5382026, Val Loss: 282.5925952 +2025-02-27 02:28:52,661 Epoch 793/2000 +2025-02-27 02:29:08,665 Current Learning Rate: 0.0099698048 +2025-02-27 02:29:08,666 Train Loss: 5292.2925795, Val Loss: 2495.7442784 +2025-02-27 02:29:08,666 Epoch 794/2000 +2025-02-27 02:29:24,584 Current Learning Rate: 0.0099778098 +2025-02-27 02:29:24,584 Train Loss: 1121.7357418, Val Loss: 270.6125702 +2025-02-27 02:29:24,584 Epoch 795/2000 +2025-02-27 02:29:40,425 Current Learning Rate: 0.0099845867 +2025-02-27 02:29:40,425 Train Loss: 298.9860481, Val Loss: 265.5684912 +2025-02-27 02:29:40,425 Epoch 796/2000 +2025-02-27 02:29:56,004 Current Learning Rate: 0.0099901336 +2025-02-27 02:29:56,005 Train Loss: 302.6754545, Val Loss: 459.9451604 +2025-02-27 02:29:56,005 Epoch 797/2000 +2025-02-27 02:30:12,383 Current Learning Rate: 0.0099944494 +2025-02-27 02:30:12,383 Train Loss: 11787.9320196, Val Loss: 441.8691608 +2025-02-27 02:30:12,385 Epoch 798/2000 +2025-02-27 02:30:27,851 Current Learning Rate: 0.0099975328 +2025-02-27 02:30:27,852 Train Loss: 486.4416706, Val Loss: 246.9291921 +2025-02-27 02:30:27,852 Epoch 799/2000 +2025-02-27 02:30:43,740 Current Learning Rate: 0.0099993832 +2025-02-27 02:30:43,741 Train Loss: 257.6596887, Val Loss: 233.4286492 +2025-02-27 02:30:43,741 Epoch 800/2000 +2025-02-27 02:30:59,483 Current Learning Rate: 0.0100000000 +2025-02-27 02:30:59,483 Train Loss: 251.3018478, Val Loss: 224.3536035 +2025-02-27 02:30:59,484 Epoch 801/2000 +2025-02-27 02:31:17,056 Current Learning Rate: 0.0099993832 +2025-02-27 02:31:17,057 Train Loss: 238.5510431, Val Loss: 215.9584302 +2025-02-27 02:31:17,057 Epoch 802/2000 +2025-02-27 02:31:34,438 Current Learning Rate: 0.0099975328 +2025-02-27 02:31:34,438 Train Loss: 232.9802748, Val Loss: 212.0134889 +2025-02-27 02:31:34,438 Epoch 803/2000 +2025-02-27 02:31:50,774 Current Learning Rate: 0.0099944494 +2025-02-27 02:31:50,774 Train Loss: 290.7654529, Val Loss: 1149.1422170 
+2025-02-27 02:31:50,774 Epoch 804/2000 +2025-02-27 02:32:07,515 Current Learning Rate: 0.0099901336 +2025-02-27 02:32:07,515 Train Loss: 7197.5289449, Val Loss: 320.3294176 +2025-02-27 02:32:07,516 Epoch 805/2000 +2025-02-27 02:32:25,594 Current Learning Rate: 0.0099845867 +2025-02-27 02:32:25,595 Train Loss: 251.8206719, Val Loss: 193.0665429 +2025-02-27 02:32:25,595 Epoch 806/2000 +2025-02-27 02:32:42,743 Current Learning Rate: 0.0099778098 +2025-02-27 02:32:42,744 Train Loss: 220.9611514, Val Loss: 184.0615793 +2025-02-27 02:32:42,744 Epoch 807/2000 +2025-02-27 02:32:59,688 Current Learning Rate: 0.0099698048 +2025-02-27 02:32:59,689 Train Loss: 196.5661679, Val Loss: 179.7199481 +2025-02-27 02:32:59,689 Epoch 808/2000 +2025-02-27 02:33:16,671 Current Learning Rate: 0.0099605735 +2025-02-27 02:33:16,672 Train Loss: 251.3929890, Val Loss: 175.4583488 +2025-02-27 02:33:16,672 Epoch 809/2000 +2025-02-27 02:33:33,083 Current Learning Rate: 0.0099501183 +2025-02-27 02:33:33,084 Train Loss: 12546.5492326, Val Loss: 516.9318830 +2025-02-27 02:33:33,084 Epoch 810/2000 +2025-02-27 02:33:49,734 Current Learning Rate: 0.0099384417 +2025-02-27 02:33:49,734 Train Loss: 524.1139461, Val Loss: 184.2797644 +2025-02-27 02:33:49,735 Epoch 811/2000 +2025-02-27 02:34:05,619 Current Learning Rate: 0.0099255466 +2025-02-27 02:34:05,620 Train Loss: 191.6138987, Val Loss: 166.1324472 +2025-02-27 02:34:05,620 Epoch 812/2000 +2025-02-27 02:34:21,287 Current Learning Rate: 0.0099114363 +2025-02-27 02:34:21,288 Train Loss: 174.6681392, Val Loss: 157.4391817 +2025-02-27 02:34:21,288 Epoch 813/2000 +2025-02-27 02:34:37,839 Current Learning Rate: 0.0098961141 +2025-02-27 02:34:37,839 Train Loss: 172.1663654, Val Loss: 150.3709363 +2025-02-27 02:34:37,840 Epoch 814/2000 +2025-02-27 02:34:53,690 Current Learning Rate: 0.0098795838 +2025-02-27 02:34:53,690 Train Loss: 158.8081175, Val Loss: 143.6361479 +2025-02-27 02:34:53,690 Epoch 815/2000 +2025-02-27 02:35:09,747 Current Learning Rate: 
0.0098618496 +2025-02-27 02:35:09,747 Train Loss: 153.3308100, Val Loss: 138.9618855 +2025-02-27 02:35:09,748 Epoch 816/2000 +2025-02-27 02:35:26,276 Current Learning Rate: 0.0098429158 +2025-02-27 02:35:26,276 Train Loss: 148.4321815, Val Loss: 133.9831959 +2025-02-27 02:35:26,277 Epoch 817/2000 +2025-02-27 02:35:42,470 Current Learning Rate: 0.0098227871 +2025-02-27 02:35:42,470 Train Loss: 143.3788405, Val Loss: 130.5873795 +2025-02-27 02:35:42,470 Epoch 818/2000 +2025-02-27 02:35:58,353 Current Learning Rate: 0.0098014684 +2025-02-27 02:35:58,354 Train Loss: 147.9812266, Val Loss: 124.7088296 +2025-02-27 02:35:58,354 Epoch 819/2000 +2025-02-27 02:36:15,403 Current Learning Rate: 0.0097789651 +2025-02-27 02:36:15,404 Train Loss: 159.7387333, Val Loss: 645.6029739 +2025-02-27 02:36:15,404 Epoch 820/2000 +2025-02-27 02:36:32,221 Current Learning Rate: 0.0097552826 +2025-02-27 02:36:32,222 Train Loss: 3579.7821224, Val Loss: 197.0089714 +2025-02-27 02:36:32,222 Epoch 821/2000 +2025-02-27 02:36:48,269 Current Learning Rate: 0.0097304268 +2025-02-27 02:36:48,270 Train Loss: 140.3369083, Val Loss: 117.5846666 +2025-02-27 02:36:48,270 Epoch 822/2000 +2025-02-27 02:37:04,660 Current Learning Rate: 0.0097044038 +2025-02-27 02:37:04,660 Train Loss: 126.8199007, Val Loss: 116.1466531 +2025-02-27 02:37:04,661 Epoch 823/2000 +2025-02-27 02:37:20,741 Current Learning Rate: 0.0096772202 +2025-02-27 02:37:20,742 Train Loss: 121.7262179, Val Loss: 109.0944699 +2025-02-27 02:37:20,742 Epoch 824/2000 +2025-02-27 02:37:36,661 Current Learning Rate: 0.0096488824 +2025-02-27 02:37:36,662 Train Loss: 3905.8873501, Val Loss: 2686.5945769 +2025-02-27 02:37:36,662 Epoch 825/2000 +2025-02-27 02:37:53,156 Current Learning Rate: 0.0096193977 +2025-02-27 02:37:53,157 Train Loss: 348.2379999, Val Loss: 117.5852294 +2025-02-27 02:37:53,157 Epoch 826/2000 +2025-02-27 02:38:09,651 Current Learning Rate: 0.0095887731 +2025-02-27 02:38:09,651 Train Loss: 119.1598081, Val Loss: 104.2045857 
+2025-02-27 02:38:09,652 Epoch 827/2000 +2025-02-27 02:38:26,164 Current Learning Rate: 0.0095570164 +2025-02-27 02:38:26,165 Train Loss: 110.5555671, Val Loss: 99.9458807 +2025-02-27 02:38:26,165 Epoch 828/2000 +2025-02-27 02:38:42,420 Current Learning Rate: 0.0095241353 +2025-02-27 02:38:42,420 Train Loss: 106.6629319, Val Loss: 95.3387525 +2025-02-27 02:38:42,420 Epoch 829/2000 +2025-02-27 02:38:59,149 Current Learning Rate: 0.0094901379 +2025-02-27 02:38:59,149 Train Loss: 106.3263350, Val Loss: 92.5676436 +2025-02-27 02:38:59,150 Epoch 830/2000 +2025-02-27 02:39:15,514 Current Learning Rate: 0.0094550326 +2025-02-27 02:39:15,515 Train Loss: 4021.3218386, Val Loss: 425.6604449 +2025-02-27 02:39:15,515 Epoch 831/2000 +2025-02-27 02:39:31,639 Current Learning Rate: 0.0094188282 +2025-02-27 02:39:31,640 Train Loss: 177.6070640, Val Loss: 97.0441015 +2025-02-27 02:39:31,640 Epoch 832/2000 +2025-02-27 02:39:48,558 Current Learning Rate: 0.0093815334 +2025-02-27 02:39:48,559 Train Loss: 106.7735937, Val Loss: 89.8043058 +2025-02-27 02:39:48,559 Epoch 833/2000 +2025-02-27 02:40:04,528 Current Learning Rate: 0.0093431576 +2025-02-27 02:40:04,528 Train Loss: 95.6456696, Val Loss: 85.8193042 +2025-02-27 02:40:04,528 Epoch 834/2000 +2025-02-27 02:40:20,985 Current Learning Rate: 0.0093037101 +2025-02-27 02:40:20,985 Train Loss: 91.3635387, Val Loss: 81.1924770 +2025-02-27 02:40:20,986 Epoch 835/2000 +2025-02-27 02:40:37,957 Current Learning Rate: 0.0092632008 +2025-02-27 02:40:37,958 Train Loss: 87.4807127, Val Loss: 78.3932785 +2025-02-27 02:40:37,958 Epoch 836/2000 +2025-02-27 02:40:54,741 Current Learning Rate: 0.0092216396 +2025-02-27 02:40:54,742 Train Loss: 89.7307242, Val Loss: 177.2976870 +2025-02-27 02:40:54,742 Epoch 837/2000 +2025-02-27 02:41:11,886 Current Learning Rate: 0.0091790368 +2025-02-27 02:41:11,887 Train Loss: 1526.4157255, Val Loss: 97.2816322 +2025-02-27 02:41:11,887 Epoch 838/2000 +2025-02-27 02:41:28,970 Current Learning Rate: 0.0091354029 
+2025-02-27 02:41:28,970 Train Loss: 89.8375880, Val Loss: 74.9025888 +2025-02-27 02:41:28,970 Epoch 839/2000 +2025-02-27 02:41:46,051 Current Learning Rate: 0.0090907486 +2025-02-27 02:41:46,052 Train Loss: 83.0786558, Val Loss: 75.5996014 +2025-02-27 02:41:46,052 Epoch 840/2000 +2025-02-27 02:42:04,428 Current Learning Rate: 0.0090450850 +2025-02-27 02:42:04,428 Train Loss: 1307.2973325, Val Loss: 1308.7188973 +2025-02-27 02:42:04,428 Epoch 841/2000 +2025-02-27 02:42:20,590 Current Learning Rate: 0.0089984233 +2025-02-27 02:42:20,591 Train Loss: 274.0321620, Val Loss: 70.9176524 +2025-02-27 02:42:20,591 Epoch 842/2000 +2025-02-27 02:42:37,659 Current Learning Rate: 0.0089507751 +2025-02-27 02:42:37,659 Train Loss: 75.7602583, Val Loss: 66.4967469 +2025-02-27 02:42:37,660 Epoch 843/2000 +2025-02-27 02:42:54,828 Current Learning Rate: 0.0089021520 +2025-02-27 02:42:54,829 Train Loss: 74.4492030, Val Loss: 67.8140118 +2025-02-27 02:42:54,829 Epoch 844/2000 +2025-02-27 02:43:10,334 Current Learning Rate: 0.0088525662 +2025-02-27 02:43:10,335 Train Loss: 1384.5724798, Val Loss: 1340.1012916 +2025-02-27 02:43:10,335 Epoch 845/2000 +2025-02-27 02:43:26,254 Current Learning Rate: 0.0088020298 +2025-02-27 02:43:26,254 Train Loss: 194.1544028, Val Loss: 64.8763437 +2025-02-27 02:43:26,254 Epoch 846/2000 +2025-02-27 02:43:42,180 Current Learning Rate: 0.0087505553 +2025-02-27 02:43:42,180 Train Loss: 68.0743915, Val Loss: 60.7990809 +2025-02-27 02:43:42,181 Epoch 847/2000 +2025-02-27 02:43:58,716 Current Learning Rate: 0.0086981555 +2025-02-27 02:43:58,717 Train Loss: 64.9687695, Val Loss: 60.5741708 +2025-02-27 02:43:58,717 Epoch 848/2000 +2025-02-27 02:44:15,454 Current Learning Rate: 0.0086448431 +2025-02-27 02:44:15,454 Train Loss: 62.1316241, Val Loss: 54.9124383 +2025-02-27 02:44:15,455 Epoch 849/2000 +2025-02-27 02:44:34,084 Current Learning Rate: 0.0085906315 +2025-02-27 02:44:34,086 Train Loss: 3672.1061250, Val Loss: 798.8334474 +2025-02-27 02:44:34,086 Epoch 
850/2000 +2025-02-27 02:44:50,391 Current Learning Rate: 0.0085355339 +2025-02-27 02:44:50,392 Train Loss: 172.5468115, Val Loss: 64.3097317 +2025-02-27 02:44:50,392 Epoch 851/2000 +2025-02-27 02:45:08,485 Current Learning Rate: 0.0084795640 +2025-02-27 02:45:08,485 Train Loss: 73.2886204, Val Loss: 56.6332347 +2025-02-27 02:45:08,486 Epoch 852/2000 +2025-02-27 02:45:25,900 Current Learning Rate: 0.0084227355 +2025-02-27 02:45:25,901 Train Loss: 61.4048523, Val Loss: 52.7990002 +2025-02-27 02:45:25,901 Epoch 853/2000 +2025-02-27 02:45:41,817 Current Learning Rate: 0.0083650626 +2025-02-27 02:45:41,818 Train Loss: 85.6101583, Val Loss: 50.3989301 +2025-02-27 02:45:41,818 Epoch 854/2000 +2025-02-27 02:45:56,716 Current Learning Rate: 0.0083065593 +2025-02-27 02:45:56,717 Train Loss: 53.5781132, Val Loss: 47.6373855 +2025-02-27 02:45:56,717 Epoch 855/2000 +2025-02-27 02:46:13,029 Current Learning Rate: 0.0082472402 +2025-02-27 02:46:13,030 Train Loss: 51.1784469, Val Loss: 46.0791543 +2025-02-27 02:46:13,030 Epoch 856/2000 +2025-02-27 02:46:28,593 Current Learning Rate: 0.0081871199 +2025-02-27 02:46:28,593 Train Loss: 48.8423306, Val Loss: 44.4109402 +2025-02-27 02:46:28,594 Epoch 857/2000 +2025-02-27 02:46:43,533 Current Learning Rate: 0.0081262133 +2025-02-27 02:46:43,533 Train Loss: 47.0920488, Val Loss: 42.1970028 +2025-02-27 02:46:43,534 Epoch 858/2000 +2025-02-27 02:46:58,642 Current Learning Rate: 0.0080645353 +2025-02-27 02:46:58,643 Train Loss: 45.4838047, Val Loss: 40.6024690 +2025-02-27 02:46:58,643 Epoch 859/2000 +2025-02-27 02:47:14,281 Current Learning Rate: 0.0080021011 +2025-02-27 02:47:14,282 Train Loss: 43.9850569, Val Loss: 40.1918769 +2025-02-27 02:47:14,282 Epoch 860/2000 +2025-02-27 02:47:29,923 Current Learning Rate: 0.0079389263 +2025-02-27 02:47:29,924 Train Loss: 42.6160395, Val Loss: 38.4890517 +2025-02-27 02:47:29,924 Epoch 861/2000 +2025-02-27 02:47:45,593 Current Learning Rate: 0.0078750263 +2025-02-27 02:47:45,593 Train Loss: 
400.6760731, Val Loss: 43.3211241 +2025-02-27 02:47:45,593 Epoch 862/2000 +2025-02-27 02:48:01,801 Current Learning Rate: 0.0078104169 +2025-02-27 02:48:01,802 Train Loss: 76.4590390, Val Loss: 38.0977740 +2025-02-27 02:48:01,802 Epoch 863/2000 +2025-02-27 02:48:18,252 Current Learning Rate: 0.0077451141 +2025-02-27 02:48:18,253 Train Loss: 40.8007521, Val Loss: 38.7237118 +2025-02-27 02:48:18,253 Epoch 864/2000 +2025-02-27 02:48:34,726 Current Learning Rate: 0.0076791340 +2025-02-27 02:48:34,727 Train Loss: 601.6315048, Val Loss: 112.3606305 +2025-02-27 02:48:34,727 Epoch 865/2000 +2025-02-27 02:48:50,529 Current Learning Rate: 0.0076124928 +2025-02-27 02:48:50,529 Train Loss: 121.7488498, Val Loss: 36.0283882 +2025-02-27 02:48:50,530 Epoch 866/2000 +2025-02-27 02:49:06,909 Current Learning Rate: 0.0075452071 +2025-02-27 02:49:06,910 Train Loss: 38.4260688, Val Loss: 33.7561429 +2025-02-27 02:49:06,910 Epoch 867/2000 +2025-02-27 02:49:23,362 Current Learning Rate: 0.0074772933 +2025-02-27 02:49:23,363 Train Loss: 36.3298024, Val Loss: 33.0773981 +2025-02-27 02:49:23,363 Epoch 868/2000 +2025-02-27 02:49:40,330 Current Learning Rate: 0.0074087684 +2025-02-27 02:49:40,331 Train Loss: 36.0487776, Val Loss: 31.1173491 +2025-02-27 02:49:40,331 Epoch 869/2000 +2025-02-27 02:49:56,440 Current Learning Rate: 0.0073396491 +2025-02-27 02:49:56,441 Train Loss: 1399.4110432, Val Loss: 459.7162504 +2025-02-27 02:49:56,441 Epoch 870/2000 +2025-02-27 02:50:13,147 Current Learning Rate: 0.0072699525 +2025-02-27 02:50:13,147 Train Loss: 92.1375653, Val Loss: 34.0438672 +2025-02-27 02:50:13,147 Epoch 871/2000 +2025-02-27 02:50:29,715 Current Learning Rate: 0.0071996958 +2025-02-27 02:50:29,716 Train Loss: 34.8517800, Val Loss: 30.3007890 +2025-02-27 02:50:29,716 Epoch 872/2000 +2025-02-27 02:50:46,769 Current Learning Rate: 0.0071288965 +2025-02-27 02:50:46,770 Train Loss: 32.5365167, Val Loss: 28.9793418 +2025-02-27 02:50:46,770 Epoch 873/2000 +2025-02-27 02:51:04,872 Current 
Learning Rate: 0.0070575718 +2025-02-27 02:51:04,872 Train Loss: 35.7095882, Val Loss: 27.9237574 +2025-02-27 02:51:04,872 Epoch 874/2000 +2025-02-27 02:51:22,212 Current Learning Rate: 0.0069857395 +2025-02-27 02:51:22,213 Train Loss: 30.1528406, Val Loss: 26.5960805 +2025-02-27 02:51:22,213 Epoch 875/2000 +2025-02-27 02:51:39,740 Current Learning Rate: 0.0069134172 +2025-02-27 02:51:39,740 Train Loss: 28.7684857, Val Loss: 26.2689376 +2025-02-27 02:51:39,740 Epoch 876/2000 +2025-02-27 02:51:56,830 Current Learning Rate: 0.0068406228 +2025-02-27 02:51:56,831 Train Loss: 28.1225498, Val Loss: 25.1151359 +2025-02-27 02:51:56,831 Epoch 877/2000 +2025-02-27 02:52:14,722 Current Learning Rate: 0.0067673742 +2025-02-27 02:52:14,722 Train Loss: 27.1168896, Val Loss: 24.1142938 +2025-02-27 02:52:14,723 Epoch 878/2000 +2025-02-27 02:52:32,239 Current Learning Rate: 0.0066936896 +2025-02-27 02:52:32,239 Train Loss: 27.6583084, Val Loss: 40.7675743 +2025-02-27 02:52:32,240 Epoch 879/2000 +2025-02-27 02:52:49,729 Current Learning Rate: 0.0066195871 +2025-02-27 02:52:49,729 Train Loss: 269.6544646, Val Loss: 36.7443677 +2025-02-27 02:52:49,729 Epoch 880/2000 +2025-02-27 02:53:07,498 Current Learning Rate: 0.0065450850 +2025-02-27 02:53:07,498 Train Loss: 26.8288458, Val Loss: 22.8917206 +2025-02-27 02:53:07,499 Epoch 881/2000 +2025-02-27 02:53:23,734 Current Learning Rate: 0.0064702016 +2025-02-27 02:53:23,735 Train Loss: 25.3616435, Val Loss: 22.4861713 +2025-02-27 02:53:23,735 Epoch 882/2000 +2025-02-27 02:53:39,279 Current Learning Rate: 0.0063949555 +2025-02-27 02:53:39,280 Train Loss: 256.6429502, Val Loss: 2762.3372871 +2025-02-27 02:53:39,280 Epoch 883/2000 +2025-02-27 02:53:54,966 Current Learning Rate: 0.0063193652 +2025-02-27 02:53:54,967 Train Loss: 249.6735573, Val Loss: 22.8537403 +2025-02-27 02:53:54,967 Epoch 884/2000 +2025-02-27 02:54:11,101 Current Learning Rate: 0.0062434494 +2025-02-27 02:54:11,101 Train Loss: 24.5439578, Val Loss: 21.4404965 +2025-02-27 
02:54:11,101 Epoch 885/2000 +2025-02-27 02:54:28,230 Current Learning Rate: 0.0061672268 +2025-02-27 02:54:28,230 Train Loss: 23.5935488, Val Loss: 20.7156164 +2025-02-27 02:54:28,230 Epoch 886/2000 +2025-02-27 02:54:45,401 Current Learning Rate: 0.0060907162 +2025-02-27 02:54:45,401 Train Loss: 22.5262473, Val Loss: 19.9750955 +2025-02-27 02:54:45,401 Epoch 887/2000 +2025-02-27 02:55:02,987 Current Learning Rate: 0.0060139365 +2025-02-27 02:55:02,988 Train Loss: 29.0096925, Val Loss: 83.7061542 +2025-02-27 02:55:02,988 Epoch 888/2000 +2025-02-27 02:55:20,489 Current Learning Rate: 0.0059369066 +2025-02-27 02:55:20,489 Train Loss: 72.3376170, Val Loss: 19.9279458 +2025-02-27 02:55:20,490 Epoch 889/2000 +2025-02-27 02:55:37,407 Current Learning Rate: 0.0058596455 +2025-02-27 02:55:37,407 Train Loss: 544.6078932, Val Loss: 120.4024147 +2025-02-27 02:55:37,408 Epoch 890/2000 +2025-02-27 02:55:55,277 Current Learning Rate: 0.0057821723 +2025-02-27 02:55:55,277 Train Loss: 41.0759531, Val Loss: 19.4093549 +2025-02-27 02:55:55,277 Epoch 891/2000 +2025-02-27 02:56:12,819 Current Learning Rate: 0.0057045062 +2025-02-27 02:56:12,820 Train Loss: 20.8953431, Val Loss: 18.5237129 +2025-02-27 02:56:12,820 Epoch 892/2000 +2025-02-27 02:56:30,001 Current Learning Rate: 0.0056266662 +2025-02-27 02:56:30,002 Train Loss: 20.1109078, Val Loss: 17.7910590 +2025-02-27 02:56:30,002 Epoch 893/2000 +2025-02-27 02:56:45,472 Current Learning Rate: 0.0055486716 +2025-02-27 02:56:45,472 Train Loss: 19.4519416, Val Loss: 17.1549914 +2025-02-27 02:56:45,473 Epoch 894/2000 +2025-02-27 02:57:00,523 Current Learning Rate: 0.0054705416 +2025-02-27 02:57:00,523 Train Loss: 19.0878717, Val Loss: 17.1324900 +2025-02-27 02:57:00,524 Epoch 895/2000 +2025-02-27 02:57:15,177 Current Learning Rate: 0.0053922955 +2025-02-27 02:57:15,177 Train Loss: 18.4679302, Val Loss: 16.8056419 +2025-02-27 02:57:15,178 Epoch 896/2000 +2025-02-27 02:57:31,180 Current Learning Rate: 0.0053139526 +2025-02-27 02:57:31,181 
Train Loss: 21.7836853, Val Loss: 82.4296343 +2025-02-27 02:57:31,181 Epoch 897/2000 +2025-02-27 02:57:46,219 Current Learning Rate: 0.0052355323 +2025-02-27 02:57:46,220 Train Loss: 55.0799156, Val Loss: 31.3922037 +2025-02-27 02:57:46,220 Epoch 898/2000 +2025-02-27 02:58:03,410 Current Learning Rate: 0.0051570538 +2025-02-27 02:58:03,411 Train Loss: 192.4111281, Val Loss: 25.5644341 +2025-02-27 02:58:03,411 Epoch 899/2000 +2025-02-27 02:58:19,217 Current Learning Rate: 0.0050785366 +2025-02-27 02:58:19,218 Train Loss: 22.6632336, Val Loss: 15.6403241 +2025-02-27 02:58:19,218 Epoch 900/2000 +2025-02-27 02:58:35,677 Current Learning Rate: 0.0050000000 +2025-02-27 02:58:35,677 Train Loss: 17.1785988, Val Loss: 14.9738395 +2025-02-27 02:58:35,678 Epoch 901/2000 +2025-02-27 02:58:51,637 Current Learning Rate: 0.0049214634 +2025-02-27 02:58:51,637 Train Loss: 19.5248897, Val Loss: 15.3419066 +2025-02-27 02:58:51,638 Epoch 902/2000 +2025-02-27 02:59:07,963 Current Learning Rate: 0.0048429462 +2025-02-27 02:59:07,963 Train Loss: 197.2493533, Val Loss: 98.0987341 +2025-02-27 02:59:07,964 Epoch 903/2000 +2025-02-27 02:59:24,124 Current Learning Rate: 0.0047644677 +2025-02-27 02:59:24,124 Train Loss: 33.8039732, Val Loss: 14.7225510 +2025-02-27 02:59:24,124 Epoch 904/2000 +2025-02-27 02:59:39,818 Current Learning Rate: 0.0046860474 +2025-02-27 02:59:39,818 Train Loss: 18.0327752, Val Loss: 13.8852007 +2025-02-27 02:59:39,818 Epoch 905/2000 +2025-02-27 02:59:55,670 Current Learning Rate: 0.0046077045 +2025-02-27 02:59:55,671 Train Loss: 15.5645658, Val Loss: 14.8409225 +2025-02-27 02:59:55,671 Epoch 906/2000 +2025-02-27 03:00:12,192 Current Learning Rate: 0.0045294584 +2025-02-27 03:00:12,192 Train Loss: 15.9296603, Val Loss: 14.2718120 +2025-02-27 03:00:12,192 Epoch 907/2000 +2025-02-27 03:00:27,285 Current Learning Rate: 0.0044513284 +2025-02-27 03:00:27,285 Train Loss: 180.9968193, Val Loss: 88.0805483 +2025-02-27 03:00:27,286 Epoch 908/2000 +2025-02-27 03:00:43,408 
Current Learning Rate: 0.0043733338 +2025-02-27 03:00:43,408 Train Loss: 30.3002493, Val Loss: 13.4233654 +2025-02-27 03:00:43,409 Epoch 909/2000 +2025-02-27 03:00:59,083 Current Learning Rate: 0.0042954938 +2025-02-27 03:00:59,084 Train Loss: 14.5903423, Val Loss: 12.8253957 +2025-02-27 03:00:59,084 Epoch 910/2000 +2025-02-27 03:01:15,018 Current Learning Rate: 0.0042178277 +2025-02-27 03:01:15,018 Train Loss: 14.2328229, Val Loss: 12.6270396 +2025-02-27 03:01:15,019 Epoch 911/2000 +2025-02-27 03:01:30,327 Current Learning Rate: 0.0041403545 +2025-02-27 03:01:30,328 Train Loss: 13.9884400, Val Loss: 12.0926792 +2025-02-27 03:01:30,328 Epoch 912/2000 +2025-02-27 03:01:45,813 Current Learning Rate: 0.0040630934 +2025-02-27 03:01:45,814 Train Loss: 39.9089928, Val Loss: 413.4882349 +2025-02-27 03:01:45,814 Epoch 913/2000 +2025-02-27 03:02:02,477 Current Learning Rate: 0.0039860635 +2025-02-27 03:02:02,478 Train Loss: 56.1470008, Val Loss: 12.3865464 +2025-02-27 03:02:02,478 Epoch 914/2000 +2025-02-27 03:02:20,045 Current Learning Rate: 0.0039092838 +2025-02-27 03:02:20,046 Train Loss: 12.9228670, Val Loss: 11.3965319 +2025-02-27 03:02:20,046 Epoch 915/2000 +2025-02-27 03:02:36,914 Current Learning Rate: 0.0038327732 +2025-02-27 03:02:36,915 Train Loss: 12.9671948, Val Loss: 11.6971508 +2025-02-27 03:02:36,915 Epoch 916/2000 +2025-02-27 03:02:53,890 Current Learning Rate: 0.0037565506 +2025-02-27 03:02:53,890 Train Loss: 210.2849419, Val Loss: 57.7945414 +2025-02-27 03:02:53,890 Epoch 917/2000 +2025-02-27 03:03:11,061 Current Learning Rate: 0.0036806348 +2025-02-27 03:03:11,061 Train Loss: 18.9354907, Val Loss: 11.3863016 +2025-02-27 03:03:11,061 Epoch 918/2000 +2025-02-27 03:03:27,984 Current Learning Rate: 0.0036050445 +2025-02-27 03:03:27,984 Train Loss: 12.8032182, Val Loss: 10.9456085 +2025-02-27 03:03:27,985 Epoch 919/2000 +2025-02-27 03:03:45,095 Current Learning Rate: 0.0035297984 +2025-02-27 03:03:45,095 Train Loss: 12.1857278, Val Loss: 11.0589054 
+2025-02-27 03:03:45,095 Epoch 920/2000 +2025-02-27 03:04:01,754 Current Learning Rate: 0.0034549150 +2025-02-27 03:04:01,754 Train Loss: 12.0096531, Val Loss: 11.4110344 +2025-02-27 03:04:01,755 Epoch 921/2000 +2025-02-27 03:04:18,664 Current Learning Rate: 0.0033804129 +2025-02-27 03:04:18,664 Train Loss: 12.1681461, Val Loss: 13.6334529 +2025-02-27 03:04:18,664 Epoch 922/2000 +2025-02-27 03:04:35,771 Current Learning Rate: 0.0033063104 +2025-02-27 03:04:35,772 Train Loss: 12.4363657, Val Loss: 12.2644529 +2025-02-27 03:04:35,772 Epoch 923/2000 +2025-02-27 03:04:51,551 Current Learning Rate: 0.0032326258 +2025-02-27 03:04:51,552 Train Loss: 135.6886754, Val Loss: 33.6821047 +2025-02-27 03:04:51,552 Epoch 924/2000 +2025-02-27 03:05:08,340 Current Learning Rate: 0.0031593772 +2025-02-27 03:05:08,341 Train Loss: 14.4788178, Val Loss: 10.0984946 +2025-02-27 03:05:08,341 Epoch 925/2000 +2025-02-27 03:05:24,628 Current Learning Rate: 0.0030865828 +2025-02-27 03:05:24,628 Train Loss: 10.9162867, Val Loss: 9.6282216 +2025-02-27 03:05:24,629 Epoch 926/2000 +2025-02-27 03:05:40,896 Current Learning Rate: 0.0030142605 +2025-02-27 03:05:40,896 Train Loss: 10.4979874, Val Loss: 9.3946536 +2025-02-27 03:05:40,897 Epoch 927/2000 +2025-02-27 03:05:57,719 Current Learning Rate: 0.0029424282 +2025-02-27 03:05:57,719 Train Loss: 10.3694788, Val Loss: 9.6469504 +2025-02-27 03:05:57,719 Epoch 928/2000 +2025-02-27 03:06:15,439 Current Learning Rate: 0.0028711035 +2025-02-27 03:06:15,439 Train Loss: 10.1120553, Val Loss: 9.0487936 +2025-02-27 03:06:15,440 Epoch 929/2000 +2025-02-27 03:06:32,184 Current Learning Rate: 0.0028003042 +2025-02-27 03:06:32,184 Train Loss: 9.9491565, Val Loss: 9.3433308 +2025-02-27 03:06:32,184 Epoch 930/2000 +2025-02-27 03:06:48,838 Current Learning Rate: 0.0027300475 +2025-02-27 03:06:48,838 Train Loss: 9.8345806, Val Loss: 8.9019987 +2025-02-27 03:06:48,839 Epoch 931/2000 +2025-02-27 03:07:05,916 Current Learning Rate: 0.0026603509 +2025-02-27 03:07:05,916 
Train Loss: 10.1282690, Val Loss: 8.7645205 +2025-02-27 03:07:05,916 Epoch 932/2000 +2025-02-27 03:07:22,256 Current Learning Rate: 0.0025912316 +2025-02-27 03:07:22,257 Train Loss: 18.7097183, Val Loss: 172.3200963 +2025-02-27 03:07:22,257 Epoch 933/2000 +2025-02-27 03:07:39,018 Current Learning Rate: 0.0025227067 +2025-02-27 03:07:39,019 Train Loss: 27.2072984, Val Loss: 8.3824286 +2025-02-27 03:07:39,019 Epoch 934/2000 +2025-02-27 03:07:55,645 Current Learning Rate: 0.0024547929 +2025-02-27 03:07:55,645 Train Loss: 9.2258420, Val Loss: 8.3737184 +2025-02-27 03:07:55,645 Epoch 935/2000 +2025-02-27 03:08:12,838 Current Learning Rate: 0.0023875072 +2025-02-27 03:08:12,838 Train Loss: 9.2426407, Val Loss: 8.1876913 +2025-02-27 03:08:12,839 Epoch 936/2000 +2025-02-27 03:08:29,703 Current Learning Rate: 0.0023208660 +2025-02-27 03:08:29,703 Train Loss: 9.0634517, Val Loss: 8.2022808 +2025-02-27 03:08:29,703 Epoch 937/2000 +2025-02-27 03:08:46,607 Current Learning Rate: 0.0022548859 +2025-02-27 03:08:46,607 Train Loss: 14.5177630, Val Loss: 90.5725692 +2025-02-27 03:08:46,608 Epoch 938/2000 +2025-02-27 03:09:03,355 Current Learning Rate: 0.0021895831 +2025-02-27 03:09:03,356 Train Loss: 21.4962244, Val Loss: 8.5034419 +2025-02-27 03:09:03,356 Epoch 939/2000 +2025-02-27 03:09:19,848 Current Learning Rate: 0.0021249737 +2025-02-27 03:09:19,848 Train Loss: 8.9140935, Val Loss: 7.6943757 +2025-02-27 03:09:19,849 Epoch 940/2000 +2025-02-27 03:09:36,837 Current Learning Rate: 0.0020610737 +2025-02-27 03:09:36,837 Train Loss: 8.4747995, Val Loss: 7.8567216 +2025-02-27 03:09:36,837 Epoch 941/2000 +2025-02-27 03:09:53,708 Current Learning Rate: 0.0019978989 +2025-02-27 03:09:53,709 Train Loss: 8.7190676, Val Loss: 12.1120616 +2025-02-27 03:09:53,709 Epoch 942/2000 +2025-02-27 03:10:10,139 Current Learning Rate: 0.0019354647 +2025-02-27 03:10:10,139 Train Loss: 8.9440489, Val Loss: 9.0065381 +2025-02-27 03:10:10,140 Epoch 943/2000 +2025-02-27 03:10:25,174 Current Learning Rate: 
0.0018737867 +2025-02-27 03:10:25,174 Train Loss: 12.2022878, Val Loss: 46.3731596 +2025-02-27 03:10:25,175 Epoch 944/2000 +2025-02-27 03:10:41,040 Current Learning Rate: 0.0018128801 +2025-02-27 03:10:41,041 Train Loss: 12.5172855, Val Loss: 7.4697708 +2025-02-27 03:10:41,041 Epoch 945/2000 +2025-02-27 03:10:55,810 Current Learning Rate: 0.0017527598 +2025-02-27 03:10:55,810 Train Loss: 8.2003655, Val Loss: 7.4250511 +2025-02-27 03:10:55,810 Epoch 946/2000 +2025-02-27 03:11:11,014 Current Learning Rate: 0.0016934407 +2025-02-27 03:11:11,014 Train Loss: 7.9656392, Val Loss: 7.6586844 +2025-02-27 03:11:11,015 Epoch 947/2000 +2025-02-27 03:11:26,320 Current Learning Rate: 0.0016349374 +2025-02-27 03:11:26,321 Train Loss: 7.9317389, Val Loss: 7.1110834 +2025-02-27 03:11:26,321 Epoch 948/2000 +2025-02-27 03:11:41,818 Current Learning Rate: 0.0015772645 +2025-02-27 03:11:41,819 Train Loss: 7.8189537, Val Loss: 7.0458055 +2025-02-27 03:11:41,819 Epoch 949/2000 +2025-02-27 03:11:57,665 Current Learning Rate: 0.0015204360 +2025-02-27 03:11:57,665 Train Loss: 7.6388183, Val Loss: 6.8599221 +2025-02-27 03:11:57,666 Epoch 950/2000 +2025-02-27 03:12:13,583 Current Learning Rate: 0.0014644661 +2025-02-27 03:12:13,583 Train Loss: 7.4358895, Val Loss: 6.9205868 +2025-02-27 03:12:13,584 Epoch 951/2000 +2025-02-27 03:12:29,746 Current Learning Rate: 0.0014093685 +2025-02-27 03:12:29,746 Train Loss: 7.4072808, Val Loss: 6.8128099 +2025-02-27 03:12:29,746 Epoch 952/2000 +2025-02-27 03:12:45,476 Current Learning Rate: 0.0013551569 +2025-02-27 03:12:45,477 Train Loss: 7.2981099, Val Loss: 6.6681543 +2025-02-27 03:12:45,477 Epoch 953/2000 +2025-02-27 03:13:01,286 Current Learning Rate: 0.0013018445 +2025-02-27 03:13:01,287 Train Loss: 7.2459691, Val Loss: 6.5688457 +2025-02-27 03:13:01,287 Epoch 954/2000 +2025-02-27 03:13:16,855 Current Learning Rate: 0.0012494447 +2025-02-27 03:13:16,856 Train Loss: 7.1666610, Val Loss: 6.4890816 +2025-02-27 03:13:16,856 Epoch 955/2000 +2025-02-27 
03:13:31,950 Current Learning Rate: 0.0011979702 +2025-02-27 03:13:31,951 Train Loss: 7.1084032, Val Loss: 6.4513433 +2025-02-27 03:13:31,951 Epoch 956/2000 +2025-02-27 03:13:47,738 Current Learning Rate: 0.0011474338 +2025-02-27 03:13:47,738 Train Loss: 7.0225079, Val Loss: 6.3749053 +2025-02-27 03:13:47,739 Epoch 957/2000 +2025-02-27 03:14:03,565 Current Learning Rate: 0.0010978480 +2025-02-27 03:14:03,565 Train Loss: 6.9330170, Val Loss: 6.3117030 +2025-02-27 03:14:03,566 Epoch 958/2000 +2025-02-27 03:14:18,926 Current Learning Rate: 0.0010492249 +2025-02-27 03:14:18,927 Train Loss: 6.9066356, Val Loss: 6.3035065 +2025-02-27 03:14:18,927 Epoch 959/2000 +2025-02-27 03:14:34,358 Current Learning Rate: 0.0010015767 +2025-02-27 03:14:34,359 Train Loss: 6.8192449, Val Loss: 6.2170945 +2025-02-27 03:14:34,359 Epoch 960/2000 +2025-02-27 03:14:49,663 Current Learning Rate: 0.0009549150 +2025-02-27 03:14:49,664 Train Loss: 6.7445138, Val Loss: 6.1582976 +2025-02-27 03:14:49,664 Epoch 961/2000 +2025-02-27 03:15:05,355 Current Learning Rate: 0.0009092514 +2025-02-27 03:15:05,356 Train Loss: 6.6877058, Val Loss: 6.1103941 +2025-02-27 03:15:05,356 Epoch 962/2000 +2025-02-27 03:15:20,987 Current Learning Rate: 0.0008645971 +2025-02-27 03:15:20,988 Train Loss: 6.6339962, Val Loss: 6.0625465 +2025-02-27 03:15:20,988 Epoch 963/2000 +2025-02-27 03:15:37,284 Current Learning Rate: 0.0008209632 +2025-02-27 03:15:37,284 Train Loss: 6.5743512, Val Loss: 6.0077208 +2025-02-27 03:15:37,284 Epoch 964/2000 +2025-02-27 03:15:53,666 Current Learning Rate: 0.0007783604 +2025-02-27 03:15:53,667 Train Loss: 6.4991199, Val Loss: 5.9637801 +2025-02-27 03:15:53,667 Epoch 965/2000 +2025-02-27 03:16:09,447 Current Learning Rate: 0.0007367992 +2025-02-27 03:16:09,448 Train Loss: 6.4593592, Val Loss: 5.8987018 +2025-02-27 03:16:09,448 Epoch 966/2000 +2025-02-27 03:16:24,393 Current Learning Rate: 0.0006962899 +2025-02-27 03:16:24,393 Train Loss: 6.4104914, Val Loss: 5.8625920 +2025-02-27 
03:16:24,394 Epoch 967/2000 +2025-02-27 03:16:39,133 Current Learning Rate: 0.0006568424 +2025-02-27 03:16:39,134 Train Loss: 6.4046539, Val Loss: 5.8383375 +2025-02-27 03:16:39,134 Epoch 968/2000 +2025-02-27 03:16:53,716 Current Learning Rate: 0.0006184666 +2025-02-27 03:16:53,717 Train Loss: 6.3335618, Val Loss: 5.7847491 +2025-02-27 03:16:53,717 Epoch 969/2000 +2025-02-27 03:17:09,222 Current Learning Rate: 0.0005811718 +2025-02-27 03:17:09,223 Train Loss: 6.3111957, Val Loss: 5.7474317 +2025-02-27 03:17:09,223 Epoch 970/2000 +2025-02-27 03:17:24,953 Current Learning Rate: 0.0005449674 +2025-02-27 03:17:24,954 Train Loss: 6.2801112, Val Loss: 5.6993717 +2025-02-27 03:17:24,954 Epoch 971/2000 +2025-02-27 03:17:40,774 Current Learning Rate: 0.0005098621 +2025-02-27 03:17:40,774 Train Loss: 6.2005227, Val Loss: 5.6866323 +2025-02-27 03:17:40,775 Epoch 972/2000 +2025-02-27 03:17:55,685 Current Learning Rate: 0.0004758647 +2025-02-27 03:17:55,686 Train Loss: 6.1890073, Val Loss: 5.6441433 +2025-02-27 03:17:55,686 Epoch 973/2000 +2025-02-27 03:18:11,650 Current Learning Rate: 0.0004429836 +2025-02-27 03:18:11,650 Train Loss: 6.1412195, Val Loss: 5.6173217 +2025-02-27 03:18:11,650 Epoch 974/2000 +2025-02-27 03:18:26,583 Current Learning Rate: 0.0004112269 +2025-02-27 03:18:26,583 Train Loss: 6.1316154, Val Loss: 5.5858827 +2025-02-27 03:18:26,584 Epoch 975/2000 +2025-02-27 03:18:41,963 Current Learning Rate: 0.0003806023 +2025-02-27 03:18:41,964 Train Loss: 6.0974284, Val Loss: 5.5541644 +2025-02-27 03:18:41,965 Epoch 976/2000 +2025-02-27 03:18:57,245 Current Learning Rate: 0.0003511176 +2025-02-27 03:18:57,246 Train Loss: 6.0591652, Val Loss: 5.5505251 +2025-02-27 03:18:57,246 Epoch 977/2000 +2025-02-27 03:19:12,604 Current Learning Rate: 0.0003227798 +2025-02-27 03:19:12,605 Train Loss: 6.0022498, Val Loss: 5.5097420 +2025-02-27 03:19:12,605 Epoch 978/2000 +2025-02-27 03:19:27,718 Current Learning Rate: 0.0002955962 +2025-02-27 03:19:27,719 Train Loss: 6.0130710, Val 
Loss: 5.4750194 +2025-02-27 03:19:27,719 Epoch 979/2000 +2025-02-27 03:19:42,840 Current Learning Rate: 0.0002695732 +2025-02-27 03:19:42,841 Train Loss: 5.9622745, Val Loss: 5.4631203 +2025-02-27 03:19:42,841 Epoch 980/2000 +2025-02-27 03:19:57,789 Current Learning Rate: 0.0002447174 +2025-02-27 03:19:57,789 Train Loss: 5.9407453, Val Loss: 5.4343337 +2025-02-27 03:19:57,790 Epoch 981/2000 +2025-02-27 03:20:13,400 Current Learning Rate: 0.0002210349 +2025-02-27 03:20:13,400 Train Loss: 5.9391218, Val Loss: 5.4175396 +2025-02-27 03:20:13,400 Epoch 982/2000 +2025-02-27 03:20:28,528 Current Learning Rate: 0.0001985316 +2025-02-27 03:20:28,529 Train Loss: 5.9038618, Val Loss: 5.4104194 +2025-02-27 03:20:28,529 Epoch 983/2000 +2025-02-27 03:20:43,999 Current Learning Rate: 0.0001772129 +2025-02-27 03:20:44,000 Train Loss: 5.8898213, Val Loss: 5.3855594 +2025-02-27 03:20:44,000 Epoch 984/2000 +2025-02-27 03:20:58,680 Current Learning Rate: 0.0001570842 +2025-02-27 03:20:58,680 Train Loss: 5.8816656, Val Loss: 5.3727241 +2025-02-27 03:20:58,681 Epoch 985/2000 +2025-02-27 03:21:14,956 Current Learning Rate: 0.0001381504 +2025-02-27 03:21:14,956 Train Loss: 5.8617404, Val Loss: 5.3590215 +2025-02-27 03:21:14,956 Epoch 986/2000 +2025-02-27 03:21:30,058 Current Learning Rate: 0.0001204162 +2025-02-27 03:21:30,058 Train Loss: 5.8702913, Val Loss: 5.3451514 +2025-02-27 03:21:30,058 Epoch 987/2000 +2025-02-27 03:21:45,877 Current Learning Rate: 0.0001038859 +2025-02-27 03:21:45,877 Train Loss: 5.8204239, Val Loss: 5.3343595 +2025-02-27 03:21:45,877 Epoch 988/2000 +2025-02-27 03:22:00,999 Current Learning Rate: 0.0000885637 +2025-02-27 03:22:00,999 Train Loss: 5.8307124, Val Loss: 5.3138961 +2025-02-27 03:22:01,000 Epoch 989/2000 +2025-02-27 03:22:16,441 Current Learning Rate: 0.0000744534 +2025-02-27 03:22:16,441 Train Loss: 5.7916748, Val Loss: 5.3202149 +2025-02-27 03:22:16,441 Epoch 990/2000 +2025-02-27 03:22:32,504 Current Learning Rate: 0.0000615583 +2025-02-27 
03:22:32,504 Train Loss: 5.7958328, Val Loss: 5.3036842 +2025-02-27 03:22:32,505 Epoch 991/2000 +2025-02-27 03:22:47,968 Current Learning Rate: 0.0000498817 +2025-02-27 03:22:47,969 Train Loss: 5.8056704, Val Loss: 5.2984901 +2025-02-27 03:22:47,969 Epoch 992/2000 +2025-02-27 03:23:04,349 Current Learning Rate: 0.0000394265 +2025-02-27 03:23:04,350 Train Loss: 5.7773689, Val Loss: 5.2992494 +2025-02-27 03:23:04,350 Epoch 993/2000 +2025-02-27 03:23:20,503 Current Learning Rate: 0.0000301952 +2025-02-27 03:23:20,503 Train Loss: 5.8010773, Val Loss: 5.2955196 +2025-02-27 03:23:20,504 Epoch 994/2000 +2025-02-27 03:23:36,858 Current Learning Rate: 0.0000221902 +2025-02-27 03:23:36,859 Train Loss: 5.7804454, Val Loss: 5.2957078 +2025-02-27 03:23:36,859 Epoch 995/2000 +2025-02-27 03:23:53,699 Current Learning Rate: 0.0000154133 +2025-02-27 03:23:53,700 Train Loss: 5.7955156, Val Loss: 5.2834399 +2025-02-27 03:23:53,700 Epoch 996/2000 +2025-02-27 03:24:08,607 Current Learning Rate: 0.0000098664 +2025-02-27 03:24:08,608 Train Loss: 5.7950167, Val Loss: 5.2849460 +2025-02-27 03:24:08,608 Epoch 997/2000 +2025-02-27 03:24:23,465 Current Learning Rate: 0.0000055506 +2025-02-27 03:24:23,466 Train Loss: 5.7754421, Val Loss: 5.2881894 +2025-02-27 03:24:23,466 Epoch 998/2000 +2025-02-27 03:24:38,863 Current Learning Rate: 0.0000024672 +2025-02-27 03:24:38,864 Train Loss: 5.7640639, Val Loss: 5.2949631 +2025-02-27 03:24:38,865 Epoch 999/2000 +2025-02-27 03:24:53,600 Current Learning Rate: 0.0000006168 +2025-02-27 03:24:53,601 Train Loss: 5.7647340, Val Loss: 5.2871308 +2025-02-27 03:24:53,601 Epoch 1000/2000 +2025-02-27 03:25:10,331 Current Learning Rate: 0.0000000000 +2025-02-27 03:25:10,331 Train Loss: 5.7739924, Val Loss: 5.2856528 +2025-02-27 03:25:10,332 Epoch 1001/2000 +2025-02-27 03:25:26,253 Current Learning Rate: 0.0000006168 +2025-02-27 03:25:26,254 Train Loss: 5.7671816, Val Loss: 5.2930895 +2025-02-27 03:25:26,254 Epoch 1002/2000 +2025-02-27 03:25:42,219 Current Learning 
Rate: 0.0000024672 +2025-02-27 03:25:42,220 Train Loss: 5.7562616, Val Loss: 5.2827402 +2025-02-27 03:25:42,220 Epoch 1003/2000 +2025-02-27 03:25:58,516 Current Learning Rate: 0.0000055506 +2025-02-27 03:25:58,517 Train Loss: 5.7974168, Val Loss: 5.2857321 +2025-02-27 03:25:58,517 Epoch 1004/2000 +2025-02-27 03:26:14,475 Current Learning Rate: 0.0000098664 +2025-02-27 03:26:14,475 Train Loss: 5.7735963, Val Loss: 5.2839974 +2025-02-27 03:26:14,476 Epoch 1005/2000 +2025-02-27 03:26:30,711 Current Learning Rate: 0.0000154133 +2025-02-27 03:26:30,712 Train Loss: 5.7734112, Val Loss: 5.2879652 +2025-02-27 03:26:30,712 Epoch 1006/2000 +2025-02-27 03:26:46,839 Current Learning Rate: 0.0000221902 +2025-02-27 03:26:46,839 Train Loss: 5.7786027, Val Loss: 5.2845863 +2025-02-27 03:26:46,840 Epoch 1007/2000 +2025-02-27 03:27:03,215 Current Learning Rate: 0.0000301952 +2025-02-27 03:27:03,216 Train Loss: 5.7566285, Val Loss: 5.2711690 +2025-02-27 03:27:03,216 Epoch 1008/2000 +2025-02-27 03:27:19,560 Current Learning Rate: 0.0000394265 +2025-02-27 03:27:19,561 Train Loss: 5.7454259, Val Loss: 5.2620144 +2025-02-27 03:27:19,561 Epoch 1009/2000 +2025-02-27 03:27:34,392 Current Learning Rate: 0.0000498817 +2025-02-27 03:27:34,393 Train Loss: 5.7419360, Val Loss: 5.2738506 +2025-02-27 03:27:34,393 Epoch 1010/2000 +2025-02-27 03:27:49,924 Current Learning Rate: 0.0000615583 +2025-02-27 03:27:49,925 Train Loss: 5.7354718, Val Loss: 5.2572275 +2025-02-27 03:27:49,925 Epoch 1011/2000 +2025-02-27 03:28:06,077 Current Learning Rate: 0.0000744534 +2025-02-27 03:28:06,077 Train Loss: 5.7437107, Val Loss: 5.2442890 +2025-02-27 03:28:06,077 Epoch 1012/2000 +2025-02-27 03:28:22,147 Current Learning Rate: 0.0000885637 +2025-02-27 03:28:22,148 Train Loss: 5.7230677, Val Loss: 5.2348942 +2025-02-27 03:28:22,148 Epoch 1013/2000 +2025-02-27 03:28:38,320 Current Learning Rate: 0.0001038859 +2025-02-27 03:28:38,320 Train Loss: 5.7180977, Val Loss: 5.2123134 +2025-02-27 03:28:38,321 Epoch 1014/2000 
+2025-02-27 03:28:54,487 Current Learning Rate: 0.0001204162 +2025-02-27 03:28:54,488 Train Loss: 5.6741603, Val Loss: 5.2010664 +2025-02-27 03:28:54,488 Epoch 1015/2000 +2025-02-27 03:29:10,310 Current Learning Rate: 0.0001381504 +2025-02-27 03:29:10,310 Train Loss: 5.6842746, Val Loss: 5.1658524 +2025-02-27 03:29:10,311 Epoch 1016/2000 +2025-02-27 03:29:26,844 Current Learning Rate: 0.0001570842 +2025-02-27 03:29:26,845 Train Loss: 5.6457107, Val Loss: 5.1458179 +2025-02-27 03:29:26,845 Epoch 1017/2000 +2025-02-27 03:29:43,758 Current Learning Rate: 0.0001772129 +2025-02-27 03:29:43,758 Train Loss: 5.6114350, Val Loss: 5.1263478 +2025-02-27 03:29:43,759 Epoch 1018/2000 +2025-02-27 03:29:59,227 Current Learning Rate: 0.0001985316 +2025-02-27 03:29:59,227 Train Loss: 5.6266210, Val Loss: 5.1258993 +2025-02-27 03:29:59,227 Epoch 1019/2000 +2025-02-27 03:30:15,812 Current Learning Rate: 0.0002210349 +2025-02-27 03:30:15,812 Train Loss: 19.9024475, Val Loss: 5.0791854 +2025-02-27 03:30:15,812 Epoch 1020/2000 +2025-02-27 03:30:32,595 Current Learning Rate: 0.0002447174 +2025-02-27 03:30:32,596 Train Loss: 5.5685541, Val Loss: 5.0214074 +2025-02-27 03:30:32,596 Epoch 1021/2000 +2025-02-27 03:30:49,190 Current Learning Rate: 0.0002695732 +2025-02-27 03:30:49,191 Train Loss: 6.0222626, Val Loss: 5.0100686 +2025-02-27 03:30:49,191 Epoch 1022/2000 +2025-02-27 03:31:06,076 Current Learning Rate: 0.0002955962 +2025-02-27 03:31:06,077 Train Loss: 19.0660997, Val Loss: 4.9304446 +2025-02-27 03:31:06,077 Epoch 1023/2000 +2025-02-27 03:31:22,932 Current Learning Rate: 0.0003227798 +2025-02-27 03:31:22,932 Train Loss: 5.4847562, Val Loss: 4.8634010 +2025-02-27 03:31:22,932 Epoch 1024/2000 +2025-02-27 03:31:38,865 Current Learning Rate: 0.0003511176 +2025-02-27 03:31:38,866 Train Loss: 40.3327260, Val Loss: 6.8030409 +2025-02-27 03:31:38,866 Epoch 1025/2000 +2025-02-27 03:31:55,121 Current Learning Rate: 0.0003806023 +2025-02-27 03:31:55,122 Train Loss: 5.5831396, Val Loss: 
4.8039556 +2025-02-27 03:31:55,122 Epoch 1026/2000 +2025-02-27 03:32:11,348 Current Learning Rate: 0.0004112269 +2025-02-27 03:32:11,348 Train Loss: 5.3303739, Val Loss: 4.8696139 +2025-02-27 03:32:11,348 Epoch 1027/2000 +2025-02-27 03:32:27,616 Current Learning Rate: 0.0004429836 +2025-02-27 03:32:27,617 Train Loss: 31.6283346, Val Loss: 5.2578695 +2025-02-27 03:32:27,617 Epoch 1028/2000 +2025-02-27 03:32:44,665 Current Learning Rate: 0.0004758647 +2025-02-27 03:32:44,665 Train Loss: 5.2322947, Val Loss: 4.9213042 +2025-02-27 03:32:44,665 Epoch 1029/2000 +2025-02-27 03:33:01,283 Current Learning Rate: 0.0005098621 +2025-02-27 03:33:01,284 Train Loss: 78.3620700, Val Loss: 5.8263471 +2025-02-27 03:33:01,284 Epoch 1030/2000 +2025-02-27 03:33:18,457 Current Learning Rate: 0.0005449674 +2025-02-27 03:33:18,457 Train Loss: 5.5164346, Val Loss: 4.7191937 +2025-02-27 03:33:18,457 Epoch 1031/2000 +2025-02-27 03:33:35,279 Current Learning Rate: 0.0005811718 +2025-02-27 03:33:35,280 Train Loss: 5.1564097, Val Loss: 4.4604126 +2025-02-27 03:33:35,280 Epoch 1032/2000 +2025-02-27 03:33:52,071 Current Learning Rate: 0.0006184666 +2025-02-27 03:33:52,072 Train Loss: 5.2226789, Val Loss: 6.8151928 +2025-02-27 03:33:52,072 Epoch 1033/2000 +2025-02-27 03:34:08,401 Current Learning Rate: 0.0006568424 +2025-02-27 03:34:08,401 Train Loss: 169.2096184, Val Loss: 7.6713845 +2025-02-27 03:34:08,402 Epoch 1034/2000 +2025-02-27 03:34:25,024 Current Learning Rate: 0.0006962899 +2025-02-27 03:34:25,024 Train Loss: 5.5337329, Val Loss: 4.4019192 +2025-02-27 03:34:25,025 Epoch 1035/2000 +2025-02-27 03:34:41,509 Current Learning Rate: 0.0007367992 +2025-02-27 03:34:41,510 Train Loss: 4.7150448, Val Loss: 4.2304903 +2025-02-27 03:34:41,510 Epoch 1036/2000 +2025-02-27 03:34:58,487 Current Learning Rate: 0.0007783604 +2025-02-27 03:34:58,488 Train Loss: 4.5797386, Val Loss: 5.7204918 +2025-02-27 03:34:58,488 Epoch 1037/2000 +2025-02-27 03:35:14,572 Current Learning Rate: 0.0008209632 +2025-02-27 
03:35:14,572 Train Loss: 49.8613255, Val Loss: 4.7111483 +2025-02-27 03:35:14,573 Epoch 1038/2000 +2025-02-27 03:35:30,800 Current Learning Rate: 0.0008645971 +2025-02-27 03:35:30,801 Train Loss: 4.5713427, Val Loss: 3.8934012 +2025-02-27 03:35:30,801 Epoch 1039/2000 +2025-02-27 03:35:46,984 Current Learning Rate: 0.0009092514 +2025-02-27 03:35:46,985 Train Loss: 40.1538670, Val Loss: 10.9658489 +2025-02-27 03:35:46,985 Epoch 1040/2000 +2025-02-27 03:36:03,431 Current Learning Rate: 0.0009549150 +2025-02-27 03:36:03,432 Train Loss: 5.1670143, Val Loss: 3.8340197 +2025-02-27 03:36:03,432 Epoch 1041/2000 +2025-02-27 03:36:19,310 Current Learning Rate: 0.0010015767 +2025-02-27 03:36:19,310 Train Loss: 31.7549658, Val Loss: 6.6150885 +2025-02-27 03:36:19,310 Epoch 1042/2000 +2025-02-27 03:36:35,627 Current Learning Rate: 0.0010492249 +2025-02-27 03:36:35,628 Train Loss: 8.6287979, Val Loss: 3.5854467 +2025-02-27 03:36:35,628 Epoch 1043/2000 +2025-02-27 03:36:51,081 Current Learning Rate: 0.0010978480 +2025-02-27 03:36:51,082 Train Loss: 35.2682288, Val Loss: 4.9900807 +2025-02-27 03:36:51,082 Epoch 1044/2000 +2025-02-27 03:37:07,401 Current Learning Rate: 0.0011474338 +2025-02-27 03:37:07,401 Train Loss: 4.9498917, Val Loss: 12.2695728 +2025-02-27 03:37:07,401 Epoch 1045/2000 +2025-02-27 03:37:24,307 Current Learning Rate: 0.0011979702 +2025-02-27 03:37:24,307 Train Loss: 28100.9531244, Val Loss: 1347.3996588 +2025-02-27 03:37:24,307 Epoch 1046/2000 +2025-02-27 03:37:40,419 Current Learning Rate: 0.0012494447 +2025-02-27 03:37:40,420 Train Loss: 373.5591799, Val Loss: 133.6452898 +2025-02-27 03:37:40,421 Epoch 1047/2000 +2025-02-27 03:37:56,800 Current Learning Rate: 0.0013018445 +2025-02-27 03:37:56,800 Train Loss: 120.2818443, Val Loss: 92.1619989 +2025-02-27 03:37:56,801 Epoch 1048/2000 +2025-02-27 03:38:12,756 Current Learning Rate: 0.0013551569 +2025-02-27 03:38:12,757 Train Loss: 89.9479422, Val Loss: 71.7684112 +2025-02-27 03:38:12,757 Epoch 1049/2000 
+2025-02-27 03:38:28,609 Current Learning Rate: 0.0014093685 +2025-02-27 03:38:28,610 Train Loss: 73.0246228, Val Loss: 59.4220387 +2025-02-27 03:38:28,610 Epoch 1050/2000 +2025-02-27 03:38:44,335 Current Learning Rate: 0.0014644661 +2025-02-27 03:38:44,335 Train Loss: 61.5179155, Val Loss: 50.5261498 +2025-02-27 03:38:44,336 Epoch 1051/2000 +2025-02-27 03:39:00,404 Current Learning Rate: 0.0015204360 +2025-02-27 03:39:00,405 Train Loss: 53.1184544, Val Loss: 43.8886293 +2025-02-27 03:39:00,409 Epoch 1052/2000 +2025-02-27 03:39:18,132 Current Learning Rate: 0.0015772645 +2025-02-27 03:39:18,133 Train Loss: 46.6538859, Val Loss: 38.8659319 +2025-02-27 03:39:18,134 Epoch 1053/2000 +2025-02-27 03:39:35,526 Current Learning Rate: 0.0016349374 +2025-02-27 03:39:35,527 Train Loss: 41.1114573, Val Loss: 34.1991282 +2025-02-27 03:39:35,527 Epoch 1054/2000 +2025-02-27 03:39:53,341 Current Learning Rate: 0.0016934407 +2025-02-27 03:39:53,342 Train Loss: 36.6714804, Val Loss: 30.6187272 +2025-02-27 03:39:53,342 Epoch 1055/2000 +2025-02-27 03:40:12,032 Current Learning Rate: 0.0017527598 +2025-02-27 03:40:12,033 Train Loss: 32.9239780, Val Loss: 27.5659893 +2025-02-27 03:40:12,033 Epoch 1056/2000 +2025-02-27 03:40:31,344 Current Learning Rate: 0.0018128801 +2025-02-27 03:40:31,344 Train Loss: 30.0264011, Val Loss: 25.0502070 +2025-02-27 03:40:31,345 Epoch 1057/2000 +2025-02-27 03:40:48,713 Current Learning Rate: 0.0018737867 +2025-02-27 03:40:48,714 Train Loss: 27.2144802, Val Loss: 22.6782895 +2025-02-27 03:40:48,715 Epoch 1058/2000 +2025-02-27 03:41:08,282 Current Learning Rate: 0.0019354647 +2025-02-27 03:41:08,283 Train Loss: 24.7736134, Val Loss: 20.6987204 +2025-02-27 03:41:08,283 Epoch 1059/2000 +2025-02-27 03:41:26,305 Current Learning Rate: 0.0019978989 +2025-02-27 03:41:26,305 Train Loss: 22.5018406, Val Loss: 18.9885171 +2025-02-27 03:41:26,305 Epoch 1060/2000 +2025-02-27 03:41:42,746 Current Learning Rate: 0.0020610737 +2025-02-27 03:41:42,747 Train Loss: 
20.6694110, Val Loss: 17.6032868 +2025-02-27 03:41:42,747 Epoch 1061/2000 +2025-02-27 03:41:59,536 Current Learning Rate: 0.0021249737 +2025-02-27 03:41:59,536 Train Loss: 19.0089417, Val Loss: 16.1837620 +2025-02-27 03:41:59,536 Epoch 1062/2000 +2025-02-27 03:42:17,309 Current Learning Rate: 0.0021895831 +2025-02-27 03:42:17,309 Train Loss: 17.3984986, Val Loss: 15.0585451 +2025-02-27 03:42:17,310 Epoch 1063/2000 +2025-02-27 03:42:33,652 Current Learning Rate: 0.0022548859 +2025-02-27 03:42:33,653 Train Loss: 16.1595726, Val Loss: 13.7937725 +2025-02-27 03:42:33,653 Epoch 1064/2000 +2025-02-27 03:42:50,161 Current Learning Rate: 0.0023208660 +2025-02-27 03:42:50,162 Train Loss: 14.9497255, Val Loss: 12.8019614 +2025-02-27 03:42:50,162 Epoch 1065/2000 +2025-02-27 03:43:06,513 Current Learning Rate: 0.0023875072 +2025-02-27 03:43:06,513 Train Loss: 13.8802753, Val Loss: 12.0058343 +2025-02-27 03:43:06,513 Epoch 1066/2000 +2025-02-27 03:43:21,700 Current Learning Rate: 0.0024547929 +2025-02-27 03:43:21,701 Train Loss: 12.8513411, Val Loss: 11.1148025 +2025-02-27 03:43:21,701 Epoch 1067/2000 +2025-02-27 03:43:37,620 Current Learning Rate: 0.0025227067 +2025-02-27 03:43:37,620 Train Loss: 11.9480621, Val Loss: 10.3123996 +2025-02-27 03:43:37,620 Epoch 1068/2000 +2025-02-27 03:43:53,527 Current Learning Rate: 0.0025912316 +2025-02-27 03:43:53,528 Train Loss: 11.1218885, Val Loss: 9.6382080 +2025-02-27 03:43:53,528 Epoch 1069/2000 +2025-02-27 03:44:09,416 Current Learning Rate: 0.0026603509 +2025-02-27 03:44:09,417 Train Loss: 10.3809908, Val Loss: 9.0193011 +2025-02-27 03:44:09,417 Epoch 1070/2000 +2025-02-27 03:44:25,888 Current Learning Rate: 0.0027300475 +2025-02-27 03:44:25,888 Train Loss: 9.7500777, Val Loss: 8.4799786 +2025-02-27 03:44:25,888 Epoch 1071/2000 +2025-02-27 03:44:42,542 Current Learning Rate: 0.0028003042 +2025-02-27 03:44:42,543 Train Loss: 9.1446696, Val Loss: 7.9676353 +2025-02-27 03:44:42,543 Epoch 1072/2000 +2025-02-27 03:44:57,731 Current 
Learning Rate: 0.0028711035 +2025-02-27 03:44:57,731 Train Loss: 8.6235809, Val Loss: 7.4421391 +2025-02-27 03:44:57,731 Epoch 1073/2000 +2025-02-27 03:45:12,925 Current Learning Rate: 0.0029424282 +2025-02-27 03:45:12,926 Train Loss: 8.0674209, Val Loss: 7.0224598 +2025-02-27 03:45:12,926 Epoch 1074/2000 +2025-02-27 03:45:28,569 Current Learning Rate: 0.0030142605 +2025-02-27 03:45:28,569 Train Loss: 7.6311274, Val Loss: 6.6162329 +2025-02-27 03:45:28,569 Epoch 1075/2000 +2025-02-27 03:45:43,615 Current Learning Rate: 0.0030865828 +2025-02-27 03:45:43,616 Train Loss: 7.1768873, Val Loss: 6.3056933 +2025-02-27 03:45:43,616 Epoch 1076/2000 +2025-02-27 03:45:58,404 Current Learning Rate: 0.0031593772 +2025-02-27 03:45:58,404 Train Loss: 6.8012342, Val Loss: 6.0068606 +2025-02-27 03:45:58,404 Epoch 1077/2000 +2025-02-27 03:46:14,416 Current Learning Rate: 0.0032326258 +2025-02-27 03:46:14,416 Train Loss: 6.4352709, Val Loss: 5.5895472 +2025-02-27 03:46:14,416 Epoch 1078/2000 +2025-02-27 03:46:30,097 Current Learning Rate: 0.0033063104 +2025-02-27 03:46:30,098 Train Loss: 6.0692619, Val Loss: 5.3095236 +2025-02-27 03:46:30,098 Epoch 1079/2000 +2025-02-27 03:46:45,560 Current Learning Rate: 0.0033804129 +2025-02-27 03:46:45,560 Train Loss: 5.8585791, Val Loss: 5.0897479 +2025-02-27 03:46:45,560 Epoch 1080/2000 +2025-02-27 03:47:01,342 Current Learning Rate: 0.0034549150 +2025-02-27 03:47:01,343 Train Loss: 5.5455810, Val Loss: 4.9289881 +2025-02-27 03:47:01,343 Epoch 1081/2000 +2025-02-27 03:47:17,175 Current Learning Rate: 0.0035297984 +2025-02-27 03:47:17,176 Train Loss: 5.3731946, Val Loss: 4.6804953 +2025-02-27 03:47:17,176 Epoch 1082/2000 +2025-02-27 03:47:33,397 Current Learning Rate: 0.0036050445 +2025-02-27 03:47:33,398 Train Loss: 5.2179388, Val Loss: 4.4007988 +2025-02-27 03:47:33,398 Epoch 1083/2000 +2025-02-27 03:47:49,888 Current Learning Rate: 0.0036806348 +2025-02-27 03:47:49,889 Train Loss: 4.7871131, Val Loss: 4.1469229 +2025-02-27 03:47:49,889 Epoch 
1084/2000 +2025-02-27 03:48:06,952 Current Learning Rate: 0.0037565506 +2025-02-27 03:48:06,952 Train Loss: 4.8229857, Val Loss: 5.4744637 +2025-02-27 03:48:06,953 Epoch 1085/2000 +2025-02-27 03:48:22,257 Current Learning Rate: 0.0038327732 +2025-02-27 03:48:22,258 Train Loss: 5.2310978, Val Loss: 3.9478900 +2025-02-27 03:48:22,258 Epoch 1086/2000 +2025-02-27 03:48:38,275 Current Learning Rate: 0.0039092838 +2025-02-27 03:48:38,275 Train Loss: 59.2145792, Val Loss: 5.6191640 +2025-02-27 03:48:38,276 Epoch 1087/2000 +2025-02-27 03:48:53,517 Current Learning Rate: 0.0039860635 +2025-02-27 03:48:53,517 Train Loss: 4.1683350, Val Loss: 3.4524908 +2025-02-27 03:48:53,517 Epoch 1088/2000 +2025-02-27 03:49:09,629 Current Learning Rate: 0.0040630934 +2025-02-27 03:49:09,629 Train Loss: 3.9320867, Val Loss: 3.2686638 +2025-02-27 03:49:09,629 Epoch 1089/2000 +2025-02-27 03:49:25,972 Current Learning Rate: 0.0041403545 +2025-02-27 03:49:25,972 Train Loss: 3.8544350, Val Loss: 3.7388436 +2025-02-27 03:49:25,972 Epoch 1090/2000 +2025-02-27 03:49:42,307 Current Learning Rate: 0.0042178277 +2025-02-27 03:49:42,307 Train Loss: 95.6990084, Val Loss: 4.5900916 +2025-02-27 03:49:42,307 Epoch 1091/2000 +2025-02-27 03:49:59,245 Current Learning Rate: 0.0042954938 +2025-02-27 03:49:59,245 Train Loss: 3.9916291, Val Loss: 3.0083098 +2025-02-27 03:49:59,246 Epoch 1092/2000 +2025-02-27 03:50:15,953 Current Learning Rate: 0.0043733338 +2025-02-27 03:50:15,954 Train Loss: 3.1731126, Val Loss: 2.6617680 +2025-02-27 03:50:15,954 Epoch 1093/2000 +2025-02-27 03:50:32,334 Current Learning Rate: 0.0044513284 +2025-02-27 03:50:32,334 Train Loss: 2.8919422, Val Loss: 2.5280512 +2025-02-27 03:50:32,334 Epoch 1094/2000 +2025-02-27 03:50:49,054 Current Learning Rate: 0.0045294584 +2025-02-27 03:50:49,055 Train Loss: 2.7957575, Val Loss: 2.4012144 +2025-02-27 03:50:49,055 Epoch 1095/2000 +2025-02-27 03:51:05,094 Current Learning Rate: 0.0046077045 +2025-02-27 03:51:05,094 Train Loss: 2.6522281, Val 
Loss: 2.3337358 +2025-02-27 03:51:05,095 Epoch 1096/2000 +2025-02-27 03:51:21,976 Current Learning Rate: 0.0046860474 +2025-02-27 03:51:21,977 Train Loss: 2.9821665, Val Loss: 2.2534324 +2025-02-27 03:51:21,977 Epoch 1097/2000 +2025-02-27 03:51:38,283 Current Learning Rate: 0.0047644677 +2025-02-27 03:51:38,283 Train Loss: 92.7376307, Val Loss: 11.3016517 +2025-02-27 03:51:38,284 Epoch 1098/2000 +2025-02-27 03:51:54,405 Current Learning Rate: 0.0048429462 +2025-02-27 03:51:54,406 Train Loss: 3.6697388, Val Loss: 2.2631900 +2025-02-27 03:51:54,406 Epoch 1099/2000 +2025-02-27 03:52:11,298 Current Learning Rate: 0.0049214634 +2025-02-27 03:52:11,298 Train Loss: 2.3401993, Val Loss: 2.0375342 +2025-02-27 03:52:11,299 Epoch 1100/2000 +2025-02-27 03:52:29,113 Current Learning Rate: 0.0050000000 +2025-02-27 03:52:29,114 Train Loss: 2.1429358, Val Loss: 1.8758687 +2025-02-27 03:52:29,114 Epoch 1101/2000 +2025-02-27 03:52:46,405 Current Learning Rate: 0.0050785366 +2025-02-27 03:52:46,405 Train Loss: 1.9959084, Val Loss: 1.7660035 +2025-02-27 03:52:46,406 Epoch 1102/2000 +2025-02-27 03:53:05,094 Current Learning Rate: 0.0051570538 +2025-02-27 03:53:05,095 Train Loss: 1.9021069, Val Loss: 1.6869550 +2025-02-27 03:53:05,095 Epoch 1103/2000 +2025-02-27 03:53:21,623 Current Learning Rate: 0.0052355323 +2025-02-27 03:53:21,623 Train Loss: 1.8158028, Val Loss: 1.6015300 +2025-02-27 03:53:21,624 Epoch 1104/2000 +2025-02-27 03:53:37,485 Current Learning Rate: 0.0053139526 +2025-02-27 03:53:37,486 Train Loss: 1.7603731, Val Loss: 1.5550448 +2025-02-27 03:53:37,486 Epoch 1105/2000 +2025-02-27 03:53:53,235 Current Learning Rate: 0.0053922955 +2025-02-27 03:53:53,235 Train Loss: 4.1388857, Val Loss: 11.2341227 +2025-02-27 03:53:53,236 Epoch 1106/2000 +2025-02-27 03:54:10,354 Current Learning Rate: 0.0054705416 +2025-02-27 03:54:10,355 Train Loss: 8.6428559, Val Loss: 1.7056189 +2025-02-27 03:54:10,356 Epoch 1107/2000 +2025-02-27 03:54:27,210 Current Learning Rate: 0.0055486716 
+2025-02-27 03:54:27,211 Train Loss: 1.9772311, Val Loss: 3.7740122 +2025-02-27 03:54:27,211 Epoch 1108/2000 +2025-02-27 03:54:43,639 Current Learning Rate: 0.0056266662 +2025-02-27 03:54:43,640 Train Loss: 39.2672783, Val Loss: 1.6670021 +2025-02-27 03:54:43,640 Epoch 1109/2000 +2025-02-27 03:55:00,657 Current Learning Rate: 0.0057045062 +2025-02-27 03:55:00,658 Train Loss: 1.6678086, Val Loss: 1.3311252 +2025-02-27 03:55:00,658 Epoch 1110/2000 +2025-02-27 03:55:16,640 Current Learning Rate: 0.0057821723 +2025-02-27 03:55:16,641 Train Loss: 1.4101552, Val Loss: 1.2177764 +2025-02-27 03:55:16,641 Epoch 1111/2000 +2025-02-27 03:55:32,132 Current Learning Rate: 0.0058596455 +2025-02-27 03:55:32,133 Train Loss: 1.3369332, Val Loss: 1.1884669 +2025-02-27 03:55:32,133 Epoch 1112/2000 +2025-02-27 03:55:47,859 Current Learning Rate: 0.0059369066 +2025-02-27 03:55:47,859 Train Loss: 1.2678203, Val Loss: 1.1081104 +2025-02-27 03:55:47,860 Epoch 1113/2000 +2025-02-27 03:56:03,693 Current Learning Rate: 0.0060139365 +2025-02-27 03:56:03,693 Train Loss: 1.2392165, Val Loss: 1.1095555 +2025-02-27 03:56:03,693 Epoch 1114/2000 +2025-02-27 03:56:19,121 Current Learning Rate: 0.0060907162 +2025-02-27 03:56:19,121 Train Loss: 5.1138403, Val Loss: 1.4867269 +2025-02-27 03:56:19,121 Epoch 1115/2000 +2025-02-27 03:56:34,098 Current Learning Rate: 0.0061672268 +2025-02-27 03:56:34,099 Train Loss: 1.3817802, Val Loss: 1.7291918 +2025-02-27 03:56:34,099 Epoch 1116/2000 +2025-02-27 03:56:48,954 Current Learning Rate: 0.0062434494 +2025-02-27 03:56:48,955 Train Loss: 24.4063085, Val Loss: 1.1833655 +2025-02-27 03:56:48,956 Epoch 1117/2000 +2025-02-27 03:57:03,860 Current Learning Rate: 0.0063193652 +2025-02-27 03:57:03,861 Train Loss: 1.2458795, Val Loss: 1.0480906 +2025-02-27 03:57:03,861 Epoch 1118/2000 +2025-02-27 03:57:18,293 Current Learning Rate: 0.0063949555 +2025-02-27 03:57:18,294 Train Loss: 17.7371580, Val Loss: 1.6896288 +2025-02-27 03:57:18,294 Epoch 1119/2000 +2025-02-27 
03:57:32,828 Current Learning Rate: 0.0064702016 +2025-02-27 03:57:32,829 Train Loss: 1.0875993, Val Loss: 0.8560731 +2025-02-27 03:57:32,829 Epoch 1120/2000 +2025-02-27 03:57:47,312 Current Learning Rate: 0.0065450850 +2025-02-27 03:57:47,313 Train Loss: 0.9345345, Val Loss: 0.8054946 +2025-02-27 03:57:47,313 Epoch 1121/2000 +2025-02-27 03:58:03,854 Current Learning Rate: 0.0066195871 +2025-02-27 03:58:03,855 Train Loss: 0.8888009, Val Loss: 0.7694693 +2025-02-27 03:58:03,855 Epoch 1122/2000 +2025-02-27 03:58:21,253 Current Learning Rate: 0.0066936896 +2025-02-27 03:58:21,254 Train Loss: 1.1844970, Val Loss: 0.7506119 +2025-02-27 03:58:21,254 Epoch 1123/2000 +2025-02-27 03:58:37,886 Current Learning Rate: 0.0067673742 +2025-02-27 03:58:37,887 Train Loss: 0.8806379, Val Loss: 0.7421396 +2025-02-27 03:58:37,887 Epoch 1124/2000 +2025-02-27 03:58:54,980 Current Learning Rate: 0.0068406228 +2025-02-27 03:58:54,980 Train Loss: 3.8147316, Val Loss: 1.2777118 +2025-02-27 03:58:54,980 Epoch 1125/2000 +2025-02-27 03:59:11,750 Current Learning Rate: 0.0069134172 +2025-02-27 03:59:11,750 Train Loss: 0.8824466, Val Loss: 0.6881935 +2025-02-27 03:59:11,750 Epoch 1126/2000 +2025-02-27 03:59:28,213 Current Learning Rate: 0.0069857395 +2025-02-27 03:59:28,213 Train Loss: 1.2174271, Val Loss: 5.8501434 +2025-02-27 03:59:28,213 Epoch 1127/2000 +2025-02-27 03:59:44,594 Current Learning Rate: 0.0070575718 +2025-02-27 03:59:44,595 Train Loss: 7.5871856, Val Loss: 0.7486903 +2025-02-27 03:59:44,597 Epoch 1128/2000 +2025-02-27 04:00:01,478 Current Learning Rate: 0.0071288965 +2025-02-27 04:00:01,479 Train Loss: 0.7816524, Val Loss: 0.6626209 +2025-02-27 04:00:01,479 Epoch 1129/2000 +2025-02-27 04:00:18,823 Current Learning Rate: 0.0071996958 +2025-02-27 04:00:18,823 Train Loss: 3.3795809, Val Loss: 1.3927651 +2025-02-27 04:00:18,824 Epoch 1130/2000 +2025-02-27 04:00:36,280 Current Learning Rate: 0.0072699525 +2025-02-27 04:00:36,280 Train Loss: 0.7433795, Val Loss: 0.5717238 +2025-02-27 
04:00:36,281 Epoch 1131/2000 +2025-02-27 04:00:53,137 Current Learning Rate: 0.0073396491 +2025-02-27 04:00:53,138 Train Loss: 4.1894701, Val Loss: 0.7195300 +2025-02-27 04:00:53,138 Epoch 1132/2000 +2025-02-27 04:01:10,326 Current Learning Rate: 0.0074087684 +2025-02-27 04:01:10,326 Train Loss: 0.6566041, Val Loss: 0.5526902 +2025-02-27 04:01:10,326 Epoch 1133/2000 +2025-02-27 04:01:25,811 Current Learning Rate: 0.0074772933 +2025-02-27 04:01:25,812 Train Loss: 2.0082565, Val Loss: 0.7232998 +2025-02-27 04:01:25,812 Epoch 1134/2000 +2025-02-27 04:01:42,338 Current Learning Rate: 0.0075452071 +2025-02-27 04:01:42,339 Train Loss: 3.3419135, Val Loss: 3.1236853 +2025-02-27 04:01:42,339 Epoch 1135/2000 +2025-02-27 04:01:58,396 Current Learning Rate: 0.0076124928 +2025-02-27 04:01:58,396 Train Loss: 0.8664674, Val Loss: 0.4700490 +2025-02-27 04:01:58,397 Epoch 1136/2000 +2025-02-27 04:02:15,313 Current Learning Rate: 0.0076791340 +2025-02-27 04:02:15,314 Train Loss: 3666030.2244700, Val Loss: 3385040288.0778589 +2025-02-27 04:02:15,314 Epoch 1137/2000 +2025-02-27 04:02:31,938 Current Learning Rate: 0.0077451141 +2025-02-27 04:02:31,939 Train Loss: 516206968.0152289, Val Loss: 1333782.3114355 +2025-02-27 04:02:31,939 Epoch 1138/2000 +2025-02-27 04:02:47,969 Current Learning Rate: 0.0078104169 +2025-02-27 04:02:47,969 Train Loss: 561990.0315600, Val Loss: 240908.0383212 +2025-02-27 04:02:47,969 Epoch 1139/2000 +2025-02-27 04:03:05,215 Current Learning Rate: 0.0078750263 +2025-02-27 04:03:05,216 Train Loss: 217617.4914838, Val Loss: 167191.2195864 +2025-02-27 04:03:05,216 Epoch 1140/2000 +2025-02-27 04:03:20,666 Current Learning Rate: 0.0079389263 +2025-02-27 04:03:20,666 Train Loss: 163842.3742611, Val Loss: 132502.7265815 +2025-02-27 04:03:20,667 Epoch 1141/2000 +2025-02-27 04:03:36,555 Current Learning Rate: 0.0080021011 +2025-02-27 04:03:36,555 Train Loss: 133456.8452560, Val Loss: 110197.7204988 +2025-02-27 04:03:36,556 Epoch 1142/2000 +2025-02-27 04:03:52,196 
Current Learning Rate: 0.0080645353 +2025-02-27 04:03:52,197 Train Loss: 112413.0843603, Val Loss: 93699.8965937 +2025-02-27 04:03:52,197 Epoch 1143/2000 +2025-02-27 04:04:07,801 Current Learning Rate: 0.0081262133 +2025-02-27 04:04:07,801 Train Loss: 96273.2163611, Val Loss: 80549.3734793 +2025-02-27 04:04:07,804 Epoch 1144/2000 +2025-02-27 04:04:23,119 Current Learning Rate: 0.0081871199 +2025-02-27 04:04:23,119 Train Loss: 83658.9114317, Val Loss: 70343.6511557 +2025-02-27 04:04:23,119 Epoch 1145/2000 +2025-02-27 04:04:39,205 Current Learning Rate: 0.0082472402 +2025-02-27 04:04:39,205 Train Loss: 73841.4399860, Val Loss: 62715.6576946 +2025-02-27 04:04:39,205 Epoch 1146/2000 +2025-02-27 04:04:55,593 Current Learning Rate: 0.0083065593 +2025-02-27 04:04:55,594 Train Loss: 66123.1753457, Val Loss: 56430.7801095 +2025-02-27 04:04:55,594 Epoch 1147/2000 +2025-02-27 04:05:12,636 Current Learning Rate: 0.0083650626 +2025-02-27 04:05:12,637 Train Loss: 59922.2028730, Val Loss: 51224.0305657 +2025-02-27 04:05:12,637 Epoch 1148/2000 +2025-02-27 04:05:29,235 Current Learning Rate: 0.0084227355 +2025-02-27 04:05:29,236 Train Loss: 54628.5985873, Val Loss: 46815.2478710 +2025-02-27 04:05:29,236 Epoch 1149/2000 +2025-02-27 04:05:45,573 Current Learning Rate: 0.0084795640 +2025-02-27 04:05:45,574 Train Loss: 49871.7448151, Val Loss: 43067.7227798 +2025-02-27 04:05:45,574 Epoch 1150/2000 +2025-02-27 04:06:01,868 Current Learning Rate: 0.0085355339 +2025-02-27 04:06:01,868 Train Loss: 46124.6654268, Val Loss: 39764.4289842 +2025-02-27 04:06:01,868 Epoch 1151/2000 +2025-02-27 04:06:18,762 Current Learning Rate: 0.0085906315 +2025-02-27 04:06:18,763 Train Loss: 42734.0680418, Val Loss: 36864.6730535 +2025-02-27 04:06:18,763 Epoch 1152/2000 +2025-02-27 04:06:35,116 Current Learning Rate: 0.0086448431 +2025-02-27 04:06:35,117 Train Loss: 39715.7436630, Val Loss: 34323.9195560 +2025-02-27 04:06:35,117 Epoch 1153/2000 +2025-02-27 04:06:51,536 Current Learning Rate: 0.0086981555 
+2025-02-27 04:06:51,537 Train Loss: 37092.7346959, Val Loss: 32001.6624848 +2025-02-27 04:06:51,537 Epoch 1154/2000 +2025-02-27 04:07:07,802 Current Learning Rate: 0.0087505553 +2025-02-27 04:07:07,802 Train Loss: 34765.2877655, Val Loss: 30057.9698905 +2025-02-27 04:07:07,802 Epoch 1155/2000 +2025-02-27 04:07:24,558 Current Learning Rate: 0.0088020298 +2025-02-27 04:07:24,558 Train Loss: 32591.4639064, Val Loss: 28216.6613443 +2025-02-27 04:07:24,558 Epoch 1156/2000 +2025-02-27 04:07:41,065 Current Learning Rate: 0.0088525662 +2025-02-27 04:07:41,066 Train Loss: 30778.4998059, Val Loss: 26587.1586071 +2025-02-27 04:07:41,066 Epoch 1157/2000 +2025-02-27 04:07:57,788 Current Learning Rate: 0.0089021520 +2025-02-27 04:07:57,789 Train Loss: 28991.5133441, Val Loss: 25154.4780262 +2025-02-27 04:07:57,789 Epoch 1158/2000 +2025-02-27 04:08:14,657 Current Learning Rate: 0.0089507751 +2025-02-27 04:08:14,657 Train Loss: 27493.0326683, Val Loss: 23784.2993461 +2025-02-27 04:08:14,657 Epoch 1159/2000 +2025-02-27 04:08:31,697 Current Learning Rate: 0.0089984233 +2025-02-27 04:08:31,697 Train Loss: 25985.9643949, Val Loss: 22558.5150547 +2025-02-27 04:08:31,698 Epoch 1160/2000 +2025-02-27 04:08:48,520 Current Learning Rate: 0.0090450850 +2025-02-27 04:08:48,521 Train Loss: 24652.8878619, Val Loss: 21417.3190389 +2025-02-27 04:08:48,521 Epoch 1161/2000 +2025-02-27 04:09:06,077 Current Learning Rate: 0.0090907486 +2025-02-27 04:09:06,077 Train Loss: 23442.4121769, Val Loss: 20354.6578467 +2025-02-27 04:09:06,078 Epoch 1162/2000 +2025-02-27 04:09:24,533 Current Learning Rate: 0.0091354029 +2025-02-27 04:09:24,533 Train Loss: 22293.2778529, Val Loss: 19391.5837895 +2025-02-27 04:09:24,533 Epoch 1163/2000 +2025-02-27 04:09:42,140 Current Learning Rate: 0.0091790368 +2025-02-27 04:09:42,140 Train Loss: 21293.7783664, Val Loss: 18488.6868917 +2025-02-27 04:09:42,141 Epoch 1164/2000 +2025-02-27 04:09:57,777 Current Learning Rate: 0.0092216396 +2025-02-27 04:09:57,778 Train Loss: 
20299.1654769, Val Loss: 17637.0973996 +2025-02-27 04:09:57,778 Epoch 1165/2000 +2025-02-27 04:10:13,380 Current Learning Rate: 0.0092632008 +2025-02-27 04:10:13,381 Train Loss: 19412.6378244, Val Loss: 16873.9955900 +2025-02-27 04:10:13,381 Epoch 1166/2000 +2025-02-27 04:10:28,825 Current Learning Rate: 0.0093037101 +2025-02-27 04:10:28,825 Train Loss: 18537.5758942, Val Loss: 16088.6612302 +2025-02-27 04:10:28,825 Epoch 1167/2000 +2025-02-27 04:10:45,503 Current Learning Rate: 0.0093431576 +2025-02-27 04:10:45,504 Train Loss: 17739.4935064, Val Loss: 15433.9800030 +2025-02-27 04:10:45,504 Epoch 1168/2000 +2025-02-27 04:11:01,206 Current Learning Rate: 0.0093815334 +2025-02-27 04:11:01,206 Train Loss: 16963.1837366, Val Loss: 14728.5256995 +2025-02-27 04:11:01,207 Epoch 1169/2000 +2025-02-27 04:11:17,340 Current Learning Rate: 0.0094188282 +2025-02-27 04:11:17,341 Train Loss: 16257.6861506, Val Loss: 14109.5255474 +2025-02-27 04:11:17,341 Epoch 1170/2000 +2025-02-27 04:11:33,511 Current Learning Rate: 0.0094550326 +2025-02-27 04:11:33,512 Train Loss: 15604.4686623, Val Loss: 13504.8013610 +2025-02-27 04:11:33,512 Epoch 1171/2000 +2025-02-27 04:11:49,637 Current Learning Rate: 0.0094901379 +2025-02-27 04:11:49,638 Train Loss: 14949.6123103, Val Loss: 12987.2068887 +2025-02-27 04:11:49,638 Epoch 1172/2000 +2025-02-27 04:12:05,373 Current Learning Rate: 0.0095241353 +2025-02-27 04:12:05,373 Train Loss: 14330.9394882, Val Loss: 12452.9434687 +2025-02-27 04:12:05,374 Epoch 1173/2000 +2025-02-27 04:12:20,644 Current Learning Rate: 0.0095570164 +2025-02-27 04:12:20,644 Train Loss: 13792.1657556, Val Loss: 11915.1026460 +2025-02-27 04:12:20,645 Epoch 1174/2000 +2025-02-27 04:12:36,865 Current Learning Rate: 0.0095887731 +2025-02-27 04:12:36,866 Train Loss: 13237.3351399, Val Loss: 11449.5154729 +2025-02-27 04:12:36,866 Epoch 1175/2000 +2025-02-27 04:12:54,415 Current Learning Rate: 0.0096193977 +2025-02-27 04:12:54,416 Train Loss: 12702.5835337, Val Loss: 10994.5308698 
+2025-02-27 04:12:54,416 Epoch 1176/2000 +2025-02-27 04:13:11,985 Current Learning Rate: 0.0096488824 +2025-02-27 04:13:11,986 Train Loss: 12210.9693417, Val Loss: 10573.5363443 +2025-02-27 04:13:11,986 Epoch 1177/2000 +2025-02-27 04:13:29,344 Current Learning Rate: 0.0096772202 +2025-02-27 04:13:29,344 Train Loss: 11733.2560772, Val Loss: 10161.1718750 +2025-02-27 04:13:29,345 Epoch 1178/2000 +2025-02-27 04:13:48,013 Current Learning Rate: 0.0097044038 +2025-02-27 04:13:48,013 Train Loss: 11317.0211057, Val Loss: 9754.0273342 +2025-02-27 04:13:48,014 Epoch 1179/2000 +2025-02-27 04:14:05,120 Current Learning Rate: 0.0097304268 +2025-02-27 04:14:05,120 Train Loss: 10851.2173599, Val Loss: 9377.2511785 +2025-02-27 04:14:05,121 Epoch 1180/2000 +2025-02-27 04:14:22,090 Current Learning Rate: 0.0097552826 +2025-02-27 04:14:22,091 Train Loss: 10420.8389534, Val Loss: 9039.8013610 +2025-02-27 04:14:22,091 Epoch 1181/2000 +2025-02-27 04:14:39,411 Current Learning Rate: 0.0097789651 +2025-02-27 04:14:39,412 Train Loss: 10057.5958383, Val Loss: 8687.4593218 +2025-02-27 04:14:39,412 Epoch 1182/2000 +2025-02-27 04:14:56,432 Current Learning Rate: 0.0098014684 +2025-02-27 04:14:56,432 Train Loss: 9656.1421232, Val Loss: 8343.4616408 +2025-02-27 04:14:56,432 Epoch 1183/2000 +2025-02-27 04:15:13,858 Current Learning Rate: 0.0098227871 +2025-02-27 04:15:13,859 Train Loss: 9310.2173505, Val Loss: 8023.2234641 +2025-02-27 04:15:13,859 Epoch 1184/2000 +2025-02-27 04:15:30,876 Current Learning Rate: 0.0098429158 +2025-02-27 04:15:30,876 Train Loss: 8967.5007514, Val Loss: 7732.8612568 +2025-02-27 04:15:30,877 Epoch 1185/2000 +2025-02-27 04:15:48,713 Current Learning Rate: 0.0098618496 +2025-02-27 04:15:48,714 Train Loss: 8617.3517496, Val Loss: 7436.2929592 +2025-02-27 04:15:48,714 Epoch 1186/2000 +2025-02-27 04:16:05,185 Current Learning Rate: 0.0098795838 +2025-02-27 04:16:05,186 Train Loss: 8313.3718205, Val Loss: 7183.4731410 +2025-02-27 04:16:05,186 Epoch 1187/2000 +2025-02-27 
04:16:21,596 Current Learning Rate: 0.0098961141 +2025-02-27 04:16:21,596 Train Loss: 8017.9218640, Val Loss: 6917.2922369 +2025-02-27 04:16:21,597 Epoch 1188/2000 +2025-02-27 04:16:37,925 Current Learning Rate: 0.0099114363 +2025-02-27 04:16:37,926 Train Loss: 7722.0649781, Val Loss: 6662.4682558 +2025-02-27 04:16:37,926 Epoch 1189/2000 +2025-02-27 04:16:54,759 Current Learning Rate: 0.0099255466 +2025-02-27 04:16:54,760 Train Loss: 7467.9088549, Val Loss: 6438.0847970 +2025-02-27 04:16:54,760 Epoch 1190/2000 +2025-02-27 04:17:12,317 Current Learning Rate: 0.0099384417 +2025-02-27 04:17:12,317 Train Loss: 7216.3682951, Val Loss: 6172.9718864 +2025-02-27 04:17:12,318 Epoch 1191/2000 +2025-02-27 04:17:29,016 Current Learning Rate: 0.0099501183 +2025-02-27 04:17:29,016 Train Loss: 6942.2510896, Val Loss: 6060.8572841 +2025-02-27 04:17:29,016 Epoch 1192/2000 +2025-02-27 04:17:45,332 Current Learning Rate: 0.0099605735 +2025-02-27 04:17:45,333 Train Loss: 6699.2233212, Val Loss: 5767.3038321 +2025-02-27 04:17:45,333 Epoch 1193/2000 +2025-02-27 04:18:02,293 Current Learning Rate: 0.0099698048 +2025-02-27 04:18:02,294 Train Loss: 6465.3809895, Val Loss: 5597.7903551 +2025-02-27 04:18:02,294 Epoch 1194/2000 +2025-02-27 04:18:19,990 Current Learning Rate: 0.0099778098 +2025-02-27 04:18:19,991 Train Loss: 6232.7562134, Val Loss: 5347.9941264 +2025-02-27 04:18:19,991 Epoch 1195/2000 +2025-02-27 04:18:37,191 Current Learning Rate: 0.0099845867 +2025-02-27 04:18:37,192 Train Loss: 6029.2579886, Val Loss: 5183.7885873 +2025-02-27 04:18:37,192 Epoch 1196/2000 +2025-02-27 04:18:54,532 Current Learning Rate: 0.0099901336 +2025-02-27 04:18:54,532 Train Loss: 5813.1520592, Val Loss: 4971.5956699 +2025-02-27 04:18:54,532 Epoch 1197/2000 +2025-02-27 04:19:13,429 Current Learning Rate: 0.0099944494 +2025-02-27 04:19:13,430 Train Loss: 5616.7192040, Val Loss: 4808.8089264 +2025-02-27 04:19:13,430 Epoch 1198/2000 +2025-02-27 04:19:31,054 Current Learning Rate: 0.0099975328 +2025-02-27 
04:19:31,054 Train Loss: 5417.7653557, Val Loss: 4640.0992245 +2025-02-27 04:19:31,055 Epoch 1199/2000 +2025-02-27 04:19:48,056 Current Learning Rate: 0.0099993832 +2025-02-27 04:19:48,057 Train Loss: 5229.8545283, Val Loss: 4506.0520263 +2025-02-27 04:19:48,057 Epoch 1200/2000 +2025-02-27 04:20:05,825 Current Learning Rate: 0.0100000000 +2025-02-27 04:20:05,826 Train Loss: 5054.6216718, Val Loss: 4327.0294442 +2025-02-27 04:20:05,826 Epoch 1201/2000 +2025-02-27 04:20:22,435 Current Learning Rate: 0.0099993832 +2025-02-27 04:20:22,435 Train Loss: 4871.1968678, Val Loss: 4216.3188108 +2025-02-27 04:20:22,436 Epoch 1202/2000 +2025-02-27 04:20:38,746 Current Learning Rate: 0.0099975328 +2025-02-27 04:20:38,747 Train Loss: 4718.3874361, Val Loss: 4050.6590252 +2025-02-27 04:20:38,747 Epoch 1203/2000 +2025-02-27 04:20:55,495 Current Learning Rate: 0.0099944494 +2025-02-27 04:20:55,495 Train Loss: 4571.1570108, Val Loss: 3896.6140606 +2025-02-27 04:20:55,495 Epoch 1204/2000 +2025-02-27 04:21:12,511 Current Learning Rate: 0.0099901336 +2025-02-27 04:21:12,512 Train Loss: 4412.0886090, Val Loss: 3823.3886006 +2025-02-27 04:21:12,512 Epoch 1205/2000 +2025-02-27 04:21:29,219 Current Learning Rate: 0.0099845867 +2025-02-27 04:21:29,219 Train Loss: 4271.5332765, Val Loss: 3642.0280186 +2025-02-27 04:21:29,220 Epoch 1206/2000 +2025-02-27 04:21:45,844 Current Learning Rate: 0.0099778098 +2025-02-27 04:21:45,845 Train Loss: 4108.0296939, Val Loss: 3508.9697860 +2025-02-27 04:21:45,846 Epoch 1207/2000 +2025-02-27 04:22:02,921 Current Learning Rate: 0.0099698048 +2025-02-27 04:22:02,922 Train Loss: 3959.7507452, Val Loss: 3392.0055885 +2025-02-27 04:22:02,922 Epoch 1208/2000 +2025-02-27 04:22:19,301 Current Learning Rate: 0.0099605735 +2025-02-27 04:22:19,302 Train Loss: 3844.2835880, Val Loss: 3266.8599358 +2025-02-27 04:22:19,302 Epoch 1209/2000 +2025-02-27 04:22:36,018 Current Learning Rate: 0.0099501183 +2025-02-27 04:22:36,018 Train Loss: 3700.0444008, Val Loss: 3187.6501198 
+2025-02-27 04:22:36,019 Epoch 1210/2000 +2025-02-27 04:22:52,926 Current Learning Rate: 0.0099384417 +2025-02-27 04:22:52,927 Train Loss: 3579.1051122, Val Loss: 3072.6165697 +2025-02-27 04:22:52,927 Epoch 1211/2000 +2025-02-27 04:23:10,705 Current Learning Rate: 0.0099255466 +2025-02-27 04:23:10,706 Train Loss: 3461.3505974, Val Loss: 2940.1754486 +2025-02-27 04:23:10,706 Epoch 1212/2000 +2025-02-27 04:23:29,051 Current Learning Rate: 0.0099114363 +2025-02-27 04:23:29,051 Train Loss: 3348.8424530, Val Loss: 2853.4067537 +2025-02-27 04:23:29,052 Epoch 1213/2000 +2025-02-27 04:23:46,790 Current Learning Rate: 0.0098961141 +2025-02-27 04:23:46,791 Train Loss: 3230.4536862, Val Loss: 2757.6937443 +2025-02-27 04:23:46,791 Epoch 1214/2000 +2025-02-27 04:24:03,173 Current Learning Rate: 0.0098795838 +2025-02-27 04:24:03,173 Train Loss: 3124.2108952, Val Loss: 2670.4605763 +2025-02-27 04:24:03,174 Epoch 1215/2000 +2025-02-27 04:24:20,323 Current Learning Rate: 0.0098618496 +2025-02-27 04:24:20,324 Train Loss: 3021.4417550, Val Loss: 2558.6475916 +2025-02-27 04:24:20,324 Epoch 1216/2000 +2025-02-27 04:24:36,956 Current Learning Rate: 0.0098429158 +2025-02-27 04:24:36,957 Train Loss: 2928.8859888, Val Loss: 2478.8865287 +2025-02-27 04:24:36,957 Epoch 1217/2000 +2025-02-27 04:24:53,174 Current Learning Rate: 0.0098227871 +2025-02-27 04:24:53,174 Train Loss: 2833.2487077, Val Loss: 2386.5102266 +2025-02-27 04:24:53,175 Epoch 1218/2000 +2025-02-27 04:25:10,631 Current Learning Rate: 0.0098014684 +2025-02-27 04:25:10,631 Train Loss: 2725.4625742, Val Loss: 2333.4218655 +2025-02-27 04:25:10,632 Epoch 1219/2000 +2025-02-27 04:25:27,144 Current Learning Rate: 0.0097789651 +2025-02-27 04:25:27,144 Train Loss: 2646.8428491, Val Loss: 2232.0203676 +2025-02-27 04:25:27,144 Epoch 1220/2000 +2025-02-27 04:25:44,181 Current Learning Rate: 0.0097552826 +2025-02-27 04:25:44,182 Train Loss: 2578.2876958, Val Loss: 2155.6427254 +2025-02-27 04:25:44,182 Epoch 1221/2000 +2025-02-27 
04:25:59,959 Current Learning Rate: 0.0097304268 +2025-02-27 04:25:59,959 Train Loss: 2494.2633504, Val Loss: 2100.7838637 +2025-02-27 04:25:59,959 Epoch 1222/2000 +2025-02-27 04:26:17,079 Current Learning Rate: 0.0097044038 +2025-02-27 04:26:17,080 Train Loss: 2392.9173868, Val Loss: 2024.4379895 +2025-02-27 04:26:17,080 Epoch 1223/2000 +2025-02-27 04:26:33,228 Current Learning Rate: 0.0096772202 +2025-02-27 04:26:33,230 Train Loss: 2365.7474397, Val Loss: 1965.2983387 +2025-02-27 04:26:33,230 Epoch 1224/2000 +2025-02-27 04:26:49,878 Current Learning Rate: 0.0096488824 +2025-02-27 04:26:49,879 Train Loss: 2275.6676396, Val Loss: 1898.8651441 +2025-02-27 04:26:49,879 Epoch 1225/2000 +2025-02-27 04:27:06,394 Current Learning Rate: 0.0096193977 +2025-02-27 04:27:06,395 Train Loss: 2298.2126411, Val Loss: 2089.3860725 +2025-02-27 04:27:06,396 Epoch 1226/2000 +2025-02-27 04:27:23,189 Current Learning Rate: 0.0095887731 +2025-02-27 04:27:23,190 Train Loss: 2246.2353569, Val Loss: 1809.1372035 +2025-02-27 04:27:23,190 Epoch 1227/2000 +2025-02-27 04:27:40,980 Current Learning Rate: 0.0095570164 +2025-02-27 04:27:40,980 Train Loss: 2191.7148422, Val Loss: 1782.3926969 +2025-02-27 04:27:40,981 Epoch 1228/2000 +2025-02-27 04:27:57,478 Current Learning Rate: 0.0095241353 +2025-02-27 04:27:57,479 Train Loss: 2333.2826707, Val Loss: 6600.5310029 +2025-02-27 04:27:57,479 Epoch 1229/2000 +2025-02-27 04:28:12,724 Current Learning Rate: 0.0094901379 +2025-02-27 04:28:12,725 Train Loss: 3632.7979665, Val Loss: 2582.7333771 +2025-02-27 04:28:12,725 Epoch 1230/2000 +2025-02-27 04:28:29,473 Current Learning Rate: 0.0094550326 +2025-02-27 04:28:29,474 Train Loss: 3218.4011008, Val Loss: 3672.7909253 +2025-02-27 04:28:29,474 Epoch 1231/2000 +2025-02-27 04:28:45,670 Current Learning Rate: 0.0094188282 +2025-02-27 04:28:45,671 Train Loss: 3216.6366370, Val Loss: 2673.5960310 +2025-02-27 04:28:45,671 Epoch 1232/2000 +2025-02-27 04:29:03,022 Current Learning Rate: 0.0093815334 +2025-02-27 
04:29:03,022 Train Loss: 2335.9378229, Val Loss: 2299.4510341 +2025-02-27 04:29:03,022 Epoch 1233/2000 +2025-02-27 04:29:20,418 Current Learning Rate: 0.0093431576 +2025-02-27 04:29:20,419 Train Loss: 4935.5992969, Val Loss: 2706.7024977 +2025-02-27 04:29:20,419 Epoch 1234/2000 +2025-02-27 04:29:37,619 Current Learning Rate: 0.0093037101 +2025-02-27 04:29:37,620 Train Loss: 1938.7930642, Val Loss: 1457.3988272 +2025-02-27 04:29:37,620 Epoch 1235/2000 +2025-02-27 04:29:53,838 Current Learning Rate: 0.0092632008 +2025-02-27 04:29:53,838 Train Loss: 6432.0625145, Val Loss: 5642.8962325 +2025-02-27 04:29:53,838 Epoch 1236/2000 +2025-02-27 04:30:10,418 Current Learning Rate: 0.0092216396 +2025-02-27 04:30:10,418 Train Loss: 1971.5513010, Val Loss: 1258.2247709 +2025-02-27 04:30:10,418 Epoch 1237/2000 +2025-02-27 04:30:26,572 Current Learning Rate: 0.0091790368 +2025-02-27 04:30:26,573 Train Loss: 1572.2966429, Val Loss: 1337.5925477 +2025-02-27 04:30:26,573 Epoch 1238/2000 +2025-02-27 04:30:42,387 Current Learning Rate: 0.0091354029 +2025-02-27 04:30:42,388 Train Loss: 1664.3897566, Val Loss: 2353.9352855 +2025-02-27 04:30:42,389 Epoch 1239/2000 +2025-02-27 04:30:58,242 Current Learning Rate: 0.0090907486 +2025-02-27 04:30:58,243 Train Loss: 3520.9860356, Val Loss: 1261.4488053 +2025-02-27 04:30:58,243 Epoch 1240/2000 +2025-02-27 04:31:14,538 Current Learning Rate: 0.0090450850 +2025-02-27 04:31:14,539 Train Loss: 1923.7945280, Val Loss: 1612.5334075 +2025-02-27 04:31:14,539 Epoch 1241/2000 +2025-02-27 04:31:30,905 Current Learning Rate: 0.0089984233 +2025-02-27 04:31:30,906 Train Loss: 4081.7596715, Val Loss: 4695.2549992 +2025-02-27 04:31:30,906 Epoch 1242/2000 +2025-02-27 04:31:46,885 Current Learning Rate: 0.0089507751 +2025-02-27 04:31:46,885 Train Loss: 1653.5486683, Val Loss: 2053.5312595 +2025-02-27 04:31:46,886 Epoch 1243/2000 +2025-02-27 04:32:04,106 Current Learning Rate: 0.0089021520 +2025-02-27 04:32:04,106 Train Loss: 1509.7824499, Val Loss: 1006.4903864 
+2025-02-27 04:32:04,106 Epoch 1244/2000 +2025-02-27 04:32:20,781 Current Learning Rate: 0.0088525662 +2025-02-27 04:32:20,782 Train Loss: 4831.4439701, Val Loss: 1478.3385369 +2025-02-27 04:32:20,782 Epoch 1245/2000 +2025-02-27 04:32:37,949 Current Learning Rate: 0.0088020298 +2025-02-27 04:32:37,950 Train Loss: 1241.3865035, Val Loss: 1055.4914177 +2025-02-27 04:32:37,950 Epoch 1246/2000 +2025-02-27 04:32:53,861 Current Learning Rate: 0.0087505553 +2025-02-27 04:32:53,861 Train Loss: 1959.1564109, Val Loss: 2417.6451300 +2025-02-27 04:32:53,861 Epoch 1247/2000 +2025-02-27 04:33:10,540 Current Learning Rate: 0.0086981555 +2025-02-27 04:33:10,541 Train Loss: 3109.4280794, Val Loss: 7911.0915640 +2025-02-27 04:33:10,541 Epoch 1248/2000 +2025-02-27 04:33:26,565 Current Learning Rate: 0.0086448431 +2025-02-27 04:33:26,566 Train Loss: 1655.8130389, Val Loss: 867.9417817 +2025-02-27 04:33:26,566 Epoch 1249/2000 +2025-02-27 04:33:43,734 Current Learning Rate: 0.0085906315 +2025-02-27 04:33:43,734 Train Loss: 1443.4961926, Val Loss: 1517.7077156 +2025-02-27 04:33:43,735 Epoch 1250/2000 +2025-02-27 04:34:00,403 Current Learning Rate: 0.0085355339 +2025-02-27 04:34:00,404 Train Loss: 3691.3205099, Val Loss: 2207.2528893 +2025-02-27 04:34:00,404 Epoch 1251/2000 +2025-02-27 04:34:18,311 Current Learning Rate: 0.0084795640 +2025-02-27 04:34:18,312 Train Loss: 1164.3314984, Val Loss: 1097.4022962 +2025-02-27 04:34:18,312 Epoch 1252/2000 +2025-02-27 04:34:35,491 Current Learning Rate: 0.0084227355 +2025-02-27 04:34:35,492 Train Loss: 1046.5307404, Val Loss: 831.7554483 +2025-02-27 04:34:35,492 Epoch 1253/2000 +2025-02-27 04:34:51,392 Current Learning Rate: 0.0083650626 +2025-02-27 04:34:51,393 Train Loss: 3584.6121596, Val Loss: 929.5907585 +2025-02-27 04:34:51,393 Epoch 1254/2000 +2025-02-27 04:35:07,941 Current Learning Rate: 0.0083065593 +2025-02-27 04:35:07,941 Train Loss: 948.0129668, Val Loss: 874.3697014 +2025-02-27 04:35:07,941 Epoch 1255/2000 +2025-02-27 04:35:25,445 
Current Learning Rate: 0.0082472402 +2025-02-27 04:35:25,445 Train Loss: 1349.8456051, Val Loss: 3199.4697099 +2025-02-27 04:35:25,445 Epoch 1256/2000 +2025-02-27 04:35:42,668 Current Learning Rate: 0.0081871199 +2025-02-27 04:35:42,669 Train Loss: 1430.6389660, Val Loss: 716.6273428 +2025-02-27 04:35:42,669 Epoch 1257/2000 +2025-02-27 04:35:59,189 Current Learning Rate: 0.0081262133 +2025-02-27 04:35:59,190 Train Loss: 2715.7276844, Val Loss: 718.6544822 +2025-02-27 04:35:59,190 Epoch 1258/2000 +2025-02-27 04:36:16,377 Current Learning Rate: 0.0080645353 +2025-02-27 04:36:16,377 Train Loss: 874.1230897, Val Loss: 682.0112554 +2025-02-27 04:36:16,378 Epoch 1259/2000 +2025-02-27 04:36:33,001 Current Learning Rate: 0.0080021011 +2025-02-27 04:36:33,002 Train Loss: 3671.3984011, Val Loss: 1751.0943203 +2025-02-27 04:36:33,003 Epoch 1260/2000 +2025-02-27 04:36:49,633 Current Learning Rate: 0.0079389263 +2025-02-27 04:36:49,633 Train Loss: 962.1704197, Val Loss: 637.7997168 +2025-02-27 04:36:49,633 Epoch 1261/2000 +2025-02-27 04:37:07,018 Current Learning Rate: 0.0078750263 +2025-02-27 04:37:07,018 Train Loss: 793.3794649, Val Loss: 607.6957402 +2025-02-27 04:37:07,018 Epoch 1262/2000 +2025-02-27 04:37:24,751 Current Learning Rate: 0.0078104169 +2025-02-27 04:37:24,752 Train Loss: 2190.5480256, Val Loss: 760.1333091 +2025-02-27 04:37:24,752 Epoch 1263/2000 +2025-02-27 04:37:42,190 Current Learning Rate: 0.0077451141 +2025-02-27 04:37:42,191 Train Loss: 772.7196419, Val Loss: 621.9787865 +2025-02-27 04:37:42,191 Epoch 1264/2000 +2025-02-27 04:37:58,754 Current Learning Rate: 0.0076791340 +2025-02-27 04:37:58,755 Train Loss: 1112.0479693, Val Loss: 4043.1097932 +2025-02-27 04:37:58,755 Epoch 1265/2000 +2025-02-27 04:38:16,248 Current Learning Rate: 0.0076124928 +2025-02-27 04:38:16,249 Train Loss: 1499.6012512, Val Loss: 616.1226548 +2025-02-27 04:38:16,249 Epoch 1266/2000 +2025-02-27 04:38:33,819 Current Learning Rate: 0.0075452071 +2025-02-27 04:38:33,819 Train Loss: 
1726.7572029, Val Loss: 2332.4286705 +2025-02-27 04:38:33,819 Epoch 1267/2000 +2025-02-27 04:38:51,535 Current Learning Rate: 0.0074772933 +2025-02-27 04:38:51,536 Train Loss: 879.4603722, Val Loss: 960.4593527 +2025-02-27 04:38:51,536 Epoch 1268/2000 +2025-02-27 04:39:09,104 Current Learning Rate: 0.0074087684 +2025-02-27 04:39:09,105 Train Loss: 1693.7189165, Val Loss: 1924.3043596 +2025-02-27 04:39:09,105 Epoch 1269/2000 +2025-02-27 04:39:26,344 Current Learning Rate: 0.0073396491 +2025-02-27 04:39:26,344 Train Loss: 789.8044488, Val Loss: 578.7593973 +2025-02-27 04:39:26,344 Epoch 1270/2000 +2025-02-27 04:39:43,206 Current Learning Rate: 0.0072699525 +2025-02-27 04:39:43,207 Train Loss: 2344.5334368, Val Loss: 972.1493523 +2025-02-27 04:39:43,207 Epoch 1271/2000 +2025-02-27 04:39:58,924 Current Learning Rate: 0.0071996958 +2025-02-27 04:39:58,925 Train Loss: 739.4537404, Val Loss: 699.8518880 +2025-02-27 04:39:58,925 Epoch 1272/2000 +2025-02-27 04:40:15,869 Current Learning Rate: 0.0071288965 +2025-02-27 04:40:15,870 Train Loss: 730.2782523, Val Loss: 463.3327096 +2025-02-27 04:40:15,870 Epoch 1273/2000 +2025-02-27 04:40:33,140 Current Learning Rate: 0.0070575718 +2025-02-27 04:40:33,141 Train Loss: 1870.1579388, Val Loss: 2723.8128231 +2025-02-27 04:40:33,141 Epoch 1274/2000 +2025-02-27 04:40:50,684 Current Learning Rate: 0.0069857395 +2025-02-27 04:40:50,685 Train Loss: 768.3705760, Val Loss: 479.5806127 +2025-02-27 04:40:50,685 Epoch 1275/2000 +2025-02-27 04:41:08,075 Current Learning Rate: 0.0069134172 +2025-02-27 04:41:08,076 Train Loss: 590.2489652, Val Loss: 1580.4067110 +2025-02-27 04:41:08,076 Epoch 1276/2000 +2025-02-27 04:41:24,513 Current Learning Rate: 0.0068406228 +2025-02-27 04:41:24,514 Train Loss: 989.9178948, Val Loss: 3946.7457421 +2025-02-27 04:41:24,514 Epoch 1277/2000 +2025-02-27 04:41:41,870 Current Learning Rate: 0.0067673742 +2025-02-27 04:41:41,871 Train Loss: 1239.6518307, Val Loss: 437.7497541 +2025-02-27 04:41:41,871 Epoch 1278/2000 
+2025-02-27 04:41:58,484 Current Learning Rate: 0.0066936896 +2025-02-27 04:41:58,485 Train Loss: 1042.2209963, Val Loss: 417.2072510 +2025-02-27 04:41:58,485 Epoch 1279/2000 +2025-02-27 04:42:15,114 Current Learning Rate: 0.0066195871 +2025-02-27 04:42:15,115 Train Loss: 528.3380254, Val Loss: 392.9320754 +2025-02-27 04:42:15,115 Epoch 1280/2000 +2025-02-27 04:42:32,004 Current Learning Rate: 0.0065450850 +2025-02-27 04:42:32,005 Train Loss: 3586.2147959, Val Loss: 1602.7588722 +2025-02-27 04:42:32,005 Epoch 1281/2000 +2025-02-27 04:42:48,327 Current Learning Rate: 0.0064702016 +2025-02-27 04:42:48,328 Train Loss: 587.6015411, Val Loss: 374.6297141 +2025-02-27 04:42:48,328 Epoch 1282/2000 +2025-02-27 04:43:04,002 Current Learning Rate: 0.0063949555 +2025-02-27 04:43:04,003 Train Loss: 459.4133698, Val Loss: 363.8000815 +2025-02-27 04:43:04,003 Epoch 1283/2000 +2025-02-27 04:43:20,691 Current Learning Rate: 0.0063193652 +2025-02-27 04:43:20,691 Train Loss: 444.9617794, Val Loss: 491.3908293 +2025-02-27 04:43:20,691 Epoch 1284/2000 +2025-02-27 04:43:37,417 Current Learning Rate: 0.0062434494 +2025-02-27 04:43:37,417 Train Loss: 916.9116047, Val Loss: 416.3282913 +2025-02-27 04:43:37,417 Epoch 1285/2000 +2025-02-27 04:43:53,795 Current Learning Rate: 0.0061672268 +2025-02-27 04:43:53,796 Train Loss: 456.6989052, Val Loss: 486.8719171 +2025-02-27 04:43:53,797 Epoch 1286/2000 +2025-02-27 04:44:11,629 Current Learning Rate: 0.0060907162 +2025-02-27 04:44:11,630 Train Loss: 1963.9231899, Val Loss: 1945.3943554 +2025-02-27 04:44:11,630 Epoch 1287/2000 +2025-02-27 04:44:29,117 Current Learning Rate: 0.0060139365 +2025-02-27 04:44:29,118 Train Loss: 865.2724385, Val Loss: 332.1300171 +2025-02-27 04:44:29,118 Epoch 1288/2000 +2025-02-27 04:44:46,977 Current Learning Rate: 0.0059369066 +2025-02-27 04:44:46,978 Train Loss: 408.2584953, Val Loss: 331.1800011 +2025-02-27 04:44:46,978 Epoch 1289/2000 +2025-02-27 04:45:05,863 Current Learning Rate: 0.0058596455 +2025-02-27 
04:45:05,864 Train Loss: 397.7110592, Val Loss: 326.4091963 +2025-02-27 04:45:05,864 Epoch 1290/2000 +2025-02-27 04:45:24,955 Current Learning Rate: 0.0057821723 +2025-02-27 04:45:24,956 Train Loss: 466.7456674, Val Loss: 578.4264751 +2025-02-27 04:45:24,956 Epoch 1291/2000 +2025-02-27 04:45:41,873 Current Learning Rate: 0.0057045062 +2025-02-27 04:45:41,873 Train Loss: 728.3722642, Val Loss: 309.5480013 +2025-02-27 04:45:41,874 Epoch 1292/2000 +2025-02-27 04:45:58,803 Current Learning Rate: 0.0056266662 +2025-02-27 04:45:58,803 Train Loss: 449.0025966, Val Loss: 611.5899198 +2025-02-27 04:45:58,803 Epoch 1293/2000 +2025-02-27 04:46:14,572 Current Learning Rate: 0.0055486716 +2025-02-27 04:46:14,572 Train Loss: 790.7143015, Val Loss: 322.7146477 +2025-02-27 04:46:14,573 Epoch 1294/2000 +2025-02-27 04:46:30,532 Current Learning Rate: 0.0054705416 +2025-02-27 04:46:30,532 Train Loss: 376.9084258, Val Loss: 276.5942917 +2025-02-27 04:46:30,533 Epoch 1295/2000 +2025-02-27 04:46:46,851 Current Learning Rate: 0.0053922955 +2025-02-27 04:46:46,852 Train Loss: 625.1796719, Val Loss: 297.7183544 +2025-02-27 04:46:46,853 Epoch 1296/2000 +2025-02-27 04:47:03,703 Current Learning Rate: 0.0053139526 +2025-02-27 04:47:03,704 Train Loss: 465.1133516, Val Loss: 317.8924605 +2025-02-27 04:47:03,704 Epoch 1297/2000 +2025-02-27 04:47:22,095 Current Learning Rate: 0.0052355323 +2025-02-27 04:47:22,095 Train Loss: 912.2695627, Val Loss: 277.3664295 +2025-02-27 04:47:22,096 Epoch 1298/2000 +2025-02-27 04:47:39,973 Current Learning Rate: 0.0051570538 +2025-02-27 04:47:39,974 Train Loss: 331.6126397, Val Loss: 258.0092173 +2025-02-27 04:47:39,974 Epoch 1299/2000 +2025-02-27 04:47:57,517 Current Learning Rate: 0.0050785366 +2025-02-27 04:47:57,518 Train Loss: 1376.3220906, Val Loss: 322.4446496 +2025-02-27 04:47:57,518 Epoch 1300/2000 +2025-02-27 04:48:14,974 Current Learning Rate: 0.0050000000 +2025-02-27 04:48:14,975 Train Loss: 344.5111361, Val Loss: 245.2419517 +2025-02-27 04:48:14,975 
Epoch 1301/2000 +2025-02-27 04:48:31,786 Current Learning Rate: 0.0049214634 +2025-02-27 04:48:31,787 Train Loss: 353.7423101, Val Loss: 312.4082364 +2025-02-27 04:48:31,787 Epoch 1302/2000 +2025-02-27 04:48:47,698 Current Learning Rate: 0.0048429462 +2025-02-27 04:48:47,699 Train Loss: 858.5889572, Val Loss: 2874.6127490 +2025-02-27 04:48:47,699 Epoch 1303/2000 +2025-02-27 04:49:04,207 Current Learning Rate: 0.0047644677 +2025-02-27 04:49:04,207 Train Loss: 531.5075479, Val Loss: 233.9024127 +2025-02-27 04:49:04,207 Epoch 1304/2000 +2025-02-27 04:49:21,978 Current Learning Rate: 0.0046860474 +2025-02-27 04:49:21,978 Train Loss: 343.0624353, Val Loss: 246.3080918 +2025-02-27 04:49:21,979 Epoch 1305/2000 +2025-02-27 04:49:39,595 Current Learning Rate: 0.0046077045 +2025-02-27 04:49:39,596 Train Loss: 310.9042618, Val Loss: 300.1364336 +2025-02-27 04:49:39,596 Epoch 1306/2000 +2025-02-27 04:49:57,373 Current Learning Rate: 0.0045294584 +2025-02-27 04:49:57,373 Train Loss: 566.3855657, Val Loss: 306.4633646 +2025-02-27 04:49:57,374 Epoch 1307/2000 +2025-02-27 04:50:14,918 Current Learning Rate: 0.0044513284 +2025-02-27 04:50:14,919 Train Loss: 310.9173793, Val Loss: 808.3127091 +2025-02-27 04:50:14,919 Epoch 1308/2000 +2025-02-27 04:50:33,511 Current Learning Rate: 0.0043733338 +2025-02-27 04:50:33,511 Train Loss: 368.8146181, Val Loss: 209.4772569 +2025-02-27 04:50:33,512 Epoch 1309/2000 +2025-02-27 04:50:52,527 Current Learning Rate: 0.0042954938 +2025-02-27 04:50:52,528 Train Loss: 1019.3311560, Val Loss: 358.9566820 +2025-02-27 04:50:52,528 Epoch 1310/2000 +2025-02-27 04:51:12,872 Current Learning Rate: 0.0042178277 +2025-02-27 04:51:12,873 Train Loss: 338.9318783, Val Loss: 209.5946137 +2025-02-27 04:51:12,873 Epoch 1311/2000 +2025-02-27 04:51:32,644 Current Learning Rate: 0.0041403545 +2025-02-27 04:51:32,645 Train Loss: 253.5105768, Val Loss: 251.1881807 +2025-02-27 04:51:32,645 Epoch 1312/2000 +2025-02-27 04:51:53,284 Current Learning Rate: 0.0040630934 
+2025-02-27 04:51:53,285 Train Loss: 581.6112533, Val Loss: 254.7488856 +2025-02-27 04:51:53,285 Epoch 1313/2000 +2025-02-27 04:52:11,928 Current Learning Rate: 0.0039860635 +2025-02-27 04:52:11,929 Train Loss: 8576.2925330, Val Loss: 8905.6413473 +2025-02-27 04:52:11,929 Epoch 1314/2000 +2025-02-27 04:52:29,800 Current Learning Rate: 0.0039092838 +2025-02-27 04:52:29,801 Train Loss: 1142.7290583, Val Loss: 254.0709261 +2025-02-27 04:52:29,801 Epoch 1315/2000 +2025-02-27 04:52:48,698 Current Learning Rate: 0.0038327732 +2025-02-27 04:52:48,698 Train Loss: 266.0687220, Val Loss: 209.3364127 +2025-02-27 04:52:48,699 Epoch 1316/2000 +2025-02-27 04:53:06,167 Current Learning Rate: 0.0037565506 +2025-02-27 04:53:06,167 Train Loss: 240.8557260, Val Loss: 196.0470514 +2025-02-27 04:53:06,167 Epoch 1317/2000 +2025-02-27 04:53:24,469 Current Learning Rate: 0.0036806348 +2025-02-27 04:53:24,469 Train Loss: 228.5321244, Val Loss: 192.4735544 +2025-02-27 04:53:24,469 Epoch 1318/2000 +2025-02-27 04:53:41,624 Current Learning Rate: 0.0036050445 +2025-02-27 04:53:41,624 Train Loss: 222.2594487, Val Loss: 181.8147430 +2025-02-27 04:53:41,625 Epoch 1319/2000 +2025-02-27 04:53:58,032 Current Learning Rate: 0.0035297984 +2025-02-27 04:53:58,032 Train Loss: 212.7387581, Val Loss: 176.9247964 +2025-02-27 04:53:58,033 Epoch 1320/2000 +2025-02-27 04:54:14,902 Current Learning Rate: 0.0034549150 +2025-02-27 04:54:14,903 Train Loss: 208.7087796, Val Loss: 170.0287920 +2025-02-27 04:54:14,903 Epoch 1321/2000 +2025-02-27 04:54:31,493 Current Learning Rate: 0.0033804129 +2025-02-27 04:54:31,493 Train Loss: 203.0165371, Val Loss: 167.7379480 +2025-02-27 04:54:31,494 Epoch 1322/2000 +2025-02-27 04:54:47,962 Current Learning Rate: 0.0033063104 +2025-02-27 04:54:47,963 Train Loss: 199.7324428, Val Loss: 169.2941871 +2025-02-27 04:54:47,963 Epoch 1323/2000 +2025-02-27 04:55:03,392 Current Learning Rate: 0.0032326258 +2025-02-27 04:55:03,393 Train Loss: 195.4456076, Val Loss: 162.0291353 
+2025-02-27 04:55:03,393 Epoch 1324/2000 +2025-02-27 04:55:19,302 Current Learning Rate: 0.0031593772 +2025-02-27 04:55:19,302 Train Loss: 196.3118203, Val Loss: 182.4452792 +2025-02-27 04:55:19,303 Epoch 1325/2000 +2025-02-27 04:55:35,318 Current Learning Rate: 0.0030865828 +2025-02-27 04:55:35,319 Train Loss: 188.5327046, Val Loss: 156.7081260 +2025-02-27 04:55:35,319 Epoch 1326/2000 +2025-02-27 04:55:51,175 Current Learning Rate: 0.0030142605 +2025-02-27 04:55:51,176 Train Loss: 185.2318333, Val Loss: 152.2185997 +2025-02-27 04:55:51,176 Epoch 1327/2000 +2025-02-27 04:56:07,148 Current Learning Rate: 0.0029424282 +2025-02-27 04:56:07,148 Train Loss: 181.4423895, Val Loss: 162.0741023 +2025-02-27 04:56:07,149 Epoch 1328/2000 +2025-02-27 04:56:22,525 Current Learning Rate: 0.0028711035 +2025-02-27 04:56:22,526 Train Loss: 179.6023482, Val Loss: 148.1339726 +2025-02-27 04:56:22,526 Epoch 1329/2000 +2025-02-27 04:56:37,723 Current Learning Rate: 0.0028003042 +2025-02-27 04:56:37,723 Train Loss: 178.2601340, Val Loss: 145.1111523 +2025-02-27 04:56:37,724 Epoch 1330/2000 +2025-02-27 04:56:53,905 Current Learning Rate: 0.0027300475 +2025-02-27 04:56:53,906 Train Loss: 174.9060534, Val Loss: 142.6680412 +2025-02-27 04:56:53,906 Epoch 1331/2000 +2025-02-27 04:57:10,861 Current Learning Rate: 0.0026603509 +2025-02-27 04:57:10,862 Train Loss: 171.3314927, Val Loss: 148.8573536 +2025-02-27 04:57:10,862 Epoch 1332/2000 +2025-02-27 04:57:27,404 Current Learning Rate: 0.0025912316 +2025-02-27 04:57:27,404 Train Loss: 170.2925216, Val Loss: 138.4323796 +2025-02-27 04:57:27,404 Epoch 1333/2000 +2025-02-27 04:57:44,540 Current Learning Rate: 0.0025227067 +2025-02-27 04:57:44,541 Train Loss: 165.5403115, Val Loss: 157.1745950 +2025-02-27 04:57:44,541 Epoch 1334/2000 +2025-02-27 04:58:00,949 Current Learning Rate: 0.0024547929 +2025-02-27 04:58:00,950 Train Loss: 173.2289301, Val Loss: 138.1992312 +2025-02-27 04:58:00,950 Epoch 1335/2000 +2025-02-27 04:58:18,796 Current Learning 
Rate: 0.0023875072 +2025-02-27 04:58:18,796 Train Loss: 162.8653492, Val Loss: 135.6514426 +2025-02-27 04:58:18,797 Epoch 1336/2000 +2025-02-27 04:58:36,381 Current Learning Rate: 0.0023208660 +2025-02-27 04:58:36,381 Train Loss: 161.3985178, Val Loss: 146.6391815 +2025-02-27 04:58:36,381 Epoch 1337/2000 +2025-02-27 04:58:53,778 Current Learning Rate: 0.0022548859 +2025-02-27 04:58:53,778 Train Loss: 160.3220902, Val Loss: 131.5442073 +2025-02-27 04:58:53,779 Epoch 1338/2000 +2025-02-27 04:59:12,573 Current Learning Rate: 0.0021895831 +2025-02-27 04:59:12,573 Train Loss: 158.3169162, Val Loss: 128.7063235 +2025-02-27 04:59:12,573 Epoch 1339/2000 +2025-02-27 04:59:29,504 Current Learning Rate: 0.0021249737 +2025-02-27 04:59:29,504 Train Loss: 152.6542824, Val Loss: 126.4738802 +2025-02-27 04:59:29,504 Epoch 1340/2000 +2025-02-27 04:59:45,537 Current Learning Rate: 0.0020610737 +2025-02-27 04:59:45,538 Train Loss: 159.3281918, Val Loss: 124.7480510 +2025-02-27 04:59:45,538 Epoch 1341/2000 +2025-02-27 05:00:01,742 Current Learning Rate: 0.0019978989 +2025-02-27 05:00:01,742 Train Loss: 154.7414653, Val Loss: 130.3597570 +2025-02-27 05:00:01,742 Epoch 1342/2000 +2025-02-27 05:00:17,744 Current Learning Rate: 0.0019354647 +2025-02-27 05:00:17,744 Train Loss: 156.2674934, Val Loss: 121.9320419 +2025-02-27 05:00:17,744 Epoch 1343/2000 +2025-02-27 05:00:33,619 Current Learning Rate: 0.0018737867 +2025-02-27 05:00:33,620 Train Loss: 150.0851268, Val Loss: 121.4172155 +2025-02-27 05:00:33,620 Epoch 1344/2000 +2025-02-27 05:00:49,211 Current Learning Rate: 0.0018128801 +2025-02-27 05:00:49,211 Train Loss: 152.9086962, Val Loss: 120.6275959 +2025-02-27 05:00:49,211 Epoch 1345/2000 +2025-02-27 05:01:05,221 Current Learning Rate: 0.0017527598 +2025-02-27 05:01:05,222 Train Loss: 147.1207428, Val Loss: 143.8229184 +2025-02-27 05:01:05,222 Epoch 1346/2000 +2025-02-27 05:01:21,221 Current Learning Rate: 0.0016934407 +2025-02-27 05:01:21,222 Train Loss: 144.1602196, Val Loss: 
121.4523070 +2025-02-27 05:01:21,222 Epoch 1347/2000 +2025-02-27 05:01:37,332 Current Learning Rate: 0.0016349374 +2025-02-27 05:01:37,333 Train Loss: 140.3404102, Val Loss: 115.6844328 +2025-02-27 05:01:37,333 Epoch 1348/2000 +2025-02-27 05:01:52,638 Current Learning Rate: 0.0015772645 +2025-02-27 05:01:52,638 Train Loss: 140.1215481, Val Loss: 116.7197108 +2025-02-27 05:01:52,639 Epoch 1349/2000 +2025-02-27 05:02:07,902 Current Learning Rate: 0.0015204360 +2025-02-27 05:02:07,903 Train Loss: 138.8960611, Val Loss: 112.7823923 +2025-02-27 05:02:07,903 Epoch 1350/2000 +2025-02-27 05:02:23,375 Current Learning Rate: 0.0014644661 +2025-02-27 05:02:23,375 Train Loss: 137.5647064, Val Loss: 111.6844188 +2025-02-27 05:02:23,375 Epoch 1351/2000 +2025-02-27 05:02:38,968 Current Learning Rate: 0.0014093685 +2025-02-27 05:02:38,968 Train Loss: 136.8295899, Val Loss: 114.3339208 +2025-02-27 05:02:38,968 Epoch 1352/2000 +2025-02-27 05:02:54,434 Current Learning Rate: 0.0013551569 +2025-02-27 05:02:54,435 Train Loss: 132.8826304, Val Loss: 109.4274947 +2025-02-27 05:02:54,435 Epoch 1353/2000 +2025-02-27 05:03:09,821 Current Learning Rate: 0.0013018445 +2025-02-27 05:03:09,821 Train Loss: 131.6807898, Val Loss: 111.9781257 +2025-02-27 05:03:09,822 Epoch 1354/2000 +2025-02-27 05:03:25,340 Current Learning Rate: 0.0012494447 +2025-02-27 05:03:25,341 Train Loss: 131.1798402, Val Loss: 108.4696036 +2025-02-27 05:03:25,341 Epoch 1355/2000 +2025-02-27 05:03:41,406 Current Learning Rate: 0.0011979702 +2025-02-27 05:03:41,407 Train Loss: 129.3185600, Val Loss: 106.6328107 +2025-02-27 05:03:41,407 Epoch 1356/2000 +2025-02-27 05:03:57,191 Current Learning Rate: 0.0011474338 +2025-02-27 05:03:57,191 Train Loss: 127.7598229, Val Loss: 108.1146359 +2025-02-27 05:03:57,191 Epoch 1357/2000 +2025-02-27 05:04:13,624 Current Learning Rate: 0.0010978480 +2025-02-27 05:04:13,625 Train Loss: 126.4173285, Val Loss: 104.4073694 +2025-02-27 05:04:13,625 Epoch 1358/2000 +2025-02-27 05:04:30,100 Current 
Learning Rate: 0.0010492249 +2025-02-27 05:04:30,101 Train Loss: 129.2047104, Val Loss: 103.7442930 +2025-02-27 05:04:30,101 Epoch 1359/2000 +2025-02-27 05:04:46,293 Current Learning Rate: 0.0010015767 +2025-02-27 05:04:46,294 Train Loss: 124.8426384, Val Loss: 102.5542441 +2025-02-27 05:04:46,294 Epoch 1360/2000 +2025-02-27 05:05:02,029 Current Learning Rate: 0.0009549150 +2025-02-27 05:05:02,029 Train Loss: 124.8884733, Val Loss: 101.6038821 +2025-02-27 05:05:02,030 Epoch 1361/2000 +2025-02-27 05:05:17,829 Current Learning Rate: 0.0009092514 +2025-02-27 05:05:17,830 Train Loss: 123.2869691, Val Loss: 101.3737454 +2025-02-27 05:05:17,830 Epoch 1362/2000 +2025-02-27 05:05:33,136 Current Learning Rate: 0.0008645971 +2025-02-27 05:05:33,136 Train Loss: 121.2931329, Val Loss: 102.4218275 +2025-02-27 05:05:33,137 Epoch 1363/2000 +2025-02-27 05:05:48,499 Current Learning Rate: 0.0008209632 +2025-02-27 05:05:48,500 Train Loss: 120.9896303, Val Loss: 99.6908562 +2025-02-27 05:05:48,500 Epoch 1364/2000 +2025-02-27 05:06:04,735 Current Learning Rate: 0.0007783604 +2025-02-27 05:06:04,735 Train Loss: 119.3542488, Val Loss: 99.9431779 +2025-02-27 05:06:04,736 Epoch 1365/2000 +2025-02-27 05:06:21,576 Current Learning Rate: 0.0007367992 +2025-02-27 05:06:21,577 Train Loss: 118.7330799, Val Loss: 98.4059340 +2025-02-27 05:06:21,577 Epoch 1366/2000 +2025-02-27 05:06:38,062 Current Learning Rate: 0.0006962899 +2025-02-27 05:06:38,063 Train Loss: 119.8890410, Val Loss: 97.6108939 +2025-02-27 05:06:38,063 Epoch 1367/2000 +2025-02-27 05:06:54,487 Current Learning Rate: 0.0006568424 +2025-02-27 05:06:54,487 Train Loss: 117.1702470, Val Loss: 98.0485368 +2025-02-27 05:06:54,487 Epoch 1368/2000 +2025-02-27 05:07:10,455 Current Learning Rate: 0.0006184666 +2025-02-27 05:07:10,456 Train Loss: 116.4891244, Val Loss: 96.1364211 +2025-02-27 05:07:10,456 Epoch 1369/2000 +2025-02-27 05:07:26,439 Current Learning Rate: 0.0005811718 +2025-02-27 05:07:26,439 Train Loss: 115.4049210, Val Loss: 
95.9171683 +2025-02-27 05:07:26,440 Epoch 1370/2000 +2025-02-27 05:07:42,284 Current Learning Rate: 0.0005449674 +2025-02-27 05:07:42,285 Train Loss: 114.8481802, Val Loss: 94.9262767 +2025-02-27 05:07:42,285 Epoch 1371/2000 +2025-02-27 05:07:57,947 Current Learning Rate: 0.0005098621 +2025-02-27 05:07:57,948 Train Loss: 113.9814556, Val Loss: 95.2706679 +2025-02-27 05:07:57,948 Epoch 1372/2000 +2025-02-27 05:08:13,015 Current Learning Rate: 0.0004758647 +2025-02-27 05:08:13,015 Train Loss: 112.9830663, Val Loss: 94.8070933 +2025-02-27 05:08:13,015 Epoch 1373/2000 +2025-02-27 05:08:28,519 Current Learning Rate: 0.0004429836 +2025-02-27 05:08:28,520 Train Loss: 112.6148036, Val Loss: 93.8836685 +2025-02-27 05:08:28,520 Epoch 1374/2000 +2025-02-27 05:08:44,568 Current Learning Rate: 0.0004112269 +2025-02-27 05:08:44,568 Train Loss: 112.2761121, Val Loss: 92.7600810 +2025-02-27 05:08:44,569 Epoch 1375/2000 +2025-02-27 05:09:00,580 Current Learning Rate: 0.0003806023 +2025-02-27 05:09:00,581 Train Loss: 111.3097772, Val Loss: 92.5845032 +2025-02-27 05:09:00,581 Epoch 1376/2000 +2025-02-27 05:09:16,728 Current Learning Rate: 0.0003511176 +2025-02-27 05:09:16,729 Train Loss: 110.6916407, Val Loss: 92.3163569 +2025-02-27 05:09:16,729 Epoch 1377/2000 +2025-02-27 05:09:32,386 Current Learning Rate: 0.0003227798 +2025-02-27 05:09:32,386 Train Loss: 110.2610695, Val Loss: 91.9458839 +2025-02-27 05:09:32,386 Epoch 1378/2000 +2025-02-27 05:09:49,113 Current Learning Rate: 0.0002955962 +2025-02-27 05:09:49,114 Train Loss: 109.8298531, Val Loss: 91.2497297 +2025-02-27 05:09:49,114 Epoch 1379/2000 +2025-02-27 05:10:04,732 Current Learning Rate: 0.0002695732 +2025-02-27 05:10:04,732 Train Loss: 109.6257017, Val Loss: 91.2871676 +2025-02-27 05:10:04,733 Epoch 1380/2000 +2025-02-27 05:10:20,319 Current Learning Rate: 0.0002447174 +2025-02-27 05:10:20,319 Train Loss: 109.1018281, Val Loss: 90.5009204 +2025-02-27 05:10:20,319 Epoch 1381/2000 +2025-02-27 05:10:36,216 Current Learning 
Rate: 0.0002210349 +2025-02-27 05:10:36,217 Train Loss: 109.0740879, Val Loss: 90.4618624 +2025-02-27 05:10:36,217 Epoch 1382/2000 +2025-02-27 05:10:52,263 Current Learning Rate: 0.0001985316 +2025-02-27 05:10:52,263 Train Loss: 108.4976224, Val Loss: 90.1984474 +2025-02-27 05:10:52,264 Epoch 1383/2000 +2025-02-27 05:11:07,886 Current Learning Rate: 0.0001772129 +2025-02-27 05:11:07,887 Train Loss: 107.7809883, Val Loss: 89.7944170 +2025-02-27 05:11:07,887 Epoch 1384/2000 +2025-02-27 05:11:23,404 Current Learning Rate: 0.0001570842 +2025-02-27 05:11:23,404 Train Loss: 107.7228620, Val Loss: 89.5384257 +2025-02-27 05:11:23,404 Epoch 1385/2000 +2025-02-27 05:11:39,133 Current Learning Rate: 0.0001381504 +2025-02-27 05:11:39,134 Train Loss: 107.5666645, Val Loss: 89.3102983 +2025-02-27 05:11:39,134 Epoch 1386/2000 +2025-02-27 05:11:54,845 Current Learning Rate: 0.0001204162 +2025-02-27 05:11:54,846 Train Loss: 106.9607244, Val Loss: 89.1063384 +2025-02-27 05:11:54,846 Epoch 1387/2000 +2025-02-27 05:12:10,674 Current Learning Rate: 0.0001038859 +2025-02-27 05:12:10,674 Train Loss: 107.0903847, Val Loss: 89.0008515 +2025-02-27 05:12:10,675 Epoch 1388/2000 +2025-02-27 05:12:26,185 Current Learning Rate: 0.0000885637 +2025-02-27 05:12:26,186 Train Loss: 106.9484583, Val Loss: 88.7732023 +2025-02-27 05:12:26,186 Epoch 1389/2000 +2025-02-27 05:12:41,624 Current Learning Rate: 0.0000744534 +2025-02-27 05:12:41,625 Train Loss: 106.4221239, Val Loss: 88.8542409 +2025-02-27 05:12:41,625 Epoch 1390/2000 +2025-02-27 05:12:57,435 Current Learning Rate: 0.0000615583 +2025-02-27 05:12:57,436 Train Loss: 106.4846777, Val Loss: 88.5201128 +2025-02-27 05:12:57,436 Epoch 1391/2000 +2025-02-27 05:13:14,215 Current Learning Rate: 0.0000498817 +2025-02-27 05:13:14,215 Train Loss: 106.2651274, Val Loss: 88.6360707 +2025-02-27 05:13:14,215 Epoch 1392/2000 +2025-02-27 05:13:29,538 Current Learning Rate: 0.0000394265 +2025-02-27 05:13:29,539 Train Loss: 105.9558389, Val Loss: 88.3204298 
+2025-02-27 05:13:29,539 Epoch 1393/2000 +2025-02-27 05:13:45,018 Current Learning Rate: 0.0000301952 +2025-02-27 05:13:45,019 Train Loss: 105.9584452, Val Loss: 88.4079566 +2025-02-27 05:13:45,019 Epoch 1394/2000 +2025-02-27 05:14:00,053 Current Learning Rate: 0.0000221902 +2025-02-27 05:14:00,053 Train Loss: 106.0009768, Val Loss: 88.2557355 +2025-02-27 05:14:00,053 Epoch 1395/2000 +2025-02-27 05:14:16,180 Current Learning Rate: 0.0000154133 +2025-02-27 05:14:16,180 Train Loss: 105.6980836, Val Loss: 88.2159513 +2025-02-27 05:14:16,181 Epoch 1396/2000 +2025-02-27 05:14:32,068 Current Learning Rate: 0.0000098664 +2025-02-27 05:14:32,068 Train Loss: 105.8227875, Val Loss: 88.0790662 +2025-02-27 05:14:32,069 Epoch 1397/2000 +2025-02-27 05:14:47,574 Current Learning Rate: 0.0000055506 +2025-02-27 05:14:47,574 Train Loss: 106.1183574, Val Loss: 88.2504844 +2025-02-27 05:14:47,575 Epoch 1398/2000 +2025-02-27 05:15:03,811 Current Learning Rate: 0.0000024672 +2025-02-27 05:15:03,812 Train Loss: 105.8732290, Val Loss: 88.1133499 +2025-02-27 05:15:03,812 Epoch 1399/2000 +2025-02-27 05:15:19,859 Current Learning Rate: 0.0000006168 +2025-02-27 05:15:19,860 Train Loss: 105.6262458, Val Loss: 88.2730333 +2025-02-27 05:15:19,860 Epoch 1400/2000 +2025-02-27 05:15:36,012 Current Learning Rate: 0.0000000000 +2025-02-27 05:15:36,012 Train Loss: 105.7385015, Val Loss: 88.2197738 +2025-02-27 05:15:36,013 Epoch 1401/2000 +2025-02-27 05:15:51,806 Current Learning Rate: 0.0000006168 +2025-02-27 05:15:51,806 Train Loss: 105.7959410, Val Loss: 88.2143059 +2025-02-27 05:15:51,806 Epoch 1402/2000 +2025-02-27 05:16:08,086 Current Learning Rate: 0.0000024672 +2025-02-27 05:16:08,087 Train Loss: 105.7574249, Val Loss: 88.0586618 +2025-02-27 05:16:08,087 Epoch 1403/2000 +2025-02-27 05:16:24,096 Current Learning Rate: 0.0000055506 +2025-02-27 05:16:24,096 Train Loss: 106.0006955, Val Loss: 88.2852477 +2025-02-27 05:16:24,096 Epoch 1404/2000 +2025-02-27 05:16:40,252 Current Learning Rate: 
0.0000098664 +2025-02-27 05:16:40,252 Train Loss: 105.7993447, Val Loss: 88.0736369 +2025-02-27 05:16:40,253 Epoch 1405/2000 +2025-02-27 05:16:55,732 Current Learning Rate: 0.0000154133 +2025-02-27 05:16:55,733 Train Loss: 105.8323320, Val Loss: 88.1564992 +2025-02-27 05:16:55,733 Epoch 1406/2000 +2025-02-27 05:17:11,356 Current Learning Rate: 0.0000221902 +2025-02-27 05:17:11,357 Train Loss: 105.9003590, Val Loss: 88.1449114 +2025-02-27 05:17:11,357 Epoch 1407/2000 +2025-02-27 05:17:27,662 Current Learning Rate: 0.0000301952 +2025-02-27 05:17:27,663 Train Loss: 105.6066879, Val Loss: 88.0477809 +2025-02-27 05:17:27,663 Epoch 1408/2000 +2025-02-27 05:17:44,234 Current Learning Rate: 0.0000394265 +2025-02-27 05:17:44,235 Train Loss: 105.4323250, Val Loss: 87.8759780 +2025-02-27 05:17:44,235 Epoch 1409/2000 +2025-02-27 05:18:00,602 Current Learning Rate: 0.0000498817 +2025-02-27 05:18:00,603 Train Loss: 105.4879816, Val Loss: 88.0728350 +2025-02-27 05:18:00,603 Epoch 1410/2000 +2025-02-27 05:18:16,946 Current Learning Rate: 0.0000615583 +2025-02-27 05:18:16,947 Train Loss: 105.6067631, Val Loss: 88.0188092 +2025-02-27 05:18:16,947 Epoch 1411/2000 +2025-02-27 05:18:32,672 Current Learning Rate: 0.0000744534 +2025-02-27 05:18:32,672 Train Loss: 105.2020407, Val Loss: 87.7425531 +2025-02-27 05:18:32,673 Epoch 1412/2000 +2025-02-27 05:18:48,680 Current Learning Rate: 0.0000885637 +2025-02-27 05:18:48,680 Train Loss: 105.1315747, Val Loss: 87.4194722 +2025-02-27 05:18:48,681 Epoch 1413/2000 +2025-02-27 05:19:05,042 Current Learning Rate: 0.0001038859 +2025-02-27 05:19:05,042 Train Loss: 105.3935791, Val Loss: 87.1613707 +2025-02-27 05:19:05,042 Epoch 1414/2000 +2025-02-27 05:19:21,493 Current Learning Rate: 0.0001204162 +2025-02-27 05:19:21,494 Train Loss: 104.4140983, Val Loss: 86.6276918 +2025-02-27 05:19:21,494 Epoch 1415/2000 +2025-02-27 05:19:38,026 Current Learning Rate: 0.0001381504 +2025-02-27 05:19:38,026 Train Loss: 104.1724252, Val Loss: 86.4812371 +2025-02-27 
05:19:38,027 Epoch 1416/2000 +2025-02-27 05:19:54,752 Current Learning Rate: 0.0001570842 +2025-02-27 05:19:54,753 Train Loss: 104.1069542, Val Loss: 86.1820163 +2025-02-27 05:19:54,753 Epoch 1417/2000 +2025-02-27 05:20:10,542 Current Learning Rate: 0.0001772129 +2025-02-27 05:20:10,543 Train Loss: 103.1343767, Val Loss: 86.4469089 +2025-02-27 05:20:10,543 Epoch 1418/2000 +2025-02-27 05:20:26,655 Current Learning Rate: 0.0001985316 +2025-02-27 05:20:26,655 Train Loss: 104.1679971, Val Loss: 85.1450335 +2025-02-27 05:20:26,655 Epoch 1419/2000 +2025-02-27 05:20:42,262 Current Learning Rate: 0.0002210349 +2025-02-27 05:20:42,263 Train Loss: 103.6035952, Val Loss: 88.3886526 +2025-02-27 05:20:42,263 Epoch 1420/2000 +2025-02-27 05:20:58,404 Current Learning Rate: 0.0002447174 +2025-02-27 05:20:58,405 Train Loss: 203.9731291, Val Loss: 84.4575192 +2025-02-27 05:20:58,405 Epoch 1421/2000 +2025-02-27 05:21:14,316 Current Learning Rate: 0.0002695732 +2025-02-27 05:21:14,316 Train Loss: 101.3387581, Val Loss: 83.6140297 +2025-02-27 05:21:14,317 Epoch 1422/2000 +2025-02-27 05:21:29,810 Current Learning Rate: 0.0002955962 +2025-02-27 05:21:29,811 Train Loss: 100.7886206, Val Loss: 81.9852880 +2025-02-27 05:21:29,811 Epoch 1423/2000 +2025-02-27 05:21:45,638 Current Learning Rate: 0.0003227798 +2025-02-27 05:21:45,639 Train Loss: 208.7372951, Val Loss: 81.1421296 +2025-02-27 05:21:45,640 Epoch 1424/2000 +2025-02-27 05:22:01,855 Current Learning Rate: 0.0003511176 +2025-02-27 05:22:01,857 Train Loss: 100.7760245, Val Loss: 80.5177400 +2025-02-27 05:22:01,858 Epoch 1425/2000 +2025-02-27 05:22:18,728 Current Learning Rate: 0.0003806023 +2025-02-27 05:22:18,728 Train Loss: 246.5141124, Val Loss: 79.7023504 +2025-02-27 05:22:18,729 Epoch 1426/2000 +2025-02-27 05:22:34,871 Current Learning Rate: 0.0004112269 +2025-02-27 05:22:34,872 Train Loss: 99.9716959, Val Loss: 81.3344810 +2025-02-27 05:22:34,872 Epoch 1427/2000 +2025-02-27 05:22:50,222 Current Learning Rate: 0.0004429836 
+2025-02-27 05:22:50,223 Train Loss: 219.6369486, Val Loss: 78.4895507 +2025-02-27 05:22:50,223 Epoch 1428/2000 +2025-02-27 05:23:06,527 Current Learning Rate: 0.0004758647 +2025-02-27 05:23:06,528 Train Loss: 109.9277891, Val Loss: 106.8457239 +2025-02-27 05:23:06,528 Epoch 1429/2000 +2025-02-27 05:23:22,704 Current Learning Rate: 0.0005098621 +2025-02-27 05:23:22,704 Train Loss: 549.0840002, Val Loss: 96.2465859 +2025-02-27 05:23:22,704 Epoch 1430/2000 +2025-02-27 05:23:38,115 Current Learning Rate: 0.0005449674 +2025-02-27 05:23:38,115 Train Loss: 97.0843994, Val Loss: 74.3158449 +2025-02-27 05:23:38,115 Epoch 1431/2000 +2025-02-27 05:23:53,689 Current Learning Rate: 0.0005811718 +2025-02-27 05:23:53,689 Train Loss: 91.7960888, Val Loss: 75.5668028 +2025-02-27 05:23:53,689 Epoch 1432/2000 +2025-02-27 05:24:10,043 Current Learning Rate: 0.0006184666 +2025-02-27 05:24:10,045 Train Loss: 548.6974397, Val Loss: 75.4051843 +2025-02-27 05:24:10,045 Epoch 1433/2000 +2025-02-27 05:24:25,851 Current Learning Rate: 0.0006568424 +2025-02-27 05:24:25,851 Train Loss: 90.4439578, Val Loss: 71.9208655 +2025-02-27 05:24:25,852 Epoch 1434/2000 +2025-02-27 05:24:41,450 Current Learning Rate: 0.0006962899 +2025-02-27 05:24:41,450 Train Loss: 125.4855857, Val Loss: 160.0757252 +2025-02-27 05:24:41,451 Epoch 1435/2000 +2025-02-27 05:24:57,445 Current Learning Rate: 0.0007367992 +2025-02-27 05:24:57,446 Train Loss: 708.1418216, Val Loss: 76.0096329 +2025-02-27 05:24:57,446 Epoch 1436/2000 +2025-02-27 05:25:13,510 Current Learning Rate: 0.0007783604 +2025-02-27 05:25:13,511 Train Loss: 85.3347574, Val Loss: 67.0181193 +2025-02-27 05:25:13,511 Epoch 1437/2000 +2025-02-27 05:25:30,588 Current Learning Rate: 0.0008209632 +2025-02-27 05:25:30,588 Train Loss: 114.3789177, Val Loss: 151.6257568 +2025-02-27 05:25:30,589 Epoch 1438/2000 +2025-02-27 05:25:48,032 Current Learning Rate: 0.0008645971 +2025-02-27 05:25:48,032 Train Loss: 536.6044666, Val Loss: 89.2775353 +2025-02-27 05:25:48,032 
Epoch 1439/2000 +2025-02-27 05:26:04,737 Current Learning Rate: 0.0009092514 +2025-02-27 05:26:04,738 Train Loss: 81.1296946, Val Loss: 71.9552338 +2025-02-27 05:26:04,738 Epoch 1440/2000 +2025-02-27 05:26:21,326 Current Learning Rate: 0.0009549150 +2025-02-27 05:26:21,326 Train Loss: 454.6909486, Val Loss: 61.7828347 +2025-02-27 05:26:21,327 Epoch 1441/2000 +2025-02-27 05:26:38,205 Current Learning Rate: 0.0010015767 +2025-02-27 05:26:38,206 Train Loss: 117.6175851, Val Loss: 58.6299226 +2025-02-27 05:26:38,206 Epoch 1442/2000 +2025-02-27 05:26:55,383 Current Learning Rate: 0.0010492249 +2025-02-27 05:26:55,383 Train Loss: 469.0789781, Val Loss: 60.9297102 +2025-02-27 05:26:55,384 Epoch 1443/2000 +2025-02-27 05:27:12,875 Current Learning Rate: 0.0010978480 +2025-02-27 05:27:12,876 Train Loss: 102.4332377, Val Loss: 77.0733331 +2025-02-27 05:27:12,876 Epoch 1444/2000 +2025-02-27 05:27:30,266 Current Learning Rate: 0.0011474338 +2025-02-27 05:27:30,267 Train Loss: 141.3887787, Val Loss: 52.2391980 +2025-02-27 05:27:30,267 Epoch 1445/2000 +2025-02-27 05:27:47,037 Current Learning Rate: 0.0011979702 +2025-02-27 05:27:47,038 Train Loss: 2207.2935111, Val Loss: 115.2021689 +2025-02-27 05:27:47,038 Epoch 1446/2000 +2025-02-27 05:28:03,691 Current Learning Rate: 0.0012494447 +2025-02-27 05:28:03,693 Train Loss: 121.6917799, Val Loss: 55.3308261 +2025-02-27 05:28:03,694 Epoch 1447/2000 +2025-02-27 05:28:19,485 Current Learning Rate: 0.0013018445 +2025-02-27 05:28:19,486 Train Loss: 63.7448495, Val Loss: 48.4392442 +2025-02-27 05:28:19,486 Epoch 1448/2000 +2025-02-27 05:28:35,344 Current Learning Rate: 0.0013551569 +2025-02-27 05:28:35,344 Train Loss: 58.2348718, Val Loss: 47.1374103 +2025-02-27 05:28:35,345 Epoch 1449/2000 +2025-02-27 05:28:52,331 Current Learning Rate: 0.0014093685 +2025-02-27 05:28:52,331 Train Loss: 57.3293936, Val Loss: 48.1736180 +2025-02-27 05:28:52,332 Epoch 1450/2000 +2025-02-27 05:29:08,767 Current Learning Rate: 0.0014644661 +2025-02-27 
05:29:08,768 Train Loss: 275.6388378, Val Loss: 47.4901713 +2025-02-27 05:29:08,768 Epoch 1451/2000 +2025-02-27 05:29:26,380 Current Learning Rate: 0.0015204360 +2025-02-27 05:29:26,380 Train Loss: 54.0403532, Val Loss: 43.1622812 +2025-02-27 05:29:26,381 Epoch 1452/2000 +2025-02-27 05:29:43,327 Current Learning Rate: 0.0015772645 +2025-02-27 05:29:43,327 Train Loss: 94.1107101, Val Loss: 50.0199077 +2025-02-27 05:29:43,327 Epoch 1453/2000 +2025-02-27 05:29:59,734 Current Learning Rate: 0.0016349374 +2025-02-27 05:29:59,734 Train Loss: 58.6185792, Val Loss: 88.3775667 +2025-02-27 05:29:59,735 Epoch 1454/2000 +2025-02-27 05:30:17,237 Current Learning Rate: 0.0016934407 +2025-02-27 05:30:17,238 Train Loss: 394.1706403, Val Loss: 47.0310562 +2025-02-27 05:30:17,238 Epoch 1455/2000 +2025-02-27 05:30:34,281 Current Learning Rate: 0.0017527598 +2025-02-27 05:30:34,281 Train Loss: 51.0606543, Val Loss: 54.2848863 +2025-02-27 05:30:34,282 Epoch 1456/2000 +2025-02-27 05:30:51,997 Current Learning Rate: 0.0018128801 +2025-02-27 05:30:51,997 Train Loss: 142.6481585, Val Loss: 34.4208618 +2025-02-27 05:30:51,999 Epoch 1457/2000 +2025-02-27 05:31:09,033 Current Learning Rate: 0.0018737867 +2025-02-27 05:31:09,034 Train Loss: 115.1960655, Val Loss: 41.7564752 +2025-02-27 05:31:09,034 Epoch 1458/2000 +2025-02-27 05:31:24,743 Current Learning Rate: 0.0019354647 +2025-02-27 05:31:24,743 Train Loss: 95.8822973, Val Loss: 55.1371026 +2025-02-27 05:31:24,744 Epoch 1459/2000 +2025-02-27 05:31:41,125 Current Learning Rate: 0.0019978989 +2025-02-27 05:31:41,125 Train Loss: 180.3967237, Val Loss: 172.2652181 +2025-02-27 05:31:41,126 Epoch 1460/2000 +2025-02-27 05:31:56,615 Current Learning Rate: 0.0020610737 +2025-02-27 05:31:56,616 Train Loss: 108.4611683, Val Loss: 29.0888131 +2025-02-27 05:31:56,617 Epoch 1461/2000 +2025-02-27 05:32:13,137 Current Learning Rate: 0.0021249737 +2025-02-27 05:32:13,138 Train Loss: 104.5781133, Val Loss: 93.0555922 +2025-02-27 05:32:13,138 Epoch 1462/2000 
+2025-02-27 05:32:29,487 Current Learning Rate: 0.0021895831 +2025-02-27 05:32:29,488 Train Loss: 123.1178731, Val Loss: 26.2828855 +2025-02-27 05:32:29,488 Epoch 1463/2000 +2025-02-27 05:32:45,824 Current Learning Rate: 0.0022548859 +2025-02-27 05:32:45,825 Train Loss: 58.1375970, Val Loss: 55.4554374 +2025-02-27 05:32:45,825 Epoch 1464/2000 +2025-02-27 05:33:03,449 Current Learning Rate: 0.0023208660 +2025-02-27 05:33:03,450 Train Loss: 77.1342237, Val Loss: 36.9076137 +2025-02-27 05:33:03,450 Epoch 1465/2000 +2025-02-27 05:33:21,525 Current Learning Rate: 0.0023875072 +2025-02-27 05:33:21,529 Train Loss: 1701791.6735976, Val Loss: 37414.0784672 +2025-02-27 05:33:21,529 Epoch 1466/2000 +2025-02-27 05:33:39,381 Current Learning Rate: 0.0024547929 +2025-02-27 05:33:39,382 Train Loss: 16682.1806745, Val Loss: 6498.8153893 +2025-02-27 05:33:39,382 Epoch 1467/2000 +2025-02-27 05:33:56,381 Current Learning Rate: 0.0025227067 +2025-02-27 05:33:56,382 Train Loss: 6297.6335898, Val Loss: 4378.9331470 +2025-02-27 05:33:56,382 Epoch 1468/2000 +2025-02-27 05:34:13,469 Current Learning Rate: 0.0025912316 +2025-02-27 05:34:13,470 Train Loss: 4524.2082378, Val Loss: 3284.3209683 +2025-02-27 05:34:13,470 Epoch 1469/2000 +2025-02-27 05:34:29,660 Current Learning Rate: 0.0026603509 +2025-02-27 05:34:29,661 Train Loss: 3528.4047385, Val Loss: 2611.0860516 +2025-02-27 05:34:29,661 Epoch 1470/2000 +2025-02-27 05:34:46,675 Current Learning Rate: 0.0027300475 +2025-02-27 05:34:46,676 Train Loss: 2845.7615368, Val Loss: 2146.6395320 +2025-02-27 05:34:46,676 Epoch 1471/2000 +2025-02-27 05:35:03,322 Current Learning Rate: 0.0028003042 +2025-02-27 05:35:03,329 Train Loss: 2391.3879398, Val Loss: 1822.6188032 +2025-02-27 05:35:03,329 Epoch 1472/2000 +2025-02-27 05:35:20,670 Current Learning Rate: 0.0028711035 +2025-02-27 05:35:20,671 Train Loss: 2041.3834978, Val Loss: 1570.5652182 +2025-02-27 05:35:20,671 Epoch 1473/2000 +2025-02-27 05:35:37,537 Current Learning Rate: 0.0029424282 
+2025-02-27 05:35:37,538 Train Loss: 1785.8304372, Val Loss: 1382.8259248 +2025-02-27 05:35:37,539 Epoch 1474/2000 +2025-02-27 05:35:54,022 Current Learning Rate: 0.0030142605 +2025-02-27 05:35:54,023 Train Loss: 1572.3845384, Val Loss: 1219.3366978 +2025-02-27 05:35:54,023 Epoch 1475/2000 +2025-02-27 05:36:10,563 Current Learning Rate: 0.0030865828 +2025-02-27 05:36:10,563 Train Loss: 1407.3147535, Val Loss: 1096.2033341 +2025-02-27 05:36:10,564 Epoch 1476/2000 +2025-02-27 05:36:27,805 Current Learning Rate: 0.0031593772 +2025-02-27 05:36:27,806 Train Loss: 1261.3740501, Val Loss: 990.6461470 +2025-02-27 05:36:27,806 Epoch 1477/2000 +2025-02-27 05:36:45,944 Current Learning Rate: 0.0032326258 +2025-02-27 05:36:45,945 Train Loss: 1138.7777490, Val Loss: 899.0958362 +2025-02-27 05:36:45,945 Epoch 1478/2000 +2025-02-27 05:37:04,823 Current Learning Rate: 0.0033063104 +2025-02-27 05:37:04,824 Train Loss: 1039.9973465, Val Loss: 816.5306845 +2025-02-27 05:37:04,824 Epoch 1479/2000 +2025-02-27 05:37:23,633 Current Learning Rate: 0.0033804129 +2025-02-27 05:37:23,633 Train Loss: 952.5614732, Val Loss: 746.9531606 +2025-02-27 05:37:23,634 Epoch 1480/2000 +2025-02-27 05:37:41,824 Current Learning Rate: 0.0034549150 +2025-02-27 05:37:41,825 Train Loss: 873.1326297, Val Loss: 691.6221036 +2025-02-27 05:37:41,825 Epoch 1481/2000 +2025-02-27 05:37:58,470 Current Learning Rate: 0.0035297984 +2025-02-27 05:37:58,471 Train Loss: 809.5656112, Val Loss: 634.3999962 +2025-02-27 05:37:58,471 Epoch 1482/2000 +2025-02-27 05:38:15,248 Current Learning Rate: 0.0036050445 +2025-02-27 05:38:15,249 Train Loss: 746.1647648, Val Loss: 589.7797151 +2025-02-27 05:38:15,249 Epoch 1483/2000 +2025-02-27 05:38:32,221 Current Learning Rate: 0.0036806348 +2025-02-27 05:38:32,221 Train Loss: 692.2594326, Val Loss: 559.4687951 +2025-02-27 05:38:32,222 Epoch 1484/2000 +2025-02-27 05:38:48,856 Current Learning Rate: 0.0037565506 +2025-02-27 05:38:48,856 Train Loss: 638.2640826, Val Loss: 509.8200844 
+2025-02-27 05:38:48,857 Epoch 1485/2000 +2025-02-27 05:39:05,466 Current Learning Rate: 0.0038327732 +2025-02-27 05:39:05,467 Train Loss: 599.7424726, Val Loss: 475.1178766 +2025-02-27 05:39:05,467 Epoch 1486/2000 +2025-02-27 05:39:22,393 Current Learning Rate: 0.0039092838 +2025-02-27 05:39:22,393 Train Loss: 562.7565622, Val Loss: 449.0855656 +2025-02-27 05:39:22,394 Epoch 1487/2000 +2025-02-27 05:39:38,872 Current Learning Rate: 0.0039860635 +2025-02-27 05:39:38,872 Train Loss: 524.2287035, Val Loss: 414.6711646 +2025-02-27 05:39:38,872 Epoch 1488/2000 +2025-02-27 05:39:55,277 Current Learning Rate: 0.0040630934 +2025-02-27 05:39:55,278 Train Loss: 491.2170634, Val Loss: 389.7338570 +2025-02-27 05:39:55,278 Epoch 1489/2000 +2025-02-27 05:40:11,816 Current Learning Rate: 0.0041403545 +2025-02-27 05:40:11,817 Train Loss: 462.9764301, Val Loss: 367.1346147 +2025-02-27 05:40:11,817 Epoch 1490/2000 +2025-02-27 05:40:28,401 Current Learning Rate: 0.0042178277 +2025-02-27 05:40:28,402 Train Loss: 433.2708161, Val Loss: 346.7693127 +2025-02-27 05:40:28,403 Epoch 1491/2000 +2025-02-27 05:40:44,813 Current Learning Rate: 0.0042954938 +2025-02-27 05:40:44,814 Train Loss: 408.7048214, Val Loss: 324.9494789 +2025-02-27 05:40:44,814 Epoch 1492/2000 +2025-02-27 05:41:01,501 Current Learning Rate: 0.0043733338 +2025-02-27 05:41:01,502 Train Loss: 385.0684797, Val Loss: 303.3943507 +2025-02-27 05:41:01,502 Epoch 1493/2000 +2025-02-27 05:41:18,650 Current Learning Rate: 0.0044513284 +2025-02-27 05:41:18,651 Train Loss: 367.1701536, Val Loss: 285.4783552 +2025-02-27 05:41:18,651 Epoch 1494/2000 +2025-02-27 05:41:35,283 Current Learning Rate: 0.0045294584 +2025-02-27 05:41:35,284 Train Loss: 342.1814768, Val Loss: 273.8547369 +2025-02-27 05:41:35,284 Epoch 1495/2000 +2025-02-27 05:41:52,060 Current Learning Rate: 0.0046077045 +2025-02-27 05:41:52,061 Train Loss: 325.1866667, Val Loss: 266.5458972 +2025-02-27 05:41:52,061 Epoch 1496/2000 +2025-02-27 05:42:08,384 Current Learning 
Rate: 0.0046860474 +2025-02-27 05:42:08,385 Train Loss: 311.2508287, Val Loss: 244.2927347 +2025-02-27 05:42:08,385 Epoch 1497/2000 +2025-02-27 05:42:25,330 Current Learning Rate: 0.0047644677 +2025-02-27 05:42:25,330 Train Loss: 292.7801015, Val Loss: 229.4688605 +2025-02-27 05:42:25,331 Epoch 1498/2000 +2025-02-27 05:42:41,682 Current Learning Rate: 0.0048429462 +2025-02-27 05:42:41,682 Train Loss: 276.5976791, Val Loss: 215.5315821 +2025-02-27 05:42:41,682 Epoch 1499/2000 +2025-02-27 05:42:58,041 Current Learning Rate: 0.0049214634 +2025-02-27 05:42:58,041 Train Loss: 261.7291685, Val Loss: 206.8106799 +2025-02-27 05:42:58,042 Epoch 1500/2000 +2025-02-27 05:43:14,742 Current Learning Rate: 0.0050000000 +2025-02-27 05:43:14,743 Train Loss: 250.0908217, Val Loss: 194.7179463 +2025-02-27 05:43:14,743 Epoch 1501/2000 +2025-02-27 05:43:31,252 Current Learning Rate: 0.0050785366 +2025-02-27 05:43:31,253 Train Loss: 236.9055879, Val Loss: 188.9574869 +2025-02-27 05:43:31,253 Epoch 1502/2000 +2025-02-27 05:43:48,669 Current Learning Rate: 0.0051570538 +2025-02-27 05:43:48,670 Train Loss: 226.8524535, Val Loss: 176.2919702 +2025-02-27 05:43:48,670 Epoch 1503/2000 +2025-02-27 05:44:05,250 Current Learning Rate: 0.0052355323 +2025-02-27 05:44:05,251 Train Loss: 214.8605534, Val Loss: 170.3141662 +2025-02-27 05:44:05,251 Epoch 1504/2000 +2025-02-27 05:44:21,913 Current Learning Rate: 0.0053139526 +2025-02-27 05:44:21,913 Train Loss: 204.8594334, Val Loss: 163.1333727 +2025-02-27 05:44:21,913 Epoch 1505/2000 +2025-02-27 05:44:38,539 Current Learning Rate: 0.0053922955 +2025-02-27 05:44:38,539 Train Loss: 198.1469188, Val Loss: 157.7128894 +2025-02-27 05:44:38,540 Epoch 1506/2000 +2025-02-27 05:44:57,267 Current Learning Rate: 0.0054705416 +2025-02-27 05:44:57,268 Train Loss: 189.9298729, Val Loss: 147.8035601 +2025-02-27 05:44:57,268 Epoch 1507/2000 +2025-02-27 05:45:15,306 Current Learning Rate: 0.0055486716 +2025-02-27 05:45:15,306 Train Loss: 179.9826428, Val Loss: 
143.7139967 +2025-02-27 05:45:15,306 Epoch 1508/2000 +2025-02-27 05:45:33,036 Current Learning Rate: 0.0056266662 +2025-02-27 05:45:33,037 Train Loss: 174.2686434, Val Loss: 134.0274144 +2025-02-27 05:45:33,037 Epoch 1509/2000 +2025-02-27 05:45:49,984 Current Learning Rate: 0.0057045062 +2025-02-27 05:45:49,984 Train Loss: 163.1165799, Val Loss: 131.7367576 +2025-02-27 05:45:49,985 Epoch 1510/2000 +2025-02-27 05:46:07,574 Current Learning Rate: 0.0057821723 +2025-02-27 05:46:07,574 Train Loss: 159.6644883, Val Loss: 123.1904249 +2025-02-27 05:46:07,574 Epoch 1511/2000 +2025-02-27 05:46:24,280 Current Learning Rate: 0.0058596455 +2025-02-27 05:46:24,280 Train Loss: 154.0790997, Val Loss: 117.5295300 +2025-02-27 05:46:24,281 Epoch 1512/2000 +2025-02-27 05:46:41,339 Current Learning Rate: 0.0059369066 +2025-02-27 05:46:41,339 Train Loss: 195.9505908, Val Loss: 171.8943530 +2025-02-27 05:46:41,340 Epoch 1513/2000 +2025-02-27 05:46:57,886 Current Learning Rate: 0.0060139365 +2025-02-27 05:46:57,887 Train Loss: 858.5030875, Val Loss: 142.8908893 +2025-02-27 05:46:57,887 Epoch 1514/2000 +2025-02-27 05:47:16,045 Current Learning Rate: 0.0060907162 +2025-02-27 05:47:16,046 Train Loss: 135.2925049, Val Loss: 107.2389596 +2025-02-27 05:47:16,046 Epoch 1515/2000 +2025-02-27 05:47:33,666 Current Learning Rate: 0.0061672268 +2025-02-27 05:47:33,667 Train Loss: 546.1696015, Val Loss: 165.7989606 +2025-02-27 05:47:33,668 Epoch 1516/2000 +2025-02-27 05:47:50,737 Current Learning Rate: 0.0062434494 +2025-02-27 05:47:50,737 Train Loss: 223.3322741, Val Loss: 95.7364879 +2025-02-27 05:47:50,737 Epoch 1517/2000 +2025-02-27 05:48:08,101 Current Learning Rate: 0.0063193652 +2025-02-27 05:48:08,102 Train Loss: 645.2427406, Val Loss: 724.7572019 +2025-02-27 05:48:08,102 Epoch 1518/2000 +2025-02-27 05:48:25,806 Current Learning Rate: 0.0063949555 +2025-02-27 05:48:25,806 Train Loss: 194.5521151, Val Loss: 103.2761058 +2025-02-27 05:48:25,807 Epoch 1519/2000 +2025-02-27 05:48:43,123 Current 
Learning Rate: 0.0064702016 +2025-02-27 05:48:43,124 Train Loss: 117.4258218, Val Loss: 85.6481518 +2025-02-27 05:48:43,124 Epoch 1520/2000 +2025-02-27 05:49:00,945 Current Learning Rate: 0.0065450850 +2025-02-27 05:49:00,946 Train Loss: 2860.0337047, Val Loss: 203.5752405 +2025-02-27 05:49:00,946 Epoch 1521/2000 +2025-02-27 05:49:17,546 Current Learning Rate: 0.0066195871 +2025-02-27 05:49:17,547 Train Loss: 147.0669782, Val Loss: 80.2547468 +2025-02-27 05:49:17,547 Epoch 1522/2000 +2025-02-27 05:49:34,466 Current Learning Rate: 0.0066936896 +2025-02-27 05:49:34,467 Train Loss: 96.6130890, Val Loss: 76.5552872 +2025-02-27 05:49:34,467 Epoch 1523/2000 +2025-02-27 05:49:52,473 Current Learning Rate: 0.0067673742 +2025-02-27 05:49:52,473 Train Loss: 90.6240981, Val Loss: 69.5133820 +2025-02-27 05:49:52,474 Epoch 1524/2000 +2025-02-27 05:50:11,166 Current Learning Rate: 0.0068406228 +2025-02-27 05:50:11,166 Train Loss: 83.7621296, Val Loss: 65.1104261 +2025-02-27 05:50:11,167 Epoch 1525/2000 +2025-02-27 05:50:29,500 Current Learning Rate: 0.0069134172 +2025-02-27 05:50:29,501 Train Loss: 79.6546922, Val Loss: 60.7792459 +2025-02-27 05:50:29,501 Epoch 1526/2000 +2025-02-27 05:50:46,959 Current Learning Rate: 0.0069857395 +2025-02-27 05:50:46,959 Train Loss: 78.9835899, Val Loss: 57.2606887 +2025-02-27 05:50:46,959 Epoch 1527/2000 +2025-02-27 05:51:03,742 Current Learning Rate: 0.0070575718 +2025-02-27 05:51:03,743 Train Loss: 78.1975995, Val Loss: 55.1733889 +2025-02-27 05:51:03,743 Epoch 1528/2000 +2025-02-27 05:51:20,910 Current Learning Rate: 0.0071288965 +2025-02-27 05:51:20,911 Train Loss: 85.5483089, Val Loss: 63.2256642 +2025-02-27 05:51:20,911 Epoch 1529/2000 +2025-02-27 05:51:37,859 Current Learning Rate: 0.0071996958 +2025-02-27 05:51:37,859 Train Loss: 460.3097268, Val Loss: 62.2491373 +2025-02-27 05:51:37,860 Epoch 1530/2000 +2025-02-27 05:51:54,182 Current Learning Rate: 0.0072699525 +2025-02-27 05:51:54,182 Train Loss: 72.5886153, Val Loss: 49.5018380 
+2025-02-27 05:51:54,183 Epoch 1531/2000 +2025-02-27 05:52:11,507 Current Learning Rate: 0.0073396491 +2025-02-27 05:52:11,507 Train Loss: 99.2173642, Val Loss: 156.1908330 +2025-02-27 05:52:11,507 Epoch 1532/2000 +2025-02-27 05:52:28,760 Current Learning Rate: 0.0074087684 +2025-02-27 05:52:28,761 Train Loss: 490.0269152, Val Loss: 47.8879148 +2025-02-27 05:52:28,761 Epoch 1533/2000 +2025-02-27 05:52:45,261 Current Learning Rate: 0.0074772933 +2025-02-27 05:52:45,261 Train Loss: 66.6125041, Val Loss: 47.6897443 +2025-02-27 05:52:45,261 Epoch 1534/2000 +2025-02-27 05:53:00,920 Current Learning Rate: 0.0075452071 +2025-02-27 05:53:00,921 Train Loss: 58.7829442, Val Loss: 41.1719069 +2025-02-27 05:53:00,921 Epoch 1535/2000 +2025-02-27 05:53:18,218 Current Learning Rate: 0.0076124928 +2025-02-27 05:53:18,218 Train Loss: 683.3333517, Val Loss: 78.1594678 +2025-02-27 05:53:18,218 Epoch 1536/2000 +2025-02-27 05:53:35,213 Current Learning Rate: 0.0076791340 +2025-02-27 05:53:35,213 Train Loss: 56.8061423, Val Loss: 40.5762365 +2025-02-27 05:53:35,214 Epoch 1537/2000 +2025-02-27 05:53:51,730 Current Learning Rate: 0.0077451141 +2025-02-27 05:53:51,731 Train Loss: 48.9690453, Val Loss: 38.0999140 +2025-02-27 05:53:51,731 Epoch 1538/2000 +2025-02-27 05:54:08,363 Current Learning Rate: 0.0078104169 +2025-02-27 05:54:08,364 Train Loss: 52.4181246, Val Loss: 33.8900037 +2025-02-27 05:54:08,364 Epoch 1539/2000 +2025-02-27 05:54:24,659 Current Learning Rate: 0.0078750263 +2025-02-27 05:54:24,660 Train Loss: 118.7815441, Val Loss: 44.3602543 +2025-02-27 05:54:24,660 Epoch 1540/2000 +2025-02-27 05:54:41,135 Current Learning Rate: 0.0079389263 +2025-02-27 05:54:41,136 Train Loss: 411.1079471, Val Loss: 65.1561691 +2025-02-27 05:54:41,136 Epoch 1541/2000 +2025-02-27 05:54:57,595 Current Learning Rate: 0.0080021011 +2025-02-27 05:54:57,595 Train Loss: 85.6840439, Val Loss: 36.4153548 +2025-02-27 05:54:57,595 Epoch 1542/2000 +2025-02-27 05:55:14,108 Current Learning Rate: 0.0080645353 
+2025-02-27 05:55:14,108 Train Loss: 42.3124568, Val Loss: 34.7884701 +2025-02-27 05:55:14,108 Epoch 1543/2000 +2025-02-27 05:55:30,538 Current Learning Rate: 0.0081262133 +2025-02-27 05:55:30,539 Train Loss: 37.5374630, Val Loss: 29.3886097 +2025-02-27 05:55:30,539 Epoch 1544/2000 +2025-02-27 05:55:46,900 Current Learning Rate: 0.0081871199 +2025-02-27 05:55:46,900 Train Loss: 182.2165082, Val Loss: 30.4988477 +2025-02-27 05:55:46,901 Epoch 1545/2000 +2025-02-27 05:56:03,635 Current Learning Rate: 0.0082472402 +2025-02-27 05:56:03,636 Train Loss: 36.7765723, Val Loss: 47.0719297 +2025-02-27 05:56:03,636 Epoch 1546/2000 +2025-02-27 05:56:20,330 Current Learning Rate: 0.0083065593 +2025-02-27 05:56:20,330 Train Loss: 178.2157589, Val Loss: 41.8049373 +2025-02-27 05:56:20,331 Epoch 1547/2000 +2025-02-27 05:56:36,399 Current Learning Rate: 0.0083650626 +2025-02-27 05:56:36,400 Train Loss: 49.5024733, Val Loss: 55.6837452 +2025-02-27 05:56:36,400 Epoch 1548/2000 +2025-02-27 05:56:52,583 Current Learning Rate: 0.0084227355 +2025-02-27 05:56:52,584 Train Loss: 53.9119487, Val Loss: 66.7445778 +2025-02-27 05:56:52,584 Epoch 1549/2000 +2025-02-27 05:57:10,641 Current Learning Rate: 0.0084795640 +2025-02-27 05:57:10,642 Train Loss: 311.0628784, Val Loss: 24.3290001 +2025-02-27 05:57:10,642 Epoch 1550/2000 +2025-02-27 05:57:27,903 Current Learning Rate: 0.0085355339 +2025-02-27 05:57:27,903 Train Loss: 32.0728369, Val Loss: 22.3033120 +2025-02-27 05:57:27,904 Epoch 1551/2000 +2025-02-27 05:57:45,286 Current Learning Rate: 0.0085906315 +2025-02-27 05:57:45,287 Train Loss: 31.4498821, Val Loss: 21.5105693 +2025-02-27 05:57:45,287 Epoch 1552/2000 +2025-02-27 05:58:03,007 Current Learning Rate: 0.0086448431 +2025-02-27 05:58:03,008 Train Loss: 106.4533998, Val Loss: 19.7528900 +2025-02-27 05:58:03,008 Epoch 1553/2000 +2025-02-27 05:58:21,091 Current Learning Rate: 0.0086981555 +2025-02-27 05:58:21,092 Train Loss: 33.0151148, Val Loss: 21.0149475 +2025-02-27 05:58:21,092 Epoch 
1554/2000 +2025-02-27 05:58:39,380 Current Learning Rate: 0.0087505553 +2025-02-27 05:58:39,380 Train Loss: 103.5372942, Val Loss: 24.9657687 +2025-02-27 05:58:39,380 Epoch 1555/2000 +2025-02-27 05:58:56,854 Current Learning Rate: 0.0088020298 +2025-02-27 05:58:56,854 Train Loss: 89.6003443, Val Loss: 121.8243453 +2025-02-27 05:58:56,854 Epoch 1556/2000 +2025-02-27 05:59:14,282 Current Learning Rate: 0.0088525662 +2025-02-27 05:59:14,283 Train Loss: 42.3015402, Val Loss: 16.7124679 +2025-02-27 05:59:14,283 Epoch 1557/2000 +2025-02-27 05:59:30,508 Current Learning Rate: 0.0089021520 +2025-02-27 05:59:30,509 Train Loss: 37.4586125, Val Loss: 16.3290829 +2025-02-27 05:59:30,509 Epoch 1558/2000 +2025-02-27 05:59:46,731 Current Learning Rate: 0.0089507751 +2025-02-27 05:59:46,731 Train Loss: 147.4480625, Val Loss: 24.5013461 +2025-02-27 05:59:46,731 Epoch 1559/2000 +2025-02-27 06:00:03,773 Current Learning Rate: 0.0089984233 +2025-02-27 06:00:03,774 Train Loss: 25.8257150, Val Loss: 20.5190546 +2025-02-27 06:00:03,774 Epoch 1560/2000 +2025-02-27 06:00:20,605 Current Learning Rate: 0.0090450850 +2025-02-27 06:00:20,605 Train Loss: 33.1002538, Val Loss: 13.7546534 +2025-02-27 06:00:20,605 Epoch 1561/2000 +2025-02-27 06:00:37,141 Current Learning Rate: 0.0090907486 +2025-02-27 06:00:37,141 Train Loss: 43.0126172, Val Loss: 29.8747532 +2025-02-27 06:00:37,141 Epoch 1562/2000 +2025-02-27 06:00:53,458 Current Learning Rate: 0.0091354029 +2025-02-27 06:00:53,459 Train Loss: 45.1555461, Val Loss: 28.2833699 +2025-02-27 06:00:53,459 Epoch 1563/2000 +2025-02-27 06:01:10,262 Current Learning Rate: 0.0091790368 +2025-02-27 06:01:10,263 Train Loss: 67.3172516, Val Loss: 68.7110162 +2025-02-27 06:01:10,263 Epoch 1564/2000 +2025-02-27 06:01:26,570 Current Learning Rate: 0.0092216396 +2025-02-27 06:01:26,570 Train Loss: 28.0326422, Val Loss: 29.6892368 +2025-02-27 06:01:26,570 Epoch 1565/2000 +2025-02-27 06:01:42,931 Current Learning Rate: 0.0092632008 +2025-02-27 06:01:42,932 Train 
Loss: 55.4800179, Val Loss: 204.8606349 +2025-02-27 06:01:42,932 Epoch 1566/2000 +2025-02-27 06:01:58,265 Current Learning Rate: 0.0093037101 +2025-02-27 06:01:58,265 Train Loss: 40.2674696, Val Loss: 12.5903426 +2025-02-27 06:01:58,266 Epoch 1567/2000 +2025-02-27 06:02:13,974 Current Learning Rate: 0.0093431576 +2025-02-27 06:02:13,974 Train Loss: 31.5029763, Val Loss: 10.7271158 +2025-02-27 06:02:13,974 Epoch 1568/2000 +2025-02-27 06:02:29,404 Current Learning Rate: 0.0093815334 +2025-02-27 06:02:29,405 Train Loss: 47.4682467, Val Loss: 10.4582643 +2025-02-27 06:02:29,405 Epoch 1569/2000 +2025-02-27 06:02:44,652 Current Learning Rate: 0.0094188282 +2025-02-27 06:02:44,653 Train Loss: 63.6657312, Val Loss: 21.2564930 +2025-02-27 06:02:44,653 Epoch 1570/2000 +2025-02-27 06:02:59,657 Current Learning Rate: 0.0094550326 +2025-02-27 06:02:59,657 Train Loss: 18.0693974, Val Loss: 17.2902596 +2025-02-27 06:02:59,657 Epoch 1571/2000 +2025-02-27 06:03:15,334 Current Learning Rate: 0.0094901379 +2025-02-27 06:03:15,334 Train Loss: 41.6236176, Val Loss: 26.3981209 +2025-02-27 06:03:15,334 Epoch 1572/2000 +2025-02-27 06:03:31,040 Current Learning Rate: 0.0095241353 +2025-02-27 06:03:31,041 Train Loss: 21.9603710, Val Loss: 9.0777089 +2025-02-27 06:03:31,041 Epoch 1573/2000 +2025-02-27 06:03:46,910 Current Learning Rate: 0.0095570164 +2025-02-27 06:03:46,910 Train Loss: 26.6854979, Val Loss: 21.1496465 +2025-02-27 06:03:46,911 Epoch 1574/2000 +2025-02-27 06:04:02,550 Current Learning Rate: 0.0095887731 +2025-02-27 06:04:02,551 Train Loss: 25.4280652, Val Loss: 13.0405549 +2025-02-27 06:04:02,551 Epoch 1575/2000 +2025-02-27 06:04:18,208 Current Learning Rate: 0.0096193977 +2025-02-27 06:04:18,209 Train Loss: 102.9687788, Val Loss: 57.2582859 +2025-02-27 06:04:18,209 Epoch 1576/2000 +2025-02-27 06:04:33,142 Current Learning Rate: 0.0096488824 +2025-02-27 06:04:33,142 Train Loss: 387.4091262, Val Loss: 42.5183521 +2025-02-27 06:04:33,143 Epoch 1577/2000 +2025-02-27 06:04:48,858 
Current Learning Rate: 0.0096772202 +2025-02-27 06:04:48,859 Train Loss: 18.5464400, Val Loss: 8.5811901 +2025-02-27 06:04:48,859 Epoch 1578/2000 +2025-02-27 06:05:04,642 Current Learning Rate: 0.0097044038 +2025-02-27 06:05:04,642 Train Loss: 10.3625684, Val Loss: 7.2450975 +2025-02-27 06:05:04,642 Epoch 1579/2000 +2025-02-27 06:05:20,732 Current Learning Rate: 0.0097304268 +2025-02-27 06:05:20,732 Train Loss: 9.3434030, Val Loss: 6.8798671 +2025-02-27 06:05:20,733 Epoch 1580/2000 +2025-02-27 06:05:36,753 Current Learning Rate: 0.0097552826 +2025-02-27 06:05:36,754 Train Loss: 9.0169428, Val Loss: 9.6101894 +2025-02-27 06:05:36,754 Epoch 1581/2000 +2025-02-27 06:05:51,529 Current Learning Rate: 0.0097789651 +2025-02-27 06:05:51,529 Train Loss: 8.6739159, Val Loss: 6.1760946 +2025-02-27 06:05:51,529 Epoch 1582/2000 +2025-02-27 06:06:06,627 Current Learning Rate: 0.0098014684 +2025-02-27 06:06:06,627 Train Loss: 7.9150874, Val Loss: 5.6158027 +2025-02-27 06:06:06,627 Epoch 1583/2000 +2025-02-27 06:06:22,231 Current Learning Rate: 0.0098227871 +2025-02-27 06:06:22,232 Train Loss: 7.5653545, Val Loss: 5.3864727 +2025-02-27 06:06:22,232 Epoch 1584/2000 +2025-02-27 06:06:37,369 Current Learning Rate: 0.0098429158 +2025-02-27 06:06:37,369 Train Loss: 8.3392038, Val Loss: 8.5669764 +2025-02-27 06:06:37,369 Epoch 1585/2000 +2025-02-27 06:06:52,659 Current Learning Rate: 0.0098618496 +2025-02-27 06:06:52,659 Train Loss: 9.4960626, Val Loss: 5.0933308 +2025-02-27 06:06:52,659 Epoch 1586/2000 +2025-02-27 06:07:08,775 Current Learning Rate: 0.0098795838 +2025-02-27 06:07:08,775 Train Loss: 9.5852970, Val Loss: 5.0050142 +2025-02-27 06:07:08,776 Epoch 1587/2000 +2025-02-27 06:07:24,319 Current Learning Rate: 0.0098961141 +2025-02-27 06:07:24,320 Train Loss: 10.5672478, Val Loss: 5.5294230 +2025-02-27 06:07:24,320 Epoch 1588/2000 +2025-02-27 06:07:40,110 Current Learning Rate: 0.0099114363 +2025-02-27 06:07:40,111 Train Loss: 8.9072242, Val Loss: 5.9872151 +2025-02-27 
06:07:40,111 Epoch 1589/2000 +2025-02-27 06:07:55,327 Current Learning Rate: 0.0099255466 +2025-02-27 06:07:55,328 Train Loss: 11.9919210, Val Loss: 4.7226493 +2025-02-27 06:07:55,328 Epoch 1590/2000 +2025-02-27 06:08:10,212 Current Learning Rate: 0.0099384417 +2025-02-27 06:08:10,212 Train Loss: 32.5876751, Val Loss: 29.3436278 +2025-02-27 06:08:10,212 Epoch 1591/2000 +2025-02-27 06:08:25,864 Current Learning Rate: 0.0099501183 +2025-02-27 06:08:25,864 Train Loss: 8.4422021, Val Loss: 4.3237805 +2025-02-27 06:08:25,865 Epoch 1592/2000 +2025-02-27 06:08:41,212 Current Learning Rate: 0.0099605735 +2025-02-27 06:08:41,213 Train Loss: 6.8291345, Val Loss: 5.7978450 +2025-02-27 06:08:41,213 Epoch 1593/2000 +2025-02-27 06:08:56,605 Current Learning Rate: 0.0099698048 +2025-02-27 06:08:56,606 Train Loss: 10.8030289, Val Loss: 7.4638020 +2025-02-27 06:08:56,606 Epoch 1594/2000 +2025-02-27 06:09:12,800 Current Learning Rate: 0.0099778098 +2025-02-27 06:09:12,801 Train Loss: 7.9898840, Val Loss: 3.8868500 +2025-02-27 06:09:12,803 Epoch 1595/2000 +2025-02-27 06:09:28,424 Current Learning Rate: 0.0099845867 +2025-02-27 06:09:28,425 Train Loss: 18.4177483, Val Loss: 19.9754337 +2025-02-27 06:09:28,425 Epoch 1596/2000 +2025-02-27 06:09:44,453 Current Learning Rate: 0.0099901336 +2025-02-27 06:09:44,453 Train Loss: 11.3425132, Val Loss: 9.2301824 +2025-02-27 06:09:44,453 Epoch 1597/2000 +2025-02-27 06:10:00,904 Current Learning Rate: 0.0099944494 +2025-02-27 06:10:00,904 Train Loss: 7.0720164, Val Loss: 4.1804244 +2025-02-27 06:10:00,904 Epoch 1598/2000 +2025-02-27 06:10:17,192 Current Learning Rate: 0.0099975328 +2025-02-27 06:10:17,192 Train Loss: 9.5214249, Val Loss: 4.7082585 +2025-02-27 06:10:17,192 Epoch 1599/2000 +2025-02-27 06:10:32,197 Current Learning Rate: 0.0099993832 +2025-02-27 06:10:32,197 Train Loss: 9.5066262, Val Loss: 3.4715813 +2025-02-27 06:10:32,197 Epoch 1600/2000 +2025-02-27 06:10:47,889 Current Learning Rate: 0.0100000000 +2025-02-27 06:10:47,889 Train 
Loss: 18.3994352, Val Loss: 146.4936001 +2025-02-27 06:10:47,890 Epoch 1601/2000 +2025-02-27 06:11:03,328 Current Learning Rate: 0.0099993832 +2025-02-27 06:11:03,329 Train Loss: 47.3008994, Val Loss: 4.4457281 +2025-02-27 06:11:03,329 Epoch 1602/2000 +2025-02-27 06:11:18,795 Current Learning Rate: 0.0099975328 +2025-02-27 06:11:18,796 Train Loss: 4.6240316, Val Loss: 2.9152716 +2025-02-27 06:11:18,796 Epoch 1603/2000 +2025-02-27 06:11:33,803 Current Learning Rate: 0.0099944494 +2025-02-27 06:11:33,804 Train Loss: 4.8346056, Val Loss: 5.7902561 +2025-02-27 06:11:33,804 Epoch 1604/2000 +2025-02-27 06:11:48,806 Current Learning Rate: 0.0099901336 +2025-02-27 06:11:48,806 Train Loss: 6.9504785, Val Loss: 3.9655451 +2025-02-27 06:11:48,806 Epoch 1605/2000 +2025-02-27 06:12:04,070 Current Learning Rate: 0.0099845867 +2025-02-27 06:12:04,071 Train Loss: 5.5754421, Val Loss: 2.7299030 +2025-02-27 06:12:04,071 Epoch 1606/2000 +2025-02-27 06:12:19,633 Current Learning Rate: 0.0099778098 +2025-02-27 06:12:19,634 Train Loss: 5.2980958, Val Loss: 21.3026922 +2025-02-27 06:12:19,634 Epoch 1607/2000 +2025-02-27 06:12:35,701 Current Learning Rate: 0.0099698048 +2025-02-27 06:12:35,701 Train Loss: 10.0718699, Val Loss: 6.5054109 +2025-02-27 06:12:35,701 Epoch 1608/2000 +2025-02-27 06:12:51,731 Current Learning Rate: 0.0099605735 +2025-02-27 06:12:51,731 Train Loss: 6.4153472, Val Loss: 33.1083252 +2025-02-27 06:12:51,732 Epoch 1609/2000 +2025-02-27 06:13:07,596 Current Learning Rate: 0.0099501183 +2025-02-27 06:13:07,597 Train Loss: 9.8395744, Val Loss: 2.3644262 +2025-02-27 06:13:07,597 Epoch 1610/2000 +2025-02-27 06:13:23,359 Current Learning Rate: 0.0099384417 +2025-02-27 06:13:23,359 Train Loss: 10.7319278, Val Loss: 4.0028460 +2025-02-27 06:13:23,360 Epoch 1611/2000 +2025-02-27 06:13:39,165 Current Learning Rate: 0.0099255466 +2025-02-27 06:13:39,166 Train Loss: 17.1411602, Val Loss: 43.3180018 +2025-02-27 06:13:39,166 Epoch 1612/2000 +2025-02-27 06:13:54,886 Current Learning 
Rate: 0.0099114363 +2025-02-27 06:13:54,887 Train Loss: 9.4950038, Val Loss: 4.3134264 +2025-02-27 06:13:54,887 Epoch 1613/2000 +2025-02-27 06:14:12,289 Current Learning Rate: 0.0098961141 +2025-02-27 06:14:12,290 Train Loss: 4.9283999, Val Loss: 38.1184807 +2025-02-27 06:14:12,290 Epoch 1614/2000 +2025-02-27 06:14:29,106 Current Learning Rate: 0.0098795838 +2025-02-27 06:14:29,107 Train Loss: 7.7102056, Val Loss: 2.2469727 +2025-02-27 06:14:29,107 Epoch 1615/2000 +2025-02-27 06:14:46,413 Current Learning Rate: 0.0098618496 +2025-02-27 06:14:46,413 Train Loss: 7.5240896, Val Loss: 4.2772352 +2025-02-27 06:14:46,414 Epoch 1616/2000 +2025-02-27 06:15:04,498 Current Learning Rate: 0.0098429158 +2025-02-27 06:15:04,499 Train Loss: 4.4668308, Val Loss: 2.2028988 +2025-02-27 06:15:04,499 Epoch 1617/2000 +2025-02-27 06:15:23,194 Current Learning Rate: 0.0098227871 +2025-02-27 06:15:23,195 Train Loss: 2798661574.6840606, Val Loss: 4213992328309.0996094 +2025-02-27 06:15:23,195 Epoch 1618/2000 +2025-02-27 06:15:40,634 Current Learning Rate: 0.0098014684 +2025-02-27 06:15:40,634 Train Loss: 18368886001.7713661, Val Loss: 18392186.8613139 +2025-02-27 06:15:40,635 Epoch 1619/2000 +2025-02-27 06:15:56,501 Current Learning Rate: 0.0097789651 +2025-02-27 06:15:56,501 Train Loss: 10695366.2037872, Val Loss: 6485622.6763990 +2025-02-27 06:15:56,502 Epoch 1620/2000 +2025-02-27 06:16:13,342 Current Learning Rate: 0.0097552826 +2025-02-27 06:16:13,343 Train Loss: 6138753.5918245, Val Loss: 4806592.0194647 +2025-02-27 06:16:13,343 Epoch 1621/2000 +2025-02-27 06:16:29,911 Current Learning Rate: 0.0097304268 +2025-02-27 06:16:29,911 Train Loss: 4808494.5155796, Val Loss: 3911183.4063260 +2025-02-27 06:16:29,911 Epoch 1622/2000 +2025-02-27 06:16:45,444 Current Learning Rate: 0.0097044038 +2025-02-27 06:16:45,445 Train Loss: 3980187.2597936, Val Loss: 3279607.8345499 +2025-02-27 06:16:45,445 Epoch 1623/2000 +2025-02-27 06:17:00,800 Current Learning Rate: 0.0096772202 +2025-02-27 
06:17:00,800 Train Loss: 3378883.6789901, Val Loss: 2811115.3284672 +2025-02-27 06:17:00,801 Epoch 1624/2000 +2025-02-27 06:17:17,169 Current Learning Rate: 0.0096488824 +2025-02-27 06:17:17,170 Train Loss: 2968495.6817954, Val Loss: 2494161.8004866 +2025-02-27 06:17:17,170 Epoch 1625/2000 +2025-02-27 06:17:32,752 Current Learning Rate: 0.0096193977 +2025-02-27 06:17:32,753 Train Loss: 2643513.5717864, Val Loss: 2232785.4501217 +2025-02-27 06:17:32,753 Epoch 1626/2000 +2025-02-27 06:17:48,719 Current Learning Rate: 0.0095887731 +2025-02-27 06:17:48,720 Train Loss: 2389550.1973750, Val Loss: 2022227.9562044 +2025-02-27 06:17:48,720 Epoch 1627/2000 +2025-02-27 06:18:04,197 Current Learning Rate: 0.0095570164 +2025-02-27 06:18:04,197 Train Loss: 2167244.6348061, Val Loss: 1850355.8637470 +2025-02-27 06:18:04,198 Epoch 1628/2000 +2025-02-27 06:18:20,674 Current Learning Rate: 0.0095241353 +2025-02-27 06:18:20,675 Train Loss: 1991033.8803727, Val Loss: 1699526.4233577 +2025-02-27 06:18:20,675 Epoch 1629/2000 +2025-02-27 06:18:37,277 Current Learning Rate: 0.0094901379 +2025-02-27 06:18:37,277 Train Loss: 1836981.2102996, Val Loss: 1574836.1070560 +2025-02-27 06:18:37,277 Epoch 1630/2000 +2025-02-27 06:18:53,390 Current Learning Rate: 0.0094550326 +2025-02-27 06:18:53,391 Train Loss: 1707789.0231440, Val Loss: 1465185.4257908 +2025-02-27 06:18:53,391 Epoch 1631/2000 +2025-02-27 06:19:09,950 Current Learning Rate: 0.0094188282 +2025-02-27 06:19:09,953 Train Loss: 1592963.6349063, Val Loss: 1363664.4038929 +2025-02-27 06:19:09,953 Epoch 1632/2000 +2025-02-27 06:19:25,634 Current Learning Rate: 0.0093815334 +2025-02-27 06:19:25,635 Train Loss: 1490364.6207795, Val Loss: 1276078.2481752 +2025-02-27 06:19:25,635 Epoch 1633/2000 +2025-02-27 06:19:41,043 Current Learning Rate: 0.0093431576 +2025-02-27 06:19:41,043 Train Loss: 1397988.9950907, Val Loss: 1197459.1727494 +2025-02-27 06:19:41,043 Epoch 1634/2000 +2025-02-27 06:19:56,036 Current Learning Rate: 0.0093037101 
+2025-02-27 06:19:56,036 Train Loss: 1317809.1093077, Val Loss: 1131371.0705596 +2025-02-27 06:19:56,036 Epoch 1635/2000 +2025-02-27 06:20:12,572 Current Learning Rate: 0.0092632008 +2025-02-27 06:20:12,573 Train Loss: 1245387.4942391, Val Loss: 1066487.8832117 +2025-02-27 06:20:12,573 Epoch 1636/2000 +2025-02-27 06:20:29,139 Current Learning Rate: 0.0092216396 +2025-02-27 06:20:29,140 Train Loss: 1179526.0715359, Val Loss: 1009032.3844282 +2025-02-27 06:20:29,140 Epoch 1637/2000 +2025-02-27 06:20:44,829 Current Learning Rate: 0.0091790368 +2025-02-27 06:20:44,829 Train Loss: 1120235.2289350, Val Loss: 958074.0754258 +2025-02-27 06:20:44,829 Epoch 1638/2000 +2025-02-27 06:21:01,013 Current Learning Rate: 0.0091354029 +2025-02-27 06:21:01,013 Train Loss: 1066124.9153391, Val Loss: 911735.0364964 +2025-02-27 06:21:01,013 Epoch 1639/2000 +2025-02-27 06:21:17,255 Current Learning Rate: 0.0090907486 +2025-02-27 06:21:17,256 Train Loss: 1017638.5863140, Val Loss: 869490.6326034 +2025-02-27 06:21:17,256 Epoch 1640/2000 +2025-02-27 06:21:34,390 Current Learning Rate: 0.0090450850 +2025-02-27 06:21:34,390 Train Loss: 969071.0740407, Val Loss: 830017.2992701 +2025-02-27 06:21:34,390 Epoch 1641/2000 +2025-02-27 06:21:52,434 Current Learning Rate: 0.0089984233 +2025-02-27 06:21:52,435 Train Loss: 927138.2767258, Val Loss: 792133.0291971 +2025-02-27 06:21:52,435 Epoch 1642/2000 +2025-02-27 06:22:10,018 Current Learning Rate: 0.0089507751 +2025-02-27 06:22:10,019 Train Loss: 890378.4700932, Val Loss: 758275.4136253 +2025-02-27 06:22:10,019 Epoch 1643/2000 +2025-02-27 06:22:27,705 Current Learning Rate: 0.0089021520 +2025-02-27 06:22:27,706 Train Loss: 854493.9825669, Val Loss: 729027.2019465 +2025-02-27 06:22:27,706 Epoch 1644/2000 +2025-02-27 06:22:45,409 Current Learning Rate: 0.0088525662 +2025-02-27 06:22:45,410 Train Loss: 820069.6523394, Val Loss: 697587.6277372 +2025-02-27 06:22:45,410 Epoch 1645/2000 +2025-02-27 06:23:02,594 Current Learning Rate: 0.0088020298 
+2025-02-27 06:23:02,595 Train Loss: 787124.3913436, Val Loss: 671168.1751825 +2025-02-27 06:23:02,595 Epoch 1646/2000 +2025-02-27 06:23:19,820 Current Learning Rate: 0.0087505553 +2025-02-27 06:23:19,821 Train Loss: 755844.4434425, Val Loss: 641213.2725061 +2025-02-27 06:23:19,821 Epoch 1647/2000 +2025-02-27 06:23:36,019 Current Learning Rate: 0.0086981555 +2025-02-27 06:23:36,020 Train Loss: 726268.9640317, Val Loss: 617300.0364964 +2025-02-27 06:23:36,021 Epoch 1648/2000 +2025-02-27 06:23:52,602 Current Learning Rate: 0.0086448431 +2025-02-27 06:23:52,603 Train Loss: 698399.3146979, Val Loss: 593088.0778589 +2025-02-27 06:23:52,603 Epoch 1649/2000 +2025-02-27 06:24:09,301 Current Learning Rate: 0.0085906315 +2025-02-27 06:24:09,301 Train Loss: 673483.0898708, Val Loss: 573996.6545012 +2025-02-27 06:24:09,302 Epoch 1650/2000 +2025-02-27 06:24:25,722 Current Learning Rate: 0.0085355339 +2025-02-27 06:24:25,723 Train Loss: 650944.2861437, Val Loss: 551846.9586375 +2025-02-27 06:24:25,723 Epoch 1651/2000 +2025-02-27 06:24:42,170 Current Learning Rate: 0.0084795640 +2025-02-27 06:24:42,171 Train Loss: 626282.2432622, Val Loss: 532832.9805353 +2025-02-27 06:24:42,171 Epoch 1652/2000 +2025-02-27 06:24:58,700 Current Learning Rate: 0.0084227355 +2025-02-27 06:24:58,700 Train Loss: 607044.2049895, Val Loss: 514579.6836983 +2025-02-27 06:24:58,701 Epoch 1653/2000 +2025-02-27 06:25:15,167 Current Learning Rate: 0.0083650626 +2025-02-27 06:25:15,167 Train Loss: 584830.6091574, Val Loss: 497181.2287105 +2025-02-27 06:25:15,167 Epoch 1654/2000 +2025-02-27 06:25:31,932 Current Learning Rate: 0.0083065593 +2025-02-27 06:25:31,932 Train Loss: 564833.9475003, Val Loss: 481119.0936740 +2025-02-27 06:25:31,933 Epoch 1655/2000 +2025-02-27 06:25:49,730 Current Learning Rate: 0.0082472402 +2025-02-27 06:25:49,731 Train Loss: 548754.3677988, Val Loss: 475027.9379562 +2025-02-27 06:25:49,731 Epoch 1656/2000 +2025-02-27 06:26:07,844 Current Learning Rate: 0.0081871199 +2025-02-27 
06:26:07,845 Train Loss: 530684.5501453, Val Loss: 447813.7287105 +2025-02-27 06:26:07,845 Epoch 1657/2000 +2025-02-27 06:26:25,837 Current Learning Rate: 0.0081262133 +2025-02-27 06:26:25,838 Train Loss: 514066.0524997, Val Loss: 435367.4026764 +2025-02-27 06:26:25,838 Epoch 1658/2000 +2025-02-27 06:26:44,166 Current Learning Rate: 0.0080645353 +2025-02-27 06:26:44,166 Train Loss: 495677.1751328, Val Loss: 422518.3819951 +2025-02-27 06:26:44,166 Epoch 1659/2000 +2025-02-27 06:27:02,534 Current Learning Rate: 0.0080021011 +2025-02-27 06:27:02,535 Train Loss: 480713.9850716, Val Loss: 407736.9464720 +2025-02-27 06:27:02,536 Epoch 1660/2000 +2025-02-27 06:27:21,581 Current Learning Rate: 0.0079389263 +2025-02-27 06:27:21,582 Train Loss: 466590.2695121, Val Loss: 398407.7737226 +2025-02-27 06:27:21,583 Epoch 1661/2000 +2025-02-27 06:27:40,101 Current Learning Rate: 0.0078750263 +2025-02-27 06:27:40,102 Train Loss: 454021.6786895, Val Loss: 383226.7031630 +2025-02-27 06:27:40,102 Epoch 1662/2000 +2025-02-27 06:27:58,406 Current Learning Rate: 0.0078104169 +2025-02-27 06:27:58,407 Train Loss: 440474.7274822, Val Loss: 375284.3309002 +2025-02-27 06:27:58,407 Epoch 1663/2000 +2025-02-27 06:28:15,774 Current Learning Rate: 0.0077451141 +2025-02-27 06:28:15,774 Train Loss: 426641.3255185, Val Loss: 362302.4756691 +2025-02-27 06:28:15,774 Epoch 1664/2000 +2025-02-27 06:28:31,306 Current Learning Rate: 0.0076791340 +2025-02-27 06:28:31,306 Train Loss: 414447.3664963, Val Loss: 350557.6399027 +2025-02-27 06:28:31,306 Epoch 1665/2000 +2025-02-27 06:28:48,617 Current Learning Rate: 0.0076124928 +2025-02-27 06:28:48,618 Train Loss: 402835.1292456, Val Loss: 342746.7639903 +2025-02-27 06:28:48,618 Epoch 1666/2000 +2025-02-27 06:29:04,459 Current Learning Rate: 0.0075452071 +2025-02-27 06:29:04,460 Train Loss: 392430.5861136, Val Loss: 329395.7664234 +2025-02-27 06:29:04,460 Epoch 1667/2000 +2025-02-27 06:29:21,330 Current Learning Rate: 0.0074772933 +2025-02-27 06:29:21,331 Train 
Loss: 380500.0075143, Val Loss: 323212.4148418 +2025-02-27 06:29:21,331 Epoch 1668/2000 +2025-02-27 06:29:36,743 Current Learning Rate: 0.0074087684 +2025-02-27 06:29:36,744 Train Loss: 371567.9445947, Val Loss: 310794.8540146 +2025-02-27 06:29:36,744 Epoch 1669/2000 +2025-02-27 06:29:52,476 Current Learning Rate: 0.0073396491 +2025-02-27 06:29:52,477 Train Loss: 359687.3429516, Val Loss: 303181.7214112 +2025-02-27 06:29:52,477 Epoch 1670/2000 +2025-02-27 06:30:08,779 Current Learning Rate: 0.0072699525 +2025-02-27 06:30:08,780 Train Loss: 350167.5693818, Val Loss: 294650.6995134 +2025-02-27 06:30:08,781 Epoch 1671/2000 +2025-02-27 06:30:25,160 Current Learning Rate: 0.0071996958 +2025-02-27 06:30:25,161 Train Loss: 340730.7328925, Val Loss: 285704.8905109 +2025-02-27 06:30:25,161 Epoch 1672/2000 +2025-02-27 06:30:40,991 Current Learning Rate: 0.0071288965 +2025-02-27 06:30:40,992 Train Loss: 331261.0710350, Val Loss: 277872.0620438 +2025-02-27 06:30:40,992 Epoch 1673/2000 +2025-02-27 06:30:56,897 Current Learning Rate: 0.0070575718 +2025-02-27 06:30:56,897 Train Loss: 321660.4418395, Val Loss: 270575.3892944 +2025-02-27 06:30:56,898 Epoch 1674/2000 +2025-02-27 06:31:13,469 Current Learning Rate: 0.0069857395 +2025-02-27 06:31:13,469 Train Loss: 313669.0952810, Val Loss: 262338.3637470 +2025-02-27 06:31:13,470 Epoch 1675/2000 +2025-02-27 06:31:29,728 Current Learning Rate: 0.0069134172 +2025-02-27 06:31:29,728 Train Loss: 305325.2134055, Val Loss: 256364.3217762 +2025-02-27 06:31:29,729 Epoch 1676/2000 +2025-02-27 06:31:46,665 Current Learning Rate: 0.0068406228 +2025-02-27 06:31:46,666 Train Loss: 296745.9362789, Val Loss: 249372.7007299 +2025-02-27 06:31:46,666 Epoch 1677/2000 +2025-02-27 06:32:03,972 Current Learning Rate: 0.0067673742 +2025-02-27 06:32:03,973 Train Loss: 289319.3693017, Val Loss: 244011.5115572 +2025-02-27 06:32:03,973 Epoch 1678/2000 +2025-02-27 06:32:20,062 Current Learning Rate: 0.0066936896 +2025-02-27 06:32:20,063 Train Loss: 
281858.6103597, Val Loss: 235283.8077859 +2025-02-27 06:32:20,064 Epoch 1679/2000 +2025-02-27 06:32:36,782 Current Learning Rate: 0.0066195871 +2025-02-27 06:32:36,782 Train Loss: 274052.1513375, Val Loss: 228530.8485401 +2025-02-27 06:32:36,784 Epoch 1680/2000 +2025-02-27 06:32:54,336 Current Learning Rate: 0.0065450850 +2025-02-27 06:32:54,337 Train Loss: 267717.2490231, Val Loss: 224303.3029197 +2025-02-27 06:32:54,337 Epoch 1681/2000 +2025-02-27 06:33:12,076 Current Learning Rate: 0.0064702016 +2025-02-27 06:33:12,076 Train Loss: 260621.2691614, Val Loss: 217806.4111922 +2025-02-27 06:33:12,076 Epoch 1682/2000 +2025-02-27 06:33:30,224 Current Learning Rate: 0.0063949555 +2025-02-27 06:33:30,225 Train Loss: 253837.7387035, Val Loss: 210368.1690998 +2025-02-27 06:33:30,225 Epoch 1683/2000 +2025-02-27 06:33:49,247 Current Learning Rate: 0.0063193652 +2025-02-27 06:33:49,248 Train Loss: 246750.3769662, Val Loss: 205027.7737226 +2025-02-27 06:33:49,248 Epoch 1684/2000 +2025-02-27 06:34:10,046 Current Learning Rate: 0.0062434494 +2025-02-27 06:34:10,047 Train Loss: 240906.1852520, Val Loss: 200578.6496350 +2025-02-27 06:34:10,047 Epoch 1685/2000 +2025-02-27 06:34:26,080 Current Learning Rate: 0.0061672268 +2025-02-27 06:34:26,080 Train Loss: 235437.2850917, Val Loss: 198422.1046229 +2025-02-27 06:34:26,081 Epoch 1686/2000 +2025-02-27 06:34:44,190 Current Learning Rate: 0.0060907162 +2025-02-27 06:34:44,191 Train Loss: 230042.5551047, Val Loss: 190754.6897810 +2025-02-27 06:34:44,191 Epoch 1687/2000 +2025-02-27 06:35:00,779 Current Learning Rate: 0.0060139365 +2025-02-27 06:35:00,779 Train Loss: 224053.8488127, Val Loss: 188011.9860097 +2025-02-27 06:35:00,780 Epoch 1688/2000 +2025-02-27 06:35:17,875 Current Learning Rate: 0.0059369066 +2025-02-27 06:35:17,875 Train Loss: 217574.8960525, Val Loss: 179262.8649635 +2025-02-27 06:35:17,875 Epoch 1689/2000 +2025-02-27 06:35:33,773 Current Learning Rate: 0.0058596455 +2025-02-27 06:35:33,774 Train Loss: 212272.8909929, Val 
Loss: 174717.4422141 +2025-02-27 06:35:33,774 Epoch 1690/2000 +2025-02-27 06:35:49,411 Current Learning Rate: 0.0057821723 +2025-02-27 06:35:49,412 Train Loss: 206710.2855425, Val Loss: 171647.2323601 +2025-02-27 06:35:49,412 Epoch 1691/2000 +2025-02-27 06:36:05,014 Current Learning Rate: 0.0057045062 +2025-02-27 06:36:05,015 Train Loss: 203775.9435427, Val Loss: 167827.4026764 +2025-02-27 06:36:05,016 Epoch 1692/2000 +2025-02-27 06:36:20,746 Current Learning Rate: 0.0056266662 +2025-02-27 06:36:20,746 Train Loss: 197499.8331830, Val Loss: 162023.9172749 +2025-02-27 06:36:20,746 Epoch 1693/2000 +2025-02-27 06:36:35,798 Current Learning Rate: 0.0055486716 +2025-02-27 06:36:35,799 Train Loss: 194087.7930568, Val Loss: 158239.2214112 +2025-02-27 06:36:35,799 Epoch 1694/2000 +2025-02-27 06:36:51,906 Current Learning Rate: 0.0054705416 +2025-02-27 06:36:51,907 Train Loss: 188187.7963130, Val Loss: 155058.8047445 +2025-02-27 06:36:51,907 Epoch 1695/2000 +2025-02-27 06:37:08,201 Current Learning Rate: 0.0053922955 +2025-02-27 06:37:08,202 Train Loss: 182731.9461978, Val Loss: 155327.2871046 +2025-02-27 06:37:08,203 Epoch 1696/2000 +2025-02-27 06:37:23,620 Current Learning Rate: 0.0053139526 +2025-02-27 06:37:23,620 Train Loss: 181345.9871255, Val Loss: 147835.3832117 +2025-02-27 06:37:23,621 Epoch 1697/2000 +2025-02-27 06:37:39,448 Current Learning Rate: 0.0052355323 +2025-02-27 06:37:39,448 Train Loss: 174983.7398557, Val Loss: 143799.8053528 +2025-02-27 06:37:39,449 Epoch 1698/2000 +2025-02-27 06:37:55,363 Current Learning Rate: 0.0051570538 +2025-02-27 06:37:55,364 Train Loss: 172357.5653742, Val Loss: 139677.7068127 +2025-02-27 06:37:55,364 Epoch 1699/2000 +2025-02-27 06:38:10,873 Current Learning Rate: 0.0050785366 +2025-02-27 06:38:10,873 Train Loss: 166830.9220018, Val Loss: 136592.2141119 +2025-02-27 06:38:10,874 Epoch 1700/2000 +2025-02-27 06:38:27,756 Current Learning Rate: 0.0050000000 +2025-02-27 06:38:27,756 Train Loss: 164942.3840297, Val Loss: 
132733.5310219 +2025-02-27 06:38:27,757 Epoch 1701/2000 +2025-02-27 06:38:43,195 Current Learning Rate: 0.0049214634 +2025-02-27 06:38:43,195 Train Loss: 158427.6510370, Val Loss: 131268.4336983 +2025-02-27 06:38:43,196 Epoch 1702/2000 +2025-02-27 06:38:59,119 Current Learning Rate: 0.0048429462 +2025-02-27 06:38:59,119 Train Loss: 156295.9941389, Val Loss: 126815.4638078 +2025-02-27 06:38:59,120 Epoch 1703/2000 +2025-02-27 06:39:14,321 Current Learning Rate: 0.0047644677 +2025-02-27 06:39:14,322 Train Loss: 152344.7405070, Val Loss: 127574.3430657 +2025-02-27 06:39:14,322 Epoch 1704/2000 +2025-02-27 06:39:29,581 Current Learning Rate: 0.0046860474 +2025-02-27 06:39:29,582 Train Loss: 151789.0652239, Val Loss: 139541.4020681 +2025-02-27 06:39:29,582 Epoch 1705/2000 +2025-02-27 06:39:45,673 Current Learning Rate: 0.0046077045 +2025-02-27 06:39:45,674 Train Loss: 150661.9196473, Val Loss: 121783.1158759 +2025-02-27 06:39:45,674 Epoch 1706/2000 +2025-02-27 06:40:01,682 Current Learning Rate: 0.0045294584 +2025-02-27 06:40:01,682 Train Loss: 145474.4181445, Val Loss: 116739.4099757 +2025-02-27 06:40:01,683 Epoch 1707/2000 +2025-02-27 06:40:18,318 Current Learning Rate: 0.0044513284 +2025-02-27 06:40:18,319 Train Loss: 145594.2881475, Val Loss: 117193.5629562 +2025-02-27 06:40:18,319 Epoch 1708/2000 +2025-02-27 06:40:33,938 Current Learning Rate: 0.0043733338 +2025-02-27 06:40:33,939 Train Loss: 141799.4672378, Val Loss: 122955.1125304 +2025-02-27 06:40:33,939 Epoch 1709/2000 +2025-02-27 06:40:50,624 Current Learning Rate: 0.0042954938 +2025-02-27 06:40:50,625 Train Loss: 135865.4640066, Val Loss: 111173.7712895 +2025-02-27 06:40:50,625 Epoch 1710/2000 +2025-02-27 06:41:06,431 Current Learning Rate: 0.0042178277 +2025-02-27 06:41:06,432 Train Loss: 138100.0916742, Val Loss: 105280.7390511 +2025-02-27 06:41:06,432 Epoch 1711/2000 +2025-02-27 06:41:22,396 Current Learning Rate: 0.0041403545 +2025-02-27 06:41:22,397 Train Loss: 131708.2397806, Val Loss: 132007.0863747 
+2025-02-27 06:41:22,397 Epoch 1712/2000 +2025-02-27 06:41:38,132 Current Learning Rate: 0.0040630934 +2025-02-27 06:41:38,133 Train Loss: 137873.4004609, Val Loss: 116239.3461071 +2025-02-27 06:41:38,133 Epoch 1713/2000 +2025-02-27 06:41:55,231 Current Learning Rate: 0.0039860635 +2025-02-27 06:41:55,232 Train Loss: 133234.6539675, Val Loss: 98277.4437348 +2025-02-27 06:41:55,232 Epoch 1714/2000 +2025-02-27 06:42:11,885 Current Learning Rate: 0.0039092838 +2025-02-27 06:42:11,886 Train Loss: 125096.9193969, Val Loss: 104334.6684915 +2025-02-27 06:42:11,886 Epoch 1715/2000 +2025-02-27 06:42:28,120 Current Learning Rate: 0.0038327732 +2025-02-27 06:42:28,121 Train Loss: 124725.6000150, Val Loss: 98411.4370438 +2025-02-27 06:42:28,121 Epoch 1716/2000 +2025-02-27 06:42:44,812 Current Learning Rate: 0.0037565506 +2025-02-27 06:42:44,813 Train Loss: 127907.3566025, Val Loss: 116392.4163625 +2025-02-27 06:42:44,813 Epoch 1717/2000 +2025-02-27 06:43:01,228 Current Learning Rate: 0.0036806348 +2025-02-27 06:43:01,241 Train Loss: 118029.3012975, Val Loss: 104200.7877129 +2025-02-27 06:43:01,241 Epoch 1718/2000 +2025-02-27 06:43:18,148 Current Learning Rate: 0.0036050445 +2025-02-27 06:43:18,149 Train Loss: 116753.2712153, Val Loss: 88834.1043187 +2025-02-27 06:43:18,149 Epoch 1719/2000 +2025-02-27 06:43:33,898 Current Learning Rate: 0.0035297984 +2025-02-27 06:43:33,898 Train Loss: 122766.2387286, Val Loss: 88073.8214720 +2025-02-27 06:43:33,898 Epoch 1720/2000 +2025-02-27 06:43:50,142 Current Learning Rate: 0.0034549150 +2025-02-27 06:43:50,143 Train Loss: 107123.2108506, Val Loss: 84357.1745742 +2025-02-27 06:43:50,143 Epoch 1721/2000 +2025-02-27 06:44:06,557 Current Learning Rate: 0.0033804129 +2025-02-27 06:44:06,558 Train Loss: 117838.2528053, Val Loss: 91368.3561436 +2025-02-27 06:44:06,558 Epoch 1722/2000 +2025-02-27 06:44:23,016 Current Learning Rate: 0.0033063104 +2025-02-27 06:44:23,017 Train Loss: 117783.2962629, Val Loss: 90060.3026156 +2025-02-27 06:44:23,017 
Epoch 1723/2000 +2025-02-27 06:44:38,915 Current Learning Rate: 0.0032326258 +2025-02-27 06:44:38,916 Train Loss: 100891.2744214, Val Loss: 79990.2113747 +2025-02-27 06:44:38,916 Epoch 1724/2000 +2025-02-27 06:44:55,369 Current Learning Rate: 0.0031593772 +2025-02-27 06:44:55,370 Train Loss: 103525.8717814, Val Loss: 113234.6624088 +2025-02-27 06:44:55,370 Epoch 1725/2000 +2025-02-27 06:45:12,623 Current Learning Rate: 0.0030865828 +2025-02-27 06:45:12,623 Train Loss: 99935.6668921, Val Loss: 82899.1788321 +2025-02-27 06:45:12,623 Epoch 1726/2000 +2025-02-27 06:45:30,551 Current Learning Rate: 0.0030142605 +2025-02-27 06:45:30,551 Train Loss: 101781.3376666, Val Loss: 77093.1767032 +2025-02-27 06:45:30,551 Epoch 1727/2000 +2025-02-27 06:45:48,725 Current Learning Rate: 0.0029424282 +2025-02-27 06:45:48,726 Train Loss: 93608.0375213, Val Loss: 73772.3631387 +2025-02-27 06:45:48,726 Epoch 1728/2000 +2025-02-27 06:46:05,617 Current Learning Rate: 0.0028711035 +2025-02-27 06:46:05,617 Train Loss: 99915.9146128, Val Loss: 72422.0818127 +2025-02-27 06:46:05,617 Epoch 1729/2000 +2025-02-27 06:46:22,952 Current Learning Rate: 0.0028003042 +2025-02-27 06:46:22,952 Train Loss: 89483.8438032, Val Loss: 73696.7183698 +2025-02-27 06:46:22,953 Epoch 1730/2000 +2025-02-27 06:46:40,570 Current Learning Rate: 0.0027300475 +2025-02-27 06:46:40,570 Train Loss: 88793.1837992, Val Loss: 68824.7551703 +2025-02-27 06:46:40,570 Epoch 1731/2000 +2025-02-27 06:46:57,292 Current Learning Rate: 0.0026603509 +2025-02-27 06:46:57,293 Train Loss: 95067.5444595, Val Loss: 87315.2706813 +2025-02-27 06:46:57,293 Epoch 1732/2000 +2025-02-27 06:47:13,122 Current Learning Rate: 0.0025912316 +2025-02-27 06:47:13,123 Train Loss: 90157.6640617, Val Loss: 67868.4914842 +2025-02-27 06:47:13,123 Epoch 1733/2000 +2025-02-27 06:47:28,325 Current Learning Rate: 0.0025227067 +2025-02-27 06:47:28,326 Train Loss: 84398.1383378, Val Loss: 65507.9774939 +2025-02-27 06:47:28,326 Epoch 1734/2000 +2025-02-27 
06:47:43,289 Current Learning Rate: 0.0024547929 +2025-02-27 06:47:43,290 Train Loss: 87368.3666466, Val Loss: 85062.7357056 +2025-02-27 06:47:43,290 Epoch 1735/2000 +2025-02-27 06:47:57,982 Current Learning Rate: 0.0023875072 +2025-02-27 06:47:57,983 Train Loss: 84152.6759593, Val Loss: 62784.5369526 +2025-02-27 06:47:57,983 Epoch 1736/2000 +2025-02-27 06:48:13,416 Current Learning Rate: 0.0023208660 +2025-02-27 06:48:13,416 Train Loss: 80373.3905671, Val Loss: 66664.7019465 +2025-02-27 06:48:13,416 Epoch 1737/2000 +2025-02-27 06:48:28,079 Current Learning Rate: 0.0022548859 +2025-02-27 06:48:28,080 Train Loss: 80167.0361186, Val Loss: 60626.5799878 +2025-02-27 06:48:28,080 Epoch 1738/2000 +2025-02-27 06:48:42,804 Current Learning Rate: 0.0021895831 +2025-02-27 06:48:42,804 Train Loss: 81804.3860836, Val Loss: 60839.9847932 +2025-02-27 06:48:42,805 Epoch 1739/2000 +2025-02-27 06:48:58,222 Current Learning Rate: 0.0021249737 +2025-02-27 06:48:58,222 Train Loss: 76741.2679090, Val Loss: 61051.1732056 +2025-02-27 06:48:58,223 Epoch 1740/2000 +2025-02-27 06:49:14,416 Current Learning Rate: 0.0020610737 +2025-02-27 06:49:14,417 Train Loss: 73390.4149133, Val Loss: 59265.2851277 +2025-02-27 06:49:14,417 Epoch 1741/2000 +2025-02-27 06:49:30,173 Current Learning Rate: 0.0019978989 +2025-02-27 06:49:30,173 Train Loss: 74468.8772418, Val Loss: 65516.9221411 +2025-02-27 06:49:30,174 Epoch 1742/2000 +2025-02-27 06:49:46,038 Current Learning Rate: 0.0019354647 +2025-02-27 06:49:46,039 Train Loss: 77156.9059964, Val Loss: 57164.5468370 +2025-02-27 06:49:46,040 Epoch 1743/2000 +2025-02-27 06:50:01,577 Current Learning Rate: 0.0018737867 +2025-02-27 06:50:01,578 Train Loss: 74484.7388789, Val Loss: 56526.2302311 +2025-02-27 06:50:01,578 Epoch 1744/2000 +2025-02-27 06:50:17,175 Current Learning Rate: 0.0018128801 +2025-02-27 06:50:17,176 Train Loss: 69241.9939761, Val Loss: 54040.5869830 +2025-02-27 06:50:17,176 Epoch 1745/2000 +2025-02-27 06:50:33,743 Current Learning Rate: 
0.0017527598 +2025-02-27 06:50:33,744 Train Loss: 68483.4168670, Val Loss: 53184.3559915 +2025-02-27 06:50:33,744 Epoch 1746/2000 +2025-02-27 06:50:49,881 Current Learning Rate: 0.0016934407 +2025-02-27 06:50:49,882 Train Loss: 68388.5236324, Val Loss: 52567.3638990 +2025-02-27 06:50:49,882 Epoch 1747/2000 +2025-02-27 06:51:05,339 Current Learning Rate: 0.0016349374 +2025-02-27 06:51:05,339 Train Loss: 65945.0123359, Val Loss: 52188.0793796 +2025-02-27 06:51:05,340 Epoch 1748/2000 +2025-02-27 06:51:21,174 Current Learning Rate: 0.0015772645 +2025-02-27 06:51:21,175 Train Loss: 66345.5796639, Val Loss: 51907.7836071 +2025-02-27 06:51:21,175 Epoch 1749/2000 +2025-02-27 06:51:36,228 Current Learning Rate: 0.0015204360 +2025-02-27 06:51:36,229 Train Loss: 64515.4115319, Val Loss: 50288.3774331 +2025-02-27 06:51:36,229 Epoch 1750/2000 +2025-02-27 06:51:52,106 Current Learning Rate: 0.0014644661 +2025-02-27 06:51:52,106 Train Loss: 63148.9562043, Val Loss: 49431.2028589 +2025-02-27 06:51:52,106 Epoch 1751/2000 +2025-02-27 06:52:07,527 Current Learning Rate: 0.0014093685 +2025-02-27 06:52:07,527 Train Loss: 62507.3648682, Val Loss: 49052.8299878 +2025-02-27 06:52:07,528 Epoch 1752/2000 +2025-02-27 06:52:22,447 Current Learning Rate: 0.0013551569 +2025-02-27 06:52:22,448 Train Loss: 62426.5227683, Val Loss: 48617.3631387 +2025-02-27 06:52:22,448 Epoch 1753/2000 +2025-02-27 06:52:37,414 Current Learning Rate: 0.0013018445 +2025-02-27 06:52:37,414 Train Loss: 61787.2798943, Val Loss: 47448.6982968 +2025-02-27 06:52:37,414 Epoch 1754/2000 +2025-02-27 06:52:51,218 Current Learning Rate: 0.0012494447 +2025-02-27 06:52:51,219 Train Loss: 60644.1237100, Val Loss: 49097.7562348 +2025-02-27 06:52:51,219 Epoch 1755/2000 +2025-02-27 06:53:06,735 Current Learning Rate: 0.0011979702 +2025-02-27 06:53:06,735 Train Loss: 59357.6361337, Val Loss: 46572.7486314 +2025-02-27 06:53:06,735 Epoch 1756/2000 +2025-02-27 06:53:22,968 Current Learning Rate: 0.0011474338 +2025-02-27 06:53:22,969 
Train Loss: 59093.8150611, Val Loss: 45686.2644465 +2025-02-27 06:53:22,969 Epoch 1757/2000 +2025-02-27 06:53:38,557 Current Learning Rate: 0.0010978480 +2025-02-27 06:53:38,558 Train Loss: 59258.4595106, Val Loss: 46188.4899635 +2025-02-27 06:53:38,558 Epoch 1758/2000 +2025-02-27 06:53:53,804 Current Learning Rate: 0.0010492249 +2025-02-27 06:53:53,804 Train Loss: 57383.5695321, Val Loss: 44742.0339112 +2025-02-27 06:53:53,804 Epoch 1759/2000 +2025-02-27 06:54:09,823 Current Learning Rate: 0.0010015767 +2025-02-27 06:54:09,824 Train Loss: 57394.3827021, Val Loss: 44817.5752737 +2025-02-27 06:54:09,824 Epoch 1760/2000 +2025-02-27 06:54:25,786 Current Learning Rate: 0.0009549150 +2025-02-27 06:54:25,786 Train Loss: 55963.1628219, Val Loss: 43846.0120134 +2025-02-27 06:54:25,786 Epoch 1761/2000 +2025-02-27 06:54:42,035 Current Learning Rate: 0.0009092514 +2025-02-27 06:54:42,035 Train Loss: 55555.5102194, Val Loss: 43110.5291971 +2025-02-27 06:54:42,036 Epoch 1762/2000 +2025-02-27 06:54:57,170 Current Learning Rate: 0.0008645971 +2025-02-27 06:54:57,171 Train Loss: 54919.2174507, Val Loss: 42517.1555657 +2025-02-27 06:54:57,171 Epoch 1763/2000 +2025-02-27 06:55:11,717 Current Learning Rate: 0.0008209632 +2025-02-27 06:55:11,717 Train Loss: 54006.7414963, Val Loss: 42061.4933090 +2025-02-27 06:55:11,718 Epoch 1764/2000 +2025-02-27 06:55:26,738 Current Learning Rate: 0.0007783604 +2025-02-27 06:55:26,738 Train Loss: 53509.5828950, Val Loss: 41667.6718370 +2025-02-27 06:55:26,738 Epoch 1765/2000 +2025-02-27 06:55:41,594 Current Learning Rate: 0.0007367992 +2025-02-27 06:55:41,594 Train Loss: 52878.9174431, Val Loss: 41877.5357360 +2025-02-27 06:55:41,594 Epoch 1766/2000 +2025-02-27 06:55:56,475 Current Learning Rate: 0.0006962899 +2025-02-27 06:55:56,476 Train Loss: 52635.3498522, Val Loss: 43615.5596107 +2025-02-27 06:55:56,476 Epoch 1767/2000 +2025-02-27 06:56:11,566 Current Learning Rate: 0.0006568424 +2025-02-27 06:56:11,566 Train Loss: 52439.7244765, Val Loss: 
40765.6683394 +2025-02-27 06:56:11,567 Epoch 1768/2000 +2025-02-27 06:56:26,292 Current Learning Rate: 0.0006184666 +2025-02-27 06:56:26,292 Train Loss: 51455.0877292, Val Loss: 40235.7382908 +2025-02-27 06:56:26,292 Epoch 1769/2000 +2025-02-27 06:56:42,037 Current Learning Rate: 0.0005811718 +2025-02-27 06:56:42,038 Train Loss: 50993.1456016, Val Loss: 39872.3479319 +2025-02-27 06:56:42,038 Epoch 1770/2000 +2025-02-27 06:56:57,505 Current Learning Rate: 0.0005449674 +2025-02-27 06:56:57,506 Train Loss: 50609.0413661, Val Loss: 39420.1003650 +2025-02-27 06:56:57,506 Epoch 1771/2000 +2025-02-27 06:57:12,484 Current Learning Rate: 0.0005098621 +2025-02-27 06:57:12,485 Train Loss: 50374.0644099, Val Loss: 39122.6178528 +2025-02-27 06:57:12,485 Epoch 1772/2000 +2025-02-27 06:57:27,673 Current Learning Rate: 0.0004758647 +2025-02-27 06:57:27,673 Train Loss: 49969.4326095, Val Loss: 38977.7531934 +2025-02-27 06:57:27,673 Epoch 1773/2000 +2025-02-27 06:57:43,238 Current Learning Rate: 0.0004429836 +2025-02-27 06:57:43,239 Train Loss: 49478.1765980, Val Loss: 38634.8479319 +2025-02-27 06:57:43,239 Epoch 1774/2000 +2025-02-27 06:57:58,100 Current Learning Rate: 0.0004112269 +2025-02-27 06:57:58,101 Train Loss: 49199.5803902, Val Loss: 38404.6129866 +2025-02-27 06:57:58,101 Epoch 1775/2000 +2025-02-27 06:58:13,213 Current Learning Rate: 0.0003806023 +2025-02-27 06:58:13,213 Train Loss: 49042.3771416, Val Loss: 38089.6783759 +2025-02-27 06:58:13,213 Epoch 1776/2000 +2025-02-27 06:58:28,525 Current Learning Rate: 0.0003511176 +2025-02-27 06:58:28,525 Train Loss: 48614.5489555, Val Loss: 38172.2635341 +2025-02-27 06:58:28,526 Epoch 1777/2000 +2025-02-27 06:58:43,642 Current Learning Rate: 0.0003227798 +2025-02-27 06:58:43,642 Train Loss: 48167.7564873, Val Loss: 37567.8064173 +2025-02-27 06:58:43,643 Epoch 1778/2000 +2025-02-27 06:58:58,689 Current Learning Rate: 0.0002955962 +2025-02-27 06:58:58,690 Train Loss: 47937.6015680, Val Loss: 37330.0821168 +2025-02-27 06:58:58,690 
Epoch 1779/2000 +2025-02-27 06:59:15,183 Current Learning Rate: 0.0002695732 +2025-02-27 06:59:15,184 Train Loss: 47596.4634055, Val Loss: 37191.4317214 +2025-02-27 06:59:15,184 Epoch 1780/2000 +2025-02-27 06:59:30,345 Current Learning Rate: 0.0002447174 +2025-02-27 06:59:30,346 Train Loss: 47690.7437256, Val Loss: 36970.7740268 +2025-02-27 06:59:30,346 Epoch 1781/2000 +2025-02-27 06:59:46,016 Current Learning Rate: 0.0002210349 +2025-02-27 06:59:46,016 Train Loss: 47251.7316025, Val Loss: 36851.7434611 +2025-02-27 06:59:46,017 Epoch 1782/2000 +2025-02-27 07:00:01,231 Current Learning Rate: 0.0001985316 +2025-02-27 07:00:01,232 Train Loss: 46962.1854023, Val Loss: 36629.1963200 +2025-02-27 07:00:01,232 Epoch 1783/2000 +2025-02-27 07:00:17,298 Current Learning Rate: 0.0001772129 +2025-02-27 07:00:17,298 Train Loss: 46840.0563571, Val Loss: 36528.8982664 +2025-02-27 07:00:17,299 Epoch 1784/2000 +2025-02-27 07:00:32,540 Current Learning Rate: 0.0001570842 +2025-02-27 07:00:32,540 Train Loss: 46595.0283664, Val Loss: 36342.2483273 +2025-02-27 07:00:32,541 Epoch 1785/2000 +2025-02-27 07:00:48,161 Current Learning Rate: 0.0001381504 +2025-02-27 07:00:48,162 Train Loss: 46531.2149960, Val Loss: 36349.5688869 +2025-02-27 07:00:48,162 Epoch 1786/2000 +2025-02-27 07:01:04,256 Current Learning Rate: 0.0001204162 +2025-02-27 07:01:04,256 Train Loss: 46293.1570609, Val Loss: 36121.1853710 +2025-02-27 07:01:04,257 Epoch 1787/2000 +2025-02-27 07:01:20,299 Current Learning Rate: 0.0001038859 +2025-02-27 07:01:20,300 Train Loss: 46231.7051147, Val Loss: 36034.6069039 +2025-02-27 07:01:20,300 Epoch 1788/2000 +2025-02-27 07:01:36,127 Current Learning Rate: 0.0000885637 +2025-02-27 07:01:36,127 Train Loss: 46152.4009368, Val Loss: 35927.4209246 +2025-02-27 07:01:36,128 Epoch 1789/2000 +2025-02-27 07:01:52,048 Current Learning Rate: 0.0000744534 +2025-02-27 07:01:52,048 Train Loss: 45880.8911306, Val Loss: 35905.3710462 +2025-02-27 07:01:52,048 Epoch 1790/2000 +2025-02-27 07:02:07,123 
Current Learning Rate: 0.0000615583 +2025-02-27 07:02:07,124 Train Loss: 45962.0804529, Val Loss: 35978.7294708 +2025-02-27 07:02:07,124 Epoch 1791/2000 +2025-02-27 07:02:22,504 Current Learning Rate: 0.0000498817 +2025-02-27 07:02:22,505 Train Loss: 45960.1742686, Val Loss: 35770.3527981 +2025-02-27 07:02:22,505 Epoch 1792/2000 +2025-02-27 07:02:37,925 Current Learning Rate: 0.0000394265 +2025-02-27 07:02:37,926 Train Loss: 45979.7941088, Val Loss: 35762.4482968 +2025-02-27 07:02:37,926 Epoch 1793/2000 +2025-02-27 07:02:53,169 Current Learning Rate: 0.0000301952 +2025-02-27 07:02:53,170 Train Loss: 45761.0603897, Val Loss: 35717.8193431 +2025-02-27 07:02:53,170 Epoch 1794/2000 +2025-02-27 07:03:08,669 Current Learning Rate: 0.0000221902 +2025-02-27 07:03:08,669 Train Loss: 45682.0602269, Val Loss: 35643.5021290 +2025-02-27 07:03:08,670 Epoch 1795/2000 +2025-02-27 07:03:24,296 Current Learning Rate: 0.0000154133 +2025-02-27 07:03:24,296 Train Loss: 45690.1058261, Val Loss: 35650.2212591 +2025-02-27 07:03:24,297 Epoch 1796/2000 +2025-02-27 07:03:39,844 Current Learning Rate: 0.0000098664 +2025-02-27 07:03:39,844 Train Loss: 45654.0880799, Val Loss: 35644.5072993 +2025-02-27 07:03:39,844 Epoch 1797/2000 +2025-02-27 07:03:55,306 Current Learning Rate: 0.0000055506 +2025-02-27 07:03:55,306 Train Loss: 45729.6272292, Val Loss: 35608.6009732 +2025-02-27 07:03:55,306 Epoch 1798/2000 +2025-02-27 07:04:10,451 Current Learning Rate: 0.0000024672 +2025-02-27 07:04:10,451 Train Loss: 45583.6285818, Val Loss: 35566.2530414 +2025-02-27 07:04:10,452 Epoch 1799/2000 +2025-02-27 07:04:26,050 Current Learning Rate: 0.0000006168 +2025-02-27 07:04:26,050 Train Loss: 45602.0693067, Val Loss: 35627.6444647 +2025-02-27 07:04:26,051 Epoch 1800/2000 +2025-02-27 07:04:41,410 Current Learning Rate: 0.0000000000 +2025-02-27 07:04:41,411 Train Loss: 45642.1178990, Val Loss: 35587.0650852 +2025-02-27 07:04:41,411 Epoch 1801/2000 +2025-02-27 07:04:56,919 Current Learning Rate: 0.0000006168 
+2025-02-27 07:04:56,920 Train Loss: 45542.8495391, Val Loss: 35584.2100061 +2025-02-27 07:04:56,920 Epoch 1802/2000 +2025-02-27 07:05:12,358 Current Learning Rate: 0.0000024672 +2025-02-27 07:05:12,358 Train Loss: 45609.9447701, Val Loss: 35532.2422445 +2025-02-27 07:05:12,358 Epoch 1803/2000 +2025-02-27 07:05:27,738 Current Learning Rate: 0.0000055506 +2025-02-27 07:05:27,738 Train Loss: 45489.5681169, Val Loss: 35593.7340328 +2025-02-27 07:05:27,738 Epoch 1804/2000 +2025-02-27 07:05:43,469 Current Learning Rate: 0.0000098664 +2025-02-27 07:05:43,470 Train Loss: 45675.6900611, Val Loss: 35561.0439477 +2025-02-27 07:05:43,470 Epoch 1805/2000 +2025-02-27 07:05:59,570 Current Learning Rate: 0.0000154133 +2025-02-27 07:05:59,570 Train Loss: 45574.4232792, Val Loss: 35592.2642944 +2025-02-27 07:05:59,570 Epoch 1806/2000 +2025-02-27 07:06:16,561 Current Learning Rate: 0.0000221902 +2025-02-27 07:06:16,561 Train Loss: 45528.2972022, Val Loss: 35518.7568431 +2025-02-27 07:06:16,562 Epoch 1807/2000 +2025-02-27 07:06:32,396 Current Learning Rate: 0.0000301952 +2025-02-27 07:06:32,396 Train Loss: 45556.4413636, Val Loss: 35538.5241788 +2025-02-27 07:06:32,396 Epoch 1808/2000 +2025-02-27 07:06:48,437 Current Learning Rate: 0.0000394265 +2025-02-27 07:06:48,437 Train Loss: 45465.7956367, Val Loss: 35526.3381995 +2025-02-27 07:06:48,438 Epoch 1809/2000 +2025-02-27 07:07:04,419 Current Learning Rate: 0.0000498817 +2025-02-27 07:07:04,420 Train Loss: 45378.2347711, Val Loss: 35467.3616180 +2025-02-27 07:07:04,420 Epoch 1810/2000 +2025-02-27 07:07:19,883 Current Learning Rate: 0.0000615583 +2025-02-27 07:07:19,884 Train Loss: 45380.1616822, Val Loss: 35402.2019465 +2025-02-27 07:07:19,884 Epoch 1811/2000 +2025-02-27 07:07:34,737 Current Learning Rate: 0.0000744534 +2025-02-27 07:07:34,737 Train Loss: 45209.2984170, Val Loss: 35240.5497263 +2025-02-27 07:07:34,737 Epoch 1812/2000 +2025-02-27 07:07:50,066 Current Learning Rate: 0.0000885637 +2025-02-27 07:07:50,067 Train Loss: 
45097.7805956, Val Loss: 35172.2734185 +2025-02-27 07:07:50,067 Epoch 1813/2000 +2025-02-27 07:08:05,495 Current Learning Rate: 0.0001038859 +2025-02-27 07:08:05,496 Train Loss: 44909.7181520, Val Loss: 35125.4493613 +2025-02-27 07:08:05,496 Epoch 1814/2000 +2025-02-27 07:08:20,696 Current Learning Rate: 0.0001204162 +2025-02-27 07:08:20,697 Train Loss: 44763.5144274, Val Loss: 34882.5501825 +2025-02-27 07:08:20,697 Epoch 1815/2000 +2025-02-27 07:08:37,086 Current Learning Rate: 0.0001381504 +2025-02-27 07:08:37,086 Train Loss: 44603.0020790, Val Loss: 34924.0625000 +2025-02-27 07:08:37,087 Epoch 1816/2000 +2025-02-27 07:08:52,700 Current Learning Rate: 0.0001570842 +2025-02-27 07:08:52,700 Train Loss: 44213.2863065, Val Loss: 34363.3515815 +2025-02-27 07:08:52,700 Epoch 1817/2000 +2025-02-27 07:09:09,207 Current Learning Rate: 0.0001772129 +2025-02-27 07:09:09,207 Train Loss: 44247.7510395, Val Loss: 34340.2828467 +2025-02-27 07:09:09,207 Epoch 1818/2000 +2025-02-27 07:09:24,696 Current Learning Rate: 0.0001985316 +2025-02-27 07:09:24,697 Train Loss: 43961.4941514, Val Loss: 33728.5302616 +2025-02-27 07:09:24,697 Epoch 1819/2000 +2025-02-27 07:09:40,420 Current Learning Rate: 0.0002210349 +2025-02-27 07:09:40,420 Train Loss: 43283.0303827, Val Loss: 33554.1005170 +2025-02-27 07:09:40,420 Epoch 1820/2000 +2025-02-27 07:09:56,300 Current Learning Rate: 0.0002447174 +2025-02-27 07:09:56,301 Train Loss: 43968.2889991, Val Loss: 33986.6294100 +2025-02-27 07:09:56,301 Epoch 1821/2000 +2025-02-27 07:10:12,221 Current Learning Rate: 0.0002695732 +2025-02-27 07:10:12,221 Train Loss: 45159.9927362, Val Loss: 35389.2837591 +2025-02-27 07:10:12,222 Epoch 1822/2000 +2025-02-27 07:10:26,576 Current Learning Rate: 0.0002955962 +2025-02-27 07:10:26,576 Train Loss: 44810.7393423, Val Loss: 33417.2916667 +2025-02-27 07:10:26,576 Epoch 1823/2000 +2025-02-27 07:10:41,537 Current Learning Rate: 0.0003227798 +2025-02-27 07:10:41,537 Train Loss: 43993.5000877, Val Loss: 31921.2709094 
+2025-02-27 07:10:41,537 Epoch 1824/2000 +2025-02-27 07:10:57,427 Current Learning Rate: 0.0003511176 +2025-02-27 07:10:57,427 Train Loss: 46828.1080929, Val Loss: 31050.9591697 +2025-02-27 07:10:57,428 Epoch 1825/2000 +2025-02-27 07:11:12,808 Current Learning Rate: 0.0003806023 +2025-02-27 07:11:12,808 Train Loss: 45739.6546563, Val Loss: 32961.1701642 +2025-02-27 07:11:12,809 Epoch 1826/2000 +2025-02-27 07:11:28,488 Current Learning Rate: 0.0004112269 +2025-02-27 07:11:28,489 Train Loss: 48872.2333058, Val Loss: 29815.0771746 +2025-02-27 07:11:28,489 Epoch 1827/2000 +2025-02-27 07:11:44,060 Current Learning Rate: 0.0004429836 +2025-02-27 07:11:44,061 Train Loss: 66105.0212278, Val Loss: 32101.7936436 +2025-02-27 07:11:44,061 Epoch 1828/2000 +2025-02-27 07:11:59,649 Current Learning Rate: 0.0004758647 +2025-02-27 07:11:59,649 Train Loss: 41433.2998322, Val Loss: 31100.5751977 +2025-02-27 07:11:59,649 Epoch 1829/2000 +2025-02-27 07:12:14,454 Current Learning Rate: 0.0005098621 +2025-02-27 07:12:14,454 Train Loss: 48360.9895051, Val Loss: 29093.4340785 +2025-02-27 07:12:14,455 Epoch 1830/2000 +2025-02-27 07:12:30,477 Current Learning Rate: 0.0005449674 +2025-02-27 07:12:30,477 Train Loss: 111316.3502530, Val Loss: 31836.9491332 +2025-02-27 07:12:30,477 Epoch 1831/2000 +2025-02-27 07:12:46,800 Current Learning Rate: 0.0005811718 +2025-02-27 07:12:46,801 Train Loss: 38567.6279306, Val Loss: 26265.5577099 +2025-02-27 07:12:46,801 Epoch 1832/2000 +2025-02-27 07:13:02,642 Current Learning Rate: 0.0006184666 +2025-02-27 07:13:02,643 Train Loss: 34951.4419898, Val Loss: 29485.4383364 +2025-02-27 07:13:02,644 Epoch 1833/2000 +2025-02-27 07:13:20,826 Current Learning Rate: 0.0006568424 +2025-02-27 07:13:20,826 Train Loss: 106451.0130498, Val Loss: 29960.0581661 +2025-02-27 07:13:20,827 Epoch 1834/2000 +2025-02-27 07:13:36,557 Current Learning Rate: 0.0006962899 +2025-02-27 07:13:36,557 Train Loss: 35037.3676861, Val Loss: 31768.8340176 +2025-02-27 07:13:36,557 Epoch 
1835/2000 +2025-02-27 07:13:52,462 Current Learning Rate: 0.0007367992 +2025-02-27 07:13:52,463 Train Loss: 136088.3570659, Val Loss: 25352.2361618 +2025-02-27 07:13:52,463 Epoch 1836/2000 +2025-02-27 07:14:08,129 Current Learning Rate: 0.0007783604 +2025-02-27 07:14:08,129 Train Loss: 32155.2506011, Val Loss: 23275.8173662 +2025-02-27 07:14:08,130 Epoch 1837/2000 +2025-02-27 07:14:23,714 Current Learning Rate: 0.0008209632 +2025-02-27 07:14:23,715 Train Loss: 69263.8913936, Val Loss: 21890.4045012 +2025-02-27 07:14:23,715 Epoch 1838/2000 +2025-02-27 07:14:39,881 Current Learning Rate: 0.0008645971 +2025-02-27 07:14:39,881 Train Loss: 125541.0283288, Val Loss: 21954.4270833 +2025-02-27 07:14:39,882 Epoch 1839/2000 +2025-02-27 07:14:56,075 Current Learning Rate: 0.0009092514 +2025-02-27 07:14:56,076 Train Loss: 35834.9604248, Val Loss: 21480.2212591 +2025-02-27 07:14:56,076 Epoch 1840/2000 +2025-02-27 07:15:12,605 Current Learning Rate: 0.0009549150 +2025-02-27 07:15:12,606 Train Loss: 226736.5954376, Val Loss: 23278.2432330 +2025-02-27 07:15:12,606 Epoch 1841/2000 +2025-02-27 07:15:28,061 Current Learning Rate: 0.0010015767 +2025-02-27 07:15:28,062 Train Loss: 27388.7026601, Val Loss: 19193.9948297 +2025-02-27 07:15:28,063 Epoch 1842/2000 +2025-02-27 07:15:43,759 Current Learning Rate: 0.0010492249 +2025-02-27 07:15:43,759 Train Loss: 28519.9120516, Val Loss: 18422.5536040 +2025-02-27 07:15:43,759 Epoch 1843/2000 +2025-02-27 07:15:59,741 Current Learning Rate: 0.0010978480 +2025-02-27 07:15:59,742 Train Loss: 290068.3369339, Val Loss: 21208.1554136 +2025-02-27 07:15:59,742 Epoch 1844/2000 +2025-02-27 07:16:15,292 Current Learning Rate: 0.0011474338 +2025-02-27 07:16:15,293 Train Loss: 26069.3451307, Val Loss: 18103.5990724 +2025-02-27 07:16:15,294 Epoch 1845/2000 +2025-02-27 07:16:31,486 Current Learning Rate: 0.0011979702 +2025-02-27 07:16:31,487 Train Loss: 25470.8021178, Val Loss: 16877.7942518 +2025-02-27 07:16:31,487 Epoch 1846/2000 +2025-02-27 07:16:49,429 
Current Learning Rate: 0.0012494447 +2025-02-27 07:16:49,431 Train Loss: 118875.4434676, Val Loss: 16693.7127433 +2025-02-27 07:16:49,432 Epoch 1847/2000 +2025-02-27 07:17:07,551 Current Learning Rate: 0.0013018445 +2025-02-27 07:17:07,552 Train Loss: 43406.0490870, Val Loss: 66239.7658151 +2025-02-27 07:17:07,552 Epoch 1848/2000 +2025-02-27 07:17:24,899 Current Learning Rate: 0.0013551569 +2025-02-27 07:17:24,900 Train Loss: 179720.1247683, Val Loss: 18594.6947232 +2025-02-27 07:17:24,900 Epoch 1849/2000 +2025-02-27 07:17:41,771 Current Learning Rate: 0.0014093685 +2025-02-27 07:17:41,772 Train Loss: 22747.7772142, Val Loss: 15065.1343902 +2025-02-27 07:17:41,772 Epoch 1850/2000 +2025-02-27 07:17:59,340 Current Learning Rate: 0.0014644661 +2025-02-27 07:17:59,341 Train Loss: 250040.5785054, Val Loss: 25929.2590481 +2025-02-27 07:17:59,341 Epoch 1851/2000 +2025-02-27 07:18:15,569 Current Learning Rate: 0.0015204360 +2025-02-27 07:18:15,570 Train Loss: 20823.9076120, Val Loss: 16881.5431113 +2025-02-27 07:18:15,570 Epoch 1852/2000 +2025-02-27 07:18:31,106 Current Learning Rate: 0.0015772645 +2025-02-27 07:18:31,106 Train Loss: 25930.1651575, Val Loss: 52952.9136253 +2025-02-27 07:18:31,106 Epoch 1853/2000 +2025-02-27 07:18:48,154 Current Learning Rate: 0.0016349374 +2025-02-27 07:18:48,154 Train Loss: 143729.9634618, Val Loss: 14467.5047521 +2025-02-27 07:18:48,155 Epoch 1854/2000 +2025-02-27 07:19:04,632 Current Learning Rate: 0.0016934407 +2025-02-27 07:19:04,632 Train Loss: 24957.3178571, Val Loss: 28949.2868005 +2025-02-27 07:19:04,633 Epoch 1855/2000 +2025-02-27 07:19:20,440 Current Learning Rate: 0.0017527598 +2025-02-27 07:19:20,440 Train Loss: 153375.0359433, Val Loss: 19724.6072841 +2025-02-27 07:19:20,441 Epoch 1856/2000 +2025-02-27 07:19:36,857 Current Learning Rate: 0.0018128801 +2025-02-27 07:19:36,857 Train Loss: 18091.7991215, Val Loss: 14863.3650776 +2025-02-27 07:19:36,858 Epoch 1857/2000 +2025-02-27 07:19:53,010 Current Learning Rate: 0.0018737867 
+2025-02-27 07:19:53,011 Train Loss: 119296.4446354, Val Loss: 21899.5730687 +2025-02-27 07:19:53,011 Epoch 1858/2000 +2025-02-27 07:20:09,850 Current Learning Rate: 0.0019354647 +2025-02-27 07:20:09,851 Train Loss: 22525.7855081, Val Loss: 14332.4355611 +2025-02-27 07:20:09,851 Epoch 1859/2000 +2025-02-27 07:20:27,546 Current Learning Rate: 0.0019978989 +2025-02-27 07:20:27,546 Train Loss: 133256.0690938, Val Loss: 15533.6861314 +2025-02-27 07:20:27,546 Epoch 1860/2000 +2025-02-27 07:20:44,939 Current Learning Rate: 0.0020610737 +2025-02-27 07:20:44,939 Train Loss: 15053.3115732, Val Loss: 11169.6293339 +2025-02-27 07:20:44,939 Epoch 1861/2000 +2025-02-27 07:21:00,707 Current Learning Rate: 0.0021249737 +2025-02-27 07:21:00,708 Train Loss: 126721.4887662, Val Loss: 11347.3336755 +2025-02-27 07:21:00,708 Epoch 1862/2000 +2025-02-27 07:21:17,152 Current Learning Rate: 0.0021895831 +2025-02-27 07:21:17,153 Train Loss: 14051.5647073, Val Loss: 10214.8488823 +2025-02-27 07:21:17,153 Epoch 1863/2000 +2025-02-27 07:21:33,537 Current Learning Rate: 0.0022548859 +2025-02-27 07:21:33,537 Train Loss: 222776.6348719, Val Loss: 19137.2236162 +2025-02-27 07:21:33,538 Epoch 1864/2000 +2025-02-27 07:21:49,845 Current Learning Rate: 0.0023208660 +2025-02-27 07:21:49,845 Train Loss: 13855.9688408, Val Loss: 10168.0092381 +2025-02-27 07:21:49,845 Epoch 1865/2000 +2025-02-27 07:22:05,532 Current Learning Rate: 0.0023875072 +2025-02-27 07:22:05,532 Train Loss: 13116.8628519, Val Loss: 9026.8808926 +2025-02-27 07:22:05,532 Epoch 1866/2000 +2025-02-27 07:22:21,814 Current Learning Rate: 0.0024547929 +2025-02-27 07:22:21,814 Train Loss: 35165.3317240, Val Loss: 8302.3161877 +2025-02-27 07:22:21,814 Epoch 1867/2000 +2025-02-27 07:22:38,173 Current Learning Rate: 0.0025227067 +2025-02-27 07:22:38,174 Train Loss: 22521.3072964, Val Loss: 63520.5839416 +2025-02-27 07:22:38,174 Epoch 1868/2000 +2025-02-27 07:22:53,921 Current Learning Rate: 0.0025912316 +2025-02-27 07:22:53,922 Train Loss: 
68091.2995003, Val Loss: 134536.4720195 +2025-02-27 07:22:53,922 Epoch 1869/2000 +2025-02-27 07:23:10,165 Current Learning Rate: 0.0026603509 +2025-02-27 07:23:10,166 Train Loss: 24893.7009067, Val Loss: 5922.9064971 +2025-02-27 07:23:10,166 Epoch 1870/2000 +2025-02-27 07:23:26,263 Current Learning Rate: 0.0027300475 +2025-02-27 07:23:26,264 Train Loss: 12764.3927681, Val Loss: 5548.3010189 +2025-02-27 07:23:26,264 Epoch 1871/2000 +2025-02-27 07:23:42,276 Current Learning Rate: 0.0028003042 +2025-02-27 07:23:42,277 Train Loss: 531141797.9993237, Val Loss: 69219727.8832117 +2025-02-27 07:23:42,277 Epoch 1872/2000 +2025-02-27 07:23:58,525 Current Learning Rate: 0.0028711035 +2025-02-27 07:23:58,526 Train Loss: 8207029.7926060, Val Loss: 1147720.9732360 +2025-02-27 07:23:58,526 Epoch 1873/2000 +2025-02-27 07:24:16,297 Current Learning Rate: 0.0029424282 +2025-02-27 07:24:16,297 Train Loss: 995710.9397856, Val Loss: 740150.1581509 +2025-02-27 07:24:16,298 Epoch 1874/2000 +2025-02-27 07:24:32,152 Current Learning Rate: 0.0030142605 +2025-02-27 07:24:32,152 Train Loss: 713626.2007815, Val Loss: 555260.4866180 +2025-02-27 07:24:32,153 Epoch 1875/2000 +2025-02-27 07:24:49,084 Current Learning Rate: 0.0030865828 +2025-02-27 07:24:49,085 Train Loss: 558288.8072337, Val Loss: 446989.1545012 +2025-02-27 07:24:49,085 Epoch 1876/2000 +2025-02-27 07:25:05,246 Current Learning Rate: 0.0031593772 +2025-02-27 07:25:05,247 Train Loss: 468150.3135958, Val Loss: 382514.9026764 +2025-02-27 07:25:05,247 Epoch 1877/2000 +2025-02-27 07:25:21,369 Current Learning Rate: 0.0032326258 +2025-02-27 07:25:21,369 Train Loss: 406564.7550346, Val Loss: 337504.8965937 +2025-02-27 07:25:21,369 Epoch 1878/2000 +2025-02-27 07:25:37,128 Current Learning Rate: 0.0033063104 +2025-02-27 07:25:37,129 Train Loss: 363038.2531810, Val Loss: 304541.0766423 +2025-02-27 07:25:37,129 Epoch 1879/2000 +2025-02-27 07:25:53,259 Current Learning Rate: 0.0033804129 +2025-02-27 07:25:53,260 Train Loss: 330036.3024747, Val 
Loss: 277782.3357664 +2025-02-27 07:25:53,260 Epoch 1880/2000 +2025-02-27 07:26:10,531 Current Learning Rate: 0.0034549150 +2025-02-27 07:26:10,532 Train Loss: 303354.9909829, Val Loss: 255472.7341849 +2025-02-27 07:26:10,532 Epoch 1881/2000 +2025-02-27 07:26:26,834 Current Learning Rate: 0.0035297984 +2025-02-27 07:26:26,835 Train Loss: 279502.6114618, Val Loss: 236139.5316302 +2025-02-27 07:26:26,835 Epoch 1882/2000 +2025-02-27 07:26:43,161 Current Learning Rate: 0.0036050445 +2025-02-27 07:26:43,162 Train Loss: 259048.5261998, Val Loss: 219395.6021898 +2025-02-27 07:26:43,162 Epoch 1883/2000 +2025-02-27 07:27:00,511 Current Learning Rate: 0.0036806348 +2025-02-27 07:27:00,511 Train Loss: 240851.7418094, Val Loss: 204430.2919708 +2025-02-27 07:27:00,512 Epoch 1884/2000 +2025-02-27 07:27:17,757 Current Learning Rate: 0.0037565506 +2025-02-27 07:27:17,757 Train Loss: 225207.0070634, Val Loss: 190742.8923358 +2025-02-27 07:27:17,757 Epoch 1885/2000 +2025-02-27 07:27:34,314 Current Learning Rate: 0.0038327732 +2025-02-27 07:27:34,315 Train Loss: 211243.0998898, Val Loss: 178932.5212895 +2025-02-27 07:27:34,315 Epoch 1886/2000 +2025-02-27 07:27:51,062 Current Learning Rate: 0.0039092838 +2025-02-27 07:27:51,062 Train Loss: 197283.3067829, Val Loss: 167506.9343066 +2025-02-27 07:27:51,063 Epoch 1887/2000 +2025-02-27 07:28:07,222 Current Learning Rate: 0.0039860635 +2025-02-27 07:28:07,223 Train Loss: 185530.8010219, Val Loss: 157663.8807786 +2025-02-27 07:28:07,224 Epoch 1888/2000 +2025-02-27 07:28:23,466 Current Learning Rate: 0.0040630934 +2025-02-27 07:28:23,467 Train Loss: 175102.5265504, Val Loss: 148513.9750608 +2025-02-27 07:28:23,467 Epoch 1889/2000 +2025-02-27 07:28:39,609 Current Learning Rate: 0.0041403545 +2025-02-27 07:28:39,610 Train Loss: 164659.3274722, Val Loss: 139785.0273723 +2025-02-27 07:28:39,611 Epoch 1890/2000 +2025-02-27 07:28:56,570 Current Learning Rate: 0.0042178277 +2025-02-27 07:28:56,571 Train Loss: 155488.6536920, Val Loss: 
131853.4154501 +2025-02-27 07:28:56,571 Epoch 1891/2000 +2025-02-27 07:29:12,718 Current Learning Rate: 0.0042954938 +2025-02-27 07:29:12,719 Train Loss: 147024.0371706, Val Loss: 124670.1368613 +2025-02-27 07:29:12,719 Epoch 1892/2000 +2025-02-27 07:29:28,618 Current Learning Rate: 0.0043733338 +2025-02-27 07:29:28,619 Train Loss: 139145.6306983, Val Loss: 118022.3129562 +2025-02-27 07:29:28,620 Epoch 1893/2000 +2025-02-27 07:29:46,036 Current Learning Rate: 0.0044513284 +2025-02-27 07:29:46,037 Train Loss: 131618.6072287, Val Loss: 111561.8597932 +2025-02-27 07:29:46,038 Epoch 1894/2000 +2025-02-27 07:30:02,631 Current Learning Rate: 0.0045294584 +2025-02-27 07:30:02,632 Train Loss: 124662.1743813, Val Loss: 105582.1608881 +2025-02-27 07:30:02,632 Epoch 1895/2000 +2025-02-27 07:30:18,416 Current Learning Rate: 0.0046077045 +2025-02-27 07:30:18,416 Train Loss: 118448.5702835, Val Loss: 100014.2472628 +2025-02-27 07:30:18,417 Epoch 1896/2000 +2025-02-27 07:30:35,126 Current Learning Rate: 0.0046860474 +2025-02-27 07:30:35,126 Train Loss: 111819.3247170, Val Loss: 94981.5632603 +2025-02-27 07:30:35,127 Epoch 1897/2000 +2025-02-27 07:30:52,694 Current Learning Rate: 0.0047644677 +2025-02-27 07:30:52,695 Train Loss: 105923.0069632, Val Loss: 89903.8442822 +2025-02-27 07:30:52,695 Epoch 1898/2000 +2025-02-27 07:31:09,585 Current Learning Rate: 0.0048429462 +2025-02-27 07:31:09,585 Train Loss: 100962.9553652, Val Loss: 85258.0915450 +2025-02-27 07:31:09,585 Epoch 1899/2000 +2025-02-27 07:31:26,490 Current Learning Rate: 0.0049214634 +2025-02-27 07:31:26,490 Train Loss: 95481.3672227, Val Loss: 81075.0121655 +2025-02-27 07:31:26,491 Epoch 1900/2000 +2025-02-27 07:31:42,606 Current Learning Rate: 0.0050000000 +2025-02-27 07:31:42,606 Train Loss: 90790.9362789, Val Loss: 76498.0717762 +2025-02-27 07:31:42,606 Epoch 1901/2000 +2025-02-27 07:31:59,744 Current Learning Rate: 0.0050785366 +2025-02-27 07:31:59,745 Train Loss: 86197.0511472, Val Loss: 73035.8105231 +2025-02-27 
07:31:59,745 Epoch 1902/2000 +2025-02-27 07:32:17,459 Current Learning Rate: 0.0051570538 +2025-02-27 07:32:17,460 Train Loss: 81983.3709047, Val Loss: 68985.7116788 +2025-02-27 07:32:17,460 Epoch 1903/2000 +2025-02-27 07:32:35,414 Current Learning Rate: 0.0052355323 +2025-02-27 07:32:35,415 Train Loss: 78030.5854874, Val Loss: 65539.8038321 +2025-02-27 07:32:35,415 Epoch 1904/2000 +2025-02-27 07:32:52,633 Current Learning Rate: 0.0053139526 +2025-02-27 07:32:52,634 Train Loss: 74102.7993187, Val Loss: 62435.8416971 +2025-02-27 07:32:52,634 Epoch 1905/2000 +2025-02-27 07:33:10,177 Current Learning Rate: 0.0053922955 +2025-02-27 07:33:10,178 Train Loss: 70717.0794510, Val Loss: 59064.6205900 +2025-02-27 07:33:10,179 Epoch 1906/2000 +2025-02-27 07:33:27,910 Current Learning Rate: 0.0054705416 +2025-02-27 07:33:27,910 Train Loss: 67308.2513651, Val Loss: 56218.2238443 +2025-02-27 07:33:27,911 Epoch 1907/2000 +2025-02-27 07:33:46,422 Current Learning Rate: 0.0055486716 +2025-02-27 07:33:46,422 Train Loss: 63775.4546138, Val Loss: 53217.2293187 +2025-02-27 07:33:46,423 Epoch 1908/2000 +2025-02-27 07:34:04,015 Current Learning Rate: 0.0056266662 +2025-02-27 07:34:04,016 Train Loss: 60660.0782737, Val Loss: 50573.9225973 +2025-02-27 07:34:04,016 Epoch 1909/2000 +2025-02-27 07:34:22,400 Current Learning Rate: 0.0057045062 +2025-02-27 07:34:22,402 Train Loss: 58183.8230763, Val Loss: 49362.2285584 +2025-02-27 07:34:22,403 Epoch 1910/2000 +2025-02-27 07:34:38,887 Current Learning Rate: 0.0057821723 +2025-02-27 07:34:38,887 Train Loss: 55128.3649559, Val Loss: 45734.4122567 +2025-02-27 07:34:38,888 Epoch 1911/2000 +2025-02-27 07:34:57,542 Current Learning Rate: 0.0058596455 +2025-02-27 07:34:57,542 Train Loss: 52576.1846258, Val Loss: 43601.6803528 +2025-02-27 07:34:57,542 Epoch 1912/2000 +2025-02-27 07:35:15,360 Current Learning Rate: 0.0059369066 +2025-02-27 07:35:15,361 Train Loss: 50281.4349765, Val Loss: 41396.9601582 +2025-02-27 07:35:15,365 Epoch 1913/2000 +2025-02-27 
07:35:34,417 Current Learning Rate: 0.0060139365 +2025-02-27 07:35:34,417 Train Loss: 47916.2510645, Val Loss: 39362.3958333 +2025-02-27 07:35:34,418 Epoch 1914/2000 +2025-02-27 07:35:51,912 Current Learning Rate: 0.0060907162 +2025-02-27 07:35:51,913 Train Loss: 45908.2423480, Val Loss: 37574.9315693 +2025-02-27 07:35:51,913 Epoch 1915/2000 +2025-02-27 07:36:08,860 Current Learning Rate: 0.0061672268 +2025-02-27 07:36:08,861 Train Loss: 43987.0048718, Val Loss: 36375.8333333 +2025-02-27 07:36:08,862 Epoch 1916/2000 +2025-02-27 07:36:25,607 Current Learning Rate: 0.0062434494 +2025-02-27 07:36:25,608 Train Loss: 42151.1190011, Val Loss: 34375.4288321 +2025-02-27 07:36:25,608 Epoch 1917/2000 +2025-02-27 07:36:42,159 Current Learning Rate: 0.0063193652 +2025-02-27 07:36:42,159 Train Loss: 40313.9890166, Val Loss: 32830.1315389 +2025-02-27 07:36:42,160 Epoch 1918/2000 +2025-02-27 07:36:59,646 Current Learning Rate: 0.0063949555 +2025-02-27 07:36:59,647 Train Loss: 38686.6542681, Val Loss: 31173.9518704 +2025-02-27 07:36:59,647 Epoch 1919/2000 +2025-02-27 07:37:16,800 Current Learning Rate: 0.0064702016 +2025-02-27 07:37:16,801 Train Loss: 37076.8472598, Val Loss: 30012.4916363 +2025-02-27 07:37:16,801 Epoch 1920/2000 +2025-02-27 07:37:33,215 Current Learning Rate: 0.0065450850 +2025-02-27 07:37:33,216 Train Loss: 35433.5663385, Val Loss: 28585.4786344 +2025-02-27 07:37:33,216 Epoch 1921/2000 +2025-02-27 07:37:50,097 Current Learning Rate: 0.0066195871 +2025-02-27 07:37:50,097 Train Loss: 34473.0376465, Val Loss: 27858.3561436 +2025-02-27 07:37:50,097 Epoch 1922/2000 +2025-02-27 07:38:05,614 Current Learning Rate: 0.0066936896 +2025-02-27 07:38:05,614 Train Loss: 32761.1406046, Val Loss: 26189.6719130 +2025-02-27 07:38:05,614 Epoch 1923/2000 +2025-02-27 07:38:22,245 Current Learning Rate: 0.0067673742 +2025-02-27 07:38:22,245 Train Loss: 31585.7786043, Val Loss: 25148.1120742 +2025-02-27 07:38:22,245 Epoch 1924/2000 +2025-02-27 07:38:40,463 Current Learning Rate: 
0.0068406228 +2025-02-27 07:38:40,464 Train Loss: 30396.3454939, Val Loss: 24170.6812652 +2025-02-27 07:38:40,464 Epoch 1925/2000 +2025-02-27 07:38:57,477 Current Learning Rate: 0.0069134172 +2025-02-27 07:38:57,478 Train Loss: 29300.3650999, Val Loss: 23396.7012622 +2025-02-27 07:38:57,478 Epoch 1926/2000 +2025-02-27 07:39:14,663 Current Learning Rate: 0.0069857395 +2025-02-27 07:39:14,664 Train Loss: 28897.2837454, Val Loss: 22643.7085614 +2025-02-27 07:39:14,664 Epoch 1927/2000 +2025-02-27 07:39:31,630 Current Learning Rate: 0.0070575718 +2025-02-27 07:39:31,631 Train Loss: 27420.5442841, Val Loss: 21529.9490572 +2025-02-27 07:39:31,631 Epoch 1928/2000 +2025-02-27 07:39:48,001 Current Learning Rate: 0.0071288965 +2025-02-27 07:39:48,002 Train Loss: 26237.0367135, Val Loss: 20930.9869221 +2025-02-27 07:39:48,002 Epoch 1929/2000 +2025-02-27 07:40:04,741 Current Learning Rate: 0.0071996958 +2025-02-27 07:40:04,742 Train Loss: 25827.9289713, Val Loss: 20158.0725365 +2025-02-27 07:40:04,742 Epoch 1930/2000 +2025-02-27 07:40:20,892 Current Learning Rate: 0.0072699525 +2025-02-27 07:40:20,892 Train Loss: 24994.1932797, Val Loss: 19058.0691150 +2025-02-27 07:40:20,893 Epoch 1931/2000 +2025-02-27 07:40:35,988 Current Learning Rate: 0.0073396491 +2025-02-27 07:40:35,989 Train Loss: 25138.8088305, Val Loss: 18568.7644465 +2025-02-27 07:40:35,989 Epoch 1932/2000 +2025-02-27 07:40:52,653 Current Learning Rate: 0.0074087684 +2025-02-27 07:40:52,653 Train Loss: 24674.1782825, Val Loss: 22672.4118005 +2025-02-27 07:40:52,654 Epoch 1933/2000 +2025-02-27 07:41:08,263 Current Learning Rate: 0.0074772933 +2025-02-27 07:41:08,264 Train Loss: 54120.4723349, Val Loss: 76621.5009124 +2025-02-27 07:41:08,265 Epoch 1934/2000 +2025-02-27 07:41:24,197 Current Learning Rate: 0.0075452071 +2025-02-27 07:41:24,198 Train Loss: 275388.4412258, Val Loss: 26347.1160280 +2025-02-27 07:41:24,198 Epoch 1935/2000 +2025-02-27 07:41:40,190 Current Learning Rate: 0.0076124928 +2025-02-27 07:41:40,190 
Train Loss: 22550.4631299, Val Loss: 17360.3121198 +2025-02-27 07:41:40,191 Epoch 1936/2000 +2025-02-27 07:41:56,947 Current Learning Rate: 0.0076791340 +2025-02-27 07:41:56,948 Train Loss: 21260.6454764, Val Loss: 18430.5763382 +2025-02-27 07:41:56,948 Epoch 1937/2000 +2025-02-27 07:42:12,892 Current Learning Rate: 0.0077451141 +2025-02-27 07:42:12,893 Train Loss: 25698.9215760, Val Loss: 15511.9069343 +2025-02-27 07:42:12,893 Epoch 1938/2000 +2025-02-27 07:42:28,990 Current Learning Rate: 0.0078104169 +2025-02-27 07:42:28,990 Train Loss: 173217.0859007, Val Loss: 20297.1749544 +2025-02-27 07:42:28,990 Epoch 1939/2000 +2025-02-27 07:42:45,518 Current Learning Rate: 0.0078750263 +2025-02-27 07:42:45,518 Train Loss: 19246.7087310, Val Loss: 13003.9666971 +2025-02-27 07:42:45,519 Epoch 1940/2000 +2025-02-27 07:43:01,788 Current Learning Rate: 0.0079389263 +2025-02-27 07:43:01,789 Train Loss: 18333.3779807, Val Loss: 12387.3515435 +2025-02-27 07:43:01,789 Epoch 1941/2000 +2025-02-27 07:43:18,991 Current Learning Rate: 0.0080021011 +2025-02-27 07:43:18,992 Train Loss: 34188.1326708, Val Loss: 37505.5390815 +2025-02-27 07:43:18,992 Epoch 1942/2000 +2025-02-27 07:43:34,859 Current Learning Rate: 0.0080645353 +2025-02-27 07:43:34,859 Train Loss: 105737.9139114, Val Loss: 31397.1300943 +2025-02-27 07:43:34,860 Epoch 1943/2000 +2025-02-27 07:43:50,139 Current Learning Rate: 0.0081262133 +2025-02-27 07:43:50,139 Train Loss: 18564.2810402, Val Loss: 10639.2069647 +2025-02-27 07:43:50,140 Epoch 1944/2000 +2025-02-27 07:44:05,460 Current Learning Rate: 0.0081871199 +2025-02-27 07:44:05,461 Train Loss: 20200.2847754, Val Loss: 38685.5847019 +2025-02-27 07:44:05,461 Epoch 1945/2000 +2025-02-27 07:44:21,076 Current Learning Rate: 0.0082472402 +2025-02-27 07:44:21,077 Train Loss: 51879.9090772, Val Loss: 9397.7159367 +2025-02-27 07:44:21,077 Epoch 1946/2000 +2025-02-27 07:44:36,991 Current Learning Rate: 0.0083065593 +2025-02-27 07:44:36,992 Train Loss: 68889.5241741, Val Loss: 
82781.9160584 +2025-02-27 07:44:36,992 Epoch 1947/2000 +2025-02-27 07:44:53,159 Current Learning Rate: 0.0083650626 +2025-02-27 07:44:53,160 Train Loss: 21045.0153730, Val Loss: 9224.0104167 +2025-02-27 07:44:53,160 Epoch 1948/2000 +2025-02-27 07:45:09,116 Current Learning Rate: 0.0084227355 +2025-02-27 07:45:09,117 Train Loss: 22759.1292362, Val Loss: 22742.0848540 +2025-02-27 07:45:09,117 Epoch 1949/2000 +2025-02-27 07:45:25,428 Current Learning Rate: 0.0084795640 +2025-02-27 07:45:25,429 Train Loss: 19337.9365482, Val Loss: 9742.5501825 +2025-02-27 07:45:25,429 Epoch 1950/2000 +2025-02-27 07:45:41,653 Current Learning Rate: 0.0085355339 +2025-02-27 07:45:41,654 Train Loss: 136187.3998566, Val Loss: 8779.4135873 +2025-02-27 07:45:41,655 Epoch 1951/2000 +2025-02-27 07:45:57,479 Current Learning Rate: 0.0085906315 +2025-02-27 07:45:57,480 Train Loss: 12167.2494615, Val Loss: 7085.2472057 +2025-02-27 07:45:57,480 Epoch 1952/2000 +2025-02-27 07:46:13,391 Current Learning Rate: 0.0086448431 +2025-02-27 07:46:13,392 Train Loss: 11167.8137524, Val Loss: 6961.8865002 +2025-02-27 07:46:13,392 Epoch 1953/2000 +2025-02-27 07:46:29,273 Current Learning Rate: 0.0086981555 +2025-02-27 07:46:29,274 Train Loss: 10807.9545042, Val Loss: 17031.7803376 +2025-02-27 07:46:29,274 Epoch 1954/2000 +2025-02-27 07:46:45,545 Current Learning Rate: 0.0087505553 +2025-02-27 07:46:45,546 Train Loss: 11556.5764640, Val Loss: 6133.5122985 +2025-02-27 07:46:45,546 Epoch 1955/2000 +2025-02-27 07:47:01,123 Current Learning Rate: 0.0088020298 +2025-02-27 07:47:01,124 Train Loss: 23077.9567272, Val Loss: 6714.6958637 +2025-02-27 07:47:01,124 Epoch 1956/2000 +2025-02-27 07:47:17,994 Current Learning Rate: 0.0088525662 +2025-02-27 07:47:17,994 Train Loss: 13471.4090209, Val Loss: 10775.5983881 +2025-02-27 07:47:17,994 Epoch 1957/2000 +2025-02-27 07:47:34,089 Current Learning Rate: 0.0089021520 +2025-02-27 07:47:34,090 Train Loss: 56209.6539362, Val Loss: 6353.3028247 +2025-02-27 07:47:34,090 Epoch 
1958/2000 +2025-02-27 07:47:50,526 Current Learning Rate: 0.0089507751 +2025-02-27 07:47:50,527 Train Loss: 8979.2590297, Val Loss: 5687.1982398 +2025-02-27 07:47:50,527 Epoch 1959/2000 +2025-02-27 07:48:06,422 Current Learning Rate: 0.0089984233 +2025-02-27 07:48:06,423 Train Loss: 10260.4305994, Val Loss: 4996.5670620 +2025-02-27 07:48:06,423 Epoch 1960/2000 +2025-02-27 07:48:22,767 Current Learning Rate: 0.0090450850 +2025-02-27 07:48:22,768 Train Loss: 42690.0886685, Val Loss: 6550.3528931 +2025-02-27 07:48:22,768 Epoch 1961/2000 +2025-02-27 07:48:38,692 Current Learning Rate: 0.0090907486 +2025-02-27 07:48:38,693 Train Loss: 14722.8933020, Val Loss: 4759.5142374 +2025-02-27 07:48:38,693 Epoch 1962/2000 +2025-02-27 07:48:54,760 Current Learning Rate: 0.0091354029 +2025-02-27 07:48:54,761 Train Loss: 7685.2833509, Val Loss: 4717.6868537 +2025-02-27 07:48:54,761 Epoch 1963/2000 +2025-02-27 07:49:10,789 Current Learning Rate: 0.0091790368 +2025-02-27 07:49:10,789 Train Loss: 13174.1104270, Val Loss: 4830.6552235 +2025-02-27 07:49:10,789 Epoch 1964/2000 +2025-02-27 07:49:26,726 Current Learning Rate: 0.0092216396 +2025-02-27 07:49:26,726 Train Loss: 9527.5532887, Val Loss: 4641.7896518 +2025-02-27 07:49:26,726 Epoch 1965/2000 +2025-02-27 07:49:42,345 Current Learning Rate: 0.0092632008 +2025-02-27 07:49:42,345 Train Loss: 12643.2523827, Val Loss: 5417.1477722 +2025-02-27 07:49:42,346 Epoch 1966/2000 +2025-02-27 07:49:58,197 Current Learning Rate: 0.0093037101 +2025-02-27 07:49:58,198 Train Loss: 17534.4389888, Val Loss: 7401.8893514 +2025-02-27 07:49:58,198 Epoch 1967/2000 +2025-02-27 07:50:14,409 Current Learning Rate: 0.0093431576 +2025-02-27 07:50:14,410 Train Loss: 10267.8308161, Val Loss: 6418.0810903 +2025-02-27 07:50:14,410 Epoch 1968/2000 +2025-02-27 07:50:30,251 Current Learning Rate: 0.0093815334 +2025-02-27 07:50:30,252 Train Loss: 22018.9134042, Val Loss: 50471.2530414 +2025-02-27 07:50:30,252 Epoch 1969/2000 +2025-02-27 07:50:45,349 Current Learning 
Rate: 0.0094188282 +2025-02-27 07:50:45,350 Train Loss: 13019.6852379, Val Loss: 3592.2987473 +2025-02-27 07:50:45,350 Epoch 1970/2000 +2025-02-27 07:51:00,498 Current Learning Rate: 0.0094550326 +2025-02-27 07:51:00,498 Train Loss: 5677.3307879, Val Loss: 3215.6717134 +2025-02-27 07:51:00,499 Epoch 1971/2000 +2025-02-27 07:51:16,156 Current Learning Rate: 0.0094901379 +2025-02-27 07:51:16,156 Train Loss: 9067.8655226, Val Loss: 10986.1325274 +2025-02-27 07:51:16,156 Epoch 1972/2000 +2025-02-27 07:51:31,547 Current Learning Rate: 0.0095241353 +2025-02-27 07:51:31,547 Train Loss: 7208.4670562, Val Loss: 3008.8367739 +2025-02-27 07:51:31,548 Epoch 1973/2000 +2025-02-27 07:51:46,902 Current Learning Rate: 0.0095570164 +2025-02-27 07:51:46,902 Train Loss: 2882194878025.5756836, Val Loss: 21150848845211.0937500 +2025-02-27 07:51:46,902 Epoch 1974/2000 +2025-02-27 07:52:02,703 Current Learning Rate: 0.0095887731 +2025-02-27 07:52:02,704 Train Loss: 642976121621.1601562, Val Loss: 1028148346.0827250 +2025-02-27 07:52:02,704 Epoch 1975/2000 +2025-02-27 07:52:17,698 Current Learning Rate: 0.0096193977 +2025-02-27 07:52:17,699 Train Loss: 619271022.1100091, Val Loss: 364192862.6763991 +2025-02-27 07:52:17,699 Epoch 1976/2000 +2025-02-27 07:52:32,359 Current Learning Rate: 0.0096488824 +2025-02-27 07:52:32,360 Train Loss: 333629363.0538023, Val Loss: 283332297.8102190 +2025-02-27 07:52:32,360 Epoch 1977/2000 +2025-02-27 07:52:47,433 Current Learning Rate: 0.0096772202 +2025-02-27 07:52:47,433 Train Loss: 267855568.0112213, Val Loss: 234631736.0583942 +2025-02-27 07:52:47,434 Epoch 1978/2000 +2025-02-27 07:53:02,639 Current Learning Rate: 0.0097044038 +2025-02-27 07:53:02,639 Train Loss: 226385768.6724777, Val Loss: 200591045.4501216 +2025-02-27 07:53:02,639 Epoch 1979/2000 +2025-02-27 07:53:17,551 Current Learning Rate: 0.0097304268 +2025-02-27 07:53:17,551 Train Loss: 195755703.1319507, Val Loss: 174994851.1922141 +2025-02-27 07:53:17,552 Epoch 1980/2000 +2025-02-27 
07:53:32,418 Current Learning Rate: 0.0097552826 +2025-02-27 07:53:32,419 Train Loss: 172162390.9748522, Val Loss: 154973187.7372263 +2025-02-27 07:53:32,419 Epoch 1981/2000 +2025-02-27 07:53:47,257 Current Learning Rate: 0.0097789651 +2025-02-27 07:53:47,257 Train Loss: 153632788.0573089, Val Loss: 139033447.3965937 +2025-02-27 07:53:47,258 Epoch 1982/2000 +2025-02-27 07:54:03,620 Current Learning Rate: 0.0098014684 +2025-02-27 07:54:03,620 Train Loss: 138528807.6785893, Val Loss: 126034247.0072993 +2025-02-27 07:54:03,620 Epoch 1983/2000 +2025-02-27 07:54:21,515 Current Learning Rate: 0.0098227871 +2025-02-27 07:54:21,516 Train Loss: 126551064.3662960, Val Loss: 115035899.9513382 +2025-02-27 07:54:21,516 Epoch 1984/2000 +2025-02-27 07:54:38,231 Current Learning Rate: 0.0098429158 +2025-02-27 07:54:38,231 Train Loss: 115638433.5357179, Val Loss: 105924086.0340633 +2025-02-27 07:54:38,231 Epoch 1985/2000 +2025-02-27 07:54:55,521 Current Learning Rate: 0.0098618496 +2025-02-27 07:54:55,522 Train Loss: 106824788.1534916, Val Loss: 98106082.7250608 +2025-02-27 07:54:55,522 Epoch 1986/2000 +2025-02-27 07:55:13,481 Current Learning Rate: 0.0098795838 +2025-02-27 07:55:13,481 Train Loss: 99114084.4404368, Val Loss: 90892909.0024331 +2025-02-27 07:55:13,482 Epoch 1987/2000 +2025-02-27 07:55:30,782 Current Learning Rate: 0.0098961141 +2025-02-27 07:55:30,783 Train Loss: 92380850.0791504, Val Loss: 84821563.7956204 +2025-02-27 07:55:30,783 Epoch 1988/2000 +2025-02-27 07:55:45,913 Current Learning Rate: 0.0099114363 +2025-02-27 07:55:45,914 Train Loss: 86206087.2457670, Val Loss: 79321444.2822384 +2025-02-27 07:55:45,914 Epoch 1989/2000 +2025-02-27 07:56:01,245 Current Learning Rate: 0.0099255466 +2025-02-27 07:56:01,245 Train Loss: 80728005.8030258, Val Loss: 74323290.3163017 +2025-02-27 07:56:01,245 Epoch 1990/2000 +2025-02-27 07:56:16,815 Current Learning Rate: 0.0099384417 +2025-02-27 07:56:16,816 Train Loss: 75657963.8913936, Val Loss: 69738758.5401460 +2025-02-27 
07:56:16,816 Epoch 1991/2000 +2025-02-27 07:56:32,104 Current Learning Rate: 0.0099501183 +2025-02-27 07:56:32,105 Train Loss: 70975993.2030859, Val Loss: 65505860.8272506 +2025-02-27 07:56:32,105 Epoch 1992/2000 +2025-02-27 07:56:47,491 Current Learning Rate: 0.0099605735 +2025-02-27 07:56:47,491 Train Loss: 66997084.7971145, Val Loss: 61700085.2554745 +2025-02-27 07:56:47,492 Epoch 1993/2000 +2025-02-27 07:57:03,372 Current Learning Rate: 0.0099698048 +2025-02-27 07:57:03,373 Train Loss: 63119567.3700030, Val Loss: 58361288.5644769 +2025-02-27 07:57:03,373 Epoch 1994/2000 +2025-02-27 07:57:19,438 Current Learning Rate: 0.0099778098 +2025-02-27 07:57:19,439 Train Loss: 59631208.6083559, Val Loss: 55077865.8880779 +2025-02-27 07:57:19,439 Epoch 1995/2000 +2025-02-27 07:57:34,803 Current Learning Rate: 0.0099845867 +2025-02-27 07:57:34,804 Train Loss: 56427434.8462078, Val Loss: 52289594.5498783 +2025-02-27 07:57:34,804 Epoch 1996/2000 +2025-02-27 07:57:50,064 Current Learning Rate: 0.0099901336 +2025-02-27 07:57:50,065 Train Loss: 53396027.5563571, Val Loss: 49509699.2700730 +2025-02-27 07:57:50,065 Epoch 1997/2000 +2025-02-27 07:58:04,734 Current Learning Rate: 0.0099944494 +2025-02-27 07:58:04,734 Train Loss: 50700280.2284340, Val Loss: 46878994.0632603 +2025-02-27 07:58:04,735 Epoch 1998/2000 +2025-02-27 07:58:20,237 Current Learning Rate: 0.0099975328 +2025-02-27 07:58:20,238 Train Loss: 48150552.4175934, Val Loss: 44520329.3430657 +2025-02-27 07:58:20,238 Epoch 1999/2000 +2025-02-27 07:58:36,739 Current Learning Rate: 0.0099993832 +2025-02-27 07:58:36,740 Train Loss: 45733286.4602745, Val Loss: 42329714.4525547 +2025-02-27 07:58:36,740 Epoch 2000/2000 +2025-02-27 07:58:53,299 Current Learning Rate: 0.0100000000 +2025-02-27 07:58:53,299 Train Loss: 43393155.0906723, Val Loss: 40270734.7931874 +2025-02-27 07:58:58,901 Testing completed and best model saved. 
+2025-02-27 16:29:56,917 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 16:30:36,719 Loading best model from checkpoint. +2025-02-27 16:30:37,168 Epoch 1/2000 +2025-02-27 16:30:39,281 Reducer buckets have been rebuilt in this iteration. +2025-02-27 16:31:43,386 Current Learning Rate: 0.0099993832 +2025-02-27 16:31:43,611 Train Loss: 132.8605200, Val Loss: 0.2994722 +2025-02-27 16:31:43,611 Epoch 2/2000 +2025-02-27 16:32:04,811 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 16:32:49,864 Loading best model from checkpoint. +2025-02-27 16:33:20,717 Testing completed and best model saved. +2025-02-27 16:35:32,896 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-27 16:36:20,453 Loading best model from checkpoint. +2025-02-27 16:36:55,140 Testing completed and best model saved. +2025-03-01 22:30:12,698 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-01 22:30:55,026 Loading best model from checkpoint. +2025-03-01 22:31:28,894 Testing completed and best model saved. 
diff --git a/Exp3_Kuroshio_forecasting/logs/Kuro_Unet_exp_128_20250324_training_log.log b/Exp3_Kuroshio_forecasting/logs/Kuro_Unet_exp_128_20250324_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..7e073811b2a513aef0000202810ea7facebbcdca --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Kuro_Unet_exp_128_20250324_training_log.log @@ -0,0 +1,6018 @@ +2025-03-24 16:09:53,677 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-24 16:09:53,713 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-24 16:09:53,799 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-24 16:09:53,817 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-24 16:09:53,825 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-24 16:09:53,843 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-24 16:09:53,848 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-24 16:09:53,850 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-24 16:11:41,812 Epoch 1/2000 +2025-03-24 16:13:40,953 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-03-24 16:13:41,107 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-03-24 16:13:41,442 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-03-24 16:13:41,470 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-03-24 16:13:41,567 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-03-24 16:13:41,572 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-03-24 16:13:41,579 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-03-24 16:13:41,583 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-03-24 16:15:00,073 Epoch 1/2000 +2025-03-24 16:17:11,997 Current Learning Rate: 0.0009999383 +2025-03-24 16:17:12,062 Train Loss: 0.0151061, Val Loss: 0.0102887 +2025-03-24 16:17:12,063 Epoch 2/2000 +2025-03-24 16:19:23,494 Current 
Learning Rate: 0.0009997533 +2025-03-24 16:19:23,564 Train Loss: 0.0088672, Val Loss: 0.0072783 +2025-03-24 16:19:23,564 Epoch 3/2000 +2025-03-24 16:20:54,894 Current Learning Rate: 0.0009994449 +2025-03-24 16:20:54,959 Train Loss: 0.0058641, Val Loss: 0.0046539 +2025-03-24 16:20:54,959 Epoch 4/2000 +2025-03-24 16:22:00,178 Current Learning Rate: 0.0009990134 +2025-03-24 16:22:00,246 Train Loss: 0.0036977, Val Loss: 0.0031449 +2025-03-24 16:22:00,246 Epoch 5/2000 +2025-03-24 16:23:37,171 Current Learning Rate: 0.0009984587 +2025-03-24 16:23:37,239 Train Loss: 0.0026175, Val Loss: 0.0024990 +2025-03-24 16:23:37,239 Epoch 6/2000 +2025-03-24 16:25:49,008 Current Learning Rate: 0.0009977810 +2025-03-24 16:25:49,086 Train Loss: 0.0021008, Val Loss: 0.0020876 +2025-03-24 16:25:49,086 Epoch 7/2000 +2025-03-24 16:28:02,335 Current Learning Rate: 0.0009969805 +2025-03-24 16:28:02,413 Train Loss: 0.0017872, Val Loss: 0.0017491 +2025-03-24 16:28:02,414 Epoch 8/2000 +2025-03-24 16:30:13,985 Current Learning Rate: 0.0009960574 +2025-03-24 16:30:14,073 Train Loss: 0.0015009, Val Loss: 0.0015769 +2025-03-24 16:30:14,073 Epoch 9/2000 +2025-03-24 16:32:25,297 Current Learning Rate: 0.0009950118 +2025-03-24 16:32:25,367 Train Loss: 0.0014158, Val Loss: 0.0014813 +2025-03-24 16:32:25,367 Epoch 10/2000 +2025-03-24 16:34:38,126 Current Learning Rate: 0.0009938442 +2025-03-24 16:34:38,194 Train Loss: 0.0013255, Val Loss: 0.0013811 +2025-03-24 16:34:38,194 Epoch 11/2000 +2025-03-24 16:36:51,719 Current Learning Rate: 0.0009925547 +2025-03-24 16:36:51,788 Train Loss: 0.0012440, Val Loss: 0.0012779 +2025-03-24 16:36:51,789 Epoch 12/2000 +2025-03-24 16:39:05,224 Current Learning Rate: 0.0009911436 +2025-03-24 16:39:05,291 Train Loss: 0.0011321, Val Loss: 0.0011539 +2025-03-24 16:39:05,291 Epoch 13/2000 +2025-03-24 16:41:17,116 Current Learning Rate: 0.0009896114 +2025-03-24 16:41:17,117 Train Loss: 0.0010356, Val Loss: 0.0013328 +2025-03-24 16:41:17,117 Epoch 14/2000 +2025-03-24 
16:43:29,897 Current Learning Rate: 0.0009879584 +2025-03-24 16:43:29,897 Train Loss: 0.0010327, Val Loss: 0.0012103 +2025-03-24 16:43:29,897 Epoch 15/2000 +2025-03-24 16:45:41,799 Current Learning Rate: 0.0009861850 +2025-03-24 16:45:41,799 Train Loss: 0.0009792, Val Loss: 0.0011615 +2025-03-24 16:45:41,800 Epoch 16/2000 +2025-03-24 16:47:54,723 Current Learning Rate: 0.0009842916 +2025-03-24 16:47:54,789 Train Loss: 0.0009533, Val Loss: 0.0011402 +2025-03-24 16:47:54,790 Epoch 17/2000 +2025-03-24 16:50:06,990 Current Learning Rate: 0.0009822787 +2025-03-24 16:50:07,057 Train Loss: 0.0009388, Val Loss: 0.0010381 +2025-03-24 16:50:07,057 Epoch 18/2000 +2025-03-24 16:52:18,892 Current Learning Rate: 0.0009801468 +2025-03-24 16:52:18,961 Train Loss: 0.0008870, Val Loss: 0.0009986 +2025-03-24 16:52:18,961 Epoch 19/2000 +2025-03-24 16:54:30,862 Current Learning Rate: 0.0009778965 +2025-03-24 16:54:31,062 Train Loss: 0.0008419, Val Loss: 0.0009363 +2025-03-24 16:54:31,062 Epoch 20/2000 +2025-03-24 16:56:44,174 Current Learning Rate: 0.0009755283 +2025-03-24 16:56:44,259 Train Loss: 0.0007718, Val Loss: 0.0008380 +2025-03-24 16:56:44,259 Epoch 21/2000 +2025-03-24 16:58:56,332 Current Learning Rate: 0.0009730427 +2025-03-24 16:58:56,332 Train Loss: 0.0007552, Val Loss: 0.0008679 +2025-03-24 16:58:56,332 Epoch 22/2000 +2025-03-24 17:01:08,310 Current Learning Rate: 0.0009704404 +2025-03-24 17:01:08,311 Train Loss: 0.0007651, Val Loss: 0.0008595 +2025-03-24 17:01:08,311 Epoch 23/2000 +2025-03-24 17:03:20,702 Current Learning Rate: 0.0009677220 +2025-03-24 17:03:20,776 Train Loss: 0.0007422, Val Loss: 0.0008344 +2025-03-24 17:03:20,776 Epoch 24/2000 +2025-03-24 17:05:33,547 Current Learning Rate: 0.0009648882 +2025-03-24 17:05:33,621 Train Loss: 0.0007200, Val Loss: 0.0008075 +2025-03-24 17:05:33,621 Epoch 25/2000 +2025-03-24 17:07:46,051 Current Learning Rate: 0.0009619398 +2025-03-24 17:07:46,052 Train Loss: 0.0007135, Val Loss: 0.0008246 +2025-03-24 17:07:46,052 Epoch 
26/2000 +2025-03-24 17:09:59,374 Current Learning Rate: 0.0009588773 +2025-03-24 17:09:59,445 Train Loss: 0.0007010, Val Loss: 0.0007910 +2025-03-24 17:09:59,446 Epoch 27/2000 +2025-03-24 17:12:11,105 Current Learning Rate: 0.0009557016 +2025-03-24 17:12:11,106 Train Loss: 0.0006969, Val Loss: 0.0008076 +2025-03-24 17:12:11,106 Epoch 28/2000 +2025-03-24 17:14:23,379 Current Learning Rate: 0.0009524135 +2025-03-24 17:14:23,447 Train Loss: 0.0006854, Val Loss: 0.0007883 +2025-03-24 17:14:23,447 Epoch 29/2000 +2025-03-24 17:16:36,853 Current Learning Rate: 0.0009490138 +2025-03-24 17:16:36,937 Train Loss: 0.0006577, Val Loss: 0.0007751 +2025-03-24 17:16:36,937 Epoch 30/2000 +2025-03-24 17:18:49,165 Current Learning Rate: 0.0009455033 +2025-03-24 17:18:49,242 Train Loss: 0.0006114, Val Loss: 0.0006764 +2025-03-24 17:18:49,243 Epoch 31/2000 +2025-03-24 17:21:01,693 Current Learning Rate: 0.0009418828 +2025-03-24 17:21:01,777 Train Loss: 0.0005793, Val Loss: 0.0006636 +2025-03-24 17:21:01,778 Epoch 32/2000 +2025-03-24 17:23:14,856 Current Learning Rate: 0.0009381533 +2025-03-24 17:23:14,929 Train Loss: 0.0005899, Val Loss: 0.0006603 +2025-03-24 17:23:14,932 Epoch 33/2000 +2025-03-24 17:25:28,225 Current Learning Rate: 0.0009343158 +2025-03-24 17:25:28,225 Train Loss: 0.0005996, Val Loss: 0.0006845 +2025-03-24 17:25:28,226 Epoch 34/2000 +2025-03-24 17:27:42,465 Current Learning Rate: 0.0009303710 +2025-03-24 17:27:42,465 Train Loss: 0.0005827, Val Loss: 0.0006643 +2025-03-24 17:27:42,466 Epoch 35/2000 +2025-03-24 17:29:56,749 Current Learning Rate: 0.0009263201 +2025-03-24 17:29:56,830 Train Loss: 0.0005751, Val Loss: 0.0006332 +2025-03-24 17:29:56,831 Epoch 36/2000 +2025-03-24 17:32:09,552 Current Learning Rate: 0.0009221640 +2025-03-24 17:32:09,553 Train Loss: 0.0005696, Val Loss: 0.0006387 +2025-03-24 17:32:09,553 Epoch 37/2000 +2025-03-24 17:34:22,151 Current Learning Rate: 0.0009179037 +2025-03-24 17:34:22,152 Train Loss: 0.0005649, Val Loss: 0.0006582 +2025-03-24 
17:34:22,152 Epoch 38/2000 +2025-03-24 17:36:35,594 Current Learning Rate: 0.0009135403 +2025-03-24 17:36:35,594 Train Loss: 0.0005694, Val Loss: 0.0006487 +2025-03-24 17:36:35,594 Epoch 39/2000 +2025-03-24 17:38:29,109 Current Learning Rate: 0.0009090749 +2025-03-24 17:38:29,109 Train Loss: 0.0005645, Val Loss: 0.0006615 +2025-03-24 17:38:29,110 Epoch 40/2000 +2025-03-24 17:39:35,347 Current Learning Rate: 0.0009045085 +2025-03-24 17:39:35,348 Train Loss: 0.0005502, Val Loss: 0.0006620 +2025-03-24 17:39:35,349 Epoch 41/2000 +2025-03-24 17:40:40,920 Current Learning Rate: 0.0008998423 +2025-03-24 17:40:41,000 Train Loss: 0.0005269, Val Loss: 0.0006274 +2025-03-24 17:40:41,001 Epoch 42/2000 +2025-03-24 17:41:46,319 Current Learning Rate: 0.0008950775 +2025-03-24 17:41:46,403 Train Loss: 0.0004969, Val Loss: 0.0005625 +2025-03-24 17:41:46,403 Epoch 43/2000 +2025-03-24 17:42:51,923 Current Learning Rate: 0.0008902152 +2025-03-24 17:42:51,923 Train Loss: 0.0004922, Val Loss: 0.0005710 +2025-03-24 17:42:51,924 Epoch 44/2000 +2025-03-24 17:43:57,183 Current Learning Rate: 0.0008852566 +2025-03-24 17:43:57,183 Train Loss: 0.0004979, Val Loss: 0.0005817 +2025-03-24 17:43:57,183 Epoch 45/2000 +2025-03-24 17:45:02,448 Current Learning Rate: 0.0008802030 +2025-03-24 17:45:02,449 Train Loss: 0.0005011, Val Loss: 0.0006135 +2025-03-24 17:45:02,449 Epoch 46/2000 +2025-03-24 17:46:07,740 Current Learning Rate: 0.0008750555 +2025-03-24 17:46:07,741 Train Loss: 0.0004991, Val Loss: 0.0005857 +2025-03-24 17:46:07,741 Epoch 47/2000 +2025-03-24 17:47:13,379 Current Learning Rate: 0.0008698155 +2025-03-24 17:47:13,466 Train Loss: 0.0004952, Val Loss: 0.0005555 +2025-03-24 17:47:13,466 Epoch 48/2000 +2025-03-24 17:48:18,732 Current Learning Rate: 0.0008644843 +2025-03-24 17:48:18,732 Train Loss: 0.0004956, Val Loss: 0.0005706 +2025-03-24 17:48:18,733 Epoch 49/2000 +2025-03-24 17:49:24,548 Current Learning Rate: 0.0008590631 +2025-03-24 17:49:24,614 Train Loss: 0.0004921, Val Loss: 
0.0005506 +2025-03-24 17:49:24,615 Epoch 50/2000 +2025-03-24 17:50:29,881 Current Learning Rate: 0.0008535534 +2025-03-24 17:50:29,882 Train Loss: 0.0004903, Val Loss: 0.0005565 +2025-03-24 17:50:29,882 Epoch 51/2000 +2025-03-24 17:51:35,700 Current Learning Rate: 0.0008479564 +2025-03-24 17:51:35,777 Train Loss: 0.0004771, Val Loss: 0.0005501 +2025-03-24 17:51:35,778 Epoch 52/2000 +2025-03-24 17:52:40,866 Current Learning Rate: 0.0008422736 +2025-03-24 17:52:40,934 Train Loss: 0.0004647, Val Loss: 0.0005274 +2025-03-24 17:52:40,934 Epoch 53/2000 +2025-03-24 17:53:46,443 Current Learning Rate: 0.0008365063 +2025-03-24 17:53:46,510 Train Loss: 0.0004458, Val Loss: 0.0005018 +2025-03-24 17:53:46,510 Epoch 54/2000 +2025-03-24 17:54:51,460 Current Learning Rate: 0.0008306559 +2025-03-24 17:54:51,460 Train Loss: 0.0004375, Val Loss: 0.0005095 +2025-03-24 17:54:51,460 Epoch 55/2000 +2025-03-24 17:55:57,352 Current Learning Rate: 0.0008247240 +2025-03-24 17:55:57,352 Train Loss: 0.0004439, Val Loss: 0.0005308 +2025-03-24 17:55:57,353 Epoch 56/2000 +2025-03-24 17:57:02,667 Current Learning Rate: 0.0008187120 +2025-03-24 17:57:02,667 Train Loss: 0.0004442, Val Loss: 0.0005308 +2025-03-24 17:57:02,668 Epoch 57/2000 +2025-03-24 17:58:08,101 Current Learning Rate: 0.0008126213 +2025-03-24 17:58:08,101 Train Loss: 0.0004478, Val Loss: 0.0005233 +2025-03-24 17:58:08,102 Epoch 58/2000 +2025-03-24 17:59:13,603 Current Learning Rate: 0.0008064535 +2025-03-24 17:59:13,603 Train Loss: 0.0004437, Val Loss: 0.0005028 +2025-03-24 17:59:13,604 Epoch 59/2000 +2025-03-24 18:00:19,119 Current Learning Rate: 0.0008002101 +2025-03-24 18:00:19,119 Train Loss: 0.0004454, Val Loss: 0.0005129 +2025-03-24 18:00:19,120 Epoch 60/2000 +2025-03-24 18:01:24,586 Current Learning Rate: 0.0007938926 +2025-03-24 18:01:24,666 Train Loss: 0.0004447, Val Loss: 0.0005004 +2025-03-24 18:01:24,666 Epoch 61/2000 +2025-03-24 18:02:29,933 Current Learning Rate: 0.0007875026 +2025-03-24 18:02:29,934 Train Loss: 
0.0004428, Val Loss: 0.0005077 +2025-03-24 18:02:29,934 Epoch 62/2000 +2025-03-24 18:03:35,078 Current Learning Rate: 0.0007810417 +2025-03-24 18:03:35,157 Train Loss: 0.0004385, Val Loss: 0.0004880 +2025-03-24 18:03:35,157 Epoch 63/2000 +2025-03-24 18:04:40,081 Current Learning Rate: 0.0007745114 +2025-03-24 18:04:40,151 Train Loss: 0.0004333, Val Loss: 0.0004720 +2025-03-24 18:04:40,152 Epoch 64/2000 +2025-03-24 18:05:45,346 Current Learning Rate: 0.0007679134 +2025-03-24 18:05:45,414 Train Loss: 0.0004230, Val Loss: 0.0004605 +2025-03-24 18:05:45,415 Epoch 65/2000 +2025-03-24 18:06:50,775 Current Learning Rate: 0.0007612493 +2025-03-24 18:06:50,841 Train Loss: 0.0004074, Val Loss: 0.0004580 +2025-03-24 18:06:50,842 Epoch 66/2000 +2025-03-24 18:07:56,300 Current Learning Rate: 0.0007545207 +2025-03-24 18:07:56,300 Train Loss: 0.0003988, Val Loss: 0.0004583 +2025-03-24 18:07:56,301 Epoch 67/2000 +2025-03-24 18:09:01,589 Current Learning Rate: 0.0007477293 +2025-03-24 18:09:01,589 Train Loss: 0.0003994, Val Loss: 0.0004707 +2025-03-24 18:09:01,590 Epoch 68/2000 +2025-03-24 18:10:07,044 Current Learning Rate: 0.0007408768 +2025-03-24 18:10:07,044 Train Loss: 0.0004005, Val Loss: 0.0004778 +2025-03-24 18:10:07,044 Epoch 69/2000 +2025-03-24 18:11:12,523 Current Learning Rate: 0.0007339649 +2025-03-24 18:11:12,523 Train Loss: 0.0003999, Val Loss: 0.0004749 +2025-03-24 18:11:12,524 Epoch 70/2000 +2025-03-24 18:12:18,024 Current Learning Rate: 0.0007269952 +2025-03-24 18:12:18,024 Train Loss: 0.0004021, Val Loss: 0.0004668 +2025-03-24 18:12:18,025 Epoch 71/2000 +2025-03-24 18:13:23,353 Current Learning Rate: 0.0007199696 +2025-03-24 18:13:23,424 Train Loss: 0.0004035, Val Loss: 0.0004550 +2025-03-24 18:13:23,424 Epoch 72/2000 +2025-03-24 18:14:28,840 Current Learning Rate: 0.0007128896 +2025-03-24 18:14:28,903 Train Loss: 0.0004050, Val Loss: 0.0004521 +2025-03-24 18:14:28,904 Epoch 73/2000 +2025-03-24 18:15:34,128 Current Learning Rate: 0.0007057572 +2025-03-24 
18:15:34,202 Train Loss: 0.0004040, Val Loss: 0.0004491 +2025-03-24 18:15:34,202 Epoch 74/2000 +2025-03-24 18:16:39,418 Current Learning Rate: 0.0006985739 +2025-03-24 18:16:39,419 Train Loss: 0.0003999, Val Loss: 0.0004520 +2025-03-24 18:16:39,420 Epoch 75/2000 +2025-03-24 18:17:45,178 Current Learning Rate: 0.0006913417 +2025-03-24 18:17:45,255 Train Loss: 0.0003994, Val Loss: 0.0004450 +2025-03-24 18:17:45,255 Epoch 76/2000 +2025-03-24 18:18:50,724 Current Learning Rate: 0.0006840623 +2025-03-24 18:18:50,806 Train Loss: 0.0003941, Val Loss: 0.0004401 +2025-03-24 18:18:50,807 Epoch 77/2000 +2025-03-24 18:19:55,462 Current Learning Rate: 0.0006767374 +2025-03-24 18:19:55,532 Train Loss: 0.0003854, Val Loss: 0.0004245 +2025-03-24 18:19:55,532 Epoch 78/2000 +2025-03-24 18:21:01,354 Current Learning Rate: 0.0006693690 +2025-03-24 18:21:01,442 Train Loss: 0.0003755, Val Loss: 0.0004189 +2025-03-24 18:21:01,442 Epoch 79/2000 +2025-03-24 18:22:06,722 Current Learning Rate: 0.0006619587 +2025-03-24 18:22:06,790 Train Loss: 0.0003699, Val Loss: 0.0004164 +2025-03-24 18:22:06,791 Epoch 80/2000 +2025-03-24 18:23:11,960 Current Learning Rate: 0.0006545085 +2025-03-24 18:23:11,960 Train Loss: 0.0003674, Val Loss: 0.0004211 +2025-03-24 18:23:11,961 Epoch 81/2000 +2025-03-24 18:24:17,407 Current Learning Rate: 0.0006470202 +2025-03-24 18:24:17,408 Train Loss: 0.0003670, Val Loss: 0.0004290 +2025-03-24 18:24:17,408 Epoch 82/2000 +2025-03-24 18:25:22,813 Current Learning Rate: 0.0006394956 +2025-03-24 18:25:22,813 Train Loss: 0.0003666, Val Loss: 0.0004294 +2025-03-24 18:25:22,813 Epoch 83/2000 +2025-03-24 18:26:27,846 Current Learning Rate: 0.0006319365 +2025-03-24 18:26:27,847 Train Loss: 0.0003662, Val Loss: 0.0004255 +2025-03-24 18:26:27,847 Epoch 84/2000 +2025-03-24 18:27:32,901 Current Learning Rate: 0.0006243449 +2025-03-24 18:27:32,966 Train Loss: 0.0003689, Val Loss: 0.0004161 +2025-03-24 18:27:32,967 Epoch 85/2000 +2025-03-24 18:28:38,329 Current Learning Rate: 
0.0006167227 +2025-03-24 18:28:38,330 Train Loss: 0.0003716, Val Loss: 0.0004169 +2025-03-24 18:28:38,330 Epoch 86/2000 +2025-03-24 18:29:44,586 Current Learning Rate: 0.0006090716 +2025-03-24 18:29:44,650 Train Loss: 0.0003733, Val Loss: 0.0004160 +2025-03-24 18:29:44,650 Epoch 87/2000 +2025-03-24 18:30:49,629 Current Learning Rate: 0.0006013936 +2025-03-24 18:30:49,697 Train Loss: 0.0003707, Val Loss: 0.0004157 +2025-03-24 18:30:49,698 Epoch 88/2000 +2025-03-24 18:31:55,041 Current Learning Rate: 0.0005936907 +2025-03-24 18:31:55,106 Train Loss: 0.0003693, Val Loss: 0.0004143 +2025-03-24 18:31:55,107 Epoch 89/2000 +2025-03-24 18:33:00,361 Current Learning Rate: 0.0005859646 +2025-03-24 18:33:00,443 Train Loss: 0.0003646, Val Loss: 0.0004057 +2025-03-24 18:33:00,444 Epoch 90/2000 +2025-03-24 18:34:05,986 Current Learning Rate: 0.0005782172 +2025-03-24 18:34:06,054 Train Loss: 0.0003597, Val Loss: 0.0003990 +2025-03-24 18:34:06,055 Epoch 91/2000 +2025-03-24 18:35:11,507 Current Learning Rate: 0.0005704506 +2025-03-24 18:35:11,578 Train Loss: 0.0003539, Val Loss: 0.0003915 +2025-03-24 18:35:11,579 Epoch 92/2000 +2025-03-24 18:36:16,860 Current Learning Rate: 0.0005626666 +2025-03-24 18:36:16,923 Train Loss: 0.0003490, Val Loss: 0.0003886 +2025-03-24 18:36:16,923 Epoch 93/2000 +2025-03-24 18:37:23,181 Current Learning Rate: 0.0005548672 +2025-03-24 18:37:23,266 Train Loss: 0.0003466, Val Loss: 0.0003881 +2025-03-24 18:37:23,266 Epoch 94/2000 +2025-03-24 18:38:28,719 Current Learning Rate: 0.0005470542 +2025-03-24 18:38:28,804 Train Loss: 0.0003449, Val Loss: 0.0003871 +2025-03-24 18:38:28,804 Epoch 95/2000 +2025-03-24 18:39:34,126 Current Learning Rate: 0.0005392295 +2025-03-24 18:39:34,201 Train Loss: 0.0003436, Val Loss: 0.0003866 +2025-03-24 18:39:34,202 Epoch 96/2000 +2025-03-24 18:40:39,307 Current Learning Rate: 0.0005313953 +2025-03-24 18:40:39,377 Train Loss: 0.0003428, Val Loss: 0.0003866 +2025-03-24 18:40:39,378 Epoch 97/2000 +2025-03-24 18:41:44,432 
Current Learning Rate: 0.0005235532 +2025-03-24 18:41:44,433 Train Loss: 0.0003436, Val Loss: 0.0003878 +2025-03-24 18:41:44,433 Epoch 98/2000 +2025-03-24 18:42:49,609 Current Learning Rate: 0.0005157054 +2025-03-24 18:42:49,610 Train Loss: 0.0003466, Val Loss: 0.0003913 +2025-03-24 18:42:49,610 Epoch 99/2000 +2025-03-24 18:43:55,235 Current Learning Rate: 0.0005078537 +2025-03-24 18:43:55,236 Train Loss: 0.0003486, Val Loss: 0.0003909 +2025-03-24 18:43:55,236 Epoch 100/2000 +2025-03-24 18:45:00,929 Current Learning Rate: 0.0005000000 +2025-03-24 18:45:01,018 Train Loss: 0.0003482, Val Loss: 0.0003838 +2025-03-24 18:45:01,018 Epoch 101/2000 +2025-03-24 18:46:06,114 Current Learning Rate: 0.0004921463 +2025-03-24 18:46:06,190 Train Loss: 0.0003437, Val Loss: 0.0003789 +2025-03-24 18:46:06,190 Epoch 102/2000 +2025-03-24 18:47:11,030 Current Learning Rate: 0.0004842946 +2025-03-24 18:47:11,106 Train Loss: 0.0003388, Val Loss: 0.0003750 +2025-03-24 18:47:11,106 Epoch 103/2000 +2025-03-24 18:48:16,163 Current Learning Rate: 0.0004764468 +2025-03-24 18:48:16,249 Train Loss: 0.0003345, Val Loss: 0.0003712 +2025-03-24 18:48:16,249 Epoch 104/2000 +2025-03-24 18:49:21,220 Current Learning Rate: 0.0004686047 +2025-03-24 18:49:21,294 Train Loss: 0.0003313, Val Loss: 0.0003687 +2025-03-24 18:49:21,295 Epoch 105/2000 +2025-03-24 18:50:26,729 Current Learning Rate: 0.0004607705 +2025-03-24 18:50:26,816 Train Loss: 0.0003296, Val Loss: 0.0003676 +2025-03-24 18:50:26,816 Epoch 106/2000 +2025-03-24 18:51:32,008 Current Learning Rate: 0.0004529458 +2025-03-24 18:51:32,081 Train Loss: 0.0003287, Val Loss: 0.0003674 +2025-03-24 18:51:32,081 Epoch 107/2000 +2025-03-24 18:52:36,962 Current Learning Rate: 0.0004451328 +2025-03-24 18:52:37,038 Train Loss: 0.0003280, Val Loss: 0.0003674 +2025-03-24 18:52:37,039 Epoch 108/2000 +2025-03-24 18:53:42,608 Current Learning Rate: 0.0004373334 +2025-03-24 18:53:42,608 Train Loss: 0.0003283, Val Loss: 0.0003683 +2025-03-24 18:53:42,609 Epoch 
109/2000 +2025-03-24 18:54:48,021 Current Learning Rate: 0.0004295494 +2025-03-24 18:54:48,022 Train Loss: 0.0003308, Val Loss: 0.0003689 +2025-03-24 18:54:48,022 Epoch 110/2000 +2025-03-24 18:55:53,676 Current Learning Rate: 0.0004217828 +2025-03-24 18:55:53,741 Train Loss: 0.0003312, Val Loss: 0.0003630 +2025-03-24 18:55:53,741 Epoch 111/2000 +2025-03-24 18:56:59,086 Current Learning Rate: 0.0004140354 +2025-03-24 18:56:59,150 Train Loss: 0.0003282, Val Loss: 0.0003583 +2025-03-24 18:56:59,151 Epoch 112/2000 +2025-03-24 18:58:04,695 Current Learning Rate: 0.0004063093 +2025-03-24 18:58:04,762 Train Loss: 0.0003246, Val Loss: 0.0003564 +2025-03-24 18:58:04,762 Epoch 113/2000 +2025-03-24 18:59:10,152 Current Learning Rate: 0.0003986064 +2025-03-24 18:59:10,234 Train Loss: 0.0003217, Val Loss: 0.0003550 +2025-03-24 18:59:10,234 Epoch 114/2000 +2025-03-24 19:00:15,965 Current Learning Rate: 0.0003909284 +2025-03-24 19:00:16,036 Train Loss: 0.0003192, Val Loss: 0.0003541 +2025-03-24 19:00:16,037 Epoch 115/2000 +2025-03-24 19:01:21,086 Current Learning Rate: 0.0003832773 +2025-03-24 19:01:21,166 Train Loss: 0.0003173, Val Loss: 0.0003526 +2025-03-24 19:01:21,166 Epoch 116/2000 +2025-03-24 19:02:26,582 Current Learning Rate: 0.0003756551 +2025-03-24 19:02:26,661 Train Loss: 0.0003160, Val Loss: 0.0003512 +2025-03-24 19:02:26,661 Epoch 117/2000 +2025-03-24 19:03:32,015 Current Learning Rate: 0.0003680635 +2025-03-24 19:03:32,095 Train Loss: 0.0003151, Val Loss: 0.0003502 +2025-03-24 19:03:32,096 Epoch 118/2000 +2025-03-24 19:04:37,439 Current Learning Rate: 0.0003605044 +2025-03-24 19:04:37,440 Train Loss: 0.0003147, Val Loss: 0.0003503 +2025-03-24 19:04:37,440 Epoch 119/2000 +2025-03-24 19:05:42,723 Current Learning Rate: 0.0003529798 +2025-03-24 19:05:42,724 Train Loss: 0.0003150, Val Loss: 0.0003507 +2025-03-24 19:05:42,724 Epoch 120/2000 +2025-03-24 19:06:47,293 Current Learning Rate: 0.0003454915 +2025-03-24 19:06:47,356 Train Loss: 0.0003155, Val Loss: 0.0003461 
+2025-03-24 19:06:47,356 Epoch 121/2000 +2025-03-24 19:07:52,399 Current Learning Rate: 0.0003380413 +2025-03-24 19:07:52,463 Train Loss: 0.0003151, Val Loss: 0.0003443 +2025-03-24 19:07:52,464 Epoch 122/2000 +2025-03-24 19:08:57,149 Current Learning Rate: 0.0003306310 +2025-03-24 19:08:57,216 Train Loss: 0.0003126, Val Loss: 0.0003419 +2025-03-24 19:08:57,216 Epoch 123/2000 +2025-03-24 19:10:02,108 Current Learning Rate: 0.0003232626 +2025-03-24 19:10:02,177 Train Loss: 0.0003102, Val Loss: 0.0003395 +2025-03-24 19:10:02,177 Epoch 124/2000 +2025-03-24 19:11:07,502 Current Learning Rate: 0.0003159377 +2025-03-24 19:11:07,568 Train Loss: 0.0003084, Val Loss: 0.0003380 +2025-03-24 19:11:07,568 Epoch 125/2000 +2025-03-24 19:12:12,827 Current Learning Rate: 0.0003086583 +2025-03-24 19:12:12,888 Train Loss: 0.0003070, Val Loss: 0.0003370 +2025-03-24 19:12:12,889 Epoch 126/2000 +2025-03-24 19:13:18,508 Current Learning Rate: 0.0003014261 +2025-03-24 19:13:18,581 Train Loss: 0.0003058, Val Loss: 0.0003360 +2025-03-24 19:13:18,581 Epoch 127/2000 +2025-03-24 19:14:23,585 Current Learning Rate: 0.0002942428 +2025-03-24 19:14:23,658 Train Loss: 0.0003050, Val Loss: 0.0003352 +2025-03-24 19:14:23,658 Epoch 128/2000 +2025-03-24 19:15:28,896 Current Learning Rate: 0.0002871104 +2025-03-24 19:15:28,978 Train Loss: 0.0003044, Val Loss: 0.0003343 +2025-03-24 19:15:28,978 Epoch 129/2000 +2025-03-24 19:16:33,939 Current Learning Rate: 0.0002800304 +2025-03-24 19:16:34,025 Train Loss: 0.0003040, Val Loss: 0.0003333 +2025-03-24 19:16:34,025 Epoch 130/2000 +2025-03-24 19:17:39,272 Current Learning Rate: 0.0002730048 +2025-03-24 19:17:39,349 Train Loss: 0.0003036, Val Loss: 0.0003313 +2025-03-24 19:17:39,349 Epoch 131/2000 +2025-03-24 19:18:44,693 Current Learning Rate: 0.0002660351 +2025-03-24 19:18:44,766 Train Loss: 0.0003028, Val Loss: 0.0003297 +2025-03-24 19:18:44,766 Epoch 132/2000 +2025-03-24 19:19:50,080 Current Learning Rate: 0.0002591232 +2025-03-24 19:19:50,155 Train Loss: 
0.0003013, Val Loss: 0.0003282 +2025-03-24 19:19:50,156 Epoch 133/2000 +2025-03-24 19:20:55,139 Current Learning Rate: 0.0002522707 +2025-03-24 19:20:55,219 Train Loss: 0.0002997, Val Loss: 0.0003270 +2025-03-24 19:20:55,219 Epoch 134/2000 +2025-03-24 19:22:00,294 Current Learning Rate: 0.0002454793 +2025-03-24 19:22:00,372 Train Loss: 0.0002984, Val Loss: 0.0003260 +2025-03-24 19:22:00,372 Epoch 135/2000 +2025-03-24 19:23:05,442 Current Learning Rate: 0.0002387507 +2025-03-24 19:23:05,517 Train Loss: 0.0002973, Val Loss: 0.0003252 +2025-03-24 19:23:05,517 Epoch 136/2000 +2025-03-24 19:24:10,823 Current Learning Rate: 0.0002320866 +2025-03-24 19:24:10,903 Train Loss: 0.0002964, Val Loss: 0.0003245 +2025-03-24 19:24:10,903 Epoch 137/2000 +2025-03-24 19:25:16,212 Current Learning Rate: 0.0002254886 +2025-03-24 19:25:16,286 Train Loss: 0.0002957, Val Loss: 0.0003239 +2025-03-24 19:25:16,286 Epoch 138/2000 +2025-03-24 19:26:21,501 Current Learning Rate: 0.0002189583 +2025-03-24 19:26:21,584 Train Loss: 0.0002951, Val Loss: 0.0003234 +2025-03-24 19:26:21,584 Epoch 139/2000 +2025-03-24 19:27:26,565 Current Learning Rate: 0.0002124974 +2025-03-24 19:27:26,652 Train Loss: 0.0002945, Val Loss: 0.0003233 +2025-03-24 19:27:26,652 Epoch 140/2000 +2025-03-24 19:28:31,692 Current Learning Rate: 0.0002061074 +2025-03-24 19:28:31,692 Train Loss: 0.0002939, Val Loss: 0.0003234 +2025-03-24 19:28:31,693 Epoch 141/2000 +2025-03-24 19:29:36,922 Current Learning Rate: 0.0001997899 +2025-03-24 19:29:36,997 Train Loss: 0.0002929, Val Loss: 0.0003227 +2025-03-24 19:29:36,997 Epoch 142/2000 +2025-03-24 19:30:42,353 Current Learning Rate: 0.0001935465 +2025-03-24 19:30:42,426 Train Loss: 0.0002918, Val Loss: 0.0003220 +2025-03-24 19:30:42,427 Epoch 143/2000 +2025-03-24 19:31:47,374 Current Learning Rate: 0.0001873787 +2025-03-24 19:31:47,442 Train Loss: 0.0002907, Val Loss: 0.0003212 +2025-03-24 19:31:47,442 Epoch 144/2000 +2025-03-24 19:32:52,933 Current Learning Rate: 0.0001812880 
+2025-03-24 19:32:53,002 Train Loss: 0.0002898, Val Loss: 0.0003205 +2025-03-24 19:32:53,002 Epoch 145/2000 +2025-03-24 19:33:58,005 Current Learning Rate: 0.0001752760 +2025-03-24 19:33:58,089 Train Loss: 0.0002891, Val Loss: 0.0003198 +2025-03-24 19:33:58,090 Epoch 146/2000 +2025-03-24 19:35:02,828 Current Learning Rate: 0.0001693441 +2025-03-24 19:35:02,896 Train Loss: 0.0002883, Val Loss: 0.0003191 +2025-03-24 19:35:02,897 Epoch 147/2000 +2025-03-24 19:36:08,179 Current Learning Rate: 0.0001634937 +2025-03-24 19:36:08,258 Train Loss: 0.0002877, Val Loss: 0.0003183 +2025-03-24 19:36:08,258 Epoch 148/2000 +2025-03-24 19:37:13,170 Current Learning Rate: 0.0001577264 +2025-03-24 19:37:13,237 Train Loss: 0.0002870, Val Loss: 0.0003174 +2025-03-24 19:37:13,237 Epoch 149/2000 +2025-03-24 19:38:18,377 Current Learning Rate: 0.0001520436 +2025-03-24 19:38:18,445 Train Loss: 0.0002863, Val Loss: 0.0003164 +2025-03-24 19:38:18,445 Epoch 150/2000 +2025-03-24 19:39:23,462 Current Learning Rate: 0.0001464466 +2025-03-24 19:39:23,531 Train Loss: 0.0002857, Val Loss: 0.0003152 +2025-03-24 19:39:23,531 Epoch 151/2000 +2025-03-24 19:40:28,485 Current Learning Rate: 0.0001409369 +2025-03-24 19:40:28,547 Train Loss: 0.0002849, Val Loss: 0.0003141 +2025-03-24 19:40:28,547 Epoch 152/2000 +2025-03-24 19:41:33,684 Current Learning Rate: 0.0001355157 +2025-03-24 19:41:33,754 Train Loss: 0.0002841, Val Loss: 0.0003131 +2025-03-24 19:41:33,754 Epoch 153/2000 +2025-03-24 19:42:39,115 Current Learning Rate: 0.0001301845 +2025-03-24 19:42:39,177 Train Loss: 0.0002833, Val Loss: 0.0003123 +2025-03-24 19:42:39,177 Epoch 154/2000 +2025-03-24 19:43:44,087 Current Learning Rate: 0.0001249445 +2025-03-24 19:43:44,153 Train Loss: 0.0002826, Val Loss: 0.0003116 +2025-03-24 19:43:44,153 Epoch 155/2000 +2025-03-24 19:44:49,318 Current Learning Rate: 0.0001197970 +2025-03-24 19:44:49,404 Train Loss: 0.0002819, Val Loss: 0.0003110 +2025-03-24 19:44:49,404 Epoch 156/2000 +2025-03-24 19:45:54,351 Current 
Learning Rate: 0.0001147434 +2025-03-24 19:45:54,414 Train Loss: 0.0002813, Val Loss: 0.0003104 +2025-03-24 19:45:54,414 Epoch 157/2000 +2025-03-24 19:46:59,721 Current Learning Rate: 0.0001097848 +2025-03-24 19:46:59,789 Train Loss: 0.0002806, Val Loss: 0.0003099 +2025-03-24 19:46:59,789 Epoch 158/2000 +2025-03-24 19:48:05,404 Current Learning Rate: 0.0001049225 +2025-03-24 19:48:05,485 Train Loss: 0.0002800, Val Loss: 0.0003094 +2025-03-24 19:48:05,485 Epoch 159/2000 +2025-03-24 19:49:10,665 Current Learning Rate: 0.0001001577 +2025-03-24 19:49:10,741 Train Loss: 0.0002794, Val Loss: 0.0003088 +2025-03-24 19:49:10,741 Epoch 160/2000 +2025-03-24 19:50:15,300 Current Learning Rate: 0.0000954915 +2025-03-24 19:50:15,366 Train Loss: 0.0002788, Val Loss: 0.0003085 +2025-03-24 19:50:15,366 Epoch 161/2000 +2025-03-24 19:51:20,555 Current Learning Rate: 0.0000909251 +2025-03-24 19:51:20,636 Train Loss: 0.0002783, Val Loss: 0.0003083 +2025-03-24 19:51:20,636 Epoch 162/2000 +2025-03-24 19:52:25,975 Current Learning Rate: 0.0000864597 +2025-03-24 19:52:25,976 Train Loss: 0.0002778, Val Loss: 0.0003084 +2025-03-24 19:52:25,976 Epoch 163/2000 +2025-03-24 19:53:31,321 Current Learning Rate: 0.0000820963 +2025-03-24 19:53:31,321 Train Loss: 0.0002773, Val Loss: 0.0003084 +2025-03-24 19:53:31,321 Epoch 164/2000 +2025-03-24 19:54:36,383 Current Learning Rate: 0.0000778360 +2025-03-24 19:54:36,456 Train Loss: 0.0002768, Val Loss: 0.0003080 +2025-03-24 19:54:36,456 Epoch 165/2000 +2025-03-24 19:55:41,517 Current Learning Rate: 0.0000736799 +2025-03-24 19:55:41,587 Train Loss: 0.0002763, Val Loss: 0.0003078 +2025-03-24 19:55:41,587 Epoch 166/2000 +2025-03-24 19:56:46,900 Current Learning Rate: 0.0000696290 +2025-03-24 19:56:47,044 Train Loss: 0.0002758, Val Loss: 0.0003070 +2025-03-24 19:56:47,045 Epoch 167/2000 +2025-03-24 19:57:52,036 Current Learning Rate: 0.0000656842 +2025-03-24 19:57:52,102 Train Loss: 0.0002753, Val Loss: 0.0003059 +2025-03-24 19:57:52,102 Epoch 168/2000 
+2025-03-24 19:58:57,442 Current Learning Rate: 0.0000618467 +2025-03-24 19:58:57,519 Train Loss: 0.0002748, Val Loss: 0.0003047 +2025-03-24 19:58:57,519 Epoch 169/2000 +2025-03-24 20:00:03,239 Current Learning Rate: 0.0000581172 +2025-03-24 20:00:03,339 Train Loss: 0.0002743, Val Loss: 0.0003039 +2025-03-24 20:00:03,339 Epoch 170/2000 +2025-03-24 20:01:08,488 Current Learning Rate: 0.0000544967 +2025-03-24 20:01:08,565 Train Loss: 0.0002739, Val Loss: 0.0003033 +2025-03-24 20:01:08,566 Epoch 171/2000 +2025-03-24 20:02:14,005 Current Learning Rate: 0.0000509862 +2025-03-24 20:02:14,078 Train Loss: 0.0002734, Val Loss: 0.0003027 +2025-03-24 20:02:14,078 Epoch 172/2000 +2025-03-24 20:03:19,415 Current Learning Rate: 0.0000475865 +2025-03-24 20:03:19,490 Train Loss: 0.0002730, Val Loss: 0.0003023 +2025-03-24 20:03:19,491 Epoch 173/2000 +2025-03-24 20:04:24,507 Current Learning Rate: 0.0000442984 +2025-03-24 20:04:24,592 Train Loss: 0.0002726, Val Loss: 0.0003019 +2025-03-24 20:04:24,593 Epoch 174/2000 +2025-03-24 20:05:30,757 Current Learning Rate: 0.0000411227 +2025-03-24 20:05:30,836 Train Loss: 0.0002721, Val Loss: 0.0003017 +2025-03-24 20:05:30,836 Epoch 175/2000 +2025-03-24 20:06:36,386 Current Learning Rate: 0.0000380602 +2025-03-24 20:06:36,463 Train Loss: 0.0002718, Val Loss: 0.0003013 +2025-03-24 20:06:36,464 Epoch 176/2000 +2025-03-24 20:07:41,903 Current Learning Rate: 0.0000351118 +2025-03-24 20:07:41,977 Train Loss: 0.0002714, Val Loss: 0.0003010 +2025-03-24 20:07:41,977 Epoch 177/2000 +2025-03-24 20:08:47,710 Current Learning Rate: 0.0000322780 +2025-03-24 20:08:47,787 Train Loss: 0.0002710, Val Loss: 0.0003006 +2025-03-24 20:08:47,787 Epoch 178/2000 +2025-03-24 20:09:53,113 Current Learning Rate: 0.0000295596 +2025-03-24 20:09:53,180 Train Loss: 0.0002707, Val Loss: 0.0003004 +2025-03-24 20:09:53,180 Epoch 179/2000 +2025-03-24 20:10:57,947 Current Learning Rate: 0.0000269573 +2025-03-24 20:10:58,019 Train Loss: 0.0002703, Val Loss: 0.0003001 +2025-03-24 
20:10:58,019 Epoch 180/2000 +2025-03-24 20:12:02,906 Current Learning Rate: 0.0000244717 +2025-03-24 20:12:02,977 Train Loss: 0.0002700, Val Loss: 0.0002999 +2025-03-24 20:12:02,977 Epoch 181/2000 +2025-03-24 20:13:08,078 Current Learning Rate: 0.0000221035 +2025-03-24 20:13:08,145 Train Loss: 0.0002697, Val Loss: 0.0002996 +2025-03-24 20:13:08,145 Epoch 182/2000 +2025-03-24 20:14:13,110 Current Learning Rate: 0.0000198532 +2025-03-24 20:14:13,174 Train Loss: 0.0002694, Val Loss: 0.0002993 +2025-03-24 20:14:13,174 Epoch 183/2000 +2025-03-24 20:15:18,900 Current Learning Rate: 0.0000177213 +2025-03-24 20:15:18,978 Train Loss: 0.0002691, Val Loss: 0.0002991 +2025-03-24 20:15:18,978 Epoch 184/2000 +2025-03-24 20:16:24,368 Current Learning Rate: 0.0000157084 +2025-03-24 20:16:24,449 Train Loss: 0.0002688, Val Loss: 0.0002989 +2025-03-24 20:16:24,450 Epoch 185/2000 +2025-03-24 20:17:29,475 Current Learning Rate: 0.0000138150 +2025-03-24 20:17:29,556 Train Loss: 0.0002686, Val Loss: 0.0002988 +2025-03-24 20:17:29,557 Epoch 186/2000 +2025-03-24 20:18:34,244 Current Learning Rate: 0.0000120416 +2025-03-24 20:18:34,317 Train Loss: 0.0002683, Val Loss: 0.0002987 +2025-03-24 20:18:34,318 Epoch 187/2000 +2025-03-24 20:19:39,559 Current Learning Rate: 0.0000103886 +2025-03-24 20:19:39,625 Train Loss: 0.0002681, Val Loss: 0.0002986 +2025-03-24 20:19:39,625 Epoch 188/2000 +2025-03-24 20:20:45,336 Current Learning Rate: 0.0000088564 +2025-03-24 20:20:45,408 Train Loss: 0.0002679, Val Loss: 0.0002984 +2025-03-24 20:20:45,408 Epoch 189/2000 +2025-03-24 20:21:50,924 Current Learning Rate: 0.0000074453 +2025-03-24 20:21:50,995 Train Loss: 0.0002677, Val Loss: 0.0002982 +2025-03-24 20:21:50,995 Epoch 190/2000 +2025-03-24 20:22:56,573 Current Learning Rate: 0.0000061558 +2025-03-24 20:22:56,655 Train Loss: 0.0002675, Val Loss: 0.0002980 +2025-03-24 20:22:56,655 Epoch 191/2000 +2025-03-24 20:24:01,904 Current Learning Rate: 0.0000049882 +2025-03-24 20:24:01,975 Train Loss: 0.0002673, Val 
Loss: 0.0002979 +2025-03-24 20:24:01,976 Epoch 192/2000 +2025-03-24 20:25:07,318 Current Learning Rate: 0.0000039426 +2025-03-24 20:25:07,391 Train Loss: 0.0002671, Val Loss: 0.0002978 +2025-03-24 20:25:07,392 Epoch 193/2000 +2025-03-24 20:26:12,525 Current Learning Rate: 0.0000030195 +2025-03-24 20:26:12,597 Train Loss: 0.0002670, Val Loss: 0.0002978 +2025-03-24 20:26:12,598 Epoch 194/2000 +2025-03-24 20:27:17,678 Current Learning Rate: 0.0000022190 +2025-03-24 20:27:17,743 Train Loss: 0.0002668, Val Loss: 0.0002977 +2025-03-24 20:27:17,744 Epoch 195/2000 +2025-03-24 20:28:23,096 Current Learning Rate: 0.0000015413 +2025-03-24 20:28:23,164 Train Loss: 0.0002667, Val Loss: 0.0002977 +2025-03-24 20:28:23,164 Epoch 196/2000 +2025-03-24 20:29:28,194 Current Learning Rate: 0.0000009866 +2025-03-24 20:29:28,260 Train Loss: 0.0002666, Val Loss: 0.0002976 +2025-03-24 20:29:28,260 Epoch 197/2000 +2025-03-24 20:30:33,820 Current Learning Rate: 0.0000005551 +2025-03-24 20:30:33,889 Train Loss: 0.0002665, Val Loss: 0.0002976 +2025-03-24 20:30:33,890 Epoch 198/2000 +2025-03-24 20:31:39,366 Current Learning Rate: 0.0000002467 +2025-03-24 20:31:39,434 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:31:39,434 Epoch 199/2000 +2025-03-24 20:32:44,643 Current Learning Rate: 0.0000000617 +2025-03-24 20:32:44,643 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:32:44,644 Epoch 200/2000 +2025-03-24 20:33:49,834 Current Learning Rate: 0.0000000000 +2025-03-24 20:33:49,835 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:33:49,835 Epoch 201/2000 +2025-03-24 20:34:55,083 Current Learning Rate: 0.0000000617 +2025-03-24 20:34:55,083 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:34:55,083 Epoch 202/2000 +2025-03-24 20:36:00,231 Current Learning Rate: 0.0000002467 +2025-03-24 20:36:00,232 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:36:00,232 Epoch 203/2000 +2025-03-24 20:37:05,880 Current Learning Rate: 0.0000005551 +2025-03-24 
20:37:05,880 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:37:05,881 Epoch 204/2000 +2025-03-24 20:38:11,022 Current Learning Rate: 0.0000009866 +2025-03-24 20:38:11,107 Train Loss: 0.0002664, Val Loss: 0.0002976 +2025-03-24 20:38:11,108 Epoch 205/2000 +2025-03-24 20:39:16,317 Current Learning Rate: 0.0000015413 +2025-03-24 20:39:16,318 Train Loss: 0.0002665, Val Loss: 0.0002976 +2025-03-24 20:39:16,318 Epoch 206/2000 +2025-03-24 20:40:21,517 Current Learning Rate: 0.0000022190 +2025-03-24 20:40:21,518 Train Loss: 0.0002666, Val Loss: 0.0002976 +2025-03-24 20:40:21,518 Epoch 207/2000 +2025-03-24 20:41:26,867 Current Learning Rate: 0.0000030195 +2025-03-24 20:41:26,868 Train Loss: 0.0002667, Val Loss: 0.0002976 +2025-03-24 20:41:26,868 Epoch 208/2000 +2025-03-24 20:42:31,989 Current Learning Rate: 0.0000039426 +2025-03-24 20:42:31,989 Train Loss: 0.0002668, Val Loss: 0.0002977 +2025-03-24 20:42:31,989 Epoch 209/2000 +2025-03-24 20:43:37,192 Current Learning Rate: 0.0000049882 +2025-03-24 20:43:37,193 Train Loss: 0.0002669, Val Loss: 0.0002977 +2025-03-24 20:43:37,193 Epoch 210/2000 +2025-03-24 20:44:42,722 Current Learning Rate: 0.0000061558 +2025-03-24 20:44:42,722 Train Loss: 0.0002670, Val Loss: 0.0002977 +2025-03-24 20:44:42,723 Epoch 211/2000 +2025-03-24 20:45:48,024 Current Learning Rate: 0.0000074453 +2025-03-24 20:45:48,025 Train Loss: 0.0002672, Val Loss: 0.0002978 +2025-03-24 20:45:48,025 Epoch 212/2000 +2025-03-24 20:46:53,099 Current Learning Rate: 0.0000088564 +2025-03-24 20:46:53,100 Train Loss: 0.0002673, Val Loss: 0.0002979 +2025-03-24 20:46:53,100 Epoch 213/2000 +2025-03-24 20:47:58,341 Current Learning Rate: 0.0000103886 +2025-03-24 20:47:58,341 Train Loss: 0.0002675, Val Loss: 0.0002980 +2025-03-24 20:47:58,341 Epoch 214/2000 +2025-03-24 20:49:03,456 Current Learning Rate: 0.0000120416 +2025-03-24 20:49:03,457 Train Loss: 0.0002676, Val Loss: 0.0002981 +2025-03-24 20:49:03,457 Epoch 215/2000 +2025-03-24 20:50:09,411 Current Learning 
Rate: 0.0000138150 +2025-03-24 20:50:09,411 Train Loss: 0.0002678, Val Loss: 0.0002982 +2025-03-24 20:50:09,411 Epoch 216/2000 +2025-03-24 20:51:14,522 Current Learning Rate: 0.0000157084 +2025-03-24 20:51:14,522 Train Loss: 0.0002680, Val Loss: 0.0002983 +2025-03-24 20:51:14,522 Epoch 217/2000 +2025-03-24 20:52:19,800 Current Learning Rate: 0.0000177213 +2025-03-24 20:52:19,800 Train Loss: 0.0002682, Val Loss: 0.0002983 +2025-03-24 20:52:19,801 Epoch 218/2000 +2025-03-24 20:53:25,885 Current Learning Rate: 0.0000198532 +2025-03-24 20:53:25,893 Train Loss: 0.0002683, Val Loss: 0.0002984 +2025-03-24 20:53:25,893 Epoch 219/2000 +2025-03-24 20:54:31,311 Current Learning Rate: 0.0000221035 +2025-03-24 20:54:31,311 Train Loss: 0.0002685, Val Loss: 0.0002985 +2025-03-24 20:54:31,312 Epoch 220/2000 +2025-03-24 20:55:36,650 Current Learning Rate: 0.0000244717 +2025-03-24 20:55:36,651 Train Loss: 0.0002687, Val Loss: 0.0002987 +2025-03-24 20:55:36,651 Epoch 221/2000 +2025-03-24 20:56:42,126 Current Learning Rate: 0.0000269573 +2025-03-24 20:56:42,126 Train Loss: 0.0002689, Val Loss: 0.0002988 +2025-03-24 20:56:42,126 Epoch 222/2000 +2025-03-24 20:57:46,902 Current Learning Rate: 0.0000295596 +2025-03-24 20:57:46,902 Train Loss: 0.0002692, Val Loss: 0.0002990 +2025-03-24 20:57:46,903 Epoch 223/2000 +2025-03-24 20:58:51,832 Current Learning Rate: 0.0000322780 +2025-03-24 20:58:51,833 Train Loss: 0.0002694, Val Loss: 0.0002991 +2025-03-24 20:58:51,833 Epoch 224/2000 +2025-03-24 20:59:56,746 Current Learning Rate: 0.0000351118 +2025-03-24 20:59:56,746 Train Loss: 0.0002696, Val Loss: 0.0002993 +2025-03-24 20:59:56,747 Epoch 225/2000 +2025-03-24 21:01:01,888 Current Learning Rate: 0.0000380602 +2025-03-24 21:01:01,889 Train Loss: 0.0002698, Val Loss: 0.0002995 +2025-03-24 21:01:01,889 Epoch 226/2000 +2025-03-24 21:02:07,034 Current Learning Rate: 0.0000411227 +2025-03-24 21:02:07,035 Train Loss: 0.0002701, Val Loss: 0.0002997 +2025-03-24 21:02:07,035 Epoch 227/2000 +2025-03-24 
21:03:12,420 Current Learning Rate: 0.0000442984 +2025-03-24 21:03:12,420 Train Loss: 0.0002703, Val Loss: 0.0002999 +2025-03-24 21:03:12,421 Epoch 228/2000 +2025-03-24 21:04:18,365 Current Learning Rate: 0.0000475865 +2025-03-24 21:04:18,365 Train Loss: 0.0002706, Val Loss: 0.0003001 +2025-03-24 21:04:18,366 Epoch 229/2000 +2025-03-24 21:05:23,583 Current Learning Rate: 0.0000509862 +2025-03-24 21:05:23,583 Train Loss: 0.0002708, Val Loss: 0.0003004 +2025-03-24 21:05:23,584 Epoch 230/2000 +2025-03-24 21:06:29,203 Current Learning Rate: 0.0000544967 +2025-03-24 21:06:29,204 Train Loss: 0.0002711, Val Loss: 0.0003009 +2025-03-24 21:06:29,204 Epoch 231/2000 +2025-03-24 21:07:34,536 Current Learning Rate: 0.0000581172 +2025-03-24 21:07:34,536 Train Loss: 0.0002714, Val Loss: 0.0003013 +2025-03-24 21:07:34,536 Epoch 232/2000 +2025-03-24 21:08:39,977 Current Learning Rate: 0.0000618467 +2025-03-24 21:08:39,977 Train Loss: 0.0002716, Val Loss: 0.0003017 +2025-03-24 21:08:39,978 Epoch 233/2000 +2025-03-24 21:09:45,444 Current Learning Rate: 0.0000656842 +2025-03-24 21:09:45,445 Train Loss: 0.0002719, Val Loss: 0.0003020 +2025-03-24 21:09:45,445 Epoch 234/2000 +2025-03-24 21:10:50,676 Current Learning Rate: 0.0000696290 +2025-03-24 21:10:50,677 Train Loss: 0.0002722, Val Loss: 0.0003022 +2025-03-24 21:10:50,677 Epoch 235/2000 +2025-03-24 21:11:56,238 Current Learning Rate: 0.0000736799 +2025-03-24 21:11:56,238 Train Loss: 0.0002724, Val Loss: 0.0003024 +2025-03-24 21:11:56,239 Epoch 236/2000 +2025-03-24 21:13:01,392 Current Learning Rate: 0.0000778360 +2025-03-24 21:13:01,392 Train Loss: 0.0002727, Val Loss: 0.0003025 +2025-03-24 21:13:01,393 Epoch 237/2000 +2025-03-24 21:14:06,409 Current Learning Rate: 0.0000820963 +2025-03-24 21:14:06,409 Train Loss: 0.0002730, Val Loss: 0.0003025 +2025-03-24 21:14:06,410 Epoch 238/2000 +2025-03-24 21:15:11,781 Current Learning Rate: 0.0000864597 +2025-03-24 21:15:11,782 Train Loss: 0.0002733, Val Loss: 0.0003026 +2025-03-24 
21:15:11,782 Epoch 239/2000 +2025-03-24 21:16:16,938 Current Learning Rate: 0.0000909251 +2025-03-24 21:16:16,939 Train Loss: 0.0002736, Val Loss: 0.0003027 +2025-03-24 21:16:16,939 Epoch 240/2000 +2025-03-24 21:17:21,849 Current Learning Rate: 0.0000954915 +2025-03-24 21:17:21,849 Train Loss: 0.0002739, Val Loss: 0.0003029 +2025-03-24 21:17:21,850 Epoch 241/2000 +2025-03-24 21:18:27,163 Current Learning Rate: 0.0001001577 +2025-03-24 21:18:27,163 Train Loss: 0.0002742, Val Loss: 0.0003032 +2025-03-24 21:18:27,164 Epoch 242/2000 +2025-03-24 21:19:32,252 Current Learning Rate: 0.0001049225 +2025-03-24 21:19:32,252 Train Loss: 0.0002746, Val Loss: 0.0003035 +2025-03-24 21:19:32,252 Epoch 243/2000 +2025-03-24 21:20:37,469 Current Learning Rate: 0.0001097848 +2025-03-24 21:20:37,470 Train Loss: 0.0002749, Val Loss: 0.0003039 +2025-03-24 21:20:37,470 Epoch 244/2000 +2025-03-24 21:21:42,497 Current Learning Rate: 0.0001147434 +2025-03-24 21:21:42,498 Train Loss: 0.0002752, Val Loss: 0.0003043 +2025-03-24 21:21:42,498 Epoch 245/2000 +2025-03-24 21:22:47,953 Current Learning Rate: 0.0001197970 +2025-03-24 21:22:47,954 Train Loss: 0.0002756, Val Loss: 0.0003047 +2025-03-24 21:22:47,954 Epoch 246/2000 +2025-03-24 21:23:52,991 Current Learning Rate: 0.0001249445 +2025-03-24 21:23:52,992 Train Loss: 0.0002759, Val Loss: 0.0003052 +2025-03-24 21:23:52,992 Epoch 247/2000 +2025-03-24 21:24:58,523 Current Learning Rate: 0.0001301845 +2025-03-24 21:24:58,524 Train Loss: 0.0002763, Val Loss: 0.0003056 +2025-03-24 21:24:58,524 Epoch 248/2000 +2025-03-24 21:26:03,830 Current Learning Rate: 0.0001355157 +2025-03-24 21:26:03,831 Train Loss: 0.0002766, Val Loss: 0.0003060 +2025-03-24 21:26:03,831 Epoch 249/2000 +2025-03-24 21:27:09,155 Current Learning Rate: 0.0001409369 +2025-03-24 21:27:09,155 Train Loss: 0.0002770, Val Loss: 0.0003063 +2025-03-24 21:27:09,156 Epoch 250/2000 +2025-03-24 21:28:14,231 Current Learning Rate: 0.0001464466 +2025-03-24 21:28:14,232 Train Loss: 0.0002774, Val 
Loss: 0.0003066 +2025-03-24 21:28:14,232 Epoch 251/2000 +2025-03-24 21:29:19,558 Current Learning Rate: 0.0001520436 +2025-03-24 21:29:19,559 Train Loss: 0.0002777, Val Loss: 0.0003069 +2025-03-24 21:29:19,559 Epoch 252/2000 +2025-03-24 21:30:24,325 Current Learning Rate: 0.0001577264 +2025-03-24 21:30:24,325 Train Loss: 0.0002781, Val Loss: 0.0003071 +2025-03-24 21:30:24,326 Epoch 253/2000 +2025-03-24 21:31:29,130 Current Learning Rate: 0.0001634937 +2025-03-24 21:31:29,130 Train Loss: 0.0002785, Val Loss: 0.0003073 +2025-03-24 21:31:29,131 Epoch 254/2000 +2025-03-24 21:32:34,150 Current Learning Rate: 0.0001693441 +2025-03-24 21:32:34,151 Train Loss: 0.0002788, Val Loss: 0.0003075 +2025-03-24 21:32:34,151 Epoch 255/2000 +2025-03-24 21:33:39,182 Current Learning Rate: 0.0001752760 +2025-03-24 21:33:39,183 Train Loss: 0.0002792, Val Loss: 0.0003077 +2025-03-24 21:33:39,183 Epoch 256/2000 +2025-03-24 21:34:44,935 Current Learning Rate: 0.0001812880 +2025-03-24 21:34:44,935 Train Loss: 0.0002796, Val Loss: 0.0003080 +2025-03-24 21:34:44,936 Epoch 257/2000 +2025-03-24 21:35:49,960 Current Learning Rate: 0.0001873787 +2025-03-24 21:35:49,961 Train Loss: 0.0002800, Val Loss: 0.0003084 +2025-03-24 21:35:49,961 Epoch 258/2000 +2025-03-24 21:36:55,366 Current Learning Rate: 0.0001935465 +2025-03-24 21:36:55,367 Train Loss: 0.0002804, Val Loss: 0.0003089 +2025-03-24 21:36:55,367 Epoch 259/2000 +2025-03-24 21:38:00,761 Current Learning Rate: 0.0001997899 +2025-03-24 21:38:00,762 Train Loss: 0.0002808, Val Loss: 0.0003094 +2025-03-24 21:38:00,762 Epoch 260/2000 +2025-03-24 21:39:06,129 Current Learning Rate: 0.0002061074 +2025-03-24 21:39:06,130 Train Loss: 0.0002813, Val Loss: 0.0003099 +2025-03-24 21:39:06,130 Epoch 261/2000 +2025-03-24 21:40:11,673 Current Learning Rate: 0.0002124974 +2025-03-24 21:40:11,674 Train Loss: 0.0002817, Val Loss: 0.0003105 +2025-03-24 21:40:11,674 Epoch 262/2000 +2025-03-24 21:41:16,874 Current Learning Rate: 0.0002189583 +2025-03-24 
21:41:16,874 Train Loss: 0.0002821, Val Loss: 0.0003112 +2025-03-24 21:41:16,875 Epoch 263/2000 +2025-03-24 21:42:22,479 Current Learning Rate: 0.0002254886 +2025-03-24 21:42:22,479 Train Loss: 0.0002825, Val Loss: 0.0003119 +2025-03-24 21:42:22,480 Epoch 264/2000 +2025-03-24 21:43:27,636 Current Learning Rate: 0.0002320866 +2025-03-24 21:43:27,637 Train Loss: 0.0002830, Val Loss: 0.0003128 +2025-03-24 21:43:27,637 Epoch 265/2000 +2025-03-24 21:44:32,287 Current Learning Rate: 0.0002387507 +2025-03-24 21:44:32,288 Train Loss: 0.0002834, Val Loss: 0.0003135 +2025-03-24 21:44:32,288 Epoch 266/2000 +2025-03-24 21:45:37,792 Current Learning Rate: 0.0002454793 +2025-03-24 21:45:37,793 Train Loss: 0.0002839, Val Loss: 0.0003143 +2025-03-24 21:45:37,793 Epoch 267/2000 +2025-03-24 21:46:43,054 Current Learning Rate: 0.0002522707 +2025-03-24 21:46:43,054 Train Loss: 0.0002843, Val Loss: 0.0003150 +2025-03-24 21:46:43,054 Epoch 268/2000 +2025-03-24 21:47:48,157 Current Learning Rate: 0.0002591232 +2025-03-24 21:47:48,158 Train Loss: 0.0002848, Val Loss: 0.0003156 +2025-03-24 21:47:48,158 Epoch 269/2000 +2025-03-24 21:48:53,231 Current Learning Rate: 0.0002660351 +2025-03-24 21:48:53,231 Train Loss: 0.0002852, Val Loss: 0.0003163 +2025-03-24 21:48:53,232 Epoch 270/2000 +2025-03-24 21:49:58,414 Current Learning Rate: 0.0002730048 +2025-03-24 21:49:58,414 Train Loss: 0.0002857, Val Loss: 0.0003169 +2025-03-24 21:49:58,415 Epoch 271/2000 +2025-03-24 21:51:04,042 Current Learning Rate: 0.0002800304 +2025-03-24 21:51:04,043 Train Loss: 0.0002861, Val Loss: 0.0003177 +2025-03-24 21:51:04,043 Epoch 272/2000 +2025-03-24 21:52:10,033 Current Learning Rate: 0.0002871104 +2025-03-24 21:52:10,034 Train Loss: 0.0002866, Val Loss: 0.0003185 +2025-03-24 21:52:10,034 Epoch 273/2000 +2025-03-24 21:53:15,695 Current Learning Rate: 0.0002942428 +2025-03-24 21:53:15,695 Train Loss: 0.0002870, Val Loss: 0.0003194 +2025-03-24 21:53:15,696 Epoch 274/2000 +2025-03-24 21:54:21,113 Current Learning 
Rate: 0.0003014261 +2025-03-24 21:54:21,113 Train Loss: 0.0002874, Val Loss: 0.0003204 +2025-03-24 21:54:21,113 Epoch 275/2000 +2025-03-24 21:55:26,334 Current Learning Rate: 0.0003086583 +2025-03-24 21:55:26,334 Train Loss: 0.0002879, Val Loss: 0.0003214 +2025-03-24 21:55:26,335 Epoch 276/2000 +2025-03-24 21:56:31,497 Current Learning Rate: 0.0003159377 +2025-03-24 21:56:31,498 Train Loss: 0.0002883, Val Loss: 0.0003222 +2025-03-24 21:56:31,498 Epoch 277/2000 +2025-03-24 21:57:36,626 Current Learning Rate: 0.0003232626 +2025-03-24 21:57:36,627 Train Loss: 0.0002888, Val Loss: 0.0003227 +2025-03-24 21:57:36,627 Epoch 278/2000 +2025-03-24 21:58:41,887 Current Learning Rate: 0.0003306310 +2025-03-24 21:58:41,888 Train Loss: 0.0002893, Val Loss: 0.0003235 +2025-03-24 21:58:41,888 Epoch 279/2000 +2025-03-24 21:59:47,331 Current Learning Rate: 0.0003380413 +2025-03-24 21:59:47,331 Train Loss: 0.0002898, Val Loss: 0.0003244 +2025-03-24 21:59:47,331 Epoch 280/2000 +2025-03-24 22:00:52,566 Current Learning Rate: 0.0003454915 +2025-03-24 22:00:52,566 Train Loss: 0.0002903, Val Loss: 0.0003253 +2025-03-24 22:00:52,567 Epoch 281/2000 +2025-03-24 22:01:57,345 Current Learning Rate: 0.0003529798 +2025-03-24 22:01:57,346 Train Loss: 0.0002908, Val Loss: 0.0003263 +2025-03-24 22:01:57,346 Epoch 282/2000 +2025-03-24 22:03:02,639 Current Learning Rate: 0.0003605044 +2025-03-24 22:03:02,640 Train Loss: 0.0002913, Val Loss: 0.0003271 +2025-03-24 22:03:02,641 Epoch 283/2000 +2025-03-24 22:04:07,812 Current Learning Rate: 0.0003680635 +2025-03-24 22:04:07,813 Train Loss: 0.0002918, Val Loss: 0.0003276 +2025-03-24 22:04:07,813 Epoch 284/2000 +2025-03-24 22:05:12,928 Current Learning Rate: 0.0003756551 +2025-03-24 22:05:12,929 Train Loss: 0.0002923, Val Loss: 0.0003282 +2025-03-24 22:05:12,929 Epoch 285/2000 +2025-03-24 22:06:18,404 Current Learning Rate: 0.0003832773 +2025-03-24 22:06:18,405 Train Loss: 0.0002928, Val Loss: 0.0003288 +2025-03-24 22:06:18,405 Epoch 286/2000 +2025-03-24 
22:07:24,019 Current Learning Rate: 0.0003909284 +2025-03-24 22:07:24,020 Train Loss: 0.0002933, Val Loss: 0.0003296 +2025-03-24 22:07:24,020 Epoch 287/2000 +2025-03-24 22:08:29,352 Current Learning Rate: 0.0003986064 +2025-03-24 22:08:29,353 Train Loss: 0.0002938, Val Loss: 0.0003305 +2025-03-24 22:08:29,353 Epoch 288/2000 +2025-03-24 22:09:35,515 Current Learning Rate: 0.0004063093 +2025-03-24 22:09:35,515 Train Loss: 0.0002943, Val Loss: 0.0003316 +2025-03-24 22:09:35,516 Epoch 289/2000 +2025-03-24 22:10:40,966 Current Learning Rate: 0.0004140354 +2025-03-24 22:10:40,966 Train Loss: 0.0002949, Val Loss: 0.0003325 +2025-03-24 22:10:40,966 Epoch 290/2000 +2025-03-24 22:11:46,429 Current Learning Rate: 0.0004217828 +2025-03-24 22:11:46,430 Train Loss: 0.0002955, Val Loss: 0.0003335 +2025-03-24 22:11:46,430 Epoch 291/2000 +2025-03-24 22:12:52,012 Current Learning Rate: 0.0004295494 +2025-03-24 22:12:52,012 Train Loss: 0.0002962, Val Loss: 0.0003346 +2025-03-24 22:12:52,013 Epoch 292/2000 +2025-03-24 22:13:57,882 Current Learning Rate: 0.0004373334 +2025-03-24 22:13:57,882 Train Loss: 0.0002974, Val Loss: 0.0003360 +2025-03-24 22:13:57,883 Epoch 293/2000 +2025-03-24 22:15:03,593 Current Learning Rate: 0.0004451328 +2025-03-24 22:15:03,593 Train Loss: 0.0002998, Val Loss: 0.0003377 +2025-03-24 22:15:03,594 Epoch 294/2000 +2025-03-24 22:16:09,093 Current Learning Rate: 0.0004529458 +2025-03-24 22:16:09,094 Train Loss: 0.0003013, Val Loss: 0.0003395 +2025-03-24 22:16:09,094 Epoch 295/2000 +2025-03-24 22:17:14,179 Current Learning Rate: 0.0004607705 +2025-03-24 22:17:14,180 Train Loss: 0.0003023, Val Loss: 0.0003439 +2025-03-24 22:17:14,180 Epoch 296/2000 +2025-03-24 22:18:19,767 Current Learning Rate: 0.0004686047 +2025-03-24 22:18:19,767 Train Loss: 0.0003019, Val Loss: 0.0003448 +2025-03-24 22:18:19,768 Epoch 297/2000 +2025-03-24 22:19:25,037 Current Learning Rate: 0.0004764468 +2025-03-24 22:19:25,038 Train Loss: 0.0003002, Val Loss: 0.0003407 +2025-03-24 
22:19:25,038 Epoch 298/2000 +2025-03-24 22:20:30,579 Current Learning Rate: 0.0004842946 +2025-03-24 22:20:30,580 Train Loss: 0.0002978, Val Loss: 0.0003358 +2025-03-24 22:20:30,581 Epoch 299/2000 +2025-03-24 22:21:35,969 Current Learning Rate: 0.0004921463 +2025-03-24 22:21:35,970 Train Loss: 0.0002981, Val Loss: 0.0003390 +2025-03-24 22:21:35,970 Epoch 300/2000 +2025-03-24 22:22:41,269 Current Learning Rate: 0.0005000000 +2025-03-24 22:22:41,269 Train Loss: 0.0002997, Val Loss: 0.0003408 +2025-03-24 22:22:41,270 Epoch 301/2000 +2025-03-24 22:23:46,100 Current Learning Rate: 0.0005078537 +2025-03-24 22:23:46,100 Train Loss: 0.0003014, Val Loss: 0.0003445 +2025-03-24 22:23:46,101 Epoch 302/2000 +2025-03-24 22:24:51,433 Current Learning Rate: 0.0005157054 +2025-03-24 22:24:51,433 Train Loss: 0.0003029, Val Loss: 0.0003438 +2025-03-24 22:24:51,434 Epoch 303/2000 +2025-03-24 22:25:56,622 Current Learning Rate: 0.0005235532 +2025-03-24 22:25:56,622 Train Loss: 0.0003040, Val Loss: 0.0003452 +2025-03-24 22:25:56,623 Epoch 304/2000 +2025-03-24 22:27:01,866 Current Learning Rate: 0.0005313953 +2025-03-24 22:27:01,867 Train Loss: 0.0003050, Val Loss: 0.0003463 +2025-03-24 22:27:01,867 Epoch 305/2000 +2025-03-24 22:28:07,063 Current Learning Rate: 0.0005392295 +2025-03-24 22:28:07,063 Train Loss: 0.0003060, Val Loss: 0.0003481 +2025-03-24 22:28:07,064 Epoch 306/2000 +2025-03-24 22:29:12,322 Current Learning Rate: 0.0005470542 +2025-03-24 22:29:12,323 Train Loss: 0.0003070, Val Loss: 0.0003483 +2025-03-24 22:29:12,323 Epoch 307/2000 +2025-03-24 22:30:17,769 Current Learning Rate: 0.0005548672 +2025-03-24 22:30:17,769 Train Loss: 0.0003061, Val Loss: 0.0003460 +2025-03-24 22:30:17,769 Epoch 308/2000 +2025-03-24 22:31:23,034 Current Learning Rate: 0.0005626666 +2025-03-24 22:31:23,034 Train Loss: 0.0003034, Val Loss: 0.0003443 +2025-03-24 22:31:23,035 Epoch 309/2000 +2025-03-24 22:32:28,624 Current Learning Rate: 0.0005704506 +2025-03-24 22:32:28,624 Train Loss: 0.0003032, Val 
Loss: 0.0003488 +2025-03-24 22:32:28,625 Epoch 310/2000 +2025-03-24 22:33:33,883 Current Learning Rate: 0.0005782172 +2025-03-24 22:33:33,884 Train Loss: 0.0003060, Val Loss: 0.0003538 +2025-03-24 22:33:33,884 Epoch 311/2000 +2025-03-24 22:34:39,100 Current Learning Rate: 0.0005859646 +2025-03-24 22:34:39,101 Train Loss: 0.0003082, Val Loss: 0.0003513 +2025-03-24 22:34:39,101 Epoch 312/2000 +2025-03-24 22:35:44,671 Current Learning Rate: 0.0005936907 +2025-03-24 22:35:44,672 Train Loss: 0.0003093, Val Loss: 0.0003517 +2025-03-24 22:35:44,672 Epoch 313/2000 +2025-03-24 22:36:50,241 Current Learning Rate: 0.0006013936 +2025-03-24 22:36:50,242 Train Loss: 0.0003090, Val Loss: 0.0003528 +2025-03-24 22:36:50,242 Epoch 314/2000 +2025-03-24 22:37:55,651 Current Learning Rate: 0.0006090716 +2025-03-24 22:37:55,652 Train Loss: 0.0003086, Val Loss: 0.0003533 +2025-03-24 22:37:55,652 Epoch 315/2000 +2025-03-24 22:39:01,189 Current Learning Rate: 0.0006167227 +2025-03-24 22:39:01,189 Train Loss: 0.0003076, Val Loss: 0.0003540 +2025-03-24 22:39:01,189 Epoch 316/2000 +2025-03-24 22:40:06,957 Current Learning Rate: 0.0006243449 +2025-03-24 22:40:06,958 Train Loss: 0.0003090, Val Loss: 0.0003608 +2025-03-24 22:40:06,958 Epoch 317/2000 +2025-03-24 22:41:12,714 Current Learning Rate: 0.0006319365 +2025-03-24 22:41:12,714 Train Loss: 0.0003105, Val Loss: 0.0003559 +2025-03-24 22:41:12,714 Epoch 318/2000 +2025-03-24 22:42:18,167 Current Learning Rate: 0.0006394956 +2025-03-24 22:42:18,167 Train Loss: 0.0003128, Val Loss: 0.0003615 +2025-03-24 22:42:18,168 Epoch 319/2000 +2025-03-24 22:43:23,847 Current Learning Rate: 0.0006470202 +2025-03-24 22:43:23,847 Train Loss: 0.0003141, Val Loss: 0.0003580 +2025-03-24 22:43:23,847 Epoch 320/2000 +2025-03-24 22:44:28,908 Current Learning Rate: 0.0006545085 +2025-03-24 22:44:28,909 Train Loss: 0.0003143, Val Loss: 0.0003610 +2025-03-24 22:44:28,909 Epoch 321/2000 +2025-03-24 22:45:34,483 Current Learning Rate: 0.0006619587 +2025-03-24 
22:45:34,484 Train Loss: 0.0003135, Val Loss: 0.0003574 +2025-03-24 22:45:34,484 Epoch 322/2000 +2025-03-24 22:46:39,422 Current Learning Rate: 0.0006693690 +2025-03-24 22:46:39,422 Train Loss: 0.0003113, Val Loss: 0.0003563 +2025-03-24 22:46:39,423 Epoch 323/2000 +2025-03-24 22:47:44,710 Current Learning Rate: 0.0006767374 +2025-03-24 22:47:44,711 Train Loss: 0.0003105, Val Loss: 0.0003656 +2025-03-24 22:47:44,711 Epoch 324/2000 +2025-03-24 22:48:49,934 Current Learning Rate: 0.0006840623 +2025-03-24 22:48:49,934 Train Loss: 0.0003131, Val Loss: 0.0003665 +2025-03-24 22:48:49,935 Epoch 325/2000 +2025-03-24 22:49:54,885 Current Learning Rate: 0.0006913417 +2025-03-24 22:49:54,886 Train Loss: 0.0003164, Val Loss: 0.0003601 +2025-03-24 22:49:54,886 Epoch 326/2000 +2025-03-24 22:51:00,237 Current Learning Rate: 0.0006985739 +2025-03-24 22:51:00,238 Train Loss: 0.0003166, Val Loss: 0.0003592 +2025-03-24 22:51:00,238 Epoch 327/2000 +2025-03-24 22:52:05,434 Current Learning Rate: 0.0007057572 +2025-03-24 22:52:05,435 Train Loss: 0.0003146, Val Loss: 0.0003573 +2025-03-24 22:52:05,435 Epoch 328/2000 +2025-03-24 22:53:10,652 Current Learning Rate: 0.0007128896 +2025-03-24 22:53:10,653 Train Loss: 0.0003127, Val Loss: 0.0003660 +2025-03-24 22:53:10,653 Epoch 329/2000 +2025-03-24 22:54:15,746 Current Learning Rate: 0.0007199696 +2025-03-24 22:54:15,746 Train Loss: 0.0003157, Val Loss: 0.0003765 +2025-03-24 22:54:15,747 Epoch 330/2000 +2025-03-24 22:55:20,910 Current Learning Rate: 0.0007269952 +2025-03-24 22:55:20,910 Train Loss: 0.0003174, Val Loss: 0.0003681 +2025-03-24 22:55:20,911 Epoch 331/2000 +2025-03-24 22:56:26,075 Current Learning Rate: 0.0007339649 +2025-03-24 22:56:26,075 Train Loss: 0.0003190, Val Loss: 0.0003664 +2025-03-24 22:56:26,076 Epoch 332/2000 +2025-03-24 22:57:31,589 Current Learning Rate: 0.0007408768 +2025-03-24 22:57:31,589 Train Loss: 0.0003207, Val Loss: 0.0003638 +2025-03-24 22:57:31,590 Epoch 333/2000 +2025-03-24 22:58:37,180 Current Learning 
Rate: 0.0007477293 +2025-03-24 22:58:37,181 Train Loss: 0.0003190, Val Loss: 0.0003700 +2025-03-24 22:58:37,181 Epoch 334/2000 +2025-03-24 22:59:42,856 Current Learning Rate: 0.0007545207 +2025-03-24 22:59:42,857 Train Loss: 0.0003164, Val Loss: 0.0003634 +2025-03-24 22:59:42,857 Epoch 335/2000 +2025-03-24 23:00:47,553 Current Learning Rate: 0.0007612493 +2025-03-24 23:00:47,553 Train Loss: 0.0003161, Val Loss: 0.0003710 +2025-03-24 23:00:47,554 Epoch 336/2000 +2025-03-24 23:01:52,512 Current Learning Rate: 0.0007679134 +2025-03-24 23:01:52,512 Train Loss: 0.0003192, Val Loss: 0.0003710 +2025-03-24 23:01:52,513 Epoch 337/2000 +2025-03-24 23:02:57,719 Current Learning Rate: 0.0007745114 +2025-03-24 23:02:57,720 Train Loss: 0.0003197, Val Loss: 0.0003741 +2025-03-24 23:02:57,720 Epoch 338/2000 +2025-03-24 23:04:02,770 Current Learning Rate: 0.0007810417 +2025-03-24 23:04:02,771 Train Loss: 0.0003215, Val Loss: 0.0003669 +2025-03-24 23:04:02,771 Epoch 339/2000 +2025-03-24 23:05:07,827 Current Learning Rate: 0.0007875026 +2025-03-24 23:05:07,828 Train Loss: 0.0003197, Val Loss: 0.0003637 +2025-03-24 23:05:07,828 Epoch 340/2000 +2025-03-24 23:06:13,063 Current Learning Rate: 0.0007938926 +2025-03-24 23:06:13,064 Train Loss: 0.0003166, Val Loss: 0.0003673 +2025-03-24 23:06:13,064 Epoch 341/2000 +2025-03-24 23:07:18,542 Current Learning Rate: 0.0008002101 +2025-03-24 23:07:18,543 Train Loss: 0.0003192, Val Loss: 0.0003801 +2025-03-24 23:07:18,543 Epoch 342/2000 +2025-03-24 23:08:24,006 Current Learning Rate: 0.0008064535 +2025-03-24 23:08:24,006 Train Loss: 0.0003231, Val Loss: 0.0003720 +2025-03-24 23:08:24,006 Epoch 343/2000 +2025-03-24 23:09:29,265 Current Learning Rate: 0.0008126213 +2025-03-24 23:09:29,265 Train Loss: 0.0003210, Val Loss: 0.0003686 +2025-03-24 23:09:29,266 Epoch 344/2000 +2025-03-24 23:10:34,953 Current Learning Rate: 0.0008187120 +2025-03-24 23:10:34,953 Train Loss: 0.0003226, Val Loss: 0.0003748 +2025-03-24 23:10:34,954 Epoch 345/2000 +2025-03-24 
23:11:40,330 Current Learning Rate: 0.0008247240 +2025-03-24 23:11:40,331 Train Loss: 0.0003246, Val Loss: 0.0003760 +2025-03-24 23:11:40,331 Epoch 346/2000 +2025-03-24 23:12:45,887 Current Learning Rate: 0.0008306559 +2025-03-24 23:12:45,888 Train Loss: 0.0003295, Val Loss: 0.0003829 +2025-03-24 23:12:45,888 Epoch 347/2000 +2025-03-24 23:13:50,818 Current Learning Rate: 0.0008365063 +2025-03-24 23:13:50,819 Train Loss: 0.0003295, Val Loss: 0.0003744 +2025-03-24 23:13:50,819 Epoch 348/2000 +2025-03-24 23:14:55,799 Current Learning Rate: 0.0008422736 +2025-03-24 23:14:55,800 Train Loss: 0.0003283, Val Loss: 0.0003765 +2025-03-24 23:14:55,800 Epoch 349/2000 +2025-03-24 23:16:00,959 Current Learning Rate: 0.0008479564 +2025-03-24 23:16:00,960 Train Loss: 0.0003243, Val Loss: 0.0003677 +2025-03-24 23:16:00,960 Epoch 350/2000 +2025-03-24 23:17:06,029 Current Learning Rate: 0.0008535534 +2025-03-24 23:17:06,029 Train Loss: 0.0003158, Val Loss: 0.0003603 +2025-03-24 23:17:06,030 Epoch 351/2000 +2025-03-24 23:18:11,346 Current Learning Rate: 0.0008590631 +2025-03-24 23:18:11,346 Train Loss: 0.0003178, Val Loss: 0.0003801 +2025-03-24 23:18:11,346 Epoch 352/2000 +2025-03-24 23:19:16,836 Current Learning Rate: 0.0008644843 +2025-03-24 23:19:16,836 Train Loss: 0.0003223, Val Loss: 0.0003813 +2025-03-24 23:19:16,837 Epoch 353/2000 +2025-03-24 23:20:22,180 Current Learning Rate: 0.0008698155 +2025-03-24 23:20:22,180 Train Loss: 0.0003244, Val Loss: 0.0003752 +2025-03-24 23:20:22,180 Epoch 354/2000 +2025-03-24 23:21:27,973 Current Learning Rate: 0.0008750555 +2025-03-24 23:21:27,974 Train Loss: 0.0003241, Val Loss: 0.0003763 +2025-03-24 23:21:27,974 Epoch 355/2000 +2025-03-24 23:22:33,469 Current Learning Rate: 0.0008802030 +2025-03-24 23:22:33,470 Train Loss: 0.0003214, Val Loss: 0.0003719 +2025-03-24 23:22:33,470 Epoch 356/2000 +2025-03-24 23:23:39,153 Current Learning Rate: 0.0008852566 +2025-03-24 23:23:39,154 Train Loss: 0.0003197, Val Loss: 0.0003749 +2025-03-24 
23:23:39,154 Epoch 357/2000 +2025-03-24 23:24:44,645 Current Learning Rate: 0.0008902152 +2025-03-24 23:24:44,646 Train Loss: 0.0003233, Val Loss: 0.0003827 +2025-03-24 23:24:44,646 Epoch 358/2000 +2025-03-24 23:25:50,204 Current Learning Rate: 0.0008950775 +2025-03-24 23:25:50,205 Train Loss: 0.0003229, Val Loss: 0.0003760 +2025-03-24 23:25:50,205 Epoch 359/2000 +2025-03-24 23:26:55,642 Current Learning Rate: 0.0008998423 +2025-03-24 23:26:55,643 Train Loss: 0.0003253, Val Loss: 0.0003817 +2025-03-24 23:26:55,643 Epoch 360/2000 +2025-03-24 23:28:00,793 Current Learning Rate: 0.0009045085 +2025-03-24 23:28:00,794 Train Loss: 0.0003306, Val Loss: 0.0003778 +2025-03-24 23:28:00,794 Epoch 361/2000 +2025-03-24 23:29:06,247 Current Learning Rate: 0.0009090749 +2025-03-24 23:29:06,248 Train Loss: 0.0003297, Val Loss: 0.0003756 +2025-03-24 23:29:06,248 Epoch 362/2000 +2025-03-24 23:30:11,289 Current Learning Rate: 0.0009135403 +2025-03-24 23:30:11,289 Train Loss: 0.0003285, Val Loss: 0.0003792 +2025-03-24 23:30:11,290 Epoch 363/2000 +2025-03-24 23:31:16,643 Current Learning Rate: 0.0009179037 +2025-03-24 23:31:16,643 Train Loss: 0.0003287, Val Loss: 0.0003825 +2025-03-24 23:31:16,643 Epoch 364/2000 +2025-03-24 23:32:22,140 Current Learning Rate: 0.0009221640 +2025-03-24 23:32:22,141 Train Loss: 0.0003263, Val Loss: 0.0003767 +2025-03-24 23:32:22,141 Epoch 365/2000 +2025-03-24 23:33:27,677 Current Learning Rate: 0.0009263201 +2025-03-24 23:33:27,677 Train Loss: 0.0003182, Val Loss: 0.0003631 +2025-03-24 23:33:27,678 Epoch 366/2000 +2025-03-24 23:34:32,779 Current Learning Rate: 0.0009303710 +2025-03-24 23:34:32,780 Train Loss: 0.0003154, Val Loss: 0.0003753 +2025-03-24 23:34:32,780 Epoch 367/2000 +2025-03-24 23:35:37,883 Current Learning Rate: 0.0009343158 +2025-03-24 23:35:37,883 Train Loss: 0.0003210, Val Loss: 0.0003797 +2025-03-24 23:35:37,883 Epoch 368/2000 +2025-03-24 23:36:43,042 Current Learning Rate: 0.0009381533 +2025-03-24 23:36:43,042 Train Loss: 0.0003240, Val 
Loss: 0.0003792 +2025-03-24 23:36:43,043 Epoch 369/2000 +2025-03-24 23:37:48,491 Current Learning Rate: 0.0009418828 +2025-03-24 23:37:48,491 Train Loss: 0.0003240, Val Loss: 0.0003760 +2025-03-24 23:37:48,492 Epoch 370/2000 +2025-03-24 23:38:53,785 Current Learning Rate: 0.0009455033 +2025-03-24 23:38:53,786 Train Loss: 0.0003262, Val Loss: 0.0003780 +2025-03-24 23:38:53,786 Epoch 371/2000 +2025-03-24 23:39:59,217 Current Learning Rate: 0.0009490138 +2025-03-24 23:39:59,217 Train Loss: 0.0003260, Val Loss: 0.0003776 +2025-03-24 23:39:59,217 Epoch 372/2000 +2025-03-24 23:41:04,672 Current Learning Rate: 0.0009524135 +2025-03-24 23:41:04,672 Train Loss: 0.0003188, Val Loss: 0.0003646 +2025-03-24 23:41:04,672 Epoch 373/2000 +2025-03-24 23:42:10,028 Current Learning Rate: 0.0009557016 +2025-03-24 23:42:10,029 Train Loss: 0.0003163, Val Loss: 0.0003730 +2025-03-24 23:42:10,029 Epoch 374/2000 +2025-03-24 23:43:15,480 Current Learning Rate: 0.0009588773 +2025-03-24 23:43:15,480 Train Loss: 0.0003200, Val Loss: 0.0003773 +2025-03-24 23:43:15,481 Epoch 375/2000 +2025-03-24 23:44:20,982 Current Learning Rate: 0.0009619398 +2025-03-24 23:44:20,982 Train Loss: 0.0003224, Val Loss: 0.0003945 +2025-03-24 23:44:20,983 Epoch 376/2000 +2025-03-24 23:45:26,338 Current Learning Rate: 0.0009648882 +2025-03-24 23:45:26,338 Train Loss: 0.0003262, Val Loss: 0.0003743 +2025-03-24 23:45:26,338 Epoch 377/2000 +2025-03-24 23:46:31,528 Current Learning Rate: 0.0009677220 +2025-03-24 23:46:31,528 Train Loss: 0.0003231, Val Loss: 0.0003736 +2025-03-24 23:46:31,529 Epoch 378/2000 +2025-03-24 23:47:36,895 Current Learning Rate: 0.0009704404 +2025-03-24 23:47:36,895 Train Loss: 0.0003238, Val Loss: 0.0003719 +2025-03-24 23:47:36,896 Epoch 379/2000 +2025-03-24 23:48:42,286 Current Learning Rate: 0.0009730427 +2025-03-24 23:48:42,287 Train Loss: 0.0003242, Val Loss: 0.0003755 +2025-03-24 23:48:42,287 Epoch 380/2000 +2025-03-24 23:49:47,683 Current Learning Rate: 0.0009755283 +2025-03-24 
23:49:47,683 Train Loss: 0.0003213, Val Loss: 0.0003771 +2025-03-24 23:49:47,684 Epoch 381/2000 +2025-03-24 23:50:53,269 Current Learning Rate: 0.0009778965 +2025-03-24 23:50:53,270 Train Loss: 0.0003150, Val Loss: 0.0003662 +2025-03-24 23:50:53,270 Epoch 382/2000 +2025-03-24 23:51:58,428 Current Learning Rate: 0.0009801468 +2025-03-24 23:51:58,428 Train Loss: 0.0003152, Val Loss: 0.0003768 +2025-03-24 23:51:58,429 Epoch 383/2000 +2025-03-24 23:53:03,545 Current Learning Rate: 0.0009822787 +2025-03-24 23:53:03,545 Train Loss: 0.0003223, Val Loss: 0.0003802 +2025-03-24 23:53:03,545 Epoch 384/2000 +2025-03-24 23:54:08,821 Current Learning Rate: 0.0009842916 +2025-03-24 23:54:08,822 Train Loss: 0.0003204, Val Loss: 0.0003700 +2025-03-24 23:54:08,823 Epoch 385/2000 +2025-03-24 23:55:14,202 Current Learning Rate: 0.0009861850 +2025-03-24 23:55:14,203 Train Loss: 0.0003191, Val Loss: 0.0003709 +2025-03-24 23:55:14,203 Epoch 386/2000 +2025-03-24 23:56:19,387 Current Learning Rate: 0.0009879584 +2025-03-24 23:56:19,388 Train Loss: 0.0003195, Val Loss: 0.0003666 +2025-03-24 23:56:19,388 Epoch 387/2000 +2025-03-24 23:57:24,943 Current Learning Rate: 0.0009896114 +2025-03-24 23:57:24,943 Train Loss: 0.0003230, Val Loss: 0.0003750 +2025-03-24 23:57:24,944 Epoch 388/2000 +2025-03-24 23:58:30,106 Current Learning Rate: 0.0009911436 +2025-03-24 23:58:30,107 Train Loss: 0.0003245, Val Loss: 0.0003765 +2025-03-24 23:58:30,107 Epoch 389/2000 +2025-03-24 23:59:35,352 Current Learning Rate: 0.0009925547 +2025-03-24 23:59:35,353 Train Loss: 0.0003278, Val Loss: 0.0003818 +2025-03-24 23:59:35,353 Epoch 390/2000 +2025-03-25 00:00:40,556 Current Learning Rate: 0.0009938442 +2025-03-25 00:00:40,557 Train Loss: 0.0003292, Val Loss: 0.0003703 +2025-03-25 00:00:40,557 Epoch 391/2000 +2025-03-25 00:01:46,072 Current Learning Rate: 0.0009950118 +2025-03-25 00:01:46,073 Train Loss: 0.0003239, Val Loss: 0.0003641 +2025-03-25 00:01:46,073 Epoch 392/2000 +2025-03-25 00:02:51,470 Current Learning 
Rate: 0.0009960574 +2025-03-25 00:02:51,471 Train Loss: 0.0003166, Val Loss: 0.0003565 +2025-03-25 00:02:51,471 Epoch 393/2000 +2025-03-25 00:03:56,539 Current Learning Rate: 0.0009969805 +2025-03-25 00:03:56,539 Train Loss: 0.0003076, Val Loss: 0.0003549 +2025-03-25 00:03:56,540 Epoch 394/2000 +2025-03-25 00:05:02,129 Current Learning Rate: 0.0009977810 +2025-03-25 00:05:02,130 Train Loss: 0.0003111, Val Loss: 0.0003696 +2025-03-25 00:05:02,130 Epoch 395/2000 +2025-03-25 00:06:07,402 Current Learning Rate: 0.0009984587 +2025-03-25 00:06:07,402 Train Loss: 0.0003142, Val Loss: 0.0003729 +2025-03-25 00:06:07,402 Epoch 396/2000 +2025-03-25 00:07:12,902 Current Learning Rate: 0.0009990134 +2025-03-25 00:07:12,903 Train Loss: 0.0003151, Val Loss: 0.0003811 +2025-03-25 00:07:12,904 Epoch 397/2000 +2025-03-25 00:08:18,247 Current Learning Rate: 0.0009994449 +2025-03-25 00:08:18,248 Train Loss: 0.0003167, Val Loss: 0.0003731 +2025-03-25 00:08:18,249 Epoch 398/2000 +2025-03-25 00:09:23,551 Current Learning Rate: 0.0009997533 +2025-03-25 00:09:23,551 Train Loss: 0.0003176, Val Loss: 0.0003637 +2025-03-25 00:09:23,551 Epoch 399/2000 +2025-03-25 00:10:28,759 Current Learning Rate: 0.0009999383 +2025-03-25 00:10:28,760 Train Loss: 0.0003131, Val Loss: 0.0003620 +2025-03-25 00:10:28,760 Epoch 400/2000 +2025-03-25 00:11:33,746 Current Learning Rate: 0.0010000000 +2025-03-25 00:11:33,746 Train Loss: 0.0003086, Val Loss: 0.0003588 +2025-03-25 00:11:33,747 Epoch 401/2000 +2025-03-25 00:12:38,724 Current Learning Rate: 0.0009999383 +2025-03-25 00:12:38,725 Train Loss: 0.0003110, Val Loss: 0.0003700 +2025-03-25 00:12:38,725 Epoch 402/2000 +2025-03-25 00:13:43,291 Current Learning Rate: 0.0009997533 +2025-03-25 00:13:43,292 Train Loss: 0.0003144, Val Loss: 0.0003730 +2025-03-25 00:13:43,292 Epoch 403/2000 +2025-03-25 00:14:48,656 Current Learning Rate: 0.0009994449 +2025-03-25 00:14:48,657 Train Loss: 0.0003123, Val Loss: 0.0003651 +2025-03-25 00:14:48,657 Epoch 404/2000 +2025-03-25 
00:15:53,820 Current Learning Rate: 0.0009990134 +2025-03-25 00:15:53,821 Train Loss: 0.0003101, Val Loss: 0.0003603 +2025-03-25 00:15:53,821 Epoch 405/2000 +2025-03-25 00:16:58,966 Current Learning Rate: 0.0009984587 +2025-03-25 00:16:58,966 Train Loss: 0.0003098, Val Loss: 0.0003641 +2025-03-25 00:16:58,967 Epoch 406/2000 +2025-03-25 00:18:04,231 Current Learning Rate: 0.0009977810 +2025-03-25 00:18:04,231 Train Loss: 0.0003123, Val Loss: 0.0003664 +2025-03-25 00:18:04,232 Epoch 407/2000 +2025-03-25 00:19:09,455 Current Learning Rate: 0.0009969805 +2025-03-25 00:19:09,456 Train Loss: 0.0003162, Val Loss: 0.0003707 +2025-03-25 00:19:09,456 Epoch 408/2000 +2025-03-25 00:20:14,543 Current Learning Rate: 0.0009960574 +2025-03-25 00:20:14,544 Train Loss: 0.0003188, Val Loss: 0.0003643 +2025-03-25 00:20:14,544 Epoch 409/2000 +2025-03-25 00:21:20,560 Current Learning Rate: 0.0009950118 +2025-03-25 00:21:20,561 Train Loss: 0.0003193, Val Loss: 0.0003724 +2025-03-25 00:21:20,561 Epoch 410/2000 +2025-03-25 00:22:26,804 Current Learning Rate: 0.0009938442 +2025-03-25 00:22:26,804 Train Loss: 0.0003187, Val Loss: 0.0003595 +2025-03-25 00:22:26,805 Epoch 411/2000 +2025-03-25 00:23:33,040 Current Learning Rate: 0.0009925547 +2025-03-25 00:23:33,041 Train Loss: 0.0003157, Val Loss: 0.0003556 +2025-03-25 00:23:33,041 Epoch 412/2000 +2025-03-25 00:24:38,852 Current Learning Rate: 0.0009911436 +2025-03-25 00:24:38,852 Train Loss: 0.0003169, Val Loss: 0.0003574 +2025-03-25 00:24:38,853 Epoch 413/2000 +2025-03-25 00:25:45,612 Current Learning Rate: 0.0009896114 +2025-03-25 00:25:45,612 Train Loss: 0.0003153, Val Loss: 0.0003560 +2025-03-25 00:25:45,612 Epoch 414/2000 +2025-03-25 00:26:52,197 Current Learning Rate: 0.0009879584 +2025-03-25 00:26:52,198 Train Loss: 0.0003079, Val Loss: 0.0003438 +2025-03-25 00:26:52,198 Epoch 415/2000 +2025-03-25 00:27:57,566 Current Learning Rate: 0.0009861850 +2025-03-25 00:27:57,567 Train Loss: 0.0003000, Val Loss: 0.0003423 +2025-03-25 
00:27:57,568 Epoch 416/2000 +2025-03-25 00:29:03,675 Current Learning Rate: 0.0009842916 +2025-03-25 00:29:03,676 Train Loss: 0.0002981, Val Loss: 0.0003494 +2025-03-25 00:29:03,676 Epoch 417/2000 +2025-03-25 00:30:09,274 Current Learning Rate: 0.0009822787 +2025-03-25 00:30:09,275 Train Loss: 0.0003027, Val Loss: 0.0003582 +2025-03-25 00:30:09,275 Epoch 418/2000 +2025-03-25 00:31:16,226 Current Learning Rate: 0.0009801468 +2025-03-25 00:31:16,227 Train Loss: 0.0003054, Val Loss: 0.0003594 +2025-03-25 00:31:16,227 Epoch 419/2000 +2025-03-25 00:32:22,723 Current Learning Rate: 0.0009778965 +2025-03-25 00:32:22,724 Train Loss: 0.0003038, Val Loss: 0.0003616 +2025-03-25 00:32:22,724 Epoch 420/2000 +2025-03-25 00:33:28,981 Current Learning Rate: 0.0009755283 +2025-03-25 00:33:28,981 Train Loss: 0.0003064, Val Loss: 0.0003579 +2025-03-25 00:33:28,982 Epoch 421/2000 +2025-03-25 00:34:34,756 Current Learning Rate: 0.0009730427 +2025-03-25 00:34:34,756 Train Loss: 0.0003081, Val Loss: 0.0003521 +2025-03-25 00:34:34,757 Epoch 422/2000 +2025-03-25 00:35:40,768 Current Learning Rate: 0.0009704404 +2025-03-25 00:35:40,769 Train Loss: 0.0003065, Val Loss: 0.0003547 +2025-03-25 00:35:40,769 Epoch 423/2000 +2025-03-25 00:36:47,658 Current Learning Rate: 0.0009677220 +2025-03-25 00:36:47,658 Train Loss: 0.0003066, Val Loss: 0.0003549 +2025-03-25 00:36:47,659 Epoch 424/2000 +2025-03-25 00:37:53,908 Current Learning Rate: 0.0009648882 +2025-03-25 00:37:53,909 Train Loss: 0.0003074, Val Loss: 0.0003536 +2025-03-25 00:37:53,909 Epoch 425/2000 +2025-03-25 00:39:00,762 Current Learning Rate: 0.0009619398 +2025-03-25 00:39:00,762 Train Loss: 0.0003095, Val Loss: 0.0003494 +2025-03-25 00:39:00,763 Epoch 426/2000 +2025-03-25 00:40:06,787 Current Learning Rate: 0.0009588773 +2025-03-25 00:40:06,788 Train Loss: 0.0003102, Val Loss: 0.0003576 +2025-03-25 00:40:06,788 Epoch 427/2000 +2025-03-25 00:41:13,540 Current Learning Rate: 0.0009557016 +2025-03-25 00:41:13,541 Train Loss: 0.0003101, Val 
Loss: 0.0003572 +2025-03-25 00:41:13,541 Epoch 428/2000 +2025-03-25 00:42:19,883 Current Learning Rate: 0.0009524135 +2025-03-25 00:42:19,884 Train Loss: 0.0003055, Val Loss: 0.0003477 +2025-03-25 00:42:19,884 Epoch 429/2000 +2025-03-25 00:43:26,122 Current Learning Rate: 0.0009490138 +2025-03-25 00:43:26,123 Train Loss: 0.0002978, Val Loss: 0.0003361 +2025-03-25 00:43:26,123 Epoch 430/2000 +2025-03-25 00:44:32,544 Current Learning Rate: 0.0009455033 +2025-03-25 00:44:32,545 Train Loss: 0.0002924, Val Loss: 0.0003362 +2025-03-25 00:44:32,545 Epoch 431/2000 +2025-03-25 00:45:38,981 Current Learning Rate: 0.0009418828 +2025-03-25 00:45:38,981 Train Loss: 0.0002915, Val Loss: 0.0003442 +2025-03-25 00:45:38,982 Epoch 432/2000 +2025-03-25 00:46:45,074 Current Learning Rate: 0.0009381533 +2025-03-25 00:46:45,075 Train Loss: 0.0002945, Val Loss: 0.0003475 +2025-03-25 00:46:45,075 Epoch 433/2000 +2025-03-25 00:47:51,320 Current Learning Rate: 0.0009343158 +2025-03-25 00:47:51,320 Train Loss: 0.0002959, Val Loss: 0.0003501 +2025-03-25 00:47:51,321 Epoch 434/2000 +2025-03-25 00:48:56,908 Current Learning Rate: 0.0009303710 +2025-03-25 00:48:56,909 Train Loss: 0.0002967, Val Loss: 0.0003452 +2025-03-25 00:48:56,909 Epoch 435/2000 +2025-03-25 00:50:03,461 Current Learning Rate: 0.0009263201 +2025-03-25 00:50:03,461 Train Loss: 0.0002976, Val Loss: 0.0003485 +2025-03-25 00:50:03,462 Epoch 436/2000 +2025-03-25 00:51:09,558 Current Learning Rate: 0.0009221640 +2025-03-25 00:51:09,559 Train Loss: 0.0002972, Val Loss: 0.0003447 +2025-03-25 00:51:09,559 Epoch 437/2000 +2025-03-25 00:52:16,028 Current Learning Rate: 0.0009179037 +2025-03-25 00:52:16,029 Train Loss: 0.0002977, Val Loss: 0.0003440 +2025-03-25 00:52:16,029 Epoch 438/2000 +2025-03-25 00:53:22,462 Current Learning Rate: 0.0009135403 +2025-03-25 00:53:22,463 Train Loss: 0.0002994, Val Loss: 0.0003452 +2025-03-25 00:53:22,463 Epoch 439/2000 +2025-03-25 00:54:29,152 Current Learning Rate: 0.0009090749 +2025-03-25 
00:54:29,152 Train Loss: 0.0003000, Val Loss: 0.0003404 +2025-03-25 00:54:29,152 Epoch 440/2000 +2025-03-25 00:55:36,143 Current Learning Rate: 0.0009045085 +2025-03-25 00:55:36,143 Train Loss: 0.0003004, Val Loss: 0.0003439 +2025-03-25 00:55:36,144 Epoch 441/2000 +2025-03-25 00:56:42,562 Current Learning Rate: 0.0008998423 +2025-03-25 00:56:42,563 Train Loss: 0.0003010, Val Loss: 0.0003406 +2025-03-25 00:56:42,563 Epoch 442/2000 +2025-03-25 00:57:48,735 Current Learning Rate: 0.0008950775 +2025-03-25 00:57:48,736 Train Loss: 0.0002984, Val Loss: 0.0003374 +2025-03-25 00:57:48,736 Epoch 443/2000 +2025-03-25 00:58:54,860 Current Learning Rate: 0.0008902152 +2025-03-25 00:58:54,861 Train Loss: 0.0002947, Val Loss: 0.0003337 +2025-03-25 00:58:54,861 Epoch 444/2000 +2025-03-25 01:00:00,356 Current Learning Rate: 0.0008852566 +2025-03-25 01:00:00,357 Train Loss: 0.0002903, Val Loss: 0.0003295 +2025-03-25 01:00:00,357 Epoch 445/2000 +2025-03-25 01:01:06,142 Current Learning Rate: 0.0008802030 +2025-03-25 01:01:06,143 Train Loss: 0.0002870, Val Loss: 0.0003277 +2025-03-25 01:01:06,144 Epoch 446/2000 +2025-03-25 01:02:13,179 Current Learning Rate: 0.0008750555 +2025-03-25 01:02:13,180 Train Loss: 0.0002849, Val Loss: 0.0003280 +2025-03-25 01:02:13,180 Epoch 447/2000 +2025-03-25 01:03:19,419 Current Learning Rate: 0.0008698155 +2025-03-25 01:03:19,419 Train Loss: 0.0002850, Val Loss: 0.0003332 +2025-03-25 01:03:19,420 Epoch 448/2000 +2025-03-25 01:04:24,956 Current Learning Rate: 0.0008644843 +2025-03-25 01:04:24,957 Train Loss: 0.0002857, Val Loss: 0.0003346 +2025-03-25 01:04:24,957 Epoch 449/2000 +2025-03-25 01:05:30,945 Current Learning Rate: 0.0008590631 +2025-03-25 01:05:30,946 Train Loss: 0.0002859, Val Loss: 0.0003358 +2025-03-25 01:05:30,946 Epoch 450/2000 +2025-03-25 01:06:36,960 Current Learning Rate: 0.0008535534 +2025-03-25 01:06:36,960 Train Loss: 0.0002889, Val Loss: 0.0003375 +2025-03-25 01:06:36,960 Epoch 451/2000 +2025-03-25 01:07:43,059 Current Learning 
Rate: 0.0008479564 +2025-03-25 01:07:43,060 Train Loss: 0.0002893, Val Loss: 0.0003337 +2025-03-25 01:07:43,060 Epoch 452/2000 +2025-03-25 01:08:49,383 Current Learning Rate: 0.0008422736 +2025-03-25 01:08:49,384 Train Loss: 0.0002906, Val Loss: 0.0003336 +2025-03-25 01:08:49,384 Epoch 453/2000 +2025-03-25 01:09:55,178 Current Learning Rate: 0.0008365063 +2025-03-25 01:09:55,178 Train Loss: 0.0002913, Val Loss: 0.0003345 +2025-03-25 01:09:55,179 Epoch 454/2000 +2025-03-25 01:11:01,164 Current Learning Rate: 0.0008306559 +2025-03-25 01:11:01,165 Train Loss: 0.0002935, Val Loss: 0.0003382 +2025-03-25 01:11:01,165 Epoch 455/2000 +2025-03-25 01:12:07,183 Current Learning Rate: 0.0008247240 +2025-03-25 01:12:07,184 Train Loss: 0.0002935, Val Loss: 0.0003374 +2025-03-25 01:12:07,184 Epoch 456/2000 +2025-03-25 01:13:13,540 Current Learning Rate: 0.0008187120 +2025-03-25 01:13:13,540 Train Loss: 0.0002905, Val Loss: 0.0003359 +2025-03-25 01:13:13,541 Epoch 457/2000 +2025-03-25 01:14:20,161 Current Learning Rate: 0.0008126213 +2025-03-25 01:14:20,162 Train Loss: 0.0002868, Val Loss: 0.0003286 +2025-03-25 01:14:20,162 Epoch 458/2000 +2025-03-25 01:15:26,708 Current Learning Rate: 0.0008064535 +2025-03-25 01:15:26,709 Train Loss: 0.0002829, Val Loss: 0.0003228 +2025-03-25 01:15:26,709 Epoch 459/2000 +2025-03-25 01:16:32,220 Current Learning Rate: 0.0008002101 +2025-03-25 01:16:32,220 Train Loss: 0.0002802, Val Loss: 0.0003187 +2025-03-25 01:16:32,221 Epoch 460/2000 +2025-03-25 01:17:38,323 Current Learning Rate: 0.0007938926 +2025-03-25 01:17:38,324 Train Loss: 0.0002788, Val Loss: 0.0003184 +2025-03-25 01:17:38,324 Epoch 461/2000 +2025-03-25 01:18:44,372 Current Learning Rate: 0.0007875026 +2025-03-25 01:18:44,372 Train Loss: 0.0002782, Val Loss: 0.0003201 +2025-03-25 01:18:44,373 Epoch 462/2000 +2025-03-25 01:19:50,606 Current Learning Rate: 0.0007810417 +2025-03-25 01:19:50,607 Train Loss: 0.0002777, Val Loss: 0.0003196 +2025-03-25 01:19:50,607 Epoch 463/2000 +2025-03-25 
01:20:57,296 Current Learning Rate: 0.0007745114 +2025-03-25 01:20:57,297 Train Loss: 0.0002775, Val Loss: 0.0003205 +2025-03-25 01:20:57,297 Epoch 464/2000 +2025-03-25 01:22:03,934 Current Learning Rate: 0.0007679134 +2025-03-25 01:22:03,934 Train Loss: 0.0002775, Val Loss: 0.0003205 +2025-03-25 01:22:03,934 Epoch 465/2000 +2025-03-25 01:23:09,536 Current Learning Rate: 0.0007612493 +2025-03-25 01:23:09,536 Train Loss: 0.0002783, Val Loss: 0.0003238 +2025-03-25 01:23:09,537 Epoch 466/2000 +2025-03-25 01:24:16,193 Current Learning Rate: 0.0007545207 +2025-03-25 01:24:16,194 Train Loss: 0.0002811, Val Loss: 0.0003208 +2025-03-25 01:24:16,194 Epoch 467/2000 +2025-03-25 01:25:22,631 Current Learning Rate: 0.0007477293 +2025-03-25 01:25:22,631 Train Loss: 0.0002829, Val Loss: 0.0003266 +2025-03-25 01:25:22,631 Epoch 468/2000 +2025-03-25 01:26:29,172 Current Learning Rate: 0.0007408768 +2025-03-25 01:26:29,173 Train Loss: 0.0002848, Val Loss: 0.0003266 +2025-03-25 01:26:29,173 Epoch 469/2000 +2025-03-25 01:27:35,348 Current Learning Rate: 0.0007339649 +2025-03-25 01:27:35,348 Train Loss: 0.0002838, Val Loss: 0.0003249 +2025-03-25 01:27:35,348 Epoch 470/2000 +2025-03-25 01:28:40,764 Current Learning Rate: 0.0007269952 +2025-03-25 01:28:40,765 Train Loss: 0.0002817, Val Loss: 0.0003246 +2025-03-25 01:28:40,765 Epoch 471/2000 +2025-03-25 01:29:46,657 Current Learning Rate: 0.0007199696 +2025-03-25 01:29:46,657 Train Loss: 0.0002790, Val Loss: 0.0003183 +2025-03-25 01:29:46,658 Epoch 472/2000 +2025-03-25 01:30:52,605 Current Learning Rate: 0.0007128896 +2025-03-25 01:30:52,606 Train Loss: 0.0002765, Val Loss: 0.0003156 +2025-03-25 01:30:52,606 Epoch 473/2000 +2025-03-25 01:31:58,652 Current Learning Rate: 0.0007057572 +2025-03-25 01:31:58,652 Train Loss: 0.0002744, Val Loss: 0.0003126 +2025-03-25 01:31:58,652 Epoch 474/2000 +2025-03-25 01:33:04,527 Current Learning Rate: 0.0006985739 +2025-03-25 01:33:04,528 Train Loss: 0.0002732, Val Loss: 0.0003120 +2025-03-25 
01:33:04,528 Epoch 475/2000 +2025-03-25 01:34:10,142 Current Learning Rate: 0.0006913417 +2025-03-25 01:34:10,143 Train Loss: 0.0002724, Val Loss: 0.0003112 +2025-03-25 01:34:10,143 Epoch 476/2000 +2025-03-25 01:35:15,885 Current Learning Rate: 0.0006840623 +2025-03-25 01:35:15,886 Train Loss: 0.0002718, Val Loss: 0.0003107 +2025-03-25 01:35:15,886 Epoch 477/2000 +2025-03-25 01:36:21,990 Current Learning Rate: 0.0006767374 +2025-03-25 01:36:21,990 Train Loss: 0.0002713, Val Loss: 0.0003102 +2025-03-25 01:36:21,990 Epoch 478/2000 +2025-03-25 01:37:28,009 Current Learning Rate: 0.0006693690 +2025-03-25 01:37:28,009 Train Loss: 0.0002711, Val Loss: 0.0003108 +2025-03-25 01:37:28,010 Epoch 479/2000 +2025-03-25 01:38:34,419 Current Learning Rate: 0.0006619587 +2025-03-25 01:38:34,419 Train Loss: 0.0002715, Val Loss: 0.0003130 +2025-03-25 01:38:34,419 Epoch 480/2000 +2025-03-25 01:39:40,581 Current Learning Rate: 0.0006545085 +2025-03-25 01:39:40,582 Train Loss: 0.0002730, Val Loss: 0.0003142 +2025-03-25 01:39:40,582 Epoch 481/2000 +2025-03-25 01:40:46,872 Current Learning Rate: 0.0006470202 +2025-03-25 01:40:46,873 Train Loss: 0.0002768, Val Loss: 0.0003120 +2025-03-25 01:40:46,873 Epoch 482/2000 +2025-03-25 01:41:52,129 Current Learning Rate: 0.0006394956 +2025-03-25 01:41:52,130 Train Loss: 0.0002757, Val Loss: 0.0003130 +2025-03-25 01:41:52,130 Epoch 483/2000 +2025-03-25 01:42:58,544 Current Learning Rate: 0.0006319365 +2025-03-25 01:42:58,545 Train Loss: 0.0002736, Val Loss: 0.0003085 +2025-03-25 01:42:58,545 Epoch 484/2000 +2025-03-25 01:44:04,660 Current Learning Rate: 0.0006243449 +2025-03-25 01:44:04,661 Train Loss: 0.0002726, Val Loss: 0.0003073 +2025-03-25 01:44:04,662 Epoch 485/2000 +2025-03-25 01:45:10,636 Current Learning Rate: 0.0006167227 +2025-03-25 01:45:10,636 Train Loss: 0.0002716, Val Loss: 0.0003056 +2025-03-25 01:45:10,637 Epoch 486/2000 +2025-03-25 01:46:16,332 Current Learning Rate: 0.0006090716 +2025-03-25 01:46:16,333 Train Loss: 0.0002702, Val 
Loss: 0.0003041 +2025-03-25 01:46:16,334 Epoch 487/2000 +2025-03-25 01:47:22,602 Current Learning Rate: 0.0006013936 +2025-03-25 01:47:22,603 Train Loss: 0.0002684, Val Loss: 0.0003020 +2025-03-25 01:47:22,603 Epoch 488/2000 +2025-03-25 01:48:28,313 Current Learning Rate: 0.0005936907 +2025-03-25 01:48:28,313 Train Loss: 0.0002664, Val Loss: 0.0003010 +2025-03-25 01:48:28,314 Epoch 489/2000 +2025-03-25 01:49:34,302 Current Learning Rate: 0.0005859646 +2025-03-25 01:49:34,302 Train Loss: 0.0002654, Val Loss: 0.0002996 +2025-03-25 01:49:34,302 Epoch 490/2000 +2025-03-25 01:50:40,171 Current Learning Rate: 0.0005782172 +2025-03-25 01:50:40,172 Train Loss: 0.0002650, Val Loss: 0.0002986 +2025-03-25 01:50:40,172 Epoch 491/2000 +2025-03-25 01:51:45,664 Current Learning Rate: 0.0005704506 +2025-03-25 01:51:45,664 Train Loss: 0.0002648, Val Loss: 0.0002983 +2025-03-25 01:51:45,665 Epoch 492/2000 +2025-03-25 01:52:52,171 Current Learning Rate: 0.0005626666 +2025-03-25 01:52:52,171 Train Loss: 0.0002650, Val Loss: 0.0002982 +2025-03-25 01:52:52,172 Epoch 493/2000 +2025-03-25 01:53:58,301 Current Learning Rate: 0.0005548672 +2025-03-25 01:53:58,302 Train Loss: 0.0002659, Val Loss: 0.0003050 +2025-03-25 01:53:58,302 Epoch 494/2000 +2025-03-25 01:55:04,457 Current Learning Rate: 0.0005470542 +2025-03-25 01:55:04,457 Train Loss: 0.0002666, Val Loss: 0.0003035 +2025-03-25 01:55:04,458 Epoch 495/2000 +2025-03-25 01:56:10,865 Current Learning Rate: 0.0005392295 +2025-03-25 01:56:10,866 Train Loss: 0.0002670, Val Loss: 0.0002991 +2025-03-25 01:56:10,866 Epoch 496/2000 +2025-03-25 01:57:17,119 Current Learning Rate: 0.0005313953 +2025-03-25 01:57:17,120 Train Loss: 0.0002671, Val Loss: 0.0002997 +2025-03-25 01:57:17,120 Epoch 497/2000 +2025-03-25 01:58:23,741 Current Learning Rate: 0.0005235532 +2025-03-25 01:58:23,816 Train Loss: 0.0002648, Val Loss: 0.0002972 +2025-03-25 01:58:23,817 Epoch 498/2000 +2025-03-25 01:59:29,912 Current Learning Rate: 0.0005157054 +2025-03-25 
01:59:30,001 Train Loss: 0.0002628, Val Loss: 0.0002950 +2025-03-25 01:59:30,001 Epoch 499/2000 +2025-03-25 02:00:35,418 Current Learning Rate: 0.0005078537 +2025-03-25 02:00:35,491 Train Loss: 0.0002613, Val Loss: 0.0002932 +2025-03-25 02:00:35,492 Epoch 500/2000 +2025-03-25 02:01:42,086 Current Learning Rate: 0.0005000000 +2025-03-25 02:01:42,172 Train Loss: 0.0002602, Val Loss: 0.0002926 +2025-03-25 02:01:42,172 Epoch 501/2000 +2025-03-25 02:02:48,522 Current Learning Rate: 0.0004921463 +2025-03-25 02:02:48,522 Train Loss: 0.0002594, Val Loss: 0.0002938 +2025-03-25 02:02:48,523 Epoch 502/2000 +2025-03-25 02:03:54,339 Current Learning Rate: 0.0004842946 +2025-03-25 02:03:54,339 Train Loss: 0.0002590, Val Loss: 0.0002939 +2025-03-25 02:03:54,339 Epoch 503/2000 +2025-03-25 02:05:00,816 Current Learning Rate: 0.0004764468 +2025-03-25 02:05:00,817 Train Loss: 0.0002587, Val Loss: 0.0002940 +2025-03-25 02:05:00,817 Epoch 504/2000 +2025-03-25 02:06:06,591 Current Learning Rate: 0.0004686047 +2025-03-25 02:06:06,592 Train Loss: 0.0002587, Val Loss: 0.0002949 +2025-03-25 02:06:06,592 Epoch 505/2000 +2025-03-25 02:07:12,858 Current Learning Rate: 0.0004607705 +2025-03-25 02:07:12,859 Train Loss: 0.0002590, Val Loss: 0.0002932 +2025-03-25 02:07:12,859 Epoch 506/2000 +2025-03-25 02:08:18,327 Current Learning Rate: 0.0004529458 +2025-03-25 02:08:18,404 Train Loss: 0.0002601, Val Loss: 0.0002891 +2025-03-25 02:08:18,405 Epoch 507/2000 +2025-03-25 02:09:24,157 Current Learning Rate: 0.0004451328 +2025-03-25 02:09:24,240 Train Loss: 0.0002605, Val Loss: 0.0002879 +2025-03-25 02:09:24,241 Epoch 508/2000 +2025-03-25 02:10:30,097 Current Learning Rate: 0.0004373334 +2025-03-25 02:10:30,194 Train Loss: 0.0002587, Val Loss: 0.0002848 +2025-03-25 02:10:30,194 Epoch 509/2000 +2025-03-25 02:11:35,704 Current Learning Rate: 0.0004295494 +2025-03-25 02:11:35,776 Train Loss: 0.0002573, Val Loss: 0.0002838 +2025-03-25 02:11:35,776 Epoch 510/2000 +2025-03-25 02:12:42,218 Current Learning 
Rate: 0.0004217828 +2025-03-25 02:12:42,284 Train Loss: 0.0002563, Val Loss: 0.0002833 +2025-03-25 02:12:42,284 Epoch 511/2000 +2025-03-25 02:13:48,135 Current Learning Rate: 0.0004140354 +2025-03-25 02:13:48,206 Train Loss: 0.0002555, Val Loss: 0.0002828 +2025-03-25 02:13:48,206 Epoch 512/2000 +2025-03-25 02:14:53,997 Current Learning Rate: 0.0004063093 +2025-03-25 02:14:54,074 Train Loss: 0.0002549, Val Loss: 0.0002825 +2025-03-25 02:14:54,074 Epoch 513/2000 +2025-03-25 02:16:00,189 Current Learning Rate: 0.0003986064 +2025-03-25 02:16:00,264 Train Loss: 0.0002545, Val Loss: 0.0002823 +2025-03-25 02:16:00,265 Epoch 514/2000 +2025-03-25 02:17:07,456 Current Learning Rate: 0.0003909284 +2025-03-25 02:17:07,543 Train Loss: 0.0002543, Val Loss: 0.0002821 +2025-03-25 02:17:07,543 Epoch 515/2000 +2025-03-25 02:18:13,937 Current Learning Rate: 0.0003832773 +2025-03-25 02:18:14,023 Train Loss: 0.0002543, Val Loss: 0.0002818 +2025-03-25 02:18:14,023 Epoch 516/2000 +2025-03-25 02:19:20,353 Current Learning Rate: 0.0003756551 +2025-03-25 02:19:20,436 Train Loss: 0.0002543, Val Loss: 0.0002809 +2025-03-25 02:19:20,437 Epoch 517/2000 +2025-03-25 02:20:26,416 Current Learning Rate: 0.0003680635 +2025-03-25 02:20:26,502 Train Loss: 0.0002540, Val Loss: 0.0002807 +2025-03-25 02:20:26,502 Epoch 518/2000 +2025-03-25 02:21:32,587 Current Learning Rate: 0.0003605044 +2025-03-25 02:21:32,679 Train Loss: 0.0002530, Val Loss: 0.0002801 +2025-03-25 02:21:32,679 Epoch 519/2000 +2025-03-25 02:22:38,553 Current Learning Rate: 0.0003529798 +2025-03-25 02:22:38,641 Train Loss: 0.0002522, Val Loss: 0.0002793 +2025-03-25 02:22:38,641 Epoch 520/2000 +2025-03-25 02:23:45,775 Current Learning Rate: 0.0003454915 +2025-03-25 02:23:45,861 Train Loss: 0.0002516, Val Loss: 0.0002786 +2025-03-25 02:23:45,862 Epoch 521/2000 +2025-03-25 02:24:52,210 Current Learning Rate: 0.0003380413 +2025-03-25 02:24:52,294 Train Loss: 0.0002510, Val Loss: 0.0002781 +2025-03-25 02:24:52,295 Epoch 522/2000 +2025-03-25 
02:25:57,808 Current Learning Rate: 0.0003306310 +2025-03-25 02:25:57,901 Train Loss: 0.0002506, Val Loss: 0.0002776 +2025-03-25 02:25:57,901 Epoch 523/2000 +2025-03-25 02:27:04,207 Current Learning Rate: 0.0003232626 +2025-03-25 02:27:04,296 Train Loss: 0.0002502, Val Loss: 0.0002773 +2025-03-25 02:27:04,297 Epoch 524/2000 +2025-03-25 02:28:10,072 Current Learning Rate: 0.0003159377 +2025-03-25 02:28:10,157 Train Loss: 0.0002500, Val Loss: 0.0002771 +2025-03-25 02:28:10,157 Epoch 525/2000 +2025-03-25 02:29:16,620 Current Learning Rate: 0.0003086583 +2025-03-25 02:29:16,698 Train Loss: 0.0002498, Val Loss: 0.0002771 +2025-03-25 02:29:16,698 Epoch 526/2000 +2025-03-25 02:30:21,864 Current Learning Rate: 0.0003014261 +2025-03-25 02:30:21,957 Train Loss: 0.0002495, Val Loss: 0.0002769 +2025-03-25 02:30:21,957 Epoch 527/2000 +2025-03-25 02:31:28,262 Current Learning Rate: 0.0002942428 +2025-03-25 02:31:28,353 Train Loss: 0.0002489, Val Loss: 0.0002765 +2025-03-25 02:31:28,354 Epoch 528/2000 +2025-03-25 02:32:34,592 Current Learning Rate: 0.0002871104 +2025-03-25 02:32:34,664 Train Loss: 0.0002482, Val Loss: 0.0002759 +2025-03-25 02:32:34,665 Epoch 529/2000 +2025-03-25 02:33:40,049 Current Learning Rate: 0.0002800304 +2025-03-25 02:33:40,137 Train Loss: 0.0002476, Val Loss: 0.0002754 +2025-03-25 02:33:40,137 Epoch 530/2000 +2025-03-25 02:34:45,278 Current Learning Rate: 0.0002730048 +2025-03-25 02:34:45,351 Train Loss: 0.0002471, Val Loss: 0.0002749 +2025-03-25 02:34:45,351 Epoch 531/2000 +2025-03-25 02:35:51,319 Current Learning Rate: 0.0002660351 +2025-03-25 02:35:51,409 Train Loss: 0.0002467, Val Loss: 0.0002744 +2025-03-25 02:35:51,409 Epoch 532/2000 +2025-03-25 02:36:57,250 Current Learning Rate: 0.0002591232 +2025-03-25 02:36:57,325 Train Loss: 0.0002463, Val Loss: 0.0002739 +2025-03-25 02:36:57,325 Epoch 533/2000 +2025-03-25 02:38:03,495 Current Learning Rate: 0.0002522707 +2025-03-25 02:38:03,589 Train Loss: 0.0002460, Val Loss: 0.0002734 +2025-03-25 
02:38:03,590 Epoch 534/2000 +2025-03-25 02:39:09,341 Current Learning Rate: 0.0002454793 +2025-03-25 02:39:09,431 Train Loss: 0.0002457, Val Loss: 0.0002730 +2025-03-25 02:39:09,432 Epoch 535/2000 +2025-03-25 02:40:15,456 Current Learning Rate: 0.0002387507 +2025-03-25 02:40:15,547 Train Loss: 0.0002453, Val Loss: 0.0002727 +2025-03-25 02:40:15,547 Epoch 536/2000 +2025-03-25 02:41:21,755 Current Learning Rate: 0.0002320866 +2025-03-25 02:41:21,847 Train Loss: 0.0002450, Val Loss: 0.0002725 +2025-03-25 02:41:21,847 Epoch 537/2000 +2025-03-25 02:42:27,995 Current Learning Rate: 0.0002254886 +2025-03-25 02:42:28,089 Train Loss: 0.0002446, Val Loss: 0.0002719 +2025-03-25 02:42:28,089 Epoch 538/2000 +2025-03-25 02:43:34,031 Current Learning Rate: 0.0002189583 +2025-03-25 02:43:34,115 Train Loss: 0.0002441, Val Loss: 0.0002711 +2025-03-25 02:43:34,115 Epoch 539/2000 +2025-03-25 02:44:39,947 Current Learning Rate: 0.0002124974 +2025-03-25 02:44:40,027 Train Loss: 0.0002436, Val Loss: 0.0002705 +2025-03-25 02:44:40,028 Epoch 540/2000 +2025-03-25 02:45:46,364 Current Learning Rate: 0.0002061074 +2025-03-25 02:45:46,451 Train Loss: 0.0002431, Val Loss: 0.0002700 +2025-03-25 02:45:46,451 Epoch 541/2000 +2025-03-25 02:46:52,901 Current Learning Rate: 0.0001997899 +2025-03-25 02:46:52,986 Train Loss: 0.0002428, Val Loss: 0.0002695 +2025-03-25 02:46:52,986 Epoch 542/2000 +2025-03-25 02:47:59,039 Current Learning Rate: 0.0001935465 +2025-03-25 02:47:59,127 Train Loss: 0.0002424, Val Loss: 0.0002691 +2025-03-25 02:47:59,127 Epoch 543/2000 +2025-03-25 02:49:05,334 Current Learning Rate: 0.0001873787 +2025-03-25 02:49:05,427 Train Loss: 0.0002420, Val Loss: 0.0002686 +2025-03-25 02:49:05,428 Epoch 544/2000 +2025-03-25 02:50:11,446 Current Learning Rate: 0.0001812880 +2025-03-25 02:50:11,540 Train Loss: 0.0002416, Val Loss: 0.0002682 +2025-03-25 02:50:11,540 Epoch 545/2000 +2025-03-25 02:51:17,448 Current Learning Rate: 0.0001752760 +2025-03-25 02:51:17,536 Train Loss: 0.0002413, Val 
Loss: 0.0002678 +2025-03-25 02:51:17,537 Epoch 546/2000 +2025-03-25 02:52:23,610 Current Learning Rate: 0.0001693441 +2025-03-25 02:52:23,700 Train Loss: 0.0002409, Val Loss: 0.0002674 +2025-03-25 02:52:23,700 Epoch 547/2000 +2025-03-25 02:53:30,013 Current Learning Rate: 0.0001634937 +2025-03-25 02:53:30,104 Train Loss: 0.0002405, Val Loss: 0.0002671 +2025-03-25 02:53:30,105 Epoch 548/2000 +2025-03-25 02:54:36,360 Current Learning Rate: 0.0001577264 +2025-03-25 02:54:36,448 Train Loss: 0.0002401, Val Loss: 0.0002669 +2025-03-25 02:54:36,448 Epoch 549/2000 +2025-03-25 02:55:42,451 Current Learning Rate: 0.0001520436 +2025-03-25 02:55:42,544 Train Loss: 0.0002398, Val Loss: 0.0002667 +2025-03-25 02:55:42,545 Epoch 550/2000 +2025-03-25 02:56:49,190 Current Learning Rate: 0.0001464466 +2025-03-25 02:56:49,270 Train Loss: 0.0002395, Val Loss: 0.0002665 +2025-03-25 02:56:49,271 Epoch 551/2000 +2025-03-25 02:57:55,016 Current Learning Rate: 0.0001409369 +2025-03-25 02:57:55,100 Train Loss: 0.0002392, Val Loss: 0.0002664 +2025-03-25 02:57:55,101 Epoch 552/2000 +2025-03-25 02:59:01,391 Current Learning Rate: 0.0001355157 +2025-03-25 02:59:01,471 Train Loss: 0.0002388, Val Loss: 0.0002662 +2025-03-25 02:59:01,472 Epoch 553/2000 +2025-03-25 03:00:07,882 Current Learning Rate: 0.0001301845 +2025-03-25 03:00:07,965 Train Loss: 0.0002385, Val Loss: 0.0002661 +2025-03-25 03:00:07,965 Epoch 554/2000 +2025-03-25 03:01:14,434 Current Learning Rate: 0.0001249445 +2025-03-25 03:01:14,528 Train Loss: 0.0002382, Val Loss: 0.0002661 +2025-03-25 03:01:14,528 Epoch 555/2000 +2025-03-25 03:02:20,083 Current Learning Rate: 0.0001197970 +2025-03-25 03:02:20,085 Train Loss: 0.0002379, Val Loss: 0.0002662 +2025-03-25 03:02:20,085 Epoch 556/2000 +2025-03-25 03:03:26,616 Current Learning Rate: 0.0001147434 +2025-03-25 03:03:26,616 Train Loss: 0.0002377, Val Loss: 0.0002664 +2025-03-25 03:03:26,616 Epoch 557/2000 +2025-03-25 03:04:33,563 Current Learning Rate: 0.0001097848 +2025-03-25 
03:04:33,563 Train Loss: 0.0002374, Val Loss: 0.0002666 +2025-03-25 03:04:33,564 Epoch 558/2000 +2025-03-25 03:05:39,838 Current Learning Rate: 0.0001049225 +2025-03-25 03:05:39,839 Train Loss: 0.0002371, Val Loss: 0.0002666 +2025-03-25 03:05:39,839 Epoch 559/2000 +2025-03-25 03:06:46,021 Current Learning Rate: 0.0001001577 +2025-03-25 03:06:46,112 Train Loss: 0.0002368, Val Loss: 0.0002659 +2025-03-25 03:06:46,112 Epoch 560/2000 +2025-03-25 03:07:52,091 Current Learning Rate: 0.0000954915 +2025-03-25 03:07:52,171 Train Loss: 0.0002365, Val Loss: 0.0002650 +2025-03-25 03:07:52,172 Epoch 561/2000 +2025-03-25 03:08:58,096 Current Learning Rate: 0.0000909251 +2025-03-25 03:08:58,162 Train Loss: 0.0002362, Val Loss: 0.0002643 +2025-03-25 03:08:58,162 Epoch 562/2000 +2025-03-25 03:10:04,151 Current Learning Rate: 0.0000864597 +2025-03-25 03:10:04,227 Train Loss: 0.0002359, Val Loss: 0.0002637 +2025-03-25 03:10:04,228 Epoch 563/2000 +2025-03-25 03:11:10,251 Current Learning Rate: 0.0000820963 +2025-03-25 03:11:10,328 Train Loss: 0.0002357, Val Loss: 0.0002633 +2025-03-25 03:11:10,328 Epoch 564/2000 +2025-03-25 03:12:17,064 Current Learning Rate: 0.0000778360 +2025-03-25 03:12:17,155 Train Loss: 0.0002354, Val Loss: 0.0002630 +2025-03-25 03:12:17,156 Epoch 565/2000 +2025-03-25 03:13:22,973 Current Learning Rate: 0.0000736799 +2025-03-25 03:13:23,053 Train Loss: 0.0002351, Val Loss: 0.0002626 +2025-03-25 03:13:23,054 Epoch 566/2000 +2025-03-25 03:14:29,489 Current Learning Rate: 0.0000696290 +2025-03-25 03:14:29,554 Train Loss: 0.0002349, Val Loss: 0.0002622 +2025-03-25 03:14:29,554 Epoch 567/2000 +2025-03-25 03:15:35,506 Current Learning Rate: 0.0000656842 +2025-03-25 03:15:35,579 Train Loss: 0.0002346, Val Loss: 0.0002620 +2025-03-25 03:15:35,579 Epoch 568/2000 +2025-03-25 03:16:41,779 Current Learning Rate: 0.0000618467 +2025-03-25 03:16:41,851 Train Loss: 0.0002344, Val Loss: 0.0002618 +2025-03-25 03:16:41,851 Epoch 569/2000 +2025-03-25 03:17:48,891 Current Learning 
Rate: 0.0000581172 +2025-03-25 03:17:48,969 Train Loss: 0.0002342, Val Loss: 0.0002616 +2025-03-25 03:17:48,970 Epoch 570/2000 +2025-03-25 03:18:55,105 Current Learning Rate: 0.0000544967 +2025-03-25 03:18:55,182 Train Loss: 0.0002339, Val Loss: 0.0002614 +2025-03-25 03:18:55,182 Epoch 571/2000 +2025-03-25 03:20:00,940 Current Learning Rate: 0.0000509862 +2025-03-25 03:20:01,022 Train Loss: 0.0002337, Val Loss: 0.0002612 +2025-03-25 03:20:01,022 Epoch 572/2000 +2025-03-25 03:21:07,571 Current Learning Rate: 0.0000475865 +2025-03-25 03:21:07,677 Train Loss: 0.0002335, Val Loss: 0.0002611 +2025-03-25 03:21:07,677 Epoch 573/2000 +2025-03-25 03:22:13,930 Current Learning Rate: 0.0000442984 +2025-03-25 03:22:14,012 Train Loss: 0.0002333, Val Loss: 0.0002609 +2025-03-25 03:22:14,012 Epoch 574/2000 +2025-03-25 03:23:20,132 Current Learning Rate: 0.0000411227 +2025-03-25 03:23:20,214 Train Loss: 0.0002331, Val Loss: 0.0002608 +2025-03-25 03:23:20,214 Epoch 575/2000 +2025-03-25 03:24:26,192 Current Learning Rate: 0.0000380602 +2025-03-25 03:24:26,270 Train Loss: 0.0002329, Val Loss: 0.0002606 +2025-03-25 03:24:26,270 Epoch 576/2000 +2025-03-25 03:25:33,248 Current Learning Rate: 0.0000351118 +2025-03-25 03:25:33,329 Train Loss: 0.0002326, Val Loss: 0.0002604 +2025-03-25 03:25:33,330 Epoch 577/2000 +2025-03-25 03:26:39,503 Current Learning Rate: 0.0000322780 +2025-03-25 03:26:39,600 Train Loss: 0.0002325, Val Loss: 0.0002602 +2025-03-25 03:26:39,600 Epoch 578/2000 +2025-03-25 03:27:45,562 Current Learning Rate: 0.0000295596 +2025-03-25 03:27:45,653 Train Loss: 0.0002323, Val Loss: 0.0002600 +2025-03-25 03:27:45,653 Epoch 579/2000 +2025-03-25 03:28:52,486 Current Learning Rate: 0.0000269573 +2025-03-25 03:28:52,568 Train Loss: 0.0002321, Val Loss: 0.0002598 +2025-03-25 03:28:52,569 Epoch 580/2000 +2025-03-25 03:29:59,012 Current Learning Rate: 0.0000244717 +2025-03-25 03:29:59,094 Train Loss: 0.0002319, Val Loss: 0.0002597 +2025-03-25 03:29:59,094 Epoch 581/2000 +2025-03-25 
03:31:05,228 Current Learning Rate: 0.0000221035 +2025-03-25 03:31:05,325 Train Loss: 0.0002318, Val Loss: 0.0002596 +2025-03-25 03:31:05,325 Epoch 582/2000 +2025-03-25 03:32:11,802 Current Learning Rate: 0.0000198532 +2025-03-25 03:32:11,886 Train Loss: 0.0002316, Val Loss: 0.0002595 +2025-03-25 03:32:11,886 Epoch 583/2000 +2025-03-25 03:33:18,400 Current Learning Rate: 0.0000177213 +2025-03-25 03:33:18,486 Train Loss: 0.0002314, Val Loss: 0.0002594 +2025-03-25 03:33:18,486 Epoch 584/2000 +2025-03-25 03:34:24,395 Current Learning Rate: 0.0000157084 +2025-03-25 03:34:24,481 Train Loss: 0.0002313, Val Loss: 0.0002593 +2025-03-25 03:34:24,482 Epoch 585/2000 +2025-03-25 03:35:30,402 Current Learning Rate: 0.0000138150 +2025-03-25 03:35:30,489 Train Loss: 0.0002312, Val Loss: 0.0002592 +2025-03-25 03:35:30,489 Epoch 586/2000 +2025-03-25 03:36:36,437 Current Learning Rate: 0.0000120416 +2025-03-25 03:36:36,527 Train Loss: 0.0002310, Val Loss: 0.0002591 +2025-03-25 03:36:36,527 Epoch 587/2000 +2025-03-25 03:37:42,657 Current Learning Rate: 0.0000103886 +2025-03-25 03:37:42,757 Train Loss: 0.0002309, Val Loss: 0.0002590 +2025-03-25 03:37:42,757 Epoch 588/2000 +2025-03-25 03:38:48,790 Current Learning Rate: 0.0000088564 +2025-03-25 03:38:48,871 Train Loss: 0.0002308, Val Loss: 0.0002589 +2025-03-25 03:38:48,871 Epoch 589/2000 +2025-03-25 03:39:54,720 Current Learning Rate: 0.0000074453 +2025-03-25 03:39:54,825 Train Loss: 0.0002307, Val Loss: 0.0002588 +2025-03-25 03:39:54,825 Epoch 590/2000 +2025-03-25 03:41:00,531 Current Learning Rate: 0.0000061558 +2025-03-25 03:41:00,621 Train Loss: 0.0002306, Val Loss: 0.0002588 +2025-03-25 03:41:00,621 Epoch 591/2000 +2025-03-25 03:42:05,975 Current Learning Rate: 0.0000049882 +2025-03-25 03:42:06,049 Train Loss: 0.0002305, Val Loss: 0.0002587 +2025-03-25 03:42:06,050 Epoch 592/2000 +2025-03-25 03:43:12,000 Current Learning Rate: 0.0000039426 +2025-03-25 03:43:12,083 Train Loss: 0.0002304, Val Loss: 0.0002587 +2025-03-25 
03:43:12,084 Epoch 593/2000 +2025-03-25 03:44:18,017 Current Learning Rate: 0.0000030195 +2025-03-25 03:44:18,100 Train Loss: 0.0002303, Val Loss: 0.0002587 +2025-03-25 03:44:18,100 Epoch 594/2000 +2025-03-25 03:45:24,016 Current Learning Rate: 0.0000022190 +2025-03-25 03:45:24,090 Train Loss: 0.0002302, Val Loss: 0.0002586 +2025-03-25 03:45:24,091 Epoch 595/2000 +2025-03-25 03:46:29,512 Current Learning Rate: 0.0000015413 +2025-03-25 03:46:29,612 Train Loss: 0.0002301, Val Loss: 0.0002586 +2025-03-25 03:46:29,612 Epoch 596/2000 +2025-03-25 03:47:35,914 Current Learning Rate: 0.0000009866 +2025-03-25 03:47:35,990 Train Loss: 0.0002301, Val Loss: 0.0002586 +2025-03-25 03:47:35,990 Epoch 597/2000 +2025-03-25 03:48:41,420 Current Learning Rate: 0.0000005551 +2025-03-25 03:48:41,495 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:48:41,495 Epoch 598/2000 +2025-03-25 03:49:48,046 Current Learning Rate: 0.0000002467 +2025-03-25 03:49:48,135 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:49:48,135 Epoch 599/2000 +2025-03-25 03:50:54,617 Current Learning Rate: 0.0000000617 +2025-03-25 03:50:54,618 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:50:54,618 Epoch 600/2000 +2025-03-25 03:52:00,583 Current Learning Rate: 0.0000000000 +2025-03-25 03:52:00,583 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:52:00,584 Epoch 601/2000 +2025-03-25 03:53:06,670 Current Learning Rate: 0.0000000617 +2025-03-25 03:53:06,670 Train Loss: 0.0002299, Val Loss: 0.0002586 +2025-03-25 03:53:06,670 Epoch 602/2000 +2025-03-25 03:54:13,279 Current Learning Rate: 0.0000002467 +2025-03-25 03:54:13,280 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:54:13,281 Epoch 603/2000 +2025-03-25 03:55:19,427 Current Learning Rate: 0.0000005551 +2025-03-25 03:55:19,428 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:55:19,428 Epoch 604/2000 +2025-03-25 03:56:24,840 Current Learning Rate: 0.0000009866 +2025-03-25 03:56:24,921 Train Loss: 0.0002300, Val 
Loss: 0.0002586 +2025-03-25 03:56:24,921 Epoch 605/2000 +2025-03-25 03:57:30,377 Current Learning Rate: 0.0000015413 +2025-03-25 03:57:30,377 Train Loss: 0.0002300, Val Loss: 0.0002586 +2025-03-25 03:57:30,377 Epoch 606/2000 +2025-03-25 03:58:36,469 Current Learning Rate: 0.0000022190 +2025-03-25 03:58:36,470 Train Loss: 0.0002301, Val Loss: 0.0002586 +2025-03-25 03:58:36,470 Epoch 607/2000 +2025-03-25 03:59:42,936 Current Learning Rate: 0.0000030195 +2025-03-25 03:59:42,937 Train Loss: 0.0002301, Val Loss: 0.0002586 +2025-03-25 03:59:42,937 Epoch 608/2000 +2025-03-25 04:00:49,228 Current Learning Rate: 0.0000039426 +2025-03-25 04:00:49,229 Train Loss: 0.0002302, Val Loss: 0.0002586 +2025-03-25 04:00:49,229 Epoch 609/2000 +2025-03-25 04:01:55,634 Current Learning Rate: 0.0000049882 +2025-03-25 04:01:55,634 Train Loss: 0.0002303, Val Loss: 0.0002586 +2025-03-25 04:01:55,634 Epoch 610/2000 +2025-03-25 04:03:02,556 Current Learning Rate: 0.0000061558 +2025-03-25 04:03:02,557 Train Loss: 0.0002303, Val Loss: 0.0002587 +2025-03-25 04:03:02,557 Epoch 611/2000 +2025-03-25 04:04:08,542 Current Learning Rate: 0.0000074453 +2025-03-25 04:04:08,542 Train Loss: 0.0002304, Val Loss: 0.0002587 +2025-03-25 04:04:08,542 Epoch 612/2000 +2025-03-25 04:05:15,000 Current Learning Rate: 0.0000088564 +2025-03-25 04:05:15,000 Train Loss: 0.0002305, Val Loss: 0.0002587 +2025-03-25 04:05:15,001 Epoch 613/2000 +2025-03-25 04:06:21,037 Current Learning Rate: 0.0000103886 +2025-03-25 04:06:21,038 Train Loss: 0.0002306, Val Loss: 0.0002588 +2025-03-25 04:06:21,038 Epoch 614/2000 +2025-03-25 04:07:26,838 Current Learning Rate: 0.0000120416 +2025-03-25 04:07:26,839 Train Loss: 0.0002307, Val Loss: 0.0002588 +2025-03-25 04:07:26,839 Epoch 615/2000 +2025-03-25 04:08:32,171 Current Learning Rate: 0.0000138150 +2025-03-25 04:08:32,171 Train Loss: 0.0002308, Val Loss: 0.0002589 +2025-03-25 04:08:32,172 Epoch 616/2000 +2025-03-25 04:09:38,541 Current Learning Rate: 0.0000157084 +2025-03-25 
04:09:38,541 Train Loss: 0.0002309, Val Loss: 0.0002590 +2025-03-25 04:09:38,542 Epoch 617/2000 +2025-03-25 04:10:44,844 Current Learning Rate: 0.0000177213 +2025-03-25 04:10:44,845 Train Loss: 0.0002310, Val Loss: 0.0002591 +2025-03-25 04:10:44,845 Epoch 618/2000 +2025-03-25 04:11:51,135 Current Learning Rate: 0.0000198532 +2025-03-25 04:11:51,135 Train Loss: 0.0002311, Val Loss: 0.0002592 +2025-03-25 04:11:51,136 Epoch 619/2000 +2025-03-25 04:12:56,703 Current Learning Rate: 0.0000221035 +2025-03-25 04:12:56,704 Train Loss: 0.0002312, Val Loss: 0.0002592 +2025-03-25 04:12:56,704 Epoch 620/2000 +2025-03-25 04:14:03,401 Current Learning Rate: 0.0000244717 +2025-03-25 04:14:03,401 Train Loss: 0.0002314, Val Loss: 0.0002593 +2025-03-25 04:14:03,402 Epoch 621/2000 +2025-03-25 04:15:09,951 Current Learning Rate: 0.0000269573 +2025-03-25 04:15:09,952 Train Loss: 0.0002315, Val Loss: 0.0002594 +2025-03-25 04:15:09,952 Epoch 622/2000 +2025-03-25 04:16:16,413 Current Learning Rate: 0.0000295596 +2025-03-25 04:16:16,413 Train Loss: 0.0002316, Val Loss: 0.0002594 +2025-03-25 04:16:16,413 Epoch 623/2000 +2025-03-25 04:17:22,110 Current Learning Rate: 0.0000322780 +2025-03-25 04:17:22,110 Train Loss: 0.0002318, Val Loss: 0.0002595 +2025-03-25 04:17:22,111 Epoch 624/2000 +2025-03-25 04:18:28,421 Current Learning Rate: 0.0000351118 +2025-03-25 04:18:28,422 Train Loss: 0.0002319, Val Loss: 0.0002597 +2025-03-25 04:18:28,422 Epoch 625/2000 +2025-03-25 04:19:34,694 Current Learning Rate: 0.0000380602 +2025-03-25 04:19:34,695 Train Loss: 0.0002321, Val Loss: 0.0002598 +2025-03-25 04:19:34,695 Epoch 626/2000 +2025-03-25 04:20:40,860 Current Learning Rate: 0.0000411227 +2025-03-25 04:20:40,860 Train Loss: 0.0002322, Val Loss: 0.0002599 +2025-03-25 04:20:40,860 Epoch 627/2000 +2025-03-25 04:21:46,423 Current Learning Rate: 0.0000442984 +2025-03-25 04:21:46,423 Train Loss: 0.0002324, Val Loss: 0.0002601 +2025-03-25 04:21:46,424 Epoch 628/2000 +2025-03-25 04:22:52,568 Current Learning 
Rate: 0.0000475865 +2025-03-25 04:22:52,569 Train Loss: 0.0002325, Val Loss: 0.0002602 +2025-03-25 04:22:52,569 Epoch 629/2000 +2025-03-25 04:23:59,045 Current Learning Rate: 0.0000509862 +2025-03-25 04:23:59,045 Train Loss: 0.0002327, Val Loss: 0.0002603 +2025-03-25 04:23:59,046 Epoch 630/2000 +2025-03-25 04:25:05,215 Current Learning Rate: 0.0000544967 +2025-03-25 04:25:05,216 Train Loss: 0.0002329, Val Loss: 0.0002604 +2025-03-25 04:25:05,216 Epoch 631/2000 +2025-03-25 04:26:12,063 Current Learning Rate: 0.0000581172 +2025-03-25 04:26:12,064 Train Loss: 0.0002331, Val Loss: 0.0002606 +2025-03-25 04:26:12,064 Epoch 632/2000 +2025-03-25 04:27:17,877 Current Learning Rate: 0.0000618467 +2025-03-25 04:27:17,878 Train Loss: 0.0002332, Val Loss: 0.0002608 +2025-03-25 04:27:17,878 Epoch 633/2000 +2025-03-25 04:28:24,099 Current Learning Rate: 0.0000656842 +2025-03-25 04:28:24,099 Train Loss: 0.0002334, Val Loss: 0.0002610 +2025-03-25 04:28:24,099 Epoch 634/2000 +2025-03-25 04:29:29,972 Current Learning Rate: 0.0000696290 +2025-03-25 04:29:29,973 Train Loss: 0.0002336, Val Loss: 0.0002613 +2025-03-25 04:29:29,973 Epoch 635/2000 +2025-03-25 04:30:36,448 Current Learning Rate: 0.0000736799 +2025-03-25 04:30:36,449 Train Loss: 0.0002338, Val Loss: 0.0002615 +2025-03-25 04:30:36,449 Epoch 636/2000 +2025-03-25 04:31:42,366 Current Learning Rate: 0.0000778360 +2025-03-25 04:31:42,367 Train Loss: 0.0002340, Val Loss: 0.0002618 +2025-03-25 04:31:42,367 Epoch 637/2000 +2025-03-25 04:32:48,549 Current Learning Rate: 0.0000820963 +2025-03-25 04:32:48,550 Train Loss: 0.0002342, Val Loss: 0.0002621 +2025-03-25 04:32:48,550 Epoch 638/2000 +2025-03-25 04:33:55,071 Current Learning Rate: 0.0000864597 +2025-03-25 04:33:55,071 Train Loss: 0.0002344, Val Loss: 0.0002624 +2025-03-25 04:33:55,072 Epoch 639/2000 +2025-03-25 04:35:01,245 Current Learning Rate: 0.0000909251 +2025-03-25 04:35:01,246 Train Loss: 0.0002346, Val Loss: 0.0002626 +2025-03-25 04:35:01,246 Epoch 640/2000 +2025-03-25 
04:36:07,654 Current Learning Rate: 0.0000954915 +2025-03-25 04:36:07,654 Train Loss: 0.0002348, Val Loss: 0.0002627 +2025-03-25 04:36:07,655 Epoch 641/2000 +2025-03-25 04:37:13,541 Current Learning Rate: 0.0001001577 +2025-03-25 04:37:13,542 Train Loss: 0.0002350, Val Loss: 0.0002627 +2025-03-25 04:37:13,542 Epoch 642/2000 +2025-03-25 04:38:19,386 Current Learning Rate: 0.0001049225 +2025-03-25 04:38:19,394 Train Loss: 0.0002352, Val Loss: 0.0002627 +2025-03-25 04:38:19,394 Epoch 643/2000 +2025-03-25 04:39:25,848 Current Learning Rate: 0.0001097848 +2025-03-25 04:39:25,849 Train Loss: 0.0002354, Val Loss: 0.0002627 +2025-03-25 04:39:25,850 Epoch 644/2000 +2025-03-25 04:40:31,434 Current Learning Rate: 0.0001147434 +2025-03-25 04:40:31,435 Train Loss: 0.0002356, Val Loss: 0.0002628 +2025-03-25 04:40:31,435 Epoch 645/2000 +2025-03-25 04:41:38,144 Current Learning Rate: 0.0001197970 +2025-03-25 04:41:38,144 Train Loss: 0.0002358, Val Loss: 0.0002629 +2025-03-25 04:41:38,145 Epoch 646/2000 +2025-03-25 04:42:43,613 Current Learning Rate: 0.0001249445 +2025-03-25 04:42:43,614 Train Loss: 0.0002360, Val Loss: 0.0002630 +2025-03-25 04:42:43,614 Epoch 647/2000 +2025-03-25 04:43:49,576 Current Learning Rate: 0.0001301845 +2025-03-25 04:43:49,576 Train Loss: 0.0002363, Val Loss: 0.0002631 +2025-03-25 04:43:49,577 Epoch 648/2000 +2025-03-25 04:44:56,062 Current Learning Rate: 0.0001355157 +2025-03-25 04:44:56,063 Train Loss: 0.0002365, Val Loss: 0.0002632 +2025-03-25 04:44:56,063 Epoch 649/2000 +2025-03-25 04:46:01,505 Current Learning Rate: 0.0001409369 +2025-03-25 04:46:01,506 Train Loss: 0.0002367, Val Loss: 0.0002634 +2025-03-25 04:46:01,506 Epoch 650/2000 +2025-03-25 04:47:08,045 Current Learning Rate: 0.0001464466 +2025-03-25 04:47:08,046 Train Loss: 0.0002370, Val Loss: 0.0002636 +2025-03-25 04:47:08,046 Epoch 651/2000 +2025-03-25 04:48:13,659 Current Learning Rate: 0.0001520436 +2025-03-25 04:48:13,659 Train Loss: 0.0002372, Val Loss: 0.0002639 +2025-03-25 
04:48:13,660 Epoch 652/2000 +2025-03-25 04:49:20,029 Current Learning Rate: 0.0001577264 +2025-03-25 04:49:20,030 Train Loss: 0.0002375, Val Loss: 0.0002641 +2025-03-25 04:49:20,030 Epoch 653/2000 +2025-03-25 04:50:26,572 Current Learning Rate: 0.0001634937 +2025-03-25 04:50:26,573 Train Loss: 0.0002377, Val Loss: 0.0002644 +2025-03-25 04:50:26,573 Epoch 654/2000 +2025-03-25 04:51:32,178 Current Learning Rate: 0.0001693441 +2025-03-25 04:51:32,178 Train Loss: 0.0002380, Val Loss: 0.0002647 +2025-03-25 04:51:32,178 Epoch 655/2000 +2025-03-25 04:52:38,327 Current Learning Rate: 0.0001752760 +2025-03-25 04:52:38,328 Train Loss: 0.0002382, Val Loss: 0.0002649 +2025-03-25 04:52:38,328 Epoch 656/2000 +2025-03-25 04:53:44,076 Current Learning Rate: 0.0001812880 +2025-03-25 04:53:44,077 Train Loss: 0.0002385, Val Loss: 0.0002652 +2025-03-25 04:53:44,078 Epoch 657/2000 +2025-03-25 04:54:49,624 Current Learning Rate: 0.0001873787 +2025-03-25 04:54:49,625 Train Loss: 0.0002387, Val Loss: 0.0002655 +2025-03-25 04:54:49,626 Epoch 658/2000 +2025-03-25 04:55:55,598 Current Learning Rate: 0.0001935465 +2025-03-25 04:55:55,599 Train Loss: 0.0002390, Val Loss: 0.0002658 +2025-03-25 04:55:55,599 Epoch 659/2000 +2025-03-25 04:57:01,526 Current Learning Rate: 0.0001997899 +2025-03-25 04:57:01,527 Train Loss: 0.0002393, Val Loss: 0.0002661 +2025-03-25 04:57:01,527 Epoch 660/2000 +2025-03-25 04:58:07,851 Current Learning Rate: 0.0002061074 +2025-03-25 04:58:07,852 Train Loss: 0.0002396, Val Loss: 0.0002664 +2025-03-25 04:58:07,852 Epoch 661/2000 +2025-03-25 04:59:13,432 Current Learning Rate: 0.0002124974 +2025-03-25 04:59:13,433 Train Loss: 0.0002398, Val Loss: 0.0002668 +2025-03-25 04:59:13,433 Epoch 662/2000 +2025-03-25 05:00:20,191 Current Learning Rate: 0.0002189583 +2025-03-25 05:00:20,191 Train Loss: 0.0002401, Val Loss: 0.0002671 +2025-03-25 05:00:20,192 Epoch 663/2000 +2025-03-25 05:01:26,267 Current Learning Rate: 0.0002254886 +2025-03-25 05:01:26,268 Train Loss: 0.0002404, Val 
Loss: 0.0002674 +2025-03-25 05:01:26,268 Epoch 664/2000 +2025-03-25 05:02:31,806 Current Learning Rate: 0.0002320866 +2025-03-25 05:02:31,808 Train Loss: 0.0002407, Val Loss: 0.0002677 +2025-03-25 05:02:31,809 Epoch 665/2000 +2025-03-25 05:03:38,264 Current Learning Rate: 0.0002387507 +2025-03-25 05:03:38,265 Train Loss: 0.0002409, Val Loss: 0.0002680 +2025-03-25 05:03:38,266 Epoch 666/2000 +2025-03-25 05:04:45,292 Current Learning Rate: 0.0002454793 +2025-03-25 05:04:45,293 Train Loss: 0.0002412, Val Loss: 0.0002682 +2025-03-25 05:04:45,293 Epoch 667/2000 +2025-03-25 05:05:51,927 Current Learning Rate: 0.0002522707 +2025-03-25 05:05:51,928 Train Loss: 0.0002415, Val Loss: 0.0002685 +2025-03-25 05:05:51,929 Epoch 668/2000 +2025-03-25 05:06:58,151 Current Learning Rate: 0.0002591232 +2025-03-25 05:06:58,152 Train Loss: 0.0002418, Val Loss: 0.0002688 +2025-03-25 05:06:58,153 Epoch 669/2000 +2025-03-25 05:08:04,111 Current Learning Rate: 0.0002660351 +2025-03-25 05:08:04,112 Train Loss: 0.0002421, Val Loss: 0.0002692 +2025-03-25 05:08:04,112 Epoch 670/2000 +2025-03-25 05:09:10,427 Current Learning Rate: 0.0002730048 +2025-03-25 05:09:10,427 Train Loss: 0.0002424, Val Loss: 0.0002695 +2025-03-25 05:09:10,427 Epoch 671/2000 +2025-03-25 05:10:16,223 Current Learning Rate: 0.0002800304 +2025-03-25 05:10:16,224 Train Loss: 0.0002427, Val Loss: 0.0002699 +2025-03-25 05:10:16,224 Epoch 672/2000 +2025-03-25 05:11:22,207 Current Learning Rate: 0.0002871104 +2025-03-25 05:11:22,208 Train Loss: 0.0002430, Val Loss: 0.0002703 +2025-03-25 05:11:22,209 Epoch 673/2000 +2025-03-25 05:12:27,732 Current Learning Rate: 0.0002942428 +2025-03-25 05:12:27,733 Train Loss: 0.0002433, Val Loss: 0.0002708 +2025-03-25 05:12:27,733 Epoch 674/2000 +2025-03-25 05:13:33,627 Current Learning Rate: 0.0003014261 +2025-03-25 05:13:33,628 Train Loss: 0.0002436, Val Loss: 0.0002712 +2025-03-25 05:13:33,628 Epoch 675/2000 +2025-03-25 05:14:39,908 Current Learning Rate: 0.0003086583 +2025-03-25 
05:14:39,909 Train Loss: 0.0002440, Val Loss: 0.0002716 +2025-03-25 05:14:39,909 Epoch 676/2000 +2025-03-25 05:15:45,800 Current Learning Rate: 0.0003159377 +2025-03-25 05:15:45,801 Train Loss: 0.0002443, Val Loss: 0.0002720 +2025-03-25 05:15:45,801 Epoch 677/2000 +2025-03-25 05:16:51,960 Current Learning Rate: 0.0003232626 +2025-03-25 05:16:51,961 Train Loss: 0.0002446, Val Loss: 0.0002725 +2025-03-25 05:16:51,961 Epoch 678/2000 +2025-03-25 05:17:58,519 Current Learning Rate: 0.0003306310 +2025-03-25 05:17:58,519 Train Loss: 0.0002449, Val Loss: 0.0002731 +2025-03-25 05:17:58,520 Epoch 679/2000 +2025-03-25 05:19:05,084 Current Learning Rate: 0.0003380413 +2025-03-25 05:19:05,085 Train Loss: 0.0002452, Val Loss: 0.0002737 +2025-03-25 05:19:05,085 Epoch 680/2000 +2025-03-25 05:20:11,265 Current Learning Rate: 0.0003454915 +2025-03-25 05:20:11,265 Train Loss: 0.0002455, Val Loss: 0.0002745 +2025-03-25 05:20:11,266 Epoch 681/2000 +2025-03-25 05:21:16,955 Current Learning Rate: 0.0003529798 +2025-03-25 05:21:16,955 Train Loss: 0.0002458, Val Loss: 0.0002753 +2025-03-25 05:21:16,956 Epoch 682/2000 +2025-03-25 05:22:23,071 Current Learning Rate: 0.0003605044 +2025-03-25 05:22:23,071 Train Loss: 0.0002461, Val Loss: 0.0002761 +2025-03-25 05:22:23,072 Epoch 683/2000 +2025-03-25 05:23:29,291 Current Learning Rate: 0.0003680635 +2025-03-25 05:23:29,291 Train Loss: 0.0002464, Val Loss: 0.0002769 +2025-03-25 05:23:29,291 Epoch 684/2000 +2025-03-25 05:24:35,168 Current Learning Rate: 0.0003756551 +2025-03-25 05:24:35,169 Train Loss: 0.0002468, Val Loss: 0.0002775 +2025-03-25 05:24:35,169 Epoch 685/2000 +2025-03-25 05:25:41,080 Current Learning Rate: 0.0003832773 +2025-03-25 05:25:41,080 Train Loss: 0.0002471, Val Loss: 0.0002781 +2025-03-25 05:25:41,081 Epoch 686/2000 +2025-03-25 05:26:47,226 Current Learning Rate: 0.0003909284 +2025-03-25 05:26:47,227 Train Loss: 0.0002474, Val Loss: 0.0002787 +2025-03-25 05:26:47,227 Epoch 687/2000 +2025-03-25 05:27:53,504 Current Learning 
Rate: 0.0003986064 +2025-03-25 05:27:53,504 Train Loss: 0.0002478, Val Loss: 0.0002791 +2025-03-25 05:27:53,504 Epoch 688/2000 +2025-03-25 05:28:59,609 Current Learning Rate: 0.0004063093 +2025-03-25 05:28:59,610 Train Loss: 0.0002481, Val Loss: 0.0002794 +2025-03-25 05:28:59,610 Epoch 689/2000 +2025-03-25 05:30:05,429 Current Learning Rate: 0.0004140354 +2025-03-25 05:30:05,429 Train Loss: 0.0002484, Val Loss: 0.0002797 +2025-03-25 05:30:05,430 Epoch 690/2000 +2025-03-25 05:31:11,167 Current Learning Rate: 0.0004217828 +2025-03-25 05:31:11,167 Train Loss: 0.0002487, Val Loss: 0.0002799 +2025-03-25 05:31:11,168 Epoch 691/2000 +2025-03-25 05:32:17,882 Current Learning Rate: 0.0004295494 +2025-03-25 05:32:17,883 Train Loss: 0.0002490, Val Loss: 0.0002800 +2025-03-25 05:32:17,883 Epoch 692/2000 +2025-03-25 05:33:23,648 Current Learning Rate: 0.0004373334 +2025-03-25 05:33:23,649 Train Loss: 0.0002493, Val Loss: 0.0002803 +2025-03-25 05:33:23,650 Epoch 693/2000 +2025-03-25 05:34:29,295 Current Learning Rate: 0.0004451328 +2025-03-25 05:34:29,296 Train Loss: 0.0002497, Val Loss: 0.0002808 +2025-03-25 05:34:29,296 Epoch 694/2000 +2025-03-25 05:35:35,766 Current Learning Rate: 0.0004529458 +2025-03-25 05:35:35,767 Train Loss: 0.0002500, Val Loss: 0.0002813 +2025-03-25 05:35:35,768 Epoch 695/2000 +2025-03-25 05:36:42,249 Current Learning Rate: 0.0004607705 +2025-03-25 05:36:42,249 Train Loss: 0.0002503, Val Loss: 0.0002819 +2025-03-25 05:36:42,249 Epoch 696/2000 +2025-03-25 05:37:48,758 Current Learning Rate: 0.0004686047 +2025-03-25 05:37:48,758 Train Loss: 0.0002507, Val Loss: 0.0002828 +2025-03-25 05:37:48,759 Epoch 697/2000 +2025-03-25 05:38:54,690 Current Learning Rate: 0.0004764468 +2025-03-25 05:38:54,690 Train Loss: 0.0002511, Val Loss: 0.0002834 +2025-03-25 05:38:54,691 Epoch 698/2000 +2025-03-25 05:40:00,312 Current Learning Rate: 0.0004842946 +2025-03-25 05:40:00,312 Train Loss: 0.0002516, Val Loss: 0.0002838 +2025-03-25 05:40:00,312 Epoch 699/2000 +2025-03-25 
05:41:06,945 Current Learning Rate: 0.0004921463 +2025-03-25 05:41:06,946 Train Loss: 0.0002521, Val Loss: 0.0002848 +2025-03-25 05:41:06,946 Epoch 700/2000 +2025-03-25 05:42:12,945 Current Learning Rate: 0.0005000000 +2025-03-25 05:42:12,946 Train Loss: 0.0002531, Val Loss: 0.0002880 +2025-03-25 05:42:12,947 Epoch 701/2000 +2025-03-25 05:43:19,156 Current Learning Rate: 0.0005078537 +2025-03-25 05:43:19,157 Train Loss: 0.0002544, Val Loss: 0.0002921 +2025-03-25 05:43:19,157 Epoch 702/2000 +2025-03-25 05:44:24,035 Current Learning Rate: 0.0005157054 +2025-03-25 05:44:24,036 Train Loss: 0.0002547, Val Loss: 0.0002888 +2025-03-25 05:44:24,036 Epoch 703/2000 +2025-03-25 05:45:30,221 Current Learning Rate: 0.0005235532 +2025-03-25 05:45:30,221 Train Loss: 0.0002542, Val Loss: 0.0002879 +2025-03-25 05:45:30,222 Epoch 704/2000 +2025-03-25 05:46:36,260 Current Learning Rate: 0.0005313953 +2025-03-25 05:46:36,261 Train Loss: 0.0002534, Val Loss: 0.0002858 +2025-03-25 05:46:36,261 Epoch 705/2000 +2025-03-25 05:47:41,983 Current Learning Rate: 0.0005392295 +2025-03-25 05:47:41,984 Train Loss: 0.0002530, Val Loss: 0.0002861 +2025-03-25 05:47:41,985 Epoch 706/2000 +2025-03-25 05:48:48,425 Current Learning Rate: 0.0005470542 +2025-03-25 05:48:48,425 Train Loss: 0.0002532, Val Loss: 0.0002875 +2025-03-25 05:48:48,426 Epoch 707/2000 +2025-03-25 05:49:54,251 Current Learning Rate: 0.0005548672 +2025-03-25 05:49:54,252 Train Loss: 0.0002538, Val Loss: 0.0002884 +2025-03-25 05:49:54,252 Epoch 708/2000 +2025-03-25 05:51:00,176 Current Learning Rate: 0.0005626666 +2025-03-25 05:51:00,176 Train Loss: 0.0002543, Val Loss: 0.0002894 +2025-03-25 05:51:00,177 Epoch 709/2000 +2025-03-25 05:52:05,880 Current Learning Rate: 0.0005704506 +2025-03-25 05:52:05,880 Train Loss: 0.0002549, Val Loss: 0.0002903 +2025-03-25 05:52:05,881 Epoch 710/2000 +2025-03-25 05:53:12,393 Current Learning Rate: 0.0005782172 +2025-03-25 05:53:12,393 Train Loss: 0.0002554, Val Loss: 0.0002914 +2025-03-25 
05:53:12,394 Epoch 711/2000 +2025-03-25 05:54:17,987 Current Learning Rate: 0.0005859646 +2025-03-25 05:54:17,988 Train Loss: 0.0002562, Val Loss: 0.0002927 +2025-03-25 05:54:17,988 Epoch 712/2000 +2025-03-25 05:55:23,210 Current Learning Rate: 0.0005936907 +2025-03-25 05:55:23,211 Train Loss: 0.0002572, Val Loss: 0.0002940 +2025-03-25 05:55:23,211 Epoch 713/2000 +2025-03-25 05:56:29,374 Current Learning Rate: 0.0006013936 +2025-03-25 05:56:29,375 Train Loss: 0.0002583, Val Loss: 0.0002946 +2025-03-25 05:56:29,375 Epoch 714/2000 +2025-03-25 05:57:35,828 Current Learning Rate: 0.0006090716 +2025-03-25 05:57:35,828 Train Loss: 0.0002592, Val Loss: 0.0002946 +2025-03-25 05:57:35,828 Epoch 715/2000 +2025-03-25 05:58:41,574 Current Learning Rate: 0.0006167227 +2025-03-25 05:58:41,574 Train Loss: 0.0002593, Val Loss: 0.0002923 +2025-03-25 05:58:41,575 Epoch 716/2000 +2025-03-25 05:59:47,180 Current Learning Rate: 0.0006243449 +2025-03-25 05:59:47,181 Train Loss: 0.0002590, Val Loss: 0.0002914 +2025-03-25 05:59:47,181 Epoch 717/2000 +2025-03-25 06:00:53,251 Current Learning Rate: 0.0006319365 +2025-03-25 06:00:53,252 Train Loss: 0.0002577, Val Loss: 0.0002908 +2025-03-25 06:00:53,252 Epoch 718/2000 +2025-03-25 06:01:59,964 Current Learning Rate: 0.0006394956 +2025-03-25 06:01:59,965 Train Loss: 0.0002567, Val Loss: 0.0002912 +2025-03-25 06:01:59,965 Epoch 719/2000 +2025-03-25 06:03:06,105 Current Learning Rate: 0.0006470202 +2025-03-25 06:03:06,105 Train Loss: 0.0002570, Val Loss: 0.0002926 +2025-03-25 06:03:06,105 Epoch 720/2000 +2025-03-25 06:04:12,955 Current Learning Rate: 0.0006545085 +2025-03-25 06:04:12,956 Train Loss: 0.0002576, Val Loss: 0.0002947 +2025-03-25 06:04:12,956 Epoch 721/2000 +2025-03-25 06:05:19,022 Current Learning Rate: 0.0006619587 +2025-03-25 06:05:19,022 Train Loss: 0.0002583, Val Loss: 0.0002958 +2025-03-25 06:05:19,023 Epoch 722/2000 +2025-03-25 06:06:26,178 Current Learning Rate: 0.0006693690 +2025-03-25 06:06:26,178 Train Loss: 0.0002589, Val 
Loss: 0.0002970 +2025-03-25 06:06:26,179 Epoch 723/2000 +2025-03-25 06:07:31,827 Current Learning Rate: 0.0006767374 +2025-03-25 06:07:31,827 Train Loss: 0.0002601, Val Loss: 0.0002983 +2025-03-25 06:07:31,828 Epoch 724/2000 +2025-03-25 06:08:37,980 Current Learning Rate: 0.0006840623 +2025-03-25 06:08:37,981 Train Loss: 0.0002621, Val Loss: 0.0002989 +2025-03-25 06:08:37,981 Epoch 725/2000 +2025-03-25 06:09:43,782 Current Learning Rate: 0.0006913417 +2025-03-25 06:09:43,783 Train Loss: 0.0002623, Val Loss: 0.0002983 +2025-03-25 06:09:43,783 Epoch 726/2000 +2025-03-25 06:10:49,733 Current Learning Rate: 0.0006985739 +2025-03-25 06:10:49,734 Train Loss: 0.0002622, Val Loss: 0.0002994 +2025-03-25 06:10:49,734 Epoch 727/2000 +2025-03-25 06:11:56,240 Current Learning Rate: 0.0007057572 +2025-03-25 06:11:56,240 Train Loss: 0.0002619, Val Loss: 0.0003001 +2025-03-25 06:11:56,241 Epoch 728/2000 +2025-03-25 06:13:02,095 Current Learning Rate: 0.0007128896 +2025-03-25 06:13:02,096 Train Loss: 0.0002612, Val Loss: 0.0002983 +2025-03-25 06:13:02,096 Epoch 729/2000 +2025-03-25 06:14:08,680 Current Learning Rate: 0.0007199696 +2025-03-25 06:14:08,680 Train Loss: 0.0002602, Val Loss: 0.0002979 +2025-03-25 06:14:08,680 Epoch 730/2000 +2025-03-25 06:15:14,773 Current Learning Rate: 0.0007269952 +2025-03-25 06:15:14,773 Train Loss: 0.0002601, Val Loss: 0.0002989 +2025-03-25 06:15:14,774 Epoch 731/2000 +2025-03-25 06:16:20,412 Current Learning Rate: 0.0007339649 +2025-03-25 06:16:20,413 Train Loss: 0.0002610, Val Loss: 0.0003001 +2025-03-25 06:16:20,413 Epoch 732/2000 +2025-03-25 06:17:26,734 Current Learning Rate: 0.0007408768 +2025-03-25 06:17:26,735 Train Loss: 0.0002619, Val Loss: 0.0003003 +2025-03-25 06:17:26,735 Epoch 733/2000 +2025-03-25 06:18:32,479 Current Learning Rate: 0.0007477293 +2025-03-25 06:18:32,479 Train Loss: 0.0002632, Val Loss: 0.0003054 +2025-03-25 06:18:32,480 Epoch 734/2000 +2025-03-25 06:19:39,041 Current Learning Rate: 0.0007545207 +2025-03-25 
06:19:39,041 Train Loss: 0.0002648, Val Loss: 0.0003003 +2025-03-25 06:19:39,042 Epoch 735/2000 +2025-03-25 06:20:45,161 Current Learning Rate: 0.0007612493 +2025-03-25 06:20:45,161 Train Loss: 0.0002657, Val Loss: 0.0003042 +2025-03-25 06:20:45,161 Epoch 736/2000 +2025-03-25 06:21:51,125 Current Learning Rate: 0.0007679134 +2025-03-25 06:21:51,126 Train Loss: 0.0002659, Val Loss: 0.0003034 +2025-03-25 06:21:51,126 Epoch 737/2000 +2025-03-25 06:22:57,173 Current Learning Rate: 0.0007745114 +2025-03-25 06:22:57,173 Train Loss: 0.0002655, Val Loss: 0.0003032 +2025-03-25 06:22:57,174 Epoch 738/2000 +2025-03-25 06:24:02,087 Current Learning Rate: 0.0007810417 +2025-03-25 06:24:02,087 Train Loss: 0.0002644, Val Loss: 0.0003015 +2025-03-25 06:24:02,087 Epoch 739/2000 +2025-03-25 06:25:08,287 Current Learning Rate: 0.0007875026 +2025-03-25 06:25:08,287 Train Loss: 0.0002627, Val Loss: 0.0003008 +2025-03-25 06:25:08,287 Epoch 740/2000 +2025-03-25 06:26:14,391 Current Learning Rate: 0.0007938926 +2025-03-25 06:26:14,392 Train Loss: 0.0002625, Val Loss: 0.0003030 +2025-03-25 06:26:14,392 Epoch 741/2000 +2025-03-25 06:27:20,818 Current Learning Rate: 0.0008002101 +2025-03-25 06:27:20,819 Train Loss: 0.0002635, Val Loss: 0.0003045 +2025-03-25 06:27:20,819 Epoch 742/2000 +2025-03-25 06:28:26,605 Current Learning Rate: 0.0008064535 +2025-03-25 06:28:26,606 Train Loss: 0.0002647, Val Loss: 0.0003068 +2025-03-25 06:28:26,606 Epoch 743/2000 +2025-03-25 06:29:31,971 Current Learning Rate: 0.0008126213 +2025-03-25 06:29:31,972 Train Loss: 0.0002660, Val Loss: 0.0003104 +2025-03-25 06:29:31,972 Epoch 744/2000 +2025-03-25 06:30:37,938 Current Learning Rate: 0.0008187120 +2025-03-25 06:30:37,939 Train Loss: 0.0002668, Val Loss: 0.0003096 +2025-03-25 06:30:37,939 Epoch 745/2000 +2025-03-25 06:31:44,101 Current Learning Rate: 0.0008247240 +2025-03-25 06:31:44,102 Train Loss: 0.0002677, Val Loss: 0.0003049 +2025-03-25 06:31:44,103 Epoch 746/2000 +2025-03-25 06:32:49,870 Current Learning 
Rate: 0.0008306559 +2025-03-25 06:32:49,870 Train Loss: 0.0002683, Val Loss: 0.0003087 +2025-03-25 06:32:49,870 Epoch 747/2000 +2025-03-25 06:33:55,531 Current Learning Rate: 0.0008365063 +2025-03-25 06:33:55,531 Train Loss: 0.0002685, Val Loss: 0.0003083 +2025-03-25 06:33:55,531 Epoch 748/2000 +2025-03-25 06:35:02,363 Current Learning Rate: 0.0008422736 +2025-03-25 06:35:02,364 Train Loss: 0.0002684, Val Loss: 0.0003090 +2025-03-25 06:35:02,364 Epoch 749/2000 +2025-03-25 06:36:08,512 Current Learning Rate: 0.0008479564 +2025-03-25 06:36:08,513 Train Loss: 0.0002675, Val Loss: 0.0003072 +2025-03-25 06:36:08,513 Epoch 750/2000 +2025-03-25 06:37:14,661 Current Learning Rate: 0.0008535534 +2025-03-25 06:37:14,661 Train Loss: 0.0002655, Val Loss: 0.0003048 +2025-03-25 06:37:14,661 Epoch 751/2000 +2025-03-25 06:38:20,472 Current Learning Rate: 0.0008590631 +2025-03-25 06:38:20,473 Train Loss: 0.0002652, Val Loss: 0.0003078 +2025-03-25 06:38:20,473 Epoch 752/2000 +2025-03-25 06:39:26,396 Current Learning Rate: 0.0008644843 +2025-03-25 06:39:26,397 Train Loss: 0.0002662, Val Loss: 0.0003090 +2025-03-25 06:39:26,397 Epoch 753/2000 +2025-03-25 06:40:32,731 Current Learning Rate: 0.0008698155 +2025-03-25 06:40:32,731 Train Loss: 0.0002676, Val Loss: 0.0003096 +2025-03-25 06:40:32,732 Epoch 754/2000 +2025-03-25 06:41:38,957 Current Learning Rate: 0.0008750555 +2025-03-25 06:41:38,957 Train Loss: 0.0002679, Val Loss: 0.0003116 +2025-03-25 06:41:38,958 Epoch 755/2000 +2025-03-25 06:42:44,534 Current Learning Rate: 0.0008802030 +2025-03-25 06:42:44,535 Train Loss: 0.0002695, Val Loss: 0.0003133 +2025-03-25 06:42:44,535 Epoch 756/2000 +2025-03-25 06:43:50,585 Current Learning Rate: 0.0008852566 +2025-03-25 06:43:50,586 Train Loss: 0.0002702, Val Loss: 0.0003100 +2025-03-25 06:43:50,586 Epoch 757/2000 +2025-03-25 06:44:56,184 Current Learning Rate: 0.0008902152 +2025-03-25 06:44:56,184 Train Loss: 0.0002709, Val Loss: 0.0003129 +2025-03-25 06:44:56,185 Epoch 758/2000 +2025-03-25 
06:46:02,717 Current Learning Rate: 0.0008950775 +2025-03-25 06:46:02,718 Train Loss: 0.0002714, Val Loss: 0.0003128 +2025-03-25 06:46:02,718 Epoch 759/2000 +2025-03-25 06:47:08,893 Current Learning Rate: 0.0008998423 +2025-03-25 06:47:08,893 Train Loss: 0.0002714, Val Loss: 0.0003105 +2025-03-25 06:47:08,894 Epoch 760/2000 +2025-03-25 06:48:15,326 Current Learning Rate: 0.0009045085 +2025-03-25 06:48:15,327 Train Loss: 0.0002706, Val Loss: 0.0003085 +2025-03-25 06:48:15,327 Epoch 761/2000 +2025-03-25 06:49:20,587 Current Learning Rate: 0.0009090749 +2025-03-25 06:49:20,588 Train Loss: 0.0002683, Val Loss: 0.0003062 +2025-03-25 06:49:20,588 Epoch 762/2000 +2025-03-25 06:50:26,683 Current Learning Rate: 0.0009135403 +2025-03-25 06:50:26,683 Train Loss: 0.0002661, Val Loss: 0.0003084 +2025-03-25 06:50:26,684 Epoch 763/2000 +2025-03-25 06:51:33,057 Current Learning Rate: 0.0009179037 +2025-03-25 06:51:33,058 Train Loss: 0.0002671, Val Loss: 0.0003100 +2025-03-25 06:51:33,058 Epoch 764/2000 +2025-03-25 06:52:38,530 Current Learning Rate: 0.0009221640 +2025-03-25 06:52:38,531 Train Loss: 0.0002685, Val Loss: 0.0003119 +2025-03-25 06:52:38,531 Epoch 765/2000 +2025-03-25 06:53:44,435 Current Learning Rate: 0.0009263201 +2025-03-25 06:53:44,435 Train Loss: 0.0002703, Val Loss: 0.0003219 +2025-03-25 06:53:44,436 Epoch 766/2000 +2025-03-25 06:54:51,115 Current Learning Rate: 0.0009303710 +2025-03-25 06:54:51,116 Train Loss: 0.0002709, Val Loss: 0.0003164 +2025-03-25 06:54:51,116 Epoch 767/2000 +2025-03-25 06:55:57,317 Current Learning Rate: 0.0009343158 +2025-03-25 06:55:57,318 Train Loss: 0.0002710, Val Loss: 0.0003180 +2025-03-25 06:55:57,318 Epoch 768/2000 +2025-03-25 06:57:03,426 Current Learning Rate: 0.0009381533 +2025-03-25 06:57:03,426 Train Loss: 0.0002707, Val Loss: 0.0003114 +2025-03-25 06:57:03,427 Epoch 769/2000 +2025-03-25 06:58:10,356 Current Learning Rate: 0.0009418828 +2025-03-25 06:58:10,357 Train Loss: 0.0002704, Val Loss: 0.0003111 +2025-03-25 
06:58:10,357 Epoch 770/2000 +2025-03-25 06:59:16,222 Current Learning Rate: 0.0009455033 +2025-03-25 06:59:16,222 Train Loss: 0.0002709, Val Loss: 0.0003129 +2025-03-25 06:59:16,222 Epoch 771/2000 +2025-03-25 07:00:23,158 Current Learning Rate: 0.0009490138 +2025-03-25 07:00:23,159 Train Loss: 0.0002736, Val Loss: 0.0003222 +2025-03-25 07:00:23,159 Epoch 772/2000 +2025-03-25 07:01:29,360 Current Learning Rate: 0.0009524135 +2025-03-25 07:01:29,361 Train Loss: 0.0002759, Val Loss: 0.0003164 +2025-03-25 07:01:29,361 Epoch 773/2000 +2025-03-25 07:02:35,810 Current Learning Rate: 0.0009557016 +2025-03-25 07:02:35,811 Train Loss: 0.0002780, Val Loss: 0.0003160 +2025-03-25 07:02:35,811 Epoch 774/2000 +2025-03-25 07:03:41,874 Current Learning Rate: 0.0009588773 +2025-03-25 07:03:41,875 Train Loss: 0.0002754, Val Loss: 0.0003129 +2025-03-25 07:03:41,875 Epoch 775/2000 +2025-03-25 07:04:47,532 Current Learning Rate: 0.0009619398 +2025-03-25 07:04:47,532 Train Loss: 0.0002725, Val Loss: 0.0003090 +2025-03-25 07:04:47,533 Epoch 776/2000 +2025-03-25 07:05:53,484 Current Learning Rate: 0.0009648882 +2025-03-25 07:05:53,484 Train Loss: 0.0002680, Val Loss: 0.0003060 +2025-03-25 07:05:53,485 Epoch 777/2000 +2025-03-25 07:06:59,588 Current Learning Rate: 0.0009677220 +2025-03-25 07:06:59,589 Train Loss: 0.0002661, Val Loss: 0.0003088 +2025-03-25 07:06:59,590 Epoch 778/2000 +2025-03-25 07:08:05,847 Current Learning Rate: 0.0009704404 +2025-03-25 07:08:05,847 Train Loss: 0.0002678, Val Loss: 0.0003139 +2025-03-25 07:08:05,848 Epoch 779/2000 +2025-03-25 07:09:11,811 Current Learning Rate: 0.0009730427 +2025-03-25 07:09:11,812 Train Loss: 0.0002698, Val Loss: 0.0003159 +2025-03-25 07:09:11,812 Epoch 780/2000 +2025-03-25 07:10:17,841 Current Learning Rate: 0.0009755283 +2025-03-25 07:10:17,842 Train Loss: 0.0002711, Val Loss: 0.0003161 +2025-03-25 07:10:17,842 Epoch 781/2000 +2025-03-25 07:11:24,597 Current Learning Rate: 0.0009778965 +2025-03-25 07:11:24,598 Train Loss: 0.0002717, Val 
Loss: 0.0003158 +2025-03-25 07:11:24,598 Epoch 782/2000 +2025-03-25 07:12:30,953 Current Learning Rate: 0.0009801468 +2025-03-25 07:12:30,953 Train Loss: 0.0002714, Val Loss: 0.0003148 +2025-03-25 07:12:30,954 Epoch 783/2000 +2025-03-25 07:13:37,304 Current Learning Rate: 0.0009822787 +2025-03-25 07:13:37,305 Train Loss: 0.0002703, Val Loss: 0.0003116 +2025-03-25 07:13:37,305 Epoch 784/2000 +2025-03-25 07:14:42,546 Current Learning Rate: 0.0009842916 +2025-03-25 07:14:42,546 Train Loss: 0.0002688, Val Loss: 0.0003131 +2025-03-25 07:14:42,547 Epoch 785/2000 +2025-03-25 07:15:48,673 Current Learning Rate: 0.0009861850 +2025-03-25 07:15:48,674 Train Loss: 0.0002689, Val Loss: 0.0003138 +2025-03-25 07:15:48,674 Epoch 786/2000 +2025-03-25 07:16:54,789 Current Learning Rate: 0.0009879584 +2025-03-25 07:16:54,789 Train Loss: 0.0002701, Val Loss: 0.0003195 +2025-03-25 07:16:54,789 Epoch 787/2000 +2025-03-25 07:18:00,977 Current Learning Rate: 0.0009896114 +2025-03-25 07:18:00,977 Train Loss: 0.0002733, Val Loss: 0.0003091 +2025-03-25 07:18:00,978 Epoch 788/2000 +2025-03-25 07:19:07,018 Current Learning Rate: 0.0009911436 +2025-03-25 07:19:07,019 Train Loss: 0.0002722, Val Loss: 0.0003113 +2025-03-25 07:19:07,019 Epoch 789/2000 +2025-03-25 07:20:12,416 Current Learning Rate: 0.0009925547 +2025-03-25 07:20:12,417 Train Loss: 0.0002728, Val Loss: 0.0003155 +2025-03-25 07:20:12,417 Epoch 790/2000 +2025-03-25 07:21:18,604 Current Learning Rate: 0.0009938442 +2025-03-25 07:21:18,605 Train Loss: 0.0002748, Val Loss: 0.0003164 +2025-03-25 07:21:18,606 Epoch 791/2000 +2025-03-25 07:22:23,943 Current Learning Rate: 0.0009950118 +2025-03-25 07:22:23,944 Train Loss: 0.0002752, Val Loss: 0.0003177 +2025-03-25 07:22:23,944 Epoch 792/2000 +2025-03-25 07:23:29,518 Current Learning Rate: 0.0009960574 +2025-03-25 07:23:29,518 Train Loss: 0.0002768, Val Loss: 0.0003187 +2025-03-25 07:23:29,519 Epoch 793/2000 +2025-03-25 07:24:35,690 Current Learning Rate: 0.0009969805 +2025-03-25 
07:24:35,691 Train Loss: 0.0002778, Val Loss: 0.0003212 +2025-03-25 07:24:35,691 Epoch 794/2000 +2025-03-25 07:25:41,317 Current Learning Rate: 0.0009977810 +2025-03-25 07:25:41,318 Train Loss: 0.0002739, Val Loss: 0.0003156 +2025-03-25 07:25:41,318 Epoch 795/2000 +2025-03-25 07:26:47,864 Current Learning Rate: 0.0009984587 +2025-03-25 07:26:47,864 Train Loss: 0.0002688, Val Loss: 0.0003068 +2025-03-25 07:26:47,865 Epoch 796/2000 +2025-03-25 07:27:54,461 Current Learning Rate: 0.0009990134 +2025-03-25 07:27:54,461 Train Loss: 0.0002657, Val Loss: 0.0003067 +2025-03-25 07:27:54,462 Epoch 797/2000 +2025-03-25 07:29:00,518 Current Learning Rate: 0.0009994449 +2025-03-25 07:29:00,518 Train Loss: 0.0002660, Val Loss: 0.0003115 +2025-03-25 07:29:00,519 Epoch 798/2000 +2025-03-25 07:30:06,590 Current Learning Rate: 0.0009997533 +2025-03-25 07:30:06,591 Train Loss: 0.0002679, Val Loss: 0.0003151 +2025-03-25 07:30:06,591 Epoch 799/2000 +2025-03-25 07:31:12,815 Current Learning Rate: 0.0009999383 +2025-03-25 07:31:12,816 Train Loss: 0.0002693, Val Loss: 0.0003151 +2025-03-25 07:31:12,816 Epoch 800/2000 +2025-03-25 07:32:18,618 Current Learning Rate: 0.0010000000 +2025-03-25 07:32:18,619 Train Loss: 0.0002699, Val Loss: 0.0003199 +2025-03-25 07:32:18,619 Epoch 801/2000 +2025-03-25 07:33:24,458 Current Learning Rate: 0.0009999383 +2025-03-25 07:33:24,459 Train Loss: 0.0002708, Val Loss: 0.0003141 +2025-03-25 07:33:24,459 Epoch 802/2000 +2025-03-25 07:34:30,327 Current Learning Rate: 0.0009997533 +2025-03-25 07:34:30,328 Train Loss: 0.0002711, Val Loss: 0.0003122 +2025-03-25 07:34:30,328 Epoch 803/2000 +2025-03-25 07:35:36,737 Current Learning Rate: 0.0009994449 +2025-03-25 07:35:36,738 Train Loss: 0.0002701, Val Loss: 0.0003098 +2025-03-25 07:35:36,738 Epoch 804/2000 +2025-03-25 07:36:43,067 Current Learning Rate: 0.0009990134 +2025-03-25 07:36:43,068 Train Loss: 0.0002696, Val Loss: 0.0003097 +2025-03-25 07:36:43,068 Epoch 805/2000 +2025-03-25 07:37:49,441 Current Learning 
Rate: 0.0009984587 +2025-03-25 07:37:49,441 Train Loss: 0.0002689, Val Loss: 0.0003092 +2025-03-25 07:37:49,442 Epoch 806/2000 +2025-03-25 07:38:54,993 Current Learning Rate: 0.0009977810 +2025-03-25 07:38:54,994 Train Loss: 0.0002675, Val Loss: 0.0003081 +2025-03-25 07:38:54,995 Epoch 807/2000 +2025-03-25 07:40:02,005 Current Learning Rate: 0.0009969805 +2025-03-25 07:40:02,005 Train Loss: 0.0002664, Val Loss: 0.0003102 +2025-03-25 07:40:02,005 Epoch 808/2000 +2025-03-25 07:41:07,775 Current Learning Rate: 0.0009960574 +2025-03-25 07:41:07,776 Train Loss: 0.0002670, Val Loss: 0.0003115 +2025-03-25 07:41:07,776 Epoch 809/2000 +2025-03-25 07:42:13,700 Current Learning Rate: 0.0009950118 +2025-03-25 07:42:13,701 Train Loss: 0.0002684, Val Loss: 0.0003171 +2025-03-25 07:42:13,701 Epoch 810/2000 +2025-03-25 07:43:19,959 Current Learning Rate: 0.0009938442 +2025-03-25 07:43:19,960 Train Loss: 0.0002694, Val Loss: 0.0003114 +2025-03-25 07:43:19,960 Epoch 811/2000 +2025-03-25 07:44:25,459 Current Learning Rate: 0.0009925547 +2025-03-25 07:44:25,460 Train Loss: 0.0002703, Val Loss: 0.0003113 +2025-03-25 07:44:25,460 Epoch 812/2000 +2025-03-25 07:45:32,133 Current Learning Rate: 0.0009911436 +2025-03-25 07:45:32,133 Train Loss: 0.0002727, Val Loss: 0.0003154 +2025-03-25 07:45:32,133 Epoch 813/2000 +2025-03-25 07:46:37,974 Current Learning Rate: 0.0009896114 +2025-03-25 07:46:37,974 Train Loss: 0.0002737, Val Loss: 0.0003136 +2025-03-25 07:46:37,975 Epoch 814/2000 +2025-03-25 07:47:44,154 Current Learning Rate: 0.0009879584 +2025-03-25 07:47:44,154 Train Loss: 0.0002742, Val Loss: 0.0003158 +2025-03-25 07:47:44,155 Epoch 815/2000 +2025-03-25 07:48:50,783 Current Learning Rate: 0.0009861850 +2025-03-25 07:48:50,784 Train Loss: 0.0002738, Val Loss: 0.0003147 +2025-03-25 07:48:50,784 Epoch 816/2000 +2025-03-25 07:49:57,677 Current Learning Rate: 0.0009842916 +2025-03-25 07:49:57,678 Train Loss: 0.0002715, Val Loss: 0.0003136 +2025-03-25 07:49:57,678 Epoch 817/2000 +2025-03-25 
07:51:04,125 Current Learning Rate: 0.0009822787 +2025-03-25 07:51:04,126 Train Loss: 0.0002676, Val Loss: 0.0003057 +2025-03-25 07:51:04,126 Epoch 818/2000 +2025-03-25 07:52:10,499 Current Learning Rate: 0.0009801468 +2025-03-25 07:52:10,500 Train Loss: 0.0002641, Val Loss: 0.0003030 +2025-03-25 07:52:10,500 Epoch 819/2000 +2025-03-25 07:53:16,715 Current Learning Rate: 0.0009778965 +2025-03-25 07:53:16,715 Train Loss: 0.0002628, Val Loss: 0.0003043 +2025-03-25 07:53:16,715 Epoch 820/2000 +2025-03-25 07:54:22,656 Current Learning Rate: 0.0009755283 +2025-03-25 07:54:22,657 Train Loss: 0.0002632, Val Loss: 0.0003072 +2025-03-25 07:54:22,657 Epoch 821/2000 +2025-03-25 07:55:29,194 Current Learning Rate: 0.0009730427 +2025-03-25 07:55:29,194 Train Loss: 0.0002638, Val Loss: 0.0003084 +2025-03-25 07:55:29,195 Epoch 822/2000 +2025-03-25 07:56:35,503 Current Learning Rate: 0.0009704404 +2025-03-25 07:56:35,503 Train Loss: 0.0002648, Val Loss: 0.0003096 +2025-03-25 07:56:35,504 Epoch 823/2000 +2025-03-25 07:57:41,030 Current Learning Rate: 0.0009677220 +2025-03-25 07:57:41,030 Train Loss: 0.0002662, Val Loss: 0.0003112 +2025-03-25 07:57:41,031 Epoch 824/2000 +2025-03-25 07:58:47,346 Current Learning Rate: 0.0009648882 +2025-03-25 07:58:47,347 Train Loss: 0.0002664, Val Loss: 0.0003114 +2025-03-25 07:58:47,347 Epoch 825/2000 +2025-03-25 07:59:53,371 Current Learning Rate: 0.0009619398 +2025-03-25 07:59:53,372 Train Loss: 0.0002671, Val Loss: 0.0003069 +2025-03-25 07:59:53,372 Epoch 826/2000 +2025-03-25 08:01:00,418 Current Learning Rate: 0.0009588773 +2025-03-25 08:01:00,418 Train Loss: 0.0002686, Val Loss: 0.0003104 +2025-03-25 08:01:00,419 Epoch 827/2000 +2025-03-25 08:02:06,206 Current Learning Rate: 0.0009557016 +2025-03-25 08:02:06,207 Train Loss: 0.0002689, Val Loss: 0.0003089 +2025-03-25 08:02:06,207 Epoch 828/2000 +2025-03-25 08:03:12,571 Current Learning Rate: 0.0009524135 +2025-03-25 08:03:12,571 Train Loss: 0.0002699, Val Loss: 0.0003108 +2025-03-25 
08:03:12,572 Epoch 829/2000 +2025-03-25 08:04:19,065 Current Learning Rate: 0.0009490138 +2025-03-25 08:04:19,066 Train Loss: 0.0002709, Val Loss: 0.0003088 +2025-03-25 08:04:19,066 Epoch 830/2000 +2025-03-25 08:05:25,957 Current Learning Rate: 0.0009455033 +2025-03-25 08:05:25,958 Train Loss: 0.0002700, Val Loss: 0.0003117 +2025-03-25 08:05:25,958 Epoch 831/2000 +2025-03-25 08:06:31,794 Current Learning Rate: 0.0009418828 +2025-03-25 08:06:31,795 Train Loss: 0.0002675, Val Loss: 0.0003068 +2025-03-25 08:06:31,795 Epoch 832/2000 +2025-03-25 08:07:38,543 Current Learning Rate: 0.0009381533 +2025-03-25 08:07:38,543 Train Loss: 0.0002642, Val Loss: 0.0003034 +2025-03-25 08:07:38,544 Epoch 833/2000 +2025-03-25 08:08:43,818 Current Learning Rate: 0.0009343158 +2025-03-25 08:08:43,818 Train Loss: 0.0002610, Val Loss: 0.0002981 +2025-03-25 08:08:43,819 Epoch 834/2000 +2025-03-25 08:09:49,831 Current Learning Rate: 0.0009303710 +2025-03-25 08:09:49,832 Train Loss: 0.0002598, Val Loss: 0.0002988 +2025-03-25 08:09:49,833 Epoch 835/2000 +2025-03-25 08:10:56,883 Current Learning Rate: 0.0009263201 +2025-03-25 08:10:56,884 Train Loss: 0.0002595, Val Loss: 0.0003000 +2025-03-25 08:10:56,884 Epoch 836/2000 +2025-03-25 08:12:02,369 Current Learning Rate: 0.0009221640 +2025-03-25 08:12:02,369 Train Loss: 0.0002601, Val Loss: 0.0003020 +2025-03-25 08:12:02,369 Epoch 837/2000 +2025-03-25 08:13:08,616 Current Learning Rate: 0.0009179037 +2025-03-25 08:13:08,617 Train Loss: 0.0002609, Val Loss: 0.0003019 +2025-03-25 08:13:08,617 Epoch 838/2000 +2025-03-25 08:14:14,379 Current Learning Rate: 0.0009135403 +2025-03-25 08:14:14,380 Train Loss: 0.0002622, Val Loss: 0.0003051 +2025-03-25 08:14:14,380 Epoch 839/2000 +2025-03-25 08:15:21,554 Current Learning Rate: 0.0009090749 +2025-03-25 08:15:21,555 Train Loss: 0.0002633, Val Loss: 0.0003023 +2025-03-25 08:15:21,555 Epoch 840/2000 +2025-03-25 08:16:27,684 Current Learning Rate: 0.0009045085 +2025-03-25 08:16:27,685 Train Loss: 0.0002639, Val 
Loss: 0.0003033 +2025-03-25 08:16:27,685 Epoch 841/2000 +2025-03-25 08:17:33,627 Current Learning Rate: 0.0008998423 +2025-03-25 08:17:33,628 Train Loss: 0.0002650, Val Loss: 0.0003050 +2025-03-25 08:17:33,628 Epoch 842/2000 +2025-03-25 08:18:40,141 Current Learning Rate: 0.0008950775 +2025-03-25 08:18:40,142 Train Loss: 0.0002669, Val Loss: 0.0003064 +2025-03-25 08:18:40,142 Epoch 843/2000 +2025-03-25 08:19:46,930 Current Learning Rate: 0.0008902152 +2025-03-25 08:19:46,931 Train Loss: 0.0002663, Val Loss: 0.0003029 +2025-03-25 08:19:46,932 Epoch 844/2000 +2025-03-25 08:20:52,570 Current Learning Rate: 0.0008852566 +2025-03-25 08:20:52,571 Train Loss: 0.0002648, Val Loss: 0.0002998 +2025-03-25 08:20:52,571 Epoch 845/2000 +2025-03-25 08:21:58,762 Current Learning Rate: 0.0008802030 +2025-03-25 08:21:58,763 Train Loss: 0.0002632, Val Loss: 0.0002977 +2025-03-25 08:21:58,763 Epoch 846/2000 +2025-03-25 08:23:05,149 Current Learning Rate: 0.0008750555 +2025-03-25 08:23:05,150 Train Loss: 0.0002607, Val Loss: 0.0002956 +2025-03-25 08:23:05,150 Epoch 847/2000 +2025-03-25 08:24:11,383 Current Learning Rate: 0.0008698155 +2025-03-25 08:24:11,384 Train Loss: 0.0002580, Val Loss: 0.0002943 +2025-03-25 08:24:11,384 Epoch 848/2000 +2025-03-25 08:25:17,673 Current Learning Rate: 0.0008644843 +2025-03-25 08:25:17,674 Train Loss: 0.0002567, Val Loss: 0.0002941 +2025-03-25 08:25:17,674 Epoch 849/2000 +2025-03-25 08:26:24,035 Current Learning Rate: 0.0008590631 +2025-03-25 08:26:24,036 Train Loss: 0.0002562, Val Loss: 0.0002947 +2025-03-25 08:26:24,036 Epoch 850/2000 +2025-03-25 08:27:30,583 Current Learning Rate: 0.0008535534 +2025-03-25 08:27:30,583 Train Loss: 0.0002559, Val Loss: 0.0002948 +2025-03-25 08:27:30,583 Epoch 851/2000 +2025-03-25 08:28:36,154 Current Learning Rate: 0.0008479564 +2025-03-25 08:28:36,155 Train Loss: 0.0002559, Val Loss: 0.0002951 +2025-03-25 08:28:36,155 Epoch 852/2000 +2025-03-25 08:29:42,618 Current Learning Rate: 0.0008422736 +2025-03-25 
08:29:42,619 Train Loss: 0.0002561, Val Loss: 0.0002958 +2025-03-25 08:29:42,620 Epoch 853/2000 +2025-03-25 08:30:48,790 Current Learning Rate: 0.0008365063 +2025-03-25 08:30:48,791 Train Loss: 0.0002571, Val Loss: 0.0002994 +2025-03-25 08:30:48,791 Epoch 854/2000 +2025-03-25 08:31:54,697 Current Learning Rate: 0.0008306559 +2025-03-25 08:31:54,698 Train Loss: 0.0002592, Val Loss: 0.0002984 +2025-03-25 08:31:54,698 Epoch 855/2000 +2025-03-25 08:33:00,603 Current Learning Rate: 0.0008247240 +2025-03-25 08:33:00,603 Train Loss: 0.0002601, Val Loss: 0.0002997 +2025-03-25 08:33:00,604 Epoch 856/2000 +2025-03-25 08:34:06,569 Current Learning Rate: 0.0008187120 +2025-03-25 08:34:06,570 Train Loss: 0.0002620, Val Loss: 0.0002991 +2025-03-25 08:34:06,570 Epoch 857/2000 +2025-03-25 08:35:12,683 Current Learning Rate: 0.0008126213 +2025-03-25 08:35:12,684 Train Loss: 0.0002611, Val Loss: 0.0002959 +2025-03-25 08:35:12,684 Epoch 858/2000 +2025-03-25 08:36:19,321 Current Learning Rate: 0.0008064535 +2025-03-25 08:36:19,322 Train Loss: 0.0002600, Val Loss: 0.0002978 +2025-03-25 08:36:19,322 Epoch 859/2000 +2025-03-25 08:37:25,920 Current Learning Rate: 0.0008002101 +2025-03-25 08:37:25,920 Train Loss: 0.0002588, Val Loss: 0.0002967 +2025-03-25 08:37:25,921 Epoch 860/2000 +2025-03-25 08:38:31,380 Current Learning Rate: 0.0007938926 +2025-03-25 08:38:31,381 Train Loss: 0.0002575, Val Loss: 0.0002933 +2025-03-25 08:38:31,381 Epoch 861/2000 +2025-03-25 08:39:37,251 Current Learning Rate: 0.0007875026 +2025-03-25 08:39:37,252 Train Loss: 0.0002558, Val Loss: 0.0002890 +2025-03-25 08:39:37,253 Epoch 862/2000 +2025-03-25 08:40:43,143 Current Learning Rate: 0.0007810417 +2025-03-25 08:40:43,144 Train Loss: 0.0002540, Val Loss: 0.0002886 +2025-03-25 08:40:43,145 Epoch 863/2000 +2025-03-25 08:41:49,555 Current Learning Rate: 0.0007745114 +2025-03-25 08:41:49,556 Train Loss: 0.0002531, Val Loss: 0.0002890 +2025-03-25 08:41:49,556 Epoch 864/2000 +2025-03-25 08:42:55,767 Current Learning 
Rate: 0.0007679134 +2025-03-25 08:42:55,767 Train Loss: 0.0002526, Val Loss: 0.0002888 +2025-03-25 08:42:55,768 Epoch 865/2000 +2025-03-25 08:44:02,294 Current Learning Rate: 0.0007612493 +2025-03-25 08:44:02,295 Train Loss: 0.0002521, Val Loss: 0.0002887 +2025-03-25 08:44:02,295 Epoch 866/2000 +2025-03-25 08:45:08,094 Current Learning Rate: 0.0007545207 +2025-03-25 08:45:08,096 Train Loss: 0.0002519, Val Loss: 0.0002888 +2025-03-25 08:45:08,097 Epoch 867/2000 +2025-03-25 08:46:14,413 Current Learning Rate: 0.0007477293 +2025-03-25 08:46:14,414 Train Loss: 0.0002518, Val Loss: 0.0002893 +2025-03-25 08:46:14,414 Epoch 868/2000 +2025-03-25 08:47:20,404 Current Learning Rate: 0.0007408768 +2025-03-25 08:47:20,404 Train Loss: 0.0002522, Val Loss: 0.0002910 +2025-03-25 08:47:20,405 Epoch 869/2000 +2025-03-25 08:48:26,830 Current Learning Rate: 0.0007339649 +2025-03-25 08:48:26,830 Train Loss: 0.0002535, Val Loss: 0.0002919 +2025-03-25 08:48:26,831 Epoch 870/2000 +2025-03-25 08:49:33,196 Current Learning Rate: 0.0007269952 +2025-03-25 08:49:33,196 Train Loss: 0.0002562, Val Loss: 0.0002881 +2025-03-25 08:49:33,197 Epoch 871/2000 +2025-03-25 08:50:39,722 Current Learning Rate: 0.0007199696 +2025-03-25 08:50:39,722 Train Loss: 0.0002564, Val Loss: 0.0002874 +2025-03-25 08:50:39,722 Epoch 872/2000 +2025-03-25 08:51:45,554 Current Learning Rate: 0.0007128896 +2025-03-25 08:51:45,555 Train Loss: 0.0002552, Val Loss: 0.0002953 +2025-03-25 08:51:45,555 Epoch 873/2000 +2025-03-25 08:52:51,629 Current Learning Rate: 0.0007057572 +2025-03-25 08:52:51,630 Train Loss: 0.0002551, Val Loss: 0.0002941 +2025-03-25 08:52:51,630 Epoch 874/2000 +2025-03-25 08:53:58,005 Current Learning Rate: 0.0006985739 +2025-03-25 08:53:58,006 Train Loss: 0.0002540, Val Loss: 0.0002885 +2025-03-25 08:53:58,006 Epoch 875/2000 +2025-03-25 08:55:03,356 Current Learning Rate: 0.0006913417 +2025-03-25 08:55:03,356 Train Loss: 0.0002526, Val Loss: 0.0002866 +2025-03-25 08:55:03,357 Epoch 876/2000 +2025-03-25 
08:56:09,861 Current Learning Rate: 0.0006840623 +2025-03-25 08:56:09,861 Train Loss: 0.0002509, Val Loss: 0.0002858 +2025-03-25 08:56:09,862 Epoch 877/2000 +2025-03-25 08:57:15,789 Current Learning Rate: 0.0006767374 +2025-03-25 08:57:15,790 Train Loss: 0.0002492, Val Loss: 0.0002831 +2025-03-25 08:57:15,790 Epoch 878/2000 +2025-03-25 08:58:21,625 Current Learning Rate: 0.0006693690 +2025-03-25 08:58:21,626 Train Loss: 0.0002484, Val Loss: 0.0002803 +2025-03-25 08:58:21,626 Epoch 879/2000 +2025-03-25 08:59:27,613 Current Learning Rate: 0.0006619587 +2025-03-25 08:59:27,614 Train Loss: 0.0002481, Val Loss: 0.0002802 +2025-03-25 08:59:27,615 Epoch 880/2000 +2025-03-25 09:00:33,768 Current Learning Rate: 0.0006545085 +2025-03-25 09:00:33,768 Train Loss: 0.0002480, Val Loss: 0.0002803 +2025-03-25 09:00:33,769 Epoch 881/2000 +2025-03-25 09:01:38,965 Current Learning Rate: 0.0006470202 +2025-03-25 09:01:38,966 Train Loss: 0.0002481, Val Loss: 0.0002803 +2025-03-25 09:01:38,966 Epoch 882/2000 +2025-03-25 09:02:45,132 Current Learning Rate: 0.0006394956 +2025-03-25 09:02:45,133 Train Loss: 0.0002487, Val Loss: 0.0002804 +2025-03-25 09:02:45,134 Epoch 883/2000 +2025-03-25 09:03:51,661 Current Learning Rate: 0.0006319365 +2025-03-25 09:03:51,661 Train Loss: 0.0002499, Val Loss: 0.0002857 +2025-03-25 09:03:51,661 Epoch 884/2000 +2025-03-25 09:04:57,590 Current Learning Rate: 0.0006243449 +2025-03-25 09:04:57,591 Train Loss: 0.0002501, Val Loss: 0.0002876 +2025-03-25 09:04:57,591 Epoch 885/2000 +2025-03-25 09:06:03,319 Current Learning Rate: 0.0006167227 +2025-03-25 09:06:03,320 Train Loss: 0.0002506, Val Loss: 0.0002804 +2025-03-25 09:06:03,320 Epoch 886/2000 +2025-03-25 09:07:09,769 Current Learning Rate: 0.0006090716 +2025-03-25 09:07:09,770 Train Loss: 0.0002500, Val Loss: 0.0002796 +2025-03-25 09:07:09,770 Epoch 887/2000 +2025-03-25 09:08:16,137 Current Learning Rate: 0.0006013936 +2025-03-25 09:08:16,138 Train Loss: 0.0002483, Val Loss: 0.0002785 +2025-03-25 
09:08:16,138 Epoch 888/2000 +2025-03-25 09:09:22,548 Current Learning Rate: 0.0005936907 +2025-03-25 09:09:22,549 Train Loss: 0.0002469, Val Loss: 0.0002769 +2025-03-25 09:09:22,549 Epoch 889/2000 +2025-03-25 09:10:28,189 Current Learning Rate: 0.0005859646 +2025-03-25 09:10:28,190 Train Loss: 0.0002457, Val Loss: 0.0002772 +2025-03-25 09:10:28,190 Epoch 890/2000 +2025-03-25 09:11:34,239 Current Learning Rate: 0.0005782172 +2025-03-25 09:11:34,240 Train Loss: 0.0002448, Val Loss: 0.0002774 +2025-03-25 09:11:34,240 Epoch 891/2000 +2025-03-25 09:12:40,714 Current Learning Rate: 0.0005704506 +2025-03-25 09:12:40,715 Train Loss: 0.0002443, Val Loss: 0.0002766 +2025-03-25 09:12:40,715 Epoch 892/2000 +2025-03-25 09:13:47,980 Current Learning Rate: 0.0005626666 +2025-03-25 09:13:47,980 Train Loss: 0.0002439, Val Loss: 0.0002759 +2025-03-25 09:13:47,981 Epoch 893/2000 +2025-03-25 09:14:54,152 Current Learning Rate: 0.0005548672 +2025-03-25 09:14:54,152 Train Loss: 0.0002438, Val Loss: 0.0002759 +2025-03-25 09:14:54,152 Epoch 894/2000 +2025-03-25 09:16:00,062 Current Learning Rate: 0.0005470542 +2025-03-25 09:16:00,062 Train Loss: 0.0002438, Val Loss: 0.0002776 +2025-03-25 09:16:00,062 Epoch 895/2000 +2025-03-25 09:17:06,647 Current Learning Rate: 0.0005392295 +2025-03-25 09:17:06,647 Train Loss: 0.0002442, Val Loss: 0.0002792 +2025-03-25 09:17:06,647 Epoch 896/2000 +2025-03-25 09:18:13,125 Current Learning Rate: 0.0005313953 +2025-03-25 09:18:13,125 Train Loss: 0.0002451, Val Loss: 0.0002742 +2025-03-25 09:18:13,126 Epoch 897/2000 +2025-03-25 09:19:19,097 Current Learning Rate: 0.0005235532 +2025-03-25 09:19:19,097 Train Loss: 0.0002460, Val Loss: 0.0002731 +2025-03-25 09:19:19,098 Epoch 898/2000 +2025-03-25 09:20:25,125 Current Learning Rate: 0.0005157054 +2025-03-25 09:20:25,126 Train Loss: 0.0002448, Val Loss: 0.0002707 +2025-03-25 09:20:25,126 Epoch 899/2000 +2025-03-25 09:21:30,902 Current Learning Rate: 0.0005078537 +2025-03-25 09:21:30,903 Train Loss: 0.0002434, Val 
Loss: 0.0002696 +2025-03-25 09:21:30,904 Epoch 900/2000 +2025-03-25 09:22:37,029 Current Learning Rate: 0.0005000000 +2025-03-25 09:22:37,030 Train Loss: 0.0002424, Val Loss: 0.0002692 +2025-03-25 09:22:37,030 Epoch 901/2000 +2025-03-25 09:23:43,138 Current Learning Rate: 0.0004921463 +2025-03-25 09:23:43,139 Train Loss: 0.0002416, Val Loss: 0.0002687 +2025-03-25 09:23:43,139 Epoch 902/2000 +2025-03-25 09:24:48,845 Current Learning Rate: 0.0004842946 +2025-03-25 09:24:48,846 Train Loss: 0.0002411, Val Loss: 0.0002686 +2025-03-25 09:24:48,846 Epoch 903/2000 +2025-03-25 09:25:55,167 Current Learning Rate: 0.0004764468 +2025-03-25 09:25:55,168 Train Loss: 0.0002408, Val Loss: 0.0002686 +2025-03-25 09:25:55,168 Epoch 904/2000 +2025-03-25 09:27:01,449 Current Learning Rate: 0.0004686047 +2025-03-25 09:27:01,450 Train Loss: 0.0002407, Val Loss: 0.0002686 +2025-03-25 09:27:01,450 Epoch 905/2000 +2025-03-25 09:28:25,345 Current Learning Rate: 0.0004607705 +2025-03-25 09:28:25,346 Train Loss: 0.0002409, Val Loss: 0.0002684 +2025-03-25 09:28:25,346 Epoch 906/2000 +2025-03-25 09:30:02,082 Current Learning Rate: 0.0004529458 +2025-03-25 09:30:02,082 Train Loss: 0.0002410, Val Loss: 0.0002675 +2025-03-25 09:30:02,083 Epoch 907/2000 +2025-03-25 09:31:37,564 Current Learning Rate: 0.0004451328 +2025-03-25 09:31:37,565 Train Loss: 0.0002407, Val Loss: 0.0002670 +2025-03-25 09:31:37,565 Epoch 908/2000 +2025-03-25 09:33:20,208 Current Learning Rate: 0.0004373334 +2025-03-25 09:33:20,209 Train Loss: 0.0002403, Val Loss: 0.0002666 +2025-03-25 09:33:20,209 Epoch 909/2000 +2025-03-25 09:35:04,199 Current Learning Rate: 0.0004295494 +2025-03-25 09:35:04,200 Train Loss: 0.0002396, Val Loss: 0.0002661 +2025-03-25 09:35:04,200 Epoch 910/2000 +2025-03-25 09:36:49,334 Current Learning Rate: 0.0004217828 +2025-03-25 09:36:49,335 Train Loss: 0.0002390, Val Loss: 0.0002656 +2025-03-25 09:36:49,336 Epoch 911/2000 +2025-03-25 09:38:34,305 Current Learning Rate: 0.0004140354 +2025-03-25 
09:38:34,306 Train Loss: 0.0002384, Val Loss: 0.0002651 +2025-03-25 09:38:34,306 Epoch 912/2000 +2025-03-25 09:40:19,463 Current Learning Rate: 0.0004063093 +2025-03-25 09:40:19,464 Train Loss: 0.0002380, Val Loss: 0.0002646 +2025-03-25 09:40:19,464 Epoch 913/2000 +2025-03-25 09:42:03,128 Current Learning Rate: 0.0003986064 +2025-03-25 09:42:03,129 Train Loss: 0.0002377, Val Loss: 0.0002643 +2025-03-25 09:42:03,129 Epoch 914/2000 +2025-03-25 09:43:48,790 Current Learning Rate: 0.0003909284 +2025-03-25 09:43:48,790 Train Loss: 0.0002375, Val Loss: 0.0002642 +2025-03-25 09:43:48,791 Epoch 915/2000 +2025-03-25 09:45:35,845 Current Learning Rate: 0.0003832773 +2025-03-25 09:45:35,846 Train Loss: 0.0002374, Val Loss: 0.0002639 +2025-03-25 09:45:35,846 Epoch 916/2000 +2025-03-25 09:47:22,835 Current Learning Rate: 0.0003756551 +2025-03-25 09:47:22,836 Train Loss: 0.0002373, Val Loss: 0.0002633 +2025-03-25 09:47:22,836 Epoch 917/2000 +2025-03-25 09:49:05,669 Current Learning Rate: 0.0003680635 +2025-03-25 09:49:05,670 Train Loss: 0.0002370, Val Loss: 0.0002632 +2025-03-25 09:49:05,670 Epoch 918/2000 +2025-03-25 09:50:50,876 Current Learning Rate: 0.0003605044 +2025-03-25 09:50:50,876 Train Loss: 0.0002367, Val Loss: 0.0002632 +2025-03-25 09:50:50,877 Epoch 919/2000 +2025-03-25 09:52:39,024 Current Learning Rate: 0.0003529798 +2025-03-25 09:52:39,024 Train Loss: 0.0002362, Val Loss: 0.0002631 +2025-03-25 09:52:39,024 Epoch 920/2000 +2025-03-25 09:54:26,394 Current Learning Rate: 0.0003454915 +2025-03-25 09:54:26,395 Train Loss: 0.0002357, Val Loss: 0.0002628 +2025-03-25 09:54:26,395 Epoch 921/2000 +2025-03-25 09:56:15,557 Current Learning Rate: 0.0003380413 +2025-03-25 09:56:15,558 Train Loss: 0.0002352, Val Loss: 0.0002623 +2025-03-25 09:56:15,558 Epoch 922/2000 +2025-03-25 09:58:05,919 Current Learning Rate: 0.0003306310 +2025-03-25 09:58:05,919 Train Loss: 0.0002348, Val Loss: 0.0002619 +2025-03-25 09:58:05,920 Epoch 923/2000 +2025-03-25 09:59:55,108 Current Learning 
Rate: 0.0003232626 +2025-03-25 09:59:55,109 Train Loss: 0.0002344, Val Loss: 0.0002615 +2025-03-25 09:59:55,109 Epoch 924/2000 +2025-03-25 10:01:42,755 Current Learning Rate: 0.0003159377 +2025-03-25 10:01:42,755 Train Loss: 0.0002342, Val Loss: 0.0002612 +2025-03-25 10:01:42,756 Epoch 925/2000 +2025-03-25 10:03:30,334 Current Learning Rate: 0.0003086583 +2025-03-25 10:03:30,334 Train Loss: 0.0002339, Val Loss: 0.0002609 +2025-03-25 10:03:30,335 Epoch 926/2000 +2025-03-25 10:05:16,713 Current Learning Rate: 0.0003014261 +2025-03-25 10:05:16,714 Train Loss: 0.0002337, Val Loss: 0.0002604 +2025-03-25 10:05:16,714 Epoch 927/2000 +2025-03-25 10:07:04,931 Current Learning Rate: 0.0002942428 +2025-03-25 10:07:04,931 Train Loss: 0.0002334, Val Loss: 0.0002599 +2025-03-25 10:07:04,931 Epoch 928/2000 +2025-03-25 10:08:50,713 Current Learning Rate: 0.0002871104 +2025-03-25 10:08:50,714 Train Loss: 0.0002332, Val Loss: 0.0002597 +2025-03-25 10:08:50,714 Epoch 929/2000 +2025-03-25 10:09:57,306 Current Learning Rate: 0.0002800304 +2025-03-25 10:09:57,306 Train Loss: 0.0002329, Val Loss: 0.0002596 +2025-03-25 10:09:57,306 Epoch 930/2000 +2025-03-25 10:11:03,701 Current Learning Rate: 0.0002730048 +2025-03-25 10:11:03,701 Train Loss: 0.0002325, Val Loss: 0.0002592 +2025-03-25 10:11:03,701 Epoch 931/2000 +2025-03-25 10:12:14,347 Current Learning Rate: 0.0002660351 +2025-03-25 10:12:14,348 Train Loss: 0.0002321, Val Loss: 0.0002587 +2025-03-25 10:12:14,348 Epoch 932/2000 +2025-03-25 10:13:20,391 Current Learning Rate: 0.0002591232 +2025-03-25 10:13:20,482 Train Loss: 0.0002317, Val Loss: 0.0002582 +2025-03-25 10:13:20,482 Epoch 933/2000 +2025-03-25 10:14:26,531 Current Learning Rate: 0.0002522707 +2025-03-25 10:14:26,610 Train Loss: 0.0002314, Val Loss: 0.0002579 +2025-03-25 10:14:26,610 Epoch 934/2000 +2025-03-25 10:15:39,294 Current Learning Rate: 0.0002454793 +2025-03-25 10:15:39,371 Train Loss: 0.0002311, Val Loss: 0.0002576 +2025-03-25 10:15:39,371 Epoch 935/2000 +2025-03-25 
10:16:45,720 Current Learning Rate: 0.0002387507 +2025-03-25 10:16:45,800 Train Loss: 0.0002308, Val Loss: 0.0002574 +2025-03-25 10:16:45,800 Epoch 936/2000 +2025-03-25 10:17:52,384 Current Learning Rate: 0.0002320866 +2025-03-25 10:17:52,479 Train Loss: 0.0002305, Val Loss: 0.0002571 +2025-03-25 10:17:52,479 Epoch 937/2000 +2025-03-25 10:18:58,905 Current Learning Rate: 0.0002254886 +2025-03-25 10:18:58,985 Train Loss: 0.0002303, Val Loss: 0.0002567 +2025-03-25 10:18:58,986 Epoch 938/2000 +2025-03-25 10:20:09,670 Current Learning Rate: 0.0002189583 +2025-03-25 10:20:09,746 Train Loss: 0.0002300, Val Loss: 0.0002562 +2025-03-25 10:20:09,746 Epoch 939/2000 +2025-03-25 10:21:54,942 Current Learning Rate: 0.0002124974 +2025-03-25 10:21:55,027 Train Loss: 0.0002297, Val Loss: 0.0002557 +2025-03-25 10:21:55,027 Epoch 940/2000 +2025-03-25 10:23:22,894 Current Learning Rate: 0.0002061074 +2025-03-25 10:23:22,977 Train Loss: 0.0002293, Val Loss: 0.0002553 +2025-03-25 10:23:22,977 Epoch 941/2000 +2025-03-25 10:25:13,114 Current Learning Rate: 0.0001997899 +2025-03-25 10:25:13,199 Train Loss: 0.0002290, Val Loss: 0.0002551 +2025-03-25 10:25:13,200 Epoch 942/2000 +2025-03-25 10:26:56,081 Current Learning Rate: 0.0001935465 +2025-03-25 10:26:56,188 Train Loss: 0.0002288, Val Loss: 0.0002549 +2025-03-25 10:26:56,188 Epoch 943/2000 +2025-03-25 10:28:40,685 Current Learning Rate: 0.0001873787 +2025-03-25 10:28:40,767 Train Loss: 0.0002285, Val Loss: 0.0002547 +2025-03-25 10:28:40,767 Epoch 944/2000 +2025-03-25 10:30:33,982 Current Learning Rate: 0.0001812880 +2025-03-25 10:30:34,067 Train Loss: 0.0002282, Val Loss: 0.0002545 +2025-03-25 10:30:34,067 Epoch 945/2000 +2025-03-25 10:32:29,440 Current Learning Rate: 0.0001752760 +2025-03-25 10:32:29,514 Train Loss: 0.0002279, Val Loss: 0.0002544 +2025-03-25 10:32:29,514 Epoch 946/2000 +2025-03-25 10:34:27,840 Current Learning Rate: 0.0001693441 +2025-03-25 10:34:27,936 Train Loss: 0.0002277, Val Loss: 0.0002543 +2025-03-25 
10:34:27,936 Epoch 947/2000 +2025-03-25 10:36:24,909 Current Learning Rate: 0.0001634937 +2025-03-25 10:36:24,995 Train Loss: 0.0002274, Val Loss: 0.0002543 +2025-03-25 10:36:24,996 Epoch 948/2000 +2025-03-25 10:38:21,668 Current Learning Rate: 0.0001577264 +2025-03-25 10:38:21,744 Train Loss: 0.0002272, Val Loss: 0.0002542 +2025-03-25 10:38:21,744 Epoch 949/2000 +2025-03-25 10:40:12,629 Current Learning Rate: 0.0001520436 +2025-03-25 10:40:12,630 Train Loss: 0.0002270, Val Loss: 0.0002542 +2025-03-25 10:40:12,630 Epoch 950/2000 +2025-03-25 10:42:09,124 Current Learning Rate: 0.0001464466 +2025-03-25 10:42:09,125 Train Loss: 0.0002267, Val Loss: 0.0002542 +2025-03-25 10:42:09,125 Epoch 951/2000 +2025-03-25 10:43:52,777 Current Learning Rate: 0.0001409369 +2025-03-25 10:43:52,778 Train Loss: 0.0002265, Val Loss: 0.0002543 +2025-03-25 10:43:52,778 Epoch 952/2000 +2025-03-25 10:45:38,882 Current Learning Rate: 0.0001355157 +2025-03-25 10:45:38,883 Train Loss: 0.0002263, Val Loss: 0.0002543 +2025-03-25 10:45:38,883 Epoch 953/2000 +2025-03-25 10:47:31,270 Current Learning Rate: 0.0001301845 +2025-03-25 10:47:31,271 Train Loss: 0.0002260, Val Loss: 0.0002543 +2025-03-25 10:47:31,271 Epoch 954/2000 +2025-03-25 10:49:26,521 Current Learning Rate: 0.0001249445 +2025-03-25 10:49:26,601 Train Loss: 0.0002258, Val Loss: 0.0002541 +2025-03-25 10:49:26,601 Epoch 955/2000 +2025-03-25 10:51:23,736 Current Learning Rate: 0.0001197970 +2025-03-25 10:51:23,809 Train Loss: 0.0002256, Val Loss: 0.0002538 +2025-03-25 10:51:23,810 Epoch 956/2000 +2025-03-25 10:53:21,286 Current Learning Rate: 0.0001147434 +2025-03-25 10:53:21,363 Train Loss: 0.0002253, Val Loss: 0.0002534 +2025-03-25 10:53:21,363 Epoch 957/2000 +2025-03-25 10:55:05,091 Current Learning Rate: 0.0001097848 +2025-03-25 10:55:05,175 Train Loss: 0.0002251, Val Loss: 0.0002529 +2025-03-25 10:55:05,176 Epoch 958/2000 +2025-03-25 10:56:55,252 Current Learning Rate: 0.0001049225 +2025-03-25 10:56:55,339 Train Loss: 0.0002249, Val 
Loss: 0.0002522 +2025-03-25 10:56:55,340 Epoch 959/2000 +2025-03-25 10:58:39,855 Current Learning Rate: 0.0001001577 +2025-03-25 10:58:39,943 Train Loss: 0.0002246, Val Loss: 0.0002518 +2025-03-25 10:58:39,944 Epoch 960/2000 +2025-03-25 11:00:23,831 Current Learning Rate: 0.0000954915 +2025-03-25 11:00:23,910 Train Loss: 0.0002244, Val Loss: 0.0002515 +2025-03-25 11:00:23,910 Epoch 961/2000 +2025-03-25 11:01:29,616 Current Learning Rate: 0.0000909251 +2025-03-25 11:01:29,707 Train Loss: 0.0002242, Val Loss: 0.0002513 +2025-03-25 11:01:29,707 Epoch 962/2000 +2025-03-25 11:02:35,730 Current Learning Rate: 0.0000864597 +2025-03-25 11:02:35,818 Train Loss: 0.0002240, Val Loss: 0.0002509 +2025-03-25 11:02:35,819 Epoch 963/2000 +2025-03-25 11:03:41,227 Current Learning Rate: 0.0000820963 +2025-03-25 11:03:41,297 Train Loss: 0.0002238, Val Loss: 0.0002506 +2025-03-25 11:03:41,297 Epoch 964/2000 +2025-03-25 11:04:47,610 Current Learning Rate: 0.0000778360 +2025-03-25 11:04:47,698 Train Loss: 0.0002236, Val Loss: 0.0002503 +2025-03-25 11:04:47,698 Epoch 965/2000 +2025-03-25 11:05:55,018 Current Learning Rate: 0.0000736799 +2025-03-25 11:05:55,174 Train Loss: 0.0002234, Val Loss: 0.0002501 +2025-03-25 11:05:55,174 Epoch 966/2000 +2025-03-25 11:07:14,874 Current Learning Rate: 0.0000696290 +2025-03-25 11:07:14,995 Train Loss: 0.0002232, Val Loss: 0.0002499 +2025-03-25 11:07:14,995 Epoch 967/2000 +2025-03-25 11:08:28,503 Current Learning Rate: 0.0000656842 +2025-03-25 11:08:28,580 Train Loss: 0.0002230, Val Loss: 0.0002497 +2025-03-25 11:08:28,580 Epoch 968/2000 +2025-03-25 11:09:35,085 Current Learning Rate: 0.0000618467 +2025-03-25 11:09:35,478 Train Loss: 0.0002229, Val Loss: 0.0002496 +2025-03-25 11:09:35,478 Epoch 969/2000 +2025-03-25 11:10:41,473 Current Learning Rate: 0.0000581172 +2025-03-25 11:10:41,570 Train Loss: 0.0002227, Val Loss: 0.0002495 +2025-03-25 11:10:41,570 Epoch 970/2000 +2025-03-25 11:11:49,197 Current Learning Rate: 0.0000544967 +2025-03-25 
11:11:49,265 Train Loss: 0.0002225, Val Loss: 0.0002494 +2025-03-25 11:11:49,265 Epoch 971/2000 +2025-03-25 11:12:55,592 Current Learning Rate: 0.0000509862 +2025-03-25 11:12:55,668 Train Loss: 0.0002223, Val Loss: 0.0002493 +2025-03-25 11:12:55,668 Epoch 972/2000 +2025-03-25 11:14:15,380 Current Learning Rate: 0.0000475865 +2025-03-25 11:14:15,458 Train Loss: 0.0002222, Val Loss: 0.0002492 +2025-03-25 11:14:15,458 Epoch 973/2000 +2025-03-25 11:15:21,420 Current Learning Rate: 0.0000442984 +2025-03-25 11:15:21,502 Train Loss: 0.0002220, Val Loss: 0.0002490 +2025-03-25 11:15:21,502 Epoch 974/2000 +2025-03-25 11:16:27,532 Current Learning Rate: 0.0000411227 +2025-03-25 11:16:27,620 Train Loss: 0.0002218, Val Loss: 0.0002488 +2025-03-25 11:16:27,621 Epoch 975/2000 +2025-03-25 11:17:35,010 Current Learning Rate: 0.0000380602 +2025-03-25 11:17:35,093 Train Loss: 0.0002217, Val Loss: 0.0002487 +2025-03-25 11:17:35,094 Epoch 976/2000 +2025-03-25 11:18:51,047 Current Learning Rate: 0.0000351118 +2025-03-25 11:18:51,124 Train Loss: 0.0002215, Val Loss: 0.0002485 +2025-03-25 11:18:51,124 Epoch 977/2000 +2025-03-25 11:19:56,694 Current Learning Rate: 0.0000322780 +2025-03-25 11:19:56,782 Train Loss: 0.0002214, Val Loss: 0.0002484 +2025-03-25 11:19:56,783 Epoch 978/2000 +2025-03-25 11:21:04,364 Current Learning Rate: 0.0000295596 +2025-03-25 11:21:04,432 Train Loss: 0.0002212, Val Loss: 0.0002483 +2025-03-25 11:21:04,433 Epoch 979/2000 +2025-03-25 11:22:20,397 Current Learning Rate: 0.0000269573 +2025-03-25 11:22:20,476 Train Loss: 0.0002211, Val Loss: 0.0002483 +2025-03-25 11:22:20,477 Epoch 980/2000 +2025-03-25 11:23:27,045 Current Learning Rate: 0.0000244717 +2025-03-25 11:23:27,123 Train Loss: 0.0002210, Val Loss: 0.0002482 +2025-03-25 11:23:27,123 Epoch 981/2000 +2025-03-25 11:24:32,875 Current Learning Rate: 0.0000221035 +2025-03-25 11:24:32,961 Train Loss: 0.0002208, Val Loss: 0.0002482 +2025-03-25 11:24:32,961 Epoch 982/2000 +2025-03-25 11:25:38,684 Current Learning 
Rate: 0.0000198532 +2025-03-25 11:25:38,778 Train Loss: 0.0002207, Val Loss: 0.0002481 +2025-03-25 11:25:38,779 Epoch 983/2000 +2025-03-25 11:26:44,567 Current Learning Rate: 0.0000177213 +2025-03-25 11:26:44,651 Train Loss: 0.0002206, Val Loss: 0.0002480 +2025-03-25 11:26:44,651 Epoch 984/2000 +2025-03-25 11:27:50,938 Current Learning Rate: 0.0000157084 +2025-03-25 11:27:51,023 Train Loss: 0.0002205, Val Loss: 0.0002479 +2025-03-25 11:27:51,023 Epoch 985/2000 +2025-03-25 11:28:56,508 Current Learning Rate: 0.0000138150 +2025-03-25 11:28:56,594 Train Loss: 0.0002204, Val Loss: 0.0002478 +2025-03-25 11:28:56,594 Epoch 986/2000 +2025-03-25 11:30:02,619 Current Learning Rate: 0.0000120416 +2025-03-25 11:30:02,696 Train Loss: 0.0002203, Val Loss: 0.0002477 +2025-03-25 11:30:02,697 Epoch 987/2000 +2025-03-25 11:31:08,054 Current Learning Rate: 0.0000103886 +2025-03-25 11:31:08,150 Train Loss: 0.0002202, Val Loss: 0.0002477 +2025-03-25 11:31:08,151 Epoch 988/2000 +2025-03-25 11:32:14,584 Current Learning Rate: 0.0000088564 +2025-03-25 11:32:14,662 Train Loss: 0.0002201, Val Loss: 0.0002476 +2025-03-25 11:32:14,662 Epoch 989/2000 +2025-03-25 11:33:21,061 Current Learning Rate: 0.0000074453 +2025-03-25 11:33:21,142 Train Loss: 0.0002200, Val Loss: 0.0002476 +2025-03-25 11:33:21,143 Epoch 990/2000 +2025-03-25 11:34:26,428 Current Learning Rate: 0.0000061558 +2025-03-25 11:34:26,521 Train Loss: 0.0002199, Val Loss: 0.0002475 +2025-03-25 11:34:26,522 Epoch 991/2000 +2025-03-25 11:35:33,071 Current Learning Rate: 0.0000049882 +2025-03-25 11:35:33,148 Train Loss: 0.0002198, Val Loss: 0.0002475 +2025-03-25 11:35:33,148 Epoch 992/2000 +2025-03-25 11:36:39,840 Current Learning Rate: 0.0000039426 +2025-03-25 11:36:39,928 Train Loss: 0.0002198, Val Loss: 0.0002475 +2025-03-25 11:36:39,928 Epoch 993/2000 +2025-03-25 11:37:46,361 Current Learning Rate: 0.0000030195 +2025-03-25 11:37:46,438 Train Loss: 0.0002197, Val Loss: 0.0002474 +2025-03-25 11:37:46,439 Epoch 994/2000 +2025-03-25 
11:38:52,562 Current Learning Rate: 0.0000022190 +2025-03-25 11:38:52,641 Train Loss: 0.0002197, Val Loss: 0.0002474 +2025-03-25 11:38:52,642 Epoch 995/2000 +2025-03-25 11:39:58,973 Current Learning Rate: 0.0000015413 +2025-03-25 11:39:59,057 Train Loss: 0.0002196, Val Loss: 0.0002474 +2025-03-25 11:39:59,058 Epoch 996/2000 +2025-03-25 11:41:04,546 Current Learning Rate: 0.0000009866 +2025-03-25 11:41:04,547 Train Loss: 0.0002196, Val Loss: 0.0002474 +2025-03-25 11:41:04,547 Epoch 997/2000 +2025-03-25 11:42:10,274 Current Learning Rate: 0.0000005551 +2025-03-25 11:42:10,349 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:42:10,349 Epoch 998/2000 +2025-03-25 11:43:16,937 Current Learning Rate: 0.0000002467 +2025-03-25 11:43:17,025 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:43:17,025 Epoch 999/2000 +2025-03-25 11:44:23,211 Current Learning Rate: 0.0000000617 +2025-03-25 11:44:23,212 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:44:23,212 Epoch 1000/2000 +2025-03-25 11:45:28,875 Current Learning Rate: 0.0000000000 +2025-03-25 11:45:28,876 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:45:28,876 Epoch 1001/2000 +2025-03-25 11:46:35,414 Current Learning Rate: 0.0000000617 +2025-03-25 11:46:35,414 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:46:35,414 Epoch 1002/2000 +2025-03-25 11:47:42,039 Current Learning Rate: 0.0000002467 +2025-03-25 11:47:42,040 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:47:42,040 Epoch 1003/2000 +2025-03-25 11:48:49,254 Current Learning Rate: 0.0000005551 +2025-03-25 11:48:49,254 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:48:49,254 Epoch 1004/2000 +2025-03-25 11:49:55,449 Current Learning Rate: 0.0000009866 +2025-03-25 11:49:55,531 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:49:55,531 Epoch 1005/2000 +2025-03-25 11:51:01,800 Current Learning Rate: 0.0000015413 +2025-03-25 11:51:01,801 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 
11:51:01,801 Epoch 1006/2000 +2025-03-25 11:52:07,899 Current Learning Rate: 0.0000022190 +2025-03-25 11:52:07,900 Train Loss: 0.0002195, Val Loss: 0.0002474 +2025-03-25 11:52:07,900 Epoch 1007/2000 +2025-03-25 11:53:14,207 Current Learning Rate: 0.0000030195 +2025-03-25 11:53:14,289 Train Loss: 0.0002196, Val Loss: 0.0002474 +2025-03-25 11:53:14,290 Epoch 1008/2000 +2025-03-25 11:54:19,954 Current Learning Rate: 0.0000039426 +2025-03-25 11:54:20,062 Train Loss: 0.0002196, Val Loss: 0.0002474 +2025-03-25 11:54:20,063 Epoch 1009/2000 +2025-03-25 11:55:26,417 Current Learning Rate: 0.0000049882 +2025-03-25 11:55:26,418 Train Loss: 0.0002197, Val Loss: 0.0002474 +2025-03-25 11:55:26,418 Epoch 1010/2000 +2025-03-25 11:56:32,710 Current Learning Rate: 0.0000061558 +2025-03-25 11:56:32,711 Train Loss: 0.0002198, Val Loss: 0.0002474 +2025-03-25 11:56:32,711 Epoch 1011/2000 +2025-03-25 11:57:38,826 Current Learning Rate: 0.0000074453 +2025-03-25 11:57:38,826 Train Loss: 0.0002198, Val Loss: 0.0002475 +2025-03-25 11:57:38,827 Epoch 1012/2000 +2025-03-25 11:58:45,256 Current Learning Rate: 0.0000088564 +2025-03-25 11:58:45,256 Train Loss: 0.0002199, Val Loss: 0.0002475 +2025-03-25 11:58:45,257 Epoch 1013/2000 +2025-03-25 11:59:51,798 Current Learning Rate: 0.0000103886 +2025-03-25 11:59:51,798 Train Loss: 0.0002200, Val Loss: 0.0002475 +2025-03-25 11:59:51,798 Epoch 1014/2000 +2025-03-25 12:00:57,635 Current Learning Rate: 0.0000120416 +2025-03-25 12:00:57,635 Train Loss: 0.0002200, Val Loss: 0.0002476 +2025-03-25 12:00:57,636 Epoch 1015/2000 +2025-03-25 12:02:03,414 Current Learning Rate: 0.0000138150 +2025-03-25 12:02:03,415 Train Loss: 0.0002201, Val Loss: 0.0002476 +2025-03-25 12:02:03,415 Epoch 1016/2000 +2025-03-25 12:03:10,156 Current Learning Rate: 0.0000157084 +2025-03-25 12:03:10,157 Train Loss: 0.0002202, Val Loss: 0.0002477 +2025-03-25 12:03:10,158 Epoch 1017/2000 +2025-03-25 12:04:16,299 Current Learning Rate: 0.0000177213 +2025-03-25 12:04:16,299 Train Loss: 
0.0002203, Val Loss: 0.0002477 +2025-03-25 12:04:16,300 Epoch 1018/2000 +2025-03-25 12:05:22,608 Current Learning Rate: 0.0000198532 +2025-03-25 12:05:22,609 Train Loss: 0.0002204, Val Loss: 0.0002478 +2025-03-25 12:05:22,609 Epoch 1019/2000 +2025-03-25 12:06:28,427 Current Learning Rate: 0.0000221035 +2025-03-25 12:06:28,427 Train Loss: 0.0002205, Val Loss: 0.0002479 +2025-03-25 12:06:28,427 Epoch 1020/2000 +2025-03-25 12:07:35,224 Current Learning Rate: 0.0000244717 +2025-03-25 12:07:35,225 Train Loss: 0.0002206, Val Loss: 0.0002479 +2025-03-25 12:07:35,225 Epoch 1021/2000 +2025-03-25 12:08:41,001 Current Learning Rate: 0.0000269573 +2025-03-25 12:08:41,002 Train Loss: 0.0002207, Val Loss: 0.0002480 +2025-03-25 12:08:41,002 Epoch 1022/2000 +2025-03-25 12:09:47,024 Current Learning Rate: 0.0000295596 +2025-03-25 12:09:47,025 Train Loss: 0.0002208, Val Loss: 0.0002480 +2025-03-25 12:09:47,025 Epoch 1023/2000 +2025-03-25 12:10:53,154 Current Learning Rate: 0.0000322780 +2025-03-25 12:10:53,155 Train Loss: 0.0002209, Val Loss: 0.0002481 +2025-03-25 12:10:53,155 Epoch 1024/2000 +2025-03-25 12:11:58,917 Current Learning Rate: 0.0000351118 +2025-03-25 12:11:58,917 Train Loss: 0.0002210, Val Loss: 0.0002481 +2025-03-25 12:11:58,918 Epoch 1025/2000 +2025-03-25 12:13:05,158 Current Learning Rate: 0.0000380602 +2025-03-25 12:13:05,158 Train Loss: 0.0002211, Val Loss: 0.0002482 +2025-03-25 12:13:05,158 Epoch 1026/2000 +2025-03-25 12:14:11,771 Current Learning Rate: 0.0000411227 +2025-03-25 12:14:11,771 Train Loss: 0.0002212, Val Loss: 0.0002483 +2025-03-25 12:14:11,771 Epoch 1027/2000 +2025-03-25 12:15:17,956 Current Learning Rate: 0.0000442984 +2025-03-25 12:15:17,957 Train Loss: 0.0002214, Val Loss: 0.0002484 +2025-03-25 12:15:17,957 Epoch 1028/2000 +2025-03-25 12:16:24,185 Current Learning Rate: 0.0000475865 +2025-03-25 12:16:24,185 Train Loss: 0.0002215, Val Loss: 0.0002485 +2025-03-25 12:16:24,186 Epoch 1029/2000 +2025-03-25 12:17:30,442 Current Learning Rate: 
0.0000509862 +2025-03-25 12:17:30,443 Train Loss: 0.0002216, Val Loss: 0.0002486 +2025-03-25 12:17:30,443 Epoch 1030/2000 +2025-03-25 12:18:36,690 Current Learning Rate: 0.0000544967 +2025-03-25 12:18:36,691 Train Loss: 0.0002218, Val Loss: 0.0002487 +2025-03-25 12:18:36,691 Epoch 1031/2000 +2025-03-25 12:19:42,915 Current Learning Rate: 0.0000581172 +2025-03-25 12:19:42,915 Train Loss: 0.0002219, Val Loss: 0.0002488 +2025-03-25 12:19:42,916 Epoch 1032/2000 +2025-03-25 12:20:48,802 Current Learning Rate: 0.0000618467 +2025-03-25 12:20:48,803 Train Loss: 0.0002220, Val Loss: 0.0002489 +2025-03-25 12:20:48,804 Epoch 1033/2000 +2025-03-25 12:21:55,174 Current Learning Rate: 0.0000656842 +2025-03-25 12:21:55,174 Train Loss: 0.0002222, Val Loss: 0.0002490 +2025-03-25 12:21:55,174 Epoch 1034/2000 +2025-03-25 12:23:01,555 Current Learning Rate: 0.0000696290 +2025-03-25 12:23:01,556 Train Loss: 0.0002223, Val Loss: 0.0002492 +2025-03-25 12:23:01,556 Epoch 1035/2000 +2025-03-25 12:24:08,287 Current Learning Rate: 0.0000736799 +2025-03-25 12:24:08,287 Train Loss: 0.0002225, Val Loss: 0.0002494 +2025-03-25 12:24:08,288 Epoch 1036/2000 +2025-03-25 12:25:14,047 Current Learning Rate: 0.0000778360 +2025-03-25 12:25:14,047 Train Loss: 0.0002226, Val Loss: 0.0002496 +2025-03-25 12:25:14,048 Epoch 1037/2000 +2025-03-25 12:26:19,889 Current Learning Rate: 0.0000820963 +2025-03-25 12:26:19,890 Train Loss: 0.0002228, Val Loss: 0.0002498 +2025-03-25 12:26:19,890 Epoch 1038/2000 +2025-03-25 12:27:26,174 Current Learning Rate: 0.0000864597 +2025-03-25 12:27:26,174 Train Loss: 0.0002229, Val Loss: 0.0002500 +2025-03-25 12:27:26,174 Epoch 1039/2000 +2025-03-25 12:28:32,421 Current Learning Rate: 0.0000909251 +2025-03-25 12:28:32,422 Train Loss: 0.0002231, Val Loss: 0.0002503 +2025-03-25 12:28:32,422 Epoch 1040/2000 +2025-03-25 12:29:38,068 Current Learning Rate: 0.0000954915 +2025-03-25 12:29:38,069 Train Loss: 0.0002233, Val Loss: 0.0002505 +2025-03-25 12:29:38,070 Epoch 1041/2000 
+2025-03-25 12:30:44,089 Current Learning Rate: 0.0001001577 +2025-03-25 12:30:44,089 Train Loss: 0.0002234, Val Loss: 0.0002508 +2025-03-25 12:30:44,090 Epoch 1042/2000 +2025-03-25 12:31:50,284 Current Learning Rate: 0.0001049225 +2025-03-25 12:31:50,285 Train Loss: 0.0002236, Val Loss: 0.0002509 +2025-03-25 12:31:50,285 Epoch 1043/2000 +2025-03-25 12:32:56,791 Current Learning Rate: 0.0001097848 +2025-03-25 12:32:56,791 Train Loss: 0.0002238, Val Loss: 0.0002510 +2025-03-25 12:32:56,792 Epoch 1044/2000 +2025-03-25 12:34:03,042 Current Learning Rate: 0.0001147434 +2025-03-25 12:34:03,043 Train Loss: 0.0002239, Val Loss: 0.0002511 +2025-03-25 12:34:03,043 Epoch 1045/2000 +2025-03-25 12:35:09,238 Current Learning Rate: 0.0001197970 +2025-03-25 12:35:09,238 Train Loss: 0.0002241, Val Loss: 0.0002512 +2025-03-25 12:35:09,238 Epoch 1046/2000 +2025-03-25 12:36:15,545 Current Learning Rate: 0.0001249445 +2025-03-25 12:36:15,545 Train Loss: 0.0002243, Val Loss: 0.0002512 +2025-03-25 12:36:15,546 Epoch 1047/2000 +2025-03-25 12:37:21,402 Current Learning Rate: 0.0001301845 +2025-03-25 12:37:21,403 Train Loss: 0.0002245, Val Loss: 0.0002513 +2025-03-25 12:37:21,403 Epoch 1048/2000 +2025-03-25 12:38:27,026 Current Learning Rate: 0.0001355157 +2025-03-25 12:38:27,027 Train Loss: 0.0002246, Val Loss: 0.0002514 +2025-03-25 12:38:27,027 Epoch 1049/2000 +2025-03-25 12:39:32,490 Current Learning Rate: 0.0001409369 +2025-03-25 12:39:32,491 Train Loss: 0.0002248, Val Loss: 0.0002515 +2025-03-25 12:39:32,492 Epoch 1050/2000 +2025-03-25 12:40:38,125 Current Learning Rate: 0.0001464466 +2025-03-25 12:40:38,125 Train Loss: 0.0002250, Val Loss: 0.0002515 +2025-03-25 12:40:38,126 Epoch 1051/2000 +2025-03-25 12:41:44,733 Current Learning Rate: 0.0001520436 +2025-03-25 12:41:44,734 Train Loss: 0.0002252, Val Loss: 0.0002516 +2025-03-25 12:41:44,734 Epoch 1052/2000 +2025-03-25 12:42:50,849 Current Learning Rate: 0.0001577264 +2025-03-25 12:42:50,850 Train Loss: 0.0002254, Val Loss: 0.0002518 
+2025-03-25 12:42:50,850 Epoch 1053/2000 +2025-03-25 12:43:56,971 Current Learning Rate: 0.0001634937 +2025-03-25 12:43:56,971 Train Loss: 0.0002256, Val Loss: 0.0002519 +2025-03-25 12:43:56,972 Epoch 1054/2000 +2025-03-25 12:45:03,000 Current Learning Rate: 0.0001693441 +2025-03-25 12:45:03,000 Train Loss: 0.0002258, Val Loss: 0.0002521 +2025-03-25 12:45:03,001 Epoch 1055/2000 +2025-03-25 12:46:09,275 Current Learning Rate: 0.0001752760 +2025-03-25 12:46:09,275 Train Loss: 0.0002260, Val Loss: 0.0002522 +2025-03-25 12:46:09,276 Epoch 1056/2000 +2025-03-25 12:47:15,502 Current Learning Rate: 0.0001812880 +2025-03-25 12:47:15,502 Train Loss: 0.0002262, Val Loss: 0.0002524 +2025-03-25 12:47:15,503 Epoch 1057/2000 +2025-03-25 12:48:21,768 Current Learning Rate: 0.0001873787 +2025-03-25 12:48:21,769 Train Loss: 0.0002264, Val Loss: 0.0002526 +2025-03-25 12:48:21,769 Epoch 1058/2000 +2025-03-25 12:49:27,597 Current Learning Rate: 0.0001935465 +2025-03-25 12:49:27,598 Train Loss: 0.0002266, Val Loss: 0.0002528 +2025-03-25 12:49:27,598 Epoch 1059/2000 +2025-03-25 12:50:34,776 Current Learning Rate: 0.0001997899 +2025-03-25 12:50:34,777 Train Loss: 0.0002268, Val Loss: 0.0002531 +2025-03-25 12:50:34,777 Epoch 1060/2000 +2025-03-25 12:51:40,219 Current Learning Rate: 0.0002061074 +2025-03-25 12:51:40,219 Train Loss: 0.0002270, Val Loss: 0.0002533 +2025-03-25 12:51:40,220 Epoch 1061/2000 +2025-03-25 12:52:46,054 Current Learning Rate: 0.0002124974 +2025-03-25 12:52:46,055 Train Loss: 0.0002273, Val Loss: 0.0002535 +2025-03-25 12:52:46,055 Epoch 1062/2000 +2025-03-25 12:53:52,430 Current Learning Rate: 0.0002189583 +2025-03-25 12:53:52,431 Train Loss: 0.0002275, Val Loss: 0.0002537 +2025-03-25 12:53:52,431 Epoch 1063/2000 +2025-03-25 12:54:57,953 Current Learning Rate: 0.0002254886 +2025-03-25 12:54:57,954 Train Loss: 0.0002277, Val Loss: 0.0002540 +2025-03-25 12:54:57,955 Epoch 1064/2000 +2025-03-25 12:56:02,764 Current Learning Rate: 0.0002320866 +2025-03-25 12:56:02,765 
Train Loss: 0.0002279, Val Loss: 0.0002542 +2025-03-25 12:56:02,765 Epoch 1065/2000 +2025-03-25 12:57:09,187 Current Learning Rate: 0.0002387507 +2025-03-25 12:57:09,188 Train Loss: 0.0002282, Val Loss: 0.0002545 +2025-03-25 12:57:09,188 Epoch 1066/2000 +2025-03-25 12:58:15,562 Current Learning Rate: 0.0002454793 +2025-03-25 12:58:15,562 Train Loss: 0.0002284, Val Loss: 0.0002548 +2025-03-25 12:58:15,563 Epoch 1067/2000 +2025-03-25 12:59:21,531 Current Learning Rate: 0.0002522707 +2025-03-25 12:59:21,532 Train Loss: 0.0002286, Val Loss: 0.0002550 +2025-03-25 12:59:21,532 Epoch 1068/2000 +2025-03-25 13:00:27,031 Current Learning Rate: 0.0002591232 +2025-03-25 13:00:27,032 Train Loss: 0.0002289, Val Loss: 0.0002553 +2025-03-25 13:00:27,033 Epoch 1069/2000 +2025-03-25 13:01:32,916 Current Learning Rate: 0.0002660351 +2025-03-25 13:01:32,917 Train Loss: 0.0002291, Val Loss: 0.0002556 +2025-03-25 13:01:32,917 Epoch 1070/2000 +2025-03-25 13:02:39,686 Current Learning Rate: 0.0002730048 +2025-03-25 13:02:39,686 Train Loss: 0.0002293, Val Loss: 0.0002558 +2025-03-25 13:02:39,687 Epoch 1071/2000 +2025-03-25 13:03:45,729 Current Learning Rate: 0.0002800304 +2025-03-25 13:03:45,730 Train Loss: 0.0002296, Val Loss: 0.0002560 +2025-03-25 13:03:45,730 Epoch 1072/2000 +2025-03-25 13:04:52,222 Current Learning Rate: 0.0002871104 +2025-03-25 13:04:52,222 Train Loss: 0.0002298, Val Loss: 0.0002563 +2025-03-25 13:04:52,223 Epoch 1073/2000 +2025-03-25 13:05:58,063 Current Learning Rate: 0.0002942428 +2025-03-25 13:05:58,064 Train Loss: 0.0002300, Val Loss: 0.0002565 +2025-03-25 13:05:58,064 Epoch 1074/2000 +2025-03-25 13:07:04,456 Current Learning Rate: 0.0003014261 +2025-03-25 13:07:04,456 Train Loss: 0.0002303, Val Loss: 0.0002567 +2025-03-25 13:07:04,456 Epoch 1075/2000 +2025-03-25 13:08:09,448 Current Learning Rate: 0.0003086583 +2025-03-25 13:08:09,448 Train Loss: 0.0002305, Val Loss: 0.0002569 +2025-03-25 13:08:09,449 Epoch 1076/2000 +2025-03-25 13:09:15,871 Current Learning 
Rate: 0.0003159377 +2025-03-25 13:09:15,872 Train Loss: 0.0002308, Val Loss: 0.0002572 +2025-03-25 13:09:15,872 Epoch 1077/2000 +2025-03-25 13:10:22,685 Current Learning Rate: 0.0003232626 +2025-03-25 13:10:22,686 Train Loss: 0.0002310, Val Loss: 0.0002575 +2025-03-25 13:10:22,686 Epoch 1078/2000 +2025-03-25 13:11:29,091 Current Learning Rate: 0.0003306310 +2025-03-25 13:11:29,092 Train Loss: 0.0002313, Val Loss: 0.0002578 +2025-03-25 13:11:29,092 Epoch 1079/2000 +2025-03-25 13:12:34,879 Current Learning Rate: 0.0003380413 +2025-03-25 13:12:34,879 Train Loss: 0.0002315, Val Loss: 0.0002582 +2025-03-25 13:12:34,880 Epoch 1080/2000 +2025-03-25 13:13:40,762 Current Learning Rate: 0.0003454915 +2025-03-25 13:13:40,763 Train Loss: 0.0002318, Val Loss: 0.0002586 +2025-03-25 13:13:40,763 Epoch 1081/2000 +2025-03-25 13:14:47,120 Current Learning Rate: 0.0003529798 +2025-03-25 13:14:47,120 Train Loss: 0.0002320, Val Loss: 0.0002589 +2025-03-25 13:14:47,120 Epoch 1082/2000 +2025-03-25 13:15:53,049 Current Learning Rate: 0.0003605044 +2025-03-25 13:15:53,050 Train Loss: 0.0002323, Val Loss: 0.0002593 +2025-03-25 13:15:53,050 Epoch 1083/2000 +2025-03-25 13:16:59,530 Current Learning Rate: 0.0003680635 +2025-03-25 13:16:59,531 Train Loss: 0.0002325, Val Loss: 0.0002598 +2025-03-25 13:16:59,531 Epoch 1084/2000 +2025-03-25 13:18:05,516 Current Learning Rate: 0.0003756551 +2025-03-25 13:18:05,516 Train Loss: 0.0002328, Val Loss: 0.0002602 +2025-03-25 13:18:05,517 Epoch 1085/2000 +2025-03-25 13:19:11,758 Current Learning Rate: 0.0003832773 +2025-03-25 13:19:11,759 Train Loss: 0.0002330, Val Loss: 0.0002608 +2025-03-25 13:19:11,759 Epoch 1086/2000 +2025-03-25 13:20:18,353 Current Learning Rate: 0.0003909284 +2025-03-25 13:20:18,353 Train Loss: 0.0002332, Val Loss: 0.0002614 +2025-03-25 13:20:18,353 Epoch 1087/2000 +2025-03-25 13:21:25,324 Current Learning Rate: 0.0003986064 +2025-03-25 13:21:25,324 Train Loss: 0.0002335, Val Loss: 0.0002620 +2025-03-25 13:21:25,325 Epoch 1088/2000 
+2025-03-25 13:22:30,844 Current Learning Rate: 0.0004063093 +2025-03-25 13:22:30,845 Train Loss: 0.0002337, Val Loss: 0.0002626 +2025-03-25 13:22:30,845 Epoch 1089/2000 +2025-03-25 13:23:37,513 Current Learning Rate: 0.0004140354 +2025-03-25 13:23:37,513 Train Loss: 0.0002340, Val Loss: 0.0002632 +2025-03-25 13:23:37,514 Epoch 1090/2000 +2025-03-25 13:24:44,284 Current Learning Rate: 0.0004217828 +2025-03-25 13:24:44,285 Train Loss: 0.0002343, Val Loss: 0.0002637 +2025-03-25 13:24:44,285 Epoch 1091/2000 +2025-03-25 13:25:50,928 Current Learning Rate: 0.0004295494 +2025-03-25 13:25:50,929 Train Loss: 0.0002345, Val Loss: 0.0002640 +2025-03-25 13:25:50,929 Epoch 1092/2000 +2025-03-25 13:26:57,386 Current Learning Rate: 0.0004373334 +2025-03-25 13:26:57,386 Train Loss: 0.0002348, Val Loss: 0.0002642 +2025-03-25 13:26:57,387 Epoch 1093/2000 +2025-03-25 13:28:03,910 Current Learning Rate: 0.0004451328 +2025-03-25 13:28:03,910 Train Loss: 0.0002351, Val Loss: 0.0002644 +2025-03-25 13:28:03,910 Epoch 1094/2000 +2025-03-25 13:29:10,129 Current Learning Rate: 0.0004529458 +2025-03-25 13:29:10,129 Train Loss: 0.0002353, Val Loss: 0.0002646 +2025-03-25 13:29:10,130 Epoch 1095/2000 +2025-03-25 13:30:16,193 Current Learning Rate: 0.0004607705 +2025-03-25 13:30:16,194 Train Loss: 0.0002356, Val Loss: 0.0002648 +2025-03-25 13:30:16,194 Epoch 1096/2000 +2025-03-25 13:31:22,563 Current Learning Rate: 0.0004686047 +2025-03-25 13:31:22,563 Train Loss: 0.0002358, Val Loss: 0.0002650 +2025-03-25 13:31:22,564 Epoch 1097/2000 +2025-03-25 13:32:28,474 Current Learning Rate: 0.0004764468 +2025-03-25 13:32:28,474 Train Loss: 0.0002361, Val Loss: 0.0002652 +2025-03-25 13:32:28,475 Epoch 1098/2000 +2025-03-25 13:33:34,291 Current Learning Rate: 0.0004842946 +2025-03-25 13:33:34,292 Train Loss: 0.0002364, Val Loss: 0.0002655 +2025-03-25 13:33:34,292 Epoch 1099/2000 +2025-03-25 13:34:39,841 Current Learning Rate: 0.0004921463 +2025-03-25 13:34:39,841 Train Loss: 0.0002366, Val Loss: 0.0002658 
+2025-03-25 13:34:39,841 Epoch 1100/2000 +2025-03-25 13:35:46,883 Current Learning Rate: 0.0005000000 +2025-03-25 13:35:46,884 Train Loss: 0.0002368, Val Loss: 0.0002661 +2025-03-25 13:35:46,884 Epoch 1101/2000 +2025-03-25 13:36:53,021 Current Learning Rate: 0.0005078537 +2025-03-25 13:36:53,021 Train Loss: 0.0002371, Val Loss: 0.0002666 +2025-03-25 13:36:53,022 Epoch 1102/2000 +2025-03-25 13:37:59,005 Current Learning Rate: 0.0005157054 +2025-03-25 13:37:59,006 Train Loss: 0.0002373, Val Loss: 0.0002670 +2025-03-25 13:37:59,006 Epoch 1103/2000 +2025-03-25 13:39:05,420 Current Learning Rate: 0.0005235532 +2025-03-25 13:39:05,421 Train Loss: 0.0002376, Val Loss: 0.0002674 +2025-03-25 13:39:05,421 Epoch 1104/2000 +2025-03-25 13:40:11,584 Current Learning Rate: 0.0005313953 +2025-03-25 13:40:11,585 Train Loss: 0.0002379, Val Loss: 0.0002679 +2025-03-25 13:40:11,585 Epoch 1105/2000 +2025-03-25 13:41:17,215 Current Learning Rate: 0.0005392295 +2025-03-25 13:41:17,215 Train Loss: 0.0002381, Val Loss: 0.0002684 +2025-03-25 13:41:17,215 Epoch 1106/2000 +2025-03-25 13:42:23,819 Current Learning Rate: 0.0005470542 +2025-03-25 13:42:23,819 Train Loss: 0.0002385, Val Loss: 0.0002689 +2025-03-25 13:42:23,819 Epoch 1107/2000 +2025-03-25 13:43:29,462 Current Learning Rate: 0.0005548672 +2025-03-25 13:43:29,463 Train Loss: 0.0002389, Val Loss: 0.0002702 +2025-03-25 13:43:29,463 Epoch 1108/2000 +2025-03-25 13:44:35,312 Current Learning Rate: 0.0005626666 +2025-03-25 13:44:35,313 Train Loss: 0.0002396, Val Loss: 0.0002703 +2025-03-25 13:44:35,313 Epoch 1109/2000 +2025-03-25 13:45:41,850 Current Learning Rate: 0.0005704506 +2025-03-25 13:45:41,851 Train Loss: 0.0002405, Val Loss: 0.0002759 +2025-03-25 13:45:41,851 Epoch 1110/2000 +2025-03-25 13:46:47,991 Current Learning Rate: 0.0005782172 +2025-03-25 13:46:47,991 Train Loss: 0.0002412, Val Loss: 0.0002746 +2025-03-25 13:46:47,992 Epoch 1111/2000 +2025-03-25 13:47:54,185 Current Learning Rate: 0.0005859646 +2025-03-25 13:47:54,185 
Train Loss: 0.0002413, Val Loss: 0.0002735 +2025-03-25 13:47:54,185 Epoch 1112/2000 +2025-03-25 13:49:00,101 Current Learning Rate: 0.0005936907 +2025-03-25 13:49:00,102 Train Loss: 0.0002412, Val Loss: 0.0002735 +2025-03-25 13:49:00,102 Epoch 1113/2000 +2025-03-25 13:50:05,786 Current Learning Rate: 0.0006013936 +2025-03-25 13:50:05,786 Train Loss: 0.0002408, Val Loss: 0.0002711 +2025-03-25 13:50:05,787 Epoch 1114/2000 +2025-03-25 13:51:11,748 Current Learning Rate: 0.0006090716 +2025-03-25 13:51:11,748 Train Loss: 0.0002401, Val Loss: 0.0002707 +2025-03-25 13:51:11,748 Epoch 1115/2000 +2025-03-25 13:52:18,650 Current Learning Rate: 0.0006167227 +2025-03-25 13:52:18,651 Train Loss: 0.0002400, Val Loss: 0.0002718 +2025-03-25 13:52:18,651 Epoch 1116/2000 +2025-03-25 13:53:24,718 Current Learning Rate: 0.0006243449 +2025-03-25 13:53:24,719 Train Loss: 0.0002404, Val Loss: 0.0002727 +2025-03-25 13:53:24,719 Epoch 1117/2000 +2025-03-25 13:54:30,577 Current Learning Rate: 0.0006319365 +2025-03-25 13:54:30,578 Train Loss: 0.0002408, Val Loss: 0.0002737 +2025-03-25 13:54:30,578 Epoch 1118/2000 +2025-03-25 13:55:37,129 Current Learning Rate: 0.0006394956 +2025-03-25 13:55:37,130 Train Loss: 0.0002412, Val Loss: 0.0002746 +2025-03-25 13:55:37,130 Epoch 1119/2000 +2025-03-25 13:56:43,697 Current Learning Rate: 0.0006470202 +2025-03-25 13:56:43,698 Train Loss: 0.0002416, Val Loss: 0.0002754 +2025-03-25 13:56:43,698 Epoch 1120/2000 +2025-03-25 13:57:50,850 Current Learning Rate: 0.0006545085 +2025-03-25 13:57:50,851 Train Loss: 0.0002420, Val Loss: 0.0002760 +2025-03-25 13:57:50,851 Epoch 1121/2000 +2025-03-25 13:58:56,403 Current Learning Rate: 0.0006619587 +2025-03-25 13:58:56,404 Train Loss: 0.0002429, Val Loss: 0.0002771 +2025-03-25 13:58:56,405 Epoch 1122/2000 +2025-03-25 14:00:03,599 Current Learning Rate: 0.0006693690 +2025-03-25 14:00:03,601 Train Loss: 0.0002442, Val Loss: 0.0002766 +2025-03-25 14:00:03,602 Epoch 1123/2000 +2025-03-25 14:01:10,735 Current Learning 
Rate: 0.0006767374 +2025-03-25 14:01:10,735 Train Loss: 0.0002450, Val Loss: 0.0002763 +2025-03-25 14:01:10,735 Epoch 1124/2000 +2025-03-25 14:02:16,585 Current Learning Rate: 0.0006840623 +2025-03-25 14:02:16,586 Train Loss: 0.0002446, Val Loss: 0.0002747 +2025-03-25 14:02:16,586 Epoch 1125/2000 +2025-03-25 14:03:22,823 Current Learning Rate: 0.0006913417 +2025-03-25 14:03:22,824 Train Loss: 0.0002440, Val Loss: 0.0002745 +2025-03-25 14:03:22,824 Epoch 1126/2000 +2025-03-25 14:04:28,777 Current Learning Rate: 0.0006985739 +2025-03-25 14:04:28,778 Train Loss: 0.0002430, Val Loss: 0.0002748 +2025-03-25 14:04:28,778 Epoch 1127/2000 +2025-03-25 14:05:34,434 Current Learning Rate: 0.0007057572 +2025-03-25 14:05:34,435 Train Loss: 0.0002427, Val Loss: 0.0002758 +2025-03-25 14:05:34,435 Epoch 1128/2000 +2025-03-25 14:06:40,599 Current Learning Rate: 0.0007128896 +2025-03-25 14:06:40,599 Train Loss: 0.0002430, Val Loss: 0.0002768 +2025-03-25 14:06:40,599 Epoch 1129/2000 +2025-03-25 14:07:46,272 Current Learning Rate: 0.0007199696 +2025-03-25 14:07:46,272 Train Loss: 0.0002435, Val Loss: 0.0002780 +2025-03-25 14:07:46,273 Epoch 1130/2000 +2025-03-25 14:08:51,951 Current Learning Rate: 0.0007269952 +2025-03-25 14:08:51,951 Train Loss: 0.0002440, Val Loss: 0.0002788 +2025-03-25 14:08:51,952 Epoch 1131/2000 +2025-03-25 14:09:58,411 Current Learning Rate: 0.0007339649 +2025-03-25 14:09:58,412 Train Loss: 0.0002448, Val Loss: 0.0002799 +2025-03-25 14:09:58,413 Epoch 1132/2000 +2025-03-25 14:11:04,292 Current Learning Rate: 0.0007408768 +2025-03-25 14:11:04,293 Train Loss: 0.0002456, Val Loss: 0.0002806 +2025-03-25 14:11:04,293 Epoch 1133/2000 +2025-03-25 14:12:10,332 Current Learning Rate: 0.0007477293 +2025-03-25 14:12:10,333 Train Loss: 0.0002464, Val Loss: 0.0002812 +2025-03-25 14:12:10,333 Epoch 1134/2000 +2025-03-25 14:13:16,969 Current Learning Rate: 0.0007545207 +2025-03-25 14:13:16,970 Train Loss: 0.0002473, Val Loss: 0.0002820 +2025-03-25 14:13:16,970 Epoch 1135/2000 
+2025-03-25 14:14:22,655 Current Learning Rate: 0.0007612493 +2025-03-25 14:14:22,656 Train Loss: 0.0002481, Val Loss: 0.0002817 +2025-03-25 14:14:22,656 Epoch 1136/2000 +2025-03-25 14:15:29,549 Current Learning Rate: 0.0007679134 +2025-03-25 14:15:29,550 Train Loss: 0.0002482, Val Loss: 0.0002826 +2025-03-25 14:15:29,550 Epoch 1137/2000 +2025-03-25 14:16:35,391 Current Learning Rate: 0.0007745114 +2025-03-25 14:16:35,391 Train Loss: 0.0002478, Val Loss: 0.0002806 +2025-03-25 14:16:35,392 Epoch 1138/2000 +2025-03-25 14:17:41,094 Current Learning Rate: 0.0007810417 +2025-03-25 14:17:41,094 Train Loss: 0.0002465, Val Loss: 0.0002785 +2025-03-25 14:17:41,095 Epoch 1139/2000 +2025-03-25 14:18:46,874 Current Learning Rate: 0.0007875026 +2025-03-25 14:18:46,875 Train Loss: 0.0002452, Val Loss: 0.0002785 +2025-03-25 14:18:46,875 Epoch 1140/2000 +2025-03-25 14:19:52,966 Current Learning Rate: 0.0007938926 +2025-03-25 14:19:52,966 Train Loss: 0.0002452, Val Loss: 0.0002799 +2025-03-25 14:19:52,967 Epoch 1141/2000 +2025-03-25 14:20:59,306 Current Learning Rate: 0.0008002101 +2025-03-25 14:20:59,307 Train Loss: 0.0002456, Val Loss: 0.0002809 +2025-03-25 14:20:59,307 Epoch 1142/2000 +2025-03-25 14:22:04,803 Current Learning Rate: 0.0008064535 +2025-03-25 14:22:04,803 Train Loss: 0.0002464, Val Loss: 0.0002819 +2025-03-25 14:22:04,804 Epoch 1143/2000 +2025-03-25 14:23:11,547 Current Learning Rate: 0.0008126213 +2025-03-25 14:23:11,548 Train Loss: 0.0002474, Val Loss: 0.0002839 +2025-03-25 14:23:11,548 Epoch 1144/2000 +2025-03-25 14:24:18,291 Current Learning Rate: 0.0008187120 +2025-03-25 14:24:18,291 Train Loss: 0.0002483, Val Loss: 0.0002845 +2025-03-25 14:24:18,292 Epoch 1145/2000 +2025-03-25 14:25:24,401 Current Learning Rate: 0.0008247240 +2025-03-25 14:25:24,402 Train Loss: 0.0002490, Val Loss: 0.0002844 +2025-03-25 14:25:24,402 Epoch 1146/2000 +2025-03-25 14:26:30,400 Current Learning Rate: 0.0008306559 +2025-03-25 14:26:30,401 Train Loss: 0.0002495, Val Loss: 0.0002846 
+2025-03-25 14:26:30,401 Epoch 1147/2000 +2025-03-25 14:27:36,448 Current Learning Rate: 0.0008365063 +2025-03-25 14:27:36,449 Train Loss: 0.0002499, Val Loss: 0.0002853 +2025-03-25 14:27:36,449 Epoch 1148/2000 +2025-03-25 14:28:42,602 Current Learning Rate: 0.0008422736 +2025-03-25 14:28:42,602 Train Loss: 0.0002507, Val Loss: 0.0002865 +2025-03-25 14:28:42,602 Epoch 1149/2000 +2025-03-25 14:29:48,190 Current Learning Rate: 0.0008479564 +2025-03-25 14:29:48,191 Train Loss: 0.0002514, Val Loss: 0.0002890 +2025-03-25 14:29:48,191 Epoch 1150/2000 +2025-03-25 14:30:54,192 Current Learning Rate: 0.0008535534 +2025-03-25 14:30:54,192 Train Loss: 0.0002508, Val Loss: 0.0002865 +2025-03-25 14:30:54,193 Epoch 1151/2000 +2025-03-25 14:32:00,732 Current Learning Rate: 0.0008590631 +2025-03-25 14:32:00,733 Train Loss: 0.0002495, Val Loss: 0.0002834 +2025-03-25 14:32:00,733 Epoch 1152/2000 +2025-03-25 14:33:07,085 Current Learning Rate: 0.0008644843 +2025-03-25 14:33:07,086 Train Loss: 0.0002476, Val Loss: 0.0002824 +2025-03-25 14:33:07,086 Epoch 1153/2000 +2025-03-25 14:34:13,709 Current Learning Rate: 0.0008698155 +2025-03-25 14:34:13,709 Train Loss: 0.0002472, Val Loss: 0.0002838 +2025-03-25 14:34:13,709 Epoch 1154/2000 +2025-03-25 14:35:20,369 Current Learning Rate: 0.0008750555 +2025-03-25 14:35:20,369 Train Loss: 0.0002478, Val Loss: 0.0002848 +2025-03-25 14:35:20,369 Epoch 1155/2000 +2025-03-25 14:36:26,907 Current Learning Rate: 0.0008802030 +2025-03-25 14:36:26,908 Train Loss: 0.0002488, Val Loss: 0.0002864 +2025-03-25 14:36:26,909 Epoch 1156/2000 +2025-03-25 14:37:33,356 Current Learning Rate: 0.0008852566 +2025-03-25 14:37:33,357 Train Loss: 0.0002499, Val Loss: 0.0002882 +2025-03-25 14:37:33,357 Epoch 1157/2000 +2025-03-25 14:38:39,625 Current Learning Rate: 0.0008902152 +2025-03-25 14:38:39,625 Train Loss: 0.0002505, Val Loss: 0.0002847 +2025-03-25 14:38:39,626 Epoch 1158/2000 +2025-03-25 14:39:45,771 Current Learning Rate: 0.0008950775 +2025-03-25 14:39:45,772 
Train Loss: 0.0002509, Val Loss: 0.0002880 +2025-03-25 14:39:45,772 Epoch 1159/2000 +2025-03-25 14:40:52,134 Current Learning Rate: 0.0008998423 +2025-03-25 14:40:52,134 Train Loss: 0.0002513, Val Loss: 0.0002884 +2025-03-25 14:40:52,135 Epoch 1160/2000 +2025-03-25 14:41:58,185 Current Learning Rate: 0.0009045085 +2025-03-25 14:41:58,186 Train Loss: 0.0002511, Val Loss: 0.0002889 +2025-03-25 14:41:58,186 Epoch 1161/2000 +2025-03-25 14:43:04,617 Current Learning Rate: 0.0009090749 +2025-03-25 14:43:04,618 Train Loss: 0.0002507, Val Loss: 0.0002880 +2025-03-25 14:43:04,618 Epoch 1162/2000 +2025-03-25 14:44:10,861 Current Learning Rate: 0.0009135403 +2025-03-25 14:44:10,861 Train Loss: 0.0002497, Val Loss: 0.0002869 +2025-03-25 14:44:10,862 Epoch 1163/2000 +2025-03-25 14:45:16,789 Current Learning Rate: 0.0009179037 +2025-03-25 14:45:16,790 Train Loss: 0.0002491, Val Loss: 0.0002870 +2025-03-25 14:45:16,790 Epoch 1164/2000 +2025-03-25 14:46:23,631 Current Learning Rate: 0.0009221640 +2025-03-25 14:46:23,631 Train Loss: 0.0002498, Val Loss: 0.0002883 +2025-03-25 14:46:23,632 Epoch 1165/2000 +2025-03-25 14:47:30,141 Current Learning Rate: 0.0009263201 +2025-03-25 14:47:30,142 Train Loss: 0.0002507, Val Loss: 0.0002890 +2025-03-25 14:47:30,143 Epoch 1166/2000 +2025-03-25 14:48:36,627 Current Learning Rate: 0.0009303710 +2025-03-25 14:48:36,628 Train Loss: 0.0002514, Val Loss: 0.0002912 +2025-03-25 14:48:36,628 Epoch 1167/2000 +2025-03-25 14:49:41,802 Current Learning Rate: 0.0009343158 +2025-03-25 14:49:41,803 Train Loss: 0.0002513, Val Loss: 0.0002889 +2025-03-25 14:49:41,804 Epoch 1168/2000 +2025-03-25 14:50:47,807 Current Learning Rate: 0.0009381533 +2025-03-25 14:50:47,808 Train Loss: 0.0002518, Val Loss: 0.0002882 +2025-03-25 14:50:47,809 Epoch 1169/2000 +2025-03-25 14:51:53,483 Current Learning Rate: 0.0009418828 +2025-03-25 14:51:53,484 Train Loss: 0.0002532, Val Loss: 0.0002919 +2025-03-25 14:51:53,485 Epoch 1170/2000 +2025-03-25 14:53:00,221 Current Learning 
Rate: 0.0009455033 +2025-03-25 14:53:00,222 Train Loss: 0.0002550, Val Loss: 0.0002918 +2025-03-25 14:53:00,222 Epoch 1171/2000 +2025-03-25 14:54:06,078 Current Learning Rate: 0.0009490138 +2025-03-25 14:54:06,079 Train Loss: 0.0002558, Val Loss: 0.0002926 +2025-03-25 14:54:06,079 Epoch 1172/2000 +2025-03-25 14:55:12,407 Current Learning Rate: 0.0009524135 +2025-03-25 14:55:12,408 Train Loss: 0.0002558, Val Loss: 0.0002955 +2025-03-25 14:55:12,408 Epoch 1173/2000 +2025-03-25 14:56:18,646 Current Learning Rate: 0.0009557016 +2025-03-25 14:56:18,646 Train Loss: 0.0002547, Val Loss: 0.0002917 +2025-03-25 14:56:18,647 Epoch 1174/2000 +2025-03-25 14:57:24,762 Current Learning Rate: 0.0009588773 +2025-03-25 14:57:24,763 Train Loss: 0.0002523, Val Loss: 0.0002873 +2025-03-25 14:57:24,763 Epoch 1175/2000 +2025-03-25 14:58:30,683 Current Learning Rate: 0.0009619398 +2025-03-25 14:58:30,684 Train Loss: 0.0002500, Val Loss: 0.0002860 +2025-03-25 14:58:30,684 Epoch 1176/2000 +2025-03-25 14:59:36,496 Current Learning Rate: 0.0009648882 +2025-03-25 14:59:36,496 Train Loss: 0.0002493, Val Loss: 0.0002879 +2025-03-25 14:59:36,497 Epoch 1177/2000 +2025-03-25 15:00:42,643 Current Learning Rate: 0.0009677220 +2025-03-25 15:00:42,644 Train Loss: 0.0002503, Val Loss: 0.0002904 +2025-03-25 15:00:42,644 Epoch 1178/2000 +2025-03-25 15:01:49,630 Current Learning Rate: 0.0009704404 +2025-03-25 15:01:49,630 Train Loss: 0.0002513, Val Loss: 0.0002931 +2025-03-25 15:01:49,631 Epoch 1179/2000 +2025-03-25 15:02:55,238 Current Learning Rate: 0.0009730427 +2025-03-25 15:02:55,239 Train Loss: 0.0002526, Val Loss: 0.0002912 +2025-03-25 15:02:55,239 Epoch 1180/2000 +2025-03-25 15:04:03,020 Current Learning Rate: 0.0009755283 +2025-03-25 15:04:03,021 Train Loss: 0.0002523, Val Loss: 0.0002916 +2025-03-25 15:04:03,021 Epoch 1181/2000 +2025-03-25 15:05:52,224 Current Learning Rate: 0.0009778965 +2025-03-25 15:05:52,225 Train Loss: 0.0002520, Val Loss: 0.0002896 +2025-03-25 15:05:52,225 Epoch 1182/2000 
+2025-03-25 15:07:55,343 Current Learning Rate: 0.0009801468 +2025-03-25 15:07:55,343 Train Loss: 0.0002518, Val Loss: 0.0002890 +2025-03-25 15:07:55,344 Epoch 1183/2000 +2025-03-25 15:10:01,256 Current Learning Rate: 0.0009822787 +2025-03-25 15:10:01,257 Train Loss: 0.0002512, Val Loss: 0.0002903 +2025-03-25 15:10:01,257 Epoch 1184/2000 +2025-03-25 15:12:06,328 Current Learning Rate: 0.0009842916 +2025-03-25 15:12:06,329 Train Loss: 0.0002510, Val Loss: 0.0002914 +2025-03-25 15:12:06,329 Epoch 1185/2000 +2025-03-25 15:14:12,456 Current Learning Rate: 0.0009861850 +2025-03-25 15:14:12,457 Train Loss: 0.0002515, Val Loss: 0.0002904 +2025-03-25 15:14:12,457 Epoch 1186/2000 +2025-03-25 15:16:18,324 Current Learning Rate: 0.0009879584 +2025-03-25 15:16:18,324 Train Loss: 0.0002519, Val Loss: 0.0002916 +2025-03-25 15:16:18,325 Epoch 1187/2000 +2025-03-25 15:18:23,976 Current Learning Rate: 0.0009896114 +2025-03-25 15:18:23,977 Train Loss: 0.0002539, Val Loss: 0.0002896 +2025-03-25 15:18:23,977 Epoch 1188/2000 +2025-03-25 15:20:29,371 Current Learning Rate: 0.0009911436 +2025-03-25 15:20:29,372 Train Loss: 0.0002544, Val Loss: 0.0002928 +2025-03-25 15:20:29,372 Epoch 1189/2000 +2025-03-25 15:22:35,817 Current Learning Rate: 0.0009925547 +2025-03-25 15:22:35,818 Train Loss: 0.0002559, Val Loss: 0.0002943 +2025-03-25 15:22:35,818 Epoch 1190/2000 +2025-03-25 15:24:42,773 Current Learning Rate: 0.0009938442 +2025-03-25 15:24:42,774 Train Loss: 0.0002561, Val Loss: 0.0002925 +2025-03-25 15:24:42,774 Epoch 1191/2000 +2025-03-25 15:26:50,089 Current Learning Rate: 0.0009950118 +2025-03-25 15:26:50,090 Train Loss: 0.0002566, Val Loss: 0.0002948 +2025-03-25 15:26:50,090 Epoch 1192/2000 +2025-03-25 15:28:55,717 Current Learning Rate: 0.0009960574 +2025-03-25 15:28:55,718 Train Loss: 0.0002557, Val Loss: 0.0002951 +2025-03-25 15:28:55,718 Epoch 1193/2000 +2025-03-25 15:31:03,698 Current Learning Rate: 0.0009969805 +2025-03-25 15:31:03,699 Train Loss: 0.0002541, Val Loss: 0.0002907 
+2025-03-25 15:31:03,699 Epoch 1194/2000 +2025-03-25 15:33:09,124 Current Learning Rate: 0.0009977810 +2025-03-25 15:33:09,125 Train Loss: 0.0002515, Val Loss: 0.0002874 +2025-03-25 15:33:09,126 Epoch 1195/2000 +2025-03-25 15:35:14,539 Current Learning Rate: 0.0009984587 +2025-03-25 15:35:14,540 Train Loss: 0.0002499, Val Loss: 0.0002872 +2025-03-25 15:35:14,540 Epoch 1196/2000 +2025-03-25 15:37:21,263 Current Learning Rate: 0.0009990134 +2025-03-25 15:37:21,264 Train Loss: 0.0002499, Val Loss: 0.0002893 +2025-03-25 15:37:21,264 Epoch 1197/2000 +2025-03-25 15:39:26,973 Current Learning Rate: 0.0009994449 +2025-03-25 15:39:26,973 Train Loss: 0.0002509, Val Loss: 0.0002907 +2025-03-25 15:39:26,973 Epoch 1198/2000 +2025-03-25 15:41:32,832 Current Learning Rate: 0.0009997533 +2025-03-25 15:41:32,833 Train Loss: 0.0002520, Val Loss: 0.0002905 +2025-03-25 15:41:32,833 Epoch 1199/2000 +2025-03-25 15:43:37,678 Current Learning Rate: 0.0009999383 +2025-03-25 15:43:37,679 Train Loss: 0.0002525, Val Loss: 0.0002913 +2025-03-25 15:43:37,680 Epoch 1200/2000 +2025-03-25 15:45:42,778 Current Learning Rate: 0.0010000000 +2025-03-25 15:45:42,779 Train Loss: 0.0002530, Val Loss: 0.0002898 +2025-03-25 15:45:42,779 Epoch 1201/2000 +2025-03-25 15:47:46,107 Current Learning Rate: 0.0009999383 +2025-03-25 15:47:46,108 Train Loss: 0.0002538, Val Loss: 0.0002921 +2025-03-25 15:47:46,108 Epoch 1202/2000 +2025-03-25 15:49:50,332 Current Learning Rate: 0.0009997533 +2025-03-25 15:49:50,333 Train Loss: 0.0002540, Val Loss: 0.0002927 +2025-03-25 15:49:50,333 Epoch 1203/2000 +2025-03-25 15:51:55,822 Current Learning Rate: 0.0009994449 +2025-03-25 15:51:55,822 Train Loss: 0.0002550, Val Loss: 0.0002937 +2025-03-25 15:51:55,823 Epoch 1204/2000 +2025-03-25 15:53:59,959 Current Learning Rate: 0.0009990134 +2025-03-25 15:53:59,959 Train Loss: 0.0002558, Val Loss: 0.0002948 +2025-03-25 15:53:59,960 Epoch 1205/2000 +2025-03-25 15:56:05,247 Current Learning Rate: 0.0009984587 +2025-03-25 15:56:05,248 
Train Loss: 0.0002563, Val Loss: 0.0002970 +2025-03-25 15:56:05,249 Epoch 1206/2000 +2025-03-25 15:58:11,272 Current Learning Rate: 0.0009977810 +2025-03-25 15:58:11,272 Train Loss: 0.0002561, Val Loss: 0.0002943 +2025-03-25 15:58:11,273 Epoch 1207/2000 +2025-03-25 16:00:16,670 Current Learning Rate: 0.0009969805 +2025-03-25 16:00:16,670 Train Loss: 0.0002551, Val Loss: 0.0002895 +2025-03-25 16:00:16,671 Epoch 1208/2000 +2025-03-25 16:02:22,854 Current Learning Rate: 0.0009960574 +2025-03-25 16:02:22,855 Train Loss: 0.0002532, Val Loss: 0.0002866 +2025-03-25 16:02:22,855 Epoch 1209/2000 +2025-03-25 16:04:28,539 Current Learning Rate: 0.0009950118 +2025-03-25 16:04:28,540 Train Loss: 0.0002502, Val Loss: 0.0002848 +2025-03-25 16:04:28,540 Epoch 1210/2000 +2025-03-25 16:06:35,571 Current Learning Rate: 0.0009938442 +2025-03-25 16:06:35,571 Train Loss: 0.0002487, Val Loss: 0.0002855 +2025-03-25 16:06:35,571 Epoch 1211/2000 +2025-03-25 16:08:41,963 Current Learning Rate: 0.0009925547 +2025-03-25 16:08:41,964 Train Loss: 0.0002487, Val Loss: 0.0002873 +2025-03-25 16:08:41,964 Epoch 1212/2000 +2025-03-25 16:10:48,064 Current Learning Rate: 0.0009911436 +2025-03-25 16:10:48,065 Train Loss: 0.0002498, Val Loss: 0.0002892 +2025-03-25 16:10:48,065 Epoch 1213/2000 +2025-03-25 16:12:53,908 Current Learning Rate: 0.0009896114 +2025-03-25 16:12:53,908 Train Loss: 0.0002507, Val Loss: 0.0002888 +2025-03-25 16:12:53,908 Epoch 1214/2000 +2025-03-25 16:15:00,492 Current Learning Rate: 0.0009879584 +2025-03-25 16:15:00,492 Train Loss: 0.0002510, Val Loss: 0.0002907 +2025-03-25 16:15:00,493 Epoch 1215/2000 +2025-03-25 16:17:06,769 Current Learning Rate: 0.0009861850 +2025-03-25 16:17:06,770 Train Loss: 0.0002514, Val Loss: 0.0002875 +2025-03-25 16:17:06,770 Epoch 1216/2000 +2025-03-25 16:19:11,961 Current Learning Rate: 0.0009842916 +2025-03-25 16:19:11,962 Train Loss: 0.0002523, Val Loss: 0.0002892 +2025-03-25 16:19:11,962 Epoch 1217/2000 +2025-03-25 16:21:17,585 Current Learning 
Rate: 0.0009822787 +2025-03-25 16:21:17,586 Train Loss: 0.0002526, Val Loss: 0.0002911 +2025-03-25 16:21:17,586 Epoch 1218/2000 +2025-03-25 16:23:23,512 Current Learning Rate: 0.0009801468 +2025-03-25 16:23:23,512 Train Loss: 0.0002533, Val Loss: 0.0002907 +2025-03-25 16:23:23,513 Epoch 1219/2000 +2025-03-25 16:25:28,678 Current Learning Rate: 0.0009778965 +2025-03-25 16:25:28,679 Train Loss: 0.0002543, Val Loss: 0.0002936 +2025-03-25 16:25:28,679 Epoch 1220/2000 +2025-03-25 16:27:32,029 Current Learning Rate: 0.0009755283 +2025-03-25 16:27:32,030 Train Loss: 0.0002544, Val Loss: 0.0002945 +2025-03-25 16:27:32,030 Epoch 1221/2000 +2025-03-25 16:29:36,567 Current Learning Rate: 0.0009730427 +2025-03-25 16:29:36,567 Train Loss: 0.0002543, Val Loss: 0.0002890 +2025-03-25 16:29:36,568 Epoch 1222/2000 +2025-03-25 16:31:41,940 Current Learning Rate: 0.0009704404 +2025-03-25 16:31:41,940 Train Loss: 0.0002540, Val Loss: 0.0002853 +2025-03-25 16:31:41,940 Epoch 1223/2000 +2025-03-25 16:33:47,790 Current Learning Rate: 0.0009677220 +2025-03-25 16:33:47,791 Train Loss: 0.0002528, Val Loss: 0.0002844 +2025-03-25 16:33:47,791 Epoch 1224/2000 +2025-03-25 16:35:52,671 Current Learning Rate: 0.0009648882 +2025-03-25 16:35:52,672 Train Loss: 0.0002503, Val Loss: 0.0002827 +2025-03-25 16:35:52,672 Epoch 1225/2000 +2025-03-25 16:37:56,735 Current Learning Rate: 0.0009619398 +2025-03-25 16:37:56,735 Train Loss: 0.0002478, Val Loss: 0.0002819 +2025-03-25 16:37:56,736 Epoch 1226/2000 +2025-03-25 16:40:00,077 Current Learning Rate: 0.0009588773 +2025-03-25 16:40:00,077 Train Loss: 0.0002469, Val Loss: 0.0002831 +2025-03-25 16:40:00,077 Epoch 1227/2000 +2025-03-25 16:42:02,917 Current Learning Rate: 0.0009557016 +2025-03-25 16:42:02,918 Train Loss: 0.0002469, Val Loss: 0.0002836 +2025-03-25 16:42:02,918 Epoch 1228/2000 +2025-03-25 16:44:08,023 Current Learning Rate: 0.0009524135 +2025-03-25 16:44:08,024 Train Loss: 0.0002473, Val Loss: 0.0002853 +2025-03-25 16:44:08,024 Epoch 1229/2000 
+2025-03-25 16:46:11,824 Current Learning Rate: 0.0009490138 +2025-03-25 16:46:11,825 Train Loss: 0.0002481, Val Loss: 0.0002848 +2025-03-25 16:46:11,825 Epoch 1230/2000 +2025-03-25 16:48:15,954 Current Learning Rate: 0.0009455033 +2025-03-25 16:48:15,955 Train Loss: 0.0002490, Val Loss: 0.0002878 +2025-03-25 16:48:15,955 Epoch 1231/2000 +2025-03-25 16:50:21,863 Current Learning Rate: 0.0009418828 +2025-03-25 16:50:21,864 Train Loss: 0.0002495, Val Loss: 0.0002861 +2025-03-25 16:50:21,864 Epoch 1232/2000 +2025-03-25 16:52:26,501 Current Learning Rate: 0.0009381533 +2025-03-25 16:52:26,501 Train Loss: 0.0002499, Val Loss: 0.0002864 +2025-03-25 16:52:26,502 Epoch 1233/2000 +2025-03-25 16:54:29,611 Current Learning Rate: 0.0009343158 +2025-03-25 16:54:29,611 Train Loss: 0.0002506, Val Loss: 0.0002878 +2025-03-25 16:54:29,612 Epoch 1234/2000 +2025-03-25 16:56:34,904 Current Learning Rate: 0.0009303710 +2025-03-25 16:56:34,905 Train Loss: 0.0002518, Val Loss: 0.0002912 +2025-03-25 16:56:34,905 Epoch 1235/2000 +2025-03-25 16:58:41,535 Current Learning Rate: 0.0009263201 +2025-03-25 16:58:41,535 Train Loss: 0.0002519, Val Loss: 0.0002859 +2025-03-25 16:58:41,536 Epoch 1236/2000 +2025-03-25 17:00:34,082 Current Learning Rate: 0.0009221640 +2025-03-25 17:00:34,083 Train Loss: 0.0002523, Val Loss: 0.0002848 +2025-03-25 17:00:34,083 Epoch 1237/2000 +2025-03-25 17:01:40,430 Current Learning Rate: 0.0009179037 +2025-03-25 17:01:40,431 Train Loss: 0.0002521, Val Loss: 0.0002880 +2025-03-25 17:01:40,432 Epoch 1238/2000 +2025-03-25 17:02:46,405 Current Learning Rate: 0.0009135403 +2025-03-25 17:02:46,405 Train Loss: 0.0002509, Val Loss: 0.0002859 +2025-03-25 17:02:46,406 Epoch 1239/2000 +2025-03-25 17:03:53,295 Current Learning Rate: 0.0009090749 +2025-03-25 17:03:53,296 Train Loss: 0.0002490, Val Loss: 0.0002806 +2025-03-25 17:03:53,297 Epoch 1240/2000 +2025-03-25 17:04:59,141 Current Learning Rate: 0.0009045085 +2025-03-25 17:04:59,141 Train Loss: 0.0002466, Val Loss: 0.0002788 
+2025-03-25 17:04:59,142 Epoch 1241/2000 +2025-03-25 17:06:05,343 Current Learning Rate: 0.0008998423 +2025-03-25 17:06:05,344 Train Loss: 0.0002452, Val Loss: 0.0002791 +2025-03-25 17:06:05,344 Epoch 1242/2000 +2025-03-25 17:07:11,422 Current Learning Rate: 0.0008950775 +2025-03-25 17:07:11,423 Train Loss: 0.0002446, Val Loss: 0.0002796 +2025-03-25 17:07:11,423 Epoch 1243/2000 +2025-03-25 17:08:17,437 Current Learning Rate: 0.0008902152 +2025-03-25 17:08:17,438 Train Loss: 0.0002444, Val Loss: 0.0002804 +2025-03-25 17:08:17,438 Epoch 1244/2000 +2025-03-25 17:09:23,344 Current Learning Rate: 0.0008852566 +2025-03-25 17:09:23,344 Train Loss: 0.0002443, Val Loss: 0.0002804 +2025-03-25 17:09:23,345 Epoch 1245/2000 +2025-03-25 17:10:28,939 Current Learning Rate: 0.0008802030 +2025-03-25 17:10:28,939 Train Loss: 0.0002444, Val Loss: 0.0002810 +2025-03-25 17:10:28,939 Epoch 1246/2000 +2025-03-25 17:11:34,796 Current Learning Rate: 0.0008750555 +2025-03-25 17:11:34,797 Train Loss: 0.0002449, Val Loss: 0.0002827 +2025-03-25 17:11:34,797 Epoch 1247/2000 +2025-03-25 17:12:41,101 Current Learning Rate: 0.0008698155 +2025-03-25 17:12:41,102 Train Loss: 0.0002462, Val Loss: 0.0002831 +2025-03-25 17:12:41,102 Epoch 1248/2000 +2025-03-25 17:13:47,282 Current Learning Rate: 0.0008644843 +2025-03-25 17:13:47,282 Train Loss: 0.0002470, Val Loss: 0.0002832 +2025-03-25 17:13:47,282 Epoch 1249/2000 +2025-03-25 17:14:54,184 Current Learning Rate: 0.0008590631 +2025-03-25 17:14:54,185 Train Loss: 0.0002479, Val Loss: 0.0002829 +2025-03-25 17:14:54,185 Epoch 1250/2000 +2025-03-25 17:16:00,244 Current Learning Rate: 0.0008535534 +2025-03-25 17:16:00,244 Train Loss: 0.0002486, Val Loss: 0.0002796 +2025-03-25 17:16:00,245 Epoch 1251/2000 +2025-03-25 17:17:07,578 Current Learning Rate: 0.0008479564 +2025-03-25 17:17:07,578 Train Loss: 0.0002488, Val Loss: 0.0002802 +2025-03-25 17:17:07,578 Epoch 1252/2000 +2025-03-25 17:18:13,697 Current Learning Rate: 0.0008422736 +2025-03-25 17:18:13,698 
Train Loss: 0.0002481, Val Loss: 0.0002858 +2025-03-25 17:18:13,698 Epoch 1253/2000 +2025-03-25 17:19:20,243 Current Learning Rate: 0.0008365063 +2025-03-25 17:19:20,244 Train Loss: 0.0002476, Val Loss: 0.0002862 +2025-03-25 17:19:20,244 Epoch 1254/2000 +2025-03-25 17:20:26,718 Current Learning Rate: 0.0008306559 +2025-03-25 17:20:26,718 Train Loss: 0.0002474, Val Loss: 0.0002848 +2025-03-25 17:20:26,719 Epoch 1255/2000 +2025-03-25 17:21:32,070 Current Learning Rate: 0.0008247240 +2025-03-25 17:21:32,070 Train Loss: 0.0002470, Val Loss: 0.0002838 +2025-03-25 17:21:32,071 Epoch 1256/2000 +2025-03-25 17:22:38,473 Current Learning Rate: 0.0008187120 +2025-03-25 17:22:38,473 Train Loss: 0.0002457, Val Loss: 0.0002815 +2025-03-25 17:22:38,474 Epoch 1257/2000 +2025-03-25 17:23:44,661 Current Learning Rate: 0.0008126213 +2025-03-25 17:23:44,662 Train Loss: 0.0002436, Val Loss: 0.0002749 +2025-03-25 17:23:44,662 Epoch 1258/2000 +2025-03-25 17:24:51,293 Current Learning Rate: 0.0008064535 +2025-03-25 17:24:51,294 Train Loss: 0.0002420, Val Loss: 0.0002740 +2025-03-25 17:24:51,294 Epoch 1259/2000 +2025-03-25 17:25:57,843 Current Learning Rate: 0.0008002101 +2025-03-25 17:25:57,843 Train Loss: 0.0002415, Val Loss: 0.0002746 +2025-03-25 17:25:57,844 Epoch 1260/2000 +2025-03-25 17:27:03,491 Current Learning Rate: 0.0007938926 +2025-03-25 17:27:03,491 Train Loss: 0.0002413, Val Loss: 0.0002751 +2025-03-25 17:27:03,492 Epoch 1261/2000 +2025-03-25 17:28:08,955 Current Learning Rate: 0.0007875026 +2025-03-25 17:28:08,956 Train Loss: 0.0002410, Val Loss: 0.0002754 +2025-03-25 17:28:08,956 Epoch 1262/2000 +2025-03-25 17:29:16,059 Current Learning Rate: 0.0007810417 +2025-03-25 17:29:16,060 Train Loss: 0.0002409, Val Loss: 0.0002758 +2025-03-25 17:29:16,060 Epoch 1263/2000 +2025-03-25 17:30:21,993 Current Learning Rate: 0.0007745114 +2025-03-25 17:30:21,994 Train Loss: 0.0002410, Val Loss: 0.0002764 +2025-03-25 17:30:21,994 Epoch 1264/2000 +2025-03-25 17:31:27,797 Current Learning 
Rate: 0.0007679134 +2025-03-25 17:31:27,797 Train Loss: 0.0002415, Val Loss: 0.0002766 +2025-03-25 17:31:27,797 Epoch 1265/2000 +2025-03-25 17:32:33,948 Current Learning Rate: 0.0007612493 +2025-03-25 17:32:33,949 Train Loss: 0.0002430, Val Loss: 0.0002749 +2025-03-25 17:32:33,949 Epoch 1266/2000 +2025-03-25 17:33:39,188 Current Learning Rate: 0.0007545207 +2025-03-25 17:33:39,188 Train Loss: 0.0002443, Val Loss: 0.0002739 +2025-03-25 17:33:39,189 Epoch 1267/2000 +2025-03-25 17:34:45,959 Current Learning Rate: 0.0007477293 +2025-03-25 17:34:45,960 Train Loss: 0.0002442, Val Loss: 0.0002757 +2025-03-25 17:34:45,960 Epoch 1268/2000 +2025-03-25 17:35:52,039 Current Learning Rate: 0.0007408768 +2025-03-25 17:35:52,039 Train Loss: 0.0002438, Val Loss: 0.0002816 +2025-03-25 17:35:52,040 Epoch 1269/2000 +2025-03-25 17:36:58,464 Current Learning Rate: 0.0007339649 +2025-03-25 17:36:58,465 Train Loss: 0.0002439, Val Loss: 0.0002780 +2025-03-25 17:36:58,465 Epoch 1270/2000 +2025-03-25 17:38:04,189 Current Learning Rate: 0.0007269952 +2025-03-25 17:38:04,190 Train Loss: 0.0002435, Val Loss: 0.0002732 +2025-03-25 17:38:04,190 Epoch 1271/2000 +2025-03-25 17:39:10,623 Current Learning Rate: 0.0007199696 +2025-03-25 17:39:10,623 Train Loss: 0.0002425, Val Loss: 0.0002728 +2025-03-25 17:39:10,623 Epoch 1272/2000 +2025-03-25 17:40:17,138 Current Learning Rate: 0.0007128896 +2025-03-25 17:40:17,138 Train Loss: 0.0002414, Val Loss: 0.0002725 +2025-03-25 17:40:17,139 Epoch 1273/2000 +2025-03-25 17:41:23,207 Current Learning Rate: 0.0007057572 +2025-03-25 17:41:23,207 Train Loss: 0.0002400, Val Loss: 0.0002723 +2025-03-25 17:41:23,208 Epoch 1274/2000 +2025-03-25 17:42:29,256 Current Learning Rate: 0.0006985739 +2025-03-25 17:42:29,260 Train Loss: 0.0002388, Val Loss: 0.0002703 +2025-03-25 17:42:29,263 Epoch 1275/2000 +2025-03-25 17:43:35,377 Current Learning Rate: 0.0006913417 +2025-03-25 17:43:35,377 Train Loss: 0.0002382, Val Loss: 0.0002682 +2025-03-25 17:43:35,378 Epoch 1276/2000 
+2025-03-25 17:44:42,029 Current Learning Rate: 0.0006840623 +2025-03-25 17:44:42,029 Train Loss: 0.0002379, Val Loss: 0.0002680 +2025-03-25 17:44:42,030 Epoch 1277/2000 +2025-03-25 17:45:47,732 Current Learning Rate: 0.0006767374 +2025-03-25 17:45:47,732 Train Loss: 0.0002378, Val Loss: 0.0002680 +2025-03-25 17:45:47,733 Epoch 1278/2000 +2025-03-25 17:46:54,089 Current Learning Rate: 0.0006693690 +2025-03-25 17:46:54,089 Train Loss: 0.0002378, Val Loss: 0.0002682 +2025-03-25 17:46:54,090 Epoch 1279/2000 +2025-03-25 17:48:00,969 Current Learning Rate: 0.0006619587 +2025-03-25 17:48:00,970 Train Loss: 0.0002382, Val Loss: 0.0002683 +2025-03-25 17:48:00,970 Epoch 1280/2000 +2025-03-25 17:49:06,403 Current Learning Rate: 0.0006545085 +2025-03-25 17:49:06,404 Train Loss: 0.0002390, Val Loss: 0.0002697 +2025-03-25 17:49:06,404 Epoch 1281/2000 +2025-03-25 17:50:12,629 Current Learning Rate: 0.0006470202 +2025-03-25 17:50:12,630 Train Loss: 0.0002396, Val Loss: 0.0002743 +2025-03-25 17:50:12,630 Epoch 1282/2000 +2025-03-25 17:51:19,246 Current Learning Rate: 0.0006394956 +2025-03-25 17:51:19,247 Train Loss: 0.0002401, Val Loss: 0.0002673 +2025-03-25 17:51:19,247 Epoch 1283/2000 +2025-03-25 17:52:25,764 Current Learning Rate: 0.0006319365 +2025-03-25 17:52:25,765 Train Loss: 0.0002399, Val Loss: 0.0002683 +2025-03-25 17:52:25,765 Epoch 1284/2000 +2025-03-25 17:53:32,817 Current Learning Rate: 0.0006243449 +2025-03-25 17:53:32,818 Train Loss: 0.0002387, Val Loss: 0.0002665 +2025-03-25 17:53:32,819 Epoch 1285/2000 +2025-03-25 17:54:39,102 Current Learning Rate: 0.0006167227 +2025-03-25 17:54:39,102 Train Loss: 0.0002375, Val Loss: 0.0002648 +2025-03-25 17:54:39,103 Epoch 1286/2000 +2025-03-25 17:55:45,291 Current Learning Rate: 0.0006090716 +2025-03-25 17:55:45,292 Train Loss: 0.0002365, Val Loss: 0.0002644 +2025-03-25 17:55:45,297 Epoch 1287/2000 +2025-03-25 17:56:51,251 Current Learning Rate: 0.0006013936 +2025-03-25 17:56:51,252 Train Loss: 0.0002358, Val Loss: 0.0002650 
+2025-03-25 17:56:51,253 Epoch 1288/2000 +2025-03-25 17:57:57,352 Current Learning Rate: 0.0005936907 +2025-03-25 17:57:57,352 Train Loss: 0.0002354, Val Loss: 0.0002654 +2025-03-25 17:57:57,353 Epoch 1289/2000 +2025-03-25 17:59:03,950 Current Learning Rate: 0.0005859646 +2025-03-25 17:59:03,951 Train Loss: 0.0002351, Val Loss: 0.0002655 +2025-03-25 17:59:03,951 Epoch 1290/2000 +2025-03-25 18:00:10,649 Current Learning Rate: 0.0005782172 +2025-03-25 18:00:10,649 Train Loss: 0.0002348, Val Loss: 0.0002654 +2025-03-25 18:00:10,650 Epoch 1291/2000 +2025-03-25 18:01:17,152 Current Learning Rate: 0.0005704506 +2025-03-25 18:01:17,153 Train Loss: 0.0002348, Val Loss: 0.0002646 +2025-03-25 18:01:17,153 Epoch 1292/2000 +2025-03-25 18:02:23,615 Current Learning Rate: 0.0005626666 +2025-03-25 18:02:23,616 Train Loss: 0.0002350, Val Loss: 0.0002639 +2025-03-25 18:02:23,616 Epoch 1293/2000 +2025-03-25 18:03:30,133 Current Learning Rate: 0.0005548672 +2025-03-25 18:03:30,134 Train Loss: 0.0002359, Val Loss: 0.0002633 +2025-03-25 18:03:30,134 Epoch 1294/2000 +2025-03-25 18:04:36,143 Current Learning Rate: 0.0005470542 +2025-03-25 18:04:36,144 Train Loss: 0.0002365, Val Loss: 0.0002625 +2025-03-25 18:04:36,144 Epoch 1295/2000 +2025-03-25 18:05:43,176 Current Learning Rate: 0.0005392295 +2025-03-25 18:05:43,176 Train Loss: 0.0002355, Val Loss: 0.0002613 +2025-03-25 18:05:43,177 Epoch 1296/2000 +2025-03-25 18:06:49,431 Current Learning Rate: 0.0005313953 +2025-03-25 18:06:49,431 Train Loss: 0.0002343, Val Loss: 0.0002608 +2025-03-25 18:06:49,431 Epoch 1297/2000 +2025-03-25 18:07:55,579 Current Learning Rate: 0.0005235532 +2025-03-25 18:07:55,580 Train Loss: 0.0002336, Val Loss: 0.0002602 +2025-03-25 18:07:55,580 Epoch 1298/2000 +2025-03-25 18:09:01,697 Current Learning Rate: 0.0005157054 +2025-03-25 18:09:01,697 Train Loss: 0.0002330, Val Loss: 0.0002596 +2025-03-25 18:09:01,698 Epoch 1299/2000 +2025-03-25 18:10:08,348 Current Learning Rate: 0.0005078537 +2025-03-25 18:10:08,348 
Train Loss: 0.0002326, Val Loss: 0.0002594 +2025-03-25 18:10:08,349 Epoch 1300/2000 +2025-03-25 18:11:14,820 Current Learning Rate: 0.0005000000 +2025-03-25 18:11:14,821 Train Loss: 0.0002324, Val Loss: 0.0002594 +2025-03-25 18:11:14,821 Epoch 1301/2000 +2025-03-25 18:12:22,050 Current Learning Rate: 0.0004921463 +2025-03-25 18:12:22,051 Train Loss: 0.0002324, Val Loss: 0.0002594 +2025-03-25 18:12:22,051 Epoch 1302/2000 +2025-03-25 18:13:28,727 Current Learning Rate: 0.0004842946 +2025-03-25 18:13:28,728 Train Loss: 0.0002325, Val Loss: 0.0002594 +2025-03-25 18:13:28,728 Epoch 1303/2000 +2025-03-25 18:14:35,806 Current Learning Rate: 0.0004764468 +2025-03-25 18:14:35,806 Train Loss: 0.0002325, Val Loss: 0.0002591 +2025-03-25 18:14:35,807 Epoch 1304/2000 +2025-03-25 18:15:41,858 Current Learning Rate: 0.0004686047 +2025-03-25 18:15:41,859 Train Loss: 0.0002324, Val Loss: 0.0002595 +2025-03-25 18:15:41,859 Epoch 1305/2000 +2025-03-25 18:16:48,458 Current Learning Rate: 0.0004607705 +2025-03-25 18:16:48,459 Train Loss: 0.0002321, Val Loss: 0.0002583 +2025-03-25 18:16:48,459 Epoch 1306/2000 +2025-03-25 18:17:55,275 Current Learning Rate: 0.0004529458 +2025-03-25 18:17:55,276 Train Loss: 0.0002315, Val Loss: 0.0002576 +2025-03-25 18:17:55,276 Epoch 1307/2000 +2025-03-25 18:19:01,894 Current Learning Rate: 0.0004451328 +2025-03-25 18:19:01,895 Train Loss: 0.0002310, Val Loss: 0.0002573 +2025-03-25 18:19:01,895 Epoch 1308/2000 +2025-03-25 18:20:07,805 Current Learning Rate: 0.0004373334 +2025-03-25 18:20:07,806 Train Loss: 0.0002306, Val Loss: 0.0002570 +2025-03-25 18:20:07,807 Epoch 1309/2000 +2025-03-25 18:21:15,238 Current Learning Rate: 0.0004295494 +2025-03-25 18:21:15,239 Train Loss: 0.0002302, Val Loss: 0.0002567 +2025-03-25 18:21:15,239 Epoch 1310/2000 +2025-03-25 18:22:21,934 Current Learning Rate: 0.0004217828 +2025-03-25 18:22:21,934 Train Loss: 0.0002299, Val Loss: 0.0002565 +2025-03-25 18:22:21,935 Epoch 1311/2000 +2025-03-25 18:23:27,343 Current Learning 
Rate: 0.0004140354 +2025-03-25 18:23:27,344 Train Loss: 0.0002297, Val Loss: 0.0002564 +2025-03-25 18:23:27,345 Epoch 1312/2000 +2025-03-25 18:24:33,160 Current Learning Rate: 0.0004063093 +2025-03-25 18:24:33,161 Train Loss: 0.0002296, Val Loss: 0.0002563 +2025-03-25 18:24:33,161 Epoch 1313/2000 +2025-03-25 18:25:39,472 Current Learning Rate: 0.0003986064 +2025-03-25 18:25:39,473 Train Loss: 0.0002295, Val Loss: 0.0002564 +2025-03-25 18:25:39,473 Epoch 1314/2000 +2025-03-25 18:26:45,485 Current Learning Rate: 0.0003909284 +2025-03-25 18:26:45,486 Train Loss: 0.0002293, Val Loss: 0.0002563 +2025-03-25 18:26:45,486 Epoch 1315/2000 +2025-03-25 18:27:52,667 Current Learning Rate: 0.0003832773 +2025-03-25 18:27:52,667 Train Loss: 0.0002292, Val Loss: 0.0002559 +2025-03-25 18:27:52,668 Epoch 1316/2000 +2025-03-25 18:28:58,678 Current Learning Rate: 0.0003756551 +2025-03-25 18:28:58,679 Train Loss: 0.0002287, Val Loss: 0.0002555 +2025-03-25 18:28:58,679 Epoch 1317/2000 +2025-03-25 18:30:04,667 Current Learning Rate: 0.0003680635 +2025-03-25 18:30:04,667 Train Loss: 0.0002283, Val Loss: 0.0002550 +2025-03-25 18:30:04,668 Epoch 1318/2000 +2025-03-25 18:31:11,205 Current Learning Rate: 0.0003605044 +2025-03-25 18:31:11,206 Train Loss: 0.0002279, Val Loss: 0.0002546 +2025-03-25 18:31:11,206 Epoch 1319/2000 +2025-03-25 18:32:17,094 Current Learning Rate: 0.0003529798 +2025-03-25 18:32:17,095 Train Loss: 0.0002276, Val Loss: 0.0002542 +2025-03-25 18:32:17,095 Epoch 1320/2000 +2025-03-25 18:33:23,534 Current Learning Rate: 0.0003454915 +2025-03-25 18:33:23,535 Train Loss: 0.0002273, Val Loss: 0.0002539 +2025-03-25 18:33:23,535 Epoch 1321/2000 +2025-03-25 18:34:29,567 Current Learning Rate: 0.0003380413 +2025-03-25 18:34:29,567 Train Loss: 0.0002271, Val Loss: 0.0002536 +2025-03-25 18:34:29,568 Epoch 1322/2000 +2025-03-25 18:35:36,428 Current Learning Rate: 0.0003306310 +2025-03-25 18:35:36,428 Train Loss: 0.0002269, Val Loss: 0.0002533 +2025-03-25 18:35:36,429 Epoch 1323/2000 
+2025-03-25 18:36:42,918 Current Learning Rate: 0.0003232626 +2025-03-25 18:36:42,918 Train Loss: 0.0002267, Val Loss: 0.0002531 +2025-03-25 18:36:42,919 Epoch 1324/2000 +2025-03-25 18:37:49,555 Current Learning Rate: 0.0003159377 +2025-03-25 18:37:49,555 Train Loss: 0.0002265, Val Loss: 0.0002528 +2025-03-25 18:37:49,556 Epoch 1325/2000 +2025-03-25 18:38:56,234 Current Learning Rate: 0.0003086583 +2025-03-25 18:38:56,235 Train Loss: 0.0002262, Val Loss: 0.0002524 +2025-03-25 18:38:56,235 Epoch 1326/2000 +2025-03-25 18:40:02,133 Current Learning Rate: 0.0003014261 +2025-03-25 18:40:02,134 Train Loss: 0.0002258, Val Loss: 0.0002520 +2025-03-25 18:40:02,134 Epoch 1327/2000 +2025-03-25 18:41:08,211 Current Learning Rate: 0.0002942428 +2025-03-25 18:41:08,211 Train Loss: 0.0002255, Val Loss: 0.0002516 +2025-03-25 18:41:08,212 Epoch 1328/2000 +2025-03-25 18:42:14,310 Current Learning Rate: 0.0002871104 +2025-03-25 18:42:14,311 Train Loss: 0.0002252, Val Loss: 0.0002513 +2025-03-25 18:42:14,311 Epoch 1329/2000 +2025-03-25 18:43:20,619 Current Learning Rate: 0.0002800304 +2025-03-25 18:43:20,620 Train Loss: 0.0002249, Val Loss: 0.0002511 +2025-03-25 18:43:20,621 Epoch 1330/2000 +2025-03-25 18:44:27,059 Current Learning Rate: 0.0002730048 +2025-03-25 18:44:27,060 Train Loss: 0.0002247, Val Loss: 0.0002509 +2025-03-25 18:44:27,061 Epoch 1331/2000 +2025-03-25 18:45:33,511 Current Learning Rate: 0.0002660351 +2025-03-25 18:45:33,511 Train Loss: 0.0002244, Val Loss: 0.0002507 +2025-03-25 18:45:33,511 Epoch 1332/2000 +2025-03-25 18:46:39,545 Current Learning Rate: 0.0002591232 +2025-03-25 18:46:39,546 Train Loss: 0.0002242, Val Loss: 0.0002505 +2025-03-25 18:46:39,546 Epoch 1333/2000 +2025-03-25 18:47:45,503 Current Learning Rate: 0.0002522707 +2025-03-25 18:47:45,503 Train Loss: 0.0002240, Val Loss: 0.0002502 +2025-03-25 18:47:45,504 Epoch 1334/2000 +2025-03-25 18:48:51,809 Current Learning Rate: 0.0002454793 +2025-03-25 18:48:51,810 Train Loss: 0.0002237, Val Loss: 0.0002498 
+2025-03-25 18:48:51,810 Epoch 1335/2000 +2025-03-25 18:49:57,610 Current Learning Rate: 0.0002387507 +2025-03-25 18:49:57,611 Train Loss: 0.0002235, Val Loss: 0.0002493 +2025-03-25 18:49:57,611 Epoch 1336/2000 +2025-03-25 18:51:03,543 Current Learning Rate: 0.0002320866 +2025-03-25 18:51:03,544 Train Loss: 0.0002232, Val Loss: 0.0002491 +2025-03-25 18:51:03,544 Epoch 1337/2000 +2025-03-25 18:52:09,240 Current Learning Rate: 0.0002254886 +2025-03-25 18:52:09,240 Train Loss: 0.0002229, Val Loss: 0.0002488 +2025-03-25 18:52:09,241 Epoch 1338/2000 +2025-03-25 18:53:15,468 Current Learning Rate: 0.0002189583 +2025-03-25 18:53:15,469 Train Loss: 0.0002227, Val Loss: 0.0002486 +2025-03-25 18:53:15,469 Epoch 1339/2000 +2025-03-25 18:54:21,415 Current Learning Rate: 0.0002124974 +2025-03-25 18:54:21,416 Train Loss: 0.0002225, Val Loss: 0.0002485 +2025-03-25 18:54:21,416 Epoch 1340/2000 +2025-03-25 18:55:27,842 Current Learning Rate: 0.0002061074 +2025-03-25 18:55:27,843 Train Loss: 0.0002222, Val Loss: 0.0002483 +2025-03-25 18:55:27,843 Epoch 1341/2000 +2025-03-25 18:56:34,021 Current Learning Rate: 0.0001997899 +2025-03-25 18:56:34,021 Train Loss: 0.0002220, Val Loss: 0.0002482 +2025-03-25 18:56:34,022 Epoch 1342/2000 +2025-03-25 18:57:40,327 Current Learning Rate: 0.0001935465 +2025-03-25 18:57:40,327 Train Loss: 0.0002218, Val Loss: 0.0002482 +2025-03-25 18:57:40,328 Epoch 1343/2000 +2025-03-25 18:58:46,282 Current Learning Rate: 0.0001873787 +2025-03-25 18:58:46,283 Train Loss: 0.0002215, Val Loss: 0.0002481 +2025-03-25 18:58:46,283 Epoch 1344/2000 +2025-03-25 18:59:52,094 Current Learning Rate: 0.0001812880 +2025-03-25 18:59:52,095 Train Loss: 0.0002213, Val Loss: 0.0002481 +2025-03-25 18:59:52,095 Epoch 1345/2000 +2025-03-25 19:00:58,517 Current Learning Rate: 0.0001752760 +2025-03-25 19:00:58,518 Train Loss: 0.0002211, Val Loss: 0.0002482 +2025-03-25 19:00:58,518 Epoch 1346/2000 +2025-03-25 19:02:05,181 Current Learning Rate: 0.0001693441 +2025-03-25 19:02:05,182 
Train Loss: 0.0002209, Val Loss: 0.0002482 +2025-03-25 19:02:05,183 Epoch 1347/2000 +2025-03-25 19:03:11,351 Current Learning Rate: 0.0001634937 +2025-03-25 19:03:11,354 Train Loss: 0.0002207, Val Loss: 0.0002482 +2025-03-25 19:03:11,355 Epoch 1348/2000 +2025-03-25 19:04:17,724 Current Learning Rate: 0.0001577264 +2025-03-25 19:04:17,724 Train Loss: 0.0002205, Val Loss: 0.0002482 +2025-03-25 19:04:17,725 Epoch 1349/2000 +2025-03-25 19:05:23,360 Current Learning Rate: 0.0001520436 +2025-03-25 19:05:23,361 Train Loss: 0.0002203, Val Loss: 0.0002482 +2025-03-25 19:05:23,361 Epoch 1350/2000 +2025-03-25 19:06:29,779 Current Learning Rate: 0.0001464466 +2025-03-25 19:06:29,780 Train Loss: 0.0002201, Val Loss: 0.0002481 +2025-03-25 19:06:29,780 Epoch 1351/2000 +2025-03-25 19:07:35,744 Current Learning Rate: 0.0001409369 +2025-03-25 19:07:35,744 Train Loss: 0.0002199, Val Loss: 0.0002479 +2025-03-25 19:07:35,745 Epoch 1352/2000 +2025-03-25 19:08:41,962 Current Learning Rate: 0.0001355157 +2025-03-25 19:08:41,963 Train Loss: 0.0002197, Val Loss: 0.0002477 +2025-03-25 19:08:41,963 Epoch 1353/2000 +2025-03-25 19:09:48,088 Current Learning Rate: 0.0001301845 +2025-03-25 19:09:48,176 Train Loss: 0.0002195, Val Loss: 0.0002473 +2025-03-25 19:09:48,177 Epoch 1354/2000 +2025-03-25 19:10:54,246 Current Learning Rate: 0.0001249445 +2025-03-25 19:10:54,323 Train Loss: 0.0002193, Val Loss: 0.0002468 +2025-03-25 19:10:54,323 Epoch 1355/2000 +2025-03-25 19:12:00,089 Current Learning Rate: 0.0001197970 +2025-03-25 19:12:00,198 Train Loss: 0.0002191, Val Loss: 0.0002462 +2025-03-25 19:12:00,199 Epoch 1356/2000 +2025-03-25 19:13:05,634 Current Learning Rate: 0.0001147434 +2025-03-25 19:13:05,718 Train Loss: 0.0002189, Val Loss: 0.0002458 +2025-03-25 19:13:05,719 Epoch 1357/2000 +2025-03-25 19:14:11,339 Current Learning Rate: 0.0001097848 +2025-03-25 19:14:11,411 Train Loss: 0.0002187, Val Loss: 0.0002455 +2025-03-25 19:14:11,411 Epoch 1358/2000 +2025-03-25 19:15:17,277 Current Learning 
Rate: 0.0001049225 +2025-03-25 19:15:17,360 Train Loss: 0.0002185, Val Loss: 0.0002453 +2025-03-25 19:15:17,361 Epoch 1359/2000 +2025-03-25 19:16:23,599 Current Learning Rate: 0.0001001577 +2025-03-25 19:16:23,685 Train Loss: 0.0002183, Val Loss: 0.0002450 +2025-03-25 19:16:23,686 Epoch 1360/2000 +2025-03-25 19:17:29,999 Current Learning Rate: 0.0000954915 +2025-03-25 19:17:30,084 Train Loss: 0.0002182, Val Loss: 0.0002447 +2025-03-25 19:17:30,084 Epoch 1361/2000 +2025-03-25 19:18:35,852 Current Learning Rate: 0.0000909251 +2025-03-25 19:18:35,960 Train Loss: 0.0002180, Val Loss: 0.0002444 +2025-03-25 19:18:35,961 Epoch 1362/2000 +2025-03-25 19:19:42,516 Current Learning Rate: 0.0000864597 +2025-03-25 19:19:42,598 Train Loss: 0.0002178, Val Loss: 0.0002442 +2025-03-25 19:19:42,599 Epoch 1363/2000 +2025-03-25 19:20:48,623 Current Learning Rate: 0.0000820963 +2025-03-25 19:20:48,718 Train Loss: 0.0002176, Val Loss: 0.0002440 +2025-03-25 19:20:48,718 Epoch 1364/2000 +2025-03-25 19:21:55,012 Current Learning Rate: 0.0000778360 +2025-03-25 19:21:55,094 Train Loss: 0.0002175, Val Loss: 0.0002439 +2025-03-25 19:21:55,094 Epoch 1365/2000 +2025-03-25 19:23:00,348 Current Learning Rate: 0.0000736799 +2025-03-25 19:23:00,428 Train Loss: 0.0002173, Val Loss: 0.0002437 +2025-03-25 19:23:00,428 Epoch 1366/2000 +2025-03-25 19:24:07,496 Current Learning Rate: 0.0000696290 +2025-03-25 19:24:07,593 Train Loss: 0.0002171, Val Loss: 0.0002436 +2025-03-25 19:24:07,594 Epoch 1367/2000 +2025-03-25 19:25:13,956 Current Learning Rate: 0.0000656842 +2025-03-25 19:25:14,066 Train Loss: 0.0002170, Val Loss: 0.0002436 +2025-03-25 19:25:14,066 Epoch 1368/2000 +2025-03-25 19:26:20,263 Current Learning Rate: 0.0000618467 +2025-03-25 19:26:20,344 Train Loss: 0.0002168, Val Loss: 0.0002435 +2025-03-25 19:26:20,344 Epoch 1369/2000 +2025-03-25 19:27:26,699 Current Learning Rate: 0.0000581172 +2025-03-25 19:27:26,798 Train Loss: 0.0002167, Val Loss: 0.0002435 +2025-03-25 19:27:26,799 Epoch 1370/2000 
+2025-03-25 19:28:33,057 Current Learning Rate: 0.0000544967 +2025-03-25 19:28:33,155 Train Loss: 0.0002165, Val Loss: 0.0002433 +2025-03-25 19:28:33,155 Epoch 1371/2000 +2025-03-25 19:29:39,263 Current Learning Rate: 0.0000509862 +2025-03-25 19:29:39,345 Train Loss: 0.0002164, Val Loss: 0.0002432 +2025-03-25 19:29:39,346 Epoch 1372/2000 +2025-03-25 19:30:45,396 Current Learning Rate: 0.0000475865 +2025-03-25 19:30:45,488 Train Loss: 0.0002162, Val Loss: 0.0002430 +2025-03-25 19:30:45,489 Epoch 1373/2000 +2025-03-25 19:31:51,419 Current Learning Rate: 0.0000442984 +2025-03-25 19:31:51,506 Train Loss: 0.0002161, Val Loss: 0.0002428 +2025-03-25 19:31:51,506 Epoch 1374/2000 +2025-03-25 19:32:57,398 Current Learning Rate: 0.0000411227 +2025-03-25 19:32:57,478 Train Loss: 0.0002160, Val Loss: 0.0002427 +2025-03-25 19:32:57,479 Epoch 1375/2000 +2025-03-25 19:34:03,526 Current Learning Rate: 0.0000380602 +2025-03-25 19:34:03,619 Train Loss: 0.0002158, Val Loss: 0.0002426 +2025-03-25 19:34:03,619 Epoch 1376/2000 +2025-03-25 19:35:10,236 Current Learning Rate: 0.0000351118 +2025-03-25 19:35:10,327 Train Loss: 0.0002157, Val Loss: 0.0002425 +2025-03-25 19:35:10,327 Epoch 1377/2000 +2025-03-25 19:36:16,842 Current Learning Rate: 0.0000322780 +2025-03-25 19:36:16,917 Train Loss: 0.0002156, Val Loss: 0.0002425 +2025-03-25 19:36:16,917 Epoch 1378/2000 +2025-03-25 19:37:23,505 Current Learning Rate: 0.0000295596 +2025-03-25 19:37:23,590 Train Loss: 0.0002155, Val Loss: 0.0002424 +2025-03-25 19:37:23,591 Epoch 1379/2000 +2025-03-25 19:38:29,164 Current Learning Rate: 0.0000269573 +2025-03-25 19:38:29,266 Train Loss: 0.0002153, Val Loss: 0.0002424 +2025-03-25 19:38:29,266 Epoch 1380/2000 +2025-03-25 19:39:35,009 Current Learning Rate: 0.0000244717 +2025-03-25 19:39:35,105 Train Loss: 0.0002152, Val Loss: 0.0002423 +2025-03-25 19:39:35,105 Epoch 1381/2000 +2025-03-25 19:40:41,821 Current Learning Rate: 0.0000221035 +2025-03-25 19:40:41,925 Train Loss: 0.0002151, Val Loss: 0.0002422 
+2025-03-25 19:40:41,925 Epoch 1382/2000 +2025-03-25 19:41:47,626 Current Learning Rate: 0.0000198532 +2025-03-25 19:41:47,701 Train Loss: 0.0002150, Val Loss: 0.0002421 +2025-03-25 19:41:47,701 Epoch 1383/2000 +2025-03-25 19:42:54,021 Current Learning Rate: 0.0000177213 +2025-03-25 19:42:54,108 Train Loss: 0.0002149, Val Loss: 0.0002420 +2025-03-25 19:42:54,109 Epoch 1384/2000 +2025-03-25 19:43:59,767 Current Learning Rate: 0.0000157084 +2025-03-25 19:43:59,848 Train Loss: 0.0002148, Val Loss: 0.0002420 +2025-03-25 19:43:59,849 Epoch 1385/2000 +2025-03-25 19:45:05,659 Current Learning Rate: 0.0000138150 +2025-03-25 19:45:05,745 Train Loss: 0.0002148, Val Loss: 0.0002419 +2025-03-25 19:45:05,745 Epoch 1386/2000 +2025-03-25 19:46:11,804 Current Learning Rate: 0.0000120416 +2025-03-25 19:46:11,899 Train Loss: 0.0002147, Val Loss: 0.0002418 +2025-03-25 19:46:11,900 Epoch 1387/2000 +2025-03-25 19:47:18,349 Current Learning Rate: 0.0000103886 +2025-03-25 19:47:18,433 Train Loss: 0.0002146, Val Loss: 0.0002418 +2025-03-25 19:47:18,433 Epoch 1388/2000 +2025-03-25 19:48:24,034 Current Learning Rate: 0.0000088564 +2025-03-25 19:48:24,118 Train Loss: 0.0002145, Val Loss: 0.0002418 +2025-03-25 19:48:24,118 Epoch 1389/2000 +2025-03-25 19:49:30,198 Current Learning Rate: 0.0000074453 +2025-03-25 19:49:30,279 Train Loss: 0.0002144, Val Loss: 0.0002417 +2025-03-25 19:49:30,279 Epoch 1390/2000 +2025-03-25 19:50:36,537 Current Learning Rate: 0.0000061558 +2025-03-25 19:50:36,624 Train Loss: 0.0002144, Val Loss: 0.0002417 +2025-03-25 19:50:36,624 Epoch 1391/2000 +2025-03-25 19:51:42,954 Current Learning Rate: 0.0000049882 +2025-03-25 19:51:43,055 Train Loss: 0.0002143, Val Loss: 0.0002417 +2025-03-25 19:51:43,056 Epoch 1392/2000 +2025-03-25 19:52:48,937 Current Learning Rate: 0.0000039426 +2025-03-25 19:52:49,019 Train Loss: 0.0002142, Val Loss: 0.0002416 +2025-03-25 19:52:49,019 Epoch 1393/2000 +2025-03-25 19:53:55,428 Current Learning Rate: 0.0000030195 +2025-03-25 19:53:55,535 
Train Loss: 0.0002142, Val Loss: 0.0002416 +2025-03-25 19:53:55,536 Epoch 1394/2000 +2025-03-25 19:55:02,130 Current Learning Rate: 0.0000022190 +2025-03-25 19:55:02,131 Train Loss: 0.0002141, Val Loss: 0.0002416 +2025-03-25 19:55:02,131 Epoch 1395/2000 +2025-03-25 19:56:08,979 Current Learning Rate: 0.0000015413 +2025-03-25 19:56:08,980 Train Loss: 0.0002141, Val Loss: 0.0002416 +2025-03-25 19:56:08,980 Epoch 1396/2000 +2025-03-25 19:57:14,134 Current Learning Rate: 0.0000009866 +2025-03-25 19:57:14,135 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 19:57:14,135 Epoch 1397/2000 +2025-03-25 19:58:21,324 Current Learning Rate: 0.0000005551 +2025-03-25 19:58:21,325 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 19:58:21,325 Epoch 1398/2000 +2025-03-25 19:59:27,338 Current Learning Rate: 0.0000002467 +2025-03-25 19:59:27,339 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 19:59:27,339 Epoch 1399/2000 +2025-03-25 20:00:33,708 Current Learning Rate: 0.0000000617 +2025-03-25 20:00:33,709 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:00:33,709 Epoch 1400/2000 +2025-03-25 20:01:39,511 Current Learning Rate: 0.0000000000 +2025-03-25 20:01:39,511 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:01:39,512 Epoch 1401/2000 +2025-03-25 20:02:45,624 Current Learning Rate: 0.0000000617 +2025-03-25 20:02:45,624 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:02:45,625 Epoch 1402/2000 +2025-03-25 20:03:51,750 Current Learning Rate: 0.0000002467 +2025-03-25 20:03:51,750 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:03:51,751 Epoch 1403/2000 +2025-03-25 20:04:57,853 Current Learning Rate: 0.0000005551 +2025-03-25 20:04:57,854 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:04:57,854 Epoch 1404/2000 +2025-03-25 20:06:04,394 Current Learning Rate: 0.0000009866 +2025-03-25 20:06:04,394 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:06:04,394 Epoch 1405/2000 +2025-03-25 20:07:11,064 Current Learning 
Rate: 0.0000015413 +2025-03-25 20:07:11,064 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:07:11,065 Epoch 1406/2000 +2025-03-25 20:08:17,004 Current Learning Rate: 0.0000022190 +2025-03-25 20:08:17,005 Train Loss: 0.0002140, Val Loss: 0.0002416 +2025-03-25 20:08:17,005 Epoch 1407/2000 +2025-03-25 20:09:23,327 Current Learning Rate: 0.0000030195 +2025-03-25 20:09:23,328 Train Loss: 0.0002141, Val Loss: 0.0002416 +2025-03-25 20:09:23,329 Epoch 1408/2000 +2025-03-25 20:10:29,075 Current Learning Rate: 0.0000039426 +2025-03-25 20:10:29,161 Train Loss: 0.0002141, Val Loss: 0.0002416 +2025-03-25 20:10:29,162 Epoch 1409/2000 +2025-03-25 20:11:35,821 Current Learning Rate: 0.0000049882 +2025-03-25 20:11:35,930 Train Loss: 0.0002142, Val Loss: 0.0002416 +2025-03-25 20:11:35,930 Epoch 1410/2000 +2025-03-25 20:12:41,444 Current Learning Rate: 0.0000061558 +2025-03-25 20:12:41,444 Train Loss: 0.0002142, Val Loss: 0.0002416 +2025-03-25 20:12:41,445 Epoch 1411/2000 +2025-03-25 20:13:47,703 Current Learning Rate: 0.0000074453 +2025-03-25 20:13:47,704 Train Loss: 0.0002143, Val Loss: 0.0002416 +2025-03-25 20:13:47,704 Epoch 1412/2000 +2025-03-25 20:14:53,736 Current Learning Rate: 0.0000088564 +2025-03-25 20:14:53,737 Train Loss: 0.0002143, Val Loss: 0.0002417 +2025-03-25 20:14:53,738 Epoch 1413/2000 +2025-03-25 20:15:59,856 Current Learning Rate: 0.0000103886 +2025-03-25 20:15:59,857 Train Loss: 0.0002144, Val Loss: 0.0002417 +2025-03-25 20:15:59,858 Epoch 1414/2000 +2025-03-25 20:17:05,901 Current Learning Rate: 0.0000120416 +2025-03-25 20:17:05,902 Train Loss: 0.0002145, Val Loss: 0.0002417 +2025-03-25 20:17:05,902 Epoch 1415/2000 +2025-03-25 20:18:11,974 Current Learning Rate: 0.0000138150 +2025-03-25 20:18:11,974 Train Loss: 0.0002145, Val Loss: 0.0002418 +2025-03-25 20:18:11,975 Epoch 1416/2000 +2025-03-25 20:19:17,985 Current Learning Rate: 0.0000157084 +2025-03-25 20:19:17,985 Train Loss: 0.0002146, Val Loss: 0.0002418 +2025-03-25 20:19:17,985 Epoch 1417/2000 
+2025-03-25 20:20:23,764 Current Learning Rate: 0.0000177213 +2025-03-25 20:20:23,764 Train Loss: 0.0002147, Val Loss: 0.0002418 +2025-03-25 20:20:23,764 Epoch 1418/2000 +2025-03-25 20:21:29,837 Current Learning Rate: 0.0000198532 +2025-03-25 20:21:29,838 Train Loss: 0.0002147, Val Loss: 0.0002419 +2025-03-25 20:21:29,838 Epoch 1419/2000 +2025-03-25 20:22:35,435 Current Learning Rate: 0.0000221035 +2025-03-25 20:22:35,436 Train Loss: 0.0002148, Val Loss: 0.0002420 +2025-03-25 20:22:35,436 Epoch 1420/2000 +2025-03-25 20:23:42,228 Current Learning Rate: 0.0000244717 +2025-03-25 20:23:42,229 Train Loss: 0.0002149, Val Loss: 0.0002420 +2025-03-25 20:23:42,229 Epoch 1421/2000 +2025-03-25 20:24:48,644 Current Learning Rate: 0.0000269573 +2025-03-25 20:24:48,645 Train Loss: 0.0002150, Val Loss: 0.0002421 +2025-03-25 20:24:48,645 Epoch 1422/2000 +2025-03-25 20:25:55,205 Current Learning Rate: 0.0000295596 +2025-03-25 20:25:55,206 Train Loss: 0.0002151, Val Loss: 0.0002422 +2025-03-25 20:25:55,206 Epoch 1423/2000 +2025-03-25 20:27:01,342 Current Learning Rate: 0.0000322780 +2025-03-25 20:27:01,343 Train Loss: 0.0002152, Val Loss: 0.0002422 +2025-03-25 20:27:01,343 Epoch 1424/2000 +2025-03-25 20:28:07,867 Current Learning Rate: 0.0000351118 +2025-03-25 20:28:07,867 Train Loss: 0.0002153, Val Loss: 0.0002422 +2025-03-25 20:28:07,868 Epoch 1425/2000 +2025-03-25 20:29:14,044 Current Learning Rate: 0.0000380602 +2025-03-25 20:29:14,045 Train Loss: 0.0002154, Val Loss: 0.0002423 +2025-03-25 20:29:14,045 Epoch 1426/2000 +2025-03-25 20:30:19,723 Current Learning Rate: 0.0000411227 +2025-03-25 20:30:19,724 Train Loss: 0.0002155, Val Loss: 0.0002423 +2025-03-25 20:30:19,724 Epoch 1427/2000 +2025-03-25 20:31:26,651 Current Learning Rate: 0.0000442984 +2025-03-25 20:31:26,652 Train Loss: 0.0002156, Val Loss: 0.0002424 +2025-03-25 20:31:26,652 Epoch 1428/2000 +2025-03-25 20:32:33,065 Current Learning Rate: 0.0000475865 +2025-03-25 20:32:33,066 Train Loss: 0.0002157, Val Loss: 0.0002425 
+2025-03-25 20:32:33,066 Epoch 1429/2000 +2025-03-25 20:33:39,450 Current Learning Rate: 0.0000509862 +2025-03-25 20:33:39,451 Train Loss: 0.0002158, Val Loss: 0.0002426 +2025-03-25 20:33:39,451 Epoch 1430/2000 +2025-03-25 20:34:45,235 Current Learning Rate: 0.0000544967 +2025-03-25 20:34:45,236 Train Loss: 0.0002159, Val Loss: 0.0002427 +2025-03-25 20:34:45,236 Epoch 1431/2000 +2025-03-25 20:35:52,029 Current Learning Rate: 0.0000581172 +2025-03-25 20:35:52,029 Train Loss: 0.0002161, Val Loss: 0.0002428 +2025-03-25 20:35:52,030 Epoch 1432/2000 +2025-03-25 20:36:58,094 Current Learning Rate: 0.0000618467 +2025-03-25 20:36:58,095 Train Loss: 0.0002162, Val Loss: 0.0002429 +2025-03-25 20:36:58,095 Epoch 1433/2000 +2025-03-25 20:38:04,255 Current Learning Rate: 0.0000656842 +2025-03-25 20:38:04,256 Train Loss: 0.0002163, Val Loss: 0.0002429 +2025-03-25 20:38:04,256 Epoch 1434/2000 +2025-03-25 20:39:10,518 Current Learning Rate: 0.0000696290 +2025-03-25 20:39:10,518 Train Loss: 0.0002164, Val Loss: 0.0002430 +2025-03-25 20:39:10,519 Epoch 1435/2000 +2025-03-25 20:40:16,673 Current Learning Rate: 0.0000736799 +2025-03-25 20:40:16,674 Train Loss: 0.0002165, Val Loss: 0.0002431 +2025-03-25 20:40:16,674 Epoch 1436/2000 +2025-03-25 20:41:23,035 Current Learning Rate: 0.0000778360 +2025-03-25 20:41:23,036 Train Loss: 0.0002167, Val Loss: 0.0002432 +2025-03-25 20:41:23,036 Epoch 1437/2000 +2025-03-25 20:42:29,007 Current Learning Rate: 0.0000820963 +2025-03-25 20:42:29,008 Train Loss: 0.0002168, Val Loss: 0.0002434 +2025-03-25 20:42:29,008 Epoch 1438/2000 +2025-03-25 20:43:35,117 Current Learning Rate: 0.0000864597 +2025-03-25 20:43:35,117 Train Loss: 0.0002169, Val Loss: 0.0002436 +2025-03-25 20:43:35,118 Epoch 1439/2000 +2025-03-25 20:44:41,304 Current Learning Rate: 0.0000909251 +2025-03-25 20:44:41,305 Train Loss: 0.0002171, Val Loss: 0.0002438 +2025-03-25 20:44:41,305 Epoch 1440/2000 +2025-03-25 20:45:47,261 Current Learning Rate: 0.0000954915 +2025-03-25 20:45:47,262 
Train Loss: 0.0002172, Val Loss: 0.0002440 +2025-03-25 20:45:47,262 Epoch 1441/2000 +2025-03-25 20:46:53,069 Current Learning Rate: 0.0001001577 +2025-03-25 20:46:53,069 Train Loss: 0.0002174, Val Loss: 0.0002442 +2025-03-25 20:46:53,070 Epoch 1442/2000 +2025-03-25 20:47:58,554 Current Learning Rate: 0.0001049225 +2025-03-25 20:47:58,554 Train Loss: 0.0002175, Val Loss: 0.0002444 +2025-03-25 20:47:58,555 Epoch 1443/2000 +2025-03-25 20:49:04,202 Current Learning Rate: 0.0001097848 +2025-03-25 20:49:04,203 Train Loss: 0.0002176, Val Loss: 0.0002447 +2025-03-25 20:49:04,203 Epoch 1444/2000 +2025-03-25 20:50:09,951 Current Learning Rate: 0.0001147434 +2025-03-25 20:50:09,952 Train Loss: 0.0002178, Val Loss: 0.0002449 +2025-03-25 20:50:09,952 Epoch 1445/2000 +2025-03-25 20:51:15,532 Current Learning Rate: 0.0001197970 +2025-03-25 20:51:15,533 Train Loss: 0.0002179, Val Loss: 0.0002450 +2025-03-25 20:51:15,533 Epoch 1446/2000 +2025-03-25 20:52:20,978 Current Learning Rate: 0.0001249445 +2025-03-25 20:52:20,978 Train Loss: 0.0002181, Val Loss: 0.0002451 +2025-03-25 20:52:20,979 Epoch 1447/2000 +2025-03-25 20:53:26,384 Current Learning Rate: 0.0001301845 +2025-03-25 20:53:26,384 Train Loss: 0.0002182, Val Loss: 0.0002451 +2025-03-25 20:53:26,385 Epoch 1448/2000 +2025-03-25 20:54:31,768 Current Learning Rate: 0.0001355157 +2025-03-25 20:54:31,768 Train Loss: 0.0002184, Val Loss: 0.0002452 +2025-03-25 20:54:31,769 Epoch 1449/2000 +2025-03-25 20:55:37,979 Current Learning Rate: 0.0001409369 +2025-03-25 20:55:37,980 Train Loss: 0.0002186, Val Loss: 0.0002453 +2025-03-25 20:55:37,980 Epoch 1450/2000 +2025-03-25 20:56:43,439 Current Learning Rate: 0.0001464466 +2025-03-25 20:56:43,439 Train Loss: 0.0002187, Val Loss: 0.0002453 +2025-03-25 20:56:43,440 Epoch 1451/2000 +2025-03-25 20:57:49,170 Current Learning Rate: 0.0001520436 +2025-03-25 20:57:49,170 Train Loss: 0.0002189, Val Loss: 0.0002454 +2025-03-25 20:57:49,171 Epoch 1452/2000 +2025-03-25 20:58:54,398 Current Learning 
Rate: 0.0001577264 +2025-03-25 20:58:54,399 Train Loss: 0.0002191, Val Loss: 0.0002455 +2025-03-25 20:58:54,399 Epoch 1453/2000 +2025-03-25 20:59:59,578 Current Learning Rate: 0.0001634937 +2025-03-25 20:59:59,578 Train Loss: 0.0002192, Val Loss: 0.0002456 +2025-03-25 20:59:59,578 Epoch 1454/2000 +2025-03-25 21:01:05,117 Current Learning Rate: 0.0001693441 +2025-03-25 21:01:05,117 Train Loss: 0.0002194, Val Loss: 0.0002457 +2025-03-25 21:01:05,117 Epoch 1455/2000 +2025-03-25 21:02:10,119 Current Learning Rate: 0.0001752760 +2025-03-25 21:02:10,120 Train Loss: 0.0002196, Val Loss: 0.0002458 +2025-03-25 21:02:10,120 Epoch 1456/2000 +2025-03-25 21:03:15,521 Current Learning Rate: 0.0001812880 +2025-03-25 21:03:15,522 Train Loss: 0.0002197, Val Loss: 0.0002459 +2025-03-25 21:03:15,522 Epoch 1457/2000 +2025-03-25 21:04:21,029 Current Learning Rate: 0.0001873787 +2025-03-25 21:04:21,030 Train Loss: 0.0002199, Val Loss: 0.0002460 +2025-03-25 21:04:21,030 Epoch 1458/2000 +2025-03-25 21:05:27,813 Current Learning Rate: 0.0001935465 +2025-03-25 21:05:27,813 Train Loss: 0.0002201, Val Loss: 0.0002462 +2025-03-25 21:05:27,814 Epoch 1459/2000 +2025-03-25 21:06:33,253 Current Learning Rate: 0.0001997899 +2025-03-25 21:06:33,253 Train Loss: 0.0002203, Val Loss: 0.0002463 +2025-03-25 21:06:33,254 Epoch 1460/2000 +2025-03-25 21:07:39,286 Current Learning Rate: 0.0002061074 +2025-03-25 21:07:39,287 Train Loss: 0.0002204, Val Loss: 0.0002465 +2025-03-25 21:07:39,287 Epoch 1461/2000 +2025-03-25 21:08:44,421 Current Learning Rate: 0.0002124974 +2025-03-25 21:08:44,422 Train Loss: 0.0002206, Val Loss: 0.0002466 +2025-03-25 21:08:44,422 Epoch 1462/2000 +2025-03-25 21:09:50,075 Current Learning Rate: 0.0002189583 +2025-03-25 21:09:50,075 Train Loss: 0.0002208, Val Loss: 0.0002468 +2025-03-25 21:09:50,076 Epoch 1463/2000 +2025-03-25 21:10:55,969 Current Learning Rate: 0.0002254886 +2025-03-25 21:10:55,969 Train Loss: 0.0002210, Val Loss: 0.0002470 +2025-03-25 21:10:55,969 Epoch 1464/2000 
+2025-03-25 21:12:00,920 Current Learning Rate: 0.0002320866 +2025-03-25 21:12:00,921 Train Loss: 0.0002212, Val Loss: 0.0002472 +2025-03-25 21:12:00,921 Epoch 1465/2000 +2025-03-25 21:13:06,389 Current Learning Rate: 0.0002387507 +2025-03-25 21:13:06,390 Train Loss: 0.0002214, Val Loss: 0.0002474 +2025-03-25 21:13:06,390 Epoch 1466/2000 +2025-03-25 21:14:11,795 Current Learning Rate: 0.0002454793 +2025-03-25 21:14:11,796 Train Loss: 0.0002216, Val Loss: 0.0002476 +2025-03-25 21:14:11,796 Epoch 1467/2000 +2025-03-25 21:15:17,313 Current Learning Rate: 0.0002522707 +2025-03-25 21:15:17,314 Train Loss: 0.0002218, Val Loss: 0.0002478 +2025-03-25 21:15:17,314 Epoch 1468/2000 +2025-03-25 21:16:22,744 Current Learning Rate: 0.0002591232 +2025-03-25 21:16:22,745 Train Loss: 0.0002220, Val Loss: 0.0002480 +2025-03-25 21:16:22,745 Epoch 1469/2000 +2025-03-25 21:17:28,903 Current Learning Rate: 0.0002660351 +2025-03-25 21:17:28,904 Train Loss: 0.0002222, Val Loss: 0.0002483 +2025-03-25 21:17:28,904 Epoch 1470/2000 +2025-03-25 21:18:34,598 Current Learning Rate: 0.0002730048 +2025-03-25 21:18:34,599 Train Loss: 0.0002224, Val Loss: 0.0002485 +2025-03-25 21:18:34,599 Epoch 1471/2000 +2025-03-25 21:19:40,424 Current Learning Rate: 0.0002800304 +2025-03-25 21:19:40,424 Train Loss: 0.0002226, Val Loss: 0.0002488 +2025-03-25 21:19:40,425 Epoch 1472/2000 +2025-03-25 21:20:45,930 Current Learning Rate: 0.0002871104 +2025-03-25 21:20:45,931 Train Loss: 0.0002228, Val Loss: 0.0002490 +2025-03-25 21:20:45,931 Epoch 1473/2000 +2025-03-25 21:21:51,836 Current Learning Rate: 0.0002942428 +2025-03-25 21:21:51,837 Train Loss: 0.0002230, Val Loss: 0.0002492 +2025-03-25 21:21:51,837 Epoch 1474/2000 +2025-03-25 21:22:56,926 Current Learning Rate: 0.0003014261 +2025-03-25 21:22:56,927 Train Loss: 0.0002232, Val Loss: 0.0002495 +2025-03-25 21:22:56,927 Epoch 1475/2000 +2025-03-25 21:24:02,689 Current Learning Rate: 0.0003086583 +2025-03-25 21:24:02,689 Train Loss: 0.0002234, Val Loss: 0.0002497 
+2025-03-25 21:24:02,690 Epoch 1476/2000 +2025-03-25 21:25:08,361 Current Learning Rate: 0.0003159377 +2025-03-25 21:25:08,362 Train Loss: 0.0002236, Val Loss: 0.0002498 +2025-03-25 21:25:08,362 Epoch 1477/2000 +2025-03-25 21:26:13,967 Current Learning Rate: 0.0003232626 +2025-03-25 21:26:13,968 Train Loss: 0.0002238, Val Loss: 0.0002500 +2025-03-25 21:26:13,968 Epoch 1478/2000 +2025-03-25 21:27:20,187 Current Learning Rate: 0.0003306310 +2025-03-25 21:27:20,188 Train Loss: 0.0002240, Val Loss: 0.0002502 +2025-03-25 21:27:20,188 Epoch 1479/2000 +2025-03-25 21:28:25,696 Current Learning Rate: 0.0003380413 +2025-03-25 21:28:25,696 Train Loss: 0.0002242, Val Loss: 0.0002504 +2025-03-25 21:28:25,697 Epoch 1480/2000 +2025-03-25 21:29:31,064 Current Learning Rate: 0.0003454915 +2025-03-25 21:29:31,064 Train Loss: 0.0002245, Val Loss: 0.0002506 +2025-03-25 21:29:31,064 Epoch 1481/2000 +2025-03-25 21:30:36,411 Current Learning Rate: 0.0003529798 +2025-03-25 21:30:36,415 Train Loss: 0.0002247, Val Loss: 0.0002508 +2025-03-25 21:30:36,415 Epoch 1482/2000 +2025-03-25 21:31:42,260 Current Learning Rate: 0.0003605044 +2025-03-25 21:31:42,260 Train Loss: 0.0002249, Val Loss: 0.0002510 +2025-03-25 21:31:42,260 Epoch 1483/2000 +2025-03-25 21:32:47,976 Current Learning Rate: 0.0003680635 +2025-03-25 21:32:47,976 Train Loss: 0.0002251, Val Loss: 0.0002513 +2025-03-25 21:32:47,977 Epoch 1484/2000 +2025-03-25 21:33:53,696 Current Learning Rate: 0.0003756551 +2025-03-25 21:33:53,697 Train Loss: 0.0002253, Val Loss: 0.0002516 +2025-03-25 21:33:53,697 Epoch 1485/2000 +2025-03-25 21:34:59,734 Current Learning Rate: 0.0003832773 +2025-03-25 21:34:59,734 Train Loss: 0.0002255, Val Loss: 0.0002519 +2025-03-25 21:34:59,735 Epoch 1486/2000 +2025-03-25 21:36:05,724 Current Learning Rate: 0.0003909284 +2025-03-25 21:36:05,725 Train Loss: 0.0002258, Val Loss: 0.0002522 +2025-03-25 21:36:05,725 Epoch 1487/2000 +2025-03-25 21:37:11,444 Current Learning Rate: 0.0003986064 +2025-03-25 21:37:11,445 
Train Loss: 0.0002260, Val Loss: 0.0002525 +2025-03-25 21:37:11,445 Epoch 1488/2000 +2025-03-25 21:38:16,445 Current Learning Rate: 0.0004063093 +2025-03-25 21:38:16,446 Train Loss: 0.0002262, Val Loss: 0.0002529 +2025-03-25 21:38:16,446 Epoch 1489/2000 +2025-03-25 21:39:21,575 Current Learning Rate: 0.0004140354 +2025-03-25 21:39:21,575 Train Loss: 0.0002264, Val Loss: 0.0002532 +2025-03-25 21:39:21,576 Epoch 1490/2000 +2025-03-25 21:40:26,787 Current Learning Rate: 0.0004217828 +2025-03-25 21:40:26,788 Train Loss: 0.0002266, Val Loss: 0.0002536 +2025-03-25 21:40:26,788 Epoch 1491/2000 +2025-03-25 21:41:32,123 Current Learning Rate: 0.0004295494 +2025-03-25 21:41:32,123 Train Loss: 0.0002268, Val Loss: 0.0002540 +2025-03-25 21:41:32,123 Epoch 1492/2000 +2025-03-25 21:42:37,450 Current Learning Rate: 0.0004373334 +2025-03-25 21:42:37,451 Train Loss: 0.0002270, Val Loss: 0.0002544 +2025-03-25 21:42:37,451 Epoch 1493/2000 +2025-03-25 21:43:42,357 Current Learning Rate: 0.0004451328 +2025-03-25 21:43:42,357 Train Loss: 0.0002272, Val Loss: 0.0002549 +2025-03-25 21:43:42,357 Epoch 1494/2000 +2025-03-25 21:44:48,461 Current Learning Rate: 0.0004529458 +2025-03-25 21:44:48,461 Train Loss: 0.0002275, Val Loss: 0.0002555 +2025-03-25 21:44:48,462 Epoch 1495/2000 +2025-03-25 21:45:53,652 Current Learning Rate: 0.0004607705 +2025-03-25 21:45:53,652 Train Loss: 0.0002277, Val Loss: 0.0002560 +2025-03-25 21:45:53,653 Epoch 1496/2000 +2025-03-25 21:46:58,887 Current Learning Rate: 0.0004686047 +2025-03-25 21:46:58,888 Train Loss: 0.0002279, Val Loss: 0.0002566 +2025-03-25 21:46:58,888 Epoch 1497/2000 +2025-03-25 21:48:04,457 Current Learning Rate: 0.0004764468 +2025-03-25 21:48:04,458 Train Loss: 0.0002281, Val Loss: 0.0002569 +2025-03-25 21:48:04,458 Epoch 1498/2000 +2025-03-25 21:49:10,132 Current Learning Rate: 0.0004842946 +2025-03-25 21:49:10,133 Train Loss: 0.0002283, Val Loss: 0.0002570 +2025-03-25 21:49:10,133 Epoch 1499/2000 +2025-03-25 21:50:15,566 Current Learning 
Rate: 0.0004921463 +2025-03-25 21:50:15,567 Train Loss: 0.0002285, Val Loss: 0.0002569 +2025-03-25 21:50:15,567 Epoch 1500/2000 +2025-03-25 21:51:21,378 Current Learning Rate: 0.0005000000 +2025-03-25 21:51:21,379 Train Loss: 0.0002288, Val Loss: 0.0002568 +2025-03-25 21:51:21,379 Epoch 1501/2000 +2025-03-25 21:52:26,580 Current Learning Rate: 0.0005078537 +2025-03-25 21:52:26,580 Train Loss: 0.0002290, Val Loss: 0.0002569 +2025-03-25 21:52:26,581 Epoch 1502/2000 +2025-03-25 21:53:32,206 Current Learning Rate: 0.0005157054 +2025-03-25 21:53:32,206 Train Loss: 0.0002292, Val Loss: 0.0002571 +2025-03-25 21:53:32,207 Epoch 1503/2000 +2025-03-25 21:54:37,900 Current Learning Rate: 0.0005235532 +2025-03-25 21:54:37,900 Train Loss: 0.0002294, Val Loss: 0.0002574 +2025-03-25 21:54:37,900 Epoch 1504/2000 +2025-03-25 21:55:43,250 Current Learning Rate: 0.0005313953 +2025-03-25 21:55:43,250 Train Loss: 0.0002297, Val Loss: 0.0002578 +2025-03-25 21:55:43,250 Epoch 1505/2000 +2025-03-25 21:56:49,060 Current Learning Rate: 0.0005392295 +2025-03-25 21:56:49,061 Train Loss: 0.0002299, Val Loss: 0.0002582 +2025-03-25 21:56:49,061 Epoch 1506/2000 +2025-03-25 21:57:53,921 Current Learning Rate: 0.0005470542 +2025-03-25 21:57:53,921 Train Loss: 0.0002301, Val Loss: 0.0002587 +2025-03-25 21:57:53,921 Epoch 1507/2000 +2025-03-25 21:58:59,647 Current Learning Rate: 0.0005548672 +2025-03-25 21:58:59,647 Train Loss: 0.0002304, Val Loss: 0.0002592 +2025-03-25 21:58:59,647 Epoch 1508/2000 +2025-03-25 22:00:05,490 Current Learning Rate: 0.0005626666 +2025-03-25 22:00:05,490 Train Loss: 0.0002308, Val Loss: 0.0002595 +2025-03-25 22:00:05,490 Epoch 1509/2000 +2025-03-25 22:01:11,461 Current Learning Rate: 0.0005704506 +2025-03-25 22:01:11,461 Train Loss: 0.0002313, Val Loss: 0.0002630 +2025-03-25 22:01:11,461 Epoch 1510/2000 +2025-03-25 22:02:17,149 Current Learning Rate: 0.0005782172 +2025-03-25 22:02:17,149 Train Loss: 0.0002319, Val Loss: 0.0002625 +2025-03-25 22:02:17,150 Epoch 1511/2000 
+2025-03-25 22:03:22,777 Current Learning Rate: 0.0005859646 +2025-03-25 22:03:22,777 Train Loss: 0.0002322, Val Loss: 0.0002608 +2025-03-25 22:03:22,780 Epoch 1512/2000 +2025-03-25 22:04:28,028 Current Learning Rate: 0.0005936907 +2025-03-25 22:04:28,029 Train Loss: 0.0002325, Val Loss: 0.0002614 +2025-03-25 22:04:28,029 Epoch 1513/2000 +2025-03-25 22:05:34,190 Current Learning Rate: 0.0006013936 +2025-03-25 22:05:34,190 Train Loss: 0.0002328, Val Loss: 0.0002626 +2025-03-25 22:05:34,190 Epoch 1514/2000 +2025-03-25 22:06:39,827 Current Learning Rate: 0.0006090716 +2025-03-25 22:06:39,828 Train Loss: 0.0002328, Val Loss: 0.0002621 +2025-03-25 22:06:39,828 Epoch 1515/2000 +2025-03-25 22:07:45,402 Current Learning Rate: 0.0006167227 +2025-03-25 22:07:45,403 Train Loss: 0.0002326, Val Loss: 0.0002610 +2025-03-25 22:07:45,403 Epoch 1516/2000 +2025-03-25 22:08:50,941 Current Learning Rate: 0.0006243449 +2025-03-25 22:08:50,942 Train Loss: 0.0002321, Val Loss: 0.0002612 +2025-03-25 22:08:50,942 Epoch 1517/2000 +2025-03-25 22:09:56,661 Current Learning Rate: 0.0006319365 +2025-03-25 22:09:56,662 Train Loss: 0.0002319, Val Loss: 0.0002615 +2025-03-25 22:09:56,662 Epoch 1518/2000 +2025-03-25 22:11:02,347 Current Learning Rate: 0.0006394956 +2025-03-25 22:11:02,347 Train Loss: 0.0002322, Val Loss: 0.0002623 +2025-03-25 22:11:02,348 Epoch 1519/2000 +2025-03-25 22:12:08,221 Current Learning Rate: 0.0006470202 +2025-03-25 22:12:08,222 Train Loss: 0.0002325, Val Loss: 0.0002629 +2025-03-25 22:12:08,222 Epoch 1520/2000 +2025-03-25 22:13:13,916 Current Learning Rate: 0.0006545085 +2025-03-25 22:13:13,917 Train Loss: 0.0002328, Val Loss: 0.0002636 +2025-03-25 22:13:13,917 Epoch 1521/2000 +2025-03-25 22:14:19,629 Current Learning Rate: 0.0006619587 +2025-03-25 22:14:19,630 Train Loss: 0.0002331, Val Loss: 0.0002642 +2025-03-25 22:14:19,630 Epoch 1522/2000 +2025-03-25 22:15:25,114 Current Learning Rate: 0.0006693690 +2025-03-25 22:15:25,115 Train Loss: 0.0002334, Val Loss: 0.0002647 
+2025-03-25 22:15:25,116 Epoch 1523/2000 +2025-03-25 22:16:30,403 Current Learning Rate: 0.0006767374 +2025-03-25 22:16:30,404 Train Loss: 0.0002337, Val Loss: 0.0002649 +2025-03-25 22:16:30,404 Epoch 1524/2000 +2025-03-25 22:17:36,607 Current Learning Rate: 0.0006840623 +2025-03-25 22:17:36,607 Train Loss: 0.0002343, Val Loss: 0.0002650 +2025-03-25 22:17:36,608 Epoch 1525/2000 +2025-03-25 22:18:42,098 Current Learning Rate: 0.0006913417 +2025-03-25 22:18:42,099 Train Loss: 0.0002354, Val Loss: 0.0002660 +2025-03-25 22:18:42,099 Epoch 1526/2000 +2025-03-25 22:19:47,557 Current Learning Rate: 0.0006985739 +2025-03-25 22:19:47,557 Train Loss: 0.0002363, Val Loss: 0.0002643 +2025-03-25 22:19:47,558 Epoch 1527/2000 +2025-03-25 22:20:52,993 Current Learning Rate: 0.0007057572 +2025-03-25 22:20:52,994 Train Loss: 0.0002360, Val Loss: 0.0002649 +2025-03-25 22:20:52,994 Epoch 1528/2000 +2025-03-25 22:21:58,174 Current Learning Rate: 0.0007128896 +2025-03-25 22:21:58,175 Train Loss: 0.0002355, Val Loss: 0.0002654 +2025-03-25 22:21:58,175 Epoch 1529/2000 +2025-03-25 22:23:03,949 Current Learning Rate: 0.0007199696 +2025-03-25 22:23:03,950 Train Loss: 0.0002350, Val Loss: 0.0002650 +2025-03-25 22:23:03,950 Epoch 1530/2000 +2025-03-25 22:24:09,421 Current Learning Rate: 0.0007269952 +2025-03-25 22:24:09,421 Train Loss: 0.0002346, Val Loss: 0.0002656 +2025-03-25 22:24:09,421 Epoch 1531/2000 +2025-03-25 22:25:15,246 Current Learning Rate: 0.0007339649 +2025-03-25 22:25:15,246 Train Loss: 0.0002346, Val Loss: 0.0002665 +2025-03-25 22:25:15,247 Epoch 1532/2000 +2025-03-25 22:26:21,295 Current Learning Rate: 0.0007408768 +2025-03-25 22:26:21,296 Train Loss: 0.0002348, Val Loss: 0.0002673 +2025-03-25 22:26:21,296 Epoch 1533/2000 +2025-03-25 22:27:26,883 Current Learning Rate: 0.0007477293 +2025-03-25 22:27:26,884 Train Loss: 0.0002352, Val Loss: 0.0002682 +2025-03-25 22:27:26,884 Epoch 1534/2000 +2025-03-25 22:28:33,356 Current Learning Rate: 0.0007545207 +2025-03-25 22:28:33,356 
Train Loss: 0.0002356, Val Loss: 0.0002691 +2025-03-25 22:28:33,357 Epoch 1535/2000 +2025-03-25 22:29:39,047 Current Learning Rate: 0.0007612493 +2025-03-25 22:29:39,048 Train Loss: 0.0002361, Val Loss: 0.0002702 +2025-03-25 22:29:39,048 Epoch 1536/2000 +2025-03-25 22:30:44,865 Current Learning Rate: 0.0007679134 +2025-03-25 22:30:44,865 Train Loss: 0.0002367, Val Loss: 0.0002707 +2025-03-25 22:30:44,866 Epoch 1537/2000 +2025-03-25 22:31:50,091 Current Learning Rate: 0.0007745114 +2025-03-25 22:31:50,092 Train Loss: 0.0002375, Val Loss: 0.0002701 +2025-03-25 22:31:50,093 Epoch 1538/2000 +2025-03-25 22:32:55,649 Current Learning Rate: 0.0007810417 +2025-03-25 22:32:55,650 Train Loss: 0.0002383, Val Loss: 0.0002702 +2025-03-25 22:32:55,650 Epoch 1539/2000 +2025-03-25 22:34:00,981 Current Learning Rate: 0.0007875026 +2025-03-25 22:34:00,981 Train Loss: 0.0002391, Val Loss: 0.0002697 +2025-03-25 22:34:00,982 Epoch 1540/2000 +2025-03-25 22:35:06,255 Current Learning Rate: 0.0007938926 +2025-03-25 22:35:06,256 Train Loss: 0.0002391, Val Loss: 0.0002689 +2025-03-25 22:35:06,256 Epoch 1541/2000 +2025-03-25 22:36:11,391 Current Learning Rate: 0.0008002101 +2025-03-25 22:36:11,392 Train Loss: 0.0002386, Val Loss: 0.0002682 +2025-03-25 22:36:11,392 Epoch 1542/2000 +2025-03-25 22:37:16,699 Current Learning Rate: 0.0008064535 +2025-03-25 22:37:16,699 Train Loss: 0.0002375, Val Loss: 0.0002684 +2025-03-25 22:37:16,700 Epoch 1543/2000 +2025-03-25 22:38:22,042 Current Learning Rate: 0.0008126213 +2025-03-25 22:38:22,043 Train Loss: 0.0002366, Val Loss: 0.0002687 +2025-03-25 22:38:22,043 Epoch 1544/2000 +2025-03-25 22:39:28,181 Current Learning Rate: 0.0008187120 +2025-03-25 22:39:28,181 Train Loss: 0.0002366, Val Loss: 0.0002696 +2025-03-25 22:39:28,181 Epoch 1545/2000 +2025-03-25 22:40:33,713 Current Learning Rate: 0.0008247240 +2025-03-25 22:40:33,714 Train Loss: 0.0002370, Val Loss: 0.0002706 +2025-03-25 22:40:33,714 Epoch 1546/2000 +2025-03-25 22:41:39,531 Current Learning 
Rate: 0.0008306559 +2025-03-25 22:41:39,532 Train Loss: 0.0002374, Val Loss: 0.0002713 +2025-03-25 22:41:39,532 Epoch 1547/2000 +2025-03-25 22:42:45,533 Current Learning Rate: 0.0008365063 +2025-03-25 22:42:45,533 Train Loss: 0.0002379, Val Loss: 0.0002720 +2025-03-25 22:42:45,534 Epoch 1548/2000 +2025-03-25 22:43:51,300 Current Learning Rate: 0.0008422736 +2025-03-25 22:43:51,300 Train Loss: 0.0002387, Val Loss: 0.0002732 +2025-03-25 22:43:51,300 Epoch 1549/2000 +2025-03-25 22:44:56,950 Current Learning Rate: 0.0008479564 +2025-03-25 22:44:56,950 Train Loss: 0.0002395, Val Loss: 0.0002743 +2025-03-25 22:44:56,950 Epoch 1550/2000 +2025-03-25 22:46:02,876 Current Learning Rate: 0.0008535534 +2025-03-25 22:46:02,877 Train Loss: 0.0002399, Val Loss: 0.0002736 +2025-03-25 22:46:02,877 Epoch 1551/2000 +2025-03-25 22:47:08,657 Current Learning Rate: 0.0008590631 +2025-03-25 22:47:08,658 Train Loss: 0.0002402, Val Loss: 0.0002746 +2025-03-25 22:47:08,658 Epoch 1552/2000 +2025-03-25 22:48:13,996 Current Learning Rate: 0.0008644843 +2025-03-25 22:48:13,997 Train Loss: 0.0002408, Val Loss: 0.0002739 +2025-03-25 22:48:13,998 Epoch 1553/2000 +2025-03-25 22:49:19,279 Current Learning Rate: 0.0008698155 +2025-03-25 22:49:19,280 Train Loss: 0.0002418, Val Loss: 0.0002757 +2025-03-25 22:49:19,280 Epoch 1554/2000 +2025-03-25 22:50:24,795 Current Learning Rate: 0.0008750555 +2025-03-25 22:50:24,796 Train Loss: 0.0002416, Val Loss: 0.0002745 +2025-03-25 22:50:24,796 Epoch 1555/2000 +2025-03-25 22:51:31,222 Current Learning Rate: 0.0008802030 +2025-03-25 22:51:31,223 Train Loss: 0.0002411, Val Loss: 0.0002726 +2025-03-25 22:51:31,223 Epoch 1556/2000 +2025-03-25 22:52:36,854 Current Learning Rate: 0.0008852566 +2025-03-25 22:52:36,854 Train Loss: 0.0002397, Val Loss: 0.0002710 +2025-03-25 22:52:36,854 Epoch 1557/2000 +2025-03-25 22:53:42,308 Current Learning Rate: 0.0008902152 +2025-03-25 22:53:42,309 Train Loss: 0.0002384, Val Loss: 0.0002712 +2025-03-25 22:53:42,310 Epoch 1558/2000 
+2025-03-25 22:54:47,945 Current Learning Rate: 0.0008950775 +2025-03-25 22:54:47,945 Train Loss: 0.0002384, Val Loss: 0.0002724 +2025-03-25 22:54:47,946 Epoch 1559/2000 +2025-03-25 22:55:53,872 Current Learning Rate: 0.0008998423 +2025-03-25 22:55:53,873 Train Loss: 0.0002388, Val Loss: 0.0002731 +2025-03-25 22:55:53,873 Epoch 1560/2000 +2025-03-25 22:57:00,126 Current Learning Rate: 0.0009045085 +2025-03-25 22:57:00,127 Train Loss: 0.0002394, Val Loss: 0.0002740 +2025-03-25 22:57:00,128 Epoch 1561/2000 +2025-03-25 22:58:05,671 Current Learning Rate: 0.0009090749 +2025-03-25 22:58:05,672 Train Loss: 0.0002403, Val Loss: 0.0002750 +2025-03-25 22:58:05,672 Epoch 1562/2000 +2025-03-25 22:59:11,410 Current Learning Rate: 0.0009135403 +2025-03-25 22:59:11,411 Train Loss: 0.0002410, Val Loss: 0.0002759 +2025-03-25 22:59:11,411 Epoch 1563/2000 +2025-03-25 23:00:17,585 Current Learning Rate: 0.0009179037 +2025-03-25 23:00:17,586 Train Loss: 0.0002413, Val Loss: 0.0002762 +2025-03-25 23:00:17,586 Epoch 1564/2000 +2025-03-25 23:01:23,527 Current Learning Rate: 0.0009221640 +2025-03-25 23:01:23,528 Train Loss: 0.0002414, Val Loss: 0.0002760 +2025-03-25 23:01:23,528 Epoch 1565/2000 +2025-03-25 23:02:28,958 Current Learning Rate: 0.0009263201 +2025-03-25 23:02:28,959 Train Loss: 0.0002419, Val Loss: 0.0002773 +2025-03-25 23:02:28,959 Epoch 1566/2000 +2025-03-25 23:03:34,972 Current Learning Rate: 0.0009303710 +2025-03-25 23:03:34,972 Train Loss: 0.0002425, Val Loss: 0.0002771 +2025-03-25 23:03:34,973 Epoch 1567/2000 +2025-03-25 23:04:40,893 Current Learning Rate: 0.0009343158 +2025-03-25 23:04:40,893 Train Loss: 0.0002428, Val Loss: 0.0002775 +2025-03-25 23:04:40,895 Epoch 1568/2000 +2025-03-25 23:05:46,483 Current Learning Rate: 0.0009381533 +2025-03-25 23:05:46,484 Train Loss: 0.0002438, Val Loss: 0.0002802 +2025-03-25 23:05:46,485 Epoch 1569/2000 +2025-03-25 23:06:51,990 Current Learning Rate: 0.0009418828 +2025-03-25 23:06:51,990 Train Loss: 0.0002436, Val Loss: 0.0002788 
+2025-03-25 23:06:51,991 Epoch 1570/2000 +2025-03-25 23:07:58,161 Current Learning Rate: 0.0009455033 +2025-03-25 23:07:58,162 Train Loss: 0.0002431, Val Loss: 0.0002770 +2025-03-25 23:07:58,162 Epoch 1571/2000 +2025-03-25 23:09:03,958 Current Learning Rate: 0.0009490138 +2025-03-25 23:09:03,959 Train Loss: 0.0002420, Val Loss: 0.0002744 +2025-03-25 23:09:03,959 Epoch 1572/2000 +2025-03-25 23:10:09,260 Current Learning Rate: 0.0009524135 +2025-03-25 23:10:09,261 Train Loss: 0.0002401, Val Loss: 0.0002735 +2025-03-25 23:10:09,261 Epoch 1573/2000 +2025-03-25 23:11:14,739 Current Learning Rate: 0.0009557016 +2025-03-25 23:11:14,740 Train Loss: 0.0002396, Val Loss: 0.0002745 +2025-03-25 23:11:14,740 Epoch 1574/2000 +2025-03-25 23:12:20,235 Current Learning Rate: 0.0009588773 +2025-03-25 23:12:20,235 Train Loss: 0.0002401, Val Loss: 0.0002754 +2025-03-25 23:12:20,235 Epoch 1575/2000 +2025-03-25 23:13:26,268 Current Learning Rate: 0.0009619398 +2025-03-25 23:13:26,269 Train Loss: 0.0002409, Val Loss: 0.0002770 +2025-03-25 23:13:26,269 Epoch 1576/2000 +2025-03-25 23:14:31,210 Current Learning Rate: 0.0009648882 +2025-03-25 23:14:31,212 Train Loss: 0.0002419, Val Loss: 0.0002766 +2025-03-25 23:14:31,212 Epoch 1577/2000 +2025-03-25 23:15:37,525 Current Learning Rate: 0.0009677220 +2025-03-25 23:15:37,525 Train Loss: 0.0002421, Val Loss: 0.0002765 +2025-03-25 23:15:37,525 Epoch 1578/2000 +2025-03-25 23:16:43,533 Current Learning Rate: 0.0009704404 +2025-03-25 23:16:43,533 Train Loss: 0.0002423, Val Loss: 0.0002783 +2025-03-25 23:16:43,534 Epoch 1579/2000 +2025-03-25 23:17:49,232 Current Learning Rate: 0.0009730427 +2025-03-25 23:17:49,233 Train Loss: 0.0002425, Val Loss: 0.0002784 +2025-03-25 23:17:49,234 Epoch 1580/2000 +2025-03-25 23:18:55,242 Current Learning Rate: 0.0009755283 +2025-03-25 23:18:55,243 Train Loss: 0.0002427, Val Loss: 0.0002787 +2025-03-25 23:18:55,243 Epoch 1581/2000 +2025-03-25 23:20:00,904 Current Learning Rate: 0.0009778965 +2025-03-25 23:20:00,905 
Train Loss: 0.0002428, Val Loss: 0.0002780 +2025-03-25 23:20:00,905 Epoch 1582/2000 +2025-03-25 23:21:06,722 Current Learning Rate: 0.0009801468 +2025-03-25 23:21:06,723 Train Loss: 0.0002430, Val Loss: 0.0002782 +2025-03-25 23:21:06,723 Epoch 1583/2000 +2025-03-25 23:22:12,111 Current Learning Rate: 0.0009822787 +2025-03-25 23:22:12,111 Train Loss: 0.0002439, Val Loss: 0.0002795 +2025-03-25 23:22:12,112 Epoch 1584/2000 +2025-03-25 23:23:17,302 Current Learning Rate: 0.0009842916 +2025-03-25 23:23:17,302 Train Loss: 0.0002448, Val Loss: 0.0002812 +2025-03-25 23:23:17,303 Epoch 1585/2000 +2025-03-25 23:24:23,081 Current Learning Rate: 0.0009861850 +2025-03-25 23:24:23,082 Train Loss: 0.0002454, Val Loss: 0.0002842 +2025-03-25 23:24:23,082 Epoch 1586/2000 +2025-03-25 23:25:28,438 Current Learning Rate: 0.0009879584 +2025-03-25 23:25:28,438 Train Loss: 0.0002452, Val Loss: 0.0002812 +2025-03-25 23:25:28,438 Epoch 1587/2000 +2025-03-25 23:26:33,704 Current Learning Rate: 0.0009896114 +2025-03-25 23:26:33,704 Train Loss: 0.0002447, Val Loss: 0.0002785 +2025-03-25 23:26:33,705 Epoch 1588/2000 +2025-03-25 23:27:39,047 Current Learning Rate: 0.0009911436 +2025-03-25 23:27:39,047 Train Loss: 0.0002433, Val Loss: 0.0002755 +2025-03-25 23:27:39,047 Epoch 1589/2000 +2025-03-25 23:28:44,716 Current Learning Rate: 0.0009925547 +2025-03-25 23:28:44,716 Train Loss: 0.0002410, Val Loss: 0.0002745 +2025-03-25 23:28:44,717 Epoch 1590/2000 +2025-03-25 23:29:50,053 Current Learning Rate: 0.0009938442 +2025-03-25 23:29:50,053 Train Loss: 0.0002402, Val Loss: 0.0002756 +2025-03-25 23:29:50,053 Epoch 1591/2000 +2025-03-25 23:30:55,505 Current Learning Rate: 0.0009950118 +2025-03-25 23:30:55,506 Train Loss: 0.0002405, Val Loss: 0.0002767 +2025-03-25 23:30:55,531 Epoch 1592/2000 +2025-03-25 23:32:00,755 Current Learning Rate: 0.0009960574 +2025-03-25 23:32:00,755 Train Loss: 0.0002412, Val Loss: 0.0002773 +2025-03-25 23:32:00,755 Epoch 1593/2000 +2025-03-25 23:33:06,187 Current Learning 
Rate: 0.0009969805 +2025-03-25 23:33:06,187 Train Loss: 0.0002420, Val Loss: 0.0002777 +2025-03-25 23:33:06,188 Epoch 1594/2000 +2025-03-25 23:34:11,366 Current Learning Rate: 0.0009977810 +2025-03-25 23:34:11,366 Train Loss: 0.0002426, Val Loss: 0.0002778 +2025-03-25 23:34:11,366 Epoch 1595/2000 +2025-03-25 23:35:16,526 Current Learning Rate: 0.0009984587 +2025-03-25 23:35:16,526 Train Loss: 0.0002428, Val Loss: 0.0002785 +2025-03-25 23:35:16,526 Epoch 1596/2000 +2025-03-25 23:36:21,911 Current Learning Rate: 0.0009990134 +2025-03-25 23:36:21,911 Train Loss: 0.0002428, Val Loss: 0.0002795 +2025-03-25 23:36:21,911 Epoch 1597/2000 +2025-03-25 23:37:27,008 Current Learning Rate: 0.0009994449 +2025-03-25 23:37:27,009 Train Loss: 0.0002425, Val Loss: 0.0002790 +2025-03-25 23:37:27,009 Epoch 1598/2000 +2025-03-25 23:38:32,043 Current Learning Rate: 0.0009997533 +2025-03-25 23:38:32,044 Train Loss: 0.0002418, Val Loss: 0.0002785 +2025-03-25 23:38:32,044 Epoch 1599/2000 +2025-03-25 23:39:37,212 Current Learning Rate: 0.0009999383 +2025-03-25 23:39:37,213 Train Loss: 0.0002410, Val Loss: 0.0002776 +2025-03-25 23:39:37,213 Epoch 1600/2000 +2025-03-25 23:40:42,257 Current Learning Rate: 0.0010000000 +2025-03-25 23:40:42,257 Train Loss: 0.0002410, Val Loss: 0.0002777 +2025-03-25 23:40:42,258 Epoch 1601/2000 +2025-03-25 23:41:47,910 Current Learning Rate: 0.0009999383 +2025-03-25 23:41:47,911 Train Loss: 0.0002412, Val Loss: 0.0002781 +2025-03-25 23:41:47,911 Epoch 1602/2000 +2025-03-25 23:42:53,004 Current Learning Rate: 0.0009997533 +2025-03-25 23:42:53,005 Train Loss: 0.0002421, Val Loss: 0.0002790 +2025-03-25 23:42:53,005 Epoch 1603/2000 +2025-03-25 23:43:58,241 Current Learning Rate: 0.0009994449 +2025-03-25 23:43:58,241 Train Loss: 0.0002426, Val Loss: 0.0002787 +2025-03-25 23:43:58,242 Epoch 1604/2000 +2025-03-25 23:45:03,631 Current Learning Rate: 0.0009990134 +2025-03-25 23:45:03,631 Train Loss: 0.0002433, Val Loss: 0.0002810 +2025-03-25 23:45:03,632 Epoch 1605/2000 
+2025-03-25 23:46:08,764 Current Learning Rate: 0.0009984587 +2025-03-25 23:46:08,764 Train Loss: 0.0002442, Val Loss: 0.0002810 +2025-03-25 23:46:08,764 Epoch 1606/2000 +2025-03-25 23:47:14,289 Current Learning Rate: 0.0009977810 +2025-03-25 23:47:14,290 Train Loss: 0.0002450, Val Loss: 0.0002808 +2025-03-25 23:47:14,290 Epoch 1607/2000 +2025-03-25 23:48:19,550 Current Learning Rate: 0.0009969805 +2025-03-25 23:48:19,550 Train Loss: 0.0002457, Val Loss: 0.0002817 +2025-03-25 23:48:19,551 Epoch 1608/2000 +2025-03-25 23:49:25,008 Current Learning Rate: 0.0009960574 +2025-03-25 23:49:25,008 Train Loss: 0.0002454, Val Loss: 0.0002810 +2025-03-25 23:49:25,009 Epoch 1609/2000 +2025-03-25 23:50:30,063 Current Learning Rate: 0.0009950118 +2025-03-25 23:50:30,063 Train Loss: 0.0002449, Val Loss: 0.0002769 +2025-03-25 23:50:30,063 Epoch 1610/2000 +2025-03-25 23:51:35,363 Current Learning Rate: 0.0009938442 +2025-03-25 23:51:35,364 Train Loss: 0.0002439, Val Loss: 0.0002755 +2025-03-25 23:51:35,364 Epoch 1611/2000 +2025-03-25 23:52:40,747 Current Learning Rate: 0.0009925547 +2025-03-25 23:52:40,748 Train Loss: 0.0002418, Val Loss: 0.0002740 +2025-03-25 23:52:40,748 Epoch 1612/2000 +2025-03-25 23:53:45,806 Current Learning Rate: 0.0009911436 +2025-03-25 23:53:45,806 Train Loss: 0.0002401, Val Loss: 0.0002742 +2025-03-25 23:53:45,807 Epoch 1613/2000 +2025-03-25 23:54:51,260 Current Learning Rate: 0.0009896114 +2025-03-25 23:54:51,261 Train Loss: 0.0002396, Val Loss: 0.0002751 +2025-03-25 23:54:51,261 Epoch 1614/2000 +2025-03-25 23:55:56,455 Current Learning Rate: 0.0009879584 +2025-03-25 23:55:56,455 Train Loss: 0.0002397, Val Loss: 0.0002757 +2025-03-25 23:55:56,455 Epoch 1615/2000 +2025-03-25 23:57:01,888 Current Learning Rate: 0.0009861850 +2025-03-25 23:57:01,888 Train Loss: 0.0002400, Val Loss: 0.0002764 +2025-03-25 23:57:01,889 Epoch 1616/2000 +2025-03-25 23:58:07,184 Current Learning Rate: 0.0009842916 +2025-03-25 23:58:07,185 Train Loss: 0.0002406, Val Loss: 0.0002760 
+2025-03-25 23:58:07,185 Epoch 1617/2000 +2025-03-25 23:59:12,778 Current Learning Rate: 0.0009822787 +2025-03-25 23:59:12,778 Train Loss: 0.0002415, Val Loss: 0.0002772 +2025-03-25 23:59:12,779 Epoch 1618/2000 +2025-03-26 00:00:18,053 Current Learning Rate: 0.0009801468 +2025-03-26 00:00:18,053 Train Loss: 0.0002419, Val Loss: 0.0002787 +2025-03-26 00:00:18,054 Epoch 1619/2000 +2025-03-26 00:01:23,853 Current Learning Rate: 0.0009778965 +2025-03-26 00:01:23,853 Train Loss: 0.0002421, Val Loss: 0.0002783 +2025-03-26 00:01:23,853 Epoch 1620/2000 +2025-03-26 00:02:29,406 Current Learning Rate: 0.0009755283 +2025-03-26 00:02:29,406 Train Loss: 0.0002426, Val Loss: 0.0002787 +2025-03-26 00:02:29,407 Epoch 1621/2000 +2025-03-26 00:03:34,840 Current Learning Rate: 0.0009730427 +2025-03-26 00:03:34,840 Train Loss: 0.0002433, Val Loss: 0.0002787 +2025-03-26 00:03:34,840 Epoch 1622/2000 +2025-03-26 00:04:40,429 Current Learning Rate: 0.0009704404 +2025-03-26 00:04:40,430 Train Loss: 0.0002444, Val Loss: 0.0002800 +2025-03-26 00:04:40,430 Epoch 1623/2000 +2025-03-26 00:05:46,349 Current Learning Rate: 0.0009677220 +2025-03-26 00:05:46,350 Train Loss: 0.0002446, Val Loss: 0.0002749 +2025-03-26 00:05:46,350 Epoch 1624/2000 +2025-03-26 00:06:52,038 Current Learning Rate: 0.0009648882 +2025-03-26 00:06:52,038 Train Loss: 0.0002445, Val Loss: 0.0002792 +2025-03-26 00:06:52,039 Epoch 1625/2000 +2025-03-26 00:07:57,287 Current Learning Rate: 0.0009619398 +2025-03-26 00:07:57,288 Train Loss: 0.0002439, Val Loss: 0.0002770 +2025-03-26 00:07:57,288 Epoch 1626/2000 +2025-03-26 00:09:02,468 Current Learning Rate: 0.0009588773 +2025-03-26 00:09:02,469 Train Loss: 0.0002424, Val Loss: 0.0002739 +2025-03-26 00:09:02,469 Epoch 1627/2000 +2025-03-26 00:10:07,746 Current Learning Rate: 0.0009557016 +2025-03-26 00:10:07,747 Train Loss: 0.0002403, Val Loss: 0.0002718 +2025-03-26 00:10:07,747 Epoch 1628/2000 +2025-03-26 00:11:12,945 Current Learning Rate: 0.0009524135 +2025-03-26 00:11:12,946 
Train Loss: 0.0002388, Val Loss: 0.0002725 +2025-03-26 00:11:12,946 Epoch 1629/2000 +2025-03-26 00:12:18,073 Current Learning Rate: 0.0009490138 +2025-03-26 00:12:18,074 Train Loss: 0.0002382, Val Loss: 0.0002732 +2025-03-26 00:12:18,074 Epoch 1630/2000 +2025-03-26 00:13:22,652 Current Learning Rate: 0.0009455033 +2025-03-26 00:13:22,652 Train Loss: 0.0002381, Val Loss: 0.0002740 +2025-03-26 00:13:22,652 Epoch 1631/2000 +2025-03-26 00:14:27,990 Current Learning Rate: 0.0009418828 +2025-03-26 00:14:27,990 Train Loss: 0.0002381, Val Loss: 0.0002738 +2025-03-26 00:14:27,990 Epoch 1632/2000 +2025-03-26 00:15:33,194 Current Learning Rate: 0.0009381533 +2025-03-26 00:15:33,194 Train Loss: 0.0002383, Val Loss: 0.0002742 +2025-03-26 00:15:33,195 Epoch 1633/2000 +2025-03-26 00:16:38,367 Current Learning Rate: 0.0009343158 +2025-03-26 00:16:38,367 Train Loss: 0.0002388, Val Loss: 0.0002754 +2025-03-26 00:16:38,367 Epoch 1634/2000 +2025-03-26 00:17:43,612 Current Learning Rate: 0.0009303710 +2025-03-26 00:17:43,612 Train Loss: 0.0002398, Val Loss: 0.0002767 +2025-03-26 00:17:43,612 Epoch 1635/2000 +2025-03-26 00:18:48,972 Current Learning Rate: 0.0009263201 +2025-03-26 00:18:48,972 Train Loss: 0.0002402, Val Loss: 0.0002760 +2025-03-26 00:18:48,972 Epoch 1636/2000 +2025-03-26 00:19:54,014 Current Learning Rate: 0.0009221640 +2025-03-26 00:19:54,014 Train Loss: 0.0002408, Val Loss: 0.0002764 +2025-03-26 00:19:54,015 Epoch 1637/2000 +2025-03-26 00:20:59,875 Current Learning Rate: 0.0009179037 +2025-03-26 00:20:59,876 Train Loss: 0.0002417, Val Loss: 0.0002756 +2025-03-26 00:20:59,876 Epoch 1638/2000 +2025-03-26 00:22:05,297 Current Learning Rate: 0.0009135403 +2025-03-26 00:22:05,297 Train Loss: 0.0002421, Val Loss: 0.0002721 +2025-03-26 00:22:05,297 Epoch 1639/2000 +2025-03-26 00:23:10,151 Current Learning Rate: 0.0009090749 +2025-03-26 00:23:10,152 Train Loss: 0.0002421, Val Loss: 0.0002747 +2025-03-26 00:23:10,152 Epoch 1640/2000 +2025-03-26 00:24:15,210 Current Learning 
Rate: 0.0009045085 +2025-03-26 00:24:15,211 Train Loss: 0.0002418, Val Loss: 0.0002757 +2025-03-26 00:24:15,211 Epoch 1641/2000 +2025-03-26 00:25:20,278 Current Learning Rate: 0.0008998423 +2025-03-26 00:25:20,278 Train Loss: 0.0002411, Val Loss: 0.0002760 +2025-03-26 00:25:20,279 Epoch 1642/2000 +2025-03-26 00:26:24,858 Current Learning Rate: 0.0008950775 +2025-03-26 00:26:24,859 Train Loss: 0.0002404, Val Loss: 0.0002771 +2025-03-26 00:26:24,859 Epoch 1643/2000 +2025-03-26 00:27:29,525 Current Learning Rate: 0.0008902152 +2025-03-26 00:27:29,525 Train Loss: 0.0002395, Val Loss: 0.0002767 +2025-03-26 00:27:29,525 Epoch 1644/2000 +2025-03-26 00:28:34,839 Current Learning Rate: 0.0008852566 +2025-03-26 00:28:34,840 Train Loss: 0.0002381, Val Loss: 0.0002744 +2025-03-26 00:28:34,840 Epoch 1645/2000 +2025-03-26 00:29:39,924 Current Learning Rate: 0.0008802030 +2025-03-26 00:29:39,924 Train Loss: 0.0002369, Val Loss: 0.0002704 +2025-03-26 00:29:39,924 Epoch 1646/2000 +2025-03-26 00:30:45,407 Current Learning Rate: 0.0008750555 +2025-03-26 00:30:45,408 Train Loss: 0.0002362, Val Loss: 0.0002704 +2025-03-26 00:30:45,408 Epoch 1647/2000 +2025-03-26 00:31:50,960 Current Learning Rate: 0.0008698155 +2025-03-26 00:31:50,961 Train Loss: 0.0002359, Val Loss: 0.0002706 +2025-03-26 00:31:50,961 Epoch 1648/2000 +2025-03-26 00:32:56,294 Current Learning Rate: 0.0008644843 +2025-03-26 00:32:56,295 Train Loss: 0.0002358, Val Loss: 0.0002708 +2025-03-26 00:32:56,296 Epoch 1649/2000 +2025-03-26 00:34:01,123 Current Learning Rate: 0.0008590631 +2025-03-26 00:34:01,124 Train Loss: 0.0002358, Val Loss: 0.0002712 +2025-03-26 00:34:01,124 Epoch 1650/2000 +2025-03-26 00:35:06,027 Current Learning Rate: 0.0008535534 +2025-03-26 00:35:06,027 Train Loss: 0.0002360, Val Loss: 0.0002717 +2025-03-26 00:35:06,027 Epoch 1651/2000 +2025-03-26 00:36:10,943 Current Learning Rate: 0.0008479564 +2025-03-26 00:36:10,943 Train Loss: 0.0002364, Val Loss: 0.0002724 +2025-03-26 00:36:10,943 Epoch 1652/2000 
+2025-03-26 00:37:16,341 Current Learning Rate: 0.0008422736 +2025-03-26 00:37:16,342 Train Loss: 0.0002373, Val Loss: 0.0002719 +2025-03-26 00:37:16,342 Epoch 1653/2000 +2025-03-26 00:38:21,396 Current Learning Rate: 0.0008365063 +2025-03-26 00:38:21,397 Train Loss: 0.0002383, Val Loss: 0.0002711 +2025-03-26 00:38:21,397 Epoch 1654/2000 +2025-03-26 00:39:26,373 Current Learning Rate: 0.0008306559 +2025-03-26 00:39:26,373 Train Loss: 0.0002391, Val Loss: 0.0002691 +2025-03-26 00:39:26,374 Epoch 1655/2000 +2025-03-26 00:40:31,447 Current Learning Rate: 0.0008247240 +2025-03-26 00:40:31,448 Train Loss: 0.0002394, Val Loss: 0.0002689 +2025-03-26 00:40:31,448 Epoch 1656/2000 +2025-03-26 00:41:37,203 Current Learning Rate: 0.0008187120 +2025-03-26 00:41:37,204 Train Loss: 0.0002391, Val Loss: 0.0002760 +2025-03-26 00:41:37,204 Epoch 1657/2000 +2025-03-26 00:42:42,293 Current Learning Rate: 0.0008126213 +2025-03-26 00:42:42,294 Train Loss: 0.0002392, Val Loss: 0.0002727 +2025-03-26 00:42:42,294 Epoch 1658/2000 +2025-03-26 00:43:47,955 Current Learning Rate: 0.0008064535 +2025-03-26 00:43:47,955 Train Loss: 0.0002383, Val Loss: 0.0002694 +2025-03-26 00:43:47,956 Epoch 1659/2000 +2025-03-26 00:44:53,115 Current Learning Rate: 0.0008002101 +2025-03-26 00:44:53,116 Train Loss: 0.0002369, Val Loss: 0.0002690 +2025-03-26 00:44:53,116 Epoch 1660/2000 +2025-03-26 00:45:58,463 Current Learning Rate: 0.0007938926 +2025-03-26 00:45:58,463 Train Loss: 0.0002355, Val Loss: 0.0002681 +2025-03-26 00:45:58,463 Epoch 1661/2000 +2025-03-26 00:47:03,812 Current Learning Rate: 0.0007875026 +2025-03-26 00:47:03,813 Train Loss: 0.0002344, Val Loss: 0.0002652 +2025-03-26 00:47:03,813 Epoch 1662/2000 +2025-03-26 00:48:09,494 Current Learning Rate: 0.0007810417 +2025-03-26 00:48:09,495 Train Loss: 0.0002338, Val Loss: 0.0002644 +2025-03-26 00:48:09,495 Epoch 1663/2000 +2025-03-26 00:49:14,392 Current Learning Rate: 0.0007745114 +2025-03-26 00:49:14,393 Train Loss: 0.0002336, Val Loss: 0.0002647 
+2025-03-26 00:49:14,393 Epoch 1664/2000 +2025-03-26 00:50:19,625 Current Learning Rate: 0.0007679134 +2025-03-26 00:50:19,625 Train Loss: 0.0002334, Val Loss: 0.0002648 +2025-03-26 00:50:19,626 Epoch 1665/2000 +2025-03-26 00:51:25,064 Current Learning Rate: 0.0007612493 +2025-03-26 00:51:25,064 Train Loss: 0.0002333, Val Loss: 0.0002650 +2025-03-26 00:51:25,065 Epoch 1666/2000 +2025-03-26 00:52:30,499 Current Learning Rate: 0.0007545207 +2025-03-26 00:52:30,499 Train Loss: 0.0002333, Val Loss: 0.0002652 +2025-03-26 00:52:30,499 Epoch 1667/2000 +2025-03-26 00:53:36,200 Current Learning Rate: 0.0007477293 +2025-03-26 00:53:36,200 Train Loss: 0.0002339, Val Loss: 0.0002658 +2025-03-26 00:53:36,201 Epoch 1668/2000 +2025-03-26 00:54:41,040 Current Learning Rate: 0.0007408768 +2025-03-26 00:54:41,041 Train Loss: 0.0002353, Val Loss: 0.0002649 +2025-03-26 00:54:41,041 Epoch 1669/2000 +2025-03-26 00:55:46,190 Current Learning Rate: 0.0007339649 +2025-03-26 00:55:46,191 Train Loss: 0.0002360, Val Loss: 0.0002703 +2025-03-26 00:55:46,191 Epoch 1670/2000 +2025-03-26 00:56:51,723 Current Learning Rate: 0.0007269952 +2025-03-26 00:56:51,724 Train Loss: 0.0002361, Val Loss: 0.0002649 +2025-03-26 00:56:51,724 Epoch 1671/2000 +2025-03-26 00:57:56,551 Current Learning Rate: 0.0007199696 +2025-03-26 00:57:56,551 Train Loss: 0.0002359, Val Loss: 0.0002630 +2025-03-26 00:57:56,551 Epoch 1672/2000 +2025-03-26 00:59:02,210 Current Learning Rate: 0.0007128896 +2025-03-26 00:59:02,211 Train Loss: 0.0002350, Val Loss: 0.0002632 +2025-03-26 00:59:02,211 Epoch 1673/2000 +2025-03-26 01:00:07,213 Current Learning Rate: 0.0007057572 +2025-03-26 01:00:07,214 Train Loss: 0.0002341, Val Loss: 0.0002621 +2025-03-26 01:00:07,214 Epoch 1674/2000 +2025-03-26 01:01:12,228 Current Learning Rate: 0.0006985739 +2025-03-26 01:01:12,228 Train Loss: 0.0002331, Val Loss: 0.0002611 +2025-03-26 01:01:12,228 Epoch 1675/2000 +2025-03-26 01:02:17,358 Current Learning Rate: 0.0006913417 +2025-03-26 01:02:17,358 
Train Loss: 0.0002322, Val Loss: 0.0002614 +2025-03-26 01:02:17,358 Epoch 1676/2000 +2025-03-26 01:03:22,730 Current Learning Rate: 0.0006840623 +2025-03-26 01:03:22,730 Train Loss: 0.0002316, Val Loss: 0.0002617 +2025-03-26 01:03:22,731 Epoch 1677/2000 +2025-03-26 01:04:28,164 Current Learning Rate: 0.0006767374 +2025-03-26 01:04:28,165 Train Loss: 0.0002311, Val Loss: 0.0002611 +2025-03-26 01:04:28,165 Epoch 1678/2000 +2025-03-26 01:05:33,466 Current Learning Rate: 0.0006693690 +2025-03-26 01:05:33,467 Train Loss: 0.0002309, Val Loss: 0.0002602 +2025-03-26 01:05:33,467 Epoch 1679/2000 +2025-03-26 01:06:38,389 Current Learning Rate: 0.0006619587 +2025-03-26 01:06:38,390 Train Loss: 0.0002307, Val Loss: 0.0002598 +2025-03-26 01:06:38,390 Epoch 1680/2000 +2025-03-26 01:07:43,524 Current Learning Rate: 0.0006545085 +2025-03-26 01:07:43,525 Train Loss: 0.0002307, Val Loss: 0.0002596 +2025-03-26 01:07:43,525 Epoch 1681/2000 +2025-03-26 01:08:48,953 Current Learning Rate: 0.0006470202 +2025-03-26 01:08:48,954 Train Loss: 0.0002308, Val Loss: 0.0002600 +2025-03-26 01:08:48,954 Epoch 1682/2000 +2025-03-26 01:09:53,770 Current Learning Rate: 0.0006394956 +2025-03-26 01:09:53,770 Train Loss: 0.0002314, Val Loss: 0.0002645 +2025-03-26 01:09:53,771 Epoch 1683/2000 +2025-03-26 01:10:58,773 Current Learning Rate: 0.0006319365 +2025-03-26 01:10:58,773 Train Loss: 0.0002326, Val Loss: 0.0002590 +2025-03-26 01:10:58,773 Epoch 1684/2000 +2025-03-26 01:12:03,375 Current Learning Rate: 0.0006243449 +2025-03-26 01:12:03,376 Train Loss: 0.0002327, Val Loss: 0.0002586 +2025-03-26 01:12:03,376 Epoch 1685/2000 +2025-03-26 01:13:08,896 Current Learning Rate: 0.0006167227 +2025-03-26 01:13:08,897 Train Loss: 0.0002317, Val Loss: 0.0002571 +2025-03-26 01:13:08,897 Epoch 1686/2000 +2025-03-26 01:14:13,937 Current Learning Rate: 0.0006090716 +2025-03-26 01:14:13,937 Train Loss: 0.0002306, Val Loss: 0.0002566 +2025-03-26 01:14:13,938 Epoch 1687/2000 +2025-03-26 01:15:19,212 Current Learning 
Rate: 0.0006013936 +2025-03-26 01:15:19,213 Train Loss: 0.0002299, Val Loss: 0.0002561 +2025-03-26 01:15:19,213 Epoch 1688/2000 +2025-03-26 01:16:24,596 Current Learning Rate: 0.0005936907 +2025-03-26 01:16:24,596 Train Loss: 0.0002294, Val Loss: 0.0002558 +2025-03-26 01:16:24,597 Epoch 1689/2000 +2025-03-26 01:17:30,551 Current Learning Rate: 0.0005859646 +2025-03-26 01:17:30,552 Train Loss: 0.0002291, Val Loss: 0.0002557 +2025-03-26 01:17:30,552 Epoch 1690/2000 +2025-03-26 01:18:35,820 Current Learning Rate: 0.0005782172 +2025-03-26 01:18:35,821 Train Loss: 0.0002288, Val Loss: 0.0002557 +2025-03-26 01:18:35,821 Epoch 1691/2000 +2025-03-26 01:19:41,207 Current Learning Rate: 0.0005704506 +2025-03-26 01:19:41,208 Train Loss: 0.0002288, Val Loss: 0.0002556 +2025-03-26 01:19:41,208 Epoch 1692/2000 +2025-03-26 01:20:46,436 Current Learning Rate: 0.0005626666 +2025-03-26 01:20:46,437 Train Loss: 0.0002289, Val Loss: 0.0002558 +2025-03-26 01:20:46,437 Epoch 1693/2000 +2025-03-26 01:21:51,459 Current Learning Rate: 0.0005548672 +2025-03-26 01:21:51,459 Train Loss: 0.0002293, Val Loss: 0.0002558 +2025-03-26 01:21:51,460 Epoch 1694/2000 +2025-03-26 01:22:57,146 Current Learning Rate: 0.0005470542 +2025-03-26 01:22:57,146 Train Loss: 0.0002292, Val Loss: 0.0002559 +2025-03-26 01:22:57,147 Epoch 1695/2000 +2025-03-26 01:24:02,576 Current Learning Rate: 0.0005392295 +2025-03-26 01:24:02,576 Train Loss: 0.0002289, Val Loss: 0.0002558 +2025-03-26 01:24:02,576 Epoch 1696/2000 +2025-03-26 01:25:08,455 Current Learning Rate: 0.0005313953 +2025-03-26 01:25:08,455 Train Loss: 0.0002283, Val Loss: 0.0002549 +2025-03-26 01:25:08,456 Epoch 1697/2000 +2025-03-26 01:26:13,596 Current Learning Rate: 0.0005235532 +2025-03-26 01:26:13,597 Train Loss: 0.0002278, Val Loss: 0.0002544 +2025-03-26 01:26:13,597 Epoch 1698/2000 +2025-03-26 01:27:18,768 Current Learning Rate: 0.0005157054 +2025-03-26 01:27:18,768 Train Loss: 0.0002273, Val Loss: 0.0002538 +2025-03-26 01:27:18,769 Epoch 1699/2000 
+2025-03-26 01:28:23,911 Current Learning Rate: 0.0005078537 +2025-03-26 01:28:23,911 Train Loss: 0.0002269, Val Loss: 0.0002533 +2025-03-26 01:28:23,912 Epoch 1700/2000 +2025-03-26 01:29:29,354 Current Learning Rate: 0.0005000000 +2025-03-26 01:29:29,354 Train Loss: 0.0002266, Val Loss: 0.0002530 +2025-03-26 01:29:29,354 Epoch 1701/2000 +2025-03-26 01:30:34,876 Current Learning Rate: 0.0004921463 +2025-03-26 01:30:34,876 Train Loss: 0.0002263, Val Loss: 0.0002528 +2025-03-26 01:30:34,876 Epoch 1702/2000 +2025-03-26 01:31:40,498 Current Learning Rate: 0.0004842946 +2025-03-26 01:31:40,498 Train Loss: 0.0002262, Val Loss: 0.0002527 +2025-03-26 01:31:40,499 Epoch 1703/2000 +2025-03-26 01:32:46,109 Current Learning Rate: 0.0004764468 +2025-03-26 01:32:46,110 Train Loss: 0.0002262, Val Loss: 0.0002527 +2025-03-26 01:32:46,110 Epoch 1704/2000 +2025-03-26 01:33:51,452 Current Learning Rate: 0.0004686047 +2025-03-26 01:33:51,453 Train Loss: 0.0002263, Val Loss: 0.0002533 +2025-03-26 01:33:51,453 Epoch 1705/2000 +2025-03-26 01:34:56,527 Current Learning Rate: 0.0004607705 +2025-03-26 01:34:56,528 Train Loss: 0.0002264, Val Loss: 0.0002527 +2025-03-26 01:34:56,528 Epoch 1706/2000 +2025-03-26 01:36:02,188 Current Learning Rate: 0.0004529458 +2025-03-26 01:36:02,189 Train Loss: 0.0002260, Val Loss: 0.0002527 +2025-03-26 01:36:02,189 Epoch 1707/2000 +2025-03-26 01:37:07,701 Current Learning Rate: 0.0004451328 +2025-03-26 01:37:07,702 Train Loss: 0.0002256, Val Loss: 0.0002524 +2025-03-26 01:37:07,702 Epoch 1708/2000 +2025-03-26 01:38:12,604 Current Learning Rate: 0.0004373334 +2025-03-26 01:38:12,605 Train Loss: 0.0002252, Val Loss: 0.0002519 +2025-03-26 01:38:12,605 Epoch 1709/2000 +2025-03-26 01:39:17,574 Current Learning Rate: 0.0004295494 +2025-03-26 01:39:17,575 Train Loss: 0.0002248, Val Loss: 0.0002515 +2025-03-26 01:39:17,575 Epoch 1710/2000 +2025-03-26 01:40:22,785 Current Learning Rate: 0.0004217828 +2025-03-26 01:40:22,786 Train Loss: 0.0002244, Val Loss: 0.0002512 
+2025-03-26 01:40:22,786 Epoch 1711/2000 +2025-03-26 01:41:27,709 Current Learning Rate: 0.0004140354 +2025-03-26 01:41:27,709 Train Loss: 0.0002241, Val Loss: 0.0002509 +2025-03-26 01:41:27,710 Epoch 1712/2000 +2025-03-26 01:42:32,813 Current Learning Rate: 0.0004063093 +2025-03-26 01:42:32,813 Train Loss: 0.0002239, Val Loss: 0.0002506 +2025-03-26 01:42:32,813 Epoch 1713/2000 +2025-03-26 01:43:37,827 Current Learning Rate: 0.0003986064 +2025-03-26 01:43:37,828 Train Loss: 0.0002237, Val Loss: 0.0002503 +2025-03-26 01:43:37,828 Epoch 1714/2000 +2025-03-26 01:44:42,950 Current Learning Rate: 0.0003909284 +2025-03-26 01:44:42,951 Train Loss: 0.0002236, Val Loss: 0.0002501 +2025-03-26 01:44:42,951 Epoch 1715/2000 +2025-03-26 01:45:48,007 Current Learning Rate: 0.0003832773 +2025-03-26 01:45:48,008 Train Loss: 0.0002234, Val Loss: 0.0002497 +2025-03-26 01:45:48,008 Epoch 1716/2000 +2025-03-26 01:46:53,143 Current Learning Rate: 0.0003756551 +2025-03-26 01:46:53,144 Train Loss: 0.0002233, Val Loss: 0.0002493 +2025-03-26 01:46:53,144 Epoch 1717/2000 +2025-03-26 01:47:58,393 Current Learning Rate: 0.0003680635 +2025-03-26 01:47:58,394 Train Loss: 0.0002231, Val Loss: 0.0002489 +2025-03-26 01:47:58,394 Epoch 1718/2000 +2025-03-26 01:49:03,956 Current Learning Rate: 0.0003605044 +2025-03-26 01:49:03,956 Train Loss: 0.0002228, Val Loss: 0.0002486 +2025-03-26 01:49:03,956 Epoch 1719/2000 +2025-03-26 01:50:08,785 Current Learning Rate: 0.0003529798 +2025-03-26 01:50:08,785 Train Loss: 0.0002226, Val Loss: 0.0002484 +2025-03-26 01:50:08,785 Epoch 1720/2000 +2025-03-26 01:51:14,359 Current Learning Rate: 0.0003454915 +2025-03-26 01:51:14,360 Train Loss: 0.0002223, Val Loss: 0.0002481 +2025-03-26 01:51:14,360 Epoch 1721/2000 +2025-03-26 01:52:19,795 Current Learning Rate: 0.0003380413 +2025-03-26 01:52:19,795 Train Loss: 0.0002219, Val Loss: 0.0002477 +2025-03-26 01:52:19,796 Epoch 1722/2000 +2025-03-26 01:53:25,077 Current Learning Rate: 0.0003306310 +2025-03-26 01:53:25,077 
Train Loss: 0.0002216, Val Loss: 0.0002474 +2025-03-26 01:53:25,077 Epoch 1723/2000 +2025-03-26 01:54:30,912 Current Learning Rate: 0.0003232626 +2025-03-26 01:54:30,912 Train Loss: 0.0002214, Val Loss: 0.0002471 +2025-03-26 01:54:30,912 Epoch 1724/2000 +2025-03-26 01:55:35,868 Current Learning Rate: 0.0003159377 +2025-03-26 01:55:35,868 Train Loss: 0.0002211, Val Loss: 0.0002469 +2025-03-26 01:55:35,868 Epoch 1725/2000 +2025-03-26 01:56:41,168 Current Learning Rate: 0.0003086583 +2025-03-26 01:56:41,168 Train Loss: 0.0002209, Val Loss: 0.0002467 +2025-03-26 01:56:41,169 Epoch 1726/2000 +2025-03-26 01:57:46,329 Current Learning Rate: 0.0003014261 +2025-03-26 01:57:46,329 Train Loss: 0.0002207, Val Loss: 0.0002465 +2025-03-26 01:57:46,330 Epoch 1727/2000 +2025-03-26 01:58:51,122 Current Learning Rate: 0.0002942428 +2025-03-26 01:58:51,122 Train Loss: 0.0002205, Val Loss: 0.0002464 +2025-03-26 01:58:51,123 Epoch 1728/2000 +2025-03-26 01:59:56,471 Current Learning Rate: 0.0002871104 +2025-03-26 01:59:56,472 Train Loss: 0.0002203, Val Loss: 0.0002462 +2025-03-26 01:59:56,472 Epoch 1729/2000 +2025-03-26 02:01:01,696 Current Learning Rate: 0.0002800304 +2025-03-26 02:01:01,697 Train Loss: 0.0002201, Val Loss: 0.0002460 +2025-03-26 02:01:01,697 Epoch 1730/2000 +2025-03-26 02:02:06,837 Current Learning Rate: 0.0002730048 +2025-03-26 02:02:06,837 Train Loss: 0.0002199, Val Loss: 0.0002457 +2025-03-26 02:02:06,837 Epoch 1731/2000 +2025-03-26 02:03:12,174 Current Learning Rate: 0.0002660351 +2025-03-26 02:03:12,175 Train Loss: 0.0002196, Val Loss: 0.0002453 +2025-03-26 02:03:12,175 Epoch 1732/2000 +2025-03-26 02:04:17,693 Current Learning Rate: 0.0002591232 +2025-03-26 02:04:17,694 Train Loss: 0.0002194, Val Loss: 0.0002451 +2025-03-26 02:04:17,694 Epoch 1733/2000 +2025-03-26 02:05:22,979 Current Learning Rate: 0.0002522707 +2025-03-26 02:05:22,980 Train Loss: 0.0002191, Val Loss: 0.0002449 +2025-03-26 02:05:22,980 Epoch 1734/2000 +2025-03-26 02:06:28,163 Current Learning 
Rate: 0.0002454793 +2025-03-26 02:06:28,164 Train Loss: 0.0002189, Val Loss: 0.0002447 +2025-03-26 02:06:28,164 Epoch 1735/2000 +2025-03-26 02:07:32,845 Current Learning Rate: 0.0002387507 +2025-03-26 02:07:32,845 Train Loss: 0.0002187, Val Loss: 0.0002446 +2025-03-26 02:07:32,846 Epoch 1736/2000 +2025-03-26 02:08:37,815 Current Learning Rate: 0.0002320866 +2025-03-26 02:08:37,816 Train Loss: 0.0002185, Val Loss: 0.0002444 +2025-03-26 02:08:37,816 Epoch 1737/2000 +2025-03-26 02:09:43,137 Current Learning Rate: 0.0002254886 +2025-03-26 02:09:43,138 Train Loss: 0.0002183, Val Loss: 0.0002443 +2025-03-26 02:09:43,138 Epoch 1738/2000 +2025-03-26 02:10:48,915 Current Learning Rate: 0.0002189583 +2025-03-26 02:10:48,916 Train Loss: 0.0002181, Val Loss: 0.0002443 +2025-03-26 02:10:48,916 Epoch 1739/2000 +2025-03-26 02:11:54,330 Current Learning Rate: 0.0002124974 +2025-03-26 02:11:54,331 Train Loss: 0.0002179, Val Loss: 0.0002443 +2025-03-26 02:11:54,331 Epoch 1740/2000 +2025-03-26 02:12:59,833 Current Learning Rate: 0.0002061074 +2025-03-26 02:12:59,834 Train Loss: 0.0002177, Val Loss: 0.0002443 +2025-03-26 02:12:59,834 Epoch 1741/2000 +2025-03-26 02:14:05,929 Current Learning Rate: 0.0001997899 +2025-03-26 02:14:05,930 Train Loss: 0.0002175, Val Loss: 0.0002443 +2025-03-26 02:14:05,930 Epoch 1742/2000 +2025-03-26 02:15:11,347 Current Learning Rate: 0.0001935465 +2025-03-26 02:15:11,348 Train Loss: 0.0002173, Val Loss: 0.0002444 +2025-03-26 02:15:11,348 Epoch 1743/2000 +2025-03-26 02:16:16,258 Current Learning Rate: 0.0001873787 +2025-03-26 02:16:16,258 Train Loss: 0.0002171, Val Loss: 0.0002444 +2025-03-26 02:16:16,259 Epoch 1744/2000 +2025-03-26 02:17:21,600 Current Learning Rate: 0.0001812880 +2025-03-26 02:17:21,600 Train Loss: 0.0002169, Val Loss: 0.0002444 +2025-03-26 02:17:21,600 Epoch 1745/2000 +2025-03-26 02:18:26,992 Current Learning Rate: 0.0001752760 +2025-03-26 02:18:26,992 Train Loss: 0.0002167, Val Loss: 0.0002444 +2025-03-26 02:18:26,993 Epoch 1746/2000 
+2025-03-26 02:19:32,343 Current Learning Rate: 0.0001693441 +2025-03-26 02:19:32,343 Train Loss: 0.0002165, Val Loss: 0.0002443 +2025-03-26 02:19:32,344 Epoch 1747/2000 +2025-03-26 02:20:38,057 Current Learning Rate: 0.0001634937 +2025-03-26 02:20:38,058 Train Loss: 0.0002164, Val Loss: 0.0002442 +2025-03-26 02:20:38,058 Epoch 1748/2000 +2025-03-26 02:21:43,028 Current Learning Rate: 0.0001577264 +2025-03-26 02:21:43,029 Train Loss: 0.0002162, Val Loss: 0.0002440 +2025-03-26 02:21:43,029 Epoch 1749/2000 +2025-03-26 02:22:48,953 Current Learning Rate: 0.0001520436 +2025-03-26 02:22:48,953 Train Loss: 0.0002160, Val Loss: 0.0002437 +2025-03-26 02:22:48,953 Epoch 1750/2000 +2025-03-26 02:23:54,224 Current Learning Rate: 0.0001464466 +2025-03-26 02:23:54,225 Train Loss: 0.0002158, Val Loss: 0.0002433 +2025-03-26 02:23:54,225 Epoch 1751/2000 +2025-03-26 02:24:59,498 Current Learning Rate: 0.0001409369 +2025-03-26 02:24:59,499 Train Loss: 0.0002156, Val Loss: 0.0002428 +2025-03-26 02:24:59,499 Epoch 1752/2000 +2025-03-26 02:26:04,826 Current Learning Rate: 0.0001355157 +2025-03-26 02:26:04,826 Train Loss: 0.0002154, Val Loss: 0.0002423 +2025-03-26 02:26:04,827 Epoch 1753/2000 +2025-03-26 02:27:10,080 Current Learning Rate: 0.0001301845 +2025-03-26 02:27:10,081 Train Loss: 0.0002153, Val Loss: 0.0002419 +2025-03-26 02:27:10,081 Epoch 1754/2000 +2025-03-26 02:28:15,652 Current Learning Rate: 0.0001249445 +2025-03-26 02:28:15,722 Train Loss: 0.0002151, Val Loss: 0.0002416 +2025-03-26 02:28:15,723 Epoch 1755/2000 +2025-03-26 02:29:20,887 Current Learning Rate: 0.0001197970 +2025-03-26 02:29:20,960 Train Loss: 0.0002149, Val Loss: 0.0002413 +2025-03-26 02:29:20,960 Epoch 1756/2000 +2025-03-26 02:30:26,399 Current Learning Rate: 0.0001147434 +2025-03-26 02:30:26,473 Train Loss: 0.0002148, Val Loss: 0.0002410 +2025-03-26 02:30:26,473 Epoch 1757/2000 +2025-03-26 02:31:32,153 Current Learning Rate: 0.0001097848 +2025-03-26 02:31:32,226 Train Loss: 0.0002146, Val Loss: 0.0002408 
+2025-03-26 02:31:32,227 Epoch 1758/2000 +2025-03-26 02:32:37,772 Current Learning Rate: 0.0001049225 +2025-03-26 02:32:37,846 Train Loss: 0.0002144, Val Loss: 0.0002405 +2025-03-26 02:32:37,847 Epoch 1759/2000 +2025-03-26 02:33:43,333 Current Learning Rate: 0.0001001577 +2025-03-26 02:33:43,419 Train Loss: 0.0002143, Val Loss: 0.0002403 +2025-03-26 02:33:43,419 Epoch 1760/2000 +2025-03-26 02:34:48,588 Current Learning Rate: 0.0000954915 +2025-03-26 02:34:48,661 Train Loss: 0.0002141, Val Loss: 0.0002401 +2025-03-26 02:34:48,661 Epoch 1761/2000 +2025-03-26 02:35:53,680 Current Learning Rate: 0.0000909251 +2025-03-26 02:35:53,771 Train Loss: 0.0002140, Val Loss: 0.0002400 +2025-03-26 02:35:53,771 Epoch 1762/2000 +2025-03-26 02:36:58,849 Current Learning Rate: 0.0000864597 +2025-03-26 02:36:58,919 Train Loss: 0.0002138, Val Loss: 0.0002398 +2025-03-26 02:36:58,919 Epoch 1763/2000 +2025-03-26 02:38:03,997 Current Learning Rate: 0.0000820963 +2025-03-26 02:38:04,073 Train Loss: 0.0002137, Val Loss: 0.0002397 +2025-03-26 02:38:04,073 Epoch 1764/2000 +2025-03-26 02:39:09,301 Current Learning Rate: 0.0000778360 +2025-03-26 02:39:09,379 Train Loss: 0.0002135, Val Loss: 0.0002397 +2025-03-26 02:39:09,379 Epoch 1765/2000 +2025-03-26 02:40:14,359 Current Learning Rate: 0.0000736799 +2025-03-26 02:40:14,434 Train Loss: 0.0002134, Val Loss: 0.0002396 +2025-03-26 02:40:14,434 Epoch 1766/2000 +2025-03-26 02:41:19,518 Current Learning Rate: 0.0000696290 +2025-03-26 02:41:19,589 Train Loss: 0.0002132, Val Loss: 0.0002396 +2025-03-26 02:41:19,590 Epoch 1767/2000 +2025-03-26 02:42:24,532 Current Learning Rate: 0.0000656842 +2025-03-26 02:42:24,606 Train Loss: 0.0002131, Val Loss: 0.0002395 +2025-03-26 02:42:24,606 Epoch 1768/2000 +2025-03-26 02:43:29,998 Current Learning Rate: 0.0000618467 +2025-03-26 02:43:30,074 Train Loss: 0.0002129, Val Loss: 0.0002394 +2025-03-26 02:43:30,074 Epoch 1769/2000 +2025-03-26 02:44:35,289 Current Learning Rate: 0.0000581172 +2025-03-26 02:44:35,364 
Train Loss: 0.0002128, Val Loss: 0.0002393 +2025-03-26 02:44:35,364 Epoch 1770/2000 +2025-03-26 02:45:40,695 Current Learning Rate: 0.0000544967 +2025-03-26 02:45:40,774 Train Loss: 0.0002127, Val Loss: 0.0002391 +2025-03-26 02:45:40,775 Epoch 1771/2000 +2025-03-26 02:46:46,210 Current Learning Rate: 0.0000509862 +2025-03-26 02:46:46,288 Train Loss: 0.0002125, Val Loss: 0.0002389 +2025-03-26 02:46:46,288 Epoch 1772/2000 +2025-03-26 02:47:51,264 Current Learning Rate: 0.0000475865 +2025-03-26 02:47:51,339 Train Loss: 0.0002124, Val Loss: 0.0002388 +2025-03-26 02:47:51,339 Epoch 1773/2000 +2025-03-26 02:48:56,743 Current Learning Rate: 0.0000442984 +2025-03-26 02:48:56,828 Train Loss: 0.0002123, Val Loss: 0.0002386 +2025-03-26 02:48:56,828 Epoch 1774/2000 +2025-03-26 02:50:01,975 Current Learning Rate: 0.0000411227 +2025-03-26 02:50:02,045 Train Loss: 0.0002122, Val Loss: 0.0002385 +2025-03-26 02:50:02,045 Epoch 1775/2000 +2025-03-26 02:51:07,316 Current Learning Rate: 0.0000380602 +2025-03-26 02:51:07,393 Train Loss: 0.0002121, Val Loss: 0.0002385 +2025-03-26 02:51:07,394 Epoch 1776/2000 +2025-03-26 02:52:12,505 Current Learning Rate: 0.0000351118 +2025-03-26 02:52:12,571 Train Loss: 0.0002120, Val Loss: 0.0002385 +2025-03-26 02:52:12,571 Epoch 1777/2000 +2025-03-26 02:53:18,444 Current Learning Rate: 0.0000322780 +2025-03-26 02:53:18,516 Train Loss: 0.0002118, Val Loss: 0.0002384 +2025-03-26 02:53:18,516 Epoch 1778/2000 +2025-03-26 02:54:23,644 Current Learning Rate: 0.0000295596 +2025-03-26 02:54:23,718 Train Loss: 0.0002117, Val Loss: 0.0002384 +2025-03-26 02:54:23,719 Epoch 1779/2000 +2025-03-26 02:55:29,083 Current Learning Rate: 0.0000269573 +2025-03-26 02:55:29,157 Train Loss: 0.0002116, Val Loss: 0.0002383 +2025-03-26 02:55:29,158 Epoch 1780/2000 +2025-03-26 02:56:34,186 Current Learning Rate: 0.0000244717 +2025-03-26 02:56:34,254 Train Loss: 0.0002115, Val Loss: 0.0002382 +2025-03-26 02:56:34,254 Epoch 1781/2000 +2025-03-26 02:57:39,466 Current Learning 
Rate: 0.0000221035 +2025-03-26 02:57:39,547 Train Loss: 0.0002115, Val Loss: 0.0002382 +2025-03-26 02:57:39,547 Epoch 1782/2000 +2025-03-26 02:58:44,679 Current Learning Rate: 0.0000198532 +2025-03-26 02:58:44,741 Train Loss: 0.0002114, Val Loss: 0.0002381 +2025-03-26 02:58:44,742 Epoch 1783/2000 +2025-03-26 02:59:49,883 Current Learning Rate: 0.0000177213 +2025-03-26 02:59:49,948 Train Loss: 0.0002113, Val Loss: 0.0002380 +2025-03-26 02:59:49,949 Epoch 1784/2000 +2025-03-26 03:00:54,954 Current Learning Rate: 0.0000157084 +2025-03-26 03:00:55,017 Train Loss: 0.0002112, Val Loss: 0.0002379 +2025-03-26 03:00:55,017 Epoch 1785/2000 +2025-03-26 03:02:00,588 Current Learning Rate: 0.0000138150 +2025-03-26 03:02:00,662 Train Loss: 0.0002111, Val Loss: 0.0002379 +2025-03-26 03:02:00,663 Epoch 1786/2000 +2025-03-26 03:03:05,914 Current Learning Rate: 0.0000120416 +2025-03-26 03:03:05,989 Train Loss: 0.0002110, Val Loss: 0.0002378 +2025-03-26 03:03:05,990 Epoch 1787/2000 +2025-03-26 03:04:10,765 Current Learning Rate: 0.0000103886 +2025-03-26 03:04:10,837 Train Loss: 0.0002110, Val Loss: 0.0002378 +2025-03-26 03:04:10,838 Epoch 1788/2000 +2025-03-26 03:05:15,916 Current Learning Rate: 0.0000088564 +2025-03-26 03:05:15,991 Train Loss: 0.0002109, Val Loss: 0.0002378 +2025-03-26 03:05:15,991 Epoch 1789/2000 +2025-03-26 03:06:21,163 Current Learning Rate: 0.0000074453 +2025-03-26 03:06:21,241 Train Loss: 0.0002108, Val Loss: 0.0002377 +2025-03-26 03:06:21,242 Epoch 1790/2000 +2025-03-26 03:07:26,283 Current Learning Rate: 0.0000061558 +2025-03-26 03:07:26,349 Train Loss: 0.0002108, Val Loss: 0.0002377 +2025-03-26 03:07:26,349 Epoch 1791/2000 +2025-03-26 03:08:31,331 Current Learning Rate: 0.0000049882 +2025-03-26 03:08:31,390 Train Loss: 0.0002107, Val Loss: 0.0002377 +2025-03-26 03:08:31,390 Epoch 1792/2000 +2025-03-26 03:09:36,515 Current Learning Rate: 0.0000039426 +2025-03-26 03:09:36,579 Train Loss: 0.0002107, Val Loss: 0.0002377 +2025-03-26 03:09:36,579 Epoch 1793/2000 
+2025-03-26 03:10:41,860 Current Learning Rate: 0.0000030195 +2025-03-26 03:10:41,922 Train Loss: 0.0002106, Val Loss: 0.0002377 +2025-03-26 03:10:41,923 Epoch 1794/2000 +2025-03-26 03:11:47,052 Current Learning Rate: 0.0000022190 +2025-03-26 03:11:47,052 Train Loss: 0.0002106, Val Loss: 0.0002377 +2025-03-26 03:11:47,053 Epoch 1795/2000 +2025-03-26 03:12:52,307 Current Learning Rate: 0.0000015413 +2025-03-26 03:12:52,308 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:12:52,308 Epoch 1796/2000 +2025-03-26 03:13:57,801 Current Learning Rate: 0.0000009866 +2025-03-26 03:13:57,801 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:13:57,801 Epoch 1797/2000 +2025-03-26 03:15:02,921 Current Learning Rate: 0.0000005551 +2025-03-26 03:15:02,922 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:15:02,922 Epoch 1798/2000 +2025-03-26 03:16:07,972 Current Learning Rate: 0.0000002467 +2025-03-26 03:16:07,973 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:16:07,973 Epoch 1799/2000 +2025-03-26 03:17:13,108 Current Learning Rate: 0.0000000617 +2025-03-26 03:17:13,109 Train Loss: 0.0002104, Val Loss: 0.0002377 +2025-03-26 03:17:13,109 Epoch 1800/2000 +2025-03-26 03:18:18,096 Current Learning Rate: 0.0000000000 +2025-03-26 03:18:18,096 Train Loss: 0.0002104, Val Loss: 0.0002377 +2025-03-26 03:18:18,097 Epoch 1801/2000 +2025-03-26 03:19:23,591 Current Learning Rate: 0.0000000617 +2025-03-26 03:19:23,591 Train Loss: 0.0002104, Val Loss: 0.0002377 +2025-03-26 03:19:23,591 Epoch 1802/2000 +2025-03-26 03:20:28,513 Current Learning Rate: 0.0000002467 +2025-03-26 03:20:28,513 Train Loss: 0.0002104, Val Loss: 0.0002377 +2025-03-26 03:20:28,514 Epoch 1803/2000 +2025-03-26 03:21:33,451 Current Learning Rate: 0.0000005551 +2025-03-26 03:21:33,451 Train Loss: 0.0002104, Val Loss: 0.0002377 +2025-03-26 03:21:33,452 Epoch 1804/2000 +2025-03-26 03:22:38,133 Current Learning Rate: 0.0000009866 +2025-03-26 03:22:38,134 Train Loss: 0.0002104, Val Loss: 0.0002377 
+2025-03-26 03:22:38,134 Epoch 1805/2000 +2025-03-26 03:23:43,321 Current Learning Rate: 0.0000015413 +2025-03-26 03:23:43,322 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:23:43,322 Epoch 1806/2000 +2025-03-26 03:24:48,460 Current Learning Rate: 0.0000022190 +2025-03-26 03:24:48,461 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:24:48,461 Epoch 1807/2000 +2025-03-26 03:25:53,410 Current Learning Rate: 0.0000030195 +2025-03-26 03:25:53,410 Train Loss: 0.0002105, Val Loss: 0.0002377 +2025-03-26 03:25:53,410 Epoch 1808/2000 +2025-03-26 03:26:58,579 Current Learning Rate: 0.0000039426 +2025-03-26 03:26:58,658 Train Loss: 0.0002106, Val Loss: 0.0002377 +2025-03-26 03:26:58,658 Epoch 1809/2000 +2025-03-26 03:28:03,851 Current Learning Rate: 0.0000049882 +2025-03-26 03:28:03,914 Train Loss: 0.0002106, Val Loss: 0.0002377 +2025-03-26 03:28:03,914 Epoch 1810/2000 +2025-03-26 03:29:09,412 Current Learning Rate: 0.0000061558 +2025-03-26 03:29:09,413 Train Loss: 0.0002106, Val Loss: 0.0002377 +2025-03-26 03:29:09,413 Epoch 1811/2000 +2025-03-26 03:30:15,376 Current Learning Rate: 0.0000074453 +2025-03-26 03:30:15,376 Train Loss: 0.0002107, Val Loss: 0.0002377 +2025-03-26 03:30:15,377 Epoch 1812/2000 +2025-03-26 03:31:20,033 Current Learning Rate: 0.0000088564 +2025-03-26 03:31:20,033 Train Loss: 0.0002107, Val Loss: 0.0002377 +2025-03-26 03:31:20,034 Epoch 1813/2000 +2025-03-26 03:32:25,766 Current Learning Rate: 0.0000103886 +2025-03-26 03:32:25,767 Train Loss: 0.0002108, Val Loss: 0.0002377 +2025-03-26 03:32:25,767 Epoch 1814/2000 +2025-03-26 03:33:30,998 Current Learning Rate: 0.0000120416 +2025-03-26 03:33:30,998 Train Loss: 0.0002109, Val Loss: 0.0002378 +2025-03-26 03:33:30,998 Epoch 1815/2000 +2025-03-26 03:34:36,244 Current Learning Rate: 0.0000138150 +2025-03-26 03:34:36,244 Train Loss: 0.0002109, Val Loss: 0.0002378 +2025-03-26 03:34:36,245 Epoch 1816/2000 +2025-03-26 03:35:41,407 Current Learning Rate: 0.0000157084 +2025-03-26 03:35:41,407 
Train Loss: 0.0002110, Val Loss: 0.0002378 +2025-03-26 03:35:41,407 Epoch 1817/2000 +2025-03-26 03:36:46,539 Current Learning Rate: 0.0000177213 +2025-03-26 03:36:46,540 Train Loss: 0.0002111, Val Loss: 0.0002378 +2025-03-26 03:36:46,540 Epoch 1818/2000 +2025-03-26 03:37:51,833 Current Learning Rate: 0.0000198532 +2025-03-26 03:37:51,834 Train Loss: 0.0002111, Val Loss: 0.0002379 +2025-03-26 03:37:51,834 Epoch 1819/2000 +2025-03-26 03:38:56,748 Current Learning Rate: 0.0000221035 +2025-03-26 03:38:56,748 Train Loss: 0.0002112, Val Loss: 0.0002379 +2025-03-26 03:38:56,748 Epoch 1820/2000 +2025-03-26 03:40:01,656 Current Learning Rate: 0.0000244717 +2025-03-26 03:40:01,656 Train Loss: 0.0002113, Val Loss: 0.0002380 +2025-03-26 03:40:01,657 Epoch 1821/2000 +2025-03-26 03:41:06,782 Current Learning Rate: 0.0000269573 +2025-03-26 03:41:06,782 Train Loss: 0.0002113, Val Loss: 0.0002381 +2025-03-26 03:41:06,782 Epoch 1822/2000 +2025-03-26 03:42:12,002 Current Learning Rate: 0.0000295596 +2025-03-26 03:42:12,002 Train Loss: 0.0002114, Val Loss: 0.0002381 +2025-03-26 03:42:12,003 Epoch 1823/2000 +2025-03-26 03:43:17,174 Current Learning Rate: 0.0000322780 +2025-03-26 03:43:17,175 Train Loss: 0.0002115, Val Loss: 0.0002382 +2025-03-26 03:43:17,175 Epoch 1824/2000 +2025-03-26 03:44:22,094 Current Learning Rate: 0.0000351118 +2025-03-26 03:44:22,094 Train Loss: 0.0002116, Val Loss: 0.0002382 +2025-03-26 03:44:22,094 Epoch 1825/2000 +2025-03-26 03:45:27,155 Current Learning Rate: 0.0000380602 +2025-03-26 03:45:27,156 Train Loss: 0.0002117, Val Loss: 0.0002382 +2025-03-26 03:45:27,156 Epoch 1826/2000 +2025-03-26 03:46:31,990 Current Learning Rate: 0.0000411227 +2025-03-26 03:46:31,990 Train Loss: 0.0002118, Val Loss: 0.0002383 +2025-03-26 03:46:31,991 Epoch 1827/2000 +2025-03-26 03:47:37,176 Current Learning Rate: 0.0000442984 +2025-03-26 03:47:37,176 Train Loss: 0.0002119, Val Loss: 0.0002383 +2025-03-26 03:47:37,177 Epoch 1828/2000 +2025-03-26 03:48:42,381 Current Learning 
Rate: 0.0000475865 +2025-03-26 03:48:42,381 Train Loss: 0.0002120, Val Loss: 0.0002384 +2025-03-26 03:48:42,381 Epoch 1829/2000 +2025-03-26 03:49:47,472 Current Learning Rate: 0.0000509862 +2025-03-26 03:49:47,473 Train Loss: 0.0002121, Val Loss: 0.0002385 +2025-03-26 03:49:47,473 Epoch 1830/2000 +2025-03-26 03:50:52,673 Current Learning Rate: 0.0000544967 +2025-03-26 03:50:52,674 Train Loss: 0.0002122, Val Loss: 0.0002386 +2025-03-26 03:50:52,674 Epoch 1831/2000 +2025-03-26 03:51:57,185 Current Learning Rate: 0.0000581172 +2025-03-26 03:51:57,186 Train Loss: 0.0002123, Val Loss: 0.0002387 +2025-03-26 03:51:57,186 Epoch 1832/2000 +2025-03-26 03:53:02,234 Current Learning Rate: 0.0000618467 +2025-03-26 03:53:02,234 Train Loss: 0.0002124, Val Loss: 0.0002388 +2025-03-26 03:53:02,235 Epoch 1833/2000 +2025-03-26 03:54:07,492 Current Learning Rate: 0.0000656842 +2025-03-26 03:54:07,493 Train Loss: 0.0002125, Val Loss: 0.0002389 +2025-03-26 03:54:07,493 Epoch 1834/2000 +2025-03-26 03:55:12,576 Current Learning Rate: 0.0000696290 +2025-03-26 03:55:12,577 Train Loss: 0.0002126, Val Loss: 0.0002389 +2025-03-26 03:55:12,577 Epoch 1835/2000 +2025-03-26 03:56:17,629 Current Learning Rate: 0.0000736799 +2025-03-26 03:56:17,629 Train Loss: 0.0002127, Val Loss: 0.0002390 +2025-03-26 03:56:17,629 Epoch 1836/2000 +2025-03-26 03:57:22,823 Current Learning Rate: 0.0000778360 +2025-03-26 03:57:22,823 Train Loss: 0.0002128, Val Loss: 0.0002391 +2025-03-26 03:57:22,823 Epoch 1837/2000 +2025-03-26 03:58:27,906 Current Learning Rate: 0.0000820963 +2025-03-26 03:58:27,906 Train Loss: 0.0002130, Val Loss: 0.0002391 +2025-03-26 03:58:27,906 Epoch 1838/2000 +2025-03-26 03:59:33,492 Current Learning Rate: 0.0000864597 +2025-03-26 03:59:33,493 Train Loss: 0.0002131, Val Loss: 0.0002393 +2025-03-26 03:59:33,493 Epoch 1839/2000 +2025-03-26 04:00:38,905 Current Learning Rate: 0.0000909251 +2025-03-26 04:00:38,905 Train Loss: 0.0002132, Val Loss: 0.0002394 +2025-03-26 04:00:38,906 Epoch 1840/2000 
+2025-03-26 04:01:44,233 Current Learning Rate: 0.0000954915 +2025-03-26 04:01:44,233 Train Loss: 0.0002133, Val Loss: 0.0002396 +2025-03-26 04:01:44,234 Epoch 1841/2000 +2025-03-26 04:02:49,640 Current Learning Rate: 0.0001001577 +2025-03-26 04:02:49,641 Train Loss: 0.0002134, Val Loss: 0.0002398 +2025-03-26 04:02:49,641 Epoch 1842/2000 +2025-03-26 04:03:55,260 Current Learning Rate: 0.0001049225 +2025-03-26 04:03:55,260 Train Loss: 0.0002136, Val Loss: 0.0002400 +2025-03-26 04:03:55,261 Epoch 1843/2000 +2025-03-26 04:05:00,865 Current Learning Rate: 0.0001097848 +2025-03-26 04:05:00,866 Train Loss: 0.0002137, Val Loss: 0.0002401 +2025-03-26 04:05:00,866 Epoch 1844/2000 +2025-03-26 04:06:06,418 Current Learning Rate: 0.0001147434 +2025-03-26 04:06:06,418 Train Loss: 0.0002138, Val Loss: 0.0002404 +2025-03-26 04:06:06,419 Epoch 1845/2000 +2025-03-26 04:07:11,995 Current Learning Rate: 0.0001197970 +2025-03-26 04:07:11,995 Train Loss: 0.0002140, Val Loss: 0.0002406 +2025-03-26 04:07:11,995 Epoch 1846/2000 +2025-03-26 04:08:17,541 Current Learning Rate: 0.0001249445 +2025-03-26 04:08:17,541 Train Loss: 0.0002141, Val Loss: 0.0002408 +2025-03-26 04:08:17,542 Epoch 1847/2000 +2025-03-26 04:09:22,961 Current Learning Rate: 0.0001301845 +2025-03-26 04:09:22,962 Train Loss: 0.0002142, Val Loss: 0.0002409 +2025-03-26 04:09:22,962 Epoch 1848/2000 +2025-03-26 04:10:28,458 Current Learning Rate: 0.0001355157 +2025-03-26 04:10:28,458 Train Loss: 0.0002144, Val Loss: 0.0002411 +2025-03-26 04:10:28,459 Epoch 1849/2000 +2025-03-26 04:11:33,853 Current Learning Rate: 0.0001409369 +2025-03-26 04:11:33,853 Train Loss: 0.0002145, Val Loss: 0.0002411 +2025-03-26 04:11:33,853 Epoch 1850/2000 +2025-03-26 04:12:39,097 Current Learning Rate: 0.0001464466 +2025-03-26 04:12:39,097 Train Loss: 0.0002147, Val Loss: 0.0002412 +2025-03-26 04:12:39,097 Epoch 1851/2000 +2025-03-26 04:13:44,472 Current Learning Rate: 0.0001520436 +2025-03-26 04:13:44,472 Train Loss: 0.0002148, Val Loss: 0.0002413 
+2025-03-26 04:13:44,472 Epoch 1852/2000 +2025-03-26 04:14:49,471 Current Learning Rate: 0.0001577264 +2025-03-26 04:14:49,472 Train Loss: 0.0002149, Val Loss: 0.0002414 +2025-03-26 04:14:49,472 Epoch 1853/2000 +2025-03-26 04:15:54,950 Current Learning Rate: 0.0001634937 +2025-03-26 04:15:54,950 Train Loss: 0.0002151, Val Loss: 0.0002415 +2025-03-26 04:15:54,951 Epoch 1854/2000 +2025-03-26 04:17:00,015 Current Learning Rate: 0.0001693441 +2025-03-26 04:17:00,015 Train Loss: 0.0002153, Val Loss: 0.0002416 +2025-03-26 04:17:00,016 Epoch 1855/2000 +2025-03-26 04:18:04,854 Current Learning Rate: 0.0001752760 +2025-03-26 04:18:04,854 Train Loss: 0.0002154, Val Loss: 0.0002417 +2025-03-26 04:18:04,855 Epoch 1856/2000 +2025-03-26 04:19:10,329 Current Learning Rate: 0.0001812880 +2025-03-26 04:19:10,329 Train Loss: 0.0002156, Val Loss: 0.0002417 +2025-03-26 04:19:10,329 Epoch 1857/2000 +2025-03-26 04:20:15,709 Current Learning Rate: 0.0001873787 +2025-03-26 04:20:15,709 Train Loss: 0.0002157, Val Loss: 0.0002418 +2025-03-26 04:20:15,710 Epoch 1858/2000 +2025-03-26 04:21:21,033 Current Learning Rate: 0.0001935465 +2025-03-26 04:21:21,033 Train Loss: 0.0002159, Val Loss: 0.0002418 +2025-03-26 04:21:21,034 Epoch 1859/2000 +2025-03-26 04:22:26,279 Current Learning Rate: 0.0001997899 +2025-03-26 04:22:26,280 Train Loss: 0.0002160, Val Loss: 0.0002419 +2025-03-26 04:22:26,280 Epoch 1860/2000 +2025-03-26 04:23:31,621 Current Learning Rate: 0.0002061074 +2025-03-26 04:23:31,622 Train Loss: 0.0002162, Val Loss: 0.0002420 +2025-03-26 04:23:31,622 Epoch 1861/2000 +2025-03-26 04:24:36,683 Current Learning Rate: 0.0002124974 +2025-03-26 04:24:36,683 Train Loss: 0.0002164, Val Loss: 0.0002422 +2025-03-26 04:24:36,683 Epoch 1862/2000 +2025-03-26 04:25:42,038 Current Learning Rate: 0.0002189583 +2025-03-26 04:25:42,038 Train Loss: 0.0002165, Val Loss: 0.0002423 +2025-03-26 04:25:42,038 Epoch 1863/2000 +2025-03-26 04:26:47,549 Current Learning Rate: 0.0002254886 +2025-03-26 04:26:47,549 
Train Loss: 0.0002167, Val Loss: 0.0002424 +2025-03-26 04:26:47,549 Epoch 1864/2000 +2025-03-26 04:27:52,958 Current Learning Rate: 0.0002320866 +2025-03-26 04:27:52,958 Train Loss: 0.0002168, Val Loss: 0.0002426 +2025-03-26 04:27:52,958 Epoch 1865/2000 +2025-03-26 04:28:58,613 Current Learning Rate: 0.0002387507 +2025-03-26 04:28:58,614 Train Loss: 0.0002170, Val Loss: 0.0002427 +2025-03-26 04:28:58,614 Epoch 1866/2000 +2025-03-26 04:30:03,904 Current Learning Rate: 0.0002454793 +2025-03-26 04:30:03,904 Train Loss: 0.0002172, Val Loss: 0.0002428 +2025-03-26 04:30:03,905 Epoch 1867/2000 +2025-03-26 04:31:09,527 Current Learning Rate: 0.0002522707 +2025-03-26 04:31:09,527 Train Loss: 0.0002174, Val Loss: 0.0002430 +2025-03-26 04:31:09,527 Epoch 1868/2000 +2025-03-26 04:32:15,114 Current Learning Rate: 0.0002591232 +2025-03-26 04:32:15,115 Train Loss: 0.0002175, Val Loss: 0.0002432 +2025-03-26 04:32:15,115 Epoch 1869/2000 +2025-03-26 04:33:20,185 Current Learning Rate: 0.0002660351 +2025-03-26 04:33:20,186 Train Loss: 0.0002177, Val Loss: 0.0002434 +2025-03-26 04:33:20,186 Epoch 1870/2000 +2025-03-26 04:34:25,353 Current Learning Rate: 0.0002730048 +2025-03-26 04:34:25,353 Train Loss: 0.0002179, Val Loss: 0.0002436 +2025-03-26 04:34:25,353 Epoch 1871/2000 +2025-03-26 04:35:30,793 Current Learning Rate: 0.0002800304 +2025-03-26 04:35:30,793 Train Loss: 0.0002181, Val Loss: 0.0002438 +2025-03-26 04:35:30,793 Epoch 1872/2000 +2025-03-26 04:36:35,975 Current Learning Rate: 0.0002871104 +2025-03-26 04:36:35,975 Train Loss: 0.0002183, Val Loss: 0.0002440 +2025-03-26 04:36:35,975 Epoch 1873/2000 +2025-03-26 04:37:41,499 Current Learning Rate: 0.0002942428 +2025-03-26 04:37:41,500 Train Loss: 0.0002184, Val Loss: 0.0002442 +2025-03-26 04:37:41,500 Epoch 1874/2000 +2025-03-26 04:38:46,731 Current Learning Rate: 0.0003014261 +2025-03-26 04:38:46,731 Train Loss: 0.0002186, Val Loss: 0.0002445 +2025-03-26 04:38:46,732 Epoch 1875/2000 +2025-03-26 04:39:52,064 Current Learning 
Rate: 0.0003086583 +2025-03-26 04:39:52,065 Train Loss: 0.0002188, Val Loss: 0.0002447 +2025-03-26 04:39:52,065 Epoch 1876/2000 +2025-03-26 04:40:56,964 Current Learning Rate: 0.0003159377 +2025-03-26 04:40:56,964 Train Loss: 0.0002190, Val Loss: 0.0002449 +2025-03-26 04:40:56,964 Epoch 1877/2000 +2025-03-26 04:42:01,948 Current Learning Rate: 0.0003232626 +2025-03-26 04:42:01,948 Train Loss: 0.0002192, Val Loss: 0.0002452 +2025-03-26 04:42:01,948 Epoch 1878/2000 +2025-03-26 04:43:06,900 Current Learning Rate: 0.0003306310 +2025-03-26 04:43:06,901 Train Loss: 0.0002194, Val Loss: 0.0002454 +2025-03-26 04:43:06,901 Epoch 1879/2000 +2025-03-26 04:44:12,425 Current Learning Rate: 0.0003380413 +2025-03-26 04:44:12,425 Train Loss: 0.0002196, Val Loss: 0.0002456 +2025-03-26 04:44:12,426 Epoch 1880/2000 +2025-03-26 04:45:17,465 Current Learning Rate: 0.0003454915 +2025-03-26 04:45:17,466 Train Loss: 0.0002198, Val Loss: 0.0002457 +2025-03-26 04:45:17,466 Epoch 1881/2000 +2025-03-26 04:46:22,765 Current Learning Rate: 0.0003529798 +2025-03-26 04:46:22,765 Train Loss: 0.0002199, Val Loss: 0.0002459 +2025-03-26 04:46:22,766 Epoch 1882/2000 +2025-03-26 04:47:28,335 Current Learning Rate: 0.0003605044 +2025-03-26 04:47:28,336 Train Loss: 0.0002201, Val Loss: 0.0002460 +2025-03-26 04:47:28,336 Epoch 1883/2000 +2025-03-26 04:48:33,788 Current Learning Rate: 0.0003680635 +2025-03-26 04:48:33,788 Train Loss: 0.0002203, Val Loss: 0.0002462 +2025-03-26 04:48:33,789 Epoch 1884/2000 +2025-03-26 04:49:39,534 Current Learning Rate: 0.0003756551 +2025-03-26 04:49:39,535 Train Loss: 0.0002205, Val Loss: 0.0002463 +2025-03-26 04:49:39,535 Epoch 1885/2000 +2025-03-26 04:50:44,758 Current Learning Rate: 0.0003832773 +2025-03-26 04:50:44,758 Train Loss: 0.0002207, Val Loss: 0.0002465 +2025-03-26 04:50:44,759 Epoch 1886/2000 +2025-03-26 04:51:49,700 Current Learning Rate: 0.0003909284 +2025-03-26 04:51:49,700 Train Loss: 0.0002209, Val Loss: 0.0002467 +2025-03-26 04:51:49,700 Epoch 1887/2000 
+2025-03-26 04:52:54,974 Current Learning Rate: 0.0003986064 +2025-03-26 04:52:54,975 Train Loss: 0.0002211, Val Loss: 0.0002469 +2025-03-26 04:52:54,975 Epoch 1888/2000 +2025-03-26 04:53:59,737 Current Learning Rate: 0.0004063093 +2025-03-26 04:53:59,738 Train Loss: 0.0002213, Val Loss: 0.0002471 +2025-03-26 04:53:59,738 Epoch 1889/2000 +2025-03-26 04:55:04,716 Current Learning Rate: 0.0004140354 +2025-03-26 04:55:04,717 Train Loss: 0.0002215, Val Loss: 0.0002474 +2025-03-26 04:55:04,717 Epoch 1890/2000 +2025-03-26 04:56:09,772 Current Learning Rate: 0.0004217828 +2025-03-26 04:56:09,772 Train Loss: 0.0002217, Val Loss: 0.0002476 +2025-03-26 04:56:09,772 Epoch 1891/2000 +2025-03-26 04:57:14,974 Current Learning Rate: 0.0004295494 +2025-03-26 04:57:14,975 Train Loss: 0.0002219, Val Loss: 0.0002479 +2025-03-26 04:57:14,975 Epoch 1892/2000 +2025-03-26 04:58:20,110 Current Learning Rate: 0.0004373334 +2025-03-26 04:58:20,111 Train Loss: 0.0002221, Val Loss: 0.0002482 +2025-03-26 04:58:20,111 Epoch 1893/2000 +2025-03-26 04:59:25,384 Current Learning Rate: 0.0004451328 +2025-03-26 04:59:25,385 Train Loss: 0.0002222, Val Loss: 0.0002485 +2025-03-26 04:59:25,385 Epoch 1894/2000 +2025-03-26 05:00:30,581 Current Learning Rate: 0.0004529458 +2025-03-26 05:00:30,581 Train Loss: 0.0002224, Val Loss: 0.0002488 +2025-03-26 05:00:30,581 Epoch 1895/2000 +2025-03-26 05:01:35,692 Current Learning Rate: 0.0004607705 +2025-03-26 05:01:35,692 Train Loss: 0.0002226, Val Loss: 0.0002492 +2025-03-26 05:01:35,692 Epoch 1896/2000 +2025-03-26 05:02:41,477 Current Learning Rate: 0.0004686047 +2025-03-26 05:02:41,478 Train Loss: 0.0002228, Val Loss: 0.0002495 +2025-03-26 05:02:41,478 Epoch 1897/2000 +2025-03-26 05:03:47,597 Current Learning Rate: 0.0004764468 +2025-03-26 05:03:47,598 Train Loss: 0.0002230, Val Loss: 0.0002499 +2025-03-26 05:03:47,598 Epoch 1898/2000 +2025-03-26 05:04:53,042 Current Learning Rate: 0.0004842946 +2025-03-26 05:04:53,043 Train Loss: 0.0002232, Val Loss: 0.0002503 
+2025-03-26 05:04:53,043 Epoch 1899/2000 +2025-03-26 05:05:58,002 Current Learning Rate: 0.0004921463 +2025-03-26 05:05:58,003 Train Loss: 0.0002234, Val Loss: 0.0002507 +2025-03-26 05:05:58,003 Epoch 1900/2000 +2025-03-26 05:07:03,347 Current Learning Rate: 0.0005000000 +2025-03-26 05:07:03,348 Train Loss: 0.0002235, Val Loss: 0.0002512 +2025-03-26 05:07:03,348 Epoch 1901/2000 +2025-03-26 05:08:08,458 Current Learning Rate: 0.0005078537 +2025-03-26 05:08:08,459 Train Loss: 0.0002237, Val Loss: 0.0002516 +2025-03-26 05:08:08,459 Epoch 1902/2000 +2025-03-26 05:09:13,649 Current Learning Rate: 0.0005157054 +2025-03-26 05:09:13,649 Train Loss: 0.0002239, Val Loss: 0.0002520 +2025-03-26 05:09:13,649 Epoch 1903/2000 +2025-03-26 05:10:18,795 Current Learning Rate: 0.0005235532 +2025-03-26 05:10:18,796 Train Loss: 0.0002241, Val Loss: 0.0002522 +2025-03-26 05:10:18,796 Epoch 1904/2000 +2025-03-26 05:11:24,087 Current Learning Rate: 0.0005313953 +2025-03-26 05:11:24,087 Train Loss: 0.0002243, Val Loss: 0.0002522 +2025-03-26 05:11:24,088 Epoch 1905/2000 +2025-03-26 05:12:28,892 Current Learning Rate: 0.0005392295 +2025-03-26 05:12:28,892 Train Loss: 0.0002245, Val Loss: 0.0002521 +2025-03-26 05:12:28,892 Epoch 1906/2000 +2025-03-26 05:13:34,737 Current Learning Rate: 0.0005470542 +2025-03-26 05:13:34,738 Train Loss: 0.0002247, Val Loss: 0.0002521 +2025-03-26 05:13:34,738 Epoch 1907/2000 +2025-03-26 05:14:39,566 Current Learning Rate: 0.0005548672 +2025-03-26 05:14:39,566 Train Loss: 0.0002249, Val Loss: 0.0002524 +2025-03-26 05:14:39,566 Epoch 1908/2000 +2025-03-26 05:15:44,566 Current Learning Rate: 0.0005626666 +2025-03-26 05:15:44,566 Train Loss: 0.0002251, Val Loss: 0.0002526 +2025-03-26 05:15:44,566 Epoch 1909/2000 +2025-03-26 05:16:49,491 Current Learning Rate: 0.0005704506 +2025-03-26 05:16:49,491 Train Loss: 0.0002253, Val Loss: 0.0002529 +2025-03-26 05:16:49,491 Epoch 1910/2000 +2025-03-26 05:17:54,454 Current Learning Rate: 0.0005782172 +2025-03-26 05:17:54,454 
Train Loss: 0.0002255, Val Loss: 0.0002533 +2025-03-26 05:17:54,454 Epoch 1911/2000 +2025-03-26 05:18:59,572 Current Learning Rate: 0.0005859646 +2025-03-26 05:18:59,573 Train Loss: 0.0002257, Val Loss: 0.0002537 +2025-03-26 05:18:59,573 Epoch 1912/2000 +2025-03-26 05:20:04,491 Current Learning Rate: 0.0005936907 +2025-03-26 05:20:04,492 Train Loss: 0.0002259, Val Loss: 0.0002542 +2025-03-26 05:20:04,492 Epoch 1913/2000 +2025-03-26 05:21:09,982 Current Learning Rate: 0.0006013936 +2025-03-26 05:21:09,982 Train Loss: 0.0002262, Val Loss: 0.0002548 +2025-03-26 05:21:09,983 Epoch 1914/2000 +2025-03-26 05:22:15,115 Current Learning Rate: 0.0006090716 +2025-03-26 05:22:15,116 Train Loss: 0.0002266, Val Loss: 0.0002558 +2025-03-26 05:22:15,116 Epoch 1915/2000 +2025-03-26 05:23:20,683 Current Learning Rate: 0.0006167227 +2025-03-26 05:23:20,684 Train Loss: 0.0002272, Val Loss: 0.0002585 +2025-03-26 05:23:20,684 Epoch 1916/2000 +2025-03-26 05:24:25,977 Current Learning Rate: 0.0006243449 +2025-03-26 05:24:25,977 Train Loss: 0.0002277, Val Loss: 0.0002556 +2025-03-26 05:24:25,977 Epoch 1917/2000 +2025-03-26 05:25:31,222 Current Learning Rate: 0.0006319365 +2025-03-26 05:25:31,222 Train Loss: 0.0002281, Val Loss: 0.0002557 +2025-03-26 05:25:31,222 Epoch 1918/2000 +2025-03-26 05:26:36,017 Current Learning Rate: 0.0006394956 +2025-03-26 05:26:36,018 Train Loss: 0.0002286, Val Loss: 0.0002565 +2025-03-26 05:26:36,018 Epoch 1919/2000 +2025-03-26 05:27:41,360 Current Learning Rate: 0.0006470202 +2025-03-26 05:27:41,360 Train Loss: 0.0002285, Val Loss: 0.0002573 +2025-03-26 05:27:41,361 Epoch 1920/2000 +2025-03-26 05:28:46,001 Current Learning Rate: 0.0006545085 +2025-03-26 05:28:46,002 Train Loss: 0.0002280, Val Loss: 0.0002568 +2025-03-26 05:28:46,002 Epoch 1921/2000 +2025-03-26 05:29:51,552 Current Learning Rate: 0.0006619587 +2025-03-26 05:29:51,552 Train Loss: 0.0002274, Val Loss: 0.0002555 +2025-03-26 05:29:51,552 Epoch 1922/2000 +2025-03-26 05:30:57,459 Current Learning 
Rate: 0.0006693690 +2025-03-26 05:30:57,460 Train Loss: 0.0002272, Val Loss: 0.0002563 +2025-03-26 05:30:57,460 Epoch 1923/2000 +2025-03-26 05:32:02,496 Current Learning Rate: 0.0006767374 +2025-03-26 05:32:02,497 Train Loss: 0.0002274, Val Loss: 0.0002570 +2025-03-26 05:32:02,497 Epoch 1924/2000 +2025-03-26 05:33:07,567 Current Learning Rate: 0.0006840623 +2025-03-26 05:33:07,567 Train Loss: 0.0002277, Val Loss: 0.0002576 +2025-03-26 05:33:07,567 Epoch 1925/2000 +2025-03-26 05:34:12,364 Current Learning Rate: 0.0006913417 +2025-03-26 05:34:12,364 Train Loss: 0.0002280, Val Loss: 0.0002581 +2025-03-26 05:34:12,364 Epoch 1926/2000 +2025-03-26 05:35:17,858 Current Learning Rate: 0.0006985739 +2025-03-26 05:35:17,858 Train Loss: 0.0002283, Val Loss: 0.0002588 +2025-03-26 05:35:17,858 Epoch 1927/2000 +2025-03-26 05:36:22,947 Current Learning Rate: 0.0007057572 +2025-03-26 05:36:22,947 Train Loss: 0.0002286, Val Loss: 0.0002591 +2025-03-26 05:36:22,948 Epoch 1928/2000 +2025-03-26 05:37:28,211 Current Learning Rate: 0.0007128896 +2025-03-26 05:37:28,212 Train Loss: 0.0002289, Val Loss: 0.0002593 +2025-03-26 05:37:28,212 Epoch 1929/2000 +2025-03-26 05:38:33,327 Current Learning Rate: 0.0007199696 +2025-03-26 05:38:33,327 Train Loss: 0.0002295, Val Loss: 0.0002602 +2025-03-26 05:38:33,327 Epoch 1930/2000 +2025-03-26 05:39:38,307 Current Learning Rate: 0.0007269952 +2025-03-26 05:39:38,307 Train Loss: 0.0002304, Val Loss: 0.0002604 +2025-03-26 05:39:38,308 Epoch 1931/2000 +2025-03-26 05:40:42,918 Current Learning Rate: 0.0007339649 +2025-03-26 05:40:42,919 Train Loss: 0.0002309, Val Loss: 0.0002592 +2025-03-26 05:40:42,919 Epoch 1932/2000 +2025-03-26 05:41:48,142 Current Learning Rate: 0.0007408768 +2025-03-26 05:41:48,143 Train Loss: 0.0002308, Val Loss: 0.0002596 +2025-03-26 05:41:48,143 Epoch 1933/2000 +2025-03-26 05:42:54,250 Current Learning Rate: 0.0007477293 +2025-03-26 05:42:54,250 Train Loss: 0.0002303, Val Loss: 0.0002604 +2025-03-26 05:42:54,251 Epoch 1934/2000 
+2025-03-26 05:44:00,217 Current Learning Rate: 0.0007545207 +2025-03-26 05:44:00,217 Train Loss: 0.0002299, Val Loss: 0.0002601 +2025-03-26 05:44:00,217 Epoch 1935/2000 +2025-03-26 05:45:05,424 Current Learning Rate: 0.0007612493 +2025-03-26 05:45:05,424 Train Loss: 0.0002296, Val Loss: 0.0002603 +2025-03-26 05:45:05,424 Epoch 1936/2000 +2025-03-26 05:46:10,268 Current Learning Rate: 0.0007679134 +2025-03-26 05:46:10,268 Train Loss: 0.0002295, Val Loss: 0.0002608 +2025-03-26 05:46:10,269 Epoch 1937/2000 +2025-03-26 05:47:15,704 Current Learning Rate: 0.0007745114 +2025-03-26 05:47:15,705 Train Loss: 0.0002297, Val Loss: 0.0002614 +2025-03-26 05:47:15,705 Epoch 1938/2000 +2025-03-26 05:48:21,034 Current Learning Rate: 0.0007810417 +2025-03-26 05:48:21,035 Train Loss: 0.0002300, Val Loss: 0.0002623 +2025-03-26 05:48:21,035 Epoch 1939/2000 +2025-03-26 05:49:26,167 Current Learning Rate: 0.0007875026 +2025-03-26 05:49:26,167 Train Loss: 0.0002303, Val Loss: 0.0002630 +2025-03-26 05:49:26,168 Epoch 1940/2000 +2025-03-26 05:50:31,111 Current Learning Rate: 0.0007938926 +2025-03-26 05:50:31,112 Train Loss: 0.0002307, Val Loss: 0.0002638 +2025-03-26 05:50:31,112 Epoch 1941/2000 +2025-03-26 05:51:35,616 Current Learning Rate: 0.0008002101 +2025-03-26 05:51:35,617 Train Loss: 0.0002312, Val Loss: 0.0002644 +2025-03-26 05:51:35,617 Epoch 1942/2000 +2025-03-26 05:52:41,326 Current Learning Rate: 0.0008064535 +2025-03-26 05:52:41,327 Train Loss: 0.0002320, Val Loss: 0.0002642 +2025-03-26 05:52:41,327 Epoch 1943/2000 +2025-03-26 05:53:46,495 Current Learning Rate: 0.0008126213 +2025-03-26 05:53:46,496 Train Loss: 0.0002327, Val Loss: 0.0002641 +2025-03-26 05:53:46,496 Epoch 1944/2000 +2025-03-26 05:54:51,616 Current Learning Rate: 0.0008187120 +2025-03-26 05:54:51,616 Train Loss: 0.0002332, Val Loss: 0.0002638 +2025-03-26 05:54:51,617 Epoch 1945/2000 +2025-03-26 05:55:57,077 Current Learning Rate: 0.0008247240 +2025-03-26 05:55:57,077 Train Loss: 0.0002334, Val Loss: 0.0002626 
+2025-03-26 05:55:57,078 Epoch 1946/2000 +2025-03-26 05:57:02,368 Current Learning Rate: 0.0008306559 +2025-03-26 05:57:02,368 Train Loss: 0.0002332, Val Loss: 0.0002622 +2025-03-26 05:57:02,369 Epoch 1947/2000 +2025-03-26 05:58:07,776 Current Learning Rate: 0.0008365063 +2025-03-26 05:58:07,776 Train Loss: 0.0002325, Val Loss: 0.0002625 +2025-03-26 05:58:07,776 Epoch 1948/2000 +2025-03-26 05:59:13,006 Current Learning Rate: 0.0008422736 +2025-03-26 05:59:13,006 Train Loss: 0.0002316, Val Loss: 0.0002627 +2025-03-26 05:59:13,006 Epoch 1949/2000 +2025-03-26 06:00:18,141 Current Learning Rate: 0.0008479564 +2025-03-26 06:00:18,142 Train Loss: 0.0002312, Val Loss: 0.0002632 +2025-03-26 06:00:18,142 Epoch 1950/2000 +2025-03-26 06:01:23,291 Current Learning Rate: 0.0008535534 +2025-03-26 06:01:23,291 Train Loss: 0.0002314, Val Loss: 0.0002644 +2025-03-26 06:01:23,291 Epoch 1951/2000 +2025-03-26 06:02:28,211 Current Learning Rate: 0.0008590631 +2025-03-26 06:02:28,211 Train Loss: 0.0002318, Val Loss: 0.0002652 +2025-03-26 06:02:28,211 Epoch 1952/2000 +2025-03-26 06:03:33,519 Current Learning Rate: 0.0008644843 +2025-03-26 06:03:33,519 Train Loss: 0.0002322, Val Loss: 0.0002658 +2025-03-26 06:03:33,520 Epoch 1953/2000 +2025-03-26 06:04:38,569 Current Learning Rate: 0.0008698155 +2025-03-26 06:04:38,570 Train Loss: 0.0002327, Val Loss: 0.0002667 +2025-03-26 06:04:38,570 Epoch 1954/2000 +2025-03-26 06:05:43,557 Current Learning Rate: 0.0008750555 +2025-03-26 06:05:43,558 Train Loss: 0.0002333, Val Loss: 0.0002672 +2025-03-26 06:05:43,558 Epoch 1955/2000 +2025-03-26 06:06:48,590 Current Learning Rate: 0.0008802030 +2025-03-26 06:06:48,590 Train Loss: 0.0002339, Val Loss: 0.0002664 +2025-03-26 06:06:48,590 Epoch 1956/2000 +2025-03-26 06:07:53,738 Current Learning Rate: 0.0008852566 +2025-03-26 06:07:53,739 Train Loss: 0.0002344, Val Loss: 0.0002677 +2025-03-26 06:07:53,739 Epoch 1957/2000 +2025-03-26 06:08:58,847 Current Learning Rate: 0.0008902152 +2025-03-26 06:08:58,848 
Train Loss: 0.0002347, Val Loss: 0.0002672 +2025-03-26 06:08:58,848 Epoch 1958/2000 +2025-03-26 06:10:03,891 Current Learning Rate: 0.0008950775 +2025-03-26 06:10:03,891 Train Loss: 0.0002355, Val Loss: 0.0002672 +2025-03-26 06:10:03,892 Epoch 1959/2000 +2025-03-26 06:11:08,378 Current Learning Rate: 0.0008998423 +2025-03-26 06:11:08,378 Train Loss: 0.0002356, Val Loss: 0.0002666 +2025-03-26 06:11:08,379 Epoch 1960/2000 +2025-03-26 06:12:13,785 Current Learning Rate: 0.0009045085 +2025-03-26 06:12:13,785 Train Loss: 0.0002352, Val Loss: 0.0002650 +2025-03-26 06:12:13,786 Epoch 1961/2000 +2025-03-26 06:13:18,891 Current Learning Rate: 0.0009090749 +2025-03-26 06:13:18,891 Train Loss: 0.0002340, Val Loss: 0.0002649 +2025-03-26 06:13:18,891 Epoch 1962/2000 +2025-03-26 06:14:24,018 Current Learning Rate: 0.0009135403 +2025-03-26 06:14:24,019 Train Loss: 0.0002329, Val Loss: 0.0002649 +2025-03-26 06:14:24,019 Epoch 1963/2000 +2025-03-26 06:15:29,235 Current Learning Rate: 0.0009179037 +2025-03-26 06:15:29,236 Train Loss: 0.0002327, Val Loss: 0.0002656 +2025-03-26 06:15:29,236 Epoch 1964/2000 +2025-03-26 06:16:34,406 Current Learning Rate: 0.0009221640 +2025-03-26 06:16:34,406 Train Loss: 0.0002330, Val Loss: 0.0002666 +2025-03-26 06:16:34,407 Epoch 1965/2000 +2025-03-26 06:17:39,699 Current Learning Rate: 0.0009263201 +2025-03-26 06:17:39,700 Train Loss: 0.0002333, Val Loss: 0.0002672 +2025-03-26 06:17:39,701 Epoch 1966/2000 +2025-03-26 06:18:44,992 Current Learning Rate: 0.0009303710 +2025-03-26 06:18:44,993 Train Loss: 0.0002337, Val Loss: 0.0002679 +2025-03-26 06:18:44,994 Epoch 1967/2000 +2025-03-26 06:19:50,130 Current Learning Rate: 0.0009343158 +2025-03-26 06:19:50,131 Train Loss: 0.0002344, Val Loss: 0.0002687 +2025-03-26 06:19:50,131 Epoch 1968/2000 +2025-03-26 06:20:55,383 Current Learning Rate: 0.0009381533 +2025-03-26 06:20:55,384 Train Loss: 0.0002351, Val Loss: 0.0002698 +2025-03-26 06:20:55,384 Epoch 1969/2000 +2025-03-26 06:22:00,827 Current Learning 
Rate: 0.0009418828 +2025-03-26 06:22:00,828 Train Loss: 0.0002354, Val Loss: 0.0002689 +2025-03-26 06:22:00,828 Epoch 1970/2000 +2025-03-26 06:23:06,312 Current Learning Rate: 0.0009455033 +2025-03-26 06:23:06,313 Train Loss: 0.0002357, Val Loss: 0.0002700 +2025-03-26 06:23:06,313 Epoch 1971/2000 +2025-03-26 06:24:11,772 Current Learning Rate: 0.0009490138 +2025-03-26 06:24:11,773 Train Loss: 0.0002359, Val Loss: 0.0002696 +2025-03-26 06:24:11,773 Epoch 1972/2000 +2025-03-26 06:25:17,049 Current Learning Rate: 0.0009524135 +2025-03-26 06:25:17,050 Train Loss: 0.0002363, Val Loss: 0.0002697 +2025-03-26 06:25:17,050 Epoch 1973/2000 +2025-03-26 06:26:22,244 Current Learning Rate: 0.0009557016 +2025-03-26 06:26:22,245 Train Loss: 0.0002371, Val Loss: 0.0002697 +2025-03-26 06:26:22,245 Epoch 1974/2000 +2025-03-26 06:27:27,163 Current Learning Rate: 0.0009588773 +2025-03-26 06:27:27,163 Train Loss: 0.0002372, Val Loss: 0.0002703 +2025-03-26 06:27:27,164 Epoch 1975/2000 +2025-03-26 06:28:32,369 Current Learning Rate: 0.0009619398 +2025-03-26 06:28:32,370 Train Loss: 0.0002370, Val Loss: 0.0002686 +2025-03-26 06:28:32,370 Epoch 1976/2000 +2025-03-26 06:29:37,625 Current Learning Rate: 0.0009648882 +2025-03-26 06:29:37,626 Train Loss: 0.0002359, Val Loss: 0.0002675 +2025-03-26 06:29:37,626 Epoch 1977/2000 +2025-03-26 06:30:43,263 Current Learning Rate: 0.0009677220 +2025-03-26 06:30:43,263 Train Loss: 0.0002343, Val Loss: 0.0002666 +2025-03-26 06:30:43,264 Epoch 1978/2000 +2025-03-26 06:31:48,205 Current Learning Rate: 0.0009704404 +2025-03-26 06:31:48,205 Train Loss: 0.0002337, Val Loss: 0.0002670 +2025-03-26 06:31:48,205 Epoch 1979/2000 +2025-03-26 06:32:53,383 Current Learning Rate: 0.0009730427 +2025-03-26 06:32:53,384 Train Loss: 0.0002339, Val Loss: 0.0002680 +2025-03-26 06:32:53,384 Epoch 1980/2000 +2025-03-26 06:33:58,621 Current Learning Rate: 0.0009755283 +2025-03-26 06:33:58,621 Train Loss: 0.0002342, Val Loss: 0.0002690 +2025-03-26 06:33:58,621 Epoch 1981/2000 
+2025-03-26 06:35:03,616 Current Learning Rate: 0.0009778965 +2025-03-26 06:35:03,616 Train Loss: 0.0002346, Val Loss: 0.0002695 +2025-03-26 06:35:03,617 Epoch 1982/2000 +2025-03-26 06:36:08,931 Current Learning Rate: 0.0009801468 +2025-03-26 06:36:08,931 Train Loss: 0.0002354, Val Loss: 0.0002701 +2025-03-26 06:36:08,932 Epoch 1983/2000 +2025-03-26 06:37:14,084 Current Learning Rate: 0.0009822787 +2025-03-26 06:37:14,084 Train Loss: 0.0002360, Val Loss: 0.0002708 +2025-03-26 06:37:14,084 Epoch 1984/2000 +2025-03-26 06:38:19,133 Current Learning Rate: 0.0009842916 +2025-03-26 06:38:19,133 Train Loss: 0.0002361, Val Loss: 0.0002709 +2025-03-26 06:38:19,133 Epoch 1985/2000 +2025-03-26 06:39:23,822 Current Learning Rate: 0.0009861850 +2025-03-26 06:39:23,823 Train Loss: 0.0002364, Val Loss: 0.0002712 +2025-03-26 06:39:23,823 Epoch 1986/2000 +2025-03-26 06:40:28,914 Current Learning Rate: 0.0009879584 +2025-03-26 06:40:28,914 Train Loss: 0.0002368, Val Loss: 0.0002710 +2025-03-26 06:40:28,915 Epoch 1987/2000 +2025-03-26 06:41:34,137 Current Learning Rate: 0.0009896114 +2025-03-26 06:41:34,138 Train Loss: 0.0002370, Val Loss: 0.0002709 +2025-03-26 06:41:34,138 Epoch 1988/2000 +2025-03-26 06:42:39,153 Current Learning Rate: 0.0009911436 +2025-03-26 06:42:39,153 Train Loss: 0.0002376, Val Loss: 0.0002715 +2025-03-26 06:42:39,154 Epoch 1989/2000 +2025-03-26 06:43:44,039 Current Learning Rate: 0.0009925547 +2025-03-26 06:43:44,039 Train Loss: 0.0002380, Val Loss: 0.0002711 +2025-03-26 06:43:44,040 Epoch 1990/2000 +2025-03-26 06:44:49,139 Current Learning Rate: 0.0009938442 +2025-03-26 06:44:49,139 Train Loss: 0.0002384, Val Loss: 0.0002703 +2025-03-26 06:44:49,139 Epoch 1991/2000 +2025-03-26 06:45:53,876 Current Learning Rate: 0.0009950118 +2025-03-26 06:45:53,877 Train Loss: 0.0002382, Val Loss: 0.0002691 +2025-03-26 06:45:53,877 Epoch 1992/2000 +2025-03-26 06:46:59,162 Current Learning Rate: 0.0009960574 +2025-03-26 06:46:59,162 Train Loss: 0.0002376, Val Loss: 0.0002699 
+2025-03-26 06:46:59,163 Epoch 1993/2000 +2025-03-26 06:48:04,266 Current Learning Rate: 0.0009969805 +2025-03-26 06:48:04,266 Train Loss: 0.0002363, Val Loss: 0.0002679 +2025-03-26 06:48:04,267 Epoch 1994/2000 +2025-03-26 06:49:09,385 Current Learning Rate: 0.0009977810 +2025-03-26 06:49:09,386 Train Loss: 0.0002346, Val Loss: 0.0002679 +2025-03-26 06:49:09,386 Epoch 1995/2000 +2025-03-26 06:50:14,550 Current Learning Rate: 0.0009984587 +2025-03-26 06:50:14,550 Train Loss: 0.0002341, Val Loss: 0.0002683 +2025-03-26 06:50:14,550 Epoch 1996/2000 +2025-03-26 06:51:20,460 Current Learning Rate: 0.0009990134 +2025-03-26 06:51:20,461 Train Loss: 0.0002343, Val Loss: 0.0002688 +2025-03-26 06:51:20,461 Epoch 1997/2000 +2025-03-26 06:52:26,054 Current Learning Rate: 0.0009994449 +2025-03-26 06:52:26,054 Train Loss: 0.0002346, Val Loss: 0.0002693 +2025-03-26 06:52:26,054 Epoch 1998/2000 +2025-03-26 06:53:31,416 Current Learning Rate: 0.0009997533 +2025-03-26 06:53:31,417 Train Loss: 0.0002349, Val Loss: 0.0002696 +2025-03-26 06:53:31,417 Epoch 1999/2000 +2025-03-26 06:54:37,178 Current Learning Rate: 0.0009999383 +2025-03-26 06:54:37,178 Train Loss: 0.0002354, Val Loss: 0.0002709 +2025-03-26 06:54:37,178 Epoch 2000/2000 +2025-03-26 06:55:42,505 Current Learning Rate: 0.0010000000 +2025-03-26 06:55:42,506 Train Loss: 0.0002363, Val Loss: 0.0002714 +2025-03-26 06:55:46,553 Testing completed and best model saved. 
diff --git a/Exp3_Kuroshio_forecasting/logs/Triton_Gulf_uv_20250218_exp1_training_log.log b/Exp3_Kuroshio_forecasting/logs/Triton_Gulf_uv_20250218_exp1_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..ac67ce1b4167981c211f8ba40c68ef8338f8802d --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Triton_Gulf_uv_20250218_exp1_training_log.log @@ -0,0 +1,7914 @@ +2025-02-18 14:22:36,760 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 14:22:37,188 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 14:22:37,334 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 14:22:37,431 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 14:22:37,518 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 14:22:37,557 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 14:22:37,567 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 14:22:37,575 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 14:23:50,069 Epoch 1/2000 +2025-02-18 14:23:54,713 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:23:54,714 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 14:24:32,733 Current Learning Rate: 0.0099993832 +2025-02-18 14:24:33,650 Train Loss: 1.2713323, Val Loss: 0.1877115 +2025-02-18 14:24:33,650 Epoch 2/2000 +2025-02-18 14:25:15,291 Current Learning Rate: 0.0099975328 +2025-02-18 14:25:16,850 Train Loss: 0.0585225, Val Loss: 0.0228514 +2025-02-18 14:25:16,850 Epoch 3/2000 +2025-02-18 14:25:58,432 Current Learning Rate: 0.0099944494 +2025-02-18 14:26:00,215 Train Loss: 0.0233733, Val Loss: 0.0184301 +2025-02-18 14:26:00,215 Epoch 4/2000 +2025-02-18 14:26:41,860 Current Learning Rate: 0.0099901336 +2025-02-18 14:26:43,974 Train Loss: 0.0194500, Val Loss: 0.0175159 +2025-02-18 14:26:43,975 Epoch 5/2000 +2025-02-18 14:27:24,631 Current Learning Rate: 0.0099845867 +2025-02-18 14:27:26,016 Train Loss: 0.0187117, Val Loss: 0.0172202 +2025-02-18 14:27:26,017 Epoch 6/2000 +2025-02-18 14:28:07,928 Current Learning Rate: 0.0099778098 +2025-02-18 14:28:09,861 Train Loss: 0.0184526, Val Loss: 0.0170046 +2025-02-18 14:28:09,861 Epoch 7/2000 +2025-02-18 14:28:51,376 Current Learning Rate: 0.0099698048 +2025-02-18 14:28:52,975 Train Loss: 0.0182028, Val Loss: 0.0168065 +2025-02-18 14:28:52,976 Epoch 8/2000 +2025-02-18 14:29:34,082 Current Learning Rate: 0.0099605735 +2025-02-18 14:29:35,359 Train Loss: 0.0179927, Val Loss: 0.0166178 +2025-02-18 14:29:35,360 Epoch 9/2000 +2025-02-18 14:30:16,297 Current Learning Rate: 0.0099501183 +2025-02-18 14:30:17,559 Train Loss: 0.0178226, Val Loss: 0.0164603 +2025-02-18 14:30:17,560 Epoch 10/2000 +2025-02-18 14:30:58,714 Current Learning Rate: 0.0099384417 +2025-02-18 14:31:00,336 Train Loss: 0.0176944, Val Loss: 0.0163473 +2025-02-18 14:31:00,336 Epoch 11/2000 +2025-02-18 14:31:41,999 Current Learning Rate: 0.0099255466 +2025-02-18 14:31:44,022 Train Loss: 0.0175879, Val Loss: 0.0162707 +2025-02-18 14:31:44,023 Epoch 12/2000 +2025-02-18 14:32:25,628 Current Learning Rate: 0.0099114363 +2025-02-18 14:32:27,667 Train Loss: 0.0175217, Val Loss: 0.0162129 +2025-02-18 14:32:27,668 Epoch 
13/2000 +2025-02-18 14:33:09,222 Current Learning Rate: 0.0098961141 +2025-02-18 14:33:10,517 Train Loss: 0.0174747, Val Loss: 0.0161656 +2025-02-18 14:33:10,517 Epoch 14/2000 +2025-02-18 14:33:51,665 Current Learning Rate: 0.0098795838 +2025-02-18 14:33:53,007 Train Loss: 0.0174301, Val Loss: 0.0161140 +2025-02-18 14:33:53,007 Epoch 15/2000 +2025-02-18 14:34:34,373 Current Learning Rate: 0.0098618496 +2025-02-18 14:34:36,111 Train Loss: 0.0173940, Val Loss: 0.0160846 +2025-02-18 14:34:36,111 Epoch 16/2000 +2025-02-18 14:35:17,348 Current Learning Rate: 0.0098429158 +2025-02-18 14:35:18,768 Train Loss: 0.0173408, Val Loss: 0.0160427 +2025-02-18 14:35:18,769 Epoch 17/2000 +2025-02-18 14:36:00,132 Current Learning Rate: 0.0098227871 +2025-02-18 14:36:01,307 Train Loss: 0.0172985, Val Loss: 0.0160008 +2025-02-18 14:36:01,307 Epoch 18/2000 +2025-02-18 14:36:42,841 Current Learning Rate: 0.0098014684 +2025-02-18 14:36:44,251 Train Loss: 0.0172659, Val Loss: 0.0159679 +2025-02-18 14:36:44,251 Epoch 19/2000 +2025-02-18 14:37:25,715 Current Learning Rate: 0.0097789651 +2025-02-18 14:37:26,689 Train Loss: 0.0172262, Val Loss: 0.0159340 +2025-02-18 14:37:26,689 Epoch 20/2000 +2025-02-18 14:38:07,787 Current Learning Rate: 0.0097552826 +2025-02-18 14:38:09,042 Train Loss: 0.0171959, Val Loss: 0.0159043 +2025-02-18 14:38:09,042 Epoch 21/2000 +2025-02-18 14:38:49,984 Current Learning Rate: 0.0097304268 +2025-02-18 14:38:51,221 Train Loss: 0.0171638, Val Loss: 0.0158699 +2025-02-18 14:38:51,221 Epoch 22/2000 +2025-02-18 14:39:32,212 Current Learning Rate: 0.0097044038 +2025-02-18 14:39:33,362 Train Loss: 0.0171223, Val Loss: 0.0158384 +2025-02-18 14:39:33,362 Epoch 23/2000 +2025-02-18 14:40:14,368 Current Learning Rate: 0.0096772202 +2025-02-18 14:40:15,739 Train Loss: 0.0171020, Val Loss: 0.0158095 +2025-02-18 14:40:15,746 Epoch 24/2000 +2025-02-18 14:40:56,731 Current Learning Rate: 0.0096488824 +2025-02-18 14:40:58,747 Train Loss: 0.0170595, Val Loss: 0.0157796 +2025-02-18 
14:40:58,748 Epoch 25/2000 +2025-02-18 14:41:39,442 Current Learning Rate: 0.0096193977 +2025-02-18 14:41:40,694 Train Loss: 0.0170211, Val Loss: 0.0157428 +2025-02-18 14:41:40,694 Epoch 26/2000 +2025-02-18 14:42:21,620 Current Learning Rate: 0.0095887731 +2025-02-18 14:42:22,913 Train Loss: 0.0169861, Val Loss: 0.0157121 +2025-02-18 14:42:22,913 Epoch 27/2000 +2025-02-18 14:43:03,771 Current Learning Rate: 0.0095570164 +2025-02-18 14:43:05,093 Train Loss: 0.0169497, Val Loss: 0.0156837 +2025-02-18 14:43:05,093 Epoch 28/2000 +2025-02-18 14:43:46,687 Current Learning Rate: 0.0095241353 +2025-02-18 14:43:48,396 Train Loss: 0.0169167, Val Loss: 0.0156643 +2025-02-18 14:43:48,397 Epoch 29/2000 +2025-02-18 14:44:29,936 Current Learning Rate: 0.0094901379 +2025-02-18 14:44:31,106 Train Loss: 0.0168896, Val Loss: 0.0156279 +2025-02-18 14:44:31,106 Epoch 30/2000 +2025-02-18 14:45:12,239 Current Learning Rate: 0.0094550326 +2025-02-18 14:45:13,514 Train Loss: 0.0168502, Val Loss: 0.0155942 +2025-02-18 14:45:13,515 Epoch 31/2000 +2025-02-18 14:45:54,930 Current Learning Rate: 0.0094188282 +2025-02-18 14:45:56,163 Train Loss: 0.0168141, Val Loss: 0.0155678 +2025-02-18 14:45:56,163 Epoch 32/2000 +2025-02-18 14:46:37,373 Current Learning Rate: 0.0093815334 +2025-02-18 14:46:38,786 Train Loss: 0.0167845, Val Loss: 0.0155434 +2025-02-18 14:46:38,786 Epoch 33/2000 +2025-02-18 14:47:19,546 Current Learning Rate: 0.0093431576 +2025-02-18 14:47:20,932 Train Loss: 0.0167530, Val Loss: 0.0155105 +2025-02-18 14:47:20,932 Epoch 34/2000 +2025-02-18 14:48:02,776 Current Learning Rate: 0.0093037101 +2025-02-18 14:48:04,571 Train Loss: 0.0167198, Val Loss: 0.0154825 +2025-02-18 14:48:04,571 Epoch 35/2000 +2025-02-18 14:48:45,155 Current Learning Rate: 0.0092632008 +2025-02-18 14:48:46,210 Train Loss: 0.0166898, Val Loss: 0.0154535 +2025-02-18 14:48:46,210 Epoch 36/2000 +2025-02-18 14:49:27,239 Current Learning Rate: 0.0092216396 +2025-02-18 14:49:28,748 Train Loss: 0.0166454, Val Loss: 
0.0154188 +2025-02-18 14:49:28,749 Epoch 37/2000 +2025-02-18 14:50:10,390 Current Learning Rate: 0.0091790368 +2025-02-18 14:50:11,909 Train Loss: 0.0166134, Val Loss: 0.0153854 +2025-02-18 14:50:11,909 Epoch 38/2000 +2025-02-18 14:50:53,496 Current Learning Rate: 0.0091354029 +2025-02-18 14:50:54,748 Train Loss: 0.0165783, Val Loss: 0.0153603 +2025-02-18 14:50:54,750 Epoch 39/2000 +2025-02-18 14:51:36,147 Current Learning Rate: 0.0090907486 +2025-02-18 14:51:37,582 Train Loss: 0.0165472, Val Loss: 0.0153303 +2025-02-18 14:51:37,586 Epoch 40/2000 +2025-02-18 14:52:18,635 Current Learning Rate: 0.0090450850 +2025-02-18 14:52:20,024 Train Loss: 0.0165110, Val Loss: 0.0153032 +2025-02-18 14:52:20,024 Epoch 41/2000 +2025-02-18 14:53:01,140 Current Learning Rate: 0.0089984233 +2025-02-18 14:53:03,447 Train Loss: 0.0164806, Val Loss: 0.0152741 +2025-02-18 14:53:03,447 Epoch 42/2000 +2025-02-18 14:53:44,081 Current Learning Rate: 0.0089507751 +2025-02-18 14:53:45,293 Train Loss: 0.0164557, Val Loss: 0.0152447 +2025-02-18 14:53:45,293 Epoch 43/2000 +2025-02-18 14:54:26,193 Current Learning Rate: 0.0089021520 +2025-02-18 14:54:27,342 Train Loss: 0.0164259, Val Loss: 0.0152163 +2025-02-18 14:54:27,344 Epoch 44/2000 +2025-02-18 14:55:08,637 Current Learning Rate: 0.0088525662 +2025-02-18 14:55:09,938 Train Loss: 0.0163895, Val Loss: 0.0151869 +2025-02-18 14:55:09,938 Epoch 45/2000 +2025-02-18 14:55:50,853 Current Learning Rate: 0.0088020298 +2025-02-18 14:55:51,904 Train Loss: 0.0163617, Val Loss: 0.0151578 +2025-02-18 14:55:51,904 Epoch 46/2000 +2025-02-18 14:56:33,361 Current Learning Rate: 0.0087505553 +2025-02-18 14:56:34,839 Train Loss: 0.0163289, Val Loss: 0.0151264 +2025-02-18 14:56:34,839 Epoch 47/2000 +2025-02-18 14:57:16,179 Current Learning Rate: 0.0086981555 +2025-02-18 14:57:18,129 Train Loss: 0.0163045, Val Loss: 0.0150961 +2025-02-18 14:57:18,130 Epoch 48/2000 +2025-02-18 14:57:59,506 Current Learning Rate: 0.0086448431 +2025-02-18 14:58:00,920 Train Loss: 
0.0162678, Val Loss: 0.0150781 +2025-02-18 14:58:00,935 Epoch 49/2000 +2025-02-18 14:58:42,715 Current Learning Rate: 0.0085906315 +2025-02-18 14:58:43,887 Train Loss: 0.0162393, Val Loss: 0.0150428 +2025-02-18 14:58:43,888 Epoch 50/2000 +2025-02-18 14:59:25,216 Current Learning Rate: 0.0085355339 +2025-02-18 14:59:26,516 Train Loss: 0.0161828, Val Loss: 0.0149828 +2025-02-18 14:59:26,518 Epoch 51/2000 +2025-02-18 15:00:07,929 Current Learning Rate: 0.0084795640 +2025-02-18 15:00:09,319 Train Loss: 0.0160165, Val Loss: 0.0146608 +2025-02-18 15:00:09,319 Epoch 52/2000 +2025-02-18 15:00:50,506 Current Learning Rate: 0.0084227355 +2025-02-18 15:00:51,877 Train Loss: 0.0154740, Val Loss: 0.0140274 +2025-02-18 15:00:51,877 Epoch 53/2000 +2025-02-18 15:01:32,604 Current Learning Rate: 0.0083650626 +2025-02-18 15:01:34,621 Train Loss: 0.0147387, Val Loss: 0.0131640 +2025-02-18 15:01:34,622 Epoch 54/2000 +2025-02-18 15:02:16,254 Current Learning Rate: 0.0083065593 +2025-02-18 15:02:17,701 Train Loss: 0.0138583, Val Loss: 0.0124248 +2025-02-18 15:02:17,703 Epoch 55/2000 +2025-02-18 15:02:59,285 Current Learning Rate: 0.0082472402 +2025-02-18 15:03:01,246 Train Loss: 0.0123703, Val Loss: 0.0105863 +2025-02-18 15:03:01,246 Epoch 56/2000 +2025-02-18 15:03:41,906 Current Learning Rate: 0.0081871199 +2025-02-18 15:03:43,092 Train Loss: 0.0110420, Val Loss: 0.0094930 +2025-02-18 15:03:43,093 Epoch 57/2000 +2025-02-18 15:04:24,696 Current Learning Rate: 0.0081262133 +2025-02-18 15:04:26,271 Train Loss: 0.0101265, Val Loss: 0.0088337 +2025-02-18 15:04:26,288 Epoch 58/2000 +2025-02-18 15:05:06,953 Current Learning Rate: 0.0080645353 +2025-02-18 15:05:08,107 Train Loss: 0.0093373, Val Loss: 0.0085233 +2025-02-18 15:05:08,107 Epoch 59/2000 +2025-02-18 15:05:49,330 Current Learning Rate: 0.0080021011 +2025-02-18 15:05:50,968 Train Loss: 0.0090224, Val Loss: 0.0078853 +2025-02-18 15:05:50,968 Epoch 60/2000 +2025-02-18 15:06:32,030 Current Learning Rate: 0.0079389263 +2025-02-18 
15:06:33,531 Train Loss: 0.0081421, Val Loss: 0.0070970 +2025-02-18 15:06:33,531 Epoch 61/2000 +2025-02-18 15:07:15,057 Current Learning Rate: 0.0078750263 +2025-02-18 15:07:16,865 Train Loss: 0.0078801, Val Loss: 0.0069114 +2025-02-18 15:07:16,866 Epoch 62/2000 +2025-02-18 15:07:58,385 Current Learning Rate: 0.0078104169 +2025-02-18 15:07:59,752 Train Loss: 0.0075112, Val Loss: 0.0065039 +2025-02-18 15:07:59,752 Epoch 63/2000 +2025-02-18 15:08:40,991 Current Learning Rate: 0.0077451141 +2025-02-18 15:08:42,710 Train Loss: 0.0069967, Val Loss: 0.0062146 +2025-02-18 15:08:42,711 Epoch 64/2000 +2025-02-18 15:09:23,984 Current Learning Rate: 0.0076791340 +2025-02-18 15:09:25,030 Train Loss: 0.0068503, Val Loss: 0.0061507 +2025-02-18 15:09:25,031 Epoch 65/2000 +2025-02-18 15:10:06,356 Current Learning Rate: 0.0076124928 +2025-02-18 15:10:07,746 Train Loss: 0.0064475, Val Loss: 0.0057761 +2025-02-18 15:10:07,747 Epoch 66/2000 +2025-02-18 15:10:48,867 Current Learning Rate: 0.0075452071 +2025-02-18 15:10:50,385 Train Loss: 0.0060769, Val Loss: 0.0054408 +2025-02-18 15:10:50,390 Epoch 67/2000 +2025-02-18 15:11:32,094 Current Learning Rate: 0.0074772933 +2025-02-18 15:11:34,181 Train Loss: 0.0059078, Val Loss: 0.0053517 +2025-02-18 15:11:34,181 Epoch 68/2000 +2025-02-18 15:12:15,050 Current Learning Rate: 0.0074087684 +2025-02-18 15:12:16,753 Train Loss: 0.0056297, Val Loss: 0.0050737 +2025-02-18 15:12:16,753 Epoch 69/2000 +2025-02-18 15:12:58,249 Current Learning Rate: 0.0073396491 +2025-02-18 15:13:00,021 Train Loss: 0.0056451, Val Loss: 0.0049415 +2025-02-18 15:13:00,022 Epoch 70/2000 +2025-02-18 15:13:41,457 Current Learning Rate: 0.0072699525 +2025-02-18 15:13:42,944 Train Loss: 0.0053579, Val Loss: 0.0048111 +2025-02-18 15:13:42,945 Epoch 71/2000 +2025-02-18 15:14:24,154 Current Learning Rate: 0.0071996958 +2025-02-18 15:14:24,155 Train Loss: 0.0052464, Val Loss: 0.0048742 +2025-02-18 15:14:24,155 Epoch 72/2000 +2025-02-18 15:15:06,525 Current Learning Rate: 
0.0071288965 +2025-02-18 15:15:08,155 Train Loss: 0.0051129, Val Loss: 0.0046838 +2025-02-18 15:15:08,155 Epoch 73/2000 +2025-02-18 15:15:48,998 Current Learning Rate: 0.0070575718 +2025-02-18 15:15:50,599 Train Loss: 0.0049464, Val Loss: 0.0045638 +2025-02-18 15:15:50,600 Epoch 74/2000 +2025-02-18 15:16:31,815 Current Learning Rate: 0.0069857395 +2025-02-18 15:16:33,325 Train Loss: 0.0050081, Val Loss: 0.0045385 +2025-02-18 15:16:33,325 Epoch 75/2000 +2025-02-18 15:17:15,002 Current Learning Rate: 0.0069134172 +2025-02-18 15:17:16,714 Train Loss: 0.0048070, Val Loss: 0.0044704 +2025-02-18 15:17:16,715 Epoch 76/2000 +2025-02-18 15:17:57,464 Current Learning Rate: 0.0068406228 +2025-02-18 15:17:58,519 Train Loss: 0.0047897, Val Loss: 0.0043314 +2025-02-18 15:17:58,519 Epoch 77/2000 +2025-02-18 15:18:39,679 Current Learning Rate: 0.0067673742 +2025-02-18 15:18:40,795 Train Loss: 0.0048592, Val Loss: 0.0042489 +2025-02-18 15:18:40,796 Epoch 78/2000 +2025-02-18 15:19:22,456 Current Learning Rate: 0.0066936896 +2025-02-18 15:19:24,376 Train Loss: 0.0048049, Val Loss: 0.0042023 +2025-02-18 15:19:24,376 Epoch 79/2000 +2025-02-18 15:20:05,044 Current Learning Rate: 0.0066195871 +2025-02-18 15:20:06,212 Train Loss: 0.0045723, Val Loss: 0.0041746 +2025-02-18 15:20:06,219 Epoch 80/2000 +2025-02-18 15:20:47,438 Current Learning Rate: 0.0065450850 +2025-02-18 15:20:48,457 Train Loss: 0.0044051, Val Loss: 0.0041263 +2025-02-18 15:20:48,458 Epoch 81/2000 +2025-02-18 15:21:29,466 Current Learning Rate: 0.0064702016 +2025-02-18 15:21:30,749 Train Loss: 0.0043537, Val Loss: 0.0040353 +2025-02-18 15:21:30,749 Epoch 82/2000 +2025-02-18 15:22:11,845 Current Learning Rate: 0.0063949555 +2025-02-18 15:22:11,846 Train Loss: 0.0043434, Val Loss: 0.0044553 +2025-02-18 15:22:11,846 Epoch 83/2000 +2025-02-18 15:22:53,875 Current Learning Rate: 0.0063193652 +2025-02-18 15:22:55,107 Train Loss: 0.0041930, Val Loss: 0.0039030 +2025-02-18 15:22:55,108 Epoch 84/2000 +2025-02-18 15:23:36,401 
Current Learning Rate: 0.0062434494 +2025-02-18 15:23:37,708 Train Loss: 0.0041489, Val Loss: 0.0038140 +2025-02-18 15:23:37,714 Epoch 85/2000 +2025-02-18 15:24:18,859 Current Learning Rate: 0.0061672268 +2025-02-18 15:24:20,611 Train Loss: 0.0042178, Val Loss: 0.0037432 +2025-02-18 15:24:20,614 Epoch 86/2000 +2025-02-18 15:25:02,294 Current Learning Rate: 0.0060907162 +2025-02-18 15:25:04,541 Train Loss: 0.0039734, Val Loss: 0.0036961 +2025-02-18 15:25:04,541 Epoch 87/2000 +2025-02-18 15:25:46,072 Current Learning Rate: 0.0060139365 +2025-02-18 15:25:47,737 Train Loss: 0.0041091, Val Loss: 0.0036200 +2025-02-18 15:25:47,737 Epoch 88/2000 +2025-02-18 15:26:29,289 Current Learning Rate: 0.0059369066 +2025-02-18 15:26:31,319 Train Loss: 0.0037831, Val Loss: 0.0035179 +2025-02-18 15:26:31,321 Epoch 89/2000 +2025-02-18 15:27:12,933 Current Learning Rate: 0.0058596455 +2025-02-18 15:27:14,854 Train Loss: 0.0036041, Val Loss: 0.0034365 +2025-02-18 15:27:14,854 Epoch 90/2000 +2025-02-18 15:27:56,338 Current Learning Rate: 0.0057821723 +2025-02-18 15:27:57,672 Train Loss: 0.0035541, Val Loss: 0.0033922 +2025-02-18 15:27:57,672 Epoch 91/2000 +2025-02-18 15:28:38,584 Current Learning Rate: 0.0057045062 +2025-02-18 15:28:39,892 Train Loss: 0.0036483, Val Loss: 0.0033826 +2025-02-18 15:28:39,892 Epoch 92/2000 +2025-02-18 15:29:20,897 Current Learning Rate: 0.0056266662 +2025-02-18 15:29:21,992 Train Loss: 0.0034998, Val Loss: 0.0033098 +2025-02-18 15:29:21,992 Epoch 93/2000 +2025-02-18 15:30:03,529 Current Learning Rate: 0.0055486716 +2025-02-18 15:30:03,530 Train Loss: 0.0035329, Val Loss: 0.0034519 +2025-02-18 15:30:03,530 Epoch 94/2000 +2025-02-18 15:30:45,819 Current Learning Rate: 0.0054705416 +2025-02-18 15:30:47,627 Train Loss: 0.0034917, Val Loss: 0.0032451 +2025-02-18 15:30:47,627 Epoch 95/2000 +2025-02-18 15:31:29,015 Current Learning Rate: 0.0053922955 +2025-02-18 15:31:30,408 Train Loss: 0.0032342, Val Loss: 0.0032376 +2025-02-18 15:31:30,409 Epoch 96/2000 
+2025-02-18 15:32:11,798 Current Learning Rate: 0.0053139526 +2025-02-18 15:32:13,357 Train Loss: 0.0036386, Val Loss: 0.0031401 +2025-02-18 15:32:13,357 Epoch 97/2000 +2025-02-18 15:32:54,163 Current Learning Rate: 0.0052355323 +2025-02-18 15:32:55,492 Train Loss: 0.0032044, Val Loss: 0.0030463 +2025-02-18 15:32:55,492 Epoch 98/2000 +2025-02-18 15:33:36,696 Current Learning Rate: 0.0051570538 +2025-02-18 15:33:36,697 Train Loss: 0.0032658, Val Loss: 0.0030497 +2025-02-18 15:33:36,697 Epoch 99/2000 +2025-02-18 15:34:18,621 Current Learning Rate: 0.0050785366 +2025-02-18 15:34:19,711 Train Loss: 0.0031274, Val Loss: 0.0029924 +2025-02-18 15:34:19,711 Epoch 100/2000 +2025-02-18 15:35:00,521 Current Learning Rate: 0.0050000000 +2025-02-18 15:35:01,539 Train Loss: 0.0030784, Val Loss: 0.0029593 +2025-02-18 15:35:01,539 Epoch 101/2000 +2025-02-18 15:35:42,646 Current Learning Rate: 0.0049214634 +2025-02-18 15:35:43,774 Train Loss: 0.0030786, Val Loss: 0.0028907 +2025-02-18 15:35:43,775 Epoch 102/2000 +2025-02-18 15:36:24,739 Current Learning Rate: 0.0048429462 +2025-02-18 15:36:26,379 Train Loss: 0.0030722, Val Loss: 0.0028884 +2025-02-18 15:36:26,379 Epoch 103/2000 +2025-02-18 15:37:07,219 Current Learning Rate: 0.0047644677 +2025-02-18 15:37:08,517 Train Loss: 0.0029191, Val Loss: 0.0028590 +2025-02-18 15:37:08,518 Epoch 104/2000 +2025-02-18 15:37:50,333 Current Learning Rate: 0.0046860474 +2025-02-18 15:37:50,334 Train Loss: 0.0031066, Val Loss: 0.0029825 +2025-02-18 15:37:50,334 Epoch 105/2000 +2025-02-18 15:38:32,212 Current Learning Rate: 0.0046077045 +2025-02-18 15:38:32,213 Train Loss: 0.0030302, Val Loss: 0.0029459 +2025-02-18 15:38:32,214 Epoch 106/2000 +2025-02-18 15:39:14,436 Current Learning Rate: 0.0045294584 +2025-02-18 15:39:14,436 Train Loss: 0.0030785, Val Loss: 0.0028987 +2025-02-18 15:39:14,436 Epoch 107/2000 +2025-02-18 15:39:56,111 Current Learning Rate: 0.0044513284 +2025-02-18 15:39:56,111 Train Loss: 0.0028277, Val Loss: 0.0028687 +2025-02-18 
15:39:56,111 Epoch 108/2000 +2025-02-18 15:40:38,153 Current Learning Rate: 0.0043733338 +2025-02-18 15:40:40,161 Train Loss: 0.0027936, Val Loss: 0.0027492 +2025-02-18 15:40:40,162 Epoch 109/2000 +2025-02-18 15:41:21,853 Current Learning Rate: 0.0042954938 +2025-02-18 15:41:23,302 Train Loss: 0.0030450, Val Loss: 0.0027433 +2025-02-18 15:41:23,303 Epoch 110/2000 +2025-02-18 15:42:05,034 Current Learning Rate: 0.0042178277 +2025-02-18 15:42:06,519 Train Loss: 0.0029646, Val Loss: 0.0026839 +2025-02-18 15:42:06,519 Epoch 111/2000 +2025-02-18 15:42:48,546 Current Learning Rate: 0.0041403545 +2025-02-18 15:42:50,367 Train Loss: 0.0026406, Val Loss: 0.0026818 +2025-02-18 15:42:50,368 Epoch 112/2000 +2025-02-18 15:43:31,376 Current Learning Rate: 0.0040630934 +2025-02-18 15:43:33,111 Train Loss: 0.0025529, Val Loss: 0.0026616 +2025-02-18 15:43:33,112 Epoch 113/2000 +2025-02-18 15:44:14,086 Current Learning Rate: 0.0039860635 +2025-02-18 15:44:14,087 Train Loss: 0.0027606, Val Loss: 0.0027820 +2025-02-18 15:44:14,087 Epoch 114/2000 +2025-02-18 15:44:56,303 Current Learning Rate: 0.0039092838 +2025-02-18 15:44:56,304 Train Loss: 0.0027141, Val Loss: 0.0026850 +2025-02-18 15:44:56,304 Epoch 115/2000 +2025-02-18 15:45:38,043 Current Learning Rate: 0.0038327732 +2025-02-18 15:45:39,218 Train Loss: 0.0032804, Val Loss: 0.0026386 +2025-02-18 15:45:39,218 Epoch 116/2000 +2025-02-18 15:46:20,682 Current Learning Rate: 0.0037565506 +2025-02-18 15:46:22,058 Train Loss: 0.0027418, Val Loss: 0.0025669 +2025-02-18 15:46:22,059 Epoch 117/2000 +2025-02-18 15:47:03,022 Current Learning Rate: 0.0036806348 +2025-02-18 15:47:04,446 Train Loss: 0.0027382, Val Loss: 0.0025666 +2025-02-18 15:47:04,446 Epoch 118/2000 +2025-02-18 15:47:45,799 Current Learning Rate: 0.0036050445 +2025-02-18 15:47:47,381 Train Loss: 0.0025141, Val Loss: 0.0024873 +2025-02-18 15:47:47,381 Epoch 119/2000 +2025-02-18 15:48:29,108 Current Learning Rate: 0.0035297984 +2025-02-18 15:48:30,567 Train Loss: 0.0027708, Val 
Loss: 0.0024813 +2025-02-18 15:48:30,567 Epoch 120/2000 +2025-02-18 15:49:11,955 Current Learning Rate: 0.0034549150 +2025-02-18 15:49:11,956 Train Loss: 0.0024446, Val Loss: 0.0025053 +2025-02-18 15:49:11,956 Epoch 121/2000 +2025-02-18 15:49:53,921 Current Learning Rate: 0.0033804129 +2025-02-18 15:49:53,922 Train Loss: 0.0025420, Val Loss: 0.0025618 +2025-02-18 15:49:53,922 Epoch 122/2000 +2025-02-18 15:50:35,701 Current Learning Rate: 0.0033063104 +2025-02-18 15:50:35,702 Train Loss: 0.0025295, Val Loss: 0.0025013 +2025-02-18 15:50:35,702 Epoch 123/2000 +2025-02-18 15:51:17,604 Current Learning Rate: 0.0032326258 +2025-02-18 15:51:18,912 Train Loss: 0.0023033, Val Loss: 0.0024400 +2025-02-18 15:51:18,912 Epoch 124/2000 +2025-02-18 15:52:00,121 Current Learning Rate: 0.0031593772 +2025-02-18 15:52:01,627 Train Loss: 0.0024821, Val Loss: 0.0024185 +2025-02-18 15:52:01,628 Epoch 125/2000 +2025-02-18 15:52:42,494 Current Learning Rate: 0.0030865828 +2025-02-18 15:52:43,678 Train Loss: 0.0024515, Val Loss: 0.0023806 +2025-02-18 15:52:43,678 Epoch 126/2000 +2025-02-18 15:53:25,488 Current Learning Rate: 0.0030142605 +2025-02-18 15:53:26,877 Train Loss: 0.0022217, Val Loss: 0.0023296 +2025-02-18 15:53:26,878 Epoch 127/2000 +2025-02-18 15:54:08,391 Current Learning Rate: 0.0029424282 +2025-02-18 15:54:09,570 Train Loss: 0.0023471, Val Loss: 0.0023265 +2025-02-18 15:54:09,570 Epoch 128/2000 +2025-02-18 15:54:50,457 Current Learning Rate: 0.0028711035 +2025-02-18 15:54:50,458 Train Loss: 0.0023360, Val Loss: 0.0023666 +2025-02-18 15:54:50,458 Epoch 129/2000 +2025-02-18 15:55:32,442 Current Learning Rate: 0.0028003042 +2025-02-18 15:55:32,443 Train Loss: 0.0026158, Val Loss: 0.0023867 +2025-02-18 15:55:32,443 Epoch 130/2000 +2025-02-18 15:56:14,603 Current Learning Rate: 0.0027300475 +2025-02-18 15:56:15,674 Train Loss: 0.0021960, Val Loss: 0.0022482 +2025-02-18 15:56:15,674 Epoch 131/2000 +2025-02-18 15:56:57,363 Current Learning Rate: 0.0026603509 +2025-02-18 
15:56:58,765 Train Loss: 0.0023145, Val Loss: 0.0022383 +2025-02-18 15:56:58,766 Epoch 132/2000 +2025-02-18 15:57:39,678 Current Learning Rate: 0.0025912316 +2025-02-18 15:57:40,801 Train Loss: 0.0021741, Val Loss: 0.0022179 +2025-02-18 15:57:40,801 Epoch 133/2000 +2025-02-18 15:58:22,036 Current Learning Rate: 0.0025227067 +2025-02-18 15:58:22,037 Train Loss: 0.0024336, Val Loss: 0.0022363 +2025-02-18 15:58:22,037 Epoch 134/2000 +2025-02-18 15:59:03,940 Current Learning Rate: 0.0024547929 +2025-02-18 15:59:03,940 Train Loss: 0.0023936, Val Loss: 0.0022878 +2025-02-18 15:59:03,940 Epoch 135/2000 +2025-02-18 15:59:45,966 Current Learning Rate: 0.0023875072 +2025-02-18 15:59:47,042 Train Loss: 0.0021375, Val Loss: 0.0021983 +2025-02-18 15:59:47,043 Epoch 136/2000 +2025-02-18 16:00:28,800 Current Learning Rate: 0.0023208660 +2025-02-18 16:00:30,358 Train Loss: 0.0024073, Val Loss: 0.0021722 +2025-02-18 16:00:30,359 Epoch 137/2000 +2025-02-18 16:01:11,704 Current Learning Rate: 0.0022548859 +2025-02-18 16:01:13,140 Train Loss: 0.0020079, Val Loss: 0.0021674 +2025-02-18 16:01:13,141 Epoch 138/2000 +2025-02-18 16:01:54,091 Current Learning Rate: 0.0021895831 +2025-02-18 16:01:54,092 Train Loss: 0.0021660, Val Loss: 0.0021771 +2025-02-18 16:01:54,092 Epoch 139/2000 +2025-02-18 16:02:36,519 Current Learning Rate: 0.0021249737 +2025-02-18 16:02:38,076 Train Loss: 0.0021857, Val Loss: 0.0021324 +2025-02-18 16:02:38,078 Epoch 140/2000 +2025-02-18 16:03:19,909 Current Learning Rate: 0.0020610737 +2025-02-18 16:03:21,908 Train Loss: 0.0019805, Val Loss: 0.0021122 +2025-02-18 16:03:21,908 Epoch 141/2000 +2025-02-18 16:04:03,552 Current Learning Rate: 0.0019978989 +2025-02-18 16:04:03,553 Train Loss: 0.0020986, Val Loss: 0.0021461 +2025-02-18 16:04:03,553 Epoch 142/2000 +2025-02-18 16:04:45,085 Current Learning Rate: 0.0019354647 +2025-02-18 16:04:46,576 Train Loss: 0.0020708, Val Loss: 0.0020922 +2025-02-18 16:04:46,576 Epoch 143/2000 +2025-02-18 16:05:27,690 Current Learning 
Rate: 0.0018737867 +2025-02-18 16:05:29,271 Train Loss: 0.0021875, Val Loss: 0.0020832 +2025-02-18 16:05:29,272 Epoch 144/2000 +2025-02-18 16:06:10,499 Current Learning Rate: 0.0018128801 +2025-02-18 16:06:10,500 Train Loss: 0.0019904, Val Loss: 0.0020860 +2025-02-18 16:06:10,501 Epoch 145/2000 +2025-02-18 16:06:52,348 Current Learning Rate: 0.0017527598 +2025-02-18 16:06:52,348 Train Loss: 0.0022660, Val Loss: 0.0020959 +2025-02-18 16:06:52,349 Epoch 146/2000 +2025-02-18 16:07:34,636 Current Learning Rate: 0.0016934407 +2025-02-18 16:07:36,213 Train Loss: 0.0019734, Val Loss: 0.0020127 +2025-02-18 16:07:36,213 Epoch 147/2000 +2025-02-18 16:08:16,973 Current Learning Rate: 0.0016349374 +2025-02-18 16:08:18,032 Train Loss: 0.0020415, Val Loss: 0.0020037 +2025-02-18 16:08:18,033 Epoch 148/2000 +2025-02-18 16:08:59,706 Current Learning Rate: 0.0015772645 +2025-02-18 16:08:59,707 Train Loss: 0.0020007, Val Loss: 0.0020610 +2025-02-18 16:08:59,708 Epoch 149/2000 +2025-02-18 16:09:42,195 Current Learning Rate: 0.0015204360 +2025-02-18 16:09:42,196 Train Loss: 0.0019330, Val Loss: 0.0020208 +2025-02-18 16:09:42,196 Epoch 150/2000 +2025-02-18 16:10:23,732 Current Learning Rate: 0.0014644661 +2025-02-18 16:10:23,732 Train Loss: 0.0020180, Val Loss: 0.0020590 +2025-02-18 16:10:23,732 Epoch 151/2000 +2025-02-18 16:11:05,828 Current Learning Rate: 0.0014093685 +2025-02-18 16:11:05,829 Train Loss: 0.0022267, Val Loss: 0.0021456 +2025-02-18 16:11:05,829 Epoch 152/2000 +2025-02-18 16:11:47,789 Current Learning Rate: 0.0013551569 +2025-02-18 16:11:49,121 Train Loss: 0.0020824, Val Loss: 0.0019915 +2025-02-18 16:11:49,121 Epoch 153/2000 +2025-02-18 16:12:30,141 Current Learning Rate: 0.0013018445 +2025-02-18 16:12:30,141 Train Loss: 0.0021292, Val Loss: 0.0020764 +2025-02-18 16:12:30,142 Epoch 154/2000 +2025-02-18 16:13:12,438 Current Learning Rate: 0.0012494447 +2025-02-18 16:13:12,439 Train Loss: 0.0018767, Val Loss: 0.0020179 +2025-02-18 16:13:12,439 Epoch 155/2000 +2025-02-18 
16:13:54,349 Current Learning Rate: 0.0011979702 +2025-02-18 16:13:56,160 Train Loss: 0.0017822, Val Loss: 0.0019867 +2025-02-18 16:13:56,161 Epoch 156/2000 +2025-02-18 16:14:36,983 Current Learning Rate: 0.0011474338 +2025-02-18 16:14:38,550 Train Loss: 0.0018670, Val Loss: 0.0019719 +2025-02-18 16:14:38,550 Epoch 157/2000 +2025-02-18 16:15:19,235 Current Learning Rate: 0.0010978480 +2025-02-18 16:15:20,447 Train Loss: 0.0018423, Val Loss: 0.0019534 +2025-02-18 16:15:20,447 Epoch 158/2000 +2025-02-18 16:16:01,269 Current Learning Rate: 0.0010492249 +2025-02-18 16:16:01,269 Train Loss: 0.0019433, Val Loss: 0.0019592 +2025-02-18 16:16:01,269 Epoch 159/2000 +2025-02-18 16:16:43,208 Current Learning Rate: 0.0010015767 +2025-02-18 16:16:44,326 Train Loss: 0.0019110, Val Loss: 0.0018985 +2025-02-18 16:16:44,327 Epoch 160/2000 +2025-02-18 16:17:25,787 Current Learning Rate: 0.0009549150 +2025-02-18 16:17:25,789 Train Loss: 0.0019821, Val Loss: 0.0019202 +2025-02-18 16:17:25,789 Epoch 161/2000 +2025-02-18 16:18:07,920 Current Learning Rate: 0.0009092514 +2025-02-18 16:18:09,567 Train Loss: 0.0019917, Val Loss: 0.0018956 +2025-02-18 16:18:09,567 Epoch 162/2000 +2025-02-18 16:18:50,830 Current Learning Rate: 0.0008645971 +2025-02-18 16:18:50,831 Train Loss: 0.0020351, Val Loss: 0.0019139 +2025-02-18 16:18:50,831 Epoch 163/2000 +2025-02-18 16:19:33,525 Current Learning Rate: 0.0008209632 +2025-02-18 16:19:35,190 Train Loss: 0.0020490, Val Loss: 0.0018806 +2025-02-18 16:19:35,190 Epoch 164/2000 +2025-02-18 16:20:16,704 Current Learning Rate: 0.0007783604 +2025-02-18 16:20:18,100 Train Loss: 0.0019353, Val Loss: 0.0018786 +2025-02-18 16:20:18,100 Epoch 165/2000 +2025-02-18 16:20:59,077 Current Learning Rate: 0.0007367992 +2025-02-18 16:21:00,358 Train Loss: 0.0019194, Val Loss: 0.0018391 +2025-02-18 16:21:00,358 Epoch 166/2000 +2025-02-18 16:21:41,686 Current Learning Rate: 0.0006962899 +2025-02-18 16:21:43,052 Train Loss: 0.0019329, Val Loss: 0.0018355 +2025-02-18 
16:21:43,053 Epoch 167/2000 +2025-02-18 16:22:24,475 Current Learning Rate: 0.0006568424 +2025-02-18 16:22:25,998 Train Loss: 0.0017932, Val Loss: 0.0018320 +2025-02-18 16:22:25,999 Epoch 168/2000 +2025-02-18 16:23:07,188 Current Learning Rate: 0.0006184666 +2025-02-18 16:23:08,254 Train Loss: 0.0017510, Val Loss: 0.0018238 +2025-02-18 16:23:08,256 Epoch 169/2000 +2025-02-18 16:23:50,196 Current Learning Rate: 0.0005811718 +2025-02-18 16:23:52,052 Train Loss: 0.0018346, Val Loss: 0.0018201 +2025-02-18 16:23:52,052 Epoch 170/2000 +2025-02-18 16:24:32,913 Current Learning Rate: 0.0005449674 +2025-02-18 16:24:34,332 Train Loss: 0.0016690, Val Loss: 0.0018113 +2025-02-18 16:24:34,333 Epoch 171/2000 +2025-02-18 16:25:15,558 Current Learning Rate: 0.0005098621 +2025-02-18 16:25:16,570 Train Loss: 0.0019035, Val Loss: 0.0018066 +2025-02-18 16:25:16,570 Epoch 172/2000 +2025-02-18 16:25:58,071 Current Learning Rate: 0.0004758647 +2025-02-18 16:26:00,026 Train Loss: 0.0018223, Val Loss: 0.0017994 +2025-02-18 16:26:00,026 Epoch 173/2000 +2025-02-18 16:26:41,622 Current Learning Rate: 0.0004429836 +2025-02-18 16:26:43,608 Train Loss: 0.0017544, Val Loss: 0.0017920 +2025-02-18 16:26:43,610 Epoch 174/2000 +2025-02-18 16:27:24,820 Current Learning Rate: 0.0004112269 +2025-02-18 16:27:26,035 Train Loss: 0.0017483, Val Loss: 0.0017863 +2025-02-18 16:27:26,036 Epoch 175/2000 +2025-02-18 16:28:07,685 Current Learning Rate: 0.0003806023 +2025-02-18 16:28:07,686 Train Loss: 0.0018096, Val Loss: 0.0017879 +2025-02-18 16:28:07,687 Epoch 176/2000 +2025-02-18 16:28:49,557 Current Learning Rate: 0.0003511176 +2025-02-18 16:28:50,811 Train Loss: 0.0016585, Val Loss: 0.0017798 +2025-02-18 16:28:50,813 Epoch 177/2000 +2025-02-18 16:29:32,530 Current Learning Rate: 0.0003227798 +2025-02-18 16:29:34,043 Train Loss: 0.0017152, Val Loss: 0.0017755 +2025-02-18 16:29:34,044 Epoch 178/2000 +2025-02-18 16:30:15,136 Current Learning Rate: 0.0002955962 +2025-02-18 16:30:16,612 Train Loss: 0.0017738, Val 
Loss: 0.0017746 +2025-02-18 16:30:16,612 Epoch 179/2000 +2025-02-18 16:30:58,134 Current Learning Rate: 0.0002695732 +2025-02-18 16:30:59,486 Train Loss: 0.0018169, Val Loss: 0.0017714 +2025-02-18 16:30:59,487 Epoch 180/2000 +2025-02-18 16:31:40,370 Current Learning Rate: 0.0002447174 +2025-02-18 16:31:40,371 Train Loss: 0.0018590, Val Loss: 0.0017750 +2025-02-18 16:31:40,371 Epoch 181/2000 +2025-02-18 16:32:22,552 Current Learning Rate: 0.0002210349 +2025-02-18 16:32:23,522 Train Loss: 0.0017238, Val Loss: 0.0017668 +2025-02-18 16:32:23,522 Epoch 182/2000 +2025-02-18 16:33:04,571 Current Learning Rate: 0.0001985316 +2025-02-18 16:33:05,561 Train Loss: 0.0017680, Val Loss: 0.0017596 +2025-02-18 16:33:05,561 Epoch 183/2000 +2025-02-18 16:33:46,823 Current Learning Rate: 0.0001772129 +2025-02-18 16:33:47,656 Train Loss: 0.0019999, Val Loss: 0.0017549 +2025-02-18 16:33:47,657 Epoch 184/2000 +2025-02-18 16:34:29,127 Current Learning Rate: 0.0001570842 +2025-02-18 16:34:30,034 Train Loss: 0.0016876, Val Loss: 0.0017536 +2025-02-18 16:34:30,036 Epoch 185/2000 +2025-02-18 16:35:11,420 Current Learning Rate: 0.0001381504 +2025-02-18 16:35:12,337 Train Loss: 0.0017630, Val Loss: 0.0017517 +2025-02-18 16:35:12,339 Epoch 186/2000 +2025-02-18 16:35:53,772 Current Learning Rate: 0.0001204162 +2025-02-18 16:35:55,403 Train Loss: 0.0015955, Val Loss: 0.0017502 +2025-02-18 16:35:55,405 Epoch 187/2000 +2025-02-18 16:36:36,806 Current Learning Rate: 0.0001038859 +2025-02-18 16:36:38,637 Train Loss: 0.0019637, Val Loss: 0.0017484 +2025-02-18 16:36:38,638 Epoch 188/2000 +2025-02-18 16:37:19,585 Current Learning Rate: 0.0000885637 +2025-02-18 16:37:20,932 Train Loss: 0.0017018, Val Loss: 0.0017462 +2025-02-18 16:37:20,932 Epoch 189/2000 +2025-02-18 16:38:02,710 Current Learning Rate: 0.0000744534 +2025-02-18 16:38:05,043 Train Loss: 0.0017627, Val Loss: 0.0017446 +2025-02-18 16:38:05,043 Epoch 190/2000 +2025-02-18 16:38:46,741 Current Learning Rate: 0.0000615583 +2025-02-18 
16:38:48,659 Train Loss: 0.0016332, Val Loss: 0.0017433 +2025-02-18 16:38:48,660 Epoch 191/2000 +2025-02-18 16:39:30,267 Current Learning Rate: 0.0000498817 +2025-02-18 16:39:31,988 Train Loss: 0.0017502, Val Loss: 0.0017424 +2025-02-18 16:39:31,988 Epoch 192/2000 +2025-02-18 16:40:13,372 Current Learning Rate: 0.0000394265 +2025-02-18 16:40:14,814 Train Loss: 0.0017487, Val Loss: 0.0017416 +2025-02-18 16:40:14,814 Epoch 193/2000 +2025-02-18 16:40:55,669 Current Learning Rate: 0.0000301952 +2025-02-18 16:40:56,878 Train Loss: 0.0016640, Val Loss: 0.0017413 +2025-02-18 16:40:56,878 Epoch 194/2000 +2025-02-18 16:41:38,048 Current Learning Rate: 0.0000221902 +2025-02-18 16:41:39,473 Train Loss: 0.0016934, Val Loss: 0.0017409 +2025-02-18 16:41:39,473 Epoch 195/2000 +2025-02-18 16:42:20,581 Current Learning Rate: 0.0000154133 +2025-02-18 16:42:20,582 Train Loss: 0.0016525, Val Loss: 0.0017411 +2025-02-18 16:42:20,582 Epoch 196/2000 +2025-02-18 16:43:02,785 Current Learning Rate: 0.0000098664 +2025-02-18 16:43:02,786 Train Loss: 0.0017709, Val Loss: 0.0017421 +2025-02-18 16:43:02,786 Epoch 197/2000 +2025-02-18 16:43:45,299 Current Learning Rate: 0.0000055506 +2025-02-18 16:43:45,300 Train Loss: 0.0017882, Val Loss: 0.0017412 +2025-02-18 16:43:45,300 Epoch 198/2000 +2025-02-18 16:44:27,850 Current Learning Rate: 0.0000024672 +2025-02-18 16:44:27,850 Train Loss: 0.0018726, Val Loss: 0.0017412 +2025-02-18 16:44:27,851 Epoch 199/2000 +2025-02-18 16:45:09,604 Current Learning Rate: 0.0000006168 +2025-02-18 16:45:09,604 Train Loss: 0.0016690, Val Loss: 0.0017410 +2025-02-18 16:45:09,604 Epoch 200/2000 +2025-02-18 16:45:52,013 Current Learning Rate: 0.0000000000 +2025-02-18 16:45:53,396 Train Loss: 0.0016884, Val Loss: 0.0017407 +2025-02-18 16:45:53,397 Epoch 201/2000 +2025-02-18 16:46:34,605 Current Learning Rate: 0.0000006168 +2025-02-18 16:46:34,605 Train Loss: 0.0016745, Val Loss: 0.0017409 +2025-02-18 16:46:34,605 Epoch 202/2000 +2025-02-18 16:47:16,985 Current Learning 
Rate: 0.0000024672 +2025-02-18 16:47:16,986 Train Loss: 0.0016994, Val Loss: 0.0017408 +2025-02-18 16:47:16,987 Epoch 203/2000 +2025-02-18 16:47:58,737 Current Learning Rate: 0.0000055506 +2025-02-18 16:48:00,263 Train Loss: 0.0018111, Val Loss: 0.0017406 +2025-02-18 16:48:00,263 Epoch 204/2000 +2025-02-18 16:48:42,166 Current Learning Rate: 0.0000098664 +2025-02-18 16:48:42,167 Train Loss: 0.0016745, Val Loss: 0.0017409 +2025-02-18 16:48:42,167 Epoch 205/2000 +2025-02-18 16:49:23,801 Current Learning Rate: 0.0000154133 +2025-02-18 16:49:23,801 Train Loss: 0.0016672, Val Loss: 0.0017408 +2025-02-18 16:49:23,802 Epoch 206/2000 +2025-02-18 16:50:05,929 Current Learning Rate: 0.0000221902 +2025-02-18 16:50:08,008 Train Loss: 0.0017572, Val Loss: 0.0017405 +2025-02-18 16:50:08,009 Epoch 207/2000 +2025-02-18 16:50:49,255 Current Learning Rate: 0.0000301952 +2025-02-18 16:50:50,670 Train Loss: 0.0016598, Val Loss: 0.0017399 +2025-02-18 16:50:50,671 Epoch 208/2000 +2025-02-18 16:51:32,022 Current Learning Rate: 0.0000394265 +2025-02-18 16:51:32,023 Train Loss: 0.0019802, Val Loss: 0.0017404 +2025-02-18 16:51:32,023 Epoch 209/2000 +2025-02-18 16:52:14,522 Current Learning Rate: 0.0000498817 +2025-02-18 16:52:16,005 Train Loss: 0.0017660, Val Loss: 0.0017390 +2025-02-18 16:52:16,005 Epoch 210/2000 +2025-02-18 16:52:57,548 Current Learning Rate: 0.0000615583 +2025-02-18 16:52:58,814 Train Loss: 0.0016666, Val Loss: 0.0017384 +2025-02-18 16:52:58,814 Epoch 211/2000 +2025-02-18 16:53:40,618 Current Learning Rate: 0.0000744534 +2025-02-18 16:53:41,878 Train Loss: 0.0017613, Val Loss: 0.0017378 +2025-02-18 16:53:41,879 Epoch 212/2000 +2025-02-18 16:54:23,574 Current Learning Rate: 0.0000885637 +2025-02-18 16:54:23,575 Train Loss: 0.0016547, Val Loss: 0.0017379 +2025-02-18 16:54:23,575 Epoch 213/2000 +2025-02-18 16:55:05,691 Current Learning Rate: 0.0001038859 +2025-02-18 16:55:05,692 Train Loss: 0.0016648, Val Loss: 0.0017383 +2025-02-18 16:55:05,692 Epoch 214/2000 +2025-02-18 
16:55:47,634 Current Learning Rate: 0.0001204162 +2025-02-18 16:55:48,887 Train Loss: 0.0016908, Val Loss: 0.0017373 +2025-02-18 16:55:48,887 Epoch 215/2000 +2025-02-18 16:56:29,924 Current Learning Rate: 0.0001381504 +2025-02-18 16:56:29,924 Train Loss: 0.0020240, Val Loss: 0.0017382 +2025-02-18 16:56:29,925 Epoch 216/2000 +2025-02-18 16:57:11,826 Current Learning Rate: 0.0001570842 +2025-02-18 16:57:12,758 Train Loss: 0.0019309, Val Loss: 0.0017365 +2025-02-18 16:57:12,760 Epoch 217/2000 +2025-02-18 16:57:54,038 Current Learning Rate: 0.0001772129 +2025-02-18 16:57:54,039 Train Loss: 0.0018199, Val Loss: 0.0017462 +2025-02-18 16:57:54,039 Epoch 218/2000 +2025-02-18 16:58:36,635 Current Learning Rate: 0.0001985316 +2025-02-18 16:58:38,266 Train Loss: 0.0018424, Val Loss: 0.0017361 +2025-02-18 16:58:38,266 Epoch 219/2000 +2025-02-18 16:59:20,038 Current Learning Rate: 0.0002210349 +2025-02-18 16:59:21,775 Train Loss: 0.0022452, Val Loss: 0.0017359 +2025-02-18 16:59:21,775 Epoch 220/2000 +2025-02-18 17:00:03,233 Current Learning Rate: 0.0002447174 +2025-02-18 17:00:03,234 Train Loss: 0.0021426, Val Loss: 0.0017399 +2025-02-18 17:00:03,234 Epoch 221/2000 +2025-02-18 17:00:45,065 Current Learning Rate: 0.0002695732 +2025-02-18 17:00:46,474 Train Loss: 0.0017566, Val Loss: 0.0017346 +2025-02-18 17:00:46,474 Epoch 222/2000 +2025-02-18 17:01:28,205 Current Learning Rate: 0.0002955962 +2025-02-18 17:01:30,073 Train Loss: 0.0016008, Val Loss: 0.0017277 +2025-02-18 17:01:30,073 Epoch 223/2000 +2025-02-18 17:02:11,200 Current Learning Rate: 0.0003227798 +2025-02-18 17:02:11,201 Train Loss: 0.0018943, Val Loss: 0.0017290 +2025-02-18 17:02:11,202 Epoch 224/2000 +2025-02-18 17:02:53,095 Current Learning Rate: 0.0003511176 +2025-02-18 17:02:54,765 Train Loss: 0.0017643, Val Loss: 0.0017249 +2025-02-18 17:02:54,766 Epoch 225/2000 +2025-02-18 17:03:35,387 Current Learning Rate: 0.0003806023 +2025-02-18 17:03:37,170 Train Loss: 0.0016681, Val Loss: 0.0017219 +2025-02-18 
17:03:37,171 Epoch 226/2000 +2025-02-18 17:04:18,708 Current Learning Rate: 0.0004112269 +2025-02-18 17:04:20,403 Train Loss: 0.0016686, Val Loss: 0.0017218 +2025-02-18 17:04:20,404 Epoch 227/2000 +2025-02-18 17:05:01,616 Current Learning Rate: 0.0004429836 +2025-02-18 17:05:01,617 Train Loss: 0.0019731, Val Loss: 0.0017253 +2025-02-18 17:05:01,617 Epoch 228/2000 +2025-02-18 17:05:43,710 Current Learning Rate: 0.0004758647 +2025-02-18 17:05:43,711 Train Loss: 0.0017136, Val Loss: 0.0017456 +2025-02-18 17:05:43,711 Epoch 229/2000 +2025-02-18 17:06:25,733 Current Learning Rate: 0.0005098621 +2025-02-18 17:06:27,538 Train Loss: 0.0016452, Val Loss: 0.0017139 +2025-02-18 17:06:27,539 Epoch 230/2000 +2025-02-18 17:07:09,211 Current Learning Rate: 0.0005449674 +2025-02-18 17:07:09,212 Train Loss: 0.0018787, Val Loss: 0.0017430 +2025-02-18 17:07:09,213 Epoch 231/2000 +2025-02-18 17:07:50,913 Current Learning Rate: 0.0005811718 +2025-02-18 17:07:52,344 Train Loss: 0.0017876, Val Loss: 0.0017118 +2025-02-18 17:07:52,345 Epoch 232/2000 +2025-02-18 17:08:33,386 Current Learning Rate: 0.0006184666 +2025-02-18 17:08:34,783 Train Loss: 0.0017348, Val Loss: 0.0016973 +2025-02-18 17:08:34,783 Epoch 233/2000 +2025-02-18 17:09:16,650 Current Learning Rate: 0.0006568424 +2025-02-18 17:09:17,684 Train Loss: 0.0017158, Val Loss: 0.0016921 +2025-02-18 17:09:17,684 Epoch 234/2000 +2025-02-18 17:09:59,395 Current Learning Rate: 0.0006962899 +2025-02-18 17:10:00,465 Train Loss: 0.0017112, Val Loss: 0.0016919 +2025-02-18 17:10:00,465 Epoch 235/2000 +2025-02-18 17:10:42,076 Current Learning Rate: 0.0007367992 +2025-02-18 17:10:43,194 Train Loss: 0.0016566, Val Loss: 0.0016845 +2025-02-18 17:10:43,194 Epoch 236/2000 +2025-02-18 17:11:24,698 Current Learning Rate: 0.0007783604 +2025-02-18 17:11:25,924 Train Loss: 0.0016330, Val Loss: 0.0016760 +2025-02-18 17:11:25,925 Epoch 237/2000 +2025-02-18 17:12:06,980 Current Learning Rate: 0.0008209632 +2025-02-18 17:12:06,981 Train Loss: 0.0019498, Val 
Loss: 0.0016924 +2025-02-18 17:12:06,981 Epoch 238/2000 +2025-02-18 17:12:48,967 Current Learning Rate: 0.0008645971 +2025-02-18 17:12:48,975 Train Loss: 0.0020079, Val Loss: 0.0016969 +2025-02-18 17:12:48,975 Epoch 239/2000 +2025-02-18 17:13:31,162 Current Learning Rate: 0.0009092514 +2025-02-18 17:13:31,163 Train Loss: 0.0015317, Val Loss: 0.0016777 +2025-02-18 17:13:31,163 Epoch 240/2000 +2025-02-18 17:14:13,514 Current Learning Rate: 0.0009549150 +2025-02-18 17:14:13,514 Train Loss: 0.0017634, Val Loss: 0.0017104 +2025-02-18 17:14:13,515 Epoch 241/2000 +2025-02-18 17:14:55,472 Current Learning Rate: 0.0010015767 +2025-02-18 17:14:55,473 Train Loss: 0.0018392, Val Loss: 0.0017129 +2025-02-18 17:14:55,473 Epoch 242/2000 +2025-02-18 17:15:37,981 Current Learning Rate: 0.0010492249 +2025-02-18 17:15:38,007 Train Loss: 0.0017197, Val Loss: 0.0016939 +2025-02-18 17:15:38,008 Epoch 243/2000 +2025-02-18 17:16:19,408 Current Learning Rate: 0.0010978480 +2025-02-18 17:16:20,359 Train Loss: 0.0016534, Val Loss: 0.0016682 +2025-02-18 17:16:20,359 Epoch 244/2000 +2025-02-18 17:17:02,088 Current Learning Rate: 0.0011474338 +2025-02-18 17:17:02,089 Train Loss: 0.0018344, Val Loss: 0.0017232 +2025-02-18 17:17:02,090 Epoch 245/2000 +2025-02-18 17:17:44,425 Current Learning Rate: 0.0011979702 +2025-02-18 17:17:44,426 Train Loss: 0.0017448, Val Loss: 0.0017632 +2025-02-18 17:17:44,426 Epoch 246/2000 +2025-02-18 17:18:26,472 Current Learning Rate: 0.0012494447 +2025-02-18 17:18:26,473 Train Loss: 0.0017844, Val Loss: 0.0017023 +2025-02-18 17:18:26,473 Epoch 247/2000 +2025-02-18 17:19:08,767 Current Learning Rate: 0.0013018445 +2025-02-18 17:19:08,768 Train Loss: 0.0016408, Val Loss: 0.0016908 +2025-02-18 17:19:08,768 Epoch 248/2000 +2025-02-18 17:19:50,576 Current Learning Rate: 0.0013551569 +2025-02-18 17:19:50,577 Train Loss: 0.0021660, Val Loss: 0.0018092 +2025-02-18 17:19:50,577 Epoch 249/2000 +2025-02-18 17:20:32,526 Current Learning Rate: 0.0014093685 +2025-02-18 
17:20:32,526 Train Loss: 0.0016827, Val Loss: 0.0016840 +2025-02-18 17:20:32,527 Epoch 250/2000 +2025-02-18 17:21:14,451 Current Learning Rate: 0.0014644661 +2025-02-18 17:21:16,302 Train Loss: 0.0016643, Val Loss: 0.0016433 +2025-02-18 17:21:16,303 Epoch 251/2000 +2025-02-18 17:21:57,766 Current Learning Rate: 0.0015204360 +2025-02-18 17:21:57,767 Train Loss: 0.0018988, Val Loss: 0.0019761 +2025-02-18 17:21:57,768 Epoch 252/2000 +2025-02-18 17:22:39,713 Current Learning Rate: 0.0015772645 +2025-02-18 17:22:39,714 Train Loss: 0.0024260, Val Loss: 0.0017071 +2025-02-18 17:22:39,714 Epoch 253/2000 +2025-02-18 17:23:21,733 Current Learning Rate: 0.0016349374 +2025-02-18 17:23:23,158 Train Loss: 0.0015489, Val Loss: 0.0016318 +2025-02-18 17:23:23,158 Epoch 254/2000 +2025-02-18 17:24:04,197 Current Learning Rate: 0.0016934407 +2025-02-18 17:24:04,198 Train Loss: 0.0016413, Val Loss: 0.0017523 +2025-02-18 17:24:04,198 Epoch 255/2000 +2025-02-18 17:24:46,563 Current Learning Rate: 0.0017527598 +2025-02-18 17:24:46,564 Train Loss: 0.0016074, Val Loss: 0.0016386 +2025-02-18 17:24:46,564 Epoch 256/2000 +2025-02-18 17:25:28,150 Current Learning Rate: 0.0018128801 +2025-02-18 17:25:28,150 Train Loss: 0.0018730, Val Loss: 0.0018066 +2025-02-18 17:25:28,150 Epoch 257/2000 +2025-02-18 17:26:10,586 Current Learning Rate: 0.0018737867 +2025-02-18 17:26:10,586 Train Loss: 0.0018759, Val Loss: 0.0019966 +2025-02-18 17:26:10,587 Epoch 258/2000 +2025-02-18 17:26:52,139 Current Learning Rate: 0.0019354647 +2025-02-18 17:26:52,139 Train Loss: 0.0016847, Val Loss: 0.0016902 +2025-02-18 17:26:52,140 Epoch 259/2000 +2025-02-18 17:27:34,595 Current Learning Rate: 0.0019978989 +2025-02-18 17:27:34,595 Train Loss: 0.0014842, Val Loss: 0.0017114 +2025-02-18 17:27:34,595 Epoch 260/2000 +2025-02-18 17:28:16,592 Current Learning Rate: 0.0020610737 +2025-02-18 17:28:16,592 Train Loss: 0.0018388, Val Loss: 0.0017259 +2025-02-18 17:28:16,593 Epoch 261/2000 +2025-02-18 17:28:58,315 Current Learning 
Rate: 0.0021249737 +2025-02-18 17:28:58,315 Train Loss: 0.0020840, Val Loss: 0.0018863 +2025-02-18 17:28:58,315 Epoch 262/2000 +2025-02-18 17:29:40,308 Current Learning Rate: 0.0021895831 +2025-02-18 17:29:40,309 Train Loss: 0.0018472, Val Loss: 0.0017549 +2025-02-18 17:29:40,309 Epoch 263/2000 +2025-02-18 17:30:22,581 Current Learning Rate: 0.0022548859 +2025-02-18 17:30:24,021 Train Loss: 0.0016385, Val Loss: 0.0015944 +2025-02-18 17:30:24,022 Epoch 264/2000 +2025-02-18 17:31:05,091 Current Learning Rate: 0.0023208660 +2025-02-18 17:31:05,091 Train Loss: 0.0023224, Val Loss: 0.0018926 +2025-02-18 17:31:05,092 Epoch 265/2000 +2025-02-18 17:31:47,124 Current Learning Rate: 0.0023875072 +2025-02-18 17:31:47,124 Train Loss: 0.0018834, Val Loss: 0.0016311 +2025-02-18 17:31:47,125 Epoch 266/2000 +2025-02-18 17:32:29,361 Current Learning Rate: 0.0024547929 +2025-02-18 17:32:29,362 Train Loss: 0.0018385, Val Loss: 0.0017110 +2025-02-18 17:32:29,362 Epoch 267/2000 +2025-02-18 17:33:11,571 Current Learning Rate: 0.0025227067 +2025-02-18 17:33:11,572 Train Loss: 0.0019732, Val Loss: 0.0019323 +2025-02-18 17:33:11,572 Epoch 268/2000 +2025-02-18 17:33:53,395 Current Learning Rate: 0.0025912316 +2025-02-18 17:33:53,395 Train Loss: 0.0024165, Val Loss: 0.0020289 +2025-02-18 17:33:53,395 Epoch 269/2000 +2025-02-18 17:34:35,416 Current Learning Rate: 0.0026603509 +2025-02-18 17:34:35,416 Train Loss: 0.0016660, Val Loss: 0.0016108 +2025-02-18 17:34:35,416 Epoch 270/2000 +2025-02-18 17:35:17,315 Current Learning Rate: 0.0027300475 +2025-02-18 17:35:17,315 Train Loss: 0.0026150, Val Loss: 0.0031712 +2025-02-18 17:35:17,316 Epoch 271/2000 +2025-02-18 17:35:59,034 Current Learning Rate: 0.0028003042 +2025-02-18 17:35:59,034 Train Loss: 0.0020315, Val Loss: 0.0017251 +2025-02-18 17:35:59,035 Epoch 272/2000 +2025-02-18 17:36:41,098 Current Learning Rate: 0.0028711035 +2025-02-18 17:36:41,099 Train Loss: 0.0015845, Val Loss: 0.0016192 +2025-02-18 17:36:41,099 Epoch 273/2000 +2025-02-18 
17:37:23,213 Current Learning Rate: 0.0029424282 +2025-02-18 17:37:25,127 Train Loss: 0.0016111, Val Loss: 0.0015590 +2025-02-18 17:37:25,128 Epoch 274/2000 +2025-02-18 17:38:06,614 Current Learning Rate: 0.0030142605 +2025-02-18 17:38:06,615 Train Loss: 0.0022658, Val Loss: 0.0019700 +2025-02-18 17:38:06,616 Epoch 275/2000 +2025-02-18 17:38:48,364 Current Learning Rate: 0.0030865828 +2025-02-18 17:38:48,365 Train Loss: 0.0017028, Val Loss: 0.0016376 +2025-02-18 17:38:48,365 Epoch 276/2000 +2025-02-18 17:39:30,550 Current Learning Rate: 0.0031593772 +2025-02-18 17:39:30,551 Train Loss: 0.0015553, Val Loss: 0.0021934 +2025-02-18 17:39:30,551 Epoch 277/2000 +2025-02-18 17:40:12,769 Current Learning Rate: 0.0032326258 +2025-02-18 17:40:12,769 Train Loss: 0.0024115, Val Loss: 0.0016707 +2025-02-18 17:40:12,769 Epoch 278/2000 +2025-02-18 17:40:54,578 Current Learning Rate: 0.0033063104 +2025-02-18 17:40:55,682 Train Loss: 0.0015668, Val Loss: 0.0015343 +2025-02-18 17:40:55,683 Epoch 279/2000 +2025-02-18 17:41:36,570 Current Learning Rate: 0.0033804129 +2025-02-18 17:41:36,571 Train Loss: 0.0020592, Val Loss: 0.0021991 +2025-02-18 17:41:36,571 Epoch 280/2000 +2025-02-18 17:42:19,096 Current Learning Rate: 0.0034549150 +2025-02-18 17:42:19,096 Train Loss: 0.0017902, Val Loss: 0.0015586 +2025-02-18 17:42:19,096 Epoch 281/2000 +2025-02-18 17:43:00,503 Current Learning Rate: 0.0035297984 +2025-02-18 17:43:00,503 Train Loss: 0.0015316, Val Loss: 0.0025620 +2025-02-18 17:43:00,503 Epoch 282/2000 +2025-02-18 17:43:43,325 Current Learning Rate: 0.0036050445 +2025-02-18 17:43:43,326 Train Loss: 0.0029647, Val Loss: 0.0017013 +2025-02-18 17:43:43,326 Epoch 283/2000 +2025-02-18 17:44:25,447 Current Learning Rate: 0.0036806348 +2025-02-18 17:44:25,448 Train Loss: 0.0015455, Val Loss: 0.0016042 +2025-02-18 17:44:25,448 Epoch 284/2000 +2025-02-18 17:45:07,716 Current Learning Rate: 0.0037565506 +2025-02-18 17:45:07,717 Train Loss: 0.0020497, Val Loss: 0.0016840 +2025-02-18 
17:45:07,717 Epoch 285/2000 +2025-02-18 17:45:49,539 Current Learning Rate: 0.0038327732 +2025-02-18 17:45:51,108 Train Loss: 0.0015508, Val Loss: 0.0014593 +2025-02-18 17:45:51,108 Epoch 286/2000 +2025-02-18 17:46:31,870 Current Learning Rate: 0.0039092838 +2025-02-18 17:46:31,871 Train Loss: 0.0015400, Val Loss: 0.0014858 +2025-02-18 17:46:31,871 Epoch 287/2000 +2025-02-18 17:47:13,732 Current Learning Rate: 0.0039860635 +2025-02-18 17:47:13,733 Train Loss: 0.0020546, Val Loss: 0.0020457 +2025-02-18 17:47:13,733 Epoch 288/2000 +2025-02-18 17:47:55,729 Current Learning Rate: 0.0040630934 +2025-02-18 17:47:55,730 Train Loss: 0.0015843, Val Loss: 0.0015195 +2025-02-18 17:47:55,730 Epoch 289/2000 +2025-02-18 17:48:37,705 Current Learning Rate: 0.0041403545 +2025-02-18 17:48:37,706 Train Loss: 0.0028120, Val Loss: 0.0017338 +2025-02-18 17:48:37,706 Epoch 290/2000 +2025-02-18 17:49:19,714 Current Learning Rate: 0.0042178277 +2025-02-18 17:49:19,714 Train Loss: 0.0018246, Val Loss: 0.0015469 +2025-02-18 17:49:19,715 Epoch 291/2000 +2025-02-18 17:50:01,610 Current Learning Rate: 0.0042954938 +2025-02-18 17:50:03,952 Train Loss: 0.0016412, Val Loss: 0.0014233 +2025-02-18 17:50:03,952 Epoch 292/2000 +2025-02-18 17:50:44,928 Current Learning Rate: 0.0043733338 +2025-02-18 17:50:44,929 Train Loss: 0.0018163, Val Loss: 0.0063317 +2025-02-18 17:50:44,929 Epoch 293/2000 +2025-02-18 17:51:27,328 Current Learning Rate: 0.0044513284 +2025-02-18 17:51:27,329 Train Loss: 0.0029702, Val Loss: 0.0015472 +2025-02-18 17:51:27,329 Epoch 294/2000 +2025-02-18 17:52:09,312 Current Learning Rate: 0.0045294584 +2025-02-18 17:52:09,313 Train Loss: 0.0016110, Val Loss: 0.0015450 +2025-02-18 17:52:09,313 Epoch 295/2000 +2025-02-18 17:52:51,594 Current Learning Rate: 0.0046077045 +2025-02-18 17:52:51,595 Train Loss: 0.0012932, Val Loss: 0.0014242 +2025-02-18 17:52:51,596 Epoch 296/2000 +2025-02-18 17:53:33,939 Current Learning Rate: 0.0046860474 +2025-02-18 17:53:34,962 Train Loss: 0.0015528, Val 
Loss: 0.0014164 +2025-02-18 17:53:34,962 Epoch 297/2000 +2025-02-18 17:54:16,642 Current Learning Rate: 0.0047644677 +2025-02-18 17:54:16,643 Train Loss: 0.0017436, Val Loss: 0.0017601 +2025-02-18 17:54:16,643 Epoch 298/2000 +2025-02-18 17:54:58,703 Current Learning Rate: 0.0048429462 +2025-02-18 17:54:58,703 Train Loss: 0.0015527, Val Loss: 0.0017430 +2025-02-18 17:54:58,703 Epoch 299/2000 +2025-02-18 17:55:40,578 Current Learning Rate: 0.0049214634 +2025-02-18 17:55:40,578 Train Loss: 0.0025417, Val Loss: 0.0016193 +2025-02-18 17:55:40,579 Epoch 300/2000 +2025-02-18 17:56:22,710 Current Learning Rate: 0.0050000000 +2025-02-18 17:56:22,711 Train Loss: 0.0014638, Val Loss: 0.0015500 +2025-02-18 17:56:22,711 Epoch 301/2000 +2025-02-18 17:57:04,467 Current Learning Rate: 0.0050785366 +2025-02-18 17:57:05,869 Train Loss: 0.0013028, Val Loss: 0.0013837 +2025-02-18 17:57:05,891 Epoch 302/2000 +2025-02-18 17:57:46,853 Current Learning Rate: 0.0051570538 +2025-02-18 17:57:46,853 Train Loss: 0.0012835, Val Loss: 0.0014938 +2025-02-18 17:57:46,854 Epoch 303/2000 +2025-02-18 17:58:28,912 Current Learning Rate: 0.0052355323 +2025-02-18 17:58:28,912 Train Loss: 0.0044144, Val Loss: 0.0023768 +2025-02-18 17:58:28,913 Epoch 304/2000 +2025-02-18 17:59:11,121 Current Learning Rate: 0.0053139526 +2025-02-18 17:59:11,122 Train Loss: 0.0016570, Val Loss: 0.0014652 +2025-02-18 17:59:11,123 Epoch 305/2000 +2025-02-18 17:59:52,705 Current Learning Rate: 0.0053922955 +2025-02-18 17:59:54,172 Train Loss: 0.0013686, Val Loss: 0.0013476 +2025-02-18 17:59:54,172 Epoch 306/2000 +2025-02-18 18:00:35,856 Current Learning Rate: 0.0054705416 +2025-02-18 18:00:37,841 Train Loss: 0.0013546, Val Loss: 0.0013166 +2025-02-18 18:00:37,841 Epoch 307/2000 +2025-02-18 18:01:18,502 Current Learning Rate: 0.0055486716 +2025-02-18 18:01:18,503 Train Loss: 0.0014111, Val Loss: 0.0013312 +2025-02-18 18:01:18,503 Epoch 308/2000 +2025-02-18 18:02:01,066 Current Learning Rate: 0.0056266662 +2025-02-18 
18:02:01,067 Train Loss: 0.0016744, Val Loss: 0.0015021 +2025-02-18 18:02:01,067 Epoch 309/2000 +2025-02-18 18:02:43,111 Current Learning Rate: 0.0057045062 +2025-02-18 18:02:44,606 Train Loss: 0.0012878, Val Loss: 0.0012889 +2025-02-18 18:02:44,607 Epoch 310/2000 +2025-02-18 18:03:26,057 Current Learning Rate: 0.0057821723 +2025-02-18 18:03:27,349 Train Loss: 0.0013382, Val Loss: 0.0012536 +2025-02-18 18:03:27,350 Epoch 311/2000 +2025-02-18 18:04:08,125 Current Learning Rate: 0.0058596455 +2025-02-18 18:04:08,126 Train Loss: 0.0022607, Val Loss: 0.0022936 +2025-02-18 18:04:08,126 Epoch 312/2000 +2025-02-18 18:04:50,063 Current Learning Rate: 0.0059369066 +2025-02-18 18:04:50,064 Train Loss: 0.0014946, Val Loss: 0.0012776 +2025-02-18 18:04:50,064 Epoch 313/2000 +2025-02-18 18:05:32,127 Current Learning Rate: 0.0060139365 +2025-02-18 18:05:32,127 Train Loss: 0.0013216, Val Loss: 0.0012649 +2025-02-18 18:05:32,128 Epoch 314/2000 +2025-02-18 18:06:13,915 Current Learning Rate: 0.0060907162 +2025-02-18 18:06:14,928 Train Loss: 0.0012000, Val Loss: 0.0011900 +2025-02-18 18:06:14,928 Epoch 315/2000 +2025-02-18 18:06:56,137 Current Learning Rate: 0.0061672268 +2025-02-18 18:06:57,270 Train Loss: 0.0011508, Val Loss: 0.0011799 +2025-02-18 18:06:57,271 Epoch 316/2000 +2025-02-18 18:07:38,676 Current Learning Rate: 0.0062434494 +2025-02-18 18:07:38,677 Train Loss: 0.0014119, Val Loss: 0.0012127 +2025-02-18 18:07:38,677 Epoch 317/2000 +2025-02-18 18:08:20,675 Current Learning Rate: 0.0063193652 +2025-02-18 18:08:20,676 Train Loss: 0.0013107, Val Loss: 0.0023704 +2025-02-18 18:08:20,677 Epoch 318/2000 +2025-02-18 18:09:02,569 Current Learning Rate: 0.0063949555 +2025-02-18 18:09:02,570 Train Loss: 0.0014809, Val Loss: 0.0012692 +2025-02-18 18:09:02,570 Epoch 319/2000 +2025-02-18 18:09:44,506 Current Learning Rate: 0.0064702016 +2025-02-18 18:09:44,506 Train Loss: 0.0013341, Val Loss: 0.0011892 +2025-02-18 18:09:44,507 Epoch 320/2000 +2025-02-18 18:10:26,877 Current Learning 
Rate: 0.0065450850 +2025-02-18 18:10:26,877 Train Loss: 0.0013506, Val Loss: 0.0012130 +2025-02-18 18:10:26,877 Epoch 321/2000 +2025-02-18 18:11:08,819 Current Learning Rate: 0.0066195871 +2025-02-18 18:11:08,820 Train Loss: 0.0015729, Val Loss: 0.0019253 +2025-02-18 18:11:08,820 Epoch 322/2000 +2025-02-18 18:11:50,997 Current Learning Rate: 0.0066936896 +2025-02-18 18:11:50,998 Train Loss: 0.0017395, Val Loss: 0.0013357 +2025-02-18 18:11:50,998 Epoch 323/2000 +2025-02-18 18:12:32,571 Current Learning Rate: 0.0067673742 +2025-02-18 18:12:34,139 Train Loss: 0.0010730, Val Loss: 0.0011544 +2025-02-18 18:12:34,139 Epoch 324/2000 +2025-02-18 18:13:15,141 Current Learning Rate: 0.0068406228 +2025-02-18 18:13:15,142 Train Loss: 0.0012745, Val Loss: 0.0011848 +2025-02-18 18:13:15,142 Epoch 325/2000 +2025-02-18 18:13:56,757 Current Learning Rate: 0.0069134172 +2025-02-18 18:13:56,757 Train Loss: 0.0016024, Val Loss: 0.0019904 +2025-02-18 18:13:56,759 Epoch 326/2000 +2025-02-18 18:14:38,838 Current Learning Rate: 0.0069857395 +2025-02-18 18:14:38,838 Train Loss: 0.0020954, Val Loss: 0.0012449 +2025-02-18 18:14:38,839 Epoch 327/2000 +2025-02-18 18:15:20,466 Current Learning Rate: 0.0070575718 +2025-02-18 18:15:21,525 Train Loss: 0.0011917, Val Loss: 0.0011023 +2025-02-18 18:15:21,526 Epoch 328/2000 +2025-02-18 18:16:03,001 Current Learning Rate: 0.0071288965 +2025-02-18 18:16:04,577 Train Loss: 0.0010282, Val Loss: 0.0010505 +2025-02-18 18:16:04,577 Epoch 329/2000 +2025-02-18 18:16:45,965 Current Learning Rate: 0.0071996958 +2025-02-18 18:16:45,966 Train Loss: 0.0012060, Val Loss: 0.0010773 +2025-02-18 18:16:45,966 Epoch 330/2000 +2025-02-18 18:17:27,761 Current Learning Rate: 0.0072699525 +2025-02-18 18:17:27,761 Train Loss: 0.0010776, Val Loss: 0.0010650 +2025-02-18 18:17:27,761 Epoch 331/2000 +2025-02-18 18:18:09,813 Current Learning Rate: 0.0073396491 +2025-02-18 18:18:09,813 Train Loss: 0.0011577, Val Loss: 0.0011705 +2025-02-18 18:18:09,813 Epoch 332/2000 +2025-02-18 
18:18:51,894 Current Learning Rate: 0.0074087684 +2025-02-18 18:18:51,894 Train Loss: 0.0010904, Val Loss: 0.0011583 +2025-02-18 18:18:51,894 Epoch 333/2000 +2025-02-18 18:19:33,771 Current Learning Rate: 0.0074772933 +2025-02-18 18:19:33,772 Train Loss: 0.0013879, Val Loss: 0.0013653 +2025-02-18 18:19:33,772 Epoch 334/2000 +2025-02-18 18:20:16,098 Current Learning Rate: 0.0075452071 +2025-02-18 18:20:16,098 Train Loss: 0.0014281, Val Loss: 0.0011852 +2025-02-18 18:20:16,099 Epoch 335/2000 +2025-02-18 18:20:57,931 Current Learning Rate: 0.0076124928 +2025-02-18 18:20:57,932 Train Loss: 0.0013969, Val Loss: 0.0010668 +2025-02-18 18:20:57,932 Epoch 336/2000 +2025-02-18 18:21:39,591 Current Learning Rate: 0.0076791340 +2025-02-18 18:21:40,752 Train Loss: 0.0009118, Val Loss: 0.0009649 +2025-02-18 18:21:40,752 Epoch 337/2000 +2025-02-18 18:22:21,685 Current Learning Rate: 0.0077451141 +2025-02-18 18:22:21,686 Train Loss: 0.0012383, Val Loss: 0.0010181 +2025-02-18 18:22:21,686 Epoch 338/2000 +2025-02-18 18:23:03,957 Current Learning Rate: 0.0078104169 +2025-02-18 18:23:03,957 Train Loss: 0.0012383, Val Loss: 0.0010559 +2025-02-18 18:23:03,958 Epoch 339/2000 +2025-02-18 18:23:45,640 Current Learning Rate: 0.0078750263 +2025-02-18 18:23:45,640 Train Loss: 0.0009873, Val Loss: 0.0010110 +2025-02-18 18:23:45,640 Epoch 340/2000 +2025-02-18 18:24:27,741 Current Learning Rate: 0.0079389263 +2025-02-18 18:24:27,742 Train Loss: 0.0008993, Val Loss: 0.0011927 +2025-02-18 18:24:27,742 Epoch 341/2000 +2025-02-18 18:25:10,182 Current Learning Rate: 0.0080021011 +2025-02-18 18:25:10,183 Train Loss: 0.0011391, Val Loss: 0.0010520 +2025-02-18 18:25:10,183 Epoch 342/2000 +2025-02-18 18:25:52,028 Current Learning Rate: 0.0080645353 +2025-02-18 18:25:52,029 Train Loss: 0.0013693, Val Loss: 0.0011954 +2025-02-18 18:25:52,029 Epoch 343/2000 +2025-02-18 18:26:34,129 Current Learning Rate: 0.0081262133 +2025-02-18 18:26:34,130 Train Loss: 0.0013792, Val Loss: 0.0010096 +2025-02-18 
18:26:34,130 Epoch 344/2000 +2025-02-18 18:27:16,061 Current Learning Rate: 0.0081871199 +2025-02-18 18:27:16,062 Train Loss: 0.0011318, Val Loss: 0.0009873 +2025-02-18 18:27:16,063 Epoch 345/2000 +2025-02-18 18:27:58,172 Current Learning Rate: 0.0082472402 +2025-02-18 18:27:58,173 Train Loss: 0.0010808, Val Loss: 0.0012554 +2025-02-18 18:27:58,173 Epoch 346/2000 +2025-02-18 18:28:40,042 Current Learning Rate: 0.0083065593 +2025-02-18 18:28:40,043 Train Loss: 0.0012310, Val Loss: 0.0011112 +2025-02-18 18:28:40,043 Epoch 347/2000 +2025-02-18 18:29:22,289 Current Learning Rate: 0.0083650626 +2025-02-18 18:29:22,290 Train Loss: 0.0012932, Val Loss: 0.0010920 +2025-02-18 18:29:22,290 Epoch 348/2000 +2025-02-18 18:30:04,733 Current Learning Rate: 0.0084227355 +2025-02-18 18:30:04,733 Train Loss: 0.0009327, Val Loss: 0.0009964 +2025-02-18 18:30:04,733 Epoch 349/2000 +2025-02-18 18:30:47,143 Current Learning Rate: 0.0084795640 +2025-02-18 18:30:47,143 Train Loss: 0.0011171, Val Loss: 0.0010743 +2025-02-18 18:30:47,144 Epoch 350/2000 +2025-02-18 18:31:29,496 Current Learning Rate: 0.0085355339 +2025-02-18 18:31:29,496 Train Loss: 0.0012911, Val Loss: 0.0011814 +2025-02-18 18:31:29,496 Epoch 351/2000 +2025-02-18 18:32:11,428 Current Learning Rate: 0.0085906315 +2025-02-18 18:32:11,429 Train Loss: 0.0013600, Val Loss: 0.0010618 +2025-02-18 18:32:11,429 Epoch 352/2000 +2025-02-18 18:32:53,485 Current Learning Rate: 0.0086448431 +2025-02-18 18:32:55,277 Train Loss: 0.0009300, Val Loss: 0.0009471 +2025-02-18 18:32:55,277 Epoch 353/2000 +2025-02-18 18:33:36,811 Current Learning Rate: 0.0086981555 +2025-02-18 18:33:38,592 Train Loss: 0.0011415, Val Loss: 0.0009114 +2025-02-18 18:33:38,592 Epoch 354/2000 +2025-02-18 18:34:20,135 Current Learning Rate: 0.0087505553 +2025-02-18 18:34:20,135 Train Loss: 0.0009566, Val Loss: 0.0009206 +2025-02-18 18:34:20,136 Epoch 355/2000 +2025-02-18 18:35:01,799 Current Learning Rate: 0.0088020298 +2025-02-18 18:35:04,298 Train Loss: 0.0009845, Val 
Loss: 0.0008380 +2025-02-18 18:35:04,298 Epoch 356/2000 +2025-02-18 18:35:45,578 Current Learning Rate: 0.0088525662 +2025-02-18 18:35:47,410 Train Loss: 0.0009216, Val Loss: 0.0008178 +2025-02-18 18:35:47,410 Epoch 357/2000 +2025-02-18 18:36:28,478 Current Learning Rate: 0.0089021520 +2025-02-18 18:36:28,479 Train Loss: 0.0009710, Val Loss: 0.0008633 +2025-02-18 18:36:28,479 Epoch 358/2000 +2025-02-18 18:37:10,566 Current Learning Rate: 0.0089507751 +2025-02-18 18:37:10,567 Train Loss: 0.0009832, Val Loss: 0.0008622 +2025-02-18 18:37:10,567 Epoch 359/2000 +2025-02-18 18:37:52,640 Current Learning Rate: 0.0089984233 +2025-02-18 18:37:52,641 Train Loss: 0.0011419, Val Loss: 0.0009875 +2025-02-18 18:37:52,641 Epoch 360/2000 +2025-02-18 18:38:34,425 Current Learning Rate: 0.0090450850 +2025-02-18 18:38:34,426 Train Loss: 0.0011875, Val Loss: 0.0008943 +2025-02-18 18:38:34,426 Epoch 361/2000 +2025-02-18 18:39:16,281 Current Learning Rate: 0.0090907486 +2025-02-18 18:39:16,281 Train Loss: 0.0010435, Val Loss: 0.0008241 +2025-02-18 18:39:16,281 Epoch 362/2000 +2025-02-18 18:39:58,218 Current Learning Rate: 0.0091354029 +2025-02-18 18:39:58,218 Train Loss: 0.0008685, Val Loss: 0.0008383 +2025-02-18 18:39:58,218 Epoch 363/2000 +2025-02-18 18:40:40,694 Current Learning Rate: 0.0091790368 +2025-02-18 18:40:42,367 Train Loss: 0.0008282, Val Loss: 0.0008067 +2025-02-18 18:40:42,368 Epoch 364/2000 +2025-02-18 18:41:24,062 Current Learning Rate: 0.0092216396 +2025-02-18 18:41:26,011 Train Loss: 0.0008652, Val Loss: 0.0007782 +2025-02-18 18:41:26,012 Epoch 365/2000 +2025-02-18 18:42:06,710 Current Learning Rate: 0.0092632008 +2025-02-18 18:42:06,710 Train Loss: 0.0010616, Val Loss: 0.0007939 +2025-02-18 18:42:06,710 Epoch 366/2000 +2025-02-18 18:42:48,857 Current Learning Rate: 0.0093037101 +2025-02-18 18:42:48,857 Train Loss: 0.0007988, Val Loss: 0.0008904 +2025-02-18 18:42:48,857 Epoch 367/2000 +2025-02-18 18:43:31,035 Current Learning Rate: 0.0093431576 +2025-02-18 
18:43:31,036 Train Loss: 0.0007915, Val Loss: 0.0008739 +2025-02-18 18:43:31,036 Epoch 368/2000 +2025-02-18 18:44:12,848 Current Learning Rate: 0.0093815334 +2025-02-18 18:44:12,850 Train Loss: 0.0009250, Val Loss: 0.0007952 +2025-02-18 18:44:12,850 Epoch 369/2000 +2025-02-18 18:44:54,908 Current Learning Rate: 0.0094188282 +2025-02-18 18:44:56,664 Train Loss: 0.0007022, Val Loss: 0.0007626 +2025-02-18 18:44:56,664 Epoch 370/2000 +2025-02-18 18:45:37,900 Current Learning Rate: 0.0094550326 +2025-02-18 18:45:37,901 Train Loss: 0.0008208, Val Loss: 0.0008126 +2025-02-18 18:45:37,901 Epoch 371/2000 +2025-02-18 18:46:20,066 Current Learning Rate: 0.0094901379 +2025-02-18 18:46:20,067 Train Loss: 0.0010265, Val Loss: 0.0009555 +2025-02-18 18:46:20,067 Epoch 372/2000 +2025-02-18 18:47:02,657 Current Learning Rate: 0.0095241353 +2025-02-18 18:47:02,657 Train Loss: 0.0012776, Val Loss: 0.0008928 +2025-02-18 18:47:02,657 Epoch 373/2000 +2025-02-18 18:47:45,084 Current Learning Rate: 0.0095570164 +2025-02-18 18:47:45,084 Train Loss: 0.0010268, Val Loss: 0.0008200 +2025-02-18 18:47:45,085 Epoch 374/2000 +2025-02-18 18:48:26,995 Current Learning Rate: 0.0095887731 +2025-02-18 18:48:26,996 Train Loss: 0.0010547, Val Loss: 0.0008072 +2025-02-18 18:48:26,996 Epoch 375/2000 +2025-02-18 18:49:09,166 Current Learning Rate: 0.0096193977 +2025-02-18 18:49:09,166 Train Loss: 0.0008614, Val Loss: 0.0007851 +2025-02-18 18:49:09,166 Epoch 376/2000 +2025-02-18 18:49:51,616 Current Learning Rate: 0.0096488824 +2025-02-18 18:49:51,616 Train Loss: 0.0008251, Val Loss: 0.0007705 +2025-02-18 18:49:51,616 Epoch 377/2000 +2025-02-18 18:50:32,893 Current Learning Rate: 0.0096772202 +2025-02-18 18:50:33,809 Train Loss: 0.0006624, Val Loss: 0.0007596 +2025-02-18 18:50:33,810 Epoch 378/2000 +2025-02-18 18:51:15,237 Current Learning Rate: 0.0097044038 +2025-02-18 18:51:15,239 Train Loss: 0.0008598, Val Loss: 0.0007849 +2025-02-18 18:51:15,239 Epoch 379/2000 +2025-02-18 18:51:57,160 Current Learning 
Rate: 0.0097304268 +2025-02-18 18:51:58,626 Train Loss: 0.0011610, Val Loss: 0.0007443 +2025-02-18 18:51:58,626 Epoch 380/2000 +2025-02-18 18:52:39,575 Current Learning Rate: 0.0097552826 +2025-02-18 18:52:41,107 Train Loss: 0.0007582, Val Loss: 0.0006625 +2025-02-18 18:52:41,107 Epoch 381/2000 +2025-02-18 18:53:22,716 Current Learning Rate: 0.0097789651 +2025-02-18 18:53:22,716 Train Loss: 0.0009193, Val Loss: 0.0007103 +2025-02-18 18:53:22,716 Epoch 382/2000 +2025-02-18 18:54:04,844 Current Learning Rate: 0.0098014684 +2025-02-18 18:54:04,844 Train Loss: 0.0007107, Val Loss: 0.0006871 +2025-02-18 18:54:04,844 Epoch 383/2000 +2025-02-18 18:54:46,727 Current Learning Rate: 0.0098227871 +2025-02-18 18:54:46,727 Train Loss: 0.0008805, Val Loss: 0.0007235 +2025-02-18 18:54:46,728 Epoch 384/2000 +2025-02-18 18:55:29,104 Current Learning Rate: 0.0098429158 +2025-02-18 18:55:31,185 Train Loss: 0.0008129, Val Loss: 0.0006509 +2025-02-18 18:55:31,185 Epoch 385/2000 +2025-02-18 18:56:12,071 Current Learning Rate: 0.0098618496 +2025-02-18 18:56:12,072 Train Loss: 0.0008680, Val Loss: 0.0007852 +2025-02-18 18:56:12,072 Epoch 386/2000 +2025-02-18 18:56:54,660 Current Learning Rate: 0.0098795838 +2025-02-18 18:56:54,661 Train Loss: 0.0010028, Val Loss: 0.0007055 +2025-02-18 18:56:54,662 Epoch 387/2000 +2025-02-18 18:57:36,818 Current Learning Rate: 0.0098961141 +2025-02-18 18:57:36,818 Train Loss: 0.0008143, Val Loss: 0.0006920 +2025-02-18 18:57:36,819 Epoch 388/2000 +2025-02-18 18:58:18,813 Current Learning Rate: 0.0099114363 +2025-02-18 18:58:18,814 Train Loss: 0.0006522, Val Loss: 0.0006845 +2025-02-18 18:58:18,814 Epoch 389/2000 +2025-02-18 18:59:01,014 Current Learning Rate: 0.0099255466 +2025-02-18 18:59:01,015 Train Loss: 0.0007134, Val Loss: 0.0006953 +2025-02-18 18:59:01,015 Epoch 390/2000 +2025-02-18 18:59:42,976 Current Learning Rate: 0.0099384417 +2025-02-18 18:59:43,914 Train Loss: 0.0007659, Val Loss: 0.0006227 +2025-02-18 18:59:43,914 Epoch 391/2000 +2025-02-18 
19:00:25,802 Current Learning Rate: 0.0099501183 +2025-02-18 19:00:27,440 Train Loss: 0.0005588, Val Loss: 0.0006047 +2025-02-18 19:00:27,440 Epoch 392/2000 +2025-02-18 19:01:08,636 Current Learning Rate: 0.0099605735 +2025-02-18 19:01:08,637 Train Loss: 0.0009382, Val Loss: 0.0006760 +2025-02-18 19:01:08,637 Epoch 393/2000 +2025-02-18 19:01:51,088 Current Learning Rate: 0.0099698048 +2025-02-18 19:01:51,088 Train Loss: 0.0006468, Val Loss: 0.0007493 +2025-02-18 19:01:51,088 Epoch 394/2000 +2025-02-18 19:02:33,237 Current Learning Rate: 0.0099778098 +2025-02-18 19:02:33,238 Train Loss: 0.0006580, Val Loss: 0.0007147 +2025-02-18 19:02:33,238 Epoch 395/2000 +2025-02-18 19:03:15,051 Current Learning Rate: 0.0099845867 +2025-02-18 19:03:15,052 Train Loss: 0.0006028, Val Loss: 0.0006751 +2025-02-18 19:03:15,052 Epoch 396/2000 +2025-02-18 19:03:57,222 Current Learning Rate: 0.0099901336 +2025-02-18 19:03:57,222 Train Loss: 0.0006520, Val Loss: 0.0006788 +2025-02-18 19:03:57,222 Epoch 397/2000 +2025-02-18 19:04:39,113 Current Learning Rate: 0.0099944494 +2025-02-18 19:04:39,114 Train Loss: 0.0007369, Val Loss: 0.0007059 +2025-02-18 19:04:39,114 Epoch 398/2000 +2025-02-18 19:05:21,467 Current Learning Rate: 0.0099975328 +2025-02-18 19:05:21,468 Train Loss: 0.0005915, Val Loss: 0.0006115 +2025-02-18 19:05:21,468 Epoch 399/2000 +2025-02-18 19:06:03,750 Current Learning Rate: 0.0099993832 +2025-02-18 19:06:03,750 Train Loss: 0.0009239, Val Loss: 0.0007050 +2025-02-18 19:06:03,750 Epoch 400/2000 +2025-02-18 19:06:45,824 Current Learning Rate: 0.0100000000 +2025-02-18 19:06:45,825 Train Loss: 0.0006802, Val Loss: 0.0006834 +2025-02-18 19:06:45,826 Epoch 401/2000 +2025-02-18 19:07:28,026 Current Learning Rate: 0.0099993832 +2025-02-18 19:07:28,027 Train Loss: 0.0007217, Val Loss: 0.0006685 +2025-02-18 19:07:28,027 Epoch 402/2000 +2025-02-18 19:08:09,902 Current Learning Rate: 0.0099975328 +2025-02-18 19:08:09,902 Train Loss: 0.0006212, Val Loss: 0.0006712 +2025-02-18 
19:08:09,903 Epoch 403/2000 +2025-02-18 19:08:51,602 Current Learning Rate: 0.0099944494 +2025-02-18 19:08:51,603 Train Loss: 0.0005316, Val Loss: 0.0006669 +2025-02-18 19:08:51,603 Epoch 404/2000 +2025-02-18 19:09:34,068 Current Learning Rate: 0.0099901336 +2025-02-18 19:09:34,069 Train Loss: 0.0005379, Val Loss: 0.0006210 +2025-02-18 19:09:34,069 Epoch 405/2000 +2025-02-18 19:10:16,079 Current Learning Rate: 0.0099845867 +2025-02-18 19:10:16,084 Train Loss: 0.0007584, Val Loss: 0.0007061 +2025-02-18 19:10:16,086 Epoch 406/2000 +2025-02-18 19:10:57,839 Current Learning Rate: 0.0099778098 +2025-02-18 19:10:57,839 Train Loss: 0.0006947, Val Loss: 0.0006958 +2025-02-18 19:10:57,840 Epoch 407/2000 +2025-02-18 19:11:39,846 Current Learning Rate: 0.0099698048 +2025-02-18 19:11:40,977 Train Loss: 0.0006130, Val Loss: 0.0006041 +2025-02-18 19:11:40,978 Epoch 408/2000 +2025-02-18 19:12:22,618 Current Learning Rate: 0.0099605735 +2025-02-18 19:12:22,619 Train Loss: 0.0007180, Val Loss: 0.0006334 +2025-02-18 19:12:22,619 Epoch 409/2000 +2025-02-18 19:13:04,723 Current Learning Rate: 0.0099501183 +2025-02-18 19:13:04,724 Train Loss: 0.0009343, Val Loss: 0.0006998 +2025-02-18 19:13:04,724 Epoch 410/2000 +2025-02-18 19:13:47,035 Current Learning Rate: 0.0099384417 +2025-02-18 19:13:47,035 Train Loss: 0.0006732, Val Loss: 0.0006672 +2025-02-18 19:13:47,035 Epoch 411/2000 +2025-02-18 19:14:28,759 Current Learning Rate: 0.0099255466 +2025-02-18 19:14:28,760 Train Loss: 0.0006974, Val Loss: 0.0006440 +2025-02-18 19:14:28,761 Epoch 412/2000 +2025-02-18 19:15:10,310 Current Learning Rate: 0.0099114363 +2025-02-18 19:15:10,311 Train Loss: 0.0008533, Val Loss: 0.0006498 +2025-02-18 19:15:10,311 Epoch 413/2000 +2025-02-18 19:15:52,498 Current Learning Rate: 0.0098961141 +2025-02-18 19:15:53,696 Train Loss: 0.0005820, Val Loss: 0.0005948 +2025-02-18 19:15:53,696 Epoch 414/2000 +2025-02-18 19:16:35,280 Current Learning Rate: 0.0098795838 +2025-02-18 19:16:35,281 Train Loss: 0.0005831, Val 
Loss: 0.0005997 +2025-02-18 19:16:35,281 Epoch 415/2000 +2025-02-18 19:17:16,845 Current Learning Rate: 0.0098618496 +2025-02-18 19:17:16,846 Train Loss: 0.0005954, Val Loss: 0.0006401 +2025-02-18 19:17:16,846 Epoch 416/2000 +2025-02-18 19:17:58,941 Current Learning Rate: 0.0098429158 +2025-02-18 19:18:00,395 Train Loss: 0.0007465, Val Loss: 0.0005905 +2025-02-18 19:18:00,395 Epoch 417/2000 +2025-02-18 19:18:42,085 Current Learning Rate: 0.0098227871 +2025-02-18 19:18:42,086 Train Loss: 0.0006766, Val Loss: 0.0006221 +2025-02-18 19:18:42,086 Epoch 418/2000 +2025-02-18 19:19:23,799 Current Learning Rate: 0.0098014684 +2025-02-18 19:19:23,799 Train Loss: 0.0005913, Val Loss: 0.0006298 +2025-02-18 19:19:23,800 Epoch 419/2000 +2025-02-18 19:20:05,794 Current Learning Rate: 0.0097789651 +2025-02-18 19:20:05,794 Train Loss: 0.0007962, Val Loss: 0.0006754 +2025-02-18 19:20:05,794 Epoch 420/2000 +2025-02-18 19:20:48,010 Current Learning Rate: 0.0097552826 +2025-02-18 19:20:48,011 Train Loss: 0.0007719, Val Loss: 0.0007072 +2025-02-18 19:20:48,011 Epoch 421/2000 +2025-02-18 19:21:30,247 Current Learning Rate: 0.0097304268 +2025-02-18 19:21:30,248 Train Loss: 0.0007165, Val Loss: 0.0008461 +2025-02-18 19:21:30,248 Epoch 422/2000 +2025-02-18 19:22:12,085 Current Learning Rate: 0.0097044038 +2025-02-18 19:22:12,087 Train Loss: 0.0009111, Val Loss: 0.0007067 +2025-02-18 19:22:12,087 Epoch 423/2000 +2025-02-18 19:22:54,291 Current Learning Rate: 0.0096772202 +2025-02-18 19:22:54,291 Train Loss: 0.0005967, Val Loss: 0.0006159 +2025-02-18 19:22:54,292 Epoch 424/2000 +2025-02-18 19:23:36,761 Current Learning Rate: 0.0096488824 +2025-02-18 19:23:38,201 Train Loss: 0.0005798, Val Loss: 0.0005475 +2025-02-18 19:23:38,202 Epoch 425/2000 +2025-02-18 19:24:19,643 Current Learning Rate: 0.0096193977 +2025-02-18 19:24:21,295 Train Loss: 0.0006066, Val Loss: 0.0005124 +2025-02-18 19:24:21,296 Epoch 426/2000 +2025-02-18 19:25:02,494 Current Learning Rate: 0.0095887731 +2025-02-18 
19:25:02,495 Train Loss: 0.0009191, Val Loss: 0.0005508 +2025-02-18 19:25:02,496 Epoch 427/2000 +2025-02-18 19:25:44,641 Current Learning Rate: 0.0095570164 +2025-02-18 19:25:44,642 Train Loss: 0.0005025, Val Loss: 0.0005318 +2025-02-18 19:25:44,643 Epoch 428/2000 +2025-02-18 19:26:26,818 Current Learning Rate: 0.0095241353 +2025-02-18 19:26:26,818 Train Loss: 0.0007160, Val Loss: 0.0005306 +2025-02-18 19:26:26,818 Epoch 429/2000 +2025-02-18 19:27:08,695 Current Learning Rate: 0.0094901379 +2025-02-18 19:27:08,696 Train Loss: 0.0005730, Val Loss: 0.0005305 +2025-02-18 19:27:08,696 Epoch 430/2000 +2025-02-18 19:27:50,926 Current Learning Rate: 0.0094550326 +2025-02-18 19:27:50,927 Train Loss: 0.0005401, Val Loss: 0.0005623 +2025-02-18 19:27:50,930 Epoch 431/2000 +2025-02-18 19:28:32,937 Current Learning Rate: 0.0094188282 +2025-02-18 19:28:32,938 Train Loss: 0.0006020, Val Loss: 0.0006503 +2025-02-18 19:28:32,938 Epoch 432/2000 +2025-02-18 19:29:14,658 Current Learning Rate: 0.0093815334 +2025-02-18 19:29:14,659 Train Loss: 0.0007239, Val Loss: 0.0006138 +2025-02-18 19:29:14,659 Epoch 433/2000 +2025-02-18 19:29:57,166 Current Learning Rate: 0.0093431576 +2025-02-18 19:29:58,647 Train Loss: 0.0005680, Val Loss: 0.0004980 +2025-02-18 19:29:58,647 Epoch 434/2000 +2025-02-18 19:30:40,274 Current Learning Rate: 0.0093037101 +2025-02-18 19:30:42,184 Train Loss: 0.0004935, Val Loss: 0.0004756 +2025-02-18 19:30:42,185 Epoch 435/2000 +2025-02-18 19:31:23,643 Current Learning Rate: 0.0092632008 +2025-02-18 19:31:23,644 Train Loss: 0.0005562, Val Loss: 0.0004907 +2025-02-18 19:31:23,645 Epoch 436/2000 +2025-02-18 19:32:05,716 Current Learning Rate: 0.0092216396 +2025-02-18 19:32:05,716 Train Loss: 0.0003976, Val Loss: 0.0004793 +2025-02-18 19:32:05,716 Epoch 437/2000 +2025-02-18 19:32:48,391 Current Learning Rate: 0.0091790368 +2025-02-18 19:32:48,392 Train Loss: 0.0003841, Val Loss: 0.0005024 +2025-02-18 19:32:48,392 Epoch 438/2000 +2025-02-18 19:33:30,322 Current Learning 
Rate: 0.0091354029 +2025-02-18 19:33:30,322 Train Loss: 0.0006027, Val Loss: 0.0005281 +2025-02-18 19:33:30,323 Epoch 439/2000 +2025-02-18 19:34:13,167 Current Learning Rate: 0.0090907486 +2025-02-18 19:34:13,168 Train Loss: 0.0006028, Val Loss: 0.0005513 +2025-02-18 19:34:13,168 Epoch 440/2000 +2025-02-18 19:34:55,338 Current Learning Rate: 0.0090450850 +2025-02-18 19:34:55,339 Train Loss: 0.0008756, Val Loss: 0.0006729 +2025-02-18 19:34:55,339 Epoch 441/2000 +2025-02-18 19:35:37,783 Current Learning Rate: 0.0089984233 +2025-02-18 19:35:37,783 Train Loss: 0.0008495, Val Loss: 0.0007859 +2025-02-18 19:35:37,783 Epoch 442/2000 +2025-02-18 19:36:19,585 Current Learning Rate: 0.0089507751 +2025-02-18 19:36:19,586 Train Loss: 0.0006611, Val Loss: 0.0011604 +2025-02-18 19:36:19,586 Epoch 443/2000 +2025-02-18 19:37:01,939 Current Learning Rate: 0.0089021520 +2025-02-18 19:37:01,940 Train Loss: 0.0014813, Val Loss: 0.0011819 +2025-02-18 19:37:01,940 Epoch 444/2000 +2025-02-18 19:37:43,827 Current Learning Rate: 0.0088525662 +2025-02-18 19:37:43,828 Train Loss: 0.0008577, Val Loss: 0.0006556 +2025-02-18 19:37:43,828 Epoch 445/2000 +2025-02-18 19:38:25,575 Current Learning Rate: 0.0088020298 +2025-02-18 19:38:25,575 Train Loss: 0.0007838, Val Loss: 0.0011348 +2025-02-18 19:38:25,575 Epoch 446/2000 +2025-02-18 19:39:07,688 Current Learning Rate: 0.0087505553 +2025-02-18 19:39:07,689 Train Loss: 0.0006052, Val Loss: 0.0007102 +2025-02-18 19:39:07,689 Epoch 447/2000 +2025-02-18 19:39:49,232 Current Learning Rate: 0.0086981555 +2025-02-18 19:39:49,232 Train Loss: 0.0007764, Val Loss: 0.0005680 +2025-02-18 19:39:49,232 Epoch 448/2000 +2025-02-18 19:40:31,395 Current Learning Rate: 0.0086448431 +2025-02-18 19:40:31,395 Train Loss: 0.0005562, Val Loss: 0.0005067 +2025-02-18 19:40:31,396 Epoch 449/2000 +2025-02-18 19:41:13,661 Current Learning Rate: 0.0085906315 +2025-02-18 19:41:13,662 Train Loss: 0.0005712, Val Loss: 0.0005141 +2025-02-18 19:41:13,662 Epoch 450/2000 +2025-02-18 
19:41:56,302 Current Learning Rate: 0.0085355339 +2025-02-18 19:41:56,302 Train Loss: 0.0006742, Val Loss: 0.0005745 +2025-02-18 19:41:56,303 Epoch 451/2000 +2025-02-18 19:42:38,200 Current Learning Rate: 0.0084795640 +2025-02-18 19:42:38,201 Train Loss: 0.0006229, Val Loss: 0.0005439 +2025-02-18 19:42:38,201 Epoch 452/2000 +2025-02-18 19:43:20,434 Current Learning Rate: 0.0084227355 +2025-02-18 19:43:22,271 Train Loss: 0.0005119, Val Loss: 0.0004677 +2025-02-18 19:43:22,278 Epoch 453/2000 +2025-02-18 19:44:03,834 Current Learning Rate: 0.0083650626 +2025-02-18 19:44:03,834 Train Loss: 0.0007364, Val Loss: 0.0007350 +2025-02-18 19:44:03,835 Epoch 454/2000 +2025-02-18 19:44:45,360 Current Learning Rate: 0.0083065593 +2025-02-18 19:44:45,360 Train Loss: 0.0007379, Val Loss: 0.0005459 +2025-02-18 19:44:45,360 Epoch 455/2000 +2025-02-18 19:45:27,572 Current Learning Rate: 0.0082472402 +2025-02-18 19:45:27,572 Train Loss: 0.0006996, Val Loss: 0.0005822 +2025-02-18 19:45:27,572 Epoch 456/2000 +2025-02-18 19:46:09,659 Current Learning Rate: 0.0081871199 +2025-02-18 19:46:09,660 Train Loss: 0.0004520, Val Loss: 0.0004866 +2025-02-18 19:46:09,660 Epoch 457/2000 +2025-02-18 19:46:51,252 Current Learning Rate: 0.0081262133 +2025-02-18 19:46:51,253 Train Loss: 0.0006052, Val Loss: 0.0005053 +2025-02-18 19:46:51,253 Epoch 458/2000 +2025-02-18 19:47:33,742 Current Learning Rate: 0.0080645353 +2025-02-18 19:47:33,743 Train Loss: 0.0006190, Val Loss: 0.0004781 +2025-02-18 19:47:33,743 Epoch 459/2000 +2025-02-18 19:48:15,562 Current Learning Rate: 0.0080021011 +2025-02-18 19:48:15,563 Train Loss: 0.0006041, Val Loss: 0.0004863 +2025-02-18 19:48:15,563 Epoch 460/2000 +2025-02-18 19:48:57,204 Current Learning Rate: 0.0079389263 +2025-02-18 19:48:58,573 Train Loss: 0.0003799, Val Loss: 0.0004317 +2025-02-18 19:48:58,574 Epoch 461/2000 +2025-02-18 19:49:40,305 Current Learning Rate: 0.0078750263 +2025-02-18 19:49:40,305 Train Loss: 0.0005422, Val Loss: 0.0006141 +2025-02-18 
19:49:40,306 Epoch 462/2000 +2025-02-18 19:50:22,185 Current Learning Rate: 0.0078104169 +2025-02-18 19:50:22,185 Train Loss: 0.0007853, Val Loss: 0.0004837 +2025-02-18 19:50:22,185 Epoch 463/2000 +2025-02-18 19:51:03,731 Current Learning Rate: 0.0077451141 +2025-02-18 19:51:04,646 Train Loss: 0.0003548, Val Loss: 0.0004008 +2025-02-18 19:51:04,646 Epoch 464/2000 +2025-02-18 19:51:45,923 Current Learning Rate: 0.0076791340 +2025-02-18 19:51:45,924 Train Loss: 0.0004980, Val Loss: 0.0004498 +2025-02-18 19:51:45,924 Epoch 465/2000 +2025-02-18 19:52:28,203 Current Learning Rate: 0.0076124928 +2025-02-18 19:52:28,204 Train Loss: 0.0005148, Val Loss: 0.0004187 +2025-02-18 19:52:28,204 Epoch 466/2000 +2025-02-18 19:53:10,580 Current Learning Rate: 0.0075452071 +2025-02-18 19:53:10,580 Train Loss: 0.0004491, Val Loss: 0.0004449 +2025-02-18 19:53:10,581 Epoch 467/2000 +2025-02-18 19:53:52,667 Current Learning Rate: 0.0074772933 +2025-02-18 19:53:54,330 Train Loss: 0.0003183, Val Loss: 0.0003733 +2025-02-18 19:53:54,330 Epoch 468/2000 +2025-02-18 19:54:35,428 Current Learning Rate: 0.0074087684 +2025-02-18 19:54:35,429 Train Loss: 0.0004790, Val Loss: 0.0004833 +2025-02-18 19:54:35,429 Epoch 469/2000 +2025-02-18 19:55:17,493 Current Learning Rate: 0.0073396491 +2025-02-18 19:55:17,493 Train Loss: 0.0004525, Val Loss: 0.0003997 +2025-02-18 19:55:17,494 Epoch 470/2000 +2025-02-18 19:55:59,631 Current Learning Rate: 0.0072699525 +2025-02-18 19:55:59,632 Train Loss: 0.0005499, Val Loss: 0.0005679 +2025-02-18 19:55:59,632 Epoch 471/2000 +2025-02-18 19:56:42,046 Current Learning Rate: 0.0071996958 +2025-02-18 19:56:42,046 Train Loss: 0.0007884, Val Loss: 0.0004443 +2025-02-18 19:56:42,046 Epoch 472/2000 +2025-02-18 19:57:24,158 Current Learning Rate: 0.0071288965 +2025-02-18 19:57:24,158 Train Loss: 0.0005725, Val Loss: 0.0005078 +2025-02-18 19:57:24,158 Epoch 473/2000 +2025-02-18 19:58:06,154 Current Learning Rate: 0.0070575718 +2025-02-18 19:58:06,154 Train Loss: 0.0004556, Val 
Loss: 0.0004127 +2025-02-18 19:58:06,155 Epoch 474/2000 +2025-02-18 19:58:48,325 Current Learning Rate: 0.0069857395 +2025-02-18 19:58:49,976 Train Loss: 0.0004045, Val Loss: 0.0003711 +2025-02-18 19:58:49,976 Epoch 475/2000 +2025-02-18 19:59:30,980 Current Learning Rate: 0.0069134172 +2025-02-18 19:59:30,981 Train Loss: 0.0004232, Val Loss: 0.0003744 +2025-02-18 19:59:30,981 Epoch 476/2000 +2025-02-18 20:00:13,314 Current Learning Rate: 0.0068406228 +2025-02-18 20:00:13,314 Train Loss: 0.0003766, Val Loss: 0.0003917 +2025-02-18 20:00:13,314 Epoch 477/2000 +2025-02-18 20:00:55,367 Current Learning Rate: 0.0067673742 +2025-02-18 20:00:55,367 Train Loss: 0.0003884, Val Loss: 0.0003944 +2025-02-18 20:00:55,368 Epoch 478/2000 +2025-02-18 20:01:37,131 Current Learning Rate: 0.0066936896 +2025-02-18 20:01:37,131 Train Loss: 0.0006075, Val Loss: 0.0004759 +2025-02-18 20:01:37,132 Epoch 479/2000 +2025-02-18 20:02:19,491 Current Learning Rate: 0.0066195871 +2025-02-18 20:02:19,491 Train Loss: 0.0004698, Val Loss: 0.0004121 +2025-02-18 20:02:19,492 Epoch 480/2000 +2025-02-18 20:03:01,602 Current Learning Rate: 0.0065450850 +2025-02-18 20:03:01,602 Train Loss: 0.0004237, Val Loss: 0.0003810 +2025-02-18 20:03:01,602 Epoch 481/2000 +2025-02-18 20:03:43,782 Current Learning Rate: 0.0064702016 +2025-02-18 20:03:43,783 Train Loss: 0.0004413, Val Loss: 0.0003857 +2025-02-18 20:03:43,783 Epoch 482/2000 +2025-02-18 20:04:25,655 Current Learning Rate: 0.0063949555 +2025-02-18 20:04:25,656 Train Loss: 0.0003080, Val Loss: 0.0003937 +2025-02-18 20:04:25,656 Epoch 483/2000 +2025-02-18 20:05:07,769 Current Learning Rate: 0.0063193652 +2025-02-18 20:05:07,770 Train Loss: 0.0005708, Val Loss: 0.0005771 +2025-02-18 20:05:07,770 Epoch 484/2000 +2025-02-18 20:05:49,270 Current Learning Rate: 0.0062434494 +2025-02-18 20:05:49,270 Train Loss: 0.0004042, Val Loss: 0.0004315 +2025-02-18 20:05:49,271 Epoch 485/2000 +2025-02-18 20:06:31,490 Current Learning Rate: 0.0061672268 +2025-02-18 
20:06:31,490 Train Loss: 0.0004098, Val Loss: 0.0004201 +2025-02-18 20:06:31,490 Epoch 486/2000 +2025-02-18 20:07:14,420 Current Learning Rate: 0.0060907162 +2025-02-18 20:07:14,421 Train Loss: 0.0005263, Val Loss: 0.0004223 +2025-02-18 20:07:14,421 Epoch 487/2000 +2025-02-18 20:07:56,345 Current Learning Rate: 0.0060139365 +2025-02-18 20:07:56,345 Train Loss: 0.0003679, Val Loss: 0.0003711 +2025-02-18 20:07:56,345 Epoch 488/2000 +2025-02-18 20:08:38,518 Current Learning Rate: 0.0059369066 +2025-02-18 20:08:38,519 Train Loss: 0.0005354, Val Loss: 0.0004612 +2025-02-18 20:08:38,519 Epoch 489/2000 +2025-02-18 20:09:20,553 Current Learning Rate: 0.0058596455 +2025-02-18 20:09:20,554 Train Loss: 0.0004731, Val Loss: 0.0003831 +2025-02-18 20:09:20,554 Epoch 490/2000 +2025-02-18 20:10:02,945 Current Learning Rate: 0.0057821723 +2025-02-18 20:10:02,945 Train Loss: 0.0004212, Val Loss: 0.0004122 +2025-02-18 20:10:02,946 Epoch 491/2000 +2025-02-18 20:10:45,015 Current Learning Rate: 0.0057045062 +2025-02-18 20:10:46,927 Train Loss: 0.0004982, Val Loss: 0.0003638 +2025-02-18 20:10:46,927 Epoch 492/2000 +2025-02-18 20:11:28,410 Current Learning Rate: 0.0056266662 +2025-02-18 20:11:29,934 Train Loss: 0.0005396, Val Loss: 0.0003375 +2025-02-18 20:11:29,941 Epoch 493/2000 +2025-02-18 20:12:10,841 Current Learning Rate: 0.0055486716 +2025-02-18 20:12:10,842 Train Loss: 0.0004773, Val Loss: 0.0004535 +2025-02-18 20:12:10,842 Epoch 494/2000 +2025-02-18 20:12:53,082 Current Learning Rate: 0.0054705416 +2025-02-18 20:12:53,083 Train Loss: 0.0005965, Val Loss: 0.0004187 +2025-02-18 20:12:53,083 Epoch 495/2000 +2025-02-18 20:13:34,979 Current Learning Rate: 0.0053922955 +2025-02-18 20:13:34,980 Train Loss: 0.0005241, Val Loss: 0.0003464 +2025-02-18 20:13:34,980 Epoch 496/2000 +2025-02-18 20:14:16,804 Current Learning Rate: 0.0053139526 +2025-02-18 20:14:18,014 Train Loss: 0.0002688, Val Loss: 0.0003203 +2025-02-18 20:14:18,014 Epoch 497/2000 +2025-02-18 20:14:59,752 Current Learning 
Rate: 0.0052355323 +2025-02-18 20:14:59,753 Train Loss: 0.0004406, Val Loss: 0.0003698 +2025-02-18 20:14:59,754 Epoch 498/2000 +2025-02-18 20:15:41,709 Current Learning Rate: 0.0051570538 +2025-02-18 20:15:41,709 Train Loss: 0.0003715, Val Loss: 0.0003484 +2025-02-18 20:15:41,710 Epoch 499/2000 +2025-02-18 20:16:23,905 Current Learning Rate: 0.0050785366 +2025-02-18 20:16:23,905 Train Loss: 0.0003781, Val Loss: 0.0003582 +2025-02-18 20:16:23,906 Epoch 500/2000 +2025-02-18 20:17:06,158 Current Learning Rate: 0.0050000000 +2025-02-18 20:17:06,159 Train Loss: 0.0002624, Val Loss: 0.0003242 +2025-02-18 20:17:06,159 Epoch 501/2000 +2025-02-18 20:17:48,439 Current Learning Rate: 0.0049214634 +2025-02-18 20:17:48,439 Train Loss: 0.0003119, Val Loss: 0.0003366 +2025-02-18 20:17:48,440 Epoch 502/2000 +2025-02-18 20:18:30,564 Current Learning Rate: 0.0048429462 +2025-02-18 20:18:30,565 Train Loss: 0.0003463, Val Loss: 0.0003309 +2025-02-18 20:18:30,565 Epoch 503/2000 +2025-02-18 20:19:12,460 Current Learning Rate: 0.0047644677 +2025-02-18 20:19:12,461 Train Loss: 0.0003708, Val Loss: 0.0003349 +2025-02-18 20:19:12,461 Epoch 504/2000 +2025-02-18 20:19:54,139 Current Learning Rate: 0.0046860474 +2025-02-18 20:19:54,139 Train Loss: 0.0003091, Val Loss: 0.0003272 +2025-02-18 20:19:54,140 Epoch 505/2000 +2025-02-18 20:20:36,715 Current Learning Rate: 0.0046077045 +2025-02-18 20:20:36,715 Train Loss: 0.0003108, Val Loss: 0.0003311 +2025-02-18 20:20:36,715 Epoch 506/2000 +2025-02-18 20:21:19,174 Current Learning Rate: 0.0045294584 +2025-02-18 20:21:20,959 Train Loss: 0.0004131, Val Loss: 0.0003141 +2025-02-18 20:21:20,963 Epoch 507/2000 +2025-02-18 20:22:02,761 Current Learning Rate: 0.0044513284 +2025-02-18 20:22:04,352 Train Loss: 0.0002722, Val Loss: 0.0003069 +2025-02-18 20:22:04,353 Epoch 508/2000 +2025-02-18 20:22:46,224 Current Learning Rate: 0.0043733338 +2025-02-18 20:22:46,225 Train Loss: 0.0004167, Val Loss: 0.0003525 +2025-02-18 20:22:46,225 Epoch 509/2000 +2025-02-18 
20:23:28,091 Current Learning Rate: 0.0042954938 +2025-02-18 20:23:28,091 Train Loss: 0.0003157, Val Loss: 0.0003312 +2025-02-18 20:23:28,092 Epoch 510/2000 +2025-02-18 20:24:10,340 Current Learning Rate: 0.0042178277 +2025-02-18 20:24:10,343 Train Loss: 0.0003594, Val Loss: 0.0003238 +2025-02-18 20:24:10,344 Epoch 511/2000 +2025-02-18 20:24:52,729 Current Learning Rate: 0.0041403545 +2025-02-18 20:24:52,729 Train Loss: 0.0003570, Val Loss: 0.0003205 +2025-02-18 20:24:52,729 Epoch 512/2000 +2025-02-18 20:25:34,583 Current Learning Rate: 0.0040630934 +2025-02-18 20:25:34,584 Train Loss: 0.0004469, Val Loss: 0.0003230 +2025-02-18 20:25:34,584 Epoch 513/2000 +2025-02-18 20:26:16,918 Current Learning Rate: 0.0039860635 +2025-02-18 20:26:18,000 Train Loss: 0.0002437, Val Loss: 0.0003013 +2025-02-18 20:26:18,001 Epoch 514/2000 +2025-02-18 20:26:59,361 Current Learning Rate: 0.0039092838 +2025-02-18 20:26:59,362 Train Loss: 0.0004149, Val Loss: 0.0003221 +2025-02-18 20:26:59,363 Epoch 515/2000 +2025-02-18 20:27:41,329 Current Learning Rate: 0.0038327732 +2025-02-18 20:27:42,794 Train Loss: 0.0003974, Val Loss: 0.0002993 +2025-02-18 20:27:42,795 Epoch 516/2000 +2025-02-18 20:28:23,993 Current Learning Rate: 0.0037565506 +2025-02-18 20:28:25,306 Train Loss: 0.0002519, Val Loss: 0.0002987 +2025-02-18 20:28:25,307 Epoch 517/2000 +2025-02-18 20:29:07,080 Current Learning Rate: 0.0036806348 +2025-02-18 20:29:08,511 Train Loss: 0.0002545, Val Loss: 0.0002942 +2025-02-18 20:29:08,511 Epoch 518/2000 +2025-02-18 20:29:50,018 Current Learning Rate: 0.0036050445 +2025-02-18 20:29:51,597 Train Loss: 0.0002820, Val Loss: 0.0002911 +2025-02-18 20:29:51,598 Epoch 519/2000 +2025-02-18 20:30:32,915 Current Learning Rate: 0.0035297984 +2025-02-18 20:30:34,779 Train Loss: 0.0002594, Val Loss: 0.0002882 +2025-02-18 20:30:34,786 Epoch 520/2000 +2025-02-18 20:31:16,049 Current Learning Rate: 0.0034549150 +2025-02-18 20:31:17,757 Train Loss: 0.0003465, Val Loss: 0.0002798 +2025-02-18 
20:31:17,759 Epoch 521/2000 +2025-02-18 20:31:58,990 Current Learning Rate: 0.0033804129 +2025-02-18 20:31:58,990 Train Loss: 0.0002683, Val Loss: 0.0002808 +2025-02-18 20:31:58,991 Epoch 522/2000 +2025-02-18 20:32:40,628 Current Learning Rate: 0.0033063104 +2025-02-18 20:32:41,950 Train Loss: 0.0002855, Val Loss: 0.0002790 +2025-02-18 20:32:41,951 Epoch 523/2000 +2025-02-18 20:33:22,672 Current Learning Rate: 0.0032326258 +2025-02-18 20:33:22,673 Train Loss: 0.0002464, Val Loss: 0.0002810 +2025-02-18 20:33:22,673 Epoch 524/2000 +2025-02-18 20:34:05,478 Current Learning Rate: 0.0031593772 +2025-02-18 20:34:06,818 Train Loss: 0.0002622, Val Loss: 0.0002769 +2025-02-18 20:34:06,818 Epoch 525/2000 +2025-02-18 20:34:47,784 Current Learning Rate: 0.0030865828 +2025-02-18 20:34:47,785 Train Loss: 0.0003272, Val Loss: 0.0002777 +2025-02-18 20:34:47,785 Epoch 526/2000 +2025-02-18 20:35:29,789 Current Learning Rate: 0.0030142605 +2025-02-18 20:35:29,789 Train Loss: 0.0002613, Val Loss: 0.0002784 +2025-02-18 20:35:29,790 Epoch 527/2000 +2025-02-18 20:36:12,626 Current Learning Rate: 0.0029424282 +2025-02-18 20:36:12,627 Train Loss: 0.0002462, Val Loss: 0.0002777 +2025-02-18 20:36:12,627 Epoch 528/2000 +2025-02-18 20:36:54,346 Current Learning Rate: 0.0028711035 +2025-02-18 20:36:54,347 Train Loss: 0.0004061, Val Loss: 0.0003103 +2025-02-18 20:36:54,347 Epoch 529/2000 +2025-02-18 20:37:36,459 Current Learning Rate: 0.0028003042 +2025-02-18 20:37:38,279 Train Loss: 0.0002571, Val Loss: 0.0002736 +2025-02-18 20:37:38,280 Epoch 530/2000 +2025-02-18 20:38:19,946 Current Learning Rate: 0.0027300475 +2025-02-18 20:38:19,947 Train Loss: 0.0003015, Val Loss: 0.0002777 +2025-02-18 20:38:19,947 Epoch 531/2000 +2025-02-18 20:39:01,578 Current Learning Rate: 0.0026603509 +2025-02-18 20:39:01,578 Train Loss: 0.0002290, Val Loss: 0.0002745 +2025-02-18 20:39:01,578 Epoch 532/2000 +2025-02-18 20:39:43,348 Current Learning Rate: 0.0025912316 +2025-02-18 20:39:44,767 Train Loss: 0.0002785, Val 
Loss: 0.0002722 +2025-02-18 20:39:44,768 Epoch 533/2000 +2025-02-18 20:40:25,591 Current Learning Rate: 0.0025227067 +2025-02-18 20:40:25,592 Train Loss: 0.0002385, Val Loss: 0.0002724 +2025-02-18 20:40:25,592 Epoch 534/2000 +2025-02-18 20:41:07,652 Current Learning Rate: 0.0024547929 +2025-02-18 20:41:07,653 Train Loss: 0.0002859, Val Loss: 0.0002752 +2025-02-18 20:41:07,653 Epoch 535/2000 +2025-02-18 20:41:50,035 Current Learning Rate: 0.0023875072 +2025-02-18 20:41:50,035 Train Loss: 0.0002662, Val Loss: 0.0002760 +2025-02-18 20:41:50,036 Epoch 536/2000 +2025-02-18 20:42:31,685 Current Learning Rate: 0.0023208660 +2025-02-18 20:42:33,101 Train Loss: 0.0002035, Val Loss: 0.0002720 +2025-02-18 20:42:33,101 Epoch 537/2000 +2025-02-18 20:43:14,093 Current Learning Rate: 0.0022548859 +2025-02-18 20:43:14,093 Train Loss: 0.0002837, Val Loss: 0.0002741 +2025-02-18 20:43:14,094 Epoch 538/2000 +2025-02-18 20:43:56,295 Current Learning Rate: 0.0021895831 +2025-02-18 20:43:57,275 Train Loss: 0.0002225, Val Loss: 0.0002689 +2025-02-18 20:43:57,275 Epoch 539/2000 +2025-02-18 20:44:38,748 Current Learning Rate: 0.0021249737 +2025-02-18 20:44:38,749 Train Loss: 0.0002591, Val Loss: 0.0002701 +2025-02-18 20:44:38,749 Epoch 540/2000 +2025-02-18 20:45:20,845 Current Learning Rate: 0.0020610737 +2025-02-18 20:45:22,517 Train Loss: 0.0002778, Val Loss: 0.0002660 +2025-02-18 20:45:22,517 Epoch 541/2000 +2025-02-18 20:46:03,577 Current Learning Rate: 0.0019978989 +2025-02-18 20:46:03,578 Train Loss: 0.0002207, Val Loss: 0.0002723 +2025-02-18 20:46:03,579 Epoch 542/2000 +2025-02-18 20:46:45,639 Current Learning Rate: 0.0019354647 +2025-02-18 20:46:45,639 Train Loss: 0.0003223, Val Loss: 0.0002761 +2025-02-18 20:46:45,640 Epoch 543/2000 +2025-02-18 20:47:27,624 Current Learning Rate: 0.0018737867 +2025-02-18 20:47:27,625 Train Loss: 0.0002946, Val Loss: 0.0002819 +2025-02-18 20:47:27,625 Epoch 544/2000 +2025-02-18 20:48:09,321 Current Learning Rate: 0.0018128801 +2025-02-18 
20:48:10,974 Train Loss: 0.0002306, Val Loss: 0.0002601 +2025-02-18 20:48:10,974 Epoch 545/2000 +2025-02-18 20:48:51,761 Current Learning Rate: 0.0017527598 +2025-02-18 20:48:53,433 Train Loss: 0.0001874, Val Loss: 0.0002584 +2025-02-18 20:48:53,433 Epoch 546/2000 +2025-02-18 20:49:35,238 Current Learning Rate: 0.0016934407 +2025-02-18 20:49:35,239 Train Loss: 0.0003886, Val Loss: 0.0002627 +2025-02-18 20:49:35,240 Epoch 547/2000 +2025-02-18 20:50:17,124 Current Learning Rate: 0.0016349374 +2025-02-18 20:50:17,125 Train Loss: 0.0002434, Val Loss: 0.0002609 +2025-02-18 20:50:17,125 Epoch 548/2000 +2025-02-18 20:50:59,068 Current Learning Rate: 0.0015772645 +2025-02-18 20:51:00,255 Train Loss: 0.0002927, Val Loss: 0.0002559 +2025-02-18 20:51:00,256 Epoch 549/2000 +2025-02-18 20:51:41,553 Current Learning Rate: 0.0015204360 +2025-02-18 20:51:43,051 Train Loss: 0.0002755, Val Loss: 0.0002527 +2025-02-18 20:51:43,052 Epoch 550/2000 +2025-02-18 20:52:24,836 Current Learning Rate: 0.0014644661 +2025-02-18 20:52:25,865 Train Loss: 0.0002452, Val Loss: 0.0002502 +2025-02-18 20:52:25,865 Epoch 551/2000 +2025-02-18 20:53:06,808 Current Learning Rate: 0.0014093685 +2025-02-18 20:53:07,921 Train Loss: 0.0002218, Val Loss: 0.0002492 +2025-02-18 20:53:07,922 Epoch 552/2000 +2025-02-18 20:53:49,691 Current Learning Rate: 0.0013551569 +2025-02-18 20:53:51,572 Train Loss: 0.0002499, Val Loss: 0.0002487 +2025-02-18 20:53:51,572 Epoch 553/2000 +2025-02-18 20:54:33,009 Current Learning Rate: 0.0013018445 +2025-02-18 20:54:33,025 Train Loss: 0.0002559, Val Loss: 0.0002514 +2025-02-18 20:54:33,025 Epoch 554/2000 +2025-02-18 20:55:15,152 Current Learning Rate: 0.0012494447 +2025-02-18 20:55:15,152 Train Loss: 0.0002279, Val Loss: 0.0002492 +2025-02-18 20:55:15,152 Epoch 555/2000 +2025-02-18 20:55:57,511 Current Learning Rate: 0.0011979702 +2025-02-18 20:55:58,935 Train Loss: 0.0001903, Val Loss: 0.0002483 +2025-02-18 20:55:58,935 Epoch 556/2000 +2025-02-18 20:56:39,912 Current Learning 
Rate: 0.0011474338 +2025-02-18 20:56:39,913 Train Loss: 0.0002024, Val Loss: 0.0002522 +2025-02-18 20:56:39,913 Epoch 557/2000 +2025-02-18 20:57:22,321 Current Learning Rate: 0.0010978480 +2025-02-18 20:57:22,322 Train Loss: 0.0002124, Val Loss: 0.0002600 +2025-02-18 20:57:22,323 Epoch 558/2000 +2025-02-18 20:58:04,554 Current Learning Rate: 0.0010492249 +2025-02-18 20:58:04,555 Train Loss: 0.0002143, Val Loss: 0.0002581 +2025-02-18 20:58:04,555 Epoch 559/2000 +2025-02-18 20:58:46,628 Current Learning Rate: 0.0010015767 +2025-02-18 20:58:46,629 Train Loss: 0.0002643, Val Loss: 0.0002547 +2025-02-18 20:58:46,629 Epoch 560/2000 +2025-02-18 20:59:29,019 Current Learning Rate: 0.0009549150 +2025-02-18 20:59:29,020 Train Loss: 0.0003135, Val Loss: 0.0002499 +2025-02-18 20:59:29,020 Epoch 561/2000 +2025-02-18 21:00:11,028 Current Learning Rate: 0.0009092514 +2025-02-18 21:00:12,596 Train Loss: 0.0002040, Val Loss: 0.0002471 +2025-02-18 21:00:12,596 Epoch 562/2000 +2025-02-18 21:00:53,882 Current Learning Rate: 0.0008645971 +2025-02-18 21:00:54,972 Train Loss: 0.0003344, Val Loss: 0.0002460 +2025-02-18 21:00:54,978 Epoch 563/2000 +2025-02-18 21:01:36,353 Current Learning Rate: 0.0008209632 +2025-02-18 21:01:36,354 Train Loss: 0.0002724, Val Loss: 0.0002516 +2025-02-18 21:01:36,354 Epoch 564/2000 +2025-02-18 21:02:18,073 Current Learning Rate: 0.0007783604 +2025-02-18 21:02:19,356 Train Loss: 0.0002135, Val Loss: 0.0002449 +2025-02-18 21:02:19,356 Epoch 565/2000 +2025-02-18 21:03:00,283 Current Learning Rate: 0.0007367992 +2025-02-18 21:03:00,284 Train Loss: 0.0003281, Val Loss: 0.0002453 +2025-02-18 21:03:00,284 Epoch 566/2000 +2025-02-18 21:03:42,637 Current Learning Rate: 0.0006962899 +2025-02-18 21:03:44,278 Train Loss: 0.0002693, Val Loss: 0.0002442 +2025-02-18 21:03:44,279 Epoch 567/2000 +2025-02-18 21:04:25,125 Current Learning Rate: 0.0006568424 +2025-02-18 21:04:25,126 Train Loss: 0.0002088, Val Loss: 0.0002459 +2025-02-18 21:04:25,126 Epoch 568/2000 +2025-02-18 
21:05:07,461 Current Learning Rate: 0.0006184666 +2025-02-18 21:05:07,462 Train Loss: 0.0002961, Val Loss: 0.0002485 +2025-02-18 21:05:07,462 Epoch 569/2000 +2025-02-18 21:05:49,791 Current Learning Rate: 0.0005811718 +2025-02-18 21:05:49,792 Train Loss: 0.0002460, Val Loss: 0.0002454 +2025-02-18 21:05:49,792 Epoch 570/2000 +2025-02-18 21:06:31,776 Current Learning Rate: 0.0005449674 +2025-02-18 21:06:31,776 Train Loss: 0.0002234, Val Loss: 0.0002458 +2025-02-18 21:06:31,777 Epoch 571/2000 +2025-02-18 21:07:13,572 Current Learning Rate: 0.0005098621 +2025-02-18 21:07:14,708 Train Loss: 0.0002209, Val Loss: 0.0002413 +2025-02-18 21:07:14,708 Epoch 572/2000 +2025-02-18 21:07:56,224 Current Learning Rate: 0.0004758647 +2025-02-18 21:07:57,576 Train Loss: 0.0002310, Val Loss: 0.0002402 +2025-02-18 21:07:57,576 Epoch 573/2000 +2025-02-18 21:08:38,349 Current Learning Rate: 0.0004429836 +2025-02-18 21:08:39,214 Train Loss: 0.0002686, Val Loss: 0.0002395 +2025-02-18 21:08:39,215 Epoch 574/2000 +2025-02-18 21:09:20,646 Current Learning Rate: 0.0004112269 +2025-02-18 21:09:22,171 Train Loss: 0.0002137, Val Loss: 0.0002391 +2025-02-18 21:09:22,171 Epoch 575/2000 +2025-02-18 21:10:03,794 Current Learning Rate: 0.0003806023 +2025-02-18 21:10:05,487 Train Loss: 0.0002115, Val Loss: 0.0002389 +2025-02-18 21:10:05,487 Epoch 576/2000 +2025-02-18 21:10:47,021 Current Learning Rate: 0.0003511176 +2025-02-18 21:10:48,548 Train Loss: 0.0002499, Val Loss: 0.0002388 +2025-02-18 21:10:48,555 Epoch 577/2000 +2025-02-18 21:11:30,099 Current Learning Rate: 0.0003227798 +2025-02-18 21:11:31,832 Train Loss: 0.0002456, Val Loss: 0.0002387 +2025-02-18 21:11:31,833 Epoch 578/2000 +2025-02-18 21:12:12,502 Current Learning Rate: 0.0002955962 +2025-02-18 21:12:13,510 Train Loss: 0.0002681, Val Loss: 0.0002386 +2025-02-18 21:12:13,511 Epoch 579/2000 +2025-02-18 21:12:55,334 Current Learning Rate: 0.0002695732 +2025-02-18 21:12:56,534 Train Loss: 0.0003273, Val Loss: 0.0002385 +2025-02-18 
21:12:56,534 Epoch 580/2000 +2025-02-18 21:13:37,463 Current Learning Rate: 0.0002447174 +2025-02-18 21:13:38,490 Train Loss: 0.0002011, Val Loss: 0.0002381 +2025-02-18 21:13:38,493 Epoch 581/2000 +2025-02-18 21:14:20,015 Current Learning Rate: 0.0002210349 +2025-02-18 21:14:21,380 Train Loss: 0.0002540, Val Loss: 0.0002378 +2025-02-18 21:14:21,383 Epoch 582/2000 +2025-02-18 21:15:03,040 Current Learning Rate: 0.0001985316 +2025-02-18 21:15:04,888 Train Loss: 0.0002318, Val Loss: 0.0002376 +2025-02-18 21:15:04,888 Epoch 583/2000 +2025-02-18 21:15:46,653 Current Learning Rate: 0.0001772129 +2025-02-18 21:15:46,654 Train Loss: 0.0002896, Val Loss: 0.0002380 +2025-02-18 21:15:46,655 Epoch 584/2000 +2025-02-18 21:16:28,148 Current Learning Rate: 0.0001570842 +2025-02-18 21:16:29,400 Train Loss: 0.0003380, Val Loss: 0.0002376 +2025-02-18 21:16:29,400 Epoch 585/2000 +2025-02-18 21:17:10,647 Current Learning Rate: 0.0001381504 +2025-02-18 21:17:10,648 Train Loss: 0.0002447, Val Loss: 0.0002379 +2025-02-18 21:17:10,648 Epoch 586/2000 +2025-02-18 21:17:52,691 Current Learning Rate: 0.0001204162 +2025-02-18 21:17:54,123 Train Loss: 0.0002028, Val Loss: 0.0002375 +2025-02-18 21:17:54,123 Epoch 587/2000 +2025-02-18 21:18:35,216 Current Learning Rate: 0.0001038859 +2025-02-18 21:18:36,512 Train Loss: 0.0002826, Val Loss: 0.0002375 +2025-02-18 21:18:36,512 Epoch 588/2000 +2025-02-18 21:19:17,692 Current Learning Rate: 0.0000885637 +2025-02-18 21:19:19,234 Train Loss: 0.0001746, Val Loss: 0.0002374 +2025-02-18 21:19:19,235 Epoch 589/2000 +2025-02-18 21:20:00,428 Current Learning Rate: 0.0000744534 +2025-02-18 21:20:01,528 Train Loss: 0.0002355, Val Loss: 0.0002372 +2025-02-18 21:20:01,531 Epoch 590/2000 +2025-02-18 21:20:43,363 Current Learning Rate: 0.0000615583 +2025-02-18 21:20:44,780 Train Loss: 0.0003048, Val Loss: 0.0002372 +2025-02-18 21:20:44,781 Epoch 591/2000 +2025-02-18 21:21:25,900 Current Learning Rate: 0.0000498817 +2025-02-18 21:21:25,901 Train Loss: 0.0002310, Val 
Loss: 0.0002373 +2025-02-18 21:21:25,901 Epoch 592/2000 +2025-02-18 21:22:07,805 Current Learning Rate: 0.0000394265 +2025-02-18 21:22:07,806 Train Loss: 0.0002854, Val Loss: 0.0002372 +2025-02-18 21:22:07,806 Epoch 593/2000 +2025-02-18 21:22:50,003 Current Learning Rate: 0.0000301952 +2025-02-18 21:22:50,003 Train Loss: 0.0002099, Val Loss: 0.0002373 +2025-02-18 21:22:50,004 Epoch 594/2000 +2025-02-18 21:23:32,758 Current Learning Rate: 0.0000221902 +2025-02-18 21:23:34,592 Train Loss: 0.0002825, Val Loss: 0.0002371 +2025-02-18 21:23:34,592 Epoch 595/2000 +2025-02-18 21:24:16,216 Current Learning Rate: 0.0000154133 +2025-02-18 21:24:16,216 Train Loss: 0.0002642, Val Loss: 0.0002372 +2025-02-18 21:24:16,216 Epoch 596/2000 +2025-02-18 21:24:58,255 Current Learning Rate: 0.0000098664 +2025-02-18 21:25:00,223 Train Loss: 0.0002336, Val Loss: 0.0002371 +2025-02-18 21:25:00,225 Epoch 597/2000 +2025-02-18 21:25:41,672 Current Learning Rate: 0.0000055506 +2025-02-18 21:25:41,673 Train Loss: 0.0001997, Val Loss: 0.0002371 +2025-02-18 21:25:41,673 Epoch 598/2000 +2025-02-18 21:26:23,663 Current Learning Rate: 0.0000024672 +2025-02-18 21:26:23,664 Train Loss: 0.0001900, Val Loss: 0.0002372 +2025-02-18 21:26:23,664 Epoch 599/2000 +2025-02-18 21:27:05,936 Current Learning Rate: 0.0000006168 +2025-02-18 21:27:05,937 Train Loss: 0.0002084, Val Loss: 0.0002372 +2025-02-18 21:27:05,939 Epoch 600/2000 +2025-02-18 21:27:48,641 Current Learning Rate: 0.0000000000 +2025-02-18 21:27:48,641 Train Loss: 0.0002021, Val Loss: 0.0002372 +2025-02-18 21:27:48,642 Epoch 601/2000 +2025-02-18 21:28:30,510 Current Learning Rate: 0.0000006168 +2025-02-18 21:28:32,287 Train Loss: 0.0003145, Val Loss: 0.0002371 +2025-02-18 21:28:32,287 Epoch 602/2000 +2025-02-18 21:29:13,172 Current Learning Rate: 0.0000024672 +2025-02-18 21:29:14,830 Train Loss: 0.0002494, Val Loss: 0.0002370 +2025-02-18 21:29:14,830 Epoch 603/2000 +2025-02-18 21:29:56,334 Current Learning Rate: 0.0000055506 +2025-02-18 
21:29:56,335 Train Loss: 0.0001790, Val Loss: 0.0002372 +2025-02-18 21:29:56,335 Epoch 604/2000 +2025-02-18 21:30:38,288 Current Learning Rate: 0.0000098664 +2025-02-18 21:30:38,288 Train Loss: 0.0002461, Val Loss: 0.0002371 +2025-02-18 21:30:38,289 Epoch 605/2000 +2025-02-18 21:31:20,161 Current Learning Rate: 0.0000154133 +2025-02-18 21:31:20,162 Train Loss: 0.0002005, Val Loss: 0.0002372 +2025-02-18 21:31:20,162 Epoch 606/2000 +2025-02-18 21:32:02,448 Current Learning Rate: 0.0000221902 +2025-02-18 21:32:02,449 Train Loss: 0.0002321, Val Loss: 0.0002370 +2025-02-18 21:32:02,449 Epoch 607/2000 +2025-02-18 21:32:44,193 Current Learning Rate: 0.0000301952 +2025-02-18 21:32:44,194 Train Loss: 0.0001753, Val Loss: 0.0002372 +2025-02-18 21:32:44,194 Epoch 608/2000 +2025-02-18 21:33:26,367 Current Learning Rate: 0.0000394265 +2025-02-18 21:33:26,368 Train Loss: 0.0002089, Val Loss: 0.0002371 +2025-02-18 21:33:26,368 Epoch 609/2000 +2025-02-18 21:34:08,358 Current Learning Rate: 0.0000498817 +2025-02-18 21:34:08,358 Train Loss: 0.0002577, Val Loss: 0.0002372 +2025-02-18 21:34:08,359 Epoch 610/2000 +2025-02-18 21:34:50,157 Current Learning Rate: 0.0000615583 +2025-02-18 21:34:50,158 Train Loss: 0.0001925, Val Loss: 0.0002371 +2025-02-18 21:34:50,158 Epoch 611/2000 +2025-02-18 21:35:32,395 Current Learning Rate: 0.0000744534 +2025-02-18 21:35:32,396 Train Loss: 0.0002486, Val Loss: 0.0002371 +2025-02-18 21:35:32,396 Epoch 612/2000 +2025-02-18 21:36:14,798 Current Learning Rate: 0.0000885637 +2025-02-18 21:36:14,799 Train Loss: 0.0002794, Val Loss: 0.0002371 +2025-02-18 21:36:14,799 Epoch 613/2000 +2025-02-18 21:36:57,101 Current Learning Rate: 0.0001038859 +2025-02-18 21:36:57,101 Train Loss: 0.0002004, Val Loss: 0.0002371 +2025-02-18 21:36:57,101 Epoch 614/2000 +2025-02-18 21:37:38,980 Current Learning Rate: 0.0001204162 +2025-02-18 21:37:40,761 Train Loss: 0.0002439, Val Loss: 0.0002369 +2025-02-18 21:37:40,761 Epoch 615/2000 +2025-02-18 21:38:21,832 Current Learning 
Rate: 0.0001381504 +2025-02-18 21:38:21,833 Train Loss: 0.0001739, Val Loss: 0.0002370 +2025-02-18 21:38:21,833 Epoch 616/2000 +2025-02-18 21:39:03,535 Current Learning Rate: 0.0001570842 +2025-02-18 21:39:03,536 Train Loss: 0.0002387, Val Loss: 0.0002371 +2025-02-18 21:39:03,536 Epoch 617/2000 +2025-02-18 21:39:45,807 Current Learning Rate: 0.0001772129 +2025-02-18 21:39:45,807 Train Loss: 0.0002783, Val Loss: 0.0002370 +2025-02-18 21:39:45,808 Epoch 618/2000 +2025-02-18 21:40:28,243 Current Learning Rate: 0.0001985316 +2025-02-18 21:40:28,244 Train Loss: 0.0001920, Val Loss: 0.0002370 +2025-02-18 21:40:28,244 Epoch 619/2000 +2025-02-18 21:41:10,358 Current Learning Rate: 0.0002210349 +2025-02-18 21:41:11,708 Train Loss: 0.0002135, Val Loss: 0.0002368 +2025-02-18 21:41:11,708 Epoch 620/2000 +2025-02-18 21:41:52,692 Current Learning Rate: 0.0002447174 +2025-02-18 21:41:52,693 Train Loss: 0.0002199, Val Loss: 0.0002369 +2025-02-18 21:41:52,693 Epoch 621/2000 +2025-02-18 21:42:34,786 Current Learning Rate: 0.0002695732 +2025-02-18 21:42:34,786 Train Loss: 0.0002523, Val Loss: 0.0002375 +2025-02-18 21:42:34,786 Epoch 622/2000 +2025-02-18 21:43:16,938 Current Learning Rate: 0.0002955962 +2025-02-18 21:43:16,939 Train Loss: 0.0002158, Val Loss: 0.0002372 +2025-02-18 21:43:16,939 Epoch 623/2000 +2025-02-18 21:43:59,366 Current Learning Rate: 0.0003227798 +2025-02-18 21:43:59,367 Train Loss: 0.0002252, Val Loss: 0.0002377 +2025-02-18 21:43:59,367 Epoch 624/2000 +2025-02-18 21:44:41,360 Current Learning Rate: 0.0003511176 +2025-02-18 21:44:41,361 Train Loss: 0.0001752, Val Loss: 0.0002381 +2025-02-18 21:44:41,361 Epoch 625/2000 +2025-02-18 21:45:23,603 Current Learning Rate: 0.0003806023 +2025-02-18 21:45:23,604 Train Loss: 0.0002751, Val Loss: 0.0002408 +2025-02-18 21:45:23,605 Epoch 626/2000 +2025-02-18 21:46:05,636 Current Learning Rate: 0.0004112269 +2025-02-18 21:46:05,637 Train Loss: 0.0002363, Val Loss: 0.0002399 +2025-02-18 21:46:05,637 Epoch 627/2000 +2025-02-18 
23:52:05,727 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 23:52:05,770 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 23:52:05,813 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 23:52:05,842 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 23:52:05,861 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 23:52:05,881 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 23:52:05,933 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 23:52:05,952 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 23:52:27,214 Loading best model from checkpoint. +2025-02-18 23:52:27,216 Error loading model checkpoint: Ran out of input +2025-02-18 23:52:27,216 Epoch 1/2000 +2025-02-18 23:52:31,766 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,768 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,768 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,768 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,773 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,773 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,773 Reducer buckets have been rebuilt in this iteration. +2025-02-18 23:52:31,773 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 23:53:10,369 Current Learning Rate: 0.0099993832 +2025-02-18 23:53:12,083 Train Loss: 1.2713323, Val Loss: 0.1877115 +2025-02-18 23:53:12,083 Epoch 2/2000 +2025-02-18 23:53:53,600 Current Learning Rate: 0.0099975328 +2025-02-18 23:53:55,441 Train Loss: 0.0585225, Val Loss: 0.0228514 +2025-02-18 23:53:55,442 Epoch 3/2000 +2025-02-18 23:54:37,453 Current Learning Rate: 0.0099944494 +2025-02-18 23:54:38,813 Train Loss: 0.0233733, Val Loss: 0.0184301 +2025-02-18 23:54:38,813 Epoch 4/2000 +2025-02-18 23:55:20,752 Current Learning Rate: 0.0099901336 +2025-02-18 23:55:22,382 Train Loss: 0.0194500, Val Loss: 0.0175159 +2025-02-18 23:55:22,382 Epoch 5/2000 +2025-02-18 23:56:03,202 Current Learning Rate: 0.0099845867 +2025-02-18 23:56:04,949 Train Loss: 0.0187117, Val Loss: 0.0172202 +2025-02-18 23:56:04,949 Epoch 6/2000 +2025-02-18 23:56:47,157 Current Learning Rate: 0.0099778098 +2025-02-18 23:56:48,598 Train Loss: 0.0184526, Val Loss: 0.0170046 +2025-02-18 23:56:48,598 Epoch 7/2000 +2025-02-18 23:57:31,121 Current Learning Rate: 0.0099698048 +2025-02-18 23:57:32,821 Train Loss: 0.0182028, Val Loss: 0.0168065 +2025-02-18 23:57:32,822 Epoch 8/2000 +2025-02-18 23:58:14,404 Current Learning Rate: 0.0099605735 +2025-02-18 23:58:15,680 Train Loss: 0.0179927, Val Loss: 0.0166178 +2025-02-18 23:58:15,681 Epoch 9/2000 +2025-02-18 23:58:57,169 Current Learning Rate: 0.0099501183 +2025-02-18 23:58:58,154 Train Loss: 0.0178226, Val Loss: 0.0164603 +2025-02-18 23:58:58,154 Epoch 10/2000 +2025-02-18 23:59:39,714 Current Learning Rate: 0.0099384417 +2025-02-18 23:59:40,842 Train Loss: 0.0176944, Val Loss: 0.0163473 +2025-02-18 23:59:40,842 Epoch 11/2000 +2025-02-19 00:00:23,373 Current Learning Rate: 0.0099255466 +2025-02-19 00:00:24,376 Train Loss: 0.0175879, Val Loss: 0.0162707 +2025-02-19 00:00:24,376 Epoch 12/2000 +2025-02-19 00:01:06,702 Current Learning Rate: 0.0099114363 +2025-02-19 00:01:08,572 Train Loss: 0.0175217, Val Loss: 0.0162129 +2025-02-19 00:01:08,572 Epoch 
13/2000 +2025-02-19 00:01:49,670 Current Learning Rate: 0.0098961141 +2025-02-19 00:01:51,250 Train Loss: 0.0174747, Val Loss: 0.0161656 +2025-02-19 00:01:51,251 Epoch 14/2000 +2025-02-19 00:02:32,067 Current Learning Rate: 0.0098795838 +2025-02-19 00:02:33,064 Train Loss: 0.0174301, Val Loss: 0.0161140 +2025-02-19 00:02:33,064 Epoch 15/2000 +2025-02-19 00:03:15,371 Current Learning Rate: 0.0098618496 +2025-02-19 00:03:16,659 Train Loss: 0.0173940, Val Loss: 0.0160846 +2025-02-19 00:03:16,659 Epoch 16/2000 +2025-02-19 00:03:59,001 Current Learning Rate: 0.0098429158 +2025-02-19 00:04:01,014 Train Loss: 0.0173408, Val Loss: 0.0160427 +2025-02-19 00:04:01,014 Epoch 17/2000 +2025-02-19 00:04:42,319 Current Learning Rate: 0.0098227871 +2025-02-19 00:04:43,892 Train Loss: 0.0172985, Val Loss: 0.0160008 +2025-02-19 00:04:43,893 Epoch 18/2000 +2025-02-19 00:05:25,058 Current Learning Rate: 0.0098014684 +2025-02-19 00:05:26,202 Train Loss: 0.0172659, Val Loss: 0.0159679 +2025-02-19 00:05:26,202 Epoch 19/2000 +2025-02-19 00:06:08,596 Current Learning Rate: 0.0097789651 +2025-02-19 00:06:10,456 Train Loss: 0.0172262, Val Loss: 0.0159340 +2025-02-19 00:06:10,457 Epoch 20/2000 +2025-02-19 00:06:52,677 Current Learning Rate: 0.0097552826 +2025-02-19 00:06:53,769 Train Loss: 0.0171959, Val Loss: 0.0159043 +2025-02-19 00:06:53,770 Epoch 21/2000 +2025-02-19 00:07:36,145 Current Learning Rate: 0.0097304268 +2025-02-19 00:07:37,870 Train Loss: 0.0171638, Val Loss: 0.0158699 +2025-02-19 00:07:37,870 Epoch 22/2000 +2025-02-19 00:08:20,126 Current Learning Rate: 0.0097044038 +2025-02-19 00:08:21,544 Train Loss: 0.0171223, Val Loss: 0.0158384 +2025-02-19 00:08:21,544 Epoch 23/2000 +2025-02-19 00:09:04,151 Current Learning Rate: 0.0096772202 +2025-02-19 00:09:06,037 Train Loss: 0.0171020, Val Loss: 0.0158095 +2025-02-19 00:09:06,038 Epoch 24/2000 +2025-02-19 00:09:47,974 Current Learning Rate: 0.0096488824 +2025-02-19 00:09:49,763 Train Loss: 0.0170595, Val Loss: 0.0157796 +2025-02-19 
00:09:49,763 Epoch 25/2000 +2025-02-19 00:10:31,925 Current Learning Rate: 0.0096193977 +2025-02-19 00:10:33,663 Train Loss: 0.0170211, Val Loss: 0.0157428 +2025-02-19 00:10:33,664 Epoch 26/2000 +2025-02-19 00:11:14,957 Current Learning Rate: 0.0095887731 +2025-02-19 00:11:16,407 Train Loss: 0.0169861, Val Loss: 0.0157121 +2025-02-19 00:11:16,408 Epoch 27/2000 +2025-02-19 00:11:58,587 Current Learning Rate: 0.0095570164 +2025-02-19 00:12:00,081 Train Loss: 0.0169497, Val Loss: 0.0156837 +2025-02-19 00:12:00,081 Epoch 28/2000 +2025-02-19 00:12:41,766 Current Learning Rate: 0.0095241353 +2025-02-19 00:12:42,968 Train Loss: 0.0169167, Val Loss: 0.0156643 +2025-02-19 00:12:42,974 Epoch 29/2000 +2025-02-19 00:13:23,745 Current Learning Rate: 0.0094901379 +2025-02-19 00:13:24,630 Train Loss: 0.0168896, Val Loss: 0.0156279 +2025-02-19 00:13:24,630 Epoch 30/2000 +2025-02-19 00:14:06,134 Current Learning Rate: 0.0094550326 +2025-02-19 00:14:07,101 Train Loss: 0.0168502, Val Loss: 0.0155942 +2025-02-19 00:14:07,102 Epoch 31/2000 +2025-02-19 00:14:48,172 Current Learning Rate: 0.0094188282 +2025-02-19 00:14:49,337 Train Loss: 0.0168141, Val Loss: 0.0155678 +2025-02-19 00:14:49,341 Epoch 32/2000 +2025-02-19 00:15:30,212 Current Learning Rate: 0.0093815334 +2025-02-19 00:15:31,077 Train Loss: 0.0167845, Val Loss: 0.0155434 +2025-02-19 00:15:31,078 Epoch 33/2000 +2025-02-19 00:16:12,499 Current Learning Rate: 0.0093431576 +2025-02-19 00:16:13,769 Train Loss: 0.0167530, Val Loss: 0.0155105 +2025-02-19 00:16:13,770 Epoch 34/2000 +2025-02-19 00:16:56,059 Current Learning Rate: 0.0093037101 +2025-02-19 00:16:57,867 Train Loss: 0.0167198, Val Loss: 0.0154825 +2025-02-19 00:16:57,867 Epoch 35/2000 +2025-02-19 00:17:39,942 Current Learning Rate: 0.0092632008 +2025-02-19 00:17:41,784 Train Loss: 0.0166898, Val Loss: 0.0154535 +2025-02-19 00:17:41,784 Epoch 36/2000 +2025-02-19 00:18:22,440 Current Learning Rate: 0.0092216396 +2025-02-19 00:18:23,654 Train Loss: 0.0166454, Val Loss: 
0.0154188 +2025-02-19 00:18:23,654 Epoch 37/2000 +2025-02-19 00:19:05,536 Current Learning Rate: 0.0091790368 +2025-02-19 00:19:07,121 Train Loss: 0.0166134, Val Loss: 0.0153854 +2025-02-19 00:19:07,121 Epoch 38/2000 +2025-02-19 00:19:49,226 Current Learning Rate: 0.0091354029 +2025-02-19 00:19:51,145 Train Loss: 0.0165783, Val Loss: 0.0153603 +2025-02-19 00:19:51,145 Epoch 39/2000 +2025-02-19 00:20:32,644 Current Learning Rate: 0.0090907486 +2025-02-19 00:20:34,321 Train Loss: 0.0165472, Val Loss: 0.0153303 +2025-02-19 00:20:34,322 Epoch 40/2000 +2025-02-19 00:21:16,292 Current Learning Rate: 0.0090450850 +2025-02-19 00:21:17,661 Train Loss: 0.0165110, Val Loss: 0.0153032 +2025-02-19 00:21:17,661 Epoch 41/2000 +2025-02-19 00:21:58,529 Current Learning Rate: 0.0089984233 +2025-02-19 00:21:59,493 Train Loss: 0.0164806, Val Loss: 0.0152741 +2025-02-19 00:21:59,493 Epoch 42/2000 +2025-02-19 00:22:41,620 Current Learning Rate: 0.0089507751 +2025-02-19 00:22:43,019 Train Loss: 0.0164557, Val Loss: 0.0152447 +2025-02-19 00:22:43,019 Epoch 43/2000 +2025-02-19 00:23:25,155 Current Learning Rate: 0.0089021520 +2025-02-19 00:23:27,049 Train Loss: 0.0164259, Val Loss: 0.0152163 +2025-02-19 00:23:27,050 Epoch 44/2000 +2025-02-19 00:24:08,439 Current Learning Rate: 0.0088525662 +2025-02-19 00:24:10,352 Train Loss: 0.0163895, Val Loss: 0.0151869 +2025-02-19 00:24:10,352 Epoch 45/2000 +2025-02-19 00:24:51,072 Current Learning Rate: 0.0088020298 +2025-02-19 00:24:52,058 Train Loss: 0.0163617, Val Loss: 0.0151578 +2025-02-19 00:24:52,059 Epoch 46/2000 +2025-02-19 00:25:33,388 Current Learning Rate: 0.0087505553 +2025-02-19 00:25:34,614 Train Loss: 0.0163289, Val Loss: 0.0151264 +2025-02-19 00:25:34,614 Epoch 47/2000 +2025-02-19 00:26:16,598 Current Learning Rate: 0.0086981555 +2025-02-19 00:26:18,210 Train Loss: 0.0163045, Val Loss: 0.0150961 +2025-02-19 00:26:18,210 Epoch 48/2000 +2025-02-19 00:26:59,790 Current Learning Rate: 0.0086448431 +2025-02-19 00:27:01,011 Train Loss: 
0.0162678, Val Loss: 0.0150781 +2025-02-19 00:27:01,014 Epoch 49/2000 +2025-02-19 00:27:42,101 Current Learning Rate: 0.0085906315 +2025-02-19 00:27:43,304 Train Loss: 0.0162393, Val Loss: 0.0150428 +2025-02-19 00:27:43,305 Epoch 50/2000 +2025-02-19 00:28:25,522 Current Learning Rate: 0.0085355339 +2025-02-19 00:28:27,438 Train Loss: 0.0161828, Val Loss: 0.0149828 +2025-02-19 00:28:27,439 Epoch 51/2000 +2025-02-19 00:29:09,567 Current Learning Rate: 0.0084795640 +2025-02-19 00:29:11,486 Train Loss: 0.0160165, Val Loss: 0.0146608 +2025-02-19 00:29:11,486 Epoch 52/2000 +2025-02-19 00:29:53,739 Current Learning Rate: 0.0084227355 +2025-02-19 00:29:55,676 Train Loss: 0.0154740, Val Loss: 0.0140274 +2025-02-19 00:29:55,677 Epoch 53/2000 +2025-02-19 00:30:36,361 Current Learning Rate: 0.0083650626 +2025-02-19 00:30:37,760 Train Loss: 0.0147387, Val Loss: 0.0131640 +2025-02-19 00:30:37,761 Epoch 54/2000 +2025-02-19 00:31:19,758 Current Learning Rate: 0.0083065593 +2025-02-19 00:31:21,238 Train Loss: 0.0138583, Val Loss: 0.0124248 +2025-02-19 00:31:21,238 Epoch 55/2000 +2025-02-19 00:32:03,404 Current Learning Rate: 0.0082472402 +2025-02-19 00:32:05,514 Train Loss: 0.0123703, Val Loss: 0.0105863 +2025-02-19 00:32:05,515 Epoch 56/2000 +2025-02-19 00:32:47,576 Current Learning Rate: 0.0081871199 +2025-02-19 00:32:49,309 Train Loss: 0.0110420, Val Loss: 0.0094930 +2025-02-19 00:32:49,310 Epoch 57/2000 +2025-02-19 00:33:30,484 Current Learning Rate: 0.0081262133 +2025-02-19 00:33:32,383 Train Loss: 0.0101265, Val Loss: 0.0088337 +2025-02-19 00:33:32,383 Epoch 58/2000 +2025-02-19 00:34:12,892 Current Learning Rate: 0.0080645353 +2025-02-19 00:34:14,107 Train Loss: 0.0093373, Val Loss: 0.0085233 +2025-02-19 00:34:14,114 Epoch 59/2000 +2025-02-19 00:34:54,973 Current Learning Rate: 0.0080021011 +2025-02-19 00:34:56,148 Train Loss: 0.0090224, Val Loss: 0.0078853 +2025-02-19 00:34:56,148 Epoch 60/2000 +2025-02-19 00:35:37,527 Current Learning Rate: 0.0079389263 +2025-02-19 
00:35:38,760 Train Loss: 0.0081421, Val Loss: 0.0070970 +2025-02-19 00:35:38,760 Epoch 61/2000 +2025-02-19 00:36:21,233 Current Learning Rate: 0.0078750263 +2025-02-19 00:36:22,655 Train Loss: 0.0078801, Val Loss: 0.0069114 +2025-02-19 00:36:22,656 Epoch 62/2000 +2025-02-19 00:37:04,875 Current Learning Rate: 0.0078104169 +2025-02-19 00:37:06,008 Train Loss: 0.0075112, Val Loss: 0.0065039 +2025-02-19 00:37:06,009 Epoch 63/2000 +2025-02-19 00:37:48,191 Current Learning Rate: 0.0077451141 +2025-02-19 00:37:49,640 Train Loss: 0.0069967, Val Loss: 0.0062146 +2025-02-19 00:37:49,641 Epoch 64/2000 +2025-02-19 00:38:31,772 Current Learning Rate: 0.0076791340 +2025-02-19 00:38:33,510 Train Loss: 0.0068503, Val Loss: 0.0061507 +2025-02-19 00:38:33,511 Epoch 65/2000 +2025-02-19 00:39:14,471 Current Learning Rate: 0.0076124928 +2025-02-19 00:39:16,122 Train Loss: 0.0064475, Val Loss: 0.0057761 +2025-02-19 00:39:16,122 Epoch 66/2000 +2025-02-19 00:39:58,325 Current Learning Rate: 0.0075452071 +2025-02-19 00:39:59,544 Train Loss: 0.0060769, Val Loss: 0.0054408 +2025-02-19 00:39:59,544 Epoch 67/2000 +2025-02-19 00:40:41,288 Current Learning Rate: 0.0074772933 +2025-02-19 00:40:43,024 Train Loss: 0.0059078, Val Loss: 0.0053517 +2025-02-19 00:40:43,024 Epoch 68/2000 +2025-02-19 00:41:24,543 Current Learning Rate: 0.0074087684 +2025-02-19 00:41:26,255 Train Loss: 0.0056297, Val Loss: 0.0050737 +2025-02-19 00:41:26,257 Epoch 69/2000 +2025-02-19 00:42:08,546 Current Learning Rate: 0.0073396491 +2025-02-19 00:42:10,355 Train Loss: 0.0056451, Val Loss: 0.0049415 +2025-02-19 00:42:10,356 Epoch 70/2000 +2025-02-19 00:42:50,968 Current Learning Rate: 0.0072699525 +2025-02-19 00:42:52,292 Train Loss: 0.0053579, Val Loss: 0.0048111 +2025-02-19 00:42:52,293 Epoch 71/2000 +2025-02-19 00:43:33,823 Current Learning Rate: 0.0071996958 +2025-02-19 00:43:33,824 Train Loss: 0.0052464, Val Loss: 0.0048742 +2025-02-19 00:43:33,824 Epoch 72/2000 +2025-02-19 00:44:16,325 Current Learning Rate: 
0.0071288965 +2025-02-19 00:44:17,905 Train Loss: 0.0051129, Val Loss: 0.0046838 +2025-02-19 00:44:17,905 Epoch 73/2000 +2025-02-19 00:45:00,295 Current Learning Rate: 0.0070575718 +2025-02-19 00:45:01,414 Train Loss: 0.0049464, Val Loss: 0.0045638 +2025-02-19 00:45:01,414 Epoch 74/2000 +2025-02-19 00:45:42,364 Current Learning Rate: 0.0069857395 +2025-02-19 00:45:43,211 Train Loss: 0.0050081, Val Loss: 0.0045385 +2025-02-19 00:45:43,212 Epoch 75/2000 +2025-02-19 00:46:25,484 Current Learning Rate: 0.0069134172 +2025-02-19 00:46:27,327 Train Loss: 0.0048070, Val Loss: 0.0044704 +2025-02-19 00:46:27,327 Epoch 76/2000 +2025-02-19 00:47:09,481 Current Learning Rate: 0.0068406228 +2025-02-19 00:47:11,414 Train Loss: 0.0047897, Val Loss: 0.0043314 +2025-02-19 00:47:11,414 Epoch 77/2000 +2025-02-19 00:47:53,672 Current Learning Rate: 0.0067673742 +2025-02-19 00:47:55,268 Train Loss: 0.0048592, Val Loss: 0.0042489 +2025-02-19 00:47:55,269 Epoch 78/2000 +2025-02-19 00:48:37,382 Current Learning Rate: 0.0066936896 +2025-02-19 00:48:39,190 Train Loss: 0.0048049, Val Loss: 0.0042023 +2025-02-19 00:48:39,191 Epoch 79/2000 +2025-02-19 00:49:19,872 Current Learning Rate: 0.0066195871 +2025-02-19 00:49:21,111 Train Loss: 0.0045723, Val Loss: 0.0041746 +2025-02-19 00:49:21,112 Epoch 80/2000 +2025-02-19 00:50:02,234 Current Learning Rate: 0.0065450850 +2025-02-19 00:50:04,365 Train Loss: 0.0044051, Val Loss: 0.0041263 +2025-02-19 00:50:04,365 Epoch 81/2000 +2025-02-19 00:50:46,072 Current Learning Rate: 0.0064702016 +2025-02-19 00:50:47,475 Train Loss: 0.0043537, Val Loss: 0.0040353 +2025-02-19 00:50:47,475 Epoch 82/2000 +2025-02-19 00:51:29,286 Current Learning Rate: 0.0063949555 +2025-02-19 00:51:29,287 Train Loss: 0.0043434, Val Loss: 0.0044553 +2025-02-19 00:51:29,287 Epoch 83/2000 +2025-02-19 00:52:10,937 Current Learning Rate: 0.0063193652 +2025-02-19 00:52:12,230 Train Loss: 0.0041930, Val Loss: 0.0039030 +2025-02-19 00:52:12,230 Epoch 84/2000 +2025-02-19 00:52:53,120 
Current Learning Rate: 0.0062434494 +2025-02-19 00:52:54,213 Train Loss: 0.0041489, Val Loss: 0.0038140 +2025-02-19 00:52:54,214 Epoch 85/2000 +2025-02-19 00:53:35,623 Current Learning Rate: 0.0061672268 +2025-02-19 00:53:37,469 Train Loss: 0.0042178, Val Loss: 0.0037432 +2025-02-19 00:53:37,469 Epoch 86/2000 +2025-02-19 00:54:18,808 Current Learning Rate: 0.0060907162 +2025-02-19 00:54:20,765 Train Loss: 0.0039734, Val Loss: 0.0036961 +2025-02-19 00:54:20,766 Epoch 87/2000 +2025-02-19 00:55:01,590 Current Learning Rate: 0.0060139365 +2025-02-19 00:55:03,675 Train Loss: 0.0041091, Val Loss: 0.0036200 +2025-02-19 00:55:03,678 Epoch 88/2000 +2025-02-19 00:55:44,651 Current Learning Rate: 0.0059369066 +2025-02-19 00:55:46,024 Train Loss: 0.0037831, Val Loss: 0.0035179 +2025-02-19 00:55:46,024 Epoch 89/2000 +2025-02-19 00:56:27,883 Current Learning Rate: 0.0058596455 +2025-02-19 00:56:29,511 Train Loss: 0.0036041, Val Loss: 0.0034365 +2025-02-19 00:56:29,511 Epoch 90/2000 +2025-02-19 00:57:10,479 Current Learning Rate: 0.0057821723 +2025-02-19 00:57:12,398 Train Loss: 0.0035541, Val Loss: 0.0033922 +2025-02-19 00:57:12,398 Epoch 91/2000 +2025-02-19 00:57:53,179 Current Learning Rate: 0.0057045062 +2025-02-19 00:57:54,536 Train Loss: 0.0036483, Val Loss: 0.0033826 +2025-02-19 00:57:54,536 Epoch 92/2000 +2025-02-19 00:58:35,390 Current Learning Rate: 0.0056266662 +2025-02-19 00:58:36,836 Train Loss: 0.0034998, Val Loss: 0.0033098 +2025-02-19 00:58:36,837 Epoch 93/2000 +2025-02-19 00:59:17,820 Current Learning Rate: 0.0055486716 +2025-02-19 00:59:17,821 Train Loss: 0.0035329, Val Loss: 0.0034519 +2025-02-19 00:59:17,821 Epoch 94/2000 +2025-02-19 01:00:00,650 Current Learning Rate: 0.0054705416 +2025-02-19 01:00:02,300 Train Loss: 0.0034917, Val Loss: 0.0032451 +2025-02-19 01:00:02,300 Epoch 95/2000 +2025-02-19 01:00:43,771 Current Learning Rate: 0.0053922955 +2025-02-19 01:00:45,685 Train Loss: 0.0032342, Val Loss: 0.0032376 +2025-02-19 01:00:45,685 Epoch 96/2000 
+2025-02-19 01:01:26,269 Current Learning Rate: 0.0053139526 +2025-02-19 01:01:27,554 Train Loss: 0.0036386, Val Loss: 0.0031401 +2025-02-19 01:01:27,559 Epoch 97/2000 +2025-02-19 01:02:08,472 Current Learning Rate: 0.0052355323 +2025-02-19 01:02:09,855 Train Loss: 0.0032044, Val Loss: 0.0030463 +2025-02-19 01:02:09,855 Epoch 98/2000 +2025-02-19 01:02:51,191 Current Learning Rate: 0.0051570538 +2025-02-19 01:02:51,191 Train Loss: 0.0032658, Val Loss: 0.0030497 +2025-02-19 01:02:51,192 Epoch 99/2000 +2025-02-19 01:03:33,084 Current Learning Rate: 0.0050785366 +2025-02-19 01:03:34,532 Train Loss: 0.0031274, Val Loss: 0.0029924 +2025-02-19 01:03:34,532 Epoch 100/2000 +2025-02-19 01:04:16,717 Current Learning Rate: 0.0050000000 +2025-02-19 01:04:18,460 Train Loss: 0.0030784, Val Loss: 0.0029593 +2025-02-19 01:04:18,460 Epoch 101/2000 +2025-02-19 01:04:59,987 Current Learning Rate: 0.0049214634 +2025-02-19 01:05:01,534 Train Loss: 0.0030786, Val Loss: 0.0028907 +2025-02-19 01:05:01,534 Epoch 102/2000 +2025-02-19 01:05:42,713 Current Learning Rate: 0.0048429462 +2025-02-19 01:05:44,437 Train Loss: 0.0030722, Val Loss: 0.0028884 +2025-02-19 01:05:44,438 Epoch 103/2000 +2025-02-19 01:06:25,903 Current Learning Rate: 0.0047644677 +2025-02-19 01:06:27,286 Train Loss: 0.0029191, Val Loss: 0.0028590 +2025-02-19 01:06:27,286 Epoch 104/2000 +2025-02-19 01:07:08,045 Current Learning Rate: 0.0046860474 +2025-02-19 01:07:08,045 Train Loss: 0.0031066, Val Loss: 0.0029825 +2025-02-19 01:07:08,046 Epoch 105/2000 +2025-02-19 01:07:50,916 Current Learning Rate: 0.0046077045 +2025-02-19 01:07:50,917 Train Loss: 0.0030302, Val Loss: 0.0029459 +2025-02-19 01:07:50,918 Epoch 106/2000 +2025-02-19 01:08:33,422 Current Learning Rate: 0.0045294584 +2025-02-19 01:08:33,423 Train Loss: 0.0030785, Val Loss: 0.0028987 +2025-02-19 01:08:33,423 Epoch 107/2000 +2025-02-19 01:09:15,293 Current Learning Rate: 0.0044513284 +2025-02-19 01:09:15,294 Train Loss: 0.0028277, Val Loss: 0.0028687 +2025-02-19 
01:09:15,294 Epoch 108/2000 +2025-02-19 01:09:57,945 Current Learning Rate: 0.0043733338 +2025-02-19 01:10:00,084 Train Loss: 0.0027936, Val Loss: 0.0027492 +2025-02-19 01:10:00,084 Epoch 109/2000 +2025-02-19 01:10:41,573 Current Learning Rate: 0.0042954938 +2025-02-19 01:10:42,792 Train Loss: 0.0030450, Val Loss: 0.0027433 +2025-02-19 01:10:42,793 Epoch 110/2000 +2025-02-19 01:11:24,042 Current Learning Rate: 0.0042178277 +2025-02-19 01:11:25,495 Train Loss: 0.0029646, Val Loss: 0.0026839 +2025-02-19 01:11:25,495 Epoch 111/2000 +2025-02-19 01:12:06,261 Current Learning Rate: 0.0041403545 +2025-02-19 01:12:07,147 Train Loss: 0.0026406, Val Loss: 0.0026818 +2025-02-19 01:12:07,147 Epoch 112/2000 +2025-02-19 01:12:49,296 Current Learning Rate: 0.0040630934 +2025-02-19 01:12:50,918 Train Loss: 0.0025529, Val Loss: 0.0026616 +2025-02-19 01:12:50,919 Epoch 113/2000 +2025-02-19 01:13:32,620 Current Learning Rate: 0.0039860635 +2025-02-19 01:13:32,621 Train Loss: 0.0027606, Val Loss: 0.0027820 +2025-02-19 01:13:32,621 Epoch 114/2000 +2025-02-19 01:14:14,666 Current Learning Rate: 0.0039092838 +2025-02-19 01:14:14,666 Train Loss: 0.0027141, Val Loss: 0.0026850 +2025-02-19 01:14:14,667 Epoch 115/2000 +2025-02-19 01:14:56,804 Current Learning Rate: 0.0038327732 +2025-02-19 01:14:58,494 Train Loss: 0.0032804, Val Loss: 0.0026386 +2025-02-19 01:14:58,502 Epoch 116/2000 +2025-02-19 01:15:39,812 Current Learning Rate: 0.0037565506 +2025-02-19 01:15:41,736 Train Loss: 0.0027418, Val Loss: 0.0025669 +2025-02-19 01:15:41,736 Epoch 117/2000 +2025-02-19 01:16:23,911 Current Learning Rate: 0.0036806348 +2025-02-19 01:16:25,042 Train Loss: 0.0027382, Val Loss: 0.0025666 +2025-02-19 01:16:25,045 Epoch 118/2000 +2025-02-19 01:17:07,679 Current Learning Rate: 0.0036050445 +2025-02-19 01:17:08,896 Train Loss: 0.0025141, Val Loss: 0.0024873 +2025-02-19 01:17:08,897 Epoch 119/2000 +2025-02-19 01:17:49,897 Current Learning Rate: 0.0035297984 +2025-02-19 01:17:51,565 Train Loss: 0.0027708, Val 
Loss: 0.0024813 +2025-02-19 01:17:51,566 Epoch 120/2000 +2025-02-19 01:18:32,628 Current Learning Rate: 0.0034549150 +2025-02-19 01:18:32,629 Train Loss: 0.0024446, Val Loss: 0.0025053 +2025-02-19 01:18:32,629 Epoch 121/2000 +2025-02-19 01:19:14,788 Current Learning Rate: 0.0033804129 +2025-02-19 01:19:14,789 Train Loss: 0.0025420, Val Loss: 0.0025618 +2025-02-19 01:19:14,789 Epoch 122/2000 +2025-02-19 01:19:57,815 Current Learning Rate: 0.0033063104 +2025-02-19 01:19:57,815 Train Loss: 0.0025295, Val Loss: 0.0025013 +2025-02-19 01:19:57,816 Epoch 123/2000 +2025-02-19 01:20:39,525 Current Learning Rate: 0.0032326258 +2025-02-19 01:20:40,691 Train Loss: 0.0023033, Val Loss: 0.0024400 +2025-02-19 01:20:40,691 Epoch 124/2000 +2025-02-19 01:21:21,799 Current Learning Rate: 0.0031593772 +2025-02-19 01:21:23,350 Train Loss: 0.0024821, Val Loss: 0.0024185 +2025-02-19 01:21:23,350 Epoch 125/2000 +2025-02-19 01:22:04,078 Current Learning Rate: 0.0030865828 +2025-02-19 01:22:05,077 Train Loss: 0.0024515, Val Loss: 0.0023806 +2025-02-19 01:22:05,077 Epoch 126/2000 +2025-02-19 01:22:47,232 Current Learning Rate: 0.0030142605 +2025-02-19 01:22:49,147 Train Loss: 0.0022217, Val Loss: 0.0023296 +2025-02-19 01:22:49,147 Epoch 127/2000 +2025-02-19 01:23:31,347 Current Learning Rate: 0.0029424282 +2025-02-19 01:23:33,077 Train Loss: 0.0023471, Val Loss: 0.0023265 +2025-02-19 01:23:33,077 Epoch 128/2000 +2025-02-19 01:24:15,332 Current Learning Rate: 0.0028711035 +2025-02-19 01:24:15,333 Train Loss: 0.0023360, Val Loss: 0.0023666 +2025-02-19 01:24:15,333 Epoch 129/2000 +2025-02-19 01:24:57,100 Current Learning Rate: 0.0028003042 +2025-02-19 01:24:57,100 Train Loss: 0.0026158, Val Loss: 0.0023867 +2025-02-19 01:24:57,100 Epoch 130/2000 +2025-02-19 01:25:39,222 Current Learning Rate: 0.0027300475 +2025-02-19 01:25:40,250 Train Loss: 0.0021960, Val Loss: 0.0022482 +2025-02-19 01:25:40,251 Epoch 131/2000 +2025-02-19 01:26:21,577 Current Learning Rate: 0.0026603509 +2025-02-19 
01:26:22,700 Train Loss: 0.0023145, Val Loss: 0.0022383 +2025-02-19 01:26:22,701 Epoch 132/2000 +2025-02-19 01:27:04,973 Current Learning Rate: 0.0025912316 +2025-02-19 01:27:06,812 Train Loss: 0.0021741, Val Loss: 0.0022179 +2025-02-19 01:27:06,813 Epoch 133/2000 +2025-02-19 01:27:48,958 Current Learning Rate: 0.0025227067 +2025-02-19 01:27:48,959 Train Loss: 0.0024336, Val Loss: 0.0022363 +2025-02-19 01:27:48,959 Epoch 134/2000 +2025-02-19 01:28:31,413 Current Learning Rate: 0.0024547929 +2025-02-19 01:28:31,413 Train Loss: 0.0023936, Val Loss: 0.0022878 +2025-02-19 01:28:31,414 Epoch 135/2000 +2025-02-19 01:29:13,980 Current Learning Rate: 0.0023875072 +2025-02-19 01:29:15,279 Train Loss: 0.0021375, Val Loss: 0.0021983 +2025-02-19 01:29:15,280 Epoch 136/2000 +2025-02-19 01:29:56,153 Current Learning Rate: 0.0023208660 +2025-02-19 01:29:57,379 Train Loss: 0.0024073, Val Loss: 0.0021722 +2025-02-19 01:29:57,379 Epoch 137/2000 +2025-02-19 01:30:38,497 Current Learning Rate: 0.0022548859 +2025-02-19 01:30:39,799 Train Loss: 0.0020079, Val Loss: 0.0021674 +2025-02-19 01:30:39,800 Epoch 138/2000 +2025-02-19 01:31:22,095 Current Learning Rate: 0.0021895831 +2025-02-19 01:31:22,095 Train Loss: 0.0021660, Val Loss: 0.0021771 +2025-02-19 01:31:22,096 Epoch 139/2000 +2025-02-19 01:32:04,526 Current Learning Rate: 0.0021249737 +2025-02-19 01:32:06,405 Train Loss: 0.0021857, Val Loss: 0.0021324 +2025-02-19 01:32:06,405 Epoch 140/2000 +2025-02-19 01:32:48,591 Current Learning Rate: 0.0020610737 +2025-02-19 01:32:50,309 Train Loss: 0.0019805, Val Loss: 0.0021122 +2025-02-19 01:32:50,309 Epoch 141/2000 +2025-02-19 01:33:32,646 Current Learning Rate: 0.0019978989 +2025-02-19 01:33:32,647 Train Loss: 0.0020986, Val Loss: 0.0021461 +2025-02-19 01:33:32,647 Epoch 142/2000 +2025-02-19 01:34:14,426 Current Learning Rate: 0.0019354647 +2025-02-19 01:34:15,579 Train Loss: 0.0020708, Val Loss: 0.0020922 +2025-02-19 01:34:15,580 Epoch 143/2000 +2025-02-19 01:34:56,969 Current Learning 
Rate: 0.0018737867 +2025-02-19 01:34:58,329 Train Loss: 0.0021875, Val Loss: 0.0020832 +2025-02-19 01:34:58,330 Epoch 144/2000 +2025-02-19 01:35:39,741 Current Learning Rate: 0.0018128801 +2025-02-19 01:35:39,741 Train Loss: 0.0019904, Val Loss: 0.0020860 +2025-02-19 01:35:39,742 Epoch 145/2000 +2025-02-19 01:36:21,653 Current Learning Rate: 0.0017527598 +2025-02-19 01:36:21,654 Train Loss: 0.0022660, Val Loss: 0.0020959 +2025-02-19 01:36:21,654 Epoch 146/2000 +2025-02-19 01:37:04,200 Current Learning Rate: 0.0016934407 +2025-02-19 01:37:06,033 Train Loss: 0.0019734, Val Loss: 0.0020127 +2025-02-19 01:37:06,033 Epoch 147/2000 +2025-02-19 01:37:46,777 Current Learning Rate: 0.0016349374 +2025-02-19 01:37:47,773 Train Loss: 0.0020415, Val Loss: 0.0020037 +2025-02-19 01:37:47,773 Epoch 148/2000 +2025-02-19 01:38:30,057 Current Learning Rate: 0.0015772645 +2025-02-19 01:38:30,057 Train Loss: 0.0020007, Val Loss: 0.0020610 +2025-02-19 01:38:30,058 Epoch 149/2000 +2025-02-19 01:39:12,321 Current Learning Rate: 0.0015204360 +2025-02-19 01:39:12,321 Train Loss: 0.0019330, Val Loss: 0.0020208 +2025-02-19 01:39:12,322 Epoch 150/2000 +2025-02-19 01:39:54,262 Current Learning Rate: 0.0014644661 +2025-02-19 01:39:54,262 Train Loss: 0.0020180, Val Loss: 0.0020590 +2025-02-19 01:39:54,262 Epoch 151/2000 +2025-02-19 01:40:36,877 Current Learning Rate: 0.0014093685 +2025-02-19 01:40:36,878 Train Loss: 0.0022267, Val Loss: 0.0021456 +2025-02-19 01:40:36,878 Epoch 152/2000 +2025-02-19 01:41:18,769 Current Learning Rate: 0.0013551569 +2025-02-19 01:41:20,343 Train Loss: 0.0020824, Val Loss: 0.0019915 +2025-02-19 01:41:20,351 Epoch 153/2000 +2025-02-19 01:42:02,584 Current Learning Rate: 0.0013018445 +2025-02-19 01:42:02,585 Train Loss: 0.0021292, Val Loss: 0.0020764 +2025-02-19 01:42:02,585 Epoch 154/2000 +2025-02-19 01:42:44,546 Current Learning Rate: 0.0012494447 +2025-02-19 01:42:44,547 Train Loss: 0.0018767, Val Loss: 0.0020179 +2025-02-19 01:42:44,547 Epoch 155/2000 +2025-02-19 
01:43:26,970 Current Learning Rate: 0.0011979702 +2025-02-19 01:43:28,659 Train Loss: 0.0017822, Val Loss: 0.0019867 +2025-02-19 01:43:28,659 Epoch 156/2000 +2025-02-19 01:44:10,002 Current Learning Rate: 0.0011474338 +2025-02-19 01:44:12,004 Train Loss: 0.0018670, Val Loss: 0.0019719 +2025-02-19 01:44:12,005 Epoch 157/2000 +2025-02-19 01:44:52,671 Current Learning Rate: 0.0010978480 +2025-02-19 01:44:53,894 Train Loss: 0.0018423, Val Loss: 0.0019534 +2025-02-19 01:44:53,894 Epoch 158/2000 +2025-02-19 01:45:34,830 Current Learning Rate: 0.0010492249 +2025-02-19 01:45:34,832 Train Loss: 0.0019433, Val Loss: 0.0019592 +2025-02-19 01:45:34,832 Epoch 159/2000 +2025-02-19 01:46:17,618 Current Learning Rate: 0.0010015767 +2025-02-19 01:46:19,609 Train Loss: 0.0019110, Val Loss: 0.0018985 +2025-02-19 01:46:19,609 Epoch 160/2000 +2025-02-19 01:47:01,568 Current Learning Rate: 0.0009549150 +2025-02-19 01:47:01,569 Train Loss: 0.0019821, Val Loss: 0.0019202 +2025-02-19 01:47:01,570 Epoch 161/2000 +2025-02-19 01:47:44,100 Current Learning Rate: 0.0009092514 +2025-02-19 01:47:46,003 Train Loss: 0.0019917, Val Loss: 0.0018956 +2025-02-19 01:47:46,004 Epoch 162/2000 +2025-02-19 01:48:28,326 Current Learning Rate: 0.0008645971 +2025-02-19 01:48:28,327 Train Loss: 0.0020351, Val Loss: 0.0019139 +2025-02-19 01:48:28,327 Epoch 163/2000 +2025-02-19 01:49:09,950 Current Learning Rate: 0.0008209632 +2025-02-19 01:49:11,189 Train Loss: 0.0020490, Val Loss: 0.0018806 +2025-02-19 01:49:11,190 Epoch 164/2000 +2025-02-19 01:49:53,280 Current Learning Rate: 0.0007783604 +2025-02-19 01:49:54,471 Train Loss: 0.0019353, Val Loss: 0.0018786 +2025-02-19 01:49:54,472 Epoch 165/2000 +2025-02-19 01:50:36,521 Current Learning Rate: 0.0007367992 +2025-02-19 01:50:38,067 Train Loss: 0.0019194, Val Loss: 0.0018391 +2025-02-19 01:50:38,067 Epoch 166/2000 +2025-02-19 01:51:19,041 Current Learning Rate: 0.0006962899 +2025-02-19 01:51:20,297 Train Loss: 0.0019329, Val Loss: 0.0018355 +2025-02-19 
01:51:20,297 Epoch 167/2000 +2025-02-19 01:52:02,021 Current Learning Rate: 0.0006568424 +2025-02-19 01:52:04,404 Train Loss: 0.0017932, Val Loss: 0.0018320 +2025-02-19 01:52:04,404 Epoch 168/2000 +2025-02-19 01:52:46,877 Current Learning Rate: 0.0006184666 +2025-02-19 01:52:48,949 Train Loss: 0.0017510, Val Loss: 0.0018238 +2025-02-19 01:52:48,955 Epoch 169/2000 +2025-02-19 01:53:30,548 Current Learning Rate: 0.0005811718 +2025-02-19 01:53:31,864 Train Loss: 0.0018346, Val Loss: 0.0018201 +2025-02-19 01:53:31,864 Epoch 170/2000 +2025-02-19 01:54:14,405 Current Learning Rate: 0.0005449674 +2025-02-19 01:54:16,348 Train Loss: 0.0016690, Val Loss: 0.0018113 +2025-02-19 01:54:16,349 Epoch 171/2000 +2025-02-19 01:54:57,101 Current Learning Rate: 0.0005098621 +2025-02-19 01:54:58,630 Train Loss: 0.0019035, Val Loss: 0.0018066 +2025-02-19 01:54:58,631 Epoch 172/2000 +2025-02-19 01:55:39,658 Current Learning Rate: 0.0004758647 +2025-02-19 01:55:41,036 Train Loss: 0.0018223, Val Loss: 0.0017994 +2025-02-19 01:55:41,036 Epoch 173/2000 +2025-02-19 01:56:23,566 Current Learning Rate: 0.0004429836 +2025-02-19 01:56:25,213 Train Loss: 0.0017544, Val Loss: 0.0017920 +2025-02-19 01:56:25,213 Epoch 174/2000 +2025-02-19 01:57:06,315 Current Learning Rate: 0.0004112269 +2025-02-19 01:57:07,756 Train Loss: 0.0017483, Val Loss: 0.0017863 +2025-02-19 01:57:07,756 Epoch 175/2000 +2025-02-19 01:57:49,738 Current Learning Rate: 0.0003806023 +2025-02-19 01:57:49,740 Train Loss: 0.0018096, Val Loss: 0.0017879 +2025-02-19 01:57:49,740 Epoch 176/2000 +2025-02-19 01:58:32,459 Current Learning Rate: 0.0003511176 +2025-02-19 01:58:34,413 Train Loss: 0.0016585, Val Loss: 0.0017798 +2025-02-19 01:58:34,414 Epoch 177/2000 +2025-02-19 01:59:16,743 Current Learning Rate: 0.0003227798 +2025-02-19 01:59:18,453 Train Loss: 0.0017152, Val Loss: 0.0017755 +2025-02-19 01:59:18,453 Epoch 178/2000 +2025-02-19 01:59:59,366 Current Learning Rate: 0.0002955962 +2025-02-19 02:00:00,336 Train Loss: 0.0017738, Val 
Loss: 0.0017746 +2025-02-19 02:00:00,338 Epoch 179/2000 +2025-02-19 02:00:42,489 Current Learning Rate: 0.0002695732 +2025-02-19 02:00:44,335 Train Loss: 0.0018169, Val Loss: 0.0017714 +2025-02-19 02:00:44,335 Epoch 180/2000 +2025-02-19 02:01:25,940 Current Learning Rate: 0.0002447174 +2025-02-19 02:01:25,941 Train Loss: 0.0018590, Val Loss: 0.0017750 +2025-02-19 02:01:25,942 Epoch 181/2000 +2025-02-19 02:02:07,692 Current Learning Rate: 0.0002210349 +2025-02-19 02:02:09,007 Train Loss: 0.0017238, Val Loss: 0.0017668 +2025-02-19 02:02:09,007 Epoch 182/2000 +2025-02-19 02:02:51,505 Current Learning Rate: 0.0001985316 +2025-02-19 02:02:52,884 Train Loss: 0.0017680, Val Loss: 0.0017596 +2025-02-19 02:02:52,884 Epoch 183/2000 +2025-02-19 02:03:34,092 Current Learning Rate: 0.0001772129 +2025-02-19 02:03:35,331 Train Loss: 0.0019999, Val Loss: 0.0017549 +2025-02-19 02:03:35,332 Epoch 184/2000 +2025-02-19 02:04:17,837 Current Learning Rate: 0.0001570842 +2025-02-19 02:04:19,404 Train Loss: 0.0016876, Val Loss: 0.0017536 +2025-02-19 02:04:19,406 Epoch 185/2000 +2025-02-19 02:05:01,242 Current Learning Rate: 0.0001381504 +2025-02-19 02:05:03,343 Train Loss: 0.0017630, Val Loss: 0.0017517 +2025-02-19 02:05:03,343 Epoch 186/2000 +2025-02-19 02:05:44,970 Current Learning Rate: 0.0001204162 +2025-02-19 02:05:46,242 Train Loss: 0.0015955, Val Loss: 0.0017502 +2025-02-19 02:05:46,242 Epoch 187/2000 +2025-02-19 02:06:28,055 Current Learning Rate: 0.0001038859 +2025-02-19 02:06:29,478 Train Loss: 0.0019637, Val Loss: 0.0017484 +2025-02-19 02:06:29,479 Epoch 188/2000 +2025-02-19 02:07:10,941 Current Learning Rate: 0.0000885637 +2025-02-19 02:07:12,385 Train Loss: 0.0017018, Val Loss: 0.0017462 +2025-02-19 02:07:12,386 Epoch 189/2000 +2025-02-19 02:07:54,657 Current Learning Rate: 0.0000744534 +2025-02-19 02:07:56,237 Train Loss: 0.0017627, Val Loss: 0.0017446 +2025-02-19 02:07:56,237 Epoch 190/2000 +2025-02-19 02:08:37,274 Current Learning Rate: 0.0000615583 +2025-02-19 
02:08:38,604 Train Loss: 0.0016332, Val Loss: 0.0017433 +2025-02-19 02:08:38,604 Epoch 191/2000 +2025-02-19 02:09:21,235 Current Learning Rate: 0.0000498817 +2025-02-19 02:09:23,087 Train Loss: 0.0017502, Val Loss: 0.0017424 +2025-02-19 02:09:23,087 Epoch 192/2000 +2025-02-19 02:10:05,470 Current Learning Rate: 0.0000394265 +2025-02-19 02:10:07,023 Train Loss: 0.0017487, Val Loss: 0.0017416 +2025-02-19 02:10:07,024 Epoch 193/2000 +2025-02-19 02:10:47,826 Current Learning Rate: 0.0000301952 +2025-02-19 02:10:48,811 Train Loss: 0.0016640, Val Loss: 0.0017413 +2025-02-19 02:10:48,811 Epoch 194/2000 +2025-02-19 02:11:30,936 Current Learning Rate: 0.0000221902 +2025-02-19 02:11:32,853 Train Loss: 0.0016934, Val Loss: 0.0017409 +2025-02-19 02:11:32,853 Epoch 195/2000 +2025-02-19 02:12:13,607 Current Learning Rate: 0.0000154133 +2025-02-19 02:12:13,608 Train Loss: 0.0016525, Val Loss: 0.0017411 +2025-02-19 02:12:13,608 Epoch 196/2000 +2025-02-19 02:12:56,113 Current Learning Rate: 0.0000098664 +2025-02-19 02:12:56,113 Train Loss: 0.0017709, Val Loss: 0.0017421 +2025-02-19 02:12:56,114 Epoch 197/2000 +2025-02-19 02:13:38,625 Current Learning Rate: 0.0000055506 +2025-02-19 02:13:38,625 Train Loss: 0.0017882, Val Loss: 0.0017412 +2025-02-19 02:13:38,626 Epoch 198/2000 +2025-02-19 02:14:20,984 Current Learning Rate: 0.0000024672 +2025-02-19 02:14:20,984 Train Loss: 0.0018726, Val Loss: 0.0017412 +2025-02-19 02:14:20,985 Epoch 199/2000 +2025-02-19 02:15:03,213 Current Learning Rate: 0.0000006168 +2025-02-19 02:15:03,214 Train Loss: 0.0016690, Val Loss: 0.0017410 +2025-02-19 02:15:03,214 Epoch 200/2000 +2025-02-19 02:15:45,430 Current Learning Rate: 0.0000000000 +2025-02-19 02:15:46,998 Train Loss: 0.0016884, Val Loss: 0.0017407 +2025-02-19 02:15:46,998 Epoch 201/2000 +2025-02-19 02:16:28,222 Current Learning Rate: 0.0000006168 +2025-02-19 02:16:28,223 Train Loss: 0.0016745, Val Loss: 0.0017409 +2025-02-19 02:16:28,223 Epoch 202/2000 +2025-02-19 02:17:10,929 Current Learning 
Rate: 0.0000024672 +2025-02-19 02:17:10,930 Train Loss: 0.0016994, Val Loss: 0.0017408 +2025-02-19 02:17:10,930 Epoch 203/2000 +2025-02-19 02:17:53,510 Current Learning Rate: 0.0000055506 +2025-02-19 02:17:55,159 Train Loss: 0.0018111, Val Loss: 0.0017406 +2025-02-19 02:17:55,159 Epoch 204/2000 +2025-02-19 02:18:36,962 Current Learning Rate: 0.0000098664 +2025-02-19 02:18:36,964 Train Loss: 0.0016745, Val Loss: 0.0017409 +2025-02-19 02:18:36,965 Epoch 205/2000 +2025-02-19 02:19:18,614 Current Learning Rate: 0.0000154133 +2025-02-19 02:19:18,614 Train Loss: 0.0016672, Val Loss: 0.0017408 +2025-02-19 02:19:18,615 Epoch 206/2000 +2025-02-19 02:20:01,032 Current Learning Rate: 0.0000221902 +2025-02-19 02:20:02,475 Train Loss: 0.0017572, Val Loss: 0.0017405 +2025-02-19 02:20:02,475 Epoch 207/2000 +2025-02-19 02:20:43,623 Current Learning Rate: 0.0000301952 +2025-02-19 02:20:44,878 Train Loss: 0.0016598, Val Loss: 0.0017399 +2025-02-19 02:20:44,878 Epoch 208/2000 +2025-02-19 02:21:26,177 Current Learning Rate: 0.0000394265 +2025-02-19 02:21:26,179 Train Loss: 0.0019802, Val Loss: 0.0017404 +2025-02-19 02:21:26,179 Epoch 209/2000 +2025-02-19 02:22:07,914 Current Learning Rate: 0.0000498817 +2025-02-19 02:22:09,103 Train Loss: 0.0017660, Val Loss: 0.0017390 +2025-02-19 02:22:09,103 Epoch 210/2000 +2025-02-19 02:22:51,293 Current Learning Rate: 0.0000615583 +2025-02-19 02:22:53,106 Train Loss: 0.0016666, Val Loss: 0.0017384 +2025-02-19 02:22:53,106 Epoch 211/2000 +2025-02-19 02:23:35,557 Current Learning Rate: 0.0000744534 +2025-02-19 02:23:37,451 Train Loss: 0.0017613, Val Loss: 0.0017378 +2025-02-19 02:23:37,452 Epoch 212/2000 +2025-02-19 02:24:19,056 Current Learning Rate: 0.0000885637 +2025-02-19 02:24:19,057 Train Loss: 0.0016547, Val Loss: 0.0017379 +2025-02-19 02:24:19,058 Epoch 213/2000 +2025-02-19 02:25:01,531 Current Learning Rate: 0.0001038859 +2025-02-19 02:25:01,532 Train Loss: 0.0016648, Val Loss: 0.0017383 +2025-02-19 02:25:01,532 Epoch 214/2000 +2025-02-19 
02:25:43,841 Current Learning Rate: 0.0001204162 +2025-02-19 02:25:45,205 Train Loss: 0.0016908, Val Loss: 0.0017373 +2025-02-19 02:25:45,205 Epoch 215/2000 +2025-02-19 02:26:25,954 Current Learning Rate: 0.0001381504 +2025-02-19 02:26:25,955 Train Loss: 0.0020240, Val Loss: 0.0017382 +2025-02-19 02:26:25,955 Epoch 216/2000 +2025-02-19 02:27:08,344 Current Learning Rate: 0.0001570842 +2025-02-19 02:27:09,676 Train Loss: 0.0019309, Val Loss: 0.0017365 +2025-02-19 02:27:09,676 Epoch 217/2000 +2025-02-19 02:27:51,983 Current Learning Rate: 0.0001772129 +2025-02-19 02:27:51,984 Train Loss: 0.0018199, Val Loss: 0.0017462 +2025-02-19 02:27:51,984 Epoch 218/2000 +2025-02-19 02:28:34,594 Current Learning Rate: 0.0001985316 +2025-02-19 02:28:36,456 Train Loss: 0.0018424, Val Loss: 0.0017361 +2025-02-19 02:28:36,456 Epoch 219/2000 +2025-02-19 02:29:18,543 Current Learning Rate: 0.0002210349 +2025-02-19 02:29:20,497 Train Loss: 0.0022452, Val Loss: 0.0017359 +2025-02-19 02:29:20,497 Epoch 220/2000 +2025-02-19 02:30:01,221 Current Learning Rate: 0.0002447174 +2025-02-19 02:30:01,222 Train Loss: 0.0021426, Val Loss: 0.0017399 +2025-02-19 02:30:01,222 Epoch 221/2000 +2025-02-19 02:30:43,482 Current Learning Rate: 0.0002695732 +2025-02-19 02:30:44,850 Train Loss: 0.0017566, Val Loss: 0.0017346 +2025-02-19 02:30:44,851 Epoch 222/2000 +2025-02-19 02:31:26,000 Current Learning Rate: 0.0002955962 +2025-02-19 02:31:27,289 Train Loss: 0.0016008, Val Loss: 0.0017277 +2025-02-19 02:31:27,289 Epoch 223/2000 +2025-02-19 02:32:08,188 Current Learning Rate: 0.0003227798 +2025-02-19 02:32:08,189 Train Loss: 0.0018943, Val Loss: 0.0017290 +2025-02-19 02:32:08,189 Epoch 224/2000 +2025-02-19 02:32:51,406 Current Learning Rate: 0.0003511176 +2025-02-19 02:32:53,110 Train Loss: 0.0017643, Val Loss: 0.0017249 +2025-02-19 02:32:53,110 Epoch 225/2000 +2025-02-19 02:33:34,048 Current Learning Rate: 0.0003806023 +2025-02-19 02:33:35,446 Train Loss: 0.0016681, Val Loss: 0.0017219 +2025-02-19 
02:33:35,446 Epoch 226/2000 +2025-02-19 02:34:16,767 Current Learning Rate: 0.0004112269 +2025-02-19 02:34:17,750 Train Loss: 0.0016686, Val Loss: 0.0017218 +2025-02-19 02:34:17,752 Epoch 227/2000 +2025-02-19 02:34:58,792 Current Learning Rate: 0.0004429836 +2025-02-19 02:34:58,793 Train Loss: 0.0019731, Val Loss: 0.0017253 +2025-02-19 02:34:58,793 Epoch 228/2000 +2025-02-19 02:35:41,856 Current Learning Rate: 0.0004758647 +2025-02-19 02:35:41,857 Train Loss: 0.0017136, Val Loss: 0.0017456 +2025-02-19 02:35:41,857 Epoch 229/2000 +2025-02-19 02:36:24,428 Current Learning Rate: 0.0005098621 +2025-02-19 02:36:25,999 Train Loss: 0.0016452, Val Loss: 0.0017139 +2025-02-19 02:36:25,999 Epoch 230/2000 +2025-02-19 02:37:08,493 Current Learning Rate: 0.0005449674 +2025-02-19 02:37:08,494 Train Loss: 0.0018787, Val Loss: 0.0017430 +2025-02-19 02:37:08,494 Epoch 231/2000 +2025-02-19 02:37:51,016 Current Learning Rate: 0.0005811718 +2025-02-19 02:37:53,063 Train Loss: 0.0017876, Val Loss: 0.0017118 +2025-02-19 02:37:53,063 Epoch 232/2000 +2025-02-19 02:38:34,213 Current Learning Rate: 0.0006184666 +2025-02-19 02:38:35,584 Train Loss: 0.0017348, Val Loss: 0.0016973 +2025-02-19 02:38:35,585 Epoch 233/2000 +2025-02-19 02:39:18,113 Current Learning Rate: 0.0006568424 +2025-02-19 02:39:19,768 Train Loss: 0.0017158, Val Loss: 0.0016921 +2025-02-19 02:39:19,768 Epoch 234/2000 +2025-02-19 02:40:02,179 Current Learning Rate: 0.0006962899 +2025-02-19 02:40:04,523 Train Loss: 0.0017112, Val Loss: 0.0016919 +2025-02-19 02:40:04,523 Epoch 235/2000 +2025-02-19 02:40:46,709 Current Learning Rate: 0.0007367992 +2025-02-19 02:40:48,624 Train Loss: 0.0016566, Val Loss: 0.0016845 +2025-02-19 02:40:48,625 Epoch 236/2000 +2025-02-19 02:41:30,386 Current Learning Rate: 0.0007783604 +2025-02-19 02:41:31,348 Train Loss: 0.0016330, Val Loss: 0.0016760 +2025-02-19 02:41:31,348 Epoch 237/2000 +2025-02-19 02:42:13,252 Current Learning Rate: 0.0008209632 +2025-02-19 02:42:13,255 Train Loss: 0.0019498, Val 
Loss: 0.0016924 +2025-02-19 02:42:13,256 Epoch 238/2000 +2025-02-19 02:42:55,322 Current Learning Rate: 0.0008645971 +2025-02-19 02:42:55,323 Train Loss: 0.0020079, Val Loss: 0.0016969 +2025-02-19 02:42:55,323 Epoch 239/2000 +2025-02-19 02:43:38,074 Current Learning Rate: 0.0009092514 +2025-02-19 02:43:38,074 Train Loss: 0.0015317, Val Loss: 0.0016777 +2025-02-19 02:43:38,075 Epoch 240/2000 +2025-02-19 02:44:20,499 Current Learning Rate: 0.0009549150 +2025-02-19 02:44:20,500 Train Loss: 0.0017634, Val Loss: 0.0017104 +2025-02-19 02:44:20,500 Epoch 241/2000 +2025-02-19 02:45:03,047 Current Learning Rate: 0.0010015767 +2025-02-19 02:45:03,048 Train Loss: 0.0018392, Val Loss: 0.0017129 +2025-02-19 02:45:03,048 Epoch 242/2000 +2025-02-19 02:45:45,712 Current Learning Rate: 0.0010492249 +2025-02-19 02:45:45,713 Train Loss: 0.0017197, Val Loss: 0.0016939 +2025-02-19 02:45:45,713 Epoch 243/2000 +2025-02-19 02:46:28,104 Current Learning Rate: 0.0010978480 +2025-02-19 02:46:29,303 Train Loss: 0.0016534, Val Loss: 0.0016682 +2025-02-19 02:46:29,308 Epoch 244/2000 +2025-02-19 02:47:11,812 Current Learning Rate: 0.0011474338 +2025-02-19 02:47:11,813 Train Loss: 0.0018344, Val Loss: 0.0017232 +2025-02-19 02:47:11,813 Epoch 245/2000 +2025-02-19 02:47:53,613 Current Learning Rate: 0.0011979702 +2025-02-19 02:47:53,614 Train Loss: 0.0017448, Val Loss: 0.0017632 +2025-02-19 02:47:53,614 Epoch 246/2000 +2025-02-19 02:48:35,679 Current Learning Rate: 0.0012494447 +2025-02-19 02:48:35,680 Train Loss: 0.0017844, Val Loss: 0.0017023 +2025-02-19 02:48:35,680 Epoch 247/2000 +2025-02-19 02:49:18,001 Current Learning Rate: 0.0013018445 +2025-02-19 02:49:18,001 Train Loss: 0.0016408, Val Loss: 0.0016908 +2025-02-19 02:49:18,002 Epoch 248/2000 +2025-02-19 02:50:00,672 Current Learning Rate: 0.0013551569 +2025-02-19 02:50:00,672 Train Loss: 0.0021660, Val Loss: 0.0018092 +2025-02-19 02:50:00,673 Epoch 249/2000 +2025-02-19 02:50:43,503 Current Learning Rate: 0.0014093685 +2025-02-19 
02:50:43,503 Train Loss: 0.0016827, Val Loss: 0.0016840 +2025-02-19 02:50:43,504 Epoch 250/2000 +2025-02-19 02:51:25,658 Current Learning Rate: 0.0014644661 +2025-02-19 02:51:27,234 Train Loss: 0.0016643, Val Loss: 0.0016433 +2025-02-19 02:51:27,234 Epoch 251/2000 +2025-02-19 02:52:09,381 Current Learning Rate: 0.0015204360 +2025-02-19 02:52:09,382 Train Loss: 0.0018988, Val Loss: 0.0019761 +2025-02-19 02:52:09,382 Epoch 252/2000 +2025-02-19 02:52:51,070 Current Learning Rate: 0.0015772645 +2025-02-19 02:52:51,071 Train Loss: 0.0024260, Val Loss: 0.0017071 +2025-02-19 02:52:51,072 Epoch 253/2000 +2025-02-19 02:53:34,252 Current Learning Rate: 0.0016349374 +2025-02-19 02:53:36,309 Train Loss: 0.0015489, Val Loss: 0.0016318 +2025-02-19 02:53:36,310 Epoch 254/2000 +2025-02-19 02:54:18,166 Current Learning Rate: 0.0016934407 +2025-02-19 02:54:18,167 Train Loss: 0.0016413, Val Loss: 0.0017523 +2025-02-19 02:54:18,168 Epoch 255/2000 +2025-02-19 02:55:01,210 Current Learning Rate: 0.0017527598 +2025-02-19 02:55:01,210 Train Loss: 0.0016074, Val Loss: 0.0016386 +2025-02-19 02:55:01,210 Epoch 256/2000 +2025-02-19 02:55:43,072 Current Learning Rate: 0.0018128801 +2025-02-19 02:55:43,073 Train Loss: 0.0018730, Val Loss: 0.0018066 +2025-02-19 02:55:43,073 Epoch 257/2000 +2025-02-19 02:56:25,418 Current Learning Rate: 0.0018737867 +2025-02-19 02:56:25,418 Train Loss: 0.0018759, Val Loss: 0.0019966 +2025-02-19 02:56:25,419 Epoch 258/2000 +2025-02-19 02:57:08,982 Current Learning Rate: 0.0019354647 +2025-02-19 02:57:08,982 Train Loss: 0.0016847, Val Loss: 0.0016902 +2025-02-19 02:57:08,983 Epoch 259/2000 +2025-02-19 02:57:50,762 Current Learning Rate: 0.0019978989 +2025-02-19 02:57:50,762 Train Loss: 0.0014842, Val Loss: 0.0017114 +2025-02-19 02:57:50,762 Epoch 260/2000 +2025-02-19 02:58:33,377 Current Learning Rate: 0.0020610737 +2025-02-19 02:58:33,379 Train Loss: 0.0018388, Val Loss: 0.0017259 +2025-02-19 02:58:33,379 Epoch 261/2000 +2025-02-19 02:59:16,117 Current Learning 
Rate: 0.0021249737 +2025-02-19 02:59:16,117 Train Loss: 0.0020840, Val Loss: 0.0018863 +2025-02-19 02:59:16,118 Epoch 262/2000 +2025-02-19 02:59:57,909 Current Learning Rate: 0.0021895831 +2025-02-19 02:59:57,910 Train Loss: 0.0018472, Val Loss: 0.0017549 +2025-02-19 02:59:57,910 Epoch 263/2000 +2025-02-19 03:00:40,502 Current Learning Rate: 0.0022548859 +2025-02-19 03:00:41,967 Train Loss: 0.0016385, Val Loss: 0.0015944 +2025-02-19 03:00:41,968 Epoch 264/2000 +2025-02-19 03:01:24,432 Current Learning Rate: 0.0023208660 +2025-02-19 03:01:24,433 Train Loss: 0.0023224, Val Loss: 0.0018926 +2025-02-19 03:01:24,433 Epoch 265/2000 +2025-02-19 03:02:07,092 Current Learning Rate: 0.0023875072 +2025-02-19 03:02:07,093 Train Loss: 0.0018834, Val Loss: 0.0016311 +2025-02-19 03:02:07,093 Epoch 266/2000 +2025-02-19 03:02:48,801 Current Learning Rate: 0.0024547929 +2025-02-19 03:02:48,802 Train Loss: 0.0018385, Val Loss: 0.0017110 +2025-02-19 03:02:48,802 Epoch 267/2000 +2025-02-19 03:03:31,841 Current Learning Rate: 0.0025227067 +2025-02-19 03:03:31,842 Train Loss: 0.0019732, Val Loss: 0.0019323 +2025-02-19 03:03:31,842 Epoch 268/2000 +2025-02-19 03:04:13,801 Current Learning Rate: 0.0025912316 +2025-02-19 03:04:13,801 Train Loss: 0.0024165, Val Loss: 0.0020289 +2025-02-19 03:04:13,801 Epoch 269/2000 +2025-02-19 03:04:56,658 Current Learning Rate: 0.0026603509 +2025-02-19 03:04:56,658 Train Loss: 0.0016660, Val Loss: 0.0016108 +2025-02-19 03:04:56,659 Epoch 270/2000 +2025-02-19 03:05:38,590 Current Learning Rate: 0.0027300475 +2025-02-19 03:05:38,590 Train Loss: 0.0026150, Val Loss: 0.0031712 +2025-02-19 03:05:38,591 Epoch 271/2000 +2025-02-19 03:06:21,401 Current Learning Rate: 0.0028003042 +2025-02-19 03:06:21,401 Train Loss: 0.0020315, Val Loss: 0.0017251 +2025-02-19 03:06:21,402 Epoch 272/2000 +2025-02-19 03:07:03,551 Current Learning Rate: 0.0028711035 +2025-02-19 03:07:03,552 Train Loss: 0.0015845, Val Loss: 0.0016192 +2025-02-19 03:07:03,552 Epoch 273/2000 +2025-02-19 
03:07:45,954 Current Learning Rate: 0.0029424282 +2025-02-19 03:07:47,865 Train Loss: 0.0016111, Val Loss: 0.0015590 +2025-02-19 03:07:47,865 Epoch 274/2000 +2025-02-19 03:08:29,343 Current Learning Rate: 0.0030142605 +2025-02-19 03:08:29,345 Train Loss: 0.0022658, Val Loss: 0.0019700 +2025-02-19 03:08:29,345 Epoch 275/2000 +2025-02-19 03:09:11,457 Current Learning Rate: 0.0030865828 +2025-02-19 03:09:11,458 Train Loss: 0.0017028, Val Loss: 0.0016376 +2025-02-19 03:09:11,458 Epoch 276/2000 +2025-02-19 03:09:53,832 Current Learning Rate: 0.0031593772 +2025-02-19 03:09:53,832 Train Loss: 0.0015553, Val Loss: 0.0021934 +2025-02-19 03:09:53,833 Epoch 277/2000 +2025-02-19 03:10:36,394 Current Learning Rate: 0.0032326258 +2025-02-19 03:10:36,398 Train Loss: 0.0024115, Val Loss: 0.0016707 +2025-02-19 03:10:36,398 Epoch 278/2000 +2025-02-19 03:11:18,880 Current Learning Rate: 0.0033063104 +2025-02-19 03:11:20,428 Train Loss: 0.0015668, Val Loss: 0.0015343 +2025-02-19 03:11:20,428 Epoch 279/2000 +2025-02-19 03:12:01,304 Current Learning Rate: 0.0033804129 +2025-02-19 03:12:01,305 Train Loss: 0.0020592, Val Loss: 0.0021991 +2025-02-19 03:12:01,305 Epoch 280/2000 +2025-02-19 03:12:43,824 Current Learning Rate: 0.0034549150 +2025-02-19 03:12:43,824 Train Loss: 0.0017902, Val Loss: 0.0015586 +2025-02-19 03:12:43,825 Epoch 281/2000 +2025-02-19 03:13:26,686 Current Learning Rate: 0.0035297984 +2025-02-19 03:13:26,686 Train Loss: 0.0015316, Val Loss: 0.0025620 +2025-02-19 03:13:26,687 Epoch 282/2000 +2025-02-19 03:14:08,373 Current Learning Rate: 0.0036050445 +2025-02-19 03:14:08,374 Train Loss: 0.0029647, Val Loss: 0.0017013 +2025-02-19 03:14:08,374 Epoch 283/2000 +2025-02-19 03:14:50,887 Current Learning Rate: 0.0036806348 +2025-02-19 03:14:50,888 Train Loss: 0.0015455, Val Loss: 0.0016042 +2025-02-19 03:14:50,888 Epoch 284/2000 +2025-02-19 03:15:33,459 Current Learning Rate: 0.0037565506 +2025-02-19 03:15:33,459 Train Loss: 0.0020497, Val Loss: 0.0016840 +2025-02-19 
03:15:33,460 Epoch 285/2000 +2025-02-19 03:16:15,828 Current Learning Rate: 0.0038327732 +2025-02-19 03:16:17,176 Train Loss: 0.0015508, Val Loss: 0.0014593 +2025-02-19 03:16:17,177 Epoch 286/2000 +2025-02-19 03:16:58,676 Current Learning Rate: 0.0039092838 +2025-02-19 03:16:58,677 Train Loss: 0.0015400, Val Loss: 0.0014858 +2025-02-19 03:16:58,677 Epoch 287/2000 +2025-02-19 03:17:40,873 Current Learning Rate: 0.0039860635 +2025-02-19 03:17:40,873 Train Loss: 0.0020546, Val Loss: 0.0020457 +2025-02-19 03:17:40,873 Epoch 288/2000 +2025-02-19 03:18:23,720 Current Learning Rate: 0.0040630934 +2025-02-19 03:18:23,720 Train Loss: 0.0015843, Val Loss: 0.0015195 +2025-02-19 03:18:23,720 Epoch 289/2000 +2025-02-19 03:19:06,357 Current Learning Rate: 0.0041403545 +2025-02-19 03:19:06,357 Train Loss: 0.0028120, Val Loss: 0.0017338 +2025-02-19 03:19:06,358 Epoch 290/2000 +2025-02-19 03:19:49,034 Current Learning Rate: 0.0042178277 +2025-02-19 03:19:49,034 Train Loss: 0.0018246, Val Loss: 0.0015469 +2025-02-19 03:19:49,035 Epoch 291/2000 +2025-02-19 03:20:31,583 Current Learning Rate: 0.0042954938 +2025-02-19 03:20:33,612 Train Loss: 0.0016412, Val Loss: 0.0014233 +2025-02-19 03:20:33,612 Epoch 292/2000 +2025-02-19 03:21:14,526 Current Learning Rate: 0.0043733338 +2025-02-19 03:21:14,527 Train Loss: 0.0018163, Val Loss: 0.0063317 +2025-02-19 03:21:14,527 Epoch 293/2000 +2025-02-19 03:21:57,289 Current Learning Rate: 0.0044513284 +2025-02-19 03:21:57,289 Train Loss: 0.0029702, Val Loss: 0.0015472 +2025-02-19 03:21:57,290 Epoch 294/2000 +2025-02-19 03:22:39,891 Current Learning Rate: 0.0045294584 +2025-02-19 03:22:39,892 Train Loss: 0.0016110, Val Loss: 0.0015450 +2025-02-19 03:22:39,892 Epoch 295/2000 +2025-02-19 03:23:21,649 Current Learning Rate: 0.0046077045 +2025-02-19 03:23:21,650 Train Loss: 0.0012932, Val Loss: 0.0014242 +2025-02-19 03:23:21,650 Epoch 296/2000 +2025-02-19 03:24:04,303 Current Learning Rate: 0.0046860474 +2025-02-19 03:24:05,544 Train Loss: 0.0015528, Val 
Loss: 0.0014164 +2025-02-19 03:24:05,544 Epoch 297/2000 +2025-02-19 03:24:48,091 Current Learning Rate: 0.0047644677 +2025-02-19 03:24:48,091 Train Loss: 0.0017436, Val Loss: 0.0017601 +2025-02-19 03:24:48,092 Epoch 298/2000 +2025-02-19 03:25:30,522 Current Learning Rate: 0.0048429462 +2025-02-19 03:25:30,523 Train Loss: 0.0015527, Val Loss: 0.0017430 +2025-02-19 03:25:30,523 Epoch 299/2000 +2025-02-19 03:26:12,517 Current Learning Rate: 0.0049214634 +2025-02-19 03:26:12,517 Train Loss: 0.0025417, Val Loss: 0.0016193 +2025-02-19 03:26:12,517 Epoch 300/2000 +2025-02-19 03:26:55,840 Current Learning Rate: 0.0050000000 +2025-02-19 03:26:55,840 Train Loss: 0.0014638, Val Loss: 0.0015500 +2025-02-19 03:26:55,841 Epoch 301/2000 +2025-02-19 03:27:37,640 Current Learning Rate: 0.0050785366 +2025-02-19 03:27:39,166 Train Loss: 0.0013028, Val Loss: 0.0013837 +2025-02-19 03:27:39,166 Epoch 302/2000 +2025-02-19 03:28:21,059 Current Learning Rate: 0.0051570538 +2025-02-19 03:28:21,060 Train Loss: 0.0012835, Val Loss: 0.0014938 +2025-02-19 03:28:21,060 Epoch 303/2000 +2025-02-19 03:29:03,911 Current Learning Rate: 0.0052355323 +2025-02-19 03:29:03,911 Train Loss: 0.0044144, Val Loss: 0.0023768 +2025-02-19 03:29:03,912 Epoch 304/2000 +2025-02-19 03:29:46,666 Current Learning Rate: 0.0053139526 +2025-02-19 03:29:46,667 Train Loss: 0.0016570, Val Loss: 0.0014652 +2025-02-19 03:29:46,667 Epoch 305/2000 +2025-02-19 03:30:28,693 Current Learning Rate: 0.0053922955 +2025-02-19 03:30:30,513 Train Loss: 0.0013686, Val Loss: 0.0013476 +2025-02-19 03:30:30,513 Epoch 306/2000 +2025-02-19 03:31:11,690 Current Learning Rate: 0.0054705416 +2025-02-19 03:31:13,172 Train Loss: 0.0013546, Val Loss: 0.0013166 +2025-02-19 03:31:13,172 Epoch 307/2000 +2025-02-19 03:31:54,466 Current Learning Rate: 0.0055486716 +2025-02-19 03:31:54,467 Train Loss: 0.0014111, Val Loss: 0.0013312 +2025-02-19 03:31:54,467 Epoch 308/2000 +2025-02-19 03:32:37,424 Current Learning Rate: 0.0056266662 +2025-02-19 
03:32:37,424 Train Loss: 0.0016744, Val Loss: 0.0015021 +2025-02-19 03:32:37,425 Epoch 309/2000 +2025-02-19 03:33:19,637 Current Learning Rate: 0.0057045062 +2025-02-19 03:33:21,474 Train Loss: 0.0012878, Val Loss: 0.0012889 +2025-02-19 03:33:21,475 Epoch 310/2000 +2025-02-19 03:34:02,316 Current Learning Rate: 0.0057821723 +2025-02-19 03:34:03,690 Train Loss: 0.0013382, Val Loss: 0.0012536 +2025-02-19 03:34:03,690 Epoch 311/2000 +2025-02-19 03:34:45,237 Current Learning Rate: 0.0058596455 +2025-02-19 03:34:45,264 Train Loss: 0.0022607, Val Loss: 0.0022936 +2025-02-19 03:34:45,264 Epoch 312/2000 +2025-02-19 03:35:27,672 Current Learning Rate: 0.0059369066 +2025-02-19 03:35:27,673 Train Loss: 0.0014946, Val Loss: 0.0012776 +2025-02-19 03:35:27,673 Epoch 313/2000 +2025-02-19 03:36:09,312 Current Learning Rate: 0.0060139365 +2025-02-19 03:36:09,313 Train Loss: 0.0013216, Val Loss: 0.0012649 +2025-02-19 03:36:09,313 Epoch 314/2000 +2025-02-19 03:36:52,931 Current Learning Rate: 0.0060907162 +2025-02-19 03:36:54,981 Train Loss: 0.0012000, Val Loss: 0.0011900 +2025-02-19 03:36:54,982 Epoch 315/2000 +2025-02-19 03:37:36,600 Current Learning Rate: 0.0061672268 +2025-02-19 03:37:38,075 Train Loss: 0.0011508, Val Loss: 0.0011799 +2025-02-19 03:37:38,077 Epoch 316/2000 +2025-02-19 03:38:19,055 Current Learning Rate: 0.0062434494 +2025-02-19 03:38:19,056 Train Loss: 0.0014119, Val Loss: 0.0012127 +2025-02-19 03:38:19,056 Epoch 317/2000 +2025-02-19 03:39:01,129 Current Learning Rate: 0.0063193652 +2025-02-19 03:39:01,130 Train Loss: 0.0013107, Val Loss: 0.0023704 +2025-02-19 03:39:01,134 Epoch 318/2000 +2025-02-19 03:39:44,652 Current Learning Rate: 0.0063949555 +2025-02-19 03:39:44,653 Train Loss: 0.0014809, Val Loss: 0.0012692 +2025-02-19 03:39:44,653 Epoch 319/2000 +2025-02-19 03:40:26,739 Current Learning Rate: 0.0064702016 +2025-02-19 03:40:26,739 Train Loss: 0.0013341, Val Loss: 0.0011892 +2025-02-19 03:40:26,740 Epoch 320/2000 +2025-02-19 03:41:09,244 Current Learning 
Rate: 0.0065450850 +2025-02-19 03:41:09,245 Train Loss: 0.0013506, Val Loss: 0.0012130 +2025-02-19 03:41:09,245 Epoch 321/2000 +2025-02-19 03:41:52,001 Current Learning Rate: 0.0066195871 +2025-02-19 03:41:52,002 Train Loss: 0.0015729, Val Loss: 0.0019253 +2025-02-19 03:41:52,002 Epoch 322/2000 +2025-02-19 03:42:34,104 Current Learning Rate: 0.0066936896 +2025-02-19 03:42:34,104 Train Loss: 0.0017395, Val Loss: 0.0013357 +2025-02-19 03:42:34,105 Epoch 323/2000 +2025-02-19 03:43:16,732 Current Learning Rate: 0.0067673742 +2025-02-19 03:43:18,710 Train Loss: 0.0010730, Val Loss: 0.0011544 +2025-02-19 03:43:18,711 Epoch 324/2000 +2025-02-19 03:43:59,506 Current Learning Rate: 0.0068406228 +2025-02-19 03:43:59,506 Train Loss: 0.0012745, Val Loss: 0.0011848 +2025-02-19 03:43:59,507 Epoch 325/2000 +2025-02-19 03:44:42,353 Current Learning Rate: 0.0069134172 +2025-02-19 03:44:42,353 Train Loss: 0.0016024, Val Loss: 0.0019904 +2025-02-19 03:44:42,353 Epoch 326/2000 +2025-02-19 03:45:25,420 Current Learning Rate: 0.0069857395 +2025-02-19 03:45:25,420 Train Loss: 0.0020954, Val Loss: 0.0012449 +2025-02-19 03:45:25,421 Epoch 327/2000 +2025-02-19 03:46:07,227 Current Learning Rate: 0.0070575718 +2025-02-19 03:46:09,085 Train Loss: 0.0011917, Val Loss: 0.0011023 +2025-02-19 03:46:09,085 Epoch 328/2000 +2025-02-19 03:46:50,314 Current Learning Rate: 0.0071288965 +2025-02-19 03:46:51,982 Train Loss: 0.0010282, Val Loss: 0.0010505 +2025-02-19 03:46:51,982 Epoch 329/2000 +2025-02-19 03:47:32,981 Current Learning Rate: 0.0071996958 +2025-02-19 03:47:32,982 Train Loss: 0.0012060, Val Loss: 0.0010773 +2025-02-19 03:47:32,982 Epoch 330/2000 +2025-02-19 03:48:15,355 Current Learning Rate: 0.0072699525 +2025-02-19 03:48:15,356 Train Loss: 0.0010776, Val Loss: 0.0010650 +2025-02-19 03:48:15,356 Epoch 331/2000 +2025-02-19 03:48:57,490 Current Learning Rate: 0.0073396491 +2025-02-19 03:48:57,490 Train Loss: 0.0011577, Val Loss: 0.0011705 +2025-02-19 03:48:57,490 Epoch 332/2000 +2025-02-19 
03:49:40,411 Current Learning Rate: 0.0074087684 +2025-02-19 03:49:40,412 Train Loss: 0.0010904, Val Loss: 0.0011583 +2025-02-19 03:49:40,412 Epoch 333/2000 +2025-02-19 03:50:22,821 Current Learning Rate: 0.0074772933 +2025-02-19 03:50:22,821 Train Loss: 0.0013879, Val Loss: 0.0013653 +2025-02-19 03:50:22,822 Epoch 334/2000 +2025-02-19 03:51:05,547 Current Learning Rate: 0.0075452071 +2025-02-19 03:51:05,548 Train Loss: 0.0014281, Val Loss: 0.0011852 +2025-02-19 03:51:05,548 Epoch 335/2000 +2025-02-19 03:51:47,814 Current Learning Rate: 0.0076124928 +2025-02-19 03:51:47,814 Train Loss: 0.0013969, Val Loss: 0.0010668 +2025-02-19 03:51:47,814 Epoch 336/2000 +2025-02-19 03:52:30,266 Current Learning Rate: 0.0076791340 +2025-02-19 03:52:32,195 Train Loss: 0.0009118, Val Loss: 0.0009649 +2025-02-19 03:52:32,195 Epoch 337/2000 +2025-02-19 03:53:14,583 Current Learning Rate: 0.0077451141 +2025-02-19 03:53:14,583 Train Loss: 0.0012383, Val Loss: 0.0010181 +2025-02-19 03:53:14,584 Epoch 338/2000 +2025-02-19 03:53:57,092 Current Learning Rate: 0.0078104169 +2025-02-19 03:53:57,092 Train Loss: 0.0012383, Val Loss: 0.0010559 +2025-02-19 03:53:57,093 Epoch 339/2000 +2025-02-19 03:54:39,627 Current Learning Rate: 0.0078750263 +2025-02-19 03:54:39,627 Train Loss: 0.0009873, Val Loss: 0.0010110 +2025-02-19 03:54:39,627 Epoch 340/2000 +2025-02-19 03:55:21,965 Current Learning Rate: 0.0079389263 +2025-02-19 03:55:21,966 Train Loss: 0.0008993, Val Loss: 0.0011927 +2025-02-19 03:55:21,966 Epoch 341/2000 +2025-02-19 03:56:04,989 Current Learning Rate: 0.0080021011 +2025-02-19 03:56:04,990 Train Loss: 0.0011391, Val Loss: 0.0010520 +2025-02-19 03:56:04,990 Epoch 342/2000 +2025-02-19 03:56:47,863 Current Learning Rate: 0.0080645353 +2025-02-19 03:56:47,863 Train Loss: 0.0013693, Val Loss: 0.0011954 +2025-02-19 03:56:47,864 Epoch 343/2000 +2025-02-19 03:57:29,763 Current Learning Rate: 0.0081262133 +2025-02-19 03:57:29,764 Train Loss: 0.0013792, Val Loss: 0.0010096 +2025-02-19 
03:57:29,764 Epoch 344/2000 +2025-02-19 03:58:12,309 Current Learning Rate: 0.0081871199 +2025-02-19 03:58:12,309 Train Loss: 0.0011318, Val Loss: 0.0009873 +2025-02-19 03:58:12,310 Epoch 345/2000 +2025-02-19 03:58:54,922 Current Learning Rate: 0.0082472402 +2025-02-19 03:58:54,923 Train Loss: 0.0010808, Val Loss: 0.0012554 +2025-02-19 03:58:54,924 Epoch 346/2000 +2025-02-19 03:59:37,170 Current Learning Rate: 0.0083065593 +2025-02-19 03:59:37,171 Train Loss: 0.0012310, Val Loss: 0.0011112 +2025-02-19 03:59:37,171 Epoch 347/2000 +2025-02-19 04:00:20,038 Current Learning Rate: 0.0083650626 +2025-02-19 04:00:20,038 Train Loss: 0.0012932, Val Loss: 0.0010920 +2025-02-19 04:00:20,038 Epoch 348/2000 +2025-02-19 04:01:01,878 Current Learning Rate: 0.0084227355 +2025-02-19 04:01:01,879 Train Loss: 0.0009327, Val Loss: 0.0009964 +2025-02-19 04:01:01,879 Epoch 349/2000 +2025-02-19 04:01:44,201 Current Learning Rate: 0.0084795640 +2025-02-19 04:01:44,202 Train Loss: 0.0011171, Val Loss: 0.0010743 +2025-02-19 04:01:44,202 Epoch 350/2000 +2025-02-19 04:02:27,618 Current Learning Rate: 0.0085355339 +2025-02-19 04:02:27,618 Train Loss: 0.0012911, Val Loss: 0.0011814 +2025-02-19 04:02:27,619 Epoch 351/2000 +2025-02-19 04:03:10,415 Current Learning Rate: 0.0085906315 +2025-02-19 04:03:10,415 Train Loss: 0.0013600, Val Loss: 0.0010618 +2025-02-19 04:03:10,416 Epoch 352/2000 +2025-02-19 04:03:52,784 Current Learning Rate: 0.0086448431 +2025-02-19 04:03:54,575 Train Loss: 0.0009300, Val Loss: 0.0009471 +2025-02-19 04:03:54,575 Epoch 353/2000 +2025-02-19 04:04:35,529 Current Learning Rate: 0.0086981555 +2025-02-19 04:04:36,917 Train Loss: 0.0011415, Val Loss: 0.0009114 +2025-02-19 04:04:36,917 Epoch 354/2000 +2025-02-19 04:05:18,487 Current Learning Rate: 0.0087505553 +2025-02-19 04:05:18,488 Train Loss: 0.0009566, Val Loss: 0.0009206 +2025-02-19 04:05:18,488 Epoch 355/2000 +2025-02-19 04:06:00,876 Current Learning Rate: 0.0088020298 +2025-02-19 04:06:02,611 Train Loss: 0.0009845, Val 
Loss: 0.0008380 +2025-02-19 04:06:02,611 Epoch 356/2000 +2025-02-19 04:06:43,665 Current Learning Rate: 0.0088525662 +2025-02-19 04:06:44,848 Train Loss: 0.0009216, Val Loss: 0.0008178 +2025-02-19 04:06:44,848 Epoch 357/2000 +2025-02-19 04:07:26,089 Current Learning Rate: 0.0089021520 +2025-02-19 04:07:26,090 Train Loss: 0.0009710, Val Loss: 0.0008633 +2025-02-19 04:07:26,090 Epoch 358/2000 +2025-02-19 04:08:08,733 Current Learning Rate: 0.0089507751 +2025-02-19 04:08:08,734 Train Loss: 0.0009832, Val Loss: 0.0008622 +2025-02-19 04:08:08,734 Epoch 359/2000 +2025-02-19 04:08:51,584 Current Learning Rate: 0.0089984233 +2025-02-19 04:08:51,585 Train Loss: 0.0011419, Val Loss: 0.0009875 +2025-02-19 04:08:51,585 Epoch 360/2000 +2025-02-19 04:09:34,311 Current Learning Rate: 0.0090450850 +2025-02-19 04:09:34,312 Train Loss: 0.0011875, Val Loss: 0.0008943 +2025-02-19 04:09:34,312 Epoch 361/2000 +2025-02-19 04:10:16,229 Current Learning Rate: 0.0090907486 +2025-02-19 04:10:16,230 Train Loss: 0.0010435, Val Loss: 0.0008241 +2025-02-19 04:10:16,231 Epoch 362/2000 +2025-02-19 04:10:58,708 Current Learning Rate: 0.0091354029 +2025-02-19 04:10:58,709 Train Loss: 0.0008685, Val Loss: 0.0008383 +2025-02-19 04:10:58,709 Epoch 363/2000 +2025-02-19 04:11:41,594 Current Learning Rate: 0.0091790368 +2025-02-19 04:11:43,256 Train Loss: 0.0008282, Val Loss: 0.0008067 +2025-02-19 04:11:43,271 Epoch 364/2000 +2025-02-19 04:12:25,895 Current Learning Rate: 0.0092216396 +2025-02-19 04:12:27,986 Train Loss: 0.0008652, Val Loss: 0.0007782 +2025-02-19 04:12:27,986 Epoch 365/2000 +2025-02-19 04:13:10,300 Current Learning Rate: 0.0092632008 +2025-02-19 04:13:10,301 Train Loss: 0.0010616, Val Loss: 0.0007939 +2025-02-19 04:13:10,302 Epoch 366/2000 +2025-02-19 04:13:51,864 Current Learning Rate: 0.0093037101 +2025-02-19 04:13:51,865 Train Loss: 0.0007988, Val Loss: 0.0008904 +2025-02-19 04:13:51,865 Epoch 367/2000 +2025-02-19 04:14:34,354 Current Learning Rate: 0.0093431576 +2025-02-19 
04:14:34,354 Train Loss: 0.0007915, Val Loss: 0.0008739 +2025-02-19 04:14:34,354 Epoch 368/2000 +2025-02-19 04:15:17,398 Current Learning Rate: 0.0093815334 +2025-02-19 04:15:17,398 Train Loss: 0.0009250, Val Loss: 0.0007952 +2025-02-19 04:15:17,399 Epoch 369/2000 +2025-02-19 04:16:00,088 Current Learning Rate: 0.0094188282 +2025-02-19 04:16:02,421 Train Loss: 0.0007022, Val Loss: 0.0007626 +2025-02-19 04:16:02,421 Epoch 370/2000 +2025-02-19 04:16:44,838 Current Learning Rate: 0.0094550326 +2025-02-19 04:16:44,839 Train Loss: 0.0008208, Val Loss: 0.0008126 +2025-02-19 04:16:44,839 Epoch 371/2000 +2025-02-19 04:17:27,455 Current Learning Rate: 0.0094901379 +2025-02-19 04:17:27,455 Train Loss: 0.0010265, Val Loss: 0.0009555 +2025-02-19 04:17:27,456 Epoch 372/2000 +2025-02-19 04:18:09,488 Current Learning Rate: 0.0095241353 +2025-02-19 04:18:09,489 Train Loss: 0.0012776, Val Loss: 0.0008928 +2025-02-19 04:18:09,491 Epoch 373/2000 +2025-02-19 04:18:51,417 Current Learning Rate: 0.0095570164 +2025-02-19 04:18:51,418 Train Loss: 0.0010268, Val Loss: 0.0008200 +2025-02-19 04:18:51,418 Epoch 374/2000 +2025-02-19 04:19:35,251 Current Learning Rate: 0.0095887731 +2025-02-19 04:19:35,251 Train Loss: 0.0010547, Val Loss: 0.0008072 +2025-02-19 04:19:35,252 Epoch 375/2000 +2025-02-19 04:20:17,905 Current Learning Rate: 0.0096193977 +2025-02-19 04:20:17,906 Train Loss: 0.0008614, Val Loss: 0.0007851 +2025-02-19 04:20:17,906 Epoch 376/2000 +2025-02-19 04:21:00,374 Current Learning Rate: 0.0096488824 +2025-02-19 04:21:00,375 Train Loss: 0.0008251, Val Loss: 0.0007705 +2025-02-19 04:21:00,376 Epoch 377/2000 +2025-02-19 04:21:42,259 Current Learning Rate: 0.0096772202 +2025-02-19 04:21:44,314 Train Loss: 0.0006624, Val Loss: 0.0007596 +2025-02-19 04:21:44,314 Epoch 378/2000 +2025-02-19 04:22:25,076 Current Learning Rate: 0.0097044038 +2025-02-19 04:22:25,078 Train Loss: 0.0008598, Val Loss: 0.0007849 +2025-02-19 04:22:25,078 Epoch 379/2000 +2025-02-19 04:23:07,482 Current Learning 
Rate: 0.0097304268 +2025-02-19 04:23:09,178 Train Loss: 0.0011610, Val Loss: 0.0007443 +2025-02-19 04:23:09,178 Epoch 380/2000 +2025-02-19 04:23:50,705 Current Learning Rate: 0.0097552826 +2025-02-19 04:23:52,138 Train Loss: 0.0007582, Val Loss: 0.0006625 +2025-02-19 04:23:52,138 Epoch 381/2000 +2025-02-19 04:24:33,062 Current Learning Rate: 0.0097789651 +2025-02-19 04:24:33,063 Train Loss: 0.0009193, Val Loss: 0.0007103 +2025-02-19 04:24:33,063 Epoch 382/2000 +2025-02-19 04:25:16,011 Current Learning Rate: 0.0098014684 +2025-02-19 04:25:16,012 Train Loss: 0.0007107, Val Loss: 0.0006871 +2025-02-19 04:25:16,012 Epoch 383/2000 +2025-02-19 04:25:57,619 Current Learning Rate: 0.0098227871 +2025-02-19 04:25:57,620 Train Loss: 0.0008805, Val Loss: 0.0007235 +2025-02-19 04:25:57,620 Epoch 384/2000 +2025-02-19 04:26:40,181 Current Learning Rate: 0.0098429158 +2025-02-19 04:26:41,580 Train Loss: 0.0008129, Val Loss: 0.0006509 +2025-02-19 04:26:41,581 Epoch 385/2000 +2025-02-19 04:27:23,719 Current Learning Rate: 0.0098618496 +2025-02-19 04:27:23,720 Train Loss: 0.0008680, Val Loss: 0.0007852 +2025-02-19 04:27:23,722 Epoch 386/2000 +2025-02-19 04:28:05,381 Current Learning Rate: 0.0098795838 +2025-02-19 04:28:05,381 Train Loss: 0.0010028, Val Loss: 0.0007055 +2025-02-19 04:28:05,382 Epoch 387/2000 +2025-02-19 04:28:48,203 Current Learning Rate: 0.0098961141 +2025-02-19 04:28:48,204 Train Loss: 0.0008143, Val Loss: 0.0006920 +2025-02-19 04:28:48,204 Epoch 388/2000 +2025-02-19 04:29:30,021 Current Learning Rate: 0.0099114363 +2025-02-19 04:29:30,022 Train Loss: 0.0006522, Val Loss: 0.0006845 +2025-02-19 04:29:30,022 Epoch 389/2000 +2025-02-19 04:30:12,454 Current Learning Rate: 0.0099255466 +2025-02-19 04:30:12,455 Train Loss: 0.0007134, Val Loss: 0.0006953 +2025-02-19 04:30:12,455 Epoch 390/2000 +2025-02-19 04:30:54,628 Current Learning Rate: 0.0099384417 +2025-02-19 04:30:55,932 Train Loss: 0.0007659, Val Loss: 0.0006227 +2025-02-19 04:30:55,933 Epoch 391/2000 +2025-02-19 
04:31:38,005 Current Learning Rate: 0.0099501183 +2025-02-19 04:31:39,889 Train Loss: 0.0005588, Val Loss: 0.0006047 +2025-02-19 04:31:39,889 Epoch 392/2000 +2025-02-19 04:32:21,791 Current Learning Rate: 0.0099605735 +2025-02-19 04:32:21,792 Train Loss: 0.0009382, Val Loss: 0.0006760 +2025-02-19 04:32:21,793 Epoch 393/2000 +2025-02-19 04:33:03,680 Current Learning Rate: 0.0099698048 +2025-02-19 04:33:03,680 Train Loss: 0.0006468, Val Loss: 0.0007493 +2025-02-19 04:33:03,681 Epoch 394/2000 +2025-02-19 04:33:45,485 Current Learning Rate: 0.0099778098 +2025-02-19 04:33:45,485 Train Loss: 0.0006580, Val Loss: 0.0007147 +2025-02-19 04:33:45,485 Epoch 395/2000 +2025-02-19 04:34:28,458 Current Learning Rate: 0.0099845867 +2025-02-19 04:34:28,458 Train Loss: 0.0006028, Val Loss: 0.0006751 +2025-02-19 04:34:28,459 Epoch 396/2000 +2025-02-19 04:35:10,800 Current Learning Rate: 0.0099901336 +2025-02-19 04:35:10,800 Train Loss: 0.0006520, Val Loss: 0.0006788 +2025-02-19 04:35:10,800 Epoch 397/2000 +2025-02-19 04:35:53,246 Current Learning Rate: 0.0099944494 +2025-02-19 04:35:53,247 Train Loss: 0.0007369, Val Loss: 0.0007059 +2025-02-19 04:35:53,247 Epoch 398/2000 +2025-02-19 04:36:35,822 Current Learning Rate: 0.0099975328 +2025-02-19 04:36:35,823 Train Loss: 0.0005915, Val Loss: 0.0006115 +2025-02-19 04:36:35,823 Epoch 399/2000 +2025-02-19 04:37:17,513 Current Learning Rate: 0.0099993832 +2025-02-19 04:37:17,513 Train Loss: 0.0009239, Val Loss: 0.0007050 +2025-02-19 04:37:17,514 Epoch 400/2000 +2025-02-19 04:38:00,219 Current Learning Rate: 0.0100000000 +2025-02-19 04:38:00,219 Train Loss: 0.0006802, Val Loss: 0.0006834 +2025-02-19 04:38:00,219 Epoch 401/2000 +2025-02-19 04:38:42,561 Current Learning Rate: 0.0099993832 +2025-02-19 04:38:42,562 Train Loss: 0.0007217, Val Loss: 0.0006685 +2025-02-19 04:38:42,562 Epoch 402/2000 +2025-02-19 04:39:24,860 Current Learning Rate: 0.0099975328 +2025-02-19 04:39:24,861 Train Loss: 0.0006212, Val Loss: 0.0006712 +2025-02-19 
04:39:24,861 Epoch 403/2000 +2025-02-19 04:40:06,889 Current Learning Rate: 0.0099944494 +2025-02-19 04:40:06,889 Train Loss: 0.0005316, Val Loss: 0.0006669 +2025-02-19 04:40:06,889 Epoch 404/2000 +2025-02-19 04:40:49,225 Current Learning Rate: 0.0099901336 +2025-02-19 04:40:49,226 Train Loss: 0.0005379, Val Loss: 0.0006210 +2025-02-19 04:40:49,226 Epoch 405/2000 +2025-02-19 04:41:31,455 Current Learning Rate: 0.0099845867 +2025-02-19 04:41:31,456 Train Loss: 0.0007584, Val Loss: 0.0007061 +2025-02-19 04:41:31,456 Epoch 406/2000 +2025-02-19 04:42:13,307 Current Learning Rate: 0.0099778098 +2025-02-19 04:42:13,307 Train Loss: 0.0006947, Val Loss: 0.0006958 +2025-02-19 04:42:13,307 Epoch 407/2000 +2025-02-19 04:42:55,140 Current Learning Rate: 0.0099698048 +2025-02-19 04:42:56,423 Train Loss: 0.0006130, Val Loss: 0.0006041 +2025-02-19 04:42:56,423 Epoch 408/2000 +2025-02-19 04:43:38,490 Current Learning Rate: 0.0099605735 +2025-02-19 04:43:38,490 Train Loss: 0.0007180, Val Loss: 0.0006334 +2025-02-19 04:43:38,491 Epoch 409/2000 +2025-02-19 04:44:20,955 Current Learning Rate: 0.0099501183 +2025-02-19 04:44:20,955 Train Loss: 0.0009343, Val Loss: 0.0006998 +2025-02-19 04:44:20,956 Epoch 410/2000 +2025-02-19 04:45:03,441 Current Learning Rate: 0.0099384417 +2025-02-19 04:45:03,442 Train Loss: 0.0006732, Val Loss: 0.0006672 +2025-02-19 04:45:03,442 Epoch 411/2000 +2025-02-19 04:45:45,470 Current Learning Rate: 0.0099255466 +2025-02-19 04:45:45,471 Train Loss: 0.0006974, Val Loss: 0.0006440 +2025-02-19 04:45:45,471 Epoch 412/2000 +2025-02-19 04:46:27,995 Current Learning Rate: 0.0099114363 +2025-02-19 04:46:27,995 Train Loss: 0.0008533, Val Loss: 0.0006498 +2025-02-19 04:46:27,996 Epoch 413/2000 +2025-02-19 04:47:10,433 Current Learning Rate: 0.0098961141 +2025-02-19 04:47:12,358 Train Loss: 0.0005820, Val Loss: 0.0005948 +2025-02-19 04:47:12,358 Epoch 414/2000 +2025-02-19 04:47:53,447 Current Learning Rate: 0.0098795838 +2025-02-19 04:47:53,447 Train Loss: 0.0005831, Val 
Loss: 0.0005997 +2025-02-19 04:47:53,447 Epoch 415/2000 +2025-02-19 04:48:35,575 Current Learning Rate: 0.0098618496 +2025-02-19 04:48:35,575 Train Loss: 0.0005954, Val Loss: 0.0006401 +2025-02-19 04:48:35,576 Epoch 416/2000 +2025-02-19 04:49:18,056 Current Learning Rate: 0.0098429158 +2025-02-19 04:49:19,420 Train Loss: 0.0007465, Val Loss: 0.0005905 +2025-02-19 04:49:19,421 Epoch 417/2000 +2025-02-19 04:50:00,798 Current Learning Rate: 0.0098227871 +2025-02-19 04:50:00,799 Train Loss: 0.0006766, Val Loss: 0.0006221 +2025-02-19 04:50:00,799 Epoch 418/2000 +2025-02-19 04:50:42,908 Current Learning Rate: 0.0098014684 +2025-02-19 04:50:42,909 Train Loss: 0.0005913, Val Loss: 0.0006298 +2025-02-19 04:50:42,910 Epoch 419/2000 +2025-02-19 04:51:24,617 Current Learning Rate: 0.0097789651 +2025-02-19 04:51:24,618 Train Loss: 0.0007962, Val Loss: 0.0006754 +2025-02-19 04:51:24,618 Epoch 420/2000 +2025-02-19 04:52:07,251 Current Learning Rate: 0.0097552826 +2025-02-19 04:52:07,252 Train Loss: 0.0007719, Val Loss: 0.0007072 +2025-02-19 04:52:07,252 Epoch 421/2000 +2025-02-19 04:52:50,164 Current Learning Rate: 0.0097304268 +2025-02-19 04:52:50,165 Train Loss: 0.0007165, Val Loss: 0.0008461 +2025-02-19 04:52:50,165 Epoch 422/2000 +2025-02-19 04:53:32,754 Current Learning Rate: 0.0097044038 +2025-02-19 04:53:32,754 Train Loss: 0.0009111, Val Loss: 0.0007067 +2025-02-19 04:53:32,754 Epoch 423/2000 +2025-02-19 04:54:14,486 Current Learning Rate: 0.0096772202 +2025-02-19 04:54:14,486 Train Loss: 0.0005967, Val Loss: 0.0006159 +2025-02-19 04:54:14,486 Epoch 424/2000 +2025-02-19 04:54:57,500 Current Learning Rate: 0.0096488824 +2025-02-19 04:54:59,172 Train Loss: 0.0005798, Val Loss: 0.0005475 +2025-02-19 04:54:59,173 Epoch 425/2000 +2025-02-19 04:55:40,465 Current Learning Rate: 0.0096193977 +2025-02-19 04:55:42,194 Train Loss: 0.0006066, Val Loss: 0.0005124 +2025-02-19 04:55:42,194 Epoch 426/2000 +2025-02-19 04:56:23,812 Current Learning Rate: 0.0095887731 +2025-02-19 
04:56:23,813 Train Loss: 0.0009191, Val Loss: 0.0005508 +2025-02-19 04:56:23,813 Epoch 427/2000 +2025-02-19 04:57:06,896 Current Learning Rate: 0.0095570164 +2025-02-19 04:57:06,896 Train Loss: 0.0005025, Val Loss: 0.0005318 +2025-02-19 04:57:06,897 Epoch 428/2000 +2025-02-19 04:57:49,196 Current Learning Rate: 0.0095241353 +2025-02-19 04:57:49,196 Train Loss: 0.0007160, Val Loss: 0.0005306 +2025-02-19 04:57:49,197 Epoch 429/2000 +2025-02-19 04:58:30,796 Current Learning Rate: 0.0094901379 +2025-02-19 04:58:30,796 Train Loss: 0.0005730, Val Loss: 0.0005305 +2025-02-19 04:58:30,797 Epoch 430/2000 +2025-02-19 04:59:12,769 Current Learning Rate: 0.0094550326 +2025-02-19 04:59:12,770 Train Loss: 0.0005401, Val Loss: 0.0005623 +2025-02-19 04:59:12,770 Epoch 431/2000 +2025-02-19 04:59:55,209 Current Learning Rate: 0.0094188282 +2025-02-19 04:59:55,210 Train Loss: 0.0006020, Val Loss: 0.0006503 +2025-02-19 04:59:55,210 Epoch 432/2000 +2025-02-19 05:00:37,315 Current Learning Rate: 0.0093815334 +2025-02-19 05:00:37,316 Train Loss: 0.0007239, Val Loss: 0.0006138 +2025-02-19 05:00:37,316 Epoch 433/2000 +2025-02-19 05:01:19,905 Current Learning Rate: 0.0093431576 +2025-02-19 05:01:21,522 Train Loss: 0.0005680, Val Loss: 0.0004980 +2025-02-19 05:01:21,522 Epoch 434/2000 +2025-02-19 05:02:02,519 Current Learning Rate: 0.0093037101 +2025-02-19 05:02:04,119 Train Loss: 0.0004935, Val Loss: 0.0004756 +2025-02-19 05:02:04,120 Epoch 435/2000 +2025-02-19 05:02:45,634 Current Learning Rate: 0.0092632008 +2025-02-19 05:02:45,635 Train Loss: 0.0005562, Val Loss: 0.0004907 +2025-02-19 05:02:45,635 Epoch 436/2000 +2025-02-19 05:03:27,333 Current Learning Rate: 0.0092216396 +2025-02-19 05:03:27,333 Train Loss: 0.0003976, Val Loss: 0.0004793 +2025-02-19 05:03:27,333 Epoch 437/2000 +2025-02-19 05:04:09,511 Current Learning Rate: 0.0091790368 +2025-02-19 05:04:09,511 Train Loss: 0.0003841, Val Loss: 0.0005024 +2025-02-19 05:04:09,512 Epoch 438/2000 +2025-02-19 05:04:52,164 Current Learning 
Rate: 0.0091354029 +2025-02-19 05:04:52,164 Train Loss: 0.0006027, Val Loss: 0.0005281 +2025-02-19 05:04:52,165 Epoch 439/2000 +2025-02-19 05:05:34,568 Current Learning Rate: 0.0090907486 +2025-02-19 05:05:34,569 Train Loss: 0.0006028, Val Loss: 0.0005513 +2025-02-19 05:05:34,569 Epoch 440/2000 +2025-02-19 05:06:16,815 Current Learning Rate: 0.0090450850 +2025-02-19 05:06:16,816 Train Loss: 0.0008756, Val Loss: 0.0006729 +2025-02-19 05:06:16,816 Epoch 441/2000 +2025-02-19 05:06:59,148 Current Learning Rate: 0.0089984233 +2025-02-19 05:06:59,149 Train Loss: 0.0008495, Val Loss: 0.0007859 +2025-02-19 05:06:59,149 Epoch 442/2000 +2025-02-19 05:07:41,008 Current Learning Rate: 0.0089507751 +2025-02-19 05:07:41,009 Train Loss: 0.0006611, Val Loss: 0.0011604 +2025-02-19 05:07:41,009 Epoch 443/2000 +2025-02-19 05:08:23,461 Current Learning Rate: 0.0089021520 +2025-02-19 05:08:23,462 Train Loss: 0.0014813, Val Loss: 0.0011819 +2025-02-19 05:08:23,462 Epoch 444/2000 +2025-02-19 05:09:05,278 Current Learning Rate: 0.0088525662 +2025-02-19 05:09:05,278 Train Loss: 0.0008577, Val Loss: 0.0006556 +2025-02-19 05:09:05,278 Epoch 445/2000 +2025-02-19 05:09:47,635 Current Learning Rate: 0.0088020298 +2025-02-19 05:09:47,636 Train Loss: 0.0007838, Val Loss: 0.0011348 +2025-02-19 05:09:47,636 Epoch 446/2000 +2025-02-19 05:10:29,446 Current Learning Rate: 0.0087505553 +2025-02-19 05:10:29,447 Train Loss: 0.0006052, Val Loss: 0.0007102 +2025-02-19 05:10:29,447 Epoch 447/2000 +2025-02-19 05:11:11,859 Current Learning Rate: 0.0086981555 +2025-02-19 05:11:11,859 Train Loss: 0.0007764, Val Loss: 0.0005680 +2025-02-19 05:11:11,860 Epoch 448/2000 +2025-02-19 05:11:53,488 Current Learning Rate: 0.0086448431 +2025-02-19 05:11:53,488 Train Loss: 0.0005562, Val Loss: 0.0005067 +2025-02-19 05:11:53,488 Epoch 449/2000 +2025-02-19 05:12:35,594 Current Learning Rate: 0.0085906315 +2025-02-19 05:12:35,595 Train Loss: 0.0005712, Val Loss: 0.0005141 +2025-02-19 05:12:35,595 Epoch 450/2000 +2025-02-19 
05:13:17,776 Current Learning Rate: 0.0085355339 +2025-02-19 05:13:17,777 Train Loss: 0.0006742, Val Loss: 0.0005745 +2025-02-19 05:13:17,779 Epoch 451/2000 +2025-02-19 05:14:00,141 Current Learning Rate: 0.0084795640 +2025-02-19 05:14:00,142 Train Loss: 0.0006229, Val Loss: 0.0005439 +2025-02-19 05:14:00,142 Epoch 452/2000 +2025-02-19 05:14:43,061 Current Learning Rate: 0.0084227355 +2025-02-19 05:14:44,930 Train Loss: 0.0005119, Val Loss: 0.0004677 +2025-02-19 05:14:44,930 Epoch 453/2000 +2025-02-19 05:15:26,644 Current Learning Rate: 0.0083650626 +2025-02-19 05:15:26,645 Train Loss: 0.0007364, Val Loss: 0.0007350 +2025-02-19 05:15:26,645 Epoch 454/2000 +2025-02-19 05:16:08,705 Current Learning Rate: 0.0083065593 +2025-02-19 05:16:08,705 Train Loss: 0.0007379, Val Loss: 0.0005459 +2025-02-19 05:16:08,706 Epoch 455/2000 +2025-02-19 05:16:51,546 Current Learning Rate: 0.0082472402 +2025-02-19 05:16:51,547 Train Loss: 0.0006996, Val Loss: 0.0005822 +2025-02-19 05:16:51,547 Epoch 456/2000 +2025-02-19 05:17:34,090 Current Learning Rate: 0.0081871199 +2025-02-19 05:17:34,090 Train Loss: 0.0004520, Val Loss: 0.0004866 +2025-02-19 05:17:34,090 Epoch 457/2000 +2025-02-19 05:18:15,498 Current Learning Rate: 0.0081262133 +2025-02-19 05:18:15,498 Train Loss: 0.0006052, Val Loss: 0.0005053 +2025-02-19 05:18:15,498 Epoch 458/2000 +2025-02-19 05:18:58,170 Current Learning Rate: 0.0080645353 +2025-02-19 05:18:58,171 Train Loss: 0.0006190, Val Loss: 0.0004781 +2025-02-19 05:18:58,171 Epoch 459/2000 +2025-02-19 05:19:40,010 Current Learning Rate: 0.0080021011 +2025-02-19 05:19:40,011 Train Loss: 0.0006041, Val Loss: 0.0004863 +2025-02-19 05:19:40,011 Epoch 460/2000 +2025-02-19 05:20:22,747 Current Learning Rate: 0.0079389263 +2025-02-19 05:20:23,612 Train Loss: 0.0003799, Val Loss: 0.0004317 +2025-02-19 05:20:23,612 Epoch 461/2000 +2025-02-19 05:21:04,471 Current Learning Rate: 0.0078750263 +2025-02-19 05:21:04,472 Train Loss: 0.0005422, Val Loss: 0.0006141 +2025-02-19 
05:21:04,472 Epoch 462/2000 +2025-02-19 05:21:46,753 Current Learning Rate: 0.0078104169 +2025-02-19 05:21:46,754 Train Loss: 0.0007853, Val Loss: 0.0004837 +2025-02-19 05:21:46,754 Epoch 463/2000 +2025-02-19 05:22:29,624 Current Learning Rate: 0.0077451141 +2025-02-19 05:22:31,132 Train Loss: 0.0003548, Val Loss: 0.0004008 +2025-02-19 05:22:31,132 Epoch 464/2000 +2025-02-19 05:23:12,721 Current Learning Rate: 0.0076791340 +2025-02-19 05:23:12,722 Train Loss: 0.0004980, Val Loss: 0.0004498 +2025-02-19 05:23:12,722 Epoch 465/2000 +2025-02-19 05:23:55,091 Current Learning Rate: 0.0076124928 +2025-02-19 05:23:55,092 Train Loss: 0.0005148, Val Loss: 0.0004187 +2025-02-19 05:23:55,092 Epoch 466/2000 +2025-02-19 05:24:37,146 Current Learning Rate: 0.0075452071 +2025-02-19 05:24:37,146 Train Loss: 0.0004491, Val Loss: 0.0004449 +2025-02-19 05:24:37,147 Epoch 467/2000 +2025-02-19 05:25:18,997 Current Learning Rate: 0.0074772933 +2025-02-19 05:25:20,254 Train Loss: 0.0003183, Val Loss: 0.0003733 +2025-02-19 05:25:20,254 Epoch 468/2000 +2025-02-19 05:26:01,763 Current Learning Rate: 0.0074087684 +2025-02-19 05:26:01,764 Train Loss: 0.0004790, Val Loss: 0.0004833 +2025-02-19 05:26:01,764 Epoch 469/2000 +2025-02-19 05:26:44,224 Current Learning Rate: 0.0073396491 +2025-02-19 05:26:44,225 Train Loss: 0.0004525, Val Loss: 0.0003997 +2025-02-19 05:26:44,225 Epoch 470/2000 +2025-02-19 05:27:26,415 Current Learning Rate: 0.0072699525 +2025-02-19 05:27:26,416 Train Loss: 0.0005499, Val Loss: 0.0005679 +2025-02-19 05:27:26,417 Epoch 471/2000 +2025-02-19 05:28:08,356 Current Learning Rate: 0.0071996958 +2025-02-19 05:28:08,356 Train Loss: 0.0007884, Val Loss: 0.0004443 +2025-02-19 05:28:08,357 Epoch 472/2000 +2025-02-19 05:28:50,440 Current Learning Rate: 0.0071288965 +2025-02-19 05:28:50,440 Train Loss: 0.0005725, Val Loss: 0.0005078 +2025-02-19 05:28:50,440 Epoch 473/2000 +2025-02-19 05:29:32,509 Current Learning Rate: 0.0070575718 +2025-02-19 05:29:32,509 Train Loss: 0.0004556, Val 
Loss: 0.0004127 +2025-02-19 05:29:32,509 Epoch 474/2000 +2025-02-19 05:30:14,946 Current Learning Rate: 0.0069857395 +2025-02-19 05:30:16,391 Train Loss: 0.0004045, Val Loss: 0.0003711 +2025-02-19 05:30:16,391 Epoch 475/2000 +2025-02-19 05:30:58,562 Current Learning Rate: 0.0069134172 +2025-02-19 05:30:58,563 Train Loss: 0.0004232, Val Loss: 0.0003744 +2025-02-19 05:30:58,563 Epoch 476/2000 +2025-02-19 05:31:40,836 Current Learning Rate: 0.0068406228 +2025-02-19 05:31:40,836 Train Loss: 0.0003766, Val Loss: 0.0003917 +2025-02-19 05:31:40,836 Epoch 477/2000 +2025-02-19 05:32:22,221 Current Learning Rate: 0.0067673742 +2025-02-19 05:32:22,221 Train Loss: 0.0003884, Val Loss: 0.0003944 +2025-02-19 05:32:22,221 Epoch 478/2000 +2025-02-19 05:33:04,966 Current Learning Rate: 0.0066936896 +2025-02-19 05:33:04,967 Train Loss: 0.0006075, Val Loss: 0.0004759 +2025-02-19 05:33:04,967 Epoch 479/2000 +2025-02-19 05:33:46,987 Current Learning Rate: 0.0066195871 +2025-02-19 05:33:46,988 Train Loss: 0.0004698, Val Loss: 0.0004121 +2025-02-19 05:33:46,988 Epoch 480/2000 +2025-02-19 05:34:28,905 Current Learning Rate: 0.0065450850 +2025-02-19 05:34:28,906 Train Loss: 0.0004237, Val Loss: 0.0003810 +2025-02-19 05:34:28,906 Epoch 481/2000 +2025-02-19 05:35:11,126 Current Learning Rate: 0.0064702016 +2025-02-19 05:35:11,127 Train Loss: 0.0004413, Val Loss: 0.0003857 +2025-02-19 05:35:11,127 Epoch 482/2000 +2025-02-19 05:35:53,000 Current Learning Rate: 0.0063949555 +2025-02-19 05:35:53,000 Train Loss: 0.0003080, Val Loss: 0.0003937 +2025-02-19 05:35:53,001 Epoch 483/2000 +2025-02-19 05:36:35,037 Current Learning Rate: 0.0063193652 +2025-02-19 05:36:35,037 Train Loss: 0.0005708, Val Loss: 0.0005771 +2025-02-19 05:36:35,037 Epoch 484/2000 +2025-02-19 05:37:17,639 Current Learning Rate: 0.0062434494 +2025-02-19 05:37:17,639 Train Loss: 0.0004042, Val Loss: 0.0004315 +2025-02-19 05:37:17,640 Epoch 485/2000 +2025-02-19 05:38:00,158 Current Learning Rate: 0.0061672268 +2025-02-19 
05:38:00,158 Train Loss: 0.0004098, Val Loss: 0.0004201 +2025-02-19 05:38:00,158 Epoch 486/2000 +2025-02-19 05:38:41,705 Current Learning Rate: 0.0060907162 +2025-02-19 05:38:41,705 Train Loss: 0.0005263, Val Loss: 0.0004223 +2025-02-19 05:38:41,706 Epoch 487/2000 +2025-02-19 05:39:23,777 Current Learning Rate: 0.0060139365 +2025-02-19 05:39:23,778 Train Loss: 0.0003679, Val Loss: 0.0003711 +2025-02-19 05:39:23,778 Epoch 488/2000 +2025-02-19 05:40:05,715 Current Learning Rate: 0.0059369066 +2025-02-19 05:40:05,715 Train Loss: 0.0005354, Val Loss: 0.0004612 +2025-02-19 05:40:05,715 Epoch 489/2000 +2025-02-19 05:40:47,902 Current Learning Rate: 0.0058596455 +2025-02-19 05:40:47,902 Train Loss: 0.0004731, Val Loss: 0.0003831 +2025-02-19 05:40:47,903 Epoch 490/2000 +2025-02-19 05:41:30,989 Current Learning Rate: 0.0057821723 +2025-02-19 05:41:30,990 Train Loss: 0.0004212, Val Loss: 0.0004122 +2025-02-19 05:41:30,990 Epoch 491/2000 +2025-02-19 05:42:12,664 Current Learning Rate: 0.0057045062 +2025-02-19 05:42:14,170 Train Loss: 0.0004982, Val Loss: 0.0003638 +2025-02-19 05:42:14,171 Epoch 492/2000 +2025-02-19 05:42:55,278 Current Learning Rate: 0.0056266662 +2025-02-19 05:42:56,966 Train Loss: 0.0005396, Val Loss: 0.0003375 +2025-02-19 05:42:56,966 Epoch 493/2000 +2025-02-19 05:43:37,736 Current Learning Rate: 0.0055486716 +2025-02-19 05:43:37,737 Train Loss: 0.0004773, Val Loss: 0.0004535 +2025-02-19 05:43:37,737 Epoch 494/2000 +2025-02-19 05:44:20,641 Current Learning Rate: 0.0054705416 +2025-02-19 05:44:20,641 Train Loss: 0.0005965, Val Loss: 0.0004187 +2025-02-19 05:44:20,641 Epoch 495/2000 +2025-02-19 05:45:02,181 Current Learning Rate: 0.0053922955 +2025-02-19 05:45:02,181 Train Loss: 0.0005241, Val Loss: 0.0003464 +2025-02-19 05:45:02,182 Epoch 496/2000 +2025-02-19 05:45:44,287 Current Learning Rate: 0.0053139526 +2025-02-19 05:45:45,501 Train Loss: 0.0002688, Val Loss: 0.0003203 +2025-02-19 05:45:45,502 Epoch 497/2000 +2025-02-19 05:46:27,533 Current Learning 
Rate: 0.0052355323 +2025-02-19 05:46:27,533 Train Loss: 0.0004406, Val Loss: 0.0003698 +2025-02-19 05:46:27,533 Epoch 498/2000 +2025-02-19 05:47:09,929 Current Learning Rate: 0.0051570538 +2025-02-19 05:47:09,930 Train Loss: 0.0003715, Val Loss: 0.0003484 +2025-02-19 05:47:09,930 Epoch 499/2000 +2025-02-19 05:47:51,553 Current Learning Rate: 0.0050785366 +2025-02-19 05:47:51,553 Train Loss: 0.0003781, Val Loss: 0.0003582 +2025-02-19 05:47:51,553 Epoch 500/2000 +2025-02-19 05:48:33,695 Current Learning Rate: 0.0050000000 +2025-02-19 05:48:33,696 Train Loss: 0.0002624, Val Loss: 0.0003242 +2025-02-19 05:48:33,696 Epoch 501/2000 +2025-02-19 05:49:15,594 Current Learning Rate: 0.0049214634 +2025-02-19 05:49:15,595 Train Loss: 0.0003119, Val Loss: 0.0003366 +2025-02-19 05:49:15,595 Epoch 502/2000 +2025-02-19 05:49:58,360 Current Learning Rate: 0.0048429462 +2025-02-19 05:49:58,361 Train Loss: 0.0003463, Val Loss: 0.0003309 +2025-02-19 05:49:58,361 Epoch 503/2000 +2025-02-19 05:50:40,016 Current Learning Rate: 0.0047644677 +2025-02-19 05:50:40,017 Train Loss: 0.0003708, Val Loss: 0.0003349 +2025-02-19 05:50:40,017 Epoch 504/2000 +2025-02-19 05:51:22,363 Current Learning Rate: 0.0046860474 +2025-02-19 05:51:22,364 Train Loss: 0.0003091, Val Loss: 0.0003272 +2025-02-19 05:51:22,364 Epoch 505/2000 +2025-02-19 05:52:04,851 Current Learning Rate: 0.0046077045 +2025-02-19 05:52:04,851 Train Loss: 0.0003108, Val Loss: 0.0003311 +2025-02-19 05:52:04,852 Epoch 506/2000 +2025-02-19 05:52:46,945 Current Learning Rate: 0.0045294584 +2025-02-19 05:52:48,286 Train Loss: 0.0004131, Val Loss: 0.0003141 +2025-02-19 05:52:48,286 Epoch 507/2000 +2025-02-19 05:53:30,079 Current Learning Rate: 0.0044513284 +2025-02-19 05:53:31,989 Train Loss: 0.0002722, Val Loss: 0.0003069 +2025-02-19 05:53:31,990 Epoch 508/2000 +2025-02-19 05:54:13,863 Current Learning Rate: 0.0043733338 +2025-02-19 05:54:13,864 Train Loss: 0.0004167, Val Loss: 0.0003525 +2025-02-19 05:54:13,864 Epoch 509/2000 +2025-02-19 
05:54:56,559 Current Learning Rate: 0.0042954938 +2025-02-19 05:54:56,559 Train Loss: 0.0003157, Val Loss: 0.0003312 +2025-02-19 05:54:56,560 Epoch 510/2000 +2025-02-19 05:55:38,307 Current Learning Rate: 0.0042178277 +2025-02-19 05:55:38,308 Train Loss: 0.0003594, Val Loss: 0.0003238 +2025-02-19 05:55:38,308 Epoch 511/2000 +2025-02-19 05:56:20,090 Current Learning Rate: 0.0041403545 +2025-02-19 05:56:20,090 Train Loss: 0.0003570, Val Loss: 0.0003205 +2025-02-19 05:56:20,090 Epoch 512/2000 +2025-02-19 05:57:03,064 Current Learning Rate: 0.0040630934 +2025-02-19 05:57:03,065 Train Loss: 0.0004469, Val Loss: 0.0003230 +2025-02-19 05:57:03,065 Epoch 513/2000 +2025-02-19 05:57:45,600 Current Learning Rate: 0.0039860635 +2025-02-19 05:57:47,377 Train Loss: 0.0002437, Val Loss: 0.0003013 +2025-02-19 05:57:47,377 Epoch 514/2000 +2025-02-19 05:58:28,902 Current Learning Rate: 0.0039092838 +2025-02-19 05:58:28,903 Train Loss: 0.0004149, Val Loss: 0.0003221 +2025-02-19 05:58:28,903 Epoch 515/2000 +2025-02-19 05:59:11,420 Current Learning Rate: 0.0038327732 +2025-02-19 05:59:13,222 Train Loss: 0.0003974, Val Loss: 0.0002993 +2025-02-19 05:59:13,222 Epoch 516/2000 +2025-02-19 05:59:54,418 Current Learning Rate: 0.0037565506 +2025-02-19 05:59:55,781 Train Loss: 0.0002519, Val Loss: 0.0002987 +2025-02-19 05:59:55,781 Epoch 517/2000 +2025-02-19 06:00:37,676 Current Learning Rate: 0.0036806348 +2025-02-19 06:00:39,317 Train Loss: 0.0002545, Val Loss: 0.0002942 +2025-02-19 06:00:39,318 Epoch 518/2000 +2025-02-19 06:01:20,791 Current Learning Rate: 0.0036050445 +2025-02-19 06:01:21,952 Train Loss: 0.0002820, Val Loss: 0.0002911 +2025-02-19 06:01:21,952 Epoch 519/2000 +2025-02-19 06:02:03,476 Current Learning Rate: 0.0035297984 +2025-02-19 06:02:04,952 Train Loss: 0.0002594, Val Loss: 0.0002882 +2025-02-19 06:02:04,952 Epoch 520/2000 +2025-02-19 06:02:47,245 Current Learning Rate: 0.0034549150 +2025-02-19 06:02:48,590 Train Loss: 0.0003465, Val Loss: 0.0002798 +2025-02-19 
06:02:48,590 Epoch 521/2000 +2025-02-19 06:03:30,909 Current Learning Rate: 0.0033804129 +2025-02-19 06:03:30,909 Train Loss: 0.0002683, Val Loss: 0.0002808 +2025-02-19 06:03:30,910 Epoch 522/2000 +2025-02-19 06:04:12,685 Current Learning Rate: 0.0033063104 +2025-02-19 06:04:13,846 Train Loss: 0.0002855, Val Loss: 0.0002790 +2025-02-19 06:04:13,847 Epoch 523/2000 +2025-02-19 06:04:56,044 Current Learning Rate: 0.0032326258 +2025-02-19 06:04:56,044 Train Loss: 0.0002464, Val Loss: 0.0002810 +2025-02-19 06:04:56,044 Epoch 524/2000 +2025-02-19 06:05:38,338 Current Learning Rate: 0.0031593772 +2025-02-19 06:05:39,459 Train Loss: 0.0002622, Val Loss: 0.0002769 +2025-02-19 06:05:39,459 Epoch 525/2000 +2025-02-19 06:06:21,400 Current Learning Rate: 0.0030865828 +2025-02-19 06:06:21,401 Train Loss: 0.0003272, Val Loss: 0.0002777 +2025-02-19 06:06:21,401 Epoch 526/2000 +2025-02-19 06:07:03,123 Current Learning Rate: 0.0030142605 +2025-02-19 06:07:03,124 Train Loss: 0.0002613, Val Loss: 0.0002784 +2025-02-19 06:07:03,124 Epoch 527/2000 +2025-02-19 06:07:45,420 Current Learning Rate: 0.0029424282 +2025-02-19 06:07:45,421 Train Loss: 0.0002462, Val Loss: 0.0002777 +2025-02-19 06:07:45,421 Epoch 528/2000 +2025-02-19 06:08:27,818 Current Learning Rate: 0.0028711035 +2025-02-19 06:08:27,819 Train Loss: 0.0004061, Val Loss: 0.0003103 +2025-02-19 06:08:27,819 Epoch 529/2000 +2025-02-19 06:09:09,475 Current Learning Rate: 0.0028003042 +2025-02-19 06:09:10,600 Train Loss: 0.0002571, Val Loss: 0.0002736 +2025-02-19 06:09:10,601 Epoch 530/2000 +2025-02-19 06:09:51,522 Current Learning Rate: 0.0027300475 +2025-02-19 06:09:51,523 Train Loss: 0.0003015, Val Loss: 0.0002777 +2025-02-19 06:09:51,523 Epoch 531/2000 +2025-02-19 06:10:33,712 Current Learning Rate: 0.0026603509 +2025-02-19 06:10:33,714 Train Loss: 0.0002290, Val Loss: 0.0002745 +2025-02-19 06:10:33,714 Epoch 532/2000 +2025-02-19 06:11:15,611 Current Learning Rate: 0.0025912316 +2025-02-19 06:11:16,612 Train Loss: 0.0002785, Val 
Loss: 0.0002722 +2025-02-19 06:11:16,612 Epoch 533/2000 +2025-02-19 06:11:58,671 Current Learning Rate: 0.0025227067 +2025-02-19 06:11:58,672 Train Loss: 0.0002385, Val Loss: 0.0002724 +2025-02-19 06:11:58,672 Epoch 534/2000 +2025-02-19 06:12:40,916 Current Learning Rate: 0.0024547929 +2025-02-19 06:12:40,917 Train Loss: 0.0002859, Val Loss: 0.0002752 +2025-02-19 06:12:40,917 Epoch 535/2000 +2025-02-19 06:13:22,650 Current Learning Rate: 0.0023875072 +2025-02-19 06:13:22,651 Train Loss: 0.0002662, Val Loss: 0.0002760 +2025-02-19 06:13:22,651 Epoch 536/2000 +2025-02-19 06:14:04,821 Current Learning Rate: 0.0023208660 +2025-02-19 06:14:06,019 Train Loss: 0.0002035, Val Loss: 0.0002720 +2025-02-19 06:14:06,019 Epoch 537/2000 +2025-02-19 06:14:47,243 Current Learning Rate: 0.0022548859 +2025-02-19 06:14:47,244 Train Loss: 0.0002837, Val Loss: 0.0002741 +2025-02-19 06:14:47,245 Epoch 538/2000 +2025-02-19 06:15:29,728 Current Learning Rate: 0.0021895831 +2025-02-19 06:15:31,553 Train Loss: 0.0002225, Val Loss: 0.0002689 +2025-02-19 06:15:31,553 Epoch 539/2000 +2025-02-19 06:16:13,629 Current Learning Rate: 0.0021249737 +2025-02-19 06:16:13,630 Train Loss: 0.0002591, Val Loss: 0.0002701 +2025-02-19 06:16:13,630 Epoch 540/2000 +2025-02-19 06:16:55,152 Current Learning Rate: 0.0020610737 +2025-02-19 06:16:56,485 Train Loss: 0.0002778, Val Loss: 0.0002660 +2025-02-19 06:16:56,485 Epoch 541/2000 +2025-02-19 06:17:37,375 Current Learning Rate: 0.0019978989 +2025-02-19 06:17:37,376 Train Loss: 0.0002207, Val Loss: 0.0002723 +2025-02-19 06:17:37,376 Epoch 542/2000 +2025-02-19 06:18:19,945 Current Learning Rate: 0.0019354647 +2025-02-19 06:18:19,946 Train Loss: 0.0003223, Val Loss: 0.0002761 +2025-02-19 06:18:19,946 Epoch 543/2000 +2025-02-19 06:19:02,032 Current Learning Rate: 0.0018737867 +2025-02-19 06:19:02,033 Train Loss: 0.0002946, Val Loss: 0.0002819 +2025-02-19 06:19:02,033 Epoch 544/2000 +2025-02-19 06:19:44,646 Current Learning Rate: 0.0018128801 +2025-02-19 
06:19:46,183 Train Loss: 0.0002306, Val Loss: 0.0002601 +2025-02-19 06:19:46,183 Epoch 545/2000 +2025-02-19 06:20:27,315 Current Learning Rate: 0.0017527598 +2025-02-19 06:20:28,863 Train Loss: 0.0001874, Val Loss: 0.0002584 +2025-02-19 06:20:28,863 Epoch 546/2000 +2025-02-19 06:21:09,753 Current Learning Rate: 0.0016934407 +2025-02-19 06:21:09,754 Train Loss: 0.0003886, Val Loss: 0.0002627 +2025-02-19 06:21:09,754 Epoch 547/2000 +2025-02-19 06:21:52,213 Current Learning Rate: 0.0016349374 +2025-02-19 06:21:52,213 Train Loss: 0.0002434, Val Loss: 0.0002609 +2025-02-19 06:21:52,214 Epoch 548/2000 +2025-02-19 06:22:34,241 Current Learning Rate: 0.0015772645 +2025-02-19 06:22:35,574 Train Loss: 0.0002927, Val Loss: 0.0002559 +2025-02-19 06:22:35,574 Epoch 549/2000 +2025-02-19 06:23:17,405 Current Learning Rate: 0.0015204360 +2025-02-19 06:23:19,285 Train Loss: 0.0002755, Val Loss: 0.0002527 +2025-02-19 06:23:19,285 Epoch 550/2000 +2025-02-19 06:24:00,079 Current Learning Rate: 0.0014644661 +2025-02-19 06:24:01,382 Train Loss: 0.0002452, Val Loss: 0.0002502 +2025-02-19 06:24:01,383 Epoch 551/2000 +2025-02-19 06:24:42,573 Current Learning Rate: 0.0014093685 +2025-02-19 06:24:44,016 Train Loss: 0.0002218, Val Loss: 0.0002492 +2025-02-19 06:24:44,017 Epoch 552/2000 +2025-02-19 06:25:24,848 Current Learning Rate: 0.0013551569 +2025-02-19 06:25:25,948 Train Loss: 0.0002499, Val Loss: 0.0002487 +2025-02-19 06:25:25,949 Epoch 553/2000 +2025-02-19 06:26:07,529 Current Learning Rate: 0.0013018445 +2025-02-19 06:26:07,530 Train Loss: 0.0002559, Val Loss: 0.0002514 +2025-02-19 06:26:07,530 Epoch 554/2000 +2025-02-19 06:26:49,393 Current Learning Rate: 0.0012494447 +2025-02-19 06:26:49,394 Train Loss: 0.0002279, Val Loss: 0.0002492 +2025-02-19 06:26:49,394 Epoch 555/2000 +2025-02-19 06:27:31,777 Current Learning Rate: 0.0011979702 +2025-02-19 06:27:32,722 Train Loss: 0.0001903, Val Loss: 0.0002483 +2025-02-19 06:27:32,722 Epoch 556/2000 +2025-02-19 06:28:14,990 Current Learning 
Rate: 0.0011474338 +2025-02-19 06:28:14,991 Train Loss: 0.0002024, Val Loss: 0.0002522 +2025-02-19 06:28:14,991 Epoch 557/2000 +2025-02-19 06:28:57,488 Current Learning Rate: 0.0010978480 +2025-02-19 06:28:57,488 Train Loss: 0.0002124, Val Loss: 0.0002600 +2025-02-19 06:28:57,488 Epoch 558/2000 +2025-02-19 06:29:39,872 Current Learning Rate: 0.0010492249 +2025-02-19 06:29:39,873 Train Loss: 0.0002143, Val Loss: 0.0002581 +2025-02-19 06:29:39,873 Epoch 559/2000 +2025-02-19 06:30:21,245 Current Learning Rate: 0.0010015767 +2025-02-19 06:30:21,246 Train Loss: 0.0002643, Val Loss: 0.0002547 +2025-02-19 06:30:21,246 Epoch 560/2000 +2025-02-19 06:31:04,032 Current Learning Rate: 0.0009549150 +2025-02-19 06:31:04,033 Train Loss: 0.0003135, Val Loss: 0.0002499 +2025-02-19 06:31:04,033 Epoch 561/2000 +2025-02-19 06:31:46,117 Current Learning Rate: 0.0009092514 +2025-02-19 06:31:48,069 Train Loss: 0.0002040, Val Loss: 0.0002471 +2025-02-19 06:31:48,070 Epoch 562/2000 +2025-02-19 06:32:30,394 Current Learning Rate: 0.0008645971 +2025-02-19 06:32:32,037 Train Loss: 0.0003344, Val Loss: 0.0002460 +2025-02-19 06:32:32,038 Epoch 563/2000 +2025-02-19 06:33:14,345 Current Learning Rate: 0.0008209632 +2025-02-19 06:33:14,346 Train Loss: 0.0002724, Val Loss: 0.0002516 +2025-02-19 06:33:14,346 Epoch 564/2000 +2025-02-19 06:33:56,074 Current Learning Rate: 0.0007783604 +2025-02-19 06:33:57,430 Train Loss: 0.0002135, Val Loss: 0.0002449 +2025-02-19 06:33:57,430 Epoch 565/2000 +2025-02-19 06:34:38,507 Current Learning Rate: 0.0007367992 +2025-02-19 06:34:38,508 Train Loss: 0.0003281, Val Loss: 0.0002453 +2025-02-19 06:34:38,509 Epoch 566/2000 +2025-02-19 06:35:20,981 Current Learning Rate: 0.0006962899 +2025-02-19 06:35:22,895 Train Loss: 0.0002693, Val Loss: 0.0002442 +2025-02-19 06:35:22,895 Epoch 567/2000 +2025-02-19 06:36:04,780 Current Learning Rate: 0.0006568424 +2025-02-19 06:36:04,781 Train Loss: 0.0002088, Val Loss: 0.0002459 +2025-02-19 06:36:04,781 Epoch 568/2000 +2025-02-19 
06:36:47,388 Current Learning Rate: 0.0006184666 +2025-02-19 06:36:47,388 Train Loss: 0.0002961, Val Loss: 0.0002485 +2025-02-19 06:36:47,388 Epoch 569/2000 +2025-02-19 06:37:29,483 Current Learning Rate: 0.0005811718 +2025-02-19 06:37:29,484 Train Loss: 0.0002460, Val Loss: 0.0002454 +2025-02-19 06:37:29,484 Epoch 570/2000 +2025-02-19 06:38:11,812 Current Learning Rate: 0.0005449674 +2025-02-19 06:38:11,812 Train Loss: 0.0002234, Val Loss: 0.0002458 +2025-02-19 06:38:11,813 Epoch 571/2000 +2025-02-19 06:38:54,001 Current Learning Rate: 0.0005098621 +2025-02-19 06:38:55,717 Train Loss: 0.0002209, Val Loss: 0.0002413 +2025-02-19 06:38:55,718 Epoch 572/2000 +2025-02-19 06:39:36,303 Current Learning Rate: 0.0004758647 +2025-02-19 06:39:37,578 Train Loss: 0.0002310, Val Loss: 0.0002402 +2025-02-19 06:39:37,579 Epoch 573/2000 +2025-02-19 06:40:18,850 Current Learning Rate: 0.0004429836 +2025-02-19 06:40:20,178 Train Loss: 0.0002686, Val Loss: 0.0002395 +2025-02-19 06:40:20,179 Epoch 574/2000 +2025-02-19 06:41:01,175 Current Learning Rate: 0.0004112269 +2025-02-19 06:41:02,923 Train Loss: 0.0002137, Val Loss: 0.0002391 +2025-02-19 06:41:02,923 Epoch 575/2000 +2025-02-19 06:41:44,366 Current Learning Rate: 0.0003806023 +2025-02-19 06:41:45,698 Train Loss: 0.0002115, Val Loss: 0.0002389 +2025-02-19 06:41:45,699 Epoch 576/2000 +2025-02-19 06:42:26,649 Current Learning Rate: 0.0003511176 +2025-02-19 06:42:27,708 Train Loss: 0.0002499, Val Loss: 0.0002388 +2025-02-19 06:42:27,709 Epoch 577/2000 +2025-02-19 06:43:08,838 Current Learning Rate: 0.0003227798 +2025-02-19 06:43:10,060 Train Loss: 0.0002456, Val Loss: 0.0002387 +2025-02-19 06:43:10,060 Epoch 578/2000 +2025-02-19 06:43:50,855 Current Learning Rate: 0.0002955962 +2025-02-19 06:43:52,294 Train Loss: 0.0002681, Val Loss: 0.0002386 +2025-02-19 06:43:52,295 Epoch 579/2000 +2025-02-19 06:44:34,580 Current Learning Rate: 0.0002695732 +2025-02-19 06:44:36,377 Train Loss: 0.0003273, Val Loss: 0.0002385 +2025-02-19 
06:44:36,377 Epoch 580/2000 +2025-02-19 06:45:18,224 Current Learning Rate: 0.0002447174 +2025-02-19 06:45:19,536 Train Loss: 0.0002011, Val Loss: 0.0002381 +2025-02-19 06:45:19,536 Epoch 581/2000 +2025-02-19 06:46:01,176 Current Learning Rate: 0.0002210349 +2025-02-19 06:46:03,070 Train Loss: 0.0002540, Val Loss: 0.0002378 +2025-02-19 06:46:03,070 Epoch 582/2000 +2025-02-19 06:46:45,243 Current Learning Rate: 0.0001985316 +2025-02-19 06:46:46,326 Train Loss: 0.0002318, Val Loss: 0.0002376 +2025-02-19 06:46:46,326 Epoch 583/2000 +2025-02-19 06:47:28,556 Current Learning Rate: 0.0001772129 +2025-02-19 06:47:28,556 Train Loss: 0.0002896, Val Loss: 0.0002380 +2025-02-19 06:47:28,557 Epoch 584/2000 +2025-02-19 06:48:11,064 Current Learning Rate: 0.0001570842 +2025-02-19 06:48:12,664 Train Loss: 0.0003380, Val Loss: 0.0002376 +2025-02-19 06:48:12,665 Epoch 585/2000 +2025-02-19 06:48:54,897 Current Learning Rate: 0.0001381504 +2025-02-19 06:48:54,898 Train Loss: 0.0002447, Val Loss: 0.0002379 +2025-02-19 06:48:54,898 Epoch 586/2000 +2025-02-19 06:49:37,291 Current Learning Rate: 0.0001204162 +2025-02-19 06:49:38,242 Train Loss: 0.0002028, Val Loss: 0.0002375 +2025-02-19 06:49:38,242 Epoch 587/2000 +2025-02-19 06:50:20,475 Current Learning Rate: 0.0001038859 +2025-02-19 06:50:21,464 Train Loss: 0.0002826, Val Loss: 0.0002375 +2025-02-19 06:50:21,465 Epoch 588/2000 +2025-02-19 06:51:03,817 Current Learning Rate: 0.0000885637 +2025-02-19 06:51:05,653 Train Loss: 0.0001746, Val Loss: 0.0002374 +2025-02-19 06:51:05,653 Epoch 589/2000 +2025-02-19 06:51:46,892 Current Learning Rate: 0.0000744534 +2025-02-19 06:51:48,611 Train Loss: 0.0002355, Val Loss: 0.0002372 +2025-02-19 06:51:48,611 Epoch 590/2000 +2025-02-19 06:52:29,853 Current Learning Rate: 0.0000615583 +2025-02-19 06:52:31,775 Train Loss: 0.0003048, Val Loss: 0.0002372 +2025-02-19 06:52:31,775 Epoch 591/2000 +2025-02-19 06:53:14,199 Current Learning Rate: 0.0000498817 +2025-02-19 06:53:14,199 Train Loss: 0.0002310, Val 
Loss: 0.0002373 +2025-02-19 06:53:14,200 Epoch 592/2000 +2025-02-19 06:53:56,207 Current Learning Rate: 0.0000394265 +2025-02-19 06:53:56,208 Train Loss: 0.0002854, Val Loss: 0.0002372 +2025-02-19 06:53:56,208 Epoch 593/2000 +2025-02-19 06:54:38,519 Current Learning Rate: 0.0000301952 +2025-02-19 06:54:38,521 Train Loss: 0.0002099, Val Loss: 0.0002373 +2025-02-19 06:54:38,522 Epoch 594/2000 +2025-02-19 06:55:21,304 Current Learning Rate: 0.0000221902 +2025-02-19 06:55:23,146 Train Loss: 0.0002825, Val Loss: 0.0002371 +2025-02-19 06:55:23,146 Epoch 595/2000 +2025-02-19 06:56:04,975 Current Learning Rate: 0.0000154133 +2025-02-19 06:56:04,976 Train Loss: 0.0002642, Val Loss: 0.0002372 +2025-02-19 06:56:04,976 Epoch 596/2000 +2025-02-19 06:56:47,498 Current Learning Rate: 0.0000098664 +2025-02-19 06:56:48,911 Train Loss: 0.0002336, Val Loss: 0.0002371 +2025-02-19 06:56:48,912 Epoch 597/2000 +2025-02-19 06:57:30,487 Current Learning Rate: 0.0000055506 +2025-02-19 06:57:30,488 Train Loss: 0.0001997, Val Loss: 0.0002371 +2025-02-19 06:57:30,489 Epoch 598/2000 +2025-02-19 06:58:12,838 Current Learning Rate: 0.0000024672 +2025-02-19 06:58:12,839 Train Loss: 0.0001900, Val Loss: 0.0002372 +2025-02-19 06:58:12,839 Epoch 599/2000 +2025-02-19 06:58:55,614 Current Learning Rate: 0.0000006168 +2025-02-19 06:58:55,615 Train Loss: 0.0002084, Val Loss: 0.0002372 +2025-02-19 06:58:55,615 Epoch 600/2000 +2025-02-19 06:59:37,848 Current Learning Rate: 0.0000000000 +2025-02-19 06:59:37,849 Train Loss: 0.0002021, Val Loss: 0.0002372 +2025-02-19 06:59:37,849 Epoch 601/2000 +2025-02-19 07:00:19,658 Current Learning Rate: 0.0000006168 +2025-02-19 07:00:20,916 Train Loss: 0.0003145, Val Loss: 0.0002371 +2025-02-19 07:00:20,917 Epoch 602/2000 +2025-02-19 07:01:01,984 Current Learning Rate: 0.0000024672 +2025-02-19 07:01:04,496 Train Loss: 0.0002494, Val Loss: 0.0002370 +2025-02-19 07:01:04,497 Epoch 603/2000 +2025-02-19 07:01:45,084 Current Learning Rate: 0.0000055506 +2025-02-19 
07:01:45,085 Train Loss: 0.0001790, Val Loss: 0.0002372 +2025-02-19 07:01:45,085 Epoch 604/2000 +2025-02-19 07:02:27,601 Current Learning Rate: 0.0000098664 +2025-02-19 07:02:27,601 Train Loss: 0.0002461, Val Loss: 0.0002371 +2025-02-19 07:02:27,601 Epoch 605/2000 +2025-02-19 07:03:09,523 Current Learning Rate: 0.0000154133 +2025-02-19 07:03:09,523 Train Loss: 0.0002005, Val Loss: 0.0002372 +2025-02-19 07:03:09,523 Epoch 606/2000 +2025-02-19 07:03:51,270 Current Learning Rate: 0.0000221902 +2025-02-19 07:03:51,270 Train Loss: 0.0002321, Val Loss: 0.0002370 +2025-02-19 07:03:51,271 Epoch 607/2000 +2025-02-19 07:04:34,195 Current Learning Rate: 0.0000301952 +2025-02-19 07:04:34,196 Train Loss: 0.0001753, Val Loss: 0.0002372 +2025-02-19 07:04:34,196 Epoch 608/2000 +2025-02-19 07:05:16,732 Current Learning Rate: 0.0000394265 +2025-02-19 07:05:16,733 Train Loss: 0.0002089, Val Loss: 0.0002371 +2025-02-19 07:05:16,733 Epoch 609/2000 +2025-02-19 07:05:59,342 Current Learning Rate: 0.0000498817 +2025-02-19 07:05:59,343 Train Loss: 0.0002577, Val Loss: 0.0002372 +2025-02-19 07:05:59,343 Epoch 610/2000 +2025-02-19 07:06:41,711 Current Learning Rate: 0.0000615583 +2025-02-19 07:06:41,712 Train Loss: 0.0001925, Val Loss: 0.0002371 +2025-02-19 07:06:41,712 Epoch 611/2000 +2025-02-19 07:07:24,263 Current Learning Rate: 0.0000744534 +2025-02-19 07:07:24,264 Train Loss: 0.0002486, Val Loss: 0.0002371 +2025-02-19 07:07:24,264 Epoch 612/2000 +2025-02-19 07:08:06,595 Current Learning Rate: 0.0000885637 +2025-02-19 07:08:06,595 Train Loss: 0.0002794, Val Loss: 0.0002371 +2025-02-19 07:08:06,596 Epoch 613/2000 +2025-02-19 07:08:49,087 Current Learning Rate: 0.0001038859 +2025-02-19 07:08:49,088 Train Loss: 0.0002004, Val Loss: 0.0002371 +2025-02-19 07:08:49,088 Epoch 614/2000 +2025-02-19 07:09:30,817 Current Learning Rate: 0.0001204162 +2025-02-19 07:09:32,618 Train Loss: 0.0002439, Val Loss: 0.0002369 +2025-02-19 07:09:32,618 Epoch 615/2000 +2025-02-19 07:10:14,463 Current Learning 
Rate: 0.0001381504 +2025-02-19 07:10:14,464 Train Loss: 0.0001739, Val Loss: 0.0002370 +2025-02-19 07:10:14,465 Epoch 616/2000 +2025-02-19 07:10:56,392 Current Learning Rate: 0.0001570842 +2025-02-19 07:10:56,392 Train Loss: 0.0002387, Val Loss: 0.0002371 +2025-02-19 07:10:56,392 Epoch 617/2000 +2025-02-19 07:11:39,078 Current Learning Rate: 0.0001772129 +2025-02-19 07:11:39,079 Train Loss: 0.0002783, Val Loss: 0.0002370 +2025-02-19 07:11:39,079 Epoch 618/2000 +2025-02-19 07:12:20,976 Current Learning Rate: 0.0001985316 +2025-02-19 07:12:20,976 Train Loss: 0.0001920, Val Loss: 0.0002370 +2025-02-19 07:12:20,976 Epoch 619/2000 +2025-02-19 07:13:03,761 Current Learning Rate: 0.0002210349 +2025-02-19 07:13:05,631 Train Loss: 0.0002135, Val Loss: 0.0002368 +2025-02-19 07:13:05,635 Epoch 620/2000 +2025-02-19 07:13:46,239 Current Learning Rate: 0.0002447174 +2025-02-19 07:13:46,240 Train Loss: 0.0002199, Val Loss: 0.0002369 +2025-02-19 07:13:46,240 Epoch 621/2000 +2025-02-19 07:14:28,650 Current Learning Rate: 0.0002695732 +2025-02-19 07:14:28,650 Train Loss: 0.0002523, Val Loss: 0.0002375 +2025-02-19 07:14:28,651 Epoch 622/2000 +2025-02-19 07:15:10,564 Current Learning Rate: 0.0002955962 +2025-02-19 07:15:10,564 Train Loss: 0.0002158, Val Loss: 0.0002372 +2025-02-19 07:15:10,565 Epoch 623/2000 +2025-02-19 07:15:53,375 Current Learning Rate: 0.0003227798 +2025-02-19 07:15:53,376 Train Loss: 0.0002252, Val Loss: 0.0002377 +2025-02-19 07:15:53,376 Epoch 624/2000 +2025-02-19 07:16:35,143 Current Learning Rate: 0.0003511176 +2025-02-19 07:16:35,143 Train Loss: 0.0001752, Val Loss: 0.0002381 +2025-02-19 07:16:35,143 Epoch 625/2000 +2025-02-19 07:17:18,077 Current Learning Rate: 0.0003806023 +2025-02-19 07:17:18,078 Train Loss: 0.0002751, Val Loss: 0.0002408 +2025-02-19 07:17:18,078 Epoch 626/2000 +2025-02-19 07:18:00,525 Current Learning Rate: 0.0004112269 +2025-02-19 07:18:00,529 Train Loss: 0.0002363, Val Loss: 0.0002399 +2025-02-19 07:18:00,529 Epoch 627/2000 +2025-02-19 
07:18:42,127 Current Learning Rate: 0.0004429836 +2025-02-19 07:18:42,127 Train Loss: 0.0002647, Val Loss: 0.0002398 +2025-02-19 07:18:42,128 Epoch 628/2000 +2025-02-19 07:19:24,994 Current Learning Rate: 0.0004758647 +2025-02-19 07:19:24,994 Train Loss: 0.0002519, Val Loss: 0.0002401 +2025-02-19 07:19:24,995 Epoch 629/2000 +2025-02-19 07:20:07,321 Current Learning Rate: 0.0005098621 +2025-02-19 07:20:07,321 Train Loss: 0.0002105, Val Loss: 0.0002384 +2025-02-19 07:20:07,321 Epoch 630/2000 +2025-02-19 07:20:48,834 Current Learning Rate: 0.0005449674 +2025-02-19 07:20:48,835 Train Loss: 0.0001916, Val Loss: 0.0002402 +2025-02-19 07:20:48,835 Epoch 631/2000 +2025-02-19 07:21:31,667 Current Learning Rate: 0.0005811718 +2025-02-19 07:21:31,667 Train Loss: 0.0002547, Val Loss: 0.0002403 +2025-02-19 07:21:31,668 Epoch 632/2000 +2025-02-19 07:22:13,996 Current Learning Rate: 0.0006184666 +2025-02-19 07:22:13,997 Train Loss: 0.0002452, Val Loss: 0.0002393 +2025-02-19 07:22:13,997 Epoch 633/2000 +2025-02-19 07:22:56,002 Current Learning Rate: 0.0006568424 +2025-02-19 07:22:56,002 Train Loss: 0.0001869, Val Loss: 0.0002376 +2025-02-19 07:22:56,002 Epoch 634/2000 +2025-02-19 07:23:37,789 Current Learning Rate: 0.0006962899 +2025-02-19 07:23:37,790 Train Loss: 0.0002581, Val Loss: 0.0002383 +2025-02-19 07:23:37,790 Epoch 635/2000 +2025-02-19 07:24:19,822 Current Learning Rate: 0.0007367992 +2025-02-19 07:24:19,822 Train Loss: 0.0002577, Val Loss: 0.0002386 +2025-02-19 07:24:19,823 Epoch 636/2000 +2025-02-19 07:25:02,201 Current Learning Rate: 0.0007783604 +2025-02-19 07:25:02,206 Train Loss: 0.0002433, Val Loss: 0.0002412 +2025-02-19 07:25:02,207 Epoch 637/2000 +2025-02-19 07:25:44,125 Current Learning Rate: 0.0008209632 +2025-02-19 07:25:44,126 Train Loss: 0.0001948, Val Loss: 0.0002384 +2025-02-19 07:25:44,126 Epoch 638/2000 +2025-02-19 07:26:26,315 Current Learning Rate: 0.0008645971 +2025-02-19 07:26:27,784 Train Loss: 0.0001859, Val Loss: 0.0002368 +2025-02-19 
07:26:27,785 Epoch 639/2000 +2025-02-19 07:27:10,104 Current Learning Rate: 0.0009092514 +2025-02-19 07:27:10,104 Train Loss: 0.0003203, Val Loss: 0.0002578 +2025-02-19 07:27:10,104 Epoch 640/2000 +2025-02-19 07:27:52,284 Current Learning Rate: 0.0009549150 +2025-02-19 07:27:52,284 Train Loss: 0.0002427, Val Loss: 0.0002446 +2025-02-19 07:27:52,284 Epoch 641/2000 +2025-02-19 07:28:34,424 Current Learning Rate: 0.0010015767 +2025-02-19 07:28:34,424 Train Loss: 0.0001816, Val Loss: 0.0002370 +2025-02-19 07:28:34,424 Epoch 642/2000 +2025-02-19 07:29:15,765 Current Learning Rate: 0.0010492249 +2025-02-19 07:29:15,765 Train Loss: 0.0002813, Val Loss: 0.0002773 +2025-02-19 07:29:15,766 Epoch 643/2000 +2025-02-19 07:29:57,416 Current Learning Rate: 0.0010978480 +2025-02-19 07:29:57,417 Train Loss: 0.0002752, Val Loss: 0.0002428 +2025-02-19 07:29:57,417 Epoch 644/2000 +2025-02-19 07:30:39,575 Current Learning Rate: 0.0011474338 +2025-02-19 07:30:39,575 Train Loss: 0.0002347, Val Loss: 0.0002417 +2025-02-19 07:30:39,575 Epoch 645/2000 +2025-02-19 07:31:22,017 Current Learning Rate: 0.0011979702 +2025-02-19 07:31:22,018 Train Loss: 0.0002336, Val Loss: 0.0002432 +2025-02-19 07:31:22,018 Epoch 646/2000 +2025-02-19 07:32:03,980 Current Learning Rate: 0.0012494447 +2025-02-19 07:32:03,981 Train Loss: 0.0002189, Val Loss: 0.0002428 +2025-02-19 07:32:03,981 Epoch 647/2000 +2025-02-19 07:32:46,910 Current Learning Rate: 0.0013018445 +2025-02-19 07:32:46,911 Train Loss: 0.0001917, Val Loss: 0.0002478 +2025-02-19 07:32:46,911 Epoch 648/2000 +2025-02-19 07:33:29,101 Current Learning Rate: 0.0013551569 +2025-02-19 07:33:29,101 Train Loss: 0.0001939, Val Loss: 0.0002375 +2025-02-19 07:33:29,102 Epoch 649/2000 +2025-02-19 07:34:10,953 Current Learning Rate: 0.0014093685 +2025-02-19 07:34:10,954 Train Loss: 0.0002210, Val Loss: 0.0002384 +2025-02-19 07:34:10,954 Epoch 650/2000 +2025-02-19 07:34:53,037 Current Learning Rate: 0.0014644661 +2025-02-19 07:34:53,037 Train Loss: 0.0002128, Val 
Loss: 0.0002448 +2025-02-19 07:34:53,038 Epoch 651/2000 +2025-02-19 07:35:35,199 Current Learning Rate: 0.0015204360 +2025-02-19 07:35:35,199 Train Loss: 0.0002916, Val Loss: 0.0002456 +2025-02-19 07:35:35,200 Epoch 652/2000 +2025-02-19 07:36:17,390 Current Learning Rate: 0.0015772645 +2025-02-19 07:36:17,390 Train Loss: 0.0002577, Val Loss: 0.0002472 +2025-02-19 07:36:17,390 Epoch 653/2000 +2025-02-19 07:36:59,396 Current Learning Rate: 0.0016349374 +2025-02-19 07:36:59,396 Train Loss: 0.0004794, Val Loss: 0.0002895 +2025-02-19 07:36:59,396 Epoch 654/2000 +2025-02-19 07:37:42,285 Current Learning Rate: 0.0016934407 +2025-02-19 07:37:42,286 Train Loss: 0.0002776, Val Loss: 0.0002684 +2025-02-19 07:37:42,286 Epoch 655/2000 +2025-02-19 07:38:24,472 Current Learning Rate: 0.0017527598 +2025-02-19 07:38:24,473 Train Loss: 0.0002408, Val Loss: 0.0002488 +2025-02-19 07:38:24,473 Epoch 656/2000 +2025-02-19 07:39:06,746 Current Learning Rate: 0.0018128801 +2025-02-19 07:39:06,746 Train Loss: 0.0002114, Val Loss: 0.0002393 +2025-02-19 07:39:06,746 Epoch 657/2000 +2025-02-19 07:39:48,332 Current Learning Rate: 0.0018737867 +2025-02-19 07:39:48,333 Train Loss: 0.0002913, Val Loss: 0.0002595 +2025-02-19 07:39:48,333 Epoch 658/2000 +2025-02-19 07:40:30,363 Current Learning Rate: 0.0019354647 +2025-02-19 07:40:30,364 Train Loss: 0.0002702, Val Loss: 0.0002475 +2025-02-19 07:40:30,364 Epoch 659/2000 +2025-02-19 07:41:13,118 Current Learning Rate: 0.0019978989 +2025-02-19 07:41:13,119 Train Loss: 0.0003239, Val Loss: 0.0002651 +2025-02-19 07:41:13,119 Epoch 660/2000 +2025-02-19 07:41:55,370 Current Learning Rate: 0.0020610737 +2025-02-19 07:41:55,370 Train Loss: 0.0002177, Val Loss: 0.0002501 +2025-02-19 07:41:55,371 Epoch 661/2000 +2025-02-19 07:42:37,242 Current Learning Rate: 0.0021249737 +2025-02-19 07:42:37,243 Train Loss: 0.0003259, Val Loss: 0.0002693 +2025-02-19 07:42:37,243 Epoch 662/2000 +2025-02-19 07:43:19,343 Current Learning Rate: 0.0021895831 +2025-02-19 
07:43:19,343 Train Loss: 0.0002933, Val Loss: 0.0002791 +2025-02-19 07:43:19,343 Epoch 663/2000 +2025-02-19 07:44:01,770 Current Learning Rate: 0.0022548859 +2025-02-19 07:44:01,770 Train Loss: 0.0002875, Val Loss: 0.0002821 +2025-02-19 07:44:01,770 Epoch 664/2000 +2025-02-19 07:44:43,811 Current Learning Rate: 0.0023208660 +2025-02-19 07:44:43,812 Train Loss: 0.0002538, Val Loss: 0.0002802 +2025-02-19 07:44:43,812 Epoch 665/2000 +2025-02-19 07:45:25,646 Current Learning Rate: 0.0023875072 +2025-02-19 07:45:25,646 Train Loss: 0.0002558, Val Loss: 0.0002512 +2025-02-19 07:45:25,646 Epoch 666/2000 +2025-02-19 07:46:07,798 Current Learning Rate: 0.0024547929 +2025-02-19 07:46:07,798 Train Loss: 0.0002212, Val Loss: 0.0002557 +2025-02-19 07:46:07,798 Epoch 667/2000 +2025-02-19 07:46:50,892 Current Learning Rate: 0.0025227067 +2025-02-19 07:46:50,892 Train Loss: 0.0003808, Val Loss: 0.0003113 +2025-02-19 07:46:50,893 Epoch 668/2000 +2025-02-19 07:47:33,100 Current Learning Rate: 0.0025912316 +2025-02-19 07:47:33,100 Train Loss: 0.0004104, Val Loss: 0.0006509 +2025-02-19 07:47:33,101 Epoch 669/2000 +2025-02-19 07:48:15,345 Current Learning Rate: 0.0026603509 +2025-02-19 07:48:15,346 Train Loss: 0.0004954, Val Loss: 0.0003491 +2025-02-19 07:48:15,346 Epoch 670/2000 +2025-02-19 07:48:57,189 Current Learning Rate: 0.0027300475 +2025-02-19 07:48:57,190 Train Loss: 0.0002983, Val Loss: 0.0005237 +2025-02-19 07:48:57,191 Epoch 671/2000 +2025-02-19 07:49:39,957 Current Learning Rate: 0.0028003042 +2025-02-19 07:49:39,957 Train Loss: 0.0004556, Val Loss: 0.0003442 +2025-02-19 07:49:39,958 Epoch 672/2000 +2025-02-19 07:50:22,388 Current Learning Rate: 0.0028711035 +2025-02-19 07:50:22,389 Train Loss: 0.0003345, Val Loss: 0.0003035 +2025-02-19 07:50:22,390 Epoch 673/2000 +2025-02-19 07:51:04,958 Current Learning Rate: 0.0029424282 +2025-02-19 07:51:04,959 Train Loss: 0.0002505, Val Loss: 0.0002670 +2025-02-19 07:51:04,959 Epoch 674/2000 +2025-02-19 07:51:47,354 Current Learning 
Rate: 0.0030142605 +2025-02-19 07:51:47,355 Train Loss: 0.0002686, Val Loss: 0.0002657 +2025-02-19 07:51:47,355 Epoch 675/2000 +2025-02-19 07:52:29,379 Current Learning Rate: 0.0030865828 +2025-02-19 07:52:29,380 Train Loss: 0.0003683, Val Loss: 0.0003607 +2025-02-19 07:52:29,380 Epoch 676/2000 +2025-02-19 07:53:11,313 Current Learning Rate: 0.0031593772 +2025-02-19 07:53:11,314 Train Loss: 0.0003030, Val Loss: 0.0002968 +2025-02-19 07:53:11,314 Epoch 677/2000 +2025-02-19 07:53:54,034 Current Learning Rate: 0.0032326258 +2025-02-19 07:53:54,035 Train Loss: 0.0002228, Val Loss: 0.0002677 +2025-02-19 07:53:54,035 Epoch 678/2000 +2025-02-19 07:54:36,144 Current Learning Rate: 0.0033063104 +2025-02-19 07:54:36,145 Train Loss: 0.0002338, Val Loss: 0.0002545 +2025-02-19 07:54:36,145 Epoch 679/2000 +2025-02-19 07:55:18,466 Current Learning Rate: 0.0033804129 +2025-02-19 07:55:18,467 Train Loss: 0.0001908, Val Loss: 0.0002461 +2025-02-19 07:55:18,467 Epoch 680/2000 +2025-02-19 07:56:01,072 Current Learning Rate: 0.0034549150 +2025-02-19 07:56:01,073 Train Loss: 0.0002083, Val Loss: 0.0002523 +2025-02-19 07:56:01,073 Epoch 681/2000 +2025-02-19 07:56:42,694 Current Learning Rate: 0.0035297984 +2025-02-19 07:56:42,695 Train Loss: 0.0002374, Val Loss: 0.0002554 +2025-02-19 07:56:42,695 Epoch 682/2000 +2025-02-19 07:57:24,617 Current Learning Rate: 0.0036050445 +2025-02-19 07:57:24,617 Train Loss: 0.0003086, Val Loss: 0.0003118 +2025-02-19 07:57:24,617 Epoch 683/2000 +2025-02-19 07:58:07,063 Current Learning Rate: 0.0036806348 +2025-02-19 07:58:07,064 Train Loss: 0.0002155, Val Loss: 0.0002764 +2025-02-19 07:58:07,064 Epoch 684/2000 +2025-02-19 07:58:49,501 Current Learning Rate: 0.0037565506 +2025-02-19 07:58:49,501 Train Loss: 0.0003324, Val Loss: 0.0003044 +2025-02-19 07:58:49,502 Epoch 685/2000 +2025-02-19 07:59:31,967 Current Learning Rate: 0.0038327732 +2025-02-19 07:59:31,968 Train Loss: 0.0002889, Val Loss: 0.0003545 +2025-02-19 07:59:31,968 Epoch 686/2000 +2025-02-19 
08:00:14,612 Current Learning Rate: 0.0039092838 +2025-02-19 08:00:14,613 Train Loss: 0.0003394, Val Loss: 0.0002843 +2025-02-19 08:00:14,613 Epoch 687/2000 +2025-02-19 08:00:56,386 Current Learning Rate: 0.0039860635 +2025-02-19 08:00:56,386 Train Loss: 0.0002550, Val Loss: 0.0002777 +2025-02-19 08:00:56,387 Epoch 688/2000 +2025-02-19 08:01:39,051 Current Learning Rate: 0.0040630934 +2025-02-19 08:01:39,051 Train Loss: 0.0002982, Val Loss: 0.0003284 +2025-02-19 08:01:39,051 Epoch 689/2000 +2025-02-19 08:02:21,206 Current Learning Rate: 0.0041403545 +2025-02-19 08:02:21,207 Train Loss: 0.0003345, Val Loss: 0.0003489 +2025-02-19 08:02:21,207 Epoch 690/2000 +2025-02-19 08:03:03,355 Current Learning Rate: 0.0042178277 +2025-02-19 08:03:03,356 Train Loss: 0.0003361, Val Loss: 0.0003024 +2025-02-19 08:03:03,357 Epoch 691/2000 +2025-02-19 08:03:45,217 Current Learning Rate: 0.0042954938 +2025-02-19 08:03:45,218 Train Loss: 0.0003063, Val Loss: 0.0002888 +2025-02-19 08:03:45,218 Epoch 692/2000 +2025-02-19 08:04:27,599 Current Learning Rate: 0.0043733338 +2025-02-19 08:04:27,600 Train Loss: 0.0002462, Val Loss: 0.0002904 +2025-02-19 08:04:27,600 Epoch 693/2000 +2025-02-19 08:05:10,289 Current Learning Rate: 0.0044513284 +2025-02-19 08:05:10,290 Train Loss: 0.0003479, Val Loss: 0.0003365 +2025-02-19 08:05:10,290 Epoch 694/2000 +2025-02-19 08:05:52,646 Current Learning Rate: 0.0045294584 +2025-02-19 08:05:52,646 Train Loss: 0.0004360, Val Loss: 0.0003609 +2025-02-19 08:05:52,646 Epoch 695/2000 +2025-02-19 08:06:34,962 Current Learning Rate: 0.0046077045 +2025-02-19 08:06:34,962 Train Loss: 0.0002802, Val Loss: 0.0003584 +2025-02-19 08:06:34,962 Epoch 696/2000 +2025-02-19 08:07:17,356 Current Learning Rate: 0.0046860474 +2025-02-19 08:07:17,357 Train Loss: 0.0002687, Val Loss: 0.0003193 +2025-02-19 08:07:17,357 Epoch 697/2000 +2025-02-19 08:07:59,384 Current Learning Rate: 0.0047644677 +2025-02-19 08:07:59,385 Train Loss: 0.0003666, Val Loss: 0.0003625 +2025-02-19 
08:07:59,385 Epoch 698/2000 +2025-02-19 08:08:41,990 Current Learning Rate: 0.0048429462 +2025-02-19 08:08:41,991 Train Loss: 0.0003011, Val Loss: 0.0003517 +2025-02-19 08:08:42,010 Epoch 699/2000 +2025-02-19 08:09:24,285 Current Learning Rate: 0.0049214634 +2025-02-19 08:09:24,286 Train Loss: 0.0005195, Val Loss: 0.0003861 +2025-02-19 08:09:24,286 Epoch 700/2000 +2025-02-19 08:10:06,619 Current Learning Rate: 0.0050000000 +2025-02-19 08:10:06,619 Train Loss: 0.0003301, Val Loss: 0.0003612 +2025-02-19 08:10:06,619 Epoch 701/2000 +2025-02-19 08:10:49,033 Current Learning Rate: 0.0050785366 +2025-02-19 08:10:49,033 Train Loss: 0.0003648, Val Loss: 0.0003463 +2025-02-19 08:10:49,033 Epoch 702/2000 +2025-02-19 08:11:30,446 Current Learning Rate: 0.0051570538 +2025-02-19 08:11:30,446 Train Loss: 0.0003002, Val Loss: 0.0003217 +2025-02-19 08:11:30,446 Epoch 703/2000 +2025-02-19 08:12:12,520 Current Learning Rate: 0.0052355323 +2025-02-19 08:12:12,520 Train Loss: 0.0003252, Val Loss: 0.0003043 +2025-02-19 08:12:12,521 Epoch 704/2000 +2025-02-19 08:12:54,491 Current Learning Rate: 0.0053139526 +2025-02-19 08:12:54,492 Train Loss: 0.0003919, Val Loss: 0.0003349 +2025-02-19 08:12:54,492 Epoch 705/2000 +2025-02-19 08:13:37,023 Current Learning Rate: 0.0053922955 +2025-02-19 08:13:37,023 Train Loss: 0.0003203, Val Loss: 0.0003031 +2025-02-19 08:13:37,024 Epoch 706/2000 +2025-02-19 08:14:19,147 Current Learning Rate: 0.0054705416 +2025-02-19 08:14:19,147 Train Loss: 0.0002619, Val Loss: 0.0003003 +2025-02-19 08:14:19,148 Epoch 707/2000 +2025-02-19 08:15:00,388 Current Learning Rate: 0.0055486716 +2025-02-19 08:15:00,389 Train Loss: 0.0003916, Val Loss: 0.0003082 +2025-02-19 08:15:00,389 Epoch 708/2000 +2025-02-19 08:15:43,316 Current Learning Rate: 0.0056266662 +2025-02-19 08:15:43,316 Train Loss: 0.0003771, Val Loss: 0.0003041 +2025-02-19 08:15:43,317 Epoch 709/2000 +2025-02-19 08:16:25,852 Current Learning Rate: 0.0057045062 +2025-02-19 08:16:25,852 Train Loss: 0.0003420, Val 
Loss: 0.0002982 +2025-02-19 08:16:25,852 Epoch 710/2000 +2025-02-19 08:17:08,454 Current Learning Rate: 0.0057821723 +2025-02-19 08:17:08,454 Train Loss: 0.0002496, Val Loss: 0.0002904 +2025-02-19 08:17:08,455 Epoch 711/2000 +2025-02-19 08:17:49,976 Current Learning Rate: 0.0058596455 +2025-02-19 08:17:49,976 Train Loss: 0.0002878, Val Loss: 0.0003422 +2025-02-19 08:17:49,976 Epoch 712/2000 +2025-02-19 08:18:32,140 Current Learning Rate: 0.0059369066 +2025-02-19 08:18:32,141 Train Loss: 0.0003151, Val Loss: 0.0003491 +2025-02-19 08:18:32,141 Epoch 713/2000 +2025-02-19 08:19:14,227 Current Learning Rate: 0.0060139365 +2025-02-19 08:19:14,227 Train Loss: 0.0003117, Val Loss: 0.0003364 +2025-02-19 08:19:14,227 Epoch 714/2000 +2025-02-19 08:19:56,438 Current Learning Rate: 0.0060907162 +2025-02-19 08:19:56,438 Train Loss: 0.0003786, Val Loss: 0.0003138 +2025-02-19 08:19:56,438 Epoch 715/2000 +2025-02-19 08:20:38,698 Current Learning Rate: 0.0061672268 +2025-02-19 08:20:38,698 Train Loss: 0.0005672, Val Loss: 0.0003957 +2025-02-19 08:20:38,699 Epoch 716/2000 +2025-02-19 08:21:20,831 Current Learning Rate: 0.0062434494 +2025-02-19 08:21:20,832 Train Loss: 0.0002956, Val Loss: 0.0003159 +2025-02-19 08:21:20,832 Epoch 717/2000 +2025-02-19 08:22:03,018 Current Learning Rate: 0.0063193652 +2025-02-19 08:22:03,019 Train Loss: 0.0003038, Val Loss: 0.0003372 +2025-02-19 08:22:03,019 Epoch 718/2000 +2025-02-19 08:22:45,188 Current Learning Rate: 0.0063949555 +2025-02-19 08:22:45,188 Train Loss: 0.0002473, Val Loss: 0.0003249 +2025-02-19 08:22:45,188 Epoch 719/2000 +2025-02-19 08:23:26,597 Current Learning Rate: 0.0064702016 +2025-02-19 08:23:26,598 Train Loss: 0.0003983, Val Loss: 0.0003876 +2025-02-19 08:23:26,598 Epoch 720/2000 +2025-02-19 08:24:09,083 Current Learning Rate: 0.0065450850 +2025-02-19 08:24:09,083 Train Loss: 0.0003770, Val Loss: 0.0004178 +2025-02-19 08:24:09,083 Epoch 721/2000 +2025-02-19 08:24:52,068 Current Learning Rate: 0.0066195871 +2025-02-19 
08:24:52,069 Train Loss: 0.0003907, Val Loss: 0.0003751 +2025-02-19 08:24:52,069 Epoch 722/2000 +2025-02-19 08:25:34,058 Current Learning Rate: 0.0066936896 +2025-02-19 08:25:34,059 Train Loss: 0.0003827, Val Loss: 0.0003355 +2025-02-19 08:25:34,059 Epoch 723/2000 +2025-02-19 08:26:15,808 Current Learning Rate: 0.0067673742 +2025-02-19 08:26:15,808 Train Loss: 0.0002732, Val Loss: 0.0003048 +2025-02-19 08:26:15,809 Epoch 724/2000 +2025-02-19 08:26:58,017 Current Learning Rate: 0.0068406228 +2025-02-19 08:26:58,017 Train Loss: 0.0003315, Val Loss: 0.0003806 +2025-02-19 08:26:58,017 Epoch 725/2000 +2025-02-19 08:27:39,844 Current Learning Rate: 0.0069134172 +2025-02-19 08:27:39,845 Train Loss: 0.0004120, Val Loss: 0.0003432 +2025-02-19 08:27:39,845 Epoch 726/2000 +2025-02-19 08:28:22,032 Current Learning Rate: 0.0069857395 +2025-02-19 08:28:22,033 Train Loss: 0.0002649, Val Loss: 0.0003330 +2025-02-19 08:28:22,033 Epoch 727/2000 +2025-02-19 08:29:04,473 Current Learning Rate: 0.0070575718 +2025-02-19 08:29:04,473 Train Loss: 0.0003888, Val Loss: 0.0004027 +2025-02-19 08:29:04,474 Epoch 728/2000 +2025-02-19 08:29:46,521 Current Learning Rate: 0.0071288965 +2025-02-19 08:29:46,522 Train Loss: 0.0004294, Val Loss: 0.0003806 +2025-02-19 08:29:46,522 Epoch 729/2000 +2025-02-19 08:30:28,938 Current Learning Rate: 0.0071996958 +2025-02-19 08:30:28,939 Train Loss: 0.0002661, Val Loss: 0.0003254 +2025-02-19 08:30:28,939 Epoch 730/2000 +2025-02-19 08:31:10,562 Current Learning Rate: 0.0072699525 +2025-02-19 08:31:10,563 Train Loss: 0.0004338, Val Loss: 0.0004343 +2025-02-19 08:31:10,563 Epoch 731/2000 +2025-02-19 08:31:53,182 Current Learning Rate: 0.0073396491 +2025-02-19 08:31:53,183 Train Loss: 0.0004643, Val Loss: 0.0004133 +2025-02-19 08:31:53,183 Epoch 732/2000 +2025-02-19 08:32:35,723 Current Learning Rate: 0.0074087684 +2025-02-19 08:32:35,723 Train Loss: 0.0004339, Val Loss: 0.0003724 +2025-02-19 08:32:35,724 Epoch 733/2000 +2025-02-19 08:33:17,865 Current Learning 
Rate: 0.0074772933 +2025-02-19 08:33:17,866 Train Loss: 0.0002832, Val Loss: 0.0003381 +2025-02-19 08:33:17,866 Epoch 734/2000 +2025-02-19 08:33:59,918 Current Learning Rate: 0.0075452071 +2025-02-19 08:33:59,918 Train Loss: 0.0003939, Val Loss: 0.0004025 +2025-02-19 08:33:59,919 Epoch 735/2000 +2025-02-19 08:34:42,934 Current Learning Rate: 0.0076124928 +2025-02-19 08:34:42,934 Train Loss: 0.0003956, Val Loss: 0.0004171 +2025-02-19 08:34:42,934 Epoch 736/2000 +2025-02-19 08:35:25,378 Current Learning Rate: 0.0076791340 +2025-02-19 08:35:25,378 Train Loss: 0.0003780, Val Loss: 0.0004246 +2025-02-19 08:35:25,378 Epoch 737/2000 +2025-02-19 08:36:07,007 Current Learning Rate: 0.0077451141 +2025-02-19 08:36:07,008 Train Loss: 0.0003736, Val Loss: 0.0004223 +2025-02-19 08:36:07,008 Epoch 738/2000 +2025-02-19 08:36:48,708 Current Learning Rate: 0.0078104169 +2025-02-19 08:36:48,709 Train Loss: 0.0003878, Val Loss: 0.0003883 +2025-02-19 08:36:48,709 Epoch 739/2000 +2025-02-19 08:37:30,852 Current Learning Rate: 0.0078750263 +2025-02-19 08:37:30,853 Train Loss: 0.0005845, Val Loss: 0.0007616 +2025-02-19 08:37:30,853 Epoch 740/2000 +2025-02-19 08:38:13,549 Current Learning Rate: 0.0079389263 +2025-02-19 08:38:13,550 Train Loss: 0.0005016, Val Loss: 0.0004309 +2025-02-19 08:38:13,550 Epoch 741/2000 +2025-02-19 08:38:55,509 Current Learning Rate: 0.0080021011 +2025-02-19 08:38:55,509 Train Loss: 0.0004296, Val Loss: 0.0005207 +2025-02-19 08:38:55,509 Epoch 742/2000 +2025-02-19 08:39:37,538 Current Learning Rate: 0.0080645353 +2025-02-19 08:39:37,539 Train Loss: 0.0003211, Val Loss: 0.0003339 +2025-02-19 08:39:37,539 Epoch 743/2000 +2025-02-19 08:40:20,410 Current Learning Rate: 0.0081262133 +2025-02-19 08:40:20,410 Train Loss: 0.0004299, Val Loss: 0.0006630 +2025-02-19 08:40:20,410 Epoch 744/2000 +2025-02-19 08:41:02,931 Current Learning Rate: 0.0081871199 +2025-02-19 08:41:02,931 Train Loss: 0.0003806, Val Loss: 0.0003425 +2025-02-19 08:41:02,932 Epoch 745/2000 +2025-02-19 
08:41:44,485 Current Learning Rate: 0.0082472402 +2025-02-19 08:41:44,485 Train Loss: 0.0002521, Val Loss: 0.0002972 +2025-02-19 08:41:44,486 Epoch 746/2000 +2025-02-19 08:42:26,497 Current Learning Rate: 0.0083065593 +2025-02-19 08:42:26,497 Train Loss: 0.0003984, Val Loss: 0.0003115 +2025-02-19 08:42:26,498 Epoch 747/2000 +2025-02-19 08:43:08,978 Current Learning Rate: 0.0083650626 +2025-02-19 08:43:08,979 Train Loss: 0.0003764, Val Loss: 0.0003453 +2025-02-19 08:43:08,979 Epoch 748/2000 +2025-02-19 08:43:50,797 Current Learning Rate: 0.0084227355 +2025-02-19 08:43:50,798 Train Loss: 0.0003750, Val Loss: 0.0003580 +2025-02-19 08:43:50,798 Epoch 749/2000 +2025-02-19 08:44:33,301 Current Learning Rate: 0.0084795640 +2025-02-19 08:44:33,301 Train Loss: 0.0002551, Val Loss: 0.0003045 +2025-02-19 08:44:33,301 Epoch 750/2000 +2025-02-19 08:45:15,579 Current Learning Rate: 0.0085355339 +2025-02-19 08:45:15,579 Train Loss: 0.0003559, Val Loss: 0.0003591 +2025-02-19 08:45:15,579 Epoch 751/2000 +2025-02-19 08:45:57,868 Current Learning Rate: 0.0085906315 +2025-02-19 08:45:57,869 Train Loss: 0.0003607, Val Loss: 0.0003361 +2025-02-19 08:45:57,869 Epoch 752/2000 +2025-02-19 08:46:40,247 Current Learning Rate: 0.0086448431 +2025-02-19 08:46:40,248 Train Loss: 0.0003204, Val Loss: 0.0003289 +2025-02-19 08:46:40,248 Epoch 753/2000 +2025-02-19 08:47:22,618 Current Learning Rate: 0.0086981555 +2025-02-19 08:47:22,618 Train Loss: 0.0003136, Val Loss: 0.0003184 +2025-02-19 08:47:22,618 Epoch 754/2000 +2025-02-19 08:48:05,548 Current Learning Rate: 0.0087505553 +2025-02-19 08:48:05,549 Train Loss: 0.0002781, Val Loss: 0.0002845 +2025-02-19 08:48:05,549 Epoch 755/2000 +2025-02-19 08:48:47,861 Current Learning Rate: 0.0088020298 +2025-02-19 08:48:47,861 Train Loss: 0.0002399, Val Loss: 0.0003239 +2025-02-19 08:48:47,862 Epoch 756/2000 +2025-02-19 08:49:29,974 Current Learning Rate: 0.0088525662 +2025-02-19 08:49:29,974 Train Loss: 0.0002316, Val Loss: 0.0003127 +2025-02-19 
08:49:29,975 Epoch 757/2000 +2025-02-19 08:50:11,614 Current Learning Rate: 0.0089021520 +2025-02-19 08:50:11,615 Train Loss: 0.0003025, Val Loss: 0.0003323 +2025-02-19 08:50:11,615 Epoch 758/2000 +2025-02-19 08:50:54,125 Current Learning Rate: 0.0089507751 +2025-02-19 08:50:54,125 Train Loss: 0.0002754, Val Loss: 0.0003341 +2025-02-19 08:50:54,126 Epoch 759/2000 +2025-02-19 08:51:35,587 Current Learning Rate: 0.0089984233 +2025-02-19 08:51:35,588 Train Loss: 0.0003189, Val Loss: 0.0003535 +2025-02-19 08:51:35,588 Epoch 760/2000 +2025-02-19 08:52:18,537 Current Learning Rate: 0.0090450850 +2025-02-19 08:52:18,538 Train Loss: 0.0003448, Val Loss: 0.0003251 +2025-02-19 08:52:18,538 Epoch 761/2000 +2025-02-19 08:53:00,991 Current Learning Rate: 0.0090907486 +2025-02-19 08:53:00,992 Train Loss: 0.0004004, Val Loss: 0.0004170 +2025-02-19 08:53:00,992 Epoch 762/2000 +2025-02-19 08:53:43,320 Current Learning Rate: 0.0091354029 +2025-02-19 08:53:43,321 Train Loss: 0.0004323, Val Loss: 0.0003578 +2025-02-19 08:53:43,321 Epoch 763/2000 +2025-02-19 08:54:25,781 Current Learning Rate: 0.0091790368 +2025-02-19 08:54:25,782 Train Loss: 0.0003679, Val Loss: 0.0003554 +2025-02-19 08:54:25,782 Epoch 764/2000 +2025-02-19 08:55:07,524 Current Learning Rate: 0.0092216396 +2025-02-19 08:55:07,525 Train Loss: 0.0002570, Val Loss: 0.0003138 +2025-02-19 08:55:07,525 Epoch 765/2000 +2025-02-19 08:55:49,541 Current Learning Rate: 0.0092632008 +2025-02-19 08:55:49,548 Train Loss: 0.0003031, Val Loss: 0.0002954 +2025-02-19 08:55:49,548 Epoch 766/2000 +2025-02-19 08:56:32,197 Current Learning Rate: 0.0093037101 +2025-02-19 08:56:32,198 Train Loss: 0.0002658, Val Loss: 0.0003406 +2025-02-19 08:56:32,198 Epoch 767/2000 +2025-02-19 08:57:14,984 Current Learning Rate: 0.0093431576 +2025-02-19 08:57:14,985 Train Loss: 0.0003225, Val Loss: 0.0003082 +2025-02-19 08:57:14,985 Epoch 768/2000 +2025-02-19 08:57:57,192 Current Learning Rate: 0.0093815334 +2025-02-19 08:57:57,192 Train Loss: 0.0003149, Val 
Loss: 0.0003478 +2025-02-19 08:57:57,193 Epoch 769/2000 +2025-02-19 08:58:38,882 Current Learning Rate: 0.0094188282 +2025-02-19 08:58:38,883 Train Loss: 0.0003987, Val Loss: 0.0003626 +2025-02-19 08:58:38,883 Epoch 770/2000 +2025-02-19 08:59:21,022 Current Learning Rate: 0.0094550326 +2025-02-19 08:59:21,023 Train Loss: 0.0003963, Val Loss: 0.0004802 +2025-02-19 08:59:21,023 Epoch 771/2000 +2025-02-19 09:00:03,875 Current Learning Rate: 0.0094901379 +2025-02-19 09:00:03,875 Train Loss: 0.0004145, Val Loss: 0.0003850 +2025-02-19 09:00:03,876 Epoch 772/2000 +2025-02-19 09:00:46,055 Current Learning Rate: 0.0095241353 +2025-02-19 09:00:46,056 Train Loss: 0.0003176, Val Loss: 0.0003589 +2025-02-19 09:00:46,057 Epoch 773/2000 +2025-02-19 09:01:28,107 Current Learning Rate: 0.0095570164 +2025-02-19 09:01:28,107 Train Loss: 0.0003245, Val Loss: 0.0003176 +2025-02-19 09:01:28,108 Epoch 774/2000 +2025-02-19 09:02:10,274 Current Learning Rate: 0.0095887731 +2025-02-19 09:02:10,275 Train Loss: 0.0004123, Val Loss: 0.0004311 +2025-02-19 09:02:10,275 Epoch 775/2000 +2025-02-19 09:02:52,322 Current Learning Rate: 0.0096193977 +2025-02-19 09:02:52,322 Train Loss: 0.0003604, Val Loss: 0.0003609 +2025-02-19 09:02:52,323 Epoch 776/2000 +2025-02-19 09:03:34,272 Current Learning Rate: 0.0096488824 +2025-02-19 09:03:34,272 Train Loss: 0.0003802, Val Loss: 0.0005324 +2025-02-19 09:03:34,273 Epoch 777/2000 +2025-02-19 09:04:17,147 Current Learning Rate: 0.0096772202 +2025-02-19 09:04:17,148 Train Loss: 0.0003407, Val Loss: 0.0003643 +2025-02-19 09:04:17,148 Epoch 778/2000 +2025-02-19 09:04:58,660 Current Learning Rate: 0.0097044038 +2025-02-19 09:04:58,660 Train Loss: 0.0003493, Val Loss: 0.0003573 +2025-02-19 09:04:58,660 Epoch 779/2000 +2025-02-19 09:05:41,639 Current Learning Rate: 0.0097304268 +2025-02-19 09:05:41,640 Train Loss: 0.0003664, Val Loss: 0.0003803 +2025-02-19 09:05:41,640 Epoch 780/2000 +2025-02-19 09:06:23,584 Current Learning Rate: 0.0097552826 +2025-02-19 
09:06:23,585 Train Loss: 0.0003585, Val Loss: 0.0003317 +2025-02-19 09:06:23,585 Epoch 781/2000 +2025-02-19 09:07:05,865 Current Learning Rate: 0.0097789651 +2025-02-19 09:07:05,866 Train Loss: 0.0002645, Val Loss: 0.0003162 +2025-02-19 09:07:05,866 Epoch 782/2000 +2025-02-19 09:07:48,419 Current Learning Rate: 0.0098014684 +2025-02-19 09:07:48,420 Train Loss: 0.0003188, Val Loss: 0.0003147 +2025-02-19 09:07:48,420 Epoch 783/2000 +2025-02-19 09:08:29,785 Current Learning Rate: 0.0098227871 +2025-02-19 09:08:29,785 Train Loss: 0.0002702, Val Loss: 0.0003191 +2025-02-19 09:08:29,786 Epoch 784/2000 +2025-02-19 09:09:12,755 Current Learning Rate: 0.0098429158 +2025-02-19 09:09:12,755 Train Loss: 0.0002776, Val Loss: 0.0003687 +2025-02-19 09:09:12,755 Epoch 785/2000 +2025-02-19 09:09:54,539 Current Learning Rate: 0.0098618496 +2025-02-19 09:09:54,540 Train Loss: 0.0003539, Val Loss: 0.0002944 +2025-02-19 09:09:54,540 Epoch 786/2000 +2025-02-19 09:10:37,042 Current Learning Rate: 0.0098795838 +2025-02-19 09:10:37,043 Train Loss: 0.0003861, Val Loss: 0.0002801 +2025-02-19 09:10:37,043 Epoch 787/2000 +2025-02-19 09:11:19,345 Current Learning Rate: 0.0098961141 +2025-02-19 09:11:19,345 Train Loss: 0.0003542, Val Loss: 0.0003659 +2025-02-19 09:11:19,346 Epoch 788/2000 +2025-02-19 09:12:01,304 Current Learning Rate: 0.0099114363 +2025-02-19 09:12:01,304 Train Loss: 0.0003427, Val Loss: 0.0003504 +2025-02-19 09:12:01,304 Epoch 789/2000 +2025-02-19 09:12:43,325 Current Learning Rate: 0.0099255466 +2025-02-19 09:12:43,326 Train Loss: 0.0003668, Val Loss: 0.0004341 +2025-02-19 09:12:43,326 Epoch 790/2000 +2025-02-19 09:13:25,876 Current Learning Rate: 0.0099384417 +2025-02-19 09:13:25,876 Train Loss: 0.0002683, Val Loss: 0.0003132 +2025-02-19 09:13:25,876 Epoch 791/2000 +2025-02-19 09:14:08,363 Current Learning Rate: 0.0099501183 +2025-02-19 09:14:08,363 Train Loss: 0.0002492, Val Loss: 0.0002848 +2025-02-19 09:14:08,363 Epoch 792/2000 +2025-02-19 09:14:49,936 Current Learning 
Rate: 0.0099605735 +2025-02-19 09:14:49,937 Train Loss: 0.0003328, Val Loss: 0.0003217 +2025-02-19 09:14:49,937 Epoch 793/2000 +2025-02-19 09:15:32,933 Current Learning Rate: 0.0099698048 +2025-02-19 09:15:32,933 Train Loss: 0.0002965, Val Loss: 0.0003398 +2025-02-19 09:15:32,934 Epoch 794/2000 +2025-02-19 09:16:15,178 Current Learning Rate: 0.0099778098 +2025-02-19 09:16:15,179 Train Loss: 0.0003410, Val Loss: 0.0003797 +2025-02-19 09:16:15,179 Epoch 795/2000 +2025-02-19 09:16:57,140 Current Learning Rate: 0.0099845867 +2025-02-19 09:16:57,141 Train Loss: 0.0002314, Val Loss: 0.0002788 +2025-02-19 09:16:57,141 Epoch 796/2000 +2025-02-19 09:17:39,161 Current Learning Rate: 0.0099901336 +2025-02-19 09:17:39,161 Train Loss: 0.0002599, Val Loss: 0.0002648 +2025-02-19 09:17:39,162 Epoch 797/2000 +2025-02-19 09:18:22,107 Current Learning Rate: 0.0099944494 +2025-02-19 09:18:22,107 Train Loss: 0.0003162, Val Loss: 0.0003370 +2025-02-19 09:18:22,107 Epoch 798/2000 +2025-02-19 09:19:04,727 Current Learning Rate: 0.0099975328 +2025-02-19 09:19:04,728 Train Loss: 0.0002453, Val Loss: 0.0002912 +2025-02-19 09:19:04,728 Epoch 799/2000 +2025-02-19 09:19:47,354 Current Learning Rate: 0.0099993832 +2025-02-19 09:19:47,355 Train Loss: 0.0002422, Val Loss: 0.0002889 +2025-02-19 09:19:47,355 Epoch 800/2000 +2025-02-19 09:20:29,846 Current Learning Rate: 0.0100000000 +2025-02-19 09:20:29,846 Train Loss: 0.0002748, Val Loss: 0.0003038 +2025-02-19 09:20:29,846 Epoch 801/2000 +2025-02-19 09:21:11,641 Current Learning Rate: 0.0099993832 +2025-02-19 09:21:11,641 Train Loss: 0.0003209, Val Loss: 0.0003034 +2025-02-19 09:21:11,641 Epoch 802/2000 +2025-02-19 09:21:54,212 Current Learning Rate: 0.0099975328 +2025-02-19 09:21:54,213 Train Loss: 0.0002861, Val Loss: 0.0003200 +2025-02-19 09:21:54,213 Epoch 803/2000 +2025-02-19 09:22:36,751 Current Learning Rate: 0.0099944494 +2025-02-19 09:22:36,752 Train Loss: 0.0003006, Val Loss: 0.0003551 +2025-02-19 09:22:36,752 Epoch 804/2000 +2025-02-19 
09:23:19,069 Current Learning Rate: 0.0099901336 +2025-02-19 09:23:19,070 Train Loss: 0.0002620, Val Loss: 0.0003953 +2025-02-19 09:23:19,070 Epoch 805/2000 +2025-02-19 09:24:01,539 Current Learning Rate: 0.0099845867 +2025-02-19 09:24:01,540 Train Loss: 0.0003477, Val Loss: 0.0003762 +2025-02-19 09:24:01,540 Epoch 806/2000 +2025-02-19 09:24:43,816 Current Learning Rate: 0.0099778098 +2025-02-19 09:24:43,817 Train Loss: 0.0004498, Val Loss: 0.0003950 +2025-02-19 09:24:43,820 Epoch 807/2000 +2025-02-19 09:25:26,042 Current Learning Rate: 0.0099698048 +2025-02-19 09:25:26,043 Train Loss: 0.0004265, Val Loss: 0.0004768 +2025-02-19 09:25:26,043 Epoch 808/2000 +2025-02-19 09:26:08,567 Current Learning Rate: 0.0099605735 +2025-02-19 09:26:08,568 Train Loss: 0.0003728, Val Loss: 0.0003339 +2025-02-19 09:26:08,568 Epoch 809/2000 +2025-02-19 09:26:50,994 Current Learning Rate: 0.0099501183 +2025-02-19 09:26:50,995 Train Loss: 0.0002524, Val Loss: 0.0002946 +2025-02-19 09:26:50,995 Epoch 810/2000 +2025-02-19 09:27:33,327 Current Learning Rate: 0.0099384417 +2025-02-19 09:27:33,327 Train Loss: 0.0003094, Val Loss: 0.0003109 +2025-02-19 09:27:33,327 Epoch 811/2000 +2025-02-19 09:28:15,142 Current Learning Rate: 0.0099255466 +2025-02-19 09:28:15,143 Train Loss: 0.0002937, Val Loss: 0.0003070 +2025-02-19 09:28:15,144 Epoch 812/2000 +2025-02-19 09:28:57,192 Current Learning Rate: 0.0099114363 +2025-02-19 09:28:57,193 Train Loss: 0.0003389, Val Loss: 0.0003496 +2025-02-19 09:28:57,193 Epoch 813/2000 +2025-02-19 09:29:40,155 Current Learning Rate: 0.0098961141 +2025-02-19 09:29:40,156 Train Loss: 0.0003869, Val Loss: 0.0004789 +2025-02-19 09:29:40,156 Epoch 814/2000 +2025-02-19 09:30:22,187 Current Learning Rate: 0.0098795838 +2025-02-19 09:30:22,188 Train Loss: 0.0004648, Val Loss: 0.0004923 +2025-02-19 09:30:22,189 Epoch 815/2000 +2025-02-19 09:31:04,714 Current Learning Rate: 0.0098618496 +2025-02-19 09:31:04,715 Train Loss: 0.0004396, Val Loss: 0.0004313 +2025-02-19 
09:31:04,715 Epoch 816/2000 +2025-02-19 09:31:46,978 Current Learning Rate: 0.0098429158 +2025-02-19 09:31:46,978 Train Loss: 0.0003012, Val Loss: 0.0003065 +2025-02-19 09:31:46,979 Epoch 817/2000 +2025-02-19 09:32:28,642 Current Learning Rate: 0.0098227871 +2025-02-19 09:32:28,642 Train Loss: 0.0005200, Val Loss: 0.0002970 +2025-02-19 09:32:28,642 Epoch 818/2000 +2025-02-19 09:33:10,795 Current Learning Rate: 0.0098014684 +2025-02-19 09:33:10,795 Train Loss: 0.0003138, Val Loss: 0.0002644 +2025-02-19 09:33:10,795 Epoch 819/2000 +2025-02-19 09:33:53,104 Current Learning Rate: 0.0097789651 +2025-02-19 09:33:53,105 Train Loss: 0.0002579, Val Loss: 0.0002757 +2025-02-19 09:33:53,105 Epoch 820/2000 +2025-02-19 09:34:35,950 Current Learning Rate: 0.0097552826 +2025-02-19 09:34:35,951 Train Loss: 0.0002076, Val Loss: 0.0002595 +2025-02-19 09:34:35,951 Epoch 821/2000 +2025-02-19 09:35:18,141 Current Learning Rate: 0.0097304268 +2025-02-19 09:35:18,142 Train Loss: 0.0003254, Val Loss: 0.0003377 +2025-02-19 09:35:18,142 Epoch 822/2000 +2025-02-19 09:36:00,495 Current Learning Rate: 0.0097044038 +2025-02-19 09:36:00,496 Train Loss: 0.0002522, Val Loss: 0.0002549 +2025-02-19 09:36:00,496 Epoch 823/2000 +2025-02-19 09:36:41,881 Current Learning Rate: 0.0096772202 +2025-02-19 09:36:41,881 Train Loss: 0.0002909, Val Loss: 0.0002781 +2025-02-19 09:36:41,881 Epoch 824/2000 +2025-02-19 09:37:24,139 Current Learning Rate: 0.0096488824 +2025-02-19 09:37:24,140 Train Loss: 0.0001926, Val Loss: 0.0002436 +2025-02-19 09:37:24,140 Epoch 825/2000 +2025-02-19 09:38:06,754 Current Learning Rate: 0.0096193977 +2025-02-19 09:38:06,755 Train Loss: 0.0002949, Val Loss: 0.0002470 +2025-02-19 09:38:06,755 Epoch 826/2000 +2025-02-19 09:38:48,443 Current Learning Rate: 0.0095887731 +2025-02-19 09:38:48,444 Train Loss: 0.0001835, Val Loss: 0.0002463 +2025-02-19 09:38:48,444 Epoch 827/2000 +2025-02-19 09:39:30,974 Current Learning Rate: 0.0095570164 +2025-02-19 09:39:30,974 Train Loss: 0.0002468, Val 
Loss: 0.0002695 +2025-02-19 09:39:30,975 Epoch 828/2000 +2025-02-19 09:40:12,496 Current Learning Rate: 0.0095241353 +2025-02-19 09:40:12,497 Train Loss: 0.0003328, Val Loss: 0.0003059 +2025-02-19 09:40:12,497 Epoch 829/2000 +2025-02-19 09:40:55,226 Current Learning Rate: 0.0094901379 +2025-02-19 09:40:55,226 Train Loss: 0.0003580, Val Loss: 0.0003440 +2025-02-19 09:40:55,226 Epoch 830/2000 +2025-02-19 09:41:37,389 Current Learning Rate: 0.0094550326 +2025-02-19 09:41:37,389 Train Loss: 0.0002378, Val Loss: 0.0002563 +2025-02-19 09:41:37,390 Epoch 831/2000 +2025-02-19 09:42:19,333 Current Learning Rate: 0.0094188282 +2025-02-19 09:42:19,334 Train Loss: 0.0004114, Val Loss: 0.0003001 +2025-02-19 09:42:19,334 Epoch 832/2000 +2025-02-19 09:43:01,627 Current Learning Rate: 0.0093815334 +2025-02-19 09:43:01,628 Train Loss: 0.0003030, Val Loss: 0.0002988 +2025-02-19 09:43:01,628 Epoch 833/2000 +2025-02-19 09:43:43,218 Current Learning Rate: 0.0093431576 +2025-02-19 09:43:43,219 Train Loss: 0.0003900, Val Loss: 0.0003722 +2025-02-19 09:43:43,219 Epoch 834/2000 +2025-02-19 09:44:25,506 Current Learning Rate: 0.0093037101 +2025-02-19 09:44:25,506 Train Loss: 0.0003291, Val Loss: 0.0002643 +2025-02-19 09:44:25,506 Epoch 835/2000 +2025-02-19 09:45:08,202 Current Learning Rate: 0.0092632008 +2025-02-19 09:45:08,203 Train Loss: 0.0002980, Val Loss: 0.0002692 +2025-02-19 09:45:08,203 Epoch 836/2000 +2025-02-19 09:45:50,691 Current Learning Rate: 0.0092216396 +2025-02-19 09:45:50,692 Train Loss: 0.0003082, Val Loss: 0.0003760 +2025-02-19 09:45:50,692 Epoch 837/2000 +2025-02-19 09:46:32,492 Current Learning Rate: 0.0091790368 +2025-02-19 09:46:32,493 Train Loss: 0.0003202, Val Loss: 0.0004242 +2025-02-19 09:46:32,493 Epoch 838/2000 +2025-02-19 09:47:15,411 Current Learning Rate: 0.0091354029 +2025-02-19 09:47:15,411 Train Loss: 0.0003163, Val Loss: 0.0003254 +2025-02-19 09:47:15,411 Epoch 839/2000 +2025-02-19 09:47:57,364 Current Learning Rate: 0.0090907486 +2025-02-19 
09:47:57,365 Train Loss: 0.0002640, Val Loss: 0.0002732 +2025-02-19 09:47:57,365 Epoch 840/2000 +2025-02-19 09:48:39,952 Current Learning Rate: 0.0090450850 +2025-02-19 09:48:39,952 Train Loss: 0.0002808, Val Loss: 0.0002567 +2025-02-19 09:48:39,952 Epoch 841/2000 +2025-02-19 09:49:22,523 Current Learning Rate: 0.0089984233 +2025-02-19 09:49:22,524 Train Loss: 0.0002309, Val Loss: 0.0002523 +2025-02-19 09:49:22,524 Epoch 842/2000 +2025-02-19 09:50:05,060 Current Learning Rate: 0.0089507751 +2025-02-19 09:50:05,060 Train Loss: 0.0002402, Val Loss: 0.0002515 +2025-02-19 09:50:05,061 Epoch 843/2000 +2025-02-19 09:50:47,560 Current Learning Rate: 0.0089021520 +2025-02-19 09:50:49,438 Train Loss: 0.0002266, Val Loss: 0.0002325 +2025-02-19 09:50:49,440 Epoch 844/2000 +2025-02-19 09:51:31,615 Current Learning Rate: 0.0088525662 +2025-02-19 09:51:31,616 Train Loss: 0.0002412, Val Loss: 0.0002594 +2025-02-19 09:51:31,616 Epoch 845/2000 +2025-02-19 09:52:13,923 Current Learning Rate: 0.0088020298 +2025-02-19 09:52:13,923 Train Loss: 0.0002088, Val Loss: 0.0002422 +2025-02-19 09:52:13,923 Epoch 846/2000 +2025-02-19 09:52:55,323 Current Learning Rate: 0.0087505553 +2025-02-19 09:52:56,642 Train Loss: 0.0001662, Val Loss: 0.0002301 +2025-02-19 09:52:56,654 Epoch 847/2000 +2025-02-19 09:53:38,068 Current Learning Rate: 0.0086981555 +2025-02-19 09:53:38,069 Train Loss: 0.0002620, Val Loss: 0.0002637 +2025-02-19 09:53:38,069 Epoch 848/2000 +2025-02-19 09:54:20,828 Current Learning Rate: 0.0086448431 +2025-02-19 09:54:20,829 Train Loss: 0.0001964, Val Loss: 0.0002600 +2025-02-19 09:54:20,829 Epoch 849/2000 +2025-02-19 09:55:02,378 Current Learning Rate: 0.0085906315 +2025-02-19 09:55:02,379 Train Loss: 0.0002463, Val Loss: 0.0002544 +2025-02-19 09:55:02,379 Epoch 850/2000 +2025-02-19 09:55:44,909 Current Learning Rate: 0.0085355339 +2025-02-19 09:55:44,910 Train Loss: 0.0003749, Val Loss: 0.0003094 +2025-02-19 09:55:44,910 Epoch 851/2000 +2025-02-19 09:56:27,367 Current Learning 
Rate: 0.0084795640 +2025-02-19 09:56:27,367 Train Loss: 0.0003627, Val Loss: 0.0003507 +2025-02-19 09:56:27,368 Epoch 852/2000 +2025-02-19 09:57:09,076 Current Learning Rate: 0.0084227355 +2025-02-19 09:57:09,077 Train Loss: 0.0003637, Val Loss: 0.0004315 +2025-02-19 09:57:09,077 Epoch 853/2000 +2025-02-19 09:57:51,504 Current Learning Rate: 0.0083650626 +2025-02-19 09:57:51,507 Train Loss: 0.0003073, Val Loss: 0.0002462 +2025-02-19 09:57:51,507 Epoch 854/2000 +2025-02-19 09:58:33,816 Current Learning Rate: 0.0083065593 +2025-02-19 09:58:33,816 Train Loss: 0.0002204, Val Loss: 0.0002331 +2025-02-19 09:58:33,816 Epoch 855/2000 +2025-02-19 09:59:15,803 Current Learning Rate: 0.0082472402 +2025-02-19 09:59:15,803 Train Loss: 0.0002401, Val Loss: 0.0002473 +2025-02-19 09:59:15,804 Epoch 856/2000 +2025-02-19 09:59:57,310 Current Learning Rate: 0.0081871199 +2025-02-19 09:59:57,311 Train Loss: 0.0003096, Val Loss: 0.0002357 +2025-02-19 09:59:57,311 Epoch 857/2000 +2025-02-19 10:00:39,608 Current Learning Rate: 0.0081262133 +2025-02-19 10:00:39,608 Train Loss: 0.0002453, Val Loss: 0.0002303 +2025-02-19 10:00:39,609 Epoch 858/2000 +2025-02-19 10:01:22,257 Current Learning Rate: 0.0080645353 +2025-02-19 10:01:22,258 Train Loss: 0.0002163, Val Loss: 0.0002506 +2025-02-19 10:01:22,258 Epoch 859/2000 +2025-02-19 10:02:04,062 Current Learning Rate: 0.0080021011 +2025-02-19 10:02:04,062 Train Loss: 0.0002892, Val Loss: 0.0002468 +2025-02-19 10:02:04,063 Epoch 860/2000 +2025-02-19 10:02:46,129 Current Learning Rate: 0.0079389263 +2025-02-19 10:02:46,129 Train Loss: 0.0002395, Val Loss: 0.0002336 +2025-02-19 10:02:46,129 Epoch 861/2000 +2025-02-19 10:03:28,651 Current Learning Rate: 0.0078750263 +2025-02-19 10:03:30,500 Train Loss: 0.0001670, Val Loss: 0.0002071 +2025-02-19 10:03:30,501 Epoch 862/2000 +2025-02-19 10:04:12,812 Current Learning Rate: 0.0078104169 +2025-02-19 10:04:12,813 Train Loss: 0.0001887, Val Loss: 0.0002428 +2025-02-19 10:04:12,813 Epoch 863/2000 +2025-02-19 
10:04:55,328 Current Learning Rate: 0.0077451141 +2025-02-19 10:04:55,329 Train Loss: 0.0002108, Val Loss: 0.0002136 +2025-02-19 10:04:55,329 Epoch 864/2000 +2025-02-19 10:05:37,220 Current Learning Rate: 0.0076791340 +2025-02-19 10:05:37,221 Train Loss: 0.0002516, Val Loss: 0.0002374 +2025-02-19 10:05:37,221 Epoch 865/2000 +2025-02-19 10:06:20,035 Current Learning Rate: 0.0076124928 +2025-02-19 10:06:20,035 Train Loss: 0.0002630, Val Loss: 0.0002278 +2025-02-19 10:06:20,036 Epoch 866/2000 +2025-02-19 10:07:02,529 Current Learning Rate: 0.0075452071 +2025-02-19 10:07:04,833 Train Loss: 0.0002303, Val Loss: 0.0002053 +2025-02-19 10:07:04,833 Epoch 867/2000 +2025-02-19 10:07:46,990 Current Learning Rate: 0.0074772933 +2025-02-19 10:07:48,754 Train Loss: 0.0001676, Val Loss: 0.0001925 +2025-02-19 10:07:48,754 Epoch 868/2000 +2025-02-19 10:08:29,682 Current Learning Rate: 0.0074087684 +2025-02-19 10:08:29,683 Train Loss: 0.0002492, Val Loss: 0.0002075 +2025-02-19 10:08:29,684 Epoch 869/2000 +2025-02-19 10:09:12,243 Current Learning Rate: 0.0073396491 +2025-02-19 10:09:13,705 Train Loss: 0.0001736, Val Loss: 0.0001923 +2025-02-19 10:09:13,705 Epoch 870/2000 +2025-02-19 10:09:55,612 Current Learning Rate: 0.0072699525 +2025-02-19 10:09:55,612 Train Loss: 0.0001814, Val Loss: 0.0001936 +2025-02-19 10:09:55,612 Epoch 871/2000 +2025-02-19 10:10:37,142 Current Learning Rate: 0.0071996958 +2025-02-19 10:10:38,431 Train Loss: 0.0001449, Val Loss: 0.0001923 +2025-02-19 10:10:38,431 Epoch 872/2000 +2025-02-19 10:11:20,302 Current Learning Rate: 0.0071288965 +2025-02-19 10:11:20,303 Train Loss: 0.0001718, Val Loss: 0.0001945 +2025-02-19 10:11:20,304 Epoch 873/2000 +2025-02-19 10:12:02,701 Current Learning Rate: 0.0070575718 +2025-02-19 10:12:02,701 Train Loss: 0.0001875, Val Loss: 0.0001977 +2025-02-19 10:12:02,702 Epoch 874/2000 +2025-02-19 10:12:44,425 Current Learning Rate: 0.0069857395 +2025-02-19 10:12:44,426 Train Loss: 0.0001919, Val Loss: 0.0001975 +2025-02-19 
10:12:44,426 Epoch 875/2000 +2025-02-19 10:13:27,345 Current Learning Rate: 0.0069134172 +2025-02-19 10:13:27,345 Train Loss: 0.0003245, Val Loss: 0.0002414 +2025-02-19 10:13:27,346 Epoch 876/2000 +2025-02-19 10:14:09,527 Current Learning Rate: 0.0068406228 +2025-02-19 10:14:09,528 Train Loss: 0.0002015, Val Loss: 0.0002279 +2025-02-19 10:14:09,528 Epoch 877/2000 +2025-02-19 10:14:51,124 Current Learning Rate: 0.0067673742 +2025-02-19 10:14:51,124 Train Loss: 0.0001882, Val Loss: 0.0002195 +2025-02-19 10:14:51,125 Epoch 878/2000 +2025-02-19 10:15:33,667 Current Learning Rate: 0.0066936896 +2025-02-19 10:15:33,668 Train Loss: 0.0001969, Val Loss: 0.0002131 +2025-02-19 10:15:33,668 Epoch 879/2000 +2025-02-19 10:16:15,115 Current Learning Rate: 0.0066195871 +2025-02-19 10:16:15,116 Train Loss: 0.0002239, Val Loss: 0.0002057 +2025-02-19 10:16:15,116 Epoch 880/2000 +2025-02-19 10:16:57,870 Current Learning Rate: 0.0065450850 +2025-02-19 10:16:57,870 Train Loss: 0.0002296, Val Loss: 0.0002071 +2025-02-19 10:16:57,871 Epoch 881/2000 +2025-02-19 10:17:39,791 Current Learning Rate: 0.0064702016 +2025-02-19 10:17:39,793 Train Loss: 0.0001626, Val Loss: 0.0002514 +2025-02-19 10:17:39,793 Epoch 882/2000 +2025-02-19 10:18:21,736 Current Learning Rate: 0.0063949555 +2025-02-19 10:18:21,736 Train Loss: 0.0001940, Val Loss: 0.0002096 +2025-02-19 10:18:21,736 Epoch 883/2000 +2025-02-19 10:19:03,947 Current Learning Rate: 0.0063193652 +2025-02-19 10:19:03,948 Train Loss: 0.0001914, Val Loss: 0.0001958 +2025-02-19 10:19:03,948 Epoch 884/2000 +2025-02-19 10:19:46,326 Current Learning Rate: 0.0062434494 +2025-02-19 10:19:48,328 Train Loss: 0.0001925, Val Loss: 0.0001901 +2025-02-19 10:19:48,328 Epoch 885/2000 +2025-02-19 10:20:30,041 Current Learning Rate: 0.0061672268 +2025-02-19 10:20:30,041 Train Loss: 0.0002090, Val Loss: 0.0001961 +2025-02-19 10:20:30,041 Epoch 886/2000 +2025-02-19 10:21:11,537 Current Learning Rate: 0.0060907162 +2025-02-19 10:21:12,745 Train Loss: 0.0001945, Val 
Loss: 0.0001901 +2025-02-19 10:21:12,746 Epoch 887/2000 +2025-02-19 10:21:54,702 Current Learning Rate: 0.0060139365 +2025-02-19 10:21:54,703 Train Loss: 0.0001770, Val Loss: 0.0001928 +2025-02-19 10:21:54,703 Epoch 888/2000 +2025-02-19 10:22:36,731 Current Learning Rate: 0.0059369066 +2025-02-19 10:22:36,731 Train Loss: 0.0001938, Val Loss: 0.0001952 +2025-02-19 10:22:36,732 Epoch 889/2000 +2025-02-19 10:23:18,410 Current Learning Rate: 0.0058596455 +2025-02-19 10:23:18,411 Train Loss: 0.0002157, Val Loss: 0.0001977 +2025-02-19 10:23:18,411 Epoch 890/2000 +2025-02-19 10:24:01,132 Current Learning Rate: 0.0057821723 +2025-02-19 10:24:01,132 Train Loss: 0.0001466, Val Loss: 0.0001938 +2025-02-19 10:24:01,132 Epoch 891/2000 +2025-02-19 10:24:42,865 Current Learning Rate: 0.0057045062 +2025-02-19 10:24:42,866 Train Loss: 0.0001800, Val Loss: 0.0001945 +2025-02-19 10:24:42,867 Epoch 892/2000 +2025-02-19 10:25:24,808 Current Learning Rate: 0.0056266662 +2025-02-19 10:25:24,808 Train Loss: 0.0001889, Val Loss: 0.0002045 +2025-02-19 10:25:24,808 Epoch 893/2000 +2025-02-19 10:26:07,760 Current Learning Rate: 0.0055486716 +2025-02-19 10:26:07,760 Train Loss: 0.0001317, Val Loss: 0.0001963 +2025-02-19 10:26:07,761 Epoch 894/2000 +2025-02-19 10:26:50,105 Current Learning Rate: 0.0054705416 +2025-02-19 10:26:50,106 Train Loss: 0.0001998, Val Loss: 0.0002427 +2025-02-19 10:26:50,107 Epoch 895/2000 +2025-02-19 10:27:31,675 Current Learning Rate: 0.0053922955 +2025-02-19 10:27:31,675 Train Loss: 0.0002311, Val Loss: 0.0002314 +2025-02-19 10:27:31,676 Epoch 896/2000 +2025-02-19 10:28:13,882 Current Learning Rate: 0.0053139526 +2025-02-19 10:28:13,883 Train Loss: 0.0001878, Val Loss: 0.0002171 +2025-02-19 10:28:13,883 Epoch 897/2000 +2025-02-19 10:28:56,060 Current Learning Rate: 0.0052355323 +2025-02-19 10:28:56,060 Train Loss: 0.0002466, Val Loss: 0.0002435 +2025-02-19 10:28:56,060 Epoch 898/2000 +2025-02-19 10:29:37,933 Current Learning Rate: 0.0051570538 +2025-02-19 
10:29:37,934 Train Loss: 0.0002102, Val Loss: 0.0002427 +2025-02-19 10:29:37,934 Epoch 899/2000 +2025-02-19 10:30:19,957 Current Learning Rate: 0.0050785366 +2025-02-19 10:30:19,958 Train Loss: 0.0002418, Val Loss: 0.0002012 +2025-02-19 10:30:19,959 Epoch 900/2000 +2025-02-19 10:31:01,798 Current Learning Rate: 0.0050000000 +2025-02-19 10:31:03,812 Train Loss: 0.0002469, Val Loss: 0.0001896 +2025-02-19 10:31:03,813 Epoch 901/2000 +2025-02-19 10:31:46,229 Current Learning Rate: 0.0049214634 +2025-02-19 10:31:47,921 Train Loss: 0.0002076, Val Loss: 0.0001786 +2025-02-19 10:31:47,921 Epoch 902/2000 +2025-02-19 10:32:29,851 Current Learning Rate: 0.0048429462 +2025-02-19 10:32:31,698 Train Loss: 0.0001614, Val Loss: 0.0001745 +2025-02-19 10:32:31,711 Epoch 903/2000 +2025-02-19 10:33:14,134 Current Learning Rate: 0.0047644677 +2025-02-19 10:33:16,071 Train Loss: 0.0001205, Val Loss: 0.0001742 +2025-02-19 10:33:16,071 Epoch 904/2000 +2025-02-19 10:33:57,075 Current Learning Rate: 0.0046860474 +2025-02-19 10:33:57,076 Train Loss: 0.0001737, Val Loss: 0.0001772 +2025-02-19 10:33:57,076 Epoch 905/2000 +2025-02-19 10:34:39,944 Current Learning Rate: 0.0046077045 +2025-02-19 10:34:41,502 Train Loss: 0.0001597, Val Loss: 0.0001712 +2025-02-19 10:34:41,502 Epoch 906/2000 +2025-02-19 10:35:23,757 Current Learning Rate: 0.0045294584 +2025-02-19 10:35:25,543 Train Loss: 0.0001439, Val Loss: 0.0001686 +2025-02-19 10:35:25,544 Epoch 907/2000 +2025-02-19 10:36:06,192 Current Learning Rate: 0.0044513284 +2025-02-19 10:36:06,193 Train Loss: 0.0001640, Val Loss: 0.0001783 +2025-02-19 10:36:06,193 Epoch 908/2000 +2025-02-19 10:36:49,005 Current Learning Rate: 0.0043733338 +2025-02-19 10:36:49,005 Train Loss: 0.0002195, Val Loss: 0.0002013 +2025-02-19 10:36:49,006 Epoch 909/2000 +2025-02-19 10:37:30,581 Current Learning Rate: 0.0042954938 +2025-02-19 10:37:30,582 Train Loss: 0.0002057, Val Loss: 0.0001801 +2025-02-19 10:37:30,582 Epoch 910/2000 +2025-02-19 10:38:12,552 Current Learning 
Rate: 0.0042178277 +2025-02-19 10:38:13,465 Train Loss: 0.0001567, Val Loss: 0.0001673 +2025-02-19 10:38:13,465 Epoch 911/2000 +2025-02-19 10:38:55,640 Current Learning Rate: 0.0041403545 +2025-02-19 10:38:55,641 Train Loss: 0.0002002, Val Loss: 0.0001698 +2025-02-19 10:38:55,641 Epoch 912/2000 +2025-02-19 10:39:37,818 Current Learning Rate: 0.0040630934 +2025-02-19 10:39:39,700 Train Loss: 0.0001609, Val Loss: 0.0001633 +2025-02-19 10:39:39,706 Epoch 913/2000 +2025-02-19 10:40:20,730 Current Learning Rate: 0.0039860635 +2025-02-19 10:40:20,731 Train Loss: 0.0001846, Val Loss: 0.0001646 +2025-02-19 10:40:20,731 Epoch 914/2000 +2025-02-19 10:41:02,911 Current Learning Rate: 0.0039092838 +2025-02-19 10:41:02,911 Train Loss: 0.0001454, Val Loss: 0.0001658 +2025-02-19 10:41:02,911 Epoch 915/2000 +2025-02-19 10:41:45,889 Current Learning Rate: 0.0038327732 +2025-02-19 10:41:45,890 Train Loss: 0.0001666, Val Loss: 0.0001676 +2025-02-19 10:41:45,890 Epoch 916/2000 +2025-02-19 10:42:28,291 Current Learning Rate: 0.0037565506 +2025-02-19 10:42:28,291 Train Loss: 0.0001815, Val Loss: 0.0001671 +2025-02-19 10:42:28,292 Epoch 917/2000 +2025-02-19 10:43:10,115 Current Learning Rate: 0.0036806348 +2025-02-19 10:43:10,115 Train Loss: 0.0001311, Val Loss: 0.0001668 +2025-02-19 10:43:10,115 Epoch 918/2000 +2025-02-19 10:43:53,147 Current Learning Rate: 0.0036050445 +2025-02-19 10:43:53,147 Train Loss: 0.0001446, Val Loss: 0.0001700 +2025-02-19 10:43:53,148 Epoch 919/2000 +2025-02-19 10:44:35,272 Current Learning Rate: 0.0035297984 +2025-02-19 10:44:35,273 Train Loss: 0.0002006, Val Loss: 0.0001720 +2025-02-19 10:44:35,273 Epoch 920/2000 +2025-02-19 10:45:16,920 Current Learning Rate: 0.0034549150 +2025-02-19 10:45:16,921 Train Loss: 0.0001946, Val Loss: 0.0001785 +2025-02-19 10:45:16,921 Epoch 921/2000 +2025-02-19 10:45:59,634 Current Learning Rate: 0.0033804129 +2025-02-19 10:45:59,635 Train Loss: 0.0002055, Val Loss: 0.0001786 +2025-02-19 10:45:59,635 Epoch 922/2000 +2025-02-19 
10:46:42,110 Current Learning Rate: 0.0033063104 +2025-02-19 10:46:42,110 Train Loss: 0.0001138, Val Loss: 0.0001659 +2025-02-19 10:46:42,110 Epoch 923/2000 +2025-02-19 10:47:24,387 Current Learning Rate: 0.0032326258 +2025-02-19 10:47:24,387 Train Loss: 0.0001566, Val Loss: 0.0001700 +2025-02-19 10:47:24,387 Epoch 924/2000 +2025-02-19 10:48:06,102 Current Learning Rate: 0.0031593772 +2025-02-19 10:48:06,103 Train Loss: 0.0001715, Val Loss: 0.0001661 +2025-02-19 10:48:06,103 Epoch 925/2000 +2025-02-19 10:48:48,670 Current Learning Rate: 0.0030865828 +2025-02-19 10:48:50,570 Train Loss: 0.0001347, Val Loss: 0.0001628 +2025-02-19 10:48:50,570 Epoch 926/2000 +2025-02-19 10:49:32,723 Current Learning Rate: 0.0030142605 +2025-02-19 10:49:34,328 Train Loss: 0.0001383, Val Loss: 0.0001609 +2025-02-19 10:49:34,328 Epoch 927/2000 +2025-02-19 10:50:16,344 Current Learning Rate: 0.0029424282 +2025-02-19 10:50:16,345 Train Loss: 0.0001582, Val Loss: 0.0001619 +2025-02-19 10:50:16,345 Epoch 928/2000 +2025-02-19 10:50:58,698 Current Learning Rate: 0.0028711035 +2025-02-19 10:51:00,211 Train Loss: 0.0001311, Val Loss: 0.0001590 +2025-02-19 10:51:00,212 Epoch 929/2000 +2025-02-19 10:51:41,892 Current Learning Rate: 0.0028003042 +2025-02-19 10:51:42,968 Train Loss: 0.0001313, Val Loss: 0.0001564 +2025-02-19 10:51:42,968 Epoch 930/2000 +2025-02-19 10:52:23,735 Current Learning Rate: 0.0027300475 +2025-02-19 10:52:24,980 Train Loss: 0.0001372, Val Loss: 0.0001548 +2025-02-19 10:52:24,980 Epoch 931/2000 +2025-02-19 10:53:06,229 Current Learning Rate: 0.0026603509 +2025-02-19 10:53:06,231 Train Loss: 0.0001482, Val Loss: 0.0001551 +2025-02-19 10:53:06,232 Epoch 932/2000 +2025-02-19 10:53:48,896 Current Learning Rate: 0.0025912316 +2025-02-19 10:53:50,029 Train Loss: 0.0001485, Val Loss: 0.0001542 +2025-02-19 10:53:50,029 Epoch 933/2000 +2025-02-19 10:54:32,448 Current Learning Rate: 0.0025227067 +2025-02-19 10:54:32,449 Train Loss: 0.0001400, Val Loss: 0.0001543 +2025-02-19 
10:54:32,449 Epoch 934/2000 +2025-02-19 10:55:14,838 Current Learning Rate: 0.0024547929 +2025-02-19 10:55:16,351 Train Loss: 0.0001432, Val Loss: 0.0001541 +2025-02-19 10:55:16,352 Epoch 935/2000 +2025-02-19 10:55:58,191 Current Learning Rate: 0.0023875072 +2025-02-19 10:55:59,952 Train Loss: 0.0001080, Val Loss: 0.0001535 +2025-02-19 10:55:59,953 Epoch 936/2000 +2025-02-19 10:56:42,101 Current Learning Rate: 0.0023208660 +2025-02-19 10:56:42,102 Train Loss: 0.0001234, Val Loss: 0.0001539 +2025-02-19 10:56:42,102 Epoch 937/2000 +2025-02-19 10:57:23,864 Current Learning Rate: 0.0022548859 +2025-02-19 10:57:23,865 Train Loss: 0.0001602, Val Loss: 0.0001535 +2025-02-19 10:57:23,865 Epoch 938/2000 +2025-02-19 10:58:06,434 Current Learning Rate: 0.0021895831 +2025-02-19 10:58:08,316 Train Loss: 0.0001006, Val Loss: 0.0001516 +2025-02-19 10:58:08,316 Epoch 939/2000 +2025-02-19 10:58:50,627 Current Learning Rate: 0.0021249737 +2025-02-19 10:58:50,628 Train Loss: 0.0001475, Val Loss: 0.0001520 +2025-02-19 10:58:50,628 Epoch 940/2000 +2025-02-19 10:59:32,528 Current Learning Rate: 0.0020610737 +2025-02-19 10:59:33,597 Train Loss: 0.0001363, Val Loss: 0.0001508 +2025-02-19 10:59:33,597 Epoch 941/2000 +2025-02-19 11:00:14,596 Current Learning Rate: 0.0019978989 +2025-02-19 11:00:14,597 Train Loss: 0.0001734, Val Loss: 0.0001520 +2025-02-19 11:00:14,597 Epoch 942/2000 +2025-02-19 11:00:56,911 Current Learning Rate: 0.0019354647 +2025-02-19 11:00:56,911 Train Loss: 0.0001725, Val Loss: 0.0001519 +2025-02-19 11:00:56,912 Epoch 943/2000 +2025-02-19 11:01:39,663 Current Learning Rate: 0.0018737867 +2025-02-19 11:01:39,664 Train Loss: 0.0001246, Val Loss: 0.0001520 +2025-02-19 11:01:39,664 Epoch 944/2000 +2025-02-19 11:02:21,653 Current Learning Rate: 0.0018128801 +2025-02-19 11:02:21,654 Train Loss: 0.0001091, Val Loss: 0.0001528 +2025-02-19 11:02:21,654 Epoch 945/2000 +2025-02-19 11:03:04,661 Current Learning Rate: 0.0017527598 +2025-02-19 11:03:04,661 Train Loss: 0.0001803, Val 
Loss: 0.0001568 +2025-02-19 11:03:04,662 Epoch 946/2000 +2025-02-19 11:03:47,151 Current Learning Rate: 0.0016934407 +2025-02-19 11:03:47,153 Train Loss: 0.0001456, Val Loss: 0.0001559 +2025-02-19 11:03:47,153 Epoch 947/2000 +2025-02-19 11:04:29,595 Current Learning Rate: 0.0016349374 +2025-02-19 11:04:29,595 Train Loss: 0.0001177, Val Loss: 0.0001549 +2025-02-19 11:04:29,595 Epoch 948/2000 +2025-02-19 11:05:12,056 Current Learning Rate: 0.0015772645 +2025-02-19 11:05:12,057 Train Loss: 0.0001190, Val Loss: 0.0001510 +2025-02-19 11:05:12,057 Epoch 949/2000 +2025-02-19 11:05:53,581 Current Learning Rate: 0.0015204360 +2025-02-19 11:05:54,461 Train Loss: 0.0001215, Val Loss: 0.0001483 +2025-02-19 11:05:54,461 Epoch 950/2000 +2025-02-19 11:06:36,942 Current Learning Rate: 0.0014644661 +2025-02-19 11:06:38,754 Train Loss: 0.0001223, Val Loss: 0.0001482 +2025-02-19 11:06:38,755 Epoch 951/2000 +2025-02-19 11:07:21,202 Current Learning Rate: 0.0014093685 +2025-02-19 11:07:21,203 Train Loss: 0.0001586, Val Loss: 0.0001495 +2025-02-19 11:07:21,203 Epoch 952/2000 +2025-02-19 11:08:02,514 Current Learning Rate: 0.0013551569 +2025-02-19 11:08:03,650 Train Loss: 0.0001185, Val Loss: 0.0001481 +2025-02-19 11:08:03,650 Epoch 953/2000 +2025-02-19 11:08:45,200 Current Learning Rate: 0.0013018445 +2025-02-19 11:08:46,799 Train Loss: 0.0001285, Val Loss: 0.0001480 +2025-02-19 11:08:46,799 Epoch 954/2000 +2025-02-19 11:09:29,137 Current Learning Rate: 0.0012494447 +2025-02-19 11:09:30,485 Train Loss: 0.0000960, Val Loss: 0.0001475 +2025-02-19 11:09:30,486 Epoch 955/2000 +2025-02-19 11:10:12,678 Current Learning Rate: 0.0011979702 +2025-02-19 11:10:12,679 Train Loss: 0.0001219, Val Loss: 0.0001478 +2025-02-19 11:10:12,679 Epoch 956/2000 +2025-02-19 11:10:54,647 Current Learning Rate: 0.0011474338 +2025-02-19 11:10:54,647 Train Loss: 0.0001039, Val Loss: 0.0001476 +2025-02-19 11:10:54,647 Epoch 957/2000 +2025-02-19 11:11:37,219 Current Learning Rate: 0.0010978480 +2025-02-19 
11:11:39,116 Train Loss: 0.0001547, Val Loss: 0.0001472 +2025-02-19 11:11:39,117 Epoch 958/2000 +2025-02-19 11:12:19,929 Current Learning Rate: 0.0010492249 +2025-02-19 11:12:19,930 Train Loss: 0.0001661, Val Loss: 0.0001488 +2025-02-19 11:12:19,930 Epoch 959/2000 +2025-02-19 11:13:02,148 Current Learning Rate: 0.0010015767 +2025-02-19 11:13:02,148 Train Loss: 0.0001300, Val Loss: 0.0001478 +2025-02-19 11:13:02,149 Epoch 960/2000 +2025-02-19 11:13:44,340 Current Learning Rate: 0.0009549150 +2025-02-19 11:13:44,341 Train Loss: 0.0001510, Val Loss: 0.0001482 +2025-02-19 11:13:44,341 Epoch 961/2000 +2025-02-19 11:14:26,725 Current Learning Rate: 0.0009092514 +2025-02-19 11:14:26,725 Train Loss: 0.0002214, Val Loss: 0.0001485 +2025-02-19 11:14:26,725 Epoch 962/2000 +2025-02-19 11:15:09,070 Current Learning Rate: 0.0008645971 +2025-02-19 11:15:09,070 Train Loss: 0.0001286, Val Loss: 0.0001478 +2025-02-19 11:15:09,070 Epoch 963/2000 +2025-02-19 11:15:51,158 Current Learning Rate: 0.0008209632 +2025-02-19 11:15:51,158 Train Loss: 0.0001404, Val Loss: 0.0001478 +2025-02-19 11:15:51,159 Epoch 964/2000 +2025-02-19 11:16:33,419 Current Learning Rate: 0.0007783604 +2025-02-19 11:16:35,351 Train Loss: 0.0001066, Val Loss: 0.0001460 +2025-02-19 11:16:35,351 Epoch 965/2000 +2025-02-19 11:17:16,558 Current Learning Rate: 0.0007367992 +2025-02-19 11:17:18,419 Train Loss: 0.0001468, Val Loss: 0.0001453 +2025-02-19 11:17:18,419 Epoch 966/2000 +2025-02-19 11:17:59,193 Current Learning Rate: 0.0006962899 +2025-02-19 11:18:00,720 Train Loss: 0.0001308, Val Loss: 0.0001452 +2025-02-19 11:18:00,727 Epoch 967/2000 +2025-02-19 11:18:42,686 Current Learning Rate: 0.0006568424 +2025-02-19 11:18:44,570 Train Loss: 0.0001128, Val Loss: 0.0001450 +2025-02-19 11:18:44,571 Epoch 968/2000 +2025-02-19 11:19:25,502 Current Learning Rate: 0.0006184666 +2025-02-19 11:19:25,503 Train Loss: 0.0001174, Val Loss: 0.0001451 +2025-02-19 11:19:25,504 Epoch 969/2000 +2025-02-19 11:20:07,377 Current Learning 
Rate: 0.0005811718 +2025-02-19 11:20:08,331 Train Loss: 0.0001331, Val Loss: 0.0001449 +2025-02-19 11:20:08,331 Epoch 970/2000 +2025-02-19 11:20:50,542 Current Learning Rate: 0.0005449674 +2025-02-19 11:20:52,275 Train Loss: 0.0001318, Val Loss: 0.0001446 +2025-02-19 11:20:52,282 Epoch 971/2000 +2025-02-19 11:21:33,359 Current Learning Rate: 0.0005098621 +2025-02-19 11:21:35,308 Train Loss: 0.0000944, Val Loss: 0.0001445 +2025-02-19 11:21:35,308 Epoch 972/2000 +2025-02-19 11:22:15,988 Current Learning Rate: 0.0004758647 +2025-02-19 11:22:17,195 Train Loss: 0.0001074, Val Loss: 0.0001444 +2025-02-19 11:22:17,196 Epoch 973/2000 +2025-02-19 11:22:58,439 Current Learning Rate: 0.0004429836 +2025-02-19 11:22:59,665 Train Loss: 0.0001101, Val Loss: 0.0001443 +2025-02-19 11:22:59,666 Epoch 974/2000 +2025-02-19 11:23:40,697 Current Learning Rate: 0.0004112269 +2025-02-19 11:23:42,665 Train Loss: 0.0001632, Val Loss: 0.0001442 +2025-02-19 11:23:42,665 Epoch 975/2000 +2025-02-19 11:24:24,355 Current Learning Rate: 0.0003806023 +2025-02-19 11:24:24,355 Train Loss: 0.0001647, Val Loss: 0.0001443 +2025-02-19 11:24:24,355 Epoch 976/2000 +2025-02-19 11:25:06,304 Current Learning Rate: 0.0003511176 +2025-02-19 11:25:07,441 Train Loss: 0.0001129, Val Loss: 0.0001442 +2025-02-19 11:25:07,442 Epoch 977/2000 +2025-02-19 11:25:49,483 Current Learning Rate: 0.0003227798 +2025-02-19 11:25:51,196 Train Loss: 0.0001146, Val Loss: 0.0001441 +2025-02-19 11:25:51,197 Epoch 978/2000 +2025-02-19 11:26:31,849 Current Learning Rate: 0.0002955962 +2025-02-19 11:26:31,850 Train Loss: 0.0001799, Val Loss: 0.0001444 +2025-02-19 11:26:31,851 Epoch 979/2000 +2025-02-19 11:27:14,240 Current Learning Rate: 0.0002695732 +2025-02-19 11:27:14,241 Train Loss: 0.0001584, Val Loss: 0.0001441 +2025-02-19 11:27:14,241 Epoch 980/2000 +2025-02-19 11:27:55,975 Current Learning Rate: 0.0002447174 +2025-02-19 11:27:55,976 Train Loss: 0.0001180, Val Loss: 0.0001441 +2025-02-19 11:27:55,976 Epoch 981/2000 +2025-02-19 
11:28:39,095 Current Learning Rate: 0.0002210349 +2025-02-19 11:28:40,467 Train Loss: 0.0001421, Val Loss: 0.0001440 +2025-02-19 11:28:40,467 Epoch 982/2000 +2025-02-19 11:29:22,377 Current Learning Rate: 0.0001985316 +2025-02-19 11:29:23,721 Train Loss: 0.0001260, Val Loss: 0.0001439 +2025-02-19 11:29:23,722 Epoch 983/2000 +2025-02-19 11:30:05,789 Current Learning Rate: 0.0001772129 +2025-02-19 11:30:07,791 Train Loss: 0.0001358, Val Loss: 0.0001439 +2025-02-19 11:30:07,791 Epoch 984/2000 +2025-02-19 11:30:48,575 Current Learning Rate: 0.0001570842 +2025-02-19 11:30:49,717 Train Loss: 0.0001091, Val Loss: 0.0001439 +2025-02-19 11:30:49,718 Epoch 985/2000 +2025-02-19 11:31:30,865 Current Learning Rate: 0.0001381504 +2025-02-19 11:31:32,438 Train Loss: 0.0001428, Val Loss: 0.0001438 +2025-02-19 11:31:32,438 Epoch 986/2000 +2025-02-19 11:32:14,620 Current Learning Rate: 0.0001204162 +2025-02-19 11:32:14,621 Train Loss: 0.0001130, Val Loss: 0.0001439 +2025-02-19 11:32:14,621 Epoch 987/2000 +2025-02-19 11:32:56,950 Current Learning Rate: 0.0001038859 +2025-02-19 11:32:58,777 Train Loss: 0.0001209, Val Loss: 0.0001438 +2025-02-19 11:32:58,777 Epoch 988/2000 +2025-02-19 11:33:39,451 Current Learning Rate: 0.0000885637 +2025-02-19 11:33:39,452 Train Loss: 0.0001443, Val Loss: 0.0001438 +2025-02-19 11:33:39,452 Epoch 989/2000 +2025-02-19 11:34:21,921 Current Learning Rate: 0.0000744534 +2025-02-19 11:34:21,922 Train Loss: 0.0001539, Val Loss: 0.0001438 +2025-02-19 11:34:21,922 Epoch 990/2000 +2025-02-19 11:35:04,782 Current Learning Rate: 0.0000615583 +2025-02-19 11:35:06,558 Train Loss: 0.0001267, Val Loss: 0.0001438 +2025-02-19 11:35:06,562 Epoch 991/2000 +2025-02-19 11:35:48,848 Current Learning Rate: 0.0000498817 +2025-02-19 11:35:50,336 Train Loss: 0.0001263, Val Loss: 0.0001438 +2025-02-19 11:35:50,337 Epoch 992/2000 +2025-02-19 11:36:31,367 Current Learning Rate: 0.0000394265 +2025-02-19 11:36:31,369 Train Loss: 0.0001166, Val Loss: 0.0001438 +2025-02-19 
11:36:31,369 Epoch 993/2000 +2025-02-19 11:37:13,537 Current Learning Rate: 0.0000301952 +2025-02-19 11:37:13,537 Train Loss: 0.0001293, Val Loss: 0.0001438 +2025-02-19 11:37:13,537 Epoch 994/2000 +2025-02-19 11:37:56,625 Current Learning Rate: 0.0000221902 +2025-02-19 11:37:58,485 Train Loss: 0.0001346, Val Loss: 0.0001437 +2025-02-19 11:37:58,485 Epoch 995/2000 +2025-02-19 11:38:39,951 Current Learning Rate: 0.0000154133 +2025-02-19 11:38:41,588 Train Loss: 0.0001486, Val Loss: 0.0001437 +2025-02-19 11:38:41,589 Epoch 996/2000 +2025-02-19 11:39:23,081 Current Learning Rate: 0.0000098664 +2025-02-19 11:39:23,082 Train Loss: 0.0001695, Val Loss: 0.0001438 +2025-02-19 11:39:23,082 Epoch 997/2000 +2025-02-19 11:40:06,039 Current Learning Rate: 0.0000055506 +2025-02-19 11:40:07,710 Train Loss: 0.0001832, Val Loss: 0.0001437 +2025-02-19 11:40:07,710 Epoch 998/2000 +2025-02-19 11:40:48,493 Current Learning Rate: 0.0000024672 +2025-02-19 11:40:48,494 Train Loss: 0.0001657, Val Loss: 0.0001438 +2025-02-19 11:40:48,494 Epoch 999/2000 +2025-02-19 11:41:30,956 Current Learning Rate: 0.0000006168 +2025-02-19 11:41:30,957 Train Loss: 0.0001426, Val Loss: 0.0001438 +2025-02-19 11:41:30,957 Epoch 1000/2000 +2025-02-19 11:42:13,236 Current Learning Rate: 0.0000000000 +2025-02-19 11:42:13,237 Train Loss: 0.0001438, Val Loss: 0.0001438 +2025-02-19 11:42:13,237 Epoch 1001/2000 +2025-02-19 11:42:55,795 Current Learning Rate: 0.0000006168 +2025-02-19 11:42:55,796 Train Loss: 0.0001048, Val Loss: 0.0001438 +2025-02-19 11:42:55,796 Epoch 1002/2000 +2025-02-19 11:43:38,204 Current Learning Rate: 0.0000024672 +2025-02-19 11:43:38,207 Train Loss: 0.0001221, Val Loss: 0.0001438 +2025-02-19 11:43:38,207 Epoch 1003/2000 +2025-02-19 11:44:19,837 Current Learning Rate: 0.0000055506 +2025-02-19 11:44:19,837 Train Loss: 0.0001121, Val Loss: 0.0001439 +2025-02-19 11:44:19,838 Epoch 1004/2000 +2025-02-19 11:45:02,047 Current Learning Rate: 0.0000098664 +2025-02-19 11:45:02,048 Train Loss: 
0.0001358, Val Loss: 0.0001438 +2025-02-19 11:45:02,048 Epoch 1005/2000 +2025-02-19 11:45:44,192 Current Learning Rate: 0.0000154133 +2025-02-19 11:45:44,192 Train Loss: 0.0001528, Val Loss: 0.0001438 +2025-02-19 11:45:44,193 Epoch 1006/2000 +2025-02-19 11:46:26,505 Current Learning Rate: 0.0000221902 +2025-02-19 11:46:26,506 Train Loss: 0.0001113, Val Loss: 0.0001438 +2025-02-19 11:46:26,507 Epoch 1007/2000 +2025-02-19 11:47:08,737 Current Learning Rate: 0.0000301952 +2025-02-19 11:47:08,737 Train Loss: 0.0001027, Val Loss: 0.0001438 +2025-02-19 11:47:08,738 Epoch 1008/2000 +2025-02-19 11:47:51,157 Current Learning Rate: 0.0000394265 +2025-02-19 11:47:53,043 Train Loss: 0.0001583, Val Loss: 0.0001437 +2025-02-19 11:47:53,044 Epoch 1009/2000 +2025-02-19 11:48:35,350 Current Learning Rate: 0.0000498817 +2025-02-19 11:48:35,350 Train Loss: 0.0001609, Val Loss: 0.0001437 +2025-02-19 11:48:35,351 Epoch 1010/2000 +2025-02-19 11:49:17,568 Current Learning Rate: 0.0000615583 +2025-02-19 11:49:17,569 Train Loss: 0.0001213, Val Loss: 0.0001439 +2025-02-19 11:49:17,569 Epoch 1011/2000 +2025-02-19 11:49:59,920 Current Learning Rate: 0.0000744534 +2025-02-19 11:49:59,920 Train Loss: 0.0001378, Val Loss: 0.0001438 +2025-02-19 11:49:59,920 Epoch 1012/2000 +2025-02-19 11:50:41,432 Current Learning Rate: 0.0000885637 +2025-02-19 11:50:41,432 Train Loss: 0.0001241, Val Loss: 0.0001438 +2025-02-19 11:50:41,433 Epoch 1013/2000 +2025-02-19 11:51:24,055 Current Learning Rate: 0.0001038859 +2025-02-19 11:51:24,055 Train Loss: 0.0001201, Val Loss: 0.0001437 +2025-02-19 11:51:24,056 Epoch 1014/2000 +2025-02-19 11:52:06,091 Current Learning Rate: 0.0001204162 +2025-02-19 11:52:06,091 Train Loss: 0.0001527, Val Loss: 0.0001438 +2025-02-19 11:52:06,091 Epoch 1015/2000 +2025-02-19 11:52:47,797 Current Learning Rate: 0.0001381504 +2025-02-19 11:52:47,798 Train Loss: 0.0001214, Val Loss: 0.0001438 +2025-02-19 11:52:47,798 Epoch 1016/2000 +2025-02-19 11:53:30,032 Current Learning Rate: 
0.0001570842 +2025-02-19 11:53:30,033 Train Loss: 0.0001541, Val Loss: 0.0001438 +2025-02-19 11:53:30,033 Epoch 1017/2000 +2025-02-19 11:54:12,556 Current Learning Rate: 0.0001772129 +2025-02-19 11:54:12,557 Train Loss: 0.0001198, Val Loss: 0.0001437 +2025-02-19 11:54:12,557 Epoch 1018/2000 +2025-02-19 11:54:55,571 Current Learning Rate: 0.0001985316 +2025-02-19 11:54:55,572 Train Loss: 0.0001162, Val Loss: 0.0001438 +2025-02-19 11:54:55,572 Epoch 1019/2000 +2025-02-19 11:55:37,627 Current Learning Rate: 0.0002210349 +2025-02-19 11:55:37,628 Train Loss: 0.0000882, Val Loss: 0.0001437 +2025-02-19 11:55:37,628 Epoch 1020/2000 +2025-02-19 11:56:19,386 Current Learning Rate: 0.0002447174 +2025-02-19 11:56:19,387 Train Loss: 0.0001306, Val Loss: 0.0001437 +2025-02-19 11:56:19,387 Epoch 1021/2000 +2025-02-19 11:57:01,962 Current Learning Rate: 0.0002695732 +2025-02-19 11:57:01,963 Train Loss: 0.0001737, Val Loss: 0.0001441 +2025-02-19 11:57:01,963 Epoch 1022/2000 +2025-02-19 11:57:44,749 Current Learning Rate: 0.0002955962 +2025-02-19 11:57:44,761 Train Loss: 0.0001276, Val Loss: 0.0001438 +2025-02-19 11:57:44,762 Epoch 1023/2000 +2025-02-19 11:58:26,599 Current Learning Rate: 0.0003227798 +2025-02-19 11:58:26,599 Train Loss: 0.0001684, Val Loss: 0.0001439 +2025-02-19 11:58:26,599 Epoch 1024/2000 +2025-02-19 11:59:09,004 Current Learning Rate: 0.0003511176 +2025-02-19 11:59:09,004 Train Loss: 0.0001157, Val Loss: 0.0001439 +2025-02-19 11:59:09,005 Epoch 1025/2000 +2025-02-19 11:59:51,874 Current Learning Rate: 0.0003806023 +2025-02-19 11:59:51,875 Train Loss: 0.0001447, Val Loss: 0.0001440 +2025-02-19 11:59:51,875 Epoch 1026/2000 +2025-02-19 12:00:34,287 Current Learning Rate: 0.0004112269 +2025-02-19 12:00:34,287 Train Loss: 0.0001824, Val Loss: 0.0001442 +2025-02-19 12:00:34,288 Epoch 1027/2000 +2025-02-19 12:01:16,466 Current Learning Rate: 0.0004429836 +2025-02-19 12:01:16,467 Train Loss: 0.0001353, Val Loss: 0.0001441 +2025-02-19 12:01:16,467 Epoch 1028/2000 
+2025-02-19 12:01:58,880 Current Learning Rate: 0.0004758647 +2025-02-19 12:01:58,883 Train Loss: 0.0001597, Val Loss: 0.0001444 +2025-02-19 12:01:58,887 Epoch 1029/2000 +2025-02-19 12:02:41,183 Current Learning Rate: 0.0005098621 +2025-02-19 12:02:41,184 Train Loss: 0.0001711, Val Loss: 0.0001447 +2025-02-19 12:02:41,184 Epoch 1030/2000 +2025-02-19 12:03:23,149 Current Learning Rate: 0.0005449674 +2025-02-19 12:03:23,150 Train Loss: 0.0001266, Val Loss: 0.0001449 +2025-02-19 12:03:23,151 Epoch 1031/2000 +2025-02-19 12:04:05,897 Current Learning Rate: 0.0005811718 +2025-02-19 12:04:05,897 Train Loss: 0.0001136, Val Loss: 0.0001449 +2025-02-19 12:04:05,897 Epoch 1032/2000 +2025-02-19 12:04:48,357 Current Learning Rate: 0.0006184666 +2025-02-19 12:04:48,358 Train Loss: 0.0001412, Val Loss: 0.0001452 +2025-02-19 12:04:48,358 Epoch 1033/2000 +2025-02-19 12:05:29,788 Current Learning Rate: 0.0006568424 +2025-02-19 12:05:29,789 Train Loss: 0.0001121, Val Loss: 0.0001446 +2025-02-19 12:05:29,789 Epoch 1034/2000 +2025-02-19 12:06:12,217 Current Learning Rate: 0.0006962899 +2025-02-19 12:06:12,218 Train Loss: 0.0001219, Val Loss: 0.0001447 +2025-02-19 12:06:12,218 Epoch 1035/2000 +2025-02-19 12:06:54,861 Current Learning Rate: 0.0007367992 +2025-02-19 12:06:54,861 Train Loss: 0.0001293, Val Loss: 0.0001449 +2025-02-19 12:06:54,862 Epoch 1036/2000 +2025-02-19 12:07:37,266 Current Learning Rate: 0.0007783604 +2025-02-19 12:07:37,267 Train Loss: 0.0001342, Val Loss: 0.0001454 +2025-02-19 12:07:37,267 Epoch 1037/2000 +2025-02-19 12:08:18,566 Current Learning Rate: 0.0008209632 +2025-02-19 12:08:18,566 Train Loss: 0.0000886, Val Loss: 0.0001448 +2025-02-19 12:08:18,567 Epoch 1038/2000 +2025-02-19 12:09:00,661 Current Learning Rate: 0.0008645971 +2025-02-19 12:09:00,662 Train Loss: 0.0001090, Val Loss: 0.0001448 +2025-02-19 12:09:00,662 Epoch 1039/2000 +2025-02-19 12:09:42,810 Current Learning Rate: 0.0009092514 +2025-02-19 12:09:42,810 Train Loss: 0.0001496, Val Loss: 0.0001447 
+2025-02-19 12:09:42,810 Epoch 1040/2000 +2025-02-19 12:10:25,716 Current Learning Rate: 0.0009549150 +2025-02-19 12:10:25,717 Train Loss: 0.0001222, Val Loss: 0.0001441 +2025-02-19 12:10:25,717 Epoch 1041/2000 +2025-02-19 12:11:07,989 Current Learning Rate: 0.0010015767 +2025-02-19 12:11:07,989 Train Loss: 0.0001344, Val Loss: 0.0001445 +2025-02-19 12:11:07,990 Epoch 1042/2000 +2025-02-19 12:11:49,414 Current Learning Rate: 0.0010492249 +2025-02-19 12:11:49,414 Train Loss: 0.0001297, Val Loss: 0.0001446 +2025-02-19 12:11:49,415 Epoch 1043/2000 +2025-02-19 12:12:30,890 Current Learning Rate: 0.0010978480 +2025-02-19 12:12:30,890 Train Loss: 0.0001161, Val Loss: 0.0001444 +2025-02-19 12:12:30,891 Epoch 1044/2000 +2025-02-19 12:13:12,795 Current Learning Rate: 0.0011474338 +2025-02-19 12:13:12,795 Train Loss: 0.0001084, Val Loss: 0.0001464 +2025-02-19 12:13:12,796 Epoch 1045/2000 +2025-02-19 12:13:55,182 Current Learning Rate: 0.0011979702 +2025-02-19 12:13:55,182 Train Loss: 0.0001657, Val Loss: 0.0001487 +2025-02-19 12:13:55,182 Epoch 1046/2000 +2025-02-19 12:14:37,330 Current Learning Rate: 0.0012494447 +2025-02-19 12:14:37,331 Train Loss: 0.0001013, Val Loss: 0.0001453 +2025-02-19 12:14:37,331 Epoch 1047/2000 +2025-02-19 12:15:19,416 Current Learning Rate: 0.0013018445 +2025-02-19 12:15:19,417 Train Loss: 0.0001370, Val Loss: 0.0001458 +2025-02-19 12:15:19,417 Epoch 1048/2000 +2025-02-19 12:16:02,511 Current Learning Rate: 0.0013551569 +2025-02-19 12:16:02,513 Train Loss: 0.0001308, Val Loss: 0.0001449 +2025-02-19 12:16:02,513 Epoch 1049/2000 +2025-02-19 12:16:44,401 Current Learning Rate: 0.0014093685 +2025-02-19 12:16:44,401 Train Loss: 0.0001143, Val Loss: 0.0001462 +2025-02-19 12:16:44,402 Epoch 1050/2000 +2025-02-19 12:17:27,161 Current Learning Rate: 0.0014644661 +2025-02-19 12:17:27,161 Train Loss: 0.0001946, Val Loss: 0.0001537 +2025-02-19 12:17:27,161 Epoch 1051/2000 +2025-02-19 12:18:09,600 Current Learning Rate: 0.0015204360 +2025-02-19 12:18:09,601 
Train Loss: 0.0000950, Val Loss: 0.0001452 +2025-02-19 12:18:09,601 Epoch 1052/2000 +2025-02-19 12:18:51,121 Current Learning Rate: 0.0015772645 +2025-02-19 12:18:51,122 Train Loss: 0.0001430, Val Loss: 0.0001499 +2025-02-19 12:18:51,122 Epoch 1053/2000 +2025-02-19 12:19:33,198 Current Learning Rate: 0.0016349374 +2025-02-19 12:19:33,198 Train Loss: 0.0001041, Val Loss: 0.0001450 +2025-02-19 12:19:33,198 Epoch 1054/2000 +2025-02-19 12:20:16,114 Current Learning Rate: 0.0016934407 +2025-02-19 12:20:17,657 Train Loss: 0.0000937, Val Loss: 0.0001434 +2025-02-19 12:20:17,657 Epoch 1055/2000 +2025-02-19 12:20:59,445 Current Learning Rate: 0.0017527598 +2025-02-19 12:20:59,446 Train Loss: 0.0001587, Val Loss: 0.0001616 +2025-02-19 12:20:59,446 Epoch 1056/2000 +2025-02-19 12:21:41,517 Current Learning Rate: 0.0018128801 +2025-02-19 12:21:41,518 Train Loss: 0.0001737, Val Loss: 0.0001596 +2025-02-19 12:21:41,518 Epoch 1057/2000 +2025-02-19 12:22:23,787 Current Learning Rate: 0.0018737867 +2025-02-19 12:22:23,788 Train Loss: 0.0000983, Val Loss: 0.0001460 +2025-02-19 12:22:23,788 Epoch 1058/2000 +2025-02-19 12:23:05,899 Current Learning Rate: 0.0019354647 +2025-02-19 12:23:05,900 Train Loss: 0.0001590, Val Loss: 0.0001530 +2025-02-19 12:23:05,900 Epoch 1059/2000 +2025-02-19 12:23:48,399 Current Learning Rate: 0.0019978989 +2025-02-19 12:23:48,400 Train Loss: 0.0001447, Val Loss: 0.0001477 +2025-02-19 12:23:48,400 Epoch 1060/2000 +2025-02-19 12:24:31,084 Current Learning Rate: 0.0020610737 +2025-02-19 12:24:31,085 Train Loss: 0.0001366, Val Loss: 0.0001510 +2025-02-19 12:24:31,085 Epoch 1061/2000 +2025-02-19 12:25:13,403 Current Learning Rate: 0.0021249737 +2025-02-19 12:25:13,403 Train Loss: 0.0001166, Val Loss: 0.0001506 +2025-02-19 12:25:13,403 Epoch 1062/2000 +2025-02-19 12:25:55,439 Current Learning Rate: 0.0021895831 +2025-02-19 12:25:55,439 Train Loss: 0.0001338, Val Loss: 0.0001510 +2025-02-19 12:25:55,440 Epoch 1063/2000 +2025-02-19 12:26:37,881 Current Learning 
Rate: 0.0022548859 +2025-02-19 12:26:37,882 Train Loss: 0.0001496, Val Loss: 0.0001520 +2025-02-19 12:26:37,882 Epoch 1064/2000 +2025-02-19 12:27:19,866 Current Learning Rate: 0.0023208660 +2025-02-19 12:27:19,867 Train Loss: 0.0001709, Val Loss: 0.0001529 +2025-02-19 12:27:19,867 Epoch 1065/2000 +2025-02-19 12:28:02,040 Current Learning Rate: 0.0023875072 +2025-02-19 12:28:02,041 Train Loss: 0.0001221, Val Loss: 0.0001515 +2025-02-19 12:28:02,041 Epoch 1066/2000 +2025-02-19 12:28:44,077 Current Learning Rate: 0.0024547929 +2025-02-19 12:28:44,078 Train Loss: 0.0001371, Val Loss: 0.0001564 +2025-02-19 12:28:44,078 Epoch 1067/2000 +2025-02-19 12:29:26,478 Current Learning Rate: 0.0025227067 +2025-02-19 12:29:26,478 Train Loss: 0.0001175, Val Loss: 0.0001523 +2025-02-19 12:29:26,479 Epoch 1068/2000 +2025-02-19 12:30:08,784 Current Learning Rate: 0.0025912316 +2025-02-19 12:30:08,784 Train Loss: 0.0001296, Val Loss: 0.0001506 +2025-02-19 12:30:08,785 Epoch 1069/2000 +2025-02-19 12:30:51,641 Current Learning Rate: 0.0026603509 +2025-02-19 12:30:51,642 Train Loss: 0.0001469, Val Loss: 0.0001568 +2025-02-19 12:30:51,642 Epoch 1070/2000 +2025-02-19 12:31:33,461 Current Learning Rate: 0.0027300475 +2025-02-19 12:31:33,462 Train Loss: 0.0002493, Val Loss: 0.0001804 +2025-02-19 12:31:33,462 Epoch 1071/2000 +2025-02-19 12:32:15,728 Current Learning Rate: 0.0028003042 +2025-02-19 12:32:15,729 Train Loss: 0.0001280, Val Loss: 0.0001589 +2025-02-19 12:32:15,730 Epoch 1072/2000 +2025-02-19 12:32:57,979 Current Learning Rate: 0.0028711035 +2025-02-19 12:32:57,982 Train Loss: 0.0001096, Val Loss: 0.0001540 +2025-02-19 12:32:57,982 Epoch 1073/2000 +2025-02-19 12:33:40,920 Current Learning Rate: 0.0029424282 +2025-02-19 12:33:40,921 Train Loss: 0.0001308, Val Loss: 0.0001531 +2025-02-19 12:33:40,921 Epoch 1074/2000 +2025-02-19 12:34:23,247 Current Learning Rate: 0.0030142605 +2025-02-19 12:34:23,247 Train Loss: 0.0001241, Val Loss: 0.0001499 +2025-02-19 12:34:23,248 Epoch 1075/2000 
+2025-02-19 12:35:05,193 Current Learning Rate: 0.0030865828 +2025-02-19 12:35:05,194 Train Loss: 0.0001910, Val Loss: 0.0001613 +2025-02-19 12:35:05,194 Epoch 1076/2000 +2025-02-19 12:35:47,307 Current Learning Rate: 0.0031593772 +2025-02-19 12:35:47,307 Train Loss: 0.0001411, Val Loss: 0.0001590 +2025-02-19 12:35:47,307 Epoch 1077/2000 +2025-02-19 12:36:29,226 Current Learning Rate: 0.0032326258 +2025-02-19 12:36:29,227 Train Loss: 0.0001436, Val Loss: 0.0001657 +2025-02-19 12:36:29,227 Epoch 1078/2000 +2025-02-19 12:37:11,536 Current Learning Rate: 0.0033063104 +2025-02-19 12:37:11,537 Train Loss: 0.0001327, Val Loss: 0.0001556 +2025-02-19 12:37:11,537 Epoch 1079/2000 +2025-02-19 12:37:53,953 Current Learning Rate: 0.0033804129 +2025-02-19 12:37:53,953 Train Loss: 0.0001840, Val Loss: 0.0001839 +2025-02-19 12:37:53,954 Epoch 1080/2000 +2025-02-19 12:38:36,271 Current Learning Rate: 0.0034549150 +2025-02-19 12:38:36,271 Train Loss: 0.0001659, Val Loss: 0.0001769 +2025-02-19 12:38:36,271 Epoch 1081/2000 +2025-02-19 12:39:18,165 Current Learning Rate: 0.0035297984 +2025-02-19 12:39:18,166 Train Loss: 0.0001606, Val Loss: 0.0001713 +2025-02-19 12:39:18,167 Epoch 1082/2000 +2025-02-19 12:40:00,078 Current Learning Rate: 0.0036050445 +2025-02-19 12:40:00,078 Train Loss: 0.0001189, Val Loss: 0.0001724 +2025-02-19 12:40:00,078 Epoch 1083/2000 +2025-02-19 12:40:43,042 Current Learning Rate: 0.0036806348 +2025-02-19 12:40:43,042 Train Loss: 0.0001824, Val Loss: 0.0001830 +2025-02-19 12:40:43,042 Epoch 1084/2000 +2025-02-19 12:41:25,620 Current Learning Rate: 0.0037565506 +2025-02-19 12:41:25,621 Train Loss: 0.0001132, Val Loss: 0.0001613 +2025-02-19 12:41:25,621 Epoch 1085/2000 +2025-02-19 12:42:08,002 Current Learning Rate: 0.0038327732 +2025-02-19 12:42:08,003 Train Loss: 0.0002399, Val Loss: 0.0002005 +2025-02-19 12:42:08,003 Epoch 1086/2000 +2025-02-19 12:42:50,476 Current Learning Rate: 0.0039092838 +2025-02-19 12:42:50,476 Train Loss: 0.0002117, Val Loss: 0.0001952 
+2025-02-19 12:42:50,477 Epoch 1087/2000 +2025-02-19 12:43:32,830 Current Learning Rate: 0.0039860635 +2025-02-19 12:43:32,830 Train Loss: 0.0002019, Val Loss: 0.0001762 +2025-02-19 12:43:32,830 Epoch 1088/2000 +2025-02-19 12:44:15,206 Current Learning Rate: 0.0040630934 +2025-02-19 12:44:15,206 Train Loss: 0.0001181, Val Loss: 0.0001567 +2025-02-19 12:44:15,206 Epoch 1089/2000 +2025-02-19 12:44:56,910 Current Learning Rate: 0.0041403545 +2025-02-19 12:44:56,911 Train Loss: 0.0001423, Val Loss: 0.0001763 +2025-02-19 12:44:56,911 Epoch 1090/2000 +2025-02-19 12:45:38,553 Current Learning Rate: 0.0042178277 +2025-02-19 12:45:38,553 Train Loss: 0.0001616, Val Loss: 0.0001633 +2025-02-19 12:45:38,553 Epoch 1091/2000 +2025-02-19 12:46:20,452 Current Learning Rate: 0.0042954938 +2025-02-19 12:46:20,452 Train Loss: 0.0001129, Val Loss: 0.0001663 +2025-02-19 12:46:20,452 Epoch 1092/2000 +2025-02-19 12:47:03,157 Current Learning Rate: 0.0043733338 +2025-02-19 12:47:03,157 Train Loss: 0.0001896, Val Loss: 0.0002457 +2025-02-19 12:47:03,158 Epoch 1093/2000 +2025-02-19 12:47:45,577 Current Learning Rate: 0.0044513284 +2025-02-19 12:47:45,578 Train Loss: 0.0001563, Val Loss: 0.0002070 +2025-02-19 12:47:45,578 Epoch 1094/2000 +2025-02-19 12:48:27,362 Current Learning Rate: 0.0045294584 +2025-02-19 12:48:27,363 Train Loss: 0.0002370, Val Loss: 0.0002020 +2025-02-19 12:48:27,363 Epoch 1095/2000 +2025-02-19 12:49:09,148 Current Learning Rate: 0.0046077045 +2025-02-19 12:49:09,149 Train Loss: 0.0001726, Val Loss: 0.0002041 +2025-02-19 12:49:09,149 Epoch 1096/2000 +2025-02-19 12:49:51,444 Current Learning Rate: 0.0046860474 +2025-02-19 12:49:51,445 Train Loss: 0.0001593, Val Loss: 0.0001721 +2025-02-19 12:49:51,445 Epoch 1097/2000 +2025-02-19 12:50:33,843 Current Learning Rate: 0.0047644677 +2025-02-19 12:50:33,843 Train Loss: 0.0001578, Val Loss: 0.0001655 +2025-02-19 12:50:33,843 Epoch 1098/2000 +2025-02-19 12:51:15,509 Current Learning Rate: 0.0048429462 +2025-02-19 12:51:15,510 
Train Loss: 0.0002148, Val Loss: 0.0001757 +2025-02-19 12:51:15,510 Epoch 1099/2000 +2025-02-19 12:51:57,547 Current Learning Rate: 0.0049214634 +2025-02-19 12:51:57,547 Train Loss: 0.0001446, Val Loss: 0.0001868 +2025-02-19 12:51:57,547 Epoch 1100/2000 +2025-02-19 12:52:40,192 Current Learning Rate: 0.0050000000 +2025-02-19 12:52:40,192 Train Loss: 0.0001316, Val Loss: 0.0001598 +2025-02-19 12:52:40,193 Epoch 1101/2000 +2025-02-19 12:53:22,552 Current Learning Rate: 0.0050785366 +2025-02-19 12:53:22,553 Train Loss: 0.0001596, Val Loss: 0.0001671 +2025-02-19 12:53:22,553 Epoch 1102/2000 +2025-02-19 12:54:04,947 Current Learning Rate: 0.0051570538 +2025-02-19 12:54:04,948 Train Loss: 0.0001294, Val Loss: 0.0001663 +2025-02-19 12:54:04,948 Epoch 1103/2000 +2025-02-19 12:54:47,718 Current Learning Rate: 0.0052355323 +2025-02-19 12:54:47,719 Train Loss: 0.0001688, Val Loss: 0.0002068 +2025-02-19 12:54:47,719 Epoch 1104/2000 +2025-02-19 12:55:29,632 Current Learning Rate: 0.0053139526 +2025-02-19 12:55:29,632 Train Loss: 0.0002099, Val Loss: 0.0002270 +2025-02-19 12:55:29,633 Epoch 1105/2000 +2025-02-19 12:56:11,665 Current Learning Rate: 0.0053922955 +2025-02-19 12:56:11,665 Train Loss: 0.0001792, Val Loss: 0.0002228 +2025-02-19 12:56:11,666 Epoch 1106/2000 +2025-02-19 12:56:53,767 Current Learning Rate: 0.0054705416 +2025-02-19 12:56:53,768 Train Loss: 0.0002028, Val Loss: 0.0002797 +2025-02-19 12:56:53,768 Epoch 1107/2000 +2025-02-19 12:57:36,753 Current Learning Rate: 0.0055486716 +2025-02-19 12:57:36,754 Train Loss: 0.0002270, Val Loss: 0.0002976 +2025-02-19 12:57:36,754 Epoch 1108/2000 +2025-02-19 12:58:19,277 Current Learning Rate: 0.0056266662 +2025-02-19 12:58:19,277 Train Loss: 0.0002125, Val Loss: 0.0002516 +2025-02-19 12:58:19,278 Epoch 1109/2000 +2025-02-19 12:59:01,673 Current Learning Rate: 0.0057045062 +2025-02-19 12:59:01,674 Train Loss: 0.0002487, Val Loss: 0.0002315 +2025-02-19 12:59:01,674 Epoch 1110/2000 +2025-02-19 12:59:44,207 Current Learning 
Rate: 0.0057821723 +2025-02-19 12:59:44,207 Train Loss: 0.0002261, Val Loss: 0.0002027 +2025-02-19 12:59:44,208 Epoch 1111/2000 +2025-02-19 13:00:26,579 Current Learning Rate: 0.0058596455 +2025-02-19 13:00:26,580 Train Loss: 0.0002362, Val Loss: 0.0001919 +2025-02-19 13:00:26,580 Epoch 1112/2000 +2025-02-19 13:01:08,202 Current Learning Rate: 0.0059369066 +2025-02-19 13:01:08,203 Train Loss: 0.0002069, Val Loss: 0.0001837 +2025-02-19 13:01:08,203 Epoch 1113/2000 +2025-02-19 13:01:50,346 Current Learning Rate: 0.0060139365 +2025-02-19 13:01:50,347 Train Loss: 0.0003135, Val Loss: 0.0002316 +2025-02-19 13:01:50,347 Epoch 1114/2000 +2025-02-19 13:02:32,710 Current Learning Rate: 0.0060907162 +2025-02-19 13:02:32,710 Train Loss: 0.0002733, Val Loss: 0.0001897 +2025-02-19 13:02:32,711 Epoch 1115/2000 +2025-02-19 13:03:15,494 Current Learning Rate: 0.0061672268 +2025-02-19 13:03:15,494 Train Loss: 0.0001470, Val Loss: 0.0001702 +2025-02-19 13:03:15,495 Epoch 1116/2000 +2025-02-19 13:03:58,052 Current Learning Rate: 0.0062434494 +2025-02-19 13:03:58,053 Train Loss: 0.0001813, Val Loss: 0.0001769 +2025-02-19 13:03:58,053 Epoch 1117/2000 +2025-02-19 13:04:40,547 Current Learning Rate: 0.0063193652 +2025-02-19 13:04:40,548 Train Loss: 0.0001782, Val Loss: 0.0002198 +2025-02-19 13:04:40,548 Epoch 1118/2000 +2025-02-19 13:05:22,888 Current Learning Rate: 0.0063949555 +2025-02-19 13:05:22,889 Train Loss: 0.0002772, Val Loss: 0.0002026 +2025-02-19 13:05:22,889 Epoch 1119/2000 +2025-02-19 13:06:04,563 Current Learning Rate: 0.0064702016 +2025-02-19 13:06:04,564 Train Loss: 0.0002211, Val Loss: 0.0002022 +2025-02-19 13:06:04,564 Epoch 1120/2000 +2025-02-19 13:06:46,526 Current Learning Rate: 0.0065450850 +2025-02-19 13:06:46,526 Train Loss: 0.0001570, Val Loss: 0.0002091 +2025-02-19 13:06:46,526 Epoch 1121/2000 +2025-02-19 13:07:29,378 Current Learning Rate: 0.0066195871 +2025-02-19 13:07:29,379 Train Loss: 0.0002027, Val Loss: 0.0001975 +2025-02-19 13:07:29,379 Epoch 1122/2000 
+2025-02-19 13:08:11,037 Current Learning Rate: 0.0066936896 +2025-02-19 13:08:11,038 Train Loss: 0.0001509, Val Loss: 0.0001921 +2025-02-19 13:08:11,038 Epoch 1123/2000 +2025-02-19 13:08:53,431 Current Learning Rate: 0.0067673742 +2025-02-19 13:08:53,431 Train Loss: 0.0002708, Val Loss: 0.0002644 +2025-02-19 13:08:53,432 Epoch 1124/2000 +2025-02-19 13:09:35,330 Current Learning Rate: 0.0068406228 +2025-02-19 13:09:35,330 Train Loss: 0.0002365, Val Loss: 0.0002505 +2025-02-19 13:09:35,331 Epoch 1125/2000 +2025-02-19 13:10:17,898 Current Learning Rate: 0.0069134172 +2025-02-19 13:10:17,899 Train Loss: 0.0002604, Val Loss: 0.0002253 +2025-02-19 13:10:17,899 Epoch 1126/2000 +2025-02-19 13:10:59,765 Current Learning Rate: 0.0069857395 +2025-02-19 13:10:59,765 Train Loss: 0.0002276, Val Loss: 0.0002418 +2025-02-19 13:10:59,765 Epoch 1127/2000 +2025-02-19 13:11:41,779 Current Learning Rate: 0.0070575718 +2025-02-19 13:11:41,780 Train Loss: 0.0002578, Val Loss: 0.0002592 +2025-02-19 13:11:41,780 Epoch 1128/2000 +2025-02-19 13:12:24,152 Current Learning Rate: 0.0071288965 +2025-02-19 13:12:24,152 Train Loss: 0.0002164, Val Loss: 0.0002548 +2025-02-19 13:12:24,153 Epoch 1129/2000 +2025-02-19 13:13:06,278 Current Learning Rate: 0.0071996958 +2025-02-19 13:13:06,279 Train Loss: 0.0002487, Val Loss: 0.0002697 +2025-02-19 13:13:06,279 Epoch 1130/2000 +2025-02-19 13:13:48,840 Current Learning Rate: 0.0072699525 +2025-02-19 13:13:48,840 Train Loss: 0.0002083, Val Loss: 0.0002952 +2025-02-19 13:13:48,840 Epoch 1131/2000 +2025-02-19 13:14:30,758 Current Learning Rate: 0.0073396491 +2025-02-19 13:14:30,759 Train Loss: 0.0002671, Val Loss: 0.0002714 +2025-02-19 13:14:30,760 Epoch 1132/2000 +2025-02-19 13:15:13,370 Current Learning Rate: 0.0074087684 +2025-02-19 13:15:13,371 Train Loss: 0.0002305, Val Loss: 0.0002264 +2025-02-19 13:15:13,371 Epoch 1133/2000 +2025-02-19 13:15:55,587 Current Learning Rate: 0.0074772933 +2025-02-19 13:15:55,588 Train Loss: 0.0002107, Val Loss: 0.0002091 
+2025-02-19 13:15:55,588 Epoch 1134/2000 +2025-02-19 13:16:37,178 Current Learning Rate: 0.0075452071 +2025-02-19 13:16:37,179 Train Loss: 0.0001699, Val Loss: 0.0001872 +2025-02-19 13:16:37,179 Epoch 1135/2000 +2025-02-19 13:17:19,441 Current Learning Rate: 0.0076124928 +2025-02-19 13:17:19,443 Train Loss: 0.0001786, Val Loss: 0.0002335 +2025-02-19 13:17:19,443 Epoch 1136/2000 +2025-02-19 13:18:01,477 Current Learning Rate: 0.0076791340 +2025-02-19 13:18:01,478 Train Loss: 0.0002975, Val Loss: 0.0002418 +2025-02-19 13:18:01,478 Epoch 1137/2000 +2025-02-19 13:18:44,517 Current Learning Rate: 0.0077451141 +2025-02-19 13:18:44,518 Train Loss: 0.0002291, Val Loss: 0.0003441 +2025-02-19 13:18:44,518 Epoch 1138/2000 +2025-02-19 13:19:26,102 Current Learning Rate: 0.0078104169 +2025-02-19 13:19:26,102 Train Loss: 0.0023382, Val Loss: 0.0032763 +2025-02-19 13:19:26,103 Epoch 1139/2000 +2025-02-19 13:20:08,803 Current Learning Rate: 0.0078750263 +2025-02-19 13:20:08,804 Train Loss: 0.0013620, Val Loss: 0.0004987 +2025-02-19 13:20:08,804 Epoch 1140/2000 +2025-02-19 13:20:50,609 Current Learning Rate: 0.0079389263 +2025-02-19 13:20:50,609 Train Loss: 0.0003463, Val Loss: 0.0002614 +2025-02-19 13:20:50,609 Epoch 1141/2000 +2025-02-19 13:21:33,087 Current Learning Rate: 0.0080021011 +2025-02-19 13:21:33,088 Train Loss: 0.0002269, Val Loss: 0.0002121 +2025-02-19 13:21:33,088 Epoch 1142/2000 +2025-02-19 13:22:15,684 Current Learning Rate: 0.0080645353 +2025-02-19 13:22:15,684 Train Loss: 0.0001901, Val Loss: 0.0002413 +2025-02-19 13:22:15,685 Epoch 1143/2000 +2025-02-19 13:22:58,154 Current Learning Rate: 0.0081262133 +2025-02-19 13:22:58,155 Train Loss: 0.0002556, Val Loss: 0.0002224 +2025-02-19 13:22:58,155 Epoch 1144/2000 +2025-02-19 13:23:40,666 Current Learning Rate: 0.0081871199 +2025-02-19 13:23:40,667 Train Loss: 0.0002128, Val Loss: 0.0002427 +2025-02-19 13:23:40,668 Epoch 1145/2000 +2025-02-19 13:24:22,354 Current Learning Rate: 0.0082472402 +2025-02-19 13:24:22,354 
Train Loss: 0.0002196, Val Loss: 0.0002321 +2025-02-19 13:24:22,355 Epoch 1146/2000 +2025-02-19 13:25:04,921 Current Learning Rate: 0.0083065593 +2025-02-19 13:25:04,922 Train Loss: 0.0003354, Val Loss: 0.0002860 +2025-02-19 13:25:04,922 Epoch 1147/2000 +2025-02-19 13:25:47,482 Current Learning Rate: 0.0083650626 +2025-02-19 13:25:47,483 Train Loss: 0.0002021, Val Loss: 0.0002360 +2025-02-19 13:25:47,483 Epoch 1148/2000 +2025-02-19 13:26:29,197 Current Learning Rate: 0.0084227355 +2025-02-19 13:26:29,198 Train Loss: 0.0002405, Val Loss: 0.0002483 +2025-02-19 13:26:29,198 Epoch 1149/2000 +2025-02-19 13:27:11,671 Current Learning Rate: 0.0084795640 +2025-02-19 13:27:11,671 Train Loss: 0.0002579, Val Loss: 0.0003024 +2025-02-19 13:27:11,671 Epoch 1150/2000 +2025-02-19 13:27:53,689 Current Learning Rate: 0.0085355339 +2025-02-19 13:27:53,690 Train Loss: 0.0004028, Val Loss: 0.0006508 +2025-02-19 13:27:53,690 Epoch 1151/2000 +2025-02-19 13:28:36,470 Current Learning Rate: 0.0085906315 +2025-02-19 13:28:36,470 Train Loss: 0.0004345, Val Loss: 0.0004311 +2025-02-19 13:28:36,470 Epoch 1152/2000 +2025-02-19 13:29:18,487 Current Learning Rate: 0.0086448431 +2025-02-19 13:29:18,487 Train Loss: 0.0003884, Val Loss: 0.0004781 +2025-02-19 13:29:18,488 Epoch 1153/2000 +2025-02-19 13:30:00,678 Current Learning Rate: 0.0086981555 +2025-02-19 13:30:00,678 Train Loss: 0.0003101, Val Loss: 0.0004376 +2025-02-19 13:30:00,678 Epoch 1154/2000 +2025-02-19 13:30:42,950 Current Learning Rate: 0.0087505553 +2025-02-19 13:30:42,950 Train Loss: 0.0002247, Val Loss: 0.0002304 +2025-02-19 13:30:42,950 Epoch 1155/2000 +2025-02-19 13:31:24,869 Current Learning Rate: 0.0088020298 +2025-02-19 13:31:24,869 Train Loss: 0.0002326, Val Loss: 0.0002712 +2025-02-19 13:31:24,869 Epoch 1156/2000 +2025-02-19 13:32:07,222 Current Learning Rate: 0.0088525662 +2025-02-19 13:32:07,223 Train Loss: 0.0002316, Val Loss: 0.0002165 +2025-02-19 13:32:07,223 Epoch 1157/2000 +2025-02-19 13:32:49,019 Current Learning 
Rate: 0.0089021520 +2025-02-19 13:32:49,019 Train Loss: 0.0001506, Val Loss: 0.0001975 +2025-02-19 13:32:49,019 Epoch 1158/2000 +2025-02-19 13:33:30,449 Current Learning Rate: 0.0089507751 +2025-02-19 13:33:30,449 Train Loss: 0.0002670, Val Loss: 0.0003815 +2025-02-19 13:33:30,450 Epoch 1159/2000 +2025-02-19 13:34:12,290 Current Learning Rate: 0.0089984233 +2025-02-19 13:34:12,291 Train Loss: 0.0003771, Val Loss: 0.0002460 +2025-02-19 13:34:12,291 Epoch 1160/2000 +2025-02-19 13:34:54,468 Current Learning Rate: 0.0090450850 +2025-02-19 13:34:54,469 Train Loss: 0.0001793, Val Loss: 0.0002131 +2025-02-19 13:34:54,469 Epoch 1161/2000 +2025-02-19 13:35:36,447 Current Learning Rate: 0.0090907486 +2025-02-19 13:35:36,447 Train Loss: 0.0001573, Val Loss: 0.0002061 +2025-02-19 13:35:36,447 Epoch 1162/2000 +2025-02-19 13:36:19,103 Current Learning Rate: 0.0091354029 +2025-02-19 13:36:19,104 Train Loss: 0.0002408, Val Loss: 0.0002080 +2025-02-19 13:36:19,104 Epoch 1163/2000 +2025-02-19 13:37:00,560 Current Learning Rate: 0.0091790368 +2025-02-19 13:37:00,560 Train Loss: 0.0003083, Val Loss: 0.0002568 +2025-02-19 13:37:00,560 Epoch 1164/2000 +2025-02-19 13:37:42,816 Current Learning Rate: 0.0092216396 +2025-02-19 13:37:42,817 Train Loss: 0.0002384, Val Loss: 0.0002430 +2025-02-19 13:37:42,817 Epoch 1165/2000 +2025-02-19 13:38:24,464 Current Learning Rate: 0.0092632008 +2025-02-19 13:38:24,465 Train Loss: 0.0002233, Val Loss: 0.0002381 +2025-02-19 13:38:24,465 Epoch 1166/2000 +2025-02-19 13:39:07,054 Current Learning Rate: 0.0093037101 +2025-02-19 13:39:07,055 Train Loss: 0.0002764, Val Loss: 0.0002309 +2025-02-19 13:39:07,055 Epoch 1167/2000 +2025-02-19 13:39:49,253 Current Learning Rate: 0.0093431576 +2025-02-19 13:39:49,253 Train Loss: 0.0002584, Val Loss: 0.0002211 +2025-02-19 13:39:49,253 Epoch 1168/2000 +2025-02-19 13:40:31,530 Current Learning Rate: 0.0093815334 +2025-02-19 13:40:31,531 Train Loss: 0.0003225, Val Loss: 0.0002552 +2025-02-19 13:40:31,531 Epoch 1169/2000 
+2025-02-19 13:41:13,780 Current Learning Rate: 0.0094188282 +2025-02-19 13:41:13,781 Train Loss: 0.0002395, Val Loss: 0.0002569 +2025-02-19 13:41:13,781 Epoch 1170/2000 +2025-02-19 13:41:55,569 Current Learning Rate: 0.0094550326 +2025-02-19 13:41:55,570 Train Loss: 0.0002197, Val Loss: 0.0002329 +2025-02-19 13:41:55,570 Epoch 1171/2000 +2025-02-19 13:42:38,137 Current Learning Rate: 0.0094901379 +2025-02-19 13:42:38,138 Train Loss: 0.0002297, Val Loss: 0.0002454 +2025-02-19 13:42:38,138 Epoch 1172/2000 +2025-02-19 13:43:20,380 Current Learning Rate: 0.0095241353 +2025-02-19 13:43:20,380 Train Loss: 0.0002180, Val Loss: 0.0002914 +2025-02-19 13:43:20,381 Epoch 1173/2000 +2025-02-19 13:44:02,586 Current Learning Rate: 0.0095570164 +2025-02-19 13:44:02,587 Train Loss: 0.0002646, Val Loss: 0.0002740 +2025-02-19 13:44:02,587 Epoch 1174/2000 +2025-02-19 13:44:44,944 Current Learning Rate: 0.0095887731 +2025-02-19 13:44:44,944 Train Loss: 0.0002990, Val Loss: 0.0002360 +2025-02-19 13:44:44,944 Epoch 1175/2000 +2025-02-19 13:45:27,495 Current Learning Rate: 0.0096193977 +2025-02-19 13:45:27,496 Train Loss: 0.0002073, Val Loss: 0.0002180 +2025-02-19 13:45:27,496 Epoch 1176/2000 +2025-02-19 13:46:09,688 Current Learning Rate: 0.0096488824 +2025-02-19 13:46:09,689 Train Loss: 0.0001677, Val Loss: 0.0002261 +2025-02-19 13:46:09,689 Epoch 1177/2000 +2025-02-19 13:46:51,484 Current Learning Rate: 0.0096772202 +2025-02-19 13:46:51,484 Train Loss: 0.0001842, Val Loss: 0.0002575 +2025-02-19 13:46:51,484 Epoch 1178/2000 +2025-02-19 13:47:34,053 Current Learning Rate: 0.0097044038 +2025-02-19 13:47:34,054 Train Loss: 0.0002297, Val Loss: 0.0002686 +2025-02-19 13:47:34,054 Epoch 1179/2000 +2025-02-19 13:48:15,878 Current Learning Rate: 0.0097304268 +2025-02-19 13:48:15,879 Train Loss: 0.0003759, Val Loss: 0.0002627 +2025-02-19 13:48:15,879 Epoch 1180/2000 +2025-02-19 13:48:57,850 Current Learning Rate: 0.0097552826 +2025-02-19 13:48:57,850 Train Loss: 0.0002914, Val Loss: 0.0002834 
+2025-02-19 13:48:57,851 Epoch 1181/2000 +2025-02-19 13:49:39,657 Current Learning Rate: 0.0097789651 +2025-02-19 13:49:39,657 Train Loss: 0.0002908, Val Loss: 0.0002451 +2025-02-19 13:49:39,657 Epoch 1182/2000 +2025-02-19 13:50:21,766 Current Learning Rate: 0.0098014684 +2025-02-19 13:50:21,767 Train Loss: 0.0002527, Val Loss: 0.0002757 +2025-02-19 13:50:21,767 Epoch 1183/2000 +2025-02-19 13:51:04,218 Current Learning Rate: 0.0098227871 +2025-02-19 13:51:04,219 Train Loss: 0.0002253, Val Loss: 0.0002379 +2025-02-19 13:51:04,219 Epoch 1184/2000 +2025-02-19 13:51:47,386 Current Learning Rate: 0.0098429158 +2025-02-19 13:51:47,387 Train Loss: 0.0002205, Val Loss: 0.0002444 +2025-02-19 13:51:47,387 Epoch 1185/2000 +2025-02-19 13:52:29,729 Current Learning Rate: 0.0098618496 +2025-02-19 13:52:29,729 Train Loss: 0.0001862, Val Loss: 0.0002031 +2025-02-19 13:52:29,730 Epoch 1186/2000 +2025-02-19 13:53:12,149 Current Learning Rate: 0.0098795838 +2025-02-19 13:53:12,149 Train Loss: 0.0002301, Val Loss: 0.0002153 +2025-02-19 13:53:12,150 Epoch 1187/2000 +2025-02-19 13:53:53,942 Current Learning Rate: 0.0098961141 +2025-02-19 13:53:53,942 Train Loss: 0.0002523, Val Loss: 0.0002782 +2025-02-19 13:53:53,943 Epoch 1188/2000 +2025-02-19 13:54:36,723 Current Learning Rate: 0.0099114363 +2025-02-19 13:54:36,724 Train Loss: 0.0002066, Val Loss: 0.0003214 +2025-02-19 13:54:36,724 Epoch 1189/2000 +2025-02-19 13:55:19,226 Current Learning Rate: 0.0099255466 +2025-02-19 13:55:19,227 Train Loss: 0.0002971, Val Loss: 0.0002629 +2025-02-19 13:55:19,227 Epoch 1190/2000 +2025-02-19 13:56:01,674 Current Learning Rate: 0.0099384417 +2025-02-19 13:56:01,675 Train Loss: 0.0002703, Val Loss: 0.0002672 +2025-02-19 13:56:01,676 Epoch 1191/2000 +2025-02-19 13:56:43,641 Current Learning Rate: 0.0099501183 +2025-02-19 13:56:43,641 Train Loss: 0.0002029, Val Loss: 0.0002400 +2025-02-19 13:56:43,642 Epoch 1192/2000 +2025-02-19 13:57:26,568 Current Learning Rate: 0.0099605735 +2025-02-19 13:57:26,569 
Train Loss: 0.0002391, Val Loss: 0.0002149 +2025-02-19 13:57:26,569 Epoch 1193/2000 +2025-02-19 13:58:08,336 Current Learning Rate: 0.0099698048 +2025-02-19 13:58:08,336 Train Loss: 0.0002001, Val Loss: 0.0002242 +2025-02-19 13:58:08,337 Epoch 1194/2000 +2025-02-19 13:58:50,456 Current Learning Rate: 0.0099778098 +2025-02-19 13:58:50,456 Train Loss: 0.0002923, Val Loss: 0.0001914 +2025-02-19 13:58:50,456 Epoch 1195/2000 +2025-02-19 13:59:33,253 Current Learning Rate: 0.0099845867 +2025-02-19 13:59:33,254 Train Loss: 0.0001615, Val Loss: 0.0001817 +2025-02-19 13:59:33,254 Epoch 1196/2000 +2025-02-19 14:00:15,626 Current Learning Rate: 0.0099901336 +2025-02-19 14:00:15,627 Train Loss: 0.0001818, Val Loss: 0.0001883 +2025-02-19 14:00:15,627 Epoch 1197/2000 +2025-02-19 14:00:57,347 Current Learning Rate: 0.0099944494 +2025-02-19 14:00:57,348 Train Loss: 0.0002169, Val Loss: 0.0002434 +2025-02-19 14:00:57,348 Epoch 1198/2000 +2025-02-19 14:01:39,417 Current Learning Rate: 0.0099975328 +2025-02-19 14:01:39,417 Train Loss: 0.0001647, Val Loss: 0.0001866 +2025-02-19 14:01:39,418 Epoch 1199/2000 +2025-02-19 14:02:21,853 Current Learning Rate: 0.0099993832 +2025-02-19 14:02:21,854 Train Loss: 0.0002664, Val Loss: 0.0003024 +2025-02-19 14:02:21,854 Epoch 1200/2000 +2025-02-19 14:03:04,087 Current Learning Rate: 0.0100000000 +2025-02-19 14:03:04,088 Train Loss: 0.0002719, Val Loss: 0.0002460 +2025-02-19 14:03:04,088 Epoch 1201/2000 +2025-02-19 14:03:46,742 Current Learning Rate: 0.0099993832 +2025-02-19 14:03:46,743 Train Loss: 0.0002651, Val Loss: 0.0002228 +2025-02-19 14:03:46,743 Epoch 1202/2000 +2025-02-19 14:04:28,555 Current Learning Rate: 0.0099975328 +2025-02-19 14:04:28,556 Train Loss: 0.0001665, Val Loss: 0.0002318 +2025-02-19 14:04:28,556 Epoch 1203/2000 +2025-02-19 14:05:10,862 Current Learning Rate: 0.0099944494 +2025-02-19 14:05:10,862 Train Loss: 0.0002868, Val Loss: 0.0002464 +2025-02-19 14:05:10,862 Epoch 1204/2000 +2025-02-19 14:05:53,400 Current Learning 
Rate: 0.0099901336 +2025-02-19 14:05:53,401 Train Loss: 0.0002940, Val Loss: 0.0002515 +2025-02-19 14:05:53,401 Epoch 1205/2000 +2025-02-19 14:06:35,553 Current Learning Rate: 0.0099845867 +2025-02-19 14:06:35,554 Train Loss: 0.0002447, Val Loss: 0.0003228 +2025-02-19 14:06:35,554 Epoch 1206/2000 +2025-02-19 14:07:17,669 Current Learning Rate: 0.0099778098 +2025-02-19 14:07:17,670 Train Loss: 0.0003078, Val Loss: 0.0002793 +2025-02-19 14:07:17,670 Epoch 1207/2000 +2025-02-19 14:07:59,819 Current Learning Rate: 0.0099698048 +2025-02-19 14:07:59,819 Train Loss: 0.0002678, Val Loss: 0.0002355 +2025-02-19 14:07:59,820 Epoch 1208/2000 +2025-02-19 14:08:41,389 Current Learning Rate: 0.0099605735 +2025-02-19 14:08:41,389 Train Loss: 0.0002296, Val Loss: 0.0002388 +2025-02-19 14:08:41,390 Epoch 1209/2000 +2025-02-19 14:09:24,404 Current Learning Rate: 0.0099501183 +2025-02-19 14:09:24,404 Train Loss: 0.0002542, Val Loss: 0.0002963 +2025-02-19 14:09:24,404 Epoch 1210/2000 +2025-02-19 14:10:06,797 Current Learning Rate: 0.0099384417 +2025-02-19 14:10:06,798 Train Loss: 0.0001936, Val Loss: 0.0002170 +2025-02-19 14:10:06,798 Epoch 1211/2000 +2025-02-19 14:10:49,164 Current Learning Rate: 0.0099255466 +2025-02-19 14:10:49,165 Train Loss: 0.0002173, Val Loss: 0.0002028 +2025-02-19 14:10:49,165 Epoch 1212/2000 +2025-02-19 14:11:31,356 Current Learning Rate: 0.0099114363 +2025-02-19 14:11:31,357 Train Loss: 0.0002264, Val Loss: 0.0002248 +2025-02-19 14:11:31,358 Epoch 1213/2000 +2025-02-19 14:12:13,517 Current Learning Rate: 0.0098961141 +2025-02-19 14:12:13,518 Train Loss: 0.0001839, Val Loss: 0.0002021 +2025-02-19 14:12:13,518 Epoch 1214/2000 +2025-02-19 14:12:56,027 Current Learning Rate: 0.0098795838 +2025-02-19 14:12:56,028 Train Loss: 0.0001973, Val Loss: 0.0001982 +2025-02-19 14:12:56,028 Epoch 1215/2000 +2025-02-19 14:13:38,368 Current Learning Rate: 0.0098618496 +2025-02-19 14:13:38,369 Train Loss: 0.0001745, Val Loss: 0.0002014 +2025-02-19 14:13:38,369 Epoch 1216/2000 
+2025-02-19 14:14:20,930 Current Learning Rate: 0.0098429158 +2025-02-19 14:14:20,931 Train Loss: 0.0001793, Val Loss: 0.0001791 +2025-02-19 14:14:20,931 Epoch 1217/2000 +2025-02-19 14:15:03,432 Current Learning Rate: 0.0098227871 +2025-02-19 14:15:03,432 Train Loss: 0.0001886, Val Loss: 0.0001914 +2025-02-19 14:15:03,433 Epoch 1218/2000 +2025-02-19 14:15:44,889 Current Learning Rate: 0.0098014684 +2025-02-19 14:15:44,890 Train Loss: 0.0001789, Val Loss: 0.0001904 +2025-02-19 14:15:44,890 Epoch 1219/2000 +2025-02-19 14:16:27,908 Current Learning Rate: 0.0097789651 +2025-02-19 14:16:27,908 Train Loss: 0.0001721, Val Loss: 0.0001994 +2025-02-19 14:16:27,908 Epoch 1220/2000 +2025-02-19 14:17:09,961 Current Learning Rate: 0.0097552826 +2025-02-19 14:17:09,962 Train Loss: 0.0001862, Val Loss: 0.0002094 +2025-02-19 14:17:09,962 Epoch 1221/2000 +2025-02-19 14:17:52,621 Current Learning Rate: 0.0097304268 +2025-02-19 14:17:52,622 Train Loss: 0.0002349, Val Loss: 0.0002255 +2025-02-19 14:17:52,622 Epoch 1222/2000 +2025-02-19 14:18:34,418 Current Learning Rate: 0.0097044038 +2025-02-19 14:18:34,419 Train Loss: 0.0002171, Val Loss: 0.0002040 +2025-02-19 14:18:34,419 Epoch 1223/2000 +2025-02-19 14:19:17,288 Current Learning Rate: 0.0096772202 +2025-02-19 14:19:17,289 Train Loss: 0.0001741, Val Loss: 0.0001951 +2025-02-19 14:19:17,289 Epoch 1224/2000 +2025-02-19 14:19:58,728 Current Learning Rate: 0.0096488824 +2025-02-19 14:19:58,728 Train Loss: 0.0001559, Val Loss: 0.0001833 +2025-02-19 14:19:58,729 Epoch 1225/2000 +2025-02-19 14:20:40,889 Current Learning Rate: 0.0096193977 +2025-02-19 14:20:40,889 Train Loss: 0.0001516, Val Loss: 0.0001909 +2025-02-19 14:20:40,890 Epoch 1226/2000 +2025-02-19 14:21:23,424 Current Learning Rate: 0.0095887731 +2025-02-19 14:21:23,425 Train Loss: 0.0001835, Val Loss: 0.0001820 +2025-02-19 14:21:23,425 Epoch 1227/2000 +2025-02-19 14:22:06,120 Current Learning Rate: 0.0095570164 +2025-02-19 14:22:06,120 Train Loss: 0.0001919, Val Loss: 0.0002005 
+2025-02-19 14:22:06,121 Epoch 1228/2000 +2025-02-19 14:22:48,134 Current Learning Rate: 0.0095241353 +2025-02-19 14:22:48,134 Train Loss: 0.0001803, Val Loss: 0.0001876 +2025-02-19 14:22:48,135 Epoch 1229/2000 +2025-02-19 14:23:30,192 Current Learning Rate: 0.0094901379 +2025-02-19 14:23:30,193 Train Loss: 0.0002598, Val Loss: 0.0001962 +2025-02-19 14:23:30,193 Epoch 1230/2000 +2025-02-19 14:24:12,606 Current Learning Rate: 0.0094550326 +2025-02-19 14:24:12,607 Train Loss: 0.0001980, Val Loss: 0.0002233 +2025-02-19 14:24:12,607 Epoch 1231/2000 +2025-02-19 14:24:54,754 Current Learning Rate: 0.0094188282 +2025-02-19 14:24:54,754 Train Loss: 0.0001980, Val Loss: 0.0002386 +2025-02-19 14:24:54,754 Epoch 1232/2000 +2025-02-19 14:25:36,965 Current Learning Rate: 0.0093815334 +2025-02-19 14:25:36,965 Train Loss: 0.0001768, Val Loss: 0.0002816 +2025-02-19 14:25:36,966 Epoch 1233/2000 +2025-02-19 14:26:18,743 Current Learning Rate: 0.0093431576 +2025-02-19 14:26:18,743 Train Loss: 0.0002907, Val Loss: 0.0002671 +2025-02-19 14:26:18,743 Epoch 1234/2000 +2025-02-19 14:27:01,306 Current Learning Rate: 0.0093037101 +2025-02-19 14:27:01,307 Train Loss: 0.0004224, Val Loss: 0.0002443 +2025-02-19 14:27:01,308 Epoch 1235/2000 +2025-02-19 14:27:43,766 Current Learning Rate: 0.0092632008 +2025-02-19 14:27:43,767 Train Loss: 0.0002431, Val Loss: 0.0003026 +2025-02-19 14:27:43,767 Epoch 1236/2000 +2025-02-19 14:28:26,084 Current Learning Rate: 0.0092216396 +2025-02-19 14:28:26,085 Train Loss: 0.0002409, Val Loss: 0.0002395 +2025-02-19 14:28:26,085 Epoch 1237/2000 +2025-02-19 14:29:08,650 Current Learning Rate: 0.0091790368 +2025-02-19 14:29:08,650 Train Loss: 0.0002162, Val Loss: 0.0002014 +2025-02-19 14:29:08,650 Epoch 1238/2000 +2025-02-19 14:29:50,235 Current Learning Rate: 0.0091354029 +2025-02-19 14:29:50,235 Train Loss: 0.0002581, Val Loss: 0.0002200 +2025-02-19 14:29:50,236 Epoch 1239/2000 +2025-02-19 14:30:32,614 Current Learning Rate: 0.0090907486 +2025-02-19 14:30:32,614 
Train Loss: 0.0001874, Val Loss: 0.0001987 +2025-02-19 14:30:32,615 Epoch 1240/2000 +2025-02-19 14:31:15,284 Current Learning Rate: 0.0090450850 +2025-02-19 14:31:15,284 Train Loss: 0.0002256, Val Loss: 0.0002165 +2025-02-19 14:31:15,284 Epoch 1241/2000 +2025-02-19 14:31:57,366 Current Learning Rate: 0.0089984233 +2025-02-19 14:31:57,367 Train Loss: 0.0002085, Val Loss: 0.0002221 +2025-02-19 14:31:57,367 Epoch 1242/2000 +2025-02-19 14:32:39,733 Current Learning Rate: 0.0089507751 +2025-02-19 14:32:39,733 Train Loss: 0.0001634, Val Loss: 0.0001835 +2025-02-19 14:32:39,733 Epoch 1243/2000 +2025-02-19 14:33:21,889 Current Learning Rate: 0.0089021520 +2025-02-19 14:33:21,890 Train Loss: 0.0001988, Val Loss: 0.0001909 +2025-02-19 14:33:21,890 Epoch 1244/2000 +2025-02-19 14:34:03,531 Current Learning Rate: 0.0088525662 +2025-02-19 14:34:03,531 Train Loss: 0.0001807, Val Loss: 0.0001771 +2025-02-19 14:34:03,531 Epoch 1245/2000 +2025-02-19 14:34:45,733 Current Learning Rate: 0.0088020298 +2025-02-19 14:34:45,733 Train Loss: 0.0001736, Val Loss: 0.0001734 +2025-02-19 14:34:45,733 Epoch 1246/2000 +2025-02-19 14:35:28,286 Current Learning Rate: 0.0087505553 +2025-02-19 14:35:28,286 Train Loss: 0.0001292, Val Loss: 0.0001642 +2025-02-19 14:35:28,287 Epoch 1247/2000 +2025-02-19 14:36:10,490 Current Learning Rate: 0.0086981555 +2025-02-19 14:36:10,490 Train Loss: 0.0001689, Val Loss: 0.0001677 +2025-02-19 14:36:10,491 Epoch 1248/2000 +2025-02-19 14:36:52,365 Current Learning Rate: 0.0086448431 +2025-02-19 14:36:52,366 Train Loss: 0.0001446, Val Loss: 0.0001763 +2025-02-19 14:36:52,366 Epoch 1249/2000 +2025-02-19 14:37:34,580 Current Learning Rate: 0.0085906315 +2025-02-19 14:37:34,581 Train Loss: 0.0001881, Val Loss: 0.0001863 +2025-02-19 14:37:34,581 Epoch 1250/2000 +2025-02-19 14:38:17,004 Current Learning Rate: 0.0085355339 +2025-02-19 14:38:17,004 Train Loss: 0.0001438, Val Loss: 0.0001934 +2025-02-19 14:38:17,005 Epoch 1251/2000 +2025-02-19 14:38:58,967 Current Learning 
Rate: 0.0084795640 +2025-02-19 14:38:58,968 Train Loss: 0.0001941, Val Loss: 0.0002212 +2025-02-19 14:38:58,968 Epoch 1252/2000 +2025-02-19 14:39:40,653 Current Learning Rate: 0.0084227355 +2025-02-19 14:39:40,653 Train Loss: 0.0002531, Val Loss: 0.0002182 +2025-02-19 14:39:40,654 Epoch 1253/2000 +2025-02-19 14:40:23,652 Current Learning Rate: 0.0083650626 +2025-02-19 14:40:23,652 Train Loss: 0.0001698, Val Loss: 0.0001972 +2025-02-19 14:40:23,653 Epoch 1254/2000 +2025-02-19 14:41:06,354 Current Learning Rate: 0.0083065593 +2025-02-19 14:41:06,354 Train Loss: 0.0002032, Val Loss: 0.0002247 +2025-02-19 14:41:06,355 Epoch 1255/2000 +2025-02-19 14:41:47,719 Current Learning Rate: 0.0082472402 +2025-02-19 14:41:47,720 Train Loss: 0.0002335, Val Loss: 0.0002256 +2025-02-19 14:41:47,720 Epoch 1256/2000 +2025-02-19 14:42:29,822 Current Learning Rate: 0.0081871199 +2025-02-19 14:42:29,822 Train Loss: 0.0002612, Val Loss: 0.0002806 +2025-02-19 14:42:29,822 Epoch 1257/2000 +2025-02-19 14:43:12,174 Current Learning Rate: 0.0081262133 +2025-02-19 14:43:12,175 Train Loss: 0.0002230, Val Loss: 0.0002670 +2025-02-19 14:43:12,175 Epoch 1258/2000 +2025-02-19 14:43:54,104 Current Learning Rate: 0.0080645353 +2025-02-19 14:43:54,104 Train Loss: 0.0003239, Val Loss: 0.0003028 +2025-02-19 14:43:54,105 Epoch 1259/2000 +2025-02-19 14:44:37,143 Current Learning Rate: 0.0080021011 +2025-02-19 14:44:37,144 Train Loss: 0.0002129, Val Loss: 0.0002132 +2025-02-19 14:44:37,144 Epoch 1260/2000 +2025-02-19 14:45:19,598 Current Learning Rate: 0.0079389263 +2025-02-19 14:45:19,598 Train Loss: 0.0002141, Val Loss: 0.0001820 +2025-02-19 14:45:19,598 Epoch 1261/2000 +2025-02-19 14:46:01,575 Current Learning Rate: 0.0078750263 +2025-02-19 14:46:01,576 Train Loss: 0.0002098, Val Loss: 0.0001806 +2025-02-19 14:46:01,576 Epoch 1262/2000 +2025-02-19 14:46:43,298 Current Learning Rate: 0.0078104169 +2025-02-19 14:46:43,298 Train Loss: 0.0001880, Val Loss: 0.0001751 +2025-02-19 14:46:43,299 Epoch 1263/2000 
+2025-02-19 14:47:25,302 Current Learning Rate: 0.0077451141 +2025-02-19 14:47:25,303 Train Loss: 0.0001728, Val Loss: 0.0001762 +2025-02-19 14:47:25,303 Epoch 1264/2000 +2025-02-19 14:48:08,146 Current Learning Rate: 0.0076791340 +2025-02-19 14:48:08,147 Train Loss: 0.0001760, Val Loss: 0.0001795 +2025-02-19 14:48:08,147 Epoch 1265/2000 +2025-02-19 14:48:49,767 Current Learning Rate: 0.0076124928 +2025-02-19 14:48:49,767 Train Loss: 0.0001897, Val Loss: 0.0001736 +2025-02-19 14:48:49,767 Epoch 1266/2000 +2025-02-19 14:49:31,769 Current Learning Rate: 0.0075452071 +2025-02-19 14:49:31,769 Train Loss: 0.0001857, Val Loss: 0.0001612 +2025-02-19 14:49:31,770 Epoch 1267/2000 +2025-02-19 14:50:14,141 Current Learning Rate: 0.0074772933 +2025-02-19 14:50:14,142 Train Loss: 0.0001976, Val Loss: 0.0001655 +2025-02-19 14:50:14,142 Epoch 1268/2000 +2025-02-19 14:50:56,299 Current Learning Rate: 0.0074087684 +2025-02-19 14:50:56,300 Train Loss: 0.0001423, Val Loss: 0.0001640 +2025-02-19 14:50:56,300 Epoch 1269/2000 +2025-02-19 14:51:38,303 Current Learning Rate: 0.0073396491 +2025-02-19 14:51:38,304 Train Loss: 0.0001533, Val Loss: 0.0001562 +2025-02-19 14:51:38,304 Epoch 1270/2000 +2025-02-19 14:52:20,150 Current Learning Rate: 0.0072699525 +2025-02-19 14:52:20,151 Train Loss: 0.0001959, Val Loss: 0.0001558 +2025-02-19 14:52:20,151 Epoch 1271/2000 +2025-02-19 14:53:02,301 Current Learning Rate: 0.0071996958 +2025-02-19 14:53:02,302 Train Loss: 0.0001043, Val Loss: 0.0001599 +2025-02-19 14:53:02,302 Epoch 1272/2000 +2025-02-19 14:53:44,521 Current Learning Rate: 0.0071288965 +2025-02-19 14:53:44,522 Train Loss: 0.0001563, Val Loss: 0.0001712 +2025-02-19 14:53:44,522 Epoch 1273/2000 +2025-02-19 14:54:27,558 Current Learning Rate: 0.0070575718 +2025-02-19 14:54:27,559 Train Loss: 0.0001351, Val Loss: 0.0002030 +2025-02-19 14:54:27,559 Epoch 1274/2000 +2025-02-19 14:55:09,250 Current Learning Rate: 0.0069857395 +2025-02-19 14:55:09,251 Train Loss: 0.0001447, Val Loss: 0.0002487 
+2025-02-19 14:55:09,251 Epoch 1275/2000 +2025-02-19 14:55:51,747 Current Learning Rate: 0.0069134172 +2025-02-19 14:55:51,748 Train Loss: 0.0001861, Val Loss: 0.0002347 +2025-02-19 14:55:51,748 Epoch 1276/2000 +2025-02-19 14:56:34,216 Current Learning Rate: 0.0068406228 +2025-02-19 14:56:34,217 Train Loss: 0.0002019, Val Loss: 0.0002097 +2025-02-19 14:56:34,217 Epoch 1277/2000 +2025-02-19 14:57:16,178 Current Learning Rate: 0.0067673742 +2025-02-19 14:57:16,179 Train Loss: 0.0001914, Val Loss: 0.0001597 +2025-02-19 14:57:16,179 Epoch 1278/2000 +2025-02-19 14:57:59,065 Current Learning Rate: 0.0066936896 +2025-02-19 14:57:59,066 Train Loss: 0.0001264, Val Loss: 0.0001663 +2025-02-19 14:57:59,066 Epoch 1279/2000 +2025-02-19 14:58:41,519 Current Learning Rate: 0.0066195871 +2025-02-19 14:58:41,520 Train Loss: 0.0001531, Val Loss: 0.0001602 +2025-02-19 14:58:41,520 Epoch 1280/2000 +2025-02-19 14:59:24,202 Current Learning Rate: 0.0065450850 +2025-02-19 14:59:24,203 Train Loss: 0.0001023, Val Loss: 0.0001550 +2025-02-19 14:59:24,203 Epoch 1281/2000 +2025-02-19 15:00:05,883 Current Learning Rate: 0.0064702016 +2025-02-19 15:00:05,884 Train Loss: 0.0001593, Val Loss: 0.0001616 +2025-02-19 15:00:05,885 Epoch 1282/2000 +2025-02-19 15:00:48,444 Current Learning Rate: 0.0063949555 +2025-02-19 15:00:48,450 Train Loss: 0.0001550, Val Loss: 0.0001692 +2025-02-19 15:00:48,450 Epoch 1283/2000 +2025-02-19 15:01:30,681 Current Learning Rate: 0.0063193652 +2025-02-19 15:01:30,682 Train Loss: 0.0001356, Val Loss: 0.0001626 +2025-02-19 15:01:30,682 Epoch 1284/2000 +2025-02-19 15:02:12,972 Current Learning Rate: 0.0062434494 +2025-02-19 15:02:12,973 Train Loss: 0.0001633, Val Loss: 0.0001696 +2025-02-19 15:02:12,973 Epoch 1285/2000 +2025-02-19 15:02:55,037 Current Learning Rate: 0.0061672268 +2025-02-19 15:02:55,037 Train Loss: 0.0001517, Val Loss: 0.0001664 +2025-02-19 15:02:55,038 Epoch 1286/2000 +2025-02-19 15:03:37,228 Current Learning Rate: 0.0060907162 +2025-02-19 15:03:37,239 
Train Loss: 0.0001675, Val Loss: 0.0001654 +2025-02-19 15:03:37,239 Epoch 1287/2000 +2025-02-19 15:04:19,817 Current Learning Rate: 0.0060139365 +2025-02-19 15:04:19,817 Train Loss: 0.0001338, Val Loss: 0.0001598 +2025-02-19 15:04:19,818 Epoch 1288/2000 +2025-02-19 15:05:01,818 Current Learning Rate: 0.0059369066 +2025-02-19 15:05:01,818 Train Loss: 0.0001226, Val Loss: 0.0001605 +2025-02-19 15:05:01,819 Epoch 1289/2000 +2025-02-19 15:05:44,036 Current Learning Rate: 0.0058596455 +2025-02-19 15:05:44,036 Train Loss: 0.0001697, Val Loss: 0.0001533 +2025-02-19 15:05:44,037 Epoch 1290/2000 +2025-02-19 15:06:26,676 Current Learning Rate: 0.0057821723 +2025-02-19 15:06:26,677 Train Loss: 0.0001730, Val Loss: 0.0001599 +2025-02-19 15:06:26,677 Epoch 1291/2000 +2025-02-19 15:07:08,796 Current Learning Rate: 0.0057045062 +2025-02-19 15:07:08,796 Train Loss: 0.0001752, Val Loss: 0.0001701 +2025-02-19 15:07:08,797 Epoch 1292/2000 +2025-02-19 15:07:51,033 Current Learning Rate: 0.0056266662 +2025-02-19 15:07:51,034 Train Loss: 0.0001393, Val Loss: 0.0001760 +2025-02-19 15:07:51,034 Epoch 1293/2000 +2025-02-19 15:08:33,614 Current Learning Rate: 0.0055486716 +2025-02-19 15:08:33,615 Train Loss: 0.0001784, Val Loss: 0.0001566 +2025-02-19 15:08:33,616 Epoch 1294/2000 +2025-02-19 15:09:16,285 Current Learning Rate: 0.0054705416 +2025-02-19 15:09:16,286 Train Loss: 0.0001447, Val Loss: 0.0001444 +2025-02-19 15:09:16,287 Epoch 1295/2000 +2025-02-19 15:09:57,881 Current Learning Rate: 0.0053922955 +2025-02-19 15:09:57,881 Train Loss: 0.0001446, Val Loss: 0.0001499 +2025-02-19 15:09:57,881 Epoch 1296/2000 +2025-02-19 15:10:40,077 Current Learning Rate: 0.0053139526 +2025-02-19 15:10:40,078 Train Loss: 0.0001422, Val Loss: 0.0001442 +2025-02-19 15:10:40,078 Epoch 1297/2000 +2025-02-19 15:11:22,097 Current Learning Rate: 0.0052355323 +2025-02-19 15:11:23,630 Train Loss: 0.0000999, Val Loss: 0.0001394 +2025-02-19 15:11:23,631 Epoch 1298/2000 +2025-02-19 15:12:05,026 Current Learning 
Rate: 0.0051570538 +2025-02-19 15:12:05,026 Train Loss: 0.0001616, Val Loss: 0.0001418 +2025-02-19 15:12:05,027 Epoch 1299/2000 +2025-02-19 15:12:47,538 Current Learning Rate: 0.0050785366 +2025-02-19 15:12:47,539 Train Loss: 0.0001598, Val Loss: 0.0001418 +2025-02-19 15:12:47,539 Epoch 1300/2000 +2025-02-19 15:13:29,875 Current Learning Rate: 0.0050000000 +2025-02-19 15:13:29,875 Train Loss: 0.0001950, Val Loss: 0.0001481 +2025-02-19 15:13:29,875 Epoch 1301/2000 +2025-02-19 15:14:12,901 Current Learning Rate: 0.0049214634 +2025-02-19 15:14:12,912 Train Loss: 0.0001567, Val Loss: 0.0001425 +2025-02-19 15:14:12,912 Epoch 1302/2000 +2025-02-19 15:14:55,302 Current Learning Rate: 0.0048429462 +2025-02-19 15:14:56,694 Train Loss: 0.0001590, Val Loss: 0.0001347 +2025-02-19 15:14:56,699 Epoch 1303/2000 +2025-02-19 15:15:38,326 Current Learning Rate: 0.0047644677 +2025-02-19 15:15:39,919 Train Loss: 0.0001235, Val Loss: 0.0001329 +2025-02-19 15:15:39,919 Epoch 1304/2000 +2025-02-19 15:16:22,046 Current Learning Rate: 0.0046860474 +2025-02-19 15:16:22,047 Train Loss: 0.0001188, Val Loss: 0.0001347 +2025-02-19 15:16:22,047 Epoch 1305/2000 +2025-02-19 15:17:03,518 Current Learning Rate: 0.0046077045 +2025-02-19 15:17:03,519 Train Loss: 0.0001559, Val Loss: 0.0001340 +2025-02-19 15:17:03,519 Epoch 1306/2000 +2025-02-19 15:17:46,052 Current Learning Rate: 0.0045294584 +2025-02-19 15:17:47,607 Train Loss: 0.0000937, Val Loss: 0.0001328 +2025-02-19 15:17:47,614 Epoch 1307/2000 +2025-02-19 15:18:28,557 Current Learning Rate: 0.0044513284 +2025-02-19 15:18:29,890 Train Loss: 0.0001219, Val Loss: 0.0001326 +2025-02-19 15:18:29,893 Epoch 1308/2000 +2025-02-19 15:19:11,085 Current Learning Rate: 0.0043733338 +2025-02-19 15:19:11,086 Train Loss: 0.0001128, Val Loss: 0.0001328 +2025-02-19 15:19:11,086 Epoch 1309/2000 +2025-02-19 15:19:52,730 Current Learning Rate: 0.0042954938 +2025-02-19 15:19:53,738 Train Loss: 0.0001043, Val Loss: 0.0001326 +2025-02-19 15:19:53,740 Epoch 1310/2000 
+2025-02-19 15:20:35,087 Current Learning Rate: 0.0042178277 +2025-02-19 15:20:35,087 Train Loss: 0.0001450, Val Loss: 0.0001373 +2025-02-19 15:20:35,087 Epoch 1311/2000 +2025-02-19 15:21:17,207 Current Learning Rate: 0.0041403545 +2025-02-19 15:21:17,207 Train Loss: 0.0001891, Val Loss: 0.0001893 +2025-02-19 15:21:17,208 Epoch 1312/2000 +2025-02-19 15:21:59,140 Current Learning Rate: 0.0040630934 +2025-02-19 15:21:59,141 Train Loss: 0.0002018, Val Loss: 0.0001486 +2025-02-19 15:21:59,141 Epoch 1313/2000 +2025-02-19 15:22:41,507 Current Learning Rate: 0.0039860635 +2025-02-19 15:22:41,508 Train Loss: 0.0001436, Val Loss: 0.0001563 +2025-02-19 15:22:41,508 Epoch 1314/2000 +2025-02-19 15:23:24,040 Current Learning Rate: 0.0039092838 +2025-02-19 15:23:24,040 Train Loss: 0.0000862, Val Loss: 0.0001427 +2025-02-19 15:23:24,040 Epoch 1315/2000 +2025-02-19 15:24:06,442 Current Learning Rate: 0.0038327732 +2025-02-19 15:24:06,443 Train Loss: 0.0001201, Val Loss: 0.0001440 +2025-02-19 15:24:06,443 Epoch 1316/2000 +2025-02-19 15:24:48,994 Current Learning Rate: 0.0037565506 +2025-02-19 15:24:48,995 Train Loss: 0.0001298, Val Loss: 0.0001451 +2025-02-19 15:24:48,995 Epoch 1317/2000 +2025-02-19 15:25:31,437 Current Learning Rate: 0.0036806348 +2025-02-19 15:25:31,437 Train Loss: 0.0001040, Val Loss: 0.0001428 +2025-02-19 15:25:31,437 Epoch 1318/2000 +2025-02-19 15:26:13,002 Current Learning Rate: 0.0036050445 +2025-02-19 15:26:13,002 Train Loss: 0.0000885, Val Loss: 0.0001376 +2025-02-19 15:26:13,003 Epoch 1319/2000 +2025-02-19 15:26:55,710 Current Learning Rate: 0.0035297984 +2025-02-19 15:26:55,711 Train Loss: 0.0001409, Val Loss: 0.0001358 +2025-02-19 15:26:55,711 Epoch 1320/2000 +2025-02-19 15:27:37,978 Current Learning Rate: 0.0034549150 +2025-02-19 15:27:39,427 Train Loss: 0.0000917, Val Loss: 0.0001314 +2025-02-19 15:27:39,427 Epoch 1321/2000 +2025-02-19 15:28:21,540 Current Learning Rate: 0.0033804129 +2025-02-19 15:28:21,540 Train Loss: 0.0001164, Val Loss: 0.0001358 
+2025-02-19 15:28:21,541 Epoch 1322/2000 +2025-02-19 15:29:04,001 Current Learning Rate: 0.0033063104 +2025-02-19 15:29:04,002 Train Loss: 0.0001373, Val Loss: 0.0001435 +2025-02-19 15:29:04,002 Epoch 1323/2000 +2025-02-19 15:29:46,148 Current Learning Rate: 0.0032326258 +2025-02-19 15:29:46,148 Train Loss: 0.0001273, Val Loss: 0.0001345 +2025-02-19 15:29:46,148 Epoch 1324/2000 +2025-02-19 15:30:27,768 Current Learning Rate: 0.0031593772 +2025-02-19 15:30:29,088 Train Loss: 0.0001247, Val Loss: 0.0001293 +2025-02-19 15:30:29,088 Epoch 1325/2000 +2025-02-19 15:31:10,957 Current Learning Rate: 0.0030865828 +2025-02-19 15:31:10,958 Train Loss: 0.0001169, Val Loss: 0.0001295 +2025-02-19 15:31:10,958 Epoch 1326/2000 +2025-02-19 15:31:52,651 Current Learning Rate: 0.0030142605 +2025-02-19 15:31:52,651 Train Loss: 0.0001225, Val Loss: 0.0001309 +2025-02-19 15:31:52,651 Epoch 1327/2000 +2025-02-19 15:32:34,952 Current Learning Rate: 0.0029424282 +2025-02-19 15:32:34,952 Train Loss: 0.0001392, Val Loss: 0.0001311 +2025-02-19 15:32:34,953 Epoch 1328/2000 +2025-02-19 15:33:17,261 Current Learning Rate: 0.0028711035 +2025-02-19 15:33:17,264 Train Loss: 0.0001184, Val Loss: 0.0001309 +2025-02-19 15:33:17,264 Epoch 1329/2000 +2025-02-19 15:33:59,729 Current Learning Rate: 0.0028003042 +2025-02-19 15:33:59,730 Train Loss: 0.0001191, Val Loss: 0.0001308 +2025-02-19 15:33:59,730 Epoch 1330/2000 +2025-02-19 15:34:41,463 Current Learning Rate: 0.0027300475 +2025-02-19 15:34:43,001 Train Loss: 0.0001007, Val Loss: 0.0001280 +2025-02-19 15:34:43,002 Epoch 1331/2000 +2025-02-19 15:35:23,792 Current Learning Rate: 0.0026603509 +2025-02-19 15:35:24,895 Train Loss: 0.0001106, Val Loss: 0.0001262 +2025-02-19 15:35:24,895 Epoch 1332/2000 +2025-02-19 15:36:06,996 Current Learning Rate: 0.0025912316 +2025-02-19 15:36:08,831 Train Loss: 0.0001416, Val Loss: 0.0001256 +2025-02-19 15:36:08,831 Epoch 1333/2000 +2025-02-19 15:36:50,825 Current Learning Rate: 0.0025227067 +2025-02-19 15:36:52,310 
Train Loss: 0.0000984, Val Loss: 0.0001246 +2025-02-19 15:36:52,310 Epoch 1334/2000 +2025-02-19 15:37:33,835 Current Learning Rate: 0.0024547929 +2025-02-19 15:37:33,836 Train Loss: 0.0001013, Val Loss: 0.0001249 +2025-02-19 15:37:33,836 Epoch 1335/2000 +2025-02-19 15:38:15,796 Current Learning Rate: 0.0023875072 +2025-02-19 15:38:16,814 Train Loss: 0.0000885, Val Loss: 0.0001245 +2025-02-19 15:38:16,814 Epoch 1336/2000 +2025-02-19 15:38:57,660 Current Learning Rate: 0.0023208660 +2025-02-19 15:38:57,661 Train Loss: 0.0001159, Val Loss: 0.0001259 +2025-02-19 15:38:57,661 Epoch 1337/2000 +2025-02-19 15:39:39,686 Current Learning Rate: 0.0022548859 +2025-02-19 15:39:40,722 Train Loss: 0.0001217, Val Loss: 0.0001241 +2025-02-19 15:39:40,723 Epoch 1338/2000 +2025-02-19 15:40:22,759 Current Learning Rate: 0.0021895831 +2025-02-19 15:40:22,759 Train Loss: 0.0000876, Val Loss: 0.0001243 +2025-02-19 15:40:22,760 Epoch 1339/2000 +2025-02-19 15:41:04,522 Current Learning Rate: 0.0021249737 +2025-02-19 15:41:05,756 Train Loss: 0.0000952, Val Loss: 0.0001237 +2025-02-19 15:41:05,756 Epoch 1340/2000 +2025-02-19 15:41:46,626 Current Learning Rate: 0.0020610737 +2025-02-19 15:41:47,540 Train Loss: 0.0000898, Val Loss: 0.0001232 +2025-02-19 15:41:47,551 Epoch 1341/2000 +2025-02-19 15:42:29,378 Current Learning Rate: 0.0019978989 +2025-02-19 15:42:29,379 Train Loss: 0.0000887, Val Loss: 0.0001238 +2025-02-19 15:42:29,379 Epoch 1342/2000 +2025-02-19 15:43:11,834 Current Learning Rate: 0.0019354647 +2025-02-19 15:43:11,835 Train Loss: 0.0001085, Val Loss: 0.0001235 +2025-02-19 15:43:11,835 Epoch 1343/2000 +2025-02-19 15:43:53,637 Current Learning Rate: 0.0018737867 +2025-02-19 15:43:53,638 Train Loss: 0.0001296, Val Loss: 0.0001244 +2025-02-19 15:43:53,638 Epoch 1344/2000 +2025-02-19 15:44:36,461 Current Learning Rate: 0.0018128801 +2025-02-19 15:44:36,461 Train Loss: 0.0001335, Val Loss: 0.0001243 +2025-02-19 15:44:36,462 Epoch 1345/2000 +2025-02-19 15:45:18,446 Current Learning 
Rate: 0.0017527598 +2025-02-19 15:45:18,447 Train Loss: 0.0000764, Val Loss: 0.0001234 +2025-02-19 15:45:18,447 Epoch 1346/2000 +2025-02-19 15:46:00,334 Current Learning Rate: 0.0016934407 +2025-02-19 15:46:01,992 Train Loss: 0.0000998, Val Loss: 0.0001232 +2025-02-19 15:46:01,993 Epoch 1347/2000 +2025-02-19 15:46:43,257 Current Learning Rate: 0.0016349374 +2025-02-19 15:46:43,258 Train Loss: 0.0001458, Val Loss: 0.0001237 +2025-02-19 15:46:43,258 Epoch 1348/2000 +2025-02-19 15:47:25,064 Current Learning Rate: 0.0015772645 +2025-02-19 15:47:26,264 Train Loss: 0.0001109, Val Loss: 0.0001219 +2025-02-19 15:47:26,265 Epoch 1349/2000 +2025-02-19 15:48:08,058 Current Learning Rate: 0.0015204360 +2025-02-19 15:48:09,938 Train Loss: 0.0001044, Val Loss: 0.0001218 +2025-02-19 15:48:09,938 Epoch 1350/2000 +2025-02-19 15:48:52,139 Current Learning Rate: 0.0014644661 +2025-02-19 15:48:53,323 Train Loss: 0.0000976, Val Loss: 0.0001215 +2025-02-19 15:48:53,324 Epoch 1351/2000 +2025-02-19 15:49:35,531 Current Learning Rate: 0.0014093685 +2025-02-19 15:49:35,532 Train Loss: 0.0001519, Val Loss: 0.0001224 +2025-02-19 15:49:35,532 Epoch 1352/2000 +2025-02-19 15:50:17,551 Current Learning Rate: 0.0013551569 +2025-02-19 15:50:17,552 Train Loss: 0.0001506, Val Loss: 0.0001226 +2025-02-19 15:50:17,552 Epoch 1353/2000 +2025-02-19 15:50:59,219 Current Learning Rate: 0.0013018445 +2025-02-19 15:50:59,219 Train Loss: 0.0000748, Val Loss: 0.0001218 +2025-02-19 15:50:59,219 Epoch 1354/2000 +2025-02-19 15:51:40,941 Current Learning Rate: 0.0012494447 +2025-02-19 15:51:42,215 Train Loss: 0.0000969, Val Loss: 0.0001213 +2025-02-19 15:51:42,215 Epoch 1355/2000 +2025-02-19 15:52:24,326 Current Learning Rate: 0.0011979702 +2025-02-19 15:52:25,986 Train Loss: 0.0000863, Val Loss: 0.0001212 +2025-02-19 15:52:25,986 Epoch 1356/2000 +2025-02-19 15:53:07,320 Current Learning Rate: 0.0011474338 +2025-02-19 15:53:08,517 Train Loss: 0.0001006, Val Loss: 0.0001205 +2025-02-19 15:53:08,518 Epoch 1357/2000 
+2025-02-19 15:53:49,490 Current Learning Rate: 0.0010978480 +2025-02-19 15:53:50,690 Train Loss: 0.0000826, Val Loss: 0.0001202 +2025-02-19 15:53:50,691 Epoch 1358/2000 +2025-02-19 15:54:32,061 Current Learning Rate: 0.0010492249 +2025-02-19 15:54:32,062 Train Loss: 0.0001372, Val Loss: 0.0001206 +2025-02-19 15:54:32,067 Epoch 1359/2000 +2025-02-19 15:55:14,480 Current Learning Rate: 0.0010015767 +2025-02-19 15:55:15,617 Train Loss: 0.0000821, Val Loss: 0.0001201 +2025-02-19 15:55:15,617 Epoch 1360/2000 +2025-02-19 15:55:56,440 Current Learning Rate: 0.0009549150 +2025-02-19 15:55:56,441 Train Loss: 0.0001421, Val Loss: 0.0001202 +2025-02-19 15:55:56,441 Epoch 1361/2000 +2025-02-19 15:56:38,612 Current Learning Rate: 0.0009092514 +2025-02-19 15:56:39,841 Train Loss: 0.0000927, Val Loss: 0.0001199 +2025-02-19 15:56:39,841 Epoch 1362/2000 +2025-02-19 15:57:21,031 Current Learning Rate: 0.0008645971 +2025-02-19 15:57:22,180 Train Loss: 0.0001059, Val Loss: 0.0001199 +2025-02-19 15:57:22,180 Epoch 1363/2000 +2025-02-19 15:58:03,256 Current Learning Rate: 0.0008209632 +2025-02-19 15:58:04,532 Train Loss: 0.0001143, Val Loss: 0.0001199 +2025-02-19 15:58:04,532 Epoch 1364/2000 +2025-02-19 15:58:45,289 Current Learning Rate: 0.0007783604 +2025-02-19 15:58:46,375 Train Loss: 0.0001299, Val Loss: 0.0001197 +2025-02-19 15:58:46,378 Epoch 1365/2000 +2025-02-19 15:59:27,452 Current Learning Rate: 0.0007367992 +2025-02-19 15:59:29,305 Train Loss: 0.0000947, Val Loss: 0.0001196 +2025-02-19 15:59:29,305 Epoch 1366/2000 +2025-02-19 16:00:11,392 Current Learning Rate: 0.0006962899 +2025-02-19 16:00:11,393 Train Loss: 0.0001255, Val Loss: 0.0001196 +2025-02-19 16:00:11,393 Epoch 1367/2000 +2025-02-19 16:00:52,837 Current Learning Rate: 0.0006568424 +2025-02-19 16:00:54,068 Train Loss: 0.0001049, Val Loss: 0.0001196 +2025-02-19 16:00:54,070 Epoch 1368/2000 +2025-02-19 16:01:35,210 Current Learning Rate: 0.0006184666 +2025-02-19 16:01:36,377 Train Loss: 0.0001114, Val Loss: 0.0001195 
+2025-02-19 16:01:36,378 Epoch 1369/2000 +2025-02-19 16:02:17,223 Current Learning Rate: 0.0005811718 +2025-02-19 16:02:18,082 Train Loss: 0.0000842, Val Loss: 0.0001194 +2025-02-19 16:02:18,082 Epoch 1370/2000 +2025-02-19 16:03:00,238 Current Learning Rate: 0.0005449674 +2025-02-19 16:03:01,968 Train Loss: 0.0000936, Val Loss: 0.0001193 +2025-02-19 16:03:01,969 Epoch 1371/2000 +2025-02-19 16:03:43,999 Current Learning Rate: 0.0005098621 +2025-02-19 16:03:44,000 Train Loss: 0.0001087, Val Loss: 0.0001194 +2025-02-19 16:03:44,000 Epoch 1372/2000 +2025-02-19 16:04:25,523 Current Learning Rate: 0.0004758647 +2025-02-19 16:04:26,396 Train Loss: 0.0001008, Val Loss: 0.0001192 +2025-02-19 16:04:26,397 Epoch 1373/2000 +2025-02-19 16:05:07,589 Current Learning Rate: 0.0004429836 +2025-02-19 16:05:07,589 Train Loss: 0.0001405, Val Loss: 0.0001194 +2025-02-19 16:05:07,589 Epoch 1374/2000 +2025-02-19 16:05:50,419 Current Learning Rate: 0.0004112269 +2025-02-19 16:05:50,419 Train Loss: 0.0000970, Val Loss: 0.0001194 +2025-02-19 16:05:50,419 Epoch 1375/2000 +2025-02-19 16:06:32,273 Current Learning Rate: 0.0003806023 +2025-02-19 16:06:32,274 Train Loss: 0.0001238, Val Loss: 0.0001194 +2025-02-19 16:06:32,274 Epoch 1376/2000 +2025-02-19 16:07:15,355 Current Learning Rate: 0.0003511176 +2025-02-19 16:07:15,355 Train Loss: 0.0000958, Val Loss: 0.0001192 +2025-02-19 16:07:15,355 Epoch 1377/2000 +2025-02-19 16:07:57,583 Current Learning Rate: 0.0003227798 +2025-02-19 16:07:59,416 Train Loss: 0.0000765, Val Loss: 0.0001192 +2025-02-19 16:07:59,416 Epoch 1378/2000 +2025-02-19 16:08:41,646 Current Learning Rate: 0.0002955962 +2025-02-19 16:08:42,876 Train Loss: 0.0001222, Val Loss: 0.0001191 +2025-02-19 16:08:42,876 Epoch 1379/2000 +2025-02-19 16:09:23,544 Current Learning Rate: 0.0002695732 +2025-02-19 16:09:23,544 Train Loss: 0.0001250, Val Loss: 0.0001194 +2025-02-19 16:09:23,545 Epoch 1380/2000 +2025-02-19 16:10:05,602 Current Learning Rate: 0.0002447174 +2025-02-19 16:10:05,603 
Train Loss: 0.0000934, Val Loss: 0.0001192 +2025-02-19 16:10:05,603 Epoch 1381/2000 +2025-02-19 16:10:47,931 Current Learning Rate: 0.0002210349 +2025-02-19 16:10:49,694 Train Loss: 0.0000889, Val Loss: 0.0001191 +2025-02-19 16:10:49,695 Epoch 1382/2000 +2025-02-19 16:11:30,451 Current Learning Rate: 0.0001985316 +2025-02-19 16:11:30,451 Train Loss: 0.0000803, Val Loss: 0.0001191 +2025-02-19 16:11:30,452 Epoch 1383/2000 +2025-02-19 16:12:13,032 Current Learning Rate: 0.0001772129 +2025-02-19 16:12:13,032 Train Loss: 0.0000820, Val Loss: 0.0001192 +2025-02-19 16:12:13,033 Epoch 1384/2000 +2025-02-19 16:12:55,406 Current Learning Rate: 0.0001570842 +2025-02-19 16:12:55,407 Train Loss: 0.0001236, Val Loss: 0.0001191 +2025-02-19 16:12:55,407 Epoch 1385/2000 +2025-02-19 16:13:37,441 Current Learning Rate: 0.0001381504 +2025-02-19 16:13:37,447 Train Loss: 0.0001294, Val Loss: 0.0001192 +2025-02-19 16:13:37,447 Epoch 1386/2000 +2025-02-19 16:14:19,600 Current Learning Rate: 0.0001204162 +2025-02-19 16:14:19,602 Train Loss: 0.0000919, Val Loss: 0.0001192 +2025-02-19 16:14:19,602 Epoch 1387/2000 +2025-02-19 16:15:01,595 Current Learning Rate: 0.0001038859 +2025-02-19 16:15:01,595 Train Loss: 0.0001338, Val Loss: 0.0001191 +2025-02-19 16:15:01,595 Epoch 1388/2000 +2025-02-19 16:15:44,658 Current Learning Rate: 0.0000885637 +2025-02-19 16:15:44,667 Train Loss: 0.0001361, Val Loss: 0.0001191 +2025-02-19 16:15:44,667 Epoch 1389/2000 +2025-02-19 16:16:27,077 Current Learning Rate: 0.0000744534 +2025-02-19 16:16:27,078 Train Loss: 0.0000858, Val Loss: 0.0001191 +2025-02-19 16:16:27,079 Epoch 1390/2000 +2025-02-19 16:17:09,316 Current Learning Rate: 0.0000615583 +2025-02-19 16:17:11,174 Train Loss: 0.0001174, Val Loss: 0.0001190 +2025-02-19 16:17:11,174 Epoch 1391/2000 +2025-02-19 16:17:52,786 Current Learning Rate: 0.0000498817 +2025-02-19 16:17:52,787 Train Loss: 0.0001128, Val Loss: 0.0001191 +2025-02-19 16:17:52,787 Epoch 1392/2000 +2025-02-19 16:18:35,259 Current Learning 
Rate: 0.0000394265 +2025-02-19 16:18:35,260 Train Loss: 0.0001163, Val Loss: 0.0001191 +2025-02-19 16:18:35,261 Epoch 1393/2000 +2025-02-19 16:19:17,573 Current Learning Rate: 0.0000301952 +2025-02-19 16:19:17,573 Train Loss: 0.0000802, Val Loss: 0.0001191 +2025-02-19 16:19:17,573 Epoch 1394/2000 +2025-02-19 16:19:59,551 Current Learning Rate: 0.0000221902 +2025-02-19 16:19:59,551 Train Loss: 0.0001086, Val Loss: 0.0001191 +2025-02-19 16:19:59,551 Epoch 1395/2000 +2025-02-19 16:20:41,297 Current Learning Rate: 0.0000154133 +2025-02-19 16:20:41,297 Train Loss: 0.0001032, Val Loss: 0.0001191 +2025-02-19 16:20:41,298 Epoch 1396/2000 +2025-02-19 16:21:24,355 Current Learning Rate: 0.0000098664 +2025-02-19 16:21:24,356 Train Loss: 0.0000897, Val Loss: 0.0001191 +2025-02-19 16:21:24,356 Epoch 1397/2000 +2025-02-19 16:22:06,183 Current Learning Rate: 0.0000055506 +2025-02-19 16:22:07,228 Train Loss: 0.0001144, Val Loss: 0.0001190 +2025-02-19 16:22:07,228 Epoch 1398/2000 +2025-02-19 16:22:49,086 Current Learning Rate: 0.0000024672 +2025-02-19 16:22:49,087 Train Loss: 0.0000719, Val Loss: 0.0001191 +2025-02-19 16:22:49,087 Epoch 1399/2000 +2025-02-19 16:23:31,371 Current Learning Rate: 0.0000006168 +2025-02-19 16:23:31,372 Train Loss: 0.0000799, Val Loss: 0.0001191 +2025-02-19 16:23:31,372 Epoch 1400/2000 +2025-02-19 16:24:13,221 Current Learning Rate: 0.0000000000 +2025-02-19 16:24:14,858 Train Loss: 0.0001157, Val Loss: 0.0001189 +2025-02-19 16:24:14,858 Epoch 1401/2000 +2025-02-19 16:24:56,230 Current Learning Rate: 0.0000006168 +2025-02-19 16:24:56,231 Train Loss: 0.0001094, Val Loss: 0.0001190 +2025-02-19 16:24:56,231 Epoch 1402/2000 +2025-02-19 16:25:38,202 Current Learning Rate: 0.0000024672 +2025-02-19 16:25:38,206 Train Loss: 0.0000882, Val Loss: 0.0001190 +2025-02-19 16:25:38,206 Epoch 1403/2000 +2025-02-19 16:26:20,488 Current Learning Rate: 0.0000055506 +2025-02-19 16:26:20,489 Train Loss: 0.0000943, Val Loss: 0.0001190 +2025-02-19 16:26:20,489 Epoch 1404/2000 
+2025-02-19 16:27:02,561 Current Learning Rate: 0.0000098664 +2025-02-19 16:27:02,562 Train Loss: 0.0000810, Val Loss: 0.0001190 +2025-02-19 16:27:02,562 Epoch 1405/2000 +2025-02-19 16:27:45,409 Current Learning Rate: 0.0000154133 +2025-02-19 16:27:45,410 Train Loss: 0.0001105, Val Loss: 0.0001190 +2025-02-19 16:27:45,410 Epoch 1406/2000 +2025-02-19 16:28:26,979 Current Learning Rate: 0.0000221902 +2025-02-19 16:28:26,979 Train Loss: 0.0000935, Val Loss: 0.0001191 +2025-02-19 16:28:26,979 Epoch 1407/2000 +2025-02-19 16:29:09,573 Current Learning Rate: 0.0000301952 +2025-02-19 16:29:09,574 Train Loss: 0.0001118, Val Loss: 0.0001190 +2025-02-19 16:29:09,574 Epoch 1408/2000 +2025-02-19 16:29:51,455 Current Learning Rate: 0.0000394265 +2025-02-19 16:29:51,456 Train Loss: 0.0000923, Val Loss: 0.0001190 +2025-02-19 16:29:51,456 Epoch 1409/2000 +2025-02-19 16:30:33,071 Current Learning Rate: 0.0000498817 +2025-02-19 16:30:33,071 Train Loss: 0.0000916, Val Loss: 0.0001191 +2025-02-19 16:30:33,071 Epoch 1410/2000 +2025-02-19 16:31:14,838 Current Learning Rate: 0.0000615583 +2025-02-19 16:31:14,839 Train Loss: 0.0000832, Val Loss: 0.0001191 +2025-02-19 16:31:14,839 Epoch 1411/2000 +2025-02-19 16:31:56,842 Current Learning Rate: 0.0000744534 +2025-02-19 16:31:56,843 Train Loss: 0.0001007, Val Loss: 0.0001190 +2025-02-19 16:31:56,843 Epoch 1412/2000 +2025-02-19 16:32:38,829 Current Learning Rate: 0.0000885637 +2025-02-19 16:32:38,830 Train Loss: 0.0000846, Val Loss: 0.0001191 +2025-02-19 16:32:38,830 Epoch 1413/2000 +2025-02-19 16:33:20,763 Current Learning Rate: 0.0001038859 +2025-02-19 16:33:20,764 Train Loss: 0.0001026, Val Loss: 0.0001191 +2025-02-19 16:33:20,764 Epoch 1414/2000 +2025-02-19 16:34:02,884 Current Learning Rate: 0.0001204162 +2025-02-19 16:34:02,885 Train Loss: 0.0001121, Val Loss: 0.0001191 +2025-02-19 16:34:02,885 Epoch 1415/2000 +2025-02-19 16:34:44,586 Current Learning Rate: 0.0001381504 +2025-02-19 16:34:44,586 Train Loss: 0.0000729, Val Loss: 0.0001191 
+2025-02-19 16:34:44,586 Epoch 1416/2000 +2025-02-19 16:35:27,573 Current Learning Rate: 0.0001570842 +2025-02-19 16:35:27,573 Train Loss: 0.0000715, Val Loss: 0.0001190 +2025-02-19 16:35:27,574 Epoch 1417/2000 +2025-02-19 16:36:09,380 Current Learning Rate: 0.0001772129 +2025-02-19 16:36:09,380 Train Loss: 0.0001162, Val Loss: 0.0001191 +2025-02-19 16:36:09,381 Epoch 1418/2000 +2025-02-19 16:36:51,996 Current Learning Rate: 0.0001985316 +2025-02-19 16:36:51,996 Train Loss: 0.0000817, Val Loss: 0.0001190 +2025-02-19 16:36:51,996 Epoch 1419/2000 +2025-02-19 16:37:34,547 Current Learning Rate: 0.0002210349 +2025-02-19 16:37:34,548 Train Loss: 0.0000943, Val Loss: 0.0001190 +2025-02-19 16:37:34,549 Epoch 1420/2000 +2025-02-19 16:38:15,998 Current Learning Rate: 0.0002447174 +2025-02-19 16:38:15,998 Train Loss: 0.0001031, Val Loss: 0.0001191 +2025-02-19 16:38:15,999 Epoch 1421/2000 +2025-02-19 16:38:58,172 Current Learning Rate: 0.0002695732 +2025-02-19 16:38:59,343 Train Loss: 0.0001151, Val Loss: 0.0001189 +2025-02-19 16:38:59,343 Epoch 1422/2000 +2025-02-19 16:39:40,334 Current Learning Rate: 0.0002955962 +2025-02-19 16:39:40,335 Train Loss: 0.0000749, Val Loss: 0.0001190 +2025-02-19 16:39:40,335 Epoch 1423/2000 +2025-02-19 16:40:22,625 Current Learning Rate: 0.0003227798 +2025-02-19 16:40:22,625 Train Loss: 0.0000841, Val Loss: 0.0001190 +2025-02-19 16:40:22,625 Epoch 1424/2000 +2025-02-19 16:41:05,618 Current Learning Rate: 0.0003511176 +2025-02-19 16:41:05,619 Train Loss: 0.0001236, Val Loss: 0.0001191 +2025-02-19 16:41:05,619 Epoch 1425/2000 +2025-02-19 16:41:47,256 Current Learning Rate: 0.0003806023 +2025-02-19 16:41:47,256 Train Loss: 0.0001127, Val Loss: 0.0001189 +2025-02-19 16:41:47,256 Epoch 1426/2000 +2025-02-19 16:42:29,331 Current Learning Rate: 0.0004112269 +2025-02-19 16:42:29,331 Train Loss: 0.0001015, Val Loss: 0.0001192 +2025-02-19 16:42:29,331 Epoch 1427/2000 +2025-02-19 16:43:11,990 Current Learning Rate: 0.0004429836 +2025-02-19 16:43:11,991 
Train Loss: 0.0001127, Val Loss: 0.0001192 +2025-02-19 16:43:11,991 Epoch 1428/2000 +2025-02-19 16:43:53,986 Current Learning Rate: 0.0004758647 +2025-02-19 16:43:53,987 Train Loss: 0.0000994, Val Loss: 0.0001190 +2025-02-19 16:43:53,988 Epoch 1429/2000 +2025-02-19 16:44:35,825 Current Learning Rate: 0.0005098621 +2025-02-19 16:44:35,825 Train Loss: 0.0000883, Val Loss: 0.0001191 +2025-02-19 16:44:35,826 Epoch 1430/2000 +2025-02-19 16:45:19,564 Current Learning Rate: 0.0005449674 +2025-02-19 16:45:19,565 Train Loss: 0.0000950, Val Loss: 0.0001191 +2025-02-19 16:45:19,565 Epoch 1431/2000 +2025-02-19 16:46:01,243 Current Learning Rate: 0.0005811718 +2025-02-19 16:46:01,243 Train Loss: 0.0001132, Val Loss: 0.0001193 +2025-02-19 16:46:01,244 Epoch 1432/2000 +2025-02-19 16:46:43,261 Current Learning Rate: 0.0006184666 +2025-02-19 16:46:43,262 Train Loss: 0.0001215, Val Loss: 0.0001194 +2025-02-19 16:46:43,262 Epoch 1433/2000 +2025-02-19 16:47:26,274 Current Learning Rate: 0.0006568424 +2025-02-19 16:47:26,275 Train Loss: 0.0000967, Val Loss: 0.0001194 +2025-02-19 16:47:26,275 Epoch 1434/2000 +2025-02-19 16:48:07,973 Current Learning Rate: 0.0006962899 +2025-02-19 16:48:07,974 Train Loss: 0.0000890, Val Loss: 0.0001192 +2025-02-19 16:48:07,974 Epoch 1435/2000 +2025-02-19 16:48:50,342 Current Learning Rate: 0.0007367992 +2025-02-19 16:48:50,343 Train Loss: 0.0001034, Val Loss: 0.0001191 +2025-02-19 16:48:50,343 Epoch 1436/2000 +2025-02-19 16:49:32,274 Current Learning Rate: 0.0007783604 +2025-02-19 16:49:32,274 Train Loss: 0.0001109, Val Loss: 0.0001194 +2025-02-19 16:49:32,275 Epoch 1437/2000 +2025-02-19 16:50:14,502 Current Learning Rate: 0.0008209632 +2025-02-19 16:50:14,502 Train Loss: 0.0000890, Val Loss: 0.0001192 +2025-02-19 16:50:14,503 Epoch 1438/2000 +2025-02-19 16:50:56,494 Current Learning Rate: 0.0008645971 +2025-02-19 16:50:56,495 Train Loss: 0.0001386, Val Loss: 0.0001203 +2025-02-19 16:50:56,496 Epoch 1439/2000 +2025-02-19 16:51:38,446 Current Learning 
Rate: 0.0009092514 +2025-02-19 16:51:38,447 Train Loss: 0.0000731, Val Loss: 0.0001191 +2025-02-19 16:51:38,447 Epoch 1440/2000 +2025-02-19 16:52:20,620 Current Learning Rate: 0.0009549150 +2025-02-19 16:52:20,620 Train Loss: 0.0000895, Val Loss: 0.0001192 +2025-02-19 16:52:20,621 Epoch 1441/2000 +2025-02-19 16:53:02,789 Current Learning Rate: 0.0010015767 +2025-02-19 16:53:02,789 Train Loss: 0.0001377, Val Loss: 0.0001201 +2025-02-19 16:53:02,789 Epoch 1442/2000 +2025-02-19 16:53:45,118 Current Learning Rate: 0.0010492249 +2025-02-19 16:53:45,119 Train Loss: 0.0000921, Val Loss: 0.0001199 +2025-02-19 16:53:45,119 Epoch 1443/2000 +2025-02-19 16:54:28,228 Current Learning Rate: 0.0010978480 +2025-02-19 16:54:28,229 Train Loss: 0.0001144, Val Loss: 0.0001202 +2025-02-19 16:54:28,229 Epoch 1444/2000 +2025-02-19 16:55:10,581 Current Learning Rate: 0.0011474338 +2025-02-19 16:55:10,581 Train Loss: 0.0001073, Val Loss: 0.0001211 +2025-02-19 16:55:10,581 Epoch 1445/2000 +2025-02-19 16:55:52,004 Current Learning Rate: 0.0011979702 +2025-02-19 16:55:52,004 Train Loss: 0.0001148, Val Loss: 0.0001209 +2025-02-19 16:55:52,004 Epoch 1446/2000 +2025-02-19 16:56:34,334 Current Learning Rate: 0.0012494447 +2025-02-19 16:56:34,335 Train Loss: 0.0001173, Val Loss: 0.0001208 +2025-02-19 16:56:34,335 Epoch 1447/2000 +2025-02-19 16:57:16,909 Current Learning Rate: 0.0013018445 +2025-02-19 16:57:16,909 Train Loss: 0.0001053, Val Loss: 0.0001203 +2025-02-19 16:57:16,910 Epoch 1448/2000 +2025-02-19 16:57:58,393 Current Learning Rate: 0.0013551569 +2025-02-19 16:57:58,394 Train Loss: 0.0000728, Val Loss: 0.0001195 +2025-02-19 16:57:58,394 Epoch 1449/2000 +2025-02-19 16:58:41,719 Current Learning Rate: 0.0014093685 +2025-02-19 16:58:41,720 Train Loss: 0.0000928, Val Loss: 0.0001197 +2025-02-19 16:58:41,720 Epoch 1450/2000 +2025-02-19 16:59:23,931 Current Learning Rate: 0.0014644661 +2025-02-19 16:59:23,932 Train Loss: 0.0001132, Val Loss: 0.0001208 +2025-02-19 16:59:23,932 Epoch 1451/2000 
+2025-02-19 17:00:05,933 Current Learning Rate: 0.0015204360 +2025-02-19 17:00:05,934 Train Loss: 0.0001174, Val Loss: 0.0001221 +2025-02-19 17:00:05,934 Epoch 1452/2000 +2025-02-19 17:00:48,882 Current Learning Rate: 0.0015772645 +2025-02-19 17:00:48,883 Train Loss: 0.0000943, Val Loss: 0.0001211 +2025-02-19 17:00:48,883 Epoch 1453/2000 +2025-02-19 17:01:30,544 Current Learning Rate: 0.0016349374 +2025-02-19 17:01:30,545 Train Loss: 0.0000893, Val Loss: 0.0001197 +2025-02-19 17:01:30,545 Epoch 1454/2000 +2025-02-19 17:02:12,475 Current Learning Rate: 0.0016934407 +2025-02-19 17:02:12,476 Train Loss: 0.0000771, Val Loss: 0.0001198 +2025-02-19 17:02:12,476 Epoch 1455/2000 +2025-02-19 17:02:54,694 Current Learning Rate: 0.0017527598 +2025-02-19 17:02:54,695 Train Loss: 0.0001178, Val Loss: 0.0001206 +2025-02-19 17:02:54,695 Epoch 1456/2000 +2025-02-19 17:03:36,724 Current Learning Rate: 0.0018128801 +2025-02-19 17:03:36,725 Train Loss: 0.0000820, Val Loss: 0.0001206 +2025-02-19 17:03:36,725 Epoch 1457/2000 +2025-02-19 17:04:19,594 Current Learning Rate: 0.0018737867 +2025-02-19 17:04:19,595 Train Loss: 0.0001042, Val Loss: 0.0001248 +2025-02-19 17:04:19,599 Epoch 1458/2000 +2025-02-19 17:05:01,210 Current Learning Rate: 0.0019354647 +2025-02-19 17:05:01,210 Train Loss: 0.0001173, Val Loss: 0.0001210 +2025-02-19 17:05:01,210 Epoch 1459/2000 +2025-02-19 17:05:43,544 Current Learning Rate: 0.0019978989 +2025-02-19 17:05:43,545 Train Loss: 0.0001216, Val Loss: 0.0001220 +2025-02-19 17:05:43,545 Epoch 1460/2000 +2025-02-19 17:06:26,160 Current Learning Rate: 0.0020610737 +2025-02-19 17:06:26,160 Train Loss: 0.0001093, Val Loss: 0.0001217 +2025-02-19 17:06:26,161 Epoch 1461/2000 +2025-02-19 17:07:08,419 Current Learning Rate: 0.0021249737 +2025-02-19 17:07:08,419 Train Loss: 0.0000820, Val Loss: 0.0001235 +2025-02-19 17:07:08,419 Epoch 1462/2000 +2025-02-19 17:07:50,778 Current Learning Rate: 0.0021895831 +2025-02-19 17:07:50,778 Train Loss: 0.0001120, Val Loss: 0.0001212 
+2025-02-19 17:07:50,778 Epoch 1463/2000 +2025-02-19 17:08:33,078 Current Learning Rate: 0.0022548859 +2025-02-19 17:08:33,078 Train Loss: 0.0000893, Val Loss: 0.0001210 +2025-02-19 17:08:33,078 Epoch 1464/2000 +2025-02-19 17:09:15,352 Current Learning Rate: 0.0023208660 +2025-02-19 17:09:15,352 Train Loss: 0.0000856, Val Loss: 0.0001217 +2025-02-19 17:09:15,352 Epoch 1465/2000 +2025-02-19 17:09:57,060 Current Learning Rate: 0.0023875072 +2025-02-19 17:09:57,061 Train Loss: 0.0001199, Val Loss: 0.0001278 +2025-02-19 17:09:57,061 Epoch 1466/2000 +2025-02-19 17:10:39,603 Current Learning Rate: 0.0024547929 +2025-02-19 17:10:39,604 Train Loss: 0.0000789, Val Loss: 0.0001202 +2025-02-19 17:10:39,604 Epoch 1467/2000 +2025-02-19 17:11:21,961 Current Learning Rate: 0.0025227067 +2025-02-19 17:11:21,962 Train Loss: 0.0001280, Val Loss: 0.0001321 +2025-02-19 17:11:21,962 Epoch 1468/2000 +2025-02-19 17:12:04,223 Current Learning Rate: 0.0025912316 +2025-02-19 17:12:04,223 Train Loss: 0.0000986, Val Loss: 0.0001311 +2025-02-19 17:12:04,224 Epoch 1469/2000 +2025-02-19 17:12:45,967 Current Learning Rate: 0.0026603509 +2025-02-19 17:12:45,967 Train Loss: 0.0000862, Val Loss: 0.0001241 +2025-02-19 17:12:45,968 Epoch 1470/2000 +2025-02-19 17:13:28,395 Current Learning Rate: 0.0027300475 +2025-02-19 17:13:28,396 Train Loss: 0.0001116, Val Loss: 0.0001296 +2025-02-19 17:13:28,396 Epoch 1471/2000 +2025-02-19 17:14:09,679 Current Learning Rate: 0.0028003042 +2025-02-19 17:14:09,680 Train Loss: 0.0001095, Val Loss: 0.0001311 +2025-02-19 17:14:09,680 Epoch 1472/2000 +2025-02-19 17:14:51,363 Current Learning Rate: 0.0028711035 +2025-02-19 17:14:51,364 Train Loss: 0.0001069, Val Loss: 0.0001238 +2025-02-19 17:14:51,364 Epoch 1473/2000 +2025-02-19 17:15:33,949 Current Learning Rate: 0.0029424282 +2025-02-19 17:15:33,950 Train Loss: 0.0000937, Val Loss: 0.0001303 +2025-02-19 17:15:33,950 Epoch 1474/2000 +2025-02-19 17:16:17,005 Current Learning Rate: 0.0030142605 +2025-02-19 17:16:17,005 
Train Loss: 0.0001296, Val Loss: 0.0001364 +2025-02-19 17:16:17,005 Epoch 1475/2000 +2025-02-19 17:16:58,757 Current Learning Rate: 0.0030865828 +2025-02-19 17:16:58,758 Train Loss: 0.0001505, Val Loss: 0.0001439 +2025-02-19 17:16:58,758 Epoch 1476/2000 +2025-02-19 17:17:40,608 Current Learning Rate: 0.0031593772 +2025-02-19 17:17:40,608 Train Loss: 0.0001044, Val Loss: 0.0001274 +2025-02-19 17:17:40,608 Epoch 1477/2000 +2025-02-19 17:18:22,569 Current Learning Rate: 0.0032326258 +2025-02-19 17:18:22,570 Train Loss: 0.0000948, Val Loss: 0.0001331 +2025-02-19 17:18:22,570 Epoch 1478/2000 +2025-02-19 17:19:04,738 Current Learning Rate: 0.0033063104 +2025-02-19 17:19:04,738 Train Loss: 0.0001241, Val Loss: 0.0001262 +2025-02-19 17:19:04,739 Epoch 1479/2000 +2025-02-19 17:19:47,540 Current Learning Rate: 0.0033804129 +2025-02-19 17:19:47,541 Train Loss: 0.0000965, Val Loss: 0.0001258 +2025-02-19 17:19:47,541 Epoch 1480/2000 +2025-02-19 17:20:29,848 Current Learning Rate: 0.0034549150 +2025-02-19 17:20:29,849 Train Loss: 0.0001531, Val Loss: 0.0001357 +2025-02-19 17:20:29,849 Epoch 1481/2000 +2025-02-19 17:21:12,481 Current Learning Rate: 0.0035297984 +2025-02-19 17:21:12,481 Train Loss: 0.0000939, Val Loss: 0.0001235 +2025-02-19 17:21:12,482 Epoch 1482/2000 +2025-02-19 17:21:54,900 Current Learning Rate: 0.0036050445 +2025-02-19 17:21:54,900 Train Loss: 0.0001037, Val Loss: 0.0001242 +2025-02-19 17:21:54,900 Epoch 1483/2000 +2025-02-19 17:22:36,876 Current Learning Rate: 0.0036806348 +2025-02-19 17:22:36,876 Train Loss: 0.0000954, Val Loss: 0.0001290 +2025-02-19 17:22:36,877 Epoch 1484/2000 +2025-02-19 17:23:19,488 Current Learning Rate: 0.0037565506 +2025-02-19 17:23:19,489 Train Loss: 0.0001024, Val Loss: 0.0001284 +2025-02-19 17:23:19,489 Epoch 1485/2000 +2025-02-19 17:24:01,894 Current Learning Rate: 0.0038327732 +2025-02-19 17:24:01,895 Train Loss: 0.0001322, Val Loss: 0.0001283 +2025-02-19 17:24:01,895 Epoch 1486/2000 +2025-02-19 17:24:43,530 Current Learning 
Rate: 0.0039092838 +2025-02-19 17:24:43,531 Train Loss: 0.0002190, Val Loss: 0.0001564 +2025-02-19 17:24:43,531 Epoch 1487/2000 +2025-02-19 17:25:25,473 Current Learning Rate: 0.0039860635 +2025-02-19 17:25:25,473 Train Loss: 0.0001268, Val Loss: 0.0001408 +2025-02-19 17:25:25,473 Epoch 1488/2000 +2025-02-19 17:26:07,953 Current Learning Rate: 0.0040630934 +2025-02-19 17:26:07,953 Train Loss: 0.0001426, Val Loss: 0.0001351 +2025-02-19 17:26:07,953 Epoch 1489/2000 +2025-02-19 17:26:50,332 Current Learning Rate: 0.0041403545 +2025-02-19 17:26:50,332 Train Loss: 0.0001284, Val Loss: 0.0001572 +2025-02-19 17:26:50,332 Epoch 1490/2000 +2025-02-19 17:27:32,410 Current Learning Rate: 0.0042178277 +2025-02-19 17:27:32,410 Train Loss: 0.0001877, Val Loss: 0.0001710 +2025-02-19 17:27:32,410 Epoch 1491/2000 +2025-02-19 17:28:14,236 Current Learning Rate: 0.0042954938 +2025-02-19 17:28:14,236 Train Loss: 0.0001294, Val Loss: 0.0001446 +2025-02-19 17:28:14,236 Epoch 1492/2000 +2025-02-19 17:28:56,477 Current Learning Rate: 0.0043733338 +2025-02-19 17:28:56,478 Train Loss: 0.0000884, Val Loss: 0.0001337 +2025-02-19 17:28:56,478 Epoch 1493/2000 +2025-02-19 17:29:38,680 Current Learning Rate: 0.0044513284 +2025-02-19 17:29:38,681 Train Loss: 0.0001209, Val Loss: 0.0001406 +2025-02-19 17:29:38,681 Epoch 1494/2000 +2025-02-19 17:30:20,670 Current Learning Rate: 0.0045294584 +2025-02-19 17:30:20,670 Train Loss: 0.0001164, Val Loss: 0.0001495 +2025-02-19 17:30:20,671 Epoch 1495/2000 +2025-02-19 17:31:02,902 Current Learning Rate: 0.0046077045 +2025-02-19 17:31:02,903 Train Loss: 0.0002243, Val Loss: 0.0001538 +2025-02-19 17:31:02,903 Epoch 1496/2000 +2025-02-19 17:31:44,639 Current Learning Rate: 0.0046860474 +2025-02-19 17:31:44,639 Train Loss: 0.0001005, Val Loss: 0.0001371 +2025-02-19 17:31:44,640 Epoch 1497/2000 +2025-02-19 17:32:27,366 Current Learning Rate: 0.0047644677 +2025-02-19 17:32:27,367 Train Loss: 0.0001325, Val Loss: 0.0001482 +2025-02-19 17:32:27,367 Epoch 1498/2000 
+2025-02-19 17:33:08,540 Current Learning Rate: 0.0048429462 +2025-02-19 17:33:08,540 Train Loss: 0.0001365, Val Loss: 0.0001418 +2025-02-19 17:33:08,541 Epoch 1499/2000 +2025-02-19 17:33:50,920 Current Learning Rate: 0.0049214634 +2025-02-19 17:33:50,920 Train Loss: 0.0001323, Val Loss: 0.0001406 +2025-02-19 17:33:50,920 Epoch 1500/2000 +2025-02-19 17:34:33,042 Current Learning Rate: 0.0050000000 +2025-02-19 17:34:33,043 Train Loss: 0.0001460, Val Loss: 0.0001656 +2025-02-19 17:34:33,043 Epoch 1501/2000 +2025-02-19 17:35:15,791 Current Learning Rate: 0.0050785366 +2025-02-19 17:35:15,791 Train Loss: 0.0001130, Val Loss: 0.0001432 +2025-02-19 17:35:15,791 Epoch 1502/2000 +2025-02-19 17:35:58,257 Current Learning Rate: 0.0051570538 +2025-02-19 17:35:58,257 Train Loss: 0.0001366, Val Loss: 0.0001444 +2025-02-19 17:35:58,258 Epoch 1503/2000 +2025-02-19 17:36:40,177 Current Learning Rate: 0.0052355323 +2025-02-19 17:36:40,178 Train Loss: 0.0001431, Val Loss: 0.0001416 +2025-02-19 17:36:40,178 Epoch 1504/2000 +2025-02-19 17:37:22,653 Current Learning Rate: 0.0053139526 +2025-02-19 17:37:22,654 Train Loss: 0.0001081, Val Loss: 0.0001366 +2025-02-19 17:37:22,654 Epoch 1505/2000 +2025-02-19 17:38:04,400 Current Learning Rate: 0.0053922955 +2025-02-19 17:38:04,400 Train Loss: 0.0001047, Val Loss: 0.0001371 +2025-02-19 17:38:04,400 Epoch 1506/2000 +2025-02-19 17:38:46,910 Current Learning Rate: 0.0054705416 +2025-02-19 17:38:46,910 Train Loss: 0.0001661, Val Loss: 0.0001616 +2025-02-19 17:38:46,910 Epoch 1507/2000 +2025-02-19 17:39:29,162 Current Learning Rate: 0.0055486716 +2025-02-19 17:39:29,163 Train Loss: 0.0001377, Val Loss: 0.0001544 +2025-02-19 17:39:29,163 Epoch 1508/2000 +2025-02-19 17:40:11,373 Current Learning Rate: 0.0056266662 +2025-02-19 17:40:11,374 Train Loss: 0.0001201, Val Loss: 0.0001425 +2025-02-19 17:40:11,374 Epoch 1509/2000 +2025-02-19 17:40:53,657 Current Learning Rate: 0.0057045062 +2025-02-19 17:40:53,657 Train Loss: 0.0001097, Val Loss: 0.0001564 
+2025-02-19 17:40:53,657 Epoch 1510/2000 +2025-02-19 17:41:35,613 Current Learning Rate: 0.0057821723 +2025-02-19 17:41:35,613 Train Loss: 0.0001746, Val Loss: 0.0001567 +2025-02-19 17:41:35,613 Epoch 1511/2000 +2025-02-19 17:42:17,253 Current Learning Rate: 0.0058596455 +2025-02-19 17:42:17,253 Train Loss: 0.0001438, Val Loss: 0.0001593 +2025-02-19 17:42:17,254 Epoch 1512/2000 +2025-02-19 17:42:59,256 Current Learning Rate: 0.0059369066 +2025-02-19 17:42:59,256 Train Loss: 0.0001392, Val Loss: 0.0001671 +2025-02-19 17:42:59,256 Epoch 1513/2000 +2025-02-19 17:43:42,242 Current Learning Rate: 0.0060139365 +2025-02-19 17:43:42,282 Train Loss: 0.0001480, Val Loss: 0.0001813 +2025-02-19 17:43:42,282 Epoch 1514/2000 +2025-02-19 17:44:24,585 Current Learning Rate: 0.0060907162 +2025-02-19 17:44:24,586 Train Loss: 0.0001419, Val Loss: 0.0001480 +2025-02-19 17:44:24,587 Epoch 1515/2000 +2025-02-19 17:45:06,383 Current Learning Rate: 0.0061672268 +2025-02-19 17:45:06,384 Train Loss: 0.0001648, Val Loss: 0.0001971 +2025-02-19 17:45:06,384 Epoch 1516/2000 +2025-02-19 17:45:48,632 Current Learning Rate: 0.0062434494 +2025-02-19 17:45:48,632 Train Loss: 0.0002136, Val Loss: 0.0002173 +2025-02-19 17:45:48,632 Epoch 1517/2000 +2025-02-19 17:46:30,420 Current Learning Rate: 0.0063193652 +2025-02-19 17:46:30,421 Train Loss: 0.0001653, Val Loss: 0.0002044 +2025-02-19 17:46:30,421 Epoch 1518/2000 +2025-02-19 17:47:12,016 Current Learning Rate: 0.0063949555 +2025-02-19 17:47:12,017 Train Loss: 0.0001826, Val Loss: 0.0002048 +2025-02-19 17:47:12,017 Epoch 1519/2000 +2025-02-19 17:47:54,376 Current Learning Rate: 0.0064702016 +2025-02-19 17:47:54,376 Train Loss: 0.0001570, Val Loss: 0.0002732 +2025-02-19 17:47:54,376 Epoch 1520/2000 +2025-02-19 17:48:36,619 Current Learning Rate: 0.0065450850 +2025-02-19 17:48:36,619 Train Loss: 0.0002686, Val Loss: 0.0002084 +2025-02-19 17:48:36,619 Epoch 1521/2000 +2025-02-19 17:49:18,800 Current Learning Rate: 0.0066195871 +2025-02-19 17:49:18,801 
Train Loss: 0.0001766, Val Loss: 0.0001830 +2025-02-19 17:49:18,801 Epoch 1522/2000 +2025-02-19 17:50:01,028 Current Learning Rate: 0.0066936896 +2025-02-19 17:50:01,029 Train Loss: 0.0002439, Val Loss: 0.0002954 +2025-02-19 17:50:01,029 Epoch 1523/2000 +2025-02-19 17:50:42,968 Current Learning Rate: 0.0067673742 +2025-02-19 17:50:42,969 Train Loss: 0.0005013, Val Loss: 0.0004900 +2025-02-19 17:50:42,969 Epoch 1524/2000 +2025-02-19 17:51:25,304 Current Learning Rate: 0.0068406228 +2025-02-19 17:51:25,305 Train Loss: 0.0003077, Val Loss: 0.0002367 +2025-02-19 17:51:25,305 Epoch 1525/2000 +2025-02-19 17:52:07,627 Current Learning Rate: 0.0069134172 +2025-02-19 17:52:07,627 Train Loss: 0.0002069, Val Loss: 0.0002473 +2025-02-19 17:52:07,627 Epoch 1526/2000 +2025-02-19 17:52:49,950 Current Learning Rate: 0.0069857395 +2025-02-19 17:52:49,951 Train Loss: 0.0002818, Val Loss: 0.0002135 +2025-02-19 17:52:49,951 Epoch 1527/2000 +2025-02-19 17:53:31,580 Current Learning Rate: 0.0070575718 +2025-02-19 17:53:31,581 Train Loss: 0.0001810, Val Loss: 0.0001823 +2025-02-19 17:53:31,581 Epoch 1528/2000 +2025-02-19 17:54:13,614 Current Learning Rate: 0.0071288965 +2025-02-19 17:54:13,614 Train Loss: 0.0001646, Val Loss: 0.0001988 +2025-02-19 17:54:13,614 Epoch 1529/2000 +2025-02-19 17:54:55,312 Current Learning Rate: 0.0071996958 +2025-02-19 17:54:55,313 Train Loss: 0.0001292, Val Loss: 0.0001578 +2025-02-19 17:54:55,313 Epoch 1530/2000 +2025-02-19 17:55:37,320 Current Learning Rate: 0.0072699525 +2025-02-19 17:55:37,321 Train Loss: 0.0002393, Val Loss: 0.0002145 +2025-02-19 17:55:37,321 Epoch 1531/2000 +2025-02-19 17:56:19,610 Current Learning Rate: 0.0073396491 +2025-02-19 17:56:19,611 Train Loss: 0.0002366, Val Loss: 0.0002011 +2025-02-19 17:56:19,611 Epoch 1532/2000 +2025-02-19 17:57:01,597 Current Learning Rate: 0.0074087684 +2025-02-19 17:57:01,598 Train Loss: 0.0001595, Val Loss: 0.0001712 +2025-02-19 17:57:01,598 Epoch 1533/2000 +2025-02-19 17:57:44,818 Current Learning 
Rate: 0.0074772933 +2025-02-19 17:57:44,818 Train Loss: 0.0001605, Val Loss: 0.0001594 +2025-02-19 17:57:44,818 Epoch 1534/2000 +2025-02-19 17:58:26,705 Current Learning Rate: 0.0075452071 +2025-02-19 17:58:26,705 Train Loss: 0.0001570, Val Loss: 0.0001604 +2025-02-19 17:58:26,705 Epoch 1535/2000 +2025-02-19 17:59:09,205 Current Learning Rate: 0.0076124928 +2025-02-19 17:59:09,206 Train Loss: 0.0001347, Val Loss: 0.0001596 +2025-02-19 17:59:09,206 Epoch 1536/2000 +2025-02-19 17:59:51,734 Current Learning Rate: 0.0076791340 +2025-02-19 17:59:51,734 Train Loss: 0.0001437, Val Loss: 0.0001673 +2025-02-19 17:59:51,735 Epoch 1537/2000 +2025-02-19 18:00:33,696 Current Learning Rate: 0.0077451141 +2025-02-19 18:00:33,697 Train Loss: 0.0001140, Val Loss: 0.0001664 +2025-02-19 18:00:33,697 Epoch 1538/2000 +2025-02-19 18:01:16,730 Current Learning Rate: 0.0078104169 +2025-02-19 18:01:16,731 Train Loss: 0.0001579, Val Loss: 0.0001707 +2025-02-19 18:01:16,731 Epoch 1539/2000 +2025-02-19 18:01:58,923 Current Learning Rate: 0.0078750263 +2025-02-19 18:01:58,924 Train Loss: 0.0001752, Val Loss: 0.0001784 +2025-02-19 18:01:58,924 Epoch 1540/2000 +2025-02-19 18:02:40,451 Current Learning Rate: 0.0079389263 +2025-02-19 18:02:40,452 Train Loss: 0.0002210, Val Loss: 0.0002151 +2025-02-19 18:02:40,452 Epoch 1541/2000 +2025-02-19 18:03:23,024 Current Learning Rate: 0.0080021011 +2025-02-19 18:03:23,024 Train Loss: 0.0001905, Val Loss: 0.0001793 +2025-02-19 18:03:23,025 Epoch 1542/2000 +2025-02-19 18:04:04,907 Current Learning Rate: 0.0080645353 +2025-02-19 18:04:04,907 Train Loss: 0.0002215, Val Loss: 0.0002186 +2025-02-19 18:04:04,907 Epoch 1543/2000 +2025-02-19 18:04:47,251 Current Learning Rate: 0.0081262133 +2025-02-19 18:04:47,251 Train Loss: 0.0002055, Val Loss: 0.0001939 +2025-02-19 18:04:47,251 Epoch 1544/2000 +2025-02-19 18:05:29,552 Current Learning Rate: 0.0081871199 +2025-02-19 18:05:29,552 Train Loss: 0.0001945, Val Loss: 0.0001905 +2025-02-19 18:05:29,552 Epoch 1545/2000 
+2025-02-19 18:06:11,686 Current Learning Rate: 0.0082472402 +2025-02-19 18:06:11,687 Train Loss: 0.0001640, Val Loss: 0.0002167 +2025-02-19 18:06:11,687 Epoch 1546/2000 +2025-02-19 18:06:53,302 Current Learning Rate: 0.0083065593 +2025-02-19 18:06:53,303 Train Loss: 0.0001826, Val Loss: 0.0002560 +2025-02-19 18:06:53,303 Epoch 1547/2000 +2025-02-19 18:07:36,282 Current Learning Rate: 0.0083650626 +2025-02-19 18:07:36,282 Train Loss: 0.0002406, Val Loss: 0.0002182 +2025-02-19 18:07:36,283 Epoch 1548/2000 +2025-02-19 18:08:18,718 Current Learning Rate: 0.0084227355 +2025-02-19 18:08:18,719 Train Loss: 0.0001751, Val Loss: 0.0001747 +2025-02-19 18:08:18,719 Epoch 1549/2000 +2025-02-19 18:09:01,145 Current Learning Rate: 0.0084795640 +2025-02-19 18:09:01,145 Train Loss: 0.0001911, Val Loss: 0.0001912 +2025-02-19 18:09:01,145 Epoch 1550/2000 +2025-02-19 18:09:43,608 Current Learning Rate: 0.0085355339 +2025-02-19 18:09:43,608 Train Loss: 0.0002183, Val Loss: 0.0001800 +2025-02-19 18:09:43,609 Epoch 1551/2000 +2025-02-19 18:10:25,026 Current Learning Rate: 0.0085906315 +2025-02-19 18:10:25,026 Train Loss: 0.0002671, Val Loss: 0.0002087 +2025-02-19 18:10:25,026 Epoch 1552/2000 +2025-02-19 18:11:07,784 Current Learning Rate: 0.0086448431 +2025-02-19 18:11:07,785 Train Loss: 0.0001370, Val Loss: 0.0001862 +2025-02-19 18:11:07,785 Epoch 1553/2000 +2025-02-19 18:11:50,322 Current Learning Rate: 0.0086981555 +2025-02-19 18:11:50,322 Train Loss: 0.0001737, Val Loss: 0.0002226 +2025-02-19 18:11:50,323 Epoch 1554/2000 +2025-02-19 18:12:31,801 Current Learning Rate: 0.0087505553 +2025-02-19 18:12:31,802 Train Loss: 0.0002027, Val Loss: 0.0001994 +2025-02-19 18:12:31,802 Epoch 1555/2000 +2025-02-19 18:13:13,899 Current Learning Rate: 0.0088020298 +2025-02-19 18:13:13,899 Train Loss: 0.0002534, Val Loss: 0.0001976 +2025-02-19 18:13:13,900 Epoch 1556/2000 +2025-02-19 18:13:55,952 Current Learning Rate: 0.0088525662 +2025-02-19 18:13:55,953 Train Loss: 0.0001649, Val Loss: 0.0001678 
+2025-02-19 18:13:55,953 Epoch 1557/2000 +2025-02-19 18:14:37,712 Current Learning Rate: 0.0089021520 +2025-02-19 18:14:37,712 Train Loss: 0.0001529, Val Loss: 0.0001860 +2025-02-19 18:14:37,713 Epoch 1558/2000 +2025-02-19 18:15:19,935 Current Learning Rate: 0.0089507751 +2025-02-19 18:15:19,935 Train Loss: 0.0001462, Val Loss: 0.0001917 +2025-02-19 18:15:19,935 Epoch 1559/2000 +2025-02-19 18:16:01,669 Current Learning Rate: 0.0089984233 +2025-02-19 18:16:01,669 Train Loss: 0.0002105, Val Loss: 0.0002367 +2025-02-19 18:16:01,670 Epoch 1560/2000 +2025-02-19 18:16:43,544 Current Learning Rate: 0.0090450850 +2025-02-19 18:16:43,544 Train Loss: 0.0001349, Val Loss: 0.0001966 +2025-02-19 18:16:43,544 Epoch 1561/2000 +2025-02-19 18:17:25,487 Current Learning Rate: 0.0090907486 +2025-02-19 18:17:25,487 Train Loss: 0.0002917, Val Loss: 0.0002303 +2025-02-19 18:17:25,488 Epoch 1562/2000 +2025-02-19 18:18:08,302 Current Learning Rate: 0.0091354029 +2025-02-19 18:18:08,302 Train Loss: 0.0002164, Val Loss: 0.0002439 +2025-02-19 18:18:08,303 Epoch 1563/2000 +2025-02-19 18:18:50,479 Current Learning Rate: 0.0091790368 +2025-02-19 18:18:50,479 Train Loss: 0.0001993, Val Loss: 0.0002262 +2025-02-19 18:18:50,479 Epoch 1564/2000 +2025-02-19 18:19:32,132 Current Learning Rate: 0.0092216396 +2025-02-19 18:19:32,132 Train Loss: 0.0002344, Val Loss: 0.0002602 +2025-02-19 18:19:32,133 Epoch 1565/2000 +2025-02-19 18:20:14,662 Current Learning Rate: 0.0092632008 +2025-02-19 18:20:14,663 Train Loss: 0.0001757, Val Loss: 0.0002263 +2025-02-19 18:20:14,663 Epoch 1566/2000 +2025-02-19 18:20:57,044 Current Learning Rate: 0.0093037101 +2025-02-19 18:20:57,044 Train Loss: 0.0002122, Val Loss: 0.0002510 +2025-02-19 18:20:57,044 Epoch 1567/2000 +2025-02-19 18:21:38,705 Current Learning Rate: 0.0093431576 +2025-02-19 18:21:38,705 Train Loss: 0.0001988, Val Loss: 0.0003131 +2025-02-19 18:21:38,705 Epoch 1568/2000 +2025-02-19 18:22:21,053 Current Learning Rate: 0.0093815334 +2025-02-19 18:22:21,054 
Train Loss: 0.0002599, Val Loss: 0.0002879 +2025-02-19 18:22:21,055 Epoch 1569/2000 +2025-02-19 18:23:03,509 Current Learning Rate: 0.0094188282 +2025-02-19 18:23:03,510 Train Loss: 0.0002432, Val Loss: 0.0002270 +2025-02-19 18:23:03,510 Epoch 1570/2000 +2025-02-19 18:23:45,042 Current Learning Rate: 0.0094550326 +2025-02-19 18:23:45,043 Train Loss: 0.0002428, Val Loss: 0.0002229 +2025-02-19 18:23:45,043 Epoch 1571/2000 +2025-02-19 18:24:26,939 Current Learning Rate: 0.0094901379 +2025-02-19 18:24:26,939 Train Loss: 0.0001933, Val Loss: 0.0002347 +2025-02-19 18:24:26,939 Epoch 1572/2000 +2025-02-19 18:25:09,457 Current Learning Rate: 0.0095241353 +2025-02-19 18:25:09,457 Train Loss: 0.0001977, Val Loss: 0.0002408 +2025-02-19 18:25:09,457 Epoch 1573/2000 +2025-02-19 18:25:52,223 Current Learning Rate: 0.0095570164 +2025-02-19 18:25:52,224 Train Loss: 0.0002296, Val Loss: 0.0002417 +2025-02-19 18:25:52,224 Epoch 1574/2000 +2025-02-19 18:26:34,572 Current Learning Rate: 0.0095887731 +2025-02-19 18:26:34,573 Train Loss: 0.0002936, Val Loss: 0.0003214 +2025-02-19 18:26:34,573 Epoch 1575/2000 +2025-02-19 18:27:16,391 Current Learning Rate: 0.0096193977 +2025-02-19 18:27:16,392 Train Loss: 0.0002105, Val Loss: 0.0002674 +2025-02-19 18:27:16,392 Epoch 1576/2000 +2025-02-19 18:27:58,737 Current Learning Rate: 0.0096488824 +2025-02-19 18:27:58,738 Train Loss: 0.0001889, Val Loss: 0.0002266 +2025-02-19 18:27:58,738 Epoch 1577/2000 +2025-02-19 18:28:41,136 Current Learning Rate: 0.0096772202 +2025-02-19 18:28:41,137 Train Loss: 0.0002328, Val Loss: 0.0001948 +2025-02-19 18:28:41,137 Epoch 1578/2000 +2025-02-19 18:29:22,580 Current Learning Rate: 0.0097044038 +2025-02-19 18:29:22,580 Train Loss: 0.0002382, Val Loss: 0.0002225 +2025-02-19 18:29:22,580 Epoch 1579/2000 +2025-02-19 18:30:05,202 Current Learning Rate: 0.0097304268 +2025-02-19 18:30:05,202 Train Loss: 0.0002362, Val Loss: 0.0002604 +2025-02-19 18:30:05,202 Epoch 1580/2000 +2025-02-19 18:30:47,164 Current Learning 
Rate: 0.0097552826 +2025-02-19 18:30:47,165 Train Loss: 0.0002510, Val Loss: 0.0002218 +2025-02-19 18:30:47,165 Epoch 1581/2000 +2025-02-19 18:31:29,734 Current Learning Rate: 0.0097789651 +2025-02-19 18:31:29,734 Train Loss: 0.0001485, Val Loss: 0.0001646 +2025-02-19 18:31:29,734 Epoch 1582/2000 +2025-02-19 18:32:11,465 Current Learning Rate: 0.0098014684 +2025-02-19 18:32:11,466 Train Loss: 0.0004360, Val Loss: 0.0003071 +2025-02-19 18:32:11,466 Epoch 1583/2000 +2025-02-19 18:32:53,939 Current Learning Rate: 0.0098227871 +2025-02-19 18:32:53,940 Train Loss: 0.0002581, Val Loss: 0.0001955 +2025-02-19 18:32:53,940 Epoch 1584/2000 +2025-02-19 18:33:35,807 Current Learning Rate: 0.0098429158 +2025-02-19 18:33:35,807 Train Loss: 0.0001753, Val Loss: 0.0001553 +2025-02-19 18:33:35,808 Epoch 1585/2000 +2025-02-19 18:34:18,501 Current Learning Rate: 0.0098618496 +2025-02-19 18:34:18,502 Train Loss: 0.0001951, Val Loss: 0.0001734 +2025-02-19 18:34:18,502 Epoch 1586/2000 +2025-02-19 18:35:00,219 Current Learning Rate: 0.0098795838 +2025-02-19 18:35:00,219 Train Loss: 0.0001746, Val Loss: 0.0002141 +2025-02-19 18:35:00,219 Epoch 1587/2000 +2025-02-19 18:35:42,353 Current Learning Rate: 0.0098961141 +2025-02-19 18:35:42,354 Train Loss: 0.0002262, Val Loss: 0.0001750 +2025-02-19 18:35:42,354 Epoch 1588/2000 +2025-02-19 18:36:24,284 Current Learning Rate: 0.0099114363 +2025-02-19 18:36:24,284 Train Loss: 0.0002378, Val Loss: 0.0001872 +2025-02-19 18:36:24,285 Epoch 1589/2000 +2025-02-19 18:37:06,563 Current Learning Rate: 0.0099255466 +2025-02-19 18:37:06,563 Train Loss: 0.0001955, Val Loss: 0.0001997 +2025-02-19 18:37:06,563 Epoch 1590/2000 +2025-02-19 18:37:48,500 Current Learning Rate: 0.0099384417 +2025-02-19 18:37:48,501 Train Loss: 0.0001642, Val Loss: 0.0001706 +2025-02-19 18:37:48,501 Epoch 1591/2000 +2025-02-19 18:38:30,540 Current Learning Rate: 0.0099501183 +2025-02-19 18:38:30,541 Train Loss: 0.0001422, Val Loss: 0.0001846 +2025-02-19 18:38:30,541 Epoch 1592/2000 
+2025-02-19 18:39:12,926 Current Learning Rate: 0.0099605735 +2025-02-19 18:39:12,927 Train Loss: 0.0001972, Val Loss: 0.0002144 +2025-02-19 18:39:12,927 Epoch 1593/2000 +2025-02-19 18:39:55,008 Current Learning Rate: 0.0099698048 +2025-02-19 18:39:55,009 Train Loss: 0.0001720, Val Loss: 0.0001847 +2025-02-19 18:39:55,009 Epoch 1594/2000 +2025-02-19 18:40:37,956 Current Learning Rate: 0.0099778098 +2025-02-19 18:40:37,956 Train Loss: 0.0001614, Val Loss: 0.0001701 +2025-02-19 18:40:37,957 Epoch 1595/2000 +2025-02-19 18:41:20,307 Current Learning Rate: 0.0099845867 +2025-02-19 18:41:20,308 Train Loss: 0.0001849, Val Loss: 0.0002082 +2025-02-19 18:41:20,308 Epoch 1596/2000 +2025-02-19 18:42:02,743 Current Learning Rate: 0.0099901336 +2025-02-19 18:42:02,744 Train Loss: 0.0002210, Val Loss: 0.0002022 +2025-02-19 18:42:02,744 Epoch 1597/2000 +2025-02-19 18:42:44,611 Current Learning Rate: 0.0099944494 +2025-02-19 18:42:44,611 Train Loss: 0.0001638, Val Loss: 0.0002100 +2025-02-19 18:42:44,611 Epoch 1598/2000 +2025-02-19 18:43:27,162 Current Learning Rate: 0.0099975328 +2025-02-19 18:43:27,162 Train Loss: 0.0001671, Val Loss: 0.0002202 +2025-02-19 18:43:27,163 Epoch 1599/2000 +2025-02-19 18:44:09,144 Current Learning Rate: 0.0099993832 +2025-02-19 18:44:09,144 Train Loss: 0.0002110, Val Loss: 0.0002232 +2025-02-19 18:44:09,145 Epoch 1600/2000 +2025-02-19 18:44:51,370 Current Learning Rate: 0.0100000000 +2025-02-19 18:44:51,371 Train Loss: 0.0002391, Val Loss: 0.0002677 +2025-02-19 18:44:51,371 Epoch 1601/2000 +2025-02-19 18:45:34,335 Current Learning Rate: 0.0099993832 +2025-02-19 18:45:34,335 Train Loss: 0.0001981, Val Loss: 0.0002663 +2025-02-19 18:45:34,336 Epoch 1602/2000 +2025-02-19 18:46:15,822 Current Learning Rate: 0.0099975328 +2025-02-19 18:46:15,823 Train Loss: 0.0002709, Val Loss: 0.0003803 +2025-02-19 18:46:15,823 Epoch 1603/2000 +2025-02-19 18:46:58,642 Current Learning Rate: 0.0099944494 +2025-02-19 18:46:58,642 Train Loss: 0.0017988, Val Loss: 0.0026894 
+2025-02-19 18:46:58,642 Epoch 1604/2000 +2025-02-19 18:47:40,208 Current Learning Rate: 0.0099901336 +2025-02-19 18:47:40,209 Train Loss: 0.0011875, Val Loss: 0.0004501 +2025-02-19 18:47:40,210 Epoch 1605/2000 +2025-02-19 18:48:22,145 Current Learning Rate: 0.0099845867 +2025-02-19 18:48:22,146 Train Loss: 0.0003099, Val Loss: 0.0002145 +2025-02-19 18:48:22,146 Epoch 1606/2000 +2025-02-19 18:49:05,548 Current Learning Rate: 0.0099778098 +2025-02-19 18:49:05,549 Train Loss: 0.0001487, Val Loss: 0.0001755 +2025-02-19 18:49:05,549 Epoch 1607/2000 +2025-02-19 18:49:47,594 Current Learning Rate: 0.0099698048 +2025-02-19 18:49:47,594 Train Loss: 0.0001604, Val Loss: 0.0001619 +2025-02-19 18:49:47,594 Epoch 1608/2000 +2025-02-19 18:50:30,597 Current Learning Rate: 0.0099605735 +2025-02-19 18:50:30,597 Train Loss: 0.0001683, Val Loss: 0.0001806 +2025-02-19 18:50:30,597 Epoch 1609/2000 +2025-02-19 18:51:12,139 Current Learning Rate: 0.0099501183 +2025-02-19 18:51:12,140 Train Loss: 0.0002095, Val Loss: 0.0001992 +2025-02-19 18:51:12,140 Epoch 1610/2000 +2025-02-19 18:51:54,447 Current Learning Rate: 0.0099384417 +2025-02-19 18:51:54,448 Train Loss: 0.0001514, Val Loss: 0.0001797 +2025-02-19 18:51:54,448 Epoch 1611/2000 +2025-02-19 18:52:37,088 Current Learning Rate: 0.0099255466 +2025-02-19 18:52:37,088 Train Loss: 0.0001654, Val Loss: 0.0001655 +2025-02-19 18:52:37,088 Epoch 1612/2000 +2025-02-19 18:53:18,833 Current Learning Rate: 0.0099114363 +2025-02-19 18:53:18,833 Train Loss: 0.0001974, Val Loss: 0.0001672 +2025-02-19 18:53:18,834 Epoch 1613/2000 +2025-02-19 18:54:00,889 Current Learning Rate: 0.0098961141 +2025-02-19 18:54:00,890 Train Loss: 0.0001397, Val Loss: 0.0001614 +2025-02-19 18:54:00,890 Epoch 1614/2000 +2025-02-19 18:54:43,681 Current Learning Rate: 0.0098795838 +2025-02-19 18:54:43,682 Train Loss: 0.0001599, Val Loss: 0.0001701 +2025-02-19 18:54:43,684 Epoch 1615/2000 +2025-02-19 18:55:25,877 Current Learning Rate: 0.0098618496 +2025-02-19 18:55:25,877 
Train Loss: 0.0001549, Val Loss: 0.0001764 +2025-02-19 18:55:25,877 Epoch 1616/2000 +2025-02-19 18:56:08,131 Current Learning Rate: 0.0098429158 +2025-02-19 18:56:08,132 Train Loss: 0.0002382, Val Loss: 0.0002232 +2025-02-19 18:56:08,132 Epoch 1617/2000 +2025-02-19 18:56:49,775 Current Learning Rate: 0.0098227871 +2025-02-19 18:56:49,776 Train Loss: 0.0002299, Val Loss: 0.0002315 +2025-02-19 18:56:49,776 Epoch 1618/2000 +2025-02-19 18:57:31,763 Current Learning Rate: 0.0098014684 +2025-02-19 18:57:31,764 Train Loss: 0.0002194, Val Loss: 0.0002296 +2025-02-19 18:57:31,764 Epoch 1619/2000 +2025-02-19 18:58:13,656 Current Learning Rate: 0.0097789651 +2025-02-19 18:58:13,657 Train Loss: 0.0002257, Val Loss: 0.0001856 +2025-02-19 18:58:13,657 Epoch 1620/2000 +2025-02-19 18:58:56,034 Current Learning Rate: 0.0097552826 +2025-02-19 18:58:56,034 Train Loss: 0.0001520, Val Loss: 0.0001900 +2025-02-19 18:58:56,034 Epoch 1621/2000 +2025-02-19 18:59:38,129 Current Learning Rate: 0.0097304268 +2025-02-19 18:59:38,130 Train Loss: 0.0001631, Val Loss: 0.0001700 +2025-02-19 18:59:38,130 Epoch 1622/2000 +2025-02-19 19:00:19,957 Current Learning Rate: 0.0097044038 +2025-02-19 19:00:19,958 Train Loss: 0.0001582, Val Loss: 0.0001623 +2025-02-19 19:00:19,959 Epoch 1623/2000 +2025-02-19 19:01:01,954 Current Learning Rate: 0.0096772202 +2025-02-19 19:01:01,955 Train Loss: 0.0001482, Val Loss: 0.0001789 +2025-02-19 19:01:01,955 Epoch 1624/2000 +2025-02-19 19:01:44,281 Current Learning Rate: 0.0096488824 +2025-02-19 19:01:44,281 Train Loss: 0.0001538, Val Loss: 0.0001620 +2025-02-19 19:01:44,281 Epoch 1625/2000 +2025-02-19 19:02:26,192 Current Learning Rate: 0.0096193977 +2025-02-19 19:02:26,193 Train Loss: 0.0001121, Val Loss: 0.0001549 +2025-02-19 19:02:26,193 Epoch 1626/2000 +2025-02-19 19:03:07,998 Current Learning Rate: 0.0095887731 +2025-02-19 19:03:07,999 Train Loss: 0.0001243, Val Loss: 0.0001621 +2025-02-19 19:03:08,000 Epoch 1627/2000 +2025-02-19 19:03:50,141 Current Learning 
Rate: 0.0095570164 +2025-02-19 19:03:50,141 Train Loss: 0.0001782, Val Loss: 0.0001946 +2025-02-19 19:03:50,141 Epoch 1628/2000 +2025-02-19 19:04:32,354 Current Learning Rate: 0.0095241353 +2025-02-19 19:04:32,355 Train Loss: 0.0002040, Val Loss: 0.0002017 +2025-02-19 19:04:32,355 Epoch 1629/2000 +2025-02-19 19:05:14,697 Current Learning Rate: 0.0094901379 +2025-02-19 19:05:14,698 Train Loss: 0.0002113, Val Loss: 0.0001622 +2025-02-19 19:05:14,698 Epoch 1630/2000 +2025-02-19 19:05:56,496 Current Learning Rate: 0.0094550326 +2025-02-19 19:05:56,496 Train Loss: 0.0001861, Val Loss: 0.0001569 +2025-02-19 19:05:56,496 Epoch 1631/2000 +2025-02-19 19:06:38,242 Current Learning Rate: 0.0094188282 +2025-02-19 19:06:38,243 Train Loss: 0.0001439, Val Loss: 0.0001524 +2025-02-19 19:06:38,244 Epoch 1632/2000 +2025-02-19 19:07:21,100 Current Learning Rate: 0.0093815334 +2025-02-19 19:07:21,101 Train Loss: 0.0001283, Val Loss: 0.0001511 +2025-02-19 19:07:21,101 Epoch 1633/2000 +2025-02-19 19:08:03,035 Current Learning Rate: 0.0093431576 +2025-02-19 19:08:03,035 Train Loss: 0.0001206, Val Loss: 0.0001560 +2025-02-19 19:08:03,036 Epoch 1634/2000 +2025-02-19 19:08:44,665 Current Learning Rate: 0.0093037101 +2025-02-19 19:08:44,665 Train Loss: 0.0001768, Val Loss: 0.0001866 +2025-02-19 19:08:44,665 Epoch 1635/2000 +2025-02-19 19:09:26,895 Current Learning Rate: 0.0092632008 +2025-02-19 19:09:26,895 Train Loss: 0.0002033, Val Loss: 0.0001858 +2025-02-19 19:09:26,896 Epoch 1636/2000 +2025-02-19 19:10:09,012 Current Learning Rate: 0.0092216396 +2025-02-19 19:10:09,013 Train Loss: 0.0001831, Val Loss: 0.0001986 +2025-02-19 19:10:09,013 Epoch 1637/2000 +2025-02-19 19:10:51,683 Current Learning Rate: 0.0091790368 +2025-02-19 19:10:51,683 Train Loss: 0.0001608, Val Loss: 0.0001862 +2025-02-19 19:10:51,684 Epoch 1638/2000 +2025-02-19 19:11:33,698 Current Learning Rate: 0.0091354029 +2025-02-19 19:11:33,699 Train Loss: 0.0002221, Val Loss: 0.0001727 +2025-02-19 19:11:33,699 Epoch 1639/2000 
+2025-02-19 19:12:16,298 Current Learning Rate: 0.0090907486 +2025-02-19 19:12:16,299 Train Loss: 0.0001560, Val Loss: 0.0001638 +2025-02-19 19:12:16,299 Epoch 1640/2000 +2025-02-19 19:12:57,789 Current Learning Rate: 0.0090450850 +2025-02-19 19:12:57,789 Train Loss: 0.0002348, Val Loss: 0.0002077 +2025-02-19 19:12:57,789 Epoch 1641/2000 +2025-02-19 19:13:39,957 Current Learning Rate: 0.0089984233 +2025-02-19 19:13:39,957 Train Loss: 0.0001462, Val Loss: 0.0001673 +2025-02-19 19:13:39,957 Epoch 1642/2000 +2025-02-19 19:14:22,539 Current Learning Rate: 0.0089507751 +2025-02-19 19:14:22,540 Train Loss: 0.0001146, Val Loss: 0.0001434 +2025-02-19 19:14:22,540 Epoch 1643/2000 +2025-02-19 19:15:05,286 Current Learning Rate: 0.0089021520 +2025-02-19 19:15:05,287 Train Loss: 0.0001388, Val Loss: 0.0001399 +2025-02-19 19:15:05,287 Epoch 1644/2000 +2025-02-19 19:15:47,641 Current Learning Rate: 0.0088525662 +2025-02-19 19:15:47,642 Train Loss: 0.0001166, Val Loss: 0.0001377 +2025-02-19 19:15:47,642 Epoch 1645/2000 +2025-02-19 19:16:29,314 Current Learning Rate: 0.0088020298 +2025-02-19 19:16:29,314 Train Loss: 0.0001578, Val Loss: 0.0001429 +2025-02-19 19:16:29,314 Epoch 1646/2000 +2025-02-19 19:17:11,210 Current Learning Rate: 0.0087505553 +2025-02-19 19:17:11,211 Train Loss: 0.0001040, Val Loss: 0.0001378 +2025-02-19 19:17:11,211 Epoch 1647/2000 +2025-02-19 19:17:53,830 Current Learning Rate: 0.0086981555 +2025-02-19 19:17:53,831 Train Loss: 0.0001377, Val Loss: 0.0001365 +2025-02-19 19:17:53,831 Epoch 1648/2000 +2025-02-19 19:18:35,538 Current Learning Rate: 0.0086448431 +2025-02-19 19:18:35,538 Train Loss: 0.0001701, Val Loss: 0.0001486 +2025-02-19 19:18:35,538 Epoch 1649/2000 +2025-02-19 19:19:17,720 Current Learning Rate: 0.0085906315 +2025-02-19 19:19:17,721 Train Loss: 0.0001248, Val Loss: 0.0001412 +2025-02-19 19:19:17,721 Epoch 1650/2000 +2025-02-19 19:20:00,536 Current Learning Rate: 0.0085355339 +2025-02-19 19:20:00,536 Train Loss: 0.0001213, Val Loss: 0.0001354 
+2025-02-19 19:20:00,537 Epoch 1651/2000 +2025-02-19 19:20:42,319 Current Learning Rate: 0.0084795640 +2025-02-19 19:20:42,320 Train Loss: 0.0001323, Val Loss: 0.0001346 +2025-02-19 19:20:42,320 Epoch 1652/2000 +2025-02-19 19:21:24,391 Current Learning Rate: 0.0084227355 +2025-02-19 19:21:24,392 Train Loss: 0.0001204, Val Loss: 0.0001463 +2025-02-19 19:21:24,392 Epoch 1653/2000 +2025-02-19 19:22:06,452 Current Learning Rate: 0.0083650626 +2025-02-19 19:22:06,453 Train Loss: 0.0001125, Val Loss: 0.0001505 +2025-02-19 19:22:06,453 Epoch 1654/2000 +2025-02-19 19:22:48,427 Current Learning Rate: 0.0083065593 +2025-02-19 19:22:48,428 Train Loss: 0.0001049, Val Loss: 0.0001437 +2025-02-19 19:22:48,428 Epoch 1655/2000 +2025-02-19 19:23:30,719 Current Learning Rate: 0.0082472402 +2025-02-19 19:23:30,720 Train Loss: 0.0001349, Val Loss: 0.0001413 +2025-02-19 19:23:30,720 Epoch 1656/2000 +2025-02-19 19:24:13,014 Current Learning Rate: 0.0081871199 +2025-02-19 19:24:13,015 Train Loss: 0.0001472, Val Loss: 0.0001554 +2025-02-19 19:24:13,016 Epoch 1657/2000 +2025-02-19 19:24:55,804 Current Learning Rate: 0.0081262133 +2025-02-19 19:24:55,805 Train Loss: 0.0001491, Val Loss: 0.0001591 +2025-02-19 19:24:55,805 Epoch 1658/2000 +2025-02-19 19:25:38,028 Current Learning Rate: 0.0080645353 +2025-02-19 19:25:38,029 Train Loss: 0.0001415, Val Loss: 0.0001532 +2025-02-19 19:25:38,029 Epoch 1659/2000 +2025-02-19 19:26:19,661 Current Learning Rate: 0.0080021011 +2025-02-19 19:26:19,662 Train Loss: 0.0001344, Val Loss: 0.0001509 +2025-02-19 19:26:19,662 Epoch 1660/2000 +2025-02-19 19:27:02,662 Current Learning Rate: 0.0079389263 +2025-02-19 19:27:02,663 Train Loss: 0.0001497, Val Loss: 0.0001737 +2025-02-19 19:27:02,663 Epoch 1661/2000 +2025-02-19 19:27:44,626 Current Learning Rate: 0.0078750263 +2025-02-19 19:27:44,626 Train Loss: 0.0001704, Val Loss: 0.0001501 +2025-02-19 19:27:44,626 Epoch 1662/2000 +2025-02-19 19:28:26,806 Current Learning Rate: 0.0078104169 +2025-02-19 19:28:26,807 
Train Loss: 0.0001481, Val Loss: 0.0001580 +2025-02-19 19:28:26,807 Epoch 1663/2000 +2025-02-19 19:29:09,819 Current Learning Rate: 0.0077451141 +2025-02-19 19:29:09,819 Train Loss: 0.0001447, Val Loss: 0.0001761 +2025-02-19 19:29:09,820 Epoch 1664/2000 +2025-02-19 19:29:52,367 Current Learning Rate: 0.0076791340 +2025-02-19 19:29:52,368 Train Loss: 0.0001525, Val Loss: 0.0001654 +2025-02-19 19:29:52,368 Epoch 1665/2000 +2025-02-19 19:30:34,463 Current Learning Rate: 0.0076124928 +2025-02-19 19:30:34,463 Train Loss: 0.0001580, Val Loss: 0.0001702 +2025-02-19 19:30:34,464 Epoch 1666/2000 +2025-02-19 19:31:16,888 Current Learning Rate: 0.0075452071 +2025-02-19 19:31:16,888 Train Loss: 0.0001328, Val Loss: 0.0001608 +2025-02-19 19:31:16,889 Epoch 1667/2000 +2025-02-19 19:31:59,320 Current Learning Rate: 0.0074772933 +2025-02-19 19:31:59,320 Train Loss: 0.0001114, Val Loss: 0.0001724 +2025-02-19 19:31:59,320 Epoch 1668/2000 +2025-02-19 19:32:41,634 Current Learning Rate: 0.0074087684 +2025-02-19 19:32:41,635 Train Loss: 0.0001478, Val Loss: 0.0001945 +2025-02-19 19:32:41,635 Epoch 1669/2000 +2025-02-19 19:33:23,541 Current Learning Rate: 0.0073396491 +2025-02-19 19:33:23,542 Train Loss: 0.0002006, Val Loss: 0.0002140 +2025-02-19 19:33:23,542 Epoch 1670/2000 +2025-02-19 19:34:05,531 Current Learning Rate: 0.0072699525 +2025-02-19 19:34:05,531 Train Loss: 0.0001784, Val Loss: 0.0001918 +2025-02-19 19:34:05,531 Epoch 1671/2000 +2025-02-19 19:34:47,874 Current Learning Rate: 0.0071996958 +2025-02-19 19:34:47,875 Train Loss: 0.0001512, Val Loss: 0.0001468 +2025-02-19 19:34:47,875 Epoch 1672/2000 +2025-02-19 19:35:30,234 Current Learning Rate: 0.0071288965 +2025-02-19 19:35:30,235 Train Loss: 0.0001667, Val Loss: 0.0001679 +2025-02-19 19:35:30,235 Epoch 1673/2000 +2025-02-19 19:36:11,895 Current Learning Rate: 0.0070575718 +2025-02-19 19:36:11,896 Train Loss: 0.0002170, Val Loss: 0.0001727 +2025-02-19 19:36:11,896 Epoch 1674/2000 +2025-02-19 19:36:54,012 Current Learning 
Rate: 0.0069857395 +2025-02-19 19:36:54,012 Train Loss: 0.0001519, Val Loss: 0.0001360 +2025-02-19 19:36:54,013 Epoch 1675/2000 +2025-02-19 19:37:36,248 Current Learning Rate: 0.0069134172 +2025-02-19 19:37:36,248 Train Loss: 0.0001128, Val Loss: 0.0001306 +2025-02-19 19:37:36,249 Epoch 1676/2000 +2025-02-19 19:38:18,587 Current Learning Rate: 0.0068406228 +2025-02-19 19:38:18,587 Train Loss: 0.0001366, Val Loss: 0.0001327 +2025-02-19 19:38:18,587 Epoch 1677/2000 +2025-02-19 19:39:00,919 Current Learning Rate: 0.0067673742 +2025-02-19 19:39:00,920 Train Loss: 0.0000812, Val Loss: 0.0001255 +2025-02-19 19:39:00,920 Epoch 1678/2000 +2025-02-19 19:39:43,781 Current Learning Rate: 0.0066936896 +2025-02-19 19:39:43,781 Train Loss: 0.0000884, Val Loss: 0.0001242 +2025-02-19 19:39:43,782 Epoch 1679/2000 +2025-02-19 19:40:26,022 Current Learning Rate: 0.0066195871 +2025-02-19 19:40:26,022 Train Loss: 0.0000923, Val Loss: 0.0001249 +2025-02-19 19:40:26,023 Epoch 1680/2000 +2025-02-19 19:41:07,705 Current Learning Rate: 0.0065450850 +2025-02-19 19:41:07,706 Train Loss: 0.0001198, Val Loss: 0.0001287 +2025-02-19 19:41:07,706 Epoch 1681/2000 +2025-02-19 19:41:49,867 Current Learning Rate: 0.0064702016 +2025-02-19 19:41:49,867 Train Loss: 0.0001131, Val Loss: 0.0001262 +2025-02-19 19:41:49,868 Epoch 1682/2000 +2025-02-19 19:42:31,818 Current Learning Rate: 0.0063949555 +2025-02-19 19:42:31,818 Train Loss: 0.0001293, Val Loss: 0.0001255 +2025-02-19 19:42:31,819 Epoch 1683/2000 +2025-02-19 19:43:14,154 Current Learning Rate: 0.0063193652 +2025-02-19 19:43:14,154 Train Loss: 0.0000750, Val Loss: 0.0001214 +2025-02-19 19:43:14,155 Epoch 1684/2000 +2025-02-19 19:43:56,367 Current Learning Rate: 0.0062434494 +2025-02-19 19:43:56,367 Train Loss: 0.0001517, Val Loss: 0.0001246 +2025-02-19 19:43:56,368 Epoch 1685/2000 +2025-02-19 19:44:37,872 Current Learning Rate: 0.0061672268 +2025-02-19 19:44:37,873 Train Loss: 0.0001084, Val Loss: 0.0001264 +2025-02-19 19:44:37,873 Epoch 1686/2000 
+2025-02-19 19:45:19,811 Current Learning Rate: 0.0060907162 +2025-02-19 19:45:19,811 Train Loss: 0.0001111, Val Loss: 0.0001253 +2025-02-19 19:45:19,811 Epoch 1687/2000 +2025-02-19 19:46:02,015 Current Learning Rate: 0.0060139365 +2025-02-19 19:46:02,016 Train Loss: 0.0001202, Val Loss: 0.0001348 +2025-02-19 19:46:02,016 Epoch 1688/2000 +2025-02-19 19:46:44,604 Current Learning Rate: 0.0059369066 +2025-02-19 19:46:44,605 Train Loss: 0.0001283, Val Loss: 0.0001486 +2025-02-19 19:46:44,605 Epoch 1689/2000 +2025-02-19 19:47:25,959 Current Learning Rate: 0.0058596455 +2025-02-19 19:47:25,959 Train Loss: 0.0001517, Val Loss: 0.0001781 +2025-02-19 19:47:25,960 Epoch 1690/2000 +2025-02-19 19:48:08,741 Current Learning Rate: 0.0057821723 +2025-02-19 19:48:08,741 Train Loss: 0.0001727, Val Loss: 0.0001920 +2025-02-19 19:48:08,741 Epoch 1691/2000 +2025-02-19 19:48:50,436 Current Learning Rate: 0.0057045062 +2025-02-19 19:48:50,436 Train Loss: 0.0002040, Val Loss: 0.0001570 +2025-02-19 19:48:50,436 Epoch 1692/2000 +2025-02-19 19:49:32,707 Current Learning Rate: 0.0056266662 +2025-02-19 19:49:32,708 Train Loss: 0.0001860, Val Loss: 0.0001410 +2025-02-19 19:49:32,708 Epoch 1693/2000 +2025-02-19 19:50:15,141 Current Learning Rate: 0.0055486716 +2025-02-19 19:50:15,142 Train Loss: 0.0001333, Val Loss: 0.0001575 +2025-02-19 19:50:15,142 Epoch 1694/2000 +2025-02-19 19:50:57,710 Current Learning Rate: 0.0054705416 +2025-02-19 19:50:57,710 Train Loss: 0.0001076, Val Loss: 0.0001389 +2025-02-19 19:50:57,710 Epoch 1695/2000 +2025-02-19 19:51:39,283 Current Learning Rate: 0.0053922955 +2025-02-19 19:51:39,284 Train Loss: 0.0000832, Val Loss: 0.0001227 +2025-02-19 19:51:39,284 Epoch 1696/2000 +2025-02-19 19:52:21,562 Current Learning Rate: 0.0053139526 +2025-02-19 19:52:21,562 Train Loss: 0.0000813, Val Loss: 0.0001211 +2025-02-19 19:52:21,562 Epoch 1697/2000 +2025-02-19 19:53:04,499 Current Learning Rate: 0.0052355323 +2025-02-19 19:53:04,500 Train Loss: 0.0001052, Val Loss: 0.0001231 
+2025-02-19 19:53:04,500 Epoch 1698/2000 +2025-02-19 19:53:46,109 Current Learning Rate: 0.0051570538 +2025-02-19 19:53:46,109 Train Loss: 0.0001343, Val Loss: 0.0001266 +2025-02-19 19:53:46,109 Epoch 1699/2000 +2025-02-19 19:54:28,990 Current Learning Rate: 0.0050785366 +2025-02-19 19:54:28,991 Train Loss: 0.0001057, Val Loss: 0.0001243 +2025-02-19 19:54:28,991 Epoch 1700/2000 +2025-02-19 19:55:11,453 Current Learning Rate: 0.0050000000 +2025-02-19 19:55:11,454 Train Loss: 0.0001030, Val Loss: 0.0001225 +2025-02-19 19:55:11,454 Epoch 1701/2000 +2025-02-19 19:55:53,156 Current Learning Rate: 0.0049214634 +2025-02-19 19:55:53,157 Train Loss: 0.0001093, Val Loss: 0.0001228 +2025-02-19 19:55:53,157 Epoch 1702/2000 +2025-02-19 19:56:35,526 Current Learning Rate: 0.0048429462 +2025-02-19 19:56:35,526 Train Loss: 0.0001372, Val Loss: 0.0001239 +2025-02-19 19:56:35,527 Epoch 1703/2000 +2025-02-19 19:57:18,741 Current Learning Rate: 0.0047644677 +2025-02-19 19:57:18,742 Train Loss: 0.0001166, Val Loss: 0.0001229 +2025-02-19 19:57:18,745 Epoch 1704/2000 +2025-02-19 19:58:01,469 Current Learning Rate: 0.0046860474 +2025-02-19 19:58:01,470 Train Loss: 0.0001016, Val Loss: 0.0001264 +2025-02-19 19:58:01,470 Epoch 1705/2000 +2025-02-19 19:58:43,280 Current Learning Rate: 0.0046077045 +2025-02-19 19:58:43,282 Train Loss: 0.0001066, Val Loss: 0.0001287 +2025-02-19 19:58:43,283 Epoch 1706/2000 +2025-02-19 19:59:26,114 Current Learning Rate: 0.0045294584 +2025-02-19 19:59:26,115 Train Loss: 0.0001056, Val Loss: 0.0001287 +2025-02-19 19:59:26,115 Epoch 1707/2000 +2025-02-19 20:00:08,718 Current Learning Rate: 0.0044513284 +2025-02-19 20:00:08,718 Train Loss: 0.0000951, Val Loss: 0.0001278 +2025-02-19 20:00:08,718 Epoch 1708/2000 +2025-02-19 20:00:51,354 Current Learning Rate: 0.0043733338 +2025-02-19 20:00:51,355 Train Loss: 0.0001050, Val Loss: 0.0001255 +2025-02-19 20:00:51,355 Epoch 1709/2000 +2025-02-19 20:01:33,842 Current Learning Rate: 0.0042954938 +2025-02-19 20:01:33,843 
Train Loss: 0.0001249, Val Loss: 0.0001220 +2025-02-19 20:01:33,843 Epoch 1710/2000 +2025-02-19 20:02:15,243 Current Learning Rate: 0.0042178277 +2025-02-19 20:02:15,243 Train Loss: 0.0001502, Val Loss: 0.0001216 +2025-02-19 20:02:15,243 Epoch 1711/2000 +2025-02-19 20:02:57,490 Current Learning Rate: 0.0041403545 +2025-02-19 20:02:57,491 Train Loss: 0.0001365, Val Loss: 0.0001216 +2025-02-19 20:02:57,491 Epoch 1712/2000 +2025-02-19 20:03:39,918 Current Learning Rate: 0.0040630934 +2025-02-19 20:03:39,918 Train Loss: 0.0001002, Val Loss: 0.0001271 +2025-02-19 20:03:39,919 Epoch 1713/2000 +2025-02-19 20:04:21,710 Current Learning Rate: 0.0039860635 +2025-02-19 20:04:21,711 Train Loss: 0.0000963, Val Loss: 0.0001339 +2025-02-19 20:04:21,711 Epoch 1714/2000 +2025-02-19 20:05:04,720 Current Learning Rate: 0.0039092838 +2025-02-19 20:05:04,721 Train Loss: 0.0000908, Val Loss: 0.0001340 +2025-02-19 20:05:04,721 Epoch 1715/2000 +2025-02-19 20:05:47,153 Current Learning Rate: 0.0038327732 +2025-02-19 20:05:47,154 Train Loss: 0.0001419, Val Loss: 0.0001294 +2025-02-19 20:05:47,154 Epoch 1716/2000 +2025-02-19 20:06:29,374 Current Learning Rate: 0.0037565506 +2025-02-19 20:06:29,374 Train Loss: 0.0001145, Val Loss: 0.0001230 +2025-02-19 20:06:29,375 Epoch 1717/2000 +2025-02-19 20:07:10,979 Current Learning Rate: 0.0036806348 +2025-02-19 20:07:10,980 Train Loss: 0.0001081, Val Loss: 0.0001200 +2025-02-19 20:07:10,980 Epoch 1718/2000 +2025-02-19 20:07:53,971 Current Learning Rate: 0.0036050445 +2025-02-19 20:07:53,972 Train Loss: 0.0001287, Val Loss: 0.0001215 +2025-02-19 20:07:53,972 Epoch 1719/2000 +2025-02-19 20:08:36,418 Current Learning Rate: 0.0035297984 +2025-02-19 20:08:37,493 Train Loss: 0.0000794, Val Loss: 0.0001155 +2025-02-19 20:08:37,493 Epoch 1720/2000 +2025-02-19 20:09:19,707 Current Learning Rate: 0.0034549150 +2025-02-19 20:09:21,632 Train Loss: 0.0000673, Val Loss: 0.0001143 +2025-02-19 20:09:21,633 Epoch 1721/2000 +2025-02-19 20:10:02,268 Current Learning 
Rate: 0.0033804129 +2025-02-19 20:10:02,268 Train Loss: 0.0000730, Val Loss: 0.0001151 +2025-02-19 20:10:02,269 Epoch 1722/2000 +2025-02-19 20:10:44,511 Current Learning Rate: 0.0033063104 +2025-02-19 20:10:45,792 Train Loss: 0.0000753, Val Loss: 0.0001137 +2025-02-19 20:10:45,793 Epoch 1723/2000 +2025-02-19 20:11:28,195 Current Learning Rate: 0.0032326258 +2025-02-19 20:11:28,196 Train Loss: 0.0000778, Val Loss: 0.0001145 +2025-02-19 20:11:28,196 Epoch 1724/2000 +2025-02-19 20:12:10,721 Current Learning Rate: 0.0031593772 +2025-02-19 20:12:10,721 Train Loss: 0.0001163, Val Loss: 0.0001146 +2025-02-19 20:12:10,722 Epoch 1725/2000 +2025-02-19 20:12:52,312 Current Learning Rate: 0.0030865828 +2025-02-19 20:12:52,312 Train Loss: 0.0001019, Val Loss: 0.0001143 +2025-02-19 20:12:52,312 Epoch 1726/2000 +2025-02-19 20:13:34,592 Current Learning Rate: 0.0030142605 +2025-02-19 20:13:34,592 Train Loss: 0.0001272, Val Loss: 0.0001146 +2025-02-19 20:13:34,592 Epoch 1727/2000 +2025-02-19 20:14:16,537 Current Learning Rate: 0.0029424282 +2025-02-19 20:14:16,538 Train Loss: 0.0001065, Val Loss: 0.0001141 +2025-02-19 20:14:16,538 Epoch 1728/2000 +2025-02-19 20:14:59,180 Current Learning Rate: 0.0028711035 +2025-02-19 20:14:59,180 Train Loss: 0.0001253, Val Loss: 0.0001150 +2025-02-19 20:14:59,180 Epoch 1729/2000 +2025-02-19 20:15:41,210 Current Learning Rate: 0.0028003042 +2025-02-19 20:15:41,210 Train Loss: 0.0001123, Val Loss: 0.0001142 +2025-02-19 20:15:41,210 Epoch 1730/2000 +2025-02-19 20:16:23,417 Current Learning Rate: 0.0027300475 +2025-02-19 20:16:23,417 Train Loss: 0.0001127, Val Loss: 0.0001148 +2025-02-19 20:16:23,418 Epoch 1731/2000 +2025-02-19 20:17:05,018 Current Learning Rate: 0.0026603509 +2025-02-19 20:17:05,019 Train Loss: 0.0000944, Val Loss: 0.0001141 +2025-02-19 20:17:05,019 Epoch 1732/2000 +2025-02-19 20:17:47,603 Current Learning Rate: 0.0025912316 +2025-02-19 20:17:47,604 Train Loss: 0.0001149, Val Loss: 0.0001138 +2025-02-19 20:17:47,604 Epoch 1733/2000 
+2025-02-19 20:18:29,397 Current Learning Rate: 0.0025227067 +2025-02-19 20:18:30,307 Train Loss: 0.0000743, Val Loss: 0.0001133 +2025-02-19 20:18:30,307 Epoch 1734/2000 +2025-02-19 20:19:11,805 Current Learning Rate: 0.0024547929 +2025-02-19 20:19:11,806 Train Loss: 0.0001103, Val Loss: 0.0001138 +2025-02-19 20:19:11,806 Epoch 1735/2000 +2025-02-19 20:19:54,264 Current Learning Rate: 0.0023875072 +2025-02-19 20:19:54,264 Train Loss: 0.0001188, Val Loss: 0.0001136 +2025-02-19 20:19:54,264 Epoch 1736/2000 +2025-02-19 20:20:36,080 Current Learning Rate: 0.0023208660 +2025-02-19 20:20:37,737 Train Loss: 0.0001368, Val Loss: 0.0001130 +2025-02-19 20:20:37,737 Epoch 1737/2000 +2025-02-19 20:21:18,215 Current Learning Rate: 0.0022548859 +2025-02-19 20:21:19,151 Train Loss: 0.0001115, Val Loss: 0.0001130 +2025-02-19 20:21:19,151 Epoch 1738/2000 +2025-02-19 20:22:00,861 Current Learning Rate: 0.0021895831 +2025-02-19 20:22:00,861 Train Loss: 0.0000997, Val Loss: 0.0001136 +2025-02-19 20:22:00,862 Epoch 1739/2000 +2025-02-19 20:22:42,398 Current Learning Rate: 0.0021249737 +2025-02-19 20:22:42,398 Train Loss: 0.0000942, Val Loss: 0.0001149 +2025-02-19 20:22:42,398 Epoch 1740/2000 +2025-02-19 20:23:24,313 Current Learning Rate: 0.0020610737 +2025-02-19 20:23:24,314 Train Loss: 0.0000764, Val Loss: 0.0001143 +2025-02-19 20:23:24,314 Epoch 1741/2000 +2025-02-19 20:24:06,655 Current Learning Rate: 0.0019978989 +2025-02-19 20:24:06,656 Train Loss: 0.0000780, Val Loss: 0.0001152 +2025-02-19 20:24:06,656 Epoch 1742/2000 +2025-02-19 20:24:49,118 Current Learning Rate: 0.0019354647 +2025-02-19 20:24:49,147 Train Loss: 0.0000936, Val Loss: 0.0001139 +2025-02-19 20:24:49,147 Epoch 1743/2000 +2025-02-19 20:25:31,072 Current Learning Rate: 0.0018737867 +2025-02-19 20:25:32,322 Train Loss: 0.0000963, Val Loss: 0.0001122 +2025-02-19 20:25:32,323 Epoch 1744/2000 +2025-02-19 20:26:13,257 Current Learning Rate: 0.0018128801 +2025-02-19 20:26:14,339 Train Loss: 0.0000733, Val Loss: 0.0001115 
+2025-02-19 20:26:14,339 Epoch 1745/2000 +2025-02-19 20:26:56,144 Current Learning Rate: 0.0017527598 +2025-02-19 20:26:57,730 Train Loss: 0.0001209, Val Loss: 0.0001114 +2025-02-19 20:26:57,730 Epoch 1746/2000 +2025-02-19 20:27:39,900 Current Learning Rate: 0.0016934407 +2025-02-19 20:27:40,977 Train Loss: 0.0000831, Val Loss: 0.0001108 +2025-02-19 20:27:40,978 Epoch 1747/2000 +2025-02-19 20:28:21,946 Current Learning Rate: 0.0016349374 +2025-02-19 20:28:22,984 Train Loss: 0.0000914, Val Loss: 0.0001103 +2025-02-19 20:28:22,984 Epoch 1748/2000 +2025-02-19 20:29:04,606 Current Learning Rate: 0.0015772645 +2025-02-19 20:29:04,607 Train Loss: 0.0000841, Val Loss: 0.0001103 +2025-02-19 20:29:04,607 Epoch 1749/2000 +2025-02-19 20:29:47,157 Current Learning Rate: 0.0015204360 +2025-02-19 20:29:47,158 Train Loss: 0.0000989, Val Loss: 0.0001106 +2025-02-19 20:29:47,158 Epoch 1750/2000 +2025-02-19 20:30:28,685 Current Learning Rate: 0.0014644661 +2025-02-19 20:30:29,916 Train Loss: 0.0001104, Val Loss: 0.0001102 +2025-02-19 20:30:29,916 Epoch 1751/2000 +2025-02-19 20:31:10,874 Current Learning Rate: 0.0014093685 +2025-02-19 20:31:12,266 Train Loss: 0.0000957, Val Loss: 0.0001099 +2025-02-19 20:31:12,267 Epoch 1752/2000 +2025-02-19 20:31:54,269 Current Learning Rate: 0.0013551569 +2025-02-19 20:31:56,298 Train Loss: 0.0000923, Val Loss: 0.0001097 +2025-02-19 20:31:56,299 Epoch 1753/2000 +2025-02-19 20:32:38,734 Current Learning Rate: 0.0013018445 +2025-02-19 20:32:40,423 Train Loss: 0.0000914, Val Loss: 0.0001096 +2025-02-19 20:32:40,424 Epoch 1754/2000 +2025-02-19 20:33:22,801 Current Learning Rate: 0.0012494447 +2025-02-19 20:33:22,802 Train Loss: 0.0001162, Val Loss: 0.0001097 +2025-02-19 20:33:22,802 Epoch 1755/2000 +2025-02-19 20:34:05,098 Current Learning Rate: 0.0011979702 +2025-02-19 20:34:07,059 Train Loss: 0.0000746, Val Loss: 0.0001093 +2025-02-19 20:34:07,060 Epoch 1756/2000 +2025-02-19 20:34:49,245 Current Learning Rate: 0.0011474338 +2025-02-19 20:34:51,119 
Train Loss: 0.0000924, Val Loss: 0.0001093 +2025-02-19 20:34:51,120 Epoch 1757/2000 +2025-02-19 20:35:32,604 Current Learning Rate: 0.0010978480 +2025-02-19 20:35:33,714 Train Loss: 0.0000880, Val Loss: 0.0001092 +2025-02-19 20:35:33,715 Epoch 1758/2000 +2025-02-19 20:36:15,342 Current Learning Rate: 0.0010492249 +2025-02-19 20:36:15,343 Train Loss: 0.0000983, Val Loss: 0.0001095 +2025-02-19 20:36:15,347 Epoch 1759/2000 +2025-02-19 20:36:57,056 Current Learning Rate: 0.0010015767 +2025-02-19 20:36:57,057 Train Loss: 0.0000957, Val Loss: 0.0001092 +2025-02-19 20:36:57,057 Epoch 1760/2000 +2025-02-19 20:37:39,357 Current Learning Rate: 0.0009549150 +2025-02-19 20:37:40,663 Train Loss: 0.0000895, Val Loss: 0.0001091 +2025-02-19 20:37:40,664 Epoch 1761/2000 +2025-02-19 20:38:22,494 Current Learning Rate: 0.0009092514 +2025-02-19 20:38:22,495 Train Loss: 0.0001784, Val Loss: 0.0001096 +2025-02-19 20:38:22,495 Epoch 1762/2000 +2025-02-19 20:39:04,860 Current Learning Rate: 0.0008645971 +2025-02-19 20:39:04,860 Train Loss: 0.0000589, Val Loss: 0.0001092 +2025-02-19 20:39:04,861 Epoch 1763/2000 +2025-02-19 20:39:46,559 Current Learning Rate: 0.0008209632 +2025-02-19 20:39:48,299 Train Loss: 0.0000742, Val Loss: 0.0001090 +2025-02-19 20:39:48,300 Epoch 1764/2000 +2025-02-19 20:40:29,738 Current Learning Rate: 0.0007783604 +2025-02-19 20:40:31,632 Train Loss: 0.0000786, Val Loss: 0.0001090 +2025-02-19 20:40:31,633 Epoch 1765/2000 +2025-02-19 20:41:12,250 Current Learning Rate: 0.0007367992 +2025-02-19 20:41:13,496 Train Loss: 0.0001112, Val Loss: 0.0001089 +2025-02-19 20:41:13,496 Epoch 1766/2000 +2025-02-19 20:41:55,141 Current Learning Rate: 0.0006962899 +2025-02-19 20:41:55,142 Train Loss: 0.0000981, Val Loss: 0.0001089 +2025-02-19 20:41:55,142 Epoch 1767/2000 +2025-02-19 20:42:37,402 Current Learning Rate: 0.0006568424 +2025-02-19 20:42:39,140 Train Loss: 0.0000777, Val Loss: 0.0001089 +2025-02-19 20:42:39,140 Epoch 1768/2000 +2025-02-19 20:43:21,432 Current Learning 
Rate: 0.0006184666 +2025-02-19 20:43:23,089 Train Loss: 0.0000580, Val Loss: 0.0001087 +2025-02-19 20:43:23,089 Epoch 1769/2000 +2025-02-19 20:44:05,484 Current Learning Rate: 0.0005811718 +2025-02-19 20:44:05,485 Train Loss: 0.0001199, Val Loss: 0.0001089 +2025-02-19 20:44:05,485 Epoch 1770/2000 +2025-02-19 20:44:47,647 Current Learning Rate: 0.0005449674 +2025-02-19 20:44:48,818 Train Loss: 0.0001163, Val Loss: 0.0001087 +2025-02-19 20:44:48,819 Epoch 1771/2000 +2025-02-19 20:45:30,382 Current Learning Rate: 0.0005098621 +2025-02-19 20:45:30,383 Train Loss: 0.0001073, Val Loss: 0.0001088 +2025-02-19 20:45:30,385 Epoch 1772/2000 +2025-02-19 20:46:12,953 Current Learning Rate: 0.0004758647 +2025-02-19 20:46:12,954 Train Loss: 0.0000763, Val Loss: 0.0001087 +2025-02-19 20:46:12,954 Epoch 1773/2000 +2025-02-19 20:46:55,091 Current Learning Rate: 0.0004429836 +2025-02-19 20:46:57,016 Train Loss: 0.0000757, Val Loss: 0.0001086 +2025-02-19 20:46:57,016 Epoch 1774/2000 +2025-02-19 20:47:38,772 Current Learning Rate: 0.0004112269 +2025-02-19 20:47:38,772 Train Loss: 0.0000922, Val Loss: 0.0001086 +2025-02-19 20:47:38,773 Epoch 1775/2000 +2025-02-19 20:48:20,638 Current Learning Rate: 0.0003806023 +2025-02-19 20:48:20,638 Train Loss: 0.0000721, Val Loss: 0.0001086 +2025-02-19 20:48:20,639 Epoch 1776/2000 +2025-02-19 20:49:02,674 Current Learning Rate: 0.0003511176 +2025-02-19 20:49:04,204 Train Loss: 0.0000775, Val Loss: 0.0001085 +2025-02-19 20:49:04,205 Epoch 1777/2000 +2025-02-19 20:49:45,755 Current Learning Rate: 0.0003227798 +2025-02-19 20:49:47,080 Train Loss: 0.0000762, Val Loss: 0.0001085 +2025-02-19 20:49:47,081 Epoch 1778/2000 +2025-02-19 20:50:29,088 Current Learning Rate: 0.0002955962 +2025-02-19 20:50:29,089 Train Loss: 0.0000812, Val Loss: 0.0001085 +2025-02-19 20:50:29,089 Epoch 1779/2000 +2025-02-19 20:51:10,427 Current Learning Rate: 0.0002695732 +2025-02-19 20:51:10,428 Train Loss: 0.0000577, Val Loss: 0.0001085 +2025-02-19 20:51:10,428 Epoch 1780/2000 
+2025-02-19 20:51:52,538 Current Learning Rate: 0.0002447174 +2025-02-19 20:51:52,539 Train Loss: 0.0000783, Val Loss: 0.0001085 +2025-02-19 20:51:52,539 Epoch 1781/2000 +2025-02-19 20:52:35,010 Current Learning Rate: 0.0002210349 +2025-02-19 20:52:35,010 Train Loss: 0.0001121, Val Loss: 0.0001086 +2025-02-19 20:52:35,011 Epoch 1782/2000 +2025-02-19 20:53:16,582 Current Learning Rate: 0.0001985316 +2025-02-19 20:53:16,583 Train Loss: 0.0001393, Val Loss: 0.0001085 +2025-02-19 20:53:16,583 Epoch 1783/2000 +2025-02-19 20:53:58,483 Current Learning Rate: 0.0001772129 +2025-02-19 20:53:58,484 Train Loss: 0.0000899, Val Loss: 0.0001085 +2025-02-19 20:53:58,484 Epoch 1784/2000 +2025-02-19 20:54:40,988 Current Learning Rate: 0.0001570842 +2025-02-19 20:54:40,988 Train Loss: 0.0000820, Val Loss: 0.0001086 +2025-02-19 20:54:40,988 Epoch 1785/2000 +2025-02-19 20:55:23,127 Current Learning Rate: 0.0001381504 +2025-02-19 20:55:23,128 Train Loss: 0.0000724, Val Loss: 0.0001086 +2025-02-19 20:55:23,128 Epoch 1786/2000 +2025-02-19 20:56:05,203 Current Learning Rate: 0.0001204162 +2025-02-19 20:56:05,203 Train Loss: 0.0000727, Val Loss: 0.0001085 +2025-02-19 20:56:05,204 Epoch 1787/2000 +2025-02-19 20:56:47,829 Current Learning Rate: 0.0001038859 +2025-02-19 20:56:47,829 Train Loss: 0.0000885, Val Loss: 0.0001085 +2025-02-19 20:56:47,829 Epoch 1788/2000 +2025-02-19 20:57:30,261 Current Learning Rate: 0.0000885637 +2025-02-19 20:57:30,261 Train Loss: 0.0000836, Val Loss: 0.0001085 +2025-02-19 20:57:30,261 Epoch 1789/2000 +2025-02-19 20:58:12,116 Current Learning Rate: 0.0000744534 +2025-02-19 20:58:13,453 Train Loss: 0.0000991, Val Loss: 0.0001084 +2025-02-19 20:58:13,453 Epoch 1790/2000 +2025-02-19 20:58:54,766 Current Learning Rate: 0.0000615583 +2025-02-19 20:58:56,597 Train Loss: 0.0000934, Val Loss: 0.0001084 +2025-02-19 20:58:56,597 Epoch 1791/2000 +2025-02-19 20:59:37,562 Current Learning Rate: 0.0000498817 +2025-02-19 20:59:37,563 Train Loss: 0.0000832, Val Loss: 0.0001085 
+2025-02-19 20:59:37,563 Epoch 1792/2000 +2025-02-19 21:00:19,345 Current Learning Rate: 0.0000394265 +2025-02-19 21:00:19,345 Train Loss: 0.0000925, Val Loss: 0.0001084 +2025-02-19 21:00:19,346 Epoch 1793/2000 +2025-02-19 21:01:01,849 Current Learning Rate: 0.0000301952 +2025-02-19 21:01:01,850 Train Loss: 0.0000613, Val Loss: 0.0001085 +2025-02-19 21:01:01,850 Epoch 1794/2000 +2025-02-19 21:01:44,748 Current Learning Rate: 0.0000221902 +2025-02-19 21:01:44,748 Train Loss: 0.0000739, Val Loss: 0.0001084 +2025-02-19 21:01:44,748 Epoch 1795/2000 +2025-02-19 21:02:27,115 Current Learning Rate: 0.0000154133 +2025-02-19 21:02:29,160 Train Loss: 0.0001015, Val Loss: 0.0001084 +2025-02-19 21:02:29,160 Epoch 1796/2000 +2025-02-19 21:03:10,498 Current Learning Rate: 0.0000098664 +2025-02-19 21:03:10,499 Train Loss: 0.0000976, Val Loss: 0.0001085 +2025-02-19 21:03:10,499 Epoch 1797/2000 +2025-02-19 21:03:52,148 Current Learning Rate: 0.0000055506 +2025-02-19 21:03:52,148 Train Loss: 0.0000871, Val Loss: 0.0001084 +2025-02-19 21:03:52,148 Epoch 1798/2000 +2025-02-19 21:04:34,685 Current Learning Rate: 0.0000024672 +2025-02-19 21:04:34,686 Train Loss: 0.0000929, Val Loss: 0.0001084 +2025-02-19 21:04:34,686 Epoch 1799/2000 +2025-02-19 21:05:16,292 Current Learning Rate: 0.0000006168 +2025-02-19 21:05:17,474 Train Loss: 0.0001103, Val Loss: 0.0001084 +2025-02-19 21:05:17,474 Epoch 1800/2000 +2025-02-19 21:05:58,458 Current Learning Rate: 0.0000000000 +2025-02-19 21:05:58,459 Train Loss: 0.0001070, Val Loss: 0.0001084 +2025-02-19 21:05:58,459 Epoch 1801/2000 +2025-02-19 21:06:40,601 Current Learning Rate: 0.0000006168 +2025-02-19 21:06:40,602 Train Loss: 0.0000927, Val Loss: 0.0001084 +2025-02-19 21:06:40,602 Epoch 1802/2000 +2025-02-19 21:07:22,975 Current Learning Rate: 0.0000024672 +2025-02-19 21:07:22,976 Train Loss: 0.0001148, Val Loss: 0.0001084 +2025-02-19 21:07:22,976 Epoch 1803/2000 +2025-02-19 21:08:05,965 Current Learning Rate: 0.0000055506 +2025-02-19 21:08:05,966 
Train Loss: 0.0000911, Val Loss: 0.0001084 +2025-02-19 21:08:05,966 Epoch 1804/2000 +2025-02-19 21:08:48,477 Current Learning Rate: 0.0000098664 +2025-02-19 21:08:48,478 Train Loss: 0.0000754, Val Loss: 0.0001085 +2025-02-19 21:08:48,478 Epoch 1805/2000 +2025-02-19 21:09:30,636 Current Learning Rate: 0.0000154133 +2025-02-19 21:09:30,637 Train Loss: 0.0000761, Val Loss: 0.0001084 +2025-02-19 21:09:30,638 Epoch 1806/2000 +2025-02-19 21:10:12,577 Current Learning Rate: 0.0000221902 +2025-02-19 21:10:12,577 Train Loss: 0.0000719, Val Loss: 0.0001085 +2025-02-19 21:10:12,577 Epoch 1807/2000 +2025-02-19 21:10:54,877 Current Learning Rate: 0.0000301952 +2025-02-19 21:10:54,878 Train Loss: 0.0000971, Val Loss: 0.0001084 +2025-02-19 21:10:54,878 Epoch 1808/2000 +2025-02-19 21:11:36,877 Current Learning Rate: 0.0000394265 +2025-02-19 21:11:36,878 Train Loss: 0.0000768, Val Loss: 0.0001084 +2025-02-19 21:11:36,878 Epoch 1809/2000 +2025-02-19 21:12:20,111 Current Learning Rate: 0.0000498817 +2025-02-19 21:12:20,112 Train Loss: 0.0001093, Val Loss: 0.0001084 +2025-02-19 21:12:20,112 Epoch 1810/2000 +2025-02-19 21:13:01,847 Current Learning Rate: 0.0000615583 +2025-02-19 21:13:01,847 Train Loss: 0.0000774, Val Loss: 0.0001084 +2025-02-19 21:13:01,847 Epoch 1811/2000 +2025-02-19 21:13:44,555 Current Learning Rate: 0.0000744534 +2025-02-19 21:13:44,555 Train Loss: 0.0000990, Val Loss: 0.0001084 +2025-02-19 21:13:44,556 Epoch 1812/2000 +2025-02-19 21:14:27,092 Current Learning Rate: 0.0000885637 +2025-02-19 21:14:27,092 Train Loss: 0.0001172, Val Loss: 0.0001084 +2025-02-19 21:14:27,093 Epoch 1813/2000 +2025-02-19 21:15:09,557 Current Learning Rate: 0.0001038859 +2025-02-19 21:15:09,558 Train Loss: 0.0000827, Val Loss: 0.0001085 +2025-02-19 21:15:09,558 Epoch 1814/2000 +2025-02-19 21:15:51,227 Current Learning Rate: 0.0001204162 +2025-02-19 21:15:51,228 Train Loss: 0.0000723, Val Loss: 0.0001085 +2025-02-19 21:15:51,228 Epoch 1815/2000 +2025-02-19 21:16:33,336 Current Learning 
Rate: 0.0001381504 +2025-02-19 21:16:33,337 Train Loss: 0.0000678, Val Loss: 0.0001085 +2025-02-19 21:16:33,337 Epoch 1816/2000 +2025-02-19 21:17:15,764 Current Learning Rate: 0.0001570842 +2025-02-19 21:17:15,764 Train Loss: 0.0001284, Val Loss: 0.0001085 +2025-02-19 21:17:15,765 Epoch 1817/2000 +2025-02-19 21:17:58,025 Current Learning Rate: 0.0001772129 +2025-02-19 21:17:58,026 Train Loss: 0.0000619, Val Loss: 0.0001085 +2025-02-19 21:17:58,026 Epoch 1818/2000 +2025-02-19 21:18:40,299 Current Learning Rate: 0.0001985316 +2025-02-19 21:18:40,299 Train Loss: 0.0000845, Val Loss: 0.0001085 +2025-02-19 21:18:40,300 Epoch 1819/2000 +2025-02-19 21:19:22,471 Current Learning Rate: 0.0002210349 +2025-02-19 21:19:22,472 Train Loss: 0.0000610, Val Loss: 0.0001085 +2025-02-19 21:19:22,472 Epoch 1820/2000 +2025-02-19 21:20:04,709 Current Learning Rate: 0.0002447174 +2025-02-19 21:20:04,710 Train Loss: 0.0000904, Val Loss: 0.0001085 +2025-02-19 21:20:04,710 Epoch 1821/2000 +2025-02-19 21:20:47,051 Current Learning Rate: 0.0002695732 +2025-02-19 21:20:47,052 Train Loss: 0.0000853, Val Loss: 0.0001084 +2025-02-19 21:20:47,052 Epoch 1822/2000 +2025-02-19 21:21:29,043 Current Learning Rate: 0.0002955962 +2025-02-19 21:21:29,043 Train Loss: 0.0000835, Val Loss: 0.0001084 +2025-02-19 21:21:29,043 Epoch 1823/2000 +2025-02-19 21:22:11,979 Current Learning Rate: 0.0003227798 +2025-02-19 21:22:11,979 Train Loss: 0.0001025, Val Loss: 0.0001086 +2025-02-19 21:22:11,979 Epoch 1824/2000 +2025-02-19 21:22:54,161 Current Learning Rate: 0.0003511176 +2025-02-19 21:22:54,163 Train Loss: 0.0000996, Val Loss: 0.0001085 +2025-02-19 21:22:54,163 Epoch 1825/2000 +2025-02-19 21:23:36,249 Current Learning Rate: 0.0003806023 +2025-02-19 21:23:36,250 Train Loss: 0.0000655, Val Loss: 0.0001085 +2025-02-19 21:23:36,250 Epoch 1826/2000 +2025-02-19 21:24:18,774 Current Learning Rate: 0.0004112269 +2025-02-19 21:24:18,775 Train Loss: 0.0000745, Val Loss: 0.0001085 +2025-02-19 21:24:18,775 Epoch 1827/2000 
+2025-02-19 21:25:00,478 Current Learning Rate: 0.0004429836 +2025-02-19 21:25:00,479 Train Loss: 0.0000694, Val Loss: 0.0001084 +2025-02-19 21:25:00,480 Epoch 1828/2000 +2025-02-19 21:25:42,852 Current Learning Rate: 0.0004758647 +2025-02-19 21:25:42,852 Train Loss: 0.0000854, Val Loss: 0.0001085 +2025-02-19 21:25:42,852 Epoch 1829/2000 +2025-02-19 21:26:24,565 Current Learning Rate: 0.0005098621 +2025-02-19 21:26:24,566 Train Loss: 0.0001015, Val Loss: 0.0001084 +2025-02-19 21:26:24,566 Epoch 1830/2000 +2025-02-19 21:27:06,979 Current Learning Rate: 0.0005449674 +2025-02-19 21:27:06,979 Train Loss: 0.0000989, Val Loss: 0.0001086 +2025-02-19 21:27:06,979 Epoch 1831/2000 +2025-02-19 21:27:49,679 Current Learning Rate: 0.0005811718 +2025-02-19 21:27:49,679 Train Loss: 0.0000877, Val Loss: 0.0001086 +2025-02-19 21:27:49,680 Epoch 1832/2000 +2025-02-19 21:28:32,152 Current Learning Rate: 0.0006184666 +2025-02-19 21:28:32,153 Train Loss: 0.0000799, Val Loss: 0.0001088 +2025-02-19 21:28:32,153 Epoch 1833/2000 +2025-02-19 21:29:14,074 Current Learning Rate: 0.0006568424 +2025-02-19 21:29:14,074 Train Loss: 0.0000711, Val Loss: 0.0001087 +2025-02-19 21:29:14,075 Epoch 1834/2000 +2025-02-19 21:29:56,950 Current Learning Rate: 0.0006962899 +2025-02-19 21:29:56,950 Train Loss: 0.0000655, Val Loss: 0.0001087 +2025-02-19 21:29:56,951 Epoch 1835/2000 +2025-02-19 21:30:39,436 Current Learning Rate: 0.0007367992 +2025-02-19 21:30:39,437 Train Loss: 0.0000835, Val Loss: 0.0001086 +2025-02-19 21:30:39,437 Epoch 1836/2000 +2025-02-19 21:31:21,363 Current Learning Rate: 0.0007783604 +2025-02-19 21:31:21,364 Train Loss: 0.0001190, Val Loss: 0.0001088 +2025-02-19 21:31:21,364 Epoch 1837/2000 +2025-02-19 21:32:03,117 Current Learning Rate: 0.0008209632 +2025-02-19 21:32:03,117 Train Loss: 0.0000580, Val Loss: 0.0001087 +2025-02-19 21:32:03,118 Epoch 1838/2000 +2025-02-19 21:32:45,938 Current Learning Rate: 0.0008645971 +2025-02-19 21:32:45,938 Train Loss: 0.0001006, Val Loss: 0.0001087 
+2025-02-19 21:32:45,938 Epoch 1839/2000 +2025-02-19 21:33:27,840 Current Learning Rate: 0.0009092514 +2025-02-19 21:33:27,840 Train Loss: 0.0000817, Val Loss: 0.0001087 +2025-02-19 21:33:27,840 Epoch 1840/2000 +2025-02-19 21:34:09,952 Current Learning Rate: 0.0009549150 +2025-02-19 21:34:09,952 Train Loss: 0.0000669, Val Loss: 0.0001087 +2025-02-19 21:34:09,953 Epoch 1841/2000 +2025-02-19 21:34:53,057 Current Learning Rate: 0.0010015767 +2025-02-19 21:34:53,058 Train Loss: 0.0000752, Val Loss: 0.0001086 +2025-02-19 21:34:53,058 Epoch 1842/2000 +2025-02-19 21:35:35,363 Current Learning Rate: 0.0010492249 +2025-02-19 21:35:35,363 Train Loss: 0.0001038, Val Loss: 0.0001089 +2025-02-19 21:35:35,363 Epoch 1843/2000 +2025-02-19 21:36:17,842 Current Learning Rate: 0.0010978480 +2025-02-19 21:36:17,842 Train Loss: 0.0001300, Val Loss: 0.0001103 +2025-02-19 21:36:17,843 Epoch 1844/2000 +2025-02-19 21:36:59,923 Current Learning Rate: 0.0011474338 +2025-02-19 21:36:59,924 Train Loss: 0.0001280, Val Loss: 0.0001132 +2025-02-19 21:36:59,924 Epoch 1845/2000 +2025-02-19 21:37:42,332 Current Learning Rate: 0.0011979702 +2025-02-19 21:37:42,333 Train Loss: 0.0000817, Val Loss: 0.0001114 +2025-02-19 21:37:42,333 Epoch 1846/2000 +2025-02-19 21:38:24,322 Current Learning Rate: 0.0012494447 +2025-02-19 21:38:24,323 Train Loss: 0.0001074, Val Loss: 0.0001112 +2025-02-19 21:38:24,323 Epoch 1847/2000 +2025-02-19 21:39:06,410 Current Learning Rate: 0.0013018445 +2025-02-19 21:39:06,411 Train Loss: 0.0001053, Val Loss: 0.0001115 +2025-02-19 21:39:06,411 Epoch 1848/2000 +2025-02-19 21:39:48,893 Current Learning Rate: 0.0013551569 +2025-02-19 21:39:48,894 Train Loss: 0.0000753, Val Loss: 0.0001093 +2025-02-19 21:39:48,894 Epoch 1849/2000 +2025-02-19 21:40:30,976 Current Learning Rate: 0.0014093685 +2025-02-19 21:40:30,976 Train Loss: 0.0001026, Val Loss: 0.0001093 +2025-02-19 21:40:30,976 Epoch 1850/2000 +2025-02-19 21:41:12,476 Current Learning Rate: 0.0014644661 +2025-02-19 21:41:12,476 
Train Loss: 0.0000850, Val Loss: 0.0001095 +2025-02-19 21:41:12,476 Epoch 1851/2000 +2025-02-19 21:41:55,163 Current Learning Rate: 0.0015204360 +2025-02-19 21:41:55,163 Train Loss: 0.0000988, Val Loss: 0.0001095 +2025-02-19 21:41:55,163 Epoch 1852/2000 +2025-02-19 21:42:37,653 Current Learning Rate: 0.0015772645 +2025-02-19 21:42:37,653 Train Loss: 0.0000827, Val Loss: 0.0001098 +2025-02-19 21:42:37,654 Epoch 1853/2000 +2025-02-19 21:43:19,828 Current Learning Rate: 0.0016349374 +2025-02-19 21:43:19,828 Train Loss: 0.0000946, Val Loss: 0.0001093 +2025-02-19 21:43:19,828 Epoch 1854/2000 +2025-02-19 21:44:01,512 Current Learning Rate: 0.0016934407 +2025-02-19 21:44:01,513 Train Loss: 0.0000821, Val Loss: 0.0001089 +2025-02-19 21:44:01,513 Epoch 1855/2000 +2025-02-19 21:44:43,600 Current Learning Rate: 0.0017527598 +2025-02-19 21:44:43,601 Train Loss: 0.0001145, Val Loss: 0.0001110 +2025-02-19 21:44:43,601 Epoch 1856/2000 +2025-02-19 21:45:25,500 Current Learning Rate: 0.0018128801 +2025-02-19 21:45:25,500 Train Loss: 0.0000860, Val Loss: 0.0001103 +2025-02-19 21:45:25,500 Epoch 1857/2000 +2025-02-19 21:46:07,542 Current Learning Rate: 0.0018737867 +2025-02-19 21:46:07,543 Train Loss: 0.0000707, Val Loss: 0.0001090 +2025-02-19 21:46:07,543 Epoch 1858/2000 +2025-02-19 21:46:50,485 Current Learning Rate: 0.0019354647 +2025-02-19 21:46:50,486 Train Loss: 0.0000698, Val Loss: 0.0001125 +2025-02-19 21:46:50,486 Epoch 1859/2000 +2025-02-19 21:47:32,344 Current Learning Rate: 0.0019978989 +2025-02-19 21:47:32,345 Train Loss: 0.0000902, Val Loss: 0.0001114 +2025-02-19 21:47:32,345 Epoch 1860/2000 +2025-02-19 21:48:14,101 Current Learning Rate: 0.0020610737 +2025-02-19 21:48:14,102 Train Loss: 0.0000630, Val Loss: 0.0001094 +2025-02-19 21:48:14,102 Epoch 1861/2000 +2025-02-19 21:48:56,351 Current Learning Rate: 0.0021249737 +2025-02-19 21:48:56,351 Train Loss: 0.0001229, Val Loss: 0.0001169 +2025-02-19 21:48:56,351 Epoch 1862/2000 +2025-02-19 21:49:38,548 Current Learning 
Rate: 0.0021895831 +2025-02-19 21:49:38,548 Train Loss: 0.0001077, Val Loss: 0.0001123 +2025-02-19 21:49:38,548 Epoch 1863/2000 +2025-02-19 21:50:21,661 Current Learning Rate: 0.0022548859 +2025-02-19 21:50:21,662 Train Loss: 0.0000826, Val Loss: 0.0001096 +2025-02-19 21:50:21,662 Epoch 1864/2000 +2025-02-19 21:51:04,007 Current Learning Rate: 0.0023208660 +2025-02-19 21:51:04,007 Train Loss: 0.0000857, Val Loss: 0.0001100 +2025-02-19 21:51:04,008 Epoch 1865/2000 +2025-02-19 21:51:46,369 Current Learning Rate: 0.0023875072 +2025-02-19 21:51:46,370 Train Loss: 0.0001144, Val Loss: 0.0001104 +2025-02-19 21:51:46,370 Epoch 1866/2000 +2025-02-19 21:52:28,717 Current Learning Rate: 0.0024547929 +2025-02-19 21:52:28,717 Train Loss: 0.0000987, Val Loss: 0.0001117 +2025-02-19 21:52:28,717 Epoch 1867/2000 +2025-02-19 21:53:10,761 Current Learning Rate: 0.0025227067 +2025-02-19 21:53:10,762 Train Loss: 0.0001176, Val Loss: 0.0001111 +2025-02-19 21:53:10,762 Epoch 1868/2000 +2025-02-19 21:53:53,293 Current Learning Rate: 0.0025912316 +2025-02-19 21:53:53,293 Train Loss: 0.0000947, Val Loss: 0.0001105 +2025-02-19 21:53:53,293 Epoch 1869/2000 +2025-02-19 21:54:35,666 Current Learning Rate: 0.0026603509 +2025-02-19 21:54:35,666 Train Loss: 0.0000950, Val Loss: 0.0001124 +2025-02-19 21:54:35,667 Epoch 1870/2000 +2025-02-19 21:55:17,500 Current Learning Rate: 0.0027300475 +2025-02-19 21:55:17,500 Train Loss: 0.0000850, Val Loss: 0.0001112 +2025-02-19 21:55:17,501 Epoch 1871/2000 +2025-02-19 21:56:00,494 Current Learning Rate: 0.0028003042 +2025-02-19 21:56:00,495 Train Loss: 0.0000972, Val Loss: 0.0001168 +2025-02-19 21:56:00,495 Epoch 1872/2000 +2025-02-19 21:56:42,252 Current Learning Rate: 0.0028711035 +2025-02-19 21:56:42,252 Train Loss: 0.0000708, Val Loss: 0.0001097 +2025-02-19 21:56:42,252 Epoch 1873/2000 +2025-02-19 21:57:24,687 Current Learning Rate: 0.0029424282 +2025-02-19 21:57:24,687 Train Loss: 0.0001178, Val Loss: 0.0001356 +2025-02-19 21:57:24,688 Epoch 1874/2000 
+2025-02-19 21:58:07,024 Current Learning Rate: 0.0030142605 +2025-02-19 21:58:07,024 Train Loss: 0.0000852, Val Loss: 0.0001131 +2025-02-19 21:58:07,024 Epoch 1875/2000 +2025-02-19 21:58:49,278 Current Learning Rate: 0.0030865828 +2025-02-19 21:58:49,279 Train Loss: 0.0001046, Val Loss: 0.0001159 +2025-02-19 21:58:49,279 Epoch 1876/2000 +2025-02-19 21:59:32,267 Current Learning Rate: 0.0031593772 +2025-02-19 21:59:32,268 Train Loss: 0.0000999, Val Loss: 0.0001189 +2025-02-19 21:59:32,268 Epoch 1877/2000 +2025-02-19 22:00:14,047 Current Learning Rate: 0.0032326258 +2025-02-19 22:00:14,048 Train Loss: 0.0000896, Val Loss: 0.0001117 +2025-02-19 22:00:14,048 Epoch 1878/2000 +2025-02-19 22:00:56,901 Current Learning Rate: 0.0033063104 +2025-02-19 22:00:56,901 Train Loss: 0.0000847, Val Loss: 0.0001129 +2025-02-19 22:00:56,902 Epoch 1879/2000 +2025-02-19 22:01:39,071 Current Learning Rate: 0.0033804129 +2025-02-19 22:01:39,072 Train Loss: 0.0001001, Val Loss: 0.0001128 +2025-02-19 22:01:39,072 Epoch 1880/2000 +2025-02-19 22:02:21,603 Current Learning Rate: 0.0034549150 +2025-02-19 22:02:21,604 Train Loss: 0.0000642, Val Loss: 0.0001104 +2025-02-19 22:02:21,604 Epoch 1881/2000 +2025-02-19 22:03:03,269 Current Learning Rate: 0.0035297984 +2025-02-19 22:03:03,270 Train Loss: 0.0000899, Val Loss: 0.0001111 +2025-02-19 22:03:03,270 Epoch 1882/2000 +2025-02-19 22:03:45,445 Current Learning Rate: 0.0036050445 +2025-02-19 22:03:45,445 Train Loss: 0.0000993, Val Loss: 0.0001146 +2025-02-19 22:03:45,445 Epoch 1883/2000 +2025-02-19 22:04:27,527 Current Learning Rate: 0.0036806348 +2025-02-19 22:04:27,527 Train Loss: 0.0001283, Val Loss: 0.0001177 +2025-02-19 22:04:27,527 Epoch 1884/2000 +2025-02-19 22:05:10,212 Current Learning Rate: 0.0037565506 +2025-02-19 22:05:10,213 Train Loss: 0.0001095, Val Loss: 0.0001151 +2025-02-19 22:05:10,213 Epoch 1885/2000 +2025-02-19 22:05:52,638 Current Learning Rate: 0.0038327732 +2025-02-19 22:05:52,639 Train Loss: 0.0000720, Val Loss: 0.0001112 
+2025-02-19 22:05:52,639 Epoch 1886/2000 +2025-02-19 22:06:34,610 Current Learning Rate: 0.0039092838 +2025-02-19 22:06:34,610 Train Loss: 0.0000696, Val Loss: 0.0001109 +2025-02-19 22:06:34,611 Epoch 1887/2000 +2025-02-19 22:07:17,430 Current Learning Rate: 0.0039860635 +2025-02-19 22:07:17,431 Train Loss: 0.0001160, Val Loss: 0.0001203 +2025-02-19 22:07:17,431 Epoch 1888/2000 +2025-02-19 22:07:59,951 Current Learning Rate: 0.0040630934 +2025-02-19 22:07:59,951 Train Loss: 0.0000802, Val Loss: 0.0001171 +2025-02-19 22:07:59,951 Epoch 1889/2000 +2025-02-19 22:08:41,836 Current Learning Rate: 0.0041403545 +2025-02-19 22:08:41,836 Train Loss: 0.0000835, Val Loss: 0.0001185 +2025-02-19 22:08:41,837 Epoch 1890/2000 +2025-02-19 22:09:24,802 Current Learning Rate: 0.0042178277 +2025-02-19 22:09:24,802 Train Loss: 0.0000976, Val Loss: 0.0001265 +2025-02-19 22:09:24,802 Epoch 1891/2000 +2025-02-19 22:10:06,550 Current Learning Rate: 0.0042954938 +2025-02-19 22:10:06,551 Train Loss: 0.0001235, Val Loss: 0.0001282 +2025-02-19 22:10:06,551 Epoch 1892/2000 +2025-02-19 22:10:48,588 Current Learning Rate: 0.0043733338 +2025-02-19 22:10:48,588 Train Loss: 0.0001125, Val Loss: 0.0001505 +2025-02-19 22:10:48,589 Epoch 1893/2000 +2025-02-19 22:11:30,766 Current Learning Rate: 0.0044513284 +2025-02-19 22:11:30,767 Train Loss: 0.0000900, Val Loss: 0.0001332 +2025-02-19 22:11:30,767 Epoch 1894/2000 +2025-02-19 22:12:12,848 Current Learning Rate: 0.0045294584 +2025-02-19 22:12:12,848 Train Loss: 0.0001489, Val Loss: 0.0001338 +2025-02-19 22:12:12,849 Epoch 1895/2000 +2025-02-19 22:12:55,427 Current Learning Rate: 0.0046077045 +2025-02-19 22:12:55,428 Train Loss: 0.0001402, Val Loss: 0.0001526 +2025-02-19 22:12:55,428 Epoch 1896/2000 +2025-02-19 22:13:37,658 Current Learning Rate: 0.0046860474 +2025-02-19 22:13:37,659 Train Loss: 0.0000976, Val Loss: 0.0001262 +2025-02-19 22:13:37,659 Epoch 1897/2000 +2025-02-19 22:14:20,073 Current Learning Rate: 0.0047644677 +2025-02-19 22:14:20,074 
Train Loss: 0.0001413, Val Loss: 0.0001328 +2025-02-19 22:14:20,074 Epoch 1898/2000 +2025-02-19 22:15:02,521 Current Learning Rate: 0.0048429462 +2025-02-19 22:15:02,522 Train Loss: 0.0001154, Val Loss: 0.0001220 +2025-02-19 22:15:02,551 Epoch 1899/2000 +2025-02-19 22:15:44,805 Current Learning Rate: 0.0049214634 +2025-02-19 22:15:44,806 Train Loss: 0.0000972, Val Loss: 0.0001162 +2025-02-19 22:15:44,806 Epoch 1900/2000 +2025-02-19 22:16:27,072 Current Learning Rate: 0.0050000000 +2025-02-19 22:16:27,072 Train Loss: 0.0001036, Val Loss: 0.0001208 +2025-02-19 22:16:27,073 Epoch 1901/2000 +2025-02-19 22:17:09,475 Current Learning Rate: 0.0050785366 +2025-02-19 22:17:09,475 Train Loss: 0.0001100, Val Loss: 0.0001332 +2025-02-19 22:17:09,476 Epoch 1902/2000 +2025-02-19 22:17:51,725 Current Learning Rate: 0.0051570538 +2025-02-19 22:17:51,726 Train Loss: 0.0000968, Val Loss: 0.0001340 +2025-02-19 22:17:51,726 Epoch 1903/2000 +2025-02-19 22:18:33,876 Current Learning Rate: 0.0052355323 +2025-02-19 22:18:33,876 Train Loss: 0.0001534, Val Loss: 0.0001496 +2025-02-19 22:18:33,877 Epoch 1904/2000 +2025-02-19 22:19:16,188 Current Learning Rate: 0.0053139526 +2025-02-19 22:19:16,189 Train Loss: 0.0001425, Val Loss: 0.0001565 +2025-02-19 22:19:16,189 Epoch 1905/2000 +2025-02-19 22:19:58,330 Current Learning Rate: 0.0053922955 +2025-02-19 22:19:58,330 Train Loss: 0.0001329, Val Loss: 0.0001422 +2025-02-19 22:19:58,331 Epoch 1906/2000 +2025-02-19 22:20:39,655 Current Learning Rate: 0.0054705416 +2025-02-19 22:20:39,655 Train Loss: 0.0001021, Val Loss: 0.0001395 +2025-02-19 22:20:39,655 Epoch 1907/2000 +2025-02-19 22:21:22,481 Current Learning Rate: 0.0055486716 +2025-02-19 22:21:22,482 Train Loss: 0.0000800, Val Loss: 0.0001192 +2025-02-19 22:21:22,482 Epoch 1908/2000 +2025-02-19 22:22:04,372 Current Learning Rate: 0.0056266662 +2025-02-19 22:22:04,372 Train Loss: 0.0000891, Val Loss: 0.0001150 +2025-02-19 22:22:04,372 Epoch 1909/2000 +2025-02-19 22:22:46,300 Current Learning 
Rate: 0.0057045062 +2025-02-19 22:22:46,301 Train Loss: 0.0000956, Val Loss: 0.0001321 +2025-02-19 22:22:46,301 Epoch 1910/2000 +2025-02-19 22:23:28,499 Current Learning Rate: 0.0057821723 +2025-02-19 22:23:28,499 Train Loss: 0.0000832, Val Loss: 0.0001234 +2025-02-19 22:23:28,500 Epoch 1911/2000 +2025-02-19 22:24:10,574 Current Learning Rate: 0.0058596455 +2025-02-19 22:24:10,574 Train Loss: 0.0001440, Val Loss: 0.0001340 +2025-02-19 22:24:10,574 Epoch 1912/2000 +2025-02-19 22:24:53,001 Current Learning Rate: 0.0059369066 +2025-02-19 22:24:53,004 Train Loss: 0.0001243, Val Loss: 0.0001322 +2025-02-19 22:24:53,007 Epoch 1913/2000 +2025-02-19 22:25:35,383 Current Learning Rate: 0.0060139365 +2025-02-19 22:25:35,383 Train Loss: 0.0000960, Val Loss: 0.0001394 +2025-02-19 22:25:35,384 Epoch 1914/2000 +2025-02-19 22:26:17,133 Current Learning Rate: 0.0060907162 +2025-02-19 22:26:17,133 Train Loss: 0.0000996, Val Loss: 0.0001283 +2025-02-19 22:26:17,133 Epoch 1915/2000 +2025-02-19 22:27:00,111 Current Learning Rate: 0.0061672268 +2025-02-19 22:27:00,112 Train Loss: 0.0001092, Val Loss: 0.0001367 +2025-02-19 22:27:00,112 Epoch 1916/2000 +2025-02-19 22:27:42,397 Current Learning Rate: 0.0062434494 +2025-02-19 22:27:42,398 Train Loss: 0.0000911, Val Loss: 0.0001210 +2025-02-19 22:27:42,398 Epoch 1917/2000 +2025-02-19 22:28:24,680 Current Learning Rate: 0.0063193652 +2025-02-19 22:28:24,681 Train Loss: 0.0001419, Val Loss: 0.0001528 +2025-02-19 22:28:24,681 Epoch 1918/2000 +2025-02-19 22:29:07,007 Current Learning Rate: 0.0063949555 +2025-02-19 22:29:07,008 Train Loss: 0.0001463, Val Loss: 0.0001431 +2025-02-19 22:29:07,008 Epoch 1919/2000 +2025-02-19 22:29:49,362 Current Learning Rate: 0.0064702016 +2025-02-19 22:29:49,363 Train Loss: 0.0001140, Val Loss: 0.0001392 +2025-02-19 22:29:49,363 Epoch 1920/2000 +2025-02-19 22:30:31,549 Current Learning Rate: 0.0065450850 +2025-02-19 22:30:31,549 Train Loss: 0.0001505, Val Loss: 0.0001467 +2025-02-19 22:30:31,550 Epoch 1921/2000 
+2025-02-19 22:31:13,573 Current Learning Rate: 0.0066195871 +2025-02-19 22:31:13,574 Train Loss: 0.0001019, Val Loss: 0.0001287 +2025-02-19 22:31:13,574 Epoch 1922/2000 +2025-02-19 22:31:55,370 Current Learning Rate: 0.0066936896 +2025-02-19 22:31:55,372 Train Loss: 0.0000953, Val Loss: 0.0001329 +2025-02-19 22:31:55,372 Epoch 1923/2000 +2025-02-19 22:32:37,376 Current Learning Rate: 0.0067673742 +2025-02-19 22:32:37,377 Train Loss: 0.0001637, Val Loss: 0.0001543 +2025-02-19 22:32:37,377 Epoch 1924/2000 +2025-02-19 22:33:19,652 Current Learning Rate: 0.0068406228 +2025-02-19 22:33:19,653 Train Loss: 0.0001346, Val Loss: 0.0001441 +2025-02-19 22:33:19,653 Epoch 1925/2000 +2025-02-19 22:34:01,381 Current Learning Rate: 0.0069134172 +2025-02-19 22:34:01,382 Train Loss: 0.0001321, Val Loss: 0.0001538 +2025-02-19 22:34:01,382 Epoch 1926/2000 +2025-02-19 22:34:44,562 Current Learning Rate: 0.0069857395 +2025-02-19 22:34:44,563 Train Loss: 0.0001683, Val Loss: 0.0001617 +2025-02-19 22:34:44,563 Epoch 1927/2000 +2025-02-19 22:35:27,105 Current Learning Rate: 0.0070575718 +2025-02-19 22:35:27,105 Train Loss: 0.0001089, Val Loss: 0.0001478 +2025-02-19 22:35:27,106 Epoch 1928/2000 +2025-02-19 22:36:09,691 Current Learning Rate: 0.0071288965 +2025-02-19 22:36:09,692 Train Loss: 0.0001447, Val Loss: 0.0001574 +2025-02-19 22:36:09,692 Epoch 1929/2000 +2025-02-19 22:36:52,179 Current Learning Rate: 0.0071996958 +2025-02-19 22:36:52,180 Train Loss: 0.0001252, Val Loss: 0.0001554 +2025-02-19 22:36:52,180 Epoch 1930/2000 +2025-02-19 22:37:34,145 Current Learning Rate: 0.0072699525 +2025-02-19 22:37:34,145 Train Loss: 0.0001056, Val Loss: 0.0001865 +2025-02-19 22:37:34,146 Epoch 1931/2000 +2025-02-19 22:38:16,952 Current Learning Rate: 0.0073396491 +2025-02-19 22:38:16,952 Train Loss: 0.0001800, Val Loss: 0.0002176 +2025-02-19 22:38:16,952 Epoch 1932/2000 +2025-02-19 22:38:59,356 Current Learning Rate: 0.0074087684 +2025-02-19 22:38:59,356 Train Loss: 0.0001851, Val Loss: 0.0001904 
+2025-02-19 22:38:59,356 Epoch 1933/2000 +2025-02-19 22:39:41,904 Current Learning Rate: 0.0074772933 +2025-02-19 22:39:41,905 Train Loss: 0.0001925, Val Loss: 0.0001784 +2025-02-19 22:39:41,905 Epoch 1934/2000 +2025-02-19 22:40:24,317 Current Learning Rate: 0.0075452071 +2025-02-19 22:40:24,318 Train Loss: 0.0001543, Val Loss: 0.0002164 +2025-02-19 22:40:24,318 Epoch 1935/2000 +2025-02-19 22:41:06,058 Current Learning Rate: 0.0076124928 +2025-02-19 22:41:06,059 Train Loss: 0.0001793, Val Loss: 0.0002206 +2025-02-19 22:41:06,059 Epoch 1936/2000 +2025-02-19 22:41:48,120 Current Learning Rate: 0.0076791340 +2025-02-19 22:41:48,121 Train Loss: 0.0001668, Val Loss: 0.0001478 +2025-02-19 22:41:48,121 Epoch 1937/2000 +2025-02-19 22:42:30,487 Current Learning Rate: 0.0077451141 +2025-02-19 22:42:30,488 Train Loss: 0.0001232, Val Loss: 0.0001596 +2025-02-19 22:42:30,489 Epoch 1938/2000 +2025-02-19 22:43:13,245 Current Learning Rate: 0.0078104169 +2025-02-19 22:43:13,245 Train Loss: 0.0001629, Val Loss: 0.0001607 +2025-02-19 22:43:13,246 Epoch 1939/2000 +2025-02-19 22:43:55,637 Current Learning Rate: 0.0078750263 +2025-02-19 22:43:55,638 Train Loss: 0.0001516, Val Loss: 0.0001655 +2025-02-19 22:43:55,638 Epoch 1940/2000 +2025-02-19 22:44:37,235 Current Learning Rate: 0.0079389263 +2025-02-19 22:44:37,235 Train Loss: 0.0001898, Val Loss: 0.0002017 +2025-02-19 22:44:37,236 Epoch 1941/2000 +2025-02-19 22:45:19,206 Current Learning Rate: 0.0080021011 +2025-02-19 22:45:19,206 Train Loss: 0.0001266, Val Loss: 0.0001591 +2025-02-19 22:45:19,206 Epoch 1942/2000 +2025-02-19 22:46:01,103 Current Learning Rate: 0.0080645353 +2025-02-19 22:46:01,103 Train Loss: 0.0002042, Val Loss: 0.0001687 +2025-02-19 22:46:01,103 Epoch 1943/2000 +2025-02-19 22:46:44,253 Current Learning Rate: 0.0081262133 +2025-02-19 22:46:44,254 Train Loss: 0.0002302, Val Loss: 0.0002141 +2025-02-19 22:46:44,254 Epoch 1944/2000 +2025-02-19 22:47:26,626 Current Learning Rate: 0.0081871199 +2025-02-19 22:47:26,627 
Train Loss: 0.0003717, Val Loss: 0.0002818 +2025-02-19 22:47:26,627 Epoch 1945/2000 +2025-02-19 22:48:08,302 Current Learning Rate: 0.0082472402 +2025-02-19 22:48:08,303 Train Loss: 0.0001707, Val Loss: 0.0001702 +2025-02-19 22:48:08,303 Epoch 1946/2000 +2025-02-19 22:48:51,207 Current Learning Rate: 0.0083065593 +2025-02-19 22:48:51,207 Train Loss: 0.0002534, Val Loss: 0.0001818 +2025-02-19 22:48:51,207 Epoch 1947/2000 +2025-02-19 22:49:33,407 Current Learning Rate: 0.0083650626 +2025-02-19 22:49:33,408 Train Loss: 0.0001095, Val Loss: 0.0001492 +2025-02-19 22:49:33,408 Epoch 1948/2000 +2025-02-19 22:50:15,758 Current Learning Rate: 0.0084227355 +2025-02-19 22:50:15,759 Train Loss: 0.0001998, Val Loss: 0.0002220 +2025-02-19 22:50:15,759 Epoch 1949/2000 +2025-02-19 22:50:57,709 Current Learning Rate: 0.0084795640 +2025-02-19 22:50:57,710 Train Loss: 0.0002730, Val Loss: 0.0004523 +2025-02-19 22:50:57,710 Epoch 1950/2000 +2025-02-19 22:51:40,202 Current Learning Rate: 0.0085355339 +2025-02-19 22:51:40,202 Train Loss: 0.0002057, Val Loss: 0.0001969 +2025-02-19 22:51:40,202 Epoch 1951/2000 +2025-02-19 22:52:22,280 Current Learning Rate: 0.0085906315 +2025-02-19 22:52:22,281 Train Loss: 0.0002039, Val Loss: 0.0002044 +2025-02-19 22:52:22,281 Epoch 1952/2000 +2025-02-19 22:53:04,737 Current Learning Rate: 0.0086448431 +2025-02-19 22:53:04,738 Train Loss: 0.0002607, Val Loss: 0.0002117 +2025-02-19 22:53:04,738 Epoch 1953/2000 +2025-02-19 22:53:47,057 Current Learning Rate: 0.0086981555 +2025-02-19 22:53:47,058 Train Loss: 0.0001919, Val Loss: 0.0001806 +2025-02-19 22:53:47,058 Epoch 1954/2000 +2025-02-19 22:54:28,593 Current Learning Rate: 0.0087505553 +2025-02-19 22:54:28,594 Train Loss: 0.0002016, Val Loss: 0.0002376 +2025-02-19 22:54:28,594 Epoch 1955/2000 +2025-02-19 22:55:11,366 Current Learning Rate: 0.0088020298 +2025-02-19 22:55:11,367 Train Loss: 0.0001624, Val Loss: 0.0002090 +2025-02-19 22:55:11,367 Epoch 1956/2000 +2025-02-19 22:55:53,217 Current Learning 
Rate: 0.0088525662 +2025-02-19 22:55:53,218 Train Loss: 0.0002190, Val Loss: 0.0001754 +2025-02-19 22:55:53,218 Epoch 1957/2000 +2025-02-19 22:56:35,310 Current Learning Rate: 0.0089021520 +2025-02-19 22:56:35,311 Train Loss: 0.0001496, Val Loss: 0.0001489 +2025-02-19 22:56:35,311 Epoch 1958/2000 +2025-02-19 22:57:17,140 Current Learning Rate: 0.0089507751 +2025-02-19 22:57:17,141 Train Loss: 0.0001191, Val Loss: 0.0001490 +2025-02-19 22:57:17,141 Epoch 1959/2000 +2025-02-19 22:57:59,581 Current Learning Rate: 0.0089984233 +2025-02-19 22:57:59,582 Train Loss: 0.0001000, Val Loss: 0.0001413 +2025-02-19 22:57:59,582 Epoch 1960/2000 +2025-02-19 22:58:42,099 Current Learning Rate: 0.0090450850 +2025-02-19 22:58:42,100 Train Loss: 0.0001847, Val Loss: 0.0001682 +2025-02-19 22:58:42,100 Epoch 1961/2000 +2025-02-19 22:59:24,450 Current Learning Rate: 0.0090907486 +2025-02-19 22:59:24,451 Train Loss: 0.0001196, Val Loss: 0.0001644 +2025-02-19 22:59:24,451 Epoch 1962/2000 +2025-02-19 23:00:06,211 Current Learning Rate: 0.0091354029 +2025-02-19 23:00:06,211 Train Loss: 0.0001267, Val Loss: 0.0001576 +2025-02-19 23:00:06,212 Epoch 1963/2000 +2025-02-19 23:00:48,509 Current Learning Rate: 0.0091790368 +2025-02-19 23:00:48,509 Train Loss: 0.0001523, Val Loss: 0.0001555 +2025-02-19 23:00:48,509 Epoch 1964/2000 +2025-02-19 23:01:30,403 Current Learning Rate: 0.0092216396 +2025-02-19 23:01:30,404 Train Loss: 0.0001647, Val Loss: 0.0001812 +2025-02-19 23:01:30,405 Epoch 1965/2000 +2025-02-19 23:02:12,250 Current Learning Rate: 0.0092632008 +2025-02-19 23:02:12,251 Train Loss: 0.0001409, Val Loss: 0.0001682 +2025-02-19 23:02:12,251 Epoch 1966/2000 +2025-02-19 23:02:54,691 Current Learning Rate: 0.0093037101 +2025-02-19 23:02:54,691 Train Loss: 0.0002039, Val Loss: 0.0001932 +2025-02-19 23:02:54,692 Epoch 1967/2000 +2025-02-19 23:03:36,636 Current Learning Rate: 0.0093431576 +2025-02-19 23:03:36,637 Train Loss: 0.0001362, Val Loss: 0.0001742 +2025-02-19 23:03:36,637 Epoch 1968/2000 
+2025-02-19 23:04:18,029 Current Learning Rate: 0.0093815334 +2025-02-19 23:04:18,030 Train Loss: 0.0001362, Val Loss: 0.0001519 +2025-02-19 23:04:18,030 Epoch 1969/2000 +2025-02-19 23:05:00,582 Current Learning Rate: 0.0094188282 +2025-02-19 23:05:00,583 Train Loss: 0.0001752, Val Loss: 0.0001738 +2025-02-19 23:05:00,583 Epoch 1970/2000 +2025-02-19 23:05:43,239 Current Learning Rate: 0.0094550326 +2025-02-19 23:05:43,240 Train Loss: 0.0001188, Val Loss: 0.0001660 +2025-02-19 23:05:43,240 Epoch 1971/2000 +2025-02-19 23:06:24,937 Current Learning Rate: 0.0094901379 +2025-02-19 23:06:24,938 Train Loss: 0.0001203, Val Loss: 0.0001414 +2025-02-19 23:06:24,938 Epoch 1972/2000 +2025-02-19 23:07:06,896 Current Learning Rate: 0.0095241353 +2025-02-19 23:07:06,897 Train Loss: 0.0001439, Val Loss: 0.0001545 +2025-02-19 23:07:06,897 Epoch 1973/2000 +2025-02-19 23:07:49,046 Current Learning Rate: 0.0095570164 +2025-02-19 23:07:49,046 Train Loss: 0.0001527, Val Loss: 0.0001759 +2025-02-19 23:07:49,047 Epoch 1974/2000 +2025-02-19 23:08:31,337 Current Learning Rate: 0.0095887731 +2025-02-19 23:08:31,338 Train Loss: 0.0001655, Val Loss: 0.0001495 +2025-02-19 23:08:31,338 Epoch 1975/2000 +2025-02-19 23:09:13,251 Current Learning Rate: 0.0096193977 +2025-02-19 23:09:13,251 Train Loss: 0.0001409, Val Loss: 0.0001636 +2025-02-19 23:09:13,251 Epoch 1976/2000 +2025-02-19 23:09:55,300 Current Learning Rate: 0.0096488824 +2025-02-19 23:09:55,300 Train Loss: 0.0001358, Val Loss: 0.0001756 +2025-02-19 23:09:55,300 Epoch 1977/2000 +2025-02-19 23:10:36,993 Current Learning Rate: 0.0096772202 +2025-02-19 23:10:36,993 Train Loss: 0.0001465, Val Loss: 0.0001756 +2025-02-19 23:10:36,994 Epoch 1978/2000 +2025-02-19 23:11:19,409 Current Learning Rate: 0.0097044038 +2025-02-19 23:11:19,409 Train Loss: 0.0001653, Val Loss: 0.0001840 +2025-02-19 23:11:19,409 Epoch 1979/2000 +2025-02-19 23:12:01,481 Current Learning Rate: 0.0097304268 +2025-02-19 23:12:01,481 Train Loss: 0.0001586, Val Loss: 0.0001853 
+2025-02-19 23:12:01,482 Epoch 1980/2000 +2025-02-19 23:12:43,241 Current Learning Rate: 0.0097552826 +2025-02-19 23:12:43,242 Train Loss: 0.0001534, Val Loss: 0.0002021 +2025-02-19 23:12:43,242 Epoch 1981/2000 +2025-02-19 23:13:26,201 Current Learning Rate: 0.0097789651 +2025-02-19 23:13:26,201 Train Loss: 0.0002067, Val Loss: 0.0002214 +2025-02-19 23:13:26,202 Epoch 1982/2000 +2025-02-19 23:14:08,250 Current Learning Rate: 0.0098014684 +2025-02-19 23:14:08,251 Train Loss: 0.0001967, Val Loss: 0.0002134 +2025-02-19 23:14:08,251 Epoch 1983/2000 +2025-02-19 23:14:50,553 Current Learning Rate: 0.0098227871 +2025-02-19 23:14:50,553 Train Loss: 0.0002215, Val Loss: 0.0002174 +2025-02-19 23:14:50,554 Epoch 1984/2000 +2025-02-19 23:15:32,803 Current Learning Rate: 0.0098429158 +2025-02-19 23:15:32,803 Train Loss: 0.0001811, Val Loss: 0.0002417 +2025-02-19 23:15:32,806 Epoch 1985/2000 +2025-02-19 23:16:15,361 Current Learning Rate: 0.0098618496 +2025-02-19 23:16:15,361 Train Loss: 0.0002305, Val Loss: 0.0002748 +2025-02-19 23:16:15,361 Epoch 1986/2000 +2025-02-19 23:16:57,519 Current Learning Rate: 0.0098795838 +2025-02-19 23:16:57,519 Train Loss: 0.0002369, Val Loss: 0.0002484 +2025-02-19 23:16:57,520 Epoch 1987/2000 +2025-02-19 23:17:39,956 Current Learning Rate: 0.0098961141 +2025-02-19 23:17:39,956 Train Loss: 0.0001961, Val Loss: 0.0002165 +2025-02-19 23:17:39,957 Epoch 1988/2000 +2025-02-19 23:18:22,319 Current Learning Rate: 0.0099114363 +2025-02-19 23:18:22,319 Train Loss: 0.0002211, Val Loss: 0.0002122 +2025-02-19 23:18:22,319 Epoch 1989/2000 +2025-02-19 23:19:03,976 Current Learning Rate: 0.0099255466 +2025-02-19 23:19:03,978 Train Loss: 0.0001995, Val Loss: 0.0002037 +2025-02-19 23:19:03,978 Epoch 1990/2000 +2025-02-19 23:19:46,445 Current Learning Rate: 0.0099384417 +2025-02-19 23:19:46,445 Train Loss: 0.0002718, Val Loss: 0.0003817 +2025-02-19 23:19:46,445 Epoch 1991/2000 +2025-02-19 23:20:28,822 Current Learning Rate: 0.0099501183 +2025-02-19 23:20:28,823 
Train Loss: 0.0002095, Val Loss: 0.0002040 +2025-02-19 23:20:28,823 Epoch 1992/2000 +2025-02-19 23:21:10,953 Current Learning Rate: 0.0099605735 +2025-02-19 23:21:10,954 Train Loss: 0.0001510, Val Loss: 0.0001694 +2025-02-19 23:21:10,954 Epoch 1993/2000 +2025-02-19 23:21:53,160 Current Learning Rate: 0.0099698048 +2025-02-19 23:21:53,161 Train Loss: 0.0002271, Val Loss: 0.0001779 +2025-02-19 23:21:53,161 Epoch 1994/2000 +2025-02-19 23:22:35,745 Current Learning Rate: 0.0099778098 +2025-02-19 23:22:35,746 Train Loss: 0.0001492, Val Loss: 0.0001818 +2025-02-19 23:22:35,746 Epoch 1995/2000 +2025-02-19 23:23:18,030 Current Learning Rate: 0.0099845867 +2025-02-19 23:23:18,030 Train Loss: 0.0001904, Val Loss: 0.0002779 +2025-02-19 23:23:18,031 Epoch 1996/2000 +2025-02-19 23:24:00,194 Current Learning Rate: 0.0099901336 +2025-02-19 23:24:00,195 Train Loss: 0.0001541, Val Loss: 0.0001704 +2025-02-19 23:24:00,196 Epoch 1997/2000 +2025-02-19 23:24:42,119 Current Learning Rate: 0.0099944494 +2025-02-19 23:24:42,120 Train Loss: 0.0003314, Val Loss: 0.0007625 +2025-02-19 23:24:42,120 Epoch 1998/2000 +2025-02-19 23:25:24,206 Current Learning Rate: 0.0099975328 +2025-02-19 23:25:24,207 Train Loss: 0.0002247, Val Loss: 0.0001969 +2025-02-19 23:25:24,207 Epoch 1999/2000 +2025-02-19 23:26:07,512 Current Learning Rate: 0.0099993832 +2025-02-19 23:26:07,512 Train Loss: 0.0001910, Val Loss: 0.0002180 +2025-02-19 23:26:07,513 Epoch 2000/2000 +2025-02-19 23:26:50,011 Current Learning Rate: 0.0100000000 +2025-02-19 23:26:50,013 Train Loss: 0.0004258, Val Loss: 0.0009987 +2025-02-19 23:26:54,898 Testing completed and best model saved. 
diff --git a/Exp3_Kuroshio_forecasting/logs/Triton_Kuroshio_uv_20250218_exp1_training_log.log b/Exp3_Kuroshio_forecasting/logs/Triton_Kuroshio_uv_20250218_exp1_training_log.log new file mode 100644 index 0000000000000000000000000000000000000000..0b53ffc7783f5b97168ad9fffb696d8a260115a1 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/logs/Triton_Kuroshio_uv_20250218_exp1_training_log.log @@ -0,0 +1,6037 @@ +2025-02-18 14:15:03,208 Added key: store_based_barrier_key:1 to store for rank: 3 +2025-02-18 14:15:03,302 Added key: store_based_barrier_key:1 to store for rank: 6 +2025-02-18 14:15:03,311 Added key: store_based_barrier_key:1 to store for rank: 4 +2025-02-18 14:15:03,389 Added key: store_based_barrier_key:1 to store for rank: 0 +2025-02-18 14:15:03,415 Added key: store_based_barrier_key:1 to store for rank: 2 +2025-02-18 14:15:03,437 Added key: store_based_barrier_key:1 to store for rank: 7 +2025-02-18 14:15:03,444 Added key: store_based_barrier_key:1 to store for rank: 1 +2025-02-18 14:15:03,461 Added key: store_based_barrier_key:1 to store for rank: 5 +2025-02-18 14:15:25,625 Epoch 1/2000 +2025-02-18 14:15:30,589 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,589 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. +2025-02-18 14:15:30,591 Reducer buckets have been rebuilt in this iteration. 
+2025-02-18 14:16:09,968 Current Learning Rate: 0.0099993832 +2025-02-18 14:16:11,879 Train Loss: 1.3628420, Val Loss: 0.1698331 +2025-02-18 14:16:11,879 Epoch 2/2000 +2025-02-18 14:16:54,193 Current Learning Rate: 0.0099975328 +2025-02-18 14:16:55,899 Train Loss: 0.0687663, Val Loss: 0.0357662 +2025-02-18 14:16:55,900 Epoch 3/2000 +2025-02-18 14:17:37,571 Current Learning Rate: 0.0099944494 +2025-02-18 14:17:39,033 Train Loss: 0.0274687, Val Loss: 0.0195614 +2025-02-18 14:17:39,036 Epoch 4/2000 +2025-02-18 14:18:20,798 Current Learning Rate: 0.0099901336 +2025-02-18 14:18:21,846 Train Loss: 0.0202074, Val Loss: 0.0180349 +2025-02-18 14:18:21,849 Epoch 5/2000 +2025-02-18 14:19:03,419 Current Learning Rate: 0.0099845867 +2025-02-18 14:19:04,787 Train Loss: 0.0193941, Val Loss: 0.0177117 +2025-02-18 14:19:04,788 Epoch 6/2000 +2025-02-18 14:19:47,265 Current Learning Rate: 0.0099778098 +2025-02-18 14:19:49,257 Train Loss: 0.0191047, Val Loss: 0.0175423 +2025-02-18 14:19:49,257 Epoch 7/2000 +2025-02-18 14:20:30,631 Current Learning Rate: 0.0099698048 +2025-02-18 14:20:32,449 Train Loss: 0.0189317, Val Loss: 0.0174137 +2025-02-18 14:20:32,449 Epoch 8/2000 +2025-02-18 14:21:14,244 Current Learning Rate: 0.0099605735 +2025-02-18 14:21:15,711 Train Loss: 0.0187991, Val Loss: 0.0173049 +2025-02-18 14:21:15,711 Epoch 9/2000 +2025-02-18 14:21:57,838 Current Learning Rate: 0.0099501183 +2025-02-18 14:21:59,767 Train Loss: 0.0186761, Val Loss: 0.0172068 +2025-02-18 14:21:59,767 Epoch 10/2000 +2025-02-18 14:22:42,174 Current Learning Rate: 0.0099384417 +2025-02-18 14:22:44,221 Train Loss: 0.0185759, Val Loss: 0.0171160 +2025-02-18 14:22:44,222 Epoch 11/2000 +2025-02-18 14:23:27,348 Current Learning Rate: 0.0099255466 +2025-02-18 14:23:29,500 Train Loss: 0.0184690, Val Loss: 0.0170284 +2025-02-18 14:23:29,501 Epoch 12/2000 +2025-02-18 14:24:12,728 Current Learning Rate: 0.0099114363 +2025-02-18 14:24:14,554 Train Loss: 0.0183741, Val Loss: 0.0169453 +2025-02-18 14:24:14,555 Epoch 
13/2000 +2025-02-18 14:24:57,049 Current Learning Rate: 0.0098961141 +2025-02-18 14:24:59,074 Train Loss: 0.0182787, Val Loss: 0.0168658 +2025-02-18 14:24:59,075 Epoch 14/2000 +2025-02-18 14:25:40,634 Current Learning Rate: 0.0098795838 +2025-02-18 14:25:42,153 Train Loss: 0.0181902, Val Loss: 0.0167882 +2025-02-18 14:25:42,154 Epoch 15/2000 +2025-02-18 14:26:24,721 Current Learning Rate: 0.0098618496 +2025-02-18 14:26:26,667 Train Loss: 0.0181102, Val Loss: 0.0167148 +2025-02-18 14:26:26,668 Epoch 16/2000 +2025-02-18 14:27:08,442 Current Learning Rate: 0.0098429158 +2025-02-18 14:27:09,549 Train Loss: 0.0180223, Val Loss: 0.0166435 +2025-02-18 14:27:09,562 Epoch 17/2000 +2025-02-18 14:27:51,189 Current Learning Rate: 0.0098227871 +2025-02-18 14:27:52,566 Train Loss: 0.0179441, Val Loss: 0.0165739 +2025-02-18 14:27:52,567 Epoch 18/2000 +2025-02-18 14:28:35,055 Current Learning Rate: 0.0098014684 +2025-02-18 14:28:37,323 Train Loss: 0.0178550, Val Loss: 0.0164465 +2025-02-18 14:28:37,323 Epoch 19/2000 +2025-02-18 14:29:19,996 Current Learning Rate: 0.0097789651 +2025-02-18 14:29:21,663 Train Loss: 0.0176387, Val Loss: 0.0162635 +2025-02-18 14:29:21,663 Epoch 20/2000 +2025-02-18 14:30:04,707 Current Learning Rate: 0.0097552826 +2025-02-18 14:30:06,649 Train Loss: 0.0174251, Val Loss: 0.0161026 +2025-02-18 14:30:06,650 Epoch 21/2000 +2025-02-18 14:30:48,728 Current Learning Rate: 0.0097304268 +2025-02-18 14:30:50,911 Train Loss: 0.0172205, Val Loss: 0.0159079 +2025-02-18 14:30:50,911 Epoch 22/2000 +2025-02-18 14:31:32,310 Current Learning Rate: 0.0097044038 +2025-02-18 14:31:34,096 Train Loss: 0.0169694, Val Loss: 0.0156002 +2025-02-18 14:31:34,096 Epoch 23/2000 +2025-02-18 14:32:15,744 Current Learning Rate: 0.0096772202 +2025-02-18 14:32:17,783 Train Loss: 0.0165015, Val Loss: 0.0151188 +2025-02-18 14:32:17,784 Epoch 24/2000 +2025-02-18 14:32:59,680 Current Learning Rate: 0.0096488824 +2025-02-18 14:33:00,999 Train Loss: 0.0158210, Val Loss: 0.0143949 +2025-02-18 
14:33:00,999 Epoch 25/2000 +2025-02-18 14:33:44,305 Current Learning Rate: 0.0096193977 +2025-02-18 14:33:46,362 Train Loss: 0.0150978, Val Loss: 0.0137453 +2025-02-18 14:33:46,362 Epoch 26/2000 +2025-02-18 14:34:27,838 Current Learning Rate: 0.0095887731 +2025-02-18 14:34:29,394 Train Loss: 0.0146063, Val Loss: 0.0133372 +2025-02-18 14:34:29,395 Epoch 27/2000 +2025-02-18 14:35:12,328 Current Learning Rate: 0.0095570164 +2025-02-18 14:35:14,297 Train Loss: 0.0141579, Val Loss: 0.0129903 +2025-02-18 14:35:14,297 Epoch 28/2000 +2025-02-18 14:35:56,478 Current Learning Rate: 0.0095241353 +2025-02-18 14:35:58,229 Train Loss: 0.0138481, Val Loss: 0.0127644 +2025-02-18 14:35:58,230 Epoch 29/2000 +2025-02-18 14:36:41,151 Current Learning Rate: 0.0094901379 +2025-02-18 14:36:42,490 Train Loss: 0.0135901, Val Loss: 0.0124894 +2025-02-18 14:36:42,490 Epoch 30/2000 +2025-02-18 14:37:25,197 Current Learning Rate: 0.0094550326 +2025-02-18 14:37:26,963 Train Loss: 0.0133189, Val Loss: 0.0121462 +2025-02-18 14:37:26,964 Epoch 31/2000 +2025-02-18 14:38:08,257 Current Learning Rate: 0.0094188282 +2025-02-18 14:38:09,109 Train Loss: 0.0128272, Val Loss: 0.0116701 +2025-02-18 14:38:09,110 Epoch 32/2000 +2025-02-18 14:38:52,114 Current Learning Rate: 0.0093815334 +2025-02-18 14:38:53,648 Train Loss: 0.0122802, Val Loss: 0.0112547 +2025-02-18 14:38:53,655 Epoch 33/2000 +2025-02-18 14:39:36,240 Current Learning Rate: 0.0093431576 +2025-02-18 14:39:37,614 Train Loss: 0.0117762, Val Loss: 0.0106204 +2025-02-18 14:39:37,614 Epoch 34/2000 +2025-02-18 14:40:19,881 Current Learning Rate: 0.0093037101 +2025-02-18 14:40:21,787 Train Loss: 0.0112313, Val Loss: 0.0101279 +2025-02-18 14:40:21,787 Epoch 35/2000 +2025-02-18 14:41:03,572 Current Learning Rate: 0.0092632008 +2025-02-18 14:41:05,186 Train Loss: 0.0108187, Val Loss: 0.0096379 +2025-02-18 14:41:05,187 Epoch 36/2000 +2025-02-18 14:41:48,547 Current Learning Rate: 0.0092216396 +2025-02-18 14:41:50,333 Train Loss: 0.0102309, Val Loss: 
0.0092521 +2025-02-18 14:41:50,333 Epoch 37/2000 +2025-02-18 14:42:32,710 Current Learning Rate: 0.0091790368 +2025-02-18 14:42:34,137 Train Loss: 0.0098315, Val Loss: 0.0088337 +2025-02-18 14:42:34,137 Epoch 38/2000 +2025-02-18 14:43:17,308 Current Learning Rate: 0.0091354029 +2025-02-18 14:43:18,961 Train Loss: 0.0093248, Val Loss: 0.0083890 +2025-02-18 14:43:18,961 Epoch 39/2000 +2025-02-18 14:44:01,556 Current Learning Rate: 0.0090907486 +2025-02-18 14:44:04,120 Train Loss: 0.0087695, Val Loss: 0.0080588 +2025-02-18 14:44:04,120 Epoch 40/2000 +2025-02-18 14:44:45,580 Current Learning Rate: 0.0090450850 +2025-02-18 14:44:47,208 Train Loss: 0.0081769, Val Loss: 0.0073063 +2025-02-18 14:44:47,210 Epoch 41/2000 +2025-02-18 14:45:29,054 Current Learning Rate: 0.0089984233 +2025-02-18 14:45:30,702 Train Loss: 0.0076267, Val Loss: 0.0069956 +2025-02-18 14:45:30,703 Epoch 42/2000 +2025-02-18 14:46:14,279 Current Learning Rate: 0.0089507751 +2025-02-18 14:46:14,280 Train Loss: 0.0076356, Val Loss: 0.0090246 +2025-02-18 14:46:14,281 Epoch 43/2000 +2025-02-18 14:46:57,793 Current Learning Rate: 0.0089021520 +2025-02-18 14:46:59,597 Train Loss: 0.0074178, Val Loss: 0.0064407 +2025-02-18 14:46:59,598 Epoch 44/2000 +2025-02-18 14:47:43,250 Current Learning Rate: 0.0088525662 +2025-02-18 14:47:45,064 Train Loss: 0.0068171, Val Loss: 0.0060696 +2025-02-18 14:47:45,064 Epoch 45/2000 +2025-02-18 14:48:27,630 Current Learning Rate: 0.0088020298 +2025-02-18 14:48:29,252 Train Loss: 0.0066029, Val Loss: 0.0059524 +2025-02-18 14:48:29,252 Epoch 46/2000 +2025-02-18 14:49:11,638 Current Learning Rate: 0.0087505553 +2025-02-18 14:49:13,527 Train Loss: 0.0065815, Val Loss: 0.0057815 +2025-02-18 14:49:13,528 Epoch 47/2000 +2025-02-18 14:49:56,170 Current Learning Rate: 0.0086981555 +2025-02-18 14:49:56,171 Train Loss: 0.0065231, Val Loss: 0.0061614 +2025-02-18 14:49:56,171 Epoch 48/2000 +2025-02-18 14:50:38,244 Current Learning Rate: 0.0086448431 +2025-02-18 14:50:39,788 Train Loss: 
0.0062545, Val Loss: 0.0056254 +2025-02-18 14:50:39,788 Epoch 49/2000 +2025-02-18 14:51:21,523 Current Learning Rate: 0.0085906315 +2025-02-18 14:51:23,034 Train Loss: 0.0062356, Val Loss: 0.0054859 +2025-02-18 14:51:23,034 Epoch 50/2000 +2025-02-18 14:52:04,824 Current Learning Rate: 0.0085355339 +2025-02-18 14:52:06,781 Train Loss: 0.0059626, Val Loss: 0.0053286 +2025-02-18 14:52:06,781 Epoch 51/2000 +2025-02-18 14:52:48,477 Current Learning Rate: 0.0084795640 +2025-02-18 14:52:50,316 Train Loss: 0.0060748, Val Loss: 0.0053129 +2025-02-18 14:52:50,316 Epoch 52/2000 +2025-02-18 14:53:33,232 Current Learning Rate: 0.0084227355 +2025-02-18 14:53:35,167 Train Loss: 0.0054856, Val Loss: 0.0050178 +2025-02-18 14:53:35,167 Epoch 53/2000 +2025-02-18 14:54:18,383 Current Learning Rate: 0.0083650626 +2025-02-18 14:54:20,319 Train Loss: 0.0056379, Val Loss: 0.0049119 +2025-02-18 14:54:20,319 Epoch 54/2000 +2025-02-18 14:55:03,575 Current Learning Rate: 0.0083065593 +2025-02-18 14:55:03,576 Train Loss: 0.0057683, Val Loss: 0.0053156 +2025-02-18 14:55:03,577 Epoch 55/2000 +2025-02-18 14:55:45,603 Current Learning Rate: 0.0082472402 +2025-02-18 14:55:46,977 Train Loss: 0.0053413, Val Loss: 0.0048144 +2025-02-18 14:55:46,978 Epoch 56/2000 +2025-02-18 14:56:28,648 Current Learning Rate: 0.0081871199 +2025-02-18 14:56:30,211 Train Loss: 0.0051032, Val Loss: 0.0047444 +2025-02-18 14:56:30,211 Epoch 57/2000 +2025-02-18 14:57:11,673 Current Learning Rate: 0.0081262133 +2025-02-18 14:57:13,453 Train Loss: 0.0049701, Val Loss: 0.0045479 +2025-02-18 14:57:13,453 Epoch 58/2000 +2025-02-18 14:57:54,813 Current Learning Rate: 0.0080645353 +2025-02-18 14:57:54,814 Train Loss: 0.0047992, Val Loss: 0.0050299 +2025-02-18 14:57:54,814 Epoch 59/2000 +2025-02-18 14:58:38,162 Current Learning Rate: 0.0080021011 +2025-02-18 14:58:40,130 Train Loss: 0.0049871, Val Loss: 0.0042919 +2025-02-18 14:58:40,131 Epoch 60/2000 +2025-02-18 14:59:21,637 Current Learning Rate: 0.0079389263 +2025-02-18 
14:59:21,638 Train Loss: 0.0046179, Val Loss: 0.0044405 +2025-02-18 14:59:21,638 Epoch 61/2000 +2025-02-18 15:00:04,326 Current Learning Rate: 0.0078750263 +2025-02-18 15:00:04,327 Train Loss: 0.0049569, Val Loss: 0.0044025 +2025-02-18 15:00:04,327 Epoch 62/2000 +2025-02-18 15:00:46,724 Current Learning Rate: 0.0078104169 +2025-02-18 15:00:46,724 Train Loss: 0.0047013, Val Loss: 0.0046409 +2025-02-18 15:00:46,724 Epoch 63/2000 +2025-02-18 15:01:29,451 Current Learning Rate: 0.0077451141 +2025-02-18 15:01:30,719 Train Loss: 0.0044193, Val Loss: 0.0039515 +2025-02-18 15:01:30,719 Epoch 64/2000 +2025-02-18 15:02:13,483 Current Learning Rate: 0.0076791340 +2025-02-18 15:02:13,484 Train Loss: 0.0046267, Val Loss: 0.0039819 +2025-02-18 15:02:13,485 Epoch 65/2000 +2025-02-18 15:02:55,725 Current Learning Rate: 0.0076124928 +2025-02-18 15:02:56,885 Train Loss: 0.0042735, Val Loss: 0.0039007 +2025-02-18 15:02:56,886 Epoch 66/2000 +2025-02-18 15:03:38,702 Current Learning Rate: 0.0075452071 +2025-02-18 15:03:40,434 Train Loss: 0.0040702, Val Loss: 0.0036998 +2025-02-18 15:03:40,435 Epoch 67/2000 +2025-02-18 15:04:21,978 Current Learning Rate: 0.0074772933 +2025-02-18 15:04:21,979 Train Loss: 0.0041402, Val Loss: 0.0043896 +2025-02-18 15:04:21,979 Epoch 68/2000 +2025-02-18 15:05:05,501 Current Learning Rate: 0.0074087684 +2025-02-18 15:05:05,502 Train Loss: 0.0040681, Val Loss: 0.0037126 +2025-02-18 15:05:05,502 Epoch 69/2000 +2025-02-18 15:05:47,627 Current Learning Rate: 0.0073396491 +2025-02-18 15:05:49,693 Train Loss: 0.0041826, Val Loss: 0.0035981 +2025-02-18 15:05:49,697 Epoch 70/2000 +2025-02-18 15:06:31,219 Current Learning Rate: 0.0072699525 +2025-02-18 15:06:33,095 Train Loss: 0.0039610, Val Loss: 0.0035859 +2025-02-18 15:06:33,096 Epoch 71/2000 +2025-02-18 15:07:16,333 Current Learning Rate: 0.0071996958 +2025-02-18 15:07:16,334 Train Loss: 0.0040461, Val Loss: 0.0038279 +2025-02-18 15:07:16,334 Epoch 72/2000 +2025-02-18 15:07:59,643 Current Learning Rate: 
0.0071288965 +2025-02-18 15:08:01,314 Train Loss: 0.0038196, Val Loss: 0.0034883 +2025-02-18 15:08:01,314 Epoch 73/2000 +2025-02-18 15:08:44,217 Current Learning Rate: 0.0070575718 +2025-02-18 15:08:44,218 Train Loss: 0.0037394, Val Loss: 0.0035490 +2025-02-18 15:08:44,219 Epoch 74/2000 +2025-02-18 15:09:26,810 Current Learning Rate: 0.0069857395 +2025-02-18 15:09:26,811 Train Loss: 0.0039330, Val Loss: 0.0036724 +2025-02-18 15:09:26,821 Epoch 75/2000 +2025-02-18 15:10:09,949 Current Learning Rate: 0.0069134172 +2025-02-18 15:10:09,950 Train Loss: 0.0036523, Val Loss: 0.0037484 +2025-02-18 15:10:09,950 Epoch 76/2000 +2025-02-18 15:10:52,358 Current Learning Rate: 0.0068406228 +2025-02-18 15:10:53,681 Train Loss: 0.0038443, Val Loss: 0.0034668 +2025-02-18 15:10:53,690 Epoch 77/2000 +2025-02-18 15:11:36,386 Current Learning Rate: 0.0067673742 +2025-02-18 15:11:38,111 Train Loss: 0.0038872, Val Loss: 0.0033315 +2025-02-18 15:11:38,111 Epoch 78/2000 +2025-02-18 15:12:20,782 Current Learning Rate: 0.0066936896 +2025-02-18 15:12:22,756 Train Loss: 0.0038132, Val Loss: 0.0032982 +2025-02-18 15:12:22,756 Epoch 79/2000 +2025-02-18 15:13:05,301 Current Learning Rate: 0.0066195871 +2025-02-18 15:13:07,177 Train Loss: 0.0035646, Val Loss: 0.0031638 +2025-02-18 15:13:07,178 Epoch 80/2000 +2025-02-18 15:13:50,518 Current Learning Rate: 0.0065450850 +2025-02-18 15:13:50,519 Train Loss: 0.0034006, Val Loss: 0.0031946 +2025-02-18 15:13:50,519 Epoch 81/2000 +2025-02-18 15:14:33,180 Current Learning Rate: 0.0064702016 +2025-02-18 15:14:34,976 Train Loss: 0.0033588, Val Loss: 0.0030893 +2025-02-18 15:14:34,977 Epoch 82/2000 +2025-02-18 15:15:17,469 Current Learning Rate: 0.0063949555 +2025-02-18 15:15:19,359 Train Loss: 0.0033879, Val Loss: 0.0030405 +2025-02-18 15:15:19,359 Epoch 83/2000 +2025-02-18 15:16:01,120 Current Learning Rate: 0.0063193652 +2025-02-18 15:16:03,834 Train Loss: 0.0031879, Val Loss: 0.0029718 +2025-02-18 15:16:03,835 Epoch 84/2000 +2025-02-18 15:16:45,422 
Current Learning Rate: 0.0062434494 +2025-02-18 15:16:45,423 Train Loss: 0.0032266, Val Loss: 0.0031460 +2025-02-18 15:16:45,423 Epoch 85/2000 +2025-02-18 15:17:28,969 Current Learning Rate: 0.0061672268 +2025-02-18 15:17:30,500 Train Loss: 0.0034299, Val Loss: 0.0029372 +2025-02-18 15:17:30,501 Epoch 86/2000 +2025-02-18 15:18:13,226 Current Learning Rate: 0.0060907162 +2025-02-18 15:18:13,227 Train Loss: 0.0031645, Val Loss: 0.0029600 +2025-02-18 15:18:13,228 Epoch 87/2000 +2025-02-18 15:18:55,920 Current Learning Rate: 0.0060139365 +2025-02-18 15:18:55,921 Train Loss: 0.0034704, Val Loss: 0.0032086 +2025-02-18 15:18:55,921 Epoch 88/2000 +2025-02-18 15:19:38,466 Current Learning Rate: 0.0059369066 +2025-02-18 15:19:40,324 Train Loss: 0.0032061, Val Loss: 0.0029067 +2025-02-18 15:19:40,325 Epoch 89/2000 +2025-02-18 15:20:22,570 Current Learning Rate: 0.0058596455 +2025-02-18 15:20:24,452 Train Loss: 0.0030000, Val Loss: 0.0028417 +2025-02-18 15:20:24,452 Epoch 90/2000 +2025-02-18 15:21:05,591 Current Learning Rate: 0.0057821723 +2025-02-18 15:21:06,909 Train Loss: 0.0029832, Val Loss: 0.0028316 +2025-02-18 15:21:06,909 Epoch 91/2000 +2025-02-18 15:21:48,842 Current Learning Rate: 0.0057045062 +2025-02-18 15:21:48,843 Train Loss: 0.0031305, Val Loss: 0.0028579 +2025-02-18 15:21:48,843 Epoch 92/2000 +2025-02-18 15:22:32,087 Current Learning Rate: 0.0056266662 +2025-02-18 15:22:32,088 Train Loss: 0.0030638, Val Loss: 0.0029005 +2025-02-18 15:22:32,088 Epoch 93/2000 +2025-02-18 15:23:15,526 Current Learning Rate: 0.0055486716 +2025-02-18 15:23:17,138 Train Loss: 0.0031297, Val Loss: 0.0028207 +2025-02-18 15:23:17,138 Epoch 94/2000 +2025-02-18 15:23:59,840 Current Learning Rate: 0.0054705416 +2025-02-18 15:23:59,841 Train Loss: 0.0031764, Val Loss: 0.0029541 +2025-02-18 15:23:59,841 Epoch 95/2000 +2025-02-18 15:24:42,563 Current Learning Rate: 0.0053922955 +2025-02-18 15:24:42,564 Train Loss: 0.0028410, Val Loss: 0.0028266 +2025-02-18 15:24:42,564 Epoch 96/2000 
+2025-02-18 15:25:25,665 Current Learning Rate: 0.0053139526 +2025-02-18 15:25:25,666 Train Loss: 0.0031975, Val Loss: 0.0028658 +2025-02-18 15:25:25,666 Epoch 97/2000 +2025-02-18 15:26:08,886 Current Learning Rate: 0.0052355323 +2025-02-18 15:26:10,570 Train Loss: 0.0029171, Val Loss: 0.0027451 +2025-02-18 15:26:10,581 Epoch 98/2000 +2025-02-18 15:26:53,594 Current Learning Rate: 0.0051570538 +2025-02-18 15:26:55,286 Train Loss: 0.0030150, Val Loss: 0.0027333 +2025-02-18 15:26:55,286 Epoch 99/2000 +2025-02-18 15:27:37,802 Current Learning Rate: 0.0050785366 +2025-02-18 15:27:39,704 Train Loss: 0.0029330, Val Loss: 0.0027265 +2025-02-18 15:27:39,705 Epoch 100/2000 +2025-02-18 15:28:21,599 Current Learning Rate: 0.0050000000 +2025-02-18 15:28:23,311 Train Loss: 0.0028290, Val Loss: 0.0026971 +2025-02-18 15:28:23,311 Epoch 101/2000 +2025-02-18 15:29:05,828 Current Learning Rate: 0.0049214634 +2025-02-18 15:29:07,674 Train Loss: 0.0028858, Val Loss: 0.0026960 +2025-02-18 15:29:07,674 Epoch 102/2000 +2025-02-18 15:29:49,923 Current Learning Rate: 0.0048429462 +2025-02-18 15:29:51,663 Train Loss: 0.0029532, Val Loss: 0.0026186 +2025-02-18 15:29:51,663 Epoch 103/2000 +2025-02-18 15:30:33,303 Current Learning Rate: 0.0047644677 +2025-02-18 15:30:33,304 Train Loss: 0.0027671, Val Loss: 0.0026594 +2025-02-18 15:30:33,305 Epoch 104/2000 +2025-02-18 15:31:16,313 Current Learning Rate: 0.0046860474 +2025-02-18 15:31:16,316 Train Loss: 0.0029455, Val Loss: 0.0027470 +2025-02-18 15:31:16,317 Epoch 105/2000 +2025-02-18 15:31:58,554 Current Learning Rate: 0.0046077045 +2025-02-18 15:31:58,555 Train Loss: 0.0029949, Val Loss: 0.0027208 +2025-02-18 15:31:58,555 Epoch 106/2000 +2025-02-18 15:32:41,495 Current Learning Rate: 0.0045294584 +2025-02-18 15:32:41,496 Train Loss: 0.0031252, Val Loss: 0.0027911 +2025-02-18 15:32:41,496 Epoch 107/2000 +2025-02-18 15:33:23,791 Current Learning Rate: 0.0044513284 +2025-02-18 15:33:25,313 Train Loss: 0.0027322, Val Loss: 0.0025305 +2025-02-18 
15:33:25,314 Epoch 108/2000 +2025-02-18 15:34:07,154 Current Learning Rate: 0.0043733338 +2025-02-18 15:34:07,155 Train Loss: 0.0027422, Val Loss: 0.0025760 +2025-02-18 15:34:07,155 Epoch 109/2000 +2025-02-18 15:34:51,055 Current Learning Rate: 0.0042954938 +2025-02-18 15:34:51,056 Train Loss: 0.0030014, Val Loss: 0.0025514 +2025-02-18 15:34:51,056 Epoch 110/2000 +2025-02-18 15:35:34,206 Current Learning Rate: 0.0042178277 +2025-02-18 15:35:34,207 Train Loss: 0.0030148, Val Loss: 0.0026140 +2025-02-18 15:35:34,207 Epoch 111/2000 +2025-02-18 15:36:16,790 Current Learning Rate: 0.0041403545 +2025-02-18 15:36:18,205 Train Loss: 0.0026457, Val Loss: 0.0025104 +2025-02-18 15:36:18,205 Epoch 112/2000 +2025-02-18 15:37:01,153 Current Learning Rate: 0.0040630934 +2025-02-18 15:37:03,763 Train Loss: 0.0026000, Val Loss: 0.0024824 +2025-02-18 15:37:03,763 Epoch 113/2000 +2025-02-18 15:37:45,215 Current Learning Rate: 0.0039860635 +2025-02-18 15:37:46,673 Train Loss: 0.0027027, Val Loss: 0.0024382 +2025-02-18 15:37:46,674 Epoch 114/2000 +2025-02-18 15:38:28,234 Current Learning Rate: 0.0039092838 +2025-02-18 15:38:28,235 Train Loss: 0.0026419, Val Loss: 0.0024624 +2025-02-18 15:38:28,235 Epoch 115/2000 +2025-02-18 15:39:11,099 Current Learning Rate: 0.0038327732 +2025-02-18 15:39:11,100 Train Loss: 0.0031512, Val Loss: 0.0024630 +2025-02-18 15:39:11,100 Epoch 116/2000 +2025-02-18 15:39:53,683 Current Learning Rate: 0.0037565506 +2025-02-18 15:39:53,684 Train Loss: 0.0027621, Val Loss: 0.0024817 +2025-02-18 15:39:53,684 Epoch 117/2000 +2025-02-18 15:40:37,852 Current Learning Rate: 0.0036806348 +2025-02-18 15:40:37,853 Train Loss: 0.0027294, Val Loss: 0.0025535 +2025-02-18 15:40:37,853 Epoch 118/2000 +2025-02-18 15:41:20,361 Current Learning Rate: 0.0036050445 +2025-02-18 15:41:20,362 Train Loss: 0.0025651, Val Loss: 0.0025561 +2025-02-18 15:41:20,362 Epoch 119/2000 +2025-02-18 15:42:02,818 Current Learning Rate: 0.0035297984 +2025-02-18 15:42:02,818 Train Loss: 0.0031162, Val 
Loss: 0.0025939 +2025-02-18 15:42:02,819 Epoch 120/2000 +2025-02-18 15:42:45,830 Current Learning Rate: 0.0034549150 +2025-02-18 15:42:45,831 Train Loss: 0.0027383, Val Loss: 0.0025477 +2025-02-18 15:42:45,831 Epoch 121/2000 +2025-02-18 15:43:28,082 Current Learning Rate: 0.0033804129 +2025-02-18 15:43:29,869 Train Loss: 0.0027425, Val Loss: 0.0023956 +2025-02-18 15:43:29,869 Epoch 122/2000 +2025-02-18 15:44:12,125 Current Learning Rate: 0.0033063104 +2025-02-18 15:44:12,126 Train Loss: 0.0026657, Val Loss: 0.0024754 +2025-02-18 15:44:12,127 Epoch 123/2000 +2025-02-18 15:44:55,260 Current Learning Rate: 0.0032326258 +2025-02-18 15:44:57,403 Train Loss: 0.0024242, Val Loss: 0.0023507 +2025-02-18 15:44:57,403 Epoch 124/2000 +2025-02-18 15:45:40,537 Current Learning Rate: 0.0031593772 +2025-02-18 15:45:40,537 Train Loss: 0.0025713, Val Loss: 0.0023812 +2025-02-18 15:45:40,538 Epoch 125/2000 +2025-02-18 15:46:23,441 Current Learning Rate: 0.0030865828 +2025-02-18 15:46:25,521 Train Loss: 0.0025928, Val Loss: 0.0023480 +2025-02-18 15:46:25,521 Epoch 126/2000 +2025-02-18 15:47:07,296 Current Learning Rate: 0.0030142605 +2025-02-18 15:47:08,912 Train Loss: 0.0024050, Val Loss: 0.0023392 +2025-02-18 15:47:08,914 Epoch 127/2000 +2025-02-18 15:47:50,737 Current Learning Rate: 0.0029424282 +2025-02-18 15:47:52,171 Train Loss: 0.0025779, Val Loss: 0.0023248 +2025-02-18 15:47:52,171 Epoch 128/2000 +2025-02-18 15:48:34,452 Current Learning Rate: 0.0028711035 +2025-02-18 15:48:34,453 Train Loss: 0.0025542, Val Loss: 0.0024302 +2025-02-18 15:48:34,453 Epoch 129/2000 +2025-02-18 15:49:17,368 Current Learning Rate: 0.0028003042 +2025-02-18 15:49:17,369 Train Loss: 0.0027851, Val Loss: 0.0023806 +2025-02-18 15:49:17,369 Epoch 130/2000 +2025-02-18 15:49:59,989 Current Learning Rate: 0.0027300475 +2025-02-18 15:49:59,990 Train Loss: 0.0024214, Val Loss: 0.0023312 +2025-02-18 15:49:59,991 Epoch 131/2000 +2025-02-18 15:50:43,407 Current Learning Rate: 0.0026603509 +2025-02-18 
15:50:45,584 Train Loss: 0.0025682, Val Loss: 0.0022957 +2025-02-18 15:50:45,584 Epoch 132/2000 +2025-02-18 15:51:28,546 Current Learning Rate: 0.0025912316 +2025-02-18 15:51:30,558 Train Loss: 0.0023948, Val Loss: 0.0022712 +2025-02-18 15:51:30,559 Epoch 133/2000 +2025-02-18 15:52:13,160 Current Learning Rate: 0.0025227067 +2025-02-18 15:52:13,161 Train Loss: 0.0026540, Val Loss: 0.0023643 +2025-02-18 15:52:13,161 Epoch 134/2000 +2025-02-18 15:52:55,851 Current Learning Rate: 0.0024547929 +2025-02-18 15:52:55,852 Train Loss: 0.0026912, Val Loss: 0.0022917 +2025-02-18 15:52:55,853 Epoch 135/2000 +2025-02-18 15:53:38,329 Current Learning Rate: 0.0023875072 +2025-02-18 15:53:39,791 Train Loss: 0.0023403, Val Loss: 0.0022606 +2025-02-18 15:53:39,791 Epoch 136/2000 +2025-02-18 15:54:21,966 Current Learning Rate: 0.0023208660 +2025-02-18 15:54:21,967 Train Loss: 0.0025837, Val Loss: 0.0022768 +2025-02-18 15:54:21,967 Epoch 137/2000 +2025-02-18 15:55:04,893 Current Learning Rate: 0.0022548859 +2025-02-18 15:55:06,420 Train Loss: 0.0022579, Val Loss: 0.0022366 +2025-02-18 15:55:06,420 Epoch 138/2000 +2025-02-18 15:55:49,472 Current Learning Rate: 0.0021895831 +2025-02-18 15:55:51,165 Train Loss: 0.0023904, Val Loss: 0.0022284 +2025-02-18 15:55:51,165 Epoch 139/2000 +2025-02-18 15:56:33,771 Current Learning Rate: 0.0021249737 +2025-02-18 15:56:33,772 Train Loss: 0.0024008, Val Loss: 0.0022358 +2025-02-18 15:56:33,772 Epoch 140/2000 +2025-02-18 15:57:16,026 Current Learning Rate: 0.0020610737 +2025-02-18 15:57:17,931 Train Loss: 0.0022246, Val Loss: 0.0022110 +2025-02-18 15:57:17,932 Epoch 141/2000 +2025-02-18 15:57:59,378 Current Learning Rate: 0.0019978989 +2025-02-18 15:57:59,378 Train Loss: 0.0023086, Val Loss: 0.0022171 +2025-02-18 15:57:59,379 Epoch 142/2000 +2025-02-18 15:58:41,749 Current Learning Rate: 0.0019354647 +2025-02-18 15:58:42,831 Train Loss: 0.0022637, Val Loss: 0.0021975 +2025-02-18 15:58:42,831 Epoch 143/2000 +2025-02-18 15:59:24,774 Current Learning 
Rate: 0.0018737867 +2025-02-18 15:59:24,776 Train Loss: 0.0024424, Val Loss: 0.0022221 +2025-02-18 15:59:24,776 Epoch 144/2000 +2025-02-18 16:00:08,319 Current Learning Rate: 0.0018128801 +2025-02-18 16:00:08,319 Train Loss: 0.0022830, Val Loss: 0.0022219 +2025-02-18 16:00:08,320 Epoch 145/2000 +2025-02-18 16:00:50,699 Current Learning Rate: 0.0017527598 +2025-02-18 16:00:50,700 Train Loss: 0.0025422, Val Loss: 0.0022204 +2025-02-18 16:00:50,701 Epoch 146/2000 +2025-02-18 16:01:33,665 Current Learning Rate: 0.0016934407 +2025-02-18 16:01:35,648 Train Loss: 0.0022766, Val Loss: 0.0021824 +2025-02-18 16:01:35,648 Epoch 147/2000 +2025-02-18 16:02:18,678 Current Learning Rate: 0.0016349374 +2025-02-18 16:02:18,679 Train Loss: 0.0023299, Val Loss: 0.0022020 +2025-02-18 16:02:18,679 Epoch 148/2000 +2025-02-18 16:03:01,210 Current Learning Rate: 0.0015772645 +2025-02-18 16:03:01,210 Train Loss: 0.0023008, Val Loss: 0.0022081 +2025-02-18 16:03:01,211 Epoch 149/2000 +2025-02-18 16:03:44,894 Current Learning Rate: 0.0015204360 +2025-02-18 16:03:46,886 Train Loss: 0.0022495, Val Loss: 0.0021472 +2025-02-18 16:03:46,886 Epoch 150/2000 +2025-02-18 16:04:28,626 Current Learning Rate: 0.0014644661 +2025-02-18 16:04:29,826 Train Loss: 0.0022816, Val Loss: 0.0021302 +2025-02-18 16:04:29,827 Epoch 151/2000 +2025-02-18 16:05:13,144 Current Learning Rate: 0.0014093685 +2025-02-18 16:05:13,145 Train Loss: 0.0025645, Val Loss: 0.0021428 +2025-02-18 16:05:13,145 Epoch 152/2000 +2025-02-18 16:05:55,306 Current Learning Rate: 0.0013551569 +2025-02-18 16:05:55,307 Train Loss: 0.0024435, Val Loss: 0.0021654 +2025-02-18 16:05:55,307 Epoch 153/2000 +2025-02-18 16:06:37,669 Current Learning Rate: 0.0013018445 +2025-02-18 16:06:39,081 Train Loss: 0.0025198, Val Loss: 0.0021174 +2025-02-18 16:06:39,088 Epoch 154/2000 +2025-02-18 16:07:22,351 Current Learning Rate: 0.0012494447 +2025-02-18 16:07:24,243 Train Loss: 0.0021932, Val Loss: 0.0020996 +2025-02-18 16:07:24,244 Epoch 155/2000 +2025-02-18 
16:08:07,232 Current Learning Rate: 0.0011979702 +2025-02-18 16:08:07,233 Train Loss: 0.0021102, Val Loss: 0.0021050 +2025-02-18 16:08:07,234 Epoch 156/2000 +2025-02-18 16:08:50,923 Current Learning Rate: 0.0011474338 +2025-02-18 16:08:52,954 Train Loss: 0.0021750, Val Loss: 0.0020915 +2025-02-18 16:08:52,954 Epoch 157/2000 +2025-02-18 16:09:35,087 Current Learning Rate: 0.0010978480 +2025-02-18 16:09:35,088 Train Loss: 0.0021748, Val Loss: 0.0021046 +2025-02-18 16:09:35,088 Epoch 158/2000 +2025-02-18 16:10:18,820 Current Learning Rate: 0.0010492249 +2025-02-18 16:10:18,821 Train Loss: 0.0022929, Val Loss: 0.0020975 +2025-02-18 16:10:18,821 Epoch 159/2000 +2025-02-18 16:11:02,377 Current Learning Rate: 0.0010015767 +2025-02-18 16:11:02,377 Train Loss: 0.0022445, Val Loss: 0.0020939 +2025-02-18 16:11:02,378 Epoch 160/2000 +2025-02-18 16:11:45,112 Current Learning Rate: 0.0009549150 +2025-02-18 16:11:47,072 Train Loss: 0.0023150, Val Loss: 0.0020744 +2025-02-18 16:11:47,072 Epoch 161/2000 +2025-02-18 16:12:28,392 Current Learning Rate: 0.0009092514 +2025-02-18 16:12:29,439 Train Loss: 0.0023384, Val Loss: 0.0020734 +2025-02-18 16:12:29,442 Epoch 162/2000 +2025-02-18 16:13:11,385 Current Learning Rate: 0.0008645971 +2025-02-18 16:13:11,386 Train Loss: 0.0024140, Val Loss: 0.0020823 +2025-02-18 16:13:11,386 Epoch 163/2000 +2025-02-18 16:13:53,692 Current Learning Rate: 0.0008209632 +2025-02-18 16:13:53,693 Train Loss: 0.0024373, Val Loss: 0.0020741 +2025-02-18 16:13:53,693 Epoch 164/2000 +2025-02-18 16:14:36,506 Current Learning Rate: 0.0007783604 +2025-02-18 16:14:38,472 Train Loss: 0.0022746, Val Loss: 0.0020577 +2025-02-18 16:14:38,473 Epoch 165/2000 +2025-02-18 16:15:21,574 Current Learning Rate: 0.0007367992 +2025-02-18 16:15:21,575 Train Loss: 0.0022988, Val Loss: 0.0020613 +2025-02-18 16:15:21,575 Epoch 166/2000 +2025-02-18 16:16:04,434 Current Learning Rate: 0.0006962899 +2025-02-18 16:16:05,795 Train Loss: 0.0023153, Val Loss: 0.0020529 +2025-02-18 
16:16:05,796 Epoch 167/2000 +2025-02-18 16:16:47,301 Current Learning Rate: 0.0006568424 +2025-02-18 16:16:48,693 Train Loss: 0.0021537, Val Loss: 0.0020437 +2025-02-18 16:16:48,694 Epoch 168/2000 +2025-02-18 16:17:32,178 Current Learning Rate: 0.0006184666 +2025-02-18 16:17:34,047 Train Loss: 0.0020853, Val Loss: 0.0020344 +2025-02-18 16:17:34,047 Epoch 169/2000 +2025-02-18 16:18:16,768 Current Learning Rate: 0.0005811718 +2025-02-18 16:18:18,900 Train Loss: 0.0022062, Val Loss: 0.0020296 +2025-02-18 16:18:18,900 Epoch 170/2000 +2025-02-18 16:19:01,729 Current Learning Rate: 0.0005449674 +2025-02-18 16:19:03,786 Train Loss: 0.0020276, Val Loss: 0.0020274 +2025-02-18 16:19:03,786 Epoch 171/2000 +2025-02-18 16:19:45,107 Current Learning Rate: 0.0005098621 +2025-02-18 16:19:46,453 Train Loss: 0.0022592, Val Loss: 0.0020274 +2025-02-18 16:19:46,454 Epoch 172/2000 +2025-02-18 16:20:28,147 Current Learning Rate: 0.0004758647 +2025-02-18 16:20:28,148 Train Loss: 0.0022123, Val Loss: 0.0020308 +2025-02-18 16:20:28,148 Epoch 173/2000 +2025-02-18 16:21:11,787 Current Learning Rate: 0.0004429836 +2025-02-18 16:21:13,859 Train Loss: 0.0021227, Val Loss: 0.0020174 +2025-02-18 16:21:13,860 Epoch 174/2000 +2025-02-18 16:21:55,363 Current Learning Rate: 0.0004112269 +2025-02-18 16:21:57,456 Train Loss: 0.0021057, Val Loss: 0.0020160 +2025-02-18 16:21:57,457 Epoch 175/2000 +2025-02-18 16:22:39,165 Current Learning Rate: 0.0003806023 +2025-02-18 16:22:39,166 Train Loss: 0.0022247, Val Loss: 0.0020161 +2025-02-18 16:22:39,166 Epoch 176/2000 +2025-02-18 16:23:22,391 Current Learning Rate: 0.0003511176 +2025-02-18 16:23:24,313 Train Loss: 0.0020290, Val Loss: 0.0020113 +2025-02-18 16:23:24,313 Epoch 177/2000 +2025-02-18 16:24:07,311 Current Learning Rate: 0.0003227798 +2025-02-18 16:24:08,888 Train Loss: 0.0020590, Val Loss: 0.0020087 +2025-02-18 16:24:08,889 Epoch 178/2000 +2025-02-18 16:24:51,745 Current Learning Rate: 0.0002955962 +2025-02-18 16:24:51,746 Train Loss: 0.0021380, Val 
Loss: 0.0020131 +2025-02-18 16:24:51,746 Epoch 179/2000 +2025-02-18 16:25:33,992 Current Learning Rate: 0.0002695732 +2025-02-18 16:25:35,918 Train Loss: 0.0021452, Val Loss: 0.0020065 +2025-02-18 16:25:35,918 Epoch 180/2000 +2025-02-18 16:26:18,046 Current Learning Rate: 0.0002447174 +2025-02-18 16:26:18,048 Train Loss: 0.0022274, Val Loss: 0.0020067 +2025-02-18 16:26:18,048 Epoch 181/2000 +2025-02-18 16:27:01,311 Current Learning Rate: 0.0002210349 +2025-02-18 16:27:04,017 Train Loss: 0.0021063, Val Loss: 0.0020031 +2025-02-18 16:27:04,018 Epoch 182/2000 +2025-02-18 16:27:46,352 Current Learning Rate: 0.0001985316 +2025-02-18 16:27:46,353 Train Loss: 0.0021554, Val Loss: 0.0020034 +2025-02-18 16:27:46,353 Epoch 183/2000 +2025-02-18 16:28:29,309 Current Learning Rate: 0.0001772129 +2025-02-18 16:28:30,795 Train Loss: 0.0024092, Val Loss: 0.0020027 +2025-02-18 16:28:30,795 Epoch 184/2000 +2025-02-18 16:29:12,078 Current Learning Rate: 0.0001570842 +2025-02-18 16:29:13,558 Train Loss: 0.0020361, Val Loss: 0.0019969 +2025-02-18 16:29:13,559 Epoch 185/2000 +2025-02-18 16:29:56,010 Current Learning Rate: 0.0001381504 +2025-02-18 16:29:57,610 Train Loss: 0.0021787, Val Loss: 0.0019946 +2025-02-18 16:29:57,626 Epoch 186/2000 +2025-02-18 16:30:39,171 Current Learning Rate: 0.0001204162 +2025-02-18 16:30:40,913 Train Loss: 0.0019766, Val Loss: 0.0019902 +2025-02-18 16:30:40,913 Epoch 187/2000 +2025-02-18 16:31:23,907 Current Learning Rate: 0.0001038859 +2025-02-18 16:31:25,253 Train Loss: 0.0022846, Val Loss: 0.0019884 +2025-02-18 16:31:25,254 Epoch 188/2000 +2025-02-18 16:32:08,068 Current Learning Rate: 0.0000885637 +2025-02-18 16:32:09,313 Train Loss: 0.0020690, Val Loss: 0.0019866 +2025-02-18 16:32:09,313 Epoch 189/2000 +2025-02-18 16:32:51,798 Current Learning Rate: 0.0000744534 +2025-02-18 16:32:53,347 Train Loss: 0.0021456, Val Loss: 0.0019859 +2025-02-18 16:32:53,347 Epoch 190/2000 +2025-02-18 16:33:35,036 Current Learning Rate: 0.0000615583 +2025-02-18 
16:33:35,037 Train Loss: 0.0020096, Val Loss: 0.0019868 +2025-02-18 16:33:35,037 Epoch 191/2000 +2025-02-18 16:34:17,951 Current Learning Rate: 0.0000498817 +2025-02-18 16:34:17,951 Train Loss: 0.0021014, Val Loss: 0.0019870 +2025-02-18 16:34:17,952 Epoch 192/2000 +2025-02-18 16:35:00,377 Current Learning Rate: 0.0000394265 +2025-02-18 16:35:01,751 Train Loss: 0.0021133, Val Loss: 0.0019855 +2025-02-18 16:35:01,751 Epoch 193/2000 +2025-02-18 16:35:43,333 Current Learning Rate: 0.0000301952 +2025-02-18 16:35:43,337 Train Loss: 0.0020557, Val Loss: 0.0019860 +2025-02-18 16:35:43,338 Epoch 194/2000 +2025-02-18 16:36:26,163 Current Learning Rate: 0.0000221902 +2025-02-18 16:36:27,830 Train Loss: 0.0020902, Val Loss: 0.0019851 +2025-02-18 16:36:27,831 Epoch 195/2000 +2025-02-18 16:37:09,739 Current Learning Rate: 0.0000154133 +2025-02-18 16:37:11,376 Train Loss: 0.0020577, Val Loss: 0.0019844 +2025-02-18 16:37:11,376 Epoch 196/2000 +2025-02-18 16:37:52,870 Current Learning Rate: 0.0000098664 +2025-02-18 16:37:54,391 Train Loss: 0.0021702, Val Loss: 0.0019841 +2025-02-18 16:37:54,392 Epoch 197/2000 +2025-02-18 16:38:36,351 Current Learning Rate: 0.0000055506 +2025-02-18 16:38:38,011 Train Loss: 0.0021440, Val Loss: 0.0019836 +2025-02-18 16:38:38,012 Epoch 198/2000 +2025-02-18 16:39:19,730 Current Learning Rate: 0.0000024672 +2025-02-18 16:39:21,329 Train Loss: 0.0022741, Val Loss: 0.0019835 +2025-02-18 16:39:21,330 Epoch 199/2000 +2025-02-18 16:40:04,695 Current Learning Rate: 0.0000006168 +2025-02-18 16:40:06,773 Train Loss: 0.0020641, Val Loss: 0.0019835 +2025-02-18 16:40:06,774 Epoch 200/2000 +2025-02-18 16:40:49,032 Current Learning Rate: 0.0000000000 +2025-02-18 16:40:51,054 Train Loss: 0.0020595, Val Loss: 0.0019833 +2025-02-18 16:40:51,055 Epoch 201/2000 +2025-02-18 16:41:34,126 Current Learning Rate: 0.0000006168 +2025-02-18 16:41:34,126 Train Loss: 0.0020695, Val Loss: 0.0019834 +2025-02-18 16:41:34,126 Epoch 202/2000 +2025-02-18 16:42:16,596 Current Learning 
Rate: 0.0000024672 +2025-02-18 16:42:16,596 Train Loss: 0.0020921, Val Loss: 0.0019834 +2025-02-18 16:42:16,596 Epoch 203/2000 +2025-02-18 16:42:59,368 Current Learning Rate: 0.0000055506 +2025-02-18 16:42:59,369 Train Loss: 0.0021943, Val Loss: 0.0019834 +2025-02-18 16:42:59,369 Epoch 204/2000 +2025-02-18 16:43:42,953 Current Learning Rate: 0.0000098664 +2025-02-18 16:43:42,954 Train Loss: 0.0020710, Val Loss: 0.0019834 +2025-02-18 16:43:42,955 Epoch 205/2000 +2025-02-18 16:44:26,379 Current Learning Rate: 0.0000154133 +2025-02-18 16:44:26,380 Train Loss: 0.0020787, Val Loss: 0.0019834 +2025-02-18 16:44:26,380 Epoch 206/2000 +2025-02-18 16:45:08,473 Current Learning Rate: 0.0000221902 +2025-02-18 16:45:08,474 Train Loss: 0.0021324, Val Loss: 0.0019837 +2025-02-18 16:45:08,474 Epoch 207/2000 +2025-02-18 16:45:51,237 Current Learning Rate: 0.0000301952 +2025-02-18 16:45:51,237 Train Loss: 0.0020701, Val Loss: 0.0019840 +2025-02-18 16:45:51,237 Epoch 208/2000 +2025-02-18 16:46:34,893 Current Learning Rate: 0.0000394265 +2025-02-18 16:46:34,893 Train Loss: 0.0023016, Val Loss: 0.0019862 +2025-02-18 16:46:34,894 Epoch 209/2000 +2025-02-18 16:47:17,151 Current Learning Rate: 0.0000498817 +2025-02-18 16:47:17,151 Train Loss: 0.0021189, Val Loss: 0.0019850 +2025-02-18 16:47:17,151 Epoch 210/2000 +2025-02-18 16:48:00,254 Current Learning Rate: 0.0000615583 +2025-02-18 16:48:00,255 Train Loss: 0.0020372, Val Loss: 0.0019842 +2025-02-18 16:48:00,255 Epoch 211/2000 +2025-02-18 16:48:42,868 Current Learning Rate: 0.0000744534 +2025-02-18 16:48:44,324 Train Loss: 0.0020863, Val Loss: 0.0019831 +2025-02-18 16:48:44,324 Epoch 212/2000 +2025-02-18 16:49:25,874 Current Learning Rate: 0.0000885637 +2025-02-18 16:49:27,615 Train Loss: 0.0020293, Val Loss: 0.0019824 +2025-02-18 16:49:27,616 Epoch 213/2000 +2025-02-18 16:50:10,728 Current Learning Rate: 0.0001038859 +2025-02-18 16:50:10,728 Train Loss: 0.0020420, Val Loss: 0.0019828 +2025-02-18 16:50:10,728 Epoch 214/2000 +2025-02-18 
16:50:53,667 Current Learning Rate: 0.0001204162 +2025-02-18 16:50:54,850 Train Loss: 0.0021052, Val Loss: 0.0019823 +2025-02-18 16:50:54,850 Epoch 215/2000 +2025-02-18 16:51:37,062 Current Learning Rate: 0.0001381504 +2025-02-18 16:51:37,063 Train Loss: 0.0024028, Val Loss: 0.0019872 +2025-02-18 16:51:37,064 Epoch 216/2000 +2025-02-18 16:52:19,490 Current Learning Rate: 0.0001570842 +2025-02-18 16:52:19,490 Train Loss: 0.0022716, Val Loss: 0.0019895 +2025-02-18 16:52:19,491 Epoch 217/2000 +2025-02-18 16:53:02,683 Current Learning Rate: 0.0001772129 +2025-02-18 16:53:02,684 Train Loss: 0.0022299, Val Loss: 0.0019912 +2025-02-18 16:53:02,684 Epoch 218/2000 +2025-02-18 16:53:46,070 Current Learning Rate: 0.0001985316 +2025-02-18 16:53:46,070 Train Loss: 0.0021929, Val Loss: 0.0019872 +2025-02-18 16:53:46,070 Epoch 219/2000 +2025-02-18 16:54:28,333 Current Learning Rate: 0.0002210349 +2025-02-18 16:54:28,334 Train Loss: 0.0026620, Val Loss: 0.0019922 +2025-02-18 16:54:28,334 Epoch 220/2000 +2025-02-18 16:55:11,390 Current Learning Rate: 0.0002447174 +2025-02-18 16:55:11,391 Train Loss: 0.0025068, Val Loss: 0.0020201 +2025-02-18 16:55:11,401 Epoch 221/2000 +2025-02-18 16:55:54,526 Current Learning Rate: 0.0002695732 +2025-02-18 16:55:54,527 Train Loss: 0.0021727, Val Loss: 0.0019875 +2025-02-18 16:55:54,527 Epoch 222/2000 +2025-02-18 16:56:37,924 Current Learning Rate: 0.0002955962 +2025-02-18 16:56:37,924 Train Loss: 0.0019936, Val Loss: 0.0019841 +2025-02-18 16:56:37,925 Epoch 223/2000 +2025-02-18 16:57:19,772 Current Learning Rate: 0.0003227798 +2025-02-18 16:57:19,772 Train Loss: 0.0022919, Val Loss: 0.0019945 +2025-02-18 16:57:19,773 Epoch 224/2000 +2025-02-18 16:58:03,506 Current Learning Rate: 0.0003511176 +2025-02-18 16:58:05,072 Train Loss: 0.0021292, Val Loss: 0.0019816 +2025-02-18 16:58:05,073 Epoch 225/2000 +2025-02-18 16:58:46,452 Current Learning Rate: 0.0003806023 +2025-02-18 16:58:46,454 Train Loss: 0.0020398, Val Loss: 0.0019823 +2025-02-18 
16:58:46,454 Epoch 226/2000 +2025-02-18 16:59:29,473 Current Learning Rate: 0.0004112269 +2025-02-18 16:59:30,824 Train Loss: 0.0020794, Val Loss: 0.0019798 +2025-02-18 16:59:30,824 Epoch 227/2000 +2025-02-18 17:00:13,891 Current Learning Rate: 0.0004429836 +2025-02-18 17:00:13,892 Train Loss: 0.0023814, Val Loss: 0.0019832 +2025-02-18 17:00:13,892 Epoch 228/2000 +2025-02-18 17:00:57,227 Current Learning Rate: 0.0004758647 +2025-02-18 17:00:57,228 Train Loss: 0.0021208, Val Loss: 0.0019859 +2025-02-18 17:00:57,228 Epoch 229/2000 +2025-02-18 17:01:40,463 Current Learning Rate: 0.0005098621 +2025-02-18 17:01:40,464 Train Loss: 0.0020718, Val Loss: 0.0019910 +2025-02-18 17:01:40,464 Epoch 230/2000 +2025-02-18 17:02:23,731 Current Learning Rate: 0.0005449674 +2025-02-18 17:02:23,732 Train Loss: 0.0022761, Val Loss: 0.0019842 +2025-02-18 17:02:23,732 Epoch 231/2000 +2025-02-18 17:03:06,218 Current Learning Rate: 0.0005811718 +2025-02-18 17:03:07,966 Train Loss: 0.0021898, Val Loss: 0.0019768 +2025-02-18 17:03:07,967 Epoch 232/2000 +2025-02-18 17:03:50,246 Current Learning Rate: 0.0006184666 +2025-02-18 17:03:51,275 Train Loss: 0.0020886, Val Loss: 0.0019726 +2025-02-18 17:03:51,276 Epoch 233/2000 +2025-02-18 17:04:33,764 Current Learning Rate: 0.0006568424 +2025-02-18 17:04:33,765 Train Loss: 0.0021526, Val Loss: 0.0020102 +2025-02-18 17:04:33,765 Epoch 234/2000 +2025-02-18 17:05:17,002 Current Learning Rate: 0.0006962899 +2025-02-18 17:05:17,002 Train Loss: 0.0021231, Val Loss: 0.0019973 +2025-02-18 17:05:17,003 Epoch 235/2000 +2025-02-18 17:05:59,511 Current Learning Rate: 0.0007367992 +2025-02-18 17:05:59,511 Train Loss: 0.0021176, Val Loss: 0.0019983 +2025-02-18 17:05:59,512 Epoch 236/2000 +2025-02-18 17:06:43,512 Current Learning Rate: 0.0007783604 +2025-02-18 17:06:43,512 Train Loss: 0.0020430, Val Loss: 0.0019853 +2025-02-18 17:06:43,513 Epoch 237/2000 +2025-02-18 17:07:26,475 Current Learning Rate: 0.0008209632 +2025-02-18 17:07:26,476 Train Loss: 0.0023675, Val 
Loss: 0.0020038 +2025-02-18 17:07:26,476 Epoch 238/2000 +2025-02-18 17:08:08,690 Current Learning Rate: 0.0008645971 +2025-02-18 17:08:08,691 Train Loss: 0.0023511, Val Loss: 0.0019897 +2025-02-18 17:08:08,692 Epoch 239/2000 +2025-02-18 17:08:51,622 Current Learning Rate: 0.0009092514 +2025-02-18 17:08:53,349 Train Loss: 0.0019571, Val Loss: 0.0019558 +2025-02-18 17:08:53,349 Epoch 240/2000 +2025-02-18 17:09:36,112 Current Learning Rate: 0.0009549150 +2025-02-18 17:09:36,113 Train Loss: 0.0021965, Val Loss: 0.0019822 +2025-02-18 17:09:36,114 Epoch 241/2000 +2025-02-18 17:10:18,236 Current Learning Rate: 0.0010015767 +2025-02-18 17:10:18,236 Train Loss: 0.0022462, Val Loss: 0.0019775 +2025-02-18 17:10:18,237 Epoch 242/2000 +2025-02-18 17:11:01,233 Current Learning Rate: 0.0010492249 +2025-02-18 17:11:01,234 Train Loss: 0.0021528, Val Loss: 0.0019719 +2025-02-18 17:11:01,235 Epoch 243/2000 +2025-02-18 17:11:44,787 Current Learning Rate: 0.0010978480 +2025-02-18 17:11:44,787 Train Loss: 0.0021179, Val Loss: 0.0019795 +2025-02-18 17:11:44,788 Epoch 244/2000 +2025-02-18 17:12:28,071 Current Learning Rate: 0.0011474338 +2025-02-18 17:12:28,072 Train Loss: 0.0020616, Val Loss: 0.0019595 +2025-02-18 17:12:28,072 Epoch 245/2000 +2025-02-18 17:13:10,758 Current Learning Rate: 0.0011979702 +2025-02-18 17:13:10,759 Train Loss: 0.0022047, Val Loss: 0.0019963 +2025-02-18 17:13:10,759 Epoch 246/2000 +2025-02-18 17:13:53,165 Current Learning Rate: 0.0012494447 +2025-02-18 17:13:53,166 Train Loss: 0.0022352, Val Loss: 0.0020700 +2025-02-18 17:13:53,166 Epoch 247/2000 +2025-02-18 17:14:36,330 Current Learning Rate: 0.0013018445 +2025-02-18 17:14:36,331 Train Loss: 0.0020563, Val Loss: 0.0019558 +2025-02-18 17:14:36,331 Epoch 248/2000 +2025-02-18 17:15:18,763 Current Learning Rate: 0.0013551569 +2025-02-18 17:15:18,763 Train Loss: 0.0024619, Val Loss: 0.0021336 +2025-02-18 17:15:18,764 Epoch 249/2000 +2025-02-18 17:16:01,508 Current Learning Rate: 0.0014093685 +2025-02-18 
17:16:01,509 Train Loss: 0.0022092, Val Loss: 0.0022978 +2025-02-18 17:16:01,509 Epoch 250/2000 +2025-02-18 17:16:44,431 Current Learning Rate: 0.0014644661 +2025-02-18 17:16:44,431 Train Loss: 0.0021759, Val Loss: 0.0020078 +2025-02-18 17:16:44,432 Epoch 251/2000 +2025-02-18 17:17:27,816 Current Learning Rate: 0.0015204360 +2025-02-18 17:17:27,817 Train Loss: 0.0023476, Val Loss: 0.0021104 +2025-02-18 17:17:27,818 Epoch 252/2000 +2025-02-18 17:18:11,339 Current Learning Rate: 0.0015772645 +2025-02-18 17:18:11,340 Train Loss: 0.0024153, Val Loss: 0.0021297 +2025-02-18 17:18:11,340 Epoch 253/2000 +2025-02-18 17:18:54,453 Current Learning Rate: 0.0016349374 +2025-02-18 17:18:54,454 Train Loss: 0.0020383, Val Loss: 0.0019624 +2025-02-18 17:18:54,454 Epoch 254/2000 +2025-02-18 17:19:37,857 Current Learning Rate: 0.0016934407 +2025-02-18 17:19:37,857 Train Loss: 0.0025406, Val Loss: 0.0020266 +2025-02-18 17:19:37,857 Epoch 255/2000 +2025-02-18 17:20:21,308 Current Learning Rate: 0.0017527598 +2025-02-18 17:20:23,471 Train Loss: 0.0022011, Val Loss: 0.0019523 +2025-02-18 17:20:23,472 Epoch 256/2000 +2025-02-18 17:21:06,460 Current Learning Rate: 0.0018128801 +2025-02-18 17:21:06,461 Train Loss: 0.0022263, Val Loss: 0.0019919 +2025-02-18 17:21:06,462 Epoch 257/2000 +2025-02-18 17:21:49,862 Current Learning Rate: 0.0018737867 +2025-02-18 17:21:49,863 Train Loss: 0.0020372, Val Loss: 0.0020744 +2025-02-18 17:21:49,863 Epoch 258/2000 +2025-02-18 17:22:33,125 Current Learning Rate: 0.0019354647 +2025-02-18 17:22:33,125 Train Loss: 0.0021006, Val Loss: 0.0020085 +2025-02-18 17:22:33,126 Epoch 259/2000 +2025-02-18 17:23:14,896 Current Learning Rate: 0.0019978989 +2025-02-18 17:23:16,324 Train Loss: 0.0022565, Val Loss: 0.0019517 +2025-02-18 17:23:16,340 Epoch 260/2000 +2025-02-18 17:23:58,569 Current Learning Rate: 0.0020610737 +2025-02-18 17:23:58,569 Train Loss: 0.0021173, Val Loss: 0.0020114 +2025-02-18 17:23:58,570 Epoch 261/2000 +2025-02-18 17:24:42,252 Current Learning 
Rate: 0.0021249737 +2025-02-18 17:24:44,287 Train Loss: 0.0020451, Val Loss: 0.0019251 +2025-02-18 17:24:44,287 Epoch 262/2000 +2025-02-18 17:25:27,305 Current Learning Rate: 0.0021895831 +2025-02-18 17:25:27,307 Train Loss: 0.0028658, Val Loss: 0.0025279 +2025-02-18 17:25:27,309 Epoch 263/2000 +2025-02-18 17:26:09,671 Current Learning Rate: 0.0022548859 +2025-02-18 17:26:09,671 Train Loss: 0.0022533, Val Loss: 0.0020600 +2025-02-18 17:26:09,672 Epoch 264/2000 +2025-02-18 17:26:52,712 Current Learning Rate: 0.0023208660 +2025-02-18 17:26:52,713 Train Loss: 0.0021905, Val Loss: 0.0020418 +2025-02-18 17:26:52,713 Epoch 265/2000 +2025-02-18 17:27:35,513 Current Learning Rate: 0.0023875072 +2025-02-18 17:27:35,514 Train Loss: 0.0024926, Val Loss: 0.0022231 +2025-02-18 17:27:35,514 Epoch 266/2000 +2025-02-18 17:28:18,201 Current Learning Rate: 0.0024547929 +2025-02-18 17:28:18,202 Train Loss: 0.0023036, Val Loss: 0.0020216 +2025-02-18 17:28:18,202 Epoch 267/2000 +2025-02-18 17:29:00,649 Current Learning Rate: 0.0025227067 +2025-02-18 17:29:00,650 Train Loss: 0.0027150, Val Loss: 0.0021037 +2025-02-18 17:29:00,650 Epoch 268/2000 +2025-02-18 17:29:44,410 Current Learning Rate: 0.0025912316 +2025-02-18 17:29:44,411 Train Loss: 0.0029222, Val Loss: 0.0020365 +2025-02-18 17:29:44,411 Epoch 269/2000 +2025-02-18 17:30:27,360 Current Learning Rate: 0.0026603509 +2025-02-18 17:30:27,361 Train Loss: 0.0022119, Val Loss: 0.0020850 +2025-02-18 17:30:27,361 Epoch 270/2000 +2025-02-18 17:31:09,717 Current Learning Rate: 0.0027300475 +2025-02-18 17:31:09,718 Train Loss: 0.0022681, Val Loss: 0.0020540 +2025-02-18 17:31:09,718 Epoch 271/2000 +2025-02-18 17:31:52,295 Current Learning Rate: 0.0028003042 +2025-02-18 17:31:52,296 Train Loss: 0.0024327, Val Loss: 0.0021627 +2025-02-18 17:31:52,296 Epoch 272/2000 +2025-02-18 17:32:35,620 Current Learning Rate: 0.0028711035 +2025-02-18 17:32:35,621 Train Loss: 0.0022867, Val Loss: 0.0021199 +2025-02-18 17:32:35,621 Epoch 273/2000 +2025-02-18 
17:33:17,806 Current Learning Rate: 0.0029424282 +2025-02-18 17:33:17,807 Train Loss: 0.0034277, Val Loss: 0.0022840 +2025-02-18 17:33:17,807 Epoch 274/2000 +2025-02-18 17:34:00,432 Current Learning Rate: 0.0030142605 +2025-02-18 17:34:00,433 Train Loss: 0.0019804, Val Loss: 0.0019544 +2025-02-18 17:34:00,453 Epoch 275/2000 +2025-02-18 17:34:43,828 Current Learning Rate: 0.0030865828 +2025-02-18 17:34:45,879 Train Loss: 0.0022005, Val Loss: 0.0018912 +2025-02-18 17:34:45,879 Epoch 276/2000 +2025-02-18 17:35:28,669 Current Learning Rate: 0.0031593772 +2025-02-18 17:35:28,670 Train Loss: 0.0022289, Val Loss: 0.0020203 +2025-02-18 17:35:28,671 Epoch 277/2000 +2025-02-18 17:36:11,497 Current Learning Rate: 0.0032326258 +2025-02-18 17:36:11,498 Train Loss: 0.0023485, Val Loss: 0.0022531 +2025-02-18 17:36:11,498 Epoch 278/2000 +2025-02-18 17:36:54,326 Current Learning Rate: 0.0033063104 +2025-02-18 17:36:54,326 Train Loss: 0.0020823, Val Loss: 0.0019360 +2025-02-18 17:36:54,327 Epoch 279/2000 +2025-02-18 17:37:37,229 Current Learning Rate: 0.0033804129 +2025-02-18 17:37:37,230 Train Loss: 0.0036312, Val Loss: 0.0023923 +2025-02-18 17:37:37,230 Epoch 280/2000 +2025-02-18 17:38:20,275 Current Learning Rate: 0.0034549150 +2025-02-18 17:38:20,276 Train Loss: 0.0022580, Val Loss: 0.0020475 +2025-02-18 17:38:20,276 Epoch 281/2000 +2025-02-18 17:39:02,899 Current Learning Rate: 0.0035297984 +2025-02-18 17:39:04,322 Train Loss: 0.0018580, Val Loss: 0.0018449 +2025-02-18 17:39:04,322 Epoch 282/2000 +2025-02-18 17:39:46,966 Current Learning Rate: 0.0036050445 +2025-02-18 17:39:46,967 Train Loss: 0.0035762, Val Loss: 0.0023837 +2025-02-18 17:39:46,967 Epoch 283/2000 +2025-02-18 17:40:29,857 Current Learning Rate: 0.0036806348 +2025-02-18 17:40:29,857 Train Loss: 0.0020949, Val Loss: 0.0019122 +2025-02-18 17:40:29,858 Epoch 284/2000 +2025-02-18 17:41:12,868 Current Learning Rate: 0.0037565506 +2025-02-18 17:41:12,869 Train Loss: 0.0020912, Val Loss: 0.0018673 +2025-02-18 
17:41:12,869 Epoch 285/2000 +2025-02-18 17:41:56,081 Current Learning Rate: 0.0038327732 +2025-02-18 17:41:57,701 Train Loss: 0.0020819, Val Loss: 0.0018445 +2025-02-18 17:41:57,701 Epoch 286/2000 +2025-02-18 17:42:40,442 Current Learning Rate: 0.0039092838 +2025-02-18 17:42:40,443 Train Loss: 0.0022515, Val Loss: 0.0021071 +2025-02-18 17:42:40,443 Epoch 287/2000 +2025-02-18 17:43:23,583 Current Learning Rate: 0.0039860635 +2025-02-18 17:43:23,584 Train Loss: 0.0024569, Val Loss: 0.0025974 +2025-02-18 17:43:23,584 Epoch 288/2000 +2025-02-18 17:44:06,549 Current Learning Rate: 0.0040630934 +2025-02-18 17:44:06,550 Train Loss: 0.0021944, Val Loss: 0.0020369 +2025-02-18 17:44:06,550 Epoch 289/2000 +2025-02-18 17:44:49,814 Current Learning Rate: 0.0041403545 +2025-02-18 17:44:49,815 Train Loss: 0.0029261, Val Loss: 0.0023501 +2025-02-18 17:44:49,815 Epoch 290/2000 +2025-02-18 17:45:33,030 Current Learning Rate: 0.0042178277 +2025-02-18 17:45:33,031 Train Loss: 0.0022571, Val Loss: 0.0018816 +2025-02-18 17:45:33,031 Epoch 291/2000 +2025-02-18 17:46:15,380 Current Learning Rate: 0.0042954938 +2025-02-18 17:46:15,380 Train Loss: 0.0027410, Val Loss: 0.0026028 +2025-02-18 17:46:15,380 Epoch 292/2000 +2025-02-18 17:46:59,168 Current Learning Rate: 0.0043733338 +2025-02-18 17:46:59,169 Train Loss: 0.0023064, Val Loss: 0.0020107 +2025-02-18 17:46:59,169 Epoch 293/2000 +2025-02-18 17:47:42,336 Current Learning Rate: 0.0044513284 +2025-02-18 17:47:42,336 Train Loss: 0.0022670, Val Loss: 0.0019972 +2025-02-18 17:47:42,337 Epoch 294/2000 +2025-02-18 17:48:25,761 Current Learning Rate: 0.0045294584 +2025-02-18 17:48:25,761 Train Loss: 0.0025803, Val Loss: 0.0021324 +2025-02-18 17:48:25,762 Epoch 295/2000 +2025-02-18 17:49:08,391 Current Learning Rate: 0.0046077045 +2025-02-18 17:49:08,392 Train Loss: 0.0018848, Val Loss: 0.0019026 +2025-02-18 17:49:08,392 Epoch 296/2000 +2025-02-18 17:49:51,576 Current Learning Rate: 0.0046860474 +2025-02-18 17:49:51,576 Train Loss: 0.0027394, Val 
Loss: 0.0030746 +2025-02-18 17:49:51,576 Epoch 297/2000 +2025-02-18 17:50:33,927 Current Learning Rate: 0.0047644677 +2025-02-18 17:50:33,928 Train Loss: 0.0026139, Val Loss: 0.0021545 +2025-02-18 17:50:33,928 Epoch 298/2000 +2025-02-18 17:51:17,310 Current Learning Rate: 0.0048429462 +2025-02-18 17:51:17,310 Train Loss: 0.0020331, Val Loss: 0.0018768 +2025-02-18 17:51:17,310 Epoch 299/2000 +2025-02-18 17:52:00,259 Current Learning Rate: 0.0049214634 +2025-02-18 17:52:00,259 Train Loss: 0.0029920, Val Loss: 0.0021373 +2025-02-18 17:52:00,261 Epoch 300/2000 +2025-02-18 17:52:42,781 Current Learning Rate: 0.0050000000 +2025-02-18 17:52:44,146 Train Loss: 0.0018674, Val Loss: 0.0018172 +2025-02-18 17:52:44,152 Epoch 301/2000 +2025-02-18 17:53:25,902 Current Learning Rate: 0.0050785366 +2025-02-18 17:53:25,903 Train Loss: 0.0026097, Val Loss: 0.0023666 +2025-02-18 17:53:25,903 Epoch 302/2000 +2025-02-18 17:54:08,415 Current Learning Rate: 0.0051570538 +2025-02-18 17:54:08,415 Train Loss: 0.0020608, Val Loss: 0.0020070 +2025-02-18 17:54:08,416 Epoch 303/2000 +2025-02-18 17:54:51,147 Current Learning Rate: 0.0052355323 +2025-02-18 17:54:52,736 Train Loss: 0.0018909, Val Loss: 0.0017573 +2025-02-18 17:54:52,736 Epoch 304/2000 +2025-02-18 17:55:34,551 Current Learning Rate: 0.0053139526 +2025-02-18 17:55:36,133 Train Loss: 0.0018080, Val Loss: 0.0017460 +2025-02-18 17:55:36,133 Epoch 305/2000 +2025-02-18 17:56:18,434 Current Learning Rate: 0.0053922955 +2025-02-18 17:56:18,435 Train Loss: 0.0035959, Val Loss: 0.0024591 +2025-02-18 17:56:18,436 Epoch 306/2000 +2025-02-18 17:57:01,706 Current Learning Rate: 0.0054705416 +2025-02-18 17:57:01,706 Train Loss: 0.0021499, Val Loss: 0.0018718 +2025-02-18 17:57:01,707 Epoch 307/2000 +2025-02-18 17:57:45,246 Current Learning Rate: 0.0055486716 +2025-02-18 17:57:45,256 Train Loss: 0.0019959, Val Loss: 0.0017491 +2025-02-18 17:57:45,257 Epoch 308/2000 +2025-02-18 17:58:28,821 Current Learning Rate: 0.0056266662 +2025-02-18 
17:58:28,821 Train Loss: 0.0017812, Val Loss: 0.0019117 +2025-02-18 17:58:28,822 Epoch 309/2000 +2025-02-18 17:59:12,171 Current Learning Rate: 0.0057045062 +2025-02-18 17:59:12,172 Train Loss: 0.0023382, Val Loss: 0.0019419 +2025-02-18 17:59:12,172 Epoch 310/2000 +2025-02-18 17:59:55,710 Current Learning Rate: 0.0057821723 +2025-02-18 17:59:55,711 Train Loss: 0.0018849, Val Loss: 0.0018889 +2025-02-18 17:59:55,711 Epoch 311/2000 +2025-02-18 18:00:38,906 Current Learning Rate: 0.0058596455 +2025-02-18 18:00:38,906 Train Loss: 0.0019797, Val Loss: 0.0018297 +2025-02-18 18:00:38,907 Epoch 312/2000 +2025-02-18 18:01:21,390 Current Learning Rate: 0.0059369066 +2025-02-18 18:01:21,390 Train Loss: 0.0018032, Val Loss: 0.0018692 +2025-02-18 18:01:21,391 Epoch 313/2000 +2025-02-18 18:02:04,183 Current Learning Rate: 0.0060139365 +2025-02-18 18:02:04,184 Train Loss: 0.0020518, Val Loss: 0.0020126 +2025-02-18 18:02:04,184 Epoch 314/2000 +2025-02-18 18:02:47,470 Current Learning Rate: 0.0060907162 +2025-02-18 18:02:48,999 Train Loss: 0.0019250, Val Loss: 0.0017355 +2025-02-18 18:02:48,999 Epoch 315/2000 +2025-02-18 18:03:30,653 Current Learning Rate: 0.0061672268 +2025-02-18 18:03:30,654 Train Loss: 0.0018704, Val Loss: 0.0018669 +2025-02-18 18:03:30,655 Epoch 316/2000 +2025-02-18 18:04:14,478 Current Learning Rate: 0.0062434494 +2025-02-18 18:04:14,479 Train Loss: 0.0025219, Val Loss: 0.0022951 +2025-02-18 18:04:14,479 Epoch 317/2000 +2025-02-18 18:04:57,930 Current Learning Rate: 0.0063193652 +2025-02-18 18:04:57,930 Train Loss: 0.0018168, Val Loss: 0.0017720 +2025-02-18 18:04:57,930 Epoch 318/2000 +2025-02-18 18:05:41,091 Current Learning Rate: 0.0063949555 +2025-02-18 18:05:41,092 Train Loss: 0.0017140, Val Loss: 0.0017393 +2025-02-18 18:05:41,092 Epoch 319/2000 +2025-02-18 18:06:23,492 Current Learning Rate: 0.0064702016 +2025-02-18 18:06:23,492 Train Loss: 0.0024777, Val Loss: 0.0019097 +2025-02-18 18:06:23,493 Epoch 320/2000 +2025-02-18 18:07:06,845 Current Learning 
Rate: 0.0065450850 +2025-02-18 18:07:06,846 Train Loss: 0.0021007, Val Loss: 0.0019510 +2025-02-18 18:07:06,846 Epoch 321/2000 +2025-02-18 18:07:49,549 Current Learning Rate: 0.0066195871 +2025-02-18 18:07:49,549 Train Loss: 0.0019580, Val Loss: 0.0018938 +2025-02-18 18:07:49,550 Epoch 322/2000 +2025-02-18 18:08:32,763 Current Learning Rate: 0.0066936896 +2025-02-18 18:08:32,763 Train Loss: 0.0020908, Val Loss: 0.0024106 +2025-02-18 18:08:32,764 Epoch 323/2000 +2025-02-18 18:09:15,017 Current Learning Rate: 0.0067673742 +2025-02-18 18:09:16,660 Train Loss: 0.0017726, Val Loss: 0.0016919 +2025-02-18 18:09:16,660 Epoch 324/2000 +2025-02-18 18:09:59,860 Current Learning Rate: 0.0068406228 +2025-02-18 18:09:59,861 Train Loss: 0.0022114, Val Loss: 0.0018984 +2025-02-18 18:09:59,861 Epoch 325/2000 +2025-02-18 18:10:41,735 Current Learning Rate: 0.0069134172 +2025-02-18 18:10:41,736 Train Loss: 0.0022210, Val Loss: 0.0017853 +2025-02-18 18:10:41,736 Epoch 326/2000 +2025-02-18 18:11:24,252 Current Learning Rate: 0.0069857395 +2025-02-18 18:11:24,253 Train Loss: 0.0019125, Val Loss: 0.0018118 +2025-02-18 18:11:24,257 Epoch 327/2000 +2025-02-18 18:12:07,319 Current Learning Rate: 0.0070575718 +2025-02-18 18:12:07,320 Train Loss: 0.0017428, Val Loss: 0.0017612 +2025-02-18 18:12:07,320 Epoch 328/2000 +2025-02-18 18:12:50,182 Current Learning Rate: 0.0071288965 +2025-02-18 18:12:51,888 Train Loss: 0.0017766, Val Loss: 0.0015894 +2025-02-18 18:12:51,889 Epoch 329/2000 +2025-02-18 18:13:34,386 Current Learning Rate: 0.0071996958 +2025-02-18 18:13:34,386 Train Loss: 0.0019559, Val Loss: 0.0020721 +2025-02-18 18:13:34,387 Epoch 330/2000 +2025-02-18 18:14:16,352 Current Learning Rate: 0.0072699525 +2025-02-18 18:14:16,353 Train Loss: 0.0018707, Val Loss: 0.0018036 +2025-02-18 18:14:16,353 Epoch 331/2000 +2025-02-18 18:14:59,831 Current Learning Rate: 0.0073396491 +2025-02-18 18:14:59,832 Train Loss: 0.0016481, Val Loss: 0.0015985 +2025-02-18 18:14:59,832 Epoch 332/2000 +2025-02-18 
18:15:43,156 Current Learning Rate: 0.0074087684 +2025-02-18 18:15:43,156 Train Loss: 0.0018216, Val Loss: 0.0016206 +2025-02-18 18:15:43,157 Epoch 333/2000 +2025-02-18 18:16:26,066 Current Learning Rate: 0.0074772933 +2025-02-18 18:16:26,067 Train Loss: 0.0019091, Val Loss: 0.0017353 +2025-02-18 18:16:26,067 Epoch 334/2000 +2025-02-18 18:17:08,244 Current Learning Rate: 0.0075452071 +2025-02-18 18:17:08,245 Train Loss: 0.0018253, Val Loss: 0.0016177 +2025-02-18 18:17:08,245 Epoch 335/2000 +2025-02-18 18:17:50,613 Current Learning Rate: 0.0076124928 +2025-02-18 18:17:50,613 Train Loss: 0.0020595, Val Loss: 0.0016002 +2025-02-18 18:17:50,613 Epoch 336/2000 +2025-02-18 18:18:33,309 Current Learning Rate: 0.0076791340 +2025-02-18 18:18:33,310 Train Loss: 0.0015516, Val Loss: 0.0016030 +2025-02-18 18:18:33,310 Epoch 337/2000 +2025-02-18 18:19:16,693 Current Learning Rate: 0.0077451141 +2025-02-18 18:19:16,694 Train Loss: 0.0017143, Val Loss: 0.0017131 +2025-02-18 18:19:16,694 Epoch 338/2000 +2025-02-18 18:19:59,626 Current Learning Rate: 0.0078104169 +2025-02-18 18:19:59,627 Train Loss: 0.0019217, Val Loss: 0.0020587 +2025-02-18 18:19:59,627 Epoch 339/2000 +2025-02-18 18:20:43,199 Current Learning Rate: 0.0078750263 +2025-02-18 18:20:43,200 Train Loss: 0.0016958, Val Loss: 0.0016010 +2025-02-18 18:20:43,200 Epoch 340/2000 +2025-02-18 18:21:26,017 Current Learning Rate: 0.0079389263 +2025-02-18 18:21:27,238 Train Loss: 0.0014670, Val Loss: 0.0015100 +2025-02-18 18:21:27,238 Epoch 341/2000 +2025-02-18 18:22:10,548 Current Learning Rate: 0.0080021011 +2025-02-18 18:22:10,548 Train Loss: 0.0019157, Val Loss: 0.0018127 +2025-02-18 18:22:10,548 Epoch 342/2000 +2025-02-18 18:22:53,166 Current Learning Rate: 0.0080645353 +2025-02-18 18:22:53,167 Train Loss: 0.0018622, Val Loss: 0.0016093 +2025-02-18 18:22:53,167 Epoch 343/2000 +2025-02-18 18:23:35,993 Current Learning Rate: 0.0081262133 +2025-02-18 18:23:35,994 Train Loss: 0.0018541, Val Loss: 0.0016999 +2025-02-18 
18:23:35,994 Epoch 344/2000 +2025-02-18 18:24:19,508 Current Learning Rate: 0.0081871199 +2025-02-18 18:24:19,509 Train Loss: 0.0016666, Val Loss: 0.0016238 +2025-02-18 18:24:19,509 Epoch 345/2000 +2025-02-18 18:25:01,663 Current Learning Rate: 0.0082472402 +2025-02-18 18:25:01,664 Train Loss: 0.0017156, Val Loss: 0.0016722 +2025-02-18 18:25:01,664 Epoch 346/2000 +2025-02-18 18:25:45,420 Current Learning Rate: 0.0083065593 +2025-02-18 18:25:45,421 Train Loss: 0.0017596, Val Loss: 0.0016487 +2025-02-18 18:25:45,421 Epoch 347/2000 +2025-02-18 18:26:28,790 Current Learning Rate: 0.0083650626 +2025-02-18 18:26:28,790 Train Loss: 0.0017325, Val Loss: 0.0017112 +2025-02-18 18:26:28,790 Epoch 348/2000 +2025-02-18 18:27:11,358 Current Learning Rate: 0.0084227355 +2025-02-18 18:27:11,361 Train Loss: 0.0015121, Val Loss: 0.0015322 +2025-02-18 18:27:11,362 Epoch 349/2000 +2025-02-18 18:27:53,969 Current Learning Rate: 0.0084795640 +2025-02-18 18:27:53,970 Train Loss: 0.0017999, Val Loss: 0.0017936 +2025-02-18 18:27:53,971 Epoch 350/2000 +2025-02-18 18:28:36,639 Current Learning Rate: 0.0085355339 +2025-02-18 18:28:36,639 Train Loss: 0.0016998, Val Loss: 0.0015861 +2025-02-18 18:28:36,639 Epoch 351/2000 +2025-02-18 18:29:20,073 Current Learning Rate: 0.0085906315 +2025-02-18 18:29:20,073 Train Loss: 0.0019386, Val Loss: 0.0018972 +2025-02-18 18:29:20,074 Epoch 352/2000 +2025-02-18 18:30:02,899 Current Learning Rate: 0.0086448431 +2025-02-18 18:30:02,900 Train Loss: 0.0015820, Val Loss: 0.0017853 +2025-02-18 18:30:02,900 Epoch 353/2000 +2025-02-18 18:30:45,903 Current Learning Rate: 0.0086981555 +2025-02-18 18:30:45,904 Train Loss: 0.0017570, Val Loss: 0.0017251 +2025-02-18 18:30:45,904 Epoch 354/2000 +2025-02-18 18:31:28,902 Current Learning Rate: 0.0087505553 +2025-02-18 18:31:28,903 Train Loss: 0.0015964, Val Loss: 0.0016704 +2025-02-18 18:31:28,903 Epoch 355/2000 +2025-02-18 18:32:12,132 Current Learning Rate: 0.0088020298 +2025-02-18 18:32:12,132 Train Loss: 0.0015286, Val 
Loss: 0.0016506 +2025-02-18 18:32:12,133 Epoch 356/2000 +2025-02-18 18:32:54,634 Current Learning Rate: 0.0088525662 +2025-02-18 18:32:54,637 Train Loss: 0.0015187, Val Loss: 0.0015421 +2025-02-18 18:32:54,639 Epoch 357/2000 +2025-02-18 18:33:38,046 Current Learning Rate: 0.0089021520 +2025-02-18 18:33:38,047 Train Loss: 0.0021062, Val Loss: 0.0032683 +2025-02-18 18:33:38,047 Epoch 358/2000 +2025-02-18 18:34:21,470 Current Learning Rate: 0.0089507751 +2025-02-18 18:34:21,471 Train Loss: 0.0021825, Val Loss: 0.0015852 +2025-02-18 18:34:21,471 Epoch 359/2000 +2025-02-18 18:35:04,446 Current Learning Rate: 0.0089984233 +2025-02-18 18:35:05,800 Train Loss: 0.0018197, Val Loss: 0.0014681 +2025-02-18 18:35:05,800 Epoch 360/2000 +2025-02-18 18:35:48,574 Current Learning Rate: 0.0090450850 +2025-02-18 18:35:48,575 Train Loss: 0.0016813, Val Loss: 0.0021415 +2025-02-18 18:35:48,575 Epoch 361/2000 +2025-02-18 18:36:31,883 Current Learning Rate: 0.0090907486 +2025-02-18 18:36:34,074 Train Loss: 0.0015965, Val Loss: 0.0014384 +2025-02-18 18:36:34,075 Epoch 362/2000 +2025-02-18 18:37:17,082 Current Learning Rate: 0.0091354029 +2025-02-18 18:37:18,832 Train Loss: 0.0013874, Val Loss: 0.0014136 +2025-02-18 18:37:18,832 Epoch 363/2000 +2025-02-18 18:38:01,391 Current Learning Rate: 0.0091790368 +2025-02-18 18:38:01,392 Train Loss: 0.0014922, Val Loss: 0.0016143 +2025-02-18 18:38:01,392 Epoch 364/2000 +2025-02-18 18:38:44,307 Current Learning Rate: 0.0092216396 +2025-02-18 18:38:45,517 Train Loss: 0.0013926, Val Loss: 0.0013412 +2025-02-18 18:38:45,517 Epoch 365/2000 +2025-02-18 18:39:28,642 Current Learning Rate: 0.0092632008 +2025-02-18 18:39:28,643 Train Loss: 0.0016428, Val Loss: 0.0016724 +2025-02-18 18:39:28,643 Epoch 366/2000 +2025-02-18 18:40:12,059 Current Learning Rate: 0.0093037101 +2025-02-18 18:40:12,059 Train Loss: 0.0013429, Val Loss: 0.0015838 +2025-02-18 18:40:12,059 Epoch 367/2000 +2025-02-18 18:40:55,793 Current Learning Rate: 0.0093431576 +2025-02-18 
18:40:55,794 Train Loss: 0.0012813, Val Loss: 0.0013530 +2025-02-18 18:40:55,794 Epoch 368/2000 +2025-02-18 18:41:38,495 Current Learning Rate: 0.0093815334 +2025-02-18 18:41:38,496 Train Loss: 0.0014401, Val Loss: 0.0013843 +2025-02-18 18:41:38,496 Epoch 369/2000 +2025-02-18 18:42:21,662 Current Learning Rate: 0.0094188282 +2025-02-18 18:42:21,663 Train Loss: 0.0012297, Val Loss: 0.0014001 +2025-02-18 18:42:21,663 Epoch 370/2000 +2025-02-18 18:43:05,064 Current Learning Rate: 0.0094550326 +2025-02-18 18:43:05,064 Train Loss: 0.0012705, Val Loss: 0.0018555 +2025-02-18 18:43:05,064 Epoch 371/2000 +2025-02-18 18:43:48,083 Current Learning Rate: 0.0094901379 +2025-02-18 18:43:48,084 Train Loss: 0.0017594, Val Loss: 0.0017246 +2025-02-18 18:43:48,084 Epoch 372/2000 +2025-02-18 18:44:30,913 Current Learning Rate: 0.0095241353 +2025-02-18 18:44:30,913 Train Loss: 0.0016862, Val Loss: 0.0015140 +2025-02-18 18:44:30,913 Epoch 373/2000 +2025-02-18 18:45:14,029 Current Learning Rate: 0.0095570164 +2025-02-18 18:45:14,029 Train Loss: 0.0014067, Val Loss: 0.0013510 +2025-02-18 18:45:14,029 Epoch 374/2000 +2025-02-18 18:45:56,673 Current Learning Rate: 0.0095887731 +2025-02-18 18:45:57,729 Train Loss: 0.0013898, Val Loss: 0.0013164 +2025-02-18 18:45:57,732 Epoch 375/2000 +2025-02-18 18:46:39,493 Current Learning Rate: 0.0096193977 +2025-02-18 18:46:39,494 Train Loss: 0.0013693, Val Loss: 0.0013573 +2025-02-18 18:46:39,494 Epoch 376/2000 +2025-02-18 18:47:22,583 Current Learning Rate: 0.0096488824 +2025-02-18 18:47:22,583 Train Loss: 0.0013377, Val Loss: 0.0013611 +2025-02-18 18:47:22,584 Epoch 377/2000 +2025-02-18 18:48:05,544 Current Learning Rate: 0.0096772202 +2025-02-18 18:48:06,887 Train Loss: 0.0011529, Val Loss: 0.0012729 +2025-02-18 18:48:06,887 Epoch 378/2000 +2025-02-18 18:48:48,128 Current Learning Rate: 0.0097044038 +2025-02-18 18:48:49,263 Train Loss: 0.0012668, Val Loss: 0.0012526 +2025-02-18 18:48:49,264 Epoch 379/2000 +2025-02-18 18:49:31,857 Current Learning 
Rate: 0.0097304268 +2025-02-18 18:49:31,858 Train Loss: 0.0015333, Val Loss: 0.0013690 +2025-02-18 18:49:31,858 Epoch 380/2000 +2025-02-18 18:50:14,927 Current Learning Rate: 0.0097552826 +2025-02-18 18:50:14,928 Train Loss: 0.0012782, Val Loss: 0.0012590 +2025-02-18 18:50:14,928 Epoch 381/2000 +2025-02-18 18:50:57,867 Current Learning Rate: 0.0097789651 +2025-02-18 18:50:57,868 Train Loss: 0.0015421, Val Loss: 0.0014658 +2025-02-18 18:50:57,868 Epoch 382/2000 +2025-02-18 18:51:40,749 Current Learning Rate: 0.0098014684 +2025-02-18 18:51:42,337 Train Loss: 0.0012468, Val Loss: 0.0012293 +2025-02-18 18:51:42,337 Epoch 383/2000 +2025-02-18 18:52:25,160 Current Learning Rate: 0.0098227871 +2025-02-18 18:52:25,161 Train Loss: 0.0012961, Val Loss: 0.0012819 +2025-02-18 18:52:25,161 Epoch 384/2000 +2025-02-18 18:53:07,355 Current Learning Rate: 0.0098429158 +2025-02-18 18:53:07,355 Train Loss: 0.0013461, Val Loss: 0.0013925 +2025-02-18 18:53:07,356 Epoch 385/2000 +2025-02-18 18:53:50,623 Current Learning Rate: 0.0098618496 +2025-02-18 18:53:50,624 Train Loss: 0.0014020, Val Loss: 0.0013148 +2025-02-18 18:53:50,624 Epoch 386/2000 +2025-02-18 18:54:33,782 Current Learning Rate: 0.0098795838 +2025-02-18 18:54:33,782 Train Loss: 0.0017544, Val Loss: 0.0015577 +2025-02-18 18:54:33,782 Epoch 387/2000 +2025-02-18 18:55:16,293 Current Learning Rate: 0.0098961141 +2025-02-18 18:55:16,294 Train Loss: 0.0015864, Val Loss: 0.0017547 +2025-02-18 18:55:16,294 Epoch 388/2000 +2025-02-18 18:55:59,068 Current Learning Rate: 0.0099114363 +2025-02-18 18:55:59,068 Train Loss: 0.0012810, Val Loss: 0.0013886 +2025-02-18 18:55:59,069 Epoch 389/2000 +2025-02-18 18:56:42,192 Current Learning Rate: 0.0099255466 +2025-02-18 18:56:42,192 Train Loss: 0.0012414, Val Loss: 0.0014214 +2025-02-18 18:56:42,193 Epoch 390/2000 +2025-02-18 18:57:25,765 Current Learning Rate: 0.0099384417 +2025-02-18 18:57:25,766 Train Loss: 0.0013963, Val Loss: 0.0012380 +2025-02-18 18:57:25,766 Epoch 391/2000 +2025-02-18 
18:58:09,021 Current Learning Rate: 0.0099501183 +2025-02-18 18:58:09,021 Train Loss: 0.0010892, Val Loss: 0.0012810 +2025-02-18 18:58:09,021 Epoch 392/2000 +2025-02-18 18:58:52,156 Current Learning Rate: 0.0099605735 +2025-02-18 18:58:52,156 Train Loss: 0.0014638, Val Loss: 0.0013840 +2025-02-18 18:58:52,156 Epoch 393/2000 +2025-02-18 18:59:35,203 Current Learning Rate: 0.0099698048 +2025-02-18 18:59:37,270 Train Loss: 0.0012113, Val Loss: 0.0012240 +2025-02-18 18:59:37,270 Epoch 394/2000 +2025-02-18 19:00:20,559 Current Learning Rate: 0.0099778098 +2025-02-18 19:00:20,561 Train Loss: 0.0011264, Val Loss: 0.0013557 +2025-02-18 19:00:20,569 Epoch 395/2000 +2025-02-18 19:01:03,777 Current Learning Rate: 0.0099845867 +2025-02-18 19:01:05,631 Train Loss: 0.0010732, Val Loss: 0.0012006 +2025-02-18 19:01:05,632 Epoch 396/2000 +2025-02-18 19:01:47,097 Current Learning Rate: 0.0099901336 +2025-02-18 19:01:48,929 Train Loss: 0.0011134, Val Loss: 0.0011634 +2025-02-18 19:01:48,930 Epoch 397/2000 +2025-02-18 19:02:31,762 Current Learning Rate: 0.0099944494 +2025-02-18 19:02:31,763 Train Loss: 0.0012579, Val Loss: 0.0014505 +2025-02-18 19:02:31,764 Epoch 398/2000 +2025-02-18 19:03:14,594 Current Learning Rate: 0.0099975328 +2025-02-18 19:03:15,966 Train Loss: 0.0010476, Val Loss: 0.0011492 +2025-02-18 19:03:15,966 Epoch 399/2000 +2025-02-18 19:03:57,921 Current Learning Rate: 0.0099993832 +2025-02-18 19:03:57,921 Train Loss: 0.0014083, Val Loss: 0.0016155 +2025-02-18 19:03:57,922 Epoch 400/2000 +2025-02-18 19:04:41,030 Current Learning Rate: 0.0100000000 +2025-02-18 19:04:41,030 Train Loss: 0.0012435, Val Loss: 0.0012054 +2025-02-18 19:04:41,031 Epoch 401/2000 +2025-02-18 19:05:23,639 Current Learning Rate: 0.0099993832 +2025-02-18 19:05:23,640 Train Loss: 0.0011665, Val Loss: 0.0013673 +2025-02-18 19:05:23,640 Epoch 402/2000 +2025-02-18 19:06:06,146 Current Learning Rate: 0.0099975328 +2025-02-18 19:06:07,553 Train Loss: 0.0010835, Val Loss: 0.0011264 +2025-02-18 
19:06:07,553 Epoch 403/2000 +2025-02-18 19:06:49,124 Current Learning Rate: 0.0099944494 +2025-02-18 19:06:50,602 Train Loss: 0.0009434, Val Loss: 0.0011085 +2025-02-18 19:06:50,602 Epoch 404/2000 +2025-02-18 19:07:32,088 Current Learning Rate: 0.0099901336 +2025-02-18 19:07:33,261 Train Loss: 0.0008888, Val Loss: 0.0010331 +2025-02-18 19:07:33,261 Epoch 405/2000 +2025-02-18 19:08:15,585 Current Learning Rate: 0.0099845867 +2025-02-18 19:08:15,586 Train Loss: 0.0011416, Val Loss: 0.0010873 +2025-02-18 19:08:15,586 Epoch 406/2000 +2025-02-18 19:08:57,840 Current Learning Rate: 0.0099778098 +2025-02-18 19:08:57,840 Train Loss: 0.0010608, Val Loss: 0.0010907 +2025-02-18 19:08:57,840 Epoch 407/2000 +2025-02-18 19:09:41,177 Current Learning Rate: 0.0099698048 +2025-02-18 19:09:41,177 Train Loss: 0.0009946, Val Loss: 0.0011131 +2025-02-18 19:09:41,178 Epoch 408/2000 +2025-02-18 19:10:24,325 Current Learning Rate: 0.0099605735 +2025-02-18 19:10:24,326 Train Loss: 0.0010914, Val Loss: 0.0011571 +2025-02-18 19:10:24,326 Epoch 409/2000 +2025-02-18 19:11:07,432 Current Learning Rate: 0.0099501183 +2025-02-18 19:11:07,433 Train Loss: 0.0011023, Val Loss: 0.0010870 +2025-02-18 19:11:07,433 Epoch 410/2000 +2025-02-18 19:11:50,514 Current Learning Rate: 0.0099384417 +2025-02-18 19:11:52,535 Train Loss: 0.0010144, Val Loss: 0.0010130 +2025-02-18 19:11:52,535 Epoch 411/2000 +2025-02-18 19:12:34,603 Current Learning Rate: 0.0099255466 +2025-02-18 19:12:34,605 Train Loss: 0.0010780, Val Loss: 0.0010758 +2025-02-18 19:12:34,605 Epoch 412/2000 +2025-02-18 19:13:16,926 Current Learning Rate: 0.0099114363 +2025-02-18 19:13:16,927 Train Loss: 0.0011290, Val Loss: 0.0010530 +2025-02-18 19:13:16,927 Epoch 413/2000 +2025-02-18 19:13:59,534 Current Learning Rate: 0.0098961141 +2025-02-18 19:13:59,536 Train Loss: 0.0009785, Val Loss: 0.0010945 +2025-02-18 19:13:59,536 Epoch 414/2000 +2025-02-18 19:14:42,628 Current Learning Rate: 0.0098795838 +2025-02-18 19:14:44,484 Train Loss: 0.0008982, Val 
Loss: 0.0009911 +2025-02-18 19:14:44,484 Epoch 415/2000 +2025-02-18 19:15:27,525 Current Learning Rate: 0.0098618496 +2025-02-18 19:15:27,525 Train Loss: 0.0008983, Val Loss: 0.0010050 +2025-02-18 19:15:27,526 Epoch 416/2000 +2025-02-18 19:16:09,864 Current Learning Rate: 0.0098429158 +2025-02-18 19:16:09,866 Train Loss: 0.0012341, Val Loss: 0.0012834 +2025-02-18 19:16:09,866 Epoch 417/2000 +2025-02-18 19:16:52,899 Current Learning Rate: 0.0098227871 +2025-02-18 19:16:52,899 Train Loss: 0.0009907, Val Loss: 0.0010211 +2025-02-18 19:16:52,900 Epoch 418/2000 +2025-02-18 19:17:35,712 Current Learning Rate: 0.0098014684 +2025-02-18 19:17:37,443 Train Loss: 0.0009190, Val Loss: 0.0009890 +2025-02-18 19:17:37,443 Epoch 419/2000 +2025-02-18 19:18:20,260 Current Learning Rate: 0.0097789651 +2025-02-18 19:18:20,261 Train Loss: 0.0009876, Val Loss: 0.0010098 +2025-02-18 19:18:20,261 Epoch 420/2000 +2025-02-18 19:19:03,520 Current Learning Rate: 0.0097552826 +2025-02-18 19:19:03,520 Train Loss: 0.0009777, Val Loss: 0.0010709 +2025-02-18 19:19:03,521 Epoch 421/2000 +2025-02-18 19:19:46,973 Current Learning Rate: 0.0097304268 +2025-02-18 19:19:46,973 Train Loss: 0.0010375, Val Loss: 0.0010619 +2025-02-18 19:19:46,973 Epoch 422/2000 +2025-02-18 19:20:30,281 Current Learning Rate: 0.0097044038 +2025-02-18 19:20:30,282 Train Loss: 0.0011694, Val Loss: 0.0010593 +2025-02-18 19:20:30,282 Epoch 423/2000 +2025-02-18 19:21:13,502 Current Learning Rate: 0.0096772202 +2025-02-18 19:21:13,503 Train Loss: 0.0010318, Val Loss: 0.0011026 +2025-02-18 19:21:13,503 Epoch 424/2000 +2025-02-18 19:21:57,125 Current Learning Rate: 0.0096488824 +2025-02-18 19:21:58,853 Train Loss: 0.0009790, Val Loss: 0.0008923 +2025-02-18 19:21:58,853 Epoch 425/2000 +2025-02-18 19:22:40,909 Current Learning Rate: 0.0096193977 +2025-02-18 19:22:40,910 Train Loss: 0.0010028, Val Loss: 0.0010390 +2025-02-18 19:22:40,910 Epoch 426/2000 +2025-02-18 19:23:24,187 Current Learning Rate: 0.0095887731 +2025-02-18 
19:23:24,188 Train Loss: 0.0012474, Val Loss: 0.0010552 +2025-02-18 19:23:24,188 Epoch 427/2000 +2025-02-18 19:24:06,971 Current Learning Rate: 0.0095570164 +2025-02-18 19:24:06,971 Train Loss: 0.0009328, Val Loss: 0.0008956 +2025-02-18 19:24:06,971 Epoch 428/2000 +2025-02-18 19:24:49,573 Current Learning Rate: 0.0095241353 +2025-02-18 19:24:49,574 Train Loss: 0.0010250, Val Loss: 0.0009040 +2025-02-18 19:24:49,574 Epoch 429/2000 +2025-02-18 19:25:33,175 Current Learning Rate: 0.0094901379 +2025-02-18 19:25:33,175 Train Loss: 0.0008876, Val Loss: 0.0009556 +2025-02-18 19:25:33,176 Epoch 430/2000 +2025-02-18 19:26:16,275 Current Learning Rate: 0.0094550326 +2025-02-18 19:26:17,573 Train Loss: 0.0008340, Val Loss: 0.0008763 +2025-02-18 19:26:17,573 Epoch 431/2000 +2025-02-18 19:27:00,468 Current Learning Rate: 0.0094188282 +2025-02-18 19:27:00,469 Train Loss: 0.0008833, Val Loss: 0.0009338 +2025-02-18 19:27:00,469 Epoch 432/2000 +2025-02-18 19:27:42,605 Current Learning Rate: 0.0093815334 +2025-02-18 19:27:44,514 Train Loss: 0.0009102, Val Loss: 0.0008535 +2025-02-18 19:27:44,515 Epoch 433/2000 +2025-02-18 19:28:27,554 Current Learning Rate: 0.0093431576 +2025-02-18 19:28:29,593 Train Loss: 0.0007994, Val Loss: 0.0008042 +2025-02-18 19:28:29,593 Epoch 434/2000 +2025-02-18 19:29:12,397 Current Learning Rate: 0.0093037101 +2025-02-18 19:29:14,361 Train Loss: 0.0007456, Val Loss: 0.0008031 +2025-02-18 19:29:14,362 Epoch 435/2000 +2025-02-18 19:29:56,702 Current Learning Rate: 0.0092632008 +2025-02-18 19:29:58,165 Train Loss: 0.0007755, Val Loss: 0.0008031 +2025-02-18 19:29:58,166 Epoch 436/2000 +2025-02-18 19:30:41,345 Current Learning Rate: 0.0092216396 +2025-02-18 19:30:43,344 Train Loss: 0.0007013, Val Loss: 0.0007970 +2025-02-18 19:30:43,345 Epoch 437/2000 +2025-02-18 19:31:25,150 Current Learning Rate: 0.0091790368 +2025-02-18 19:31:25,151 Train Loss: 0.0006960, Val Loss: 0.0007992 +2025-02-18 19:31:25,151 Epoch 438/2000 +2025-02-18 19:32:08,564 Current Learning 
Rate: 0.0091354029 +2025-02-18 19:32:08,565 Train Loss: 0.0007976, Val Loss: 0.0008266 +2025-02-18 19:32:08,565 Epoch 439/2000 +2025-02-18 19:32:52,143 Current Learning Rate: 0.0090907486 +2025-02-18 19:32:52,143 Train Loss: 0.0008247, Val Loss: 0.0008597 +2025-02-18 19:32:52,144 Epoch 440/2000 +2025-02-18 19:33:34,864 Current Learning Rate: 0.0090450850 +2025-02-18 19:33:34,865 Train Loss: 0.0009173, Val Loss: 0.0009132 +2025-02-18 19:33:34,865 Epoch 441/2000 +2025-02-18 19:34:18,025 Current Learning Rate: 0.0089984233 +2025-02-18 19:34:18,026 Train Loss: 0.0006895, Val Loss: 0.0008034 +2025-02-18 19:34:18,026 Epoch 442/2000 +2025-02-18 19:35:00,925 Current Learning Rate: 0.0089507751 +2025-02-18 19:35:00,925 Train Loss: 0.0007486, Val Loss: 0.0008509 +2025-02-18 19:35:00,926 Epoch 443/2000 +2025-02-18 19:35:43,106 Current Learning Rate: 0.0089021520 +2025-02-18 19:35:43,106 Train Loss: 0.0011021, Val Loss: 0.0008607 +2025-02-18 19:35:43,107 Epoch 444/2000 +2025-02-18 19:36:26,033 Current Learning Rate: 0.0088525662 +2025-02-18 19:36:26,034 Train Loss: 0.0008986, Val Loss: 0.0009608 +2025-02-18 19:36:26,035 Epoch 445/2000 +2025-02-18 19:37:08,657 Current Learning Rate: 0.0088020298 +2025-02-18 19:37:08,658 Train Loss: 0.0008676, Val Loss: 0.0009627 +2025-02-18 19:37:08,658 Epoch 446/2000 +2025-02-18 19:37:52,301 Current Learning Rate: 0.0087505553 +2025-02-18 19:37:52,302 Train Loss: 0.0007821, Val Loss: 0.0008340 +2025-02-18 19:37:52,303 Epoch 447/2000 +2025-02-18 19:38:35,869 Current Learning Rate: 0.0086981555 +2025-02-18 19:38:35,870 Train Loss: 0.0009361, Val Loss: 0.0008472 +2025-02-18 19:38:35,870 Epoch 448/2000 +2025-02-18 19:39:19,117 Current Learning Rate: 0.0086448431 +2025-02-18 19:39:21,257 Train Loss: 0.0007113, Val Loss: 0.0007563 +2025-02-18 19:39:21,257 Epoch 449/2000 +2025-02-18 19:40:04,305 Current Learning Rate: 0.0085906315 +2025-02-18 19:40:04,306 Train Loss: 0.0007372, Val Loss: 0.0007889 +2025-02-18 19:40:04,306 Epoch 450/2000 +2025-02-18 
19:40:47,432 Current Learning Rate: 0.0085355339 +2025-02-18 19:40:47,432 Train Loss: 0.0008533, Val Loss: 0.0008757 +2025-02-18 19:40:47,432 Epoch 451/2000 +2025-02-18 19:41:29,680 Current Learning Rate: 0.0084795640 +2025-02-18 19:41:31,265 Train Loss: 0.0007861, Val Loss: 0.0007555 +2025-02-18 19:41:31,266 Epoch 452/2000 +2025-02-18 19:42:12,721 Current Learning Rate: 0.0084227355 +2025-02-18 19:42:12,723 Train Loss: 0.0006757, Val Loss: 0.0007673 +2025-02-18 19:42:12,723 Epoch 453/2000 +2025-02-18 19:42:55,405 Current Learning Rate: 0.0083650626 +2025-02-18 19:42:55,406 Train Loss: 0.0008313, Val Loss: 0.0007784 +2025-02-18 19:42:55,406 Epoch 454/2000 +2025-02-18 19:43:38,638 Current Learning Rate: 0.0083065593 +2025-02-18 19:43:38,638 Train Loss: 0.0007668, Val Loss: 0.0007979 +2025-02-18 19:43:38,638 Epoch 455/2000 +2025-02-18 19:44:20,814 Current Learning Rate: 0.0082472402 +2025-02-18 19:44:20,815 Train Loss: 0.0008555, Val Loss: 0.0010283 +2025-02-18 19:44:20,815 Epoch 456/2000 +2025-02-18 19:45:03,393 Current Learning Rate: 0.0081871199 +2025-02-18 19:45:05,072 Train Loss: 0.0007266, Val Loss: 0.0007519 +2025-02-18 19:45:05,072 Epoch 457/2000 +2025-02-18 19:45:46,775 Current Learning Rate: 0.0081262133 +2025-02-18 19:45:46,777 Train Loss: 0.0007581, Val Loss: 0.0007625 +2025-02-18 19:45:46,777 Epoch 458/2000 +2025-02-18 19:46:29,572 Current Learning Rate: 0.0080645353 +2025-02-18 19:46:29,573 Train Loss: 0.0008003, Val Loss: 0.0007955 +2025-02-18 19:46:29,573 Epoch 459/2000 +2025-02-18 19:47:12,594 Current Learning Rate: 0.0080021011 +2025-02-18 19:47:14,438 Train Loss: 0.0008082, Val Loss: 0.0007480 +2025-02-18 19:47:14,439 Epoch 460/2000 +2025-02-18 19:47:55,877 Current Learning Rate: 0.0079389263 +2025-02-18 19:47:56,988 Train Loss: 0.0006230, Val Loss: 0.0007110 +2025-02-18 19:47:56,988 Epoch 461/2000 +2025-02-18 19:48:38,381 Current Learning Rate: 0.0078750263 +2025-02-18 19:48:38,381 Train Loss: 0.0007029, Val Loss: 0.0007509 +2025-02-18 
19:48:38,382 Epoch 462/2000 +2025-02-18 19:49:21,463 Current Learning Rate: 0.0078104169 +2025-02-18 19:49:21,464 Train Loss: 0.0007934, Val Loss: 0.0007817 +2025-02-18 19:49:21,464 Epoch 463/2000 +2025-02-18 19:50:04,408 Current Learning Rate: 0.0077451141 +2025-02-18 19:50:04,408 Train Loss: 0.0006062, Val Loss: 0.0007356 +2025-02-18 19:50:04,409 Epoch 464/2000 +2025-02-18 19:50:47,602 Current Learning Rate: 0.0076791340 +2025-02-18 19:50:47,602 Train Loss: 0.0006761, Val Loss: 0.0007192 +2025-02-18 19:50:47,602 Epoch 465/2000 +2025-02-18 19:51:31,126 Current Learning Rate: 0.0076124928 +2025-02-18 19:51:31,126 Train Loss: 0.0007075, Val Loss: 0.0007434 +2025-02-18 19:51:31,126 Epoch 466/2000 +2025-02-18 19:52:13,159 Current Learning Rate: 0.0075452071 +2025-02-18 19:52:13,160 Train Loss: 0.0006940, Val Loss: 0.0007113 +2025-02-18 19:52:13,160 Epoch 467/2000 +2025-02-18 19:52:55,713 Current Learning Rate: 0.0074772933 +2025-02-18 19:52:55,713 Train Loss: 0.0006135, Val Loss: 0.0007139 +2025-02-18 19:52:55,713 Epoch 468/2000 +2025-02-18 19:53:38,999 Current Learning Rate: 0.0074087684 +2025-02-18 19:53:39,000 Train Loss: 0.0007084, Val Loss: 0.0007598 +2025-02-18 19:53:39,000 Epoch 469/2000 +2025-02-18 19:54:21,461 Current Learning Rate: 0.0073396491 +2025-02-18 19:54:23,305 Train Loss: 0.0007285, Val Loss: 0.0006965 +2025-02-18 19:54:23,305 Epoch 470/2000 +2025-02-18 19:55:05,022 Current Learning Rate: 0.0072699525 +2025-02-18 19:55:05,025 Train Loss: 0.0007479, Val Loss: 0.0007113 +2025-02-18 19:55:05,033 Epoch 471/2000 +2025-02-18 19:55:48,334 Current Learning Rate: 0.0071996958 +2025-02-18 19:55:48,335 Train Loss: 0.0008276, Val Loss: 0.0007131 +2025-02-18 19:55:48,335 Epoch 472/2000 +2025-02-18 19:56:30,130 Current Learning Rate: 0.0071288965 +2025-02-18 19:56:30,131 Train Loss: 0.0007012, Val Loss: 0.0007755 +2025-02-18 19:56:30,131 Epoch 473/2000 +2025-02-18 19:57:13,513 Current Learning Rate: 0.0070575718 +2025-02-18 19:57:13,513 Train Loss: 0.0006384, Val 
Loss: 0.0007046 +2025-02-18 19:57:13,514 Epoch 474/2000 +2025-02-18 19:57:55,389 Current Learning Rate: 0.0069857395 +2025-02-18 19:57:56,698 Train Loss: 0.0006106, Val Loss: 0.0006879 +2025-02-18 19:57:56,706 Epoch 475/2000 +2025-02-18 19:58:39,630 Current Learning Rate: 0.0069134172 +2025-02-18 19:58:41,550 Train Loss: 0.0005941, Val Loss: 0.0006362 +2025-02-18 19:58:41,551 Epoch 476/2000 +2025-02-18 19:59:23,669 Current Learning Rate: 0.0068406228 +2025-02-18 19:59:25,357 Train Loss: 0.0005749, Val Loss: 0.0006203 +2025-02-18 19:59:25,357 Epoch 477/2000 +2025-02-18 20:00:08,471 Current Learning Rate: 0.0067673742 +2025-02-18 20:00:08,472 Train Loss: 0.0005808, Val Loss: 0.0006563 +2025-02-18 20:00:08,473 Epoch 478/2000 +2025-02-18 20:00:50,511 Current Learning Rate: 0.0066936896 +2025-02-18 20:00:50,512 Train Loss: 0.0007293, Val Loss: 0.0007316 +2025-02-18 20:00:50,512 Epoch 479/2000 +2025-02-18 20:01:32,700 Current Learning Rate: 0.0066195871 +2025-02-18 20:01:32,700 Train Loss: 0.0006753, Val Loss: 0.0006559 +2025-02-18 20:01:32,701 Epoch 480/2000 +2025-02-18 20:02:16,248 Current Learning Rate: 0.0065450850 +2025-02-18 20:02:16,249 Train Loss: 0.0006014, Val Loss: 0.0006347 +2025-02-18 20:02:16,249 Epoch 481/2000 +2025-02-18 20:02:59,534 Current Learning Rate: 0.0064702016 +2025-02-18 20:02:59,535 Train Loss: 0.0005768, Val Loss: 0.0006270 +2025-02-18 20:02:59,535 Epoch 482/2000 +2025-02-18 20:03:41,788 Current Learning Rate: 0.0063949555 +2025-02-18 20:03:41,788 Train Loss: 0.0004969, Val Loss: 0.0006244 +2025-02-18 20:03:41,789 Epoch 483/2000 +2025-02-18 20:04:25,008 Current Learning Rate: 0.0063193652 +2025-02-18 20:04:25,009 Train Loss: 0.0006966, Val Loss: 0.0007884 +2025-02-18 20:04:25,009 Epoch 484/2000 +2025-02-18 20:05:08,598 Current Learning Rate: 0.0062434494 +2025-02-18 20:05:08,598 Train Loss: 0.0005425, Val Loss: 0.0006594 +2025-02-18 20:05:08,598 Epoch 485/2000 +2025-02-18 20:05:50,586 Current Learning Rate: 0.0061672268 +2025-02-18 
20:05:50,587 Train Loss: 0.0005567, Val Loss: 0.0006676 +2025-02-18 20:05:50,587 Epoch 486/2000 +2025-02-18 20:06:33,881 Current Learning Rate: 0.0060907162 +2025-02-18 20:06:33,882 Train Loss: 0.0007032, Val Loss: 0.0006918 +2025-02-18 20:06:33,882 Epoch 487/2000 +2025-02-18 20:07:16,328 Current Learning Rate: 0.0060139365 +2025-02-18 20:07:16,329 Train Loss: 0.0005944, Val Loss: 0.0006368 +2025-02-18 20:07:16,329 Epoch 488/2000 +2025-02-18 20:08:00,116 Current Learning Rate: 0.0059369066 +2025-02-18 20:08:00,116 Train Loss: 0.0007418, Val Loss: 0.0008033 +2025-02-18 20:08:00,117 Epoch 489/2000 +2025-02-18 20:08:42,409 Current Learning Rate: 0.0058596455 +2025-02-18 20:08:42,410 Train Loss: 0.0006701, Val Loss: 0.0007014 +2025-02-18 20:08:42,411 Epoch 490/2000 +2025-02-18 20:09:25,832 Current Learning Rate: 0.0057821723 +2025-02-18 20:09:25,833 Train Loss: 0.0006095, Val Loss: 0.0007286 +2025-02-18 20:09:25,834 Epoch 491/2000 +2025-02-18 20:10:09,101 Current Learning Rate: 0.0057045062 +2025-02-18 20:10:09,102 Train Loss: 0.0006757, Val Loss: 0.0006271 +2025-02-18 20:10:09,102 Epoch 492/2000 +2025-02-18 20:10:51,410 Current Learning Rate: 0.0056266662 +2025-02-18 20:10:51,411 Train Loss: 0.0007621, Val Loss: 0.0006948 +2025-02-18 20:10:51,411 Epoch 493/2000 +2025-02-18 20:11:34,226 Current Learning Rate: 0.0055486716 +2025-02-18 20:11:34,226 Train Loss: 0.0006900, Val Loss: 0.0010454 +2025-02-18 20:11:34,227 Epoch 494/2000 +2025-02-18 20:12:17,643 Current Learning Rate: 0.0054705416 +2025-02-18 20:12:17,644 Train Loss: 0.0008437, Val Loss: 0.0007571 +2025-02-18 20:12:17,644 Epoch 495/2000 +2025-02-18 20:13:01,049 Current Learning Rate: 0.0053922955 +2025-02-18 20:13:03,367 Train Loss: 0.0006913, Val Loss: 0.0006169 +2025-02-18 20:13:03,367 Epoch 496/2000 +2025-02-18 20:13:46,033 Current Learning Rate: 0.0053139526 +2025-02-18 20:13:47,886 Train Loss: 0.0004793, Val Loss: 0.0005901 +2025-02-18 20:13:47,886 Epoch 497/2000 +2025-02-18 20:14:29,636 Current Learning 
Rate: 0.0052355323 +2025-02-18 20:14:29,637 Train Loss: 0.0005662, Val Loss: 0.0006020 +2025-02-18 20:14:29,638 Epoch 498/2000 +2025-02-18 20:15:13,105 Current Learning Rate: 0.0051570538 +2025-02-18 20:15:14,866 Train Loss: 0.0005533, Val Loss: 0.0005840 +2025-02-18 20:15:14,866 Epoch 499/2000 +2025-02-18 20:15:56,324 Current Learning Rate: 0.0050785366 +2025-02-18 20:15:57,918 Train Loss: 0.0005304, Val Loss: 0.0005641 +2025-02-18 20:15:57,918 Epoch 500/2000 +2025-02-18 20:16:40,881 Current Learning Rate: 0.0050000000 +2025-02-18 20:16:40,882 Train Loss: 0.0004447, Val Loss: 0.0005674 +2025-02-18 20:16:40,883 Epoch 501/2000 +2025-02-18 20:17:23,002 Current Learning Rate: 0.0049214634 +2025-02-18 20:17:23,003 Train Loss: 0.0004908, Val Loss: 0.0005810 +2025-02-18 20:17:23,003 Epoch 502/2000 +2025-02-18 20:18:06,050 Current Learning Rate: 0.0048429462 +2025-02-18 20:18:06,050 Train Loss: 0.0004975, Val Loss: 0.0005829 +2025-02-18 20:18:06,051 Epoch 503/2000 +2025-02-18 20:18:48,804 Current Learning Rate: 0.0047644677 +2025-02-18 20:18:50,634 Train Loss: 0.0005299, Val Loss: 0.0005455 +2025-02-18 20:18:50,634 Epoch 504/2000 +2025-02-18 20:19:32,043 Current Learning Rate: 0.0046860474 +2025-02-18 20:19:32,043 Train Loss: 0.0004964, Val Loss: 0.0005700 +2025-02-18 20:19:32,044 Epoch 505/2000 +2025-02-18 20:20:15,400 Current Learning Rate: 0.0046077045 +2025-02-18 20:20:15,401 Train Loss: 0.0004832, Val Loss: 0.0005934 +2025-02-18 20:20:15,401 Epoch 506/2000 +2025-02-18 20:20:58,350 Current Learning Rate: 0.0045294584 +2025-02-18 20:20:58,350 Train Loss: 0.0005651, Val Loss: 0.0005681 +2025-02-18 20:20:58,376 Epoch 507/2000 +2025-02-18 20:21:41,711 Current Learning Rate: 0.0044513284 +2025-02-18 20:21:41,712 Train Loss: 0.0004447, Val Loss: 0.0005573 +2025-02-18 20:21:41,712 Epoch 508/2000 +2025-02-18 20:22:25,072 Current Learning Rate: 0.0043733338 +2025-02-18 20:22:27,092 Train Loss: 0.0005681, Val Loss: 0.0005372 +2025-02-18 20:22:27,092 Epoch 509/2000 +2025-02-18 
20:23:10,305 Current Learning Rate: 0.0042954938 +2025-02-18 20:23:10,306 Train Loss: 0.0004749, Val Loss: 0.0005407 +2025-02-18 20:23:10,306 Epoch 510/2000 +2025-02-18 20:23:53,486 Current Learning Rate: 0.0042178277 +2025-02-18 20:23:55,236 Train Loss: 0.0005136, Val Loss: 0.0005314 +2025-02-18 20:23:55,236 Epoch 511/2000 +2025-02-18 20:24:36,729 Current Learning Rate: 0.0041403545 +2025-02-18 20:24:38,347 Train Loss: 0.0004961, Val Loss: 0.0005310 +2025-02-18 20:24:38,348 Epoch 512/2000 +2025-02-18 20:25:20,861 Current Learning Rate: 0.0040630934 +2025-02-18 20:25:23,044 Train Loss: 0.0005754, Val Loss: 0.0005265 +2025-02-18 20:25:23,044 Epoch 513/2000 +2025-02-18 20:26:06,165 Current Learning Rate: 0.0039860635 +2025-02-18 20:26:08,224 Train Loss: 0.0004098, Val Loss: 0.0005233 +2025-02-18 20:26:08,224 Epoch 514/2000 +2025-02-18 20:26:51,562 Current Learning Rate: 0.0039092838 +2025-02-18 20:26:51,563 Train Loss: 0.0005606, Val Loss: 0.0005408 +2025-02-18 20:26:51,563 Epoch 515/2000 +2025-02-18 20:27:35,053 Current Learning Rate: 0.0038327732 +2025-02-18 20:27:35,053 Train Loss: 0.0005602, Val Loss: 0.0005325 +2025-02-18 20:27:35,054 Epoch 516/2000 +2025-02-18 20:28:18,209 Current Learning Rate: 0.0037565506 +2025-02-18 20:28:18,210 Train Loss: 0.0004406, Val Loss: 0.0005445 +2025-02-18 20:28:18,210 Epoch 517/2000 +2025-02-18 20:29:01,143 Current Learning Rate: 0.0036806348 +2025-02-18 20:29:01,144 Train Loss: 0.0004557, Val Loss: 0.0005442 +2025-02-18 20:29:01,144 Epoch 518/2000 +2025-02-18 20:29:44,007 Current Learning Rate: 0.0036050445 +2025-02-18 20:29:44,007 Train Loss: 0.0004850, Val Loss: 0.0005578 +2025-02-18 20:29:44,007 Epoch 519/2000 +2025-02-18 20:30:26,371 Current Learning Rate: 0.0035297984 +2025-02-18 20:30:26,372 Train Loss: 0.0004421, Val Loss: 0.0005278 +2025-02-18 20:30:26,372 Epoch 520/2000 +2025-02-18 20:31:10,020 Current Learning Rate: 0.0034549150 +2025-02-18 20:31:10,021 Train Loss: 0.0005167, Val Loss: 0.0005400 +2025-02-18 
20:31:10,021 Epoch 521/2000 +2025-02-18 20:31:53,457 Current Learning Rate: 0.0033804129 +2025-02-18 20:31:55,345 Train Loss: 0.0004615, Val Loss: 0.0005043 +2025-02-18 20:31:55,345 Epoch 522/2000 +2025-02-18 20:32:36,845 Current Learning Rate: 0.0033063104 +2025-02-18 20:32:38,361 Train Loss: 0.0004535, Val Loss: 0.0004982 +2025-02-18 20:32:38,361 Epoch 523/2000 +2025-02-18 20:33:20,891 Current Learning Rate: 0.0032326258 +2025-02-18 20:33:20,891 Train Loss: 0.0004284, Val Loss: 0.0004999 +2025-02-18 20:33:20,892 Epoch 524/2000 +2025-02-18 20:34:04,011 Current Learning Rate: 0.0031593772 +2025-02-18 20:34:05,653 Train Loss: 0.0004283, Val Loss: 0.0004947 +2025-02-18 20:34:05,653 Epoch 525/2000 +2025-02-18 20:34:47,174 Current Learning Rate: 0.0030865828 +2025-02-18 20:34:47,176 Train Loss: 0.0005067, Val Loss: 0.0005016 +2025-02-18 20:34:47,176 Epoch 526/2000 +2025-02-18 20:35:30,445 Current Learning Rate: 0.0030142605 +2025-02-18 20:35:30,446 Train Loss: 0.0004307, Val Loss: 0.0005006 +2025-02-18 20:35:30,446 Epoch 527/2000 +2025-02-18 20:36:12,391 Current Learning Rate: 0.0029424282 +2025-02-18 20:36:12,392 Train Loss: 0.0004258, Val Loss: 0.0004951 +2025-02-18 20:36:12,392 Epoch 528/2000 +2025-02-18 20:36:55,051 Current Learning Rate: 0.0028711035 +2025-02-18 20:36:55,052 Train Loss: 0.0005415, Val Loss: 0.0005146 +2025-02-18 20:36:55,052 Epoch 529/2000 +2025-02-18 20:37:38,487 Current Learning Rate: 0.0028003042 +2025-02-18 20:37:40,731 Train Loss: 0.0004089, Val Loss: 0.0004895 +2025-02-18 20:37:40,732 Epoch 530/2000 +2025-02-18 20:38:22,034 Current Learning Rate: 0.0027300475 +2025-02-18 20:38:23,590 Train Loss: 0.0004597, Val Loss: 0.0004862 +2025-02-18 20:38:23,590 Epoch 531/2000 +2025-02-18 20:39:05,008 Current Learning Rate: 0.0026603509 +2025-02-18 20:39:05,009 Train Loss: 0.0003936, Val Loss: 0.0004920 +2025-02-18 20:39:05,009 Epoch 532/2000 +2025-02-18 20:39:47,966 Current Learning Rate: 0.0025912316 +2025-02-18 20:39:47,967 Train Loss: 0.0004479, Val 
Loss: 0.0004926 +2025-02-18 20:39:47,967 Epoch 533/2000 +2025-02-18 20:40:30,548 Current Learning Rate: 0.0025227067 +2025-02-18 20:40:31,948 Train Loss: 0.0004063, Val Loss: 0.0004787 +2025-02-18 20:40:31,948 Epoch 534/2000 +2025-02-18 20:41:13,657 Current Learning Rate: 0.0024547929 +2025-02-18 20:41:13,658 Train Loss: 0.0004758, Val Loss: 0.0004793 +2025-02-18 20:41:13,658 Epoch 535/2000 +2025-02-18 20:41:56,761 Current Learning Rate: 0.0023875072 +2025-02-18 20:41:58,702 Train Loss: 0.0004320, Val Loss: 0.0004662 +2025-02-18 20:41:58,702 Epoch 536/2000 +2025-02-18 20:42:41,168 Current Learning Rate: 0.0023208660 +2025-02-18 20:42:43,309 Train Loss: 0.0003620, Val Loss: 0.0004593 +2025-02-18 20:42:43,310 Epoch 537/2000 +2025-02-18 20:43:26,235 Current Learning Rate: 0.0022548859 +2025-02-18 20:43:28,115 Train Loss: 0.0004216, Val Loss: 0.0004585 +2025-02-18 20:43:28,115 Epoch 538/2000 +2025-02-18 20:44:11,365 Current Learning Rate: 0.0021895831 +2025-02-18 20:44:13,273 Train Loss: 0.0003794, Val Loss: 0.0004549 +2025-02-18 20:44:13,273 Epoch 539/2000 +2025-02-18 20:44:55,584 Current Learning Rate: 0.0021249737 +2025-02-18 20:44:55,586 Train Loss: 0.0004229, Val Loss: 0.0004557 +2025-02-18 20:44:55,586 Epoch 540/2000 +2025-02-18 20:45:38,276 Current Learning Rate: 0.0020610737 +2025-02-18 20:45:38,277 Train Loss: 0.0004247, Val Loss: 0.0004567 +2025-02-18 20:45:38,277 Epoch 541/2000 +2025-02-18 20:46:20,546 Current Learning Rate: 0.0019978989 +2025-02-18 20:46:20,546 Train Loss: 0.0003781, Val Loss: 0.0004562 +2025-02-18 20:46:20,546 Epoch 542/2000 +2025-02-18 20:47:04,095 Current Learning Rate: 0.0019354647 +2025-02-18 20:47:04,096 Train Loss: 0.0004617, Val Loss: 0.0004560 +2025-02-18 20:47:04,096 Epoch 543/2000 +2025-02-18 20:47:47,453 Current Learning Rate: 0.0018737867 +2025-02-18 20:47:47,454 Train Loss: 0.0004313, Val Loss: 0.0004576 +2025-02-18 20:47:47,454 Epoch 544/2000 +2025-02-18 20:48:29,907 Current Learning Rate: 0.0018128801 +2025-02-18 
20:48:31,997 Train Loss: 0.0003838, Val Loss: 0.0004493 +2025-02-18 20:48:31,997 Epoch 545/2000 +2025-02-18 20:49:13,627 Current Learning Rate: 0.0017527598 +2025-02-18 20:49:15,090 Train Loss: 0.0003385, Val Loss: 0.0004434 +2025-02-18 20:49:15,091 Epoch 546/2000 +2025-02-18 20:49:57,984 Current Learning Rate: 0.0016934407 +2025-02-18 20:49:57,985 Train Loss: 0.0005021, Val Loss: 0.0004460 +2025-02-18 20:49:57,985 Epoch 547/2000 +2025-02-18 20:50:40,137 Current Learning Rate: 0.0016349374 +2025-02-18 20:50:41,886 Train Loss: 0.0003976, Val Loss: 0.0004426 +2025-02-18 20:50:41,887 Epoch 548/2000 +2025-02-18 20:51:25,348 Current Learning Rate: 0.0015772645 +2025-02-18 20:51:27,383 Train Loss: 0.0004444, Val Loss: 0.0004412 +2025-02-18 20:51:27,383 Epoch 549/2000 +2025-02-18 20:52:09,052 Current Learning Rate: 0.0015204360 +2025-02-18 20:52:10,285 Train Loss: 0.0004327, Val Loss: 0.0004386 +2025-02-18 20:52:10,285 Epoch 550/2000 +2025-02-18 20:52:52,118 Current Learning Rate: 0.0014644661 +2025-02-18 20:52:54,077 Train Loss: 0.0003942, Val Loss: 0.0004373 +2025-02-18 20:52:54,077 Epoch 551/2000 +2025-02-18 20:53:37,445 Current Learning Rate: 0.0014093685 +2025-02-18 20:53:39,437 Train Loss: 0.0003673, Val Loss: 0.0004368 +2025-02-18 20:53:39,437 Epoch 552/2000 +2025-02-18 20:54:22,654 Current Learning Rate: 0.0013551569 +2025-02-18 20:54:22,655 Train Loss: 0.0004019, Val Loss: 0.0004375 +2025-02-18 20:54:22,655 Epoch 553/2000 +2025-02-18 20:55:04,901 Current Learning Rate: 0.0013018445 +2025-02-18 20:55:04,902 Train Loss: 0.0003794, Val Loss: 0.0004380 +2025-02-18 20:55:04,902 Epoch 554/2000 +2025-02-18 20:55:47,579 Current Learning Rate: 0.0012494447 +2025-02-18 20:55:49,198 Train Loss: 0.0003833, Val Loss: 0.0004350 +2025-02-18 20:55:49,199 Epoch 555/2000 +2025-02-18 20:56:30,979 Current Learning Rate: 0.0011979702 +2025-02-18 20:56:32,534 Train Loss: 0.0003432, Val Loss: 0.0004326 +2025-02-18 20:56:32,540 Epoch 556/2000 +2025-02-18 20:57:14,279 Current Learning 
Rate: 0.0011474338 +2025-02-18 20:57:16,140 Train Loss: 0.0003477, Val Loss: 0.0004322 +2025-02-18 20:57:16,140 Epoch 557/2000 +2025-02-18 20:57:57,652 Current Learning Rate: 0.0010978480 +2025-02-18 20:57:59,332 Train Loss: 0.0003552, Val Loss: 0.0004289 +2025-02-18 20:57:59,333 Epoch 558/2000 +2025-02-18 20:58:42,263 Current Learning Rate: 0.0010492249 +2025-02-18 20:58:44,235 Train Loss: 0.0003648, Val Loss: 0.0004270 +2025-02-18 20:58:44,236 Epoch 559/2000 +2025-02-18 20:59:27,131 Current Learning Rate: 0.0010015767 +2025-02-18 20:59:27,132 Train Loss: 0.0004117, Val Loss: 0.0004274 +2025-02-18 20:59:27,132 Epoch 560/2000 +2025-02-18 21:00:09,478 Current Learning Rate: 0.0009549150 +2025-02-18 21:00:10,738 Train Loss: 0.0004396, Val Loss: 0.0004259 +2025-02-18 21:00:10,738 Epoch 561/2000 +2025-02-18 21:00:52,640 Current Learning Rate: 0.0009092514 +2025-02-18 21:00:54,064 Train Loss: 0.0003469, Val Loss: 0.0004241 +2025-02-18 21:00:54,064 Epoch 562/2000 +2025-02-18 21:01:36,985 Current Learning Rate: 0.0008645971 +2025-02-18 21:01:36,986 Train Loss: 0.0004716, Val Loss: 0.0004244 +2025-02-18 21:01:36,986 Epoch 563/2000 +2025-02-18 21:02:19,448 Current Learning Rate: 0.0008209632 +2025-02-18 21:02:20,474 Train Loss: 0.0004245, Val Loss: 0.0004240 +2025-02-18 21:02:20,474 Epoch 564/2000 +2025-02-18 21:03:02,957 Current Learning Rate: 0.0007783604 +2025-02-18 21:03:04,556 Train Loss: 0.0003625, Val Loss: 0.0004216 +2025-02-18 21:03:04,556 Epoch 565/2000 +2025-02-18 21:03:47,695 Current Learning Rate: 0.0007367992 +2025-02-18 21:03:49,110 Train Loss: 0.0004495, Val Loss: 0.0004207 +2025-02-18 21:03:49,111 Epoch 566/2000 +2025-02-18 21:04:31,927 Current Learning Rate: 0.0006962899 +2025-02-18 21:04:33,694 Train Loss: 0.0004247, Val Loss: 0.0004194 +2025-02-18 21:04:33,694 Epoch 567/2000 +2025-02-18 21:05:15,283 Current Learning Rate: 0.0006568424 +2025-02-18 21:05:17,113 Train Loss: 0.0003381, Val Loss: 0.0004184 +2025-02-18 21:05:17,113 Epoch 568/2000 +2025-02-18 
21:05:59,666 Current Learning Rate: 0.0006184666 +2025-02-18 21:05:59,667 Train Loss: 0.0004187, Val Loss: 0.0004187 +2025-02-18 21:05:59,667 Epoch 569/2000 +2025-02-18 21:06:42,056 Current Learning Rate: 0.0005811718 +2025-02-18 21:06:43,958 Train Loss: 0.0003780, Val Loss: 0.0004168 +2025-02-18 21:06:43,959 Epoch 570/2000 +2025-02-18 21:07:26,007 Current Learning Rate: 0.0005449674 +2025-02-18 21:07:27,238 Train Loss: 0.0003661, Val Loss: 0.0004166 +2025-02-18 21:07:27,238 Epoch 571/2000 +2025-02-18 21:08:09,969 Current Learning Rate: 0.0005098621 +2025-02-18 21:08:11,778 Train Loss: 0.0003576, Val Loss: 0.0004156 +2025-02-18 21:08:11,779 Epoch 572/2000 +2025-02-18 21:08:54,791 Current Learning Rate: 0.0004758647 +2025-02-18 21:08:54,791 Train Loss: 0.0003587, Val Loss: 0.0004156 +2025-02-18 21:08:54,792 Epoch 573/2000 +2025-02-18 21:09:37,743 Current Learning Rate: 0.0004429836 +2025-02-18 21:09:39,757 Train Loss: 0.0003924, Val Loss: 0.0004152 +2025-02-18 21:09:39,758 Epoch 574/2000 +2025-02-18 21:10:22,656 Current Learning Rate: 0.0004112269 +2025-02-18 21:10:24,766 Train Loss: 0.0003581, Val Loss: 0.0004147 +2025-02-18 21:10:24,766 Epoch 575/2000 +2025-02-18 21:11:07,395 Current Learning Rate: 0.0003806023 +2025-02-18 21:11:09,587 Train Loss: 0.0003441, Val Loss: 0.0004141 +2025-02-18 21:11:09,588 Epoch 576/2000 +2025-02-18 21:11:51,729 Current Learning Rate: 0.0003511176 +2025-02-18 21:11:51,730 Train Loss: 0.0003744, Val Loss: 0.0004144 +2025-02-18 21:11:51,730 Epoch 577/2000 +2025-02-18 21:12:34,245 Current Learning Rate: 0.0003227798 +2025-02-18 21:12:36,179 Train Loss: 0.0003960, Val Loss: 0.0004140 +2025-02-18 21:12:36,180 Epoch 578/2000 +2025-02-18 21:13:17,896 Current Learning Rate: 0.0002955962 +2025-02-18 21:13:19,546 Train Loss: 0.0004083, Val Loss: 0.0004134 +2025-02-18 21:13:19,547 Epoch 579/2000 +2025-02-18 21:14:01,137 Current Learning Rate: 0.0002695732 +2025-02-18 21:14:02,564 Train Loss: 0.0004647, Val Loss: 0.0004130 +2025-02-18 
21:14:02,564 Epoch 580/2000 +2025-02-18 21:14:45,156 Current Learning Rate: 0.0002447174 +2025-02-18 21:14:47,198 Train Loss: 0.0003394, Val Loss: 0.0004125 +2025-02-18 21:14:47,198 Epoch 581/2000 +2025-02-18 21:15:28,656 Current Learning Rate: 0.0002210349 +2025-02-18 21:15:29,782 Train Loss: 0.0003907, Val Loss: 0.0004123 +2025-02-18 21:15:29,785 Epoch 582/2000 +2025-02-18 21:16:12,448 Current Learning Rate: 0.0001985316 +2025-02-18 21:16:14,181 Train Loss: 0.0003728, Val Loss: 0.0004117 +2025-02-18 21:16:14,182 Epoch 583/2000 +2025-02-18 21:16:57,133 Current Learning Rate: 0.0001772129 +2025-02-18 21:16:57,133 Train Loss: 0.0004333, Val Loss: 0.0004123 +2025-02-18 21:16:57,134 Epoch 584/2000 +2025-02-18 21:17:39,609 Current Learning Rate: 0.0001570842 +2025-02-18 21:17:41,438 Train Loss: 0.0004582, Val Loss: 0.0004115 +2025-02-18 21:17:41,439 Epoch 585/2000 +2025-02-18 21:18:24,860 Current Learning Rate: 0.0001381504 +2025-02-18 21:18:26,515 Train Loss: 0.0003865, Val Loss: 0.0004112 +2025-02-18 21:18:26,515 Epoch 586/2000 +2025-02-18 21:19:10,021 Current Learning Rate: 0.0001204162 +2025-02-18 21:19:11,797 Train Loss: 0.0003433, Val Loss: 0.0004109 +2025-02-18 21:19:11,798 Epoch 587/2000 +2025-02-18 21:19:54,935 Current Learning Rate: 0.0001038859 +2025-02-18 21:19:56,861 Train Loss: 0.0004351, Val Loss: 0.0004105 +2025-02-18 21:19:56,862 Epoch 588/2000 +2025-02-18 21:20:39,446 Current Learning Rate: 0.0000885637 +2025-02-18 21:20:40,841 Train Loss: 0.0003127, Val Loss: 0.0004104 +2025-02-18 21:20:40,842 Epoch 589/2000 +2025-02-18 21:21:22,685 Current Learning Rate: 0.0000744534 +2025-02-18 21:21:23,936 Train Loss: 0.0003826, Val Loss: 0.0004103 +2025-02-18 21:21:23,936 Epoch 590/2000 +2025-02-18 21:22:07,039 Current Learning Rate: 0.0000615583 +2025-02-18 21:22:08,585 Train Loss: 0.0004287, Val Loss: 0.0004101 +2025-02-18 21:22:08,585 Epoch 591/2000 +2025-02-18 21:22:50,676 Current Learning Rate: 0.0000498817 +2025-02-18 21:22:50,680 Train Loss: 0.0003664, Val 
Loss: 0.0004104 +2025-02-18 21:22:50,680 Epoch 592/2000 +2025-02-18 21:23:33,061 Current Learning Rate: 0.0000394265 +2025-02-18 21:23:33,062 Train Loss: 0.0004313, Val Loss: 0.0004102 +2025-02-18 21:23:33,062 Epoch 593/2000 +2025-02-18 21:24:16,256 Current Learning Rate: 0.0000301952 +2025-02-18 21:24:17,865 Train Loss: 0.0003584, Val Loss: 0.0004101 +2025-02-18 21:24:17,865 Epoch 594/2000 +2025-02-18 21:25:00,136 Current Learning Rate: 0.0000221902 +2025-02-18 21:25:02,348 Train Loss: 0.0004048, Val Loss: 0.0004099 +2025-02-18 21:25:02,349 Epoch 595/2000 +2025-02-18 21:25:44,168 Current Learning Rate: 0.0000154133 +2025-02-18 21:25:44,169 Train Loss: 0.0004053, Val Loss: 0.0004099 +2025-02-18 21:25:44,169 Epoch 596/2000 +2025-02-18 21:26:27,927 Current Learning Rate: 0.0000098664 +2025-02-18 21:26:29,856 Train Loss: 0.0003801, Val Loss: 0.0004098 +2025-02-18 21:26:29,857 Epoch 597/2000 +2025-02-18 21:27:12,769 Current Learning Rate: 0.0000055506 +2025-02-18 21:27:12,769 Train Loss: 0.0003361, Val Loss: 0.0004099 +2025-02-18 21:27:12,770 Epoch 598/2000 +2025-02-18 21:27:56,003 Current Learning Rate: 0.0000024672 +2025-02-18 21:27:56,004 Train Loss: 0.0003320, Val Loss: 0.0004099 +2025-02-18 21:27:56,004 Epoch 599/2000 +2025-02-18 21:28:39,329 Current Learning Rate: 0.0000006168 +2025-02-18 21:28:39,330 Train Loss: 0.0003529, Val Loss: 0.0004099 +2025-02-18 21:28:39,330 Epoch 600/2000 +2025-02-18 21:29:22,416 Current Learning Rate: 0.0000000000 +2025-02-18 21:29:22,416 Train Loss: 0.0003468, Val Loss: 0.0004099 +2025-02-18 21:29:22,416 Epoch 601/2000 +2025-02-18 21:30:04,732 Current Learning Rate: 0.0000006168 +2025-02-18 21:30:06,293 Train Loss: 0.0004434, Val Loss: 0.0004098 +2025-02-18 21:30:06,296 Epoch 602/2000 +2025-02-18 21:30:49,010 Current Learning Rate: 0.0000024672 +2025-02-18 21:30:50,970 Train Loss: 0.0003937, Val Loss: 0.0004098 +2025-02-18 21:30:50,971 Epoch 603/2000 +2025-02-18 21:31:34,015 Current Learning Rate: 0.0000055506 +2025-02-18 
21:31:34,016 Train Loss: 0.0003213, Val Loss: 0.0004099 +2025-02-18 21:31:34,016 Epoch 604/2000 +2025-02-18 21:32:17,134 Current Learning Rate: 0.0000098664 +2025-02-18 21:32:17,135 Train Loss: 0.0003881, Val Loss: 0.0004099 +2025-02-18 21:32:17,135 Epoch 605/2000 +2025-02-18 21:33:00,491 Current Learning Rate: 0.0000154133 +2025-02-18 21:33:00,491 Train Loss: 0.0003433, Val Loss: 0.0004099 +2025-02-18 21:33:00,492 Epoch 606/2000 +2025-02-18 21:33:44,063 Current Learning Rate: 0.0000221902 +2025-02-18 21:33:45,818 Train Loss: 0.0003753, Val Loss: 0.0004097 +2025-02-18 21:33:45,818 Epoch 607/2000 +2025-02-18 21:34:29,071 Current Learning Rate: 0.0000301952 +2025-02-18 21:34:29,072 Train Loss: 0.0003136, Val Loss: 0.0004098 +2025-02-18 21:34:29,072 Epoch 608/2000 +2025-02-18 21:35:12,360 Current Learning Rate: 0.0000394265 +2025-02-18 21:35:12,360 Train Loss: 0.0003379, Val Loss: 0.0004099 +2025-02-18 21:35:12,396 Epoch 609/2000 +2025-02-18 21:35:55,678 Current Learning Rate: 0.0000498817 +2025-02-18 21:35:55,678 Train Loss: 0.0003809, Val Loss: 0.0004100 +2025-02-18 21:35:55,678 Epoch 610/2000 +2025-02-18 21:36:38,745 Current Learning Rate: 0.0000615583 +2025-02-18 21:36:38,745 Train Loss: 0.0003329, Val Loss: 0.0004102 +2025-02-18 21:36:38,745 Epoch 611/2000 +2025-02-18 21:37:21,786 Current Learning Rate: 0.0000744534 +2025-02-18 21:37:21,786 Train Loss: 0.0003848, Val Loss: 0.0004101 +2025-02-18 21:37:21,786 Epoch 612/2000 +2025-02-18 21:38:03,786 Current Learning Rate: 0.0000885637 +2025-02-18 21:38:03,787 Train Loss: 0.0004202, Val Loss: 0.0004098 +2025-02-18 21:38:03,787 Epoch 613/2000 +2025-02-18 21:38:46,450 Current Learning Rate: 0.0001038859 +2025-02-18 21:38:46,451 Train Loss: 0.0003490, Val Loss: 0.0004099 +2025-02-18 21:38:46,452 Epoch 614/2000 +2025-02-18 21:39:28,803 Current Learning Rate: 0.0001204162 +2025-02-18 21:39:28,804 Train Loss: 0.0003789, Val Loss: 0.0004099 +2025-02-18 21:39:28,804 Epoch 615/2000 +2025-02-18 21:40:11,467 Current Learning 
Rate: 0.0001381504 +2025-02-18 21:40:11,468 Train Loss: 0.0003130, Val Loss: 0.0004100 +2025-02-18 21:40:11,468 Epoch 616/2000 +2025-02-18 21:40:53,379 Current Learning Rate: 0.0001570842 +2025-02-18 21:40:53,379 Train Loss: 0.0003750, Val Loss: 0.0004101 +2025-02-18 21:40:53,380 Epoch 617/2000 +2025-02-18 21:41:36,577 Current Learning Rate: 0.0001772129 +2025-02-18 21:41:36,577 Train Loss: 0.0004359, Val Loss: 0.0004105 +2025-02-18 21:41:36,577 Epoch 618/2000 +2025-02-18 21:42:18,826 Current Learning Rate: 0.0001985316 +2025-02-18 21:42:18,826 Train Loss: 0.0003375, Val Loss: 0.0004104 +2025-02-18 21:42:18,826 Epoch 619/2000 +2025-02-18 21:43:01,769 Current Learning Rate: 0.0002210349 +2025-02-18 21:43:01,770 Train Loss: 0.0003533, Val Loss: 0.0004111 +2025-02-18 21:43:01,770 Epoch 620/2000 +2025-02-18 21:43:44,160 Current Learning Rate: 0.0002447174 +2025-02-18 21:43:44,160 Train Loss: 0.0003522, Val Loss: 0.0004102 +2025-02-18 21:43:44,160 Epoch 621/2000 +2025-02-18 21:44:26,844 Current Learning Rate: 0.0002695732 +2025-02-18 21:44:26,845 Train Loss: 0.0004008, Val Loss: 0.0004105 +2025-02-18 21:44:26,845 Epoch 622/2000 +2025-02-18 21:45:09,653 Current Learning Rate: 0.0002955962 +2025-02-18 21:45:09,654 Train Loss: 0.0003616, Val Loss: 0.0004104 +2025-02-18 21:45:09,654 Epoch 623/2000 +2025-02-18 21:45:52,342 Current Learning Rate: 0.0003227798 +2025-02-18 21:45:52,346 Train Loss: 0.0003655, Val Loss: 0.0004104 +2025-02-18 21:45:52,347 Epoch 624/2000 +2025-02-18 21:46:34,750 Current Learning Rate: 0.0003511176 +2025-02-18 21:46:34,753 Train Loss: 0.0003116, Val Loss: 0.0004102 +2025-02-18 21:46:34,753 Epoch 625/2000 +2025-02-18 21:47:17,584 Current Learning Rate: 0.0003806023 +2025-02-18 21:47:17,585 Train Loss: 0.0004073, Val Loss: 0.0004108 +2025-02-18 21:47:17,586 Epoch 626/2000 +2025-02-18 21:48:00,340 Current Learning Rate: 0.0004112269 +2025-02-18 21:48:00,340 Train Loss: 0.0003908, Val Loss: 0.0004101 +2025-02-18 21:48:00,341 Epoch 627/2000 +2025-02-18 
21:48:42,663 Current Learning Rate: 0.0004429836 +2025-02-18 21:48:42,664 Train Loss: 0.0003863, Val Loss: 0.0004107 +2025-02-18 21:48:42,665 Epoch 628/2000 +2025-02-18 21:49:25,710 Current Learning Rate: 0.0004758647 +2025-02-18 21:49:25,711 Train Loss: 0.0004113, Val Loss: 0.0004110 +2025-02-18 21:49:25,712 Epoch 629/2000 +2025-02-18 21:50:07,686 Current Learning Rate: 0.0005098621 +2025-02-18 21:50:07,686 Train Loss: 0.0003493, Val Loss: 0.0004115 +2025-02-18 21:50:07,687 Epoch 630/2000 +2025-02-18 21:50:50,145 Current Learning Rate: 0.0005449674 +2025-02-18 21:50:50,147 Train Loss: 0.0003358, Val Loss: 0.0004107 +2025-02-18 21:50:50,147 Epoch 631/2000 +2025-02-18 21:51:32,875 Current Learning Rate: 0.0005811718 +2025-02-18 21:51:32,876 Train Loss: 0.0003717, Val Loss: 0.0004107 +2025-02-18 21:51:32,877 Epoch 632/2000 +2025-02-18 21:52:15,755 Current Learning Rate: 0.0006184666 +2025-02-18 21:52:15,756 Train Loss: 0.0003810, Val Loss: 0.0004112 +2025-02-18 21:52:15,757 Epoch 633/2000 +2025-02-18 21:52:58,791 Current Learning Rate: 0.0006568424 +2025-02-18 21:52:58,792 Train Loss: 0.0003285, Val Loss: 0.0004111 +2025-02-18 21:52:58,793 Epoch 634/2000 +2025-02-18 21:53:41,915 Current Learning Rate: 0.0006962899 +2025-02-18 21:53:41,916 Train Loss: 0.0003944, Val Loss: 0.0004116 +2025-02-18 21:53:41,916 Epoch 635/2000 +2025-02-18 21:54:24,033 Current Learning Rate: 0.0007367992 +2025-02-18 21:54:24,034 Train Loss: 0.0003961, Val Loss: 0.0004128 +2025-02-18 21:54:24,035 Epoch 636/2000 +2025-02-18 21:55:06,312 Current Learning Rate: 0.0007783604 +2025-02-18 21:55:06,313 Train Loss: 0.0003761, Val Loss: 0.0004128 +2025-02-18 21:55:06,313 Epoch 637/2000 +2025-02-18 21:55:49,323 Current Learning Rate: 0.0008209632 +2025-02-18 21:55:49,324 Train Loss: 0.0003402, Val Loss: 0.0004116 +2025-02-18 21:55:49,324 Epoch 638/2000 +2025-02-18 21:56:32,300 Current Learning Rate: 0.0008645971 +2025-02-18 21:56:32,300 Train Loss: 0.0003285, Val Loss: 0.0004131 +2025-02-18 
21:56:32,301 Epoch 639/2000 +2025-02-18 21:57:15,206 Current Learning Rate: 0.0009092514 +2025-02-18 21:57:15,206 Train Loss: 0.0005459, Val Loss: 0.0005489 +2025-02-18 21:57:15,207 Epoch 640/2000 +2025-02-18 21:57:58,249 Current Learning Rate: 0.0009549150 +2025-02-18 21:57:58,250 Train Loss: 0.0004380, Val Loss: 0.0004273 +2025-02-18 21:57:58,250 Epoch 641/2000 +2025-02-18 21:58:40,376 Current Learning Rate: 0.0010015767 +2025-02-18 21:58:40,376 Train Loss: 0.0003262, Val Loss: 0.0004157 +2025-02-18 21:58:40,376 Epoch 642/2000 +2025-02-18 21:59:22,982 Current Learning Rate: 0.0010492249 +2025-02-18 21:59:22,982 Train Loss: 0.0004232, Val Loss: 0.0004367 +2025-02-18 21:59:22,982 Epoch 643/2000 +2025-02-18 22:00:05,451 Current Learning Rate: 0.0010978480 +2025-02-18 22:00:05,452 Train Loss: 0.0004041, Val Loss: 0.0004225 +2025-02-18 22:00:05,452 Epoch 644/2000 +2025-02-18 22:00:48,433 Current Learning Rate: 0.0011474338 +2025-02-18 22:00:48,433 Train Loss: 0.0003833, Val Loss: 0.0004136 +2025-02-18 22:00:48,434 Epoch 645/2000 +2025-02-18 22:01:31,413 Current Learning Rate: 0.0011979702 +2025-02-18 22:01:31,413 Train Loss: 0.0003760, Val Loss: 0.0004149 +2025-02-18 22:01:31,414 Epoch 646/2000 +2025-02-18 22:02:13,793 Current Learning Rate: 0.0012494447 +2025-02-18 22:02:13,793 Train Loss: 0.0003510, Val Loss: 0.0004119 +2025-02-18 22:02:13,794 Epoch 647/2000 +2025-02-18 22:02:55,781 Current Learning Rate: 0.0013018445 +2025-02-18 22:02:55,782 Train Loss: 0.0003317, Val Loss: 0.0004520 +2025-02-18 22:02:55,782 Epoch 648/2000 +2025-02-18 22:03:38,192 Current Learning Rate: 0.0013551569 +2025-02-18 22:03:38,193 Train Loss: 0.0003420, Val Loss: 0.0004115 +2025-02-18 22:03:38,193 Epoch 649/2000 +2025-02-18 22:04:20,694 Current Learning Rate: 0.0014093685 +2025-02-18 22:04:20,695 Train Loss: 0.0003548, Val Loss: 0.0004166 +2025-02-18 22:04:20,695 Epoch 650/2000 +2025-02-18 22:05:02,956 Current Learning Rate: 0.0014644661 +2025-02-18 22:05:02,956 Train Loss: 0.0003625, Val 
Loss: 0.0004158 +2025-02-18 22:05:02,956 Epoch 651/2000 +2025-02-18 22:05:45,828 Current Learning Rate: 0.0015204360 +2025-02-18 22:05:45,829 Train Loss: 0.0004189, Val Loss: 0.0004243 +2025-02-18 22:05:45,830 Epoch 652/2000 +2025-02-18 22:06:28,578 Current Learning Rate: 0.0015772645 +2025-02-18 22:06:28,579 Train Loss: 0.0004001, Val Loss: 0.0004230 +2025-02-18 22:06:28,579 Epoch 653/2000 +2025-02-18 22:07:10,914 Current Learning Rate: 0.0016349374 +2025-02-18 22:07:10,914 Train Loss: 0.0005663, Val Loss: 0.0004460 +2025-02-18 22:07:10,915 Epoch 654/2000 +2025-02-18 22:07:53,150 Current Learning Rate: 0.0016934407 +2025-02-18 22:07:53,151 Train Loss: 0.0004263, Val Loss: 0.0004317 +2025-02-18 22:07:53,151 Epoch 655/2000 +2025-02-18 22:08:35,964 Current Learning Rate: 0.0017527598 +2025-02-18 22:08:35,964 Train Loss: 0.0003662, Val Loss: 0.0004192 +2025-02-18 22:08:35,965 Epoch 656/2000 +2025-02-18 22:09:18,179 Current Learning Rate: 0.0018128801 +2025-02-18 22:09:19,949 Train Loss: 0.0003390, Val Loss: 0.0004083 +2025-02-18 22:09:19,950 Epoch 657/2000 +2025-02-18 22:10:02,685 Current Learning Rate: 0.0018737867 +2025-02-18 22:10:02,686 Train Loss: 0.0003803, Val Loss: 0.0004115 +2025-02-18 22:10:02,686 Epoch 658/2000 +2025-02-18 22:10:45,108 Current Learning Rate: 0.0019354647 +2025-02-18 22:10:45,109 Train Loss: 0.0004103, Val Loss: 0.0004344 +2025-02-18 22:10:45,109 Epoch 659/2000 +2025-02-18 22:11:27,724 Current Learning Rate: 0.0019978989 +2025-02-18 22:11:27,725 Train Loss: 0.0005296, Val Loss: 0.0004354 +2025-02-18 22:11:27,725 Epoch 660/2000 +2025-02-18 22:12:10,694 Current Learning Rate: 0.0020610737 +2025-02-18 22:12:10,694 Train Loss: 0.0003503, Val Loss: 0.0004168 +2025-02-18 22:12:10,695 Epoch 661/2000 +2025-02-18 22:12:53,261 Current Learning Rate: 0.0021249737 +2025-02-18 22:12:53,262 Train Loss: 0.0004559, Val Loss: 0.0004206 +2025-02-18 22:12:53,262 Epoch 662/2000 +2025-02-18 22:13:34,886 Current Learning Rate: 0.0021895831 +2025-02-18 
22:13:34,886 Train Loss: 0.0004105, Val Loss: 0.0004178 +2025-02-18 22:13:34,887 Epoch 663/2000 +2025-02-18 22:14:18,021 Current Learning Rate: 0.0022548859 +2025-02-18 22:14:18,021 Train Loss: 0.0004185, Val Loss: 0.0004308 +2025-02-18 22:14:18,022 Epoch 664/2000 +2025-02-18 22:15:00,950 Current Learning Rate: 0.0023208660 +2025-02-18 22:15:00,951 Train Loss: 0.0003724, Val Loss: 0.0004262 +2025-02-18 22:15:00,951 Epoch 665/2000 +2025-02-18 22:15:42,850 Current Learning Rate: 0.0023875072 +2025-02-18 22:15:42,850 Train Loss: 0.0003920, Val Loss: 0.0004233 +2025-02-18 22:15:42,850 Epoch 666/2000 +2025-02-18 22:16:25,801 Current Learning Rate: 0.0024547929 +2025-02-18 22:16:25,801 Train Loss: 0.0003366, Val Loss: 0.0004418 +2025-02-18 22:16:25,802 Epoch 667/2000 +2025-02-18 22:17:07,886 Current Learning Rate: 0.0025227067 +2025-02-18 22:17:07,886 Train Loss: 0.0006202, Val Loss: 0.0005926 +2025-02-18 22:17:07,887 Epoch 668/2000 +2025-02-18 22:17:50,741 Current Learning Rate: 0.0025912316 +2025-02-18 22:17:50,746 Train Loss: 0.0006037, Val Loss: 0.0007330 +2025-02-18 22:17:50,747 Epoch 669/2000 +2025-02-18 22:18:33,194 Current Learning Rate: 0.0026603509 +2025-02-18 22:18:33,194 Train Loss: 0.0006445, Val Loss: 0.0005230 +2025-02-18 22:18:33,195 Epoch 670/2000 +2025-02-18 22:19:15,503 Current Learning Rate: 0.0027300475 +2025-02-18 22:19:15,503 Train Loss: 0.0004145, Val Loss: 0.0004489 +2025-02-18 22:19:15,503 Epoch 671/2000 +2025-02-18 22:19:58,665 Current Learning Rate: 0.0028003042 +2025-02-18 22:19:58,667 Train Loss: 0.0004780, Val Loss: 0.0004671 +2025-02-18 22:19:58,668 Epoch 672/2000 +2025-02-18 22:20:41,103 Current Learning Rate: 0.0028711035 +2025-02-18 22:20:41,104 Train Loss: 0.0004208, Val Loss: 0.0004300 +2025-02-18 22:20:41,104 Epoch 673/2000 +2025-02-18 22:21:23,732 Current Learning Rate: 0.0029424282 +2025-02-18 22:21:23,733 Train Loss: 0.0003734, Val Loss: 0.0004165 +2025-02-18 22:21:23,733 Epoch 674/2000 +2025-02-18 22:22:06,577 Current Learning 
Rate: 0.0030142605 +2025-02-18 22:22:06,578 Train Loss: 0.0003577, Val Loss: 0.0004265 +2025-02-18 22:22:06,578 Epoch 675/2000 +2025-02-18 22:22:50,092 Current Learning Rate: 0.0030865828 +2025-02-18 22:22:50,093 Train Loss: 0.0004445, Val Loss: 0.0004348 +2025-02-18 22:22:50,093 Epoch 676/2000 +2025-02-18 22:23:33,077 Current Learning Rate: 0.0031593772 +2025-02-18 22:23:33,077 Train Loss: 0.0003811, Val Loss: 0.0004417 +2025-02-18 22:23:33,077 Epoch 677/2000 +2025-02-18 22:24:15,946 Current Learning Rate: 0.0032326258 +2025-02-18 22:24:15,946 Train Loss: 0.0003253, Val Loss: 0.0004101 +2025-02-18 22:24:15,957 Epoch 678/2000 +2025-02-18 22:24:58,532 Current Learning Rate: 0.0033063104 +2025-02-18 22:24:58,533 Train Loss: 0.0003587, Val Loss: 0.0004156 +2025-02-18 22:24:58,533 Epoch 679/2000 +2025-02-18 22:25:41,869 Current Learning Rate: 0.0033804129 +2025-02-18 22:25:41,870 Train Loss: 0.0003160, Val Loss: 0.0004124 +2025-02-18 22:25:41,870 Epoch 680/2000 +2025-02-18 22:26:25,159 Current Learning Rate: 0.0034549150 +2025-02-18 22:26:25,160 Train Loss: 0.0003565, Val Loss: 0.0005271 +2025-02-18 22:26:25,160 Epoch 681/2000 +2025-02-18 22:27:08,665 Current Learning Rate: 0.0035297984 +2025-02-18 22:27:08,666 Train Loss: 0.0004231, Val Loss: 0.0004346 +2025-02-18 22:27:08,666 Epoch 682/2000 +2025-02-18 22:27:52,108 Current Learning Rate: 0.0036050445 +2025-02-18 22:27:52,108 Train Loss: 0.0005557, Val Loss: 0.0006193 +2025-02-18 22:27:52,109 Epoch 683/2000 +2025-02-18 22:28:35,237 Current Learning Rate: 0.0036806348 +2025-02-18 22:28:35,238 Train Loss: 0.0004185, Val Loss: 0.0005024 +2025-02-18 22:28:35,238 Epoch 684/2000 +2025-02-18 22:29:18,652 Current Learning Rate: 0.0037565506 +2025-02-18 22:29:18,653 Train Loss: 0.0005002, Val Loss: 0.0005074 +2025-02-18 22:29:18,656 Epoch 685/2000 +2025-02-18 22:30:01,012 Current Learning Rate: 0.0038327732 +2025-02-18 22:30:01,013 Train Loss: 0.0004067, Val Loss: 0.0005069 +2025-02-18 22:30:01,013 Epoch 686/2000 +2025-02-18 
22:30:43,172 Current Learning Rate: 0.0039092838 +2025-02-18 22:30:43,173 Train Loss: 0.0005250, Val Loss: 0.0005281 +2025-02-18 22:30:43,173 Epoch 687/2000 +2025-02-18 22:31:26,506 Current Learning Rate: 0.0039860635 +2025-02-18 22:31:26,507 Train Loss: 0.0004353, Val Loss: 0.0004897 +2025-02-18 22:31:26,507 Epoch 688/2000 +2025-02-18 22:32:09,334 Current Learning Rate: 0.0040630934 +2025-02-18 22:32:09,335 Train Loss: 0.0004570, Val Loss: 0.0004984 +2025-02-18 22:32:09,335 Epoch 689/2000 +2025-02-18 22:32:52,203 Current Learning Rate: 0.0041403545 +2025-02-18 22:32:52,203 Train Loss: 0.0004216, Val Loss: 0.0004410 +2025-02-18 22:32:52,203 Epoch 690/2000 +2025-02-18 22:33:34,621 Current Learning Rate: 0.0042178277 +2025-02-18 22:33:34,621 Train Loss: 0.0005267, Val Loss: 0.0004886 +2025-02-18 22:33:34,622 Epoch 691/2000 +2025-02-18 22:34:17,374 Current Learning Rate: 0.0042954938 +2025-02-18 22:34:17,374 Train Loss: 0.0004490, Val Loss: 0.0004630 +2025-02-18 22:34:17,375 Epoch 692/2000 +2025-02-18 22:34:59,864 Current Learning Rate: 0.0043733338 +2025-02-18 22:34:59,865 Train Loss: 0.0004000, Val Loss: 0.0004601 +2025-02-18 22:34:59,865 Epoch 693/2000 +2025-02-18 22:35:42,650 Current Learning Rate: 0.0044513284 +2025-02-18 22:35:42,651 Train Loss: 0.0004519, Val Loss: 0.0004574 +2025-02-18 22:35:42,651 Epoch 694/2000 +2025-02-18 22:36:25,643 Current Learning Rate: 0.0045294584 +2025-02-18 22:36:25,643 Train Loss: 0.0006276, Val Loss: 0.0005118 +2025-02-18 22:36:25,643 Epoch 695/2000 +2025-02-18 22:37:08,079 Current Learning Rate: 0.0046077045 +2025-02-18 22:37:08,079 Train Loss: 0.0004055, Val Loss: 0.0004691 +2025-02-18 22:37:08,080 Epoch 696/2000 +2025-02-18 22:37:50,453 Current Learning Rate: 0.0046860474 +2025-02-18 22:37:50,454 Train Loss: 0.0003858, Val Loss: 0.0004559 +2025-02-18 22:37:50,454 Epoch 697/2000 +2025-02-18 22:38:32,864 Current Learning Rate: 0.0047644677 +2025-02-18 22:38:32,865 Train Loss: 0.0004747, Val Loss: 0.0005148 +2025-02-18 
22:38:32,865 Epoch 698/2000 +2025-02-18 22:39:15,060 Current Learning Rate: 0.0048429462 +2025-02-18 22:39:15,060 Train Loss: 0.0004183, Val Loss: 0.0005884 +2025-02-18 22:39:15,061 Epoch 699/2000 +2025-02-18 22:39:58,202 Current Learning Rate: 0.0049214634 +2025-02-18 22:39:58,203 Train Loss: 0.0006692, Val Loss: 0.0005094 +2025-02-18 22:39:58,203 Epoch 700/2000 +2025-02-18 22:40:41,254 Current Learning Rate: 0.0050000000 +2025-02-18 22:40:41,255 Train Loss: 0.0004523, Val Loss: 0.0005430 +2025-02-18 22:40:41,255 Epoch 701/2000 +2025-02-18 22:41:24,365 Current Learning Rate: 0.0050785366 +2025-02-18 22:41:24,366 Train Loss: 0.0004633, Val Loss: 0.0005357 +2025-02-18 22:41:24,366 Epoch 702/2000 +2025-02-18 22:42:06,576 Current Learning Rate: 0.0051570538 +2025-02-18 22:42:06,577 Train Loss: 0.0004168, Val Loss: 0.0005046 +2025-02-18 22:42:06,577 Epoch 703/2000 +2025-02-18 22:42:48,466 Current Learning Rate: 0.0052355323 +2025-02-18 22:42:48,466 Train Loss: 0.0004804, Val Loss: 0.0004856 +2025-02-18 22:42:48,466 Epoch 704/2000 +2025-02-18 22:43:31,664 Current Learning Rate: 0.0053139526 +2025-02-18 22:43:31,665 Train Loss: 0.0005865, Val Loss: 0.0005379 +2025-02-18 22:43:31,665 Epoch 705/2000 +2025-02-18 22:44:14,184 Current Learning Rate: 0.0053922955 +2025-02-18 22:44:14,184 Train Loss: 0.0004807, Val Loss: 0.0004786 +2025-02-18 22:44:14,185 Epoch 706/2000 +2025-02-18 22:44:56,820 Current Learning Rate: 0.0054705416 +2025-02-18 22:44:56,820 Train Loss: 0.0004413, Val Loss: 0.0004840 +2025-02-18 22:44:56,821 Epoch 707/2000 +2025-02-18 22:45:38,664 Current Learning Rate: 0.0055486716 +2025-02-18 22:45:38,665 Train Loss: 0.0006430, Val Loss: 0.0006019 +2025-02-18 22:45:38,665 Epoch 708/2000 +2025-02-18 22:46:21,772 Current Learning Rate: 0.0056266662 +2025-02-18 22:46:21,773 Train Loss: 0.0006133, Val Loss: 0.0006036 +2025-02-18 22:46:21,773 Epoch 709/2000 +2025-02-18 22:47:04,724 Current Learning Rate: 0.0057045062 +2025-02-18 22:47:04,724 Train Loss: 0.0005017, Val 
Loss: 0.0004882 +2025-02-18 22:47:04,724 Epoch 710/2000 +2025-02-18 22:47:46,747 Current Learning Rate: 0.0057821723 +2025-02-18 22:47:46,747 Train Loss: 0.0003873, Val Loss: 0.0004441 +2025-02-18 22:47:46,748 Epoch 711/2000 +2025-02-18 22:48:28,782 Current Learning Rate: 0.0058596455 +2025-02-18 22:48:28,783 Train Loss: 0.0003793, Val Loss: 0.0004474 +2025-02-18 22:48:28,783 Epoch 712/2000 +2025-02-18 22:49:11,188 Current Learning Rate: 0.0059369066 +2025-02-18 22:49:11,188 Train Loss: 0.0003912, Val Loss: 0.0004489 +2025-02-18 22:49:11,189 Epoch 713/2000 +2025-02-18 22:49:53,250 Current Learning Rate: 0.0060139365 +2025-02-18 22:49:53,250 Train Loss: 0.0004673, Val Loss: 0.0005099 +2025-02-18 22:49:53,250 Epoch 714/2000 +2025-02-18 22:50:35,473 Current Learning Rate: 0.0060907162 +2025-02-18 22:50:35,474 Train Loss: 0.0005575, Val Loss: 0.0004955 +2025-02-18 22:50:35,474 Epoch 715/2000 +2025-02-18 22:51:17,685 Current Learning Rate: 0.0061672268 +2025-02-18 22:51:17,686 Train Loss: 0.0008100, Val Loss: 0.0005871 +2025-02-18 22:51:17,686 Epoch 716/2000 +2025-02-18 22:51:59,989 Current Learning Rate: 0.0062434494 +2025-02-18 22:51:59,990 Train Loss: 0.0004465, Val Loss: 0.0005548 +2025-02-18 22:51:59,990 Epoch 717/2000 +2025-02-18 22:52:42,512 Current Learning Rate: 0.0063193652 +2025-02-18 22:52:42,513 Train Loss: 0.0004861, Val Loss: 0.0005530 +2025-02-18 22:52:42,513 Epoch 718/2000 +2025-02-18 22:53:25,059 Current Learning Rate: 0.0063949555 +2025-02-18 22:53:25,060 Train Loss: 0.0004071, Val Loss: 0.0004818 +2025-02-18 22:53:25,060 Epoch 719/2000 +2025-02-18 22:54:08,231 Current Learning Rate: 0.0064702016 +2025-02-18 22:54:08,231 Train Loss: 0.0004972, Val Loss: 0.0005260 +2025-02-18 22:54:08,232 Epoch 720/2000 +2025-02-18 22:54:50,523 Current Learning Rate: 0.0065450850 +2025-02-18 22:54:50,524 Train Loss: 0.0004369, Val Loss: 0.0005225 +2025-02-18 22:54:50,524 Epoch 721/2000 +2025-02-18 22:55:33,354 Current Learning Rate: 0.0066195871 +2025-02-18 
22:55:33,354 Train Loss: 0.0004523, Val Loss: 0.0004660 +2025-02-18 22:55:33,354 Epoch 722/2000 +2025-02-18 22:56:15,387 Current Learning Rate: 0.0066936896 +2025-02-18 22:56:15,388 Train Loss: 0.0005368, Val Loss: 0.0006312 +2025-02-18 22:56:15,388 Epoch 723/2000 +2025-02-18 22:56:57,996 Current Learning Rate: 0.0067673742 +2025-02-18 22:56:57,997 Train Loss: 0.0004096, Val Loss: 0.0004782 +2025-02-18 22:56:57,997 Epoch 724/2000 +2025-02-18 22:57:41,162 Current Learning Rate: 0.0068406228 +2025-02-18 22:57:41,162 Train Loss: 0.0004552, Val Loss: 0.0004694 +2025-02-18 22:57:41,162 Epoch 725/2000 +2025-02-18 22:58:23,917 Current Learning Rate: 0.0069134172 +2025-02-18 22:58:23,918 Train Loss: 0.0005421, Val Loss: 0.0005071 +2025-02-18 22:58:23,918 Epoch 726/2000 +2025-02-18 22:59:06,892 Current Learning Rate: 0.0069857395 +2025-02-18 22:59:06,893 Train Loss: 0.0003859, Val Loss: 0.0004395 +2025-02-18 22:59:06,893 Epoch 727/2000 +2025-02-18 22:59:49,767 Current Learning Rate: 0.0070575718 +2025-02-18 22:59:49,767 Train Loss: 0.0004050, Val Loss: 0.0004492 +2025-02-18 22:59:49,768 Epoch 728/2000 +2025-02-18 23:00:32,399 Current Learning Rate: 0.0071288965 +2025-02-18 23:00:32,399 Train Loss: 0.0004189, Val Loss: 0.0004836 +2025-02-18 23:00:32,400 Epoch 729/2000 +2025-02-18 23:01:14,952 Current Learning Rate: 0.0071996958 +2025-02-18 23:01:14,953 Train Loss: 0.0003604, Val Loss: 0.0004482 +2025-02-18 23:01:14,953 Epoch 730/2000 +2025-02-18 23:01:58,461 Current Learning Rate: 0.0072699525 +2025-02-18 23:01:58,462 Train Loss: 0.0005782, Val Loss: 0.0005466 +2025-02-18 23:01:58,462 Epoch 731/2000 +2025-02-18 23:02:40,915 Current Learning Rate: 0.0073396491 +2025-02-18 23:02:40,915 Train Loss: 0.0006507, Val Loss: 0.0007111 +2025-02-18 23:02:40,916 Epoch 732/2000 +2025-02-18 23:03:23,896 Current Learning Rate: 0.0074087684 +2025-02-18 23:03:23,897 Train Loss: 0.0005913, Val Loss: 0.0005599 +2025-02-18 23:03:23,897 Epoch 733/2000 +2025-02-18 23:04:07,824 Current Learning 
Rate: 0.0074772933 +2025-02-18 23:04:07,825 Train Loss: 0.0004286, Val Loss: 0.0005166 +2025-02-18 23:04:07,825 Epoch 734/2000 +2025-02-18 23:04:50,750 Current Learning Rate: 0.0075452071 +2025-02-18 23:04:50,751 Train Loss: 0.0004992, Val Loss: 0.0005094 +2025-02-18 23:04:50,751 Epoch 735/2000 +2025-02-18 23:05:34,489 Current Learning Rate: 0.0076124928 +2025-02-18 23:05:34,490 Train Loss: 0.0004641, Val Loss: 0.0005014 +2025-02-18 23:05:34,490 Epoch 736/2000 +2025-02-18 23:06:17,652 Current Learning Rate: 0.0076791340 +2025-02-18 23:06:17,652 Train Loss: 0.0004634, Val Loss: 0.0007762 +2025-02-18 23:06:17,653 Epoch 737/2000 +2025-02-18 23:07:00,949 Current Learning Rate: 0.0077451141 +2025-02-18 23:07:00,950 Train Loss: 0.0005219, Val Loss: 0.0004944 +2025-02-18 23:07:00,950 Epoch 738/2000 +2025-02-18 23:07:44,312 Current Learning Rate: 0.0078104169 +2025-02-18 23:07:44,313 Train Loss: 0.0004473, Val Loss: 0.0004976 +2025-02-18 23:07:44,313 Epoch 739/2000 +2025-02-18 23:08:27,326 Current Learning Rate: 0.0078750263 +2025-02-18 23:08:27,327 Train Loss: 0.0006260, Val Loss: 0.0006472 +2025-02-18 23:08:27,327 Epoch 740/2000 +2025-02-18 23:09:10,495 Current Learning Rate: 0.0079389263 +2025-02-18 23:09:10,496 Train Loss: 0.0006513, Val Loss: 0.0006764 +2025-02-18 23:09:10,496 Epoch 741/2000 +2025-02-18 23:09:54,309 Current Learning Rate: 0.0080021011 +2025-02-18 23:09:54,309 Train Loss: 0.0005785, Val Loss: 0.0007031 +2025-02-18 23:09:54,309 Epoch 742/2000 +2025-02-18 23:10:37,474 Current Learning Rate: 0.0080645353 +2025-02-18 23:10:37,474 Train Loss: 0.0004421, Val Loss: 0.0004960 +2025-02-18 23:10:37,475 Epoch 743/2000 +2025-02-18 23:11:19,944 Current Learning Rate: 0.0081262133 +2025-02-18 23:11:19,944 Train Loss: 0.0005144, Val Loss: 0.0005616 +2025-02-18 23:11:19,945 Epoch 744/2000 +2025-02-18 23:12:02,909 Current Learning Rate: 0.0081871199 +2025-02-18 23:12:02,911 Train Loss: 0.0004935, Val Loss: 0.0005162 +2025-02-18 23:12:02,912 Epoch 745/2000 +2025-02-18 
23:12:46,786 Current Learning Rate: 0.0082472402 +2025-02-18 23:12:46,786 Train Loss: 0.0003768, Val Loss: 0.0004637 +2025-02-18 23:12:46,786 Epoch 746/2000 +2025-02-18 23:13:29,046 Current Learning Rate: 0.0083065593 +2025-02-18 23:13:29,047 Train Loss: 0.0005217, Val Loss: 0.0004631 +2025-02-18 23:13:29,047 Epoch 747/2000 +2025-02-18 23:14:11,847 Current Learning Rate: 0.0083650626 +2025-02-18 23:14:11,848 Train Loss: 0.0005544, Val Loss: 0.0005617 +2025-02-18 23:14:11,848 Epoch 748/2000 +2025-02-18 23:14:56,173 Current Learning Rate: 0.0084227355 +2025-02-18 23:14:56,173 Train Loss: 0.0005401, Val Loss: 0.0005333 +2025-02-18 23:14:56,174 Epoch 749/2000 +2025-02-18 23:15:39,348 Current Learning Rate: 0.0084795640 +2025-02-18 23:15:39,348 Train Loss: 0.0003724, Val Loss: 0.0004718 +2025-02-18 23:15:39,348 Epoch 750/2000 +2025-02-18 23:16:22,395 Current Learning Rate: 0.0085355339 +2025-02-18 23:16:22,395 Train Loss: 0.0004473, Val Loss: 0.0004963 +2025-02-18 23:16:22,396 Epoch 751/2000 +2025-02-18 23:17:05,527 Current Learning Rate: 0.0085906315 +2025-02-18 23:17:05,527 Train Loss: 0.0005019, Val Loss: 0.0005205 +2025-02-18 23:17:05,528 Epoch 752/2000 +2025-02-18 23:17:48,160 Current Learning Rate: 0.0086448431 +2025-02-18 23:17:48,161 Train Loss: 0.0004212, Val Loss: 0.0004955 +2025-02-18 23:17:48,161 Epoch 753/2000 +2025-02-18 23:18:30,793 Current Learning Rate: 0.0086981555 +2025-02-18 23:18:30,794 Train Loss: 0.0005081, Val Loss: 0.0006728 +2025-02-18 23:18:30,795 Epoch 754/2000 +2025-02-18 23:19:13,339 Current Learning Rate: 0.0087505553 +2025-02-18 23:19:13,340 Train Loss: 0.0005041, Val Loss: 0.0005740 +2025-02-18 23:19:13,340 Epoch 755/2000 +2025-02-18 23:19:56,175 Current Learning Rate: 0.0088020298 +2025-02-18 23:19:56,176 Train Loss: 0.0004600, Val Loss: 0.0005830 +2025-02-18 23:19:56,176 Epoch 756/2000 +2025-02-18 23:20:39,888 Current Learning Rate: 0.0088525662 +2025-02-18 23:20:39,889 Train Loss: 0.0004196, Val Loss: 0.0005640 +2025-02-18 
23:20:39,889 Epoch 757/2000 +2025-02-18 23:21:23,231 Current Learning Rate: 0.0089021520 +2025-02-18 23:21:23,232 Train Loss: 0.0004903, Val Loss: 0.0006048 +2025-02-18 23:21:23,232 Epoch 758/2000 +2025-02-18 23:22:05,629 Current Learning Rate: 0.0089507751 +2025-02-18 23:22:05,630 Train Loss: 0.0005105, Val Loss: 0.0006871 +2025-02-18 23:22:05,630 Epoch 759/2000 +2025-02-18 23:22:48,135 Current Learning Rate: 0.0089984233 +2025-02-18 23:22:48,135 Train Loss: 0.0005306, Val Loss: 0.0006523 +2025-02-18 23:22:48,136 Epoch 760/2000 +2025-02-18 23:23:30,607 Current Learning Rate: 0.0090450850 +2025-02-18 23:23:30,608 Train Loss: 0.0004529, Val Loss: 0.0004818 +2025-02-18 23:23:30,608 Epoch 761/2000 +2025-02-18 23:24:14,134 Current Learning Rate: 0.0090907486 +2025-02-18 23:24:14,135 Train Loss: 0.0006053, Val Loss: 0.0006380 +2025-02-18 23:24:14,135 Epoch 762/2000 +2025-02-18 23:24:57,424 Current Learning Rate: 0.0091354029 +2025-02-18 23:24:57,425 Train Loss: 0.0006056, Val Loss: 0.0005981 +2025-02-18 23:24:57,426 Epoch 763/2000 +2025-02-18 23:25:40,598 Current Learning Rate: 0.0091790368 +2025-02-18 23:25:40,598 Train Loss: 0.0005712, Val Loss: 0.0006514 +2025-02-18 23:25:40,599 Epoch 764/2000 +2025-02-18 23:26:23,935 Current Learning Rate: 0.0092216396 +2025-02-18 23:26:23,936 Train Loss: 0.0004432, Val Loss: 0.0004766 +2025-02-18 23:26:23,936 Epoch 765/2000 +2025-02-18 23:27:06,852 Current Learning Rate: 0.0092632008 +2025-02-18 23:27:06,852 Train Loss: 0.0005074, Val Loss: 0.0004895 +2025-02-18 23:27:06,853 Epoch 766/2000 +2025-02-18 23:27:50,025 Current Learning Rate: 0.0093037101 +2025-02-18 23:27:50,026 Train Loss: 0.0004087, Val Loss: 0.0004661 +2025-02-18 23:27:50,026 Epoch 767/2000 +2025-02-18 23:28:32,965 Current Learning Rate: 0.0093431576 +2025-02-18 23:28:32,965 Train Loss: 0.0005126, Val Loss: 0.0005074 +2025-02-18 23:28:32,966 Epoch 768/2000 +2025-02-18 23:29:16,273 Current Learning Rate: 0.0093815334 +2025-02-18 23:29:16,273 Train Loss: 0.0004244, Val 
Loss: 0.0004861 +2025-02-18 23:29:16,273 Epoch 769/2000 +2025-02-18 23:29:58,700 Current Learning Rate: 0.0094188282 +2025-02-18 23:29:58,701 Train Loss: 0.0004482, Val Loss: 0.0004930 +2025-02-18 23:29:58,701 Epoch 770/2000 +2025-02-18 23:30:41,063 Current Learning Rate: 0.0094550326 +2025-02-18 23:30:41,066 Train Loss: 0.0004860, Val Loss: 0.0005334 +2025-02-18 23:30:41,069 Epoch 771/2000 +2025-02-18 23:31:24,412 Current Learning Rate: 0.0094901379 +2025-02-18 23:31:24,413 Train Loss: 0.0004707, Val Loss: 0.0004928 +2025-02-18 23:31:24,413 Epoch 772/2000 +2025-02-18 23:32:07,436 Current Learning Rate: 0.0095241353 +2025-02-18 23:32:07,437 Train Loss: 0.0003521, Val Loss: 0.0004809 +2025-02-18 23:32:07,437 Epoch 773/2000 +2025-02-18 23:32:51,213 Current Learning Rate: 0.0095570164 +2025-02-18 23:32:51,214 Train Loss: 0.0004144, Val Loss: 0.0004836 +2025-02-18 23:32:51,214 Epoch 774/2000 +2025-02-18 23:33:33,403 Current Learning Rate: 0.0095887731 +2025-02-18 23:33:33,404 Train Loss: 0.0005278, Val Loss: 0.0005324 +2025-02-18 23:33:33,404 Epoch 775/2000 +2025-02-18 23:34:17,112 Current Learning Rate: 0.0096193977 +2025-02-18 23:34:17,112 Train Loss: 0.0005132, Val Loss: 0.0005153 +2025-02-18 23:34:17,113 Epoch 776/2000 +2025-02-18 23:35:00,578 Current Learning Rate: 0.0096488824 +2025-02-18 23:35:00,579 Train Loss: 0.0004904, Val Loss: 0.0005072 +2025-02-18 23:35:00,579 Epoch 777/2000 +2025-02-18 23:35:42,936 Current Learning Rate: 0.0096772202 +2025-02-18 23:35:42,936 Train Loss: 0.0003956, Val Loss: 0.0004291 +2025-02-18 23:35:42,937 Epoch 778/2000 +2025-02-18 23:36:25,591 Current Learning Rate: 0.0097044038 +2025-02-18 23:36:25,591 Train Loss: 0.0004297, Val Loss: 0.0004614 +2025-02-18 23:36:25,591 Epoch 779/2000 +2025-02-18 23:37:09,017 Current Learning Rate: 0.0097304268 +2025-02-18 23:37:09,018 Train Loss: 0.0004565, Val Loss: 0.0004832 +2025-02-18 23:37:09,018 Epoch 780/2000 +2025-02-18 23:37:51,649 Current Learning Rate: 0.0097552826 +2025-02-18 
23:37:51,650 Train Loss: 0.0003956, Val Loss: 0.0004412 +2025-02-18 23:37:51,650 Epoch 781/2000 +2025-02-18 23:38:35,230 Current Learning Rate: 0.0097789651 +2025-02-18 23:38:35,230 Train Loss: 0.0003159, Val Loss: 0.0004296 +2025-02-18 23:38:35,230 Epoch 782/2000 +2025-02-18 23:39:18,506 Current Learning Rate: 0.0098014684 +2025-02-18 23:39:18,506 Train Loss: 0.0004671, Val Loss: 0.0005191 +2025-02-18 23:39:18,506 Epoch 783/2000 +2025-02-18 23:40:01,677 Current Learning Rate: 0.0098227871 +2025-02-18 23:40:01,677 Train Loss: 0.0005046, Val Loss: 0.0005108 +2025-02-18 23:40:01,678 Epoch 784/2000 +2025-02-18 23:40:44,941 Current Learning Rate: 0.0098429158 +2025-02-18 23:40:44,942 Train Loss: 0.0005220, Val Loss: 0.0006367 +2025-02-18 23:40:44,942 Epoch 785/2000 +2025-02-18 23:41:28,435 Current Learning Rate: 0.0098618496 +2025-02-18 23:41:28,435 Train Loss: 0.0005533, Val Loss: 0.0005310 +2025-02-18 23:41:28,436 Epoch 786/2000 +2025-02-18 23:42:11,232 Current Learning Rate: 0.0098795838 +2025-02-18 23:42:11,232 Train Loss: 0.0005087, Val Loss: 0.0005000 +2025-02-18 23:42:11,233 Epoch 787/2000 +2025-02-18 23:42:54,325 Current Learning Rate: 0.0098961141 +2025-02-18 23:42:54,327 Train Loss: 0.0004590, Val Loss: 0.0004898 +2025-02-18 23:42:54,327 Epoch 788/2000 +2025-02-18 23:43:37,303 Current Learning Rate: 0.0099114363 +2025-02-18 23:43:37,304 Train Loss: 0.0004490, Val Loss: 0.0004844 +2025-02-18 23:43:37,304 Epoch 789/2000 +2025-02-18 23:44:19,846 Current Learning Rate: 0.0099255466 +2025-02-18 23:44:19,846 Train Loss: 0.0004810, Val Loss: 0.0005824 +2025-02-18 23:44:19,846 Epoch 790/2000 +2025-02-18 23:45:01,907 Current Learning Rate: 0.0099384417 +2025-02-18 23:45:01,908 Train Loss: 0.0004144, Val Loss: 0.0004847 +2025-02-18 23:45:01,909 Epoch 791/2000 +2025-02-18 23:45:44,868 Current Learning Rate: 0.0099501183 +2025-02-18 23:45:44,868 Train Loss: 0.0004000, Val Loss: 0.0004701 +2025-02-18 23:45:44,868 Epoch 792/2000 +2025-02-18 23:46:27,840 Current Learning 
Rate: 0.0099605735 +2025-02-18 23:46:27,840 Train Loss: 0.0005417, Val Loss: 0.0005281 +2025-02-18 23:46:27,841 Epoch 793/2000 +2025-02-18 23:47:10,712 Current Learning Rate: 0.0099698048 +2025-02-18 23:47:10,713 Train Loss: 0.0005414, Val Loss: 0.0005604 +2025-02-18 23:47:10,713 Epoch 794/2000 +2025-02-18 23:47:53,260 Current Learning Rate: 0.0099778098 +2025-02-18 23:47:53,261 Train Loss: 0.0006029, Val Loss: 0.0005738 +2025-02-18 23:47:53,261 Epoch 795/2000 +2025-02-18 23:48:36,277 Current Learning Rate: 0.0099845867 +2025-02-18 23:48:36,277 Train Loss: 0.0004438, Val Loss: 0.0004629 +2025-02-18 23:48:36,277 Epoch 796/2000 +2025-02-18 23:49:19,161 Current Learning Rate: 0.0099901336 +2025-02-18 23:49:19,161 Train Loss: 0.0004035, Val Loss: 0.0004726 +2025-02-18 23:49:19,161 Epoch 797/2000 +2025-02-18 23:50:02,197 Current Learning Rate: 0.0099944494 +2025-02-18 23:50:02,198 Train Loss: 0.0004470, Val Loss: 0.0005083 +2025-02-18 23:50:02,198 Epoch 798/2000 +2025-02-18 23:50:45,316 Current Learning Rate: 0.0099975328 +2025-02-18 23:50:45,316 Train Loss: 0.0003833, Val Loss: 0.0004289 +2025-02-18 23:50:45,317 Epoch 799/2000 +2025-02-18 23:51:27,681 Current Learning Rate: 0.0099993832 +2025-02-18 23:51:27,681 Train Loss: 0.0003646, Val Loss: 0.0004805 +2025-02-18 23:51:27,682 Epoch 800/2000 +2025-02-18 23:52:09,979 Current Learning Rate: 0.0100000000 +2025-02-18 23:52:09,979 Train Loss: 0.0003941, Val Loss: 0.0004538 +2025-02-18 23:52:09,980 Epoch 801/2000 +2025-02-18 23:52:52,248 Current Learning Rate: 0.0099993832 +2025-02-18 23:52:52,249 Train Loss: 0.0004818, Val Loss: 0.0004854 +2025-02-18 23:52:52,249 Epoch 802/2000 +2025-02-18 23:53:34,736 Current Learning Rate: 0.0099975328 +2025-02-18 23:53:34,737 Train Loss: 0.0004143, Val Loss: 0.0004645 +2025-02-18 23:53:34,737 Epoch 803/2000 +2025-02-18 23:54:16,794 Current Learning Rate: 0.0099944494 +2025-02-18 23:54:16,794 Train Loss: 0.0004518, Val Loss: 0.0004697 +2025-02-18 23:54:16,795 Epoch 804/2000 +2025-02-18 
23:54:58,809 Current Learning Rate: 0.0099901336 +2025-02-18 23:54:58,810 Train Loss: 0.0003482, Val Loss: 0.0004384 +2025-02-18 23:54:58,810 Epoch 805/2000 +2025-02-18 23:55:42,062 Current Learning Rate: 0.0099845867 +2025-02-18 23:55:42,062 Train Loss: 0.0004466, Val Loss: 0.0005324 +2025-02-18 23:55:42,063 Epoch 806/2000 +2025-02-18 23:56:24,334 Current Learning Rate: 0.0099778098 +2025-02-18 23:56:24,334 Train Loss: 0.0005096, Val Loss: 0.0004557 +2025-02-18 23:56:24,335 Epoch 807/2000 +2025-02-18 23:57:07,299 Current Learning Rate: 0.0099698048 +2025-02-18 23:57:07,299 Train Loss: 0.0004716, Val Loss: 0.0004675 +2025-02-18 23:57:07,300 Epoch 808/2000 +2025-02-18 23:57:49,304 Current Learning Rate: 0.0099605735 +2025-02-18 23:57:49,305 Train Loss: 0.0004419, Val Loss: 0.0004450 +2025-02-18 23:57:49,305 Epoch 809/2000 +2025-02-18 23:58:32,643 Current Learning Rate: 0.0099501183 +2025-02-18 23:58:32,643 Train Loss: 0.0003360, Val Loss: 0.0004327 +2025-02-18 23:58:32,644 Epoch 810/2000 +2025-02-18 23:59:14,533 Current Learning Rate: 0.0099384417 +2025-02-18 23:59:14,534 Train Loss: 0.0004934, Val Loss: 0.0004444 +2025-02-18 23:59:14,534 Epoch 811/2000 +2025-02-18 23:59:57,529 Current Learning Rate: 0.0099255466 +2025-02-18 23:59:57,530 Train Loss: 0.0003976, Val Loss: 0.0004333 +2025-02-18 23:59:57,530 Epoch 812/2000 +2025-02-19 00:00:40,415 Current Learning Rate: 0.0099114363 +2025-02-19 00:00:40,415 Train Loss: 0.0004216, Val Loss: 0.0004356 +2025-02-19 00:00:40,416 Epoch 813/2000 +2025-02-19 00:01:23,349 Current Learning Rate: 0.0098961141 +2025-02-19 00:01:24,706 Train Loss: 0.0004006, Val Loss: 0.0004066 +2025-02-19 00:01:24,706 Epoch 814/2000 +2025-02-19 00:02:07,078 Current Learning Rate: 0.0098795838 +2025-02-19 00:02:07,080 Train Loss: 0.0004658, Val Loss: 0.0004701 +2025-02-19 00:02:07,080 Epoch 815/2000 +2025-02-19 00:02:49,507 Current Learning Rate: 0.0098618496 +2025-02-19 00:02:49,507 Train Loss: 0.0005075, Val Loss: 0.0005421 +2025-02-19 
00:02:49,507 Epoch 816/2000 +2025-02-19 00:03:31,989 Current Learning Rate: 0.0098429158 +2025-02-19 00:03:31,990 Train Loss: 0.0004353, Val Loss: 0.0004457 +2025-02-19 00:03:31,990 Epoch 817/2000 +2025-02-19 00:04:13,808 Current Learning Rate: 0.0098227871 +2025-02-19 00:04:13,809 Train Loss: 0.0006612, Val Loss: 0.0006060 +2025-02-19 00:04:13,809 Epoch 818/2000 +2025-02-19 00:04:55,928 Current Learning Rate: 0.0098014684 +2025-02-19 00:04:55,929 Train Loss: 0.0004894, Val Loss: 0.0005182 +2025-02-19 00:04:55,929 Epoch 819/2000 +2025-02-19 00:05:38,617 Current Learning Rate: 0.0097789651 +2025-02-19 00:05:38,617 Train Loss: 0.0004411, Val Loss: 0.0004686 +2025-02-19 00:05:38,618 Epoch 820/2000 +2025-02-19 00:06:21,098 Current Learning Rate: 0.0097552826 +2025-02-19 00:06:21,099 Train Loss: 0.0003869, Val Loss: 0.0004599 +2025-02-19 00:06:21,099 Epoch 821/2000 +2025-02-19 00:07:03,606 Current Learning Rate: 0.0097304268 +2025-02-19 00:07:03,606 Train Loss: 0.0006094, Val Loss: 0.0005816 +2025-02-19 00:07:03,607 Epoch 822/2000 +2025-02-19 00:07:46,334 Current Learning Rate: 0.0097044038 +2025-02-19 00:07:46,334 Train Loss: 0.0004938, Val Loss: 0.0004962 +2025-02-19 00:07:46,335 Epoch 823/2000 +2025-02-19 00:08:28,719 Current Learning Rate: 0.0096772202 +2025-02-19 00:08:28,720 Train Loss: 0.0005204, Val Loss: 0.0005168 +2025-02-19 00:08:28,720 Epoch 824/2000 +2025-02-19 00:09:11,194 Current Learning Rate: 0.0096488824 +2025-02-19 00:09:11,195 Train Loss: 0.0003909, Val Loss: 0.0005464 +2025-02-19 00:09:11,195 Epoch 825/2000 +2025-02-19 00:09:54,086 Current Learning Rate: 0.0096193977 +2025-02-19 00:09:54,086 Train Loss: 0.0005129, Val Loss: 0.0004922 +2025-02-19 00:09:54,087 Epoch 826/2000 +2025-02-19 00:10:36,022 Current Learning Rate: 0.0095887731 +2025-02-19 00:10:36,022 Train Loss: 0.0003842, Val Loss: 0.0004128 +2025-02-19 00:10:36,022 Epoch 827/2000 +2025-02-19 00:11:19,500 Current Learning Rate: 0.0095570164 +2025-02-19 00:11:19,501 Train Loss: 0.0004824, Val 
Loss: 0.0004583 +2025-02-19 00:11:19,501 Epoch 828/2000 +2025-02-19 00:12:01,979 Current Learning Rate: 0.0095241353 +2025-02-19 00:12:01,979 Train Loss: 0.0005277, Val Loss: 0.0004502 +2025-02-19 00:12:01,979 Epoch 829/2000 +2025-02-19 00:12:44,029 Current Learning Rate: 0.0094901379 +2025-02-19 00:12:44,029 Train Loss: 0.0004591, Val Loss: 0.0004959 +2025-02-19 00:12:44,030 Epoch 830/2000 +2025-02-19 00:13:26,724 Current Learning Rate: 0.0094550326 +2025-02-19 00:13:27,919 Train Loss: 0.0003378, Val Loss: 0.0003812 +2025-02-19 00:13:27,919 Epoch 831/2000 +2025-02-19 00:14:09,872 Current Learning Rate: 0.0094188282 +2025-02-19 00:14:09,873 Train Loss: 0.0005815, Val Loss: 0.0006134 +2025-02-19 00:14:09,874 Epoch 832/2000 +2025-02-19 00:14:52,944 Current Learning Rate: 0.0093815334 +2025-02-19 00:14:52,945 Train Loss: 0.0004553, Val Loss: 0.0004320 +2025-02-19 00:14:52,945 Epoch 833/2000 +2025-02-19 00:15:35,689 Current Learning Rate: 0.0093431576 +2025-02-19 00:15:35,689 Train Loss: 0.0005026, Val Loss: 0.0004189 +2025-02-19 00:15:35,689 Epoch 834/2000 +2025-02-19 00:16:18,167 Current Learning Rate: 0.0093037101 +2025-02-19 00:16:18,168 Train Loss: 0.0003888, Val Loss: 0.0003929 +2025-02-19 00:16:18,168 Epoch 835/2000 +2025-02-19 00:17:00,811 Current Learning Rate: 0.0092632008 +2025-02-19 00:17:00,812 Train Loss: 0.0003395, Val Loss: 0.0003839 +2025-02-19 00:17:00,812 Epoch 836/2000 +2025-02-19 00:17:43,683 Current Learning Rate: 0.0092216396 +2025-02-19 00:17:45,588 Train Loss: 0.0003507, Val Loss: 0.0003775 +2025-02-19 00:17:45,589 Epoch 837/2000 +2025-02-19 00:18:28,098 Current Learning Rate: 0.0091790368 +2025-02-19 00:18:28,099 Train Loss: 0.0003330, Val Loss: 0.0003904 +2025-02-19 00:18:28,099 Epoch 838/2000 +2025-02-19 00:19:10,414 Current Learning Rate: 0.0091354029 +2025-02-19 00:19:10,415 Train Loss: 0.0003368, Val Loss: 0.0003842 +2025-02-19 00:19:10,415 Epoch 839/2000 +2025-02-19 00:19:53,425 Current Learning Rate: 0.0090907486 +2025-02-19 
00:19:53,425 Train Loss: 0.0003162, Val Loss: 0.0003831 +2025-02-19 00:19:53,425 Epoch 840/2000 +2025-02-19 00:20:36,359 Current Learning Rate: 0.0090450850 +2025-02-19 00:20:38,002 Train Loss: 0.0003632, Val Loss: 0.0003722 +2025-02-19 00:20:38,002 Epoch 841/2000 +2025-02-19 00:21:20,095 Current Learning Rate: 0.0089984233 +2025-02-19 00:21:20,096 Train Loss: 0.0003143, Val Loss: 0.0003877 +2025-02-19 00:21:20,096 Epoch 842/2000 +2025-02-19 00:22:02,877 Current Learning Rate: 0.0089507751 +2025-02-19 00:22:02,877 Train Loss: 0.0003477, Val Loss: 0.0003846 +2025-02-19 00:22:02,878 Epoch 843/2000 +2025-02-19 00:22:45,545 Current Learning Rate: 0.0089021520 +2025-02-19 00:22:45,546 Train Loss: 0.0003538, Val Loss: 0.0003870 +2025-02-19 00:22:45,546 Epoch 844/2000 +2025-02-19 00:23:28,618 Current Learning Rate: 0.0088525662 +2025-02-19 00:23:28,618 Train Loss: 0.0003649, Val Loss: 0.0004155 +2025-02-19 00:23:28,619 Epoch 845/2000 +2025-02-19 00:24:11,706 Current Learning Rate: 0.0088020298 +2025-02-19 00:24:11,706 Train Loss: 0.0003234, Val Loss: 0.0004249 +2025-02-19 00:24:11,706 Epoch 846/2000 +2025-02-19 00:24:54,963 Current Learning Rate: 0.0087505553 +2025-02-19 00:24:54,964 Train Loss: 0.0002978, Val Loss: 0.0003928 +2025-02-19 00:24:54,964 Epoch 847/2000 +2025-02-19 00:25:38,005 Current Learning Rate: 0.0086981555 +2025-02-19 00:25:38,005 Train Loss: 0.0004270, Val Loss: 0.0004308 +2025-02-19 00:25:38,005 Epoch 848/2000 +2025-02-19 00:26:20,458 Current Learning Rate: 0.0086448431 +2025-02-19 00:26:21,754 Train Loss: 0.0003397, Val Loss: 0.0003720 +2025-02-19 00:26:21,754 Epoch 849/2000 +2025-02-19 00:27:03,151 Current Learning Rate: 0.0085906315 +2025-02-19 00:27:03,152 Train Loss: 0.0004277, Val Loss: 0.0004484 +2025-02-19 00:27:03,152 Epoch 850/2000 +2025-02-19 00:27:45,856 Current Learning Rate: 0.0085355339 +2025-02-19 00:27:45,856 Train Loss: 0.0004994, Val Loss: 0.0004902 +2025-02-19 00:27:45,857 Epoch 851/2000 +2025-02-19 00:28:28,914 Current Learning 
Rate: 0.0084795640 +2025-02-19 00:28:28,915 Train Loss: 0.0004288, Val Loss: 0.0004524 +2025-02-19 00:28:28,915 Epoch 852/2000 +2025-02-19 00:29:12,121 Current Learning Rate: 0.0084227355 +2025-02-19 00:29:12,122 Train Loss: 0.0003680, Val Loss: 0.0003966 +2025-02-19 00:29:12,122 Epoch 853/2000 +2025-02-19 00:29:54,602 Current Learning Rate: 0.0083650626 +2025-02-19 00:29:54,603 Train Loss: 0.0003345, Val Loss: 0.0003801 +2025-02-19 00:29:54,603 Epoch 854/2000 +2025-02-19 00:30:37,819 Current Learning Rate: 0.0083065593 +2025-02-19 00:30:39,411 Train Loss: 0.0002856, Val Loss: 0.0003561 +2025-02-19 00:30:39,413 Epoch 855/2000 +2025-02-19 00:31:22,612 Current Learning Rate: 0.0082472402 +2025-02-19 00:31:22,613 Train Loss: 0.0003230, Val Loss: 0.0003753 +2025-02-19 00:31:22,613 Epoch 856/2000 +2025-02-19 00:32:04,843 Current Learning Rate: 0.0081871199 +2025-02-19 00:32:04,843 Train Loss: 0.0003978, Val Loss: 0.0003806 +2025-02-19 00:32:04,843 Epoch 857/2000 +2025-02-19 00:32:48,608 Current Learning Rate: 0.0081262133 +2025-02-19 00:32:48,609 Train Loss: 0.0003266, Val Loss: 0.0003707 +2025-02-19 00:32:48,609 Epoch 858/2000 +2025-02-19 00:33:31,050 Current Learning Rate: 0.0080645353 +2025-02-19 00:33:31,051 Train Loss: 0.0003065, Val Loss: 0.0003960 +2025-02-19 00:33:31,051 Epoch 859/2000 +2025-02-19 00:34:13,778 Current Learning Rate: 0.0080021011 +2025-02-19 00:34:13,779 Train Loss: 0.0003886, Val Loss: 0.0003809 +2025-02-19 00:34:13,779 Epoch 860/2000 +2025-02-19 00:34:56,972 Current Learning Rate: 0.0079389263 +2025-02-19 00:34:56,973 Train Loss: 0.0003327, Val Loss: 0.0003590 +2025-02-19 00:34:56,974 Epoch 861/2000 +2025-02-19 00:35:40,335 Current Learning Rate: 0.0078750263 +2025-02-19 00:35:41,696 Train Loss: 0.0002592, Val Loss: 0.0003280 +2025-02-19 00:35:41,696 Epoch 862/2000 +2025-02-19 00:36:24,817 Current Learning Rate: 0.0078104169 +2025-02-19 00:36:24,818 Train Loss: 0.0002728, Val Loss: 0.0003468 +2025-02-19 00:36:24,818 Epoch 863/2000 +2025-02-19 
00:37:07,767 Current Learning Rate: 0.0077451141 +2025-02-19 00:37:07,767 Train Loss: 0.0002990, Val Loss: 0.0003526 +2025-02-19 00:37:07,768 Epoch 864/2000 +2025-02-19 00:37:50,539 Current Learning Rate: 0.0076791340 +2025-02-19 00:37:50,541 Train Loss: 0.0003495, Val Loss: 0.0003923 +2025-02-19 00:37:50,545 Epoch 865/2000 +2025-02-19 00:38:33,115 Current Learning Rate: 0.0076124928 +2025-02-19 00:38:33,116 Train Loss: 0.0003653, Val Loss: 0.0003805 +2025-02-19 00:38:33,116 Epoch 866/2000 +2025-02-19 00:39:15,666 Current Learning Rate: 0.0075452071 +2025-02-19 00:39:15,666 Train Loss: 0.0003262, Val Loss: 0.0003504 +2025-02-19 00:39:15,666 Epoch 867/2000 +2025-02-19 00:39:58,385 Current Learning Rate: 0.0074772933 +2025-02-19 00:39:58,386 Train Loss: 0.0002878, Val Loss: 0.0003503 +2025-02-19 00:39:58,386 Epoch 868/2000 +2025-02-19 00:40:41,309 Current Learning Rate: 0.0074087684 +2025-02-19 00:40:41,310 Train Loss: 0.0003398, Val Loss: 0.0003519 +2025-02-19 00:40:41,310 Epoch 869/2000 +2025-02-19 00:41:23,184 Current Learning Rate: 0.0073396491 +2025-02-19 00:41:23,185 Train Loss: 0.0002796, Val Loss: 0.0003315 +2025-02-19 00:41:23,185 Epoch 870/2000 +2025-02-19 00:42:06,080 Current Learning Rate: 0.0072699525 +2025-02-19 00:42:06,081 Train Loss: 0.0002905, Val Loss: 0.0003289 +2025-02-19 00:42:06,081 Epoch 871/2000 +2025-02-19 00:42:48,917 Current Learning Rate: 0.0071996958 +2025-02-19 00:42:50,654 Train Loss: 0.0002498, Val Loss: 0.0003238 +2025-02-19 00:42:50,654 Epoch 872/2000 +2025-02-19 00:43:33,487 Current Learning Rate: 0.0071288965 +2025-02-19 00:43:33,488 Train Loss: 0.0002763, Val Loss: 0.0003293 +2025-02-19 00:43:33,488 Epoch 873/2000 +2025-02-19 00:44:16,269 Current Learning Rate: 0.0070575718 +2025-02-19 00:44:17,893 Train Loss: 0.0002803, Val Loss: 0.0003193 +2025-02-19 00:44:17,893 Epoch 874/2000 +2025-02-19 00:45:00,536 Current Learning Rate: 0.0069857395 +2025-02-19 00:45:00,537 Train Loss: 0.0002881, Val Loss: 0.0003243 +2025-02-19 
00:45:00,537 Epoch 875/2000 +2025-02-19 00:45:43,388 Current Learning Rate: 0.0069134172 +2025-02-19 00:45:43,389 Train Loss: 0.0004537, Val Loss: 0.0003772 +2025-02-19 00:45:43,389 Epoch 876/2000 +2025-02-19 00:46:26,162 Current Learning Rate: 0.0068406228 +2025-02-19 00:46:26,162 Train Loss: 0.0003113, Val Loss: 0.0003422 +2025-02-19 00:46:26,162 Epoch 877/2000 +2025-02-19 00:47:08,931 Current Learning Rate: 0.0067673742 +2025-02-19 00:47:08,932 Train Loss: 0.0003074, Val Loss: 0.0003595 +2025-02-19 00:47:08,932 Epoch 878/2000 +2025-02-19 00:47:50,979 Current Learning Rate: 0.0066936896 +2025-02-19 00:47:50,979 Train Loss: 0.0003054, Val Loss: 0.0003442 +2025-02-19 00:47:50,980 Epoch 879/2000 +2025-02-19 00:48:33,647 Current Learning Rate: 0.0066195871 +2025-02-19 00:48:33,648 Train Loss: 0.0003647, Val Loss: 0.0004031 +2025-02-19 00:48:33,648 Epoch 880/2000 +2025-02-19 00:49:16,468 Current Learning Rate: 0.0065450850 +2025-02-19 00:49:16,469 Train Loss: 0.0003204, Val Loss: 0.0003457 +2025-02-19 00:49:16,469 Epoch 881/2000 +2025-02-19 00:49:59,400 Current Learning Rate: 0.0064702016 +2025-02-19 00:49:59,401 Train Loss: 0.0002452, Val Loss: 0.0003482 +2025-02-19 00:49:59,401 Epoch 882/2000 +2025-02-19 00:50:42,485 Current Learning Rate: 0.0063949555 +2025-02-19 00:50:42,486 Train Loss: 0.0002904, Val Loss: 0.0003728 +2025-02-19 00:50:42,486 Epoch 883/2000 +2025-02-19 00:51:24,899 Current Learning Rate: 0.0063193652 +2025-02-19 00:51:24,899 Train Loss: 0.0003210, Val Loss: 0.0004662 +2025-02-19 00:51:24,899 Epoch 884/2000 +2025-02-19 00:52:07,366 Current Learning Rate: 0.0062434494 +2025-02-19 00:52:07,367 Train Loss: 0.0003640, Val Loss: 0.0003680 +2025-02-19 00:52:07,367 Epoch 885/2000 +2025-02-19 00:52:49,836 Current Learning Rate: 0.0061672268 +2025-02-19 00:52:49,837 Train Loss: 0.0003595, Val Loss: 0.0003551 +2025-02-19 00:52:49,844 Epoch 886/2000 +2025-02-19 00:53:32,551 Current Learning Rate: 0.0060907162 +2025-02-19 00:53:32,551 Train Loss: 0.0003183, Val 
Loss: 0.0003223 +2025-02-19 00:53:32,551 Epoch 887/2000 +2025-02-19 00:54:14,139 Current Learning Rate: 0.0060139365 +2025-02-19 00:54:14,139 Train Loss: 0.0002886, Val Loss: 0.0003399 +2025-02-19 00:54:14,139 Epoch 888/2000 +2025-02-19 00:54:56,563 Current Learning Rate: 0.0059369066 +2025-02-19 00:54:56,563 Train Loss: 0.0003013, Val Loss: 0.0003390 +2025-02-19 00:54:56,564 Epoch 889/2000 +2025-02-19 00:55:38,941 Current Learning Rate: 0.0058596455 +2025-02-19 00:55:38,942 Train Loss: 0.0003425, Val Loss: 0.0003523 +2025-02-19 00:55:38,942 Epoch 890/2000 +2025-02-19 00:56:21,674 Current Learning Rate: 0.0057821723 +2025-02-19 00:56:21,674 Train Loss: 0.0002476, Val Loss: 0.0003302 +2025-02-19 00:56:21,674 Epoch 891/2000 +2025-02-19 00:57:04,231 Current Learning Rate: 0.0057045062 +2025-02-19 00:57:04,232 Train Loss: 0.0002801, Val Loss: 0.0003401 +2025-02-19 00:57:04,232 Epoch 892/2000 +2025-02-19 00:57:47,286 Current Learning Rate: 0.0056266662 +2025-02-19 00:57:47,287 Train Loss: 0.0002960, Val Loss: 0.0003337 +2025-02-19 00:57:47,287 Epoch 893/2000 +2025-02-19 00:58:29,800 Current Learning Rate: 0.0055486716 +2025-02-19 00:58:30,929 Train Loss: 0.0002226, Val Loss: 0.0003179 +2025-02-19 00:58:30,929 Epoch 894/2000 +2025-02-19 00:59:12,911 Current Learning Rate: 0.0054705416 +2025-02-19 00:59:12,913 Train Loss: 0.0003057, Val Loss: 0.0003780 +2025-02-19 00:59:12,913 Epoch 895/2000 +2025-02-19 00:59:56,134 Current Learning Rate: 0.0053922955 +2025-02-19 00:59:56,135 Train Loss: 0.0002994, Val Loss: 0.0003461 +2025-02-19 00:59:56,135 Epoch 896/2000 +2025-02-19 01:00:38,633 Current Learning Rate: 0.0053139526 +2025-02-19 01:00:38,634 Train Loss: 0.0002500, Val Loss: 0.0003306 +2025-02-19 01:00:38,634 Epoch 897/2000 +2025-02-19 01:01:21,482 Current Learning Rate: 0.0052355323 +2025-02-19 01:01:21,482 Train Loss: 0.0002808, Val Loss: 0.0003420 +2025-02-19 01:01:21,482 Epoch 898/2000 +2025-02-19 01:02:03,659 Current Learning Rate: 0.0051570538 +2025-02-19 
01:02:03,660 Train Loss: 0.0002643, Val Loss: 0.0003227 +2025-02-19 01:02:03,660 Epoch 899/2000 +2025-02-19 01:02:46,788 Current Learning Rate: 0.0050785366 +2025-02-19 01:02:46,789 Train Loss: 0.0003070, Val Loss: 0.0003327 +2025-02-19 01:02:46,789 Epoch 900/2000 +2025-02-19 01:03:28,822 Current Learning Rate: 0.0050000000 +2025-02-19 01:03:30,014 Train Loss: 0.0003231, Val Loss: 0.0003152 +2025-02-19 01:03:30,014 Epoch 901/2000 +2025-02-19 01:04:11,404 Current Learning Rate: 0.0049214634 +2025-02-19 01:04:12,762 Train Loss: 0.0002909, Val Loss: 0.0003041 +2025-02-19 01:04:12,762 Epoch 902/2000 +2025-02-19 01:04:54,000 Current Learning Rate: 0.0048429462 +2025-02-19 01:04:54,000 Train Loss: 0.0002575, Val Loss: 0.0003045 +2025-02-19 01:04:54,001 Epoch 903/2000 +2025-02-19 01:05:37,272 Current Learning Rate: 0.0047644677 +2025-02-19 01:05:39,050 Train Loss: 0.0002042, Val Loss: 0.0002983 +2025-02-19 01:05:39,051 Epoch 904/2000 +2025-02-19 01:06:21,563 Current Learning Rate: 0.0046860474 +2025-02-19 01:06:21,564 Train Loss: 0.0002590, Val Loss: 0.0002988 +2025-02-19 01:06:21,564 Epoch 905/2000 +2025-02-19 01:07:04,546 Current Learning Rate: 0.0046077045 +2025-02-19 01:07:05,982 Train Loss: 0.0002434, Val Loss: 0.0002903 +2025-02-19 01:07:05,982 Epoch 906/2000 +2025-02-19 01:07:48,818 Current Learning Rate: 0.0045294584 +2025-02-19 01:07:49,989 Train Loss: 0.0002208, Val Loss: 0.0002861 +2025-02-19 01:07:49,989 Epoch 907/2000 +2025-02-19 01:08:32,230 Current Learning Rate: 0.0044513284 +2025-02-19 01:08:32,231 Train Loss: 0.0002553, Val Loss: 0.0002958 +2025-02-19 01:08:32,231 Epoch 908/2000 +2025-02-19 01:09:14,470 Current Learning Rate: 0.0043733338 +2025-02-19 01:09:14,471 Train Loss: 0.0002997, Val Loss: 0.0003040 +2025-02-19 01:09:14,471 Epoch 909/2000 +2025-02-19 01:09:57,164 Current Learning Rate: 0.0042954938 +2025-02-19 01:09:57,165 Train Loss: 0.0002956, Val Loss: 0.0003092 +2025-02-19 01:09:57,165 Epoch 910/2000 +2025-02-19 01:10:40,190 Current Learning 
Rate: 0.0042178277 +2025-02-19 01:10:40,191 Train Loss: 0.0002455, Val Loss: 0.0002910 +2025-02-19 01:10:40,191 Epoch 911/2000 +2025-02-19 01:11:23,129 Current Learning Rate: 0.0041403545 +2025-02-19 01:11:23,129 Train Loss: 0.0002776, Val Loss: 0.0002980 +2025-02-19 01:11:23,129 Epoch 912/2000 +2025-02-19 01:12:06,385 Current Learning Rate: 0.0040630934 +2025-02-19 01:12:06,386 Train Loss: 0.0002499, Val Loss: 0.0002981 +2025-02-19 01:12:06,386 Epoch 913/2000 +2025-02-19 01:12:49,288 Current Learning Rate: 0.0039860635 +2025-02-19 01:12:49,289 Train Loss: 0.0002775, Val Loss: 0.0002908 +2025-02-19 01:12:49,289 Epoch 914/2000 +2025-02-19 01:13:31,999 Current Learning Rate: 0.0039092838 +2025-02-19 01:13:33,960 Train Loss: 0.0002435, Val Loss: 0.0002789 +2025-02-19 01:13:33,961 Epoch 915/2000 +2025-02-19 01:14:16,323 Current Learning Rate: 0.0038327732 +2025-02-19 01:14:16,325 Train Loss: 0.0002526, Val Loss: 0.0002906 +2025-02-19 01:14:16,325 Epoch 916/2000 +2025-02-19 01:14:58,827 Current Learning Rate: 0.0037565506 +2025-02-19 01:14:58,828 Train Loss: 0.0002669, Val Loss: 0.0002921 +2025-02-19 01:14:58,828 Epoch 917/2000 +2025-02-19 01:15:41,853 Current Learning Rate: 0.0036806348 +2025-02-19 01:15:41,854 Train Loss: 0.0002164, Val Loss: 0.0002792 +2025-02-19 01:15:41,854 Epoch 918/2000 +2025-02-19 01:16:23,813 Current Learning Rate: 0.0036050445 +2025-02-19 01:16:23,814 Train Loss: 0.0002255, Val Loss: 0.0002794 +2025-02-19 01:16:23,814 Epoch 919/2000 +2025-02-19 01:17:07,086 Current Learning Rate: 0.0035297984 +2025-02-19 01:17:07,087 Train Loss: 0.0002869, Val Loss: 0.0002800 +2025-02-19 01:17:07,087 Epoch 920/2000 +2025-02-19 01:17:49,383 Current Learning Rate: 0.0034549150 +2025-02-19 01:17:50,793 Train Loss: 0.0002698, Val Loss: 0.0002749 +2025-02-19 01:17:50,793 Epoch 921/2000 +2025-02-19 01:18:32,693 Current Learning Rate: 0.0033804129 +2025-02-19 01:18:32,693 Train Loss: 0.0002612, Val Loss: 0.0002751 +2025-02-19 01:18:32,694 Epoch 922/2000 +2025-02-19 
01:19:15,670 Current Learning Rate: 0.0033063104 +2025-02-19 01:19:17,150 Train Loss: 0.0001899, Val Loss: 0.0002719 +2025-02-19 01:19:17,150 Epoch 923/2000 +2025-02-19 01:19:58,305 Current Learning Rate: 0.0032326258 +2025-02-19 01:19:59,527 Train Loss: 0.0002260, Val Loss: 0.0002717 +2025-02-19 01:19:59,527 Epoch 924/2000 +2025-02-19 01:20:41,135 Current Learning Rate: 0.0031593772 +2025-02-19 01:20:41,136 Train Loss: 0.0002574, Val Loss: 0.0002725 +2025-02-19 01:20:41,136 Epoch 925/2000 +2025-02-19 01:21:24,003 Current Learning Rate: 0.0030865828 +2025-02-19 01:21:24,004 Train Loss: 0.0002205, Val Loss: 0.0002747 +2025-02-19 01:21:24,004 Epoch 926/2000 +2025-02-19 01:22:07,051 Current Learning Rate: 0.0030142605 +2025-02-19 01:22:07,051 Train Loss: 0.0002199, Val Loss: 0.0002769 +2025-02-19 01:22:07,052 Epoch 927/2000 +2025-02-19 01:22:49,094 Current Learning Rate: 0.0029424282 +2025-02-19 01:22:49,094 Train Loss: 0.0002543, Val Loss: 0.0002740 +2025-02-19 01:22:49,094 Epoch 928/2000 +2025-02-19 01:23:31,325 Current Learning Rate: 0.0028711035 +2025-02-19 01:23:32,391 Train Loss: 0.0002096, Val Loss: 0.0002696 +2025-02-19 01:23:32,392 Epoch 929/2000 +2025-02-19 01:24:13,969 Current Learning Rate: 0.0028003042 +2025-02-19 01:24:13,971 Train Loss: 0.0002272, Val Loss: 0.0002752 +2025-02-19 01:24:13,971 Epoch 930/2000 +2025-02-19 01:24:56,385 Current Learning Rate: 0.0027300475 +2025-02-19 01:24:56,386 Train Loss: 0.0002240, Val Loss: 0.0002701 +2025-02-19 01:24:56,386 Epoch 931/2000 +2025-02-19 01:25:39,502 Current Learning Rate: 0.0026603509 +2025-02-19 01:25:39,503 Train Loss: 0.0002242, Val Loss: 0.0002711 +2025-02-19 01:25:39,503 Epoch 932/2000 +2025-02-19 01:26:22,401 Current Learning Rate: 0.0025912316 +2025-02-19 01:26:24,370 Train Loss: 0.0002436, Val Loss: 0.0002676 +2025-02-19 01:26:24,371 Epoch 933/2000 +2025-02-19 01:27:05,570 Current Learning Rate: 0.0025227067 +2025-02-19 01:27:07,078 Train Loss: 0.0002148, Val Loss: 0.0002638 +2025-02-19 
01:27:07,086 Epoch 934/2000 +2025-02-19 01:27:49,081 Current Learning Rate: 0.0024547929 +2025-02-19 01:27:50,702 Train Loss: 0.0002251, Val Loss: 0.0002619 +2025-02-19 01:27:50,702 Epoch 935/2000 +2025-02-19 01:28:31,754 Current Learning Rate: 0.0023875072 +2025-02-19 01:28:33,036 Train Loss: 0.0001827, Val Loss: 0.0002602 +2025-02-19 01:28:33,036 Epoch 936/2000 +2025-02-19 01:29:14,392 Current Learning Rate: 0.0023208660 +2025-02-19 01:29:15,488 Train Loss: 0.0002014, Val Loss: 0.0002590 +2025-02-19 01:29:15,489 Epoch 937/2000 +2025-02-19 01:29:57,083 Current Learning Rate: 0.0022548859 +2025-02-19 01:29:58,469 Train Loss: 0.0002362, Val Loss: 0.0002579 +2025-02-19 01:29:58,469 Epoch 938/2000 +2025-02-19 01:30:39,993 Current Learning Rate: 0.0021895831 +2025-02-19 01:30:41,455 Train Loss: 0.0001736, Val Loss: 0.0002557 +2025-02-19 01:30:41,458 Epoch 939/2000 +2025-02-19 01:31:24,250 Current Learning Rate: 0.0021249737 +2025-02-19 01:31:24,251 Train Loss: 0.0002258, Val Loss: 0.0002566 +2025-02-19 01:31:24,251 Epoch 940/2000 +2025-02-19 01:32:07,128 Current Learning Rate: 0.0020610737 +2025-02-19 01:32:08,622 Train Loss: 0.0002203, Val Loss: 0.0002551 +2025-02-19 01:32:08,622 Epoch 941/2000 +2025-02-19 01:32:50,378 Current Learning Rate: 0.0019978989 +2025-02-19 01:32:50,379 Train Loss: 0.0002519, Val Loss: 0.0002554 +2025-02-19 01:32:50,379 Epoch 942/2000 +2025-02-19 01:33:33,066 Current Learning Rate: 0.0019354647 +2025-02-19 01:33:34,782 Train Loss: 0.0002501, Val Loss: 0.0002541 +2025-02-19 01:33:34,784 Epoch 943/2000 +2025-02-19 01:34:17,343 Current Learning Rate: 0.0018737867 +2025-02-19 01:34:19,231 Train Loss: 0.0002052, Val Loss: 0.0002531 +2025-02-19 01:34:19,231 Epoch 944/2000 +2025-02-19 01:35:02,390 Current Learning Rate: 0.0018128801 +2025-02-19 01:35:04,389 Train Loss: 0.0001850, Val Loss: 0.0002522 +2025-02-19 01:35:04,389 Epoch 945/2000 +2025-02-19 01:35:47,356 Current Learning Rate: 0.0017527598 +2025-02-19 01:35:47,357 Train Loss: 0.0002705, Val 
Loss: 0.0002529 +2025-02-19 01:35:47,357 Epoch 946/2000 +2025-02-19 01:36:30,434 Current Learning Rate: 0.0016934407 +2025-02-19 01:36:31,908 Train Loss: 0.0002173, Val Loss: 0.0002519 +2025-02-19 01:36:31,908 Epoch 947/2000 +2025-02-19 01:37:14,105 Current Learning Rate: 0.0016349374 +2025-02-19 01:37:15,502 Train Loss: 0.0001947, Val Loss: 0.0002517 +2025-02-19 01:37:15,502 Epoch 948/2000 +2025-02-19 01:37:57,388 Current Learning Rate: 0.0015772645 +2025-02-19 01:37:57,389 Train Loss: 0.0001892, Val Loss: 0.0002519 +2025-02-19 01:37:57,389 Epoch 949/2000 +2025-02-19 01:38:40,050 Current Learning Rate: 0.0015204360 +2025-02-19 01:38:41,940 Train Loss: 0.0001884, Val Loss: 0.0002515 +2025-02-19 01:38:41,940 Epoch 950/2000 +2025-02-19 01:39:24,606 Current Learning Rate: 0.0014644661 +2025-02-19 01:39:26,431 Train Loss: 0.0001907, Val Loss: 0.0002505 +2025-02-19 01:39:26,431 Epoch 951/2000 +2025-02-19 01:40:08,522 Current Learning Rate: 0.0014093685 +2025-02-19 01:40:09,642 Train Loss: 0.0002312, Val Loss: 0.0002488 +2025-02-19 01:40:09,657 Epoch 952/2000 +2025-02-19 01:40:51,824 Current Learning Rate: 0.0013551569 +2025-02-19 01:40:53,342 Train Loss: 0.0001940, Val Loss: 0.0002480 +2025-02-19 01:40:53,343 Epoch 953/2000 +2025-02-19 01:41:35,574 Current Learning Rate: 0.0013018445 +2025-02-19 01:41:37,275 Train Loss: 0.0002065, Val Loss: 0.0002470 +2025-02-19 01:41:37,275 Epoch 954/2000 +2025-02-19 01:42:20,208 Current Learning Rate: 0.0012494447 +2025-02-19 01:42:21,900 Train Loss: 0.0001652, Val Loss: 0.0002463 +2025-02-19 01:42:21,900 Epoch 955/2000 +2025-02-19 01:43:04,933 Current Learning Rate: 0.0011979702 +2025-02-19 01:43:04,942 Train Loss: 0.0001936, Val Loss: 0.0002464 +2025-02-19 01:43:04,943 Epoch 956/2000 +2025-02-19 01:43:47,806 Current Learning Rate: 0.0011474338 +2025-02-19 01:43:49,643 Train Loss: 0.0001767, Val Loss: 0.0002458 +2025-02-19 01:43:49,643 Epoch 957/2000 +2025-02-19 01:44:31,073 Current Learning Rate: 0.0010978480 +2025-02-19 
01:44:31,074 Train Loss: 0.0002368, Val Loss: 0.0002462 +2025-02-19 01:44:31,074 Epoch 958/2000 +2025-02-19 01:45:14,623 Current Learning Rate: 0.0010492249 +2025-02-19 01:45:16,688 Train Loss: 0.0002402, Val Loss: 0.0002456 +2025-02-19 01:45:16,689 Epoch 959/2000 +2025-02-19 01:45:58,954 Current Learning Rate: 0.0010015767 +2025-02-19 01:46:00,493 Train Loss: 0.0001970, Val Loss: 0.0002446 +2025-02-19 01:46:00,494 Epoch 960/2000 +2025-02-19 01:46:42,534 Current Learning Rate: 0.0009549150 +2025-02-19 01:46:42,535 Train Loss: 0.0002244, Val Loss: 0.0002450 +2025-02-19 01:46:42,535 Epoch 961/2000 +2025-02-19 01:47:26,138 Current Learning Rate: 0.0009092514 +2025-02-19 01:47:26,139 Train Loss: 0.0003043, Val Loss: 0.0002449 +2025-02-19 01:47:26,139 Epoch 962/2000 +2025-02-19 01:48:09,507 Current Learning Rate: 0.0008645971 +2025-02-19 01:48:11,575 Train Loss: 0.0002121, Val Loss: 0.0002443 +2025-02-19 01:48:11,575 Epoch 963/2000 +2025-02-19 01:48:54,842 Current Learning Rate: 0.0008209632 +2025-02-19 01:48:56,954 Train Loss: 0.0002140, Val Loss: 0.0002442 +2025-02-19 01:48:56,955 Epoch 964/2000 +2025-02-19 01:49:40,630 Current Learning Rate: 0.0007783604 +2025-02-19 01:49:42,454 Train Loss: 0.0001741, Val Loss: 0.0002437 +2025-02-19 01:49:42,454 Epoch 965/2000 +2025-02-19 01:50:25,641 Current Learning Rate: 0.0007367992 +2025-02-19 01:50:27,274 Train Loss: 0.0002211, Val Loss: 0.0002434 +2025-02-19 01:50:27,275 Epoch 966/2000 +2025-02-19 01:51:09,831 Current Learning Rate: 0.0006962899 +2025-02-19 01:51:10,767 Train Loss: 0.0001928, Val Loss: 0.0002431 +2025-02-19 01:51:10,768 Epoch 967/2000 +2025-02-19 01:51:52,774 Current Learning Rate: 0.0006568424 +2025-02-19 01:51:53,952 Train Loss: 0.0001866, Val Loss: 0.0002428 +2025-02-19 01:51:53,953 Epoch 968/2000 +2025-02-19 01:52:35,603 Current Learning Rate: 0.0006184666 +2025-02-19 01:52:35,604 Train Loss: 0.0001941, Val Loss: 0.0002429 +2025-02-19 01:52:35,604 Epoch 969/2000 +2025-02-19 01:53:18,960 Current Learning 
Rate: 0.0005811718 +2025-02-19 01:53:18,960 Train Loss: 0.0002083, Val Loss: 0.0002431 +2025-02-19 01:53:18,960 Epoch 970/2000 +2025-02-19 01:54:02,211 Current Learning Rate: 0.0005449674 +2025-02-19 01:54:03,914 Train Loss: 0.0002040, Val Loss: 0.0002424 +2025-02-19 01:54:03,915 Epoch 971/2000 +2025-02-19 01:54:46,386 Current Learning Rate: 0.0005098621 +2025-02-19 01:54:48,457 Train Loss: 0.0001605, Val Loss: 0.0002422 +2025-02-19 01:54:48,457 Epoch 972/2000 +2025-02-19 01:55:30,788 Current Learning Rate: 0.0004758647 +2025-02-19 01:55:32,516 Train Loss: 0.0001745, Val Loss: 0.0002421 +2025-02-19 01:55:32,516 Epoch 973/2000 +2025-02-19 01:56:14,135 Current Learning Rate: 0.0004429836 +2025-02-19 01:56:15,724 Train Loss: 0.0001826, Val Loss: 0.0002419 +2025-02-19 01:56:15,724 Epoch 974/2000 +2025-02-19 01:56:58,804 Current Learning Rate: 0.0004112269 +2025-02-19 01:56:58,805 Train Loss: 0.0002467, Val Loss: 0.0002421 +2025-02-19 01:56:58,805 Epoch 975/2000 +2025-02-19 01:57:41,550 Current Learning Rate: 0.0003806023 +2025-02-19 01:57:43,701 Train Loss: 0.0002341, Val Loss: 0.0002418 +2025-02-19 01:57:43,701 Epoch 976/2000 +2025-02-19 01:58:26,478 Current Learning Rate: 0.0003511176 +2025-02-19 01:58:26,479 Train Loss: 0.0001757, Val Loss: 0.0002418 +2025-02-19 01:58:26,480 Epoch 977/2000 +2025-02-19 01:59:09,330 Current Learning Rate: 0.0003227798 +2025-02-19 01:59:10,909 Train Loss: 0.0001768, Val Loss: 0.0002416 +2025-02-19 01:59:10,909 Epoch 978/2000 +2025-02-19 01:59:52,454 Current Learning Rate: 0.0002955962 +2025-02-19 01:59:52,457 Train Loss: 0.0002672, Val Loss: 0.0002418 +2025-02-19 01:59:52,461 Epoch 979/2000 +2025-02-19 02:00:36,021 Current Learning Rate: 0.0002695732 +2025-02-19 02:00:37,816 Train Loss: 0.0002301, Val Loss: 0.0002416 +2025-02-19 02:00:37,817 Epoch 980/2000 +2025-02-19 02:01:20,378 Current Learning Rate: 0.0002447174 +2025-02-19 02:01:22,232 Train Loss: 0.0001873, Val Loss: 0.0002414 +2025-02-19 02:01:22,233 Epoch 981/2000 +2025-02-19 
02:02:05,316 Current Learning Rate: 0.0002210349 +2025-02-19 02:02:07,409 Train Loss: 0.0002151, Val Loss: 0.0002412 +2025-02-19 02:02:07,412 Epoch 982/2000 +2025-02-19 02:02:48,898 Current Learning Rate: 0.0001985316 +2025-02-19 02:02:50,670 Train Loss: 0.0001875, Val Loss: 0.0002410 +2025-02-19 02:02:50,673 Epoch 983/2000 +2025-02-19 02:03:32,828 Current Learning Rate: 0.0001772129 +2025-02-19 02:03:34,937 Train Loss: 0.0002122, Val Loss: 0.0002408 +2025-02-19 02:03:34,937 Epoch 984/2000 +2025-02-19 02:04:18,252 Current Learning Rate: 0.0001570842 +2025-02-19 02:04:20,174 Train Loss: 0.0001830, Val Loss: 0.0002407 +2025-02-19 02:04:20,174 Epoch 985/2000 +2025-02-19 02:05:02,298 Current Learning Rate: 0.0001381504 +2025-02-19 02:05:03,853 Train Loss: 0.0002186, Val Loss: 0.0002407 +2025-02-19 02:05:03,853 Epoch 986/2000 +2025-02-19 02:05:46,071 Current Learning Rate: 0.0001204162 +2025-02-19 02:05:47,564 Train Loss: 0.0001752, Val Loss: 0.0002405 +2025-02-19 02:05:47,564 Epoch 987/2000 +2025-02-19 02:06:30,825 Current Learning Rate: 0.0001038859 +2025-02-19 02:06:30,825 Train Loss: 0.0001828, Val Loss: 0.0002406 +2025-02-19 02:06:30,826 Epoch 988/2000 +2025-02-19 02:07:13,711 Current Learning Rate: 0.0000885637 +2025-02-19 02:07:15,314 Train Loss: 0.0002191, Val Loss: 0.0002405 +2025-02-19 02:07:15,314 Epoch 989/2000 +2025-02-19 02:07:56,615 Current Learning Rate: 0.0000744534 +2025-02-19 02:07:57,978 Train Loss: 0.0002214, Val Loss: 0.0002405 +2025-02-19 02:07:57,986 Epoch 990/2000 +2025-02-19 02:08:40,035 Current Learning Rate: 0.0000615583 +2025-02-19 02:08:40,036 Train Loss: 0.0001844, Val Loss: 0.0002406 +2025-02-19 02:08:40,037 Epoch 991/2000 +2025-02-19 02:09:22,797 Current Learning Rate: 0.0000498817 +2025-02-19 02:09:22,798 Train Loss: 0.0001921, Val Loss: 0.0002406 +2025-02-19 02:09:22,799 Epoch 992/2000 +2025-02-19 02:10:06,294 Current Learning Rate: 0.0000394265 +2025-02-19 02:10:06,294 Train Loss: 0.0001921, Val Loss: 0.0002405 +2025-02-19 
02:10:06,295 Epoch 993/2000 +2025-02-19 02:10:49,605 Current Learning Rate: 0.0000301952 +2025-02-19 02:10:51,653 Train Loss: 0.0001966, Val Loss: 0.0002404 +2025-02-19 02:10:51,654 Epoch 994/2000 +2025-02-19 02:11:34,226 Current Learning Rate: 0.0000221902 +2025-02-19 02:11:35,736 Train Loss: 0.0001957, Val Loss: 0.0002403 +2025-02-19 02:11:35,736 Epoch 995/2000 +2025-02-19 02:12:17,386 Current Learning Rate: 0.0000154133 +2025-02-19 02:12:19,186 Train Loss: 0.0002238, Val Loss: 0.0002403 +2025-02-19 02:12:19,187 Epoch 996/2000 +2025-02-19 02:13:02,743 Current Learning Rate: 0.0000098664 +2025-02-19 02:13:02,744 Train Loss: 0.0002499, Val Loss: 0.0002404 +2025-02-19 02:13:02,744 Epoch 997/2000 +2025-02-19 02:13:45,479 Current Learning Rate: 0.0000055506 +2025-02-19 02:13:45,479 Train Loss: 0.0002550, Val Loss: 0.0002403 +2025-02-19 02:13:45,479 Epoch 998/2000 +2025-02-19 02:14:27,901 Current Learning Rate: 0.0000024672 +2025-02-19 02:14:27,902 Train Loss: 0.0002278, Val Loss: 0.0002403 +2025-02-19 02:14:27,902 Epoch 999/2000 +2025-02-19 02:15:10,109 Current Learning Rate: 0.0000006168 +2025-02-19 02:15:10,110 Train Loss: 0.0002092, Val Loss: 0.0002404 +2025-02-19 02:15:10,110 Epoch 1000/2000 +2025-02-19 02:15:52,914 Current Learning Rate: 0.0000000000 +2025-02-19 02:15:52,915 Train Loss: 0.0002107, Val Loss: 0.0002403 +2025-02-19 02:15:52,915 Epoch 1001/2000 +2025-02-19 02:16:36,119 Current Learning Rate: 0.0000006168 +2025-02-19 02:16:36,119 Train Loss: 0.0001768, Val Loss: 0.0002404 +2025-02-19 02:16:36,120 Epoch 1002/2000 +2025-02-19 02:17:19,078 Current Learning Rate: 0.0000024672 +2025-02-19 02:17:19,079 Train Loss: 0.0001856, Val Loss: 0.0002404 +2025-02-19 02:17:19,079 Epoch 1003/2000 +2025-02-19 02:18:01,039 Current Learning Rate: 0.0000055506 +2025-02-19 02:18:01,040 Train Loss: 0.0001862, Val Loss: 0.0002404 +2025-02-19 02:18:01,040 Epoch 1004/2000 +2025-02-19 02:18:43,205 Current Learning Rate: 0.0000098664 +2025-02-19 02:18:43,206 Train Loss: 
0.0002128, Val Loss: 0.0002404 +2025-02-19 02:18:43,206 Epoch 1005/2000 +2025-02-19 02:19:25,497 Current Learning Rate: 0.0000154133 +2025-02-19 02:19:25,498 Train Loss: 0.0002376, Val Loss: 0.0002404 +2025-02-19 02:19:25,498 Epoch 1006/2000 +2025-02-19 02:20:08,309 Current Learning Rate: 0.0000221902 +2025-02-19 02:20:08,309 Train Loss: 0.0001808, Val Loss: 0.0002404 +2025-02-19 02:20:08,310 Epoch 1007/2000 +2025-02-19 02:20:50,855 Current Learning Rate: 0.0000301952 +2025-02-19 02:20:50,856 Train Loss: 0.0001658, Val Loss: 0.0002404 +2025-02-19 02:20:50,856 Epoch 1008/2000 +2025-02-19 02:21:33,196 Current Learning Rate: 0.0000394265 +2025-02-19 02:21:33,197 Train Loss: 0.0002466, Val Loss: 0.0002404 +2025-02-19 02:21:33,197 Epoch 1009/2000 +2025-02-19 02:22:16,233 Current Learning Rate: 0.0000498817 +2025-02-19 02:22:16,234 Train Loss: 0.0002309, Val Loss: 0.0002405 +2025-02-19 02:22:16,234 Epoch 1010/2000 +2025-02-19 02:22:58,350 Current Learning Rate: 0.0000615583 +2025-02-19 02:22:58,351 Train Loss: 0.0001931, Val Loss: 0.0002405 +2025-02-19 02:22:58,351 Epoch 1011/2000 +2025-02-19 02:23:40,729 Current Learning Rate: 0.0000744534 +2025-02-19 02:23:40,729 Train Loss: 0.0002091, Val Loss: 0.0002405 +2025-02-19 02:23:40,730 Epoch 1012/2000 +2025-02-19 02:24:23,413 Current Learning Rate: 0.0000885637 +2025-02-19 02:24:23,414 Train Loss: 0.0001961, Val Loss: 0.0002404 +2025-02-19 02:24:23,414 Epoch 1013/2000 +2025-02-19 02:25:05,268 Current Learning Rate: 0.0001038859 +2025-02-19 02:25:05,269 Train Loss: 0.0001905, Val Loss: 0.0002404 +2025-02-19 02:25:05,269 Epoch 1014/2000 +2025-02-19 02:25:48,047 Current Learning Rate: 0.0001204162 +2025-02-19 02:25:48,048 Train Loss: 0.0002255, Val Loss: 0.0002404 +2025-02-19 02:25:48,048 Epoch 1015/2000 +2025-02-19 02:26:31,121 Current Learning Rate: 0.0001381504 +2025-02-19 02:26:31,122 Train Loss: 0.0001880, Val Loss: 0.0002405 +2025-02-19 02:26:31,122 Epoch 1016/2000 +2025-02-19 02:27:13,479 Current Learning Rate: 
0.0001570842 +2025-02-19 02:27:13,480 Train Loss: 0.0002158, Val Loss: 0.0002405 +2025-02-19 02:27:13,480 Epoch 1017/2000 +2025-02-19 02:27:55,659 Current Learning Rate: 0.0001772129 +2025-02-19 02:27:55,659 Train Loss: 0.0001954, Val Loss: 0.0002406 +2025-02-19 02:27:55,660 Epoch 1018/2000 +2025-02-19 02:28:38,086 Current Learning Rate: 0.0001985316 +2025-02-19 02:28:38,087 Train Loss: 0.0001834, Val Loss: 0.0002407 +2025-02-19 02:28:38,087 Epoch 1019/2000 +2025-02-19 02:29:21,202 Current Learning Rate: 0.0002210349 +2025-02-19 02:29:21,203 Train Loss: 0.0001529, Val Loss: 0.0002408 +2025-02-19 02:29:21,203 Epoch 1020/2000 +2025-02-19 02:30:03,353 Current Learning Rate: 0.0002447174 +2025-02-19 02:30:03,353 Train Loss: 0.0002022, Val Loss: 0.0002408 +2025-02-19 02:30:03,353 Epoch 1021/2000 +2025-02-19 02:30:45,509 Current Learning Rate: 0.0002695732 +2025-02-19 02:30:45,510 Train Loss: 0.0002511, Val Loss: 0.0002411 +2025-02-19 02:30:45,510 Epoch 1022/2000 +2025-02-19 02:31:28,403 Current Learning Rate: 0.0002955962 +2025-02-19 02:31:28,403 Train Loss: 0.0001961, Val Loss: 0.0002411 +2025-02-19 02:31:28,403 Epoch 1023/2000 +2025-02-19 02:32:10,349 Current Learning Rate: 0.0003227798 +2025-02-19 02:32:10,349 Train Loss: 0.0002385, Val Loss: 0.0002411 +2025-02-19 02:32:10,350 Epoch 1024/2000 +2025-02-19 02:32:52,876 Current Learning Rate: 0.0003511176 +2025-02-19 02:32:52,877 Train Loss: 0.0001825, Val Loss: 0.0002408 +2025-02-19 02:32:52,877 Epoch 1025/2000 +2025-02-19 02:33:36,004 Current Learning Rate: 0.0003806023 +2025-02-19 02:33:36,004 Train Loss: 0.0002116, Val Loss: 0.0002408 +2025-02-19 02:33:36,005 Epoch 1026/2000 +2025-02-19 02:34:19,114 Current Learning Rate: 0.0004112269 +2025-02-19 02:34:19,114 Train Loss: 0.0002668, Val Loss: 0.0002409 +2025-02-19 02:34:19,114 Epoch 1027/2000 +2025-02-19 02:35:01,809 Current Learning Rate: 0.0004429836 +2025-02-19 02:35:01,810 Train Loss: 0.0001989, Val Loss: 0.0002409 +2025-02-19 02:35:01,810 Epoch 1028/2000 
+2025-02-19 02:35:44,730 Current Learning Rate: 0.0004758647 +2025-02-19 02:35:44,731 Train Loss: 0.0002409, Val Loss: 0.0002410 +2025-02-19 02:35:44,731 Epoch 1029/2000 +2025-02-19 02:36:27,606 Current Learning Rate: 0.0005098621 +2025-02-19 02:36:27,606 Train Loss: 0.0002303, Val Loss: 0.0002411 +2025-02-19 02:36:27,606 Epoch 1030/2000 +2025-02-19 02:37:09,911 Current Learning Rate: 0.0005449674 +2025-02-19 02:37:09,912 Train Loss: 0.0001906, Val Loss: 0.0002410 +2025-02-19 02:37:09,912 Epoch 1031/2000 +2025-02-19 02:37:52,823 Current Learning Rate: 0.0005811718 +2025-02-19 02:37:52,824 Train Loss: 0.0001867, Val Loss: 0.0002411 +2025-02-19 02:37:52,824 Epoch 1032/2000 +2025-02-19 02:38:35,812 Current Learning Rate: 0.0006184666 +2025-02-19 02:38:35,812 Train Loss: 0.0002085, Val Loss: 0.0002412 +2025-02-19 02:38:35,844 Epoch 1033/2000 +2025-02-19 02:39:18,909 Current Learning Rate: 0.0006568424 +2025-02-19 02:39:18,909 Train Loss: 0.0001863, Val Loss: 0.0002415 +2025-02-19 02:39:18,909 Epoch 1034/2000 +2025-02-19 02:40:01,934 Current Learning Rate: 0.0006962899 +2025-02-19 02:40:01,950 Train Loss: 0.0001951, Val Loss: 0.0002417 +2025-02-19 02:40:01,950 Epoch 1035/2000 +2025-02-19 02:40:44,917 Current Learning Rate: 0.0007367992 +2025-02-19 02:40:44,917 Train Loss: 0.0002074, Val Loss: 0.0002412 +2025-02-19 02:40:44,918 Epoch 1036/2000 +2025-02-19 02:41:27,037 Current Learning Rate: 0.0007783604 +2025-02-19 02:41:27,038 Train Loss: 0.0002001, Val Loss: 0.0002414 +2025-02-19 02:41:27,038 Epoch 1037/2000 +2025-02-19 02:42:09,820 Current Learning Rate: 0.0008209632 +2025-02-19 02:42:09,820 Train Loss: 0.0001536, Val Loss: 0.0002410 +2025-02-19 02:42:09,821 Epoch 1038/2000 +2025-02-19 02:42:52,863 Current Learning Rate: 0.0008645971 +2025-02-19 02:42:52,863 Train Loss: 0.0001840, Val Loss: 0.0002419 +2025-02-19 02:42:52,864 Epoch 1039/2000 +2025-02-19 02:43:35,671 Current Learning Rate: 0.0009092514 +2025-02-19 02:43:35,671 Train Loss: 0.0002240, Val Loss: 0.0002426 
+2025-02-19 02:43:35,672 Epoch 1040/2000 +2025-02-19 02:44:18,664 Current Learning Rate: 0.0009549150 +2025-02-19 02:44:18,664 Train Loss: 0.0001873, Val Loss: 0.0002443 +2025-02-19 02:44:18,665 Epoch 1041/2000 +2025-02-19 02:45:01,305 Current Learning Rate: 0.0010015767 +2025-02-19 02:45:01,306 Train Loss: 0.0002169, Val Loss: 0.0002440 +2025-02-19 02:45:01,306 Epoch 1042/2000 +2025-02-19 02:45:44,574 Current Learning Rate: 0.0010492249 +2025-02-19 02:45:44,575 Train Loss: 0.0001987, Val Loss: 0.0002429 +2025-02-19 02:45:44,575 Epoch 1043/2000 +2025-02-19 02:46:27,623 Current Learning Rate: 0.0010978480 +2025-02-19 02:46:27,624 Train Loss: 0.0001909, Val Loss: 0.0002433 +2025-02-19 02:46:27,624 Epoch 1044/2000 +2025-02-19 02:47:09,458 Current Learning Rate: 0.0011474338 +2025-02-19 02:47:09,458 Train Loss: 0.0001822, Val Loss: 0.0002547 +2025-02-19 02:47:09,458 Epoch 1045/2000 +2025-02-19 02:47:52,697 Current Learning Rate: 0.0011979702 +2025-02-19 02:47:52,698 Train Loss: 0.0002643, Val Loss: 0.0002500 +2025-02-19 02:47:52,698 Epoch 1046/2000 +2025-02-19 02:48:35,620 Current Learning Rate: 0.0012494447 +2025-02-19 02:48:35,621 Train Loss: 0.0001741, Val Loss: 0.0002426 +2025-02-19 02:48:35,621 Epoch 1047/2000 +2025-02-19 02:49:17,580 Current Learning Rate: 0.0013018445 +2025-02-19 02:49:17,580 Train Loss: 0.0002105, Val Loss: 0.0002442 +2025-02-19 02:49:17,581 Epoch 1048/2000 +2025-02-19 02:50:00,683 Current Learning Rate: 0.0013551569 +2025-02-19 02:50:00,683 Train Loss: 0.0002068, Val Loss: 0.0002428 +2025-02-19 02:50:00,684 Epoch 1049/2000 +2025-02-19 02:50:42,972 Current Learning Rate: 0.0014093685 +2025-02-19 02:50:42,972 Train Loss: 0.0001789, Val Loss: 0.0002421 +2025-02-19 02:50:42,973 Epoch 1050/2000 +2025-02-19 02:51:25,472 Current Learning Rate: 0.0014644661 +2025-02-19 02:51:25,473 Train Loss: 0.0002605, Val Loss: 0.0002467 +2025-02-19 02:51:25,473 Epoch 1051/2000 +2025-02-19 02:52:08,662 Current Learning Rate: 0.0015204360 +2025-02-19 02:52:08,667 
Train Loss: 0.0001630, Val Loss: 0.0002442 +2025-02-19 02:52:08,667 Epoch 1052/2000 +2025-02-19 02:52:51,733 Current Learning Rate: 0.0015772645 +2025-02-19 02:52:51,734 Train Loss: 0.0002045, Val Loss: 0.0002469 +2025-02-19 02:52:51,734 Epoch 1053/2000 +2025-02-19 02:53:34,550 Current Learning Rate: 0.0016349374 +2025-02-19 02:53:34,550 Train Loss: 0.0001714, Val Loss: 0.0002444 +2025-02-19 02:53:34,550 Epoch 1054/2000 +2025-02-19 02:54:17,575 Current Learning Rate: 0.0016934407 +2025-02-19 02:54:17,576 Train Loss: 0.0001602, Val Loss: 0.0002427 +2025-02-19 02:54:17,576 Epoch 1055/2000 +2025-02-19 02:54:59,779 Current Learning Rate: 0.0017527598 +2025-02-19 02:54:59,779 Train Loss: 0.0002511, Val Loss: 0.0002737 +2025-02-19 02:54:59,780 Epoch 1056/2000 +2025-02-19 02:55:41,777 Current Learning Rate: 0.0018128801 +2025-02-19 02:55:41,778 Train Loss: 0.0002528, Val Loss: 0.0002682 +2025-02-19 02:55:41,778 Epoch 1057/2000 +2025-02-19 02:56:25,096 Current Learning Rate: 0.0018737867 +2025-02-19 02:56:25,096 Train Loss: 0.0001692, Val Loss: 0.0002470 +2025-02-19 02:56:25,097 Epoch 1058/2000 +2025-02-19 02:57:07,568 Current Learning Rate: 0.0019354647 +2025-02-19 02:57:07,569 Train Loss: 0.0002568, Val Loss: 0.0002665 +2025-02-19 02:57:07,569 Epoch 1059/2000 +2025-02-19 02:57:50,342 Current Learning Rate: 0.0019978989 +2025-02-19 02:57:50,350 Train Loss: 0.0002297, Val Loss: 0.0002527 +2025-02-19 02:57:50,353 Epoch 1060/2000 +2025-02-19 02:58:33,580 Current Learning Rate: 0.0020610737 +2025-02-19 02:58:33,581 Train Loss: 0.0002015, Val Loss: 0.0002620 +2025-02-19 02:58:33,581 Epoch 1061/2000 +2025-02-19 02:59:15,640 Current Learning Rate: 0.0021249737 +2025-02-19 02:59:15,641 Train Loss: 0.0001785, Val Loss: 0.0002485 +2025-02-19 02:59:15,641 Epoch 1062/2000 +2025-02-19 02:59:58,039 Current Learning Rate: 0.0021895831 +2025-02-19 02:59:58,040 Train Loss: 0.0001988, Val Loss: 0.0002520 +2025-02-19 02:59:58,040 Epoch 1063/2000 +2025-02-19 03:00:41,340 Current Learning 
Rate: 0.0022548859 +2025-02-19 03:00:41,340 Train Loss: 0.0002370, Val Loss: 0.0002510 +2025-02-19 03:00:41,341 Epoch 1064/2000 +2025-02-19 03:01:24,515 Current Learning Rate: 0.0023208660 +2025-02-19 03:01:24,515 Train Loss: 0.0002544, Val Loss: 0.0002793 +2025-02-19 03:01:24,515 Epoch 1065/2000 +2025-02-19 03:02:07,061 Current Learning Rate: 0.0023875072 +2025-02-19 03:02:07,061 Train Loss: 0.0001976, Val Loss: 0.0002534 +2025-02-19 03:02:07,062 Epoch 1066/2000 +2025-02-19 03:02:49,896 Current Learning Rate: 0.0024547929 +2025-02-19 03:02:49,896 Train Loss: 0.0002091, Val Loss: 0.0002630 +2025-02-19 03:02:49,896 Epoch 1067/2000 +2025-02-19 03:03:32,254 Current Learning Rate: 0.0025227067 +2025-02-19 03:03:32,255 Train Loss: 0.0001926, Val Loss: 0.0002540 +2025-02-19 03:03:32,255 Epoch 1068/2000 +2025-02-19 03:04:15,089 Current Learning Rate: 0.0025912316 +2025-02-19 03:04:15,090 Train Loss: 0.0002061, Val Loss: 0.0002576 +2025-02-19 03:04:15,090 Epoch 1069/2000 +2025-02-19 03:04:58,923 Current Learning Rate: 0.0026603509 +2025-02-19 03:04:58,924 Train Loss: 0.0002317, Val Loss: 0.0002534 +2025-02-19 03:04:58,925 Epoch 1070/2000 +2025-02-19 03:05:42,697 Current Learning Rate: 0.0027300475 +2025-02-19 03:05:42,697 Train Loss: 0.0002962, Val Loss: 0.0002856 +2025-02-19 03:05:42,698 Epoch 1071/2000 +2025-02-19 03:06:25,295 Current Learning Rate: 0.0028003042 +2025-02-19 03:06:25,296 Train Loss: 0.0001934, Val Loss: 0.0002571 +2025-02-19 03:06:25,296 Epoch 1072/2000 +2025-02-19 03:07:08,345 Current Learning Rate: 0.0028711035 +2025-02-19 03:07:08,346 Train Loss: 0.0001742, Val Loss: 0.0002518 +2025-02-19 03:07:08,346 Epoch 1073/2000 +2025-02-19 03:07:51,696 Current Learning Rate: 0.0029424282 +2025-02-19 03:07:51,696 Train Loss: 0.0001922, Val Loss: 0.0002530 +2025-02-19 03:07:51,697 Epoch 1074/2000 +2025-02-19 03:08:34,096 Current Learning Rate: 0.0030142605 +2025-02-19 03:08:34,097 Train Loss: 0.0001934, Val Loss: 0.0002503 +2025-02-19 03:08:34,097 Epoch 1075/2000 
+2025-02-19 03:09:17,150 Current Learning Rate: 0.0030865828 +2025-02-19 03:09:17,151 Train Loss: 0.0002513, Val Loss: 0.0002570 +2025-02-19 03:09:17,151 Epoch 1076/2000 +2025-02-19 03:10:00,981 Current Learning Rate: 0.0031593772 +2025-02-19 03:10:00,982 Train Loss: 0.0002130, Val Loss: 0.0002609 +2025-02-19 03:10:00,982 Epoch 1077/2000 +2025-02-19 03:10:44,021 Current Learning Rate: 0.0032326258 +2025-02-19 03:10:44,022 Train Loss: 0.0002131, Val Loss: 0.0002551 +2025-02-19 03:10:44,022 Epoch 1078/2000 +2025-02-19 03:11:27,257 Current Learning Rate: 0.0033063104 +2025-02-19 03:11:27,257 Train Loss: 0.0001940, Val Loss: 0.0002535 +2025-02-19 03:11:27,258 Epoch 1079/2000 +2025-02-19 03:12:10,473 Current Learning Rate: 0.0033804129 +2025-02-19 03:12:10,474 Train Loss: 0.0002639, Val Loss: 0.0002960 +2025-02-19 03:12:10,474 Epoch 1080/2000 +2025-02-19 03:12:52,938 Current Learning Rate: 0.0034549150 +2025-02-19 03:12:52,939 Train Loss: 0.0002487, Val Loss: 0.0002833 +2025-02-19 03:12:52,939 Epoch 1081/2000 +2025-02-19 03:13:36,263 Current Learning Rate: 0.0035297984 +2025-02-19 03:13:36,263 Train Loss: 0.0002314, Val Loss: 0.0002922 +2025-02-19 03:13:36,264 Epoch 1082/2000 +2025-02-19 03:14:19,263 Current Learning Rate: 0.0036050445 +2025-02-19 03:14:19,264 Train Loss: 0.0001890, Val Loss: 0.0002646 +2025-02-19 03:14:19,264 Epoch 1083/2000 +2025-02-19 03:15:02,656 Current Learning Rate: 0.0036806348 +2025-02-19 03:15:02,657 Train Loss: 0.0002519, Val Loss: 0.0002780 +2025-02-19 03:15:02,657 Epoch 1084/2000 +2025-02-19 03:15:45,570 Current Learning Rate: 0.0037565506 +2025-02-19 03:15:45,571 Train Loss: 0.0001779, Val Loss: 0.0002759 +2025-02-19 03:15:45,571 Epoch 1085/2000 +2025-02-19 03:16:28,430 Current Learning Rate: 0.0038327732 +2025-02-19 03:16:28,431 Train Loss: 0.0003325, Val Loss: 0.0003134 +2025-02-19 03:16:28,432 Epoch 1086/2000 +2025-02-19 03:17:10,905 Current Learning Rate: 0.0039092838 +2025-02-19 03:17:10,905 Train Loss: 0.0002747, Val Loss: 0.0002782 
+2025-02-19 03:17:10,905 Epoch 1087/2000 +2025-02-19 03:17:54,353 Current Learning Rate: 0.0039860635 +2025-02-19 03:17:54,354 Train Loss: 0.0002859, Val Loss: 0.0003184 +2025-02-19 03:17:54,354 Epoch 1088/2000 +2025-02-19 03:18:37,800 Current Learning Rate: 0.0040630934 +2025-02-19 03:18:37,801 Train Loss: 0.0002034, Val Loss: 0.0002639 +2025-02-19 03:18:37,801 Epoch 1089/2000 +2025-02-19 03:19:21,001 Current Learning Rate: 0.0041403545 +2025-02-19 03:19:21,001 Train Loss: 0.0002202, Val Loss: 0.0002687 +2025-02-19 03:19:21,001 Epoch 1090/2000 +2025-02-19 03:20:04,362 Current Learning Rate: 0.0042178277 +2025-02-19 03:20:04,363 Train Loss: 0.0002341, Val Loss: 0.0002729 +2025-02-19 03:20:04,363 Epoch 1091/2000 +2025-02-19 03:20:47,775 Current Learning Rate: 0.0042954938 +2025-02-19 03:20:47,775 Train Loss: 0.0002009, Val Loss: 0.0003032 +2025-02-19 03:20:47,775 Epoch 1092/2000 +2025-02-19 03:21:30,223 Current Learning Rate: 0.0043733338 +2025-02-19 03:21:30,223 Train Loss: 0.0003220, Val Loss: 0.0004474 +2025-02-19 03:21:30,224 Epoch 1093/2000 +2025-02-19 03:22:12,909 Current Learning Rate: 0.0044513284 +2025-02-19 03:22:12,910 Train Loss: 0.0002945, Val Loss: 0.0002943 +2025-02-19 03:22:12,911 Epoch 1094/2000 +2025-02-19 03:22:55,486 Current Learning Rate: 0.0045294584 +2025-02-19 03:22:55,487 Train Loss: 0.0002949, Val Loss: 0.0002978 +2025-02-19 03:22:55,487 Epoch 1095/2000 +2025-02-19 03:23:38,276 Current Learning Rate: 0.0046077045 +2025-02-19 03:23:38,276 Train Loss: 0.0002308, Val Loss: 0.0002898 +2025-02-19 03:23:38,276 Epoch 1096/2000 +2025-02-19 03:24:21,624 Current Learning Rate: 0.0046860474 +2025-02-19 03:24:21,625 Train Loss: 0.0002064, Val Loss: 0.0002857 +2025-02-19 03:24:21,625 Epoch 1097/2000 +2025-02-19 03:25:03,985 Current Learning Rate: 0.0047644677 +2025-02-19 03:25:03,986 Train Loss: 0.0002253, Val Loss: 0.0002676 +2025-02-19 03:25:03,986 Epoch 1098/2000 +2025-02-19 03:25:47,312 Current Learning Rate: 0.0048429462 +2025-02-19 03:25:47,313 
Train Loss: 0.0002980, Val Loss: 0.0002916 +2025-02-19 03:25:47,313 Epoch 1099/2000 +2025-02-19 03:26:31,052 Current Learning Rate: 0.0049214634 +2025-02-19 03:26:31,052 Train Loss: 0.0002462, Val Loss: 0.0003128 +2025-02-19 03:26:31,053 Epoch 1100/2000 +2025-02-19 03:27:14,228 Current Learning Rate: 0.0050000000 +2025-02-19 03:27:14,229 Train Loss: 0.0002166, Val Loss: 0.0002886 +2025-02-19 03:27:14,229 Epoch 1101/2000 +2025-02-19 03:27:56,619 Current Learning Rate: 0.0050785366 +2025-02-19 03:27:56,620 Train Loss: 0.0003070, Val Loss: 0.0003832 +2025-02-19 03:27:56,620 Epoch 1102/2000 +2025-02-19 03:28:38,845 Current Learning Rate: 0.0051570538 +2025-02-19 03:28:38,846 Train Loss: 0.0002541, Val Loss: 0.0002917 +2025-02-19 03:28:38,846 Epoch 1103/2000 +2025-02-19 03:29:21,417 Current Learning Rate: 0.0052355323 +2025-02-19 03:29:21,418 Train Loss: 0.0002667, Val Loss: 0.0003610 +2025-02-19 03:29:21,418 Epoch 1104/2000 +2025-02-19 03:30:03,744 Current Learning Rate: 0.0053139526 +2025-02-19 03:30:03,745 Train Loss: 0.0003004, Val Loss: 0.0003231 +2025-02-19 03:30:03,745 Epoch 1105/2000 +2025-02-19 03:30:47,024 Current Learning Rate: 0.0053922955 +2025-02-19 03:30:47,025 Train Loss: 0.0002553, Val Loss: 0.0003014 +2025-02-19 03:30:47,025 Epoch 1106/2000 +2025-02-19 03:31:30,027 Current Learning Rate: 0.0054705416 +2025-02-19 03:31:30,028 Train Loss: 0.0002604, Val Loss: 0.0003149 +2025-02-19 03:31:30,028 Epoch 1107/2000 +2025-02-19 03:32:12,247 Current Learning Rate: 0.0055486716 +2025-02-19 03:32:12,248 Train Loss: 0.0002063, Val Loss: 0.0002979 +2025-02-19 03:32:12,248 Epoch 1108/2000 +2025-02-19 03:32:54,546 Current Learning Rate: 0.0056266662 +2025-02-19 03:32:54,546 Train Loss: 0.0002328, Val Loss: 0.0002973 +2025-02-19 03:32:54,547 Epoch 1109/2000 +2025-02-19 03:33:37,425 Current Learning Rate: 0.0057045062 +2025-02-19 03:33:37,426 Train Loss: 0.0003118, Val Loss: 0.0003333 +2025-02-19 03:33:37,426 Epoch 1110/2000 +2025-02-19 03:34:19,891 Current Learning 
Rate: 0.0057821723 +2025-02-19 03:34:19,891 Train Loss: 0.0002921, Val Loss: 0.0003189 +2025-02-19 03:34:19,892 Epoch 1111/2000 +2025-02-19 03:35:02,765 Current Learning Rate: 0.0058596455 +2025-02-19 03:35:02,765 Train Loss: 0.0003431, Val Loss: 0.0003513 +2025-02-19 03:35:02,766 Epoch 1112/2000 +2025-02-19 03:35:45,607 Current Learning Rate: 0.0059369066 +2025-02-19 03:35:45,608 Train Loss: 0.0003520, Val Loss: 0.0003310 +2025-02-19 03:35:45,608 Epoch 1113/2000 +2025-02-19 03:36:28,595 Current Learning Rate: 0.0060139365 +2025-02-19 03:36:28,596 Train Loss: 0.0004791, Val Loss: 0.0003653 +2025-02-19 03:36:28,596 Epoch 1114/2000 +2025-02-19 03:37:11,420 Current Learning Rate: 0.0060907162 +2025-02-19 03:37:11,421 Train Loss: 0.0003725, Val Loss: 0.0003362 +2025-02-19 03:37:11,421 Epoch 1115/2000 +2025-02-19 03:37:53,178 Current Learning Rate: 0.0061672268 +2025-02-19 03:37:53,178 Train Loss: 0.0002529, Val Loss: 0.0003042 +2025-02-19 03:37:53,179 Epoch 1116/2000 +2025-02-19 03:38:35,640 Current Learning Rate: 0.0062434494 +2025-02-19 03:38:35,641 Train Loss: 0.0002894, Val Loss: 0.0003179 +2025-02-19 03:38:35,641 Epoch 1117/2000 +2025-02-19 03:39:18,267 Current Learning Rate: 0.0063193652 +2025-02-19 03:39:18,267 Train Loss: 0.0002789, Val Loss: 0.0003679 +2025-02-19 03:39:18,267 Epoch 1118/2000 +2025-02-19 03:40:00,360 Current Learning Rate: 0.0063949555 +2025-02-19 03:40:00,361 Train Loss: 0.0003663, Val Loss: 0.0003915 +2025-02-19 03:40:00,361 Epoch 1119/2000 +2025-02-19 03:40:42,402 Current Learning Rate: 0.0064702016 +2025-02-19 03:40:42,403 Train Loss: 0.0003640, Val Loss: 0.0004146 +2025-02-19 03:40:42,403 Epoch 1120/2000 +2025-02-19 03:41:25,325 Current Learning Rate: 0.0065450850 +2025-02-19 03:41:25,325 Train Loss: 0.0003236, Val Loss: 0.0003426 +2025-02-19 03:41:25,326 Epoch 1121/2000 +2025-02-19 03:42:07,944 Current Learning Rate: 0.0066195871 +2025-02-19 03:42:07,945 Train Loss: 0.0003442, Val Loss: 0.0003151 +2025-02-19 03:42:07,945 Epoch 1122/2000 
+2025-02-19 03:42:50,530 Current Learning Rate: 0.0066936896 +2025-02-19 03:42:50,531 Train Loss: 0.0002456, Val Loss: 0.0003012 +2025-02-19 03:42:50,531 Epoch 1123/2000 +2025-02-19 03:43:33,340 Current Learning Rate: 0.0067673742 +2025-02-19 03:43:33,340 Train Loss: 0.0003050, Val Loss: 0.0004627 +2025-02-19 03:43:33,341 Epoch 1124/2000 +2025-02-19 03:44:15,499 Current Learning Rate: 0.0068406228 +2025-02-19 03:44:15,499 Train Loss: 0.0004283, Val Loss: 0.0004645 +2025-02-19 03:44:15,500 Epoch 1125/2000 +2025-02-19 03:44:57,913 Current Learning Rate: 0.0069134172 +2025-02-19 03:44:57,913 Train Loss: 0.0003717, Val Loss: 0.0003399 +2025-02-19 03:44:57,914 Epoch 1126/2000 +2025-02-19 03:45:40,362 Current Learning Rate: 0.0069857395 +2025-02-19 03:45:40,362 Train Loss: 0.0002985, Val Loss: 0.0003369 +2025-02-19 03:45:40,363 Epoch 1127/2000 +2025-02-19 03:46:22,432 Current Learning Rate: 0.0070575718 +2025-02-19 03:46:22,432 Train Loss: 0.0003203, Val Loss: 0.0003092 +2025-02-19 03:46:22,432 Epoch 1128/2000 +2025-02-19 03:47:05,642 Current Learning Rate: 0.0071288965 +2025-02-19 03:47:05,642 Train Loss: 0.0002447, Val Loss: 0.0002961 +2025-02-19 03:47:05,643 Epoch 1129/2000 +2025-02-19 03:47:48,498 Current Learning Rate: 0.0071996958 +2025-02-19 03:47:48,499 Train Loss: 0.0002411, Val Loss: 0.0002873 +2025-02-19 03:47:48,499 Epoch 1130/2000 +2025-02-19 03:48:31,327 Current Learning Rate: 0.0072699525 +2025-02-19 03:48:31,328 Train Loss: 0.0002039, Val Loss: 0.0002897 +2025-02-19 03:48:31,328 Epoch 1131/2000 +2025-02-19 03:49:13,409 Current Learning Rate: 0.0073396491 +2025-02-19 03:49:13,410 Train Loss: 0.0002713, Val Loss: 0.0003436 +2025-02-19 03:49:13,410 Epoch 1132/2000 +2025-02-19 03:49:55,525 Current Learning Rate: 0.0074087684 +2025-02-19 03:49:55,526 Train Loss: 0.0002606, Val Loss: 0.0003267 +2025-02-19 03:49:55,526 Epoch 1133/2000 +2025-02-19 03:50:37,816 Current Learning Rate: 0.0074772933 +2025-02-19 03:50:37,817 Train Loss: 0.0003241, Val Loss: 0.0003345 
+2025-02-19 03:50:37,817 Epoch 1134/2000 +2025-02-19 03:51:20,448 Current Learning Rate: 0.0075452071 +2025-02-19 03:51:20,448 Train Loss: 0.0002496, Val Loss: 0.0003114 +2025-02-19 03:51:20,449 Epoch 1135/2000 +2025-02-19 03:52:02,854 Current Learning Rate: 0.0076124928 +2025-02-19 03:52:02,855 Train Loss: 0.0002655, Val Loss: 0.0003490 +2025-02-19 03:52:02,855 Epoch 1136/2000 +2025-02-19 03:52:44,720 Current Learning Rate: 0.0076791340 +2025-02-19 03:52:44,721 Train Loss: 0.0003818, Val Loss: 0.0003598 +2025-02-19 03:52:44,721 Epoch 1137/2000 +2025-02-19 03:53:27,235 Current Learning Rate: 0.0077451141 +2025-02-19 03:53:27,235 Train Loss: 0.0003065, Val Loss: 0.0003834 +2025-02-19 03:53:27,236 Epoch 1138/2000 +2025-02-19 03:54:09,662 Current Learning Rate: 0.0078104169 +2025-02-19 03:54:09,662 Train Loss: 0.0003612, Val Loss: 0.0003496 +2025-02-19 03:54:09,663 Epoch 1139/2000 +2025-02-19 03:54:52,962 Current Learning Rate: 0.0078750263 +2025-02-19 03:54:52,962 Train Loss: 0.0002888, Val Loss: 0.0003206 +2025-02-19 03:54:52,963 Epoch 1140/2000 +2025-02-19 03:55:35,924 Current Learning Rate: 0.0079389263 +2025-02-19 03:55:35,925 Train Loss: 0.0003172, Val Loss: 0.0003506 +2025-02-19 03:55:35,925 Epoch 1141/2000 +2025-02-19 03:56:18,110 Current Learning Rate: 0.0080021011 +2025-02-19 03:56:18,111 Train Loss: 0.0003200, Val Loss: 0.0003664 +2025-02-19 03:56:18,112 Epoch 1142/2000 +2025-02-19 03:57:00,244 Current Learning Rate: 0.0080645353 +2025-02-19 03:57:00,245 Train Loss: 0.0003015, Val Loss: 0.0004287 +2025-02-19 03:57:00,245 Epoch 1143/2000 +2025-02-19 03:57:43,627 Current Learning Rate: 0.0081262133 +2025-02-19 03:57:43,627 Train Loss: 0.0003808, Val Loss: 0.0004373 +2025-02-19 03:57:43,627 Epoch 1144/2000 +2025-02-19 03:58:26,253 Current Learning Rate: 0.0081871199 +2025-02-19 03:58:26,254 Train Loss: 0.0003841, Val Loss: 0.0004210 +2025-02-19 03:58:26,254 Epoch 1145/2000 +2025-02-19 03:59:09,262 Current Learning Rate: 0.0082472402 +2025-02-19 03:59:09,263 
Train Loss: 0.0003528, Val Loss: 0.0003438 +2025-02-19 03:59:09,263 Epoch 1146/2000 +2025-02-19 03:59:51,501 Current Learning Rate: 0.0083065593 +2025-02-19 03:59:51,502 Train Loss: 0.0004203, Val Loss: 0.0003761 +2025-02-19 03:59:51,502 Epoch 1147/2000 +2025-02-19 04:00:34,033 Current Learning Rate: 0.0083650626 +2025-02-19 04:00:34,034 Train Loss: 0.0002930, Val Loss: 0.0003615 +2025-02-19 04:00:34,034 Epoch 1148/2000 +2025-02-19 04:01:16,362 Current Learning Rate: 0.0084227355 +2025-02-19 04:01:16,362 Train Loss: 0.0003442, Val Loss: 0.0004073 +2025-02-19 04:01:16,362 Epoch 1149/2000 +2025-02-19 04:01:59,011 Current Learning Rate: 0.0084795640 +2025-02-19 04:01:59,011 Train Loss: 0.0003440, Val Loss: 0.0003918 +2025-02-19 04:01:59,012 Epoch 1150/2000 +2025-02-19 04:02:41,427 Current Learning Rate: 0.0085355339 +2025-02-19 04:02:41,427 Train Loss: 0.0004297, Val Loss: 0.0004923 +2025-02-19 04:02:41,427 Epoch 1151/2000 +2025-02-19 04:03:23,620 Current Learning Rate: 0.0085906315 +2025-02-19 04:03:23,621 Train Loss: 0.0004179, Val Loss: 0.0004456 +2025-02-19 04:03:23,621 Epoch 1152/2000 +2025-02-19 04:04:05,571 Current Learning Rate: 0.0086448431 +2025-02-19 04:04:05,571 Train Loss: 0.0003730, Val Loss: 0.0003580 +2025-02-19 04:04:05,572 Epoch 1153/2000 +2025-02-19 04:04:48,461 Current Learning Rate: 0.0086981555 +2025-02-19 04:04:48,462 Train Loss: 0.0002873, Val Loss: 0.0003592 +2025-02-19 04:04:48,462 Epoch 1154/2000 +2025-02-19 04:05:31,528 Current Learning Rate: 0.0087505553 +2025-02-19 04:05:31,528 Train Loss: 0.0002449, Val Loss: 0.0003035 +2025-02-19 04:05:31,528 Epoch 1155/2000 +2025-02-19 04:06:14,439 Current Learning Rate: 0.0088020298 +2025-02-19 04:06:14,440 Train Loss: 0.0002925, Val Loss: 0.0003313 +2025-02-19 04:06:14,440 Epoch 1156/2000 +2025-02-19 04:06:56,342 Current Learning Rate: 0.0088525662 +2025-02-19 04:06:56,343 Train Loss: 0.0003326, Val Loss: 0.0003231 +2025-02-19 04:06:56,344 Epoch 1157/2000 +2025-02-19 04:07:38,896 Current Learning 
Rate: 0.0089021520 +2025-02-19 04:07:38,897 Train Loss: 0.0002309, Val Loss: 0.0003207 +2025-02-19 04:07:38,897 Epoch 1158/2000 +2025-02-19 04:08:21,764 Current Learning Rate: 0.0089507751 +2025-02-19 04:08:21,765 Train Loss: 0.0003062, Val Loss: 0.0003277 +2025-02-19 04:08:21,765 Epoch 1159/2000 +2025-02-19 04:09:04,089 Current Learning Rate: 0.0089984233 +2025-02-19 04:09:04,089 Train Loss: 0.0003627, Val Loss: 0.0003142 +2025-02-19 04:09:04,090 Epoch 1160/2000 +2025-02-19 04:09:46,154 Current Learning Rate: 0.0090450850 +2025-02-19 04:09:46,155 Train Loss: 0.0002264, Val Loss: 0.0003045 +2025-02-19 04:09:46,155 Epoch 1161/2000 +2025-02-19 04:10:29,378 Current Learning Rate: 0.0090907486 +2025-02-19 04:10:29,379 Train Loss: 0.0002137, Val Loss: 0.0003212 +2025-02-19 04:10:29,379 Epoch 1162/2000 +2025-02-19 04:11:12,475 Current Learning Rate: 0.0091354029 +2025-02-19 04:11:12,475 Train Loss: 0.0002972, Val Loss: 0.0003439 +2025-02-19 04:11:12,476 Epoch 1163/2000 +2025-02-19 04:11:55,531 Current Learning Rate: 0.0091790368 +2025-02-19 04:11:55,532 Train Loss: 0.0003241, Val Loss: 0.0003683 +2025-02-19 04:11:55,532 Epoch 1164/2000 +2025-02-19 04:12:38,417 Current Learning Rate: 0.0092216396 +2025-02-19 04:12:38,417 Train Loss: 0.0002868, Val Loss: 0.0003638 +2025-02-19 04:12:38,418 Epoch 1165/2000 +2025-02-19 04:13:20,180 Current Learning Rate: 0.0092632008 +2025-02-19 04:13:20,181 Train Loss: 0.0002849, Val Loss: 0.0003661 +2025-02-19 04:13:20,181 Epoch 1166/2000 +2025-02-19 04:14:02,579 Current Learning Rate: 0.0093037101 +2025-02-19 04:14:02,579 Train Loss: 0.0003597, Val Loss: 0.0003550 +2025-02-19 04:14:02,579 Epoch 1167/2000 +2025-02-19 04:14:44,887 Current Learning Rate: 0.0093431576 +2025-02-19 04:14:44,888 Train Loss: 0.0003497, Val Loss: 0.0003689 +2025-02-19 04:14:44,888 Epoch 1168/2000 +2025-02-19 04:15:27,353 Current Learning Rate: 0.0093815334 +2025-02-19 04:15:27,353 Train Loss: 0.0004312, Val Loss: 0.0004013 +2025-02-19 04:15:27,354 Epoch 1169/2000 
+2025-02-19 04:16:10,783 Current Learning Rate: 0.0094188282 +2025-02-19 04:16:10,783 Train Loss: 0.0003465, Val Loss: 0.0003921 +2025-02-19 04:16:10,784 Epoch 1170/2000 +2025-02-19 04:16:53,638 Current Learning Rate: 0.0094550326 +2025-02-19 04:16:53,639 Train Loss: 0.0002985, Val Loss: 0.0004199 +2025-02-19 04:16:53,639 Epoch 1171/2000 +2025-02-19 04:17:36,038 Current Learning Rate: 0.0094901379 +2025-02-19 04:17:36,038 Train Loss: 0.0003318, Val Loss: 0.0004006 +2025-02-19 04:17:36,039 Epoch 1172/2000 +2025-02-19 04:18:18,977 Current Learning Rate: 0.0095241353 +2025-02-19 04:18:18,978 Train Loss: 0.0003520, Val Loss: 0.0003934 +2025-02-19 04:18:18,978 Epoch 1173/2000 +2025-02-19 04:19:01,318 Current Learning Rate: 0.0095570164 +2025-02-19 04:19:01,319 Train Loss: 0.0004049, Val Loss: 0.0004106 +2025-02-19 04:19:01,319 Epoch 1174/2000 +2025-02-19 04:19:43,869 Current Learning Rate: 0.0095887731 +2025-02-19 04:19:43,870 Train Loss: 0.0004068, Val Loss: 0.0004243 +2025-02-19 04:19:43,870 Epoch 1175/2000 +2025-02-19 04:20:26,761 Current Learning Rate: 0.0096193977 +2025-02-19 04:20:26,762 Train Loss: 0.0003134, Val Loss: 0.0003380 +2025-02-19 04:20:26,762 Epoch 1176/2000 +2025-02-19 04:21:10,058 Current Learning Rate: 0.0096488824 +2025-02-19 04:21:10,059 Train Loss: 0.0002539, Val Loss: 0.0003542 +2025-02-19 04:21:10,059 Epoch 1177/2000 +2025-02-19 04:21:53,368 Current Learning Rate: 0.0096772202 +2025-02-19 04:21:53,369 Train Loss: 0.0002805, Val Loss: 0.0004002 +2025-02-19 04:21:53,369 Epoch 1178/2000 +2025-02-19 04:22:36,593 Current Learning Rate: 0.0097044038 +2025-02-19 04:22:36,594 Train Loss: 0.0003524, Val Loss: 0.0004084 +2025-02-19 04:22:36,594 Epoch 1179/2000 +2025-02-19 04:23:20,162 Current Learning Rate: 0.0097304268 +2025-02-19 04:23:20,162 Train Loss: 0.0004537, Val Loss: 0.0004119 +2025-02-19 04:23:20,163 Epoch 1180/2000 +2025-02-19 04:24:03,971 Current Learning Rate: 0.0097552826 +2025-02-19 04:24:03,972 Train Loss: 0.0003992, Val Loss: 0.0004051 
+2025-02-19 04:24:03,972 Epoch 1181/2000 +2025-02-19 04:24:45,864 Current Learning Rate: 0.0097789651 +2025-02-19 04:24:45,865 Train Loss: 0.0003883, Val Loss: 0.0003950 +2025-02-19 04:24:45,865 Epoch 1182/2000 +2025-02-19 04:25:28,523 Current Learning Rate: 0.0098014684 +2025-02-19 04:25:28,524 Train Loss: 0.0003235, Val Loss: 0.0003632 +2025-02-19 04:25:28,524 Epoch 1183/2000 +2025-02-19 04:26:12,298 Current Learning Rate: 0.0098227871 +2025-02-19 04:26:12,299 Train Loss: 0.0002952, Val Loss: 0.0003477 +2025-02-19 04:26:12,299 Epoch 1184/2000 +2025-02-19 04:26:54,488 Current Learning Rate: 0.0098429158 +2025-02-19 04:26:54,489 Train Loss: 0.0002845, Val Loss: 0.0003347 +2025-02-19 04:26:54,489 Epoch 1185/2000 +2025-02-19 04:27:37,628 Current Learning Rate: 0.0098618496 +2025-02-19 04:27:37,628 Train Loss: 0.0002304, Val Loss: 0.0003176 +2025-02-19 04:27:37,629 Epoch 1186/2000 +2025-02-19 04:28:20,967 Current Learning Rate: 0.0098795838 +2025-02-19 04:28:20,968 Train Loss: 0.0003224, Val Loss: 0.0004063 +2025-02-19 04:28:20,968 Epoch 1187/2000 +2025-02-19 04:29:03,176 Current Learning Rate: 0.0098961141 +2025-02-19 04:29:03,177 Train Loss: 0.0003911, Val Loss: 0.0003631 +2025-02-19 04:29:03,177 Epoch 1188/2000 +2025-02-19 04:29:46,280 Current Learning Rate: 0.0099114363 +2025-02-19 04:29:46,280 Train Loss: 0.0002728, Val Loss: 0.0003342 +2025-02-19 04:29:46,281 Epoch 1189/2000 +2025-02-19 04:30:28,980 Current Learning Rate: 0.0099255466 +2025-02-19 04:30:28,980 Train Loss: 0.0003905, Val Loss: 0.0004208 +2025-02-19 04:30:28,981 Epoch 1190/2000 +2025-02-19 04:31:12,058 Current Learning Rate: 0.0099384417 +2025-02-19 04:31:12,060 Train Loss: 0.0004146, Val Loss: 0.0005108 +2025-02-19 04:31:12,061 Epoch 1191/2000 +2025-02-19 04:31:54,883 Current Learning Rate: 0.0099501183 +2025-02-19 04:31:54,883 Train Loss: 0.0003339, Val Loss: 0.0004398 +2025-02-19 04:31:54,883 Epoch 1192/2000 +2025-02-19 04:32:37,650 Current Learning Rate: 0.0099605735 +2025-02-19 04:32:37,651 
Train Loss: 0.0005730, Val Loss: 0.0007198 +2025-02-19 04:32:37,651 Epoch 1193/2000 +2025-02-19 04:33:20,077 Current Learning Rate: 0.0099698048 +2025-02-19 04:33:20,078 Train Loss: 0.0005289, Val Loss: 0.0015931 +2025-02-19 04:33:20,079 Epoch 1194/2000 +2025-02-19 04:34:02,892 Current Learning Rate: 0.0099778098 +2025-02-19 04:34:02,893 Train Loss: 0.0008597, Val Loss: 0.0004949 +2025-02-19 04:34:02,893 Epoch 1195/2000 +2025-02-19 04:34:45,305 Current Learning Rate: 0.0099845867 +2025-02-19 04:34:45,306 Train Loss: 0.0003657, Val Loss: 0.0004646 +2025-02-19 04:34:45,306 Epoch 1196/2000 +2025-02-19 04:35:27,921 Current Learning Rate: 0.0099901336 +2025-02-19 04:35:27,922 Train Loss: 0.0003482, Val Loss: 0.0003574 +2025-02-19 04:35:27,922 Epoch 1197/2000 +2025-02-19 04:36:11,019 Current Learning Rate: 0.0099944494 +2025-02-19 04:36:11,019 Train Loss: 0.0003333, Val Loss: 0.0004034 +2025-02-19 04:36:11,020 Epoch 1198/2000 +2025-02-19 04:36:54,726 Current Learning Rate: 0.0099975328 +2025-02-19 04:36:54,726 Train Loss: 0.0002702, Val Loss: 0.0003077 +2025-02-19 04:36:54,726 Epoch 1199/2000 +2025-02-19 04:37:38,170 Current Learning Rate: 0.0099993832 +2025-02-19 04:37:38,171 Train Loss: 0.0003345, Val Loss: 0.0003255 +2025-02-19 04:37:38,171 Epoch 1200/2000 +2025-02-19 04:38:21,367 Current Learning Rate: 0.0100000000 +2025-02-19 04:38:21,368 Train Loss: 0.0003558, Val Loss: 0.0004124 +2025-02-19 04:38:21,368 Epoch 1201/2000 +2025-02-19 04:39:04,828 Current Learning Rate: 0.0099993832 +2025-02-19 04:39:04,829 Train Loss: 0.0003815, Val Loss: 0.0003488 +2025-02-19 04:39:04,829 Epoch 1202/2000 +2025-02-19 04:39:46,715 Current Learning Rate: 0.0099975328 +2025-02-19 04:39:46,716 Train Loss: 0.0002571, Val Loss: 0.0002980 +2025-02-19 04:39:46,716 Epoch 1203/2000 +2025-02-19 04:40:29,629 Current Learning Rate: 0.0099944494 +2025-02-19 04:40:29,630 Train Loss: 0.0004342, Val Loss: 0.0003882 +2025-02-19 04:40:29,630 Epoch 1204/2000 +2025-02-19 04:41:12,975 Current Learning 
Rate: 0.0099901336 +2025-02-19 04:41:12,975 Train Loss: 0.0003849, Val Loss: 0.0003566 +2025-02-19 04:41:12,975 Epoch 1205/2000 +2025-02-19 04:41:56,128 Current Learning Rate: 0.0099845867 +2025-02-19 04:41:56,129 Train Loss: 0.0003055, Val Loss: 0.0003039 +2025-02-19 04:41:56,129 Epoch 1206/2000 +2025-02-19 04:42:39,641 Current Learning Rate: 0.0099778098 +2025-02-19 04:42:39,642 Train Loss: 0.0003043, Val Loss: 0.0003126 +2025-02-19 04:42:39,643 Epoch 1207/2000 +2025-02-19 04:43:22,620 Current Learning Rate: 0.0099698048 +2025-02-19 04:43:22,620 Train Loss: 0.0002716, Val Loss: 0.0003041 +2025-02-19 04:43:22,621 Epoch 1208/2000 +2025-02-19 04:44:05,795 Current Learning Rate: 0.0099605735 +2025-02-19 04:44:05,795 Train Loss: 0.0002467, Val Loss: 0.0002967 +2025-02-19 04:44:05,796 Epoch 1209/2000 +2025-02-19 04:44:48,574 Current Learning Rate: 0.0099501183 +2025-02-19 04:44:48,575 Train Loss: 0.0002712, Val Loss: 0.0002819 +2025-02-19 04:44:48,575 Epoch 1210/2000 +2025-02-19 04:45:31,934 Current Learning Rate: 0.0099384417 +2025-02-19 04:45:31,935 Train Loss: 0.0002193, Val Loss: 0.0002984 +2025-02-19 04:45:31,935 Epoch 1211/2000 +2025-02-19 04:46:15,185 Current Learning Rate: 0.0099255466 +2025-02-19 04:46:15,186 Train Loss: 0.0002582, Val Loss: 0.0003117 +2025-02-19 04:46:15,186 Epoch 1212/2000 +2025-02-19 04:46:58,076 Current Learning Rate: 0.0099114363 +2025-02-19 04:46:58,077 Train Loss: 0.0003287, Val Loss: 0.0004310 +2025-02-19 04:46:58,077 Epoch 1213/2000 +2025-02-19 04:47:40,225 Current Learning Rate: 0.0098961141 +2025-02-19 04:47:40,225 Train Loss: 0.0002960, Val Loss: 0.0003451 +2025-02-19 04:47:40,225 Epoch 1214/2000 +2025-02-19 04:48:22,323 Current Learning Rate: 0.0098795838 +2025-02-19 04:48:22,324 Train Loss: 0.0003305, Val Loss: 0.0003503 +2025-02-19 04:48:22,324 Epoch 1215/2000 +2025-02-19 04:49:04,692 Current Learning Rate: 0.0098618496 +2025-02-19 04:49:04,692 Train Loss: 0.0003031, Val Loss: 0.0003275 +2025-02-19 04:49:04,692 Epoch 1216/2000 
+2025-02-19 04:49:47,698 Current Learning Rate: 0.0098429158 +2025-02-19 04:49:47,699 Train Loss: 0.0003185, Val Loss: 0.0003507 +2025-02-19 04:49:47,699 Epoch 1217/2000 +2025-02-19 04:50:29,853 Current Learning Rate: 0.0098227871 +2025-02-19 04:50:29,854 Train Loss: 0.0003555, Val Loss: 0.0003752 +2025-02-19 04:50:29,854 Epoch 1218/2000 +2025-02-19 04:51:11,832 Current Learning Rate: 0.0098014684 +2025-02-19 04:51:11,832 Train Loss: 0.0003265, Val Loss: 0.0003389 +2025-02-19 04:51:11,833 Epoch 1219/2000 +2025-02-19 04:51:55,207 Current Learning Rate: 0.0097789651 +2025-02-19 04:51:55,208 Train Loss: 0.0002837, Val Loss: 0.0003092 +2025-02-19 04:51:55,208 Epoch 1220/2000 +2025-02-19 04:52:37,705 Current Learning Rate: 0.0097552826 +2025-02-19 04:52:37,706 Train Loss: 0.0002819, Val Loss: 0.0003282 +2025-02-19 04:52:37,706 Epoch 1221/2000 +2025-02-19 04:53:20,546 Current Learning Rate: 0.0097304268 +2025-02-19 04:53:20,546 Train Loss: 0.0003215, Val Loss: 0.0003409 +2025-02-19 04:53:20,547 Epoch 1222/2000 +2025-02-19 04:54:02,502 Current Learning Rate: 0.0097044038 +2025-02-19 04:54:02,503 Train Loss: 0.0002931, Val Loss: 0.0002935 +2025-02-19 04:54:02,503 Epoch 1223/2000 +2025-02-19 04:54:45,153 Current Learning Rate: 0.0096772202 +2025-02-19 04:54:45,154 Train Loss: 0.0002800, Val Loss: 0.0003279 +2025-02-19 04:54:45,154 Epoch 1224/2000 +2025-02-19 04:55:27,369 Current Learning Rate: 0.0096488824 +2025-02-19 04:55:27,369 Train Loss: 0.0002484, Val Loss: 0.0003220 +2025-02-19 04:55:27,369 Epoch 1225/2000 +2025-02-19 04:56:09,613 Current Learning Rate: 0.0096193977 +2025-02-19 04:56:09,613 Train Loss: 0.0002307, Val Loss: 0.0002847 +2025-02-19 04:56:09,613 Epoch 1226/2000 +2025-02-19 04:56:51,489 Current Learning Rate: 0.0095887731 +2025-02-19 04:56:51,489 Train Loss: 0.0003054, Val Loss: 0.0003279 +2025-02-19 04:56:51,490 Epoch 1227/2000 +2025-02-19 04:57:34,015 Current Learning Rate: 0.0095570164 +2025-02-19 04:57:34,016 Train Loss: 0.0002774, Val Loss: 0.0003250 
+2025-02-19 04:57:34,016 Epoch 1228/2000 +2025-02-19 04:58:17,051 Current Learning Rate: 0.0095241353 +2025-02-19 04:58:17,051 Train Loss: 0.0002541, Val Loss: 0.0002957 +2025-02-19 04:58:17,052 Epoch 1229/2000 +2025-02-19 04:58:58,955 Current Learning Rate: 0.0094901379 +2025-02-19 04:58:58,956 Train Loss: 0.0003336, Val Loss: 0.0003201 +2025-02-19 04:58:58,956 Epoch 1230/2000 +2025-02-19 04:59:41,074 Current Learning Rate: 0.0094550326 +2025-02-19 04:59:41,075 Train Loss: 0.0002789, Val Loss: 0.0003138 +2025-02-19 04:59:41,075 Epoch 1231/2000 +2025-02-19 05:00:23,194 Current Learning Rate: 0.0094188282 +2025-02-19 05:00:23,195 Train Loss: 0.0002877, Val Loss: 0.0003305 +2025-02-19 05:00:23,195 Epoch 1232/2000 +2025-02-19 05:01:05,466 Current Learning Rate: 0.0093815334 +2025-02-19 05:01:05,467 Train Loss: 0.0002423, Val Loss: 0.0003018 +2025-02-19 05:01:05,467 Epoch 1233/2000 +2025-02-19 05:01:48,507 Current Learning Rate: 0.0093431576 +2025-02-19 05:01:48,508 Train Loss: 0.0003048, Val Loss: 0.0003066 +2025-02-19 05:01:48,508 Epoch 1234/2000 +2025-02-19 05:02:31,691 Current Learning Rate: 0.0093037101 +2025-02-19 05:02:31,692 Train Loss: 0.0003971, Val Loss: 0.0003299 +2025-02-19 05:02:31,692 Epoch 1235/2000 +2025-02-19 05:03:14,686 Current Learning Rate: 0.0092632008 +2025-02-19 05:03:14,686 Train Loss: 0.0002575, Val Loss: 0.0003031 +2025-02-19 05:03:14,686 Epoch 1236/2000 +2025-02-19 05:03:57,696 Current Learning Rate: 0.0092216396 +2025-02-19 05:03:57,697 Train Loss: 0.0002736, Val Loss: 0.0002998 +2025-02-19 05:03:57,697 Epoch 1237/2000 +2025-02-19 05:04:40,840 Current Learning Rate: 0.0091790368 +2025-02-19 05:04:40,841 Train Loss: 0.0002595, Val Loss: 0.0002812 +2025-02-19 05:04:40,841 Epoch 1238/2000 +2025-02-19 05:05:23,497 Current Learning Rate: 0.0091354029 +2025-02-19 05:05:23,497 Train Loss: 0.0003125, Val Loss: 0.0002800 +2025-02-19 05:05:23,497 Epoch 1239/2000 +2025-02-19 05:06:05,663 Current Learning Rate: 0.0090907486 +2025-02-19 05:06:05,663 
Train Loss: 0.0002220, Val Loss: 0.0002693 +2025-02-19 05:06:05,664 Epoch 1240/2000 +2025-02-19 05:06:48,578 Current Learning Rate: 0.0090450850 +2025-02-19 05:06:48,578 Train Loss: 0.0002700, Val Loss: 0.0002810 +2025-02-19 05:06:48,578 Epoch 1241/2000 +2025-02-19 05:07:31,256 Current Learning Rate: 0.0089984233 +2025-02-19 05:07:31,257 Train Loss: 0.0002639, Val Loss: 0.0002961 +2025-02-19 05:07:31,257 Epoch 1242/2000 +2025-02-19 05:08:13,969 Current Learning Rate: 0.0089507751 +2025-02-19 05:08:13,969 Train Loss: 0.0002100, Val Loss: 0.0002593 +2025-02-19 05:08:13,970 Epoch 1243/2000 +2025-02-19 05:08:56,921 Current Learning Rate: 0.0089021520 +2025-02-19 05:08:56,922 Train Loss: 0.0002544, Val Loss: 0.0002705 +2025-02-19 05:08:56,922 Epoch 1244/2000 +2025-02-19 05:09:39,700 Current Learning Rate: 0.0088525662 +2025-02-19 05:09:39,701 Train Loss: 0.0002683, Val Loss: 0.0003015 +2025-02-19 05:09:39,701 Epoch 1245/2000 +2025-02-19 05:10:21,818 Current Learning Rate: 0.0088020298 +2025-02-19 05:10:21,819 Train Loss: 0.0002700, Val Loss: 0.0002721 +2025-02-19 05:10:21,819 Epoch 1246/2000 +2025-02-19 05:11:05,185 Current Learning Rate: 0.0087505553 +2025-02-19 05:11:05,186 Train Loss: 0.0002013, Val Loss: 0.0002730 +2025-02-19 05:11:05,186 Epoch 1247/2000 +2025-02-19 05:11:47,303 Current Learning Rate: 0.0086981555 +2025-02-19 05:11:47,303 Train Loss: 0.0002540, Val Loss: 0.0002971 +2025-02-19 05:11:47,303 Epoch 1248/2000 +2025-02-19 05:12:29,779 Current Learning Rate: 0.0086448431 +2025-02-19 05:12:29,780 Train Loss: 0.0002141, Val Loss: 0.0003020 +2025-02-19 05:12:29,780 Epoch 1249/2000 +2025-02-19 05:13:12,511 Current Learning Rate: 0.0085906315 +2025-02-19 05:13:12,512 Train Loss: 0.0002372, Val Loss: 0.0002926 +2025-02-19 05:13:12,512 Epoch 1250/2000 +2025-02-19 05:13:55,191 Current Learning Rate: 0.0085355339 +2025-02-19 05:13:55,192 Train Loss: 0.0002058, Val Loss: 0.0002994 +2025-02-19 05:13:55,192 Epoch 1251/2000 +2025-02-19 05:14:38,081 Current Learning 
Rate: 0.0084795640 +2025-02-19 05:14:38,081 Train Loss: 0.0002717, Val Loss: 0.0003850 +2025-02-19 05:14:38,081 Epoch 1252/2000 +2025-02-19 05:15:20,883 Current Learning Rate: 0.0084227355 +2025-02-19 05:15:20,884 Train Loss: 0.0003723, Val Loss: 0.0003072 +2025-02-19 05:15:20,884 Epoch 1253/2000 +2025-02-19 05:16:03,321 Current Learning Rate: 0.0083650626 +2025-02-19 05:16:03,322 Train Loss: 0.0002526, Val Loss: 0.0002829 +2025-02-19 05:16:03,322 Epoch 1254/2000 +2025-02-19 05:16:46,013 Current Learning Rate: 0.0083065593 +2025-02-19 05:16:46,014 Train Loss: 0.0002701, Val Loss: 0.0002778 +2025-02-19 05:16:46,014 Epoch 1255/2000 +2025-02-19 05:17:29,029 Current Learning Rate: 0.0082472402 +2025-02-19 05:17:29,030 Train Loss: 0.0002764, Val Loss: 0.0003050 +2025-02-19 05:17:29,030 Epoch 1256/2000 +2025-02-19 05:18:11,815 Current Learning Rate: 0.0081871199 +2025-02-19 05:18:11,816 Train Loss: 0.0002992, Val Loss: 0.0003072 +2025-02-19 05:18:11,816 Epoch 1257/2000 +2025-02-19 05:18:53,883 Current Learning Rate: 0.0081262133 +2025-02-19 05:18:53,884 Train Loss: 0.0002123, Val Loss: 0.0003024 +2025-02-19 05:18:53,884 Epoch 1258/2000 +2025-02-19 05:19:36,873 Current Learning Rate: 0.0080645353 +2025-02-19 05:19:36,873 Train Loss: 0.0002702, Val Loss: 0.0003362 +2025-02-19 05:19:36,874 Epoch 1259/2000 +2025-02-19 05:20:18,817 Current Learning Rate: 0.0080021011 +2025-02-19 05:20:18,818 Train Loss: 0.0002418, Val Loss: 0.0003292 +2025-02-19 05:20:18,818 Epoch 1260/2000 +2025-02-19 05:21:01,479 Current Learning Rate: 0.0079389263 +2025-02-19 05:21:01,480 Train Loss: 0.0002907, Val Loss: 0.0003881 +2025-02-19 05:21:01,480 Epoch 1261/2000 +2025-02-19 05:21:44,416 Current Learning Rate: 0.0078750263 +2025-02-19 05:21:44,417 Train Loss: 0.0003452, Val Loss: 0.0003645 +2025-02-19 05:21:44,417 Epoch 1262/2000 +2025-02-19 05:22:26,910 Current Learning Rate: 0.0078104169 +2025-02-19 05:22:26,911 Train Loss: 0.0003222, Val Loss: 0.0003064 +2025-02-19 05:22:26,911 Epoch 1263/2000 
+2025-02-19 05:23:09,809 Current Learning Rate: 0.0077451141 +2025-02-19 05:23:09,809 Train Loss: 0.0002808, Val Loss: 0.0002959 +2025-02-19 05:23:09,810 Epoch 1264/2000 +2025-02-19 05:23:51,636 Current Learning Rate: 0.0076791340 +2025-02-19 05:23:51,636 Train Loss: 0.0002716, Val Loss: 0.0002927 +2025-02-19 05:23:51,636 Epoch 1265/2000 +2025-02-19 05:24:34,551 Current Learning Rate: 0.0076124928 +2025-02-19 05:24:34,552 Train Loss: 0.0002731, Val Loss: 0.0002681 +2025-02-19 05:24:34,552 Epoch 1266/2000 +2025-02-19 05:25:17,697 Current Learning Rate: 0.0075452071 +2025-02-19 05:25:17,698 Train Loss: 0.0002551, Val Loss: 0.0002604 +2025-02-19 05:25:17,698 Epoch 1267/2000 +2025-02-19 05:26:00,820 Current Learning Rate: 0.0074772933 +2025-02-19 05:26:00,821 Train Loss: 0.0002330, Val Loss: 0.0002588 +2025-02-19 05:26:00,821 Epoch 1268/2000 +2025-02-19 05:26:42,966 Current Learning Rate: 0.0074087684 +2025-02-19 05:26:42,967 Train Loss: 0.0002070, Val Loss: 0.0002747 +2025-02-19 05:26:42,967 Epoch 1269/2000 +2025-02-19 05:27:24,943 Current Learning Rate: 0.0073396491 +2025-02-19 05:27:24,944 Train Loss: 0.0002312, Val Loss: 0.0002729 +2025-02-19 05:27:24,944 Epoch 1270/2000 +2025-02-19 05:28:07,090 Current Learning Rate: 0.0072699525 +2025-02-19 05:28:07,090 Train Loss: 0.0002523, Val Loss: 0.0002654 +2025-02-19 05:28:07,091 Epoch 1271/2000 +2025-02-19 05:28:50,043 Current Learning Rate: 0.0071996958 +2025-02-19 05:28:50,044 Train Loss: 0.0001803, Val Loss: 0.0002642 +2025-02-19 05:28:50,044 Epoch 1272/2000 +2025-02-19 05:29:31,973 Current Learning Rate: 0.0071288965 +2025-02-19 05:29:31,975 Train Loss: 0.0002394, Val Loss: 0.0002683 +2025-02-19 05:29:31,976 Epoch 1273/2000 +2025-02-19 05:30:15,230 Current Learning Rate: 0.0070575718 +2025-02-19 05:30:15,248 Train Loss: 0.0002191, Val Loss: 0.0002703 +2025-02-19 05:30:15,249 Epoch 1274/2000 +2025-02-19 05:30:58,431 Current Learning Rate: 0.0069857395 +2025-02-19 05:30:58,432 Train Loss: 0.0002086, Val Loss: 0.0002544 
+2025-02-19 05:30:58,432 Epoch 1275/2000 +2025-02-19 05:31:40,594 Current Learning Rate: 0.0069134172 +2025-02-19 05:31:40,595 Train Loss: 0.0002234, Val Loss: 0.0002655 +2025-02-19 05:31:40,595 Epoch 1276/2000 +2025-02-19 05:32:23,008 Current Learning Rate: 0.0068406228 +2025-02-19 05:32:23,009 Train Loss: 0.0002214, Val Loss: 0.0003035 +2025-02-19 05:32:23,011 Epoch 1277/2000 +2025-02-19 05:33:06,259 Current Learning Rate: 0.0067673742 +2025-02-19 05:33:06,259 Train Loss: 0.0002642, Val Loss: 0.0003223 +2025-02-19 05:33:06,259 Epoch 1278/2000 +2025-02-19 05:33:49,250 Current Learning Rate: 0.0066936896 +2025-02-19 05:33:49,250 Train Loss: 0.0002282, Val Loss: 0.0002573 +2025-02-19 05:33:49,251 Epoch 1279/2000 +2025-02-19 05:34:31,878 Current Learning Rate: 0.0066195871 +2025-02-19 05:34:31,879 Train Loss: 0.0002282, Val Loss: 0.0002570 +2025-02-19 05:34:31,879 Epoch 1280/2000 +2025-02-19 05:35:14,927 Current Learning Rate: 0.0065450850 +2025-02-19 05:35:16,499 Train Loss: 0.0001579, Val Loss: 0.0002373 +2025-02-19 05:35:16,499 Epoch 1281/2000 +2025-02-19 05:35:57,624 Current Learning Rate: 0.0064702016 +2025-02-19 05:35:57,625 Train Loss: 0.0002475, Val Loss: 0.0002616 +2025-02-19 05:35:57,625 Epoch 1282/2000 +2025-02-19 05:36:41,147 Current Learning Rate: 0.0063949555 +2025-02-19 05:36:41,148 Train Loss: 0.0002361, Val Loss: 0.0002520 +2025-02-19 05:36:41,148 Epoch 1283/2000 +2025-02-19 05:37:24,309 Current Learning Rate: 0.0063193652 +2025-02-19 05:37:24,310 Train Loss: 0.0001925, Val Loss: 0.0002398 +2025-02-19 05:37:24,310 Epoch 1284/2000 +2025-02-19 05:38:07,087 Current Learning Rate: 0.0062434494 +2025-02-19 05:38:07,088 Train Loss: 0.0002231, Val Loss: 0.0002411 +2025-02-19 05:38:07,088 Epoch 1285/2000 +2025-02-19 05:38:50,004 Current Learning Rate: 0.0061672268 +2025-02-19 05:38:50,004 Train Loss: 0.0002115, Val Loss: 0.0002465 +2025-02-19 05:38:50,007 Epoch 1286/2000 +2025-02-19 05:39:33,379 Current Learning Rate: 0.0060907162 +2025-02-19 05:39:33,380 
Train Loss: 0.0002359, Val Loss: 0.0002487 +2025-02-19 05:39:33,380 Epoch 1287/2000 +2025-02-19 05:40:15,829 Current Learning Rate: 0.0060139365 +2025-02-19 05:40:15,829 Train Loss: 0.0001992, Val Loss: 0.0002509 +2025-02-19 05:40:15,830 Epoch 1288/2000 +2025-02-19 05:40:59,004 Current Learning Rate: 0.0059369066 +2025-02-19 05:40:59,005 Train Loss: 0.0001793, Val Loss: 0.0002589 +2025-02-19 05:40:59,005 Epoch 1289/2000 +2025-02-19 05:41:41,574 Current Learning Rate: 0.0058596455 +2025-02-19 05:41:41,575 Train Loss: 0.0002360, Val Loss: 0.0002491 +2025-02-19 05:41:41,575 Epoch 1290/2000 +2025-02-19 05:42:24,093 Current Learning Rate: 0.0057821723 +2025-02-19 05:42:24,094 Train Loss: 0.0002294, Val Loss: 0.0002512 +2025-02-19 05:42:24,094 Epoch 1291/2000 +2025-02-19 05:43:06,658 Current Learning Rate: 0.0057045062 +2025-02-19 05:43:06,659 Train Loss: 0.0002405, Val Loss: 0.0002643 +2025-02-19 05:43:06,659 Epoch 1292/2000 +2025-02-19 05:43:49,847 Current Learning Rate: 0.0056266662 +2025-02-19 05:43:49,848 Train Loss: 0.0002038, Val Loss: 0.0002624 +2025-02-19 05:43:49,849 Epoch 1293/2000 +2025-02-19 05:44:32,539 Current Learning Rate: 0.0055486716 +2025-02-19 05:44:32,540 Train Loss: 0.0002392, Val Loss: 0.0002590 +2025-02-19 05:44:32,540 Epoch 1294/2000 +2025-02-19 05:45:15,826 Current Learning Rate: 0.0054705416 +2025-02-19 05:45:15,827 Train Loss: 0.0002009, Val Loss: 0.0002517 +2025-02-19 05:45:15,827 Epoch 1295/2000 +2025-02-19 05:45:59,245 Current Learning Rate: 0.0053922955 +2025-02-19 05:45:59,246 Train Loss: 0.0002074, Val Loss: 0.0002411 +2025-02-19 05:45:59,247 Epoch 1296/2000 +2025-02-19 05:46:42,238 Current Learning Rate: 0.0053139526 +2025-02-19 05:46:42,238 Train Loss: 0.0002000, Val Loss: 0.0002422 +2025-02-19 05:46:42,238 Epoch 1297/2000 +2025-02-19 05:47:25,482 Current Learning Rate: 0.0052355323 +2025-02-19 05:47:25,483 Train Loss: 0.0001729, Val Loss: 0.0002510 +2025-02-19 05:47:25,483 Epoch 1298/2000 +2025-02-19 05:48:08,457 Current Learning 
Rate: 0.0051570538 +2025-02-19 05:48:08,457 Train Loss: 0.0002268, Val Loss: 0.0002501 +2025-02-19 05:48:08,458 Epoch 1299/2000 +2025-02-19 05:48:51,758 Current Learning Rate: 0.0050785366 +2025-02-19 05:48:51,759 Train Loss: 0.0002405, Val Loss: 0.0002444 +2025-02-19 05:48:51,759 Epoch 1300/2000 +2025-02-19 05:49:34,470 Current Learning Rate: 0.0050000000 +2025-02-19 05:49:34,470 Train Loss: 0.0002857, Val Loss: 0.0002506 +2025-02-19 05:49:34,471 Epoch 1301/2000 +2025-02-19 05:50:16,815 Current Learning Rate: 0.0049214634 +2025-02-19 05:50:16,815 Train Loss: 0.0002398, Val Loss: 0.0002415 +2025-02-19 05:50:16,816 Epoch 1302/2000 +2025-02-19 05:50:59,622 Current Learning Rate: 0.0048429462 +2025-02-19 05:51:01,082 Train Loss: 0.0002251, Val Loss: 0.0002352 +2025-02-19 05:51:01,083 Epoch 1303/2000 +2025-02-19 05:51:44,258 Current Learning Rate: 0.0047644677 +2025-02-19 05:51:46,001 Train Loss: 0.0001888, Val Loss: 0.0002289 +2025-02-19 05:51:46,003 Epoch 1304/2000 +2025-02-19 05:52:27,774 Current Learning Rate: 0.0046860474 +2025-02-19 05:52:27,776 Train Loss: 0.0001850, Val Loss: 0.0002353 +2025-02-19 05:52:27,776 Epoch 1305/2000 +2025-02-19 05:53:10,985 Current Learning Rate: 0.0046077045 +2025-02-19 05:53:10,986 Train Loss: 0.0002298, Val Loss: 0.0002313 +2025-02-19 05:53:10,986 Epoch 1306/2000 +2025-02-19 05:53:54,132 Current Learning Rate: 0.0045294584 +2025-02-19 05:53:54,133 Train Loss: 0.0001596, Val Loss: 0.0002313 +2025-02-19 05:53:54,133 Epoch 1307/2000 +2025-02-19 05:54:36,532 Current Learning Rate: 0.0044513284 +2025-02-19 05:54:36,533 Train Loss: 0.0001983, Val Loss: 0.0002302 +2025-02-19 05:54:36,534 Epoch 1308/2000 +2025-02-19 05:55:19,006 Current Learning Rate: 0.0043733338 +2025-02-19 05:55:19,007 Train Loss: 0.0001755, Val Loss: 0.0002332 +2025-02-19 05:55:19,007 Epoch 1309/2000 +2025-02-19 05:56:02,230 Current Learning Rate: 0.0042954938 +2025-02-19 05:56:02,231 Train Loss: 0.0001757, Val Loss: 0.0002301 +2025-02-19 05:56:02,231 Epoch 1310/2000 
+2025-02-19 05:56:45,651 Current Learning Rate: 0.0042178277 +2025-02-19 05:56:45,653 Train Loss: 0.0002220, Val Loss: 0.0002334 +2025-02-19 05:56:45,653 Epoch 1311/2000 +2025-02-19 05:57:28,071 Current Learning Rate: 0.0041403545 +2025-02-19 05:57:28,072 Train Loss: 0.0002170, Val Loss: 0.0002575 +2025-02-19 05:57:28,072 Epoch 1312/2000 +2025-02-19 05:58:11,618 Current Learning Rate: 0.0040630934 +2025-02-19 05:58:11,619 Train Loss: 0.0002336, Val Loss: 0.0002394 +2025-02-19 05:58:11,619 Epoch 1313/2000 +2025-02-19 05:58:54,646 Current Learning Rate: 0.0039860635 +2025-02-19 05:58:54,646 Train Loss: 0.0002126, Val Loss: 0.0002306 +2025-02-19 05:58:54,647 Epoch 1314/2000 +2025-02-19 05:59:37,271 Current Learning Rate: 0.0039092838 +2025-02-19 05:59:38,797 Train Loss: 0.0001485, Val Loss: 0.0002283 +2025-02-19 05:59:38,797 Epoch 1315/2000 +2025-02-19 06:00:20,352 Current Learning Rate: 0.0038327732 +2025-02-19 06:00:21,391 Train Loss: 0.0001845, Val Loss: 0.0002241 +2025-02-19 06:00:21,391 Epoch 1316/2000 +2025-02-19 06:01:03,445 Current Learning Rate: 0.0037565506 +2025-02-19 06:01:04,606 Train Loss: 0.0001865, Val Loss: 0.0002203 +2025-02-19 06:01:04,607 Epoch 1317/2000 +2025-02-19 06:01:46,459 Current Learning Rate: 0.0036806348 +2025-02-19 06:01:48,358 Train Loss: 0.0001609, Val Loss: 0.0002167 +2025-02-19 06:01:48,358 Epoch 1318/2000 +2025-02-19 06:02:31,373 Current Learning Rate: 0.0036050445 +2025-02-19 06:02:33,106 Train Loss: 0.0001382, Val Loss: 0.0002156 +2025-02-19 06:02:33,106 Epoch 1319/2000 +2025-02-19 06:03:15,809 Current Learning Rate: 0.0035297984 +2025-02-19 06:03:15,809 Train Loss: 0.0001949, Val Loss: 0.0002168 +2025-02-19 06:03:15,810 Epoch 1320/2000 +2025-02-19 06:03:57,648 Current Learning Rate: 0.0034549150 +2025-02-19 06:03:59,411 Train Loss: 0.0001356, Val Loss: 0.0002128 +2025-02-19 06:03:59,412 Epoch 1321/2000 +2025-02-19 06:04:42,236 Current Learning Rate: 0.0033804129 +2025-02-19 06:04:42,237 Train Loss: 0.0001599, Val Loss: 0.0002132 
+2025-02-19 06:04:42,237 Epoch 1322/2000 +2025-02-19 06:05:25,157 Current Learning Rate: 0.0033063104 +2025-02-19 06:05:26,805 Train Loss: 0.0001727, Val Loss: 0.0002122 +2025-02-19 06:05:26,805 Epoch 1323/2000 +2025-02-19 06:06:09,337 Current Learning Rate: 0.0032326258 +2025-02-19 06:06:09,338 Train Loss: 0.0001787, Val Loss: 0.0002130 +2025-02-19 06:06:09,339 Epoch 1324/2000 +2025-02-19 06:06:52,146 Current Learning Rate: 0.0031593772 +2025-02-19 06:06:53,992 Train Loss: 0.0001712, Val Loss: 0.0002113 +2025-02-19 06:06:53,992 Epoch 1325/2000 +2025-02-19 06:07:35,329 Current Learning Rate: 0.0030865828 +2025-02-19 06:07:35,330 Train Loss: 0.0001806, Val Loss: 0.0002148 +2025-02-19 06:07:35,330 Epoch 1326/2000 +2025-02-19 06:08:18,518 Current Learning Rate: 0.0030142605 +2025-02-19 06:08:18,518 Train Loss: 0.0001908, Val Loss: 0.0002160 +2025-02-19 06:08:18,519 Epoch 1327/2000 +2025-02-19 06:09:00,577 Current Learning Rate: 0.0029424282 +2025-02-19 06:09:00,577 Train Loss: 0.0002026, Val Loss: 0.0002139 +2025-02-19 06:09:00,578 Epoch 1328/2000 +2025-02-19 06:09:43,062 Current Learning Rate: 0.0028711035 +2025-02-19 06:09:44,376 Train Loss: 0.0001698, Val Loss: 0.0002098 +2025-02-19 06:09:44,377 Epoch 1329/2000 +2025-02-19 06:10:25,957 Current Learning Rate: 0.0028003042 +2025-02-19 06:10:25,958 Train Loss: 0.0001617, Val Loss: 0.0002099 +2025-02-19 06:10:25,958 Epoch 1330/2000 +2025-02-19 06:11:08,271 Current Learning Rate: 0.0027300475 +2025-02-19 06:11:10,143 Train Loss: 0.0001564, Val Loss: 0.0002087 +2025-02-19 06:11:10,144 Epoch 1331/2000 +2025-02-19 06:11:51,439 Current Learning Rate: 0.0026603509 +2025-02-19 06:11:51,440 Train Loss: 0.0001679, Val Loss: 0.0002089 +2025-02-19 06:11:51,441 Epoch 1332/2000 +2025-02-19 06:12:33,918 Current Learning Rate: 0.0025912316 +2025-02-19 06:12:33,919 Train Loss: 0.0001998, Val Loss: 0.0002101 +2025-02-19 06:12:33,919 Epoch 1333/2000 +2025-02-19 06:13:17,183 Current Learning Rate: 0.0025227067 +2025-02-19 06:13:17,183 
Train Loss: 0.0001545, Val Loss: 0.0002087 +2025-02-19 06:13:17,184 Epoch 1334/2000 +2025-02-19 06:14:00,313 Current Learning Rate: 0.0024547929 +2025-02-19 06:14:02,100 Train Loss: 0.0001540, Val Loss: 0.0002071 +2025-02-19 06:14:02,101 Epoch 1335/2000 +2025-02-19 06:14:43,470 Current Learning Rate: 0.0023875072 +2025-02-19 06:14:45,190 Train Loss: 0.0001462, Val Loss: 0.0002068 +2025-02-19 06:14:45,191 Epoch 1336/2000 +2025-02-19 06:15:28,107 Current Learning Rate: 0.0023208660 +2025-02-19 06:15:29,456 Train Loss: 0.0001739, Val Loss: 0.0002058 +2025-02-19 06:15:29,456 Epoch 1337/2000 +2025-02-19 06:16:10,649 Current Learning Rate: 0.0022548859 +2025-02-19 06:16:11,436 Train Loss: 0.0001912, Val Loss: 0.0002055 +2025-02-19 06:16:11,437 Epoch 1338/2000 +2025-02-19 06:16:53,713 Current Learning Rate: 0.0021895831 +2025-02-19 06:16:53,715 Train Loss: 0.0001450, Val Loss: 0.0002069 +2025-02-19 06:16:53,715 Epoch 1339/2000 +2025-02-19 06:17:35,730 Current Learning Rate: 0.0021249737 +2025-02-19 06:17:36,643 Train Loss: 0.0001487, Val Loss: 0.0002048 +2025-02-19 06:17:36,643 Epoch 1340/2000 +2025-02-19 06:18:18,130 Current Learning Rate: 0.0020610737 +2025-02-19 06:18:19,300 Train Loss: 0.0001350, Val Loss: 0.0002045 +2025-02-19 06:18:19,301 Epoch 1341/2000 +2025-02-19 06:19:02,143 Current Learning Rate: 0.0019978989 +2025-02-19 06:19:02,144 Train Loss: 0.0001432, Val Loss: 0.0002045 +2025-02-19 06:19:02,144 Epoch 1342/2000 +2025-02-19 06:19:45,023 Current Learning Rate: 0.0019354647 +2025-02-19 06:19:46,467 Train Loss: 0.0001644, Val Loss: 0.0002041 +2025-02-19 06:19:46,467 Epoch 1343/2000 +2025-02-19 06:20:27,687 Current Learning Rate: 0.0018737867 +2025-02-19 06:20:29,137 Train Loss: 0.0001844, Val Loss: 0.0002034 +2025-02-19 06:20:29,137 Epoch 1344/2000 +2025-02-19 06:21:10,768 Current Learning Rate: 0.0018128801 +2025-02-19 06:21:11,857 Train Loss: 0.0001955, Val Loss: 0.0002032 +2025-02-19 06:21:11,858 Epoch 1345/2000 +2025-02-19 06:21:53,667 Current Learning 
Rate: 0.0017527598 +2025-02-19 06:21:55,392 Train Loss: 0.0001249, Val Loss: 0.0002016 +2025-02-19 06:21:55,393 Epoch 1346/2000 +2025-02-19 06:22:36,397 Current Learning Rate: 0.0016934407 +2025-02-19 06:22:37,942 Train Loss: 0.0001503, Val Loss: 0.0002009 +2025-02-19 06:22:37,944 Epoch 1347/2000 +2025-02-19 06:23:20,739 Current Learning Rate: 0.0016349374 +2025-02-19 06:23:20,739 Train Loss: 0.0001960, Val Loss: 0.0002011 +2025-02-19 06:23:20,739 Epoch 1348/2000 +2025-02-19 06:24:03,696 Current Learning Rate: 0.0015772645 +2025-02-19 06:24:05,537 Train Loss: 0.0001704, Val Loss: 0.0002005 +2025-02-19 06:24:05,537 Epoch 1349/2000 +2025-02-19 06:24:47,394 Current Learning Rate: 0.0015204360 +2025-02-19 06:24:48,900 Train Loss: 0.0001617, Val Loss: 0.0002000 +2025-02-19 06:24:48,901 Epoch 1350/2000 +2025-02-19 06:25:31,783 Current Learning Rate: 0.0014644661 +2025-02-19 06:25:33,498 Train Loss: 0.0001454, Val Loss: 0.0001996 +2025-02-19 06:25:33,498 Epoch 1351/2000 +2025-02-19 06:26:16,092 Current Learning Rate: 0.0014093685 +2025-02-19 06:26:16,093 Train Loss: 0.0002113, Val Loss: 0.0002000 +2025-02-19 06:26:16,093 Epoch 1352/2000 +2025-02-19 06:26:59,198 Current Learning Rate: 0.0013551569 +2025-02-19 06:26:59,198 Train Loss: 0.0002045, Val Loss: 0.0001997 +2025-02-19 06:26:59,199 Epoch 1353/2000 +2025-02-19 06:27:42,149 Current Learning Rate: 0.0013018445 +2025-02-19 06:27:43,943 Train Loss: 0.0001216, Val Loss: 0.0001989 +2025-02-19 06:27:43,943 Epoch 1354/2000 +2025-02-19 06:28:26,091 Current Learning Rate: 0.0012494447 +2025-02-19 06:28:26,092 Train Loss: 0.0001490, Val Loss: 0.0001990 +2025-02-19 06:28:26,094 Epoch 1355/2000 +2025-02-19 06:29:08,604 Current Learning Rate: 0.0011979702 +2025-02-19 06:29:10,303 Train Loss: 0.0001298, Val Loss: 0.0001985 +2025-02-19 06:29:10,303 Epoch 1356/2000 +2025-02-19 06:29:51,997 Current Learning Rate: 0.0011474338 +2025-02-19 06:29:53,412 Train Loss: 0.0001464, Val Loss: 0.0001984 +2025-02-19 06:29:53,413 Epoch 1357/2000 
+2025-02-19 06:30:35,036 Current Learning Rate: 0.0010978480 +2025-02-19 06:30:36,475 Train Loss: 0.0001327, Val Loss: 0.0001981 +2025-02-19 06:30:36,476 Epoch 1358/2000 +2025-02-19 06:31:18,104 Current Learning Rate: 0.0010492249 +2025-02-19 06:31:18,105 Train Loss: 0.0002118, Val Loss: 0.0001984 +2025-02-19 06:31:18,105 Epoch 1359/2000 +2025-02-19 06:32:00,966 Current Learning Rate: 0.0010015767 +2025-02-19 06:32:00,966 Train Loss: 0.0001323, Val Loss: 0.0001983 +2025-02-19 06:32:00,966 Epoch 1360/2000 +2025-02-19 06:32:43,892 Current Learning Rate: 0.0009549150 +2025-02-19 06:32:45,203 Train Loss: 0.0001945, Val Loss: 0.0001980 +2025-02-19 06:32:45,203 Epoch 1361/2000 +2025-02-19 06:33:28,179 Current Learning Rate: 0.0009092514 +2025-02-19 06:33:29,799 Train Loss: 0.0001507, Val Loss: 0.0001975 +2025-02-19 06:33:29,800 Epoch 1362/2000 +2025-02-19 06:34:12,701 Current Learning Rate: 0.0008645971 +2025-02-19 06:34:13,778 Train Loss: 0.0001549, Val Loss: 0.0001973 +2025-02-19 06:34:13,779 Epoch 1363/2000 +2025-02-19 06:34:56,283 Current Learning Rate: 0.0008209632 +2025-02-19 06:34:57,467 Train Loss: 0.0001584, Val Loss: 0.0001969 +2025-02-19 06:34:57,468 Epoch 1364/2000 +2025-02-19 06:35:40,163 Current Learning Rate: 0.0007783604 +2025-02-19 06:35:41,744 Train Loss: 0.0001905, Val Loss: 0.0001968 +2025-02-19 06:35:41,744 Epoch 1365/2000 +2025-02-19 06:36:24,575 Current Learning Rate: 0.0007367992 +2025-02-19 06:36:25,683 Train Loss: 0.0001476, Val Loss: 0.0001966 +2025-02-19 06:36:25,683 Epoch 1366/2000 +2025-02-19 06:37:08,716 Current Learning Rate: 0.0006962899 +2025-02-19 06:37:08,717 Train Loss: 0.0001873, Val Loss: 0.0001968 +2025-02-19 06:37:08,717 Epoch 1367/2000 +2025-02-19 06:37:51,641 Current Learning Rate: 0.0006568424 +2025-02-19 06:37:52,811 Train Loss: 0.0001570, Val Loss: 0.0001966 +2025-02-19 06:37:52,811 Epoch 1368/2000 +2025-02-19 06:38:35,639 Current Learning Rate: 0.0006184666 +2025-02-19 06:38:37,401 Train Loss: 0.0001603, Val Loss: 0.0001965 
+2025-02-19 06:38:37,401 Epoch 1369/2000 +2025-02-19 06:39:20,140 Current Learning Rate: 0.0005811718 +2025-02-19 06:39:21,879 Train Loss: 0.0001335, Val Loss: 0.0001962 +2025-02-19 06:39:21,880 Epoch 1370/2000 +2025-02-19 06:40:04,744 Current Learning Rate: 0.0005449674 +2025-02-19 06:40:06,493 Train Loss: 0.0001383, Val Loss: 0.0001962 +2025-02-19 06:40:06,493 Epoch 1371/2000 +2025-02-19 06:40:49,048 Current Learning Rate: 0.0005098621 +2025-02-19 06:40:50,807 Train Loss: 0.0001674, Val Loss: 0.0001961 +2025-02-19 06:40:50,807 Epoch 1372/2000 +2025-02-19 06:41:33,702 Current Learning Rate: 0.0004758647 +2025-02-19 06:41:33,703 Train Loss: 0.0001510, Val Loss: 0.0001962 +2025-02-19 06:41:33,703 Epoch 1373/2000 +2025-02-19 06:42:16,063 Current Learning Rate: 0.0004429836 +2025-02-19 06:42:16,064 Train Loss: 0.0002038, Val Loss: 0.0001962 +2025-02-19 06:42:16,064 Epoch 1374/2000 +2025-02-19 06:42:58,439 Current Learning Rate: 0.0004112269 +2025-02-19 06:42:58,440 Train Loss: 0.0001509, Val Loss: 0.0001962 +2025-02-19 06:42:58,440 Epoch 1375/2000 +2025-02-19 06:43:40,935 Current Learning Rate: 0.0003806023 +2025-02-19 06:43:42,264 Train Loss: 0.0001811, Val Loss: 0.0001961 +2025-02-19 06:43:42,264 Epoch 1376/2000 +2025-02-19 06:44:24,636 Current Learning Rate: 0.0003511176 +2025-02-19 06:44:26,103 Train Loss: 0.0001462, Val Loss: 0.0001959 +2025-02-19 06:44:26,103 Epoch 1377/2000 +2025-02-19 06:45:08,087 Current Learning Rate: 0.0003227798 +2025-02-19 06:45:09,952 Train Loss: 0.0001257, Val Loss: 0.0001958 +2025-02-19 06:45:09,952 Epoch 1378/2000 +2025-02-19 06:45:52,391 Current Learning Rate: 0.0002955962 +2025-02-19 06:45:54,216 Train Loss: 0.0001675, Val Loss: 0.0001957 +2025-02-19 06:45:54,216 Epoch 1379/2000 +2025-02-19 06:46:35,743 Current Learning Rate: 0.0002695732 +2025-02-19 06:46:36,981 Train Loss: 0.0001873, Val Loss: 0.0001955 +2025-02-19 06:46:36,981 Epoch 1380/2000 +2025-02-19 06:47:19,012 Current Learning Rate: 0.0002447174 +2025-02-19 06:47:20,141 
Train Loss: 0.0001514, Val Loss: 0.0001955 +2025-02-19 06:47:20,141 Epoch 1381/2000 +2025-02-19 06:48:01,537 Current Learning Rate: 0.0002210349 +2025-02-19 06:48:03,716 Train Loss: 0.0001452, Val Loss: 0.0001953 +2025-02-19 06:48:03,716 Epoch 1382/2000 +2025-02-19 06:48:45,279 Current Learning Rate: 0.0001985316 +2025-02-19 06:48:47,022 Train Loss: 0.0001319, Val Loss: 0.0001953 +2025-02-19 06:48:47,022 Epoch 1383/2000 +2025-02-19 06:49:29,932 Current Learning Rate: 0.0001772129 +2025-02-19 06:49:31,745 Train Loss: 0.0001265, Val Loss: 0.0001952 +2025-02-19 06:49:31,745 Epoch 1384/2000 +2025-02-19 06:50:14,021 Current Learning Rate: 0.0001570842 +2025-02-19 06:50:14,022 Train Loss: 0.0001790, Val Loss: 0.0001952 +2025-02-19 06:50:14,023 Epoch 1385/2000 +2025-02-19 06:50:56,620 Current Learning Rate: 0.0001381504 +2025-02-19 06:50:58,333 Train Loss: 0.0001809, Val Loss: 0.0001952 +2025-02-19 06:50:58,334 Epoch 1386/2000 +2025-02-19 06:51:40,179 Current Learning Rate: 0.0001204162 +2025-02-19 06:51:40,179 Train Loss: 0.0001462, Val Loss: 0.0001952 +2025-02-19 06:51:40,179 Epoch 1387/2000 +2025-02-19 06:52:23,168 Current Learning Rate: 0.0001038859 +2025-02-19 06:52:24,381 Train Loss: 0.0001917, Val Loss: 0.0001951 +2025-02-19 06:52:24,381 Epoch 1388/2000 +2025-02-19 06:53:07,148 Current Learning Rate: 0.0000885637 +2025-02-19 06:53:07,149 Train Loss: 0.0001766, Val Loss: 0.0001952 +2025-02-19 06:53:07,149 Epoch 1389/2000 +2025-02-19 06:53:50,088 Current Learning Rate: 0.0000744534 +2025-02-19 06:53:50,089 Train Loss: 0.0001360, Val Loss: 0.0001952 +2025-02-19 06:53:50,089 Epoch 1390/2000 +2025-02-19 06:54:32,932 Current Learning Rate: 0.0000615583 +2025-02-19 06:54:34,025 Train Loss: 0.0001689, Val Loss: 0.0001951 +2025-02-19 06:54:34,025 Epoch 1391/2000 +2025-02-19 06:55:16,217 Current Learning Rate: 0.0000498817 +2025-02-19 06:55:16,218 Train Loss: 0.0001715, Val Loss: 0.0001951 +2025-02-19 06:55:16,218 Epoch 1392/2000 +2025-02-19 06:55:59,315 Current Learning 
Rate: 0.0000394265 +2025-02-19 06:56:01,175 Train Loss: 0.0001693, Val Loss: 0.0001951 +2025-02-19 06:56:01,175 Epoch 1393/2000 +2025-02-19 06:56:43,978 Current Learning Rate: 0.0000301952 +2025-02-19 06:56:45,615 Train Loss: 0.0001297, Val Loss: 0.0001951 +2025-02-19 06:56:45,615 Epoch 1394/2000 +2025-02-19 06:57:28,498 Current Learning Rate: 0.0000221902 +2025-02-19 06:57:30,490 Train Loss: 0.0001561, Val Loss: 0.0001951 +2025-02-19 06:57:30,500 Epoch 1395/2000 +2025-02-19 06:58:11,981 Current Learning Rate: 0.0000154133 +2025-02-19 06:58:13,378 Train Loss: 0.0001475, Val Loss: 0.0001950 +2025-02-19 06:58:13,378 Epoch 1396/2000 +2025-02-19 06:58:56,290 Current Learning Rate: 0.0000098664 +2025-02-19 06:58:56,291 Train Loss: 0.0001332, Val Loss: 0.0001951 +2025-02-19 06:58:56,292 Epoch 1397/2000 +2025-02-19 06:59:39,295 Current Learning Rate: 0.0000055506 +2025-02-19 06:59:41,044 Train Loss: 0.0001668, Val Loss: 0.0001950 +2025-02-19 06:59:41,044 Epoch 1398/2000 +2025-02-19 07:00:22,513 Current Learning Rate: 0.0000024672 +2025-02-19 07:00:22,514 Train Loss: 0.0001177, Val Loss: 0.0001951 +2025-02-19 07:00:22,515 Epoch 1399/2000 +2025-02-19 07:01:04,942 Current Learning Rate: 0.0000006168 +2025-02-19 07:01:04,943 Train Loss: 0.0001249, Val Loss: 0.0001951 +2025-02-19 07:01:04,943 Epoch 1400/2000 +2025-02-19 07:01:47,740 Current Learning Rate: 0.0000000000 +2025-02-19 07:01:49,925 Train Loss: 0.0001752, Val Loss: 0.0001950 +2025-02-19 07:01:49,926 Epoch 1401/2000 +2025-02-19 07:02:32,866 Current Learning Rate: 0.0000006168 +2025-02-19 07:02:32,866 Train Loss: 0.0001747, Val Loss: 0.0001950 +2025-02-19 07:02:32,867 Epoch 1402/2000 +2025-02-19 07:03:16,079 Current Learning Rate: 0.0000024672 +2025-02-19 07:03:16,080 Train Loss: 0.0001382, Val Loss: 0.0001950 +2025-02-19 07:03:16,080 Epoch 1403/2000 +2025-02-19 07:03:58,845 Current Learning Rate: 0.0000055506 +2025-02-19 07:03:58,846 Train Loss: 0.0001455, Val Loss: 0.0001950 +2025-02-19 07:03:58,846 Epoch 1404/2000 
+2025-02-19 07:04:42,380 Current Learning Rate: 0.0000098664 +2025-02-19 07:04:42,380 Train Loss: 0.0001338, Val Loss: 0.0001950 +2025-02-19 07:04:42,380 Epoch 1405/2000 +2025-02-19 07:05:25,461 Current Learning Rate: 0.0000154133 +2025-02-19 07:05:25,462 Train Loss: 0.0001571, Val Loss: 0.0001950 +2025-02-19 07:05:25,462 Epoch 1406/2000 +2025-02-19 07:06:08,755 Current Learning Rate: 0.0000221902 +2025-02-19 07:06:08,755 Train Loss: 0.0001550, Val Loss: 0.0001950 +2025-02-19 07:06:08,755 Epoch 1407/2000 +2025-02-19 07:06:51,954 Current Learning Rate: 0.0000301952 +2025-02-19 07:06:51,954 Train Loss: 0.0001694, Val Loss: 0.0001950 +2025-02-19 07:06:51,955 Epoch 1408/2000 +2025-02-19 07:07:34,195 Current Learning Rate: 0.0000394265 +2025-02-19 07:07:34,196 Train Loss: 0.0001479, Val Loss: 0.0001950 +2025-02-19 07:07:34,196 Epoch 1409/2000 +2025-02-19 07:08:16,890 Current Learning Rate: 0.0000498817 +2025-02-19 07:08:16,891 Train Loss: 0.0001402, Val Loss: 0.0001951 +2025-02-19 07:08:16,891 Epoch 1410/2000 +2025-02-19 07:08:59,892 Current Learning Rate: 0.0000615583 +2025-02-19 07:08:59,893 Train Loss: 0.0001351, Val Loss: 0.0001951 +2025-02-19 07:08:59,893 Epoch 1411/2000 +2025-02-19 07:09:43,458 Current Learning Rate: 0.0000744534 +2025-02-19 07:09:43,459 Train Loss: 0.0001465, Val Loss: 0.0001951 +2025-02-19 07:09:43,459 Epoch 1412/2000 +2025-02-19 07:10:26,831 Current Learning Rate: 0.0000885637 +2025-02-19 07:10:26,832 Train Loss: 0.0001370, Val Loss: 0.0001952 +2025-02-19 07:10:26,832 Epoch 1413/2000 +2025-02-19 07:11:09,736 Current Learning Rate: 0.0001038859 +2025-02-19 07:11:09,737 Train Loss: 0.0001614, Val Loss: 0.0001952 +2025-02-19 07:11:09,737 Epoch 1414/2000 +2025-02-19 07:11:52,235 Current Learning Rate: 0.0001204162 +2025-02-19 07:11:52,236 Train Loss: 0.0001562, Val Loss: 0.0001951 +2025-02-19 07:11:52,237 Epoch 1415/2000 +2025-02-19 07:12:34,725 Current Learning Rate: 0.0001381504 +2025-02-19 07:12:34,725 Train Loss: 0.0001188, Val Loss: 0.0001951 
+2025-02-19 07:12:34,725 Epoch 1416/2000 +2025-02-19 07:13:18,166 Current Learning Rate: 0.0001570842 +2025-02-19 07:13:18,167 Train Loss: 0.0001191, Val Loss: 0.0001951 +2025-02-19 07:13:18,167 Epoch 1417/2000 +2025-02-19 07:14:00,929 Current Learning Rate: 0.0001772129 +2025-02-19 07:14:00,930 Train Loss: 0.0001645, Val Loss: 0.0001951 +2025-02-19 07:14:00,931 Epoch 1418/2000 +2025-02-19 07:14:43,697 Current Learning Rate: 0.0001985316 +2025-02-19 07:14:43,698 Train Loss: 0.0001306, Val Loss: 0.0001951 +2025-02-19 07:14:43,698 Epoch 1419/2000 +2025-02-19 07:15:26,173 Current Learning Rate: 0.0002210349 +2025-02-19 07:15:26,174 Train Loss: 0.0001461, Val Loss: 0.0001952 +2025-02-19 07:15:26,174 Epoch 1420/2000 +2025-02-19 07:16:08,869 Current Learning Rate: 0.0002447174 +2025-02-19 07:16:08,870 Train Loss: 0.0001468, Val Loss: 0.0001953 +2025-02-19 07:16:08,870 Epoch 1421/2000 +2025-02-19 07:16:52,213 Current Learning Rate: 0.0002695732 +2025-02-19 07:16:52,214 Train Loss: 0.0001603, Val Loss: 0.0001955 +2025-02-19 07:16:52,214 Epoch 1422/2000 +2025-02-19 07:17:35,416 Current Learning Rate: 0.0002955962 +2025-02-19 07:17:35,417 Train Loss: 0.0001236, Val Loss: 0.0001956 +2025-02-19 07:17:35,417 Epoch 1423/2000 +2025-02-19 07:18:18,342 Current Learning Rate: 0.0003227798 +2025-02-19 07:18:18,343 Train Loss: 0.0001326, Val Loss: 0.0001954 +2025-02-19 07:18:18,343 Epoch 1424/2000 +2025-02-19 07:19:00,444 Current Learning Rate: 0.0003511176 +2025-02-19 07:19:00,445 Train Loss: 0.0001816, Val Loss: 0.0001953 +2025-02-19 07:19:00,445 Epoch 1425/2000 +2025-02-19 07:19:43,361 Current Learning Rate: 0.0003806023 +2025-02-19 07:19:43,362 Train Loss: 0.0001778, Val Loss: 0.0001957 +2025-02-19 07:19:43,362 Epoch 1426/2000 +2025-02-19 07:20:25,879 Current Learning Rate: 0.0004112269 +2025-02-19 07:20:25,880 Train Loss: 0.0001551, Val Loss: 0.0001955 +2025-02-19 07:20:25,880 Epoch 1427/2000 +2025-02-19 07:21:07,927 Current Learning Rate: 0.0004429836 +2025-02-19 07:21:07,928 
Train Loss: 0.0001642, Val Loss: 0.0001955 +2025-02-19 07:21:07,928 Epoch 1428/2000 +2025-02-19 07:21:50,554 Current Learning Rate: 0.0004758647 +2025-02-19 07:21:50,555 Train Loss: 0.0001581, Val Loss: 0.0001954 +2025-02-19 07:21:50,555 Epoch 1429/2000 +2025-02-19 07:22:33,968 Current Learning Rate: 0.0005098621 +2025-02-19 07:22:33,968 Train Loss: 0.0001382, Val Loss: 0.0001953 +2025-02-19 07:22:33,969 Epoch 1430/2000 +2025-02-19 07:23:16,284 Current Learning Rate: 0.0005449674 +2025-02-19 07:23:16,285 Train Loss: 0.0001418, Val Loss: 0.0001955 +2025-02-19 07:23:16,286 Epoch 1431/2000 +2025-02-19 07:23:58,924 Current Learning Rate: 0.0005811718 +2025-02-19 07:23:58,924 Train Loss: 0.0001684, Val Loss: 0.0001957 +2025-02-19 07:23:58,924 Epoch 1432/2000 +2025-02-19 07:24:42,267 Current Learning Rate: 0.0006184666 +2025-02-19 07:24:42,267 Train Loss: 0.0001765, Val Loss: 0.0001958 +2025-02-19 07:24:42,268 Epoch 1433/2000 +2025-02-19 07:25:24,393 Current Learning Rate: 0.0006568424 +2025-02-19 07:25:24,394 Train Loss: 0.0001495, Val Loss: 0.0001956 +2025-02-19 07:25:24,394 Epoch 1434/2000 +2025-02-19 07:26:07,348 Current Learning Rate: 0.0006962899 +2025-02-19 07:26:07,349 Train Loss: 0.0001431, Val Loss: 0.0001956 +2025-02-19 07:26:07,349 Epoch 1435/2000 +2025-02-19 07:26:49,238 Current Learning Rate: 0.0007367992 +2025-02-19 07:26:49,239 Train Loss: 0.0001518, Val Loss: 0.0001957 +2025-02-19 07:26:49,239 Epoch 1436/2000 +2025-02-19 07:27:31,465 Current Learning Rate: 0.0007783604 +2025-02-19 07:27:31,466 Train Loss: 0.0001716, Val Loss: 0.0001960 +2025-02-19 07:27:31,466 Epoch 1437/2000 +2025-02-19 07:28:14,564 Current Learning Rate: 0.0008209632 +2025-02-19 07:28:14,564 Train Loss: 0.0001429, Val Loss: 0.0001957 +2025-02-19 07:28:14,565 Epoch 1438/2000 +2025-02-19 07:28:56,776 Current Learning Rate: 0.0008645971 +2025-02-19 07:28:56,777 Train Loss: 0.0001973, Val Loss: 0.0001964 +2025-02-19 07:28:56,777 Epoch 1439/2000 +2025-02-19 07:29:39,241 Current Learning 
Rate: 0.0009092514 +2025-02-19 07:29:39,241 Train Loss: 0.0001190, Val Loss: 0.0001956 +2025-02-19 07:29:39,242 Epoch 1440/2000 +2025-02-19 07:30:22,133 Current Learning Rate: 0.0009549150 +2025-02-19 07:30:22,133 Train Loss: 0.0001409, Val Loss: 0.0001958 +2025-02-19 07:30:22,133 Epoch 1441/2000 +2025-02-19 07:31:05,119 Current Learning Rate: 0.0010015767 +2025-02-19 07:31:05,120 Train Loss: 0.0001946, Val Loss: 0.0001972 +2025-02-19 07:31:05,120 Epoch 1442/2000 +2025-02-19 07:31:47,480 Current Learning Rate: 0.0010492249 +2025-02-19 07:31:47,481 Train Loss: 0.0001441, Val Loss: 0.0001968 +2025-02-19 07:31:47,481 Epoch 1443/2000 +2025-02-19 07:32:30,057 Current Learning Rate: 0.0010978480 +2025-02-19 07:32:30,058 Train Loss: 0.0001590, Val Loss: 0.0001968 +2025-02-19 07:32:30,058 Epoch 1444/2000 +2025-02-19 07:33:12,069 Current Learning Rate: 0.0011474338 +2025-02-19 07:33:12,070 Train Loss: 0.0001558, Val Loss: 0.0001981 +2025-02-19 07:33:12,070 Epoch 1445/2000 +2025-02-19 07:33:54,369 Current Learning Rate: 0.0011979702 +2025-02-19 07:33:54,370 Train Loss: 0.0001661, Val Loss: 0.0001973 +2025-02-19 07:33:54,371 Epoch 1446/2000 +2025-02-19 07:34:36,343 Current Learning Rate: 0.0012494447 +2025-02-19 07:34:36,343 Train Loss: 0.0001696, Val Loss: 0.0001982 +2025-02-19 07:34:36,344 Epoch 1447/2000 +2025-02-19 07:35:19,023 Current Learning Rate: 0.0013018445 +2025-02-19 07:35:19,024 Train Loss: 0.0001508, Val Loss: 0.0001969 +2025-02-19 07:35:19,024 Epoch 1448/2000 +2025-02-19 07:36:01,460 Current Learning Rate: 0.0013551569 +2025-02-19 07:36:01,460 Train Loss: 0.0001188, Val Loss: 0.0001963 +2025-02-19 07:36:01,461 Epoch 1449/2000 +2025-02-19 07:36:43,924 Current Learning Rate: 0.0014093685 +2025-02-19 07:36:43,925 Train Loss: 0.0001430, Val Loss: 0.0001983 +2025-02-19 07:36:43,925 Epoch 1450/2000 +2025-02-19 07:37:26,178 Current Learning Rate: 0.0014644661 +2025-02-19 07:37:26,178 Train Loss: 0.0001637, Val Loss: 0.0001976 +2025-02-19 07:37:26,178 Epoch 1451/2000 
+2025-02-19 07:38:08,176 Current Learning Rate: 0.0015204360 +2025-02-19 07:38:08,177 Train Loss: 0.0001851, Val Loss: 0.0002001 +2025-02-19 07:38:08,177 Epoch 1452/2000 +2025-02-19 07:38:50,761 Current Learning Rate: 0.0015772645 +2025-02-19 07:38:50,762 Train Loss: 0.0001441, Val Loss: 0.0001977 +2025-02-19 07:38:50,762 Epoch 1453/2000 +2025-02-19 07:39:33,883 Current Learning Rate: 0.0016349374 +2025-02-19 07:39:33,883 Train Loss: 0.0001329, Val Loss: 0.0001964 +2025-02-19 07:39:33,883 Epoch 1454/2000 +2025-02-19 07:40:15,781 Current Learning Rate: 0.0016934407 +2025-02-19 07:40:15,781 Train Loss: 0.0001266, Val Loss: 0.0001959 +2025-02-19 07:40:15,781 Epoch 1455/2000 +2025-02-19 07:40:59,063 Current Learning Rate: 0.0017527598 +2025-02-19 07:40:59,063 Train Loss: 0.0001695, Val Loss: 0.0001972 +2025-02-19 07:40:59,064 Epoch 1456/2000 +2025-02-19 07:41:41,674 Current Learning Rate: 0.0018128801 +2025-02-19 07:41:41,674 Train Loss: 0.0001289, Val Loss: 0.0001970 +2025-02-19 07:41:41,675 Epoch 1457/2000 +2025-02-19 07:42:23,564 Current Learning Rate: 0.0018737867 +2025-02-19 07:42:23,565 Train Loss: 0.0001478, Val Loss: 0.0001978 +2025-02-19 07:42:23,565 Epoch 1458/2000 +2025-02-19 07:43:06,594 Current Learning Rate: 0.0019354647 +2025-02-19 07:43:06,594 Train Loss: 0.0001802, Val Loss: 0.0001977 +2025-02-19 07:43:06,594 Epoch 1459/2000 +2025-02-19 07:43:49,553 Current Learning Rate: 0.0019978989 +2025-02-19 07:43:49,553 Train Loss: 0.0001816, Val Loss: 0.0002013 +2025-02-19 07:43:49,553 Epoch 1460/2000 +2025-02-19 07:44:32,429 Current Learning Rate: 0.0020610737 +2025-02-19 07:44:32,430 Train Loss: 0.0001608, Val Loss: 0.0001992 +2025-02-19 07:44:32,430 Epoch 1461/2000 +2025-02-19 07:45:15,467 Current Learning Rate: 0.0021249737 +2025-02-19 07:45:15,467 Train Loss: 0.0001295, Val Loss: 0.0002001 +2025-02-19 07:45:15,468 Epoch 1462/2000 +2025-02-19 07:45:57,528 Current Learning Rate: 0.0021895831 +2025-02-19 07:45:57,529 Train Loss: 0.0001627, Val Loss: 0.0001976 
+2025-02-19 07:45:57,529 Epoch 1463/2000 +2025-02-19 07:46:40,580 Current Learning Rate: 0.0022548859 +2025-02-19 07:46:40,580 Train Loss: 0.0001390, Val Loss: 0.0001990 +2025-02-19 07:46:40,581 Epoch 1464/2000 +2025-02-19 07:47:22,593 Current Learning Rate: 0.0023208660 +2025-02-19 07:47:22,594 Train Loss: 0.0001428, Val Loss: 0.0002021 +2025-02-19 07:47:22,594 Epoch 1465/2000 +2025-02-19 07:48:05,379 Current Learning Rate: 0.0023875072 +2025-02-19 07:48:05,379 Train Loss: 0.0002034, Val Loss: 0.0002133 +2025-02-19 07:48:05,380 Epoch 1466/2000 +2025-02-19 07:48:48,294 Current Learning Rate: 0.0024547929 +2025-02-19 07:48:48,295 Train Loss: 0.0001337, Val Loss: 0.0002009 +2025-02-19 07:48:48,295 Epoch 1467/2000 +2025-02-19 07:49:31,187 Current Learning Rate: 0.0025227067 +2025-02-19 07:49:31,188 Train Loss: 0.0001948, Val Loss: 0.0002251 +2025-02-19 07:49:31,188 Epoch 1468/2000 +2025-02-19 07:50:14,190 Current Learning Rate: 0.0025912316 +2025-02-19 07:50:14,191 Train Loss: 0.0001540, Val Loss: 0.0002046 +2025-02-19 07:50:14,191 Epoch 1469/2000 +2025-02-19 07:50:57,107 Current Learning Rate: 0.0026603509 +2025-02-19 07:50:57,107 Train Loss: 0.0001357, Val Loss: 0.0001993 +2025-02-19 07:50:57,108 Epoch 1470/2000 +2025-02-19 07:51:39,828 Current Learning Rate: 0.0027300475 +2025-02-19 07:51:39,828 Train Loss: 0.0001682, Val Loss: 0.0002071 +2025-02-19 07:51:39,829 Epoch 1471/2000 +2025-02-19 07:52:22,560 Current Learning Rate: 0.0028003042 +2025-02-19 07:52:22,560 Train Loss: 0.0001686, Val Loss: 0.0002344 +2025-02-19 07:52:22,560 Epoch 1472/2000 +2025-02-19 07:53:05,615 Current Learning Rate: 0.0028711035 +2025-02-19 07:53:05,615 Train Loss: 0.0001732, Val Loss: 0.0002047 +2025-02-19 07:53:05,615 Epoch 1473/2000 +2025-02-19 07:53:47,800 Current Learning Rate: 0.0029424282 +2025-02-19 07:53:47,801 Train Loss: 0.0001470, Val Loss: 0.0002015 +2025-02-19 07:53:47,801 Epoch 1474/2000 +2025-02-19 07:54:30,930 Current Learning Rate: 0.0030142605 +2025-02-19 07:54:30,931 
Train Loss: 0.0001695, Val Loss: 0.0002009 +2025-02-19 07:54:30,931 Epoch 1475/2000 +2025-02-19 07:55:13,890 Current Learning Rate: 0.0030865828 +2025-02-19 07:55:13,891 Train Loss: 0.0001791, Val Loss: 0.0002123 +2025-02-19 07:55:13,891 Epoch 1476/2000 +2025-02-19 07:55:56,883 Current Learning Rate: 0.0031593772 +2025-02-19 07:55:56,883 Train Loss: 0.0001339, Val Loss: 0.0002033 +2025-02-19 07:55:56,884 Epoch 1477/2000 +2025-02-19 07:56:39,587 Current Learning Rate: 0.0032326258 +2025-02-19 07:56:39,588 Train Loss: 0.0001420, Val Loss: 0.0002072 +2025-02-19 07:56:39,588 Epoch 1478/2000 +2025-02-19 07:57:21,698 Current Learning Rate: 0.0033063104 +2025-02-19 07:57:21,699 Train Loss: 0.0001889, Val Loss: 0.0002220 +2025-02-19 07:57:21,699 Epoch 1479/2000 +2025-02-19 07:58:04,838 Current Learning Rate: 0.0033804129 +2025-02-19 07:58:04,838 Train Loss: 0.0001504, Val Loss: 0.0002127 +2025-02-19 07:58:04,839 Epoch 1480/2000 +2025-02-19 07:58:47,368 Current Learning Rate: 0.0034549150 +2025-02-19 07:58:47,377 Train Loss: 0.0002265, Val Loss: 0.0002265 +2025-02-19 07:58:47,384 Epoch 1481/2000 +2025-02-19 07:59:30,479 Current Learning Rate: 0.0035297984 +2025-02-19 07:59:30,480 Train Loss: 0.0001680, Val Loss: 0.0002286 +2025-02-19 07:59:30,481 Epoch 1482/2000 +2025-02-19 08:00:13,167 Current Learning Rate: 0.0036050445 +2025-02-19 08:00:13,167 Train Loss: 0.0001633, Val Loss: 0.0002089 +2025-02-19 08:00:13,167 Epoch 1483/2000 +2025-02-19 08:00:56,106 Current Learning Rate: 0.0036806348 +2025-02-19 08:00:56,107 Train Loss: 0.0001451, Val Loss: 0.0002081 +2025-02-19 08:00:56,107 Epoch 1484/2000 +2025-02-19 08:01:39,142 Current Learning Rate: 0.0037565506 +2025-02-19 08:01:39,142 Train Loss: 0.0001489, Val Loss: 0.0002080 +2025-02-19 08:01:39,142 Epoch 1485/2000 +2025-02-19 08:02:22,079 Current Learning Rate: 0.0038327732 +2025-02-19 08:02:22,079 Train Loss: 0.0002045, Val Loss: 0.0002199 +2025-02-19 08:02:22,079 Epoch 1486/2000 +2025-02-19 08:03:04,020 Current Learning 
Rate: 0.0039092838 +2025-02-19 08:03:04,021 Train Loss: 0.0003976, Val Loss: 0.0002651 +2025-02-19 08:03:04,021 Epoch 1487/2000 +2025-02-19 08:03:46,943 Current Learning Rate: 0.0039860635 +2025-02-19 08:03:46,943 Train Loss: 0.0002031, Val Loss: 0.0002236 +2025-02-19 08:03:46,943 Epoch 1488/2000 +2025-02-19 08:04:29,872 Current Learning Rate: 0.0040630934 +2025-02-19 08:04:29,872 Train Loss: 0.0002540, Val Loss: 0.0002175 +2025-02-19 08:04:29,873 Epoch 1489/2000 +2025-02-19 08:05:12,694 Current Learning Rate: 0.0041403545 +2025-02-19 08:05:12,695 Train Loss: 0.0001855, Val Loss: 0.0002180 +2025-02-19 08:05:12,695 Epoch 1490/2000 +2025-02-19 08:05:55,463 Current Learning Rate: 0.0042178277 +2025-02-19 08:05:55,464 Train Loss: 0.0002066, Val Loss: 0.0002193 +2025-02-19 08:05:55,464 Epoch 1491/2000 +2025-02-19 08:06:38,285 Current Learning Rate: 0.0042954938 +2025-02-19 08:06:38,285 Train Loss: 0.0001675, Val Loss: 0.0002054 +2025-02-19 08:06:38,285 Epoch 1492/2000 +2025-02-19 08:07:20,383 Current Learning Rate: 0.0043733338 +2025-02-19 08:07:20,383 Train Loss: 0.0001201, Val Loss: 0.0002018 +2025-02-19 08:07:20,383 Epoch 1493/2000 +2025-02-19 08:08:02,498 Current Learning Rate: 0.0044513284 +2025-02-19 08:08:02,499 Train Loss: 0.0001626, Val Loss: 0.0002240 +2025-02-19 08:08:02,499 Epoch 1494/2000 +2025-02-19 08:08:44,793 Current Learning Rate: 0.0045294584 +2025-02-19 08:08:44,794 Train Loss: 0.0001457, Val Loss: 0.0002082 +2025-02-19 08:08:44,794 Epoch 1495/2000 +2025-02-19 08:09:26,759 Current Learning Rate: 0.0046077045 +2025-02-19 08:09:26,759 Train Loss: 0.0004107, Val Loss: 0.0002833 +2025-02-19 08:09:26,759 Epoch 1496/2000 +2025-02-19 08:10:08,953 Current Learning Rate: 0.0046860474 +2025-02-19 08:10:08,954 Train Loss: 0.0001688, Val Loss: 0.0002164 +2025-02-19 08:10:08,954 Epoch 1497/2000 +2025-02-19 08:10:51,890 Current Learning Rate: 0.0047644677 +2025-02-19 08:10:51,891 Train Loss: 0.0001997, Val Loss: 0.0002262 +2025-02-19 08:10:51,891 Epoch 1498/2000 
+2025-02-19 08:11:33,874 Current Learning Rate: 0.0048429462 +2025-02-19 08:11:33,874 Train Loss: 0.0001954, Val Loss: 0.0002259 +2025-02-19 08:11:33,875 Epoch 1499/2000 +2025-02-19 08:12:16,451 Current Learning Rate: 0.0049214634 +2025-02-19 08:12:16,452 Train Loss: 0.0001865, Val Loss: 0.0002165 +2025-02-19 08:12:16,452 Epoch 1500/2000 +2025-02-19 08:12:59,403 Current Learning Rate: 0.0050000000 +2025-02-19 08:12:59,404 Train Loss: 0.0001820, Val Loss: 0.0002292 +2025-02-19 08:12:59,404 Epoch 1501/2000 +2025-02-19 08:13:41,519 Current Learning Rate: 0.0050785366 +2025-02-19 08:13:41,519 Train Loss: 0.0001468, Val Loss: 0.0002126 +2025-02-19 08:13:41,520 Epoch 1502/2000 +2025-02-19 08:14:24,491 Current Learning Rate: 0.0051570538 +2025-02-19 08:14:24,492 Train Loss: 0.0002013, Val Loss: 0.0002498 +2025-02-19 08:14:24,492 Epoch 1503/2000 +2025-02-19 08:15:07,078 Current Learning Rate: 0.0052355323 +2025-02-19 08:15:07,079 Train Loss: 0.0001968, Val Loss: 0.0002388 +2025-02-19 08:15:07,079 Epoch 1504/2000 +2025-02-19 08:15:49,792 Current Learning Rate: 0.0053139526 +2025-02-19 08:15:49,792 Train Loss: 0.0001710, Val Loss: 0.0002253 +2025-02-19 08:15:49,792 Epoch 1505/2000 +2025-02-19 08:16:32,705 Current Learning Rate: 0.0053922955 +2025-02-19 08:16:32,706 Train Loss: 0.0001604, Val Loss: 0.0002181 +2025-02-19 08:16:32,707 Epoch 1506/2000 +2025-02-19 08:17:15,674 Current Learning Rate: 0.0054705416 +2025-02-19 08:17:15,675 Train Loss: 0.0002435, Val Loss: 0.0002649 +2025-02-19 08:17:15,675 Epoch 1507/2000 +2025-02-19 08:17:58,042 Current Learning Rate: 0.0055486716 +2025-02-19 08:17:58,042 Train Loss: 0.0002222, Val Loss: 0.0002365 +2025-02-19 08:17:58,042 Epoch 1508/2000 +2025-02-19 08:18:40,363 Current Learning Rate: 0.0056266662 +2025-02-19 08:18:40,363 Train Loss: 0.0001988, Val Loss: 0.0002559 +2025-02-19 08:18:40,364 Epoch 1509/2000 +2025-02-19 08:19:22,871 Current Learning Rate: 0.0057045062 +2025-02-19 08:19:22,871 Train Loss: 0.0001844, Val Loss: 0.0002882 
+2025-02-19 08:19:22,871 Epoch 1510/2000 +2025-02-19 08:20:06,022 Current Learning Rate: 0.0057821723 +2025-02-19 08:20:06,023 Train Loss: 0.0002560, Val Loss: 0.0003296 +2025-02-19 08:20:06,023 Epoch 1511/2000 +2025-02-19 08:20:49,215 Current Learning Rate: 0.0058596455 +2025-02-19 08:20:49,215 Train Loss: 0.0002513, Val Loss: 0.0002483 +2025-02-19 08:20:49,216 Epoch 1512/2000 +2025-02-19 08:21:32,706 Current Learning Rate: 0.0059369066 +2025-02-19 08:21:32,707 Train Loss: 0.0002178, Val Loss: 0.0002721 +2025-02-19 08:21:32,707 Epoch 1513/2000 +2025-02-19 08:22:15,210 Current Learning Rate: 0.0060139365 +2025-02-19 08:22:15,210 Train Loss: 0.0002082, Val Loss: 0.0002953 +2025-02-19 08:22:15,211 Epoch 1514/2000 +2025-02-19 08:22:57,401 Current Learning Rate: 0.0060907162 +2025-02-19 08:22:57,401 Train Loss: 0.0002167, Val Loss: 0.0002572 +2025-02-19 08:22:57,402 Epoch 1515/2000 +2025-02-19 08:23:39,877 Current Learning Rate: 0.0061672268 +2025-02-19 08:23:39,877 Train Loss: 0.0002100, Val Loss: 0.0002608 +2025-02-19 08:23:39,878 Epoch 1516/2000 +2025-02-19 08:24:23,054 Current Learning Rate: 0.0062434494 +2025-02-19 08:24:23,054 Train Loss: 0.0002465, Val Loss: 0.0002791 +2025-02-19 08:24:23,067 Epoch 1517/2000 +2025-02-19 08:25:05,590 Current Learning Rate: 0.0063193652 +2025-02-19 08:25:05,591 Train Loss: 0.0002212, Val Loss: 0.0002738 +2025-02-19 08:25:05,591 Epoch 1518/2000 +2025-02-19 08:25:49,613 Current Learning Rate: 0.0063949555 +2025-02-19 08:25:49,614 Train Loss: 0.0002870, Val Loss: 0.0003399 +2025-02-19 08:25:49,614 Epoch 1519/2000 +2025-02-19 08:26:32,891 Current Learning Rate: 0.0064702016 +2025-02-19 08:26:32,892 Train Loss: 0.0002522, Val Loss: 0.0003258 +2025-02-19 08:26:32,892 Epoch 1520/2000 +2025-02-19 08:27:15,615 Current Learning Rate: 0.0065450850 +2025-02-19 08:27:15,615 Train Loss: 0.0002557, Val Loss: 0.0002717 +2025-02-19 08:27:15,615 Epoch 1521/2000 +2025-02-19 08:27:57,952 Current Learning Rate: 0.0066195871 +2025-02-19 08:27:57,953 
Train Loss: 0.0002116, Val Loss: 0.0002916 +2025-02-19 08:27:57,953 Epoch 1522/2000 +2025-02-19 08:28:41,020 Current Learning Rate: 0.0066936896 +2025-02-19 08:28:41,021 Train Loss: 0.0002302, Val Loss: 0.0002633 +2025-02-19 08:28:41,021 Epoch 1523/2000 +2025-02-19 08:29:24,670 Current Learning Rate: 0.0067673742 +2025-02-19 08:29:24,670 Train Loss: 0.0002558, Val Loss: 0.0002535 +2025-02-19 08:29:24,671 Epoch 1524/2000 +2025-02-19 08:30:06,915 Current Learning Rate: 0.0068406228 +2025-02-19 08:30:06,916 Train Loss: 0.0001685, Val Loss: 0.0002497 +2025-02-19 08:30:06,917 Epoch 1525/2000 +2025-02-19 08:30:50,259 Current Learning Rate: 0.0069134172 +2025-02-19 08:30:50,259 Train Loss: 0.0001982, Val Loss: 0.0002606 +2025-02-19 08:30:50,260 Epoch 1526/2000 +2025-02-19 08:31:32,710 Current Learning Rate: 0.0069857395 +2025-02-19 08:31:32,711 Train Loss: 0.0002570, Val Loss: 0.0002472 +2025-02-19 08:31:32,711 Epoch 1527/2000 +2025-02-19 08:32:14,865 Current Learning Rate: 0.0070575718 +2025-02-19 08:32:14,865 Train Loss: 0.0002172, Val Loss: 0.0003000 +2025-02-19 08:32:14,866 Epoch 1528/2000 +2025-02-19 08:32:58,001 Current Learning Rate: 0.0071288965 +2025-02-19 08:32:58,002 Train Loss: 0.0002228, Val Loss: 0.0003270 +2025-02-19 08:32:58,002 Epoch 1529/2000 +2025-02-19 08:33:40,614 Current Learning Rate: 0.0071996958 +2025-02-19 08:33:40,614 Train Loss: 0.0001987, Val Loss: 0.0002412 +2025-02-19 08:33:40,614 Epoch 1530/2000 +2025-02-19 08:34:23,652 Current Learning Rate: 0.0072699525 +2025-02-19 08:34:23,652 Train Loss: 0.0006390, Val Loss: 0.0007646 +2025-02-19 08:34:23,652 Epoch 1531/2000 +2025-02-19 08:35:06,696 Current Learning Rate: 0.0073396491 +2025-02-19 08:35:06,697 Train Loss: 0.0011702, Val Loss: 0.0004564 +2025-02-19 08:35:06,697 Epoch 1532/2000 +2025-02-19 08:35:49,230 Current Learning Rate: 0.0074087684 +2025-02-19 08:35:49,231 Train Loss: 0.0003034, Val Loss: 0.0002843 +2025-02-19 08:35:49,232 Epoch 1533/2000 +2025-02-19 08:36:31,424 Current Learning 
Rate: 0.0074772933 +2025-02-19 08:36:31,425 Train Loss: 0.0002477, Val Loss: 0.0002916 +2025-02-19 08:36:31,425 Epoch 1534/2000 +2025-02-19 08:37:13,755 Current Learning Rate: 0.0075452071 +2025-02-19 08:37:13,755 Train Loss: 0.0002565, Val Loss: 0.0002823 +2025-02-19 08:37:13,755 Epoch 1535/2000 +2025-02-19 08:37:56,447 Current Learning Rate: 0.0076124928 +2025-02-19 08:37:56,470 Train Loss: 0.0002207, Val Loss: 0.0002487 +2025-02-19 08:37:56,471 Epoch 1536/2000 +2025-02-19 08:38:39,462 Current Learning Rate: 0.0076791340 +2025-02-19 08:38:39,462 Train Loss: 0.0002332, Val Loss: 0.0002517 +2025-02-19 08:38:39,463 Epoch 1537/2000 +2025-02-19 08:39:21,757 Current Learning Rate: 0.0077451141 +2025-02-19 08:39:21,758 Train Loss: 0.0001866, Val Loss: 0.0002585 +2025-02-19 08:39:21,758 Epoch 1538/2000 +2025-02-19 08:40:04,213 Current Learning Rate: 0.0078104169 +2025-02-19 08:40:04,214 Train Loss: 0.0002167, Val Loss: 0.0002570 +2025-02-19 08:40:04,214 Epoch 1539/2000 +2025-02-19 08:40:46,443 Current Learning Rate: 0.0078750263 +2025-02-19 08:40:46,444 Train Loss: 0.0002286, Val Loss: 0.0002564 +2025-02-19 08:40:46,445 Epoch 1540/2000 +2025-02-19 08:41:28,759 Current Learning Rate: 0.0079389263 +2025-02-19 08:41:28,759 Train Loss: 0.0002637, Val Loss: 0.0002583 +2025-02-19 08:41:28,760 Epoch 1541/2000 +2025-02-19 08:42:11,881 Current Learning Rate: 0.0080021011 +2025-02-19 08:42:11,881 Train Loss: 0.0002244, Val Loss: 0.0002482 +2025-02-19 08:42:11,881 Epoch 1542/2000 +2025-02-19 08:42:53,861 Current Learning Rate: 0.0080645353 +2025-02-19 08:42:53,861 Train Loss: 0.0002571, Val Loss: 0.0002690 +2025-02-19 08:42:53,861 Epoch 1543/2000 +2025-02-19 08:43:36,844 Current Learning Rate: 0.0081262133 +2025-02-19 08:43:36,844 Train Loss: 0.0002353, Val Loss: 0.0002479 +2025-02-19 08:43:36,844 Epoch 1544/2000 +2025-02-19 08:44:19,353 Current Learning Rate: 0.0081871199 +2025-02-19 08:44:19,354 Train Loss: 0.0002326, Val Loss: 0.0002681 +2025-02-19 08:44:19,354 Epoch 1545/2000 
+2025-02-19 08:45:01,954 Current Learning Rate: 0.0082472402 +2025-02-19 08:45:01,955 Train Loss: 0.0002169, Val Loss: 0.0002961 +2025-02-19 08:45:01,955 Epoch 1546/2000 +2025-02-19 08:45:43,918 Current Learning Rate: 0.0083065593 +2025-02-19 08:45:43,919 Train Loss: 0.0002374, Val Loss: 0.0002582 +2025-02-19 08:45:43,919 Epoch 1547/2000 +2025-02-19 08:46:26,835 Current Learning Rate: 0.0083650626 +2025-02-19 08:46:26,836 Train Loss: 0.0002647, Val Loss: 0.0002561 +2025-02-19 08:46:26,836 Epoch 1548/2000 +2025-02-19 08:47:09,699 Current Learning Rate: 0.0084227355 +2025-02-19 08:47:09,699 Train Loss: 0.0001918, Val Loss: 0.0002443 +2025-02-19 08:47:09,699 Epoch 1549/2000 +2025-02-19 08:47:52,098 Current Learning Rate: 0.0084795640 +2025-02-19 08:47:52,098 Train Loss: 0.0002346, Val Loss: 0.0002501 +2025-02-19 08:47:52,099 Epoch 1550/2000 +2025-02-19 08:48:34,665 Current Learning Rate: 0.0085355339 +2025-02-19 08:48:34,665 Train Loss: 0.0002625, Val Loss: 0.0002557 +2025-02-19 08:48:34,665 Epoch 1551/2000 +2025-02-19 08:49:17,182 Current Learning Rate: 0.0085906315 +2025-02-19 08:49:17,182 Train Loss: 0.0003306, Val Loss: 0.0002691 +2025-02-19 08:49:17,182 Epoch 1552/2000 +2025-02-19 08:50:00,131 Current Learning Rate: 0.0086448431 +2025-02-19 08:50:00,132 Train Loss: 0.0001846, Val Loss: 0.0002536 +2025-02-19 08:50:00,132 Epoch 1553/2000 +2025-02-19 08:50:42,022 Current Learning Rate: 0.0086981555 +2025-02-19 08:50:42,022 Train Loss: 0.0002053, Val Loss: 0.0002573 +2025-02-19 08:50:42,023 Epoch 1554/2000 +2025-02-19 08:51:24,493 Current Learning Rate: 0.0087505553 +2025-02-19 08:51:24,493 Train Loss: 0.0002493, Val Loss: 0.0002722 +2025-02-19 08:51:24,493 Epoch 1555/2000 +2025-02-19 08:52:07,767 Current Learning Rate: 0.0088020298 +2025-02-19 08:52:07,768 Train Loss: 0.0003195, Val Loss: 0.0002814 +2025-02-19 08:52:07,768 Epoch 1556/2000 +2025-02-19 08:52:50,868 Current Learning Rate: 0.0088525662 +2025-02-19 08:52:50,869 Train Loss: 0.0002280, Val Loss: 0.0002864 
+2025-02-19 08:52:50,869 Epoch 1557/2000 +2025-02-19 08:53:32,911 Current Learning Rate: 0.0089021520 +2025-02-19 08:53:32,912 Train Loss: 0.0002404, Val Loss: 0.0002823 +2025-02-19 08:53:32,912 Epoch 1558/2000 +2025-02-19 08:54:15,212 Current Learning Rate: 0.0089507751 +2025-02-19 08:54:15,212 Train Loss: 0.0002315, Val Loss: 0.0002688 +2025-02-19 08:54:15,212 Epoch 1559/2000 +2025-02-19 08:54:57,581 Current Learning Rate: 0.0089984233 +2025-02-19 08:54:57,582 Train Loss: 0.0002853, Val Loss: 0.0003297 +2025-02-19 08:54:57,582 Epoch 1560/2000 +2025-02-19 08:55:39,988 Current Learning Rate: 0.0090450850 +2025-02-19 08:55:39,988 Train Loss: 0.0002139, Val Loss: 0.0002876 +2025-02-19 08:55:39,988 Epoch 1561/2000 +2025-02-19 08:56:22,629 Current Learning Rate: 0.0090907486 +2025-02-19 08:56:22,662 Train Loss: 0.0002782, Val Loss: 0.0003619 +2025-02-19 08:56:22,663 Epoch 1562/2000 +2025-02-19 08:57:05,576 Current Learning Rate: 0.0091354029 +2025-02-19 08:57:05,582 Train Loss: 0.0002684, Val Loss: 0.0003981 +2025-02-19 08:57:05,583 Epoch 1563/2000 +2025-02-19 08:57:48,427 Current Learning Rate: 0.0091790368 +2025-02-19 08:57:48,428 Train Loss: 0.0002909, Val Loss: 0.0003039 +2025-02-19 08:57:48,428 Epoch 1564/2000 +2025-02-19 08:58:31,779 Current Learning Rate: 0.0092216396 +2025-02-19 08:58:31,779 Train Loss: 0.0003100, Val Loss: 0.0004087 +2025-02-19 08:58:31,779 Epoch 1565/2000 +2025-02-19 08:59:14,876 Current Learning Rate: 0.0092632008 +2025-02-19 08:59:14,876 Train Loss: 0.0002565, Val Loss: 0.0003147 +2025-02-19 08:59:14,877 Epoch 1566/2000 +2025-02-19 08:59:56,864 Current Learning Rate: 0.0093037101 +2025-02-19 08:59:56,865 Train Loss: 0.0002524, Val Loss: 0.0003331 +2025-02-19 08:59:56,865 Epoch 1567/2000 +2025-02-19 09:00:40,291 Current Learning Rate: 0.0093431576 +2025-02-19 09:00:40,292 Train Loss: 0.0002138, Val Loss: 0.0003181 +2025-02-19 09:00:40,292 Epoch 1568/2000 +2025-02-19 09:01:22,366 Current Learning Rate: 0.0093815334 +2025-02-19 09:01:22,366 
Train Loss: 0.0003294, Val Loss: 0.0004356 +2025-02-19 09:01:22,367 Epoch 1569/2000 +2025-02-19 09:02:05,118 Current Learning Rate: 0.0094188282 +2025-02-19 09:02:05,119 Train Loss: 0.0002855, Val Loss: 0.0003168 +2025-02-19 09:02:05,119 Epoch 1570/2000 +2025-02-19 09:02:48,224 Current Learning Rate: 0.0094550326 +2025-02-19 09:02:48,225 Train Loss: 0.0002839, Val Loss: 0.0003345 +2025-02-19 09:02:48,225 Epoch 1571/2000 +2025-02-19 09:03:31,206 Current Learning Rate: 0.0094901379 +2025-02-19 09:03:31,206 Train Loss: 0.0002302, Val Loss: 0.0003205 +2025-02-19 09:03:31,206 Epoch 1572/2000 +2025-02-19 09:04:13,715 Current Learning Rate: 0.0095241353 +2025-02-19 09:04:13,716 Train Loss: 0.0002399, Val Loss: 0.0003471 +2025-02-19 09:04:13,716 Epoch 1573/2000 +2025-02-19 09:04:56,137 Current Learning Rate: 0.0095570164 +2025-02-19 09:04:56,138 Train Loss: 0.0002557, Val Loss: 0.0003277 +2025-02-19 09:04:56,139 Epoch 1574/2000 +2025-02-19 09:05:38,987 Current Learning Rate: 0.0095887731 +2025-02-19 09:05:38,987 Train Loss: 0.0003472, Val Loss: 0.0003464 +2025-02-19 09:05:38,988 Epoch 1575/2000 +2025-02-19 09:06:20,933 Current Learning Rate: 0.0096193977 +2025-02-19 09:06:20,934 Train Loss: 0.0002312, Val Loss: 0.0002899 +2025-02-19 09:06:20,934 Epoch 1576/2000 +2025-02-19 09:07:03,799 Current Learning Rate: 0.0096488824 +2025-02-19 09:07:03,799 Train Loss: 0.0002064, Val Loss: 0.0002947 +2025-02-19 09:07:03,800 Epoch 1577/2000 +2025-02-19 09:07:46,990 Current Learning Rate: 0.0096772202 +2025-02-19 09:07:46,991 Train Loss: 0.0002814, Val Loss: 0.0003216 +2025-02-19 09:07:47,018 Epoch 1578/2000 +2025-02-19 09:08:28,920 Current Learning Rate: 0.0097044038 +2025-02-19 09:08:28,921 Train Loss: 0.0002777, Val Loss: 0.0003434 +2025-02-19 09:08:28,922 Epoch 1579/2000 +2025-02-19 09:09:11,370 Current Learning Rate: 0.0097304268 +2025-02-19 09:09:11,371 Train Loss: 0.0003055, Val Loss: 0.0003128 +2025-02-19 09:09:11,371 Epoch 1580/2000 +2025-02-19 09:09:54,257 Current Learning 
Rate: 0.0097552826 +2025-02-19 09:09:54,258 Train Loss: 0.0003652, Val Loss: 0.0004352 +2025-02-19 09:09:54,258 Epoch 1581/2000 +2025-02-19 09:10:37,224 Current Learning Rate: 0.0097789651 +2025-02-19 09:10:37,225 Train Loss: 0.0002872, Val Loss: 0.0003580 +2025-02-19 09:10:37,225 Epoch 1582/2000 +2025-02-19 09:11:20,234 Current Learning Rate: 0.0098014684 +2025-02-19 09:11:20,234 Train Loss: 0.0005867, Val Loss: 0.0005102 +2025-02-19 09:11:20,235 Epoch 1583/2000 +2025-02-19 09:12:03,101 Current Learning Rate: 0.0098227871 +2025-02-19 09:12:03,102 Train Loss: 0.0004367, Val Loss: 0.0003194 +2025-02-19 09:12:03,102 Epoch 1584/2000 +2025-02-19 09:12:46,051 Current Learning Rate: 0.0098429158 +2025-02-19 09:12:46,051 Train Loss: 0.0003085, Val Loss: 0.0003412 +2025-02-19 09:12:46,051 Epoch 1585/2000 +2025-02-19 09:13:28,185 Current Learning Rate: 0.0098618496 +2025-02-19 09:13:28,186 Train Loss: 0.0003700, Val Loss: 0.0003821 +2025-02-19 09:13:28,186 Epoch 1586/2000 +2025-02-19 09:14:11,328 Current Learning Rate: 0.0098795838 +2025-02-19 09:14:11,329 Train Loss: 0.0002836, Val Loss: 0.0002979 +2025-02-19 09:14:11,329 Epoch 1587/2000 +2025-02-19 09:14:54,638 Current Learning Rate: 0.0098961141 +2025-02-19 09:14:54,639 Train Loss: 0.0002756, Val Loss: 0.0002496 +2025-02-19 09:14:54,639 Epoch 1588/2000 +2025-02-19 09:15:36,515 Current Learning Rate: 0.0099114363 +2025-02-19 09:15:36,515 Train Loss: 0.0002898, Val Loss: 0.0002686 +2025-02-19 09:15:36,516 Epoch 1589/2000 +2025-02-19 09:16:19,076 Current Learning Rate: 0.0099255466 +2025-02-19 09:16:19,077 Train Loss: 0.0002404, Val Loss: 0.0002352 +2025-02-19 09:16:19,077 Epoch 1590/2000 +2025-02-19 09:17:02,084 Current Learning Rate: 0.0099384417 +2025-02-19 09:17:02,085 Train Loss: 0.0002026, Val Loss: 0.0002316 +2025-02-19 09:17:02,085 Epoch 1591/2000 +2025-02-19 09:17:44,639 Current Learning Rate: 0.0099501183 +2025-02-19 09:17:44,639 Train Loss: 0.0001676, Val Loss: 0.0002330 +2025-02-19 09:17:44,640 Epoch 1592/2000 
+2025-02-19 09:18:27,135 Current Learning Rate: 0.0099605735 +2025-02-19 09:18:27,136 Train Loss: 0.0002034, Val Loss: 0.0002348 +2025-02-19 09:18:27,136 Epoch 1593/2000 +2025-02-19 09:19:09,203 Current Learning Rate: 0.0099698048 +2025-02-19 09:19:09,204 Train Loss: 0.0001832, Val Loss: 0.0002477 +2025-02-19 09:19:09,204 Epoch 1594/2000 +2025-02-19 09:19:51,481 Current Learning Rate: 0.0099778098 +2025-02-19 09:19:51,481 Train Loss: 0.0001805, Val Loss: 0.0002525 +2025-02-19 09:19:51,481 Epoch 1595/2000 +2025-02-19 09:20:34,170 Current Learning Rate: 0.0099845867 +2025-02-19 09:20:34,170 Train Loss: 0.0002115, Val Loss: 0.0002476 +2025-02-19 09:20:34,171 Epoch 1596/2000 +2025-02-19 09:21:16,605 Current Learning Rate: 0.0099901336 +2025-02-19 09:21:16,605 Train Loss: 0.0002473, Val Loss: 0.0002739 +2025-02-19 09:21:16,606 Epoch 1597/2000 +2025-02-19 09:21:59,495 Current Learning Rate: 0.0099944494 +2025-02-19 09:21:59,496 Train Loss: 0.0002047, Val Loss: 0.0002565 +2025-02-19 09:21:59,496 Epoch 1598/2000 +2025-02-19 09:22:42,799 Current Learning Rate: 0.0099975328 +2025-02-19 09:22:42,799 Train Loss: 0.0001905, Val Loss: 0.0002592 +2025-02-19 09:22:42,800 Epoch 1599/2000 +2025-02-19 09:23:25,193 Current Learning Rate: 0.0099993832 +2025-02-19 09:23:25,194 Train Loss: 0.0002237, Val Loss: 0.0002779 +2025-02-19 09:23:25,194 Epoch 1600/2000 +2025-02-19 09:24:08,309 Current Learning Rate: 0.0100000000 +2025-02-19 09:24:08,310 Train Loss: 0.0002927, Val Loss: 0.0002949 +2025-02-19 09:24:08,310 Epoch 1601/2000 +2025-02-19 09:24:51,201 Current Learning Rate: 0.0099993832 +2025-02-19 09:24:51,201 Train Loss: 0.0002353, Val Loss: 0.0002670 +2025-02-19 09:24:51,202 Epoch 1602/2000 +2025-02-19 09:25:34,175 Current Learning Rate: 0.0099975328 +2025-02-19 09:25:34,175 Train Loss: 0.0002587, Val Loss: 0.0002710 +2025-02-19 09:25:34,175 Epoch 1603/2000 +2025-02-19 09:26:16,988 Current Learning Rate: 0.0099944494 +2025-02-19 09:26:16,988 Train Loss: 0.0002121, Val Loss: 0.0002577 
+2025-02-19 09:26:16,988 Epoch 1604/2000 +2025-02-19 09:26:59,569 Current Learning Rate: 0.0099901336 +2025-02-19 09:26:59,570 Train Loss: 0.0002278, Val Loss: 0.0002654 +2025-02-19 09:26:59,570 Epoch 1605/2000 +2025-02-19 09:27:41,682 Current Learning Rate: 0.0099845867 +2025-02-19 09:27:41,683 Train Loss: 0.0002458, Val Loss: 0.0002833 +2025-02-19 09:27:41,684 Epoch 1606/2000 +2025-02-19 09:28:24,761 Current Learning Rate: 0.0099778098 +2025-02-19 09:28:24,761 Train Loss: 0.0001878, Val Loss: 0.0002676 +2025-02-19 09:28:24,762 Epoch 1607/2000 +2025-02-19 09:29:07,131 Current Learning Rate: 0.0099698048 +2025-02-19 09:29:07,132 Train Loss: 0.0002379, Val Loss: 0.0002700 +2025-02-19 09:29:07,132 Epoch 1608/2000 +2025-02-19 09:29:49,596 Current Learning Rate: 0.0099605735 +2025-02-19 09:29:49,597 Train Loss: 0.0002581, Val Loss: 0.0002632 +2025-02-19 09:29:49,598 Epoch 1609/2000 +2025-02-19 09:30:32,361 Current Learning Rate: 0.0099501183 +2025-02-19 09:30:32,361 Train Loss: 0.0003075, Val Loss: 0.0002759 +2025-02-19 09:30:32,362 Epoch 1610/2000 +2025-02-19 09:31:14,803 Current Learning Rate: 0.0099384417 +2025-02-19 09:31:14,804 Train Loss: 0.0002475, Val Loss: 0.0003342 +2025-02-19 09:31:14,804 Epoch 1611/2000 +2025-02-19 09:31:57,354 Current Learning Rate: 0.0099255466 +2025-02-19 09:31:57,355 Train Loss: 0.0002522, Val Loss: 0.0002756 +2025-02-19 09:31:57,355 Epoch 1612/2000 +2025-02-19 09:32:40,478 Current Learning Rate: 0.0099114363 +2025-02-19 09:32:40,478 Train Loss: 0.0002776, Val Loss: 0.0002887 +2025-02-19 09:32:40,479 Epoch 1613/2000 +2025-02-19 09:33:22,657 Current Learning Rate: 0.0098961141 +2025-02-19 09:33:22,657 Train Loss: 0.0002157, Val Loss: 0.0002747 +2025-02-19 09:33:22,658 Epoch 1614/2000 +2025-02-19 09:34:04,620 Current Learning Rate: 0.0098795838 +2025-02-19 09:34:04,621 Train Loss: 0.0002304, Val Loss: 0.0002682 +2025-02-19 09:34:04,621 Epoch 1615/2000 +2025-02-19 09:34:47,192 Current Learning Rate: 0.0098618496 +2025-02-19 09:34:47,192 
Train Loss: 0.0001979, Val Loss: 0.0002703 +2025-02-19 09:34:47,193 Epoch 1616/2000 +2025-02-19 09:35:30,351 Current Learning Rate: 0.0098429158 +2025-02-19 09:35:30,352 Train Loss: 0.0002676, Val Loss: 0.0002633 +2025-02-19 09:35:30,352 Epoch 1617/2000 +2025-02-19 09:36:12,322 Current Learning Rate: 0.0098227871 +2025-02-19 09:36:12,322 Train Loss: 0.0002367, Val Loss: 0.0002702 +2025-02-19 09:36:12,323 Epoch 1618/2000 +2025-02-19 09:36:54,959 Current Learning Rate: 0.0098014684 +2025-02-19 09:36:54,960 Train Loss: 0.0002292, Val Loss: 0.0002609 +2025-02-19 09:36:54,960 Epoch 1619/2000 +2025-02-19 09:37:38,086 Current Learning Rate: 0.0097789651 +2025-02-19 09:37:38,087 Train Loss: 0.0002406, Val Loss: 0.0002725 +2025-02-19 09:37:38,087 Epoch 1620/2000 +2025-02-19 09:38:21,246 Current Learning Rate: 0.0097552826 +2025-02-19 09:38:21,248 Train Loss: 0.0001725, Val Loss: 0.0003059 +2025-02-19 09:38:21,248 Epoch 1621/2000 +2025-02-19 09:39:03,394 Current Learning Rate: 0.0097304268 +2025-02-19 09:39:03,395 Train Loss: 0.0002304, Val Loss: 0.0002967 +2025-02-19 09:39:03,395 Epoch 1622/2000 +2025-02-19 09:39:45,495 Current Learning Rate: 0.0097044038 +2025-02-19 09:39:45,496 Train Loss: 0.0002388, Val Loss: 0.0002990 +2025-02-19 09:39:45,497 Epoch 1623/2000 +2025-02-19 09:40:28,714 Current Learning Rate: 0.0096772202 +2025-02-19 09:40:28,714 Train Loss: 0.0002905, Val Loss: 0.0005311 +2025-02-19 09:40:28,731 Epoch 1624/2000 +2025-02-19 09:41:11,763 Current Learning Rate: 0.0096488824 +2025-02-19 09:41:11,764 Train Loss: 0.0004078, Val Loss: 0.0003294 +2025-02-19 09:41:11,764 Epoch 1625/2000 +2025-02-19 09:41:54,919 Current Learning Rate: 0.0096193977 +2025-02-19 09:41:54,920 Train Loss: 0.0002248, Val Loss: 0.0002973 +2025-02-19 09:41:54,920 Epoch 1626/2000 +2025-02-19 09:42:38,383 Current Learning Rate: 0.0095887731 +2025-02-19 09:42:38,384 Train Loss: 0.0002325, Val Loss: 0.0002689 +2025-02-19 09:42:38,410 Epoch 1627/2000 +2025-02-19 09:43:21,573 Current Learning 
Rate: 0.0095570164 +2025-02-19 09:43:21,574 Train Loss: 0.0002908, Val Loss: 0.0002823 +2025-02-19 09:43:21,574 Epoch 1628/2000 +2025-02-19 09:44:04,066 Current Learning Rate: 0.0095241353 +2025-02-19 09:44:04,067 Train Loss: 0.0002969, Val Loss: 0.0003105 +2025-02-19 09:44:04,067 Epoch 1629/2000 +2025-02-19 09:44:46,454 Current Learning Rate: 0.0094901379 +2025-02-19 09:44:46,454 Train Loss: 0.0003243, Val Loss: 0.0002674 +2025-02-19 09:44:46,454 Epoch 1630/2000 +2025-02-19 09:45:29,671 Current Learning Rate: 0.0094550326 +2025-02-19 09:45:29,671 Train Loss: 0.0002718, Val Loss: 0.0002601 +2025-02-19 09:45:29,671 Epoch 1631/2000 +2025-02-19 09:46:11,831 Current Learning Rate: 0.0094188282 +2025-02-19 09:46:11,832 Train Loss: 0.0002243, Val Loss: 0.0002562 +2025-02-19 09:46:11,832 Epoch 1632/2000 +2025-02-19 09:46:54,900 Current Learning Rate: 0.0093815334 +2025-02-19 09:46:54,901 Train Loss: 0.0002142, Val Loss: 0.0002776 +2025-02-19 09:46:54,901 Epoch 1633/2000 +2025-02-19 09:47:37,671 Current Learning Rate: 0.0093431576 +2025-02-19 09:47:37,672 Train Loss: 0.0001991, Val Loss: 0.0002457 +2025-02-19 09:47:37,672 Epoch 1634/2000 +2025-02-19 09:48:20,161 Current Learning Rate: 0.0093037101 +2025-02-19 09:48:20,162 Train Loss: 0.0002672, Val Loss: 0.0002986 +2025-02-19 09:48:20,162 Epoch 1635/2000 +2025-02-19 09:49:02,908 Current Learning Rate: 0.0092632008 +2025-02-19 09:49:02,909 Train Loss: 0.0002957, Val Loss: 0.0003189 +2025-02-19 09:49:02,909 Epoch 1636/2000 +2025-02-19 09:49:46,008 Current Learning Rate: 0.0092216396 +2025-02-19 09:49:46,009 Train Loss: 0.0002988, Val Loss: 0.0003616 +2025-02-19 09:49:46,009 Epoch 1637/2000 +2025-02-19 09:50:29,052 Current Learning Rate: 0.0091790368 +2025-02-19 09:50:29,053 Train Loss: 0.0002641, Val Loss: 0.0004105 +2025-02-19 09:50:29,053 Epoch 1638/2000 +2025-02-19 09:51:11,860 Current Learning Rate: 0.0091354029 +2025-02-19 09:51:11,862 Train Loss: 0.0003434, Val Loss: 0.0003362 +2025-02-19 09:51:11,862 Epoch 1639/2000 
+2025-02-19 09:51:54,988 Current Learning Rate: 0.0090907486 +2025-02-19 09:51:54,989 Train Loss: 0.0002708, Val Loss: 0.0002941 +2025-02-19 09:51:54,989 Epoch 1640/2000 +2025-02-19 09:52:38,360 Current Learning Rate: 0.0090450850 +2025-02-19 09:52:38,361 Train Loss: 0.0004133, Val Loss: 0.0004922 +2025-02-19 09:52:38,361 Epoch 1641/2000 +2025-02-19 09:53:22,131 Current Learning Rate: 0.0089984233 +2025-02-19 09:53:22,131 Train Loss: 0.0003010, Val Loss: 0.0002547 +2025-02-19 09:53:22,132 Epoch 1642/2000 +2025-02-19 09:54:04,699 Current Learning Rate: 0.0089507751 +2025-02-19 09:54:04,700 Train Loss: 0.0001816, Val Loss: 0.0002466 +2025-02-19 09:54:04,700 Epoch 1643/2000 +2025-02-19 09:54:47,929 Current Learning Rate: 0.0089021520 +2025-02-19 09:54:47,930 Train Loss: 0.0002275, Val Loss: 0.0002345 +2025-02-19 09:54:47,930 Epoch 1644/2000 +2025-02-19 09:55:30,883 Current Learning Rate: 0.0088525662 +2025-02-19 09:55:30,883 Train Loss: 0.0001878, Val Loss: 0.0002415 +2025-02-19 09:55:30,884 Epoch 1645/2000 +2025-02-19 09:56:13,953 Current Learning Rate: 0.0088020298 +2025-02-19 09:56:13,954 Train Loss: 0.0002528, Val Loss: 0.0002482 +2025-02-19 09:56:13,954 Epoch 1646/2000 +2025-02-19 09:56:57,409 Current Learning Rate: 0.0087505553 +2025-02-19 09:56:57,410 Train Loss: 0.0001908, Val Loss: 0.0002499 +2025-02-19 09:56:57,410 Epoch 1647/2000 +2025-02-19 09:57:39,847 Current Learning Rate: 0.0086981555 +2025-02-19 09:57:39,848 Train Loss: 0.0002432, Val Loss: 0.0002435 +2025-02-19 09:57:39,849 Epoch 1648/2000 +2025-02-19 09:58:22,417 Current Learning Rate: 0.0086448431 +2025-02-19 09:58:22,418 Train Loss: 0.0002923, Val Loss: 0.0003018 +2025-02-19 09:58:22,418 Epoch 1649/2000 +2025-02-19 09:59:05,106 Current Learning Rate: 0.0085906315 +2025-02-19 09:59:05,107 Train Loss: 0.0002096, Val Loss: 0.0002283 +2025-02-19 09:59:05,107 Epoch 1650/2000 +2025-02-19 09:59:48,233 Current Learning Rate: 0.0085355339 +2025-02-19 09:59:48,234 Train Loss: 0.0001936, Val Loss: 0.0002239 
+2025-02-19 09:59:48,234 Epoch 1651/2000 +2025-02-19 10:00:32,028 Current Learning Rate: 0.0084795640 +2025-02-19 10:00:32,029 Train Loss: 0.0002051, Val Loss: 0.0002288 +2025-02-19 10:00:32,029 Epoch 1652/2000 +2025-02-19 10:01:15,682 Current Learning Rate: 0.0084227355 +2025-02-19 10:01:15,683 Train Loss: 0.0001934, Val Loss: 0.0002536 +2025-02-19 10:01:15,684 Epoch 1653/2000 +2025-02-19 10:01:58,054 Current Learning Rate: 0.0083650626 +2025-02-19 10:01:58,055 Train Loss: 0.0001884, Val Loss: 0.0002708 +2025-02-19 10:01:58,055 Epoch 1654/2000 +2025-02-19 10:02:40,231 Current Learning Rate: 0.0083065593 +2025-02-19 10:02:40,231 Train Loss: 0.0001937, Val Loss: 0.0002945 +2025-02-19 10:02:40,231 Epoch 1655/2000 +2025-02-19 10:03:23,175 Current Learning Rate: 0.0082472402 +2025-02-19 10:03:23,175 Train Loss: 0.0002469, Val Loss: 0.0002605 +2025-02-19 10:03:23,176 Epoch 1656/2000 +2025-02-19 10:04:05,877 Current Learning Rate: 0.0081871199 +2025-02-19 10:04:05,878 Train Loss: 0.0002953, Val Loss: 0.0002979 +2025-02-19 10:04:05,878 Epoch 1657/2000 +2025-02-19 10:04:49,286 Current Learning Rate: 0.0081262133 +2025-02-19 10:04:49,287 Train Loss: 0.0002433, Val Loss: 0.0002573 +2025-02-19 10:04:49,287 Epoch 1658/2000 +2025-02-19 10:05:32,426 Current Learning Rate: 0.0080645353 +2025-02-19 10:05:32,426 Train Loss: 0.0002056, Val Loss: 0.0002339 +2025-02-19 10:05:32,426 Epoch 1659/2000 +2025-02-19 10:06:14,552 Current Learning Rate: 0.0080021011 +2025-02-19 10:06:14,552 Train Loss: 0.0001758, Val Loss: 0.0002204 +2025-02-19 10:06:14,552 Epoch 1660/2000 +2025-02-19 10:06:57,614 Current Learning Rate: 0.0079389263 +2025-02-19 10:06:57,615 Train Loss: 0.0001951, Val Loss: 0.0002218 +2025-02-19 10:06:57,615 Epoch 1661/2000 +2025-02-19 10:07:40,525 Current Learning Rate: 0.0078750263 +2025-02-19 10:07:40,526 Train Loss: 0.0002073, Val Loss: 0.0002191 +2025-02-19 10:07:40,526 Epoch 1662/2000 +2025-02-19 10:08:23,576 Current Learning Rate: 0.0078104169 +2025-02-19 10:08:23,577 
Train Loss: 0.0001961, Val Loss: 0.0002135 +2025-02-19 10:08:23,577 Epoch 1663/2000 +2025-02-19 10:09:06,303 Current Learning Rate: 0.0077451141 +2025-02-19 10:09:06,304 Train Loss: 0.0001724, Val Loss: 0.0002208 +2025-02-19 10:09:06,304 Epoch 1664/2000 +2025-02-19 10:09:48,667 Current Learning Rate: 0.0076791340 +2025-02-19 10:09:48,667 Train Loss: 0.0001803, Val Loss: 0.0002248 +2025-02-19 10:09:48,667 Epoch 1665/2000 +2025-02-19 10:10:31,222 Current Learning Rate: 0.0076124928 +2025-02-19 10:10:31,223 Train Loss: 0.0001806, Val Loss: 0.0002272 +2025-02-19 10:10:31,223 Epoch 1666/2000 +2025-02-19 10:11:13,651 Current Learning Rate: 0.0075452071 +2025-02-19 10:11:13,652 Train Loss: 0.0001736, Val Loss: 0.0002255 +2025-02-19 10:11:13,652 Epoch 1667/2000 +2025-02-19 10:11:56,233 Current Learning Rate: 0.0074772933 +2025-02-19 10:11:56,234 Train Loss: 0.0001418, Val Loss: 0.0002225 +2025-02-19 10:11:56,234 Epoch 1668/2000 +2025-02-19 10:12:38,573 Current Learning Rate: 0.0074087684 +2025-02-19 10:12:38,574 Train Loss: 0.0001704, Val Loss: 0.0002238 +2025-02-19 10:12:38,574 Epoch 1669/2000 +2025-02-19 10:13:21,175 Current Learning Rate: 0.0073396491 +2025-02-19 10:13:21,175 Train Loss: 0.0002268, Val Loss: 0.0002252 +2025-02-19 10:13:21,175 Epoch 1670/2000 +2025-02-19 10:14:04,527 Current Learning Rate: 0.0072699525 +2025-02-19 10:14:04,527 Train Loss: 0.0001976, Val Loss: 0.0002204 +2025-02-19 10:14:04,527 Epoch 1671/2000 +2025-02-19 10:14:47,697 Current Learning Rate: 0.0071996958 +2025-02-19 10:14:47,698 Train Loss: 0.0001554, Val Loss: 0.0002144 +2025-02-19 10:14:47,698 Epoch 1672/2000 +2025-02-19 10:15:30,502 Current Learning Rate: 0.0071288965 +2025-02-19 10:15:30,502 Train Loss: 0.0001801, Val Loss: 0.0002126 +2025-02-19 10:15:30,502 Epoch 1673/2000 +2025-02-19 10:16:13,637 Current Learning Rate: 0.0070575718 +2025-02-19 10:16:13,637 Train Loss: 0.0002318, Val Loss: 0.0002162 +2025-02-19 10:16:13,638 Epoch 1674/2000 +2025-02-19 10:16:56,130 Current Learning 
Rate: 0.0069857395 +2025-02-19 10:16:56,131 Train Loss: 0.0001830, Val Loss: 0.0002118 +2025-02-19 10:16:56,131 Epoch 1675/2000 +2025-02-19 10:17:38,892 Current Learning Rate: 0.0069134172 +2025-02-19 10:17:38,893 Train Loss: 0.0001627, Val Loss: 0.0002141 +2025-02-19 10:17:38,893 Epoch 1676/2000 +2025-02-19 10:18:21,555 Current Learning Rate: 0.0068406228 +2025-02-19 10:18:21,555 Train Loss: 0.0002087, Val Loss: 0.0002275 +2025-02-19 10:18:21,556 Epoch 1677/2000 +2025-02-19 10:19:03,574 Current Learning Rate: 0.0067673742 +2025-02-19 10:19:03,575 Train Loss: 0.0001394, Val Loss: 0.0002252 +2025-02-19 10:19:03,575 Epoch 1678/2000 +2025-02-19 10:19:45,977 Current Learning Rate: 0.0066936896 +2025-02-19 10:19:45,977 Train Loss: 0.0001526, Val Loss: 0.0002338 +2025-02-19 10:19:45,977 Epoch 1679/2000 +2025-02-19 10:20:29,134 Current Learning Rate: 0.0066195871 +2025-02-19 10:20:29,135 Train Loss: 0.0001597, Val Loss: 0.0002561 +2025-02-19 10:20:29,135 Epoch 1680/2000 +2025-02-19 10:21:12,176 Current Learning Rate: 0.0065450850 +2025-02-19 10:21:12,177 Train Loss: 0.0002065, Val Loss: 0.0002654 +2025-02-19 10:21:12,177 Epoch 1681/2000 +2025-02-19 10:21:54,847 Current Learning Rate: 0.0064702016 +2025-02-19 10:21:54,848 Train Loss: 0.0002071, Val Loss: 0.0002726 +2025-02-19 10:21:54,848 Epoch 1682/2000 +2025-02-19 10:22:37,788 Current Learning Rate: 0.0063949555 +2025-02-19 10:22:37,789 Train Loss: 0.0002267, Val Loss: 0.0002320 +2025-02-19 10:22:37,789 Epoch 1683/2000 +2025-02-19 10:23:20,033 Current Learning Rate: 0.0063193652 +2025-02-19 10:23:20,034 Train Loss: 0.0001583, Val Loss: 0.0002230 +2025-02-19 10:23:20,034 Epoch 1684/2000 +2025-02-19 10:24:03,351 Current Learning Rate: 0.0062434494 +2025-02-19 10:24:03,352 Train Loss: 0.0002333, Val Loss: 0.0002360 +2025-02-19 10:24:03,352 Epoch 1685/2000 +2025-02-19 10:24:46,290 Current Learning Rate: 0.0061672268 +2025-02-19 10:24:46,290 Train Loss: 0.0001720, Val Loss: 0.0002120 +2025-02-19 10:24:46,290 Epoch 1686/2000 
+2025-02-19 10:25:29,342 Current Learning Rate: 0.0060907162 +2025-02-19 10:25:29,343 Train Loss: 0.0001615, Val Loss: 0.0002056 +2025-02-19 10:25:29,343 Epoch 1687/2000 +2025-02-19 10:26:11,949 Current Learning Rate: 0.0060139365 +2025-02-19 10:26:11,949 Train Loss: 0.0001668, Val Loss: 0.0002014 +2025-02-19 10:26:11,949 Epoch 1688/2000 +2025-02-19 10:26:54,898 Current Learning Rate: 0.0059369066 +2025-02-19 10:26:54,899 Train Loss: 0.0001672, Val Loss: 0.0002014 +2025-02-19 10:26:54,899 Epoch 1689/2000 +2025-02-19 10:27:37,572 Current Learning Rate: 0.0058596455 +2025-02-19 10:27:37,572 Train Loss: 0.0001726, Val Loss: 0.0002031 +2025-02-19 10:27:37,573 Epoch 1690/2000 +2025-02-19 10:28:19,338 Current Learning Rate: 0.0057821723 +2025-02-19 10:28:19,338 Train Loss: 0.0001944, Val Loss: 0.0002086 +2025-02-19 10:28:19,339 Epoch 1691/2000 +2025-02-19 10:29:01,941 Current Learning Rate: 0.0057045062 +2025-02-19 10:29:01,942 Train Loss: 0.0002045, Val Loss: 0.0002185 +2025-02-19 10:29:01,942 Epoch 1692/2000 +2025-02-19 10:29:44,412 Current Learning Rate: 0.0056266662 +2025-02-19 10:29:44,413 Train Loss: 0.0001981, Val Loss: 0.0002047 +2025-02-19 10:29:44,413 Epoch 1693/2000 +2025-02-19 10:30:27,621 Current Learning Rate: 0.0055486716 +2025-02-19 10:30:27,622 Train Loss: 0.0001535, Val Loss: 0.0002047 +2025-02-19 10:30:27,622 Epoch 1694/2000 +2025-02-19 10:31:10,958 Current Learning Rate: 0.0054705416 +2025-02-19 10:31:10,958 Train Loss: 0.0001417, Val Loss: 0.0002046 +2025-02-19 10:31:10,959 Epoch 1695/2000 +2025-02-19 10:31:53,852 Current Learning Rate: 0.0053922955 +2025-02-19 10:31:53,852 Train Loss: 0.0001232, Val Loss: 0.0002031 +2025-02-19 10:31:53,887 Epoch 1696/2000 +2025-02-19 10:32:36,908 Current Learning Rate: 0.0053139526 +2025-02-19 10:32:36,909 Train Loss: 0.0001290, Val Loss: 0.0002043 +2025-02-19 10:32:36,909 Epoch 1697/2000 +2025-02-19 10:33:18,765 Current Learning Rate: 0.0052355323 +2025-02-19 10:33:18,765 Train Loss: 0.0001534, Val Loss: 0.0002079 
+2025-02-19 10:33:18,766 Epoch 1698/2000 +2025-02-19 10:34:01,022 Current Learning Rate: 0.0051570538 +2025-02-19 10:34:01,023 Train Loss: 0.0001871, Val Loss: 0.0002070 +2025-02-19 10:34:01,023 Epoch 1699/2000 +2025-02-19 10:34:43,310 Current Learning Rate: 0.0050785366 +2025-02-19 10:34:43,311 Train Loss: 0.0001549, Val Loss: 0.0002044 +2025-02-19 10:34:43,311 Epoch 1700/2000 +2025-02-19 10:35:25,658 Current Learning Rate: 0.0050000000 +2025-02-19 10:35:25,659 Train Loss: 0.0001552, Val Loss: 0.0002023 +2025-02-19 10:35:25,659 Epoch 1701/2000 +2025-02-19 10:36:08,029 Current Learning Rate: 0.0049214634 +2025-02-19 10:36:08,030 Train Loss: 0.0001636, Val Loss: 0.0002007 +2025-02-19 10:36:08,030 Epoch 1702/2000 +2025-02-19 10:36:50,268 Current Learning Rate: 0.0048429462 +2025-02-19 10:36:50,269 Train Loss: 0.0001980, Val Loss: 0.0001999 +2025-02-19 10:36:50,269 Epoch 1703/2000 +2025-02-19 10:37:33,649 Current Learning Rate: 0.0047644677 +2025-02-19 10:37:33,650 Train Loss: 0.0001826, Val Loss: 0.0002055 +2025-02-19 10:37:33,650 Epoch 1704/2000 +2025-02-19 10:38:16,344 Current Learning Rate: 0.0046860474 +2025-02-19 10:38:16,344 Train Loss: 0.0001670, Val Loss: 0.0002095 +2025-02-19 10:38:16,345 Epoch 1705/2000 +2025-02-19 10:38:58,866 Current Learning Rate: 0.0046077045 +2025-02-19 10:38:58,867 Train Loss: 0.0001822, Val Loss: 0.0002067 +2025-02-19 10:38:58,868 Epoch 1706/2000 +2025-02-19 10:39:40,846 Current Learning Rate: 0.0045294584 +2025-02-19 10:39:40,847 Train Loss: 0.0001717, Val Loss: 0.0002086 +2025-02-19 10:39:40,847 Epoch 1707/2000 +2025-02-19 10:40:23,375 Current Learning Rate: 0.0044513284 +2025-02-19 10:40:23,375 Train Loss: 0.0001635, Val Loss: 0.0002265 +2025-02-19 10:40:23,375 Epoch 1708/2000 +2025-02-19 10:41:06,307 Current Learning Rate: 0.0043733338 +2025-02-19 10:41:06,307 Train Loss: 0.0001665, Val Loss: 0.0002255 +2025-02-19 10:41:06,308 Epoch 1709/2000 +2025-02-19 10:41:48,429 Current Learning Rate: 0.0042954938 +2025-02-19 10:41:48,430 
Train Loss: 0.0001882, Val Loss: 0.0002023 +2025-02-19 10:41:48,430 Epoch 1710/2000 +2025-02-19 10:42:30,935 Current Learning Rate: 0.0042178277 +2025-02-19 10:42:30,936 Train Loss: 0.0002167, Val Loss: 0.0002014 +2025-02-19 10:42:30,936 Epoch 1711/2000 +2025-02-19 10:43:13,367 Current Learning Rate: 0.0041403545 +2025-02-19 10:43:13,367 Train Loss: 0.0001915, Val Loss: 0.0002015 +2025-02-19 10:43:13,368 Epoch 1712/2000 +2025-02-19 10:43:55,743 Current Learning Rate: 0.0040630934 +2025-02-19 10:43:55,743 Train Loss: 0.0001431, Val Loss: 0.0001962 +2025-02-19 10:43:55,744 Epoch 1713/2000 +2025-02-19 10:44:38,233 Current Learning Rate: 0.0039860635 +2025-02-19 10:44:39,737 Train Loss: 0.0001364, Val Loss: 0.0001943 +2025-02-19 10:44:39,737 Epoch 1714/2000 +2025-02-19 10:45:22,551 Current Learning Rate: 0.0039092838 +2025-02-19 10:45:24,597 Train Loss: 0.0001314, Val Loss: 0.0001934 +2025-02-19 10:45:24,598 Epoch 1715/2000 +2025-02-19 10:46:07,167 Current Learning Rate: 0.0038327732 +2025-02-19 10:46:09,093 Train Loss: 0.0002020, Val Loss: 0.0001908 +2025-02-19 10:46:09,093 Epoch 1716/2000 +2025-02-19 10:46:51,822 Current Learning Rate: 0.0037565506 +2025-02-19 10:46:53,872 Train Loss: 0.0001500, Val Loss: 0.0001890 +2025-02-19 10:46:53,872 Epoch 1717/2000 +2025-02-19 10:47:35,049 Current Learning Rate: 0.0036806348 +2025-02-19 10:47:36,818 Train Loss: 0.0001583, Val Loss: 0.0001882 +2025-02-19 10:47:36,819 Epoch 1718/2000 +2025-02-19 10:48:18,213 Current Learning Rate: 0.0036050445 +2025-02-19 10:48:18,214 Train Loss: 0.0001931, Val Loss: 0.0001923 +2025-02-19 10:48:18,214 Epoch 1719/2000 +2025-02-19 10:49:00,414 Current Learning Rate: 0.0035297984 +2025-02-19 10:49:00,414 Train Loss: 0.0001274, Val Loss: 0.0001897 +2025-02-19 10:49:00,415 Epoch 1720/2000 +2025-02-19 10:49:43,169 Current Learning Rate: 0.0034549150 +2025-02-19 10:49:43,169 Train Loss: 0.0001133, Val Loss: 0.0001902 +2025-02-19 10:49:43,170 Epoch 1721/2000 +2025-02-19 10:50:25,447 Current Learning 
Rate: 0.0033804129 +2025-02-19 10:50:25,449 Train Loss: 0.0001188, Val Loss: 0.0001900 +2025-02-19 10:50:25,449 Epoch 1722/2000 +2025-02-19 10:51:07,680 Current Learning Rate: 0.0033063104 +2025-02-19 10:51:07,680 Train Loss: 0.0001248, Val Loss: 0.0001912 +2025-02-19 10:51:07,681 Epoch 1723/2000 +2025-02-19 10:51:50,357 Current Learning Rate: 0.0032326258 +2025-02-19 10:51:50,357 Train Loss: 0.0001197, Val Loss: 0.0001914 +2025-02-19 10:51:50,358 Epoch 1724/2000 +2025-02-19 10:52:33,306 Current Learning Rate: 0.0031593772 +2025-02-19 10:52:33,306 Train Loss: 0.0001763, Val Loss: 0.0001917 +2025-02-19 10:52:33,306 Epoch 1725/2000 +2025-02-19 10:53:16,277 Current Learning Rate: 0.0030865828 +2025-02-19 10:53:16,278 Train Loss: 0.0001528, Val Loss: 0.0001898 +2025-02-19 10:53:16,278 Epoch 1726/2000 +2025-02-19 10:53:59,220 Current Learning Rate: 0.0030142605 +2025-02-19 10:53:59,220 Train Loss: 0.0001919, Val Loss: 0.0001885 +2025-02-19 10:53:59,221 Epoch 1727/2000 +2025-02-19 10:54:41,168 Current Learning Rate: 0.0029424282 +2025-02-19 10:54:42,781 Train Loss: 0.0001527, Val Loss: 0.0001860 +2025-02-19 10:54:42,782 Epoch 1728/2000 +2025-02-19 10:55:24,580 Current Learning Rate: 0.0028711035 +2025-02-19 10:55:24,581 Train Loss: 0.0001788, Val Loss: 0.0001863 +2025-02-19 10:55:24,581 Epoch 1729/2000 +2025-02-19 10:56:07,480 Current Learning Rate: 0.0028003042 +2025-02-19 10:56:09,049 Train Loss: 0.0001536, Val Loss: 0.0001857 +2025-02-19 10:56:09,050 Epoch 1730/2000 +2025-02-19 10:56:50,612 Current Learning Rate: 0.0027300475 +2025-02-19 10:56:51,968 Train Loss: 0.0001604, Val Loss: 0.0001852 +2025-02-19 10:56:51,968 Epoch 1731/2000 +2025-02-19 10:57:33,530 Current Learning Rate: 0.0026603509 +2025-02-19 10:57:35,070 Train Loss: 0.0001426, Val Loss: 0.0001848 +2025-02-19 10:57:35,071 Epoch 1732/2000 +2025-02-19 10:58:17,582 Current Learning Rate: 0.0025912316 +2025-02-19 10:58:19,516 Train Loss: 0.0001639, Val Loss: 0.0001834 +2025-02-19 10:58:19,516 Epoch 1733/2000 
+2025-02-19 10:59:01,818 Current Learning Rate: 0.0025227067 +2025-02-19 10:59:04,175 Train Loss: 0.0001128, Val Loss: 0.0001827 +2025-02-19 10:59:04,176 Epoch 1734/2000 +2025-02-19 10:59:47,503 Current Learning Rate: 0.0024547929 +2025-02-19 10:59:47,504 Train Loss: 0.0001674, Val Loss: 0.0001830 +2025-02-19 10:59:47,504 Epoch 1735/2000 +2025-02-19 11:00:31,070 Current Learning Rate: 0.0023875072 +2025-02-19 11:00:31,071 Train Loss: 0.0001733, Val Loss: 0.0001829 +2025-02-19 11:00:31,071 Epoch 1736/2000 +2025-02-19 11:01:13,992 Current Learning Rate: 0.0023208660 +2025-02-19 11:01:15,800 Train Loss: 0.0001913, Val Loss: 0.0001820 +2025-02-19 11:01:15,801 Epoch 1737/2000 +2025-02-19 11:01:58,281 Current Learning Rate: 0.0022548859 +2025-02-19 11:01:59,906 Train Loss: 0.0001643, Val Loss: 0.0001818 +2025-02-19 11:01:59,907 Epoch 1738/2000 +2025-02-19 11:02:42,284 Current Learning Rate: 0.0021895831 +2025-02-19 11:02:43,516 Train Loss: 0.0001467, Val Loss: 0.0001811 +2025-02-19 11:02:43,517 Epoch 1739/2000 +2025-02-19 11:02:55,275 Loading best model from checkpoint. 
+2025-02-19 11:02:56,158 Error loading model checkpoint directly: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", 
"temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", 
"temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", 
"temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", 
"atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", 
"module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", 
"module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", 
"module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", 
"module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-19 11:02:56,161 Attempting to fix the state_dict by removing "module." prefix. +2025-02-19 11:02:56,187 Model loaded successfully after fixing state_dict. +2025-02-19 11:03:27,534 Current Learning Rate: 0.0021249737 +2025-02-19 11:03:29,375 Train Loss: 0.0001328, Val Loss: 0.0001803 +2025-02-19 11:03:29,378 Epoch 1740/2000 +2025-02-19 11:04:11,590 Current Learning Rate: 0.0020610737 +2025-02-19 11:04:11,593 Train Loss: 0.0001162, Val Loss: 0.0001806 +2025-02-19 11:04:11,594 Epoch 1741/2000 +2025-02-19 11:04:54,293 Current Learning Rate: 0.0019978989 +2025-02-19 11:04:55,557 Train Loss: 0.0001201, Val Loss: 0.0001802 +2025-02-19 11:04:55,560 Epoch 1742/2000 +2025-02-19 11:05:37,312 Current Learning Rate: 0.0019354647 +2025-02-19 11:05:37,314 Train Loss: 0.0001346, Val Loss: 0.0001804 +2025-02-19 11:05:37,315 Epoch 1743/2000 +2025-02-19 11:06:16,746 Animation.save using +2025-02-19 11:06:20,942 Current Learning Rate: 0.0018737867 +2025-02-19 11:06:22,787 Train Loss: 0.0001412, Val Loss: 0.0001800 +2025-02-19 11:06:22,792 Epoch 1744/2000 +2025-02-19 11:07:06,142 Current Learning Rate: 0.0018128801 +2025-02-19 11:07:07,991 Train Loss: 0.0001156, Val Loss: 0.0001790 +2025-02-19 11:07:07,994 Epoch 1745/2000 +2025-02-19 11:07:51,431 Current Learning Rate: 0.0017527598 +2025-02-19 11:07:53,633 Train Loss: 0.0001673, Val Loss: 0.0001788 +2025-02-19 11:07:53,640 Epoch 1746/2000 +2025-02-19 11:08:35,163 Current Learning Rate: 0.0016934407 +2025-02-19 11:08:36,252 Train Loss: 0.0001235, Val Loss: 0.0001781 +2025-02-19 11:08:36,253 Epoch 1747/2000 +2025-02-19 11:08:44,955 Loading best model from checkpoint. 
+2025-02-19 11:08:45,663 Error loading model checkpoint directly: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", "temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", 
"temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", "temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", 
"temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", "temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", 
"temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", "temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", 
"atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". + Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", 
"module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", "module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", 
"module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", "module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", "module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", 
"module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", "module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", 
"module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-19 11:08:45,666 Attempting to fix the state_dict by removing "module." prefix. +2025-02-19 11:08:45,685 Model loaded successfully after fixing state_dict. +2025-02-19 11:09:18,082 Current Learning Rate: 0.0016349374 +2025-02-19 11:09:19,211 Train Loss: 0.0001334, Val Loss: 0.0001779 +2025-02-19 11:09:19,214 Epoch 1748/2000 +2025-02-19 11:10:02,209 Current Learning Rate: 0.0015772645 +2025-02-19 11:10:04,005 Train Loss: 0.0001303, Val Loss: 0.0001775 +2025-02-19 11:10:04,030 Epoch 1749/2000 +2025-02-19 11:10:46,588 Current Learning Rate: 0.0015204360 +2025-02-19 11:10:46,590 Train Loss: 0.0001463, Val Loss: 0.0001778 +2025-02-19 11:10:46,591 Epoch 1750/2000 +2025-02-19 11:11:19,697 Loading best model from checkpoint. +2025-02-19 11:11:20,429 Error loading model checkpoint directly: Error(s) in loading state_dict for Triton: + Missing key(s) in state_dict: "atmospheric_encoder.enc.0.conv.conv.weight", "atmospheric_encoder.enc.0.conv.conv.bias", "atmospheric_encoder.enc.0.conv.norm.weight", "atmospheric_encoder.enc.0.conv.norm.bias", "atmospheric_encoder.enc.1.conv.conv.weight", "atmospheric_encoder.enc.1.conv.conv.bias", "atmospheric_encoder.enc.1.conv.norm.weight", "atmospheric_encoder.enc.1.conv.norm.bias", "atmospheric_encoder.enc.2.conv.conv.weight", "atmospheric_encoder.enc.2.conv.conv.bias", "atmospheric_encoder.enc.2.conv.norm.weight", "atmospheric_encoder.enc.2.conv.norm.bias", "atmospheric_encoder.enc.3.conv.conv.weight", "atmospheric_encoder.enc.3.conv.conv.bias", "atmospheric_encoder.enc.3.conv.norm.weight", "atmospheric_encoder.enc.3.conv.norm.bias", "temporal_evolution.enc.0.block.pos_embed.weight", "temporal_evolution.enc.0.block.pos_embed.bias", "temporal_evolution.enc.0.block.norm1.weight", "temporal_evolution.enc.0.block.norm1.bias", 
"temporal_evolution.enc.0.block.norm1.running_mean", "temporal_evolution.enc.0.block.norm1.running_var", "temporal_evolution.enc.0.block.conv1.weight", "temporal_evolution.enc.0.block.conv1.bias", "temporal_evolution.enc.0.block.conv2.weight", "temporal_evolution.enc.0.block.conv2.bias", "temporal_evolution.enc.0.block.attn.weight", "temporal_evolution.enc.0.block.attn.bias", "temporal_evolution.enc.0.block.norm2.weight", "temporal_evolution.enc.0.block.norm2.bias", "temporal_evolution.enc.0.block.norm2.running_mean", "temporal_evolution.enc.0.block.norm2.running_var", "temporal_evolution.enc.0.block.mlp.fc1.weight", "temporal_evolution.enc.0.block.mlp.fc1.bias", "temporal_evolution.enc.0.block.mlp.fc2.weight", "temporal_evolution.enc.0.block.mlp.fc2.bias", "temporal_evolution.enc.0.reduction.weight", "temporal_evolution.enc.0.reduction.bias", "temporal_evolution.enc.1.block.gamma_1", "temporal_evolution.enc.1.block.gamma_2", "temporal_evolution.enc.1.block.pos_embed.weight", "temporal_evolution.enc.1.block.pos_embed.bias", "temporal_evolution.enc.1.block.norm1.weight", "temporal_evolution.enc.1.block.norm1.bias", "temporal_evolution.enc.1.block.attn.qkv.weight", "temporal_evolution.enc.1.block.attn.qkv.bias", "temporal_evolution.enc.1.block.attn.proj.weight", "temporal_evolution.enc.1.block.attn.proj.bias", "temporal_evolution.enc.1.block.norm2.weight", "temporal_evolution.enc.1.block.norm2.bias", "temporal_evolution.enc.1.block.mlp.fc1.weight", "temporal_evolution.enc.1.block.mlp.fc1.bias", "temporal_evolution.enc.1.block.mlp.fc2.weight", "temporal_evolution.enc.1.block.mlp.fc2.bias", "temporal_evolution.enc.2.block.gamma_1", "temporal_evolution.enc.2.block.gamma_2", "temporal_evolution.enc.2.block.pos_embed.weight", "temporal_evolution.enc.2.block.pos_embed.bias", "temporal_evolution.enc.2.block.norm1.weight", "temporal_evolution.enc.2.block.norm1.bias", "temporal_evolution.enc.2.block.attn.qkv.weight", "temporal_evolution.enc.2.block.attn.qkv.bias", 
"temporal_evolution.enc.2.block.attn.proj.weight", "temporal_evolution.enc.2.block.attn.proj.bias", "temporal_evolution.enc.2.block.norm2.weight", "temporal_evolution.enc.2.block.norm2.bias", "temporal_evolution.enc.2.block.mlp.fc1.weight", "temporal_evolution.enc.2.block.mlp.fc1.bias", "temporal_evolution.enc.2.block.mlp.fc2.weight", "temporal_evolution.enc.2.block.mlp.fc2.bias", "temporal_evolution.enc.3.block.gamma_1", "temporal_evolution.enc.3.block.gamma_2", "temporal_evolution.enc.3.block.pos_embed.weight", "temporal_evolution.enc.3.block.pos_embed.bias", "temporal_evolution.enc.3.block.norm1.weight", "temporal_evolution.enc.3.block.norm1.bias", "temporal_evolution.enc.3.block.attn.qkv.weight", "temporal_evolution.enc.3.block.attn.qkv.bias", "temporal_evolution.enc.3.block.attn.proj.weight", "temporal_evolution.enc.3.block.attn.proj.bias", "temporal_evolution.enc.3.block.norm2.weight", "temporal_evolution.enc.3.block.norm2.bias", "temporal_evolution.enc.3.block.mlp.fc1.weight", "temporal_evolution.enc.3.block.mlp.fc1.bias", "temporal_evolution.enc.3.block.mlp.fc2.weight", "temporal_evolution.enc.3.block.mlp.fc2.bias", "temporal_evolution.enc.4.block.gamma_1", "temporal_evolution.enc.4.block.gamma_2", "temporal_evolution.enc.4.block.pos_embed.weight", "temporal_evolution.enc.4.block.pos_embed.bias", "temporal_evolution.enc.4.block.norm1.weight", "temporal_evolution.enc.4.block.norm1.bias", "temporal_evolution.enc.4.block.attn.qkv.weight", "temporal_evolution.enc.4.block.attn.qkv.bias", "temporal_evolution.enc.4.block.attn.proj.weight", "temporal_evolution.enc.4.block.attn.proj.bias", "temporal_evolution.enc.4.block.norm2.weight", "temporal_evolution.enc.4.block.norm2.bias", "temporal_evolution.enc.4.block.mlp.fc1.weight", "temporal_evolution.enc.4.block.mlp.fc1.bias", "temporal_evolution.enc.4.block.mlp.fc2.weight", "temporal_evolution.enc.4.block.mlp.fc2.bias", "temporal_evolution.enc.5.block.gamma_1", "temporal_evolution.enc.5.block.gamma_2", 
"temporal_evolution.enc.5.block.pos_embed.weight", "temporal_evolution.enc.5.block.pos_embed.bias", "temporal_evolution.enc.5.block.norm1.weight", "temporal_evolution.enc.5.block.norm1.bias", "temporal_evolution.enc.5.block.attn.qkv.weight", "temporal_evolution.enc.5.block.attn.qkv.bias", "temporal_evolution.enc.5.block.attn.proj.weight", "temporal_evolution.enc.5.block.attn.proj.bias", "temporal_evolution.enc.5.block.norm2.weight", "temporal_evolution.enc.5.block.norm2.bias", "temporal_evolution.enc.5.block.mlp.fc1.weight", "temporal_evolution.enc.5.block.mlp.fc1.bias", "temporal_evolution.enc.5.block.mlp.fc2.weight", "temporal_evolution.enc.5.block.mlp.fc2.bias", "temporal_evolution.enc.6.block.gamma_1", "temporal_evolution.enc.6.block.gamma_2", "temporal_evolution.enc.6.block.pos_embed.weight", "temporal_evolution.enc.6.block.pos_embed.bias", "temporal_evolution.enc.6.block.norm1.weight", "temporal_evolution.enc.6.block.norm1.bias", "temporal_evolution.enc.6.block.attn.qkv.weight", "temporal_evolution.enc.6.block.attn.qkv.bias", "temporal_evolution.enc.6.block.attn.proj.weight", "temporal_evolution.enc.6.block.attn.proj.bias", "temporal_evolution.enc.6.block.norm2.weight", "temporal_evolution.enc.6.block.norm2.bias", "temporal_evolution.enc.6.block.mlp.fc1.weight", "temporal_evolution.enc.6.block.mlp.fc1.bias", "temporal_evolution.enc.6.block.mlp.fc2.weight", "temporal_evolution.enc.6.block.mlp.fc2.bias", "temporal_evolution.enc.7.block.pos_embed.weight", "temporal_evolution.enc.7.block.pos_embed.bias", "temporal_evolution.enc.7.block.norm1.weight", "temporal_evolution.enc.7.block.norm1.bias", "temporal_evolution.enc.7.block.norm1.running_mean", "temporal_evolution.enc.7.block.norm1.running_var", "temporal_evolution.enc.7.block.conv1.weight", "temporal_evolution.enc.7.block.conv1.bias", "temporal_evolution.enc.7.block.conv2.weight", "temporal_evolution.enc.7.block.conv2.bias", "temporal_evolution.enc.7.block.attn.weight", 
"temporal_evolution.enc.7.block.attn.bias", "temporal_evolution.enc.7.block.norm2.weight", "temporal_evolution.enc.7.block.norm2.bias", "temporal_evolution.enc.7.block.norm2.running_mean", "temporal_evolution.enc.7.block.norm2.running_var", "temporal_evolution.enc.7.block.mlp.fc1.weight", "temporal_evolution.enc.7.block.mlp.fc1.bias", "temporal_evolution.enc.7.block.mlp.fc2.weight", "temporal_evolution.enc.7.block.mlp.fc2.bias", "temporal_evolution.enc.7.reduction.weight", "temporal_evolution.enc.7.reduction.bias", "atmospheric_decoder.dec.0.conv.conv.weight", "atmospheric_decoder.dec.0.conv.conv.bias", "atmospheric_decoder.dec.0.conv.norm.weight", "atmospheric_decoder.dec.0.conv.norm.bias", "atmospheric_decoder.dec.1.conv.conv.weight", "atmospheric_decoder.dec.1.conv.conv.bias", "atmospheric_decoder.dec.1.conv.norm.weight", "atmospheric_decoder.dec.1.conv.norm.bias", "atmospheric_decoder.dec.2.conv.conv.weight", "atmospheric_decoder.dec.2.conv.conv.bias", "atmospheric_decoder.dec.2.conv.norm.weight", "atmospheric_decoder.dec.2.conv.norm.bias", "atmospheric_decoder.dec.3.conv.conv.weight", "atmospheric_decoder.dec.3.conv.conv.bias", "atmospheric_decoder.dec.3.conv.norm.weight", "atmospheric_decoder.dec.3.conv.norm.bias", "atmospheric_decoder.readout.weight", "atmospheric_decoder.readout.bias". 
+ Unexpected key(s) in state_dict: "module.atmospheric_encoder.enc.0.conv.conv.weight", "module.atmospheric_encoder.enc.0.conv.conv.bias", "module.atmospheric_encoder.enc.0.conv.norm.weight", "module.atmospheric_encoder.enc.0.conv.norm.bias", "module.atmospheric_encoder.enc.1.conv.conv.weight", "module.atmospheric_encoder.enc.1.conv.conv.bias", "module.atmospheric_encoder.enc.1.conv.norm.weight", "module.atmospheric_encoder.enc.1.conv.norm.bias", "module.atmospheric_encoder.enc.2.conv.conv.weight", "module.atmospheric_encoder.enc.2.conv.conv.bias", "module.atmospheric_encoder.enc.2.conv.norm.weight", "module.atmospheric_encoder.enc.2.conv.norm.bias", "module.atmospheric_encoder.enc.3.conv.conv.weight", "module.atmospheric_encoder.enc.3.conv.conv.bias", "module.atmospheric_encoder.enc.3.conv.norm.weight", "module.atmospheric_encoder.enc.3.conv.norm.bias", "module.temporal_evolution.enc.0.block.pos_embed.weight", "module.temporal_evolution.enc.0.block.pos_embed.bias", "module.temporal_evolution.enc.0.block.norm1.weight", "module.temporal_evolution.enc.0.block.norm1.bias", "module.temporal_evolution.enc.0.block.norm1.running_mean", "module.temporal_evolution.enc.0.block.norm1.running_var", "module.temporal_evolution.enc.0.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.0.block.conv1.weight", "module.temporal_evolution.enc.0.block.conv1.bias", "module.temporal_evolution.enc.0.block.conv2.weight", "module.temporal_evolution.enc.0.block.conv2.bias", "module.temporal_evolution.enc.0.block.attn.weight", "module.temporal_evolution.enc.0.block.attn.bias", "module.temporal_evolution.enc.0.block.norm2.weight", "module.temporal_evolution.enc.0.block.norm2.bias", "module.temporal_evolution.enc.0.block.norm2.running_mean", "module.temporal_evolution.enc.0.block.norm2.running_var", "module.temporal_evolution.enc.0.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.0.block.mlp.fc1.weight", "module.temporal_evolution.enc.0.block.mlp.fc1.bias", 
"module.temporal_evolution.enc.0.block.mlp.fc2.weight", "module.temporal_evolution.enc.0.block.mlp.fc2.bias", "module.temporal_evolution.enc.0.reduction.weight", "module.temporal_evolution.enc.0.reduction.bias", "module.temporal_evolution.enc.1.block.gamma_1", "module.temporal_evolution.enc.1.block.gamma_2", "module.temporal_evolution.enc.1.block.pos_embed.weight", "module.temporal_evolution.enc.1.block.pos_embed.bias", "module.temporal_evolution.enc.1.block.norm1.weight", "module.temporal_evolution.enc.1.block.norm1.bias", "module.temporal_evolution.enc.1.block.attn.qkv.weight", "module.temporal_evolution.enc.1.block.attn.qkv.bias", "module.temporal_evolution.enc.1.block.attn.proj.weight", "module.temporal_evolution.enc.1.block.attn.proj.bias", "module.temporal_evolution.enc.1.block.norm2.weight", "module.temporal_evolution.enc.1.block.norm2.bias", "module.temporal_evolution.enc.1.block.mlp.fc1.weight", "module.temporal_evolution.enc.1.block.mlp.fc1.bias", "module.temporal_evolution.enc.1.block.mlp.fc2.weight", "module.temporal_evolution.enc.1.block.mlp.fc2.bias", "module.temporal_evolution.enc.2.block.gamma_1", "module.temporal_evolution.enc.2.block.gamma_2", "module.temporal_evolution.enc.2.block.pos_embed.weight", "module.temporal_evolution.enc.2.block.pos_embed.bias", "module.temporal_evolution.enc.2.block.norm1.weight", "module.temporal_evolution.enc.2.block.norm1.bias", "module.temporal_evolution.enc.2.block.attn.qkv.weight", "module.temporal_evolution.enc.2.block.attn.qkv.bias", "module.temporal_evolution.enc.2.block.attn.proj.weight", "module.temporal_evolution.enc.2.block.attn.proj.bias", "module.temporal_evolution.enc.2.block.norm2.weight", "module.temporal_evolution.enc.2.block.norm2.bias", "module.temporal_evolution.enc.2.block.mlp.fc1.weight", "module.temporal_evolution.enc.2.block.mlp.fc1.bias", "module.temporal_evolution.enc.2.block.mlp.fc2.weight", "module.temporal_evolution.enc.2.block.mlp.fc2.bias", 
"module.temporal_evolution.enc.3.block.gamma_1", "module.temporal_evolution.enc.3.block.gamma_2", "module.temporal_evolution.enc.3.block.pos_embed.weight", "module.temporal_evolution.enc.3.block.pos_embed.bias", "module.temporal_evolution.enc.3.block.norm1.weight", "module.temporal_evolution.enc.3.block.norm1.bias", "module.temporal_evolution.enc.3.block.attn.qkv.weight", "module.temporal_evolution.enc.3.block.attn.qkv.bias", "module.temporal_evolution.enc.3.block.attn.proj.weight", "module.temporal_evolution.enc.3.block.attn.proj.bias", "module.temporal_evolution.enc.3.block.norm2.weight", "module.temporal_evolution.enc.3.block.norm2.bias", "module.temporal_evolution.enc.3.block.mlp.fc1.weight", "module.temporal_evolution.enc.3.block.mlp.fc1.bias", "module.temporal_evolution.enc.3.block.mlp.fc2.weight", "module.temporal_evolution.enc.3.block.mlp.fc2.bias", "module.temporal_evolution.enc.4.block.gamma_1", "module.temporal_evolution.enc.4.block.gamma_2", "module.temporal_evolution.enc.4.block.pos_embed.weight", "module.temporal_evolution.enc.4.block.pos_embed.bias", "module.temporal_evolution.enc.4.block.norm1.weight", "module.temporal_evolution.enc.4.block.norm1.bias", "module.temporal_evolution.enc.4.block.attn.qkv.weight", "module.temporal_evolution.enc.4.block.attn.qkv.bias", "module.temporal_evolution.enc.4.block.attn.proj.weight", "module.temporal_evolution.enc.4.block.attn.proj.bias", "module.temporal_evolution.enc.4.block.norm2.weight", "module.temporal_evolution.enc.4.block.norm2.bias", "module.temporal_evolution.enc.4.block.mlp.fc1.weight", "module.temporal_evolution.enc.4.block.mlp.fc1.bias", "module.temporal_evolution.enc.4.block.mlp.fc2.weight", "module.temporal_evolution.enc.4.block.mlp.fc2.bias", "module.temporal_evolution.enc.5.block.gamma_1", "module.temporal_evolution.enc.5.block.gamma_2", "module.temporal_evolution.enc.5.block.pos_embed.weight", "module.temporal_evolution.enc.5.block.pos_embed.bias", 
"module.temporal_evolution.enc.5.block.norm1.weight", "module.temporal_evolution.enc.5.block.norm1.bias", "module.temporal_evolution.enc.5.block.attn.qkv.weight", "module.temporal_evolution.enc.5.block.attn.qkv.bias", "module.temporal_evolution.enc.5.block.attn.proj.weight", "module.temporal_evolution.enc.5.block.attn.proj.bias", "module.temporal_evolution.enc.5.block.norm2.weight", "module.temporal_evolution.enc.5.block.norm2.bias", "module.temporal_evolution.enc.5.block.mlp.fc1.weight", "module.temporal_evolution.enc.5.block.mlp.fc1.bias", "module.temporal_evolution.enc.5.block.mlp.fc2.weight", "module.temporal_evolution.enc.5.block.mlp.fc2.bias", "module.temporal_evolution.enc.6.block.gamma_1", "module.temporal_evolution.enc.6.block.gamma_2", "module.temporal_evolution.enc.6.block.pos_embed.weight", "module.temporal_evolution.enc.6.block.pos_embed.bias", "module.temporal_evolution.enc.6.block.norm1.weight", "module.temporal_evolution.enc.6.block.norm1.bias", "module.temporal_evolution.enc.6.block.attn.qkv.weight", "module.temporal_evolution.enc.6.block.attn.qkv.bias", "module.temporal_evolution.enc.6.block.attn.proj.weight", "module.temporal_evolution.enc.6.block.attn.proj.bias", "module.temporal_evolution.enc.6.block.norm2.weight", "module.temporal_evolution.enc.6.block.norm2.bias", "module.temporal_evolution.enc.6.block.mlp.fc1.weight", "module.temporal_evolution.enc.6.block.mlp.fc1.bias", "module.temporal_evolution.enc.6.block.mlp.fc2.weight", "module.temporal_evolution.enc.6.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.block.pos_embed.weight", "module.temporal_evolution.enc.7.block.pos_embed.bias", "module.temporal_evolution.enc.7.block.norm1.weight", "module.temporal_evolution.enc.7.block.norm1.bias", "module.temporal_evolution.enc.7.block.norm1.running_mean", "module.temporal_evolution.enc.7.block.norm1.running_var", "module.temporal_evolution.enc.7.block.norm1.num_batches_tracked", "module.temporal_evolution.enc.7.block.conv1.weight", 
"module.temporal_evolution.enc.7.block.conv1.bias", "module.temporal_evolution.enc.7.block.conv2.weight", "module.temporal_evolution.enc.7.block.conv2.bias", "module.temporal_evolution.enc.7.block.attn.weight", "module.temporal_evolution.enc.7.block.attn.bias", "module.temporal_evolution.enc.7.block.norm2.weight", "module.temporal_evolution.enc.7.block.norm2.bias", "module.temporal_evolution.enc.7.block.norm2.running_mean", "module.temporal_evolution.enc.7.block.norm2.running_var", "module.temporal_evolution.enc.7.block.norm2.num_batches_tracked", "module.temporal_evolution.enc.7.block.mlp.fc1.weight", "module.temporal_evolution.enc.7.block.mlp.fc1.bias", "module.temporal_evolution.enc.7.block.mlp.fc2.weight", "module.temporal_evolution.enc.7.block.mlp.fc2.bias", "module.temporal_evolution.enc.7.reduction.weight", "module.temporal_evolution.enc.7.reduction.bias", "module.atmospheric_decoder.dec.0.conv.conv.weight", "module.atmospheric_decoder.dec.0.conv.conv.bias", "module.atmospheric_decoder.dec.0.conv.norm.weight", "module.atmospheric_decoder.dec.0.conv.norm.bias", "module.atmospheric_decoder.dec.1.conv.conv.weight", "module.atmospheric_decoder.dec.1.conv.conv.bias", "module.atmospheric_decoder.dec.1.conv.norm.weight", "module.atmospheric_decoder.dec.1.conv.norm.bias", "module.atmospheric_decoder.dec.2.conv.conv.weight", "module.atmospheric_decoder.dec.2.conv.conv.bias", "module.atmospheric_decoder.dec.2.conv.norm.weight", "module.atmospheric_decoder.dec.2.conv.norm.bias", "module.atmospheric_decoder.dec.3.conv.conv.weight", "module.atmospheric_decoder.dec.3.conv.conv.bias", "module.atmospheric_decoder.dec.3.conv.norm.weight", "module.atmospheric_decoder.dec.3.conv.norm.bias", "module.atmospheric_decoder.readout.weight", "module.atmospheric_decoder.readout.bias". +2025-02-19 11:11:20,431 Attempting to fix the state_dict by removing "module." prefix. +2025-02-19 11:11:20,450 Model loaded successfully after fixing state_dict. 
+2025-02-19 11:11:29,596 Current Learning Rate: 0.0014644661 +2025-02-19 11:11:29,597 Train Loss: 0.0001557, Val Loss: 0.0001785 +2025-02-19 11:11:29,598 Epoch 1751/2000 +2025-02-19 11:12:12,000 Current Learning Rate: 0.0014093685 +2025-02-19 11:12:12,002 Train Loss: 0.0001411, Val Loss: 0.0001776 +2025-02-19 11:12:12,003 Epoch 1752/2000 +2025-02-19 11:12:54,734 Current Learning Rate: 0.0013551569 +2025-02-19 11:12:56,388 Train Loss: 0.0001373, Val Loss: 0.0001771 +2025-02-19 11:12:56,391 Epoch 1753/2000 +2025-02-19 11:13:37,749 Current Learning Rate: 0.0013018445 +2025-02-19 11:13:37,751 Train Loss: 0.0001384, Val Loss: 0.0001774 +2025-02-19 11:13:37,752 Epoch 1754/2000 +2025-02-19 11:14:20,731 Current Learning Rate: 0.0012494447 +2025-02-19 11:14:22,637 Train Loss: 0.0001578, Val Loss: 0.0001769 +2025-02-19 11:14:22,642 Epoch 1755/2000 +2025-02-19 11:14:47,987 Animation.save using +2025-02-19 11:15:05,149 Current Learning Rate: 0.0011979702 +2025-02-19 11:15:07,072 Train Loss: 0.0001192, Val Loss: 0.0001768 +2025-02-19 11:15:07,083 Epoch 1756/2000 +2025-02-19 11:15:48,401 Current Learning Rate: 0.0011474338 +2025-02-19 11:15:49,919 Train Loss: 0.0001372, Val Loss: 0.0001765 +2025-02-19 11:15:49,937 Epoch 1757/2000 +2025-02-19 11:16:31,266 Current Learning Rate: 0.0010978480 +2025-02-19 11:16:33,116 Train Loss: 0.0001330, Val Loss: 0.0001763 +2025-02-19 11:16:33,119 Epoch 1758/2000 +2025-02-19 11:17:14,593 Current Learning Rate: 0.0010492249 +2025-02-19 11:17:14,596 Train Loss: 0.0001402, Val Loss: 0.0001765 +2025-02-19 11:17:14,597 Epoch 1759/2000 +2025-02-19 11:17:57,568 Current Learning Rate: 0.0010015767 +2025-02-19 11:17:59,466 Train Loss: 0.0001441, Val Loss: 0.0001760 +2025-02-19 11:17:59,469 Epoch 1760/2000 +2025-02-19 11:18:42,065 Current Learning Rate: 0.0009549150 +2025-02-19 11:18:44,162 Train Loss: 0.0001377, Val Loss: 0.0001759 +2025-02-19 11:18:44,165 Epoch 1761/2000 +2025-02-19 11:19:27,020 Current Learning Rate: 0.0009092514 +2025-02-19 
11:19:27,022 Train Loss: 0.0002455, Val Loss: 0.0001764 +2025-02-19 11:19:27,023 Epoch 1762/2000 +2025-02-19 11:20:09,077 Current Learning Rate: 0.0008645971 +2025-02-19 11:20:10,710 Train Loss: 0.0000958, Val Loss: 0.0001755 +2025-02-19 11:20:10,731 Epoch 1763/2000 +2025-02-19 11:20:52,362 Current Learning Rate: 0.0008209632 +2025-02-19 11:20:52,364 Train Loss: 0.0001152, Val Loss: 0.0001756 +2025-02-19 11:20:52,365 Epoch 1764/2000 +2025-02-19 11:21:34,536 Current Learning Rate: 0.0007783604 +2025-02-19 11:21:34,538 Train Loss: 0.0001213, Val Loss: 0.0001756 +2025-02-19 11:21:34,539 Epoch 1765/2000 +2025-02-19 11:22:16,821 Current Learning Rate: 0.0007367992 +2025-02-19 11:22:18,466 Train Loss: 0.0001546, Val Loss: 0.0001754 +2025-02-19 11:22:18,473 Epoch 1766/2000 +2025-02-19 11:22:59,791 Current Learning Rate: 0.0006962899 +2025-02-19 11:22:59,793 Train Loss: 0.0001509, Val Loss: 0.0001757 +2025-02-19 11:22:59,795 Epoch 1767/2000 +2025-02-19 11:23:42,999 Current Learning Rate: 0.0006568424 +2025-02-19 11:23:44,929 Train Loss: 0.0001195, Val Loss: 0.0001753 +2025-02-19 11:23:44,932 Epoch 1768/2000 +2025-02-19 11:24:27,617 Current Learning Rate: 0.0006184666 +2025-02-19 11:24:29,700 Train Loss: 0.0000939, Val Loss: 0.0001752 +2025-02-19 11:24:29,704 Epoch 1769/2000 +2025-02-19 11:25:12,818 Current Learning Rate: 0.0005811718 +2025-02-19 11:25:14,711 Train Loss: 0.0001764, Val Loss: 0.0001752 +2025-02-19 11:25:14,714 Epoch 1770/2000 +2025-02-19 11:25:57,441 Current Learning Rate: 0.0005449674 +2025-02-19 11:25:57,442 Train Loss: 0.0001636, Val Loss: 0.0001752 +2025-02-19 11:25:57,443 Epoch 1771/2000 +2025-02-19 11:26:40,233 Current Learning Rate: 0.0005098621 +2025-02-19 11:26:42,090 Train Loss: 0.0001524, Val Loss: 0.0001751 +2025-02-19 11:26:42,095 Epoch 1772/2000 +2025-02-19 11:27:24,449 Current Learning Rate: 0.0004758647 +2025-02-19 11:27:26,282 Train Loss: 0.0001196, Val Loss: 0.0001750 +2025-02-19 11:27:26,285 Epoch 1773/2000 +2025-02-19 11:28:07,767 Current 
Learning Rate: 0.0004429836 +2025-02-19 11:28:09,145 Train Loss: 0.0001205, Val Loss: 0.0001750 +2025-02-19 11:28:09,148 Epoch 1774/2000 +2025-02-19 11:28:50,611 Current Learning Rate: 0.0004112269 +2025-02-19 11:28:52,227 Train Loss: 0.0001419, Val Loss: 0.0001748 +2025-02-19 11:28:52,230 Epoch 1775/2000 +2025-02-19 11:29:35,014 Current Learning Rate: 0.0003806023 +2025-02-19 11:29:37,108 Train Loss: 0.0001105, Val Loss: 0.0001748 +2025-02-19 11:29:37,117 Epoch 1776/2000 +2025-02-19 11:30:19,674 Current Learning Rate: 0.0003511176 +2025-02-19 11:30:21,635 Train Loss: 0.0001147, Val Loss: 0.0001747 +2025-02-19 11:30:21,639 Epoch 1777/2000 +2025-02-19 11:31:04,174 Current Learning Rate: 0.0003227798 +2025-02-19 11:31:05,900 Train Loss: 0.0001228, Val Loss: 0.0001745 +2025-02-19 11:31:05,902 Epoch 1778/2000 +2025-02-19 11:31:47,100 Current Learning Rate: 0.0002955962 +2025-02-19 11:31:48,390 Train Loss: 0.0001214, Val Loss: 0.0001745 +2025-02-19 11:31:48,392 Epoch 1779/2000 +2025-02-19 11:32:30,885 Current Learning Rate: 0.0002695732 +2025-02-19 11:32:32,828 Train Loss: 0.0000931, Val Loss: 0.0001744 +2025-02-19 11:32:32,831 Epoch 1780/2000 +2025-02-19 11:33:14,304 Current Learning Rate: 0.0002447174 +2025-02-19 11:33:15,737 Train Loss: 0.0001198, Val Loss: 0.0001743 +2025-02-19 11:33:15,746 Epoch 1781/2000 +2025-02-19 11:33:58,237 Current Learning Rate: 0.0002210349 +2025-02-19 11:33:58,239 Train Loss: 0.0001610, Val Loss: 0.0001744 +2025-02-19 11:33:58,240 Epoch 1782/2000 +2025-02-19 11:34:40,850 Current Learning Rate: 0.0001985316 +2025-02-19 11:34:42,710 Train Loss: 0.0001984, Val Loss: 0.0001743 +2025-02-19 11:34:42,713 Epoch 1783/2000 +2025-02-19 11:35:24,907 Current Learning Rate: 0.0001772129 +2025-02-19 11:35:24,939 Train Loss: 0.0001247, Val Loss: 0.0001743 +2025-02-19 11:35:24,953 Epoch 1784/2000 +2025-02-19 11:36:07,362 Current Learning Rate: 0.0001570842 +2025-02-19 11:36:08,788 Train Loss: 0.0001221, Val Loss: 0.0001743 +2025-02-19 11:36:08,801 Epoch 
1785/2000 +2025-02-19 11:36:51,782 Current Learning Rate: 0.0001381504 +2025-02-19 11:36:51,784 Train Loss: 0.0001060, Val Loss: 0.0001743 +2025-02-19 11:36:51,785 Epoch 1786/2000 +2025-02-19 11:37:33,812 Current Learning Rate: 0.0001204162 +2025-02-19 11:37:33,814 Train Loss: 0.0001111, Val Loss: 0.0001743 +2025-02-19 11:37:33,815 Epoch 1787/2000 +2025-02-19 11:38:15,886 Current Learning Rate: 0.0001038859 +2025-02-19 11:38:15,888 Train Loss: 0.0001388, Val Loss: 0.0001743 +2025-02-19 11:38:15,889 Epoch 1788/2000 +2025-02-19 11:38:58,895 Current Learning Rate: 0.0000885637 +2025-02-19 11:38:58,898 Train Loss: 0.0001245, Val Loss: 0.0001743 +2025-02-19 11:38:58,899 Epoch 1789/2000 +2025-02-19 11:39:41,920 Current Learning Rate: 0.0000744534 +2025-02-19 11:39:43,671 Train Loss: 0.0001501, Val Loss: 0.0001742 +2025-02-19 11:39:43,674 Epoch 1790/2000 +2025-02-19 11:40:25,875 Current Learning Rate: 0.0000615583 +2025-02-19 11:40:25,877 Train Loss: 0.0001453, Val Loss: 0.0001743 +2025-02-19 11:40:25,878 Epoch 1791/2000 +2025-02-19 11:41:09,006 Current Learning Rate: 0.0000498817 +2025-02-19 11:41:09,008 Train Loss: 0.0001278, Val Loss: 0.0001742 +2025-02-19 11:41:09,009 Epoch 1792/2000 +2025-02-19 11:41:51,930 Current Learning Rate: 0.0000394265 +2025-02-19 11:41:53,386 Train Loss: 0.0001380, Val Loss: 0.0001742 +2025-02-19 11:41:53,392 Epoch 1793/2000 +2025-02-19 11:42:34,808 Current Learning Rate: 0.0000301952 +2025-02-19 11:42:34,810 Train Loss: 0.0000980, Val Loss: 0.0001742 +2025-02-19 11:42:34,811 Epoch 1794/2000 +2025-02-19 11:43:17,880 Current Learning Rate: 0.0000221902 +2025-02-19 11:43:19,241 Train Loss: 0.0001129, Val Loss: 0.0001742 +2025-02-19 11:43:19,244 Epoch 1795/2000 +2025-02-19 11:44:02,098 Current Learning Rate: 0.0000154133 +2025-02-19 11:44:03,559 Train Loss: 0.0001571, Val Loss: 0.0001741 +2025-02-19 11:44:03,562 Epoch 1796/2000 +2025-02-19 11:44:46,260 Current Learning Rate: 0.0000098664 +2025-02-19 11:44:46,262 Train Loss: 0.0001403, Val Loss: 
0.0001742 +2025-02-19 11:44:46,263 Epoch 1797/2000 +2025-02-19 11:45:29,552 Current Learning Rate: 0.0000055506 +2025-02-19 11:45:29,554 Train Loss: 0.0001222, Val Loss: 0.0001742 +2025-02-19 11:45:29,581 Epoch 1798/2000 +2025-02-19 11:46:11,982 Current Learning Rate: 0.0000024672 +2025-02-19 11:46:11,985 Train Loss: 0.0001442, Val Loss: 0.0001742 +2025-02-19 11:46:11,986 Epoch 1799/2000 +2025-02-19 11:46:54,882 Current Learning Rate: 0.0000006168 +2025-02-19 11:46:56,639 Train Loss: 0.0001533, Val Loss: 0.0001741 +2025-02-19 11:46:56,642 Epoch 1800/2000 +2025-02-19 11:47:39,456 Current Learning Rate: 0.0000000000 +2025-02-19 11:47:39,459 Train Loss: 0.0001512, Val Loss: 0.0001741 +2025-02-19 11:47:39,460 Epoch 1801/2000 +2025-02-19 11:48:22,567 Current Learning Rate: 0.0000006168 +2025-02-19 11:48:22,586 Train Loss: 0.0001399, Val Loss: 0.0001742 +2025-02-19 11:48:22,588 Epoch 1802/2000 +2025-02-19 11:49:05,407 Current Learning Rate: 0.0000024672 +2025-02-19 11:49:05,409 Train Loss: 0.0001749, Val Loss: 0.0001741 +2025-02-19 11:49:05,410 Epoch 1803/2000 +2025-02-19 11:49:47,644 Current Learning Rate: 0.0000055506 +2025-02-19 11:49:47,646 Train Loss: 0.0001430, Val Loss: 0.0001741 +2025-02-19 11:49:47,647 Epoch 1804/2000 +2025-02-19 11:50:30,011 Current Learning Rate: 0.0000098664 +2025-02-19 11:50:30,013 Train Loss: 0.0001170, Val Loss: 0.0001742 +2025-02-19 11:50:30,014 Epoch 1805/2000 +2025-02-19 11:51:11,955 Current Learning Rate: 0.0000154133 +2025-02-19 11:51:11,957 Train Loss: 0.0001191, Val Loss: 0.0001742 +2025-02-19 11:51:11,958 Epoch 1806/2000 +2025-02-19 11:51:54,716 Current Learning Rate: 0.0000221902 +2025-02-19 11:51:54,718 Train Loss: 0.0001110, Val Loss: 0.0001742 +2025-02-19 11:51:54,719 Epoch 1807/2000 +2025-02-19 11:52:36,481 Current Learning Rate: 0.0000301952 +2025-02-19 11:52:36,483 Train Loss: 0.0001361, Val Loss: 0.0001741 +2025-02-19 11:52:36,484 Epoch 1808/2000 +2025-02-19 11:53:18,753 Current Learning Rate: 0.0000394265 +2025-02-19 
11:53:18,755 Train Loss: 0.0001192, Val Loss: 0.0001742 +2025-02-19 11:53:18,756 Epoch 1809/2000 +2025-02-19 11:54:01,167 Current Learning Rate: 0.0000498817 +2025-02-19 11:54:01,169 Train Loss: 0.0001584, Val Loss: 0.0001741 +2025-02-19 11:54:01,170 Epoch 1810/2000 +2025-02-19 11:54:44,568 Current Learning Rate: 0.0000615583 +2025-02-19 11:54:44,570 Train Loss: 0.0001216, Val Loss: 0.0001742 +2025-02-19 11:54:44,571 Epoch 1811/2000 +2025-02-19 11:55:27,446 Current Learning Rate: 0.0000744534 +2025-02-19 11:55:27,449 Train Loss: 0.0001477, Val Loss: 0.0001742 +2025-02-19 11:55:27,450 Epoch 1812/2000 +2025-02-19 11:56:10,295 Current Learning Rate: 0.0000885637 +2025-02-19 11:56:10,323 Train Loss: 0.0001707, Val Loss: 0.0001742 +2025-02-19 11:56:10,335 Epoch 1813/2000 +2025-02-19 11:56:53,208 Current Learning Rate: 0.0001038859 +2025-02-19 11:56:53,210 Train Loss: 0.0001253, Val Loss: 0.0001742 +2025-02-19 11:56:53,211 Epoch 1814/2000 +2025-02-19 11:57:35,351 Current Learning Rate: 0.0001204162 +2025-02-19 11:57:35,354 Train Loss: 0.0001129, Val Loss: 0.0001743 +2025-02-19 11:57:35,355 Epoch 1815/2000 +2025-02-19 11:58:17,273 Current Learning Rate: 0.0001381504 +2025-02-19 11:58:17,276 Train Loss: 0.0001055, Val Loss: 0.0001742 +2025-02-19 11:58:17,277 Epoch 1816/2000 +2025-02-19 11:59:00,086 Current Learning Rate: 0.0001570842 +2025-02-19 11:59:00,089 Train Loss: 0.0001858, Val Loss: 0.0001742 +2025-02-19 11:59:00,090 Epoch 1817/2000 +2025-02-19 11:59:42,882 Current Learning Rate: 0.0001772129 +2025-02-19 11:59:42,918 Train Loss: 0.0000986, Val Loss: 0.0001742 +2025-02-19 11:59:42,934 Epoch 1818/2000 +2025-02-19 12:00:24,837 Current Learning Rate: 0.0001985316 +2025-02-19 12:00:24,839 Train Loss: 0.0001297, Val Loss: 0.0001742 +2025-02-19 12:00:24,841 Epoch 1819/2000 +2025-02-19 12:01:08,031 Current Learning Rate: 0.0002210349 +2025-02-19 12:01:08,045 Train Loss: 0.0000983, Val Loss: 0.0001743 +2025-02-19 12:01:08,055 Epoch 1820/2000 +2025-02-19 12:01:50,556 Current 
Learning Rate: 0.0002447174 +2025-02-19 12:01:50,559 Train Loss: 0.0001394, Val Loss: 0.0001742 +2025-02-19 12:01:50,560 Epoch 1821/2000 +2025-02-19 12:02:33,642 Current Learning Rate: 0.0002695732 +2025-02-19 12:02:33,644 Train Loss: 0.0001232, Val Loss: 0.0001745 +2025-02-19 12:02:33,646 Epoch 1822/2000 +2025-02-19 12:03:16,185 Current Learning Rate: 0.0002955962 +2025-02-19 12:03:16,188 Train Loss: 0.0001277, Val Loss: 0.0001745 +2025-02-19 12:03:16,189 Epoch 1823/2000 +2025-02-19 12:03:59,199 Current Learning Rate: 0.0003227798 +2025-02-19 12:03:59,201 Train Loss: 0.0001523, Val Loss: 0.0001745 +2025-02-19 12:03:59,202 Epoch 1824/2000 +2025-02-19 12:04:42,066 Current Learning Rate: 0.0003511176 +2025-02-19 12:04:42,068 Train Loss: 0.0001470, Val Loss: 0.0001744 +2025-02-19 12:04:42,069 Epoch 1825/2000 +2025-02-19 12:05:23,947 Current Learning Rate: 0.0003806023 +2025-02-19 12:05:23,949 Train Loss: 0.0001038, Val Loss: 0.0001745 +2025-02-19 12:05:23,950 Epoch 1826/2000 +2025-02-19 12:06:06,651 Current Learning Rate: 0.0004112269 +2025-02-19 12:06:06,653 Train Loss: 0.0001119, Val Loss: 0.0001745 +2025-02-19 12:06:06,654 Epoch 1827/2000 +2025-02-19 12:06:50,039 Current Learning Rate: 0.0004429836 +2025-02-19 12:06:50,041 Train Loss: 0.0001061, Val Loss: 0.0001744 +2025-02-19 12:06:50,042 Epoch 1828/2000 +2025-02-19 12:07:32,401 Current Learning Rate: 0.0004758647 +2025-02-19 12:07:32,403 Train Loss: 0.0001283, Val Loss: 0.0001744 +2025-02-19 12:07:32,404 Epoch 1829/2000 +2025-02-19 12:08:15,451 Current Learning Rate: 0.0005098621 +2025-02-19 12:08:15,453 Train Loss: 0.0001414, Val Loss: 0.0001744 +2025-02-19 12:08:15,454 Epoch 1830/2000 +2025-02-19 12:08:57,536 Current Learning Rate: 0.0005449674 +2025-02-19 12:08:57,548 Train Loss: 0.0001372, Val Loss: 0.0001744 +2025-02-19 12:08:57,549 Epoch 1831/2000 +2025-02-19 12:09:41,181 Current Learning Rate: 0.0005811718 +2025-02-19 12:09:41,183 Train Loss: 0.0001302, Val Loss: 0.0001746 +2025-02-19 12:09:41,185 Epoch 
1832/2000 +2025-02-19 12:10:23,757 Current Learning Rate: 0.0006184666 +2025-02-19 12:10:23,770 Train Loss: 0.0001197, Val Loss: 0.0001746 +2025-02-19 12:10:23,771 Epoch 1833/2000 +2025-02-19 12:11:05,801 Current Learning Rate: 0.0006568424 +2025-02-19 12:11:05,803 Train Loss: 0.0001108, Val Loss: 0.0001746 +2025-02-19 12:11:05,804 Epoch 1834/2000 +2025-02-19 12:11:48,264 Current Learning Rate: 0.0006962899 +2025-02-19 12:11:48,266 Train Loss: 0.0001037, Val Loss: 0.0001744 +2025-02-19 12:11:48,267 Epoch 1835/2000 +2025-02-19 12:12:30,671 Current Learning Rate: 0.0007367992 +2025-02-19 12:12:30,673 Train Loss: 0.0001293, Val Loss: 0.0001746 +2025-02-19 12:12:30,674 Epoch 1836/2000 +2025-02-19 12:13:12,733 Current Learning Rate: 0.0007783604 +2025-02-19 12:13:12,736 Train Loss: 0.0001777, Val Loss: 0.0001746 +2025-02-19 12:13:12,737 Epoch 1837/2000 +2025-02-19 12:13:55,874 Current Learning Rate: 0.0008209632 +2025-02-19 12:13:55,876 Train Loss: 0.0000935, Val Loss: 0.0001746 +2025-02-19 12:13:55,877 Epoch 1838/2000 +2025-02-19 12:14:37,760 Current Learning Rate: 0.0008645971 +2025-02-19 12:14:37,763 Train Loss: 0.0001468, Val Loss: 0.0001747 +2025-02-19 12:14:37,765 Epoch 1839/2000 +2025-02-19 12:15:20,912 Current Learning Rate: 0.0009092514 +2025-02-19 12:15:20,924 Train Loss: 0.0001294, Val Loss: 0.0001750 +2025-02-19 12:15:20,924 Epoch 1840/2000 +2025-02-19 12:16:04,075 Current Learning Rate: 0.0009549150 +2025-02-19 12:16:04,077 Train Loss: 0.0001095, Val Loss: 0.0001759 +2025-02-19 12:16:04,078 Epoch 1841/2000 +2025-02-19 12:16:47,103 Current Learning Rate: 0.0010015767 +2025-02-19 12:16:47,105 Train Loss: 0.0001207, Val Loss: 0.0001748 +2025-02-19 12:16:47,106 Epoch 1842/2000 +2025-02-19 12:17:29,899 Current Learning Rate: 0.0010492249 +2025-02-19 12:17:29,901 Train Loss: 0.0001532, Val Loss: 0.0001756 +2025-02-19 12:17:29,902 Epoch 1843/2000 +2025-02-19 12:18:12,923 Current Learning Rate: 0.0010978480 +2025-02-19 12:18:12,925 Train Loss: 0.0001866, Val Loss: 
0.0001762 +2025-02-19 12:18:12,926 Epoch 1844/2000 +2025-02-19 12:18:55,950 Current Learning Rate: 0.0011474338 +2025-02-19 12:18:55,951 Train Loss: 0.0001799, Val Loss: 0.0001807 +2025-02-19 12:18:55,952 Epoch 1845/2000 +2025-02-19 12:19:38,629 Current Learning Rate: 0.0011979702 +2025-02-19 12:19:38,634 Train Loss: 0.0001183, Val Loss: 0.0001773 +2025-02-19 12:19:38,637 Epoch 1846/2000 +2025-02-19 12:20:22,246 Current Learning Rate: 0.0012494447 +2025-02-19 12:20:22,252 Train Loss: 0.0001512, Val Loss: 0.0001773 +2025-02-19 12:20:22,253 Epoch 1847/2000 +2025-02-19 12:21:05,662 Current Learning Rate: 0.0013018445 +2025-02-19 12:21:05,664 Train Loss: 0.0001506, Val Loss: 0.0001775 +2025-02-19 12:21:05,665 Epoch 1848/2000 +2025-02-19 12:21:49,080 Current Learning Rate: 0.0013551569 +2025-02-19 12:21:49,082 Train Loss: 0.0001220, Val Loss: 0.0001756 +2025-02-19 12:21:49,083 Epoch 1849/2000 +2025-02-19 12:22:32,706 Current Learning Rate: 0.0014093685 +2025-02-19 12:22:32,708 Train Loss: 0.0001570, Val Loss: 0.0001761 +2025-02-19 12:22:32,709 Epoch 1850/2000 +2025-02-19 12:23:15,700 Current Learning Rate: 0.0014644661 +2025-02-19 12:23:15,702 Train Loss: 0.0001315, Val Loss: 0.0001758 +2025-02-19 12:23:15,704 Epoch 1851/2000 +2025-02-19 12:23:59,036 Current Learning Rate: 0.0015204360 +2025-02-19 12:23:59,039 Train Loss: 0.0001497, Val Loss: 0.0001762 +2025-02-19 12:23:59,074 Epoch 1852/2000 +2025-02-19 12:24:42,234 Current Learning Rate: 0.0015772645 +2025-02-19 12:24:42,238 Train Loss: 0.0001273, Val Loss: 0.0001767 +2025-02-19 12:24:42,246 Epoch 1853/2000 +2025-02-19 12:25:24,558 Current Learning Rate: 0.0016349374 +2025-02-19 12:25:24,560 Train Loss: 0.0001386, Val Loss: 0.0001770 +2025-02-19 12:25:24,562 Epoch 1854/2000 +2025-02-19 12:26:07,061 Current Learning Rate: 0.0016934407 +2025-02-19 12:26:07,063 Train Loss: 0.0001244, Val Loss: 0.0001767 +2025-02-19 12:26:07,064 Epoch 1855/2000 +2025-02-19 12:26:50,560 Current Learning Rate: 0.0017527598 +2025-02-19 
12:26:50,562 Train Loss: 0.0001597, Val Loss: 0.0001763 +2025-02-19 12:26:50,564 Epoch 1856/2000 +2025-02-19 12:27:33,496 Current Learning Rate: 0.0018128801 +2025-02-19 12:27:33,498 Train Loss: 0.0001338, Val Loss: 0.0001765 +2025-02-19 12:27:33,499 Epoch 1857/2000 +2025-02-19 12:28:16,591 Current Learning Rate: 0.0018737867 +2025-02-19 12:28:16,593 Train Loss: 0.0001112, Val Loss: 0.0001756 +2025-02-19 12:28:16,594 Epoch 1858/2000 +2025-02-19 12:28:59,714 Current Learning Rate: 0.0019354647 +2025-02-19 12:28:59,728 Train Loss: 0.0001042, Val Loss: 0.0001755 +2025-02-19 12:28:59,729 Epoch 1859/2000 +2025-02-19 12:29:42,375 Current Learning Rate: 0.0019978989 +2025-02-19 12:29:42,378 Train Loss: 0.0001398, Val Loss: 0.0001781 +2025-02-19 12:29:42,380 Epoch 1860/2000 +2025-02-19 12:30:25,218 Current Learning Rate: 0.0020610737 +2025-02-19 12:30:25,220 Train Loss: 0.0001001, Val Loss: 0.0001758 +2025-02-19 12:30:25,221 Epoch 1861/2000 +2025-02-19 12:31:07,200 Current Learning Rate: 0.0021249737 +2025-02-19 12:31:07,202 Train Loss: 0.0001824, Val Loss: 0.0001930 +2025-02-19 12:31:07,203 Epoch 1862/2000 +2025-02-19 12:31:50,163 Current Learning Rate: 0.0021895831 +2025-02-19 12:31:50,165 Train Loss: 0.0001672, Val Loss: 0.0001807 +2025-02-19 12:31:50,166 Epoch 1863/2000 +2025-02-19 12:32:33,415 Current Learning Rate: 0.0022548859 +2025-02-19 12:32:33,452 Train Loss: 0.0001289, Val Loss: 0.0001816 +2025-02-19 12:32:33,464 Epoch 1864/2000 +2025-02-19 12:33:16,080 Current Learning Rate: 0.0023208660 +2025-02-19 12:33:16,082 Train Loss: 0.0001369, Val Loss: 0.0001785 +2025-02-19 12:33:16,083 Epoch 1865/2000 +2025-02-19 12:33:58,243 Current Learning Rate: 0.0023875072 +2025-02-19 12:33:58,244 Train Loss: 0.0001618, Val Loss: 0.0001838 +2025-02-19 12:33:58,246 Epoch 1866/2000 +2025-02-19 12:34:40,625 Current Learning Rate: 0.0024547929 +2025-02-19 12:34:40,631 Train Loss: 0.0001429, Val Loss: 0.0001811 +2025-02-19 12:34:40,635 Epoch 1867/2000 +2025-02-19 12:35:23,548 Current 
Learning Rate: 0.0025227067 +2025-02-19 12:35:23,550 Train Loss: 0.0001820, Val Loss: 0.0001815 +2025-02-19 12:35:23,551 Epoch 1868/2000 +2025-02-19 12:36:06,908 Current Learning Rate: 0.0025912316 +2025-02-19 12:36:06,910 Train Loss: 0.0001412, Val Loss: 0.0001773 +2025-02-19 12:36:06,911 Epoch 1869/2000 +2025-02-19 12:36:49,766 Current Learning Rate: 0.0026603509 +2025-02-19 12:36:49,768 Train Loss: 0.0001425, Val Loss: 0.0001778 +2025-02-19 12:36:49,770 Epoch 1870/2000 +2025-02-19 12:37:32,337 Current Learning Rate: 0.0027300475 +2025-02-19 12:37:32,339 Train Loss: 0.0001261, Val Loss: 0.0001773 +2025-02-19 12:37:32,340 Epoch 1871/2000 +2025-02-19 12:38:15,409 Current Learning Rate: 0.0028003042 +2025-02-19 12:38:15,411 Train Loss: 0.0001707, Val Loss: 0.0002012 +2025-02-19 12:38:15,412 Epoch 1872/2000 +2025-02-19 12:38:58,870 Current Learning Rate: 0.0028711035 +2025-02-19 12:38:58,880 Train Loss: 0.0001108, Val Loss: 0.0001784 +2025-02-19 12:38:58,883 Epoch 1873/2000 +2025-02-19 12:39:42,503 Current Learning Rate: 0.0029424282 +2025-02-19 12:39:42,505 Train Loss: 0.0001836, Val Loss: 0.0001955 +2025-02-19 12:39:42,507 Epoch 1874/2000 +2025-02-19 12:40:24,627 Current Learning Rate: 0.0030142605 +2025-02-19 12:40:24,630 Train Loss: 0.0001284, Val Loss: 0.0001809 +2025-02-19 12:40:24,632 Epoch 1875/2000 +2025-02-19 12:41:07,310 Current Learning Rate: 0.0030865828 +2025-02-19 12:41:07,317 Train Loss: 0.0001500, Val Loss: 0.0001807 +2025-02-19 12:41:07,322 Epoch 1876/2000 +2025-02-19 12:41:50,456 Current Learning Rate: 0.0031593772 +2025-02-19 12:41:50,458 Train Loss: 0.0001474, Val Loss: 0.0001829 +2025-02-19 12:41:50,459 Epoch 1877/2000 +2025-02-19 12:42:32,299 Current Learning Rate: 0.0032326258 +2025-02-19 12:42:32,301 Train Loss: 0.0001471, Val Loss: 0.0001927 +2025-02-19 12:42:32,302 Epoch 1878/2000 +2025-02-19 12:43:15,459 Current Learning Rate: 0.0033063104 +2025-02-19 12:43:15,461 Train Loss: 0.0001239, Val Loss: 0.0001840 +2025-02-19 12:43:15,462 Epoch 
1879/2000 +2025-02-19 12:43:57,982 Current Learning Rate: 0.0033804129 +2025-02-19 12:43:57,985 Train Loss: 0.0001551, Val Loss: 0.0001843 +2025-02-19 12:43:57,985 Epoch 1880/2000 +2025-02-19 12:44:41,661 Current Learning Rate: 0.0034549150 +2025-02-19 12:44:41,662 Train Loss: 0.0001069, Val Loss: 0.0001824 +2025-02-19 12:44:41,663 Epoch 1881/2000 +2025-02-19 12:45:24,891 Current Learning Rate: 0.0035297984 +2025-02-19 12:45:24,892 Train Loss: 0.0001347, Val Loss: 0.0001799 +2025-02-19 12:45:24,893 Epoch 1882/2000 +2025-02-19 12:46:07,836 Current Learning Rate: 0.0036050445 +2025-02-19 12:46:07,838 Train Loss: 0.0001391, Val Loss: 0.0001903 +2025-02-19 12:46:07,839 Epoch 1883/2000 +2025-02-19 12:46:50,953 Current Learning Rate: 0.0036806348 +2025-02-19 12:46:50,955 Train Loss: 0.0001913, Val Loss: 0.0001860 +2025-02-19 12:46:50,956 Epoch 1884/2000 +2025-02-19 12:47:33,816 Current Learning Rate: 0.0037565506 +2025-02-19 12:47:33,817 Train Loss: 0.0001617, Val Loss: 0.0001902 +2025-02-19 12:47:33,818 Epoch 1885/2000 +2025-02-19 12:48:16,277 Current Learning Rate: 0.0038327732 +2025-02-19 12:48:16,296 Train Loss: 0.0001117, Val Loss: 0.0001784 +2025-02-19 12:48:16,308 Epoch 1886/2000 +2025-02-19 12:48:58,598 Current Learning Rate: 0.0039092838 +2025-02-19 12:48:58,600 Train Loss: 0.0001097, Val Loss: 0.0001794 +2025-02-19 12:48:58,601 Epoch 1887/2000 +2025-02-19 12:49:41,327 Current Learning Rate: 0.0039860635 +2025-02-19 12:49:41,329 Train Loss: 0.0001837, Val Loss: 0.0002079 +2025-02-19 12:49:41,330 Epoch 1888/2000 +2025-02-19 12:50:24,139 Current Learning Rate: 0.0040630934 +2025-02-19 12:50:24,141 Train Loss: 0.0001462, Val Loss: 0.0002011 +2025-02-19 12:50:24,142 Epoch 1889/2000 +2025-02-19 12:51:07,131 Current Learning Rate: 0.0041403545 +2025-02-19 12:51:07,133 Train Loss: 0.0001310, Val Loss: 0.0001901 +2025-02-19 12:51:07,134 Epoch 1890/2000 +2025-02-19 12:51:50,033 Current Learning Rate: 0.0042178277 +2025-02-19 12:51:50,034 Train Loss: 0.0001309, Val Loss: 
0.0001880 +2025-02-19 12:51:50,035 Epoch 1891/2000 +2025-02-19 12:52:33,061 Current Learning Rate: 0.0042954938 +2025-02-19 12:52:33,063 Train Loss: 0.0001679, Val Loss: 0.0001867 +2025-02-19 12:52:33,064 Epoch 1892/2000 +2025-02-19 12:53:16,272 Current Learning Rate: 0.0043733338 +2025-02-19 12:53:16,274 Train Loss: 0.0001340, Val Loss: 0.0001908 +2025-02-19 12:53:16,275 Epoch 1893/2000 +2025-02-19 12:53:58,604 Current Learning Rate: 0.0044513284 +2025-02-19 12:53:58,606 Train Loss: 0.0001083, Val Loss: 0.0001788 +2025-02-19 12:53:58,607 Epoch 1894/2000 +2025-02-19 12:54:40,754 Current Learning Rate: 0.0045294584 +2025-02-19 12:54:40,756 Train Loss: 0.0001780, Val Loss: 0.0002197 +2025-02-19 12:54:40,757 Epoch 1895/2000 +2025-02-19 12:55:23,753 Current Learning Rate: 0.0046077045 +2025-02-19 12:55:23,755 Train Loss: 0.0001931, Val Loss: 0.0002177 +2025-02-19 12:55:23,756 Epoch 1896/2000 +2025-02-19 12:56:06,207 Current Learning Rate: 0.0046860474 +2025-02-19 12:56:06,209 Train Loss: 0.0001484, Val Loss: 0.0001954 +2025-02-19 12:56:06,210 Epoch 1897/2000 +2025-02-19 12:56:48,579 Current Learning Rate: 0.0047644677 +2025-02-19 12:56:48,581 Train Loss: 0.0002076, Val Loss: 0.0002016 +2025-02-19 12:56:48,582 Epoch 1898/2000 +2025-02-19 12:57:30,641 Current Learning Rate: 0.0048429462 +2025-02-19 12:57:30,643 Train Loss: 0.0001683, Val Loss: 0.0001948 +2025-02-19 12:57:30,644 Epoch 1899/2000 +2025-02-19 12:58:13,226 Current Learning Rate: 0.0049214634 +2025-02-19 12:58:13,227 Train Loss: 0.0001474, Val Loss: 0.0001895 +2025-02-19 12:58:13,228 Epoch 1900/2000 +2025-02-19 12:58:56,372 Current Learning Rate: 0.0050000000 +2025-02-19 12:58:56,374 Train Loss: 0.0001495, Val Loss: 0.0001913 +2025-02-19 12:58:56,375 Epoch 1901/2000 +2025-02-19 12:59:39,317 Current Learning Rate: 0.0050785366 +2025-02-19 12:59:39,319 Train Loss: 0.0001509, Val Loss: 0.0001909 +2025-02-19 12:59:39,320 Epoch 1902/2000 +2025-02-19 13:00:21,728 Current Learning Rate: 0.0051570538 +2025-02-19 
13:00:21,730 Train Loss: 0.0001277, Val Loss: 0.0001865 +2025-02-19 13:00:21,731 Epoch 1903/2000 +2025-02-19 13:01:03,791 Current Learning Rate: 0.0052355323 +2025-02-19 13:01:03,793 Train Loss: 0.0001968, Val Loss: 0.0002157 +2025-02-19 13:01:03,794 Epoch 1904/2000 +2025-02-19 13:01:46,164 Current Learning Rate: 0.0053139526 +2025-02-19 13:01:46,166 Train Loss: 0.0001786, Val Loss: 0.0002102 +2025-02-19 13:01:46,167 Epoch 1905/2000 +2025-02-19 13:02:28,463 Current Learning Rate: 0.0053922955 +2025-02-19 13:02:28,467 Train Loss: 0.0002122, Val Loss: 0.0002331 +2025-02-19 13:02:28,468 Epoch 1906/2000 +2025-02-19 13:03:10,775 Current Learning Rate: 0.0054705416 +2025-02-19 13:03:10,777 Train Loss: 0.0001955, Val Loss: 0.0002446 +2025-02-19 13:03:10,780 Epoch 1907/2000 +2025-02-19 13:03:53,641 Current Learning Rate: 0.0055486716 +2025-02-19 13:03:53,643 Train Loss: 0.0001329, Val Loss: 0.0002061 +2025-02-19 13:03:53,644 Epoch 1908/2000 +2025-02-19 13:04:36,065 Current Learning Rate: 0.0056266662 +2025-02-19 13:04:36,067 Train Loss: 0.0001447, Val Loss: 0.0002013 +2025-02-19 13:04:36,068 Epoch 1909/2000 +2025-02-19 13:05:18,580 Current Learning Rate: 0.0057045062 +2025-02-19 13:05:18,582 Train Loss: 0.0001538, Val Loss: 0.0002521 +2025-02-19 13:05:18,583 Epoch 1910/2000 +2025-02-19 13:06:00,780 Current Learning Rate: 0.0057821723 +2025-02-19 13:06:00,782 Train Loss: 0.0001372, Val Loss: 0.0002132 +2025-02-19 13:06:00,784 Epoch 1911/2000 +2025-02-19 13:06:42,989 Current Learning Rate: 0.0058596455 +2025-02-19 13:06:42,991 Train Loss: 0.0002274, Val Loss: 0.0002715 +2025-02-19 13:06:42,992 Epoch 1912/2000 +2025-02-19 13:07:25,288 Current Learning Rate: 0.0059369066 +2025-02-19 13:07:25,290 Train Loss: 0.0002275, Val Loss: 0.0002161 +2025-02-19 13:07:25,291 Epoch 1913/2000 +2025-02-19 13:08:08,528 Current Learning Rate: 0.0060139365 +2025-02-19 13:08:08,530 Train Loss: 0.0001516, Val Loss: 0.0002050 +2025-02-19 13:08:08,531 Epoch 1914/2000 +2025-02-19 13:08:50,689 Current 
Learning Rate: 0.0060907162 +2025-02-19 13:08:50,690 Train Loss: 0.0001514, Val Loss: 0.0002046 +2025-02-19 13:08:50,691 Epoch 1915/2000 +2025-02-19 13:09:33,697 Current Learning Rate: 0.0061672268 +2025-02-19 13:09:33,698 Train Loss: 0.0001509, Val Loss: 0.0002016 +2025-02-19 13:09:33,699 Epoch 1916/2000 +2025-02-19 13:10:16,147 Current Learning Rate: 0.0062434494 +2025-02-19 13:10:16,149 Train Loss: 0.0001351, Val Loss: 0.0002077 +2025-02-19 13:10:16,150 Epoch 1917/2000 +2025-02-19 13:10:59,057 Current Learning Rate: 0.0063193652 +2025-02-19 13:10:59,059 Train Loss: 0.0001638, Val Loss: 0.0002379 +2025-02-19 13:10:59,060 Epoch 1918/2000 +2025-02-19 13:11:41,942 Current Learning Rate: 0.0063949555 +2025-02-19 13:11:41,944 Train Loss: 0.0001867, Val Loss: 0.0002167 +2025-02-19 13:11:41,981 Epoch 1919/2000 +2025-02-19 13:12:24,970 Current Learning Rate: 0.0064702016 +2025-02-19 13:12:24,972 Train Loss: 0.0001658, Val Loss: 0.0002154 +2025-02-19 13:12:24,973 Epoch 1920/2000 +2025-02-19 13:13:07,878 Current Learning Rate: 0.0065450850 +2025-02-19 13:13:07,880 Train Loss: 0.0002318, Val Loss: 0.0002632 +2025-02-19 13:13:07,881 Epoch 1921/2000 +2025-02-19 13:13:50,872 Current Learning Rate: 0.0066195871 +2025-02-19 13:13:50,874 Train Loss: 0.0001913, Val Loss: 0.0002284 +2025-02-19 13:13:50,875 Epoch 1922/2000 +2025-02-19 13:14:33,804 Current Learning Rate: 0.0066936896 +2025-02-19 13:14:33,806 Train Loss: 0.0001459, Val Loss: 0.0002138 +2025-02-19 13:14:33,807 Epoch 1923/2000 +2025-02-19 13:15:16,813 Current Learning Rate: 0.0067673742 +2025-02-19 13:15:16,815 Train Loss: 0.0002812, Val Loss: 0.0002483 +2025-02-19 13:15:16,816 Epoch 1924/2000 +2025-02-19 13:15:59,207 Current Learning Rate: 0.0068406228 +2025-02-19 13:15:59,209 Train Loss: 0.0002145, Val Loss: 0.0002288 +2025-02-19 13:15:59,210 Epoch 1925/2000 +2025-02-19 13:16:41,938 Current Learning Rate: 0.0069134172 +2025-02-19 13:16:41,940 Train Loss: 0.0001984, Val Loss: 0.0002528 +2025-02-19 13:16:41,941 Epoch 
1926/2000 +2025-02-19 13:17:25,177 Current Learning Rate: 0.0069857395 +2025-02-19 13:17:25,198 Train Loss: 0.0002265, Val Loss: 0.0002222 +2025-02-19 13:17:25,199 Epoch 1927/2000 +2025-02-19 13:18:07,944 Current Learning Rate: 0.0070575718 +2025-02-19 13:18:07,945 Train Loss: 0.0001428, Val Loss: 0.0002000 +2025-02-19 13:18:07,947 Epoch 1928/2000 +2025-02-19 13:18:50,744 Current Learning Rate: 0.0071288965 +2025-02-19 13:18:50,746 Train Loss: 0.0001800, Val Loss: 0.0002026 +2025-02-19 13:18:50,747 Epoch 1929/2000 +2025-02-19 13:19:33,919 Current Learning Rate: 0.0071996958 +2025-02-19 13:19:33,921 Train Loss: 0.0001496, Val Loss: 0.0002037 +2025-02-19 13:19:33,921 Epoch 1930/2000 +2025-02-19 13:20:16,809 Current Learning Rate: 0.0072699525 +2025-02-19 13:20:16,810 Train Loss: 0.0001270, Val Loss: 0.0001976 +2025-02-19 13:20:16,811 Epoch 1931/2000 +2025-02-19 13:20:59,522 Current Learning Rate: 0.0073396491 +2025-02-19 13:20:59,524 Train Loss: 0.0001876, Val Loss: 0.0002109 +2025-02-19 13:20:59,525 Epoch 1932/2000 +2025-02-19 13:21:41,327 Current Learning Rate: 0.0074087684 +2025-02-19 13:21:41,329 Train Loss: 0.0002204, Val Loss: 0.0002758 +2025-02-19 13:21:41,330 Epoch 1933/2000 +2025-02-19 13:22:24,108 Current Learning Rate: 0.0074772933 +2025-02-19 13:22:24,110 Train Loss: 0.0002512, Val Loss: 0.0003013 +2025-02-19 13:22:24,111 Epoch 1934/2000 +2025-02-19 13:23:06,154 Current Learning Rate: 0.0075452071 +2025-02-19 13:23:06,156 Train Loss: 0.0001801, Val Loss: 0.0002345 +2025-02-19 13:23:06,157 Epoch 1935/2000 +2025-02-19 13:23:49,081 Current Learning Rate: 0.0076124928 +2025-02-19 13:23:49,083 Train Loss: 0.0001786, Val Loss: 0.0002157 +2025-02-19 13:23:49,084 Epoch 1936/2000 +2025-02-19 13:24:32,073 Current Learning Rate: 0.0076791340 +2025-02-19 13:24:32,076 Train Loss: 0.0001962, Val Loss: 0.0002676 +2025-02-19 13:24:32,079 Epoch 1937/2000 +2025-02-19 13:25:14,555 Current Learning Rate: 0.0077451141 +2025-02-19 13:25:14,556 Train Loss: 0.0001862, Val Loss: 
0.0003078 +2025-02-19 13:25:14,557 Epoch 1938/2000 +2025-02-19 13:25:57,598 Current Learning Rate: 0.0078104169 +2025-02-19 13:25:57,600 Train Loss: 0.0002606, Val Loss: 0.0002930 +2025-02-19 13:25:57,624 Epoch 1939/2000 +2025-02-19 13:26:40,458 Current Learning Rate: 0.0078750263 +2025-02-19 13:26:40,459 Train Loss: 0.0002308, Val Loss: 0.0002649 +2025-02-19 13:26:40,460 Epoch 1940/2000 +2025-02-19 13:27:23,032 Current Learning Rate: 0.0079389263 +2025-02-19 13:27:23,034 Train Loss: 0.0002622, Val Loss: 0.0003057 +2025-02-19 13:27:23,034 Epoch 1941/2000 +2025-02-19 13:28:05,654 Current Learning Rate: 0.0080021011 +2025-02-19 13:28:05,674 Train Loss: 0.0001882, Val Loss: 0.0002389 +2025-02-19 13:28:05,684 Epoch 1942/2000 +2025-02-19 13:28:48,651 Current Learning Rate: 0.0080645353 +2025-02-19 13:28:48,653 Train Loss: 0.0002612, Val Loss: 0.0002628 +2025-02-19 13:28:48,654 Epoch 1943/2000 +2025-02-19 13:29:31,359 Current Learning Rate: 0.0081262133 +2025-02-19 13:29:31,365 Train Loss: 0.0002431, Val Loss: 0.0002704 +2025-02-19 13:29:31,380 Epoch 1944/2000 +2025-02-19 13:30:13,373 Current Learning Rate: 0.0081871199 +2025-02-19 13:30:13,375 Train Loss: 0.0002086, Val Loss: 0.0002460 +2025-02-19 13:30:13,376 Epoch 1945/2000 +2025-02-19 13:30:55,763 Current Learning Rate: 0.0082472402 +2025-02-19 13:30:55,764 Train Loss: 0.0001597, Val Loss: 0.0002605 +2025-02-19 13:30:55,766 Epoch 1946/2000 +2025-02-19 13:31:38,097 Current Learning Rate: 0.0083065593 +2025-02-19 13:31:38,100 Train Loss: 0.0002475, Val Loss: 0.0002503 +2025-02-19 13:31:38,101 Epoch 1947/2000 +2025-02-19 13:32:19,906 Current Learning Rate: 0.0083650626 +2025-02-19 13:32:19,908 Train Loss: 0.0001475, Val Loss: 0.0002258 +2025-02-19 13:32:19,909 Epoch 1948/2000 +2025-02-19 13:33:03,173 Current Learning Rate: 0.0084227355 +2025-02-19 13:33:03,175 Train Loss: 0.0002425, Val Loss: 0.0002687 +2025-02-19 13:33:03,176 Epoch 1949/2000 +2025-02-19 13:33:44,897 Current Learning Rate: 0.0084795640 +2025-02-19 
13:33:44,899 Train Loss: 0.0004316, Val Loss: 0.0004006 +2025-02-19 13:33:44,900 Epoch 1950/2000 +2025-02-19 13:34:27,879 Current Learning Rate: 0.0085355339 +2025-02-19 13:34:27,918 Train Loss: 0.0002529, Val Loss: 0.0002580 +2025-02-19 13:34:27,953 Epoch 1951/2000 +2025-02-19 13:35:10,671 Current Learning Rate: 0.0085906315 +2025-02-19 13:35:10,673 Train Loss: 0.0002903, Val Loss: 0.0002955 +2025-02-19 13:35:10,674 Epoch 1952/2000 +2025-02-19 13:35:53,550 Current Learning Rate: 0.0086448431 +2025-02-19 13:35:53,553 Train Loss: 0.0003255, Val Loss: 0.0003264 +2025-02-19 13:35:53,554 Epoch 1953/2000 +2025-02-19 13:36:35,914 Current Learning Rate: 0.0086981555 +2025-02-19 13:36:35,916 Train Loss: 0.0002436, Val Loss: 0.0002938 +2025-02-19 13:36:35,917 Epoch 1954/2000 +2025-02-19 13:37:18,312 Current Learning Rate: 0.0087505553 +2025-02-19 13:37:18,314 Train Loss: 0.0002444, Val Loss: 0.0002661 +2025-02-19 13:37:18,315 Epoch 1955/2000 +2025-02-19 13:38:00,641 Current Learning Rate: 0.0088020298 +2025-02-19 13:38:00,643 Train Loss: 0.0001847, Val Loss: 0.0002517 +2025-02-19 13:38:00,644 Epoch 1956/2000 +2025-02-19 13:38:42,456 Current Learning Rate: 0.0088525662 +2025-02-19 13:38:42,469 Train Loss: 0.0003018, Val Loss: 0.0002881 +2025-02-19 13:38:42,475 Epoch 1957/2000 +2025-02-19 13:39:24,647 Current Learning Rate: 0.0089021520 +2025-02-19 13:39:24,650 Train Loss: 0.0002253, Val Loss: 0.0002416 +2025-02-19 13:39:24,651 Epoch 1958/2000 +2025-02-19 13:40:08,015 Current Learning Rate: 0.0089507751 +2025-02-19 13:40:08,061 Train Loss: 0.0001915, Val Loss: 0.0002403 +2025-02-19 13:40:08,063 Epoch 1959/2000 +2025-02-19 13:40:50,904 Current Learning Rate: 0.0089984233 +2025-02-19 13:40:50,906 Train Loss: 0.0001649, Val Loss: 0.0002827 +2025-02-19 13:40:50,907 Epoch 1960/2000 +2025-02-19 13:41:33,015 Current Learning Rate: 0.0090450850 +2025-02-19 13:41:33,017 Train Loss: 0.0002614, Val Loss: 0.0002810 +2025-02-19 13:41:33,019 Epoch 1961/2000 +2025-02-19 13:42:15,236 Current 
Learning Rate: 0.0090907486 +2025-02-19 13:42:15,238 Train Loss: 0.0001902, Val Loss: 0.0002588 +2025-02-19 13:42:15,239 Epoch 1962/2000 +2025-02-19 13:42:58,812 Current Learning Rate: 0.0091354029 +2025-02-19 13:42:58,814 Train Loss: 0.0002041, Val Loss: 0.0002960 +2025-02-19 13:42:58,815 Epoch 1963/2000 +2025-02-19 13:43:42,064 Current Learning Rate: 0.0091790368 +2025-02-19 13:43:42,066 Train Loss: 0.0002398, Val Loss: 0.0002979 +2025-02-19 13:43:42,067 Epoch 1964/2000 +2025-02-19 13:44:25,348 Current Learning Rate: 0.0092216396 +2025-02-19 13:44:25,350 Train Loss: 0.0002669, Val Loss: 0.0003629 +2025-02-19 13:44:25,351 Epoch 1965/2000 +2025-02-19 13:45:08,370 Current Learning Rate: 0.0092632008 +2025-02-19 13:45:08,372 Train Loss: 0.0002395, Val Loss: 0.0003153 +2025-02-19 13:45:08,373 Epoch 1966/2000 +2025-02-19 13:45:51,845 Current Learning Rate: 0.0093037101 +2025-02-19 13:45:51,846 Train Loss: 0.0002947, Val Loss: 0.0002661 +2025-02-19 13:45:51,847 Epoch 1967/2000 +2025-02-19 13:46:34,606 Current Learning Rate: 0.0093431576 +2025-02-19 13:46:34,608 Train Loss: 0.0002339, Val Loss: 0.0002777 +2025-02-19 13:46:34,610 Epoch 1968/2000 +2025-02-19 13:47:17,360 Current Learning Rate: 0.0093815334 +2025-02-19 13:47:17,363 Train Loss: 0.0002310, Val Loss: 0.0002872 +2025-02-19 13:47:17,363 Epoch 1969/2000 +2025-02-19 13:48:00,319 Current Learning Rate: 0.0094188282 +2025-02-19 13:48:00,322 Train Loss: 0.0003090, Val Loss: 0.0003310 +2025-02-19 13:48:00,323 Epoch 1970/2000 +2025-02-19 13:48:44,101 Current Learning Rate: 0.0094550326 +2025-02-19 13:48:44,103 Train Loss: 0.0002036, Val Loss: 0.0002684 +2025-02-19 13:48:44,104 Epoch 1971/2000 +2025-02-19 13:49:27,175 Current Learning Rate: 0.0094901379 +2025-02-19 13:49:27,176 Train Loss: 0.0001900, Val Loss: 0.0002480 +2025-02-19 13:49:27,178 Epoch 1972/2000 +2025-02-19 13:50:10,158 Current Learning Rate: 0.0095241353 +2025-02-19 13:50:10,160 Train Loss: 0.0002441, Val Loss: 0.0002496 +2025-02-19 13:50:10,161 Epoch 
1973/2000 +2025-02-19 13:50:53,102 Current Learning Rate: 0.0095570164 +2025-02-19 13:50:53,106 Train Loss: 0.0002433, Val Loss: 0.0002842 +2025-02-19 13:50:53,107 Epoch 1974/2000 +2025-02-19 13:51:35,785 Current Learning Rate: 0.0095887731 +2025-02-19 13:51:35,786 Train Loss: 0.0002307, Val Loss: 0.0002594 +2025-02-19 13:51:35,788 Epoch 1975/2000 +2025-02-19 13:52:19,247 Current Learning Rate: 0.0096193977 +2025-02-19 13:52:19,249 Train Loss: 0.0025567, Val Loss: 0.0024394 +2025-02-19 13:52:19,250 Epoch 1976/2000 +2025-02-19 13:53:02,547 Current Learning Rate: 0.0096488824 +2025-02-19 13:53:02,550 Train Loss: 0.0015209, Val Loss: 0.0006062 +2025-02-19 13:53:02,552 Epoch 1977/2000 +2025-02-19 13:53:46,008 Current Learning Rate: 0.0096772202 +2025-02-19 13:53:46,019 Train Loss: 0.0006244, Val Loss: 0.0005558 +2025-02-19 13:53:46,032 Epoch 1978/2000 +2025-02-19 13:54:28,691 Current Learning Rate: 0.0097044038 +2025-02-19 13:54:28,693 Train Loss: 0.0006278, Val Loss: 0.0008046 +2025-02-19 13:54:28,694 Epoch 1979/2000 +2025-02-19 13:55:11,953 Current Learning Rate: 0.0097304268 +2025-02-19 13:55:11,955 Train Loss: 0.0006101, Val Loss: 0.0028847 +2025-02-19 13:55:11,956 Epoch 1980/2000 +2025-02-19 13:55:55,610 Current Learning Rate: 0.0097552826 +2025-02-19 13:55:55,612 Train Loss: 0.0006145, Val Loss: 0.0009004 +2025-02-19 13:55:55,613 Epoch 1981/2000 +2025-02-19 13:56:39,183 Current Learning Rate: 0.0097789651 +2025-02-19 13:56:39,185 Train Loss: 0.0006718, Val Loss: 0.0005532 +2025-02-19 13:56:39,186 Epoch 1982/2000 +2025-02-19 13:57:22,287 Current Learning Rate: 0.0098014684 +2025-02-19 13:57:22,289 Train Loss: 0.0004988, Val Loss: 0.0005209 +2025-02-19 13:57:22,291 Epoch 1983/2000 +2025-02-19 13:58:05,719 Current Learning Rate: 0.0098227871 +2025-02-19 13:58:05,721 Train Loss: 0.0005861, Val Loss: 0.0004734 +2025-02-19 13:58:05,722 Epoch 1984/2000 +2025-02-19 13:58:48,271 Current Learning Rate: 0.0098429158 +2025-02-19 13:58:48,274 Train Loss: 0.0002894, Val Loss: 
0.0003020 +2025-02-19 13:58:48,275 Epoch 1985/2000 +2025-02-19 13:59:31,616 Current Learning Rate: 0.0098618496 +2025-02-19 13:59:31,618 Train Loss: 0.0003156, Val Loss: 0.0002799 +2025-02-19 13:59:31,619 Epoch 1986/2000 +2025-02-19 14:00:14,060 Current Learning Rate: 0.0098795838 +2025-02-19 14:00:14,062 Train Loss: 0.0002834, Val Loss: 0.0002853 +2025-02-19 14:00:14,063 Epoch 1987/2000 +2025-02-19 14:00:57,253 Current Learning Rate: 0.0098961141 +2025-02-19 14:00:57,257 Train Loss: 0.0002277, Val Loss: 0.0002562 +2025-02-19 14:00:57,258 Epoch 1988/2000 +2025-02-19 14:01:39,942 Current Learning Rate: 0.0099114363 +2025-02-19 14:01:39,943 Train Loss: 0.0003088, Val Loss: 0.0002971 +2025-02-19 14:01:39,944 Epoch 1989/2000 +2025-02-19 14:02:22,476 Current Learning Rate: 0.0099255466 +2025-02-19 14:02:22,478 Train Loss: 0.0002795, Val Loss: 0.0003608 +2025-02-19 14:02:22,479 Epoch 1990/2000 +2025-02-19 14:03:05,339 Current Learning Rate: 0.0099384417 +2025-02-19 14:03:05,341 Train Loss: 0.0003420, Val Loss: 0.0002915 +2025-02-19 14:03:05,342 Epoch 1991/2000 +2025-02-19 14:03:49,211 Current Learning Rate: 0.0099501183 +2025-02-19 14:03:49,233 Train Loss: 0.0002168, Val Loss: 0.0002731 +2025-02-19 14:03:49,245 Epoch 1992/2000 +2025-02-19 14:04:31,933 Current Learning Rate: 0.0099605735 +2025-02-19 14:04:31,935 Train Loss: 0.0002011, Val Loss: 0.0002687 +2025-02-19 14:04:31,936 Epoch 1993/2000 +2025-02-19 14:05:15,740 Current Learning Rate: 0.0099698048 +2025-02-19 14:05:15,742 Train Loss: 0.0002759, Val Loss: 0.0002570 +2025-02-19 14:05:15,743 Epoch 1994/2000 +2025-02-19 14:05:58,944 Current Learning Rate: 0.0099778098 +2025-02-19 14:05:58,945 Train Loss: 0.0001932, Val Loss: 0.0002441 +2025-02-19 14:05:58,947 Epoch 1995/2000 +2025-02-19 14:06:41,994 Current Learning Rate: 0.0099845867 +2025-02-19 14:06:41,996 Train Loss: 0.0002042, Val Loss: 0.0002350 +2025-02-19 14:06:41,997 Epoch 1996/2000 +2025-02-19 14:07:25,018 Current Learning Rate: 0.0099901336 +2025-02-19 
14:07:25,020 Train Loss: 0.0001661, Val Loss: 0.0002245 +2025-02-19 14:07:25,021 Epoch 1997/2000 +2025-02-19 14:08:08,176 Current Learning Rate: 0.0099944494 +2025-02-19 14:08:08,193 Train Loss: 0.0002242, Val Loss: 0.0002363 +2025-02-19 14:08:08,198 Epoch 1998/2000 +2025-02-19 14:08:50,579 Current Learning Rate: 0.0099975328 +2025-02-19 14:08:50,580 Train Loss: 0.0001969, Val Loss: 0.0002482 +2025-02-19 14:08:50,581 Epoch 1999/2000 +2025-02-19 14:09:32,908 Current Learning Rate: 0.0099993832 +2025-02-19 14:09:32,910 Train Loss: 0.0002183, Val Loss: 0.0002514 +2025-02-19 14:09:32,911 Epoch 2000/2000 +2025-02-19 14:10:15,485 Current Learning Rate: 0.0100000000 +2025-02-19 14:10:15,487 Train Loss: 0.0002445, Val Loss: 0.0002415 +2025-02-19 14:10:20,008 Testing completed and best model saved. diff --git a/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/Triton_model-checkpoint.py b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/Triton_model-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..1606b32bdbc4e7117cf479f9e2ed7ba21c17eaa2 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/Triton_model-checkpoint.py @@ -0,0 +1,507 @@ +import torch +from torch import nn +import math +from timm.layers import DropPath, trunc_normal_ + +def stride_generator(N, reverse=False): + strides = [1, 2] * 10 + if reverse: + return list(reversed(strides[:N])) + else: + return strides[:N] + +class MLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(MLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class 
ConvMLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(ConvMLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super(Attention, self).__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = qkv[0], qkv[1], qkv[2] + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class ConvBlock(nn.Module): + def __init__( + self, + dim, + num_heads=4, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(ConvBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = nn.BatchNorm2d(dim) + self.conv1 = nn.Conv2d(dim, dim, 1) + self.conv2 = nn.Conv2d(dim, dim, 1) + self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ConvMLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = ( + m.kernel_size[0] * m.kernel_size[1] * m.out_channels + ) + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + x = x + self.pos_embed(x) + x = x + self.drop_path( + self.conv2(self.attn(self.conv1(self.norm1(x)))) + ) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + +class SelfAttentionBlock(nn.Module): + def __init__( + self, + dim, + num_heads, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + init_value=1e-6, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(SelfAttentionBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop + ) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma_1', 'gamma_2'} + + def forward(self, x): + x = x + self.pos_embed(x) + B, N, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x))) + x = x.transpose(1, 2).reshape(B, N, H, W) + return x + +def UniformerSubBlock( + embed_dims, + mlp_ratio=4., + drop=0., + drop_path=0., + init_value=1e-6, + block_type='Conv' +): + assert block_type in ['Conv', 'MHSA'] + if block_type == 'Conv': + return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) + else: + return SelfAttentionBlock( + dim=embed_dims, + num_heads=8, + mlp_ratio=mlp_ratio, + qkv_bias=True, + drop=drop, + drop_path=drop_path, + init_value=init_value + ) + +class SpatioTemporalEvolutionBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + input_resolution=None, + mlp_ratio=8., + drop=0.0, + drop_path=0.0, + layer_i=0 + ): + super(SpatioTemporalEvolutionBlock, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv' + self.block = UniformerSubBlock( + in_channels, + 
mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + block_type=block_type + ) + + if in_channels != out_channels: + self.reduction = nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0 + ) + + def forward(self, x): + z = self.block(x) + if self.in_channels != self.out_channels: + z = self.reduction(z) + return z + +class SpatioTemporalEvolution(nn.Module): + def __init__( + self, + channel_in, + channel_hid, + N2, + input_resolution=None, + mlp_ratio=4., + drop=0.0, + drop_path=0.1 + ): + super(SpatioTemporalEvolution, self).__init__() + assert N2 >= 2 and mlp_ratio > 1 + self.N2 = N2 + dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)] + + evolution_layers = [SpatioTemporalEvolutionBlock( + channel_in, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[0], + layer_i=0 + )] + + for i in range(1, N2 - 1): + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[i], + layer_i=i + )) + + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_in, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + layer_i=N2 - 1 + )) + self.enc = nn.Sequential(*evolution_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T * C, H, W) + z = x + for i in range(self.N2): + z = self.enc[i](z) + y = z.reshape(B, T, C, H, W) + return y + +class BasicConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + transpose=False, + act_norm=False + ): + super(BasicConv2d, self).__init__() + self.act_norm = act_norm + if not transpose: + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + else: + self.conv = nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + 
padding=padding, + output_padding=stride // 2 + ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + +class ConvDynamicsLayer(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvDynamicsLayer, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d( + C_in, + C_out, + kernel_size=3, + stride=stride, + padding=1, + transpose=transpose, + act_norm=act_norm + ) + + def forward(self, x): + y = self.conv(x) + return y + +class MultiGroupConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + groups, + act_norm=False + ): + super(MultiGroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups + ) + self.norm = nn.GroupNorm(groups, out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class AtmosphericEncoder(nn.Module): + def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers): + super(AtmosphericEncoder, self).__init__() + strides = stride_generator(num_spatial_layers) + self.enc = nn.Sequential( + ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]), + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]] + ) + + def forward(self, x): + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1, len(self.enc)): + latent = self.enc[i](latent) + return latent, enc1 + +class AtmosphericDecoder(nn.Module): + def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers): + super(AtmosphericDecoder, self).__init__() + strides = 
stride_generator(num_spatial_layers, reverse=True) + self.dec = nn.Sequential( + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]], + ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0, len(self.dec) - 1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Triton(nn.Module): + def __init__( + self, + shape_in, + spatial_hidden_dim=64, + output_channels=4, + temporal_hidden_dim=128, + num_spatial_layers=4, + num_temporal_layers=8, + in_time_seq_length=10, + out_time_seq_length=10 + ): + super(Triton, self).__init__() + T, C, H, W = shape_in + self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2)) + self.W1 = int(W / 2 ** (num_spatial_layers / 2)) + self.output_dim = output_channels + self.input_time_seq_length = in_time_seq_length + self.output_time_seq_length = out_time_seq_length + + self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers) + self.temporal_evolution = SpatioTemporalEvolution( + T * spatial_hidden_dim, + temporal_hidden_dim, + num_temporal_layers, + input_resolution=[self.H1, self.W1], + mlp_ratio=4.0, + drop_path=0.1 + ) + self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers) + + def forward(self, input_state): + """ + 1. Reshape the input state to match the encoder's input requirements. + 2. Extract features using the Atmospheric Encoder and obtain skip connections. + 3. Perform spatio-temporal evolution on the encoded features. + 4. Decode the evolved features to generate the final output. 
+ """ + batch_size, temporal_length, channels, height, width = input_state.shape + reshaped_input = input_state.view(batch_size * temporal_length, channels, height, width) + + encoded_features, skip_connection = self.atmospheric_encoder(reshaped_input) + _, encoded_channels, encoded_height, encoded_width = encoded_features.shape + encoded_features = encoded_features.view(batch_size, temporal_length, encoded_channels, encoded_height, encoded_width) + + temporal_bias = encoded_features + temporal_hidden = self.temporal_evolution(temporal_bias) + reshaped_hidden = temporal_hidden.view(batch_size * temporal_length, encoded_channels, encoded_height, encoded_width) + + decoded_output = self.atmospheric_decoder(reshaped_hidden, skip_connection) + final_output = decoded_output.view(batch_size, temporal_length, -1, height, width) + + return final_output + + +def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + +if __name__ == '__main__': + inputs = torch.randn(1, 10, 2, 128, 128) + model = Triton( + shape_in=(10, 2, 128, 128), + spatial_hidden_dim=32, + output_channels=1, + temporal_hidden_dim=64, + num_spatial_layers=4, + num_temporal_layers=8) + output = model(inputs) + print(output.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/Triton_model_step_finetune-checkpoint.py b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/Triton_model_step_finetune-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..714e71977bbd7044d94f3bb7a26ef3f837e98f7b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/Triton_model_step_finetune-checkpoint.py @@ -0,0 +1,534 @@ +import torch +from torch import nn +import math +from timm.layers import DropPath, trunc_normal_ + +def stride_generator(N, reverse=False): + strides = [1, 2] * 10 + if reverse: + return list(reversed(strides[:N])) + else: + return strides[:N] + +class MLP(nn.Module): + def 
__init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(MLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class ConvMLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(ConvMLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super(Attention, self).__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = qkv[0], qkv[1], qkv[2] + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class ConvBlock(nn.Module): + def __init__( + self, + dim, + 
num_heads=4, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(ConvBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = nn.BatchNorm2d(dim) + self.conv1 = nn.Conv2d(dim, dim, 1) + self.conv2 = nn.Conv2d(dim, dim, 1) + self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ConvMLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = ( + m.kernel_size[0] * m.kernel_size[1] * m.out_channels + ) + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + x = x + self.pos_embed(x) + x = x + self.drop_path( + self.conv2(self.attn(self.conv1(self.norm1(x)))) + ) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + +class SelfAttentionBlock(nn.Module): + def __init__( + self, + dim, + num_heads, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + init_value=1e-6, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(SelfAttentionBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop + ) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma_1', 'gamma_2'} + + def forward(self, x): + x = x + self.pos_embed(x) + B, N, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x))) + x = x.transpose(1, 2).reshape(B, N, H, W) + return x + +def UniformerSubBlock( + embed_dims, + mlp_ratio=4., + drop=0., + drop_path=0., + init_value=1e-6, + block_type='Conv' +): + assert block_type in ['Conv', 'MHSA'] + if block_type == 'Conv': + # return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) + return SelfAttentionBlock( + dim=embed_dims, + num_heads=8, + mlp_ratio=mlp_ratio, + qkv_bias=True, + drop=drop, + drop_path=drop_path, + init_value=init_value + ) + else: + return SelfAttentionBlock( + dim=embed_dims, + num_heads=8, + mlp_ratio=mlp_ratio, + qkv_bias=True, + drop=drop, + drop_path=drop_path, + init_value=init_value + ) + +class SpatioTemporalEvolutionBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + input_resolution=None, + mlp_ratio=8., + drop=0.0, + drop_path=0.0, + layer_i=0 + ): + super(SpatioTemporalEvolutionBlock, self).__init__() + self.in_channels = in_channels 
+ self.out_channels = out_channels + block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv' + self.block = UniformerSubBlock( + in_channels, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + block_type=block_type + ) + + if in_channels != out_channels: + self.reduction = nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0 + ) + + def forward(self, x): + z = self.block(x) + if self.in_channels != self.out_channels: + z = self.reduction(z) + return z + +class SpatioTemporalEvolution(nn.Module): + def __init__( + self, + channel_in, + channel_hid, + N2, + input_resolution=None, + mlp_ratio=4., + drop=0.0, + drop_path=0.1 + ): + super(SpatioTemporalEvolution, self).__init__() + assert N2 >= 2 and mlp_ratio > 1 + self.N2 = N2 + dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)] + + evolution_layers = [SpatioTemporalEvolutionBlock( + channel_in, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[0], + layer_i=0 + )] + + for i in range(1, N2 - 1): + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[i], + layer_i=i + )) + + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_in, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + layer_i=N2 - 1 + )) + self.enc = nn.Sequential(*evolution_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T * C, H, W) + z = x + for i in range(self.N2): + z = self.enc[i](z) + y = z.reshape(B, T, C, H, W) + return y + +class BasicConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + transpose=False, + act_norm=False + ): + super(BasicConv2d, self).__init__() + self.act_norm = act_norm + if not transpose: + self.conv = nn.Conv2d( + in_channels, + out_channels, + 
kernel_size=kernel_size, + stride=stride, + padding=padding + ) + else: + self.conv = nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + output_padding=stride // 2 + ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + +class ConvDynamicsLayer(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvDynamicsLayer, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d( + C_in, + C_out, + kernel_size=3, + stride=stride, + padding=1, + transpose=transpose, + act_norm=act_norm + ) + + def forward(self, x): + y = self.conv(x) + return y + +class MultiGroupConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + groups, + act_norm=False + ): + super(MultiGroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups + ) + self.norm = nn.GroupNorm(groups, out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class AtmosphericEncoder(nn.Module): + def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers): + super(AtmosphericEncoder, self).__init__() + strides = stride_generator(num_spatial_layers) + self.enc = nn.Sequential( + ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]), + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]] + ) + + def forward(self, x): + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1, len(self.enc)): + latent = self.enc[i](latent) + return latent, enc1 + 
+class AtmosphericDecoder(nn.Module): + def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers): + super(AtmosphericDecoder, self).__init__() + strides = stride_generator(num_spatial_layers, reverse=True) + self.dec = nn.Sequential( + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]], + ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0, len(self.dec) - 1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Triton_finetune(nn.Module): + def __init__( + self, + shape_in, + spatial_hidden_dim=64, + output_channels=4, + temporal_hidden_dim=128, + num_spatial_layers=4, + num_temporal_layers=8, + in_time_seq_length=10, + out_time_seq_length=40 + ): + super(Triton_finetune, self).__init__() + T, C, H, W = shape_in + self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2)) + self.W1 = int(W / 2 ** (num_spatial_layers / 2)) + self.output_dim = output_channels + self.input_time_seq_length = in_time_seq_length + self.output_time_seq_length = out_time_seq_length + + self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers) + self.temporal_evolution = SpatioTemporalEvolution( + T * spatial_hidden_dim, + temporal_hidden_dim, + num_temporal_layers, + input_resolution=[self.H1, self.W1], + mlp_ratio=4.0, + drop_path=0.1 + ) + self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers) + + def _single_forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + embed, skip = self.atmospheric_encoder(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + temporal_bias = z + temporal_hidden = 
self.temporal_evolution(temporal_bias) + hid = temporal_hidden.view(B*T, C_, H_, W_) + + Y = self.atmospheric_decoder(hid, skip) + return Y.reshape(B, T, -1, H, W) + + def forward(self, input_sequence): + + base_pred = self._single_forward(input_sequence) + + if self.output_time_seq_length == self.input_time_seq_length: + return base_pred + if self.output_time_seq_length < self.input_time_seq_length: + return base_pred[:, :self.output_time_seq_length] + + predictions = [base_pred] + d = self.output_time_seq_length // self.input_time_seq_length + m = self.output_time_seq_length % self.input_time_seq_length + + for _ in range(1, d): + new_pred = self._single_forward(predictions[-1]) + predictions.append(new_pred) + + if m > 0: + final_pred = self._single_forward(predictions[-1])[:, :m] + predictions.append(final_pred) + + return torch.cat(predictions, dim=1) + + +def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + +if __name__ == '__main__': + inputs = torch.randn(1, 10, 2, 256, 256) + model = Triton_finetune( + shape_in=(10, 2, 256, 256), + spatial_hidden_dim=256, + output_channels=2, + temporal_hidden_dim=512, + num_spatial_layers=4, + num_temporal_layers=8, + in_time_seq_length=10, + out_time_seq_length=20) + output = model(inputs) + print(inputs.shape) + print(output.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/nmo_fourier-checkpoint.py b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/nmo_fourier-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..4978108f534ef23484a93674733d388e2300132f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/nmo_fourier-checkpoint.py @@ -0,0 +1,194 @@ +import torch +from torch import nn +from model.modules_api.modules import ConvSC, Inception +from model.modules_api.fouriermodules import * +from model.modules_api.evolution import Spatio_temporal_evolution +import math + +def 
stride_generator(N, reverse=False): + strides = [1, 2]*10 + if reverse: return list(reversed(strides[:N])) + else: return strides[:N] + +class Encoder(nn.Module): + def __init__(self,C_in, C_hid, N_S): + super(Encoder,self).__init__() + strides = stride_generator(N_S) + self.enc = nn.Sequential( + ConvSC(C_in, C_hid, stride=strides[0]), + *[ConvSC(C_hid, C_hid, stride=s) for s in strides[1:]] + ) + + def forward(self,x): + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1,len(self.enc)): + latent = self.enc[i](latent) + return latent,enc1 + + +class Decoder(nn.Module): + def __init__(self,C_hid, C_out, N_S): + super(Decoder,self).__init__() + strides = stride_generator(N_S, reverse=True) + self.dec = nn.Sequential( + *[ConvSC(C_hid, C_hid, stride=s, transpose=True) for s in strides[:-1]], + ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(C_hid, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0,len(self.dec)-1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + + + + +class Temporal_evo(nn.Module): + def __init__(self, channel_in, channel_hid, N_T, h, w, incep_ker=[3, 5, 7, 11], groups=8): + super(Temporal_evo, self).__init__() + + self.N_T = N_T + enc_layers = [Inception(channel_in, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)] + for i in range(1, N_T - 1): + enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + + dec_layers = [Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)] + for i in range(1, N_T - 1): + dec_layers.append( + Inception(2 * channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + dec_layers.append(Inception(2 * channel_hid, channel_hid // 2, channel_in, 
incep_ker=incep_ker, groups=groups)) + norm_layer = partial(nn.LayerNorm, eps=1e-6) + self.norm = norm_layer(channel_hid) + + self.enc = nn.Sequential(*enc_layers) + dpr = [x.item() for x in torch.linspace(0, 0, 12)] + self.h = h + self.w = w + self.blocks = nn.ModuleList([FourierNetBlock( + dim=channel_hid, + mlp_ratio=4, + drop=0., + drop_path=dpr[i], + act_layer=nn.GELU, + norm_layer=norm_layer, + h = self.h, + w = self.w) + for i in range(12) + ]) + self.dec = nn.Sequential(*dec_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + bias = x + x = x.reshape(B, T * C, H, W) + + # downsampling + skips = [] + z = x + for i in range(self.N_T): + z = self.enc[i](z) + if i < self.N_T - 1: + skips.append(z) + + # Spectral Domain + B, D, H, W = z.shape + N = H * W + z = z.permute(0, 2, 3, 1) + z = z.view(B, N, D) + for blk in self.blocks: + z = blk(z) + z = self.norm(z).permute(0, 2, 1) + + z = z.reshape(B, D, H, W) + + # upsampling + z = self.dec[0](z) + for i in range(1, self.N_T): + z = self.dec[i](torch.cat([z, skips[-i]], dim=1)) + + y = z.reshape(B, T, C, H, W) + return y + bias + + + +class NMOModel(nn.Module): + def __init__(self, shape_in, model_type='uniformer', hid_S=64, output_dim = 4, hid_T=128, N_S=4, N_T=8, incep_ker=[3,5,7,11], groups=4, + in_time_seq_length=10, out_time_seq_length=10): + super(NMOModel, self).__init__() + T, C, H, W = shape_in + self.H1 = int(H / 2 ** (N_S / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (N_S / 2)) + self.W1 = int(W / 2 ** (N_S / 2)) + self.out_dim = output_dim + self.in_time_seq_length = in_time_seq_length + self.out_time_seq_length = out_time_seq_length + self.enc = Encoder(C, hid_S, N_S) + self.hid = Temporal_evo(T*hid_S, hid_T, N_T, self.H1, self.W1, incep_ker, groups) # + self.temporal_evolution = Spatio_temporal_evolution(T*hid_S, hid_T, N_T, + input_resolution=[self.H1, self.W1], + model_type = model_type, + mlp_ratio=4., + drop_path=0.1) + + self.dec = Decoder(hid_S, self.out_dim, N_S) + + + def _forward(self, 
x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + + embed, skip = self.enc(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + bias = z + bias_hid = self.temporal_evolution(bias) + hid = bias_hid.reshape(B*T, C_, H_, W_) + + Y = self.dec(hid, skip) + Y = Y.reshape(B, T, -1, H, W) + return Y + + def forward(self, xx): + yy = self._forward(xx) + in_time_seq_length, out_time_seq_length = self.in_time_seq_length, self.out_time_seq_length + if out_time_seq_length == in_time_seq_length: + y_pred = yy + if out_time_seq_length < in_time_seq_length: + y_pred = yy[:, :out_time_seq_length] + elif out_time_seq_length > in_time_seq_length: + y_pred = [yy] + d = out_time_seq_length // in_time_seq_length + m = out_time_seq_length % in_time_seq_length + + for _ in range(1, d): + cur_seq = self._forward(y_pred[-1]) + y_pred.append(cur_seq) + + if m != 0: + cur_seq = self._forward(y_pred[-1]) + y_pred.append(cur_seq[:, :m]) + + y_pred = torch.cat(y_pred, dim=1) + + return y_pred + +def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + + +if __name__ == '__main__': + inputs = torch.randn(1, 10, 7, 600, 600) + model = NMOModel(shape_in=(10, 7, 600, 600), hid_S=64, output_dim = 1, hid_T=128) + # print(model) + output = model(inputs) + + print(output.shape) + + # Print the number of parameters + #print(f'The model has {count_parameters(model):,} trainable parameters.') diff --git a/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/test-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/test-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4c5928088495442cb90fe4d174b979d10f383484 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/.ipynb_checkpoints/test-checkpoint.ipynb @@ -0,0 +1,549 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "b141979d-b258-443e-96b9-7331d009160a", + "metadata": {}, + "outputs": [], + 
"source": [ + "import torch\n", + "from torch import nn\n", + "import math\n", + "from timm.layers import DropPath, trunc_normal_\n", + "\n", + "def stride_generator(N, reverse=False):\n", + " strides = [1, 2] * 10\n", + " if reverse:\n", + " return list(reversed(strides[:N]))\n", + " else:\n", + " return strides[:N]\n", + " \n", + "class MLP(nn.Module):\n", + " def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):\n", + " super(MLP, self).__init__()\n", + " out_features = out_features or in_features\n", + " hidden_features = hidden_features or in_features\n", + " self.fc1 = nn.Linear(in_features, hidden_features)\n", + " self.act = act_layer()\n", + " self.fc2 = nn.Linear(hidden_features, out_features)\n", + " self.drop = nn.Dropout(drop)\n", + "\n", + " def forward(self, x):\n", + " x = self.fc1(x)\n", + " x = self.act(x)\n", + " x = self.drop(x)\n", + " x = self.fc2(x)\n", + " x = self.drop(x)\n", + " return x\n", + "\n", + "class ConvMLP(nn.Module):\n", + " def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):\n", + " super(ConvMLP, self).__init__()\n", + " out_features = out_features or in_features\n", + " hidden_features = hidden_features or in_features\n", + " self.fc1 = nn.Conv2d(in_features, hidden_features, 1)\n", + " self.act = act_layer()\n", + " self.fc2 = nn.Conv2d(hidden_features, out_features, 1)\n", + " self.drop = nn.Dropout(drop)\n", + "\n", + " def forward(self, x):\n", + " x = self.fc1(x)\n", + " x = self.act(x)\n", + " x = self.drop(x)\n", + " x = self.fc2(x)\n", + " x = self.drop(x)\n", + " return x\n", + "\n", + "class Attention(nn.Module):\n", + " def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):\n", + " super(Attention, self).__init__()\n", + " self.num_heads = num_heads\n", + " head_dim = dim // num_heads\n", + " self.scale = qk_scale or head_dim ** -0.5\n", + "\n", + " self.qkv = nn.Linear(dim, 
dim * 3, bias=qkv_bias)\n", + " self.attn_drop = nn.Dropout(attn_drop)\n", + " self.proj = nn.Linear(dim, dim)\n", + " self.proj_drop = nn.Dropout(proj_drop)\n", + "\n", + " def forward(self, x):\n", + " B, N, C = x.shape\n", + " qkv = (\n", + " self.qkv(x)\n", + " .reshape(B, N, 3, self.num_heads, C // self.num_heads)\n", + " .permute(2, 0, 3, 1, 4)\n", + " )\n", + " q, k, v = qkv[0], qkv[1], qkv[2]\n", + "\n", + " attn = (q @ k.transpose(-2, -1)) * self.scale\n", + " attn = attn.softmax(dim=-1)\n", + " attn = self.attn_drop(attn)\n", + "\n", + " x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n", + " x = self.proj(x)\n", + " x = self.proj_drop(x)\n", + " return x\n", + "\n", + "class ConvBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " num_heads=4,\n", + " mlp_ratio=4.,\n", + " qkv_bias=False,\n", + " qk_scale=None,\n", + " drop=0.,\n", + " attn_drop=0.,\n", + " drop_path=0.,\n", + " act_layer=nn.GELU,\n", + " norm_layer=nn.LayerNorm\n", + " ):\n", + " super(ConvBlock, self).__init__()\n", + " self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)\n", + " self.norm1 = nn.BatchNorm2d(dim)\n", + " self.conv1 = nn.Conv2d(dim, dim, 1)\n", + " self.conv2 = nn.Conv2d(dim, dim, 1)\n", + " self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim)\n", + " self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n", + " self.norm2 = nn.BatchNorm2d(dim)\n", + " mlp_hidden_dim = int(dim * mlp_ratio)\n", + " self.mlp = ConvMLP(\n", + " in_features=dim,\n", + " hidden_features=mlp_hidden_dim,\n", + " act_layer=act_layer,\n", + " drop=drop\n", + " )\n", + "\n", + " self.apply(self._init_weights)\n", + "\n", + " def _init_weights(self, m):\n", + " if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):\n", + " nn.init.constant_(m.bias, 0)\n", + " nn.init.constant_(m.weight, 1.0)\n", + " elif isinstance(m, nn.Conv2d):\n", + " fan_out = (\n", + " m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n", + " )\n", + " fan_out //= m.groups\n", + " m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))\n", + " if m.bias is not None:\n", + " m.bias.data.zero_()\n", + "\n", + " @torch.jit.ignore\n", + " def no_weight_decay(self):\n", + " return {}\n", + "\n", + " def forward(self, x):\n", + " x = x + self.pos_embed(x)\n", + " x = x + self.drop_path(\n", + " self.conv2(self.attn(self.conv1(self.norm1(x))))\n", + " )\n", + " x = x + self.drop_path(self.mlp(self.norm2(x)))\n", + " return x\n", + "\n", + "class SelfAttentionBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " num_heads,\n", + " mlp_ratio=4.,\n", + " qkv_bias=False,\n", + " qk_scale=None,\n", + " drop=0.,\n", + " attn_drop=0.,\n", + " drop_path=0.,\n", + " init_value=1e-6,\n", + " act_layer=nn.GELU,\n", + " norm_layer=nn.LayerNorm\n", + " ):\n", + " super(SelfAttentionBlock, self).__init__()\n", + " self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)\n", + " self.norm1 = norm_layer(dim)\n", + " self.attn = Attention(\n", + " dim,\n", + " num_heads=num_heads,\n", + " qkv_bias=qkv_bias,\n", + " qk_scale=qk_scale,\n", + " attn_drop=attn_drop,\n", + " proj_drop=drop\n", + " )\n", + " self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n", + " self.norm2 = norm_layer(dim)\n", + " mlp_hidden_dim = int(dim * mlp_ratio)\n", + " self.mlp = MLP(\n", + " in_features=dim,\n", + " hidden_features=mlp_hidden_dim,\n", + " act_layer=act_layer,\n", + " drop=drop\n", + " )\n", + " self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)\n", + " self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)\n", + "\n", + " self.apply(self._init_weights)\n", + "\n", + " def _init_weights(self, m):\n", + " if isinstance(m, nn.Linear):\n", + " trunc_normal_(m.weight, std=.02)\n", + " if isinstance(m, nn.Linear) and m.bias is not None:\n", + " nn.init.constant_(m.bias, 0)\n", + " elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):\n", + " nn.init.constant_(m.bias, 0)\n", + " nn.init.constant_(m.weight, 1.0)\n", + "\n", + " @torch.jit.ignore\n", + " def no_weight_decay(self):\n", + " return {'gamma_1', 'gamma_2'}\n", + "\n", + " def forward(self, x):\n", + " x = x + self.pos_embed(x)\n", + " B, N, H, W = x.shape\n", + " x = x.flatten(2).transpose(1, 2)\n", + " x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x)))\n", + " x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x)))\n", + " x = x.transpose(1, 2).reshape(B, N, H, W)\n", + " return x\n", + "\n", + "def UniformerSubBlock(\n", + " embed_dims,\n", + " mlp_ratio=4.,\n", + " drop=0.,\n", + " drop_path=0.,\n", + " init_value=1e-6,\n", + " block_type='Conv'\n", + "):\n", + " assert block_type in ['Conv', 'MHSA']\n", + " if block_type == 'Conv':\n", + " return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)\n", + " else:\n", + " return SelfAttentionBlock(\n", + " dim=embed_dims,\n", + " num_heads=8,\n", + " mlp_ratio=mlp_ratio,\n", + " qkv_bias=True,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " init_value=init_value\n", + " )\n", + "\n", + "class SpatioTemporalEvolutionBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " 
in_channels,\n", + " out_channels,\n", + " input_resolution=None,\n", + " mlp_ratio=8.,\n", + " drop=0.0,\n", + " drop_path=0.0,\n", + " layer_i=0\n", + " ):\n", + " super(SpatioTemporalEvolutionBlock, self).__init__()\n", + " self.in_channels = in_channels\n", + " self.out_channels = out_channels\n", + " block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv'\n", + " self.block = UniformerSubBlock(\n", + " in_channels,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " block_type=block_type\n", + " )\n", + "\n", + " if in_channels != out_channels:\n", + " self.reduction = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=1,\n", + " stride=1,\n", + " padding=0\n", + " )\n", + "\n", + " def forward(self, x):\n", + " z = self.block(x)\n", + " if self.in_channels != self.out_channels:\n", + " z = self.reduction(z)\n", + " return z\n", + "\n", + "class SpatioTemporalEvolution(nn.Module):\n", + " def __init__(\n", + " self,\n", + " channel_in,\n", + " channel_hid,\n", + " N2,\n", + " input_resolution=None,\n", + " mlp_ratio=4.,\n", + " drop=0.0,\n", + " drop_path=0.1\n", + " ):\n", + " super(SpatioTemporalEvolution, self).__init__()\n", + " assert N2 >= 2 and mlp_ratio > 1\n", + " self.N2 = N2\n", + " dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)]\n", + "\n", + " evolution_layers = [SpatioTemporalEvolutionBlock(\n", + " channel_in,\n", + " channel_hid,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=dpr[0],\n", + " layer_i=0\n", + " )]\n", + "\n", + " for i in range(1, N2 - 1):\n", + " evolution_layers.append(SpatioTemporalEvolutionBlock(\n", + " channel_hid,\n", + " channel_hid,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=dpr[i],\n", + " layer_i=i\n", + " ))\n", + "\n", + " evolution_layers.append(SpatioTemporalEvolutionBlock(\n", + " channel_hid,\n", + " channel_in,\n", + " 
input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " layer_i=N2 - 1\n", + " ))\n", + " self.enc = nn.Sequential(*evolution_layers)\n", + "\n", + " def forward(self, x):\n", + " B, T, C, H, W = x.shape\n", + " x = x.reshape(B, T * C, H, W)\n", + " z = x\n", + " for i in range(self.N2):\n", + " z = self.enc[i](z)\n", + " y = z.reshape(B, T, C, H, W)\n", + " return y\n", + "\n", + "class BasicConv2d(nn.Module):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size,\n", + " stride,\n", + " padding,\n", + " transpose=False,\n", + " act_norm=False\n", + " ):\n", + " super(BasicConv2d, self).__init__()\n", + " self.act_norm = act_norm\n", + " if not transpose:\n", + " self.conv = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding\n", + " )\n", + " else:\n", + " self.conv = nn.ConvTranspose2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding,\n", + " output_padding=stride // 2\n", + " )\n", + " self.norm = nn.GroupNorm(2, out_channels)\n", + " self.act = nn.LeakyReLU(0.2, inplace=True)\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " if self.act_norm:\n", + " y = self.act(self.norm(y))\n", + " return y\n", + "\n", + "class ConvDynamicsLayer(nn.Module):\n", + " def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True):\n", + " super(ConvDynamicsLayer, self).__init__()\n", + " if stride == 1:\n", + " transpose = False\n", + " self.conv = BasicConv2d(\n", + " C_in,\n", + " C_out,\n", + " kernel_size=3,\n", + " stride=stride,\n", + " padding=1,\n", + " transpose=transpose,\n", + " act_norm=act_norm\n", + " )\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " return y\n", + "\n", + "class MultiGroupConv2d(nn.Module):\n", + " def __init__(\n", + " self,\n", + " 
in_channels,\n", + " out_channels,\n", + " kernel_size,\n", + " stride,\n", + " padding,\n", + " groups,\n", + " act_norm=False\n", + " ):\n", + " super(MultiGroupConv2d, self).__init__()\n", + " self.act_norm = act_norm\n", + " if in_channels % groups != 0:\n", + " groups = 1\n", + " self.conv = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding,\n", + " groups=groups\n", + " )\n", + " self.norm = nn.GroupNorm(groups, out_channels)\n", + " self.activate = nn.LeakyReLU(0.2, inplace=True)\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " if self.act_norm:\n", + " y = self.activate(self.norm(y))\n", + " return y\n", + "\n", + "\n", + "class AtmosphericEncoder(nn.Module):\n", + " def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers):\n", + " super(AtmosphericEncoder, self).__init__()\n", + " strides = stride_generator(num_spatial_layers)\n", + " self.enc = nn.Sequential(\n", + " ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]),\n", + " *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]]\n", + " )\n", + "\n", + " def forward(self, x):\n", + " enc1 = self.enc[0](x)\n", + " latent = enc1\n", + " for i in range(1, len(self.enc)):\n", + " latent = self.enc[i](latent)\n", + " return latent, enc1\n", + "\n", + "class AtmosphericDecoder(nn.Module):\n", + " def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers):\n", + " super(AtmosphericDecoder, self).__init__()\n", + " strides = stride_generator(num_spatial_layers, reverse=True)\n", + " self.dec = nn.Sequential(\n", + " *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]],\n", + " ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True)\n", + " )\n", + " self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1)\n", + "\n", + " def forward(self, hid, 
enc1=None):\n", + " for i in range(0, len(self.dec) - 1):\n", + " hid = self.dec[i](hid)\n", + " Y = self.dec[-1](torch.cat([hid, enc1], dim=1))\n", + " Y = self.readout(Y)\n", + " return Y\n", + "\n", + "class Triton(nn.Module):\n", + " def __init__(\n", + " self,\n", + " shape_in,\n", + " spatial_hidden_dim=64,\n", + " output_channels=4,\n", + " temporal_hidden_dim=128,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8,\n", + " in_time_seq_length=10,\n", + " out_time_seq_length=10\n", + " ):\n", + " super(Triton, self).__init__()\n", + " T, C, H, W = shape_in\n", + " self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2))\n", + " self.W1 = int(W / 2 ** (num_spatial_layers / 2))\n", + " self.output_dim = output_channels\n", + " self.input_time_seq_length = in_time_seq_length\n", + " self.output_time_seq_length = out_time_seq_length\n", + " \n", + " self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers)\n", + " self.temporal_evolution = SpatioTemporalEvolution(\n", + " T * spatial_hidden_dim,\n", + " temporal_hidden_dim,\n", + " num_temporal_layers,\n", + " input_resolution=[self.H1, self.W1],\n", + " mlp_ratio=4.0,\n", + " drop_path=0.1\n", + " )\n", + " self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers)\n", + "\n", + " def forward(self, input_state):\n", + " \"\"\"\n", + " 1. Reshape the input state to match the encoder's input requirements.\n", + " 2. Extract features using the Atmospheric Encoder and obtain skip connections.\n", + " 3. Perform spatio-temporal evolution on the encoded features.\n", + " 4. 
Decode the evolved features to generate the final output.\n", + " \"\"\"\n", + " batch_size, temporal_length, channels, height, width = input_state.shape\n", + " reshaped_input = input_state.view(batch_size * temporal_length, channels, height, width)\n", + " \n", + " encoded_features, skip_connection = self.atmospheric_encoder(reshaped_input)\n", + " _, encoded_channels, encoded_height, encoded_width = encoded_features.shape\n", + " encoded_features = encoded_features.view(batch_size, temporal_length, encoded_channels, encoded_height, encoded_width)\n", + " \n", + " temporal_bias = encoded_features\n", + " temporal_hidden = self.temporal_evolution(temporal_bias)\n", + " reshaped_hidden = temporal_hidden.view(batch_size * temporal_length, encoded_channels, encoded_height, encoded_width)\n", + "\n", + " decoded_output = self.atmospheric_decoder(reshaped_hidden, skip_connection)\n", + " final_output = decoded_output.view(batch_size, temporal_length, -1, height, width)\n", + " \n", + " return final_output\n", + "\n", + "\n", + "def count_parameters(model):\n", + " return sum(p.numel() for p in model.parameters() if p.requires_grad)\n", + "\n", + "if __name__ == '__main__':\n", + " inputs = torch.randn(1, 10, 7, 600, 600)\n", + " model = Triton(\n", + " shape_in=(10, 7, 600, 600),\n", + " spatial_hidden_dim=32,\n", + " output_channels=1,\n", + " temporal_hidden_dim=64,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8)\n", + " output = model(inputs)\n", + " print(output.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5f4805e1-43b4-44f0-b952-63db3d3f90e8", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", 
+ "pygments_lexer": "ipython3", + "version": "3.8.19" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/model/Triton_model.py b/Exp3_Kuroshio_forecasting/model/Triton_model.py new file mode 100644 index 0000000000000000000000000000000000000000..1606b32bdbc4e7117cf479f9e2ed7ba21c17eaa2 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/Triton_model.py @@ -0,0 +1,507 @@ +import torch +from torch import nn +import math +from timm.layers import DropPath, trunc_normal_ + +def stride_generator(N, reverse=False): + strides = [1, 2] * 10 + if reverse: + return list(reversed(strides[:N])) + else: + return strides[:N] + +class MLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(MLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class ConvMLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(ConvMLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super(Attention, self).__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + 
self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = qkv[0], qkv[1], qkv[2] + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class ConvBlock(nn.Module): + def __init__( + self, + dim, + num_heads=4, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(ConvBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = nn.BatchNorm2d(dim) + self.conv1 = nn.Conv2d(dim, dim, 1) + self.conv2 = nn.Conv2d(dim, dim, 1) + self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ConvMLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = ( + m.kernel_size[0] * m.kernel_size[1] * m.out_channels + ) + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + x = x + self.pos_embed(x) + x = x + self.drop_path( + self.conv2(self.attn(self.conv1(self.norm1(x)))) + ) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + +class SelfAttentionBlock(nn.Module): + def __init__( + self, + dim, + num_heads, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + init_value=1e-6, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(SelfAttentionBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop + ) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma_1', 'gamma_2'} + + def forward(self, x): + x = x + self.pos_embed(x) + B, N, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x))) + x = x.transpose(1, 2).reshape(B, N, H, W) + return x + +def UniformerSubBlock( + embed_dims, + mlp_ratio=4., + drop=0., + drop_path=0., + init_value=1e-6, + block_type='Conv' +): + assert block_type in ['Conv', 'MHSA'] + if block_type == 'Conv': + return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) + else: + return SelfAttentionBlock( + dim=embed_dims, + num_heads=8, + mlp_ratio=mlp_ratio, + qkv_bias=True, + drop=drop, + drop_path=drop_path, + init_value=init_value + ) + +class SpatioTemporalEvolutionBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + input_resolution=None, + mlp_ratio=8., + drop=0.0, + drop_path=0.0, + layer_i=0 + ): + super(SpatioTemporalEvolutionBlock, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv' + self.block = UniformerSubBlock( + in_channels, + 
mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + block_type=block_type + ) + + if in_channels != out_channels: + self.reduction = nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0 + ) + + def forward(self, x): + z = self.block(x) + if self.in_channels != self.out_channels: + z = self.reduction(z) + return z + +class SpatioTemporalEvolution(nn.Module): + def __init__( + self, + channel_in, + channel_hid, + N2, + input_resolution=None, + mlp_ratio=4., + drop=0.0, + drop_path=0.1 + ): + super(SpatioTemporalEvolution, self).__init__() + assert N2 >= 2 and mlp_ratio > 1 + self.N2 = N2 + dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)] + + evolution_layers = [SpatioTemporalEvolutionBlock( + channel_in, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[0], + layer_i=0 + )] + + for i in range(1, N2 - 1): + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[i], + layer_i=i + )) + + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_in, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + layer_i=N2 - 1 + )) + self.enc = nn.Sequential(*evolution_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T * C, H, W) + z = x + for i in range(self.N2): + z = self.enc[i](z) + y = z.reshape(B, T, C, H, W) + return y + +class BasicConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + transpose=False, + act_norm=False + ): + super(BasicConv2d, self).__init__() + self.act_norm = act_norm + if not transpose: + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding + ) + else: + self.conv = nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + 
padding=padding, + output_padding=stride // 2 + ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + +class ConvDynamicsLayer(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvDynamicsLayer, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d( + C_in, + C_out, + kernel_size=3, + stride=stride, + padding=1, + transpose=transpose, + act_norm=act_norm + ) + + def forward(self, x): + y = self.conv(x) + return y + +class MultiGroupConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + groups, + act_norm=False + ): + super(MultiGroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups + ) + self.norm = nn.GroupNorm(groups, out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class AtmosphericEncoder(nn.Module): + def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers): + super(AtmosphericEncoder, self).__init__() + strides = stride_generator(num_spatial_layers) + self.enc = nn.Sequential( + ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]), + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]] + ) + + def forward(self, x): + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1, len(self.enc)): + latent = self.enc[i](latent) + return latent, enc1 + +class AtmosphericDecoder(nn.Module): + def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers): + super(AtmosphericDecoder, self).__init__() + strides = 
stride_generator(num_spatial_layers, reverse=True) + self.dec = nn.Sequential( + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]], + ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0, len(self.dec) - 1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Triton(nn.Module): + def __init__( + self, + shape_in, + spatial_hidden_dim=64, + output_channels=4, + temporal_hidden_dim=128, + num_spatial_layers=4, + num_temporal_layers=8, + in_time_seq_length=10, + out_time_seq_length=10 + ): + super(Triton, self).__init__() + T, C, H, W = shape_in + self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2)) + self.W1 = int(W / 2 ** (num_spatial_layers / 2)) + self.output_dim = output_channels + self.input_time_seq_length = in_time_seq_length + self.output_time_seq_length = out_time_seq_length + + self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers) + self.temporal_evolution = SpatioTemporalEvolution( + T * spatial_hidden_dim, + temporal_hidden_dim, + num_temporal_layers, + input_resolution=[self.H1, self.W1], + mlp_ratio=4.0, + drop_path=0.1 + ) + self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers) + + def forward(self, input_state): + """ + 1. Reshape the input state to match the encoder's input requirements. + 2. Extract features using the Atmospheric Encoder and obtain skip connections. + 3. Perform spatio-temporal evolution on the encoded features. + 4. Decode the evolved features to generate the final output. 
+ """ + batch_size, temporal_length, channels, height, width = input_state.shape + reshaped_input = input_state.view(batch_size * temporal_length, channels, height, width) + + encoded_features, skip_connection = self.atmospheric_encoder(reshaped_input) + _, encoded_channels, encoded_height, encoded_width = encoded_features.shape + encoded_features = encoded_features.view(batch_size, temporal_length, encoded_channels, encoded_height, encoded_width) + + temporal_bias = encoded_features + temporal_hidden = self.temporal_evolution(temporal_bias) + reshaped_hidden = temporal_hidden.view(batch_size * temporal_length, encoded_channels, encoded_height, encoded_width) + + decoded_output = self.atmospheric_decoder(reshaped_hidden, skip_connection) + final_output = decoded_output.view(batch_size, temporal_length, -1, height, width) + + return final_output + + +def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + +if __name__ == '__main__': + inputs = torch.randn(1, 10, 2, 128, 128) + model = Triton( + shape_in=(10, 2, 128, 128), + spatial_hidden_dim=32, + output_channels=1, + temporal_hidden_dim=64, + num_spatial_layers=4, + num_temporal_layers=8) + output = model(inputs) + print(output.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model/Triton_model_step_finetune.py b/Exp3_Kuroshio_forecasting/model/Triton_model_step_finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..714e71977bbd7044d94f3bb7a26ef3f837e98f7b --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/Triton_model_step_finetune.py @@ -0,0 +1,534 @@ +import torch +from torch import nn +import math +from timm.layers import DropPath, trunc_normal_ + +def stride_generator(N, reverse=False): + strides = [1, 2] * 10 + if reverse: + return list(reversed(strides[:N])) + else: + return strides[:N] + +class MLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, 
drop=0.): + super(MLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class ConvMLP(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super(ConvMLP, self).__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super(Attention, self).__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = ( + self.qkv(x) + .reshape(B, N, 3, self.num_heads, C // self.num_heads) + .permute(2, 0, 3, 1, 4) + ) + q, k, v = qkv[0], qkv[1], qkv[2] + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class ConvBlock(nn.Module): + def __init__( + self, + dim, + num_heads=4, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + 
drop_path=0., + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(ConvBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = nn.BatchNorm2d(dim) + self.conv1 = nn.Conv2d(dim, dim, 1) + self.conv2 = nn.Conv2d(dim, dim, 1) + self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ConvMLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = ( + m.kernel_size[0] * m.kernel_size[1] * m.out_channels + ) + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + x = x + self.pos_embed(x) + x = x + self.drop_path( + self.conv2(self.attn(self.conv1(self.norm1(x)))) + ) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + +class SelfAttentionBlock(nn.Module): + def __init__( + self, + dim, + num_heads, + mlp_ratio=4., + qkv_bias=False, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + init_value=1e-6, + act_layer=nn.GELU, + norm_layer=nn.LayerNorm + ): + super(SelfAttentionBlock, self).__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop, + proj_drop=drop + ) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MLP( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop + ) + self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma_1', 'gamma_2'} + + def forward(self, x): + x = x + self.pos_embed(x) + B, N, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x))) + x = x.transpose(1, 2).reshape(B, N, H, W) + return x + +def UniformerSubBlock( + embed_dims, + mlp_ratio=4., + drop=0., + drop_path=0., + init_value=1e-6, + block_type='Conv' +): + assert block_type in ['Conv', 'MHSA'] + if block_type == 'Conv': + # return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) + return SelfAttentionBlock( + dim=embed_dims, + num_heads=8, + mlp_ratio=mlp_ratio, + qkv_bias=True, + drop=drop, + drop_path=drop_path, + init_value=init_value + ) + else: + return SelfAttentionBlock( + dim=embed_dims, + num_heads=8, + mlp_ratio=mlp_ratio, + qkv_bias=True, + drop=drop, + drop_path=drop_path, + init_value=init_value + ) + +class SpatioTemporalEvolutionBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + input_resolution=None, + mlp_ratio=8., + drop=0.0, + drop_path=0.0, + layer_i=0 + ): + super(SpatioTemporalEvolutionBlock, self).__init__() + self.in_channels = in_channels 
+ self.out_channels = out_channels + block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv' + self.block = UniformerSubBlock( + in_channels, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + block_type=block_type + ) + + if in_channels != out_channels: + self.reduction = nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0 + ) + + def forward(self, x): + z = self.block(x) + if self.in_channels != self.out_channels: + z = self.reduction(z) + return z + +class SpatioTemporalEvolution(nn.Module): + def __init__( + self, + channel_in, + channel_hid, + N2, + input_resolution=None, + mlp_ratio=4., + drop=0.0, + drop_path=0.1 + ): + super(SpatioTemporalEvolution, self).__init__() + assert N2 >= 2 and mlp_ratio > 1 + self.N2 = N2 + dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)] + + evolution_layers = [SpatioTemporalEvolutionBlock( + channel_in, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[0], + layer_i=0 + )] + + for i in range(1, N2 - 1): + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_hid, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=dpr[i], + layer_i=i + )) + + evolution_layers.append(SpatioTemporalEvolutionBlock( + channel_hid, + channel_in, + input_resolution, + mlp_ratio=mlp_ratio, + drop=drop, + drop_path=drop_path, + layer_i=N2 - 1 + )) + self.enc = nn.Sequential(*evolution_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T * C, H, W) + z = x + for i in range(self.N2): + z = self.enc[i](z) + y = z.reshape(B, T, C, H, W) + return y + +class BasicConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + transpose=False, + act_norm=False + ): + super(BasicConv2d, self).__init__() + self.act_norm = act_norm + if not transpose: + self.conv = nn.Conv2d( + in_channels, + out_channels, + 
kernel_size=kernel_size, + stride=stride, + padding=padding + ) + else: + self.conv = nn.ConvTranspose2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + output_padding=stride // 2 + ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + +class ConvDynamicsLayer(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvDynamicsLayer, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d( + C_in, + C_out, + kernel_size=3, + stride=stride, + padding=1, + transpose=transpose, + act_norm=act_norm + ) + + def forward(self, x): + y = self.conv(x) + return y + +class MultiGroupConv2d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride, + padding, + groups, + act_norm=False + ): + super(MultiGroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups + ) + self.norm = nn.GroupNorm(groups, out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class AtmosphericEncoder(nn.Module): + def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers): + super(AtmosphericEncoder, self).__init__() + strides = stride_generator(num_spatial_layers) + self.enc = nn.Sequential( + ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]), + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]] + ) + + def forward(self, x): + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1, len(self.enc)): + latent = self.enc[i](latent) + return latent, enc1 + 
+class AtmosphericDecoder(nn.Module): + def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers): + super(AtmosphericDecoder, self).__init__() + strides = stride_generator(num_spatial_layers, reverse=True) + self.dec = nn.Sequential( + *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]], + ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0, len(self.dec) - 1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Triton_finetune(nn.Module): + def __init__( + self, + shape_in, + spatial_hidden_dim=64, + output_channels=4, + temporal_hidden_dim=128, + num_spatial_layers=4, + num_temporal_layers=8, + in_time_seq_length=10, + out_time_seq_length=40 + ): + super(Triton_finetune, self).__init__() + T, C, H, W = shape_in + self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2)) + self.W1 = int(W / 2 ** (num_spatial_layers / 2)) + self.output_dim = output_channels + self.input_time_seq_length = in_time_seq_length + self.output_time_seq_length = out_time_seq_length + + self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers) + self.temporal_evolution = SpatioTemporalEvolution( + T * spatial_hidden_dim, + temporal_hidden_dim, + num_temporal_layers, + input_resolution=[self.H1, self.W1], + mlp_ratio=4.0, + drop_path=0.1 + ) + self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers) + + def _single_forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + embed, skip = self.atmospheric_encoder(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + temporal_bias = z + temporal_hidden = 
self.temporal_evolution(temporal_bias) + hid = temporal_hidden.view(B*T, C_, H_, W_) + + Y = self.atmospheric_decoder(hid, skip) + return Y.reshape(B, T, -1, H, W) + + def forward(self, input_sequence): + + base_pred = self._single_forward(input_sequence) + + if self.output_time_seq_length == self.input_time_seq_length: + return base_pred + if self.output_time_seq_length < self.input_time_seq_length: + return base_pred[:, :self.output_time_seq_length] + + predictions = [base_pred] + d = self.output_time_seq_length // self.input_time_seq_length + m = self.output_time_seq_length % self.input_time_seq_length + + for _ in range(1, d): + new_pred = self._single_forward(predictions[-1]) + predictions.append(new_pred) + + if m > 0: + final_pred = self._single_forward(predictions[-1])[:, :m] + predictions.append(final_pred) + + return torch.cat(predictions, dim=1) + + +def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + +if __name__ == '__main__': + inputs = torch.randn(1, 10, 2, 256, 256) + model = Triton_finetune( + shape_in=(10, 2, 256, 256), + spatial_hidden_dim=256, + output_channels=2, + temporal_hidden_dim=512, + num_spatial_layers=4, + num_temporal_layers=8, + in_time_seq_length=10, + out_time_seq_length=20) + output = model(inputs) + print(inputs.shape) + print(output.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model.cpython-310.pyc b/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b7ed3b23b5479f3367c0ca74afc379008771e81 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model.cpython-310.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..8f180065f6f28290661633e96019b17daff29fa7 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model_step_finetune.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model_step_finetune.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b71f05909a83b32a9683d79a5dcee6b0612694db Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/__pycache__/Triton_model_step_finetune.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/__pycache__/nmo_fourier.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/__pycache__/nmo_fourier.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8b2022b1c6f6e4f1f0a6f24e1b59f3e226cf01cb Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/__pycache__/nmo_fourier.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/Untitled-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/Untitled-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..363fcab7ed6e9634e198cf5555ceb88932c9a245 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/Untitled-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/evolution-checkpoint.py b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/evolution-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..a5f73078401d297a03ae87c42dd605aae9a7f661 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/evolution-checkpoint.py @@ -0,0 +1,678 @@ +import torch +from torch import nn +import math +import torch +import torch.nn as nn 
+from timm.layers import DropPath, trunc_normal_ +from timm.models.convnext import ConvNeXtBlock +from timm.models.mlp_mixer import MixerBlock +from timm.models.swin_transformer import SwinTransformerBlock, window_partition, window_reverse +from timm.models.vision_transformer import Block as ViTBlock + +from model.modules_api.layers import (HorBlock, ChannelAggregationFFN, MultiOrderGatedAggregation, + PoolFormerBlock, CBlock, SABlock, MixMlp, VANBlock) + + +class BasicConv2d(nn.Module): + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=0, + dilation=1, + upsampling=False, + act_norm=False, + act_inplace=True): + super(BasicConv2d, self).__init__() + self.act_norm = act_norm + if upsampling is True: + self.conv = nn.Sequential(*[ + nn.Conv2d(in_channels, out_channels*4, kernel_size=kernel_size, + stride=1, padding=padding, dilation=dilation), + nn.PixelShuffle(2) + ]) + else: + self.conv = nn.Conv2d( + in_channels, out_channels, kernel_size=kernel_size, + stride=stride, padding=padding, dilation=dilation) + + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.SiLU(inplace=act_inplace) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.Conv2d)): + trunc_normal_(m.weight, std=.02) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + def __init__(self, + C_in, + C_out, + kernel_size=3, + downsampling=False, + upsampling=False, + act_norm=True, + act_inplace=True): + super(ConvSC, self).__init__() + + stride = 2 if downsampling is True else 1 + padding = (kernel_size - stride + 1) // 2 + + self.conv = BasicConv2d(C_in, C_out, kernel_size=kernel_size, stride=stride, + upsampling=upsampling, padding=padding, + act_norm=act_norm, act_inplace=act_inplace) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + def __init__(self, + 
in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=0, + groups=1, + act_norm=False, + act_inplace=True): + super(GroupConv2d, self).__init__() + self.act_norm=act_norm + if in_channels % groups != 0: + groups=1 + self.conv = nn.Conv2d( + in_channels, out_channels, kernel_size=kernel_size, + stride=stride, padding=padding, groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=act_inplace) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class gInception_ST(nn.Module): + """A IncepU block for SimVP""" + + def __init__(self, C_in, C_hid, C_out, incep_ker = [3,5,7,11], groups = 8): + super(gInception_ST, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d( + C_hid, C_out, kernel_size=ker, stride=1, + padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y + + +class AttentionModule(nn.Module): + """Large Kernel Attention for SimVP""" + + def __init__(self, dim, kernel_size, dilation=3): + super().__init__() + d_k = 2 * dilation - 1 + d_p = (d_k - 1) // 2 + dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1) + dd_p = (dilation * (dd_k - 1) // 2) + + self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim) + self.conv_spatial = nn.Conv2d( + dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation) + self.conv1 = nn.Conv2d(dim, 2*dim, 1) + + def forward(self, x): + u = x.clone() + attn = self.conv0(x) # depth-wise conv + attn = self.conv_spatial(attn) # depth-wise dilation convolution + + f_g = self.conv1(attn) + split_dim = f_g.shape[1] // 2 + f_x, g_x = torch.split(f_g, split_dim, dim=1) + return torch.sigmoid(g_x) * f_x + + +class SpatialAttention(nn.Module): 
class SpatialAttention(nn.Module):
    """A Spatial Attention block for SimVP"""

    def __init__(self, d_model, kernel_size=21, attn_shortcut=True):
        super().__init__()
        self.proj_1 = nn.Conv2d(d_model, d_model, 1)   # 1x1 conv in
        self.activation = nn.GELU()
        self.spatial_gating_unit = AttentionModule(d_model, kernel_size)
        self.proj_2 = nn.Conv2d(d_model, d_model, 1)   # 1x1 conv out
        self.attn_shortcut = attn_shortcut

    def forward(self, x):
        # proj -> GELU -> LKA gating -> proj, with optional residual.
        shortcut = x.clone() if self.attn_shortcut else None
        out = self.proj_2(self.spatial_gating_unit(self.activation(self.proj_1(x))))
        if shortcut is not None:
            out = out + shortcut
        return out


class GASubBlock(nn.Module):
    """A GABlock (gSTA) for SimVP"""

    def __init__(self, dim, kernel_size=21, mlp_ratio=4.,
                 drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU):
        super().__init__()
        self.norm1 = nn.BatchNorm2d(dim)
        self.attn = SpatialAttention(dim, kernel_size)
        self.drop_path = nn.Identity() if drop_path <= 0. else DropPath(drop_path)

        self.norm2 = nn.BatchNorm2d(dim)
        self.mlp = MixMlp(in_features=dim, hidden_features=int(dim * mlp_ratio),
                          act_layer=act_layer, drop=drop)

        # Learnable per-channel residual scaling (layer scale).
        self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)
        self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def forward(self, x):
        scale_1 = self.layer_scale_1.unsqueeze(-1).unsqueeze(-1)
        scale_2 = self.layer_scale_2.unsqueeze(-1).unsqueeze(-1)
        x = x + self.drop_path(scale_1 * self.attn(self.norm1(x)))
        x = x + self.drop_path(scale_2 * self.mlp(self.norm2(x)))
        return x


class ConvMixerSubBlock(nn.Module):
    """A block of ConvMixer."""

    def __init__(self, dim, kernel_size=9, activation=nn.GELU):
        super().__init__()
        # Spatial mixing: depth-wise conv with a residual connection.
        self.conv_dw = nn.Conv2d(dim, dim, kernel_size, groups=dim, padding="same")
        self.act_1 = activation()
        self.norm_1 = nn.BatchNorm2d(dim)
        # Channel mixing: point-wise conv.
        self.conv_pw = nn.Conv2d(dim, dim, kernel_size=1)
        self.act_2 = activation()
        self.norm_2 = nn.BatchNorm2d(dim)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.BatchNorm2d):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return dict()

    def forward(self, x):
        x = x + self.norm_1(self.act_1(self.conv_dw(x)))   # residual spatial mixing
        return self.norm_2(self.act_2(self.conv_pw(x)))    # channel mixing
class HorNetSubBlock(HorBlock):
    """A block of HorNet."""

    def __init__(self, dim, mlp_ratio=4., drop_path=0.1, init_value=1e-6):
        super().__init__(dim, mlp_ratio=mlp_ratio, drop_path=drop_path, init_value=init_value)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        # gamma1/gamma2 are HorBlock's layer-scale parameters.
        return {'gamma1', 'gamma2'}

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()


class MLPMixerSubBlock(MixerBlock):
    """A block of MLP-Mixer, adapted to (B, C, H, W) feature maps."""

    def __init__(self, dim, input_resolution=None, mlp_ratio=4., drop=0., drop_path=0.1):
        # Token count equals the number of spatial positions.
        seq_len = input_resolution[0] * input_resolution[1]
        super().__init__(dim, seq_len=seq_len,
                         mlp_ratio=(0.5, mlp_ratio), drop_path=drop_path, drop=drop)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return dict()

    def forward(self, x):
        B, C, H, W = x.shape
        tokens = x.flatten(2).transpose(1, 2)  # (B, HW, C)
        tokens = tokens + self.drop_path(
            self.mlp_tokens(self.norm1(tokens).transpose(1, 2)).transpose(1, 2))
        tokens = tokens + self.drop_path(self.mlp_channels(self.norm2(tokens)))
        return tokens.reshape(B, H, W, C).permute(0, 3, 1, 2)


class MogaSubBlock(nn.Module):
    """A block of MogaNet."""

    def __init__(self, embed_dims, mlp_ratio=4., drop_rate=0., drop_path_rate=0., init_value=1e-5,
                 attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4]):
        super(MogaSubBlock, self).__init__()
        self.out_channels = embed_dims
        # Spatial aggregation branch.
        self.norm1 = nn.BatchNorm2d(embed_dims)
        self.attn = MultiOrderGatedAggregation(
            embed_dims, attn_dw_dilation=attn_dw_dilation, attn_channel_split=attn_channel_split)
        self.drop_path = nn.Identity() if drop_path_rate <= 0. else DropPath(drop_path_rate)
        # Channel-mixing branch.
        self.norm2 = nn.BatchNorm2d(embed_dims)
        self.mlp = ChannelAggregationFFN(
            embed_dims=embed_dims, mlp_hidden_dims=int(embed_dims * mlp_ratio), ffn_drop=drop_rate)
        # Layer-scale parameters for both residual paths.
        self.layer_scale_1 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)
        self.layer_scale_2 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2', 'sigma'}

    def forward(self, x):
        x = x + self.drop_path(self.layer_scale_1 * self.attn(self.norm1(x)))
        x = x + self.drop_path(self.layer_scale_2 * self.mlp(self.norm2(x)))
        return x
class SwinSubBlock(SwinTransformerBlock):
    """A block of Swin Transformer."""

    def __init__(self, dim, input_resolution=None, layer_i=0, mlp_ratio=4., drop=0., drop_path=0.1):
        # Choose a window size compatible with the resolution, capped at 8;
        # odd layers use shifted windows.
        window_size = 7 if input_resolution[0] % 7 == 0 else max(4, input_resolution[0] // 16)
        window_size = min(8, window_size)
        shift_size = window_size // 2 if layer_i % 2 else 0
        super().__init__(dim, input_resolution, num_heads=8, window_size=window_size,
                         shift_size=shift_size, mlp_ratio=mlp_ratio,
                         drop_path=drop_path, attn_drop=drop, proj_drop=drop, qkv_bias=True)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {}

    def forward(self, x):
        B, C, H, W = x.shape
        tokens = x.flatten(2).transpose(1, 2)
        tokens = self.norm1(tokens)
        tokens = tokens.view(B, H, W, C)
        tokens = super().forward(tokens)

        return tokens.reshape(B, H, W, C).permute(0, 3, 1, 2)


def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0.,
                      init_value=1e-6, block_type='Conv'):
    """Build a block of Uniformer.

    NOTE(review): the original code's two branches constructed an identical
    SABlock, so `block_type` only acts as a validity check here — confirm
    whether the 'Conv' branch was meant to build a CBlock instead.
    """
    assert block_type in ['Conv', 'MHSA']
    return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True,
                   drop=drop, drop_path=drop_path, init_value=init_value)


class VANSubBlock(VANBlock):
    """A block of VAN."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0., init_value=1e-2, act_layer=nn.GELU):
        super().__init__(dim=dim, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path,
                         init_value=init_value, act_layer=act_layer)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()
class TemporalAttention(nn.Module):
    """A Temporal Attention block for Temporal Attention Unit"""

    def __init__(self, d_model, kernel_size=21, attn_shortcut=True):
        super().__init__()
        self.proj_1 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv in
        self.activation = nn.GELU()
        self.spatial_gating_unit = TemporalAttentionModule(d_model, kernel_size)
        self.proj_2 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv out
        self.attn_shortcut = attn_shortcut

    def forward(self, x):
        # proj -> GELU -> temporal attention -> proj, with optional residual.
        shortcut = x.clone() if self.attn_shortcut else None
        x = self.proj_2(self.spatial_gating_unit(self.activation(self.proj_1(x))))
        return x + shortcut if shortcut is not None else x


class TemporalAttentionModule(nn.Module):
    """Large-kernel attention augmented with an SE-style channel gate (TAU)."""

    def __init__(self, dim, kernel_size, dilation=3, reduction=16):
        super().__init__()
        # Decompose the large kernel: depth-wise conv + depth-wise dilated conv.
        d_k = 2 * dilation - 1
        d_p = (d_k - 1) // 2
        dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1)
        dd_p = (dilation * (dd_k - 1) // 2)

        self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim)
        self.conv_spatial = nn.Conv2d(
            dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation)
        self.conv1 = nn.Conv2d(dim, dim, 1)

        # Squeeze-and-excitation branch (channel attention), floor of 4 units.
        self.reduction = max(dim // reduction, 4)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(dim, dim // self.reduction, bias=False),  # squeeze
            nn.ReLU(True),
            nn.Linear(dim // self.reduction, dim, bias=False),  # expand
            nn.Sigmoid()
        )

    def forward(self, x):
        u = x.clone()
        attn = self.conv0(x)            # depth-wise conv
        attn = self.conv_spatial(attn)  # depth-wise dilation convolution
        f_x = self.conv1(attn)          # 1x1 conv
        # SE gate computed from the raw input.
        b, c, _, _ = x.size()
        se_atten = self.avg_pool(x).view(b, c)
        se_atten = self.fc(se_atten).view(b, c, 1, 1)
        # channel gate * spatial attention * input
        return se_atten * f_x * u


class TAUSubBlock(GASubBlock):
    """A TAUBlock (tau) for Temporal Attention Unit.

    Identical to GASubBlock except the spatial attention is replaced with
    TemporalAttention.
    """

    def __init__(self, dim, kernel_size=21, mlp_ratio=4.,
                 drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU):
        super().__init__(dim=dim, kernel_size=kernel_size, mlp_ratio=mlp_ratio,
                         drop=drop, drop_path=drop_path, init_value=init_value,
                         act_layer=act_layer)
        self.attn = TemporalAttention(dim, kernel_size)


class Evo_Block(nn.Module):
    """One evolution layer: a model_type-selected sub-block plus an optional
    1x1 channel projection when in/out channel counts differ."""

    def __init__(self, in_channels, out_channels, input_resolution=None, model_type=None,
                 mlp_ratio=8., drop=0.0, drop_path=0.0, layer_i=0):
        super(Evo_Block, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        model_type = model_type.lower() if model_type is not None else 'gsta'

        if model_type == 'gsta':
            self.block = GASubBlock(
                in_channels, kernel_size=21, mlp_ratio=mlp_ratio,
                drop=drop, drop_path=drop_path, act_layer=nn.GELU)
        elif model_type == 'convmixer':
            self.block = ConvMixerSubBlock(in_channels, kernel_size=11, activation=nn.GELU)
        elif model_type == 'convnext':
            self.block = ConvNeXtSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        elif model_type == 'uniformer':
            # Later same-width layers use self-attention, the rest conv blocks.
            block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv'
            self.block = UniformerSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop,
                drop_path=drop_path, block_type=block_type)
        elif model_type == 'vit':
            self.block = ViTSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        elif model_type == 'poolformer':
            self.block = PoolFormerSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        else:
            # FIX: was `assert False and "Error"`, which evaluates to
            # `assert False` — raising a message-less AssertionError and
            # disappearing entirely under `python -O`.  Raise explicitly
            # (AssertionError kept so existing callers' handlers still match).
            raise AssertionError(f"unsupported model_type: {model_type!r}")

        if in_channels != out_channels:
            self.reduction = nn.Conv2d(
                in_channels, out_channels, kernel_size=1, stride=1, padding=0)

    def forward(self, x):
        z = self.block(x)
        return z if self.in_channels == self.out_channels else self.reduction(z)


class Spatio_temporal_evolution(nn.Module):
    """Mid-network translator: stacks N2 Evo_Blocks over the fused (T*C)
    channel dimension of a (B, T, C, H, W) sequence."""

    def __init__(self, channel_in, channel_hid, N2,
                 input_resolution=None, model_type=None,
                 mlp_ratio=4., drop=0.0, drop_path=0.1):
        super(Spatio_temporal_evolution, self).__init__()
        assert N2 >= 2 and mlp_ratio > 1
        self.N2 = N2
        # Linearly increasing stochastic-depth rates across the stack.
        dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)]

        # down-sampling (channel_in -> channel_hid)
        enc_layers = [Evo_Block(
            channel_in, channel_hid, input_resolution, model_type,
            mlp_ratio, drop, drop_path=dpr[0], layer_i=0)]
        # state-stacking (hidden layers)
        for i in range(1, N2 - 1):
            enc_layers.append(Evo_Block(
                channel_hid, channel_hid, input_resolution, model_type,
                mlp_ratio, drop, drop_path=dpr[i], layer_i=i))
        # up-sampling (channel_hid -> channel_in)
        enc_layers.append(Evo_Block(
            channel_hid, channel_in, input_resolution, model_type,
            mlp_ratio, drop, drop_path=drop_path, layer_i=N2 - 1))
        self.enc = nn.Sequential(*enc_layers)

    def forward(self, x):
        B, T, C, H, W = x.shape
        # Fold time into channels so the 2D blocks mix spatio-temporal features.
        z = x.reshape(B, T * C, H, W)
        for i in range(self.N2):
            z = self.enc[i](z)
        return z.reshape(B, T, C, H, W)


if __name__ == '__main__':
    # Smoke test: forecast-shaped input (B=1, T=1, C=69, H=180, W=360).
    x = torch.randn([1, 1, 69, 180, 360])
    print("input shape:", x.shape)

    model = Spatio_temporal_evolution(channel_in=69,
                                      channel_hid=256,
                                      N2=4,
                                      input_resolution=[64, 64],
                                      model_type='poolformer',
                                      mlp_ratio=4.,
                                      drop=0.0,
                                      drop_path=0.1)
    print(model)
    output = model(x)
    print("output shape:", output.shape)
import math
import torch
import torch.nn as nn

from timm.layers import DropPath, trunc_normal_
from timm.models.convnext import ConvNeXtBlock
from timm.models.mlp_mixer import MixerBlock
from timm.models.swin_transformer import SwinTransformerBlock, window_partition, window_reverse
from timm.models.vision_transformer import Block as ViTBlock

from model.modules_api.layers import (HorBlock, ChannelAggregationFFN, MultiOrderGatedAggregation,
                                      PoolFormerBlock, CBlock, SABlock, MixMlp, VANBlock)


class BasicConv2d(nn.Module):
    """Conv2d (optionally PixelShuffle 2x upsampling) with optional
    GroupNorm + SiLU activation."""

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1,
                 padding=0, dilation=1, upsampling=False, act_norm=False,
                 act_inplace=True):
        super(BasicConv2d, self).__init__()
        self.act_norm = act_norm
        if upsampling is True:
            # 2x upsampling: produce 4x channels, then PixelShuffle(2).
            self.conv = nn.Sequential(*[
                nn.Conv2d(in_channels, out_channels * 4, kernel_size=kernel_size,
                          stride=1, padding=padding, dilation=dilation),
                nn.PixelShuffle(2)
            ])
        else:
            self.conv = nn.Conv2d(
                in_channels, out_channels, kernel_size=kernel_size,
                stride=stride, padding=padding, dilation=dilation)

        self.norm = nn.GroupNorm(2, out_channels)
        self.act = nn.SiLU(inplace=act_inplace)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, (nn.Conv2d)):
            trunc_normal_(m.weight, std=.02)
            nn.init.constant_(m.bias, 0)

    def forward(self, x):
        y = self.conv(x)
        return self.act(self.norm(y)) if self.act_norm else y


class ConvSC(nn.Module):
    """BasicConv2d wrapper that derives stride/padding for optional
    down-/up-sampling."""

    def __init__(self, C_in, C_out, kernel_size=3, downsampling=False,
                 upsampling=False, act_norm=True, act_inplace=True):
        super(ConvSC, self).__init__()
        stride = 2 if downsampling is True else 1
        padding = (kernel_size - stride + 1) // 2

        self.conv = BasicConv2d(C_in, C_out, kernel_size=kernel_size, stride=stride,
                                upsampling=upsampling, padding=padding,
                                act_norm=act_norm, act_inplace=act_inplace)

    def forward(self, x):
        return self.conv(x)
class GroupConv2d(nn.Module):
    """Grouped Conv2d with optional GroupNorm + LeakyReLU.

    Falls back to groups=1 when in_channels is not divisible by groups.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1,
                 padding=0, groups=1, act_norm=False, act_inplace=True):
        super(GroupConv2d, self).__init__()
        self.act_norm = act_norm
        if in_channels % groups != 0:
            groups = 1  # grouped conv requires channel divisibility
        self.conv = nn.Conv2d(
            in_channels, out_channels, kernel_size=kernel_size,
            stride=stride, padding=padding, groups=groups)
        self.norm = nn.GroupNorm(groups, out_channels)
        self.activate = nn.LeakyReLU(0.2, inplace=act_inplace)

    def forward(self, x):
        y = self.conv(x)
        return self.activate(self.norm(y)) if self.act_norm else y


class gInception_ST(nn.Module):
    """A IncepU block for SimVP"""

    def __init__(self, C_in, C_hid, C_out, incep_ker=[3, 5, 7, 11], groups=8):
        super(gInception_ST, self).__init__()
        self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0)

        # Parallel grouped-conv branches, one per kernel size.
        branches = [GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1,
                                padding=ker // 2, groups=groups, act_norm=True)
                    for ker in incep_ker]
        self.layers = nn.Sequential(*branches)

    def forward(self, x):
        x = self.conv1(x)
        # The branches run in parallel; their outputs are summed.
        out = 0
        for branch in self.layers:
            out = out + branch(x)
        return out
class SpatialAttention(nn.Module):
    """A Spatial Attention block for SimVP"""

    def __init__(self, d_model, kernel_size=21, attn_shortcut=True):
        super().__init__()
        self.proj_1 = nn.Conv2d(d_model, d_model, 1)  # channel projection in
        self.activation = nn.GELU()
        self.spatial_gating_unit = AttentionModule(d_model, kernel_size)
        self.proj_2 = nn.Conv2d(d_model, d_model, 1)  # channel projection out
        self.attn_shortcut = attn_shortcut

    def forward(self, x):
        residual = x.clone() if self.attn_shortcut else None
        out = self.proj_1(x)
        out = self.activation(out)
        out = self.spatial_gating_unit(out)
        out = self.proj_2(out)
        if residual is not None:
            out = out + residual
        return out


class GASubBlock(nn.Module):
    """A GABlock (gSTA) for SimVP"""

    def __init__(self, dim, kernel_size=21, mlp_ratio=4.,
                 drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU):
        super().__init__()
        self.norm1 = nn.BatchNorm2d(dim)
        self.attn = SpatialAttention(dim, kernel_size)
        self.drop_path = nn.Identity() if drop_path <= 0. else DropPath(drop_path)

        self.norm2 = nn.BatchNorm2d(dim)
        self.mlp = MixMlp(in_features=dim, hidden_features=int(dim * mlp_ratio),
                          act_layer=act_layer, drop=drop)

        # Learnable per-channel residual scaling (layer scale).
        self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)
        self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def forward(self, x):
        scale_1 = self.layer_scale_1.unsqueeze(-1).unsqueeze(-1)
        scale_2 = self.layer_scale_2.unsqueeze(-1).unsqueeze(-1)
        x = x + self.drop_path(scale_1 * self.attn(self.norm1(x)))
        x = x + self.drop_path(scale_2 * self.mlp(self.norm2(x)))
        return x
class ConvNeXtSubBlock(ConvNeXtBlock):
    """A block of ConvNeXt."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1):
        super().__init__(dim, mlp_ratio=mlp_ratio,
                         drop_path=drop_path, ls_init_value=1e-6, conv_mlp=True)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'gamma'}

    def forward(self, x):
        residual = self.gamma.reshape(1, -1, 1, 1) * self.mlp(self.norm(self.conv_dw(x)))
        return x + self.drop_path(residual)


class HorNetSubBlock(HorBlock):
    """A block of HorNet."""

    def __init__(self, dim, mlp_ratio=4., drop_path=0.1, init_value=1e-6):
        super().__init__(dim, mlp_ratio=mlp_ratio, drop_path=drop_path, init_value=init_value)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        # gamma1/gamma2 are HorBlock's layer-scale parameters.
        return {'gamma1', 'gamma2'}

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()


class MLPMixerSubBlock(MixerBlock):
    """A block of MLP-Mixer, adapted to (B, C, H, W) feature maps."""

    def __init__(self, dim, input_resolution=None, mlp_ratio=4., drop=0., drop_path=0.1):
        # Token count equals the number of spatial positions.
        seq_len = input_resolution[0] * input_resolution[1]
        super().__init__(dim, seq_len=seq_len,
                         mlp_ratio=(0.5, mlp_ratio), drop_path=drop_path, drop=drop)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return dict()

    def forward(self, x):
        B, C, H, W = x.shape
        tokens = x.flatten(2).transpose(1, 2)  # (B, HW, C)
        tokens = tokens + self.drop_path(
            self.mlp_tokens(self.norm1(tokens).transpose(1, 2)).transpose(1, 2))
        tokens = tokens + self.drop_path(self.mlp_channels(self.norm2(tokens)))
        return tokens.reshape(B, H, W, C).permute(0, 3, 1, 2)


class MogaSubBlock(nn.Module):
    """A block of MogaNet."""

    def __init__(self, embed_dims, mlp_ratio=4., drop_rate=0., drop_path_rate=0., init_value=1e-5,
                 attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4]):
        super(MogaSubBlock, self).__init__()
        self.out_channels = embed_dims
        # Spatial aggregation branch.
        self.norm1 = nn.BatchNorm2d(embed_dims)
        self.attn = MultiOrderGatedAggregation(
            embed_dims, attn_dw_dilation=attn_dw_dilation, attn_channel_split=attn_channel_split)
        self.drop_path = nn.Identity() if drop_path_rate <= 0. else DropPath(drop_path_rate)
        # Channel-mixing branch.
        self.norm2 = nn.BatchNorm2d(embed_dims)
        self.mlp = ChannelAggregationFFN(
            embed_dims=embed_dims, mlp_hidden_dims=int(embed_dims * mlp_ratio), ffn_drop=drop_rate)
        # Layer-scale parameters for both residual paths.
        self.layer_scale_1 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)
        self.layer_scale_2 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2', 'sigma'}

    def forward(self, x):
        x = x + self.drop_path(self.layer_scale_1 * self.attn(self.norm1(x)))
        x = x + self.drop_path(self.layer_scale_2 * self.mlp(self.norm2(x)))
        return x


class PoolFormerSubBlock(PoolFormerBlock):
    """A block of PoolFormer."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1):
        super().__init__(dim, pool_size=3, mlp_ratio=mlp_ratio,
                         drop_path=drop_path, drop=drop, init_value=1e-5)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0.,
                      init_value=1e-6, block_type='Conv'):
    """Build a block of Uniformer.

    NOTE(review): an earlier (commented-out) revision returned a CBlock for
    block_type == 'Conv'; in the current code BOTH branches built the same
    SABlock, so block_type is only validated, not dispatched on — confirm
    this is intended.
    """
    assert block_type in ['Conv', 'MHSA']
    return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True,
                   drop=drop, drop_path=drop_path, init_value=init_value)


class VANSubBlock(VANBlock):
    """A block of VAN."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0., init_value=1e-2, act_layer=nn.GELU):
        super().__init__(dim=dim, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path,
                         init_value=init_value, act_layer=act_layer)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = (m.kernel_size[0] * m.kernel_size[1] * m.out_channels) // m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()
            else nn.Identity()
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {}

    def forward(self, x):
        # flatten (B, C, H, W) into the (B, N, C) token layout used by ViT blocks
        B, C, H, W = x.shape
        x = x.flatten(2).transpose(1, 2)
        x = x + self.drop_path(self.attn(self.norm1(x)))
        x = x + self.drop_path(self.mlp(self.norm2(x)))
        # tokens are in H*W raster order, so reshape back to a spatial map
        return x.reshape(B, H, W, C).permute(0, 3, 1, 2)


class TemporalAttention(nn.Module):
    """A Temporal Attention block for Temporal Attention Unit"""

    def __init__(self, d_model, kernel_size=21, attn_shortcut=True):
        super().__init__()

        self.proj_1 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv
        self.activation = nn.GELU()  # GELU
        self.spatial_gating_unit = TemporalAttentionModule(d_model, kernel_size)
        self.proj_2 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv
        self.attn_shortcut = attn_shortcut

    def forward(self, x):
        # project -> gate -> project, with an optional residual shortcut
        if self.attn_shortcut:
            shortcut = x.clone()
        x = self.proj_1(x)
        x = self.activation(x)
        x = self.spatial_gating_unit(x)
        x = self.proj_2(x)
        if self.attn_shortcut:
            x = x + shortcut
        return x


class TemporalAttentionModule(nn.Module):
    """Large Kernel Attention for SimVP"""

    def __init__(self, dim, kernel_size, dilation=3, reduction=16):
        super().__init__()
        # large-kernel decomposition: a small depth-wise conv (d_k) followed by
        # a dilated depth-wise conv (dd_k) approximates a kernel_size kernel
        d_k = 2 * dilation - 1
        d_p = (d_k - 1) // 2
        dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1)
        dd_p = (dilation * (dd_k - 1) // 2)

        self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim)
        self.conv_spatial = nn.Conv2d(
            dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation)
        self.conv1 = nn.Conv2d(dim, dim, 1)

        # squeeze-and-excitation branch: global pool -> reduce -> expand -> gate
        self.reduction = max(dim // reduction, 4)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(dim, dim // self.reduction, bias=False),  # reduction
            nn.ReLU(True),
            nn.Linear(dim // self.reduction, dim, bias=False),  # expansion
            nn.Sigmoid()
        )

    def forward(self, x):
        u = x.clone()
        attn = self.conv0(x)            # depth-wise conv
        attn = self.conv_spatial(attn)  # depth-wise dilation convolution
        f_x = self.conv1(attn)          # 1x1 conv
        # append a se operation: channel gate from globally pooled statistics
        b, c, _, _ = x.size()
        se_atten = self.avg_pool(x).view(b, c)
        se_atten = self.fc(se_atten).view(b, c, 1, 1)
        # combine SE gate, large-kernel attention map, and the input itself
        return se_atten * f_x * u


class TAUSubBlock(GASubBlock):
    """A TAUBlock (tau) for Temporal Attention Unit

    Reuses the GASubBlock layout but swaps its spatial attention for the
    TemporalAttention module defined above.
    """

    def __init__(self, dim, kernel_size=21, mlp_ratio=4.,
                 drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU):
        super().__init__(dim=dim, kernel_size=kernel_size, mlp_ratio=mlp_ratio,
                         drop=drop, drop_path=drop_path, init_value=init_value, act_layer=act_layer)

        # replace the parent's spatial attention with temporal attention
        self.attn = TemporalAttention(dim, kernel_size)

# import math
# import torch
# import torch.nn as nn
# from timm.models.layers import DropPath, trunc_normal_
# from timm.models.convnext import ConvNeXtBlock
# from timm.models.mlp_mixer import MixerBlock
# from timm.models.vision_transformer import Block as ViTBlock

# from modules_api.layers import (HorBlock, ChannelAggregationFFN, MultiOrderGatedAggregation,
#                                 PoolFormerBlock, CBlock, SABlock, MixMlp, VANBlock)


# class AttentionModule(nn.Module):
#     """Large Kernel Attention"""
#     def __init__(self, dim, kernel_size, dilation=3):
#         super().__init__()
#         d_k = 2 * dilation - 1
#         d_p = (d_k - 1) // 2
#         dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1)
#         dd_p = (dilation * (dd_k - 1) // 2)

#         self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim)
#         self.conv_spatial = nn.Conv2d(
#             dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation)
#         self.conv1 = nn.Conv2d(dim, 2 * dim, 1)

#     def forward(self, x):
#         u = x.clone()
#         attn = self.conv0(x)  # depth-wise
conv +# attn = self.conv_spatial(attn) # depth-wise dilation convolution + +# f_g = self.conv1(attn) +# split_dim = f_g.shape[1] // 2 +# f_x, g_x = torch.split(f_g, split_dim, dim=1) +# return torch.sigmoid(g_x) * f_x + + +# class SpatialAttention(nn.Module): +# """A Spatial Attention block""" +# def __init__(self, d_model, kernel_size=21, attn_shortcut=True): +# super().__init__() + +# self.proj_1 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.activation = nn.GELU() # GELU +# self.spatial_gating_unit = AttentionModule(d_model, kernel_size) +# self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.attn_shortcut = attn_shortcut + +# def forward(self, x): +# if self.attn_shortcut: +# shortcut = x.clone() +# x = self.proj_1(x) +# x = self.activation(x) +# x = self.spatial_gating_unit(x) +# x = self.proj_2(x) +# if self.attn_shortcut: +# x = x + shortcut +# return x + + +# class GASubBlock(nn.Module): +# """A GABlock (gSTA) block""" +# def __init__(self, dim, kernel_size=21, mlp_ratio=4., +# drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU): +# super().__init__() +# self.norm1 = nn.BatchNorm2d(dim) +# self.attn = SpatialAttention(dim, kernel_size) +# self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + +# self.norm2 = nn.BatchNorm2d(dim) +# mlp_hidden_dim = int(dim * mlp_ratio) +# self.mlp = MixMlp( +# in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + +# self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) +# self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, nn.LayerNorm): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2'} + +# def forward(self, x): +# x = x + self.drop_path( +# self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.attn(self.norm1(x))) +# x = x + self.drop_path( +# self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) +# return x + + +# class ConvMixerSubBlock(nn.Module): +# """A block of ConvMixer.""" +# def __init__(self, dim, kernel_size=9, activation=nn.GELU): +# super().__init__() +# # spatial mixing +# self.conv_dw = nn.Conv2d(dim, dim, kernel_size, groups=dim, padding="same") +# self.act_1 = activation() +# self.norm_1 = nn.BatchNorm2d(dim) +# # channel mixing +# self.conv_pw = nn.Conv2d(dim, dim, kernel_size=1) +# self.act_2 = activation() +# self.norm_2 = nn.BatchNorm2d(dim) + +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.BatchNorm2d): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * 
m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return dict() + +# def forward(self, x): +# x = x + self.norm_1(self.act_1(self.conv_dw(x))) +# x = self.norm_2(self.act_2(self.conv_pw(x))) +# return x + + +# class ConvNeXtSubBlock(ConvNeXtBlock): +# """A block of ConvNeXt.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): +# super().__init__(dim, mlp_ratio=mlp_ratio, +# drop_path=drop_path, ls_init_value=1e-6, conv_mlp=True) +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'gamma'} + +# def forward(self, x): +# x = x + self.drop_path( +# self.gamma.reshape(1, -1, 1, 1) * self.mlp(self.norm(self.conv_dw(x)))) +# return x + + +# class HorNetSubBlock(HorBlock): +# """A block of HorNet.""" +# def __init__(self, dim, mlp_ratio=4., drop_path=0.1, init_value=1e-6): +# super().__init__(dim, mlp_ratio=mlp_ratio, drop_path=drop_path, init_value=init_value) +# self.apply(self._init_weights) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'gamma1', 'gamma2'} + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, nn.LayerNorm): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * 
m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + + +# class MLPMixerSubBlock(MixerBlock): +# """A block of MLP-Mixer.""" +# def __init__(self, dim, input_resolution=None, mlp_ratio=4., drop=0., drop_path=0.1): +# seq_len = input_resolution[0] * input_resolution[1] +# super().__init__(dim, seq_len=seq_len, +# mlp_ratio=(0.5, mlp_ratio), drop_path=drop_path, drop=drop) +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return dict() + +# def forward(self, x): +# B, C, H, W = x.shape +# x = x.flatten(2).transpose(1, 2) +# x = x + self.drop_path(self.mlp_tokens(self.norm1(x).transpose(1, 2)).transpose(1, 2)) +# x = x + self.drop_path(self.mlp_channels(self.norm2(x))) +# return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + +# class MogaSubBlock(nn.Module): +# """A block of MogaNet.""" +# def __init__(self, embed_dims, mlp_ratio=4., drop_rate=0., drop_path_rate=0., init_value=1e-5, +# attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4]): +# super(MogaSubBlock, self).__init__() +# self.out_channels = embed_dims +# # spatial attention +# self.norm1 = nn.BatchNorm2d(embed_dims) +# self.attn = MultiOrderGatedAggregation( +# embed_dims, attn_dw_dilation=attn_dw_dilation, attn_channel_split=attn_channel_split) +# self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
else nn.Identity() +# # channel MLP +# self.norm2 = nn.BatchNorm2d(embed_dims) +# mlp_hidden_dims = int(embed_dims * mlp_ratio) +# self.mlp = ChannelAggregationFFN( +# embed_dims=embed_dims, mlp_hidden_dims=mlp_hidden_dims, ffn_drop=drop_rate) +# # init layer scale +# self.layer_scale_1 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) +# self.layer_scale_2 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) + +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2', 'sigma'} + +# def forward(self, x): +# x = x + self.drop_path(self.layer_scale_1 * self.attn(self.norm1(x))) +# x = x + self.drop_path(self.layer_scale_2 * self.mlp(self.norm2(x))) +# return x + + +# class PoolFormerSubBlock(PoolFormerBlock): +# """A block of PoolFormer.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): +# super().__init__(dim, pool_size=3, mlp_ratio=mlp_ratio, drop_path=drop_path, +# drop=drop, init_value=1e-5) +# self.apply(self._init_weights) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2'} + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + + +# def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0., +# 
init_value=1e-6, block_type='Conv'): +# """Build a block of Uniformer.""" +# assert block_type in ['Conv', 'MHSA'] +# if block_type == 'MHSA': +# return CBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) +# else: +# return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, +# drop=drop, drop_path=drop_path, init_value=init_value) + + +# class VANSubBlock(VANBlock): +# """A block of VAN.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0., init_value=1e-2, act_layer=nn.GELU): +# super().__init__(dim=dim, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path, +# init_value=init_value, act_layer=act_layer) +# self.apply(self._init_weights) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2'} + +# def _init_weights(self, m): +# if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + + +# class ViTSubBlock(ViTBlock): +# """A block of Vision Transformer.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): +# super().__init__(dim=dim, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, +# drop=drop, drop_path=drop_path, act_layer=nn.GELU, norm_layer=nn.LayerNorm) +# self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {} + +# def forward(self, x): +# B, C, H, W = x.shape +# x = x.flatten(2).transpose(1, 2) + +# x = x + self.drop_path(self.attn(self.norm1(x))) +# x = x + self.drop_path(self.mlp(self.norm2(x))) +# #print("x.shape", x.shape) +# return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + + + + + + +# class TemporalAttention(nn.Module): +# def __init__(self, d_model, kernel_size=21, attn_shortcut=True): +# super().__init__() + +# self.proj_1 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.activation = nn.GELU() # GELU +# self.spatial_gating_unit = TemporalAttentionModule(d_model, kernel_size) +# self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.attn_shortcut = attn_shortcut + +# def forward(self, x): +# if self.attn_shortcut: +# shortcut = x.clone() +# x = self.proj_1(x) +# x = self.activation(x) +# x = self.spatial_gating_unit(x) +# x = self.proj_2(x) +# if self.attn_shortcut: +# x = x + shortcut +# return x + + +# class TemporalAttentionModule(nn.Module): +# def __init__(self, dim, kernel_size, dilation=3, reduction=16): +# super().__init__() +# d_k = 2 * dilation - 1 +# d_p = (d_k - 1) // 2 +# dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1) +# dd_p = (dilation * (dd_k - 1) // 2) + +# self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim) +# self.conv_spatial = nn.Conv2d( +# dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation) +# self.conv1 = nn.Conv2d(dim, dim, 1) + +# self.reduction = max(dim // reduction, 4) +# self.avg_pool = nn.AdaptiveAvgPool2d(1) +# self.fc = 
nn.Sequential( +# nn.Linear(dim, dim // self.reduction, bias=False), # reduction +# nn.ReLU(True), +# nn.Linear(dim // self.reduction, dim, bias=False), # expansion +# nn.Sigmoid() +# ) + +# def forward(self, x): +# u = x.clone() +# attn = self.conv0(x) # depth-wise conv +# attn = self.conv_spatial(attn) # depth-wise dilation convolution +# f_x = self.conv1(attn) # 1x1 conv +# # append a se operation +# b, c, _, _ = x.size() +# se_atten = self.avg_pool(x).view(b, c) +# se_atten = self.fc(se_atten).view(b, c, 1, 1) +# return se_atten * f_x * u + + +# class TAUSubBlock(GASubBlock): +# def __init__(self, dim, kernel_size=21, mlp_ratio=4., +# drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU): +# super().__init__(dim=dim, kernel_size=kernel_size, mlp_ratio=mlp_ratio, +# drop=drop, drop_path=drop_path, init_value=init_value, act_layer=act_layer) + +# self.attn = TemporalAttention(dim, kernel_size) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/fouriermodules-checkpoint.py b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/fouriermodules-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..9107d25c70768d68b195508522698c4c179a9315 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/fouriermodules-checkpoint.py @@ -0,0 +1,183 @@ +import torch +from torch import nn +import torch.nn.functional as F +import torch.fft +import numpy as np +import torch.optim as optimizer +from functools import partial +from collections import OrderedDict +from timm.models.layers import DropPath, to_2tuple, trunc_normal_ +from torch.utils.checkpoint import checkpoint_sequential +from torch import nn + + +class BasicConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, transpose=False, act_norm=False): + super(BasicConv2d, self).__init__() + self.act_norm=act_norm + if not transpose: + self.conv = 
nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding)
        else:
            # transposed conv for upsampling; output_padding recovers the exact
            # doubled size when stride is even
            self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, output_padding=stride // 2)
        # NOTE(review): GroupNorm(2, ...) assumes out_channels is divisible by 2
        self.norm = nn.GroupNorm(2, out_channels)
        self.act = nn.LeakyReLU(0.2, inplace=True)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            # optional GroupNorm + LeakyReLU on top of the raw convolution
            y = self.act(self.norm(y))
        return y


class ConvSC(nn.Module):
    """3x3 BasicConv2d wrapper; stride-1 convs never use the transposed path."""

    def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True):
        super(ConvSC, self).__init__()
        if stride == 1:
            transpose = False
        self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride,
                                padding=1, transpose=transpose, act_norm=act_norm)

    def forward(self, x):
        y = self.conv(x)
        return y


class GroupConv2d(nn.Module):
    """Grouped convolution with optional GroupNorm + LeakyReLU activation."""

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, groups, act_norm=False):
        super(GroupConv2d, self).__init__()
        self.act_norm = act_norm
        # fall back to a plain conv when channels don't divide evenly by groups
        if in_channels % groups != 0:
            groups = 1
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=groups)
        self.norm = nn.GroupNorm(groups, out_channels)
        self.activate = nn.LeakyReLU(0.2, inplace=True)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.activate(self.norm(y))
        return y


class Inception(nn.Module):
    """Inception-style multi-kernel block whose branch outputs are summed."""

    # NOTE(review): mutable default list is shared across calls; harmless here
    # since it is only read
    def __init__(self, C_in, C_hid, C_out, incep_ker=[3, 5, 7, 11], groups=8):
        super(Inception, self).__init__()
        # 1x1 bottleneck, then one grouped conv per kernel size
        self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0)
        layers = []
        for ker in incep_ker:
            layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1, padding=ker // 2, groups=groups, act_norm=True))
        self.layers = nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        # branches are summed, not concatenated
        y = 0
        for layer in self.layers:
            y += layer(x)
        return y


class Mlp(nn.Module):
    def __init__(self, in_features, hidden_features=None, out_features=None,
                 act_layer=nn.GELU, drop=0.):
        super(Mlp, self).__init__()
        out_features = out_features or in_features
        hidden_features = hidden_features or in_features
        self.fc1 = nn.Linear(in_features, hidden_features)
        self.act = act_layer()
        # NOTE(review): fc2 is defined but never used in forward(), which runs
        # fc1 -> act -> drop -> fc3 (adaptive pooling) -> drop. Confirm whether
        # fc2 or fc3 is the intended second projection.
        self.fc2 = nn.Linear(hidden_features, out_features)
        self.fc3 = nn.AdaptiveAvgPool1d(out_features)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.drop(x)
        x = self.fc3(x)
        x = self.drop(x)
        return x


class AdativeFourierNeuralOperator(nn.Module):
    """Adaptive Fourier Neural Operator: token mixing in the 2-D frequency domain.

    Operates on (B, N, C) token sequences that are reinterpreted as an h x w
    grid, filtered with per-block complex weights, and transformed back.
    """

    def __init__(self, dim, h=16, w=16, is_fno_bias=True):
        super(AdativeFourierNeuralOperator, self).__init__()
        self.hidden_size = dim
        self.h = h
        self.w = w
        # channels are split into num_blocks independent block-diagonal mixes
        self.num_blocks = 2
        self.block_size = self.hidden_size // self.num_blocks
        assert self.hidden_size % self.num_blocks == 0

        # leading dim of 2 holds the (real, imag) parts of the complex weights
        self.scale = 0.02
        self.w1 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size, self.block_size))
        self.b1 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size))
        self.w2 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size, self.block_size))
        self.b2 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size))
        self.relu = nn.ReLU()
        self.is_fno_bias = is_fno_bias

        if self.is_fno_bias:
            # learned token-wise skip path: a 1x1 conv over channels
            self.bias = nn.Conv1d(self.hidden_size, self.hidden_size, 1)
        else:
            self.bias = None

        # 0.0 disables the soft-shrinkage sparsification in forward()
        self.softshrink = 0.00

    def multiply(self, input, weights):
        # block-diagonal matmul over the last (block) dimension
        return torch.einsum('...bd, bdk->...bk', input, weights)

    def forward(self, x):
        B, N, C = x.shape

        # self.bias is either an nn.Module (always truthy) or None, so this
        # selects the learned skip path only when enabled
        if self.bias:
            bias = self.bias(x.permute(0, 2, 1)).permute(0, 2, 1)
        else:
            bias = torch.zeros(x.shape, device=x.device)

        # tokens -> h x w grid -> real FFT over the spatial axes
        x = x.reshape(B, self.h, self.w, C)
        x = torch.fft.rfft2(x, dim=(1, 2), norm='ortho')
        x = x.reshape(B, x.shape[1], x.shape[2], self.num_blocks, self.block_size)

        # two-layer complex MLP on the spectrum, with the complex product
        # expanded into explicit real/imag parts
        x_real = F.relu(self.multiply(x.real, self.w1[0]) - self.multiply(x.imag, self.w1[1])
                        + self.b1[0],
                        inplace=True)
        x_imag = F.relu(self.multiply(x.real, self.w1[1]) + self.multiply(x.imag, self.w1[0]) + self.b1[1],
                        inplace=True)
        x_real = self.multiply(x_real, self.w2[0]) - self.multiply(x_imag, self.w2[1]) + self.b2[0]
        # NOTE(review): this line reads the x_real that was just overwritten;
        # reference AFNO implementations compute both outputs from the
        # pre-update values - confirm the ordering is intentional.
        x_imag = self.multiply(x_real, self.w2[1]) + self.multiply(x_imag, self.w2[0]) + self.b2[1]

        x = torch.stack([x_real, x_imag], dim=-1)
        # softshrink == 0.0 is falsy, so the shrinkage is currently a no-op
        x = F.softshrink(x, lambd=self.softshrink) if self.softshrink else x

        x = torch.view_as_complex(x)
        x = x.reshape(B, x.shape[1], x.shape[2], self.hidden_size)
        # inverse FFT back to the grid, then back to (B, N, C) tokens
        x = torch.fft.irfft2(x, s=(self.h, self.w), dim=(1, 2), norm='ortho')
        x = x.reshape(B, N, C)

        return x + bias


class FourierNetBlock(nn.Module):
    """Transformer-style block: AFNO token mixing + MLP, both pre-norm residual."""

    def __init__(self,
                 dim,
                 mlp_ratio=4.,
                 drop=0.,
                 drop_path=0.,
                 act_layer=nn.GELU,
                 norm_layer=nn.LayerNorm,
                 h=16,
                 w=16):
        super(FourierNetBlock, self).__init__()
        self.normlayer1 = norm_layer(dim)
        self.filter = AdativeFourierNeuralOperator(dim, h=h, w=w)

        self.drop_path = DropPath(drop_path) if drop_path > 0.
else nn.Identity() + self.normlayer2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop) + self.double_skip = True + + def forward(self, x): + x = x + self.drop_path(self.filter(self.normlayer1(x))) + x = x + self.drop_path(self.mlp(self.normlayer2(x))) + return x + diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/modules-checkpoint.py b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/modules-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..5d76c1144a60c88d108284c1e7cb5fce0dd06877 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/.ipynb_checkpoints/modules-checkpoint.py @@ -0,0 +1,66 @@ +from torch import nn + + +class BasicConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, transpose=False, act_norm=False): + super(BasicConv2d, self).__init__() + self.act_norm=act_norm + if not transpose: + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) + else: + self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,output_padding=stride //2 ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvSC, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride, + padding=1, transpose=transpose, act_norm=act_norm) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, groups, act_norm=False): + 
super(GroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class Inception(nn.Module): + def __init__(self, C_in, C_hid, C_out, incep_ker=[3,5,7,11], groups=8): + super(Inception, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1, padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/Untitled.ipynb b/Exp3_Kuroshio_forecasting/model/modules_api/Untitled.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..d72c59d643a717860759e6cb016f45a7160299d5 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/Untitled.ipynb @@ -0,0 +1,23 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "5bf85826-7ccf-4aa2-a033-270f0bf3c022", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "", + "name": "" + }, + "language_info": { + "name": "" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/evolution.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/evolution.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79947e6c0cadb32531160a99bfe596810d6601b2 Binary files /dev/null and 
b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/evolution.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/evolution_modules.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/evolution_modules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69dec8755982694c91cadd9bd79363d4490f7961 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/evolution_modules.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/fouriermodules.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/fouriermodules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68241b509fa20eb0f08a60fd0423fc61214f3bdc Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/fouriermodules.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/modules.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/modules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36dbe30871a307c495c7313d2c7f2ba193362d58 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/__pycache__/modules.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/evolution.py b/Exp3_Kuroshio_forecasting/model/modules_api/evolution.py new file mode 100644 index 0000000000000000000000000000000000000000..a5f73078401d297a03ae87c42dd605aae9a7f661 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/evolution.py @@ -0,0 +1,678 @@ +import torch +from torch import nn +import math +import torch +import torch.nn as nn +from timm.layers import DropPath, trunc_normal_ +from timm.models.convnext import ConvNeXtBlock +from timm.models.mlp_mixer import MixerBlock +from timm.models.swin_transformer import SwinTransformerBlock, window_partition, window_reverse 
from timm.models.vision_transformer import Block as ViTBlock

from model.modules_api.layers import (HorBlock, ChannelAggregationFFN, MultiOrderGatedAggregation,
                                      PoolFormerBlock, CBlock, SABlock, MixMlp, VANBlock)


class BasicConv2d(nn.Module):
    """Conv (or sub-pixel upsampling) layer with optional GroupNorm + SiLU."""

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=0,
                 dilation=1,
                 upsampling=False,
                 act_norm=False,
                 act_inplace=True):
        super(BasicConv2d, self).__init__()
        self.act_norm = act_norm
        if upsampling is True:
            # sub-pixel upsampling: 4x channels then PixelShuffle(2) doubles H, W
            self.conv = nn.Sequential(*[
                nn.Conv2d(in_channels, out_channels*4, kernel_size=kernel_size,
                          stride=1, padding=padding, dilation=dilation),
                nn.PixelShuffle(2)
            ])
        else:
            self.conv = nn.Conv2d(
                in_channels, out_channels, kernel_size=kernel_size,
                stride=stride, padding=padding, dilation=dilation)

        # NOTE(review): GroupNorm(2, ...) assumes out_channels is divisible by 2
        self.norm = nn.GroupNorm(2, out_channels)
        self.act = nn.SiLU(inplace=act_inplace)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        # truncated-normal conv weights; zero biases
        if isinstance(m, (nn.Conv2d)):
            trunc_normal_(m.weight, std=.02)
            nn.init.constant_(m.bias, 0)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.act(self.norm(y))
        return y


class ConvSC(nn.Module):
    """BasicConv2d wrapper selecting stride-2 down- or PixelShuffle up-sampling."""

    def __init__(self,
                 C_in,
                 C_out,
                 kernel_size=3,
                 downsampling=False,
                 upsampling=False,
                 act_norm=True,
                 act_inplace=True):
        super(ConvSC, self).__init__()

        stride = 2 if downsampling is True else 1
        # keeps 'same' spatial alignment for the chosen kernel/stride pair
        padding = (kernel_size - stride + 1) // 2

        self.conv = BasicConv2d(C_in, C_out, kernel_size=kernel_size, stride=stride,
                                upsampling=upsampling, padding=padding,
                                act_norm=act_norm, act_inplace=act_inplace)

    def forward(self, x):
        y = self.conv(x)
        return y


class GroupConv2d(nn.Module):
    """Grouped convolution with optional GroupNorm + LeakyReLU."""

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=0,
                 groups=1,
                 act_norm=False,
                 act_inplace=True):
        super(GroupConv2d, self).__init__()
        self.act_norm = act_norm
        # fall back to a plain conv when channels don't divide evenly by groups
        if in_channels % groups != 0:
            groups = 1
        self.conv = nn.Conv2d(
            in_channels, out_channels, kernel_size=kernel_size,
            stride=stride, padding=padding, groups=groups)
        self.norm = nn.GroupNorm(groups, out_channels)
        self.activate = nn.LeakyReLU(0.2, inplace=act_inplace)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.activate(self.norm(y))
        return y


class gInception_ST(nn.Module):
    """A IncepU block for SimVP"""

    def __init__(self, C_in, C_hid, C_out, incep_ker=[3, 5, 7, 11], groups=8):
        super(gInception_ST, self).__init__()
        # 1x1 bottleneck followed by one grouped conv per kernel size
        self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0)

        layers = []
        for ker in incep_ker:
            layers.append(GroupConv2d(
                C_hid, C_out, kernel_size=ker, stride=1,
                padding=ker // 2, groups=groups, act_norm=True))
        self.layers = nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        # branch outputs are summed, not concatenated
        y = 0
        for layer in self.layers:
            y += layer(x)
        return y


class AttentionModule(nn.Module):
    """Large Kernel Attention for SimVP"""

    def __init__(self, dim, kernel_size, dilation=3):
        super().__init__()
        # large-kernel decomposition: small depth-wise conv + dilated depth-wise
        d_k = 2 * dilation - 1
        d_p = (d_k - 1) // 2
        dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1)
        dd_p = (dilation * (dd_k - 1) // 2)

        self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim)
        self.conv_spatial = nn.Conv2d(
            dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation)
        # 2x channels so the output can be split into value and gate halves
        self.conv1 = nn.Conv2d(dim, 2*dim, 1)

    def forward(self, x):
        # NOTE(review): `u` is computed but never used in this variant
        u = x.clone()
        attn = self.conv0(x)            # depth-wise conv
        attn = self.conv_spatial(attn)  # depth-wise dilation convolution

        f_g = self.conv1(attn)
        split_dim = f_g.shape[1] // 2
        f_x, g_x = torch.split(f_g, split_dim, dim=1)
        # gated attention: sigmoid(gate) * value
        return torch.sigmoid(g_x) * f_x


class SpatialAttention(nn.Module):
    """A Spatial Attention block for SimVP"""

    def __init__(self, d_model, kernel_size=21, attn_shortcut=True):
        super().__init__()

        self.proj_1 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv
        self.activation = nn.GELU()  # GELU
self.spatial_gating_unit = AttentionModule(d_model, kernel_size) + self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv + self.attn_shortcut = attn_shortcut + + def forward(self, x): + if self.attn_shortcut: + shortcut = x.clone() + x = self.proj_1(x) + x = self.activation(x) + x = self.spatial_gating_unit(x) + x = self.proj_2(x) + if self.attn_shortcut: + x = x + shortcut + return x + + +class GASubBlock(nn.Module): + """A GABlock (gSTA) for SimVP""" + + def __init__(self, dim, kernel_size=21, mlp_ratio=4., + drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU): + super().__init__() + self.norm1 = nn.BatchNorm2d(dim) + self.attn = SpatialAttention(dim, kernel_size) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MixMlp( + in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {'layer_scale_1', 'layer_scale_2'} + + def forward(self, x): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.attn(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) + return x + 
class ConvMixerSubBlock(nn.Module):
    """A block of ConvMixer: depth-wise spatial mixing + 1x1 channel mixing,
    each followed by activation and BatchNorm; residual over the spatial part."""

    def __init__(self, dim, kernel_size=9, activation=nn.GELU):
        super().__init__()
        # spatial mixing
        self.conv_dw = nn.Conv2d(dim, dim, kernel_size, groups=dim, padding="same")
        self.act_1 = activation()
        self.norm_1 = nn.BatchNorm2d(dim)
        # channel mixing
        self.conv_pw = nn.Conv2d(dim, dim, kernel_size=1)
        self.act_2 = activation()
        self.norm_2 = nn.BatchNorm2d(dim)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.BatchNorm2d):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            # Kaiming-style fan-out normal init.
            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            fan_out //= m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return dict()

    def forward(self, x):
        x = x + self.norm_1(self.act_1(self.conv_dw(x)))
        x = self.norm_2(self.act_2(self.conv_pw(x)))
        return x


class ConvNeXtSubBlock(ConvNeXtBlock):
    """A block of ConvNeXt (timm implementation, conv-MLP variant)."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1):
        super().__init__(dim, mlp_ratio=mlp_ratio,
                         drop_path=drop_path, ls_init_value=1e-6, conv_mlp=True)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            fan_out //= m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'gamma'}

    def forward(self, x):
        # Residual with learnable per-channel scale `gamma` from ConvNeXtBlock.
        x = x + self.drop_path(
            self.gamma.reshape(1, -1, 1, 1) * self.mlp(self.norm(self.conv_dw(x))))
        return x


class HorNetSubBlock(HorBlock):
    """A block of HorNet (recursive gated conv, from local `layers`)."""

    def __init__(self, dim, mlp_ratio=4., drop_path=0.1, init_value=1e-6):
        super().__init__(dim, mlp_ratio=mlp_ratio, drop_path=drop_path, init_value=init_value)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'gamma1', 'gamma2'}

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.LayerNorm):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            fan_out //= m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()


class MLPMixerSubBlock(MixerBlock):
    """A block of MLP-Mixer, adapted to NCHW input (flattened to tokens)."""

    def __init__(self, dim, input_resolution=None, mlp_ratio=4., drop=0., drop_path=0.1):
        # Token count is fixed by the spatial resolution.
        seq_len = input_resolution[0] * input_resolution[1]
        super().__init__(dim, seq_len=seq_len,
                         mlp_ratio=(0.5, mlp_ratio), drop_path=drop_path, drop=drop)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return dict()

    def forward(self, x):
        B, C, H, W = x.shape
        x = x.flatten(2).transpose(1, 2)  # NCHW -> (B, H*W, C) tokens
        x = x + self.drop_path(self.mlp_tokens(self.norm1(x).transpose(1, 2)).transpose(1, 2))
        x = x + self.drop_path(self.mlp_channels(self.norm2(x)))
        return x.reshape(B, H, W, C).permute(0, 3, 1, 2)


class MogaSubBlock(nn.Module):
    """A block of MogaNet: multi-order gated aggregation + channel-aggregation FFN,
    both residual with learnable layer scales."""

    def __init__(self, embed_dims, mlp_ratio=4., drop_rate=0., drop_path_rate=0., init_value=1e-5,
                 attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4]):
        super(MogaSubBlock, self).__init__()
        self.out_channels = embed_dims
        # spatial attention
        self.norm1 = nn.BatchNorm2d(embed_dims)
        self.attn = MultiOrderGatedAggregation(
            embed_dims, attn_dw_dilation=attn_dw_dilation, attn_channel_split=attn_channel_split)
        self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. else nn.Identity()
        # channel MLP
        self.norm2 = nn.BatchNorm2d(embed_dims)
        mlp_hidden_dims = int(embed_dims * mlp_ratio)
        self.mlp = ChannelAggregationFFN(
            embed_dims=embed_dims, mlp_hidden_dims=mlp_hidden_dims, ffn_drop=drop_rate)
        # init layer scale
        self.layer_scale_1 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)
        self.layer_scale_2 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)

        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            fan_out //= m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2', 'sigma'}

    def forward(self, x):
        x = x + self.drop_path(self.layer_scale_1 * self.attn(self.norm1(x)))
        x = x + self.drop_path(self.layer_scale_2 * self.mlp(self.norm2(x)))
        return x


class PoolFormerSubBlock(PoolFormerBlock):
    """A block of PoolFormer (pooling as token mixer, from local `layers`)."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1):
        super().__init__(dim, pool_size=3, mlp_ratio=mlp_ratio, drop_path=drop_path,
                         drop=drop, init_value=1e-5)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)


class SwinSubBlock(SwinTransformerBlock):
    """A block of Swin Transformer adapted to NCHW input.

    Window size is derived from the input resolution (capped at 8); odd layers
    use a shifted window (`shift_size = window_size // 2`).
    """

    def __init__(self, dim, input_resolution=None, layer_i=0, mlp_ratio=4., drop=0., drop_path=0.1):
        window_size = 7 if input_resolution[0] % 7 == 0 else max(4, input_resolution[0] // 16)
        window_size = min(8, window_size)
        shift_size = 0 if (layer_i % 2 == 0) else window_size // 2
        super().__init__(dim, input_resolution, num_heads=8, window_size=window_size,
                         shift_size=shift_size, mlp_ratio=mlp_ratio,
                         drop_path=drop_path, attn_drop=drop, proj_drop=drop, qkv_bias=True)
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {}

    def forward(self, x):
        B, C, H, W = x.shape
        x = x.flatten(2).transpose(1, 2)
        # NOTE(review): norm1 is applied here and the timm parent forward also
        # normalizes internally — verify against the installed timm version.
        x = self.norm1(x)
        x = x.view(B, H, W, C)
        x = super().forward(x)

        return x.reshape(B, H, W, C).permute(0, 3, 1, 2)


def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0.,
                      init_value=1e-6, block_type='Conv'):
    """Build a block of Uniformer.

    NOTE(review): both branches currently build the same SABlock (the upstream
    'Conv' branch would use CBlock). Preserved as-is for checkpoint
    compatibility — confirm before changing.
    """
    assert block_type in ['Conv', 'MHSA']
    if block_type == 'Conv':
        return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True,
                       drop=drop, drop_path=drop_path, init_value=init_value)
    else:
        return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True,
                       drop=drop, drop_path=drop_path, init_value=init_value)


class VANSubBlock(VANBlock):
    """A block of VAN (large-kernel attention, from local `layers`)."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0., init_value=1e-2, act_layer=nn.GELU):
        super().__init__(dim=dim, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path,
                         init_value=init_value, act_layer=act_layer)
        self.apply(self._init_weights)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {'layer_scale_1', 'layer_scale_2'}

    def _init_weights(self, m):
        if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)
        elif isinstance(m, nn.Conv2d):
            fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            fan_out //= m.groups
            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()


class ViTSubBlock(ViTBlock):
    """A block of Vision Transformer adapted to NCHW input."""

    def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1):
        super().__init__(dim=dim, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True,
                         attn_drop=drop, proj_drop=0, drop_path=drop_path,
                         act_layer=nn.GELU, norm_layer=nn.LayerNorm)
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.apply(self._init_weights)

    def _init_weights(self, m):
        if isinstance(m, nn.Linear):
            trunc_normal_(m.weight, std=.02)
            if isinstance(m, nn.Linear) and m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):
            nn.init.constant_(m.bias, 0)
            nn.init.constant_(m.weight, 1.0)

    @torch.jit.ignore
    def no_weight_decay(self):
        return {}

    def forward(self, x):
        B, C, H, W = x.shape
        x = x.flatten(2).transpose(1, 2)  # NCHW -> tokens
        x = x + self.drop_path(self.attn(self.norm1(x)))
        x = x + self.drop_path(self.mlp(self.norm2(x)))
        return x.reshape(B, H, W, C).permute(0, 3, 1, 2)


class TemporalAttention(nn.Module):
    """A Temporal Attention block for Temporal Attention Unit (TAU).

    Same shape as SpatialAttention but gated by TemporalAttentionModule.
    """

    def __init__(self, d_model, kernel_size=21, attn_shortcut=True):
        super().__init__()

        self.proj_1 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv
        self.activation = nn.GELU()                   # GELU
        self.spatial_gating_unit = TemporalAttentionModule(d_model, kernel_size)
        self.proj_2 = nn.Conv2d(d_model, d_model, 1)  # 1x1 conv
        self.attn_shortcut = attn_shortcut

    def forward(self, x):
        if self.attn_shortcut:
            shortcut = x.clone()
        x = self.proj_1(x)
        x = self.activation(x)
        x = self.spatial_gating_unit(x)
        x = self.proj_2(x)
        if self.attn_shortcut:
            x = x + shortcut
        return x


class TemporalAttentionModule(nn.Module):
    """Large Kernel Attention plus a squeeze-and-excitation channel gate."""

    def __init__(self, dim, kernel_size, dilation=3, reduction=16):
        super().__init__()
        d_k = 2 * dilation - 1
        d_p = (d_k - 1) // 2
        dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1)
        dd_p = (dilation * (dd_k - 1) // 2)

        self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim)
        self.conv_spatial = nn.Conv2d(
            dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation)
        self.conv1 = nn.Conv2d(dim, dim, 1)

        # SE branch: global average pool -> bottleneck MLP -> sigmoid gate.
        self.reduction = max(dim // reduction, 4)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(dim, dim // self.reduction, bias=False),  # reduction
            nn.ReLU(True),
            nn.Linear(dim // self.reduction, dim, bias=False),  # expansion
            nn.Sigmoid()
        )

    def forward(self, x):
        u = x.clone()
        attn = self.conv0(x)             # depth-wise conv
        attn = self.conv_spatial(attn)   # depth-wise dilation convolution
        f_x = self.conv1(attn)           # 1x1 conv
        # append a se operation
        b, c, _, _ = x.size()
        se_atten = self.avg_pool(x).view(b, c)
        se_atten = self.fc(se_atten).view(b, c, 1, 1)
        return se_atten * f_x * u


class TAUSubBlock(GASubBlock):
    """A TAUBlock (tau) for Temporal Attention Unit: GASubBlock with its
    spatial attention swapped for TemporalAttention."""

    def __init__(self, dim, kernel_size=21, mlp_ratio=4.,
                 drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU):
        super().__init__(dim=dim, kernel_size=kernel_size, mlp_ratio=mlp_ratio,
                         drop=drop, drop_path=drop_path, init_value=init_value, act_layer=act_layer)

        self.attn = TemporalAttention(dim, kernel_size)


class Evo_Block(nn.Module):
    """Dispatcher that builds one evolution sub-block selected by `model_type`
    (default 'gsta'), with an optional 1x1 channel reduction when
    in_channels != out_channels.

    Parameters
    ----------
    in_channels / out_channels : channel widths of the block.
    input_resolution : (H, W), required by resolution-dependent blocks
        (swin, mlp/mlpmixer).
    model_type : one of 'gsta', 'convmixer', 'convnext', 'hornet', 'mlp',
        'mlpmixer', 'moga', 'moganet', 'poolformer', 'swin', 'uniformer',
        'van', 'vit', 'tau'; None defaults to 'gsta'.
    layer_i : layer index (used for swin shift parity and uniformer MHSA/Conv
        selection).

    Raises
    ------
    ValueError : if `model_type` is not one of the supported types.
    """

    def __init__(self, in_channels, out_channels, input_resolution=None, model_type=None,
                 mlp_ratio=8., drop=0.0, drop_path=0.0, layer_i=0):
        super(Evo_Block, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        model_type = model_type.lower() if model_type is not None else 'gsta'

        if model_type == 'gsta':
            self.block = GASubBlock(
                in_channels, kernel_size=21, mlp_ratio=mlp_ratio,
                drop=drop, drop_path=drop_path, act_layer=nn.GELU)
        elif model_type == 'convmixer':
            self.block = ConvMixerSubBlock(in_channels, kernel_size=11, activation=nn.GELU)
        elif model_type == 'convnext':
            self.block = ConvNeXtSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        elif model_type == 'hornet':
            self.block = HorNetSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop_path=drop_path)
        elif model_type in ('mlp', 'mlpmixer'):
            self.block = MLPMixerSubBlock(
                in_channels, input_resolution, mlp_ratio=mlp_ratio,
                drop=drop, drop_path=drop_path)
        elif model_type in ('moga', 'moganet'):
            self.block = MogaSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop_rate=drop, drop_path_rate=drop_path)
        elif model_type == 'poolformer':
            self.block = PoolFormerSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        elif model_type == 'swin':
            self.block = SwinSubBlock(
                in_channels, input_resolution, layer_i=layer_i,
                mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        elif model_type == 'uniformer':
            # Middle layers with equal widths get the (intended) MHSA variant.
            block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv'
            self.block = UniformerSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop,
                drop_path=drop_path, block_type=block_type)
        elif model_type == 'van':
            self.block = VANSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop,
                drop_path=drop_path, act_layer=nn.GELU)
        elif model_type == 'vit':
            self.block = ViTSubBlock(
                in_channels, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)
        elif model_type == 'tau':
            self.block = TAUSubBlock(
                in_channels, kernel_size=21, mlp_ratio=mlp_ratio,
                drop=drop, drop_path=drop_path, act_layer=nn.GELU)
        else:
            # Was `assert False and "Error"`: that assert carried no message
            # and is stripped under `python -O`; raise explicitly instead.
            raise ValueError(f'Invalid model_type in Evo_Block: {model_type}')

        if in_channels != out_channels:
            # 1x1 conv to project to out_channels when widths differ.
            self.reduction = nn.Conv2d(
                in_channels, out_channels, kernel_size=1, stride=1, padding=0)

    def forward(self, x):
        z = self.block(x)
        return z if self.in_channels == self.out_channels else self.reduction(z)


class Spatio_temporal_evolution(nn.Module):
    """Stack of N2 Evo_Blocks applied to a video tensor flattened over time.

    Input (B, T, C, H, W) is reshaped to (B, T*C, H, W); the first block maps
    T*C -> channel_hid, the middle N2-2 blocks keep channel_hid, and the last
    maps back to T*C. DropPath rates ramp linearly from 1e-2 to drop_path.
    """

    def __init__(self, channel_in, channel_hid, N2,
                 input_resolution=None, model_type=None,
                 mlp_ratio=4., drop=0.0, drop_path=0.1):
        super(Spatio_temporal_evolution, self).__init__()
        assert N2 >= 2 and mlp_ratio > 1
        self.N2 = N2
        # Stochastic-depth schedule across the stack.
        dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)]

        # down-sampling
        enc_layers = [Evo_Block(
            channel_in, channel_hid, input_resolution, model_type,
            mlp_ratio, drop, drop_path=dpr[0], layer_i=0)]

        # state-stacking
        for i in range(1, N2-1):
            enc_layers.append(Evo_Block(
                channel_hid, channel_hid, input_resolution, model_type,
                mlp_ratio, drop, drop_path=dpr[i], layer_i=i))

        # up-sampling
        enc_layers.append(Evo_Block(
            channel_hid, channel_in, input_resolution, model_type,
            mlp_ratio, drop, drop_path=drop_path, layer_i=N2-1))
        self.enc = nn.Sequential(*enc_layers)

    def forward(self, x):
        B, T, C, H, W = x.shape
        x = x.reshape(B, T * C, H, W)  # merge time into channels

        z = x
        for i in range(self.N2):
            z = self.enc[i](z)

        y = z.reshape(B, T, C, H, W)
        return y

if __name__ == '__main__':
    x = torch.randn([1, 1, 69, 180, 360])
    print("input shape:", x.shape)

    model = Spatio_temporal_evolution(channel_in=69,
                                      channel_hid=256,
                                      N2=4,
                                      input_resolution = [64, 64],
                                      model_type='poolformer',
                                      mlp_ratio=4.,
                                      drop=0.0,
                                      drop_path=0.1)
    print(model)
    output = model(x)
    print("output shape:", output.shape)
a/Exp3_Kuroshio_forecasting/model/modules_api/evolution_modules.py b/Exp3_Kuroshio_forecasting/model/modules_api/evolution_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..5ef6d2ab8bdb00cca14ab6aa862dc9df54defbad --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/evolution_modules.py @@ -0,0 +1,1020 @@ +import math +import torch +import torch.nn as nn + +from timm.layers import DropPath, trunc_normal_ +from timm.models.convnext import ConvNeXtBlock +from timm.models.mlp_mixer import MixerBlock +from timm.models.swin_transformer import SwinTransformerBlock, window_partition, window_reverse +from timm.models.vision_transformer import Block as ViTBlock + +from model.modules_api.layers import (HorBlock, ChannelAggregationFFN, MultiOrderGatedAggregation, + PoolFormerBlock, CBlock, SABlock, MixMlp, VANBlock) + + +class BasicConv2d(nn.Module): + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=0, + dilation=1, + upsampling=False, + act_norm=False, + act_inplace=True): + super(BasicConv2d, self).__init__() + self.act_norm = act_norm + if upsampling is True: + self.conv = nn.Sequential(*[ + nn.Conv2d(in_channels, out_channels*4, kernel_size=kernel_size, + stride=1, padding=padding, dilation=dilation), + nn.PixelShuffle(2) + ]) + else: + self.conv = nn.Conv2d( + in_channels, out_channels, kernel_size=kernel_size, + stride=stride, padding=padding, dilation=dilation) + + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.SiLU(inplace=act_inplace) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.Conv2d)): + trunc_normal_(m.weight, std=.02) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + + def __init__(self, + C_in, + C_out, + kernel_size=3, + downsampling=False, + upsampling=False, + act_norm=True, + act_inplace=True): + 
super(ConvSC, self).__init__() + + stride = 2 if downsampling is True else 1 + padding = (kernel_size - stride + 1) // 2 + + self.conv = BasicConv2d(C_in, C_out, kernel_size=kernel_size, stride=stride, + upsampling=upsampling, padding=padding, + act_norm=act_norm, act_inplace=act_inplace) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=0, + groups=1, + act_norm=False, + act_inplace=True): + super(GroupConv2d, self).__init__() + self.act_norm=act_norm + if in_channels % groups != 0: + groups=1 + self.conv = nn.Conv2d( + in_channels, out_channels, kernel_size=kernel_size, + stride=stride, padding=padding, groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=act_inplace) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class gInception_ST(nn.Module): + """A IncepU block for SimVP""" + + def __init__(self, C_in, C_hid, C_out, incep_ker = [3,5,7,11], groups = 8): + super(gInception_ST, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d( + C_hid, C_out, kernel_size=ker, stride=1, + padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y + + +class AttentionModule(nn.Module): + """Large Kernel Attention for SimVP""" + + def __init__(self, dim, kernel_size, dilation=3): + super().__init__() + d_k = 2 * dilation - 1 + d_p = (d_k - 1) // 2 + dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1) + dd_p = (dilation * (dd_k - 1) // 2) + + self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim) + self.conv_spatial = nn.Conv2d( + dim, dim, dd_k, stride=1, 
padding=dd_p, groups=dim, dilation=dilation) + self.conv1 = nn.Conv2d(dim, 2*dim, 1) + + def forward(self, x): + u = x.clone() + attn = self.conv0(x) # depth-wise conv + attn = self.conv_spatial(attn) # depth-wise dilation convolution + + f_g = self.conv1(attn) + split_dim = f_g.shape[1] // 2 + f_x, g_x = torch.split(f_g, split_dim, dim=1) + return torch.sigmoid(g_x) * f_x + + +class SpatialAttention(nn.Module): + """A Spatial Attention block for SimVP""" + + def __init__(self, d_model, kernel_size=21, attn_shortcut=True): + super().__init__() + + self.proj_1 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv + self.activation = nn.GELU() # GELU + self.spatial_gating_unit = AttentionModule(d_model, kernel_size) + self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv + self.attn_shortcut = attn_shortcut + + def forward(self, x): + if self.attn_shortcut: + shortcut = x.clone() + x = self.proj_1(x) + x = self.activation(x) + x = self.spatial_gating_unit(x) + x = self.proj_2(x) + if self.attn_shortcut: + x = x + shortcut + return x + + +class GASubBlock(nn.Module): + """A GABlock (gSTA) for SimVP""" + + def __init__(self, dim, kernel_size=21, mlp_ratio=4., + drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU): + super().__init__() + self.norm1 = nn.BatchNorm2d(dim) + self.attn = SpatialAttention(dim, kernel_size) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MixMlp( + in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {'layer_scale_1', 'layer_scale_2'} + + def forward(self, x): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.attn(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) + return x + + +class ConvMixerSubBlock(nn.Module): + """A block of ConvMixer.""" + + def __init__(self, dim, kernel_size=9, activation=nn.GELU): + super().__init__() + # spatial mixing + self.conv_dw = nn.Conv2d(dim, dim, kernel_size, groups=dim, padding="same") + self.act_1 = activation() + self.norm_1 = nn.BatchNorm2d(dim) + # channel mixing + self.conv_pw = nn.Conv2d(dim, dim, kernel_size=1) + self.act_2 = activation() + self.norm_2 = nn.BatchNorm2d(dim) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + 
m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return dict() + + def forward(self, x): + x = x + self.norm_1(self.act_1(self.conv_dw(x))) + x = self.norm_2(self.act_2(self.conv_pw(x))) + return x + + +class ConvNeXtSubBlock(ConvNeXtBlock): + """A block of ConvNeXt.""" + + def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): + super().__init__(dim, mlp_ratio=mlp_ratio, + drop_path=drop_path, ls_init_value=1e-6, conv_mlp=True) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma'} + + def forward(self, x): + x = x + self.drop_path( + self.gamma.reshape(1, -1, 1, 1) * self.mlp(self.norm(self.conv_dw(x)))) + return x + + +class HorNetSubBlock(HorBlock): + """A block of HorNet.""" + + def __init__(self, dim, mlp_ratio=4., drop_path=0.1, init_value=1e-6): + super().__init__(dim, mlp_ratio=mlp_ratio, drop_path=drop_path, init_value=init_value) + self.apply(self._init_weights) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma1', 'gamma2'} + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + 
m.bias.data.zero_() + + +class MLPMixerSubBlock(MixerBlock): + """A block of MLP-Mixer.""" + + def __init__(self, dim, input_resolution=None, mlp_ratio=4., drop=0., drop_path=0.1): + seq_len = input_resolution[0] * input_resolution[1] + super().__init__(dim, seq_len=seq_len, + mlp_ratio=(0.5, mlp_ratio), drop_path=drop_path, drop=drop) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return dict() + + def forward(self, x): + B, C, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.mlp_tokens(self.norm1(x).transpose(1, 2)).transpose(1, 2)) + x = x + self.drop_path(self.mlp_channels(self.norm2(x))) + return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + +class MogaSubBlock(nn.Module): + """A block of MogaNet.""" + + def __init__(self, embed_dims, mlp_ratio=4., drop_rate=0., drop_path_rate=0., init_value=1e-5, + attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4]): + super(MogaSubBlock, self).__init__() + self.out_channels = embed_dims + # spatial attention + self.norm1 = nn.BatchNorm2d(embed_dims) + self.attn = MultiOrderGatedAggregation( + embed_dims, attn_dw_dilation=attn_dw_dilation, attn_channel_split=attn_channel_split) + self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
else nn.Identity() + # channel MLP + self.norm2 = nn.BatchNorm2d(embed_dims) + mlp_hidden_dims = int(embed_dims * mlp_ratio) + self.mlp = ChannelAggregationFFN( + embed_dims=embed_dims, mlp_hidden_dims=mlp_hidden_dims, ffn_drop=drop_rate) + # init layer scale + self.layer_scale_1 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) + self.layer_scale_2 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {'layer_scale_1', 'layer_scale_2', 'sigma'} + + def forward(self, x): + x = x + self.drop_path(self.layer_scale_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.layer_scale_2 * self.mlp(self.norm2(x))) + return x + + +class PoolFormerSubBlock(PoolFormerBlock): + """A block of PoolFormer.""" + + def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): + super().__init__(dim, pool_size=3, mlp_ratio=mlp_ratio, drop_path=drop_path, + drop=drop, init_value=1e-5) + self.apply(self._init_weights) + + @torch.jit.ignore + def no_weight_decay(self): + return {'layer_scale_1', 'layer_scale_2'} + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + +class SwinSubBlock(SwinTransformerBlock): + """A block of Swin Transformer.""" + + def __init__(self, dim, 
input_resolution=None, layer_i=0, mlp_ratio=4., drop=0., drop_path=0.1): + window_size = 7 if input_resolution[0] % 7 == 0 else max(4, input_resolution[0] // 16) + window_size = min(8, window_size) + shift_size = 0 if (layer_i % 2 == 0) else window_size // 2 + super().__init__(dim, input_resolution, num_heads=8, window_size=window_size, + shift_size=shift_size, mlp_ratio=mlp_ratio, + drop_path=drop_path, attn_drop=drop, proj_drop=drop, qkv_bias=True) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + B, C, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = self.norm1(x) + x = x.view(B, H, W, C) + x = super().forward(x) + + return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + +# def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0., +# init_value=1e-6, block_type='Conv'): +# """Build a block of Uniformer.""" + +# assert block_type in ['Conv', 'MHSA'] +# if block_type == 'Conv': +# return CBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) +# else: +# return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, +# drop=drop, drop_path=drop_path, init_value=init_value) + +def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0., + init_value=1e-6, block_type='Conv'): + """Build a block of Uniformer.""" + assert block_type in ['Conv', 'MHSA'] + if block_type == 'Conv': + return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, + drop=drop, drop_path=drop_path, init_value=init_value) + else: + return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, + drop=drop, 
drop_path=drop_path, init_value=init_value) + + +class VANSubBlock(VANBlock): + """A block of VAN.""" + + def __init__(self, dim, mlp_ratio=4., drop=0.,drop_path=0., init_value=1e-2, act_layer=nn.GELU): + super().__init__(dim=dim, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path, + init_value=init_value, act_layer=act_layer) + self.apply(self._init_weights) + + @torch.jit.ignore + def no_weight_decay(self): + return {'layer_scale_1', 'layer_scale_2'} + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + +class ViTSubBlock(ViTBlock): + """A block of Vision Transformer.""" + + def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): + super().__init__(dim=dim, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, + attn_drop=drop, proj_drop=0, drop_path=drop_path, act_layer=nn.GELU, norm_layer=nn.LayerNorm) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + B, C, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.attn(self.norm1(x))) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + +class TemporalAttention(nn.Module): + """A Temporal Attention block for Temporal Attention Unit""" + + def __init__(self, d_model, kernel_size=21, attn_shortcut=True): + super().__init__() + + self.proj_1 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv + self.activation = nn.GELU() # GELU + self.spatial_gating_unit = TemporalAttentionModule(d_model, kernel_size) + self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv + self.attn_shortcut = attn_shortcut + + def forward(self, x): + if self.attn_shortcut: + shortcut = x.clone() + x = self.proj_1(x) + x = self.activation(x) + x = self.spatial_gating_unit(x) + x = self.proj_2(x) + if self.attn_shortcut: + x = x + shortcut + return x + + +class TemporalAttentionModule(nn.Module): + """Large Kernel Attention for SimVP""" + + def __init__(self, dim, kernel_size, dilation=3, reduction=16): + super().__init__() + d_k = 2 * dilation - 1 + d_p = (d_k - 1) // 2 + dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1) + dd_p = (dilation * (dd_k - 1) // 2) + + self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim) + self.conv_spatial = nn.Conv2d( + dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation) + self.conv1 = nn.Conv2d(dim, dim, 1) + + self.reduction = max(dim // reduction, 4) + self.avg_pool = nn.AdaptiveAvgPool2d(1) + 
self.fc = nn.Sequential( + nn.Linear(dim, dim // self.reduction, bias=False), # reduction + nn.ReLU(True), + nn.Linear(dim // self.reduction, dim, bias=False), # expansion + nn.Sigmoid() + ) + + def forward(self, x): + u = x.clone() + attn = self.conv0(x) # depth-wise conv + attn = self.conv_spatial(attn) # depth-wise dilation convolution + f_x = self.conv1(attn) # 1x1 conv + # append a se operation + b, c, _, _ = x.size() + se_atten = self.avg_pool(x).view(b, c) + se_atten = self.fc(se_atten).view(b, c, 1, 1) + return se_atten * f_x * u + + +class TAUSubBlock(GASubBlock): + """A TAUBlock (tau) for Temporal Attention Unit""" + + def __init__(self, dim, kernel_size=21, mlp_ratio=4., + drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU): + super().__init__(dim=dim, kernel_size=kernel_size, mlp_ratio=mlp_ratio, + drop=drop, drop_path=drop_path, init_value=init_value, act_layer=act_layer) + + self.attn = TemporalAttention(dim, kernel_size) + +# import math +# import torch +# import torch.nn as nn +# from timm.models.layers import DropPath, trunc_normal_ +# from timm.models.convnext import ConvNeXtBlock +# from timm.models.mlp_mixer import MixerBlock +# from timm.models.vision_transformer import Block as ViTBlock + +# from modules_api.layers import (HorBlock, ChannelAggregationFFN, MultiOrderGatedAggregation, +# PoolFormerBlock, CBlock, SABlock, MixMlp, VANBlock) + + +# class AttentionModule(nn.Module): +# """Large Kernel Attention""" +# def __init__(self, dim, kernel_size, dilation=3): +# super().__init__() +# d_k = 2 * dilation - 1 +# d_p = (d_k - 1) // 2 +# dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1) +# dd_p = (dilation * (dd_k - 1) // 2) + +# self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim) +# self.conv_spatial = nn.Conv2d( +# dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation) +# self.conv1 = nn.Conv2d(dim, 2 * dim, 1) + +# def forward(self, x): +# u = x.clone() +# attn = self.conv0(x) # depth-wise 
conv +# attn = self.conv_spatial(attn) # depth-wise dilation convolution + +# f_g = self.conv1(attn) +# split_dim = f_g.shape[1] // 2 +# f_x, g_x = torch.split(f_g, split_dim, dim=1) +# return torch.sigmoid(g_x) * f_x + + +# class SpatialAttention(nn.Module): +# """A Spatial Attention block""" +# def __init__(self, d_model, kernel_size=21, attn_shortcut=True): +# super().__init__() + +# self.proj_1 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.activation = nn.GELU() # GELU +# self.spatial_gating_unit = AttentionModule(d_model, kernel_size) +# self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.attn_shortcut = attn_shortcut + +# def forward(self, x): +# if self.attn_shortcut: +# shortcut = x.clone() +# x = self.proj_1(x) +# x = self.activation(x) +# x = self.spatial_gating_unit(x) +# x = self.proj_2(x) +# if self.attn_shortcut: +# x = x + shortcut +# return x + + +# class GASubBlock(nn.Module): +# """A GABlock (gSTA) block""" +# def __init__(self, dim, kernel_size=21, mlp_ratio=4., +# drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU): +# super().__init__() +# self.norm1 = nn.BatchNorm2d(dim) +# self.attn = SpatialAttention(dim, kernel_size) +# self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + +# self.norm2 = nn.BatchNorm2d(dim) +# mlp_hidden_dim = int(dim * mlp_ratio) +# self.mlp = MixMlp( +# in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + +# self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) +# self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, nn.LayerNorm): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2'} + +# def forward(self, x): +# x = x + self.drop_path( +# self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.attn(self.norm1(x))) +# x = x + self.drop_path( +# self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) +# return x + + +# class ConvMixerSubBlock(nn.Module): +# """A block of ConvMixer.""" +# def __init__(self, dim, kernel_size=9, activation=nn.GELU): +# super().__init__() +# # spatial mixing +# self.conv_dw = nn.Conv2d(dim, dim, kernel_size, groups=dim, padding="same") +# self.act_1 = activation() +# self.norm_1 = nn.BatchNorm2d(dim) +# # channel mixing +# self.conv_pw = nn.Conv2d(dim, dim, kernel_size=1) +# self.act_2 = activation() +# self.norm_2 = nn.BatchNorm2d(dim) + +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.BatchNorm2d): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * 
m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return dict() + +# def forward(self, x): +# x = x + self.norm_1(self.act_1(self.conv_dw(x))) +# x = self.norm_2(self.act_2(self.conv_pw(x))) +# return x + + +# class ConvNeXtSubBlock(ConvNeXtBlock): +# """A block of ConvNeXt.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): +# super().__init__(dim, mlp_ratio=mlp_ratio, +# drop_path=drop_path, ls_init_value=1e-6, conv_mlp=True) +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'gamma'} + +# def forward(self, x): +# x = x + self.drop_path( +# self.gamma.reshape(1, -1, 1, 1) * self.mlp(self.norm(self.conv_dw(x)))) +# return x + + +# class HorNetSubBlock(HorBlock): +# """A block of HorNet.""" +# def __init__(self, dim, mlp_ratio=4., drop_path=0.1, init_value=1e-6): +# super().__init__(dim, mlp_ratio=mlp_ratio, drop_path=drop_path, init_value=init_value) +# self.apply(self._init_weights) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'gamma1', 'gamma2'} + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, nn.LayerNorm): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * 
m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + + +# class MLPMixerSubBlock(MixerBlock): +# """A block of MLP-Mixer.""" +# def __init__(self, dim, input_resolution=None, mlp_ratio=4., drop=0., drop_path=0.1): +# seq_len = input_resolution[0] * input_resolution[1] +# super().__init__(dim, seq_len=seq_len, +# mlp_ratio=(0.5, mlp_ratio), drop_path=drop_path, drop=drop) +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return dict() + +# def forward(self, x): +# B, C, H, W = x.shape +# x = x.flatten(2).transpose(1, 2) +# x = x + self.drop_path(self.mlp_tokens(self.norm1(x).transpose(1, 2)).transpose(1, 2)) +# x = x + self.drop_path(self.mlp_channels(self.norm2(x))) +# return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + +# class MogaSubBlock(nn.Module): +# """A block of MogaNet.""" +# def __init__(self, embed_dims, mlp_ratio=4., drop_rate=0., drop_path_rate=0., init_value=1e-5, +# attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4]): +# super(MogaSubBlock, self).__init__() +# self.out_channels = embed_dims +# # spatial attention +# self.norm1 = nn.BatchNorm2d(embed_dims) +# self.attn = MultiOrderGatedAggregation( +# embed_dims, attn_dw_dilation=attn_dw_dilation, attn_channel_split=attn_channel_split) +# self.drop_path = DropPath(drop_path_rate) if drop_path_rate > 0. 
else nn.Identity() +# # channel MLP +# self.norm2 = nn.BatchNorm2d(embed_dims) +# mlp_hidden_dims = int(embed_dims * mlp_ratio) +# self.mlp = ChannelAggregationFFN( +# embed_dims=embed_dims, mlp_hidden_dims=mlp_hidden_dims, ffn_drop=drop_rate) +# # init layer scale +# self.layer_scale_1 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) +# self.layer_scale_2 = nn.Parameter(init_value * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) + +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2', 'sigma'} + +# def forward(self, x): +# x = x + self.drop_path(self.layer_scale_1 * self.attn(self.norm1(x))) +# x = x + self.drop_path(self.layer_scale_2 * self.mlp(self.norm2(x))) +# return x + + +# class PoolFormerSubBlock(PoolFormerBlock): +# """A block of PoolFormer.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): +# super().__init__(dim, pool_size=3, mlp_ratio=mlp_ratio, drop_path=drop_path, +# drop=drop, init_value=1e-5) +# self.apply(self._init_weights) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2'} + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + + +# def UniformerSubBlock(embed_dims, mlp_ratio=4., drop=0., drop_path=0., +# 
init_value=1e-6, block_type='Conv'): +# """Build a block of Uniformer.""" +# assert block_type in ['Conv', 'MHSA'] +# if block_type == 'MHSA': +# return CBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path) +# else: +# return SABlock(dim=embed_dims, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, +# drop=drop, drop_path=drop_path, init_value=init_value) + + +# class VANSubBlock(VANBlock): +# """A block of VAN.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0., init_value=1e-2, act_layer=nn.GELU): +# super().__init__(dim=dim, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path, +# init_value=init_value, act_layer=act_layer) +# self.apply(self._init_weights) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {'layer_scale_1', 'layer_scale_2'} + +# def _init_weights(self, m): +# if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) +# elif isinstance(m, nn.Conv2d): +# fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels +# fan_out //= m.groups +# m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) +# if m.bias is not None: +# m.bias.data.zero_() + + +# class ViTSubBlock(ViTBlock): +# """A block of Vision Transformer.""" +# def __init__(self, dim, mlp_ratio=4., drop=0., drop_path=0.1): +# super().__init__(dim=dim, num_heads=8, mlp_ratio=mlp_ratio, qkv_bias=True, +# drop=drop, drop_path=drop_path, act_layer=nn.GELU, norm_layer=nn.LayerNorm) +# self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() +# self.apply(self._init_weights) + +# def _init_weights(self, m): +# if isinstance(m, nn.Linear): +# trunc_normal_(m.weight, std=.02) +# if isinstance(m, nn.Linear) and m.bias is not None: +# nn.init.constant_(m.bias, 0) +# elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): +# nn.init.constant_(m.bias, 0) +# nn.init.constant_(m.weight, 1.0) + +# @torch.jit.ignore +# def no_weight_decay(self): +# return {} + +# def forward(self, x): +# B, C, H, W = x.shape +# x = x.flatten(2).transpose(1, 2) + +# x = x + self.drop_path(self.attn(self.norm1(x))) +# x = x + self.drop_path(self.mlp(self.norm2(x))) +# #print("x.shape", x.shape) +# return x.reshape(B, H, W, C).permute(0, 3, 1, 2) + + + + + + + +# class TemporalAttention(nn.Module): +# def __init__(self, d_model, kernel_size=21, attn_shortcut=True): +# super().__init__() + +# self.proj_1 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.activation = nn.GELU() # GELU +# self.spatial_gating_unit = TemporalAttentionModule(d_model, kernel_size) +# self.proj_2 = nn.Conv2d(d_model, d_model, 1) # 1x1 conv +# self.attn_shortcut = attn_shortcut + +# def forward(self, x): +# if self.attn_shortcut: +# shortcut = x.clone() +# x = self.proj_1(x) +# x = self.activation(x) +# x = self.spatial_gating_unit(x) +# x = self.proj_2(x) +# if self.attn_shortcut: +# x = x + shortcut +# return x + + +# class TemporalAttentionModule(nn.Module): +# def __init__(self, dim, kernel_size, dilation=3, reduction=16): +# super().__init__() +# d_k = 2 * dilation - 1 +# d_p = (d_k - 1) // 2 +# dd_k = kernel_size // dilation + ((kernel_size // dilation) % 2 - 1) +# dd_p = (dilation * (dd_k - 1) // 2) + +# self.conv0 = nn.Conv2d(dim, dim, d_k, padding=d_p, groups=dim) +# self.conv_spatial = nn.Conv2d( +# dim, dim, dd_k, stride=1, padding=dd_p, groups=dim, dilation=dilation) +# self.conv1 = nn.Conv2d(dim, dim, 1) + +# self.reduction = max(dim // reduction, 4) +# self.avg_pool = nn.AdaptiveAvgPool2d(1) +# self.fc = 
#             nn.Sequential(
#                 nn.Linear(dim, dim // self.reduction, bias=False),  # reduction
#                 nn.ReLU(True),
#                 nn.Linear(dim // self.reduction, dim, bias=False),  # expansion
#                 nn.Sigmoid()
#             )

#     def forward(self, x):
#         u = x.clone()
#         attn = self.conv0(x)            # depth-wise conv
#         attn = self.conv_spatial(attn)  # depth-wise dilation convolution
#         f_x = self.conv1(attn)          # 1x1 conv
#         # append a se operation
#         b, c, _, _ = x.size()
#         se_atten = self.avg_pool(x).view(b, c)
#         se_atten = self.fc(se_atten).view(b, c, 1, 1)
#         return se_atten * f_x * u


# class TAUSubBlock(GASubBlock):
#     def __init__(self, dim, kernel_size=21, mlp_ratio=4.,
#                  drop=0., drop_path=0.1, init_value=1e-2, act_layer=nn.GELU):
#         super().__init__(dim=dim, kernel_size=kernel_size, mlp_ratio=mlp_ratio,
#                          drop=drop, drop_path=drop_path, init_value=init_value, act_layer=act_layer)
#         self.attn = TemporalAttention(dim, kernel_size)

# ---- begin: Exp3_Kuroshio_forecasting/model/modules_api/fouriermodules.py ----

import torch
from torch import nn
import torch.nn.functional as F
import torch.fft
import numpy as np
import torch.optim as optimizer
from functools import partial
from collections import OrderedDict
from timm.models.layers import DropPath, to_2tuple, trunc_normal_
from torch.utils.checkpoint import checkpoint_sequential
# (exact-duplicate `from torch import nn` removed)


class BasicConv2d(nn.Module):
    """Conv2d / ConvTranspose2d with optional GroupNorm + LeakyReLU.

    ``transpose=True`` uses a transposed conv with ``output_padding=stride//2``
    so that spatial size is exactly doubled for stride-2 upsampling.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding,
                 transpose=False, act_norm=False):
        super(BasicConv2d, self).__init__()
        self.act_norm = act_norm
        if not transpose:
            self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                                  stride=stride, padding=padding)
        else:
            self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size,
                                           stride=stride, padding=padding,
                                           output_padding=stride // 2)
        self.norm = nn.GroupNorm(2, out_channels)
        self.act = nn.LeakyReLU(0.2, inplace=True)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.act(self.norm(y))
        return y


class ConvSC(nn.Module):
    """3x3 BasicConv2d wrapper: stride-1 never transposes (no upsampling needed)."""

    def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True):
        super(ConvSC, self).__init__()
        if stride == 1:
            transpose = False
        self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride,
                                padding=1, transpose=transpose, act_norm=act_norm)

    def forward(self, x):
        y = self.conv(x)
        return y


class GroupConv2d(nn.Module):
    """Grouped conv with optional GroupNorm + LeakyReLU; falls back to
    groups=1 when ``in_channels`` is not divisible by ``groups``."""

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding,
                 groups, act_norm=False):
        super(GroupConv2d, self).__init__()
        self.act_norm = act_norm
        if in_channels % groups != 0:
            groups = 1
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                              stride=stride, padding=padding, groups=groups)
        self.norm = nn.GroupNorm(groups, out_channels)
        self.activate = nn.LeakyReLU(0.2, inplace=True)

    def forward(self, x):
        y = self.conv(x)
        if self.act_norm:
            y = self.activate(self.norm(y))
        return y


class Inception(nn.Module):
    """Inception-style multi-kernel branch: 1x1 reduce, then the SUM of
    several grouped convs with different kernel sizes."""

    def __init__(self, C_in, C_hid, C_out, incep_ker=[3, 5, 7, 11], groups=8):
        super(Inception, self).__init__()
        self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0)
        layers = []
        for ker in incep_ker:
            layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1,
                                      padding=ker // 2, groups=groups, act_norm=True))
        self.layers = nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        y = 0
        for layer in self.layers:
            y += layer(x)  # branches are summed, not concatenated
        return y


class Mlp(nn.Module):
    """Channel MLP used inside FourierNetBlock.

    NOTE(review): ``fc2`` is created but never used in ``forward`` -- an
    ``AdaptiveAvgPool1d`` (``fc3``) maps to ``out_features`` instead.  Kept
    as-is to preserve the checkpoint state_dict layout; confirm intent.
    """

    def __init__(self, in_features, hidden_features=None, out_features=None,
                 act_layer=nn.GELU, drop=0.):
        super(Mlp, self).__init__()
        out_features = out_features or in_features
        hidden_features = hidden_features or in_features
        self.fc1 = nn.Linear(in_features, hidden_features)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden_features, out_features)  # unused in forward (see NOTE)
        self.fc3 = nn.AdaptiveAvgPool1d(out_features)
        self.drop = nn.Dropout(drop)

    def forward(self, x):
        x = self.fc1(x)
        x = self.act(x)
        x = self.drop(x)
        x = self.fc3(x)
        x = self.drop(x)
        return x


class AdativeFourierNeuralOperator(nn.Module):
    """Adaptive Fourier Neural Operator (AFNO) token mixer.

    Mixes tokens by applying a block-diagonal two-layer complex MLP in the
    2-D Fourier domain of the (h, w) token grid.  (Class name -- including
    the 'Adative' typo -- is kept for caller/checkpoint compatibility.)
    """

    def __init__(self, dim, h=16, w=16, is_fno_bias=True):
        super(AdativeFourierNeuralOperator, self).__init__()
        self.hidden_size = dim
        self.h = h
        self.w = w
        self.num_blocks = 2
        self.block_size = self.hidden_size // self.num_blocks
        assert self.hidden_size % self.num_blocks == 0

        # Complex weights/biases stored as [real, imag] pairs along dim 0.
        self.scale = 0.02
        self.w1 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size, self.block_size))
        self.b1 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size))
        self.w2 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size, self.block_size))
        self.b2 = torch.nn.Parameter(self.scale * torch.randn(2, self.num_blocks, self.block_size))
        self.relu = nn.ReLU()
        self.is_fno_bias = is_fno_bias

        # Optional learned per-token residual bias (1x1 conv over channels).
        if self.is_fno_bias:
            self.bias = nn.Conv1d(self.hidden_size, self.hidden_size, 1)
        else:
            self.bias = None

        self.softshrink = 0.00  # 0.0 disables sparsity-inducing soft shrinkage

    def multiply(self, input, weights):
        # Block-wise matmul: (..., b, d) x (b, d, k) -> (..., b, k)
        return torch.einsum('...bd, bdk->...bk', input, weights)

    def forward(self, x):
        """x: (B, N, C) token sequence with N == h * w."""
        B, N, C = x.shape

        if self.bias is not None:  # explicit None-check; nn.Modules are always truthy
            bias = self.bias(x.permute(0, 2, 1)).permute(0, 2, 1)
        else:
            bias = 0.0  # scalar: avoids allocating a zero tensor just to add it

        x = x.reshape(B, self.h, self.w, C)
        x = torch.fft.rfft2(x, dim=(1, 2), norm='ortho')
        x = x.reshape(B, x.shape[1], x.shape[2], self.num_blocks, self.block_size)

        # First complex linear layer + ReLU.
        o1_real = F.relu(self.multiply(x.real, self.w1[0]) - self.multiply(x.imag, self.w1[1]) + self.b1[0],
                         inplace=True)
        o1_imag = F.relu(self.multiply(x.real, self.w1[1]) + self.multiply(x.imag, self.w1[0]) + self.b1[1],
                         inplace=True)
        # BUGFIX: the second layer must consume the FIRST layer's (real, imag)
        # pair for both outputs; previously the freshly overwritten real part
        # leaked into the imaginary-part computation.  NOTE(review): this
        # changes numerics -- retrain or re-validate any existing checkpoints.
        o2_real = self.multiply(o1_real, self.w2[0]) - self.multiply(o1_imag, self.w2[1]) + self.b2[0]
        o2_imag = self.multiply(o1_real, self.w2[1]) + self.multiply(o1_imag, self.w2[0]) + self.b2[1]

        x = torch.stack([o2_real, o2_imag], dim=-1)
        x = F.softshrink(x, lambd=self.softshrink) if self.softshrink else x

        x = torch.view_as_complex(x)
        x = x.reshape(B, x.shape[1], x.shape[2], self.hidden_size)
        x = torch.fft.irfft2(x, s=(self.h, self.w), dim=(1, 2), norm='ortho')
        x = x.reshape(B, N, C)

        return x + bias


class FourierNetBlock(nn.Module):
    """Transformer-style block with an AFNO token mixer:
    x + DropPath(AFNO(LN(x))), then x + DropPath(MLP(LN(x)))."""

    def __init__(self,
                 dim,
                 mlp_ratio=4.,
                 drop=0.,
                 drop_path=0.,
                 act_layer=nn.GELU,
                 norm_layer=nn.LayerNorm,
                 h=16,
                 w=16):
        super(FourierNetBlock, self).__init__()
        self.normlayer1 = norm_layer(dim)
        self.filter = AdativeFourierNeuralOperator(dim, h=h, w=w)

        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
        self.normlayer2 = norm_layer(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = Mlp(in_features=dim,
                       hidden_features=mlp_hidden_dim,
                       act_layer=act_layer,
                       drop=drop)
        self.double_skip = True

    def forward(self, x):
        x = x + self.drop_path(self.filter(self.normlayer1(x)))
        x = x + self.drop_path(self.mlp(self.normlayer2(x)))
        return x


# ---- begin: Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/hornet-checkpoint.py ----

# refer to the code from HorNet, Thanks!
# https://github.com/raoyongming/HorNet

import torch
import torch.nn as nn
import torch.nn.functional as F
from timm.models.layers import DropPath
import torch.fft


def get_dwconv(dim, kernel, bias):
    """Depth-wise kxk Conv2d ('same' padding for odd kernels, groups=dim)."""
    return nn.Conv2d(dim, dim, kernel_size=kernel, padding=(kernel-1)//2, bias=bias, groups=dim)


class gnconv(nn.Module):
    """Recursive gated convolution (g^n conv) from HorNet.

    Splits channels into ``order`` groups of doubling width, mixes them with
    one shared depth-wise conv (or a supplied global-filter layer), then
    gates and lifts the features order by order.
    """

    def __init__(self, dim, order=5, gflayer=None, h=14, w=8, s=1.0):
        super().__init__()
        self.order = order
        # Channel widths per order: [dim / 2^(order-1), ..., dim / 2], ascending.
        self.dims = [dim // 2 ** i for i in range(order)]
        self.dims.reverse()
        self.proj_in = nn.Conv2d(dim, 2*dim, 1)

        # Spatial mixing: 7x7 depth-wise conv, or a global-filter layer if given.
        if gflayer is None:
            self.dwconv = get_dwconv(sum(self.dims), 7, True)
        else:
            self.dwconv = gflayer(sum(self.dims), h=h, w=w)

        self.proj_out = nn.Conv2d(dim, dim, 1)

        # 1x1 convs lifting each order's channels to the next (wider) width.
        self.pws = nn.ModuleList(
            [nn.Conv2d(self.dims[i], self.dims[i+1], 1) for i in range(order-1)]
        )

        self.scale = s
        print('[gnconv]', order, 'order with dims=', self.dims, 'scale=%.4f'%self.scale)

    def forward(self, x, mask=None, dummy=False):
        # Project to 2*dim and split into the first gate (pwa) and the
        # concatenated multi-order branches (abc).
        fused_x = self.proj_in(x)
        pwa, abc = torch.split(fused_x, (self.dims[0], sum(self.dims)), dim=1)

        dw_abc = self.dwconv(abc) * self.scale

        dw_list = torch.split(dw_abc, self.dims, dim=1)
        x = pwa * dw_list[0]

        # Recursive gating: lift to next width, then gate with the next branch.
        for i in range(self.order -1):
            x = self.pws[i](x) * dw_list[i+1]

        x = self.proj_out(x)

        return x

class LayerNorm(nn.Module):
    r""" LayerNorm that supports two data formats: channels_last (default) or channels_first.
    The ordering of the dimensions in the inputs. channels_last corresponds to inputs with
    shape (batch_size, height, width, channels) while channels_first corresponds to inputs
    with shape (batch_size, channels, height, width).
    """
    def __init__(self, normalized_shape, eps=1e-6, data_format="channels_last"):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(normalized_shape))
        self.bias = nn.Parameter(torch.zeros(normalized_shape))
        self.eps = eps
        self.data_format = data_format
        if self.data_format not in ["channels_last", "channels_first"]:
            raise NotImplementedError
        self.normalized_shape = (normalized_shape, )

    def forward(self, x):
        if self.data_format == "channels_last":
            # Delegate to the fused implementation over the trailing dim.
            return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
        elif self.data_format == "channels_first":
            # Manual normalization over the channel dim (dim=1).
            u = x.mean(1, keepdim=True)
            s = (x - u).pow(2).mean(1, keepdim=True)
            x = (x - u) / torch.sqrt(s + self.eps)
            x = self.weight[:, None, None] * x + self.bias[:, None, None]
            return x


class HorBlock(nn.Module):
    """ HorNet block: gnconv branch + channels-last pointwise MLP branch,
    each residual with its own layer-scale (gamma1/gamma2) and drop-path. """

    def __init__(self, dim, order=4, mlp_ratio=4, drop_path=0., init_value=1e-6, gnconv=gnconv):
        super().__init__()

        self.norm1 = LayerNorm(dim, eps=1e-6, data_format='channels_first')
        self.gnconv = gnconv(dim, order) # depthwise conv
        self.norm2 = LayerNorm(dim, eps=1e-6)
        self.pwconv1 = nn.Linear(dim, int(mlp_ratio * dim)) # pointwise/1x1 convs, implemented with linear layers
        self.act = nn.GELU()
        self.pwconv2 = nn.Linear(int(mlp_ratio * dim), dim)
        # Per-channel layer-scale parameters for the two residual branches.
        self.gamma1 = nn.Parameter(init_value * torch.ones(dim), requires_grad=True)
        self.gamma2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)
        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()

    def forward(self, x):
        B, C, H, W = x.shape
        # Branch 1: gated conv with layer scale (broadcast over H, W).
        gamma1 = self.gamma1.view(C, 1, 1)
        x = x + self.drop_path(gamma1 * self.gnconv(self.norm1(x)))

        # Branch 2: channels-last pointwise MLP.
        input = x
        x = x.permute(0, 2, 3, 1) # (N, C, H, W) -> (N, H, W, C)
        x = self.norm2(x)
        x = self.pwconv1(x)
        x = self.act(x)
        x = self.pwconv2(x)
        if self.gamma2 is not None:
            x = self.gamma2 * x
        x = x.permute(0, 3, 1, 2) # (N, H, W, C) -> (N, C, H, W)

        x = input + self.drop_path(x)
        return x
diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/moganet-checkpoint.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/moganet-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..e861912689ce431ec99b7d0ff9b374efcfed02e0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/moganet-checkpoint.py @@ -0,0 +1,140 @@
# refer to the code from MogaNet, Thanks!
# https://github.com/Westlake-AI/MogaNet/blob/main/models/moganet.py

import torch
import torch.nn as nn
import torch.nn.functional as F


class ChannelAggregationFFN(nn.Module):
    """An implementation of FFN with Channel Aggregation in MogaNet."""

    def __init__(self, embed_dims, mlp_hidden_dims, kernel_size=3, act_layer=nn.GELU, ffn_drop=0.):
        super(ChannelAggregationFFN, self).__init__()
        self.embed_dims = embed_dims
        self.mlp_hidden_dims = mlp_hidden_dims

        # 1x1 expand -> depth-wise kxk -> act/drop -> 1x1 project back.
        self.fc1 = nn.Conv2d(
            in_channels=embed_dims, out_channels=self.mlp_hidden_dims, kernel_size=1)
        self.dwconv = nn.Conv2d(
            in_channels=self.mlp_hidden_dims, out_channels=self.mlp_hidden_dims, kernel_size=kernel_size,
            padding=kernel_size // 2, bias=True, groups=self.mlp_hidden_dims)
        self.act = act_layer()
        self.fc2 = nn.Conv2d(
            in_channels=mlp_hidden_dims, out_channels=embed_dims, kernel_size=1)
        self.drop = nn.Dropout(ffn_drop)

        # Channel aggregation: collapse to 1 channel, re-inject the residual
        # scaled by a learned per-channel sigma.
        self.decompose = nn.Conv2d(
            in_channels=self.mlp_hidden_dims, out_channels=1, kernel_size=1)
        self.sigma = nn.Parameter(
            1e-5 * torch.ones((1, mlp_hidden_dims, 1, 1)), requires_grad=True)
        self.decompose_act = act_layer()

    def feat_decompose(self, x):
        x = x + self.sigma * (x - self.decompose_act(self.decompose(x)))
        return x

    def forward(self, x):
        # proj 1
        x = self.fc1(x)
        x = self.dwconv(x)
        x = self.act(x)
        x = self.drop(x)
        # proj 2
        x = self.feat_decompose(x)
        x = self.fc2(x)
        x = self.drop(x)
        return x


class MultiOrderDWConv(nn.Module):
    """Multi-order Features with Dilated DWConv Kernel in MogaNet."""

    def __init__(self, embed_dims, dw_dilation=[1, 2, 3], channel_split=[1, 3, 4]):
        super(MultiOrderDWConv, self).__init__()
        # Split channels by ratio channel_split (e.g. 1:3:4 -> 1/8, 3/8, 4/8).
        self.split_ratio = [i / sum(channel_split) for i in channel_split]
        self.embed_dims_1 = int(self.split_ratio[1] * embed_dims)
        self.embed_dims_2 = int(self.split_ratio[2] * embed_dims)
        self.embed_dims_0 = embed_dims - self.embed_dims_1 - self.embed_dims_2
        self.embed_dims = embed_dims
        assert len(dw_dilation) == len(channel_split) == 3
        assert 1 <= min(dw_dilation) and max(dw_dilation) <= 3
        assert embed_dims % sum(channel_split) == 0

        # basic DW conv (over ALL channels; 'same' padding for a dilated 5x5)
        self.DW_conv0 = nn.Conv2d(
            in_channels=self.embed_dims, out_channels=self.embed_dims, kernel_size=5,
            padding=(1 + 4 * dw_dilation[0]) // 2,
            groups=self.embed_dims, stride=1, dilation=dw_dilation[0],
        )
        # DW conv 1 (middle channel slice, dilated 5x5)
        self.DW_conv1 = nn.Conv2d(
            in_channels=self.embed_dims_1, out_channels=self.embed_dims_1, kernel_size=5,
            padding=(1 + 4 * dw_dilation[1]) // 2,
            groups=self.embed_dims_1, stride=1, dilation=dw_dilation[1],
        )
        # DW conv 2 (last channel slice, dilated 7x7)
        self.DW_conv2 = nn.Conv2d(
            in_channels=self.embed_dims_2, out_channels=self.embed_dims_2, kernel_size=7,
            padding=(1 + 6 * dw_dilation[2]) // 2,
            groups=self.embed_dims_2, stride=1, dilation=dw_dilation[2],
        )
        # a channel convolution
        self.PW_conv = nn.Conv2d(
            in_channels=embed_dims, out_channels=embed_dims, kernel_size=1)

    def forward(self, x):
        # Extra dilated convs are applied to slices of the base conv's output,
        # then the three scales are re-concatenated and mixed channel-wise.
        x_0 = self.DW_conv0(x)
        x_1 = self.DW_conv1(
            x_0[:, self.embed_dims_0: self.embed_dims_0+self.embed_dims_1, ...])
        x_2 = self.DW_conv2(
            x_0[:, self.embed_dims-self.embed_dims_2:, ...])
        x = torch.cat([
            x_0[:, :self.embed_dims_0, ...], x_1, x_2], dim=1)
        x = self.PW_conv(x)
        return x


class MultiOrderGatedAggregation(nn.Module):
    """Spatial Block with Multi-order Gated Aggregation in MogaNet."""

    def __init__(self, embed_dims, attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4], attn_shortcut=True):
        super(MultiOrderGatedAggregation, self).__init__()
        self.embed_dims = embed_dims
        self.attn_shortcut = attn_shortcut
        self.proj_1 = nn.Conv2d(
            in_channels=embed_dims, out_channels=embed_dims, kernel_size=1)
        self.gate = nn.Conv2d(
            in_channels=embed_dims, out_channels=embed_dims, kernel_size=1)
        self.value = MultiOrderDWConv(
            embed_dims=embed_dims, dw_dilation=attn_dw_dilation, channel_split=attn_channel_split)
        self.proj_2 = nn.Conv2d(
            in_channels=embed_dims, out_channels=embed_dims, kernel_size=1)

        # activation for gating and value
        self.act_value = nn.SiLU()
        self.act_gate = nn.SiLU()
        # decompose
        self.sigma = nn.Parameter(1e-5 * torch.ones((1, embed_dims, 1, 1)), requires_grad=True)

    def feat_decompose(self, x):
        x = self.proj_1(x)
        # x_d: [B, C, H, W] -> [B, C, 1, 1]
        x_d = F.adaptive_avg_pool2d(x, output_size=1)
        # Amplify the deviation from the global mean by a learned sigma.
        x = x + self.sigma * (x - x_d)
        x = self.act_value(x)
        return x

    def forward(self, x):
        if self.attn_shortcut:
            shortcut = x.clone()
        # proj 1x1
        x = self.feat_decompose(x)
        # gating and value branch
        g = self.gate(x)
        v = self.value(x)
        # aggregation
        x = self.proj_2(self.act_gate(g) * self.act_gate(v))
        if self.attn_shortcut:
            x = x + shortcut
        return x
diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/poolformer-checkpoint.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/poolformer-checkpoint.py new file mode 100644 index
0000000000000000000000000000000000000000..0c26fbc3aaed61a7cf1e1154202112840cf43e59 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/poolformer-checkpoint.py @@ -0,0 +1,97 @@ +# refer to the code from PoolFormer, Thanks! +# https://github.com/sail-sg/poolformer/blob/main/models/poolformer.py + +import torch +import torch.nn as nn +from timm.models.layers import DropPath, trunc_normal_ + + +class GroupNorm(nn.GroupNorm): + """ + Group Normalization with 1 group. + Input: tensor in shape [B, C, H, W] + """ + def __init__(self, num_channels, **kwargs): + super().__init__(1, num_channels, **kwargs) + + +class Pooling(nn.Module): + """ + Implementation of pooling for PoolFormer + --pool_size: pooling size + """ + def __init__(self, pool_size=3): + super().__init__() + self.pool = nn.AvgPool2d( + pool_size, stride=1, padding=pool_size//2, count_include_pad=False) + + def forward(self, x): + return self.pool(x) - x + + +class Mlp(nn.Module): + """ + Implementation of MLP with 1*1 convolutions. + Input: tensor with shape [B, C, H, W] + """ + def __init__(self, in_features, hidden_features=None, + out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Conv2d): + trunc_normal_(m.weight, std=.02) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class PoolFormerBlock(nn.Module): + """ + Implementation of one PoolFormer block. 
+ --dim: embedding dim + --pool_size: pooling size + --mlp_ratio: mlp expansion ratio + --act_layer: activation + --norm_layer: normalization + --drop: dropout rate + --drop path: Stochastic Depth, + refer to https://arxiv.org/abs/1603.09382 + --init_value: LayerScale, + refer to https://arxiv.org/abs/2103.17239 + """ + def __init__(self, dim, pool_size=3, mlp_ratio=4., drop=0., drop_path=0., + init_value=1e-5, act_layer=nn.GELU, norm_layer=GroupNorm): + super().__init__() + + self.norm1 = norm_layer(dim) + self.token_mixer = Pooling(pool_size=pool_size) + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, + act_layer=act_layer, drop=drop) + # The following two techniques are useful to train deep PoolFormers. + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + def forward(self, x): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.token_mixer(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/uniformer-checkpoint.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/uniformer-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..a6817516e346736b8697237a2a744919a356f1b5 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/.ipynb_checkpoints/uniformer-checkpoint.py @@ -0,0 +1,153 @@ +import math +import torch +import torch.nn as nn +from timm.models.layers import DropPath, trunc_normal_ + + +class Mlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + 
super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class CMlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights + self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class CBlock(nn.Module): + def __init__(self, dim, 
num_heads=4, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = nn.BatchNorm2d(dim) + self.conv1 = nn.Conv2d(dim, dim, 1) + self.conv2 = nn.Conv2d(dim, dim, 1) + self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = CMlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + x = x + self.pos_embed(x) + x = x + self.drop_path(self.conv2(self.attn(self.conv1(self.norm1(x))))) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + + +class SABlock(nn.Module): + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., init_value=1e-6, act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, + attn_drop=attn_drop, proj_drop=drop) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = 
DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + # layer scale + self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)),requires_grad=True) + self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)),requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma_1', 'gamma_2'} + + def forward(self, x): + x = x + self.pos_embed(x) + B, N, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x))) + x = x.transpose(1, 2).reshape(B, N, H, W) + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__init__.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b7835a19fb1507a7b119b24771cb3cbf401919e5 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__init__.py @@ -0,0 +1,10 @@ +from .hornet import HorBlock +from .moganet import ChannelAggregationFFN, MultiOrderGatedAggregation, MultiOrderDWConv +from .poolformer import PoolFormerBlock +from .uniformer import CBlock, SABlock +from .van import DWConv, MixMlp, VANBlock + +__all__ = [ + 'HorBlock', 'ChannelAggregationFFN', 'MultiOrderGatedAggregation', 'MultiOrderDWConv', + 'PoolFormerBlock', 'CBlock', 'SABlock', 'DWConv', 'MixMlp', 'VANBlock', +] \ No newline at end of file diff --git 
a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/__init__.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4dbd3daffb4193801b41428e39175b6dc6881def Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/__init__.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/hornet.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/hornet.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e44c7787b2542a40cf8d177c9bacd9d0e8538ae2 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/hornet.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/moganet.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/moganet.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0c745bbb2006ca1887f302ddf1024bddf28477f Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/moganet.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/poolformer.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/poolformer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..604d490d73f173e58a8891f5ed15fef877fd10a2 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/poolformer.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/uniformer.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/uniformer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0c061308f9247174c81140cb35bca1917a037b1 Binary files /dev/null 
and b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/uniformer.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/van.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/van.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d53d72b5ea446f91394a95f4a54b640004da57a7 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model/modules_api/layers/__pycache__/van.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/hornet.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/hornet.py new file mode 100644 index 0000000000000000000000000000000000000000..3e69c76d83a0e336be3d48ac604bde362de68b79 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/hornet.py @@ -0,0 +1,112 @@ +# refer to the code from HorNet, Thanks! +# https://github.com/raoyongming/HorNet + +import torch +import torch.nn as nn +import torch.nn.functional as F +from timm.models.layers import DropPath +import torch.fft + + +def get_dwconv(dim, kernel, bias): + return nn.Conv2d(dim, dim, kernel_size=kernel, padding=(kernel-1)//2 ,bias=bias, groups=dim) + + +class gnconv(nn.Module): + def __init__(self, dim, order=5, gflayer=None, h=14, w=8, s=1.0): + super().__init__() + self.order = order + self.dims = [dim // 2 ** i for i in range(order)] + self.dims.reverse() + self.proj_in = nn.Conv2d(dim, 2*dim, 1) + + if gflayer is None: + self.dwconv = get_dwconv(sum(self.dims), 7, True) + else: + self.dwconv = gflayer(sum(self.dims), h=h, w=w) + + self.proj_out = nn.Conv2d(dim, dim, 1) + + self.pws = nn.ModuleList( + [nn.Conv2d(self.dims[i], self.dims[i+1], 1) for i in range(order-1)] + ) + + self.scale = s + print('[gnconv]', order, 'order with dims=', self.dims, 'scale=%.4f'%self.scale) + + def forward(self, x, mask=None, dummy=False): + fused_x = self.proj_in(x) + pwa, abc = torch.split(fused_x, (self.dims[0], sum(self.dims)), dim=1) + + 
dw_abc = self.dwconv(abc) * self.scale + + dw_list = torch.split(dw_abc, self.dims, dim=1) + x = pwa * dw_list[0] + + for i in range(self.order -1): + x = self.pws[i](x) * dw_list[i+1] + + x = self.proj_out(x) + + return x + +class LayerNorm(nn.Module): + r""" LayerNorm that supports two data formats: channels_last (default) or channels_first. + The ordering of the dimensions in the inputs. channels_last corresponds to inputs with + shape (batch_size, height, width, channels) while channels_first corresponds to inputs + with shape (batch_size, channels, height, width). + """ + def __init__(self, normalized_shape, eps=1e-6, data_format="channels_last"): + super().__init__() + self.weight = nn.Parameter(torch.ones(normalized_shape)) + self.bias = nn.Parameter(torch.zeros(normalized_shape)) + self.eps = eps + self.data_format = data_format + if self.data_format not in ["channels_last", "channels_first"]: + raise NotImplementedError + self.normalized_shape = (normalized_shape, ) + + def forward(self, x): + if self.data_format == "channels_last": + return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps) + elif self.data_format == "channels_first": + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x + + +class HorBlock(nn.Module): + """ HorNet block """ + + def __init__(self, dim, order=4, mlp_ratio=4, drop_path=0., init_value=1e-6, gnconv=gnconv): + super().__init__() + + self.norm1 = LayerNorm(dim, eps=1e-6, data_format='channels_first') + self.gnconv = gnconv(dim, order) # depthwise conv + self.norm2 = LayerNorm(dim, eps=1e-6) + self.pwconv1 = nn.Linear(dim, int(mlp_ratio * dim)) # pointwise/1x1 convs, implemented with linear layers + self.act = nn.GELU() + self.pwconv2 = nn.Linear(int(mlp_ratio * dim), dim) + self.gamma1 = nn.Parameter(init_value * torch.ones(dim), requires_grad=True) + self.gamma2 = 
nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + def forward(self, x): + B, C, H, W = x.shape + gamma1 = self.gamma1.view(C, 1, 1) + x = x + self.drop_path(gamma1 * self.gnconv(self.norm1(x))) + + input = x + x = x.permute(0, 2, 3, 1) # (N, C, H, W) -> (N, H, W, C) + x = self.norm2(x) + x = self.pwconv1(x) + x = self.act(x) + x = self.pwconv2(x) + if self.gamma2 is not None: + x = self.gamma2 * x + x = x.permute(0, 3, 1, 2) # (N, H, W, C) -> (N, C, H, W) + + x = input + self.drop_path(x) + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/moganet.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/moganet.py new file mode 100644 index 0000000000000000000000000000000000000000..e861912689ce431ec99b7d0ff9b374efcfed02e0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/moganet.py @@ -0,0 +1,140 @@ +# refer to the code from MogaNet, Thanks! +# https://github.com/Westlake-AI/MogaNet/blob/main/models/moganet.py + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class ChannelAggregationFFN(nn.Module): + """An implementation of FFN with Channel Aggregation in MogaNet.""" + + def __init__(self, embed_dims, mlp_hidden_dims, kernel_size=3, act_layer=nn.GELU, ffn_drop=0.): + super(ChannelAggregationFFN, self).__init__() + self.embed_dims = embed_dims + self.mlp_hidden_dims = mlp_hidden_dims + + self.fc1 = nn.Conv2d( + in_channels=embed_dims, out_channels=self.mlp_hidden_dims, kernel_size=1) + self.dwconv = nn.Conv2d( + in_channels=self.mlp_hidden_dims, out_channels=self.mlp_hidden_dims, kernel_size=kernel_size, + padding=kernel_size // 2, bias=True, groups=self.mlp_hidden_dims) + self.act = act_layer() + self.fc2 = nn.Conv2d( + in_channels=mlp_hidden_dims, out_channels=embed_dims, kernel_size=1) + self.drop = nn.Dropout(ffn_drop) + + self.decompose = nn.Conv2d( + in_channels=self.mlp_hidden_dims, 
out_channels=1, kernel_size=1) + self.sigma = nn.Parameter( + 1e-5 * torch.ones((1, mlp_hidden_dims, 1, 1)), requires_grad=True) + self.decompose_act = act_layer() + + def feat_decompose(self, x): + x = x + self.sigma * (x - self.decompose_act(self.decompose(x))) + return x + + def forward(self, x): + # proj 1 + x = self.fc1(x) + x = self.dwconv(x) + x = self.act(x) + x = self.drop(x) + # proj 2 + x = self.feat_decompose(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class MultiOrderDWConv(nn.Module): + """Multi-order Features with Dilated DWConv Kernel in MogaNet.""" + + def __init__(self, embed_dims, dw_dilation=[1, 2, 3], channel_split=[1, 3, 4]): + super(MultiOrderDWConv, self).__init__() + self.split_ratio = [i / sum(channel_split) for i in channel_split] + self.embed_dims_1 = int(self.split_ratio[1] * embed_dims) + self.embed_dims_2 = int(self.split_ratio[2] * embed_dims) + self.embed_dims_0 = embed_dims - self.embed_dims_1 - self.embed_dims_2 + self.embed_dims = embed_dims + assert len(dw_dilation) == len(channel_split) == 3 + assert 1 <= min(dw_dilation) and max(dw_dilation) <= 3 + assert embed_dims % sum(channel_split) == 0 + + # basic DW conv + self.DW_conv0 = nn.Conv2d( + in_channels=self.embed_dims, out_channels=self.embed_dims, kernel_size=5, + padding=(1 + 4 * dw_dilation[0]) // 2, + groups=self.embed_dims, stride=1, dilation=dw_dilation[0], + ) + # DW conv 1 + self.DW_conv1 = nn.Conv2d( + in_channels=self.embed_dims_1, out_channels=self.embed_dims_1, kernel_size=5, + padding=(1 + 4 * dw_dilation[1]) // 2, + groups=self.embed_dims_1, stride=1, dilation=dw_dilation[1], + ) + # DW conv 2 + self.DW_conv2 = nn.Conv2d( + in_channels=self.embed_dims_2, out_channels=self.embed_dims_2, kernel_size=7, + padding=(1 + 6 * dw_dilation[2]) // 2, + groups=self.embed_dims_2, stride=1, dilation=dw_dilation[2], + ) + # a channel convolution + self.PW_conv = nn.Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + + def forward(self, x): 
+ x_0 = self.DW_conv0(x) + x_1 = self.DW_conv1( + x_0[:, self.embed_dims_0: self.embed_dims_0+self.embed_dims_1, ...]) + x_2 = self.DW_conv2( + x_0[:, self.embed_dims-self.embed_dims_2:, ...]) + x = torch.cat([ + x_0[:, :self.embed_dims_0, ...], x_1, x_2], dim=1) + x = self.PW_conv(x) + return x + + +class MultiOrderGatedAggregation(nn.Module): + """Spatial Block with Multi-order Gated Aggregation in MogaNet.""" + + def __init__(self, embed_dims, attn_dw_dilation=[1, 2, 3], attn_channel_split=[1, 3, 4], attn_shortcut=True): + super(MultiOrderGatedAggregation, self).__init__() + self.embed_dims = embed_dims + self.attn_shortcut = attn_shortcut + self.proj_1 = nn.Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + self.gate = nn.Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + self.value = MultiOrderDWConv( + embed_dims=embed_dims, dw_dilation=attn_dw_dilation, channel_split=attn_channel_split) + self.proj_2 = nn.Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + + # activation for gating and value + self.act_value = nn.SiLU() + self.act_gate = nn.SiLU() + # decompose + self.sigma = nn.Parameter(1e-5 * torch.ones((1, embed_dims, 1, 1)), requires_grad=True) + + def feat_decompose(self, x): + x = self.proj_1(x) + # x_d: [B, C, H, W] -> [B, C, 1, 1] + x_d = F.adaptive_avg_pool2d(x, output_size=1) + x = x + self.sigma * (x - x_d) + x = self.act_value(x) + return x + + def forward(self, x): + if self.attn_shortcut: + shortcut = x.clone() + # proj 1x1 + x = self.feat_decompose(x) + # gating and value branch + g = self.gate(x) + v = self.value(x) + # aggregation + x = self.proj_2(self.act_gate(g) * self.act_gate(v)) + if self.attn_shortcut: + x = x + shortcut + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/poolformer.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/poolformer.py new file mode 100644 index 
0000000000000000000000000000000000000000..0c26fbc3aaed61a7cf1e1154202112840cf43e59 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/poolformer.py @@ -0,0 +1,97 @@ +# refer to the code from PoolFormer, Thanks! +# https://github.com/sail-sg/poolformer/blob/main/models/poolformer.py + +import torch +import torch.nn as nn +from timm.models.layers import DropPath, trunc_normal_ + + +class GroupNorm(nn.GroupNorm): + """ + Group Normalization with 1 group. + Input: tensor in shape [B, C, H, W] + """ + def __init__(self, num_channels, **kwargs): + super().__init__(1, num_channels, **kwargs) + + +class Pooling(nn.Module): + """ + Implementation of pooling for PoolFormer + --pool_size: pooling size + """ + def __init__(self, pool_size=3): + super().__init__() + self.pool = nn.AvgPool2d( + pool_size, stride=1, padding=pool_size//2, count_include_pad=False) + + def forward(self, x): + return self.pool(x) - x + + +class Mlp(nn.Module): + """ + Implementation of MLP with 1*1 convolutions. + Input: tensor with shape [B, C, H, W] + """ + def __init__(self, in_features, hidden_features=None, + out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Conv2d): + trunc_normal_(m.weight, std=.02) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class PoolFormerBlock(nn.Module): + """ + Implementation of one PoolFormer block. 
+ --dim: embedding dim + --pool_size: pooling size + --mlp_ratio: mlp expansion ratio + --act_layer: activation + --norm_layer: normalization + --drop: dropout rate + --drop path: Stochastic Depth, + refer to https://arxiv.org/abs/1603.09382 + --init_value: LayerScale, + refer to https://arxiv.org/abs/2103.17239 + """ + def __init__(self, dim, pool_size=3, mlp_ratio=4., drop=0., drop_path=0., + init_value=1e-5, act_layer=nn.GELU, norm_layer=GroupNorm): + super().__init__() + + self.norm1 = norm_layer(dim) + self.token_mixer = Pooling(pool_size=pool_size) + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, + act_layer=act_layer, drop=drop) + # The following two techniques are useful to train deep PoolFormers. + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + def forward(self, x): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.token_mixer(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/uniformer.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/uniformer.py new file mode 100644 index 0000000000000000000000000000000000000000..a6817516e346736b8697237a2a744919a356f1b5 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/uniformer.py @@ -0,0 +1,153 @@ +import math +import torch +import torch.nn as nn +from timm.models.layers import DropPath, trunc_normal_ + + +class Mlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features 
or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class CMlp(nn.Module): + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = act_layer() + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class Attention(nn.Module): + def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + # NOTE scale factor was wrong in my original version, can set manually to be compat with prev weights + self.scale = qk_scale or head_dim ** -0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class CBlock(nn.Module): + def __init__(self, dim, num_heads=4, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., 
act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = nn.BatchNorm2d(dim) + self.conv1 = nn.Conv2d(dim, dim, 1) + self.conv2 = nn.Conv2d(dim, dim, 1) + self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = CMlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + @torch.jit.ignore + def no_weight_decay(self): + return {} + + def forward(self, x): + x = x + self.pos_embed(x) + x = x + self.drop_path(self.conv2(self.attn(self.conv1(self.norm1(x))))) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + + +class SABlock(nn.Module): + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, qk_scale=None, drop=0., attn_drop=0., + drop_path=0., init_value=1e-6, act_layer=nn.GELU, norm_layer=nn.LayerNorm): + super().__init__() + self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim) + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, qkv_bias=qkv_bias, qk_scale=qk_scale, + attn_drop=attn_drop, proj_drop=drop) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + # layer scale + self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)),requires_grad=True) + self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)),requires_grad=True) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + @torch.jit.ignore + def no_weight_decay(self): + return {'gamma_1', 'gamma_2'} + + def forward(self, x): + x = x + self.pos_embed(x) + B, N, H, W = x.shape + x = x.flatten(2).transpose(1, 2) + x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x))) + x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x))) + x = x.transpose(1, 2).reshape(B, N, H, W) + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/layers/van.py b/Exp3_Kuroshio_forecasting/model/modules_api/layers/van.py new file mode 100644 index 0000000000000000000000000000000000000000..ac13d08a37488688a785cc9313d68215d3fff508 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/layers/van.py @@ -0,0 +1,119 @@ +# refer to the code from VAN, Thanks! 
+# https://github.com/Visual-Attention-Network/VAN-Classification + +import math +import torch +import torch.nn as nn + +from timm.models.layers import DropPath, trunc_normal_ + + +class DWConv(nn.Module): + def __init__(self, dim=768): + super(DWConv, self).__init__() + self.dwconv = nn.Conv2d(dim, dim, 3, 1, 1, bias=True, groups=dim) + + def forward(self, x): + x = self.dwconv(x) + return x + + +class MixMlp(nn.Module): + def __init__(self, + in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) # 1x1 + self.dwconv = DWConv(hidden_features) # CFF: Convlutional feed-forward network + self.act = act_layer() # GELU + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) # 1x1 + self.drop = nn.Dropout(drop) + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + fan_out //= m.groups + m.weight.data.normal_(0, math.sqrt(2.0 / fan_out)) + if m.bias is not None: + m.bias.data.zero_() + + def forward(self, x): + x = self.fc1(x) + x = self.dwconv(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class LKA(nn.Module): + def __init__(self, dim): + super().__init__() + self.conv0 = nn.Conv2d(dim, dim, 5, padding=2, groups=dim) + self.conv_spatial = nn.Conv2d( + dim, dim, 7, stride=1, padding=9, groups=dim, dilation=3) + self.conv1 = nn.Conv2d(dim, dim, 1) + + + def forward(self, x): + u = x.clone() + attn = self.conv0(x) + attn = self.conv_spatial(attn) + attn = self.conv1(attn) 
+ + return u * attn + + +class Attention(nn.Module): + def __init__(self, d_model, attn_shortcut=True): + super().__init__() + + self.proj_1 = nn.Conv2d(d_model, d_model, 1) + self.activation = nn.GELU() + self.spatial_gating_unit = LKA(d_model) + self.proj_2 = nn.Conv2d(d_model, d_model, 1) + self.attn_shortcut = attn_shortcut + + def forward(self, x): + if self.attn_shortcut: + shortcut = x.clone() + x = self.proj_1(x) + x = self.activation(x) + x = self.spatial_gating_unit(x) + x = self.proj_2(x) + if self.attn_shortcut: + x = x + shortcut + return x + + +class VANBlock(nn.Module): + def __init__(self, dim, mlp_ratio=4., drop=0.,drop_path=0., init_value=1e-2, act_layer=nn.GELU, attn_shortcut=True): + super().__init__() + self.norm1 = nn.BatchNorm2d(dim) + self.attn = Attention(dim, attn_shortcut=attn_shortcut) + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + + self.norm2 = nn.BatchNorm2d(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = MixMlp( + in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + self.layer_scale_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True) + + def forward(self, x): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * self.attn(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * self.mlp(self.norm2(x))) + return x diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/modules.py b/Exp3_Kuroshio_forecasting/model/modules_api/modules.py new file mode 100644 index 0000000000000000000000000000000000000000..5d76c1144a60c88d108284c1e7cb5fce0dd06877 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/modules.py @@ -0,0 +1,66 @@ +from torch import nn + + +class BasicConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, transpose=False, act_norm=False): + 
super(BasicConv2d, self).__init__() + self.act_norm=act_norm + if not transpose: + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) + else: + self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,output_padding=stride //2 ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvSC, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride, + padding=1, transpose=transpose, act_norm=act_norm) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, groups, act_norm=False): + super(GroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class Inception(nn.Module): + def __init__(self, C_in, C_hid, C_out, incep_ker=[3,5,7,11], groups=8): + super(Inception, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1, padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y \ No newline 
at end of file diff --git a/Exp3_Kuroshio_forecasting/model/modules_api/test.ipynb b/Exp3_Kuroshio_forecasting/model/modules_api/test.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..13e96ab3d041c3feaee103938202d21fe75ccfdf --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/modules_api/test.ipynb @@ -0,0 +1,561 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch import nn\n", + "import math\n", + "from timm.layers import DropPath, trunc_normal_\n", + "\n", + "def stride_generator(N, reverse=False):\n", + " strides = [1, 2] * 10\n", + " if reverse:\n", + " return list(reversed(strides[:N]))\n", + " else:\n", + " return strides[:N]\n", + " \n", + "class MLP(nn.Module):\n", + " def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):\n", + " super(MLP, self).__init__()\n", + " out_features = out_features or in_features\n", + " hidden_features = hidden_features or in_features\n", + " self.fc1 = nn.Linear(in_features, hidden_features)\n", + " self.act = act_layer()\n", + " self.fc2 = nn.Linear(hidden_features, out_features)\n", + " self.drop = nn.Dropout(drop)\n", + "\n", + " def forward(self, x):\n", + " x = self.fc1(x)\n", + " x = self.act(x)\n", + " x = self.drop(x)\n", + " x = self.fc2(x)\n", + " x = self.drop(x)\n", + " return x\n", + "\n", + "class ConvMLP(nn.Module):\n", + " def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):\n", + " super(ConvMLP, self).__init__()\n", + " out_features = out_features or in_features\n", + " hidden_features = hidden_features or in_features\n", + " self.fc1 = nn.Conv2d(in_features, hidden_features, 1)\n", + " self.act = act_layer()\n", + " self.fc2 = nn.Conv2d(hidden_features, out_features, 1)\n", + " self.drop = nn.Dropout(drop)\n", + "\n", + " def forward(self, x):\n", + " x = self.fc1(x)\n", + " x = 
self.act(x)\n", + " x = self.drop(x)\n", + " x = self.fc2(x)\n", + " x = self.drop(x)\n", + " return x\n", + "\n", + "class Attention(nn.Module):\n", + " def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):\n", + " super(Attention, self).__init__()\n", + " self.num_heads = num_heads\n", + " head_dim = dim // num_heads\n", + " self.scale = qk_scale or head_dim ** -0.5\n", + "\n", + " self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)\n", + " self.attn_drop = nn.Dropout(attn_drop)\n", + " self.proj = nn.Linear(dim, dim)\n", + " self.proj_drop = nn.Dropout(proj_drop)\n", + "\n", + " def forward(self, x):\n", + " B, N, C = x.shape\n", + " qkv = (\n", + " self.qkv(x)\n", + " .reshape(B, N, 3, self.num_heads, C // self.num_heads)\n", + " .permute(2, 0, 3, 1, 4)\n", + " )\n", + " q, k, v = qkv[0], qkv[1], qkv[2]\n", + "\n", + " attn = (q @ k.transpose(-2, -1)) * self.scale\n", + " attn = attn.softmax(dim=-1)\n", + " attn = self.attn_drop(attn)\n", + "\n", + " x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n", + " x = self.proj(x)\n", + " x = self.proj_drop(x)\n", + " return x\n", + "\n", + "class ConvBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " num_heads=4,\n", + " mlp_ratio=4.,\n", + " qkv_bias=False,\n", + " qk_scale=None,\n", + " drop=0.,\n", + " attn_drop=0.,\n", + " drop_path=0.,\n", + " act_layer=nn.GELU,\n", + " norm_layer=nn.LayerNorm\n", + " ):\n", + " super(ConvBlock, self).__init__()\n", + " self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)\n", + " self.norm1 = nn.BatchNorm2d(dim)\n", + " self.conv1 = nn.Conv2d(dim, dim, 1)\n", + " self.conv2 = nn.Conv2d(dim, dim, 1)\n", + " self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim)\n", + " self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n", + " self.norm2 = nn.BatchNorm2d(dim)\n", + " mlp_hidden_dim = int(dim * mlp_ratio)\n", + " self.mlp = ConvMLP(\n", + " in_features=dim,\n", + " hidden_features=mlp_hidden_dim,\n", + " act_layer=act_layer,\n", + " drop=drop\n", + " )\n", + "\n", + " self.apply(self._init_weights)\n", + "\n", + " def _init_weights(self, m):\n", + " if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):\n", + " nn.init.constant_(m.bias, 0)\n", + " nn.init.constant_(m.weight, 1.0)\n", + " elif isinstance(m, nn.Conv2d):\n", + " fan_out = (\n", + " m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n", + " )\n", + " fan_out //= m.groups\n", + " m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))\n", + " if m.bias is not None:\n", + " m.bias.data.zero_()\n", + "\n", + " @torch.jit.ignore\n", + " def no_weight_decay(self):\n", + " return {}\n", + "\n", + " def forward(self, x):\n", + " x = x + self.pos_embed(x)\n", + " x = x + self.drop_path(\n", + " self.conv2(self.attn(self.conv1(self.norm1(x))))\n", + " )\n", + " x = x + self.drop_path(self.mlp(self.norm2(x)))\n", + " return x\n", + "\n", + "class SelfAttentionBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " num_heads,\n", + " mlp_ratio=4.,\n", + " qkv_bias=False,\n", + " qk_scale=None,\n", + " drop=0.,\n", + " attn_drop=0.,\n", + " drop_path=0.,\n", + " init_value=1e-6,\n", + " act_layer=nn.GELU,\n", + " norm_layer=nn.LayerNorm\n", + " ):\n", + " super(SelfAttentionBlock, self).__init__()\n", + " self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)\n", + " self.norm1 = norm_layer(dim)\n", + " self.attn = Attention(\n", + " dim,\n", + " num_heads=num_heads,\n", + " qkv_bias=qkv_bias,\n", + " qk_scale=qk_scale,\n", + " attn_drop=attn_drop,\n", + " proj_drop=drop\n", + " )\n", + " self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity()\n", + " self.norm2 = norm_layer(dim)\n", + " mlp_hidden_dim = int(dim * mlp_ratio)\n", + " self.mlp = MLP(\n", + " in_features=dim,\n", + " hidden_features=mlp_hidden_dim,\n", + " act_layer=act_layer,\n", + " drop=drop\n", + " )\n", + " self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)\n", + " self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)\n", + "\n", + " self.apply(self._init_weights)\n", + "\n", + " def _init_weights(self, m):\n", + " if isinstance(m, nn.Linear):\n", + " trunc_normal_(m.weight, std=.02)\n", + " if isinstance(m, nn.Linear) and m.bias is not None:\n", + " nn.init.constant_(m.bias, 0)\n", + " elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):\n", + " nn.init.constant_(m.bias, 0)\n", + " nn.init.constant_(m.weight, 1.0)\n", + "\n", + " @torch.jit.ignore\n", + " def no_weight_decay(self):\n", + " return {'gamma_1', 'gamma_2'}\n", + "\n", + " def forward(self, x):\n", + " x = x + self.pos_embed(x)\n", + " B, N, H, W = x.shape\n", + " x = x.flatten(2).transpose(1, 2)\n", + " x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x)))\n", + " x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x)))\n", + " x = x.transpose(1, 2).reshape(B, N, H, W)\n", + " return x\n", + "\n", + "def UniformerSubBlock(\n", + " embed_dims,\n", + " mlp_ratio=4.,\n", + " drop=0.,\n", + " drop_path=0.,\n", + " init_value=1e-6,\n", + " block_type='Conv'\n", + "):\n", + " assert block_type in ['Conv', 'MHSA']\n", + " if block_type == 'Conv':\n", + " return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)\n", + " else:\n", + " return SelfAttentionBlock(\n", + " dim=embed_dims,\n", + " num_heads=8,\n", + " mlp_ratio=mlp_ratio,\n", + " qkv_bias=True,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " init_value=init_value\n", + " )\n", + "\n", + "class SpatioTemporalEvolutionBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " 
in_channels,\n", + " out_channels,\n", + " input_resolution=None,\n", + " mlp_ratio=8.,\n", + " drop=0.0,\n", + " drop_path=0.0,\n", + " layer_i=0\n", + " ):\n", + " super(SpatioTemporalEvolutionBlock, self).__init__()\n", + " self.in_channels = in_channels\n", + " self.out_channels = out_channels\n", + " block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv'\n", + " self.block = UniformerSubBlock(\n", + " in_channels,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " block_type=block_type\n", + " )\n", + "\n", + " if in_channels != out_channels:\n", + " self.reduction = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=1,\n", + " stride=1,\n", + " padding=0\n", + " )\n", + "\n", + " def forward(self, x):\n", + " z = self.block(x)\n", + " if self.in_channels != self.out_channels:\n", + " z = self.reduction(z)\n", + " return z\n", + "\n", + "class SpatioTemporalEvolution(nn.Module):\n", + " def __init__(\n", + " self,\n", + " channel_in,\n", + " channel_hid,\n", + " N2,\n", + " input_resolution=None,\n", + " mlp_ratio=4.,\n", + " drop=0.0,\n", + " drop_path=0.1\n", + " ):\n", + " super(SpatioTemporalEvolution, self).__init__()\n", + " assert N2 >= 2 and mlp_ratio > 1\n", + " self.N2 = N2\n", + " dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)]\n", + "\n", + " evolution_layers = [SpatioTemporalEvolutionBlock(\n", + " channel_in,\n", + " channel_hid,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=dpr[0],\n", + " layer_i=0\n", + " )]\n", + "\n", + " for i in range(1, N2 - 1):\n", + " evolution_layers.append(SpatioTemporalEvolutionBlock(\n", + " channel_hid,\n", + " channel_hid,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=dpr[i],\n", + " layer_i=i\n", + " ))\n", + "\n", + " evolution_layers.append(SpatioTemporalEvolutionBlock(\n", + " channel_hid,\n", + " channel_in,\n", + " 
input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " layer_i=N2 - 1\n", + " ))\n", + " self.enc = nn.Sequential(*evolution_layers)\n", + "\n", + " def forward(self, x):\n", + " B, T, C, H, W = x.shape\n", + " x = x.reshape(B, T * C, H, W)\n", + " z = x\n", + " for i in range(self.N2):\n", + " z = self.enc[i](z)\n", + " y = z.reshape(B, T, C, H, W)\n", + " return y\n", + "\n", + "class BasicConv2d(nn.Module):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size,\n", + " stride,\n", + " padding,\n", + " transpose=False,\n", + " act_norm=False\n", + " ):\n", + " super(BasicConv2d, self).__init__()\n", + " self.act_norm = act_norm\n", + " if not transpose:\n", + " self.conv = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding\n", + " )\n", + " else:\n", + " self.conv = nn.ConvTranspose2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding,\n", + " output_padding=stride // 2\n", + " )\n", + " self.norm = nn.GroupNorm(2, out_channels)\n", + " self.act = nn.LeakyReLU(0.2, inplace=True)\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " if self.act_norm:\n", + " y = self.act(self.norm(y))\n", + " return y\n", + "\n", + "class ConvDynamicsLayer(nn.Module):\n", + " def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True):\n", + " super(ConvDynamicsLayer, self).__init__()\n", + " if stride == 1:\n", + " transpose = False\n", + " self.conv = BasicConv2d(\n", + " C_in,\n", + " C_out,\n", + " kernel_size=3,\n", + " stride=stride,\n", + " padding=1,\n", + " transpose=transpose,\n", + " act_norm=act_norm\n", + " )\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " return y\n", + "\n", + "class MultiGroupConv2d(nn.Module):\n", + " def __init__(\n", + " self,\n", + " 
in_channels,\n", + " out_channels,\n", + " kernel_size,\n", + " stride,\n", + " padding,\n", + " groups,\n", + " act_norm=False\n", + " ):\n", + " super(MultiGroupConv2d, self).__init__()\n", + " self.act_norm = act_norm\n", + " if in_channels % groups != 0:\n", + " groups = 1\n", + " self.conv = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding,\n", + " groups=groups\n", + " )\n", + " self.norm = nn.GroupNorm(groups, out_channels)\n", + " self.activate = nn.LeakyReLU(0.2, inplace=True)\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " if self.act_norm:\n", + " y = self.activate(self.norm(y))\n", + " return y\n", + "\n", + "\n", + "class AtmosphericEncoder(nn.Module):\n", + " def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers):\n", + " super(AtmosphericEncoder, self).__init__()\n", + " strides = stride_generator(num_spatial_layers)\n", + " self.enc = nn.Sequential(\n", + " ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]),\n", + " *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]]\n", + " )\n", + "\n", + " def forward(self, x):\n", + " enc1 = self.enc[0](x)\n", + " latent = enc1\n", + " for i in range(1, len(self.enc)):\n", + " latent = self.enc[i](latent)\n", + " return latent, enc1\n", + "\n", + "class AtmosphericDecoder(nn.Module):\n", + " def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers):\n", + " super(AtmosphericDecoder, self).__init__()\n", + " strides = stride_generator(num_spatial_layers, reverse=True)\n", + " self.dec = nn.Sequential(\n", + " *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]],\n", + " ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True)\n", + " )\n", + " self.readout = nn.Conv2d(spatial_hidden_dim, C_out, 1)\n", + "\n", + " def forward(self, hid, 
enc1=None):\n", + " for i in range(0, len(self.dec) - 1):\n", + " hid = self.dec[i](hid)\n", + " Y = self.dec[-1](torch.cat([hid, enc1], dim=1))\n", + " Y = self.readout(Y)\n", + " return Y\n", + "\n", + "class Triton(nn.Module):\n", + " def __init__(\n", + " self,\n", + " shape_in,\n", + " spatial_hidden_dim=64,\n", + " output_channels=4,\n", + " temporal_hidden_dim=128,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8,\n", + " in_time_seq_length=10,\n", + " out_time_seq_length=10\n", + " ):\n", + " super(Triton, self).__init__()\n", + " T, C, H, W = shape_in\n", + " self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2))\n", + " self.W1 = int(W / 2 ** (num_spatial_layers / 2))\n", + " self.output_dim = output_channels\n", + " self.input_time_seq_length = in_time_seq_length\n", + " self.output_time_seq_length = out_time_seq_length\n", + " \n", + " self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers)\n", + " self.temporal_evolution = SpatioTemporalEvolution(\n", + " T * spatial_hidden_dim,\n", + " temporal_hidden_dim,\n", + " num_temporal_layers,\n", + " input_resolution=[self.H1, self.W1],\n", + " mlp_ratio=4.0,\n", + " drop_path=0.1\n", + " )\n", + " self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers)\n", + "\n", + " def forward(self, input_state):\n", + " \"\"\"\n", + " 1. Reshape the input state to match the encoder's input requirements.\n", + " 2. Extract features using the Atmospheric Encoder and obtain skip connections.\n", + " 3. Perform spatio-temporal evolution on the encoded features.\n", + " 4. 
Decode the evolved features to generate the final output.\n", + " \"\"\"\n", + " batch_size, temporal_length, channels, height, width = input_state.shape\n", + " reshaped_input = input_state.view(batch_size * temporal_length, channels, height, width)\n", + " \n", + " encoded_features, skip_connection = self.atmospheric_encoder(reshaped_input)\n", + " _, encoded_channels, encoded_height, encoded_width = encoded_features.shape\n", + " encoded_features = encoded_features.view(batch_size, temporal_length, encoded_channels, encoded_height, encoded_width)\n", + " \n", + " temporal_bias = encoded_features\n", + " temporal_hidden = self.temporal_evolution(temporal_bias)\n", + " reshaped_hidden = temporal_hidden.view(batch_size * temporal_length, encoded_channels, encoded_height, encoded_width)\n", + "\n", + " decoded_output = self.atmospheric_decoder(reshaped_hidden, skip_connection)\n", + " final_output = decoded_output.view(batch_size, temporal_length, -1, height, width)\n", + " \n", + " return final_output\n", + "\n", + "\n", + "def count_parameters(model):\n", + " return sum(p.numel() for p in model.parameters() if p.requires_grad)\n", + "\n", + "if __name__ == '__main__':\n", + " inputs = torch.randn(1, 10, 7, 720, 1440)\n", + " model = Triton(\n", + " shape_in=(10, 7, 720, 1440),\n", + " spatial_hidden_dim=32,\n", + " output_channels=1,\n", + " temporal_hidden_dim=64,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8)\n", + " output = model(inputs)\n", + " print(output.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "envwu", + "language": "python", + "name": "python3" + }, + "language_info": { + 
"codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.19" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Exp3_Kuroshio_forecasting/model/nmo_fourier.py b/Exp3_Kuroshio_forecasting/model/nmo_fourier.py new file mode 100644 index 0000000000000000000000000000000000000000..4978108f534ef23484a93674733d388e2300132f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/nmo_fourier.py @@ -0,0 +1,194 @@ +import torch +from torch import nn +from model.modules_api.modules import ConvSC, Inception +from model.modules_api.fouriermodules import * +from model.modules_api.evolution import Spatio_temporal_evolution +import math + +def stride_generator(N, reverse=False): + strides = [1, 2]*10 + if reverse: return list(reversed(strides[:N])) + else: return strides[:N] + +class Encoder(nn.Module): + def __init__(self,C_in, C_hid, N_S): + super(Encoder,self).__init__() + strides = stride_generator(N_S) + self.enc = nn.Sequential( + ConvSC(C_in, C_hid, stride=strides[0]), + *[ConvSC(C_hid, C_hid, stride=s) for s in strides[1:]] + ) + + def forward(self,x): + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1,len(self.enc)): + latent = self.enc[i](latent) + return latent,enc1 + + +class Decoder(nn.Module): + def __init__(self,C_hid, C_out, N_S): + super(Decoder,self).__init__() + strides = stride_generator(N_S, reverse=True) + self.dec = nn.Sequential( + *[ConvSC(C_hid, C_hid, stride=s, transpose=True) for s in strides[:-1]], + ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(C_hid, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0,len(self.dec)-1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + + + + +class Temporal_evo(nn.Module): + def __init__(self, channel_in, channel_hid, 
N_T, h, w, incep_ker=[3, 5, 7, 11], groups=8): + super(Temporal_evo, self).__init__() + + self.N_T = N_T + enc_layers = [Inception(channel_in, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)] + for i in range(1, N_T - 1): + enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + + dec_layers = [Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)] + for i in range(1, N_T - 1): + dec_layers.append( + Inception(2 * channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + dec_layers.append(Inception(2 * channel_hid, channel_hid // 2, channel_in, incep_ker=incep_ker, groups=groups)) + norm_layer = partial(nn.LayerNorm, eps=1e-6) + self.norm = norm_layer(channel_hid) + + self.enc = nn.Sequential(*enc_layers) + dpr = [x.item() for x in torch.linspace(0, 0, 12)] + self.h = h + self.w = w + self.blocks = nn.ModuleList([FourierNetBlock( + dim=channel_hid, + mlp_ratio=4, + drop=0., + drop_path=dpr[i], + act_layer=nn.GELU, + norm_layer=norm_layer, + h = self.h, + w = self.w) + for i in range(12) + ]) + self.dec = nn.Sequential(*dec_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + bias = x + x = x.reshape(B, T * C, H, W) + + # downsampling + skips = [] + z = x + for i in range(self.N_T): + z = self.enc[i](z) + if i < self.N_T - 1: + skips.append(z) + + # Spectral Domain + B, D, H, W = z.shape + N = H * W + z = z.permute(0, 2, 3, 1) + z = z.view(B, N, D) + for blk in self.blocks: + z = blk(z) + z = self.norm(z).permute(0, 2, 1) + + z = z.reshape(B, D, H, W) + + # upsampling + z = self.dec[0](z) + for i in range(1, self.N_T): + z = self.dec[i](torch.cat([z, skips[-i]], dim=1)) + + y = z.reshape(B, T, C, H, W) + return y + bias + + + +class NMOModel(nn.Module): + def __init__(self, shape_in, model_type='uniformer', 
hid_S=64, output_dim = 4, hid_T=128, N_S=4, N_T=8, incep_ker=[3,5,7,11], groups=4, + in_time_seq_length=10, out_time_seq_length=10): + super(NMOModel, self).__init__() + T, C, H, W = shape_in + self.H1 = int(H / 2 ** (N_S / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (N_S / 2)) + self.W1 = int(W / 2 ** (N_S / 2)) + self.out_dim = output_dim + self.in_time_seq_length = in_time_seq_length + self.out_time_seq_length = out_time_seq_length + self.enc = Encoder(C, hid_S, N_S) + self.hid = Temporal_evo(T*hid_S, hid_T, N_T, self.H1, self.W1, incep_ker, groups) # + self.temporal_evolution = Spatio_temporal_evolution(T*hid_S, hid_T, N_T, + input_resolution=[self.H1, self.W1], + model_type = model_type, + mlp_ratio=4., + drop_path=0.1) + + self.dec = Decoder(hid_S, self.out_dim, N_S) + + + def _forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + + embed, skip = self.enc(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + bias = z + bias_hid = self.temporal_evolution(bias) + hid = bias_hid.reshape(B*T, C_, H_, W_) + + Y = self.dec(hid, skip) + Y = Y.reshape(B, T, -1, H, W) + return Y + + def forward(self, xx): + yy = self._forward(xx) + in_time_seq_length, out_time_seq_length = self.in_time_seq_length, self.out_time_seq_length + if out_time_seq_length == in_time_seq_length: + y_pred = yy + if out_time_seq_length < in_time_seq_length: + y_pred = yy[:, :out_time_seq_length] + elif out_time_seq_length > in_time_seq_length: + y_pred = [yy] + d = out_time_seq_length // in_time_seq_length + m = out_time_seq_length % in_time_seq_length + + for _ in range(1, d): + cur_seq = self._forward(y_pred[-1]) + y_pred.append(cur_seq) + + if m != 0: + cur_seq = self._forward(y_pred[-1]) + y_pred.append(cur_seq[:, :m]) + + y_pred = torch.cat(y_pred, dim=1) + + return y_pred + +def count_parameters(model): + return sum(p.numel() for p in model.parameters() if p.requires_grad) + + +if __name__ == '__main__': + inputs = torch.randn(1, 10, 7, 600, 
600) + model = NMOModel(shape_in=(10, 7, 600, 600), hid_S=64, output_dim = 1, hid_T=128) + # print(model) + output = model(inputs) + + print(output.shape) + + # Print the number of parameters + #print(f'The model has {count_parameters(model):,} trainable parameters.') diff --git a/Exp3_Kuroshio_forecasting/model/test.ipynb b/Exp3_Kuroshio_forecasting/model/test.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..7ccfb92fe9c63efdb91e79cd2913e2b1f88e530f --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model/test.ipynb @@ -0,0 +1,557 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "b141979d-b258-443e-96b9-7331d009160a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([1, 10, 1, 600, 600])\n" + ] + } + ], + "source": [ + "import torch\n", + "from torch import nn\n", + "import math\n", + "from timm.layers import DropPath, trunc_normal_\n", + "\n", + "def stride_generator(N, reverse=False):\n", + " strides = [1, 2] * 10\n", + " if reverse:\n", + " return list(reversed(strides[:N]))\n", + " else:\n", + " return strides[:N]\n", + " \n", + "class MLP(nn.Module):\n", + " def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):\n", + " super(MLP, self).__init__()\n", + " out_features = out_features or in_features\n", + " hidden_features = hidden_features or in_features\n", + " self.fc1 = nn.Linear(in_features, hidden_features)\n", + " self.act = act_layer()\n", + " self.fc2 = nn.Linear(hidden_features, out_features)\n", + " self.drop = nn.Dropout(drop)\n", + "\n", + " def forward(self, x):\n", + " x = self.fc1(x)\n", + " x = self.act(x)\n", + " x = self.drop(x)\n", + " x = self.fc2(x)\n", + " x = self.drop(x)\n", + " return x\n", + "\n", + "class ConvMLP(nn.Module):\n", + " def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.):\n", + " super(ConvMLP, self).__init__()\n", + " 
out_features = out_features or in_features\n", + " hidden_features = hidden_features or in_features\n", + " self.fc1 = nn.Conv2d(in_features, hidden_features, 1)\n", + " self.act = act_layer()\n", + " self.fc2 = nn.Conv2d(hidden_features, out_features, 1)\n", + " self.drop = nn.Dropout(drop)\n", + "\n", + " def forward(self, x):\n", + " x = self.fc1(x)\n", + " x = self.act(x)\n", + " x = self.drop(x)\n", + " x = self.fc2(x)\n", + " x = self.drop(x)\n", + " return x\n", + "\n", + "class Attention(nn.Module):\n", + " def __init__(self, dim, num_heads=8, qkv_bias=False, qk_scale=None, attn_drop=0., proj_drop=0.):\n", + " super(Attention, self).__init__()\n", + " self.num_heads = num_heads\n", + " head_dim = dim // num_heads\n", + " self.scale = qk_scale or head_dim ** -0.5\n", + "\n", + " self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)\n", + " self.attn_drop = nn.Dropout(attn_drop)\n", + " self.proj = nn.Linear(dim, dim)\n", + " self.proj_drop = nn.Dropout(proj_drop)\n", + "\n", + " def forward(self, x):\n", + " B, N, C = x.shape\n", + " qkv = (\n", + " self.qkv(x)\n", + " .reshape(B, N, 3, self.num_heads, C // self.num_heads)\n", + " .permute(2, 0, 3, 1, 4)\n", + " )\n", + " q, k, v = qkv[0], qkv[1], qkv[2]\n", + "\n", + " attn = (q @ k.transpose(-2, -1)) * self.scale\n", + " attn = attn.softmax(dim=-1)\n", + " attn = self.attn_drop(attn)\n", + "\n", + " x = (attn @ v).transpose(1, 2).reshape(B, N, C)\n", + " x = self.proj(x)\n", + " x = self.proj_drop(x)\n", + " return x\n", + "\n", + "class ConvBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " num_heads=4,\n", + " mlp_ratio=4.,\n", + " qkv_bias=False,\n", + " qk_scale=None,\n", + " drop=0.,\n", + " attn_drop=0.,\n", + " drop_path=0.,\n", + " act_layer=nn.GELU,\n", + " norm_layer=nn.LayerNorm\n", + " ):\n", + " super(ConvBlock, self).__init__()\n", + " self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)\n", + " self.norm1 = nn.BatchNorm2d(dim)\n", + " self.conv1 = nn.Conv2d(dim, 
dim, 1)\n", + " self.conv2 = nn.Conv2d(dim, dim, 1)\n", + " self.attn = nn.Conv2d(dim, dim, 5, padding=2, groups=dim)\n", + " self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n", + " self.norm2 = nn.BatchNorm2d(dim)\n", + " mlp_hidden_dim = int(dim * mlp_ratio)\n", + " self.mlp = ConvMLP(\n", + " in_features=dim,\n", + " hidden_features=mlp_hidden_dim,\n", + " act_layer=act_layer,\n", + " drop=drop\n", + " )\n", + "\n", + " self.apply(self._init_weights)\n", + "\n", + " def _init_weights(self, m):\n", + " if isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):\n", + " nn.init.constant_(m.bias, 0)\n", + " nn.init.constant_(m.weight, 1.0)\n", + " elif isinstance(m, nn.Conv2d):\n", + " fan_out = (\n", + " m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n", + " )\n", + " fan_out //= m.groups\n", + " m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))\n", + " if m.bias is not None:\n", + " m.bias.data.zero_()\n", + "\n", + " @torch.jit.ignore\n", + " def no_weight_decay(self):\n", + " return {}\n", + "\n", + " def forward(self, x):\n", + " x = x + self.pos_embed(x)\n", + " x = x + self.drop_path(\n", + " self.conv2(self.attn(self.conv1(self.norm1(x))))\n", + " )\n", + " x = x + self.drop_path(self.mlp(self.norm2(x)))\n", + " return x\n", + "\n", + "class SelfAttentionBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " dim,\n", + " num_heads,\n", + " mlp_ratio=4.,\n", + " qkv_bias=False,\n", + " qk_scale=None,\n", + " drop=0.,\n", + " attn_drop=0.,\n", + " drop_path=0.,\n", + " init_value=1e-6,\n", + " act_layer=nn.GELU,\n", + " norm_layer=nn.LayerNorm\n", + " ):\n", + " super(SelfAttentionBlock, self).__init__()\n", + " self.pos_embed = nn.Conv2d(dim, dim, 3, padding=1, groups=dim)\n", + " self.norm1 = norm_layer(dim)\n", + " self.attn = Attention(\n", + " dim,\n", + " num_heads=num_heads,\n", + " qkv_bias=qkv_bias,\n", + " qk_scale=qk_scale,\n", + " attn_drop=attn_drop,\n", + " proj_drop=drop\n", + " )\n", + " 
self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()\n", + " self.norm2 = norm_layer(dim)\n", + " mlp_hidden_dim = int(dim * mlp_ratio)\n", + " self.mlp = MLP(\n", + " in_features=dim,\n", + " hidden_features=mlp_hidden_dim,\n", + " act_layer=act_layer,\n", + " drop=drop\n", + " )\n", + " self.gamma_1 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)\n", + " self.gamma_2 = nn.Parameter(init_value * torch.ones((dim)), requires_grad=True)\n", + "\n", + " self.apply(self._init_weights)\n", + "\n", + " def _init_weights(self, m):\n", + " if isinstance(m, nn.Linear):\n", + " trunc_normal_(m.weight, std=.02)\n", + " if isinstance(m, nn.Linear) and m.bias is not None:\n", + " nn.init.constant_(m.bias, 0)\n", + " elif isinstance(m, (nn.LayerNorm, nn.GroupNorm, nn.BatchNorm2d)):\n", + " nn.init.constant_(m.bias, 0)\n", + " nn.init.constant_(m.weight, 1.0)\n", + "\n", + " @torch.jit.ignore\n", + " def no_weight_decay(self):\n", + " return {'gamma_1', 'gamma_2'}\n", + "\n", + " def forward(self, x):\n", + " x = x + self.pos_embed(x)\n", + " B, N, H, W = x.shape\n", + " x = x.flatten(2).transpose(1, 2)\n", + " x = x + self.drop_path(self.gamma_1 * self.attn(self.norm1(x)))\n", + " x = x + self.drop_path(self.gamma_2 * self.mlp(self.norm2(x)))\n", + " x = x.transpose(1, 2).reshape(B, N, H, W)\n", + " return x\n", + "\n", + "def UniformerSubBlock(\n", + " embed_dims,\n", + " mlp_ratio=4.,\n", + " drop=0.,\n", + " drop_path=0.,\n", + " init_value=1e-6,\n", + " block_type='Conv'\n", + "):\n", + " assert block_type in ['Conv', 'MHSA']\n", + " if block_type == 'Conv':\n", + " return ConvBlock(dim=embed_dims, mlp_ratio=mlp_ratio, drop=drop, drop_path=drop_path)\n", + " else:\n", + " return SelfAttentionBlock(\n", + " dim=embed_dims,\n", + " num_heads=8,\n", + " mlp_ratio=mlp_ratio,\n", + " qkv_bias=True,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " init_value=init_value\n", + " )\n", + "\n", + "class 
SpatioTemporalEvolutionBlock(nn.Module):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " out_channels,\n", + " input_resolution=None,\n", + " mlp_ratio=8.,\n", + " drop=0.0,\n", + " drop_path=0.0,\n", + " layer_i=0\n", + " ):\n", + " super(SpatioTemporalEvolutionBlock, self).__init__()\n", + " self.in_channels = in_channels\n", + " self.out_channels = out_channels\n", + " block_type = 'MHSA' if in_channels == out_channels and layer_i > 0 else 'Conv'\n", + " self.block = UniformerSubBlock(\n", + " in_channels,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " block_type=block_type\n", + " )\n", + "\n", + " if in_channels != out_channels:\n", + " self.reduction = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=1,\n", + " stride=1,\n", + " padding=0\n", + " )\n", + "\n", + " def forward(self, x):\n", + " z = self.block(x)\n", + " if self.in_channels != self.out_channels:\n", + " z = self.reduction(z)\n", + " return z\n", + "\n", + "class SpatioTemporalEvolution(nn.Module):\n", + " def __init__(\n", + " self,\n", + " channel_in,\n", + " channel_hid,\n", + " N2,\n", + " input_resolution=None,\n", + " mlp_ratio=4.,\n", + " drop=0.0,\n", + " drop_path=0.1\n", + " ):\n", + " super(SpatioTemporalEvolution, self).__init__()\n", + " assert N2 >= 2 and mlp_ratio > 1\n", + " self.N2 = N2\n", + " dpr = [x.item() for x in torch.linspace(1e-2, drop_path, self.N2)]\n", + "\n", + " evolution_layers = [SpatioTemporalEvolutionBlock(\n", + " channel_in,\n", + " channel_hid,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=dpr[0],\n", + " layer_i=0\n", + " )]\n", + "\n", + " for i in range(1, N2 - 1):\n", + " evolution_layers.append(SpatioTemporalEvolutionBlock(\n", + " channel_hid,\n", + " channel_hid,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=dpr[i],\n", + " layer_i=i\n", + " ))\n", + "\n", + " 
evolution_layers.append(SpatioTemporalEvolutionBlock(\n", + " channel_hid,\n", + " channel_in,\n", + " input_resolution,\n", + " mlp_ratio=mlp_ratio,\n", + " drop=drop,\n", + " drop_path=drop_path,\n", + " layer_i=N2 - 1\n", + " ))\n", + " self.enc = nn.Sequential(*evolution_layers)\n", + "\n", + " def forward(self, x):\n", + " B, T, C, H, W = x.shape\n", + " x = x.reshape(B, T * C, H, W)\n", + " z = x\n", + " for i in range(self.N2):\n", + " z = self.enc[i](z)\n", + " y = z.reshape(B, T, C, H, W)\n", + " return y\n", + "\n", + "class BasicConv2d(nn.Module):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size,\n", + " stride,\n", + " padding,\n", + " transpose=False,\n", + " act_norm=False\n", + " ):\n", + " super(BasicConv2d, self).__init__()\n", + " self.act_norm = act_norm\n", + " if not transpose:\n", + " self.conv = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding\n", + " )\n", + " else:\n", + " self.conv = nn.ConvTranspose2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding,\n", + " output_padding=stride // 2\n", + " )\n", + " self.norm = nn.GroupNorm(2, out_channels)\n", + " self.act = nn.LeakyReLU(0.2, inplace=True)\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " if self.act_norm:\n", + " y = self.act(self.norm(y))\n", + " return y\n", + "\n", + "class ConvDynamicsLayer(nn.Module):\n", + " def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True):\n", + " super(ConvDynamicsLayer, self).__init__()\n", + " if stride == 1:\n", + " transpose = False\n", + " self.conv = BasicConv2d(\n", + " C_in,\n", + " C_out,\n", + " kernel_size=3,\n", + " stride=stride,\n", + " padding=1,\n", + " transpose=transpose,\n", + " act_norm=act_norm\n", + " )\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " return y\n", + 
"\n", + "class MultiGroupConv2d(nn.Module):\n", + " def __init__(\n", + " self,\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size,\n", + " stride,\n", + " padding,\n", + " groups,\n", + " act_norm=False\n", + " ):\n", + " super(MultiGroupConv2d, self).__init__()\n", + " self.act_norm = act_norm\n", + " if in_channels % groups != 0:\n", + " groups = 1\n", + " self.conv = nn.Conv2d(\n", + " in_channels,\n", + " out_channels,\n", + " kernel_size=kernel_size,\n", + " stride=stride,\n", + " padding=padding,\n", + " groups=groups\n", + " )\n", + " self.norm = nn.GroupNorm(groups, out_channels)\n", + " self.activate = nn.LeakyReLU(0.2, inplace=True)\n", + "\n", + " def forward(self, x):\n", + " y = self.conv(x)\n", + " if self.act_norm:\n", + " y = self.activate(self.norm(y))\n", + " return y\n", + "\n", + "\n", + "class AtmosphericEncoder(nn.Module):\n", + " def __init__(self, C_in, spatial_hidden_dim, num_spatial_layers):\n", + " super(AtmosphericEncoder, self).__init__()\n", + " strides = stride_generator(num_spatial_layers)\n", + " self.enc = nn.Sequential(\n", + " ConvDynamicsLayer(C_in, spatial_hidden_dim, stride=strides[0]),\n", + " *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s) for s in strides[1:]]\n", + " )\n", + "\n", + " def forward(self, x):\n", + " enc1 = self.enc[0](x)\n", + " latent = enc1\n", + " for i in range(1, len(self.enc)):\n", + " latent = self.enc[i](latent)\n", + " return latent, enc1\n", + "\n", + "class AtmosphericDecoder(nn.Module):\n", + " def __init__(self, spatial_hidden_dim, C_out, num_spatial_layers):\n", + " super(AtmosphericDecoder, self).__init__()\n", + " strides = stride_generator(num_spatial_layers, reverse=True)\n", + " self.dec = nn.Sequential(\n", + " *[ConvDynamicsLayer(spatial_hidden_dim, spatial_hidden_dim, stride=s, transpose=True) for s in strides[:-1]],\n", + " ConvDynamicsLayer(2 * spatial_hidden_dim, spatial_hidden_dim, stride=strides[-1], transpose=True)\n", + " )\n", + " self.readout 
= nn.Conv2d(spatial_hidden_dim, C_out, 1)\n", + "\n", + " def forward(self, hid, enc1=None):\n", + " for i in range(0, len(self.dec) - 1):\n", + " hid = self.dec[i](hid)\n", + " Y = self.dec[-1](torch.cat([hid, enc1], dim=1))\n", + " Y = self.readout(Y)\n", + " return Y\n", + "\n", + "class Triton(nn.Module):\n", + " def __init__(\n", + " self,\n", + " shape_in,\n", + " spatial_hidden_dim=64,\n", + " output_channels=4,\n", + " temporal_hidden_dim=128,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8,\n", + " in_time_seq_length=10,\n", + " out_time_seq_length=10\n", + " ):\n", + " super(Triton, self).__init__()\n", + " T, C, H, W = shape_in\n", + " self.H1 = int(H / 2 ** (num_spatial_layers / 2)) + 1 if H % 3 == 0 else int(H / 2 ** (num_spatial_layers / 2))\n", + " self.W1 = int(W / 2 ** (num_spatial_layers / 2))\n", + " self.output_dim = output_channels\n", + " self.input_time_seq_length = in_time_seq_length\n", + " self.output_time_seq_length = out_time_seq_length\n", + " \n", + " self.atmospheric_encoder = AtmosphericEncoder(C, spatial_hidden_dim, num_spatial_layers)\n", + " self.temporal_evolution = SpatioTemporalEvolution(\n", + " T * spatial_hidden_dim,\n", + " temporal_hidden_dim,\n", + " num_temporal_layers,\n", + " input_resolution=[self.H1, self.W1],\n", + " mlp_ratio=4.0,\n", + " drop_path=0.1\n", + " )\n", + " self.atmospheric_decoder = AtmosphericDecoder(spatial_hidden_dim, self.output_dim, num_spatial_layers)\n", + "\n", + " def forward(self, input_state):\n", + " \"\"\"\n", + " 1. Reshape the input state to match the encoder's input requirements.\n", + " 2. Extract features using the Atmospheric Encoder and obtain skip connections.\n", + " 3. Perform spatio-temporal evolution on the encoded features.\n", + " 4. 
Decode the evolved features to generate the final output.\n", + " \"\"\"\n", + " batch_size, temporal_length, channels, height, width = input_state.shape\n", + " reshaped_input = input_state.view(batch_size * temporal_length, channels, height, width)\n", + " \n", + " encoded_features, skip_connection = self.atmospheric_encoder(reshaped_input)\n", + " _, encoded_channels, encoded_height, encoded_width = encoded_features.shape\n", + " encoded_features = encoded_features.view(batch_size, temporal_length, encoded_channels, encoded_height, encoded_width)\n", + " \n", + " temporal_bias = encoded_features\n", + " temporal_hidden = self.temporal_evolution(temporal_bias)\n", + " reshaped_hidden = temporal_hidden.view(batch_size * temporal_length, encoded_channels, encoded_height, encoded_width)\n", + "\n", + " decoded_output = self.atmospheric_decoder(reshaped_hidden, skip_connection)\n", + " final_output = decoded_output.view(batch_size, temporal_length, -1, height, width)\n", + " \n", + " return final_output\n", + "\n", + "\n", + "def count_parameters(model):\n", + " return sum(p.numel() for p in model.parameters() if p.requires_grad)\n", + "\n", + "if __name__ == '__main__':\n", + " inputs = torch.randn(1, 10, 7, 600, 600)\n", + " model = Triton(\n", + " shape_in=(10, 7, 600, 600),\n", + " spatial_hidden_dim=32,\n", + " output_channels=1,\n", + " temporal_hidden_dim=64,\n", + " num_spatial_layers=4,\n", + " num_temporal_layers=8)\n", + " output = model(inputs)\n", + " print(output.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5f4805e1-43b4-44f0-b952-63db3d3f90e8", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", 
+ "pygments_lexer": "ipython3", + "version": "3.8.19" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/ConvLSTM-checkpoint.py b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/ConvLSTM-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..e28ea210d7cd0516e3df32652c8da4a9a2d5bd65 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/ConvLSTM-checkpoint.py @@ -0,0 +1,198 @@ +import torch.nn as nn +import torch + + +class ConvLSTMCell(nn.Module): + + def __init__(self, input_dim, hidden_dim, kernel_size, bias): + """ + Initialize ConvLSTM cell. + + Parameters + ---------- + input_dim: int + Number of channels of input tensor. + hidden_dim: int + Number of channels of hidden state. + kernel_size: (int, int) + Size of the convolutional kernel. + bias: bool + Whether or not to add the bias. + """ + + super(ConvLSTMCell, self).__init__() + + self.input_dim = input_dim + self.hidden_dim = hidden_dim + + self.kernel_size = kernel_size + self.padding = kernel_size[0] // 2, kernel_size[1] // 2 + self.bias = bias + + self.conv = nn.Conv2d(in_channels=self.input_dim + self.hidden_dim, + out_channels=4 * self.hidden_dim, + kernel_size=self.kernel_size, + padding=self.padding, + bias=self.bias) + + def forward(self, input_tensor, cur_state): + h_cur, c_cur = cur_state + + combined = torch.cat([input_tensor, h_cur], dim=1) # concatenate along channel axis + + combined_conv = self.conv(combined) + cc_i, cc_f, cc_o, cc_g = torch.split(combined_conv, self.hidden_dim, dim=1) + i = torch.sigmoid(cc_i) + f = torch.sigmoid(cc_f) + o = torch.sigmoid(cc_o) + g = torch.tanh(cc_g) + + c_next = f * c_cur + i * g + h_next = o * torch.tanh(c_next) + + return h_next, c_next + + def init_hidden(self, batch_size, image_size): + height, width = image_size + return (torch.zeros(batch_size, self.hidden_dim, height, width, device=self.conv.weight.device), + 
torch.zeros(batch_size, self.hidden_dim, height, width, device=self.conv.weight.device)) + + +class ConvLSTM(nn.Module): + + """ + + Parameters: + input_dim: Number of channels in input + hidden_dim: Number of hidden channels + kernel_size: Size of kernel in convolutions + num_layers: Number of LSTM layers stacked on each other + batch_first: Whether or not dimension 0 is the batch or not + bias: Bias or no bias in Convolution + return_all_layers: Return the list of computations for all layers + Note: Will do same padding. + + Input: + A tensor of size B, T, C, H, W or T, B, C, H, W + Output: + A tuple of two lists of length num_layers (or length 1 if return_all_layers is False). + 0 - layer_output_list is the list of lists of length T of each output + 1 - last_state_list is the list of last states + each element of the list is a tuple (h, c) for hidden state and memory + Example: + >> x = torch.rand((32, 10, 64, 128, 128)) + >> convlstm = ConvLSTM(64, 16, 3, 1, True, True, False) + >> _, last_states = convlstm(x) + >> h = last_states[0][0] # 0 for layer index, 0 for h index + """ + + def __init__(self, input_dim, hidden_dim, kernel_size, num_layers, + batch_first=False, bias=True, return_all_layers=False): + super(ConvLSTM, self).__init__() + + self._check_kernel_size_consistency(kernel_size) + + # Make sure that both `kernel_size` and `hidden_dim` are lists having len == num_layers + kernel_size = self._extend_for_multilayer(kernel_size, num_layers) + hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers) + if not len(kernel_size) == len(hidden_dim) == num_layers: + raise ValueError('Inconsistent list length.') + + self.input_dim = input_dim + self.hidden_dim = hidden_dim + self.kernel_size = kernel_size + self.num_layers = num_layers + self.batch_first = batch_first + self.bias = bias + self.return_all_layers = return_all_layers + + cell_list = [] + for i in range(0, self.num_layers): + cur_input_dim = self.input_dim if i == 0 else self.hidden_dim[i - 
1] + + cell_list.append(ConvLSTMCell(input_dim=cur_input_dim, + hidden_dim=self.hidden_dim[i], + kernel_size=self.kernel_size[i], + bias=self.bias)) + + self.cell_list = nn.ModuleList(cell_list) + + def forward(self, input_tensor, hidden_state=None): + """ + + Parameters + ---------- + input_tensor: todo + 5-D Tensor either of shape (t, b, c, h, w) or (b, t, c, h, w) + hidden_state: todo + None. todo implement stateful + + Returns + ------- + last_state_list, layer_output + """ + if not self.batch_first: + # (t, b, c, h, w) -> (b, t, c, h, w) + input_tensor = input_tensor.permute(1, 0, 2, 3, 4) + + b, _, _, h, w = input_tensor.size() + + # Implement stateful ConvLSTM + if hidden_state is not None: + raise NotImplementedError() + else: + # Since the init is done in forward. Can send image size here + hidden_state = self._init_hidden(batch_size=b, + image_size=(h, w)) + + layer_output_list = [] + last_state_list = [] + + seq_len = input_tensor.size(1) + cur_layer_input = input_tensor + + for layer_idx in range(self.num_layers): + + h, c = hidden_state[layer_idx] + output_inner = [] + for t in range(seq_len): + h, c = self.cell_list[layer_idx](input_tensor=cur_layer_input[:, t, :, :, :], + cur_state=[h, c]) + output_inner.append(h) + + layer_output = torch.stack(output_inner, dim=1) + cur_layer_input = layer_output + + layer_output_list.append(layer_output) + last_state_list.append([h, c]) + + if not self.return_all_layers: + layer_output_list = layer_output_list[-1:] + last_state_list = last_state_list[-1:] + + return layer_output_list[0], last_state_list + + def _init_hidden(self, batch_size, image_size): + init_states = [] + for i in range(self.num_layers): + init_states.append(self.cell_list[i].init_hidden(batch_size, image_size)) + return init_states + + @staticmethod + def _check_kernel_size_consistency(kernel_size): + if not (isinstance(kernel_size, tuple) or + (isinstance(kernel_size, list) and all([isinstance(elem, tuple) for elem in kernel_size]))): + raise 
ValueError('`kernel_size` must be tuple or list of tuples') + + @staticmethod + def _extend_for_multilayer(param, num_layers): + if not isinstance(param, list): + param = [param] * num_layers + return param + +if __name__ == '__main__': + x = torch.rand((32, 10, 2, 256, 256)) #(batch_size, seq_len, channels, height, width) + convlstm = ConvLSTM(2, 2, (3, 3), 6, True, True, False) + pred, last_states = convlstm(x) + h = last_states[0][0] + print(pred.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/U_net-checkpoint.py b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/U_net-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..3d17031c407a6e4f9f54a10b30b5bafa3bc7bca0 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/U_net-checkpoint.py @@ -0,0 +1,104 @@ +import torch +import torch.nn as nn + +class DoubleConv(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.double_conv = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x): + return self.double_conv(x) + +class Down(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.maxpool_conv = nn.Sequential( + nn.MaxPool2d(2), + DoubleConv(in_channels, out_channels) + ) + + def forward(self, x): + return self.maxpool_conv(x) + +class Up(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.up = nn.ConvTranspose2d(in_channels, in_channels // 2, kernel_size=2, stride=2) + self.conv = DoubleConv(in_channels, out_channels) + + def forward(self, x1, x2): + x1 = self.up(x1) + # 调整x2大小以匹配x1 + diffY = x2.size()[2] - x1.size()[2] + diffX = x2.size()[3] - x1.size()[3] 
+ x1 = nn.functional.pad(x1, [diffX // 2, diffX - diffX // 2, + diffY // 2, diffY - diffY // 2]) + x = torch.cat([x2, x1], dim=1) + return self.conv(x) + +class OutConv(nn.Module): + def __init__(self, in_channels, out_channels): + super(OutConv, self).__init__() + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1) + + def forward(self, x): + return self.conv(x) + +class UNet(nn.Module): + def __init__(self, n_channels, n_classes): + super(UNet, self).__init__() + self.n_channels = n_channels + self.n_classes = n_classes + + self.inc = DoubleConv(n_channels, 64) + self.down1 = Down(64, 128) + self.down2 = Down(128, 256) + self.down3 = Down(256, 512) + self.up1 = Up(512, 256) + self.up2 = Up(256, 128) + self.up3 = Up(128, 64) + self.outc = OutConv(64, n_classes) + + def forward(self, x): + # 合并B和T维度(如果存在) + is_5d = x.dim() == 5 + if is_5d: + B, T, C, H, W = x.size() + x = x.view(B * T, C, H, W) + else: + B, C, H, W = x.size() + T = 1 # 无时间维度时设为1 + + # 原UNet处理流程 + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x = self.up1(x4, x3) + x = self.up2(x, x2) + x = self.up3(x, x1) + logits = self.outc(x) + + # 恢复原始维度(如果是5维输入) + if is_5d: + logits = logits.view(B, T, self.n_classes, H, W) + + return logits + +# # 测试四维输入 +# model = UNet(n_channels=2, n_classes=2) +# input_4d = torch.randn(1, 2, 128, 128) +# output_4d = model(input_4d) +# print(f"4D Output shape: {output_4d.shape}") # 应为 [1, 2, 128, 128] + +# # 测试五维输入 +# input_5d = torch.randn(2, 10, 2, 128, 128) +# output_5d = model(input_5d) +# print(f"5D Output shape: {output_5d.shape}") # 应为 [2, 3, 2, 128, 128] \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/cno-checkpoint.py b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/cno-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..2c5570a6eab78f213e7ffe200525f5d2591b4ce9 --- /dev/null +++ 
b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/cno-checkpoint.py @@ -0,0 +1,545 @@ +import torch +import torch.nn as nn +from torch.nn import LeakyReLU as LReLu + +class CNOBlock(nn.Module): + def __init__(self, + in_channels, + out_channels, + in_size_h, + in_size_w, + out_size_h, + out_size_w, + cutoff_den = 2.0001, + conv_kernel = 3, + filter_size = 6, + lrelu_upsampling = 2, + half_width_mult = 0.8, + radial = False, + batch_norm = True, + activation = 'cno_lrelu' + ): + super(CNOBlock, self).__init__() + + self.in_channels = in_channels + self.out_channels = out_channels + self.in_size_h = in_size_h + self.in_size_w = in_size_w + self.out_size_h = out_size_h + self.out_size_w = out_size_w + self.conv_kernel = conv_kernel + self.batch_norm_flag = batch_norm + + #---------- Filter properties ----------- + self.critically_sampled = False # We use w_c = s/2.0001 --> NOT critically sampled + + if cutoff_den == 2.0: + self.critically_sampled = True + self.in_cutoff_h = self.in_size_h / cutoff_den + self.in_cutoff_w = self.in_size_w / cutoff_den + self.out_cutoff_h = self.out_size_h / cutoff_den + self.out_cutoff_w = self.out_size_w / cutoff_den + + self.in_halfwidth_h = half_width_mult*self.in_size_h - self.in_size_h / cutoff_den + self.in_halfwidth_w = half_width_mult*self.in_size_w - self.in_size_w / cutoff_den + self.out_halfwidth_h = half_width_mult*self.out_size_h - self.out_size_h / cutoff_den + self.out_halfwidth_w = half_width_mult*self.out_size_w - self.out_size_w / cutoff_den + + + + pad = (self.conv_kernel - 1) // 2 + self.convolution = torch.nn.Conv2d(in_channels=self.in_channels, out_channels=self.out_channels, + kernel_size=self.conv_kernel, + padding=pad) + + if self.batch_norm_flag: + self.batch_norm = nn.BatchNorm2d(self.out_channels) + else: + self.batch_norm = None + self.activation = LReLu() # + + def forward(self, x): + x = self.convolution(x) + if self.batch_norm_flag: + x = self.batch_norm(x) + x = self.activation(x) + return x 
+ +class LiftProjectBlock(nn.Module): + def __init__(self, + in_channels, + out_channels, + in_size_h, + in_size_w, + out_size_h, + out_size_w, + latent_dim = 64, + cutoff_den = 2.0001, + conv_kernel = 3, + filter_size = 6, + lrelu_upsampling = 2, + half_width_mult = 0.8, + radial = False, + batch_norm = True, + activation = 'cno_lrelu' + ): + super(LiftProjectBlock, self).__init__() + + self.inter_CNOBlock = CNOBlock(in_channels=in_channels, + out_channels=latent_dim, + in_size_h=in_size_h, + in_size_w=in_size_w, + out_size_h=out_size_h, + out_size_w=out_size_w, + cutoff_den=cutoff_den, + conv_kernel=conv_kernel, + filter_size=filter_size, + lrelu_upsampling=lrelu_upsampling, + half_width_mult=half_width_mult, + radial=radial, + batch_norm=batch_norm, + activation=activation) + + pad = (conv_kernel - 1) // 2 + self.convolution = torch.nn.Conv2d(in_channels=latent_dim, out_channels=out_channels, + kernel_size=conv_kernel, stride=1, + padding=pad) + + self.batch_norm_flag = batch_norm + if self.batch_norm_flag: + self.batch_norm = nn.BatchNorm2d(out_channels) + else: + self.batch_norm = None + + def forward(self, x): + x = self.inter_CNOBlock(x) + + x = self.convolution(x) + if self.batch_norm_flag: + x = self.batch_norm(x) + return x + +class ResidualBlock(nn.Module): + def __init__(self, + channels, + size_h, + size_w, + cutoff_den = 2.0001, + conv_kernel = 3, + filter_size = 6, + lrelu_upsampling = 2, + half_width_mult = 0.8, + radial = False, + batch_norm = True, + activation = 'cno_lrelu' + ): + super(ResidualBlock, self).__init__() + + self.channels = channels + self.size_h = size_h + self.size_w = size_w + self.conv_kernel = conv_kernel + self.batch_norm_flag = batch_norm + + #---------- Filter properties ----------- + self.critically_sampled = False # We use w_c = s/2.0001 --> NOT critically sampled + + if cutoff_den == 2.0: + self.critically_sampled = True + self.cutoff_h = self.size_h / cutoff_den + self.cutoff_w = self.size_w / cutoff_den + 
self.halfwidth_h = half_width_mult*self.size_h - self.size_h / cutoff_den + self.halfwidth_w = half_width_mult*self.size_w - self.size_w / cutoff_den + + #----------------------------------------- + + pad = (self.conv_kernel - 1) // 2 + self.convolution1 = torch.nn.Conv2d(in_channels=self.channels, out_channels=self.channels, + kernel_size=self.conv_kernel, stride=1, + padding=pad) + self.convolution2 = torch.nn.Conv2d(in_channels=self.channels, out_channels=self.channels, + kernel_size=self.conv_kernel, stride=1, + padding=pad) + + if self.batch_norm_flag: + self.batch_norm1 = nn.BatchNorm2d(self.channels) + self.batch_norm2 = nn.BatchNorm2d(self.channels) + else: + self.batch_norm1 = self.batch_norm2 = None + self.activation = LReLu() + + def forward(self, x): + out = self.convolution1(x) + if self.batch_norm_flag: + out = self.batch_norm1(out) + out = self.activation(out) + out = self.convolution2(out) + if self.batch_norm_flag: + out = self.batch_norm2(out) + + return x + out + +class CNO(nn.Module): + def __init__(self, + in_dim, + in_size_h, + in_size_w, + N_layers, + N_res = 1, + N_res_neck = 6, + channel_multiplier = 32, + conv_kernel=3, + cutoff_den = 2.0001, + filter_size=6, + lrelu_upsampling = 2, + half_width_mult = 0.8, + radial = False, + batch_norm = True, + out_dim = 10, + out_size_h = 1, + out_size_w = 1, + expand_input = False, + latent_lift_proj_dim = 64, + add_inv = True, + activation = 'cno_lrelu' + ): + + super(CNO, self).__init__() + + + self.N_layers = int(N_layers) + + self.lift_dim = channel_multiplier // 2 + self.out_dim = out_dim + + self.add_inv = add_inv + + self.channel_multiplier = channel_multiplier + + if radial == 0: + self.radial = False + else: + self.radial = True + + + self.encoder_features = [self.lift_dim] + for i in range(self.N_layers): + self.encoder_features.append(2 ** i * self.channel_multiplier) + + self.decoder_features_in = self.encoder_features[1:] + self.decoder_features_in.reverse() + self.decoder_features_out = 
self.encoder_features[:-1] + self.decoder_features_out.reverse() + + for i in range(1, self.N_layers): + self.decoder_features_in[i] = 2 * self.decoder_features_in[i] + + self.inv_features = self.decoder_features_in.copy() + self.inv_features.append(self.encoder_features[0] + self.decoder_features_out[-1]) + + + if not expand_input: + latent_size_h = in_size_h + latent_size_w = in_size_w + else: + down_exponent = 2 ** N_layers + latent_size_h = in_size_h - (in_size_h % down_exponent) + down_exponent + latent_size_w = in_size_w - (in_size_w % down_exponent) + down_exponent + + if out_size_h == 1: + latent_size_out_h = latent_size_h + else: + if not expand_input: + latent_size_out_h = out_size_h + else: + down_exponent = 2 ** N_layers + latent_size_out_h = out_size_h - (out_size_h % down_exponent) + down_exponent + + if out_size_w == 1: + latent_size_out_w = latent_size_w + else: + if not expand_input: + latent_size_out_w = out_size_w + else: + down_exponent = 2 ** N_layers + latent_size_out_w = out_size_w - (out_size_w % down_exponent) + down_exponent + + self.encoder_sizes_h = [] + self.encoder_sizes_w = [] + self.decoder_sizes_h = [] + self.decoder_sizes_w = [] + for i in range(self.N_layers + 1): + self.encoder_sizes_h.append(latent_size_h // (2 ** i)) + self.encoder_sizes_w.append(latent_size_w // (2 ** i)) + self.decoder_sizes_h.append(latent_size_out_h // 2 ** (self.N_layers - i)) + self.decoder_sizes_w.append(latent_size_out_w // 2 ** (self.N_layers - i)) + + + self.lift = LiftProjectBlock(in_channels=in_dim, + out_channels=self.encoder_features[0], + in_size_h=in_size_h, + in_size_w=in_size_w, + out_size_h=self.encoder_sizes_h[0], + out_size_w=self.encoder_sizes_w[0], + latent_dim=latent_lift_proj_dim, + cutoff_den=cutoff_den, + conv_kernel=conv_kernel, + filter_size=filter_size, + lrelu_upsampling=lrelu_upsampling, + half_width_mult=half_width_mult, + radial=radial, + batch_norm=False, + activation=activation) + _out_size_h = out_size_h + _out_size_w = 
out_size_w + if out_size_h == 1: + _out_size_h = in_size_h + if out_size_w == 1: + _out_size_w = in_size_w + + self.project = LiftProjectBlock(in_channels=self.encoder_features[0] + self.decoder_features_out[-1], + out_channels=out_dim, + in_size_h=self.decoder_sizes_h[-1], + in_size_w=self.decoder_sizes_w[-1], + out_size_h=_out_size_h, + out_size_w=_out_size_w, + latent_dim=latent_lift_proj_dim, + cutoff_den=cutoff_den, + conv_kernel=conv_kernel, + filter_size=filter_size, + lrelu_upsampling=lrelu_upsampling, + half_width_mult=half_width_mult, + radial=radial, + batch_norm=False, + activation=activation) + + + self.encoder = nn.ModuleList([ + CNOBlock( + in_channels=self.encoder_features[i], + out_channels=self.encoder_features[i + 1], + in_size_h=self.encoder_sizes_h[i], + in_size_w=self.encoder_sizes_w[i], + out_size_h=self.encoder_sizes_h[i + 1], + out_size_w=self.encoder_sizes_w[i + 1], + cutoff_den=cutoff_den, + conv_kernel=conv_kernel, + filter_size=filter_size, + lrelu_upsampling=lrelu_upsampling, + half_width_mult=half_width_mult, + radial=radial, + batch_norm=batch_norm, + activation=activation + ) + for i in range(self.N_layers) + ]) + + + self.ED_expansion = nn.ModuleList([ + CNOBlock( + in_channels=self.encoder_features[i], + out_channels=self.encoder_features[i], + in_size_h=self.encoder_sizes_h[i], + in_size_w=self.encoder_sizes_w[i], + out_size_h=self.decoder_sizes_h[self.N_layers - i], + out_size_w=self.decoder_sizes_w[self.N_layers - i], + cutoff_den=cutoff_den, + conv_kernel=conv_kernel, + filter_size=filter_size, + lrelu_upsampling=lrelu_upsampling, + half_width_mult=half_width_mult, + radial=radial, + batch_norm=batch_norm, + activation=activation + ) + for i in range(self.N_layers + 1) + ]) + + self.decoder = nn.ModuleList([ + CNOBlock( + in_channels=self.decoder_features_in[i], + out_channels=self.decoder_features_out[i], + in_size_h=self.decoder_sizes_h[i], + in_size_w=self.decoder_sizes_w[i], + out_size_h=self.decoder_sizes_h[i + 1], + 
    def forward(self, x):
        """Run the CNO U-Net on a batched sequence.

        x: (b, t, c, h, w) — time is folded into the batch dimension, so each
        frame is processed independently; output is (b, t, out_dim, H', W').

        self.res_nets is one flat Sequential holding N_layers * N_res
        per-level residual blocks followed by N_res_neck bottleneck blocks;
        res_nets_idx walks through it in the same order it was built.
        """
        b, t, c, h, w = x.shape
        x = x.reshape(b * t, c, h, w)
        x = self.lift(x)
        skip = []

        res_nets_idx = 0
        for i in range(self.N_layers):

            # Skip tensors are taken AFTER this level's residual blocks,
            # but the downsampling encoder consumes the un-refined x.
            y = x
            for j in range(self.N_res):
                y = self.res_nets[res_nets_idx](y)
                res_nets_idx += 1
            skip.append(y)

            x = self.encoder[i](x)

        #----------------------------------------------------------------------

        # Bottleneck residual stack at the coarsest resolution.
        for j in range(self.N_res_neck):
            x = self.res_nets[res_nets_idx](x)
            res_nets_idx += 1

        for i in range(self.N_layers):

            # ED_expansion re-sizes encoder features to the decoder grid;
            # skip[-i] pairs decoder level i with its mirrored encoder level.
            if i == 0:
                x = self.ED_expansion[self.N_layers - i](x)
            else:
                x = torch.cat((x, self.ED_expansion[self.N_layers - i](skip[-i])), 1)

            if self.add_inv:
                x = self.decoder_inv[i](x)
            x = self.decoder[i](x)

        # Final concat with the highest-resolution skip before projection.
        x = torch.cat((x, self.ED_expansion[0](skip[0])), 1)
        x = self.project(x)
        x = x.reshape(b, t, -1, x.shape[-2], x.shape[-1])

        del skip
        # NOTE(review): `del y` raises NameError if self.N_layers == 0
        # (y is only bound inside the encoder loop) — confirm N_layers >= 1
        # is an invariant.
        del y

        return x
class TimestepEmbedder(nn.Module):
    """
    Embeds scalar timesteps into vector representations.

    A sinusoidal frequency embedding of each timestep is passed through a
    two-layer SiLU MLP to produce a `hidden_size`-dimensional vector.
    """
    def __init__(self, hidden_size, frequency_embedding_size=256):
        super().__init__()
        self.mlp = nn.Sequential(
            nn.Linear(frequency_embedding_size, hidden_size, bias=True),
            nn.SiLU(),
            nn.Linear(hidden_size, hidden_size, bias=True),
        )
        self.frequency_embedding_size = frequency_embedding_size

    @staticmethod
    def timestep_embedding(t, dim, max_period=10000):
        """
        Create sinusoidal timestep embeddings.

        :param t: 1-D tensor of N timestep indices (may be fractional).
        :param dim: output embedding dimension.
        :param max_period: controls the minimum frequency of the embeddings.
        :return: (N, dim) tensor; layout is [cos | sin] halves.
        """
        half = dim // 2
        freqs = torch.exp(
            -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half
        ).to(device=t.device)
        args = t[:, None].float() * freqs[None]
        embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
        if dim % 2:
            # Odd target dim: pad one zero column so the output is exactly `dim` wide.
            embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
        return embedding

    def forward(self, t):
        # Fix: the original moved tensors to the module's device three times
        # (before the frequency embedding, after it, and after the MLP).
        # A single transfer of `t` up front is sufficient — timestep_embedding
        # builds its tables on t.device and the MLP output already lives on
        # the parameters' device.
        t = t.to(next(self.parameters()).device)
        t_freq = self.timestep_embedding(t, self.frequency_embedding_size)
        return self.mlp(t_freq)
+ """ + if force_drop_ids is None: + drop_ids = torch.rand(labels.shape[0], device=labels.device) < self.dropout_prob + else: + drop_ids = force_drop_ids == 1 + labels = torch.where(drop_ids, self.num_classes, labels) + return labels + + def forward(self, labels, train, force_drop_ids=None): + use_dropout = self.dropout_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + labels = self.token_drop(labels, force_drop_ids) + embeddings = self.embedding_table(labels) + return embeddings + +################################################################################# +# Core DiT Model # +################################################################################# + +class DiTBlock(nn.Module): + """ + A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning. + """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs): + super().__init__() + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + mlp_hidden_dim = int(hidden_size * mlp_ratio) + approx_gelu = lambda: nn.GELU() + self.mlp = Mlp(in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(c).chunk(6, dim=1) + x = x + gate_msa.unsqueeze(1) * self.attn(modulate(self.norm1(x), shift_msa, scale_msa)) + x = x + gate_mlp.unsqueeze(1) * self.mlp(modulate(self.norm2(x), shift_mlp, scale_mlp)) + return x + +class FinalLayer(nn.Module): + """ + The final layer of DiT. 
+ """ + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size[0] * patch_size[1] * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + +class DiT(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + def __init__( + self, + input_size=(32, 32), + patch_size=(2, 2), + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + num_classes=None, + learn_sigma=True, + ): + super().__init__() + self.learn_sigma = learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size = hidden_size + + self.x_embedder = PatchEmbed( + img_size=input_size, patch_size=patch_size, in_chans=in_channels, embed_dim=hidden_size, bias=True + ) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + # 将使用固定的 sin-cos 位置嵌入 + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size), requires_grad=False) + + self.blocks = nn.ModuleList([ + DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) for _ in range(depth) + ]) + self.final_layer = FinalLayer(hidden_size, patch_size, self.out_channels) + self.initialize_weights() + + # 如果 num_classes 不为 None,才初始化 y_embedder + if num_classes is not None: + self.y_embedder = LabelEmbedder(num_classes, hidden_size, class_dropout_prob) + else: + self.y_embedder = None + + def initialize_weights(self): + # 初始化 Transformer 层 + def _basic_init(module): + if isinstance(module, nn.Linear): + 
    def unpatchify(self, x):
        """
        Reassemble a sequence of patch tokens into an image tensor.

        x: (N, T, patch_size[0] * patch_size[1] * C) with
           T = h_patches * w_patches.
        returns imgs: (N, C, H, W) — channels-first (the original docstring
        said (N, H, W, C), but the final reshape below clearly produces
        channels-first output).
        """
        c = self.out_channels
        p_h, p_w = self.x_embedder.patch_size  # (patch height, patch width) tuple
        h_patches, w_patches = self.x_embedder.grid_size
        assert h_patches * w_patches == x.shape[1], "Mismatch in number of patches"

        # (N, h, w, p_h, p_w, C) -> (N, C, h, p_h, w, p_w) -> (N, C, H, W)
        x = x.reshape(shape=(x.shape[0], h_patches, w_patches, p_h, p_w, c))
        x = torch.einsum('nhwpqc->nchpwq', x)
        imgs = x.reshape(shape=(x.shape[0], c, h_patches * p_h, w_patches * p_w))
        return imgs
    def forward_with_cfg(self, x, t, y, cfg_scale):
        """
        Forward pass of DiT with classifier-free guidance.

        Assumes the standard DiT CFG batch layout: the batch is two identical
        halves of latents, with `y` carrying conditional labels for the first
        half and null labels for the second — confirm at the call site.
        Guidance is applied to the first self.in_channels output channels
        (epsilon) only; the remainder (e.g. learned sigma) passes through.
        """
        half = x[: len(x) // 2]
        # Duplicate the first half so cond/uncond branches see the same latents.
        combined = torch.cat([half, half], dim=0)
        model_out = self.forward(combined, t, y)
        eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]
        cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)
        # Classic CFG mix: uncond + scale * (cond - uncond).
        half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps)
        eps = torch.cat([half_eps, half_eps], dim=0)
        return torch.cat([eps, rest], dim=1)
def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
    """
    Build a 1-D sin/cos positional embedding table.

    embed_dim: output dimension for each position (must be even)
    pos: positions to encode, any shape (flattened to (M,))
    returns: (M, embed_dim) array — first half sin, second half cos
    """
    assert embed_dim % 2 == 0
    half_dim = embed_dim // 2
    # Geometric frequency ladder: 1 / 10000^(i / half_dim), i = 0..half_dim-1
    inv_freq = 1.0 / (10000 ** (np.arange(half_dim, dtype=np.float64) / half_dim))
    angles = np.outer(pos.reshape(-1), inv_freq)  # (M, half_dim)
    return np.concatenate([np.sin(angles), np.cos(angles)], axis=1)
class Decoder(nn.Module):
    """
    Upsampling decoder: a stack of transposed ConvSC layers whose final layer
    consumes an encoder skip connection (channel-concatenated) before a 1x1
    readout convolution maps features to the output channels.
    """
    def __init__(self, C_hid, C_out, N_S):
        super(Decoder, self).__init__()
        strides = stride_generator(N_S, reverse=True)
        # All but the last layer keep C_hid channels; the last layer takes
        # 2*C_hid because the skip tensor is concatenated just before it.
        layers = [ConvSC(C_hid, C_hid, stride=s, transpose=True) for s in strides[:-1]]
        layers.append(ConvSC(2 * C_hid, C_hid, stride=strides[-1], transpose=True))
        self.dec = nn.Sequential(*layers)
        self.readout = nn.Conv2d(C_hid, C_out, 1)

    def forward(self, hid, skip):
        for layer in self.dec[:-1]:
            hid = layer(hid)
        hid = self.dec[-1](torch.cat([hid, skip], dim=1))
        return self.readout(hid)
class nmo_dit(nn.Module):
    """
    Spatial encoder -> DiT temporal mixer -> spatial decoder.

    shape_in is the expected (B, T, C, H, W) of the input. Frames are encoded
    independently, time is folded into channels so the DiT (patch size (1, 1))
    mixes all T * hid_S latent channels at once, and the decoder reconstructs
    each frame using the encoder's first (highest-resolution) skip.
    """
    def __init__(self, shape_in, hid_S=32, hid_T=64, N_S=4, N_T=8, time_step=1000, incep_ker=[3,5,7,11], groups=4,
                 in_time_seq_length=10, out_time_seq_length=10):
        super(nmo_dit, self).__init__()
        B, T, C, H, W = shape_in

        # Spatial downsampling factor = product of the encoder's stride-2 stages.
        strides = stride_generator(N_S)
        num_stride2_layers = strides[:N_S].count(2)
        self.downsample_factor = 2 ** num_stride2_layers
        self.H1 = H // self.downsample_factor
        self.W1 = W // self.downsample_factor

        self.in_time_seq_length = in_time_seq_length
        self.out_time_seq_length = out_time_seq_length
        self.enc = Encoder(C, hid_S, N_S)
        # self.hid = Temporal_evo(T*hid_S, hid_T, N_T, self.H1, self.W1, incep_ker, groups)
        self.dit_block = DiT(
            input_size=(self.H1, self.W1),
            patch_size=(1, 1),  # Changed patch_size to (1, 1)
            in_channels=T*hid_S,
            hidden_size=256,
            depth=12,
            num_heads=2,
            mlp_ratio=4.0,
            class_dropout_prob=0.0,
            num_classes=None,
            learn_sigma=False,
        )

        self.dec = Decoder(hid_S, C, N_S)
        # NOTE(review): a single random timestep batch is drawn ONCE at
        # construction and reused for every forward pass, and its length is
        # the B baked into shape_in — a different runtime batch size will not
        # match it. Confirm this is intentional.
        self.time_step = torch.randint(0, time_step, (B,))

    def forward(self, x_raw):
        """x_raw: (B, T, C, H, W) -> prediction of shape (B, T, C, H, W)."""
        B, T, C, H, W = x_raw.shape
        x = x_raw.view(B*T, C, H, W)

        embed, skips = self.enc(x)
        skip = skips[0]  # first (highest-resolution) skip feeds the decoder
        _, C_, H_, W_ = embed.shape

        # Fold time into channels: the DiT sees one (T*C_)-channel "image".
        z = embed.view(B, T, C_, H_, W_)
        bias = z.reshape(B, T*C_, H_, W_)
        bias_hid = self.dit_block(bias, self.time_step)

        hid = bias_hid.reshape(B*T, C_, H_, W_)  # Now the dimensions should match
        Y = self.dec(hid, skip)

        Y = Y.reshape(B, T, -1, H, W)
        return Y
class Koopman_Operator2D(nn.Module):
    """
    Spectral Koopman operator acting on the 2-D Fourier modes of the input.

    Only the lowest modes_x x modes_y frequency block (plus its negative-x
    mirror band) is advanced by a learned complex matrix; every other mode
    is zeroed before the inverse transform.
    """
    def __init__(self, op_size, modes_x, modes_y):
        super(Koopman_Operator2D, self).__init__()
        self.op_size = op_size
        self.scale = 1 / (op_size * op_size)
        self.modes_x = modes_x
        self.modes_y = modes_y
        # Learned complex transition matrix, applied per retained frequency.
        self.koopman_matrix = nn.Parameter(
            self.scale * torch.rand(op_size, op_size, self.modes_x, self.modes_y, dtype=torch.cfloat)
        )

    def time_marching(self, input, weights):
        # Complex multiplication: (b, t, x, y) x (t, f, x, y) -> (b, f, x, y)
        return torch.einsum("btxy,tfxy->bfxy", input, weights)

    def forward(self, x):
        mx, my = self.modes_x, self.modes_y
        height, width = x.size(-2), x.size(-1)
        spectrum = torch.fft.rfft2(x)
        advanced = torch.zeros(spectrum.shape, dtype=torch.cfloat, device=x.device)
        # Low positive-x frequencies.
        advanced[:, :, :mx, :my] = self.time_marching(spectrum[:, :, :mx, :my], self.koopman_matrix)
        # Mirrored negative-x frequencies share the same operator.
        advanced[:, :, -mx:, :my] = self.time_marching(spectrum[:, :, -mx:, :my], self.koopman_matrix)
        return torch.fft.irfft2(advanced, s=(height, width))
    def forward_(self, x):
        """
        Core KNO step on a channels-last tensor (the outer forward() reshapes
        input to (B*C, H, W, T) before calling this).

        Returns (prediction, reconstruction): the reconstruction is a plain
        encode -> tanh -> decode round trip used as an auxiliary target.
        """
        # Auxiliary autoencoder branch.
        x_reconstruct = self.enc(x)
        x_reconstruct = torch.tanh(x_reconstruct)
        x_reconstruct = self.dec(x_reconstruct)

        # Prediction branch: lift into the Koopman latent space, then iterate
        # the spectral operator `decompose` times.
        x = self.enc(x)
        x = torch.tanh(x)
        x = x.permute(0, 3, 1, 2)
        x_w = x  # pre-iteration state, reused by the w0 shortcut below

        for i in range(self.decompose):
            x1 = self.koopman_layer(x)
            if self.linear_type:
                x = x + x1
            else:
                x = torch.tanh(x + x1)

        # Shortcut through w0 is computed from the PRE-loop state x_w, not
        # the iterated x.
        if self.normalization:
            x = torch.tanh(self.norm_layer(self.w0(x_w)) + x)
        else:
            x = torch.tanh(self.w0(x_w) + x)

        x = x.permute(0, 2, 3, 1)
        # Scale down the prediction; self.skip_coeff is never used here —
        # presumably a leftover knob. TODO confirm.
        x = self.x_coeff * x
        x = self.dec(x)
        return x, x_reconstruct
class DoubleConv(nn.Module):
    """(convolution => [BN] => ReLU) * 2"""

    def __init__(self, in_channels, out_channels, mid_channels=None):
        super().__init__()
        # Falsy mid_channels (None or 0) falls back to out_channels,
        # matching the original truthiness check.
        mid_channels = mid_channels or out_channels
        stages = [
            nn.Conv2d(in_channels, mid_channels, kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(mid_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(mid_channels, out_channels, kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
        ]
        self.double_conv = nn.Sequential(*stages)

    def forward(self, x):
        return self.double_conv(x)
class OutConv(nn.Module):
    """Final 1x1 convolution mapping feature channels to output channels."""

    def __init__(self, in_channels, out_channels):
        super(OutConv, self).__init__()
        # Pointwise projection; spatial dimensions are untouched.
        self.conv = nn.Conv2d(in_channels, out_channels, 1)

    def forward(self, x):
        return self.conv(x)
latent_token = self.self_attn(latent_token, x_tmp[0], x_tmp[1]) + latent_token + latent_token = latent_token.permute(0, 1, 3, 2).contiguous().view(B, C, self.num_token) + return latent_token + + def latent_decoder_attn(self, x, latent_token): + # x: B C H W + x_init = x + B, C, H, W = x.shape + L = H * W + latent_token = latent_token.view(B, self.head, C // self.head, self.num_token).permute(0, 1, 3, 2).contiguous() + x_tmp = self.decoder_attn(x).view(B, C, -1).permute(0, 2, 1).contiguous() \ + .view(B, L, self.head, C // self.head).permute(0, 2, 1, 3).contiguous() + x = self.self_attn(x_tmp, latent_token, latent_token) + x = x.permute(0, 1, 3, 2).contiguous().view(B, C, H, W) + x_init # B H L C/H + return x + + def get_basis(self, x): + # x: B C N + x_sin = torch.sin(self.modes_list_buffer[None, None, None, :] * x[:, :, :, None] * math.pi) + x_cos = torch.cos(self.modes_list_buffer[None, None, None, :] * x[:, :, :, None] * math.pi) + return torch.cat([x_sin, x_cos], dim=-1) + + def compl_mul2d(self, input, weights): + return torch.einsum("bilm,im->bil", input, weights) + + def forward(self, x): + B, C, H, W = x.shape + + if H % self.patch_size[0] != 0 or W % self.patch_size[1] != 0: + raise ValueError(f"Input height and width must be divisible by patch_size. 
Got input size ({H}, {W}) and patch_size {self.patch_size}.") + + # patchify + x = x.view(x.shape[0], x.shape[1], + x.shape[2] // self.patch_size[0], self.patch_size[0], + x.shape[3] // self.patch_size[1], self.patch_size[1]).contiguous() \ + .permute(0, 2, 4, 1, 3, 5).contiguous() \ + .view(x.shape[0] * (x.shape[2] // self.patch_size[0]) * (x.shape[3] // self.patch_size[1]), x.shape[1], + self.patch_size[0], + self.patch_size[1]) + # Neural Spectral + # (1) encoder + latent_token = self.latent_encoder_attn(x) + # (2) transition + latent_token_modes = self.get_basis(latent_token) + latent_token = self.compl_mul2d(latent_token_modes, self.weights) + latent_token + # (3) decoder + x = self.latent_decoder_attn(x, latent_token) + # de-patchify + x = x.view(B, (H // self.patch_size[0]), (W // self.patch_size[1]), C, self.patch_size[0], + self.patch_size[1]).permute(0, 3, 1, 4, 2, 5).contiguous() \ + .view(B, C, H, W).contiguous() + return x + + +class LSM(nn.Module): + def __init__(self, in_dim, out_dim, d_model, num_token, num_basis, patch_size, padding, bilinear=True): + super(LSM, self).__init__() + in_channels = in_dim + out_channels = out_dim + width = d_model + num_token = num_token + num_basis = num_basis + patch_size = [int(x) for x in patch_size.split(',')] + padding = [int(x) for x in padding.split(',')] + # 多尺度模块 + self.inc = DoubleConv(width, width) + self.down1 = Down(width, width * 2) + self.down2 = Down(width * 2, width * 4) + self.down3 = Down(width * 4, width * 8) + factor = 2 if bilinear else 1 + self.down4 = Down(width * 8, width * 16 // factor) + self.up1 = Up(width * 16, width * 8 // factor, bilinear) + self.up2 = Up(width * 8, width * 4 // factor, bilinear) + self.up3 = Up(width * 4, width * 2 // factor, bilinear) + self.up4 = Up(width * 2, width, bilinear) + self.outc = OutConv(width, width) + # Patchified Neural Spectral Blocks + self.process1 = NeuralSpectralBlock2d(width, num_basis, patch_size, num_token) + self.process2 = 
NeuralSpectralBlock2d(width * 2, num_basis, patch_size, num_token) + self.process3 = NeuralSpectralBlock2d(width * 4, num_basis, patch_size, num_token) + self.process4 = NeuralSpectralBlock2d(width * 8, num_basis, patch_size, num_token) + self.process5 = NeuralSpectralBlock2d(width * 16 // factor, num_basis, patch_size, num_token) + # 投影层 + self.padding = padding + self.fc0 = nn.Linear(in_channels + 2, width) + self.fc1 = nn.Linear(width, 128) + self.fc2 = nn.Linear(128, out_channels) + + def forward(self, x): + # x的输入形状:(B, T, C_in, H, W),其中T=1 + x = x.squeeze(1) # 去除时间维度,x的形状:(B, C_in, H, W) + x = x.permute(0, 2, 3, 1) # 转换为 (B, H, W, C_in) + + grid = self.get_grid(x.shape, x.device) + x = torch.cat((x, grid), dim=-1) + x = self.fc0(x) + x = x.permute(0, 3, 1, 2) # 转换为 (B, C, H, W) + + if not all(item == 0 for item in self.padding): + x = F.pad(x, [0, self.padding[0], 0, self.padding[1]]) + + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x = self.up1(self.process5(x5), self.process4(x4)) + x = self.up2(x, self.process3(x3)) + x = self.up3(x, self.process2(x2)) + x = self.up4(x, self.process1(x1)) + x = self.outc(x) + + if not all(item == 0 for item in self.padding): + x = x[..., :-self.padding[1], :-self.padding[0]] + + x = x.permute(0, 2, 3, 1) # 转换回 (B, H, W, C) + x = self.fc1(x) + x = F.gelu(x) + x = self.fc2(x) + + # 输出调整 + x = x.permute(0, 3, 1, 2) # 转换为 (B, C_out, H, W) + x = x.unsqueeze(1) # 添加时间维度,x的形状:(B, T, C_out, H, W) + + return x + + def get_grid(self, shape, device): + batchsize, size_x, size_y, _ = shape + gridx = torch.linspace(0, 1, size_x, device=device).reshape(1, size_x, 1, 1).repeat(batchsize, 1, size_y, 1) + gridy = torch.linspace(0, 1, size_y, device=device).reshape(1, 1, size_y, 1).repeat(batchsize, size_x, 1, 1) + return torch.cat((gridx, gridy), dim=-1) + +################################################################ +# 实例化和测试模型 
+################################################################ +if __name__ == "__main__": + import argparse + + # 定义args对象,包含模型初始化所需的参数 + in_dim = 1 # 输入维度,根据您的数据调整 + out_dim = 1 # 输出维度,根据您的任务调整 + d_model = 64 # 模型宽度,可根据需求调整 + num_token = 4 # Token数量,可根据需求调整 + num_basis = 16 # 基函数数量,可根据需求调整 + patch_size = '4,4' # Patch大小,确保能整除下采样后的尺寸 + padding = '0,0' # Padding大小,格式为字符串,例如'0,0' + + # 实例化模型 + model = LSM(in_dim=in_dim, out_dim=out_dim, d_model=d_model, num_token=num_token, num_basis=num_basis, patch_size=patch_size, padding=padding) + + # 创建一个示例输入数据,假设输入大小为(batch_size, T, in_channels, height, width) + batch_size = 1 + T = 1 + in_channels = in_dim + height = 128 # 输入高度 + width = 128 # 输入宽度 + x = torch.randn(batch_size, T, in_channels, height, width) + + # 将输入数据移动到模型所在线程的设备(CPU或GPU) + device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + model.to(device) + x = x.to(device) + + # 进行一次前向传播 + output = model(x) + + # 输出结果的形状 + print("输出形状:", output.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/model_test-checkpoint.ipynb b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/model_test-checkpoint.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..123eea5e59583f252c9665dd898c3cc687feef28 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/model_test-checkpoint.ipynb @@ -0,0 +1,162 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "2d816f32-767f-444c-9231-17f98fbb4560", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "\n", + "def conv(in_planes, output_channels, kernel_size, stride, dropout_rate):\n", + " return nn.Sequential(\n", + " nn.Conv2d(in_planes, output_channels, kernel_size=kernel_size,\n", + " stride=stride, padding=(kernel_size - 1) // 2, bias = False),\n", + " nn.BatchNorm2d(output_channels),\n", + " nn.LeakyReLU(0.1, inplace=True),\n", + " 
nn.Dropout(dropout_rate)\n", + " )\n", + "\n", + "def deconv(input_channels, output_channels):\n", + " return nn.Sequential(\n", + " nn.ConvTranspose2d(input_channels, output_channels, kernel_size=4,\n", + " stride=2, padding=1),\n", + " nn.LeakyReLU(0.1, inplace=True)\n", + " )\n", + "\n", + "def output_layer(input_channels, output_channels, kernel_size, stride, dropout_rate):\n", + " return nn.Conv2d(input_channels, output_channels, kernel_size=kernel_size,\n", + " stride=stride, padding=(kernel_size - 1) // 2)\n", + "\n", + "class U_net(nn.Module):\n", + " def __init__(self, input_channels, output_channels, kernel_size, dropout_rate):\n", + " super(U_net, self).__init__()\n", + " self.input_channels = input_channels\n", + " self.conv1 = conv(input_channels, 64, kernel_size=kernel_size, stride=2, dropout_rate = dropout_rate)\n", + " self.conv2 = conv(64, 128, kernel_size=kernel_size, stride=2, dropout_rate = dropout_rate)\n", + " self.conv3 = conv(128, 256, kernel_size=kernel_size, stride=2, dropout_rate = dropout_rate)\n", + " self.conv3_1 = conv(256, 256, kernel_size=kernel_size, stride=1, dropout_rate = dropout_rate)\n", + " self.conv4 = conv(256, 512, kernel_size=kernel_size, stride=2, dropout_rate = dropout_rate)\n", + " self.conv4_1 = conv(512, 512, kernel_size=kernel_size, stride=1, dropout_rate = dropout_rate)\n", + " self.conv5 = conv(512, 1024, kernel_size=kernel_size, stride=2, dropout_rate = dropout_rate)\n", + " self.conv5_1 = conv(1024, 1024, kernel_size=kernel_size, stride=1, dropout_rate = dropout_rate)\n", + "\n", + " self.deconv4 = deconv(1024, 256)\n", + " self.deconv3 = deconv(768, 128)\n", + " self.deconv2 = deconv(384, 64)\n", + " self.deconv1 = deconv(192, 32)\n", + " self.deconv0 = deconv(96, 16)\n", + " \n", + " self.output_layer = output_layer(16 + input_channels, output_channels, \n", + " kernel_size=kernel_size, stride=1, dropout_rate = dropout_rate)\n", + "\n", + "\n", + " def forward(self, x):\n", + " B, T, C, H, W = x.shape\n", + " 
x = x.reshape(B, T*C, H, W)\n", + "\n", + " out_conv1 = self.conv1(x)\n", + " out_conv2 = self.conv2(out_conv1)\n", + " out_conv3 = self.conv3_1(self.conv3(out_conv2))\n", + " out_conv4 = self.conv4_1(self.conv4(out_conv3))\n", + " out_conv5 = self.conv5_1(self.conv5(out_conv4))\n", + "\n", + " out_deconv4 = self.deconv4(out_conv5)\n", + " concat4 = torch.cat((out_conv4, out_deconv4), 1)\n", + " out_deconv3 = self.deconv3(concat4)\n", + " concat3 = torch.cat((out_conv3, out_deconv3), 1)\n", + " out_deconv2 = self.deconv2(concat3)\n", + " concat2 = torch.cat((out_conv2, out_deconv2), 1)\n", + " out_deconv1 = self.deconv1(concat2)\n", + " concat1 = torch.cat((out_conv1, out_deconv1), 1)\n", + " out_deconv0 = self.deconv0(concat1)\n", + " concat0 = torch.cat((x, out_deconv0), 1)\n", + " out = self.output_layer(concat0)\n", + " out = out.reshape(B, T, C, H, W)\n", + "\n", + " return out" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "f572b5c6-73e9-405b-b3b1-ad33665fa8f7", + "metadata": {}, + "outputs": [ + { + "ename": "RuntimeError", + "evalue": "Expected 3D (unbatched) or 4D (batched) input to conv2d, but got input of size: [2, 10, 2, 256, 256]", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[3], line 17\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;18m__name__\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m__main__\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[1;32m 16\u001b[0m test_input \u001b[38;5;241m=\u001b[39m torch\u001b[38;5;241m.\u001b[39mrandn(\u001b[38;5;241m2\u001b[39m, \u001b[38;5;241m10\u001b[39m, \u001b[38;5;241m2\u001b[39m, \u001b[38;5;241m256\u001b[39m, \u001b[38;5;241m256\u001b[39m) \u001b[38;5;66;03m# B T C H W\u001b[39;00m\n\u001b[0;32m---> 17\u001b[0m output 
\u001b[38;5;241m=\u001b[39m \u001b[43mmodel\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtest_input\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 18\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mInput shape: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mtest_input\u001b[38;5;241m.\u001b[39mshape\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 19\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mOutput shape: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00moutput\u001b[38;5;241m.\u001b[39mshape\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m) \n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/module.py:1518\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1518\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/module.py:1527\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 
1523\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1524\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1525\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1526\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1527\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1529\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1530\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "Cell \u001b[0;32mIn[1], line 51\u001b[0m, in \u001b[0;36mU_net.forward\u001b[0;34m(self, x)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, x):\n\u001b[1;32m 48\u001b[0m \u001b[38;5;66;03m# B, T, C, H, W = x.shape\u001b[39;00m\n\u001b[1;32m 49\u001b[0m \u001b[38;5;66;03m# x = x.reshape(B, T*C, H, W)\u001b[39;00m\n\u001b[0;32m---> 51\u001b[0m out_conv1 \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconv1\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 52\u001b[0m out_conv2 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv2(out_conv1)\n\u001b[1;32m 53\u001b[0m out_conv3 \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv3_1(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mconv3(out_conv2))\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/module.py:1518\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1518\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/module.py:1527\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1523\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1524\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m 
(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1525\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[1;32m 1526\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1527\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1529\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1530\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/container.py:215\u001b[0m, in \u001b[0;36mSequential.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 213\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m):\n\u001b[1;32m 214\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m:\n\u001b[0;32m--> 215\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[43mmodule\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 216\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28minput\u001b[39m\n", + 
"File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/module.py:1518\u001b[0m, in \u001b[0;36mModule._wrapped_call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1516\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_compiled_call_impl(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[1;32m 1517\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1518\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/module.py:1527\u001b[0m, in \u001b[0;36mModule._call_impl\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 1522\u001b[0m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic in\u001b[39;00m\n\u001b[1;32m 1523\u001b[0m \u001b[38;5;66;03m# this function, and just call forward.\u001b[39;00m\n\u001b[1;32m 1524\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_forward_pre_hooks\n\u001b[1;32m 1525\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m 
_global_backward_hooks\n\u001b[1;32m 1526\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[0;32m-> 1527\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 1529\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 1530\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/conv.py:460\u001b[0m, in \u001b[0;36mConv2d.forward\u001b[0;34m(self, input)\u001b[0m\n\u001b[1;32m 459\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tensor:\n\u001b[0;32m--> 460\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_conv_forward\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/miniconda3/envs/myenv/lib/python3.8/site-packages/torch/nn/modules/conv.py:456\u001b[0m, in \u001b[0;36mConv2d._conv_forward\u001b[0;34m(self, input, weight, bias)\u001b[0m\n\u001b[1;32m 452\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpadding_mode \u001b[38;5;241m!=\u001b[39m 
\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mzeros\u001b[39m\u001b[38;5;124m'\u001b[39m:\n\u001b[1;32m 453\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m F\u001b[38;5;241m.\u001b[39mconv2d(F\u001b[38;5;241m.\u001b[39mpad(\u001b[38;5;28minput\u001b[39m, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reversed_padding_repeated_twice, mode\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpadding_mode),\n\u001b[1;32m 454\u001b[0m weight, bias, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstride,\n\u001b[1;32m 455\u001b[0m _pair(\u001b[38;5;241m0\u001b[39m), \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdilation, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgroups)\n\u001b[0;32m--> 456\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconv2d\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbias\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 457\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpadding\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdilation\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgroups\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mRuntimeError\u001b[0m: Expected 3D (unbatched) or 4D (batched) input to conv2d, but got input of size: [2, 10, 2, 256, 256]" + ] + } + ], + "source": [ + "input_channels = 2 \n", + "output_channels = 2 \n", + "kernel_size = 3 \n", + "dropout_rate = 0.1 \n", + "\n", + "model = U_net(\n", + " 
input_channels=input_channels * 3,\n", + " output_channels=output_channels * 3,\n", + " kernel_size=kernel_size,\n", + " dropout_rate=dropout_rate\n", + ")\n", + "\n", + "\n", + "\n", + "if __name__ == '__main__':\n", + " test_input = torch.randn(2, 3, 2, 256, 256) # B T C H W\n", + " output = model(test_input)\n", + " print(f\"Input shape: {test_input.shape}\")\n", + " print(f\"Output shape: {output.shape}\") " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ca19fd64-4232-4e71-a16e-ece369e89fd1", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.20" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/simvp-checkpoint.py b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/simvp-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..175e0fe960a2ec23fa678b52a12d062366f1d814 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/simvp-checkpoint.py @@ -0,0 +1,180 @@ +from torch import nn +import torch +from torch import nn + +class BasicConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, transpose=False, act_norm=False): + super(BasicConv2d, self).__init__() + self.act_norm=act_norm + if not transpose: + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) + else: + self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,output_padding=stride //2 ) + self.norm = nn.GroupNorm(2, 
out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + def __init__(self, C_in, C_out, stride, transpose=False, act_norm=True): + super(ConvSC, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride, + padding=1, transpose=transpose, act_norm=act_norm) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, groups, act_norm=False): + super(GroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class Inception(nn.Module): + def __init__(self, C_in, C_hid, C_out, incep_ker=[3,5,7,11], groups=8): + super(Inception, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1, padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y + + + + +def stride_generator(N, reverse=False): + strides = [1, 2]*10 + if reverse: return list(reversed(strides[:N])) + else: return strides[:N] + +class Encoder(nn.Module): + def __init__(self,C_in, C_hid, N_S): + super(Encoder,self).__init__() + strides = stride_generator(N_S) + self.enc = nn.Sequential( + ConvSC(C_in, C_hid, stride=strides[0]), + *[ConvSC(C_hid, C_hid, 
stride=s) for s in strides[1:]] + ) + + def forward(self,x):# B*4, 3, 128, 128 + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1,len(self.enc)): + latent = self.enc[i](latent) + return latent,enc1 + + +class Decoder(nn.Module): + def __init__(self,C_hid, C_out, N_S): + super(Decoder,self).__init__() + strides = stride_generator(N_S, reverse=True) + self.dec = nn.Sequential( + *[ConvSC(C_hid, C_hid, stride=s, transpose=True) for s in strides[:-1]], + ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(C_hid, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0,len(self.dec)-1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Mid_Xnet(nn.Module): + def __init__(self, channel_in, channel_hid, N_T, incep_ker = [3,5,7,11], groups=8): + super(Mid_Xnet, self).__init__() + + self.N_T = N_T + enc_layers = [Inception(channel_in, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)] + for i in range(1, N_T-1): + enc_layers.append(Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + enc_layers.append(Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + + dec_layers = [Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)] + for i in range(1, N_T-1): + dec_layers.append(Inception(2*channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + dec_layers.append(Inception(2*channel_hid, channel_hid//2, channel_in, incep_ker= incep_ker, groups=groups)) + + self.enc = nn.Sequential(*enc_layers) + self.dec = nn.Sequential(*dec_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T*C, H, W) + + # encoder + skips = [] + z = x + for i in range(self.N_T): + z = self.enc[i](z) + if i < self.N_T - 1: + skips.append(z) + + # decoder + z = self.dec[0](z) + for i in range(1, self.N_T): + z = 
self.dec[i](torch.cat([z, skips[-i]], dim=1)) + + y = z.reshape(B, T, C, H, W) + return y + + +class SimVP(nn.Module): + def __init__(self, shape_in, hid_S=16, hid_T=256, N_S=4, N_T=8, output_dim = 1, incep_ker=[3,5,7,11], groups=8): + super(SimVP, self).__init__() + T, C, H, W = shape_in + self.output_dim = output_dim + self.enc = Encoder(C, hid_S, N_S) + self.hid = Mid_Xnet(T*hid_S, hid_T, N_T, incep_ker, groups) + self.dec = Decoder(hid_S, self.output_dim, N_S) + + + def forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + embed, skip = self.enc(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + hid = self.hid(z) + hid = hid.reshape(B*T, C_, H_, W_) + + Y = self.dec(hid, skip) + Y = Y.reshape(B, T, -1, H, W) + return Y + + +if __name__ == "__main__": + inputs = torch.randn(1, 10, 2, 64, 448) + model = SimVP(shape_in=(10, 2, 64, 448), hid_S=32, hid_T=128, output_dim = 2) + outputs = model(inputs) + print(outputs.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/utilities3-checkpoint.py b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/utilities3-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..2ce7d1e4e90338075093ee87293d575f7a0e36d9 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/.ipynb_checkpoints/utilities3-checkpoint.py @@ -0,0 +1,232 @@ +import torch +import numpy as np +import scipy.io +import h5py +import sklearn.metrics +import torch.nn as nn +from scipy.ndimage import gaussian_filter + + +################################################# +# +# Utilities +# +################################################# +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + +# reading data +class MatReader(object): + def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): + super(MatReader, self).__init__() + + self.to_torch = to_torch + self.to_cuda = to_cuda + 
class UnitGaussianNormalizer(object):
    """Pointwise Gaussian normalization.

    Statistics are taken element-wise over axis 0, so x may be shaped
    ntrain*n, ntrain*T*n, or ntrain*n*T.
    """

    def __init__(self, x, eps=0.00001):
        super(UnitGaussianNormalizer, self).__init__()
        self.mean = torch.mean(x, 0)
        self.std = torch.std(x, 0)
        self.eps = eps  # guards division by zero at constant locations

    def encode(self, x):
        """Standardize x with the stored pointwise statistics."""
        return (x - self.mean) / (self.std + self.eps)

    def decode(self, x, sample_idx=None):
        """Invert encode; sample_idx optionally restricts to a point subset."""
        if sample_idx is None:
            std = self.std + self.eps  # n
            mean = self.mean
        else:
            if len(self.mean.shape) == len(sample_idx[0].shape):
                std = self.std[sample_idx] + self.eps  # batch*n
                mean = self.mean[sample_idx]
            if len(self.mean.shape) > len(sample_idx[0].shape):
                std = self.std[:, sample_idx] + self.eps  # T*batch*n
                mean = self.mean[:, sample_idx]

        # x is in shape of batch*n or T*batch*n
        return (x * std) + mean

    def cuda(self):
        self.mean = self.mean.cuda()
        self.std = self.std.cuda()

    def cpu(self):
        self.mean = self.mean.cpu()
        self.std = self.std.cpu()
class RangeNormalizer(object):
    """Feature-wise affine scaling of flattened samples into [low, high]."""

    def __init__(self, x, low=0.0, high=1.0):
        super(RangeNormalizer, self).__init__()
        mymin = torch.min(x, 0)[0].view(-1)
        mymax = torch.max(x, 0)[0].view(-1)
        # y = a*x + b maps [mymin, mymax] -> [low, high] per feature
        self.a = (high - low) / (mymax - mymin)
        self.b = -self.a * mymax + high

    def encode(self, x):
        """Scale x into the target range, feature by feature."""
        shape = x.size()
        flat = x.view(shape[0], -1)
        flat = self.a * flat + self.b
        return flat.view(shape)

    def decode(self, x):
        """Invert encode back to the original data range."""
        shape = x.size()
        flat = x.view(shape[0], -1)
        flat = (flat - self.b) / self.a
        return flat.view(shape)
class ConvLSTMCell(nn.Module):
    """A single convolutional LSTM cell.

    Replaces the dense gate transforms of a standard LSTM with one 2-D
    convolution over the concatenated [input, hidden] channel stack.

    Parameters
    ----------
    input_dim: int
        Number of channels of input tensor.
    hidden_dim: int
        Number of channels of hidden state.
    kernel_size: (int, int)
        Size of the convolutional kernel.
    bias: bool
        Whether or not to add the bias.
    """

    def __init__(self, input_dim, hidden_dim, kernel_size, bias):
        super(ConvLSTMCell, self).__init__()

        self.input_dim = input_dim
        self.hidden_dim = hidden_dim
        self.kernel_size = kernel_size
        # "same" padding so the spatial size is preserved
        self.padding = kernel_size[0] // 2, kernel_size[1] // 2
        self.bias = bias

        # One convolution emits all four gate pre-activations at once.
        self.conv = nn.Conv2d(in_channels=self.input_dim + self.hidden_dim,
                              out_channels=4 * self.hidden_dim,
                              kernel_size=self.kernel_size,
                              padding=self.padding,
                              bias=self.bias)

    def forward(self, input_tensor, cur_state):
        """One time step; returns (h_next, c_next) for the given (h, c)."""
        h_cur, c_cur = cur_state

        stacked = torch.cat([input_tensor, h_cur], dim=1)  # concat along channels
        gate_maps = self.conv(stacked)
        cc_i, cc_f, cc_o, cc_g = torch.split(gate_maps, self.hidden_dim, dim=1)

        in_gate = torch.sigmoid(cc_i)
        forget_gate = torch.sigmoid(cc_f)
        out_gate = torch.sigmoid(cc_o)
        candidate = torch.tanh(cc_g)

        c_next = forget_gate * c_cur + in_gate * candidate
        h_next = out_gate * torch.tanh(c_next)
        return h_next, c_next

    def init_hidden(self, batch_size, image_size):
        """Zero (h, c) tensors allocated on the same device as the weights."""
        height, width = image_size
        dev = self.conv.weight.device
        shape = (batch_size, self.hidden_dim, height, width)
        return torch.zeros(*shape, device=dev), torch.zeros(*shape, device=dev)
+ 0 - layer_output_list is the list of lists of length T of each output + 1 - last_state_list is the list of last states + each element of the list is a tuple (h, c) for hidden state and memory + Example: + >> x = torch.rand((32, 10, 64, 128, 128)) + >> convlstm = ConvLSTM(64, 16, 3, 1, True, True, False) + >> _, last_states = convlstm(x) + >> h = last_states[0][0] # 0 for layer index, 0 for h index + """ + + def __init__(self, input_dim, hidden_dim, kernel_size, num_layers, + batch_first=False, bias=True, return_all_layers=False): + super(ConvLSTM, self).__init__() + + self._check_kernel_size_consistency(kernel_size) + + # Make sure that both `kernel_size` and `hidden_dim` are lists having len == num_layers + kernel_size = self._extend_for_multilayer(kernel_size, num_layers) + hidden_dim = self._extend_for_multilayer(hidden_dim, num_layers) + if not len(kernel_size) == len(hidden_dim) == num_layers: + raise ValueError('Inconsistent list length.') + + self.input_dim = input_dim + self.hidden_dim = hidden_dim + self.kernel_size = kernel_size + self.num_layers = num_layers + self.batch_first = batch_first + self.bias = bias + self.return_all_layers = return_all_layers + + cell_list = [] + for i in range(0, self.num_layers): + cur_input_dim = self.input_dim if i == 0 else self.hidden_dim[i - 1] + + cell_list.append(ConvLSTMCell(input_dim=cur_input_dim, + hidden_dim=self.hidden_dim[i], + kernel_size=self.kernel_size[i], + bias=self.bias)) + + self.cell_list = nn.ModuleList(cell_list) + + def forward(self, input_tensor, hidden_state=None): + """ + + Parameters + ---------- + input_tensor: todo + 5-D Tensor either of shape (t, b, c, h, w) or (b, t, c, h, w) + hidden_state: todo + None. 
todo implement stateful + + Returns + ------- + last_state_list, layer_output + """ + if not self.batch_first: + # (t, b, c, h, w) -> (b, t, c, h, w) + input_tensor = input_tensor.permute(1, 0, 2, 3, 4) + + b, _, _, h, w = input_tensor.size() + + # Implement stateful ConvLSTM + if hidden_state is not None: + raise NotImplementedError() + else: + # Since the init is done in forward. Can send image size here + hidden_state = self._init_hidden(batch_size=b, + image_size=(h, w)) + + layer_output_list = [] + last_state_list = [] + + seq_len = input_tensor.size(1) + cur_layer_input = input_tensor + + for layer_idx in range(self.num_layers): + + h, c = hidden_state[layer_idx] + output_inner = [] + for t in range(seq_len): + h, c = self.cell_list[layer_idx](input_tensor=cur_layer_input[:, t, :, :, :], + cur_state=[h, c]) + output_inner.append(h) + + layer_output = torch.stack(output_inner, dim=1) + cur_layer_input = layer_output + + layer_output_list.append(layer_output) + last_state_list.append([h, c]) + + if not self.return_all_layers: + layer_output_list = layer_output_list[-1:] + last_state_list = last_state_list[-1:] + + return layer_output_list[0], last_state_list + + def _init_hidden(self, batch_size, image_size): + init_states = [] + for i in range(self.num_layers): + init_states.append(self.cell_list[i].init_hidden(batch_size, image_size)) + return init_states + + @staticmethod + def _check_kernel_size_consistency(kernel_size): + if not (isinstance(kernel_size, tuple) or + (isinstance(kernel_size, list) and all([isinstance(elem, tuple) for elem in kernel_size]))): + raise ValueError('`kernel_size` must be tuple or list of tuples') + + @staticmethod + def _extend_for_multilayer(param, num_layers): + if not isinstance(param, list): + param = [param] * num_layers + return param + +if __name__ == '__main__': + x = torch.rand((32, 10, 2, 256, 256)) #(batch_size, seq_len, channels, height, width) + convlstm = ConvLSTM(2, 2, (3, 3), 6, True, True, False) + pred, 
class DoubleConv(nn.Module):
    """(3x3 conv -> BatchNorm -> ReLU) applied twice; spatial size preserved."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # Two identical conv stages; only the first changes the channel count.
        stages = []
        for cin in (in_channels, out_channels):
            stages += [
                nn.Conv2d(cin, out_channels, kernel_size=3, padding=1),
                nn.BatchNorm2d(out_channels),
                nn.ReLU(inplace=True),
            ]
        self.double_conv = nn.Sequential(*stages)

    def forward(self, x):
        return self.double_conv(x)
self.n_channels = n_channels + self.n_classes = n_classes + + self.inc = DoubleConv(n_channels, 64) + self.down1 = Down(64, 128) + self.down2 = Down(128, 256) + self.down3 = Down(256, 512) + self.up1 = Up(512, 256) + self.up2 = Up(256, 128) + self.up3 = Up(128, 64) + self.outc = OutConv(64, n_classes) + + def forward(self, x): + # 合并B和T维度(如果存在) + is_5d = x.dim() == 5 + if is_5d: + B, T, C, H, W = x.size() + x = x.view(B * T, C, H, W) + else: + B, C, H, W = x.size() + T = 1 # 无时间维度时设为1 + + # 原UNet处理流程 + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x = self.up1(x4, x3) + x = self.up2(x, x2) + x = self.up3(x, x1) + logits = self.outc(x) + + # 恢复原始维度(如果是5维输入) + if is_5d: + logits = logits.view(B, T, self.n_classes, H, W) + + return logits + +# # 测试四维输入 +# model = UNet(n_channels=2, n_classes=2) +# input_4d = torch.randn(1, 2, 128, 128) +# output_4d = model(input_4d) +# print(f"4D Output shape: {output_4d.shape}") # 应为 [1, 2, 128, 128] + +# # 测试五维输入 +# input_5d = torch.randn(2, 10, 2, 128, 128) +# output_5d = model(input_5d) +# print(f"5D Output shape: {output_5d.shape}") # 应为 [2, 3, 2, 128, 128] \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/ConvLSTM.cpython-310.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/ConvLSTM.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7851ce5842a34fb365d2e17f711bc8d5ebdea70 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/ConvLSTM.cpython-310.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/ConvLSTM.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/ConvLSTM.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a6a11592db6c7d61c15d4adf72c94e8433eada8 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/ConvLSTM.cpython-38.pyc differ diff --git 
a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/U_net.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/U_net.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d4b491f7dcf49c6b506bf6284d0b8cc21a4e0fe4 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/U_net.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/dit.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/dit.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..289194c52494460f4b9acb192d1b784f0499eafd Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/dit.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/kno_2d.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/kno_2d.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4dac9cc2c31742d137a51ac1037f0a24b43b6ff7 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/kno_2d.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/simvp.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/simvp.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55aa2ec71d56d5584010409f29b6f3f2f4ed3f09 Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/simvp.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/utilities3.cpython-38.pyc b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/utilities3.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5666eb5bd9762821a436aef0c685e6471ca7756c Binary files /dev/null and b/Exp3_Kuroshio_forecasting/model_baseline/__pycache__/utilities3.cpython-38.pyc differ diff --git a/Exp3_Kuroshio_forecasting/model_baseline/cno.py 
class CNOBlock(nn.Module):
    """Basic CNO block: same-padded conv -> optional BatchNorm -> LeakyReLU.

    The in/out size and cutoff arguments mirror the full (anti-aliased) CNO
    API; in this simplified variant they are only recorded as bookkeeping —
    no resampling is performed, so the output spatial size equals the input.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 in_size_h,
                 in_size_w,
                 out_size_h,
                 out_size_w,
                 cutoff_den = 2.0001,
                 conv_kernel = 3,
                 filter_size = 6,
                 lrelu_upsampling = 2,
                 half_width_mult = 0.8,
                 radial = False,
                 batch_norm = True,
                 activation = 'cno_lrelu'
                 ):
        super(CNOBlock, self).__init__()

        self.in_channels = in_channels
        self.out_channels = out_channels
        self.in_size_h = in_size_h
        self.in_size_w = in_size_w
        self.out_size_h = out_size_h
        self.out_size_w = out_size_w
        self.conv_kernel = conv_kernel
        self.batch_norm_flag = batch_norm

        # ---------- Filter properties -----------
        # cutoff w_c = s / cutoff_den; cutoff_den == 2.0 is critical sampling
        self.critically_sampled = cutoff_den == 2.0
        self.in_cutoff_h = self.in_size_h / cutoff_den
        self.in_cutoff_w = self.in_size_w / cutoff_den
        self.out_cutoff_h = self.out_size_h / cutoff_den
        self.out_cutoff_w = self.out_size_w / cutoff_den

        self.in_halfwidth_h = half_width_mult * self.in_size_h - self.in_size_h / cutoff_den
        self.in_halfwidth_w = half_width_mult * self.in_size_w - self.in_size_w / cutoff_den
        self.out_halfwidth_h = half_width_mult * self.out_size_h - self.out_size_h / cutoff_den
        self.out_halfwidth_w = half_width_mult * self.out_size_w - self.out_size_w / cutoff_den

        same_pad = (self.conv_kernel - 1) // 2
        self.convolution = torch.nn.Conv2d(in_channels=self.in_channels,
                                           out_channels=self.out_channels,
                                           kernel_size=self.conv_kernel,
                                           padding=same_pad)

        self.batch_norm = nn.BatchNorm2d(self.out_channels) if self.batch_norm_flag else None
        self.activation = LReLu()

    def forward(self, x):
        x = self.convolution(x)
        if self.batch_norm_flag:
            x = self.batch_norm(x)
        return self.activation(x)
class CNO(nn.Module):
    """Convolutional Neural Operator (simplified 2-D variant).

    U-Net-shaped operator: a lift block, an encoder path, residual blocks at
    each resolution plus a residual "neck" at the coarsest level, a decoder
    path with skip connections, and a projection block.  The cutoff/filter
    arguments are threaded through to the sub-blocks, which in this
    simplified implementation only record them (no explicit anti-aliased
    resampling is performed — TODO confirm against the sub-block code).

    Input and output are 5-D (batch, time, channels, height, width); time is
    folded into the batch for the 2-D convolutions and restored at the end.
    """
    def __init__(self,
                 in_dim,                      # input channels
                 in_size_h,                   # input height
                 in_size_w,                   # input width
                 N_layers,                    # number of encoder/decoder levels
                 N_res = 1,                   # residual blocks per encoder level
                 N_res_neck = 6,              # residual blocks at the bottleneck
                 channel_multiplier = 32,     # base channel count (doubled per level)
                 conv_kernel=3,
                 cutoff_den = 2.0001,
                 filter_size=6,
                 lrelu_upsampling = 2,
                 half_width_mult = 0.8,
                 radial = False,
                 batch_norm = True,
                 out_dim = 10,                # output channels
                 out_size_h = 1,              # 1 means "same as input height"
                 out_size_w = 1,              # 1 means "same as input width"
                 expand_input = False,        # pad sizes up to a multiple of 2**N_layers
                 latent_lift_proj_dim = 64,
                 add_inv = True,              # apply invariant blocks in the decoder
                 activation = 'cno_lrelu'
                 ):

        super(CNO, self).__init__()


        self.N_layers = int(N_layers)

        self.lift_dim = channel_multiplier // 2
        self.out_dim = out_dim

        self.add_inv = add_inv

        self.channel_multiplier = channel_multiplier

        # `radial` may arrive as 0/1 or bool; normalize to bool
        if radial == 0:
            self.radial = False
        else:
            self.radial = True


        # Channel counts along the encoder: lift_dim, cm, 2*cm, 4*cm, ...
        self.encoder_features = [self.lift_dim]
        for i in range(self.N_layers):
            self.encoder_features.append(2 ** i * self.channel_multiplier)

        # Decoder channel counts are the encoder's, reversed; inputs after the
        # first level are doubled because skip connections are concatenated.
        self.decoder_features_in = self.encoder_features[1:]
        self.decoder_features_in.reverse()
        self.decoder_features_out = self.encoder_features[:-1]
        self.decoder_features_out.reverse()

        for i in range(1, self.N_layers):
            self.decoder_features_in[i] = 2 * self.decoder_features_in[i]

        # Channel counts for the invariant blocks; the extra final entry covers
        # the post-concatenation tensor fed to the projection block.
        self.inv_features = self.decoder_features_in.copy()
        self.inv_features.append(self.encoder_features[0] + self.decoder_features_out[-1])


        # Latent sizes: either the raw input size or the input rounded up to a
        # multiple of 2**N_layers so repeated halving stays exact.
        if not expand_input:
            latent_size_h = in_size_h
            latent_size_w = in_size_w
        else:
            down_exponent = 2 ** N_layers
            latent_size_h = in_size_h - (in_size_h % down_exponent) + down_exponent
            latent_size_w = in_size_w - (in_size_w % down_exponent) + down_exponent

        # Output latent sizes; out_size == 1 is a sentinel for "same as input".
        if out_size_h == 1:
            latent_size_out_h = latent_size_h
        else:
            if not expand_input:
                latent_size_out_h = out_size_h
            else:
                down_exponent = 2 ** N_layers
                latent_size_out_h = out_size_h - (out_size_h % down_exponent) + down_exponent

        if out_size_w == 1:
            latent_size_out_w = latent_size_w
        else:
            if not expand_input:
                latent_size_out_w = out_size_w
            else:
                down_exponent = 2 ** N_layers
                latent_size_out_w = out_size_w - (out_size_w % down_exponent) + down_exponent

        # Spatial sizes per level: encoder halves, decoder doubles.
        self.encoder_sizes_h = []
        self.encoder_sizes_w = []
        self.decoder_sizes_h = []
        self.decoder_sizes_w = []
        for i in range(self.N_layers + 1):
            self.encoder_sizes_h.append(latent_size_h // (2 ** i))
            self.encoder_sizes_w.append(latent_size_w // (2 ** i))
            self.decoder_sizes_h.append(latent_size_out_h // 2 ** (self.N_layers - i))
            self.decoder_sizes_w.append(latent_size_out_w // 2 ** (self.N_layers - i))


        # Lift: raw input channels -> first encoder width (no batch norm here).
        self.lift = LiftProjectBlock(in_channels=in_dim,
                                     out_channels=self.encoder_features[0],
                                     in_size_h=in_size_h,
                                     in_size_w=in_size_w,
                                     out_size_h=self.encoder_sizes_h[0],
                                     out_size_w=self.encoder_sizes_w[0],
                                     latent_dim=latent_lift_proj_dim,
                                     cutoff_den=cutoff_den,
                                     conv_kernel=conv_kernel,
                                     filter_size=filter_size,
                                     lrelu_upsampling=lrelu_upsampling,
                                     half_width_mult=half_width_mult,
                                     radial=radial,
                                     batch_norm=False,
                                     activation=activation)
        # Resolve the "1 == same as input" sentinel for the projection block.
        _out_size_h = out_size_h
        _out_size_w = out_size_w
        if out_size_h == 1:
            _out_size_h = in_size_h
        if out_size_w == 1:
            _out_size_w = in_size_w

        # Project: concatenated [first skip, last decoder output] -> out_dim.
        self.project = LiftProjectBlock(in_channels=self.encoder_features[0] + self.decoder_features_out[-1],
                                        out_channels=out_dim,
                                        in_size_h=self.decoder_sizes_h[-1],
                                        in_size_w=self.decoder_sizes_w[-1],
                                        out_size_h=_out_size_h,
                                        out_size_w=_out_size_w,
                                        latent_dim=latent_lift_proj_dim,
                                        cutoff_den=cutoff_den,
                                        conv_kernel=conv_kernel,
                                        filter_size=filter_size,
                                        lrelu_upsampling=lrelu_upsampling,
                                        half_width_mult=half_width_mult,
                                        radial=radial,
                                        batch_norm=False,
                                        activation=activation)


        # Encoder path: one CNOBlock per level, widening channels.
        self.encoder = nn.ModuleList([
            CNOBlock(
                in_channels=self.encoder_features[i],
                out_channels=self.encoder_features[i + 1],
                in_size_h=self.encoder_sizes_h[i],
                in_size_w=self.encoder_sizes_w[i],
                out_size_h=self.encoder_sizes_h[i + 1],
                out_size_w=self.encoder_sizes_w[i + 1],
                cutoff_den=cutoff_den,
                conv_kernel=conv_kernel,
                filter_size=filter_size,
                lrelu_upsampling=lrelu_upsampling,
                half_width_mult=half_width_mult,
                radial=radial,
                batch_norm=batch_norm,
                activation=activation
            )
            for i in range(self.N_layers)
        ])


        # Encoder-to-decoder expansion: maps each encoder-level skip tensor
        # to the matching decoder-level size before concatenation.
        self.ED_expansion = nn.ModuleList([
            CNOBlock(
                in_channels=self.encoder_features[i],
                out_channels=self.encoder_features[i],
                in_size_h=self.encoder_sizes_h[i],
                in_size_w=self.encoder_sizes_w[i],
                out_size_h=self.decoder_sizes_h[self.N_layers - i],
                out_size_w=self.decoder_sizes_w[self.N_layers - i],
                cutoff_den=cutoff_den,
                conv_kernel=conv_kernel,
                filter_size=filter_size,
                lrelu_upsampling=lrelu_upsampling,
                half_width_mult=half_width_mult,
                radial=radial,
                batch_norm=batch_norm,
                activation=activation
            )
            for i in range(self.N_layers + 1)
        ])

        # Decoder path: one CNOBlock per level, narrowing channels.
        self.decoder = nn.ModuleList([
            CNOBlock(
                in_channels=self.decoder_features_in[i],
                out_channels=self.decoder_features_out[i],
                in_size_h=self.decoder_sizes_h[i],
                in_size_w=self.decoder_sizes_w[i],
                out_size_h=self.decoder_sizes_h[i + 1],
                out_size_w=self.decoder_sizes_w[i + 1],
                cutoff_den=cutoff_den,
                conv_kernel=conv_kernel,
                filter_size=filter_size,
                lrelu_upsampling=lrelu_upsampling,
                half_width_mult=half_width_mult,
                radial=radial,
                batch_norm=batch_norm,
                activation=activation
            )
            for i in range(self.N_layers)
        ])

        # Channel-preserving blocks applied before each decoder level when
        # add_inv is enabled.
        self.decoder_inv = nn.ModuleList([
            CNOBlock(
                in_channels=self.inv_features[i],
                out_channels=self.inv_features[i],
                in_size_h=self.decoder_sizes_h[i],
                in_size_w=self.decoder_sizes_w[i],
                out_size_h=self.decoder_sizes_h[i],
                out_size_w=self.decoder_sizes_w[i],
                cutoff_den=cutoff_den,
                conv_kernel=conv_kernel,
                filter_size=filter_size,
                lrelu_upsampling=lrelu_upsampling,
                half_width_mult=half_width_mult,
                radial=radial,
                batch_norm=batch_norm,
                activation=activation
            )
            for i in range(self.N_layers + 1)
        ])


        # Residual blocks: N_res per encoder level, then N_res_neck at the
        # coarsest level.  Stored flat; `forward` walks them with an index.
        self.res_nets = []
        self.N_res = int(N_res)
        self.N_res_neck = int(N_res_neck)

        for l in range(self.N_layers):
            for i in range(self.N_res):
                self.res_nets.append(
                    ResidualBlock(
                        channels=self.encoder_features[l],
                        size_h=self.encoder_sizes_h[l],
                        size_w=self.encoder_sizes_w[l],
                        cutoff_den=cutoff_den,
                        conv_kernel=conv_kernel,
                        filter_size=filter_size,
                        lrelu_upsampling=lrelu_upsampling,
                        half_width_mult=half_width_mult,
                        radial=radial,
                        batch_norm=batch_norm,
                        activation=activation
                    )
                )
        for i in range(self.N_res_neck):
            self.res_nets.append(
                ResidualBlock(
                    channels=self.encoder_features[self.N_layers],
                    size_h=self.encoder_sizes_h[self.N_layers],
                    size_w=self.encoder_sizes_w[self.N_layers],
                    cutoff_den=cutoff_den,
                    conv_kernel=conv_kernel,
                    filter_size=filter_size,
                    lrelu_upsampling=lrelu_upsampling,
                    half_width_mult=half_width_mult,
                    radial=radial,
                    batch_norm=batch_norm,
                    activation=activation
                )
            )

        # Wrapped in Sequential so the blocks register as submodules.
        self.res_nets = torch.nn.Sequential(*self.res_nets)

    def forward(self, x):
        """Map a (b, t, c, h, w) batch through lift -> encoder -> neck ->
        decoder (with skips) -> project; returns (b, t, out_dim, h', w')."""
        b, t, c, h, w = x.shape
        x = x.reshape(b * t, c, h, w)  # fold time into the batch
        x = self.lift(x)
        skip = []

        res_nets_idx = 0
        for i in range(self.N_layers):

            # Skip tensors pass through this level's residual blocks first.
            y = x
            for j in range(self.N_res):
                y = self.res_nets[res_nets_idx](y)
                res_nets_idx += 1
            skip.append(y)

            x = self.encoder[i](x)

        #----------------------------------------------------------------------

        # Bottleneck residual blocks at the coarsest resolution.
        for j in range(self.N_res_neck):
            x = self.res_nets[res_nets_idx](x)
            res_nets_idx += 1

        for i in range(self.N_layers):

            # First decoder level has no skip to concatenate; later levels
            # concatenate the (expanded) matching encoder skip.
            if i == 0:
                x = self.ED_expansion[self.N_layers - i](x)
            else:
                x = torch.cat((x, self.ED_expansion[self.N_layers - i](skip[-i])), 1)

            if self.add_inv:
                x = self.decoder_inv[i](x)
            x = self.decoder[i](x)

        # Final concatenation with the full-resolution skip, then projection.
        x = torch.cat((x, self.ED_expansion[0](skip[0])), 1)
        x = self.project(x)
        x = x.reshape(b, t, -1, x.shape[-2], x.shape[-1])  # restore time axis

        # NOTE(review): `y` is unbound when N_layers == 0 — `del y` would
        # raise NameError in that degenerate configuration.
        del skip
        del y

        return x

    def get_n_params(self):
        """Return the total number of parameters (elements) in the model."""
        pp = 0

        for p in list(self.parameters()):
            nn = 1
            for s in list(p.size()):
                nn = nn * s
            pp += nn
        return pp

    def print_size(self):
        """Print and return the parameter count (with approximate size in MB)."""
        nparams = 0
        nbytes = 0

        for param in self.parameters():
            nparams += param.numel()
            nbytes += param.data.element_size() * param.numel()

        print(f'{nparams} (~{nbytes / 1e6:.2f} MB)')

        return nparams
class TimestepEmbedder(nn.Module):
    """
    Embeds scalar timesteps into vector representations.

    A fixed sinusoidal frequency embedding is followed by a learned
    two-layer MLP (Linear -> SiLU -> Linear) producing `hidden_size` features.
    """
    def __init__(self, hidden_size, frequency_embedding_size=256):
        super().__init__()
        self.mlp = nn.Sequential(
            nn.Linear(frequency_embedding_size, hidden_size, bias=True),
            nn.SiLU(),
            nn.Linear(hidden_size, hidden_size, bias=True),
        )
        self.frequency_embedding_size = frequency_embedding_size

    @staticmethod
    def timestep_embedding(t, dim, max_period=10000):
        """
        Create sinusoidal timestep embeddings.

        :param t: 1-D tensor of N timestep indices (may be fractional).
        :param dim: dimension of the output embedding.
        :param max_period: controls the minimum frequency of the embeddings.
        :return: an (N, dim) tensor of positional embeddings.
        """
        half = dim // 2
        # Geometric ladder of frequencies from 1 down to 1/max_period.
        freqs = torch.exp(
            -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half
        ).to(device=t.device)
        args = t[:, None].float() * freqs[None]
        embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)
        if dim % 2:
            # Odd dim: pad with a zero column so the output has exactly `dim` features.
            embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
        return embedding

    def forward(self, t):
        # Move t to the module's device once; the sinusoidal embedding then
        # stays on that device, so the redundant intermediate .to() transfers
        # of the original implementation are dropped.
        t = t.to(next(self.parameters()).device)
        t_freq = self.timestep_embedding(t, self.frequency_embedding_size)
        return self.mlp(t_freq)
+ """ + if force_drop_ids is None: + drop_ids = torch.rand(labels.shape[0], device=labels.device) < self.dropout_prob + else: + drop_ids = force_drop_ids == 1 + labels = torch.where(drop_ids, self.num_classes, labels) + return labels + + def forward(self, labels, train, force_drop_ids=None): + use_dropout = self.dropout_prob > 0 + if (train and use_dropout) or (force_drop_ids is not None): + labels = self.token_drop(labels, force_drop_ids) + embeddings = self.embedding_table(labels) + return embeddings + +################################################################################# +# Core DiT Model # +################################################################################# + +class DiTBlock(nn.Module): + """ + A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning. + """ + def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs): + super().__init__() + self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.attn = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, **block_kwargs) + self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + mlp_hidden_dim = int(hidden_size * mlp_ratio) + approx_gelu = lambda: nn.GELU() + self.mlp = Mlp(in_features=hidden_size, hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 6 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(c).chunk(6, dim=1) + x = x + gate_msa.unsqueeze(1) * self.attn(modulate(self.norm1(x), shift_msa, scale_msa)) + x = x + gate_mlp.unsqueeze(1) * self.mlp(modulate(self.norm2(x), shift_mlp, scale_mlp)) + return x + +class FinalLayer(nn.Module): + """ + The final layer of DiT. 
+ """ + def __init__(self, hidden_size, patch_size, out_channels): + super().__init__() + self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6) + self.linear = nn.Linear(hidden_size, patch_size[0] * patch_size[1] * out_channels, bias=True) + self.adaLN_modulation = nn.Sequential( + nn.SiLU(), + nn.Linear(hidden_size, 2 * hidden_size, bias=True) + ) + + def forward(self, x, c): + shift, scale = self.adaLN_modulation(c).chunk(2, dim=1) + x = modulate(self.norm_final(x), shift, scale) + x = self.linear(x) + return x + +class DiT(nn.Module): + """ + Diffusion model with a Transformer backbone. + """ + def __init__( + self, + input_size=(32, 32), + patch_size=(2, 2), + in_channels=4, + hidden_size=1152, + depth=28, + num_heads=16, + mlp_ratio=4.0, + class_dropout_prob=0.1, + num_classes=None, + learn_sigma=True, + ): + super().__init__() + self.learn_sigma = learn_sigma + self.in_channels = in_channels + self.out_channels = in_channels * 2 if learn_sigma else in_channels + self.patch_size = patch_size + self.num_heads = num_heads + self.hidden_size = hidden_size + + self.x_embedder = PatchEmbed( + img_size=input_size, patch_size=patch_size, in_chans=in_channels, embed_dim=hidden_size, bias=True + ) + self.t_embedder = TimestepEmbedder(hidden_size) + num_patches = self.x_embedder.num_patches + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size), requires_grad=False) + + self.blocks = nn.ModuleList([ + DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) for _ in range(depth) + ]) + self.final_layer = FinalLayer(hidden_size, patch_size, self.out_channels) + self.initialize_weights() + + if num_classes is not None: + self.y_embedder = LabelEmbedder(num_classes, hidden_size, class_dropout_prob) + else: + self.y_embedder = None + + def initialize_weights(self): + def _basic_init(module): + if isinstance(module, nn.Linear): + torch.nn.init.xavier_uniform_(module.weight) + if module.bias is not None: + 
nn.init.constant_(module.bias, 0) + self.apply(_basic_init) + + grid_size_h, grid_size_w = self.x_embedder.grid_size + pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (grid_size_h, grid_size_w)) + self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0)) + + w = self.x_embedder.proj.weight.data + nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + nn.init.constant_(self.x_embedder.proj.bias, 0) + + nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02) + nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02) + + for block in self.blocks: + nn.init.constant_(block.adaLN_modulation[-1].weight, 0) + nn.init.constant_(block.adaLN_modulation[-1].bias, 0) + + nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0) + nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0) + nn.init.constant_(self.final_layer.linear.weight, 0) + nn.init.constant_(self.final_layer.linear.bias, 0) + + def unpatchify(self, x): + """ + x: (N, T, patch_size[0]*patch_size[1]*C) + imgs: (N, H, W, C) + """ + c = self.out_channels + p_h, p_w = self.x_embedder.patch_size # 元组 + h_patches, w_patches = self.x_embedder.grid_size + assert h_patches * w_patches == x.shape[1], "Mismatch in number of patches" + + x = x.reshape(shape=(x.shape[0], h_patches, w_patches, p_h, p_w, c)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], c, h_patches * p_h, w_patches * p_w)) + return imgs + + def forward(self, x, t, y=None): + """ + Forward pass of DiT. 
+ x: (N, C, H, W) tensor of spatial inputs + t: (N,) tensor of diffusion timesteps + y: (N,) tensor of class labels or None + """ + x = self.x_embedder(x) + self.pos_embed # (N, T, D),其中 T = H * W / (patch_size[0] * patch_size[1]) + t = self.t_embedder(t) # (N, D) + if self.y_embedder is not None and y is not None: + y = self.y_embedder(y, self.training) # (N, D) + c = t + y # (N, D) + else: + c = t # (N, D) + for block in self.blocks: + x = block(x, c) # (N, T, D) + x = self.final_layer(x, c) # (N, T, patch_size[0] * patch_size[1] * out_channels) + x = self.unpatchify(x) # (N, out_channels, H, W) + return x + + def forward_with_cfg(self, x, t, y, cfg_scale): + """ + Forward pass of DiT with classifier-free guidance. + """ + half = x[: len(x) // 2] + combined = torch.cat([half, half], dim=0) + model_out = self.forward(combined, t, y) + eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:] + cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0) + half_eps = uncond_eps + cfg_scale * (cond_eps - uncond_eps) + eps = torch.cat([half_eps, half_eps], dim=0) + return torch.cat([eps, rest], dim=1) + +################################################################################# +# Sine/Cosine Positional Embedding Functions # +################################################################################# + +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0): + """ + grid_size: (grid_size_h, grid_size_w) + return: + pos_embed: [grid_size_h*grid_size_w, embed_dim] 或 [1+grid_size_h*grid_size_w, embed_dim] + """ + grid_h = np.arange(grid_size[0], dtype=np.float32) + grid_w = np.arange(grid_size[1], dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # 这里 w 先行 + grid = np.stack(grid, axis=0) + + grid = grid.reshape([2, grid_size[0] * grid_size[1]]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token and extra_tokens > 0: + pos_embed = np.concatenate([np.zeros([extra_tokens, 
embed_dim]), pos_embed], axis=0) + return pos_embed + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # 使用一半的维度来编码 grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: 每个位置的输出维度 + pos: 要编码的位置列表:大小 (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float64) + omega /= embed_dim / 2. + omega = 1. / 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2) + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + +################################################################################# +# Other Components # +################################################################################# + +def stride_generator(N, reverse=False): + strides = [1, 2]*10 + if reverse: return list(reversed(strides[:N])) + else: return strides[:N] + +class ConvSC(nn.Module): + def __init__(self, in_channels, out_channels, stride=1, transpose=False): + super(ConvSC, self).__init__() + if transpose: + self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=3, stride=stride, + padding=1, output_padding=stride-1) + else: + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1) + self.norm = nn.BatchNorm2d(out_channels) + self.act = nn.GELU() + + def forward(self, x): + return self.act(self.norm(self.conv(x))) + +class Inception(nn.Module): + def __init__(self, in_channels, hidden_channels, out_channels, incep_ker=[3,5,7,11], groups=4): + super(Inception, self).__init__() + self.branch1 = nn.Conv2d(in_channels, hidden_channels, kernel_size=1) + 
self.branch2 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[0], padding=incep_ker[0]//2, groups=groups) + self.branch3 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[1], padding=incep_ker[1]//2, groups=groups) + self.branch4 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[2], padding=incep_ker[2]//2, groups=groups) + self.branch5 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[3], padding=incep_ker[3]//2, groups=groups) + self.conv = nn.Conv2d(hidden_channels * 5, out_channels, kernel_size=1) + self.norm = nn.BatchNorm2d(out_channels) + self.act = nn.GELU() + + def forward(self, x): + x1 = self.branch1(x) + x2 = self.branch2(x) + x3 = self.branch3(x) + x4 = self.branch4(x) + x5 = self.branch5(x) + x = torch.cat([x1, x2, x3, x4, x5], dim=1) + x = self.conv(x) + x = self.act(self.norm(x)) + return x + +class Encoder(nn.Module): + def __init__(self, C_in, C_hid, N_S): + super(Encoder, self).__init__() + strides = stride_generator(N_S) + layers = [ConvSC(C_in, C_hid, stride=strides[0])] + for s in strides[1:]: + layers.append(ConvSC(C_hid, C_hid, stride=s)) + self.enc = nn.Sequential(*layers) + + def forward(self, x): + skips = [] + for layer in self.enc: + x = layer(x) + skips.append(x) + return x, skips # 返回所有的 skips + +class Decoder(nn.Module): + def __init__(self, C_hid, C_out, N_S): + super(Decoder, self).__init__() + strides = stride_generator(N_S, reverse=True) + layers = [] + for s in strides[:-1]: + layers.append(ConvSC(C_hid, C_hid, stride=s, transpose=True)) + layers.append(ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True)) + self.dec = nn.Sequential(*layers) + self.readout = nn.Conv2d(C_hid, C_out, 1) + + def forward(self, hid, skip): + for i in range(len(self.dec)-1): + hid = self.dec[i](hid) + hid = self.dec[-1](torch.cat([hid, skip], dim=1)) + return self.readout(hid) + +# class Temporal_evo(nn.Module): +# def __init__(self, channel_in, channel_hid, N_T, h, w, incep_ker=[3, 5, 7, 
11], groups=8): +# super(Temporal_evo, self).__init__() + +# self.N_T = N_T +# enc_layers = [Inception(channel_in, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)] +# for _ in range(1, N_T - 1): +# enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) +# enc_layers.append(Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) + +# dec_layers = [Inception(channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)] +# for _ in range(1, N_T - 1): +# dec_layers.append(Inception(2 * channel_hid, channel_hid // 2, channel_hid, incep_ker=incep_ker, groups=groups)) +# dec_layers.append(Inception(2 * channel_hid, channel_hid // 2, channel_in, incep_ker=incep_ker, groups=groups)) +# norm_layer = partial(nn.LayerNorm, eps=1e-6) +# self.norm = norm_layer(channel_hid) + +# self.enc = nn.Sequential(*enc_layers) +# self.dec = nn.Sequential(*dec_layers) + +# def forward(self, x): +# B, T, C, H, W = x.shape +# x = x.reshape(B, T * C, H, W) + +# # Downsampling +# skips = [] +# for i in range(self.N_T): +# x = self.enc[i](x) +# if i < self.N_T - 1: +# skips.append(x) + +# # Upsampling +# x = self.dec[0](x) +# for i in range(1, self.N_T): +# x = self.dec[i](torch.cat([x, skips[-i]], dim=1)) + +# x = x.reshape(B, T, C, H, W) +# return x + +class nmo_dit(nn.Module): + def __init__(self, shape_in, hid_S=32, hid_T=64, N_S=4, N_T=8, time_step=1000, incep_ker=[3,5,7,11], groups=4, + in_time_seq_length=10, out_time_seq_length=10): + super(nmo_dit, self).__init__() + B, T, C, H, W = shape_in + + strides = stride_generator(N_S) + num_stride2_layers = strides[:N_S].count(2) + self.downsample_factor = 2 ** num_stride2_layers + self.H1 = H // self.downsample_factor + self.W1 = W // self.downsample_factor + + self.in_time_seq_length = in_time_seq_length + self.out_time_seq_length = out_time_seq_length + self.enc = Encoder(C, hid_S, N_S) + # self.hid = 
Temporal_evo(T*hid_S, hid_T, N_T, self.H1, self.W1, incep_ker, groups) + self.dit_block = DiT( + input_size=(self.H1, self.W1), + patch_size=(1, 1), # Changed patch_size to (1, 1) + in_channels=T*hid_S, + hidden_size=256, + depth=12, + num_heads=2, + mlp_ratio=4.0, + class_dropout_prob=0.0, + num_classes=None, + learn_sigma=False, + ) + + self.dec = Decoder(hid_S, C, N_S) + self.time_step = torch.randint(0, time_step, (B,)) + + def forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + embed, skips = self.enc(x) + skip = skips[0] + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + bias = z.reshape(B, T*C_, H_, W_) + bias_hid = self.dit_block(bias, self.time_step) + + hid = bias_hid.reshape(B*T, C_, H_, W_) # Now the dimensions should match + Y = self.dec(hid, skip) + + Y = Y.reshape(B, T, -1, H, W) + return Y \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/kno_2d.py b/Exp3_Kuroshio_forecasting/model_baseline/kno_2d.py new file mode 100644 index 0000000000000000000000000000000000000000..0e8ef7ab3f44a15754cf01b71f53feb938a29944 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/kno_2d.py @@ -0,0 +1,162 @@ +# KNO model +import torch +import numpy as np +import torch.nn as nn +import torch.nn.functional as F + +torch.manual_seed(0) + +# The structure of Auto-Encoder +class encoder_mlp(nn.Module): + def __init__(self, t_len, op_size): + super(encoder_mlp, self).__init__() + self.layer = nn.Linear(t_len, op_size) + def forward(self, x): + x = self.layer(x) + return x + +class decoder_mlp(nn.Module): + def __init__(self, t_len, op_size): + super(decoder_mlp, self).__init__() + self.layer = nn.Linear(op_size, t_len) + def forward(self, x): + x = self.layer(x) + return x + +class encoder_conv1d(nn.Module): + def __init__(self, t_len, op_size): + super(encoder_conv1d, self).__init__() + self.layer = nn.Conv1d(t_len, op_size,1) + def forward(self, x): + x = x.permute([0,2,1]) + x = 
self.layer(x) + x = x.permute([0,2,1]) + return x + +class decoder_conv1d(nn.Module): + def __init__(self, t_len, op_size): + super(decoder_conv1d, self).__init__() + self.layer = nn.Conv1d(op_size, t_len,1) + def forward(self, x): + x = x.permute([0,2,1]) + x = self.layer(x) + x = x.permute([0,2,1]) + return x + +class encoder_conv2d(nn.Module): + def __init__(self, t_len, op_size): + super(encoder_conv2d, self).__init__() + self.layer = nn.Conv2d(t_len, op_size,1) + def forward(self, x): + x = x.permute([0,3,1,2]) + x = self.layer(x) + x = x.permute([0,2,3,1]) + return x + +class decoder_conv2d(nn.Module): + def __init__(self, t_len, op_size): + super(decoder_conv2d, self).__init__() + self.layer = nn.Conv2d(op_size, t_len,1) + def forward(self, x): + x = x.permute([0,3,1,2]) + x = self.layer(x) + x = x.permute([0,2,3,1]) + return x + + +class Koopman_Operator2D(nn.Module): + def __init__(self, op_size, modes_x, modes_y): + super(Koopman_Operator2D, self).__init__() + self.op_size = op_size + self.scale = (1 / (op_size * op_size)) + self.modes_x = modes_x + self.modes_y = modes_y + self.koopman_matrix = nn.Parameter(self.scale * torch.rand(op_size, op_size, self.modes_x, self.modes_y, dtype=torch.cfloat)) + + # Complex multiplication + def time_marching(self, input, weights): + return torch.einsum("btxy,tfxy->bfxy", input, weights) + + def forward(self, x): + batchsize = x.shape[0] + x_ft = torch.fft.rfft2(x) + out_ft = torch.zeros(x_ft.shape, dtype=torch.cfloat, device = x.device) + out_ft[:, :, :self.modes_x, :self.modes_y] = self.time_marching(x_ft[:, :, :self.modes_x, :self.modes_y], self.koopman_matrix) + out_ft[:, :, -self.modes_x:, :self.modes_y] = self.time_marching(x_ft[:, :, -self.modes_x:, :self.modes_y], self.koopman_matrix) + x = torch.fft.irfft2(out_ft, s=(x.size(-2), x.size(-1))) + return x + +class KNO2d(nn.Module): + def __init__(self, encoder, decoder, op_size, modes_x=10, modes_y=10, decompose=6, linear_type=True, normalization=False, 
x_coeff=0.1, skip_coeff=1): + super(KNO2d, self).__init__() + self.op_size = op_size + self.decompose = decompose + self.modes_x = modes_x + self.modes_y = modes_y + self.x_coeff = x_coeff + self.skip_coeff = skip_coeff + + self.enc = encoder + self.dec = decoder + self.koopman_layer = Koopman_Operator2D(self.op_size, self.modes_x, self.modes_y) + self.w0 = nn.Conv2d(op_size, op_size, 1) + self.linear_type = linear_type + self.normalization = normalization + if self.normalization: + self.norm_layer = torch.nn.BatchNorm2d(op_size) + + def forward_(self, x): + x_reconstruct = self.enc(x) + x_reconstruct = torch.tanh(x_reconstruct) + x_reconstruct = self.dec(x_reconstruct) + + x = self.enc(x) + x = torch.tanh(x) + x = x.permute(0, 3, 1, 2) + x_w = x + + for i in range(self.decompose): + x1 = self.koopman_layer(x) + if self.linear_type: + x = x + x1 + else: + x = torch.tanh(x + x1) + + if self.normalization: + x = torch.tanh(self.norm_layer(self.w0(x_w)) + x) + else: + x = torch.tanh(self.w0(x_w) + x) + + x = x.permute(0, 2, 3, 1) + x = self.x_coeff * x + x = self.dec(x) + return x, x_reconstruct + + def forward(self, x): + B, T, C, H, W = x.shape + + x = x.permute(0, 2, 3, 4, 1) # (B, C, H, W, T) + x = x.reshape(B*C, H, W, T) + + output, rec = self.forward_(x) + + output = output.view(B, C, H, W, T).permute(0, 4, 1, 2, 3) # (B, T, C, H, W) + rec = rec.view(B, C, H, W, T).permute(0, 4, 1, 2, 3) + + return output, rec + +if __name__ == "__main__": + # hyper parameters + t_len = 10 + o = 16 + f_x = 16 + f_y = 16 + r = 8 + encoder = encoder_mlp(t_len, op_size=o) + decoder = decoder_mlp(t_len, op_size=o) + model = KNO2d(encoder, decoder, op_size=o, modes_x=f_x, modes_y=f_y, decompose=r) + + # Test with new input shape (B, T, C, H, W) + inputs = torch.rand(1, 10, 2, 256, 256) # (B, T, C, H, W) + output, rec = model(inputs) + print(output.shape, rec.shape) # Should be (torch.Size([1, 10, 2, 256, 256]), torch.Size([1, 10, 2, 256, 256])) \ No newline at end of file diff --git 
a/Exp3_Kuroshio_forecasting/model_baseline/lsm.py b/Exp3_Kuroshio_forecasting/model_baseline/lsm.py new file mode 100644 index 0000000000000000000000000000000000000000..d1c4ce244078e8fdcdcd7cbe27f041dcc9abb9c6 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/lsm.py @@ -0,0 +1,277 @@ +import torch +import torch.nn.functional as F +import torch.nn as nn +import numpy as np +import math +import argparse + + + +class DoubleConv(nn.Module): + """(convolution => [BN] => ReLU) * 2""" + + def __init__(self, in_channels, out_channels, mid_channels=None): + super().__init__() + if not mid_channels: + mid_channels = out_channels + self.double_conv = nn.Sequential( + nn.Conv2d(in_channels, mid_channels, kernel_size=3, padding=1, bias=False), + nn.BatchNorm2d(mid_channels), + nn.ReLU(inplace=True), + nn.Conv2d(mid_channels, out_channels, kernel_size=3, padding=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x): + return self.double_conv(x) + +class Down(nn.Module): + """Downscaling with maxpool then double conv""" + + def __init__(self, in_channels, out_channels): + super().__init__() + self.maxpool_conv = nn.Sequential( + nn.MaxPool2d(2), + DoubleConv(in_channels, out_channels) + ) + + def forward(self, x): + return self.maxpool_conv(x) + +class Up(nn.Module): + """Upscaling then double conv""" + + def __init__(self, in_channels, out_channels, bilinear=True): + super().__init__() + + if bilinear: + self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True) + self.conv = DoubleConv(in_channels, out_channels, in_channels // 2) + else: + self.up = nn.ConvTranspose2d(in_channels, in_channels // 2, kernel_size=2, stride=2) + self.conv = DoubleConv(in_channels, out_channels) + + def forward(self, x1, x2): + x1 = self.up(x1) + diffY = x2.size()[2] - x1.size()[2] + diffX = x2.size()[3] - x1.size()[3] + + x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2, + diffY // 2, diffY - diffY // 2]) + x = torch.cat([x2, 
x1], dim=1) + return self.conv(x) + +class OutConv(nn.Module): + + def __init__(self, in_channels, out_channels): + super(OutConv, self).__init__() + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1) + + def forward(self, x): + return self.conv(x) + +################################################################ +# Patchify 和 Neural Spectral Block +################################################################ +class NeuralSpectralBlock2d(nn.Module): + def __init__(self, width, num_basis, patch_size=[3, 3], num_token=4): + super(NeuralSpectralBlock2d, self).__init__() + self.patch_size = patch_size + self.width = width + self.num_basis = num_basis + + self.modes_list = (1.0 / float(num_basis)) * torch.tensor([i for i in range(num_basis)], + dtype=torch.float) + self.register_buffer('modes_list_buffer', self.modes_list) + self.weights = nn.Parameter( + (1 / (width)) * torch.rand(width, self.num_basis * 2, dtype=torch.float)) + + self.head = 8 + self.num_token = num_token + self.latent = nn.Parameter( + (1 / (width)) * torch.rand(self.head, self.num_token, width // self.head, dtype=torch.float)) + self.encoder_attn = nn.Conv2d(self.width, self.width * 2, kernel_size=1, stride=1) + self.decoder_attn = nn.Conv2d(self.width, self.width, kernel_size=1, stride=1) + self.softmax = nn.Softmax(dim=-1) + + def self_attn(self, q, k, v): + # q,k,v: B H L C/H + attn = self.softmax(torch.einsum("bhlc,bhsc->bhls", q, k)) + return torch.einsum("bhls,bhsc->bhlc", attn, v) + + def latent_encoder_attn(self, x): + # x: B C H W + B, C, H, W = x.shape + L = H * W + latent_token = self.latent[None, :, :, :].repeat(B, 1, 1, 1) + x_tmp = self.encoder_attn(x).view(B, C * 2, -1).permute(0, 2, 1).contiguous() \ + .view(B, L, self.head, C // self.head, 2).permute(4, 0, 2, 1, 3).contiguous() + latent_token = self.self_attn(latent_token, x_tmp[0], x_tmp[1]) + latent_token + latent_token = latent_token.permute(0, 1, 3, 2).contiguous().view(B, C, self.num_token) + return 
latent_token + + def latent_decoder_attn(self, x, latent_token): + # x: B C H W + x_init = x + B, C, H, W = x.shape + L = H * W + latent_token = latent_token.view(B, self.head, C // self.head, self.num_token).permute(0, 1, 3, 2).contiguous() + x_tmp = self.decoder_attn(x).view(B, C, -1).permute(0, 2, 1).contiguous() \ + .view(B, L, self.head, C // self.head).permute(0, 2, 1, 3).contiguous() + x = self.self_attn(x_tmp, latent_token, latent_token) + x = x.permute(0, 1, 3, 2).contiguous().view(B, C, H, W) + x_init # B H L C/H + return x + + def get_basis(self, x): + # x: B C N + x_sin = torch.sin(self.modes_list_buffer[None, None, None, :] * x[:, :, :, None] * math.pi) + x_cos = torch.cos(self.modes_list_buffer[None, None, None, :] * x[:, :, :, None] * math.pi) + return torch.cat([x_sin, x_cos], dim=-1) + + def compl_mul2d(self, input, weights): + return torch.einsum("bilm,im->bil", input, weights) + + def forward(self, x): + B, C, H, W = x.shape + + if H % self.patch_size[0] != 0 or W % self.patch_size[1] != 0: + raise ValueError(f"Input height and width must be divisible by patch_size. 
Got input size ({H}, {W}) and patch_size {self.patch_size}.") + + # patchify + x = x.view(x.shape[0], x.shape[1], + x.shape[2] // self.patch_size[0], self.patch_size[0], + x.shape[3] // self.patch_size[1], self.patch_size[1]).contiguous() \ + .permute(0, 2, 4, 1, 3, 5).contiguous() \ + .view(x.shape[0] * (x.shape[2] // self.patch_size[0]) * (x.shape[3] // self.patch_size[1]), x.shape[1], + self.patch_size[0], + self.patch_size[1]) + # Neural Spectral + # (1) encoder + latent_token = self.latent_encoder_attn(x) + # (2) transition + latent_token_modes = self.get_basis(latent_token) + latent_token = self.compl_mul2d(latent_token_modes, self.weights) + latent_token + # (3) decoder + x = self.latent_decoder_attn(x, latent_token) + # de-patchify + x = x.view(B, (H // self.patch_size[0]), (W // self.patch_size[1]), C, self.patch_size[0], + self.patch_size[1]).permute(0, 3, 1, 4, 2, 5).contiguous() \ + .view(B, C, H, W).contiguous() + return x + + +class LSM(nn.Module): + def __init__(self, in_dim, out_dim, d_model, num_token, num_basis, patch_size, padding, bilinear=True): + super(LSM, self).__init__() + in_channels = in_dim + out_channels = out_dim + width = d_model + num_token = num_token + num_basis = num_basis + patch_size = [int(x) for x in patch_size.split(',')] + padding = [int(x) for x in padding.split(',')] + # 多尺度模块 + self.inc = DoubleConv(width, width) + self.down1 = Down(width, width * 2) + self.down2 = Down(width * 2, width * 4) + self.down3 = Down(width * 4, width * 8) + factor = 2 if bilinear else 1 + self.down4 = Down(width * 8, width * 16 // factor) + self.up1 = Up(width * 16, width * 8 // factor, bilinear) + self.up2 = Up(width * 8, width * 4 // factor, bilinear) + self.up3 = Up(width * 4, width * 2 // factor, bilinear) + self.up4 = Up(width * 2, width, bilinear) + self.outc = OutConv(width, width) + # Patchified Neural Spectral Blocks + self.process1 = NeuralSpectralBlock2d(width, num_basis, patch_size, num_token) + self.process2 = 
NeuralSpectralBlock2d(width * 2, num_basis, patch_size, num_token) + self.process3 = NeuralSpectralBlock2d(width * 4, num_basis, patch_size, num_token) + self.process4 = NeuralSpectralBlock2d(width * 8, num_basis, patch_size, num_token) + self.process5 = NeuralSpectralBlock2d(width * 16 // factor, num_basis, patch_size, num_token) + # 投影层 + self.padding = padding + self.fc0 = nn.Linear(in_channels + 2, width) + self.fc1 = nn.Linear(width, 128) + self.fc2 = nn.Linear(128, out_channels) + + def forward(self, x): + # x的输入形状:(B, T, C_in, H, W),其中T=1 + x = x.squeeze(1) # 去除时间维度,x的形状:(B, C_in, H, W) + x = x.permute(0, 2, 3, 1) # 转换为 (B, H, W, C_in) + + grid = self.get_grid(x.shape, x.device) + x = torch.cat((x, grid), dim=-1) + x = self.fc0(x) + x = x.permute(0, 3, 1, 2) # 转换为 (B, C, H, W) + + if not all(item == 0 for item in self.padding): + x = F.pad(x, [0, self.padding[0], 0, self.padding[1]]) + + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x = self.up1(self.process5(x5), self.process4(x4)) + x = self.up2(x, self.process3(x3)) + x = self.up3(x, self.process2(x2)) + x = self.up4(x, self.process1(x1)) + x = self.outc(x) + + if not all(item == 0 for item in self.padding): + x = x[..., :-self.padding[1], :-self.padding[0]] + + x = x.permute(0, 2, 3, 1) # 转换回 (B, H, W, C) + x = self.fc1(x) + x = F.gelu(x) + x = self.fc2(x) + + # 输出调整 + x = x.permute(0, 3, 1, 2) # 转换为 (B, C_out, H, W) + x = x.unsqueeze(1) # 添加时间维度,x的形状:(B, T, C_out, H, W) + + return x + + def get_grid(self, shape, device): + batchsize, size_x, size_y, _ = shape + gridx = torch.linspace(0, 1, size_x, device=device).reshape(1, size_x, 1, 1).repeat(batchsize, 1, size_y, 1) + gridy = torch.linspace(0, 1, size_y, device=device).reshape(1, 1, size_y, 1).repeat(batchsize, size_x, 1, 1) + return torch.cat((gridx, gridy), dim=-1) + +################################################################ +# 实例化和测试模型 
+################################################################ +if __name__ == "__main__": + import argparse + + # 定义args对象,包含模型初始化所需的参数 + in_dim = 1 # 输入维度,根据您的数据调整 + out_dim = 1 # 输出维度,根据您的任务调整 + d_model = 64 # 模型宽度,可根据需求调整 + num_token = 4 # Token数量,可根据需求调整 + num_basis = 16 # 基函数数量,可根据需求调整 + patch_size = '4,4' # Patch大小,确保能整除下采样后的尺寸 + padding = '0,0' # Padding大小,格式为字符串,例如'0,0' + + # 实例化模型 + model = LSM(in_dim=in_dim, out_dim=out_dim, d_model=d_model, num_token=num_token, num_basis=num_basis, patch_size=patch_size, padding=padding) + + # 创建一个示例输入数据,假设输入大小为(batch_size, T, in_channels, height, width) + batch_size = 1 + T = 1 + in_channels = in_dim + height = 128 # 输入高度 + width = 128 # 输入宽度 + x = torch.randn(batch_size, T, in_channels, height, width) + + # 将输入数据移动到模型所在线程的设备(CPU或GPU) + device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + model.to(device) + x = x.to(device) + + # 进行一次前向传播 + output = model(x) + + # 输出结果的形状 + print("输出形状:", output.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/model_test.ipynb b/Exp3_Kuroshio_forecasting/model_baseline/model_test.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..20616f8f39825d8dc49fe0785a5b7982570a22fa --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/model_test.ipynb @@ -0,0 +1,499 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "id": "d4dd04c8-af04-486d-b648-b6f4b23a3599", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "import torch.nn as nn\n", + "import numpy as np\n", + "import math\n", + "from functools import partial\n", + "from timm.models.vision_transformer import PatchEmbed, Attention, Mlp\n", + "import math\n", + "\n", + "def modulate(x, shift, scale):\n", + " return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1)\n", + "\n", + "#################################################################################\n", + "# Embedding Layers for Timesteps and Class 
Labels #\n", + "#################################################################################\n", + "\n", + "class TimestepEmbedder(nn.Module):\n", + " \"\"\"\n", + " Embeds scalar timesteps into vector representations.\n", + " \"\"\"\n", + " def __init__(self, hidden_size, frequency_embedding_size=256):\n", + " super().__init__()\n", + " self.mlp = nn.Sequential(\n", + " nn.Linear(frequency_embedding_size, hidden_size, bias=True),\n", + " nn.SiLU(),\n", + " nn.Linear(hidden_size, hidden_size, bias=True),\n", + " )\n", + " self.frequency_embedding_size = frequency_embedding_size\n", + "\n", + " @staticmethod\n", + " def timestep_embedding(t, dim, max_period=10000):\n", + " \"\"\"\n", + " Create sinusoidal timestep embeddings.\n", + " \"\"\"\n", + " half = dim // 2\n", + " freqs = torch.exp(\n", + " -math.log(max_period) * torch.arange(start=0, end=half, dtype=torch.float32) / half\n", + " ).to(device=t.device)\n", + " args = t[:, None].float() * freqs[None]\n", + " embedding = torch.cat([torch.cos(args), torch.sin(args)], dim=-1)\n", + " if dim % 2:\n", + " embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)\n", + " return embedding\n", + "\n", + " def forward(self, t):\n", + " t = t.to(next(self.parameters()).device)\n", + " t_freq = self.timestep_embedding(t, self.frequency_embedding_size)\n", + " t_freq = t_freq.to(next(self.parameters()).device)\n", + " t_emb = self.mlp(t_freq)\n", + " t_emb = t_emb.to(next(self.parameters()).device)\n", + " return t_emb\n", + "\n", + "class LabelEmbedder(nn.Module):\n", + " \"\"\"\n", + " Embeds class labels into vector representations. 
Also handles label dropout for classifier-free guidance.\n", + " \"\"\"\n", + " def __init__(self, num_classes, hidden_size, dropout_prob):\n", + " super().__init__()\n", + " use_cfg_embedding = dropout_prob > 0\n", + " self.embedding_table = nn.Embedding(num_classes + use_cfg_embedding, hidden_size)\n", + " self.num_classes = num_classes\n", + " self.dropout_prob = dropout_prob\n", + "\n", + " def token_drop(self, labels, force_drop_ids=None):\n", + " \"\"\"\n", + " Drops labels to enable classifier-free guidance.\n", + " \"\"\"\n", + " if force_drop_ids is None:\n", + " drop_ids = torch.rand(labels.shape[0], device=labels.device) < self.dropout_prob\n", + " else:\n", + " drop_ids = force_drop_ids == 1\n", + " labels = torch.where(drop_ids, self.num_classes, labels)\n", + " return labels\n", + "\n", + " def forward(self, labels, train, force_drop_ids=None):\n", + " use_dropout = self.dropout_prob > 0\n", + " if (train and use_dropout) or (force_drop_ids is not None):\n", + " labels = self.token_drop(labels, force_drop_ids)\n", + " embeddings = self.embedding_table(labels)\n", + " return embeddings\n", + "\n", + "#################################################################################\n", + "# Core DiT Model #\n", + "#################################################################################\n", + "\n", + "class DiTBlock(nn.Module):\n", + " \"\"\"\n", + " A DiT block with adaptive layer norm zero (adaLN-Zero) conditioning.\n", + " \"\"\"\n", + " def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs):\n", + " super().__init__()\n", + " self.norm1 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)\n", + " self.attn = Attention(hidden_size, num_heads=num_heads, qkv_bias=True, **block_kwargs)\n", + " self.norm2 = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)\n", + " mlp_hidden_dim = int(hidden_size * mlp_ratio)\n", + " approx_gelu = lambda: nn.GELU()\n", + " self.mlp = Mlp(in_features=hidden_size, 
hidden_features=mlp_hidden_dim, act_layer=approx_gelu, drop=0)\n", + " self.adaLN_modulation = nn.Sequential(\n", + " nn.SiLU(),\n", + " nn.Linear(hidden_size, 6 * hidden_size, bias=True)\n", + " )\n", + "\n", + " def forward(self, x, c):\n", + " shift_msa, scale_msa, gate_msa, shift_mlp, scale_mlp, gate_mlp = self.adaLN_modulation(c).chunk(6, dim=1)\n", + " x = x + gate_msa.unsqueeze(1) * self.attn(modulate(self.norm1(x), shift_msa, scale_msa))\n", + " x = x + gate_mlp.unsqueeze(1) * self.mlp(modulate(self.norm2(x), shift_mlp, scale_mlp))\n", + " return x\n", + "\n", + "class FinalLayer(nn.Module):\n", + " \"\"\"\n", + " The final layer of DiT.\n", + " \"\"\"\n", + " def __init__(self, hidden_size, patch_size, out_channels):\n", + " super().__init__()\n", + " self.norm_final = nn.LayerNorm(hidden_size, elementwise_affine=False, eps=1e-6)\n", + " self.linear = nn.Linear(hidden_size, patch_size[0] * patch_size[1] * out_channels, bias=True)\n", + " self.adaLN_modulation = nn.Sequential(\n", + " nn.SiLU(),\n", + " nn.Linear(hidden_size, 2 * hidden_size, bias=True)\n", + " )\n", + "\n", + " def forward(self, x, c):\n", + " shift, scale = self.adaLN_modulation(c).chunk(2, dim=1)\n", + " x = modulate(self.norm_final(x), shift, scale)\n", + " x = self.linear(x)\n", + " return x\n", + "\n", + "class DiT(nn.Module):\n", + " \"\"\"\n", + " Diffusion model with a Transformer backbone.\n", + " \"\"\"\n", + " def __init__(\n", + " self,\n", + " input_size=(32, 32),\n", + " patch_size=(2, 2),\n", + " in_channels=4,\n", + " hidden_size=1152,\n", + " depth=28,\n", + " num_heads=16,\n", + " mlp_ratio=4.0,\n", + " class_dropout_prob=0.1,\n", + " num_classes=None,\n", + " learn_sigma=True,\n", + " ):\n", + " super().__init__()\n", + " self.learn_sigma = learn_sigma\n", + " self.in_channels = in_channels\n", + " self.out_channels = in_channels * 2 if learn_sigma else in_channels\n", + " self.patch_size = patch_size\n", + " self.num_heads = num_heads\n", + " self.hidden_size = 
hidden_size\n", + "\n", + " self.x_embedder = PatchEmbed(\n", + " img_size=input_size, patch_size=patch_size, in_chans=in_channels, embed_dim=hidden_size, bias=True\n", + " )\n", + " self.t_embedder = TimestepEmbedder(hidden_size)\n", + " num_patches = self.x_embedder.num_patches\n", + " self.pos_embed = nn.Parameter(torch.zeros(1, num_patches, hidden_size), requires_grad=False)\n", + "\n", + " self.blocks = nn.ModuleList([\n", + " DiTBlock(hidden_size, num_heads, mlp_ratio=mlp_ratio) for _ in range(depth)\n", + " ])\n", + " self.final_layer = FinalLayer(hidden_size, patch_size, self.out_channels)\n", + " self.initialize_weights()\n", + "\n", + " if num_classes is not None:\n", + " self.y_embedder = LabelEmbedder(num_classes, hidden_size, class_dropout_prob)\n", + " else:\n", + " self.y_embedder = None\n", + "\n", + " def initialize_weights(self):\n", + " def _basic_init(module):\n", + " if isinstance(module, nn.Linear):\n", + " torch.nn.init.xavier_uniform_(module.weight)\n", + " if module.bias is not None:\n", + " nn.init.constant_(module.bias, 0)\n", + " self.apply(_basic_init)\n", + "\n", + " grid_size_h, grid_size_w = self.x_embedder.grid_size\n", + " pos_embed = get_2d_sincos_pos_embed(self.pos_embed.shape[-1], (grid_size_h, grid_size_w))\n", + " self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0))\n", + "\n", + " w = self.x_embedder.proj.weight.data\n", + " nn.init.xavier_uniform_(w.view([w.shape[0], -1]))\n", + " nn.init.constant_(self.x_embedder.proj.bias, 0)\n", + "\n", + " nn.init.normal_(self.t_embedder.mlp[0].weight, std=0.02)\n", + " nn.init.normal_(self.t_embedder.mlp[2].weight, std=0.02)\n", + "\n", + " for block in self.blocks:\n", + " nn.init.constant_(block.adaLN_modulation[-1].weight, 0)\n", + " nn.init.constant_(block.adaLN_modulation[-1].bias, 0)\n", + "\n", + " nn.init.constant_(self.final_layer.adaLN_modulation[-1].weight, 0)\n", + " nn.init.constant_(self.final_layer.adaLN_modulation[-1].bias, 0)\n", + " 
nn.init.constant_(self.final_layer.linear.weight, 0)\n", + " nn.init.constant_(self.final_layer.linear.bias, 0)\n", + "\n", + " def unpatchify(self, x):\n", + " \"\"\"\n", + " x: (N, T, patch_size[0]*patch_size[1]*C)\n", + " imgs: (N, H, W, C)\n", + " \"\"\"\n", + " c = self.out_channels\n", + " p_h, p_w = self.x_embedder.patch_size # 元组\n", + " h_patches, w_patches = self.x_embedder.grid_size\n", + " assert h_patches * w_patches == x.shape[1], \"Mismatch in number of patches\"\n", + "\n", + " x = x.reshape(shape=(x.shape[0], h_patches, w_patches, p_h, p_w, c))\n", + " x = torch.einsum('nhwpqc->nchpwq', x)\n", + " imgs = x.reshape(shape=(x.shape[0], c, h_patches * p_h, w_patches * p_w))\n", + " return imgs\n", + "\n", + " def forward(self, x, t, y=None):\n", + " \"\"\"\n", + " Forward pass of DiT.\n", + " x: (N, C, H, W) tensor of spatial inputs\n", + " t: (N,) tensor of diffusion timesteps\n", + " y: (N,) tensor of class labels or None\n", + " \"\"\"\n", + " x = self.x_embedder(x) + self.pos_embed # (N, T, D),其中 T = H * W / (patch_size[0] * patch_size[1])\n", + " t = self.t_embedder(t) # (N, D)\n", + " if self.y_embedder is not None and y is not None:\n", + " y = self.y_embedder(y, self.training) # (N, D)\n", + " c = t + y # (N, D)\n", + " else:\n", + " c = t # (N, D)\n", + " for block in self.blocks:\n", + " x = block(x, c) # (N, T, D)\n", + " x = self.final_layer(x, c) # (N, T, patch_size[0] * patch_size[1] * out_channels)\n", + " x = self.unpatchify(x) # (N, out_channels, H, W)\n", + " return x\n", + "\n", + " def forward_with_cfg(self, x, t, y, cfg_scale):\n", + " \"\"\"\n", + " Forward pass of DiT with classifier-free guidance.\n", + " \"\"\"\n", + " half = x[: len(x) // 2]\n", + " combined = torch.cat([half, half], dim=0)\n", + " model_out = self.forward(combined, t, y)\n", + " eps, rest = model_out[:, :self.in_channels], model_out[:, self.in_channels:]\n", + " cond_eps, uncond_eps = torch.split(eps, len(eps) // 2, dim=0)\n", + " half_eps = uncond_eps + 
cfg_scale * (cond_eps - uncond_eps)\n", + " eps = torch.cat([half_eps, half_eps], dim=0)\n", + " return torch.cat([eps, rest], dim=1)\n", + "\n", + "#################################################################################\n", + "# Sine/Cosine Positional Embedding Functions #\n", + "#################################################################################\n", + "\n", + "def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, extra_tokens=0):\n", + " \"\"\"\n", + " grid_size: (grid_size_h, grid_size_w)\n", + " return:\n", + " pos_embed: [grid_size_h*grid_size_w, embed_dim] 或 [1+grid_size_h*grid_size_w, embed_dim]\n", + " \"\"\"\n", + " grid_h = np.arange(grid_size[0], dtype=np.float32)\n", + " grid_w = np.arange(grid_size[1], dtype=np.float32)\n", + " grid = np.meshgrid(grid_w, grid_h) # 这里 w 先行\n", + " grid = np.stack(grid, axis=0)\n", + "\n", + " grid = grid.reshape([2, grid_size[0] * grid_size[1]])\n", + " pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)\n", + " if cls_token and extra_tokens > 0:\n", + " pos_embed = np.concatenate([np.zeros([extra_tokens, embed_dim]), pos_embed], axis=0)\n", + " return pos_embed\n", + "\n", + "def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):\n", + " assert embed_dim % 2 == 0\n", + "\n", + " # 使用一半的维度来编码 grid_h\n", + " emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0]) # (H*W, D/2)\n", + " emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1]) # (H*W, D/2)\n", + "\n", + " emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D)\n", + " return emb\n", + "\n", + "def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):\n", + " \"\"\"\n", + " embed_dim: 每个位置的输出维度\n", + " pos: 要编码的位置列表:大小 (M,)\n", + " out: (M, D)\n", + " \"\"\"\n", + " assert embed_dim % 2 == 0\n", + " omega = np.arange(embed_dim // 2, dtype=np.float64)\n", + " omega /= embed_dim / 2.\n", + " omega = 1. 
/ 10000**omega # (D/2,)\n", + "\n", + " pos = pos.reshape(-1) # (M,)\n", + " out = np.einsum('m,d->md', pos, omega) # (M, D/2)\n", + "\n", + " emb_sin = np.sin(out) # (M, D/2)\n", + " emb_cos = np.cos(out) # (M, D/2)\n", + "\n", + " emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D)\n", + " return emb\n", + "\n", + "#################################################################################\n", + "# Other Components #\n", + "#################################################################################\n", + "\n", + "def stride_generator(N, reverse=False):\n", + " strides = [1, 2]*10\n", + " if reverse: return list(reversed(strides[:N]))\n", + " else: return strides[:N]\n", + "\n", + "class ConvSC(nn.Module):\n", + " def __init__(self, in_channels, out_channels, stride=1, transpose=False):\n", + " super(ConvSC, self).__init__()\n", + " if transpose:\n", + " self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=3, stride=stride,\n", + " padding=1, output_padding=stride-1)\n", + " else:\n", + " self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1)\n", + " self.norm = nn.BatchNorm2d(out_channels)\n", + " self.act = nn.GELU()\n", + "\n", + " def forward(self, x):\n", + " return self.act(self.norm(self.conv(x)))\n", + "\n", + "class Inception(nn.Module):\n", + " def __init__(self, in_channels, hidden_channels, out_channels, incep_ker=[3,5,7,11], groups=4):\n", + " super(Inception, self).__init__()\n", + " self.branch1 = nn.Conv2d(in_channels, hidden_channels, kernel_size=1)\n", + " self.branch2 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[0], padding=incep_ker[0]//2, groups=groups)\n", + " self.branch3 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[1], padding=incep_ker[1]//2, groups=groups)\n", + " self.branch4 = nn.Conv2d(in_channels, hidden_channels, kernel_size=incep_ker[2], padding=incep_ker[2]//2, groups=groups)\n", + " self.branch5 = nn.Conv2d(in_channels, 
hidden_channels, kernel_size=incep_ker[3], padding=incep_ker[3]//2, groups=groups)\n", + " self.conv = nn.Conv2d(hidden_channels * 5, out_channels, kernel_size=1)\n", + " self.norm = nn.BatchNorm2d(out_channels)\n", + " self.act = nn.GELU()\n", + "\n", + " def forward(self, x):\n", + " x1 = self.branch1(x)\n", + " x2 = self.branch2(x)\n", + " x3 = self.branch3(x)\n", + " x4 = self.branch4(x)\n", + " x5 = self.branch5(x)\n", + " x = torch.cat([x1, x2, x3, x4, x5], dim=1)\n", + " x = self.conv(x)\n", + " x = self.act(self.norm(x))\n", + " return x\n", + "\n", + "class Encoder(nn.Module):\n", + " def __init__(self, C_in, C_hid, N_S):\n", + " super(Encoder, self).__init__()\n", + " strides = stride_generator(N_S)\n", + " layers = [ConvSC(C_in, C_hid, stride=strides[0])]\n", + " for s in strides[1:]:\n", + " layers.append(ConvSC(C_hid, C_hid, stride=s))\n", + " self.enc = nn.Sequential(*layers)\n", + "\n", + " def forward(self, x):\n", + " skips = []\n", + " for layer in self.enc:\n", + " x = layer(x)\n", + " skips.append(x)\n", + " return x, skips # 返回所有的 skips\n", + "\n", + "class Decoder(nn.Module):\n", + " def __init__(self, C_hid, C_out, N_S):\n", + " super(Decoder, self).__init__()\n", + " strides = stride_generator(N_S, reverse=True)\n", + " layers = []\n", + " for s in strides[:-1]:\n", + " layers.append(ConvSC(C_hid, C_hid, stride=s, transpose=True))\n", + " layers.append(ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True))\n", + " self.dec = nn.Sequential(*layers)\n", + " self.readout = nn.Conv2d(C_hid, C_out, 1)\n", + "\n", + " def forward(self, hid, skip):\n", + " for i in range(len(self.dec)-1):\n", + " hid = self.dec[i](hid)\n", + " hid = self.dec[-1](torch.cat([hid, skip], dim=1))\n", + " return self.readout(hid)\n", + "\n", + "\n", + "class Dit_plus(nn.Module):\n", + " def __init__(self, shape_in, hid_S=32, hid_T=64, N_S=4, N_T=8, time_step=1000, incep_ker=[3,5,7,11], groups=4, \n", + " in_time_seq_length=10, out_time_seq_length=10):\n", + " 
super(Dit_plus, self).__init__()\n", + " B, T, C, H, W = shape_in\n", + "\n", + " strides = stride_generator(N_S)\n", + " num_stride2_layers = strides[:N_S].count(2)\n", + " self.downsample_factor = 2 ** num_stride2_layers\n", + " self.H1 = H // self.downsample_factor\n", + " self.W1 = W // self.downsample_factor\n", + "\n", + " self.in_time_seq_length = in_time_seq_length\n", + " self.out_time_seq_length = out_time_seq_length\n", + " self.enc = Encoder(C, hid_S, N_S)\n", + " # self.hid = Temporal_evo(T*hid_S, hid_T, N_T, self.H1, self.W1, incep_ker, groups)\n", + " self.dit_block = DiT(\n", + " input_size=(self.H1, self.W1),\n", + " patch_size=(1, 1), # Changed patch_size to (1, 1)\n", + " in_channels=T*hid_S,\n", + " hidden_size=256,\n", + " depth=12,\n", + " num_heads=2,\n", + " mlp_ratio=4.0,\n", + " class_dropout_prob=0.0,\n", + " num_classes=None,\n", + " learn_sigma=False,\n", + " )\n", + "\n", + " self.dec = Decoder(hid_S, C, N_S)\n", + " self.time_step = torch.randint(0, time_step, (B,))\n", + "\n", + " def forward(self, x_raw):\n", + " B, T, C, H, W = x_raw.shape\n", + " x = x_raw.view(B*T, C, H, W)\n", + "\n", + " embed, skips = self.enc(x)\n", + " skip = skips[0] \n", + " _, C_, H_, W_ = embed.shape\n", + "\n", + " z = embed.view(B, T, C_, H_, W_)\n", + " bias = z.reshape(B, T*C_, H_, W_)\n", + " bias_hid = self.dit_block(bias, self.time_step)\n", + " \n", + " hid = bias_hid.reshape(B*T, C_, H_, W_) # Now the dimensions should match\n", + " Y = self.dec(hid, skip)\n", + " \n", + " Y = Y.reshape(B, T, -1, H, W)\n", + " return Y" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "1bce0311-b5b9-4429-b35b-9db334e86a76", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([4, 1, 2, 360, 720])\n" + ] + } + ], + "source": [ + "model = Dit_plus(shape_in=(4, 1, 2, 360, 720), hid_S=32, hid_T=64, N_S=4, N_T=8, time_step=1000,)\n", + "inputs = torch.randn(4, 1, 2, 360, 720)\n", + "output = 
model(inputs)\n", + "print(output.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "383ec020-752a-48c7-8bc6-f2a82aa1826b", + "metadata": {}, + "outputs": [], + "source": [ + "# best parameters\n", + "# lr=1e-3, hid_S=32, hid_T=64, N_S=4, N_T=8, time_step=1000," + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "37fb1ee3-3476-483c-9520-bdffbf656781", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/model_baseline/simvp.py b/Exp3_Kuroshio_forecasting/model_baseline/simvp.py new file mode 100644 index 0000000000000000000000000000000000000000..175e0fe960a2ec23fa678b52a12d062366f1d814 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/simvp.py @@ -0,0 +1,180 @@ +from torch import nn +import torch +from torch import nn + +class BasicConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, transpose=False, act_norm=False): + super(BasicConv2d, self).__init__() + self.act_norm=act_norm + if not transpose: + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding) + else: + self.conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,output_padding=stride //2 ) + self.norm = nn.GroupNorm(2, out_channels) + self.act = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.act(self.norm(y)) + return y + + +class ConvSC(nn.Module): + def __init__(self, C_in, 
C_out, stride, transpose=False, act_norm=True): + super(ConvSC, self).__init__() + if stride == 1: + transpose = False + self.conv = BasicConv2d(C_in, C_out, kernel_size=3, stride=stride, + padding=1, transpose=transpose, act_norm=act_norm) + + def forward(self, x): + y = self.conv(x) + return y + + +class GroupConv2d(nn.Module): + def __init__(self, in_channels, out_channels, kernel_size, stride, padding, groups, act_norm=False): + super(GroupConv2d, self).__init__() + self.act_norm = act_norm + if in_channels % groups != 0: + groups = 1 + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, padding=padding,groups=groups) + self.norm = nn.GroupNorm(groups,out_channels) + self.activate = nn.LeakyReLU(0.2, inplace=True) + + def forward(self, x): + y = self.conv(x) + if self.act_norm: + y = self.activate(self.norm(y)) + return y + + +class Inception(nn.Module): + def __init__(self, C_in, C_hid, C_out, incep_ker=[3,5,7,11], groups=8): + super(Inception, self).__init__() + self.conv1 = nn.Conv2d(C_in, C_hid, kernel_size=1, stride=1, padding=0) + layers = [] + for ker in incep_ker: + layers.append(GroupConv2d(C_hid, C_out, kernel_size=ker, stride=1, padding=ker//2, groups=groups, act_norm=True)) + self.layers = nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + y = 0 + for layer in self.layers: + y += layer(x) + return y + + + + +def stride_generator(N, reverse=False): + strides = [1, 2]*10 + if reverse: return list(reversed(strides[:N])) + else: return strides[:N] + +class Encoder(nn.Module): + def __init__(self,C_in, C_hid, N_S): + super(Encoder,self).__init__() + strides = stride_generator(N_S) + self.enc = nn.Sequential( + ConvSC(C_in, C_hid, stride=strides[0]), + *[ConvSC(C_hid, C_hid, stride=s) for s in strides[1:]] + ) + + def forward(self,x):# B*4, 3, 128, 128 + enc1 = self.enc[0](x) + latent = enc1 + for i in range(1,len(self.enc)): + latent = self.enc[i](latent) + return latent,enc1 + + +class 
Decoder(nn.Module): + def __init__(self,C_hid, C_out, N_S): + super(Decoder,self).__init__() + strides = stride_generator(N_S, reverse=True) + self.dec = nn.Sequential( + *[ConvSC(C_hid, C_hid, stride=s, transpose=True) for s in strides[:-1]], + ConvSC(2*C_hid, C_hid, stride=strides[-1], transpose=True) + ) + self.readout = nn.Conv2d(C_hid, C_out, 1) + + def forward(self, hid, enc1=None): + for i in range(0,len(self.dec)-1): + hid = self.dec[i](hid) + Y = self.dec[-1](torch.cat([hid, enc1], dim=1)) + Y = self.readout(Y) + return Y + +class Mid_Xnet(nn.Module): + def __init__(self, channel_in, channel_hid, N_T, incep_ker = [3,5,7,11], groups=8): + super(Mid_Xnet, self).__init__() + + self.N_T = N_T + enc_layers = [Inception(channel_in, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)] + for i in range(1, N_T-1): + enc_layers.append(Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + enc_layers.append(Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + + dec_layers = [Inception(channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)] + for i in range(1, N_T-1): + dec_layers.append(Inception(2*channel_hid, channel_hid//2, channel_hid, incep_ker= incep_ker, groups=groups)) + dec_layers.append(Inception(2*channel_hid, channel_hid//2, channel_in, incep_ker= incep_ker, groups=groups)) + + self.enc = nn.Sequential(*enc_layers) + self.dec = nn.Sequential(*dec_layers) + + def forward(self, x): + B, T, C, H, W = x.shape + x = x.reshape(B, T*C, H, W) + + # encoder + skips = [] + z = x + for i in range(self.N_T): + z = self.enc[i](z) + if i < self.N_T - 1: + skips.append(z) + + # decoder + z = self.dec[0](z) + for i in range(1, self.N_T): + z = self.dec[i](torch.cat([z, skips[-i]], dim=1)) + + y = z.reshape(B, T, C, H, W) + return y + + +class SimVP(nn.Module): + def __init__(self, shape_in, hid_S=16, hid_T=256, N_S=4, N_T=8, output_dim = 1, 
incep_ker=[3,5,7,11], groups=8): + super(SimVP, self).__init__() + T, C, H, W = shape_in + self.output_dim = output_dim + self.enc = Encoder(C, hid_S, N_S) + self.hid = Mid_Xnet(T*hid_S, hid_T, N_T, incep_ker, groups) + self.dec = Decoder(hid_S, self.output_dim, N_S) + + + def forward(self, x_raw): + B, T, C, H, W = x_raw.shape + x = x_raw.view(B*T, C, H, W) + + embed, skip = self.enc(x) + _, C_, H_, W_ = embed.shape + + z = embed.view(B, T, C_, H_, W_) + hid = self.hid(z) + hid = hid.reshape(B*T, C_, H_, W_) + + Y = self.dec(hid, skip) + Y = Y.reshape(B, T, -1, H, W) + return Y + + +if __name__ == "__main__": + inputs = torch.randn(1, 10, 2, 64, 448) + model = SimVP(shape_in=(10, 2, 64, 448), hid_S=32, hid_T=128, output_dim = 2) + outputs = model(inputs) + print(outputs.shape) \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/model_baseline/utilities3.py b/Exp3_Kuroshio_forecasting/model_baseline/utilities3.py new file mode 100644 index 0000000000000000000000000000000000000000..2ce7d1e4e90338075093ee87293d575f7a0e36d9 --- /dev/null +++ b/Exp3_Kuroshio_forecasting/model_baseline/utilities3.py @@ -0,0 +1,232 @@ +import torch +import numpy as np +import scipy.io +import h5py +import sklearn.metrics +import torch.nn as nn +from scipy.ndimage import gaussian_filter + + +################################################# +# +# Utilities +# +################################################# +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + +# reading data +class MatReader(object): + def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): + super(MatReader, self).__init__() + + self.to_torch = to_torch + self.to_cuda = to_cuda + self.to_float = to_float + + self.file_path = file_path + + self.data = None + self.old_mat = None + self._load_file() + + def _load_file(self): + try: + self.data = scipy.io.loadmat(self.file_path) + self.old_mat = True + except: + self.data = h5py.File(self.file_path) + self.old_mat = 
False + + def load_file(self, file_path): + self.file_path = file_path + self._load_file() + + def read_field(self, field): + x = self.data[field] + + if not self.old_mat: + x = x[()] + x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) + + if self.to_float: + x = x.astype(np.float32) + + if self.to_torch: + x = torch.from_numpy(x) + + if self.to_cuda: + x = x.cuda() + + return x + + def set_cuda(self, to_cuda): + self.to_cuda = to_cuda + + def set_torch(self, to_torch): + self.to_torch = to_torch + + def set_float(self, to_float): + self.to_float = to_float + +# normalization, pointwise gaussian +class UnitGaussianNormalizer(object): + def __init__(self, x, eps=0.00001): + super(UnitGaussianNormalizer, self).__init__() + + # x could be in shape of ntrain*n or ntrain*T*n or ntrain*n*T + self.mean = torch.mean(x, 0) + self.std = torch.std(x, 0) + self.eps = eps + + def encode(self, x): + x = (x - self.mean) / (self.std + self.eps) + return x + + def decode(self, x, sample_idx=None): + if sample_idx is None: + std = self.std + self.eps # n + mean = self.mean + else: + if len(self.mean.shape) == len(sample_idx[0].shape): + std = self.std[sample_idx] + self.eps # batch*n + mean = self.mean[sample_idx] + if len(self.mean.shape) > len(sample_idx[0].shape): + std = self.std[:,sample_idx]+ self.eps # T*batch*n + mean = self.mean[:,sample_idx] + + # x is in shape of batch*n or T*batch*n + x = (x * std) + mean + return x + + def cuda(self): + self.mean = self.mean.cuda() + self.std = self.std.cuda() + + def cpu(self): + self.mean = self.mean.cpu() + self.std = self.std.cpu() + +# normalization, Gaussian +class GaussianNormalizer(object): + def __init__(self, x, eps=0.00001): + super(GaussianNormalizer, self).__init__() + + self.mean = torch.mean(x) + self.std = torch.std(x) + self.eps = eps + + def encode(self, x): + x = (x - self.mean) / (self.std + self.eps) + return x + + def decode(self, x, sample_idx=None): + x = (x * (self.std + self.eps)) + self.mean + return x 
+ + def cuda(self): + self.mean = self.mean.cuda() + self.std = self.std.cuda() + + def cpu(self): + self.mean = self.mean.cpu() + self.std = self.std.cpu() + + +# normalization, scaling by range +class RangeNormalizer(object): + def __init__(self, x, low=0.0, high=1.0): + super(RangeNormalizer, self).__init__() + mymin = torch.min(x, 0)[0].view(-1) + mymax = torch.max(x, 0)[0].view(-1) + + self.a = (high - low)/(mymax - mymin) + self.b = -self.a*mymax + high + + def encode(self, x): + s = x.size() + x = x.view(s[0], -1) + x = self.a*x + self.b + x = x.view(s) + return x + + def decode(self, x): + s = x.size() + x = x.view(s[0], -1) + x = (x - self.b)/self.a + x = x.view(s) + return x + +#loss function with rel/abs Lp loss +class LpLoss(object): + def __init__(self, d=2, p=2, size_average=True, reduction=True): + super(LpLoss, self).__init__() + + #Dimension and Lp-norm type are postive + assert d > 0 and p > 0 + + self.d = d + self.p = p + self.reduction = reduction + self.size_average = size_average + + def abs(self, x, y): + num_examples = x.size()[0] + + #Assume uniform mesh + h = 1.0 / (x.size()[1] - 1.0) + + all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) + + if self.reduction: + if self.size_average: + return torch.mean(all_norms) + else: + return torch.sum(all_norms) + + return all_norms + + def rel(self, x, y): + num_examples = x.size()[0] + + diff_norms = torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) + y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) + + if self.reduction: + if self.size_average: + return torch.mean(diff_norms/y_norms) + else: + return torch.sum(diff_norms/y_norms) + + return diff_norms/y_norms + + def __call__(self, x, y): + return self.rel(x, y) + +# A simple feedforward neural network +class DenseNet(torch.nn.Module): + def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): + super(DenseNet, self).__init__() 
+ + self.n_layers = len(layers) - 1 + + assert self.n_layers >= 1 + + self.layers = nn.ModuleList() + + for j in range(self.n_layers): + self.layers.append(nn.Linear(layers[j], layers[j+1])) + + if j != self.n_layers - 1: + if normalize: + self.layers.append(nn.BatchNorm1d(layers[j+1])) + + self.layers.append(nonlinearity()) + + if out_nonlinearity is not None: + self.layers.append(out_nonlinearity()) + + def forward(self, x): + for _, l in enumerate(self.layers): + x = l(x) + + return x \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/plt_triton/Acc.ipynb b/Exp3_Kuroshio_forecasting/plt_triton/Acc.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b9b3eeb5a62db00666923f22283192e33a9b142c --- /dev/null +++ b/Exp3_Kuroshio_forecasting/plt_triton/Acc.ipynb @@ -0,0 +1,1089 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 16, + "id": "b67b2d79-a190-4adc-9c9e-fb5bb3738626", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "\n", + "
<xarray.Dataset> Size: 12GB\n",
+       "Dimensions:    (time: 11322, latitude: 256, longitude: 256)\n",
+       "Coordinates:\n",
+       "  * latitude   (latitude) float32 1kB 10.06 10.19 10.31 ... 41.69 41.81 41.94\n",
+       "  * longitude  (longitude) float32 1kB 123.1 123.2 123.3 ... 154.7 154.8 154.9\n",
+       "  * time       (time) datetime64[ns] 91kB 1993-01-01 1993-01-02 ... 2023-12-31\n",
+       "Data variables:\n",
+       "    vgos       (time, latitude, longitude) float64 6GB ...\n",
+       "    ugos       (time, latitude, longitude) float64 6GB ...\n",
+       "Attributes: (12/43)\n",
+       "    Conventions:                     CF-1.6\n",
+       "    Metadata_Conventions:            Unidata Dataset Discovery v1.0\n",
+       "    cdm_data_type:                   Grid\n",
+       "    comment:                         Sea Surface Height measured by Altimetry...\n",
+       "    contact:                         servicedesk.cmems@mercator-ocean.eu\n",
+       "    creator_email:                   servicedesk.cmems@mercator-ocean.eu\n",
+       "    ...                              ...\n",
+       "    time_coverage_duration:          P1D\n",
+       "    time_coverage_end:               2023-12-31T12:00:00Z\n",
+       "    time_coverage_resolution:        P1D\n",
+       "    time_coverage_start:             2023-12-30T12:00:00Z\n",
+       "    title:                           DT merged all satellites Global Ocean Gr...\n",
+       "    copernicusmarine_version:        1.3.3
" + ], + "text/plain": [ + " Size: 12GB\n", + "Dimensions: (time: 11322, latitude: 256, longitude: 256)\n", + "Coordinates:\n", + " * latitude (latitude) float32 1kB 10.06 10.19 10.31 ... 41.69 41.81 41.94\n", + " * longitude (longitude) float32 1kB 123.1 123.2 123.3 ... 154.7 154.8 154.9\n", + " * time (time) datetime64[ns] 91kB 1993-01-01 1993-01-02 ... 2023-12-31\n", + "Data variables:\n", + " vgos (time, latitude, longitude) float64 6GB ...\n", + " ugos (time, latitude, longitude) float64 6GB ...\n", + "Attributes: (12/43)\n", + " Conventions: CF-1.6\n", + " Metadata_Conventions: Unidata Dataset Discovery v1.0\n", + " cdm_data_type: Grid\n", + " comment: Sea Surface Height measured by Altimetry...\n", + " contact: servicedesk.cmems@mercator-ocean.eu\n", + " creator_email: servicedesk.cmems@mercator-ocean.eu\n", + " ... ...\n", + " time_coverage_duration: P1D\n", + " time_coverage_end: 2023-12-31T12:00:00Z\n", + " time_coverage_resolution: P1D\n", + " time_coverage_start: 2023-12-30T12:00:00Z\n", + " title: DT merged all satellites Global Ocean Gr...\n", + " copernicusmarine_version: 1.3.3" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import xarray as xr\n", + "import matplotlib.pyplot as plt\n", + "\n", + "ds = xr.open_dataset('./KURO.nc')\n", + "\n", + "ds\n" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "712ebb37-0644-4814-9c54-b16e84cd2bad", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "idex:10227\n" + ] + } + ], + "source": [ + "import xarray as xr\n", + "\n", + "ds = xr.open_dataset('./KURO.nc')\n", + "\n", + "time_index = ds.time.to_index()\n", + "\n", + "target_date = '2021-01-01'\n", + "index = time_index.get_loc(target_date)\n", + "\n", + "print(f\"idex:{index}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "41db1068-8c61-495b-b12c-4026677253a5", + "metadata": {}, + "outputs": [ + { + 
"name": "stdout", + "output_type": "stream", + "text": [ + "已保存 1993.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 1994.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 1995.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 1996.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 1997.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 1998.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 1999.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2000.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 2001.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2002.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2003.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2004.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 2005.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2006.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2007.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2008.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 2009.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2010.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2011.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2012.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 2013.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2014.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2015.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2016.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 2017.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2018.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2019.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2020.h5,数据维度:(366, 2, 256, 256)\n", + "已保存 2021.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2022.h5,数据维度:(365, 2, 256, 256)\n", + "已保存 2023.h5,数据维度:(365, 2, 256, 256)\n" + ] + } + ], + "source": [ + "import xarray as xr\n", + "import numpy as np\n", + "import h5py\n", + "\n", + "ds = xr.open_dataset('./KURO.nc')\n", + "\n", + "years = np.unique(ds.time.dt.year.values)\n", + "\n", + "for year in years:\n", + " ds_year = ds.sel(time=ds.time.dt.year == year)\n", + " \n", + " ugos_data = ds_year.ugos.values # (days, 256, 256)\n", + " ugos_data = np.flip(ugos_data, 1)\n", + " vgos_data = ds_year.vgos.values\n", + " vgos_data = np.flip(vgos_data, 1)\n", + " combined = np.stack([ugos_data, vgos_data], axis=1) #(days, 2, 256, 256)\n", + " \n", + " with 
h5py.File(f'./data_h5/{year}.h5', 'w') as f:\n", + " f.create_dataset('fields', data=combined)\n", + " \n", + " print(f'Have saved {year}.h5, data dimension: {combined.shape}')" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "9be50b7f-173b-416d-afc1-abe0eeb26444", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "已处理年份 1993,耗时 2.33 秒。\n", + "已处理年份 1994,耗时 1.32 秒。\n", + "已处理年份 1995,耗时 1.18 秒。\n", + "已处理年份 1996,耗时 1.10 秒。\n", + "已处理年份 1997,耗时 1.07 秒。\n", + "已处理年份 1998,耗时 1.65 秒。\n", + "已处理年份 1999,耗时 3.14 秒。\n", + "已处理年份 2000,耗时 0.78 秒。\n", + "已处理年份 2001,耗时 0.79 秒。\n", + "已处理年份 2002,耗时 0.69 秒。\n", + "已处理年份 2003,耗时 0.88 秒。\n", + "已处理年份 2004,耗时 0.81 秒。\n", + "已处理年份 2005,耗时 0.79 秒。\n", + "已处理年份 2006,耗时 0.80 秒。\n", + "已处理年份 2007,耗时 0.75 秒。\n", + "已处理年份 2008,耗时 0.69 秒。\n", + "已处理年份 2009,耗时 0.71 秒。\n", + "已处理年份 2010,耗时 0.78 秒。\n", + "已处理年份 2011,耗时 0.72 秒。\n", + "已处理年份 2012,耗时 0.78 秒。\n", + "已处理年份 2013,耗时 0.89 秒。\n", + "已处理年份 2014,耗时 0.91 秒。\n", + "已处理年份 2015,耗时 0.81 秒。\n", + "已处理年份 2016,耗时 0.74 秒。\n", + "已处理年份 2017,耗时 0.80 秒。\n", + "已处理年份 2018,耗时 0.78 秒。\n", + "已处理年份 2019,耗时 0.80 秒。\n", + "总共处理了 27 个年份的数据。\n", + "气候平均态已保存到 ./climate_mean.npy\n" + ] + } + ], + "source": [ + "import os\n", + "import numpy as np\n", + "import h5py\n", + "import time \n", + "\n", + "#calculate climate mean\n", + "def compute_climate_mean(data_dir, start_year=1993, end_year=2019, output_file='./climate_mean.npy'):\n", + " \n", + " selected_vars = range(0, 2)\n", + " total_vars = 2\n", + " total_days = 365\n", + " spatial_dims = (256, 256)\n", + " \n", + " sum_data = None\n", + " valid_years = 0 \n", + " \n", + " for year in range(start_year, end_year + 1):\n", + " file_path = os.path.join(data_dir, f'{year}.h5')\n", + " \n", + " if not os.path.exists(file_path):\n", + " print(f\"警告:文件 {file_path} 不存在,跳过该年份。\")\n", + " continue\n", + " \n", + " start_time = time.time() \n", + " \n", + " with h5py.File(file_path, 'r') as 
ds:\n", + " data = ds['fields'][:total_days] # (365, 97, 121, 240)\n", + " \n", + " selected_data = data[:, selected_vars, :, :] # 形状为 (365, 46, 121, 240)\n", + " \n", + " if sum_data is None:\n", + " sum_data = np.zeros_like(selected_data, dtype=np.float64)\n", + " \n", + " sum_data += selected_data\n", + " valid_years += 1\n", + " \n", + " end_time = time.time() # \n", + " elapsed_time = end_time - start_time\n", + " print(f\"已处理年份 {year},耗时 {elapsed_time:.2f} 秒。\")\n", + " \n", + " if valid_years == 0:\n", + " raise ValueError(\"未找到任何有效的数据文件。请检查数据目录和年份范围。\")\n", + " \n", + " print(f\"总共处理了 {valid_years} 个年份的数据。\")\n", + " \n", + " mean_data = sum_data / valid_years # (365, 46, 121, 240)\n", + " \n", + " output_array = np.zeros((total_days, total_vars, spatial_dims[0], spatial_dims[1]), dtype=np.float32)\n", + " \n", + " for idx, var_idx in enumerate(selected_vars):\n", + " output_array[:, var_idx, :, :] = mean_data[:, idx, :, :]\n", + " \n", + " np.save(output_file, output_array)\n", + " print(f\"气候平均态已保存到 {output_file}\")\n", + "\n", + "\n", + "data_directory = './data_h5'\n", + "output_filename = './climate_mean.npy'\n", + "compute_climate_mean(data_dir=data_directory, start_year=1993, end_year=2019, output_file=output_filename)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "0155c8e4-0155-48e7-b0b3-8cc66973e474", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(365, 2, 256, 256)\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgkAAAGiCAYAAABtUVVIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9e5BkyV0fjn4y85xT1d0zPbMP7a5Wz0XCQiaQZIS1LMZxDaxZYYKwrggHIrAlBIiwI0QErB2g5RqtBATCiJDXwNrrAAmJCHMlCNv8s74CvI69XNuL+Vk2PyOQFu0iaZ/z6unpR3VVnTon8/6R+c38Zp48VT3T3TPTs/WN6K6q88iT55XfT36+L2GMMVjKUpaylKUsZSlLSURe6w4sZSlLWcpSlrKU61OWIGEpS1nKUpaylKVkZQkSlrKUpSxlKUtZSlaWIGEpS1nKUpaylKVkZQkSlrKUpSxlKUtZSlaWIGEpS1nKUpaylKVkZQkSlrKUpSxlKUtZSlaWIGEpS1nKUpaylKVkZQkSlrKUpSxlKUtZSlaWIGEpS1nKUpaylKVkZQkSlrKUpSxlKUs5RPmjP/ojfM/3fA/uvPNOCCHwe7/3ewv3efzxx/GN3/iNGAwGeP3rX49PfvKTnW0efvhhvPa1r8VwOMTdd9+NP/mTPzn8zidyzUDCtTjZpSxlKUtZylKOWkajEd785jfj4Ycf3tf2X/7yl/Hd3/3d+LZv+zb86Z/+KX78x38cP/IjP4Lf//3f99t85jOfwf33348HH3wQ/+t//S+8+c1vxn333Ydz584d1WkAAMS1KPD0mc98Bu9+97vxyCOP4O6778ZDDz2E3/3d38WTTz6J22677Wp3ZylLWcpSlrKUIxEhBP7jf/yPeMc73tG7zU/91E/h0Ucfxec//3m/7F3vehcuXbqEz372swCAu+++G3/zb/5N/Nqv/RoAQGuNV73qVfixH/sxfOADHziy/hdH1vIc+djHPob3ve99eO973wsAeOSRR/Doo4/iE5/4RPZkp9MpptOp/621xsWLF3HLLbdACHHV+r2UpSxlKUs5HDHGYGdnB3feeSekPBpSezKZoK7rQ2nLGNPRN4PBAIPB4MBtP/HEE7j33nujZffddx9+/Md/HABQ1zU+97nP4YEHHvDrpZS499578cQTTxz4+PPkqoOEKznZj3zkI/jwhz98tbq4lKUsZSlLuUry7LPP4pWvfOWhtzuZTHDLygnsoT2U9k6cOIHd3d1o2YMPPogPfehDB277zJkzuP3226Nlt99+O7a3tzEej7G5uYm2bbPbfPGLXzzw8efJVQcJFy5cuOyTfeCBB3D//ff731tbW3j1q1+NZ599Fuvr60fa3xtRLu7swRiD1gCtNjAAtAGkAAQAKQSECL+F+w0ASggotg66AYyGMNo2QiLtDkZIgP4A/5mzcXU4IaO739NjGR3aVAqQBSAkWtM9hgjdio/VcxzeXzoHatMfPnMe+xHqiz2Xxn5SF+h6ycLfG/oEACXsH51765avra5kjzX73H+CqScw9RSD/8f3++XNF/7IHm8yBnQLo91gqltAaxjdwtRToJnBNDUgFURRQlRDyBOnIVdPAINV6MFJmGoFRpYw1QpaUdhnyxgYY/taSAGla4i2tc8MYO+VFPEzItiM0uj43kQXsGdb/nz43waAjtuj7Xj7Qtq+UPtcku3sXwEjld1Pqe45pM9S7hxy3/l+Rttrlp5L7pxpmTYQuo3bYedlhHDPl/DL/KHpuyzYOunPM7pf7vkE4lc/umz5xRAARrs7+Nqv/VqcPHmyZ6uDSV3X2EOLd+MVqA7ofldD47d2n+/onMNgEa53uSbmhsuVPkpnfX19CRKuQFpZeHDQakA7txQODggMiHmgoG0AVPlB0A8kEoYPOHNAAsCUd05xu+9WCbABnh/HDfQ6AQkcFEiRX84H3eg8evqeHgNseSopOIl+s/NI205xlwCgTAMxmwAGEMbAqBKQEoO
T+Xdhtn4SemSAUmLA3pfp+BJMM4OZTiww8AdS7LuGKRCUhtSQykCIBlJqyFLADBVMqWDKCqYawlSraA3QuM7TM2RBQhPOVxVB4XAR0m8jCFBot4+jpeleRwrZK9UmVqq553Pefebb8E++rWCKk56/HNBJ988dM3dc/pzrxp6HbuJ1tB6IQYFuIIwIbRH4kQ7UqPCedM47Byb6zo8B1H2Bfif07Ct6B47YZFxBokrvyeWKO8Gj0jl33HEHzp49Gy07e/Ys1tfXsbKyAqUUlFLZbe64445D7w+Xqw4Sbr311mt2skuxooSAlHY+ImBg3OtMr6qSwm7jZqxcKdMAHA1KXGhwcZKbmeVmH/JKxgk/64kHAM4YzN19XpsL9jPuGH1AAejaMFOAANcOPyZvr2UXSAhhAYIARNtAtDOnJJqgSE/elO1H88KXYZoZRDXE5A9+wzIBa+uQa+swbQsMZ55FEBwgUB85wwBYRqEaQhSl3cdoxz40QDsDmgmULCBVGF6EO0+juoAxOViswHXmGcsJtWc0DMIza4wG0D2nrOJn64QxAazwTzqPeQq07xlKjunvG4lOwAQHOQTOdRO24+vZNeOslAUzRQA1qogBVnpO/FoCAZT1nBO9A7l3aS4YBwKjdMRiJzwHAyIK4sppw33IPffcg//0n/5TtOwP//APcc899wAAqqrCW9/6Vjz22GPeAVJrjcceewzvf//7j65juAYg4Vqe7FKsKAEYISAMPFgwBnmTAqfD+awmHczZIC0gYQQ6yns/ChhAfgBPl7G2c0DkUOcmKX0spAcK6bE4eFgEEOZJLxuhG8vg0H1pagB15/pc+o3/F8o1Zn6QCnJlDaKorIKvhkBRQhaVV/5GtxFICOCABvVgkhBFZUFCNYCRhVUwpMwaASG1HV0y1y5lZTrsEafPM5JTWL4dmjEbDYj57fQKgQv3LPt+A/uaXXcYrMzz698nd778U1DAGX/HEgZtITig65KwHkYW3iTXAQb++iXAYM57ywF5DvTn7i19dkDSEQlNdg7UBnBZIGF3dxdPPfWU//3lL38Zf/qnf4qbb74Zr371q/HAAw/g+eefx2/91m8BAP7xP/7H+LVf+zX85E/+JH7oh34I/+W//Bf8zu/8Dh599FHfxv3334/3vOc9+KZv+ia87W1vw0MPPYTRaOQDAI5Krom54Vqd7FKsKBAFCWgjsrZ75RSSYEopCw7mzcgywo+VKsyIeuczN2qXqNPc8TJ0bad/ffRuKjlQkrYFQAjpB5+UHcmBgZw/RKpQ+DY0gFObQpOZQftBVrQziGYCTEbAK77Ot6VnDertEVRVQA0HkCdOQ73sFRBFCVNPIIZrMCvraFdvgilXwjUT0rJEugngkM/qvV0c0KQwyWxA52EMjG4gZtpS24Cnu+n+5cwoYX+mQKMLOIfSZ9dSJKzC5VD89to3wewfdayfNfDnlLxM/txyZgx2XM/MZZ7byLSwD4AQKfeUQXB/Kdjx+80DBum7Sc8LO1cuEUDgfW0dOGgPx6FwkRwak3AZ8j//5//Et33bt/nf5FP3nve8B5/85Cfx4osv4plnnvHr77rrLjz66KP4iZ/4Cfyrf/Wv8MpXvhK/8Ru/gfvuu89v833f9304f/48PvjBD+LMmTN4y1vegs9+9rMd/77DlmuSJwEAfu3Xfg0f/ehH/cn+yq/8Cu6+++597bu9vY1Tp05ha2tr6ZNwBTIZ7YQBxC3jyi3re8CUQ9Z2D0QDpx+YMjOtSPrstblt+yQ32POZF8kC6nS/NHFn2+Tc+AvVua659nqOm86yRVsDZGpoa4jZBHI2htm5CL2zCb19EbOzz6OZ1DBaY3Dzaahb7rBOhidPA80MxZu/MzpGffEM9PAkWkg02mDSGus/IAVKKTqmGw/k0nPombELo2NnuNxMm7dpdFAinH0AgoLmy/pAoj9ABvClQDM9L85kaJ0FBtyhtE+8c2ruHNP3KvU3SM8lZ2JIzzU5x8gsQu8
iBwmyWAi8Ov3IHSf3Pd03PW8A21tbuO21X3tk4zjpifer12Cw37GkR6ZG49far74kdc41c1x8//vfvzQvXEeykAYXEgbMVkuS0sgIM2BobcdoNuvoBQV9s730d6o4gCzt33FuBKyXppAQ0F1TCLXDlU46m6Pt+pgGtx/1g0wPUd9yCpYflx+TbU+DK804BTP7mKa2DohtCzkcolQSJ/7hg/n+JaKrFTSQqFuDmTYYNxoSAkoaP/ui28lNUtKdp/QOrgWLjgl9N5nzzdqnUwWZPhtC+ueJlvU+V/OEPTcdNoM/U9Sm6gcHPsLFBAdN31Z63B6lnrICqUNitg06j5RtS87Tv4fps240IJRrb4HyXARGcn3oA2xp36+S+Gigg7RxOF05lnIsohuWcsiSDHgkHij0zXSF7Npqc23zz1x7PcrXr+5rh/2eS+9zgNA3GLXdc/SAZoHsFzCItK0+BobW8bZMYoMmE0BCPcM5sgmpIMoS8sTpOFIhkenWBnS16hXdiROn/LpnNnbRtAYaBjy0nAgku4/9QX4rSgoUUoTICykYEyFRSBmfcx9YTM7XL0+vD7tvHRo/FT5L7tmWP0cRUEhAqDb2OthPg9wdl4mjKgdLHbNAep4pQDCMuaN+uLbp0/r+LNB+Pcyd0ICRmA/keZ/nOZCm5ooUOOSe+UXMxSHKtTA33EiyBAkvQVkUItix/yvJBjcwsCAgDJ99JU5Q1Fxu0E+dttixc8q6j9bthBJyO7of5MLe+eHdtxYPcGzA3A94yJ5PTli7nVh9Lmn4G/mIaG1NDrqx12u4aiMWTp6GGK7NBQmmHFplju4McLUUEMKyCq0xaLRBq+21nzQtZq3x4bKllBgUEqUSqBSLhtFAIYESdo6qlMiDJf7dJCaGzPWw12CWv5a58xQC9hSLrJmCP/8EEFoPhiRgAkMAeBcF9BlnuQ7yPj0pQ8CAq/dBoOc15/eTnCMBcw8W0kgU/uzKcL6pj0LqT+H3ZZ8dx0qTmBujk7dgRfA2+TXnXRTSsjMAoF7K8/PjI0uQ8BKUlC7N2pz7hAYfDha4pINUtgPx4BPta7RrO14+F8d7ANN0ZqPpbEwYs3j2lUo68+1TejnGZM4s1vZ5DkhATEdnTSgOJHh2qFyd295wda1/XWFNSlIAM9+EQUOX1yXg0toA0JAaUFLZyb20HhRCBE8KDat4Fc3Qc9fHJCaGPoDA9+mTebQ3+53zGaFwUw3OnOQRgUzMMBLW1EDvkWQAwYcspuCA1rl72kl+lHuvctR+7nwlN/0lgICeE59QqdteB9CkwCUH7AHn6+D6yLraAQvUN3l11M/S3HAwWYKEl6AE+rSbizwnWc9pNWew6tkXQD9AoGU0azJ6fsRRMhvNDmz7lUUDbzo495hSco6Scx3bTH9/PZDhrEi6rXAhbaqyA3RRwlQnFpxsvxS6xkpRQQkD1Vog4DqBUkrMhGUYADiwYJVryVBmCucohj6i8ufMsHslp6T4vYn8CfrDFHMAgbKPanZuhv3moMCGtxooKSJwwD386b52IoNS58R0nTu3XnNW33OaCTXuRGB40NAT1YCMKSTtd6Z/fH97jSSA1rIcRoL7APWBhaOWpbnhYLIECS9BabVxqZaZLZnTo0CXdmT05SKfBm6HTW32flzg40w6CDo2QUh0FUcm4YztZ+LxnVHq2Zhw3/m84s9KOmD2AR8jWew+az83s0yOmyrUKIkOtUnnVZQwqoIpVzCrTqC4wvFMNDVK0aBQBVYHFaatwKQ1qFuDSmoUUqDRzOSgpDUxSDsQl1KgkEClBKQIXiv0nKSe/n3MSHr9jRCWak8BG58RZwCB34YuO4ECxL4FxtgU0trYd8ODBLenBQP2HZHs3Mj/ohMNpJN7S2Yivi5RvnPzOeSUKX+mUtaAlqWsQbKfEdaM2AsOOuA0uVduXOBMEPVBMPBmc2hI+0wTcM4xS0u5LmUJEm5w2dzdi+yoQgR/Zp9REYgHBNqWvudmKoC
v/RDa5qCDKTk2IPhZiwQAaQea3CBodOxcGPUrQz/3DTi5QZKdUxzfr7sDHm+KD6aZY3fYC+OSFAkdgaJ0Vpnt9pxEM5G5xLEJ1cteDQA4SCb5wemXAQCmm2eBpsagHEKVBUppnMnAMgl0Wyw7EACCklZxUnpvzlLN5XUyyt/uxGfUBPBUvJ9wWRx7wGt6S3JJqrQDCMaZ4ThA6O2yiAHCXNaAmRwE+YvQcn+CMVDOzrpTM0HOpJACKLYvgOhdzj7P8wBCdNEyz63RIaqFHVPIAhDGvgcMLFwtkCCwMIZjX228VGUJEm5g2dzd8zUaAHpZBLSwNrYIILgBS2SUI4lhg3DLZl0kAsaFyYkuvZxQ9DmzRYeu59R6HxjIDTR9M02gP5uc6xPvg0idHCMGwfQPcomSE3SN+P6MXu4VzuDwtrlCYcrisES01kFQ6BZFtQJVVFBCQgobJklAIeTgF55NUCIGoguF+7iQzKPXuVmhBxz4egILTE7EIsB9EljIdlNYs4MAouJnHiA0dQQOssxB8p4tVJIpK0DfJQGGLmuQfeaBvI9QHzhw50XrsgxCTrQGwNgFAjFGA7Jwjo3GRVUsYE8OUZbmhoPJEiTcwDJtDNLiTQCLfwdimjSRjmORG3i0CwXTiJ27OAAJjWTo5HTQc9tFqXSbJh5YM7RsNrFObqBM7aHpdzegpU5mfpM0lj2Z5afJgiJQlJuNpWAoNUVwIaAgZNC8bXI9D1uc4jOygNQNjCoxLCoU1RCNtkCh0fbek5tieL5iyVbddP2O7olE9x6n14LfR7rWgAetgAWutFeU1yE9RQRgwHGBFLazpBT4uVFoJ4EhziBQiOq8DKVp1E16PUg6iZs4I8ABQrqdr0hZdJ8LkzHv5FgDvq373gsKpOya/+xNiM5JONYgmCJc/5pMtMpSrjtZgoQbTF7Y3M3mENDGuDA1a0/tS56UnZ3w9QhtGmO/EE7gVSN7AQhnEXKKO9k2q2DT/foGVA4Q+pQpbz+tTcH7w0MrkwqUggZu3cBXS8wNuvzcMvZkYXTQZnzw1T3L6dxUlT+3K5Tqttf677Ozf+XOxaAAoFQFJaVlFDSyZcYlkHfo4wfJ3RN3fXwujj78w/blpoU+FmA/QmYTkk7ZdIRnuw8gxECyjZ+tRBETQ5aajvo7OJ9BiDKcygQk0HVFYMeiNNBpP4G5EUK+2RRIdzZofSgqJEK9kwXtHrYsoxsOJkuQcAPJsxd3/ffo/ePjkFPk3szAJDv4IAUbvC0BaYxvn5eV7jhy0fGEy4OQO4HcDHI/lGzKHtDyeQAhGuAytHCfkk8GTwPtB0EICcpdwI+RpZYZOIiAAimbdJZGQhW5/DkWtlT0UYljTqyzmX1uClUBUsKxxh2fFw4Qegv9iExq5AwgzTJHiVCOAykADeE7tB+GWQrhAHS8LAUGAHPu1ToGCJxh0vMBwhWF5BJASJclz31fFEendd6nFPy69b05ERhIF4t0vJD5d+Qqi0oA4BW1cUh9OY6yBAk3kOSAuSBHQsqExxW5d1ZgHsf0W3QTLrUBD4SIiDkZ5niYF0ADoh0wKPGKUfEjKNi20bmJYOfMUq09vgeg/pAZgB1jPw5b0fbULu3Gg8G1toWBcpIAld6ywo5l8dewp4qmKYoIIJhymD/uYQldn3bmzQuFqlAoaSMBkLIJPYqJnUfkXIiEceDXqq8/lEjKtadE6AMgFvojAGDK35kWEnDj2TDdQ9WnYIABBJEyC8QcJEWNxOUkFBKyU+aZTAyRb4rR3QRWCGCVm/Eip8ocg5aCaSb7qoHC38drABCAJZNwUFmChGMuX9nY8d8lRORU1WdL9bHcXLjC6nmZo8EzN8PXyYAS/TZuIPILYHgp35y5gahTRpdG2+bYg7TPpHi5H0Eadtczy/erU+pWsus3j+ngbEHS7xxA8Io/bdubTuLqfUbZYj0NJHb3xjixykpDH5KUd7weszNP+bwVQrcwTe2qVEoor7TYeaShmlw
yDEJWFrA/EdMjrMe8ovYFOtVNU1MEDfrB+ZIBG93YDJCZiIRIeTIgHPkk6OCTQKDApFkw02yJHuz2KFMCCFQkixVoAtBND86e904uimg7BhAWONNeFigQzPSninBuuTLVS7muZQkSjqE8v7nrvLjDyKeiWVAAC8HjnNlTc/Q3STIQ8BkeEGYjkZ+Ak7mpi7MzEziwIDqzfSMEfDGpjtLO2HH5NskMfFHoot0m0SLzBkQTAuSiYS5zTRc6V9Kx6dz7BlxVRsCAlC1FmZw6sZrv72GK658tBe1y/nPzCm2TCjepZJpNn7GonVT5IQNwTeznYkt4x/kRlOgeuwN6PWNCEQohz0HWdMSZg7YO2zYNoFsbTsnAgUiAwVwWgT8jMn4m0myJnVBdUvaZ572TPVHKbh2TXF8uY1knm2OOAbmKAGEZ3XAwWYKE61g2d/cAwIcxUiw3YD8pux1nC3LAIBoI+9IA85m8+55LjtQXAcCp32y4VTrYd8CChHd0ok1kEhO/SPpSQc8BB/5YHJTsRwgscDNG1F4POAACRUzbIrEBc3BADmmqghGShZ4a9KUNPmwp73g96nNfiftnnNlIYv7Mk5QC4tm8FDEoyIKtFDRxSUNlOa3tjk0mLdXXBnsfOo6rPJSR1ZWI0mRzk8KstsBAt64dxhxIZf1jdGuBAnuuIyZsjmQTJSE8Zx0g7EMte3wLuAhX4XURUE2TkaWSy93A+ysS5u8qpWWmfDAHauNwunIsZQkSrmOhEK3WxL8FbBEdzhgUMpMFjuyO6QBC6wCv1HOzLC88tjsX3iUkgNYNAhqUjjUrObDABifvyMUHmf3UgVi0bj/sQa6v886Bn4vJ5JfooY6FbuHtLimFzfY1UsEUlTcpzFqD1oUfzrSJKi8eufQN9CR0rdM0yEJ2qP6OySr5nOes2HGE9Ru675ly4DmGq1Pjg3v7c2e+NgMaaF3TwDQ1oLX/DCfplDcDBLZSZ+X6JPJRPjnRGkJoz9oIjZCYiG/nr1/CFqTXKLoQEuSBaiTAC7ZRPz3wF7Kr/HPtuc+F+RquxnO7lAPLSw4kTEY7GK6dvNbdWCgXd/Y6MdxAHI9uWQLBfA6CIt93TnwAaHVImZrOJvgsy5sZTK9yE5DZSos+te682SEHCtSGkCFXAJccOEiX9TAXQAAInLnorZ+QW5+bdeWucx9tnvved22EtJffWPOCT2hk4OsmXNge4db1/uJNBxam+DvgrQ+oJYwUgC61Pw/A0vb7ZXf4/vTc5HZf4LCaravgWbTG+xqYepJnDzgD5lgEYhCyWRDnnKN/F9oGQhUBKAgZgGZ67uzcsut828l6kby7OXCwoN+5EGS7vHt++66sekBZmhsOJjcMSJiMx92FPcpxsjfqrL8egAOZF3iSlzjkMHZKLCQzJ3BnqzTWf54IUpZsZpKaDNKYaTagZpvkQIHT6kKA0ur2Vvbz1L3sAgUu+zm3y2ENgA4o8Md2bUU+BLRuXj/61u2n7xnRxma0pD+7jKj7Ix7EEqfJuewOUyQdRoorYSAo4rnS5h1U5ykZfo11ZnkGHMwFCAxQmFkN4xw4O6aF6DKo8CkVhFIhimg/CjIFzQ78R/b9eee9CBTwbfz97AKAffkTpMAhU547PebVy7i4jG44iNwwICFVqFbih9NmU2MhZkwBXC+SZoHrRioIb2MTRI22boaT1qjn7fa9sOlgvsAGSYpeGNkPFIwGlfeJwgRZWz6pitGuvS47QRXl9q0M9rs+N8BeruJJ90u2SRNIZQFFboZG18cpRWE0TFujUhWkkGi1gZJA3Vpm3Zjgg3KUoquVXjNA9pJlFG8ngQ9tt0ho5kxABQiJg9JNPTOB/mOks21uRkjyHHCAAKOjEEbPDujEtOA/FwCB6L3KPH/8WaD+athQ25T6z51fek32ea19f5K/rE9CjylhXt+C/8fV8alZysHkWIOE3b0xZBESyfBnjsdJU6GZOB47BguTUQglhJAYrh4hdbsP4WM
+mRQWZnzLJUYh26hra+Fryehxn8uAFD39GQ1jY+CCDXPRAJRS1PaHZyms817SD07V70fmzZTSfuTa7aFHhRucfZOYP/vNJk9a1A+61qRgkuMb3aAQEkpJ9xwYX7mwOmqEAACq8pkQ03PhznOdqp88UiAtlewVenJt0myU/kAaUYEntg2t9/k0aF3PfdhXtj8+iaB7VBQQRgFt68FA9upfRiRDzskvdvDLAYdkn7TfHNABPsdI73uaApZ5ACEDDHrTnkcnmkmudhVkySQcTI41SKAaAgB82Vf7Pd6OIqZ58aHWUBphGbMKsA/8eDyOssRdDdDg5s3xsssBCGnoE31nFdc6Hvj7ml1wG6uMFSaFVBIbMPcEuzXlhUSY9fFBOTcTnyfzqFW2f6eHqdJBMkgalx+A9xMI0RDpyZjk2nPhIWHpAE8AgQGVqGndeI/9ShZQQsBAQBugyIT3HbY0xlb29O9LZnaadTLkAKEntXXHF8TfL21j7A9L0uNx0JP0id9LnsgLcOxYUaAzfGaYyY7vS3KNcrZ+4/wYIkCQyjyQkPTf9jl5JzL9Xlgwih+rr1Barj994P2qgYSlT8JB5FiDhNYYNA4l9IWDSRFqC2gBSOfwxcvYWvs3E4M4hnq/M9lDEJsOVnTOx4eMpQMan5kBWQ9r2zAb3NOZH9+Gf0//fK2AjBIzGqZvhpAqPg7I/E/rlEVmio7C5d+T2SMdP3tsz6ggO3gJjZgt6ZwYn7nFoXLR8RjYzA7IfNDnoV9CRvZ+n4EyUTjCuPh9AEAdvbhGFdkMe4cp40Y7HxgBJSTUolK/fc8oW+6TFCUFs0JpYbEYIEYKWaPPAdIeP8komL47tGkmHLYXUOeUZA5EJc9vtogTvWOJD0Df+ff6BvhjxWY803u+C5gD/pnzNZgHVuYxGFdxXF3KlcuxBgla98eKkyMXf48kYnBAkmuBfByuZpSOkgLCAI0xHUc07YCLopdYFW52Y0KWPiEhRJj9pDL3VNKZiVeObPCaM2jZcrDIDkLdjTPrpQSMi9eGDJkW5zEKySw2S6nKOQMYa0MY9xz0OW9m9vERG5m+GVnYe9FDzfs+JQNsFrhRG1TPIbkuQkoMTpzK9/WQpG6NS0pkUEoBI0VgFrjwvhHr5BUPhezFeSE6Sos8+GUBQ+F/bl2oBZAUgEoiFsC298v5en7cFFykwgBger86ytRoEDktyLeoR0H2hhcmwPFyWbQYhAf/oLCMbeeVfzGXMbDnOh8c8HF04bB5FQGCwiGYG17C7hPHGiT0CQ8T5I8iAYTLUfzaAGtHkO42JydWVzDaG8OIUPoWsOOsdPkD7My7sLMvVbhBSECgAlHjHSGlhniG20cv2nULEqeky/lMum9W3yORgqf9yd+B08H7HVhSoCBk91yjDjigoIEo2oMU3OUekw+a7tx6B1jehznHClUipQ1ZpRmi2Gea4wPIly/s+PfJPhfuiFJERZwABNCwn3ufm3EDSbVA1ZmNCrhnROfb6kTOZGb1WZ+IdP8ce5XOsjtAkitoajtj0Z4TWujNTqz9TtIkEj3nufHXre2cQ3RedE5pmudU9gEOFokRtsDb3H4fsshDMDccefTQdSw3BEjgN5Cqt9nvojerm0l+8+XXUpQAjCu3qGGz60kItHDMCaVfFgVUUQAqUInRAMIrujFvbcMHRyAe/HgmwH2Ag659P+zL/TnSQToHVLgZg2jjDlhgbeRozLmmk75zSpkK2oUfI9dG3yBKfUprTuxXaeZyu/FrZCgz3swukyqEEx6R1K2BEHCRFMEPiHqbLYwEBEVgFkSo9Ik3T8CyCdG9EtEz0PEzSL8jUbDzAELaRgoQ2Ow/223+jCjkAXXaHpLteiqYRhk+CTTOY0GQMF5JH7yjLEWL7PP9nzdORu8+/+Tvq5RXjaY9FMfFly5GON4gQQjhs83lgEF6X+c9kzTwXetnYbhiWYvdvbGd1EpEqZhbWJ8KCaABzdyk7bhgIZMFWBW7xMk
xM9P2wpVvNKgGpzWyqc+r1scpTcGPl9LpmT5E9n8Apm1AM0oPGNJzmNPeIlMD/97rdZ86HfqZXuH6zNEohcCxEs6pEurp834TzBhVAkZjcPpl+9r+sMXAAlhIWy5cIwCFaKaePmfpbyFhfVHyYjNwoqvw/VdWfAnxdtkoE5KM70I2qRKSZzktfczWzXX84+dMn4siA/i2c8Bxeh65ZeRn0Ymecf3oS/uckz6A0AsO+vq2lGMhxxokKBH7GaTg4HKAKm3LwyivlpkhJ7yi39bunnWsZ0l0KApCZ2YRURZGUuhg7ElulgSw2RiLsWCUpREaQAGBpkOTMhK6k6PfNp4MOj1sRrQJzRJV4Qf1nI21I4tmQ+mA5c0NTczAMNraz2KFhCDnMtp3zkwxWmZ06D9rL1cQp6/PkeK7CnTtX57b9uycEHAJveh9Cy9Yp1ASZTvMpQiW1lzAM3B2HANzCpba75OUNWD9mLd9NtqBSRra6RN90WapU24KELjS5cBgHpCd93u/CjfZbpHPQdR/vt/+jhYfMwMUrlbypFQOJbphaW44nkJMQgoKOoi2s2P/4Ho9phOnCn8Xd2xGRp6NMa3vAFjnMiUFlLZOZpVi3tJNDRjhvfqz1GyOJoRjDoyBMQpCME98N9ALYe3jklPSzPlzrgd+blAkBUKrXZ8N1ZKYS82L3oE1UtIm9rrPlgWmvhNYAiJnut5zAPwsLcq1wOh338+U7mV97hQfAuLiV0cgz2/uotUhB4OE8PVClIyLiPnkXoC9hj3K1p6vM0Up65QYmZMWKUrfSIah4Lvtc5bda6JI2uiUCgcQVTDViCN+cgAhBw76wHHmlH3TCXvXa3KYB8L7nBF7mMF5OY+yrG3K5qTii1BdHePu0txwMDnWIEHxwQnonQl4oZcgNyDRJny760wqJWxhHxciyQFC634DcA6OJnIyK6SEUpUdWNrGfiYKMcqzwAECd7KCm1lJFzmc5D6gQawXbO13dp9bToMhKdiEVehN+ELL5j0b/FiGFDJRtWEgNUIEhU4lm+fNFlMRrggW9Yl/ZsQnI2pnQFuHe0RhckckSgjY/D9x2XHK+kmDZsog+KqJJslcSNt0TpDCTzMVP5MZetangNpgYaX+OQUsBeJ9IxLfhdxn0sZ+7lF6Pnb/rnNjbxZC5Fk4vpxPhPqBAuL3lvdpHkhJvqd9yany/ejMRXU5liaI4yHHGiRkq8mZbkgVSRT6M0+u04dXCUA7AAAIKGkA7QCBEdCZgi8GduxoDRtgXLght+fyEtDccx6mtVS7KqxJgVgDYxwFn1xLmkEsusZ9LAbm0JJ9g82VStqGYCGYIokaYGCAA4SOLErF69qKPuftQ8q2rSGa2t+X6o7XLj7OFcqF7RGKpEvchCXgZlZG2weM37/0GeIVRNk5db6n12IRmEwyikbA34PIBChcjvTMsH03EhCwH4ZgkWiDXhaBK27/HjMQm80yya5pJ3yZXSN+/PR4OZnL3KbvVK40/TzQeASyNDccTI41SOijuLz0JRai7dOX2CtGfV0UfEplZWUFKwBGzqlRGEDAQBgBCxVE9KJrY9cZAxgYtEagcEqOh6qFypF5z24BaZ0HVZK7wAgfr94ZVOawNZ3tqA86Pm52W9/n7jDmy0wvOn7yrHgK3Jk0TGpfB8IAq4oAEETGZs4qVmajTaitzD7CxCYMTttTbQHvyHeEwktPU0+7ZgVW+IifB92bdH3uWuQAgpBZBqbXoVRr77dLIafprNjf23aOUsrNqvn9nQcIuQK+XHbHbU8AgWeQBRDldemwc5wZpf7w0MKUPaBnFnmWYBH7z48fAYQMK5OW4g7mh3AQYfKFsQ5bpBAHDmFchkDeCMJeUO9k1ve+LlBA17so00DJwoICKSJzQ1o90voHGAhhEzUZyQCCCIOI4LPoRDr2/VQosQ1X9MBimjUdTKKBJDOAzKOIiWIWEtlsfekglfTHO6bR4lQp5RSY4fUm2qBUTOZ855k6hLQ
KTMiwG/kgUESK2y5XhvuwpeyxFbkAGvSmAAfy55hjGfjqJNtlmlCqQ1vnKPVshxNGQRUwBIbpfqdtpMCgL1NoyjTkgMWivjGFHXyM4jssGVjQJtjGoxBuwbJ85tIls2O1JhwHYG4yCUDgeWbSiLGOkyp9R3KvUlCfvgPHYKxdyjEHCQZs9sClZ/DofXWvcwYhleHaSUx2tyCEhBISUkooY8GCMfCREDwbJbEJ2tiIB+PyKwhUgG5gCjc4pzOAZJbXy77kaEWm+KL7xAcUBhA6aXP5MdJjppIM+AKyCxRSmfPc8HwSURZLUi65PsgieL4nYCI7UJISk+zYaXQF31bF1++ohGbl6cyy47y1D2c54/JFWOXR7XcfAO1knjyg8IRdvkBXnxmBwEGO1ciZIVJgwNflZvO0D/IAwdaYsZtpBNzal/OFn2NfP+kY6XF6M9Y6XxSNkAoqCxDYPc8Wb0qfe/5+66PN7+H7pQTEAT3Sc5l6XypyrEECL/AEBOosHdxyHrhR0pdjBBBIfB5/KQFZQMnCsQYOLMCyBy27GBrMN0HaREx28BKWSaCXWPbkdwf6adV5M2XCYLz/NKB4m3WiQFNb9oLZY8cDfZ6C2Y/yITCAGDBEx0CizHLH5QwGq4LojyEkhGEMAQchbFvK51/d+srFfb8M2d0bRzbmjuNchvaON0iuiwiOiD5qQTtHWedcl2VDevwyOvc/PV4Pw5NvTAaTUlLJMwIruUgT3gYp3r5ESMn5ZLMqIszqAcsEAmFGz4HCZUnanwSMpAChz8KgjfFAIWoe6DybvUxPDiCYMLZcrZBIqQTkAUHC0txwTIVeNCCmz3LgOMR6xzc7AgvHSVw4nEEJuBoBQkgPiFoIn8pZG1fjwsCnfFZCQqmhLbJnbCliPrM3XLklM8R955RP1neoyNzsmpanjpRM9jPbyxaoMbpLBfOER9FB7ByqbxbElZeAU+Ky6FZy1Aj75WZ6fB8gMjEQpW+EBIphvp8HlJk2UI5O5nZqbeKIGQWmtfrYJFrHFCE9lyAwRQC0B8il9zJyykuPnUtt3GlQx/1xQEGwZzwyJ8ngc5I+K5HPi0thvC+2I6H855kXcrVm+BjVmdyk39m5hhBpdyk6ReO6heT4uih7rTt2lj0ALhsgLOX4yOHxeddA6CWjF40j5PQv0PAOsaeNHSK1eTVkcNPtqG6+IwyCYn4OfwJUM20rZ9atRq0Npq1BA4lWVtDFEKZatX/lECgqGFXCSBX9QRbOgU+GPyEjh77OjEY7ANI2XhF6e3vfLNF33jEOvtyw6W6bztZy/SEFwP9U8pe0YYSIc+kbpuRy7AL90bm6374dWQCq8v0xUoVrCYRQx9nE5bTQPuT0sHMjUN6NHFA2DCDkxDCQ1rnXyXNhVGkzRKoKKIYw9Keqzl/n/nAQwNtVBYy0f8jtl73Xlf8zvB/VKky5ClMM7TrqryyAorJ/sgjLOEDI/fVeNKZc6VNYL6D0T7kcMBTmHTmNcome1eB3QH98jCSx7YMlXAt/pfuz60MfOs+IYf5EadGxOUzOYZqQ9i1KQhzwL1f5dj/y8MMP47WvfS2GwyHuvvtu/Mmf/Envtn/n7/wd6zuW/H33d3+33+YHf/AHO+vf/va3X1Hf9ivHmkkgoUdy3qBG66lctIbweejpBZjsjTBcXTvq7h6uZJRVROuLeIFxg4c119tZpNYmmbW4nApGA4KlzCVAkplB+Z/zJgp97EFOiBrOeaQbS2dD6/lJbIBeGhvozlr9eaSRCDkwkIbg0ebGgLJT5hgQ3zcKUXDUdkjo1EA0tc9hYbSGKayyHJy6pfdcLke2dvc8A5e7OqnJwWZaZHVQyAQiqFgP2zjNm+G/zjEJRDPT1N7BmK2UkaGU2GmqYb5vLv8Cfc5hKLp5EjLP4eUoPPfu0Iycxh1t7LU1zrnYd0Eg9gFgbXSadp8pEwR02QMgsKlSZEtP2W1YP/hv34/c7/Q+XiesgZAC4oDZkMQV8M2
f+cxncP/99+ORRx7B3XffjYceegj33XcfnnzySdx2222d7f/Df/gPqOva/97Y2MCb3/xm/IN/8A+i7d7+9rfjN3/zN/3vwWBw2X27HLkhQAKNU1SEZr/2o8hD+JiKMAZoG1eaWCYAQXSQk6cXjVVaWph4QCLdJYQzSVRxSlqTqWgIpnBFxt6YmhfIYSkd9LkdlbYXNvzSJ+ZJjjtXcp7eOeGzonkDW3Y7pnDa8IJL1g7lVRBUjpfSTEtpZ6/O3CPbGYQO52qkginc7Hc/+Rf2Ka0BVgrhTArxO8Dt1gB8VkUl0M+g8Ix/SQnnaLadJl/IKBfDfmcd4UhSswMwf5aaofxTWeQY2Adssr/TdfQOZYCCPWA4YhYcpG3RT/fZF76YmleJFQCAjr9Htu/dc8mZDW17xm+zsN2rKFIJyAOCBHkFmuJjH/sY3ve+9+G9730vAOCRRx7Bo48+ik984hP4wAc+0Nn+5ptvjn5/+tOfxurqagckDAYD3HHHHZfdnyuVYw0SpAgx3cLRavxezp3UsqI0PEHJZG8EmOPjxFjdfAemm2c7g4c/L/fDXhoDqeJcCgEgxGl2vQ0ykd7ZYGoT5cAivRNswOwsT7YxfPsEJMy1ResGFIpoJ+7smJwdiGhT5kQZnbQOPgImpG62KXmtoYscuiIw4/oqhIQpB855z6UjBpxJovZ5KqgQlz9H91fdcmf3/A4gN5+0ab5He2MAcVnyRhsPJKWjM5UAFPj1Se5nGuXAckXMV852G+m29evI7J+bxfZJTiGJ2Aeg1QYtmR+TvvS9B5FuEXL+jDon6XvhFs9LnETre+ueZKRzrTr2I6a4dQbs9oGwlJHL+B4ACXOQa/M6AQwHle3t7ej3YDDIzuTrusbnPvc5PPDAA36ZlBL33nsvnnjiiX0d6+Mf/zje9a53YW0tZrcff/xx3Hbbbbjpppvw7d/+7fj5n/953HLL4bCMOTnWIEEgocVEogCTWGASjUCz8thjLpPdLQxPnMoed7p51n8f3HR7vO7S+UOlhvcj1IfJ2A76iYXBMi3OxmgMlftlNkrnpOQVQkIrRzO7jHRCr7gyN05heJvQPsoG8wHKaGsP5v3h29D3XJt+xoMYKKTnxAbL7GzIhGRGvtAUS9rEzSeCRzCQcpQF0MZ+Db7rkY9GCAkrXvnG+dfokCR1+G0TgCRp5ukWR7P7aMN+lsbuGM9+uRLkXv5c2mRZSp3HgNa27x9dwDMlxIw02qDViGqdUBQBB8aFM0EbYxfw4aFTh2QRu5Uo28tJdXw5yY0IpCLzOZfV61Xe9J62IfdHKovYk2Sba+W0KKQtynagNtyz96pXvSpa/uCDD+JDH/pQZ/sLFy6gbVvcfnusH26//XZ88YtfXHi8P/mTP8HnP/95fPzjH4+Wv/3tb8c73/lO3HXXXXj66afx0z/90/iu7/ouPPHEE1DqaOq5HGuQ4AcwJ6TwPVp3r10fWCA7IB+0eDrhyWgn8rLnioRKr9YXz4T2FiUdOmIZrqxg7IAC4NgEdw3K5Drx8TYCBzSrpXXpjIMPjjQwkwL2O8XXwNAi7cYas7iSY4hOUPCW05zNuWc5r2gZqldmBqmIUUhmQ+l3ivxgXv5RJAKZCgCYtoVQyj0n7lpRsSch/azcqBKimUK0NUQzA3AVAQJIQccFwwC4Gg184zme6Qnd31tgKblP3G7uMWSiGAnMpv0DACWNTREthY/QoC4TOKBEY62OP9PIAsWYA22AEqEtbsMn84wfK3I+K0CWOeDn4S8d63M6li3ACDHbwExBnpnKAYF5s/20/3Q/HViIzYvMPMJvSuZY1zqi4TDNDc8++yzW19f98qPyB/j4xz+Ob/iGb8Db3va2aPm73vUu//0bvuEb8KY3vQmve93r8Pjjj+M7vuM7jqQvxxokcCYBiLORRYCBspUhnjkBwYmR9qNZiQ/bkgUg4hePAILdacFM4iqLfSEDLZqtgCb8PzewtBCzOgz2vAyy38a98Jx
qp4FfAxA6DJg5wECMQpvMxH2/44HGJ0MiT3LqU87ckc5WNIV0mgBI2lk2E15ngEsHNG6OSPfRs5g94OdVFB1ziGhmkVkDRkNMd21EAx1XXv1X0ifhYmYGwM24NQApUMgihMXmfA7YCsPvy4L3YlHcflrp1EYpuRh+7Z5vA5QSUcieBpkX7KeBjeyhkEB+XABo/dtvU5mjNZCCkp0bf014KLW376dmCHTBAV+WMgC56JJc6fqOewC6QMGnWOfFtXLPNRYob5PxPWLPs69GmiaRcvv2tRltdwxlfX09Agl9cuutt0IphbNnz0bLz549u9CfYDQa4dOf/jR+9md/duFxvuZrvga33nornnrqqSVIuFzxnsOwL1MuUQzgTA8eOGTAgtvOIEMxAvmZxCE6mV2uUHQG2ZtTj/R0wCAFKNoZjCFKPJkBsPPOppx1g1PkP8CvlRtw5g1KEfASLEJBhth1PgvrDM50HN/dAkY3sEl9jDufzAOwaEbVOU+DqC5Balaga8CUZydnQ1MHANPW/tjqrrcu7sMRi4+PF7EDsL2uRaw8SPrMPXx9Rrj5oC9ev080DBSEc7SET/6jCfu6mbjHcwZuvXGOzjaPCD1A3nQh7D4tAAMDaHs9WmMsKHDHCDlI7LWiMSMVPu5wMyBnDyJTAV8vZMSM0nmk7V/pHHnRO+mdlOkZT8c9344DUMb0s320T8rQXQUR6upHN1RVhbe+9a147LHH8I53vAMAoLXGY489hve///1z9/3d3/1dTKdT/MN/+A8XHue5557DxsYGXv7yl19W/y5HjjVIMOi+dPw7R+PRi0b70wAiwgI/gAhC6YxVAHymwyhjm+gmXpnsbvnvfb4NRylrqyseKERUJIvdJyHFJwAbuscbSh3Q+ujKtvEVJqMMhP4Y/ZQsSZRWRsioJHNrEIEEH8LHbdIONBijAakhtOywCh1JZ1l9yo5YEFLs1CYAUwxcnH8ZZ+vjrAB3fJzNQhuOmZGv/+b8cY9QugpHRPZ5wd4fAZuAq+Nz4iRqKjOz7ruufDafE2sys8rZuCdEZipc2Wejn6P37bjqXUJYtc7NK9Rrft7ahOgfI2zlVeWAgjBxlAIfi1KGM9etCBxkGCy7kfSAwbbV4xuQ7G9BN2LQnnn+O0Ahubfz0mOnY54HC0jf62TCcZXNDxYkHNAn4Qpqptx///14z3veg2/6pm/C2972Njz00EMYjUY+2uHd7343XvGKV+AjH/lItN/HP/5xvOMd7+g4I+7u7uLDH/4wvvd7vxd33HEHnn76afzkT/4kXv/61+O+++678pNbIMcaJJDoRGGQkC04QvNCOBNDED5IaatlnEOTiFkFWXhkLYyG4Z6+9CLtNwvbVZAoOsEBBE9t515UBxTQzsLsWAubVInPFlMKng90eh8vFFeefQraJcuBkBFASDPUpb4V7gCQQkIVBYRT6qZtACP6/Q4WCTM3GFWFNNZCQq+cgilXoQdrHcZDCdjrOpvY69rWEGIWimqpBbPwIxTrk+P66C4d9+5PJ1/BkVDGzxbyLJ1k55U67HUKGaXmJ/aTswGcxu/S9t0MgqT8Sdl75oCARs8Ekcwa9julJ3YAwZje0sERs8U+OSDozVK4SNLnJMdYUhVIY0NohTH2fesz/TC2QGS26fga9bFGTPkbIQJQoL6kkTFaL/bMPObyfd/3fTh//jw++MEP4syZM3jLW96Cz372s96Z8ZlnnvGRPSRPPvkk/ut//a/4gz/4g057Sin8n//zf/CpT30Kly5dwp133onv/M7vxM/93M8daa6EGwIkkPDESABAIVAknllwVCOnJknI25nMEJxV4DMpO1ud05mrjJZzkh2w6I/SMNMmfbZwkXjMegrdmiWEAwZR+2y7jih3nJyHPO2XZrNLZ7wIh2w9+EN0I8kzXbkZvaCohJ5Klwvt51R3wM1Iiju/wa9qdjZhiiGmrYF2HZKwDnWRmSpqjzEO1+hZsem5EWlb6neyuMvGJUo
63Ta3D1+fsnk+2zHoNzd12I24Yu67YgQIeL80W57zC0j7ljIbOfOL7XsYH4DkfWPvXG+l0/3S7n3PJWcwM9tE5rV5z7Z79j0T4pV9wiLOE35e6XIOjjLX4qjlWuVJAID3v//9veaFxx9/vLPsDW94QzYBFgCsrKzg93//96+oHweRYw8ScmA0d4k5m0Be/+TIqNn9N6YfKASKu/+lyVGI88Ipj1KGKys27wOQpxSbkPxHFMjbmnOS0sxKBrCQG/ic4o+EJ+BJtiMTAw1c/H5SdjoOFIhm5oM40cGeJmdhmDSH7YAFfr4MNBCbZIoC1W2v7VyOqRyibQ3qNqamhWERI65Nug/Fq76+0861ECFE5NDLAQIfFvnz32kj+d3n/wMENigc375vdOV93hP327iNckM0ZxW8QzJjDoTbJgUKfULbhmyU4aiCfQYnxkyvTAoGdBxpwEJkD+L17yNJOJjuM0VwyflV9bTtt+dggTc1z8dnwTF69z8CEeIQqkDqg+1/nOVYg4S+ao9cUtsgYAcDRYOBsCNa3+NMQCEdIHPbS9gHspTBjyGdsV91SQo3AYhmr96uboIvAvcpEMZlPMzN8DmzIqS9AKkpIqmG15etzfeLPkWcfGceM8k9z4GgcOh58AWSkgEcvJ+RIjfhGnnqtuitnUB1DnzOeylQkJmhsQmSyMxj+9JTVOoqy0Cxwk6ZlMApu5KCBr+MO3ICsA53wZcEQDSD906DrGECJx5UMYDGJQLhbpsWFpTxiqcRYHROh0YwM0NGyAEyyvSN4KuhBPPbYOej+PXiAIAKlJGJz6Xc9s/YvNn0vDGD+ykQm6BYnQt2bTrf5ynx3D7zgMciJqSHQfDnfpVKRS/lYHKsQUIqOTtlJPTQeicgZ0ZgU58cYOSvQLCPxuFaNHtUMNBGdGZi11J8jn0UzokoKOFgRwwKvBOpQAOrRqiVQAN5ZgZDjoNg7QsaOFkOBj9g8P1YmmOqasnz3AMxm8C907PnDp56WHobIM0yo1mfy4nhU0Izu6ovxMSEHEOVFN6JrYAG9CxWBnQthUT1sldn+3kthEAzB8+9NnUhY1aEgavObJkiU1QBpSq7HaezbYMx0KBr3jYxwKQiXbl++ffY5Z+QoUw6iUZsppiXhwEZEMEzMXJwwEMgO0ApZQ7IaZWBxSjpFhhYb1vXMard0SIVIRUgFeBqeggRIoCEYJFBdA17GMG5/gZ94IBffw4CsgfIPSvMzHKVRCoJeUDHRZlLJvUSkRsKJHDpAwj8O1dCORo72j0BB2lClGhbWPvR9SBk5pjsbgWg4BS/QBUpyigjYdaO3h1UOOULIB83nrIHmENVag0BW4siAAXZCxREomzSVlMGIlKEtseQUvp9BZrYbMLO18jCJ6taWVnB2mq4x+Px2AKh2QTQTScT5/Uk1FcvIgAnAPnZZsZkAiDMDllcvqXATTDTuGfFg3Nqz4R2KepGMKVotAhmsIjdivtkAULhPf/jRD/JyRMgorBYIaJ3m5uuCRgAXV8JyQECBzicOSAwQCGvuvFRLWgaQLcWBGgNQ9/pmrKkXHZZa4EBAKEsSBBFCRQlhFSQZcXKlBfBOVgnoMGfnPSOiqnvQTaHCWfhUoaA+pyT9H3PgIyjlkMJgZzDQN3ocsOAhCyDMI9aY9v6sCKKZnA2Ttqjjz2Ijx+8wq9LodTGIpgfYrMCm9HzQSU1M1DlPXQd0wTIwRNZ2tVvlxsw4BgeY6xyQB1mNqqAco5+ERBgtuqc4xxPyUtDJHc0IyGmQbmEQSJBikYIGGUH3j6zh6z3wMMir3fhs0jOwHXAHVfKLUsC1Rclwhgme+/bwCxwRcXDUlOaHkHxgzMTHMQyMOtDlMnMwY/Tk7PEQPjnN2R+DOs5GEhFuOWkJD1LQNk327r722hgVsM0NdDMYJoZzHTsAQJctk0LGhxwSFgF2yHlQIKEqIYQhS3+JQYrATQUlWUZiIXhbAwBKbpPKgEIBDT4ffXmgiZkoOX
Pgb+omTH2KrMGOVmChIPJDQESegc4/pkuT0QxGzi3m7Y9AIHbJxXPvobrM7KnpWI6qrLXQMYzAoNkxsa+pwNp1/lM5GdWBBBytsfMDNUXEKLj06cu7UDn6OuUveDf01TdxjnmUbtzlWEbD/ak8HwSIaOx1ldKnA2Y1c1Xr0Lb5cru3jhKh8yjQySAgiJC+Gwxwxb0zii5MIYiKh+9aAZKs1zjclzMs7MLY9Num8BUpG2BH5/1iQCFt6aZfmDgDwsExosKchF74JSoaFyoq7bmBT0dW2BQT2CmE5imdiBhYtmEZhaxCtCtBwiGlUoXSnZZhKKCKEuIemJBQ1kB1dB9p/TxOkQvpeabVJKJlWdA+LkSaEnue87s0FuBlU04lnJ9y7G/SwcCCISmE/NDx0mRK0R/3OCkpgScjdGuyyY8ucbCHcgUm33RMgAd8wGPTyfpC8/x64FsTvveKnGpouH2aLI1U7tAqH3ATEWRrZt98iQ2OVBA3zmYiWoxAHFo6JxZ0eD0y+Zel+tFeK4JSlfsU5hTpAN6zHV8xs8LXvEZZc9scuE8jFPd1ISET15mf4dngoNIb94QmWNrto2MQ6I9CHTteLfUeY84B8DMjBAp0ra2tTh0Cz0eOXAwtsBgMnJAwYGGWQ3TarQz97y1Nv8KBwcAfDIgWRbWxl4WDhAE00QkZJooK8uO6WDCmwcShDEhUVzClHTMSouYWscm7Tda4qhk6ZNwMDnWICGL+A9o64pmp73HtcCgcADBDgostEm3qCc719WMkqr7GQNoXwwndThENMsEEGXE445b9nfsDxAltXLmjcjGn6YzBjyF7Qdb3ziZNxRMYYGHASAaWOqfZvg5IWqUKafOTJZXw+PAgDmdmaKMZrzDk8ejfPg84QCB6hoAcC+TrdUgpLDJw3Igjzvh8esWga7ghGcozJYzBuQ8WlS2CJZUoLwRQhah4GCbsBfsmYi8+cm8AeQVT45tSENw56RSj54drixTp0RuXpiOoesJTD2BHm1bUDAeQTuQ0ExqtJMaetZAtxq6bkDJ2byZAbDMAQAhJYSSUFVhAYKSKIYNlG5hirKb+YNH4lQDdxlY4iQEBsUeNGHyOBDgtSB4tFbfZMx2OP7OQaBzADZl1XvND1UOwdyApbnh+ErW/6B34wxd6b5zGzvNtigkih8rZRDsoNCEF8yFz2VrHFxDabSNG4cMZhTuP8GdAlN/DJutzjl4ESgQIYKjk39ea+vdTzNzR71G3t0mVtKkULzndll5SlLoFqbUVpEUjlFQBaAbqHSmTzPbpo6KUfG7EdnCadDj2SgBppCK4JNwA4gQAsYxCNLdPOlMZsL51EQTaTIX6QaimfjrJJqJvS8zR50zMGBmdaDRm1mwswOWKpfKUeUlRFlBVEPItXULCIG4RgpXVk7Jeb8UVcRRKsReZWar9p7biqDCGBs3yYUY9PS9FbETbqQsc2BJN8B0bFmCemKZhMke9HQMM9nDbGcX7aT2IKGtZzDagYTEtECfQtlSx6oKobOilWhdCWQJwNQT+964P1FW/p4IioTgYItd31ymxU4VyZQ94ONtZlzl4M2koI5yoBTXoV12KR051iPf5Tgo2h0S5x22KgUIuVa4c2KkFKUEDKPvrzOAAARfC8skxIwAsQGdjJR+vV1BTDCFlEUAgViCjI0268RFs/VZDU22WDCqOx3M2sbRl8bSp8aFaXL2gZsOIlMGom1SxZM6VtLARmmhr+dIhcuRXReyKUS4JrnYfwFue2egYDZ1Tnk1UE/tTNk54Hm7On3OZsFJj91fSOlBghys2M/hGoxurR19uOrBAICuUqKZsJF25i3DOGBoPUmq+Iy2552OF5wp6OzbIlWqnIKPWagaaJrIlIBmFn3qWWP/6gZtPfPfjdbQDCRIbQFA6I60QFpK6FZDSXv+xjk4+n7zUuVpPg5+LemctHbjVwD4AAND/Ly5ZBibNIeKd4BmKdZB+UaEhClnnTaOQqQQkAdMppR
m23wpybEGCQDmU14kDBxETnhsk3QWzU3v9IAQQKAEKtQAt5Obef24BnJ2a4RWWybAKvh4fc6nQwmgTVJX++3Zfh0Pb5q9cw9v7vjkZ+7GDnikVHzjMjAJfMBxIozLYQBl2Qok9z6lx2lZCgqAri0diGY+phzCFIMbAiCc3x5htQjXUSIAWQIHnBnyDFlTxwBhtmc99OuJta3773uBUWB29sbR6QD8LJlmxrIsUAwriMEQZrgG2dSWUWhmEMM1SKW6oXtk4wbcPRVw9hH77CBRVO4850VR5CSbedQDzgAs8wChDtfAsQl0XQI4cKCgByCkkitOZBKHT6Nbx5yVeR+FbgOAUO5cwrJOkid+3n3XyptwihgYcDaOvdOmsCyhLq5OMiV65g7Uhl76JBxPSRFujgJjDzF33iNb7CKVTmliJfI57f1g5OjoTsKg60AUo5TJj6Iv6VQuwkMjKJI0gYwPidLMlMBDwNJ7JJwtlByrBitOMRe2miINMBSFkZommokdkKm5dPAiZiCRnE01oj7TWY+6SvbSqyT8mQcQhet2wMHMmYpmY8jpyNnY96D3toFmxpzxgs19Npp4Gn02GkPPGmtzT5SfdNS5rCxIUMMByrUhqpPnIVdXIYdrECtrllWohhCDEOYnBytW6QtpGSSanQrpy5x77O79FGTXtJRjj5gIAJ1Q1mRWLbzJoQ2AgFiUKYGD2oGnOm5fSYhWet8C02rIPlODMzNwnwT6Lkv6rlyUQxXMDcqZddz6SJnTcTyQZtEK7BzDdgkgScdWZlLIAgPGJBhhC7ZpA9R9eb6Xcl3J8QYJQITyOxQYPcBAJyYamA8QOLtECjJrf0+OawCI68h+XcrAgvh0wVyRsmsExNfHJkYK2QS5IuGmBfIr4Cmg00E2N0hFM5CihClXo3LL3teDTBfRAJ3OdJLZIj9MMoukYwuykQLWkYs9EdeT0+lBxZuHElAQZQV1vjViNg3e+dMR5Gzs7OqjGBxM7Pd2MkW9vYfZaIJmMkUzmmC2N3H29hZtbRWQUBLSOZCRd74aViiGFdrJFM2kRjHcQ7m2C7W66kGCdIABRQlTT0JegKKybINXRJXPBZJjFDqAgLNIOaDJPnOOegQOtMtxYOqJdTik701tTS5sVk/gSCgJ7Wa3FM0AoNdhUVaFB1ceHChpmZiysr4dPiTSfSewIFW+CFQOIPWwsllmhSZHPoFT4m/gnFEhQzZIA3hnWY3+WiCHLYdS4GlZu+GYCwcK9Nt95hwSgRggUL6WnBBA8E0DHWXTOWbOhneNpFLMVEKzRBJ6gQFL/2XCCTu56dsm9vBO7bRadxmE1E7MqGA/0yiGttSyqtC4ThSq8qyBAOx3MmFQf+iTGAz3e16aW57W1gMFWYAqXl5PqZMPQxQHeGBmIm9y0cEhcbYHUY9tee29bbR7OzDTCfTeNvR45JXgbDT2zEG9PUIzsh779c4e6t0ZmkmDtm5hmCZQlXIgQaAYFihWajRlgWZSo5zUkGWBcm0F5drEAwhTr3tGQTZrMM7Z0cgYMAjn2ArlQv7SWgYkKcDMmqq6oDJ6nnQbwAF9OnOL9cuwAAHcFwPwil232oEkG+pIQCFncuDggACCD4F0LIsoSve99IyCBQoyNt2l0nfutLoP1PP2HECIzIOeTQjRJwQQqAKoYWPxUcuhJFNagoRjKvQwckccUkBApPS0CWGAnWb2cf99kp6c8mczcd6H60EK04RIAzcjBxBmX0rDqAIiQ697+plMCn4mn4TDkZLJ5UIg8WYZ5kxFIEVVdrmf1cNXfxSUYpYcFkUDaJb2lg/m03EYtAHmRMeBkQqDaFFCDAFBCZqKqr9k9jGWgXRXk4evJf4j5JRodi7akL29HejRNvTeDvTEsgP19h70rPGMQVs37nftgcFsZL+3sxZtHZ4BqQRUpSBLCVUpFMMC5cTNiEc1ypW9iF2wnwOUa9th2YmT3uGR2AUfITFcgyhKm57YPUvG1TLw5iQgfnZ
7wAABgSi5EbueafpkzgC4k4VQzndCOlBaKUC3UAOEfAxsfwqF5MwCiWcQWH4EUZSROYa++wRL1TAKLwWY0uc+GX0TGhGiOjqsDAf5+5HraDxcyuXLjTEiJnQ/kGcPAHTq1wPoxBjP5QB6zBupM6Q2wGw0hjEGp06s7vNEDl+Gaza2f3b2r3zyF5rBW9ut8EpZsfLMkc8BhVKloYPk4b2oaAsfXDig0xpCuKx6gA9ZFInDmmhdBjvnSBeBE6NDHvymx1uae9YrFQbYsoIurIOiKYcw5coN4aiYytBliZxubQQGiDICzqYQ022YvV20o23oS+fR7mxC7+1hemkXs9EEs72xNSM4tqCtNdq6hZ610K1BM24cKGgdg6BhWuNnxlJJtK2xMzopYFoD0xq0tYYli+x9l3Xrvf/lpEZTWhNGMRxAVgXKvQmKYWVn0+S3UFYQw1WIWW1t8nRvk3oGUTZGZiLjTJOtn6AjgEDhnHYbHWbnsCaByERAIZxS21Bdik4oKkC7CASev8AdR7iQYdXMrEPvHKBAIaOiKIPvRlHatMz03QElI2TezJcCBA6QMiGgc38bDe+05MYFC+qFvV4qeefdppr5xBy1LB0XDyaHDhI+9KEP4cMf/nC07A1veAO++MUvAgAmkwn+6T/9p/j0pz+N6XSK++67D//6X/9r3H77FQzO9IAz9gDoNy/kiITcrZdY7NDYMTMgPibtLwFsj8ZYX7u2BZ9EPQ6zJ5qd6wZoAyggh68otFTrGASkYY1XYlrJOhuGREtR4RkhQ+REO/MFcnyERFPbwTxxDvNN0yAu2QA7XAPKClpVMOUKTDm0rEpxYzkrdoQDPGIPZnvQly5Ab21YB8QLZ1Fv71mzwfYI9c4e2kmNerdGPZp5IEAgwLQGRhsLGpzyj0L5vBOe8J8p/Uv7GyWg3f6qLKCrAkZrtJMasiqg6wYz5/BYzhqooUtF3LYQzcyDPzQza5aoJ2FmreIy36ZtoXmip1xaZMr/kEu65MI4+8QDBr9JGfYjcQDBtK1tt6gscHEMWEekioAA+R94gFANgaJwZc2DU2ek2NMU6Yk5pedksiYLb4KBBQpRYTbjxhWjIxMmd4i+WlGF1rp4UJ+EQ+rMMZQjYRK+/uu/Hv/5P//ncJAiHOYnfuIn8Oijj+J3f/d3cerUKbz//e/HO9/5Tvy3//bfLv9AHBG7B3hevoM+9oAqCuZEA87DnzkuJi9LREWyfPi28atne5snYrYXcq7LxkYP8HwP0mWQ4yYcwDshCQ07+5xN4tl87wEZeGIDjOADrpAwbeNnXKJJWAIHFKzNPA4xg9ZRfL4fyF1VPPoOwNqzT5y2medUBT08ac9fFsH88VKgRIWE0K0FCPUYYrwNvb2B2XNPo964gOmlXYzPbaLesY6Ik80JpttTzMZNhyEwzMPVOJbAHsI6JgoloCoJVZIfgkQxLNxyZf0TZHAos+DAAK1lJKRqnDf/BKosIKsC9faeZRKqAuXqCsq1IdSwQrU+YeaHytvnvVJVlGQoucf0zBAoYBUZyd+A6it4YQWWPKhMszf6y82OS+wDAwmUZEoA0XPcyVRJFSC9/wUzKays+e+ohvadUawiJO+f0eB1SATN+oE4dNubaBIfg1Q8wACAFtDSalM3vgggRJ24Kp3BsVR4p+qjFiEtg3XQNl6qciQgoSgK3HFH1zt8a2sLH//4x/Hbv/3b+PZv/3YAwG/+5m/ijW98I/74j/8Y3/zN33z5ByNlgn4TA0kKDOZJum1UNCjZNhc5AQA3n7x2ZoaOTMfQurXOXkI4P4PCKmmH9gUACIMoTTnPmtjUELNxnIaXpM9xlNtBk/18mKOG9U8oSkArQBd+tiubaex8qHWggd0yn9mPBnqpooG0/Ob/Z7gMm2dhBic6DAg58k23NjA4dctBrvR1K6ZaBeo9iEkDMdpEu3kOzfnnMfrKMxhvbGGysY3J5h6m21M04wbT7alnD/RMR+YDILADspQeGHD
fg3KlQDEsoSoJWSqUK4UHDDxRkNHaMxFketCtgYRGWwOmnQGTGVSlLKtQWlZBz2Yo1lZgtEa5OoOsSs8geJAwGEIwBdu5Jjp+tjpVGHkJZ7+dVfRmNnPXIWYpCBQIBhBEChaUAripgh0/YhNS8OEjGJyDIrVbdNNU+77Qp3azfqHsTJ9qOfT4D/HvHZ8O7hNE11JIS8sLC0oMAyUWkFsfEcqSetBMyUu5OnIkIOFLX/oS7rzzTgyHQ9xzzz34yEc+gle/+tX43Oc+h9lshnvvvddv+3Vf93V49atfjSeeeKIXJEynU0ynU/97e3s73iCd2TMTQ4p/0xKwpNhTNiHdLs2PkAKCeZkarwfxg45UEEb5CAxBTlm8KqRGnISGqtu1s5B6lkkn3BG6c0/CSubPQd954R7lZhuzSRiMuGOYlN50YGY1RFFh8Hf+4f6vw3AdtYatTcASQg36qjte53J2a4RK2VnZidW8Sevs1ggrhUQl4R1Y9d4O9PZFtJcuYryxhfG5S5hs7mG8OcHMAYN6NItMC0AACCkwCKGNFhwQKCjXKqhKel8CcsAjIWc9IeuoyBEXTdRcHSojCuXCCSd1iACYNVDlDHJozU/C+QKgtFEQpplFM3mgCxJs+/OTEUX7AB40uAsU+x7wNNTEaAAOtDgWizEawh1f8HTWibnDloKOwxy9zGPEHFAAEEoRGAHmSjlXek0SnlEI1SbJKdYAoe4K4NOpgxjCqyBSHkKBp/YlwDT2yKGDhLvvvhuf/OQn8YY3vAEvvvgiPvzhD+Nv/+2/jc9//vM4c+YMqqrC6dOno31uv/12nDlzprfNj3zkIx0/BwD2hXDxt7nZfI45SFkjXrOAbxvlBQDcixBHMfDj8UyN2hjcun59KJ3Z//7/BIcrIPI3EG0dpUE2fJ0vDxxnUuT+ADRIagorBHxoYSTMZ4RMNequtx7NCS+Q4coKhtfkyIcvz17ctdUb51ChZy6NANjaHQM9tUxQvYtm8xzazXMYnbmI8blL2Lsw8uaFZtL4WT2FMKqK6HIyFwRTgqqUjUUvlY9cUJUDCSdXvYmgGA68A16odthaZ8jKhkJKVXs2ISe61dZnaNagHcd+KLIsoGcNCm1DDCPfBGITJFPUoVGkqaPtuSZKmLMKZAYAYDAL7cM954w18ACBTGEMKHRP0AF37YACe8/sPgpRuWh+HO5zAoSKmvw4FK4IdEMcgXxlR554qodF4Psb2q5tWAXXyg6ORoekS/PMlYcohxIC+RKmPQ4dJHzXd32X//6mN70Jd999N17zmtfgd37nd7CycmXOew888ADuv/9+/3t7exuvetWr0MJm7+IOi0BsSshlFkxZBAIKtJ9iM0zBHOJ8lTpu3kjAwfUmZmyVBHl+g5KrqCokORHCgwa7k46jFnySpMbXW/CDKs8vT97k0mVMZKFnvj8YzHeSWsq+pdHWG93nschIpYT9m2xBjjYgpztozjyDduMMxuc2MdnYCiaGSfA9IOaA/AhyIYzB90BB+eVlCFlcqVCdXPOVC9Uwpvv1rPHpm6nY0X5SFfOsjYCdeTfjGtI5PDYARN247IQz593uTARcQSc+CVHq6LKCoVk7PeMZB0fWKYiihJEaogCglVX0zGHRMwqcZWCmCs/eAd6c1jkWBzgEenQLNAAKBKdfo1nFyyK8h4qFQ2ZAgq8SSX5eqWMj91XqYy142yyCiZyiqW9iAWOzlOtDjjwE8vTp0/hrf+2v4amnnsLf/bt/F3Vd49KlSxGbcPbs2awPA8lgMMBgMOgsp1h6IFb4XPhPbjowiMFCalYgpRhRfSrMxo8NriQ7K2BtmUoF72dKnUqDoLPLA+iYGtKaC1x8yV+KTyfHKcCxPcns5SrRjDe62ImidZQ91WNqWC0ExHQXcvc8sHMBzdYGmo0XMTl/EdNLOzad8iwOWRRKQKLrfFiu2DTAqlIoVgrveEjgwDoUDlGsDaEqCxbKtaEFCGUBWZVByekW2lVAlGU
BPaxgWo2WgQRO+88FDAQAWg2jtFf2bW2dH6WWgDNlpL4Q1DbVmPDtltaMoYaDTkQCBxT2ejnlpyUDAPa74I6O3jdBuVTJIk5xDIRZuCvvLIm1c8fuFQcUbLtMyRPVT99ZUSyKZookxySkDuJJSONCp1/HSHoAIgsArXWAvgpyKCGQB9z/OMuRg4Td3V08/fTT+Ef/6B/hrW99K8qyxGOPPYbv/d7vBQA8+eSTeOaZZ3DPPfdcdtvaJOYFpuj5b74MgH1Qedxuul2abIXWOa9gCGkrKgLQNnzBtSOuK2fF6R/9v+2XZuYHOiOEBQku/XH0gmcGBaEboLYe3roDDkpvXjCcmUjpXJfY5kbLZHi1ZHN3D8YAM218/QUDm275VTefmLuv3D0PubcJc+E5tBsvot3awPTCRYzObPhQx7ZuvcK0AAGAV/6F8ykIDogEDGyEAq/DULmMiUOWFGngow4i2ltrqMoCTsVm9AQceGIhUsj+k63ny20TGkAD0bq8AlrCOHtypygSO0ZbB38In91QSchy4msoRPtq7U0nUttyzd7HwGf0LJF1YuTppHtm5L6+CAAo3Xk3RQ48EMvhQYgOs3dKEmYLp8QKH0hYA3adkmgkcPCRFuAKFyf+3oZ+WAbHAYZ20t33CETI7v27kjZeqnLoIOGf/bN/hu/5nu/Ba17zGrzwwgt48MEHoZTC93//9+PUqVP44R/+Ydx///24+eabsb6+jh/7sR/DPffcc0WRDb4SIfsNdAHBou8dAEEz6pzHPm3jHnoqv3w9EmemnliAUJSW/ZDKmhmKgc0u6EwCYYdgb0RTB2qwKOwsBYi8s40QDhS4rImclaAiV/NoyaXsW4SwpgMjBVpj7LjbibPJ7Pfcn6Pd2kBz9lmMz1xAvbOHycaWTZTkQhsBWCagVKjWrEmAHBEtQCgiUMDrB9jZdgAE1foqytUhZFWG2H3u0c8lM/AqwKc0jvIWJDkM9IxVU3SfqRgGJIB+cGFBwsx/J6Dgr6GUnrHgKZIpNBMooIbKp0jm9SZ46mRIBZRVSFXMcxmkfY9+ZGb0RgOScouoOKuoc0YWSoV6JB4AyAAI0YRjp8dIAZWv/8AYQqAD/Pw1I+CSi56AZTbkZLezfCnXnxw6SHjuuefw/d///djY2MDLXvYyfOu3fiv++I//GC972csAAP/yX/5LSCnxvd/7vVEypSsRov074CB94OdJip71ZezL+xIIhWsq7Rf+CO3WBsxk5Jf53O7DNWiePIiFN/GESaFKooBPi1wAwjA2AggAgWye3HbLRBiN8rbXHul538iyokRSDMf+blKPWybt0/8D7dln0W6cgd69hN3nz2N8bhOz0QTTSyPMxg0o10ExtMMA5SygYkwAIrZAlrQdKzbkHBLVsIJy5Z/lcBhltYxm0ySRM13IIWB0C1E5h8AkdwCFuwrdQpQzyNKWpG7Lxvs3pJER3KRgInYiNmUQOMi1Qdckrp1QhkgPJUNookuNDEp4lDII9kSjUMXeGbmX4JhI76kx2mdFNdD2Gqb1ScJFiMAARS9Z/KBj/wzaPiPeX8L5MMHtHrOtiR8TtZcBC6K5OkyCVIcQ3bA0NxyefPrTn567fjgc4uGHH8bDDz98KMeLKtmlD+I+FX2nqmDKIMzxBo7auUaOCvW5r9iSvvTSJZ7aYrgGsXoSGKzaWHnuMwB0r1ly/pQD37AkR7QO6WyIMTDlK77uiM74xpOnzm3bUuQCeO0tJ6N1ot6DItOQc6YtVIGyGOL89gjrooYYb0GOt2AGa1C7F9Ceew7N+ecxPXMGk41tjM5sYLazh3q39tELNnTRhixap8SiE6ZIYABA9Mln17zeghoOWKrgUOY5Agk8ayFihsEHE7VtNncALdPTsXUUbGYQqkYDwMjgk5AWTTLO78C0beTfECow6l5zBp2zpKiMqoiqNArv4BiiDnLgaCEY4MApSUbmgbt3Kg4ZDekadvI58OvKo5eAuOppmyj5BRJtY9o
uwwFEadPJ2dluEvrIJzJHKofgk4AlSDie0inbzG3pObuaXKDse6gxvy9H/0A2gdI1ERkUiNGtrUmwfRGmbSFX1qBuug1mZR3t8CRMuRJCkABfAZBedl/quZ1lAUMHNNGg0GpAB2ZioTPTUjoiBLJZ6ORkKwZnzpQjqlWcXLsF6sILFiDqBvr5L6Hevoh2awM7X3kB00s7qLf3PHugZ3aQ9kWWVgoUq0PrYOgyGVJpYiDYYlOFS0qT+yTIsojs8LbzIUJADFaCR/6ia4GgUChLp5nVPhOiqoa2JPPMZuAslYKuZ9bxEY3vM/XX/gWA0KmN4M7X5myQPjV7yioQQBKSmVwqe87WHKHiaATdWodGwEcgQLhaEkazIlQmKHF6P3lSMttpb+o0FF7onQJ1AF1A7GxJ7yhiM4awsZrdi8/9FHLCmIF5RbJ8yvQk1TSxOHq0l29/KdeVHGuQkPU96AMIc8QICdGXAMh555sEIFDZ0zSz4/ZoDABXt1ZDAmzMrLZFb6SCXFu3DEIxAIohTFEFUAGAQpMwm/iyyVEuBdY2mRbiWY6IZhYeINygjj5fOLMNAHjjHesHbuvLF3agjZ2kDAqJYSGyUTNybzNce8DHl4vZGHK0AXPxBbQ7l2D2dnxxJpta2ZZzJoVI+QyIPaCwxHJt6H0KfDRCVYDnNACAlih9smm7e2zzILgMh0XFPPpDWuJQvnh+vgIuIUVJiM7x+QOmjjVjz5mUCoBdrtFAogDQsDoqpa3O2LaOJc8AAGILWg1ZBkAEALJyURpliWKl8lEb9oDOhNHUoU9F6Vi91jsyCgfirVIvgtc/FUVKowdyUQSGTARtBNqjglOUJI2WkxNj6iyZOw5JxqmxAw568qb4eirkRzILgME3X19Fx8WDRjfcoOPZfuRYg4RIcqF1+wAIXnL51zlAcPkRKGlTq0N2xZuOoMrj1u4ehIuBjxI6kYIg50JC50JYZ0qXelYM12zNgtV174cQeVTTKc4bJJgEZyvlY6090DAa5R2vP7RzvxL58oUd7/kvhICSXdr+oPLUOQsQDot5LKRN2qWEQCGBYSG7mTyf+mNgVkcVDa1duYWejqF3LkHvXoLe2kC9uQk9a2xYo8s94DMauqgF4ViCYnUIVRU+IoFMBgQSIqXunjFJpbczOQV86W1XW4Bo9lC/gCcAUj6NcEdyTm70pXEVSTO+DcZlK5QVRWoUMJLAjDVD6NY6OQKYW8RNSAmU6IAhVZUeQHkgpRhr4lI2UzXSkEjImkIEAFMAcBUrDZ/hG1cU6ZBMlr4uhPPzyEU8zIuuyDo0u+9kSqAqmiZNRsV+UxXNbJrp5iolU1pGNxxIbgyQkKBvwxZflrC0pf63a8gIm7iJwAEpJHlEjggaNkZaShFlRRSziT0xslEL50xUDmGMsbnRb7rNl4vV1Rr06k12psLyIfj2qKJjM7XJlNomxC/TAGK0D2005RC+MFJhPbVFe3Ve9nnCE2kpaRXvYYtykQUrxeEMGCuFQKV4umSqOBhYKLLlakqKVZTAcA3NmS9D723DTCfQezuoL21jfP4SmsnU2t99rgH7QEslIcrgfEdhijxkUZUF5Oqq98b3dnUS5kDoheojUK0EHgYI+DYEs9VHUTE93v0dMRpCNRaQ6BaawMbUzfIByy5IBVWUnbLLlAtBl43PxQCEOH2KXLDf45ln5KjpTAxR3gcGVDCrA61PZoci5IfwQKFtrX2JmxR8BMFimj/nQ8Up/igbJO1O94UzOUURAYbonadrk5gUeL6UbAbKBCD4dZxJ0G1v5dbDFp5M68rbuB7j166OHGuQwJMppZUZwdd5b/2EutuHYyMlHiKA0LrMiqS4DzPLIrEHUgBVapvWDeR0BCqpjNplgyRfibaxlR7bGdr12+yLLBVMtWoVO1GSTe1nAT7NMgGFNIESEAAC9UPIABAcBT44cerQrsGVyJ+9sIVx02JvpnHraomVQkIpm5JYSXstlRQ+zXatr8wctFJ
IFNJgWAhc3NmDcg7iuUeAnhHAPidK2voKymX3VAI4UVrgaf0JLIUsminqc1+BmE0gpzs2qU7TANJWJtR7E2D3Epqzz6DZ3kK9vecyFk7RTGo/UyZRlZ3xAoiiFKr1tSi3AWUC9BEJzlu/E5XAhRzzOOtAyxMhpUTmqLSUceTHkobV0SzbNgSIBnLg8Dz5PzhTh/VTcGYJorqbGZTLwdCWDaQHC2H440BAcFAQLVcRKMiaTHy/W8YwaMbIlEyhLhh+E1+r4D/kohxYLhdhjC19zc45d788KQl4P4koVJKW8z7MAwd0frnzj47NQBK/Nku57uVYgwQuoYohs9uRc1AfMmfbeg9inpHNDWCUtIm/CgeZp05GO4CQaBGndwbg6OfAHPhBsqmBZuKyH2pfmdEPtMwUgcJSmUYW1hch2Ua0MztrbepOVTeAzRyoD06u12iF7WmDmTYYKulzZ3DRALQ2UEJgbfXKazfccTrU47i4s9ehq4WArxeihIgKjBVSQMEO8gr2GhshoVTlBvbGdrStA0CTrhqmUjCNHVzNrIbZ20Gz7XIdjCZoXQIiIJ4F8yRARJULJT04KIaurHJlKyV2wAF99ilHYgiIHdiPpBExZMLqqSkghL1mBjoyjRkHCrxQbgDA2/79bNYpaCkVhKqhlUSb2Kk5MEhZgqguCb8GqbDsilHyqAVU9dyoqfSdJIBALKCbNFiGRzOqPz/z9deIfe/kVEiPnZoVgF5Tbmp2SP0QvKSmhyOUZcbFg8kNARIorXIHKOxXcmAiot7YYgFIiCjckRgAAJ3sjcWMefByB0jdQKrK1p4wwZZO+4nasgKehnYmAQDBcYhYBWbbMyUbOGVhWQTAzzrEbAwxmwTzAs0QHAXsz51dByMkild9/eVc0asq00ajVAKvu3mAWWtw500hC+HZrRGMAerW4I7Th+c70hoDYWJzkzGAkMyPhGy3RgPT2s/+oBuYag0oKqCtYYoKYqYhJ1sQzSzQxz6XRZwop93ZtOxB3aCZTK3znLJ2dKLNecSB9T0ImQ87IXoMFNjlTDkSw8B8IrgDq07AQSfHf/rJgQE5uIrYMZizfEY3FujwsEVywCOqnI7dzKzTLneac06T5FRoQzNbb47w+zIgxJMf+ZTKQDT7zYMEGTMvbL+0AiSYf1D0l4ZBRk6CgUGIgH3TuPNlvgDpufF+98zihTEWKBAQAXp9DvoZlIyjYgos5oGHIxApJeQBfQoOuv9xlhsCJGSL4OW8gklSMwOxDmym4gtHsZoQqf9BVFjHtUFJb7yS4NI2YTbk2AEJoIRF61G/eE4CZx4QbQ3RJpXh6GUtCpjqhJ1hTUfWc3o2hdo5i/rzT6C449UwX/NNtuQztZW+pO54RkgbCaFKm1dhH2aZaylve8UJ1K1B3RpM46xDGDca0yOotEHAtDWWoVAuIyIxPV6cw2urhpH5wYfvtrUFhM3UzpilAtQARlV2OSnLvR1nc57Z8tg3n0a5OsTgtAVEPFOgpKRGa+s++58YrgZlxSUDEjz17wCBkQW0c1j1ZgIOKLldnLfd8YRn14WH+aVmB7tBUFQgZo1F0VDooCwgisI76Ilm5vIpDB1gGILyLPQqLSAobqDfzJIqCp7nIWUdMtt5AEJpmXM1VAiEkYhQZhkSoXAT9xlQyjpMFoBo3PUqq47vSFoNMgsYGgofzfsbeOFRCvSdVa3kIY+pL0LWt2Up163cECCBJKsKUmdGtiyd3dB2aXgjLx0NsAGeFDkYfUxttU3s8AN0UpUKDiLaJu4PHwScUkc9gXZ1FLwHtfcqLxwz4MKqaKCpJ5i88DyqyQiDagjc/IoIvHCF4AGCUwSmHNq8CkcEEkZ7NlzUV9ys9/z5N+VqxLKcmhNBcqKnuBFgGQQJAzWnnPJ+5eKO7d/MeUm22jJLhbQ+B0I33WfKAcfG+bMAIaqBgKJopp7dAQDoKnpGBJuZAYAobTZDifAC8zwGZEaQKzaJlqjsd5BTIaiLjEZ
30QaGKylSXioos5Bdkw0d3DzF22fZAaEbazqgXfoAAn8XycEW7v2VGjACwoT1IKU5cOZCioBw6cjRzKyTIGC9/XkHU5BA1wLogqlUUlCQVHeMrgMVoCKHTfaeeUaFrjGQZUIjllQYxjwUlm0pK2BWhyiKTn97ZsKOFeDMQ6d09hxlnqbLjhwVgS5AAOJjXQW5luaGhx9+GB/96Edx5swZvPnNb8av/uqv4m1ve1t2209+8pN473vfGy0bDAaYTEKoqDEGDz74IH79138dly5dwt/6W38L/+bf/Bt87dd+7RX1bz9yw4CEnArIuRRmwYJbnoY30sRFwQAEFIj24zNGbjcE7ACa2A+9MyCFLaYFpPggywdg3diXv7FJY8x4BB+DDNiZCWW2q6d2oBqs2pBIWGcrPWuw9eRXIP/qOdz2vT9gHQ+BULUxYVGMooQ4ReT0eFjylY0dDJXEyarL8NjZYwuUFjycWLt8E8GzF3chBbBba2sWgIDOPg2LZWt3DzNtUCnpHRWlAGatdUwshA0tVAIQbROupXP2JIfXmQ6OjJWwgEI0tc11QNkyp3swTe3CCF0+i7aGno4jRzRRDSFcfQOhah8fL6QrTkR+Bu5PrqxBDNcAMkWl9zND99ty4gk4IMDAnxeg+/xTEiPAPeeuzRRMpMdzETg80RdXjt7rnrejNYwpPRPHIyDI21/02b57FWeyfY8THp+N+1om7D2iT90Hhvi17AMI3LfKsTKRL4FuPFCwbBBgGsRAoed8orHQHy5jGuiRDkBwia7CcTgzGtr1+RyukvPitQIJn/nMZ3D//ffjkUcewd13342HHnoI9913H5588kncdttt2X3W19fx5JNPhuMm7PUv/dIv4Vd+5VfwqU99CnfddRd+5md+Bvfddx/+4i/+AsPhlXpbzZdjDxKyCZWE7FUJfWBCG/iZ64wFq3MTQxo2mIIAvy5lKlzkgGgm1tnQ2RANIW73slAGOXnytN3PgQEztYWaTD2xoXDEJBB9WZbQ4xHk2rpNojRYhZzZWbqu1nDq2/6ebXu0g4uP/i5OvPGNKF//ZmvzppmhG7Ts7HaGdvUmm3hJSDQ4XJAA2Bl4ow0qlwSigURbnkStrdlgVZts9sH9SKMNbh4qnBoo1K3BhXGDtkdPLBIlBQZK2PvrFBjh+lZbADA2wKQ1KOXAOiXCoglinAZKYCDaEEUynrlnwfmZkD/M8ARw6SyajTPQWxs2lHVgX/xgbqihdy5hdOYi2okFisNb1n1iH6Go0FAVRSqgIEVfxPcc6PcLYMrbqOCHkGYajXSt0XbG774LKVk4YNP1WSDJmAd9f7wDb+kAQvJ2E1sBIIrWoQgeBKavT/rqDMxLU8xDFkPkRZJ4TSofKpymMI+YkxSwJCGOwsjwnBBj1SpAO+dWD6zcWNIgZmH8pYpn9bSem0lyvgKd9TlwMM+pMWVXpIQorn3o9FHKxz72Mbzvfe/z7MAjjzyCRx99FJ/4xCfwgQ98ILuPEAJ33HFHdp0xBg899BD++T//5/j7f//vAwB+67d+C7fffjt+7/d+D+9617uO5DyONUjoqJDEtJDdPkeLssHDzhRDy0rAh8/5AYgiDsiPgAEHodtOwSQ/6OgmAIR6EryvdRucq3QLM3G0O38Jmzp6eSke3dOiRElLCTmYxol3VtchVteB07fhZFOjuP3V0MNTHgT4Nh3A0QPrxT/cR2jjZHeLXUiJ4Vp/AqMvnNnGrasKK4WNQphnQjiICAHU2kBog0YDEkB9gFDVxgBChDDYRptO0iNjnDOjEJDuUxHz1DrWIHVE5X4oTlmI4RrEeARRVmg3z8WDqxuc9ayxyZLqWVRkSFYlxHAt+CEMrC+CHKzYUt5Us4PCVzkgADp0P2QcqmhBNLrnDjBTnIxs5gZgUUNFMBGkwNO/lzLMnt11MYDNv+/AgukAjDK8h35W7Tz/W+Uc8uYIBxnkQ+Hv6z7RZQpsOPPCwTjPV0Lb9zcHX8RRoGN
qEfQpC++r4Is9yS4Y8JKZ4RtkHDlJpLITGl7A7TIAQlzYizlRFlfH7CDEISRTcjdke3s7Wj4YDDAYDDrb13WNz33uc3jggQf8Mikl7r33XjzxxBO9x9nd3cVrXvMaaK3xjd/4jfiFX/gFfP3XW6fxL3/5yzhz5gzuvfdev/2pU6dw991344knnliChP1KbjCInAu54k6oQSr7rN0OEoji6+3LQaaDPECwv3VnsAlAwb5YcJ7YxjEEchjC68w4LnziPbUBm81Ox17S3gHJta2nY6sYpJtBufLQplqF/LpTaGXhHRMjs4hLEVu97NXZa3t+e4SBCzNUwoIKwXwpTFFhMh6jMcCk0bh1fa3TxqnZFmYrN2Ftjh/BQUVCYNIYr7g1uoptnnz5wg4qJXCitM8FmQpajSgvhhTCA0rOCkYAgcwKabIqbm7izFdpHQ5NPUHzzF9Cz+xsq7z19mgwpgFdli4L4HBgAcLaSeeoaE0MFiCUDiSUFhi67zSD7ZgP2HtBDrxaw/uHcDUgYRk4TotKgchBNwovptcPPTN1rS22Tfph90EAC06icEnjUka77KMUBWFSp0kAUYZVUr70HC8yr/UBB84gFM6UR9easzGL2uy5ZvYaWFDgzQ+qsHUcyMTJQmY7kpgQIufBNNqAOzqSwyuxntz/INd+psJnVopMLocjkMM0N7zqVa+Klj/44IP40Ic+1Nn+woULaNsWt99+e7T89ttvxxe/+MXsMd7whjfgE5/4BN70pjdha2sLv/zLv4xv+ZZvwZ//+Z/jla98Jc6cOePbSNukdUchxxsksEEWCACBFEJUAMpk/AhokASgaFCRyWBH+7YxEIjMC1rHs0ICCMksEbIAKtiXq5743OWCitWQEsiGUUnrfZ6mpOWOVj7EzbESQlgnqcGaH6z02i2Qe5sQk23o06/wSgyzEfTqTXMHyEoKGGMd8OzwoDAErAJsJmhPvAytQWR/5/K6ag+i3kMpJHCEIAEAtqYtlBBotMH5UY1Tw/0/6qOZhhIKZ/esmWKmDWathhQChRQhU6KyEQ1FLuxxxpgDTw+7yJLZnvczgfPG9wPucM2yQSdPo5nUmGxso51McdqBAFENUb18HWsu3XK1bgt4ieGq9T9YWweK0oKDYgBdDH2mTFMMrLJy9Tu4Hw69M8YY/yJpGM/sp0nDCBx5NWGML5cegHUwF3g/AwkAQUFHfgrePBdyIhjVvW/cZ0H0KWx3MF+LpO+x5ibD9J2lz4z/hf/uWZci9uPgrI2qfMbWyEzjxqfOOJX4dnSOm/4WEkAbQJHQdgwgvyV0zQwdFiAHEgBv0iTzZpSCmguLlEjLY/cBBWmOn/p59tlnsb6+7n/nWIQrlXvuuQf33HOP//0t3/IteOMb34h/+2//LX7u537u0I5zuXL87hKXhBUgZx3S8x2AQOmD+QumAYHGv2CK+xNonc1y5tujgZ+Wu/0EG2g6szRZQKwW1tHQeV+bWe2ckGTMEgCMKWAvnVtO21E8u581lqthwCqGvi9Ct0C9Z/tVrUYJlczKKRgh55oYBtKgNtLPKJUUgJGWpSgGaGSFmTZotYGAzVHQOsBwaqBQlgNAN6huztvcDkPObo1QSGCtlL540m0nKkwbjf/93CX8jVeeXtgG+UIMlYRWBpUWaJTwyZIGhcRKITAsJApdQzRNsLd7UFlDzKYxSHB/ZrLnwQFVMfRe4eMRKJyvuuVWSCUx3tjGxS98xadQHt5+K6qbbrL3fGUN8sRp66znGASUFbRjjzzdTZR3onQ9UDBxuvF5xItAAA1pnghtV4a8ITLOZeDpe8kUYuIAzL/7lN99Nnu+3yI/gpRNyPghcN8Ew8eC1M8IiJkD7tRJ7IG79i2kZbVYhAvg2DghnANs0r7ugqcUzPhrl+sT4J6jLojyZs0UICR+CoADGK5eRtZDiNdFSCcsLOlWTkRz5SbAy5HDZBLW19cjkNAnt956K5RSOHv2bLT87NmzvT4HqZRlib/
xN/4GnnrqKQDw+509exYvf/nLozbf8pa37KvNK5HjDRKA8AIzoBCt4wxCHxqnMSACD7Su6c406I8DhKRtw2YYkc0XsM5GsoBSCno6BrwPQlITHsgnuElYBhvRsGJnjZTbQLl496IKERXGloA2QmBw6ysv80IDw9U1n61wa3fPOuWdvAkAMB6PMZ5pT8krKUCTUm1sKOKJkzcBbvvDlu3RGHWrLUCUAivC1lnQRmClAMbK9ufp8zu480SBlZU8k3Hm0ggrhcRKaRkCElKeSgpUUqBoJxDjMcR0NwBIEgcMRDMNkQlE0zqTkE82U09CSd0kNEyurEE1NcpJja2nn8dsZw+D0ycwvOUU5E03WfbAFfISlYtwGazGpoU0Dp/E2bBjBR6zClFf2IbcByEn2jVmnBkiTjAmgzlCN4m/AbqsQtQo72x4n+clcIoYvaTd6J5lGAKKJODjS2qWiSIVyLGThY0aEQOEmJERPnIquqY6jDFZ36bcefUJVYukU0vDFDMAwfbBhUbqNm/P574FfOLCIz54Qqpc1+p99P8QRLr02gdt43Kkqiq89a1vxWOPPYZ3vOMdAACtNR577DG8//3v31cbbdviz/7sz/D3/t7fAwDcdddduOOOO/DYY495ULC9vY3/8T/+B/7JP/knl9W/y5FjDRKiGRqQpQUjtN1HpfcMUNHgwwGHZxlM99g0A6EZBoUTOvskYGdToq2hZQFRDCCGq9HskidK8nkQpLJ5/HMDlSwikwKfMfqZGJ/xHIIUSeTB7kxjt9aYNAZ7sxaVkhgUAkMl0RqDcXO0A0IlgcY5kxQSPsERmQDU+AWgqKCHN0Fuvgis/LVsO7dMz8IICTkeQ0y3Q756Zx4y0zH03g70dBJMQ6zqYZRYxvmczM9QF1fMaya1T4pU3XIrAKBcHaJcHaJaX8Xg9EnI9Vsse0AOitUQKCuYctU6nXKfAy5tE9g1p9iUtH+tEVAAZtoxAqw+CRAzBqmkq+i1aWEHRhIpAhujpIBiLIMxOrALbTP/Xc2ZCBKFGUUr6GQfWn45Qu8PL4XOmUICCA6YUxQJpXT312ne5Jkpf+7zFFV+TZiX6Dqw62O/ttGYEj2PPCslkMk+2b3+VGnTT07SdN0FK+bFknL1hczmTElHIUKKgzsuXkGk1f3334/3vOc9+KZv+ia87W1vw0MPPYTRaOSjHd797nfjFa94BT7ykY8AAH72Z38W3/zN34zXv/71uHTpEj760Y/iq1/9Kn7kR37E9kEI/PiP/zh+/ud/Hl/7tV/rQyDvvPNOD0SOQo41SIA2/RUI+waBdPCJisnE+9HMn3ticxunsQsAJOE9xqB45Ruzh68vPBcAg5AwbQmhhxCqctn3pl7RdNLhci9pOheasRQDG2ZpDOBs4fx8jRAY3HR7rktXJKnjoS1+JDBtDEppndmUUwrFASoyXtgeYeqyKd51az5yYjLagdANBmoIBVspU0x3IOtxyD8wGcHUE4jpBO3eNur/7+9g55mzOPe/n8buizuYjRucfs0prN62bsslVwXacY121qAZTbC3sYdm3KCtnTdGpVCsFFi5aYhidWjrIazYSooAOlUFVVUgLSAEwNPBxlUpbOvGV27Us8aHNt76ptdBrq5CuugFSBnK8NYTyNWTELKAMCvuuUwAL/eZAexzoVsbBeDAApwXuGUThDcr+V3cfcw5MHLfhdQ3iG0FCQEtQMVLbbsAAJs6VwAhqVAHtOtwUJPMfBMl5B3/3O8sQEidNU0SJpkCAzInsIiPKDul4stDWXkuPKmXhL2mgq5BhjHIjmM+ygDWkZHGp74xjwOE1A+hL7FRaubM+RmwGhVpvgieGKrj5+SvV9U97g0k3/d934fz58/jgx/8IM6cOYO3vOUt+OxnP+sdD5955pko3fPm5ibe97734cyZM7jpppvw1re+Ff/9v/93/PW//tf9Nj/5kz+J0WiEH/3RH8WlS5fwrd/6rfjsZz97ZDkSAECYdCQ4BrK
9vY1Tp07h3Fefxvp6ojgSFiBSqN4EED+00YAyxyzRiWhI2qhue+3CvtcXnovbdW1F9muea0GGgYloTP/icXqTigG5Ogz+/NkMdnD6ZQv7d6UyGY8B3aARBbanLcaNseUEyIlNCtx+qhvtkJPdvTHq1qA1Bq22poqZtr+NAQaFwKmBwtrkIvTwpAUFs7F1ZJuOIOsRmnPPQe9egh7toN644MMGJ5s72DtzEZPNPUy3a2z+1SVc2Bxjc2YTL60XEitK4qRzdGxmLXYbjc2ZtjkctMGKElhREmtK4PRKicF6hXKtxGB9gOpEBeVKMpcrtuIi1U+QZQHpwEKOvqRyxoAFFuXqCqr1VVQnV6FO3+xT+kJK74cA2FmgXFmDOHlTCG0lRZY+r0xB+ueKWAeWPIk7NZLwuHvu6KjRBQkpQAhOeoFNSBPFcF8inuE0mAWbwAxShBFfn/neCVnOSYa1iMYNni6ZMwY8+RNnD9m145lbo0O69pVgDtIULZRET8UmkgToJOHYxHSKNvi7UP4Vb96aAxI8i8DNBSlAYP4G3KSQZm+11zEDENzn9s4ubvn6b8bW1ta+7PyXK6Qnnv6Ff4KTw4M5GO5MpnjdT/+bI+vr9SzHm0novEAJ9SaknbEIGSqcsYfWsAfWZ3NLQqwABEqOnP+ABCm7AaNH6otnQIV9fB9owOGzl1bBYBDZStOkNzx3vikGwaxgNGDceZJns5BWYQDdczpkGTIbvxJj1HsNlAie/8NCYjIeQ9R7GJy6ZWF7QgCzxngWgZSOksDtqwXUzlkU22fQnrwNcrwFvXkG9V/+KXafPYPR8+dx6csXMDo7wnhzgvMX9rDNzB3/99b0EM98DAngVClxx7DArZXCipIYrhRQpUKxolCtWRChKmX/SipDLCCUyJYppvLOxUoFozXkYMX6N0wsfSzXb7FmhqKE2dqwzo+jbUhZwLSzOE9Gzm9GSAjHSgmixxu3TCrrr8BmxrQPiRFOAQr7RYsAFAAkfgh074gzcG0kjERrAnhoDQcW5Hvj8gGgsc+6dO/8PAdDIW2j9vXoODV2wAA7Tz4+RCDAM3vhXcyFiqbC/TM6UQ1J2ecO6+P7K6PzFEIH5kS3Yb1dCZ4q2sxqVx669v5PnobneRAyAIH8otIKoTydd/baRRdA5j+PWJZVIA8mxxokeHodyNNz/GWC9BXOoGEHSTcwpK905wHnD0iibP2Mfp6kL3wGXXsPdMCnX/XtczDDZ31FKBoEo0M/ogHNLruadNFoprFeKT+rXCtlJ6fCPBk3GuOZ7bENOZRQEhgoieFkE2JzF3K8idlzT2H38/8eO8+cxfYzF/DVP3oGf7Vb48ykwebsaEERFw1gc6axOauxpqxj48q2xIlCYkUJrCmJU6VEUSrI0jIMqrLfpZJQFQEDhXKtRLlSoBja/Adt3UC3LukWo4r19obNwqhblK94HWMVasuoMBYBRvvCPb6OgFN0QkiYduYBA1QRcgwQkAViIOxSdlOIo5CCmRjmm5Y4MMjpUi3gk1Fp43xK0jYIwLSNSzJEStF510cgOwP66XyAmF1M7eeZz7R6JTEHXITIm2o6ZeBbxozkAELfu8LMMX6sUmzcaJvALMoaUqmQk6WpbRGs1hXDSjMw9oUxEntAYdZUpIqZV6Jrlet7Og4e8cRlKYcjxxokAOiCgxwC9yYIvqyFkQgx2TJjO+Nt0OHS2QWCAp6Mx35WYI+nbWQB7ZwqcT7b4ccxLANcOoNhXtS+UiUsCFJREhNXyjdjF+2T+sJzqK4g6iEVSqJEdQ+oTsF+GATADqKFBEqncAfSQNR7kOMdyIvPoTn7DOqzz2LrS8/guf/fk7j01S2cOb+H/2tzjNk1Np6NWoNxa80SrTGodagMWrUGVS2gZxqytKBAKoFiaEEDAKhKolUCqlLBT2HiwiSLErJwPgmANTusngSGa34GKdoZ4CJYyOnSO0cCNl0vtx+70LZQXdCZq4S0/gocMOjW/tYCQmo
/k1b8XaAAHsLuiH8DASCkialSybERkSSz6qzSob4Rm5A2xNiDTlpqv9ztROYFGWbP2tiTTE0wURdYgbiIOWiD4/XcyU56Pum50X7CJY4S0o5BLWMhhYAsK+ipBApX26KZ+RoXncOklS2TkuHGj0k9NScW3ZPcPkckh5lx8aUoxxwkxA4+nUxsC9Cst7H2gYWez2BrRNQWzzTn0/HSRjkGgduHVajQF9kjGXtglEvS4o5PtSY07BhonP1fCkA7z8r2cpTmIkbkMkVJSxvPHfScbO7uhfwEhcSwAMp2AjEZQY63IHY30G6ew+SrX8QL//X/YOPJczj7+fP4/PYUz46vrxzwGsC4NaikgRIG41ajkjYkszUCqAE1a1E4sECiWMEr3WrrNDmZQk0qmHoCdfK0TZx022ugX/wrmxvh1ldCF9beKnQLTEK2Ts1z7DNnWCM1IFtAO0VQuufWKRnBn3chgcRxz2b3cyYMlmbYO/Uy4EDPqhRx4bQ0OVOfpImGOpIoyZxToqFshBK9Zre5Dos0LjB/jdZ0TQu5lvkbT5U/I9+DvvDNnOSUa24GT+YHIWEoUsSZO4VUHpSYmU31LoAQMeE7GypcRpEKMkxSIgCVU6JGg5f89ovJAfaqFXhSkIuqeu6jjZeqHHOQEM/IObXYKeiSvnz04ggJ0TplLKzhMjJDcN8DxA5JUU9EGAyFG0hIoXuHojZxSKJ2ySGRusqpYuqro3hbdnyf1MjNVGiZTZMb7Lv7jd4x5eFlDwPs4L5aCIh9hD9WrpPFbA9yvAmx8SyaF7+C5vzzuPiFr+DiF57D9nM7OPP0Jr6wU+OFyfUBDNaU8OGWraPHlRDeuXFFCQyl/V5JgdKxBEJaXwTus1CdqKJoiWp9FYVLoGTzLGhbyOvCcxCDIdTJm9CWKzYxVr0HMduB3tuxHaO0w4mzHuXo9yxCYSso0gzRP7ueGejx7CfRjUPZiCurypAWWZEjpKtrod01Ikmf08henzpccuGplfk2c8x7nciPeaZAOm/GHMx0eMd4JEcq5KSpBVzRr3A+Wd+D1Eza970HCEX9d9v4+6UbwJQwLDOsMRoorR9RxxmSte3LWnNQoNiYuB8GIR2Lad/y6tRuWMrB5JiDBCfpILBgm0jYi+f9Fq6AWiKAYD+lXwbAhrkI6UrWMpCQVIXzQvnaM2wIByMaInhHA2gQMwca1r6r56a+iWWyu7Wvwk77EQVrb6VBdrqzaRVHU3ciLYp6F3K6A7l7Hs3zf4XRn/8ptp56HtvPXMDZPzuPr5wd4Zm9GS7UV39gcUy1l1JYUEMgIORkEN6GXkkLDir3tzoovIlBVRKqVP57uVahciBhcPoE1LBCubbivxfDCqIorV/C3g7aegLlTDemqCD3NiHrEdqtDeitjRCaFp2EDPZmHt/uTA9RTDv34KdomkxBqKzQ86phC3SrUKWwlNL7LBgEJi/KkEptJAxUX72G9LjZbIvz+juPQUiiPThzl0ZxRKyioPfURGCo02VXzr1jMun7Pk/6AAMpaKNBeSjA/SDSzif9821xnw3eft/x+f3i6/l28uqEQC4dFw8mxxokZOu352YHXHLsAkfoYn+0EinrnHA/gFBHwg46op1FffbOP50DuGWafXdUMB1bCaeIHaUohIt0YOMSAQUhBLZHtnz0+lo+22An6ckVyu7eGAUzBRlVebu3ZVVazF74S8id8zZ3QTWEGp6AufAcpl/5Arae/DJeeOJJXPjiRbywsYf/e2uKyeVUaFogEoG5AOx1rNhsnxR+6wZPAl4tG/RpHyUsGOAggT4rKTrOisWw8D4IqpKQpUJ1orR5FoYVypOrPv3y4PQJa1JwnuQUzgaprC8CYJ+hjecw27kEs7cDPR2HUEkfqiadp7q231XspW7D1xgAYOxWCg5MTulxZcaXAZZW56yC63O0nenuB/SbBogl9AWQWvIB6kY3RYwI7Z8WcuhjTUSXvSNzCRADhMi/AsJVk3XbCod5AFe
pEaE2BeyYE5klef8vw6Yf3Rs/Hgp4yp8YBKMtvTHHxJEzv+SOnX0eAKS5Y3J95ibWo5QlSDiYHGuQANCLzR/IxMO5DzD0oHRhjKsa595Rth3RoEBQ0vlZUHJMGgiFK8OceSnzHtgyjqwAPF1ZJBSekYVPg5vCnNYYDxTmyiEmN6mNRGsEWi3RthpDZYsiWV8NKn7Uoj3/POpnn8bZzz2JzafOY+urWzh7doT/dWmC0WU5VFgpE4Wf/pVMuZMil+4m2vBEe/VszgKn6JTAfjKu+UgFJTxTwL8XK4UPf7RsQuGZAlkVzsSwZsHCqi3WRGm5TdtC720DWkMUgLrl5TCqhLr4LCZf/BzayRRCSqgTJ60XOxB5oqMogWoIQeyBW4eCpxPmOQAyVLIJiXuyCqkPgDtWYaEsssszZRV1bR4FT5s4h0zfdyEBBICeJkKiYkymzftR0Lc47NO2LwSNDYFZAtyYQWHP5CeRtMevRbbQU05ypaeTe2GAbq0HMtP2tZu7lrSM+aHM3T5dx7bR1fVhMlzKfDneIGEuxYZoULusdhhlCtHPSnQKSeVkns2Tvaz+JaZVPZQq34YXxxHGlZDlsye3nXIUr3R28+3R2AIHAAMlgLb2s6aDzteJrTAmVBCkSomymUBtPgdsvoh28xyaF7+CS3/5DC49/QLO//kFPP/VLZydtjgzafYNEKy934KCU6Wz+zufAAIDJRupKYkRKX4+wyeFTtLOWrafUwAsU6JujVUirY7aFyxiweZCkB1wIF1ypXJtiHJtBWposzUWa0O7PsmgZjM4Kucn48r26sYX35Fl4Ys8UTperlwELymeS7nbRyNHnUgc1RKF4UUnADwFzqy90AFmLkjZAL+9IfeHwLDpsJ0vP00UPmuXlkfgRlF4pwwmFiAABBMKMuUcLa3JzzUlwjtG/giddNUAbE2jPBPAxQdULIrwYG1z4ZVw4w1jkHA5+VMWRn/l2IKeftF1vhoi5CFENxxw/+Msxx8keDNBhhID+uvW9wn3TQCjSyVR+d246Kg/mfZ4X7kdU4qkwM6CrmWrvpEDVAtA1OE6yHA9lI+rhq9yaYyNPrBJkA6vbDM58NH3QkpUEhBT63Ngzj+D2XNPoT5/Dpf+8llsfOEFG8L4wi6eHs1wsW7nmhaGUuBEYcEA5SEohfC/yTHQKmlr+xdKwLQGxrWrW22LvjgfgWqtimb5JG2dAQlK2tBE115btx4omNYE9sD1gRImAYAqCwsUGHtQrlrTghpWPitj6klNjoYoSghtlTxVjjT1xLINRWnrOAyGwHTiIhhkYBGoBkhpS4kLCoFMri+xCNzEEJka+PPl9wnfvQkAAK8xYNeZ+DgJgxcBiozJIHrXefu+kQxVnyjBFChEnvqAr7VAvgdAAAi5p5L7CNn2A0DgowF3NE7VctaNIoMIcmoqTZWdExEBGOlTAXsToO/I/gFDaLwLEnKO3fwnrWsP0YQ4T5bmhoPJ8QYJUsSDiF9Og4j73Udj5mYrfkADgNaF6RS2nDQLPyQkHPloe1+E+MUJpoj48D4aYh5CN0kVS6PjAZW8pdsmoF0RbMAQLISysL4BhZAopMyPRAcUZRoLStqZrZ0w24McbUBvXYDe2sDsuaex/ZUzGL24gc2nLmDjSxdxcXuKs1OrkMlXoDXGmwi4r8CKkhF7QJkNVSWtvd9nOix8FAHlIPA1EVrjGQQ1rGy6ZDcIyJJFmbTaMQY6WqZnjf1zwEA7VGRa7UFC4doVrgIdZVTkwIBMDBwYmNbFrbcsyY1UQDWEHKz4Z7s5/7xfL0/dAh7LbooKPuSxGloAMVgJ39NYdzaT5sWJIn8ZBwyydDYQqGsyB3h/gYQhcBLVGzD82Y7f0TTDID/WXMkxGEYDsnBAIWZPCNj2qS3/3vf5IcH5IMCBChNMMlTkireRMhNSiH2HhfYJN3fEDISJ19FyIUI2SwAQcl+RUH2bpH5YubPpyyVxlCKkODhIuIICTzeKHG+QAGS
p02gd4QPoqKgMgC7zwB0Y3XLvo9Bjtpj3IgBh4PCHzGzrgQI/tutPFDrJUjv7zGqGJWLRrZ/tCeaNLACrBGaT6JxtSufDYRFGe2MUsz1fI0BOdyCmI4i9LTRnn0G7cQbTCxu49JfPYuf5i5hsTjA6N0Izsemb1x07cIrlDSDTAfcpGLishMQSWKe/0kcJhBm/cGxBkVXW6ezCJECAREibpIoKMWkASlU2LJGJBQzaH8cf0wEQWZYoViwgINOCrMoO/S8K58xFisXHsbcArzbJKvjZC87Yh6KE0DI4JjrnR5vG2RUEcwXGIoaA5QKIWAQgYtN6zWbsu3csVIWNcDEalMkomzgoARnRuxkxFbRv2+s056MG0veJn6uKGQSAK/HYDyFV7rl3Xjgyj5sZyPGVgwIPRrpOCNEvnTmKnGtwAIQb4Dp1MoTti3BttHDzA2YStPuHdNhAPxjo05cpMUCb8cXEvCz0j1rKdSPHGiRkbWSpMKDQRQmIBxrRTfzRJwKLzQP7kc4Lx2lSZ17gAMGzCkC2wI03lThc4/tpNLyDJ83aDqH/gMs0CYAXpiGA0G6eQ7txBuMXz2Lv3CZGZzYx2Zxguj1FW7eQSqJqjTUhsFHGOxo66r6PLSiGhU9jrIbB8ZKUNYBOYSVZlu4yhAI3Wmq/jEwKORAhy0BfpnZKo7U/BlWBpG2JMVBVATUcREVy4kqk5A8TEiBF63joIt+e9qHvkQ8Cz7BYhJLizCzVzb3Pno6kiBGfMUph9xGpQkZ4P2yT0rNhC/2F9uMfgQAYOk6URrMsiWW8jjvcJf47KUDgQv48XLjd38C+a9qYjjJPgQYQQEAvM5E7vtunT79KB8KI1aC+0G/qixTCOTnHpg/pzSHBPAH0g4VcP3OSmlSvNlBY+iQcTI41SAAWAwVPe2oEpydOXyb78xlPaCQMkgIhn3xKr9GyPqSdLp77EhL1qnnq1uCTENl3ZRLRAeQHf8qOx0wX082zBy4hLeo9qGo19LuZQM7GaLY20G6cwd5zL2L3+fMYn7/kwQEAFK7SYlFrDGetBw1CCVSuIJLPLcAiBIK/QYgOICqfl2r2/WMlmwk86FZD140HCaTY+fbhVsTMAmchVBm/QhxYcCDh/Q0opJGluY2UfuOq92kJgRJUeMdoa4IQUgLahTC69aaxmfOgWwjqDqXUpesg7Syayoqbchieifhkbd+NiXOGuOefhwMCSGaeodwzbysAl/g5pndNmMSXAIjDm1l7vSA+BRYc2KTrqTn3mfohpIeg8ykkYCC8shf7mCbEqafjEEoeStnnr+D7ypRqMBcsPHxHcumvu8fqRkIdFttOkxYpDq/NhcckwHzANl6qcuxBgpcesDC3uiOt79s/9VlIfBvIjqdYOzl7XOppzN+NznvC7ajkJS6Mzd4oC0AG0NCZuSWOXFEpYFn4ipF0LcTscKohyskWzGzssv6NIfa2UD//tGUQzlzA9ldexHhjG/XuDAC8v0BbtzbKQDUsIsCyBSs3rfg8AiFKQESRAcXQ0v6yLBP7fmARglJXzt6v0Uxq61OgJNpEqVMb1rHQMQ5t630Q9KyJzRFs/2BaKDoldX2aW6kgShd+SEmPUpDgohM6TALcYMW3J5MC/XbmBUjpfBBcMR5V2oJgzmeh43NAQgDZ+7M4/wQhI69/Eo1QvdH30WdPjOfUBCAMgCiTYs4PIT1vDixov8Rfwu+bAweJcN8im+LcMKVNtL3IKlUB64xk2Nn1zaglrK9BH/OYOkl2AEriANl3DL9tzoEyo4w90RItYx5WIoxNuf1zjol9k6A+M8TS4HA85MYACQsGBBKTKvv90Jo57990mWMYIi9vdAcFomf5C9XxV+B97DAgrvCObkJiIt4XbiYm5zNVWmc0do7cZHEYImZT97cHs7eNdvM82s1zqC9to97ZQzOpYRxXK5SEUoG+VzNX2Kg0PiKgOlGiXKs64YLex8AxBmo4cCChiCMDUtRPTEJRQmiNAoC
pbIVFWQalHwGNwTAodQCS6H+qheAYCNqXgEiuWl5IZsS+UzIjij6ABQBGt6FKn6/Ql7lXKf3pWAlfwldKyMGKBQUu375P/50+6/x3qnBZobA+haZZDgLAKtFo5suUhVdilCdA9EQLJe+YrRkBZjoMvhK+r5n3uW+uzwGCyQAE/j0FCvSbbzuPXYxOy1hTg8kALiBmBwJjECtvvl0KEIJiF1nQEO8bt78o3DJlTDvnBmSBRR9YuGoiM2PClbTxEpUbAyQA84FCxoEp/c4p1Fhkp8hMh5EwNjmKfRFlb12HVDjrwF8uQQyA0YBICle1DSCE91Pwdl8acD3d6pRCUQUnNcDmRNDMjLEfv45FopuQFnhnE+3meUwvXMR0cxez0Ri6tkxBuVK43AIaWgro1vgMhAAwWB9E2QfJ0VBWBcq1FT/DV2ReUBJqdTU45VH+AACmmeXt+lJCrlrTiNK2BHOk6BNzgFfkTNH7mb6rsBjatixBBxAQk1BWHVaBF9IhQGKvadt/DmlNBgIdvIwvmRbcvc8BBJtgCIGaBzqzcD7jJkkd+Ggm7hWSgXMeNC6ltegABUPHykhkrvDmD+3vH5crBQf++4JQR4EYDABB2eZYBrou6TtP1zHnkAjMV9h9IhGcJHP5GVLQYNvvHo9vlzt27jyy/UkmQLn1834fmUjZeW6uqI2XqBxvkLBPapHWpS//ItTPt41CEUl4PLgIJgjJ+tGX1CR92ehFzYVFcgYkVOgzEesQOZYRe8AK09hd2EArK2ueOISHX8z20G7ZKo165xLa7Uuot/fQTKbQdWOdBh0tb1x1Q8opQGxBsTb0uQO4XwEHBmkeAV/itijtzL9iEQdaezu9mc18e5Hi9teCnADb+P5yVoDXOwBssiLWB1rvoxN4+0AABW5bb3PnPgBUeEkIQFQQlavuaBvwNnk9HcdAgbInlhYQhjK++dTKfh3Z7glIIAEEhgHZxOO/NejMvsm7HwCEMExxxbR5ruhYN9dA17ehVzjYZd9TtoD7A+wn3DA3LvBIgYOKdfpkDEzCDvRJHzCYxxrYdnkbImkj7At0QVYKrLp9EtG2vcmclnLs5MYCCegi2b6hgC9f+CBzuyk3VbBkMcKEOQJvTrHBNz0+n21ETAJtxAc+o+PysrxP6UwqdVhE3KbvippjZrkcEdIWH2pmME3tQID2kQLFsPK5BkyrIasihAuWznSwUkXggBgEMjd4gEBmAFjFK1fWrO2dEgkx8TP9BNh1ZvS0PWUqdPt62z7LQRABinlCDATQqTGSq05qWSMRZtjEBvGZvzMzycFK3C7fjpkVcuAg3i8wT+SQSAluUrXMw/gM+52+X16BOjZBSbfM/QYAARNFCki/bwAQVCitT2llpYcZI4DQatOZGCzy4E/1IV2XRWPGXB8FGAjRbdsDmEwUFu3LzQopa6AYMMiZKnw7iVmhz1eq75pfTlQCB3/XSoQK7+JB2nipyg0BEvYDBBY25XdKzBJJOCL4dnxA0toGG6WZ6IQrbCPi0DF6CRV7yX0/MuYMX142LTXN+yqLqHJfDiBE5+W2mYzHgNEYrq5FTdYbL6C65c55l811vgh2em2d++xhXDQBCh+Jpmch6RP5GqiqRBpRQDkFIn+Dsort/YMh5InTsS0eCLN3IJ5xE7ighEJC2L67ayl1a8FOdG4KKPorIPLy5N6Ew9ZFoI3vn/NvQWB7bN2OKmYcWrpvNrEXZAFTuFBGl0vDCNE1MYkknbK7Rt6xFQEgtKZ/ls2f31xYnz8HQQDYALobq5++mIbPeo2F+QQUVPrM+p16zIeJ8KJMuf76suoJ5c6VfFq8CeiPQvDd6xl8hEAIVRTcF4LvHO9DTIz/DgsQlOSTi1AnImdC6JP9hDnSeprYcNkPCLjmQGHpk3AgOdYgoTUhdvly0TAXr5hzyhfIRkWA9uH7AtHoIQBQ2KF1vLKXO/u4GfQrjraJj0HfeUpZeoiJ4RAJ0+AUVevmGBa49FyQ9Pw
WSWsVq2nspywLDG46gWJSoa0bzFw9BwA+l4FU0jse8lDBKBkRSwLkMwhKBbGyBjlcg1g7CTlcCy9wUYQoEAbw0poYxs28+YzaXyegw8J0AAJ/Vqj9tnFFq5qg6NP7w81TFM7nfUmsctdJkiO/DTmrkhTDkDhLt0ChYXgNDqqwx58BXt2QAWzPZgkBBVvpMuecSAq2dd9T4YqsT9LdBJiDoHt728TxEeQXlGFgFonti4BxHKOA8QEcMRBI+snOm69K8wzktu8715AfIN6gLwRSIlwDYhGUoE+bVp22iUw7QP/1yfhs5G5XzhTL2U6ShWNI2j7r1+XUjVjKtZNjDRJoBkQPYYcNEHJ/QCEBCNHDm+Sfz0piW46aNhpUBc/w7VmbnZdlXuSBm2FGyiViNFhGRn5+QkKoAqIYxklh+CzYSX3+mbifc6T9q/8LqKcwzQxCKhhpzQuzVkOWBXSrUQwHnfwBBARiB0XmfEehfOSUyESurUMMVyFWTtgy1Cm444pRFtB8Fs1qWnAnzw7QApD1nGeggBS90Y2b1VfB7JQwTVEbMn6mPJjw4arSMxSGAEVqKqC2OHtRDG2oowtb7FwLpiA0vTzhw8XxCxhjHQ6JVaDtia7vhLRlZpe+vSRFcWQjF+HYBBZ85AAAaQwaBxb4e04JnMCWcckqZ1gAJJyC18zRMkpylHFe9OeJvK0/JzGwyKzn5pbkutv2hWcQqGKpFAKFDKyBkiIAAz+G9XQoeZf5uJjbZZ5zIu3fOafcYf3KzDh6tUAC9zs6SBsvUTnWICErGeU4T9KHvVOeVSflTHPt5SjkdNYppC8W5UO6UhNG2v+MdMrc8oe3JYBgAMOyMerGMQwtjJBQqrJAgc+GG0azE0uRJNrZ3N1Dqw1OihpytAE53gKmY+jJyFP6QipAKQsKZpYxkCuVWxfMDMKBCR86KFVUW4D8DCAlhLPB07nK1XWgGvq4fzvqJ9EaosdxL7ln3oEvs11uoJcyXB+hG+f8GTJZRjM0rgky99XwfpKPgAwRCD66pW1sGCxnAgis6NCGcWaGyCHWXbN5dQfmiVckV7Bvq4MyNQimBg4EgAAWPKvgtknTB5Pi5DVTFjnccZpcQ9isgsK2xX0UgBgQcUmZkXkAIa3BsAggRNEGrGFiDMjXgEwLStrKptKxCXGCtfgZixgxejfYO5JOoI4iRJGDuw5QuEogYZlx8WByrEFC70OdOPSlL0OH/kpnoryYUuqg5pyKOjM6dzwDV0WSey17pd3CGBX6kLAPaS56wePPacbLbMnRsUmx07n4mS58PQW0NaRuYFSJoqisHwVR5M0EwMvsoXSD9sTLOi/xajOCKYcQoy2Iegyztw29eR56MoIZj2DqCUw9ga5nnfoHPtuhc1SUw2FwOKTkQoBH/b4oEQvtA+BSC8eFh4ySgBjEVDr5ZzgAEN1Xfz1lh34nr/1Wmyz1azMMWj8TWdh2uRe9354Gcbo3iemjL+lP9Jy6Qd2wnAJkIhC6sW2UNjqhMUDdGuiZ9oqEymDoxCxHs89UIvodYVadUs6KMQAh+oH2S3l7+2ETLgVV3ueNH87dRP2lkEoOFgS69m7eShqfz2fsQFeh52ReH+P+sn0Q2s2aXtwylZmP03HIpECsQdakoJvomUonHH6e0tvpMH6koaE5yaWmXiQBzCWpu6+mqUEcgk+CWPokHEuhFzDNY+CF27/SnXPmBW5aIJsxb4cN1LycdKfpDkAI+1M+g1y7wqBLCyaJaihJEimz8EJLGCMgRAEprXkeugGIIXCMgmhriLYG6lFgHnTr/QoAQMymUDtn7Y+1k3652j4D0UygN8+h3dqw6YCnE1CSIeNyB1DmR5s4KVRZFFIGcFCUEBSZwNiDKBGQKu35qsqaDNLZNgcDlFEyVb4ZSVML26sHUMhfNj1v8qwZADMdb8vt1zTrs+DCOq7yyUjfWNsdoKVTbsQukEOffXWNACYzbf1ztLHHZNO3ebND/k7QNeHnQ9d
oEYuQAgS+fRRjz7z2BbrAAUAAD4aBA2GZCAUBLeDYALtfrw1eBAaFWu8L3yPlR6CBRx3xvA7cHyE6VIZp8A6O3OkxF+6Y7osAElJ/g+z5iuAQvdA62MOs5kBumlUTQBT2mkqafyEstyvokYzAwj7MmUu59nKsQQLNajoAgc8W+xCrp2uvwFZG4IIDhUUPfApg5h2zpy2yNZPDZsve2BCiZhUFpLAlmwvEjEgziatKwoGktoZ+6o/tbyEgZnuA0WgvvYjZs38JUQ0h19ZtPoStDZjJqIPOKTGRpvBHKePcBs6/QA5W/HfLJFiAIFfWnINfAa2qYC5wnvyRDwbdW6LgZYEWjgngBvdENOJQuHRQprvQNxiGmgXGX3Ne2S/KwmcCo2DYbDAnfHFKAbcMuPA9aFmjA0DxuNYYtKwGSe646SnySABvm/fKsntBYuYh3T70g4RCHPnBTRruZ4KStZuGIkXCOVUuFKY4ufJblP449RNIz9H+NnMZhdQ5Me1vHzORKln+rGSPlprOnAlzoe8U++6BE7r3i7fS58QZNW0SE5ETXjQqhLYiVL29GrKMbjiQHGuQ4G1yOQUM9L4w3I4XMQaIWQBfcjYnzlktjHFOCacP/hx2o7MdQ9id2HplZ9S1jhWU3x3sRdfWAUo4oGDKoWMVhK2v0Ewtm0COd7PamgymE5/rwDQzyxJMRmh3d9BMarSTGvXOnk/6U64NUZ5c9SyB0drXNwDgKx6K4VrMFvAkRS4JkhiuQlcnrOOeKuJSxlLZc3AACYhnVFRXoG6N97znAxsHVLScHMJKKXziH4DP2OIBvHUamAbKVoe2jcnQ7LD+iXZmbJxd2XiGAcjPuvgs1pgQkpjO4qgt+z1Q17SsNcCs0Z6urpTwDAiPCgLiGTZnRMhilHPmS80MyK7rcm2mh9rgSlIIASOMPyNiEYxT5FpYpcRTIQsGDPqSKPWZFojC1wimAtqSg6vQvolyFKTb9Sn3XNbDhWAgkfjyyXBcsc/kU4nzanqduMSMUDfHRCfCI/E7Ef6ahv04ULhaIGHpk3AwOdYgYW5UAm2S80xn+0bmACG79L4Q3h4ljIwdGVmRmqytObE3R8xGjh5lSpHPkEkJtq3BLPEw5wMMt2EW0g0+zEdBNFPI6U50zUzbBkBAPgU7l1BvXLBFjeoGs9HERycIKVGdXIUaVqjW1yKEbXMMNABslkQfwkhgoGAggaU/loMV6GIIUw4sOKBSxjyWn64Bi+VPCVECBxwYtDoeACWCkjYGmOkAEPisT9P1cb+9wsyAgXn0szHBvk+KWbSxkgl0svF9o33pHDhrJACrKTPHVKJ/1k+smxIUuRAr0ag9WPChTUydpy3TDJySBIW+d9udm87XKRwNO/ukQtIG8CyCsDWmoSAyYZIB6NB1XuSI2FXidqEw8T1Iz3deG3F7IssQpPvPCxmdlxI5rdYY2pyj0HqAwTyTEgFq2obAWfqcpU2kICLTlaUcAzn+IMFJx69gHvIjm3n6ZqR2sgTpGuis840PiYNVooZHBVwmWiYGgXwPGgcOcnZiEQ00wcGplDFbIpoaop0FB0bAsiDtDLqpAW2zC5JToJlOUAwrNABMq23Og2EFNRwEMwFReK7GgGlqCKWglIICvH8BBwjWWaIMWRMdQPBVKp3/ga1Y6JIFAR4gkakg+GPY+8cL7tD1abTxzoek4KUIs1AJG49PI5UQngsKHvmOISAlzVkImgX6yynifaN76vZr+bGMSRSFs73rwBDQuVCOAp5Ip1NPANYDPvQnbKMRMwdAUBJcoeaEZtj+3JEf3PtqEvDj9bVP1xMIM2tucugm8HEmCIFIu6WmkRxA4NK3KjpecmwRbRevTJ/H9DnpslXIz/7ZbJ8zS1HfPfNj3LFEZ+KQk5TlSdmC9Nzi6Au2LWLgkFa65P4b5EdyOZkaD1WW5oYDyfEGCcBcai1iEfr2m0fNJSF1AGBkzCgInSQ60oAQmZwI/HfqCEnLiUlQASD
MnLJLB7wUIBTSxU4L2EiFNgACb15omxjA1BNfkliunvRKH0UJubIGubOJ9tJFlC9/FeTquk1etH5zMFHsXoIebcNM9qDH8C9jWuGQ8hwIt4znQTCFZQ+smcEBhHJo/Quccm50MCHkqG+aZfNkP/a6WQo00JzCzaJjP4JwTeM4dVKitg8hZXEheUhanA4X4M54SGbX9CW+h3Yf+DbzVQdNPHAHfOPD5YBUQQXGpNbaO1IqGfwZUnYg1Y9eUSFQ8UC/8or6DasgjOluz0EV9Sn2RYiVaACCcI6G9gKkaift0uVk+uPHuhJ9xpmoDuhhfekAg9Qk6Z2j7Xfv5zDn2JxVmAuMMuxBXwGr1CwCfg4EfN0VlxCxjxRChk0CdMQQqX08O4cqyzwJB5LjDxIY6u6rFBfN+NNsd7km+Utj2jgMUUhQnn3h8g5AN+gUZFrkmJOYJXisPIXiUQIbbodP87MrKVAIAG0NMautQ2IzseDFOSiKNolwYEqeChRpuNm/Y1mKV7wOhWMJ9O2vg9ANRDODLge2LLTag6gnENMJUGkIqpPgKiGKwiVCcqGMkDIGB1JZEwMVoyqG0IM1kANi7cBRoy1QIqBA14ErQfJDaHTXJ4HEXkPbXvRo0O1AoFXtoB4aEBCQwkCQmQLWWZBqENjENuHe+IHT07Ld2+8jcyAicNCnnEqXOMfvz1Ld5XL200npZBsNQGvTSy9nWQIG0EyyzLZJs1p2aHc8BQvUqH9cuE0+zQvgz2eBss71N92lr1pjKmlUweVILkqBlkerMsAgYkFZHhWfD0PEad0N4C/mPJYhJ/zx96ai5FwXmUgWCX9P7eQmdjg1mA94lnJ9yfEGCdzLnWQfACEbCZETCk1MoxhIsbOXOPta9rXNHRLJvFBUUbue4ua7Ad6koASgoAE9s+YExhb477qx2RBZkSMzGXlgYOqJp/0BeFbBZjosALVqww9Xb4KYTSDkuJvd0JVhFcomUbLLAoPQKb1cDWxaZMou6BIAUVQDnTuZVjqXlNv2RUybcoAQMQ3JSKoyo53DENbWjTiG3W4uvCe+ZSjCPkYYtHaqlFUwqSmC0+hcOdJeXBGT9IGIeQqNZ/Drsz2npov0mqdKiFPKqT9Cqlx8G+7MZHJwblpIZRFAmDcTzUUQpMfgVDk/Zl9biyQCcCK/PB13InDAmEjADWv0hbUVObbypplpKTpkhrWK+t2zPAUIQNcfonMMBOaJ97lPrhabsCzwdDC5MUCCE84mxIV32IvZVyBpQXSEBwoUq0/tax1T+Pw4aTspOKDkSM7/gN6Z4HQXlJ53TGPMgazH8LkPZlNrZtANMKstGGhmMK6sMJVN1qOdaMaP4RoE7Mzf1BPbPVc0yBRD6JVTaGUFVVmnTtS7npEw/BwJnLkyzMKXV5Yh1bJSDhy4kEbHIBipAAeS6NzJDk8KjCj24Klu7ziZE4AwWKUAwToNhqXaiN4wNkvFA9pFJPBcHIBw4W3G0/12pgRIYWBM0PRc+eUodCU5DR3WpzP2cPywDe9XehYcYJDJxPahq0B4ezl7frptztzAgULYOGYXPKPSAV7zhfthpOdGbefs6Dppvw+8cKbFtzGnP2k/0uVznRlzEU5JXhb/XUjQUBMdqy80mgGEPhMCRRwAMeOUc/xEZl16nLAsPh6/RuG6sPviBrOrhA9cB+TBzQVLc8MNIm5GL3gOhAi5my6T4PbLvoAsKgJwQMFY66xnEmRme0f1WwU+67IWQgLkvc+TIyE4lAHBFg0R0rEWprE0/2wMOdmxwKC2oYq6nthcBfUEerRjoxUmI5sBUWsfxgjAFmE6fQJybR3y5Gn7d/MdMLKAaGvo1ZtgylWYagWFAwY2sY20pox6CjPagd7bhpnZ8sq+NDJg/R3gBgqXLdEIEVUvNMUQengS5H+hdXegksKGwBF70GprVjAIACBcK3f53bLWGGgNzLQODnpOw0gZ8uHnRAmBUong7xE5BSKrCUh5KeviilLEmfM
4UNAmbN+wKVWq9LjCI7BITpS5vAudmX8CFCJd7jpkjPEKkwOB1GzDFQGfMXKlQedGTIxX7DD2OrAORD4Z7Ds5JnrgxM4tOIF2i0xRCJ6E6NyfnKOp7Wdeck6GdLw+fMMTL0XbdSKqElYzFe7fJBHlPxDMRyE6NmKFnQMy2phOUah4I8EAmOmERaa4Y162yhQg8IlOq63/xOVmb1zKtZEbCyTwFy55CTuRDKlkkpP0HqNtAmLm+3HTRgpK+DHIj4LtG8cPC5d9EQBImQFCN5DTEdBMIKcjyOkO9GgbejyC2duxnw4Y1NsjtJMas9EEzWSKdlKjcQDBRirYTIiDtXXbl8meZQCqgS0g5c5HzKYW9OgGoqkh6xHMzkVoSqrkwACkBLRzXCwBwDkvUg2G0rEHBBDIxIC8WYFEOyfEprVAoXH+CbntaEAiCl3rOHTQdlN09gv3wK5TQtjJBwMIIqMacrNhnkKXxCA42u1HOLvAw1pz4p8Zv19mm6TvfcpBGoMkCbk7xpWP5h4Ymf68AkBK1wfFMm/Wimif7jHpe6oU05l/b9+T7WIgM3/f0Ok5AAGYPzmhEOCMWOdNtznROQukG43BgB4DB0C/eepynwX+XlqGzt6PeX4Thy7L6IYDyfEGCTlWIFm/EBxw6Qt/5GwEmR0o5JGcDXnedG7WmHcMvhjxbIXy7gtyNnSliEW9C1GPIWZ7VkmPR9CTEfTOJejRNmajCeqdPTSjMRoHEmY7e2jrFrNxA1UpDNaHqE6u2jwIo21ASsjhGrRuIVZPAusn7LHbOmRobGcQzQRmtI1285w93u6l0P+ihJEaokAAC65wEwEE41IoE3tCBaRyE3MDOD8DOP+M2EfjcgYrz8jsQxQDCuRxzwGCd8JDoG650GyVmw/mzZhERgGFfscMBGcFbNvBo512582kJgtktuHb+f4cYPzuU57cx2Cebk4V2TwllTsvKWIzTsogiGT7RdIHJBYBhmi3eWMUEI8JsjuBiIT5QPExw4b09jSfa8Y9O/sBB/t516JnKFmXAwr8mEctwplAD9rGS1VubJBA25DkkhrtR2h7mg3w5ULCaNE1ZeTMGd4xUWRBiPVidst0YyMK6rE1KZBz4t6WBQWjHR+CqMcjTDa2Ue/sod4eod7ZsyzCuEEzaVDvOhODkihXCltoqbK3/sUn/hztpMZr3nEvAEDdcgfEra8B2gay3oHeeMECEZeFsd08BzOd+KyK4RQl5Ooq4LKb+aRJ5N9AoKAIKZYBy44Yo20KaSFtXghtbPZExgRwh82ZsH4FM1ZESjqlTqIgUPa817QfmSPCMSyDYG3+ti3uF0FiayTAh0Da6Ad4R0oAaFqAD53cQ5z7Kvhl7DxoPfldhGZMZ7u0jkEqWSc39j2i02GrJBrBahiwftr2YpOD3zcz3qcObLk0xL5Pc/RFTknlmIec5cjbwQGfGjjdf57w2Xa6PJWo7XlRDFIi9X4wfROUXAh1j3BCIXd9fN/pmMlJHIgxojaRMbckbYeopCs+3OUJTeQO2sYVyMMPP4yPfvSjOHPmDN785jfjV3/1V/G2t70tu+2v//qv47d+67fw+c9/HgDw1re+Fb/wC78Qbf+DP/iD+NSnPhXtd9999+Gzn/3sFfVvP3LsQYKvhpdbR4o7J/OyH2ba8p8Z0CEoDDIDWDpOjbCmDwMdSkcDcdikYQmQprs2S+JkBL17Ca1jDPRkhPriJTR7E8xGE0w3dyyDMKnRjBvo1vhBYLA+wPDUAMOb17Fy200WJCiJZlKjWl/FTEmcefyPIZREuTbE+pf+DOXLX4VWa+jRNqYXNvxxZqMxTKvdtitQwwqqLKCGFUolI8TtEy5R1sSi9MWZKKw09ckAnHMmKUht0yYbY00wBgZCCbQakEJGyoW+8mgGAg4CYRAtlQzbSOMHLQ4O5rVTyFgpWedJkZ3NeedABIDAoxtIefLffZJL0sP36drog+yHHs+l5qXjzVfi/es4OOBsQqeNqB+
87Xzj84olRWwKm7lKY2KKfh8S0foLtuvdhoVpLwyLznzvu48G3WtPzxeifTIvSdLuUUhf+yFF+o3tlPCZz3wG999/Px555BHcfffdeOihh3DffffhySefxG233dbZ/vHHH8f3f//341u+5VswHA7xL/7Fv8B3fud34s///M/xile8wm/39re/Hb/5m7/pfw8GgyM9j2MNEgRD6BFQ0HOU/ryXlDyL+yIfeNgSbcu30cm6HHqlsEpjYHQDm51JBk9mdwzRzmy+g9kYhhiD7Q20m+fR7u1hNppgsrGFensPs70J6u0xZuMGetairTWEoySkEl6JV+urqNZXQXUWRN1gcPokZFlg78xF6FmDyYbEeGMbp1+3C6Ek9KzB3rlN1Nt7zq9hBlUpyLKAaTXKtoUZDnzaZn+ZVTA5aAYKInE5IWj2kSbbUXaqDGibhpfPdiGtnVubYAIgp0YgpjgJIIA+jVP6mvkhyK7XQTrjnmcaIKDA+8+96ucBBLp0At3wTB7Cxj3SU58HLh2wcASD8eW2uAgEReWn6Rg9B8kxB/nZfsj6J0TiYJlpPFrvlwWWJtedhVCDjyf7yZ3iZB4w6CxLIlj6QN2i52SR9DFGVypXi0k4THPD9vZ2tHwwGPQq6Y997GN43/veh/e+970AgEceeQSPPvooPvGJT+ADH/hAZ/t/9+/+XfT7N37jN/Dv//2/x2OPPYZ3v/vd0THvuOOOA53P5cixBgloW+sb4JSslwVmCKrHIEwcveAlNS+kjIMHAyyrIbcVC+FAA2JWMQEhAhI2WZMGTMD/orHhjKIeA7sX0W68iHZrA83FC9g7dwmz0RiznT3sndtCPZphujVFM2ls9cXWQFXKhixK6RS6ga4btLMG7aS2pZvLAit33AroFrqeoRgOMDqz4drdxmxnD0JKtLMG0+0abd2irVtrslgrUZ3QUJMasiogyxJCSZdfoQTPvEj5DzrX15kgKLNiOoslpWzLA1uanbYDXVbhahWAhS3KYIYgxc/xi3aMBc+eGBRYf5VGIFZKNvwuABTAfg8+DWymn4CDOEMiy5WALgCgrzkbeMqc5J72VFlw4MJlkWJOhZsbcpJjSbqZKfuOzc4nc5C0EmNuFj/PtMGPScft1CGge2RsromogiU7JvkG8MgGAcw1GeSumzb5FSkoWAT4UqCQhrTG4GJuUx1pM9epc/x9MjRAN3/JkckhZlx81ateFS1+8MEH8aEPfaizeV3X+NznPocHHniANSFx77334oknntjXIff29jCbzXDzzTdHyx9//HHcdtttuOmmm/Dt3/7t+Pmf/3nccsstl3lC+5fjDRJISCHzqILcNiRCOSWVcU5034VxyjzKVphuY8I6H7EgAovAZw5zTBseHFCWxNkYYrILvbeN9vzzaM49j8nGNkZnNjA+dwnT7Qmm2zXGm2PomcZs3MC0loaXSkAogUIWKFYKBxgU2lmD8blLqLf3UK4NUZ1cQ3V6HWL1JNQqsOJ8DOq1ISYb25huT9DWLZpJg7Z2yZdaA1GJKNufkNKaL8rCFXOyRZwoNTMAUEZKozWgEPlk+Ep+QvhMgDwpkk2hnC/UBNiB22f1k5ZZKKSMYunJdYGDA4ClAs4ABCHifARcWpcj146HYQPVAwpoq8BadGl4INjP+eyVPnk3aNatjRu40VV0+x2sva8EM2ZrhNwJFNFu3CySFD1/xfqi3nNmlBwYouGbwn8FO7ZUIrqXKXsg2LGo/agPmXNN8wrkQgYpDBOIzRXUTppVkIACLeuTHCuQyn7u3H48qrjpgee3oH4s8gvpS7I1r3+LQITp+X5c5Nlnn8X6+rr/3cciXLhwAW3b4vbbb4+W33777fjiF7+4r2P91E/9FO68807ce++9ftnb3/52vPOd78Rdd92Fp59+Gj/90z+N7/qu78ITTzwBdUQJn443SEiTkAD5JEbpNkbCM8MeKMgoNMkICkNkdCGPYhA2ykEY0wsQvAmkByhEDAVLoWx
Gl9BubUDvXsLsxWcxOnMRk40tjM9fws4Lu6hHNWajGerdmWUPtE0ZrJTyoEBVCsWwwPDUAIPTJ6CGlZ3tu/PVsxnqS9tQlWUMmtHEmiCcwpfK1oaXSsDQVFwBxbBwbTtfhKpEMaxQDKtQvInVbiBfC5s8ioGmBY5EHCyQCCE6NDGfWQnHLBhDZZGtMqcMiUDXRyEFCH0mAhINy1bwGHu+b2pOsOtjBRbNsBEP+NzRLjsFTs1cBxRSynyWrNxyLQJYIEdG3k+/vehmL6Tup8Ar50vhcxaI8N4ZxGABsEAlxxb0sSN9fgdplMh+hYjBtKhSxCBgMSWfm9WnkoYmAl22re+ap7+NCdcqBQtcUuDBAcYi4LKf1NepCeVqEQmHmUxpfX09AglHJb/4i7+IT3/603j88ccxHA798ne9613++zd8wzfgTW96E173utfh8ccfx3d8x3ccSV8u+8r90R/9Eb7ne74Hd955J4QQ+L3f+71ovTEGH/zgB/Hyl78cKysruPfee/GlL30p2ubixYv4gR/4Aayvr+P06dP44R/+Yezu7l7RCQhj7J9urR3fxfTzMs70Kfg6NqPvOD5y5e5ocXK4gyrsnwzKz4b1Kb/OuPTCWRskBwhtY6MW6jFkbfMemJ2LaDfOoHnxK5g882VsfulZXPrLZ7H51FlsfOkiLn11C9vP7WD7xV2Mtqeo92YwrUG5UqBaK60pYK1CuVZieGqAtVfcilvf9Drc+pY34Na/8ddx8xtfi9XbboJQCttfeREXv/BVXPg/T2PnmbOot0fQdQOhrJmiXClQurbKtRKD9QrViRLV+oplI9ZXUZ1cRbE2tAWhVtaiVMxGqpB+WbFryK4PHyf4laKMizTOSFK8iULT7I98xpWwIaS26FWojUB/hQy1FqLshUzZUPIq2l/JwC4oIVBK+1ep8FdKYdv2LAQLoxThHCjvgVeY7NwpZFIj1O3gTp3eWRZE4VOfbH0Hm8kxDO78j9pPhStM6iOdP11P+k3XUEXnA3+N+B8BAQJFKUCg/lN70Z+//uG8CvdJ155KotP9UMmfTPrYH84ooucj3SxypERXmXLzT+4vXUcKkhIWGWLL+J92hd3YMmI+DHsvcvc3Z2IKz6B7LkV8P9N7EN/T8FzxP36t+TGCKS2f2fSqgQMnlJb5oH+XI7feeiuUUjh79my0/OzZswv9CX75l38Zv/iLv4g/+IM/wJve9Ka5237N13wNbr31Vjz11FOX1b/LkctmEkajEd785jfjh37oh/DOd76zs/6XfumX8Cu/8iv41Kc+hbvuugs/8zM/g/vuuw9/8Rd/4RHRD/zAD+DFF1/EH/7hH2I2m+G9730vfvRHfxS//du/fVl9EaYNCp/5DvDiKHYBAQRtZ9J8O8gowUk2vwFXaGydEMEnocMapJ/0nVeNdA6Koq1h9rZt5MLuJcye/wp2nj2HvXOb2HluE+ONPdS7M0y3a9Sj2mcNXFkrnSIvMVgfoFwpUAwLDNaHGJw+gWJtiNXbbrIlngHo8QgAoIYVBqdPoJlMva+CVM4c4ESWhfdJUJU9B6kkitWhZRFWKhRrKyjWhihPnoAYrtk/lzxJlFW4D6qAKYYWHJSDTnVOMgNo9NOVNNYoNzP6/7P370GWJOV5MP5k1jmnbzM9t73MLlrE9bMhkIQCtMtK+tmSlmAX8AVpFRY2NhcTEEKsZFhkBAquQhKBwDIBxhBySEh8ZkMOyRa2FPJKaAmhcGiNETIfloyIACODgNn7TM/0THefU5W/P7LerDffejOr6pyeZg7bb8TpPqcqKzOrKi9PPu8lvR0CRR6kNHFkRLIbMK5Ra1AeUXAddo5WT5wNaIIs1fUz8cpMo1BpBzweDpeebuUcSpocbQMQyM4ipKtvonTAFIhULFbcD4xfbxfg+mv1UYY8UiLZG6BmN0z7upQnQmonQQ4QQk7MsI/OAf48GbzKITpmFJK3Et2PQ8NaUJ1IlVBkngegUPHGH6R3krJ
ZoHJbdWGsgFQVcVaiyU+vX5daqbUKJIai/qmpj9qXNKqovvM7v7cUi9H13pZZJpMJnvGMZ+Cee+7BC17wAgBAVVW45557cMcddySv+6Vf+iX8wi/8Av7gD/4Az3zmMzvL+Zu/+Rs89NBDuO666/ar6i0ZDBKe+9zn4rnPfa56zjmH9773vXjTm96Ef/gP/yEA4CMf+QiuvfZafOxjH8MLX/hCfP7zn8fdd9+NT3/60+EhvP/978fznvc8vOc978H111/fvzI00XJmgFP75E7Io5wFzrAKND9N3sa5FsuQ9FOWHhXyuwYSBPgwVVmHON7xwYnOPoDy3EM4/9X7ceFrD+DSgxdw6aGL2N3yMQ/KaempfmtQTAqsnVjFaHWEyREPEkarY9jJCKunNrF26hhG66sYb6zClSWmW9vYPevZmlGtehjV4IE8E1ytvLcA3HgEV1ThnB2Pgoukj9i4gsnRdYyPHoFdPxozCasbgUWI4kPYImZYjA2rKlo5e8AAwTDEowntssgHIhor6butDR5p3U30NVDvqMiBQWKwkoe1ENCpDZL4RFs6H5JYAwyucpEhY8hH6MrjyIp6eF3KIgAik46Rn9Ofa1S3EZN1YB9Me8KJ8mLXd04K5PmTqYesY12FrPB8ZFZS3aMd09mXOk8GgnhwK00040OetwY2KTdprJmTnPolMvxFc5+8/0jhbTxVg1Q7I3VHdCyT/rLINyni4p133omXvOQleOYzn4kbb7wR733ve7G9vR28HV784hfjMY95DN75zncCAN71rnfhLW95C+666y487nGPw5kzZwAAR44cwZEjR3DhwgW8/e1vx+23347Tp0/jS1/6El7/+tfjSU96Em699dbF7i8j+2qT8OUvfxlnzpyJDC2OHTuGm266Cffeey9e+MIX4t5778Xx48cjlPTsZz8b1lp86lOfwg//8A+38t3d3cXu7m74HdxQ6oBDFP0QAGBpBcIAgjBK9EDBswbk4BAAQh3vIAkAKA8uWtouvXFV+fJKvxmTd2/04OD8V+7D9v0XsPPIJVx6ZAezSyWqegKfbIwDMFg7tY7JEW8TMN5YRVHbBqxdcwIrJ4/7gEbWojz7MPbOX8TFMw8BAFZPHWvsFKxFNZ6inM5Q7TXPsZiMfJljoBiPai+GEcYba6Gs8dEjsEeOe3CwfhRm/SjsyhpcMY4AQjJ6HNoDY1k1qgagWdGHx+YQVuhdQkDBT5om2Avk9mzQJBuj3gibBTaxe8ajGe6tYXnRP2MA62DEiotoaQJOfFI3AIoaNIxskzed90faIvPpkmwaw4z3lOeT3Amzq9CIqRPGrdr1XTFOatARgU7TMFGArnOVkyigtwNu3MiBghbyGGgAAqkOKN9UC+P33AcoSIAg34M03pSASJvUgTYb0CoXepecw/Rj/+WbBBJ+7Md+DA888ADe8pa34MyZM3j605+Ou+++OxgzfuUrX4FlthIf/OAHsbe3hx/90R+N8iEPiqIo8LnPfQ6/8Ru/gbNnz+L666/Hc57zHLzjHe+4rLES9hUkEPLRLDrp3JkzZ1qBJEajEU6ePBnSSHnnO9+Jt7/97a3jZjb1On3SxxkTmAJiD7ghI/fVD+xDrSP3CbgaghIKJkCqNaIKxQDBG2G5WL1AHgwUPfGCN1Lc+cqXsf21B3Dhaw/ikf/zMC49dAl7F6YopyWMNUGtsH7Vug+OdMLbA/gJewWTzXWsXHUSdn0TZjzG7P6vYe/sFlxVYby+io3TJ7Fx+qQ3Jqy3irbbOyh39lBOR8D2pRA/obkdi2IyDnYHo9VJwxasrPqy1hoVAyarqGh/BrJDoOfLn1UdPGlWuohBmNb7M9DAwuMHALVOl583Rp11XD2yGgOMifamoutVkaTGORjh5QGAcSaihGVwmtwAGgMIQbPTNVVcYrNBVQM0eNbGeJBQ1NT5pPC1Jv1+ywWY7BjsKKLbeb006QIS4XzfFW7upATySrwS0wW8M4HTSI3R1zdfqgI
0hsjn12wqVQItt0ktv3CMxq2OuvQBCrHKjI71ey8aUGjVIYBLPU9u3MjVOhroOGjcYGxjsL1IHvPIHXfckVQv/PEf/3H0+6//+q+zea2treEP/uAP5qrHIrIU3g1vfOMbceedd4bfW1tbuOGGG2q6vgEBYV+FhsttqyHof1UFMOG081xyfs6aDQINynxbag4QyinM9GIACOUj9+PS/Wdx8f6zuPjgNnYe2Qluh8W4qNUJE6yeWMOR6zYxXl/FaGMVqyeOYnx0HeONNb+iP3YKdm3D396lbUyK86j2piiOHPUT+doGUJU+rPLONsbwjEG5520SSsYkUFTGYnWC8foq7Po6zGQVdmMzgAICDLQ/Q9jhsTbiDFtgU9TFoomu2Hy4u2MzKfrdCJuJiK+CyaBNCq3aJWNAACAM8NFAr0c/BBrwoumMw3NCGxzQhNTHl82hiUCn+bBLgKDm4QAHH07ZQZlMmU0NUe7a6hZoJpYhdP4QaTEKmoeSYP9UJmofPTxClmyVrakhUuKfZxO4CWC4j9Iw1oBft6ho4MAf38+31o0DpSeEBhQIoAdsua81PJTLJfsKEshq87777osMKe677z48/elPD2nuv//+6LrZbIaHH344afWZjGol7QfAgAKQ39zJVZ7bNhbGCKDAv+eO5SKp0UDHwzUTQCj34HYu+iiK589iem4LOw+dw+7Zbexu7aKclqjquAfFxGL1+CpWjq3gyHWbWLv6eKD8+VbPxbFT3nBwNIarShQnroHb2EQxm8KsrgeQ4HZ34C6e91UcTWBmeyhm0wAWiEnwbpAWxepKsDMwK6uwR457N8fJqlctGAPavCkJEOwIrqBwzDbsySDjIcT0PHuUkPpxfdVvRMRDCQAovkDEKPFB1jRuiajTlc7Vq0NWfquGbRkyAHIvDnn/KvULXb9PqhjvBdI2Du0TiVHq1fvch6xLb2ZCsgdoGEAuobvxA/vgCtrlIkmGjbR9dtekzm1WWoaOC4ADXks1HLWo+zzC88iBI/nEuU2DBAIcKITrhT3PgYjZB3WDOdzgaV/k8Y9/PE6fPo177rkngIKtrS186lOfwqte9SoAwM0334yzZ8/iM5/5DJ7xjGcAAD7xiU+gqircdNNNg8oz5V68dwMZPvUZPHhUxXLmNx6i41LFoLAEkchgSoCPsQAEV0dTlUC553dSvHjBb7X80BnsPvgQLnztAVx84Ky3P9jxq/rR5qjesXEFR687ipXjR7B2zQmsntqsvQnWURw94e0ANjZhjhwHdvxW0WayCvPU/x+wsgFnRzCXzsHsXfTbPG+fhVtZ9bs9ViVQVXBVidFsCjfbg5tOgar00RMBwNomQNLaBuz6UWBlDbAjr1pgRomRmyN/VtarHJyxmFY+dDIFSOL6d6BZkdD4ocamp0mfzTre+L4ZyOXwEw2utj1gRyoAVlYBAwrqkxI+ycu6NuW300sGoos1aLw4GtbD5+cJaQPy9mjYgNww3BUvoFHRZDJBu4zMo/KixC+JwIFUGzAs7jhAkEBBqiiUvsonr9S9+AVDs302jxeRNgVl1c08U3lGtm9ZL64mq6vVpKU0Cvuj1yt/nuebAgqp45IpkUVJPEAulgcixizOPB0UoLkCZTBIuHDhQuST+eUvfxmf/exncfLkSTz2sY/Fa17zGvz8z/88nvzkJwcXyOuvvz64gTzlKU/Bbbfdhle84hX40Ic+hOl0ijvuuAMvfOELh3k2AA2NL46FTZXYRNWKhEjXV5WfCMlYUQMJysAT7eTIVrCSvXCGXCxnMLMdVFsPw22fx+yhb2Dn/gdw6f5HcOmBs0HFYAuD1eOrtUHiCGtXHcHa1cexeuIoVk9uojh+0rMHBA5WN7wtwMpRmNEq4CpUxRjV6lGU1k/0ZmXDGxOOV2HGazDlFJbFizDOeTuFqoSrgQOqMqBvMxp7l0ZbNK6MBArI5qDeBjo8P2ubWAikYigdplVjd9AOlpR/3VIlwCcjZwBUsacBRc2
rLUgaf31WUM4YrEsPS0KukakxWJtEnUGwdQBxX4kMpHEkJa2c94wIdDdM5JonIzfqeTdAQZtsVPXAopKKPkrByoYM6BpQ4EL9FelJMl6pC+NOUwd38gghWqnvB1vQx7MmFTQqFcI7VQd+bxpA7AIAUVqNhVKulTYcvJ4HxiQcykIyGCT82Z/9GX7wB38w/CZbgZe85CX49V//dbz+9a/H9vY2XvnKV+Ls2bP4/u//ftx9991R1KiPfvSjuOOOO3DLLbfAWovbb78d73vf++a7A23lIAaOVCREAwQ460T6MNkrBlSR0KQodavMyNEHe5rB7VyE2z7v7RDOPuwBwkNb2HnEx0Hw2VlMjoyxesK7L26cPhm8EUitQODAbGzCjVY9AJisN1b1xRiumNQrH3jQNK4n82ICR14cdeyIyKKcvDyYKsWFj/F2BfXEHwIjccDAXR7hB4lZ6YL9gWQPwmM2aK22SfjgQhsicbAQxivr2MTbFq47bvIWk29mos4JjzrYFR6Z+4trhnSRJ4NpbCyaPJuB1qtoaAKIQwmT14H04W/fG38e9TF+nr7IfrAIYEhdSwZixCrMa3AmVC3JZLxoE/+XYIGYBRJud7BfVvx9wQGvZ+rNhjbSo1z5NjQDy3COX9eXnRB9GDjAxXmKAR6ax6NUjFvC/Tq3trZw7NgxPPw/78Hm5tHWSiJMVkEnbuPjvNFI40Zla+ckk2BHYcUc6NJqFiI/GlcBsz3Y6SVg9yLKh76B2X1fxd5DD+L8V+7Dha/ej51zu9jd8hs0FeMCo7UR1q9aw/rVx7Fy4iiOPOYqjE5e5dmDI8dhjx6HWV2HG6+jWj3qJ31bwI1XMbMTTOv9D0bWYGzhVTI1u1KakQ+4Q5OM5soZWI89dSCPPElqY8Rgf4DGMG5aR4wrXRM5kAOC8Ghd85szA3zyLMLg3TAIISIgv6ZqotJJlsJfX5fP8uL1kNdoxoxSUjpaTTRKluwzKgaiePmFabMmzflmsJWBn+h4V319uhgkGOW7r3DbhiD5e6hIO5+udNrvhIFxbr8Efq/ymDYZckaCuzP635S2235Ba06pd9cHHOTsQtT74GCHnc8ZWPYJ0sUlgBul31kA58+fxw3Xn8a5c+cuS6jjME/8f5/E5tEji+V1/gJOftffvWx1vZJlKbwbksLo7ZbbYg0Q3GjSBga0KgYAFI2bIqBPnMLeIAjFXFCUaxwguO2zqM6fxey+r+LiV/4GFx94BNtfexAX7tvGrA6S5GMfTLCyOcGRx1yF9WtOYPXkJsbXPgb2+NWePdjYhJscQTUaw41WPUioJ3Yz20OxMsEUfoKunMOsMijMGDB+MppWVWuFEG86VNWdeIzxZBKM/TiQctIFlD0fAghl5bzdQT0BEnCRK+Sw+6EywclhlgMEMgwrmQ6fwjXX3oAYMT94Ag7R+2Grs6r+TfYHlDQwF4JmDnXsoGtbACKUGatFDGggNZDRX+XgL5mJCOggbzwHAOQLxO+/KwiQKpxhk8bDiYk+G3uEjmkGwKnyE79TK11tbnNA7aHSPGv+/uk3ZxfCdzADR6BlvKdJFxBozrXZHV4nebxdTr9VvgYQuOQAQp97JInqjbaq5FCuXFlykCAYguiYQbQTY4pyMvVGTbKbhonQRGnleR9amYWGrupQ0bRh02wX1cXzqC6cxezsw7j00DnsPnIBO+d2MbvkvQls4eMgrGz6cMmrp4419gfEHqwd8QBhZcOzB+OVYHMQGFptUmArVL5SbjwJPEVduGayIsxj4Q8UYpXnxLPmQtET/ff2aj56xDWL0KzSdPco7qUgV/UV4gFWC+lraCZAPLHzKIJc56wxHjz/nF53qJA6wdsXpNPlwIEmQwbwuSQxeacAQqek6NwDpHk5AIiqUP/n4CF8p9W9cy2goEkKGKRAAS9fOyfP83oGF0ToACm6Rpn8NSamj6TsfHKM3GWVQ3XDQrL0IKGlB6dWyI+TUZ1yfZNPfSyoHop2Ovnd0Xb
RIh7CbM+7Os52UJ5/JMRCuHj/Wew8tIVLD13AdHsKV+8l4VUM6wEgrF99HKOrTnsbhFOngbWjnjmYrMFNNuDGqyhhsVc1exeMa1VDWbl6ovc3VFYOe0LxXTqHWdmsHp1xqKparw4HVH5FDdu25Y6iUXKWhT5CH+/g4FwT+ZCrFyq4MBLR8VSMA76q18Q5H6pYo4/ja/yP1sBHz0IczoUErlwMFFLDiDZtBlUBHFzNYiyy8Y2MpJgai7k/vzENyJKRAvnEeEUt+DQQwlkN046wGJL1yD4FFOh6lVGo24HcbptLiikYwhL0fQ99QMF+SIoxyQFRziIcWLsyphtV98njUSrLDRJIjG0C9jDvhdg1MuHnSteQVD1WQsEo0TUeAmTsV0dTtNNLqLa3UN73Vex946u49NCW34/hoQuYbu+hKiuMNyZhO+f12kBx7dQmRledRnHqNIpjp+COnPLswWjVb440WvXRCmtAMKtq/X9t6V4YYG1smzj6FpigmWQ9m2DgirYOnibmwrKd9wxgZjuNgWPBPEfocaBmEKLVSMwkhPqYhsVo9J+hqh5MwLQAAu1VwIGAFG2ClNQxvb9C1B8yXZh0mntt5RXoacPYkLZoLa95VjU4Efptf0y5MFwfr/Z4vYYYhnWtj1rPI/c9I7nQ3EPyyV5LYMG093/ISSc4UwBIywsCjTcJgM5Q1W1Lf5YuVf4AITdwziZodaV6Vibd3mL2LN8uW8aWDOC3jw2+rUP5Jsi3BkhQRA5KWmClsA9bBQT3qygBG3zodwj64piKgQGEqoSdXkJ5/hFU5x7C7MFveA+Gh85hev5iE0lxUmCyMcbkyATjo+tYOe5jIYw2jwUvBrd+DNXKhmcPRhOgmGDmGkqf04rO8a2KTejYjk1eFNmPbAKkJ0CzfTJtwQsPgrgHRFXFNhiuasWlUHWbgsKnfRUAZpiIODyyJlpo4+QAK91jxTl6r/Ka6D/7rk6YxsflKJg3B5AAJ0xajIjCZGhGZgCt/okRiZkbltXwfeARP1sDZJ9HqHrHJNbLFmHoRKgFU1LiJ/SZhzonq32gmvvYfKjtS0pfw04GliLGo06m2lWY2KZiUUYi6pfK7S/CnA2riFgIzpvHo1SWGyTURnTemK5WGUgjRumZICd+SlMqnU8ZIHkY6Gjb52CDsOMjKZ57COUjD2D7jHd13D3r7RCqKQMJASAcweqpTUxOnAgAAWtH4VY24FaOwk3Way8BP2FwQzwyuCsKYGVkwiTvvQmaPQrI9bCoJ38/obigZimMwch6cDFi4CDYWzAJoazlcwTCds+u0SRErIFBM8HJwEly1cF/O9e49VVo73wov6fiWyR/J64NE2ClTJI1C0UqL2MsLDPi5DI0IJFDszcDTKP2qVxtQ1FDXB4/QYKD3MQkgYfq4SDdeimtNlElXBZbW6jL79qx1ESY8qjQ8hsAOkwN8FpZoB/I0CbTnL2KxiK0AMK8zIoCnjj7AbTBAt/sKsUU9DWElEL9F2hUDcDBqENInJwT5szj0SpLDxK0+ARGmRw0PbqBBVxtT0BxA4AmGBMvJzEQhY2kmIphdt9XUD50BpfufwTbX3sAu2cvYO/CXh1N0cDW+zGsXX0cK8ePYvXUJlauvQ722CkPEE4+BuXqUbjxGtxkHXsVC1vsYuv7SWFaer7CzeDMCNN6oib8U1jyCkCw4Scqf2INCjfzoEABBpHxT1V5vEV2IECwR/CuhIwOr9kMqVLgrAENJDL+QfK1Oxd8/wsx4GYH2b6Ut6KKCIc6gv20VmpMpdIZOIdPxPV/yVBENDfQ2iNAAgQ5ITXgLr6QVpwcHFB/6KWCE9ICBxpYUC9kbAD9HlRwuq+qQt5BQqUULksUIxmjPiGvNYmeufY/J32fJWKwQ+HJAdYuTRM0ikeY9GlcpGKhpiMNNCXY99+lq3ED8g8MKBwaLi4kyw0SSKijo97FUJngDOBXfTwaIw2CXGVQVcMstBUVAwGEiw88gr2ti5h
emqEqHYpJgdHqyHsykBdDzSAUJ66BPXE1zMbxBiCMVwMjAPjOSjaIzrUBAlCDAjPysQmqtndBYX1HJ+Awsn5iKcA2oOJsCaAbfTIJEyFiXXlZkQfFfJLqlskwtBIcDu3YGQPVcBc2M9koMmgFJmn3OQYmMkSksoE2cGkVS1+48W3dF1rSA3x1AQTtcYQ6JFQPWp/cl42fEqCi651paqFwTvGgIVHjUHQAhNbW9R11b6UBVHaB6kD3QKwC9/aRqi0OMCTNwoEBkGa2uoxrD+XKkuUGCcpEECa4aLClVa3XUTs7igdEqXcvqzhoUKr42i4hqBjO+10dd8+ex+7Z89jbuojZzhSunpGLscVkY4JRvTlTUDEcOwV77BRw5CSq1aPBg8EZ29gUiP8AaqNdDxBIxUADVulodS/qLO7Br+ANUE0bgNBn8Kl30ewrfQ3Icum0QaZlJa1MRDxfjdbX6hHVmz2L1r4gCUCpUdDa/bVuV064HXVNieNuJMgMzIKJS4KDbGGZ9tIDINDx1KvvDdpzbTZl95ABCF3PWwMSMsQ1SVLFIFhQQL9ffqylTu0DjBL3zlURlWMsg2ERJgVYSIlmC9MVzfNA5JBJWEiWGiQ4yzYUAiJmAEAr6I+xI7iqgsEsMsgLEyMXppJo/O2dAi5mcBcvoHzkfg8Szj6M3UcuYLq9g2rq61NMChQAxhsrWLvmOCZHNzxAOHWVD7N87BRw7BpU6ydQrR7FzE680WE9CpUCGAB11MGabicDw5EdoQSwW3q3R/JeCGqIWrddGMBag0lhgv0Bj7DoaMe0FEVMz3e2B1dUKOrNmyrvwwhXuVqNAb8zY0Q3Ku8RCCGF6xp4l0Bl0JESTbysnhTYyb/DuKxcXn2OcbGhncSSoqGlRXxzQVrVQcKBR7RTYaJuEpS0GIP6u2p3ASSMtexc6of9lsHqjB6TqRP/K8biDZHUirlle5ADBqlnXO8zAyhMyjwsCqtfIYCcAUD2MBRrJLeteG7y18D/gXk3HIKEhWSpQUL08rt0eTwdMQWSXkUDKEj4vg9elWFgqjqfagbs7cLVuy9WO9uY7eyhmjbswWh1DFNY2PEIKyeOYP3qE1g5fgTF8ZNexXD0OOyxq1AygLAzq1p6aMB3wrE1dVyEOGaAVzGg1tc3BosUwpgGqJZrY8k2eTIm3rgq99wBcHfTJqxwo+IwEJswAZHRpXeF9PcnjRXpvPTlB4SrmUnvf8AHoSFGV3Lwyl1L5WisAaCDBaozMSGAwlCAPQfokz2BV3Kx1ACIyhxo37lYqxsc0jWF7ab/tetM11pUr1NWrdDX1kGOE+K4fE8cIPArcqUMBQfRM+wLvOoN6SLhdhxzAgX+ndoit4ehfhxts46OttZR3tyBtw7lQOVbAyQk9LhOORZcIcMOiGxVlRp4aDIsrJ9Ua/WFcQ7VbAo33YPb3QFmU1TTGcqpj6QIAHYygh2PMFqdYHJ0wwOEzeMojp5AcewUzJFjqFaPRQBhVjXuioAIX1wzCAbxxDOr9y1oTdLMmyG4NsIHfMJsrwFLfQdbNkAFw0VjfUTHyrVUHHw7465BRFtZSKDgjzUFEEAIVtOM7uUTuKT/c/s6DI8wp9RbnItj5TeTiQQZ2jOSxyK9cCYdSdZ2I0pYT5jUr2Sb4JR3oqykW2Ku3FQ+2rUaEEmwL+rzyLTvRVzyOgFCYGs8uziXWme/JQUYhcEjgQVNonuT37sMSA8IJPjFz2JMQJdd1reyLD9IoP8cLOQoR64q4I3Usm2PaXXBGkY0cM72YErAlSXcbA9uNoWbTVHu7HomYW+GqvTRFE1hMdlcx3h9DaunNjE6ebV3czxxDXDiOpRrx1CtHcNFN8beXhUCI5FHANkbFNZP8KE+AHZmVQgytFc1mwMZxh4ElUQNFky55z0YqthAscUgaM+SVpj1M/Ihn5u9GqalazEgcqMYaVPBV1l8wsxt4UzChxhXuTjkLYsgKEVbIRLYsEArrrxUV0jAEdW
JndDKqU9EWzlzgKBSt9pqTynb71uRAXuJQbnFABD4kwmNbRiP3AAvz81rU5BjJsQxTTXTV6TdRwtAKtf0tjkA2uwBgYMEq5GUlK9+X4CvSULd4ehcPa5qTFcne6uNIfOwHYvKobphIVlqkOBs0ejPAcA1qgFa4QIIhommFDQfDYa0i2GICsgYBP6b8gqdvfQdfjaF29tBuTdDtVfbIYxHwBgo1jyDMNlcD3EQ7LFTMJunvIph7Rh2XREYhApODdlAUlYOZT2s7ZUOtvKTLa3ig0rBNGqJYHcw3Yv0oBoY8gdiZsHV20MH1sD52yb1AgEFGmADWVNnxyfckW1bPwPsGqlHN7F/NReKly/z0hiHlMgkJQCDeOMoeQ8pe4OunfVIAjPC1Cd9bAeiPLomfG3iyE1K/H0jVpfwiS/8zuXbqlTHeQ485pzwsqBgwGTMgVomFFfSALUF8lIAIVWfVJwJtbIdIEpLm3sXrG4heSptD7WB61PmoVzxstQgAUXhwwSHRlhPeraZ1AAa2GZA5VfBjjpuAAmFHm5Yo06rJhyzm03hdi/V9gjeULGaTn02hWcRxutrHiAcXffbPW9swq5voppswK1soDQj7JVV5NoININNUccPIEaAoqG5+r5oh0ValXu1QsMejAwAzh7Utgf+/vjMpKwmgQYg1FtBkwcFAYS+DK0ffBW7g/q/Big4QNCvMWyira9Xyu4zRMndEyvT2EGkGAkgDQz8udQ1aKlQ2onaAKHTuK02anNSBdcloq/0te7vJRlw4zRwcBnKVM9z4A/EcQDY75zI0ypAkMIjuyrgoDM6ZVSBDmYhdU6qBOi3wlR0taecXUHSwPKgxZh4FTFvHo9SWWqQELwbZKOvQYIzprFBcJUHA86Atod2tkCIncApKQYgwm9Xeaq+Kv2kW5Wodrbhdi6i2t7C3vmLmG7voKw9GuxkhNHqClZOHMHKyeOw60dhN0/Cbp6CW9uEWzmCarTqvRBIXQmaqBBsDwoDjItm4jfGhJV75TybMKtcE22xVkuMbd1Jy8ZzoRWaOjfIsEiCsKPgUkkAwbl4QiHwYo0w9DIN0CEbiegdBvuJuG6kRjEBHMUgIbhr1RM4+XbzSHeVKCPcntGPAwwscO8KkS/PO5XPPNKKdCjBQZcOuz4fvdbUQC1UdZwd0fT6vd3XEjrnnFtfzkq/U40w7yq1B1DoI50urbxIUxt8dkWlnBcc5Mrm9VPo/6FNOBt1E4juMRnn4aDkUN2wkCw1SGi9fFrxRjx2PRDZEVBMQrqoUxhl90igQf3VDKb0cQRQ7kW7O1bnz2Lnfh9V0ZUljLWYbG5gvLGK0foqVq65xhsorh9FceIauPXjfj+GlQ1MK5rsXWPhDz9BkvcChUqmO/KMg1/mFsZhbWTg4K8bFyYGCHTvo0kTSlnqQ9m9clAQrShd851HfoxUC2QwCH0HRz7p80mcHvs4qCAaer+g+yZ3VdT3ZfxeCY3+HDXL0az4q1BvfyCa+HqMiBWLK+1ErFppZ0H3zWXIVsGq/hroDw5alVcmZMSTk29G8cAnAUKoZxdA4CtNZdWZvQ9iP1JZ97Gt6AMUcvmQWpKqNHDR2Jk8A3padlVamn0UOe6pNibzGhdylkQAhiuGVTiUwbLkIMHbEkRW18IAz1kgYg7CtQrtBsSTJQCDGUzpgqGfKfe82+OlbbhL236nx509uLJEVVawhcV4YxXjo+sYHz3ibRCOHodZPwqsrMNN1vyOjsUEFAeBwiWTWiGKXzDdi6jIogY6znlQwGVEtghATFcb7tJGulF2oY0HKq5WkKtnHhqaT8IhK9MOt0xqAyl0TKoTtBU1n2iM9ee4MakxsXudBAitAElUtlIvLv5Z9LBtSOSXYin5vesZXgY9ruY+1+cy1+2BkdJ5d7q51XUarCYZqk7pykthFTqz7cpT1kFTqwhbEGC4l0UXqEnlF7nuGsU9lddXGjYa3Q1W3SiP7esRAOEBgoXDvRsWk6UGCa4
YMxVCLRR2mQbDehCqUpSTChQY8CBd/mzPA4TpHqqdbVQXz6O6eB7T7R3MLvmtnwHAFAVG66sYHz2Cot6LwR45DqxueDuE8brf8tnPdF4HDtcYGVqDUbnjPSiq0q+iSR1SjOHsCNZYGON3fASaCYc8GLoMtZxPrOuHja3DNjuVrpfbS3MhQBBiMyhpZDCWBkyIFSdfTScGHWPlYK7cJ9qDZDKgkaxr4npNtLy0CHTxee0iQQUPEW2A3ifhBqXJVX/4okz2uXoxoNDKX7OQHyJ9r0uwCq36IAEO5mUzBHsw1B7EoN0+SWXSJUk3YM0WS3mnSaCQLJCB1IM0ZqzVpwvn8SiV5QYJ4xW4yXp+YC2g6tyiUL1yIHJVAwz2tmGmOzCzXWD3IqoLZ1GdfwTlI/dj9+GzdWTFKWxhYcdjjDdWMTnp4yDYIzWDsHYU1XgN1doxuJUjKGFrWwRfq1GtXhihgplegplealN+1EmrGYyxmJDdgLgfySJ4u4S9OEhUitas8y+MhbMWqIEC31SKqxikEECwJgYIEWkBDyBILRHqWinuYdoEwb/LbatlfRDrmTVx4AyGqe9x2FKO0/F8QxtSr/S51pcrgk/RhBU9TNG+5eCXAwqpOAhM5Cqaf9eCVtE1AEIIaznBBj18F1Cg/FR02W2/kM8gzieZVlk0DGIMuiY/pS3nAEJusteCeEnQMKQlU4AvytvkxlU63AUU5PPkm8MdFFA4tElYSJYbJBiLEjYY/vlJ0obvJKRrlT7szb4HgmpzfotkU05rBmEKU+7B7WyjOv+ID798wYdeLnc8i1BMaoCwuQ67vgmzfhRm4yjM6rpXLYxXgdEEM1gfC6GuDHkjjOANIwNzEO7R+N9W6bCuCoZIBXW6YGTJQlQb26y6eWOXu12y56qNLtqApakXgHrAEaMrAYSCWINy1gCDCNjEBTkoNK2yMggxFmBgUId5Dl4L6XvoEg4yVMYgpNMBQi7yoxbwSa7kAs2vGL21ZAiTwOhkAm2pCSr32KjuqRDZTh5agO2IjOCGAgRNNCZRnkvlmQMIKTCiqRyE9FV3aG1SWwylJBdiOVp0hET6xB7ALJ1LeXDQOZ7foVzxstQggYL5zOqewF3m+KBLunXSqZMEf3iauAA/wdZ7MpAXg6lmXs2wvYVq26sZ9rYuotzZQ7k3hasq2PEIxeoExZEjsBtHYdc2YCarQDGBG6/4/6NVTMsmtoD3Vmj2XkBJW1UXgSILwYvqukUAQagLWnS9q8MtW4Dm2QAUEgMtPS9utCglMr6rvxfKilnbjMqAVAj1ttSsnkk3K9hIo5QbaEh1ARiUcGHLW77drUyfEj5Y567jcR+knYWUPmAlAgpsUmmtGocalUFZiUsbAnaeQAN3C8zWGcrkwu4jZd2/L9J3wsmtYFOAoc9z7jL26wEO+ggtbrj0BQLRNeI3j9kBNMA4YhQSaodQFh+DNOYo1QYvtxwyCQvJUoMEcsejCcA4HwynsKh90U0ABzQxN0FyGhc6VA6GexEQexDUDrsoL26h2nrYqxnOXsDe+Yso93xMhGI88saKG6uw63UsBLJDWD2KauUo3GQdu7W7IncbLEgv7yq40aRp0Iohk7UMsfNtnV2lq0zoNwWZqq/lxkpAMxHQttTcFoGrGnydm++cXieQkIp3HxiEatbsulnHbeDAp8m8ATGtIFdUP24MVrMqFEKW6uZZJHJhTKsSkt4IHWk0b4UuFYOUOEJl891QW0hMOq0VXI8VelbvHyWs865ZKs7EpeakFqNADB27Bzd0wmV1BhITTB8VhNa2+tDk+wE+tLopwK/LloD6Vp8gYbImXZdQ7FXPwCEE+IqAQo975MWor4Hf/0FNvIcgYSFZapBQmCa6INB0HvLFN2hAAKzx3gRi/K7IBz4cIDXDDszeJZjtRzA79xCqC2cxe/Ab2HmIYiJcAoCaQVhpgiURg7Cy5o0UJ0fgJusozSjsX0B6+5FtNmlyzD0TiPWUWiCh4OJ
IQKaOCa/pm41zcNUMcP64qnqg8hAPKNaYYN0vXeG4W2NDM/svBZpVPTE0KGP2oKlgPNmrwo+XtQqlduvT9LIcLIQ2YBDAQk5y4aC1U6nhI6WiSE0EnK6PbGYyq//ebILwbAjqKE2sjUAI1YFPYjmLeX6+dQ99BlsBGlv665SNSi7vPsBgUeHtmJeVADF9VAP8mctYHVKGAoNcPj6aaQwUfB3ywFUyR6o8iifcZZSlBgnkS1+EgamhyvikaqwHAbxp8iYeAhVVM2+kuHfRGxDu1FtAbz2M8txDuPTAWext+Z0ey509WAq9PBnBrq7CTJqPq9UMbrwCZ0fBbmJc97bCGt1IDe3VRZhkXQXD7AjkTpb+WOVjQgBRhzWwgCtrl1CdynYsBgKfKCVA8fVqA4RGN8/qXFUNcwAgbOdN9Q8/0gAhRImM2BHKj92mvI5NbIWtN3xyLgRJ0iRnlNlX+CZTwDA7iHj/itheIX/hALAQVVa5rkIvlYA2YdHE0mLBEjegnVdjI0jpAQ7U59YHiEpVQ5/JTmPytDoOAAhqkQmgsB8AIdfGW21RA66pe+8yQL7McrjB02Ky3CBhtgM7q+MGGBs2uAmrpMpvsmTr72PeWPl2uA6wly4Bsx3YnfMw24+gunAW5fmzmH79y9h5aAu7Z8/j0kNbqOqIisZan3dhUaxOPDhY8R/PItRMwmg1dLCxNQ0DwA176uBFNLFWsM1ES4aMroKZ7oTASK3BPrXa4qt2Y/1IQF4BdRpyeSwZHU9AgVQJmuohKh7CW4TZHPCNpFSR6gZlEI/8JVwZx8NI+f+7KgYKtXrJuHiC46WlojGmojbK6+VFslZcFaEN9nz3Sj1PsuFgpXYBhMSzib6naHym+iIDYDK6bem1QeqqBqxzFQSQVlVwz4kGbDKD4o5BPqcC4RKpP7Q+kzI47KpHz+MSPPURqYaQO572kT5zHK9p1E4Rv0PVPidnt9D3GV4uOVQ3LCRLDRLM7gWYHVNb/xehcZKePgyopL8vp4GWjyYkV8FdOIfq0jZmWw/V3gvnsXf+Ii587QHsbV3E7OIOppdmKCYWxXjk92PYXMfK8aNYueokihNXw9bbP5erx+Am63DFGEB78gzfUQ8a1QwmhD62IZBSs1puvkcTL08TAQGh9+OTgB152wcqzzX2CCSSbndiACfwIIMg0Xl+f2r9NCMvOTinBm9uoxAqbFU1S9CrI6bBeThnoDHaApqJTwRZrK9lBzLPi0uJeIC2cyzxOJPUek4Zl0h19RTiYzCAVYPmcE1PcEB7UISqAKrqhd9x1+SosRD8XJf0cRlsAQWgvdpNrHp7vz1NlTcn/Q/oKisOFiRLmorR0cdeJm+kmz432CbjUTzxLpMsN0go92CmO/47D57kmLV8MJSbwe1chKsqoCrhqtLv3liWfoOm82dRXdzC9NwWds9ewGz7EqbbO94GYXsP5V4JVznYYuxVDKsTTDY3sHL8iI+JUMdFqMZrcONVuNEK3GiSDMMa6Vnr/2HFwI37av0wp+ulmkHVS9MgbwxgiuZ3vWGTQ9tQ0dfBRAO/NPZLRVCMBo8cMyC/a2n60s2cEZLgKCH0jA3/3rEy00I7a+f18hobCGPaBp0LS27XwBQNz9teIdKzZ0htBPDPiYMDsO9y3hmyKRYXyaLMS8mnJEmZ91Ap9GEA+tqe9JEkkaSUxXerlICBg4LUBK9VU5afjFKa6ttXkvgVzeJ5PEpluUHCzLMDdvc83HTPT/w1CCBxVVnv1rgDN9sLk62b7fljezsoL5wPHgu7j5yvDRN3Mb00w3R7GqIpFpMC2BjDTkYYb6xh9eQmRievhj1+Nezxq7x6YWWjVjWselsEmiDg6dOKdeHCtO0JCleFDZk8OEDDkEhwQEwCC3vakjrGAp9Iabtn6RbKVx90nKsYDMhts237QedD/YjxSE2gqZVxbqIPoCcBEBLUbkokwyDv3R93nRNEbnUld5cEmudMd54FXD0ku0HQwNU
aMQdwzfe+tDYvSbrqyTy0e5fgQnMXHfpsAPk+FDuPzDOS7z6nbtPaRy59X+HPQbsueia9vHT0cnLPuRcwyIGEPoDscsmhumEhWWqQgHIPZgqUdYAjt7fTAgOoQQKqElUd0wAAZjt7IRjS3ta2d2nc2cPOuV3sbu1itjPD7JJXDRSTAqPVEUarI6wc38DqqU2snz6J0cmrUZzwGzi5yRG/cdN4DW7lCGYOKEunUrMW9UQbJu36RYTwz9PIwC9Qo5w1IHDAJmIHNCtD47fAdkW9v4Vo5IUBKlNvtSxWuCmA4FmE2OAykhyzIdPxe5PftQmf2yDU23rnAIImqYE+7W3QBgiabYKc1DQ9eA4sDJK6LSSBgbG96X1tEubAgIOlLlHVDAm7Dy6SWeHXaBEeS8b8tOqgrOS1qIRD1BiyzeyXyiACKZl0YOkKcX1OUnYtuSY3iC1gC5um0IzqK2Ss2H18C8sHPvABvPvd78aZM2fwXd/1XXj/+9+PG2+8MZn+t37rt/DmN78Zf/3Xf40nP/nJeNe73oXnPe954bxzDm9961vx7/7dv8PZs2fxfd/3ffjgBz+IJz/5yZftHpYaHplLW3DnH4Hb3fGqg90duJ1tlGcfxt5DD2L3/vux++BD2H34LC7d/wguPnAWF+8/iwtfexAXvvYALn7jIWx/w3stXHrwAi4+eBG7Wx4kTLenmO3MUE7LwCSM10YYrU4w3ljDaHUCM54A1sKMJ97+wI58VEUHH1XR1QaBVR3LQfTsesHmAwzNdkL4ZzPbCfEETDWrt6ieNcyB6GTOmGbb60jN0F5Z88HOmvaA61wbIABMzZBbhTCmArIeOTCgfLxFsmnuTYCBXGhhutdW/cRxHjQqHEvdmyJaUKmuuuRUE/xdcJajnVBRrzAbAqozv2etVC2NN2Blbde1P/weeAAz+rTKYd+pfckPpev6+DycbuHv4o92LMQDYfespZHPhvJaRPrYY3BPIfpoYjo+FBxOfnISpeNMX2A1yR7KR0o1JcU7aZjDaHyic4Buo3RAYIE2eFr0M1T+w3/4D7jzzjvx1re+FX/+53+O7/qu78Ktt96K+++/X03/p3/6p/jH//gf4+Uvfzn+5//8n3jBC16AF7zgBfiLv/iLkOaXfumX8L73vQ8f+tCH8KlPfQobGxu49dZbsbOzM/fz6RLjhpjIXiGytbWFY8eO4YE//A0cLQCMxnAXz6O6tI1y66y3KdjZQzWdwViLajpFOZ2h2puhms4aFmGvQrlXBtagnJaYXZphb3sKVzpUZYXJkTEmGxOsbK7g2Lcfx5HHXIXVU8ewdt21KE5c43d5PHUdqo1TfgOnyTr2MMKsHmBJaCVOdD0fDAoD2EvnPDNS7sHMpuok36LqROTBaKvnYoxgg2AbOwRt4JOrRgIIlJbiOtDGTSmhewmDCWM7ItVDanAgUFF/T91rMiAL+66tplODvQwAJUHSED10Tigpj9AINEhdtovItTTnjZDQm/eluvm7Lp0HtR4wiOuovsZ7vHCAEOWXYBB4+5KSM/5U6xAda1/bNazJa4YaRQ4dNjXbgJQKIHydl5HbL0mxBmwDtlxfjr7LaIvGYmtrC9de/204d+4cNjc397v2YZ6478yZhfPf2trCtadP46tf/WqU18rKClZWVtRrbrrpJnzP93wP/s2/+TcAgKqqcMMNN+Anf/In8YY3vKGV/sd+7Mewvb2N3/u93wvHnvWsZ+HpT386PvShD8E5h+uvvx6ve93r8NM//dMAgHPnzuHaa6/Fr//6r+OFL3zhQveYkqVWN5jJGuB2UT50BnsPPYjp+YuYbu+EbZvLnT3snb+Iam+GcjpDuVcFQEAqhaqsAligrZuNNSgmBVaPrGDjmg2sX7WGleMbOP6kx2D99CmvYrj2BpiT18OtbKCcHAnxEGAsJvUg6mCi3RQpHgPf2MjHZtiFmV5sdn4EAFOwzlmrE8IIWXc4UiO0Vtkm6pgkYRUlBvEUOJBSATC
BduerSVPfX82M1C6bDgBso4IgVU/nwJIK39pjINQmdnlPQyexIaGcU/WR17RUT9Anj+YCRtMuOCFEYJEBxK73T3U0QBIgJMt0zXPVAJwM2KWJAUBOkZXpT4OqrY3KA5WfKDOl4x/ohsj3iuHSeuddk2/f4wmbnnQFlXw5OJB9V4xNABuf6Dy3Q5BN90CZBLNwnAO6/oYbboiOv/Wtb8Xb3va2Vvq9vT185jOfwRvf+MZwzFqLZz/72bj33nvVMu69917ceeed0bFbb70VH/vYxwAAX/7yl3HmzBk8+9nPDuePHTuGm266Cffee+8hSFDFWqAEqu2tABD2trYBANV0hun2Di58YytmDHZmmF7y/8u9EhUbEa01GK2OMDkyxsrmCtZOrOLotx3H6qlNrJ06ho3HPTa4OpqT16PcPO03bqqjJYYJwVVNzAZrwt4S1gAFqmZjIwqrTC6afOLVVt2maCZ+xhq01AvcqK8WlW5Ge/DmunetX5VyAqX/UhUhVhLOVbXSuefAoBnhdaRNraSl8Z2sAb+liEpXipIrXs1VkucVrxYbBkFjD1JlNicX1+cOAQfy/ZNOXO7yySUHQAHlHYkyU3fXBJhyvVgH6R7YxyZCLVcBw7KcHFBQr6G8jQAHPYwA+0bYjGrUZTSYAQid5aTeRarfspgmg/YeuUJEYxI0efDBB1GWJa699tro+LXXXou/+qu/Uq85c+aMmv7MmTPhPB1LpbkcstwgoargZtPaG2EH09pt0VVVMEI8//ULKKclyr3K2xlMS+xVDnu1zQDgB7+1wmJUWBSTAmsnVrF6Yg0b1xzBkcdcjdVTmz4WwqnTsEf9NtDlyoY3UISNemTlgLG1jCK2YSChY8ZVDWvAQipHIlcUctLVmAMJDjITbBdAaKV3DmKvzGSshDiRqEtuIJyDOQhZhXo2v/mECOTBgb82PdhLFQHQTPRaBEfndApdrsC7qOd2BmmgYJBWMWjPQgIEfvspe4u2i2y7RK628WW3AULS6SVRh6QhnniOUT1cM4lrQKGLBCCX1a5WyIGCBgo0EBhFJaX/7L1mJ9A+e3RQd6P20gUUhopsh0PyDEblw1Q284rLtLcheQDA5ubmZVGNXMmy1CBh+pUvYPvcOVy8/xHMtncwvbiD839zFhcfvIS97T3sPLKL8zszXCorTB1qYOB3BtwoDCbWf1ZH1qsUNlewftU6Nq45gpXjR7By/CiOPra2O9jY9HsyAKgubfsJoJzBjlcBO0K1dgx7GOHCXgmHWrVgARqawuBQW+YbMwOq3WhwoJgGuR0RQ7REEm7VXjD1AxelA1sQZWuSkyOxCXSWWAQDCitt2Oqyrp4YzNvjgG3H85flsu9DxhF5XZdaAVF6Ynv0iR2IV/9cCrQnSwkcUnYHvsx2fbg4ltZfOEz1ICn9UjxUsn9IrrYdf78eLOZiaMr2pL3CHAMzVDhI5dEJw9bXpgnJDbTvM2UjQe2fgIK2AyNJKh6BBv5S4CC5UdfQFbexYcUeba2dSy/LkSwkD8KlbQOtuV/nwMwBqhsqYWw7bx5D5KqrrkJRFLjvvvui4/fddx9Onz6tXnP69Olsevp/33334brrrovSPP3pTx9UvyGy1CDh0tfPYPfsBVy6/yz2tvcw3d4LAGF2yQ9jhYEHA0AMDI6uYHJkjNHqCOONMdavWsfK5qp3b7zmhA+SdOQIRqeug1mvt34+cswbBdpRvTdDvRX0aAWlGWFWutZERcZ+Fv5/a7Cn/xagqcSVswYsyA5ubXs1wAECD/FcS1gp8rrxLE0eKEQhc03silW6ZmB27Jgsn4t0GZQi7Qm040Okz1C06A6PUketGdalwIGWo7xVFSh0XBOdo9W8kqjPY5VAQS2jRz4Ae++m3ztV2RbELFbMdsT1diAQZFrPQbOTMGjUGlLtRkAhFVKb5yHrotodpADCUJsEYHGGIHV9DUoDK1H0KyfKjgOGJVQzDJXJZIJnPOMZuOeee/CCF7wAgDd
cvOeee3DHHXeo19x8882455578JrXvCYc+/jHP46bb74ZAPD4xz8ep0+fxj333BNAwdbWFj71qU/hVa961WW7l6UGCRcfPIvxhR3sbu1gd2sPsx0f+MgWFqM1wBSeMdio049WRxivjWDHFmsn1jDeGGO8NsJkcw0rx49gcnSjVi2c8szB+lEUx07BrG4Ak1VUkw1gNPEGg7UHgRtN4IoJ9sqGpSBXQVppjyyj5HkoZd75DNuLvRh5VYqBju45exCpGkYBEKQm4Jz+VMaIJ4kiLsLA1avAEp7GJd/1FJtAx6gMqRpIiZZO1j81abuozv2AQpNn873Lir59bZpuzk4cMh/oQEG75nKRtvQIjXivi5ZH9afJm0tXDAOp7sjlzY9pdZYeLDzOBQcKQKx2GOzZwH90GSZqchCTah+2gRYnfepTj2tZtclBGS5i8TY7z/V33nknXvKSl+CZz3wmbrzxRrz3ve/F9vY2XvaylwEAXvziF+Mxj3kM3vnOdwIA/sW/+Bf4u3/37+Jf/at/hec///n4zd/8TfzZn/0ZfuVXfgWAH1te85rX4Od//ufx5Cc/GY9//OPx5je/Gddff30AIpdDlhok7DxwDqgc9ranKPc8c7CyuYLJkQmMNbCFQTmtUIy9rcHK8Q2MVico1iYYra7Ajn3cg8nmOoojR2BXN/x2z8dOwa4fBVY34MbrqMYr/v/KhmcMKodp7SZWlcDeXglrgElhcGRiMXLN/hCGbA9SHaL2BADgjfvoOpPuQFGMADsKbo4EEAB9Nd531aZR800eDsbVwy7Lb2RJPdEewXm5GljIidSlA039LNoDdjLQDruOkshJUKbvk69aVkIHLY9FMkBXnHpuLcNDNBNxye6VVEy8XeTmPb6aHsLm5PcAqCdfbh2vXMOv4wBB8xToIzlbjLK+z1ZExkQ+XDR1RDZUufyek772BIL+V22TUt+RZ+2sadJHakJ5D8oCyLjaWLHvTqX7LFo8lHnyGCo/9mM/hgceeABvectbcObMGTz96U/H3XffHQwPv/KVr8AyNc33fu/34q677sKb3vQm/OzP/iye/OQn42Mf+xie9rSnhTSvf/3rsb29jVe+8pU4e/Ysvv/7vx933303VldXF7vBjCx1nIT/8U9uw5HJGFVZoRiPYAoLOx5hdtEHlhitr+LIY67GeGM17LWA0RjGFj6j0RhmNPGqhNV1mJU1/3913asR7CjswYBigmplAyh8sCS+2Q19t8ZgZE0dVwAhSBJme151oG0uJUWzJ9B84m0BFKPgBumMzcYD0MLiStdHTfggSuMfv19SP5BKJWVItmgnzXkndK32+2ytK/NJ5aVJajLs7bWgyH6oWXhe3EaD2qycJElStz3knaafiZ75EKNRrmqgvHOqK94fSueSICHUlfVfzWA1Wc9wfawGidQMCZAwaFfPUKCY5DMAIPVbjhW5BUWnuiwDhLQYC1vnzuGaxz35ssdJ+MrX9ydOwmOvP33Z6noly1IzCXY8QrE6wer6Koq1CWjr5r2tiwCAyea6Nzxc2/BbOa9teIBAncsWMKOxBweTVW+YOPbqA2974Cfi8B2sE8EPCt6UoHEJa60eqqoBCCGokGK0YxTkb2wTbVCmLeIYCXIw1Cz6gzU+O6YNzpIOrth9eVq2sWHQBufUsVQQI2DYSj0nmnGZtodAH4t1KV0r13il3D7WV3IAIYcXcqoLqZPvkq6QvqlVf986ATGjozELHFBwt1EpnEED2m2Nqxu4wW4wxDV5VonqmGuiXTYrTWV0uj6i5jUjQCV9lKcsI1U2XV//z3kDSeEhsYmd4gDNaCqJfXDbXVRSETqH5vFolaUGCeMj61g9voG1U8dQrHqQAACTzQ3Y8QiTo+sYX//4wB6YldUWkwAAZjQBRiPPHhSTZnIuRn7TJgILxm+MNK0cZlVjf0C2B9HEEEIq1/sw8J0dXRV7MBgLNxrHNgdkbzCaRO6O0fn6JwGEVPREwA+4cttiabS
l0cEAxF4NrgZIzb1r46I8FABHYiBapBP2cVML9RKVTVmsDwEE8prWyqp1cbq2KYDQ5+nkKHIOFKxz9VbZLoBAPknG3gLDYU6KFZCTPv9Ol0ibgxxLJO0IALIlaJct778yQAEDg5pRiOqZuTl5T0r9pOFkeJ6a2yD7renwswHFMq7OufaiBRrju8GmuqIx/qS0Q+IqxJQqIro36ZZ5meWbpW74VpGlBgkrx49g/erjWDl+BOW03oxpPMLaE/8f2PWjMCtr3migZg/MaOKZg/HET8BE1Y9WG1BgLFD/dqMJSjuJOv209ABhWjof2wjGG/AZA1PNYHe3gXrvBVRVEyzJNRsfhY5P5dsiGENGAIHYBRZWOWzdyxk90cG14DgOLopYBzSMQJGgVDmtO61oD4pavVCYSM/fuTJHM3Cl4hbkZNHhhDMhUb1SOmSkJ9ykGqFPDAhlNafJUA+PPowOTVhVPTmSbUJhEN0I3w48NfFpQu+1MKZzwpEr85SbaW7CpsG/L5vl6wZUzrD+YbITZFL10lHPLM7IrLRVd8UEMEipCvqIFlTLH48XDgAHvWKxUatuOFhIujeTfYKFH7wOCCAcyuKy1CBhvLGC0cYq7Po6ipHfbMmurKE4ddqrD2zhd4AEGnBgi2YTJD5JjzyDADuKPAb45MJXK5PCYGybDW1GBvWGJzFjIPWQGkCAUB0EFYNtjtGmNDzMc8hTdHB5LkuTspWbXBVFbmVAHV3NBfsDec0Q6ZpEmnrE6YdK2zq++xpNbZA6DyBvY5IQ7bZ7G5YusKqhFbWraYfWHgaUzsRBsnL2FaE6hm3zTECQJc6969Skq72uoDpITeDatWxSpvgJRf0snTFNuHHxZvrYv+SejfSYUL0E+kyYAmRKFhHsd5doe5X4a9sAgf82dRrpGuqBRhsotBiDbyJQeBQTAQvLUoOE0ZF1jI8eQXH0RBPLYHUDZv1IaKDGFjBFAWcLVHX4ZDkZu5G3QeC2BySmbuR+Ne0HiaKwtYsjGxTKvWYnNGN9wrJqJn/UDVUCAWIvKI0x8TEGEGg1T3nx1bmU2GixsSuQQYP4ijE12Ln6/MjIM7qaIKJYWR5SUpNGn22V+wwvQ63fNXCQzCLFGgyMQpcb1FOn5gUKHPDSRl00Ocp33za4a84XCav4KJYC2pEOuVohqlc4nwYmJJVLt3ujfZcAjgzp6nsoQr+rn6nhLAOisMxRWT3AjJQIKLRO9jRihg4QckbKUvqCg1YVQ1l1/4QJe2hoQKGlYuEeDxbDO+iccqhuWEyWGiSsXHMtRldf610WT1wT7AfM7nmYqoSb7nlVAxdiC0bjxkCR75hYTPyED/jGXbsWAsDYorYlKGGme02ermrcHI31+dbXensEVge290LLlZFUHqNVv1WvA1zVbM/b6A/bz4J0yYDvnLRaIiqRgALQgIUGKLSD0tBAzKPX0cZUJWxro6jwKGodf46S7jKclOe0oDaa9Il6l5IkXTwkqM2CBlpDxqFkHAyk75eOh0iEQA386GI2kTqk70fQ3zQZhDbH1BoAokiHmr2B5qmQ3MaafgAAqqpJREFUuoec2iOasBPgoHWvdA/GBsBQOt5/fKZaW59njotofKlGSKmoxPWasWGqjlw012a5V0kXw8Vdlyn6JrFFNsEoaJFVHRKqlcsgh4aLi8lSgwQzWfUui6MxMN3ze5wDwGzmB8va9sDVE7dbPRo3zGISqxpqPZ8hxgFNREED1Fs5s33TU/pDuZosaLXCDBC5eqG2j6B4B6VDvdU0aoDgL9E6NjcGk8FuCFCE9K62Q0A8wGmrAEnp8oGXjpVsKOedKBe+Vkou2mNzX3r0Oy7zxjEI1/MfuQklJ30Gvbrd0IC8qPqgVQX2PRdUqxXkhlbYch8RtiFPSBoyEdR5fV+8HVUOIUKhfDp8Nc7ZgyRAM41FDT271n0p99RywxPieHpjYY0NjIW8B17vPpJ7B5zG91/y7UdrJvO2H2J14vEj3pCMjwE
8/gpXoRBQiA2iG1UOjSUtg0Y+Fh7KFS3LDxJW1gAAbmcbriq950Id/8DZop58/fdqstFc65yPmFg0BoMSmbfKE4GRnEWbTgNag1sAB2SYCDS7p5HKwcYAYUogoWMEkLviSTWDmh4eKATjNZNeBQBiZYaYcaBnJkPV5twbiYLmA9UisdVzg3aKut4XULDPoq3k5CpuqKiTFLeTkb76WljgyPSf7QegWerX7Vu2o0pwA5p6g763yufH6vzlZJZ8n5qfvhBj2yt8abVPbXyo9HlnUdkd53PStSMlV/349KwMARiAWA2RMvzl19M4YmqVjYEACsA3BRhUmN+miefxaJWlBgmwFm73EtzupabzbxyFXT/lVQ8UKXH1KHYrg9XZtp/kAZRrx0KHLMOK209uFEYZYANn8EwwMGDhkKU7IxepUiBbhPpciIpmR57Cr3z5FM3RufZqI+WSlnNbovWXNhHzDXsMPDDhNgoGtZpB6EN9XfSJQFtxNcZMXhVBmyIRVVkw+4OcEJvA3R67VnjJCWgOg0Of0YIDncImAG19uxyQ+aQVVUdmL35HLmq8DlCAQe5ZGBvSGivoYjGB83YrV/0acGu9OcU6PvrP0qRYEe5RlN0TgdvW0XW1+qRMPHO1zkzmVbcPgcpq+zHpPgj4fhfK4kCeVVhey9lI3U5EhK9mCw6qF2cnjQCUl1tI3bpoHo9WWW6QAPjOTwGRVtfhjl+HilbntW2A2buIldHE77MAAMZ6I8R60KSBwDEU3Oji2+5HLnCeIlASty9QgwfYllFicG2svGqBIuGVRE6wy4f6rAc7BUMUYEPvahJNwKyjk8ol5BvVKb/i0gy7KO5CZFXOAAOvD78XKRwoUB7aJjvRd2UV3bWv/b7oThNsk2HgS67WUqu3PhS2PKZOwn3BkhzQZchfaZw7UHq1aglI+P/E9yQzIu/HVXVjEkwgYxRSdewCA6nTi8w5KVDJ+2LOtbepnN5XyHiTe3wYBuLlYsMyMCHHD1mvyPPhUJZClhskVBXcbM+7N47GMKvrqCbrrcHOVCUw24ObrIfwxa5yWaMkGlijAdZaZhqcNuqKXBjretL1PO4BZzAkQOBUH5Wf6pxqNcSkzo3VulgJTVoeC4oOGkivclOW6pyF4FsuSyM3TQYPM8IltQscqLIfLALPhz1HoA0W6HeK5s09IqmqCO9QM5Kb91kwoCvLBOabDFPXqPeaAApGO64d63if1L6BvOon21Q19oNOiaQpY0xNOIBxmfQS1PcRsitwQBSACmiYSU0CkDBgBqsNK9nyfDgAOfRuWEyWGiS4nYtwhfNuj5NVj/x3zntvhWIER94JtXHiDBZl6cIKVNObF1r7l4MqG2CcAaCpEPhKy7Jra7sDYg6IymsAQxwIidN0UqQ9QmQ5zlbwfcAEqRj065vv0URb36cHIfFEkbNpIOHXhIGOqSMAjfoc1lvDgMTq3QIHinEeibpJDknCBmXwhMvaU3hsJo7RMWhSQpp9iIAC0BiVpYBD1I4V9RkaMNNVB27HwieMucffHCMkGYSe78S4Kq6PohaKTufqpf0WLAZ/D5p02aRIpoPnIwF6BPBT9a7rU/D2gYb1I8avqxuSjQIQs5IpcHQ55dC7YTFZapBgJqs+PsL1T0LYPEkYAjaJLcraGLByPixtYfzkw4FBa3Lj0RKrZjBvLmDsgDzHyg6DqfPGiD60cwMKSFJ6aK1+IXuTW8GbkAbQV9/SP12WFQ++syZyJE9c63GBeoJ38bnoPxAmJZqgkmyEBDiKe6UEEtydswVsNLHM+4TXLyq3p+cCJe+alDTjVvF8uC+/1kZy6gig/VwiYffjilFd38bmQKaRACEwckp54fKWIVxTbwkUUqCnpSrhbSlnlJhSRbQKqBqAVKscIqDAmR6tDXQBxJQBpsjP1O84xxql1ExS/SCPR+Cgh6qNv2ffj2zk1spZhZwtVKr+C4PDQzlQWWqQYDdPojh2DG6yBuxdynTQonVtyMO
0DfCSK09ltSnDJnOhFQgfTIk94O6N1NGM6HhhYlcGDskQSFAQpVUAgmQNJGOQHJx5HnLVpVG4icGo9ayE/ld7lry+JNwGQrIf2uCYjY2fAwkDJGdJn7wGoo25qhmsGZgChhnFpbbu5mwC0Dzv6LYZwyIZBN6mU6usFEjhfU31vOCTpzxH/1MAITNJG1HPYDckgQLqKtA7SNStVV4CjCQnYqkWXQAoAB1MhzaeSVAVVCFVBAp53+RAAUDLjqiv7IcKoHdZwKF3wwKy1CDBnLoe7sQpBPdCttNiRInX6oDCEmVWX58avHg8BMADgGKE1uTHqFitvTsguDFWiLfnpX0QSjZwWRfXh8cFSBkOEosgN8aRwtUJKUtzPqBEeuvMJDvPhOjLrQei6F5iij19bSOaYWSKQeB113bcjP4jD1SaRB3PYwjdbSyzWLXhGYX6stWd1ubawKtdYw7+gqqBypagrxDPpA4spgEEfndd8EplvYas/Ok6/qwz10twII87Y+L3xN5DeD5sgSANWVX1VYcETZU83gMoROm7Eog+HHlrhX5BBZUMONVMVt1HKdgUVz9UDsmIlK1quCbIGlc7HYQ4pBmPIXk8WmWpQQIAb5S4d8kbC65swI3XgdlOfdL6CIrGwlQzjKhvWz4B+k/BO1A183EU6iBHYQ8FNkham++gFWMLODiQNggAGuPFuilyK/3CAs6ZAAY4Y8CPGdPtvQD0oCPl9wxAiAZnV0WDserdEVXED8SmGLWAAg2SQyS6BzFBJ9kDvjpGMxDkBufWMxMThqPyrFXq0THUuLKun4knK+PD2DrKi1Z8ClDQ6q6BwmSgG25nIYCTBAhRGcrttOJjiHqo77gPmOoNIhLPuwUiZIKSnbThELXp1LuP8u5ioui9SrCcUMFpkmT6FDZFgoPgti3AUVCrGAdnKg8WqoZZCJEpja1DWfta5NohAUn6TztIPpqNAZdJlhok+K2Y9+BGq8G10BnjIybWjT7EJagqmGoPkccBVx+I1Qhd59jmS3KQTOlSKY0TAKEFFhR8yjuUHGQ1gCAlp9+l8zK/pKQGY+FXr8WJiAZeOZHySbCqmgkw0J39kHsL3OQATevi9uQOMP16oqyWzzflxSbVABRSkjtnbANQBQ1ubHsVKyUFruRhni6wCwl1C382GkBolZUBB7Iuc7HVClXfSiIBQip9VzvhoIPKbgpJX5YiqDU2UvYBNmFLoKCqZ+R9aKoYOkbB4CRA4PWh8iVYIGaBMVoEGPw918W52JuKB9Li49tBGQNWzrU8w+bJ49Eqyw0S6l0Xg8uhsX7PBlsAZlwnYisj5xt7EOpXvMMAgT2Q9gYSIFBn4BMEpSEjSf+9YQ+AGCDwtpeKUsgBQUqt4AMvNfqJvpPFYEkBhD4rQECfBEnYKiwHFDRbg2i1lJKEBwNJCiDQMQkUouN84gq0bBW3vwGSekYh2mGfPMTvHDuTeta5ZwK0AUGqrCF16SvO2GYizAEHZWU96LeUBJiKbSkUYCzz5uBSAwpApIKLqsDz0sCxEiAr6qtc/SDu3fE2q4EFqqOIl6EZ2kZuzqwRVTg4PX9NFi+cx6NVlhokVKubqFaP+R+lt7p3roJx9Y6OTNcY9I+RZRLqhh4/BldMgrrAx1Pw11K357o1H2goHhCizuAaDwaveqjL0Nwv697UGBoaFJao2TY4iCg9eBuHCmhFTNT09p0iJ19A12VqKxIuYpUdDvNJkBYw4rpkfeUgJ+rVKp/+c/aI16mmePvYQ6jVgVhlUj24ygGo3chcloqO1DS87nxAFiLd4PqwRfwWu4AAT8PL1KQvGGgd1tpJB+NCE5qxQLT1sKuaZy1Fo+XlBCvLZXFOZB0i1RXVXwBGn3c9/kSxkOOVOwGFMFHX57QNksL1if4ZsXtKH03eKz/H6k+qhnAsfEx8jKklYKw3GTe22TSL2TL0g7qLy2GchMVkqUGC35vBxp1VuD7KgSJMToYZI3JXSVc1AMG1SUO5pbK
f/NuDnryOVAyqoRdTLVAY5aFh4gm0zCqHwhgYuNrwsWEW+Go4K9oAIlboyVDUPSVps9AnTz44lrNwTLVeVwBKqywxGMuJLWdRDiSeKQ3+IgBXL1sN/l8aLtI5oZpRgWAGNEmmRqpbNMmBj31hriTI0t6bshLvBAoay6BNmpn23jpW2zXR/XV6QXQJAwrkhhnAgnweGmugMXsaw8DvPXWf8U3XRcftkns+tGwYgCayLADYUWAXuIfEoSyHLDVIAOAne8vsBuTW0CSy4zJjxFk9MPpVt22ihinZ0Oq8sS1wLeBAUtXujt4+gR2Xvw0wNg1AKJTxRdttLbo9RwZBAOC8F4cxKOACeqf6S2lRl+z/XFEJQ8ZiwssJ0a052phEMAgBHIgJJABCOsdp3YwaQLoG9g5kJPKK6PC6Tlo9W99TcRsSz7Flm6HUJeQhJhzJeqVEgpBeti05ZoeSKJe1dgxMXc9W4gEoAC1Q1gUUIoAgJ83UPRArJdmAIZJKz+rAPVxUhoPuQdoaUB8ZAghY2hYLUzHPB2MDaHAsPoyxoxosGM/sErNQzQA7arlSzrNh1lzisLB3w6NZ3zCY8fmTP/kT/P2///dx/fXXwxiDj33sY9H5l770pTDGRJ/bbrstSvPwww/jRS96ETY3N3H8+HG8/OUvx4ULF4bXnhp1MYEbrcCNVjwrMJqEj9/pcezP26LZGZLtumjQDHR8Mh1bg8I0H1IzkFsjbeccdm2kDwMHlSM1gAs+/YUxGFkfxMl/4s5C9Jg3diQvCeYpwRosxXkwpmH+Kl42hH1Ende++CnLQS6iIRWAEFYhRr++8gObcZU3SlVcWr2x6swzCLUXilw1dQovh19L5bFyjfJprdSqGVT3WxJr28+GGC9jgWLkP9azW44DX56OP0etfPZJPT/5jPhknzKEJWBc1N+L+nd4Hkr5rechhNpipXyorQb9vNau5DmkQVWOvckCBMqn47fT2rwonz7qfYSHkmhXsz31Y8pp+LTe/WzP9xH+Sb2PiHFw7T5F+ZZTmKr0/2dTDwRme01bC+WXDWip4j5D6k+uBr3cUsHty+fRKoOZhO3tbXzXd30X/vk//+f4kR/5ETXNbbfdhg9/+MPh98rKSnT+RS96Eb7xjW/g4x//OKbTKV72spfhla98Je66665hlSnIc2FWT/KEdpnh4UjcYt1BS2bNwi2ISc0AoMUQEHOQmoqIIZChlQHPEDhD+YbKKHm4Fmql+AlVqGubVdBjH7TBR9ek0GTCVmiuavTrNPCh6tSvR3nVEgZK5ZyvZDy5Zt0uM+WEslLl8MtYfr2Hgg5AkmVgJKMFNsH1YV9yrI9UC9EiVFntcpWDVFvwNIDwmuETTRcwE++aqzVSKg6y9ZETSdNt0s/G0WmheogTKewEtXGgDRYEw8OPpQBCsu2lwIHyW2XItOskc8AZhlQAKe3+upgXKsuQqsV6G68KfrmZYsgScqhwWA4ZDBKe+9zn4rnPfW42zcrKCk6fPq2e+/znP4+7774bn/70p/HMZz4TAPD+978fz3ve8/Ce97wH119/ff/KGAtpiSvPh05bH2piFfjfPPIbBwhA/T23CiEm24n/0Ffp3CBRXsuRqiw2GE46H5dP21+CH+JGjl1dNWlAxr63gEJ9bghQaLEHPVUQ7YyagVE1TNPYCmVSjspgx4L3QGbym0sFI91tteA8Q55N9BzESpi/P21rZ34ebcPHqNpczSAmJSD/LFKGll3RGlO7B8qULdBQjwERUAh1aXYcTU6AJCkvGJN5Z/SdA4S+75PXh0/uXUBMtgEBEpMAQesPBKo0oDBUjXKFidsHdcOj2APy8tgk/PEf/zGuueYanDhxAj/0Qz+En//5n8epU6cAAPfeey+OHz8eAAIAPPvZz4a1Fp/61Kfwwz/8w638dnd3sbu7G35vbW0BQLM/Q72JE4yIjGjiGPMlm7kJ/FZo9m4wQFJPFlY1xoQgz41VeMMuVC6e7K0x4byMqOg
NeChx3BC1RlnCoYBBWZdZ1MF8ZcRFm7kPfi9Rio6BwEXJmsnHGeiDmJaftiLjkoq9H1UkX5YKDsSEHE1swgNBZS+G1lHUqXW/OVfMeQBCLiRxR34pBoGfD3mlytVErrpN7EZMUUiBdls3cKFvpjxOJGiQHgANUGD1V5iF0B5y7ERK5SDVC0CsUgDy7xroYTgoK9PBivBTGpuhsCEA6qhtNCoCLrQpn4cGyMO9Wtt6FrzsfdlqfQE59G5YTPYdJNx22234kR/5ETz+8Y/Hl770Jfzsz/4snvvc5+Lee+9FURQ4c+YMrrnmmrgSoxFOnjyJM2fOqHm+853vxNvf/na9QGqQHCCwQYliE5A9AMlY0YuRjQCJpENb1Gt9oHQGe3WiZoUUAwaSwBg4hEiKrUGSMQ7RNfV3YhQMXLBn8EaPSBpRDpIOOrdJYmNal4sYHJN7JLBBLlr9da32oFCncvBOGABGkwunmTVJGbL1oXOp3MQuk+rguZ8rt66Jqi6Lapxcqctrcsdo0lCkbzCmkK1zEVAgSbZvsfKVQKHOrVX3zkmsB0jIgdNU/lHb44+VVvNKXVt9h7N9QGuyz91LCvwYV8X5VBXC+kawUEnQ0SEHZY9wKIvLvoOEF77wheH7d3zHd+A7v/M78cQnPhF//Md/jFtuuWWuPN/4xjfizjvvDL+3trZwww03tKlkIblIZWSIFYkw6vFAwLa2NI7SGQvqkAQKuF0C1UMGUJLqBilcJVHUwzWlpeBKYYdHtAHCoIFfSRtRucrEFYMFdjw1oKZ+hwvr5ylX9lJyA1FqcM7UIdyHNvl17Cqora6i+A9d9d0HUd3/uPTZ4ZIOI24DDkp7yYG3fbjXCGMpgLe3p0ldnwgo5FiovveUYg+0tB0SsVrC5oeAAoA2WKD6KqDIp+8uN5JE3wbVr2DvnD8m5hY5hC3oYq/2Ww7VDYvJZXeBfMITnoCrrroKX/ziF3HLLbfg9OnTuP/++6M0s9kMDz/8cNKOYWVlpWX8CACogyAZ5+ogSlWwiNYmRzWkKX3qDhfRqCyAjRGDCY98Zxmapk2bgiqiBge8kQUvBFHLVBAln2+sNLamSVNY0wIIQ9u0TB8NOPze+SCZWoX1mJDiwpgumds+pAZwmWdq850USNCqwCa/1sAt6hqBA6VuketlXxlQ12hyoFVkCnvl8u3zXKC0ARL5nugYlZsA8bmdBCl1FHGUXcd/N5XsSdkb22YThtiXKM8yaX/QpWYI12eAAqUJAeCKpu1lwFqvvp9rDyxvbvDaAgyGvetMvq3dYoHgHXMQsh/eCYfeDZdR/uZv/gYPPfQQrrvuOgDAzTffjLNnz+Izn/kMnvGMZwAAPvGJT6CqKtx0002D8nbFGG5cg4fKT+7GwgMGxOFBSSImgICF6BQG9fnZLB6QaNCzNthDkErDwg8YDi4GJKZhAjhw0IQYApr4Y7sBMubS1QscGEg1CYn01EhtIKPG9af7l89DiMwuVYfUFBoZSQLxoJnZiS9bN8EcqLYYYtINgJEP3AHMMCZBDugZ9045cXSGWG7RNFXyXHIISw3gA1Z+WhvoZJKUsqQhImqXYh7ZJLWrZ3Ssr0Ef9ImoReXzVbmWVwIk9zFQ7Gynog00X5X2Dz+2NXUpYFy7/WbZur7vPdEvpP1GK8iXdo9gQIEDjAMCCYeymAwGCRcuXMAXv/jF8PvLX/4yPvvZz+LkyZM4efIk3v72t+P222/H6dOn8aUvfQmvf/3r8aQnPQm33norAOApT3kKbrvtNrziFa/Ahz70IUynU9xxxx144QtfOMyzAQCsaTpYoRucRROCAAchTVdjFSslV8dZID/vxmXSA4LKNioH7oZYmFoNYRoVRCiCAQTpupgySiR1iTTuymFezhD01g0njve5XlKgXdELW7IISyDAgfY91I3nk2M2Ino3Q1/L7z1XllmRdaRjqUku92zmsX3gZYiJo++7kZ4UmqcOpQvZ8Tp3fe+qv0yvTIYtyT0
r7V4HqHjkO22Bita4FrdFMhwOoEe7h76qr9zEzQG0Vv8McxRFkQRjJw4IJByqGxaTwSDhz/7sz/CDP/iD4TfZCrzkJS/BBz/4QXzuc5/Db/zGb+Ds2bO4/vrr8ZznPAfveMc7InXBRz/6Udxxxx245ZZbYK3F7bffjve9732DKx9RmpwR4I21mkUDgQ/6UcUdX/pH807F0vANn6aVD6JEQit7GE+fOgMYJ9kAb+RYVi64XJJwgECAAfw3Ytag5bduureWjZ6dcqxr7o73ihgmtBqNAEJioEiurnusWLRySVLPJlqpikmjtWLm1e0zMe8HONBkKCjQpCdQSNolaBNHD7ZpUIjr1ISnneP55CYgw9QOKaYgxdpozyvHdPV4Hq2y5SkJDgNwaACDY31JtWGYVyQw1M6Lc+r+FNw1ky45KHXD4S6QC8lgkPADP/AD2S0+/+AP/qAzj5MnTw4PnJST1MqA/vPzHCBI33V+nam9JUS0O26dHQUzQrPCH1th38O/O7968gCh1sl2tD/KO3hfuCoOKlTXz+/13oAPvl3rvKKxE7K+2rbWUf01Y7M+A0QCFKTUKVqZrboqaWXMDG1DnXAdi5kdBmqeLjHQywFxiKFXEtDlWASgPbhrg72YnJJvMbfCzB3njF0q71z+c6YNtkRc3w9EHdOvwmv1nbPxmKFNjuy/5uqYU7W0vkvp2x5oNR7qRqqvqnFLluoYU9RpZGfoAbi04zk2QV6rtc2SgMPBTLxlhbCx3iJ5PFpl+fduAPRGmmjcrhjFtBfPgkcUZO6UclIiy1zL9kSQkurytD8D4Hl35xrKlTMIhqUnBqEwAIW7bVF37EZyK7V5dvzTAIIEijxKpSa9NAyZwVV7BzmRm26l0ifrxSfghPSa6JW21pl+KCMAtAdj7bu2Yu6qS+73kGuHnt8nSdP3grZHhcgwEEhOhlmAoKRPqb3mkWgnRvYMgxcH0HhyCKCW3BkzW2DmvpTfLYPdVFs7tEdYGllukMA7qmmCtUR6TmXiSUbUUyyzOXPAu5fUqVI6uaqWk6mFV0cQUODxleirNE4MAEGxp2iJq9IeCYhX2H2GC23VnmOSpPT1aVfPi/K76iIByrwuVpFFf199rppR5v5SdPZQkXXUvvepJ69HboW54OC+XxTzIINP7VqNtg86fmIXHLuGd1SWv7J9tPa/y6C3r0S2GuI+I9UYPeYc09RXMmxI0otnkTa9z3KoblhMlh8kACGqYiSkZpDsgmNujrzj0Namttni1MAParbOP7f3QZlpQ1SK5f8NWCAkE+Unu501RCfO4n0G5IAkaUYpptnaVdovOKRBQ1+AwCfp3C6BYTBLrLCqujIt9kDsm9GqCjMg5ec1f3vtGauy6AQOpCnbHMXLQFTNO8VJeFaplVqu7tp5DRBwdcGiE3wqMmRfidQiVd5AMEWBc3GVTtvXde2cFvq6ObLvQ4x9NaFxpllQ1GMVa0/eu6veQp2zCq6K2YQcCE6xA5l0V6pUzm+6t2gej1a58t9wDzGuQoHmE1HyBAzqndFMWRsuyt3KgMAeGCDSmzcdEgEsOLR3sQNL1+zO6K+mrZxpeCysQWGb3SDHFhgZ/yn4B+0tX51J6D/pPkvlw54FsQ19VtpyUKOdPVPH6L7DOSU/vtuf/Ewr9pvtqkk7boZj9XH1E87XYyTtxDnA8LLP+T4f/xBsv09PkW0uKq9vvl0AQoCDAKzpI3cYrKruD89Xtse6L2pbHqsfIICOFngR7CL1Vf5xqPsRC+1Ou8M6O/K7cdYfWKt/6HF1vLs+hJZsM62dMes2nes7EQNaL3Yc7TBaL4D8uQJhZ8p6Z9zWh50P18pdS5Vn0/nMos+cVN+3mAzdFfnhhx/GT/7kT+Jv/a2/hbW1NTz2sY/FT/3UT+HcuXNROrkbszEGv/mbvzm4ft8STEIXJWpc1WyVSsibb6BiCiTFtGMtAG1jt5CcRUFsX+OibaH5nhFJFkD8zoU3bjEkHPzQYoK
tHvj+7tpqNbfq0WwP+rAHUqSNg8YURPthiHppQaqoLrRbZmUAW6uB+rpg9rFnkPdA5XJJXdtSCfHfUQZpNoGXTeXum6qE1wtoswBSXTJUtQEk9ePJUNdSlWKsGn+C0vC2IkvKrsh5WU7f8Euza5pXUv1OsnYVEAJQNccacN60BRu1AQfWBIaQOAm1SmcANZkm0VYOak8HD6wWZRL2qTKKDN0V+etf/zq+/vWv4z3veQ+e+tSn4v/+3/+LH//xH8fXv/51/PZv/3aU9sMf/jBuu+228Pv48eOD67fcIAEIq+fw08R+xaYGB2GFQlvB1YOMMyasIKQe3xkb7f0QFSsmndQEaY2fyKrEKB8AgmZvIKP9kYGU0rkigOCaqIDRAFvB06ri+hxQ6CNdVL6mNgCY2lSAgRxYyaU1PF3tOurg/PH6RXh1rQkbBGl4QR4bAqCGDCZNwK1m/wR1shVAQXN1VYEKz1fmlxIBAqL2yFbxkb5ezjw97DC6olZGP7W2nJqoaCUd6uYlteNkuJQxYfW3mgnUVXlqTj0mPU3NR9IHMJeyyDoHH+rdhDwLoYYILpLcDqND5g6xLiWlFrUHM/1cyd4N8+yK/LSnPQ3/8T/+x/D7iU98In7hF34B//Sf/lPMZjOMRs1zPX78eDKScV9ZfnVDPZlHLk8pijPYMDQ0G2pqUQIEovC0HcSsaSIianYEnN6sXK1aMAaTwmZ3aIy2/CXmQ1C/oY6533SPUpdo2T0ytQNXPQRVC2K1Se4T3QN7BvQJKppaHVChUSfMarXBtFYPzKqmU9PHRXSrTy/zJgq2Oe7CluAOCAFV6EmlJvM+BKjm/SE/Gh1cijry9kXHkioCRfWVq5/MN6pyRxktgCDtd9C0r+QmW4uI1uZTZWTsAug9UdsL7U79NKqqoNri70uqbwbep/a6hgIEn85Fn7JyQZVZOhdUayV7/5G6gKtUOj7yunD/8jcbg7MfNM8yUpkukWxtbUUfvjvxPNK1K3JfOXfuHDY3NyOAAACvfvWrcdVVV+HGG2/Er/3arw0yOidZbiZB0o+AOqABCAY7YVAjFGsFuqX8oIODkCRTrTgKo08ZYhakrIFT9WdGR5rKoCV9KGylXFI9UP2H0O19hJ4H3x2TJnlNraDmIX5rE71UJ7iaLuDRL7viOgwRbaCnMlJSsadLK1eqd2A3Mu84xyi0ynJNOZQ0lN5HTdB3K+MBA35vN7wUYyCTCdAT2hVigMDbXjsT71VUwndR65odKPkzjJgF7fsc95ADCFK823V8D9S2ibGUdW92rR04KSvpc29uLhbtAGQ/vRtuuOGG6Phb3/pWvO1tb5s733l2RZby4IMP4h3veAde+cpXRsd/7ud+Dj/0Qz+E9fV1/OEf/iF+4id+AhcuXMBP/dRPDarjtwZIsCOg3AMZJAYRqL/ZKKVZVQcGQqgsbLimLdLoj3eOqkb0QOO+mDISjFQNqXtLqBaiGA/a9YkVj9yrolUf6HteDBkA5MQpB2kODlIqhpz0rQsNnuQWSzYKMFQnk56kewhVY8ggD8R6Zb4VsrQvSFHdOdVDqr1KsDAIKESZdeujI8kA9tT5TulSN/CkStsD9AVA6ep9V2o7FtRsVFG/EGpHg4E4VQ9poJtrO1EAwxrM+GtZO+oBFJoMh4GFvv2tTzKqhsbQXi4hlmjRPADgq1/9KjY3N8NxdeNBAG94wxvwrne9K5vn5z//+YXqBHhm4/nPfz6e+tSntsDKm9/85vD9u7/7u7G9vY13v/vdjzKQIG0IilFLx4/RJKw0CAjw8Mp0vWPuhabcA4zfIpoYB5rYSKRBVKQTRpyutbEN0KontykwVaN5dKji+6RxCWLiSD0boEXJGjnp0DNh10aTCBodJ79nfo9R/koafi0HCKlBW0rK9VSThpKPQ1z3FT6JJpkCdixlY5GSaNfDejD39Y3Laa0A+Xtj74i3L+3Z8/vhYIjSR4aAmi2ClJ50vy+QKhHnH+XVx0bCDNt
dk/dX59LHAD8J8+dCYNIo7yM8NznRairAPkwDusFldKVp0hU1KIhOR4CiAQoyuFirDplz+yEHSBxcVtnc3IxAQkpe97rX4aUvfWk2zROe8IS5dkUmOX/+PG677TYcPXoUv/M7v4PxeJxNf9NNN+Ed73gHdnd3k+BGk6UGCdIVkHYaM+UsmZ7ScuMmI35HUu/94NkAXQ0BCGYBDa0dykbTUZIAgdgMbuCFZtdBeU4VORj18eWuqsjvPFh0U3l8IDe2Fd9AY1VyQEGTIftOkKQGHh6MSjsuhd4NX3HlgFAfgCDpTV62NzJDuC5smayAhda+EgqrQNfM++ybivUEBkCntbu6/fE8rMEcEoH5cCz9RPj70M836qms6uEy3J/We6NFyIDZV7Jki7CFQz2EvplSYXHWYuibvfrqq3H11Vd3ppt3V+StrS3ceuutWFlZwX/5L/8Fq6urnWV99rOfxYkTJwYBBGDJQcK0AvZq94PC+u1fi2LiOwNZIUtjI6FGCDq7nE6Rr9yMbQ3IUsj+gAb91mDP8uUAIXLLFEAhWTcuxsbMQ0qknpnyKRkQ4N1CBq1hK6MUYCC7Bmvq50A2GlRVU9O6jE0YOvDISb8NCpprUsChS1LgAOgPEDrLgB/4pccMrXyzahBXhXfQ1S6lRG1R+65Jnx0OHdtWuFVoj8lUsQ/qa5+w30LvBoj1+y1d/wDmAGgzk5pQDtl9UXp4bGhMiJSU26XMi6fv8moKdVROHJSqAWiMnhfN43JIn12Rv/a1r+GWW27BRz7yEdx4443Y2trCc57zHFy8eBH//t//+2BECXhwUhQFfvd3fxf33XcfnvWsZ2F1dRUf//jH8Yu/+Iv46Z/+6cF1XGqQQJbyFv4lhl0S7ch3hgy1aWqGwOvfLUo0bAJNbkas1lDnyeMLpKIkAghbQoc0TKUQfpOLZgIghN+ScpbfadJ2cajnrKTyapXPAIEMXZ0ADO3VsIELT7e2SwDAnfT6TK5ysu+OpNgciZw9EgOvNhakwAGgqxi4cZnGXmhFUz406JcOoT2XjoUap2eu2ZSYeN+OXoa3XDUHxB4N0cUKc5CaCOs6qkBhXvsH+Z21Ren6mGtFwd23B6CMqs3YBKmOSoG3HFAIALp+z652z22lE9FYlQStQzn33HkNBqWxb1+AoNXHif+XW9w+GC7O4xXQV7p2RZ5Op/jCF76AixcvAgD+/M//PHg+POlJT4ry+vKXv4zHPe5xGI/H+MAHPoDXvva1cM7hSU96En75l38Zr3jFKwbXb6lBgoa0/YAaBxPpI4TsqTP4zm9R2BggQJkI25nVAy6AgjLvMxDLFQk/3lM6dbZaaFzxjFrBbDirwHCTk3VW6k6THDEKBi4wCNYAYKtfCjSV6o8yWBKQBwXadUOliz3wx9rXzVsPvnKlcltW6oDetmsAm2NmWuwBZ7SAdpsUciBua4n2HzZW0upj4uBJXBUENMCN2K7IhigD9KOymI4/nUjpV6JvSBslAgqaDJmMU9csAg5SMiRP3j34c79cq/Nlk65dkR/3uMdFbfkHfuAHOkHLbbfdFgVRWkSWGiQAus6OjHT8JJQIVCMGG26Fz43JADaI9B0glZVe77j3B02pcrVGimokgzE+6FVVFih4vqBOjkYVQdbXZHTlo1DWVQG7oEMoSZfKgYt8sl2Tw7wAoW99UiLVD7JOJgUmqUzoq7QWe1B/59sMq+wbgGRkQ00048d59fUcIPBjPLDYgD7D26I8LoVsFSRwAwauglnfIBBH4ECri1ZnLl3ppVH1QbobcpH9B4j70MFYqDTxLhbN49EqSw0StEYmB/SggkBteEjUeJ2eVh+FQe2a5a+l62cuzoNPfIU6srABssvHnE26kYpAo5JpNtL0ntpAqkkutC6A3FayKlDg9VV+NxM5YwusQQEaIH2Y6iGTbqiPcqt9p4o+MRL4ADfUe8GX0bMyCdF04arRXKp87WCOPUipuoDo3UbutxpQ6WLLKJncaCg
hoU3zti4BAmMRNF1/UE0NMKXren/J55sCQ4paro+xbpYNSkiIyYLu/DmQ4KoPNS0bUynvlKEvL5tvzMbtdqoDYhIOd4FcTJYaJJSVw17lYOCCLzMJvdMCuhuQ5lIXqEo6HvJykY64JXLFtOgKSk64UCbw1ApLXt9VjgIUeteN2IQOg63A6tBlDJQBiMMpX0ZNZUqnKnXMvJ7h+6By+qeVrnihflpaUa/cCrF1qg9AaGXSox0Nbd+szfG21rktM/1PMAip+SZS3cwB3Pj1yd1ERZ+XrGHUP0Ii2+oXOUm+z1T6ukwOGID9NxiUbTHFGmgxUR7Nq/NlkqUGCdPSYVp6gOAqpyJsY/zKFYgbtAQMtAqhhp0bHqPOLQZfIMMidKzS1FWZGFRbcez5dV2DuuaOlrqmjwV6jk0Qwt+NChhCwvZb1PTHnSskgfy72AOZXcrae4h6QT6NLsZEc8XjHg9AbKGuPZdwHXpMLCmmS2uTmldDQjq9GhIgOFt+XQdtP4HUK5Gr4qIPg6S8mNaup8mL9ecpg5+FtCa9cdegcjLPVZaRYi9S7V0umCiN5u3Az/NreUwUHqfioBbnV7J3wzLIUoOEmQP2KgdbAYWNO1VhDcbWBN3u1DWDcDsAiYm+8z0hqUNY4yezqGswy3A1NkMEHFx8HEBwWay/t6Rrpc9XU331xRwo5ERb/WjnkaCgc1Q0y4OrbFLdsBCgQh7TJvhmi24X/vP3LA2vWhbYSVew9OA2D0BIeUFwdQOneVMGd9pxFShwyYFGpV3NDRDCSX0Sy6aT4IT/FwCBT4RNXj0neMqjA0jwBUJgETrzzIN4CRSyNcixlJrNlVD9OfRTcwA6QADi8VLz9PHHY+ZABrHaDxVAXzlUNywmSw0SKtDgSbpjf8zW7nbkegf4l1w60cHhB2faSc1CoRSNabtEpgY4pQNnXRtRU+1GBEzqmqBl2TLEdNfA1WNV2ORhu20rUtKHjeBlJivT1LNl5AjdUEt1ZVR0qqk0XOY1sOoCBkAzkQewUD8FMpgjIbBLIuMphONAi3Xw3SB+13RNOBwVlgAG87JOmuQAR6584c0ANPebmmxV18+E9JkKQh7aeKCxBtozZMcOavqRz4tP3kCbASDhgJWn4+f9cR0Y0Dku1phkcLNDubJkqUECUVah8cP5eEDWAZXBFA6cFnCO7SMfJhu/lbA1Pl47JZf0eNJQKXM8AIQuozBXNZEVJXvQEyi4xPfsvvc5g7FUMg4Y+rIXUcbzTrlxZbSVUY66jarQY1WQq6XORqWvl0GWZOlhwq7TUAx+nq8GGDhQ4HnRz1YgJsnw8Amt3kK49/bActW66HslycVj6KFWk/006ru5fiXaVW9A1Nee46DAQaJcDSBw6bvviBSyN/B5t1UK2aoeEEY49G5YTJYaJFSOGqQLv51zKCsDZzxQcIhtFXgjpgHEGz26ls6yNcDwjq5ZMrPBMgIIOZDAL5chmPsIn6hVXW/3YJW0hai/JwEDJc9dL4/nfg+RHtR3FxhYZFrrM8ClwEHSyM7E6TlY4GXSyk7aK1AeMeiI8zbae9faShc46MN2pSJ7avkCadUC/84mPS0apmQPIpVArh/uF+jZB9YgqybqY0+klCfBQV+3XglSJfHE2YPSNcd8GU07lvlZc3CumYfqhsVkqUFCYf3H8Unfki4amFYO04pcxjyVS1Me7QaYo7wicGGsGIxjhypX+EdJURQdqshqv525TqN2bmLDBx4Z/VDm23U9EoMWX1WJAdNpq7EeE3ZLpKpBGZS7vCZa6TPnUkN+FyMwj2jgILe6ahkrIlZBAGixC2HQVlQQPn0eLLTesXYfyrEIZEj7E/4Ou4J2db3bBFDoWhFHWfAypWExlyHqNIU96wtqQnGZSveaODv6glaWBlCHxv3IlapdpkVH5WreQ3XDcshSgwQLAwuDktkm0NRdGa+HCFHS6mPSV9oH84kNcbjLJPd8kFHSAlBgzIIDG0gj3z5lMOSTO6C
Dg9SKTgKELppYOddn4GoFpOojA0FD70BTIv/UyryPUVZqMNQ8DID8AJmrPbWbPguRXBQ7UkPIOkoDx5QPO1dBaJKKr695TYDyG7Li1lgkpZ0k7WQGtKnWvaTcE/swHVIU241o+3Xtnvjll3FBmionxR4A7bbLVWk5Q1zLrqVolg0AbbcdgzZoOCiMUFWLx2Q4qJgOV6IsNUgoaoagqBkCvr2rdQ14oImf0qcN2wAYRHERStaxaEDW4y7UuyNWs9pqHHAWMNSTtAGVG2GlXMD4b7GKcRIcDBhI1ZWFkq4VW4IDhnlp2dSgLUTVkQuZp+/28S4g6fNEJQVL+UpKvDOfjkEzBxSoHpp7GhADhdR7lsIZiM46pliE6F3X/VFFYRmavqdePSsp9mCoCowDgfDge6j0BopUn+TScMmBAyAPEEjk60ndhV940ffmIoqi2mISlOsPQiq3uE3BoxgjLDdI8Oi0Xk25ZvAEvBGjV0Uw4AATGS9W9fUEDqr6Sxk6kgsImHccAh1A7Ldf1JOoM9aPDxXgCm6/UKgRDbWY9FJXO5TS7CPEkNB3fn9RVeqb171DbPS7yWRxQ7aU2xuQv1cZD582z2lZbXcs7+Ugloq1wGNrDL1rTsUC8XPsomM140ZpqyCNGqX0pb7VmuRURgmAkBNVbZZhjXJPJ6gaNEDaY/8SXu8WqMm5+SbqQtXuivPR552lZF72QJO+bT/YhUen47QpYCkD4F0uObRJWEyWGiRw4TQZN7XjKoic0BWlcy1DsMrFLEUfaYBCPJD0MkzMAQQFJKRo9846oj2IqLVjk2DJJl8ZKhjA4iwDFTlEfUJJ6FpRt1So2dwmTH0HyVz+nfVNAIShutqUigRQbBHQv53k1B/pixSDXiatCbeHPULfJ5vci2OgK250KlffVJtUQAQHCryu80iKEeoT/yD3JPjd9Ald3rq+C1QKOSBtw6EsKEsNEmR/0FzTJECI9Gzse1lBsAX+f4XGNz83TWmdPgAFyig6qRj9KZbe0bkOYyhNJ5kbjMgbhKqX8uWnSatE/YzCytxEKyNpLNcFFHrZInToeLWVGQ3I8hyt9FMBkVLjYme0xgxA0AZu2QY1cKAV2cuuAbraITd59IkymL7Yg4Kud5mdbDOutNpEmJJwqqc3UVSXvq6M8tgAN2AJFOYRk/jOmYoh7sBAGhz0BZZtkKvXUavrQUjpHGOH58/j0SpLDRKKOtAREFOv1umtj6IyFtZEg7dXQTSzbFGrF3yUxVivpoUjjYMvWcCOwqDjUE+GMtKhNvhoNgcyLQMIXW5NQFoXx12Y/LVOBRk+j8YoiWwxXR2EqnnutL12fSs0+TGjzr7S5R+vDYApwzs6F9RDBIoSA9RQPamkcVNjSUvPywZW1UVMyUPWmZelGTPOw274OugPp+VW2PVO+7JJmR0mU5Ndli1QQEIEYrSooylWIOfNEFUokZdQSfSZF1NvrUu9IoGC/K4J1Yyzg1o5cg8IrV4tl/FcLIqhxtALyKHh4mKy1CBhZBH2beADNKdxtZgI9J3O81Wzqa8ngEAsQnJzp1oILISJkUlYXXfMQJ0xDTIuYJIVSJYh2ALdJqF9HQ9C5Y02fRAqR4CKeYVwOjQ8MmXCcMZGg3ev6H50GoK1ECKZhPBdgIVFpEvPKxkNeQ5o27vkKF8rysst9rkx4xCRK0m5OgwAgWQelVJiEjU8Jgej69XbzNUh404b2luOvci1w6HGiDJ9j+fVe0v6hEhbo75gUXvfqfy1a2RMmFbf5uzpwIXDoXzzZKlBgqkncU9lumi2oBWaYfi3S9dL15BlrjExws5dRxImRsWwqVdXTQ1Q7HsXQMiNCV1hU7uN+UxtlxDba1QCKGT12WzAGLQqk8ky9eRARVtZyb0dkvl0GCumhNzCUnXMWX6nypSGmEC3EVpfsKDRzFmAkDMIjDK2cfrU+5RsQp+AXHOAlay3TMYoeGHJ1S8XoKqnKkQD+y3XYHG5ZBFURoDqw8o
qZPOkCd9VybZg0Lxfx687ACmxDxEX96UmyylLDRJoQpsUTRAZ2RjiGAikWkBLL00qBu664xxQoq1/Tw38quqB/x/SKTIDQEq9wHOX+wOgdU2cTvahFLsWIlgyFoFCW8MaFSi0VkZ9rMLnXcFJi/v6Ok6XchaiUt6m3ONBrV7CWJEucS5vGAl0A4M0yDIBGJKdRZeQCqKd1ZzgQAYmqjomtp7vL7AJucmxQ53Qy+g1kybqX6LPAXr/l++qlUa7nz7ul9oxPp4kFg98bOjlzcDY19z71urX2nY8mbZmc4wFrIWplE3xLoMcejcsJksNEvZKh73SYVLEx0k/Kw1vODBIbf3p0BipGNQrZyDo3wvruQmOprt0axGFyiR3XV9gAOj2Bfo18fmobmgDA94vgr7btVcawWjRocUoUJyFKMZCX9A0dBXXMcgSYNDUEPTbp+t6o3E6vrIH2qqA1F30AQY5VYqmOtGeaB9woAIDoBscADqDMI9wNqEPk6BIUlWQ8QoC2v2MH0uJ1nZ4ACtAeY65731FeTbG2KT9QV8gqZajvW8S6UrqqsbVVbIhQOzZVVbD3bEO5Zsiyw0SKodZ5Q0SI4taMYFH+mkg2hSK0qA+TucjOt0YFkthmKuPRgOSaPnkBi6pX+TdVgLdLoCQYw400CztPRwaVUMDINqqB7pPzdtCZRiA1gCYGko6p/IEbWtq247IeBUxeEiJaiDJasIBQxfY6LLuThIJQFR/zmrMq1boCw6ADvWCSLuw9IxFAGRUCQmvoD4hi9Xs6meoqdQ63UYTAGFI1NEouqN4PpzB420jBRT6AOLk+24BR5cF6gYCLByUuuHQu2EhWWqQsDtz2CkrVDAYW9rS2QQvBhs6c/OCvQ0DUNZKJgqCFNLQZOa1aBFQ0ESuGLSmpAU5MeK4NthQ+hR1qLVbfq8cIPRVK/RysxMDIYEraaNArpI8tkJrhQWW10DmwCEDFDqoXGI0JDDgYCEl8nRk+Z2yJ8hnOVjkyl+qTXIqE8kcRMCgzySmgYP9GPD7uBNqBnDIqBgUgDAEfLeqiPjZctdf+aRVwDXkmfJr2T2Qfl92Vc6USWNdyXZ1imwLmQBULfYgpSIRYMFUB6PpryqXZI6H5PFolaUGCdb4TlsYg5H1BoeFNcEOgTozt0uo4FUNI+vZAQMXgYnSJVaLNZVQwEUD8pDB3ynf6foulyU+iOWMDDUGIeXF0C5DP55y36M6ENNCQIE8HwA0/vHOwdTqB7nS1oCDvB8k0kQr4D56XzpWpzV1/ImhLAKvR9fw0ZdtSkmuTpR3y0VNs4eo/0d6Z4doQui1+dF+AAI5qWublWnp+DG+ilYmUymy//UKJMaryKsgDJqz6hrN4j8VFjp1TIKjqmpuna6pn50R7dmr+hr2o3VfWgPNqQRT4CCVXktj5laCHMoBy1KDhMIajAqv+/Yfj1E7fYON30IaBjAinHPkwscmPqABCmSf0OUWCfSfICRY4OdIurrUIgABSC6Cg/QJfiKDKpXMwr/ZfKv+XT90zjR0laM9r9RKLqdPza3CeL5RGpFv63jO3S1jgGbYd61+fd3Rcq8nGePAVcNXtIsKv9fU9tAD3GH7qiRITROKFuOENm3JXBcx9EwCBNmeBFp3xqjsiR+cbLzHCVOpyXuULFmviJpUHsWWmEc0I9acgeM+S7kPTMKi1y+zLDVImFiDiTUobBNUqRXsSFxD/Y/YBVI1lPWxlm7fxTECpCW/nOC48MmyS3Kr6SHXayoGfnyI5Pz7uUR2G1AG2vqiABicB2ohgmNdYRqQc8+U8pNqm0hlkTOyYsnqqtRfqrAKAxJuXtr3VBrxWwIV1W+cTXTcXiP36gaBVBqY+65oU5JK20ddRGmYaiG51XnmGbSAVe6diOdLE2doQx02HSkA25c9AND2AIjOyUEnZkWijbEke8KbkFp32wIG8l5aTR2s/dUAxPC2Go4bX7eECigpObXEZZBDkLCYLDVIWB9brI2tiDz
XqAOMIZe8eFVt0GwuQiCAG+alaHdiFgAAVbPxi4Gu7+ua7LhIq2hWzODIedHzYPkPsdY20fFh0EVWl1xOeTkcMFAZEjQA+kqu1wqI0rsK6n4CJVtxaVS3Rre2VnyZQa5j8o0mOW01zSYDaeDZpYKJyk2tZC/HQN1F+bP7Hbp5l5zceBrTBRaEeolW1FqQLS45T5NBXiAJ5iALDmTd6/Qqq5BTP5BKrU6fi5yo3g8XzibUeUdAQas7pU3leQBSVotP8uWjWDOy1CBBk5YKwBgfGbCGB8GAj620vTuZllf8OzLWQzPRBTdJZbTuiuzHLaV5GX3aNAXs6TrPgYLMuwsY9MEHfTAMB2HyXj0bQKAgLreCvrpLAoUUjS7PR5nV9eCrMSXdYFAQMQkJCtnYeuMQ2wSccZktsk1HOFutfHoWyhbOyV1J55UcOADQ2hK6w/NAE81TJpowBwKFVl6UpfidVT/1AQfye6tApe6cTUm9F65+YJe0HiGxZSJfNa+uurL6BqAgjvdiFQ7lipdvCZBAdLd0fQQaG4OytiWwNVCQLi1yAg2R8ESe2iDGAww1+fkLJXiQXVNaobe9DYYhYAI8jY1FDCSkXpLEQAcGqfsnofgTYGVK4d4hEiy0VQdtsKABhd5uZh30rkPFVmMAjwyXlJwxnywjxSQ41INoGQOGHH0sB90u4zI5YUWgoWe7GuhxkrxWY0vY9xw46FLFETvYOSkpq+uUV5F2rlUH/s57MAdRPkkr4YTRpQYQhIoqBRZU0Cmvlee07xlpAQVNvkmg4VDdsJgsNUioXBPgyII8G/QVgDUAKtNiFLg0Okpehq4K4JdzOr2lf2cXWmOiTXrmHX65SkSG/w0ggIGeQkzQqTzbhlj5FWXlXERfUvfPuWZysCBZhTDYC6Hn1GVRnpXEyrmZzEs4Q4O6ohfvmnwor9TEoK0Q6TqaMKkONbsQVCE0yPOBPbdiju6vox7ycnrnQwwHU5KzO2DMQc5YN6ob4vYh+2a0Qk7dJ38/CitT8Dz65gMFHGSYmj6iggJtos+1ATrF7zNlEzJXJZV77AIY+9GuBsohSFhMlhok+KBdDhNr63DKiC2Lgagh+lW2d2M0zNCudC4bTIiDAM39kIz2+KRH5XGRwYeGigXr9wIo8DK4FGwQ5SNiKiiUyiaIelAdyNCTq23ovIuew3w3HDFDfVzOUoOmZmxFwlUCdKe0uhfnk/VM+Yn3ABatvGDjyHQVYgt2SeOmJkWXsEHQqhI9aMWAcMB2yE2eymSQAAf7NfxGIGIAuOt9XJxreYVoTE0qr8wEqb6PjCQn6SYBy5sBCLJ14OUMjVXB5VC18C0pSw0SSGwKINRCXY5YBlfbKZRwWYo8kgR9HyVxaLEQvI5RnaluHROo3COArquAFthwLj8hy6BSTd1iYKAZCzZluFYdSIVD6ghu4zHnQqqpGys/ZY2dfYKKsVVSNMDQV+YBBnLFKg3USOpwxZGrWypPeSzhttZaqXaAg8GrTcUwUaoUuuwPcoaD2nnKk1vmd7ICHRNbLzsUyqenqkmtGx2eFyDkVvAMUBr+rOsfmn3MQmBBppnn3D7KYTClxWSpQQL1p7JeFnu9OkPHwqDIGIuwzYO1mDmDsnIh6mJEm7vG9bGLhuc/tUkxXhHz7/HkB+gDprZjodq9OhbshTGRgSbPo2+cAtpcCGiMMqsUUHD6s0kZhPLn68FHLk38WzMei+vdrMIdFJ18ZmDdN0lNXjnR0vbVJ2vHNBaCf5e2AzJdTpRrOBCIvmfQYy7s8SCRbIKc1HKsgJQerEPv4EKJdtDJ6OTq0afcRF0Ck2AYOKWkEizQ7wrt/pSymRHlsczS97KPUrp9UDcsutpZYllqkDCyJrlybq0oAIB2HQtAwgZXSN59XA0ayDWSVuBFZsTS1BBADCjoO62Otexyrop93Cm7BtUic67veKyFe00BBaBtDJqrbxuAtXffTLqfse/qClAABZ+HQuHzPIdM6Fp5sn7
8+JCJWLIJ2kSfmvRS9eoyLOyqV8rIjqqcAQap2ln4dH1dh1tV6ptQe1Ypt9WUakibODK2J+G3Vh0JEFLPXeQXqc/2QQ0Q1F0ReInrYlxVM3RN3mrMhEP5lpClBgnG1CoE1sFag0RGJ2hBq10TTZ6utlmwjkIMt4MshbQMHMgkRvmeAwjzyBBKtndI1oxUDkKtINQhJgYKVEafDa642oOrGeg8v6YFEHIDk6ZyGDKg5tQEfaTvREur7yETZO5eeL6piUpTKQwEMfLVpvYdAdJqqNQtq8aJopxF+tLQzaqShoh9QFmvCiVW7iRV1WrDzpg22L1SJ+tFQPeccmi4uJgsNUgoTBNtMdgkVLOwT7kaB77+lOydj0WbLZ0HDZWDj7HgGuNGOUYQOODBNjT3SX58ERkU4IWkHixaLEKOykx0ZM4OcKBgnWvCLUMHCpq07AsUVQS931b6hKGguooONKlYJfna9149tm/A5n8PuLYX3Yzm/mQo3iFl9QIHQmWgSctlV5zLbWmu5oc8m6ABcU0t4dj5uSdMTZWQOB9Xqud7SbWdnE0AAd4UUOB1G6KqSJWVEjLU5HYYKXXHEHXZZZBDkLCYLDVIoAkkGOHxxldVCBp4ivBmRyADKgdE7pJ81ROVAU+lE1jQ9kIoa1dMGqwMEHaRNHDeCwCN7QTfRnno/ZIMWlnPY1yU6OQyEE0LKICYgDZQyEmkloHOtvS5x84td1Mx6BMTydBwymq+Cek0UusYSFtgQalvaqBPeh+w/12GhTwNFy09r1VfsExAIccYyDpKFVfoH9q7co0hqLFIswkp6Xq3Jobl2cBVfEGj2YWEPKrYLoCudVXjFWMKGHLjTdmjyHuoy/f1Tt9X5OqZuq8+9j1XIstxKKocLO9zGaQVdQ2AsyO4YuQ7kxWdgJLyTHo2WOm5QAwDAQQ6LoXOE1Aow3Xxp6/0AgjsY7o+NftCH3l9ywCUld9Sb/S+C+W+lEmAswite5THUpIa/PnAqJ3uAg1zgC9nTPjwOmj1iiSzquPvskucESGRlQ+B6MrF7dkpHy4yfVT9zprp6SImwukfXjdZh8iTgj9Xfs+h8LS6JXpnivD3GiZa+VxtkU/H6pDyJnFafeX91G065F8vjro+uX7Qt31pdUm28QOSWeX25XO55OGHH8aLXvQibG5u4vjx43j5y1+OCxcuZK/5gR/4ARhjos+P//iPR2m+8pWv4PnPfz7W19dxzTXX4F/+y3+J2Ww2uH5LzSSo7401SOcqb6zIjqUmOJqMpNtiBQPUEQMdvPWvtHQlWjVFj1J8hKrOk7ZSDns/EE2vAAWZZxYg8Imcd+iBKyQedbDlm8+eIwdcOfuEIdLJIgBJgJDd5rjPM+hJE7dcETU2oY8qIZM+SBfl25UmlN+fOQC6YxjMM2bOOy3wNiXdcTnTAOjqOM4qROoHDgIoPTEKCao+CRS059uVTkrPeBTRhkv8moiuqdmFQJXWY43Ty1CDaFVVz7ZV5y3VHVwOEBRIudLVDS960YvwjW98Ax//+McxnU7xspe9DK985Stx1113Za97xStegZ/7uZ8Lv9fX15v6liWe//zn4/Tp0/jTP/1TfOMb38CLX/xijMdj/OIv/uKg+i01SABifSS3pvZiYeyk+cnUAZre3qsubKR68HkblPATuzFAUU+RFQDrDGAdnGuCJFHAJA5EKucrx4EBKS6M8xOqFsJZgg9uvJUCCPEDShzvEs6oy0GVfdeMEi28iiYYXUOfbBrAsw/GGhCDJxDbIXReaxofcjFpqiqHQfVKAAQtnzkCF7XuT+TR1+YgF/lw3jFSTuqLisxD/qZN1bQoqXyskJtmhfwAoGDtSFD6kSir+l6uoz3UWn1X7VF5Rbu8VtuVsR1aF4g8cn2nBYRTP3RZaI+QAXIlx0n4/Oc/j7vvvhuf/vSn8cxnPhMA8P73vx/Pe97z8J73vAfXX3998tr19XWcPn1aPfeHf/iH+N//+3/jj/7oj3Dttdf
i6U9/Ot7xjnfgZ37mZ/C2t70Nk8lEvU6TpVY3OMU+oHINciTQwD8BSKTo9Fq04Ee2Hny8V0X8PfpYnYr39XN13aUVeHM8pGX3ye+RvkfNdsjElVMnLCiS+QgeCzLdwHyT99oaqGz08ZWwseqpVekO+l3LU6NQuyRVb6V+ssxW+XSdJgNYE9l/pBohpUKQqrI+n4OWXBtr1UYCJ0H/h3P8U78zV4z8uyFa344aip9/D9e1j/V631T3VFvU2q4d+Q9dw9Wwxch/UiqBXg85UbZ23/OWcQXJ1tZW9Nnd3V0ov3vvvRfHjx8PAAEAnv3sZ8Nai0996lPZaz/60Y/iqquuwtOe9jS88Y1vxMWLF6N8v+M7vgPXXnttOHbrrbdia2sLf/mXfzmojkvNJFRoXPKAeoBzTVBb8lAg4avyIrEyBpqVb2r1SxEFC/hNo8KJWuTKWFLufMOjmAlxUYjn3sJXOdqKp69cZqtj7ZZSu01KhiiAtgyroUmLDcgYLgZPhyEy78BKklr1ZyQY25ERWwYU9NmSWVMtZI0Vr4DAMnTH+9JaeZuKPAZsbCgoRN1aXDzbqAz0BMd9mas+rIU870ScjdSlqTY1mOkS7fOAPRuAOpjSgm2Wrr/hhhui429961vxtre9be58z5w5g2uuuSY6NhqNcPLkSZw5cyZ53T/5J/8E3/7t347rr78en/vc5/AzP/Mz+MIXvoD/9J/+U8iXAwQA4XcuX02WGiQAfsAq0Wzs5OMbuHCOS+lQT+oG1o6aqIzFpFExuJhCJ3sDD0hq9QAQNlEqWLeP2DY01+c29+3y8a4Qqx1Sk6ekTwVzzmgJMcFykZ1XrqI6RKoeTK1y0GwTciqG1L1SGTDKhjXJzDxLklIdSLDBdbiR/vZyWGPn3BC7RFrma0nkCfY7BRCGgoOup5Iam1uurh35SJHpefRSbreTDEyWyliA0OTj6AG6ckLtuKm/0taGGMZqjANPKtOxMlqgIQOMQt5D2mu9X4Tveww0HBBY2E+bhK9+9avY3NwMx1dWVtT0b3jDG/Cud70rm+fnP//5uevzyle+Mnz/ju/4Dlx33XW45ZZb8KUvfQlPfOIT585Xk6UHCUBNfbJBonR+8OINg+8+WMLBOlN3HIuyigdAGQY51VW5Tp0PejxKI9+REWgbRsp8+ghNni3bCcNCrNLkWH/PgoXWjTXUt7qK6ejcBNQqxEBBTyvuDW1QFO4P/QZgkvBM+PPg1ecrHGVCdZGBZLv+fXb4Uw3CWmkSA25y9teN7lRJTGZD2QMpsk/0WahpW4YDCNuBA4sZN2p7fHQFGxscH0NOzBmAkLPDyQYXk/24SxILhNyxlk0GKysJqJFgppQ6+MSCEZHVLXLxX69M2dzcjEBCSl73utfhpS99aTbNE57wBJw+fRr3339/dHw2m+Hhhx9O2htoctNNNwEAvvjFL+KJT3wiTp8+jf/xP/5HlOa+++4DgEH5At8iIKECauM/pnoQOnvaupno/LJqBhVyYyQhFYIc+BoVQZMXRQeM6mPaOySS8F0Z+YDFJ9EUVuADIQcK9NvnQ3lkmAVuve0yE6U8rpzPTSwWiDai6h1lDw1Q8OnyvvKputAz9rYj8aTaZ7WUXPUES3cgXoHFFcgaZuXK7kMd93lv/FwGIPQBBzkVgxZgLCUaoxQMXE0akEtJ3bHA5DpjkJp4uwBDT4CQe5y5c6qXhsnxkN35p8BIGA+k2k6oW1rSV70CtIFIoo6XW74Z3g1XX301rr766s50N998M86ePYvPfOYzeMYzngEA+MQnPoGqqsLE30c++9nPAgCuu+66kO8v/MIv4P777w/qjI9//OPY3NzEU5/61EH3svQgoawnYlcbEY5ts4mRQbxJU3vF37YV8CfaVB2/tjBG3Z0wpK9VFKjBQlRGKLt9L9qcIrtZHMFOGgmSSoV+C1qeaHduvS11hkCvCaxvlyGgAKRBgSZRjVyz82SZKZhfY9FYujtQ+O6Bq2/KCGj
TsQCiyHcAXJ8pToK3PrStPNdXryvudx5wACj9pN9lvSQCDRnGiSTpgVgbEhcE3IxtgHjf2BZD4gA0VQYwkIXpSEv9GGiDnL4hqHNlqGzmkPakjAE5ZqSlSqHrl1DdsN/ylKc8Bbfddhte8YpX4EMf+hCm0ynuuOMOvPCFLwyeDV/72tdwyy234CMf+QhuvPFGfOlLX8Jdd92F5z3veTh16hQ+97nP4bWvfS3+zt/5O/jO7/xOAMBznvMcPPWpT8U/+2f/DL/0S7+EM2fO4E1vehNe/epXJ1UkKVl6kAAwnbeLJ1/yRgAQeSQAtRcEYxDoPK3+tVU+NWljTLTVcrs+7Qk8nGN+gYsOtprNBdWPA4YAFhQ1BB1u5d2jA+cmHb7FNc8pd885yroM+fa7LoSIZtEt1WfBJafjZWmDGsJUsZ41Rw9HwEMMkqlyu5idDpGDeGpQl+1oUbdFzrgNEYf2bnutSbI+Upm4P0YsorHhfnlUVfXdDAUG7H+OQViEnSlFP4quSVWvq6xEWs5IRv2io+2l1CudeSt5Pdrlox/9KO644w7ccsstsNbi9ttvx/ve975wfjqd4gtf+ELwXphMJvijP/ojvPe978X29jZuuOEG3H777XjTm94UrimKAr/3e7+HV73qVbj55puxsbGBl7zkJVFchb6y1CBBLEIAxGoFoKH3WwNJiJTo0KL62YpGTkqUT27jJE0XSunk5JkaolLdp2tII5qeJkZuvBXR7l20opTEKnzoFCJNI0I+vVe16XONHUjjIVKhBmY1o+AHLqsPWlSGdpCMBOk7rVbJfZTYmdwL6jJUHGIMlpAU3Tx0AhsiZHciJcUKaOAhVSU5uXEPIA4UeHrVPXgRw9OM/r3r+fYBWdkmk1kItNIO0UssIDlwFNQYLE0SKNiDmX5KV6EcGnJbyeNyycmTJ7OBkx73uMdF7eiGG27AJz/5yc58v/3bvx2///u/v3D9lhwkeNeWwpjgtkiNkgYTWvXzydIBwVuBVi/cS6GwsTqBykoxCLJzSrdMKdoqW1LlmmjNVI5BISBTTdHz4DL0bAzpU/vqYDODY+o30H9FOkSn3Uc0V1IOlLomydxprsoBEBtFGguy5AYatUSn8afyfOd9AppdRupcVB1l0hkUKrznBCWNWLV3ndOjc6CgXcNX4a1JU7Er6c3+DLDtSBk8z9OsS+UY9W8psTFoc/M58DAIWCjtONW+eL5cBcolpzrcT7mSgyktgwxasrzzne/E93zP9+Do0aO45ppr8IIXvABf+MIXojQ7Ozt49atfjVOnTuHIkSO4/fbbg1UlyX7FlCYp68newDMHtDskTeh+rwSiMxv9kq3BRbARMo3aYWQNJhaYmMr/LwzGlvKPyzfik4r2BhCDETe4eTCqc/GgUDkXPnSOvDycc2zPiOZZeC+GTFx3SsP+a4NjH9EmIboPeQ/R+cwnJZIFomfgnKvVTOnPrB5QUp+ZeI6lQxNIx46AYgIKJOOKCRz7HX3o2bPnL++NAhkN+cjnBpbX0HeTOm6RHzis8mnybD4Ae1f8etP+pKSx8fELhr2ywqzy7zq6TGvX8nuq/bMPPVdiK+h7WZdZ1l49JfVB1nZ4H019AHSm0dqlY+3Rl++iNt/L60JGbs0EWeP5Uf48cJbWb+jD+86hXPkyCCR88pOfxKtf/Wr89//+30Oc6ec85znY3t4OaV772tfid3/3d/Fbv/Vb+OQnP4mvf/3r+JEf+ZFwnmJK7+3t4U//9E/xG7/xG/j1X/91vOUtb5n7JmIEawJQoBV76XhnalbwhfGGjmPrQUFh/PdCDkyu0kM5K8InUa1+0XHMDxDi/F3rNwcMVA4NGHyQA5AfLNn5VJ/W7u+bEWXP26AkKG7EYEF+KOBKxdKWYuBtjrMBUAIGII6Ip3zCebRBjzbha5IDT/LSy7UI0sBAakDJgopEVE4uvP/Jd0ztrJkodaDZZCYYAt7WtWMCIPM+RJMjwAFLDHiHgNyUKqb
rUyqLBNnmebtSyx/wDFrf2TPo+pSsvx2E5ED/kM+jVYxbYCR/4IEHcM011+CTn/wk/s7f+Ts4d+4crr76atx111340R/9UQDAX/3VX+EpT3kK7r33XjzrWc/Cf/2v/xV/7+/9PXz9618PEaA+9KEP4Wd+5mfwwAMP9IopvbW1hWPHjuH/fPXrOLK5ibIiBsBP+oUlwym/MuS6anJZ5GqI8DDq/47/TsQ3j9KI6/ggIqUr9gKXLv29tuqOqkn5sJU13XvY0trEaX1Gbbc/fm/a76Zu7Zvm6hV5P6l7Sd1Tq661yIkj7UaqH8/VIVVWWA2HMhs7kKguvXKdX8XAZb/Hsv0Eel3qsq7nn3rHZNvjV6z+uGf+DOsD9TWtCnSrGmS/Dt9ZX5btmN9J6p0k6yTK1fKIVQvpPLlXFm+nWiyJkE1C7SgXF6ln4M91t5vzW1t4wg3X49y5c71iDwwVmif+/r+5B+O1jYXyml7axu/ecctlq+uVLAvZJJw7dw6AN7wAgM985jOYTqd49rOfHdL87b/9t/HYxz42gIRUTOlXvepV+Mu//Et893d/d6uc3d3dKEb21tYWAJroDWCbjlm6tv4osrZ2gDMGBRxQqxtQMVVHjZ7LgLpta0LVdG+SzpMIm0vODTJUwwzTaWqnHHQdLhk2SqPGpmydOZgHIOSO95HcBDsUGPCBS6tR3wk27BjifP1Kqkudv6xHbhWdovQvh+Tew1CVw9ByUqA4muCUtz2keGsMCtuoGSqX77O+gDyZ2rVqngccSKF+Ko+16pCahEMfNsywEeA7zvIN5KxzwVaJl8WsGGLbnQww4PXq6l9SDmptXlYOdkH0/GhmEuYGCVVV4TWveQ2+7/u+D0972tMA+JjQk8kEx48fj9Jee+21IV70PDGl3/nOd+Ltb3976ziPW1DWdFrUohHHKYi9GJoOQoZ8ASm7pjPQQED+9lL66tUoQFBf9cJ+rHi1PLlFuBxENVsKOtedd786aSzCUOkTeKpVrhjEI1A3sB5+gOX1aShuwwuh/JVKhkOs8Pl9Gi6/5Lx5ukQDu9p7GwKutPyNAUam2X2De/kAOmDQJDXR831h/G9K0wYIQ4UDhXnzkd49FRqDz3AOJnIPBhABBhI5rmlM4Lzg4FCWS+YGCa9+9avxF3/xF/hv/+2/7Wd9VHnjG9+IO++8M/ze2trCDTfcUFPn8SA/Za3bGgNjXUS5Nef8f2ILODAgIYBQAjBwASxI0ULC0nE+qGgAYVFLfiDdMXPzp9wfga+U5ilrUdH2d5DnuXRNKBpbE68G63TzPn9l9abVyyeNy+BAIrL0V67tM1EOdaPtKymDQc2bJyUpUCiBgqxrF4OhgVILRKpGaRfD3YLVugpA0FwX17tJn58c1X1HMtLVElOupjnh/TuwiizWRFN4PmPtdJe6k8rP1e0g5JBJWEzmAgl33HEHfu/3fg9/8id/gm/7tm8Lx0+fPo29vT2cPXs2YhPuu+++EC96npjSKysrapQomsRTA71hKNrCGydSRy0MgkrB1fQbv05KWblGTcHKJ7EMKDT5NHsYSOH1Htq5+rgt5ZIQq6EBhT7SV82gldtc0z4vXeM0I8SuiSUVg4IXl3r284wDEgDQT15zcmPTGBC6noCDTDPEsJWeKffq0CQ3IWs2OvI40KbvNSqe1yknXTYdmtCEL/dcKVhdGhawLkdZOedWyEAeRPZtLovGL+Bgg7OnXerKlPDIslzmYRC7JFVH8tI4CDkECYvJoMWGcw533HEHfud3fgef+MQn8PjHPz46/4xnPAPj8Rj33HNPOPaFL3wBX/nKV3DzzTcD8DGl/9f/+l/RphbzxpQuK4dpbazkN2lqLNx5tEVef3rX06q2eq0BguZCKBt4RWnr39yiu6/uVsu3yb/5uMRHptNE1qTPxlF9rOpT5/vc+1AvDo054AZYfGKhD//Nr0sJ3Uofb4JwjXgfVeITPB3YJ7J4Z9fHdXHJsro+ueu4JMFD/aw
MGhsc+hjEn0KcGzoRdhl9yvL4J0pXqxd4NNVQb6TfP2cZGi+ABkCGsQDt90if5L1dpg9/5tYAhW36Bf8UNk5HdYruX3xowVQ5P5bSeLroB4ifJ3k1UJmHcuXLICbh1a9+Ne666y785//8n3H06NFgQ3Ds2DGsra3h2LFjePnLX44777wTJ0+exObmJn7yJ38SN998M571rGcB2N+Y0mU90Tvo1rwADWDNMWINOAvBrwfigbodtMUFvd4QcCClj448txkOUK80HZ2bqxqRqqSp2/B8tJXZfkmKPVj02fvv8bmh1U+l5+8GiJmanO7YIM2i9JFU2+Urak3aVvFiYlG8AMjAlQfqqmB6t4GUtX2XGPhnFdq8abwY6JyhfHvWRfY5fpVsI6m+Ng84b8pIjzk8TbuM7npwkU9DM4qU9dkPkeOcc/2ZmEXlMJjSYjIIJHzwgx8EAPzAD/xAdPzDH/5w2BbzX//rfx3iT+/u7uLWW2/Fv/23/zak3c+Y0sQiVPATt+9bLjJWpOiJhTXBr50YAZIobCiaxksDNsTxyrkQhyEK9Yz8BKvR3F3gIB6s4oGED/zc8JCLRnEftIGctnLt0q/mbA5y0eQW6cvtFffwzEjf26L+GZjTwICqO+45yaQGdK29UJ1I5aTeA7VpHlgnI8bYFlDgW4UDbZCSAwh9bAZC2WgDGjrmDUpNZGuU2wae982UYasG/njd/TET0ubERfl0v+v9AI7NsXYd+tgYDC+b5UfjW53zQcZJMIfqhrllEEjos0JYXV3FBz7wAXzgAx9IptmvmNJUG+46VbIe7mpjuMb+oNEgt1ZK4XjagI5YCV9mpl4dzyk3COWMoSR4kUBBijZYDTUQy8mirEGfovuCA358P/qzBhDk7baBjMmfT9SZ3uEiQKFLkvtadF2Y2kFRhDNOAQUAEVgA0FIx9Kt/onpo75hYJPo2ELdZGfyI8kutruUx/s5I+gIELc3lmjO7jII1yaXuw6zkwAE/dihXviz13g2k3zLGD9A0+Dl2Ho782L2ujoRP+FK0TtXV+Wlw6QMQot89AYLMoy9Q4LJfAGEIOJgnomRfwzqgzf6odVDeZTNp9gMVQwe1vo92P4HAIuWlmKgg2j4fmRgDXPUwlLnKsnFKOs4SaHur9I1VwlOlVH7A/Cv6K0kWAdPZMNkizysBIDjn4BZcORx09NgrSZYaJExLb7hYGANn/Ess0LikVXAoq5o1MA5rxvqQzTaeWGVkQzlw8BUKEE9aKbcpKXy1wo81+cTnSLc6r6SiAob8LxNA2G9QAKSBgTym1Syr0khc0yePeGXcVumEcz0fswR6Q9iEebZmjtgExS7FobE56CtE83PVSmrLdLVOLJ/UOSAGBxRpESCDPRNdwz0dovwECNAYhC41Dqk4CWB1va8hcVK08q50ybEH38xbqCq3sE3BoU3Ckspu6TCZuTrKmtdtFkZbNZja5c+zCmXpfKd29aqSD86CeqT/FRtIpTHUPB1fisZepFbJ34x1zH5sSiWlryqhz/1S7fr2ZY1N4HYSFiYMcFmDPygqnYEviDNCwXNGqB2ozn3vqW95HCiQayCVaw0Avq12n90SwZ4n4meRejep49pqNxUGOEqDNpDQALlsM102Qe16e6bSGhOMT+m4BhxzfaZlD3MZZlUtx6jNs/bT1YSzTOcVBBAOZXFZapAwLSvslRVsZTCq3YEqMiasW3nBflOIUgBAHTMhOoa0VbM1zYpFi6CnrSC40ZYm0ngvtSJMdVhNBcLT7pehYhdA6DMI9DFETPnoy3OhHvsw+BCbwIEC4O+JAwUp2iQwD7ghaU3cLvaIGAIW5ioPCLEzKGQ33+I3AgwJ6XodQ6M2SvWdjLMhJ3vaDTFiZBJsncYc8DS9GCYw9gAmAnjSBgTQwabWdw6aOZBAoU89+qj5+pR7ELIfm80dqhuWVHZnDqOZ75rjot7FsbA1o2BQ2JqCrJv0tGq8IKzT/XQ1q/vKuRCfn8CGHPA
C4GDXqbHo0Z6UZHk5F6jkJjeGD+iL976u+PtD+0zKbkLzjyfpWpGn9KrqJlNgz72uD62cookQ8XvJTYsaOOj75PnkT8JpbEBnFSjdEHualMjyUrH9JWCgY+l8m+88Ga9VjiWI8orOp1f5FFukdFXY8M0ahABqhqWflzmga/m9N3m66L1xVsHfbz74UZP/wU1G/B1owKprHOF9J1drOcY1i5eDQQmu2gebhEN1w3LKhekMs90ZqsphXPgASuPC4uikwKSwIUQrUDfUCihRN+wqnhRIyO6AdxAaeIleBJooeuE84pUf0HbxMeK/ZQf6tMFC6bRt97E2u6DJUNuCRehQqlMKKORc4LTbSJXMn2EUDZOBAQ4UgHigonsq6D12vJc88xEfSU1EWvYBbNZAhu8VISefIZJ+bjGVRkyZEXQxVwnxvPoO9RIwaP0v1InSiYk8dQ/O+UXA7qzCtO7bhQHWxrZ2V27SLRptU0sbLRLqZ8jf2X6yQPOKNqFr76B77KBrY/VEyIvydnoHofwP6kkc2iQsJksNEnZnFcysQukcppVfMaw6cnl0AAoAgIW3J+CTtjdgbNPGfMXGOwuFQe2/Sot/dwU9GULFpmjtIe5XoazE8RRNCwyjGA36Gd+lrs2JjFYI9KcFpXoB6AAMPeuoew6kV7+puvEVKq8bZxe08jXpeiJywm+VSZNdRP1zEB3n134m8W+yV6Cym3dBYNINNvIzNSiYFBbGuFaZEhykXJB5fkNEdV8FAyfo967mcVek6+L6pPPgrEpzfT5/CSI0oMDzkn2rXd/htjuH8s2RpQYJl6YeJABAVRsWUGP2W0YDDkVYVTg0YZnH1jf0wsYr9Lbfc/Pd623jTkEqCG2S57pIbgGdm0j4CisnfSannAxhCuZdcWk+/yk2oa/I4jmF3GffAD5AynfWBRhyklMRaXUNxzKraF5HzWZB3oOsQR/GRUr8THSwAOiAga7lE6Ksn2zf8j5k3YLaR6iGJLCx9R/u6kxgpCsGAtmd8HeesyNIiQQK88oQb5Wc2imXR9+20uSHOr/6egUopPpWqvyDwgiuQlc8sF55PFplqUHCrKqwMytRWIOx9bYI07LCeedwcVphZWRxcVpiXPjzKyOLkTXNIALAOMAYFwwcg8uUMEYC2jrRnOQGGW1lLQfBLuna/CikQzdbkKNfc4xCV934qnceRkErLZpgo3p2AwQSrQYtxoANihxEDlnk5QCCNkmRcJqWP0OphuDnKU2+Pu1jErRp0QTDNthsUiD7BV9fevYxWCAJP51wiKxdkvmzKQzqIGieB3TONUxGpndUzgWVA4H/sdUnytDuO6bGPk01xRD2aeepJH1tAvpIX8DRVRKf/LuAQt96HZTq5dBwcTFZapCwM6vgau+G0rpggzB2NjALgA/NWhaAtUBhvQrCGAQL+7BSSgyiOX0o2SloHg/ttLpBU1+Zp1PlGIOcK1jXClivX5xvjm7l0f7aVG26jHZd+Yq2qT+XvuBGe7qtScDE9etT1hCAII8RYJBqBrnKN0af4JORCjP3YBLXpsrm6ojWTqihEBHimblUGmMjtYOU0E5M+xhJ5TyY2Z1VdR0MioL6+TAKP2KUBnS5Pkn7AGXNmHlRGcw0igpIliAFFPpIsEk4KCrhUBaSpQYJ08rBlg6AQ1UY2Mp7OJRViXHh19DWAFXlB4mxNRhbh1Gm5zsxm/FQztogTIfCags8TbscTq9yKjQlQzteSoaCA34uu+rltHyoc6yX5dbeKdFc/kLdRTpf15g5kPcILEbX0vUaLat5QWii1TvKXzxLnk+YqIkKd3HgngZQNWqI6Loe4CAlXaAhBQYp3gKMaYFCn7EI8dwVsdF5OwW+/Xc6CJh/V81uoJkb1MpTpvh5vFY0SakhFlW9XU7RGIgcUBiWd/z/csuh4eJistQgobB+ct6bVbg09ZEUJ4XFuPB0JUYWtmroyt1Z5V0lq8J3VouwEQ0NwjH1mm4Y2qqQizeCdCF
gU+t6pDtJtOLuSdF3Df4pZoCrHORxGQxFZVpqxsbUIwgHBdoEyw26wqQCP7inJ7Y2W8DrJe9vv6SP7Uil1CHFvKTAllZ1CRgquG5WwfRzs9Mkx2y1wJdSPmeNOGMWTYwECjhAqFkEus6h/dy69uyonFctTAqLzZWGVaAN3ei+ApCvgZ3qoiwmsEXnMW5Dkntf/LFfDmY71T+6QHJO+k7yqWQHhY8OXSAXk6UGCVevr6CajHF+t8TOzDsl7pVEZFfArNYnF4Ct3/G0dABKYFTAwZ83lZ/sCoUe501DTgo53aZ1jeuag1MZhpQrlVxND1VN5ChvXq5kDjg4kMAgpZMLemiIyawHs0FAAUBnFEtpP6HdV06GDEg5XbFmpKXVJ6uSGOQfwp+Vi/Yo0VgFYDFqWRrlpurj74PSNWo3WNoBsrHxsQZxiGf2nYOCrqeiGUGSTAoDU81gdi8Cowmq0Sp24Ad3HnExpdIA9ndVr4F7DhTk+zpIYDA0zTxV62JUgRwrdChXkiw1SDi+OoabjEMkxGlV1asHB1sZWOMwrSpYY0PvrJxDVRlMyzpYs6WIi8YbMHILZ6XM1uoR7ShvdJzbOsjBga+8gHjFQeVEqzAMXy3L1FoQGY098OCB0rqsKxo3WiOgwMuvnDcKlV4jVI+GVm+fa+rNVt3K/aj1kmWx7zmmJjduzaMr3o/Bv89YytvKELCQZ8tiwCB/c7AQAhY5oDJoAQWg/U40cDBkwUZAgco21QymKuGqij0L/R1cDr1/SrTxQXtfwP4YKy7CqnVdmVrYXNGyD0zCoIb5LSZLDRKu2RjDrq4AAM7tznBxWmK3jptQOYdp6fdysMZhzN5x6RxsvfApq9oa2fpuGwa7Om3KeEuuuOVAQABBW3X5fF27RzrGMoj0QyVlhBnKhljFCYDQJ0Y+UE8IifL5s9LUONoKqivQTd/xr3TtgVnakfBBuivgU5dotit96zov+JA0dg5UdsVv6BKZNsobgjVz8MGEWP/wcUq67imdgKujJNtmDYBqBmcsMFmDKyaYCXBA9R36buQzTaUJ9WTfZbsCYjCseYHsl9psaC595sAU2NLGSP5+5o2Tsl9SOQez4HO9HOrMZZGlBgnHVwqM10dYGVmc25nhwt4Mj1zyYKEME5FPWzqHnVoVMS4AwKCsvBGjq4cCmiC5YVpLNzzHhJXTJ7fSUr3RTt8XzKbqlTNKlAwClS3VDiTzgpfUCorXq1W3BJMgRVZJ6pzlAJ0ayOYFCn0lNvb0tejlbseuS6pEoE9q+zVQa89fizTIQ5cbdmUqrHjIP/GCDSu5dA0wLaxp7reYYOqAvdKhcv7JcpVaCiCkwGSok14lVTQA2uQbg2LJnuX61NAV/KJjhc+jD3Jov9NUnI/m2JVOPRwKl6UGCasjg9WxHy4KM8bqyH8fFQazklQNJvi5V5VDaf2EXVVAZRprAYqZ4FfUXmQfiTt4vgORuoHSafpkKXziSgXPGQJou9zxJEAIabpUDAuwG1KNkmMMuliFXL2kzpfyBNIr8K6J9HIsJnKbSO23LAIU+ryDEMuhPk/7nZBI8AAAyqat3RVAA0Y8awHAjjCtvE3STGm8qWK6AMK8IsEC0H7+bYZtWBkHBgQ666EDIc1uhqc/qNgDzu2D4eIhk7CcsmIdjox9gKSJrbA68ns3rO9McXFaYmdWRR2J1BAEFsY1m0AN28G1NrLhQgN6qr3wzgG0gQQHClI0ewTJKgDtiS/KP7Myb9KI3yzVEB0/1XneHSZz4MAf011PswMe+54yEKM8uoCCfJe5+pMYcbyvGqHNeLQv6sMiDJU+gXa63hMXqU4gUNCUVx8X5UsJxsOZZ1dYbz9UwsGVvuxp5TCrXLTBE70TDWT3BQiLsGiS2ckxCylZZBWeereXY8qTfU32MyfSHpQcejcsJksNEky5B1PuYbWYoDAFViuHSWGwOjLYKx0uTktcnFbYnZW1V0M9kFUVxs7Uq5pGzQAAqHxAGG+
RHZfnQYb/7lw7cAxJbuLkQZ60MLBAe7KTG0X5auY7f84moSnHqcd93Wp2JdObud86PS+6J81IEWhTrJreWN4DZzxykopUKPOXRneUjgMFmU7mwevXqgcDfL2MDuUklBlCuya2XHFDQ/hqAEG+i/j95cpmdQzfXfZeczKpgyWVlcOFmY+wWrkm0mIB07JL6aNG6jMXSCPceSe8rlW8dpaPBbLcVG6LxMzQJPUMVWAu1KZcumxU9kuqCjALx0nYp8osoSw1SHC1K5VxFYrag2GlMKicrQ0WfbrCADumwrTymz9RSFC/nXTsZUC++5q1PjfokwAh9uU3ydVGRMWJbi2NHSW7EOWDeJAYsuLzdUwDhKY+eZEAoTkuylIMuFr1WRAgUBr+DFPeIl2rHUmLSst+XpP9YCHnYQb6goOhET1TwXNyErc33g/qfKGxDL5eZY8SJMDwhpIOOzPP/F2aVdgrHYxB8E6S6j26VstTlpUC2Nqj5IfmNQ6dtwml+nwrXWJxMERSCxqAMaeCMdCYOZ7+UJZDlhokAAD5XNMWz5M60qL/bVGOUO/LYLDLlJV0zCA2VLRAcOHirdm5eADkMo++Sht0onC8aOv2gHhw00rtAgh9BohoS2A+aYgKWzH4SrfOuF66PjwX/jkHEFKDDt+sp49uNEeLSrCg1TElQwxNh4iq9lHTHexInNs7o2wn74Wu6BbIUNEYg6IG11XlPZN8KGYXvdeulS5Y2j6vyLH2mM57fpaGZN720rUQ6JM2JSnVKUlunGr6ChvXGGA4KDX/4d4Ni8lygwRjAetvwaEZBGhjFwe/b0NhbWAOiK4bFwYja6INn2Q7KJnqoXJCDTBgkON584HAJtIC7VWxliZKnwEHfVcOHCyF30qh0YqsQ1eurczl+aiuPQYQeU4DDBpYyKogErRonFYvUzOwlNfup8yjUlhUmjDJ3YO71tZdog12qSgMxVowtOGTv2avdDi/N0NV+b58dKWomcH62vopBQBr0qt++f5SAG/eR9snzkcv24Se/bitIut1mSpdbGKkPlXUfYDe3+T3yymHu0AuJksNEmZmhJnamf3AMrI+EpsFYEZetVBW3nhxXFi/TTTfKtogWPaT+x/39QaaDliiYS9SK+wuycUSSOlqh9CKQPfAog36uv64XS9t0NTunQMFfkxK38EsZ9gmAUMXWKC6aUaiPK1WXh/XzJS6aB7pk8W8HgxDBmxj/IRQwQXj1QrNHgt9gEQODEq1gDFNH700q7A7c2F315WRxerIYlyrDbv2YNCeTVcUTe1xSmYqF3gK0MFBbIc0bMI8qIWtFgQtmVZR98k8tLHgUK5smdc4/YqQ3Zm3M6jq1QV9vFGhb5gT62O6r44MxtZgZeR/23rgsUAAC35Aaho5uQmmQGQOXOYoT/qkJAUQtBU0/8R1c70HHmPSH6oPffg5q3xS4sQnqqvrWFHOOazw58Kfh1zp8jpVzsWTP3u+dI6n0e6nzz31FcM+fUXeQ5/0mnTlIN+LqfuT/y7pfb5yr/OPmIa20DWUp6tVC+d2fUyUwvj+vDoydV+W9em4AbVML0M9GLhI8Ni0rWZMSbXLvp8h0qd/5mQ/1CBVx7u+nEIbPC36uVzy8MMP40UvehE2Nzdx/PhxvPzlL8eFCxeS6f/6r/8axhj181u/9VshnXb+N3/zNwfXb6mZhAuzCpNphSNjGwyj+MBQGANT+HgKAHBk4o2d9kr/ARrGoDCejzTGoayVqMEl0hk4UzMHgc5uW/dL0Xzg5Sorh9S1FVnX6iN4FygBe7okN8hK97Uh0rXKjtImEvd5lsnyWduQbqgpWlTzKNGk79jRl1HYz1XWvBEXhwyHFgaVcc12zjBNqG4Tr0Iq5wOXaaoLA6kaaDyMKgBwLngsTcsKK6MCRyYF1kYWk8JgWsUAcIikNz/Kv99IdZhgG7hoNjaXgxHIgaNCOZe0aTjo2fwyyZXuAvmiF70I3/jGN/Dxj38c0+kUL3vZy/D
KV74Sd911l5r+hhtuwDe+8Y3o2K/8yq/g3e9+N5773OdGxz/84Q/jtttuC7+PHz8+uH5LDRJmpfeJ3qknfGu8dfPIegMn3yG8K1TpgMIBZuRVEBenVdjzwQ9YTSMoTBOFUUoABU53geQDX99JTQMKOaPGnHDKj2TISjxlAd6ssPrlld8TIP7dSU0PmLZybonhvfTQo3bJkME9MqzsmFgul/RlFlKsiBTeli08wK7gvYec8yGUC3igzb0cgPqcqe0UFLZBGsTulQ7TGiCsjiyOrY6wUcdHKQyzKVDaVasf9bElYs9CvqNU++pq0328ieaRPt2xKw6HZjsFtG2U9ksO1Q2NfP7zn8fdd9+NT3/603jmM58JAHj/+9+P5z3veXjPe96D66+/vnVNURQ4ffp0dOx3fud38I/+0T/CkSNHouPHjx9vpR0qS61ueOLVm9grvfvTtcc2cPXmBk4eXcfmxho21teAcg9mtoPCzTC2PoTruP6sFAajwqgTfVGnHdn29tFArJZICacFU8Cg7/Vy5TEP5dglkXrBxMyBAYLbqEwvVRNdIo21UmoS/ukrXas+TnPvF93bp65JldASrNRydZTvnaukuPqO1HpSTVXU5+hj6+tMDfAtGrXf7qxC5RxWRhYbY4v1sY0MlC+HGPEB0rS91j54u2i1G5dXSeVUgH36HFcRDjmn5pVhOpdBiElY9AMAW1tb0Wd3d3ehut177704fvx4AAgA8OxnPxvWWnzqU5/qlcdnPvMZfPazn8XLX/7y1rlXv/rVuOqqq3DjjTfi137t1+by0lhqJgEAzu+l13/GVTDTXaCogLFFYSyOHFkP58+c3cYeXItaJtUD4BmIsvKrHh5kqVBYAdW637W/y4Eht6IcugKZR3/PwUE4FupmWulCWYKO5QaFmvGWZs29SEjilM6zOe/qeplWOmIWgnHVHPVwyr3nXMXoGsksSLfW1BvsEyFxP2WoISlJYGeMZ+ic84kCgWPQGqz4O6J25ZzDtHI+KqoFjk/GODKxQcVAro9lPQRIF2J+H9L9Napvx2xHz9ugASS87adUgF3tUxMJug5a+qjwkvZWA+t7UG66+7nB0w033BAdf+tb34q3ve1tc+d75swZXHPNNdGx0WiEkydP4syZM73y+NVf/VU85SlPwfd+7/dGx3/u534OP/RDP4T19XX84R/+IX7iJ34CFy5cwE/91E8NquPSgwQAuKS5OACoRqswxST8NsKPZVIr6KZVm2ZjUyOKwlOorfxN4/nA23tfXTmV1cpXeFSE42yCS4mmbsiJ1k/7AgT632cAHDKg7ofwSUiL9RCe5b6Vpx+X4EH6lVNd+gCFRQLzzDMcD431EO6BXVeg6Uxx9fUacaBZOo/vi5HBurE4OvHgwBrvpTStPMDXwBoJgTIOFGTp86hiZHuWbTkGxHr+vC3KgGT7DRD2a3+QZWEPLpd89atfxebmZvi9srKipnvDG96Ad73rXdm8Pv/5zy9cn0uXLuGuu+7Cm9/85tY5fuy7v/u7sb29jXe/+92PPpDw3d92PHnu0sxhUlgUBlhbW2udN8a7SVbOuzRGq34QzU6Jm3NkoUx5aJIDCta0Jwag/8ojDHiJOgwFCn1EWwGn0vnzNQOipMmBgyEsS+qaHKUmw2kPcfHSytIkmvAlIOjhJgbkGQUpfYZ/niaV737iNPWZZm5IAlO/+jNYHRFzZzAuvBuzRTxJUzwToAF9ZHeibaqWAgtA/hl0GSJKplAGmJIi7Zo0G4o+st8bhHGgph0fIrl7Org4CftnuLi5uRmBhJS87nWvw0tf+tJsmic84Qk4ffo07r///uj4bDbDww8/3MuW4Ld/+7dx8eJFvPjFL+5Me9NNN+Ed73gHdnd3k+BGk6UHCSn56sMXUDqHEytFa8TauXQJM1erFaw3aHSVCx4SRF+S/tSDBc4tOL/zXFUPaGLSo6RdQEETOcD4fBQWg00qOdp7EbDQtWudlJbhGKsrndcAQl8
LcpJUgCOfZ4/VtlBD7LdtgGacyMHCfgCFIZMZr0fXtV35aI9Xq38fWl8ayVI/85OHqe/fA4PCGOyVVfAosrXKoiLGATQhx2ChrGM5wDV9YVEDVTkpa+AgtZMsv3cKAU/15iqwg9oZNCdDQUHX4qRL7XO55JuxC+TVV1+Nq6++ujPdzTffjLNnz+Izn/kMnvGMZwAAPvGJT6CqKtx0002d1//qr/4q/sE/+Ae9yvrsZz+LEydODAIIwLcwSLAGWKu3jm61j2qGEYDSjGr9eXOqvYGKYUGT6rzhB7KiWbaETt2iDPmEllAj8DrK2PcW7UmN7k8TbeXKRXbiaPJiK62mXvnr43tgACAc615x+WP5TqjZFUgZ2pFzm3Ttl8hnyieBvkChK28pGsM0yA4msWJOiWxDciIgcCBBAVgaYggKawA04dKNqwDnfZLtaISy8js97pVe1cA3XaPKGNPsFdHEbTBwZBThlH5K954AAFy62p8ECNqzD+doTHEuAByINtO3fE2GsGT70RW08SbVPg5KXLV4nIPL5QL5lKc8Bbfddhte8YpX4EMf+hCm0ynuuOMOvPCFLwyeDV/72tdwyy234CMf+QhuvPHGcO0Xv/hF/Mmf/Al+//d/v5Xv7/7u7+K+++7Ds571LKyuruLjH/84fvEXfxE//dM/PbiO37Ig4TEnjmBr+xLG9Shx6dIlmHLP/zAWMLYV5IQaOKfzHQBn6lU1G0w4LVpY+FgKiVXWkONc+AAXHe/RmVN62tymUnyionJa+CpH5Ufp4np0UbJRGWjbCnQxK5crtvq8+co6amqNHFAAEvYqAybw7Cq25231WUnnNtUCmjaT2tvDmgYgFISrXQVUcTxdb9HvO04Z8vT9U6szrdYBPwn7XSGbiTjUz7Rvsq/qT7YPjaXo6x0g81pkl8Q+LFlfADFPzBWZXqp9DhooXMny0Y9+FHfccQduueUWWGtx++23433ve184P51O8YUvfAEXL16Mrvu1X/s1fNu3fRue85zntPIcj8f4wAc+gNe+9rVwzuFJT3oSfvmXfxmveMUrBtfPuCXcuWJrawvHjh3D//d//gbf+fjHJNMFYFCDAlPu+Z0jjYWzI0wrYFY57MwqzKqGReADemFMUDsADSXKO7g3smqu1YSodrnK45NmzuKbZL9869XBWo0L0b98zXsB6A8QcjTwsvrqptgfzkrF6bvzbNlxzBksq6u8ecrRwnan+g5XMRSmdjs2AAXbN9WsqYuxcMUE0wrYKytcmFbe60jR++cMGX0d6Vw3MzeE7Uq13676XA5J9ZfcgqPvmEAyFDDI/TS2z2/hKY97DM6dO9dLzz9UaJ54/Mv/X9jJevcFGan2LuLLv/rPLltdr2T5lmUSAB+EZVJMmuAoo1U4oPa79tbRfoKP1QxkaUwTu3XN3vRWDHAAANMwCRTchbMUXChfnV1oD1opIyLKu+/g1gIcyrVy217S5YY61YGpuqTLDUzbTrg7T16P5RGuKpLul0B7FWsVLxouQ4w+o3xN836jpuC67VaG7CYYbABY3jwaqoOn5wy8sbAHDQ6lAUzlUFgD5wwKU2BUjDAqd+CKCUpY7JUu9NvGbqh2S+btHXr7ojqX9AwYGxGxDIr0WUsl22UPYKD1g3mWb42asRFer5zhrqqmWsA1OGVQLRmnyy1XesTFK12WGiSUlcPnz2zBweGpp4+pafw+836lYg0i9oDAgXO6sVnOKtc4EwEFW1ObuY4tQwun2h13h6Jr+qgnZL4pN8CQrmv0EkAimgByl2Umr3kAgpQ+1+0HkEhapc+RlwYWZF5dBmt9n6smKaod0On2vnXIpWndjwPIlZiqUjoX96Oa0TPwzMKkWEE5c5hVPpjSXg0Q5GTH3TWNMa3nIevNf0vQEPLsvtWsdAdcYwuT+j9vc0P6fJOnNjn3BwqpPIaUvx/5HMqVI0sNEprdHtPduXSNnbQ1fgDaK+PBRuYJNA28a5Xlc/bi80eSReDSFyB
0SV8jqlY56J5Y/PWCnuy8ol2vIWqF/RLNrmHItX3PDy0j5Vlhe0wIOU+OXsApSs8m2B7XziPa/XBvgyYd13lXgSWwxmF3ZiLjRO45kAqNnqpHdjHI3kOoSybr3DPrawzbjDUxWFgUKPSRywEUtHpeCUChIqOzRfN4lMpSg4TNicXGapE8H1QE8J1iWpmwudPurDF+4pvJoGYWiH0gioyrGgDPYjhmr0BW/JVrqFAp0rtBAyiVi923ouvVTqgxBPx84tnoh+t6xBnwAZGrG7QVW64uOUkZd6bOyTS5snsDm8RxWX5Mx84pCzzX/PbV7fTUbumda1Q7lz6TXF/bGHVsFcdKuAjUljXbd2lWYlo6rI8LjGsXI24rVJnGIDGUR0UIYOB60uZBNQKjAgcgNogkSRkZ9xH5vjWgcDmkD1DYb5EbrB2EuKqEq3IjXr88Hq2y1CBhbA1WC4MKwLkLF3HsSGycsj4y2KsalQIBAMB3vIIZ0vCOSf7LzsUGig1jEFOmHCA4xOi/b1wA2ulOG7SjNAlJdbp53KVaE09ikLIiYd9Jtq8Moba5aHQroIOFeeq8HwOcNtF0FdAHEMjJ0MC00kXgRNQpVb58dp2qqoSkwA+p6nzcA38Xhe+k8Ftf19s/mXoCZ9dZdj2gMC4D9eomc285Q0hN+nioqNd1J7kscjncKyWbEIDCsKodyjdJlhokkBR1C9zavgQAGFt4/+pqBmsn2NxowMPXH7kQQAGNBXxnRz5wV84bIhJbkAtTnBJtTCTmQVsxzuv21NuljYzXlIFQs8sA0tbZ+w0K9ktSFGfXqj9lHJqTeb1N5n1GQydAfp7eefKeEitngOnta+EAsY/BXx/WierrQj1M+F9WDiPb7CDJRQMIQ4EBoPeJnPW/nMjncVWeR4bYOiyST5cMib2hAYWDChh1yCQsJksNEqaVw6WZw9EJMyIs9xivajFys9Z1Y2vg6khInNKmwaasXLOydyawA9J1i8SBVjQ1w1BHZEypHSgPDhT6DnLaQKalTU34uWt4/n0AQ0rmVd91Daq9VmCCDekaB7tW512TTYuqv0yx5LrecZd4v/RhlDtJuCcGIqTqomvl22WXQiwCv6fCmsD2RWlrSyCe5SLgAIjfWwoYdIEC7frU+XkDiXXlT2PKN0M6x5wEeL/c4qpqH0DCQVhSXZmy3CCh9O5Q53ZL72ttgfXRBAUDBs7Gt3hhWmFWOUysCYPQyAJeBWkwrVs6bV3LJfZmoMnUNdHhSPfr/MqnsACPxiiFdpKsYDywcO0BJNfxIlevzDgzaHLtQVenZN4Bemg5OZGBfAYbA/Z8pmkQol8076qyTx1IsvYZmmqih6qmTgmA2Ta4GDhw0NBVD1kun9wJJJSVawXt8v3b2wlUrE4yH17PRaUxIKbfizEFQ4DBPHKQtnWDvGlqkca532zG8VD6yVKDBAff0PwmkA6FAwrjsFKMAiVZOeDh8xdxaVZhr2yo9gret9oYvyqhmPGeDSCKNKUuaAACiYnOA854q2xjGt93LbZ5ZVxtWNno/v0GNmahSXeopKjDReuQ8+KIfLT34V4jXbXrXnGRpIDBvDYRoT6cXl3w9haty36UE3l1GP7M4lV4Ng/luTeeC5TGfyHD4qLum6gggML+i3R/lgBBa1NdBrbzevpQ2UP3GslN4PvBMvSKMinSSddf4OBAjStLuHJBJmHB65dZlhokkFRweNypo63jFy5eCt+pa0wKg8dd006bk68+fCFMIxQxrqxcCMzUCrdLq0w08RMARIFyCIBYGFTGB5ExznesyjRAAWICs8YldadysB1KtfO69V2ZzdvRg7HngGu6Vm7S3kKyCvwYT58CBouMYQb7M2lz2c/sSmLMhiyHOQPADnPAELEMrcvbYIyeEZ0plQZVOhfsjihtZCdD/7UJvKf6R4+GSb9jgJDqV3JR0Tc+SIoRavUNRa3T1X+GhjcfKilPG2nsLI2cZV0upzi3DzYJ7hAkLKWQJXR
KHr5UYmVEHdxgbWQwz/akq3UeFKJZZlFW+mrC1mwCFyMGIw4ULB2sPFAgKretfnDqhGlTgwuTPpOgczEzErvnpfPryrerPtqQpa3Gmz0BmnOR3QRci1WI66evYHkybbIKexBkBleadPdj+NPqkJJ5t92tyu57Avx98RJ4GyFXYxKbCM7U53nz+yiUOslnQvVq1IBp+wIpXYaIPo1iE+Hy/YDHdADSEylPG2dW10ep+7THa44DoKXtKvpscJZjDYZ4AEU2LJQmW/L+yaHh4mKy1CChMDUVmWhu08phUrsxGuNZhNkcPOVKYVEY77tdgVwma/WEkdbV6UG3D3DnAykFZ9ImRc5g0KCYo+z7ToQk2gpTWx2nchgywWll0jfNs2Ie9iKnUkg9k9TEq+nL5fV9V+h9n1MOBOSAck74JKzlr91XuJaBBt4uJAvVR4Y8A7VfofFAIunakwAY7mYYgdLMudRukFravuX1UcelGLTo3bD0fQKq9Q0X3dfT4dAmYflkqUECGSuaRLx7H6mtCYY0ntOiaXNjLXzf2r7U2BogZiaGrOailYc0wmLfOQCQrIKU2vwxWZZDv0kQ8CtMbUDWJr/cqnuQVE0Z/GoOGFIDfsoGQT6PLppb1lubgOUKVwOG84CkrmeWAgN9DK+1oKSp/Oj+UvUhl8SQPqZxgmjvKm+v0P3M+LPm5XLAnFMLzBshU0rOVVkDB4vGbsg1J82uhxsCa0a82TgdQvramGjHL5cb6BA5ZBIWk6UGCaZmElID8mphsVs60Mi1O/MeBycWKHNivfeCQ6NXR+X8ZjUMNPABT7MX0HaNpJCzqtU5SG2iXKfQ511Ubp/VJ08TJo7M6DjvJNaawFh6SSfngAKXPvpvYL5noz0XnkcXdS/L6yojXJN4jr3eZanT96rYDMNkTOs+vSdPrGbpY5NBtSms78PW6OpA+TypLGIQmvxijyStrQwlEvtg3Zzh4lA2QF4zb3ppJ0KiubMuIlcCCOiSQ5CwmCw1SJhVPrRy6Ry+9MD5MMn+P9f4rTwfe+oIAOB//s1ZAMDGxGIVFl975AKmlW7s2CVmtoPCjuDsyA+ExntKlKwnEoDQhEIze4DBJjJwlYV+LbmJoU5LReZWw3wCWcTVd2jgk0EUeHDriK+hFasECslsxMC3X+CgqvSVuLymawXeR4YAraFqhj7pC2OisuR9cyNCTv9zoACg1/uKymVAIXWeCwcI3A5BMkp9H9FClvaLXKtIF2PYR9JeRTrbQOVKmdf4uU+9DtJl81Dml6UGCaWLJ4YKBq5y+F9fP4fvuP5YSPfgxT1UzuGG0SpK68K2zl9+8Hy4/olX9wQMsz0YMwPGqyjsyNsMoLEfABjDoAgBhBAqGs01OeGrEGIKuD956Vw0uA+dQDR9b8yGdK+YF5kcfQY6WCDXN5p4utiEIeBAPqcUkMpNnCHvDqO7XPqULMIezCPSk4DK5/c8BChw0Z4IT0NAAeKYFD0fBy0QWE4ul4txyqMi52kh65LyyGld1/MWuD2PVE+kAFYf4qmPITPJNwsUHAZTWkyWHCQ4zKrGPsAYhz2lLTxyaYrdssKRyQhmbYyNsfFqCqd7CeTEVCVgHJzLNxpaebesfKtGpdDljifVFBwYECggu4sAUCoHa0121zKbuOn2hMl/a/kt1uupHn7SaSacQI1zsMDsFaShXFSj6Jl6yQGDefq+vGaIvn+oUN6yzHlBSBeQ4x45EiykgEJfyZWcUhlqx4tmlqvrQu2Ag9oBYGHgRKuJ9K6JrhPgQO9+aYATx6Rg5Yi0OVCWAtaaJ4h0AQ1p6xO5TeUi74UrhCmoqhJYECRUh+qG5ZTdmcNq6DV+98WLU/8y//tfP4xpVYVd5Hz6ChenJZ5yenP+Qo2Fo85Sf6IOQswGYq8LUi+k5iS5+gVidUIOHFSViyYEUn2k9LtcNcIH+iTVnentOf17zgvAJ6iPW4PSIQCF5rzCLDCwAECdeSQ44HUJtiDiRXQZ8Un
J7E5+WUSWl6pXqT0/dry33n8AAJBswjyi1Um+E24Xw+tb2DhEswGiLdvnFfkEcgxWynC2CyBIewaNQUgZJEt3bK1qsWtqu1594kO082yDBblzpVY/KQcZLO5Q5pelBgmuBgY8pOvOrAqDiTUG62MLa01YtU4ThndfffhCref3eaXUD84YwDQjtlQtWOM7dLR9La3y0bY70NQIXDSVQgogtC3z2/WnVXtTt+6OmrJVbE3qndfxlZ4Jq9HAWNTvSAULigoC6Pa2mBcc8PNywpwHIAxddc8rUTm2uWdizoAeMRHmqOvCqqZasuqflEHlQDaor3sqldZnRb4IQJDfOWCQQEEyJH1eVQ4caDEU+gZZ6tq060oBCIeGi4vJUoME6jiXphV2ZlVY0VgLjK1FYQ3G1mATDrNqjGlZZVaMPrzz46/qsE2w+iMzILsIAPVAYp1BGdQODaAh4QCDswVAe1KLKPLOia1OJ1Z1IU9lEOAqCFJZcFHVFx2DrbymispprxKp3AZccHakiYlhbTN5y2BAfYw2WxNR4nmG1XEHtX45AYBmCyKPd2dCDaKbNRpyL1wtkVr1a+WpQZJ62Ib0Agwdwt03B0WbZNJlEwP0D+jE89QkBRT6SAqkaOoEDgz2O8Lqou6fi8ohSFhMlhokHJ1YrI4tygpYGfkmP7bGB1myJrhIziqHnZmDgc127qLPClEsI63hQZ2Yi2Pl6j0laoDgGuYDaEABoK92iS3wx1kdjfheI5OUPUF7QiR6OL6tUi77e6zOqAwJMIZIa4Ku4q8eNLiIXQh7YUT3kGM0ukEB3b7chlgKf/2XAxzkJv8+UREjEcCI2zfMxRYkvDw0wCEZO2nP0CXW9rcX6cpPu1fuRjlEehnzyf1ZEqqIrsegdaU+YaZzjEUKGHSVS9JnN08tn0PVwvLKUoOE608cweamX/l/6YHzKCyBBBMisE1LB2cMxtYB8MGXvvzgeYxss730kObr7AiojRZN/SmsdyWyrt53wTRhSCvnOxKpQ4C2jQEXTZ1AwidjqVvm/usSIGidvosY0IzXUtIHGKSMJZv6ONVoTqoivLjoms76JUZjXR2TBgo5gKDVY1EKnk/6fXT90rsgyourGwZMwF3SN58+xp5S9sPuozBxnwHSAGERzCe9BjSgcDkk1a1SwKA5n7/ZvhtRaU38m80ctKQs4eyCTMDhBk/LL5srFiNr/BbQobMCD9cvd1JYFNZhVJ93AB5z4sjgckozQmGaLkN0YGG8vYJxgDNAaXwNSqZmIJsDaYSoluPaBol+aW1UWwBKRxOfBhC6V3BtejJFExMF3wdMSICQmoS5a6WsKzEdxNg0eSeL7SUaCJNAQd6bNvEctKRUD9lQ2wIohLx6TvR9wEVvrw5GA/V9h/MyN9LIUZO+WWu3x90LfXn7OzH2844YzhhwSW1A1QcEqPl9s4EBE+cW92443ODpW0Aq5zsGDe6la+IokJfBpDC17cD8A/usctiDByMWMTtA8Q8q+mgWyagHqmZryOYeKrqX1MrX0xIVOAhowEGKQYh183reROuTeGaCnTftCWBK6oYEmODgQAMGQzwjUsetEpJbm0xSTIZ2XAMINJFxo9iu+i3sBslVL5m8umwVOOtE9ed1GwK0ZNocaOiq8/+/vXOPjeK6/vh3Zry7tvErjjG2IRBDINThkZYQfhYtjQQ/DEFVaFB/IUENRBUIatKSEESJkpCmTWmpVLWpUqpWEeaPkDaRoDS0jcrL8CNxIKFYBEz9Cy6pSWJDeRjj5+7OnN8f89g7s3ce62ds3w8a4Z25c+fM3Z17z5xz7rlWzIfR/3oFiHK/0x48x25KVE/jEwBwF0TrW93R7h50Ls0N9DwA0W2dieHkKiBN672SIPIkDH064xrimgQNsjUdx+wQZIcm7jc1BwA+v9GGEo6lwRwYiQmwY6c3qpoZf6BfVwaBJP3BVmQjYyIZLgqVHbz1/3kBYCpn4HHmRnAe5+HlFnAGKyaZ/T3
ay+kRcAb6mYOu3zQ7lTPY+ZnsuesmOK7v5+pwysmTNQg8BSHlvsUlIM/re3WmhnZTFFgZexNP4VQwetp/OpWbJCWVSeTUk7wMXtdLOhZwNUwWXuIoc09v9UMn1jsFY6UwMybKEj+Y0muVR57lwKkguC2K5keQx00y/gm++AwbJaEjpllvvyHLbGxo4Iw50BwI/PoCWZLwn9Z2SAAKckZZ++PGstC2aXaAbfqk7k4wr6PHJOtWB0lPjCIBqmaPnjf/Zwd/E/btz8tykHwPqT3cvFkNzo7bK4rdLUBLl0XiWhjsAiRf121evxd+CoJXcKLNNO2wIphyKR4yOWdUpJw6mVljIUgqbHMAZeX1itUIonz5WUmcbideQKTKyOAVKKrX564w2NJEy+4KjtMS4LQYBLXwBHKfmffIXMNtiXBLaXCtLSGvpzXDHMAtGRKxD87nTkPCmuBUFNxiDYjcrQdJMQbMZ14zBc0ZMVBQHyRTErMbhgFpsj6L4UaXClUjhBQJOWHFtlZCyIxUhH8Ue5qsdwYxxxPSGdeQxsznNwcx1VAOCLBmMDg7edmwKCSyK+oxBzE1OVDRzO3gVASc1oOemLR5sYBB3rZ51/KfHqhPR5WNWApPFwMrjzmDQSPwTOT2ayQrBX7fr1XeRR5eamLrM8jVRM9ahHizU/jXctTh853aZNYkSxa3dMlAYhCzrGs9eCvnTce0KQUpKAtBvx/r2g53j5OgSZyCZttkn91A7hhOXc7VTC0ZUsgumVRPkrLAKArGPklijiPZpWBVxVyOZz1ww/8Fy1F+kC0Guruhd+4C4W4YBpiLOgHA//7rKkKyjHQlsZysLOmuApWAMICcdAWdnZ3IyMhwqVFHkYDGa23ojGtJHVtYkeB8RzBjIEzFxGbWo0TiJwKswEXNUBQA5k2VUQKc1oOgioGzzzGVDj99wC+bH98/bD83pEiW9YBVEHhv50BiYEncG0eT4TynbnEPQWIGvPBNssTxoztzWbjlq0iSCam5RJwKgNcCVID9LdupLAwkflYha/aBTelLHO9JwGhP8mYkV2KXhxd/w4sL6Y82JiSvX+K0KJiisMqCdT5HMTDr4O3vD8yfYy/CQAQDSEqx4du2bcPs2bORnZ2NwsJCLF26FPX19bYyDzzwgG5iZ7a1a9fayjQ2NmLJkiXIzMxEYWEhNm3ahHg83vu7YYhpGu4qzEFpQTZKC7J1FwERYmbOBC2OmAb8p7Udn91oSzrffE5M02dUNWIOSFc24hpZlgPbeQSbgsCu02ApCJToQFTTCkHEWAsSq1uag01MJSsZlDmdkp1W6SQlH6Kc6Jz9kgbZO+rEpjAKgSy7KwiyrHe0ZnmFs99sd/2adjmdm1mGjXvwyzXAbm5YwafGbyam2T8n/tasjT1uKnSmUue1AYmyXhvgb5UISm8C9dxm1gA9f2FzKgjm78Hcl6qCYH4XXjLa2p/5vtktqZyWrGDw8nA46+gvnBYA21RrSt7YfoOYf87+hHcubxsKmMmUeruNVFKyJBw9ehSVlZWYPXs24vE4nn32WSxcuBB1dXUYNSrht1+9ejVeeukl63NmZqb1t6qqWLJkCYqKivDee++hqakJjz/+OEKhEH7yk5/0wS0B43IiSVq8qukaUZcx2CqygpCsQiPgVlTFWEcdUZWQJuvrQ3TFdaUgHtX9hrKk16XKEmRNd02wOBUEa/DX9NkRTjdDUmZCjuXAmjnhmLJnxmF49fm2IEIlOa7AWcZZl1/AGDtY85QDsw4vk7G14JYMOLIkGW4a/zfQIKSShtr5vcSYc2PwbhenVSEoPQnOc3V9GBaH3mYYZOszCfpWHuTeeQqC7XMvvl835YBXnq+A8Z8V5+qYXla2oJkyg3w/bAnTmpBwH7L3lVyXm7WAbbIgv1SnDIDdBeEdm8TUM0CWBBGT0DtSUhLeeecd2+eqqioUFhbi1KlTmDdvnrU/MzMTRUVF3Dr+/ve/o66uDgcPHsSYMWNw773
34kc/+hE2b96MF198EeFwOOmc7u5udHd3W59bW1s95QwrEjTHr9QctDXSrQxxVUZHTIOqAVFVw/ufXMd/3ZlvlR97WxY+u9FmWQ2645rlCggpEhRJgWI9aYlrSZKeuZFIz5kQJ7It0MKLQ2DxUxBs0xLBBtLZ6/Hqb2ROeS+84g68FARnHebgb/p7U5p65zDrpupTT2WNCd5sEd75XgGCfutoONsnqCLmhdvbtpVkqwcR/G6Drpti4D/omrJ6X7cnCoKToLJ6yclm4uQl/OKtjum8By/8FAO/GthB2v5Mu9+Um1IQ1D3ilNl0f5hyAHZFgZdcaqAUBEHv6VUqmps3bwIA8vPzbftff/11FBQUYNq0adiyZQs6OjqsYzU1NZg+fTrGjBlj7auoqEBrayvOnTvHvc62bduQm5trbXfccYenXGmyhDvy7dMXNesYkJGmrwppKghdcd1c7GTsbVm2hZ4s87KqKw6W28EwKxISpnfJMMOb/6cCT0FwwsuOyDN9AsmmffP8VORyq1uv3x5vwMNsO9bMHlPJMt+yG3tPTvzcBLzrOgcu52aVtZn1vRfOSuThcDdTB8E568NtY90OpsvLmZDLOQC6ydJTU7jXoGs3z9utMWy7Js4LeK2Acnq5B/xkDUJiurP7+ipu9zAQBHUPEOmKgbmphtvUTPDm3Fg3mrmx5wB2RWMAbzkwmqb2yTZS6XHgoqZp2LBhA+bOnYtp06ZZ+x977DFMmDABJSUlOHPmDDZv3oz6+nrs2bMHANDc3GxTEABYn5ubm7nX2rJlC55++mnrc2trq6eiwHtLiWuEjDQZWSEZIUVCe0xDpiQhTZZwtSOKirvHJJ9kEEmTACho7U50Dp1xFYACVda15rCSSPMsG4OmqukvzyoIChKLPekymkLqKnfS2gm2+zFN8YnXBeebZUxleioZACRXJYA3lZHF7YWCTZrkZ1kAzMHC7EjNSslWNgazPldxepWzwMtKwMJrj1TfNE2CRPMHmRKqSIn62QBWa4ltK0hUhmJMqdWk4BYar6mcLH6uBV478dqZN8U26HUDB+t6rPLpZ93xlyfxPbJWBds0TaRmIfPDFNGr1bxuw8s6EMR9BPhY/hyrsZpWBdOy4fy6ncGVAwGpGiD10t2gumiCI4AeKwmVlZU4e/Ysjh8/btu/Zs0a6+/p06ejuLgY8+fPR0NDAyZNmtSja0UiEUQikcDleb+/tqiKG51xdBtWg+LsdISM+IKxOeme9emdOeFqRxTpaTJCsgxAhiJpRgYEGItD6WZ2yZDBdD1A0xUERQZCkCEb85VimgZNkwBNQwymKyLRwZrXBZiBknnqnG6IZOuCZBvYnbD9R7BOOGGFYF0QKjEKhDX9k2+qt82PZzvWJL90wo3h52Lwm4cP8F0IbgQ1mZvymTjvgTfzgXd/3koQWRYL51oeYFxYptIRUiTIlAgG9cJv+iorNw9LeTWUDTbbqXl/TsWAl1jLKUOQ4Ecvpc5ezr+uIHgpsUHa0cTpouqpiR9wX3eGVye/bbyVv+RKzC8v8Vtm4168XCesy4ENjhR88emRkrB+/Xrs378fx44dw7hx4zzLzpkzBwBw4cIFTJo0CUVFRTh58qStzOXLlwHANY4hKJ/faIMsSSjKG5V0LKYSbnTGcLM7jpiqISuchuxImuF+8O5Nw4qEbhVIT5N1c7eceAv06hr0Z0YCSWSkbtWTKUnGvGeNJEAmq1P3e6vzyxdgKgqJRZHMtx93KVMxibLxD2Z/YA4CGkch0M9xf3t3rsFgm97HZo1BcsIdvnyBbyUw7Nu8c7+Jm4LDYmb9dAZz6uX5146pCcVANhVGWbJbjQBANeqQ9b9DirFf864f8B7gkoIQebkAGEUBgKuy4NVe1n6flM9eQZFu373zt5eKJUOX0f6Zp9C5Tdl0wyuWxQ1e8GmQn3sqyoHzWE/QX03Y69tjEwAkKQv9jVi7oXekpCQQEZ588kns3bsX1dXVKC0t9T2ntrY
WAFBcXAwAKC8vx8svv4wrV66gsLAQAHDgwAHk5OSgrKwsRfGTYR+Ks003EVP1oMO5pbcnla1uuIrssIL89DRcut6G7riGW1ENo8IyQrLuihidoSA9TUJ3XELhqIjx9q8vTZ0mS8bKk0b0vgTrrcrU8s24BEWWdLOdpisNbMZFzaO/8JoJwL9/5m9Vz4kQJHMfEKwDtQdKJszfbi9/CWuCx3UlQFUJmqGAqUTQJL6y0FPYN1u33AYJeezuAlNR8BvonAOi822ZjdqXJcnz7VDVCN3xuBWbElJkXVkggqZJVryKKae5P6SvH24pDbzgOifOYLtUBwpe4iT9/vnlrOM9NMs73RvJ8Q7J8lv5DAIqDW45N9zdbHxZ3ZSwpFwXPm3O5l7wC3ZUZIlrTQiq9LFl/b4zp7JjfjJdDkDi2XcqCwO1JgRpau/dDSImIRiVlZXYvXs39u3bh+zsbCuGIDc3FxkZGWhoaMDu3bvx4IMP4vbbb8eZM2fw1FNPYd68eZgxYwYAYOHChSgrK8O3v/1tbN++Hc3NzXjuuedQWVmZkkuBhyxJtumIRECmEYPAoyQ7jHTdTwAiIKYBbdE4FDkEOU23MHx8I2opA/rDKUNJkxBRJIQVXZFIT7NbAlSSjDnJhpmciUfQwwoShkLzYZRlCSEYifAdnQA/2p4/48FeJljHAxidgWN6pfP6iVUS7Z10LIWH3X3VRb1NVNXoVGX9s2XOVPmdFve+XBIsAfoUUFZ2dl0Pr6RHZpU8d0GiDP8NWSVCl6oBqr0tQ7KEzJCC9DRZ/30ZimYiWDXNUkozw2awLVn7ABjBoMntGjSjIasUON0kQZUFvzgVXSb+tb3qZDM4Jh/3loW3P3nAs3/2shqkYi1IxfXglNGtrlSsD6aC4AywBIK5cgDY+kzerBneDAcn7O04uhdDvmCy9JYvupLw8ssv4y9/+Qtqa2sRDofR0tLiLw8Rtm7dit///vdoaWnB3LlzsWPHDkyePNkqc/36dTz55JN4++23Icsyli1bhl/96lfIykpt9eOUlIQdO3YA0BMmsezcuROrVq1COBzGwYMH8ctf/hLt7e244447sGzZMjz33HNWWUVRsH//fqxbtw7l5eUYNWoUVq5cacur0BM+v9EGxeh4b7Z1IKYRciNKUlpllimFOWhuaQcAdKmaMVMBuN4ZQyysIKYB/7rRgYLMMHIiaeiKq9ZDJoUVhBW9Mw7LetIoIr3zIpCRP92wKkjG6o+SoSwwb4BWu0gAZMlIR2wfhE2CKAgmVgfB9U8ab++wv5WYg7XNZcGe18dPNeuvToqhMJbF1jEVLP+lqb0Gb+u6jEnchDWNW+dxuj6nlYA3eLD7zKmuHTEVMVWDRnqQqSxLSFdkKJJks1wllAQAYRlEMjTSrVWJHPyKLQ9HTEssQ24m3LLLLNlkdg5Obm3ptCp4LebEq8M5mHLLuLxl26wTxnXtro3EM+JmJXLDb3VSp1x+CcZ6gjP7pJ9Cxhuc2SsHcj0EUBDcclSwgYk9QcQfuBONRvGtb30L5eXleO211wKds337drzyyivYtWsXSktL8fzzz6OiogJ1dXVIT9dj7FasWIGmpiYcOHAAsVgMTzzxBNasWYPdu3enJJ9EzhU/hgA3b95EXl4eLl26hJwcPR3z5ZZ2KEZ8gQINkGTECWjpUqGCMCE/m1vXtdZ2dKv6mg+dcRXROOFKRxSKJCGSJuP/rrZhXG4GikZFjMx7+pOWrsgozAphVJqMnIhiDG7G1DSNEktHa4lVIVUypxrpyZqiRodOGmzBaea0Ixh1sNMhAX8FQSUCETFvFPZjJux0SEDvCCTe4Mf0DLY8DZy3A7drsLD12TvshDxOM77tHEedvDn1/gGBCVKzuCT2S7KznewdqqoRonENHXEN1zqizHRbPYg1M01BcXY6xuboFq0wUzmRvR0lo16ZuS8i/XcWVQkx0l1YehxDYqExS26HAhQkYI73JkoBBhqpl6mU2Wuz1ze
vzU5HTJQJXLUhl/2zZ5IjxyG3+3MjSMCtlxWBrcNrsDZrMKczsvWm8r053WESEq4DN0+H8+cUZFRpb7uF/77vHrS0tCA3N9f/hBRpbW1Fbm4u0sr+B1BCvatMjSFe96ZtzAFSD6r3oqqqChs2bPC1JBARSkpKsHHjRjzzzDMA9DFxzJgxqKqqwvLly3H+/HmUlZXhgw8+wH333QdAz3P04IMP4tNPP0VJSUlwwWgIcunSJYJ9uq/YxCY2sYltCG6XLl3ql3Gis7OTioqK+kzOrKyspH1bt27tM3l37txJubm5vuUaGhoIAJ0+fdq2f968efS9732PiIhee+01ysvLsx2PxWKkKArt2bMnJbmG5AJPJSUlqKurQ1lZWZJmJ9Axc0mI9uEj2scb0T7+iDbyxq99iAi3bt1K7a02BdLT03Hx4kVEo9E+qY/Ivuw2gD6zIqSCGQvIyzdkHmtubrYmBpikpaUhPz/fNR+RG0NSSZBlGWPH6qst5OTkiAfUA9E+3oj28Ua0jz+ijbzxap/+cDOwpKenWz76geQHP/gBfvazn3mWOX/+PKZOnTpAEvWcIakkCAQCgUDwRWXjxo1YtWqVZ5mJEyf2qG4zn9Dly5et1ALm53vvvdcqc+XKFdt58Xgc169fTzkfkVASBAKBQCDoQ0aPHo3Ro0f3S92lpaUoKirCoUOHLKWgtbUVJ06cwLp16wDo+YhaWlpw6tQpzJo1CwBw+PBhaJpmJTgMSh9mGR9YIpEItm7dOig+oaGAaB9vRPt4I9rHH9FG3oj2CUZjYyNqa2vR2NgIVVVRW1uL2tpatLW1WWWmTp2KvXv3AtCnQm/YsAE//vGP8ec//xkfffQRHn/8cZSUlGDp0qUAgC996UtYtGgRVq9ejZMnT+Ldd9/F+vXrsXz58pRjQIbkFEiBQCAQCIYDq1atwq5du5L2HzlyxMpJJEmSlY8IgJVM6Xe/+x1aWlrw1a9+Fb/5zW8wZcoU6/zr169j/fr1tmRKr7zySsrJlISSIBAIBAKBgMuQdTcIBAKBQCDoX4SSIBAIBAKBgItQEgQCgUAgEHARSoJAIBAIBAIuQ1JJePXVV3HnnXciPT0dc+bMwcmTJwdbpEHhxRdfhCRJto3N4NXV1YXKykrcfvvtyMrKwrJly3D58uVBlLj/OXbsGL7xjW+gpKQEkiThT3/6k+04EeGFF15AcXExMjIysGDBAnz88ce2MtevX8eKFSuQk5ODvLw8fOc737FNRxrK+LXPqlWrkn5TixYtspUZru2zbds2zJ49G9nZ2SgsLMTSpUtRX19vKxPkmWpsbMSSJUuQmZmJwsJCbNq0CfF4fCBvpd8I0kYPPPBA0m9o7dq1tjLDuY2GG0NOSfjjH/+Ip59+Glu3bsU//vEPzJw5ExUVFUnZpUYK99xzD5qamqzt+PHj1rGnnnoKb7/9Nt566y0cPXoUn3/+OR5++OFBlLb/aW9vx8yZM/Hqq69yj5tLrP72t7/FiRMnMGrUKFRUVKCrq8sqs2LFCpw7dw4HDhzA/v37cezYMaxZs2agbqFf8WsfAFi0aJHtN/XGG2/Yjg/X9jl69CgqKyvx/vvvW8vrLly4EO3t7VYZv2dKVVUsWbIE0WgU7733Hnbt2oWqqiq88MILg3FLfU6QNgKA1atX235D27dvt44N9zYadqS0HNQXgPvvv58qKyutz6qqUklJCW3btm0QpRoctm7dSjNnzuQea2lpoVAoRG+99Za17/z58wSAampqBkjCwQUA7d271/qsaRoVFRXRz3/+c2tfS0sLRSIReuONN4iIqK6ujgDQBx98YJX529/+RpIk0WeffTZgsg8EzvYhIlq5ciU99NBDrueMpPa5cuUKAaCjR48SUbBn6q9//SvJskzNzc1WmR07dlBOTg51d3cP7A0MAM42IiL6+te/Tt///vddzxlpbTTUGVKWhGg0ilOnTmHBggXWPlmWsWDBAtTU1AyiZIPHxx9/jJKSEky
cOBErVqxAY2MjAODUqVOIxWK2tpo6dSrGjx8/Ytvq4sWLaG5utrVJbm4u5syZY7VJTU0N8vLyrDXYAWDBggWQZRknTpwYcJkHg+rqahQWFuLuu+/GunXrcO3aNevYSGqfmzdvAgDy8/MBBHumampqMH36dNsKfRUVFWhtbcW5c+cGUPqBwdlGJq+//joKCgowbdo0bNmyBR0dHdaxkdZGQ50htXbD1atXoaoqd4nMf/7zn4Mk1eAxZ84cVFVV4e6770ZTUxN++MMf4mtf+xrOnj2L5uZmhMNh5OXl2c5hlxMdaQz0EqtDkUWLFuHhhx9GaWkpGhoa8Oyzz2Lx4sWoqamBoigjpn00TcOGDRswd+5cTJs2DQACPVPNzc3c35d5bDjBayMAeOyxxzBhwgSUlJTgzJkz2Lx5M+rr67Fnzx4AI6uNhgNDSkkQ2Fm8eLH194wZMzBnzhxMmDABb775JjIyMgZRMsFQZfny5dbf06dPx4wZMzBp0iRUV1dj/vz5gyjZwFJZWYmzZ8/aYnwEdtzaiI1PmT59OoqLizF//nw0NDRg0qRJAy2moJcMKXdDQUEBFEVJiia+fPlyystfDkfy8vIwZcoUXLhwAUVFRYhGo2hpabGVGcltxS6xysK2SV8usTocmDhxIgoKCnDhwgUAI6N91q9fj/379+PIkSMYN26ctT/IM1VUVMT9fZnHhgtubcTDXHWQ/Q2NhDYaLgwpJSEcDmPWrFk4dOiQtU/TNBw6dAjl5eWDKNkXg7a2NjQ0NKC4uBizZs1CKBSytVV9fT0aGxtHbFuxS6yamEusmm3CLrFq0tMlVocDn376Ka5du2atWz+c24eIsH79euzduxeHDx9GaWmp7XiQZ6q8vBwfffSRTZE6cOAAcnJyUFZWNjA30o/4tRGP2tpaALD9hoZzGw07BjtyMlX+8Ic/UCQSoaqqKqqrq6M1a9ZQXl6eLVJ2pLBx40aqrq6mixcv0rvvvksLFiyggoICunLlChERrV27lsaPH0+HDx+mDz/8kMrLy6m8vHyQpe5fbt26RadPn6bTp08TAPrFL35Bp0+fpn//+99ERPTTn/6U8vLyaN++fXTmzBl66KGHqLS0lDo7O606Fi1aRF/+8pfpxIkTdPz4cZo8eTI9+uijg3VLfYpX+9y6dYueeeYZqqmpoYsXL9LBgwfpK1/5Ck2ePJm6urqsOoZr+6xbt45yc3OpurqampqarK2jo8Mq4/dMxeNxmjZtGi1cuJBqa2vpnXfeodGjR9OWLVsG45b6HL82unDhAr300kv04Ycf0sWLF2nfvn00ceJEmjdvnlXHcG+j4caQUxKIiH7961/T+PHjKRwO0/3330/vv//+YIs0KDzyyCNUXFxM4XCYxo4dS4888ghduHDBOt7Z2Unf/e536bbbbqPMzEz65je/SU1NTYMocf9z5MgRApC0rVy5koj0aZDPP/88jRkzhiKRCM2fP5/q6+ttdVy7do0effRRysrKopycHHriiSfo1q1bg3A3fY9X+3R0dNDChQtp9OjRFAqFaMKECbR69eokBXy4tg+vXQDQzp07rTJBnqlPPvmEFi9eTBkZGVRQUEAbN26kWCw2wHfTP/i1UWNjI82bN4/y8/MpEonQXXfdRZs2baKbN2/a6hnObTTcEEtFCwQCgUAg4DKkYhIEAoFAIBAMHEJJEAgEAoFAwEUoCQKBQCAQCLgIJUEgEAgEAgEXoSQIBAKBQCDgIpQEgUAgEAgEXISSIBAIBAKBgItQEgQCgUAgEHARSoJAIBAIBAIuQkkQCAQCgUDARSgJAoFAIBAIuPw/CPwQDVSbrcIAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import xarray as xr\n", + "import matplotlib.pyplot as plt\n", + "\n", + "ds = np.load('./climate_mean.npy')\n", + "print(ds.shape)\n", + "\n", + "data = ds[200, 0, :, :]\n", + "\n", + "plt.imshow(data, cmap='RdBu_r', vmin=-1, vmax=1)\n", + "plt.colorbar()\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "f5c992a5-1661-4bdb-82be-4e4afd8e1381", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "0.0073994473\n" + ] + } + ], + "source": [ + "import h5py\n", + "\n", + "ds = h5py.File('./forecast_20210215.h5')\n", + "print(ds.keys())\n", + "prediction = ds['prediction'][0, 100, 0, :, :]\n", + "print(np.mean(prediction))" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "3f7c354c-4f10-4020-a303-3b8a8e612468", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Merging forecast_20210101.h5\n", + "Merging forecast_20210106.h5\n", + "Merging forecast_20210111.h5\n", + "Merging forecast_20210116.h5\n", + "Merging forecast_20210121.h5\n", + "Merging forecast_20210126.h5\n", + "Merging forecast_20210131.h5\n", + "Merging forecast_20210205.h5\n", + "Merging forecast_20210210.h5\n", + "Merging forecast_20210215.h5\n", + "Merging forecast_20210220.h5\n", + "Merging forecast_20210225.h5\n", + "Merging forecast_20210302.h5\n", + "Merging forecast_20210307.h5\n", + "Merging forecast_20210312.h5\n", + "Merging forecast_20210317.h5\n", + "Merging forecast_20210322.h5\n", + "Merging forecast_20210327.h5\n", + "Merging forecast_20210401.h5\n", + "Merging forecast_20210406.h5\n", + "Merging forecast_20210411.h5\n", + "Merging forecast_20210416.h5\n", + "Merging forecast_20210421.h5\n", + "Merging forecast_20210426.h5\n", + "Merging forecast_20210501.h5\n", + "Merging forecast_20210506.h5\n", + "Merging forecast_20210511.h5\n", + "Merging 
forecast_20210516.h5\n", + "Merging forecast_20210521.h5\n", + "Merging forecast_20210526.h5\n", + "Merging forecast_20210531.h5\n", + "Merging forecast_20210605.h5\n", + "Merging forecast_20210610.h5\n", + "Merging forecast_20210615.h5\n", + "Merging forecast_20210620.h5\n", + "Merging forecast_20210625.h5\n", + "Merging forecast_20210630.h5\n", + "Merging forecast_20210705.h5\n", + "Merging forecast_20210710.h5\n", + "Merging forecast_20210715.h5\n", + "Merging forecast_20210720.h5\n", + "Merging forecast_20210725.h5\n", + "Merging forecast_20210730.h5\n", + "Merging forecast_20210804.h5\n", + "Merging forecast_20210809.h5\n", + "Merging forecast_20210814.h5\n", + "Merging forecast_20210819.h5\n", + "Merging forecast_20210824.h5\n", + "Merging forecast_20210829.h5\n", + "Merging forecast_20210903.h5\n", + "Merging forecast_20210908.h5\n", + "Merging forecast_20210913.h5\n", + "Merging forecast_20210918.h5\n", + "Merging forecast_20210923.h5\n", + "Merging forecast_20210928.h5\n", + "Merging forecast_20211003.h5\n", + "Merging forecast_20211008.h5\n", + "Merging forecast_20211013.h5\n", + "Merging forecast_20211018.h5\n", + "Merging forecast_20211023.h5\n", + "Merging forecast_20211028.h5\n", + "Merging forecast_20211102.h5\n", + "Merging forecast_20211107.h5\n", + "Merging forecast_20211112.h5\n", + "Merging forecast_20211117.h5\n", + "Merging forecast_20211122.h5\n", + "Merging forecast_20211127.h5\n", + "Merging forecast_20211202.h5\n", + "Merging forecast_20211207.h5\n", + "Merging forecast_20211212.h5\n", + "Merging forecast_20211217.h5\n", + "Merging forecast_20211222.h5\n", + "Merging forecast_20211227.h5\n", + "合并完成!\n", + "merged_label shape: (73, 120, 2, 128, 128)\n", + "merged_prediction shape: (73, 120, 2, 128, 128)\n" + ] + } + ], + "source": [ + "import os\n", + "import h5py\n", + "import numpy as np\n", + "\n", + "def merge_label_prediction(output_file='forecast_merged.h5'):\n", + " # 1) 找出当前目录下所有 forecast_*.h5 文件,并按文件名排序\n", + " file_list = 
sorted([\n", + " f for f in os.listdir('.')\n", + " if f.startswith('forecast_') and f.endswith('.h5')\n", + " ])\n", + "\n", + " if len(file_list) == 0:\n", + " print(\"未找到任何 forecast_*.h5 文件!\")\n", + " return\n", + "\n", + " all_label = []\n", + " all_prediction = []\n", + "\n", + " # 2) 逐个读取并收集 label、prediction\n", + " for fname in file_list:\n", + " print(f\"Merging {fname}\")\n", + " with h5py.File(fname, 'r') as f:\n", + " all_label.append(f['label'][:]) # shape 比如 (1, 120, 2, 128, 128)\n", + " all_prediction.append(f['prediction'][:])\n", + "\n", + " # 3) 沿着第0维拼接 (把原先每个文件中的前置“1”给堆叠起来)\n", + " merged_label = np.concatenate(all_label, axis=0)\n", + " merged_prediction = np.concatenate(all_prediction, axis=0)\n", + "\n", + " # 4) 写出合并结果到新文件\n", + " with h5py.File(output_file, 'w') as f_out:\n", + " f_out.create_dataset('label', data=merged_label, compression='gzip')\n", + " f_out.create_dataset('prediction', data=merged_prediction, compression='gzip')\n", + "\n", + " print(\"合并完成!\")\n", + " print(\"merged_label shape:\", merged_label.shape)\n", + " print(\"merged_prediction shape:\", merged_prediction.shape)\n", + "\n", + "if __name__ == \"__main__\":\n", + " merge_label_prediction()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "3b989fd8-e6f3-4a1e-9842-a9c5f1a322b9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "0.0073994473\n" + ] + } + ], + "source": [ + "import h5py\n", + "import numpy as np\n", + "\n", + "ds = h5py.File('./forecast_merged.h5')\n", + "print(ds.keys())\n", + "prediction = ds['prediction'][9, 100, 0, :, :]\n", + "print(np.mean(prediction))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "b7930bc6-f84d-4b61-b677-dfd7f78e0d5a", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAACRUAAARlCAYAAAAtN7LBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAC4jAAAuIwF4pT92AAEAAElEQVR4nOz9eZxcVZ0//r/OXWrvfcuezr6xJiFhlU0UBSKL7COg8HEcl3H06wwznxlRx9GfM47iOCPjR0YBUWAAAVkEAQNhTSDBQFayJ53uTtJ7de13Ob8/7q3qqq7qvbo7nX49H4+y6p5b95xzq0Obeud93kdIKSWIiIiIiIiIiIiIiIiIiIiIiIhcynhPgIiIiIiIiIiIiIiIiIiIiIiIji9MKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohxMKiIiIiIiIiIiIiIiIiIiIiIiohzaeE+AiIiIiCY227axefNmvPHGG9i0aRP27duHgwcPoru7G9FoFKqqory8PPOYN28eTjrppMyjvr5+XOdvGAY2btyIN954A5s3b8a+ffvQ0NCQmb/X683MvaKiAgsXLszM/eSTT8bUqVPHdf5ERERERERERBMN40mMJxER0cQgpJRyvCdBRERERLkOHz6M//mf/8lpE0LgzjvvhM/nG6dZ5dq6dSt++ctf4n//93/R3Nw87H6mT5+OSy65BJdccgkuvvhi1NXVFXGWfXv77bdx33334Xe/+x3a29uH3c/8+fPx0Y9+FJdccgkuuugilJeXF2+Sw2Tb9ojuKU3XdZSVlQ35ukgkgkQiMaZjEhEREREREU12jCeNvokcTypGvCgQCCAQCIyoj46ODliWNebjEhHR8DCpiIiIiOg49O1vfxvf+c538tofeeQRXH/99eMwox5btmzBP/3TP+GZZ55Bsf8qKYTAihUrcNNNN+GGG24YlVVbr7/+Ov7pn/4Jr732WtH7VlUV5557Lm666SZ8+tOfRmVlZdHHGIwDBw5gzpw5I+7n/PPPx6uvvjrk62677TY88MADYzomERERERER0WTHeBLjSf0pRrzoW9/6Fr797W+PqI/6+nocPHhwzMclIqLh4fZnRERERMcZKSV+/etfFzx3//33j1sQKJFI4K677sLdd98N0zTzzp900km49NJLccYZZ2DBggWoqKiA3+9HOBxGZ2cnjh49ik2bNmHjxo1Yu3YtYrFYXh9SSmzcuBEbN27EN77xDVxwwQW45ZZbcOutt454/p2dnfj617+O+++/v2DwatWqVfj
4xz+O008/HfPmzUN5eTk8Hg+6urrQ1dWFxsZGbNy4Ee+++y5effVVGIaR14dlWVi3bh3WrVuHL3/5y7j00ktxxx13YM2aNSOePxERERERERFRXxhPYjyJiIhoNDCpiIiIiOg489prr2H//v0Fz7300ktoamrCtGnTxnROBw8exNVXX4333nsv79wVV1yBb37zmzjjjDMKXptdfvryyy8HAHR1deG3v/0tfvjDH+LAgQMFr7NtG2vXrsWmTZtGHAR6//33cfXVV2Pfvn057Yqi4DOf+QzuvPNOLFmypOC1U6ZMyby+6qqrAABHjx7F/fffjx/96EdoaWkpeJ1hGHjmmWdw6NAhBoGIiIiIiIiIaFQxnuRgPImIiKi4lPGeABERERHluv/++/s8Z1kWHnzwwbGbDIAPPvgAq1atygsAVVZW4umnn8bTTz/dZwCoL2VlZfjiF7+IrVu34q//+q8hhCjmlHOsXbsW55xzTl4AaPbs2XjjjTdw//339xkA6ktdXR3uvPNObN++HTfccEMxp1sU9fX1kFLmPV555ZWC7589e3bB9w93G7L06r3ej/PPPz/nfcUck4iIiIiIiGgyYzypuE7EeFKheNG9997b5/tvv/32vPcXYwuyAwcO5PSZSqVQU1OTOT9r1ixYllX0cYmIaHiYVERERER0HIlGo3j88cf7fc8DDzwwRrNxAkAXXXQRjh07ltM+c+ZMbNiwAVdcccWI+g8Gg/iP//gP/PSnPx1RP315+eWXcdlllyEajea0n3LKKXj33Xdx1llnjaj/6upqPPzww/jGN74xon6IiIiIiIiIiIaL8aTimkzxpOuuuw6BQKDguccee6zgdnPF9uyzz+ZUbrr11luhKPwnbCKi4wV/IxMREREdRx5//HFEIpF+37Njxw5s2LBh1Ody5MgRXHbZZWhra8tpLysrw8svv4z58+cXbawvf/nL+O53v1u0/gBg+/btuOaaa5BIJHLaZ82ahZdffjlnBdRI/fCHP8Ttt99etP6IiIiIiIiIiAaL8aTimWzxpNLSUlx99dUFz4XDYfzud78b9TlkV9kSQuC2224b9TGJiGjwmFREREREdBwZ7Kqx/kpaF4NlWbjmmmtw+PDhvHP33XcfFi5cWPQx/+Ef/gErV64sSl+RSARr1qxBOBzOaVdVFY8//nhRA0Bpd999N2bNmlX0fomIiIiIiIiI+sN4EuNJI9FfEs999903qmMfO3YMf/jDHzLH5513HubOnTuqYxIR0dAwqYiIiIjoOHHw4EG8+uqrmePLLrusz2DFI488gmQyOWpz+fd//3e89dZbee1XXXUVrrrqqlEZU1VV/PznPy9KX3/7t3+LvXv35rX/9V//Nc4444yijNFbSUkJfvzjH49K30REREREREREhTCexHjSSF100UV9Jja9+uqrOHDgwKiN/Zvf/AamaWaOP/vZz47aWERENDxMKiIiIiI6TjzwwAOQUmaOP//5z+Pmm28u+N7Ozk489dRTozKPgwcP4lvf+lZeu6Io+P73vz8qY6atWLEC55577oj6WL9+fcFgUigUwj/+4z+OqO+BXHXVVZg9e/aojkFERERERERElMZ4EuNJIyWEwK233lrwnJRy0JWwhiO7elYoFMK11147amMREdHwMKmIiIiI6Djx61//OvO6rq4On/zkJ/tdnTNaJavvuuuugqvWrrjiCixevHhUxsz2xS9+cUTX//3f/33B9ttvvx1VVVUj6nsgiqLgL//yL0d1DCIiIiIiIiKiNMaTHIwnjcytt94KIUTBc70T14pl06ZN2LJlS+b42muvRTAYLPo4REQ0MkwqIiIiIjoOvP766znllW+++WZomoZTTjkFp59+esFrXnrpJTQ1NRV1HgcOHMBvfvObguc+97nPFXWsvlx22WVQVXVY17722mtYt25dwXNjNf81a9aMyThERERERERENLkxntSD8aSRmTdvXp/Vnvbv35+zxV6x3HfffTnHt91
2W9HHICKikWNSEREREdFxoPcqsewVZX2tLrMsCw8++GBR5/H//t//g23bee0lJSX4xCc+UdSx+lJaWooVK1YM69r//u//Lti+aNEinHLKKSOZ1qAtW7YMdXV1YzIWEREREREREU1ejCf1YDxp5PqrcNU7AWikUqkUHn744czxvHnzcN555xV1DCIiKg5tvCdARERENNnFYjE89thjmeOVK1fipJNOyhzfdNNN+MY3voFUKpV37QMPPIA777yzKPPob4/0iy66CLquF2Wcwbjzzjvx1ltvAQD8fv+grgmHw3jiiScKnrv00kuLNrfB+N73vocdO3YAAKZPnz6mYxMRERERERHRiY/xpHyMJ43Mtddei6985SuIRqN55373u9/hZz/7GUpKSooy1tNPP4329vbM8W233dbn9mtERDS+mFRERERENM6eeOIJdHd3Z457rwqqqqrCmjVr8Pjjj+ddu2PHDmzYsAGrV68e8TzeffddNDc3Fzw31iuFrr76alx99dVDuuaFF14oGCgDxn7+t99++5iOR0RERERERESTC+NJ+RhPGplQKIRPf/rTBZPEYrEYHn300aLNMbvykaIouPXWW4vSLxERFR+3PyMiIiIaZ9mlqr1eL2688ca89/S3p3jvUtfD9fzzz/d5buXKlUUZYzRN9PkTEREREREREQ0W40nFMdHnX2z9/Zkp1hZozc3N+OMf/5g5vuiiizBz5syi9E1ERMXHpCIiIiKicdTQ0IBXXnklc/ypT30KFRUVee+79NJLMXXq1IJ9PPLII0gmkyOey/r16/s8t3Tp0hH3P9r6mn8wGMTs2bPHeDZERERERERERKOD8aTiYTwp1/nnn485c+YUPPfmm29i9+7dIx7jwQcfhGVZmePeVbaIiOj4wqQiIiIionH061//GrZtZ477+hKtqio+85nPFDzX2dmJp556asRz2bRpU8H2kpIS1NTUjLj/0RSJRLBr166C5+bOnTvGsyEiIiIiIiIiGj2MJxUH40n5hBD9bkVWjGpF2VWyysrKcNVVV424TyIiGj1MKiIiIiIaR9l7lE+bNg2XXHJJn+/tb9XOSEtWd3Z2oqWlpeC5iVB+eM+ePTnBtGwTYf5ERERERERERIPFeFJxMJ5U2K233gohRMFzDz74YJ+f2WBs2LABO3bsyBzfcMMN8Pv9w+6PiIhGH5OKiIiIiMZJ75LBt9xyC1RV7fP9ixcvxurVqwuee+mll9DU1DTsuRw6dKjPc1VVVcPud6xM9PkTEREREREREQ0G40nFM9HnP1rq6+tx/vnnFzx3+PBhvPTSS8Puu3ci22233TbsvoiIaGwwqYiIiIhonAznS3Rfq8ssy8KDDz447Lk0Nzf3ea6ysnLY/Y6ViT5/IiIiIiIiIqLBYDypeCb6/EdTfxWuhrsFWiKRwCOPPJI5Xrx4Mc4888xh9UVERGOHSUVERERE4yAej+PRRx/NHJ999tlYtGjRgNf1VxI4u/T1UEWj0T7Peb3eYfc7Vib6/ImIiIiIiIiIBsJ4UnFN9PmPpmuuuQahUKjguaeeegqdnZ1D7rP3df0lLhER0fGDSUVERERE4+DJJ59EOBzOHA+21G9ZWRmuvPLKgud27NiBDRs2DGs+8Xi8z3Mej2dYfY6liT5/IiIiIiIiIqKBMJ5UXBN9/qMpGAziuuuuK3gumUzi4YcfHnKf2VW2VFXFZz7zmeFOj4iIxhCTioiIiIjGQfaXaL/fj+uvv37Q1/a3iqd3CexiEEIUvc+xNNHnT0REREREREQEMJ40lib6/Iuhv6S1oW6B1tjYiJdeeilz/PGPfxxTp04d7tSIiGgMMamIiIiIaIw1NjbiT3/6U+b4mmuuQWlp6aCvv/jiizFz5syC5x555BEkk8khz6mvEtgAkEqlhtzfWJvo8yciIiIiIiIi6g/jScU30ec/2s477zzMmzev4Ll3330X27ZtG3Rfv/71r2HbduaYW58REU0cTCoiIiIiGmM
j/RKtKApuvfXWguc6Ozvx1FNPDXlOwWCwz3OJRGLI/Y21iT5/IiIiIiIiIqL+MJ5UfBN9/mOhWNWKsqthVVVVYc2aNSOYFRERjSUmFRERERGNsQceeCDzevbs2bjwwguH3Ed/X+iHU7K6v3LDHR0dQ+5vrE30+RMRERERERER9YfxpOKb6PMfC7fccgsUpfA/J//mN7+BaZoD9vHWW29h165dmeObbroJHo+naHMkIqLRxaQiIiIiojH09ttv48MPP8wc33rrrcPao33evHk477zzCp576aWX0NTUNKT+Zs+e3ee51tbWIfU1Hiby/O+//34IIYb9ePXVV8f7FoiIiIiIiIhoFDGeNDom8vzHKp40a9asPhPYjh49iueff37APnpXNOovuY2IiI4/TCoiIiIiGkPZq8qEECP6Et1XmWvLsvDggw8Oqa+ysjLU1NQUPHf48OEhz22szZs3r89VUw0NDWM8m+NXXwFHKeWYjJ9dpp2IiIiIiIiIBofxpNHBeNLg9LfV3kBboMXjcTz66KOZ41NOOQXLly8v2tyIiGj0MamIiIiIaIwkEgn87//+b+b4Ix/5CObMmTPs/q699to+937PDjYN1hlnnFGwPRwOH/ers0KhEBYvXlzw3P79+8d4NscvTdMKthuGMSbjp1KpzGtd18dkTCIiIiIiIqKJjPGk0cN40uBcffXVKC0tLXju2WefRUtLS5/XPvHEEwiHw5nj/hKUiIjo+MSkIiIiIqIx8tRTT6GzszNzvG7duhGVKS4pKUE0Gi041o4dO7Bhw4Yhze/MM8/s89y2bduG1Nd46Gv+kUgEBw8eHOPZHJ/62q8+Ho+PyfjZ4/Q1FyIiIiIiIiLqwXjS6GI8aWB+vx/XX399wXOGYeC3v/1tn9dmVzLSdR0333xz0edHRESji0lFRERERGNkOKu9RuL+++8f0vs/8YlP9Hlu48aNI5zN6Jvo8x8LoVCoYHtXV9eYbE3W0dGRed3XqkgiIiIiIiIi6sF40uia6PMfK/1tudfXn5lDhw7hlVdeyRxfdtllfW6XR0RExy8mFRERERGNgaamJrz00ktjOuYjjzyCZDI56PevXLkSs2bNKnju9ddfL9a0Rs0nPvEJ+Hy+gueO5/nfdtttkFIO+3HBBRcMeqy+AjdSyn5LVRdD7zGqq6tHdTwiIiIiIiKiiY7xpNHHeNLgnH322Vi4cGHBc++//z7+/Oc/57U/8MADOYvYuPUZEdHExKQiIiIiojHw4IMPwrKszPH69evR0tJSlMddd91VcMzOzk489dRTQ5pnX6uO1q5dC8MwhtTXcEkpYZpm3mMgwWAQ1113XcFzL7zwQrGn2Sfbtoc1/7FQWVkJTdMKnhvtkt5Hjx5FIpHIHNfW1o7qeEREREREREQTHeNJg8d40ujrr1pR9jZnadlVturq6vDJT35yNKZFRESjjElFRERERGMg+0v06aefjtWrV6O6urooj+GUH+7LF7/4RXg8nrz27u5uPP/880Pqa7juuOMO6Lqe86iqqkIsFhvw2r/5m78p2P7hhx9iy5YtRZ5pYZdcckne/JcsWTImYw9EURTMmTOn4LmdO3eO6tg7duzIOZ43b96ojkdEREREREQ00TGeNHiMJ42+W265BYpS+J+WH3roIaRSqczxa6+9hr1792aO/+Iv/qLPhW5ERHR8Y1IRERER0Sh75513chIqbr755qL2P2fOHJx99tkFz7300ktoamoadF91dXX4/Oc/X/Dcr371q2HNbyji8Tgef/zxvPabbroJgUBgwOtPP/10XH755QXP/fKXvxzx/AbS0NCAV199Na/9jjvuGPWxB2vRokUF2zdt2jSq47733ns5x32VzCYiIiIiIiIixpOGgvGksTF9+nR89KMfLXiura0NzzzzTOa4d2Jaf0lsRER0fGNSEREREdEoy/4SrSgKbrjhhqKPcdNNNxVstywLDz744JD6+va3v42Kioq89meeeSav2kyxPfz
wwwiHw3ntQwmi/Pu//zt0Xc9r/+Uvf4m2trYRzW8g9957b85e8QCgadpxFThZuXJlwfa1a9eO6rh/+tOfBjUPIiIiIiIiImI8aSgYTxo7n/3sZ/s8l94CLRqN4rHHHsu0r1y5EieddNKoz42IiEYHk4qIiIiIRlEymcQjjzySOT7//PMxffr0oo9z3XXX9VlCOLtU9mBUVVXhRz/6UV67bdv4x3/8x2HNbzCSySS+853v5LVfeumlWLFixaD7WbRoEf7hH/4hrz0SieB73/veiObYn5aWFtx999157XfccQfq6upGbdyhOu+88wq2b926FXv27BmVMbu6uvDKK69kjjVNw6pVq0ZlLCIiIiIiIqKJjvGkwWM8aWxdeeWVKC8vL3juhRdeQHNzMx5//HFEIpFMe3+JSEREdPxjUhERERHRKHr66afR0dGROS52qeq0mpoaXHLJJQXP7dixAxs2bBhSf5/97Gdx3XXX5bU/+eSTePLJJ4c1x4H867/+Kw4dOpTTput6wcDKQO66666CJbx/+tOf4t133x32HPtz55135gRMAKCiogLf/e53R2W84TrvvPNQVlZW8NwvfvGLURnz17/+NRKJROb4Ix/5CEKh0KiMRURERERERDTRMZ40eIwnjS2fz9dn1ax0havsKlterxc33njjGM2OiIhGA5OKiIiIiEZR7y/R11xzzaiN1VfJ6t7zGKx7770Xp556al775z73OezevXvI/fVn7dq1BVeVfe1rX8PixYuH3J+qqnj00Ucxc+bMnHbLsnDttdeitbV12HMt5IEHHsiUeM72ve99D9XV1UUda6R0XcfVV19d8Nw999yDxsbGoo4XDofxgx/8IKft2muvLeoYRERERERERCcSxpMGh/Gk8dHftmw//elPsW7duszxpz71qYLb4hER0cQhpJRyvCdBREREdCI6cuQIZsyYAcuyAABXXXUVnnjiiVEbLxKJoK6uDrFYLO9ceXk5jhw5Aq/XO6Q+m5ubcfbZZ+PAgQM57TNnzsTatWsxf/78kUwZAPDmm2/iiiuuyFmBBwAXX3wxXnjhhT7LcA/G9u3bce655+b1feqpp+Kll15CTU3NsPtOe/LJJ3HjjTcimUzmtN96663DCr6Nhc2bN+P0008veO6iiy7CCy+8AF3XRzyOlBI333wzHn744UxbeXk5GhoaWKmIiIiIiIiIqADGkwaH8aTxtXTpUuzYsWPA9z3//PO49NJLx2BGREQ0WlipiIiIiGiUPPjgg5kAEDB6parTQqEQ1qxZU/BcZ2cnnnrqqSH3OXXqVKxbtw4LFizIaW9oaMCZZ56JZ599djhTzXjggQdwySWX5AVpFi5ciMcee2xEASDACXC88soreXvQv//++zjjjDOwfv36YfdtWRZ++MMf4tprr80LAJ177rmjtpVYMZx22mn49Kc/XfDc2rVrcc011yAcDo9ojGQyiTvuuCMnoQgAvvGNbzChiIiIiIiIiKgPjCcNjPGk8ddftaK06dOn97m9HhERTRxMKiIiIiIaJQ888EDmdVlZGS6//PJRH7PYJasBYNasWXjzzTdx4YUX5rS3tbXhiiuuwKc+9akh7SsvpcQrr7yCCy+8ELfddhvi8XjO+XPOOQdvvPFG0Uojn3rqqXjzzTdxyimn5LQfPHgQ55xzDj772c9i586dg+7PNE08/fTTWLlyJf7u7/4uJ9AHAFdeeSVeeOEFeDyeosx/tPz4xz9GZWVlwXPPPPMMTj75ZPzmN7+BYRhD6te2bTz99NM4/fTT8atf/Srn3NKlS/GNb3xj2HMmIiIiIiIiOtExnlQY40nHl8985jNQVbXf99xyyy0DvoeIiI5/3P6MiIiIqAiSySS6u7szx1u3bs0Jmlx//fX4r//6r5xrAoEAAoHAiMbt6OjICUIYhoHFixcXrDKjqiq2bNmSU6JZ13WUlZUNaizbtvG9730P3/ve9/JWUgHAySefjEsvvRRnnHEGFixYgMr
KSni9XoTDYXR2dmLnzp3YsGEDnn/+eezbt6/gGJ/73Odwzz33DLms9mAkEgn87d/+Le655x7Ytp13fvXq1fjYxz6G5cuXY/78+SgrK4Ou6+jq6kJHRwe2bt2Kd955B88++yyam5vzrldVFXfeeSf+5V/+BUKIos9/NLz88su4/PLLC/4802pra3HFFVfg3HPPxUknnYSZM2eirKwMXq8XqVQK4XAYjY2N2L59O95++208/fTTOHToUF4/VVVVePPNN7Fo0aLRvCUiIiIiIiKiCYPxJMaTJmI8Ke2Tn/wknn/++T7Pf/jhh1i4cOEYzoiIiEYDk4qIiIiIiuD+++/HZz/72SFd861vfQvf/va3RzRufX09Dh48OOzrzz//fLz66qtDumb37t34u7/7O/z+979Hsf4quXLlSvzkJz/BOeecU5T++rNx40b87d/+7ZDvuz+XXHIJfvzjH+Okk04qWp9j5eWXX8Z1112XVzK8mGbNmoXnnntuQn4+RERERERERKOF8aSRYTxpfD322GO47rrrCp47++yz8eabb47xjIiIaDRw+zMiIiIiGpIFCxbgySefxLZt2/DlL38ZtbW1w+rH7/fjhhtuwHPPPYd33nlnTAJAgBNweuWVV7B+/Xrcdtttg15Z11tZWRluv/12vPbaa3jxxRcnbADoox/9KLZs2YKrrrqq6H2rqorbb78dH3zwwYT9fIiIiIiIiIho5BhPcpwo8SQAWLNmDSorKwueG2qyHBERHb+08Z4AEREREU1MS5YswX/+53/iJz/5CTZs2IDXX38dmzZtwp49e3D48GFEIhGkUimEQiGUlZWhvLwc8+bNw/Lly7FixQqce+65KCkpGbf5r169GqtXr8bPf/5zvPnmm3j99dexefNm7NmzB83NzYhGozAMA6WlpZn5L1q0CCtWrMCKFStw9tlnw+fzjdv8i2n69Ol44oknsHHjRvz85z/HY489VrDk+WBVV1fjxhtvxJe+9CVud0ZEREREREREGYwnnTjxJK/XixtvvBE/+9nPctoDgUCfFYyIiGji4fZnRERERESUwzRNrF+/Hm+99Ra2bNmCvXv3orm5Ge3t7UgkEjAMAx6PBz6fD9XV1Zg6dSrmz5+PU045Beeeey5WrFgBRWFRVCIiIiIiIiIiIiKiiYxJRURERERERERERERERERERERElIPLh4mIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKAeTioiIiIiIiIiIiIiIiIiIiIiIKIc23hMgohNPZ2cn1q1blzmeOXMmvF7vOM6IiIiIiIjo+JVMJtHQ0JA5Pv/881FeXj5+EyIiGkWMGxEREREREQ3eeMeNmFREREW3bt06XHnlleM9DSIiIiIiognpqaeewqc+9anxngYR0ahg3IiIiIiIiGj4xjpuxO3PiIiIiIiIiIiIiIiIiIiIiIgoB5OKiIiIiIiIiIiIiIiIiIiIiIgoB7c
/I6KimzlzZs7xU089hfnz54/Z+JFIBO+8807meNWqVQiFQmM2PhHR8YK/D4mIHPx9SMe7PXv25GwF1Ps7FRHRiYRxIyKi4wd/JxIROfj7kI5n4x03YlIRERWd1+vNOZ4/fz6WLVs2ZuOHw2EcOXIkc7xkyRKUlpaO2fhERMcL/j4kInLw9yFNNL2/UxERnUgYNyIiOn7wdyIRkYO/D2kiGeu4Ebc/IyIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHEwqIiIiIiIiIiIiIiIiIiIiIiKiHNp4T4BGX3t7O77yla/goYceyrTdd999uO2228ZvUoPQ1taGDRs2YO/evQiHw/D5fJg6dSpOO+00LF26dNTH37dvHzZt2oSGhgbE43GEQiHU19fjjDPOwLRp00Z1bCklNm/ejA8++ADHjh2DYRgoLy/HwoULsXr1apSUlIzq+EREREREREQ0OTBuNDyMGxEREREREdFkwKSiE9zvf/97fOELX8CRI0fGeyqD9tJLL+GHP/wh1q5dC8uyCr5nzpw5+Ku/+it86UtfQiAQKNrYpmnivvvuw09/+lNs3bq14HuEEFi9ejW+9rWv4brrriva2IATyLv77rvxP//zP33+zDweDy677DL8/d//PVatWlXU8YmIiIiIiIho8mDcaGgYNyIiIiIiIqLJhtufnaDa29tx880348orr5wwgaHu7m7ceOON+NjHPoaXXnopJzDk8Xhy3rt//3783d/9HU4++WRs2LChKOPv2rULq1atwuc///m8wFD2+FJKrF+/Htdffz0uvvjion2+zz33HJYsWYJ/+Zd/yelTCAFd1zPHqVQKTz75JM4880x89atfhWEYRRmfiIiIiIiIiCYHxo2GjnEjIiIiIiIimoyYVHQC+v3vf49ly5bllK0+3rW3t+O8887DI488kmmbMWMG7r33Xhw9ehTJZBKJRAJr167FJz7xicx79u3bhwsuuADPPvvsiMbfuHEjzjzzTPz5z3/OtJ111ll45plnEIlEkEwm0dHRgYcffhhLlizJvGft2rVYtWoV9u/fP6Lxf/GLX2DNmjU4duxYpu3mm2/G+vXrkUqlkEqlcOjQIfzwhz9ERUUFACdI9dOf/hSf/OQnkUgkRjQ+EREREREREU0OjBsNHeNGRERERERENFkxqegEUmiVWW1tLS644ILxndgADMPA5Zdfjvfffz/Ttnr1arz//vu44447UFtbCwDwer248MIL8Yc//AH//M//nHlvIpHAtddei40bNw5r/IMHD+LSSy9FR0dHpu0LX/gC3njjDVx++eUIBoMAgPLyctxwww3YtGkTLrvsssx7Gxoa8LGPfQxdXV3DGv/pp5/GX/3VX8G2bQCAoih48MEH8Zvf/AarV6+Gpjm7FM6cORPf+MY38N5772Hu3LmZ619++WV85jOfGdbYRERERERERDQ5MG7EuBERERERERHRUDGp6ARRaJXZ9ddfj23btuH8888fx5kN7Fvf+hbefvvtzHFtbS2eeeYZVFZW9nnNN7/5TfzFX/xF5jiRSOD6668f8sorKSVuuukmtLW1Zdouuugi/OxnP4OiFP7Pw+/349FHH8XChQszbXv27MFf/dV
fDWlsADhy5Ahuu+22TGAIAO66666ce+utvr4eTz/9NLxeb6bt8ccfxy9+8Yshj09EREREREREJz7GjRg3YtyIiIiIiIiIhoNJRSeIr371q5lVZnV1dXjiiSfwyCOPoLq6epxn1r+Ghgb8+Mc/zmn7/ve/j5qamgGvvfvuuxEKhTLH+/btw3/9138NafzHH38cb731VuZYVVXcc889fQaG0gKBAP7jP/4jp+3hhx/Gu+++O6Txv/vd7+asdJszZw7+7//9vwNet2zZMvzN3/xNTttdd92FaDQ6pPGJiIiIiIiI6MTHuBHjRowbERERERER0XAwqegEc9NNN2H79u246qqrxnsqg/Kd73wHyWQyc1xXV4dbbrllUNdWV1fjc5/7XE7bD37wA4TD4UFdb1kWvvnNb+a0fepTn8KiRYsGdf2ll16KU045JadtMIGdtH379uHee+/Nafva174GXdcHdX3v9x49ehQ/+clPBj0+EREREREREU0ujBsxbkREREREREQ0FEwqOkFMmzYNv//97/Hb3/623/LPx5MjR47g/vvvz2m78cYbBx0cAZAXSGpra8sLuPTlySefxIcffthvf0Md/+WXX8amTZsGde2Pf/xjGIaROVZVFTfddNOgx66rq8PHP/7xnLYf/ehHOX0SERERERERETFu5GDciHEjIiIiIiIiGhomFZ0g/vSnP2HNmjXjPY0heeqpp2BZVk7bZZddNqQ+VqxYgalTp+a0Pf7444O6tvf7PB4PLrnkkiGNf/nllw/YbyFSSjzxxBM5bWeeeSaqqqpGNH5HRwfWrl07pD6IiIiIiIiI6MTGuFEPxo0YNyIiIiIiIqLBY1LRCcLv94/3FIbsySefzDlWVRVnnXXWkPs599xzc443bNiApqamfq9JJpP4wx/+kNO2fPlyBAKBIY29aNEi1NTU5LT1vq9C1q9fj+bm5py28847b0hj93XNYMYnIiIiIiIiosmDcaMejBsxbkRERERERESDx6QiGhfRaBSvvPJKTtvChQsRDAaH3Nfpp5+ecyylxHPPPdfvNevWrUN3d3e//Qx3/A8//BC7d+/u95pnnnlmwH4GY/HixXmBwUJ9ExERERERERFNFIwbMW5ERERERERExwcmFdG42LZtW94e7kuWLBlWX0uXLs1r27x5c7/XFDo/EcdXFAWLFi3KaWtqakJLS8uQ+zpRafEo7MZDMBsPwTzSCPNYM6zWo7DaW2F1tsMOd8KOdMOORSETCUjDgLTt8Z42ERERERER0aTFuFFxxmfcaGCKkYLd1ACzqQHmkUZYx47AamuB1dEGu6sTdiQMOxbpiRlZFqSU4z1tIiIiIiKiMaON9wRoctq+fXteW319/bD6KnRdof5P5PF7B5u2b9+O888/f1j9nWgq921H8s0/wFSHmEOpqICmQahaz7OqQqgqoKqAokAoKuC2A4AQAlDccYQCiH76H078STj9CkVxxkmPpyhQfH4IfwAiEITiD0IEghD+IJRQCdTaKRC6ZxgDEhEREREREY29Ey1uM97jM27UN39HC5I/+/8NLW4kBKBqTqxF0yA0DdB0N2akOc+alhM3EkL0xHHSfYjeY2YFi2Qfrweal6JCKG7fiuLEkVQF8HihBEIQoRIogSBEMOQ8h0qhlJQ5sSYiIiIiIqICmFRE42Lbtm15bVOnTh1WX1OmTBlU/8fL+JFIBIcOHcppCwaDKCkpKer4DA45xHBXj9kWkLIgkQQwvByg44ZQoNZMgTp9FrRpM6HNmA112kwopeVOUIuIiIiIiIjoOMK4EeNGY0UMp1q1lIBpQJpONa0JHTMCAI8X2ox6aDProc2aC21mPdTqOiYaERERERERACYV0TjZt29fXlt1dfWw+qquroYQIqf0cEtLCyKRCEKh0KiPX1tbO6j+0/bv359XJnm4Yw9n/MlG2PmhHUMKGFJAFxIaJE74vBppwzrWBOtYE1J/Xp9pFqFS6LPnQZu7EPqcBdBmzoH
Q9XGcKBERERERERHjRowbjZ1hL0Y7kaSSMPd9CHPfh5km4fVBmzkH2twF0OcvhT5nPqtgExERERFNUkwqonERDofz2kpLS4fVl6qqCAQCiEajeWP0FRzq7u4u2viFVooVur/+zg137OGMP1THjh1DS0vLkK7Zs2dPznEkEinqnAaS82dB2rBtK+usxK6kD78KTwfg7CjmEbbzgA2/sFGppFClGqhSDFSpKVQpJkoUE0o6+UiIXjubiZyncZPJjhrkRLo6YX6wCfhgk3OsqlCmz4Iyax6UmfUQ02ZBVFSNSTUjadtAtBsy3AUZ7YaMRYF4FDIW7XkdjwGG4VSRMk1I2wIs9yEl4PNntn1DIL0FXAAoKYMydQZEVS1X2dGk0/v/G3sfExFNFvx9SMe7SCQy3lMgOq4wblScsYcz/lBN+LiR3TtuBEjLxk/DM+GBBQ9s+BQbXtjwCRs+YaFSMVCtGqhVk/AKZIVhRNbCtaxYynjHiwAAImeeA4rFYH64DfhwG4CnnJjRzDlQ5i6EMneREzfSxndhmpQSSCYgI2HIaMSJKyWTgGkCluHEjkzDPbYArxciEIIIhoBAyIkbBUNODElRx/VeiMYTvysRETn4+5COZ+MdN2JSEY2LQn/wg8HgsPsLBoN5v9z7+o8rmUzCMIyijV/ouv7+wx6Nex/K+EN1zz334Dvf+c6I+njnnXdw5MiRIs1oaISUeX82uiwFdtZKtLgUiEMF4AQQDsGb148KG7UigZlKDDOVKGapUZSK/D9HE15nB7Dt/cyhpXuRKKtEvLwaifIqJEvKYHp8sLx+SHUQARcpoSYT0BMxaJlHPHOsx2PQks7r0V4daOlexCuqEa+oQayiBvHKGlhe/6iOSXS8eeedd8Z7CkRExwX+PqTjTe+tjogmO8aNijP2cMYfqokfN7Lz/mwkpYIGY3BVeUqEgWqRQJVIolZJYLYSQY1InphVsTs7gC3vAQCkoiJeXuXEWCprEa+ogREIoVg3LiwLWjwKPfOIZF5r8Ri0VAJaMgHRKyFsOKQQSIbKEauqQ6yqFrGqOhiBkqLdC9FEw+9KREQO/j6k48l4x42YVETjotCKL00b/h/HQtcWGqO/9uGOP5Sx+zo3Vvc+KUk7r2l/xQy0xKqG3NURAB9kHeteFcFSLwKlXpRU+OD165je0YzLNv9xwL6ayqfg2dMvzRxnagwVIbGmJN6N6zc8MeJ+AEA1kgi2NiPY2px3ztJ0WF4/TK8PUlGgWBaEZUFYJhTbgmKaUEwDosDPYDyoRhKhY40IHWvMtKWCJYjUTEekdjqiNVNhe/ITyoiIiIiIiGhsMW5UnLGHM/5kUygO0635EfEG8t/b61gCiABoRllOu08BqgMStQEbNQGJYFZBHwHgzD3vDjgvCWD9/DN6xnXnWWgOPZ2Lwu0FLD/wPrxmasB59EXYFgLtxxBoP4aqvdsAAKbXj3h5FQx/EKYvANPrd559fli6F4plQtiW8+zGkFQzBS0eg56IOgvP4lFnIVoyPuy5DflepISvuwO+7g5UHtgJADB8AcQq6xCrqkOkdjpSJWVMMiIiIiKiSYtJRTQu4vH8L4bqYKqe9KFQgKTQGP21D3f8oYzd17mxuvfJqFBwKCk02EXYBitpSCTbEmhvSwD7u6AHvOgKABdKFQHR/0opU9UQ8RUusz5So13xJ001DaimAU907EqUF5sn2o3K6E5UHtgJKQTi5TWI1E5DtHY6YpW1ALdLIyIiIiIiGnOMGxVn7OGMP+kUWAgVVn2IqsOvDhUF0GYAH3YB6AI0n45gVQihKSXwl/oGmVQk8PaC1cOew0BOOrxjRElFhWjJOEqOHi5qn+NFT8RQ1rQfZU37AQCpYCm6p8xE95SZiFVPgeR2aUREREQ0iTCpiMaF35+/5ZBlDb9cbaFrC43RX7tlWcMK0gxl7L7OjdW9D8cXv/hFXHvttUO6Zs+ePbjyyiszx6tWrcKSJUuKNqeBRKPRTFlCIW0
Eg0Eo7pf9eMqACRWD39C+d4JO39cZsRSaulPwqBo0RYUQfS9i0nQdYpRWOCmKgMfTq0y3BBRIKNLuSToSwp2DO9H+5iMACMV9r9Mg3OeCH4nMfSHTY/b1PJZEz/0KoQCK+xkAKDUTqGvaBzTtA3Qdysw5UOrnQ5k9H8rMegivb0ymKG0bMFJAMgmZSrjPSacNAlAVQFUhFBVQVUBRAY8HoqwCQtMH7J8mj+zfh4Dz+3gkWycQEU1U/H1Ix7sdO3aM9xSIjiuMGw3cx2iNP1QTP24kc+JGSdOCAS9gDRSzGXy8yEyY6GrsRFdjJzSvhv2iEov0/regsyFGLW4EALquw2NnxY6khCIl1KwkKyGUnnhQOgZUSO84S068aDD3IN2PU2biRLlxJPf8WMSQ8u6lJ15WdawBONYAeH1QFyyBsvhkqPMWQ5SWj/68iEYRvysRETn4+5COZ+MdN2JSEY2LkpKSvDbTNIfdX6FrC43RX7tpmsMKDg1l7L7OjdW9D0dtbS1qa2tH1EcoFEJpaWmRZjREtoSiqFBVp+KMpuuwxFB+zrnBj77CF+l3lZV6oHQLpCAACQgICMUJBDkPd1qqPjplk6Wzms0SaibYorrxDwnAggpVwEmrkjZg24C0AFsiJ7koJ4giejrPBHr6+TAGI7Pfm/NCZoJHWR1nAki9BhsoiJT9uaZfCwEIJeuUG6iSNmD3nIeiuM8CwraAg3sgD+6BBcASCrQZs6HNnAOlshpqRRWUymooFVVQSssh+qhqJKWETMQhoxHY0W7IWBR2pBsy2g070g07EoaM5L6WidiAH2Ff966UV0KtmQK1ug5qzRQoNbXQ6qZDqa4d1YAkTQzBYHD8fh8TER1H+PuQjjeh0OhUMSWaqBg3GriP0Rp/qCZ63EjYdk7cSIVAyvYAA+Zx5X+/LhSt6P0uM2kiABNJqUJRBBThxI16syFGLW4EALZQYAvnnhVkx42cmJECG8K23biJhfyYkeLGjLI6TsdZ8mQtSMv5kIYQWBLIjyH1HPSKJw3Ut8iLS+XFv3LuRTjxIvchzBSw433YO96HDUCdMh36wmXQFy6FZ/5SCN/oLkiThgE7FoGMx5xHIu7EmhJxyHjMifWpqjPX9LxVDUJVoZSWO7Giyuo+41hE/K5EROTg70M6nox33IhJRTQuCv3Bj0ajw+4vEslf3dPXf1xerxe6rsMwjLzxvV7vqI7d17mxuvfJqPdWYEIILKkEDp4yF7Zlw7ZsyMyzhJkwkIqlYMRTMGIGbDM3itRXUZ70KJXlOtCNTBJR+g1SSkhId4GXQGtSRdu+VvhK/fCV+qB6ivTrWABCUaDqPdVqLNsJhqiQUCBh2TYaS6rx0Mo1ACSEdBKMhJQQbmKPFD0dSueDc2fv3lJO4o77JJ13BFJxfOGtR/KqFfWuThTXvfj5uTc548N2n93AjXDHdTu3s1aGyQFWuaX7+NK6X0O3nOCpExBzfwJZwS+4CV9OsMhyA2VuL5lEo/SzDbNhP8yG/fmDqhqEz98TcMqsoJOQqVRWv0OTGwfLCo4V/AgEAAm7ow12RxuMXdtyziql5dCXnArPslPhWXjSqAe5iIiIiIiIhotxo1yMG40eYecmwQgBVHkkquvrnLiRacEy3diRacFMmjBiKUg7P2mlUN5M73d5gx5URwwngceWsCABy40juTEKRbgxiUFX2R6CdKhK0yEtJ3ZkulWKFEjAtmECMBUd7aUVUADAdhKMJGzAdmJHAnDjHu4iOriJSW6iknQTVuy8+3DGqol25M6rQKKQLYG2YDmQiVfJnthV79vKiv/JrPiRFPlxpPTZylgXFPe6THpNwcQp6cR13NiOTCcZCefZbG6EdaQRiddeBBQV2sx6qHXTnMVe1bVQqmuhVtdBCfRd6UCmkj2LzaIR2JGwsyAt3AU73Am7230Od0LGhv/7IEPTodY4i9HUumlQa6dCn7MAavXIEgSJiIiI6MTEpCIaF4UyO7u7u4fVl23
biMXyq3oMtOqrvb09b/zKysohj19o3v2NXcx7H874k82xpcsx79RTEPT5IKWNcCIFrduGN6Gjr2QXJ8HEeW0lDRiROJJdMcTbupFojyAVTRRY8OQ0hKZVo92YDyfEIKCqirPyTFWgKj2JRju7StC2rzVztR7wwlcegLcsAF9FEL6yABStwIqh7ASTrNLQmapDtg0BQMlK+hEqYNkCphtGUVUNUtdhqFqvPgsONPiFY+6QqmrCHsQqShMCCc3XT4xsZMEzW9Nh2woACduWUGBDs23ns0on/1joSTRKB4SE4gSMsisZuWR26etMOW8BSAMy0t8991WFKf/PXE8lpuGVgupJnMp9WF2dsDesQ3LDOkDVoM9bBM+SU6EvOxVa7dRhjUVERERERDQaGDcauI/RGn+yCU+vh/djn0DQ74e0LHQmkgjGTVREfchUrElLLx6ybZjRJFLdcaTCMaQiCSS7Ykh2RHu27eq5KOcoOK0S4baZzq7mihs3Ej3xn/Qir5hU0XUsAn9lCHrA42zFNaDcSj09RYRkJmaUfo+uCOhu4o8lJSwpYQNQFA0qJDoD5bhv1VV9xr8K3Fr/sjKudMvA1/9074CXpFQdvzjr+oH7HPTg7gRcX3n1AQSMeCYmo0JCk3av+8pfdJZJMgIyFa0yiUaKBWP/bhgH9jgJSdldeby9qgO56U2W6W53n8/5URaIKeXFkHq97o8QgGnCPHwQZuOhnkrlANSpM+A5eTm8J6+EOmM2K14TEREREQAmFdE4mTNnTl5ba2trgXcOrLW1Ne8Le3V1db8Bkjlz5uQFh1pbWzF79uwhj9/S0pLXNnfu3D7fX19fn9c23HsfzviTTSpUBmXWXOhuUK4awBnRJGq6EzkVhtKkmwzUn1gsgcZDrTjccAyNB1vQcqwTkEB5VQlWrz4D8swzEIslEU2kYFtZe9ED0D0qvLqG5hf+jFA03DOSKYHWKJKtUSTRgm5FoLyqBDVTKjFlWiWmTK9EWXkIXo8OpVdZ7JwUFCnhsS2EPn427O4uRBoOQDY2QDlyGGhqgEzEYEoJA1mVh0RWL5kPpHclogKBiT5iFVI6CUM914ucZ+GWmbZUPbfiUZHZErCks9pPUQRsKDAUQIebhJUOqqWDhFk/K5m1Ii1nG7i84NLYySnvnSMrgSz9s8oEnHr1kQ5yWRZSH25Fatc2iN8/BHXKdHhPWwXPaaug1k1j0IiIiIiIiMYV40a5GDcaPaYvkBM3qpISHV1xTD/YBpkVK+krFJD97dkyTXQ0tqGzsQ1dTW2Id+RWiZJSombRHOz0zs9pVxQFXo8G3as5z5qGHbsOo+2VrQAAze9FsKYMoZpylE4pR7CqFKqqQFWVTJWgQhPK+2Yv4cRBTAPW0q/BiHVDHDkM0XgAyuGDkO0tcOsRwRSKu2gpfWH6qZ94UX/xkl5vK1DoKZdwtoCTBcMTI4lZ9FxrKwpsRUW6lpIlJWxpQ3ereSMdOcxZdJZOMupJNHJiKLnVjNIyC78AwDAGDimlx+0zBlQEfcXzhAKz4QDMxkOI/fFpqJVV8Jy8Ap6Tl0OfuwhiGNs/EhEREdGJgUlFNC6WLVuW19bU1DSsvpqbmwfVf+/zmzZtyht/xYoVoz5+SUkJZs2ahUOHDmXaIpEIuru7h7VSbDj3P9lND3oxPTj0kuU9yoC5dQCcz7m9M4p3Nu+HZduYH/ICEqgMOP0nkgZiiRRi8RRM0wYsIBqPo/VYJ/SsbawUN2nFiUs4rxPtETS0R9Cw3fmzUhL0Yd6sGixdOA0nLZqOirIA/D4PNLWP1Wq1tSibOx8tCQMRwwKkRKqtFdFD+9Hd3Azb44FipHKDQj3L2PJWZuXpK4YjAKn2/X8v6W6tQokreU3DDxTZmuZUKpISNiQ0KSGlDQMCqqJCBZx90bKqPOWu+ioQCEqvKMtJlBJuwaK+55pJCMpZWZb9ukCVopzPf3CBpJwqRTnzSydG9Q5yCUhFgdl4CGbTYURfeBJ
a7TR4TjsD3lPPgDptJhOMiIiIiIhozDFuxLjReFGFwKLyAP5veWB4HazsSdjq6Ixi07aD2PDn/di17wim1pbjs8tmIGmY6I4mEI0lM4WUAQBJE0iakKpA076jKHcTVoRhQDS3IdHchsQHgEdXMWtaFWbOqMLc2TWon16NgF+H16vD79WhaYNP/uhKnY7WhJPsYnSH0X1gH7TGA4gfaYYUwk2u6bUora94kXu+v+JGaWZWPCVzsUgvRHMaTVXDkOJC/b5V5E3EhoAlnLiRIiVUISCFCgNOzEhFgXhRzkIuK7cCdqaakXNvOQu/+vgcBqNwTKlAbCm7veBHkP6A3M9d9JqrtJ0Fd+kiTEebET/2B8RffQFKaTm8K86C74xzoU6fxVgRERER0STDpCIaF0uXLs1rO3DgwLD6KnRdof6Pt/Gzg0Ppfk4++eQxGZ+Kq7I8iEsvOAkAYFo2uiMJdEfiiCcM+Lw6fF4dlWVBpAwTsUQKW3a25+1wZWdWQLky3+97ko3CkTg272jA5h0N0FQFc2ZWY+GcOixbOB11NaUoL/HD69Vz5qYKgSl+D7o1Cy0JA57qGmhV1WieE4PR0OEETowURCoJxUhCGCkIy0JuYCdrooMIgGT2sc8OMBRIwpGanvueAqS7LZlUFHd7MsWtbpSeXzoA4lQdEukt4GwL8PqBRBQQTlUmE4AqNAgpYUkbtrShSemUEFeVgglGubcr81acFfiEhq2/T2LwfReuUpSuEdVnye6sJCOz8SDM5gbE/vgU1Ipq6IuWQZ+/BPrCpVDLKoZ2U0MgjRTszg7YXe4jHoNMxiHjcchE1sNIAYoKoesQHi+ExwNoHuc4EIBaMxXa9FlQKqsZ5CIiIiIimqCOh7jNeI/PuNHEV1EexEfPWYqPnrMUbR0RHD7SAaEIeHUN3vIQKkoDiMaSiMSSSKV64g3JlIn9h1thZVVUTi9mEkLAtm3sPnAUew4eA97cAZ9Xx6xplaifUYX5s2sxta4cwYAHoYAXfp+n3+/GZR4NuiJwNG4AJaUoW3Yq2uYvQ2s8hVRDuxMrSiWhJBNOzCgrx8iRn2Q04HoxISD6WYyW/b7CfQzyu77IrRQkelWfFqoCSCcBy5aAlDZUN65iSglbKNDT8SLbdmInveN3APKrGaVbe8euBhn5KbTwrBj66SazkM6NGwklXa3buSe77RjiLz2D+J+egzp1BnyrPwLvyrOhlg99S0giIiIimniYVETjYtmyZdB1HYZhZNp27NgxrL62b9+e13baaaf1e02h82M9/gsvvJA3/lCDQ7Zt48MPP8xpmzp1Kmpra4fUDxWPpiqoKAugoiwAw7AQiSXQHU0inkjBo2vw6Bq6IwnougpbSkgpYdsF6mi7TbJAspEiBCzbxq79R7H7wDE898oWzJhSgbOWz8VpS2eisjyEUNCbEzQq0VX4VCdIlLBslHo0eBQ3oKH6AJ+vZ2jbAowURCrlPBspwDIz4xeUFVRSFQGP19P3h6SokKFSqBV1gD8AqKrTpqqZ11DVnu3Heg072DQR65t3A7EIzIYDSBzYA2/jAXgbD0BPxqEqKiQ0GOkt0GwbUtoA3OQlBT0Boey96icwDRJqVkAop2S3m7DVO8nIOtoE61gzEm+sBRQBtW4a9EUnQZ8xG0p5JZSKKufZ5+9zXCklZLTbSRTq7IDV1QG7s9057mjLtMl4tOez7rdyU/bPotefhkywToHw+6FNnwVtZj3UmXOgz6iHOnUGhNJHZS8iIiIiIjpuMG7EuNGJpqoihKqKEEzLRrg7js5wDDCA0pAfpSE/UqaFaDSBaDyFQ03tsEw7twPpfr92tycDkEk0Mi0bO/Y248N9R/Ci2I666lIsWzgNyxZMQ0V5EAGfB8GAByUhf8Fq1wFNxYygQHPMQAo2qn0aOg0DEAK2xwd4fLBCZT0L04wklJTzLCxryIvSBOSAC9EAAannLkaTqgqpqICqua8VJ56kKICqQgrnGCK9bVp+BEn
InsVoLdfcgWC4Hd7D++Bt2Aets9VdkCagQkLaNlK2GxdKL0hDOo4kkZNBlP8JZB30xDmGt/Qpq8JQUbdES/fr9KnablIV+kkysmxYhw8gevggok89BG3OgswWadqU6WO2uEtKCWmakPEo7Eg3ZKQLdiQCGY3AduNLQtUATYPQNAhVd157PFCq66DWTIHi6Sd2SUREREQ5mFRE4yIUCuGCCy7ASy+9lGnbvXs3YrEYAoGhlRf+85//nHMshMDll1/e7zUXXHABSkpK0N3dnWnbvHnzkMbt67qFCxdi4cKF/V5zxRVX4Ac/+EFeP9ddd92Qxt65cyfi8Xhe33R80HUVFWVBVJQFYVq2s/osmsDUujIsik5B07FORKJJQHW+DNtSQtrOc58xAulWNbKlU43YTTI61NSGw0c68No7u/GJC05C/cxqlJf4UV4ayJS91hUF0wMetKdMGJbElbOr3KQlpHeJz8mdyYnvJBIQsW4oiRiQiAPxOJR4FEjEIBIxJ9ig6pCaBlVXYJ73cUD3ALoHMlgCWVKWeSAQAhQFqm3j8kgyq7CORDq/KjtcItxAUIGFbfkfT1bsShUCSqgUniWnIDFvGY4ZJiAltI4WBBr2Qm/Yj3dMD/aW1sFjppw+3SQuKZ0qSelVZXb26FJCQDrvldKZo9umSBs13e24fOuf+vpjkXEsVIUXll3gBNPS9+r2BwBSABKK8yxE5rWQyIyf9yydOfhTCVzxwcvI/oNkuiNphUp22051qoKVjLJW21mNB2E1HkSi1zZrwuuDUlbuVJUyTcAwIE2j57lgslAxAmGywKEzXxnthrFrG4xd25xzQkApq4TntFXwrjoHev0CVjIiIiIiIjpOMW7EuNGJSlMVVJYHUVkeRDSWRFd3HJFYEh5NhceNIW3acgCKKgovQsuWlWgEIBMnajzSgeZjnfjTmzswY0oFli2chiXzp6KsNICykB8V5QF49Nx/FtAVBdODHhyNG4iZFsp1p4JR7nA9C9My3+otEyLlVMCGZTpJRrYFWG5Vn3S+UFY/whbQPFmVtoUCGSqDXVoOWVoOu7QCdlk5jFAFbNRkkoh6B4JEr+fMZ9XvV313IRt0+BfMhkdVYNgXotUwoXZ3wtuwD77D++A/sAv+1iYnMcVNRJK2W+lIUZ3FaDkJRoPZdsz9DAvNsa/FbL23NxtmWlL/FEAApjuSJt3t3rKqdWeSjNxEIyEAWCbMPTtg7tmB2JO/geJWu/YsOw2eRSdDuAsJhxJ7kZYFWGYmYchqa4Hd1gqrvQV2eyuszjbYXZ2wO9shY1Hk7h84eMLnh1o7FerUGdBmzIY2fTbUqTOgBEsgdH3gDoiIiIgmGSYV0bi56qqrcoJDpmni7bffxsUXXzykft54442c41WrVmHatGn9XuP1evGJT3wCjz76aKZt06ZNiMfj8Pv7rrjR265du3Ds2LGctquuumrA684880xMmTIFR44cybS9/vrrgx43rfe9D3Z8GnuaqqCsxI+yEj+uu+wMxBMpxOIpNLd0Yf+hFjQd60LT0U40HulAyrAySUa27VQzGijJyLYlIGw0He2ElBKWaaOtI4q2zijKSwKorS7JlMmu8uqo9GhYWObP7qZfztf/qQXf2/e1hYMe2aGEBaWBAn0MKgqU896+Yi8Jy8axhIFSjwoLEhHDgllZi3BlLRLLVmN7UwcisQREMg4lmYBiJHMr4gyY99J7izRnMinRs/JP5ASMel5HvQE0lU8ZxD0OXSAVg5EpJS6hSKdKkeVOViv4s8kv151JMkonEAnFjYVlJSQBTqAnHh3WXGVOohGy+u1drQiFf9C9Vx9m5ppe4egGu6SE3dmGxKvPI/Hq81DKq+A5dSW8K8+GNmchFFUd1vyJiIiIiGh0MG7EuNGJLhjwIhjwwrJsRNwEo3g8hfLSIKbVlqOlvRuW3bMATdqFvxM7X4tFJn6UiRMBOHC4FQeb2vHi69tx0sJpuPCsxegMBxAKelFRFkT
A31OtRRUCU/062pMCtpT48pL8OJB0v6/npNH0Cp+kv4MrqQREpBtKpBswU4Dmga07FWOM1d+D1HRnQZrP5yTqZFEA+KXEF9JbwGWN0W+elZSZJJbMhmN5CTzOk19ToLjvtSHRXVKO2NLliC1d7ryhoxXbDjYj3tmB8pbDELYFJV25SNpIL87KnpezhqpnaVp6IRiyjue0HkJ92+F+7sKxp6YehyunZRaQCSmhoOe1dGNPUoieh5Pt1LMYLmcOPXGWunAL5rSkt1iUEBJQhZuYpqjQFOHcTPp+00lGbqKRFErWojR3W76OViTXr0Ny/TpnYVdJGZSyikyla7WyBiIQdBajWSakYUBaBmCakPEY7M4O2N2dsLvDkN1dkMlEv5+PzIkdFXot8n/2gPPfSjwOeWgfzEP7kNzgtuseaDPnwHPycnhPOQNKWTmEP8CK10RERERgUhGNoyuvvBJf+cpXYFk9qx7+8Ic/DCk49N5776G5uTmn7dOf/vSgrv30pz+dExxKpVJ4+eWXh7Ri69lnny3Y70AURcHVV1+Ne+65J9O2fv16tLe3o7Jy8HtR9x6/oqJiyME1GnuKIjKBo5qqEixbMA3xhIFYIolINInGIx043NyBxiOdOHykA60dEdh2OsnI7jt6IoGptWWwJdDS3o2SoA8+r47OcAy2lJhaW5Z5qxAC6qgXaRnuAEO5rv/3elQFppRoT5oo11WYtkTCspGybKw90oWwYWcqKlmhMifQYiQhUkkn+JVKZaoXASjw2edXUFIACGXgBBXZ39z7PFXoRH61HiGlE8WTThDFdrc4UyFh2RKQlrvyLH29yJQIzwSNMolDVq/uc6sUZZcm76nEXeAPaaHkoULzH6re4xUaOrtst6I4wa7ONiTW/RGJdX+EUlkDz6kr4Vt1HrSpMyC8vvxOiIiIiIhoTDFuxLjRZKFmLUQzDAvVlSF0RxPo7Irh8JFONDS3o6GpHY1HO2EYZiZGlM4TcZdbOQkyCgA4CUbSdisxu3GQP29vwLbdTbjukyuxcO4URKJJ+H06KstDCAW9AJx4UZVPR4muImWne3ZkV7vurdDSJQE/UFUx5M+jd2Wb3uPmJzn1vr73PPqWsiU6UybKdBUpSyJp21CFgK4INAXKsKMcSJZMgZixCCKZgOIuShOWhf4Xo2UtQuv1Hp+RxKy2xgITz3ohgIaq6dhYf2qveyhOMO+Uxh2o72gG0pWu3OrSTtzIqb6kCeFUdRJKplpTJqEq/cjkfGVvl+Y8rK5O2OFOoGH/kOaWSRYqtPisaBWw3VSv7IVpdgLG3g9h7vsQseefgGfhMniWngZt/mIooRIooRIInVumERER0eTEpCIasa1bt+KFF15AKBTCpz/9aVRXVw/quqlTp+KWW27Bfffdl2l7+OGH8a//+q/QtMH90XzwwQdzjisrK/F//s//GdS1V111FRYsWIDdu3dn2n79618PKTjUe/yLLroIK1euHNS1X//613HvvffCMAwAzoq7hx9+GF/60pcGdX1LSwv++Mc/5rR97Wtfg84SrROOqioIBb0IBb2orQJmTa9CLJZENJ5CNJ5EJJLAwcZ27N5/FLsOHEUkmoRt2/nbpAlg1an1gASisRSisRT8Ph211SUId8fh0VVUVYTG6zbHTaVXh2FLdBsWqrwaGqMGXj3Shc6kCV0RvXakF4DqB3zOylMpJWC4ZbyNFGAaPaW8+0ie0WFDV3uvYspOvHECQJquOcEZRXGfndfSfc7euz69HVlOqe/egRTLgjAMwDKhKBJ6SSmQiMOWNizLdhLShICqCFhQIOBUL0I6EJYuay0UQNWcMTPblmWX886tUlQc+Z9P7v57Ive9OXonFMmseWfNOX1/Vn6Skd3egsQrzyPxygvQZs2B5+QV8Jx2BtSyCgivD8LjLebNjph0f5bcvo2IiIiIJgLGjXowbkQD0d3YTVVFCOYUC3Nm1SAaSyIaSyKVMtF0tBMHGttw4HArGpo6kDK
tzAI0Ox0nEBKKokDTFacSdlalI9uWUDUF3dEEQkEv4gkDjUc6UF0ZyokZeVQFnklS0NeUTmXrKq+GowkDlpToTpp4/Wg3LAmnag9UIBAEAkHnc7ZMiEQCIhWHSCZ7Yg6ZEIXIyRHKpipK/u+wAl/vVSEKLGXrpffpdNXm9GQKxa6kk3umip73W9LND5J2T6VrKaGZhtOfqma2MpNSOjEimV21SeZsl9YzVIF4T84bZIHXQw04FagQ3p+cxYO941sCUlEAy0Jyyyaktr4HUVoOz5JT4DnpdKg1U5wt0oIhCJ+fcRkiIiKaNJhURCPywAMP4HOf+5zz5RXAXXfdhXXr1mHJkiWDuv7b3/42HnroISSTSQBAc3MzHnzwQXz2s58d8Nq2tjb88pe/zGm78847UVZW1scVuTRNw3e/+13ccMMNmbYnn3wSu3fvxoIFCwa8/sUXX8TmzZtz2r7//e8PamwAmDdvHm6//Xb8/Oc/z7Tdfffd+Mu//MtBBcfuvvtupFKpzHFNTQ2+9rWvDXp8On5pqoLSEj9KS5zElmTSwKzpVTht2UzEYik0Hu3Arn1H8eH+o2hpC2dKYc+cWomq8hD8fg8EBGLxJOIJA+2dMVSVB9HaHoFH11ASmnxVWGp9Okw3sWhDSxitCRMSgCX7rRfkcCsZ5ZDSSS6yTAjLdBNzHFbAi1jlVZDppA9Fge0PQAZCsP1B5zkQQhd0WIe7RnZjvSYv3Cr8dm0dui66G0p3F7S9O4APt0DZvQ2IxzIBItOtTqSpqpOkkn5IG7BswBI9yU2KDqG5SVZ9VRzqvTywUDAnsyUZcp4HE4PpWanW+96Vfq+X2QlGvct2A06SUSZ5S4Fx0Cl/HXvx9/AsXAZ90TJos+ZBCYUgvH4In8+pZKTpIwoeSffPjbQs58+PZWW1uc+21TPnnBLraSIv+UwoKqDrEB4vhNfrlBUnIiIiIhonjBttzmlj3IiGQtPUTAUjKSXiCQO11aWYP6cOqZQJ07Rw+EgnDja2YveBYzjc3OFUmZGAbdmwLUAoAqqiQBWAbUssXzYLqqKirSOKzu4YSkN+lIX8aG2PwOvRMxWLJpNan46UZSMFoNKrYWdnDGubu2BLZ71VHiEATQdCOoASJ0xhJJ3kItNwFqSZRk+sqEC4pGC/6b7hVGtSFAGhuovPVNVZAJZelKZmLUwTWfGbPqNcErBsZ16mAeldCIhuKMeaIDrbAMuGJSVsNzbSU7HIhgYbsEzAchNuVBVCVeCWxcps+Za7KC1949lVhgb8UeR+SgUXnw0vppT3aWTHinovTOsVL5LtrUi8/SoS61+DPmc+PKeshD5vEYSmQwRDUPwBZ5s0VjEiIiKiExiTimjY2tra8Nd//deZwBDgrIL6whe+gHXr1g2qj1mzZuGrX/0q/u3f/i3T9g//8A+44oorBly59v/9f/8furu7M8f19fX4yle+MqR7uO666/CTn/wE69evBwBYloUvfvGLePHFF/v9x+J4PI6vfvWrOW3XX389Vq9ePaTx77rrLjzyyCPo7OwEAOzduxc/+MEP8E//9E/9Xrdjxw7cfffdOW3//M//jFBo8lWhmQy8Xh1er47K8iBShoma6hLMnVWDi89ZgiMtXXj93d3YsacZq0+fA9utUqTrCoJBL7ojSXRHEtA1BaUhP5pbuqDrKnzeybUyUQiBKQEPurtiMKSEpgiYtuyJcwy9RzeYk/9/o/FANZqmnjxgD13xFMxCdbqHoo/LDVuiJWkCniCwZKXzsCx4D+9DYM82lOzZCm9LEyzbhpQ2FKEAqgIobvJKOoDiBpAk4CavOMGlwS7+GmCa7kk3sCRlz1ZzvctaD9RHtnSCTTqo5gagnEN3maXsnagjAelWoIJwkowsC8kt7yG59c8QugZtxhzocxZAmzMfakWV04/iBPOEqgKaE9wTEE7p8PT/N2aNkU4WKrh6z7YhY1HY0W7IaAR2LAKkUpBGCtIwII0U4L6GZUF4PID
H6yYP+TJJRCIQglpTB8UfAABYiWTuOPEYZCgEofSupkVEREREVFyMGzFuRMUjhEDA70HA70FNVQlSholINImSkA/1M6pw/upF6AzHsG13E7bsPIymY13O9me2hGlbgAAqygL42HlLEU+a6I4mYJo2OjpjEBAoDfnQfKwTs2dUwaNPrn8yUITAFL8HDdEkulIm1h0JI2U7i9CEGLBWkEP3Quo9CVlOWMOtaGQagGlmklbkyStgLpmXc7n0B4GSUiBYChksgQyGYHbbsNuiRbpLp9qQVFXA64M9cwqMCz/inGk5AnXHZuCDd4FD+7IqXauwFBVC2hC25cRs3JiRFG5ykVDcbcQUN06kQmYqXPVOKHKfRa95pV/1V9Go4BXuKAXylQaOIaW3PcvtT2QSowrHi4w9O2Hs3QWltAzeU1fCc/JyyFCpc7GiOslFPr/z8HideNEIZBadmabzuacXppmmsxDNMgFbQqaTubLjXZCZOF5mIVq6GrrucebnxpKIiIiIBjK5viFQUW3YsAHhcDiv/Y033kAsFkMgEBhUP9/97nexbt06bNiwAQBw9OhRrFmzBs899xwqKgrvu/39738fDzzwQObY6/Xif//3f+H3+4d0D0IIPPTQQ1i5ciXa29sBAC+//DK+/OUv4z//8z+hFPhH10Qigeuvvx47d+7MtM2dOxf//d//PaSxAaeU93333Yerr746sz/4t771LcydOxc33XRTwWsOHjyINWvWIJFIZNquuuoqfOELXxjy+DTxeHQN1RUhVFeEEE+kUF4WwLS6crR1RFBRFkQklkRHOArDsGEYPckE7Z0xaJqKgM+Dw0c6MHtaFXR9ktSxdqlCYF5pADfNVXAwksSxRApJO6tSUXbEwQ0qiJ7DTMDYyX+R6W3jM+XFbTeIEVQV+FWlzwBGus8ar45V1aHM+6T7v9mFeNKhK9EryCHTZakLjG9LCa+iQM+aryUlpKpCzF2EjtkL0HHxlSjvakXFrs1QPngXStMhp1oRkEmUQXZAIj0A3OBF9uqwzOs+tiWTWcfZVY0GlVA1nOVm6Jmz5QR9nO3jBBShQBfoSahR1Z4VdelKTel5WulPWECaBow9O2Ds3QkIBUplFbS6aVBKy51HWTmUkjIopWVOsCarApG03WBPLAo7FnEShtKJQ5EI7GgYdqQbMhbrmXdf1Z/6+szytooTUCoqoU2fDatuGryGgmRJOSAErJYjMCNdTqArGIISCEFw+wMiIiIiGgWMGzFuRKPHo2uoLNdQWR6EadmIxpLw+z0oLw3gnBXz0dYZwdadjfjz9kNo64wCEjh18Uy0d8VQVR5CaciHru44OsNxtHdF4XEXoDUe6cCsaVVQ87Z1P7F5VAW1fg+ipoUqn4amWAq2BExbutufDYNb0Uhqud+5jdqZSFQ7CX79RUasSLjg+WJsuCWl7Nkur7oO1nkfh/KRSxFta0Nqy3sIbN+E4MFdUG0LZnohmnSr+KSzeEwLgNVPlaSsqJro1dTv5EZ8d33MY2CKENCFs3isYLzIdu7Z7mhDfN2LiL/xJ+hzFkJffDL0+Yug2BZktCeZFenkK1UDNM15VhRkEqykzF2YZplOPMlNIhqxdDXy9GH6RTyW8ymZRm6VazmomB0RERFNJkwqOoHce++96OrK30rnrbfeymt74YUX0Nramtc+c+ZMXH/99YMar6+/XEoph/QXT4/Hg2effRYXXXQRtmzZAgB4++23ceqpp+Jb3/oW1qxZg5qaGqRSKbz11lv493//dzz33HOZ671eLx599FGsWrVq0GNmmzNnDv7whz/g4x//eObzu+eee7B582b84z/+Iy644AIEAgF0dXXhj3/8I77zne9g+/btmetnzJiBF198sc9A1kCuvPJK/OxnP8OXvvQld59zGzfffDP++Mc/4stf/jKWL18OVVVx+PBhPPbYY/iXf/mXTCALAC688EL89re/HdbYNLH5fR74fR5UVYRQEvKhKxxHKOCF36e
jqzuOcCSR8z26tb0bU2rK4IGGxqNOkEgZbmBkgtIVgeluKe9ZofFdiVPt01FfMjZb0XWlTIQNC6YtUe7R0Jky0VZShebTL8T0cy6F2nYM+vZN8Gx5F2rjISfoIBRABQDVCXKkt0Zzgx5FiPL0yFqN1rMyLXPCfU/P6+zEq8zqv/RxodLVlpM8ZgsLlqJAVZVMkpiTg5MOgLnX21mr0tJJRlm3ax9rRurYkfxS3LmpX/n3mZcsVKTPMPv/c92XdtsxpNqOwZLAfFvC0j2IVdbBiq2APHU5IG3IWMRJTtM9UIIhiGCJs5puBNu6EREREdHxi3Ejxo3oxKSpSmabtGTKRFd3DIoicP6Zi3DeqgXYvKMB23Y1YX59DRJJE03HOlFeGkBZiR+GYSEaT6GloxtTa8qAFHC0NYxpdeXjfVtjrkRXMTXgxQ1zarClPYq9kQQMO72QK70UzCFl1q5cSD+LTKBCZn3fz4RRXJYEjsSNAecjIFDj0zPjp/uRhRakifzUmZwiQe416TiKBaAx1rM9ogDgVxVUVlZCnnMRjp3xESiRMKq3vYvgn9+EeuSwc8OqllX52Xbuy3ITVxSRE18ZisFHISRy4lLuTcrcm+072pITf0pXKxKZU7YELOH8o1levMjOTqpyY0amDWP3Nhi7twOKCm3mbOjzl0JffBLU0jLnfaYNaRpAcnhRIGlZgG3DNk2n+nS4A1ZXJ2QkDDsWg0wlgEQcMpmCTCUgk0nASDlb5Gk6hK47W7PpGoTuVLtWqmqgzZwDtaYWiu7p2arPZTUegpmocmJF/uCIKy4RERHRxCck045PGPX19Th48OCI+jj//PPx6quvDuq9LS0tmDNnDqLR3DKs5557Ll5//fUhjx0Oh3HHHXfgscceyzvn9XqRSqXygk719fV46KGHcNZZZw15vN527tyJG264Ae+//37B8ZPJZF77BRdcgIceeghTp04d8fhPP/007rjjDrS0tOS0K4oCTdOQSqVy2oUQ+OIXv4gf//jH8HiOrz2bt23bhpNOOilzvHXrVixbtmzMxg+Hw3jllVcyxxdeeCFKS0vHbPzxEk+kcKytG4mEE5hImRbaOyJIJHtWm6iqwJGWMJbOn4q6mlJMnzK8oOZEl7RshA0LVlYySs5zdgJLr987Oekg6VhG1pns/JPswFHvakPZ42WO+4miFMgbcbdvk5k55BW2gRN4sqXEkbgBS7pJRUkTzzd2oNuw8MkZFQhlVa1Swx3w7dsB357t8B34EFp3r390SK+iynwA/URsCt5gT8JQJoGor3tWNdgeb89D90LqOmDbUEwDwi0jLkwTSjIBNRnv3UMmyUiREpo7SR3SSahT1AG3ActJUspONCqadBRygOec96Ynl/mf3OOsOVtSIpq1z19QEVA9Hujzl8Bzykpo9fNyV1crKkQg6ASOAiEGjojohDFZ/35IE8d4f4eiyYFxo5Fh3Kh4xvt33mT4e4FtS3RHE+gMxzJxItO00NYZRdw99nhUVJYF0d4ZRcqw4PVqmFJTCgGB6soQqiom33Z5tpRojKWQtGykbIn2RDqmlptU1Pt1f3oSkWROvGkoy3kkcuNTo/UPOmW6ilKPhohhoTmWgiklQroK7UgjQu+9juC2jdAiPRXoZGarsKxOcqpap2NsQ7zbdGLWYGJPhWRWoQ3yvUJAEcKtSiWgC0DtN0bXk1jlBOd6DyagVNVAKauAUl4FpaISakUVlIoqCK+vZ/FeOs5kW5CJOOxIN+zuLshoN2Q06lS6Tle9jkWd7c6KSHj9UGunwqidhvf9FYiXVwGKgvPmzUaJL70YUzjVrkMlUIIlEBrrFBDRiWsy/B2RJq7x/g7FvwHQsNXU1OC//uu/8PnPfx6G4XwZraqqws9//vNh9VdaWopHH30UL774Iv7t3/4Nr7zyirOHM5AXmKmvr8cXvvAFfOUrXxl0ueyBLF68GBs3bsSvfvUr/PSnP8W2bdsy53qPv2rVKnz961/Hddd
dV7SKDmvWrMHOnTvxox/9CL/85S9x9OhRAIBt2zmBIY/Hg09+8pP4+7//e6xevbooY9OJwe/zYPb0KnR1x9HS3g0PgOrKEJqOdsK2naSj197Zhcajndh78Biuv/wMtLZ3o7qyZLynPua8qoKaSVLK+3A0iYRlo0xX0ZY0sLG1G1s6YkjZTrLRm8fCmB7wZmI+Al6I+tMg608DJCAindCONkFtPQIl3AU1GYNmGtBsC7plQrNM6LaJ2nAryhLh9IZtfc5nb/UshINlsLwBWH4/pMcHeL0QviDg90MGQ0CgBFYgBKl7IN3EIykU2G4ikiqE+5BQAGiQUCERioRRevQgtOZD8BxthN56BFpnG6RIJ1jZUKQNU0rotg1h25BCZLZ8K/TrXGStWoObYJMJ5PX13HNx796yVsU5zyP6v5ABkrIAQFg20OsfF2CkYOx4H8aO96GUVkBfcgo8Jy+HWlntxN0iYViRsDM/nx/C53Oevf4x3SotvRovHaTLlARPJ01lJ4YpKqAoAyaJEREREdHYYdyIcSMaP4oiMtWLYvEkjrQ4iSB11aWIxJLo6IoilbJwtDWMgM8D07KRTJpo64yiujyE1vYIfF4dwcD4Vnkea4oQmOLX0RBNwaMAUwITc7twKWVumkvWAri8wtAAYqaN9pSJbtNCUFPQEE3iT02dCGoqLplWBmvKdHR+8gZ0fOxaePftgH/HZgQO7ISn/ZizNRqkk1wjs76zZ8tUixaFc32yF031WgDonOk14UyfWfGVXjeXnVeUM6qUWQvI0seADQnLllAVBaaiQOknZiOEcGNEWXGinPu3Ybcdg912LP9iRe1536C59ymU3B9gduVs0ev9zsyy7jvr2L1/mUzAbNgH+9A+zLUlbFVDd91MmPFTYc5fDDUYgtB1yHgUMh6F3XIEwpeVYDSGMSIiIiIaX6xURCO2Y8cOvPzyy1BVFddeey1qamqK0m9rays2bNiAvXv3IhwOw+v1Ytq0aTjttNPGJPNu79692LRpExoaGhCPxxEMBlFfX49Vq1Zh+vTpozq2bdvYvHkzPvjgAxw9ehSmaaK8vBwLFy7E6tWrj/vM2PHOlmQ2MWBZNg41tSGVshCOxLF5+2G8/u6uzOo0RRFYc8lpWHHybMybVQNNYzWSE1XctNEYSyJqmPj9oQ4ciiahuok5w2Lbzsooy3T2eLcsCNvCJUd34tToUUhNA1QNUtOB9GvdA1lSDpSV43FUosFSe0pMDyIxZrA+Nr0cJ1UEAThlqaWRhGg9iuTmd6DseB++Iw3QBdzKRTZ020JOWE3tO7loorJsiUjWP3CEvB4oEm6ijo3M/QsF2sx66MtOhzZnPlR/EKLQamZV60k0Up2fr/OsAmp+5aeeQJ27es+yIG3nz420LKfEtmVB2hZgmTltw1t76fwchccp6e0kQ/kY6CIi/v2Qjnvj/R2KaDQxblR8jBuNzGT8e4FtS7R2dKOjMwYAMG0bHZ1RRN0tsDweFamUhUTSwL6GFnzsvGXw6Cpmz6iCR59865ITlo3WhJHZ/gzoVaVoEP+kMpH+0UW6Fa7Dhon322M4HO2JI1w0tQynVYUy78tUz04mgKYGaB9ugbZ/J9SGfU6cKHt7sN4Vlociu4KzEICqwy4th11aDllSBqukHDIQBLx+IBAAfAFInx/S63e2LLN74gvCtgDTgNLRBqWlGWpLM5SWIxDhTicByJYw3flqkM5ObkLAo6oQytCDRLnbsWVXWurns8hLECrSgrRBzNO2bESMni35goqAXloGfeFS6AuXQSmvhPAHoXhzkwyF1w8RCkHxh5wFgydSQI2IJqXJ+HdEmjjG+zvU5PtGQEW3ZMkSLFmypOj9VldX47LLLit6v4M1b948zJs3b1zGVhQFy5cvx/Lly8dlfJr4VFVBXXUp9h1qxZub9uDt9/ZC2sjkbti2xB/XbcXsaVWoLAuipmryVSuaLPyagpCmYlNrBE1xJ1hou0VehvVVX1EAxQP
oPQknEoCy5BMwS33ue4S7eqpnn/r0WOrBNqA7MZJb6lNWrA9CEU5J6emz4Z02Cy0fuQxm02GUbHsXlXu2AC3NsBQVqsxKYLHcZBdFcRJTTshgiIBQBQDF3clNZhKMzEP7YB7aB6W0HPqCpdAXLIMoLYXw+qDoXsCjQ1imW4a7u3D3ipugmO53hKSdtZ0b3MQk6ZRYF0Jx6pELxalSJNx1iJYJGTch41nbbCiqk1zkD0AJlUB4JtdqWyIiIqLxxLhR8TFuREOlKAK1VaUoCfqcqkUpEzWVJfD7kmhtjyCVMtF0tBNr3/4QyZQBr0fDxWcvQeORTtTPqDpBvx/3zacqmBEszvfGdAKSRG4cZiifaTqZJxP26Cc3ReY8y57cnvS4mfGdo5ZEClHDQkM0idePhmHaEnpWIs3rR8OIWzaEEFDQk+8iJWCFpsA6vQ7msvNgxeOwu9oh4zGoiQTURBRKIgY1HsWyg1tRF25F4Zk4k5HBEHbOXIL2slrA6wN8fkh/wP0uH4TweqGk56CqUFQNlqLCEAImBCyhwISACQBCgQLpvt+pcj3Lr2PaAhVWKgkzmYRMJYFYBGrzIWgfboGydydsw4Bl2xDSWYRlmAZ0tyrRUCoji15JQcerzDx73ZtQVdiRbiQ3rUdy87vQ6+dBX7gUau0052fhD0Dx+SCTcchkHDZaeuIugYBTzcjnL+rvDekubnQWo7nPpuksUstsI2e7lafSC+kAoaiZStdO7MiN+Xk8znwZHyIiIhoUJhUREZ2gAn4vSkM+NDS1Q1VVmLblbGflbgWVSJl48sX3cMf156GyPAh1kmwHNhlV+TQsrwpib3cCByIJp2CMlNCK+OVe9+iFK9v0Mpo1sew+VnwJIVAV9OPIzNnomDYTxoWXoaxpP/StGxH68AMgEnaCDbblJtg4AQnpbqs1nFVpE4FI73kHp8qQdAMwdncYyU1vI7n5Hej186DNXQi1dgqEogG6B8LrhXArUQmhulWK3KQe2+pzPCklYDmfbzrok9nizLIgbRuwTffY3fZsKIlJQnEequYEhzweiHQylG31lOtubwF0D5RQqVOu2+cb8WdJREREREQ0Efh9HtTPqEJbZxRtHRGEAl4caenEn97ciaajnc6bJPDaO7uwdOE0TK0uQ3ckgdIS/7jOeyJLJ1aMaPfzrMVaQ+ts4DdWeHREDQuHIkmYtnS3j3fyMADAsCXePtbHwqIcGhCoBXzp7/Myk1BVceZ5KDG6ncQPVXWqWqtq5nV6Udr7YWBfZjdH0RO3kAJIprenz97ayxzsBwF9Silm+PyAr+fPskylIKfORGrpclgdbYjt2g7/h+8j0HQAmm1l4meqaUCmk1Emw7brbixMupWujf17YOzdBbW6FvqSU6DNqoeteiD8Aefh8TrbzblxFwBOfEbTeuJHquYea24lKTdNzt06LrPlvWX1JBDZVqZK+nArXvW+Kq8XoTjJRV6fU5Xb54fQB45vEhERTTZMKiIiOoFNqyvH1ZeuwL0PvwbbdsrZZlYs2RLNx7rw/o4G1NWUoqIsOK5zpdGjKwoqvDo+MaMCD+w5hojhBEbkcKsVFTDYBKXRXNzYX/qJqghUenS0Jg1EPX74F58KMW8xIuEO1B3YDmPbZhh7dkCmUk6wIpNcZEGK7BVN459glM6dKvZUhLv6zkkwghM4OrAXxr7dEB4vtBmzoM2shzplhhMEyuvA+YzgbjGHdCWkTKnvkRR/d4KHmfWdQnHu386qiJROQrJNSCMBGc26Vvc4QS5/wAkSGSnYHa2wO1oBzQMlFIIIlkDxB0YwRyIiIiIiouOfEALVFSHYto2NHxzE43/YlNkGTdNVSGnBtiSefnEz/s8NH0FnOMakohOYX1MQ0FR8dHo5Du8+hoRlw5JOlZ9hURQATuKN04OEWlUB4a11FhD13tIr/UJRIYwYhDRQvGhV1rQK9JlekIRQCZTySviranFk6QqoXe2o2fU+SrZvgtndCcW2nOp
Fpj2pkoucHC6nOrSUgNXZDuvNtRB/DsGz+CTo8xZCRN0EHN3jbIfmdbejVwEYKUjD+d1SjO0A0wvTZLrSuJuE1FOdyI09pSsXpecvFOfnla6srqoQmu7EiRQbMhGDTMSALncgjxdKsMSpdO3lQjQiIiKASUVERCc0TVOxdME0fPTcpXh+3Ran8mvWtzjLtvHetkNYecoclJcGjouECRodFR4NYcPCZTMq8OTBNqRsWdRqRfogq/mM5p+x/ramB9yt4CwVEdNCR9JEnc8Hu3YaOqpqoC86DWhvhdizDXLPDsiD+5xS2NJ2+rUsJ3jhBlMyW7sNNKes1329V2QSb9wrZNbrrPuSPSk1vXrMyg5z55VevSfzPpTeBdf7mFM68AI4iUbShnFoP4wD+yA0Feq0WVCraiBKnGo/SkmJEzCy+q5UlJ6rFIA0DCCZhEwmII0UZCLuvE7Esx6JTKDIKW1tZoJFAJwgTyCYKb0t/H4ovgBEMASlshpKqBQwUk6ikZGENJKQ0bCT/OTzQ0mX5DZTsDvbgc52WKrmVC8KlTh98nciERERERGdoCrLQ5haWwZNVaGoArYlYVk2VFWBado4fKQDG7fsx6pT5yKZNOD16uM9ZRolFV4NccvGuXUl+FNTV161opERUL1eKKUDJ6aJthSGUn1oKAa6F6Fp8FZWoTpUirZgCM3l1TBWfQTBg7sgP3gX2sFdEG5MQpimuzBLg8gsgOpbf+ezpyX7eZ1do6n3c39j9XWcfa09iJSfnASjZALJze8gtW0z9AVLoM9fDCUAJ74TcS9QdUBTAcWtTqSqEKrqJI8JxYlxZRaJoae6Va/EoXT1Iqcy9vBTkwp/DgLQ9Z6FaOnq3Kkk7FTSWYime9wEo1JWuiYiokmNSUVERCe48lI/zlk5D3sOHMOu/Udgmj31XGxb4vCRDjQf7URNZYgrz05gqiJQ4dEwp8SHm+bW4J3WboQNC15FQAgBWzrbh9nSCSakV3Ap6RwVCCjCCSqZtoQpZc6zNshIU4mmosqrZUp323BKSVtukpPlzkO6YyvuuIpwngUELClhuO837Z6wwGCCIHHTwmtHurCwzA9NCNT6dSRUHYmyashgGeyyasjFKwDTgNp0EOr+3VD3fwilo8VN9pG9EmfchJ6sRB5Hr7lkJQn1Thrqj1Q1QNMhNQ1S053XigJhpHIePZlHWX0WCGwZcvhb0DkBMwFICavxIKzGgzm3p4RKoZSWQ6hKzzSk7Zy0LSdJKBGDTMSHOYNe3CAPOtsLzzcQgjZ7HrQZs6FOmQ6h604ZbssC4hHY8YgTEPP5ofjdBCOYsMMdQLjDCXQFQ1CCIedcoepM40hKyaQnIiIiIiIaNk1VMGtaJS45byl+/9Jm2LYFacusCrTAy2/uwMI5U9AZDqCuhklFJ6qApsKnKlheFcL2zjiaYqmRVSvqZbD9FKOazUjnEPToiIRKkQyE0JmMQ1twEqy5iyG6OqB98A70rZsg4lF39zXTjQe5C9AmqIEW6fWWSTCyTBg7tyC1cyu0aTOhzVkAbdoMJ1ZhGc4DPT/Xovx805WGVHcbNVV1Fv0JJSuIqfRUwLLTVa3dakZSOlurGUknPmSknIpKsYgzP6ECfj8Uf9DZCs1Iwe5sg93Z5la6ZoIRERFNTsfXv44QEVHRCSEwtbYcHztvKfYcPAahSCdIBDgVYS2J93c0YMa0CiYVneDKPSrChokpAQ/On1KGsGFBFQJ+tf/AR++4iwLnz5WC9Pd157Vh970BWTqP5qzaEpxZW+IGLGSvujkib6xC/QgBd2wnymnDSXTqr+pSYyyJ9ce6cTCShASQsiWm+D1oSxrQ3a20FCgQpZVAqAyIdMP2+GDPmAd53qVAuAPagd3QmhugdHVA6+6EGg27q9SA3uk7fQVKBABb98D2B2D5gpD+ACx/EDIQgh0MwQ6UQIZKYQdLIAIB2KoGWwpAEbCFgA0FUiiwpQ3AhrTcss5GEmq4E56jh+FrPgT
PsUZ4utrzV2JJwJJOblAxCQAyEoYVCRe34xGQsQiMHe/D2PE+AECpqoE+ez70BUsgQiWQ8RhgGkA8CjseBSAAr8+tfhRw7qm7C1a3W/9a8zhbp/kDUHx+wOMdcVKPU6rbdFbimW75biv72XTeky7dnX5Or+aDu2WdokAoas9rTXOCX14/hM7APxERERERFVZRFsTyk2Zj665G7Np/DLZlw7IsqIoCy7KRSJp4493dqCwPoroyBHWA+AFNXBVeDQnLxmUzKvDbfS2ImTZspDcyG5nB9jGaSUWD+fZ+JJbCu20RHIwksGZmJaQvgA5/EN5kHAIa1HM+DuXMi+Hdvwv6rg+gH9gFkUwCcCvpKIqT3CLSG78NX39xpfz7ya5+nal7nfMuKbKucufXx7I42JBQh7AFnYCE1XQIVtMhJFUNWv186PMXQ5tZD6EokJl4h/s5SbunOnbWIj2hCEBx4xqqCriJQ85rp8oRjBTsaAR2dxgy2g2ZSjkJQobhvjacLddsy6k+5A/kVLlWAkGIYAhqsA7SMiFTKcDtQyaTzvxiEdix9EK0QK9K10wwIiKiyYlJRUREk4DPq2NefS1mTq3AoaZ2GNLKfGO0bBvv7zyMC89ajFg8iYDfO76TpVEjhECVV8eReAoluoqoacOSEhFzoC2rjn/pRKdsEhLtSRPvtUZwNGFAdasjCQBtSRNH4ynU+T1w6iX14g0C3qATXIjHYYcUYFkFsGyV07cEYBpQot3Qutqgd3VAScSdII2iQqoqpOI+VBVS12EGS2EFSiC9Xmc/d1VxquEMZzWbUJy7Tv9NzuMFgqWIT52FrtPOhrRsiEgnPA37Edi1GaWN+6G4W4iZUoGiKsMbdwKz21qQbGtB8r23oU6fBe/yM6HNmutsuxaLOglGyTjsZBzoEIDXC+EPQvH4ILwewExBRlKQkbDzJyZ7dZyqAprmJPZoWk9Vq3QCkC2dFDrLcgJpppMw1JMcNFzS6cfKDQLmBARVN8HI53OSjHz+SfezJyIiIiKiwhRFoKaqBJddeAoONb2KWDyVU7VE2hJbdzfh0vNPQjiSQEVZYPwmS6MqqKnwKAqqfDqumV2F99qi6EiZ8KnOVlXpxWLOs8xUlFaFcB89VaZt9+uwLZ2FYAFtcN9BK70aEpYnZxt42x3XkjKnTykBTRHQFAFdOM+aEEgP5VTi7qnKHdIL1222pcSBSBIbWyM4HE1m2htjKcwKeZ0K3R4fZIXXiQ9Fu4E5S4E5SyENE76DuxDcuw2hhj1QY1EAlrsaTrhxqiKt6pJuXCGTPCQhVQ22xwfL44XtPizdC1v3QEgbimlAGIbzbBrQEjFokXBOxW0hRF6lIksCmsxfZDgolglz706Ye3cCQkApr4JaXQulqgZqVQ3U6loIn7uotXeR70QMdncXZCwKO9INGYvAjkYgoxHYkW7YkbBTWagIRKgUev18aHPmQ589D0pZuTOHZAJ2LOosRLPMXpWu+0ow0t0FaAGIQABC9xRljoMls6oxOSuIbUg3JiWEAqhuJS1FZcVrIiIaNiYVERFNEtUVISw/eTYamjsyK84AABLojiaw5+AxlJb4mVR0ggvpKnwpBQnLRqVXQ9IafFJD+ru+nQ7kIB0kGtzWY0B6dZpwt1Qr3H+hssvZ33mlO156mzTATQsqcGHKkmiOu+WWpYTubtOmANgTTmB+qT/TTzro5YZpnPf5vFD8PghZDpFMAIkYRCrpVIxRdcBbCVlZmZl/oT3us6U/bYmsOJB7g9KtMiNVzV2ZpUJRBIT7rCiKE6BLbzVn2xCWDUhnj3lh286KrFQCUlWAykoYoSAaS0rRfMqZKDu8D9MO74XS0QLTNJ3KTuowk5omOKvxEGKNhyBKyuA9bRU8p6yAUFXIeAx2LAYYSSCZgEwmYAFO+WuPB8Lrg/B6ITw+CMUGTBvSNDL9DmcVorSlsz2c5fwcM5WLbDtrFV9WglImiivdPQJ7gkMQwlnBp+lQdC/g0SE
s01m9F+12BhSKs61bqBQiEJyUP38iIiIiIupRGvKhtroUF521GM+/uhWWZcOybQhFQNoS8YSBfYdaEAh4mVR0gqv0ajgST2FqwIPlQsCWEtVeDX5tuBupD835U8rGZJxsvzvQhoasZKK099ujWF4ZQkpKWO7D1kKwAgGYsSjMaARCB5LzlyIxbylak3F4Gw/A33wQergTnnA79O5OaIkYAJFJ5BHu68JbjjmNQkpITYflD8JyK1ybgRIYgRCMQCmMYAipQClsj7cnYCbS8QHhxDAEoEhASBtCSghICFtCjUXgbWmEt6UJ3mNN8LS35C14kgAsFOEfD6WE3dEKu6MV2D3SzopLRsJIbX0Pqa3vAUJAnTYL+pz50OcshFJTB1FRBZlMwo5Hsypd95VgZORWulY1pwK2PwCh6RCa5ixEUwf3iTpVrXsqWcM0IW0TMHtXt3YWrg0pGuVWgoKqQnh9ULzpRWistkRERP1jUhER0SShqgrOX7UQz7+yBSk4FYoy1YosG5u3N2DR3ClIJg14vdwu50RW7dPRGE3CpyrwTfDS5dJNcOrrK/QUvwd7uxPY351wEofQU3a7KZZC1LAwPTjIRDq/B0CpM65lOeWU02WVTQPSMHr2ZwOyMqGcgJFIV7NRNaeEs6q5bapT4aZIpC0hUwkgEUe8qwOAs+Vax5zFsM+6GHVtTfBsew8l+7YDqSRkuuLOJEwukd1dSLz+EhJvvwLPklOd6kVTpkGaJmQ8BpmIOeWvpQUk45DJeDrE55bhdn5+2duPZT5HKTMrwzIP285seYZ0EpEcQaUwy31k31PWKQgF0D1uIpT70LSereqYYERERERENOkJIVBTWYIVJ9dj3YZd6I4mcnZOsm0b2/Y0Y+HcKYjFUwj4x7YKB42dkK7CkxRI2UBIUxA2LIQNa8ySisbDzKC3YFJRzLSxqzuOkyuC+RcFvZDVFbBiUSASduIGuh9i8RLIBQsgYzHIVAqGacAwklC6O6GEu6DEo+4CIWeLNJneKs1dLGT7ApCBIOxACND0nBV2mlCgaRr8bsVk6T5EeruwnrylAVRBTp/mLKJKJhGPR4FDe2HteB+B9qPOW6SEJQF1UP2dAKSE1XgQVuNBJN74E5TKanhXnAXP0tOgllcC5ZWQqWRPBaPeCUYer7sIzV2IZpmQkTBkJNxrIHdxn/uzldLOjRlltr0fXlVrmV75Kd2ljOkYpaI6W8sBzsI123LimMkELHQ67YrqzN0XgPD7ne3iJsUPn4iIBotJRUREk0htdSlOXjQdm7YeguKuOAIASODDfUcQiSbQEY5hSs3YrwyiseNTFUwPetFtWLClzKmok7eFmOzZiT39nF1uOl1GOrtqUCGZ9Jr0FmTpYEeBvvsjRM/4Mt2f6EkUKuTs2hIcjCRgudcpWfe4vqUb1ww2qSh7HqoKofqBdMnm44hQhFNK2ueHqnuAfXsz51JCRWrWfGDGHMS7L0bFzvdg7NoGq/XopE4ugmkitWUTUls2QatfAO/Ks6DNmgulpNT5byBdASqZhEwm3KQgw3m4hYqy/xwPnRNYcj5/5xmK4iSgucHGntLkSs9/QLZ0glC27QaG3KQlI+UmQ9lAKuHMPT2U7oMIBKAEgrkJRooKEQgywYiIiIiIaBIKBb0oCXqxeN4UvLftECzT3T7H/dK+Y08zrrjoFHSGY0wqOsFVeHUcjadQoqnoNiykbIljcSMvuUS65ZfztuJ2E9LS29QLuMVz3KrV2Vub5VwDZIJHvVMZRK+W9Fyyq01nb88GyIJjp3vJ3sZtVsiDt44526spInekja0RLCsP5MSReuYgoAVDQDDkVI2OdMOORZxYV0lpz2dkGpBVtbAMA6ZhOPGE/M6cZ0XpWYCmOhWsnddaT1JIf9IJRqqK9H+8UkrAspHeFgvShtB1CF0HQiWQdiUSpRU4VDsT3q52VO7fgZrG/RCWCVNRoE/wxYjDYbe3Iv7SM0i8+Qq8K86C99SVEF4fVI+
3cIJR9iI0oTiVrj1uklF6AZqmIr2FvSz0Z6AX5+fWq6p1Ou5jWpkYUOZ5wEQk4cwtXelaUyE8HncBmg8ClrO4Lh4DOuD82QuVODEif4AJRkRExKQiIqLJ5twzFjhJRYqAbfd89TctGx/sbEQo5EN1RQjaCbwKiXBCVCmy3ZVTmcSmnhoyGQl3e7eZQS8ORJJOQCfrDQcjSTTFUpgWmBxBUREoQae0UKsA8fIqlJ99MYKnroR5aD+MXVth7N/jVFwabMDqBGQe2A3zwG4o1XXOyrTFJ7uBFg8Qct4jLdMpP53ensyyneBOOtiTWR4oelYdAj3BvXRgMB0wHEECT38/JWmkIFMpyFQSMpUEUknASEB2JWB1tfdKMEJ+BaNgCCIQcgOSRERERER0IqupKsHSBdPw/o7DsIWd8/05kUxhz8EWLNZVmJYNbYLHE6hvIU1BuyJgAAjqKiKGhaQ9xMopMu/FOBj82HNKfPiwKw5bSuhZsZCOpIn93QnMK+1/QZnQdYiKSoiyCshYxEnOSCUhbAtIJ/AUY02aqkFoGoTmATTNqUysZi1MGkTih7Td5JFEAjIRdxKvvM5iu2RZJZpPOwfhlRdh1sHt8O7aAiXSBUVVJ2WMSMYiTnXr9evgPfUMeFec6STZeLxZCUYpyGQi84BtAenj7uzenIpB6QVlmQy7zDb3AGA7lYZGWtUacBKIgJ6qRZBOn5aVWSAn49GeSKquAx4vFI/XSSICIMOdsMKdgKpBCZZAlJRC8XMLTCKiyYpJRUREk8zJi6ajsjyI9s4oIHq2QJO2xHvbDuKs5XPR0RVDTVXJ+E6UaACKEHBiGn0HNnyqgo6kibNrS3Eo2uJWVQKyYyHrj3XjqtmVk2LVjer3w/IHEDcTCMS60W6bmF5WAc/SMmgz6uHtaIVxYA/MQ/tgtRwddFDqRGS3HkX8j08h8frL8Cw9Ffrik6DWTnVWO6a3rxvvSQ5A6B4I3QMEnWwoaVlO0CgWg0zG+0gwCkBoek+CEYRT+toXgPD5nQeTjIiIiIiITjh+nwcnL56Op/6ow7YlTNPKihkBH3x4GIvnTUFXOIaqitD4TpZGjRACFR4NxxIGyjQVnl4xgUIxguwqQGnOLkxOFSG7V1WjnveKzPVp+RWscytoZ7emK2ADwq1I1DO/3mPbWX2IXteurgnh/8/efz5JkpxpnuBP1bizYBlJK7N4oQiKoKoAFGgDjUb39MzOzszN3Mke2bkVmS8nsiIncv/KfTiRu/1wsrI7e3eztzKztzuz3dMMaKBACoVCcc6rkgZ1Ym5UVe+DmrtHZEZmRmZGVmZE6E/E0izM3c3Uabo+/rzP++EgawrXtmtGL6+Nrmsqmt4bKRCdLnSspmrqGlOWUBU2+bgqrenkani+1Rv8Jq3ID2bJRX6wJ8YeIT1Euwvtrn2MygK5vr7tOmppmc3579D+2pPUZz9h7p1X0KuXbOLOYdSIqpLi5RcpXvkN4RPPEH37B3hzCwCzIrRJOlWTHG2K3Bbt6doaeTB2W9fTxOvrI7YnVskmYbzZNytcmyRayybh+rIU+kmSkbJro3XT/qwpQFM1VCVUJTodwoaAKEK2OlODkR5swGAD5fnWWNXpOoORw+FwHDKcqcjhcDgOGXEc8tyTZ/irX7yzvQUasLI25MsLG3ieZGmhgzyEVSiOg4UUgk7gcbodcrIV8mVaojDILRLWp6Oc/+vb54k8QeRJIikJPYE2UGuDMoba2PVPTy5wb+f67dJeXU9ZyUqUsRHayhhqDbUx1NpM1xpDJCUtX5J4ksT3SJptgb1+pe11q+Z2R+OApxbb1x3D2XHByrDgS6+Dh2FBFxwPPMZCMApbtHtzVNmI4bhPT9R4i0vIXg8xv0j4yOOYIqc++zn1+bOo819ilLoi7vwwYMYjKx69/CJyfpHga18n/NoTyCPH9p2YJjwP0elBp4dRta1O3NFgFFmDUdJGBMEsAntCGFlzURQ3BisP4fu7Sl0yxsw
iuicJT9u2bTu3aVu3ifh2tSJTfyK6elvE16Zi07VwczgcDofD4XA4bohjR+b42oPHefXtLxBye0ed9z++QFUrNgcZi/PtfTcfcuyebuAxqBQ5mrY8+EUlvdDjoV5i04ou04zOpuVNJ1zbVCEfmJkvjDF3zXtHCGHn9b3e9v1SMornaHfa5HGL9j1n6KxfoHz7Deqzn9l047vjLny1aEX5xu8p3/wD4eNPE73wQ7z5xW1XmRZ3dWbFulYHUbN2ZhOdY5ImtK3lvdiTVOttY5ISkNC8ladPXTNEo+qpwcjkOVQFFDm6yBuDUWwTrpM2ghrdX4f+OsoPkJ0uotNDxnsRxeVwOByOuxlnKnI4HI5DyPeee4i//uU7V7RA09rwylufc/rEIv1hxsKcqzhw7H+6gcewUnzvaI9/88mqTRe+rIpOGcO4Nozra0d6l7uM/P5slPPRIN/lCG8s0ng3hiKAV9dS3t0YUwQL031mVHL/UkKlDYNasdDu0m936eoMsbGGAPylZUxvHtXfJIhiggcewdQV6tIF1KUL6P4Gur+BybMbGvd1CaOZSNFq28ScMJ5V6E2r83xQatq/3oxH6DTFjEeozQ2A2yZu6c11it/+PcVv/x65eAT/xGnk0jLe0jJyaRnZnduV6GOMgaqylWu5jR3XeYYpsmkEuSlyWzG2zUwz+0NID9GdQ87NIbtzyN68Pb+/u6/3wvOvYTAqMP0C1d+wlZFRbCOwo9jGtjft1Hb0+QhbOdfc0e1lppNo7z1mZ79RI44mLbu4hCWHw+FwOBwOh+O6RKHP80/ey6tvf4EUEkXT3tlAUda898kFvv7wKdJxSad9/YIbx/5ECMGJVkhaKdQOE67JnHuW9jO7ncC2qZ+kBBlMs27mwszShKa3nxzEbFtduW22pxZNkokkk7QkMU0ZmpzbGNCTMZnZ+GUzBm0Mw0rx7SOdpgXalZrRy6sj/tMz280jN8vdYii6FkGni6pqBsJnYX6Bft2hE4UkS0fRoyHlB29Tf/4JFLvVvQ4YRlO+9QfKt18leOwp4hf+CG9h6apXF0LAXZx4LTwf0fKhZfVGU1eYcYoejxuDUYYuMthYgzhBJq3GYFShN9dhcx3l+VP9RSYtROj+f3A4HI6DhjMVORwOxyHk9IlFHjxzlA8/uzRrgdaIRG++d5Y//6OvMxrnzlTkOBC0fA9fCu7rRBxLAi5kFdoYvJsQcmq9s33hcm7m2Lsl9nZXqeTvkDT2642cM4u2Ai2tFB3fI5CCtbBD62QXOdxAbG7g+z7+kWV0tUA5GlKPRtQn7qU+cR81Ah0n9mNjOIBRHzbXEf11KAorIgqBlKJpUdf8HYTQamNaLUjamKQNrTY6tgYiz/eRgKcVUiuksqk1Cvu4V8ySnpQRKCkxUqI9HyM9jJSI8Qjvy0/xP/8I/4uPEKPBtljzidC4F+j1Vcr11csedB85v2TNK6I566TizJhtJqJrRp7fNALR7uAtHsE/8wDBw4/hLS1f/1bbDEaqSSZK7TjrCurKRmDbK0Mc24hvP2yityVI30axGw3XMeeBbbk5SSyyUdy2cm9awddcbq4w8m19D4rGcCYR0hrOhPSmceCmsEYtNtfs1aPYilttF9PtcDgcDofD4XBcjaceO00SBaR5Oa2BaaY0vPHOl3z94VNsDsbOVHTA8YSgFx78n4+MMaS15mQ74lQr5Oy4xJjt2sFHw4x3Nsc2VVoIvMZ0VDfp1LU2HEsC5nbxeK3lFZtlfcV+IawhyhMCb9v2bJ8vBX5zmRQCYwyVMZTKUGo9TcC+HoXSZErbabgxZEVNXwR0TIUHzPW6bIqQLBvRLlJCP2Bz8ThzZQYyIHzm20TPfBO1too69yXVx+9DeXsMRmaL0Wy7ge22nO7GMIbq7deo3nmd4OHHCZ98Dv/M/fs+NVn4AaI3j+zN23Zu4zE6S22LtHyMzsewuX6ZwajGjAaY0QANtmXbpMArCLekTPs3ZKwzjTaIUpclXdf
bE6+nLeaMbbSom987MLZNXKMV4Xn2+fFs2rWIE2eAcjgcjl1y8L8VOhwOh+MKoijgm0/fy4efXZq2QGs8ReRFxbsfXeCpx+5BKY23SwODw3E30w08NrThO0e7/NvP1tEGvJsQIJS586aiaJcDD3YwFRkDH/Rzvr7QIlOazbJmOQ5Ia0VaA2EXc6SNGfYxoyFGe9CaxyTzNj1nKiIUzUkiWDwGR+9pRILAGjym9/+yPu6qtoNQatbPvagw43X79y6ToHbG9ppXyyco77kPEf4j5HAT/5P3Cd58GbG5jgAC4LZ1dqxr9OrF23Tw3WAw6ZA6HVJ/8Qn5i3+DXDhC8PCjBA89hnfs5HXFLdsirQudLkZr+7yXBaYooCzAKMhSazq64sYSpGeX6aVNWtHkT62btKLdvZeuf4+3r6d4vjURRTEijhF+MIvv3pzEdPeQ3R4iivdkLA6Hw+FwOBwOx0Gg14l59MHj/OHtL6YFaEJaA8P7n16iqhTpuKCsasLA/bzg2N8IIWj7kmGleHapw9nx+hUt0IyB/+XLjWse589OLezKVPTGxphX1ka3PG4pxDSJacIfn5jjmaXOdW/7ytqIX18aTv82WlNExxEYHq/6POhJ2r7HWHYZdjosF2NGowEjP8EsncKMh5g0RS6dwls6hf+N7xGsX8L7+F3Ex+8h6h1MU7u8X2brYmbbO11xekwprCbVtB4TQWC1jzyDLIWy3OXZb7IYzRiq99+iev8tRHeO8IlnCJ945orWaPsREYSIuRA5Zw1GepxixqktQMtSazYSaxCGiLDRX8IYgcKkQ0w6vPKgE3NR8wyarc+wdbo1xWZ7rx1dvg1sMUDFiLhlU6/3uTHM4XA4bgfuW7/D4XAcUp578l7+x796jVGao3Xz9b0Ri37/5mc89eg9pOOCXtf1RHbsf3qBx0ZR83A3nlaeaaxYcCPUu5zL3jbTCpDcotHv3f6Y7x7tkitN3hiLAiHwmmQhH4GcW8B05zHZCDUcosoCFSeoKKGuKuoiR1cVpq4x2mBKhSkzNDkexiYOYfAxeNi27bKRCTRiKgpN5AExvY6NLZdSNuk3Am0ESkgUwqYWIVBCYIxG1wZtbNKNAXQtELXGK8Z4pHh+gP/IM/hPfJPWuU9ovfYb+PxjQu6SqravAL2xSvHSLyle+iWi0yN46FGip5/HO3LsurcVUiJa7VkEtjFQlpgyxxSFNYlNKsKMnhnFVLXL0QmQ0i6iec6FaMxJsqkmk1d/skxjVFO2Ws0oNd1G1TAeoceNWCsbk1Ecb4npXkNvrtnWe90esjNn27s5HA6Hw+FwOByHGN/3eOaJM/zh7S+mhWj2e7qhrhRvfXCWZx4/Q3+QsbzUvdPDdThumU7gMawUj/Ri5kOPzVLt3gXTUO+yCG2v0Ducr9hlura8yhzbIHgrmOexccWDS22yuqQ0grQzR9xqY/rriDxHtruYuI0e9dF5RlUrst4SfPNHiO/9A7zhJt76CnLtEnJ9Bbm+ghgNbuwOCoFptTBJG93qYNpdTNsWQOlm23S6mKRjE2iuJfLUNSIfI7IxcvUC3mcf4n/+kTUd7UDAzRUiAphhn+I3P6f4zc/x77mP4JHH8Y6dxFs+Zo1Pt4jR2hZ+pSPboiwbg6pneohSNsWntjFzotVu9I4eoimqutkWfCII8eZCmFvAlCU622IwKnKbzj0EEBBEjcEotKYd37fpQEJYvUbVu7YLXZF0fcW2xpgtyUQTM9JEfJQ06dqXaU1+gAijHQxQApEkViPqdG1CtsPhcDicqcjhcDgOKwu9Nk9+7RS/ffWT7S3QgM++XGN9M6XTjpypyHEgCKQk9iQ58A9OzfMfz21yIdut8WGGugvan0W7NBV9Pip23D+uNWfHJUuRT79SDKsr23AJ7H0wIkJ1Q0xl+6mb3EZdkwTQfDSYusbUlRUvqoqpLGCm/2w5sNxmIsGT9myTdCPpTauBJvdyN9lFxjCtYjKqpi5y6roRUUYpABu
9o5z4s/81QTokfOsleOc1K3wcIsxoQPnqS5SvvmTNRd/+If7xU7u+vRACoggRRXDZbwemEXWsgKUnN5jdzm7ZfVtFnNvAVGQrctvCrSxA15CN0NloFtPd6iCSBFEW6LUV9NoKIkoQnY4V3PZA8HM4HA6Hw+FwOPYjTzxyklYSzgrRjEZKiVaaN96zpqJhmjtTkeNA0PIkUggCT/LUQpu/vzhAmxsrGKt3qRfdTgq1u/Tn692v1/oFjywLeoFHv1Kzdm3tRYxfYIZ9pK6RrQVE1EXkGbLIEKXCL0cIP8a/72uIR7+BnJysyPE3VpDjFHYwRAnA+D51q0OZdCjjNpWQKAOX5x5ta3NvQChbqCaMQaKRGDxjrCQhfWsoafWg1YOl4/C1Z8BogotnCT7/kPCT9/BWLkyPXZubTCy6jPrLT6m//LQZtEAuLOEdPYG3fBzRam8xuXjTgipTFphsjM6yLa3ps2lCkMl2fvx2je/b1Oa5BfxTZwgefgy5dPSGjUYiDPHCxmBUlbbwrDEWoWqockyV75BwbVvXT41g0/52W9OuaTQ+Mytg2wN2TiuaGKAiazAKY4TvYbIxKhvDygVEu2NTrlsdl2DkcDgONc5U5HA4HIeUKAp4/qn7+O2rn8wqzxq00bz2zhcsLXbQ2swmgA7HPqYXeORKMx8F/PD4HLXWtH2P2ljhJVeaQhsKpam0uaJvvSfgRLI7k8HxJKCcS5BNApAnwBeCQAr86fEEQkCuNFmtGde2p33WrAXgy9ltAmlvt7CLOG2Ax+db/CLbOeL53X7Gn9+zgCcFhdIoY1u7KWPsnJ3tVXYyCPDnF5DMI7MxsioQVYlQChH5dmmuq8Eeb7JutifJUNvEHzENSLPpxtjrAmjpNUpXk6A0eT6ax87TGnSNVKo5lofAwxCg2gm10tZclOcUdY2qKgb9nDlfMnjhJ5z+4Z9Qvv57ild+s3Mc8wGn+vBdqg/fxb/vIeJv/xD/nntv6XhTMcwPbrSgc88RUiLiBOIE5i4zGWVj28JvGtPtQZIg2x3bBq3IMEWGXluxyUbtLrLTRYTRHb5XDofD4XA4HA7HV0evk/Dog8f5/ZufAfY3Xc8XaOCjz1coKlug4VqgOQ4CQghavmRUKR6bb/Hy2ohc6W0t0K7HV51UtBPlbpOKrnO/vsgqcqXphT4aW5ymjU2fFlEE4TJmnFKlI9vKqt3FJG1MlkGRWSFoOLaL50PgW61g8STiyA2kvpgtRWRG2zQe06Ql6yYp2ajtLdd3YmouEk3LdokIQ8TyaVg+Dc/+iPj858y/+iK9z94HrDa1p59sxqDXV9Hrq1TvvrGXR74x6hq9uY7eXKf+7CPyX/0dcmGJ4OHHbarS0RM3bjBq2s7RsSZTU1czk1FVNclEiiZuGioFN1zjtyXpWkpE8zxuL1prCtkmC9j11kSjSVs1rTBV2RRIXmaA8gKb8NRqI8IQkw5R6dAmbLc7yO6cNYUdlgh0h8PhaHDf+B0Oh+MQ89C9Rzl5dI4vL2ygm4nnwnybr91/jOe+fi9GG9KsoNuO7/BIHY5bpx14iLwikIKoqSxJfI9usPcxtl9faPP1hfaeH/dG+NZylwvDMW+mV1724SCj1HO0fY+2v/3+G2OmJiPRmKG2RWO3ZuYKoxSmLKEqrGBQldsrprZWHE0SajzPTv49iZB+Y0TxmsosD3wPIySqEayuOP9lGGOadBwFdY3Jx1bIkkDQgk6LcaVYzXLSLKNblxSbGwxCydzTzxM99wLVe29R/P7XqEvnb+KR3t/Un37I6NMP8U7dS/zCD/HvffDACSPbTUZNTPfYRoVva5MmPGi1rHAUxYgiRxc5en0FPB8RJ7Mlil2FmsPhcDgcDofjwJLEAV9/5BSvvPk5QjZhERgQoGrN2++f4xtP3Es6Lgjn3E8Mjv1Px/cYVYpu4PGNpTYvXhzeUAu0uyGpqNyjpCJjrG709YU286HPfDjZb2wbewM6CVEL86h0SD0cWJ9
Pt41qt1FFhsrGTUuuClSFwbYaM55/WbuyptqsebA9o/C1xjcaX9d2G4Nk1s1qtghbpAYorOlRA1p4aCGsXjRpi6XKaRWbQUCW2nGEMcQR5cl7uXDyXtL1Sxx97UXE+2/gGX3LaUX7Ab2xRvHSLyhe+gWyN0/wyBOETzyDd+ToTR1P+AHCD6Dd2bZ/0qJt2sZ+eoPLHuRGPxRNitNtTbqua2t+Kq2uSVXY1+twEzXchCCcGYz8ADMaoEYDkN60pZxMWrdlbA6Hw3G34b7xOxwOxyGm24l5+vHTrPVTTp9Y4OH7jrO80LaTq+b7/Ch1piLHwcATgnZgRaKWLylKzbhWt8VUdLfwtW7Imxev3F9pw0eDnMfmr5z4iiYVyN+FeiY8D5EkkOxtm0SbPLQ75UYIAb6PwIcwglYbYwwmzzHjESbPaAUQ0aaMWoyKEb18xGap6Fw6j4hbBA89RvDYU6gvPyV/+VfUH3/AtcvcDh7q7Gek/8N/g3fsJPF3/gj/ga8dOHPRBBvTvQjzi5gin8aIoxWkQ3Q6nCUYtTqIOEaoGpMOt6RaNa3gghDh+eB71iTneY1xrhG8pt0AmxLLydJEeU+r5IxuWsjp7ZcbbWO8pq/Hy54TKez5AyvaiaYKVHj2/XBQn0OHw+FwOBwOx+1FCMHjD5+g3QoZDDUKjWmSrLUyvNmYikbjgoW5O1tQ43DsBS1fThOjH+m1wMDHw5y01mhsqrNdtmsFfpMuvduQd08KQu/KK1/t+DdCsdukol3ME9/v51cUywkh8GBmAvKA+XnM3Jxt2TXs2/bjQQKdZEshWmmL0OrKtia/kW5Wk3NJifCagrRmDj5tGyYF0CTWXHbXjLbza9O0a0cpTF3P2qTnBeRQ+SGXwjaDhWVaP/1nhN/5Y4LXf41442WoL2/AdnDRg02Kl1+kePlFgkeeIP7Oj27aXHQ5YqKXcPe0mhe+j/A7UwOU0RqTZ1ZPzDL72u2XqP4GBDGi3UYmbYQPerABgw2UH1iDUadrC9CcDuNwOA4ozlTkcDgch5i4aYH2zGOn2RiMScfltJ3xOC/pdRLScYExxn0hdhwIehNTkSfZxEZDl1oTHtDEkXsSnxBNyZX3751+tqOp6CAghJianYw2mDyjt7nJWl6QJV16rTbVeEia9WnnY1Q+RrS7eCdP0/ln/3v0YJPy/beo3n0TdfHcnb47Xynq4jnSf/f/Qh45RvzCDwkefvxAJ/KIKMaLYkxjMDLZeGYwmiYYSVudFkWIMEKEMcL3oMjtbe7wfTAUkO1ggxM2oUm0WsjEmqMcDofD4XA4HI7d0mnHPPrgCX73+qegrNmhk0ScPLbA1x44BsA4K9GN2cjh2M9IIWj7HqNakXiS+7oxj863WI6DK66rjcGYpqX7DeqlPzjW4wfHele9XBszNRdpbALSpF19rdmybWeAoSeJpLDrHcxKO/HoXMJ9nQgpBC+vjnhlZXDFdT5PC3Klib3r6wFCiGmai84zzHCAybMrCtGMNtZcpCcty/QsdmjCpEWZJxFCbi/auQmEFCCbQrQtGIM1j+RjKAuCuiSuFPl4xDBJWOj2GH7/H3DPt75P+ftfU7z2O9tK/RBRvf8W1ftvE3ytMRctLd/pId12hJSIVtsWLGrdaES2YJEqx2zmqM11iGKbXpS0EVTozTXYXNumw4ikfdtNRpP0dlRttyeFatP3V9MacGLo8jybxO0d3AJbh8Nx+3CmIofD4TjkLM61Wd9MaSUh6biczuPyoqbWtnQky0taSXT1gzgc+4TEk3hCgLTbY6UZ15owPJimCU8ITqkxn3idKy77bJST1uqK9mcHDSEFotWi22oxXO9TDgak1PR6cww6bdrpJmQj2yN9nCJ7c4hOj/j57xE//z3U5jrVe29RvfcmauXCnb47Xxl69SLj//m/Ry4cIf72Dwge/bqtBjygCCGmLdK2GYyy1Ao0ZY4ptxiIpA9RYy6Ssmnp5zfVk9IakaZXblTSyd/aYIx
qylCV3d0kE2G2pBMZm2C0YxXnNCleNMKQj/D9mTjk+QgPTJZishTNiq3ibDVVdZ3OgX4+HQ6Hw+FwOBy3TjuJePyhk7z1/llOnFnmvlNHePrxe1jfTDEGqloR+B5p5hKuHQeDTmBNRS1f0q+gUBpljNWRtiCFuCJEdq+Q09Sj5gS3QbKJPEnUmIUem092NBVpY6Yt0G4EOZlXa4MpcygLmyRdlQgUIrpZfVnYea7v2bRqz85/7TxcWAOStIlGIBFSNClFtU0mmrTcqhW6yBB1NStGUxqTp3THGbmC0TijU4wxvR7jdofuH/0p0Te/R/HKbyhe+c0hMxcZqvfepHrvLYLHnrTmooWlOz2orwQhJaJtU4yMqjHjsS08KwsoMnSRweY6xC1kq2UNRFt0GFhpTEZx89oN7GvX92GSND2lMQJBowttMQtNEra2pm1NXs9a7TT03dw7qyUFISJpIVpta4ZyReUOh+MaOCXZ4XA4DjndTsz6ZkoSh/Y30MmPlwbyvKLTihimhTMVOQ4EQgh6gcdGWdMKPMZKk9YKXwg8MRNvpBBX6EPisuPcKsYYlGk8Abdx0nZPne5oKjIG3u9nfGPpyssOKnO9DmthRJpndPIBpfQpFo6QdHvojXWocnR/A0ZD5PwistXGm1/E+/YPiL/9A9TGGvXH76NWLqLWV1BrK1ZM2FOETcSJYmtymVbkbREtDZjxCD0cbPnQvj3ojVXGf/FvET/7S4LHniR8/Bm8YycOtNCw1WDEwhKmKjFliSkLTFFYAVHX1ozW3ObOpxXtMAY/QMQtKwyFEQIwowFqNIAVgWh3kL15Kx4d4OfT4XA4HA6Hw3FzBIHHQ/cd5f/yr/6UjX5KOi5RShOFPnlRk+UlQSdhPC6dqchxINjaAi0Qkspo8lrTDg5uMdaxOKAbSHZSNnZqgbZbhNwyr+41iSpVhSmLWVHNpPjGGNsyHGMLdqS3pb2Z17Qbv/HnwKYU2VbhW5GALgsYjdBZigBEu0vS7hKNcopxykhVzG2us55HdBbnka02yfd/QvT8dyl+/xvKP/wGU+Q39djsTwzVO69TvfsGwWNPE3/3R3hzC3d6UF8ZwvMR3R6y28PUFSZNrcGoriAbobORvWIQIqLEanpR1JiMxsDt1Y2MUk1Rmmn8SZqt7y+ktIY84dlCOExjWKptWtfGqjVANQYjmViTlMPhcGzFmYocDofjkBNHAUHgUVWKVmzTiiak44JOK2KUFhw7cgcH6XDsId3GVBRLgScEyhg2yhvrjy4FjRHJLr60piSBwGCm6bJ27mYjq3VjIppEVW9tdS+xopUvBF6z9pv4eGPMzDDQHFeK2fXldcwAC6akZWrG4sqvfe9sHi5TUcuTDKWkSlrk3Q6dImVj2KcdRcjjJ9DpyJqKVI1eu4QexngLi4jQmiq9hSW8574zPZ4xxiYcra2g11ZRm2tQlkBTVWSYmX6MQYSNuBBvXWK7L2lEhzDadbS30RqTDtH9TfRgE7W+SvXRe+jVi3v8yIHJx5R/+C3lH36LXFomfPwZwsefQnauHtt+UBBBiAhCaNv3itEaqsZk1FSJofWsakzrLWYvYZ2D04M1f0sJQtjnWsjt+4Scbk/3TzCXyVDGzCrX6kkFW21F2rrCjPqYUd+OI7RmNZm0EVFoX7vpEDwf2e1Zg1HoDMQOh8PhcDgcjhm9TkxdK5JGLxrnJe0kIi9qxnlJr5MwGhccu9MDdTj2ACkELd8jrRUtX9CvYKwOtqlICMFD7YDVzSsv+zwtyGpN4t96urcQAsIQEYa3fKy9QIYRLEYIs4jJMtviKkvptWJWw5jxeEQ3H1LkBcPz5+j0eojeHDJOSL73Y+Jvf5/qg3co3/wD9ecf3+m789VhDNXbr1K9+wbhk88Rv/BDZKd7p0f1lSL8ADE3j5ybx5QFepxa41BdWa2oKhsdBghC8ENr6JkkS0vPphVJzxYPTow/wNR6ZDRGNdqSUlaHmmpP2ha6KQVKg7n
RtCKbUoT0IAyRTVGj8GwBoxmPbGB2ECJ788junB2vw+E49LhPAofD4XDQbcfbWqBNyMsKbQx1rciLiji6so+4w7HfCD1J7ElypZkPfXKlmn711vAzMQBdC22gNDtmg9wQk7mjBkptKDFwg3NBT1hzUdAYm7YamMq8QgCnVcp7/hwGsy1x6UJWslHULESH4yuhEIJu6LFe1IyUodOdo253GWd9WuMBst1BJG30YBMzGkCZoy6es6afbg+ZtK44nuj0rLHm3ge/+vsjJaI7h+zOAfcCkHz/J7Zl24fvUH3wLurcF+x1PZReWyH/xV+R//KvkYtH8I6exD92Au/YSbyjxw+8MUVIaVuf3cVVW0ZrTJ7ZNm55ZsWtpoWbGm7a6rlWx77mAb25jt5cn73WOz0nGjkcDofD4XA4aLdC1jdT4mbOWJaKxTm7XZT1VDMqiorIaUaOA0AnsKaixPfoV4pCabQx1y3o2s882A75zQ77tTF8NLzxFmj7CSEEotWCVgtTlrQ21gjSnLrdIWvFtNNNNsuc1mADxiO8xSO2IMwPCB97ivCxp1D9Daq3XqV461XMYPNO36WvBq0oX3uJ8q1XiJ75NtG3vn+FZnYYEGGEF0Ywv2hTf4rC6jBFblOum+UrSbkWW4rTJssk9XxiSpokGKmmJWBVoNOhvX0Q2cLHOEGEMaIqbcHl2opLunY4HIAzFTkcDoeDq7dAMxqypgptlObOVOQ4MMyHPheykpYvae1QcTVJB9oRAzVN4pC2qUO1MagtXahEM2ebrKUQSKzpx5M2XcgHpBRTA1BtDLU207XSs9tfflzdXF9tWYodumCpJg7pHjXmPX8OZWwq0lbe7Y/5ztGDnzYzoeVJBkJQG8Oo0vRCj+HcEXqLS6jVi5CP8eYXMJ0uqr8B4xRTZJgiQ/uBrdBptXedJnQn8OYX8Z7/HvHz30OnI8o3X6H4/a+nkct7hjHotRX02grVO681OwVybsEKEWFkzSthaBOY/MCm+Wi9vR+8VjZhR+umL7ye7TO66TsfIIJg1oM+CBBRYk1NS0fwlo7e1SafrxohJaLVhpYVf01dW5NRs1CVmP66fY0nLWSrg0gSmLzWVy8ikjay20O0u7aazuFwOBwOh8Nx6LA6kcDHIww9ylJR1wrfl9S1Ji8qWnHIaFw4U5HjQDBpgRZIQSAElTFkStP2D+6c6GjkXTXh+lZaoO03RBjiHTvBXL/P2vomI3y680co8ox8tE5cV6hL521x2dzCVBfy5hbwvvtjohf+iPqLT6jef5v63BfotZXb3rJ+R6Rnk2gaveW2UtcUL79I8drviJ59gejJ55Bz87f3nHcpwvMRLX+mwyhlzUU7pUsrxTUtRpMEa+nZ15mUTbqQbFoE2r+F9MBrrrcLs49NOWo0t7q2CdyNRkRVYKoCM+zb87fayHYXGUXbk65781YrOuAFhQ6H40qcqcjhcDgcxFGA73s20joKGGfV9LJxZk1Fw7TgyOLhijN1HFw6gccJEZJWitpMkn2suUcbYyuVrnZjAeFkYw80JSkEUkBwE8fT2lAZqLWemoy2Gphq5ZMCHVMzr0sG5koh6J1+xgvL3UNTaSKEoBd4rJc1o1rRCWxq1SgMSU6egXSAXr1kW9ItLWPmFlDDAXo0wlQV9foqZnPDikhJgghCmk51yOb4N4q+lomNSfrULNXKGLue3ie2mM+al1Ek7Fhku0P87R8SfeMFijd+T/G7FzGTKqTbgkH316F/G09xFUSnh7e0jLd0FO/YCfz7HkK2Dof4eT2E7yM6Xeh0bYrReIROR1AWkKXoLAXhIdodRKttRaMsRWUpiIu2Ki1pI1ot2wrO4XA4HA6Hw3EoEELQTkJGaUESBZSlIisqkjhgOCrIcmsqSscFSwuHp7W24+DiCUHie4xrReJLqkqR1QfbVCSE4KQa86F/ZcHZZ2m+Zy3Q9gudXo+BF6EGA8ZVSjtJGCYnidN1SIeY0QCVjZGLR5BxMr2dkJL
g3gcJmiRrU9eo1UuolQuoS+dRF8+j1i7Zefj1CEJk0kJMljixxUCtlk0dbtai1bZFXJ5tr4WUU13KaI3JxujRADMaoIdD9GCT+tMPUSsX9vZBq0qK3/49xW9/gX//Q0RPP49//yN3dUHe7UZ4ni32ugpGX2Y4a56326nPTg1K+NYUtNUAtTXtWtWQDtHpEH150vXGKnpj1SZd9+aQnS7Cc1YDh+Mw4N7pDofD4QCg047Y7I+Jo5BxViGF/RE7y0uUVpQllFVNGLj/OhwHg7bv7SgKmcZcdDWbh94hVWiybZgZS2xCEQgEQoAvBJ6wZh9fimnbMo2h0lcuyphZ4hFAcxzB7PxSCiIgukqKSF7OJu+nVcpbLKDZnla0WdRcyCpOtA6PUaDlSwbV9rSii9mk9WOIWTiFGfUxwwFoD9NawMTzmCzFjFNb6TVI7SKENVkEIYQhIggJpSCWkAiIJQSXCQKVMeQaMg25gWrSMt0Y6xgyxlazbX0Jykls8SzC+Fo6gwBiaYibMcRBQPzcd4ie/iblW69SvPQL9AGL5TajAfVoQP3ZR3aHkPj3P0z4xDMEDzzi2nk1CCmnbftMVaLTEWY8sulRoz5m1Ed7gTUXtTuIILCi6WhgD+AHVtB0JiOHw+FwOByOQ0EriaypKA7pD3PyvGJpsdOYikqgTVZUKKXxvMP7A7Lj4NDxZWMq8hhUirxpgQaTIh8zLfbRxkyLgJQx08snqdWeEEhmBWVSNFN9wxUtkQRWO5Lixo0Fk3FM0qwN23Wo6x3vpMp2NBUZw4FvgXY5Qgh6UcDm3Bzjuk0nHzAuc/T8En6rjV5fA1WhVy5gWh3k/OKO6b7C9/GPn8Q/fnLbflPXNi3G6Cah2djkZq1s2k2c7Il+IaS0BULtDhzbMoYf/hS1uU71/ttUH7yNunD2ls81w1B/8gH1Jx8gOj2iJ58lePRJ5MLSoSlm3C13k+FKeLbIjLY1B5siR4+GNvF8W9J1oxPFsU26XsnQK00h2j5Idnc4HLeGU9YdDofDAUArCRtTkU9VKS6u9jl7cZMvL2zwwOkj/LM/e5ZRWrA47/7rcBxshBD4Arh6VhF7GfDqIQhuYr6lzcyAVGrdGJGsQDURjvLKp7FXcEqNeZvGuHLZRP6d/vhQmYqEEHQDj40mrajly8YA1hjCpED05jGdOcx4hBn2EXWFaHfQrQ560kKqLBFa2yqzsoAUDIJcCnIh2WwMQL4UxM3xcwOVvtw8ZGaq4g5MBEq7CPTkmp6H8AMrNvk+0vfBCwgaoTLTkAEoe+5QGHqeR+/p5wmffJbqndfJf/sL9MbqV/Co3wGMpv74PeqP30PECcHXniR84mm846ecmNUgghBvfhEzt2Ar0tKRFY1UhRluooabEES2MjKKEVGEqCvMqJqZjIS0Len8AJoWdZPXJAhbKcmkF6TkWp+ts4E119kawbWl4tLhcDgcDofD8dXRaUVcAqLIR0rbZntikKhrPS1AG2cl3Y5rSezY/7QDD5FXhFtaoJ0bl9dMGN5rJmYg2ay3GpGmZqQtRqJrNdmaGJx8IfCkPYBqzE/aQDUumTflVVugvdc/XKYigLYvGZRQ+z7F4lHibMT6aJNWEFMvn0INN1GjITrNMdkF2w4q6WyT2wQQiNniC/u8bjUM7WaGa4yhNKCYPG9WJ5r8fbXXpQBb2CjsD8Fbx+DNL+J96/vE3/o+eti3xWe//7XVuvYIMxqQ//pn5L/+GSJO8I6fwjt+D/6JU3jHT7lk6bsYEcV4UbxD0vUInY1A+rYQrdVBROGsPRrCmuK2JGw5k5HDcXBwvww7HA6HA4AkDvng00v8+pUP+eizFbQ2CGl/7/7IrGCMYZTmLM67L/wOx92AFILIE0QeXK1v2qCY7Y/QHI89LpQG7zLV4p3NjD86Pof3Ff9gb5pKvzthFGj7kmGTVnR+mlK0xVjUbBsZwdxRVJ7bRJe6AD+Bjo24NnVlq8yqyhqPtCJUhhBNiCHEUAs
YbTm3MVAiKBEUCEq8K0WgaSIRTCUic9m1FDaSuKi33A7wfIIoJopjYk8QNsJRYWClBomh40mb4PPYU1QfvEP+m5+jVy/e4qN692LyjPK1lyhfewm5uEz0ze8SPv6MEzcahLDCD3EyjUg349QKilWBqQr7KhTSJnJFycxkhLaVa1UJ2dUFzT1BetaY5Hn2ufN8G7MdhjbyPQxd7LbD4XA4HA7HHhMEHmHoUZaKJA5JxyV5URFFAVlekRUVYeAzGhfOVOQ4ENgWaJJx0/arqtRlQcJNOjWz9KFJupAtoxCAQTFLEJqYf7ZyeTK1MmbaHl01Kdo3MsMSTMxI9qhbU4sm2zu5j0xz20kLtMnfEz5Pi0PXAk0KQTvwGFaKUa1p9ebIWy3G66uYOof2PDpsY/obVpfpD2A0RrS71khxFZnLE8aajJgYfGamH7+5TWnsUmir41TXMA7dDAJjzymsmugnPbxv/hD51LcQf/gN+pVf765F2w1g8oz60w+pP/2Q6ZF9HxE2ukIYWY0htGWcRtU2TVkrUHaZ7lPKPuZb9iE9a9byPJuu7NltEUbIuQW8xSPIxSN2vbDodINdsi3puizR6bBJcK8xoz5q1IcgnBmMfB+TjzH5GDZgajKKY/u8+P7s+fH9XevBRutpuhdKY0yzrc10bcyWDzchms9hZgVu/pYiOIfDcVO4d4/D4XA4APA9iUHz2dn1LRMVOwnuDzPW+ylCCOpa4R/gPuIOx0HmvsTnfFldsb8XeHw+Kri/e3UBeL2o+O2KtcbIprJpsp7EcwPUxlauVltaw022J4lKWxeAbuDxT88sshQH1xx/pTXrRT0VyuS2yj0xbdtYN2KZatrSKQORJ1gIfeSWHuVLsc9GUVPq2aeeYWZ22kYUIaMIU1UwHqOrAlTdTEgDiBuTkdIURpE34p/UhsBowsYjVDaGIt0ktthOZgKBnBqJrjantqKlHaRAWzOTqq2IUimMrsEYVF1T1SOq8YhRFCOiGN8P6HrQkbCpoNN8jAspCb/2BMEjj1N//B75b/5+j6Ov7z70+grZX/6PFC//iuQHP8V/4BGXgLOFSUQ67Q5GKWswyjNMkdvWf0VuU43stcHzwfemBp9JSpEQHtMIMGSzbtR2uHY5JVcxG2pll7q6olXAFM9vBMkIEbdc/LbD4XA4HA7HHtBuRZTlmDgKGI5yPv1yjfXNER99tkIQevwX//x7pOO9/RHa4biTdHyPca3pBh6J7zUtzJhqCrcTpU2TQmN1Dd0ETs9MSLP50kSX8a4xNquN2OPV2kzvx2Rd6ZKUWQu0yzWRw9gCDaDre4wqRaE141ojhYdePIZKR6j+BloGqPmjds6cpVAr6PdhlCLbHbwonhqHAuxzpZqUqPwGx6IN1Eza7M3Sigy23gy2G8Em25It5iWYpl5VjVlpG14Mz/8I+eS3WXq9MRdVJbeNusbUTTv2W2WijTVsvWvq3OdsU0KFQM4v4R05in/fgwSPPIFsND3H1RFhiBcuYeYXrUY0SbquSky/tO3RvADiGBnFthjN92Ymo52Q3vY0/a3bTVtAzLVy2G7mjjRp20FolzBCtDvObORw7AL3LnE4HA7HlMcfPoloJpTTGhzrK+K9jy/w3WcfYjQumO+17ug4HQ7HzXFfK+R3wxptZr/rR57kf/fA8ra0np0Y15p3Nq8yCbxF+mVN7F3/R//1ouZff7Ry0+eZD33+k9MLHE1sq7dQSo4126ap3pt0ItMYjJkkF9lKQwGIVoiYs0Ka1gqdF1DmqCKHskR7ksJIW1XWCD1Vs2zFByJhl1A2ZqEdEEIgjJk+XzNsYgxsb1tnVI3KC4rxmEIpijyjynOqIKAfxbTaMQWQa0O85aBCCIIHH8V/4GvUn31M8epL1B+/v/eT97sIvbZC+u/+O7x77iX54Z/in7jnTg/prkN4HqLThU4XAFOV1lCU55gybyoGK7uwPYp/j0dihR9PwsSsJJu0It9DBBEiaCrOVI3Jaiv
qsg5CIlptzK6C5R0Oh8PhcDgcO9FOIl5/50v+7tfv8vEXq6haEwQeVaXwfGlboOGT5SVJfHhaazsOLt3AY1ApcmWLhC5n0lLM22rQQeBJm1akacxBTaHT1hSiye0nKckTJpd7UjR51OJqwdRXRQBBM9ev9A7H2wHdXH/SAm0ndegwtkDzpKDte4xqxVqxRdXxI8z8Ucxw0xbfxC1M1MLkKWSZdf0MhuBlEMc2UVdaY9okkcibLMwMP1uL9Sqg0tb4M2l9thcIZuecrKXYvs+PEvrf+jGnnv8Oxe9epHjlN1BfWaC4bzEGvbGK3lil+uBtsr/9XwgeepTwiWfw733QFSVdByEEImlB0mrao6VNe7TcakNphU6H9sqNyWiaELQlUUoIYYvGbgCjdJPi3qQTaQ2YWWKRHeBkpDM3JrbtoPB9e70mbXuqX60AUYxsdTDGaUcOx9VwpiKHw+FwTDmy0OXE8hxfXtwEZX9kl0KgjeHDTy/x3WcfYpjmzlTkcOxT2oHgdDvk81HBREzqBB6eFNzTju7YuHwpaO0iRvtWp3WbZc2//3KDf/nQ0StavU3FvOnu65/N83y8tg/tNgFgtMHUJbFuInjRVLWmrG1lG0AoBZHvE3hiZo4QAiFks701sWg2BmNM43gy02odUxVQVZiyxNSVTU7yfPy2j99uk5QFZjxG5zmXKoOqKkbZkM5cj76MiHd4yIUQBPc9SHDfg+hxSvXuG5RvvYq6dP5mHvJ9gfryM0b/3X9F8PDjxD/4E7yFpTs9pLuWSSUXnR7ALC1rmpqlbLWhqq04NHXq3ardyIBRtvJzi0XvirQiYaPOCQMbcx4nNn47HaLy7ZXz5gbFK4fD4XA4HI7DTCsJ8X2Pz86uo5Wd2xhjbMumWvPZ2TUevu8Y6bhwpiLHgUAIq5MUSqOMmaYjT1qe3WzarX3fXP22k8Tlekv6sjaz4FebVjRJYJ6lR/tyluh8+fEm6dFVY3Ka3E5ijTMrhRUHJi3QPmHuinEdxhZoAHOhh8FQKDMtOpNCIH2JjJahzDGDgdVn2m1IWuhxatNZVEmdltSpQHkBOgiowxDhhzsmVE+SqK5W2uUxSbA2SK2QRiO0QmxtkzfZNrZQTkmJ8nyU9NDSw2ATj+qrRP9K4LgPJVCFMckP/oTo2RfIX/oF5au/u2ETyL5A1VTvvUn13puIdpfwsacIn3gG78jROz2yux7bHq2L7HStwWiSbF3ktoVeYzLaURESnk29tn9cfiET4xAT49CeFD1O0rabhG3fR0QJMoqgyNFFfoV25HA4ZjhTkcPhcDimJHHAA2eWOXepP00oEtJufHZ2Ha0146xEKY23i1QRh8NxdxFKyX2dmE+HBV4zX+vcBYLQXODtUpC79WqRjaLms1HBA9do9XazCCmm/d8nRM3SvdVjN0ajbfvCy1KKtMIUJWY0wORjZBhBGCGVojPK6Oc5I6Vpb6wxiBOWFueu2c5SttpEz75A9OwLqNWLlG+9Rvn2a3sTTX0XUn3wNtWH7xI++Szxd36E7Nzqs3bwmbY626Un0egbEIHMFpXTMI29Nko1xjplj1fXmKqEqrLGo0pBlVsjEUAQIZIWRm6f+qqzX1AXGXJuwUWdOxwOh8PhcFwHIQQPnD5CGHgopVFKo7VBSIlRmo+/WLWmoqzkyJ0erMOxh0R7rH9eT/uwqUKCvSr7mhzvWolH6RZd4KTK+HjLr/9SCO7rRDwyl7CTfLRR1LzXz65oqba1Xdwk1WnSHXtrSvQ0NRqb1KRNs8a2oHtk7vpztcntJtuT40m2j+latzdmZhjaihSCxSi4+smjDnQ76CxDDzaskWKug+m2MOMUUxRQl2hVo1RGnQtqIVF+gEaipEAJiZoUmiEQRuMbQ2A0odEERuFrjTR6ZrC4CQwC5floz0f5PjqM0X6AbpKQyiZtOzfQEjDUEEmQ7Q6tH/858XPfIf/Nzynf/MOW+frBwqRDipdfpHj5RfwzDxD/4Kf4x0/
e6WHtC4SU0wQjYJvJCKVsMZq2hWjTwrHqZkxqYvJmvawwc/YBZbYWthmsIWly3knadjFJ295ACw+SBJm0MJdlydfnvkDpE8juHMK7weg4h+OA4UxFDofD4ZgSBj4P3X+UX778IUKIWQ9tAWVZ8+nZNR44vcxoXDDXdT/AORz7jUAKTiYBR2KfzVIhgPY1TCVfFb3wq/1K+ukovy2mojuNkB4iSSBJMHWNGQ3R6RABdHodhnGbOksp8iFRnrF5Ycziwjyy3bnusb0jx0j+6E+Jf/AnqJWLqItnURfPoy6eQ61egi296/c1RlO+/jLl268SPfsdom9+zxlO9pC9iBG/mhRsjIG6wlSVNRkVBabIoCowVYFWl4meRmOGfdSwjwojay7q9JxI5HA4HA6Hw3EVup2EU8cX+Ogz25LaGIPnSbSCz8+tAZDnFXWtrlm84HA47i7CLa3R501J1xcc60Q8Pt/igW58zXb1G2XNry4Nbsu47mlHuzIVnU1L/vtPV695HbElZQomJiYwmKk/RgrB8STgJyfnWY6vYSTaAZkkyCRBj1N0fwNRV9NW4kZrRFEgy5ygLBtjxXYzhWmMPQbr/5LXq6kTk/RraZetZiix5TqqMVNohcDgTwwVJTAe2sK4pIWIEgbaGolSDS0JIw1LW5K1ZG+e1p/+E6Jvfp/8V39H9d6bB9ZcBFB//jGjf/1/J3jsKZLv/QQ5N3+nh7SvuNxktBWjlE27vrzwbOvraWIUEmL2GpfyllLimJy3rm3ie1Vh8swanMYj9Hh0pXZUV+jVi+i1FZvK5ArTHIcYZypyOBwOxzYeue8Ygd9UnmG2tUB7/+MLPHB6meEod6Yih2MfIoQgkh4/Oj7HO/2ML9OCTnDnxd65cHdjuMl54xV8Njr4UbbC9xHzC4jePCYbwWhISxWM2x2yKCIabzKoS+bWVjDpCLm4hPCvL5oJKfGPncA/dmK6zyiFXl+hvngOvb5mK5GqElOWUBZ2e5Ik43m2h3qzxvOs0WTrfs8KY9bcIWw1U13ZCX9dW9NIkaE312+fgFXXFC/9gvL1l4m++X2ib3zLtv1y3LUIIWDSno02YF+bJs8w4xTSdNv11eoKen4O0eogygK9cgG9esmKRN05ZKt9B+6Fw+FwOBwOx91LEgecObnIx19MTEWzlkwXLg3IiookCkiz0mlGDsc+Yms6jwD+/GiLI/NzuyoA07fRVLLbY+9GK7KmHcPlfoHLz3duXPJvPlnlXz1y7JpmqqshW22blJuN7ZJntq1ZUwAGWE1D1dPW9miDwCBUYzSSW8wUjS4yMQ8JKa12coP6mDFsaVlui3EockxZWN3GG5IkbQZBiwIxfZzGGtqXSXbewhLtf/Qv0N//E4o3fk/55h8w6fCGH6v9QvXO61Tvv030jW8TffsHzlCyB0y0vz2SeXd3TiFmbc+2RMEZY6yOmWeYbAyq3HY73d/EyAVEGG4vTOvNI7s9hOdsFo7Dg3u1OxwOh2Mb3U7MmVOLfPDJJWBL5Zk2fPS5FY7SrHAt0ByOfUrsCQot+c5yl6XTi7sWaVq+5IkFW12iDShj0MYKMsoYVBMX7UtBIAS+bJZmO5STtSSQYroobb5yY9NGUbNZ1sx/xQlJdwIhBaLdRba7dAcDxmsb5H4Ai8uo8ZBxukm7yFDnzyJ784je3A1X/QjPw1s+jrd8/Dbdi50xdY3eXEOtXkKtraLXLlF/8SkmH+/dOfKM/Bd/RfHKb4iefYHwiWd2lezkuDsQnodod6DdQXYy+PiL2YV1id5Yhc0NRLuD7HQRQTATifwA2ekhe3NXtDV0OBwOh8PhOIxEoc+ZU4sIZj2MjLGGhLKsOXthg4fuPUrq0q0djn2NFIJC704r2uXVbordHnuvjQmF0ryxnvLN5ZtriS6EQLTa0Gpbw0JZQJ6h8wzKAhEEiODGkpB2PpFsirXk7HPZXjB1WhmtoKoQaGum8H2I7PzWKIUZjzFZCqrGG/UJSCmjhLT
ToecLhjuYiibIuXmS7/+E+Ds/ovr4fcrXX6b+9CNutj3bXY2qKV5+kfLNPxC/8EPCZ77pzCQHBCEEIk4gTmB+ETkaQXpuernJUlQ5hiBGdjqzwrTVi7Ywrd2xemqrfdMpSg7HfsF96jkcDodjG6045MEzy3z42cRU1Di5BVxcGTAYZfQ6iWuB5nDsUyLPg0qRNxGz1+orv5XFKODPTi3czqFdl6XI57987EQTU20NTboxNE22pbBGJk9AZQz/9QeXdjzWZ6OC+cXD9VU46fWIZUgx2CQtM3rtLmmS0B6uQZGhBxuQpXgLRxDR3W+iEL6Pd+QY3pFj031G1dSffEj51qtUH79vY8X3AJMOyX/xV+Qv/i3Bw48RPf083j33OcFgHyHEdiO06PagzG2V5qiPGvUhjJHtDiJpI6jQm2vozTWIYmswarUhjNzz7nA4HA6H41AihOCBM8t4UkwTrY0xCClAGz75YnVqKjJbWuY4HI79R6n09a+ELTK7XehdmlNux2fNZ2lx06airQghEFFs55RzCxitMHneJBQp0Nq2gJq0ZsI0ZiGJkJ7tgyYkeBKkbw1Eskl6vm6PtBnTdk91DVWFHo9sglK3i+l0MfkYk45o1ZoyzxmVOZ35HuMwojYG/xqPsfA8wocfI3z4MVR/g/KNV6jefwu9sXbLj9/dhsnHZD/7C4pXXyL+4Z8SPPSo+7/ugHF5iruIE6gKqHL0Rm4L01ptZLuLiEJMOkSlQ/B8m1zU7iLixL0uHAeSw/VLisPhcDiuSxj6PHjvMuIXAiHBaGYt0LThg08u8tyT97kWaA7HPiWUtnhJGyi1JpT7J3FMCkHk3dik7GgScCmrrtj/6TDn6cXD196oGwVUcwtkRYte3icD9JFj+FmK3liHqkRdOofoztlKm330+gAQnk/w0KMEDz2KHqdU771J+eYfUJfO780JtKJ6702q995ELhwhfOo5gvsfRi4ecYLBPkO2u/hLR9B5hhkNMFkGZY4uc9hch7hlDUZxjChydJGj17DibZwgkgQRt5xY5HA4HA6H41DRayecODrPZ+fWQFtTkSclCvji/Aa11vhIsrykldz9hQoOh+NKBKDZnWZ0EJOKAM6NS2pt8G/AuLMbhPRsitFXjJikFE3+nlvAZCP0aIgoC0TSgqRFqyjoD1JqpcjX14lbCcP5ORaC3T0O3twCyfd/QvL9n6DHKerCWerzX6LOf4m6cBZT5LfrLn6l6M11xv+//zf+PfcR/+jP8I+dvNNDctwm5PwiXuBj0iE6HdnCtHSASgcQhIhWx2pH2NcFm+sgpNWKWi1btBbFTjdyHAicqcjhcDgc2xBCcObkEp12xOZAY9BoA1JaU9GHn13iuSfvcy3QHI59iC8EQggiT5IrTaEM4QF/C9/fiXc0FX0xLlDa4O2xQHS3k/gSWYGOIsruCeJ0wGDc50i7g4gT1MY6ZCPbAiobIxeW9m2/eNlqE33j24TPfIv64/fI/v6v0esre3Z8vbFK/vO/JP/5XyLiBO/EafyTp/FPncY7dgoRhnt2LsftQzYx10bVmHFqRaKqhGyEzkYgbAs1kSSIILKdPsYjzHjUHEGAH1iB1vdt1ajv2yj0a32+NJX9aA0Yq5gbY7cNzTaYLdW5oqlYnVates22H1gxy4lUDofD4XA4bjNxHHD65CKfn18HbFqsL2xR2sWVPllW0m3HjMaFMxU5HPuUoDEKlrvQjBJfcrIVoo1NFtJNirTGTmlm281lWBOQYDZdmiRoS2G3Paw+vbDLlvW3YxpUa8PZccm9nYP5OSakQLS7yHYXXeSY0RAzHuFHEa35iDQdkeUjomxMv8yZW5q/YW1IttrIBx4heOARAIzW6M111MpFO6cuC0xRNOvcrsvCzns9HzwP4Xl2Le3a7tt6md1GKVA1Rimoa4yqbdFcfwO9vooZp7fjYaT+8lNG/+3/g/CJp4m//xNkp3dbzuO4swjPQ/Tmkb15W5iWjjDZGKoS019
H9TcgaSGTpvDMs23TTJYCK7Y4LYyaFoQBBAHCa7Z9f3vrQph9qBmzZdGNmVtv30+jKxmrHwlEk3DWpJwJuy08z+lGjlvGmYocDofDcQWtJOSB08u8+vbnzcRPIz0PBHx+boOyqgkD37VAczj2GbEnqIFIWlNRrjTd4CrN0Q8I93YifrsyvGJ/qQznspLT7YMpEF0NKQRt32NYKVJlaM0vMm510OkqkhL/yDI676DXV6Gu0CsXMK0Ocm7+igjg/YIQguDBR/Hvf4TyzT+Q/+rvMOmVr4lbweQZ9SfvU3/y/mxnGCMnVUmttt2OW+D5CE9OI8wn0eVWQfVsOpSUdruJOBdxbIWJOLEVTvssQWo/IDzfJnR15zBlgR6n1jikFGbUx4z6gLAiTBQjoggRxgjfg7rE1CXALgP6b45rH1tAFCHCZpmOz035HQ6Hw+Fw7B1x6HPm5CK/eqXZYSbp1pJxVnJhpW9NRWnB0aU7OlSHw3GThB5kQKE1Ha6tGT3QjXmgG381A7sKi6HPf/bAsjUriZlpScPU5KQmBqemeEMK0VxX8O+/WCffod3bF2lxYE1FW5FNezYzN49eX6OtM7J2hyyM6KUblKomu3SRuNtFzi3ctB4hpMRbPIK3eGSP78H10XmGXl9Bra+izn1J+d5bth36nmAo33qV8r23iJ59gegb30Z2br11nuPuZFqYpjVmPLKFaWUBWYrOGvNaECKiRr+LYluclo+B26sZ7Qo/RISTJYKJhuR0RscucAqjw+FwOK4giUPuP32E1975ArBGaLATrsEw48JKnzMnlxiMMmcqcjj2EZHnUQOxL+lXUCqNMeZAVymcaIWEnqBUW9M+4FgcTsWkw0anMRXlSts47zAk756hM+6j11aRcYI4fg+6v2HNFOMRapwiOl1kd45SepRNmEogwBd2UnG7X0fKGAoDmYbcQG3Aw55/6zgmf8vLxiOkJHrqOcLHnqT4/W/IX/qFTaS5XWxtpbXXhDEyjhHtLt7R43hHT+AdO4F35Kit2nPcEiKM8MIIM7dgKyZTW0VJXUFVYKoCMwkq8vxZFaX0tldMXustYZj0mLWpRTBLLZpeftln1MR4JuwaIazwMzGqFbkd79bbhBGy20N2eojApWc5HA6Hw+G4NcLQ58ypxekP8pNCeSkFSsGnZ9d46L6jVJWiKGuiXSaNOByOu4dQSmsqUvtDMwk9m5Z0s5xuR3wwyC47pjh0mpHwA7yjx2mNhngra6ggoFo8Spz2GWYDotEAlWd4i8uIaH+ZrWScIE+ewT95Br7+LMkf/znVh+9SvvUq9WcfXTn3vhnqiuKlX1C8/CuCr32d6NkX8I+7tmgHFSElomO1FlOW6PEIk+dQFTbBqCqb4jQgiJqEa6/Rb/wtqVvyunqq0WamH02Trm0QwFRD0s1lk6QimoQiMUkuagoXm6I4M952ZxDtjtWNWm1nMHJcFfet3uFwOBxXEEcBD5w5Yr90CKYpipMWaO9/cpEzJ5cYZyW10viuBZrDsS+IPUEKhFIghRVISm2IvINrKvKE4Ew75ty44L5OzP3diDPtiMQ/2AlN18KXgrhpgTeqFfOhz6DWzC0cQba7qEsXIB/jLSyikg7j/gZ5UVAMx+TDHNNq23ZQcvYYCsAXBl9AKKAjIb7F1nKVMRSNgSjXUOyg8dTsvF8C855h3rvS7CSCkPiFHxI+9RzFb39B8epLoNUtjfUrZ2JYGmyizn8x2y89vCNHrcHo5GmCBx9FJq07N859jhAC0VShAbZFWtFEsxe5NaWp2i7MKs5ut/S84/E931aY+YGtOAtCRBBAWaDXVtBrKxDFyE7XGYwcDofD4XDcNEIIFnotji51OXthE9O0dJVSgoALl/pkeUUrDhmlOVHYudNDdjgcN0ggBQJb2HMY2saf7kR8PMw50Qo5044404k4lgR4B7gA71rITpeuDOmvrZLWJa3eHOM4xgxWEHWFunQe2ZtH9Ob2bZGi8APCR58kfPR
J9HBA+fZrFK/8em/apGlF9c5rVO+8hnfqXqJnXyB46FFn1DjAiDDECxcBMEpNNSOTZ9PiNKpiF5qR2NLPUTSV/nuoMEkfAh/hh7YFWxAg/BDhgxkNUKOBLWRrtZHdOWsw2qfvccftwZmKHA6Hw3EFUgqWF7scXepy7tImWlmRyPMkCM2nX65RVDVR4DNKc+Z77gdLh2M/4AlBKIU1EklBpgyF0kQH3Bj4p6fmp0Yqh6Xje+RKk9aaXmBfB+tFBUiqpRMUgz7Vxjq1CDDzR9FliUkHUJboNKUaZ9Bq4ccJgR+AgMrYJQP6CnqeYcm7MjFoJ0ptU4hKY01ChbZR5ZdTN9fJdZNUJLYszNKKELCuINWw7NvX++XIVpvkx/+A6LkXyH/1M8q3X92b6rQ7iVaoS+dRl87DG6+Qef+e4JHHiZ56Hu/UGScG3CLC8xEtH1ptAIzWtgJNKdDKrpvFKMU1xR9hK8fElgqyqXi0dXsrpnF5G23PPV2UNTZlNZPatGbA0GohW20buV3k6CJHr60gosS2NezuXyHY4XA4HA7HnSGKAk6fXOLcpT5oqxeB1ZIurAxmpqJxwdKCMxU5HPsNIQSBlJRaU2hNSx7soqzH5xKemE8InOljSicOGS4eoUpTVDHACyOKo6eI+2uQjdCDDVuMtrSM8IM7PdxbQnZ7xN/+AdE3vkXx8q/If/eiNYLsAersZ4zPfoaIE7xT9+Kfvo/g9P3II0edyeiAIjxrypnqRk1xGqq2OlFdYybFaeryAkdzHV1yi3a0TUOSTVK2mB3jikWDrqGobZEcW7SjIEZMtKPLDEayN2fbHrrCNAfOVORwOByOq5DEIQ+eWeb8Sh+wPac97I/DZy9uMhhmLC92GTpTkcOxr0h8j7KsiTxJpjSF3ucmil0QH3DT1M2Q+BK/FNTGkNWaduCxXtRbrtDBRAli0KcaDij9kLJ3hLwsKUepnfymhV2kxA9D/CjCD0JiCS0JA2VblS37hmQHU48yhpGGTWUNQpdjDFQ0RqMmqegKo9E2B8WMloB5DwrgbAVznmHhKgYn2Zun9Q/+KdHz3yV/8W+pPnznBh7JuxxVU73zOtU7ryMXl4meeo7giWdsD3jHLSOkhCi+ZpezrwKjFKYqoapsxHazjVGQDtHp8AqDEUWGupTB6iXk3IIViXwnDzgcDofD4bg+cRRw5sQCL71mvwVNWgRJIRkMM1Y3hizNt8nzirpW+Ic4Jdbh2K+EnqDUtgVa64BPE0KnGV2BLwWRFBTtNkW3RXu4wajK6RxZRqcJemMdygJ14RxyfhHZ6d7R8RpjbrlYRoQR8Xd/TPjUc+S/+jvKN/+wZ4VnJs+oP3qX+qN3ycGajE6ewVs8YttONYngst1BtDrN3Hx7G6vJ4oqC9hfT4rQdMJPCMfvXltXkdbd3z70tiqswtdWNqCtMVTVJSjmmn6P669Zg1G4jE2sw0pvr6M11RLtr3+suDf1Qc8C/DjgcDofjZknikIfuO8avXvkIBdsqz+pK8dFnKywvdl0LNIdjn5F4kj4zo02h9J5Mvh37j07gsVnWjGpN6MlptHlt7KI01GGHei6B0RCdp+CHyPkQWZbWlFCVGK2p85w6z0HAKAiJfZ95X6B9j7OVZD7wWQxsWlRtDH1lTUeTqbNuDESVgVLbdc3OOS+hgEhAIOztFKCa66vm77GBvLbGopa0xqX0GgYnAO/IUdr/5D+jPv8l+Yt/S/3ZR7fjYb9j6PUVsp/9Bdkv/prga18nfuGHeAtLd3pYjj1AeB7Cm7VpA/u9zRQ5JhvbCHe93WAkOl1kp2tFoo1V9Maa3Te34EQih8PhcDgc1ySOAs6cXJoZ9g1NCzQBAs5d6HPfPUdsuvW4cIVoDsc+JJKSEYpS75Qh7DgMtHyPoqwZG0l3+TjZsI/KNvHaHXQYM1pfZVxU5Ot9ZFoQzs0T+B5BkyAdCJsmLbmyLf2tYoxhbCBVVv9RBjxhpgnWXjOGSaq13LKWXHt
MstOj9af/hOjZF8j+/q+oP/lgT8cOjcno4/eoP37vFo4iZuk0Qtg26GFkU4qj2BZARREyaSEXj+AdPYl35KgrJrqLEEKA99UYr21RXISIom37jVKYLMWMx5giswajzRy1uQZRYpOK4gSTDlHpEBXFePOLiE7P/ZZwCHGfHg6Hw+HYkSQOOH1igTgKqGoFxlafSTlpgbbKN75+xrVAczj2GUljJgqkwBMCZQyFNsSemwgcNlq+ZFBCqTUXsvKq15OeJFiYJzA9gnRIkI3wgxDaIcZAXRSoIqcqCiqtScuCvCy5CMxR0xaGdWAkIJSCEc2EWQgqAyM8xtv6hsNEHJEIQmEIpSTyIJAeUgqE54GUCO/K6UypYUNZk9K6grG25iKAcxXMe4Yl/+qvd//EPXT+xb9Era1QvP4y1VuvTqOBDwSqpnr7Vap3Xif6xreJvvNHLrnoACKEQMTWaGTmF63BaJxisrFt1TbcRA37kLSRnY4ViZqIaxVGyN48stvb8T3mcDgcDofjcBMGHr1ewkKvxaW1wbSzhhA2GfTCSp8sK4gCn9SZihyOfcmkhXipbXq9ayd/+Gj5ks0SKmOoDIS9edY7bepLl8i0wcwfQ6cjzGgIRUG5csmm7LQ7V5gNJGZq6PGadSSg40Gwy9fWViNRqq9Msp4UmZW7DBfyhKEtoSPZsfjMO3KMzv/q/0D16UdkP/9L9OrF3R34K8PMUm2MNSqZPLv2TYRELi3jHzuBd/QE3vFTeMdPuVZshxjheYhODzo926ptPEaPUyhzKDL0Sob2A0R3DtnqIIocdfGcTb3uzSN784hgf7dAdOwepxA6HA6HY0c8T5IkIQ/eu8zr73yJNgajDXg2aeLzc+ukaU4033Et0ByOfYQnBZEnKZQm8gTj2lAo7VqEHUI8IViKAzbLGqUNXmM084Ww1VzSbgdSzATEVoyp5zHDITobI1RFEEcEcUQMmKqiU+Ss14ZSGTa1T65r5k1FCZTKADWFEYyQ5Eis7GNFpgBD2KwDDDt5f7Z1PBMCEUQQBBCGiCAklIKjAoYaBhpyA5dq6HlWLNpUEEtD+yqJRdPHZ2mZ1o//HPODP6F67y2K115Gnf9iTx77uwKjKV75NeXbrxF/78eETz3vhKQDyjaDkTE2vWg0tFVo2QidjdBBiOz0rAhcFujVi+jVSza9qDePbLXv9N1wOBwOh8NxlyCEIA59Tp9cZGVjiFGmSbcWSCm4sNonzSrme5BmJVo3KUYOh2NfIGCqDyhjKF0h2qFECkHiS8a1Jq0VYegzlgHm6EnYXKMeDRnHHXIvQqcj/KrAH6V4WUbQ7uAnrammo7FJ08BU0EmxhWBtaeh50Nrh/wllDGPNdNHbLoPM2P212Z5I5Ivt6USCWULR5DTK2ATtgYJAGDoSujuYnIL7HsQ/83+ifPMP5C/+jU0C3q8YjV69SLl6Ed56FQDR7hI99Rzhk88hu707Oz7HHUV4PqLbQ3Z7mLpCDweYdGRbpW2sojY3mtTrHoJJ6vWqbY02N49sde70XXDcZpypyOFwOBxXpRWHPHzfMd547yw0lSketgXacJRz7lKfxfmOa4HmcOwzksZUFEvJGE2hXJz1YSX2JMeT8IZuI/wAsbCIXFjElCUmH2OyDFPmiCAgDAKOGRhp6GsogVUD86LGaMNIQzHpS28gwdCRmkhwWe/wBq1AG0yzRmu7GAXGYMrcVtCkYBAQ+IgwpttqkfgeG8pWqm3a0D26EoYK2rv8L0v4AeETzxA+8Qxq5QLFm3+g/uQD9MbaDT1udysmH5P9zb+nePUlkh/9A4L7HrrTQ3LcRoQQiFYbWm1MWaLToRWJqhK9sQqbG4hWG9nuIqJwll7khza5qNVGxImLuXY4HA6H45ATxwH3nlrkD29/Dsz0IiEE65spaVpQ1wrf9xhnJZ12dL1DOhyOu4SwcV1MCtFKV4h2aGn7HuNaM641c4GhNpDViizuUZoAPdgEL4DeAmVRwDgFpWC
QQlog2m28INzScmxm7GlJm1aUarsEwtCVEEvIm33FZfLQViPR5YlEGmZy0jXSikSzBMLqQrGwSdcbyi6RMMx70NlipBNSWuPNo18nf+mXFC//ClR9S4/t3YJJh+S//hn5b35O8NBjhM98E//0/W7Of8gRfoC3sISZW8CkI/RoYM1FO6VeT1qj+aE1F3V6Lr3ogOJMRQ6Hw+G4Kkkc8OCZZaQQKGgqz5i2QPvki1Uevv+Ya4HmcOwzJhHGUSMKldqwklc2gnhLUs10+xarSpUxKG2mCTPGGDQ2It9go4+DJhXHTVr3F7Znewi9eYxWtrVSUWJUTVcr4qpivdRUAtbxp6VhEmhPo65v7tzGYCe0VYkpS6hKa0CqKkxVYdIRXqvFcqvDQHgMNYyVNRWNtX1dejf4evOWj9P68Z/Dj/8cPU5R57+gPmcXdeEs1PtXVNJrK6T/w3+Df//DJD/8Kd6RY3d6SI7bjAhDvHAiEg3Ro6F9T6UDVDqAILJVaEkbQWM62lgFIa2xqNVCJG1EFLvPbofD4XA4DhlxGHD65OL0O8BELxLNnO7i2oDjR3v0Ogmjce5MRQ7HPiLyJCUQTQrR9DUcGo4DTbQlsep8VqHN7LUg45g4OUacDjHjESqJqKKIOh9Tj8coVWMGfWrPt8nSYYjwG6OBsaYhH6sLtcSshX0TZj2lNNZklJsrjUShgERAJG0NmsKua7b/rZmlHE20ycJAoazBKBHQ8iACCuBiDQZD97KELhFGJN//CdGTz5G9+DdU777RiFMHAGOoPnib6oO3kYtHiJ7+FuHXn0GE7v/vw4yQEtHtITpdTDZGDwe2sHOSeu0HiE7PtkajRK9dQq9dgjCy+9odV5h2gHCmIofD4XBclSQOabci7jmxwCdfrE5d/veeWuTYkTlOHJ1jPC6I5nzXAs3h2EfEnkQA/hZxIL9GWpHAtsryhMCX1mzkCWGjhLeugdoYKm3jsatmUbucYAusucgukkA2bbjElvZbN4kxxk1gbjNCeoh2F7Z0SfKBE0ozKEpGRYnARlp3fPvcTp1lTLYvKytr9hmlQCmM0bbqTWsEGoLAVr80rZmMUjY9KUuhKjHjFDMe005aDMMOledRGis8pdq2RLtZZKuNfPBRggcfnZ5brVy08b/j1LaYGqeYLEWnqU10Kq3xySgNW+7LNcvovmLqTz5g+MmHBI8/TfLdHyPn5u/0kBy3GSsSzSG7c+g8s63RsjFUBWajQG2uQ9JCJi1rIPLAZPa1DSsgPfte9O2C7yOCRjD2PFuOKgRI6T6HHQ6Hw+E4IMRxwOJcm04rYnMwBjObc0khuLgyYHx/2ZiKijs9XIfDcQOE0pqKwqYQrVDaaSqHFCEEC6HHWlGjjUFiNcXYlySetFpdK8ZU8+jNdUw+hqCFaSfo0RA1TtGqQmc5JgPl+RDG1FFM5gXU2FTpPtbY0/GsjpSbZrms5RnMjESJZNpezQ72+vdHNxKUBrKmpVoNjA2Ma1sA1/OgI6GvbDu0nZBz87T/4T9Hfe+PKf/wEsUbv4fy4Pxfp9dXyf7uP5D/9ufEP/gp4eNPI6RLKzvMTFKv5eWp13WF2VxD9Teg1bZGoihClAW6LGBzDaQ3S70OQ0QQuSSjfYozFTkcDofjqgSBh+97fP2RU4SBz4mj8zz1tVMsL3VJxwUr6yPSrGRhru1aoDkc+wgpBLEnyZTmaBxQN0lCyoDCbmtjDULa2ISh2hhqYyhuslPaxHQkhLBRw01PcyFAaSi1xmBTk0o9meJvGTPgNSaoidFosi23bBuYmpkmBqeJsSmUgqUowG+Sl4aV4tNRzqfDgsfnEx7sJTf7kDqugfQk862Y+Va8p8c1SmGKHIoCXeZQFgjPQyQJJAm6LDDpEMoSkaWE44I8Shh3WoSBx+gWTUWXIzwP//hJOH7yxu/LpKWb1k2bt8nfyqYuFZltMZfbRec5ur+BunQOvb56GyrjDNXbr1K9+wbRM98i+vYPkK329W/
m2PfIOIE4se+vcYpOhzYFbDxCj0f2SkGIiBJrMIpiq90W9v24q1eikPbDf9s+cdn2lv8kmsuEkOB5CM+3ZiXfR3ieFajCCOE7ecPhcDgcjq+KMPDxPMm9p5boD8ZNIu3ERyw4v9InL2qUtr/YZnlJEt9Y22WHw3FniD3BCNsGTQqBbrSV0DtcpiJlDBezis9HBcuxf2g1o8T3OCElyhgCuXPCuAgCvOVj6DxDb64jqhKv10N2ulbPKAqbbqIryCvIh3QRjKXPWAZUnsdISkbCA297MYrAJibFUhBPjUR2zmilEHHF9PJqTILYPSDwrCZU6sZU1BiYBsomaxdAZQzBNQ7uzS2Q/OjPiL/7I8q3XqV45TfozfXdDWYfYMYp2V/+O8rXXyb543+If/zUnR6S4y5gW+r1eGTTi+oK0qHVkIQHcYJMEmskAsxogBkNthxE2uK0MGoK0iRC+uBJW5A22RayebtbHcmZW+8sTnVzOBwOxzVJ4oBvPX0/jzxwjM1+NnX9J3GIFFDXmqKqiQKf4ShjYc796Ohw7AeW4oDz4xIk+Ag7o94BYxqzkbGCSq0NCoNuTEi6aWU2SSMS2Ko2X9p16En8xvhzPerGUFRr3axNE1Vsz6G1obqFRJdSGz4YjLmU13w6KljLq+lloScOrUC0XxGerXSh1UZijTmmLGw6UDpChhGEEbosMemAVlGRFzlpmdPrdciShNoY/LtgQiqknTTDrorrtmHKErV6AXXxPPWFs1Qfvrt3FXJaUbzya4o3XyH+5vcIn/mWNZ04DjzC8xDdHrLbwxQFOksxeWYNRlVp2w6O+oCAIADPt2Yf37fmnsnfUiIub6Fp9E2Fc12WY3Ylnm8Fq7gxO0WJNR05HA6Hw+G4LURRwJmTi7zx3pcYZZoWaILF+TZHFjsYbcjyik4rYpQWzlTkcOwTpBCEUlBqW5yVK0OhDeEh+Gq9UdR8PMz5PC04Oy4olZ19PNw73IVonhR4u1ArZJwgjp3EpCP0YBNBjUhakLRsEHWRYfIcyhxpDB1d0dEVRSUYIxljdREPQ4wmwhBhtpmGdq51FNYxJL2ZviI9u89vjAtX0X5CCSEwJ+F8bY9fYluhpRrmd/G6F2FE9I1vEz79TepP3qf4w0vUn398YFqjqfNfMvrX/xXhk88Sf/8nrujMATSp150estOzqdfpyOpGWk3bo4GAMLJaTRDahCLft+nzZWF13Bs7q31/C2GNRlIgZJOQ3SRlC8/fkqbt288Apw3tCc5U5HA4HI5rksQhw1FOHNr/MoqyBmzlWRKHpFnJKC2I5n02+mPme627xjGc5SVlpei0IjyXoORwbCP2JPd2InKlUcYaeiaJRZNUIqVtqakvJl8ar/7eNsZMK1N3rFpillIkBQjEtEJokiTkS9GkCG1/v2pjqBsDk2quq2DHRCWwKUa+EASeIBSCwLMVVat5xbubOZ+M8ivG99mocHHe+xwhJWKSstKbxwz66HSIDEMIjxAXJWKYouqavL9JXOYMF+ZZCPb3cy7CEP/kGfyTZ4iwJqPy/TcpX3sZdeHs3pykLMhf/Fvy3/w9wSNPED39PN7J0+79ckgQUYQXRcAsIcwUuRWL6mpmNGquv7MgJGYJRZMEoulFO2XWT6432ba3nwrEvt9se1Yc8n2EqjHp0CaUTQhCZLuL7PYQ0d6mpTkcDofDcdhJooB7Ty3RbcccWexy3z1LPPPYaeZ7LTb6Kf1hTpaX1lQ0Llhe6t7pITscjl0SeZJSKyIpyZWmVNpGuxxw3txI+d3q6Ir9X6SFbf/l5sDXRQiB6HSRnS66yGE8RmcpQtUzzcYAqrZJzUoR14pI1ywohVaaWcmV2GLMaXqX7TjjNLa3WZOCvsOljbkotMm7YXRFOy/RtFVLm9SiyIOR2p2paHoMKQkefJTgwUfReYY6+xn1F59Sf/Ep6tKFq4x9v2Ao3/g91QdvE3/3x4RPf9O1RHNMmaZeGwNlgc7Gs8K0MseUW5O
tBUyMP0GwxQzozUxCUs5MQ9s+d401LW3fc82/7QA9CAJk3EIkLUSSWPOR44Zwj5jD4XA4rkkrsZVkYRhAk0xU1YrA9+h2YmsqGufM9RKoYJQWdDt39kebLC9ZWRuSNSkkni85fWKRKHT/7TkcW5FC0PKvPjuepBTVTUJRtcV4pMxksSlFQszihj0hiDxB1CQVRVJcNSJ5gtKGokkoKpWmaM43EW3CyQ/L15jMa22nDfLyVAystgBwrBXsaCoaVYr/76drLMU+C6HPQuTT9b3p/ZukNNVb7rfesr6nHbEcX78f9GvrKW9spEiETXRFEHmSI7HP0TjgWBLS8eWuzRqmMVOp5nHynMAFgPB9xOISYm5mLvKjkLYXMkpTsmxElGf0L5XMLy8gwuhOD3nPEGFI9PVnib7+LOrSBYrXfkf5zut2In+rqJrqndeo3nkNuXSU6OnnCR57yqUXHSK2JoQBGFVjqgrqGqNqu65rUM0yxYBRt6yhmqts2+jsEBFFtvotaiK0qxK9uYbeXIMwQnbnkJ2eFa4cDofD4XDcEnEUcHSpy3/5L3/MxZUhnhTM91qA1ZImpiKDoSxryqomDJwu43DsBxJPMqyUbXlWQaH3sxli95zpRDuainKlWckrjiUuce1GkFEMUYxcWESXhTUYjVOEqpq0kiv/T9ith2fmNTJ20QpjNKhZS3mjFFQV6BrqClNXQDozGbXaiLg11TMTCamCTMO83F0LtKve9zhBNgYjYJvJSK1cRI+GmHGT7LKPMHlG9rf/gfKNV0h+/Of4p++700Ny3EUIISCK8ZqiLlPXmDyzhWl1Zd+PRoOqQFWYRqK/9v8wTaHZtOCsSSZDNgllk9ZoTRGa5yGmadpNIZpWUChrdOw3LQqDEJm0Ea0WImm7NKNd4L7FOxwOh+OaRKGP50uoNXHokxc1eV4RdDziKCAKfYqyZjTKmO+1Wdsc3TFTUVHWrK4PGaW27Ys2hlppQuDL8xucOblIcAiqahyOvUJMU4qubeYxW8xFnpikDd0YnhS0pEfrsv16i5lnZuqxyUpqS6KSYbuZKJCiab8m2CxqZJNedOwaxp8v0oIv0ptrG/XHJ+d3ZSoaVopLWXXF/g+3tJVu+ZKjcciR2H5Vz5Qmr7VdK7uut7Sfm/Cnp+b5+oKLIN6K8DzEwiKiN4fur9Majhi32uRBiEnXKZUiu3CeeGER2e3d6eHuOd7R47R++o+Jv/fH5L/6GeXrv9uz+G29donsb/8D2c//I97xkzYt6dRpvJNnkMnl72THQWUq1OyAmYi7k2VSPdpsm6u9FqfXZ/vrVWtbyaq1rWpV2gpDSllR6vLqN+FBnCBbbVuFVhbotUvotUuIpIWcX0K2O3v0SDgcDofDcfiIIh8hBHFki9CUNtS1wvc9wsDHk8IWjxQVcRSSjgvCOfdzhMOxH4ibxPdQ2sRpZQy50tvahwvsD8EGm1ytabwdzd9iyxW35JECTQsrM7m9xTSakietfuNdJQn7dnKyFeIJgdphrvJ5WjhT0S0waVEv5xes0aCummIUZQtT6gpTN3O7rXPIqzB9aUzMBp68ar66URpTFVDaVt7UlT3fYBOTjhDtLiJOiITNTtdYQ1GMTSta2IP/ui43Gc3GVmPGY3Q6xIybtuNmMl/eMoeePC4wS2yapLYrZdtIlUWTLFxAkaPHI/TG2m1pw6ZWLjD6N/9PgkefJPnhnx5ITc1x6wjfR3S60JmlVc6K06qmKE1t13uMBqXZ8r/DZa99YHtY0baLLt9GWKMRQYSIY2QY2dSyqkRXJQw2QEhbhDZ/sApP9xr3Ld7hcDgc16U1aYEWB9ZUVFRT41CvE7OyPmKYFvS6LYqiZpwVtJKv7j/fulasbaRsDsfNhNQwSovp38eW5wD48oI1FrlWaA7H3rLNfLTHSCEIPcH1ZJtJ2zawhqKtkdRZbc04oRTUnuRYEnBxB2PPraB3OUHfzfXGtebTUc6nVxbHXefYN3b9w4TwPLz
FZZIoxltZRwUB5cIy0ajPqFCEm2uYPEMuHjmQlSmy1ab1J/+I6JnnyX72l9SffbR3B1c16uznqLOfU/yuOd/iEbzjp+yEvNVBdDrIdhfR7iBbnVnrKseBRkwqyK52+R6ey1QlpiynQiplaZORshE6G1mDUauFbHWQcYzJxqhsDEdPIHvzezgSh8PhcDgOD2Hg43kSpTRh4FGWiqKs8X0PIQStJGSYFqSZNRUN04KFOVcE4XDsB0JPTpOQQykptE3q+arxGnORL6zO40v7t9ckNUthtRBlzDRN2m5bfVggZh2YmZmUJqnPhsav0RihPCk40Qr4Mr0y6ffzUcE3j7g2jnuB8H2Ev/ufp23BCkxtaJMnjmafNk0rtdqaEbSyf9c1pigQnkR4tj0TgNHapqekI1uwMtjApENEp0sSJKTatkCLPUg1LOzx/d+K8HxEt3fbTDmmKlErF1GXLqAunUNdPL+nrdiqd9+g+ug94m//gOi5797Q8+o4nMyK066dem60nmxsL1ibmO70ZL9dmyahzBahqalpEaOh1tbElI2sH0l4EMeIKEbGCSII0IMN9GADkbSR84uuCG0H3Lvb4XA4HNellTSmojAAMvKi2nZZ4EuqWjNKc3qdhLXN9CszFW0OxlxaG2KaX9PTvGCzP6aq9PQ6l1YHHF/uQQlnL25yz/GFHdsjORyO/YsnBd5VfqKOPEmuNIGUoDQnWuEdNBXt6Wm3H/v2HfrA4LW7tIXPcHWdXJW05hdIxyGLo1VMPkZdPIu3dBQR3dk2nrcL78gx2v/8P6f++H2yn/+lrVi7Dej1VfT66nUG04iIftCsfduuKggQfmjbUwUBwg8QYYTodJHtjm1d1ekiO117fYcDbMuzIIRG9DHGQFmisxQzHlkhKR2i0yHa85G9eWSnix4NnanI4XA4HI5bIIp8xuOSMPQpS0VZ1bSxelASWyNRlpdAmywvqZXGd4VeDse+IPYkaa2IPEFtxLZEIbCWhIlRp2lEMy3w2lpbcDW5ZLvZx+5TZlY0Nmn1rgyUe2SAuC4KFqOdTUVnxyW1NjeVzu24NcTkxXKN0pSrphRpY1OKihyT55iyQEgQrTYmaWPGQ0yaWiNCf4PISxm2ejbhummBVmpDuE+fdxGE+CdP4588Pd2n+huUr71M+cYrmHx86yepSvJf/g3lm38g/sFPCR561BWSOW6Z2WvoytfSbt+Nxhj73q5rKAqbbl0UTRFaislSazKKYmRnDtlq2X1ZivJD5MIi3tzttBXuL5ypyOFwOBzXpd0YhKLIx/4mbyjKmii0UdcTI9FglNPtxIzHJXlR2Qjs28gwzbm4YnsG5WXFZn9MXtSANRj0ugmjcU5VaS6uDjm+3CPLSi6s9Dl5bP62js3hcNw9RM3EP/QEVHB/O+atjTHVHjp81C4PtVOE9l6xW2PTYacTRYwWl8hHQ3Q5glabKgkJNi5BXaEunUf2FhC9ua88av1GmFRWyhscoxCC4MGv4d/3IMWrv6P4zc9tvPZXjaoxqrbC3k0eQsQJcm4B78Q9+Kfvwz91r6skcgCN6BxFeFGEmVuwEfDjFDO2YrHOc2Sna6vYHA6Hw+Fw3DRxGDAel0SBz4iCoqxnl0UBUkBda8qqJgx8xuOCXvfalekOh+PuoO1bU9Fc6DN3B7p+aW2ooWn/vmXRTLcnxqZJy3spJkVnAMImEDEJtjHTsBshtpuhaBKNNsuapWjnn01rbXhjI2Wx+WybmKGMYdqefmKEMgYe7MXTtKerMawUHw9z8qbdvcAWxoVSzNZSEniCQAoCYdc2scmmNxljqLSh0IZS6elaGcM97YjoOkbOWhvWi5pS24TvxSg4UMYpIYUtHIti6DWvgyLH9DehzBHtLibpYNIRJhsR1iVysEkdJ+S9LomwaUXhAfLIeHMLJD/8KfF3f0z1/lsUr76EOv/lLR9Xb64z/p/+P8iFJaLnv0v4+DMuuchxRxFCNMWMwSyprClCM0VmPwvy3LY
LLHL0po/o9GzaOiV65QIUBd7R43f4ntwduHezw+FwOK5LEHgEgUdVKaIwIMsr8qIiCu1/I2Hgk+UlSRySjks6rYj1zfS2GnfKqubCpT4Ag1HG+qZ11UsB3U7MXDdBSkkrCbmw0qeqFCvrQ44d6TEc5Vz0Bhw74nr9OhyHgYmAEjZiju8J/umZRf76fJ+Nor7WTXfNrpOK9uRsNz+GWhsuZiW1MdzbOZhpPNcj9CShlFTdHqVOiAfrpAKWjp1CbazCeGTjboscuXSkieSdYYwhN5A3T+akYG5r/YwUEAtu2ZSkjGGsoTJQ01RtbtkGK5N6AjzYtm5LiK8hBArPJ37uO0Rf/wb5716k+P2vof7q4+xvBZNnqDxDXTxH+epLAMjFZfx77sW/5z78M/c7k5HD/lgQ26h7HYa3LaHL4XA4HI7DSBzbYrKw0YfKSk0vk1IQRVZDGuclYeAzTHNnKnI49gm90KfQhmGlrqs3TIw9QoDEmntmyUYw+ctMr395WzK7v9ZQN63MpBSEcM2EGG3MDRfaXIthpViMfAIpdixE+7vz/V0f6//8+Mnrxmn0y5q/Obd5g6Oc4UsxNTHtxP/xoaPXNRWNasV/+9Gl6d8tX/KP7lnkdOer6ULwVbNtfphl6P46oioR3S6m3cakI5I0I81zRnVFvNBjJPzb2gLtTiF8n/Dxpwkff5r6/Jdkf/cXqPNf3PJx9cYa2V/9T+Qv/h3Rsy8QPv08Mnb/9zvuDiZFaCKyn3GmrtGjwawdYn8dNdhEdLp484voYd+ZihqcqcjhcDgcu6KdRGxWY5LYCkKX1ga8/8lFPvj0Ip99ucap4wv88FuPMBhldFoRwzSnqhRB4O35WLQ2nL2widaGvKhY71tDUacVMd9L8P3ZOQPf4+hSj4urA/K8ZnV9xPJil83+GN+TLC24HxsdjoNOIJvqs6aSSxnD0STkv3joKP1KsVHUrBc1G+VsnW4RwydIIfAleEJMFyns/mSXEf6P9GIWQg9trGFEG1gvay5lFf3y1gxOOwUv5UpzNi04Oy45Ny65mFUoY1iOA/7zhw6nqQisSNavFFkQ0T5+itH6JZZ0jr+0jI4T9MYapshQF84hF49g4oSxhrGGzOwumSoScCIw161MvBqpMqyo65/LAHVjNNoa+bOpoOcZjnjXNjeJKCb5/k+InvkW+a9/RvnG76+eT78P0OsrlOsrlK+/DELgP/A14m9+D5aO3emhORwOh8PhcBw44rAxFQUeQlq9pqoVQaPLtJKQLK9IxwXz3RajtLhtWpHD4dh7luOA5cY8uLXt2dQnJGxhzV6n/CpjqJs2aLW2S2XMtDVarbcn9wpsQpHfaDVbk3Z0Y7qx47bbQohm3PYYo0qhjCGQAmUEx5OQL9LipscvhC32uR671ZKuRn2dBO7rGYpglu49YVxr/ucv1/lXjxwjPOAtrGSSIJNT6HGK7m8g6grR7dEKItL+kLxWqNUV8rl5Sj/Zty3QdoN/4h46/9t/RfXO62Q//0ub8nuLmPGI/Jd/Tf7bvyf82tfx73+Y4N4HbHKUw3GXIHwfb34R05vHjFP0aABViRmNYH4RjEu4nuBMRQ6Hw+HYFa0kZHMw5uzFTf7D373B5mBMGMz+Gzl3cROjNWXJNLVovZ/eljSgCyt9yrKmVoqVjSEYaLdCjizubBCKQp/lxQ6X1oak4xLPS1mca7O6PiIMfbpt90XW4TjISCEIpJxGOWfKUGpD5EnmQ5/50Of+7vbblEpTatNESjONlb5V7u3EV00IypXmUlZxKbfmn2Gl8KU1LCW+JPYkiWfXkSetoYmZsam3gzD/4SDjP57dvGL/alFRKL0rgekg0vI9+pWiUBoT+ogjx8nLEUl/HdnuIKKIamWFQVUxXlmjaHURnd5UKNUGcmOTpyaPoJiIqdhJVgGcr27cWKSNYVXBsPG1lcYuujEO6cbUpLCiqMSmE8nm/J4Av0kqGjSmpKP+9as3ZadL66f/mOj575L
/8m+o3n9r12O+azGG+qN3GX30LvroSbonHmB4/MysDNbhcDgcDofDcUsEgYfnSZTShIFPUdQUZT01FbWTkI3NlKrS5EVJHFltaXmpe50jOxyOuw2xxcCzfWPv8YTA8wTXyspRTVs0Twi8WzR7KGMYVYpQCnLFLZuKfCF2ZbSK/duryUS7cDYFOxiHslrz4SDn8fnW7RjWXYdstRFJC5OO0JvrJHGEL3zqwYBC1cT9DQaqYGlhfs8NdHcTQgjCx58mePBR8l//jOKV3+yNoaIqKd98hfLNV0BIvFOnCe57iOD+R5BHjiIOuHnNsT8QUiI6XUQUoS6cvdPDuStxpiKHw+Fw7Ioksc2zW7EVgDDbI2arWrE5zFiYa9MfZSRxSH+YsbTQwd/DH603+mOGoxyDYXV9hKoNYeCxNH/txKEkDlla6LC6PmIwzPE8yVwnYWVt6ExFDschIPYEpaYxFUGlNdaKsTOhJwm/4uLZ2JOc6USc2cOI6VOtnY9lDJzPSu47pC3QfCkIpaDUhlRpuoHHuDNPp9Mhv3CWfm3oL55ADweY8QjGKWWWU3R6FGFCZbaFAl15fOCof+PGolwbLtW23RlYU9BQX/tcqllm2fKTY8GiB6mGCzUc83c3Bm9hifY//t+gVi5SvPoS5TuvQ1Ve93Z3O+bCWc589jFFZ47Vh5/E3HsKrilROxwOh8PhcDh2QxwFpONiaioqq5p0DJ+dXePTL9cQAh576CSDUU4czbQieRckPuRFRZZXtOKAKAru9HAcDscN4EmBt0fOpkhKRqjGYKM4ntzaT6f+Lj/f4ttoppDCJjddj7S2BW2Xpx6dH5eHxlQETVu0ThcRRqi1S7QUDOcWyLIhcT5kOEqZrzK8paMI/2D/tC6iiORHf0b45LNkf/sfqD//eO8ObjTqy89QX35G/su/Aekh5+aRc4vIhUW8uQXk/CKi3UFE8WzxXMKhw3GnOdiffA6Hw+HYM3xPEkU+p08skES2BZrRZluW6/lLmyzOt8nzmqKqiQKfzcGYI3vUYizLSy6tDQBrLsqLGikFy4vdXYlRnVaEUpqN/pjNwZhOK4IKxllJqzFNORyOg0nkSagUQbMurxMRfVCYDz1avmRcX1lZdG58eE1FYNOKyrImq62pKK0Ul3yf4cIJ9PoaZCl1p8fIj8hGI5TSMBiBnyPaHUI/wBc2rWjq52kShGpgpYblxlh0sYbj10gLMsawqWBDzdqZbSgomgOHAgJhbXDelrWgSS7CjmOSZpQ2bdpWFSx5kGk415ibdiMqAnjLx2j99B+T/NGfUr77BuXrv0ddPHezD/ddQzTqc+oPv6T67B3KP/pTgsefPtCVhg6Hw+FwOBy3mzjySccF/eGYl179lIurfQajfHr5fDfhsYdOMs4r6trGcQ7TnLlucqeGTFHWrK4PGW1JIjm23GO+d3h+QHc4HDPCRt8OGn058T0enkv4oJ/d1PGC3c67pSD0BOVueqzfIJG3u7Skl1dHO7ZRO5/t/+Kim0GEId6xk7TWVhkNU8pWFxEGVMNNyqIgvHgOubSMjO/c/2FbqYxh0CRd+8LqR2GTuL4VZQy5gULb5O3S2LTrRQ86V0m08paWaf+Lf0n1wTtkP/sLzLC/93dAK/TGGnpjDT69xvWCsDEYRYjwsiWKIYyQ7Tay20O0e3adtFwKksOxhzhTkcPhcDh2TSsJKYqaB84s89b759DGbMv5+PCzFZ576j6yrGIwzFhe7LLZH7M4177lCrRaac5d7IOBNCsYDK1AdWShQ7BDy5+rMddNGI1zqkqT5RWdVsRwlDtTkcNxwJm0+Qon6WrabEtbO6gIITjZCvlwkF9x2bnx4RSIJrQ8ySZQaE3dtLob1QohPaqFI/RlQLa5CX6EmYsQ+ZgwS4nrgrifEbRatiXaZQJFbayhqAJWazjiW1PPhR2MRdoYUg39LQaisYZNZU1CApj3bCuzq7LDSzgRsN4cc6UZQwmcreCEbwhv4P9kEUZ
ETz1P9NTz1BfOUb7+MuW7b+z/9KJ0xPgv/i3+O6+T/PQf480t3OkRORwOh8PhcOxL4igEUgajnLfePwsCwmD2s8PmMKOqFYHvMUxzFubabPTTO2IqqmvF6saI/sAaBQyGoqiJo4CLKwM8T7o0a4fjEBI183q/Kd4xwE9PzNMLPD4a5IyVQjdtyQ0Gc5kHR1zWnr59A1r10Tik1obYkxgMpTYUSlNqQ6n1TRmOhIC2v7sxHEt2TmlbyStKrQkPoSlDSElr+SiB2KAabFJEMXG4zKi/xmI9Rq9cgN4Ccm7+ho9daGvuqQ0kElo3+ZuJNob+luK0y/GEmRanlWaWiL0VhS2Cy41hyWNHE5oQgvCRxwnuf4jidy+Sv/RLUPVNjfmWqEpMVWJGN3AbYdtZyd48/rGTeKfO4N9zL7LVvm3DdDgOMs5U5HA4HI5d004iNjbHPPrgcd56/xzGGOzXVvuFsz/MWNsY0Yoj0qxkvqlA6w8zFuZuvtrLGMP5S5vUtaKsFWsb9tvjXDe+KTNQO4nYrDJG49yaitKco0e6LqnA4TjATEwUnhR4QqCModJmV/3l9ztXMxWdH5eHwlh1NTwpiD1JrjTjWtkWaLVmWGkqoyFq4R0JidMhSZkRdluIdowe9SHPMNkYk2eIOLHVT4H9/8gX1sSzUlsjz6qCI01a0MWmDVlmYKRsotBE19HGmonGW9KJFj17vBsllnBEwFpjbrrUpCYBnGvMTfFNCFf+8ZP4x/9Tkj/+h6jzX1Kf+5z67Beo819g8pur4LzT1J99xPC//r+RfP9PCJ/5pqticzgcDofD4bhB4sh+0Xzo3uXpr/HGmG0ay/pmyrEjPUbjgrlei6KoyfKSJP5qCryU0qxvpmwMxjZ1G0jzgv8/e38eJdl11/min73PHFPOmTWrVCrNsiZrsGUZydjGwgNtY4OZGhsaGpp+t7u5TUPTt1ksGrqbXt3cexswvZ61ni8P7n1AA2awMQYLeZBlWYMtWbNUGmoesnKK+Yx7vz92ZFZmZVZVTjVk5f6sFSsyd5yz946IExHn/Pb39/3N1DtkmWJooEy1HHL0xAw7tw5QimyZXItlM+HOxooAT0pSpdBo7tvSx31b+hZtr7RGY9yKZ4VEq+UHrxw+6+OqF7/KlCaf93emNLlSOFIQSInvnLr3xPJcisDEjJZCaxjvZuwob97vw3KtSsP16DamiSTEg6OI9hS63UA1pimShHhgmFRIJOZYcOk5TPcEPZk2zkDdnkPQfI3YTAGDjmZghYGfttIm3tPrK+k5D7kCPMx9cdpYcEpclGjIlBE11RyT6BYrGPP0GV22hOcT3vMuvBtvJf7q35Pte3FFc74oaIVu1imadYojB+DbjwEgB4dxt1+Bu+MK3J27kdXFn3GLxbIYKyqyWCwWy7KJQh8EXLtnC9KRqEKhFMxff3vmxUO8+57r6cYZjVbMUH+Z8ckGriOprqLMTqudcHKqQZoWKK05OdlAKRO0Wq0tdbkUMNPoEsc5eWGET61OYrPRLJbLGCkEvhSkSuNJQVHMioou9szOP9tK2lrL9AABAABJREFUSweAMqU5GWeMbWKntlJPVNTKza3opRsKoOI5VCIft1ZGxV1jxZxnOH0DqKhsbJ/zzIiLuh1w3J7AKMJzXEZmhUUaJntlyDoK9qcLs8hybdrbymSJAdQkVKXJLlwtvjBCoonclEQb7zkW+RjXpJ2eXmSHvVyE6+Lu3I27czcAWinU1AT50UMURw9R1KfQ7Raq1dwYjkZZSvfLXyB9+TlK7/tHOEMjF3tGFovFYrFYLBsG13VwXEmlFDLUX2FyurVIVHRsfIbtW/rJc0Wnm1IpBUzXOxdEVNTuJBwbr5tyxkCcZEw3OiTJKaeFyek2UgrKUcCR4zPs3DZIGCzt3mGxWC5PfCnoFhpPQqogVZoz+anNiYguQI6WFILAEectftXvu3MJV6dztJNualFR5Eiankc6MIyOG2Rxh6x/kNT1adb
rdOIcfeIksn8Q4S3vN0Npk4CmNJSkcZnWaAaXISzKtGYiNzEkMPGkegHd052zMIv/rjC3WTHR6e9wpsxjgw4kGHfrUVef1T3J6Rug/H0fJ9v/Ot2v/C1q8uSynvelhJqaIJ2aIH3uWwA4O64gfOs9uHuuucgzs1gubayoyGKxWCzLRkpBFHig4ZorR3n5tePG5WLeFdTrB07yrrddC0CzHROGLuUw4OiJGYazCkMDlWWNlSQZ41NNOr3yPLlSTE61yDKF4wiGB1fvLOS5DmHgEic57W5KXyWi2YqtqMhiuczxHUmqCnwpiAtIlQIuf1XRaOjhSkGuFnsdH+2km1pUFLkSLxNkPTGRIwRVz6HsygWZhjKMEFu2o5t1VKOO9H0YGkGlKTruQBxDkaPbTXS7CZ6P4/kMOh6T0iWWHpMYxyJ6GWNdDZ3CBJNmcTHBHH+dzHLcecKiDCNyGu1dAXYUVNfp8BdS4gyP4gyPws1vXfCYThNUu2VERu0mOk2NGCvLzGuWZ+g8h6x333tM56lpy1J0p31BnJCKY4do/sH/IHz7fQR3vAPh2stli8VisVgsluUQBR6tPOGKHYNMTrd6saJT7D8yyf1vu5aZRpdGqzvnGp3nBe4yS/SshjTLOXJiBq00aZYzXe/QjTPAOErUqhFFoWi2E05OtXCGBWHgc/jYNLu2Dy4o42axWC5vAkfSLRSelIAiWyKGcjkihWBLyWd/c2mH681M4EjjYCUl2cAIYafJkfo0BBX0QICemSYvcuLJSWS1hhuVcATGtQjzOzMrIkqUuWWcSjTLNPQ5poQZZxEWFb1SZzO9UmdaQ1OZ22xfQW+s2f6zXv/zs9oEJgHN75X5a/Tck8ZzGOwloR3Lluee5O2+CvfHf478jVeJn/g6xbFDq3mJLwmKwwdoHz6AHBhC3Xwnwu9HO5d/vNhiWSn2rNhisVgsK6JUCujGGXfeciUvv3YcrRQ4C1c/n3vlCLfduIt2J+XkRIu8X9FXiZiYapHlBWPDtTMKgvLCiIdmGh3AKPUbrZh6o4NSxrVhZLCK66xtxbVcCoiTnFY7MaKidkxRKJw19muxWC5dAilpUfQCRMWmCRC5UjAWeRxpLw4GHemk3DZ0ESZ1iSCFYDTy6OYKKQShc2aLcCEEotaPKFdRM1PoTsuIi3wfXQUdd43AKE3mar17wIAWTOLSdVzGXYlwXGJhbIiE44AQhI5DyRFEwgSd1hNnVlhUmAy0rgLPgdY6iorOhvADHD+AgbUdaDrPjSip2UC1m6hmneLYEfLD+9Gd9jrNFlAF8aMPk774LNG73493xVXr17fFYrFYLBbLZUoU+rTaCXt2jfDt5w6aEmPzzjWbrZgsK5AC0rQgTjLCwGOm2WV4mclnK0VrzbHxOlpp4iTj+EQDtIkrVcoBfdUSriPRWqOUpt1NGZ9sMjbSR4DLoaPTXLF98LyKniwWy6WD37sY93r3myVmBLA18pYWFXXTRc5zm42SK2lmBd28oFTrA98nnTxJR0K3b4Sk1TRxoGYb4hRRriAcs/Q++6qdfiQ5GKfqZs86aFZYpNEMzRPzaK1pKPPYbCmzWMNMz5EajEBowAFPzO5j+s60cTIqMEKAQJr7+W9lKGGq19fJHPodKPfckxKt2eKd/X0XUuLtvQ5v73Xkhw8QP/ko+RuvrODVvbRQ05MUX/5brkEyuecGpvdcd7GnZLFcUlhRkcVisVhWRCn0mQSu3DnMYH+ZqZn2oouLZ148xH13X4OUgmYrYXqmQ54rBvtL1Btd0qxg+1j/nIAnSXM63ZRunNLuJKjZ+vbdhOm62RfA9x2G+ssEvrETfWHfUTrdlDtv3r2suc8GihxHUo58pmbaZFlBmuX4nkuzHa+6pJrFYrn0CXvfOf68ANFmCY5sKwVLioqOdmyASApB2Vv+QoFwHJyhEXT/ILrdRLVbiDxDRBFEEbpQ6DTuuexkBEXOkMqZLDRJIZj1JvLQlFBEKBwBCIEWkkJ
KhJSmtqiUIKQpreY64DgIufJFDSlMYCgtjKio5pj7Qq++BNqFRrguTt8A9A0saNdam9Jrhw+QH95PfuB1U45ujajpCdp/9gd419xIdP8DyGptzX1aLBaLxWKxXK7Mlgq7atcIQphFTbOMeupc8+DRKXbvGKLZToyzdeAx0+gw1F8+L9cjk9Mt4jijUIqJ6RZoiEKPwb4y3rzzfyEEw4MViokGcZIzPtFgy0gfAIeOTbNr26BNQLNYNgHBaTGjXGvjurZBrpnXwrbS0g7WnVxRzwr6/c27lFxyeqKiQtFIC7raJamOoOtTaJUiqzW8bhvRbVNkKao+jYpKEJbnfgIdjJNQIM29K6BVwMw8t6F+xzgR0RMWtQrNVNFzG2JxqTMHE9spn/bzJMSp0mfnwhfGzXq61+90YdyU+h1oK2gWmqqzvOPf3XEFlR1XUEyMkzz1DdKXngVVLGvfSw036TL20rcYefU7FDPfhX7ndyNcWxLVYtm8vwQWi8ViWRVR6CGkwMPh5ut38JXHXlm0IJ2kOc+/epS33nQFruMwXe/QbBlb65GhKt1uysGjk/ieSzfO5uraz+2f5UzPtIl79e0dRzBQK1EuBXPjPPPiQT73D89SLvncesNOvHNkjnXjlC985Tlcx+EfvfdWpJREoUenm9HqJAz2uTSaXSsqslguY/zehbArhbEv1ppM6bn2y5ntJZ8nl2hvZQWNrKBvEweIVotwHEStH1nrR6epKe/VaSEAEZUgOrVtVOSMpjmdLEfogkgpXFWYgvZaz7sVoIpFWWwwL7NNCJCOyXxzHfB8RBCecyEmEjDNKQtsT6xvCbSLhRACZ2gEZ2iE4JY70HlO+uIzJE99AzU9ueb+s1dfIHtzH+Hb7iN469vmMg4tFovFYrFYLKcIAw8EVEohw0NVTk40Ucpo5Gd58/AEN127nWY7od1NGSjMYmOrnVCtrG85+k43ZXLauFlOzbTJc4XnSkYGq8glrEGFEIwOVTk+0SBNC05MNNgyUoMUjo3X2bF1YNE+Fovl8sKXAoFJPJofMwo2QcxoS+TPE4Qu5Fgn3dSiIt+R+FKQKk09M2sV0pH4wyNEnQZ+u4lTK6PLAapRN65F3Toq7UCtD+n5LHUIVYxxNdOFcZLWGMehmQJaSpP33guloa5M/Gb27alIqMn1cbqWAoZcaBa9cTQ4yrgnNVcRM3KGRyk98GHC73ov+Zv7yN7cZ5K/LkBJ+/VGFjnFU4/SfP1lovd+0DpZWzY9m/eXwGKxWCyrQghBKfRpdxJuunY73/jW6+RFwek5W996bj+337iLvmqE5zpMTDXpxhnHxuuMDVchNZbXAEpr0jQjTjLiNCeOeyfovfr2tUq0IOhzcrLJ3371eQDanZTvvHSIO96y+4xzfuPQSf76oe/QbBkb12uuHOP6vVuplEI63Yx2N2Gwr0w3zsiyYkHGmsViuXxwhMCTgkxpPCkoCk2qNP4m+MifKesMjFuRFRWtDeH7CH8Q0T+AjmNIYnSvBBpFjnBcgsgliJbeX2tA5aZMhFImm0spUBqtCygKyI3gyHhZ5+gi75ketdEIhO9DECC8AOEtzqCSAkJhrLJnS6C1LwNR0ekI1yW4+Q78m24ne+1lkicfpTh+eG2dZinxI18ifeEZwrffh3fNjcZNymKxWCwWi8UCgJSCMPCI44xd2wY5OdFE64VORQeOTOK5DmHgmnL0nYT+aonpentdRUVFoTg2Xgeg1Ulod1KEgOHBypKColPPQTI2VOP4yTpZrjgx0WTraB/tTkKSZASBdSmwWC5nhBB4UpIqNRczMqKiiz2z80/gSIYCj4k4W/TYsW7K9f2bOwl3KPBoZDlam9cqcqVxfY6GUaUyanoCATgDQ6huF92sI4sMpicQpTK6XFnSdXrWZWi6MPEZMMKiXBsxUWuekxGYmE7fvFJn60nVMXGj6Z7DdZ8D8RocrmWpjH/jrfg33opWiuL4YbI3XyN/cx/FiaPr/wTOI6o
+ZZysb7iF6L73IUvliz0li+WiYFcPLBaLxbJiyiUjKqqVQ67dM8ZzrxwxnpvzODHR5NDRKXZtH6IU+YyN1BifbJJlBcdO1hmolcmynDjNSdJ8UXHhcslnoFZaVLs+ywr+/IvfmiuJBvDYt1/n9ht3IU9b4Mvygoe/8TJPfOfNBe1/8/Cz7NgyQKUcIKWgyDVxkhIGPo1Wl6GBytpfJIvFckkSSEmmCnwpiAtTAm0zEDqSodBjcokA0ZGODRCtF0KIuTJos2hVoNO0Vw4tNWKhojCiIKVAK1PT3nER5whWzomPip7QKMvQSWLa0gTSxPycSgcRRohyeUHgKpIQFyYwVHNMptvlaucupMS/5ga8q6+nOHyAxsN/C4f2r6lPNXWSzt/8GfKRhwjuuAf/xtuMmMtisVgsFovFMicq2rNzmG89ewCl9YJQUaebcnKqSaUcEictmu2YvmpEN87WVbQzPtkkzwuyvGBqpgVAXzUi8M/dv+NIxoZrHBuvk2UF3SSlHAY0O4kVFVksmwDfEaSKuZhRqhSLgt6XKVujpUVFRzvpRZjNpYUrBYNn+A2QUYQItqPrM6hWHRlF6CBAtRrQ7aA7bXSngwgCiEoIP2R+COZ0YZHGlCVr9Myt6f1fkxCe59ymWYfrHCNscoURGFXW+BEQUuJu24W7bRe847tR3Q7FyeOomSnUzHTvfopiZgqyS/d4y178Dvkb+4jufx/eDbecl9KtFsuljBUVWSwWi2XFlEKzgBYEHtft3cpzrxxZVAIN4Mnn9rNr+5DZ1vfYOtLH+GSTNCuYmGot2NZxTFZb6HuEoXfGcmZ/97XnOXnavjONLi++dowbr962YA6f/eK3efXNE4v66CYZf/0Pz/DDH7qbcsmn2UpodayoyGLZDASOpJUXeFICRS9AtDnYVvKXFBXZANH5RfQEPoRL2xTpnjORVgoKBUU+51SkVQGFQucZ5DlC6J74qHcZ1+tSFzk6SdBJbAIwqkB3WuhuG1GqIMoVhBCEvZ/IlFMBos46BIguZYQQuDt34/7AJzn+pb9l9MVvIYt8TX2qxgzdh79A/I0v4996N8Ftd9lMNYvFYrFYLJueUugzU+9w5a4RhBQ9p6KF7D88wV23XMm0YxK82p2USilgutFhy0jfmufQaHZpNLtoNBPTLZSCMHDpq57BMnQJXNehXApotGK63YxyGNBqxwzbWJHFctkTSEmLUzGjzZKIBrC15PPcdGdR+8k4I1UK37r1nhEhJWJgEFGuUExPINIEp9aPCkvoVgOy1MRrkhgtJSIqI6ISwjHBmLI0vn5ThYnRzL4LLiYhrHSBXnopIBCQaOhqqJ6nmJGMSshde2DXngXtWuueCKuNTuLFt7jnCp7E6DQxt6R332lBvrZYz3LQcYfOF/8C98XvEL3ngzgDQ+d9TIvlUsGKiiwWi8WyYoLAw3EkFIrRwSo7tw5ydHxmkRXmy68fp9mK52ysXddhy0iNmUaXJM3nbK+D4Mwiovm88OoRnn7x0JKPPfLEPoYHKguCUPe89Sr27T+xZD3oNw5O8NRz+7n5uh00WwmdboLqL5OmBXGSEdoMNIvlsiToFTL3e7b3mdJLiiIvR7aXfJ6bai9qn0wy4kIROjZAdDEQUoKULOcI1EXRExgZkZEJrCQIx0WUXCiV0RoTUGk1IU/R7aYRF5WryKi8KEDUvsxFRbMIKZncexONLbvY/vTXqTSn1tynjrsk3/wKyZNfx91xBe7uvXi79yKHRjbFd4rFYrFYLBbLfGbjKJVSwOhQlRMnG4uutfYfnuTuW/dQLYfMNLq02jGVUkC92aVWiShFq3eBzLKCExMNAOrNLkmSI6VgeKCy4nOzUuQbUVGcotEkSU6a5fieXU6xWC5nZmNG3iaMGW0tLf39qzWMdzN2lIMLPKONh/B9nNGt6GYD1ZhB+j4MDqPzHN3toLsdk0DWbqLbTXA9U77e84hcn0Hp0lQCIaAkemKjC3zoRb2YUaygKqG
juWCfASEEolyB8spFvFprEyNrNVDNJqrdQDcbFCdPkB8+gO4ujoeuhfzgGzT/v79HePc7Ce68F+Ha8wPL5Y89yi0Wi8WyKkqRT7MVE4Ye1+/dypHj03DaerRSmm+/cJD77r5mrk1KyWD/yrP5p2bafP7hZ8/4+GB/mbHh2oK2nVsHeeedV/O1J/Ytuc8/PPoSV+4YxnUlea7odNO5YJYVFVkslydBL7PKlQIpBEprMq3xN0GAaNtZAkTHOilXVsMLPCPLShGOYzLZglPvlVbKZGvFHXTcRRS5sdUOAlRsAioUObpZR7dbBKUasRfRLUyAqH0Zl0BbiqxSY/+938u2eBoeexjSZO2dFjn5gdfJD7xO/NW/Q1RqeFfuxd21B2dwBNk/gPBtANZisVgsFsvljec5OK6kyBW7tg1y4mTDlECbd5554MgkSikq5ZB6s0uc5HTilFLoc/j4NDu2DKxKWJQXimPjMyilidOMmUYXMLEidxlJbKcT+C6OFBSFJkkywsCn1U4Y7LfLKRbL5cxszMiTAoEpRZVrjbcJrpcHfJfAkSTFYkfvY53UioqWiRACUetDlMqoxgy600a4LqJaQ1dq6LiLjtuQppBnJnGsa461AEHg9typHYmWEi1dkMKUtnckCOe8Co1CCShINcwadcXaiI0uZYQQcy7hzvDYgse01qipCfIjB8gPHyQ/9KaJla2VIif+xpdJX3qW6N0fwLviqrX3abFcwtizYIvFYrGsijlRke+xY8sA1XJIN1lcVufbzx/gHXfsxV2DA0aeF3z2775NmhVLPl6rhHzfe5auY3vvHVfz2v5xjo7XF/dbKP7qoWf4/gdup9GMaXcSKqWAZjtmdKi6KbJQLJbNhiMFrhTkygSFEq3JCo2/CUx6+jyHkivp5IsDREetqGjDIqRElEpQKgGg0xTVaqDbTWQYooPQCI5aTVAFYXOaGTcmqdbItYMroKugvAnciuYQAuem2yhfewPdh79A9tpL69q9bjVIn/s26XPfPjVkqYzsH0L2D+L09SNKZUQYGcvzsIQslcz/3uqz8y0Wi8VisVguNqXQxIqu3DnMk9/Zv6gEWpLmHDw6ze4dQ1TLIY1WzMmpJqODVaLQ58jxaXZsHSAKl3dOpLVmut5hcrqFUppCKSamWqChXPKplFa3CC6EMHGvdkK7OysqileVJGexWDYOjhQ4QlBojS8liVJkSuNtgpiRFIKtJZ/9zXjRY0e76UWY0cZGuC7O4DC6f9CU82q3II0RUQRRZJyosxSyHJ2nkGWg1Smh0Wks+DWVEqQz53zNrODIcY1jjnRXLTxyBXhAhhETlXol0KIN/BkQQuAMjeAMjRDcfAdaKbLXXyF58lGKY0tXxVgJanqS9p/9Ad71NxPd9z7kKpyWLJaNgBUVWSwWi2VVlCMTmAkCF8cRXHfVVr79woFFQpxWJ+GV149x4zXbVz3Ww4+9zLElREFgLEC//323nzHg5DiSD7/vNh78o0fI8sWipGPjdcZPNghDn26SkSsFOXS6KeVVBp8sFsulTSAluSrwHUGiIFWazRAaFkKwvRSwr5e1O58jHRsgulwQvm8CV9U+VH0aum1EVEKHJXSnidNu4+cpaX2aTrVKLQpobzZRUQ9ZrVH+Rz9E9uY+uv/wBVR97SXRzoTutCk6bYqjB1kcHpyH4xpxURghoggZ9sRGpRKy1n/q1tePcK2rosVisVgslkuLMPBotmJ2bR/EcSRKKTjtPPO5Vw6ze8cQA30l8qKg080Yn2oyNlQlDHwOH5tm57bBczpIt9oJJ6capKmJ9SRZzvRMmzxXuK5kaI0CoKgnKurGKVCmG2fkebEq5yOLxbJxCBxBJ9e4EhJlSqBtFrZG3pKiomOddNOUgVtvhJSIShUqVXSaoltNVLeNAIQTwbz8Pl2oUyXvlUarHJRaeEPP/b3UkWnaBDiOcTxyXfA8RBAu+/0LJWTKJKCVeg7XQ2t+JS4dhJT4V1+Pf/X15Ec
OEj/5KPnrr8CSr+jyyV56luz1Vwnffh/BLXfYpDHLZYcVFVksFotlVXieg+c5ZFlB4HtcfeUoz7x0EL3EuddjT7+xKlGR1ppnXjzE48+8ecZt7r/7WnZuGzxrP0P9Fd577w184SvPLfn4vgPj3PGW3SRpTqeTUKtE1JtdKyqyWC5TAkfQzsGXEijI1GLnnsuVbSV/SVHR8W5KcVppAsvGRngezvAoKk3QM1OQxIhyFR1GRNMzpLmmU5+hUpRoV6qbOkDoXXk17if/OclTjxJ/82tQ5BdvMkWObjfR7ab59yybilIF2dePMziCu3M37q49yGrtLHtYLBaLxWKxnF+i0AiByqWQHVsHOHB4ErNId+o888V9R3ngvpvwXIeRwSrjk026ccaJySZjQzXCwOPQ0akzCouSxIiQOr3EiLwomGl2abVMWVspBSODFaQ8t61CkmYUSlNaIlEt9D2kgDxXxGlG6Hu0Ogn9tdIqXhmLxbJR8KWkg8KXkjaKdDOJikpLiyA6uaKeFfT7dkl5LQjfRwwOIdQgOo0hTdFZik4TyDOEIxGOKWd/JrRSUBRoraBQoAojMCoKE8soctDaxBaKHHo5hBqB8H0IQ4QfIpwzC2QjAU0g0aarDJOQ6cvLL2bkbt9FZfsuiqkJ4se+QvHis2vrMI2Jv/p3JE98neCOewhuvRPh2zUmy+WB/QWwWCwWy6opRwEzWYdyKaAbZ1y1a4TXDpxctN2x8ToHjkxyxfbla9onplr87VefY//hyTNuc+XOYe55695l9Xf7Tbt45Y3jvH5w8fz27T/BO++6miTNaXdTapWIVidBKY28DE+WLZbNTtArx+j1Pt+p0ptGULH9DAGiXGlOdjO2nOFxy8ZF+gGMbkV1u6jpCQRQGhymXm+SJjF5qwlZRmeon7J3aWVdF1pzMjdW266AQJiMuUCAL4w9+3ohXJfwbffhXX8z3Yf/lvyNV9at7/OF7rQoOi2KY4dJX3gaADkwhLtrj7nt3I2M7KKXxWKxWCyWC0cYeCDAlZJr92zhwOFJlDLVWWZJs4JX3zjOjddsRwjB6NB8YVFjTlh0+Ng0Y8M18qIgSXOSNCdNc1RvgV+jqTe7NJpdZvNEyiWfgVrpnG5CB45M8tXHX+Xg0Um0hiu2D/H9D9y+oFyalIIo9Gl3U7rdlND3aLZjKyqyWC5zTsWMzP3mciryEYIlk4aPdVIrKlonhBSIMIIwmmvTSqOzxJRBy3PjUpTPioeKueSn2ZJnZ4uG6KIwgqKigCxDJwmo3IiX0gRNHdyee1GpbPqchy/BKUySU4IxU+pouJwjhs7gMOUPfIx87w2kf/NZ/E5zTf3pbpv4kS+RPPl1gtvfhn/b3ch577fFshGxvwAWi8ViWTW1ashMo0Mp8pESrt2zhVf3jy9a5HvHHXuXLSjK8oKvP7WPx771OsVZLtrKJZ8Pv/fWZYt+hBDcftOuJUVF7U5Ko9FFCEiSnDQv8F2HdiehWgmX6M1isWxkAnlKVCQwebO51nibQFQ0Enm4UpAv8f16pJNaUdFljIwiRLANNTmBF3cIq1Vi1yXuNCinMY3xE5SGhxDzMuIyrZnMIdem3Khg8Q0B8vR2YapclOXqhT+51hzLINWzczG3Vm/BSAC+0Hi9eZ1+RAsBZQFlZ2XjO30DVD7yI2RvvEr3kYdQEydWNf+LhZqeJJ2eJP3Ok4DA2baT4JY78K696ayZiBaLxWKxWCzrgRCCMPCI44y9u0f5yjdfoSgUnLb8+Z2XD885WgshGBmscnLKCIvGJxuMDtcIfY+jJ2YWjaHRdLop0/UOeW5ODgPfZbC/ROCfvWTa0RMzfPmbL/PGwYkF7QeOTPL//OU3+Sc/eO8CQVIpMqKiTpwy0Fem000pCoXjnNsFyWKxbExm3Vi83se80HrTODsHjmQw8JiMFxftPtZNub7fiirPF0IKRBBCsPRahNYatDJCI1UYh6Lc3M+6FOksA1UgHOfU9X9Px6LzHJ1
0jcAoMyXWdJ6hOy1EpYqIKsw/xEMBbQ2xgtAxyV79myCkIK+4itfe/f2Mvvw0Q68tXfViJei4S/yNLxM/9Rj+DTfjXX097o7di4RcFstGwIqKLBaLxbJqotDH913SNKdcClEKtgzXGJ88peS+7YadvOtt1y6rv9cOjPPFrzzPdKNz1u2EgA+/9zYq5TMLfopC0emm5EVBuRTgey5X7hzBdSR5sbjU0WsHT3Lj1dvoxhlxnOFXHLpJakVFFstliCsFjhAUWuNLSaKMnbW3Ca7nHCHYEvkcbieLHjvaSXnrRZiT5cIhpIMzMoZqzBBNTZOGEbHnUm5N0y4y8hPHcAYGkNU+2oVmPIe5X8xVJGd6ArZ6KxfsZT1BUaah0DCZm9/+QJjAbgBIYay4k7PMqwmUlWZkFVe93p5rcK+8mnz/ayRPPkp+6MylWC9dNMXRg3SOHkQ8+jDhHffg33QbwrPiQYvFYrFYLOePKDSionIUcNWuEV5+4zinr0O+cfAkrXY8F9cxJcuqjE82iJOc8QkjLJJSkuU5WVaQZQVpVpDlxdy5qeMIBmqls8aHAE5ONvnK46/w8uvHz7jN+GSThx97me95540LnosQkGVqQQJarWrdBiyWyxW/l4AmhcAVglxrMqVxVpiwslHZFi0WFfUHLpEVU15UhBAgHPCds7sUqQKd55ClkOXoNEYnCcJ1EW4VylW0UugkQXdaRlzUbKA7HUSlOuemE0poF9DtiYlixaYR12nX5cRNd1LfsYdbX3sGJs587rBs0pj0mSdIn3kCEUa4e67Fv/p63N1XIdyzC6ItlksFKyqyWCwWy5ror0WMTzSplAOarZhbb9jJl77+IlrDdVdt4f3vessZSwoppTlyfJpX3jzBvjdPMDHdWtaY77hjL3t2jSz5WJxkNNsxnW46Z9XabMVsHe0n8F127xjmtQPji/Z79Y3j3H7jLrpxRpJmQEh3iawMi8VyeRA4kk5e4DmCRJ1/O2vV+0Jaz3JNq2V7yYiKhIDhwGNbyWdbyT9jabTlorQm7b2OvhSXxHO1LI2s9VN2ferjE2QAAyOo5gxx0iCcnmKyk1CvDSKkQ6KhWcxzJmKxa9FSbUHvgSMZjLmaaJnOgqnSHOu5I+UaJnLIAWYFRD2Vk4Mpgeae1u3sJ1kCVQltBWkG5aX828+BEALvyqvxrrya/PgRkicfJdv34tJe8Jc4ujFD9+EvED/2FWO9fetd1nrbYrFYLBbLeSEKfKbpEAQue3eP8tLrxxZtozU8/+pR3nbbnrk2KQWjQ7U5YdHx8cYZx3CkoFoJqVWiszpYx0nG333tBZ575fCyTuEef+ZN9u4eZc/Okd6cJGHg0Y0zOt0Ev1qiZUVFFstljRAC35EkhcKTgrwwoqJwE7i0AOwoB8ykBdtKPlsij60ln9I5SkpaLh2EdBC+A/4pF2qtFDruorsddNw1cZsogihCdTvoVtM4HdWnKTptZLWPwPUQmBJoqTbxl46C6iV4KHSVpl6Y+FCfw7o50cf9Q7g/8EmCl79D9+v/YIRa64COu2QvPkP24jPgejhj23CGx3BGRs398KhxrbJYLjGsqMhisVgsa6JaiRifbBJ4Lp7nMDbcx/13X8v+I5N85HtuQ57FyvGRJ/fxtSdeXfZYjhR8113X8I479i5ozwtFu5PQasdk+SkXIt93EEKQJDlT9TZjwzWu2TO2pKjo5FSLTtecGCapqVEcJxlK6WWXWLNYLBuHwBF0cvB7F5ppsf4iAaU1nVzRyRWJMt9NnhAEjiRy5UUT3tzQX2J7yWdrySc4R6ZZrjSdvKCdK9p5Qad3384V3ULRnbsv6BZqLlAvBPhSEjiCQEoCRxJIgZQCRxjHpNuHKoyEa8vGUVoTF8rMLzPzyrSm4kpqnkuf75zzOW5W/FKJcHiUZHqSWKWU+wdoxT4zjTrdJIPJk7Qr/TTdcLFJ0TI+LhIYdsEHjmUw4mqq58jsTHqCoqJX6uxkzynJBSrStKU
aMkxgq6vPPpdYwWDvind8jTphd8t23A/9IMXMFMm3HiN98TuQLnb8utTR3Q7xow8TP/F1glvuJLj7nVZcZLFYLBaLZV0JA3OO73sOo8NVapWIbpwuSjh79uXDC0RFMCssqjIx3aLTzZACXM/Bd03MyfMcfNdZUKLsTMRJxh/+xTc5frK+ovn/9UPf4Wd++LuIQpN0UYr8nqgopb8nKrKxIovl8saXgqQATwq6BWRKwSLPtcuT6/tLtszZZYaQElEqQ6mM1hqdJuhWE91pIaMSOiyZ/7styFLU1ElEVCKI+ogRxAp8xyRtXWqiombPZXuWRgEDjqbf4YyJ7itBSElw+9vwrr6B7le+SPbqC2vucwF5RnHkAMWRAwvHrfYh+/qRlRqyXEGUq8hK1dyXK4hSGRGEtoya5YJiRUUWi8ViWROuI6mWQ5qtmGo5YGqmw54rRrjnrXvPGWDZe8XIskVFV2wf5Hvvfwsjg9UF7fVml5lGZ24hW0ool0Iq5YDAc8nygiMnZkwAKE65evfoGcc4cGSSraP95LkiLwpcxyFOMkqRLRNisVxuBL2LLq8nOEmVopkVhI7AW8MF2azIpZMr4kIt0DsITFmnLC9o5YVxc3EkoSOJHIl7HoLSuuce1C0UhdaEUjIQuAwE574MaOcF/++XV2fxqzUkhSIpwMg/FnNdXwScXVSktaZTKOppwUyam1uSM5Oa17CTqzkXqDMRuZK+nsBoNPK4rq9E1bvEoiAXiZLvkg4MkXRalJMmnbCMdgPUzDTTuSKuNyHMKJcrBL3jUy9xQ8/7GyP+yTGioEEHIgnjOeRaM3C6tVCPrtIcz4yIKO05FCnMETLswnw9kpoVF2lzdC3VY1tBihl30F16m9Xg9A9SevcHiO57H/nRg+Rvvka2/zXUxIl1GuECkaUkTz1K+tKzlN77QbyrrrvYM7JYLBaLxXKZ4HlG9JPnBWHgcfXuUZ556dBcyZRKKeCma7fzlmu3L7m/lJLRoRpa6zUtCD78jZdXLCgC43b9ha88x/e/73aEEJRCn0napGlBnhe4rkO7m1A9R8k1i8WycfGlBIpefKg47+7WFsuFQghhXHCCEFWtoWemIIkR1Sq6VEa16tBzNfLTnG6lnxiXmmNKoa31t3k9aRSakz1BUVuZuFEoYKqAloLhJVyztdbE2mwfK+NuNOSe291IVmuUP/SDZG/uo/vQ51GNmfP0rHrzbNYpmvUzRFR7CIGISohSBVkqI0plZLUP2TdgBEl9A8haH8KxUhDL+mCPJIvFYrGsmb5qRLMVUyoFTNU7pKmpcR/4Z/+Z2TraT7nk0+6c2TqyFPm8994beMu12xedsLY6CdP1DgBh4FIph5Qif4Hzh+c61CohjWbMdL3NttF+to32cXR8cWDptQPjXLF9iDQrSLLciooslsuYWfcar1eySQMzPZcyRwhCxzgKOUKYLB6MUGZOSKE1CiMiUtqIHDSaXGnUvHE8ISm55iYFxIUmLgriQlP0BEhxoZgBQkdScY3IaC0X6EprkqLnIFQo5se+OiikEETuuYVTkSMR4vxVelqOS9PL9S5/e3h6TeN0c0U3TznehVfqXZ6aaPHRK4YZjWzN8siV1DNBWq5CtQzTE+QaJvtGSNstiLv0xW0qRYys9SO85b1mSsNkYcqVTRbQhylFNlVAqjWDriltNisMSrUJ5mh6+/QERb6AIWehoAhA9gJFZ1vGKUvTT4YRKEVq4ePFGg9s4bp4u/bg7dpDdN/3oFoNsv2vk+9/jfzQfnRneSVdLza63aT9l3+Ed/3NRO/6XmRkM1ItFovFYrGsnSj0aLYKAs/lqitGef6VI9xwzTZuvm4He3YOn9XVepa1XBMdOjbFt54/cO4Nz8CL+45x9e4j3HzdDhxHEgYucZLTiVNqlYhW24qKLJbLmaB3Eer3BAmZ0peUmMJiWQ+kH8DoVlSnjZqZRpDh9A2gojK6Pk2Yp8zUp4mrfRSuD8I4RpfmfQxipZn
ITVwnlCb2U5Fn/g3XWtNW0FQmLlR1oP8crtZLMV9Q1FIw01PflAT0OybJ7GgGNUcz6Jj5tZQRE803q080xBlsdfVcMt3Z8K68GveT/5z48UdInnwU1FllP+cXrdGdNrrTRp1xI4GoVHEGhnC27cTduRt3+y6Ea2OilpVjRUUWi8ViWTOlyDfW0zmUIyMSanViAr9y1v2kFFyze4ynXzy05OO337SL7377dXOW0/PJ84KpmTYA/X0R/dVTi2C+79JfM6U8xiea9FUj2p2ELFM02zFXXzm2pKjowJFJdG+RMUlyymFAN06B8rJeB4vFsnHwpMCVglzBaOQT95yFUmUcfdq5pp2f+ZLsbDhCUHYlkevMBaBmKbmCkjvrjqTnxk2UmhMYSQFl16HsyjnXJKWNCKnQRgxhhExG7HTq/pQz0Xy5hOyJpLSmJzQqliUqkkJQchza+fm5QHaWEYzrO4c4dTV0c8Wj4w0+csXQuve90fCkxJOCTGkS10cMb2Hi5AQqj/HKFfojD78xA3lm7K/DCFGpIZyzOz1JAcMOzPQCNvXCiIcGpAnitM6gJe5qmMqNuCjoCYpWa+DlChhxzdjtXhbafCYL6F9d10siKzWCm24juOk2AHQSU8xMoWamUTOTqOkpivoUqj6D7rYhz8/R44Ule+lZ8gOvE73nQ/hXX3+xp2OxWCwWi2WDY0RFMUHgUS2H/Nj3v43d24cvyNhaa7741bOXJ9kyUuP+t13HawfGeerZ/Utu88WvPs+ubYP010qUIt+IirqzoqIYrWtWYGCxXKbMulu7vbL1Smtyrc/pZmKxbERkqYwIS+hWA9WsI30fPTgCM1N4mSJr1OkUEdVqhY6CkjS/tVPFKTEPGCejroJJAVWpqTmnHIASpWkqExOaL+qZzE3y2egZXK2Xol4YIRMsFBQBdDTEOfQ5JtmsUZjbfJSGWJu5Vh3wMQKkLd7yks+E5xPd+27862+m+9W/I39z37LnfuHR6FaDvNUgP/QmyeNfA+ngbNuBt/NK3J27cbbuRLhWLmI5N/YosVgsFsuaEULQVw2ZnG5TKQW0OyntTspA7dw15q+5crGo6IrtQ7zr7deyc+vgkvtorZmYbqGUJgxc+qpGQFSrRvTXogUipHqzS5Lk9NdKTE63mWl02XvFKF99fHHZNa3hyIkZRodqpD3Hkm6Srei1sFgsG4fhwONEN8WXAt93qOGgeqIcU7rLiHNmv8VEz9VI9P6WQiDp3QuB7LXNFxIJoOQ6VD0HVwo6vZJdcaEWjJsrTTsvaOdG1NTMCppZgdsLXq1U3uT23IhCRxJIgRCCTm6ci1Zi21325HkTFS1HLDLgn58yZQdaCbnS56Xk3EYjciSZKqhnBYXWiP5BorhLf2cGhwA9NIJq1CHpouMuOokRURlRqZ51EUUIGHBM+bIZBZ1e4GhWKJRr4yKU99yKZu81xoVocA2ColmkgAEXfAWTpz2WKBPUWk4m2moQQYg7tg3Gti35uM4zdNdYiqtu27y23d5r3LupbgeddNGdjrHWLs6vEEl32nT++o/Jrr2J6LvfjyxZUbXFYrFYLJbVEQYmA96fTRLQAqXUshyK1ooQgn/03lv4m4ef4/Dxha6nlXLA+955I9ddtRUpBbu3D7H/0AQT04tdJpM0568feoYf+/DbKYU+U3SIk5xcKVwk3TilFAXn/flYLJYLjyMFjhAUPSFR0osVeef/K8xiuSgIKRC1PkSpRHHyBIIMOThMWK+TxSnddptykdLqG6DaK3Gf9sKLHWWEOyVphDxghD4zBZSkSZBM5oUii17ilwL6JDQLQGtG3HO7FM4XFDV7SWxgHJIiAdOFiTVNF2aMAce41CttEtk6ysx7djpJbsqfBQKOZVBZQczUGRqh8v0/Rn78CPFjXyV/45Vl73tRUQXF4QMUhw/AY4Dn419/M8Ed9+AM2ARMy5mxoqLLnGazyeOPP86rr77KzMwMnucxOjrKzTffzK233nrJZlO
kacoTTzzBwYMHmZiYoNlsUi6X2blzJ7feeitXXXXVeZ/DSy+9xNNPP82xY8eI45harcZVV13F3XffzdDQ+f1izfOcp556ihdeeIGJiQmUUgwODnL99ddz1113EYbWXtdy6dFXNaKdKPRxXUmeKzpxSqV09gDLlTtHiAKPndsGuebKMa7ePUrlHBbSjVZMnORICUMDFQSCWjVi62jfom1Hh2ocOjpFtRzSbMekaYEfOfRVI+rN7qLtDxyeZHSoRpLlaDRFrsiyAs87PwvbFovl4lHxHCInpFOoObEPQOgIQmdtkaLIkVQ9h7LnLHDkCR3JYACF0nPjtnOFK40rT83TdAsjMEoKRT6vRJMUAmf2Xp4SNRmRU+9vIfCkuZ3OrIAmX8EFcsl1MJfj689ynIpCRxI4kqRYnWvUmVBaczLO2Fqy5S0rnjMnZgOoeg59pT6olSmmTiLSBKd/AJ1VUK06pCm600LHHSMsCsuc7a2s9MqXTfXKoR3PT5URXIpyz6p6PS9TyhK0C0dOa+8oCC5SUFi4HqLqQbXGcs4wdM/WWtWnUY0ZVH2GYmqC/PB+dGNmXeeWvfI8+cE3CN/5Xvwbb0VcgMU/i8VisVgslxdh4IEAV8q5GFGS5ks6UZ8PRodqfOKj9/DMiwf5h2+8TNxLGPvQu29h7xWjc9t5nsOHv+c2PvOnX0ctcZ104MgUjz39Ou9461583yFNC7rd1MSYWokVFVkslzGBI+nkBZ4jSBQrStC6HKmnOX/0xkkiRxK6ksgxt9AxTt2lXlvJlZRch8iRyJ6YYzZZTvectzOtSQtNqhRJoah4DmPRuX8ftNY8MdGi7ErKvTFn7+Uluta50RCuhzO2HTU9AZ0Wlb4+mk6XuN2iiGN0fpJD/QMI16PQRjjU7X00Gr2yZqGAsmPuO71wnu65A7WViQ3NfpoKbZLKmgp0DqPumcsMzhcUNQozHhhBUV8vsDIqjHtRoyceGs+NqCjTC+NQs8W/MmAiN3OIJHP9rwR3y3YqH/kRivHjxN/8Ktm+lzhz1OsSJEtJn32K9Nlv4V19PcFd9+Ju2X6xZ2W5BLGiosuUJ598kt/8zd/k85//PGm6dH2BLVu28FM/9VP8/M//PIODS7uBXGg+//nP8zu/8zs88sgjdLuLF/tnufrqq/mpn/op/vk//+eUy+uXQdvtdvnUpz7F7/3e7/Hmm28uuY3jOLzrXe/iF3/xF3nve9+7bmMDHD58mP/6X/8rf/iHf8j09PSS25TLZT760Y/yy7/8y1x33XXrOr7FshY8z6FU8ul0UirlgJl6l1Y7PqeoyPMcfv6fvBdnmQv4SZoz0+gAMNhfwXMdXNdhdKi65PalyKdaCWm2Ygb7yhw/2aDVTtmza5inX1hcdu2NQye58+bdaCVI0pzQ9+jGKZ4XLWt+FotlY+FIQVUaJyGAtFB0CkU3NyXJtDZuJ7PCnVNORcaZyJl/3+svkPKcDjjzx1Va08oKGllBXKi5Emm5MvbajhA4PRek5SIwgpyyZ0qwHe2keL3noTCiJmcZDi3leWXSHCEoe72ATS9QFPUCRdFsQMmVCCCeFxya/TstdK90m8lSWo5wSwhBv+9wontuUZEQRszlSkErUyh99gv4E93Uioow7+tY5M25Z82W3MPzcEa3opt1VLOB8MAZGEbFMbrVgCJHN+rodhsRhKY0mrd0TfZImsDOZA7z4zMepkyZK0yQx+/9fT5YamYdDQPnZ7h1RwiBKFeQ5Qps2znXrrVG1afJD7xBfvAN8kNvorudNY+nux26f/9XpM8+RfTuD9iAksVisVgslhUhhCAKPLpxRhh4tPLkgoqKAKQU3H7TFVyzZwtfeuQFtGaBoGiWraN93Hf3NXz5saUdBr76zVe47qotlKOANO3QmRUVdWLGqJ3vp2GxWC4SgRR0OFW+abOLirq5otO7kaxv3zcNlPie7ef+fYgLxaMnGks+FvZiUiVXUnIcIlfiS4HSUGjduzEXk9J
azyU8aWDQd7l/6+KE5c2IkAJnaATleXj1abwwInNdkuY0UZGjpiaIqwPU3XDOWb0ijXAn6TkCdXMjQCg7Rlg260w0iy+M6KerTRLaoGPEQDqHsXnCIq017V6Js1m3o/mCopqE2rxMLSFMSbNImn1ifcpRycO0R/KUe9F0TxQ1WZj40FpEE87oFsrf93GKiXHix79G9srzRk21YdBk+14k2/ci7s4rCe68F3f3VZesOYnlwmNFRZcZWZbxC7/wC/zO7/wO+rQvK8/zyPN8rv348eP8xm/8Bp/+9Kf5zGc+wwc+8IGLMWUAnnvuOT75yU/y7W9/e9FjQggcxyHPTy1B7Nu3j1/6pV/iv//3/87v//7vr4u45/HHH+dHfuRHeOONNxY95vv+nDirKAoeeughHnroIT7+8Y/z4IMPUq0uLWZYCZ/5zGf4V//qX9FsNhe0SymRUs49/3a7zR/8wR/wx3/8x/zqr/4qv/zLv2y/1C2XDP3VkhEVRUZUFCc5WV7guWfPwV+uoEgpzcR0E62hXPLnBEtbR/vO2sfIYJVWJyEMPMoln3YnZctwH7BYVJRmBRMzLYYHqqSzoqIko1a1oiKLZTPgOxLfkfRfQK2JFIKa71LzXdJC0eiVPkOCi1i0rdsTMTlCLCrDJsSp0mfznYAEgBC4QpBpTaY1Duc+f7h7pModwxXKrjNXRu1C0++7nOhm+I6g33fnbjXPoeItzEybfc6qV0KunhZ89Xidk/Fit6XjXVvechZHCMpL/FYLIRC1fkSlip6ZQbWbyDBEByE67qCbPXFRp4XutMBxjbgoDBHuQhmPJ2DMNVlgEnBYXzei1RArE1xcjmvWpYoQAqd/EKd/kOCWO9BKkR98g+SJr5MfWjpJYiUUx4/Q+n8exL/5rYT3vhsZldZh1haLxWKxWDYDYWhERb7nAkZUdDGolAI+8r7bKc7ifnrP7Xt5bf9JDh2bWvRYoTRPfGc/77nneqbrHbpJRqEU5NCN0wsqlLJYLBeOoBdr9nv350NUZJKxNElhkrIU4Ekx5wAUrNFFeyUorcl7CWhLXSN319lBej7JMl/bdn7mOcSFIi4U06sUPMXLcErabMhaP/gB0YlxCjzS/mH85hT1rKBbb0KpIIjKDLom5gNGWNRWxqEo51R5MjBxoNkSaa6ArjJinvnCoraCEzmMuHquvFneOzy0No5GZxIUzccVMOxC0puHL07Nce75CTPmTAHtnsAoWofD3BkepfyBj6Huf4DstZfI9r1EfvBN0OfvM7Te5IfeJD/0Js7IFsLv+h683ee/epDl0sf6mF9GxHHM+9//fn77t397Tjg0MDDAf/tv/41Dhw6RpilpmvLNb36TH/3RH53bb3x8nO/7vu/j05/+9EWZ9//8n/+Tt73tbQsERdu2beO3fuu32LdvH0mSkCQJR48e5TOf+Qy33HLL3HZHjx7lgQce4MEHH1zTHP7mb/6G+++/f4Gg6P3vfz8PP/wwcRyTJAknTpzg05/+NDt27Jjb5k/+5E+49957mZpafMG5En7lV36Ff/JP/smcoMh1Xf6X/+V/4dlnnyXLMrIs49VXX+Xf//t/TxQZYUOapvxv/9v/xic+8YlFAjKL5WJRKQc4jsR1HcLQ6FZb7Xjd+p9utMkyheMKBvqNS9lgf5nSOU76Pc9hsM8sgg3USggJA31lXHfpn8Hpusnwnw14deOlHd8sFotlvfEdyXDosbsSsK3kMxb5bCv57CoH7KmG7KmG7KqEbC8HbOk9PhJ6DIUeA4ER21ROK7k22y+cKoG23GBYv+8yFHiEjrxoIubv2tLHz1y3hX9+3VZ+7KpRPrhzkHvHatw8WGZPNWRL5FM97TlLIejzXXZVAnZVjAC1z3e5ti/iu7b08YNXDvPdNgNt2QjpIAeHcLZsR4QlI2aLSsiRLYi+AQgiQBiBUbuJmjxJMXnSlOrqtFBJjC4KxDw3oout45mVlHU2TkxnWQgp8XbvpfKDn6TyIz+
Nd/UNsAwB4dnRpM8+RfMzv03ynSfRxcVZELRYLBaLxbKxiAITqwkDEx9K0vyixjDPlowmpeDD33MrvndqdVII2Dbaxzvu2MvN127H8xw8T4KGbi9podVeZ7sOi8VyyeD34iezbrqF1mRKreh7bFaok/YEL53cJJFNxhlHOgknuhkzaU63JygCE69pZAXjccbRTspUktHNz+3GvBKUNkKmZlYwlWQc76Qc6aSciDOOdVLiJQRE51NUlC6z73ZenHujVaI3UrmqC4gMI8pbtoDrEgvJRG2YbmDWWaqdJsPtadx5r53XK2m/xYWBXgm0koChXlufc+ozFUkYdkzEYtYtSPdESfvTntu1No5CjQKO5acERX1nERTNJ+iJmE4XFM0iBAy4poQamLHnk67hcyfLFYJb7qTysR+n9nO/SOn9HzUxGm/jCNiKk8dp//kf0P7CZ1Gd9sWejuUiY52KLiP+8T/+xzz00ENz/+/du5eHHnqIK664Yq7NdV3uvvtu7r77bt773vfyEz/xE8YyXyn+2T/7Z2zdupUPfehDF2zOX/ziF/nhH/5hlDr1Tf2+972PP/mTP6Gvb+FCz9atW/mJn/gJfuzHfoyf//mf51Of+hQASil+5md+hpGRET784Q+veA5PPfUUH/vYx4jjU8KH//gf/yP/7t/9uwXbjY6O8tM//dN89KMf5YEHHuDJJ58E4Nlnn+WDH/wgX/3qV/HOUPLhbHzqU5/iN37jN+b+j6KIz33uc7z73e9esN3VV1/Nr//6r/PRj36U97znPUxOTgLwh3/4h4yNjfFf/+t/XfHYFst6I4SgWgmZqXeolkPiuEWrk9BfK615MboTpzRbJlgz3F/BlZIgcBkerCxr/8H+CvWm+ZzXyiH1Zsy20X4OHp3CdSS7dwxzzZ4xrt49iu+5HD/ZmBMVJWmOUhq5jFJBFovFsh4IISidw+VtJXhSkBTmvltAvoFsu6ve2l6HO4Yq3DVcIVrH13OzIjwPZ2QMFXdR9WlEmiDCCMIIrTQ66aLjGNIE8gydZxCbksYaTLTGdRHSASlBSJMaJh0QAuE4gADHPHY+hWyJggomM656mR4a7tYduN/3cYrJkyRPfp30he/AGgKlOu7SfejzxI8/QvDWtxO85XaEf/YytxaLxWKxWDYvUWjipJ7nIKRxn87youdctBCtNVprpLx4OdD9tRLvfsf1HD/ZYM/OYXbvHKZ0mgtRKfSpZzFxmlEpBXQT635qsVyueFIgmHWMFuRazzkey56DtMQ4Rs+V0tKmxJNGo/W5r74ExhEpkJLQlbhArDTdvCBWpmRXO9dzDj2OEHjSzMeTAlcKZK+MU640+WyZL6Xn3F1gsWDmTHNzhKDQmkaaE56WxNs9i0vQWkmKtTsVrZWNEyW78IS+TzQ8RjwzhY67+JUq/YGD16xD0kVN5cj+wV5MxyAFlIUR9Jy1bwlDGEFRrGGiMAIk2SuP1uo5Gc2+Pw5GTHSufldKn2P6Pt0+ortOh5wMI/zrb8a//mZ0npEfPkBx4hjFxAmKiXHU1ASo8yeaWyvZS98h37+P6F3fi3fdW2z1nE2KFRVdJnz605/mz/7sz+b+D8OQz33ucwsERafziU98gtdee21O0KKU4hOf+AQvvvgiW7ZsOe9zfuONN/ihH/qhBYKim2++mc9+9rOUSme21fc8j9/5nd/h2LFjfPaznwXMSdsnP/lJ9u3bx8jIyLLn0O12+fjHP75AUPTJT35ykaBoPoODg3zuc5/jpptuYmJiAoDHHnuMX/3VX+U//af/tOyxwZR9+1//1/91Qdv/+B//Y5GgaD633norf/zHf7yg5Nt/+2//jfe85z28733vW9H4Fsv5oL8aMVPvUIp8HEdQFJpuki0KxKyEPC+YnG4BUK2ERKGPkIKto/3LPoGRUjAyWOHYeJ2+WolWN+G6q7Zy4zXbeMu1Owj8Uz+JSmmEgDxX5HmB6zrESUopsgtnFotlY3Iqw25lTkWXA+U
1ipIsi5FhhAwjdJqiOy1Up40gR0QliEpopdBpAnmOznMocshzE7nMMjRLL74sOipFz9JIOkaI5DrguOA4JljleKt2PEp7g3W0uZa4nAMiztAIpQc+gn/T7XT+/q9Q05Nr6k8368Rf+SLJN7+Kf+vdBLfdhSyV12m2FovFYrFYLhdc18F1HfK8IPBd4jgnSfNFoiKlNPsPT/DagXHe844bVpzQtZ7ncne8ZfdZHw98D4hJkl4SmhUVWSyXLUIIfEeSFIrQlXNuQRoj4jHOQeeOrcwKkyRGACSFidEEjoMvWfT9VZKCkivRWpMoTbdQdHNlxEJaUyxTgHMuZgVKgRR4jiQQggI43k1Jl4gZLeVetF6k6hJwKto8YbIVI4RgJPLoeCPQquM360ivhHJd9MwU5Blq6iSibwC5isSj+cKipCcsEpi/Z/EFVCRE59H5uuJALKE7ry0+D4e9cD283Xvxdu+da9NFjpqepDg5TjE5jm41Ue0mqtVEt5vobmf9J7JCdLdD5wt/jvvSs5Te80FTHs+yqbCiosuAVqvFr/zKryxo+4Vf+AWuu+66c+77K7/yK/zBH/wBBw8eBGB6eprf+I3f4Hd/93fPy1zn8+///b+nXq8vaPvt3/7tswqKZhFC8N//+3/ni1/8Ip2O+TKt1+v86q/+Kr/3e7+37Dn87u/+7oKSZ7Vajd/6rd86535jY2P8+q//Ov/sn/2zubb/4//4P/i5n/u5BeXRzsW//bf/ljQ9VVbp3nvv5ROf+MQ593vPe97Dxz/+cf7kT/5kru3f/Jt/w3vf+96LmtFjsQAEgUcQuCRJTrkU0GjGNJpdosBbVZCn1Y6ZqndQSuN5DgO9MmYjg9UFQqDlUKtGzDQ6dOOMgVqZItd4rlzUj5QCz3NI04Iky3Fdh26cWVGRxWLZsPi98wOvZ/mf22iJZR0Qvo/wB5H9g6gkhk4H1WkhwDgYzUNrTHm0PDd15Avja61V0YvIKtOuVG9j3UufNI9pMphXjXTuCHZcRBAiwhBcf9nBpaxnoQ0mUBVevpqiOdwdV1D9xz9L/OiXSb71GGvNxdRxl+SbXyF56uv4N92Of82NONt3Iez1iMVisVgslh5R6NFsFYSzoqIkp9rTIo9PNnj25SM8/8oRmm2T8FkUigfuu2lZ8aMkzXno0Rfpr5V4x1v3nnP79cDvxY+yvDCCAmXmsdL4lMVi2RiUXSMqGvBdBnr5skppcoyoSM1LUhGCOeciwayYyAiKzoYrBZEjKbkOjjCOQO28IFUQOoLQkQz4Pbc3DblSZFqTKVNaTQEOAkeavhyMg5F7FoGm7I17OkJrBOZKMVd6wTbX9UUMBi7dXim3bq7mBE+dQtHNC7qFWrE4x5NiQTn7sxE6krHIo917jdYztGWjZGdHCkHFc2BgEBVGqKmTSN9HD42gesIiPT2JKlUQldqKhT+hhGGMoCid92ZEwpQm8y9QmME/bd6ZNqUPl3uMrhbhuDjDYzjDY0s+rosc3W6h2i10p43qttGdTi/Rr3ffmEHVZ86741H+5j4av/8ponvfg3/rnTYGtImwZ7uXAf/n//l/Mj4+Pvd/EAT8y3/5L5e1r+/7/Kt/9a8WuOV8+tOf5l//63/NlVdeue5zneXll19eIIgB41J03333LbuPHTt28EM/9EN85jOfmWv79Kc/zb/4F/9iWYKqer3Ob/7mby5o+6mf+ikGBweXNf5P/uRP8iu/8itzbkVxHPNrv/ZrPPjgg8va/+tf/zpf+MIXFrT94i/+4rL2BfilX/qlBa/hc889xx/90R/xoz/6o8vuw2I5X/RVS4wnDSrlkGY7Jk5yxiebjAxWl51xlheKyenWXJ36IHAZHqgghaBcCubERStleLDKoaNTlCIfBGS5IssLvNPK4gS+a0RFaU45Coht9pnFYtnAnHIqMv8XWqO0Pmdwy2JZLjIIIQgR/QPoJIY0NeXPMlMGTajClD5zl3cJqpWeExnpWRFSXqCLnvN
R0XM+KnJ0p4XutEBKRBBCGCG84JxBrFhDSUBHmQDWZkB4PtH978O75no6X/wr1PTE2jvNc9JnniB95glEGOFeeQ3eVdfi7d6LCKwg22KxWCyWzYwRFcWEgXH46cYpx8Zn+PzDz3L8ZGPR9k89d4BSFHDf3dectd/Dx6f5q79/mql6B0cKrto1wpaRvvP0LE7hOnLOlTtNc8LAI04yKyqyWC5T+n2XQkMrKyh6ChYpBUZftLJ4iiMETk9k5EpByZFErsQ7bUG+5DoM4ZEpRStTdPKCuFBIKQiAwFk/N2bjmCQJHMlknEGv1FumTSk1d95zHAo9hnplLc+E0pqkMCKjuFeqTIieQ9M8pyYpBIGU+M7yBUUAbxko85aB8txYcaFo54pOroiLgk7v725h7nOtccTsay9Ola3rzcfMz7yTZde6XC8XGUWI0a0UEyeMeG5wBNWYhrhr4jNpgqz1I7yzHy+nE0gYE9BWRvgWyVNxzAuFd9p4GuNWVL7Ih4dwXESt/5zuQFopIz6qTxuR0cwU+dFD5EcOGAfx9SJL6X75C6QvPkP0ng/ibtm+fn1bLlns2e4GJ8sy/vf//X9f0Pa93/u9DA8PL7uPH/3RH+UXfuEX5sqQzfb5O7/zO+s61/n8+Z//+YKyZwAf/ehHV9zPxz72sQWioqIo+L/+r/+L//Jf/ss5933wwQeZmlpYIfPHf/zHlz227/v80A/90AJXp9///d/nN37jNxgbW1pNOp/TBU1DQ0O8//3vX/b4t912GzfddBPPP//8gj6tqMhyKVCrhExOt/CBsaEqJyabdOOMk1PLExbNdydCQH8toq8aITAOQltGaquemynLJqFQJlMuyYmTbAlRkUeThCQ1J1uz4iaLxWLZiHhSzFluO0JQ9LLaAseKiizrixDCuBSd7lSU53MiI3ruRFoVpxyLiqInIjIZVUIKwDGlzs4wlimzlqKTLiSJESB1O9DtoIVARCVEuXrGrKlYQUkaUdHy0gouH9xtu6j++Kxr0TfWzetdx12yl75D9tJ3QDq423fhjG7FGRnDGdmCHBpGODYMYbFYLBbLZqFcCoAmQeDNiXH6ooB2Nz3jPl974lXKkc8dN+9e9FhRKB55ch9ff2rf3OlLoTR/+fdP81MffyfuBVgUDnyXTjcjyU6Jivqq0bl3tFgsGw4pBCOhx0joobWm6LmWFNq4BM1WIpPiVJmzWZHKnHhFrEw4M4snJQOBZCBwUb0YTqY0qVKks38XCt0by5VGEORKU9bM7Y2/FAKxSNDTzAqSQuFKQVaY/sMVfqVKIYhch8h14Dznl0ghKLkOJSsGuigIz8MZ24aaPAlxB6dvABVE6Ga9Vw5tAlGumJjMCg5/V0DfRXxLl4oedTVslILvQkpEtYas1oAr5tp1kVMcO0J+eD/5wTfJjx4yyXprpDhxlNb/8yD+rXcS3ftuk+hnuWyx0bwNzj/8wz8wPT29oO0DH/jAivoYHR3lrrvu4pvf/OZc22c/+1l++7d/e93qUZ/Ol770pUVtd9xxx4r7edvb3rao7U//9E+XJSr6sz/7swX/79ixg1tuuWVF43/wgx9cICrK85y//Mu/5Gd+5mfOul+j0Vj0GjzwwAM4K1SZf/CDH1wgKnr++ed59dVXueaas2fzWCznG8eRbBvr5/DxacLAZ2yoxvhkg26cMT7ZYHSoumSpvjTLma53FrgTDQ1U8HsXB9VKyOhwDddZm51AKfJNplzoGVFRnFEtLzzhmc0yS9IcjaYoFGmW43v2p9NisWw8hDBBpVRpPGmC+UZUdLFnZtksiFmXovDsCy5az3MoUgoKBSqHvFc6bVacpAoTLAlDCENTKS1LII6NU5Iq0J02Ou4iKjWQiw/2eF75s1xr3E3m3CVcj+i+78G/4WY6//A3FEcOru8AqiA/9Cb5oTfnDSqRQyM4w6PIvgFktQ9Z65u7F751NrJYLBaL5XLC91yCwCVJcqLQp9VOyAvFD37gDv7ws4+
RZkuX6Pji154ninxuvHobSmlOTDTYf3iC5145womJxQ5HJ6daPPzYy3zPO288308J3zOiorSXhGadrS2WzYEQAlewwL3nQiGFIHBEL4az8Np2vVyoPSlICnPfLUz5M4vlbAgpkcOj6GYdVZ9BhiHa91GNOiRddLuJTuJVuRZdSnTUube51BGOi7vjCtwdV8Db7kN1O6TPPEHy9OMmOW9NaNJnniB79QWi+x/Au+4t501bYLm42JXRDc5f/MVfLGp75zvfueJ+3vnOdy4QFR09epTHH398SdHOevDtb397Udv111+/4n4GBgbYtm0bR48enWt78803efLJJ7nzzjvPuN/Ro0d54oknFrTde++9Kx7/nnvuQUq5wHXpL/7iL84pKvqbv/kb0nRhRs5q37fTHY8++9nP8m//7b9dcV8Wy3pTinx2bh3g8LFpwsBjdLjG+ESDOMk5MdFkbNgIi/JC0ekmtDoJaTrrTgD9tRJ9FbPw6LiSseHaIuHPWubWbMWEvgd0lwwAea4zl0WXJCb7rBtnVlRksVg2LJ6UpKrAlQIKI6KwWC41hBAgHJBndiiCnktRnkG3g+p2EFlqBCl+APSh0gTdbECeoRszFGKhIFkACkg1+AK6CqqbVGTnjGyh8vGfJHvpWbpf/XtTSu58oRVq4gRq4sSSD4swQlRqyEoNWan2/q6aW7UPOTCI8PzzNz+LxWKxWCzrTqUckiQtSpERFbW7CTu3DvKDH7iTP/rcExTF4tU6reGv/v5pnnv5MIePTdNdhnDn8WfeZO/uUfbsHFnWvJRSNNsJcZIRBt6y3YbmJ6HN3mut7QKaxWK5KKxXWXu/Z2s0m2yTWVGRZRkIIRC1fkRYopieQKQJTv8AKg7RjXmuRVGEKJUR7sYTF2WXYSKajEqEb7+f4I57SJ9/muSpb6AaM2vqU3fadL7w57jPP010//twRrasz2Qtlwx2ZXSD87nPfW7B/+VyeVUuNbfddtuSfZ8PUVGSJDSbzUXtg4OrKzowMjKyQFQE8M1vfvOsoqLPf/7zJgt6Hku9BueiWq2yd+9eXn311bm2hx9+mHa7Tbl8ZkO809+31Y5/pvfNiooslwpR6LNz2yCHjk4R+h5jI32MTzRI0pzjEw0cKYmT7FTFDQGl0KO/rzznTlSrRowMVdfsTjSfcmSy4IPARUpjlZ2k+VxgaJZFltaxtbS2WCwbF18K2oBnA0SWywAh5ZyISPYNoPMM3emgux10GiP9AD04gu620a3motrxvoAMUwLNd6C9iUVFYAKB/g234F11HfFjXyb59jfXrSTaStBxFx13zyg6AhDVPpyBIeTAMM5g7350C7JcuYAztVgsFovFslyq5ZDJqRZR6JkYTKFJ0owrdw7zke+5jT//4reWPO0olGbf/vFlj9NXjXCX4QKvlKbZjmk0uxS9a6JZx+zlxHxmk83yXFEohYMkSU3cyGKxWDYqfq+qgNeLwdtENMtKEL6PM7oV3ZhBNerIMEL7wSnXoq6J1wg/RFQqGyZZKMNU8rtcE9GE5xPcdjf+LXeSvfIC8TceRs1MranP/OAbNP/gf+BedR3h2+7D3bJtnWZrudhYUdEGZnx8nGPHji1ou/baa1eVFXHDDTcsanvmmWdWO7WzMjW19BdSpbK6IHC1Wl3U9txzz511n6We22qcksC8dvNFRVmW8eKLL55V1LRe42/dupX+/n5mZmbm2p599lmbHWO5pAgDj13bBjl0bJoAl7HhGicmGj1XIuNMFAQu5SigVPJxexcwjivZMtxHpbz+ZTA8z8HzHLKsIPCNA1GcZEuIijwjKkoyqER04vQMPVosFsulj98LDHm971lrZW1swtu5oupdhpGBTYZwPUStD2p96DxH1aeh00KUyugggulJ6HTntvfylMwLiBXUHBMguhTPoXOtmchNqbZAQE1CSXLe5imCgOj+B/Bvup3uw19YWLrsEkE36+TNOhx8Y0G7HB7D270Xb8/VONt2Ihw
b7rBYLBaL5VIg8F183yFNC6LQp91JaXdTAt/j+r1b+d773sIXvnL2WO65eMu123ngvpvOKuxZSkzkeZIwMG7W9UaHcik4a1KbUpq8UHiuJMsVaWrKus26HVksFstG5ZRTkfm/0HrdSqttNAqlmUgyTnQzptOc+7b0XewpbQiEEIi+AURUppg6ichS41qUVdDtlhEXpTF6KgbPQ5QqiCDiUj7EEgUVLl9R0SxCSvzr34J39XXE3/gKyVOPrjnRLH/9ZVqvv4x75dVGXLRt5zrN1nKxsFG2DcyLL764qG337t2r6mup/Zbqfz3w/aUVqGmaEkUrdwBZKqD+/PPPn3WfC/HanUlUlGUZr7322oK2wcHBJcVRyx1/vkip1Wpx8OBBrrjiilX1Z7GcD4LAM45Fx6bwe8KiyZk2UeBSKoVzrkRgxER9lYjB/jLOOroTnU45CpjJOkThKVHR/Iw0pfRcWTandzWVpjlFoc7rvCwWi+V8MRsg8nqnTvkmCxBpralnBSe6Kce7Jjh0opviScHPXLvlkhOTWFaPcF2coRFUuYKamkSQ4VRrME+I7zfrtIRLGkYoDQgj3IlOOwy01rQVaCCUp5y+LgSF1hzLTIk2gI6GjgJHQE1qqs7S8ym0ntsnEKuzw3eGRyn/wCfID75B8uSj5AdeX8tTuSCoiRMkEydM8Mvz8a7Yg7v7ary911kXI4vFYrFYLjLVcshk2qZUMqKibjeFPuPy/ta3XEG7m/DVx189Ry+LiQKP97/rLdxw9Zmz4LU2YqJ645SYyHUl/bUSlZJJZEuznCTJmal3GB5ceN4w0+jwxsGTvHFogv2HJrj5+h3cdsMusjwlyU6JiiwWi2Uj40mBwFw/OkJQaE2mNIGzOWIlzazg8ZNNxrsZJ+OMYp6g4o6hCmWbjLZshO/jjG1D12dQzTrS86B/AF1U0e02Ou5AlqHr02gxg/BDCAKEHyCW4Th4IZkfj9kMCNcj+q734l17I92//2uK8WPn3ukc5G/uo/XmPtxde/BvuRNv917EGXQClksbKyrawLzwwguL2rZu3bqqvvr6+oiiiG73VPbugQMHzlnGazUMDAwghFhUfqzZbK5KVFSv1xe1zXcOWor1fO22bFlcF3Kp/md59dVXybKFF5qrHfts41tRkeVSI/Bddm49JSzaOnJK4S+koFoOqVVCSpF/QRZ2S5HPTKMzl0k2PtHgxESD4yfrHD9p7pM05/qrtnDHzVeS5wWu65CkGaVo/d2TLBaL5Xzj9URFUp4KEOVK42+SANHBdsqf759Y1J4pTcu6FV2WyDBCbN2Obtbh5MLSGVKA126SKUVcLVMSRrATzdMNz4p6knmXLZ7QlHpuQeEqBTvLQWvNidwEsAoNU4URCFV685suzK0kNYEw2+UaMg1qXj+egK2eXpUYSgiBd8VVeFdcRX78KMmTXyfb9+JFKYu2YrKU7LWXyV57me4/fB7vupsJ334fwrMOAhaLxWKxXAwq5ZDJ6TZR4CMkZLlaUIb+nXdeTbub8tSz+5fd594rRvngd99MtRKedbupmTbNdgKcEhOVSz5m+dww0Ffi+HiDViehWgkJfJdnXjzEo0/tY6reWdDfGwdPcvete2h3U9LUlNi1oiKLxbLREULgSkGmNO68mFGwSUIljoBnp9pLPnYizthjY0YrQgiB6B9AVGvoVgPVaiIAUetDV3riom4btEInXeNiBOC4iCAAz0e4LjjeRXUyyjTMGr2nSs8lbF7uuGPbqPzoPyV56hvE3/gyFPma+8wPvkF+8A1wXbwrr8G7+nq8PdcggrOfx1kuHayoaAPzxhtvLGobHh5edX8jIyMcPHhw7n+tNW+++SY33XTTqvtcCiklY2NjHD9+fEH7wYMHGR0dXXF/J06cWNTWaDTOuH2z2WRiYuGCkpSSwcHBFY8NLDnnpd6bsz22lvdtpeNbLBeTwHfZtW2QExMNkiQnCFxqlYhqOURe4BOyKDJqaN9zcVzBU88
f4ODRqUWLg9O94FGc5lRch05sRUUWi2VjInsBonxegCjTms2SGzIWnVlMcLybUvVWLm63XPoIIRC1fqQGDu4/1R6VCJOUrNuhrQuivhodBUO9x/N5LkFKQw54QAbUC3MTQCg1rgAHkBixkuz97QlWHXAaz429ttIwmUOKETc1lREzVRwjMuoo6Cyxf66ZWyY7nsF2b22uZO6Wbbgf+kGK6UmSp75B+sIz6xJUuiBoTfbSd8hefhZ3z7X4V1+HjKxzkcVisVgsF5Iw8ObK0Jd6JdC6cTonKhJC8L533kg3Tnnh1aNL9uE6ku1bBti9Y4ird4+xdfTc5WjiJDOCIgFD/WUq5WBOTFQuBQz0lThyYobQ9yj3XJSm6222jPQBepGgCODkVIs0M+dBaVYAkKQ5SukLHtuyWCyW9cSXkkwVeI4gUZBthISSdaLkOtQ8h0bve30+J7ope6pW+LAahOOYkmjVfnS7iWrWEeSIatWIi/IMkhidJpBlUOToTg60jcgIAa5rBEaua0RHF1hslAIh0NVsmhgqmJJo4V334l19Pd0vfY780Jvr03Gek+170SStSQf3ij2426/AGduKM7oVWVpfoxPL+mFFRRuYpYQztVpt1f0tVX7rbOKctXDPPffw2c9+dkHbc889xx133LGifsbHx5cUFWVZRpIkBMHihf+lnlO5XEbK1ZUzWunrtpHfN4tlPfA941h0sXEdSRC4JElO6HsMD1Q4cHjSpCXMY3KmhdaaJM2plALi2GafWSyWjYs/KyqSpi54rjZPgCh0JP2+y0y6WAhxvJtxdc2Kii5nhLvw0leWykRBSLPRJo4TlJ4i6R8g630kjmUmI63QcDI3oiKJEfKE0ty7wgh/zka/oxlyVxbpmsw1LWUMgSYLE8ByMKcoqTaBrG5uLuZL0giZinlORQWmXJsDjLpm/xM5bHH1mt0gnYEhSu/9EOE97yJ75Xmy118hP7Qf9DleiEsBrclefcGIi/beQOldD+But+6uFovFYrFcKCrlgOmZDlFPVNTuJvTXSnOPSyn48HtvZftYP489/QZ5XjDUX2H3jiF27xhmx9YBPHf5ThFaa6bqxnWiWg6pls2CcLkUMDRQJgrN0uBQf5mJqRYDtRKdOCVOctqdhD27Rs7Y95HjM/TXSuS5Ii8KXMc4W8/2abFYLBsRXwragNu7btxMMSOA0cinkXUXtY937XrAWhFSIKo1RKWK7rZRzQYiTYybsOcBVbTSRlyUJugsNYlMWkOeGfFRjzmxkeMgnJ7YaE545K17FYxEQeiY+E/fJjSscgaGKP/AJ8heepbuV/8O3Vna0WtVqIL8zX3kb+6baxLVPtxZgdHgCLJ/AKd/0DoaXQJYUdEGptVqLWpbS6mypfZdaoz14P77718kKvrSl77ET/zET6yon6985StnfKzZbC4pKrrYr9vFHn+ljI+Pc/LkyRXt89prry2az4UUOrXb7bP+b7HMIlRGEndxZcFgzadQBacnlcVJTrPVxHUKKqEgTbvUypvw7NGyIbHfh5bTydOcOCvQWUGRFXR7AsvNwpCrmY4Xix+ONtrEldUJzC0bg7SzMMs8zVI8z4cgpOi2aXUSvCzlWF8/XSEoeuKc6dzcgykrZvLlDA7gSSM2EpxyKaJ37wMngI4D/cssM9hSmume7q1eQNyL4fa7xikpx7gTxcqIhdKz9KWA8dTsmwD5CuZxThwPbrgNbrgNmcToA6+j3tyHOvAaJMn6jHE+0BqtNMVrL5Hu34e8/hb8D/4gcmRxSekLzfm69rZYLBaL5VKhWg57oiIPBGSZIssKvHklZaSU3H3rHu6+dc+ax2t3EtK0QEro6yUQjA5XGehbGEsd6Csz0zCLyH2VkJlGzEyjw7axfoYHKkxML/6N3n94grtuuZI0K0gzIyqKEysqslgsGxuvFxifvd9soqKxyOO1xmJR0XGbZLxuCCEQpQqyVEHnOTrpouMYHXcRFIgwhPCUeEQXOTrLoch
69/kpsVGRo4t8LjAyd7Q6bk+s5Jv7NQqNEgX0REVarz1ZayMihMC/4RbcPdcQP/ow6TNPMu8VX1d0s07WrJO99vLCOYQlZP8Asn8Q2TdgxEZ9g8j+QUS5glilcYhl+WyeFYTLkGazuajNdVf/li6171JjrAc//MM/zC/90i/R7Z76gf7c5z5Hs9lc0nnnTPzhH/7hGR+b3/d8LvbrdrHHXym/93u/x6/92q+tqY8nnnhiUbm7C8kTTzxx0ca2bByqboHKU5IlKnkcP/ImflHmzSnz/xuvXti5WSzrhf0+tJxOG5i82JO4kDhVEq9/UfOBtuK1w8+x+cICm5ejp52bHgfowqHG+bn+WStHVrlfF7gg0n6nAntvgz23UJ48TmniOGF9irAxhd++BF9TVUBewNNPoJ9+ksaOKxm/9jbSWv9Fm9L8UuQWi8VisVyORKGP40rIIQo8unFm3Iq80rl3XiFKaaYbRlTeVyvhSonvOwuckWaRUjA6VOXoiRlq1RLNTkKWKxqtmCt3Di8pKnrj0Ene8da9RlSUZpRCn26cMXDuimwWi8VyyeI7ZlF+zqlI600lohgLvSXb21lBKyuoeDbReD0Rrotwq1A2a8I6TdFxx4iMclMKTTiucSNioUuNLgojKCoKyHN0nkOemWv9WbFR3J2TvQg/gDBChNGKj+cUU5oeYcrSh5fgxyHTmrYyrtrReSzFKsOI0rs/gH/jrXQf+jzFiaVL1p4PdNyhON6hOL5EhEw6RmjUN4AzPIozuhVndAtyYMiKjdYRKyrawCwlmnGc1f+oLSVOOZMwZ60MDw/zkz/5k3zqU5+aa2u1WvzH//gf+c3f/M1l9fHoo4/y+c9//oyP+/7SmSEX+3W72ONbLJal8T2Hasml2VmsKppqpOzaYmu5WiwWy0anXy/t65IjaQuXil5CWWqxWJaPlLRHttEe2XaqKc8I6lOEjWnC+iR+q4HXaeF32whVnKWzC4NA03f4DWqH36Sx40qO3P5d6DVcn1ksFovFYjkz1XLITL1DuRTQjTM63XRJoc9aabS6FIXGdSW1ilmIHBmsnXEhsVoJiepG6DRQM+XQ6s0uV2wf4sln9y/avt1JabZjpJQkqTmfSVLrZGGxWDY2sw5FrhRIIVBak2uNt1lERdHSoiKAE92UihddwNlsPoTvI3wfauZ/rQp0mkKeQ5qi8xSdGeGQcBzEEtftulBGkJSlRmiUpaCKU2XVGnVE0BMYBeE5BUYuxgU61lASxq0oPE2jorSmXkAGVOX5FfUsRaw0R7NTvkElqRlxT4kDzwfulu1UfuSnSb/zJPE3voyOL/KatCpQ0xOo6Qny/adKqeF6OCNjRmS0ZRveFXuR1drFm+cGx4qKNjBRtPgHrChWH5Rdat+lxlgv/sN/+A98/vOf58CBA3Ntv/Vbv8V3fdd38f73v/+s+x47dowf//EfP+s2S5U+g4v/ul3s8S0Wy5kZ7PPPKCqyWCwWy8anX535+3xa+lQKKyqyWNYb5Xp0h8boDo0tfEBrnCTG67bwOy28bhuv08ZNOrjdNl63gxd3LpjwSKBx464VFFksFovFch6plgNm6h3CXgm0NCvI8gLPXb/f37xQNJpmcWugr4RAUCr5VMpLx4pnGRmqcvDIFJVSQKPVJU0L+vsipBSoJUoAHTkxzc6tQ6SZuYZI04KiUDiOzYi3WCwbE0cIHCEotMYVglRrcqXxNsnXWuQ61HyHRrr4GnQ8zriqZtfdLiRCOohw8WuuVXFKMNRzKdJZClmGcCTCCWDe+rAucnRsyqyRZ+gkhiRGC2Gci8rVJQVKAL6EmJ6oCOhoGJj3uNKa47kRGwE0CyhLzZDLOcV4mdYoDcEaREh5b3wNpBo8oKPgUApDrqbmLN13ojQNZfYJBAw6IFcoQhJSEtx2N/6Nt5I88yTJU4+iu51VP5fzQp5RHDtMcewwfMe4eTtbduBdfT3eNTfg9A9e7BluKKyoaAOzVJmwPF/9Qsh
S+66kFNlKGRwc5E//9E+5//776XQ6c3P46Ec/yq//+q/zL//lv8TzFiuDH3roIX7mZ36GN954A4B3vetdfPnLX16wjeM49PUt7Td7sV+3iz3+Svm5n/s5fuAHfmBF+7z22mt8+MMfnvv/rrvu4vrrr1+3OZ2Ldru9oMTPXXfdRblsXWYsSzPT6DBT79BNUkaPOBw6eQjvtJPIVuziV3cS+h5DA2U8z2H7loEz9GixXDrY70PLUhxppxRoJuOcTCkGfIdgHYP4lzrfPtRgaokAkdM3xFXD658lbbk0SDsdDj3/zNz/O2+6Fb9k3u+JTkJanyYscmINqt3Gy2L6UchShChVVlUar6uh0TvUyhIqvUDsgAsO5rG0tz6lMUGq2e0jAbV1+lg2lQkqzY7tA66AMXflQaMLjdbaWJY3ZmBmCj0ziZ7u3c9MQbZCRwBVoIsCpDQW6kIgvFMOt0M/9tNcueea9X0Sy+Sll166KONaLBaLxXIhiUJ/TnQTBi5xnNPppvRV12+htt7omEW6wKUcmUXF0cFzx0qj0KdWjWg0uwz2lTl+skGSFGwb7ePw8ZlF2x88OsWubUMUhSbPC1zXIUkzStHZxUsWi8VyKeNLQbfQuBJSBZnSbCYpzVjo00gXu64c71o3uksFIR2E74C/8PdWK43OU+NqlMbG5SjLTAm1simzprPMlFeLOyY+0O2g4y6iUkVElUVjecKIilIFOJAoIySSQqC1ZrwnKFLaxIBKAtoKOin0OZp+x4j1Zsm0plVAS52KB5WkZovLisuyaa05kUOhIdMwkZtY04ALvoCTObSUcS3yemLBtjJxp2SeVjoGcg1bzmzUdVaEHxDedS/BrXeRPPsUyZNfR3faq+vsAlAcP0xx/DDxI1/CGdnSExjdiDM0crGndsljRUUbmEpl8Rdcu736D2qrtbg+9FJjrCd33nknjzzyCB/60Ic4etTUXozjmH/zb/4N//k//2fe//73s3v3boIg4NixYzz88MO8/PLLc/t/+MMf5sEHH2RkZOGHfXh4+IxfwBf7dbvY46+U0dFRRkdH19RHpVKhVrt4lnLlcvmijm+5tPH8kG4Crh8yPNQP+rBZYJr3HRKnBXEqcD1JEJoFyFKpjLuJFuEtlwf2+9AC0HAS4kIRuhkqVwjPIfQ3z2XBtmrG9PTizJnJQhCWz++5r+XSwS+V5t7vPj9iOiyTNaaRSUwUBAx0m4hOE9IEgUD0D6w4wBMAbgEzymRDedIIhWaPPuGAp02mW7OAApAOhAKGHFgvvU8ATOYmwNUERl1wBMQSxrxLW1QEQBTCwABcceWCZq01amqC7M195G/uIz98AM7haqS1AIE515MChET0Fjbdq66j/9Y7ztezOCfn+9rbYrFYLJZLASEElXJAvdGlFAbrLipKs5xmOwFgoFdWra8WEQTLWykbHqjQbMeEgUe55NPupIwM1c4oKhJSoAtNkuW4rkM3tqIii8WysfEdSbdQeEICilwvdmq7nBmLPPY1FouKxrsZWusVxwUsFw4hBcIPemIjIybWSqPjDrrTRnc7CM9DeB5Uq6g0RbcaplRas4HudtGnOSP5vbc7xwhvXGFiK2UBJwsjINIaJntCnSbQ75q4zkxhkrwGHY3A/D3raARGiAQmCWyygOEVhmZPFhD3BE2TuSnTpoDx3JRhq0oz3qHUCJc66lSJNN0TQcUKBhzzPGYKTf8ZnI2Wg/B9wjvuIbj1TtJnv0385NfN63sJU5w8TnHyOPE3vox7xVWE977biovOwuZZPbgMWWpRsNlsrrq/pfY9n05Fs9x+++08/fTT/If/8B948MEHSVNTlmJqaor/+//+v5fcZ8uWLfzn//yf+cQnPsH4+Piix6+44oozjrfU69Zut1FKIeXKfRxX+rpdLu+bxXK5EAYeUgpAsnXUfD5Pv0AQQjA106IUDZJmOb7n0o0zqhUrKrJYLBuPwJHEhZqz4d1sAaItkc8LS4iKxuOMQusFGUS
WzUHFlRSBSzI4hN9qUuk2ENUqynPRjRmT4TY1gewfMO42K+nbMUGbuoKGAimMY5HSJjOtpUzQB0xGWVUaV6P1PgwHHMhzyICpHEY9EzTayMe8EAJnaMQEfO64B50m5If2k725j/TF7xgr9BVQ/t6PnKeZWiwWi8VimU+1HBpRUeQxNQNJms85/ayV6bo5zy+XfMLAQ0jB8MDyhbue5zDYX2ZyqsVArUS7mzI6WF1yITnPFVMzLfqrZZI0pxwFJKktp2yxWDY2Xq8Uk9u7z5Yo/3g5MxYtLUJt5wWtXFH17HrARkJIgSiVoVRGK2XERZ0WOomRvg+Dw6huB91sQJ6h6smC/SVGWJRqIxpyhRHiJErT7OU0Tc1z/skxjkGhgP7eoXLytFODRJt4TKxM38Mu1AuIhKa8TFFPvTDjzwqackxMKRC9xLWegGnANW3tXuAp00bE1J4Xi0KZ8meTOQRCE62hHBuAcD2C2+/Gv+WtZK+/Qvbqi2RvvLriGM2FJj/wOq0Dr+PuuRbv6utw+ocv9pQuOayoaANz5ZVXLmqbmJhYdX+n7yuEWHKM88Ho6Ci/+7u/y7/7d/+Ov/7rv+ZLX/oSL7zwAhMTEzQaDQYGBhgbG+O2227jQx/6EN/7vd87V75lKZefa6+99oxjVatVhoaGmJycnGtTSjE9Pc3Q0NCK537y5MlFbXv27Dnj9uv9vq10fIvFshAhBKXIp9VOGOwrUy4FdOOU0zWGM80uO7ZCnGT4nkunm1KthBdn0haLxbIGTgWIJFDYAFGPXJmScKNneNxy+SKEoH/WrSsaQrV91NQkMoxQjmvKbOUZavIkotqHjFZWJq/aExY1lMlUS5QJIs0GcNzeNiWx/mKiWWQvUHUsh5RTGXaxgvJlEhMVfoB31bV4V11L+I7vJvnWYyTf/uayAlfuVdfh7b1w5ZotFovFYtnMlCIfKQUujimBluR04pRaZW1uRd04pRtnIKC/51I02FdasVhpsK/M9Ewb13UIfJfhwSqe55DnatG2R47P0F8tk2Z5bw62PI7FYtnY+L2Y0WzsKN9kMaPR0D/jY+PdlKq3mYrBXV4IKRGVKlSq6KJA1afR7SYyKqGDENVqwBKVYcKeqChWJgmsXpxy/JkqjOMPGMfppJdAFms4kZuksqo0ztTdnphnvr9y3BMAVaVxHvKlnksCPRNdpZnsCZUavfiSAIZ6Zc+iXuwpxwiaStI8h/kl18CIkAqMyCgUZrsTOezwNO46BKeE4+JfcyP+NTei85z8wGukr75I/vor6CRec//ni+z1l8leeQ7vqusoPfBhZLXvYk/pksGKijYwN95446K22RJiK6Ver9PpLMzY3rVr1wW3YN+2bRs/+7M/y8/+7M8ue5/Dhw8varv11lvPus+NN97I1772tQVtR48eXZWo6NixY0v2fyauueYaXNclz0/JU1f7vq1mfIvFsphZUVEYeAwPVDhwdHLRNtN1I2CMk4xaJaITJ4u2sVgslo3AZg8QjQQeTq+W+Okc76ZWVGRBlqvgeqiJcaQHenAEVZ8yltiNGVTcRfT1I+TyF6hqPWFRU50KOHkYMVF0HsVE83GEyVBLtAlcVQS0NZTP/9AXHBmViO59N8Ed95A89Q2Spx+H9MznbqUHPnzhJmexWCwWyybHlEALaTS7lCLfiIq6axMVaa3nXIpqlRDPdXBcyWD/ymPbUp5KPgt8lyTJ2T7Wz4EjU4u2PXxsmhuv3k7acyjK84K8ULjOyt3wLRaL5VLA62Xaur3K0QoolMZZo3vJRiFyJX2+S30J57kTccZVNSsquhwQjoMzOIwqV1DTk4gsxan1I6QLnVPl71SnTdBnyprOuhHNRhMbhRHkgBEURRIiTgmPZgVDrXllx8C4H0XCJH81leknEOBjSpdtc89cZi/TmhO56a+jzP5g3KlnS7VF0vRX74mYOgrmr/5H4pTQqNlz1Z4uzGfexwiLzjaH1SBcF++q6/Cuug5d5OQH95P
vf418/CjFiWOXnouR1mSvvUzj//PbBLfeibNlB7JiKwTZs9sNzA033LCobf/+/avqa6n9lur/UmTfvn2L2t7+9refdZ+L+dr5vs/evXsXtE1NTa26BNqBAwcW/F8ul89a/s1isSxmtt59EHgMD1ZAm4DUfCanjUp91so6TU2gyGKxWDYaSwWINpOwyJGCkXBp4dCJNWQWa61pZgXHOinTSY66SGXllNZkyv4+rRUZhDhjW8H1EI7EGRxGVGogBDpNUBPjqG733B3No8+BYcdkoA05MOaZQM6FrD4W9sbq9g6RzmV+qMgwIrr33dR++ucJ3nY/wg8WbePuuda6FFksFovFcoGplM1vcqnnCBEnOXGyunPxolCMTzZJswLHEfRVzYLv8EClV+5+5US9eQWBycneOtK/5Hbjk02yLEMpSHPjPRBbtyKLxbKB8aRAYASgs24l2UWKb1wszpRsdqJrv98vN0zsZxuybxCERHoL33vd7eA2pkFrFKecflo9MQ5AvzRCnlm8nlP0sGOcXWY/PaEwZca2uKYsWV8vyUxjytSrnhvS9Hwro3korTmRQaHNPGa3q0oTW1rwvIQRGo04PcGSgJqEra5xNIp6saiaY+Z1+hymzjCH9UA4Lt6Ve4ne9QDVj/8kff+vX6b6k/+C0gc+RnDHO3B3XomILpH0tyIn+dZjTP/6v6bzd3+Jji9dh6ULgXUq2sCMjY2xZcsWjh8/Ptf2yiuvLFnj+Vy8+OKLi9rO5fZzqfDaa68t+H9gYIA777zzrPss9dxeeuklPvShD614/NNfO8/zzinIuvXWW3n55ZcXjX/XXXetaOxjx44xPT29oO3mm29eVwWpxbIZCHwXx5WQK7aMGjtDpTXOvM9Ss52Qphm+75FmOb7n0rUl0CwWywZkNkBEL0CUaU2uNS6b5/xhS8njeHdxFsyxTkozKwikwJUCuYxzqq8dr3O8mzERZ8TzxKZlz2FPNWRPJWBXJZgTc6037bzgaCflSDvlaCdlPM5QWrOt5HP3SJXdlcCeG64S4Xo4Y9tQM1PGErtcMZbY9RnIU3RjGpXEiFofYpnvbyjhYp45hNJkq6XaBIsAEqUJLvOsUxlGRO94F/61NxA//jXSV14EbT6vpQc+Yj8jFovFYrFcYMpRYEqguQ6VckCrnTA+2WTrSB+et3w3yCwrGJ9skOUKIWFooIIjJb7vzomLVkPUS0IIfHM/OlzDLLktPmdotBOG+j3SNMd3HeI0mxNNWSwWy0bEdyRJoXClICu0SUS7TMpmL4ex0GNffXES0Yluuqr1V8uljRDCxHVKZcSJJarKJF28QpNW+km0JNemvBgYoU7lDJ+NUMIWCZk2Di/OEofNgANpbkqVzRQw6BqxUCQ1US9OU2hNozCxnKIXy5nsuRWFPbHQmQgkjJwjXDXgGIekHDP2kGvmEghNZalJrzNCSpyBIZyBIbjuLXPtOklQ9WmKmSnUzBSqPm3uZ6ZQzTpcQLGjTmI6f/tZuo88ROl7/hHh2+9HeJvP6d6KijY4H/rQh3jwwQfn/m+32+zbt49rrrlmRf08/fTTS/a9EXjkkUcW/P/BD34Q1z37of3BD34QIcQCJ5JnnnlmxWM3m81FoqZ3vetd5ywb96EPfYg//uM/XtD2zDPPrFhUtJHfN4vlUqMcBTSaXXZuHQBAK73Iz6/eihkZ9IiTDN9z6VhRkcVi2aCcHiDKlCbcVAEiH2gvap+IMx58xQj2t5d9Pn7lyDn7OthOGF8iW62dFTw31ea5qTauFOwqB1xZDQnnSiEsvPgtuw4joUewwlIJj403eXZq8XM52kn5iwOTbC/7vGO0xg67sLEqhDQuRSoqo6YnEIAcHEa3m+h2C5100RMxIiojyuUVlUS7GHjCBAFyjHV3JIxbUbBJPIxFGBHcfAf+rW+jOPAaxeRJvKutS5HFYrFYLBcaKQUD/WUmp1oMDZTJ84I4yRmfbLBlpA9nGefEnThlYqqFUhrXlYw
OVfE9FwRsGamtadE3DDyEFLhIPE9Sq4QEvjfnXj2ferPLUH+FJM2plALrVGSxWDY8nhQkhbnvFpBtIndrgLHIX7K9kytauaK6AvGrZeMgXBfZP7iwrdYHnTZhlpI0ZmhVqyjHiEnK0jj9nAvvLKcjUhgn6/ECOhoCZfo9kcNWV9NSppTa7Ccw18ZFqMDEdgadtbtfO8IIicZz6GpTjq3WExr5QuNfpCQ0EQQ4o1twRrcsekwXBapZ7wmNjNiomJqgOHkc3ayftznpVoP2Z/8Qd/suvKuuPW/jXKpYUdEG5yMf+cgCUREYkc1KRUVf//rXF/y/detW3va2t615fuebmZkZvvnNby5o+8mf/Mlz7rd9+3buvPNOnnjiibm201+D5fDYY4+hTisv8ZGPfOSc+33gAx/A933S9FSG/COPPMI//af/dEXjLzXn5YxvsVgWU4p8Gs0uQwMVotCnmyx2sJhpdBgZrBInGbVKRCe+xGq9WiwWyzLxTwsQbabyZwBbzmBlPR9/mc4zI6G3pKhoPrnSvNGMeaN5bpvc/sBlLPQYjTy2lXy2l84uBtpW8pcUFc1ypJ3yP9+cYHcl5J6xKlvOEByznB0ZRYhgG2p6CjotRKVqXIsaM5Bn6E4L3WkjoghRriCcS/dSOzDmjMQaIkzgauBiT+oCI4KQ8J53IcKSzTK1WCwWi+UiMTxQIUkyWu2E4aEqx8frZLkpZTY2XDtr6bJ6s8t0vQNAGLoMD1ZxpcRxJdvH+ufKl60WIQRh4NHtpgSBR5YpBvvLHBtfvFA1XW/DzhHSzAiOVlvGzWKxWC4VZkUEs+XP8k1W/mzsLDGjE92Uqrd6JzzLxkJ6PnIwIpqaolHkZPUZqPZR9n361yk5y5fQp40T0UxhSpUBHJ53OpFqaBVG9KMxufBDrhElrcsceuXSpgto9hLPAmGERTsuwTCicByc/kGc/kG4YuFjqtuhOHmcYvwYxfhx8iMH0Y2ZdRvbu/7mTSkogkUeDJaNxrvf/W76+/sXtH3hC19YUR8nT57k8ccfX9D2/d///ec9uJplGcePH+f48eO022deCDkbf/VXf0VRnCrueMstt3D//fcva9+PfexjC/4/dOgQzz333IrG//znP7/gf8dx+PCHP3zO/fr6+njPe96zoO3v/u7vFjyX1Yx/4403ct11162oD4vFYij3FllD32NkqAKaBW5mAFMz5rtqNjMtTXPyYqGw0GKxWDYCs4KZ2ZJcmdpc32UDgYt/Dgvf5WbiDAfra3c7k+S8Uu/yyPEGj59snXP7baXlXd3vb8X8/14/yV8fnKKxRIa15dwI6eAMjSCHRkE6CM/DGRpB9A+B5wMa3e2gJsZRM9Po7NJcUAp7h3asTt0XmyxIbLFYLBaL5dJg62g/YejhSsnocA3HESRpzuT00ufBeaE4OdWcExRVKyFjwzVcKQkCl93bh9YsKJql1CuBFvZKoPXXll5Eno0VpVmORlMUiixbWYzXYrFYLiXmYkbObMxoc10vho6kz186UejEOZLKLJcZjotwXbzhYUZcSUUX9Dem6E+7a3YImk/VMSIeDUzl88rVa5jIjbin0xMUBQJG3LM7IK2GsoRSbw6TvTkkGuIN9vmXUQlv1x7CO97B/5+9P4+v7CrvfP/PWns6s2apRo/YeAIcMB4ChGAmE8CBkBgaEuDXELoh4RJ8E0LnAglNcoELHTcEyEAIaQwmpCFOYyCBMHSaBOwYg00wnqeyXVWahzPucf3+WOcclUpHVZJKqkF63q9XvWRtnT1IsqRznv1dz1P8hVdQeeNvUXrNfyK47OfQA8PHfPzii39lA67y1CSholOc7/u8/e1vX7Ltq1/9KtPT06s+xg033LCk247neVx77bWr3v/mm2/mgx/8IH/1V39FrXb0Gx8dt9xyCzt37mTnzp3LPofVyLKMD3zgA0u2/eEf/uGq93/Tm97EwMDSNbmf+cxnVr1/HMd84QtfWLLtda97HTt2LG/
F1svv/u7vLnl/cnKSf/zHf1z1+e+4445lIajDjymEWD3XdfB9269ybLgPgOywG2yTM1W0gjQ13RVozaZ0KxJCnHq8zqqz9tt4mwUKtFLsyh+5A9Bqx5AN5zZvhvbYKo7d5zkU3dW33r5/ockND04yJ8GiddOFIs6O3aiCHXmsgwBncBg1MAy+HYtqwibZzCTp5EGyhTmyVnNZWPlECRQobLvsqH1Jze2VKxRCCCHESUJrxe6xfluTcR1GBkugoN6MbAcgIE0zqvUWByfneezALPVGBAoG+4sM9RdRKMqlHKftGsJdw/Pio+mEk4L2jeVKqXeoaGqmhlJgMojaYSLpViSEOJV1a0bt0EJqzLI6+Va3UreicRlxua3ooRHwA5TWBEPD9OUCiiqD6ixZrbqh5xp0bGgjxnYMmohhMrFdpsGOrx91NydQ1NHv2BFXGYvnPdXrRUop3B27yD/zuZT/f79J+XW/YTtXt2t6a+H/zOW4e04/+gO3KAkVbQHXXnstIyMj3ffDMOSjH/3oqvaN45jrrrtuybY3vvGNnHXWWava/33vex9XXHEF73znO3nDG97AxRdfzPj4+Oovvu3rX//6sjFiR/OpT32Ku+++u/v+L/7iL/KSl7xk1fv39fUtC+F88pOfZHZ2dlX7f/rTn2ZiYqL7fhAE/P7v//6qz/9zP/dzXHXVVUu2/X//3/+36v0/+MEPLnn/wgsv5DWvec2q9xdCLBe0V5/tGq0AyzsVzc437F04FgtEMgJNCHEq8g8rEGVm+3UqeepQ8YgfX22nopHc5o25Gl3FmDal1Kq7FXU0kowfTK1+MYBYTjm2a5GzYw+qWAYU2vdxBgbRgyOQy9vB9lmGaTYw87NkEwdIZybJalWyKMRkJ2YFu1Y2WASL3Yoap3iRSAghhBCnLtd12LOjH60VucBneMDe5JmvtoNEB2eZnq3TCm0oPhe47BguUynZMPfwYIldY/1HHJe2Hrl2R1LPdXBcxWBfsedN9STNaLZvMkeRjEATQpz6OvUQrRROZwTaKdat5FitFCqaaEZHXDB0siwmEhtDOQ7OyE5UvohS4PQPoAplAEy9SjY/u2Hfc0fZYBHYMWedu05FDTtcO+7M36QwUYdWkGsnRzqhosYW+l9aKYUzPEruip+n8oa3kXvm81BBbnU7a4fiL7xicy/wJCehoi2gVCrx3ve+d8m2D33oQ9x7771H3feP/uiPeOSRR7rv9/f38+53v3tV573rrruWnfeBBx7gHe94x6r2P9S+ffvW1CXo5ptv5q1vfWv3/b179/Lnf/7naz7v//V//V+cccYZ3ffn5+f5nd/5naPuNzExwbve9a4l237rt36L0047bU3n/8AHPoDnLT45+T//5//w2c9+9qj7fec73+Fv/uZvlmz70Ic+hNbyIy3EseisPtu1w3Yx6/V8cKHaAg4JFUmnIiHEKcjTCsX2LhCdUc7x0tMG2ZH3e7Ys9lf5vKrgOjx1qMSVO/u45sxhfv2JO3jJ3kEu6C+Qd4/tudlqOhWBHYHW57tc0F/gyp19nF46chcmgEdq4TFdm7CU5+EMDuPs2osu9y+OResbQI/sRA0M2dVPbvt7GceYehUzO002OW47Gc1Ok1UXyJpNTBwflyJkZwRauAWLREIIIYQ49QSBx66xflBQKgT0V+wNnlaYYAz4vkN/X549O/rZMdJHLvBtl6MdAwwNrH2l+Wo4jsZv14lyvkd/X8HOBOlhvmrHsYWdrtYSKhJCnMKUUod0K2p3uN5mNaPRFUZpNpKMWtJ7VU6aGf76/gm+/vgs9y00CVNZvbMVKK3QQyPosp1uoctlVKUfUJhWk2x2GrPGphkryWkYduwYsoqGnS4MOIuLQo+HziK0MFt8uxUXoirfJ3fZsyi/8bcILn0muEdeNJq7/Nk4I2PH6epOTpu3rFYcV29+85v5p3/6J2688UYAms0mL33pS/nmN7/J3r17e+5z/fXX8773va/7vlKKT3/
60+zcuXNV5/zOd75Dmi5fXfv1r399HZ8BvPWtb+Wss87i537u5474uOuvv57f/M3fJAztjZDBwUFuuukmxsbW/sOcz+f5m7/5G5797Gd3j/epT32Kc845Z8VRYjMzM1x99dVMTk52t1122WX81//6X9d8/qc85Sl86EMf4rd+67e62/7Tf/pP7Nq1iyuvvLLnPnfccQfXXHPNkpsNb3/723nRi1605vMLIZbqdCoa6i8SBC5hj9Ewswt1SsVc92NRlJCkGe4qx+QIIcTJoFMgijKDpxVpaogzQ7Bx0wKW6azq1Rs5dPwYnVPJc04ljzH2848yQ5RlRJlZ00ixn9/Zt+T9cl+ec/vyZMZwoBnx4EKLR+ohtbh3Z5rEGKJ06Qv0vKspe6u7hp8ZKvK04cWbKRcPlXi0HvKv4wvsb/QOv85HCa00Iyd/vzaEchxU/wCq0oepLZDVaqg0RvkB+DbkZdIUE4WYMIQkhjS1nYyiECL7WqT7f4HWKNezRQ3HBddFuS5Kb8wPaU4DmR1/1qkNtzJDboNX+G+UZmZIjF2hdzL9DhFCCCHExikWAsaGK4xPLtBfKZJm4GgoFHL4hzw311pRKuYYGijie5t7e6OQ84mihMB3cR2HvnKOan15OH9uocnYcF+3U1EooSIhxCnO05o4S/EcRZhBvAVDBUeyUqcigIPNiLK3fCTm442I2TBhNky4c7aBVoqBwCXQisDR5BxN4CgCrSm4msHAZSjwKLoatUGvc2fDhDgzDOXc7iJCceyUUqj+QXA9stkpdL5A5jiYuVmII7KZKXT/gK3jHKOcXuwWdCJ0QkUpkBgbaGplUNzEmvGJpHN58s96PsHPXEbr+/9M+OMfLH+Q55N/4S8e/4s7yUioaAv53Oc+x0te8hK+/e1vA3Dvvfdy8cUX8653vYtrrrmG3bt3k6YpP/zhD/mTP/kTrr/++u6+Sik+/vGP87KXvWzV51tpBe16V9bWajWuvPJK/uN//I+89rWv5WlPexr5fJ5Go8H+/fv51re+xfXXX8+//uu/dvc544wzuOmmm7jooovWdU6wgaC//du/5ZprrukGi975znfy3e9+l9/5nd/hiiuuwPd9Jicn+fKXv8x73/teHn300e7+F110EV/5ylfw/bWNneh429vexsGDB/nABz4AQKPR4IUvfCG/8Ru/wRvf+EYuvPBClFI88MADfOYzn+HDH/4wjUaju/9rXvMaPvzhD6/78xdCLOp0KvJ9l+GBEo8fnMMYs+RJ/eRMjdN3DZGmhihO8D2XZjOiXFplm0QhhDhJ+FoTZSmeVrRSG2zZDElmmI8SGmmGAgqupuK5uCdReEEphe8ofAdg414la6XYXQjYXQh41hEeZ4yhGqeMt2ImmjHjrZicVqsuKvUKWewtBrzyzGHur7a4ad9Mz/3Gm/GquhqJ1VNaoyr96Eq/7TrUatrxZ1GIckDlC5AvAO2OiEmMSWJIEkwSQZyAyZaFjaAdONIa5frg2cCR8nyUs/b/Z11liwEJtqV1QdkRaL0KV2FmmM/s4/scjnthcjYxzLTzeI6Cna4hOIl+fwghhBBi4/RXCkRxwuxcg6H+Q8YVKygXc5SLOYqFYMNHna0kn/OYW7CdlAD6KoUloSLH0YwMlrqj2KI4tYspMgjbYSQhhDgV+VrRYLFT0Xbrbp1zNP2+SzVOGcl5jOY9xvIeYzmfoVzv3+0PticcdGTGMN06esg052iGci7DgUe/7xJmGdU47f7LO5pXnTWyquu+fabOj6ZruFoxHHgMBi65dqAp59hwUyfgNBS4BLLQbE10qQyuSzY9ifYDzOAw2ewMpAnZzBSqfxDtn9p1Nq1ssCg0tl5UUnYcW/Hou57SdKlC4fkvxX/KJTS/+RWSRx7sfiz/cy/A6Rs4gVd3cpBntVtIPp/nH/7hH7j22mv5xCc+gTGGmZkZrr32Wq699lp83ydJErLD2rANDw/zqU99iquvvnpN53v2s5+N1nrZ8V7
wghes+3NI05RPfvKTfPKTnwQgCIJu0OdQWmte//rX88d//Mf09fUt+/haXX311XznO9/h1a9+NQ8//DAAX/3qV/nqV79qbzL5fs/r+OVf/mU+9alPUalUjun873//+zn77LN5+9vfTq1WI0kSPvKRj/CRj3wEx3FQSpEkSzumeJ7Hu9/9bt71rndtWIpZiO3O8xy0VmQZjA5X2qEi0BqG+kvsGKlw1mkjBIFHsxXTCmN8z6XRklCREOLU4zkKEvA2qZV1agwLUUo9SbvdVwxQTzIaSXRShotOFKUUFd+l4rucU1m+2u1YjvuEco6Cq2n0aM893owkVLSJlOehPA/KFUxmMFELmk3brSiOUGTQecwhTGYwaWK7GSXp4n+nSTts1ILIFiu7QaNc3v7zVr/QIaehltkVZwVtQ0WDhz0mzAyPx4sdlKoZjLnHr6NRM1sMFCXti3g8hlHXUHLkd4cQQgixFY0OVXC0plpv4ToO5VKOUiHAOQE3PvPt8Te+56A0nL5riN1j/ewa62dsqMJgf7F7XY8emLEL0KKEXODRCmMJFQkhTll++zVfZwzadgsVAbzijCFKroOzyte/D9VaR39QD6004/F6xOP13p2mW+7q//5NtOwxksxwsBlxsNn7mGAXqO0p+pxdznFWOUef/M1aFZ3Lo0Z2kE6NowA9NEw2NwNxZEfdF8uoYplT+bZtrh0qCjMotetF24UzMET+Wc8jvWiG+L67SA88Sv65v3CiL+ukIL8hthjf9/nYxz7Ga1/7Wt7//vfzta99jSiyfzQ6bzvGxsZ4wxvewLXXXsvQ0NCaz3XRRRfx3ve+l/e85z3d7kRnnHEGH/rQh1a1/xVXXMG//Mu/8NWvfpXvfOc7/OhHP1oW3Dn8/R07dvCKV7yCt7zlLVxwwQVrvuajXc+dd97Jxz72Mf70T/+0Gy4yxiy5Dq01z3nOc/id3/kdXvjCF27Y+d/4xjdy1VVX8cEPfpDPfvazzM3NASwbMVcoFHjFK17Bf/kv/4Xzzz9/w84vhLAC36XZijn3zDECz+XMvcOcffrokkLQfLVJsxV3R6A1jvDkXAghTla+tkUJt/02yjLCNDvmVUrGGGpJxkKU0HnNmXM0fb6LMbAQ27FbJyJcdHj3ue1AKcVo3ufh6vLi1vgqVsyJjaG0QuXykFsMjZkkxkQxxKHtahRFkMb2sdqz3YgOYQyYOII0sW9j2+GILMM06phGHbRzSMDoyG23cwpq2JVnYAtGiTHdlaipMYwnNlAUmsUeXvtjGHQN/Zsc6umcH6CewXwKAy7kFYwntv3+gLu9fp6FEEKI7WJooMTQQOnoD9xknufgug5JkhL4LmedNsJQf7HnwrKc71FvRrSimFzg0WxF9JU3bsGAEEIcT16nZtTpVGTMtquprCVk0xl7thmaSUacZd3vyUoyY5horr7OkxnDvlrIvlrIdw7MM5zzOLuc4+xyjh2F9U1m2S6U7+OM7SKbmoCohR4YJqvOQbOBqVcxUQvdN4ByTs0YRqCxXReNrUXF2BqMt41+/p3BEYJXPI2stoAunPjnpCeDU/P/ZnFUl156KTfeeCMLCwvccsst3HvvvczNzeG6LmNjYzz5yU/m4osvRh/lj9DRvOtd7+Kqq67iu9/9LpVKhWuuuYZyubyqfR3H4RnPeAbPeMYzABt6uueee3jggQd4/PHHqVarpGlKqVRiz549XHDBBZseoikUCrzjHe/gHe94B3feeSe33347+/fvJwxDKpUKZ511FpdffjnDw8Obcv49e/bwJ3/yJ1x33XXceuut3HnnnUxOTgIwMDDAeeedx2WXXUY+Ly9IhdgsgW+7EO3ZOUCpkKNY8JetLMsF9v1WaJ+kR1FCkma40i5UCHEK6a46U6CAzMBEK7YBIM/BX+PvtNQYWknGQpx2R6l5StMfOOQOOdaI4xG1R6IdGi4qug59vtNzlNexiDrnSFMyY8evDQXHPuP8VLIj7/UMFa2l2CQ2nnI
9lOsBhe42k5lDRqLFmHZwqNPZSPk+4C8ZoWaiENNqQBhClmIaNUyjBo6LKhRR+WLPFXJB52cfiAz4CpoZlNvpockEYmM7BE23a6P9ju1qNJ1AKzOMuJs3Dm0igdTYa5hLbbhpOrEj2MoaZlKIjL2Gw39vhJmxxS/sqrrjPbJNCCGEEFtHPudRraW2+1AroRXFlFkeKvJ9l3ozImovQGtKgF8IcQrr1IxcrVDY11bJNgsVrMXho882Wi3OGAiOXKebi5Jj6kI+1YqZasXcMlnlCZU8z9vVR8Fd+7j17UI5DnpkB9nMJDTrOJV+Mj/ALMxDHJNNT6LK/ehT8H6ur6CdKyICAmy9yNuG/zvo0rFNKtpKJFS0xVUqFZ7//Ofz/Oc/f9POcckll3DJJZcc83F83+dJT3oST3rSkzbgqo7dhRdeyIUXXnhCzu26LldccQVXXHHFCTm/ENuZ79tnRr5n/0TGcbr8MZ6LVti21nGC77k0WxHlooxAE0KcOnyt7I12DTvzPgtJSj1OaaUZrTSj4Gr6jtJBKMoyWol9fHhI4cJRij7Podh+talYXOE1HyX4WjGSWxouqiX23IMbMNM9NYZGklFP0mUFlUaSUXAy8mtoH32qG831DlHNRwnNZHt9LU52Sivw/XZ4aJExph0uCiGK2iPUYlSWooIAgsAGjMIWptW049HSBFOdxzQb6ErfstFoStluRU1jR6D5ju0IVHZgLjXUMxtamknodh2bSe1KtX5tHxvGdhxaYKcpdkNIsbGBIE/ZINJaV7POp4bGIec32E5JKbZjUWxgoD2+LY5hwDVEmf1cwmzxegHmFOz2FjswCSGEEEKsRT7nU621yLVfz3S6Vh+usyAtjO3HoyghTbMTMrZNCCGOldOuGaXG4GlNlGUkmcGTX2k9PWmwQL/v8GAt5KFqi1qPewrHohqnDARHvqU/voELx+5faFJ0Nc/d1b9hx9yKlFY4w6NkC/Nk87PoXB7j+WTzs3Yc2sIsWdRCVfpPuS5fgVqssQSOHYFW2YahIrFIQkVCCCHEITpFIK+dwo+TdFlrV6UUQWA7GrVCGypqNCVUJIQ4tSil2JH3GW/PWx/wXcquw3yc0EgyGklGM4lwlUK1O5oopdpvIUwNqVka2PGUpuAqSt5ix6GS6zCUc7ttmvt9l7koWRIuaqUZM2FCYgyTrZiK51D2nDW94M6MoZXa626lGZ0rU0De0RQ9h0aSUk8yoiwjz/aphI3lV25bPdGKOL3H+AZxclFKgefZkWaLjY3s2LRmnaxeQyUxKpeDXM4GjFoNTHUBkphsZsqORCtXUHqxCpTT0ExtqKji2JVnzcww075XNpfZVWka26GoltkwUZzBYLua8PhR6pahgR1raA4WZqbbGWk+s222NTDq2uuby2wxKzUw5EAIHDzsGjJjr7tT8BhPYJe7/hWbQgghhNi+Cu2Avu97oCBJMpIkxT2se4PvuSgFaWKIkxTPdWiFMcVCcCIuWwghjpmnFWlqcDVEGcSZ4dTruXJ8+FpzdiXP2ZU8xhgmWwlTYWwX4aUZYWraC/JszWo2tLW31aquIqTUSLJuEGwj/GS2wbN39B1xsaGwdKUPlcuTTk2gAD0wbMeg1WuYVtMuCiuV0blT5yeoV71ou41AFEtJqEgIIYQ4hO/bYpHnOigNJrMFI++w3o65dqgojGIgR6MZnYCrPbJaPWR8aoE0yxgeKDHYXzzRlySEOMnkXc3pxYD5OLWz3zUMBR5lb7GDUGzaM4SAQ/4DsIGdwNHkHU3O0UsKDTlHM5zzlow+A9s6ezjnLQkX5RzNjrzHbGSLKvNxp2uRd8TiRZoZmmlGs12kOfTqfK0pupqCq7sBpyg1QNYdz7ZdlNpfh14Fq/FmLKGiU5jyPJTXj670Y6II06iTNeqoNEblC5ggh6lVMc26LWSFLVSp0h2JFrR/vCJsEAcFB2L7k95oB4gABh1bUMqp9tgx7Hiyzjg02vun2I5FqbHvl9s
djWYTw4B79MJTZgzj7c5ETWNDTAADDjgKSu23s+2OSRMJDLn2d1Fk2v/aQSSDLXiMurYINp3C4EZ94YUQQgixbQSBh9YKMvA9hyhKaUUJpcNCRVorfM8ljBLCKMFzHZoSKhJCnMICR9NKs+7Is+1WS1kvpRSjeY/R/JFX1zSTlOkwYSpMmG7FTIUxjSQj0Jpye7Fdqf12V2HlxWIdTxsucfFgkekwYaIVMdlKaCYprcwQtruS27eGbBXfy9QYpsP4iAvVxCLl+zg7dpHNTUO9hiqV7Ti0+VnbSXp+lrS9XQcnfx2uUy+KWawXhcbWhVZyIkNHxhhSkC7Vm0hCRUIIIcQhXEfjuJo0yWwxKEyI4mRZqKjT0agV2qXxUZSQpBnuSdLWulpvsX98rnv/f3K6iuc6lOXGsRDiMEop+n2XiucwFyXMRSm+hpGcR5zZbkQGO4bItP87M+A5ikCrbmAH2l2BXIdKu/BxJJ1wUZ/vMN60q7eGAo+8kzEbJYSZYbwZdTsWdc5tz2+IM0N02GgzTynyribvOvh66XWZ9jkBkmOYMX8qUkoxlvd5qNpa9rGNbI8tTizVHpum+wfIWk2yuRlUHKEqfZh8gWxhHpLokJFo/bieh4ctErUMFJT9WYmMDe6ADQbl2k9vchrGlA3oRMYGjGqpDRL1WmOZYANJMynktCF/lBWO0+ni6LTZdreikob8IU+v8hpcBdOJPf5Ecnjccen5Z1MbPJpPwctAItZCCCGEWKt8zqfeCAl8jyhKCaOYUo+wUOAvhopKhYBWS55rCyFOXV779ZurNbB8vLw4NnnXYY/rsKe4ceFTRx890GTaXcIfqLZ4sNo6Yl1oqpVIqGgNlNY4gyNkuTzZ7Aza9zFDo5hGDdOoQxJj5mZIPQ9VqqD9kzd47CobIkmwYaK8st2Kcj1uf9VT063NDLmGPuf4BntiYzgY2zpVoAxjHt0wpNg4EioSQgghDhP4Lo0kwvMcwjAhTpa3Fw18F60gTQ1RbEegNVsnxwi0Wj3sBopqjZAkTekvFzg4OU8u8JYFpIQQAkArxWDg0ee57XBRgqcVHkd+EeZqRdF1KLi2Y5Fe44s2T2t2F3xmo4TZMKHganztMRMmhJntWnQkgbZdkvKu7ha8wAaJip4NOAHsb0Tdj2+3UBHAWN7rGSqakBsdW5LO5VFjuzC1KtnCHMoDZ2iYrHnISLTZKVT/IDknIM5sN5+CtqHBmXYxKFBQOaxg5CgYcWAhg2p7PFr3vIDTfkzL2G5HOWWPO57AHs+suGqslhoW2j/uM6kNKXlAX4+CladgxLWPC9s/zh4QaPDbHZhSbOCoaWAhte26pzKNZxRSEhVCCCHEWuRzHvVGSC5wqdYgDJOej+ssQLNdraHZOvm6WgshxGr53VDR5tdSWmlGNU7JjMHTCl9rAkfh6ZNjAe9WYjsp+Yzmfa4YrVCNU7708BQzPf62TYVSM1oPXSih/IBsZgrCFqpUxhSKmHod06xBHGNmp8n8APIFVJA7KceK5bTtIN0ykAcaBgYOe0x8SMdpgKkE4PgGiyYTGygCWyM6GMMuz+CchF/TU5mEioQQQojDBL5LoxHht1tZR/HyJ9RKKYL2CLRWaENFjeaJDxXVGyGPj8+CgXozZGq2BthxbTnf48DEHHt3DZ6UT1KFECcHRyuGch79gUuYZmTGdgbqdCgyGIyxIaSCqwk2oEObageaCq7DeHuc5GjeYyFKCbMMDShlH6exgSFHKXKOxjksSJR3HUqupug53RePndV0nelLGXZ0mrON5sKP5uwqNVcrhgOPsbz9N5qTeMVWpZRClSuoQpFsfgZTr6HbI9GyuRmII8zsDH6lH9w8LWM7ks22Ow852C5DvZ4yKAV9DhS17SzkqMUwUcdCaoNHs6n92fOxwaJd7tJ22MYY5jMbZOrsFxr78zzo9j4/7XONuLarkQIO/3F2sGPTZtvX4Wt7DQdx2bP9coVCCCGEOAb59nPmILDPqaM
4Jcsy9GE3uzuhoqh9Y5wMwijpbhdCiFNJJ9DjKfuaKwOaSUbe3bigT5hmzLdrPx1RZqi3e+FqwHc0vlbkHY1/kkwJ2ErKnsOOvN87VCQL0dZNuR7O6E6yRp1sfhaVxKhyO1zU7lxkohCiEKMUKshBLo/ycyvWQY63QEENCDPAsW9TszSsM9kOFLXa4+grjg0WORhKxyFYVE8NzczWs6ZSW8eKsPWnne6JG8e2FcmzWSGEEOIwgW+LRH67u0W8QpeMwHdptuL2CrQcjeaJXYHWaIY83u5QVG+FTM7UuhHxqZkaO0f7aLZipufqDA+UTui1CiFOfo5SFNzj29ks52j2FAOmWzELcUrFd7DRgJV1wk0l1yHv6p6rUDytbL8lpXCVIjGGxBico3Rh2kr2FgN+9exRhnKurNTZZpTj2PbbxTLZ7DQqjtADw2TzsxA28eZnoQhZLs9sajv7AAw5S0NCvbhqMax3uIpjV4q12p2PRl3bDWk6heF2JSLMDJPJYrehlrHdjwD6HVu8PpojXWNR26JWvX0NwwYMigmj2XP0QwshhBBCALZTEcqOAHJdTZJktKKEQs4nihMmp6scnFqgUsqRCzzb1TpKyAUerTCWUJEQ4pR0aC3FaddSpsIYP1aUPYe8o9d9wz7KMhailGZqXwB2uk0HWhNnGWFqiLKMDNvFqJXCQpySczR9nrPh4aIkM9QTO+ItcDQld/2f26loONd7XNpUq3dnPrF6ulBE5QuYRo1sfh5FjCpX2uGiOqbVhCy1b1tNGzDK5cEPUH6AOoHduoL2j0ACJMbWf1oGiu3ttXagJzMw1x5Rr5UdYz+RgKMM+U1c0JkZw3T71l01s7WlqcQuQGtmNvA0uvIkQLFG8mxWCCGEOEyn2ON69m2cZCRJxux8nccOzvLYwVkWak1ecdXTgCatdhvQKEpI0gz3BKyYaDQjHjs4h8kMjVbE5LQNFBULPlGUECcZM3N1RgbLTM/WKOb97ko7IYQ4mTjtNsxFL2UhSkmMaXcqUt2ORVoptIK8o1ddxPK0IsoMrlYkqWkXijb90zlpBI5mNC8r+rYzHeRQo7vIZiahWcfpHyBb0NCsk6sv0EhTGkUbOu7XtrPPsRpwbCEpwXYMGnJhPrUz7pN2V6ROF7S5zI5LAygoGwjaCP0OxKkNOM2kmiEDDaOYTQyDG3MKIYQQQmxxSiny7W7VucDj3x96nO/eei8L1SazCw1MOyD9xLPGeM7l51FvRrQi+9hmK6KvnD+xn4AQQqxTztE004yRnEc1SanHKVFmmA4TXGXDRQVXo2DF2kxqDJkxpMb+dyvNaCSLYaKCq6l4bnfMmu1PZLvaxgaiNCPMMppJ1g4YZRQcTcV3jmk8WmYMzSSjnqSEh4x2a6YZCpeSt32KRsM5ex+kz3cZDlyGcx7DOZfhwMMY6fZyrJRSqGIZVShh6jU7ph5Q5QqUK2RRBGFrMWDUbECzYdeLOy7K98ELUL6Hco5ftEO3x8uH7QVjJWXDOkVtf5an2oGeWmbrPgBzqf0JLujFMWTBJgWL5lPbPTs5ZIFaTHsxm2O3uYlhcKXVcGJNJFQkhBBCHMZvh4lcranWm/zrDx5gvtogOqxjURwnaIVdgZak+K5DoxFSOc7FIhsomu0Giiamq91A0fBAiShOODi5QL0Rkc+FlAoBBybmOX33EI60jBVCnKSKrkNxAzsleVoTZaktUqWQGHP0nYTYYpRW6KERzKxDVl9AV/rIHE2lWsW06iQmpVjuo7RBP3qOskGiicR2QFpIbQejiUMWOzYNzCbQaXZf1DbUtFGUsu2vJxLbAnsOh0FgJoV8Zihv3KmEEEIIsYXlcz7Nlu06NLfQ4L6HJ/AOe70yMVXF913qzYgosk94mjI6RghxChvOeexvRKBhwHfpcx0bLkoyEmOYjRJm2837Veefsh2O7AISw0rVFxsMcvHagQNHKSq+Q5waWllGkoGvwNcOJRwSzzAfJzSSjEaa0WhmFJc
FklZmg0023NRIUhpJtuTaco7GUVBPMppptq1CRXsKAb95/k4ZL7fJlFKoUjtc1KjZf2EL7fvg++2AUQitlh2NliaQJphmshgy0hrl+uB54Looz0c5m/f/aidUFGa2A1FnMdhMasfRx4cEenxlF3TNprYeFCg4kMBuz+AdEkzLjLFBpcwGl8raLiBdi9gYZtu36xbaC9Y8bLgpNDCb2VrQbAquMlR6tLk27d9Paz33diWhIiGEEOIwWis8zyGOU4qFgMcPzuK4GuewlQ+Pj88x0Fek2YppNEP8coH5WvO4horiOOXxdqCo2YqYnGkHivI2UKSUIvA9+sp55haazMzVup2YxqcW2DXWf9yuVQghTqROkcptv1BMMgkVie1JKYUaHALXIZufRRfLuNphcGEeojqqmmD6BzdsJaKvbMei2dQWmgJtC0tZu9DUGbfmYh8XbEIN0+0Ei4AmmrpRVICFREJFQgghhFgd2+25ThB4DPYXMT0WKcwuNLr/HcY2VBRFCWmayaIuIcQpKXA0p5cCqnHKXJQQY7vZlD1DPcmoxilp+/ehaf+jx+9Hp919WiuFq6HkufiHhIn6fZc+31lycz/ODGG7M1E1tu1PhgKPsmdYiBKaaUY9yagnUbfDtaNAYbtba0W3O5LtlrT883OVai9q0zha0WofM91mNSMbypJgxfGitA0XUSpj0tSOQWvW2wGjAPwAAJNlmCiCOLJvkxiyDBO1IGrZx0A7aOSBZ8NGyg82rKaTU7CADeoYYzsB1VLDQjvQM9cO9OQUDDk2bNQ0MJ3AsAs+cCCGIdcQZvZjYcaSQF9dw053bR2xphN7jNBAo32wAdf+zE+nNvzkYhe2TSU2WOQpG2TqBJoiY4+R14YdroSLjkZCRUIIIUQPge8Sxyl95QKlYo5GK1r2mMcOzrFn5yDNVkytHtJfLtBoRCRJiruB3TWOZGa+TpYZWlHMxEwVk0Eh7zE8WFryJKyvnKcVxrTChOnZGmMjFaq1FguF4xuCEkKIE6VTrOqsXpNORWK705V+0A7Z7DQ6XyDTDmZ+BhOFmOlJdN8AytuY4fNFbYtGjXZhqeLYlWSd7kRl3VmZtiGn6ymnoawNVaCFDRUl8mtACCGEEKuUz9nnRb7rMDJYAkPPkTDzC02UgjQxxEmK5zq0wphiITgRly2EEMdMK0Wf71LxHOpJxmyUEKYZZc+h7Dndbh+mfYM+O3RfbMeSXmEBrRT9vkOf7+L0+LinFZ52KHkOg4HLfJQyGyX42nZQijLDXJgQZpk9Z7sLESv2RrKxGVcpfEdRdF2Cw7qXdBeiGSNjv8RxoRynOwbNJIkNGLUamChCaVC5HORyQPtnLLbhIvs2sf+yzHY2ikL7ODhkbJqP8jwbOloHT9mf4wzb/Tlgsft0ox3QUdjR851O0VOp3T6VwGg7iXLwsMaNibGhnlx7pFotg/Iqb6k1M0M9s1+Pufa1FLVd1IaCfgNzGSxkdpFZQdtg08rHs2GoYUnNHJF8eYQQQogeAt+lVg/xPYfR4TIPPTq17DGPHZilcIWPVpAkGa0wJhd4LNRaDPYXN/0a0zRjodoEYG6hgclskWtksLzsBY9SiuGBEvsn5mmFCfMLDforRcanFsjnfLxt1M5VCLE9dcJEnnQqEqJLl8rguGTTE+ggwAwMk83NQJqQzU6hKv3o3MaEj/sdiBO7qm2uvaLNw64k849TndY5QnFZCCGEEOJIHEfj+y5RlDA0UMT1HLLM4Bx2Q3pypsqusX7CKCGMEjzXoSmhIiHEFqCUouTZkE8jSZkNbbegzrizozW6cdqdhBylyLuaPs/FWeXKEq0UA4FLxXeYCxPmogRfK0bzHllmSLFBitQYjLHjlTJjA02OVrZDkurdiaTgOpRczUQrxtWLo9tSYwMJQhwvynVRlT6o9NmwXhx1R6GZsIXKUhsU8n3A3n8yBkwS26BRFEIc9x6b5rioXB6Vz6Oc1cdDVHuMWafDUOB0Rhsu1nY
qevFnRbU7Fk226z9TCYy0TxcaaBmIMjumDOwCs752d+uSPnqQzxjDVHvnertzkm5fQ0fJgRTbLfvQUWydTktR+xqi9u+IEdcufOtzjARnjkC+NkIIIUQPgW+T257nMDZU4cF9k8sec2BijswYCvmAWiOk1gzJBR7z1eZxCRUt1FpkmSFKUlot+0xqsL+44hMv13UY7C8yNVNjrtoiF/jkAo+p2Ro7R/s2/XqFEOJE8tsjLG0rbPsCOMlMN2wkxHal83kY3UE2OY7yQA+NkM3PQBRh5mfJksSGj471PMqu+ppL7Yq0goaStgUnIYQQQohTQSHnE0UJuZzPYF+RqZnqsseMTy9w5t7hbqioVAhotY6wPF4IIU5BBdeh4DrdsWKdbkWZAcPiqDFXK5x2oGcjuv44SjGU8+jzXWajhIUoQWs7Ws06+jm0UnhaUXJtp6VOXWgqTMiMwVWa2GQkxuDKSDBxgiilUIeMQgMwcYyJQ1uviUJMHNugkeeB50G+YB+XZZg4to9LFoNGpl7F1KvgejZcFORRztEXm+c0NFMbCKq0t81ltnuRh63tHKpT/5lMbHjoYLJy/7BaZoNFMavrVrTQDgNlhu4Itoq29d5D9Tk2GNjpmO1iz3H4dSTGhp0CBbMJjBz59NuahIqEEEKIHnzf/on0PZfRoXLPttZJmjExtcBgf5FaI6TRCMn6ikRR0u1atJnmFuoAVGt2fm4h7+EdZexap5hVa4TMLjTYOdJHtd5iLKug5ca6EGIL66w2QylcpYiNkQKREG3aD1Bju0inJ1BRiO4fxtQWMI0apl4lS2JU38AxF4EdBUNShRBCCCHEKapTK/Lai7YmpxeWPWZ8coGg/bgwsmGiZis6fhcphBDHUaf70GrCPBvJ1YqRnMdQ4BJnNsRkA052BFrGYqciVylcbWtBTjvk1IuvFa3U4GqIU4gzQ06a+4uTiPI8GyAqLG4zSYyJIojCdkejEKU1KgggCIAyJjOYsIlpNSFqj0+rxpjqAsoPUKWKPe4KgvaPTNwO88TY0WewOPbscE47WDSVLHYl8oBA2+MFarGbUTVbXbei1Bhm2gc7NNRU1D0fTr8DaXsUW+eZmMZ2y/aVDRQ1DMwnMOrZ66goCc+sRL4uQgghRA++54ACjWJkuIzjaAzLXx49dnCWXWP9uK4mSTKarYhiPmC+2tzUUFG9ERJFKWmWUWvYUFGltLrxJP19BWqNkDBMSJIU13WoN0LKpdymXa8QQpwMfEcTphmOVsSpsSPQpEAESNcmYdtsOyM7yeamoF5DlStkroupzmPCFmZ6Et03cMRCkxBCCCHEVhb49sWD5zoM9hV7rrqfmF7oLviK4pTMGMggjJJu2EgIIcTG0EoRHN6iZJ1crSBdfJtkK/VWEeLkoVwP5XpQ6IxDa49NC8PFsWkkqHwB8gXbxShsYZoNiNsdj2amUIWCDRf1CPS47aBNgu1WVG13CCoqGxJaiatgzLWjyDS2g9GhKg5Mp6vvVjSd2CBRZI4eaqJ9vmHHXjOAp5aONEwNNBMbOGpktqP2TAqjK39K29oRvtVCCCHE9qWU6hZ78oHHyGAZY5a/kHjswCxgOwAB1OohYLsH9Xr8RpmZt12Kao0Qk9kQ1GpDTK6jyQX2c6s17fUu1Jqbc6FCCHES8dqvMr32q9hkE39Pn8ziLGN/I+L26Rpff3yWz9w/wZ/ctZ9mkp3oSxMnmNIKZ3AE3T8EKHS+gBoYBu1AmpDNTJHVq2zTHx0hhBBCbHO+b+sunuswNFjsdrU+VBSnNFoRjqPAQBTZJfWtUEagCSHEyaxTK+rUjrZrzUic2pRSaD9Alys4QyO4u/aiR3eiS32gHZTW6HwBZ3AYPTwKQR4wmEadbGqcrNn7PlGunSiZS234R2NDQUe/Hhvk6bWOMa9txyCD7RIENtTT677aXGq6j5lrdysqHCXU1Dl/Xtt/7mHX4CgbZgI7Ss20w0otI4sue5FQkRBCCLGCoF0s8j2
H0aEVQkUHbaio2A4VNcOYJE1J04x6Y3PaW4dRQqN97M7os7V2Gepcb+caa42QNJWbyUKIrc1rr15z2wWieBuuOmsmKR+76wB/8+Ak3z4wz52zDaZaMcbAuIxlEG26XEGPjIF20J6HHhpZLDTVqmSzk5hEbowJIYQQYntxHY3j2FsqI4NlgJ5h6/GpBXLtmlJLRqAJIcQpwdf297vTWYi2DWtGK0klYHVK00EOPTCIs2svemQHqliyASPHxekfQA0MgeNClmEWZklnJjHx0ppPZ8RY5w5SvwMb0SSs0j5uLbOdgxKzGDDqqKWG6XaQaD61nYUUdmTasSppG5ZJgHr7f/MZaWvfk4SKhBBCiBV0OhV5nsvocKVnoWi+2qRab+G5jn28gXrTFoo2q/vP3HwDgHorJEnsGJ9iPljTMYp5H6UgjlOiOAFjg0VCCLGVdVabdcZ8pduwQJR3HQpu7xfH400JiYhFOpfHGdsJno/S2haaKv2gNMSx7VrUqJ3oyxRCCCGEOK58zz6XLhUCyqVczwVo41ML+O2aUqdTUbMlz7WFEOJk1ulU1FmIlhqzqZMITkaZMcyEMffON/nexAL/a980n7p3nM/eP3GiL01sAKWUrfUMjuDs3Iuu9APtzkZDo6hS5ZCazyRZdaF7T8xrjzLr0zDq2FFhGyHXo1vR7CHdipqZYaIdKKpli48Z2KBQk1aLwaZOt6IWmoZ0K1pGhvgKIYQQK/C9dqjIXblTEdgRaOc/YSelQkAYJdTqIX2lPNV6iyTNcJ2Ny/CmacZ8O6zU6VJUKgboXv0jj0BrTT7n0WjGNJohvueyUGvSV85v2LUKIcTJxm//PnYPaWVtjOk5L3wrG8t5PBiny7ZPSKhIHEa5Hs7oLszCLFl1Hp0vYPyAbGEOohBTXSBttdCVPpS7ujGsQoiTS7Va5ZZbbuHee+9lbm4Oz/MYHR3lyU9+MhdffPFJ+zcyiiLuuOMO7rnnHmZnZ1lYWKBQKNDf38/pp5/OJZdcQqVS2dRrePDBB7ntttt49NFHaTablEolzjjjDJ7+9Keza9euTT23MYbbb7+dH//4x0xMTBDHMf39/Zx77rlcdtlllMvlTT2/ENuZ77s0WzGe5zDYV6TeY4HW+NRCd6FaGNs7YVGUkKZZt9OREEKIk8uhC9EUNuSQmuUjk7ay+xdafOXRmWXblbKdm9w13oMQJy+lFapvAFUskc3NQrOOKpYwuQJZbR5aTUyjBkkC/QMopfAUeJvQxKeiYSqFenbIOLIM8sowbtfD08zs6DWwwaaNCjWB7cJUy2y3opqBIrZbUXkb1oyPREJFQgghxAoWOxU55PM+5WKOVhgveyLx+EEbKioWfGbm693uP77nUq21GOgrbNg1zVebmMwQxQmtVoJSUC6ubfRZRyEf0GjG1BoR/ZUijWa04SEoIYQ4mUiByBrLezxYbS3bPt6UkQxiOaUVqn8Q8gWy6SkU4AwMkTUbmOo8xJHdnsuhiiUJFwlxirj11lv5wAc+wFe+8hWiqPfv/x07dvDGN76Rt7/97QwODh7nK1wuyzK++tWv8md/9md861vfIgxX7rSqteapT30qb3rTm3j1q19NsVjckGtIkoRPf/rTfPSjH+UnP/lJz8copbjssst4+9vfzjXXXLMh5+2YmZnhuuuu4y//8i85ePBgz8f4vs+LX/xi3vnOd3LppZdu6PmFEEsXoA30F9l3YPnN14npKr7nohSkiSFJUlzXoRXG3XH0QgghTi6OVmilyIzBVZrYZCTG4LJ9ikbDud6xAWNgOowZy/vH+YrEZlOuhzM8StZqks1Mo4hx+gbIghxmYQ4TtTAzU+j+QZSzOWPBchr8DCJjw0QDju1WNI+t20YGZtqBoqKC8gZfhlJQcew5qinkDURKUcs2/lynMrlrKIQQQqzA8xy0VihUu1tR7xFojx2cBWzhupCzT6w7o8Q2cgSaMYa5BTv6bKFubwYXcj7uCmNsjqaQ89EKkiS
jFcVgFrsfCSHEVtQpEAG4yr4UirfhCLSVikALcUozWd7BSAgAHeRwduxGl/rs+/kCemgU/BxgMK0m2fQk2dwsJpauV0KcrOI45m1vexuXXXYZf/d3f7ckUOR53pIFFAcPHuQP//APOf/88/nqV796Ii6369577+VZz3oWV199NV/72teWBYp8f+nftizL+MEPfsCb3vQmLrjgAv7pn/5pQ67h0ksv5U1vetOyQNGh5zfGcPPNN/PKV76S5z73uSuGf9bqq1/9Kueffz5/+Id/uOSYSik8bzHQGUURN954I5dffjlve9vbiOV3shAb6tBQ0WB/sWdX69n5Okmadh/b6oxAC+XnUQghTmZ+ZwRa++75dqsZ9fvuit2IplrJcb4acTzpXB5n52503wBgx6SpgWHQDiR2HNpm1no6I8gaGSTGholiY/97qt2tKKegf5NCPgUNHpAB1XZ8ZuaQMWxCQkVCCCHEEXW6FfneyiPQDkzMk6R2mGuxaFec1ZshBkOrFRNGG/OEu9YIieOUJMu67bXLpfV1KQLQWpFv31iuN+zNhOoGhqCEEOJkdHiBKNmGLw5Hcyt3khmXEWjiCJRW6IFB9MgOcFyU4+AMDKIHRyCwI1RN2CSbmSSbnZFwkRAnmVarxS/8wi/w0Y9+tPu6ZmBggA9/+MM8+uijRFFEFEXcfPPNvOY1r+nuNzExwdVXX81f/MVfnJDrvvnmm3na057G9773ve42pRTXXHMN//RP/8TCwgJhGNJqtbjtttt4xzveQaGw2C123759vOAFL+DP//zP130NP/jBD7j88sv50Y9+1N12xRVXcNNNN1Gr1QjDkNnZWT7/+c9z/vnndx/z7W9/m0svvZSHHnpo3ecG+Iu/+AuuvvpqJiYmutte85rXcPPNN3e/b/v27eNDH/oQAwMDgC2Af/SjH+UXfuEXaLVk8YgQG+XQrtaD/UUwy284GQOTM9XFEWjtulCrJc+NhBDiZNYJ1Hjtt8k2CxVppRgKencrmpJg7JanlEJX+tGjO0A7aM9DDw6D60GWkc1OkbU25/5RTkOgbHhowd5qI2sHijJs4GfQsV2FNkulHViq4ZC2A02daxESKhJCCCGOKPDtjVfvCKGiJM0Yn5oHIB94OFqRJoZmu1i0UUGd2XnbpahWb2Ey8H2HXHBsI0Y6bbcbLRtSarZi4li6VAghtq7tXiACKHkOpRWGoI/LjQ6xCjqXt12Lyn2gNMrzcPoHloaLohbZzCTp5DhZdUECRkKcBH7t136Nb37zm933n/CEJ/CjH/2I//v//r/Zs2cPAK7rctlll/HZz36Wv/7rv+52LsqyjDe/+c3cdNNNx/WaH3roIV70ohdRq9W623zf53/+z//JF77wBZ73vOdRLpcBCIKApz71qXzwgx/k1ltvZe/evUuOtd7rf+SRR7jqqquYnZ3tbvvP//k/8y//8i+85CUv6Y5W6+/v51WvehW33XYbL37xi7uPffTRR3nBC17A/Pz8ms8N8OUvf5k3v/nNZJmtaGutuf766/nsZz/LZZddhuvaGz979+7lt3/7t/nhD3/IWWed1d3/m9/8Jr/2a7+2rnMLIZbzPMeOh0Ux2F/AcTW9XlFMTB0aKrLPg5otGTcshBAns06tyG0/B96OC9GGV1iINiX1om1DBzmcsZ3geijHQQ8M2y7VxmDmZ8lq1U05b6dbUTOzXYqmEkgABxh2YYUmWhsm3w42ASxg66bzcqusS0JFQgghxBH43U5FLoP9RbTu/afzsQO2wKyU6gZ1Ot2E5qvHvio0DGOazQiDoVa3x60U19+lqCMfeOh2CKoV2uLWRo5sE0KIk40vBSIAxlYoEu2rhRxoREy3YqpxSjPJmA0THm+E3LfQ5I6ZOo/Ww5779nL7TJ07ZxtMNKMNCXAZY2ilskzoZKC0RvcP4uzag670g3YWw0VDo5DL2yVkWYpp1A4LGEU9R8oKITbPX/zFX/DFL36x+34ul+Omm27i9NN
PX3Gf173udfw//8//030/yzJe97rXbdg4r9W49tprmZubW7LtAx/4AK94xSuOuN8FF1zAF7/4xW7gBuzfkN/8zd+k2Vz96x1jDK9+9auZnp7ubrvyyiv5+Mc/vuJrw3w+z9/+7d9y7rnndrfdf//9vPnNb171eTsOHjzI61//+m6gCOA973kPv/qrv7riPmeccQZf/vKXCYKgu+2LX/ziCes0JcRW5LcD+oHnMtjXewTawamFbqgoilMyY8gys2HdrDeKMabbfVsIIbY7v/38ztnGC9GGV1jELOPPthflejhju1BBDqUVzsAgqlACwNSrZPOzGz4aLDikW9FkAhE2yDLsgrPJgaKObrAJe8J4+/0KWJGEioQQQogj6I4/cx201gwPlno+7vGDc93/Xuz+E5FmGUmS0miu/gbs4ZI0Y3zapr8bzYgkyXC0opAPjrLn0SmlKLZHoNWanVCRtMYXQmxdXrtA5LbfxtuwQAQwmu9dJHq0HvL5Byf5H/dP8Ml7DvKndx/g0/eN84UHp7hp3wzf2j/HvfOruxlrjOF74wt8/fFZPvvAJH9y137+6r5xbnhgki89PMVNj87wT4/P8s8H57l5osq/z9Z5pNZiJoyXFO6SzPBIrcV3Dszz6fsm+OLDU2v6XH8612AylOLXZlHaQfcN4Ozcg+4bsOEi18XpG0CP7ET1DdjuRUsCRlNkkwfIZqfJalWyKJQ59UJsolqtxrvf/e4l2377t3+b884776j7vvvd7+a0007rvj87O8sf/uEfbvg19vLAAw/w93//90u27d27l7e+9a2r2v/SSy/ll3/5l5ds27dvH1/4whdWfQ1f/OIXl4xdcxyHT3ziEysGijoKhQIf+chHlmz7/Oc/z6233rrqcwO8733vW9Ih6cwzz+T3fu/3jrrfhRdeyG/91m8t2fae97yHer2+pvMLIXrzvfYINNeOQOsZKpqYx3UdHMfeHYs6I9BOovExYRjz0KNTPPDwBPv2z5BKuEgIsc11OhV57YVoqTHb7rXqcK73+LN6ktJIpG3LdqK0Ro/sQBVtZ1hdrqAq/YDCtJpkM1OYdGP/n+h3bHglw74dcsA7ToEi2PxuSKcyCRUJIYQQR9AJFbmug9YwOth7BNpjB2eX7ON7DiazISCA8anqusaKNZohDz82RbMZkRnTDfyUSzn0Bj3D6YSgOp2Qoig56VbOCSHERlnsVGTf344FIoCxdqB0PRrJ6m421JJsSVchY2AuTDjYjHikFnLffJN/n21w21SN700s8E+Pz/Glh6f56/sm+OhP9/Nndx/gsw9M8Gf3HOBLD0/zo+kac1HCRNN2UVqNMM341v45/vaxKv/HH2OfUyRFKgSbQWmNrvTj7NyL7hsEx0Op9qi0/k7AaLAdMNK2bXYUYupVzOw02cQB0ulJsuo8WbOBieNt+bMpxGb47//9vzMxMdF9PwgC3va2t61qX9/3l4VT/uIv/oKHHnpoIy+xpy9/+cvLtr3iFa9Y0n3oaF75yleu6ri9pGm6LIz1i7/4izzxiU9c1f5XXXUVT37yk5dsW00gqOPBBx/kk5/85JJtb3/72/G81Y3APvyx4+Pj/Pf//t9XfX4hxMo6Xa09z2V4oNSzA+PBqXmyLCPn25/DVnsEWqdOdKLFccqjB2e7tapmM+Lg5PrGNAohxFbRCRM5WqGwHVPSbfaydKVORSDdirYjpRTO4LCt8wA6X0ANDNm6ThKTzUxi4o17buMpGHNh2LFvA0mynDTkWyGEEEIcgeNoHNf+ufQ8l9HhSs8bXPPVJtVDOvx0gjoLtSZJmhJFCY88Pr3q4pExhsnpKo/unyVNMqIk5cDEPGGYoLWitAGjzzoC38VxFGlqaLZnI1dlBJoQYoty9WKBSG/jEWgrjT9bjcYqVyFNtY5tFXYjyZhoxkQ9KngPrXK06E/nGt1uVLPa50feIN/I7eSRxsmzQnyrUVqhK324u/bgjO22o9HcTsAoh9M/gDO6Az0
4Yle4BXnQ7YBAEmMadczCHNnMJNnEQRs0mp8la9TbY9O238+rEMcijmP++I//eMm2F73oRQwPD6/6GK95zWuWdObpdczN8O///u/Ltj3taU9b0zEuueSSZdt+/OMfr2rfG2+8kXvuuWfJtte+9rVrOv/hj//mN7/Jbbfdtqp9//iP/5g4Xvx75TgOr371q1d97rGxMV74whcu2fbf/tt/W3JMIcT6LC5A0wwNlHo+P0mSjMmZGkFgH9tqPzeuN058h8Y0zXjsYLveFCccnJrHYKjVQ2bnGyf02oQQ4kRytMJRncVo27PDdcHVFNze8YGpk6jbnji+dKUPPbwDtIP2ffTQCLgeZBnZzDRZc+M6ojoKcvr4jTwTqyOhIiGEEOIogu4KNIfRoXLPFWiwtFtRuRjgupo4zjgwOU8YJ6RpxqMHZphbOHKBJooT9u2fYWbOPhFbqLU4MDFHHKc4WjE6VMZ1Nu5PuB2BZkNQ9UbYPacQQmxFjjq0QNQOFW2zAhFA0XMYCFbf6eFQq+1UtJnFptWEiowx/HhmeVEjwuErB2rMyki0Tad8H903gLvzkICRZ7tkKc9D5ws2ZDQyih7egeobQBXK4Ad21RvGBo1aTUx13o5N6wSNqvNkzSYmle+jEEfyrW99a8n4LIAXv/jFazrG6Ogol1566ZJtf/d3f7fpN8XHx8eXbRsZGVnTMXqFpw4ePLiqfb/4xS8ued/3fZ7//Oev6fwveclLjnrcXowx/N3f/d2SbZdffjlDQ0PHdP7Z2Vm+/e1vr+kYQojllow/6yu2Fyss/514YGKeQs4+92lFCUmWkaZZd0HXiWCM4fHxOaIoIUlTJqartFoJM3O2VjUxvXBSjWgTQojjrTMCrZOr2W4L0ZRSDK+wEO1YF4+JU5vO53FGd9qFY46DHhyxC8UwmIV5sur8ivfOxKlPQkVCCCHEUXRCRb7rkM/5lEtBz8cdGirSWjM2XMHzNGliODg5T60RgoHxyQXGJxeWFOHjOKVabzE5XeWRx6ZptWLSLGNieoGZuTomg3zOY+dYP7kjtCBdr2LBFrkaLTtmLY5TKSIJIbas5QWiE3gxJ9BlI+V17VdfbahoE9ti76uHRw2DPd6ImD5CcOiOHoEjsXm6AaMdu3F2n4YeGkOX+1B+DlAoR6NzeXS5jDMwZLsZDe9A9Q+hiuV2RyOHbtCoUccszJJNTZBOjpNVFzDSfUOIZW688cZl2571rGet+TiH77N//35uueWWdV/XavQKLa01yLTeY4RhyNe+9rUl25761KdSKBTWdP4nPvGJy4JQvb4nh7v55ps5cODAkm0b8X1b7fmFEEfmew4ocLQmCFwG+oo9b6IdmJzHdR37eLPYrajWOHELuQ5OLtBsRqRZxvhUlaT93L5aa1Fv2brVgYk5sm248EIIIWCxw3WndrQdF6KtNAJNxp8J5Xk4Y7tQuQJKgdM/gCrZ+qJp1MlmJzGJ1Ga2IgkVCSGEEEcR+PZJtO85AIwMVpY9xnMdkjRbtm3nSB/5nIfJYGqmxuy8vYE5t9Dg0f0zPHZglvsfmeDBfZPsPzjHzFydLDO0wpgDE/M0mjFKwUBfgbHhyoZ2KDr8c/RcjcnojmhbkBFoQogtytdLOxXF2epCMlvNBf0FXrJ3kNNKASXPwXcU6githT2t6PNdhgKXbBU3ZPt9h9G81y3IbaTEGCaOskLuaKGhg6scSSo2ntIOulBA9w/ijO3E2X06enQnum8AVSiBY597KUejgwBdKrc7Go2hh8faHY1KtuuRUpClmEaNbGbSdjFq1DCrHNMnxFZ30003LXm/WCxy7rnnrvk4P/MzP3PUY2+0nTt3Lts2MTGxpmNMTk4u27Zjx46j7vfP//zPVKvVJdt6fQ1W4/D97rnnHu67774j7tPra7ue85933nnk8/mjHlsIsTZKqW6NyHU1w4MlMmMIfJfTdw9x+c+cxS+98Ge44mfOAqCQtwu5mi37/LPTJfp4m5qpslB
tYjBMzlRtR2xHUSrYxXMzs3WSJCWKUsanFk7INQohxIl2eM1ou3UqAhjO9e5sPR3Gq6oHia1NaY0eHkWX+wHQxTKqf8h2nI5j22W6IQv5tpr19bsXQgghtpFcd/yZfTs8UGJ+ocHeXYPs3jHAnh0DjA6VcXoEfrTWjA6VmVtoMF9tMV9tEScpQwOlJe2uDYYoTomihDBKul2NPFczPFjudkvaTMVCwNxCk0YzpFQIWKi1GBkso450h1kIIU5BntZA2n27ysY7W9K5fXnO7Vu82WiMITaGODVEmSHODL6jKLgaX68t2HrFaIUrRitkxjAXJUy1EhpJSpgZwjQjar8NM0MryViIkyOOVgsczRmlgDPLOc4sBeRd56ifWz3JeKze+6bNVLsYpuXv3AmntEIFOQhy3W0myzBxBFGEiSNMFEIcoRwH5eQhZ/+/NQZM2MK0mhC2bBejaoypLqD8AIoltN+7y6QQW93ExMSybjdPfOIT1/X8/oILLli27fbbb1/vpa3KM57xDP7qr/5qybYf/OAHvPa1r131MX7wgx8s23bFFVccdb9en9v555+/6vMe6oILLuAb3/jGsuOfc845m35+rTVPfOITlxxv//79TE5OrnmUnBBiKd9ziaIUz3V58nl7uOKpZ3PmnmF0j0B9Pucxt9Ck2Yps/SdKieKkO0bteJivNpmetTf4pufqtFoJWsHoUAXfc4iShChKmZqtMTZSYaHapJD36Svnj3JkIYTYWrx27cORTkXLxJlhPkoZCCResN0ppVD9A+B7ZDPT6CDADI2SLcxCFNoR9mELVelHOUeu34lTg/zUCyGEEEfh+y5KKxw0nqc5/wk7ec4VTySf81e1v1KKgb4inucyPVuj0YyJkwUqpZwNEsUJUZxgDruPWioEDPYXexakNkMh79tQUXv0GgmEUbIp49aEEOJEWhx/tn1Xna1EKYWvFL6G4gYdUyvFYOAxuIq/J0lmqMYpC3FKNU6oximuVuzI++wq+DhruBF+TiXPOZU8D1Zb3PjQ8k4VUSrFsJOZ0rp30CgKbYEqskEjlSaoXA5yOUxmMGET02xAJ4gUhWS5PKrcdwI/GyFOjJ/+9KfLtp1xxhnrOlav/XodfyO9/OUv521vexu1Wq277Utf+hL/7b/9Nzxvda9RbrjhhmXbXvOa1xx1vxP9tdvo8x8eUvrpT3/Ks5/97HUdTwhh2UBQiOc6VEp58jlvxfpN4Hs4jiJNDWEYkwt8avWQwf7j8zy03gg5ODkPwFy1Qa1mQ/fDQ4uL2EYGyxyYmKcVJswvNOivFBmfWiAXeMdloZsQQpwsOjUjr11/SI3BGLOtFt4O5lyUoudoz6kwljqK6NKFEsr1SacnUMQ4A0NkzQamOo+JQsz0BKrcj85LSPlUJ+PPhBBCiKNQSnULKL7nopQijNY+P7hUCNgxUsFxFHGcMj1bp1prEYY2UKS1Ip/z6Cvn2DFSYXiwdNwCRWA/N8/VYCCO7efXCmX+rRBi6+mGitq/YlNjpH3zScLVioHA5fRSwEUDRa4YrfD04TJ7i8GaAkWH2lNcOQQ8JX/nTilKa3Quj6704wyP4u7aizO227bcdmwIXOcLOIPDdkxaoQSAaTXJpicglJF3Ynu58847l23rNVJsNfr6+paN0XrkkUeo1zevrf3AwADvete7lmzbv38/11133ar2//73v8/f//3fL9n28z//87zoRS866r4b+bXrNW6t1/E7arUa+/btW7KtWCxSLpePy/mFEKuzWCeyt1iS5MijVwvthWn1pn3+WTtOI9CyzHBgYh6MPefcvB11P9Rf7F4TgOc6DPXbZQVzCy1aYYTJDPvH58i2YZcOIcT21akZOVqhAAOk2+zXoK81fSsESqeOMopebD/K921tpmQXc+l8AT00YkfWG4NZmCWbncHE8v/OqUxCRUIIIcQqdLr1+O0n01G89lAR2NVpO0f7KRcD8jmPSinH8GCJ3Tv6OW3XIGPDFQb6iie
sO1Cn9XYY22LYesJTQghxsusUiLRS3aDKdmxnvV34WtPn9X7pO9GUgsapTvk+un8Ad9de9MgOVLEM2kE5DrpcQQ2OgOtBp8uRENvIgw8+uGzb8PDwuo93+LgsYwwPPfTQuo+3Gu94xzt41atetWTbu971Lj73uc8dcb877riDV7ziFaTp4k3+M88886j7dWzk1250dHRVx+946KGHMIeFnY/l+7bW8wshVqdTH3LbdZQ4yY4Yvul0u262ou7bNN38OcwLtSZpmhEndrQZQKWco1zKLXtssRBQLtqxsZOzNZIsI4oSpmarm36dQghxsnAOqRW5ytYS4m1YM1ppBNpUePT7BVGWSb1lm1FaoQcG0SM77KIvx0UPDKNKZUBhohbZzCTZnISLTlXSn0wIIYRYhU7IJ/Ds/NcoPvIKtCNxHc3QQGlDrmujeZ4DTYjaYaJQOjgIIbYgrRSuViSZwVWK1BgSY1jdUEtxKhoJHCZ6bJ+UFXZbis7lIZe3Y9AaNbL5WbQHZnAE06hCrWGXmQqxTSwsLCzbVqlU1n28Xp1yep1jIyml+NznPsd5553H+9//fsIwJI5jfvVXf5UvfOELvPGNb+Syyy5jcHCQWq3GnXfeyRe+8AU++clPEoaLQcLnPe95XH/99T279vRSrS6/gb7er91av25b4fsmxHbQWZTlat0dbRYn6YqjwnKBh1KQJBlRnOB7LvVGSKW8ueNA5qu2M1G13gIDucBlsG/lQccDfUXCKCGKU6ZnaowNV5hbaDI8UD6u3bSFEOJE8rT9ve5qiFNItmF36+Gcy/09njKuplPRvfNNvvH4HCM5j50FH18rPK1w2/U4RykqvsPugo+npf/JVqJzedSO3WRz01CvoYplTJAnq1UhbGLCFiZsofwcqlRGrXKstTjxJFQkhBBCrEKuXRTy2kWjJMlI0wzH2VpPehc7MdnQVEs6FQkhtihPKRJsgSjMpFPRVje8ws0dCRVtTUorW5zKF8jmZqBhC1naz8OCrLQX20etVlu2rVhc+Uby0fTat9c5NprWmt///d/nda97HZ/4xCf4whe+wL59+7jpppu46aabVtzPdV2uvPJKfuM3foOrr7561efrBJcOt96v3Vq/bqfa921iYoLJyck17XP//fcvu57jGXQ6fGzfZo7xE1tbEoekaYomIU4TGo0aZCsvVfB0RiuKmV9YoFLMMTmVglnfaMPViOKU+bl5DIZqdYEsNeR8l7DVOOJ+fUWX8ekG9XpIvaBxtWZyarrbbUlsLfI7UYjlojCmlWQQJ6RxRjPSeCvUFbaqikkw2fKOerOtiFq1inuEoOkdk1VMljHRCJk4wrhPTyvOLHicU/LZW3DXPfb+UMYYvjnRYMDXjAUufZ5GK4UGtGLZfx8qajSO+L5YgyCPySBbmIMshXwB4wdkjTpEITQb0GzYUFEuj/J97MDBEytOErI0RSnVXaRysizIOB6vvY9ke/0GFEIIIdbJ912UVjhoPE8TxxlhnFBwtlZBxe+27U7JjEFndFfQCSHEVuI7mmaa4XbGn23DVWfbyXDg9NxejVOaSUbe3VohYWEpx8EZGiHLF8nmplEp6MoATtIOC2xAwVKIk1mvbjuuu/7n9b327XWOzXLGGWfwK7/yK5TLZb74xS/y4x//eMXHDg8P8+u//utcddVVPOtZz1rTeVb6nNb7tVvr1+1U+7594hOf4L3vfe8xHePf/u3fOHjw4AZd0frOL8RGGF/lj9Z0FaY391JWdHCNP/6Pth//0P1HfpzYOuR3ohDL1YG1RahPfTXlEgY7e37sjtvup9/0XqS1oFweWWG/w4XAv1fh38fBI2NX2mBP2mAwC1lvlaauHP492HXUxzkYBrOQs5IqY1mrZ5zl0Z/cvs6rEKvXhPmTI7RzuIcb9roefnTfCb4Sa9++E3sdcodQCCGEWAWlFDnfpdmK8T2XOI6IooTCKbRKa9/j0/yfW+/jZS/4GUqFoOdjXEfjaEWaGaI
4Ied7hKGEioQQW08nTORqDaTSqWiLG/J7h4rAdis6rdT776LYGnShgMrl0NPTUK1Bu722Kqy/84cQp4Jms7lsm+Os/PvwaHqFU3qdYzPcdNNN/Jf/8l+48847u9uCIOCZz3wmT3nKUxgaGqJer/PII4/wrW99i4MHD/L+97+f97///Zx++un89m//Nm9+85tX9fmv9Dmt92u31q/bVvq+CSGEEEKIU1PJJGgM2SFxm5KJGc5CatqjP+0dKnrEKa3rfDGaR5xSd/+CSciblKJJCEzKzrRJv4mO2stmVq+uvpOimNQ5Jv0cFRNzTrLA7rRxEvTKEeLkJHcIhRBCiFUKAs+GinyXeiMiik/8aLCHH5viwMQ8Vzz17BUfMzVT41vfu4t7HxoH4Lu33seLnn3Rio/32+GpOE7J+R6tMKZcym34tQshxInkO51QkX0bS6eiLa3oKHwyoh5r3SRUtD0ordH9g2gvj4pDdDGHLm/euBEhTgb5fH7ZtjRN1328Xvv2OsdGiuOYX//1X+d//I//sWT7W9/6Vt7znvcwPDy8bJ8sy7jhhhv47d/+bcbHx3nkkUd461vfyl//9V/z5S9/mV27jrxyeaXPKU3TdYV71vp12wrfNyHEUmlmcI4wJkYIIYQ42SigbGLm1eKi6pryeFI6y0jWe6RZCjzqbszinYZyaSiXaWy9ZixrolZRuptTa18EvqA8bvOGuMvt45xkgb1pnfVH+oXYmiRUJIQQQqxSLrCr2oN2154wWhoqaoUx9z40zl33H2BksMSVP3v+pl7P/vE5bvhft5Bmhkopx4Xn7l7y8Vq9xf/5t/v44Z2PcOi98h/+5BEufcoZDPX3XjXgew7NVkwU2+Lz4Z+nEEJsBV6nU1G7tp8Z7NhHGYe0JSmlqGQRU3p5SHay1Xt1ndialOfh5AJ0zjvRlyLEpiv3CM4lyfqf2/fat9c5Nooxhle+8pXceOONS7Z/+tOf5vWvf/2K+2mt+dVf/VWe+cxn8pznPIeHH34YgNtuu40rrriCW265hR07dqy4/0qfU5Ik6woVrfXrdqp9397ylrfwK7/yK2va5/777+dlL3tZ9/1LL72U88/f3NfPh6rX60vG+1x66aUUi9K9TqxdK4w5ODFPkmaMTy2gFPRXCkxM1RifWuDgVJUDkwsUch6vffmlACzUWlTrLXKBx1B/Edd12LNzYMOv7eDkPK1WzEK9RbXWIvBdhgdW3z3CGMP45AKpMQwPlAh8l6HBEuWiLDrbauR3ohDLZcbwWCMCYLwRYYDhnNvudr19TEw3+eFca8m2hwf6uGxPuWf9bCFO2TPZYH9z4+8nHBju59JdR38Oe/vjVYLW+s4fZxl3NF3ucft4cjzLpRc8Eb9QWNexxJGZzECrgWk2MVEILN7AMkmCiUJMEkMSH/ohS4FyPXA98DyU66E2qJ4bJwlTc1WUUuwY6gfgzCesvKD/eLrrrrtO6PklVCSEEEKsUidU5Pv2z2eaGqr1Fvc/PMFdDxzg4UenSNvjcw5OzvOcK87bsCczhzPGcNO37uie78vfvIP+SoHdOxYLUf9+7+Pc9pNHlu2bZYZvf+9ufuUXLul5bK8dmup0YmpFcrNVCLH1eFqhAK0UjlKkxhBnhsCRUNFW1ZfFEioSQmwrpdLym8f1en3dx6vVaqs6x0b5yEc+sixQ9Ja3vOWIgaJDnXHGGdxwww0885nPJMsyAPbt28d/+A//gW9/+9srvlYLggDP84jjpX8f6vU6QbD2znZr/bqdat+30dFRRkdHj+kYpVKJSqWyQVe0dsVi8YSeX5y6CmnG7EJMADjzIfc/NMG/3nbbst8vjWaM6+VwHE1F+9RbhjgDP5dHoQhyBQJ/427VxHGKoUGQ8wjnQrQT0FcpEuTWFggqlDLqjYgMhyBXAOXJz8o2IL8ThbBmdYvUGAIVE5sMJ/DIudsrVPR0P88DzUkW4sXOl02jaLo5hnos1MkB/6G/j9kw4d6FJtNhTJI
Zkgxik5FmtlN4LU4J02xN1/J4mDGjfHYVVu5ElBrDdLKAWmf4q7NXSzn8mz/MbuNyfnHzXu9se+2FDiZLMc0GptHAtJoQGGiHW43BhouiFiaKIY7AZJClEKUQtUNvjovyPPADlO+jnPU9r1Jaox0HpVT3td/J8jdxM197r4aEioQQQohV8j0HpRU6A8/TxHHG3Q8c5B//+SfLHrtQa7F/Yo7dYxu/2gzg4cemmZiudt9P0owvfPVW3nDNs+gr21b2T3/SGdx6x8PMV5vL9r/7gYM8un+GvbsGl30sOCxUlCYZSZLiutL0UwixdSilcLUizgxuO1SUGIMMwdq6+kzUc/t0mMhICiHEltSr+FmtVns8cnV67btZnYpqtRrve9/7lmwLgoA/+IM/WNNxrrjiCq6++mr+/u//vrvtf//v/81NN93E1VdfveJ+5XKZmZmZJduq1SqDg8tfPx3NWr9up/L3TYjtxnU0jqNJ0wzPdeivFDAGDs8sJmnG5EyVHSN9BL6L4yjS1NBqxeRzPrV6i8DfuBtF8zVbB2qFEUmSobWikF/7K5184FFvRDRaMf0VaDQjjDGbtoBOCCFOJp62v6s9DXEKiVnF7K0tpuQ5/IezR7hjps5smLCr4HN+f4Gcc+TQzkDgctnIys83U2PYVwu5e77JA9UmUbq6r+3NE1V+6YyhlY+bGS4dKXOwEXGwGdFI1hZcOtzt8yHnH1t2XqyC0g6qWIZiGZNlmGYd02phwhYqTWxYyPOg3UTPxDEmiTFxBFEEaQJpgkkTaDVtYyPHRfl+O2QUrDtoJhZJqEgIIYRYJaUUOd+l2YoJfI84Dtm7cwBHq27HoEPddf/BTQsV3frjh5ZtqzciPv/lW/j//cozCXwX13V4zhVP5O+/cXvPY3zzX+/i9b/8s8uKQa6r0QqyDKIkxXcdwiiRUJEQYsvxtCbOUlwNYQZJj9/lYuvoy3p3JMqMYTpMGM3LOCwhxNZy5plnLts2NTW17uMdvq9Squc5NsIXv/jFZaGeF73oRYyMjKz5WK9//euXhIoAPv7xjx8xVHTmmWcuO//U1BSnn376ms8/OTm5bNtZZ5214uPPOOOMZduO5fu21vMLIdbG9xyah4SK9ApB9QMT8+wY6QOgkPep1kIa7VBRvREytIbRZEczv2BDRdV6CECx4K94XUeSy/lAnShKSdIUF4dmK1pXQEkIIU41rlaQLr7drjWjouvws6Mb26nFUYozyznOLOdIsn4erLa4Z77JQ7XWEb/OD9da7G9EK3Yr8h3N5e0wkzGGRpqRZIbM2NpPhn3bSjN+PNPgvoXli7EPNd5KyIzpOepNbA6ldTdgBO1RaGETE4aYsAVJjPI8GzTK29F0JjOYJIIwwsQhxLENGTUTaDZsyMjzUPkCKleQcPQ6SSxLCCGEWIOgMwLNswEbpRRn7B3u+di77z+A2YQVDHMLDe59aLznxwzgHdKG9cJzdneLVod77OAsdz9wcNl2pdTiCLSoPQItlNEwQoitx2sX1l1lf29u1qqzVpox1Yp5rB6yvxExFyVEa2zzLI5dycToZYPYLRmBJoTYii688MJl2/bv37+uY83Pz9NoNJZsO+200zatBfu3v/3tZdue8YxnrOtYP/uzP7ts23e/+13CMFxxn4382h04cGBVx+8ol8ucdtppS7bVarV1dyta6/mFEGsT+LZO5HkOjqMZ7C/2fNyBifnuf+dz9mZosxW138YkG/T6oN4ISZKUNMtotI9fKqxt7FmH62h839a/WmHSPn7v7p9CCLHV+N2akX27HTsVHQ+uVpzbl+elpw3ylvN28mtPGOUXTxvkOTv7eOpQiSdUcozkPAquZjBwqR4yiu1IlFIUXYc+32UgcBnKeYzkPMbyPqeXcrz0tEFed84oF/QXlnUY7EgNS0a/ieNPuS66WMYZHMbduQdn12nooTF0uR8V5EBpO13ED9Bl+zg9uhPVP4QqlMBtLyCMY8zCPNnkQbKFOUw
sdcC1kk5FQgghxBrkA485wD9kRNj5Z+/kgUeWr/6cXWgwPrWwYqhnvW7790dY6TXMC3/uQvQhrRy1VjzvGefz2b+/uefjv/29uzjnzDHcw1qW+r7tThTHCRAQtsNFQgixlXRDRe23zSRjloSK7+Ac46qVzBiaSUY1SYkPWWWVGkM1TqnGKZ5S5F1NwdV40oZ302mgbGLm1fIVbRIqEkJsRRdccMGybQ8//PC6jtVrv17H3yj33nvvsm29OvisxsjICIVCYUkoqtlssm/fPs4555ye+5zor90FF1zAvn37lh3nSU960nE5vxBi9TqhG6+9+GxooMjMXH3Z4w5MzHX/O+d7aAVJkhHFCb7nUm+E3XH2x2K+arsu1BshJrOL4gJ//beB8oFHFKU0WxGlQkCtETIyJCMUhRBbX6dO47bfbtdORceTqxUj7fDP8TAUeFy1Z4DLR8t8+r7xnsvQplox/cfwd1RsLOU4qEIBCu0uRcbYwFBkOxmZVhOVpagggMB2VjRpZrsdNertDkYNTLMBrocqSPei1ZLKtRBCCLEG3U5FvgsK0tRw9umjK6bZ73pg+arQYxEnKT+6c1/Pjw0Pljhzz/KuSWfuHeYJp/ce/jsz3+CHP3lk2fZOaCqMbZgojORmqxBi6+msOstpRc6xPWxqScrBRkQ1TtfVbS7NDPNRwoFmxEyUEGcGhZ1DP5b3GQo8Co5GAbExLMQpB5sx482IlnQv2nQrjUCbkFCREGILGhsbY8eOHUu23XPPPev6+/bTn/502baLL754vZd2VIePHgPo7+9f9/F67XukkWK9Pre77rprXedez9duo86fZRn33HPPkm07d+5kdLT360MhxNp16ieea98O9fXuVDQxXe12I9JakWvfMO10E6o3Vu6etlpJmlGtt4DF0Wel4rGNKut0Vep0sI6ihCSRrg1CiK1vsbu1fT8xZlOmEogTr993qXi9g0PToSy2PpkppVC+jy6VcYZGcHefhjO2G903iArytpORo9GFIs7wKGpgGHJ5UAqSdveiqXGy5pFH4QkJFQkhhBBrEviubaeoFJ5rV6G5rub03UM9H99rvNixuPPex2muMIrs6U86Y8VE9XOfcd6Kwafv3nrfsoJQZ7xb3G7vGUUpmazGEEJsMXlHEzgafchKKF9rMmAuSjjYjGmssmDeSjOmWzEHmhELcUpm7Iz4Ps9lV8FnwHfxtaLgaoZyHrsKNmCUaweMosww2YppHmOBPkwzFqKE+SihHqeEaUYqRa+uvmz5uAbf0QRaViQJIbaml770pUver9fr3HfffWs+zo9+9KOjHnsjFdorTw/VarXWfbxe++bzK3cE+fmf/3nK5aWdOG6//fZ1nfvw/c4991zOPffcI+7T62u7nvPffffdNA8rkG/m902I7WgxVKRBwWB/qedN5yTNmJpZHGO4OALN1niq9RaN5rEFi6q1Jhi7QCyOU5SCYv7YQkWB76K1Ik0NrfaCs9oGBKCEEOJk1wkVOVrRqRjICLStazhYIVQki9BOOcr30ZU+nNEdOLtPQw/vQOWLgEL7Pk7fgN1W7gPtQpZhFmbJZqcxqQSnVyKhIiGEEGKNcu12l5320VGUcP4TdvZ87NRMjcnpas+PrZUxhlt//HDPjwW+y5PO27PivqNDFZ5y/t6eH2s0Ix58dOkqXc91Ue1OTEn7iZR0KxJCbDVKqW7gRwE5RzOW9xj0XRylSIxhOkx4vBEy1YqptkM6nZsEaWZYiBL2NyImWzGNNMMAgdYMBR478x4V37FBVK0YCjwGfBevHU4tuJqRnMeuvE/RtS/NpsNkXR2L4ixjqhUz0YqZj1MW4pSZKGGiFbO/EfFYPeRgM2ImjImz7dsRaSCL2JE1uWQgx0tPG+QN547xG+ft4BdP7x0OFkKIU93LX/7yZdu++93vrvk4//Iv/7Lk/Z07d3L55Zev+7qOZmRkZNm26enpdR0rSRLm5+eXbT9St54gCHjRi160ZNttt92
2LKBzNPfeey8TExNLtvX6nhzu8ssvX9ZlaiO+b6s9vxBi9TzPQWuFQuG5moG+As4Ko40PTCz+LsrnfJSCMEyoN0Mw8PjBOcIVFpKtxtyC/R1Va3cpKuR9HOfYbgEppci3u3Z3uhU1msuD+kIIsdU4SuGoTreizgi0E3lFYjMN9hi5lncU3jH+HRUnllIKnc/jDI/i7NqL7hsAp904oFBED4+iimVQyo5Qm1veMVdY8pMghBBCrFGnRXWnm08YJZx31o5NH4H22IFZDk4u9PzYU87f2w05reTnL3sirtv7T/+9D40veV/rxU5MUbtbUUtafQohtiBHKYZyHqeVcpTav9eLnsOOvEfFc1BAZqCZZsy1QzqPNyLGmxEHmhHzcUpqDFqp7oiz0bxHwdW2AO9odhZ8Ti/lGAhchnIep5dy7CkG9LXDS1orBnyXfHsE23QrJlpl8CfNDLNhwngzpplmKKDoaoquJtC6WwAzQJwZ6knGZCsh26ar6wZMxGXRFJcN5jmnkqfPd2VuuhBiS3vuc5+7bPTX1772tTUdY3JykltuuWXJtl/6pV/a1N+f55133rJtP/zhD9d1rDvuuIP0sBWnlUqFsbGxI+73y7/8y0vej6KIb37zm2s691e+8pWjHrcXrTW/9Eu/tGTbzTff3HMs3FrOPzAwwHOf+9w1HUMIcXS+3+lW5KC1Zmig9wi0Q0NFrqOplHIATM7UaIUxWWZ47ODcusaL1RshUWSf59cbtjtbuZhb83F66dTBOl2V6o1QRgAJIbaFTrcir11Sl05FW9dpxYCLKgFPjmd5RjTBVa3H+Y9n9PP8Xf0n+tLEBlGOg6704+zcgx7ZgQryKAW6VEYPjYDvn+hLPKlJqEgIIYRYo5zfCRW1OxXFCaVijj07Bns+fqNGoK3UpQjgkiefftT9y6UcFzxhV8+P3ffQ+LLxZp7XCRXZMJF0KhJCbGWeVuzI++wpBuQcjVaKPt9ld8FnLO/T59nQj6MUBjuurNOVaNB32Zn3uiPOFFDxHPYWA3YXA4rtkOahco7tUnRGKbDhJaUYDFyC9vi1qVZCcoSxk5mxXZIONCNqSYrBjnMby/sMBh6Dgcdo3o5Z21Pw2ZH3Gc55OEqRGkMkIy2FEGJb8H2ft7/97Uu2ffWrX11T158bbriB7JCwq+d5XHvttave/+abb+aDH/wgf/VXf0WtVlvVPlddddWybV//+tdXfc6j7fe85z0Px1n+9/lQL3/5yznnnHOWbPvMZz6zpnNff/31S96/8sorueSSS1a177XXXovnLa6YTpKEz3/+86s+9+Tk5LLP/e1vf/uSYwohNkanPtRZfDY8WO75uAMTc0ve768UKOQ9MDA5UyVKUpIk5bGDs6Sr7F6aphnjUws8dmAWsF2EssyOY8sFG/Pz3hnVFoYJSZaRZaYbMBJCiK2sEypy22+PVKcRp7bTSwHPHilwZlpjOAsJkLZUW5VSCp3L44zuQA+N2c5FjoszMIyq9LNi94BtTkJFQgghxBoF7aKM77vQHRGWcf4TdvR8/PjUAjPz9WM6Z7XWWrHj0dmnjzDUX1rVcc49s/dq3Foj5MDk3JJtnaJY3O1UJAUjIcTWl3M0e4oBO/K+DQgpha8VFd9hOGdDOjvyPoO+y452V6KiZ0ec+bodFCrnGM37BKtokayUYiTnUXTtMYZzLp7SpMYw2YpJD1sFF2W2Y1KnS1In2DSas6EhT9v23AVXdwNOqj1+Le/YbWC7FgkhhNgerr322iXjxMIw5KMf/eiq9o3jmOuuu27Jtje+8Y2cddZZq9r/fe97H1dccQXvfOc7ecMb3sDFF1/M+Pj4Ufe78sorl40nu+eee/jHf/zHVZ23IwxD/uzP/mzZ9le96lVH3dd1Xd73vvct2XbjjTdy3333rerc3/jGN7j99tuXbPt//9//d1X7Apx99tm84Q1vWLLtuuuuI0lW10H2uuuuI4oWRxSNjIwsC5gJITZGJ0zkuraOMtT
fu1PR+HSV5JCwkFKK4YEyge+SpoaJqQWSNCUME/ZPzB21G1CtHvLwY9PMzTfs+42Q2Xb9qVgIjvnz6nAd3f0cW63OCLRww44vhBAnq26oqB0ykE5FQmwtulDA2bEbXeqz7+cLqP5BVG5juj1uJRIqEkIIIdYo8F20Vmh1yIiwKOG8s3euuM/d9x9bt6If3vnIsk5CHU9/8pmrPs5Zp43grHCT+94Hlxb3O+PUwijpvpX21kKI7aLkOZxWynF6KWA0Z0eh+dr+/vS0oug5eId0JdpTDDittDjSbC2UUozlvW6HpJGcPUZiDFOtmCQzVOOU8WbEeDOmGqdkxha1htodiQLHXstA4HJaKWBXIeC0Uo6zyjlOL+UotP9eebK6Tgghtp1SqcR73/veJds+9KEPce+99x513z/6oz/ikUce6b7f39/Pu9/97lWd96677lp23gceeIB3vOMdR903n8/zB3/wB8u2v/nNb2Zubm5V5wf43d/9XR599NEl257+9KevagQZwDXXXMPll1/efT9NU97ylrcc9XVRs9nkbW9725Jtr3zlK7nssstWeeXWe97zniXj6x544AE+8IEPHHW/u+66a1kY7L/+1/9KqbS6xShCiLXpjj9rB28GKoWej0vTjKmZ6pJtWitGh8p4riZJMiamq2TG0GhEHJxc6HmcJEnZPz7H4wdnSZKUOEk5ODXP1EyNNDXkApe+cn4DP0PIt0egdRac1RoSKhJCbH1euw7ktt9GacZUK2aiGXGwGbG/EfF4I2SyFROussOcEOLkorRGDwyix3aB59n3C+3XTa50ee2QUJEQQgixDp1uRZ3gTRQn9JXz7Brt6/n4u+7fv+5zJWnGbT95pOfHBvoKnH3aSM+P9RL4LmfsGer5sXseWhoq6gSmkiQjzTIwNjwlhBDbiac1Fd9lNO9zWingzHKOnQWfocBjLO93uxLlVtGV6Ei0Uuws+Pha42jFaHtUWZQZDjQj5qKEKDMo7JizocBjR96j4NrzVjyH00s5hgJvSaip06Uo58jqOiGE2M7e/OY38/KXv7z7frPZ5KUvfemywM2hrr/++iWdepRSfPrTn2bnzpUXUxzqO9/5DmmaLtu+2jFmv/7rv75sDNrDDz/MlVdeyeOPP37EfdM05Z3vfCcf+chHlmzv6+vjL//yL1GrDAArpbjhhhsYHFwcdf3Nb36T3/zN31wyEu5QrVaLV77yldx9993dbWeddRZ/+qd/uqpzHmrnzp18+tOfXnK9v//7v88NN9yw4j6PPPIIV199Na1Wq7vt5S9/Of/5P//nNZ9fCLE6nU7PnTpKpZxfcUHX/vG5ZdscRzM6XMHRiihKmZyuYjAsVJvs2z/Dowdm2Pf4NA89OsWD+yZ58NEpqrUWBsNctcH+iTlarQStbJ1obLiy6t9zq9UZgdYMbQe0MEyWdF0SQoityO92KrLvZ0AzzQgzQ5wZUmPIDLTSjIlWfEzhoswYGklKI0mXda0WQmw+7Qc4ozvRpQo4Lng+emj19962OgkVCSGEEOuQC2zBqFM46nTzOe8JvQvs+yfm+fb3715Xp5+77z9AvRH1/NjTn3wGWq+tUPTEM3uPaZucrnbbZIMtarntm9WdEWihhIqEENucoxRF12EgcCl7zpq7Eh3t2LsKPq5WuFoxnPPoHD3QmgHfZVfejjkruBqlFAXXYW8xYDRv91vJ4avrpFOREEJsP5/73Oe48soru+/fe++9XHzxxVx33XXdkE6aptx666289rWv5bWvfW03OKOU4uMf/zgve9nLVn2+lV77rPY1keu6fOlLX+LZz372ku0/+tGPeOITn8jv/M7v8L3vfY963b6GieOYBx54gD/7sz/joosu4oMf/OCS/fr6+vjKV77Ck5/85FV/DgBnnnkmX/va1+jrW1xA8olPfIJnPetZfO1rX6PRsGOH5ufn+du//Vue9rSncdNNN3Ufu2fPHr7xjW8wMDCwpvN2vOxlL+PjH/94NyC
QZRmvec1reN3rXsett97aDW499thjXHfddTz1qU/l/vvv7+7/nOc8h8997nPrOrcQYnV8z0G1O1q7rkZrzchg785gBybne273XIfR4TJaQbMVMz1nf7c1mxGNRkSzFRNFCXGcYjJDK4rZPz7P3HwTk9lOQjvH+ukr5zc8UATtrt0K0sQQxrY2VJduRUKILa5TZ3G0ouI59Hkug77LUOAxkrOLzcbyPiXPQbH+cFGYZkw0Y6bDhOkwYX8jYqLdqVrqN0IcP0opVLGEOzKGu2M32t+4cbKnOvdEX4AQQghxKsr5tlOR7y+OPwM4/+ydfPt7d/fc519/cD/1esiLr3wSWq8u11trhHzvhw/0/JjnOjzl/L1rvXTOOXMU/nfvj9370DiXXXxW933fc0iSjChOyAUerSimwsa20BZCCLHI1YpdeZ/HGxG+htG8j2JxbBnY8FHFcyh7Dv4qOyR5h62uS4zBGLMpNxyEEEKcnPL5PP/wD//Atddeyyc+8QmMMczMzHDttddy7bXX4vs+SZIs68AzPDzMpz71Ka6++uo1ne/Zz342Wutlx3vBC16w6mMUCgW+9a1v8aEPfYg/+IM/IAztDex6vc6HP/xhPvzhDwPg+z5R1HshBsDzn/98PvWpT7F379pfPwFcdtll3HzzzbzqVa/ijjvuAOB73/seL37xiwEIgqB7bYf6+Z//eW644YZVd3dayZvf/GZ2797NG9/4RiYnJwH4zGc+w2c+8xm01riuu+zzV0rxlre8hT/+4z/G9/1jOr8Q4siUUvieQxgm3TrK8ECp5/iygxO9Q0UAge8xPFhmYrpKrRZiMoPWGmMMWZaRGYMxkGWmu/jL0YqBvgKlYm7TPj+wn2Mu59FoxjRbEYHnUm+EGz5mTQghTiaOUhRcTSPJ6PNXvqXu+y5l12EhTmgkGa3U/ss5mpLrkHNUz/pLZgzzUUotSbvns12rM8IsI4wy5rAdk/KOpuA6R1xQJoQQm0U6FQkhhBDrkGvPkvc9FxSkmSFJUgb7i+xcYQQawO13Pcr//Npt3eLPStI045bbH+QT13+H8anlRSiAJ5+3h1yw9pmulVJ+xWu897ARaJ1OTFH7eluhdCoSQojN5juanQUfrRS+tuPLFFDyHHYVfM4oBQzlvFUHimAxVOS0jwUyAk0IIbYj3/f52Mc+xs0338zLXvayJWGTKIqWBIDGxsb4vd/7Pe6+++41B4oALrroIt773vcuuYFyxhln8KEPfWhNx3Ech3e+853s27ePD3zgA1x00UXLbsr0ChQNDw/z+te/nu9///t84xvfWHegqOO8887jBz/4AX/+53/OhRdeuORjhweKLr30Uv7mb/6Gb3/728ccKOq4+uqrufvuu/m93/s9xsbGutuzLFvy+fu+z8te9jK+//3v87GPfUwCRUIcJ536jN++6TzU37tT0fh0lSRZuSZUyPsM9RcBqDciqrUWtXpIoxnTaiWEYdKtKRULPrvG+jc9UNSRD+zvk1YrBqDRjNbVkVsIIU4lY3m/2626z3cZDNxul6JdBdtN2lG24/Rg4LEj71N0dbdz0VQYc6AZMxclxIc8124mGQebcTdQVHQ1O/IeY3mPnXmfPs8laC9MjjLDfJxyoBkx3oyoximpdDASQhxH0qlICCGEWAffc+3Yscx2DIrjlChOcV2HFzzrQv7Hl7634r73PjTO5/7XzbzyJU/vzqQ/1Hy1yee/fAuTM7UjXsMlTz593dd/7pljHOixOu6Rx6dptqLudS2GimyYKAzjdZ9TCCHE6uUczWnFgGqc4mq7Mu5YRq11VrulxuBpTZRlJBl4sswEsMW8yZYt5l3QXzjRlyOEEJvu0ksv5cYbb2RhYYFbbrmFe++9l7m5OVzXZWxsjCc/+clcfPHFq+6wupJ3vetdXHXVVXz3u9+lUqlwzTXXUC6X13Ws0dFRfvd3f5ff/d3fZWFhgdtuu41HH32Uubk5qtUqQRDQ39/P8PAwT3nKUzj
zzDOP6dp7cV2XN73pTbzpTW/igQce6F5Ds9mkWCxyxhlncOmll7J79+4NPzfA4OAgf/RHf8T73vc+br/9dn784x8zPj5OkiT09/dz7rnnctlll1GpVDbl/EKIlQXtMJHn2Y7WAys8p0zTjH0HZjhr78iKxyqXcnieQ7MV2TEcyo5W64xY01rhug7uGhYZbIR8e4FdK0pI2zfGW2Hcs7YlhBBbhaMUQ0dY2FsAKp7DQpwyGyagYTDwqHiGWpLSSDJSY6jGKdU4xde2PtNsj0dzlWIgcMkd8jvd1YqK71DBIcsMzTSjkWaEaUaUGaIoYQ4ItKbgavLHWDMSQoijkVCREEIIsU5B4NFsRgSB2w4VJRTyPqftGuSlz30yX/n2j1lpwdajB2a58es/4tW/eNmyj5WLwVHH0ZyxZ4jRofUXis89c4x/vuXeZduNgQf2TXLRubYI7reLYVGcYjBkGcRx2i2SCSGE2DyutoWljeJpRZoaXAUR27tT0UKcctv4AhOt2IaJ2qu9Xa04ry+PlmKcEGKbqFQqPP/5z+f5z3/+pp3jkksu4ZJLLtnQY1YqFZ7znOds6DHX6uyzz+bss88+IefWWvPUpz6Vpz71qSfk/EKI5TqhosC1b0uFHIHvEkbLOz4/uG/qiKEisJ2P1tOdejO5roPnaeI4oxXFFHMBzZaEioQQQitFv+9S8Rzmo5S5yIaL+n2XPs/QTA2NJKXVDgWB6XakrniODY4CQzmPiufQSDLqSUo9yUBDUTsUPRswaqQZjaQ9Hq09Im02sjWfQGt8x76VMWnHzhjDQpQwHSb4jmJ3ITjRlyTECSOhIiGEEGKdcoFLsxnhuy4QLikUXXzBaRRyPl/6xx+SpNmyffOBx/OfeUHP42qtuerZF/GZv/v+Ch9XXHnFecd07WPDFSqlHAu11rKP3ffQeDdU5LoOWiuyzBDFKYHn0opiCRUJIcQpyNOKVmqDM6SQbONW2WFmuGVyeUfAJDPMhglDuZPrBo4QQgghhDi5Bb59/mjrKJBlitN2D3LfQxPLHvvgvgl4xvnH+xI3RM73iOOQOEogF/QMTQkhxHal212H+nyn25molWYUXNuBOssMtXYgqM938dvBn4KrGcl5eO0uoSXPoeQ5GGO7FNWTzC6G0lDS9mNJZmgkGc00I8oy4swQZym0fy07ShFoRc6xnYxk8dTqHNQ5DjgFqsrjWw/Pk7AAwDmVPLtPk1CR2L6k2b0QQgixTp1Z8r7f7uZzWCHl3LN28JqXXb5sZZnral750qczMrRy2//Tdw9x4Tm7lm0f7C/yay+7nN07Bo7p2pVSnHvWju77hbzPU87fw6/8wtP4hec8ecljO92K4nYXBykYCSHEqclrF6vcdiFpO3cqGmivBOxloiWjPoUQQgghxNo4jsZ1bf2kM0r+tF1DPR87PlWl2mOR16nAO6SjNUAYyXNnIYQ4nFaKPt9lTzFgbzGg33dx2uMrK77DSM7Db4+0HM157CoE3UDRoZRSFFz7+DNKAbsKPuV2PaMzIm0s77Er7zMUeJQ9h0BrFJAa29VoJkrY34iYasU0kpSsRy3IGEOSGZrtDknpNl6ENqd99jlFZrVPfMjXYSqUv3die5NORUIIIcQ6Be2RNL7ngoI0MyRJ2i0iAZy2a5DXveIKbvhf/0a13kIpeMULn8renYNHPf7znnk+9z40Tpyk+J7Ds55+LpdefCauszGZ4IvO2YXnOjzxzDF27xhAr9AS1fddWmFCGCWUCgGhPIEWQohTkt8uULntt/E2LhK5WjEUuEz2CBDdMVNnXz2klWQ00owwzdBKoZVd6ecc8t+etqsNC47DmeWAsbyMfhBCCCGE2K4C3yVJUjzP1lF2j/Wv+NgH9k1y8QV7j9/FtUVx0g09rUdn306oKIpTjDEo6YAhhBA9BY4mcDRDgUsjzahGKWGWkXc0g4G36jFlnYBRwXXIjO1SVItT6kmKoxUFrSi0e4lkxhBnEKYpjcQQG9v
RqJlmKCDvajyliI0hzmyg6NAKkVYwlvO35Qi1Stb73sdclJBkZlt+TYQACRUJIYQQ6+Z7rg3iZLabTxSltKKEkrt0NNjoUIXX//LP8vkv/xtXPPWsJR2CjqRSyvPMpz+BqZkaz/3Z8ymXcht6/Xt3DbJ319HDTb7b6VRkOxS1QulUJIQQp6JOpyKvXf9IjSEzZtu2wB7OeT1DRfsbEfsb0ZqPV3D7Ny1UlBlDPcmYDRPmooTZyL6NM0PZcxjNeYzmPHbkfRwpcAkhhBBCnBC5wKXeCLsdn3OBx/BgiamZxbG7w4Mlzj5thB0jleN+fT/48cN874cP8Ksvv5zBvuK6juG1a0RJktnXEpkNFgW+3GoSQogjUUpRdB2Kh907WA+tVHdEWmYMYWpotUNDYZoBEDgQOC4VH6LM0ExSGklG0g4kLbs+wFUag2k/JqWyDX+3l03vUJExMBsljOS8nh8XYqvbfr8NhBBCiA2Uz/nUGyG5wLOholZMqbB8tm5/pcCvv+pZS7oYrcYznvaEE77ay/c7q9BsmChJUpI027COSUIIIY6Pzmoq3W6xnRlbKPK3aahoNOdx1wYer+Cu/u+iMYZakjHVipkKY6ZbCbUkbQe9bLEqNYYMGyiqxinJETpL3dl++5+euIOiPvYCpRBCCCGEWLvAtzcaF0eEJVzwhJ1MTFc5+7QRzj59lL5y/oRc2+0/3cc//PNPAPjMl77Pr77scoYHS2s+juNoHEeRpoYoTsj5HlGUSKhICCFOEK0UeVeRdzUD7W1RmtFKM+pJRiNJ8bXC91362gGjemK7zLlK4zkKrz1ODWAhSpiPU5IeY9K2g6JJUBgMy2tl061YQkVi25JnekIIIcQxKBYC6o2QQs5jodqi2Vq5s8FaA0XACQ8UgV2FphRkGcRJiuc6RFGMm18enhJCCHHy6ozuSo3BVYrIGNLMwDbNiG50Iaiwir/zYZpx93yTfxlf6K4e3ChF16HoSaBICCGEEOJE6SzK6owIS1PDMy4554QvyvrJPY/zlW//uPt+td7iMzd+j9f84uWMDa+9Y5LnOqRpQhyn5HyPME4ob+QFCyGEOCa+o/EdTcW3C5bqcUo1TmmmWTdgtBKnfT+iRzOjbUEDJZNQVctrRtMywUFsY9u0fCyEEEJsjGLBjjkJAg+tIc0MYdS7ReapSim1ZJUdyAg0IYQ4VXVGoHWa6iTbc+EZAKN5r7sSbyOsplNR4GhqcbrhgSKwn48QQgghhDhxfM9BtbuCep59btgZJX+i3HX/Af7+n37E4Q0n6o2I6//u+xyYmO+538HJ+RXrW53QVJykAESR1IiEEOJk5ShFxXfZXQw4o5RjOOdRcB0KrqbPdxnJeewq+N2FV662f7/SbdqpCKCS9f77NxVurfs+QqyFhIqEEEKIY+B7Lr7voFDkczZg1GxtvSeXQadg1A0Vbb3PUQghtgO/EypqrzyLs2269AzIOZonDRQ37HiFVaxAz4zhzrnGhp3zUKNr6Lw0Lzd+hBBCCCE2nFKqOwasE7yJ4vSEXc/UTI0bv7E8UNTRDGP2T8wt3daK+Id//gl/+YXv8s+33Ntzv8MXnm21xXVCCLFVuVrR77vsKvjsKgSM5Dz6fJeC6xC0axpOe+1VagxmmwaLyqb33zXpVCS2Mxl/JoQQQhyjYiEgihrkcz71RkSjFdFfKZzoy+oyxhzzGDVbDAtpRRIqEkKIU5mnNZB2327XdtYdz95RIe9q7ptvUk9S8q5D3tEUXPsv79h/BkiNDQWlxpAZ/v/s/XeYJGd5Lv7f71ux4+S0ebWSVglJSEIBxAFEksCIHA5JYMIPGfPzQcbAsTEO2IAvc4zBBmPgWDYcCbCFJSPAgIXAgEGAZOW0CqvNk0PHyvX9o6p6QnfP9MxOnvtzXbpmp7or9Cxs9zx1v88DLwxh+QGqXgDLD2pdoOZzqGSjtEI3llrtVDRmu/jq4yN4dl8eF3ZnV+RaiIiIiLYqQ1dhWW5d8GY
t/PiXj8Kfp0PmC551Ji48ZzeAqHZ03yNHcdt/PYxK1QEA/OregzjvzJ11I9L0+LVNdyryEQQh5DJ2ASUiotWliaSztYAAanUQdQv+055r0qloyvHgBeGydr0m2igYKiIiIjpJ2bSBickKTCNeheb48PwAagsdC1ZSoVTFVLGKMAjRlk+jLZda8rGM+LXZjocQIVzXh+f5UFVluS6XiIhWwfT4s+irt0VXnSWkELi0J4dLe3Krcr77J06+S1FalWjXVbTrKnRFYNTyMGK5LXcq+vVICUEY4idDUzglZ6LDYFmAiIiIaLkYugagOj0ibI06FVWqDg48Odj08edeuh+XXbAPADBVrOLm79+NIyfGZz0nDIF///EDuObVl81arKap0WvzvRBeEECVEo7rwTQ4jpeIaKNSZoSJFCHgxYuqVGy9AE2+SaeiMATGbRe9KX2Vr4ho7bF6SEREdJJSpg4pBVQo0HUFjuPDshxkM+aaXI/r+RibKMGa0Y5zYqoC09BqbbhbZTseDF2FrqmQUiAIQjiuD0NTUbVc5LIMFRERbSS1UFH8fdR1J4Q8yY52tLCy6+PJolW3Pasp6DY1dBkqDCkhBSAEICEgRRR8MhWBtjhIZDYILbfaknzK8fDwVCXeB3h0qopLe1cnUEVERES0FdTGn6nT3XyWo4P0Yt3/6FH4QePPiM+66FQ8+xmn1b43DQ2Txcbh9yMnxnH/o8dw7hk7atukFNBUCdcL4LkeVEOH7TBURES00WlSwAnCWqjIC0IYW7D8nwk9RD2r643aHkNFtCUxVERERHSShBBIp3SUyjbSpgbH8VG13FUPFYVhiGLZwuRUBUEICAl05DOwHRflioNCqYqezsY3Dl3Xx/BYAcNjxfi/AkbGizA0Fe996xUAosJY1XJh2y4MTUXFcpDLrk1wioiIliYJFUkpoAgBPwzhhSF0hopWnAoa31gAAQAASURBVBOEuKQnC9sP0W6o6DZUdJtaw5DQYi10k6ro+shpCu4cLWFm/uhEPNqCiIiIiJZHEipSVQVSAkGAaHHWIhd5nYwwDHH3Q0caPtaeT+G5l+yftc3QVbzo8rPwze/9d8N9bvuvh7D/lP5Zr0HTFLheAMf1YRqA7bgAlt4hm4iI1p4ah4pUCdhBtBBtKxIAsqEHu8FjY1bjLkZEmx1DRURERMsgkzZQKtswTR0oWKja7qquRHNdH2OT092JTFNFd3sWqqrAdlWUKw4qVafpyLInDg/jX757V932inDguj40TYFpaFGoyInOUbV4I5KIaKORYjpMpCahoiCEvrYTO7eEDkPFZb35NTn3gakq2nUFD07OXoF+ouKsycp5IiIios1KUWQUuHF96LoKy/Lgul7TUFEYhvCDEOoyBM0Tx4cnMTJWbPjYeWfuhJT1n/3OPHUAe3d24+CR0brHyhUHDx44hgvO2V3bpqkKABeOF413S2pFRES0cWlSAgigxjUCb4uGigAgHzgYabB9zOb7HW1NLB0TEREtg2zaAACYugZFicaErVZBpVi2cGJ4EpbtQUqgqyOD/u62WnjI0FSYpoowBArl+rErANDT1biDURgCI+NRISopgFlOlMa3bQ++Hyz3yyEiohWmxzcRkvsWXpOxCLR59Joa/u3weN3fteUHmHT8NboqIiIios0pqZ9oavQ1Cd4kKlYU0vnWbffgr6+/DXfd/9Synv+eJl2KhIhCRY0fE7jqOedAaRA4AoAnDs++tapp0Wtz3ei1OfxMSUS04SVhIiUJFW3h0n8ubNyRiKEi2qrYqYiIiGgZqKoCw1Bh29EM+XLFQdVyVnyevGW7GJsoA5jdnQgA2tvSCIIQhWIV+WwKllVEqWyjPZeClLNzxR35DFRVwmvwm8LwWBHb+tqhayqEAHwvhOv50FQFlu0iEweqiIhoY9CkQNUHNK482zL6UhqEABr9VZ+oOugwWBogIiIiWi6GrqJUtqFrUX3GiRed3fPQYdz1wGGcGJ6c9bnsiUM
juOT8U5bl3I7r4YFHjzV87JSdPWjLNR9R1tWRxYVP241f3ftU3WOHjo4hCMJal6Pktblu9No8z4fvB1CWseMSERGtLi3+N16N7x1s1fFnAJALGoeKphwPjh9Ab/B+F4Qh/uP4JAZSOrandXQYKiQ7Q9MmwU94REREyyTpVpQydQBAdRXm604Vq9G5s0atO5GmKdi5rQN93Xl0dWQAAGlTh6ZJBEGIUqV+GrCUAj2djbsVDY8Vas9JVttxBBoR0calxcWhpEjETkWbn65IdDcJOp+o8L2ciIiIaDkZ8ecuXZ/dzadQsnB8aLIu6H3o2Bhcb3k6/Tz8+Ak4buNjnX9W4y5FM522t6/h9qrtYnB0qva9pioQAgiCKFAEcAQaEdFGpyadreMcjB+GCLdosCjfpFMRAIw3eb8bsVw8OFHBbccn8U+PD+MLjwzilkNjuHushMIKvkcGYYiq58PyA1h+ANsP4AQBvCDc0sEwWl5cjkhERLRM0ikDYxNlpEwNEIDj+vA8v9Y5aLl5nl8LLuWz0UqzfC6Fvu78jJVjKrIZA6WyjVwmhfHJMoolC7mMCTEnJd/blcOJ4SnMNTJWrP3Z0FVYtgfLcZFNG6isQnCKiIiW1/TKs6RT0VpeDa2WgbSOkQbv2yeqDBURERERLafp8WcKIAA/COH5Afbt6sF//vJA3fM9P8DhY2PYt7v3pM/dbPRZytRwepPA0Ew7BzqhKhJeg3H3B4+MYltvO4BoXJqmKnBcH05c+7IdD+mUflLXT0REa2d6/BkgAIQA/BBQt2CznXToQ5VAo5jumOWhv8H73fE5i7YsP8CTRQtPFi386MQUekwN+/ImTs2Z6DG1uvszi+UGAX42VMT9E+V5Fwy26Sr25Uyc0Z5C3zKcl7YmdioiIiJaJilTg6JIKFLCjAtIK9mtqFi2AERjz3RVgZQCvV25WqAo0dEWdSvKZgwoioDrBQ2vq7cr3/A8w+MzQkXxajvbjva3bHfLrlYgItqo9CRUNGPlWcB/yze9/lTjTkUjlgs3qL9pRERERERLo2sqhBSQQkBTo1swjuthoLcdqSbdI584PHLS5x2bLOHw8fGGj517xo6WFr1pqoIdAx0NH3vqyOjs5ybj3eIRaA47FRERbWiqFBCIgqNKHDzZqp1uBIBOrfH75pjd+J7PsQU6QY9YLu4YLuL/PTGCLx8Ywu0nJvFk0YLTIMjbCj8EDpasBTuQTzke/nushBufGMH1jw/j58OFpq+BqBmGioiIiJaJEKK2Iss0oyJRZYXGgwVBiFI5GmOWy5oAoi5FjWbXp1M6jHh+by4TjWgrlKp1z+vpajz+rFS2a6/D0JLW3QG8IEAYhLD4AZSIaENJOhTJGUUijkDb/AbSjVeNhyEwVOV7OREREdFyShab6dr0CDQpBU7Z1dPw+U8uQ6jongcbdykCgPPOXHj0WWLvju6G2w+fGK+NOgNmvzYAsB1+piQi2ui02gi0pLv11q0XdeiNQ0Wjdn2INgzDBUNFMxVdH/eMlXHLoTF8/pFB/PPBUfxqpIjhqtPywj9TkXjFrk4YDe4JNTNpe7hjuIh/emwYX318GHeNlmAvMdREWwtDRURERMsom45COykzunFn2y6CFbhRW6na8IMQqiqRjs/Vnk83fX5nrVuRCQjAsr26Yk9vk1ARMD0CTVEkNC36+JDsv5LdmE6W6/oYGi1geKwwq/BFRLSVSSFqwSKVRaIto0NXmxaaOAKNiIiIaHkZerTYbG43n1N2NQ7sjIyXMFWsXwDWqiAIcN8jRxs+tq23DX3djbtTN7K3yTV6XoCjgxO176dfWxIqYqciIqKNrlYvissH/hZehNbZJFQ01uB+yJTro+wu7f5DEIY4Wrbxs6EC/t8TI/jio4P4/rEJPFW0FuwU1WloeMmODixlotmI5eI/B6fwpQODuP3EJCYahKWIEgwVERERLaN0HCoyNBWKKhCEK7NSqxh3KcpmDAgIpFI6jHgVXCO5rAl
FlVAVpRZ8KhStWc/Jpo2mbbiHx6ZHoJl6MgIt+pBZXaFuTCerajl46ugoJqcqmJis4OjgBEe1ERHF9DlFInYq2vykEBhINe5WdKKyfgPCRERERBuRPqdT0XSoqHGnIgD46a8OLLlu8fhTwyhV7IaPnX/2rkUda6CnvWmN6eCMEWhaPE7N9XyECBEEIRd0ERFtcJqMCkVchAZ0zQgV7c4ate5NRdev6+4jAVzYncVAWodcSsJnhooX4MGJCv710Bj+/pFB/MexCQzPsxhsb87E/+hrW/L5HD/EPWNl/OPjQ7jl0BgOlWzeR6E6DBUREREtI1WRtdFnSbei5e7kYztetPpLxJ2HAHS0Ne9SBESj2TriTkb5eFxaxXJmFXuEEOjtbtytaHisUPtzUlhKVqCtx05FlaqNIycmEAQhLMeFHwSwbQ/lRbQgJSLazKaLRNHXrVwk2koG0o3DwycqDgtGRERERMvINJJQUXRD0vMCBEGIfDbVtFP03Q8dwc//+4klne/uhxqPPlNVibNP27aoY0kpsGdHV8PHDh6dHSqSAkAIuF50c5XdioiINrYkTJQEaPwtXCro1KZDRVIIvHt/H16wrR07MgbG53T1yesqntPfhv95Sg9+68x+vHZPNy7tzaHHbFyHaZXlB7h/ooIJZ/7Q7nmdGXQYzRedtyIMgSeLFr751Ci++sQIhtjVmmY4uf91ERERUZ1M2oBluUiZGkolO+7kk1m24xdLUYehTEqHKiVUdbr70Hza82mMTZahaypMU4VleSiUrdpoNADo6crj0LHxun2HR6c7FRlxNyPb8aL5vn4A2/Hm7ZS0moplC8eHJoEw6lY0PF5Eez6NtmwKE4UyspmFf1ZERJtdUiRK2lq77FS0JfQ36VRU9nwUXR/5dfJeTkRERLTRJePPVEWBogj4fgjX82HoKk4/pW9WR+iZbv/5I2jPpXD26dtbPlexbOGxp4YaPnbWqdtgNulKPZ+9O3vw6JP1xzw+NAnLdmvH1DQVtuPBdT3oqgLb8ZBpoUZFRETrU1InUuKvW7mzdVYV0BUBxw9xsGjh58NFXDHQhnM7M/MuzNKlxM6sgZ1ZA8/szaPgeHiiaOGJooWjZSe6p7IImhTYm5v/vbXi+fiNnZ0IwxAhooBQAGDC9vDIVAWHyzYWc9pRy8XXnxzFW07tQecSPkfQ5sNORURERMssCfikDB0Q0Wotd5naP/t+gEo1amedizsOteVTEC201FQUibZcCgCQz0ZfS2UbQTDdqrPZarmR8WLtg7KmRgUxhIATj3ZbLyPQporVWqCobNkYGisiDKaDWJWKU2s5TkS0lenK7FAROxVtDQPpxqEiADhRXX+dB4mIiIg2KikFtLjDQfI1qUdcev4pSM3TueBbt92Lw8frF3w1c98jR5veKDz/rJ0tH2emZp2KwhA4fGys9n3SicmNXxs7FRERbWxaXCdSa52Ktm69SAiBPfE9GAC4d7yMu8ZKtcdalddVPL0ri9fs6cZ7zujHVTs6cGZ7Gmm1tZjG3pwJXc7/3LyuosfU0JvS0ZfS0Z/WsS2t4+yONF69pxvv3t+PKwbasG2eutBcfhji7rFyy8+nzY2hIiIiomVmGhoUVUIKUWt3vVyhm1LFRhACuq7A1DVAAO1xUKgVyZi0tKlD0ySCIESpYtce7+1sHCqyHQ+FOJgDIDo3AGsdjUCbmCpjcHgKCKOf08hYCYh/5/G8AJX472BiqrKGV0lEtD5oSaeiuAYShFj0SinaeExFNm2HfYIjQomIiIiWVdLNR9eiz1+uGy04S5k6XvfSZ0BRGt+e8fwA//ztX2NsstTSefbt6sHTz9pZC/gkOtsz2LWtc0nX3t2RRS5jNnzsySMzRqDVAlPRa7Odta8PERHR0k2PPwPiCZfwt3C3okt6cjBnvF//ZLCAI2V7nj3mZyoSZ7ancdWODrx7fz/evK8Xz+7LY1fWqI2cm2t/W+v3f5rJqArO78riDaf04J2n9+H
S3hxSLYSajrFWRDGGioiIiFZArVuRGSW/pwrVk16tFYYhSuUo2JMUdnIZE6qqzLfbLLqm1sZ/5TLRh9HyjA+GPU06FQHA8Fih9mcjviFp2eujU9HoRKk2oq1QqmJ0PAoU5TIG8jmzth0ACsUqgi38ixARETC98kwKUStacATa1jDQZATaYJWFIiIiIqLlpMejZZNQ0czOybu2deLlLziv6b5V28XXvvUrlCsL37js72nDbzz/PPyv33whXvb8c7FzoAMAcP6ZOxfVSWEmIUTTbkVPHZ0OFU2/Nr/2db6RMEREtL6pUkAgeh+Qgt2te0wNbzm1F5f05HBuZwav2N2FHYvo9jMfKQR6Uxqe0ZPDa/Z049oz+3H1rk6c2Z6udRjXpMCe7PKOFc3rKp7Zm8e7Tu/Hi7a3o6dJ90RNChiK4CJEAgA0XqJIREREJyWTNjBVqCKbNlCpOrBtD8OjBfR252HoS3v7rdouXC+AoojafPr2fHrRx2nPp1Eq20ibGsYRFbWCIICUEqahoS2XwlSxWrff8GgRp+3pAwAYcaciJw5Kua4Pz/MXFXBaLqWyjbHxaPXeZKGMyUIUvMrnTHS2ZeB6PgpFC5blwfV8aKqCqWK11rWJiGgrEkJAkwJuEEIVAn4YwgtDLG+ZgtajgbSOhybru/YNWy68IKyNxCMiIiKik5PUf+Z280mcffp2TBQq+NEvHm24/8RUBf/3n3+GHf0d6GhLo7M9g327e2sL2Rqd7/yzduH8s3ZhdLyEdJMweatO2dWD+x89Vrd9ZLyEYtlCLmNCi+tAnhcgCEPIIHqdS619ERHR2tOkgMN6UU1OU/CsvvyKn0eXEqfmUzg1n4IXtONgyULJ9aEtMPpsqVQpcE5HBme3p3H/RAW3HZ/E+V0ZbE/r6DE1tOtqLVhGxE5FREREKyCXMZFK6VCkRG9XDoahwg9CDI0WltyxqBiPH8ukDUghoOvqkgpEKVMHBKCqCjRNIgwBy56+pp4mI9BGxou1P+uaAikFgmB6pd1ajUCbKERzfadK1VqgqD2fQmdbBgCgqQpScdo+GeE2WeAINCKipFtR0u3YY6eiLaFZpyIvCDGyDsaZEhEREW0Wyeh4XVMAAQRBCM+bHSx61oWn4uln7Wx6jKliFQ8+dhw/u/NxfOu2ezExWW7p3N2d2ZMOFTXrVAQAB+MRaIoiocS/V7he0q3o5Dp1ExHR2lLn1Iu28viztaJKgdPyKTy9K7vi5xJC4NzODK47Zzue19+G/W1pdBoaA0U0C0NFREREK2RHf0ddsChYYrDI9fxaaCcZfbbUTjtSitpYNtOIvlbt6ZEnvU1GoM0cfyaEqK06SwJJlTUYgeb5ASrxuJYkdNXRlq7r4JTPRj+zcsVCEIZwHA+V6tJnHxMRbQbJSidVRF+3cjvrraTbVGuBsrlOcAQaERER0bLR4gVZAqLW0ceZEyoSQuCq5z4Np+zqbumYHe2ZZb/OZvLZFLqanG/mCLSkE5Mbd2Ja6mI6IiJaH9Q4TKJw/NmWs9SxqbT5MVRERES0QqQUdcEicwnBoqrlYDQe75UyNWhqVJTKZ1NLvrZMvFot6eBjzehM0NMkVDQ6UYbvB7Xvk1CR7bi161xt5YoNhIDtevC8AFJMh65mSpk6NE0iCKJxaQAwWagf8UZEtJVMdyqKi0RcebYlSCHQ12TV+okKQ0VEREREyympnejJCLQGtSBFkXjNVRc2XeSV0DWlVs9ZLXt39dT+nE7pOPu0bfiNK87F/7j49BnXFb3GpENRo9dIREQbx/QitKhe5LNcRLTlcbAtERHRCkqCRUcHJ1CtOujtzmN4tADL9jA0WkBHWxqGrkJTlVkp8DAMUa7YKJQsOPFKL4jpjjv5XAqySZeBVmTSBkbHS1FxSwCuF8D1fGiq0rSI5fsBxifLtdCRaWgAqrVwlO14CILwpK5rsZLuRNW465Bpak3Pn8uYGJ+soFi2kM+aKJYtuK5fW1FHRLTV6AwVbVk
DaR1Hy/Ud+warHH9GREREtJwMQ0PVcmHoKsoVp+kCM0PX8IaXXYzr/+W/UCxbDZ/T2Z5Z9Q4C55y+DR35NPbu7EZPZ65hzSWpqyShomTxGRERbUxJnUhhvYiIYuxUREREtMJmdiySQqC3O1/rWDQ2UcbxoSkcPj6O48OTGJsoYbJQwdHBCYxOlOG4PoQEclkTO/rao7FlYumjzxKmocVz7yXM2hizqOjT3ZFFsxrV8Hix9mddUyEE4HkBPM8HQsCyV6/Dge8HKMdhonI8riWdMpo+P5s2IEXUjtuyHSAEpoqVVblWIqL1qNapKP43PwDgs6X1ljCQ0hpun3I8lF2/4WNEREREtHjp2vj56POXbbsIm3zmbsul8KZXXNJ05FjnKo4+S+wc6MSlTz8Ffd35pou4ki5MbjzazXF8BLwBTUS0YU3Xi5JORfw3nWirY6eiTa5YLOKXv/wlDhw4gMnJSWiaht7eXpx77rk4//zz1+1sRMdxcO+99+LRRx/FxMQECoUC0uk02tvbsXv3blx00UXI5/Mreg1PPvkk7rrrLhw5cgTVahXZbBZ79uzBM57xDGzbtm1Fzx2GIe655x7cd999GB4ehuu6aG9vx+mnn45LLrkEudz8rXCJaP1JgkXHhiZQqUQdiyYLFTiOB8f1EARR0cVxpm/kKapALmMilzGhxC1HFSUao5a0lj4Z6ZSOYsmCaWqwbA+W5SKXMaGqCro6srWRazMNjxVw9mnbaq9J11TYjgfL8ZBVFVQsd95gz3IqxaPPHNeD6wYQAkibjW+SRtcrkckYKJaiDlCmoWOyUEVXR3bdvh8SEa0kVQgIROOwFCHghyG8IISi8N/EzW5gxtgMTUbj0AZSGgbSOnT+/RMREREtm1T8uUvXVCiKgO+HsB2vFjKaq6czh/e86bkYHJnC8FgRE1NlTExVMDZZRl/XytbDl0pTo1CR74XwggCqlHDc5q+RiIjWtyRMpAhAAAgB+EFY61xERFsPQ0Wb1K9//Wt88pOfxLe//W04TuOuEf39/XjnO9+J97///ejs7FzlK6wXBAG+853v4Atf+AJ++MMfwrbr2/EnpJS44IIL8O53vxtvfOMbkckszyoNz/Nw/fXX47Of/SweeOCBhs8RQuCSSy7B+9//frzuda9blvMmxsfH8elPfxpf/vKXMTg42PA5uq7jpS99KT784Q/j4osvXtbzE9HKklJge990sKizbfrfLtfz4bgeHMeD6/lImTqyGQPR7V5A1xV0tGWQz57c2LOZMmkjChXFY8yq8Wo5IQR6OnN1oaL2fAqaMntUmGFEoSLb8ZBNG6haq9epKBl9Vom7FaVMDVLO34QxlzFRLNmoWG7UXSk+Tj6XWtmLJSJah4QQ0KSAE4RQZ4SKDE6F3PQymoIXb+9Aj6mi29QgGa4lIiIiWhGqIqPaiR2FbMoVB5btzhu4kVJgW187tvW1r96FngQpJVRVRp2sXQ+qoc8bnCIiovVNlfFdCSEgk3pRGEIBawdEWxVDRZuM67r4wAc+gL/5m7+pa6OqaRo8z6ttHxwcxJ/92Z/hi1/8Iv7hH/4BL33pS9fikgEABw4cwNvf/nb8/Oc/b/i4ruuzwlFBEODOO+/EnXfeiT/7sz/Dl7/8ZbzwhS886Wt4wxvegLvvvnve84dhiDvuuAOvf/3r8fd///e44YYb0N/ff1LnBoDvfOc7+M3f/E0MDw/P2i6EgKqqcN1oLJHjOLj55ptxyy234H3vex8+9alPQdP4CxrRRpF0LJoqVlGu2LBsD57nQ1MVaKqCzJwuPylTQ0d7BrmMuezXkrTgNnQVUgJBEMJxPRi6hv2n9CGXMdDblUdPVw49nTkYev3HBlPXUIBVG51WtaaDSStpsaPPErqmwjRUWLaHYtlCR1sGE4UKQ0VEtGWpUsIJfGiKgB0A3gq1tA7CEGUvgO0HUKWAqUjoUjDMsobO7ji5UapERERE1Jp0Sq8LFW02mqrA8wI4rg/TAGz
HBcBaCxHRRqVKAXfmIrQwxOrMJyCi9Wj+5fy0oViWhZe85CX47Gc/WwsOdXR04FOf+hSOHDkCx3HgOA7uuOMOvOlNb6rtNzw8jKuvvhpf/OIX1+S677jjDlx44YWzAkVCCLzuda/Df/zHf6BQKMC2bViWhbvuugsf/OAHkU5PF8APHz6MF73oRfj7v//7JV/DnXfeiUsvvXRWoOiyyy7DrbfeilKpBNu2MTExga997Ws488wza8+5/fbbcfHFF+PgwYNLPjcAfPGLX8TVV189K1D0pje9CXfccUft7+3w4cP4y7/8S3R0dACIwk2f/exn8ZKXvASWZZ3U+YlodQkh0J5PY3t/B/bt7sG+Pb3Yua0DPV055LImUqaGfC6FXds7sWt714oEigBA0xTougIBUVs9VrWiwtbT9u/Ai//HOXj62buwo7+jYaAIQG276/rwggBhEKJSXfluReVk9JnntzT6bKZ8NipqlSo2QoSwLHdVOywREa0nWtz9LlltVvYCOEGwbMcPwhBF18dg1cWk46HqByi6PkYsF8crDkYsF0XXX9ZzEhERERGtJ2kzug2b1F5sx0MQrEyYf63oWtTu1PWiz/WO46/l5RAR0UlK6kVqnCTwN9n7FrXG9gMcLdu4b7y81pdCa4ydijaRt7zlLbjttttq35966qm47bbbsHv37to2VVVxySWX4JJLLsELX/hCvP3tb0cYhgiCANdeey0GBgbwspe9bNWu+eDBg7jqqqtQKk2P2NF1HTfeeCNe/epXz3quYRi44IILcMEFF+Caa67BlVdeiSNHjtQev/baa7Ft27ZFX/+hQ4dw5ZVXYmJiorbtPe95Dz73uc/NGqPT3t6ON7zhDXj5y1+O1772tfjOd74DADhy5Ahe9KIX4c4770RbW9uizg0A3/rWt3DttdciiG+kSCnxT//0T3jzm98863k7d+7EBz7wAbzmNa/B85//fDz55JMAgNtuuw1vectb8C//8i+LPjcRrQ+qIqGmjJY67Sy3dMqA41RgGjoqVXfRq+UURULXFTiOj0rFRj6bwlSxikx6ZV9LsTx79JlpLDz6LJEytVpb7nLFQTZtYKpYRSru3EREtJUYcZEoo0pUPAk3DDBSddFlajCVpa9BSToTFVwPSd1JEQJZVYEXBrD8EH4YwvIDWH70OVgVAu26ipTKtS9rKQxD2EGIqheg4vuoeAEUIdBjashpnI1HREREtFgpUwNE1M0nqUfYjrvmdQjP8zExVYHtekindHS2ZZZ8LE1LFp15AADL2XzdmIiIthI17iytxF9XqrM1rS9Vz8d9ExUMV12MWNECwcS+nIkM60JbFqu1m8QXv/hF3HTTTbXvTdPErbfeOitQNNc111yDP/iDP6h9HwQBrrnmGgwODq7otc503XXXYXJycta2T37yk3WBornOOuss3HTTTVDV6VxcGIb47d/+bVSr1ZbPH4Yh3vjGN2JsbKy27YorrqgLFM2USqXwz//8zzj99NNr2x5//HFce+21LZ83MTg4iLe97W21QBEAfPSjH60LFM20Z88efOtb34JhTN+wv+mmm9as0xQRbWzJuLWUOXO13OK6ReTSUSelYtmOv1rw/JXrOOH7AUqVePRZJeowlEm1XogTQiAbh55KlSicVCxZm26VIBFRKzKaAl1KKFKgN6XCkBIBgFHLRdlrfXVxGIZwgwBVL0DB8XCi6mDSiQJFqhDo1FUMpDTkdQWdhoZtaR39KR3tugpTkRCIClRjtlsLGdHasPwQn3/4BK5/bAjfeHIUtx4exy2HxvClRwfxzadG8VihioDFRCIiIqKWKYqsdSlKvq71CDTX9TE4WkC56sDzAhSKFoqlpXfDTzoV2XGoyPcCeIv4fYKIiNYXLb5HmoSLfJYBtoQAwH8NFfBYoTorUAQAwxYDw1sZQ0WbQKlUwh/+4R/O2vaBD3wAZ5xxxoL7/uEf/iF27dpV+35iYgJ/9md/tuzX2MgTTzyBW265Zda
2nTt34n3ve19L+1988cV4zWteM2vb4cOH8Y1vfKPla7jppptmjV1TFAWf//znF+x2kU6n8ZnPfGbWtq997Wv49a9/3fK5AeBjH/vYrA5Je/fuxe///u8vuN/ZZ5+N//W//tesbR/96EdRLrP9HBEtTjqlz1otF4aANefD4kIyaR1SRAUp2/WAECiWWg94Ltbs0Wd+NPpsEaEiALVQkWV5cD0fQRCiVOYoSSLaehQhsD2tw1QkpBDoMVWkVYkQwLjtoTDnPSEMQzhBgLLrY8L2MGK5OFFxcLTiYLDqYtR2MeX6s8JE/SkNGU2BEAIpRdY6IGlSIKcp6DE1bE/rtfOOWS7cBQKuQRii7PqYdDyM2y7GbBejlovhqouhqoPBqoOhajRebXTOyqrkdVBjhiIQ1wzrHCrZuPXwOL58YAi/GC6g6C7/jSIvCHH3WAk/Gyrg+8cm8K+HxnDjEyO4+dAYfjFcwKGSDYfBMyIiItpgksVQ06GixdVelpPteBgcmYLnBdA0ifZ8tFhsslhZ8oIrTVUgBRAGUb0GWNvXSEREJ0eNO1sr8VeOP9saMqqCbJNuRCMMFW1pDBVtAn/913+N4eHh2veGYeB3fud3WtpX1/W6cMoXv/hFHDx4cDkvsaFvfetbddte/epXz+o+tJDXv/71LR23Ed/368JYL3/5y7F///6W9r/yyitx7rnnztrWSiAo8eSTT+JLX/rSrG3vf//7oWlaS/vPfe7Q0BD++q//uuXzExEBgJSi1m47+Wot8sOhlBKpuDhWirsVTRZWLlR0MqPPEqqq1LozleNuRZPFlbtmIqL1TJEC29I6smoU/OkypsdcTbk+xu0olDNcdXCs4mCo6mLc8VDyfFh+UGuBLQDoUiKtyLowUVqV2JExsD1jYEfGwN6cif6UjrymQJUCIg4gTXdK8uA3Cf64QYDh+BqKro+yF6DiBaj6AewggBOEcIMQThCNV6v6ASxvdghl0uGq6WakEEgr87ezLrk+fjFcxJcPDOLfDo3h16NFHCpZqC7DanQB4EcnpvCrkSIenKjgqaKFwaqDg0ULvxgu4ptPjeJzj5zAVx8fxg+PT+LhycqKhJuIiIiIllO6FiqKat9L6RS9HCzbxdBoAX4QQtcV9PW0oS2fhqpK+H5Yq7k04nk+hkYLePDAMfz4jkfxL9+9E393w4/x+KFhCCGg69Frc+JAv2U7q/KaiIho+SUdilSOP9tyeszG96kZKtraWk9v0Lrkui7+6q/+ata2q666Ct3d3S0f401vehM+8IEP1H6JSY75N3/zN8t6rXPdf//9ddsuvPDCRR3joosuqtt23333tbTvzTffjEcffXTWtre+9a2LOv9b3/pWfOADH6h9f9ttt+Guu+5q6XX81V/9FVx3+h9gRVHwxje+seVz9/X14cUvfjG+/e1v17b9n//zf/DBD36w5WASEREQrZarVh2YpopiCajaDoDMoo6Ry5goVxyUKxY62tJwHA+W7dZW4C2XIAhRrkZFqaWMPpspmzZQtVyUKg7a8xlUqw5c14fGucBEtAVJIdCf1jFiuZhyPLTrKhQhMOl4KM8J5CThIV0R0ISAIqe/zpVWFXQaaq07UUIRAlltevXTYMVByfPRbagYstxoFJrlotvUIGe0zal6AcZtF0F8jIwajU4DBBQRjbiMXg+QLKILwhC2q2BmT0/bD+CHIZRmLXm2uJQqWxp/F4bAE0ULTxSnbz5l4+5TvWYUTkurEmlVQUaVSKuy1kK9GUUKpFSJqtf8JlsYRsWsEcvFvePR32yXoWJX1sTurIEdaR26wjVUREREtH6YRtQpWlUUaJqE6wawbG/RnZdPRqXqYGS8iDAETFNFT2cOSvzZrC2XwthEGYViFbmMCdngs/0XbvhPTBQqdduHR4s4dXcvdF2FZXuwHQ/ZtMFORUREG5iWdCoSUR0oRNStqFHthzaXXlPDwWJ9yJjjz7Y2Vtk2uB/+8IezxmcBwEtf+tJFHaO
3txcXX3zxrG3/+q//uuIjAYaGhuq29fT0LOoYjcJTg4ODLe170003zfpe13W88IUvXNT5f+M3fmPB4zYShiH+9V//dda2Sy+9FF1dXSd1/omJCdx+++2LOgYRUW21nK4BAnDdxc+9Nw0NqioRBFGRCgCmVqBbUbliIwxCuDNGnyUdlhYrZeqQUsDzAlSt+JpXcGwbEdFG0GNq6IoDoTlNQZehwZASWU2Juw/p2JEx0JvS0K6ryGgKTEXWikqKEDAViTZdxY6MgW3xaLWF9Ka0aASbFLUgkR2EmJgxtqzgeBiNA0WGlOgzNbTpKvK6iryuIFMLsEQj1pI/Z7Xosbk8tu5uKq0uvVRQcn0cLFr45UgRtx2fxLcOj+PrT47g/x4Ywg1PjLR0jIy6+IDvmO3h7rESbjk0hs8/MohvHBzBz4YKeGCijKNlG0XXR8CVlURERLRGZnaKNo24U7S9ejfnSmWrFihKpzT0deehSAkj7pyUzRhRt6Kgebeizo7GC9BGxosAAF2LOxW5Saci3nwkItqoVCkgEC3ekuxWtKX0phovFJ90PI6j38IYKtrgbr755rptz372sxd9nLn7HD9+HL/85S+XfF2taBRaWmyQaanHsG0b3/3ud2dtu+CCC5BOpxd1/v3799cFoRr9ncx1xx134MSJE7O2LcffW6vnJyKayTQ0KIqMiklxq+rqEgo/2bQBACjF48QKpSqCZb5hmxS2yjNGnylL7EQgpUAmHY9tq0THW4kgFBHRRtNhROEhgShc0pvS0BEHiJKVaqoUyMZdiPpSOnbGI8325kzsyBjoMbWWwkQJKQT6UzpUKaBJgS5DhQBQ8QJMOh7GLBdT8YirqBOOCkUK6DIKMHXoKjoNFd1xh5y+lI7eOCDVaajIM1S0KOkZf3fqMq5CTLcYFjqZUBMQdac6Vnbwq5EifnBsEv98cBRfenQQ//exIYzx5hYRERGtkcycEWirFbopliyMTpQRhlF4qKcrBwGBbMbArm1dyKQNCAi056PaeKFYbTiaracz1/D4o3GoKKkpOa6HECF8P4DLMbVERBtWUg/gCLStpdn4szAERtmFcMtiqGiDu/XWW2d9n8lkcPrppy/6OE9/+tMXPPZyGxgYqNs2PDy8qGOMjNSvdO3v719wv//8z/9EsVicta3Rz6AVc/d79NFH8dhjj827T6Of7VLOf8YZZyCVSi14bCKi+Qghat2KUvMUtoIgwOh4CQ8/fgLHBifqHk9CRZblwfV8BEGIUpPVbUsRBGEt/JN0QzrZNuHJNVcsB34QdWiqxIElIqKtLKsp2JY2YCgSqhRIqwo6DBUDaR17sib2ZE30p3V0GtGIK0ORJz1KTJUCA3GYyVQkOuKbEkXXR8UPIAB0xgEiIaJQ046MHnVXMjV0GlH3pLyuIqcpyOsqOgy1tn0uFsOau7wvj3ec3offPmsA7ztzAG88pQdnd6RPOmDUalhoKZ2KWuEHQFrhmFMiIiJaG0kNw4g7gzquD2+FV/x7no/xqWhcbD5norsjCwGBtnwK2/rao06hnVkAQCatQ9OibkWFUn09pyd+3lyjEyUEQQhNVSClQBhErw0ALIeBbiKijWp6YVn0vc/FWVtCXlOgK43rPxyBtnUxVLSBDQ8P13W72b9/P8QSivlnnXVW3bZ77rlnqZfWkmc961l12+68885FHaPR8y+77LIF92v02s4888xFnTuxlJ/dcp1fSon9+/fP2nb8+PGGYSsiovnURqAlLbjjD4cPHjiGW35wN774tZ/gL77wPfzdDT/GTf9+F+5+6EjdMVRVQSpOsZfjbkWTxeXr/DNz9JnjRKPP0kscfZYwdA26piCcObatwbxgIqKtKKVK7MwY2JM1sS2to8vQkFGVZe1cM5ehSPTHXeQymoJc3GFIEQI9pl4bY9ZlaOhP67UW3EvBUFFzeV1Fm65ClxJCCPSndbx4ewfevb8fz+lvQ4dRH9JqRathoR5Tw46MgdPbUnh6Vxa
X9uawL2cidZIdjFQpYDYpjBERERGtNNPQIKWAKiV0PfpctNLdikoVG2EYdUfqbIvGl3W0p9Hf01a7j2AaGrKZqFtRWy7uVlSy6roVNetU5Lg+CvE4eT3+vO7EY4wt3nwkItqwkg5FySIyn2WULUEK0bRb0Qjf17cshoo2sIceeqhu2549e5Z0rEb7NTr+cnrlK1+JbHb26oZvfvObcN3W/0G68cYb67a96U1vWnC/tf7ZrfX5iYjmyqSijj2GoULKaNWB7Xg4fGIC9z96DEOjhVmr50bGig2PMz0CLQroVKsOHHd5WmIu5+izmTK1a7Zr5/E5G5iIaM1kVAXdcfGiXY9Gl/WZGgxFQAqBbWl9yaGWmTj+bPFMReLC7izedmovXre3G8/ozmJP1my5A1Grz7uoO4vX7e3Gb+zsxPMG2vDM3jxevrsL79nfj7ef1ocXb+/AuZ0ZdDUpcjWT15QlLcIhIiIiWg5CCKTMZFFX9DnGXuFQUTmudWQzJgAgn0uhtytf97zujqhOn00b0DQFQYNuRd1NQkUAMDJeAjA9As2Oa0HsVEREtHFx/NnW1TRUVOX7+lbFUNEG9uCDD9ZtazRSrBVtbW11Y7QOHTqEcrm8pOO1oqOjAx/5yEdmbTt+/Dg+/elPt7T/L37xC9xyyy2ztj33uc/FVVddteC+y/mzazRurdHxE6VSCYcPH561LZPJIJdr/kvZcp6fiKgRTVOg6woERK2wZdkuersa/9s0PFZA2OCXiJSpQ0oBzwtQtaJgUeEkuxUFQYgTw1MoxsWs2uizk+xSlMikDQgB2LYHx/MRBmEtwERERGujPe6UAwBtugpFCuhSYmdGR3qZRmOxGLZ0QgjsyBh4dn8bXrWnC+85YwDv3t+PV+7uwuV9eZzelkJ/Skden93ZKnOSnYaEEOgwVJzdkcYLtrXjmlN78e79/bhyRwfObE8vGFrKaxx9RkRERGtrulP0dO1lpVi2C9cLIOX0edvzqYbPNQwNuWwUPGpvi55TKM1edGXoKvLxc+ZKFp/pWvQZnp2KiIg2Pk1Gv2Mr8e/1HH+2dfQ2CRWN2i4C1tO2pJNf3klr5sknn6zb1t3dveTj9fT0zAq7hGGIgwcP4pxzzlnyMRfywQ9+EPfccw++/vWv17Z95CMfwfbt2+ftOHTvvffi1a9+NXzfr23bu3cvbrjhhpbOu5w/u97e3paOnzh48GDdjfiT+Xtb7PmJiJpJmwYcpwLT0FGpuqhaTtNQkeP6mCpW0Z5Pz9oupUA2baBQslCq2EiZOqaKFro6skvqDGA7Ho4PTcBxfIQIMVWoTI8+Sy1PqEhVJFKmhkrVRblsQW/LoNDgtRER0erqNlQoAih7AVKKRIeh1lpuL4cgBIIwPKkRajQtqynIagr25mbfaArDEE4QouIFJz2+rNl5z2pP46z2NMIwxKjt4XDJxmDVwZTjY9LxYMU3w/I6Q0VERES0tjIpHSOIQ0UCcL0AnudDXabg/EzJgqlM2oQUArqu1DolNdLVnkGxZCFjGpjSq3CcaKxZRzw2DYhGoM3tYAQAoxNRqCjpVOS4UR0nCADH9WphIyIi2jiSDkXsVLT1NAsVeUGIcdurdRinrYOf5DawQqFQty2fr29d2qpGnXIanWM5CSFwww034IwzzsAnPvEJ2LYN13Xx5je/Gd/4xjfwzne+E5dccgk6OztRKpXw4IMP4hvf+Aa+9KUvwbbt2nFe8IIX4Ktf/WrDrj2NFIv1Y3uW+rNb7M9tM/y9EdHmlE7rmCxUYMYfCG3HaxoqAoATw1MNgzeZOFRUrjroCALAi7oLJWPGWjVVrGJotIAwCOH5PkYnSrCsaKVbR1t6WUafJbJpE5Wqi1LVRntbGlXLhe14tWIYERGtPiEEOg0NnYt7+2hJEk7yghC6wlDRShJCwFAEjGV8357vXD2mVtem2/IDTDkevCAEB5wSERHRWjKSUe5+AENXYdseLNtFdplDRUEQoBp3es6koyB
RW27+xVNJt6JiyUJ7Po3h0SKKJQv5bKpWg+nuzOKJwyN1+ybjz1RVgSIF/CCE7XgwdQ2W5TJURES0AWlxhyJFAAJAiKhbkSJZR9nsugwNUoiGXYmGLZehoi2In+Q2sFKpVLctk8k0eGZrGu3b6BzLTUqJP/qjP8I111yDz3/+8/jGN76Bw4cP49Zbb8Wtt97adD9VVXHFFVfgve99L66++uqWz5cEl+Za6s9usT+3jfb3Njw8jJGR+l8U5/P444/XXc9qBp3mju1byTF+RJtJEISw7QoQAgIOfC+E62hoz5mYmKrUPf+pI0M4ZUdbw2Mp0oPr+pianEI2bWBwyEfPPAGlmcIwxOhEGeV4RZ1lu5gsVOAHIaQA2vJpGGoI26q/pqWSCCECB64dYmqqgJShYXBwBB3tS//3eT3gv4dERJG6f//sKvwwRMVTEKzAqnBaf9oAVFwfBdeHq0hYvopQChR8e8F9V8Nq/O5NRERE60M6paNYspAyolBR1XaRzTQeK7ZU5YqDIIzG3Zt61BUpn2s8+mymro4siiULaVOHEV/fVKmKzrhbUU9n49rO6HgRYRhCCAFdV1G1XDiOH4WKHBd5LHxuIiJaX1QpIABACEgh4IchvDCEAoaKNjtFCnQZKkYajDFttC1xouKg6Po4vW313vcnbA9PFi1IAezOGug0GHhaCQwVbWCNuu2o6tL/Shvt2+gcK2XPnj147Wtfi1wuh5tuugn33Xdf0+d2d3fjXe96F6688ko8+9nPXtR5mr2mpf7sFvtz22h/b5///OfxJ3/yJyd1jF/96lcYHBxcpita2vmJaGmOF4GUZmHQrtY9duDxp7C3Z/4bcSNFYHGxxIVZK5xRHFy9t75Vx38PiYgihQMPAAAYtdyaygDG4j8/tJYXMsPMUeRERES0uaXMKFRkGhoAC5bd/ObcUpUqUb0mm4nafmbTBtQWOkcauop8LhWNhc+lMGQXUSrb6Mino66QTRaMOa6PQslCWy4FIwkVuVG3aWuem49rLQhCjE2U4Ho+2vNppFPNx8MREW1FqhRwgxBqHCry53SuCeJtqhAQHC+/qfSY2qJCRbYf4LtHJzDleHhaKY3nDLRBlyvbtfrRqQq+f2wSXjD9v8u9ORMXdGWxK6Pzf5PLiKGiDaxarb/BqyhLX2XbKJzS6Bwr4dZbb8X//t//Gw8++GBtm2EYuPzyy3Heeeehq6sL5XIZhw4dwg9/+EMMDg7iE5/4BD7xiU9g9+7d+MAHPoBrr722pdff7DUt9We32J/bZvp7I6KtobvdwMFj9bddx6YcBEEIyXanREREREREREQtSYIrhqFBSMD3Q7iuD01bng6ajuvBdjxAoDaKfqHRZzN1dWRQKFZhmhqkjII3juvD0FV0d2Sb7jcyXkRbLgU9HiVvO3GoyPFqXYzWm+NDkyjHAaxSxcbu7V0wdN42IyJKqELARQhVAnaAWnjDC0IUXR9lz0cIQAogoypIq3LFgyS0OnpTGh6arN8+XHUbvq/ffmIKU/F7//0TFRytOHjJjg70rVBg94lCFd89OoG5E9oOFi0cLFroMTVc2J3F/nyKI/uWAT8dbWCpVH3rMN/3l3y8Rvs2Osdycl0X73rXu/BP//RPs7a/733vw0c/+lF0d3fX7RMEAW688UZ84AMfwNDQEA4dOoT3ve99+Md//Ed861vfwrZt2+Y9Z7PX5Pv+ksI9i/25bYa/NyLaWnrajYbb/SDERNFBV1vjx4mIiIiIiIiIaDZDV6GqCjwvCupYlgfLdpctVFQqRyGZTEqHKiUUVSKTbv2Gnq6p0DQFruvD0DVULRe248LQVZiGhlzWRLFk1e03Ml7Eqbt7oWvRbSfX9RGEIWSAWihpPSkUqyhXbIQIYTseTF3D2EQJ2/ra1/rSiIjWDU0KVH1AiQMklh/CtlxYflB7jgAQhEDR9VF0fehS1AJGch0GSqk1vWbjMWKWH6Do+sjPeF9/aLKChycrs543YXv42pO
juLwvjwu6Msv6v4WjZRvfaRAommnEcvG9oxP4qVbA0zszOL8rw8DbSVhfn+JoUXK5+lajnuct+XiN9m10juUShiFe//rX4+abb561/frrr8fb3va2pvtJKfHmN78Zl19+OZ73vOfhqaeeAgDcdddduOyyy/DLX/4S/f39Tfdv9po8z1tSqGixP7eN9vf2W7/1W3jta1+7qH0ef/xxvOIVr6h9f/HFF+PMM89ctmtaSLlcnjXi5+KLL0Ymk1m18xNtZOWyjZHxImzHw+hECYoi8fS9Wfz47h/Dn/GLQkKafdh76s6Gx3I9H6PjRQTzfLCbj6GraM+loaqr90GvVLExVaxC0xT0duYgpMCubZ3rcjVdK/jvIRFRZO6/h/1nnYei1KAIgZ4UZ61vFRXXR8H1YSoS7YYKQ4oVWzG3WA8//PBaXwIRERGtonRKj7oBGVotVJTLmid93DAMa513al2KsqlF1zXSKR1TbhWGEY0ys2wX+Wy0kLWnM9swVDQ6VgIAqIqEqkp4XgDH8aLXaLvrKlTk+QGGx4oAgKlCBeWqi+197SiWLFjtbjyajoiI1LjDixq/j9jB9D0CU5HIaQoMKWAFIcquD8sP4AQhHMfDpAOkVImsqsBoYQQnrS89TUJFQBTYSUJFk46H209MNnxeEIb4yeAUDpUsXLm9A5llCFAPVR3ccnhs1siz+ZRdHz8bKuCBiQqu3NGBbYsIWtO09fMpjhYtm61vNVou14+naVWpVGrpHMvlM5/5TF2g6Ld+67fmDRTNtGfPHtx44424/PLLEcRvYocPH8b//J//E7fffnvTX5QMw4CmaXDd2TMfy+UyDGPx3TYW+3PbaH9vvb296O3tPaljZLNZ5PP5ZbqixctkMmt6fqKNRDdcFCo+FD2ALLgIAZipDLb3d+DIiYm65w+NVWCYjVtoGwB0I4VK1UEQhgjDEAijD5LJVyEARZFQFQWKFFAUGX8vIdcgNa5qJoqVAH4AqLoJRUqYZhrGJikm8d9DIqJIKp1GRcZjJ9Kcsb5VeK4PxfGgKRKmqcFUJPKZ9dFxcSV/9yYiIqL1Z2aoCKiiajnLMgKtUnXgByEUVSAV3wxsyy++q33K1DFVmL4+255e2NrTmcOTh0fr9hkZL9b+rGsKPC+A7U6Hitpy66e7/shYEb4fwHE9TBYtIIwWmmXTBkbGi9g50LnWl0hEtC5ocY0+GR8lAGQ0BVlVgTZjpFRKEUgpEkEQouQFqHgB3DD6WvECGDIKIKWaLCAOwhCWH8D2QygCyGoKuxytMUORaNPV2kizmYYtF/vizxfHyg7cBQI+h0o2vvrEMK7c3oE9uaWHqKtegJsPjcHxF7+SfdLx8I2DI7i4O4dLe3IcibZIjAVuYI1uChaLxQbPbE2jfVeqU1GpVMLHPvaxWdsMw8Af//EfL+o4l112Ga6++upZ23784x/j1ltvnXe/Rq9rqT+7xf7cNvLfGxFtfkmLalVKKEr0ocr1/Katn48NTS54vPZ8Gp1tGXS1Z9HVkUVPZw49XTn0defR25VHV3sWbbkUshkTKVOHrqlrEigC4oBT/IuN60Yflq0GH5qJiGhjk1IgKR148/VKJiIiIiJaAWkzDrjrKkxDRRACw2OFhl2iF6MUdynKpgwIRMGipNaz1OsTEvCDEE5cJ+nuaByGHhkvRgvKABh6FGhy4pqKZbsN91kLlaqNQrEKABibLAPxrwOThQpChKhUHFSq9hpeIRHR+pF0KFKFQJumYltaR4euQpMCUgi06yq2pw106CoUISClQF5X0J/W0JfSkVElBKIOR6O2i8Gqg7LrIwxDOH6AguNhuOrgeMXBmO2h5PmYcn0MVl3YJ/meSCevWbeiEWv6ff3sjjRet7cbeX3+YHTFC/Cvh8bwk8Ep+Escb5FSJS7uWfo98DAEfjlSxNcOjmDMWj+fTTYChoo2sL1799ZtGx2tXyHQqrn7CiEanmM53HTTTRgfH5+17aqrrkJPT8+ij9W
os9HnPve5efdZzp/dyMhI3bZTTjml6fP37NmzbOdeyvmJiOYjpaititPU6Kvr+djR39Hw+eOTZVQtZ9WubzXo8et3XB8AYDv8cElEtBmpIvp12GONioiIiIhWmaYpyOdSEBDo7spBVSVcL8DIeBHBEm+0eZ6PanyDLJuJugC05Rt3l27l+lRVgYCojS1LgkE9XY1v5jmuj2I5GouW1FbsOFRkO14tcLSWgiDE4EgBAFAoRR2YpBRQFAHPC2pj3UbG66cDEBFtRYYioAgBNQ4LSSGgSYFuU8OerIFuU0NKleiKvx9I68iqCgQAXQp0Ghr6UzpycechNwgx7ng4VnEwZLmYcn3YQYgQgCYEspoCVQj4YYhhy8XkOnn/2Kp641CRIsSs0NCINXsh9va0gTfv68X+toW7Et45WsI3Do5icomLuS/oyuLKHR2Y28hqf1sK+3Jm3fZGhqsubnhyBHeNlqLJGrQghoo2sLPPPrtu2/Hjx5d0rKmpKVQqlVnbdu3atWIt2G+//fa6bc961rOWdKxnPvOZddt++tOfwrabryZYzp/diRMnWjp+IpfLYdeuXbO2lUqlJXcrWuz5iYgWkqxg0+Kvnudje5NQEbBwt6KNJnn9SajIstmpiIhoM0o6brNTERERERGthb7uPAxDhSolervzkDKqQYxNLi3QknQpMk0VmqpASoFcZukjRtKpqFuRGY+ET0ag9XQ27xAwMhbVuPVaTSmAHwQIg7AWMFpLYxMluK4Pz/MxWYjuh3Tk02jPReGrqWI1GsFjubWAFBHRViaFwEBaR1pVkNUU9Kd07M6aaNfVuvFkQghkVAX9aR17sia6DK0WSGrXVQykNLRpUUejENEotZQi0aGrGEjp6I+7IPWlNGTiok3R9TFkuXCCxa0Ic4MAZc9H2fXhLnJfmtZjRu/nfhjivI4MrjmtF8/pb0O7rsCZ00nKVCResqMDL97eMWs0XiODVQf/74lhPDxZWVJo7Kz2NK7e2Qk1Ps/pbSlctaMDL9/dhbed2ofzOjO1x5rxghD/OTiFf3lqFMcrm2vh/EpgqGgDO+uss+q2PfXUU0s6VqP9Gh1/uRw4cKBuW6MOPq3o6elBOj17xUW1WsXhw4eb7rPWP7u1Pj8R0Xx0PelUFH1McFwP+ayJbMZo+Pxjg5OrdWmrIil81VbTraMW3UREtHyS4oK3xJXgREREREQnQ0qBHf0dUFUFuqqgtysHCKBccWqBl8UoJ6PP0lGQKJc1IRe4oTafWqgoHmWWdCoyDa1pWGl0IgpEKYqs1ZVq4+XXuL5i2S7Gp8oAorFnQQCYhopcXPPSVAnfD1EoRaPRRmeMcyMi2spMRWJbWkd/SkdWm3/EVUKRAh2Gij1ZA72mBj0el5bXFQykotFo29M6uk0t6k4Uj6lPxqp1GlotlOQGIYarLoquDy8I4QUh3CCo/ef4UYBo0vEwXHVxrGxjsOpi3PYw7ngYrLoYqjoouT58/ru+KL3xZwEA+OlQAQ9MVPD0rgxevacbulIfMxFC4OyONN68rxd9qcaj0xKOH+Lfj07gHx8fxn8NFTBUdRb1vrsvn8Ird3fhtLYUrtzeUQu5dRgqnr+tHe86vR9ndyzcsfFY2cHXnxzB0TJHn86HoaINrK+vD/39/bO2Pfroo0v6oPvQQw/VbTv//POXemkLmjv6DADa29uXfLxG+843UqzRa3v44YeXdO6l/OyW6/xBEODRRx+dtW1gYAC9vb2LPhYRUaLWqWjG+DMhBLb3tTd8/rHBidW6tFWRtOh2PR8hQgRBCDfuWkRERJuHGhcb2KmIiIiIiNaKqirY0d8OKQVMQ0d3RzQ5YLJQRWkRnXJKZQuuF0DK6TBQW27hESTzScUjTwxDhZCAP6M+0t3ZeMLB8Nh0N/6kvmQ5ax8qCsMQgyNTQAiUqzaqlgshgK726HUIIWqj4grFKrwggOP4KJTYrYiI6GQIIZDXVezKmhhI6zAVCSEEdCkg4lFqbbq
KgbSOvTkTuzIGugwNAkBaleg3NZiKRAhg0vFwourgRNXBYNWt/TdkRQGiouvDDgIEiLogGVJG5wPgBCEmHA8nKg5GLRdVz0cQhgyPLiCrSvTPCBbdNVrCD45NLhjO6jBUvGFvDy7sXngi0oTt4ZcjRdzwxAi+dGAItxwaw3C1tc8MOzMGXjajY9FMKVXixds78LJdnUip80dielMatqX1eZ+z1TFUtMG97GUvm/V9uVzGY489tujj3H333QseeznN7SwEAJa19A/ojfZNpZr/0vTc5z4XudzsNq333HPPks49d7/TTz8dp59++rz7NPrZLuX8jzzyCKrV6oLHJiJaDEOfHSryvABhGDYdgXZ8aBLBJuryoMYtwhHOGIHmsFsREdFmkxQc/E30HkZEREREG49haNjW1w4IIJs20JaLugCNTZYXDOJ4no/hsQJGJ6IOPOm0ASkEdF1Byjy5m2O6pkJVFQiIWq0ouZ6eJqGi0fHp0W16vI9bGy+/drWViakKbNuDHwS1bkVtuRS0GR03smkDuq4gCKJgERC9Ht5wJiJaHhlVwY6MgZ0ZA30pHbuzBnZnTfSYGjKqAimioFGHoWJHxoAuJaQU6DE1dOrR2DQ54z9lxn+GlMhqCjp1tdYFqTelocfUMJDS0aap0GUUTqr6AUZtD8cqDo5WHBwt2zhWtjHSYpBlKxFC4Nn9eejKdGjnkakqnioufE9fkQLP6W/DK3d3Ib1AqCdRcn0crzjoNNQlX/Ncp+VTeOu+XuzNNR8J+8zefN04P5qNoaIN7pWvfGXdtp/+9KeLPs7PfvazWd8PDAzg0ksvXfJ1LaSnp6du29jY2JKO5Xkepqam6rbP163HMAxcddVVs7bddddddQGdhRw4cADDw8OztjX6O5nr0ksvresytRx/b62en4hoPslKsihcA4RhFCza0SRUVLVdTMQFmc2i1q0oLnwlo9CIiGjzYKciIiIiIlovMmkDvV15AEBHWwaZtI4wBIZGCxgaLaBUthAEQe35YRhiqljF8aFJVKouIIC2nInOtgwAoD2/8LiPViTdikxj9gi07s5cw+cno8OA6UVrtfHyjrcmi9I8z6+NZZsoVOB7ITRNNuzk1BH/3IplC57vw/P8JY2iIyKi5gxFIqcp0GTzmIKhSOzM6GiP30symoJt6SgslPy3bcZ/vSkNHbqKjKbUuiDVQkcyGrvWF49dy2kKlBkBkhBAANR13+EitMjOjIFrTu3DM/vyuKw3h7fs68G+fOvdEPfmTLx5Xy92ZY2Wnm/5Ae6fWN77TRlNwSt2deIF29qhzelqNJDWsbfFa9vKGCra4J7//OfXjf767ne/u6hjjIyM4Je//OWsba961asgVjCRd8YZZ9Rt++///u8lHevee++F788eS5PP59HX1zfvfq95zWtmfe84Dm677bZFnfvb3/72gsdtREqJV73qVbO23XHHHQ3Hwi3m/B0dHXj+85+/qGMQEc2lKBJKnBzX4oCR6/no72lDs7eGY0OTq3R1qyMJVtUKX2u4mo6IiFaGIqJ22CFYKCIiIiKitdfRlkZHexRq6erIwjRVhCFQtVyMTpRx5MQEhkYLKJSqODE8hYmpCoIQMA0V23rb0NGWgRQC2YyxbKGiZJSaqcehoriTc393Hjv6O/D0s3biRc8+C298+SX4nbc/H///t03XppMFW54XwAsCIATsNegEXSxbCIMQluOiVLIBRGPPGt3/SJk6TENFGACTcbeisYkyfD+oey4REa0sIQS6zWgslanMH2lQhEBaVdBhRKPU9mRN7M2Z2JM1MJDWkVUVCAC6FGjX1emAUkrHQEpHf0pHVxykTTj8t78mpym4tCeHy3rzdT+nVmQ1Ba/a3YXL+/JN7zHN9KuREtxgeX/+Qgic25nBm/f1YmdmOkT0zN78imYiNguGijY4Xdfx/ve/f9a273znO4vq+nPjjTfOWuWgaRquu+66lve/44478Bd/8Rf4h3/4B5RKpYV
3AHDllVfWbfv+97/f8jkX2u8FL3gBFEVp8Oxpr3zlK3HaaafN2vaVr3xlUef+6le/Ouv7K664AhdddFFL+1533XXQtOl/eD3Pw9e+9rWWzz0yMlL32t///vfPOiYR0VIloZpkBJrjejB0tbZqbq6jgxOrdm2rodapyJteTUdERJuHAGqr1gB2KyIiIiKi9aGnM4dsJhph1t/dhm397Whvi8Z0JQGj8ckKHNeHogh0dWTQ39MGXVOhKBL9vW3Y3t+xbDfHkhFqhqFCSMD3Q7iuj+39HXj7a5+F33j+ebjk/FOwb1cP8tnUrPNKKaFp0S0oJ66rWPbq11fKFQcAUKlGXzNpvdZ5qZEkkFUq23A8H74fYILdioiI1kw6Hpu2L2filJyJfTP+OzWfwqn5FPbmTGxL6+gyolFqych7IQQyqoL+tI49ORO9plYLKEkhIKWAKgW0+L+ZfLBWtJykELi4J4f/eUoPdmWNecNFZc/HveMr897bYah4zZ4uvHpPN57elcVudilqCUNFm8B11103a5yYbdv47Gc/29K+ruvi05/+9Kxt73znO3HKKae0tP/HPvYxXHbZZfjwhz+Md7zjHTj//PMxNDS04H5XXHFF3XiyRx99FN/73vdaOm/Ctm184QtfqNv+hje8YcF9VVXFxz72sVnbbr75Zjz22GMtnfsHP/gB7rnnnlnbPv7xj7e0LwDs27cP73jHO2Zt+/SnPw3Pa+0Xq09/+tNwHKf2fU9PT13AjIhoqYzaCLToo4LrRR3htve3N3z+scHJ1bisVZN0aEqKXq7rc1UaEdEmkow+S4pMLjsVEREREdE6IITAQG97FGwRgK4qaM+lsb2vfVbAKJsxMNDXjlzGBAC05VPYs7O74Uivk2HoKhRVQkDUxplZi+g2ZMQLYB03CRWtbqeiIAhRsaIaejX+mo6DUs2YhoZ0SgNCYCoOExWK1Xn3ISKilSeEgBTRaLPkv8VQhEBeV7EjY2BvzsTurIldGQM7Mwa2pw2k5nRD8lkqWhH9KR2v2dONd+/vxwu2tWN3NgpTz/Xr0eKKdYsSQmB31sDzBtpW5PibEUNFm0A2m8Wf/MmfzNr2l3/5lzhw4MCC+/75n/85Dh06VPu+vb0df/iHf9jSeR9++OG68z7xxBP44Ac/uOC+qVQKf/zHf1y3/dprr8Xk5GRL5weAD33oQzhy5Misbc94xjNaGkEGAK973etw6aWX1r73fR+/9Vu/hXCBlcrVahW/8zu/M2vb61//elxyySUtXnnkox/96KzxdU888QQ++clPLrjfww8/XBcG+9M//VNks9lFnZ+IqBk9LhRptY49caior73h84dGp+C6fsPH1pLjepiYKteKV63SNQVCAEEAePFrZ7ciIqLNIwkTJV/ZqYiIiIiI1gspBfp68jh1dy8GetuQzRh1AaPujixUKaHrKnZu60R/TxvUBUbDLFUSwjGTUNEigkG6HtWVbHttQkWVqoMwCOF5Plw3gBBAqoWxLUk4q2I5CBF1Z2JdiIho81BE1JlIVyQMRSKlShhyTqiIa4xXVEZVcG5nBq/e0433nNGPK3d0YF/OREqVMBWJU3ImXNbr1g2GijaJa6+9Fq985Str31erVbzsZS+rC9zM9NWvfnVWpx4hBK6//noMDAy0dM4f/ehH8P36G8itjjF717veVTcG7amnnsIVV1yBY8eOzbuv7/v48Ic/jM985jOztre1teHLX/5yy+lUIQRuvPFGdHZ21rbddttt+O3f/u1ZI+FmsiwLr3/96/HII4/Utp1yyin4u7/7u5bOOdPAwACuv/76Wdf7R3/0R7jxxhub7nPo0CFcffXVsCyrtu2Vr3wl3vOe9yz6/EREzRhx0UdXo4KR5yahog4AgBBAf08eFz5tN65+wXm49k3PrXU1Wg+CIMRPf/0YPnP9D/G3X/kRPnv9D3HPQ4db3l8IURv9ZtdGoK1u4YuIiFZO0tJaQfTVZ6ciIiIiIlpnFEUin0t
he3/HrICRkAKKItHdmcWeHV1Ip+bvvHOykhFoyciwxYRrku5GTlxbcRwPwSp+9i5XbABAxXJr1yPlwvUrQ9egqhJhEI2cA4BS2VpgLyIi2sjUuePPGGhZNaYicVZ7Gi/f3YVrzxjAtWf048XbO5CJ79HQ2ls/d//opN1www244oorat8fOHAA559/Pj796U/XQjq+7+PXv/413vrWt+Ktb31rLTgjhMDnPvc5vOIVr2j5fM26+SzU5Sehqiq++c1v4jnPec6s7XfffTf279+P3/u938PPf/5zlMtlANGotieeeAJf+MIXcM455+Av/uIvZu3X1taGb3/72zj33HNbfg0AsHfvXnz3u99FW9t0i7PPf/7zePazn43vfve7qFSiFqdTU1P453/+Z1x44YW49dZba8/dsWMHfvCDH6Cjo2NR50284hWvwOc+97lasCgIArzpTW/CNddcg1//+te14NbRo0fx6U9/GhdccAEef/zx2v7Pe97zcMMNNyzp3EREzegzx58JIAijjj1dHVlc86rL8KH/31V41xv+B17y3KfhvDN3oqsju+h2oyulVLZww7/dgR/f8WhtBVzVdvHt2+/D8aHJlo+T/AySDkxckUZEtHnMHX/GTkVEREREtJ7NDBidvrcP+3b3rFotJgkt6boGCMDzglpH64VoqgohAN8L4cV17tXsVlSKQ0XJ6LPUAqPPZko6GiWhoiSgREREm9PcUFEQhi3f86bltV7uNdE0hoo2kVQqhX//93/He9/73tr/2cbHx3Hddddhx44dMAwDuq7j4osvxle/+tXaft3d3bjllltw7bXXLup8z3nOcxqm+l/0ohe1fIx0Oo0f/vCH+MQnPgHDMGrby+UyPvWpT+FZz3oWstls7dpPPfVUXHvttbO6BAHAC1/4Qtx///24/PLLF/UaEpdccgnuuOMOnHfeebVtP//5z/HSl74UmUwGpmmivb0dr3/96/HQQw/VnvPc5z4Xv/rVr7Bv374lnTdx7bXX4pZbbkFPT09t21e+8hVcfPHF0HUdhmFg586duO666zA+Pg4g+gf1ve99L773ve8hlVreWdlERKqqQEoBgemOPY7nQ0qBXdu7amPR1psjx8fxxa//BE8dHat7LAyBn//3Ey0fK2nR7Thr06KbiIhWTt34M3YqIiIiIqINZDVvthm6CkWRkELURqDZLdZIpJzRCToeTV+JAz4rzXY8eJ6PECGsuPt0K6PPEsnYtySQVLVceJyFQ0S0aalz3lpDAD7LRUQAGCradHRdx9/+7d/ijjvuwCte8Qro+nTy3nGcWSO9+vr68Pu///t45JFHcPXVVy/6XOeccw7+5E/+ZNYvMHv27MFf/uVfLuo4iqLgwx/+MA4fPoxPfvKTOOecc+p+KXKc+l80uru78ba3vQ2/+MUv8IMf/AA7d+5c9GuY6YwzzsCdd96Jv//7v8fZZ5896zHbnr0K4eKLL8bXv/513H777S2Pi1vI1VdfjUceeQS///u/j76+vtr2IAhmvX5d1/GKV7wCv/jFL/C3f/u3s/6OiYiWkx4XivQ4QJR07FnPTEOD4zS/zgNPDqJSba14lXQqcmZ0KuLKBCKizaEWKop/7QgQrUAjIiIiIqJ6SbeiZJyZZbfezTkZm7baHX+S81iWizCIunEntZ5WGLoGGXdmSgJR7FZERLR5yQaBXY5AI4q0/gmKNpSLL74YN998MwqFAn75y1/iwIEDmJychKqq6Ovrw7nnnovzzz+/pfnB8/nIRz6CK6+8Ej/96U+Rz+fxute9DrlcbknH6u3txYc+9CF86EMfQqFQwF133YUjR45gcnISxWIRhmGgvb0d3d3dOO+887B3796TuvZGVFXFu9/9brz73e/GE088UbuGarWKTCaDPXv24OKLL8b27duX/dwA0NnZiT//8z/Hxz72Mdxzzz247777MDQ0BM/z0N7ejtNPPx2XXHIJ8vn8ipyfiGgmQ1dhWS40NXqv8Fpsbb2
WerpyeMnznoZ/+497Gj7uByEeOHAMF5+38HtIEqbyvAB+EECREo7jwTBaX9VGRETrkwJAICoYKULAD0N4QQhdYXtlIiIiIqK5UqaGYsmCaWiYKlq1zj+t7lsoWbWOP5blwvN8qOrKdsFORp9V4jBTapH1HCkFTFNDpeqiajkwNBWlsoW2HKcGEBFtFV4QwlifQxuIVhVDRZtcPp/HC1/4QrzwhS9csXNcdNFFuOiii5b1mPl8Hs973vOW9ZiLtW/fvpMea7ZUUkpccMEFuOCCC9bk/EREwHSnHi3+6m6AUBEAnHvGDjx1dBT3Pny04eP3PHQYzzh3z4KtwqWUUFUJzwvguh4UQ4fFUBER0aYghIAqBdwghJqEisIQ7AFKRERERFQvGQVmGBoQd+9pFAwKwxDlqgPbcdHVngUQdSqSUsD3ojFkpq6hVLHRnk+v2PX6fjBjbFn0NWUu/tN+2tRRqbqoVB2059IoVx0EQQgpuRiBiGgrYKcioghDRURERNRQLVQUF4icDTD+LHHVc57WNFQ0NFrEiZEpbOttX/A4uqbA8wI4rg/TAGzHBcAVaUREm4GWhIokYAfR6jMiIiIiIqpnGBoURQJ+AENXYdseLNuFXaziicMjGB0vYWS8iJHxIqqWi50DnXjba54JIAr0pwwN5aoDy3Jg6hrKKxwqqlgOEEYL5DwvgBDTY9gWIwoileE4PjzfhwoFVctBJm0s/0UTEdG64zFURAQAOLnZV0RERLRp6XoUJkpWnQVBCM8P1vKSWqZpCt748kuaPn7vQ0daOk4SrEoCVZbtnfzFERHRuqDFo6DVuHMdC0VERERERM2lzCiUY+pRrcR2PBw8Oorv/+RB3PXAIRw+Po5qPGpsdLyIcMbn61Qq6hJUrrrx16jjz0opx6PPki5FSbekxVIUCdOIXm8lvvZS2V6mqyQiovXO2xi3Q4hWHENFRERE1JCmKhBSQAoBVY0+MngbZAQaAOzb1YO9O7sbPvbAgeMtjXObDhVFYSLHYaiIiGizSMJEahwuYqGIiIiIiKi5dGrGCDQAlu2ipzPX8LlV20W56tS+T8X7uK4Pz/MRBiEqMx5fbqU4VFSZESpaqmRsWhJQSo5NRESbH8efEUUYKiIiIqKGhBDQtahLUTICzV1gBJplu/GIsPXh6WftbLjdsl08+sTggvtr2vTotxAhfD9Y8GdAREQbgyaTUBE7FRERERERLSQJ1xi6CgjA9QJ0tGWaPn9krFj786yOP3E3o/IKhXNs24XvBQjCsNZxOh1f+1Ik+1qOiyAM4Xk+LHv91L6IiGjl+GE4q/Me0VbFUBERERE1lXTqScJFM7v7BEGIodEC7nrgEG697V584YYf41Nf+j4eOHB8Ta61kf2n9DddjXbPwwuPQNNUJWqPHU4Hqmx2KyIi2hRqoaL4ez8MEbBQRERERETUkKGrkFJAkbJWJ5JS1DoYzTUyXpz1fRJKqlhRmKhYsVbkOpNOQpbtAiGgqbK2aGwpNE2BpkqEAVC12a2IiGirEABCAD5LRUQMFREREVFzuh7datXmjAEDgBv+7Q588Ws/wXd/dD/uefgIRsZLCEPgqaOja3KtjaiqgnP2b2/42MEjo5gsVBY8hj6jWxEQrUwjIqKNL+lQJONRnwDbWhMRERERNSOEmNGtKBln5jUdgTY4MjXr+1rHH9tDEIbwvWBFOv4kHZCqcUek1El0KUpMj0CLjlkqr0wgioiI1g/WioimMVRERERETRlxmEhVo48MMzsV9XXnG+5z4MmhFW8DfdcDhzAxVW7puc1GoAHAvS10K9LnBKocdioiItoUFCGgxAUiNf7qBWt5RURERERE65uuRwuvtBl1ov6exvWhuaGipOMPwpXr+OP7QS34U7Wic5hm4w7Wi5F0Y6pWo2PatgdvRo2MiIg2n6Rm5DFURMRQERERETWXdOnR1Oir74cIguiO677dvQ338fwADz9+YsWuaWS8iO/+6H787Vd+hC99/Sf
42a8fw9hkqenz+3vamgag7nnoCIJg/l8KpjsVRWGilQ5MERHR6qmNQIu+sFBERERERDSPpENRUieKQkVtDZ87PFasC96kauGclen4U4lDP47rwfMCSAGY+smHipLRb74f1jpYcwQaEdHmpsQpCn+B+wdEWwFDRURERNSUrquAABQpocR3XJNuRXt3dCOXMRvud+/DR1fsmh55YrD258GRAn50x6P4/Fd/jC9+7SdNV4md36RbUaFkLTiuLRn95sbjz1zXh++zlQUR0WZQCxXJpFMRC0VERERERM3UFp/FXz0vaNqpKAyjYNFM6doYsZXp+JMEfarxgjDD0CDjz/onIxr9FoWTkm5FpTJDRUREmxkXoBFNY6iIiIiImhJC1HUrSsI1Ugqcs397w/2OnBjHeIvjyRarWRckXVOhxtc419P2b4eiNP7Yc89D849A0zUFQkRdmpJCl80RaEREm8J0pyK2tCYiIiIiWkgyIl5VFEgZBYfasulazWiuE8OzR6A16vhTXMZwTjkJFcWhpSTEtBxS8bEq8Xi1iuUs2P2aiIg2LomoVuTzn3oihoqIiIhofknBSFOjr86MFWTnnbGj6X73P7L83YrGJksYGi00fOzMU/ub7pcydZxxSuPHH31ysFZsakQIUSuO2V4UJrIdjkAjItoMNBn9SqzE4SK2tCYiIiIiak5RJBQ1+gydLOzyg6Dp2PkTI7NDRY06/pSXaYxY1XLg+wH8IIBlR/Ub0zz50WeJtKlBiGixnef5CIMQ5Sq7FRERbVYKu1oT1TBURERERPOaDhXN7lQEAD1dOQz0tjXc775Hji77iq1HHh9s+tgZ+wbm3bfZCDTPD/DAgePz7qvPGYHGTkVERJtDo05FIbsVERERERE1ldRIarUSz0d/k9rQ4JxQEbByHX+ScWSW7QIhoGmyaQelpZBSwtCj11yxljcQRURE648ikk5FrBMRMVRERERE89L1pFgUfWyYO+v+3CbdiiYLVRw5Mb6s1/LIE41Hn23va0dbLjXvvnt2dDd9zt0PHp53Xy0eAefEYSLLZqciIqLNQIsLRKoUEABCsK01EREREdF8jGQEWtyxyHV9DPQ07lQ0PFaE5weztq1Ex5+xiRLGJ8sAgGpcs0kt4+izxHQgKgoVlRgqIiLatKTAdK2I3Ypoi2OoiIiIiOaVrMJSayvQglkryM45fXutFehc9z28fCPQJgsVHB+uX+EGAGeeOn+XIgCQUuC8MxsHoKaKVZTKVtN9k5+BM6NTETtZEBFtfEocJgJmdysiIiIiIqLGdD1aeFVbgOV5GOhtb/hc3w8wOl6ctW05O/6EYYgTw1MYHS8BAAqlaq1jUXoFQkXpVHRMy/bgBwF8L+DCMyKiTUoIAclaEREAhoqIiIhoAUk7a1VKKEr8Idqf7laUTuk4dU9vw30fevz4rHFpJ+PhJl2KgNZCRQBw3pnTI9AUKXD63j685qoL8f7ffAGyGbPpfkm7bM8L4AcBEE53LSIioo1Nk9GvxUlA1uXqMyIiIiKippI6kaZGXz3XR3dHFqrS+HbTiQYLxJaj44/nBzhyfByFYhUhQoxNljE+WQFCIJ81YRraoo+5EE1VoKlyVl2IoSIios2LC9CIIupaXwARERGtb1IKqKoCz/OhqQp834Pj+rUiEgCce8ZOPPrkUN2+juvjkSdP4Gn7G3cIWoyHH28cKurvaUN7Pt3SMdrzaVx07h50d2Rx9mnbaivMFqIoEqoq4XkBXNeHYkjYjgdjBQpURES0ujQp4ATRCDT4wFR8cyAXr7wmIiIiIqJp06EiCQggCKOOQb1duYYdpgdH6relUzompiq1jj/wgKrltDyyzHY8HBucgOv68IMAI+NFWFb0Ob6jLY22XOokXuH8dE2F6zmwXQ8pU4dtc9EZEdFmpUrADjj+jIidioiIiGhBSWtrVUs69szuPnTqnl6kzMYBm+UYgVYoVXFscLLhY2ee2r+oY131nHPwjHP3tBwoSiT
dihw3KhZVuRKNiGhTMOMV1W2agrQiEQKYdDyM2S6CZVyJ5gUhSq6PScdD0fVR9Xw4QbCs5yAiIiIiWmmapkBIAQERBYsAuJ6P/t62hs9v1KlIUxVoWtTxJ+n0MzZRRqXqzHtu1/UxMVXB4WNjcF0frudjcGQKluVBCqC3K7eigSIA0OPRbUlnbtthfYiIaLNS2KmICAA7FREREVELdE1FBQ70OcGahKpInHP6dvz6vqfq9j14dBSFUhX57NKLOo88Mdj0sVZHn50s09BQtVzYcQeLhQpd60E455cdzwvW6EqIiNavNl1B1fdR8QJ0mRo0x0fB9VDxAriBi25Di7oYxYIwhBOEcPwAXhhCCgFFCKgiKjYpMvreC0LYfgA7CGD5Ifx5ClASUackTQq0aWptFBsRERER0Xqkawps24OmKnDdAK7nY6CncahoaLSAIAgg5ew17ilTh+taqFouMikD5YqNcsWGokrkMiZyGTOuxTgoVWxUqjYcZ3qRm2W7GBkvwvdDKIpAb1cehr7yt7w0LamNJaEidioiItqskvFnPjNFtMUxVEREREQLSubQ63MKJzOde+aOhqGiMATuf+QYnnXRqUs+f7PRZ71dOXS1Z5d83MVIClPJCjrH8eD5AVRl/TV+DMMQk4UKjp2YmLV9dKKIzs72tbkoIqJ1SgqBgZSOcdvDhOMhryswFIkx24UbhBiqOsjrKrwghBMEcIMQC9WSBFD3HAFAlxKaIhAEIfwwWunmhyECIAoqBSFs30WvqTFYRERERETrlq6ptVAR4M7bqcjzA4xOlNDblZ+1PW3qKBQtlCo2giBAOmVEXbA9YHKqgsmpSt2xQoSwbQ9Vy8FUyQLCqLt2b1d+1eozyYI71/MRIkQQRF2LNI5PJiLadJLaDMef0VbHUBEREREtyIwDNZoWffW8AL4fQJlRsBnoaUNPZxYj46W6/e975CieeeE+CLH4G6SDI1M4cmK84WOr1aUIiEJFUgC+H8LxfOiqgqrlIJcxV+0aFhKGIQolC6PjJXiej8Cf3ZnIslx4ng9VZaGLiGgmIQS6TA2GIjFsuTAUoM/UMGZ7sIMAk3NWHytCwIg7C/mIikvBjJBQiOkQkaEIGIqELgVkg/fBIAzhhYAfhpi0PXhhiFHbRY+pNXz+TGEYLum9lYiIiIjoZOhz6kSu66O3MwdFioY3XgdHCnWhItPQkMsYKJZtVKouKlUXEEDK0JBJRwEjRUp4no+q7aJqObBsF8GMUkc6paG7Iwe5ioF8VVUgZbRQwHX9KGDleAwVERFtQirHnxEBYKiIiIiIWqDrKoQUUCChaRKuG8BxPaQUvfYcIQTOPWMnfvjzh+v2H50o4eHHT+CMfQMtF3o8z8dP73wMP7/rCTT7zH7mvtULFQkhoOsqLNuDZbnQs+srVFQsWxgdL9ZagXu+j8litcHzbHS0pVf78oiINoSspsBQBE5UXDgI0GOqmHR9OH4IXREwZBQOUud5LwvDqAuRFKgLBSlCwFQk/DCEG0QBJCkEdAEAApqpYdhy4QQhRi0P3abaNIg05fgoez6kEMiqEllNWTCERERERES0HIw4TKSq0WIzN17A1NOVw+BIoe75J4Ynce4ZO+q2d3VkkcuaqFQdlKs2XDdA1XJRtaKAkarIulHuihQwTQ2ZlIF0Sq875mrQNQWW7cGJQ0WW4yKbMdbkWoiIaOUoYrobtR+E7CpNWxZDRURERLQgIQQMXY3CNJoK13XguD5Sc/I0TztjO27/xcMNQ0Df/N5/4/2/+QJkWwjhHDkxjm/ffh9GG3Q9SnR3ZNHTlVvsSzkppqHBsj3YjgsgKnqtB1PFKgaHpwAAXhCgUKyiWLbgu3bdc0sVi6EiIqJ5aFJiR0bHsOWi5Pro0Ot/bRYAdEXCkKLWoSgJCQkhoMY1piRElFYlUoqEPmckQxiGtX1HrGi8ZrepYcRyYQcBJmwPnYY6qxuR7QcYjzsaAVGHoynXR9HzkddUZFU5b/c
idjciIiIiopNV61QUd0L2/RBBEKC/p61JqKh+W+1YmgpdU9GeT8N1fZSrNirVqO6UBIpMQ4VpaEiZGnRNXfPPs7qmxqEiD4ABZ05nUyIi2vgEovsiUgj4cf1GAesptDUxVEREREQtMQ0tChXpKsoVJw7WpGY9J5cxccquHjxxaKThMTLp+Vdt2Y6Hn9/1OH525+MLXs9Zp21r+dqXi2loAKqwnOjGr217dWPg1sLEVBkAUChZmCyUa63ADV3F3NhTpeqsi2smIlrPpBDoT+mYUjyM29ENAkORMJUoJGQqsmFXoHDGKDMR7zMfIQQ0IaBJYFta4GjZgS6BLkPFqOWi4geQjo8OQ0WYhIfcqCOdIgQ6DRVBCEw5Ucho0vFQdAXaNAXpZNV4GMLxQzhBANuPimBSAF2GBpPvBURERES0BLqmAAJQpISiCvheCNfzMdDThntwBAAgRLQgrL+nDTu3dbZ0XE1T0K6lo4CR5yMIAmiqAinX1+fWZNRZFCpCXCMjIqLNRInrPuqMUBF70tFWxVARERERtSQK1Ey3uHbim5pznXfGjoahov6efAsryUJcftFp+K+7Hm868gwA2nIpXHzenlYue1Empsq475FjGByZwuteelHd9Uar4VArlmmqAst2FwxLrSTPD2DHN7ynihUEAaDrCjryaUh4KI5NP9f1fBghUKrYaMulmhyRiIgSbbqKtgadipqJQkKAtoSVa5qU2JbWcaziwFQkOg0NY7aLkhe939pBADeI3hwzqkS7Pj0aLaVoKHsBCq4PPwwx7niYcgXCMETQ4FxJEMlco3ERRERERLSxCSGgawocJ6qN+J4H1/Wxd1c3rnzOOejvyaO/u60WvlmKqAvS0vdfSXr8utz4s7rj+AiCEJJjcYiINo1aF2oJIIjGnxFtVQwVERERUUvM+KZq0uLa84KGHW9OP6UfKUND1Z69SquzPbPgOQxdw/HhyXkDRafu7sVvXHEuUuby3AitWg4eevwE7n/kKI6cmKhtHxotoL+nbdZzpYzHwNkeLNuFpiqoVJ01DRVVKtGIM8f14PtR94mBnjYIIWBbs9tvV20X2SxQKlsMFRERrUOGItGf0nCi4iCtSgShignHqwWLFCHQrqu1LkRJx6SK5yMbdycquQGKXhQuAqKuS5oQMBQBXZHQhMCJqgMnCBGEYcOOS0REREREC9E1NQ4VqbDgwfV8dLVn0dWeXetLW3GaGtXGfC+EFwRQpYTtuMtWqyIiorWnSgEPUaciAPCZKaItjKEiIiIiaomuqxBSQAaApkm4bgDb9ZBWZhdMNFXBlc89Bzd//+5Z2zvbFg4VAcDwaKHh9nRKx4uffTbOPn1bCx2PWlO1HPz1P9wGz6/v43DfI0frQkUAaqEi2/GQywAVa+6AsdVVrkbnT0JchqE1/flULbe2D1fQERGtT2lVQV9Kx2DVQVZTEMRjz1KKRKeuQkoBAaDT0NCuKxBCoOoFGLNdWH6AvK4gq0lUvQCaIqEJ1L0vKHHrbjcIYSh8LyAiIiKixdM1FYAddxSa7tqzFUgparUx1/Ggmjpsx2OoiIhoE1HE7FCRN99KaKJNbn0NoiUiIqJ1Swgx3a0oGYHmeA2fe87p2/GGl12MU3f3IpOOCiqtdCoCog5Bcz1t/3a8543PwTn7ty9boAgAUqaOHQMdDR974MAxBEF92CgZA2fFIR7LdhGsYevTShIqisNNKVNr+lzP8+F6PsIgRLlqr8r1ERHR4mU1BT3xv+d5XUWPqaHb1CClgC4ldmQMdBhq7T0xpUbbBtI6dBl1L8poCnQpIISAKgWymgIlfr4Wh0pdtu4mIiIioiXSazWi6DbTRgwVPX5oGD+64xH86t6Di140psfdipxkXHGTGhkREW1MSZhIiWsoHH9GWxk7FREREVHLDEND1XJh6CrKFQeO27xgctqeXpy2pxdAVFhpNQs0M1SUz5p4yfPOrR1nJTxt/w48dXSsbnu54uDJI6M4dffscxu
6CiGi8W+e50NVFVi2g3Rq9Ueg2Y4Hz/MRhGGteJUy5l8VV7EctGVTKJYs5DLmalwmEREtQZuuwg9DjNsezHjUaIeuonNGmGiujKogk1VQcn04QQBdSpiKhBoXwAYrDkqeD00KWD5DRURERES0dLoWdShS405FnhcgDMNlXQy2UsIwxA9++hB+de/B2raf//cTeOfrLke2xVqJpilAFXBdhoqIiDYjVUa1GHYqImKnIiIiIlqEVNylJ+lU1GrBxNDV2j4LcVwfnW1pXH7RqXjPm56zooEiADjz1AGoauOPRPc/crRum5Sy9lqs+PUn3YJWWyXuNmTbLsIAUBQRFbXmUY2vtVyxEfIXISKida3T0LAtraPTULEzY6DLbD7icqaspqDT0JDVlFqgCAD0eNSZJtipiIiIiIhOTlIbURUFUgJhGAWLNoK77j80K1AEAMWShe/86P6Wj1Hr4h0vuLPjjtZERLQ5xOu7kEyNDwH4rKfTFsVORURERNQyIwkV6SogAN8P4fkBVGX5csrvfP2zl+1YrTB0FftP6ceDB47XPfbIk4OwHQ+GPvsjk2GosB0PluMimzZQsdamcFSuRAGhZBTbQl2Koud68HwfKhRUqg4y6dXvsERERK1LqwrS6vyB0Vbp8So7LX7fdsONcdOHiIiIiNYfRZFQVAnfC6BpKmzbg+v5Cy52WmuDI1P4wc8eavjYgYNDGB0vobszu+Bxkk5NjusjRIggiLoWrffXT0RErVEACAAQAooQ8MMQXhBCUdZ/Rz6i5cZORURERNQyQ1chpIAUApqaFE82fnvnc8/Y0XC75wV49MnBuu1mHK6y4jBR1XIQrHK3hzAMUbGiUFE1DhWZpjbvPskqump83aWyvYJXSERE640RF75UERXGghDw2a2IiIiIiJYoqTNslBqR7bj45r/fBd9vHq6/454nWzqWqiqQUgAhR6AREW1GQoha9+dkBBo7FdFWxVARERERLYoZd+1Juvc4m6BgcsrObmTSjbv8HD42Vrct+Rl4XgDP94FwulvQaqlaLsIghBcEcJyoeJWMp2smCUOV47FpxYq1shdJRETriiYlBAApRK0g5rIgRkRERERLZCQj0OKx8q7nr+XlzCsMQ3zn9vsxPlWZ93n3P3IUpUpri7BmdisCotASERFtHkntJBnU4LOEQlsUQ0VERES0KEk3nKRwshlWYUkpcca+gYaPDY0VGz5f12e//qRr0GqpxMGgpFuSrilQFhhDl4SOLNuDHwTwvQDVVb5uIiJaW0b8XqHFq+1cdioiIiIioiVKaiPJyK+5oaIgCDA0WsA9Dx3B9/7zAdz4rV8iXKNQ+90PHsaDjx1f8HmeH+Cu+59q6Zi11x13aNoMNTIiIpo2t1ORxxoKbVHqWl8AERERbSymHoeKkk5F67y1dasGetoabh8ZKyIIwqil9QymrsFxfFi2h0zKWPVwTrkanc9qcfQZEK0c1DQJ1w1QtVxk0waKZQsps3GXJiIi2nx0KWD5cajIB9wgAKCs9WURERER0QY0Pf4s7ugcd+wZmyzhlh/cg+HRArw5o8amilW059Orep1DowV8/ycPtvz8X9/3FJ55wam10FAzuqoCsGHXQkXsVEREtJloc0NF7PZMWxQ7FREREdGiGEbSqUgFBOD7YV2BaCPq68433O56PiYK5brtZq3rT1Qwqlruqq228/2g1qEoOf9Co88SmZQBALUQVKncWktvIiLaHPS4U5Eq4xEVXGVHREREREs0HSqSgACCEPA8H5mUgeNDkw3rRYMjU6t6jbbj4Zv/fteiale9Xfna6Pj5JJ2akg5Njusj4OdrIqJNY3r8WfTVZ6iItqgtHSryfR/79+/Hpz71KYyMjKz15RAREW0Ihq5CSgEpBDQ1nh2/Cdo793TmIETjx4ZH60egGXGnJtf14QUBwiCsBXxWWjJqzfF8eF4AIQBDby1UlHQlqlgOgjCE6/qwV+m6iYho7elxIUyfMf5srUZQEK1
3rBsRERHNT9MUCCkgIKJgEaKAjWlo6Ghr3I3oxCqHir73nw9gbLJ+sRgAKIqEHncjEgI4+/RteMfrL8dbX3VZS92Ukg5NvhfCCwIg3DwdvYmIaHr8mbKI8WdhGMILQth+gKrnc2QabQprPv7s1ltvxR/90R+19NxrrrkGv/M7v7Os53/sscfwoQ99CH/wB3+Ad73rXfjTP/1TdHZ2Lus5iIiINhvD0FCtOjB0Fa7rw3E9pFMbe4SWpinobMs0LDQNjk7hzFMHZm1LCk+O68N2XKimgUrVWZVRYpVKPPosDheZhlY3nq0ZQ1ehqhKeF6BqO8iYBooVu9aBioiINjdDJp2KBASAEFH7bq1ZspZojbFuREREtL7pmgLb9qCpClw3gOv5SAHo72nDxFSl7vmDw4VVu7aHHz+B+x452vTxF15+FipVB7bj4pLzT0FbLrWo40sZhalcL4DnelANHbbj1bpbExHRxqYlNZS4ZBIi6laUhIyCMETJ9WEHIYIwhBeGmJshEgDadRXZBUZqJtwggCKiRd1E68Wah4q+/OUv45577oEQYt7VkUIIHDt2bMWuw3Vd/N3f/R2+/vWv4+Mf/zje/e53r9i5iIiINjrTUFGtOnGbaxv2JuhUBAB9PfmGoaJGnYoAwDBUOK4Py/KQMQ1UrdXp+JO04E7Ot9hiVdrUUShZqFbdKFRUstDdkV326yQiovVHkQKKEPDDELqUsIMAbhBC29J9jGk9Y92IiIhofTN0rRYqAtzaKLCB3jY8/PiJuucfG5pAEIQtL446GT+78/Gmj52xrx8XPW03xEnetNU0Ba4XwHF9mAZgOy6AxYWTiIhofVJFFAqCmK6l+EEIRRGoej4mHL/hSDQB1IJBThBgwvEQAsjNEyzyghBjtgsnCCEF0GVoMJWlF2v8IETR8xGG0XnVVXjfpc1rTcuGExMT+N73vte0MCSEqP230pJrGB8fx7XXXot3vOMd8LzNcYOUiIhouaWMqBtPMjt+M4w/A4C+rnzD7UOjjVfRmfHIsahgBFQtZ8VHyDiuB9f1ESKE5SwxVBR3lapaDkKEcByP7bmJiLYQQ4l+x44nVMBlK25ap1g3IiIiWv+S8WGaNj0mHohCRY1ULRfHhiZW/LomCxUMNhm11p5P4WXPP29ZPkPo8et24tdt2fx8QES0WQghoMRhHDV+z3CCEGOWi1Hbgx+GUIVAh66i29TQl9KxPaVjR8bAQFpHX0pDPn6fnHQ8FOP3irksP8BQ1YEThBAAghAYs1w4QbCk6y65PgarDoquj5Lnx8de2rGIgDUOFd10001w3ehm2MxC0MyCUBiGCMMQvb29OPPMM1f0epLzhmGIf/zHf8QLXvACVKvVFT0nERHRRmQYUcFE11RARKl3z2v8gXgj6e1uHCqaKlZh2fVdiJKRYY7jww8CBEG44l2byvHoM9v2EAZRxwlDX1zzSUNXoUgB3w9hxd2Opgr8zENEtFXocfvupI03Q0W0XrFuREREtP7pcU1CjRPrSaeinf2dUJt0WHj8qeEVv64DB4eaPvaqF1+wbCPKpkNFUT1osyy8IyKiSBImSt7SJhwPFT+AAJDXFPSlNGQ1BSlFQpei1okvia226eqsYFFhzvtE0fUxYrkIEI2s70/pMKREAGDU8uAtombjBgGGqw4mHK92vKUei2imNQ0V3XjjjbU/J0Wgmd/v2rULH//4x/HAAw/gxIkTePvb376s55dS4nnPe17dirfk+5/+9Kd4y1vesqznJCIi2gx0TYWUUfvOZEWa0yRlv5H0NQkVAcBwg25FqiKhxfNikjBRpeqszMXFqlZ0fMuOvprm4otgQgik01G3omLZAhCt4Av4SwUR0ZagxwUuLf7KUBGtV6wbERERrX9JqCYafwb4foggCKBpCvbu7G64z2NPNQ/8LJdmoaK+7jy293cs23m0uC7melFXad8PNsXCOyIiimhzOhUB0WKt3pSONl2FFAJpVaLX1LAtrWNnxsD
erIl9+RS64gBrm66iLX6/mHJ9FBwPQRiNO5uM7ytkVIkeU4UqBbpNFZqQ8MMQo7aLYIHpCEEYYsrxMFR1YQchpBBo11X0prRZxxqx3Ibj2ogWsmahoqmpKfzsZz+rrSybudpr586duOmmm/Dkk0/iwx/+MM4666wVuQYhBH74wx/iqaeewkc+8hFkMplakSi5lptvvhkf/ehHV+T8REREG1nSpSdZkZaMANvI8lmz6Uq1obFiw+3JCLSkk1ES+lkJYRiiXLGj88TttFNLXFmXz6QAAJWqC9fzEQQhpopcaU9EtBUY8fI6Pf593AvDBQtURKuNdSMiIqKNQdcUQACKlFDUOLQeh2pO29PXcJ+h0eKK1iAs28WhY2MNH9t/Sv+ynktTFUgBhAHgetFomZXuYk1ERKtHjUNFihAQANo0Fb2mCl0KKELEYSIDeV1FWlVgKLI2Mq3DUGvBoryuoi0O4k65PgarLipe1PGoQ1fRaWgQIlrILUUULFKEgBuEGLW8hiPBAaDqBRiuuii4PkIAKUWiz9SQi0NMUgj0xMfywhCj1vwhJTcI4PgclUazrVmo6Pbbb4fvT6e1kxVn73jHO/DAAw/gVa961bLMs23Fjh078Kd/+qd48skn8ba3va2uQPTJT34SDz/88KpcCxER0UaRhFl0dfbs+I1MCIG+7lzDx4YadCoCpsNVMzsV+Sv0oduyXQRBCD8IaudbartuTVOQirscFUpRt6KJqfLyXCgREa1rySo7GRfAAHYrovWHdSMiIqKNQczoYp10K3LjGtGpe3qb7reS3YomCxW05VINH9t/SuOg08nQ4pvEbjwCLVl4RkREG58W/95pKNFosryuQAiBrKZgVyYKE82nw1DRbSbBIqUWLPLDEIoQ6Daj8WkA0Gmo2J0xoEsRdyzSIIWAHQSYmBFYDcIQRdfHiYqDUduFGx+ry9DQbWpQpYAmBfpTOnQZhZx6TQ2KEHAahJSCMETJ9TFYdTBYdTFkuRjnexnNsGahov/4j/+o/TkMQwgh8Hu/93v40pe+hGw2uybX1N3djX/4h3/A9ddfD13Xa9s9z8N11123JtdERES0XiVhFsOIQ0XrcBWW6/mYLFQwVay2HPTp7Wo8Aq1ZqCj5Odi2B8f1EAQhJgqVpV3wAsrxaDXLcYEQ0DQJNS7YLUVSYCtVLPhBANf1a+PQiIho85JCcAQarXusGxEREW0c0yPQ4nBN3KmoLZdqOmr+sYPDK3Y9/T1teO9bnof3vPE5eN6l+7Gtr33B61lIEDSvK+l6VJtJamPsVEREtHlkVAVSRCEdNf5vIK2jP6XXOhItpF1X0TMjWNSuq9EINVODqUhIEQWAOg0NSnx8Ja7ddBkqBICyF2DC9jBhezhecTDpePDCaNRZTlPQn9KQVmWt89HOjIGspmBbWq9d98yQ0rjtwQtCTDoeTlQcTDge3CCEAGrn81grotj80bkVdNttt9VWdAkh8IY3vAF/8Rd/sVaXM8s111yDfD6P173udQiCAGEY4gc/+AH+67/+C8961rPW+vKIiIjWhSRMlLS59oMQnuefVMhluQRBgKliFYWShSRwX7Uc9Pe0Lbhvs+LSyFgRQRBCzvlFQVUkMikd5aqDqWIVPZ05TEyW0ZFPQ1GWN79diUefWVYy+kyf7+kLMg0NuqbAicNE7bk0JqYqyGXMk75WIiJa33Qp4QQ+NClg+QwV0frDuhEREdHGEYWK7OlORd50t8HT9vQ2XKj11NFROK5XCyQtNyEEerpy6OnK4fJnnIZi2cLkVKXlTodhGGJwpIADB4fw2FND6GhL49VXXtjwuVEXb7vWxZuhIiKizUORAtvTOgpuVEPJaUqt6/NitMUdjUYsFzlNQVaVEHFYaSClQ59xL0GTEtvSOo6WbZiKRIeuYtzxUJrx/qoJiawmkVajUBIAmIpEj6nVxt4D0fi2bSkdxyoOdAl0GSpGLRcVP0AlXsQMAKoQyKgKsprEiOXBCQK4QVg
b/0Zb25p0KhocHMTjjz9e+76npwd/8zd/sxaX0tQrX/lKfPzjH68VrwDgK1/5yhpfFRER0fqhayoURUJgus31WhdNwjBEsWzh2NAkpopRoMg0VQgJWLbX0vU1CxW5no+JQuPxYG35qONPueJMdyta5lFijuuhGrccrVrRh33TXNros5ny2ShAVCxbCBGiWnXYppuIaAvQlbhTEcef0TrEuhEREdHGouvJwrPollMy/gwATtvbeNyY5wd46sjoyl9cLJcxsXNb54LPGx4r4Ds/ug+fuf6H+PI3foqf/OoATgxP4fGnhuE16YKtabPDVI47e6wMERFtbEYc1mnX1SUFihJtuopeU4u6AcUhnh0ZY1agaOY5+9PRouKMpiCvKRAA0vG19KejsWlRN+qo69H2tD4rUJTQlSikJBAFjzqN6fsKpiLRbWoYSEej3aQQUOOX6PG9jGJrEiq6//77a38WQuB3f/d30dm58Ie51fa7v/u7OP/88wFENyn/5V/+BY7jzL8TERHRFlLrVhQXjxx37UJFlu3ixMgUxibK8P0QmibR251Df3cb0mb04btctRc8Tk9nDlIKdLalcca+fjznktPx2pdchPddcwU62zIN99E1FZlUdI7JePTZxFSlabFpsWzHw+Hj40AI2K4HzwsgBGAuMK+5FZm0AUUR8L0Q5Ur0OWd8cnkDUUREtP7oMioHaHGxyQ2X5z2LaDmwbkRERLSxJIvNVC0ZfxYgiEPr23rbkU417rT82KGVG4G2VKWKjf9+4HDdeHjH9XH4+FjDfZLX73kB/CAAwulRaERERDPldRV7ciZ2Z83amLNmMqqC3nhhcZuuYiCloysemSYAZNVovNmurIG8rs7bjc9QJAbiYFFajYNEKR09poZUXBtKOh4l3Yk4/owSaxIqeuCBBwBEBRdN0/C2t71tLS5jQVJK/PEf/3EtUT41NYWf//zna3xVRERE64epRx9oDS0JFfnzPX1FBEGIkfEiBkcKcBwfUgId7Wls62uvhYnScaK/XLEXXCmmaQo++O4r8d63XoHXvuQi/I+LT8cZ+/rRnk/P+6G8PZ8GAFSqbq1b0eQydCuybRdHjo/D9wI4rofhuGV4OqVDypP/KCeEQD4bdVoqlKoAoq5Fnrf6f5dERLR6jLhTkSoAASAIo1GmROsB60ZEREQbixHXh1QpocTtDVwvCtVIKXDq7t6G+z12cHjddfTZta0LRpNFXI8dHGq4XUoJVU26NEWv22KoiIiImlDisWetyOsquuLOQooUUIRAh6FiV9ZEf1pHOh492oq0qqAvDvqmFAlVCkgh0K6r2J010K7HIeH4Pgg7FVFiTUNFQgg897nPRU9Pz1pcRkte+tKXYmBgoPb9XXfdtYZXQ0REtL4k47eSTkVrMf6sWLaiDjsCyGVNbOvvQFs2BQGBVPwBOW3qUTceP2zpGpO21YuhaQoy6dndisYX6FYUBOG8xTPbdnHkxAR8P4DtehgaLcD3Q+i60rRr0lJk0wakABzHh2U7QBh1WiIios1Lk9GqtqitdXzjh8UiWidYNyIiItpYpBTQ4xuRuhovPHOmFyud3mQEWrFsYShePLVeqIrEvv+PvfuOk6Ss9sf/qVwdJ6fNeReWsMAmkFWCIMiCgAgIKkEuCnoV9hpB8RJUlKsg+QuKCIIYfmQQZAERRJZlYQnLsonNu5OnZzpVrt8f1VUzPd09090TumfmvF+vvTNd3VXP09VeuubUec6Znv3aY/P2lpxxHLdakbvgTtWotTwhhJDhUSXxmBaQMDUgYUZQQo0k5J2U1F9Q4DA1IKFKclqxzQhKqJUFCCzrVbXmUz91WnxGUkqSVLR582bv9yVLlpRiCnnjOA4rV670HlNwiBBCCOnlVioSBQ4M6yTJjHYLNLelWXVFADWVAfAsC1HkMKWpCtMmVUOS+LQEo3hi8BZoxaoM9VYrUnUDtmWjK0srMcuy0dLWgy07WrB5ewt27+9EV3ccep9KT0r
/hKK2bpimDUnk0VgbBpelN3KxOI5FICABAHpiTnnvSE/CK1VOCCFkfJJS3yVuIIqCRaRcUNyIEEIIGXukPjEiAGnxoVnTasHmuPm5OUf1n1KaOyN7ElSkJ4n2zljW50S3ineq8rOiUqUiQgghw0fkWEgcO2A3hXxJHIsaSUBY5L2WZ0BvfEhIbTJtGxYtQCMoUVJRV1eX9/vixYtLMYWCLF261Pt906ZNJZwJIYQQUl4EgQPPc07STqoE50gm7fSnG6az8o3pbXFWWx3EjCm1CPidJBm3tVfQTSpKaiNWWlsQOART40ZSlX66etKrFamagZ17O5xqRjYAG0gkNLS2R/HxrjZs392O1o4ep+WZaUHRdLS0dcOyAFni0VAbGpa2Z/2FA855SiR16IYJy7LRHU0O+ziEEELKh+gGi7ykotzV9QgZTRQ3IoQQQsYeWUqvZq31WTgliQKmT67Jut+WHa0jP7kCzZlRj1z3bDfvyJ4E5SYVqalkIlWlSkWEEELGFj4VH2JTbdEAJ7GIkJIkFUUiEe/3yZMnl2IKBTnooIO83/vOnRBCCCFAKOgk0bhJPPGkNmpjJ1JjyRIPnmXBcSyqKwNp2fqhgJx6jQieZ2FZNpLKyAV2KsI+MAyQVHQomg7bstEZcVax9UST2Lm3A5pmwDBNNLd3Y29LFzq741BUDTZsaJqBrohTJUhRdbS293gJRfU14RFJKAKchChfqp2dW62oqzs+YglYhBBCSk/kqKw1KU8UNyKEEELGHimVTOQm1+i6kRZTmDujPut++1oiiI3iArV8+GURUxqrsz63JUdlJTeZSjdMWLYNy7IpsYgQQsiYwjEMuNS9FT7106BYEUEZJBVVVFSUYgoFqaqqAgDYto3u7u4Sz4YQQggpLxWpSkB+nwiWBQzDgjJKQRM3qcgvOwlNwYCUUf6zb7KMfxRaoAk8h4DPmU93j1PpJ9KTxP7Wbuxv7YZt2VBUDfvbuqEoBnTdQk9UQXNbFHv2d6GtM4p4UkVC0dDS0T+haOilTQdSEXI+y1hCgWlZ0HWz7AJ7hBBCho9bqUjs0/6MkklJOaC4ESGEEDL2SKlKRQLPgWEBy3ZiRK5cLcUAYGuO6j+FWP/hLjz94rvYvL0FumEOvsMg5s3MPt89zV1ePKovnmPB8yxgA5rmxMWSlFRECCFkjHGrWfOpLBKDwkQEJUoqUtXem1NjITjUd449PT0lnAkhhBBSfiRJgCjyYMDA71YrGoVEFMMwoWpOSWm/zwlcuVWJ+gulEp/cakpJRYM1gi1eslUr6om6CUZxNLdFYRo2RIFDbVUAQb8EjmVgmjbiCQ1tHTG0tkdhW4BPFkYloQhwSpWLAgfbAqJxJTXfxIiPSwghpDQk1q1UxICB05XToKQiUgYobkQIIYSMPTzHgkvdgZTcVmC64T1fXRlATWUg677D0QLtvY/24p0Pd+PPT6/Fr+79B/767Ft4d+NuJJTiKmrnqqxk28DWndnn675vJRWvGq1Fd4QQQshwcReeuZWK9BG8j0LGjpIkFQUCvReOmjZ6LVKKpeu9F36CIJRwJoQQQkh5CgedZJ6gWwkoqcEa4bKYblBIlnjwHAeWZbxKRP2FgjLAOMEdQWBh2ci6qmy4CDyHYCqByU3KcdudRXqcZJ1QQEJjXQWCARm11UFMaapCY10YFSEZgsCCZRmEghLqqkOjklDkcj9LNzEsqehUtYIQQsYpju0tay1SCzRSRihuRAghhIxNspiqVpRKrtH6JBUBwNx+1X8qwz4sPmQGFh88Y0jjJhUNu/Z1eI91w8RH25rx5Op38dZ7O4o6Zm11EFVhf9bnNudogSZJqWQq1fDmRQghhIwlQio+5P40KKeIAOBLMWgoFPJWbkUiEUyZMqUU08hb39LVfQNbhBBCCHGEgz60d8YgSyJ4nvVaoOVK8hkOXuuz1BjBgJzR+szFcywCPgnxhIqAX0KkO4l4UkMwR2WjAcdVNMiiMGiiT0XIh1h
ChaIYiEQTiMYVmIYNlgGqq4Je0pGLYRjIkgBZElBVUbrrDZ8sAohD1y1Ytg3WAjTN8MqYE0IIGV9ElkHStMGzgGpRUhEpDxQ3IoQQQsYmWeIRT6gQBQ4AoOvpbcjmzWzA3uYI5s6sx7wZDaitDuaM5RRiy45W5FoPlauN2WAYhsHcmQ14893tGc9t29kKw7TAc+nr9iUxPZlK00yYpgWOK8n6fkIIIaRgve3PnJ9U0ZoAJapUNHXqVO/3bdu2lWIKBdm6dSsA5yKyqampxLMhhBBCyo8gcPClknvcJJ9YQhmx8QzTgpJa9eWX3aQiaaBdvAo8AZ/zOkXVYZi50+wty0ZbRxQfbN6LF/+9EQ8/uQa33Lcav7r3H+jqiQ86R57nvDlFupNeu7Om+sqMhKJywnEs+FS5cs0t160ZA+1CCCFkDBO59BVolFREygHFjQghhJCxSUpVKhLd5Jp+8YTpk2tw4VlH4RNHzEFdTWhYEooAYNPHzVm3V4R8aKgNF33cuTOzt0DTdDOtMpJLFHgwDGCaNnTDSagq9xZo7jwJIYQQoE9SUeqxaduwKLFowitJUtGsWbO83995551STKEgb7/9tvf7nDlzSjgTQgghpHx5LdBSCTNJRYc1Qv12k6kqRZLEg+c5MCzjJQvlEgzIYFgGAs9BknjYNpBIqjlfb5oW/t+fXsFjz7+D19/ehm072xCNO4lSre3RvOZZFQ4gFJAgCCwqQjIa6yogpFbrlTN3RaGaWllX7gEwQgghxZNSwSI3aERJRaQcUNyIEEIIGZvc9l8CzwEMYFr2gAu6hsPHu9qwbWdb1ufmz2oYUuLStEk1XvWh/rZsb83YxjCM93pVK/8WaIZpobm1J22bRX8PEELIhObGh1iWAcdQtSLiKElS0aJFi7zfn3/++VJMoSD/+Mc/vN/7zp0QQgghvUIBGWCcVVmCwMG2gXhyZAIniVRAxic7K+CCfmnQdmQsy3gJT4FUNaV4Ivf8BIFDdY42ZM3t3Vm3ZxuzpiqIyQ1VqKoIDDrHciEK6SsKKamIEELGLylVqUjsEyiiFWik1ChuRAghhIxNosCDZRmwjLOoC+htBVYI27a9tve5GKaF1f/eiIeeWJOz2s68mY0Fj90Xz7GYNa0u63NbtrfAznLdLPZPKirjmEpzazdMM/3cxRK5F+ARQggZ/1imN5mId2NFlHA64ZUkqejoo48G4FwYvvnmm9i0aVMpppGXTZs2Yc2aNd7jFStWlHA2hBBCSPniOLY3acfvJu0MfyDCNC0vycWtThQKyHntGw76AKRatDFOgGegMs8NddlLZOdbqWisclfVuYE/VTOyBsoIIYSMfdlWoFG1IlJqFDcihBBCxi43puBWQe7fAi0f+1u7cfN9L+Cvz76FdR/sRFtHNC0u0dkdxx/+9m/85+3cbVJlScC0SdUFj93fvJkNWbd39STQ3hnL2C4JblKRE7tSlPJMKuroiiGeUDMWFMRSVboJIYRMXGIqVpRah0ZJRaQ0SUWLFy9GdXW1V3byl7/8ZSmmkZdf/OIX3u8VFRX4xCc+UcLZEEIIIeUtHHKSdoKpZB9FNWAMc2/2pKLBtp1KQoLb+sw/cOszV8AvguNY8BwHOVWSe6CVbw012ZOKWtp7sm4fL9xKRbpuwbQs2JZdVBCQEEJI+WMZxksscn9GNIOqFZGSorgRIYQQMnZJklNV2q3Yo+uFx4Xe+XAXLMvGR9ua8ezL7+Puh1/Br377D/z56bV46fWNuPdP/8K+1oGrSB80fzI4bui3wObMqEeuDmqbd7RkbHPfv6absGwblmVDLbNqRYmkhvYuJyGqO5ZMe07TjLJu2UYIIWTkeXEian9GUkqSVMTzPM4880zYtg3btvHAAw/gzTffLMVUBrRmzRo88MADYBgGDMPgjDPOgCAIpZ4WIYQQUrbcNmQ8z0GWneDRcLdAc4/nVkMK+MS824oxDONVNQr6nZ+xRO4VWPW12ZOKuqPJcd0SjON
Y8LxzmainqhUplFRECCHjlj/VmqJK5MExDDTLRodKVepI6VDciBBCCBm73EpFklBc+zNNN/DBpr0Z25OKjs3bW/DvddugDZKoNKmhEscun1/QuLn4ZRFTGrNXPNqyPTOpiHdjKjagpaoVlVMLNMO0sK81AthOq7NEIjNu1x1NZu5ICCFkwhBY594An/pJlYpISZKKAODiiy8G4NzcM00TX/ziF9HR0VGq6WRob2/Hueee6wWwAODrX/96iWdFCCGElDeGYRAKOsk6bmuy4WyBZlm9rc/8BbY+c7nz88kCGNapxpMrwNWQI6kIAFrHebUir1x3KlBXruW6CSGEDF21yENgGfAsg1pZAANAMS10jUBCqWJa6NEMRDQDXaqBDlVHm6KjNamhJamhU9VhUjITAcWNCCGEkLFKTlXq4d0qyIYFy7Ly3v/DLfsGTRoayLJFM3HBmUd68xgOc2fWZ92+p7krawVsN6biLtAqp4Vp+1sjMA0LmmGiI5LZvg0AemIKTDP/z4wQQsj44lYq4lm3UlEpZ0PKQcmSipYvX44VK1bAtm0wDIPt27fjhBNOQGdnZ6mm5Ons7MSJJ56InTt3AnACWMcccwyWLFlS4pkRQggh5S8cdFqg+X0iwDjlngtdlZZLQtFTrc9YiDwHMMi79ZnL7xPB8xw4loVfdqodxXIkPoWDcs4gVEtHtLDJjzFumXK37ZmilU8AjBBCyPDiWAaT/CI4hoHIMqhJJRbFDScBaLhENANtio5u3URUNxEzTCQMC4ppQbVsaJaNuGGhXdGp/RqhuBEhhBAyRkkiDzBOdQO3CnIhSULvbNhd1Lh+n4hzT12CE1csBJ+qxDlc5s1oyLrdtoGtO1sztktSaqGW6lxLl0s7sY6uGBIJDZZto60jCtsCZDE97qUbJmzLRk8sd2VvQggh45uXVJRqEGHaNsVpJriSJRUBwA033AAm1YuPYRisX78eixcvxttvv12yOa1btw6LFy/Gu+++C4ZhYNs2WJbFTTfdVLI5EUIIIWNJ36SdgG/gpJ1CJZLOcdzjBnwSOK7wy5lwqlqRP3WcbKvKAOf6pKE2lPW5lnFeqUhMlSlXUzeTVY3a4BBCyHgmsCya/CIYAD6ORVUqubRbNxEbwkpxVzSVSAQAfp5FSOAQFjhUCDyqRR41koAaSfDar3Wp1HaTUNyIEEIIGYsYhoGYqtQj8IW1QFNUvaiK1zOm1ODScz+JuTmSf4aqtjqIqrA/63NbdmS2QHNbwLnvW9PMklf+SSRVtHc6lYk6I3HougmOY1BZkf6+4qkYWaQnMepzJIQQUh7cpCKWYcCl/ianFmgTW0mTilasWIELLrjAu0HFMAx27NiB5cuX47vf/S66urpGbS5dXV34zne+gyOPPBI7duwAAG813OWXX47DDz981OZCCCGEjHVe0o7fSdqJJ9QhJ6RYlu214PKlWp8FA4VVKeo/P58sgmUBw7BylqKur8neAm38JxU5ATDDsGBaFmzL9qoWEUIIGZ9kjkWDm7ibSvoBnApDyhBugsQNE5HUd0iF4CQQVYo8KkQeYZFDQODg51n4eRbVEg8GQMK00E3fOxMexY0IIYSQscmt+iyJzvWknmeSuiwJuPzLx+LLZyzHskUzMam+Aql7mVkxDHDckQtw/ueWe+3uRwLDMJg7M3vC0t7mSMY2UeDBMIBp2tCNVFv5ErZAM0wL+1q7ATgL/2JxJ3GrrjoEjk0/wcmkCst2YkC5FuERQggZ31iG8Vqf8W5SES04ntBKmlQEADfffDNmzpzpPWYYBoZh4Ne//jVmzJiB//mf/8EHH3wwYuNv2LABq1atwowZM3DzzTfDMIy0VXCHHHIIrTYjhBBCCuQlFaWSdkzTRkt7D7qjSaiaXlSCUVLRYNkAz7NOb3oGCAaKCxhJkgBR5MEyDPypBKV4MvtKuIba7ElFrR09sKzx21+e41gIqTLlemplXbKEATBCCCGjIyhwqJOdm0AVIo8Az8I
G0K7o2J/Q0JLU0Kbo6FB1dKkGejQD2gDfh4ppeVWHggKHcOrGUpDnUCHyqOpTpYhlGMh9qiT16CbixtCrJJGxjeJGhBBCyNjjVuoRUj/VPCsVAQDLMpgxpRYnrliIr56zAt+99CScf/oyfHLpXMyYUuO1VJs+uRoXnvUJfGLxHLDsAJlHw2TmlJq0x0G/hFnTanHg3Emw+lVvYBjGOwduBehStkDr7knANCxohomOiFOtqDLs85K/+rLs3ore3dHkqM6TEEJI+RDdpKJUNglVKprY+FJPoKKiAo8++iiOPvpoJBJOOUW3fHQ0GsUtt9yCW265BfPnz8dJJ52EFStWYPHixZg6dWpR4+3evRtvvfUWXn31VTz33HPYtGkTAKStenMfNzY24tFHH4UoisPwTgkhhJCJw03a0TQDAb+MaEyBohpQUjcVWZaBLPGQJQFBvwSWHTjP2bZtr4Wa27LMJ4vgi2h95goHZbR3xhDwiYjFVSQSGqorbO9awJUrqcgwLDS392BSfWXRcyh3osBDNzQomgFZEqFSKxpCCJkQKkQehmWjSzNQJfIwbadSkWHbgA2k/o+nWzfh51iERQ5Cn+90zbTQoeiwAfj7JAvVSAKqpMxwhMQx2JfQEBA46LaNqG6iSzXAMwykft/5tm1DtWyopgWeZeDn2Izv8L4s20aCEpTGJIobEUIIIWOPnLrWE3nnp66bXoW/Qkkij1lT6zBrap23zbLsUUkk6mtSYxU+ffQBaKgNo74mjKB/4OrZoshDUQ2omoGgXyrpQi23pVk0psC2AJ8soDJHOzcAiMYVBP0SonEFdWZoSPE3QgghY5MT37HAM85PqlQ0sZU8qQgADj30UDzxxBNYuXIlVNW5Ydg3SAMAH330ETZt2oTf/OY3AIBAIIBZs2Zh6tSpqKmpQXV1NSRJgiiKYBgGmqZBURR0dXWhvb0du3fvxvbt2xGLxbxx+1ZJ6Hsxa9s2qqqq8Pzzz6ethiOEEEJI/irDPrS2R1FTGUAoIEFRde+fZdlIJHUkkjoi3QkEgzLCQV/WIEU8oSLSk4BuOFUQ3KSiUJFVilyhVFKRLAvgeAamYSOp6vDL6TeF6muc4ImRpe3L7n2d4zupSOQRT2pemXJFo0pFhBAyUdTIAnTbRkw3USPxMGzAtgELNizLhgUnUUe3bCRNCwnTQiJpwc+zCAscGDBoVw1YACTWaWsGOAlL2RKKAMDPc6iVBbQrOipTiU1J00K7qqNBFsGzDAzLRtwwETcsmH3+po+zLGokPqN9A1LzbFcMqJYNlmEQSiU38aN8I4oUj+JGhBBCyNgiiU4FHIHnwLKAZQG6YXqt1odqtBOKAKcy0ZGHzc779ZLgVipyYimKUpqYimlaXpUk9+dAreIYBlBVA6puQBJ4RGNJVFUERmWuhBBCyofb9syNnehUqWhCK4ukIgA47rjj8OSTT+Kss85CNBr1tvcP2rhisRjee+89vP/++3kdP1ublWxZ8bZtY+rUqXjuuedwwAEHFPIWCCGEENJHZdiPpKIjGlMgCjxEgUc46APglL1OKhriCSdhpSeqIBpzVkGFQz4IPAdF1dHVnfDKRHMcg4qQD3IqMBUKDLwibDCiwEOWBSiKDr8sIRpTEE+oGUlFPM9hcmMldu7tzDjGrn2dWLZo1pDmUc76l+pWVL3olYWEEELGngZZgG0DccOE6P2nnwG49Ndplo2oZjiJRYbzj2Wc1gkiy6JW5sEwDII8h9ocCUWuSpGHbtno1gxUSzzaFKe9Wruig2UAtU8Qi2MYyByDhGFBtSy0KDpqJD6tqlFvQpEFlmFQJwsQWQYcw6BaLJuQCMkDxY0IIYSQsYPjWPA8ByOVSKSoBjTNGLakorFASrUW03QTlpOdD1UzvFjLaEkqOmA7SV2GYYFh4MXWspFFAaoBxOIqpEoekZ4EJRURQsgEJHLpSUVUqWhiK6uahZ/+9Kfx+uuvY+bMmWnBnL4lpvv+c5/L599A+/c
d56ijjsJ//vMfCgwRQgghQ8QwDCY1VGL2jHpMaqxEZdgPUXTuQkoCj8qQH5MbKlFfG4Is8bBtIBpXsbc5gn0tETS39UDVDDAsUBmWMamh0ktKqqkKgOe5gYbPSzi1MivgdxKJkkkNVpaM+2mTarLuv2tfZ9YbUOOFKDjn2DAsmJYF2L0JRoQQQsY/hmHQ5BcxLSBhkl9Eo09EvSw47ctEHmGBA8swEFkGNbKABp8IXyqhx7KdpJ9aiQfLMPBxLBp8Ql6JqbUSDz/PgmUYp/oQw0BPtTsDAJljUSMJaPIJqJaccQWGgWnbaFN0RFMV9gZKKJrkFyFSG4cxh+JGhBBCyNjhtkATUolEmj6xWtHyHAueZwEb0FLVitxKQaMpnlBTYztzkER+wEpPgVRbt3hCgWXb0DQTieToz5sQQkhpCW6lotRXhmU7cRYyMZVdBO3AAw/Eu+++i6997WteYKf/qrNcwZ7B/vXf3z2GbdvgeR7XXHMN/vWvf2HSpEmj+6YJIYSQcYznWIQCMhrqwpg5tQ6zp9ehqb7CK7Xsl0U01lWgsS4Mn9y7iguMU455ckMlKsMBcCwLWRYwdVIVaqtDwzK3UEAGUiu0eJ6FZQOJLAGeaZOqs+6fSGpo74plfW48YFkWAu9cLmp9qhURQgiZWESOhZ/nEBQ4hFPty2pkAfU+EdODEqpSiUMiy6A2lVzk51nUyQI4loHIsmj0i3lXumMYBg0+ESLLgE8dU2BYVAg8JvlE1MkC/Dzr/K0PQGAZ1PucbTaAiGagXdEHTCiSKKFozKK4ESGEEDI2uJV63Mo8mj7xFim5LdCUEsZU4kk1NbYT7/L1q9DdnyTyEAQWltWbkBTpSYzsJAkhhJQdIZWAyjJOLAWgFmgTWVlG0QKBAO666y68/vrr+OQnP5k1IOQqZMVZ//3d51auXIn3338f//u//wuWLctTQgghhIwbPM8hHPJhUkMlZk6rRWXY7yT2SAIaasOY1FCBipCTTFRTGQDPcRAEDpMaKjF9cg38vqG1Pes/F7/PCaYEU9WK3IBJX5Mbq5DrPuiufZlt0cYTtzS5mgr+qerECwISQgjJjWMY1EgCpgec5CIGcCoXSQKEVFLQJL/oBaAKOW5Taj+RZdDoFxAWOXCpxKAKkcfUgIQZQblPVSOnghIDIGlalFA0jlHciBBCCCl/bjKRkKqCPNEqFQGA6LaVT8VSRrtSkaYb0HUTNmwvoUmWcrc+cwX9zkLAaFzxfhqmNXITJYQQUnYYhvESi3iGWqBNdGXdwHb58uX45z//iVdffRV33nknHnvsMWiac9HlBnjyXenocgNFsizj7LPPxhVXXIFFixYN67wJIYQQkh9R4NFQF0ZNVQBd3QlEehIQBR5ihXOJwnEsaqqCqAz7Cv7Oz1c46EMiocHvlxHpUaCoOkzTAtfnhqMk8miqq8C+1u6M/Xfv68QRB00fkbmVA1HkEU9q0FPBv6RKJa8JIYRk4lKJRJUCj4hmIGlaTvszWQA/QHuFgQgsi0l+ES1JHYZtQ+ZYhAUOgVSVIleTT0SnZqBLNRAUOIgci3ZFhw1QQtE4R3GjoYtGo1izZg02b96MSCQCQRBQX1+PQw45BIsWLRqxa/DhtG/fPrz33nvYvn07enp6YJomgsEg6uvrMXv2bCxYsACh0PBUOu1r48aNeOedd7B//34oioJwOIzZs2dj2bJlqKnJ3j55uBiGgbfeegsbNmxAe3s7LMtCdXU1DjjgACxduhSyLI/o+IQQkg9ZdJJXRIEDGMCybBiGOSzt7McKtwWcW6VJ08yMmNNIiiec6yJVNWBZzjW7m+w1kKBfQqQnAU0zoeoGJIFHTzSJ6srASE+ZEEJIGRFYBrplg2cB1QJMqlQ0YZV1UpFrxYoVWLFiBaLRKJ577jn8/e9/xxtvvIHNmzfDsvLLjmZZFgsWLMDy5ctx8skn4+STT4bf7x/hmRNCCCEkHzzPoa4mhOrKALqjSSQVDbIkoDL
sH/FAS9AvgWEZiDwHUeS8XvFuezbX1EnVWZOKxnulIjfYpKZKdauakdFmhBBCCHFxLIMaefDVz/mSOBbTggNXKWRSVYp8HIuWpA6RBRplAQZACUUTBMWNCrd27VrceOONePrpp71ErP4aGxtxySWX4Morr0R1dfZ2wKXS3t6O3/72t/jjH/+IDRs2DPhalmVxwAEHYMWKFfjsZz+L448/vujPNplM4o477sCdd96J7du3Z30Nx3E49thj8b3vfQ8nnHBCUePksmfPHtx000148MEH0dXVlfU1gUAAn//85/HDH/4QCxYsGNbxCSGkEILAgWUZWBYg8Bx03YSmT6ykIlHgwTCAadrQDRMCz0FRdQT8w1eFeyCJVOszt0KSnOd1Osex8PtExBMaonEFUmUQ3ZRURAghE47AsgAs8IzzU6dKRRPWmEgqcoVCIXzhC1/AF77wBQBAPB7H1q1bsWvXLuzfvx/xeBzJZBIA4PP5EAgE0NTUhOnTp2P27NkIBOiChxBCCClnHMemAhSj953NcSyCfgnRmIKAX4KmJRBPqhlJRdMm1WDN+swbB93RJCI9CaeNW5mxLBvRhA7bBvxycUE7MVWm3DAsGJYFnmWhakZe5bLLhW3bMAxnJSBbZLUMQggh5c/Pc5gSYNGS1KDAgghQQtEEQ3Gjwem6ju985zu47bbb0tq+AYAgCDAMw9ve3NyMG264Affccw/uu+8+nHLKKaWYchrLsnDzzTfjuuuuQ09PT8bzoihmJElZloUNGzZgw4YNuPvuu3H99dfjRz/6UcFjr1mzBueddx4+/vjjAcc1TROrV6/G6tWrcc455+Dee+8dlkpJ9913H6644gpEo9G07SzLgmVZGIazCCAej+OBBx7AI488gp/85Cf44Q9/SAsCCCElI0kCkkkNksinkooMrw39RMAwDESBh6oZUDUDAs8hOUpJRbZtI5F0vpuSqdZnvgJiOaGAjHhCQyKhwq4MQNMMqKoOaQzFgwghhAyN1/4s9dOgSkUT1phKKuovEAjg0EMPxaGHHlrqqRBCCCFkDAsFZSepSBbRhQQU1cgoyT1tUu7V2bv2dZY8qci2bXRHk9jf2o19LRHsb+vG3v2d6EnddPDJHM70TcaB86YVdFyWZSHwLHTDgq4Z4GURiqqXdVKRrptQVB1JVYOi6FA0A7Zlg+NZTG2sogAYIYSMYwLLYLJfRFQ3YdpASOCKbr9Gxj6KG6VTFAWnnnoqVq9e7W2rqqrC1VdfjXPOOQdTpkyBYRhYt24dbrvtNjz00EMAgNbWVpx22mm46667cOmll5Zq+ujp6cFZZ52FF154wdt2wAEH4Etf+hJWrlyJadOmobKyEqZpYtOmTXjqqafw61//Gq2trUMe+5lnnsFZZ50FRVG8bZ/97Gfxne98B0cddRQkSUJrayueeOIJXHfdddizZw8A4M9//jM2btyIl19+eUjVnn784x/jhhtu8B7zPI/LLrsM//Vf/4WFCxeCZVls2bIFDzzwAH71q18hmUxC0zRcffXV+Oijj/CHP/yBEosIISUhSzySSc1bsOS2ARsPbNtGT0xBS3sPWtt70BNX8NljDs54nST1JhUF/ZJXNWikJRXdaTlnWdA0p6W9LOef0CVLAniehWFYSCgaArKEnpiCOoqpEELIhEFJRcQ1ppOKCCGEEEKGQ9AvgWUZ8DwHWeKhqAbiSQ0VIZ/3Gr9PRF11EG2dsYz9d+/vxCELpozmlD2qZuCNdz7Gug92IJ5ID0zZfdp9JBUTT734AaZNaUCwwBVxoshDNzSougFfKqmo3BiGifauGGIJFaaR2ebEhg3TsNDeFcPkxqoSzJAQQshoYRgGYZHCHYT09+UvfzktoWjOnDlYvXo1pk+f7m3jeR7Lli3DsmXLcMIJJ+Ciiy6CbduwLAuXXXYZmpqacOqpp4763Ht6enDcccdh3bp1AJyqStdccw1++MMfguPSK3JyHIcDDzwQBx54IC655BJ89rO
fxZtvvln02G+99VZGQtFPf/pTXHXVVWmvq6+vx3/913/h85//PE466SSsXbsWAPDee+9h5cqVeOWVVyAIhd+IveOOO9ISinw+H5566ikcf/zxaa+bO3curr/+enz+85/Hpz/9aXR0dAAAHnzwQTQ0NOCmm24qeGxCCBkqSXT+u9ebVGSWcjpZqZoB07QgS0Je1Y3bOqL4+yvvo6U9mhEfOe7IBRmLsCTBbSvvvFZRRiem4rY+c8cTBQ58gdU7Az4R3VEF8biKgCyhO5ZEXc3Qq+8RQggZG0Q3qSj19WgBMG0bHC1YmHCo/jchhBBCJjyGYbx2Z36fk3ATTwVf+prar1pRXU0IRxw8HfNnNY78JHN45qX38K83N2ckFGWjagbe27in4DHEVABM05wVheWYVNTS3oPuniRMw4ING6puoCemoL0rhr0tXdjX0g0AiCc1WLSighBCCCETzD333IO//e1v3mNZlvHUU0+lJRT1d8EFF+Dqq6/2HluWhQsuuADNzc0jOtf+bNvGF7/4RS+hCHBagf3oRz/KSCjqr6amBo8++mjR7ceSySTOOeectISiCy+8MCOhqK/q6mo89dRTqK2t9bb95z//wU9+8pOCx3///fexatWqtG133XVXRkJRX4sWLcIjjzyStu3//u//8Pzzzxc8PiGEDJWcSvQWUnEFw7BgmpkLgUol0pPA/tZutHZE0dzWnVe8QBA47NzbmTU20toRzdgmpc6BppuwbBuWZUMdhbhKPNX6zJ2nTy48sdVt05ZQdJiWBdOwvGQlQggh4x/PMGAAsAzjJRJRtaKJiZKKCCGEEEIAhN2kIr8IMICmmdD7raBbMLsJRx42C+ecshj/818n4uvnfQqfPeZgzJleX4opY19LBBu27Cton83bWwoep28ADHCSk8opMcc0LcQSTlCrub0bu/Z1Yn9LNzojccTiKnTdgq6bMEwTtmWPWqlxQgghhJByEIvF8OMf/zht23e+8x0sWLBg0H1//OMfY9q03va5XV1daVVzRsOtt96KZ5991nu8atUqfOlLX8p7/8mTJxf0+r5uv/12fPzxx97jcDiMX/3qV4Pu19DQgOuvvz5t28033+y1RcvXD37wA2ha77Xr0UcfjQsuuGDQ/T796U/jnHPOSdv23e9+F5ZVPjfyCSETgyjyAANwLAued25H6UZ5VCuKxhREepIAAJZ1Yh7ZFpj1VxHyeXGS/lrbezK28TznvHe7N8EnGlcyXjecDNPyKhQlVed7xFdA6zOXKPBOlSkbSKSSlHpiIzt3Qggh5YNhmN4WaJRUNKFRUhEhhBBCCJzgCsez4FkWvlSp6v7BpNnT6vDpow/EvFmN8BcRjBluHMfi0AOmIODPfy57mjuRKDCpxi1TbhgWDMsC7N6y3eUgqeiA7QQmFcWAbQEsy8AnC6gM+yAIbO/rkL0KFSGEEELIeHXLLbegtbXVeyxJEr797W/nta8oirjiiivStt1zzz3Yvn37cE4xp5aWlrRqSRUVFRkJUvlYuXIlFi5ciIULF6K+Pr8FAd3d3bjxxhvTtl1yySWorq7OsUe6iy++OK1akaIouPbaa/Oe82uvvZaWTAUA3/ve9/Le//vf/37a4/fffx9/+tOf8t6fEEKGA8MwXgKO+7Mcqh8nkho6InEAQGWFDxVhPwAgFh88XsAwDBpqw1mfa+nITCoC4MWQ4qkFUSOdmJNMJQBpugHTsMEyyJkINRi3WpE792hMKauFZoQQQkYWzzqxdYFLJRXZ9B0wEVFSESGEEEIInKCQW62of8CkXDXUhnHapxfhiotOwMVnfwKfXDo3Z2DLZdvA1h2tA76mP5ZlvcQc3WuBZhQ36RGQUJzPyQ1MyhKPaZOq0VAbRmXYj0CqpZ37fLl/roQQQgghw0XXdfz6179O23byySenJbsM5vzzzwfL9oYQsx1zpPzsZz9DPB73Hl922WWorKws+Dif/exn8cEHH+CDDz7ApZdemtc+9957Lzo
7O9O2feUrX8l7TFEUce6556Ztu//++9HSkl/l0P4JTTU1NfjsZz+b9/iHHXYYDjrooAGPSQgho8GtkCOnFnC5C35KRVF1tHU6bcpCQRmVIb8TB2KcysyaPni8oz5XUlGWSkUAvMVgCUWDZdvQdXNEqyi71ZyTqTiIJAlgUhUmChXwOXNXVAOGYcKybIqrEELIBOJWKuJAlYomsgmfVNS3JzohhBBCJrZQINUCLRUw0Q0Lhln+LQJYlsHkhip84og5qKrwozLsw0HzJuWsYLRlR+Et0ETBWdGmpoJr5VSpyC3B3ZtUJKQ975PdwKUGG3bW1naEEEIIIdmM9bjRiy++iK6urrRtp5xySkHHqK+vx9KlS9O2Pfroo7BHeIVqNBrFfffdl7btjDPOGNEx+/rb3/6W9njKlCk49NBDCzrGypUr0x4bhoHHH3980P16enrwwgsvpG076aSTwHHckMb/4IMPsHnz5oKOQQghQxVMLdxy/zZXNQNmiWItmm6gtSMK2wb8PgHVlU6FIp5lveSZfFqTNdSEsm5v64hmreIjiQIEnoVtjU4bMbdCs5u45J77YvA8B1lyYkKxpFtpKTnEGRJCCBkrRLf9GZu7UpFh2dCo1fK4NqGTikzTRFNTE775zW/inXfeKfV0CCGEEFJisiSAYRmwDNNbmSePFWrlQtNNnHr8ofjvC47HGZ85HAtmNWR93badbQUnS7llsjWvUlF5JBWZpgU1VTVJ0bInFYkCD45lYFnwXkst0AghhBAymPEQN3rssccytq1YsaLg4/TfZ9++fVizZk3R88rHn//8Z8RiMe9xfX09lixZMqJjuvbt24c333wzbdvRRx9d8HGOOuqotCpPQPbPpL9nnnkGmpZewWI4PjfASQgjhJDR5PeJYFgGPMdBFJ3kyJGs0pOLYZhoTSX9yBKP2uoQGDDgeOe/00GvarU2aHuvhrrslYo03USkJ5H1uYw2YnFlRBJ0VVWHaViwbBtqKobjk7IvOstXsN/cYwl1TCzCI4QQMnRupSL3Z99KRbploU3RsT+poSWpY19CQ5dqQC2T7wjdshA3TCSN8pjPWFbypKIHHngA1dXVef37yU9+Muzjd3d346677sLixYuxcuVKbNmyZdjHIIQQQsjYwDCMlzzjVubRxlBFG79PTEuomT09e1sLVTOwe19n1udy6X8+VM0YNMg2GhKpQKRmmDANGwzTO1cXwzCQU6vyFNV5PZXqJoQQQsYGihsNzVNPPZX2OBAIYN68eQUf57DDDhv02MOtf0WfxYsXF926pVBPP/10xo3ebOdgMKFQCHPmzEnb9tJLL6W1dMsm27ktZvxSfG6EENIfwzBeFSC/XJoWaJZlobUjCsOwIAgs6mpCYBkGAb+EaZOqATht2niehWXZSAyyEKmuOoRcX0k5W6ClWrMnVR2GZcE0rBFJroqnKiGpqg7bAniehSAUVumuP79PBMsAum45FaxtIDqClZYIIYSUDyG1SIJPfe9ZcJJ1IpqBlqQOxbTAAGAZBqZtI2aYaFWcBKOINroJRoZlI6ab6FCd8ZuTOjpVA+2qjs4yWSQ9VpVFUlEkEhn0X3d3N5LJkSupaNs2/v73v+Pggw/GVVddBYtKdBFCCCETkptU5AZcxnKbrGlNVRD47FGuQluguYk6hmHBME3ALo8WaF7rs1RAUhJ5sGzme/alkq0SqdclktqIt+wghBBCyNBR3Kh4ra2t2L9/f9q2+fPnF5WYc+CBB2ZsW79+fbFTG5RhGHj55ZfTthWTDFWsbO/tgAMOKOpY/c+druv48MMPR2X8pqYmVFZWpm1777336DqYEDLq3Co9suwkFyVVfVT/W9TaEYWmm+B4Bg01YXAsC1kWMKmhEqLAw59qHx8MOPOMDbIQSRR4VFUEcoyVPalIEFKVmmwgOYIt0LzWZ6mbpz6p+NZnLpZl4Ut9dl6lJWqBRgghEwLPAAycJGEu9bdkS1JHVDdhA5A5Fg0+EZN8Amp
lAX6e9RKMorqTYNSS1GCO0Pe+YdnoUg3sS2jYn9TQpRlIGBZM2wYDQGJZMADihoWEMXbv9ZRaSZOKmpub8c9//hMMwwz6b6QxDAPbtqFpGn7xi1/g5JNPRnd394iPSwghhJDy0j+pSDPGTvuz/jiORVOtL+tzm7e3FBTAY9nelnBqqiXcaK8szMZNKlJztD5zuYFLTTNhWBYsyy5JuXVCCCGE5I/iRkOTLXFlxowZRR0r236DJcYMxfr165FIpLePmTt3bsbr4vE4XnnlFTzyyCO47bbbcN999+Gpp54a8txKee50XcfWrVvTtlVXVyMUCg3L+LFYDLt27SrqWIQQUiy3fZYsCuA4BpbV25prpCWSGhTVAMsCDTVh8LyT3DO5scpblFQZ8vfOkwEU1Rh0kVlDbfYWaM1t2ZOKgN5qRW7iTzQ2vC3QnFiHEx9xYx654iSFCgTS555UdGj62I2ZEUIIyQ/DMBkt0GwAAsOgThZQJwsQWOfvch/HokYSMMknoEbqTTDSLBsRdXi/MzTTQoeqozmpIWaYaUlEYYFDnSxgkl9EvU9AKHWvJ6KZsGiBRVFKmlT0yCOPDLiyy7Zt2LYNlmWxZMkSHHXUUSM6HzcQZds2Vq9ejSOPPBIdHR0jOiYhhBBCyovX/oxPVebRzTG9kndKvT/r9q7uBNq7YgUdyw1EKYrzB0B8kHLgI80wLWipIKSiDpxUxHOssyIQvVWN4glKKiKEEELKGcWNhmbDhg0Z25qamoo6VkVFBXy+9GT1nTt3DtrGq1jZKvXU1dV5v69evRqnnnoqamtrccwxx+CLX/wivvWtb+GrX/0qTjvtNCxcuBBTpkzBZZddhh07dhQ8/nCeu8bGxryO79q8eTN0PT15v9ixixmfEEJGAs9zkCQnzuJWvEmM0kIfN3YRDMgQBR4cz2JKYzV4rvf2WDAggeNY8BzntWiLJgauIpQrqShXpSIAXhs4RTFgmCYsyx60KlIhkooG27JhGCZ03bmG8snDk1TkkwRwLAPT6F2kNRKVlgghhJQfrwUay4BlGFSKPBp8AmTOqQJUJfKYGpBQJfEQUwlGft5JMKqTBTAAEqYFJY9WaIrptFbr0QzEdROKacGwbO8ejWJaaFN0tCg6EoblVUuq7ZNEVCHykDknoQkAwgIHPlU9qVujakXFKGlS0cMPP+z97gaC+v5+9NFH4+GHH0ZnZyfeeOMNnH766cM6PsdxuOiiixAKhdJuFroBoo8++ghnnnlmxh/yhBBCCBm/RNEJtgg8B4YFLNtp+ZWNbdvo7I5j/Ye78dTqd3HHAy9h197yurE0uT57pSIA2LK9taBj+aRU8Et1gkdJRYdllS7hyg1iaboB07TBMr1JYdm4Jb/d/YYzcEcIIYSQ4Udxo6H5+OOPM7bV1tYWfby+ST2A8zls37696OMNZNOmTRnbAoEAWltbccYZZ+CEE07A008/DUXJfTNz7969uPvuuzF//nx873vfg2nmFzyORqNob29P28ayLKqrqwt7Eyn19fUZ27J9NgM9N5TPrdDxCSFkpLjVitwkl9GoHty3co8/ldBTUxn0qlO7GIZBOCQ78ww4P+NxdcBFZg012SvIRXqSOdvF8zwHOZVcFU9VXo4OY2JO/9ZnTov44bkNyDCM1yaud+7UAo0QQiYCt0JRgOfQJDuVfxiGQYDnMC0ooUYWIKWqFE0Lyl6CEcswEFkGAa9SkDHgd2vcMNGmOK3VunUTnZqBNkXH/qSGPQkNe+Mq2hQdimmBAeDnndZrdbIAXyqJiGMYBHmnUtG0gAQ/z4JhGFSlvn9jhgk1j+Qmki73XZcR1trainXr1nklqt2ftm1j8eLFuPvuu3H44YeP+Dx+97vf4Y477sDDDz+M6667Drt27Upbefbaa6/hG9/4Bu65554Rn8tIiEajWLNmDTZv3oxIJAJBEFBfX49DDjkEixYtGpUS4WPVxx9/jHXr1mH37t1IJpMIBoOYMWMGlixZgkm
TJo3o2LZtY/369XjvvffQ2toKXddRWVmJefPmYdmyZUWXvCaEEDI4nmPB8SxMw4Io8FBVA5puZAScnnn5PWzZ3opoPD34s2tfJ6ZNrhmx+VmW7ZXnzocscqirktDWlZlAs2VHC446Ynb+x0pdeOu6BcMwwfMckoqGQCowONrc1mdulSJJEga8tvHJIrqjivd6TTO890EIIYSQ8kJxo6Hr6cmslBAOZ6+qkI9ssYhsYwyHnTt3Zmxrb2/HihUrsHnzZgBOssy3vvUtrFy5EtOnT4coiti7dy+ef/553H777V5ikqZpuOmmm/Dee+/hr3/966AxlWzvKRAIFH1TttDzNpY/N0IIGUjAL6GjK+5UGGbSYwsjRVGdxVA8z0JOLSILBbLHMCpCfnRFEvDJAjjeqciTSOaOedTnqFQEAK3tUUydlD0ZNeCToKgG4gkVFUEfYgm14FhPNppueJWD3LjHcFUpcgV8EqIxFYmkCqsyAE0zoaj6sLVYI4QQUp5CAoeIZkBMfVeJLItamYc/x3e4xLGQOBY8w6BN0VHBc0gaFnTLRtywEBQy99MtC12pFml+jgXDAKYNmJYNw7ZhA7AAMAACAocQz4FPzYcBEBZ5hAUOEpf+d1udLGBXTIXMsQjwLOKGhS7NQIOceS/BHLsNK0ZcyZKKXnzxRdi2nRYUAoDrr78eP/zhD4ctezofsizj4osvxpe+9CXceOONuO6667y52baN3/3ud7j44ouxfPnyUZvTUK1duxY33ngjnn76aWha9oz/xsZGXHLJJbjyyiuLXm01FCOd0FRMqxrDMPD73/8et956Kz744IOsr2EYBsuWLcOVV16Js88+e6jTTNPZ2Ymbb74Zv/3tb9Hc3Jz1NaIo4pRTTsEPfvADLF26dFjHJ4QQ4pBEHglDgyBwUFUDupG5qrk7msxIKAKAXfs7R3Rub7zzMd7duBtzZtRj7owGTG2qAscNfN00pd6XNalo9/5OJBQN/lTp8cGwLAtJ5KFqBhTNQJDnEE+qZZNUNFgQy1mhx8A0bSiaDlkUEE9qqAjlruZECCGEkNKguNHQxWKZrW4DgUDRx8u2b7YxhkNra2ZFza997WteZaLjjz8ef/nLXzLiWXPnzsXcuXNxySWX4Bvf+Abuu+8+77nnn38eX/7yl/H4448POHapz1upxyeEkJEiS4IXv5BFHopqIKnoCAVHLqnIrdzjVinyyULOJCZJ5OHziUgmNYT8EiI9CmKJ3DGPipDPi5H019LekzOpyO8T0dkdh6aZ0AwTIs8hFlcQHkJsQlV17G7ugmlY0AzTqwI13ElFsiRA4FnohoVEUkPQL6E7mqSkIkIIGeckjsWUgISYbnrJOfnc5w8LHLo1ExoshAUOXZqBbt2Aj2fB9dnfsm10qAZsABLLoibL95dl2TAA8Ay8tmYcw6BC5FAh8OByJOcKLItqSUCHqqNS4KGYOnTLRsywEOqT3JQ0LHSk7jO4c6PSLL1KllT0wgsveL/btg2WZfGb3/wG3/jGN0o1JYiiiGuuuQZHH300vvCFLyASiXgBoiuuuAJvvPFGyeaWL13X8Z3vfAe33XZbRlKNIAgwjN6yYs3Nzbjhhhtwzz334L777sMpp5xSiimXjc2bN+Pcc8/FO++8k/GcKIpecpZt23jjjTdwzjnn4P/9v/+Hhx56KGt/+kI988wzuPjiizMCZwzDgOd5r5y6pml47LHH8Pjjj+O///u/8X//938QBLpoJ4SQ4SSJPBIJDWIq0KTpmQGiaZOqsW1nW8b23fs6h2WFWS5bd7agvSuG9q4Y3njnY0gij1nT6nDYgVMxe3pmawMAmFLvxzubIhnbbRvYtrMVB8+fkvf4PlmAqhlIKk7wyE3sGW2GaUFLBe7cAJ5bSSkXhmHgk5xEIkVNJRUlVEoqIoQQQsoQxY2GLhqNZmzj+eJDgdn2zTbGcOjq6srY5iYUHXTQQXjiiScGTLSRZRn33nsv2tra8NRTT3nbn3j
iCdx666341re+lXPfUp+3Uo9fqNbWVrS1Zf5dNJCtW7emPY7FYqNaPSkejw/4mBAychgYUBUFHGvCMlVEoxZEfmRakNi2jXgsBsu2wTMCVCWBgC8w4H9vBMZERElA4GxYpop4XEXcx4LPsZirttKPPc2RjO17mztw8LzsMRoAEFgLiqaju7sb4YCMllYDsIurSqeoOlrae2BbNjTDRGdXDKZlO+3hLR2qMnCbVk1VBnzcn8jbUFUVPd0WBDaANl2BLNjUFYMQMubRNeLgRDjVggr5a0I2LfQoOjjbBqOY0G0LHWoSYbH375RuzYBiWE7rMomHElchpL5X3CpFLg2AwDAIC5zT2kxlEM9cT52GtW2YSR26bcPntlZLAKwsgGMZxHUTUd1ZWC6yLMISByWuwscx6EF5tDsv9cKQkiYVuYEXhmHw3//93yUNDPV13HHH4cknn8SJJ57oBSzWrl2L5557DieddFKJZ5eboig49dRTsXr1am9bVVUVrr76apxzzjmYMmUKDMPAunXrcNttt+Ghhx4C4Pzxf9ppp+Guu+7CpZdeWqrpD6tCy0K/9dZbOPHEE9OCVkceeSSuuuoqHHvssQgEAohEInjuuedw3XXXYePGjQCAl156CUuXLsUrr7yCmTNnFj3fe+65B5dddhksq/cPqPPPPx///d//jSOOOAI8z2P37t3485//jJ/97Gfo6uqCbdu49dZb8eGHH+Kpp56CLMtFj08IISSdlCqJLaYy1XU9s1LRtEnZW5xpuomW9h401VcM+7ySioZd+9IrIamagY1b96OpriJnUlFFUEDQx0PPEqfb0xwpKKnIWX2W9KoDqaoBw7RyBthGSjKVzKTpBkzTBssAojD4pa0sO0lFSUVHZQiIJ9S0KgiEEEIIKQ8UNxq6ZDKZsY3jiq8GkS05JdsYw8E9r9ncdttteVXuYVkWt912G1544YW041133XW45JJL4Pf7s+5X6vNW6vELdeedd+Laa68d0jHefPPNnBW7R8Obb75ZsrEJmeg0AD0dIz/OvtTdz+1F7Lt7gDunPGJQ1cz/pn68Yze2T8pcoNZfRxRw3/6HRcxtIBqAaBHndu/uLXkf3/3stm0qfBxCCCl3dI04cuIAci1LGM1GzdnSdOIAMpe4lN6uXbtKOv7o3v1J2blzJ/bu3es9njFjBn72s5+VYio5feITn8Cdd96ZdpPpwQcfLPGsBvblL385LaFozpw5eOedd/A///M/mDLFuVnI8zyWLVuGP/7xj7j//vu992ZZFi677LK01Vtj2Ze//OW8X7tz506cdNJJaQlFX//61/Haa69h5cqVXqCqsrIS5557LtatW5dW1Wn37t048cQT0d3dXdRcn3zyybSEIpZl8eCDD+KPf/wjli1b5gV/pk6diu985zt4++23MWvWLG//1atXF/R+CSGEDE5KZcnzqSQV3bBgWekVACfVV+RMpNn08cgExD/e1YZc3T3nzMi9Ao5hGEyu96V+B6ZPrsbxRx2Ay84/Bid9cmFBc5BEHiwDmKYNNVXBKZEYZCnACOjf+kySMnsgZ+NLleRWVQOG5XyuyUFW7RFCCCFkdFHcaHj4fJnVGE0zM1k+X9n2zTbGcHCrNfd3yCGH4Jhjjsn7ONOnT89oXd/R0ZHWFq2/Up+3Uo9PCCEkf5Wh7O3kI1Eto5MEIYQQQshYVZKkog8++MD7nWEYfPe73y3LP2YvuOACfOpTn4Jt27BtG0888UTZljq755578Le//c17LMsynnrqKUyfPj3nPhdccAGuvvpq77FlWbjgggtGfWXQ9OnTvXNc7L9rrrkm7ZiXXXZZXmPbto3zzjsPHR29KfvHHXcc7rjjDrBs9v/38Pl8+Mtf/oJ58+Z527Zu3Zr3mH01NzfjwgsvTKtQdM011+BLX/pSzn1mzJiBJ598EpLU28v5b3/7G+65556CxyeEEJKdW/GGZ1lwvHOTSDfSV5jxPIfJjZVZ91/73g6o2vAnqmz
Z0Zp1e0XIh/qa0ID7zpkawsrjFuJ/LjkRXznzKBx1xGzUVgcLrtDDMAykVGKOm9ATL0ELtISips1BlvJrBcrznFeBSkklEyWSo58URQghhJDcKG40PEKhzOtDwxi8akIu2fbNNsZwyPV5n3XWWQUfK9s+jz76aM7Xl/q8lXp8Qggh+asKZ08qMkwb0UTx/+0mhBBCCCknJWl/5gaHbNtGIBDA+eefX4pp5OWaa67B8ccfD8ApDfzaa6/hM5/5TIlnlS4Wi+HHP/5x2rbvfOc7WLBgwaD7/vjHP8YDDzzglczq6urCDTfcgNtvv31E5joSDMPAb3/7W+/xJz/5SSxcmF/Vhb/97W94/fXXvcccx+HOO+/MmVDk8vv9+M1vfoOTTz7Z2/anP/0JV155JZYsWZL33K+//vq0CkkzZ87EVVddNeh+CxcuxBVXXIFf/OIX3rZrrrkG559/fl4lwAkhhAyMZRmIIgdNMyHwHEzDgKabXls017yZjdi5tzNjf0XV8dZ7O/GJxXOGbU6WZWPrzuxJRXNm1A+aHFQdFjFzThMkOXvAqxA+WUBS0ZFUNFQEfV7VoNFiGCY0zVlx3ZtUlP9lrSwL0HQTiqoj6JcQS6ioraYbK4QQQki5oLjR8AgGgxnbhpL0FItlFofPNsZwyJX0snjx4oKPtXz58oxtb7zxBnRdhyBkJqaX+ryVevxCXX755fjCF75Q0D5bt27F6aef7j1eunQpDjjggGGb02Di8XhaO4ulS5dSPI2QURTpSSDSnUBS1dEZiYPjWDTWhod1jN5jM2isddrTT2mqBs/nt85+X2sEmmqgO5ZELK5ClgTUVGb+d2KybuLltyNAlqJEcmgKZs7KXVU66/tnkHYsGzZMC+BYgEFm3Cee1BDpSQBwYjVVYX/Bi8c0VUlreTZ56lyIkjzgPtG4ip5YEj5ZQHVFAKLEY1J9ZUHjEkJIuaFrxJFl2zb2J3UYto2YbiCmOwU3OIZBrcSDYRmEBA5V4simryQMC+2qDtu2EdFMBAUOAut8y9ZIAvx5XiuMto0bN5Z0/JImFTEMg+OOO66sV8cce+yxmDVrFj7++GMAwLp168omOOS65ZZb0Nrae5NRkiR8+9vfzmtfURRxxRVXYNWqVd62e+65B//zP/+DmTNnDvtcR8ITTzyBffv2eY8vv/zyvPYzTTMjGetzn/sc5s+fn9f+J510Eg455BC899573rarrroKL7zwQl77f/zxx7j33nvTtl155ZVZA1rZXHnllfj1r3/tlQRvaWnBLbfcklZ9ihBCSPEkUYCmmRAFHopiQNcz2wYctnAqXl27xUts6euN9R9jyaEzvKpHQ7W3pStnm665MxqGZYx8+SQRQAKqZsCybRiGCVUzvLZxI81NYlJ1A5aVSgIr4Dz7ZRE9UQVJNXUc1YBhWjnb2RFCCCFkdFHcaHiEw5k3aKPRaNHHy7bvSH02uY5bTOJJXV0d6urq0NbW5m1LJpP48MMPceihh2a8Ptt5i8fjsCxr0EVo2RR63sba51ZfX4/6+tw3zfMRDAazvu/REggESjo+IRONJPmQVAFBstEdN2BbAMOJwxY/AYBoIgaWkxAMypBkv5P8Ul2Z9/4WeLS09aCSE5FQumGYgCD6wLLpCTuSDNRUhdAZyUz+7IqqkGR/zjFEyUZP3IRl2bAZHrLkLALTDRNJRYeialBUHW6TA45jwLIMOI4Fl/o+iidMsJyEUFBCdUWg4ISi7POSB5w34JyfWNKCabPOaxnnu2U4xieEkHJB14jDj/ObaE5qEG0bWlKHZduok0VIHAOJYzHZL4Id4e+SMAAroSJhWHDr43IMg0l+EVIZ3x8YqQU9+SrJmdm2bZv3ezErjEZb32o0b7/9dglnkknXdfz6179O23byySejtrY272Ocf/75aUGRbMcsZ3feeaf3e0NDA84888y89nvsscewadOmtG1f+cpXChq7/+tXr16NdevW5bVv34QgwKmSdN555+U
9dkNDQ0ag8le/+lXaMQkhhBRPTCXIuEEtTc8sWy2JApYeOiPr/omkhnc27Bq2+WzN0fqM51nMmFIzbOPkQxA48DwL2wJUdfRbiCUUJxnITeaSRL6gwJUk8mAZwDRsqKnPNZGgFmiEEEJIuaC40fDItlisvb296OP135dhmBFbkFZRUZF1e1VVVVHHy5b0kutchEIh1NSkX19blpVWaboQfZOZXLNmzcr5+uH+3AodnxBCRpokCeB5DizDQE5VhM61iKoYtm17cYOAz0nUCQYGrrzTn/t6UeDB8ywsG1BytLnP1Y6+pb1nwDEYhvHmF4km0R6JYU9zF/Y2R9AZiSOR7E0oAgDTtKHrFhTFQDyhIZ5w3mNFSEZNZeHt7YfCXVRmGBZMywJsQNWo3RshhJCBBQUOPo4FyzCoFHmEBR4Sx4BlGDT6hBFPKHLVyyLkVAKRn2cxNSCVdUJROSjJ2YlEIt7vRxxxRCmmUJBly5Z5v/cNbJWDF198MSOoccoppxR0jPr6eixdujRt26OPPgrbzlKzcxh9+9vfxre//W1cfPHFRR9j06ZNeOmll7zHl1xySd6Vfv72t7+lPRZFESeccEJB469cuXLQ42Zj2zYeffTRtG3Lly/PCFoVOn5XV1fa+SCEEFI8N0AiCBwAZK1UBABLDp0JMfWa/v7z9scwjOz7FWrLjpas22dOqYXAZx9/JHmBv1RijxvMGg1upSJFcVuf5ffd72IYBrLsBi6dY8UoqYgQQggpGxQ3Gh7ZWsP3rfRciO7ubiQSibRt06ZNG7HVknPmZG8jXOx42SrzdHZmtjF2Dee5279/f17Hd82bNw88n16to9ixixmfEEJGQ9AvAQB8qRbt7t/mw8Gp7mOD4xkvXhAqMKmI51j4UnGDwebYkKN122BJRUBv8pKiGIjFVBiGBYZxWrxXVfjRVF+B6ZOrMbWpCpMaKtBYF0ZtdRDVlX5Uhn1oqA2hqmL0W/OwLAsh1R5GSyUTZaviTQghhPRXm/p+9fMswqJzX6NeFiAUURW2WDzLYEpAwuyQjEl+CTxLlfYGU/KkooaG0W3XUYwFCxZ4v/edezl47LHHMratWLGi4OP032ffvn1Ys2ZN0fPKxy233IJbbrkF11xzTdHHuPvuu73fOY7D1772tbz2U1UVzz77bNq2ww8/HH7/wGU9+5s/fz7q6urStmX7TPp74403MoI6w/G55Ts+IYSQwXlJRTwHMIBp2TBMK+N1flnEEQfPyHqMaFzBuxv3DHku3dEkWtqztzwY7dZnLjcpxw0aJRRtxBOSASe5S9dN2LChasUlFQFuC7fexCQ3UYkQQgghpUdxo+Fx4IEHZmzbsWNHUcfKtl+24w+Xgw8+OOt2TSvumi1b9YaBrl1Lee5EUcxIqurs7Cy6BdrOnTvTHgcCAUyfPr2oYxFCyHAJpJKK/KnYgqoZsKzMmEsx3L/v/bIzhiwL3oKxYubokweuptRQGwbDALVVQRw4twnHLp+Pc05ZjK+cedSgY0gij/raEAJ+EeGQjPraEKY2VaOxrgIVIZ9XmZnjWIgCD1kSEPRLCAd9qAz7vYSnUnAre7sVoCmpiBBCSD4kjkW9LIABwACokQQEi/ieHg7UtjN/JU8qqqysLMUUCuLO0bZtdHd3l3Yy/Tz11FNpjwOBAObNm1fwcQ477LBBj11ukskk7r//fu/xypUrMXXq1Lz2feWVVzKCMdnOQT7677dp0yZs2bJlwH2yndtixl+wYAF8Pl/atnL/3AghZKwQBR4M65TedFdf6VlaoAHA8kUzwecoj/nvdVthZklGypdlWXjtrdzfK3NmZLZyGA2+VCKPppkwLAu2ZQ9rufJc3BLmTsARYFnGSwArhJcUpRkwLQumaSEWp2pFhBBCSDmguNHwaGhoQGNjY9q2TZs2FZUI/uGHH2ZsW7RoUbFTG1SupKJYLFbU8bIl5PRfJNZXtve2cePGosbuf+4EQRg0IWu4xt+/f39GhfNDDjm
EgueEkJLz+0QwLAOe5yAIHGwbSAxDTMG2bS+pKOArrkqRy60iJEsCGNZp9aVliQvNnlaH73/tZFz2pWPw+ZOOwNFL5mLerEZUhHwZr83GL4uoqw6huiIAvyyCHSPVEsRULMat7E1JRYQQQvIVFnnMDMmYEZJRJRUe2yejryRJRabZ2wYkHM5eGrKc9J1jscGLkdDa2ppR7Wb+/PlFBQayBTPWr19f7NRGxZ/+9Ke0QOPll1+e977Z3tsBBxxQ1DyKOXfDNT7Lspg/f37atn379qGtra3gYxFCCMnkJqu4q6+0HC3QggEZhy2clvW57mgS72/aW9T4sbiCPz7+Bt7+YFfW5xtqQ3kHqfJRSKs2jmMhpsqT9lb7GfmkHK/1mepWKSrujw6B55y2dbZTUQoAOiLlc51HCCGETGQUNxo+p556atrjeDw+6EKobN55551Bjz2cDjjgAFRUVGRsL7YNWLY4SX197uT8lStXZsTXiomTRaNRbN26NW3bscceO2gbt2zntpjxR/tzI4SQfLEsA3+qyo5brSjSk0A0psCyiq+CrKg6TMsGxzGQUxWKi00qkkQegsCBZRhvYVW2KsduYtRE48bM1FT7M2fx18hXsCaEEDI+sAwDjhY7jBklSSoKBHp7vCaTyVJMoSCq2nuDTBRLV06yv2yrxGbMmFHUsbLtl+345eSuu+7yfp8zZw5OOOGEvPct9bkr9fiEEELy47VASwWH9BxJRQBw5OGzweVYTfbvdVvzDqxYlg3btrFzbwfufeRV7NzbmfO1c4ap9VmkJ4Hn/7UBv/7dC4j0JPLez2075ib4xEehhVhmUlHhrc9c4aATWIzGFFi2DUXREU9QtSJCCCGk1ChuNHzOOOOMjG2vvvpqwcd57bXX0h43NTVh+fLlRc9rMIIg4HOf+1zG9g0bNhR8rI6ODrS0tKRtq6ioSGtb19/kyZOxZMmStG39z0E+/vOf/2S088n2mfR3yimnZPxvaTg+t3zHJ4SQ0eC1QPNLYFOVgDoicext7kKkJ1FU1Wev9ZlvaK3P+s/RbTM2GhWaxwoxdV4Nw4JhWYANr009IYQQQsaXkiQVhUIh7/f+JXjLUWdn7828wVYSjaZsgZSmpqaijlVRUZHRRmvnzp2Ix+NFHW+krV27Fm+99Zb3+Otf/3pBFZqG89z1LyWe6/iuWCyGXbvSK04EAoG0/78YyfEJIYTkr39SkWZkb38GABUhHw45IHsbzs5IHBu35reqeu1723HD7c/ggUf/g9ggCS4Hz5uc1zFz2dvchb/9fR1uf+AlvPnudqiagTXrP857f3elXlJNJfoo+pBavQ1G0w0YhgkbtrcSbihJRQG/BIFnYZp2b7WirvKqLkAIIYRMRBQ3Gj7HH398Rgu5Z599tqBjtLW1Yc2aNWnbzjzzzBFvoXXOOedkbHv99dcLPs7atWszth1zzDHguIFvMp911llpj3fv3o3333+/oLGffvrptMccx+H0008fdL+Kigp8+tOfTtv2/PPPp1XxKmb8hQsXDphMRQghoykUkMCwDCSBx5SmalRX+sHzLEzLRqQniT3NXejoikHPs6rycLY+cwW9pCLneKpmjGjcYyxhWRaC4Nxi1FMxGkXNHTcjhBBCyNhVkqSi6dOne78XU3J5tLlzZBgGU6ZMKfFsen38ceZNv9ra2qKP17+XvG3b2L59e9HHG0l9qxTJsoyLLrqooP2H89xlK5ed7fiu7du3w7bTq1UM5XMrdHxCCCH589qf8c5PQzcz/hve1yeOmI1c91ZeXbtl0GpF3dEk/vFqftXmTlxxIOpqiktIBYCnVr+L+/76b2zcuh9939L6D3cjqeRXcUgSBTAMYBo2tFSQL5HnvsVobY8CAFTVgG0BHMt4remKwTAMwqn2cT2xJCzbRlLRR6WNGyGEEEJyo7jR8BFFEVdeeWXatmeeeQYdHR15H+Phhx9Oq7YjCAJWrVqV9/5vvPEGfvGLX+C+++4rqD3cCSe
ckPa/BQB47LHHMir/DOavf/1rxrbzzz9/0P0uvfRSVFVVpW174IEH8h5X13X8+c9/Ttt2wQUXZF0cls33v//9tMdtbW147rnn8h7/3XffzUiC6n9MQggpJZ7nMKWxEqLIg2UYhIM+TG6sRF1NEJLEw7aBaFzF/pbIoIuugPTWZ1JqAVIwIA1pjn6fCIZlwHOc1wI+35jJRODGZFTdbYFGlYoIIYSQ8agkSUWzZs3yfs+2Wqjc9J3jnDlzSjiTdD09PRnbwuFw0cfLVikn2xil1tXVhUceecR7fM4556C6urqgY0Sj0YxtxZ67Qs/bRP3cCCFkLBJFJwgl8BwYFrBsp6xzLlUVARyUo3pQW2cMW3a0ZH3O9dwrHww6J58k4NxTl2LZolmDvnYgU5qqsm7XdBNvb9iV9bn+WJbpbYGWCqolEiMTXGvviiGeUGHZNjq7nUqKbvnxoQj6JWclpGEjlqpW1N5VnpUaCSGEkImC4kbDa9WqVWkLyVRVxa233prXvrqu4+abb07bdskll6R9RgO5/vrrceSRR+IHP/gBvvrVr2LRokUZrchyEQQBv/jFL9K27du3Dw899FBe+wPA/v37MxJ7DjrooIwqRNlUVFRkJOHce++9eVfP+v3vf4/W1lbvsSRJ+MlPfpLXvgDwyU9+EieddFLatl/+8pd579//3C1cuDCvZCpCCBlNfp+EmVNrMXVSFfx+EQwYBHwSmuoq0FgXhizzsGygvTOGjq5YzsVaiqr3xgp8znEkiR/SQiTASRgO+JzYgz9VrYhaoPVyF+NpqUpFdG4IIYSQ8WloV1RFOuKII7yVPc8991zGH7nlpu8qoCOOOKKEM0mXbXVXIBAo+njZ9i1kBdlouf/++5FMJr3Hl19+eUH7q6oKXc+8uC323BV63sba59ba2oq2traC9tm6dWvGfEYz0al/275ybeNHCBkbdF2BZVrgGBOaaSAejw6YzLLk4Cl476PdQJY4l6ooUJVE1v02b2/Fpm37B5xLY10Yp59wMCpCvpzH6UtTlZyP502vhk/ivdLgfa15ZxsOW9AIjhs8/5xjTFimiljUgsTb6DBU+Ia2EDBDIqmhtd35HunqSUBJauAYBj5Jyus8DMYvMoioKroiOkTegqokIAv2kFqrEULKC10fknJXjn97lxLFjYZXMBjEtddemxY/uemmm3D++edj3rx5A+7705/+FDt37vQeV1ZW4sc//nFe427cuBHXXntt2rZt27bhe9/7Hv7whz/kdYxzzjkHt956a1rbs6uuugqf+cxnslZu7u/KK69M+28+x3H4zW9+k3frtm9961u4++67sWPHDgBAd3c3vvvd7+K3v/3tgPu1trbiRz/6Udq2K664AtOmTctrXNeNN96IF1980Ytj/etf/8If//hHfOlLXxpwv5dffjltQR7gfOYsW5L1pYQQMii/T4LfJ0FVdXR2J9ATS0KWBDRKFYj0xBHpURCNq1B1A3XVIQi8UzXINC10dSe8SkYcx3gtz4ba+swVDMiIxVXIsgj0KEgqGmzbHvE2oGNB/0pFmm7AsmywLJ0bQgghZDwpSVLR0UcfDcBpr/XBBx/g7bffxuGHH16KqQzqrbfeSisVvGLFihLOJl22ajs8X/xHmm3fbGOUkm3buPvuu73HRxxxBJYuXVrQMXK9p2LPXaHnbax9bnfeeWdGELBQb775Jpqbm4dpRsWNTwghw6U5j//ENlQy2NWcmezy3D/fBqs3ge+XrKMZFp76116oqpnzmPOnh3DEAhmdLVvRmd/i7gx7d6e3D5lWx+DdLcmM16lqEv98bQ1mTQ7mfWwNQE+qi8aWj4qbX6F2j0C+6o6I83P71gFfRggZ4+j6kJSbXbvyqxI4UVDcaPhddtlleOGFF/DYY48BAJLJJE499VSsXr0aU6dOzbrPgw8+iOuvv957zDAMfv/736OpqSmvMV9++WWYZub17fPPP1/Q3B988EEsWbIEnZ2dAIA9e/Zg5cq
VePLJJ3O2EjMMA9/97nczqhT98pe/xHHHHZf32D6fD4888gg+9alPQVWdG9a/+93vMHfu3JytxDo7O3HaaaelLdBatmwZrrvuurzHdR166KG46aabcMUVV3jbvva1r2HSpEk538e7776Ls88+O61t85VXXomTTz654PEJIWS0SZKApvoK1FUH0d4VQ3dPEpXhACRRQHtXDJpmYn9rN2oqA7BsG5HuBMxU9aJgUEJl2A+eZcGwDCpS7c6Hyq1UJIsCOJ6BadhQVH1YqiePdW5SkWnYMEwTPMdB1ejcEEIIIeNNSZanLFq0CE1NTV4md98ARbm54YYbvN/r6+uxbNmyEs4mXd9qPS6O44o+XrbklGxjlNJLL72EzZs3e48vu+yygo+R6z0Ve+4KPW8T8XMjhJCJ5uA5lVm3d8d0vL+1O2O7rlsI+LInmPIcgxWL6rB0YQ04bnhXes2bFgKXY/XYh9t70m5EEEIIIYSMFoobjYyHHnooLRFl8+bNWLRoEW6++Wbs3bsXAGCaJtauXYuvfOUr+MpXvgLLclr/MgyDO+64A6effnre4+W6liz0GnPWrFl47rnnUFXV27537dq1WLhwIX72s59h8+bN0HUdlmWhubkZDzzwAJYuXYpbbrnFez3LsvjZz36GVatWFTQ24CQE/eUvf4Ek9Zbj/MEPfoCVK1filVdegaY5lT/b2trwu9/9DosWLcKaNWu81x500EF4+umnIYrF3WD99re/jR/84Afe40Qigc985jO44oor8MEHH3jnc9u2bfjJT36Co446Cu3t7d7rzz//fPzf//1fUWMTQkip8DyHxroKNNZXgGEZ+GQRTfUVkCUelmWjrTOGjq44TMuGIHBorA+jtjIInmUhijymTaoGzxcfc+8/FznV+sxNlsm3zZdpWtjXGoFh5F5ENpaxLANRcM6zpjvvUVGpBRohhBAy3pQkqYhhGG/FjG3bePLJJ/Hss8+WYioDeuaZZ/Dkk0+CYRhvzuVU0tLny8y0z7YCLF/Z9s02Rindeeed3u+VlZX44he/WPAxcr2nYs9doedtIn5uhBAy0VSHRRwwI5yxfVqjHwfOzNwe8PH4zPJGLDuoBpLYe3lWUyHi5E80Ycak4ttkDkSWOMyekr0aUVePhuYOJetzhBBCCCEjieJGI8Pn8+Hvf/87vvGNb3jz7OzsxKpVqzBlyhRIkgRRFLF06VI8+OCD3n61tbV4/PHHC17Y9alPfSpru60TTzyx4LkvWbIEb731VlrSVmdnJ66++mrMnz8fkiRBkiQ0NTXhggsuwDvvvOO9bvr06Xjqqafwwx/+sOBxXaeddhpefvllzJgxw9v2zDPP4JhjjoEsy5BlGfX19bjkkkuwe/du7zVnnXUW/v3vf6O2trbosQHg5z//Oe69914Eg861u2EY+M1vfoODDz4YgiBAEATMmTMH1113HRIJp2KqIAi47rrr8OCDD1LbM0LImFUR8mH65BqIIgee49BQF0ZFyGlrxrJAdaUfkxoqIIsCWJZBfW0IM6bUDHs784DfSSz1pZKLkkpmK3kAiCVUbPq4GS/+eyMeePR13HTP8/jdn1/DvtbMBWbjhVutSNOcZCJFNUo5HUIIIYSMgJK0PwOASy65BLfeeisAZ4XSBRdcgDfffBMzZ84s1ZTSbNu2DRdeeCEYhoFt22BZNq33fDkIhUIZ2wyj+Au2bPtmG6NU9u3bhyeffNJ7fOGFF8Lv9xd8nFzvyTCMoioGFXrextrndvnll+MLX/hCQfts3bo1bfXi0qVLccABBwzbnAYTj8fTWlosXboUgcDI3JAnhIx/mmZgX0sEhmWjpc0JAjXVVQzaH376LBvvbdqHzdtbwXEsDpk/CXOm1w54o2nWXOD4T1rY19oNUeDQUBsa0o0pTVXSWp5NnjoXoiSnvaaiNoGdf3kdyLJgfE+ngKOWHzzoOImkhq6eBHieRV11CCzDoKoyMGCp8XhSQ1t7Zv8yQeDhkwXYAKL
RJGzY6OiKQ9UMCDyH2qrgoOe+WNG4ip5YEhzHOuceDCY1VEIUS3bJTAgZJnR9SMrdxo0bSz2FskNxo5EhiiJuv/12fOUrX8HPf/5zPPvss16lHfenq6GhAV/96lexatUq1NTUFDzWQQcdhGuvvRbXXHONV01nxowZuOmmm4qa+6xZs/Cf//wHf/7zn3H77bfj9ddf945r23ZafITjOBxxxBG4+OKLcfHFF0MQhn5z+cgjj8SGDRtw++2346677sKOHTu8sd3WaIBTFenYY4/Fd7/7XXzmM58Z8riuSy65BCeddBJ+8Ytf4I9//CMikQiAzMVmfr8fn//85/HDH/5wVGMxhBAyUiSRx/TJtWhp70FPNImqigB8sgCedxKNACAUlFFfExq26kT9Bf0SOjpj8EkiwAC6YUHXTQhC73iWZePOB1+GqmXG6/fs78K0SdUjMrdSE0UOSACqRpWKCCGEkPGqZHdIFi5ciJUrV+Kpp54CwzDo6OjAcccdh9WrV2P27NmlmhYAJzB0/PHHo6OjA4CzQu5zn/sc5s+fX9J59eeuTuorHo8XfbxYLJbXGKVy7733egEihmHw9a9/vajjSJIEQRCg6+kXt/F4PK2Udb4KPW9j7XOrr69HfX39kI4RDAYRDmdW5hgtgUCgpOMTQsY2y7LR0a1BAiBIKkzDBsuLkMTBb0wsO2wulh02t6DxJABzR+j7V5RkSHJ6Qm5Tox/zZzVh8/aWjNfv2NuFnriJupqBk1UFUUYsacG0bCRUoKbSj4Rqo7ZG9kqD96WoOpo7kpBkP2IJFaZlwScJ3uo2N/4kyX50ROLQTQ68wKOxvgLCCAUInffhQ1K1YVo2DItH0C9BNRjU1tJ3CCHjDV0fknJTTn97lwuKG42spUuX4rHHHkNPTw/WrFmDzZs3IxKJgOd5NDQ04JBDDsGiRYuGXOHmRz/6EU466SS8+uqrCIfDOPvss4e0EIphGJx77rk499xz0d7ejrVr12Lr1q3o6emBKIqoq6tDY2Mjli9fjsrKyiHNPRu/34/vfe97+N73vocNGzZg/fr12LdvH1RVRTgcxqxZs7B8+fIhVybKZcqUKbjttttw8803Y+3atdiwYQPa2toAAFVVVViwYAGWLVtGFawJIeMOyzJoqq+ATxbQ2hGFLDmxBkHg0FAb9ioJjRRZEsDxLGBYkCUeimIgoWioEHr/e8uyzsKk7bvbM/bf29I1ovMrJSm1EEtL3bvRNAOWZY/YgjBCCCGEjL6SLru+4YYb8Pe//x2maYJhGOzcuRNLlizB/fffj9NOO60kc3riiSdw8cUXo6ury1ttJkkSfvnLX5ZkPgPJFgSPRqNFHy/bvuVSqcgwDNx7773e4+OOO25IwbpQKITOzs60bdFoFNXVha8WKPS8TaTPjRBCxgOWZSCKHDTNhMBzMA0Dmm7mlVQ0Viw/bFbWpCIAeOOdj3Hqpw8dcH+WZVFbHURLexTRmAJJdBJy9rV0Y/qUGvBc780owzCxp7kLtmUjqWho74oBNtAFgOMYyJLg/VM1A9GY04Kttjo4oglFzvtgEA750NWdQHc0gaBfQiyuQlV1SMNcOp0QQgghg6O40cgLh8M44YQTcMIJJ4zYGIsXL8bixYuH/bi1tbU4+eSTh/24+Vq4cCEWLlxYkrF5nseRRx6JI488siTjE0JIqVSG/fD7RPTEFAg8h1BAHrXklaBfQndPEn5ZhKIYSCpaRoXmKY1VWZOK9uzvgm3bZd0mtVgCz4NhANOwYRgmeJ6Dourw+zIXmRFCCCFkbCppQ+2DDz4YV1xxhVeqmGEYRCIRnHHGGfjCF76AzZs3j9pcNm3ahLPOOgtnnnlmWmCIYRhcffXVJV8Fl022kt/t7ZkXrPnqvy/DMGVTVvzJJ5/E3r17vcdDLSk+nOfOXRHW16xZs3K+fsaMGcM2djHjE0IIKZybQORW0tF1c6CXjznTJlVjUn1F1ufe37wXsYSa9bm+fLKIyrATTOu
MxKAbJgzDRHNrt/cay7Kxp7kLpmFBM0y0dUYBGxAEFiwDmKaNeEJDR1cce5sjaO90qvFVhn3wZ6l4NBJCAQksy0DXLcSTzvvu6kmMytiEEEIISUdxI0IIIYT0Jwo8aquCqAj5RrUaTtDvtJN34xOqZsCyrLTXTGmqyrpvLKGiO5oc2QmWCMsy3iIwNVWtSNWoBRohhBAynpQ0qQgArr/+ehx22GHeYzco8+ijj2LhwoX4/Oc/j6effjqjP/hwME0TTz/9NM4880wcdNBBeOyxx7yAkPvz2GOPxdVXXz3sYw+HbKuh9u3bV9Sxuru7kUik3zCbNm1a2ZRgv+uuu7zfJ0+ePOQVicN57vbv35/X8V2hUAjTpk1L2xaLxYquVlTo+IQQQgonpko5u0lFmm6UcjrDjmEYLD8se0KqaVrYuqM1r+NUhHyQJR6WBbR1RmHZNuIJFZ0Rp83n/tYIVNWAYVlobe+BZQGyxGNSfSWmTqpGY10YlWHnGO7ivXBQRmXYP8Cow4tlWYSDTqAwFneSipKKNmrjE0IIISQdxY0IIYQQUg78PhEMy4DnOQgCB9sGEkp68szkhsqc++9pHv8t0HTNiZclFUoqIoQQQsaTkrY/AwBJkvDoo49i2bJlXsUVNzhjmiYef/xxPP744wiFQjjuuOOwYsUKLF68GAceeCBqamoKGquzsxMbNmzAW2+9hVdffRUvvfSSl8jRd9Wba+7cufjzn/88TO90+B144IEZ23bs2FHUsbLtl+34pbB582a8+OKL3uP/+q//As8P7X+6pT53Bx54IHbt2pVxnIMPPnhUxieEEFIYNzgiCM7Kq/FWqQgADpjThIrXP8q6cm5/WzcWYeqgx2AYBrXVIexviUDTTHR1J1BTGUBbZxSKqiMWV2HDRntHFIZhQeBZ1FWHvOsvt+0Z4Fyb2TZGddWhyycLiPQkveQxTTNhmhY4ruT5+IQQQsiEQ3EjQgghhJQDlmXgl0XEEyr8soBu3URS0RD0S95rfLKI2uqgV3m5rz37u3DQvMmjOeVR4yzCU6FoVKmIEEIIGY9KnlQEANOnT8c//vEPHH/88ejs7ATQG6RxgzY9PT144okn8MQTT3j7hUIhTJ06FTU1NaiuroYkSRBFEQzDQNM0KIqCrq4utLe3Y8+ePejp6Ukb1z123/Hc7TNmzMDq1asLDkCNpoaGBjQ2NqK5udnbtmnTpqJ683744YcZ2xYtWjTUKQ6Lu+++2/useJ7HpZdeOuRjZntvGzduLOpYxZy7RYsW4bnnnssYv9CkIsuysGnTprRtTU1NqK+vL+g4hBBCBuYlFfEcwACmZcMwLfDjKMmEZVnMnl6Htz/YlfHc/pZI3sfhORa11UG0tEcRjSmQRB5Bv4RoTAEAtHfGoKgGWJZBfU04Z6IOwzAo8HJm2Ai8UynJNG0Yhgme56BqOvw+afCdCSGEEDLsKG5ECCGEkHIQ8EuIJ1T4ZBHdUQVJRc+4HzOloSprUtHecVypSBTTF+HR4ixCCCFkfCmLpCIAOOSQQ/Diiy/ilFNOwd69e72LsP5Bm756enqwYcOGQRNo+u/nyrafbds49NBD8fe//x2NjY2Fvo1Rd+qpp+Lee+/1HsfjcWzZsgXz5s0r6DjvvPNO1mOXWjKZxP333+89Pv3009HU1DTk4x5zzDEIhUJpLcfWr19f1LH67zdv3rxBz/+pp56KG2+8MeM4Z599dkFjf/TRR0gm0ytKlMPnRggh440o8GBYBqwFCDwLXbeg6wZ4Tiz11IbVpPpKvI3MpKKW9p6CgkE+WURl2IdITxKdkRgkkYfAc4hEE4gnNDAMUF8T8io/lRuWZSAIHDTNhKob4HkOSYWSigghhJBSorgRIYQQQkot6JfQCqfSMscxME0bmm5AEgXvNVOaqrB+4+6MfZvbe6AbprNgbZwRBVqcRQghhIxnZZUmfMghh+Ctt97CkUcemTWg46xYT/8HuO0xcv8baN++bNvG2WefjX/
9619jJjB0xhlnZGx79dVXCz7Oa6+9lva4qakJy5cvL3pew+WRRx5BV1dvBv9ll102LMeVJAknn3xy2rZ169ZlJOgMZvPmzWhtbU3blu0z6W/58uUZ/xsbjs8t3/EJIYQUzq1W5JR0BrRx2AKtqb4i63bDtNDWGc36XC4VIR9kiYdlAW2dUcQSKiLdzvdsdWXAa3NWrtzPW/VKdxulnA4hhBBCQHEjQgghhJSWIHBeVR4vPqSlx4emNFZl3dey7IIqQY8lDMN4C8fUVCt5RaU4CiGEEDJelFVSEeC09Hr11Vfx85//HKIoZgSJ8gn65BNE6n/MyspK3HfffXjkkUcQCoVG580Og+OPPx6VlZVp25599tmCjtHW1oY1a9akbTvzzDMLbqE2Eu666y7v9wULFuC4444btmOfddZZaY81TcPq1asLOsbTTz896HGzYVkWZ555Ztq2N954wyvjXuz4VVVVOP744ws6BiGEkPzIqVVnopdsMv76w9dWh3K2dNvf2l3QsRiGQW11CBzLQNNMr/R3OCQjFJCHPNeRJnnJY04QLKmMv8+bEEIIGYsobkQIIYSQUnKrEompJBo3buCqqQp6C5X629McGdG5DSaeUPGPVzfAMIZ/oZwXR9HcpCKKoxBCCCHjRdklFQFOwsX3v/99bNq0CV/+8pfBMEzWYBAw+GqzXIEkd19BEPDNb34TW7ZswYUXXliS9zsUoijiyiuvTNv2zDPPoKOjI+9jPPzww7Asy3ssCAJWrVqV9/5vvPEGfvGLX+C+++5DLJbZK7hY69atw9q1a73Hw1WlyHXGGWdg7ty5adseeOCBgo7x4IMPpj0+7rjjsHjx4rz2XbVqFQSht0qDYRj405/+lPfYbW1teP7559O2XXnllWnHJIQQMnxk2fnvq1fBZhyuuOI5FvW14azPFZpU1Hu8EHjeueQMB2VUhf1DmuNocZPH3GCYYZgjEnQjhBBCSOEobkQIIYSQUvEqWYvZK1mzLIPJjZVZ993TXNii4uGkagYeeepNrFm/HY88vXbYKzJ7i/D0sZdUZNs2YnEVSUUr9VQIIYSQslSWSUWuadOm4Q9/+AO2bduG73//+2hsbBy2FWdTp07FNddcg127duHWW29FdXV1Kd/qkKxatQp1dXXeY1VVceutt+a1r67ruPnmm9O2XXLJJZg1a1Ze+19//fU48sgj8YMf/ABf/epXsWjRIrS0tOQ/+QHceeed3u9+vx8XXHDBsBzXxfM8rr/++rRtjz32GLZs2ZLX/v/4xz+wfv36tG0/+9nP8h5/9uzZ+OpXv5q27eabb4Zh5Hcxf/PNN0PTei9y6+rqMhLMCCGEDB9fn6QihgVMy4Y+gVqg7W+NFHU8SRQwpbEK0ydXo7oyUBaVEPMh8BxYBrAsQEslE1HpbkIIIaS8UNyIEEIIIaPNbecupCrz6LqRUeVwSmP264Y9zV1ZW7iONMMw8Zdn1mJfasHY9t3t+ONj/0EiOXxJNGK/SkW6bsI0rYF2KQuGYWLHng7sbe7Crr2daO8avoXzhBBCyHhR1klFrunTp+PnP/859u7dizVr1uB///d/cdJJJ6G2tjbvFWe1tbX47Gc/i+uuuw5r167Fjh078L//+7+or68v9dsbsmAwiGuvvTZt20033YTNmzcPuu9Pf/pT7Ny503tcWVmJH//4x3mNu3Hjxoxxt23bhu9973t57T+QSCSCRx55xHt83nnnoaIi+03OoTj77LOxfPly77Fpmrj88ssHvbBPJpP49re/nbbtnHPOwbJlywoa/5prrklrX7dt2zbceOONg+63cePGjGSw6667DsFgsKDxCSGE5E8UeHA8CwaMtypNGYct0CblSCpq6YjCGEIwaKwkE7kYhsmoVqSotGKNEEIIKUcUNyKEEELIaPEqFfEcGBawbMAw0uMlU3JUKoonNHRHkyM9xTSWZeOxf7yDHXvSu1vsa+3GH/6/19ETU9K227aNnXs7sO6DnSiEKHB
gUouzdG9xVnnHzVTNwM69ndA0A0aqm0dHV2xI8S9CCCFkPMre2LWMLVmyBEuWLPEed3d3Y9euXdi/fz/i8TiSSeeCzOfzIRAIoKmpCdOnT0c4nL2Vx3hx2WWX4YUXXsBjjz0GwEl6OfXUU7F69WpMnTo16z4PPvhgWqUehmHw+9//Hk1NTXmN+fLLL8M0Mys09G/JVYw//OEPSCQS3uPLL798yMfMhmEYPPzww1i8eDE6O53So6tXr8Y3v/lN3HbbbWDZzLw7RVFwzjnn4KOPPvK2zZo1C3fddVfB4zc1NeH3v/89zjzzTC+R6Sc/+QlmzZqF8847L+s+O3fuxGmnnQZF6b3YP+OMM/D1r3+94PEJIYQUxi+LiMYUyCIPRTGgqgZCgVLPang11mVPKjJNC20d0ZyVjMYjUeShqAZUzUDQLyFZ5sEwQgghhFDciBBCCCEji+c5cBwL07QgCjxU1YCmGxAEznvN5MaqnPvv2d+FylFsDf/GOx/jo23NWZ9r74rhuX9txJJ5PJKqiY/3xvDCW/9BJKqA51gcOKcJPlnMaxyGYZzzoaXOB89BUXUE/NJwvp1hk1Q07G2OwDQtaIaJ1vYe1NWEIAk8YnFlVD8jQgghpNyNuaSi/ioqKnDwwQfj4IMPLvVUSu6hhx7CypUr8dJLLwEANm/ejEWLFuFHP/oRzj77bEyePBmmaeLtt9/GbbfdhgcffNDbl2EY3HHHHTj99NPzHi9XNZ/hKN959913e78vW7YMhx122JCPmcvMmTPx7LPP4jOf+Qy6u53yn3feeSfWr1+Pq6++Gscccwz8fj+6u7vx/PPP49prr8WHH37o7T9lyhT84x//QFVV7j8UBnL66afjjjvuwDe+8Q3Ytg3LsnD++efj+eefxze/+U0cfvjh4DgOe/bswV//+lfccMMNXgIUABx77LF46KGHhnYSCCGE5MUnC4jGFEiiAECBOg4rFdVVh8BzrLcqqzLsQ2NdBSbVVyLgzy+QNF5Ibulu3alUpFL7M0IIIWTMobgRIYQQQoabJPFIJDQIAgdVNbzKPC5ZElBXHURbZ2Yrrd3NXTho/uRRmadhmHjtrS05n68M+3DMsjl49sU3sac1AcsCJMkHhnXiQh9s3oclh8zIezxR5KBqzuKsgE9CUinPuFksrmJfawS2ZUPRdLR1RGGaNhIJFVIFj54YJRURQgghfY35pKLR0NPT4yXbMAyDF198sbQTysHn8+Hvf/87Vq1ahTvvvBO2baOzsxOrVq3CqlWrIIoiDMOAZaWXbqytrcXvfvc7nHbaaQWN96lPfQosy2Yc78QTTxzS+3jppZfSqgCNVJWivpYtW4Y33ngD5557Lt59910AwOuvv45TTjkFACBJElRVzdjvmGOOwcMPP5x3dadcLrvsMkyePBmXXHIJ2traAAAPPPAAHnjgAbAsC57noWnpLVcYhsHll1+OX//61xDFiXWTlxBCSsVdneW2xdINC4ZpgefGREfZvHAci5OPOQihgIym+kr4fRP3O8b9nFXNgA3bWb2mGxAFuoQmhBBCSK+xEjcihBBCyPCQRCepSOSd6kSqlrkIaXJjVdakoj37u0Z8fq7tezqyzg0AAn4R531uGfwig7aICitLx693NuzE4oOn593SXhYFRKE6yUQVQELRYNt23vuPhu5oEs1t3YDtVCtq7YzCTr33eFJDVUUAyaQGwzDB89zAByOEEEImiPFzB2wE6bqOf/7zn96/ciaKIm6//Xa88cYbOP3009OSTTRNS0sAamhowFVXXYWPPvqo4IQiADjooINw7bXXpl0QzpgxAzfddNOQ3kPfNmI1NTU4++yzh3S8fC1YsABvvfUW/t//+39YuHBh2nP9E4qWLl2KRx55BC+99NKQE4pcp512Gj766CNcddVVaGho8LZblpWWUCSKIk4//XT85z//we23304JRYQQMookkQfLMuBYFqKYChyNw5ZYiw6chtnT6yd0QhEACDwHjmU
AuzdASNWKCCGEENLfWIobEUIIIWTonArW8BYd9a9UBABTcrRAa2nv9ioij7RNH2dveybwHM47bRlqKoNgWQZzpgSzvq6lPYp9rZG8x5Mk57zougnDsmBbdllVK+qMxNHc6iQUxRIqWjqchCKfLIBlGRiGBSUV5+uJKSWeLSGEEFI+aJn1OLV06VI89thj6OnpwZo1a7B582ZEIhHwPI+GhgYccsghWLRoEVh2aHllP/rRj3DSSSfh1VdfRTgcxtlnn41QKDSkY/71r38d0v5DwfM8Lr30Ulx66aXYtm0b1q1bh927dyOZTCIQCGDGjBlYunQpJk8emfKk1dXV+OlPf4rrr78e69evx3vvvYeWlhYYhoHKykrMmzcPy5YtQzgcHpHxCSGEDIxhGMiygERCgyQK0DTTKelcpv3hydCJIo+kokPTTMiigKSqIRSUSz0tQgghhBBCCCGElIicqmwsCM6CM8OwYFlW2v2WXElFtg3sb+3G9Mk1IzpHy7KxeXv2pKIFsxvRWFfhPZ4zJYj3t3Znfe07H+zC5Ibs76U/nmMhChw03YSq6uB9EhKKVhaL1mJxFW0dUQBAdyyJrkgCgFOxqbYqiI6uOGIJFfGkBlkSEI0rqK4MlHLKhBBCSNmgpKJxLhwO44QTTsAJJ5wwYmMsXrwYixcvHrHjl8rs2bMxe/bskozNsiwOP/xwHH744SUZnxBCSG4+WUQioUGWeERjgKKVz4orMvwkN6kotYpQKaMVdoQQQgghhBBCCBl9osgDDMCxLHiehWFY0HQTstSbVFRTFYQsCV7lm7727O8a8aSivS1diCe0rM/Nn9WY9jjoF9BUK2N/e2Z1ng1b9uGEFQshifndTpQlAZpuQlENBHwSEkkNyC8naUR1dscBpCcUhUMyqiucxKGAX0QsoSKRVFFd6YeSigW51agIIYSQiYzan+Whra2t1FMghBBCSJnwy04pZzeYomlmWntRMr6Iqc9ZTSWPKZoB27ZLOSVCCCGElBmKGxFCCCETC8MwXrKJwDvVivq3NGNZBpMbK7Puv3Nfx4jODwA2fdySdTvPsZg1rS5j+9yp2TtQaLqJDZv35j2unGqBpqhOQlNS0WBZpY2jWJaNpOLMJxZ3Eqcqwz4voQhw5s2xDEzT9haUUQs0QgghxEFJRTnouo6//vWvOOWUU3DIIYeAYZhST4kQQgghZUCWRIABeI4DzzuXUqpmDLIXGaukVJBQ1y2YlgXbsqHR500IIYRMeBQ3IoQQQiY2d7GZJDpJRbpuZrwmVwu0nXs6RjSWZNs2Nn2cvfXZzKm1WasOTWnwQxaz3zJ8Z8OuvMeWpd44imGagN2bYFQqiqoBNmCYJnTdWRgY7tfanmEY+P1Om7Z40plvNJYc3YkSQgghZYrq9vWzdu1a3H///XjkkUcQiUQAOBdgFBwihBBCCOCsNJMlAYqiQ5YExAwVqmbAJ5e+PzwZfhzXW8pc1w1wkoikqkNKrbwjhBBCyMRCcSNCCCGEAE5lm2hMgZBajKRlSSqaNa0Or6zZ7D2e3FCJg+ZPxoFzJ+XdTqwY7V0xdEbiWZ/r3/rMxbEMZk8JYuvezHZt+1q70dzWjca6ikHHZlkWkshD1QwoqoGgn0M8ocHvkwp7E8PIbQOnqE4ilyTyYNnMBKqAT0I05rRAs6sC0DQTKsWACCGEEEoqAoDm5mY8+OCD+MMf/oCNGzcCQFpbCwoMEUIIIaQvn+wkFUkij1hchaJmBlzI+CEJPAxDg6qbkCVAValSESGEEDKRUNyIEEIIIf25SUGCV+HYyEg0ntxQidnT6jClqQoHzZuM6spA1mMNt805Wp8xDDB3ZkPO/eZMDWHr3s6sz73z4W6c/KnBk4oAp1qRk1SkI+iXkFBKW6nIHd+N37nVlPqTJcFbWJZQNARkCT0xBXWUVEQIIWSCm7BJRbqu4/HHH8f999+PF154AaZpUkCIEEIIIXnxySK6kPBWKmlaZuCIjB+iyCOe1Ly
2Z8kSl+0mhBBCyMijuBEhhBBCBuImFYk8B4YFLAswDAuCwHmvYRgG531u2ajPLVfrsymNVQj6c1cMCgcETG2qxJ6Wnozn3v9oDz79iQMg8FyWPdPJkoDuqOIl8SiKDtO0wHHZ26uNJNO0oCjOPNx4zkDVxv0+ET1RBYlEn6SimtCozJUQQggpVxMuqShXmWpX36CQu50CRYQQQgjpyw0+iDwHjmNgmjY03YAkjr+VS7ZtozuaxL6WCPa3dmN/WzcOnj8Zhx4wtdRTGzVuoFBNJRWpmgHLssGydI1ICCGEjDcUNyKEEEJIPnieA8ezMA0LosBDVQ1oupGWVFQK0ZiCvS2RrM/lan33OjMEAAEAAElEQVTW16EHTM6aVKRqBjZu3Y9DFkwZ9BiSKIBhnCQr3TAh8BySio5gYPRboLlVijTDhGnYYBlAFHLfGg34JCepSNFgWhZgAElFGzARiRBCCBnvJkRSUSFlqvsGhCgoRAghhJBseI6FKHLQNBOSyCOR1KGo4y+p6IXXPsS7H+5Gsl97t4qgb0IlFYmpgKBhWDAsCzzLQtX0sgsoKaqOts4obMuGJAnwSQJkWRgwWEYIIYQQihsRQgghpDiSyCNhaBAEzksqCmD0E2f62rw9e+szAJg3K3frM9f8mQ146T9bM2JBALD+w115JRWxLANJ5KGoBpKKDiHIIZ5US5NUlEi1PkslF0mSMOAiMUnkIQgsdN1yEqH8TrWicosBEUIIIaNp3N5hKLRMtduyJFugKBwOo6cnMzObEEIIIROXTxahaUlIkoBEUoeq6QB8JZuPaVowTBMsy4Jj2WGpomNZdtYg0v62yJCPPZawLOsFlDTNAC+LUNTySiqybRt7myMwDBMAkFR0RFLPcRwLWRLgkwWEg76Sr5okhBBCygHFjQghhBAyVJLII5HQIPI8ABWabpZ6Sjlbn9VVB1FTGRx0f55ncdD8yVj73o6M53bu7URHJJbXcWRJgKIaqXiZjESyNK3kE4oKAF4rNlkafEFgwCchoicRT6gI+iVE4wrqa0KUUE4IIWTCGndJRcWWqXa3u9v8fj9OO+00nHfeeTjiiCMwefLkUXoHhBBCCBkLfLKI7p5kb2ss1SjZXBRVR2tHFJbVe83DsQyA9ISgvtdE+Wiqr8i6vbUj6pWvnigkQYCuq9A0HX5ZhFLCzzubpKLDMEwYloWuSBySyEMUeUgiD9O0EE+oiCdUdETimD65xvvfLSGEEDLRUNyIEEIIIcNFTlWsdisc60Zpk4pUTceOPe1Zn8un9ZnrsIXTsiYVAcD6Dbtx/CcOGPQYTvJO0kvm0TQDhmmB59i85zFUum5C00zYsAtPKupJIqnqMFIt0BJJDQF/aatQEUIIIaUyLu4mFFOmuu92dxvP8zjhhBNw3nnn4fTTT0cgEAAAdHR0jPh7IIQQQsjY4pOdIIQk8mBYwLRs6LpZkiowkZ4ELMsGxzOwLBu25czHMtODWdG4CtkXyPu4k+ors263baClvQdTGquGMu0xRRJ5xBIqVM05p4pamhV2uSRSZbxVVUc8oSGeKu8Nxpm7KPAI+EXIooCu7jga67InjBFCCCHjEcWNCCGEEDIS3AU7Qp+26aZpgRvFxJm+tu5ohWllX1BWSFJRQ20Yk+orsK+1O+O5dz/ajWOWzx/0PUoiD5YBTNOGphsQBR7JpIZQUM57HkPlxUo0A5bltGUT84jbCQIHUeSgaSYSCQ3hoIxoXKGkIkIIIRPWmE0qKrZMdd/n3dcfddRROO+883D22WejtrZ2dN4AIYQQQsY0UeDB8SxMw3L6xCsGFE0f9aQiw7S8qjlNtRXgeQ6m5QSxEok4mqO9r40nVNT2uyYaSHVlAKLAZS3fvb+1e0IlFYmi87lqunOuNc0saaCwv0TCKefttqsTeBaWZcO0bKiqAVU1YBgm5FqhZCXHCSGEkNFEcSNCCCGEjDRR5AEG4FgWPM/CMCzohlmyWMGmj1uybg8F5IIXFy1
aOA37Wt/P2B5PaNiyoxULZg+cpMQwDCRJQFLRkVR1iAKPxCgnFcUT/Vuf8XnHxAI+CZqWQEJRnaSimIKG2jC1QCOEEDIhjbmkoqGWqe4bFLrmmmtw4YUXYsaMGaMzeUIIIYSMK35ZRDSmQE4lFamqgVD+hYCGRTKVICJJPPhUOzKOZcGxLOx+JZ0t24aqGXmVegacFVxN9RXYubcz47n9rZGhTXyMEQUeTGqFnWGY4HkOqqbD7yv9KjXTtLxkIkVxflZVBuCXReiGiaSioTOSgKrpsOFU1CpVVS1CCCFkpFHciBBCCCGjhWEYiAIPTTMgChwMw4Km5x93UVQdH23bD003sfTQmUOai2Fa2LqzNetz82Y1gGULS4Y5aN4kvPDqh1lbuq3/cNegSUWA02osqehQFB0VQR/iSbWgOQyVW6nITSrySWLe+wZ8Irq6E1AUA4ZpggeHWEJFKDB6SVGEEEJIuRgTSUXFlKnO1u8+EAggHo97r/3Wt76F6urqQcfnOA7Tpk2jDGRCCCGEpPHJAqIxBZIoAFCgavqoz8ENkLjt2IIBCbVVQRimhUg3i+39Xp9UtLyDWwDQWJc9qShbCezxjGEYCIJT+lrVDfA8h6RSHklFSUUHbEA3TBiGBYYBZNH5jAWegxD0IdKThGWlkspEAQlFQ4XgK/HMCSGEkOFBcSNCCCGElIosCV5SUSKpQ89S7bkvwzCxZUcrPti8F1t3tMIwLfgkAYcfNB38ECocGYaJJYfMwKaPm9HWGUt7rpDWZy5JFLBw7iSs37g747mtO1vRE0siHBw4ruDGn0qxyEnVDJiGBcu2vQrfspx/PIznOcgSD0U1EE9qqAj60BNNUlIRIYSQCalsk4qKLVOdLVC0YsUKXHTRRTjrrLMQDocLnktlZSV27NhR3BshhBBCyLjlk50VTqLoXFLphgXDtIYUBCqEaVreaqtAKrklHPRBkgRIAEwjM9CRVHRUFVDxelJ9Zdbt7Z1RaLoBUSify0nbtnHXH/8JnmfBcRwEngXPceA4FsGAhHkzGzB3RkPRx5dE3kkq0gwEfJJ37kvNXemXTFUpkkQ+YwWiLPFIJHUoqu4kFSU1VIQoqYgQQsjYRXEjQgghhJQDKRUTElLxETXVNj2bfa0R/PGxN6Bq6a9Jqjo+3tmKeUUk/7hkScCxRy7AsUcuQEckhk3bWrB5ezPaO2OYPrmmqGMuWjg1a1KRbQPvbtyDFUvmDri/G58oxSKnRCpWoqrOQiyeZyHwhSUzBfwSFNVALK6gIuhDLKF61asJIYSQiaR87gKlDLVMtbtt6tSpuOCCC3DhhRdi1qxZozR7QgghhEwkvckbLEQxVcVG1cH7R6d6TVLRYNuAIHAQeA4MyyAwyNiabhaU+NTUkD0DybaBlvYeTG0afPX+aDEtGx2ReM7n3/5gFz61bB4+uXReUceXBB5RqNBSwT93pVupuYEyRXWqVmWrRCVLgpdUhBCQSLXNI4QQQsYaihsRQgghpJy4SUXuoitDN71k5v7qqkM5j/PB5n1DSirqq6YyiKOOCOKoI2ZD1YyiF79NaaxCbXUQ7f0qHwHAex/twdGL5wxaqbFUi5zcuIe7IMyt6FyIgE9EVyQOXXcW9cmSgO5oEjVVwWGdKyGEEFLuyiKpaLjKVPt8Ppx++um46KKLcPzxx1PZaUIIIYSMKIZhIMsCEgkNkij0VrEZpaSieCpAEvA7FZMCPjGjQk1fosjDMJ1kpHzLNVeFAxAFDlqW8t37W7vLKqnIMAYuMQ4Ar63dgkUHTh20RHc2bkUqd9WhYZglX6FmGCY0zXnfbqDMl6Wct08SASSgagZs2M5+ZVZpihBCCMmF4kaEEEIIKVeS1Nt+nGEBywIMw8ra4kvgOSyY3Yh3N+7JeG7T9mZ0dcfREYljX0sE+1u7EelJoCLkw6EHTMX8WY0Dxnxyzk8s/u9+hmFw2IHT8MJrH3rbqisDOOzAaThkweS8rqV8kpi2yMmttjy
SbNv2koqSblJRAa3PXCzLIuCXEI2riCVUSioihBAyYZXsLoKmaXjiiSeGpUz1smXLcNFFF+Hcc88tqkw1IYQQQkixfLKIREKDLPGIxgBFG52WWJbV2/rMn2rDFhwkUUgSOBgmoCh63klFLMugqb4SO/d2ZDy3v7W7wFmPLMO0Bn2NadnYsr0VRxw8veDjCzzXW7ZbNyAJPBJJDeESthFzE8sUTYdlOZ9XtkQhQeDAcQxM04aq6pAlEYmkRklFhBBCyhbFjQghhBAyFvAcC45nYRoWRIGHqhrQdCNrUhEAHDR/ctakIsOwcPsDL2dsb+2IYsuOVlRV+LFs0SwcesCUUf1b/uD5k/GvNzdj3swGHLZwGqZNqi4oMdutpuwucjINC6pmDCnZaTCKqsOybBiW5S3EylbVOR9uUlE8qaKqwg/oTsVov290FhQSQggh5aBkdxFuvPFGXHvttQCKW13W1NSEL3/5y7joooswf/780Zo2IYQQQkgavyygA6mVXwygaaNTASah6KnWZ6wzFgMEB6mQJEkC4oqOpKrnLMWdzaT6iqxJRXuau4qa+0jJp1IRAGzdWVxSEcMwXtnupKJBEnjEEmpJk4oyynlLfM7PVZYExBNaqmS3k1RUGfaP2lwJIYSQQlDciBBCCCFjhSTySBhaWlJRANljNDMm1yLolxBLFFaxp6s7gede+QCvvLEJs6fX4dNHH5j3grGhCPglrPrqCUVXac61yGkkk4rcBVhqKlYiClzRLeBkSYAgsNB1C4mkU/k70pOkpCJCCCETSsmSimzbHnAVWbaAkCiKOO2003DhhRfipJNOAssWdxFACCGEEDJcfLIILhWYCPhExBMauqNJ1FWHRnTcZCpA4lYp8vt655GLJPDgOMMJ5GhG3qu0muorsm7vjMTRHU2iooRJNX0ZxuCVigBgx552GKZVVEDJK9utOGW73aSeUnHLhnutzyQx52t7k4qc9m2lnjshhBAyEIobEUIIIWSskEUBiYQGIZV4k62FvItlGSycNwlr1m8vaqykquODzfsQS6g4Z+WSUalaNNS27/0XOSUVzan6M0J6F2AZ3vhDEfTL6OpOIJZQEQrIiCVUmKY1aByOEEIIGS/Kpt9B31Vn/R8fdthhuOiii3Deeeehurp6tKdGCCGEEJITwzCorPCjozOGcMiHeEJDIqnBMMwhB11ysSwbSSWVVJSqTpTv6rT0QE5+QZVpk2tyPvfxrjYctnBaXscZaQG/iM8eezAMw4RhWOiOJbHu/Z0Zr9N0E3v2d2LGlNqCx5Bl55wpmgHTcpKYCjmXw0lVdZiGBcu28wqUuc8pmgHLtgHTgqrqkEowd0IIIaRQFDcihBBCSLlyq+6IqZZn+iCVlA+aN7nopCJXS1sPNN0cE23NfalYVFI1UAkn6aeQCtqF6Bszc3+6sZxiBf0SIj0JrwqVKPDoiSkjmhhFCCGElJOyuNrou8rMvZCoq6vDeef9/+zdeZgjZ3ku/Lv20t579+ybl5nxDvYMEDA2xmCMIZA4gUDAJBgScpIQSDD7SVgDgQRiAodAMBwMAQc+E45tAtjYgAHjBbyOxx7PvvTeau2qvb4/SqWZnpa6JbV6v3/XxWW6JFW9rRZYenS/z/M6/Mmf/AnOO++8RV4hERERUX0dySjSmSI0RYauyzAMB7mCga6O2LxczzBteD4gyyK0SvEo3mCoKKIH3ZRKRuOjrxIxHX3dCYxO5KfddujY+JIJFUV0Fc8+9+RYM9f18MTTJ2BazrT77j8y1lKoSJGlattrw7IR04OW5YsRKioZp7TzrrweFKV+kE2RJciyCMcJwkQRXUXJsBgqIiKiJY91IyIiIlrKwlBR+JnccbwZO9ms6UuhtyuOsXSh5WuWTRs//vke/N5Vz2r5HAslrJmYlgPHDQJX+YIxL+Pky4YF+KhuOBMEQJ/jqDVJEhHVVRTLFvJFE90dMrL5EkNFRES0aiyJ3nynt7IGgF27duFFL3oRzjn
nnEVaFREREVFjZEmsjgDrqPyzUDTguo2N42pWOPIqGglGXUUiasOjvMLdWZblwmlifVs39tY8fvDoGDzPr3nbYpMkEVs21A4OHTgy2vJ5wxFjZSMYOVYqmS2fay6KpcrOu+ros9nDQbpa6VZUeQxHoBER0XLAuhEREREtZaoqAwIgiSJkOajPzNStSBAEXH3ZeajXqCcR13H21n7sPHMNRLF+N589zwxi38HhOa19IciyBF2TAT8IEwHARKY4L9cKa2ZhrURT5baMxI3Hgk7hpbIJHz5M06nWVoiIiFa6RetUdP3118P3fdx88804dCho83hqkeiOO+7AHXfcgf7+fvzxH/8x3vSmN2Hnzp2LtVwiIiKiGXWlYsjkStA1FaoqwbJc5ItGw92AGuX7fjXMEtWDcEuiUtiYjSAKkEWxuj7DsBrucLR1Qw9+/fDBacfLpo3hsSzW9nc09gsssG0be/HUgekFttGJPHKFMpLx5nfFRXQFuYIBo/J3KBv2jDsQ54Pv+9VOReHroZGOQ7omo1AyYVS6N81ny3EiIqK5YN2IiIiIlgtBEKCpMkzTgapIcBwPlu3M2NV447pu/NkfvRAPPXEEpmmjMxXD2v4U1vSmptRqcoUyHnj0MH77xJGanZh/8LMnsGl9T7Vb0lKVjEdgmHnki0GHIstyUCia1bBOu4Sbp8LAT7s6S+uaUu3+XCxZiEc1ZHNl6L3s/kxERCvfonUqWrduHf7hH/4BBw4cwE9/+lNcd911iMVi8H2/2tba930MDw/jn//5n3Heeedh9+7d+Pd//3dks9nFWjYRERFRTYoiVQMqYdeifMFoexefsmnD83xIslAtjCQaHX1WuX+00q2o3MSOqo1ru+t2Qzp4bKzh8yy0bZv66t528Ghr69ZUBaIQtDO3KjsPF7rjT9mw4Xs+HM+DbQdraKhT0Sktx13Pg+f5NYuSREREi411IyIiIlpOtEpnYLUyAs2y6ncqCvV2J/CyF56LV73kIrxw91k4c3P/tM1fyXgEL/6dHXj7n1yBl7xgZ7XmBADxqIYXPXd79ZpLWTSiQlFEeB6QLwbditKZ1se/1TIxWYBpBjWOdoeKBEFAPBoEoAqlYP25QnnJdu8mIiJqpyUx/uzSSy/FV7/6VQwPD+OrX/0qLr/88upt4Q403/fx0EMP4S/+4i+wZs0avO51r8OPf/zjaiGJiIiIaLF1VWapxyIaZFmE6/nVQkO7lCvhlageFDJ0XYEsN1Y8ilTGpemV8V2GYTf8XkpRJGxY21XztoNHxxs6RzuVjcZCPKlEBL1d8Zq37T/SWqhIFIVqV6BwHWF77YVSqlwv7JakqlJDnZJkWYIii4CPapiII9CIiGipY92IiIiIlrqwU5BS+adlt3cDj6Yq2H3hVvzlG1+Et73+Mrzp2ufh7X/yYpy/ff2idx+2bAemNfvGtVRlM16+aMBH0Im7XTWJsYk8xtNBSCmTK8J1/aB+08YOTmGoyDAc2I4Lz/NRKLa37kdERLQULYlQUSgajeK6667DT37yExw6dAj/8A//gK1bt04pAPm+D8MwcMstt+BlL3sZNm7ciA984AN45plnFnHlRERERMH4qVilwBB2LcoVjJpfZpXKFsYm8piYLMB2Zt+9BlRGXlWKLbFIc12KACBSCRNpmgxRBFzPb6rItXVDb83jx4fSC9rtJp0t4p//48f4yi334p77nsLRExNwXa/u/et1Kzp0bByeV/9xM4noU0NFhdLChoqKc2jnrVfXHjyWoSIiIlouWDciIiKipSr8XK4pYajIbbnmMBNRFNDTFceGNV0QxYUPE/m+j4lMAY89dRz/89PH8eVv34t/+vcf4rdPHJ31sbFoZROe46NQDOoo6UxxzmsaGc9Vz5POFpHJBUGfzlS0rYErWZaq9aBiZRNhJl9u2/mJiIiWqiUVKjrVxo0b8b//9//GM888g5///Of4kz/5E8TjU3eZ+76PEydO4B//8R+xfft2vOA
FL8BNN92EQqG9LROJiIiIGtXVEQMAxGMaJFkIZq2fEtpwXQ9jE3mMTuRRLFvIF00Mj2YbCvcEI6t8SNLJTjnNzJ6XZRGqKkGAgIgeBIzCYEkjtm7sqXnc9XwcHZxo+DxzdfDoGHwfGBzN4hcP7cf/vfU+fPrLP8J/3fFgzYLdtk21w1CGaePEcKalNYQBLcN04Pk+XMeD2cQ4ublwXa8aJgr/2cjos5BeackePrZkWOziQEREyw7rRkRERLSU6JoCCIAsSZDl4Ku3lThu/Lv/8xt84eaf4vt3PoKHHj+C4bEsfB84Pjw562MFQUAyHmyOy1bCOMWS2XI9xfd9DI9lkcmWAAATmSJy+SDs09URbWojXqNOjkALan3lstX2rlRERERLzZINFZ3q+c9/Pr7yla9geHgYX//613HFFVdU08Wntrn+1a9+hbe85S0YGBjAm970Jvz0pz9dxFUTERHRahSNqNB1BaIgVIsXuUqhpFAyMTiSCUJGApBK6FBVCa7nY3gsN2ur6HDEViSiQoAATZOhKs21cQ47Kena1G41jejrTiIWVaccUxUJZ23pr+7EWwgHaowts2wX2bwBUZz+9nbjmi4odUbE7T862tIaFKVSJPRPhnOKC9Txp2RYgA9YjgvH8SAIQRv0RoV/e9t24XgefM+v/g5ERETLEetGREREtNhEUTjZragycmslhop6uxI1j58YzjS0YSke1SGK4Sa8SreiSiioGb7vY2g0i2yuDB8+xicLyBeCQFF3Z6zaQbzdohEVUmX9pUr36myO3YqIiGhlWxaholAkEsEf//Ef484778Thw4fxkY98BGeccca0NtelUgk333wzrrjiCmzduhUf/vCHcfTo7K0XiYiIiNoh7FaUiOkQxCDwMjSaxXi6ANfzoaoS1vSl0JmKob8nCV2T4Xk+RsbzdcMdpmWjVNkFFa10GYq3sOMqFglCRWG7ZtNyZhwddipRFLBtYx/WDXTg0l1n4k2//zz83Vteitdccwk2rutuei2tcF0Ph4+P17xtW51OSrIsYfP62us7WCOg1KjIaWPEFmoEWvg6MCrFK02Vm2p5LkkiVCUIWZkLHIgiIiKaT6wbERER0WJaDaGidQOdNY/niwZylVDPTERxereiXKEM23an3TebL+PIiQnsPzyK/UdGceDIGA4eHcOhY2M4eHQc+YIRBIrSBRSKJgQB6OmKz0uHopAgCNUNe4WiUVn/7L83ERHRcrasQkWnWr9+Pd7//vfj6aefxi9+8Qu8+c1vRiIxNSHt+z4OHz6MD33oQ9i6dSte9KIXtXStcrmMD3/4w9X/EBEREc0kEQs6EEmiWC1kmJYDCEBHKoI1falqZx9JFNHXk4SuB8Gi0fEcysbJgIftuBhL5zE0moPr+RDFk2GWVook0YgKQRQgSxKUSrCk3ESXmldccQH+9A+ejxfuPhsb1nZBkhb27eSJ4UlYNQpNALB1Y1/dx23bVPu2wdFsy2GgcARa+PcqGxY8b/7HiJWMYL3V0We6OtPdawoLnYYZFDhLDBUREdEKw7oRERERLbRwE1g4sn5lhoo66t52ooERaEBQzxIFwLJcGGbQjTmdLVZvNy0HRwfTGB7NwjBsuK4H1/HgOC5s24VluXAcF57vY3Q8j2LJgiAEXZTC8WTzKR4LrlE2bfjwq+siIiJaqZZtqOhUz3ve8/DlL38Zw8PD+MY3voErr7xyWptrz/Pws5/9rPozABw8eLCh85dKJfzDP/wDPvShD+FDH/pQ+38BIiIiWnE6U0G3omRMBwRA02Ss7UuhIxGFAAGJuI6tG3sRjaoQBQF93UlEdAWeD4xO5FEomUhni8G4tEpnmnhMw9q+DggQoKpydedbMwRBqBa5otVOO40HSprpiDMfDhyt3VlIVSSsX1N7txwAbNvUW/e2g3XOORtdkyEIgON4sB0X8Oc/nBMWz3ycHFkWBoSacTJUVOl6ZNoLEogiIiJaDKwbERER0UIIP2urigR
BBDzPX3Fhk6iuVjt0n+54g6EiSRKr3bczlW5F2XwZjuNifLKAw8fHUS5b8Hwfk9kiToxkMDiawdBYFsPhf8azGBrNoGzYEAWgrzuBaKT5TVetUBUZkijA904Gx5qprRERES03KyJUFNJ1Ha973evwox/9CEePHsXHPvYxnHXWWdPmuAqCAN/3sXv3blx88cX47Gc/i6GhoVnP38g8WCIiIiIASCUikGQRsiyhvzuBgd5kUHSQRKzt78Da/g4oioT1A52Ix7QgWNSTQCyqwveB8XQBubwB3wd0Xcba/hR6OuOQZQmyHIxPa1XYpjnseGQYjXcqWmz1AkCb1/dAnqFrUlcqhq5UtOZtR45PtLQWURSrwa5wBFpxnkegFUpBS23TcuB5QcgrHGXWDF0L1m3bHhzXhe/51YARERHRSsW6EREREc0nRZEgySIECNV6gWEtn5pLo9bXGYF2YjjT8DmScR2CABiGA9N24Hs+Dh4bx0S6APhBSGdwJINs3qhusDJNB0b4H8OBbXsQRQF9PcmWujjPhXraiLvyMqqtERERNWtFhYpOtXbtWrz3ve/F3r178atf/QpvfetbkUql4Ps+fN+vFogefvhh/O3f/i02btyIF7/4xfja176GXC632MsnIiKiZU4QBPR0xgEE46nC7kSbN/QgEden3G9tfwcScR0CBPR0xattlBVFQn9PAgM9KaiKDFEU0NudwNaNPS11pwnFoifbcYsi4Ho+zGVQ5CqVLQyNZWvetnVj/U5E1ftURqAJArC2vwOX7joTb7r2eXj5i85veU1h0SrckVYsz1+oyLIdjKULwXUq3asimjKlo0KjRFGEqgZhJI5AIyKi1Yh1IyIiIpoP1RFolbH31gocgVYvVDQ8loXjNNaZSZalamehXKVbke/5cDwPY+k8RsbzcBwPsiyipzOG/p4E+noS6OtOoLcrjp6uOHq74ljb3zGnGlmrwmuGf98SOxUREdEKtmJDRad6znOegy9+8YsYGhrCf/7nf+Kqq66CKE791V3XxT333IM3v/nNGBgYwLXXXovvfe97sO2l/wUbERERLU0dySjW9negsyOKDWs7sba/o2Y3HUEQsKYvhVQyEgSLOuPo60lgXX9HEFoRgM6OKLZs7EVXR6ylEMmpVEWGqkoQIFRDMfnC/HbYaYdDx8ZQrwHA1o09sz7+op0b8OqXXIR3/OmVePMfPh8v3H02NqzpmtNIt2q3J8uGj6CtuWW3v2Do+z4GRzLVjkL5QtCxKFYJoLWiOgKtEihjqIiIiFYr1o2IiIioXaoj0LSV26lo3UBHzeOO62FkvPHwdSoRARBsnLIcF/migcHhyepGqmRCx9q+DsRjOiK6iqiuIhpREYtqiEc1xKLajF2r59Ppnagsy4HreouyFiIiovm2KkJFIU3T8NrXvhY/+MEPcPToUfzjP/4jtm/fPqU9te/7MAwD3/ve93Dttdeiv78ff/VXf7WIqyYiIqLlLBHX0dedRDQyc/hDEAQM9KbQURnRFe5sS8R1bNnQg77uZFsLJYl4pHp+IOiw4yzx4sfBo+M1j3ckI+hKxWZ9/EBvCueeva46/q0dgpF2AnwPMM3WRqB53uyjUsbSeZimE+zYmwy6FSViWvV10oqINnX8Xdmwq78DERHRasS6EREREc1VuPlIU4N/2rYHz1va9ZZm9XUn6o5iPz482fB5VEWuPl/DoxlMTBbheYCqSljTl0JXKjanjWDzSVVkCALgOj7sSncmgzUVIiJaoVZVqOhUa9aswbvf/W7s2bMH999/P/78z/8cHR0dU+7j+z4ymQxuueWWOXcEICIiImpEf08SG9Z2orc7gU3ru7G2vwNqpWV2O3UkoxBEAbqqQNdk+D6QL5Tbfp128X0fB46O1bxt28a+RX2vFtGCYE/JCENFjXX8cV0PRwfTeObQCA4dG6/bKahYMjGZKQEA0pMFuI4PRRHR2UCQaiaaqkAUAMfxUDSCINRoOj+ncxIREa0
UrBsRERFRK3RNAQRAFkXIcvAVnLnCRqCJoog1fR01bzsxnGnqXGFwyPMAUQA6U1Gs6U1VOwEtVaIoVOt14d+3zBFoRES0Qq3aUNGpLrnkEnzhC1/A0NAQbrnlFlx99dXVNtcsChEREdFCi0Y0dHXE5nUmvCyJSIXdihJBt6J80Vyyu+fG0nnki0bN2xoZfTafwl115UqoqGRYDXUfGhrNolwJElmWg2ODaYxO5KY81nFcDI1mAQC5Qhmlsg1BAHo6E3PerSeKApKVVuOTmRJ8+CiVLBSKS38UHhER0UJi3YiIiIgaJQhCtTNwWNdZaaEiAFhfZwRaM52KAEBRJKwf6EB/TwLr13QilYgsm/dXmnZaqIidioiIaIViqOgUqqriD/7gD3D77bfj+PHj+OQnP4mdO3dOaXNNREREtFKEo9ZiugZFEeF5PvItBEo8z8OxoTR+dv8+fPU7v8TBOh2F5qLe6DNBADavX9xQUVgktG0XjuvC9/xZd6dNTBZQLJnwfB/D41kUKiPTJjMlHDkxXn380FgWruvBsh1M5oJuRZ2paNt27CXjEUiSAMfxkM0HnarG0jm+/yUiIqqBdSMiIiJqhF7ZfHR6J5uVZN1AZ83j2Xy57qawekRRRERXq6Ht5UKvjLgLx54Zhs33hUREtCItr39DL6D+/n68613vwuOPP467776bbwSIiIhoxdFUGfGYBiAIlwBAvmg0/L7nsaeO4zs/eAif/vKP8bXv/go/f2Afjg9PYv+R0bauM5Mr4f5HDta8bf1A17x2dGqEJInVkE/YrWh8sgDXrd31qVQ2MT5ZAACks0UYhoPxdAEj4zk4rgvLcnF0MI1jQ2mUShY838dYOg/fC7oihX+rdhBFAZ3JIFyWy5er189UAkxERERUG+tGREREVE84Jl0/pZPNSnuvUC9UBAAnmuxWtFyplVqQbbtwPQ+e56/IABkRERFDRQ0477zzFnsJRERERPOiMxUDAMRjGiQ56FhTLDc2A37/4VE8dWB4WsHk4LHaXYVakcmV8PVb70OuUHuX27ZNvW271lyEI9ByBQOO58EwbBwbSsM5LVjkOC4GR7OADxRKJgqFoEORIASBpKHRStciHyiVgr9DOluEbXuQJAE9nfG2rz0e06GpMjwPyFS6FY2nC9PWTkRERLWxbkRERESnCmsEiiJBEAHP82E77iKvqr3iUQ0dydqbno4PZxZ2MYtElkQocvA1qxWOQDM4Ao2IiFYehoqasFzmuBIRERE1KhpRoesKBAhIxHQAQK5QbuixWzfWHjs2NpHHRKUTz1xkciXc/L37qmO5ajlzc9+crxPyfR979w+1VOhLxHRIkgDbdjEyFnQcMk0HxwbTcCrn830fg6NZuE4wzmwiEzxHHckI1vSloKoSXNfHeLqA0YkcHM9DsXwyeNTblYAkzc/b987KKLxCwYRpO/A8HxPpuf8NiYiIVhPWjYiIiAgAZFmCLEsQIFQ7G6/EDjb1uhUNjqyOTkUAoFVGoJlWECYKx9kTERGtJAwVNWGltackIiIiAoCujrBbkQ5BBCzLrc6Dn8mWDfW7BH3vxw9XwzStyObLuPl79yGTqx8oOvestRjoTbV8jVONjOdw8/fuw3f/5ze477cHmn68JIkY6EmdDBZVR5k5Qccix8XEZAHl8vRxZh3JKFRFxpreFDqSEQgCUCrbGBrJVMNZHcnIvI550zUFsWjQnn0yWwQAZPIlmA28DoiIiCjAuhERERGFwm5FehgqMldeqGh9nVDRiZFM3ZHwK00YGjPYqYiIiFYwebEXsBwkEgl89atfXexlEBEREc2LeFSDokiADcSjOvIFA9l8edYQSyoRQW9XHGM1OtoMjWbxk1/txUsvPXfK8QNHx7BlfQ9Esf5O/my+jJtvnTlQtG6gA1dfPvdRI4Zp42f3P40HHzuM8HvAXz60H+dvX4+OZLSpcymKhIHeFEbGc7BtD8NjOQz0JAEAh09MwHWCgtrEZAG27UG
WxSnjzARBQEcyioiuYmKyAMsOQlkRXUEqUbuleDt1JqMoly0YhoOiYSKmaxhN57FhTde8X5uIiGg5Y92IiIiIThfRFeQLBlRVAWDAtFde2KReqMhxPIxO5LGmrz0bwZYyTTvZicqHD8dx4TguZFla5JURERG1D0NFDVBVFdddd91iL4OIiIhoXgiCgM5UDKPjOSTjOvJFA2XDhmU7UJWZ3y5ecsEW/OCex2ve9sCjh7FpXQ+2bxsAAOzZdwK3/uhhnLWlH696yUXV3Vyn8n0f3/2f32AyV6p7zXX9HXjdK3dXW0y36thgGv/1g4dQKk9tTe24Hu78xZP4g6svbvqciixhoCcZBIscD8PjOfRXgkUAkCsYKJYsCALQ2xWvOc5MU2Ws6UuhbNjwfR/RiLog41RkWUIyEUEmV8ZkpoTogIpSyUKhaCIe0+b9+kRERMsV60ZERER0uogedANWK7UP2/bgut68jTVfDH09SciSCKdGV6Ljw5OrIlSkKjJEUYDn+bBsF5oio2zYSMQZKiIiopVj5bx7ISIiIqKWpRIRSJIIRZYQiwSFr1zemPVxF+3ciC0beurefttPHkUmV8Lx4Un8v7seBQDsOzSC//v//RKZGsEhQRDw8svPq7YJP93a/g687nd3t2UUWHdXHL5Xe0zJUweGceDIaEvnlWUJ/b0pKIoIx/EwPJ6F5bgwLBvpymixzlR0xlCUIAiIRlTEotqCBIpCyXgEkiTAcTxk80GnqLF0juNciIiIiIiIiJqgqTIEUYAsilCU4Ks4015ZI9BkSawbHDoxPLnAq1k8WnXEXdCNqmRYM92diIho2WGoiIiIiIggigI6ksGIrURcBwAUy2bN3WanP+5VL7kIsaha83bDtPHd//kN/uuOB6eca2Q8j5u+8wscr1FkGuhN4Q2vfi6ikannXNuXwuvbFCgCgKiu4vLnba97+w9/vgeO47Z0blkS0d+TgqpIcB0fI2NZjKcLgA/EIiqS8fkfZ9YKURTQWRn7lsuX4bguLMtFrjB7wIyIiIiIiIiIAoIgQK+ETcJNRZa1skJFALCuzgi0VRkqqvx9DXN5jrqbrQZIRESrF0NFRERERAQA6EjFIIgCdFWBrsnwfSBfKM/6uHhUw6techHqNdQZGs2iWJq+S6tYsnDz9+5Dvjg9sNLfk8QbXv2carBoTV+qbR2KTnXRzo0Y6K29qy6dKeL+Rw+1fO4gWJSEqkpwXR+O40FRRHR3xlo+50KIx3RoqgzPAwqVv02xZC7yqoiIiIiIiIiWl3AEWhg6Wa5hk5msrxMqSmdLq6aWENaqTg0VeXU6Yy9FnudjcCSDA4dHsf/waLXjEhERUYihIiIiIiICEIRgUpUOOolE0K2oVG6sZfPWDb14/sVnNn3NS3edhURMr3lbX3cSb3z1c7FtUy9e/7u7q8W4dhJFAVe98Jy6t//iwWeQayBYVY8kiRjoSaG7I4bOVBRrelMQxaX/Fjwe0wAARqUgVjZYUCIiIiIiIiJqhl4Z7R6GiizLWXHjxdcNdEz5WZJErB/oxO4Lt8BbYb9rPaoiQxAAx/GCjtc+YJjLYwSa63o4PpRGvtKh2nU9DI5mF3lVRES01MiLvQAiIiIiWjpSyQgyuRL0Smtu2/Hguh4kafYgzKW7zsLRwQkcOZFu6Frnb1+P5z1r24z36e1O4HWv3N3Q+Vq1YU0XLtixHo/uPT7tNst2cdcv9+L3Xvqsls8vikJ1pNxycWrrbh8+HMeFbbtQFGmRV0ZERERERES0PISbo1RFhigCnhfUGcLP3EtByZhb+CUZj+BZ525ET2cc6wc60d+ThCyvrtqBKApQFAmW5cK0HciyhLJhIxrRFntpM3JdDyeGJ1E2bLieh7F0Hn3dSViWg7JhzcvmPiIiWp6W/jZpIiIiIlowmipDEAVIoghZDt4qWrbT0GNFUcCrXnJRdWTZTDau7cLLLz8PQr2ZaQvsRc/bUbeo9+Qzg3PqVrQcBQVPAX6
l4AmszDbtRERERERERPNFlsTq5hz1lG5FS8VktojJbGnKMaeFsV0vv/x87L5wK9YNdK66QFEo3JxnmMuj4/PpgaKR8RwMw6l2LM/mV1cdjIiIZsZQERERERFVCYJQDddU23NXQiWNSMYj+N0rL5zxPh3JCP7g6ouXVKEpHtVw2XPOrnmb7wNHTkws8IoWX7VbUSVMNNfdi0RERERERESrja4FYRO98hnbsBY/bOJ5PsYm8sjmjWm3lcv87N+Kkx2fg79veQnXUFzXw/FKoMipBIosK6j9FYrBayJXMOC63mIuk4iIlhCGioiIiIhoinB3lXpaQaRRZ2zqqzvWTFNlvPaaXQ11M1poF5+3CZ3JaM3bjg9NLvBqFt+pI9AAdioiIiIiIiIialY4Qkqr1FoWu1OR43oYHs+iWLYAAehMTa2DlMrmIq1s8fi+D99vvkPTqU7dmOf5PjzPr9ZTlpIwUGRUAkWjlUCRJAoQhKDTkmU78D0fucL00BkREa1OS2dwKxEREREtCbquADlAq7TobqZTUeiy55yNkfEcDhwdqx6TJRG//7Jno7c70ba1tpMoiti8oQeTe45Ou+3YKgwVBbspy1NCRZ7nQxSXxsg6IiIiIiIioqUuok/duGU7HlzXgyQt/J5/03IwOpGD6/qQJAG9XQkI/tQNRI7rwTDtaoellcz3ffx2z1H8+rcHMJkrYaA3hateeC7WD3Q2fS5ZliDLIhzHg2XZ0DUVZcOqho2WgpkCRQO9KUzmiiiVbeSLJro7ZGRypWmhMyIiWp3YqYiIiIiIpghbcitK8E+nUvBqhiSJeM01l+DFv7MD2zb14qKdG/Cnf/h8bNvY2/b1tlO9wtHoRG5J7jCbT2olVOY4HhzXBXx2KyIiIiIiIiJqhqbKEEQBkihCVYPP2YsxXrxUtjAyloXr+lAUEQO9KeiaAqHGxqFCceV3KyoUDXzrtgfwg3seRzpbgu8DQ6NZfPv/PVAdAdYsrTriLqgflY2lVUMZnyxUA0UjY5VAkRQEihRFQiKmAwCKJQOe78OynCU9xo2IiBYOQ0VERERENIV6SsFLUYK3i5bdfKBGkkQ891nb8LpX7sY1V1yA/p5ku5fadhvW1g4V+T5wYmR1dSsSTyl4mtWCGItJRERERERERI0SBAGxygj4cBR8sbSwoR3TcjCWzsPzAV2XgxCJLEFRJKzp65h2/1LZhOc1t7lsOdl3cBj//q2f48CRsWm3lU0bDzx2uKXzVkNFlQ1ZS6mG4vs+cvkyAGA8nYdtVwJFPUGgCAg6VsuyCM8LQmgAkK08hoiIVjeGioiIiIhoCkEQqt2K1Eq3otXSpacrFasW+U53fBWOQAsLYqeOQCMiIiIiIiKixiUTEQBAPKIBAAzTgeM0P2q+VfmCAd8HohEF/T1JSKKIiK5g47ruapfikCyL8HygWF46gZh2sWwHd9zzGG6546FqaKaWJ54+Ac/zmz6/pgYj46xKDcW2XThNdv6eL2UjGGnveB4MI1hff0+yGigCgnpgPBq8RsNuTbmC0XT3ciIiWnkYKiIiIiKiaTQtKISolVBJK52KliNBEOqOQFudoaLgdWAu0dbdREREREREREtdLKJBFAXIsgRdD+osCxXa8TwfpXLQGSkZj0CAgERcx4a1XZCl6V8RRiNhqKR93ZSWQrBmcDSD//j2vfjtE0dnvW82X8bRwXTT11AVCaIAeN7JOtpS6VYUvgbMymYxVZGqGwlPFY/pEIQg+GY5LnzPR67Q2jg4IiJaORgqIiIiIqJp9EqoSFPCUNHC7aBbbOvX1AkVDU+2tFNtOTu1U5Hn+3Bdb0kHzHL5Mg4dG8eRExOYzBaXROGSiIiIiIiIVjdRDII8QBAwAhZuBFrJsOD5QQeisNbT152AIAg17x/RVUAI6gCtfv73PB+DIxnc++Az+Np3f4kv3HwPfH/x6ilPHxzG177zS0xkig0/5vGnjjd9HUEQqpvzDDN47kqlpREqCkNs5Uq
oSNeVmveTpaCLFRB0uAKAbL60ACskIqKlbHoMlYiIiIhWPf20TkWO48FxvZq72FaaDWu6ah43LQdj6Tz6e5ILvKLFo8gSJEmA6/qwLAe6pqBs2DV3sy0223YxNJYFKnVKw7AxOpFHIqajIxmp7rYkIiIiIiIiWmjJuI5sroxoRMVEpgjLdmHZzrx/vg7DS/FoMOo9GlUhy1Ld+8uigKiuoFS2USiZ6Eo1vr5svoy7f7UXB4+NTxsvNjqxOPUU23Fx+92PwW1wk1giruO8s9bh/O3rW7peRFdhmA5KholkXEehZKK/pTO1j+t6MCqdp8POSRGtdqgICLoVlco2SmUTXR1RmKYDw7SrtUIiIlp9lt63AURERES06FRFgiAKED1AUUTYdtChRpbUxV7avFvTl4IkCjULTseHJ1dVqAgIuhWVyjYMy66EiiykEpHFXtY0hZIJ+IBh2iiWLcRjGjRFRr5gIF8woKoSUokoUokIpFUQjiMiIiIiIqKlIxrRgjCPA8QiKoolq+nQTrNc14NR6UwTjQadkpLx2T/Ph6GSYtFEZzJat6vR6TRVxp5nBlGrKdGBI2OLUk85dHRsWsCplvPOXocLdmzApnXdEMXGft9aorqKyWwJhhl0fHYcF6ZpQ1vEQE74+1u2A9fxIQonx93XEtEUyLIIx/FQLFmIRzVkciUM9KYWaslERLTEsJpORERERNMIggC90qUoLDRY1tIde9VOiizVLZQcH5pc4NUsvtP//uXK7ralJtxtZ5gW8gUDQyNZDI5mkC8acD0PluVibCKPIycm4HIsGhERERERES2wZGUEWrTSNahYMud1LFipbMH3AVWVoMrB5rF4dPYuvhFdgSQHm60aCeSEdE2p2/35wNHRhs/TTk8fGql7WzKu4/Wv2o33/PnL8KqXXIQtG3rmFCgCAEWRoMgi4ANlM3ju8gs06q6eYjm4fjj6TNOUGX9PQTj5OikUgxFouYLBWgoR0SrGUBERERER1RTuolKVoC22Za+OUBEArF/TWfP48aH0Aq9k8elaEC4zK6Eiy3KWXCHJ908WOsvmydepZbmYmCzi+PAkxjMFOI4L23aRKxiLtVQiIiIiIiJapaqhIl2FKAKu61c/a8+HQnX0mVb952ydezVNhgAB8coI8UKTgZhtG3trHj82mJ7X37UWz/PxzOHaoaJN67rw1j+6FFs39EJR6o+Da0VED0Jj4aas4mKHikqVekllM1YjY8ziseC1apgOLMeF7/nIF1lLISJarRgqIiIiIqKaItVQ0dRQyWpQb2edYTmr6nkAgr+/IATFTttxAaDaPn2pMCtBJ9fzqn+fNX0pdKaiUBQRvgcUCma1ABYW0oiIiIiIiIgWiqYpUNUgtBOtdoKZn8CJ7bjB52MBiEaCkEsjo88SseA+YaikbNhwKrWARmzb1FfzuOv5OHJiouHztMPgaKYaqDndrgu2VMM/7RbRg3pataOyYcNZpM1ZpuXAcVx4/skAW7i+mciSiGgkuF/YrSiTK83fQomIaEmbv2GtRERERLSsVTsVqTJQCZU4rgd5ll1tK8H6gaBTUW9XHOvXdGH9QCc2rO1EVyoGQZhbK+zlRhAEaKoMwwwCVYosoWxYiDXQMn2hhLv+DMsGfEBRRGiqDE2VkUpEkM2XMZktVQtozbRvJyIiIiIiImqXVCKCsYk84hENhYKJkmHB8/w5j906Xfg5WddkyJIESRIRi84eoolFVRQNA4osQdeCWkChZKIjGW3ouv09yeAcNcI8B46O4awt/c39InOwr87oM1kSsWVD7Y5K7aBrCkQBcB0fpu1AU2QUSyZSidlDXe1Wqow+M00bvgdIklDdPDibeExHqWyjWDLRmYrCNB2UDWvewlhERLR0rfxvhIiIiIioJZoqQxAFiIIARV5dI9AScR3veutL8eevvwzXvOh8XLhzA7o74qsuUBRS1andqspLrFNRqbr7L1jf6a28Y5VdmYblwPN9uK4Hc4n9DkR
ERERERLTyhSPQdE2BLIvwPH9euukWy+Hos+B6iZjeUE1DEAQkKmsMuxU1M75LFAVsrRPYOXBktOHztMO+g8M1j29e3wNNnb+eC4IgQD+tW9FijUCrjj6r1ECiTQSCIpXXqOv61fNMTBbbv0giIlryGCoiIiIiorrCcEZYbLFW0eivRmbMLxW+7+OZwyO4/5GDePKZwbafX1eD5yIce1Y2bPi+3/brtCIowAbrMsygyBXRphbJZFmCLIuAj2qYqMQRaERERERERLTAZFlCpLLxJdwAU2hz4MS0bNi2B0E8ZfRZQm/48WFHnWhEhSgCtuM1NQb9jDoj0CazJaQzCxNKSWeLGEsXat521tb575YUdvMJ6xXFkrngdRTf96u1j3AdzdS6BEFAohIsy+RK8OGjWDKbei0QEdHKwFAREREREdWla0GYSFWCTkXmKgoVLRe+7+O/7ngI377tQfz43ifx2z1H236NsFORbbtwPQ++5y+ZgJlhWvA9H47rwrY9AKi54/D0YBRHoBEREREREdFiCEM7sUpgwzBtuK7XtvOHXWWiuhp0n1akpkZWRXQVqipBFATEKp2O8gWj4cdv2dCDek2RDhwda/g8c/FMndFnAHDm5tqhp3YKn2/TdOC47rx1pJpJ2Ti1XuICaH4DXSKmQxIFOI53Srei2mEtIiJauRgqIiIiIqK6wiBGGCpZLePPlpM9+wax75Ri2Tlnrm37NWRJhCIHHx3CMFHJWBo708KilmEG61JVCZI0/WNOGJAzwhFuS2T9REREREREtLrEoxoEUYAqS1BVCb7fvo0vvu9XR23FohqAkyPXmpFKRAEAiVhwjpJhwWkw+BSLaljTm6p520KNQNtXJ1S0ti+FZDzS8Hk8z8P+I6MYncg1dX1ZEqGqwQa9sF7R7o5UswmvF15fU+Wa9ZKZiKKAZCUEl82XgvMWTY6UJyJaZRgqIiIiIqK6whnwqiIDAuC6fsNFJFoYjz11vPrfJUnE9m0D83KdMFhmWuEItKXR6Sds5R12IIrU2XUX7sYzLQee78N1PRbBiIiIiIiIaMFJkoh4JfATBn+K5ZOBE8cNxo0VSiYKJROe1/jYLMO04Xo+JElApFLTSTQRogmlEpEg+KTI0DUZvg8Uio13K9q6qbfm8cPHJ+A4btPraUbZsHDkxETN287cMvvoM9/3MTSaxY/v3YPPfvUufOv/PYD7Hz7U9DqilW5FpcrftrjAoaJwE1ZYvwlfD81KxDSIogDb9qqv04kFGmNHRERLA0NFRERERFSXqsgQRSFoly0HO6yWytgrAmzHnVIo27axFxFdRb5gYHgs29ZrVceHVf7+xhII5LiuB6PScShcT71W3rIsBd2WfFTDRKUlEowiIiIiIiKi1SXslhOLBMETw3QwOJLB0RMTOD40ieGxHMbTBYynCxgazVTHV80m7E4TjWgQIEDXlZojwmcjSWK1w1EifnIEmu83FnA6Y2PtEWO24+LoULrp9TRDliS86sqLcM6Za6f97mfNEip66sAw/v0/f4b/uOVe3P/IoWowZ++BIdhNhqHCEWiGacOHD8tyYS5QTc1x3Gr9brZ6yWxE8eRrIZMrAwheCwv1uxAR0eJjqIiIiIiIZqRVig4aR6AtOUcH01M6R23b1Iu7frkX//b1u+G2uaOUVhkfFhalbNud992FswlDQZbjwnE8CAKgqfWLZGEBLSyotau9PBEREREREVEzYlEVkiRCliToelhvcRE2JZJlEbouQ5ZF2I6HobHsrBtjPM9HufI5NxYNAi2tjD4LdSaDEWjRiApZFuF6fsPddtYNdNQNMx08MtbymhqhKBLOPXsdfu+qZ+Gd178Er3/Vblx8/mZsWNOF/p7kLI/2MZYuTDtqWg72HRxuah3BuDEBnofqhqiF6lZUrLwOTNuB6/oQRaGlcFkoGdcr3YpcFI3gd0izWxER0arR+r9BiIiIiGhV0DUZ5bIVjECDyZ1IS8iBI6NTfv7Rz/fA83x
ENAVr+jraei1FliAKgOcFwTJVkVE2bSQqHawWQxgKCotzmhp01qpH0xTki2a12xJDRURERERERLQYBEFAIqYjkyuhIxGFpbtQZBGyLEGWRQgIPts6nofxdB6G4WB0PI/OVBSpRO1xZiXDgudXAkmqAghAItZ6qEjTFEQiKsplC/GYhky2jFzRQLyBc4qiiK0be7F3/9C02/YfHcOLW15Vc2RJxNYNvdi6ofY4ttOdsakPEU1BuUZ35seeOoFzzlrX1PWjuop80UTJsBHRVRRKJro6Yk2doxVheCkcfaZrMgShfr1kNqIoIhHTkM0byObKiOkacoUyujtjlXohERGtZOxUREREREQzimjB7jZV5fgzIOhys//IaNvHi7Xi4NGpu/u8ypbGLRt7ZgzXtEIQhGrXKsMMXgPlRR4fVioHRTLDDNYRthavR6/syjMtB67nwfP8JTHGjYiIiIiIiFafjmQEEIKuusm4joiuQpGlIFAkBCPIZFFEf0+yOoJsMlvC2ES++vnf932UDAtj6TzSk0GHnXilS1HQYWhuG4E6U0G3onhMhyCiMsKrsc/R2zbWDvKMTeSRK5TntK75IssSdp61tuZtB46OIp1trjtPWKcI6ydlw2p7Z+laTt+EFdb25iIZj0AUgtdAybAAn92KiIhWC8ZHiYiIiGhG4dgrVZEBAXA9H47jzrkwtVxkciUcOTGBY0OTOD6UxvhkAb4PPPu8Tbj6svMWbV25QrlmS26gfuEOAApFA0cG09h5xpqmd6lpqoyyYVdH4IXFqcXgOC4sKxi/FgaDdG3mjzeyLEGptI43LQdRXUWpbFXHohEREREREREtFE1TsGFNF/JFA6IgQFEkqIoERQ7Gnvk+MDyWRb5goLsjBkWWkM4WUSxbsN0sNFVGqWTBDWemAVAUEbFoEEBKxWt3NGpGPKoF9R8HiEU0FIomcgUDvV2zf47etql+beKZw6N49rmb5ry++XDe2evwm8ePTDvu+8CDjx7CSy89t+Fz6ZoCQQAcx4PluFBlCaWyVQ2JzQfDtOG6HlzPq3Zq1vW51z0kSUQ8riOXN5DNlxHVVWTzZXR3xKEoq6NGSES0WjFUREREREQzUpXKSCkPUBUJluXCsldPqOiXv9mP3z5xdNrx40OTi7Cak07vUnSqraeEigzTxpETEzh8fByHjo1Xg0i9r7sUfd3Jpq6pVTr9hCGecqVQJUkL3wC1GO66s2x4HiCKQkMtt3VNge2YMEwbUV2t7Bac/9bjRERERERERKeLRlREI7W7yAgCsLa/AxNqAePpApJxHaoiYSydD2ozlY02kiwgqmuIxzRolc/Fkiw2NKZsNoIgoCMZwXi6gERcR6FoolS24Lge5FlqAcl4BL3dCYxN5Kfd9uCjh3DRzo1t77LcDusHOuuu+5Enj+GFu89ueHOSKArQNQVlw0bZsKDGIyiUzHkNFYWjzwzLBnxAkUUobarhJeMRFAoGTNNB2bAQ0VWkM0X09zZXXyIiouWF48+IiIiIaFbh2Cu1EioJO9WsBusHOmseH53INdzyez4cPDpe83hvVxzJym5E23Hxma/cif+64yE88OjhKZ2NDh2r/fiZaKpc3WFnOy7gn2ypvdCqrbxP6VLUSOelkyPc7Op5fN+f6SFEREREREREi6a7M451A53VgMqa3hR0TUY8pqG/J4ENA13o7ogFgSIBiEU1bKjcvx1SySgEUYCmyNA1Gb4fdEFuxBmb+moeH0sX8PTB4basr90EQcDuC7bUvM2yXTy691hT54tUugSFI9CKJXNe6xDVTVhG+7oUhWTpZFgtmw9G2GXyJdi227ZrEBHR0sNQERERERHNKhKGiuQgVGRaqyhUtKZ2qMj3gRMjmYVdTIXn+XU7FZ3apUiRJQz0pmre7/DxiaavK4pidTdeqVIMyzdYSGy3Yrmy864SDopotXd2ni5cv2W7cD0PnuevqtczERERERERLT/xmIaN67qhqhLkymf9ns44Inr
wWTiiK+jrSWLbpj6sX9NZ3VDTDrIkIlnprBN22MkXjIaCMRft3IB6+3/uffCZJbvJ57yz19XtIPXAo4fgeY2vO/wbGaYD1/Pgul61ltFujutVw0vhP8Prt0syEYEgBL+PYVqAD6SzxbZeg4iIlhaGioiIiIhoVmEQQ9MqnYpWUQijKxWrW0harBFoQ2MZlOsUoLadEioCgC0bemre78iJCXie1/S1o5ViVLm8MDvsajEtB67jwfNPBoIabT0uSyIUWQT8k+G4xeq2RERERERERNQoTZWxcW13EOoQBaiqjJ6uOLZs7MHGdd3oTEVnHUnWqs5kFEAwrk2SBbieX+2IM5Puzjh2nrG25m0j4zk8c2ikretsF1mW8OxzN9W8LZMrN9VlSZElqIoE+EDZCGo54YiydnJcD8eH0oAfdK52HA+CAOhq+wJmQNitSAMAZPPBRrNCsf2/DxERLR0MFRERERHRrMIwkapIgAC4ng/HWR2tjQVBqDsCrd4Isvl24EjtLkWyJGLjuu4px+qFikzLwdBotulrh227DdOBU+n0s9ChnFKlS5Fl2fA9QJIEKIrU8OP1GiPQiIiIiIiIiJY6SRKxpi+Fs7b0Y8uGHnR3xqEq8rxfV9MURCIqBAhIxE52K2rE71x8Rs3jXR0xCG0a0VYqWzgx0t6NXxeftwlSnfXd/8ihps51+gi0XIOdnhrlOC6ODaZhVmo1oxN5AEH9o11j8E4VvgYMy4aPoEbILtBERCsXQ0VERERENCtVkSFJIgQIQbAIgLGKigUb1nTVPH58OD0vu8tmUy/MtHFdFxR5arhmXX8HZLn22/5Dx5oPRcmyBE0NCpZht6KF3pEWhoDCHX6Njj4LaaeFisqGtWRbrhMREREREREtBZ2poFtRPKYDQrBZybRmH+PV35PE2Vv7qz/3dsXx6pdehLe9/jKcubl/hkc2JpMr4bNfvQs3/dcv8eVv/xw/u38fhkazc/6cH4/pOOesdTVvOzaUxuBIpuFzhSPIyoYFx3Vh2y4mMu0ZGWbbLo4NpWFZDhzXxchYFrbtQpIEdHXE2nKN0wV1QgG+B5jVDVvsVkREtFIxVEREREREDQm7u4T/NOdp/vtSdObmvprHfR945vDCtuo2TBvHh9M1b9u2cfo6ZVnCxrW1Q1GHjrfWaSkcBxeGe/KlxnYntoPvn+yMZFRHnzW3KzN8DVuWC7fSbYk76oiIiIiIiIjqi0c1yLIEWRQRjwbjr3INdit6/iVnYqA3iWtf9my89Y9eiHPPWjdrBx3f92cNBj35zCC+9K2fw3WD8e7DYzn8/IF9+I9b7sW/fu0n+MVDzzS0vnp2X7il7m0PPNp4tyJdU6CpMjwPmMyWAAATkwVY9txqEScDRS4cx8XwWA627UGWRQz0pqZtPGuncINXuOGLXaCJiFau+e+JSEREREQrQiyqolgyEdEU5PIGyksoVGQ7LtKZIlzPg6bKiGgqdE2GKLYnQ9/TFUdXKop0pfBzqqcPjuDCnRvbcp1GHD4+jno1ta0ba48627y+p2Z3o+NDk7Adt+kiU1RXMZktwbBseL4POB4M066GdeaTYdrwPB+u58E0K6EivblORbIkQlFE2LYHw7IR0zWUytaCrJ+IiIiIiIhoORIEAR3JCMbTBSTiOgpFE6WyBc/zZq2/rO3rwPWveQEEYfZRXJ7n47GnjuG+hw9iYrKAeEzHprVd2LSuG5vWd6MrFYPjePjRvXvw8J6jdc+TLxiw5riBaKA3hU3runDkxPTNXXueGcQVz9uBRFxv6FxdHTEMjWZRLFmIRy1EdBUj47m63bFDjuPC9XyIggBBCP4OgiDAdlwcH5qE47iwHRcj4zk4jgdFFtHfk4Q8j4EiANB1BYWSibJpoxNBqMj3/Yb+xkREtLwwVEREREREDYlGNAB5aJoCQQQcx4Ntu1CU+S1SNGJisgCjEjCxLBf5gglBQBAw0lVEdAWq0vpbX0EQcPbWAdz38MFptx08Nra
gz8OBo2M1jyfiOnq7EjVv27K+dtjIcT0cG0pj64beptagKBIUWYTteCgbFmIRDYWisSChnGK1S5FdWYsIWWo+PKZrCmzbhGE41VDRfLUFJyIiIiIiIloJUskoxicL0BQZsizCcTwYloNoA5t9GgmbjE3kccc9j+PY0MkQT75g4Il9g3hi3yAAIB7TIEsiMrnyjOeSJRHPOnfTrNecza4LttYMFXmej4ceP4zLn7u9ofNoqoxkXEeuYGAiU8TafgWlkoV8wagZTPI8H8NjWeRn6QZlOS5GxrNwHR+KIqK/J9VSnaRZp3aBdjwPMkSUDbva3ZqIiFYOjj8jIiIiooZoqgxZliAKAjQ1COiUzcVvbew4bjVQ1NsdRyKuQ5ZF+D5gmA4msyUMjmQxOJqB47gtX+esrQN1ru/VDfq0m+/7OFjnWts29NYt0A30puoGfg4fn2hpLZHTR6AVzZbO0wzbdjGZKQI42V47bLfdLF2tjPGrhJPKhjVrW3UiIiIiIiKi1UyWREQq9YWwzmC2oZO1bbu4+76n8KVv/3xKoKiWQtGcNVAU0RW89hW70JGMznltZ23pR2ed8/zmiSOwm6g1dSQjkCQBjuMhlw+6YY9M5Krj20LhJrB8wYCPoFuzV6NmYdkORsaCQJGqSAsWKAKC14Ja2WBnVEegzX9tiIiIFh5DRURERETUsFg0CHBE9Klz0xdTuAZdkxGLaOjuiGH9QCfWDXSgqyOGiB50VrIsF5O56ePLGrV+oLPubqt9h4ZbPm8z0tli3cJZvdFnACCKAjav76552+Fj08eiNSJafQ1Y8OHDshxY9tzais9maCwLz/NhWDYKlRBTqzvgtNN21HmeD2MJjfQjIiIiIiIiWorCmlA1VDTHEWP7j4zii//5U/zyof3wvLlv9tm0rhtv/aNLsWVD/TpJM0RRwCUXbK55W9mw8fhTx5s4l4iuVNAlOZM3YDkuXMfD+GSheh/LdnD0xAQMw4bjeRgey+HY4CSOnkjj8PEJHD4xgSODEzg2lMbQWBau60NVpWDk2QIFikK6HrwGwm7SYXdpIiJaWRgqIiIiIqKGxSIaAFR3pZmmvejdXUpGULAICxmSJAICoMgSknEd/T1J9PckAQTFnlbXK4oCztrcX/O2fYdG4Hlezdva6eDR2gEgQQC2bJx5hNnmOiPQBkczLYVpNFWGJAnwvJM70grz2K0onSmiXLbg+T7G0wXAB2IRteWRa7IkQlGCj0Nht6ISi19EREREREREMwo394RdrE3LaTkM5DgufnDP47N2HmqEIAAv3H0W/vhVz0EyHpnz+U514c4N1a48p3vg0UNN1ZpiUQ0RXQF8IJ0JwkSZbAmGacMwbRw9kYZtu3AcFyNjWZjmaaEtH/A9wHV9+F6wyW6gJxnUwxZYWB8M60KGaU/rukRERMsfQ0VERERE1LBw5JWqyJBkAZ4/9x1pc3Fqd5loJfDU153AmZv7sW6gs9rmWleVSgDGn9N6z95aO1RUNmwcG5ps+byNOnBktObxNb2pauegerbUCRX5PnD0RPMj0ARBqF6zVA0VGU2fpxGmaWMsnQcQdGtyHA+yLKK7Mzan8+rV4lfwmiiW2KabiIiIiIiIaCbhZ2lFliDJAnwfLXculmUJV73w3DmvKRHX8cZXPxeX7joLolh7NPxcaKqCi87ZWPO2sXSh6dHyXR0xiEJQjyhUahFDoxkcHUzDdT2YtoOh8SxsO6h/rO1PYePaTmxY04n1Ax1YN9CBdf3BPwd6UxDFxfm6V1MVCALgOB4sxwX8k5v/lhvX9VAqmwxFERHVwFARERERETVMlkRoWrATLaKdHH+1WAzThu8DsixClSVACHZ8iaKAeExDf28SamXnXDVAMocRV1s29kKRa+9Me/rg/I5Ac1wPR+qEf7ZumrlLEQB0d8aQiOk1bzt0vLURaGHILHwNlA0bjuM2dQ7bdlEomnDqFG08z8fgaBbwgaJholAIim09nfE5F810Vam
sOxjhVjZspDPFOZ2TiIiIiIiIaCWTTqkNhZ+r51JrOWtLP7ZvG6h525YNPdh94Ras6UtBqJMVOntrP/7sjy7FxnW1x763y64LttRdwzOHR5o6lyJLSCaCbkqTuSJcz4NlufA9H2XDwshYFq7jQ1UkDPSmoCoyRFGEJImQZQmKLEFRpLo1qoUiikK1Y5VRqQ2VSssrVOT7PtKZIg4eHcOxwUkcODoGcw6vZyKilUhe7AUQERER0fISi2gwTQe6pqBQNFE2bXQu0lrCcVWRSseciK5Oa/cci6qwLAcRXUWxZKFkWNUORs1SZAnbNvXiqQPTA0T7Do7gyufvhFCvwjRHx4fSsOzagZ1tG/tmfbwgCNi8vhuPP31i2m2HjrUWKtJVBWJlR5ppO9AUGYWS2fDzWyyZODGSge/5EEQBHckIulIxyKcUxcbSeViWA8d1kZ4MAj+phN7y2LNTBa8XAY7jIZ0pobsjhrF0HhFdqb6miIiIiIiIiGiqiK7CNB1oqoJiyZpzF+uXXnoODh4dq9Y9YlEVVz5/J849a121zmKYNo4NpXHkRBqj4zmIooALdmzA9m0D81aLOVVHMoozN/dj36HpAaJ6m8BmkkpEUCybsG0PmVwZ3R0xFEomxieDke+6JqOvO7FoXYgaFdFVGKaDsmEjGQ9+p+WiWDIxOpGDZQWvO9fzIEHEWLqA9WsWq9pJRLT0MFS0wuXzedx///3Yt28fMpkMFEVBX18fzj//fFx44YUL8kZrrgYHB/HYY4/h0KFDyOVycF0X8XgcfX192LZtG7Zv345EItH26+7duxcPP/wwhoaGYBgGkskktm3bht27d6O7e34T747j4KGHHsKePXswPj4Oz/PQ1dWFHTt2YNeuXdD12l0GiIiIFkIsqiKdKUKv7EqzLBeu6y347Hbf96sdcmKRIGCSiGnT7hePapjMlNq23rO29NcMFU3mShidyKO/J9nSeWdjmA46k1FM5kpTjmuqjHX9HQ2dY/P6npqhorF0AYWigXidTkb1iKIQhLXKFsplMwgVFRsLFTmuh6HRLHzPh+O6kCFhMlPCZLaEjmQUXakYLNtBJhv8vuOTBbiuD1WVWg6F1Vp/T2ccI+N55AsGdE1GLKJhcCSLTeu7IS/wa5qIiIiIiIhoOYhGVGSyJ2stpmnD9/2Wv3NKxiO47Dln48f3PolnnbsRL3ru9mmbfXRNwZmb+3Hm5tqj6RfCmZv7qqEiQQjG0W9a34Mt65v/zkgQBHR3xDE8lkO+YMCHX+3OHIuq6OmML4vv8MJNX6Zlw4cP23Zh2y4UZXG7KM3Esh2MTeRRKAbPt+O6yOTKMC0b6/o7USwHHbVZFyIiCjBUtEI9+OCD+MQnPoHbb78dllW71eDAwACuv/56vOMd70BXV9cCr3Bm4+Pj+I//+A984xvfwJ49e2a8ryiK2LFjB17wghfg6quvxhVXXIFotLUvmsrlMj7/+c/jC1/4Ag4dOlTzPpIk4fLLL8cNN9yAK6+8sqXr1HP8+HF86lOfws0334zJycma94nFYvj93/99vPe978X27dvben0iIqJGRHQVgihAhgRVlWBZLsqmjXh0eqBnPpmWA9fzIYqAVilgxGqsod3rPXNzPwQB8P3pt+07NDJvoaLt2wawfdtAtSXzgaNjOHx8HFs29DQckNqyoafubYePT+Dcs9c1va5IJAgVFcs2OpJAsWzC83yI4syFr+HRLFzXg2U7GBzNQtdkdCSi0DUFmWwJmVwJYqV4liuUYRgORAHo7Uy0tagW0VV0JCPI5MqYmCxAVYKPSCNjWawb4K40IiIiIiIiotOFgR9VkSFJAlzXh2UHnYtadcn5m7FhTRfWNrhxajFs3diL3Rduweb1Pdi4tmvOXZR1TUE8qqFQOjnuPZnQ0ZWKtWO5C0JTZUiiANfzq53Ni2UTHUp7NoS1k+/7GE8XkM4WAR/w4SNXMJDNleB5wX3CTtz5Qhmdy+jvQEQ0nxixXGFs28b
b3/527N69G7feeuuUQJGiKFO+gBkeHsZHP/pR7NixA3fcccdiLHcaz/Pwz//8z9i2bRve+973TgsUqer0MRSe52HPnj344he/iFe+8pX4l3/5l5auff/99+Pcc8/Fu971rmmBolOv67ou7rrrLrzkJS/Ba1/7WuTz+Zaud7qbbroJO3fuxI033jglUCSKImT5ZP6vWCzi61//Oi644AJ8/OMfh1/rG00iIqJ5JAgCouG4sUrxxDAWftZ42KUooqsQIEBV5Wog5FSCICAWmbre8LGtiEZUbFxbO5D99MHpHYzarasjhovP34zXXHMJ/vYtL8XLLjuv4cemEhF0ddQuiBw63toItKiuQBAQ7ERzXMAP2kfPZDJbQrFkwoePsXTQ1tswHAyP5TA8noVhWoAPeJ4Py3Gr3Zk6U7F52emWSkSgazI8Lxi35sNHoWhiMlts+7WIiIiIiIiIljtZEqGqwefzsBZjmHMbgSaK4pIOFAHBCLSXvOAcnLWlvy1j2QGguzOGVEJHNKKgtyu+rAJFIV2v1AfNoN5WKrded5tPQ6NZpDNBoKhsWBgcyWAyEwSKwr1xYfeiXMFYxJUSES0tDBWtIIZh4Oqrr8aNN95YDZp0dnbi05/+NI4dOwbLsmBZFn7961/j9a9/ffVxo6OjeOUrX4kvfelLi7V0AEAul8NVV12Fv/u7v0MulwMA7NixAx/72Mfw6KOPYnJyEqZpwnEc7NmzB5/4xCfQ19fXlmvfcccduOyyy3Dw4MHqsauvvhp33303DMOAaZoYGRnBl770Jaxfv756n1tuuQXPf/7zkU6n53T9D37wg3jzm99cDSjJsoy/+qu/wmOPPQbbtmHbNvbt24cPfOADiEQiAADLsvD+978f1113HYNFRES04GLRSkinUjQomwtfLCidEioCMGPnoWhEq9y3PSGos7cO1Dw+NJpFNl9u6By+7yObLwchlhb/XS5LYtMdl7asr92t6HCLoSJRFKuFtPBvki/WL7yYloOxdPCeJ50pwbZdSKKAREyDIIThojyGx7IoGRbG03n4XvC3S8TnZwSsIAjo6UpAEgVYlot0JggxjU7k5xRAIyIiIiIiIlqpwnpMGCgxrYXfcLYSCIKAzlQMfd3Jmh24l4OwLlSuBMuKJXPJfW/luB7ylaDQ6EQOI+N52LYHSRTQ0xlDT3cCAFAqB5vgDMOGac0tKEdEtFIwVLSCvOENb8Bdd91V/fmMM87Aww8/jL/927+tBmFkWcbu3bvxjW98A1/72teqnYs8z8Pb3vY23HbbbYuy9lwuhxe96EW48847AQRdlT7ykY/g8ccfx/ve9z6cf/756OjoABCMH9u5cyfe/e5348knn8SuXbvmdO2HHnoI1157LQzj5JdfH/vYx3DHHXfg8ssvh6YFb+L6+vrwlre8BY8++iguueSS6n0fe+wxXHPNNbDt1t4wf/7zn8dHP/rR6s+RSAQ//OEPceONN+K8886DKAb/Mz3zzDPxkY98BL/61a/Q3X1yPu/NN9+MG264oaVrExERtSoM6WiaAkFEtc31QrEdF7btAcLJoFA8Vr/wEoagNE2BKCJoyTyHwsBZW/rr3rbv0Misjy+UTPzf/+8+3Pi1n+CL3/wZvvCNny5IlyOg/gi0TK7ccmeesHNVubITrV7xyPd9DI1m4Hs+yoZVLeZ0d8XR3RnHuv4OJOKVcJHpYHQ8D8sKQkfdnfGW1tYoWRLR0xVcI18wUCybgA8MjgRj2oiIiIiIiIjopLAWoKlBpyJzjp2KaPkKO4OblgPX8+B5PgxzaYXMwq7apu2gVLYhCMGouXUDHYjHdEQ0JRjj5vooVzYj5guNbRwkIlrpGCpaIb70pS/hu9/9bvVnXddx2223YdOmTXUfc9111+H9739/9WfP83DddddheHhhvtAK+b6PP/qjP8JvfvOb6rGbbroJH/jAByBJM4+36O7uxq233opEItHStcvlMl7zmtdMCRS96U1vwvve976
6j+nq6sJtt92Gnp6TX8jdd999+Pu///umr//444/jne9855Rj/+f//B9cccUVdR9z4YUX4tvf/vaUY5/+9Kfxox/9qOnrExERtUpTZciyBAFCtXhUXsARaGH3GF2TIYkiJEmcse20qshQ1WC9ehtGoAU7yKa//+hIRiCKQo1HnOQ4Lr71/x7AsaGTnQ7TmSL+646H8O3bHkCmMuprvmxa1133tsefPtHSOasdoEwHTqV4dGI4A/O0AtJ4ugCzcp/xTAEAkIhr1UKkLEvo7ohj3UAnknEdkiRAUUT09SQgS/P/0SWiq0glgm5IE5MF2I4Lx3ExPJad92sTERERERERLSeRU0JFQmUD10JuOKOlQ5YlKIoYjLevdKxaaiPQCpWu2uVyEC6K6Aq6UrHqxn5BEKqdosIAUjbPEWhERABDRStCoVDABz/4wSnH/u7v/g7bt2+f9bEf/OAHsXHjxurPk5OTU7rmLIQbb7wRP/jBD6o/v/Od78Qf//EfN/z4devWNXX/U/3bv/3blJFnyWQS//zP/zzr4/r7+/GRj3xkyrHPfOYzOH78eFPXf8973gPLOvnG6vnPfz6uu+66WR/34he/GK95zWumHHvXu94Fz+MueiIiWjgnR6AF/1zIHUhhYSIMo8SiWrUDYz0nR6C1Z73hCLSB3iReuPssvOW1L8BfvvFFePa59UPdAHD3fU/VDak8c3gU/+cbP8UvHnwGjuPOaX31RCMq+nuS045fuGMDLjl/c0vnlGUJqhqEwQuVXVzFkonDxycwOJKBaTkola1gbj2AyUwRruNDUUR0JmPTzyeJ6OqIYcOaLqzr74Sm1g+MtVtHMgpdk+F5CEbTwUehaKJUKToREREREREREaAo0rQNZ+xWtHpFtEq9zaiMQFtCoSLP86vrKVU2RYY1xVOFoaKSYcH1PDiOu+TCUUREi4GhohXgs5/9LEZHR6s/a5qGt7/97Q09VlVV/M3f/M2UY1/60pdw6NChdi6xrpGRkSndklKp1LSAVCOuueYanHPOOTjnnHPQ19fX0GOy2Sw+8YlPTDl2/fXXo6urq6HH/+mf/umUbkWGYeBDH/pQw2v+xS9+MSVMBaCpMWbvfve7p/z8+OOP41vf+lbDjyciIpqrWBjSCVscmzY8b/7npXueVx1dFgaEZhp9Fpq2XsuZUyD34vM24a+uexHe8tpLcemuszDQm5o12LT/yCjuf2Tm91mO6+GeXz+Nf//Wz3Hw2FjL65vJmZtPvl9KxnW87pW78IoXX1B9PluRiAYdfjI5AydGJoPxYQhGiR0+No4Tw5MAgtFvxZIFQQB6OhOzdnZaaIIgoKcrAUkUYFkuCsXg9yiWWEQiIiIiIiIiOlXYuTjsCh12qaHVR9endgYvG9aC1AkbUSpb8D0fjuPCsoJNfLVqYJoqQ1FE+N7Jjuw5jkAjImKoaLmzbRv/8i//MuXYy172silhl9m8/vWvr7b3q3fO+fLxj38cxWKx+vPb3vY2dHR0NH2eq6++Gk888QSeeOIJvPWtb23oMV/+8peRTqenHHvjG9/Y8DVVVcVrX/vaKce+9rWvYWRkpKHHnx5o6u7uxtVXX93w9S+66CKce+65M56TiIhoPkUiwYdvVZEhyyI8HzAXoHhUMmz4PqAoIhRZAoSTgaGZRCMqIJxsyez7cxvZFo/p6EhGG75/vmjg+3c+0vD9J7PBe6T5KMA8+7xNUBUJzz5vE/789S/Etk2NhbJnkojr6OqIQhIF2LaHsYkCToxkUDSCUI7n+bAdF+nK2LOOZLS6k3GpkSURiXgQkgo7WpXmMC6PiIiIiIiIaCWKVmpDeuXz/UJ2saalRVdlCALgOB5sxwX8pTMCLRxnFnYp0jUZklT7K/J4ZdNcoRSMPssXjCUTjiIiWiwMFS1zP/nJTzA5OTnl2Mtf/vKmztHX14ddu3ZNOXbrrbfC9+f3X5L5fB433XTTlGOvfvWr5/Wap/rud7875ef169fjggsuaOo
c11xzzZSfHcfBf//3f8/6uFwuhzvvvHPKsauuugqSJM3p+k888QT27dvX1DmIiIhaJUtidRdSuCOtvADFo3DHU3X0WURrqNuNKArVYtdCj2zzPB/fv/ORpoopvg/86jcH5iWolYxH8L/e+CJcfdl5bR0tloxHsG6gAx3JCERRgG27GBsvYGgsi5JhYXyyAM8LijfJSmhnqdJP6WgFBK8V1+WoWSIiIiIiIqJQWF9RVQUQANcNNhStVr7vw7JX5wg4URSrm8fCeltxiYySz1cCQqXKxreZOnXHIifHuDmOG4xOKy2N34OIaLEwVLTMfe9735t27AUveEHT5zn9MYODg7j//vtbXlcjbrnlFhQKherPfX19uOSSS+b1mqHBwUE88MADU449//nPb/o8z3ve86Z0eQJq/01Od8cdd8Cypn6p2I6/GxAEwoiIiBZK+EG72uZ6Dp1/GuH7frW7UFgACOedN+L0EWgL1X3mvt8ewKFj400/7tCxcdz0nV9iIlOY/c5NijfxvDVDFEV0JKNY19+BVEKHKACm6WB0PA/TdCCKAno647OOiltsqjJ9h91cOlsRERERERERrTSaGnR8EQWh2q3IXEXdinzfx8RkAb95/Ahu/eFv8Zmb7sRdv9y72MtaNNX6YNj1eQl0KjJMG67jwfU8GGYQ+IrOECqSZQm6FryWC5VQFEegEdFqx1DRMnfbbbdN+TkWi+Gss85q+jwXXXTRrOdut9M7+lx88cUL9uXS7bffPq0TU63nYDaJRAJnnHHGlGN33333lJFutdR6blu5/mL83YiIiE4VrYR0wo5Flu3CmcduLqblwPN8SJJQLVQ0E44JA0i6rkAQgx10872D7PjwJO6+76m6tz/noq24+vLzqr/P6dKZIm657cFl12pZkkR0pmJYN9CJZEKHJApQZBF93QnIcnPdGReDKArTdtiFO9qIiIiIiIiIKBCp1ITCz9Bhx9+V7rdPHMFnv3oXvvCNn+IHP30ce54ZRLFk4cjxicVe2qIJXwthHcWygm4/i6lQDLoUGaYN+IAii1CUmetSYf2wWApCUYWSOa/1TiKipY6homVsdHQUQ0NDU46dffbZLQVzdu7cOe3YI4880urSZuU4Du65554px1oJQ7Wq1u+2Y8eOls51+nNn2zaefPLJBbn+mjVr0NHRMeXYY489Nu+j64iIiEIRXYEoCpBFEaoafCA35rH7T9hZKOxSpGnyrIWAU2mqDFmWIECAXhn7NZ/dZwzTxle/88u6t6/tS+Hy527Hs8/dhLf98WW4YMf6afeRJRGvePEFDY14W4okSURXKoYNa7uwbqCzbnhqKTp9hx07FRERERERERFNFY6a1077DL3SKYqEQnH65qPxyQLylSDLaqMqMkRRgOv6MKzgdbDYz0WhMrqs2vk8Ur9LUSgWUSEKgG27MG0H8IF8YXX+TYmIAEBe7AVQ62oFVzZv3tzSuWo9brZgzFw88sgjKJVKU46deeaZ0+5XLBbx0EMPYWhoCGNjY4jFYujt7cW2bdtqBqEatRDPXb1RbrZtY//+/VOOdXV1IZFItHz9U0NKhUIBR48exaZNm1o6HxERUTMEQUA0oqJQNBHRFFiWi5JhIR7TZ32s7bgolkwosoRoRG0oGF2utE2ORJrvUhSKRVVkc2VEdAVlw0bZsJBKRJo+TyNu+8mjdW9TFQm/d9WzIEtBzj8e1fDKF1+IC3duxAOPHsLgSAY9XXFc8bwd6O9Jzsv6aGZBqKhcLYQZhg3X9SBJS3dvxsh4Dtl8GZoqo6cz3tR4QCIiIiIiIqJmVTd+VToV2Y4Hx/Wq9Y6VavO6nrq3HTkxgXPPWreAq1kaBEFARFdQLFkolizoqoJcwUBnKrYo67FtF6bpwIePcmWj4kyjz0KiKCKiqyiWLRRLJrSUjFyhjM5UdL6XTES0JDFUtIzt2bNn2rE1a9a0dK5UKoVIJIJy+eRc0CNHjqBYLCIWa/+/7Gt16unt7a3+97v
uugv/+q//irvuuguGUTv9u27dOrziFa/Au9/97qYDQe187gYGBho6f2jfvn2w7alJ/VavPdP1GSoiIqKFEo1oKBRNRKMasnkDpbKNQsmcMfBjWg5GxrIIJ3pFdAU9nfEZwxqFogHb8QABiGhBAaCR8NLpYlEN2VwZuq4CKFVHqrW7E5Bh2hidyNe9/eUvOr9mUWXj2i5sXNvV1rW0yvd9PP70CSiyhB1ntP5+ZblSFRmCALiOD9txocgSyoaNeGxpBnUKRROZbBDcNwwbx4cmkUpG0NuVWNJBKCIiIiIiIlq+NFWu1FSCLtaW5cI0bcgrfJNLIq6jqyOGdKY47bbDx1dnqAgI6m7FkoVS2URXRxSGYcOyHajKwn8lHXYpMk0HrutDOmXU/WxiMS0IFZVNdKYW9/cgIlpsrCwvYwcPHpx2rKenfjJ6NqeGeoDgS6RDhw61fL6ZPP3009OOxWIxjI6O4tWvfjWuvPJK3H777XUDRQBw4sQJfPGLX8TZZ5+NG264Aa7b2FzWfD6P8fHxKcdEUURXV2tf3vX19U07VutvM9Ntc/m7NXt9IiKidkvEdQiiAE2R0ZEMQj7pTBF2nZnpjuthLJ2H5wOqKkEQgxbEg6OZmi2yPc/HxGQB45NBkSZoQSxAksWWRmlFdRUQAFWWIMsifB/VTjTt4vs+bvvJozULSwBwwY71S764lCuUccvtD+L7dz6Cu+97alWOVxVPKTaFr82SMb21+VJRKAXvnXMFA7lCsFkgmyvj0PHxmi3ZiYiIiIiIiOZKEAToelCf0Sqj5k3LWcwlLZjN67trHj9yYmKBV7J0RDQFUjgCrTJyLLdIo8PCOkmp0qUoojfWKR045fdwFv/3ICJabAwVLWO5XG7asWSy9dEYtcZv1bpGOxw5cmTasfHxcbzgBS/Af//3fwMIwjIf/ehH8cgjj2BychLFYhH79u3D5z73OZx99tnVx1mWhU996lN4+ctfjny+fjeAUK3fKRaLQRRb+59Ds8/bcv67ERER1SJLIgYq47lSySh0TYbn+RhL56cFUTzPx9hEHo7jQVFE9PcksaavA4oiwnX96uimkG27GB7LIl8JRHSkgq4rANCZbK3lsCSJ1dbc4T/DFsjtcuTEBJ46MFzztu6OGF566bltvV47+b6Ph/ccxRe/+TM8c3gUQBASqxeQWunC4FoYKiob7Q2gtVOxFLyOS4aJdKaE4bEsbMeF63g4MTyJwZEMHNdb5FUSERERERHRShOOlNK1qRtzVrrN62qHitKZYnWzz2ojCAKi0eD1UCwHdYr8IjwXruuhVLl+uRoqanxzoiAI1ZHyYcejXH51/k2JiBgqWsYKhcK0Y3MZVVbrsbWu0Q6jo6PTjv3Zn/0Z9u3bBwC44oorsHfvXrz//e/HBRdcgI6ODkSjUZx55pn4y7/8SzzyyCP40z/90ymP/9GPfoQ3vOENs157sZ+3xb4+ERHRfEgmIkgmIhAgoKcrDkkSYFkuJnOlKfebyBRgWg4kSUBfdxKSKEKVJazp60AsqsL3gclsCaMTOeSLBoZGM7BsF5IsYKA3gY5EECRKJSPo6pjDvz8jlWKXXil2tTkosnl9D37vqmdBVaQpx3VNwe9d9ayGWy0vNM/z8PVb78Ptdz82bVfh0cH0Iq1qcVVDRZVuVoZhw12CwRzTcuA4Lnz41b+dYToYHM0gWyjDh498wcDhY+Molti1iIiIiIiIiNon3LSlVT5DW7YLz1t6n53bbVOdUBEQjEBbreKVME6pbMLzfViWu+BBs1LZAnzAclzYtgdBaC5UBACxSjiqZFhwPQ+27bITNBGtSkvz2wxqSK2uPLLc+p+01mMb6fzTisnJyWnHwlFn5557Lr7//e/PGLTRdR1f/vKXMTY2httuu616/Pvf/z5uvPFG/PVf/3Xdxy7287bY12/W6OgoxsbGmnrM/v37p/xcKBQWtHtSsVic8WciotViof//MKoBmYwJ13aRiEpIZ4rIZEy
IcBDRFOQKBvJFAxCArmQMnmPChQRJFmGbNpJRCSIkZAslFAomwoyspsroSEYh+A4sy0VPRxxRTZjTv+9814FplCD6PjzPhGkCmYxQLYK1wxkbOnDdqy/BQ08cw4mRDDqTUVx6yTZ0JhSYRmn2EyySsmHCr1H4O3RsBDu3tT6ydbnyPR++Z8I2gWKxAFkSMTaeRjTSvtdKO+QKZZhGCYZpw7VNSKIARZZhWDYmJkzk88H/jhRJxMEjRaxf0wVJbKzl90rA94e01HFjCBEREREtZxFdAQRAFkUoigjb9mBYTrWD0UoVj+no6YpjPD39/fyR4xM4f/v6RVjV4tNUBYoswnY8lA0LsYiGXKFc3bi1EMLuQmGXIl1Tmp5YoqkKVEWCZbvIFw10JKKYyBQQj2ltXy8R0VLGUNEyVi5Pb7MnSVKNezamVjil1jXaIQwQ1fK5z32uoc49oijic5/7HO68884p5/vwhz+M66+/HtFo7ZEoi/28Lfb1m/WFL3wBH/rQh+Z0jgceeADDw7VHwCyEBx54YNGuTUS0lCzW/x8O18j+DDaRNbUA5E/Z3HVwziuqrdY62+GMfuCM/uB9SWbsADLNZXUXXEwpwzSnv5fYt/8odq5f+bsMZ3Ks8ho5tH/m+y0Vp/4VLQCFU/93tG+hV7O08P0hLTVHjx5d7CUQEREREbVMEIKNWuWyBU1TYNsmDNNe8aEiIOhWVCtUdPjE+CKspj08z4c4x41IsaiGTK6MYsmshIoM9HYlIAjzv8HJ9/1ql+ZyZQRaq6/FZCKC8XQB+YKBZDwCw7CD3ynKYBERrR4cf7aMRSKRacdc1235fLUeW+sa7WDbtdscnn/++bjssssaPs+mTZvwh3/4h1OOTUxM4Kabbqr7mMV+3hb7+kREREQz6evUax7PlxyUDKfmbURERERERES0ukUro6UilW40YYeYlW5znRFomVwZmdzS7VR9OsO08dDjh/GVW+7Ffb89MOfzxSKVEWiGDcfz4Fa6Fi2EcmV0veMFHbOA5kefhWIRFYoswnV9FIpBg4OJSXaaJaLVhaGiZSyRSEw75jitf9FT67G1rtEO9UIv1157bdPnqvWYW2+9te79F/t5W+zrExEREc2kr6t2qAgAxiY5N56IiIiIiIiIpgs7t+haMArNtj3YdusbqpeLTXVCRQBw5MRE3duWAs/zcfDYGG790W/xma/cif/56RMYHM3ikb3H4Pv+nM6tKBJUVQJ8oFQKwkS5Qv0pJu1UKAXXMQwb8AFVlSDLrU0MEQQByUTwnWa2UIYPH2XDRqm8OkJzREQAx58ta/F4fNqxYrHY8vkKhenJ2lrXaId6oZeLL7646XM95znPmXbs17/+NWzbhqJMTx4v9vO22Ndv1l/8xV/gD/7gD5p6zP79+/GqV72q+vOuXbuwY8eOtq1pNsViccpIi127djU0Uo+IaKVZzP8/9DwfJ4YzcF0XJcNCvmigpzMBSRQQj+vo6az/7yrf91Eq23BcF7GIBlmevxy87/sYHMnAtl0Ypo2JTPDv5M5UdFW06K5n4PEiJrPTR6A5Yie2nHH2IqxocXmej+HxLHwf6OtJQpFE9PUkEY0sjddIJltEJldGybAwmS1BUST0dU19v23ZDsbSBUiigIHeFABgw7puSHNsZ75c8P0hLXV79+5d7CUQEREREc1JRFchySLgALomwzAclAwLKWVlT1aIRTX0dicwNpGfdtvh4xO4YMeGRVhVYx56/DB+9PM9046nM0UcH5rEhrVdczp/LKLBskoolU0k4zryBQN93ck5j1abTaFYGX1mzG30WSge1ZDNl+E4HvKF4HeZyBQQjczt+SEiWi4YKlrGksnktGP5/PQ3LY2q9dj56nhT77ytBE96e3vR29uLsbGx6rFyuYwnn3wSF1xwwbT713reisUiPM+DKDb/pWWzz9ty+7v19fWhr69vTueIx+M1f++FEovFFvX6RERLxUL//6GmR3B0MA1NjyKZ9CC
JIiIRFRvWdM46Pz2VWqBFAtAjMRw5MQFN9+ELMjI5A/mii3hMg6K0totpudu0vheZ/PFpxwfHCtD06CKsaPFFIg4M04EPGZquQ5TVJfP+YjJvQ9MF5MseRElDPKZP+zupmo/JnA3PBwRJharIkBUNiVj9zlQrGd8f0lIzXxt6iIiIiIgWUjyqIZsrI6prMAwHZcNCKrGyQ0VAMAKtVqjo0PFx+L4/ax1ssew4Yw1+fO8e1GpK9MiTx9oQKlIxmS3BMB04jgtZllAsmUjE568WYZo2bNuFDx+lSqgoMsdQkSAISMZ1pDMl5AplJOIaSiULZcOa87mJiJYDjj9bxrZs2TLt2Pj4eMvnO/2xgiDUvEY7pOp8U9jZ2dnS+WqFXuo9F4lEAt3dU9tRep6HycnJlq59apgptHXr1rr3b/ffrdnrExERLZSIrqKvO/jSXhJFqKqMtf0dS66Qoqky+nuCdXYkY9B1GZ4PjKXz8Ly5tXperjasqV00GhnPwrRaH9u6nOla0AHTMG0AWDJtrh3XC9p5Ayib9YtlgiBAq/4Owd9wqfwOREREREREtDLEo0FYJKqf/Pzput5iLmlB1BuBli8YGBzNLOximpCI6Thzc3/N257cPzjnGpAsS9C1oL9FoRx0D8oVpnfGbhfH9arPt2HY8D1AkgRo6tx7bMSjOiRJgON4KFbGuaUzrU8hISJaThgqWsbOOeecaccGBwdbOlc2m0WpVJpybOPGjfO2W/KMM86oebzV69XqzJNOp+vev53P3dDQUEPnD5111lmQ5alvYFq9divXJyIiWkidqSg2b+jB+jWd2Ly+G7K0NN9+phIRpJLBzrmezjgkWYBlu6u2OLCxzk403wdODLcWxF7uqqEiKwjwmEukMFquBIMs24Hr+BAFQFVqF8vCIppZ+R3CNuBERERERERE7RCNqBBEAbIsQVWD7s+r4bPnlg09dceL790/vMCrmapYMme8/YId62set2wXe/dP//6pWbGoVllH8DoolMx5qae4rofjQ2lYlgvHcTFRqenFIlpbzi+KApLxoHaYzQffpxaKJszK5jMiopVsaX6rQw3ZuXPntGOHDx9u6Vy1Hlfr/O1y3nnn1TxuWa29uazV8cCv1a+xYjGfO1VVp4Wq0ul0yyPQjhw5MuXnWCyGTZs2tXQuIiKi+aCpMmJRbcl1KDpdX3cSqipDliT0dsYBISh0rMZuLl0dMUQjtds3Hx2sH9xeyVRFhiAAruPDclwAQNlY/MJRsbLTL+ygpGkKxDqFzKUajCIiIiIiIqKVQRQFxCr1hLCuUFrioSLLdpDJlZDNl+F5rX1G1jUFmzf01Lztqf1DM35fNZ8ms0V85qY78dXv/BL3PXwAk9npm+fO2Nxftwb06N5jc15DLKJCEADbdmHZDuAD+aIx5/OeyvN8HB+ehGk6cFwXIxM5OI4HRRHbOn4vEdMgiQJs20OhEtaaWKUbEolodWGoaBnr7+/HwMDAlGNPP/10S29OnnzyyWnHLrzwwlaXNqt6oaJCodDS+WoFcnp7e+vev9bvtnfv3pauffpzpyjKrIGsdl1/aGho2ti2888/f8l/aUtERLQUiaIQjGcTBeiailRlvnu7Cx3LgSAIdbsVHRta2qGi+SrUieLJdtnhuLGSMfNuv4UQ7vQLA05hcKiWU4NR9hIKRhEREREREdHKEY8F9ZRwNLdh2EtyvLxtuxibyGNwJItMrozJbAkTk60HRHZsW1Pz+GSuhJHxXMvnnYunDg7D94Hjw5O46xd78W9fvwdf+tbP8fMH9lX/JrIk4ryz19V8/NHBNCYyrX1vFxJFEZHKOLywa1Ku0L5aWxgoMgwbjudhZDwP2/YgyyL6u5OQ2tg1XRRFJCr1wmw+GOOWLxhzHhNHRLTUMVS0zL3iFa+Y8nOxWMQzzzzT9HkefvjhWc/dTjt27EAqlZp2vNUxYGNjY9OO9fX11b3/NddcMy1488gjjzR93Xw+j/379085dvn
ll886xq3Wc9vK9Rf670ZERLTSaaqMvu5grGrYntk07UXbUbaYNqypHSo6Pjy5JLvb7HlmEJ//+t34xP/5H3zz+7+el45KYWAnHB+22F2sLNuB47jw4Ve7D4WFulqmBKNMjkAjIiIiIiKi9gvrKZoiQ5ZFeP7JjrlLge24GE8XcGIkg2Llc300ogACUCxbsG23pfOevXUA9fZ7t2OMWCueqjF6bWQ8h2cOjUzpcnzhzg11z/Ho3uNzXkc4gix8vstlC47T2vN8Kt/3MTiSQblswfU8jE3kYdsuJElAf08SsizN+RqnS8Z1iKIA23ZRrGw2S7NbERGtcAwVLXOvfvWrpx279957mz7PL37xiyk/r1mzBs95znNaXtdsFEXB7/7u7047vmfPnqbPNTExgZGRkSnHUqkUtm/fXvcx69atwyWXXDLl2OnPQSPuu+++ae0wa/1NTvfyl78cqjq1nWQ7/m6NXp+IiIjqC3fUqYoMSRLg+ViVO442rO2sedxxPAyPZxd4NTM7dGwct/7wt0hnS3BcDwePjuPrt/4K//OzJ9r6t1tq48PCLkWmacP3AEkSoCryjI8JQ0Xh88JQEREREREREbWTLJ3sTBN2KyovgdHyjuNiYrKAwZFMdXRVNKJgbX8Kfd1JRCqf+Qul1rroRCMqNq+vPQJt7yKMQMsXDRwfnqx529nbpk5B6etOYm3f9EYAAPDY3uNz7jQV0VWIogDH8aqbnMJOP81yXQ+m5aBUNjE0mkWxZMLzfYxN5GGaDiRRwEBPCso8BIqASreiWBCSyuWD10quUA5GuxERrVAMFS1zV1xxBTo6OqYc+8EPftDUOcbGxnD//fdPOfZ7v/d78z5C6zWvec20Y7/61a+aPs+DDz447dhll10GSZr5DcO111475edjx47h8ccfb+rat99++5SfJUnCq171qlkfl0ql8OIXv3jKsR/96Edw3eaS2adf/5xzzpkxTEVERESzkyURaiV4UQ2RmEtnR91CmakAMx9dgObil7/ZP+2Y7wMPPXYYX/rWz3Dw2PSulq04dXyYtQTGh5XK5pQ1RDR1prsDmP6aLptLsw09ERERERERLV/hhq1oJVxUWuQNLablYHAkg3zRhO8HXX7XVMJE4eaceDwIihRKZssBoHoj0CYyRYyn5zZGrFlPH5zepShUa50X7txY8775ojHnuoooCohWAmaFSi1jIlOE2UC9bTJbwpETEzhwZAxPHxzG/sOjOHxsHMcGJ5EvGPDhY3QiB8N0IIoC+nqSUJT5CRSFkvEIRCHYbFY2LMAHMrnSvF6TiGgxMVS0zKmqine84x1Tjt1xxx2YmJho+Bz/+Z//OaXbjqIoeOc739nw43/961/jk5/8JG666SYUCo2/KbryyiuxadOmKce+973vTev8M5vvfOc70469/vWvn/Vxb33rW9HZOXUH/te//vWGr2vbNm655ZYpx6677joMDAzUecRU7373u6f8PDY2hh/+8IcNX//RRx+dFoI6/ZxERETUmmgkKHRoajjuavXtNpIkEesGOmredmwJhYpMy8HRE/Xf+2ZyZXzzv+/H7T95dM7hsCnjwypBnlKl1fVC832/On6tXPm99BlGn4XC9TuOB8d1AR8wzMXfMUpEREREREQrRzgCTdcViCLgun51lPhiyORK8HxA02QM9CXR35OEpsgQRAEdySgAIKqrkCQBrutXP2c36+yt/XVHoD01Q8hnPjx9oPb1ervi6O6MTzt+zllrIUu1vzZ+5Mljc15PvNLdp1AwUTYs+J6PEyOZGTtAj08WMDqeg2HYwbi0StbL9TxYjgvDtDA2kYdhOBAFoK87Ua17zCdJEhGPB8G5sOtVsbQ49SEiooXAUNEK8M53vhO9vb3Vn03TxI033tjQY23bxmc+85kpx66//nps3bq1ocd/5CMfwXOf+1y85z3vwZvf/GZceOGF00aR1aMoCj75yU9OOTY4OIhvfvObDT0eAIaGhqYFe84999x
pXYhqSaVS00I4X/7ylzE5Wbsd5Om++tWvYnR0tPqzpmn4+7//+4YeCwCXXnoprrrqqinH/umf/qnhx5/+3J1zzjkNhamIiIhodmGoSNcqo6JMe8HbVC8FG9d21Tx+bGhyyTwfh4+Pw22g087DTx7DF7/5M+w/MjrrfWcSdvoJi6HhCLKFVjaCDkOO58Gy3Clrm4koilDVYMeeYQZhudIidlsiIiIiIiKilUdTZaiqBAFCdQRaaZFGoLnuyZFb3Z1x6KoCQRTQ2RHF1o296O9NQtNkCBAQjQTBl2KxtYBIPKZPqaVEIyqede5GvO53d+N5z9o291+mQWXDwuE6G7DO3lp7Y7yuKdi+rfZt+w4Oz/nvp2sKEpVuUOOTBdiOC9t2MTSarXn/ickCJirdnTK5IobGsjg+PInDJyZwbHASg8MZDI/lUSrbEASgryfZUF2kXWKV10rZsODDh2W5HIFGRCsWQ0UrQDwex4c+9KEpxz71qU9h3759sz72Yx/7GI4cOVL9uaOjAx/84Acbuu7evXunXffAgQO44YYbGno8EIxAe97znjfl2Pve974pYZ2ZvOMd70CxWKz+LEkS/vVf/7Xh0W1//dd/jc2bN1d/zmazeNe73jXr40ZHR/GBD3xgyrG/+Zu/wcaNtdtD1vOJT3wCinLyTc7Pf/5zfOMb35j1cffccw++/e1vTzn2qU99CqLI/0kTERG1Q1jwUhUZkiTA81dnt6INa2qHikplC+lMseZtC+3AkcZbcOeLBr592wN45nBjIfhawgLVyaKR01C77nYLR5+FHZNURaq7o/B0J4NRlVDRIhV2iYiIiIiIaOWKR4NOLmGNZbHGh5fKFnwfUFUJqixBEAVs3dCDvu5k9XN0KhF0K0pUuumUDGvGDjozuXDnRlx8/ma84dXPxTv+9MV4+eXnY9vGXkgNfmZvh2cOj9Yddb69zog2ALhg54aax13PxxP7Tsx5XV2pGDRVhuv6GEvn4fk+iiUT4+n8lPulTxkXN5ktIpMzYJoOHMerdisSRQGKIiKiK+hf4EAREATnJEmA56FaF2K3IiJaqZhAWCHe9ra34dWvfnX153K5jFe84hU4dqx+S8Kbb74ZH/nIR6o/C4KAr371q1izpv4bilPdc889cF132vEf/ehHTaw8WEdX18kvrI4fP45rrrkGw8P1W0E6joN3vOMd07oU/dM//RNe9KIXNXztSCSCb3/729A0rXrsK1/5yrQuQKdKp9N45StfibGxk19g7d69Gx/+8Icbvm7oggsuwKc+9akpx/7sz/4Md999d93HPProo/jDP/zDKd0B3vGOd+BlL3tZ09cnIiKi2mRJhFpplxwWJeY6Oms5WjfQWbdt99ElMALN9/2mOw/5PvDTX88evq/n1KJRyQjCOPmi0fL5WlWsBIEMq/HRZyFdDV/TlXOs0k5cRERERERENH/CEWgRXQEEwLJd2M7075TmWzieKl5ZTzyqQZalKfdJxnVACDaXqaoE3weK5dYCIudvX4+XvfBcbF7fvWgbwffuH6p5PJWIYKA3Wfdxm9f1IJWI1LztkSePzrl2IAgCervikEQBluVWN6xNTBZRqHSHmsyWMDYRhIwyuSKy+aDm0pmKYk1fEusHOrBxbRc2ru3Cuv7ORQkUhSJapQtXJTBXYKiIiFYohopWkG9+85tTAjX79u3DhRdeiM985jM4cSJIELuuiwcffBBvfOMb8cY3vhGeFyStBUHA5z//ebzqVa9q+Hr13jw0+6Zi69at+OEPf4jOzs7qsQcffBDnnHMOPv7xj2Pfvn2wbRue52F4eBhf//rXsWvXLnz2s5+t3l8URXz84x/HO9/5zqauDQSBoP/6r/+aEix6z3veg2uuuQY/+9nPYFnBly1jY2P4yle+ggsvvBD3339/9b7nnnsubr/9dqiq2vS1AeDtb3873vOe91R/LpVKeOlLX4q/+Zu/wRNPPFF9Pg8cOIC///u/x/Oe9zyMj49X7//
6178en/70p1u6NhEREdUXjkDT1KldXVYTTZUx0JuqedtSCBVNTBaRzZebftzwWLblQo8gCNXCaKky+ixXWNhQket61Q5F4T8jTRTQtEpgzrY9OJ4H3/NXZWiOiIiIiIiI5k9EVyBJIiRRrI6XLxsL2ynXdtygniOcrPMk49NDM5IkIhELOiuFHZYKLY5AW2ym5eDg0dpdnbdvG5hx0ocoCrhgR+1uRSPjeQyP5ea8PlmW0NudgCAEz3FYUxkazWB8soDR8eAamXwJmVxwW1dHFKlEBJqqQJYliGJj00rmW0Q/2c0aCLpitdrhiohoKWOoaAWJRCL4n//5H/yv//W/qm8K0uk03vnOd2L9+vXQNA2qqmLXrl24+eabq4/r6enBf//3f+Ntb3tbU9d74QtfWDNl/ZKXvKTptV9yySV46KGHsHv37uqxdDqN97///Tj77LOhaRo0TcOaNWtw3XXX4eGHH67eb9OmTbjtttvw3ve+t+nrhl75ylfinnvumTIK7Y477sBll10GXdeh6zr6+vpw/fXXT+n+dO211+KXv/wlenp6Wr42APzjP/4jvvzlLyMejwMIOjH967/+K8477zwoigJFUXDGGWfgwx/+MEqlEgBAURR8+MMfxs0338yxZ0RERPMgLDaFhS9zlXZzqTcC7djQ4oeKDhyt3aVIEIBrX/ZsdCajdR979MREy9eNRU62Q/d8H7btLmhhNOyQZDkuHMeDKJwMvzVCkkSoSrAr06x0OuIINCIiIiIiImqnUzflRCsj0Bb6s2c4jkrXZMiSBEkSEYvW3iAeduiJRVUIYtBZaTluMDt4dAxOnWDLTKPPQhfsWF/3tt/uOdLyuk6lawo6KjWbdLYIw7TheT4mKiPPsoUyMtlgE1lnKlozCLYURHQFghBs2rIdF/BP1myIiFYSJhFWGFVV8W//9m/49a9/jVe96lVTuudYllXtTAQA/f39eN/73oennnoKr3zlK5u+1rnnnosPfehDU1LNmzdvnjbOq1Fbt27Ffffdh29961v4nd/5nSnn9X0fjnPyzZskSdi1axe++MUv4plnnsHVV1/d0jVP9dznPhd79uzBJz/5ySnhIt/3YZonE+miKOKKK67AD3/4Q3znO99BMlm/VWQzrr/+euzduxd/+Zd/iY6Ojupx13Wn/O7RaBRveMMb8Oijj+KDH/zgjKlyIiIial2kUvBSlcq4K391divauHZ6qEiSRMSj+qK0LT/VgSO1d96t7e/AjjPW4K2vu7Ru2+4jc+i0pKkyFFmE750siC5kt6KwQ5JRKVRpmtL0Lj2tGpYLXtNlg52KiIiIiIiIqL3isXAEWlBjMS1nyvdU861YHX0WdB9KxPW636nEKmPRJFGshqCKy3Cc1d4DtUefxaIq1g901rztVB3JKDav765526NPHkMmV5rT+kKpRASxiAr4wNhkHo4b1JhyBQOTmVJlLZG6dZ2lQBTFajfocLPZcnzNEBHNRl7sBdD82LVrF773ve8hl8vh/vvvx759+5DJZCDLMvr7+3H++efjwgsvnHOHmw984AO46qqrcO+99yKZTOIP//APkUgkWj6fIAh47Wtfi9e+9rUYHx/Hgw8+iP379yOXy0FVVfT29mJgYADPec5zpgRv2iUajeKGG27ADTfcgD179uCRRx7B4OAgTNNEMpnE1q1b8ZznPGfOnYnqWb9+PT73uc/hM5/5DB588EHs2bMHY2PBl2WdnZ3Yvn07du/ejUhk6b6JIiIiWilkSYSqyrAsB7qmoFiyYJj2os1pb0RQmBPa2gZ6/ZpO6JqCDWs6sWFNFzas7cLavhRkWWrbNVph2y6O1Ok2tG1jH4AgELZlfQ8e2Xts2n2OHB+fdqwZsaiGTK6MYslEPKohXzTQ152Y98C3adrIFoLdemEQqJXXpKYqyMOsjj0rGxZ832dgnYiIiIiIiNomFtEgiAIUWYKiSLBtFyXDRrzSwWg+mZYN2/EgiKeOPtNnfEwqoWN
isoh4VEOxZKFQMtGRjC6ZcVuzcRwX+w/X7up89taBhn+PC3duwOHj02surufj3geewStefMGc1hnq7ozDdrKwbBfj6QKiEQ3pTBFA8LfomKED9VIR0VUYpoOSYSEZj6DAUBERrUAMFa1wyWQSV155Ja688sp5u8bFF1+Miy++uO3n7enpwcte9rK2n7dR55xzDs4555xFubYsy3juc5+L5z73uYtyfSIiIgpEIyosy4GmBqGipdypKF8wMJktwvOBaERBKhGt7paai0RMx99e/5IlV0A7Npyu2877jM291f++aX13zVBRoWjCtJyWn6NYJAgVlU0bjucBTtC1KDaPhVHX9TA4moHv+SgbVjVUFO6gbEYYRLJsF67nARCD1/oSDs0RERERERHR8iKKAqK6imLJRFRXkLVdlMvWgoSKwnBHVFchCgJUVap2TKonmYhgYrKIiK5ClkU4joeyMb+f9dvp8ImJurWr7VsHGj7Pjm1r8JP4U8jX6Mr86FPH8Lxnb0N3Z7zldYZEUUBvVwJDY1kYpgOj0k05mdDRmYrN+fwLIaqrmMyWYJgOPN8HHG/Jb0okImoWx58REREREdUR7mTTq6OibPi+v5hLqsnzvGqgCABKZRtDo1mMjOeqnWjmYqkFigBgy/oe/MUbLsNLLz0H2zb1QpaCjzYRXcGa3o7q/cLxbRFdwfZtA3jJC3biLa99Ad55/UvmFLpSFAmqKgH+yXFk8z0CbWQ8B8ty4TguxicLAIBEXIOiNN81SpZEKLII+IBVKTiWKq26iYiIiIiIiNqlOgKtUmMpG9a8j0Dzfb/6WT0MMCXis0+AUBW5us54NPjncuo889T+2qPPNFXGpvWNT+CQZQkv3HVWzdt8H/jZ/ftaWl8tiiKhrzsBWRYhCsHIs65lEigCgvWH9ZWyGbzmltNrhoioEexURERERERUR7iDTVVkSJIA1/VhVsahLSVlw4bnA4oiorc7iWyuhGI56GRTNmzomoxUIjLrjrzlRBAEdHfE0d0Rx64LtsB2XBw9MYFi2ZoSgupIRvHnr3shujvjbQ9HxSIaLKuEUtlEMq6jUDTgecmmr5PLl2E7LuIxvW7QaTJbQr5gwIePsckCXNeHqkpzKrRpqgLbCUagRXQVZcNGZ6rl0xERERERERFNE49qGAGgqwoURYRte8gVjHkdbVU2bbieD0kWoOtBDWe20WehVCKCctlCLKojkzNQNmw4jtuWMfC+72NkPIcTIxk8+9xNcz7fqTzPx9OHRmredtaW/upmrEadv309fvWb/UhnS1OOb982gBdccmbL66xF1xSsH+hs6zkXUkRXYReC10pM11AsmehpQycnIqKlgqEiIiIiIqI6ZEmEqsqwKkGiYslaki2MwzFYEV2FKkvo7Uqgw3GRKxjIF41KC+k8dE1GX3cCorjyGpYqsoRtm/pq3tbbnZiXa8YiJ1tchwXGYtlEItZYoRIIug9lKgW68ckCOlNR9HQmpgSTDNPG6EQOQBAuMk2n2iJcEFoPSumajELJrLZGL5XZqYiIaLXJ5/O4//77sW/fPmQyGSiKgr6+Ppx//vm48MIL5/TvmYW2d+9eXHjhhbCsqf8+m68uk3v37sXDDz+MoaEhGIaBZDKJbdu2Yffu3eju7p6Xa4Ycx8FDDz2EPXv2YHx8HJ7noaurCzt27MCuXbug642/FyEiIppvsiwhEdeRrwSJxiYKyBUMJOP6vNUnisWgU0wsokGAgIiuQFUa+0o0EdMxKuagyBJ0XYZhOCiUzJZDUL7vY3A0g737h/HU/iFM5koQBOCszf1INBh0asSxoXTdz/XbtzU++iwkSSJeuPtsfO/HDwMAtm3sxWXPORtr+zvmsswVKaIryBUMlCsdoA3DhuN6TQe5iIiWKoaKiIiIiIhmEI2osCwHmhqEiurNpl9MYXvlaGX3HYQgZNPdEUMqoU8JF+WLJlKJ2Vt+0+xkWYKuyTBMB4WyiY5EFPmC0XCoKF80qoE
iw7SgayomMyXkCgb6u5NIxHW4rocTwxnAB4qGiVw+GLHW0xmHMsddklolHGdYDjzfB1wPpuXMaSwcEREtDw8++CA+8YlP4Pbbb58WwgkNDAzg+uuvxzve8Q50dXUt8Aqb4/s+/uzP/qzu79Iu5XIZn//85/GFL3wBhw4dqnkfSZJw+eWX44YbbsCVV17Z1usfP34cn/rUp3DzzTdjcnKy5n1isRh+//d/H+9973uxffv2tl6fiIioVd0dMeQLBmIRDRmlDNt2561bked51XBHrInRZyFRFJCI68jmyohHdRhGAcUWQ0WlsoX/uOVeZPPlKcd9H7jzl0/i9176rKbPWUsmV8IPf/ZEzdtkWcS2jbU3Yc1m55lrcejYOM7fsR6b1s1vaHo50zUFogC4jg/TdqApMool1t+IaOVgRJKIiIiIaAbRSDAyTNeCoIVp2vO2470VpuXAdX0I4smQyOZ13ejrSUCWJchSMCIrHJMVFtaoPcICZbEUPK/5ogHX9WZ9nOO4GBkLug9lC2UMj+UxMp6D7bhwHQ+DIxkcHUxjcDQDx3FhOy4m0gUAQDKhV1+Xc6HIEiRJAHzAsoJuV3x9EBGtbLZt4+1vfzt2796NW2+9dUoIR1GUKZ2JhoeH8dGPfhQ7duzAHXfcsRjLbdhXvvIV3HvvvfN6jfvvvx/nnnsu3vWud00LFKnqyX8vu66Lu+66Cy95yUvw2te+Fvl8vi3Xv+mmm7Bz507ceOONUwJFoihClk8GgovFIr7+9a/jggsuwMc//vEl9b6ViIhWL01Tql15OlJB0CJXaOzzc7OKZas6Il5TZEBA0x2BwjBINKJCFAHb8WCYdtNriehK3W41e/YN4umDw02f83THhtK46Tu/wOhE7fccZ2zqg6K0tilJFAW84sUXMFA0C0E4OWYvrKsUS+ZiLomIqK0YKiIiIiIimkFED74kUhUZkiTA87GkuhWFRa2IpkCAAEWRoGkKOlMxbN3Yg56uePV2IFi757W/aLdaRXUVggDYtgvLcQEfKDRQOBoey8F1PVi2g8lKt6KyYePESAaZXBGe76NctlAqWfDhYyydh+cF4bbONu7k1E/pVgQAk9kinHko6hIR0eIzDANXX301brzxxmrQpLOzE5/+9Kdx7NgxWJYFy7Lw61//Gq9//eurjxsdHcUrX/lKfOlLX1qspc9obGwM7373u+f1GnfccQcuu+wyHDx4sHrs6quvxt133w3DMGCaJkZGRvClL30J69evr97nlltuwfOf/3yk0+k5Xf+DH/wg3vzmN1cDSrIs46/+6q/w2GOPwbZt2LaNffv24QMf+AAikeBLUMuy8P73vx/XXXcdg0VERLQkdHcG9YmYrkFVJXiej1yhPMujmheGOcJNQPGo1vQYqoiuQlUliIKAWDQIJJ3ebagRgiDgonM21r39Bz99vO7mHs/zceDoGB576jiOnJiA59X+9/nBo+PVjU61tDL6jJoX1g/LRlCnK5bMZfMezDRtpDNFHB1M4+DRMYzVCagR0erFUBERERER0QxkSYRaGQdVDWC0sDttvoTFJ10Lihdh0QwIiledqRggBKO6ZFmE758MkNDcSZJYfV2USsFostmKopPZEoolE57vYyxdAPxg92JEVwAfyOQMDI5kUDSCQmg6U4JluZBEAT2d8SldJOYqXHuhaMJxXFiWi+ND6XnZLUpERIvrDW94A+66667qz2eccQYefvhh/O3f/m01CCPLMnbv3o1vfOMb+NrXvlb9d47neXjb296G2267bVHWPpN3vOMdcw7tzOShhx7CtddeC8Mwqsc+9rGP4Y477sDll18OTQvee/X19eEtb3kLHn30UVxyySXV+z722GO45pprYNutvX/8/Oc/j49+9KPVnyORCH74wx/ixhtvxHnnnQdRDMq7Z555Jj7ykY/gV7/6Fbq7T3YTuPnmm3HDDTe0dG0iIqJ20lS52jEolQxCsPk2dytyHBeGGdQ84pHg39HJJka
fnSocd5aK64AQhEVaqQddcv7m6oav0xWKJu78xZPTjqezRXzlv+7Ff37/fnz/zkfw7dseAFA7oPKCS86sGxxKJSLYsW1N02um5oWhItN04HgePM9fst2gXddDvmBgeCyL/UdGcfj4BMYm8iiXLdi2i3SmiFwLIToiWrkYKiIiIiIimkU4akpTT3b7WQo8z6sWy6KVNsuxiDblPqIoVAsb4T8NY+6hKMdx8czhUdx216P4+q2/mvP5lrMwyFWo7AwslSw4jlvzvqblYCwd7PiazJZg2y4kKQgL9fck0dedgCyLcBwPY+MFDI1lkS8EX2L2dMUhy621LK+79ogGRQmuNzz+/7N35+GN3fW9+N9nP9pleffsSyaZmSwTkkwmkEBDSEjIQkIhUFIa2nJbaH8thK08LdBLKbdQKFuhvQ1luSxlSxNKCBAICVnIvkwySWZfPB6vsrXr7Mvvj6OjGY8lW5ZlSbY/r+fJM9GRzvl+LUsz1sefJQfLtqHrFk6MpSmxiBBClpHbb78dd9xxR/m2LMu4++67sW7duqrn3Hrrrfi7v/u78m3HcXDrrbdibGzhYzoa5b777sP3vvc9AN74tlAo1NDrq6qKt771rdMSit75znfib//2b6uek0gkcPfdd6Orq6t87LHHHsPf//3fz3v9PXv24P3vf/+0Y//+7/+OK664ouo5O3bswA9+8INpxz73uc/h3nvvnff6hBBCSKPN6FbkeiPBq3Fdd17dXoqqX3jFg+c5sCwzrfhqPmKRIDieBc9ziIS8ZKhMTpn3dXieww1XnIdq9UHP7z2BQ4MT5dtHhpL4xg8fwVhpZDoArBlIlBOJT8eyDN545Q70dUenHY9HA3j7DRc3PI5AKvOKEr3n2o+71dLJutnS2SIODU5gZDyDbE6FbTlwXBeKZmAqUyy/HzOUVEQIOQUlFRFCCCGEEDIHP6lIlryORbputkULY7+lsiB4QS6GZcp7PdXp+6+305Jl2dh3eAw/+dVz+PzXf40f3P0kdu8dwuBwCpOpQp1fxdIXlEWwDGBZDjTDe27zRW3G41zXxehEBm6pWq2cLNQRBldqxR4MiBjoiSMWkcEwXoUb4AUD/aSwRmJZBr2d0XIi0/hkHpbjQCuNYqvWXp0QQsjSUSgU8LGPfWzasQ9+8IM466yz5jz3Yx/7GNauPTmyI51OT+ua00qapuE973lP+fYHPvCBaYk8jfCVr3xl2sizaDSKf/mXf5nzvN7eXnzyk5+cduwLX/gCTpw4Ma/1P/KRj8AwTla4X3rppbj11lvnPO91r3sd3vrWt0479qEPfYhG4BJCCGk5SeQRjXidg/xOQIWCNmMMt+M4SGWKGBpJYWgkhal0AWaV4h2fohrlzsF+IlEkLINl6+v2y7IMOuNewrLfrUjTrbpiKqv6OrBrx8aq999z/wvQDRNPvXAM//U/T0A9bY31q2b/GUcUeLz1uosQLn3dawcS+NObL6vaIYksjmB5BJr381uxzZKKTNPGxFQecAHDtJAtqBibzOL4SAoTk3nkC1o5VuV3LSKEEICSigghhBBCCJmTn8whCjw4joHjtke3Ij+p6GQnIqFisCzkd1oqjboyTHtGwK7W9e74xdPYs394xte/7/DovK83XwVFx77DYw177k3TxpGh5LSKwHqwLINA6TkulroVZXIqVM2YlpQzmSqU22BPZrwkrGhYnpEsxLLe2LqB3jgS8SB6uyLlYOti4HkOvV1RcBzjBZgmc7AdB6pqYHg83RYJdIQQQur3xS9+ERMTJ/+tkyQJ733ve2s6VxRFvO9975t27Pbbb8fRo0cbucW6fOpTn8KhQ4cAABs2bMDHP/7xhl4/m83i05/+9LRj73rXu5BIJGo6/0/+5E+mJTlpmoZPfOITNa//yCOP4Oc///m0Y/MZY/Y3f/M3027v2bMH3//+92s+nxBCCFksnR3emPagLEKSeDju9DHiRUXH8HgGuYIGxwUcF8gXdQyPZZBM5WG
Y02MCimpgZCKDiak8bNsFy54sroqWxq3Vq1HdigDgNbvORCJeuatirqDha99/GL988EVU+gi+fnXnzIOniYYDuPm6C3HB2etwy427KhadkcUVKHURVzUDLlwYhj3j9dpK+aIGuICmGxgZzyKdUaBpFuACPM+WC+b8pKjZuogRQlYWSioihBBCCCFkDl4LY6/Lj1xKzKm3208jqbr3Id8ffRau0tJblrxkI549pRVzHfuPhGWs7uuoeN/LhxY/qejg0XH8+OdP43NfuxffvvMxPPrMYYxP5mpOejFMC0eOJ/HAY/vwrTt+h8/e/kt87ydP4IHH9i94b34VpKJ5VWiGYeH4cAoHj43j6NAkRsYzSGWKAIB0pgjbciEI7KzJQgLPIRpenA5Fldbq64qBYxkYho3kVN5rf60YGBnPLKnEonRWwYnRNFKZ4pLaNyGELAbTNPH5z39+2rFrrrlmXh19brnllmnjNipds9n27t2Lf/7nfy7f/upXv4pAINDQNb72ta8hlUpNO/ZHf/RHNZ8viiLe9ra3TTv2rW99C+Pj4zWdf3pCU2dnJ97whjfUvP7555+Ps88+e9ZrEkIIIa0gCjxifrei0p+FggZNNzE+mUMyVYBte5+Ze7si6OuOQpa9mExR8ZIhJqZyKBS1cjKRYdhgWSAelbGqrwMcy0IQOAQD9Y0+83ndirxuPwvtViTwHK6fZQxaukqykiTyM0abVbOqtwNvuPwc8Fzzfv3rOC6GRlILLthaDiRRAMcycByvyznQXt2K/OS9ourtTRQ4JOJBrOqNY3VfB0IhqXS/F2/M0Qg0QkgJJRURQgghhBBSA7/CSxK9BJ5WdyrSDatcged3IApVSSpimJNj0QJ+UpRWX1LU1s39FY+PT+aQzhbrumat/ACV47gYHJ7Cbx7di9u//xC+9z9P1HT+SwdG8L3/eQKPPH0IQ6Np2KUuQuOT2QUniQUkL3BkWy5S2SJUzYDlOF5LacMqt48uKDqKigGGAbo6InW3YV8MgsChtzsKlmWg6RYmpnJwXBeFoo7xyVyrt1eTfFHDxGQORUVHciqP4yMpWHO0yCeEkOXsN7/5DdLp9LRj11577byu0dPTg507d047duedd7YscdN1Xbz73e8ujwV7y1vegmuuuabh69xxxx3Tbq9evRrnnXfevK5x3XXXTbttWRZ+8pOfzHleLpfDr3/962nHrr76anAct6D1X3zxRRw4cGBe1yCEEEIWQ2c8DDBe52W51K1oLJmDqplgWCAeC2CgN166X0BfVwz9vTGEgl5sQ1FNTKaLMAwbDAvEIjIG+joQj4bAsV5h2KreeEP2Go8GwPNcQ7oVrR1I4MJz1s/rHN2w8PhzR9vqs62qGXhx/zDuuvdZfP4/f4Vv/fejuO+Rl1u9rbbgF4YppbhboU2SinTDgq5bcOFCUb09JeIhRMMBCIL3M6ZfrFhUdTiuC9O0y12LCCErGyUVEUIIIYQQUgM/KUeWvOo4XTdb2gXF/1AvSwIYMBAEDqLAV338yf2XWjHr9QUFztzYV/W+vYfG6rpmLYqKjqNDkxXv6+uO1XSNtasqjytxXeD4SKrifbViGAbRUoVlLq9hfDKPEyNpnBhLY2Iqh0xeQUHRkSqNPYtHg5DE6t+vVhEFHr1dXrKTpllITuXhwkU2p7Y8ka4W2Vyp6k7TYTsONM3EsRNTbdFZjBBCWuGuu+6aceyyyy6b93VOP2dkZARPPFFbUm+jfeMb38BDDz0EAIhGo/jiF7/Y8DVGRkbw5JNPTjt26aWXzvs6r3zlK6d1eQIqf09Od88995STpnyN+L4BXkIYIYQQ0mqCwCEe8Tr3xqMnuw3KMo+BnjjikSAYMAgGRXTEg2BYBpLAozsRwUBfHOGQBJYFohGvM1FHLFTuzjzQG8eGNV3lAqyFYhimPLYsFllYtyIAuPySs6Z9zXPZed4G7Dp/A3h+fsnFi2X/kTH8y3/+Cnf96jm8eGAEaul5SKYKdSd
bLSenjkADvPF8tu20cksAgHypS5GqmbBtFxzHzIhLSaIAgWfhOt6+AS/GRQghlFRECCGEEEJIDfxKI1HgwfEMHBflwEkr+MErf1/VuhT5/JbfkiSAYQHb9iqO5iseDWKgp3ISz77DizMCzbRs/Oiep6smtWxa213TdRKxULmq8HSDw1N1788XDcvoToQRCooQBO+jlmU5UFQTmayKyVQBjuMlpkXDlffRDiRRQE9nBCzjBZv8rlbt1LK7Etd1oZSCdtmcitGJLAzTgm07GEtmW7w7Qghpjbvvvnva7VAohC1btsz7Oueff/6c126GZDKJD3/4w+Xbn/rUpzAwMNDwdX72s5/NSB6v9BzMJRKJYPPmzdOO3X///SgWZ+/uWOm5rWf9dvm+EUIIIZUk4iGAAWRJRCgoorszjL6uGASeA8ezGOiNY01/Aj2dUWxc243OjhBYloHIc+jqCGPNQAKJUjKRIHDo74lh/eouRBbh83a5WxG38G5Fksjj2teeO+fjOJbBda89F69/9fYZScqtNNATR7Uau4PHaARaQBbAMIBpOjAsG3C9rsqtlit10PZjO6GABKbCLD4/vlhQtNJ5Ko2WJ4RQUhEhhBBCCCG14DkWcqnaKCh7H7AVpTUtgG3bgaZ7CTb+OLPwHElFksiD41mwzMlKpHq7FZ1VZQTa8HgG2XnMW3ddF9m8OmuyiuO4+J9f78aJsXTF+0WBw5qByh2ITscwTNVuRccbkFTEMAxCQQndiQhW9XZg7UAH+rqjSMSDCAclyBKPSEhCT2ekYuCmnciSUA4k+clzRbW9k4pUzYTruLAcB4Zhw7IcjCazXqtxin8RQlagiYkJjI5OT/g988wz6/o3aNu2bTOO7d69u96t1e0DH/gAUimvu+BFF12Ev/iLv1iUdSp9bVu3bq3rWqc/d6Zp4uWXZx8P0qj1+/v7EY/Hpx174YUX6BdDhBBC2sKp3Yq6ExGEAhLAAB3xIDaclhzEcyy6EhFsXNuN7s4IOJ4td23u64lhw5ouRCOBRfusfXq3Iob1uhXVOxpq45puvOLstVXvDwZE/OFNl+D87dUf0yqRsFy1Y/TBY+NN3k37YVm23K2oWEomys0jVrYYNN2EadpwTinG8kcJns6PL2qaBcuy4TguCsX2jgcRQhYfJRURQgghhBBSI7+7TCjgz0c34DjN/6WM36VIEDjwPAeGZcodi2YTKnUr8keg1duqe+umyklFQO3ditLZIr72g4fw5W/9Bp//+q/x7TsfrZg4dP9j+7D3UPVrXnDOevBc7R9r1q3qrHh8NJlt+IgslmUhSwKi4QC6EmH0dcfQ2RFuqwrD2ZRH5ZU6Falaa0f+zUUpJT3pp3wfXQeYTBWQOS2AZ1mtbz1OCCGLrVLiyvr16+u6VqXz5kqMabT7778f3/nOdwAAHMfhP/7jPxbt39RWPnemaeLQoUPTjiUSCUQikYasXygUcPz48bquRQghhDRad2cEoaAEhvVGna1f1Ymezii4Kp/zOY5FIh7C5nU92LSuGxvXdiO2iMlEpzq1W1E46Hcrqj9Z5HWv2lpxDFpvVxTveutlWFtjAVUrnLG+p+LxwRNTMMz2H52+2PzXR0HV4cKFqpktfV5y5dFnBlwHEHgWklh5PCDPc5AlrxixUIqz+OcTQlaupRHNJoQQQgghpA34La5lSQDPs3Act+6qtIXwkzwC5c5JIlh27gCanwzlJyBpulVXkkgiHkJvV+VfbO07PDbn+apm4Nt3PobxyXz52OBwCt/88e9w5y+fLbcQf+bFQTz27OGq1+ntiuCyi86Y197XV0kqcl1gaLRyN6SVyk8qMk0blm3DbdHrvVaK6u3N76wUDcvlAO3p3bCSqVxzN0cIIS3w0ksvzTjW3189MXg2sVgMgcD0X3oNDg7OOcarUXRdx7vf/e7y7b/6q7+qaxxYrRr53PX19dV0fd+BAwdgmtMTnetdu571CSGEkGZiWQar+zuwZUMv1vQnIEm
VEx0q4XluEXc2E8Mw6OyY3q1IN+rvViSJAv7wxl1Y3dcBwBt3duG56/HON78SscjMZKN2Ui2pyLIdHBuabPJu2k9AFsCxDGzLLcfwWtmt6PTRZ8HA7IWJfudq//EFRYdlU3EWISsZ3+oNEEIIIYQQslTwPIdAQISqGggFRGTzGhTVKH/YbhY/YBUsJRVVa1l8Oj9oIAk8OI6BbbswTKtqddJsztrUPy0pyDc0mkKhqCEckiuc5Xn4qYPlgMbpXjo4gn1HxrD9jAHs2X+i6jUiIRlvu35neZRbrRLxEMJByRuJdZrB4amqgbGViONYiCIHw7Ch6RbCQQ5FxUAw0NzXey1s2yknE2mlgJ0sCwjKIkSRx8TE9CCvrlvQdXNeAWtCCFlqjhw5MuNYV1dX3dfr7u6e1uHGdV0cPXoUZ599dt3XrNWnPvUpHDx4EACwevVq/MM//MOirZXP5zE5Of2XYSzLIpGor1tAT8/Mny0qfW9mu28h37f5rk8IIYSQ6mKRAFIZL6k6EpKRy2vI5tWaukdX0hEL4Y/f8ipougmeY5ueKFWv/p44ggGxXNxzqoODE9iycWZS80rCMAxCQQm5goaioiMoezHErkR9nScXQlF12JYD23GglOIlc8UxQwER6UwRpulANy1IAo98QUVHLNSMLRNC2hAlFRFCCCGEEDIP0bAMVTUQDErI5jWomgHHcZo20ko3LNiOC5ZFOSGi1qQmnucgijwMw4IsCSgqBlTNrCupaOumfjz4xIEZx10X2HdkDBees77qub+360wYhoXnXh6qeL9tO3hhX/WEIlHg8LbrL0I0PP/KPYZhsG5VJ146ODLjvsHhqXlfb7kLSEIpqchEOCihqOroRvODYHNRNRNwAdOyYVkOGAaQxZOdvLo7Ixg6rTmRolFSESFkecvlZnZli0ajdV+v0vitSms02r59+/CZz3ymfPvLX/5y3aPAalHpawqFQnX/rDff522pfd8mJiaQTCbndc7p490KhUJTXku+0ztsNavjFiGEtCP6O3H+JMFFIa9AElw4tg5F0VFUePA1dJCuhgFgW95/S8WGVR148cDMcfX7D4/iil2bmjKSrp0JnAPH1pEv6AjJLDiWwUSSgSzVl4BWr6l0EbqmoqgacCwdAs/BtQ3o9uwdtgTegaqZyGSyiEcCGJ8wwDF2k3bdGvT3IWlnhUKhpetTUhEhhBBCCCHzEA7JGJ/MQRJ4CAIL03SgqMasnXkaye9SJEsCGDAQBA6iUPuP9cGACMPwuhMVFQOabs59UgXdnRF0dYQxmZ75gWbf4dmTikSBx3VXnIdN63rws/tfmNceGAb4/WsuQF93rJ5tA0DVpKLRiQx0w5p396PlLFCqplN173Wn6xYs2wHPtdck7aLqdZ7y24pLIj9tJGCl/aqagY5YsDkbJISQFqgUdAyF6q8urnRuMwKb7373u2EY3r9D119/PW666aZFXa/Vz1ur15+vf/u3f8MnPvGJBV3jySefxNjY3CN0F8uTTz7ZsrUJIaTd0N+J9RnKVy6aWs5CfBG6PnOkl66rePa5Z5GINjd5pp0dz3mvj6OH5nhgExgAjqbn8fg8kB73/n//y4uypbZFfx+SdnJq1+BWaK9IMCGEEEIIIW2O51iESuOfyjPGK7R7Xix+0oTfWnu+o9f8vQdKo9N0w4LjuHXt5axNldtZHzsxWbEF9um6EmH0dc+v8v6a15yDzesWNqJs7arK40tcF3j02TaI8LQRSeTBMoBteaPyAEBt4uu9Vv7rTTdKo89q6EBUy2uUEEKWsnx+5phSnq8/cbbSuZXWaKRvfOMbePDBBwF4yTFf+cpXFnU9oPXPW6vXJ4QQQgipRX9XANWaEQ1PKM3dDCGEkEVFSUWEEEIIIYTMUzTsdSUKlhJ0NN2EbTuLvq5tO9ANL7EjUEqaCM8zqSggCwADCDwHnmfhuicTMeZr6+b+isddF3h+3xBcd/Zkpe5EBK+8YDO6E+Ga1rvkFZt
wwTnr5r3P03V1hBEKVq6Ye+SpQ9h/pHWV8u2GYZjyiDC11FHK7wrULizLhlF6X/hdr/ykuWoc1532fiKEkOVIVWdWjnMcV/f1KiWnVFqjUSYnJ/HhD3+4fPt//+//jbVr1y7aer5WP2+tXp8QQgghpBaiwKI3Ublr93CSftYghJDlhPr6E0IIIYQQMk+hoASGZSDyHESRg2HYUFQDkfDijkDzkzkEgQPPc2BYptyxqFYcxyIgCVA1E7IsoFDQoermvK8DAL1dUcSjAWRyM4NF9z2yFy8dGMGuHRuxdXM/uCrjsjat7caGP3gNnt87hAce34eiUrl7zNbN/XjtJWfNe4+VMAyDMzf24dkXK7eN/cmvnsOf3nwZumpMdlru5NLrRdNMxMKBqt+jVvE7hemmBdt2wbLMnCMBTdNCIOB1K6Jxd4SQ5SoQCMw4Ztt23derdG6lNRrlAx/4AKampgAA5557Lt73vvct2lqnavXz1ur15+sv/uIv8Ja3vGVe5xw6dAg33nhj+fbOnTuxdevWhu1pLsVicdo4i507dy5oxBwhhCxl9HdifSzLwYnRFFy4GJvIwnGB7kR4XuPpl4MdxSgeePzgjOM5FehddSaCARqBNpUuQjNMREISouEAJIlHf0980dd1XRfHR1JwHRfJdAGGYSEWCcyrONG0bExM5QEG6OuKgWMZ9PVEIUvL8/tKfx+SdrZ3796Wrr+y/nUjhBBCCCGkATiORTgoIV/QEApKMAwFRVVf1KSioqIjlfHaR/tddoKyCJat0mt6FsGA6CUViQIK0MvdXeaLYRhs3TyAx549XPH+0Yks7vrVc7jv0b249ILNuPDc9RUfx7IMzt++FtvOGMCjzx7C488dgWWd7Py0YU0X3njljrq+1mpedcFmvHRgpGKnGsO08eOfP40/uflVkMS5x2gtdwFZQDoLaIYJF67XGci02iZY6o8xUzXvT1niwVTrwV6im3b5nI5YcHE3SAghLRKJRGYcs6z6O7RVOrfSGo3wwAMP4Nvf/jYA7+eN//iP/1jQCLD5aPXz1ur156unpwc9PQsbTRsOhxGNzm8kbiOFQqGWrk8IIe2E/k6sXbZowjBsBEM2FNWE7fKQ5JX1+XLrGWvw2ycrx4SGxgs496zVTd5RfRzHxePPHYFp2Th7ywA6OxpXZBaP85iYykOzGHTJAQAM5EBw0WMq+YIGUQzAtGxYtgqW4xCLxcBXKfqrRAKQU2wYhg3L4RAMyrBdYcX8HUF/H5J2Eg63tviVxp8RQgghhBBSBz+BKFTq8KPpFqx5jkCzbQeqZsx5nqIamEwXAADhsIR4xAtSxaL1VZn7Y9vk0ogow7DrHt+2dVPfnI/JFzSkssU5HyeJPC7fdRb+8h2X43Wv2opXnL0W1732XLz9hosh8PWP/agkHg3ixit3VL1/Ml3AT+97Ho4z+wi3lUAUeHAcA9cBdH8EWht1K/I7eJVHn9VQMeePS1O09vk6CCGk0SoFHYvFuf89rqZQKNS0xkLpuo53v/vd5dt//ud/jl27djV8nWpa/by1en1CCCGE1MaPrfidn1WtvoKtpayzI4SOaOVEqkPHJpq8m/qxLIO9h0bw0JMH8O/f+y1eOjDcsGsHZAEcy8C2XGil10guv/jj4XIFbw0/ZhKQhXklFPlCpde5f51cQYVh0ih5QlYaSioihBBCCCGkDqGABJZlwPMcZMmrLioqes3nm6aNkYkMxifzGB5LI5UpVkzsUTQDyVQerguEQxK64t4vgTriQURC9XVGCsgCGJYBz7IQBC9Zp95uRQO9cawdSMz6GIYBdp63oeZrRsMBXPKKTbj28nNx/va1De1QdKotG/tw2UVnVL1/3+ExPPrMoUVZe6mRJS8BzQ+SKmrtr/XFpBsWbMuB47rQdC+o5e91NoZpwYUL23IoGEYIWbYqVdXm8/m6r1fp3MXoVPR//s//wYEDBwAAvb29+Kd/+qeGrzGbSs9bsViE49SXgD3f522
pft8IIYSQlcYfIxUofQbVDavugq2limEYnLGhcsfCw8eTS+r52Lp5AADgusA9D+ypO052OoZhECq9VgqluGE2rzXk2tXYtlNeyy8KC9U5ii4UlMAwgK5bXodoF0hO1f+zKSFkaaKkIkIIIYQQQurAsgzCpaQevzrNH8M0F9d1kUznYduu1wHGBXIFDcPjGWRySrk7jqabmJzyEopCQRGdHd4c73g0iJ7O+tvvMgyDYKmSLlDqVrSQEWg3vf78WROLtm0eQLxK5VqrvXrnFmxeV31kx2+f2I9Dg0unum6x+EFSze/woxpw3dZ3cfKTm3TdBFyA508mys3Gdb2AmHcN6lZECFmeNmyYmdA7OTlZ9/VOP5dhmIprLMT+/fvx6U9/unz7C1/4AuLxeEPXmEskEkFnZ+e0Y47jIJ1O13W9ZDI549jGjRurPr7R37f5rk8IIYSQ2gRk0SvY4rkFF2wtZZvX91Y8rukmTozV9/NTK2w7o7/8/7ph4cUGdivyk4oUzYDtOLAse9GKtVzXRTKVB1yvoMo0bTAMEKwzqYjn2HK39lRWgQsXhaJOsRRCVpjmDCMnhBBCCCFkGYqGZeTyKoJBEalsEbphwbTsOUd1ZfMqDMMGxzHo74nBsmyksgoMw0YmpyJf1BAJedd2XCAYENCVCIMBg1g0gN7uhc/zDgZEFBUdsiQgl9egaAY6HLeurkDRcAB/9KZLcPTEJB5/7ggOD07/5dXF5zf2F46NxLJeUtTXf/gwUlllxv2uC9x173P4X2+7rG0To5pBlkUARei6BctxwIOFppvlNu+t4gex/MCtLM7dpcinGSZkSYCqmYgv/C1FCCFtZ/v27TOOjYyM1HWtbDYLRZn+7+TatWsbPkbr7/7u72AY3t/tiUQCw8PD+NznPlfz+blcbsax2c7/4Ac/WPH49u3b8dBDD007NjIyMiPZqBajo6MVr1/Nli1bwPM8LOtkJ716v2/1rE8IIYSQ2rCsV7BVVHQEZAGmaUPVzHICyUqxbiABUeBgmPaM+w4em8C6VfP/+amZcgUV0XAA8WgQAz0xjExkAQDPvzyEC89Z35A1JJEvP0dFxUA0LCOb18pFio2iGxZGJzLlIqpc0euIFJAFsGz9fUbikQCKRR2maSNf0BENy5iYymH96q6G7JsQ0v4oqYgQQgghhJA6BQMiuNI8clnioWkWioo+a/KJppvI5Ly55ol4CDzHgec4DPSIKCg6MjkFluWUHxOQBXR3RsCAQTQSQG9XY7IfQgERSXijojjem+2eyhTRlajvl4MMw2Djmm5sXNON5FQeTzx/FHv2nUB/TwyrejsasufFIksC3nLthfjmj39XMQim6SYefOIA3njljuZvrk3wHFsOgOm6CT4goagaLU0qcl23nFSk+klF8jySinQTiFCnIkLI8rVt27YZx44dO1bXtSqdV+n6C3VqR55UKoUPfehDC77mbNeollS0bdu2GUlFx44dwznnnDPv9ef73ImiiM2bN2Pfvn3lY6lUCvl8vq6xZYODg9Nuh0IhrFu3bt7XIYQQQshMoaCXVBSUvYItVV95ny95nsP61V04cHR8xn2Hjo3jda/a2oJd1e6ZPYMYncjiVRduxtbNA+WkopGJLMYnc42LwwUlGFkFRdVLyskXNfTYkXJccaGyeRXjkzm4jgvLcTCVKpRH2EfDgQVdm2VZxKNBTGWKyOYVhIIidN1CNq8iFlnYtQkhSwONPyOEEEIIIaRODMMgUhqBFg56f86WoOA4DqbSBe/xIQmhgAQwKLcRDgclrOqLIxEPguMYyDJfTiiKhGX0dUfBMPPvJFSJJAmQZQEsw6C7Iwww3mz3fGHhc927OyO47rXn4q/eeQWuvfzcBux28fV0RnHdFedVvX/voVHYttPEHbUfuTQCzU/gUZTFadVdK0034ZSCZYbhJYP5e6yFblhw4cKybJgVkskIIWSp6+3tRV9f37Rj+/fvr2t85csvvzzj2I4dO+rdWtu
r9LXt3bu3rmud/twJgjBnQlaj1h8dHZ0xtu3cc89t2M+ThBBCyErndyWSJAEMC9i2C92w5jhr+dmyofIItGSqgExuZlfodnLe1jU4fDyJb9/5GJ7ec2zafc/vHWrYOqGgBIbxRrEblg3XcRvy3DiOi9GJLMYmsnAdF5puYHQiA1UzwTJAV0doXrGSasIhCYLAwrZd5PJeIWQylYfjzP+zBSFk6aGkIkIIIYQQQhbATwgKyALAwOvkUiWAlM4qMC0HPM+iI+Z1M+rqCGOgN471qzu9AAMYRMMBDPTG0dMZBcswCIck9PfEGv4LoL7uGFiWgSyJiEe9yiJ/jFsjhIMSujvnX1HfKtvPGMCF564v345HA9ixbQ1uuup8/OUfXd6w6rGlyu8CpJUq3VTdbGmiVbGUwKeXkpxEgQNf4/eIYxi4DsqvdUVbedWkhJCV4frrr592u1gs4uDBg/O+znPPPTfntRvht7/9LVzXrfu/Sh14Znt8Ndddd92Mn7t27949768nn8/j0KFD045dfvnlc46Nq/Tc1rN+s75vhBBCyEolCjwEgQMDpjyO2x/PvZJsXtdT9b6DxyaauJOZFM2Y9ee+RDyEdasSALxuP6d6Yd8JWA2Ke/AcW07u8Qv6JlMFFIqzF2zZtoPRiSwODU7g0OAEjhxP4uhQEsdOTGJweApHhyaRy6tw4SKTK2IsmYdtuRAEFn09MYRLxZALxTAMOmIhAEC2oMG0bNiWg1S22JDrE0La28qOihNCCCGEELJAwYAInufAsSyCpaSL8ckcsnl1WtBCUQ3kS4GCro4wOJZFQBaQiHsfyCVJwOr+DqwZ6IAk8eBYFizjdSga6I0vSkW5JPLo644BAOKRIIIBAa7rVRqt1K48V75qK17/6u34q1tfi7+69Qpcf8V5OPvMVeWOVCuZLApgGMCyHJiWDbgot9JuBb9TUj2jzwTRmwTuB3tVSioihCxTN91004xjDz/88Lyv88gjj0y73d/fj127dtW9r3a3atUqXHTRRdOOnf4c1OKxxx6D40z/marS9+R01157LURx+ojRRnzfal2fEEIIIbULBbxuRf548JX4+dLvrl3JvsOjTd7NdHf+8ll85dv344HH92EyVaj4mB3b1lY8rmomDlYY61Yvf1RYvqAhX/QSi0YmMlUT0SzLxtBoCrm8CttyYFsOTNOGYdjQdQuaZsKybFiWjfFkDpmcd81ISEJ/dxyiwDds7wAQlEWvqNJFuctSKlOEZVH3Z0KWO0oqIoQQQgghZIEiYS+AFI8GIYocHMdFOqtgZCIDRTNg2SfHnsUiMmRJAMsy6KvQfSgYkLB+dRfWr+7EutWdi5ZQdHLvMjriXtekzo4weJ6FZTmYTFcOtCx3PM9h53kbEI8GW72VtsOyDKRSMo6fTFRUWzMCzXHccjKR3zlpPu28/a9D10udimYZW0gIIUvZFVdcgXg8Pu3Yz3/+83ldI5lM4oknnph27E1vetOyH6H15je/edrtoaEh7NmzZ17X+NnPfjbtNsdxuPHGG+c8LxaL4XWve920Y/feey9se36/sDl9/e3bt+Oss86a1zUIIYQQMjt/BFqgVOiiG9aMpOKVYPP6md2KRIFDZzzcshFZuYKKYycmkcmpeOSpQ/j37/0W//nDh/HE7iPT4gBnbeqDKHAVr/Hcy8cbth9ZEsqdwqcyRaiaAddxMTyWmZGYY5gWjo+koOsWLNvG2GQWw+MZjCazGEtmMTaZxcRUDslUHqPJLDTdAssy6E6E0dkRBssuzs/qfuf1omJA0024jotklWQtQsjyQUlFhBBCCCGELFBHNASOYyEKPAZ64ujsCIHjGZimg4nJPEbHM7AdF6LIIV768N3TFZ21YkiShIbMPK9FdyKCgCyAY1n0dEbAsF7SSLvPvSfN51dearoXfFNalFSkqAbgAqZlw7IcMAzKreZr4b/3dMOECxemaVNlHSFkWRJFEbfddtu0Y/fccw+mpqZqvsZ//dd
/TfvFmCAIeP/731/z+Y8//jg+85nP4Bvf+AYKhaXzC4c/+7M/Q0dHx7Rj3/72t2s+3zRN/PCHP5x27NZbb0VfX19N5//N3/zNtNvJZBK//OUva17/+eefn5EEdfo1CSGEELJwwYAIMIDAcxAEFq4LaHpjxsovJVs39Zf/f92qBN545Q7c9qdX4g2Xn7NoCS5zeengCE6ffDY6kcWvHn4ZucLJUWeiwGP7llUVr3HkeHLaYxcqHg0iHJSAUqdwo9Rp6MRYupx8pesmjo+kYJo2TMvGWDIHTbNgmqUORboFTbOgqCaKigHbdiGJPPp7YuUkt8UiCjwiIW+NdClumMurK3LsHyErCSUVEUIIIYQQskCCwGHdqk5Ewt6IrEhIxkBPHNGIDDCA7bhgWKArEQEDBuGQVG553A4YhsFAb7ycGNUZDwMAMjl1RbbtJtXJ0smxYS5cGIYN02x+Mo6i6eV9AF7nofkEKQWeBcsycBzAKO2fuhURQpar97///eju7i7f1nUdX/7yl2s61zRNfOELX5h27F3vehc2btxY0/mf/OQncckll+AjH/kI/vRP/xQ7duzA+HjjRkgsplgsNiMJ52tf+xrS6XRN53/zm9/ExMRE+bYkSfj7v//7mtd/9atfjauvvnrasX/+53+u+fzPfOYz025v374dt9xyS83nE0IIIaQ2LMuUC3BkaeWOQOvrjuGqy7bhL97xe/ijN70S5561uuHjt+brxf3DFY93J8Lo7Zo+rm3HtjUVH+u6wAt7TzR0X50dIcgSD8cBJiZzsBwHum5hdCIDRdVxfCQF23JgmBbGJrOwLAcCz6KnK4Kergi6E2F0JcLo7AghEQ+irzuKvu4oBL5yt6VGi0WDYFkGum6hUBpNPzGZa8rahJDWoKQiQgghhBBCGkAQOAz0xrFmIAFJ4sGxLBKxEFb1xhGQBXREgxB5DhzPorc71urtzsDzHAZ6vX2FgxLCpZFuk6kCLHvlte0mlYkCX07G0Y3S6LAWBEuLyvSkovl29WKYk6Pc/Gu04usghJBmCIfD+MQnPjHt2Gc/+1kcOHBgznM/9alPYXBwsHw7Ho/jYx/7WE3r7t27d8a6hw8fxoc//OGazm8Hf/3Xf43169eXb2ezWXzoQx+a87yJiQl89KMfnXbsfe97H9auXTuv9T/96U9DEE7+G/fQQw/hu9/97pznPfDAA/jBD34w7dhnP/tZsCyFggkhhJDFED5tBJq6Qru2XLxjY7lQrdWSU3mMJSsnupy9ZdWMUb6reuPoTlTe++69Qw0d4cYwDLoTEQg8C8tyMDmVhwsXhaKOoRGvY5GmmxhLZmFbLkSBQ293DEFZRFAWEQpKCAclREIyouEAZElo6mhinmPLxZKZnALHdaFqZjlWQwhZfuiTJCGEEEIIIQ0UDIhYv7oLfT0xcDwLgefQ2xVFNOx92O7rioHn2vPH8GBAQlcpgNIZD0EUOdiOi0JRa/HOSLtgGAaBUgKPn4zT7NdHOluEYdhwXLe8Bz9wOx9+IpJeusZKrCQlhKwc73nPe3DTTTeVb6uqiuuvvx5DQ0NVz/nOd76DT37yk+XbDMPgm9/8Jvr7+6uec6oHHngAtj2zm9299947j523ViAQwA9+8ANI0skxEl//+tdndAE6VSqVwg033IBkMlk+dvHFF+Mf/uEf5r3+eeedh89+9rPTjv35n/857r///qrnPP/887j55pvhnjLr47bbbsM111wz7/UJIYQQUptQwO9UJIBhActyWtLVl5z04oHKXYoA4OwzZ446YxgG51XpVpTOKhgaTTVsbwDAcSx6OqNgWQaabmEydXJMcFHTMT6Vg+N4HaP7uqNtF0uMhGTwpaQoPy6UzTduTBwhpL20tu8cIYQQQgghy1QsEkA4KCGVKZardroTEYRDizvbfKE6O8Ll6qJISMaUUaS56GQaWRZQVA0UFQPxSBAFRYdlO00JcGm6iYmpPAAvqGfbLnieraul+qmj3ADAMGx
Ylg2+Se3CCSGk2b73ve/huuuuKyekHDhwADt27MBHP/pR3HzzzVi1ahVs28azzz6Lf/3Xf8V3vvOd8rkMw+CrX/0qbrzxxprXOzWppZbj8/HDH/6wYkJULjezGv1zn/vcjGOxWAz/63/9r5rWuvjii/GjH/0IN998M3Tdq77+yEc+gocffhgf+tCHcMkll0AURSSTSfz0pz/FJz7xiWl7O/vss/Gzn/0MoijW+uVN8973vhdjY2P49Kc/DQBQFAWvf/3r8Zd/+Zd417vehe3bt4NhGBw+fBjf/va38bnPfQ6KopTPv+WWWyo+B4QQQghpHEkSwPMcLMuGJPLQNAuKZiAmBFq9tRXJcdyqSUVr+hOIR4MV7zvnzNW4/9F9FbsS7X55COtWdTZ0n4LAoaczgvHJHIqKAUFQwHMcJtMFwAWCAQFdHZF5jXtvFpZlEA3LSGUUFFUD0XAARUWH67pN7ZpECGkOSioihBBCCCFkkXAci+7OCLoSYbgu2jIIUEk8GkRR0U92cjEsOI67ZPa/0hSKGkJBqaagTSOSZkIBEelMEaZpQzNMyKKAXF5FIh5a0HXn4jguRicygOtV7eULXiVcZzxUV8Dq5Cg3F4ZpQRR4qJqJSJiSigghy1MgEMAvfvELvP/978e//du/wXVdpFIpvP/978f73/9+iKIIy7LgONPHnnZ1deHrX/86brjhhnmt95rXvAYsy8643lVXXbXgr+Xf//3f8eCDD9b02ErjytatW1dzUhEA3HDDDXjggQfw9re/HceOHQMA3HPPPbjnnnvAMAxEUSwnHJ3qzW9+M77+9a8jGo3WvFYl//RP/4RNmzbhtttuQ6FQgGVZ+NKXvoQvfelL4DgODMPAsqxp5wiCgI997GP46Ec/Sr/YIYQQQpogFBSRzakIyiI0zYKmm+URUaS5ToylkclV7ppzToUuRb5wUMIZ63uw/8j4jPtePjSCq1+zHZI4/07Js5ElAYl4CFPpIjLZk3uOhCQk6ox3NEswICGVUaDrVjneVFSMti+oJITMX3v1SiOEEEIIIWQZYhhmSSXk+KOkBJ4DxzNwXcAwrTnOWhnyBQ0jE5lWb2Oa/3fnY/jiN+7D//x6N148MAxFrTzGK5Up4qvfeQD7Do8uaD2WZREotXYvKN4vULN5ZbZTGmJiKud1E7JtpNJFAEA0IiMg19f5gWEYSOL0bkXVnjtCCFkuRFHEV77yFTz++OO48cYbp3XPMQxjWgJQb28v/vZv/xb79u2bd0IR4HXo+cQnPjHtFyHr16+fMc5rqbjkkkvw0ksv4TOf+QzWr19fPu667rSEIpZlccUVV+CXv/wlfvzjHy84ocj3rne9C3v37sX/9//9f4jH4+Xjtm1PSygKBoN4xzvegeeffx4f+9jH2voXUYQQQshyEgp6iRRy6TOqppsVO96Qxffi/spdijiWwdYzZh/lu2Nr5RFoluXgpQMjC95bJZGQjI5YEAwDMAwQjwbQ2RFu+5/jeI4td4FWSiPl86VRaISQ5YU6FRFCCCGEEEKm4TgWksRD1y3IooCiZUDTzXLnopVE0QwMnpjCsROTOHZiCpPpAno6I/jzt7+m1VsDAKSzRaQyXoLNC/tO4IV9J8AwQH93DNe+9lz0dccAAKpm4Pt3P4lcQcMdv3gGt77plVgzkKh73UhIRlExoCg6nFgIhmFD1Yy6E3zmki9oyJaqDCfTBdi2C1Hk0FGlZXmtZEmAqpnQdBPRcKAcBCOEkOVu586duOuuu5DL5fDEE0/gwIEDyGQy4Hkevb29OPfcc7Fjxw6w7MLqET/60Y/i6quvxsMPP4xoNIqbb74ZkUhkwfv/7W9/u+Br1CMYDOLDH/4wPvzhD+Oll17C7t27MTIyAl3XEY1GsXHjRuzatQtdXV2Lsv7q1avxr//6r/jCF76Ap556Ci+99BKSySQAoKOjA2eddRYuvvhiBALUFYEQQghptqAsAgwg8hx4noVlOdAM0ztOmsayHbx
8sHLyz6Z1PXN+Pzav70E4KJWLqE61++UhvOLsdQ3Z5+likQAipQ4/C/0ZvJmCARGabkEpjUArFDW4brTtE6IIIfNDSUWEEEIIIYSQGQKyCF23IIkCiopR7uSykuw7PIo7fvEM3NMKCyem8igoOsLB1rdzPnw8OeOY6wIjE1mEQzIAL6D2458/U04+cl3g8d1HFpRUJEsCBJ6FaTlQVAPhoIRsXq0rqchxXGRyChzXRVAWEJDFacEn07QxlswCADJ5BZpmgWWA7kRkwUEqv6JON7wOD4ZhwbId8NzSCeARQshCRKNRXHnllbjyyisXbY0LL7wQF1544aJdv1W2b9+O7du3t2RtnudxySWX4JJLLmnJ+oQQQgiZieNYBEqFKwFZRL6goajobZ9U5DguTMuGZdkQBA6isHi/OnZdF9m8ivgCC4Rmc2RwAmqVGNZso898LMvi3LNW49FnD8+4b3g8g+RUHt2dC0+Sr7b2UuOPQNN0C5ZtgweNQCNkOVp6fzsRQgghhBBCFl2wNN5KLo1CMwwL7unZNctcX3dsRkKRb3B4qrmbqeLw4MykIsDbezgowXVd/OKBPTP2e3RocsFt2P3W7oVSa+tcQZv3NV3XxYnRFJJTeUylChgaSePwYBLjyRwUVYfruhidyMBxXGiGiUypW1EiHoLAcwvaPwCIAg+WAWzbLY/4yxfUBV+3Hay09yshhBBCCCGEtFok7BX3hIJeTEVVp4+XbTXHcaBqBrJ5FclUHsPjaQyNpjA6kUUyVcDIeLb8Gb+RCkUNjz5zGP/+3d/i9u8/BNOyG76Gb8+ByqPPRIHDGRt6a7rGjm2VR6ABwO+eOVTXvparaSPQVBqBRshyRUlFhBBCCCGEkBn8jjMiz4HjGDguykkXK0U8GkRHrHL13LETk03ezUyW7VTdx+Z13QC8jkS79w7NuF83LExM5Ra0vt8JSdMtmJYN13HnHTiaShegaiZsx0FB0WE5DmzbQSanYGgkjYPHJsr3T6YKgOsFZ/21F4phGEilsX5+JePEZB4TU7klm5Rj2Q6OD0/h4LEJjCWzsO32CWATQgghhBBCyHIWCQcABpBFAYLAwnFPJlq0mqabODGWwfhkHumsgqJiwDQduC7AcQxE0SvcyeQbU2jjOC4OHBnDD3/2FL74zfvwm0f3YipThG5Y2HtotCFrnE43TBw4Ml7xvq2b+2suTursCGNNf0fF+/bsH8bwWLruPS5HfmGi/1r3RqAtzZgKIaQySioihBBCCCGEzMBzLETRqzSSSn9q+spKKgKA9au7Kh4/dqL1nYpOjKZhmJWr+zau9ZKKLjxnfdXzh0YXFgTjORaBUicrv5Ixm1NqPl/VDEyVRrKlMkVMpgo4MZrG2GQW+aIGy3HgljofpTJFWJYDgWfRGQ8taN+nC5WCX+msgmypS1E6o2BoJAVrEasnF0tyKg9VM+E6LrI5FUOjKViUWEQIIYQQQgghi47nWIQCXlffcndfRW/llspSmSIcxwXPswgFRcRjAfR0RbC6P441/Qn0dcfAsoBlOdCqjA+bD8dx8NP7nseBo+MzukDvfnlm8VMj7D00VvXz79k1jD471fnb11a975cPvbTg7s/LiT/izx+B5jhu2yTTEUIag5KKCCGEEEIIIRX5QQG51MmlEUGlpWbD6s6Kx1OZIrINqt6r15HjExWPiwKH1X1eRZ3Ac1Wr6xoxwi1S6hhUUHW4cKFqJnRj7uQz23YwOpEFXC/AWlRKwSYX0DQLU+liOcFoKlNEUTHAMEBXIgyWbezH2HBIRjgoAa6XTDQxlYPtOFA1E8eGp6Co7REAroXruuUEr6lMEZbjQNctHB+eglklAY0QQgghhBBCSOPEIgEAQLiUXKTpVssLVnTDhGHaYFigvyeG7kQE8UgQQVkEz3nde1iGQbA85nzhn4N5nquayDM4PIVUtrjgNU514MgYfvXwSxXvC4ckrF9VuWismrO3rEJXR7jifSPjGezZf2Lee1y
ueJ4rFyQqqhc7XCoj0AzTQr6oYTKVx8Tkwjp6E7KcUVIRIYQQQgghpCK/fbE/Hko3rBXXvnhdlU5FQOtHoB0aTFY8vmFNFzju5Ee9tasqJ0YdH5la8PczIAvgWAa25SUUAUA2P3e3omQqD9O0YVk2UpkCACAeDWBVXxwdsaDXdr2UYJQvaKX7g5BEYUH7raYrEUZnPASG8QJgoxNZGKYF23IwNJLGVLqwKOs2mqqZcBwXluMgX9AwnszCsmyYpo3BkSnoKzAxkBBCCCGEEEKaKRSUwLIMeJ6DLHmJFsUWd23xuyUFZREcy4LjWMSjQfR0RbFmIIHOhJc84ydCKarekE48O7atqXrf8w3qVuQ4Lh54fB9+eM/TVYuczt6yCizLzOu6HMfiqsu2Vbxv/epO9HXH5r3X5aw8Ak3zXmuFot62McSiouP4SAoHj47j6PFJjIxlMJUuzuiutBKLKwmphpKKCCGEEEIIIRX5o60kgQfLeoGaauO2lqtwUEJ3onJlWitHoBUUHeNVKqg2re2ZdnvtQKLi44qKseDKQIZhEAr5lYxe8k+uoM0aOMoXNWRzXpenyXQBjgPIEo9YJACB5xCLBDDQEy8nGEXCEroS4XK152KJhGX0dUfB8ywsy8FoMlsOvE6mCphM5Rd1/UYolvarlRK8TNPB6OTJBKnjI6kl1XmJEEIIIYQQQpYalmUQCXtdff0RaMUWjkBzHLfcHThS+vwejwXR2x1FRyyIYEBEvPR5W5YE8DwLxwUUbeGJUH3dMfT3VE6+2f3yENQFrqFqBn5w95N45KlDsz7unHmOPvNtWteDLRt6y7fj0QDefM0F+MMbd6G3K1rXNZcrf7S8plmwHAe27bTlCDTDtHBiLA1VNeA4Lly40E0LBUVH5rSO5H6RGyGEkooIIYQQQgghVfA853WMAcodYlZilc76Kt2Kjp6YbFnV1ZHjlbsUAcCmdd3Tbq/p7wBTpSDv+HBqwXuJBL1gqaKZsGwbtuWUk3FOZ1k2xpNeMlQmr0DTLbAM0NkRBnPaJv0Eo8542BtP1gSSKKC/O4aALMB1SslEpU5KU5kiLNtpyj7q5T/vfmCWYQDbcjGWzELTvS5GQ6PpJdOGnBBCCCGEEEKWomgpqSgYEMGwgGHaNY0KXwyK5iVP8DwLWfISP2Lh6UU7PM8hGPTuC5f+bFQi1I6tlbsVFRQdd/zimbo/ZydTefznDx/G4VniI4AXI1lIV6ErL92GgCTg8l1n4t23/B62bu6fEb8gp41AKyWxtWPsIZtTvc7YuoHh8QwGh1MYHc9iMlWY8Zovqnrbx4EIaRZKKiKEEEIIIYRUFZC9YJIs+yPQKKnIly9oC+70U6/DgxMVjyfiIcSjwWnHJFFAb1flANrxkYUnFQlCKXDknkxq8TsRnW40mYVtO9BNC5nSYxLxEASeW/A+GoXjWPR2RRGPekHWQkGHYVqACygtrC6di2naMEpBaj/5r7szAlni4TjA+FQORU0HXGBkLINCsX2/FkIIIYQQQghZyoIBCTzPgWNZBEtxlWKLusb6iRJ+slAwKEIQZn4G9xONQqXCIVUzG5JQcfaZq8BzlX8dfezEFH7xwJ66CrZCAQlznbZpbTfe9PpXzPvap0rEQ/jrP74Cl150RlvFLtrRUhiBlit1H8oXdZimDbgAxzKQJX5mQZsL5AuV41uErDSUVEQIIYQQQgipyg9++dVGmt6ayrpWWrcqUbXTTytGoDmOiyNDkxXv23xalyLfulWVR6AdH2nM/sOh6S3di6oOyzo5Ks+yHUylC1AUA47reqPEXK89djgkN2QPjRaPBhEMeMl0ftv3ah2Y2oEfoNYME7btgmUZBCQBPZ1RBANe56XkVKFcKZhMVR6fRwghhBBCCCFk4aIVRqA1O8HCsmyopfHY4VKy0OldinzhkAyGZSDwHGTJiwE1oluRLAnYsa1ytyIA2L13CI8+c7h8+/hICvc88AIcZ/bnKhgQ8eY3XFA1Yen
SizbjbdfvhCwJ9W38FKLAL/gaK4GfVKTp7TkCTSnFqmzHKcd5+rqjWDOQQF93DLHIzPdGpkrRHCErDSUVEUIIIYQQQqoKnJJUxLBeQothtmdiUa6gYmgkhZHxTHn8UyMEZBH9VVplHztROblnMY0ls1WDMhvX9lQ8vnagclJRJqcim194gCQUEMEygGk6XpccFxgZz+D48BQOHZvA4WMTmEx5Y8TSWQWm6YDjGCTioQWvvZj8178fhG1FELhWfrBXK732A5IAhmHAsgy6ExFEQhLgAqlsES5cGIbtVeURQgghhBBCCGm4aClBISAL4DgGtu02faS8Xxgjyzx4ngPLMlULe1iWQSTkj22bXji0UJdfcha6EuGq99//2D7sPTSKA0fG8L2fPI5nXzyOex9+ac7P3wM9cVzze2dPOyaJPN567YW4fNdZYFkaU9ZMAs9BFDnABdRS3KqduiT7XYoU1YDrAKLAzZp05rguDMNqq8QoQlqFkooIIYQQQgghVQkCB0HgwIA5pVtR+41AM00b6awC23FhmDYmpvIoNHB2+7oqI9COnZias3qu0Y4cT1Y8znNs1Y5Ea/orHweAoQaMQGNZthx09IOWqmZC1UzYpXbppmUjX9SQLwVxujrC4KpUFLYLP6lIL1XZOY7b0IS1RnFdtxzkUkoJUAH5ZGCMYRh0doTBsQxcB9BLY9La8WshhBBCCCGEkOVAEnlIEg8GzIzPy81ycvSZlywUDQdmTbSJRfzuSiLAAIZpN6SwTJYE/MH1O8udbCr5ya+ew49+/nR55NrTLxzDI08fmvPaO7atxSvOXgvAGwH+p2+9FFs29i14z6Q+IT8hrdRNOV/U2qI4y3HccjzK31vo9HFnp/ELzBpRjEfIUtfeEVxCCCGEEEJIy/mJFX71jt6GI9CKqg7X9RIpwiEJrgtMposN++C/YXVnxeOKaiCZyjdkjVodGqycVLRmIFG1JXcoKFWtChxs9Ag0VUdB0ZHJK0im8hgez2BwZArDYxlMpYsAvDbw/uuqnfEc61XZAdBKwaR2HIGmagYcx4XlODAMr/uQXOH5lUot7P2kIoWSigghhBBCCCFk0fjjlELBUhdc1WhaYZKmmzAtByx7cixVNDL7+PFgQALPc+BYFqHSOY36DByPBnHztRdWHVdm2Q5Ozz357eP78cyLg3Ne+/WXbcerd27Bn7zlVeiMV++IRBZfpRFo7VDQVFR0L25i2dA0LyYSmiXJDTgZM8kXtXLBHCErFSUVEUIIIYQQQmblBwRk0UsqasdORX71UDAgoqsjXA6UpbMK0tnigq+/ZiBRtZpv3+HRBV+/Vppu4sRY5c5Cm9Z1z3putRFoxxvQqQjwks5EkYPrAJOpAjJZFUXFgGnacB2AYbzW0vFoAB2xYEPWbIZgKTlHKVWytVPrbl+hPPrMex+IIlcxUCv57+HS49ohsEcIIYQQQgghy1UkHAAYL54iCCwc9+Rny8Xmf3YNBiWwDANR5Goq7omGSyPQSolQjRwDvqY/gRted968zvnFb/fMGbfgeQ6vuXhL1UIr0jyVRqDlC62Po+QKXtFhofT+kyVvJOBsDMOCYVpwHbc8Oo2QlYqSigghhBBCCCGz8scoSRIPhgVsx4Vp2i3e1UmW7ZQ7r/h7TcRCiMe8isBsXsNUurCgIJgo8FjT31Hxvj37hpvWyvno0OSMyj3fprWzJxWtG6jcbWkyVSi3RF+o7kQEoYAIWeIRDkroiAXR0xXBqr441g4kMNAbRzwaBMNUb7febvygq6abcOG99v3XW7soKl6gzk8SClYJFJe7jRleUpFh2OXW8oQQQgghhBBCGovn2PI4KH/UUlFd/OIOx3HKyUvh0rqxSG3FPdFSd6WgLILjGNi229Disu1bVuH3dp1Z8+PPO2sNVvfFG7b+YnBdFyPjGezZf6LVW2kLlUagNatDVyWW7ZSLsfz411yjz3z5UnJeJqcszuYIWSIoqYgQQgghhBAyK1Hwqnc
YMJBEr+qrnboV+YkUksSD5ziglK8SjwTR2RECGC8IkEzlFxTE2Lp5oOLxdE7BibF03dedjyNDlUefRcIyuhORWc9dU6VTEQAMjTamW5HAc+jujKCvO4auRBixSABBWYTAc0sqkehUksiD4xg4zskOP41KwmoE07RhGBZcuOX3gp88dDpR4MAygOMAhuklRqlNCGgTQgghhBBCyErld/7xEy1UzVz04o6iasBxAUFgva7TzMl9zEUSea+oDAyCpT03egz4pRduxjlnrprzca+6YBOuu+JcsGz7/TrbdV2MJbO4/7F9+Op3HsDXf/QI7rl/T/mz9ko2bQSaZcO2nXKnoFbIFzTABXTTgmk6YJm5R5/5iooGx3VhGBZ1eyYrWvv9LUwIIYQQQghpO34HID9ZQTPaJ6lIKSVF+HuMhGT0dkfL/9/dGQbDAopqIpnK173Ots39VUeg7dk/XPd1a+W6Lg4em6h436Y13XMm7cQiAcSjgYr3NWoE2nJVHoFWSipqdEB1IfzKP1234DgAx55M/jsdwzCQpOljDBUKihFCCCGEEELIogmHZLAsA4HnIEveZ7XFLlQ5vRtLOCjNOerpVLFSt6JwqJQIpRpwnMYlQjEMg+tee27VMe0AcNVl2/DaV25tywKlTE7B//3eg/jaDx7G754+hHTW62JjWnbVuM1KIvCcF6NzgVzRGxvmP0et4Cc0+e+LgCzWlKjGcSwc52TcMZtvXWIUIa1GSUWEEEIIIYSQOflVRnKbdSpyHBd6aS9+4kc4KCEeDWKgLw4wQEiW0NsZAct6FYFKnZ1ZQkGp6oixlw+MLHql4Z79w151VQUb180++sy3tsoItMFhSiqajT8Cza9KUzUDdpuMDfODYppe6lIkC7MGXf2EI3+EG1XaEUIIIYQQQsjiYVkG4VCpW1EpyccbB7U4nylN04amWwBzcvSZP9KsVpFwAGAASeAhCCwcF3XHUqrheQ5vufZCdCXC046zLIMbr9qBi3dsbOh6jRQNy1ULdF4+ONLk3bSnSKkzVqF4stOPP5KvmQzTgqZ54+z9oqxQqLbRZ36csVBKjMoVtLaJBRHSbJRURAghhBBCCJmTn1Qhltpm27YL07JbvCsvuclxAZ5nIQq8l0RUCppFQjLW9HeAZRnIklgO4i0kieKcs1ZXPK7qJg4PLm412v4jYxWPMwywcU1XTdeoVgU4Ppltm0SxdiRLAhgGsCzHa2XutscINNd1USwFdv0uSoEqo898p3cb03WLgmKEEEIIIYQQsohikZNJRTzPwrIcTGWKi7JWQfESIAKSAJ7jwHFsObmoVvwp5/gxlsXo2BuURfzpzZfila/YhI1ru3D2lgH82dtejXPOrBx7aRcsy2Lrpv6K9x06NgG9jbp7t0pQFiHwXqefQtF77bSiW1GuVJynaSZsywXHMnPGTXzBoASG8ca4GZYN13HL1yNkpaGkIkIIIYQQQsicJJEHx7NgGaatuhX5CUJ+0lNAFsFxJz/mBAMSOju8qjep1GZ8IaPbtmzohShUbhlezwg0y7IxPJZGrqDCdd1ZH3vT61+BrZtnBq1W9XaUv/65rF1VOanIdYETY+marrESeYlpXtDJr0ZshxFoqmbAdVxYtg3D8JL85notiAIPhgFs62RioKq1/r1MCCGEEEIIIctVMCAhEBDBMozXmYcBiorR8M+Vtu2UrxkO+12K5LpGiEXDpRFoAe86mm4tSnGZKPC44lVbccsbd+Gm178C3Z2Rhq+xGLadMVDxuGU72H9kvMm7aU9+t6J80RsbVijqXqFWE+VKI8v890UwKNb8fuBZxhvjhpPdirL51o1xI6SVKKmIEEIIIYQQUhO/7a9cSs7R9eYGAirxEzyCpQ/5larv/ACALHp/mqZT96gygeewbfPMwBHLegGJuRKDTnX4eBKf//qv8Y0f/w5f+uZv8I0f/w77Do/CcSpfg+dYvOn1r8D529dOO37pRWfUvGYiFqpaoXh8hEagzcZ
//WulBBxFNeb1/V4MhfLoM++9KIrctKS6SliWKY9A8xMDaQQaIYQQQgghhCyu/u6YV7AiCohHvYSdVKYIa5ZEHcO0kMoWkc2rc8YxFM3AyHgGtu2C45jyZ9hYeH6jz3yhoASWZcDzXDmukmlBp5l2tXYggXCVMVo0As0TLr2GTNMpxx0yuea9hlTNgGnacFy3HD+cb9cuv+t5UdHhwoWuW21RZElIs1FSESGEEEIIIaQmfgcUSZw+PqlVdMOEbbtgWUD2k4oqBHRkSQDDMuBYFkKpy5C+gADAOWetKv//qr44rnnN2bjtT67Em6+5oOZqp7FkFj/82VPQjZOJWSPjGfz458/g/37vt9j98vGKAUOWZXDt5efgkldsAgDsPG89Nq/rrnnvDMNgzWkj0DiOxZr+BOKR+gKNK4UfRNUMC5bjwLadlnf48UewqeXkuto6Vvldl/zXHyUVEUIIIYQQQsjiEgQOvV1RAEAsEoAk8XAcF5PpQsWClVxBxehEFrm8hnRWwfBYGlOZwoxuQY7jYipTwMRkHrbjltdhwECWBUg1jno6HcsyiJWSnzpiQa+7kmpQQkUJyzLYVqGbNAAcOZ6kz9nwxsT5STz+2LBsTq1aTNdouby3pqIacB1A4NlyTLNWAUkAz7OwbRdFpfmJUYS0C77VGyCEEEIIIYQsDX43IEkSwLCAZTnQDavc9aTZFPXk6DMGDESRgyjM3AvDeO2KFcWALAkwTRu6YSE0z+ok39qBTrz2lWdh66Z+JOKheZ9vWjZ+8qvnYFepMpzKFHHfI3uxdfMA+ApdZxiGwetetRVrBxLYvK5n3m3MN63thq6bWLuqE2sHEhjojUPgK490IyfxPAdR4GCYNjTNRDgooajoCAZqS+RpNNP0Rp65cE8ZA1hbcGxGpyLdhOO45Y5bhBBCCCGEEEIaLxoJoKDoyBc0dHWEMTKRgaZbyBU0xEqFPo7jYDJdgKJ6n9eCAQGO40LTLeQLOgpF73NoLBKA6wKT6TxM0yldX0ZHLAgGDDiORV93bEH77YyHkctrEMEjHJJQKOhIZxX09yzsusvFtjMG8OTzx2Yctx0X+4+MYce2tTNPWmEiYRm5ggZVM2FYNkSeQzaveolqi8h1XeRLI8sKivdnPXFAhmEQDkrI5FQUihrCQQnZnIqOaLDuhD1CliLqVEQIIYQQQgipiSQJ4HkOLMMgUPrgrKh6y/bjd4nxOyiFg3LVx54+um0hlXUsy+BVF2yuK6EIAO773V4kU4VZH3PBuevmTNbasqG3riSQ87evxS037sJlF52Bdas6KaFoHvwEIj+Jxw9MtYI/+kzXLTgOwLFMxaS6SvzXlmU5Xqt9F9B0qqIkhBBCCCGEkMXW2xUFz3MQeA6d8TAAr/OJbljQDRMjE1kvoYgBEvEQejqj6OuOoa87Alnm4bpAUTEwMp7FWDIL03TA8Qz6uiNIxEJgwCAUlLB+TdeCi8A4jkVnhxf7iEcCYFiv463fNXelW93XUU4GO91LS2gEmmU7cJzZx+vVSzhlfF6+1K0okysuylqnKioGbNuBZdvlkfH1FheGQzJYxhs9XyzFQccncw3bKyFLASUVEUIIIYQQQmoWDXuJO/4Hcb9bULOZlg3DtAHmZHeW2YID5dFtpWQow7SrdgpaTAePTeDpF47N+hieZ7Hz3A3N2RCZF/+1pmoGXLgwDBuGac1x1uLwE/r8BCdZFmruWsWyLETRSybTSiPQlBaPciOEEEIIIYSQlYDj2HKnn3BQQigownWBiakcxpI5WJYDnvce48dgwACyJKKvK4b+3hhCQS/G4bpAKCiivycOWRLBsAx6u6NY3d9RsfNxPeLRIASBA89x5QSadE5p2girdsYw1UegHR2abMvkK9d1kcoU8eL+Ydz70Iv4xo8ewT//xy8xOJxatDX913FB0WA7DgzDXtTnRtUMjE1mAXgj++B6xVX1FtXxHIuo/9rPKnBcF6pmIpdXG7ZnQtodjT8jhBB
CCCGE1CwSlpHKFL2RYyxgWg50w5z3TPKFKidSSDw4lgXHsbOOfgrIAhiWAQ8WgsDCNL3Rbc0cXVVQdNz9m91zPm7HtrV1V08RwHFcOI4DfhE6MEmiAI5lYDsudN2ELIkoKjrEWHM/Wlu24wXG4I0uA04mztVKlgQYhjcKMByUvATBjoZvtSVs20E6W4RlO4hHg5CpJTkhhBBCCCGkjQQDIhLxEFKZIjriIWiGCdtyS/cJ6OwIl2Md/T0xiAKPdK6ITE6FJPDoTkQQi9owLQsh2YsfyLKAvu5Yw0fUMwyD7s4IRsYyiIYDyBc1WJaDfFGr2qVnJdm2ZQCPPXdkxnHXBfYdHsMF56xrwa4qO3BkDL95dB8m0zO7Zw+Pp7FhTdeirBuQxfI4+YKiIxYOIJUtLkrsK5tXMZbMAi5gWHa5O1J4gWtFwwEUFR2m5SCXVxCPhjCRyntdjGiUPFkBqFMRIYQQQgghpGayJEAQvBFo/kixYgu6FfkdkvxEilBQmrVLC8Mw5cQCWfLO0YzmdWZxXRc/+83zKCqVnytJ5MEwAMMAl5y/sWn7Wk5UzcDd9z2PT331Hnz6//4CP/nVcygUGz+ezH/N+Z19Ck2uPHRdFyNjabiO63XsMmxvX/NMnJFLiYD+2DNNN+G6y6PSdGIqj6l0EdmcisHhKRSK7VcdSgghhBBCCFnZuhJhSBIPnmXRFQ+DYYFEPIiezig41iucWreqE6GgBEHg0NMZxca13ejsCIFlGYg8V04o6uwIYe1AouEJRb5ISEZAFsAyDDqi3ji0bF5tSQfodtPfHUNHLFjxvpcODjd5N9WNTGTw4188UzGhCACGxzKLun6k1K3IT/JRFAO60bjOz67rYjyZw9iEl1BU1HSMTWRgWQ4EgV1wAhPLMuiIlV77BQ2mZcO2HExVeT4JWW4oqYgQQgghhBAyL5GQFwgIltptN3sEmuM45cCDn9gUDs0dHPC7EvlBNk1vXlLRMy8O4uCxiar3v/kNF+Av33E5rnvtuYhHKwejyOzue2Qvdu8dAuBVBO7ZP4xv3vE7pDLFhq7jv478172iGk0NpI5OZKFqJmzHwcRUHoDXiYubZ2t7/31gmg4sx4HruE19TywWx3GRK3gtyDXdBFyv4pISiwghhBBCCCHthGEY9PfEwbAMArKIVb1xRMNe559EPIQ1AwkIwvQOvDzHoisRwca13ejpiiIRD2Hd6k50JSI1j8OuV3dnBIDX8UUUOTiOiyyNfwLDMNh+xkDF+46PpMpJNK324OMHZh1ZNzKeWdRCo1BAAscysCwHRc37fJ7JKjWd6zguJlN5DI+lMT6ZQyanQNVOxmIsy8bQSAqZnHe9TK6I5GQBjuN1OO/rijWkm1AwICIgC3AdIJ31Yk2pbBGG2bjkKELaFSUVEUIIIYQQQubFn4UekESwLGCVRqA1i6KZcF1AEFhvHjrjBSfmEpT9TkVeMoVh2HCcxiaDOI6LA0fGsPfQKA4em8CxE1M4eGwCv3745arnXHL+Rmxc042OWAg7tq1t6H7qpWgGjp2YavU2alYoanh+39CM45mcim/99+8wPplv2Fqy5HWVsiwHhmUDbvMS6yZTeeQLGly4SKbyME0bHMegMx6a97U4zhsFCKD8/lW1pZ9UpKgG4HpBxbHJnBespMQiQgghhBBCSBuSRB59XVGAAXiOA8exWNXXge7O2ZOEOI5FRyyI7s5I08Y9B2Sx3G0mUerMky9qME27Keu3s21VkopcF9h7eLTJu5kpm1dx+Hj1QjfA68KcW8QEKJZlygWBfqJVtqDCsmZ//di2gxOjKUyliygUdWSyCsaTORwfTuHQsQkcHkzi2ImpcvGVl3TkXT8altHbFZ13EdZsErEQGAZQVBOq5sUfklONizkR0q4oqYgQQgghhBAyL5IkQBS9EWj+KKhqY70Wg6p5a/ldikIBqaaKI1kSy4E6nveTKRpbTcSyDLo7I3j02cP4wd1P4jt3PYYf3P0krCq
dbHq7ovi9XWc2dA8LZZgWfnD3k/iv/3kcLx1on1bdszkyNIlqBXVFxcB/3f00xqcaExxjWbYctFXUUnVdvrbquoXI5lVMpb1KuKlMEZpmgWWAns4oeJ6b4+zK/K9D1733QbO7ji2GYul7omhmKbhXoMQiQgghhBBCSNuKRgJYv6oTA71xbFjTVVMn5lbpTkQAxouvBGQBrgukc43tDrwU9XRG0JUIV7zvpQMjTd7NTLtfHqoaMznV8Fh6UfcRCclgGEDTLBimBddxMTicqhqbs20Hw2PpcsLQVKaIbEGFqhmwbC8ZybJs2LZX9DWW9Do7MwzQ1RFCIh5qeAcvQeDKyXWpbBEuXBSKOooKxRrI8kZJRYQQQgghhJB580eg+TPJ/V/kLzbXdcvdVPyEplrnorMsg4Dkdyvy/mz0uCdVM3BkaBK1hCx4jsVNV51fd0LIYrBtB3f84hkMj2VgOy7u+tVzeOqFY63e1pyOHE/Oer9h2PjNU+M4PtaYYKc/Aq1Q1OHChaIYC0pQc10X+YKGXF6t2DZbUXWMJbMAvASmQsF7v3V1RspjzOohidPfB37C3lLmJ3ppeulr8ROLVEosIoQQQgghhLQnSRIQCcsN7aiyGASBQyLmdcrtiIeAUseW5TBKeyFmG4F2YixdHsvVCo7jYvfLxyveF5AF7Ni6Bm+4/Bz8r7ddhrM29S3qXnieK8dTJqbyMCy7PLrs9HiEbTs4cUpC0fhkDvmChnRGwfhkHidGMzg+MoWxZBaTmQLGJjIwTQc8z6KvO4ZwKW65GOKRADiWgWk65e5OE1O5RR0fR0ir1R99JIQQQgghhKxYkXAAU+kiArIAlgVs24Wmm4vedls3LDiOC45jymtF5lHFF5BFqJq3z0JRb0inIsuycWhwAnv2D+PgsYnyTPe5XHnpNnR3Rha8fqM4jouf3rcbhwdPJui4LvDLB1+Eqhl49c4tLdxddY7j4sjQ7ElFAGA7Lh56LoldZzvYsHlha4YCEjJZBZblQNEMhGTvdm93dN7XchwXQ6MpaKeMHuM4FgFZQEAWIQo8RicygOsl72WyKgCgMx4qd+uql1xKSDJMG7bjAGCh6yakJrXPbzTTtGEYNly45aC2LPHQdAvJVAFIeN+74fE0VvV2tHUFMCGEEEIIIYS0o0Q8hGxehQiv4Cxf0JDNq00bw9autp8xgAefOFDxvhcPDOPSC89o8o48R4aSVceaXXbRGbh4x8am7qcjFoJhWDAtB+PJrDfCTxQwNJrGQE8c4ZAEqzTyTNctWI6DickcDMMGx3qxQMOyYFkOHAfQdAsodV+WJR7diciiJ+exrDd+cDJdRDanIBQQAQNIZxUk6hhPT8hS0N4pr4QQQgghhJC2JIk8RJEHAwbBQHO6FTmOU67u8rsUSRI/r04/fkWU393FT1JaiGPDU/jxz5/BvsNjNScUnbG+Bxecs25B6zaS67r49SMv4cUqbbkffOIAjo+kmryr2kxM5Woev+e6wGN7pvDk84MLWpNlmXJCSr4UnMsW1Jq//6eaTOehaSYsx4Gmm3DhwrYdFIo6klN5DI+l4TheksxkugAAiEbkcrvtheB5DgLPAi5glBLsFG3pVpgWSu3Gdd2C4wAcy6C3K4pwUPI6FqWmdyxaDuPeCCGEEEIIIaSZOI5FZ4eXOBEtfS7VdBOOM//Pw8tJZ0cYvV2VC42e2TPYsufnuZcqdyniOBbnnLm6ybvxunb3dccgiTxs28X4ZA6KZsB1XAyPpZHKFKclFI0nSwlFHIPe7ii6OyNY1duBtQMJDPTG0JUIIx4NoKsjhN6uaNO6fYVDMiSRh+MAmZxX/DWZLsCqIy5EyFJASUWEEEIIIYSQuvhJDaFSoo6iGPNq9WvZDsaSWRwfSSGZysOy7Dkem4OmW2BZlBM65tvOWJYEgAEEngPPs3BdVBw3NR+b1nZj3arOmh8fDkq47orzGj7XfbE99GTlirtWOzo0Oe9zHnj84IKTpCIhGQw
DaJoFw7TgOi6yeXVe11BUA+mMlyg3lSpgLJnD4HAKY8ks0tkiFM2A7TgwLRvJVB6uAwQDAjqiwQXt/VTLaQTa6aPPZFkAwzDo7AhNTyzSvMSidLYx4/AIIYQQQgghZCWJRYLl2IogcHBdUNEGgO1bKo9AyxU07D081uTdAIWihgNHxyved9bGvnLh3VwUzYBuNK4AieNY9HZFEZAFuI43Ci1f9Aq2klN5L6HItjGezME0vYSivq4YROHkACaGYSAKPMJBCfFoEOGQ3PQ4WyLuxWYKRR16KS6UL8wvLkTIUkFJRYQQQgghhJC6+BVpsiyA4xjYjlvzODHXdTGZykPTvU5BRcXAyHgGuYI6IzHJNG2MJbMwTBscz6CvOwZZFMBxLOLzTK7gOLbcktvvVuQnU9SLYRi87fqLcMn5G9HbFUEkJCMgCeBPq45iGGB1Xwf++C2v8hIc2gjDMLjqsu24fNeZVR9zfCTVkHFxjbZj2xq8+ZoLcP62NYhFAuXjpz//p3th79CC1uV5rjx+LFcKfqWzSs2JdY7jYiyZBeB12FH9DkGu1747m9cwMZnH0EgaIxMZ2LYLSeTR1RFpaKBMkk527QK8YOFS5LpuOYjtd1sKlN7rfmJRKCACrpcACaCuzlKEEEIIIYQQstKxLFOOa4QC3ueupfpZspHO27qmaiziyd1Hm7wb4Pl9J6p25z5/+9pZz3VdF0MjKfzkV8/hS9+4D8++WLnjUb1YlkFPZ6RcADSVLiKT94quLNvG+OQpCUXdMQhC7V3Km0UShXKhZaHoFTlVGzVHyFLHz/0QQgghhBBCCJlJFHhIEg9dtxAIiCgUdBQVvZy0M5tsXi13HepKRJAr3U5lFBQVA50dIYgCD003MTGVh+O4EAQWvZ1R8DwHjmexuq9jzsSRSgKyAE0zIUsCioqx4KQiwHsuXnfpthnHHceFZdswTRuiwLdlEMTHMAwuvegMyJKAXzz44oz7bdvB4IlJbNnY14LdVReQRWzd3I+tm/vhui5SmSIOH08iHJQwPpXDI08dqnjeYAPGuUUjMoqqgaKiewlulpcgFKmhg1ZyKg/TtGFZNlIZb6xZRyyIYECErpvQDQuaYcI0HbgOIIocujsjYNnGVt7571fNsLzRa5Y3ZnC+CXutpmqm935zHBiG1/VMlk9WXTIM473nqXqWEEIIIYQQQhYsHJJRKOoIBiRkchq00meyRn9mXUrCQQlnb1mF3RWKmE6MpTEynsFAb7wpe3FdF7urjD7riAWrdtzWdBN79g/jmRcHkZzKl48/++IgLt6xsaHfX4Zh0JUIg+dZZHIqMlkVluWNhrcsBzzvdTQS+PaNpYVCEoqqAUXT0YkQNM2EYVrTuioRshxQpyJCCCGEEEJI3fzkibA/Ak2dewSappvleeOdHWEEZRF93TEk4iGwrNcxZXQii8lUAROTOTiOC0ni0dsdA89zEEUOawcSNSUvVRKUvWo6qXS+YVjzGts2HyzrtWMOBaW2Tig61SvOXodQsHIL7IODE03ezfx4HWnC2HneBmw7YwCX7zoLr7l4S8XHpjJF5BdYQSaJAiSRh+t4bcUBIJ2Ze6SWourI5LwKvMlMAY7jdc6KhmUIPIdwSEZnRxirejuwpr8Dq3rj6O+O1ZVENxeB58BxjNchqdThZzyZw7ETk+VxYktBefRZ6WsQBW5Rni9CCCGEEEIIIV4CDRivyIrnWTju0h6n3Sg7d2yYcWzDmi687fqL0Ncda9o+BoenkMoqFe87f9vaqslBT+85hl8++OK0hCIASGUVHBue//j5WsSjQXTGQwC8jj9+QlFfmycUAV6HZI5lYFtu+fW/0FhTK3ldoL2i0cWKlZKliSJshBBCCCGEkLpFSiPQJOnkCLTZOv/YtoNkygtMhMMSQoGTY8CiYRkDvXGEgiJc1+v44rheZ6Herih4loUsC1gz0Lmgip+A7CUTiTwHjmfguGjLsV6twrIMNq3tqXjf4cHkkgsqvGK
Wlt6DI1MLvr7/HsgXNbhwoWrmnO+B0YkcAK8ttqZZYBmgKxGuONaM41gIAtfQkWen88e4TaTySGeLsB0Hum5haCSNE6PpJfH+KCilpKLSc++/zwkhhBBCCCGENB7HsQiWCsxCpxSarXS9XVFsWNMFnmOxY9sa/PnbX40/vHEXzljf29QuTs9W6VLEsgzO27q66nk7tq6pus9n9gw2ZG+VRMIyersiCMgCIiEJfaXCwnbHMAyCpcI8vzPyUhyBZpgWklN5HD6eLMeChscyrd4WaSOUVEQIIYQQQgipmyjwkGUBDBgESwlCRaV6EGkyXYBte6PMEjGvCqkrEcaagQREkQPPcehORNDTFQHPswiHJfR0RcAyDEJBCWv6EwvuPsJxLETRS0qSRS/xYCkkTTTT5vWVk4qyeRWTqUKTd7Mw4ZCMRLzyKK/B4YUnFYUCopdQZ7nl1366SjUgACRTeViWDdOykc55XY3isWBLq+/i0SBkyeu4lM1rGBnPIFdQ4cJFUdFx7MQkxpNe17B2ZNleEhQAqLr3PQjIlbttEUIIIYQQQghpjHDQK7IJlJKKVM1o28+NzXTNa87GX7/zClx/xXno6Yw2fX1FM7Dv8FjF+7Zs6EV4lpHt4ZCMM6uMvd9/ZGxRu/AEZBG9XVF0doSXVOdhv2BSUXW4cGEYFvRZis3aheu6yOVVHB9J4ejxSaQyRdiWA8tx4LhePKhQXDodrMniWjrvSEIIIYQQQkhb8keghQJego5SJYiUzatQNRMMC3QnvEShYFD0RqAFRKxf3YXOjhDAeJ1TBnrj6IqHwYBBLBrAqr54w6q6/Go6qZRcNFtnmZVo45ouVGuMc6jNR6BVsqa/o+Lx48OpBV+bYZjyeyBfGoGWK6iwbGfGYwtFHdnS6L+pdAGuA8gSj2g4sOB9LATHsejrjqGnKwJBYGHbLlIZBSPjGRQ1HXCBTE7B+GSupfusRil1KTJMC7blgmVOvrcJIYQQQgghhCyOSMhLppBFoTwCTTMovtLZEUYoKM39wEWyZ98J2BViEgBw/izdnH0XnL2u4nHXBXbvHVrQ3pYjWSq9/h0vJgq0f7eiVKaIw4NJjE5koZY6LKmagYmpHE6MpsvxrVS22MptkjZCSUWEEEIIIYSQBfETKmRJ9MaJOS5OjKUxlS5ALwWTdMNCJud1b+mIhiAKPDieRf8p8+QZhkFXIoL1q7sQkAWwDAMwQGcijL7uWEPHP/njnmSp1KlIN5fcWK/FFJBFrO5LVLxvOSUVTaYL5bFZCxEOyWAYQNctL4BaSsLx2baDbF7F2GQWAJAtqNB0CyzLoKsjvOD1GyUoixjoiaMzHgLHMjBNB8nJQjmZKFdQqwYmW8lvMe4H7yRJWNRxcYQQQgghhBBCAJ7nyqOn/W6xyizdq8nic10Xz1UZfRaLBLBxTfec11i/uhOJeKjifc++OEjdqCrwixf91387JxXlixqSU3nYtgPLspHJKzgxlsb4ZB6K6sW08gUNLlyoqkGFmAQAJRURQgghhBBCFkgQTgaROqKhUnWOi3xRx+hEDsPjaSRTebguEAqKiIa9JKT+KvPRJZHH2lWd2Li2G5vW9SxK0oXfmlsUeHAcA8f1upyQkzavqxxoGhpJlZPFloq1/fGq9x1vwAg0nmPL7a79VuCZnIJ80Rsldvh4EmMTWdiWA8Oyy+PROmLBiu+BVmIYBpGwjFV9ccSjATAMoGomTMsGXEBR2y9AXFS9xDA/0OX/R8zJsQABAABJREFUfUQIIYQQQgghZHFFwtO7V6uaQUVbLXRiLI1klbH1521dU1MHcIZhqnYryhW0qklLK1l5BJpmwHa8ZJ12jJ8AKHfQzhVUnBjLIJNVYVkOOJZBNCyD4xhYloNiKUEqlaFuRYSSigghhBBCCCEN0BHzKpjCQQmr+zrQ1x1BOCSBYQHTdGBZDnieLVc6JeKhOVtBCwK3aDPUeY6FKHrJHCdHoFFS0ak2r++peNx2XBwdmmz
ybhYmEpYRDp4chyUIHDat68ZrLzkLfad0y1roGoDXNceybdiWg5GxjFfd5bgwLBuZXBHjk1nA9RJf/C5f7YhlWcSjwXI3Lz9xx2+B3S503YRtOXBct/we9itkCSGEEEIIIYQsrnDwlO7VHAPbcamzSQvtfqnyeDKGAXZsW1Pzdc7durpqTO7Xj7yMqUzlxKWVShJ5CAIL1/EKs4CTRWftxLKdGfEdWeLRlQhjVV8HEvEQouEAACCb95KP8gUNpmm3ZsOkbVBSESGEEEIIIWTBImEZq/s7EAz6Y8VEdHWEsaY/ga5EGLLsfUDlWBayLKAr0fqRT0HZS2qSpJPVdOSk3q4owqHKiV+HBpNN3s10BUWfNl6sFlvXR/GKszpwzSv78b53/h7efsPFeNWFm6u29J4vSeQhSzzgAoVSYMaybWQLKkYmMhgZyyCT02BbLkSRQ2cbjT2bjT8q0A+KFRS9rVqd++PrNN1r0S3wLIQ26/5ECCGEEEIIIcuVIHCQJK+Ix+8KrVB8pWVi0QBCwZmFNpvW9iAWCdR8naAsYuvm/or3mZaNn/xqd1uOR28lv1tRQfFiQvmi1nZdu/IFDXAB3bRgmg5YBujpjCAclMpdrCIhCSwDmKZdjpWmstStaKXj534IIYQQQgghhMwtFJQQCkowTRu5gopsXoVp2ggHJYRLXYlYlsFATxwMM3e75cUWkAVkcl5XkzSjQNMtZHIK4tFgq7fWFhiGwea1Pdi9d2aV26HBCbiu27Lv4/N7h3D/o/uQiIe8MXlrurBudScksfrYq7PWR8v/X0u773pEQjI0vYB8UYdmWNC0k92vGAaQJcF7nwTEtngP1CIQEIFMEbpuwbJt8OCgqEbVhLNmK5baiftJTzKNPiOEEEIIIYSQpoqEZOh6ASFZRKGgQ1ENdMZbvavl4ejQJPbsPwHbdtCViKCvO4a+7mjVzsev3rkFr7pgMw4cHcdzLx/HkeNJuC5w/va18177sp1nYO+hUVgVkodGxjN45OmDeM3FZ877ustVKCghk1Oh6RYsx3vOFNWYs1N7M+ULXvehYqlAKyCLYNnpPWhYlkU4JCNX0JAtqAjIIrJ5FZ0d4UXrKE/aHyUVEUIIIYQQQhpKELwuLJ0dYSiqgVxBRVExwLIM+rqjEIT26CISCkrgOBYigM54CFPpIjI5FTzPlZOgVrrN6ysnFeULGiam8ujtilY4a/EdOe51Skplikhlinj6hWNgWQYXnrMer3/19pbsCQCCARGiyMEwbNi2l1AkSzxCAQnBgAhuCQZfeI6FLPHQdAuKaiAaDqCgaG2RVOQ4brlqzv8zINHoM0IIIYQQQghppnBIxmSqAFkWwLKAbXsj0Pxx2mS6TE7B0y8cw1SmiLe84cKKhU+O4+LXj7yMJ58/WvEaWzb04q3XXVTxPo5jsXVzP7Zu7kc2r2LP/hM4o8qI+9l0xsO44lVbce9DL1W8/+GnDmLTuh6s7uuY97WXI4HnIIk8dMOCouiIhgPIFbS2SSryOg95BVn+CLRqe4uGZeSLGjTNgm5akAQemZyCriXSdZs03tKLaBJCCCGEEEKWjGBARF93DJvWdWPDmi4E5Pb5hT/HsejviQHwquqiEa/Kaypd8EYpEWxY04VqDXUODU40dzMlhmlhaDQ947jjuC1PBmMYBr2dUSTiQSTiQazui6OvO4ZIWF6SCUU+/33rt7AvFPW2aOGtqAbgeq3XLcspdYOi2ilCCCGEEEIIaSZJ5CGKPBgwMz4/Eo/ruhgaSeGOXzyDr3z7fjz23BEcODqOn/zquRmdgGzbwU/v2101oQhAzV22Y5EALr3wjLpjEheesx4b13ZVvM91gZ/86jnohlXx/pXIHz3nd1XOF7W2GSGfK3Up0nQDtuWCZRkEqnR75nkOwdI4w1zeOy+TU9rmayHNt3SjmoQQQgghhBCyQKGghJ6uCAAgEQshGBDgukByKg/Lslu8u9aTJQFrBxIV7zvcoqSi4yMp2BV
abwPAxrXdTd7NTBzHIhoOIBoOgOfboyvXQvmBJL+Ft2075c5AreRX1vmVdpLIz2jbTQghhBBCCCFk8UVK3WyDpaQKRW39Z8Z2kc2r+MaPHsG3/vtR7D00ilNrdF46OIIf3fMUDNNLzDFNGz+85yns2T886zX9IrnFxrIMbrhiBwJVuk6lswp+/cjLTdnLUhAMeO8DXbdgWjZcxy2PGmu1XEEDABRK781QQARTrZIQQDQcAOAlSFmWDdtyyolJZOWhEr5lLp/P44knnsCBAweQyWQgCAJ6enpw7rnnYseOHbP+ZbHSHTlyBM888wyGhoagqirC4TDWr1+Piy66CAMDA4u6tuu62L17N1544QVMTEzANE3E43Fs2bIFF198MSKRyKKuTwghhBCyknTEQjBMG5msgq5EBGPJLAzDxsRUHn3d0RWfpLBpbQ8Gh1Mzjh8fSbWknbk/+ux0wYDYsnFsy53Ac+WxbppmIhyUkC/q5WBZq/iBOU0vjT5ro05ohBBCCCGEELKSREIyptJFBCQRDAtYlgPdsCCJ9KvocFCatXPT4cEkvveTJ3DjVTvwk1/txomxmd2ZT9fX3bz4RyQs49rXnos7fvFMxfufe+k4tqzvwZaNfU3bU7viORYBWYCqmSiqOuKRIHIFFZGw3NJ96boJw7DgwoWizD76zCeJfPlryRY0dMZDSGeLNXfJIssL/U2+TD311FP49Kc/jZ/97GcwjMr/UPX19eFd73oXbrvtNiQSlauPF9u3vvUt/PEf/3FDrhUKhVAoFBZ0Dcuy8M1vfhNf/vKX8eKLL1Z8DMMwuPjii3Hbbbfh5ptvXtB6p0ulUvjCF76A//zP/8TY2FjFx4iiiGuvvRYf+chHsHPnzoauTwghhBCyUvV0RmCYFhTFQE9nBKPJLAzTRjJVQE9nZEUn429e34P7H9s347jrAkeHJrF1c39T91MtqWjDmi6w7Mr9Pi22oCzCMFQUFR3hoIRCUUdv5Q7oTaEbFkzThgu3PK6w2QluhBBCCCGEEEI8kiRAEDiYpo2gLKKoGFBUnZKK4HU0fuOV5+O7P3m8auflE2NpfPU7D6CWSeM8x6KrI9zgXc5u6+Z+nHvWaryw70TF++++/wX8eV9Hy8fSt4NQQPSSihQD8UgQBUWHbTt1j6BrBL9LkaIZcByA59maYijRsAxVM1FQNMSjARiGN9ItEmptkhRpvpVdcrsMmaaJ9773vbj44otx5513TksoEgRh2i9DxsbG8I//+I/YunUr7rnnnlZst60cOHAAO3fuxJ/92Z/NSCgSxZMVr67r4vHHH8db3/pWXHHFFVWTf+brnnvuwdatW/GP//iP067JMAwE4eRf7IZh4K677sKuXbvw3ve+F6ZpNmR9QgghhJCVjGEYDPTEIYoceI5DT2cUDOuNVUpnlVZvr6V6OiMVK6oYBphKLyypf75yBRXJVOU1N65p/eiz5aw8As0w4bguLMsuJ/M0m+O4GJ3IePvRTDgOwLEMBasJIYQQQgghpIXCpRFofhfZ2brzrDRrBxL4g+t3QhSqj0mfLaFI4DlES7GZ3q7WdNV+/au3IxYJVLxPUQ3cR2PQAHjxE4bxRtkZpgW4XiJOK5WTihTvPenHeOYSkEWIAgfXOfk1pDPFxdkkaWuUVLSMaJqGN7zhDfjyl78Mt/QvT0dHBz73uc9haGgIhmHAMAw8/vjjuOWWW8rnTUxM4IYbbsDtt9/eqq233NNPP41du3bhueeeKx+75JJLcPfdd6NQKEDXdaTTaXz/+9/H1q1by4+5//77sXPnThw9enRB699+++244YYbMDExUT52yy234PHHHy9/344fP47Pfvaz6OjoAOAlN335y1/GG97wBmhaa/8xIoQQQghZDjiOxaq+DnAcC0ng0ZXwqr5yBQ26YbV4d63DMAw2r/USdoIBEeeetRpvev35+MC7rsKlF53R1L08vWew6n0b1y6sbY7j1FAOuIKJAg+BZ+E6gFoKDOc
LrfkcMj6Zg65bsBwHU6Vg1lxtuwkhhBBCCCGELC6/e0lAFgAGME3HS6poI5ZlQ1EN5IsasnkVqWwRk6kCxidzGJ/MIZtXF23tDWu68I6bLvGen3kISALecdMuvPePX4cPvOsq3PC6HYuzwTnIkoAbr9yBas28Xzo4gkKLk2faAcuy5e+xP7Z9MV9Xc1FUA5Zlw3accqJfaB7j7KOlRLJ8UYMLF6pmQlEpYXCloaSiZeQd73gH7rvvvvLtzZs347nnnsMHPvABrF69GgDA8zwuvvhifPe738W3vvWtcucix3Hwnve8B3fffXdL9t5Kg4ODuPrqq5FOn5xR+u53vxuPPPIIrrvuOoRCIQBAPB7H2972NjzzzDO49tpry48dGhrCVVddhWw2W9f6P/3pT/Ge97wHjuO1PGRZFt/5znfw3e9+FxdffDF43qu2XbNmDT74wQ/i2WefxcaNG8vn33fffXjHO95R19qEEEIIIWQ6UeAx0BsHGCAkSwgFS9V1qt7ajbXYxTs24k9vvhS3/cmVeOOVO7B9y6py5WGz7Ds8it89fajifd2JMKLhytVy1ViWjePDU3j4qYP47k8ex+e+di90g7qAzsavZPODRwWl+cHCdLaIXF6FCxeTqTwsy4HAs4hH5/f9J4QQQgghhBDSWAFZBMez4FgWodLnx1Z3aDlVrqDixFgGE1N5TKWLSGcV5PIaCooOVTPL3apzhcVLABnojePW339lxY7QlUTCMm79/VdiVZ/XcCAYEMtFcK2wdlUnLjl/U8X7HMfFC/uGm7yj9uQn7RRUHY7rQmthIo5fEKZqJlwHEAR2Xp2eQwERPM/CtlwUil58dGQiA9O0F2W/pD1RUtEycfvtt+OOO+4o35ZlGXfffTfWrVtX9Zxbb70Vf/d3f1e+7TgObr311oaN85qvBx54AK7r1v1foTD/0Quu6+Ltb387pqamysde+9rX4qtf/WrV1oGBQAA/+tGPsGXLlvKxQ4cO4T3vec+81x8bG8M73/nOckIRAHz84x/HH/7hH1Y9Z/369fjpT38KSTqZRXrHHXes6E5ThBBCCCGNFAyISMS8xHI/cUbT26uyrtm6OyMY6I2DZauUoy2y5FQe//Pr3VXv37y+t+ZrGaaN/7r7GXz29nvx/+58DL99fD+ODk1CNywMjabnvsAK5icVqZoBFy4Mw25qFy9F1TExlQcApDIKNM0CywA9na1p/U4IIYQQQgghZLp4qatJpDQKrVjUp/0OrFVs2ymPt5ckHgFZQDgkIRqREY8F0NkRQjzqJfpk82p5Isxi6E5E8M7ffyUS8dCsj0vEQ3jn778S3Z2RRdtLPV6z60x0RIMV73vu5eOL+twtFcEKiTipFowNc123nNjnF4bNp0sR4HUw90fvpbNFGKYF23JwYiwFy279e5s0B0XdloFCoYCPfexj04598IMfxFlnnTXnuR/72Mewdu3a8u10Oo1//Md/bPge29Udd9yBRx99tHyb4zj827/925wB6WAwiC996UvTjn3/+9/HU089Na/1P/nJT07rkLRhwwb87d/+7Zznbd++He973/umHfv4xz+OYpHmWBJCCCGENILfoUiWvMod3bDaIgi2EqmagR/d8xSMKhVQosDh4h0bar6ewLOYShcrBj4GT0xVOIP4JFEAxzFwHK/CDUDTWpubpo2R8SzgAgVFL1fadSUiEASuKXsghBBCCCGEEDK7WCQIMIAsiRAEDk7pM1yrFRQdruslFPV3x9DbFUVXRxiJWAjxSBCRkIxYNAiOY2Dbbvkz72KJR4O49fdfid6uaMX7+3tieOfvvxLxKsk7rcRzLM7fvnbaMVkScNG56/Hmay4oT8lZyU5NxMkVvG7LRUWHrje3Q3ZRMWDbDizbhqZ5RWH1jI+PhGTIEg/HAcancrAsG4ZhY3gsDcehJLKVgJKKloEvfvGLmJiYKN+WJAnvfe97azpXFMUZySm33347jh492sgttiXbtmckY73xjW/
EmWeeWdP5V199Nc4999xpx2pJCPIdOXIEX/va16Ydu+222yAItc1SPf2x4+Pj+OIXv1jz+oQQQgghpDpZEsGwDHiOgyB4H5sWO6BEZnIcF3fd+xxSpWrCSm686nxEQrW1DQe8wM6a/njF+wZHKKloLuURaJrXtrsZrewdx8XweBq27UA3LUxlvC618WigvB9CCCGEEEIIIa0nCFz5M7r/p18U0iqu65ZHmvl74nkOwaCISFhGPBaEKHJgwCBcSrhoxmfdcFDCH73pEmxa1z3t+Bnre/COm3bVlfzRLOdtXQ2OZbB+dSduuup8vO9PXoerX3N21SSplSgclMGxDCzLKY8+m2pytyL/de+vL4k8BH7+hVkMw6CnMwJR4GBbrpdY5DjQNBMj4xnqTrUCUFLREmeaJj7/+c9PO3bNNdegq6ur5mvccsst0zrzVLrmcnTXXXdh//7904790R/90byucfrj77vvPjzzzDM1nfv5z38epnnyF1Mcx+Htb397zWv39vbi9a9//bRj//Iv/zLtmoQQQgghpD4syyAgewncslQagWa0/89ZqmYgmcojk1OWbKWQqhnlrlAPPLYPh48nqz721Tu34MyNffNeY01/R8XjoxNZGObSHnVnWTZGxjNIlsaENZrfJlstBaR03YJZpYvUfDmO61W7mRZ03YSqGSgqOsaSWei6BctxkJzKw3WAgCy0ZcUmIYQQQgghhKx0/me1cEgCywKm5ZQLU1qhqBqwbRccz5Q7U6/qi2NNfwIDvXH0dkWRiIdLe/aSjlTNhGk15rPubGRJwB9cvxO33Hgx3nD5OXjHTbvwtut3QhJra0DQKuGQjL/+49fhHTddgrPPXFVXospyx7IMIuGTI/UAL1mtUTGUuTiOW+4S5v8ZXkCiGsuy6OmMgOdZmKaDyak8HNfrwDQ+mWvInkn7oqSiJe43v/nNtPFZAHDttdfO6xo9PT3YuXPntGN33nnnss8qvOOOO6bdFkURV1555byucd1118153Upc18Wdd9457diuXbvQ2dm5oPXT6TTuv//+eV2DEEIIIYRU5idP+MlFWpt3KjJMCxNTeRQVA5mcimQqvyR/pv/Vwy/jn//jXnzjR4/g0WcPV33clg29uOyiM+paY+1A5aQix3FxYjRd8b6lYGg0hX//3m/x9R89gv/7Xw/iez95vFyV1iiSyINjvXbwmu4FhQvKwis4x5M5HDw2jsODSRw9PoljJ6ZwfDiFE6Np5AsaXLiYnMrDshwIAovuRHjBaxJCCCGEEEIIabxgQIQo8mAZppyk08puRf7akZAMBl4RmSxNT9qJhGSwLAOB5yDLPIDmjftmGAYb13TjgrPXYf3q2ptGtNpCElRWikhIBssAhmFD1QzABVLZ5nQrKhQ1uI4LszSqjGGw4G7PPM+hpzMClmWg6RYmU3m4cJHNqZhMLU5xG2kPlFS0xN11110zjl122WXzvs7p54yMjOCJJ56oe1/tTtd1/PznP5927BWveAWCwflVup555pno7p7elrDS9+R0jz/+OEZHR6cda8T3rdb1CSGEEELI3PwP2rIkAIxXWWc1oUqtXkVFh+sCksSDYb2quqWYWDQykYFp2Rgez1R9TFcijBuv2gGWZepaozMeKieLnW6pjkCzLBv/8+vdyOROJhEdGZrEt+54FKkGttdmGAYB2XtvFFUv0W6hweF0VkEmpwAu4MKF7TiwbNsLfJkWNMPEVKYITbfAsgx6EtFp3XYJIYQQQgghhLSXjpj3+7ZIkzv/nE43TOiGBTAnk2A6YqEZj2NZBtFIAADK3WUKRX3JxVRI7X50z1P41h2P4ps//h2+8eNH8O27nsTPHx3BI7uTmMzoDVmD49hyYl22VPSVzauwbKch16/GsuzyqLViqRBMlgRw3MJjKaLAe4lFDKCoJlIZBQAwlS56sR2yLFEUbom7++67p90OhULYsmXLvK9z/vnnz3nt5eTBBx9EPj89Y7LSc1CL08/bv38
/Dh48OOs5lZ7betY/66yzEAgE5rw2IYQQQgiZP//DNsswkEWvSk3V27dbkVrqpBQJyejtjIBhvQ/3k+lCU9bXGvDcaLqJqTn2K4k8br72wgW1AmdZButWVe4S+vLB0XJb6qXk0GAS6ezM4E02r+L//fejDW1F7SfcqaX29apm1h0QM00byVI121SmiMETKQyNpHFiNIPhsQxGxrMYm8ihUPACel2JMASB2qoTQgghhBBCSDuLhgPlzj9+UU++SZ1/TpUrFcGEgxJ4jgPPcwiHKnfY8ce2BWURPM/CdlwU1daNbSOLa3g8g6HRFE6MpTE8lsHoRA5TGQNHR4r45WOjGB7LNGSdaFgGwwCaZkE3LbiOi8widitSVAPHhqdgGBZsx0FB8V7DoQZ2lpIlAV2JCACv0CyT9+JRE1P5ti7IPJ1lOzBNe8Z/hmlRQuFpKKloCZuYmJjR7ebMM88Ew8y/Wnfbtm0zju3evbverbW9Sl/b1q1b67pWPc9do9ZnWRZnnnnmtGMjIyNIJpPzvhYhhBBCCJmp3K2ozUegWZYNozSTXZYFyJKI7kQEYICiYsyZqFMv13UxNJrCXfc+i89//deYTC1snZGJDGb7zM4wwJuufgU64wsffbV2IFHxeCpTxO3ffwgvHxxZ8BrNdGhwoup9BUXHd+58DMNjjRntJksCWAawLAea4b0n6q1GG5vMwnVcaLo5o+MRyzLgOAYCz0ISeXQnwgjKC2vVTQghhBBCCCFk8bEsg1h0Zucfx2neL+ot24FSSgry9xCPBqr+HlUSeQQCIhgw5cSjZo1AI8032+/TXRf4zWMHGrIOz3Pl+GKuVMSWyamL8l5IZ4sYGk3BthwYpoXRiSwsywHPsw2PpwQDIjrjXtevTFaFZphewtQSKNSzbAeDw1M4fGwCR44nZ/x39PgkDh6baOnYxnbDt3oDpH4vv/zyjGPr16+v61qVzqt0/WbJZrN4/vnnMTg4iGw2C5Zl0dXVha6uLuzYsQOJROVfANSq1c9do9c/PUnp5Zdfxmte85q6rkcIIYQQQk4KBkTkCxpkUQCgNqQbz2LwOyhJEg++NBYqKIvoToSRTBWQL+pgWAaJCi2+62FaNl46MIynXhjEWDJbPv70nmO4+jVn133dkVlGngHA5bvOwuZ1PXVf/1TVOhUBXsek//7lszg8mMRVr94OSWzvj86u686aVAR4r5Hv/uRxvPW6i7B+ddeC1mNZBoGAiKJieO+PhIB0poh4NAh+Hq20MzkFimLAcd1yR61ISEIiHqqrWIYQQgghhBBCSPuIR4NIZ5Ry5x/LclBU9HKCz2IrFDW4LiBLPCSBB8MyiJW6EVXTEQ1CVQ2EgxIyORWabsEwLYhCe8cFyPzNFXUYnchhZDyDgd74gteKRQIoKgaKioF41CsKzOaViqP4qjFMC5blQOA58Dw7LW7iOC7GktlyEkxB0TGVKcB1AJ5nvXFlbOPjLJGwDN2wUFD0cnwok1PQ2eZxneRUHppmwsXJxK7TixxZx+u81Ky/r9od/Q24hL300kszjvX399d1rVgshkAgAFU9mT04ODiIYrGIUKgxv3iYi+M4+O53v4v//M//xEMPPVS1rRjDMDjnnHPwxje+EX/913+Nrq75B8Qb+dz19fXVdH1foVDA8ePHpx0LhUKIRCINXZ+SigghhBBCFi4U8CrTJIkHywK240I3rLZLMvHHUPktxUWRh2FaCAUkuB3AZKqAXF4DyzDldt4L8aN7nsKR45Mzjj+/dwiXX3JW3c/P6ES26n3nnLkKr7xgU13XraSnM4qujvCs4+F27x3C4MgUbrrqfKzq62jY2o02PpmrqXrKMG18/6dP4vevfgW2bJz5OWI+Tg2IxSJegDWdKaK7s7bPNZZlIznljT3L5BRYlgOOY9ARC7Z14IkQQgghhBBCSG1EgUcoKKGo6IiGZaQyCvJFrSm/pHddt/w52V8vGpbnLIQJhyRwnF+sJUBRTRSKOhLx9oo
DkYWrJfbw9J5juKF3x4LXEgUeAVmAqpnIFTR0xkNIZRTEo7XFQKbShendwRlA4DkIAgeR56FoBgzDggvXe5+VXvsBWUBXR7j8ml4M0YiMgqKjqBrosL2EqVxBQywSWLQ1F8KynfIoxrFkDrpuzXgMxzFY059YUqPcFhuNP1vCjhw5MuNYPQk2vu7u7mm3XdfF0aNH677efN1yyy14xzvegQcffHDWOYWu6+KFF17AJz/5Saxfvx4f//jHYdvze1M38rnr6ZlZKV3p+r6jR4/O+PoW8n2b7/qEEEIIIaR2guB9QGfAQBJLI9DarFuR67rQSh+AA6VWxol4yKukYoBwUEIi7iUSZXIqsg1oQ3zOmasrHjdMG3v2naj7ujdddT7+5OZX4fWv3o5zzlyFvu4Y1q1K4OrXnI03XrmjockmLMvghivPmzMBKp1V8M07foenXzjWsLUbba4uRaeybAc/+vnTeGmB49284LD3evNHn6VzCizbqen8sWQOjuNCM7yAGgB0doTBshSmIIQQQgghhJDloiPmxSNCQQkM68UNmhFXKSo6bMf1xj6VRk911FBkxTBMORkiEiqNbVOaO7ZtOXAcF0eGkvjt4/tbvZWqagkxvXRgBEqpkG+h/NdVQdFg2TYsy66pQCyVKZYTikzL9rrruIBp2lAUA5mcAsOwYNk2xpMni87i0QB6OiOLmlAEePEhWeIBF+W109nioq65ELm8CtdxoZtWxYQiUhmlVS5huVxuxrFoNFr39Sp1yqm0xmIZGxsDAAwMDOCWW27BDTfcgA0bNqC7uxvZbBaDg4P45S9/ia997WvlTj/FYhGf/OQn8cgjj+CHP/zhjMSoavL5/Ixj9T53833eltv3jRBCCCFkuQsFJGRMpVxRpOlmW1Xb6IYFx3HB8QykUjvuUEAEz3Po745hdCKLaDgAx3WRyarI5JTy/fXatrkfv37kZSjqzMDKU3uO4YJz1s1IAPrdM4fAcywuOndD1ZbLPM9hVW8HVvU2pyvQqt4O/NkfvBp33fscToylqz7OdYFfPPgi+ntibdmx6NCx2pOKAO/r+emvd2N1X8eCXsvxSBBFxYCimtAME7IoIJUpoKdz9s83ubyKoqLDhYupdBFwveS3YCkpjhBCCCGEEELI8hAKShAEDjCBcFBGvqAhV1AhS8KirpsrdSIJhyQw8EZ4SzWuGYsGkMoUETh1bJuql5OMSHX5gobn9w5h98tDSJcKkLafMVBzV+NmunjHRmi6CZZhkCtoePqFmY02LNv5/9m783A56jpf/O/aq7qr9z579pANAoQtAQRZ3VAYXHHkjswdUUefO6NyR2dRf46DzjgyDnN91PHquIyo4zhedURwQ6ICQiBsAZIQEsiek7P23lVd1VW/P6pPh5PTZ+mz9TnJ+/U8PKGruur77e6TA+d73t/PB0/vPIRLzp955WxdU6CpMuyKi3zBQiIWxmCmgFBIG7eC1nC29LIqz0VkcsHXtSyLtX8kyJIISRSRyZdQdX2IooB00pzXNZaoacCy8yiUbMSiIdi2i1K5Ug/0LSQjG+NGqhWFDRWpRNC1yXG9Cauon864BXARKxTGlumfSauyRtc2GmMuve9978Pu3bvx2c9+Fpdddhl6enqgqira2tpw4YUX4mMf+xh2796NP/uzPxt13datW3HdddfBsiZPdNq2DccZm4Ke7nvX7Pt2Kn5uRERERKeykR+A9doP47btLKgdaiM7pgwtmJ+myfXAUDRioKMtCHjEIyHomgzfB8oz3BUoyxLOO2tZw3MDQwXsPzw46tjxgRx+88jz+OUDO/HN//dQfUFkIYhHQ7jlzZfgii1rJ92l9sRzByd+QguUrcq4gaiJSsq7VQ87Z1itSFEkmKGgReDIokwmV56wPLRb9dBX+/yzuRIcpwpJFOq7V4mIiIiIiOjUMvLzXiQc/PxYKjtz2lbIsh1UKlUIImDWgkDN/Mw50rYNOPFz9UgAgRpz3Sr+86eP4f9
88z5sfeT5eqAIAJ7cufDWUgBg87kr8crNa3HZRWvw2ivOgln7+jzZ488cmLV1wJGNXfmiharnoVKpYv/hARRL9pjnZvNl9A0ERSQy+VI9UAQAruvBslwUCjYy2TIGh4uouj5URUJXe2zeN20ZugJFFlGt+igUg9eyEKsVFUs2HKeKqufV3/OIqUMUxeCfWayQfqphpaJFrFG1HVme/kfa6NpGY8yVT33qU/joRz866fMMw8DnP/95KIqCf/7nf64f3759O9797nfjrrvumvD68V7TdN+7Zt+3xfa59fX1ob+/v6lr9u7dO+pxoVCY1+pJxWJxwsdERKcLfj8kmh1Vz4dtBYshAipwXR+FQn7StlnzpVgowKtWIQkSbKsEQzVG/b+XCEAWqyiWbEiCC69qo1jwoUpTa1M1no1ntOOhx/agUefiR558Ad1twYJdpeLiv3/5BKq1BcPDR4fwlf/4DS49fxW2nLt8zsswA0DFtiZ8DABbzlmCJe0m7r7/WWTzjRcLn993DNdecsa4lZZaYffeXnjjtBx786vPwRPPHcLTu44ACHay9XTEsaw7gaVdCXS1Retf29NlaEC+YKNUtJFTBWiqjENHPaTijTdO9A/mUSrZcNwqhjJ5wAfiZgiuY8Gd4wr4TqUCr2rDdTzYlgT4MnK5ud0dO1XcGEJERERERKeqqGmgf6gQtEnSZViWi+FcCW3JualeM9KCKRzSIIviqA0xUxWPhlAs2QiHNAxnS6hUqrAr7oJZC1poZFlC2ao0XCPasfswrr5k/YwqZs81URSxaUMPfv1QZsy54VwJLx7qxxnL22c8TshQoSoSKk4Vvf05tCVNqJBx+NgwEvEQ2pIRCIKAfMFCb61iTq5QRiZbBhCE48yQBrdaheN6cN0qXNdD1fOgKhJikVBL1qwEQUDE1DGUKSFfLCNq6igUbVQcF6qycP7OjGyIK5Zs+B6gKtKcV007VSycT5GaVi6XxxyTpOl/Q24UTmk0xmx6zWteg1/96lcwDAOveMUrmrr2H/7hH/Dzn/8cO3furB/79re/jfe///245JJLxr1uvNc03feu2fdtsX1uX/rSl/DJT35yRvd49NFH6+3tWuHRRx9t2dhERAsJvx8SzZ6jC6fQTt3xJuZUyQOZ5nLjDbXFBBw6PjaU8tzzB9FuWjjcV8ZLRwtw3LGrSvc9+Cye3b0X117UMaZV2lw7cuiFcc9ddX4E256tYP/RsUFM2y5j++Pb0ZZYOCXPH3+6H7Y99v//w4aEwtCLWNsF5DMKlnaGkI5rkEQBQAFusYBDs5w1ne7fi955/vtUyQPZ2tf/7ufmd+zxjLT4JiIiIiIiOtVIkoiYaSCTKyEeMdBr51EsVSBJRSRjE3fyKJRslMo2FFmCGdahTBBM8TwfhZJVr+gcrVUpikdDTa87hEO1tvFu0B6pWKqgULSgqWZT9zmdbDpzGQ4dG1tJuWw5eOb5I+NWvF4ozt3Qg60PPwevwb6t7Tv2z0qoCADSSRPHB3JwnCqO9WeRiIYRNXUMZ0oolSuIRULoGww2C+aLFoYywbpbPGrUKx1JkghtgXUWM0MaMrkyHMdD2arA0FVkciW0p6KtnhoAwHGq9SpKI5XH2NJw6tj+bBEzDGPMsWp1+uUCG13baIzZ1NXVhWuvvbbpQBEAqKqKj33sY2OO/+3f/u2E1433mqb73jX7vp0KnxsREREREQCsW954V6HvA/dv78Oeg/mGgaIRq3vMeQ8UTUaVRVy8MVUL34zVKETVKr7v42h/4w0FPe3BoqkgCLhgQwLtCW3c10REREREREQ0lxLxEARRgK6p9cq2ubyFbL7xz7Se56N/KI+BoQJKZQfZvIUjvRn0DeZgndTS3fd95AsWjhwfxlCmBN8HdF2GqsgQRKEexGiGIAiIRYPrRoIHI62TqLEz13SNW8npgcdemNOWd40MDBXQPzT1XUxmSMOyjsYht70H+upVbmZKVWR0t8dh6Ap8DxjKFNE
3mIPrebBtN2h55geBusHhYDdYNKIjHl3YbeNFUay3kMsWgr/X2VwZ1XGqa8+3bD74/Cy7AsfxIApBeJCmhqGiRSwSGbuA77rutO/X6NpGYywkN95445g5/upXv8Lg4OC414z3mqb73jX7vvFzIyIiIqJTRWdKR8ycXpngnnYDK7sn3pHYKoosojPdeLfSob65rebajIGMDbvSeHGmp+3EoulIuIiIiIiIiIioFVRFRld7DBCCkE4iHgQkhrOleruyEY5bRW9/FsVSBRCAWESHoQdrD6Wyg97+HI72ZVAoWigULRw9nsFgpohq1Ycsi0glwuhIB9VRYhFj2m3X4xEDEABdU6DrMjwf6B/Kw/PG3zx1OlMVGRvX9TQ8l82X8eTOQ/M6n62P7MaXv/NbfPcn27D3QN+UPre1E2ye2/7MgVmbmySJ6EhHkYyHIAjB1/WxvgwsO6iyVbRsDAwHbdIjpjZpRa+FYiSAZ1kuKm4VnucjV2j9Oprv+8jUAoz5WrWicFiDKDIqM1Vsf7aImebYEnvF4vTr1xcKhSmNsZAYhoELL7wQW7durR/zfR/3338/3vrWtza8RtM0KIoCxxmdZC4Wi9C05nqqAs2/b4vtc3v/+98/7ns5nr179+LGG2+sP968eTM2bNgwa3OaTLFYHNXiZ/PmzQiHF8d/cImIZhO/HxLNHt/3cfDoEHzPx/HBHFzXQzIehtHCvtu+76O3PwvPB9pSEaiyhHQyUt8VdLLj/TmUrQqyhTIKRRvhkIp4ZOa7nC6vJPCrB59v6hpFkfC2N1yCiDk/ZYYrtjWq5VnP0jVQtYnHvsBJYuB3u8Yctx0gll6NZLz1O8QOP7YPmpYdc1ySRFx80YVQlOm3WW6W6/noHwi+Hkf+boiSCK+2I63iVjGcLcJ1g8eRsIaoOb8VVoulCjL5EgxdQTIWhqbJ6GqPz+scxrNr19ivNSIiIiIiolNJJKyjsy2G3r4sYqYBr+ohm7cwmClCEAWYIQ1lq4L+oQI8z4ckC2hLmNBrfZ4qbhX5goVCyUKlUsVA5cTv1iRZQMw0EDF1CAg21ZhhDenE9H9fJssS4tEQMtkS0gkTx/qyqDhVZHIlJONcY23k0vNX46nnDqLaIMDzwGN7cO6GJVCVuY8nZHIlPP9iLwBg34F+7DvQj3TCxEXnrsA568efQ3tCQzyiIJN3xpx7audBXLllbdAWb5ZETQO6pqB/KA/H8dDbn4cZ1lAo2YAfVE9KxRf27+pfTpElhAwFpbKDfMFCKh7GcLY0rRaEs6lQtFF1PbjVKorlILjF1mfNYahoEYtGx/YgzOenXsZtKtcuhoo355133qhQEQBs27ZtwiBMJBLB0NDQqGP5fB7JZLLp8Zt93xbb59be3o729pn1CTVNs+Hrni/hcLil4xMRLRT8fkg0M/FSFaVSBeFwUFa76kvQ9NYFSyzbAUQNiiwgYgb//9fenoI8zg4815PQP5hHGBJKFuB64qzM//yzV+N3j72IyhRLgMuSiDe/9nyk083/v/dsUTV90td+1tql+NVDz8OvrYPFowbWruzEulUdaG9LTnun42zqHSxCaLCrauWyNpjz/LOcBsCOBeXrS7aPeCx4f33FRzZXQiZfAXwFiiognTBh6PNfYrriihClKmRFhaaHYOjKgvnv4kLf0ENERERERDQbYhEDnuehbyCPRCwMzwfyBQuDwwXYtlOvIqJpMtqSJmRJglhr5a3KElLxMGJRo1alyIbv+4iYOqKmAbEWWjAMFW3J2fm5sy0ZQalcASpAKmGibyCPXMGCrisIteDn2oUuHg3hvLOWNazqUyxV8NiO/XjFBWfM+Twe27G/vp4zYmC4gJ/95lkkY2GsWtbW8DpBELBueRTbnh3bEadsOdi59xjOWb9kVueqKjK62uIYzhWRL9gojFTSMVSkEosvvBY1DZTKDgolC/GoAThBK7dWhniGa63rCkUL8AFdk+cl3HYq4bu1iK1cuXLMsYGBgWn
f7+RrBUFoOMZC09HRMeZYX1/fhNesXLlyTKhoYGAAy5cvb3r8/v7+McdWrVo17vNXrFgx5thMPrdmxyciIiKi6QkbGkqlCgxdQb5gwbLG7lqaTyUr2Flj1Hbs6boybqAIQL1UuKYGfzqOh2rVm3E4RlNlnLthKR7bsX/C54UMFeduWIoLz16+4PvAA4AZ1rFpw1LEoiGsXdmB9lRkwbUQe8cNW3CsP4O9+/uxd/9xHO0LqhatWTH2Z6T5EDMNFArBjtGiZUORZQwM5VGpBIGzcEhFMhZeEIEsIiIiIiIiao1ELIxq1cPgcBGpeBie56FYqtQDRRFTD9pCQYCqSujuSECWROQKZQxnS4ADxCOhWkDJh1TbbKPrCtqSJkJG811JxiOKArrbYzhwdAghXUXE1IMQ1FABakd8wnWY09VlF67BUzsPwa2Obdf+8OP7cMHG5dDnsPK3XXHx5HMHG55rS5pYuTQ94fUru8N4YvdQw3OP7dg/66EiIPg6S8VNhHQVpXIFiiIhEtYX3DrUVOiaAlWVUKlUkS9aiEdCGM6WWhYqsm0H5XIFPnzkS8H3GJNViprGUNEidtZZZ405dvTo0WndK5vNolQqjTq2bNmyRbFbMhaLjTk2WUjnrLPOwuOPPz7q2NGjR3HBBRc0Pf6xY8ca3n88kUgEy5Ytw8GDJ/6DVigUkM/np1VhqNnxiYiIiGh6QkYtvKMpgAA4rgfXrc5q2eNmlEdCRbWwUNiYeIecrikQRAEyRCiKBMepwq649dc1E5desBrP7jmCcoOg1fKeJM7fuBzrV3W27L2arjdcc26rpzAhURTQ05FAT0cCV2xZi0LJxr4DfZMukM0VSRIRjRjI5MoYyhTheT58b2RxLIxwaPYWdomIiIiIiGjxSicjqHp+0FosacLz8rAqDpKxcD18EDF1dKSj9Y0piVgY8WgIhZKN4WwJ5XIFkihAVWW0TdAOfqY0TUFbMoK+gRyS8RDsihO0XxvKo7Nt7O8oT3cRU8dF56zAw0++OOZc2Xaw7akXccWWdXM2/o7dh2FX3IbnLjp35aRBHUUWsarHxP7esfc4ejyDo8cz6O6Iz8ZUxzB0tSWVnWdbNKxjoFJEoWgjFjFQLldg2c6chsnGk8mVAQClcgVV14ckCpOuodJYjE8uYmeeeeaYY/v375/WvRpd1+j+C5F/cv06YNL/ILT6vWv1+ERERETUPF1TIEkiREGArgb7M8p2a6oVuW4VjuMBAuo/kE8W2BAEoR5AGrnGqszO/KOmgZtefxF6aosqEVPH5nNX4k9vvgLvfNOl2Li2Z9EFihYjM6Th3A1LETWNls0haupQFBFVNwgUGbqC7o44A0VEREREREQ0Skc6imjEgAABbakIutpi9UBRWyqC7o74mEq3giAgEtaxrDuJVcvasGJJCiuXpucsUDQiEQvBDGsQICCdjEAQAct2kc2X53TcxerSC86AqjReB3rkyReDlnKzzPd97Np7DL/d9nzD84am4Jx1U6sytG75+EUgHm/Q2o1GC4c0SJIA1/Xqn/XgcGHe5+F5PnKF4O9ovmgBCNYsF2MFqFZjqGgR6+joQGdn56hjzz//fMOQzWR27tw55timTZumO7V5lc1mxxxLpyfemdvote3atWta40/nvZut8T3Pw/PPj/6PY1dXF9rb25u+FxERERFNrl6tqBbOaVULtJGKQLoqQxJFSJI4pd0+I7udtFooyprFUNTS7iT+5G2X4a/ffx0++D+vxWteeRbaks1X4qT55Xkejh7PwPOa/zmyEVEU0ZmOIZ0Io7Mtio50lOXgiYiIiIiIqKHOtijMsAZREKAqMiRJxNLuBJLx8KTXKooEbR4rn3S0xSDJIlRZQjIWzC+TK8GepQ1bp5KQoeLi81Y3PFdxqnjo8b2zOl6uUMb379mOH/zs8YZVtAHgvLOWQRkn6HSymKliWU+i4bm9B/oatnajE0bCf0Dwd8SHj0LRRrHWfmy+ZPNleJ6Pilu
FZQWVp0xuepsWruwtctdff/2ox8ViES+88ELT93nyyScnvfdse/DBB/Hggw9i+/btM7rP8ePHxxxra2ub8Jorr7xyTKuxp556alrjn3zd2rVrsXbt2gmvafTeTmf83bt3o1wenYKe68+NiIiI6HQ2qgUaghZksxXGaEbZDnb5jISbQoY6pV02oXqloiBUVKlU4XmzuxDCAMnC5nkejhwfxu8f34fv/mQb7vjKL/C17z+IvsHcrI0hSSLMsN6SstZERERERES0eAiCgJ7OBLraY2hPR7BiaRohY2H+0l+WRHS3B+3OImEd4ZAK3wcGhgqzvrZyKrj4vJUwxlkX2L5jP/IFa9J72BUH9z+8G1/93u/wlf/4HX78yyfx8JP78OKhfpRqa3KPP3sAX/7Ob7HnpbG/Lx4hCMCF56xoav7nn7l0zLFl3Un8z7e+gmtfUxA1dUiiAMfxkKt91n2DuWkVR5mOiuNiOFsEgPrXWshQWEl9muRWT4Bm5o1vfCO++tWvjjr2wAMPTBpqOdmDDz446nFXVxcuvvjiGc9vIpdffjkAIJlMYnBwcNr3eeKJJ8Ycu+iiiya8RtM0vO51r8P3v//9+rHHH38c5XIZhjH1VgF79uxBX1/fqGNvfOMbJ73u4osvRmdnJ3p7e+vHHnjggSmPO+Lkz22q4xMRERHR9JghDccB6KoCWRaDMr5WZV53ufi+X6+QNFJ5aKrj65oKCIAsSfX52xX3lOjXTpPzPA93fv2+hmXGDxwZRGdbrAWzIiIiIiIiotNdNNK6Nt7NCBkaUokwBoeLSMbDsCsuHDcITcSjoVZPb0HRVAWXXnAGfv37sZ1a3KqHB7a/gOuuPHvc63OFMr7739vQP3SibdbxgRyeef5I/bGuKVOqwr1xbQ9iTX6NrVnRhoipI1+wIAjAlVvW4dILzoAosnXWVIiiiHgshMHhIrK5EsIhDagElYsSsckrkc1EoWjjWF9Qldv1PBRLtdZntepJ1DzG6Ba5a665BvF4fNSxe++9t6l79Pf3Y9u2baOOvelNb5q3foLDw8MolUrTurZUKuHxxx8fdUwQBFx99dWTXvuWt7xl1ONKpYL77ruvqfF/+tOfTnrfRkRRxJve9KZRxx555BEMDQ3NaPxEIoFrrrmmqXsQERER0dTJsoRQqBbkCQdBnvku3WvZDjwfkGQBmhLsEwlNMVQkikJ9l9hIFZnZbIFGC5soimhLmg3P7T88/Y0eRERERERERKeLVMKEriuQRBGJeBAkyheseavAsphcdM6KcTfCPfncQfQP5se9Nl+0kMmVxz0PTG1Nqy0VwbWv2DDp804migJeccEZSMRC+J9vfQUuu2gNA0VNioR1qKoEzwvCREBQ2Wuu2sf5vo+BoTyO9A7D83xYFacWLgJUVeKmyhlgqGiRU1UVH/rQh0Ydu+eee5qq/PPd7353VFk+RVFw2223Tfn6Rx55BP/4j/+Ir3/96ygUCpNfcBLf9xtW3JmKH/zgB2PGvPLKKydtfwYEFX3WrFkz6ti3vvWtpsa/6667Rj2++uqrceGFF07p2ttuuw2KcqLsn+u6+I//+I8pj93f349f/OIXo4596EMfGnVPIiIiIpp9MTPY2WTWynGXLQeuW5238Ud6wxtarRWbrjRVdnnkB2hNDQJJdsWd5RnSQra8J93w+MGjQy1p5UdERERERES0mAiCgM50FAAQ0lVIsoCq5zesCny6UxQJl120puE5z/Pxte8/iG1PvdhwPaKnI4G3XHfBtIM8ggBcev5qvOttl8GcZoWaCzYux7vf/kr0dCSmdT0ByVpVokLBhu248Lwg+DNd44X33KqHw73DGBwOWp7lCmX09udQdX0oioi2ZGTaYxJDRaeE2267bVSIxrZtfP7zn5/StY7j4M477xx17NZbb8WqVaumdP3tt9+OSy65BH/1V3+Fd73rXdi0aROOHx+/Z+V4vvzlLzd9jW3b+Id/+Icxx//2b/92StfLsozbb79
91LEf/ehHeOGFF6Z0/S9/+Us89dRTo479/d///ZSuBYDVq1fjXe9616hjd955J1x3ar/UufPOO1GpnPgflLa2tjEBMyIiIiKafWZYhygKkGUJuh4Ec4rztHCUyZXqfcgNPQiTh5tsvTYSKhqpVGRXXIZJpsHzfBw5PlzfaTVfntp5EE/tPDjt65f3pBoet2wHfYO5ad+XiIiIiIiI6HShaQoMXYEAAZHauky+aLV4VgvTeWctG7f1mONW8csHduIb//UgBobGFq44Y3k7rr/m3KbH7GyL4l1vuxzXvGIDFFlq+voRoijUN+XR9OiagnCt6vtwJgj8ZHPlpiunZ3IlvPDScex58Tj27u/DS4cGcPjYMHr7sxgYyuPA4UGUShV4vo/+oTyGMiXAB8KGiq62+Iy+DoiholOCaZr45Cc/OerYHXfcgT179kx67ac//WkcOHCg/jgej+PjH//4lMbdtWvXmHH37duHj3zkI1O6/uV+/OMf48c//nFT13zkIx/B7t27Rx1705vehFe+8pVTvsfb3vY2XHzxxfXH1WoV73//+yctUVgul/GBD3xg1LGbbroJW7ZsmfLYAPD//X//36j2dfv27cNnPvOZSa/btWvXmDDY3/3d38E0G7cyICIiIqLZI4oCImaww8kMBX8WSnO7cOR5PvoGc/Wyz9GIjnCtUlKk6VBRECZSZAmSLMD3gYrDakVT4bpVvLC/D/ds3YH/88378PXvP4TtzxyY/MJZsntfL356/w7c/esd+P3j+6Z1jyWd8XErWx04whZoRERERERERFMRjwatz8yQDgiAZbtwnPmrZL1YyJKIV25uXK1oRN9gftyKROesXzLl9mWyJOKaSzfgXW+7DF3tsabnSnMjEQ1BrP0dKZRsAEDfwNQ3tg1nizjen6tviqxWPVQqLoolG9lcGYPDRbhuFRW3imN9WRRLFQgCkIyH0JaKsG3dLGCo6BTxvve9D2984xvrj8vlMq6//nocOnRo3GvuuuuuUZV6BEHAN77xDXR1dU1pzK1bt6JaHfsfx5Nbck2F7/u4+eab8e1vf3vS51qWhT/7sz8bU43pnHPOwb//+783Na4gCPjud7+LZDJZP3bffffhf/2v/zWqJdzJ4990002jAk2rVq3Cv/7rvzY1NgB0dXXhG9/4BgThxDezT3ziE/jud7877jUHDhzADTfcAMs68YurN77xjfjTP/3TpscnIiIioumJ1lqgGboCQQQcx4NdaW6HDRCEVI4P5HD0eAYDQwWUrMqYgLvrVtHbn0Wp7AACkE6a9dLByXgYmtZc+1tJEqFpwS4rXQ2ubXZ30Onoocf34nP/9kt87+5H8cSzB1EoBosge17snZfx9x8exI9+8QRGvjx+/ftduO+hXZNuiDiZLEvo6YyPO8apwPM85AsWW/sRERERERHRnImYOiRJhCxLCNU2cLFaUWPnrF+CpV3jtxC7Yss6JOPhcc+vW9UJTZXR2RZDyFAbPmfFkhTe+44rcOkFqyGKjEAsJLIsIVqrVpXJleD5PsqWg1y+POm1w9kS+gaCdmmZfAkHjw7iyPEMegeyGMgUkMkVkS9ayBUs9PZl4DhVSJKAjnS0vn5LM8e/UaeQ73znO7j66qvrj/fs2YNNmzbhzjvvxJEjRwAElXgee+wxvPOd78Q73/nOenBGEAR88YtfxI033jjl8cZbvG52UXtEqVTCH/3RH+EVr3gFvvGNb2Dv3r1wnOCXG+VyGTt37sQdd9yB9evX4wtf+MKoay+55BL87Gc/m1alnpUrV+Lee+9FLHYisfqlL30Jl19+Oe69916USkE7g2w2i+9///u44IILcPfdd9efu2TJEvzyl79EIjG9fpo33ngjvvjFL9aDRZ7n4eabb8Ytt9yCxx57rB7cOnz4MO68806cf/752Lt3b/36q666Ct/5znemNTYRERERTU/IUKEoEiRRRKjWTmxkp81UeZ6P44M5lC0HFaeKQslG30Aeh44N1wN
Glu3gWF8WFacKSRbQ2RaFGdIAAWhPR9CWml4/8JEWaJp6ogUaTSykq6g02HE4mClicHhsie7ZdKwvi//86aNwq6M3Pjz8xD789P4d426IGM94LdAOHh1a9K3w9h8ewL9+57f4l2/ch8999Rf47bY9Tb8/RERERERERJMRBKHe1isSHqlkbS/6n6vngiiKuOkNF2FpV3LMuc62KC4+b+WE1yfjYXzkva/Fu99+OW5716vwwT+5Fn94w2Zcfcl6XHbRGbj5D7bgf9x48YTBpLlm2Q4/+wlETQOKLMJ1PeTywe/e+4cKE75nmVypXtEoky8hky3D8wDHqcKyXBQKNjI5C4PDRQxlivA8QNdkdLXHoTe5CZMmxiaApxDDMPCzn/0Mt912G770pS/B930MDQ3htttuw2233QZVVeG67pgF1XQ6ja997Wu44YYbmhrviiuugCiKY+736le/ekrXP/bYY/jyl7+M733veygWi/Xjv//97/H73/++/ljTNNh241/Q6LqOD3zgA7j99tuhKNP/5rBlyxY88sgjePvb346nn366Po/Xv/71E87hyiuvxHe/+90pV3caz/ve9z709PTg1ltvRX9/PwDgW9/6Fr71rW9BFEXIsoxKpTLqGkEQ8P73vx///M//DFVtnMolIiIiorkTjRgYHCrADGkolioolipIxvxRVSgnMpwrwnE8SLKAVNxE2XJQsmxUXR+Fkj0qpKSqEtqTEciyBFEU0N0RR7jJtmcvFzJUZLIl6LWKRbbtwPenPvfT0ZoV7RAEoNEeiudfPI5LL5h+K+JjfVlkciVETB2d6Sjkl/V5H8wU8N2fbGsYaAKAp3YewrqVHVi7qnPK4wWhohfGHLdsB32DOXS2Lc4S4WWrgv/38ydQKgc/O1U9H797dA9y+TLecM05LZ4dERERERERnWpiUQNDmSIMXYVcC0yUyjbMWsiITjB0Fe980yV4/NkDuP/3u1BxqhAE4Pprzm2qspAgCIiEdUTCOs5Y3j6HM566A0cG8d+/egoXn7cKm8+dOCB1uhJFAYlYGH2DeWQLFsKh4O9Ib38WqQaV2LP5Mo73B4GibKGMTDaoahSPGtBUGdWqB7fqoep5cF0Pvu/D0FVETZ3rm3OAlYpOMaqq4gtf+AIeeeQR3HjjjaPCJpVKZVQAqKOjA3/zN3+D3bt3Nx0oAoCNGzfik5/85Ki/mCtWrMAdd9wxpesvvPBC/Nu//RuOHTuG73//+7jlllvQ3d095nknh3kEQcDGjRvxiU98AgcPHsRnPvOZGQWKRqxfvx7bt2/H//2//xdnnXXWhHPYvHkzvve97+H++++fcaBoxA033IDdu3fjb/7mb9DR0VE/7nneqECRqqq48cYb8fDDD+MLX/gCA0VERERELRKrt0ALFo48z6+HGSZTKleQLwT/j5mOmwjpKlLxMJZ2JtHZFg1KaMvB/2eHQyo622KQZQmqKmH5ktSMAkUjcwYAVZEhioDnAxXn1KhWVK16KFmVMVV9ZsoM6+juiDc89/w0W6BVqx7+388fx7/95wP4wc8exzf+6yHc8ZVf4Bv/9RB++cBzePb5I/jOj7dN+HV12YVnNBUoAoCezgQkqfFywIEji7cF2tO7Djd8r57adQhbH97d4AoiIiIiIiKi6VMVGaFQsMZihoO1mnyxuUrWpxNRFHDROSvwpzdfiTOWt+OS81Yv2o1NQLCus/Xh3bjrRw8jmy/jkSdfZLWiCYQMFbomw/eA4WxQcCRfsLD/8CAOHBkMWqN5PnL5Mnr7sgCAXKGM4UxQ2SgeNRCPhmDoKsywjng0hFTcREc6is62GGIRg4GiOcJKRaeozZs340c/+hFyuRy2bduGPXv2IJPJQJZldHR04JxzzsGmTZtm3FPyYx/7GF772tfigQceQDQaxdve9jZEIs21YIhEInjrW9+Kt771rQCA/v5+PPPMMzh48CAymQwKhQJM00QymURHRwc2b9487VZjk5FlGe95z3v
wnve8B/v27cPjjz+OQ4cOoVwuIxwOY8WKFdi8eTN6enrmZPxkMolPf/rTuP322/HUU09hx44dOH78OFzXRTwex9q1a7FlyxZEo9E5GZ+IiIiIpk5RJBiGinK5grChIpu3UCjZkwZ+3KpXb5cVjegwdBWCKEBTZViWA11ToGsKUgjDdlxoSvBjWzikoas9Nm4YpBmyJEJVJVQqVWiqgrLlwLLdeju0xcpxqzjWl4Xn+RBFAW1Jsx6gmg3rVnbiSG9mzPHDvcPo7c82vRD2223PY+cLx0Ydc6seDvcO43Dv8KTXn79xGa68eF1TYwKAIktY0hnHgSNDY87tPzyILZtWNX3PheCF/cfHPffQ4/sgiiJWLEnP44yIiIiIiIjoVBePhlAqVWCGdWRyZdgVF3bFhaYu3F/De54Px63Cdatwqx4cpwq3WoXvB8GPkbZucyUWMfD26y+C36gc9CIxmCngx794Ekdr4RcgqK7zwv7jWNfk5q/TSTIexrG+LEplB70DWUTCOkKGCstyYFkO+gbz8GvBrFzBwtBJgSJqjYX73YxmRTQaxate9Sq86lWvmrMxLrzwQlx44YWzdr+2tjZcffXVs3a/6Vq9ejVWr17dkrFFUcT555+P888/vyXjExEREdHUxCJGECoK68jmLVi2g2rVmzD4MzhcQNXzoaoSErHgh+H2VATxaAiOU0WhZCFXsGBZTj1QlIiH0JaMzOpuG0NXUamUoetBqMiuOADmdtForuUKZXieD0EMFsj6B/NoT0cxW+/a2pUduH+cijePPPkibnz1eVO+19HjGfz+iX3TnsuGM7rwuivOnvbXxPKeVMNQ0aGjQ/VQ1mJi2Q4OHh37el7uN9uex2VuFeeeuXSeZkVERERERESnOjOkBW3MXSBsqCiWKigULWjq9Nukz5WSVcFwtgjHGb+6s11xIUkizBlWyZ6MIAiLtqqM61bxjf96CGXLGXPusR37GSqagKrISMXDGMwUYVkuLKsASRIQDmkwwzpUWQIA5IsWhjJBNaNYRGegqMXY/oyIiIiIiGiazJAGQRSgyhI0TYbvA8Xy+GWus/kyypYDQQTSyQgECDDDWv0HY0WRkIiFsbwnhVXL2tDdEceKJSm0p6KzvtASqlXwGdk5Z9uLv/3ZyGJOOmnC0BV4PtA3mJ+11m5tqci4LdCe3XMEmVxpSvdx3Sp+ct9TmO6GvJVL07jxVZtmFPxZ1p1qeLxsO+gbzE37vq3y0qGBKZUYf+jxfShbU2tTSERERERERDQZQRAQiwabtCJhHQBQLNnwvNltyz5TbtXDwGC+HiiSJAGaJgfrUjEDbSkT0Ugw/2x+ausbpytZlnDG8vaG5146NICBocI8z2hxMcM6ujviiEcNSJKAatVHLm/haG8Gvf1ZDGeLGBwOAkXRiI5ELNziGRMrFREREREREU3TyM6tfMFC2NBg2y4KJRtRc2zFH7vi1kMnyVgYqixBkkV0jNMyS1EkKIo0Z3M3XhYqEkSg6vmoOC5UZXH+mOg4VbiuBwiAoakwNBV9gzlYlltfiJgNl56/Gj/42eNjjvt+UK3otVdsnPQeDzz2AvqnucDU3R7DW6+7MNgFOQNLOhOQJBHV6thFzudf7EVHevaDbHNpz0vjtz4bocoyrrx43ay2xCMiIiIiIiKKRwwMDhegawoURYLjVFEsVRAx9VZPra5QtOD5gKbJaE9FIIlja4/omoJC0YLjeChZlfqGNBrronNX4JnnjzQ8t/2Z/VNaH2qlslXBcLaE4WwRw7lS7d9LsCsOErEwLr9oDTrS0TkbX5ElxKOhoAq85SBftFC2HFi2C6u28TFq6kgyULQgLM7VYiIiIiIiogUiFjGQL1gIhVQMZYuoVKpjwjme52NgOF/rTa/Ud651t8cgT9AqbS4pigRZluC6VWiqHJQctp1FGyoq20H1GV2TIdbCMO2pKPoGciiVxq8e1ax1qzqRjIfrJZhf7smdB/HKzWsRMsZfdDval8F
Dj++d1tipeBh/eMOWenWpmVAUCT0d8YYtw3736AvYd7Afl1+0Fmcsb1vw4SLP87H3QN+EzwmHVPzBtZsgSXMX1CMiIiIiIqLTkyxLMEMaCkUbkbCGoUwJ+aK1YEJFnucjV7AABJVfRgJFshxsaNMUOai87QRVZHJ5C7l8maGiCfR0JNDdHsPRvuyYczt2H8ZVl6yflfWb2eT7Ph556kVs37EfmVx53Of19ufw4sF+3HrT5UjG5zbUIwgCQoaKkKHCrXoolmxUHBeGpsAML4y/P8T2Z0RERERERDMSMlRIsghZFBGuhUmy+TIKRQuZXAkDQwX09mfhOB5kWUQyYQIAkvEwQsbc9qefzEj4RdcUAEAmW0KhaLVyStM20vqsXoVGAERBQFsqMqsVn0RRwCXnrWp4znU9PLZj/7jXep6Hu+97ety2Z2tXduB1V27E2et6kIyN7hW/cmka/+ONF08YWGrW8p7GLdAA4EhvBt+7+1F87fsPYve+3im1FmuVY30ZlMrjtzSLRw388ZtfgfbU3O2wIyIiIiIiotPbSGv7cEiDIAIVpwrLdlo8q0ChZMHzfMiyiLAerEWtWJrG6uVtWNadREdbFKnaelU0rAMCYNku7MrstJM/VV1w9oqGx+2Ki2fHqWLUSg8/8SLue3DXhIGiEXbFxa9/v2seZnWCLImIRQy0JSMMFC0wCyseR0REREREtMgIgoCYaWAoU0Q4pKFYqtT/OVk6EYYsitA0Gemk2YLZjhY2VOTyZUTCOmzbRdlyMDBchO24SMbCC75CzQjP82HXFuqMWkCqsy2GbL6McrmCZNxE8WUFedyqh5nEuc5ZvwS/3bYHhQYVkB57+iVccv6qhhWfRFHEVRevwz2/eQaF4uhrQ4aKN1x9DsIhDRfWFqWKJRvDuRJURUJbMjLrn8fKpWk88NgLEz7nWF8W/3XvdrzmlWdh87krZ3X82fLC/sZVimRJxOUXrcG5Zy5FJKwjX1icgTkiIiIiIiJa+MIhLdjU5AT/XijYyBet+kauVvF9v/7zcNQ0AATzO7mKTtTU0T+UD84bKoqlCnKFMtqSkfmd8CJy1tpu3PfQzvpGt5d7bMdLOH/jsgWztlayKvjto883dc3ufb3oH8yjLcWvgdMdKxURERERERHNUDQSLMoYugJZFiHLInRdhmlqiMcMpJMmujpi0DUVgiiguyO+IBYVohEDoZAKSRTRkY4iHg12AeULNnr7c3DdaotnODV2xYHnA7Is1sM8ZkhDT0ccuq5AFke/15lcaUbjybKEzZsaB2zKtoOndh4a99q1qzrxp++4AuesXzLq+Ouu2IhwaHTUKRzSsKQzgfZUdE6+XpZ1J9HdHpv0eaoi4ex1PbM+/mwZL1S0fEkKl120pt5ukIiIiIiIiGgujVQrGvk5tFSutHxtpVSuwHE9SJIAMxysOzRqaSUIAhK1+Y+scy2E+S9kiixh05nLGp7rHyo0bDnfKk88ewCu6zV93e+f2DcHs6HFhqEiIiIiIiKiGdJUGYauQICAJZ0JLOlMoDMdQzpuIh4JwQxp0BQZEICutljDKjatsqQzUV/0ikfDaE9HIIpBmeNjfdkFU6p7IiM7wnQ92P1n6AokSYQkiVjSmYB60u47u+LCcWa2KHbBxuVQx2mr9siTL6JaHX+hxtBV/MGrNuHt11+EiKljwxldOHNN94zmMx2CIOCm6zdj9fK2CZ930TkrTrSVW2DyBQu9/dmG59as6Jjn2RAREREREdHpLBoxAAHQFBm6LsP3MaVWU3MpV6tSFAlrEAUBmiaP21o9Hg1BEIVR88+x6u+ELjx7OcbbB/bYjv3zOpfxuFUP23ccmNa1zzx/eMab82jxY6iIiIiIiIhoFnS2x6CqQchEEAWoqoRwSEM8GkI6aaK7I45VS9sQMRdW1RRBENDRFkVnewyCKCCkq+hqj0NVJVQ9H8cHcigUF/YCUtkOWs2NtD4LvazijySJ6GgbW43HqswsLKVrCi6
otSk7WTZfxnMvHJ30HmtWdOBP33EFrrvq7BnNZSbMkIZ33LAFt7z5UqxeNjZcJMsitmxa1YKZTc3eA42rFAHAmhXt8zgTIiIiIiIiOt3JkviyjVvBn4WSPeONTdNl2Q7sigtBBMxa67NU3Bz3+ZIkIlarUhSrPb9QtOB5zVe4OV3Eo6FxNzXt3ncMuUJrQ2UAsGvvUeTHWduTJRHppImlXYmG530feJjVik57DBURERERERHNAlWRsXJpG9as7MDalR1YubQNS7oS6GiLIpUwETF1KONUtlkIYhEDy7qTUBQJiiyhsy0GM6zB94GhTHHBlrt23SocxwOEIOgDAOGTdtxJ4tgtY7NRgWnLuSshSY1/rH74iX3wfX/Se+iagtACqAK0rDuJd/zBFrzrbZdh7coTi2EXbFw+pi3bQrLnpeMNj7clzfoCLhEREREREdF8SSVMCKIAXVUQMoJ1iuFcsSVzGQm0hEMaZFGELEv1FmjjScaC1miGrkJRJHg+kC/acz7XxezCs5c3PO77wJPPHZzn2Zw8Bx/bnnqp4TlZFvGBP7kW77v5SrzzTZciGRu7jrJ2ZQfOXr9krqdJCxxDRURERERERLNIbBBgWSx0TcHynhTCoaAkdjphQtdkeD5Qsiqtnl5D9dZnqgxJDFqeTaVVl11xZzx2xNRx9rqehuf6BvPYe6B/xmPMt+6OOG56w0V499svx1lru3HJeatbPaVxuW4V+w8PNDx3BlufERERERERUQvIklgPZ8RjYUAASmUH9gwrJjfLcaoolYMxo7WqQ8l4CMJ4vbpqFEWqV9keqVqUK5SntHHqdLVyaRuS8XDDc48/ewButXWVng4dHcKxvsZt489Zv6S+0U0UBVxyfrAGJAjAxrXdeM8fvhI3veEiLOlsXMWITh8MFREREREREVGdJIlY0pWo71wzalV/RsI7C81I6zNdr7U+M6ZQ9UcAXNeDMwvVly49fzXGW497+Im9M75/q3S2xfCm15y/4Nr1vdyBo0OojFNCnq3PiIiIiIiIqFUSsTAkSYQqSzBr1X+Hs6V5nUOuGFQpMnQFqixBFIV6uGgyIwGZcEiFJAuoVn0US6xWNB5RFMatVlQsVfD8vmPzPKMTtj3duEoRAGw+d+Wox+esX4Itm1bi/X90Fd74mvPRkY7O9fRokWCoiIiIiIiIiMYYWWga2bFk2Q48r3U7qxrxfR+WHVQcGqlONJVWXZoiAwDsWWiBlkqYWLeqc8xxVZFw9rolKC/QCk+nghfGaX2mawqWdnEXHREREREREbWGJIn1YE48YgACYNnuvK0RVKseirWWZSPVhuLR0Lgt3E+mawoMQ4UAAZFwsNkoV7DmZrKniHM3LIUiSw3PPbZj//xOpiaTK+H5F3sbnlu9vA1tycioY7Is4dWXn1VvgUc0gqEiIiIiIiIiGiNkqIAAKLIERRHh+0BpgVUrsisuPM+HJAn1oFB4CpWKVCVY5BkJJM3UpeePbRFWcaooWZUptWKj5vm+j737+xqeW72sDaLI5Q4iIiIiIiJqnXg0BFmWIMsSorUqwMO5+alWlC9a8HxA02TomgIIQKLWkm2qRoIlkbAOUTyxzkGN6ZqCs9f3NDx36NgwDh4ZnOcZAY8+/RLG61q3ZdOq+Z0MLWpcZSMiIiIiIqIxJElE2Aiq/oxUK1poVXdG5qNrQeszTZMhj7Mr7OXUWgDJqsxOSKqnM4HlPckxxx9+fB+sWaiGRI295oqzcMHG5fXF2RFrVrL1GREREREREbWWKApIJYJgTjRiBMGcSnXO24h5no98rarQyM/LUdOY0nrJy5lhDaoqQRJFmLVqRZlcCZ43TkqFcOHZK8Y9d99Du+CPl/CZA45bxVM7DzU815Y0sWppet7mQosfQ0VERERERETU0EgrMcMYCRU587oAMplyLbBTb31mTN76DABUVQYEwHU9uG51VuZy7SvOhHxSGfGy7eDJnQdn5f40miAIWLOiA9dddTb+/I+
vwXv+8JW46pJ1WNqVxOrlDBURERERERFR68UiBlRVgiyKiNbakGVypabWVhy3ikLJhl2ZvNpyqVzBsf4Mqp4PWRaDKtQAkk1WKRqRqFUrioZ1SJKASqWK/qH8globWkg60lEs6x676QwAjhzPYOfeY/M2F0WW8K63XYYLz14+pi3blk2rIAjCvM2FFj+GioiIiIiIiKihSDgI6eiqAkkW4Hn+gqm841Y9VCpBIEjXg0pFIyGoyYiCAE2tVSuapdfT3RHHW667AN0dcciSiCWdCfzBqzZhy7mnZjnp/sE8evuzrZ4GgCBg1JGO4rIL1+CP33JpvbIWERERERERUSsJgoB0IgIgqBYkSQIc10NhitWKCkULR49nMDBUwLG+LI4cH0YmV4Jz0gYpy3bQ259F32AejuNBkgQk4iEIEBAKqdBqFZ6bFYsYUJSghVtbKgJBDDacDQwVGCwaxxVb1o57buvvd8OtevM2l1TCxOuuPBsf+J/X4OpL1yNi6ggZKjaua9ymjWg8cqsnQERERERERAuTLEvQdQWW5cDQVRQKNsq1f281q9b6bGTHnygKMPSpL5LpqgzbdmFXXJjh2ZnTmhUdWLOiA57nQxRPrR1fvu+jbzCPXXuPYde+YxgYKmDtyg7c9IaLWj01IiIiIiIiogUrYurQMsEaRCxiYChTQiZXQtjQJlw7yBXKGMqUAARrH45bheN4yDhlZHJl6JqMkKHCsh2UysGGKUEMqgpFIwYkUYQgCmhLRqY9d0EQ0NOZwKGjQ9BVBe3JCI4P5lEsVyBmi0jFzWnf+1S1Ykkaa1a044X9fWPODedKePyZ/diyaX43oBm6ildccAYu3rQKA8OFMZWLmmFXXPzoF0/glZvXorsjPnuTpAWNoSIiIiIiIiIaVzikwbIchGqhopJVQRKzlMKZgZHWZ6FakChkqE2VbtY0Bchbc1J56VQLFL2w/zh++cBODGWKo46/eLAfdsWtV30iIiIiIiIiorHaUxEcOjqMiKkjV7Dguh6O9WWQSpjQG1QRyuRKyOTKAIBoREcyFkbV81C2HBRKFizLhWUH/wAABMAMa4hHDMhSEBjRNBntqWjD+zdDU2X0dMZx6NgwDF1FW9JE/2AB+YINSRQRj06vtdqp7JpLN2DvgT6cXMxp5dI0lvekWjMpAJIkoiMdnfb1rlvFf927HS8dGsCBI4N4+/WbW/p6aP6w/RkRERERERGNK1JrKaZrCgQRcF0PdsVt6Zx834dlObV5BVWTptr6bISmyoAAOK43r6WnFyNdVcYEioCgBd0L+4+3YEZEREREREREi0fI0BAKqRAgIJ0wIcsiHNdDb38OQ9kiPO9E+mQwU6gHiuIxA8lYsLFLEkWYIQ2d6RiWdMWRiIegqhJChoLujjjScROyJEFRJHS1x7BiSRohY3YqTRu6ip6OOCAAYUNDMh7MKZMrI1coz8oYp5K2VASbzlxWf9yeiuAPb9iMm/9gCzrbYi2c2fR5nof/9/Mn8NKhAQBAxaniu/+9jetCpwluJyQiIiIiIqJxaZoCRZHgOFUYmoJS2UHZqrS0Ok3FcVH1fIhisPMOCBa1pkoQBUiiCFWRUKlUYdkOzCZDSaeTns4EzLCGQtEec27X3mPYuLanBbMiIiIiIiIiWjzaU1EctAahawq62mMYzpZQKNrI5S2UrQpScRP5ooViKWj3noyHETV1AEA6acLQVeQLFvJFCwAQMw3ETKN+f0kSkUqEEY+GmqrkPFXhkIbu9jiOHs8gaurwfA+ZbNCiTawFnuiEK7asxYEjg3jFBatxzvqlM6pqfawvi6N9GciSiK72GNKJyKj7OU4VijL9lmaT8TwfP7nvaex5aXSAyK16+P492/EHr9rEtaFTHENFRERERERENKFwSEMmW0LI0FAqOyhZlZaWty7XqhQZerDLT1WlphZPdFWBh6D6UqVShc1Q0YREUcD61V3YvmP/mHN79/fNSws0z/NwrD+Lno7EnI5DRER
ERERENBc0VcaynhR6+7OwLAfphImQoWIwU4DjBFWLAAAC0JY065unOtqi9TWYkKGiPR1BoWQjX7BQKNkQBQHxqIFELAxJmtsmRRFTR3s1ir6BHOKREKpVH/mChcHhAlRFgqowejAiEtbxvpuvnFGYqFr18PPfPYsnnj046rhaq0bV3R5Hvmjh2T1H8dbrLsT61Z0znXZDT+48iGeeP9LwnOf5+PEvn4RdcXHBxuVzMv5sc6se+gZzsG0Xhq5A15TaGiONh3+ziYiIiIiIaEKRcBAq0nUFAFCpVOG6Vcjy3O2Cmki53vosmE+zrc90XUHJ8qFrCnJ5C1bFmfU5nmrOPKNxqMiteth3oA9nrumes7E9z8fdv96BZ/ccwRtffd6cjkVEREREREQ0VzRVxrLuJIazJfQP5RHSVajtcQxniiiWKhBEoD0ZgaGrgAB0t8cRqVUrGiEIAiJhHZFwcNz3/TmpTDSeRCyEqudhcKiAVDyMarWKUtlBJldCeyo6b/NYDGYSKKo4Ln7ws8ex70B/g3NVHDgyhANHhurHfv7bZ7FiSaq+Vjabzt2wFAcOD+K5F442PO/7wL1bn0HFcXHJeatnffzZ1DeYw3/dsx1D2dKYc4IQ/P1av7oTS7uSLZjdwjW3cUUiIiIiIiJa9AxdhSgKkEUReq3d2EiwZ74NZ4uwKy4gAMZIqKiJ1mfAietGqus4jge36s3uRE8xS7uSCIfUhud27Ts2o3vnixYe2r4Xv3zgOTz69EvYf3gQJSso9+77Pn7222ewY/dheJ6PH/7iCezYfXhG4xERERERERG1iiAISMbDWLEkDU2TIYsi2pIRtKVMdKSiQcUUUcCSzsSYQNF495tv6YRZr54Uj4UBAKWyg4rjzvtcTkWlcgV3/eiRhoGi8eSLFu5/ePeczEeWRNz46vNw3lnLJnzefQ/uwosHpz7n+VatevjBvY83DBQBwaY2y3Lgef48z2zhY6UiIiIiIiIimpAgCAiHNOQLFgxDhWW7KFmVKS1uzaZcoYxs3gIApOJhyLIESRaDHXxNUBSptltMhKpK9RZoMlugjUsUBaxf1YXHnz0w5tze/X1wnGpTLehG9A/m8a0fPYxSuTLmXMTUEQlpONqXrR/zfeAn9z0Fx60umrLaRERERERERCfTVBnLe1IYzBQxOFyob5gSRQFLuhJNr3XMt3TSRK5QhipLCBkKSmUH2XwZbclIq6e2qHiejxcP9WPV0jaIooBMroTv/Pc2DGWKTd/r8WcO4Oy1PVjaPftVdkRRwOuvOhu6KuPhJ18c93n3PbQTty555YyqNM2VHbsPY3AK7+vIJkQ6gZWKiIiIiIiIaFIjZbVHFrUs24HnzV91n0LJxlAm2EkUjxnBfASgMx1reqFCEIT669DUoGoRW6BNbv0ZnQ2PV5wq9h3sa/p+rlvF//vFEw0DRQCQL1ijAkUjRspqN2rHRkRERERERLRYCIKAdMLEip4U4rEQ4rEQli9JLfhAEQBIkohELKhWFKtVLSqWKnDcaiunteg8/2Iv/uMnj+Kr3/sdHtuxH9/4wUPTChSN+On9O+DO0WcgCAKuecUGXHXxunGfc3wgj53jtElrpWrVw4PbX5jScxkqGouhIiIiIiIiIppUyFABAVBlCYoiwvfnrwVa2apgcLgAIKheE48Ei1Ud6SjM8PSqCxl6ECbS9WChwLZZonsyK3pS9fftZM/uaX7B6HeP7kH/YH5ac1FkCe3p6LSuJSIiIiIiIlpINE1BRzqKjnQUqrJ4Ag3xWBiCKEBT5Pp6QS5fbvGsFg/P8/HAY0HQpW8wj5//9lkUiva07ycIwOrlbZjL5l2CIOCyi9bgtVdsHPc5v9n2PKrV+duIOBXPPH8YmdzUvjZHNiDSCYvnuxIRERERERG1jCSJCBkqSqUKDF2F41goWRWE57hlmF1x0D+Yh+8D4ZCKVDwMICizHa/thJuOkDG6UlHFqaJa9SBJi3/vjV1xUHGqMHQV8iy
+HlEUsX5VJ57ceWjMud37jqF/MI+21NTKnB86NoTfP7FvWvOQJRFve8OFWDYH5byJiIiIiIiIaGpkSUQ8amA4U0IsYqBsOSiUbMQiBmS5+Rbp88FxqiiWbfi+D0WWoCgylBbNdc9LvTg+kJvSc6OmjpvecBHcqoejxzP1f0baeS3vSeLaV5yJ7o74HM74hIvOWYGDRwex84VjY84NZ0t4auchXHD2wmhb73keHty+d8rP1zVGaE7Gd4SIiIiIiIimxAzpKJUqCOkqcnkLZcuB7/sQhOn3SXerHjLZEqqeB1EUIEkiJFGEJIkQBQGDwwV4flBRKJ00AQDxWAiphDmj16JrCgRRgAwRiiLBcaqwK249bLRYFUs2+oeCqk6iWEJ7KgJdm70dVhvWdDcMFfl+sBPtrdddOOk9HKeKn9z3NPxpbJ2TRAFvue5CrFra1vzFRERERERERDSrkrEwhrMl6JoCXZdhWS5yBQvJ2qawhcR1q+jtz6LqjV2QEDE/1bhH+L6P3z06tXZcbakI3nHDZkRNAwCwpDNRP+e4VXie35KWXVdevA679h5ruL7zu8f24Jz1S6AorQ+XPbvnKIazpYbnzl7Xg8s3rwkCcUUbh3uHF+TXbqst/i2YRERERERENC/MWlUiXVMgSQI8z4ddmX7bMM/zcXwgi0LJRtlyUCxVkMtbGM6WMDBUQN9gHlXPh6pKaE9FIUBAxNTRMQttrwRBONECrRa6sez5XUCaC7mCBQCQ5ODz6RvIzerrWrkkXa8WdbLd+3pxrC876T3uf3gXhmo76ZohigLe+JrzsWZFe9PXEhEREREREdHsk2Wp3qY+Vgu9FIrWgmt/BQDDuVJ9nSli6tB1GZIUbJRzT5pvo+DRbNrz0vEpVSla1p3ELW+6pB4oOpkiSy0JFAFAKm5i04alDc8VijYe27F/fifUgOd59RZzJxME4Iota5GKm1jSmcDqZW04Y3k7DH1xbzicC6xURERERERERFOiKBI0TYZtuzB0FYWijWLZnnYlnFyhDMfxIMkCEtEwqp6HarX2j+ehWvUhCEB7OgpREBAKqehqj83a6zH0oJ2brsnIFxZ/qMitevWQV1c6hsFMEWXLwfGBHNpSEYRmYVFEFAVccfE6/PDnTzQ8v/WR3XjHDVvGvX7/4QE8+vT+cc+/9boL0dMZR99ADscH8ugbzKFQsmGGNLzigjOm3F6NiIiIiIiIiOZHMh5GJl+Coav1daNcoYxEbOFUfLErLoqlCgAglTChKSdiEq7noVTI42j+xPOLJRuh0NzNPxkPY8MZXdi9r3GlHwBYt6oDb3zN+S1rzzYVr9yyFs88f2RMKAsAfv/4Xpy/cdmsVtBu1rN7jo67se2c9UsW1NfoQsZQEREREREREU2ZGdZh2wWYoVqoqGgjHglBkporhOu4VWTzZQBBqeywoU34fE2T0d0en1GrtZOFdBWDQH1HV8WpwvM8iOLiLOprWcHimKpKkGUJ7ekI+gfzKJUd9A/m0ZaMzEp7tw2ru9CRjjbcUbfvQD8OHh3Csu7kmHN2xcFP7nt63Pueva4H61d3AgAiYR2rl7MiEREREREREdFCpygSoqaBXL6MaERHv11AvmgjFjEWzBrLcDYIloRDKjRFhiAKkCURjlOFLIrQ1NHBl1LJhuf5EMXZW4d6ubZkBG953QXoH8zjgcdewM69R0eFiy44ezle+8qNczb+bImaBi44ezm2PfXSmHNl28HDT+zDVZesb8HMggrpD05QpeiyC9fM84wWr4Xxt5iIiIiIiIgWhZhpAAKgaypUVYLnA/mi1fR9hjJF+D6g63I9UBSPhRAxdYRCKlRVDoJKAhAKqVjSlWw6uDQZQ1dqi0gSFCW4d75oz+oY86lsBZWWQrW2bgIEtKUiCIdU+D7QP5RHsTTz1yeKAq68eN2453/zyG74DbbZ/eqBnfUg2ckipo7XvPKsGc+NiIiIiIiIiOZfstYqPaxrUBQJnufXW7S3Wtm
qwLJdQAAS0aBVWyoexqplbVizsgPLl6SQTJijrqn6PkrluV8jaktF8KbXno8/fceVuPLidbjk/NX4ozdejOuuPHvBB4pGXHbhGqhK42pK2556CYVprBvOhp17j2JwnCpFZ69bUv+apckxVERERERERERTpigSImEdABCLBP3c8wULXhO95kvlShCAEYBkPFi0icdC6EhH0d0Rx9KuJFYuTeOMFe1Yt6oTS7uSkGc5UAQAgiDUSzDrWlDBZzhbwvGBHFy3OuvjzSXf91GutW/Ta23OVFWCAAHppAkzrNWCRQUUZiFYtGZFO3o64w3PHTgyhBcPDYw69uLBfjy589C497v+6nPYs56IiIiIiIhokdJUGREzWC+KR6e3XjQXfN/HcLYEAIiaOmQ5qO480vZKFIO1oWht7i83n6GodNLE5RetwbWv2IAVS9LzNu5sCBkqLjl/dcNzjlvFA9v3zvOMgipFD0xUpeiiM+Z5RosbQ0VERERERETUlFRtJ0/IUCHLIqqej0JpagstnudjqFZyOh7RocoSJFlE+qQdYfPFDAVVkpLxEOKxoApT2XJwtC87rQpMrWJXXHieD0kWoNdKdi/tSiIWNYJgUcKEaQavdWCogFK5MqPxBEHAVRePX776Nw+Prla0pCuBC89Z0fC5529cxlZnRERERERERIvcyHpR2NCgKMF6UavXVoolGxWnClE8sTkunTQnrQIkCEDFqcKqbeCiiW3ZtAoho/FmsSefPYBMrjSv86k4LjpSUQgNPuaNa3uQirdmHXKxYqiIiIiIiIiImqJpCsIhDQIERM1gQSZXsBq2vDpZNl+C63qQZRHRSFByuj0ZmfXWZlOVqLVcEyAgHgmhuyMOTZPheT4Gh4vo7c/CWQRVi0pWEBIyahWXdF2BLEvobIshXivtnY6b9V2Dw9nG5Z+bsXJpGiuWpBqeO9qXxfMvHq8/VhUZr7tiI26+cUt9EQ8Idi9e+4ozZzwXIiIiIiIiImqtkfUiAIjV1nyy+TLcqteS+Xiej+FamCUWDUESRaiq3LAq0clCRvA6coXGbdxpNE2VcdmFjav/VD0fv3nk+Xmdj64peNNrz8d733EFzl7XUw8XCULQro2aw1ARERERERERNS0ZDxaHzLAGSRLguh6Kk1S/cZxqvXR0Mh6GKAgwDBXRl4VM5psgCOjuiKO7Mw5JFqHKErraYkjEQxBEwLJdHDuemXFln7lWrs3PMIIqRSOLeADQ0RZFovZ5JWIhQAAc15uVsNRVl0xQrWjb82PKnK9a2ob3vuOV2HTmUggCcMO1m6Cp8oznQUREREREREStl0rUqhWFVKiqFFSszhRaMpd80UK16gcb22pBovZUBEKj8jUnGVlXKZWdRbHZbCG4YOPycQNbdsVFtQXhsrZkBDe++jy87+Yrcfa6Hmxc24N0klWKmsVQERERERERETUtZGjQdQWiINSr3+TyE+/eGswU4PtAyFAQ0lVAADrS0fmY7qQiYR0rl6TrAaeYaaC7PQ5dl+H5wMBwYUxAZqFwnCoc1wME1FufmS8LFQFAeyoKSRYhCgL0WohnNkp4L+lMYM2Kxq3L2pImKo475rimKrj+mnPx3ndcgeU9jSsdEREREREREdHiY+hqvSJ0OmECQhDMKZbseZ1HteohW1unikdDEBBsbAuftF4yHkUSYejBGku+0NoWbouFLEu4YsvaUcdURcL115yDt73+whlXKXfdKjxvesGkVMLEja8+Dzdcu2lGczhdcTsgERERERERTUsyHsbR3gwiYR3ZfBkVp4qyVYGhj+2hXijZsGwXgggkY8GutWQsvKCq1EiSiK72GKKmjt7+HACgMx3Dod4hVF0fVsUJwlALzEjrM12TIYkiJFmEriljnhfSVeQLFnRdgWW7sGwHkfDkJb8nc+XF6/DC/r764zUr2nHlxevQ2Rab8Lq2ZGTGYxMRERERERHRwtKejqJUrkCFjHhERyZnYShbhK4pMw6WTFU2X4b
n+VBVqb7xqj3V3DpE1NRRthwUihbiUQOiyHotkzln/RI8/MSLGBguYGlXEje+ehPi0dCM7rlr7zH84oHnkC9YkCUR6WQEHekIOtJRtKei6EhHETKmtl4nipNXqaKxFs7qLRERERERES0qZkiDqkqoVIJKP7m8hWy+PCZUVHFcDGeLAIBYxIAsS5BlCanEwiw3HA5pWLEkhUPHhmDbLkK6hnzBQrlcWZChonItVDQyt5OrFI0IGbVQkaoAKMOyZl6pCAA622I4a003imUbV168Dku7krNyXyIiIiIiIiJafGRJRHsqgmN9WcSiIRTLDhyniuFsaV5aT9kVF/liUF0oGQsCLRFTb7gBa1wCYGgqFEWC41SRL9qI1apb0/hEUcTrrzobh3szuPi8VTMO8WzfsR8/++2z9cdu1UNvfxa9/VkAwYbHay5dj5VL09DUJj5fagpDRURERERERDQtgiAgEQvjeH8O0bCOXMGCZbuwKw40VYHv+8jmy8jmy/B9QFHE+gJMezqyoHcHSZKIqGmg384jbCjIFyyUyhUk4z4EYeHM2/M82JWgxdhImGu8Ut4ju7Y0TYYgAlXPR8VxoSozXxq4/tpzocjSjO9DRERERERERItfNGIgV7BQLNlIJcLo7cuhULIRDqkNK1zPBrviIpsvoVQONlHpugxdUwGh+WrJYUOD6wXVigaHi8gXLURNfUGtCS1Uy3pSWDYL7e4ff/bAqEBRI8PZIqKmwUDRHGONLiIiIiIiIpq2WMSAJIuQ5RPlpLP5MuyKg6N9GWRyQaAoHFLRkY5CgIBwSJuVtltzzQwHr0fTFEiSgKrn1wM8C0XZcuD7gCyLQahHwLjVlFRFhixLECDU285Z9uxUK2KgiIiIiIiIiIherrMtClEUoKsKopFgHWgwU4TneVO+h+NU0dufxaGjQzjWl8VQpohC0ULFceH7PoBgbeP4QA7H+rL1QFHIUJCOB1WRErEQFKW5dYuIGcw3HNIgSQJc10OpXGnqHjR9T+08iHu3PjPp83wf+Mmvn4ZbnfrXFDWPoSIiIiIiIiKaNkEQkKj1Ro/WqhCVyg6O9eXgOB4kWUBbykRbMgJZkqCqMjrboq2c8pSpigxVlSFAqO+iW2gLSGVrZLFMrf8pSeP/qD/yvJGS37MVKiIiIiIiIiIiejlZltCWCioExaMhyLII1/UwnCtN6fqyVcGx/iws261v9MoVLAwMF3H0eBA0OnJ8GL39uWB9RAg2tXV3xtGeikKWg3WoVLz5lmu6pkDXFYiCgEht09lISzWaWzt2H8ZP798x5ef3D+bx0Pa9czgjYqiIiIiIiIiIZiQeDUEUBaiyhJBxotywGdbQ1R5H2NAAAUglwlixJAV5EVW1GdmZZtReV8laYKEiO5hPSA/mZ47T+mxEuBYqGglJWfaJnX1ERERERERERLMpHg3BMFSIgoB0IgwAyBfsSTc55QplHB/Iw/N86JqMro4Y0kkT0YgOvdbW3fMBx/EAATBNDT0dcbQlI1BlCaIoIJUIY1l3csLNVxMZ2URnhnVACNZQFloF68XscO8wDh4ZRKFk19emnn3+CH5y31Nodqnqwe0v4PhAbg5mSQAgt3oCREREREREtLhJkoh4NIShTNDHvOJUkYqH68EVTZPR1RaDpi2+/uaRkIbBoQIMTYUgAq7rwa649fZhrWRXHFSrPkQR9fc2PEmoaKRSkabIEEXA83xUnOqCeD1EREREREREdOrpbIti/+FB6JoK09RQKNjoH8ojahowQ9qo0I/v+xjKFJEv2gCCsFAqHg5auSsygBPrHhW3ikrFha7K9Q1skiQiEQshHg1NO0w0ImLq6BvKAwg2aRVLFQxni2hPBW3daGY622L47n8/ggNHhqBrCpLxMI71ZSYMFG3ZtBIA0DeYx/GBXL2iuOf5eHD7C3jzay+Yj6mfdrhqSERERERERDOWiIUwlC1C1xR0d8QhCgIgAOmEiWQ8DEFYnIstmqZAUSQ4ThU
hPVhAKpXtBRHCGWl9ZugqBAhQVQmqMvG8gtLfEiqVKnRNQanswLKdBfF6iIiIiIiIiOjUoyoy2pIm+gbySERDsCwnaIOWLSGTKyGkqzDDGlRFRv9QHpbtAgKQjIUQNQ0AQcBHU2VYtgPLduG6VaiyBHUkTCSLSMXDiEVCsxb4EQQBiWgIA0MFxKIhlK0KLNvFwHAebcnIol3rWgjsiotf/u45HDgyBACwbAdHj2cmvOaaSzfg0gtWjzpWKNkYHC5AlkR0tcfnaLbEVUMiIiIiIiKaMVmW0J6KoG8gD1EQYOgKOtpip0RYxQxrGM6UYIyEiqwKErFwq6dVb8U2UhFqsipFI0K6hkqlNCpUFIsYczZPIiIiIiIiIjq9xaMh5AoWLMtBV3sMxVIFhZKFSqWKYrmCYrkCQQB8HxBFoC0Zqa93pJIm0glz1P1ctwrLduG4LmRZghnS5iTkE4+GMJwrQQXQnoqgdyCPUtnBULaIVNyc9Hoa6+DRIfz3r55EJlee8jVXXbxuTKAIAMyQBnOK62E0fYt/dZeIiIiIiIgWhEQsDDOkw/P9UyJMNCIS1muhIgUQAMfx4DhVKIrUsjm5VQ+VShUAoGvBez3VRZSQoSKTC0JFAGDbDnzf5w47IiIiIiIiIpoTgiCgpyOOI8czsCwHUVNH1NRRcVzkizaKJQueB8iyiPZ0FKosQRAFdLXFEDH1MfeTZQmmLOHl7dDmgiSJWNqZwMGjQ9A1FW1JE/2DBeQLNiRRRDwamtPxTyWuW8Vvt+3Bw0/um7DF2cleuXktLrtozdxNjCY1s0aCRERERERERC+jKNIpFSgCgkpAkixCEsV6gGekSlCrlGvja5oMWZIgikJ9B99kDCN4nqrIkCQBnh+UnSYiIiIiIiIimiuyLGF5TwpLuxNBUEgI1iZS8TCWdCXRljLR2R6DKkuQZQnLupMNA0XzTdMU9HTGAQEIGxqS8aB6dSZXRr5otXZyi0jFqeL5F3ubChRdduEZeOVmBopajaEiIiIiIiIioklEQsEiVtgIdsCVygsjVGToQbWhcBNlvmVJhFYLR41UK7JsZw5mSUREREREREQ0WsjQ0N0Rx+rl7WhPR6CqMkRBQNjQIIsidF3B8p5kfc1iIQgZGrrb4wCAqKkjHg3WiQaHiy1fI1osQoaKW958KS6/aA16OuKTfr6vuGA1rrx4HStrLwCn1vZRIiIiIiIiojlghjVkcqWgys9wEXbFhetWIcvz3wLNrXqwrCAENFKdKNxk//iQocK2XeiagmKpwlAREREREREREc0rWRKRiIWRiIVRtioolSuQJBGxiLEggyQRU0d7NYq+gRzi0TBcz0ehYGNgKI/2dHRBhaAWqnBIw5UXr8OVF6+D7/solisYHC4E/2SKyORKUBUZZ6/rwaplba2eLtUwVEREREREREQ0iZChQhQFyAhaoFm2i5JVQdQ05nUebtXD8YEsPB+QZRGaEvxYHzam1vpsREjXMIxSfcHLrrjwPB+iuPAW7YiIiIiIiIjo1Gbo6pTburdSIhZCtVrF4HARqXgYXtVDqeygfzCP7o44JImNoqZKEASYIQ1mSMPynlSrp0MT4Fc1ERERERER0SQEQYAZDkpbG7UAz3yXt65WPfQN5OA4HmRZREc6CiDY5dVsxSRDVwABUGQJsizC9wG7wmpFREREREREREQTSScjiEUNCBCQTkagqhKqno9sodzqqRHNCYaKiIiIiIiIiKbADActxsK1nXOW7aJa9eZl7GrVw/GBHCpOFZIsoCMdhSJLUBQJnW3Rpu8nSWK9StHIn2yBRkREREREREQ0uY50FOGQBlEQkIiGAAD5ggXXrbZ4ZkSzj6EiIiIiIiIioikIGxoEUYAsS1DVoDJQ2ZqdakVu1Rt34ala9XB88ESgqDMdqweKlnYlm65SNGKkZdqJUJE7vckTEREREREREZ1GBEFAezoCCEHrNl2X4ftANs9qRXTqYaiIiIiIiIiIaApEUag
HcUK1P4uz0AJtKFvE4WPDONybwcGjQzg+kEMmV0KpXIHjVINAUWWkQlGs1rIsCBQpyvQCRS9/DbomAwDsigvPm5/KS0REREREREREi5mqyIhHgipFI38WSjYcViuiUwxDRURERERERERTFAnrAIBQvQWaM6MgTqFkI5e3AAEQRMDzfJQtB5lcGX2DeRw5ngkCRVIQKFJrgaJl3TMLFAGArqmAAMiSBEURa69n8Vcr8jwf+YKF4WyRZceJiIiIiIiIaM6kEmEIogBdU2DoCnwfyORKrZ4W0aySWz0BIiIiIiIiosUiHNIAIdiNJssiXNdDyXJghrSm71WtehjOFAEA8aiBWMRAxanCtl1UHBd2xYXjjASKovVA0dLuxIwDRUBQecnQVZTLFeiaCsexYNlOvYLRYpXJlZArWACAfNFGV1tsVt4vIiIiIiIiIqKXk2UJ8aiB4UwJ8VgIZSuLYqmCWMSFqiy8KIbv+xgcLqJUtgEAkixCliTItT8lSYSmyFxHoVEW3lcyERERERER0QIlSSJChopSqQIzpCKTs5ArlKcVKhrKFlH1fCiKhFjEgAABmiJDe9mik+f78DwvWNiRRSztTszqolTIGAkVycgXgspLi5nn+cgXg0CRoohwHA99gzl0tsVaPDMiIiIiIiIiOhUl4yayuTI0RUY4pKJYqiCTK6E9FW311MbI5ssolOz6Y8/x4DhjK3BHTA2puDmfU6MFjKEiIiIiIiIioibETCMIFZkGcgULlUoVpXKlqQo/JauCYqkCCEA6aUKAADOsIWoasGwHZduBZTsQPUCUJCiKhJ7O2Q0UAUEbt0EAmqYAACpOFYWSjbChQhCEWR1rPli2A98HZFlER1sMvX1ZOK6H/qH8oq/ARDQX8vk8tm3bhj179iCTyUBRFLS3t+Occ87Bpk2bFtz3Ad/3sWfPHjz77LMYGBjA0NAQFEVBIpFAOp3G+eefj6VLl87LXHbt2oUnn3wSx44dg2VZiEajWL16NbZs2YJUKjWnY7uui+3bt+O5557DwMAAPM9DMpnEhg0bsHnzZui6PqfjExERERHRCbIkIhEPY3CogFg0hGK5glLZgV1xoKlKq6dX57pV5PJlAMFalKbKcN0q3KqHarUKt+rDdauwKi7yBRthQ4OuLZz5U+swVERERERERETUhIipYzBTACpAJKwjm7eQyZemHFrxPA9DtbZnUVOHpsgQxaDFmSxLiJjBL4N930el4qLq+dA1BaI4+7/cN3QFgihAhghFkeA4VQwMFTAsCYiEdZhhHbIkzvq4c6VUrgAIKjDJooj2VAS9/VlYtgvHrbZ4dkQLx2OPPYbPfOYz+OlPf4pKpdLwOZ2dnbj11lvxoQ99CMlkcp5neEIul8OPfvQjfP/738dDDz2EbDY74fO7u7vxtre9De973/uwdu3aWZ1LuVzGF7/4RXzpS1/CSy+91PA5kiThqquuwkc+8hG86lWvmtXxDx8+jDvuuAN33XUXhoeHGz4nHA7jzW9+M/76r/8a69evn9XxiYiIiIiosUQ0hEy2BBWAGdJQKNrI5MroSC+cUM5wtgTPB3RdrlfcVuSxbc4GM0XkCxYyuRIrPxMAYPGsDBIREREREREtAIIg1EtARyIGRBH1akVTMZwrwXU9yLKIeDQEAGivBYpOHkfTFIQMdU4CRSNjhGthqI50BPGoDkkWUK36yOTKONI7jP7B/KJoi+b7PkpWLVSkB69JVWSkkxFAAKpVv5XTI1oQHMfBBz7wAWzZsgU//OEPRwWKFEUZVZmot7cXn/rUp7Bhwwbcc8898z7XPXv24F3vehfa29vxx3/8x7j33nvHBIpUdWxVtaNHj+Jf/uVfsH79erznPe9BPp+flfls27YNGzduxIc//OExgSJVPREqrVaruO+++/DqV78ab3/722dt/K9//es488wz8fnPf35UoEgURcjyiX2jxWIR3/rWt3Duuefi7//+7+H7/N5HRERERDTXJElEMh4
GAMQjBiAAZctZMOsplu2gWA4qZidjwTwjpo7O9hhSSROxqAFdDwJQMVMHBMCy3QUzf2othoqIiIiIiIiImhQxdaiqBFkUEQkHlYUy+dKk11m2g3wh6F2fToQhCgJCIRWxiDGn851IOhmBokiQJQnxaBhLOhNoS5nQNRm+DxTLFfT255AvWC2b41TYFRee50OShHp5bkEUENJVJGOhFs+OqPUsy8J1112Hz3/+8/WgSSKRwD/90z/h0KFDqFQqqFQqeOSRR3DzzTfXr+vr68MNN9yAr3zlK/M21zvuuAMbNmzA17/+ddi2XT/e1dWFv/u7v8O2bdtQKBRg2zYqlQr27duHL3/5y1i3bl39ub7v46tf/SrOPfdc7N+/f0bzueeee3DllVfixRdfrB+77rrrcP/998OyLNi2jePHj+MrX/kKlixZUn/Of/7nf+Kyyy7D0NDQjMb/+Mc/jne96131gJIsy/izP/sz7NixA47jwHEc7NmzBx/72MdgGMF/TyqVCj760Y/illtuYbCIiIiIiGgexKMhSLIYVKEeWSvKTb5WNNd8369XzI6EdaiKDEkS0ZGOIhYxkE6Y6GyLYWlXcsz8s7V2aXR6Y6iIiIiIiIiIqEnTqVbkeT4GhwsAANPUoGsqhFrbs1bSVBkrl6bR1R4L2qFBQNjQ0NkWQ3dHDGY4KImdyZUW9C+mR957o1Z5SdNkdLUHZbqjplFvK0d0uvqjP/oj3HffffXHZ5xxBp588kn87//9v+tBGFmWsWXLFnz729/GN7/5zXoVIM/z8L73vQ933333vMz1+eefh+d5o4694x3vwJ49e/Dxj38cmzdvRjgcrs951apVeO9734vnnnsOH/zgB0dd99JLL+Haa69Fb2/vtOayfft2vOUtb4FlnQhWfvrTn8Y999yDq666CpoWfI9sb2/Hu9/9bjz99NO46KKL6s/dsWMH3vCGN8BxprfD94tf/CI+9alP1R8bhoGf//zn+PznP4+zzz4bohgs765Zswa33347fv/73yOVStWff9ddd+EjH/nItMYmIiIiIqKpE8UTa0WxiA5BDKr9jFRVbpV80ULFqUKSBMSjwSaEVMKEdFK7+2D+wc9ZI9WKFlK1JWodhoqIiIiIiIiIpqHZakXZfAmO60GSBSRqbc/akiZURR73mvkiCAKiEQPLelJYsSSFWNSAIApQFRmpRBiSJKDq+Qt6Ienk1mdmWEckrCOdDBb0kvEQdL317zVRK3zlK1/BD37wg/pjXddx9913Y/ny5eNec8stt+CjH/1o/bHnebjlllumHc6ZiTe84Q246667YJrmhM+TJAl33nkn3v3ud486vm/fPvz5n/950+OWy2XcdNNNowJFf/zHf4y/+Zu/GfeaZDKJu+++G+l0un7s4Ycfxic+8Ymmx3/mmWdw2223jTr2r//6r7jmmmvGvWbTpk343ve+N+rYP/3TP+EXv/hF0+MTEREREVFz4lEDshxUg47W1oqGM0V4Xms2aVWrHjK5oNpQLBKCJIrQNLkeLjpZLHKi2tLIJjNWKyKGioiIiIiIiIimoZlqRcWSjVytfVgyHoYkitB1BfHowmvLpWkKOttiWL2sDaoqQ4BQr/4zUSWmVrIrLlzXgyCi3vosEgoWv1IJExFThwABbckIZIlLIXR6KRQK+PjHPz7q2F/8xV9g/fr1k1778Y9/HMuWLas/Hh4eHlU1Zz7ouo4vfelL9Yo8U/Ev//Ivo0I9APBf//VfePjhh5sa+wtf+MKolmfRaBSf+9znJr2uo6MDt99++6hjd955Jw4fPtzU+H/1V3+FSuXE993LLrsMt9xyy6TXXXvttbjppptGHfvwhz88pvoTERERERHNLkEQ6pubohEDsizCcb2WtUHL5EvwPB+qKiFaq+DcnorWq9KebHS1IqNerciuLNxNZjT3uJJGRERERERENE0vr1Y0sjjz8oUiz/PQP5RH/1ABvg+EQyrCugYIQGd6/EWchUCSRMQ
iwc4182WhooXYAq38sipFoiBAUSRotXARAHS2Ba3dJFFEPBYEuWRZaslciebbv/zLv6Cvr6/+WNM0fOADH5jStaqqjmkn9pWvfAUvvfTSbE5xQtdffz2WLl3a1DWhUAi33nrrmOPf+c53pnyPbDaLz3zmM6OO3XrrrUgmk1O6/k/+5E9GBZssy8InP/nJKY//4IMP4t577x11rJk2Zn/5l3856vEzzzyD//iP/5jy9UREREREND1RU6+vQYwEdHIFa96rP9sVF/mCDQBIxoJ5REwdodoaz3hGqhUpsgSztmFrpNoRnZ4YKiIiIiIiIiKappdXKzLNWrUiJ6hWZNkOjvZlUSxVAAGIR0+04kolzFGhl4VqpK2bpimQ5KAFWnkBtkAbqaBk1FufaaPOi6KA7s4ENC2ovCSKwoKsEkU02xzHwT//8z+POva6171uTBWfidx8882jqgQ1uudcev3rXz+t6xq1CDs5pDORr371qxgaGhp17J3vfOeUr1dVFW9/+9tHHfvmN7+J48ePT+n6kwNNqVQK11133ZTHP++887Bx48YJ70lERERERLNPEAR0tscgiAIMXUWktgltcLgwr23QhrNFAMEGN11TIIhBBefJiKJQDyHFIi+vVuTO6Xxp4WKoiIiIiIiIiGgGGlUrGhwuoLc/B9f1IMsiOtuiiEfDECAgGjHqO9UWOkWRoOsKBAgI6UFQp1RaWC3QHLeKilMFBMDQg6CWGdLHPE+WRCzvSWH5khRWLmubdGce0ang17/+NYaHh0cdazak097ejs2bN4869sMf/nBOq5YpigJN06BpGs4+++xp3ePkQA0AHDx4cMotwH7wgx+MerxkyRKce+65Tc3hDW94w6jHruvixz/+8aTX5XI5/OpXvxp17LWvfS0kqbkKayeP/+yzz2LPnj1N3YOIiIiIiJqnKjLaahvLErFQvQ3acK4452P7vo9MrgTLdiGIwfgAkIyHoShT+5kiHg1Bkk6uVtSaFm7UegwVEREREREREc3Ay6sVRWrViqq1nWemqaG7Iw5dVSCKArraY+hqjy3otmcnG6lWFB5pgWYtrBZoI1WKdE2GJIqQJLEeLjqZIAjQNQWyxOUQOj386Ec/GnPs8ssvb/o+J19z9OhRbNu2bdrzmsy//uu/wrIsWJaF888/f1r3SCQSY45Vq1UMDAxMeu3Ro0fx6KOPjjp22WWXNT2HSy+9dFSVJ6DxZ3Kye+65B5XK6ADnbHxuQBAIIyIiIiKiuZeIhWHoCkRBQDoRbC7LF+ym26B5no9svozhbBGlcmXcjRKe5yNXKONw73C9XVksYkCWJCiKVK8+NBWiKNTDSKxWRFxFIyIiIiIiIpqhkWpFUq1akSQLaE9HkI6bEAUBhqFixZI0ohGj1VNt2kioSK+1QPMWWAu0shX84j30stZniym0RTSX7r777lGPw+Ew1q5d2/R9zjvvvEnvvdCMt9Auy/Kk1/70pz8dE55s9B5MJhKJ4Iwzzhh17P7770exOPHu5Ebv7XTGX4yfGxERERHRqWSkDZqunWiDNjBcmHIF1Yrj4lh/BsPZErJ5C32DeRw6Noze/iyy+TIqjlsPHR05PoyhTAnVqg9ZFpGMh4JAEIC2VASi2NxaSSIWrlcrGtlols2zWtHpiKEiIiIiIiIiohkSBAHpRNCXPmIa6GqPByEXAWhPR7CsOznlEtMLjaJI9co/Iy3QiiW7lVOqq1Y9WHawS64eKmrQ+ozodNTX14djx46NOrZu3bpphe7OPPPMMceeeuqp6U5tXgwNDY05FovFGlYwOlmj17Zhw4ZpzePk985xHOzcuXNexu/q6kI8Hh91bMeOHQuq2hwRERER0amsURs01/UwPIVWYoWihd6+LBzHgyyLiJg6ZFmE7wOW7WI4W8LR41kcOjaE4ezLw0Rh9HTGETUNCBAQj4XqG8aaMapaUTT4s1R2mq60RIsfQ0VEREREREREs8AMawiFVEiiCFkUoWkyVix
JI9FEeemFamQ3XTgUBHfK5Qo8r/W/lC7VqhSpqgRZliCIAkK13XNEp7tGwZUVK1ZM616NrpssGNNqu3btGnPs8ssvn1KoqpXvneM42Lt376hjyWQSkUhkVsYvFAo4ePDgtO5FRERERETNS8TCMAx1TBu0kXbuJ/M8H4PDBQwMF+H5gK7L6GyPIRUPY0lnAj2dcSTjQWs1QQR8H5BlEanESJhIh4CgavbS7gQ60tFpzz0eDUGSRKiyVF8TOj6QQzZfnvY9afGZvN4vEREREREREU1KEAT0dCRQLNkQBAHhkHrKtOGKhHX0DeShq0p9V51lOy0P8IwswI3MwwxpTZfzJjpVPffcc2OOdXV1TetesVgMhmGgXD6xcHzgwAEUi0WEwwszOLlt27Yxx/7wD/9wStfO5nvX2dk5pfuP2LNnDxxn9M7f6Y490fjLly+f9j2JiIiIiKg5nW1R7D88WG+Dli8ErcwkSYChqdB1BYamwPN89A/lUXGqgADEowbikaBKkCSLqFY9KLIExZQQNXX48GFXXGiqDAHBekgopCIVN2dlzUaSRCRiIQwMFZBKmAAKKJYqGM6WYFccpBMmRHHx1bFx3SrsigtJEiFLImR5cVYXny8MFRERERERERHNElEU6lV9TiWyLMEwVJTLFYQMFbm8hWLJbmmoyPP8esntE63PtJbNh2ihefHFF8ccS6fT075fW1vbqAo3vu/jpZdewsaNG6d9z7n0k5/8ZNTjrq4uvOlNb5r0unw+j4GBgVHHRFFEMpmc1jza29vHHGv02Ux0biafW7PjExERERHR7AvaoEXQN5BDIhaC61ZhVRxUqz4KJRuFWpt5QQgqD0mygLaECV0L1jvi0RDaUhH4vo+SVUGpVEGxbMNxqtDVoGV9OKQhlQjD0Gd3rSYZD9fHbEtGoKkWhrJFlMoOjjlZpJMRaOriiZ1YtoO+gRxeXoBbEABZWnzhqPmyeD5dIiIiIiIiImqZSFgfFSoqW0ELtFZVBipblXqJb1WRASFYQCOiQC6XG3MsGp1+2ftG7bcajbEQPPPMM3j00UdHHfvoRz8KXZ889NnoNYXD4Wnvvm32fTudPzciIiIiolNZIhZCvmihXK6gIx0NqgzZDsqWg7LtoFKpwq+1O0snTMhS0Oa9Mx1FNGLU7iIgEtYRCQc/21QcF5blQFNlaJoyJ/MWBAFLOhMYHC5gcLiIqKlD02T0D+bhuB56+7NIxsKLYpPdSGs5zwcURQLgw3E9+D7guF6rp7dgMVRERERERERERJOKmDr6BnKjWqCVrUrLgjwla3Trs5ChQuKuMqK6QqEw5thMWpU1urbRGAvBZz/72VGPN23ahPe+971TurbV71urx29WX18f+vv7m7pm7969Y+Yzn0GnYrE44WMiotMJvycSEQXm6/thNCSh6voolSqoVqsQAIQ0ASFNhev5cBwXuiaj6tgQIaMtHgF8B7mcM+F9bduBbZcnfM5MqTIQj8joHyoAVQ/JqIpMrgTLdtA/aKNc1hCrh58WplzBgm1bkCQRyagOURDgw0e16sN1q3CrPjyvCtsqAcLC2ZDR6p+9GSoiIiIiIiIioknJkohQSEWpVEHYUJHNWyiW7ZaEinzfR9k6ufXZwt8RRzSf8vn8mGOyPP2lwEbXNhqj1R577DF85zvfqT9WFAVf+9rXpvzaW/2+tXr8Zn3pS1/CJz/5yRnd49FHH0Vvb+8szWh64xMRUYDfE4mIAgvl++GeVk+gCUN5YOh4q2cxdQcy458brP350gvzMZPJvbwVeStwCx8RERERERERTclIee1QLUhkWQ48b37LQ3uej/6hPDzPhyQL0GvlvU22PiMapVweu0tVkqRp369ROKXRGK1k2zZuvfVW+L5fP/bZz34W559//pTv0er3rdXjExEREREREb0cQ0VERERERERENCVmWAcEQFNkKIoIzwdK1sQluGeT61bRO5BFqewAApCIBm19dF2Bokz/l+5EpyLDGFt
2vlqtTvt+ja5tNEYr/eVf/iV27NhRf3zTTTfhgx/8YFP3aPX71urxiYiIiIiIiF6O7c+IiIiIiIiIaEpkSUTY0FAs2QgbKjKOhVLZnnGVoEyuhFzBgiQKCIc0mCENsjw6JGRXHPQN5lGt+pAkAW3JSFClSADakpEZjU90KopExv69cF132vdrdG2jMVrl3//93/F//s//qT+++OKL8c1vfrPp+7T6fWv1+M16//vfj7e+9a1NXbN3717ceOON9cebN2/Ghg0bZm1OkykWi6PaWWzevBnhcHjexiciWkj4PZGIKMDvh9N3vD+HslWB7bgYGC4APpCMh2HUKku3WtXz0T+QQ9X3ETF1RMM6JEnCkq44BEFo9fSmZNeuXS0dn6EiIiIiIiIiIpqyiKmjWLIRMjRkchbKtRZooji9YsjZfBmZXNCKx/N8ZHLBY0NXEAnrMHQFxXIFQ8MFeD6gKBI6UhHIsgRRFNDTGUfIUGfzJRKdEkzTHHOsWCxO+36FQmFKY7TCb3/7W7z3ve+tPz7zzDNx9913Q9f1pu/V6vet1eM3q729He3t7TO6h2maiEajszSj5oXD4ZaOT0S0kPB7IhFRgN8Ppy4UCuOlQwPQPB+uJyGXt5AvVRExI5Ck1jfOGhwuwBdVaIqEdCoGAQJ6OhMwwzPbIDefWv2zN0NFRERERERERDRlZkgDBEBVZCiKBMepIl+0EYs0306n4rjI5EoAgGQ8BEkSkS9asCwXZctB2XIgiQKqng8AMHQFbakIREGAqkro7khAU7m0QdRIowXwfD4/7fs1unYhVCravn07rr/+eti2DQBYtWoVfvWrXyGdTk/rfo3et2KxOO3wZLPv2+nyuRERERER0alBliW0p6Po7csiHg2hbFXgOB6GsyWkk60Nw1i2g3wx+FkxFQ9DgAAzrC2qQNFC0PpoGBEREREREREtGpIk1tudjQSJsvkyPM9r6j6e52NgqADfB0KGgqhpIGxo6EzH0NMZRyyiQ5JPBIqiER0d6ShEQYBhqFjWnWKgiGgCK1euHHNsYGBg2vc7+VpBEBqOMZ927NiB1772tfXgzKpVq7B161Z0d3dP+56RSASpVGrUMc/zMDw8PK379ff3jzm2atWqcZ8/259bs+MTERERERE1KxYxEA5pEAUBqUQQJCqUbJSsSsvm5Ps+hjJB1VfT1KBrCgRRQHuKFaiaxVARERERERERETUlGQ8DCKoWKYoIz/ORK1hN3SOTK6HiVCHJApK1BSdDVyCKAhRZQiIWxpLOBNrSJtJJE8lYMGY8GsLSrsSCKKFNtJCdddZZY44dPXp0WvfKZrMolUqjji1btqylJdh37NiBa665BoODgwCAFStWYOvWrVi2bNmM7z2b792xY8emdP8Ra9euhSyPDkxOd+zpjE9ERERERDQdnW1RiKIAXVUQjQStqAeHC01vQpsNvu+fWHeSBMSjIQBAOmFCUaR5n89ixxU4IiIiIiIiImqKoav1UtHxWLAwk8uX4VantlBUtir1EFIqbkIWRWiajKXdSaxe3o7O9hgMXYEAAWFdq7dca09H0dEWhSAIc/PCiE4hZ5555phj+/fvn9a9Gl3X6P7z5dlnn8U111xTr+CzfPly/OY3v5mVQBHQ2vdOVVWcccYZo44NDQ1NuwXagQMHRj0Oh8NYvnz5tO5FREREREQ0npE2aECwIUxRRFSrPoYypUmunF3Fko2jxzPI5oN1p0QsDFkUoaoyErU1LGoOQ0VERERERERE1LR0MgIACOsaNE2G5wfBoslUqx4GhgsAgIipI6SrEEQBXe1xCIIAURQQixhY1pPCiiUpJONhxKIGlnUnufhD1ISOjg50dnaOOvb888/D9/2m77Vz584xxzZt2jTdqc3Ic889h6uvvroeKFq2bBm2bt06q0GZRq9t165d07rXye+doiiTBrJma/xjx46Nadt2zjnnMJhJRERERERzYrw2aMWSPaP75gsWDh4dwoEjg+jtzyK
TK8GynVE/31q2g6N9GfQPFeC4HiRJQDIeCjaqIaikxJ+FpoehIiIiIiIiIiJqmqbKiEYMAEAiGvyZL1pw3eqE1w1mCqhWfSiKWA8JtSVNaKo85rmapqAtFUFnWwyGrs7yKyA69V1//fWjHheLRbzwwgtN3+fJJ5+c9N7zYSRQ1N/fDwBYsmQJtm7dipUrV87qOG94wxvGLDY/9dRTTd8nn89j7969o45dddVVk7aNa/TeTmf8hfK5ERERERHR6aMjfaINWuxlbdDsijOt+2XzZQxmivA8H74PWLaLTK6M3v4cDh0dQm9/tvZPDpVKFYIIxGMGejoTiJrBelU6aXJdaQYYKiIiIiIiIiKiaUknTEAAdE2FrsvwfSCTG79aUb5ooVR2ACGodCQKAkIhFYlYeB5nTXT6eOMb3zjm2AMPPND0fR588MFRj7u6unDxxRdPe17TsWvXLlx99dXo6+sDAPT09GDr1q1YtWrVpNfu27cPf/EXf4G/+Iu/wH/+539O+vyenh5cdNFFo46d/B5MxcMPPwzPG90WstFncrLXv/71UNXRC96z8blNdXwiIiIiIqLpUpQTbdASsTBChgLPB/oG8nAm2Yh2slyhjOFs0D4tHtXR0xlHKhFGOKRCkgV4tZCRZbuAEFTE7ulMIB4JQRQE6LqCpd2JetUkmh6GioiIiIiIiIhoWhRFQjwaVBsa+bNQsuE4YxeJ7IqL4Uyx9lwDmiJDkkR0tcXmb8JEp5lrrrkG8Xh81LF77723qXv09/dj27Zto4696U1vmtey8bt378ZVV11VDxR1d3dj69atOOOMM6Z0/aFDh/C5z30On/vc5/Czn/1sSte85S1vGXOPZ555pql5//SnPx31WJIk3HjjjZNeF4vFcO2114469otf/ALVanML8CePf9ZZZ2H9+vVN3YOIiIiIiKhZsYhRXydKJyNQVQlVz0ffYG7Mxovx5AsWhjInAkXxaBiKLCES1tGWjGBpZxLdnXEk42FEIzp6OuJIxcOQRRGqKqG7I47lPSmEDG3OXufpgqEiIiIiIiIiIpq2VDwMoVbWOmQoAIBMrlQ/73kehrJF9PZn4fmArsuIR4KFpY62KGRZasm8iU4HqqriQx/60Khj99xzDwYHB6d8j+9+97ujFn0VRcFtt9025esfeeQR/OM//iO+/vWvo1AoTPm6Ec8//zyuuuoqHD9+HEBQJWnr1q1Ys2ZN0/dqxnve8x4kEolRx771rW9N+XrHccZURbrlllvQ2dk5pev/8i//ctTj/v5+/PznP5/y+E8//fSYENTJ9yQiIiIiIpor7ekIQiEVoiCgPRWBLItwHA99g3n4vj/htfmihcHaxrRYJAgUAUAiHkIiHoKmyQAAVZYQNXUkY0HgSJJFdLRFsWJJGhFTn9sXeBphqIiIiIiIiIiIpk2WJSRjo6sVFcsV2BUXxZKNI8czyOUt+D4QDqlIJyMAgFjUQCTMBR6iuXbbbbehra2t/ti2bXz+85+f0rWO4+DOO+8cdezWW2+dUssxALj99ttxySWX4K/+6q/wrne9C5s2baqHg6Ziz549uOqqq9Db2wsA6OzsxNatW7F27dop32O6YrHYmBDOV7/6VQwPD0/p+m984xv1ykoAoGkaPvGJT0x5/Fe+8pV47WtfO+rYZz/72Slf/4//+I+jHp911lm4+eabp3w9ERERERHRTAiCgO72OFRVhixJaE9FIIpBu7LB4eK41+WLVv18NKIjEQsCRemkifZUFO2pIDR0xop2dHfGEY+FEA5paEtFsGppG+LR0LxW1j0dyK2eAM2tfD6Pbdu2Yc+ePchkMlAUBe3t7TjnnHOwadOmRfUXateuXdi0aRMqlcqo45MlGWcy3pNPPoljx47BsixEo1GsXr0aW7ZsQSqVmpMxR7iui+3bt+O5557DwMAAPM9DMpnEhg0bsHnzZug6F96JiIiIiGjhSMTCGM6WoCoyzLCGQtHG8YEcPC/4eU2WRaTiYRi
6CgDQNBntqWgrp0x02jBNE5/85Cfx/ve/v37sjjvuwM033zxpOOfTn/40Dhw4UH8cj8fx8Y9/fErj7tq1C5/85CdHHdu3bx8+8pGP4N///d8nvX4kUHTs2DEAQHt7O+6//36sW7duSuPPhj//8z/Hl7/8Zezfvx8AkM1m8eEPfxj/9m//NuF1fX19+NjHPjbq2Ac/+EEsW7asqfE/85nP4Ne//jUcxwEA/O53v8O3v/1t/I//8T8mvG7r1q343ve+N+rYHXfcAVHk/lIiIiIiIpo/kiRiSWcCB44OQoWMtmQExwfzKJRsyLKIqKmjWvVR9Tx4no+K4yKTKwMAIrUKRACQSppIJcwx946EdW5YmwcMFZ2iHnvsMXzmM5/BT3/60zEhnBGdnZ249dZb8aEPfQjJZHKeZ9gc3/fx3ve+d9zXMlvK5TK++MUv4ktf+hJeeumlhs+RJAlXXXUVPvKRj+BVr3rVrI5/+PBh3HHHHbjrrrvG3fkWDofx5je/GX/913+N9evXz+r4RERERERE0yFJIpLxMAaGCohHDBRKNjzPhyACsYiBWMSAAAEQgnZpybgJUVw8m1yIFrv3ve99+NWvfoUf/ehHAIL1j+uvvx733Xcfli5d2vCau+66C7fffnv9sSAI+MY3voGurq4pjbl161ZUq9Uxx3/xi19Meu3evXtx1VVX4ejRo/VjfX19OPPMM6c09mwxDAPf+973cMUVV8C2bQDA1772NaxZs2bcVmJDQ0O44YYb0N/fXz+2ZcsW/N3f/V3T45977rm444478MEPfrB+7L3vfS+6u7tx9dVXN7zm6aefxtve9rZRm/A+9KEP4XWve13T4xMREREREc2UokhY0pnAwaNDMHQVyVgYQ5kiMrlyPUB0soipIxWvBYoSYaRPChTR/OL2lFOM4zj4wAc+gC1btuCHP/zhqBCOoiijKhP19vbiU5/6FDZs2IB77rmnFdOdsq997Wt44IEH5nSMbdu2YePGjfjwhz88JlCkqmr936vVKu677z68+tWvxtvf/nbk8/lZGf/rX/86zjzzTHz+858fFSgSRRGyfCL/VywW8a1vfQvnnnsu/v7v/37OKjURERERERE1IxELQ5JFyLKESFiHoSvobo8jHglBgIBQSMXKpWmkkxEGioha4Dvf+c6oIMqePXuwadMm3HnnnThy5AiAYM3jsccewzv///buOzqqcn///jWpBEgFEjoBlSKIoUjAH00FRUGIHg6oHBUVC3oQKaIiHIpd8KjYvoIIijTlKEhT6WIhoFRpoRNpCRAIpJf9/MHD6GZmkmnJJPB+rTVrsT+z7zIzyYa5uPfeDzygBx54QIWFhZIuLCj64IMPlJCQ4PR4jvIKZ3KMf/zjH6YFRb4UHx+vL7/8UsHBwdba888/rx49emjNmjXW7C01NVVTp05VXFycEhMTrfs2a9ZMixYtMmVLrhg8eLCef/5563ZmZqZuu+02PfPMM/rjjz+s7+e+ffs0ZswY3XjjjTp58qR1/379+mnixIlujQ0AAAAA3lAhOFA1osMlSWGVKygs9K+rC/n5XbjCdYXgAFUMCVREeIh1QVFURCVVjQr1yZzxFxYVXUays7N1xx13aNKkSdZAITIyUhMnTlRycrJyc3OVm5urdevWme6hnpKSop49e2ry5Mm+mnqRUlNTHZ795S2LFy9W586dtX//fmvtjjvu0MqVK5Wdna2cnBydOHFCkydPVu3ata37zJ07V+3bt9fp06c9Gn/06NF65JFHrAuUAgICNGjQIG3dulV5eXnKy8tTUlKSRo0apZCQEElSbm6uXnzxRT344IMsLAIAAADgc35+FlWJuHDmWFRERcVUDVNggL/8A/xUMyZCdWpEKSiQCyYDvhISEqKlS5fqqaeesp50dvr0aQ0dOlS1a9dWcHCwgoKC1KZNG82YMcParmrVqpo/f74GDhzo0nidOnWye7utW2+9tdi2Z8+edWmsktazZ0+tWrVKsbG
x1trFLKlChQqqUKGCoqOjNWDAACUnJ1v36d27t37++WdVrVrVo/Ffe+01TZkyRZUrXzjG5ufn691339V1112nwMBABQYG6uqrr9b48eOVmZkp6cLJhePHj9eMGTO47RkAAAAAnwutVEHRVS8sEIoKr6Ta1SNUr3aU6tasotrVI1W9Wriiq4QpIrTihX0iKqlaFRYUlQV8o7yM3H///Vq+fLl1++qrr9amTZs0bNgw60KYgIAAxcfH64svvtD06dOtIVJhYaEGDhyohQsX+mTuRRkyZIjHi3aK8ttvv6l3797Kzs621l555RUtXrxYN910k/VMtOjoaD366KPasmWLbrjhBuu+W7duVY8ePaz3t3fVBx98oJdfftm6HRISou+++06TJk3SddddZw1+rrnmGr300kv65ZdfVKVKFev+M2bM0IgRI9waGwAAAAC8KSIsRJUqBltvdRYZUVEN6lRTaGXubw+UBUFBQXr//fe1bt06JSQkmK6ek5uba70ykSTFxMRo5MiR2rVrl3r27OnyWM2aNdO4ceNMV82OjY3VhAkTPHsRPtKuXTtt375db7zxhmlxkWEY1lujSReuOH3LLbfou+++01dffaWwsDCvjD9gwADt3LlT//73vxUREWGtFxQUKD8/37pdsWJF3X///dqyZYtGjx5tev8BAAAAwJciwyupSuSFqxAFBPhfyI904US1wEB/VagQqMqVglU9OpwFRWUIpwheJiZPnqx58+ZZtytUqKCFCxeqXr16Dts8+OCD2rt3r3VBS2FhoR588EHt2LFD1atXL/E5O2P58uWaOXOmpAtnWAUFBSkjI8Nr/WdlZalv376mBUX9+/fXyJEjHbaJiorSwoUL1axZM+vlpH/99VeNGTNGr776qkvjb9u2TUOHDjXVPvroI91yyy0O28TFxWnOnDnq2rWrtTZx4kR16dJFt912m0vjAwAAAIA3WSwW1a4RqZzcfAX4+8nfn3OZgLKoTZs2+uabb5Senq7ExEQlJSXpzJkzCggIUExMjJo3b664uDiPr3AzatQodevWTWvXrlVYWJj69Omj0NDig+GDBw96NG5JqVixokaMGKERI0Zo+/bt2rx5s44ePaqcnByFhYWpQYMGatu2rcdXJnKkdu3aeu+99/T2229rw4YN2r59u1JTUyVduFp548aNFR8fb73KNQAAAACUNVWjQhUeWlEFhYXy97uQHfn5cTJEWcaiosvA+fPnNXr0aFNt+PDhaty4cbFtR48erc8//1yHDx+WJKWlpenll1/W+++/XyJzdUV2drbp0trDhg3T7Nmzvbqo6P333zfd8iwsLExvvfVWse1iYmL00ksvmeb39ttv68knnzTdHq04zz//vHJzc63b7du314MPPlhsuy5duqhv376aO3eutfbss8+qa9euXNIaAAAAgM8FBxE3AOVBWFiYunbtajpxydtat26t1q1bl1j/vtK0aVM1bdrUJ2MHBASoXbt2ateunU/GBwAAAABPBAb6K1D+vp4GnMTqg8vAO++8o5SUFOt2cHCwBg8e7FTboKAgPfPMM6ba5MmTdeDAAW9O0S2vvPKK9u7dK0mqX7++/vOf/3i1/7Nnz+r111831QYMGKCoqCin2j/88MOmM8+ys7M1btw4p8f/6aeftGTJElPNlduYPffcc6btbdu2afbs2U63BwAAAAAAAAAAAAAAcIRFReVcXl6e/vvf/5pqt99+u0uXWe7Xr5/p6jb2+ixtO3fu1Jtvvmnd/uCDD7x+6eYpU6bo9OnTptoDDzzgdPugoCDdc889ptr06dN14sQJp9pfuqCpSpUquuOOO5wev0WLFmrWrFmRfQIAAAAAAAAAAAAAALiDRUXl3IoVK5SWlmaqde/e3aU+oqOj1aZNG1Pt66+/lmEYHs/PHYZh6IknnrDeFuyf//ynbr/9dq+PM2/ePNN27dq1df3117vUR48ePUzb+fn5mj9/frHt0tPTtWzZMlOtW7du8vd
37TJvl47/xx9/KCkpyaU+AAAAAAAAAAAAAAAALsWionLum2++sal16NDB5X4ubXP06FElJia6PS9PfPrpp/rxxx8lSWFhYXrnnXe8PsbRo0e1fv16U619+/Yu93PjjTearvIk2f9MLrV48WLroqmLvPG5SRcWhAEAAAAAAAAAAAAAAHiCRUXl3MKFC03blSpVUsOGDV3up0WLFsX2XRpSU1M1YsQI6/Yrr7yimjVren2cRYsW2VyJyd57UJzQ0FBdffXVptrKlSuVkZFRZDt7760745eVzw0AAAAAAAAAAAAAAFxeWFRUjqWkpOjYsWOmWqNGjWSxWFzu69prr7Wpbd682d2puW3YsGE6ffq0JOmGG27Qk08+WSLj2HttTZo0cauvS9+7vLw87dixo1TGr1GjhiIiIky1rVu3+uzWdQAAAAAAAAAAAAAA4PLAoqJyzN7CldjYWLf6steuuIUx3rZy5UrNmDFDkuTv76+PP/7Y5tZi3uLL9y4vL0979+411aKiohQaGuqV8c+fP6/Dhw+71RcAAAAAAAAAAAAAAIDEoqJybfv27Ta1GjVquNVXeHi4QkJCTLVDhw4Vexsvb8nJydETTzxh3R40aJBbtwNzljffu+rVqzvV/0VJSUnKy8vzytjujA8AAAAAAAAAAAAAAFAcFhWVY/v377epVa1a1e3+qlWrZto2DEMHDhxwuz9XvPLKK9qzZ48kqXbt2ho/fnyJjXXu3DmdPHnSVPPz81NUVJRb/UVHR9vU7H02RT3nyefm6vgAAAAAAAAAAAAAAADFCfD1BOC+9PR0m1pYWJjb/dm7/Za9Mbxt165deuONN6zbkyZNcvtWYM6w95oqVark9q3WXH3fytvnlpKSotTUVJfaXHp7t/Pnz5fKz9JFl15hq7SuuAUAZQ3HQwC4gOMhyrrz58/7egoAAAAAAACADRYVlWP2QsdKlSq53Z+9tqURbD7xxBPKzc2VJN1555266667SnQ8X79vvh7fVR9++KHGjRvnUR/r16/X8ePHvTQj98YHAHA8BICLOB6irDl8+LCvpwAAAAAAAADY4PZn5di5c+dsagEB7q8Ts9fW3hje9Omnn2rNmjWSLiyOef/990t0PMn375uvxwcAAAAAAAAAAAAAACgOi4rKsaysLJuav7+/2/3ZW5xibwxvOXnypEaMGGHdHjt2rOrWrVti413k6/fN1+MDAAAAAAAAAAAAAAAUh9uflWMhISE2tYKCArf7s9fW3hjeMmzYMJ06dUqS1Lx5cz3zzDMlNtbf+fp98/X4rnryySf1z3/+06U2e/fuVUJCgnW7TZs2atKkidfmVJyMjAzTLS3atGnj0S3mAKC84ngIABdwPERZt3PnTl9PAQAAAAAAALDBoqJyLDQ01KaWn5/vdn/22tobwxtWrVqlzz//XJJksVj08ccfe3QLMFf4+n3z9fiuio6OVnR0tEd9VK5cWWFhYV6akesqVark0/EBoKzgeAgAF3A8RFlTuXJlX08BAAAAAAAAsMHtz8oxe6FjRkaG2/2dP3/eqTE8lZOToyeeeMK6/fjjj6tt27ZeH8cRX79vvh4fAAAAAAAAAAAAAACgOCwqKsfsnVl77tw5t/uz17YkrlT06quvKikpSZIUExOj1157zetjFMXe+5aRkaHCwkK3+nP1fSuvnxsAAAAAAAAAAAAAALhysKioHKtfv75N7eTJk273d2lbi8VidwxP7N69W6+//rp1++2331ZERIRXxyhOaGioqlSpYqoVFhYqLS3Nrf5SU1Ntag0aNHC4v7c/N1fHBwAAAAAAAAAAAAAAKE6ArycA9zVt2tSmdvToUbf6Onv2rDIzM021unXrev02Wi+++KJyc3MlSVFRUTpy5IgmTpzodPv09HSbWlHthw8fbrfetGlT/fjjj6ba0aNHbRYbOePYsWN2+3ekYcOGCggIUH5+vmlsd7k6PgAAAAAAAAAAAAAAQHFYVFSOXXvttTa1gwcPutW
XvXb2+vfU36/Ic/r0aT377LMe91lUH44WFV177bU2i4oOHjyo6667zuXxXX3vgoKCdPXVV2vXrl3W2unTp3Xu3Dm3blt26NAh03alSpVUr149l/sBAAAAAAAAAAAAAAC4iNuflWMxMTGqXr26qbZ7924ZhuFyXzt27LCpxcXFuTu1Ms/ea9u5c6dbfV363gUGBha7IMtb4x87dszmtm3NmzeXxWJxuS8AAAAAAAAAAAAAAICLWFRUzt15552m7YyMDO3Zs8flfjZt2lRs396wevVqGYbh9sPeFXiK2t+RHj162Cy82bx5s8uv59y5c9q7d6+pdtNNNxV72zh7760745fW5wYAAAAAAAAAAAAAAK4sLCoq5+666y6b2tq1a13u56effjJt16hRQ23btnV7XmVdrVq1dMMNN5hql74Hzvj1119VWFhoqtn7TC7VvXt3BQUFmWre+NycHR8AAAAAAAAAAAAAAKAoLCoq52655RZFRESYakuWLHGpj9TUVCUmJppqd99992V/C63evXubtpOTk7Vt2zaX+li0aJFp29/fXwkJCcW2Cw8PV5cuXUy177//XgUFBR6N37RpUzVu3NilPgAAAAAAAAAAAAAAAC7FoqJyLigoSEOGDDHVFi9erFOnTjndx6xZs0xX2wkMDNTQoUOdbr9u3Tq98cYb+vTTT3X+/Hmn2/naY489psjISFPt888/d7p9Xl6e5s6da6o9+OCDql69ulPtn3vuOdN2amqqvvvuO6fH37Jli80iqEv7BAAAAAAAAAAAAAAAcAeLii4DQ4cOVbVq1azbOTk5mjRpklNt8/Ly9Pbbb5tqAwYMUIMGDZxq/9JLL6ldu3Z6/vnn9cgjjyguLk4nTpxwfvI+FB4ebrMIZ8qUKUpLS3Oq/bRp05SSkmLdDg4O1pgxY5wev2PHjurWrZup9uabbzrd/o033jBtN23aVP369XO6PQAAAAAAAAAAAAAAgCMsKroMVK5cWePGjTPVJkyYoKSkpGLbvvLKKzp06JB1OyIiQqNHj3Zq3J07d9qMu2/fPo0YMcKp9mXB008/rdjYWOv22bNn9eyzzxbbLiUlRaNGjTLVnnnmGdWtW9el8V9//XUFBgZat3/88Ud98cUXxbZbtWqV5syZY6pNmDBBfn78SgMAAAAAAAAAAAAAAM+xAuEyMXDgQN11113W7aysLN15551KTk522GbGjBl66aWXrNsWi0XTpk1TjRo1nBpz1apVKigosKl///33Lszct0JCQjRnzhwFBwdba1OnTrW5CtDfnT59Wj179lRqaqq1Fh8fr/Hjx7s8/vXXX68JEyaYao8//rhWrlzpsM2WLVvUp08fGYZhrQ0ZMkS33367y+MDAAAAAAAAAAAAAADYE+DrCcB7Zs6cqR49elgXpCQlJSkuLk6jRo1Snz59VKtWLRUUFGjjxo167733NGPGDGtbi8WiDz74QAkJCU6P9/dFLc7UXTF37ly7C6LS09NtahMnTrSphYeH69FHH3VqrPj4eH355Zfq06ePcnJyJEnPP/+81q5dq2effVbt2rVTUFCQUlNT9e2332rcuHGmuTVr1kyLFi1SUFCQsy/PZPDgwTp+/Lhef/11SVJmZqZuu+02PfXUUxowYICaNm0qi8Wiffv26fPPP9fEiROVmZlpbd+vXz+77wEAAAAAAAAAAAAAAIC7WFR0GQkJCdHSpUs1dOhQffjhhzIMQ6dPn9bQoUM1dOhQBQUFKT8/X4WFhaZ2VatW1dSpU9WzZ0+XxuvUqZP8/Pxs+rv11ls9fi0fffSR1qxZ49S+9m5XVq9ePacXFUlSz549tWrVKt133306ePCgJGnx4sVavHixLBaLgoKCrAuO/q53796aOnWqwsLCnB7Lntdee01XXXWVhgwZovPnzys/P1/vvvuu3n33Xfn7+8tisSg/P9/UJjAwUKNHj9aoUaNksVg8Gh8AAAAAAAAAAAAAAODvuP3ZZSYoKEjvv/++1q1
bp4SEBNPVc3Jzc00LgGJiYjRy5Ejt2rXL5QVF0oUr9IwbN860oCU2Ntbmdl7lRbt27bR9+3a98cYbio2NtdYNwzAtKPLz89Mtt9yi7777Tl999ZXHC4ouGjBggHbu3Kl///vfioiIsNYLCgpMC4oqVqyo+++/X1u2bNHo0aNZUAQAAAAAAAAAAAAAALyOKxVdptq0aaNvvvlG6enpSkxMVFJSks6cOaOAgADFxMSoefPmiouLk5+fZ+vKRo0apW7dumnt2rUKCwtTnz59FBoa6vH8V69e7XEf7qhYsaJGjBihESNGaPv27dq8ebOOHj2qnJwchYWFqUGDBmrbtq2qVq1aIuPXrl1b7733nt5++21t2LBB27dvV2pqqiQpMjJSjRs3Vnx8vEJCQkpkfAAAAAAAAAAAAAAAAIlFRZe9sLAwde3aVV27di2xMVq3bq3WrVuXWP++0rRpUzVt2tQnYwcEBKhdu3Zq166dT8b31KW3itu7d2+pjn/+/HkdPnzYur1z505Vrly5VOcAAGUBx0MAuIDjIcq6S78z2bv9NgBcLsiNAKDs4JgIABdwPERZ5uvciEVFALwuOTnZtJ2QkOCbiQAAAABAOZScnKyWLVv6ehoAUCLIjQAAAADAfaWdG3l27ysAAAAAAAAAAAAAAAAAlx0WFQEAAAAAAAAAAAAAAAAwsRiGYfh6EgAuL2fOnNGaNWus23Xq1FFwcHCpjb93717TpbPnz5+vq6++utTGB4CyguMhAFzA8RBlXU5Ojul2QJ06dVJERITvJgQAJYjcCADKDo6JAHABx0OUZb7OjQJKbSQAV4yIiAj16tXL19Owuvrqq9W0aVNfTwMAfI7jIQBcwPEQZVHLli19PQUAKBXkRgBQdnFMBIALOB6irPFlbsTtzwAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAAAgAmLigAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAAAgAmLigAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAAAgAmLigAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAAAgAmLigAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAAAgAmLigAAAAAAAAAAAAAAAACYBPh6AgDgbdWqVdOYMWNM2wBwJeJ4CAAXcDwEAAAX8e8CAPgLx0QAuIDjIeCYxTAMw9eTAAAAAAAAAAAAAAAAAFB2cPszAAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAABgwqIiAAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAABgwqIiAAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAABgwqIiAAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAABgwqIiAAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAABgwqIiAAAAAAAAAAAAAAAAACYsKgIAAAAAAAAAAAAAAABgEuDrCQCAN507d06JiYlKSkrSmTNnFBgYqOjoaDVv3lxxcXGyWCy+niIAlHmnTp1SYmKi9u3bp/T0dFWoUEE1atRQXFycrr32Wl9PDwAAAADcQm4EAJ4jNwIA4MrCoiIAl4UNGzbo9ddf16JFi5Sbm2t3n+rVq2vAgAEaMmSIoqKiSnmGAGDr4MGDql+/vtf627Bhg1q3bu12+2XLlmnChAlauXKlCgoK7O5Tv359DRw4UE899ZQqVqzo9lgAriynT5/WoEGDNGvWLGtt2rRp6t+/f4mO6+uwe//+/fr999+VnJysrKwsVa5cWbGxsbrhhhtUs2bNEh8fAABcQG4EoDwiNwJwpSA3IjdC2WYxDMPw9SQAwF15eXkaPny43nvvPV16OAsMDFR+fr5NPTo6Wp9++qm6d+9emlMFABtlJRw6d+6cHnvsMc2ZM8fmuaCgILuhe4MGDTRr1izFx8e7NVcAV44FCxboiSee0PHjx031kgyHfBl25+f
na9q0aZo0aZL++OMPu/tYLBbFx8dryJAh6tOnj9fGBgAAZuRGAMozciMAVwJyI1vkRihr/Hw9AQBwV3Z2tu644w5NmjTJGgBFRkZq4sSJSk5OVm5urnJzc7Vu3Tr169fP2i4lJUU9e/bU5MmTfTV1ACgzTp8+rQ4dOpiCodq1a2vKlCk6ceKEcnJylJ2drZUrV+r222+37rN//3517txZixYt8sW0AZQDp0+fVr9+/ZSQkGATDJWUc+fO6d5779Wtt96qZcuWmYKhoKAg074HDhzQiBEjdN111ykxMdEr4yclJalNmzZ67LHHbIKhv49vGIbWrVunvn376pZbbim19wcAgCsJuREAeI7cCEBJITciN0L5waIiAOXW/fffr+XLl1u3r776am3atEnDhg1T7dq1JUkBAQGKj4/XF198oenTp8tisUiSCgsLNXDgQC1cuNAncweAsiAvL089evTQli1brLX4+Hht2bJFAwYMUHR0tCQpODhYN910k5YsWaLx48db983OztY///lP/fbbb6U+dwBl24IFC9S0aVPTZatLmq/D7t9++01t27bVpk2brLV27dpp4cKFOn/+vHJycpSWlqbZs2erSZMm1n1WrlypNm3a6MCBAx6NDwAAzMiNAMAz5EYASgq50QXkRig3DAAohz7++GNDkvVRoUIFY+fOncW2GzVqlKldZGSkcezYsVKYMQDYOnDggPV4NGbMmFIf/4UXXjAdE6Ojo42UlJRi2/3rX/8ytWvQoIGRlZVVCjMGUNadOnXKuO+++2yOLZ07dzbVJBnTpk3z2ri5ublGu3btTP3Hx8cbp06dcthm/PjxNv+e3LBhg1vjHzx40KhSpYqpvyeeeMIoKCiwu39mZqbRvXt30/5XX321cebMGbfGBwAAZuRGAC4H5EYALjfkRuRGKJ+4UhGAcuf8+fMaPXq0qTZ8+HA1bty42LajR49W3bp1rdtpaWl6+eWXvT5HACjrkpOT9d///tdUe/XVV1WtWrVi27799tuqXLmydXv//v16//33vT5HAOWLvbPM+vbtq+3bt6tTp04lOvaYMWP066+/Wrejo6O1cOFCRUVFOWwzevRo/etf/7JuZ2dnq2/fvsrOznZpbMMwdN999+nUqVPW2s0336wPPvhAfn72v3KHhIToyy+/VMOGDa21vXv3auDAgS6NDQAAbJEbAYDnyI0AeBu5EbkRyi8WFQEod9555x2lpKRYt4ODgzV48GCn2gYFBemZZ54x1SZPnsxlAwFcccaNG6ecnBzrdkxMjB544AGn2latWlUPP/ywqfb6668rPT3dq3MEUL4MHjzYeo/3mJgYff3115ozZ46qVq1aouP6OuyeN2+efvnlF+u2v7+/PvzwQ4fB0EUVK1bUu+++a6rNnj1bGzZscGl8AABgRm4EAJ4jNwLgbeRGF5AboTxiURGAciUvL8/mL//bb7/dpX909OvXz/SXtb0+AeBydvz4cU2fPt1Uu/feexUYGOh0H5cGSadOndKUKVO8MT0A5dx9992nHTt26K677iqV8XwZdhcUFNhcCaFXr15q1KiRU+27deum5s2bm2ojR450qi0AALBFbgQAniM3AlCSyI3IjVD+sKgIQLmyYsUKpaWlmWrdu3d3qY/o6Gi1adPGVPv6669lGIbH8wOA8mD+/PkqKCgw1Vw9lrZq1Uo1atQw1ebNm+fx3ACUXzVr1tSCBQs0c+bMIi8f7U2+Dru/+eYb7d69u8j+XB1/+fLl+v33313qAwAAXEBuBACeIzcCUBLIjez35+r45EbwBRYVAShXvvnmG5tahw4dXO7n0jZHjx5VYmKi2/MCgPLk0mOpv7+/2rVr53I/7du3N20nJibq6NGjHs0NQPm1YsUK9ezZs1TH9HXYfel+QUFB6tq1q0vj9+jRo9h+AQCAc8iNAMBz5EYASgK5EbkRyi8WFQEoVxYuXGjarlSpkho2bOhyPy1atCi2bwC4HGVkZGjVqlWmWsO
GDVWpUiWX+7r0WGoYhhYvXuzR/ACUXyEhIaU+pi/D7pycHC1ZssRUa9mypSpWrOjS2I0aNVK1atVMNXv/IQoAAIpHbgQAniE3AlBSyI3IjVB+sagIQLmRkpKiY8eOmWqNGjWSxWJxua9rr73WprZ582Z3pwYA5cb27duVl5dnqjVp0sStvjiWAvAlX4fda9as0blz54rsx93xd+/erT179rjVFwAAVypyIwDwHLkRgMsFuRHgPQG+ngAAOGvHjh02tdjYWLf6stfOXv8A4As5OTnavn27du/erbNnzyovL09RUVGqUqWKGjVqpPr167vdN8dSAJcLX4fd9p73ZPwffvjBpv9rrrnGrf4AALgS8V0HwJWC3AgAikduBHgPi4oAlBvbt2+3qV16H1NnhYeHKyQkRFlZWdbaoUOHlJGR4dYqZQDwhrVr1+q9997TokWLTMenS9WtW1e33XabhgwZ4vIXEW8eS6tXr+5U/wBQEnwddvt6fAAAYEZuBOByR24EAM7zdW7j6/EBb+L2ZwDKjf3799vUqlat6nZ/l96D1DAMHThwwO3+AMAT//d//6eOHTvqq6++KjIYkqTDhw9rypQpatq0qfr27atTp045PY43j6VVq1a1uZVAamqqzp8/71Z/AOAKX4fdvh4fAACYkRsBuJyRGwGAa3yd2/h6fMCbWFQEoNxIT0+3qYWFhbndX2hoqFNjAEBpOHHihCQpJCRE/fv318KFC7V//35lZmbqzJkz2rlzpz788EO1bdvW2sYwDH355Zdq1aqVNm7c6NQ43jyW+vv7q2LFik6NAQDe5uuw25vjR0dHO9U/AABwjNwIwOWM3AgAXENuBHgPi4oAlBv2/nL25JLT9tpylgQAX+rcubO2bdumadOmqUePHqpfv75CQkIUHh6uxo0ba+DAgfr111/16aefKigoyNru0KFD6tKli/bs2VPsGBxLAVwufB12nzt3zmvj85+WAAB4ju86AC535EYA4DxyI8B7WFQEoNyw9xdwQECA2/3Za2tvDAAoDXfccYe+//57XXXVVcXu+9BDD+mrr74ynR2Rlpamnj17KiMjo8i2HEsBXC58GXbn5OQoLy/Pa+MTtAMA4Dm+6wC4nJEbAYBryI0A72FREYByw969ov39/d3uz94XmuLuRw0A3lS9enUtW7ZMy5Yt0//+9z/TWWTF6dmzpx555BFTbdeuXZo0aVKR7TiWArhc+DLsdlR3d3yCdgAAPMd3HQCXG3IjAHAfuRHgPSwqAlBuhISE2NQKCgrc7s9eW3tjAEBJqVChgrp06aIuXbqoQoUKLrcfM2aM/PzM/5x76623ijxLgWMpgMuFL8NuR3V3xydoBwDAc3zXAXC5ITcCAPeRGwHew6IiAOWGvXuG5ufnu92fvbb2xgCAsqp27dq6+eabTbVTp07phx9+cNiGYymAy4Uvw25HdXfHJ2gHAMBzfNcBADNyIwBXMnIjwHtYVASg3KhcubJNrbh7QBfF3hkZ9sYAgLKsQ4cONrUVK1Y43J9jKYDLhS/Dbkd1d8cnaAcAwHN81wEAW+RGAK5U5EaA97CoCEC5ERYWZlPz5J6h9trylzCA8qZFixY2tcTERIf7e/NYWlhYqMzMTJs6x1IApcGXYXdwcLACAwO9Nj5BOwAAniM3AgBb5EYArlTkRoD3sKgIQLlRv359m9rJkyfd7u/SthaLxe4YAFCWxcTE2NRSUlIc7u/NY+nJkydlGIapVrVqVcIhAKXC12G3vefcHZ//tAQAwHPkRgBgi9wIwJWK3AjwHhYVASg3mjZtalM7evSoW32dPXvW5h8AdevWZWUvgHInPDzcplZU2OPNY+mxY8ec6h8ASoKvw25vjp+ammpTa9CggVt9AQBwpSI3AgBb5EYArlTkRoD3sKgIQLlx7bXX2tQOHjzoVl/22tnrHwDKuku/zEgXzqB1hGMpgMuFr8NuX48PAADM+K4DALbIjQBcqXyd2/h6fMCbWFQEoNy
IiYlR9erVTbXdu3fb/WJUnB07dtjU4uLi3J0aAPjM2bNnbWpVq1Z1uH/Tpk1t7ue8c+dOt8bmWArAl3wddvt6fAAAYEZuBAC2yI0AXKl8ndv4enzAm1hUBKBcufPOO03bGRkZ2rNnj8v9bNq0qdi+AaAkbdu2TT/99JN++ukn5eTkuN3PiRMnbGrVqlVzuH/lypXVuXNnU23Pnj127wldnEuPpRaLRT169HC5HwBwh6/DbnvPE7YDAOBb5EYALhfkRgDgGXIjwHtYVASgXLnrrrtsamvXrnW5n59++sm0XaNGDbVt29bteQGAqwYNGqQOHTqoQ4cOSkpKcrufjRs32tRuuOGGIttceizNz8/Xr7/+6vLYlx5L27Rpo5o1a7rcDwC4w9dhd+fOnRUaGmqqbd682eWx7bVr2LChGjZs6FZfAABcyciNAFwuyI0AwDPkRoD3sKgIQLlyyy23KCIiwlRbsmSJS32kpqYqMTHRVLv77ruLvJc0AJSkI0eOuN3WXkDepUuXItskJCTI39/fVHP1WLpx40abezn37t3bpT4AwFO+DLuDg4N1++23m2q///67srKyXBo7KSlJKSkpppq9/xAFAADFIzcCcDkiNwIA95AbAd7BoiIA5UpQUJCGDBliqi1evFinTp1yuo9Zs2apsLDQuh0YGKihQ4d6bY4A4Koff/zRrXaHDh3SqlWrTLWIiAh17dq1yHY1atTQAw88YKrNnj1b+fn5To89Y8YM03ZUVJQeffRRp9sDgDf4Ouy+dL/c3FwtX77cpfEXLVpUbL8AAMA55EYALkfkRgDgHnIjwDtYVASg3Bk6dKjpvs85OTmaNGmSU23z8vL09ttvm2oDBgxQgwYNvDpHAHDFZ599puzsbJfbjR07VoZhmGpDhgxRWFiYU22Dg4Ot28eOHbMJfBw5deqUpk6daqo999xzCg8Pd6o9AHiLr8Puu+66S9dcc42p9vnnnzs9tr3xb775ZrVu3dqlPgAAwF/IjQBcbsiNAMA95EaAd7CoCEC5U7lyZY0bN85UmzBhglP3ln7llVd06NAh63ZERIRGjx7t9TkCgCuOHj2qF154waU2X3/9taZPn26q1a9fX88884xT7evWravBgwebai+88IJOnjxZbNthw4bp3Llz1u3Y2FgNGjTIqXEBwNt8GXYHBATopZdeMtW++eYb7dmzx6n2P/zwgzZv3myqvfrqq061BQAA9pEbAbjckBsBgPvIjQDPsagIQLk0cOBA0z1Ds7KydOeddyo5OdlhmxkzZpj+8rZYLJo2bZpq1KhRonMFAGe88847Gj58uPLy8ordd/r06brnnntMtdDQUC1cuNCps80ueumllxQfH2/dPnHihHr27Km0tDSHbV599VV99tln1u3g4GDNnTtXISEhTo8LAN7k67C7T58+atu2rXW7oKBATz75pM0ZwZfKysqymXffvn1Nx2UAAOAeciMAlxtyIwBwD7kR4DkWFQEot2bOnKmbb77Zup2UlKS4uDi9/fbbOnLkiKQLfzlv2LBBDzzwgB544AEVFhZKuhAMffDBB0pISPDF1AHArrfeekuNGjXSm2++qY0bNyorK0vShUvwHz58WNOmTVP79u310EMPmUKkGjVqaOnSpWratKlL4wUFBWnRokW67rrrrLVff/1V119/vaZOnarU1FRJF+71vHr1avXo0UMvvviidd/g4GB9+eWXatOmjScvGwA85suw22KxaNasWYqKirLWli9frn//+9/Wf3teKjs7W3379tWuXbustQYNGuijjz5yaWwAAOAYuRGAyw25EQC4h9wI8IzFKG4ZHACUYbm5uRo6dKg+/PBDm1W9QUFBys/Pt/lLuWrVqpo6dap69uxZmlMFAJPk5GRNmTJFU6dO1dGjRx3uFxQUpLy8PLtnLlgsFvXq1Usff/yxoqOj3Z5Lenq6BgwYoK+++srmueDgYOXm5tqMHxsbq1mzZqldu3Zujwvg8jNlyhSdPXv
Wpv7DDz9o2bJlplrfvn3t3gO+Tp066tu3r8tjnzx5UjfffLO2bdtm6mvMmDHq2bOnqlWrptzcXP3yyy+aOHGiFi9ebN3vYtjtyb8PExMTddttt5le/4033qgXX3xRnTt3VsWKFXX27Fl9//33GjdunHbs2GHdr3bt2lq9erWuuuoqt8cHAAC2yI0AlFfkRgAuR+RG5EYon1hUBOCysH79er322mtasmSJcnNz7e4TExOjRx55REOHDlWVKlVKeYYAYF9BQYHWrFmjxYsXa+nSpdq1a1exlz6tU6eOunXrpmeeeUbXXnut1+byww8/6M0339SqVascniURGxurJ554QoMGDVLFihW9NjaAy0NsbKwOHTrkUR+dOnXS6tWr3Wrr67B7165duueee7Rlyxa74+fk5NjUO3furFmzZnFrFQAAShC5EYDyitwIwOWE3IjcCOUTi4oAXFbS09OVmJiopKQknTlzRgEBAYqJiVHz5s0VFxcnPz/u+gigbMvIyNAff/yhvXv3Ki0tTenp6QoKClJkZKSqVq2quLg41atXr0TncPLkSSUmJmrfvn1KT09XcHCwatasqbi4OJcvlQ3gyuLrcOgiX4bd+fn5+vTTTzVp0iRt377d4X5t2rTR0KFD1adPH1ksFq+NDwAAHCM3AlDekRsBKM/IjciNUD6xqAgAAAAAcFnyddi9b98+/f7770pOTlZWVpYqVaqk2NhYtWnTRrVq1Srx8QEAAAAAAGAfuRHgHBYVAQAAAAAAAAAAAAAAADDheq4AAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATFhUBAAAAAAAAAAAAAAAAMCERUUAAAAAAAAAAAAAAAAATAJ8PQEAAK50Y8eO1bhx43w9DZNOnTpp9erVvp4GAB/JyMhQVlaWx/1YLBZVqFBBISEh8vPjfAYAAAAAcBW5EYCyhtwIAK4sLCoCAAAAYPLiiy/q3Xff9WqfFSpUUM2aNVWrVi1dddVViouLU4sWLRQfH6/g4GCvjgUAAAAAAICSQW4EAFcWFhUBAAAAKHHZ2dnav3+/9u/fr7Vr11rrlSpVUqdOndS7d2/16dNHlSpV8uEsAQAAAAAAUNrIjQCg7GJREQAAPta5c2e79enTp+vQoUOlOxkAJergwYOaPn26TT0hIUFxcXGlPp+yICMjQ0uWLNGSJUs0ePBgPfTQQ3r++edVo0YNX08NAAAAAHyO3Ai4cpAb2SI3AgDfsxiGYfh6EgAAwFbnzp21Zs0am/qYMWM0duxYr49nsVisf+7UqZNWr17t9TGAK93q1at100032dSnTZum/v37l/6EXOTqcamgoECZmZlKT0/X4cOHtX//fq1fv15r167Vpk2bHI4TEhKiIUOG6D//+Q+XuAYAAAAAO8iNgMsPuRG5EQCURX6+ngAAAACAy5O/v79CQ0NVq1YttWvXTv369dO7776rjRs36uDBgxo3bpxiYmJs2mVlZenVV19VXFycfv/9dx/MHAAAAAAAACWJ3AgAygcWFQEAAAAodfX
q1dN//vMfHThwQGPGjFGFChVs9tm1a5c6dOigOXPm+GCGAAAAAAAA8AVyIwAoO1hUBAAAAMBnQkJCNHbsWCUmJqphw4Y2z2dlZenee+/Vxx9/7IPZAQAAAAAAwFfIjQDA91hUBAAAAMDnmjdvrl9//VWtWrWy+/zAgQM1c+bMUp4VAAAAAAAAfI3cCAB8h0VFAAAAAMqEqKgoLV++3O6ZZ4Zh6NFHH9WWLVt8MDMAAAAAAAD4ErkRAPhGgK8nAAAAyoZevXpZ/9ysWTMfzgTAlSwiIkILFixQy5YtlZWVZXouKytL//znP7Vt2zYFBwf7aIYAAAAAcOUhNwJQFpAbAUDpY1ERAACQJM2fP9/XUwAASVLjxo01fvx4PfvsszbP7dmzRxMmTNCoUaN8MDMAAAAAuDKRGwEoK8iNAKB0cfszAAAAAGXO4MGDVa9ePbvPvfbaa0pNTS3lGQEAAAAAAKAsIDcCgNLDlYoAAIBXbN68WUuXLtWvv/6qPXv26NixY8rIyFBAQIDCwsJUr149NWvWTB07dtSdd96pKlWq+GyumZmZWrx4sVasWKGtW7fqxIkTyszMVEREhKpVq6aaNWvq+uuvV8uWLdW6dWuX53ru3DktXLhQq1at0tatW3Xw4EGdPXtWBQUFqlixoqpXr66rrrpK8fHx6tq1q9q1ayeLxVJCr/aCnJwcfffdd1q1apU2bdqk/fv36/Tp08rJyVFoaKgiIiIUFRWla6+9Vtdff71atGih9u3be+1SwQUFBVq3bp1+/fVXbdu2TX/88YdSUlJ09uxZZWdnKzw8XJGRkYqJiVGbNm3Uvn17de7cWZGRkV4Z39GcfvjhB61du1a///679u3bpzNnzig9PV3BwcGKjIxUdHS0rrvuOsXFxenGG29U69atS/yzwgWBgYF6/vnnNXDgQJvnMjMz9cEHH2js2LFu93/8+HEtX75cW7Zs0bZt26yf/9mzZxUUFKSoqChFRkaqYcOGat++vTp06KCWLVu6NdbZs2eVl5fnVtuoqCj5+Tk+F+T8+fPKzs52ur+KFSuqYsWKxe537tw5ffvtt1q3bp1+//13HT16VGfOnFFGRoYqV66syMhI1a5dW9dff73i4uLUuXNnXXXVVU7PAwAAAMCVhdzoL+RGtsiN4CpyowvIjQCUCgMAAJRJnTp1MiTZPMaMGeNRv3/vq1OnTh71VVBQYMyYMcNo1qyZ3bk6egQGBhp9+vQxtm3b5vKYaWlpTo9Tr149U9ucnBzjlVdeMSIjI53u48EHH3R6bocPHzYef/xxIyQkxKX3IzY21njvvfeMnJwcl9+P4qSkpBjPPvusER4e7tKcJBmVK1c27r77buOzzz4zMjMzXR47NzfX+Prrr40+ffoYERERLo9fsWJF46mnnjL27dvn1fckKyvLGD9+vFGzZk2X5xQdHW08+OCDxuLFi438/Pwix1m1apXL/bv6M13aSuq45Eh6erpRsWJFh59FcZ/Bpfbv32+MGTPGaNGihWGxWFx+/1u0aGHMnDnT5XEdvW/OPA4cOFBk3w8++KBL/RX3WR09etR45JFHjEqVKrk810aNGhlDhw41Nm7c6NL7AwAAAMB15Eb2kRu5htzIFrmR+8iNyI2KepAbAeUXi4oAACijyno4tH37dqNNmzYefdH19/c3hg0bZmRnZzs9rrvh0KFDh4xWrVq5PEdnw6F33nnH4ZdYZx9NmjQx1q9f7+In4djUqVPdCmXsPSIjI41hw4YZ+/fvd2rst956y4iOjvbK2MHBwcbkyZO98p5s2LDBaNiwoVfmVatWLWPkyJEOxyIc8o577rnH4fuxcuVKp/o4cOCAcfPNN7sVCNl7tG/f3jhy5IjTr6G8hEOff/65W0GyvcfFIA0AAABAySA3so/cyHnkRrbIjTxDbnThQW5U/IPcCChfuP0ZAABw2bfffqt+/frp/PnzprrFYlHHjh31r3/9S02bNlXt2rWVkZGhI0eOaOXKlZo
xY4aSk5Ot+xcUFOitt97Szz//rAULFig6OrpE5nvkyBHddNNN2r9/v9f7zs3NVf/+/TV79myb56Kjo3Xfffepe/fuqlu3rkJCQnT8+HFt3bpV8+bN0/fffy/DMKz779y5Ux07dtRnn32mPn36uD2n/Px8PfHEE5o6darNc6Ghoerdu7cSEhJUr149RUdH6/z58zp27JjWrVunefPmacOGDTbt0tLS9NZbb2nv3r2aP39+sXP49ttvlZKSYlP39/fXnXfeqV69eqlFixaqUqWKCgsLderUKW3ZskUrVqzQl19+qdzcXGubnJwcPfbYY9q4caM++ugj196Mv0lMTNRtt92ms2fPmupXX3217r33XsXHx+uqq65SWFiY8vPzderUKW3evFmLFy/W/PnzVVBQYGp35MgRvfrqq3rllVfcnhOK17VrV82ZM8fuc/Pnz9dNN91UbB8HDx7UypUr7T5Xt25d9e/fXx06dFBsbKwqV66sM2fO6MiRI/rpp5/05ZdfaseOHaY2P/30k1q0aKFly5apefPmrr+oMuijjz7SU089ZTomSVL79u31j3/8Q3FxcapTp44qVaqkzMxMnThxQuvXr9ecOXO0bt06m/42bdqkyZMn67777iutlwAAAACgjCA3+gu5kX3kRvAWcqPSQW4EXOF8u6YJAAA4UlbPOJs3b54REBBgM6+aNWsWe/ZHXl6e8fzzzxt+fn427a+99lojJSWl2PELCwuN1NRUm8eNN95o9+ycvLw803Ph4eHGgAEDjAULFhhbt241jh49auzYscOYMmWK3TORijrjLC8vz+jVq5fdz+lf//qXkZaWVuRrWbNmjXHVVVfZtA0ICDAWLFhQ7HthT35+vtG7d2+7c7rzzjuN1NTUYvv47rvvjBo1atjto1evXk7Nw97Pb7NmzYwtW7YU2/bIkSNGz5497Y4/ceJEp8a/1JkzZ4yYmBhTXxaLxXjjjTecuiRxUlKSER8fb3dOjuTm5tr8nM6fP99uH++9957dn+tLH6dPn3br9XuLL84427dvn8Ozmlq1auVUH/bO/rNYLMbYsWONvLy8ItsWFhYas2bNMsLCwmz6iI2NNU6ePOnS6yksLLT7ey/J2LVrl0t92XPu3DkjNDTU2mfPnj2LbfPLL7/YnI0XERFhLF261Kkxly5daveY4emtEgAAAAA4Rm5kH7lR0ciN7CM38g5yI3Ije8iNgPKPRUUAAJRRZTEc2rBhg1GhQgWbOdWuXds4evSo0/1MmzbN7mvr0KGDkZub6+IrusDe+1WvXj3jjTfesG7/4x//MI4fP+6wj3Pnzhk33HCD0+HQ4MGD7b6OJ554wul5//nnn0ZsbKxNH2FhYcbevXtdeQsMwzCMoUOH2p3To48+6lI/hw8ftvtlz91wqEmTJi59iS4sLDQGDhxoM76/v7/x22+/ufRaDMP+ZzV69GiX+sjMzDTat2/vdDhkj6NLW0+bNs2lfnzFF+FQYWGhKez4+yMwMNDIzMwstg977/uHH37o0jx27NhhREZG2vSTkJDg8mt67bXX7L6eYcOGudzXpSZPnmzqc9GiRcW2iYuLs5nL8uXLXRp3z549NpeuJxwCAAAASg65kWvIjS4gN7KP3Mg7yI3IjRwhNwLKNz8BAAA4ITMzU3379lV2drapHhgYqK+//lo1atRwuq/+/fvr3//+t0197dq1Gj9+vMdzvSg9Pd16ieH+/fvrq6++UkxMjMP9K1eurPfff9+pvpcsWaJ3333Xpn7DDTfogw8+cHqOtWrV0v/+9z/5+Zn/WZaenq4nnnjC6X4kaenSpfrvf/9rU2/Xrp1Lc5KkOnXqaNasWS61ccRisWj69OmqUqWKS20mTZqkNm3amOoFBQV6+eWXXRo/Pz9fn3/+uakWHBys4cOHu9RPSEiIZsyYoeDgYJfawTMWi0VNmjSx+1xeXp727dvncp/du3fXwIEDXWrTpEkTTZ8+3aY+f/58bd682aW++vfvr4A
A2ztRf/bZZ6ZLuLtj8uTJ1j/XqVNHt99+e5H7//777zbzb9u2rW655RaXxr366qvtHhMBAAAAXBnIjczIjZxHbgRPkBu5htwIgDtYVAQAQDkzbtw4WSwWtx/uevnll+3eW75///664YYb3OovIiLCpv7GG29o165d7kzRRlpamtLT03XddddpypQpTr3+Nm3aKDo6ush9cnJy9OSTT9rULRaLPvjgA5ugpzgtW7bUQw89ZFNfvny5li5d6lQfOTk5Dr/s/ve//1VgYKBLc5Kkzp07q3v37i63u1SXLl1sQh5nBAQE2A0LFyxY4NLPyM8//6y0tDRTrXHjxgoLC3N5TrGxsbr77rtdbgfP1KxZ0+Fzhw4dcrm/kSNHujWPnj17qm3btjb1N954w6V+qlevrp49e9rUT548qa+//tqtuUnS5s2b9dtvv1m3H3nkkWKPR4sWLbKpxcfHuzX+PffcU+RnBQAAAKDkkRs5j9zoAnIjcqPyjtzIOeRGANzFoiIAAFCslJQUu2cS+Pv764UXXnCrz/DwcD399NM29by8PI0dO9atPh1599137Z7d4UhsbGyRz0+ZMsXuF9KOHTu6FZRJchjsTJw40an2jubUtWtXu19mnTVgwAC32170z3/+0+22t956q6pWrWqqGYahJUuWON2HvTOSQkJC3J7TzTff7HZbuKeoM1qPHDniUl81a9bUjTfe6PZc+vXrZ1P77rvvVFhY6FI/jz76qN36388Yc9XHH39s/bO/v78eeeSRYtt4+/fjpptucrstAAAAgPKJ3MiM3Mg15EbwFLmRc8iNALiLRUUAAKBYn3zyiTIzM23qbdu2Vf369d3u97777rNbnzdvno4ePep2v3/XuHFjl7+srF27VllZWcrKytKUKVNsnn/vvffstnv44YfdmqMktWrVSldffbVNfdWqVUpOTi62/aRJk+zW77nnHrfnJEm33367W18Uu3btqn79+qlfv34uXw737ywWi9q3b29TX7NmjdN9pKSk2NT27t0rwzDcmlNsbKyqVKlifaDkVaxY0eFzGRkZxbaPiYmx/jwOGTLEo7l06NDBpnbmzBlt2bLFpX5uvfVW1atXz6a+evVq7dmzx+V5ZWRkmC49f/vtt6t27drFtrP3+5GUlOTy+Bddc8011t+N8PBwt/sBAAAAUH6QG5mRGxWP3AjeRG5UPHIjAJ5gUREAACjWpfcVv8jTSxw3atRIV111lU29oKBAX3zxhUd9X+ROOBIUFKQKFSqoQoUKNpd/TkxMdPjF6Y477nBrjhfZOwvGMAx98803RbZbt26d3S+TFovF4zkFBwcrLi7O5XYvvviivvjiC33xxRdq0KCBR3Ow9wV3/fr1TrcPCgqyqZ08ebLY99WRLl266OTJk9YHSl5RAaW94PpSTZo0sf48Dh8+3KO5OApcXPmZlCQ/Pz+7Z4QZhqFPPvnE5XnNnTtX6enp1u3HHnvMqXb2fj+WLl3q8pl8F40ZM8b6u7FgwQK3+gAAAABQvpAb/YXcyDnkRvAmcqPikRsB8ASLigAAKGfGjBkjwzDcfrhq37592r17t93nOnXq5OnLcXg2mCuXKS5Ku3btvNLPRYsXL7Zbb9Kkic3lll3VrFkzu/XVq1e7NaeGDRuqevXqHs1Jkv7zn/9owoQJmjBhgh566CGP+3NVZGSkTc2VUMbRWZGPP/64Nm3a5Pa8UHqKukR0cfd+9zZ7P4+SlJqa6nJfDz/8sPz9/W3q06dPV15enkt9/f3y17Vr13Y6GLb3+5GVlaV//OMfhJ8AAABAOURu5BpyI3IjcqPyj9yoeORGADzBoiIAAFCktWvXOnyucePGHvffqFEju/V169YpPz/f4/6vv/56j/v4ux9//NFuvXXr1h73be8y1lLxZ7I4mlPDhg09npMkdevWTcOHD9fw4cPVq1cvr/TpCntf/vPz801n1xSlY8eOCggIsKmfPHlS7dq108iRI5WWlubxPFFysrOzHT5
X1CWuS4KjMOr06dMu91WrVi27IU5KSormz5/vdD9bt25VYmKiddtR6GSPo8vMJyYmqnnz5vrkk09cDqoAAAAAXDnIjczIjciNUPrIjYpGbgTAUywqAgAARdq8ebPdepUqVRQVFeVx/44CjJycHO3YscOjvgMDAxUTE+NRH5dy9H54eqlmSQoLC7NbP3LkSJFBiKN7cnsrHCpJ+fn5SktLM10W+tKHo8sUnzlzxqkxqlSpon79+tl9LicnR6+99ppq166tAQMGaM2aNUWe3QTfOHfunMPnKleu7NWxMjMzderUqSJ/Ju1x9ufxUo8++qjd+t/PICvO3/f18/PTgAEDnG7bvXt3h8evY8eO6dFHH1W9evU0evRo7dy50+l+AQAAAFwZyI3MyI28i9wIziA3Khq5EQBP2S69BQAA+Jv9+/fbrTu6P7Sr6tSpU+TYzZs3d7vv0NBQt9vac+rUKZ09e9buc7GxsR73X9R8Dx06pOuuu86mfvr0aYdz8tZn5A35+flatmyZ1q5dq82bN+uPP/7QqVOnnLqvuSOuhDhvvvmmfvjhBx07dszu85mZmZo6daqmTp2qmjVrqlevXkpISNBNN92kwMBAt+cI7zh69KjD59z9OU9OTtaCBQu0ceNGbd68WYcOHVJ6errbZ7q6GyrecccdqlWrls296FesWKH9+/cXGzxnZWVp5syZ1u1u3boVeVy9lL+/v6ZOnaouXbqooKDA7j7Hjh3Tyy+/rJdfflnXXXedEhISlJCQoJYtWzo9DgAAAIDLE7nRX8iN3EduBE+QGzlGbgTAG7hSEQAAKJKjL9OVKlXySv9F9VPUF0JnePtMFEfvhSQ99NBDslgsHj3i4+Md9n/8+HGX5+Stz8gTaWlpevbZZ62X633ttde0dOlSJScnexQMuSo6OlrLli1TrVq1it336NGj+uijj3TbbbepWrVquu+++7RgwQIu5etDf/75p8PnXA1mlyxZoptvvln16tXToEGDNG3aNG3atEmnT5/2yqXzXeXv76+HH37Ypm4YhqZMmVJs+7lz55rOdnvsscdcnkPnzp01a9YsBQUFFbvvtm3b9NJLL6lVq1aqV6+eBg8erHXr1rk8JgAAAIDLA7nRX8iNXEduBG8gN3KM3AiAN7CoCACAK4xhGNbH6tWri93//PnzduulEQ45GttZFovFo/aX8nQ+nnB0Geui5uTrcGjGjBlq2LChJk6cqJSUFJ/ORZKaNm2qTZs2qU+fPk63OXv2rGbPnq2EhATVrFlTI0aMcBjUoWQUFBRo165ddp+rUKGC05eQP378uBISEtS9e3etWrVKhmF4c5oeeeSRR+TnZ/vVbPr06cUGVn8PkGrWrKkePXq4NYc+ffooMTFRLVq0cLrN4cOHNWnSJLVr106NGjXS+++/r+zsbLfGBwAAAFA2kBu5j9zINeRG8AZyI3IjACWPRUUAAKBIOTk5dusVKlTwSv9F9eNobF/Jzc312diOvnAV9R556zNyx2uvvaYHHnjA7n3Ea9asqZEjR2rp0qU6fPiwzp07ZwotL32MGTPGa/OqVq2a5s6dq8TERPXq1Uv+/v5Otz158qQmTJig+vXr6z//+Y9Pfx6uJDt37nT4c96qVSunLjOenJysDh06aMGCBTbP+fn56fbbb9fUqVO1ZcsWpaamKi8vr8ifSW+rV6+eunbtalM/fvy4vv32W4fttm/frl9++cW6/fDDD7v0M32puLg4/fbbb5o7d65at27tUtukpCQNGjRI9evX15w5c9yeAwAAAIDyhdzoL+RGziM3greQG5EbASh5Ab6eAAAAKNuCg4Pt1r11VkFR/Tga21eKms+0adPcPtPDGaGhoXbrRc3JV2d+fPrppxo5cqRN3WKxaOTIkRozZozP7zffpk0bzZ8/X8ePH9fMmTM1c+ZMbdq0yam22dnZeumll7Ro0SItXrxYNWrUKOHZXtlWrlzp8LkOHToU2z47O1tdunTR3r17bZ5r0qS
JZs6c6dJZViXlscce0/fff29Tnzx5su6++267bSZPnmz9s5+fnx555BGP5+Hn56c+ffqoT58++uOPP/T5559rzpw5Sk5Odqr98ePHde+992rJkiWaOnWqz3/XAQAAAJQscqO/kBs5h9wI3kRuRG4EoOSxqAgAABTJ0f3lMzIyvNJ/Uf04CkR8xdF7IV34QlW1atVSnM0FRc3JW5+RK06cOKHhw4fbfe61117Tc889V8ozKlr16tU1bNgwDRs2TElJSZo9e7Zmz56t3bt3F9t206ZN6tixo3777TeFh4eXwmyvTEuXLnX4nKPQ5O/Gjx+vpKQkm3q9evW0atUqxcTEeDQ/b7nzzjsVExOjEydOmOrLli3TwYMHFRsba6pnZ2drxowZ1u2uXbva7OOpZs2a6c0339Qbb7yhn376SbNnz9a8efOUmppabNsZM2YoKytLX331lVfnBAAAAKBsITf6C7lR8ciN4G3kRuRGAEoetz8DAABFqlmzpt26t+4TX1SA4WhsXylqPt56P1xV1Jx8EQ59+OGHSktLs6k3a9ZMI0aMKPX5uKJhw4YaM2aMdu3apd9++03PPPOMIiMji2yzd+9eDRo0qJRmeOU5fvy4li1bZve5+vXr64YbbiiyfUZGht555x27z02YMKHMBEOSFBgYqIceesimXlhYqE8++cSm/tVXX5l+1x577LESm5vFYlGHDh304Ycf6ujRo1q8eLH69OlT7Nlk8+bN06efflpi8wIAAADge+RGfyE3Kh65EbyJ3IjcCEDpYFERAAAoUoMGDezW//zzT6/0f/jwYZfH9pXIyEhFRUXZfe7MmTOlO5n/X2RkpMMAw1ufkSvmzZtnt/7444/LYrGU8mzc16pVK7399tv6888/9d577xUZIsyaNUv79+8vxdldOT755BMVFBTYfe7pp58utv2SJUuUlZVlU69WrZp69+7t8fy8bcCAAXZ/T6ZNm6b8/HxT7e+XsK5evbp69uxZ4vOTpICAAN1xxx2aO3euDh06pKFDhxZ5Of2XX35ZhmGUytwAAAAAlD5yo7+QGxWP3AjeRG50AbkRgJLGoiIAAFAkR/eMPn36tE6ePOlx//YuLytJFSpUUJMmTTzu39vi4uLs1nft2lW6E/kbR3Ny9N6WlDNnzmjHjh12n+vUqVOpzsVbKlasqH//+9/avXu3evXqZXefgoICLVy4sJRndvk7d+6c3n77bbvPRUVFacCAAcX28fPPP9utd+jQoUyGlVdddZVuuukmm/rFs7wu2rVrl3766Sfr9kMPPaSAgNK/s3WNGjX01ltvafPmzWrcuLHdfQ4cOKCtW7eW8swAAAAAlBZyIzNyI8fIjeBN5EZ/ITcCUNJYVAQAAIrUoUMHh885c//w4jjqo127dj75slMcR+/H9u3bS3kmf3E0J298Pq44duyYw+fq1KlTijPxvvDwcH311VcOL5v822+/lfKMLn+jR4/W6dOn7T73yiuvqHLlysX24ehnsiz/PDq6HPXfzzD7+58tFoseffTREp9XURo3bqwffvhBoaGhdp/n9wMAAAC4fJEbmZEbOUZuBG8iNzIjNwJQklhUBAAAilS/fn01bdrU7nOrVq3yuH9HfXTv3t3jvkvCnXfeabe+fft2nT9/3itjJCQkKCAgwPro3LmzW3Pas2dPkYGNszp27KjY2Fjr49lnn7W739/v030pZ77IO5KRkeF2W0maP3++xo4dq7Fjx+rrr792u5/AwECNHj3a7nPeOPsSf/nxxx/13nvv2X0uPj7e6fvAO/qZ9OXPY3HuuusuVa1a1ab+3XffKTk5WTk5Ofr888+t9S5duqh+/fpuj/fOO+9Yfz82btzodj916tRxGFLx+wEAAABcvsiNzMiNyI3s4Xuxd5EbkRsBKF0sKgIAAMV64IEH7Nb/fllVd+zatcvuPcUDAgLUr18/j/ouKa1atbIbluXk5GjRokUe95+ZmakffvhBBQUF1kdx97xu3bq1rr3
2Wpu6YRhaunSpR/NJTk7W2rVrdejQIeujVatWdveNjIx02E9RwVFxTpw44XZb6UI4NG7cOI0bN07Tpk3zqK//9//+n916hQoVnGrv6CxK7h3+l0OHDql3794qLCy0eS4qKkpz586Vn59zX2Mc/Uz68uexOEFBQXaPuYWFhfrkk0/0v//9T6dOnbLWnQ3KHHnnnXesvx+eBv6e/n4AAAAAKJ/Ijf5CbkRuZA+5kfeQG5EbASh9LCoCAADFeuSRR+yeobF+/Xrt3bvX7X5nzZplt96nTx9Vr17d7X5L2uDBg+3WZ8+e7XHf06dPV1ZWlnXbz89Pd999d7Htnn76abv1OXPmeDSfS19TUFCQbrvtNrv7xsTEOOxn586dbs/hl19+cbvtpQ4cOOBR+0qVKtmt16hRw6n2js50ys3NLbLd66+/ri5dulgf3vhZK4sOHDigzp07KzU11ea54OBgzZ07V/Xq1XO6P0fHkbLy8+iIozO3Pv30U/3f//2fdTsmJka9evXy2ri+/v0AAAAAUD6RG5mRG5EbXYrcyDvIjS4gNwJQ2lhUBAAAilWlShUNHTrUpl5YWKhXXnnFrT7Pnj2rSZMm2dQDAwM1ZswYt/osLQ899JCuueYam/rChQuVmJjodr+ZmZmaMGGCqdanTx/FxsYW2/bhhx/WVVddZVNftmyZ1q9f79Z8cnNzTV9EJen+++93eBZPVFSUmjRpYvc5d89O/O2337Rv3z632tqzfft2HTlyxO32hw8ftltv0aKFU+3DwsLs1ou7BPq6deu0YsUK68PRPePLs+XLlys+Pl4HDx60eS4wMFBffvmlunTp4lKfjs6A+uWXX3TmzBk3ZinNnTvXrXauaNy4sTp06GBT//PPP7V27Vrrdv/+/RUYGOi1cX/44QeP2nv6+wEAAACgfCI3MiM3Ije6FLmR58iN/kJuBKC0sagIAAA45YUXXrAbiHzxxRduBSKjRo3S2bNnbeojR45Uw4YN3ZpjaQkICNDkyZNlsVhMdcMwNGjQIOXk5LjV73PPPWf6Yuzn56dRo0Y51TYwMFAff/yx3eeGDh2qvLw8l+czYcIE0xkofn5+Gj58eJFtevToYbf+8ccfmy696wzDMPTiiy+61MYZb731lttt58+fb1MLCAjQXXfd5VT7unXrKjg42KZ+7NixIttdGpCV5TMyXZWamqrHHntMt912m90zzapUqaJly5YVezl3e2677Ta74UlOTo5bPwc///yzVy5X74ziLk9tsVgcnpnmrj179ujbb791u72934/rr7/e7t8dAAAAAC4v5EZ/ITdyjNyoaORGtsiN7CM3AlCaWFQEAACcUqFCBc2dO1cVK1Y01fPz83X33Xfr6NGjTvc1ffp0vf/++zb1Tp06OR2G+Frnzp3tBhcbNmzQPffco4KCApf6e/PNN23ek1GjRqlp06ZO93HLLbfo+eeft6n//PPPGjRokEvzWbx4scaOHWuqPfvss2rcuHGR7Z5++mmFhITY1M+ePav77rtP+fn5Ts9h/PjxHp8BY8/777/vVr+HDx/Wa6+9ZlN/6KGHVLVqVaf6CAgI0HXXXWdT37p1q8M2R44c0fbt2021+Ph4p8YryzZs2KCBAwcqNjZWU6ZMUWFhoc0+HTp00IYNG9SpUye3xggPD9fjjz9u97nXX39dK1eudLqvI0eOqG/fvm7Nwx29e/d2eHanJN188812zzL11FNPPaXk5GSX2y1cuFBLliyxqY8YMcIb0wIAAABQxpEbmZEb2UduVDRyo7+QGxWN3AhAqTIAAIBPZWRkGKmpqTaPG2+80ZBk83j22Wft7p+ammqcOXOmxOf77bffGoGBgTbzqlGjhrF8+fIi2+bl5RkjR440/Pz8bNo3a9bMSE1NdWoOZ86ccer9qlOnjsP3Kjs72+P3orCw0Lj//vvtfk7t2rUz/vjjj2L7SElJMfr162fT/qabbjLy8/NdnlNBQYFx33332Z1TQkKCcfLkySL
b5+XlGRMnTjSCgoJMbVu2bGnk5OQ4NYexY8faHV+S0blzZ+PPP/8ssv358+eNJ5980tomPDzcbl8bN260+VwLCgps+nvwwQdt2oaEhBj//e9/nX6PV69ebdStW9emn5o1axppaWlO9XHRSy+9ZNNPcHCwkZycbHf//v37m/aNi4tzaTx3nD9/3ivHpRMnThgHDhwwtmzZYixatMh45513jH79+hl16tRx+DMiyahatarx/vvvG4WFhR6/lpSUFKNatWp2xwkJCTGmTp1a7Dhr1641atWqVeTP4z333GPz+s+dO+fR3AcNGuTwPZo7d65HfV9Ur149m75r165tLFmyxKn2BQUFxscff2wEBwfb9HPHHXd4ZY4AAADAlYzciNyI3MiM3MiM3Mgz5EZFIzcCYBiGwaIiAAB8bMyYMUV+SXLl0alTp1KZ8+LFi42wsDCb8S0Wi9GxY0djypQpxrp164zDhw8bu3fvNlasWGG8+OKLdr9cSzLatm1rnDhxwunxO3Xq5PF7NW3aNK+8FwUFBaYg4++PgIAAo1u3bsaUKVOMX375xdi/f7/x559/Glu2bDG++OILo3///kaFChVs2t16663G+fPn3Z5Tfn6+8fjjj9udU1hYmPHII48Y3377rbFlyxbj6NGjxu7du40ffvjBGD16tN3P6Prrry82VPq7wsJCo2/fvg7f++DgYOPhhx82vvrqK2P79u3G0aNHjT179hjLly83nnvuOSM6Otq67913322MGjXK6c/1wIEDNvOxFw5dfNSoUcMYOnSo8dVXXxlbt241Dh8+bBw7dszYs2ePsXLlSuOtt94y2rdvb7dtrVq1jF27drn8+SQnJxsVK1a06a9Ro0bGl19+aRw4cMD4888/jRUrVhh33323zX6zZs1yeUxXDR482GvHJVcetWvXNl555RWPQ5VLrV+/3qhUqZLDcZs1a2a8+eabxs8//2x9/zdv3mx88sknRrdu3az7Va5c2fjxxx+dfj0PPvigR/Petm2b3X6rVavmdFhbHHvh0MVH8+bNjZdeeslYunSpsXPnTuPIkSPGkSNHjJ07dxoLFy40Ro0aZVxzzTV223bq1MnrnyMAAABwJSI3Ije6tB25EbkRuRG5kWGQGwEoPSwqAgDAx8pjOGQYhrFr1y6jXbt2Hs3X39/fGDZsmMtfcspSOHTRjBkzjKioKI/mFBAQYDz77LNe+9I3ffp0IzIy0qM59e7d2zh16pTLY+fk5BhPPfWU2+NaLBZjyJAhRn5+vku/I/bCoTVr1hjdunWze6aju49bbrnF7ljO+vDDD90a9/7773d7TFeUZjhUo0YNo3///sbixYvdOsvSWb/88kuRQUhxj/r16xsbN240DMNwuo2n4ZBhGEbbtm1t+h0+fLjH/V40depU4/rrr/fa5+nn52cMHDjQyMzM9NocAQAAgCsZuRG50cUHudFfD3Ij+w9yI/eRG9lHbgTAMFhUBACAz5XXcMgwLpxtNXPmTKN58+YuzTMwMNDo27evsW3bNrfGLYvhkGEYRmpqqvH888+7HMgEBwcb99xzj9vvR1FSUlKMESNGGBERES7NqVWrVsa8efM8Hn/p0qVGfHy8S2PHx8cbq1evtvbhaTh00cGDB42XXnrJiI+Pdzsoatu2rTFnzhyP3xfDMIxPP/3U7pmbjn5nRo0aVaLhyd95MxwKDg42wsPDjVq1ahmtW7c2evXqZQwbNsz4/PPPje3bt5fK67nozJkzxpAhQ4zQ0FCn51+pUiXjueeeM5095Wxbb4RDn376qU2/SUlJHvd7qcTERGPw4MEOzyBz5mc0ISHBWL9+vdfnBgAAAFzJyI1cR27kPHKjv5AbOY/c6K8HuZHzx3VyI6D8shiGYQgAAMBDmzdv1pIlS/Trr78qKSlJx48fV0ZGhgICAhQaGqp69eqpWbNm6tixo3r27KmqVav6esolJjc3V99//71WrFihjRs3av/+/UpLS1NOTo4qV66siIg
I1axZUy1btlSbNm105513KjIyskTnlJOTo++++04rVqzQpk2brHPKzc1VWFiYoqKidM011+jGG2/UbbfdpjZt2nh1/A0bNui7777Tzz//rD179uj06dM6d+6cgoODVa1aNevYvXr1UsuWLb06tj2nTp3SqlWrtHXrVm3fvl1JSUlKS0tTenq6MjMzVbFiRYWHh6tatWpq3ry5WrZsqW7duqlhw4Zence5c+f0xRdfaNWqVdq8ebNOnjxpfV+ioqLUrFkzdejQQf3791eNGjW8OvaVLD09XfPnz9fatWu1YcMGpaSkKC0tTfn5+QoNDVXdunUVFxenW265RQkJCQoNDfXZXM+ePavIyEhd/NrWuXNnrVq1qkTH3Ldvn9auXatt27Zp+/btOnDggM6ePav09HTl5eUpNDRU4eHhqlWrluLi4tS6dWv16NHjsj6uAwAAAPAMudFfyI1skRvZR27kG+RGRSM3Aq48LCoCAAAAgDJqwYIFSkhIsG7PmjVL9957r+8mBAAAAAAAgDKB3AhAafDz9QQAAAAAAPZNnTrV+ucqVaro7rvv9uFsAAAAAAAAUFaQGwEoDSwqAgAAAIAy6Pjx41q6dKl1+8EHH1RwcLAPZwQAAAAAAICygNwIQGlhUREAAAAAlEGfffaZ8vPzrduPPfaYD2cDAAAAAACAsoLcCEBpYVERAAAAAJRB06ZNs/65Y8eOatSokQ9nAwAAAAAAgLKC3AhAaQnw9QQAAAAA4EqQlZWlnJwcSVLlypUVEOD469iaNWu0e/du6zZnmwEAAAAAAFy+yI0AlFVcqQgAAAAASsELL7ygyMhIRUZGaubMmUXu++6771r/XK1aNfXu3bukpwcAAAAAAAAfITcCUFaxqAgAAAAAStn69esdPrdx40bNnz/fuj1w4EAFBweXwqwAAAAAAADga+RGAMoSFhUBAAAAQCmbM2eOTp48aVPPyMjQww8/LMMwJEkRERF6+umnS3t6AAAAAAAA8BFyIwBlCYuKAAAAAKCUnT59WvHx8Zo9e7Z2796tpKQkzZ49Wy1atNCWLVus+40ZM0ZVqlTx4UwBAAAAAABQmsiNAJQlAb6eAAAAAABcifbv36/77rvP4fPdu3fX4MGDS3FGAAAAAAAAKAvIjTibM+0AAAJmSURBVACUFVypCAAAAADKmHvuuUfz5s2TxWLx9VQAAAAAAABQhpAbAShNFuPiTRcBAAAAACXm+PHjWr16tX799Vf9/vvvOn78uE6dOqVz586pUqVKqlevntq1a6cHHnhA/+///T9fTxcAAAAAAAClhNwIQFnFoiIAAAAAAAAAAAAAAAAAJtz+DAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIAJi4oAAAAAAAAAAAAAAAAAmLCoCAAAAAAAAAAAAAAAAIDJ/wfGzouTtfXp1AAAAABJRU5ErkJ
ggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAB38AAAEjCAYAAAAyvcuoAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8hTgPZAAAACXBIWXMAAC4jAAAuIwF4pT92AAA8yUlEQVR4nO3de7xVc/4/8HedTh3dji5Eyi3kXi4liiaiy5gMowtCxjCML+N+p3zNUMptYpihieH3dYlcasYwkxKVTFSkkZSUpHRxEjrVaf/+8NA49t517tXq+Xw89uPR+ay93uu9z+Gss/drrc+nWiqVSgUAAAAAAAAAW7Xqm7sBAAAAAAAAAMpP+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQP
gLAAAAAAAAkAA1NncDAMDmsW7dupg8eXJMmDAh3nnnnZgzZ058+umnsWrVqvj222+jVq1aUadOndhhhx2iRYsWse+++0aHDh2iQ4cO0bBhw83dPgAAABUglUrF22+/Hf/+97/j7bffjhkzZsTy5cujoKAgCgoKIiKiXr16Ubdu3WjUqFHsueee0aJFi9hzzz3joIMOitatW0deXt5mfhUAwPeqpVKp1OZuAgCoOtOmTYsHH3wwnn322Vi6dGmp98/JyYnjjjsu+vbtG3369Inc3NxK6BIASmf9+vWxfPnyctfJzc2N/Pz8Uu+3atWqWL16dZUeEwDKY9GiRTFs2LB45JFHYs6cOWWuU6NGjdh///3j8MMPj44dO8axxx4bzZo1K3O9p556KgYMGBBz586NAw44IAYPHhzHHXdcmesBwLZG+AsA24jp06fH1VdfHa+88kqF1WzevHlcf/31cf7550f16laTAGDzmTdvXuyxxx7lrtOxY8cYN25cqffr169fPProo1V6TAAoi6Kiohg6dGjcdNNNsWrVqko5RsuWLaNr165xzz33lGq/ESNGRK9evYqN5ebmxvjx46Ndu3YV2CEAJJdpnwEg4QoLC+Paa6+NoUOHRlFRUdr2li1bxgknnBBHHHFE7L333tG4ceOoU6dOrF69OgoKCmLOnDkxffr0+Ne//hUTJ06MH143tmDBgrjwwgtj2LBh8fDDD0erVq2q8qUBAABQCkuWLImTTjop3nzzzbRtOTk5cfzxx8exxx4brVu3jqZNm0b9+vUjJycnVq5cGQUFBfGf//wnpk+fHn/7299i9uzZWY8za9as+PTTT0sd/g4YMCBtbO3atfG73/0uRo8eXapaALCtEv4CQILNnTs3fvGLX8S0adPStp144olx/fXXx5FHHrnRGgcffHCcfPLJMWDAgPj444/j3nvvjQceeCDWrFmz4TlTpkyJdu3axYMPPhhnn312Rb8MAIBSmzZtWjz//PNp4/369Yvdd9+9yvsB2NyWLVsWxxxzTMyaNStt25lnnhkDBw6Mpk2bZtz3+/EjjjgiIiLuvvvumDBhQlxxxRUxefLkCusxW6A8d+7cCjsGACSdaZ8BIKGmT58eXbt2jc8//7zY+A477BCPPPJIdO/evcy1Z82aFaeddlpMnTo1bVv//v0zXq0NAJvDuHHjolOnTmnju+22W8ybN6/Sj/+Tn/wkXnvttQ1fewtedR555JE455xz0sbHjh0bP/nJT6q+IYDNKJVKRdeuXdOWAapevXoMGzYs+vXrV6a669evjxtuuCEGDhyYtq1OnTqlnlZ6v/32iw8++CBt/OSTT46RI0eWqUcA2NZYnA8AEmjmzJnRqVOntOB3r732iilTppQr+I34bqroSZMmxYknnpi27ZZbbombbrqpXPUBAACoOE888URa8BsRcdNNN5U5+I34Ljy+/fbb4/zzzy9Hd/914403po3l5OTEddddVyH1AWBbIPwFgIRZuHBhdO3aNVasWFFsfKeddoqxY8fGrrvuWiHHqVWrVowcOTLjnTO/+93vYvjw4RVyHAAAAMrntttuSxvbaaed4tprr62Q+oMHD44mTZqUu84ZZ5wRjz32WOyzzz6Rm5sbBx98cLz44ovRpk2bCugSALYNwl8ASJCioqLo3bt3LFiwoNh4tWrV4umnn45mzZpV6PFyc3NjxIgRGd/kX3DBBTFlypQKPR4AAAClM3Xq1Hj//ffTxn/+859HXl5ehRyjfv36cfnll1dIrb59+8asWbNizZo1MX369HLPXAUA2xrhLwAkyK233hoTJkxIG7/gggvi6KOPrpRjNm7cOO6999608TVr1kS/fv2isLCwUo4LAADApo0dOzbjeLt27Sr0OL17967QegBA2Qh/ASAhPvzww4xTedWuXTsGDBhQqcfu3bt3HHrooWnj77//fgwZMqRSjw0AAEB2M2bMyDi+4447Vuhxdtttt9h3330rtCYAUHrCXw
BIiCuvvDLWrl2bNn7OOedU+Jv6TLKtFTV48OBYvnx5pR8fAACAdIsXL844nkqlKvxYBx10UIXXBABKp8bmbgAAKL/p06fHqFGjMm4799xzq6SHk046KRo1ahTLli0rNl5QUBBDhw6N/v37V0kfAAAA/Fe2pXgWLVpU4ce65JJL4qijjoqIiJo1a1Z4fQBg09z5CwAJcPfdd2cc33vvveOQQw6pkh5q1qwZP//5zzNue+ihh6KoqKhK+gAAAOC/6tevn3H8jTfeqPBjdejQIS699NK49NJL4ze/+U2F1wcANs2dvwCwlfvmm2/i6aefzritW7duVdpL9+7dY9iwYWnjCxcujJdffjm6d+9epf1sKb788st44YUX4rXXXosZM2bE0qVLY/Xq1dGwYcPYYYcdolmzZtG6des49NBD47DDDsv64cwPLV26NJ5//vn497//He+8804sXrw4CgoK4ptvvon69etHgwYNYvfdd49WrVrFIYccEscee2w0bdq0Cl4tAFuCjz76KJ544omYOHFizJ07N1auXBn5+fnRuHHj2H///eOEE06ILl26RL169Sq1j3Xr1sWYMWPin//8Z0yZMiXmzJkTy5cvj8LCwthuu+2iYcOGsccee8Shhx4aP/nJT6JLly5Rq1atSu0JYFvToEGDjOPPPvts3HXXXVm3b4s+++yzeOyxx2LcuHExZ86c+Oqrr6J+/frRrFmzaNu2bZx66qlx2GGHlbjea6+9FiNHjoy33norPv/881i3bt2G94DHHXdcnHTSSbH77ruXqselS5eW8lV9JycnZ5M/6xUrVpTqwu38/PzIzc0t1X4NGzaM6tWL35M2b968eO6552LcuHExc+bMWLJkSXz99deRl5cXO++8cxx00EFx/PHHR69evaJRo0Yl7g9gm5UCALZqzzzzTCoiMj6efvrpKu1l8eLFWXu58MIL056/YsWKrM//8ePss88uUQ9nnHFGierttttuJao3derUEvfYsWPHYvsWFBSkrrzyytR2221X4hr9+/ffaD+zZs1K9ezZM1WzZs0S1/z+ccghh6Ruuumm1Icfflii1w6QBGPHji3XeaC8OnbsWOy4JVGec8+CBQtSvXv3LtG+jRs3Tg0dOjS1bt26Cn/dq1atSt1+++2pJk2alOpcVb9+/dSVV16ZWrx4camON3z48FKfF0v7vQXYWg0aNCjr77pzzjlns/a22267lfj38scff7zJevn5+SWu90Nff/116oorrkjl5uZucr8OHTqk3n333Y32MXny5NShhx66yVo5OTmpiy66KLV8+fISf8/Kel4ryd8+pfl5RERq7Nixpd7vhz/HhQsXpvr06ZPKyckp0b61atVKXXLJJaX6fgFsi0z7DABbub///e9Zt7Vp06YKO4nYcccdY9ddd8247aWXXqrSXja3d999Nw499NAYMmRIfPvttxVSc/DgwXHwwQfHiBEjYs2aNaXef+rUqXHrrbfGPvvsE8ccc0y8/PLLFdIXAFuG8ePHx6GHHhpPPfVUiZ6/dOnSuPjii+P000+PtWvXVlgfY8aMiQMOOCCuu+66WLx4can2XblyZQwZMiT23XffeOyxxyqsJ4Bt2fdr8GYyfPjwuOaaa7bpZXoWLlwY7du3jzvvvLNE58M33ngj2rRpE88++2zG7X/605+iffv28c4772yyVlFRUdx///3RoUOHWLJkSal735p9//fCk08+WeL//goLC+MPf/hDHHzwwTFhwoRK7hBg6yX8BYCt3JtvvplxvE6dOqWePqoi7L///hnH582bt828mZ0xY0Yce+yxMWfOnAqred1118XVV18dhYWFG8aqVasWXbt2jT/+8Y8xYcKEmDNnTixatChmz54d48aNi4EDB8aBBx6Ysd7rr78eTzzxRIX1B8DmNXny5OjevXt88cUXpd736aefrrB1Ge+9997o0qVLfPLJJ8XGa9WqFaeffno8++yz8e6778bChQtj2rRp8eyzz8
ZZZ50VderUKfb8FStWxFlnnRVXXHFFhfQFsC1r27Zt7Ljjjlm333HHHdGuXbsYP358FXa1ZVi2bFl07tw5pk2bVqr9CgsLo3fv3jF27Nhi4w899FBccMEFsW7dulLVmzlzZnTp0qXU+22txo0bF927d48vv/wyIiI6deoUDzzwQLz55pvxySefxKxZs+LVV1+N66+/Ppo3b562/6effhqdO3eO0aNHV3HnAFuJzX3rMQBQdl999VWqevXqGadDOvDAAzdLTxdddFHWKZr+9re/FXvu+vXrU1988UXaI9O+JZ32+auvvkqrd9RRR5VpyqtUKpVat25dxh6bN2+ecXrIgoKCVIsWLTaMNWnSJHXJJZekXnrppdT777+f+uyzz1Lvvvtu6u677041bdo0rUamaZ+ffPLJtOc1b9489dZbb5XoNTz22GOp+vXrl/l7CrA12xqnfS7tuWfZsmWpZs2abRhr37596oEHHki99dZbqQULFqQ+/PDD1JgxY1JXX3111ukwq1Wrlho/fny5XuvgwYMz1j788MNTM2fO3Oi+8+bNS3Xu3Dnj/lddddUmj7169eq079fQoUMz1nv++eczfn9//Pjyyy/L9f0A2JLcdNNNJZpWt3379qknn3wyVVhYWCV9LV++PO3371VXXbXJ6YKzWbZsWVq9Pn36ZKyXSqVSP/vZzzZ83aZNm9T999+f+ve//5369NNPU7NmzUqNHj06de6552adlninnXZKrVq1KpVKfTfVc40aNVIRkapZs2aqb9++qRdeeCH1/vvvpxYuXJiaNm1a6sEHH0wddthhWb//Q4YMKdX3b968eRk/E2jWrFmFLOswbty4YnXvuuuuEu/bv3//jK9xwoQJqUaNGm3o8x//+MdG6xQWFqYuueSSjLXy8vJSkyZNKu/LBEgc4S8AbMWmTZuW9U1jt27dNktPG1tP6p577ilRjfKEv5n8+IP3ivjQP9OaRh07dkxdeOGFG76+8MILUwUFBVlrLF68OLX77rtvNPz9+uuv00LiWrVqbfJD9B+bNGlSqnbt2sJfYJuzNYa/2WQ795x77rmpiEjVrVs39de//nWjNRYtWpR1DcLOnTuXubeRI0emqlWrllazTZs2qZUrV5aoxpo1a1Ldu3fP2NvTTz9d6p6yrQP8/fqEANuSZcuWpXbZZZcSBcARkWrQoEHqvPPOS7300ktVFgR/L1toWJLwN5Ozzz47Y73/9//+XyoiUjVq1Ej94Q9/SK1fvz5rjcmTJ2e9gGrQoEGpNWvWpPbff/9URKT22Wef1NSpU7PWKioqSl155ZUZa22//fal/n536dIlY61Ro0aVqk4mp59+erH3ocuWLSvxvtl+jm3btk1FfBecz507t8T1brvttoz1WrRokfrmm2/K8vIAEqtGACRYqnB1rFv82eZugy1UjSZNo1qtvM3dRrn8eErFH2rcuHEVdvJfjRo1yrptwYIFVdhJ1ZszZ0688cYbERFx8803xy233LLR5++4444xaNCg6N27d9bnvPTSS/HZZ8V/j5188smx3377laq3du3axU033RTXXXddqfYDYMv2/bmnRo0aMWLEiOjatetGn7/TTjvF6NGjY++9946vv/662LZXX301Pv/889hpp51K1cOiRYvinHPOiVQqVWy8Xr168fzzz0e9evVKVCc3NzeefvrpaNmyZSxcuLDYtosuuig6d+4cDRo0KFVvAHynYcOG8eijj0bXrl1LNLXwihUr4qGHHoqHHnoo6tWrF127do0ePXpE9+7do2HDhlXQceW7/vrrIyJi6NChccEFF2z0uW3bto2HHnooevXqlbZt+PDh0aBBg5g5c2Y0adIkXnnlldhtt92y1qpevXoMHjw4pk6dGmPGjCm27csvv4y///3v8fOf/7zEr+O8886Ll19+OW38z3/+c5x44oklrvNjy5cvL7au8S9+8YsK+dm/9dZbERHxzDPPxB577FHi/a677rp4880348UXXy
w2PmfOnLjhhhvirrvuKndvAEkh/AUSbd3iz6LgrgGbuw22UPmXD4jcXffc3G2Uy6JFi7Ju21xvyDd23B+HmEnz6aefRkREt27dNhn8fq979+5RvXr1WL9+fcbtmdYwOuKII8rU3//8z/9E//79Y82aNWXaH4Atz/fnnquuumqTwe/3dt555/jlL38ZQ4cOLTa+fv36GDVqVJx33nml6uHyyy+PgoKCtPGbb745mjZtWqpaderUiYEDB8aZZ55ZbPyLL76IgQMHxqBBg0pVD4D/Ou6442LkyJHRq1evWL16dYn3++qrr2LEiBExYsSIyMnJiQ4dOkSPHj2iZ8+eGddj3Vp88skn0a1bt00Gv9879dRTY7/99ov//Oc/xcY/+OCDuPbaayMi4g9/+MNGg98fuummm9LC34iI5557rlThb48ePaJJkyaxePHiYuN///vfY+HChbHLLruUuNYPPfroo1FYWLjh6/PPP79MdTLp1atXtG/fvtT7DRo0KEaPHp32/vnee++NX//619GyZcuKahFgqyb8BYCt2I/v2PmhWrVqVWEnJTvuxvpNipycnLj33ntL/Py6detG48aNY8mSJRm3z5kzJ21su+22K1NvdevWjTZt2sSECRPKtD/JtaxwbaXV3r5mjcipVm2Tz/tyzboo+tFdgxWlfm6NyK2+6R5WrlkXayuph7o1cqJWTvVNPm/V2qIozHIxSHltl1M9atfIqZTabF75+fkb7l4qqR49eqSFvxER06dPL1Wd9957L5566qm08bp165b4w/Qf6927d/z2t7+N5cuXFxv/05/+FDfddFPUrVu3THWhNJYu/6rSajfIrxM5JTgnrCj4OoqKKueckF+vduTmbvqcUPDVt7F27abvEi2LunXyIq9W7iaf99XXq6Owkv5Wqb1dzai93eZ537S5/OxnP4vx48fHL3/5y5gxY0ap9y8qKorXXnstXnvttbjyyivjmGOOib59+0bfvn0jL2/rm1nrd7/7XYmfW61atejRo0da+Bvx3V2yBx98cPTs2bPE9Y4++uho2LBh2vlu6tSpJa4R8d3MGf369Uu7QKqoqCiGDRsWN998c6nqfe+hhx7a8O999tknOnbsWKY6mVx++eVl2m/fffeNn//85zFy5Mhi4+vXr48HH3ww7r777opoD2CrJ/wFgK3Yt99+m3VbzZo1q7CTkh13Y/0mxfHHHx977713qfaZP3/+hqkya9Qo/udZplD4ww8/LHN/LVu2jA8++CAiosTTcJJ8v59WeVOy39C6eTQqwQe7Q2d+FisKK+fD5d/st3PsVX/TF038dc6SmLuy5HfBlEafPXeItjts+v+5F+cviylLV1VKDyfs0iC6NjNlbhKddNJJsf3225dqn6OOOirjzBPvvfdeqeoMHTo0bbrniIiePXuWOaTNzc2NU045JR5++OFi4wUFBfH8889H3759y1QXSuPaQSM3/aQyGnjNKdG44abPCQMfeCmWraiciyevOr9LtGyx6Sne//T/XosPP168yeeVxTk920f7w/fa5PNG/G1KTHw7/YLIitCjc6vocXzrSqm9JWvTpk288847MWTIkBgyZEha+FhSqVRqQxB8ww03xKWXXhoXX3zxVnORzr777huHHnpoqfY56qijsm47/fTTo1oJLnr8XvXq1eOoo45Km+3pgw8+iLVr10Zu7qb/hv7eeeedF3fccUfaOXnYsGFx4403RvXqm77g5Idef/31YiF3aWcF2Zhdd9012rZtW+b9TznllLTwN+K7O5Vvu+22Ml8sDZAkpfutDwBsNUrzprOqjpvpw+Gk6dOnT6n3qVWrVuTl5UVeXl5a+JspTH/iiSfKfBf1sGHDYunSpbF06dKMd3wBsPUpzdSQ36tdu3bsvPPOaePfTyNdEmvWrMl4129ExE9/+tNS9/RD2T5cz/RhLwCll5ubG9ddd13Mnz8/7rzzzthzz/ItibRkyZK4/vrr44ADDoiXXnqpgrqsXD
/72c9KvU+LFi0qtF6m7/vatWvj888/L3VfnTp1ShufP39+xvWAN+WHd/3WrFkz+vXrV+oa2Rx33HHl+ryia9euGcPsFStWxCuvvFKe1gASQ/gLAFuxjV3RurnWdf3hmkA/Vrt27SrsZPM48sgjK7TeHnvskTa2aNGiOO200+Kbb76p0GMBsHU67LDDyrRfpjUJv/qq5FPdvvHGG7Fy5cqM2zp06FCmnr534IEHZhwfP378NnExGUBVqVOnTlx++eUxZ86cmDhxYlx00UWx006bvis8m/nz50f37t1LvRzB5nDIIYeUep9s35u8vLzYd999S12vWbNmGcdLcz7+XrY1ef/85z+Xqs6KFSvimWee2fD1ySefHI0bNy51P9lkO8eXVKNGjbKG8JMmTSpXbYCkEP4CwFZsY2Hq6tWVM3Xppmws/K1Tp04VdlL1ateuHXvttemp60rjuOOOyzg+atSoaN26dTz77LM+BAfYhtWrVy923XXXMu3boEH6NOCl+bB5/PjxGcebN28eTZo0KVNP38t2Pl22bFnMmVM5078CbOuOPPLIuO++++Kzzz6LSZMmxbXXXhv77bdfmWrdfvvtcfHFF1dwhxWrLK8t23vwli1blnpq5Y3VK0v4my2kHT16dCxatKjEdR577LFiSzZV5JTPERH7779/uWvss88+GcfffPPNctcGSALhLwBsxTJN1/i9FStWVGEn/7Wx9aKaNm1ahZ1UvaZNm5bpDf/GnH322ZGfn59x2+zZs+PUU0+NvffeOwYNGhSffPJJhR4bgC3fxv4W2JRMM4iU5uKxadOmZRwv79ShERH169fPum3mzJnlrg9AdtWqVYt27drF7bffHjNnzozZs2fHkCFD4qijjirVdL333Xdf1uUBtgRlucM52+xbZb1bOlv4W5aLuWvWrBlnn3122vi6deviL3/5S4nr/HDK57322iuOPfbYUveyMZkuPiutvffeO+N4tr9NALY1wl8A2Iplmq7xe0uXLq3CTv5r2bJlWbftvvvuVdfIZrCxD6rLKj8/f5Nr886ZMyeuvfba2GOPPaJdu3YxcODA+OCDDyq8FwC2PHXr1i3zvhtbPqIkst2BWxHn+5ycnKz9udgJoGrttddeccUVV8SECRNi/vz5MWTIkKx3Xv7YxRdfvNlmpdqU8pxDK6pWtjC9rLM7ZbtL9+GHHy5RzYkTJ8aMGTM2fP2rX/2qXOvzZlKvXr1y12jUqFHG8YKCgli3bl256wNs7Wps7gYAgLJr0aJFVKtWLeObuAULFmyGjjZ+3GxX5yZFRX548ENnnnlmLF26NK644oqNvmFPpVIxefLkmDx5clx33XXRsmXLOPnkk+OMM84o97pKJNsNrZtXWu3ta5bsLcfF+zeNokqawrx+bsl6OKvFjrG2knqoWyOnRM/rsWuj6NKs/HdDZLJdzrZ57W9Ff6haWuvXr6/U+uVZUqG8s1Vkm0Ly0UcfjUcffbRctTfm888/r7Ta8L2B15xSabUb5Jfs/9trL+wWRUWV8zskv1725WN+6NdndIy1aysnSKlbJ69Ez+v508OjR+dWldJD7e1qVkrdJGvWrFlcccUVcfnll8fLL78cN954Y7z99ttZn//FF1/EY489VuFTB1eEilyWaEtZ4qhly5ZxzDHHpC3NMG/evHjllVeiS5cuG93/h+sD5+bmxjnnnFPhPW5s+aqS2tj3e8WKFbHDDjuU+xgAWzPhLwBsxerVqxf77rtv/Oc//0nbNnfu3M3QUcRHH32UcbxatWrRtm3bKu6malX0FdE/dNlll0Xr1q3jV7/6VYl/trNmzYqBAwfGwIED49BDD43LLrssTjvttMjJKVkIxbajUa3czd1CiUPiylR/C+ihbm5O1A3/j1akGjUy/1zXrl1bJcdfs2bNhn/n5lb8/2sVvdxAaaxatWqzHHflypWb5bhsWxo3LP+daeVV0pC4MuXXK98MARWhXp28qFfCoJiqU61atejatWt06d
Ilhg4dGldccUXWOy6feeaZLTL8rcj3b5X5XrC0zjvvvLTwN+K76Zw3Fv4WFBTEiBEjNnzdo0eP2HHHHSulx/IS/gJs3Ob/dAOgEtVo0jTyLx+wudtgC1WjSTLWn23Xrl3G8Pfrr7+OefPmVflUy9nW4dtnn30qZG2fbVmnTp3igw8+iOHDh8fdd99dqqmd33nnnTjzzDPjlltuiQceeCA6d+5ciZ0CbDlq1sx8V9e3335bJcf/4XGy9bK1+mGwXZW21OlDAbZF1apVi0suuSSaNGkSffr0yficN998M4qKilyEWkVOPfXUuOSSS2LFihXFxl988cVYvHhxNGnSJON+jz/+eHzzzTcbvj7//PMrtc/y2FjYXlWzuwBsyYS/QKJVq5UXubvuubnbgErVrVu3GD58eMZtU6ZMqdLwd8mSJTF//vyM20488cQq6yPJcnNz4/zzz4/zzz8/Jk2aFI899lg888wz8cUXX5Ro/48++iiOP/74uPbaa+O2227boq5QB6gM2abkLygoiPXr11f6nbM//OB1S5kSsqLUqlUrYxD7q1/9Km6//fZKO25enjsAAbY0vXv3jv/7v/+LF198MW3bypUr47PPPovmzStvqRH+Ky8vL84888z4wx/+UGx87dq1MXz48Lj22msz7vfQQw9t+Pcee+wRxx9/fKX2WR4bm33ERecAEdvmok8AkCDdunXL+iHo66+/XqW9vPHGG1m3nXJK5a2btimVvd7i5nLkkUfGH//4x/jss8/ipZdeirPOOivq169fon0HDhwYV199dSV3CLD5ZZv2L5VKlfjCmbL68TEaN25cqceratmC9aKiomjcuHGlPbIdF4DN68ILL8y6bfny5VXYCdnu2n344Ycz3hk7efLkmD59+oavzz333Eq7ULgiZl/5+uuvs24T/gIIfwFgq1e3bt049dRTM2576aWXqrSXv//97xnHW7ZsGUceeWS5apdn6qavvvqqXMfe0tWoUSO6du0ajz76aCxevDiefvrp6N69+ybfrA8ZMiReffXVKuoSYPNo2LBh1nV/P/nkk0o99uLFi4vdGbulrptXVk2bZl5CY3OtBQxAZh9//HHceOONGx5vv/12pRynffv2Wd+DJP092ZbmgAMOyPgefM6cOTFmzJi08T//+c8b/l2jRo345S9/WWm9VcR/C9kuJqhXr17k5uaWuz7A1k74CwAJcNlll2Ucnz17dkydOrVKelizZk08//zzGbf99re/LfdVw+VZ329b+hA6Ly8vevbsGX/7299i9uzZ0a9fv41OaXrrrbdWYXcAVa969eqxxx57ZNxWmrXTy+I///lPsa9btGhRqceratlez5dfflm1jQCwUZ988kn8/ve/3/B47bXXKuU49erVi/z8/IzbTNlf9c4777yM4z8MeiO+m5b7qaee2vD1iSeeGDvvvHOl9VURfyfMnj0743irVq3KXRsgCYS/AJAAhx56aHTv3j3jtmHDhlVJDy+88EIsW7YsbbxZs2bRr1+/UtWqWbNm2lhZw99UKhWLFi0q075buxYtWsTw4cPjjTfeyPrmffz48cXWowRIopYtW2Ycr6w7n773zjvvFPt6n332qdTjVbVDDjkk43hlh+oAlE9lTsG83XbbZRxP2uwXW4PevXtnXBbohRdeKLYsxf/93/8Vm0Y525TRFWXWrFnlrvHhhx9mHG/Xrl25awMkgfAXABLirrvuyji90fDhw2PJkiWVfvyBAwdmHB80aFDWDwCyqVevXtpYWa8Onjdv3kbXA9oWHHnkkTFq1KjIyclJ27Z+/fq0cAIgaQ4//PCM45U99f2Pp1XM1sfW6uijj844vmDBgli5cmUVdwNASc2cObNS6qZSqVi6dGnaeF5eXuyyyy6Vckyyq127dpxxxhlp42vWrIlHHnlkw9cPPfTQhn/vuuuu0aVLl0rta8aMGeXaf8WKFTFnzpyM24S/AN8R/gJAQrRs2TKuvfbatPFvvvkmBgwYUK
nHfuqppzIGiMcff3ycdtpppa5Xt27dtLH58+eXqbfKvqursg0YMGDDY+7cuWWuc9hhh8VJJ52UcVumD2gAkiRbSDljxoz46KOPKuWYBQUFMXbs2A1f16hRI9q2bVspx9pcOnToEA0aNMi4bfLkyRVyjLvvvjtq1Kix4VGvXr1t/qIugPIaP358pFKpCq/7ySefxNq1a9PGDz/88IwXolL5st3F+33gO2XKlGLv5c8999yNLhtUEcaNG1eu/V9++eUoKipKG69Xr16lB9cAWwvhLwAkyM033xxHHnlk2viDDz4Yr7/+eqUcc+nSpfHb3/42bXyHHXaIv/71r2Va6zfTVeGffvppFBYWlrrWc889V+p9tiS33HLLhkd5129u3759xnHrbwFJd/TRR2ddg/DH695VlL/+9a/Fliw45phjMl7ctDWrUaNGnH766Rm3PfvssxVyjOeffz6Kioo2PDp16hR16tQpcX+ZVEbgAbA1WbZsWbzyyisVXnfUqFEZx3/6059W+LEomdatW2eceWT27NkxduzYYn8H5eTkxLnnnlvpPX344Yfx7rvvlnn/bO/x+/btm7i/tQDKSvgLAAlSo0aNGDFiRDRr1qzYeCqVil69esXChQsr9Hhr166Nnj17xuLFi4uN16pVK0aMGBE77bRTmeoedNBBaWPr1q2Lt956q1R1li9fnvUDiK3Rxx9/XK79s31Ynm09YICkyM3NjVNOOSXjtj/+8Y8Vfn5cuXJl2nIIPXv2rNBjbCn+53/+J+MdQs8991yZLtr6offffz/t4rXSfB+zfQC8Zs2aje536aWXRufOnTc8fngHN0BS/O///m+F1luzZk3cd999aeM1a9aMvn37VuixKJ1sd//edddd8cQTT2z4unv37lU2Pff9999fpv0++uijrBeYXXjhheVpCSBRhL8AkDC77LJL/OMf/0ibhvHzzz+PTp06lXn65B8rLCyMU089NW3KpurVq8fjjz8eHTt2LHPtVq1aZRwvbZD7+9//Pr766qsy97GlKe/V+Zl+9nl5ebHffvuVqy7A1uCSSy7JOP7111/HWWedlXGayrJIpVJxwQUXxGeffbZhbPvtt0/sB9/77rtvnHXWWWnjS5YsiXvuuadctW+55ZZid+nuuuuu0adPnxLvX79+/Yzjq1at2uh+48aNizFjxmx4fPPNNyU+JsDWYuLEiXHbbbdVWL3+/fvHhx9+mDZ+7rnnpl2cTNU67bTTMl4QNXr06GLnxPPOO6/Keho2bFiZ1v694YYbMk75fN5552W8iBxgWyX8BYAEOuCAA2Ls2LHRpEmTYuOzZ8+ONm3axD/+8Y9y1Z89e3a0b98+XnzxxWLjNWvWjCeffDJOPfXUctX/6U9/mnG66D//+c8lXp/2b3/7W9x7772Rk5OTmA8b/vWvf8X06dPLtG8qlUr7eUVEdOnSJerVq1fe1gC2eK1bt856fnr11VfjF7/4RaxcubJcxygsLIxf/epXxe6iiYi48sorEz0N4eDBg2OHHXZIG7/tttvKPGvFs88+GyNGjCg2dt1110Vubm6Ja+yzzz4ZxxctWpR1n1QqFXPnzi02VtaZTAC2dDfeeGMMGTKkXNPhr1+/Pm6++ea0GS8iIpo3bx633357eVqkAtStWzdOO+20jT6nWbNm0b179yrpp1GjRlFUVBSnnHJKfPHFFyXeb+jQofH000+nje+yyy4xePDgimwRYKsn/AWAhGrVqlVMnDgx7S7aJUuWRLdu3aJHjx7x5ptvlqrmvHnz4rLLLosDDzww3n777WLbdtxxx/jHP/5RIdNa7rrrrhnXpy0oKIgePXpsMgB+7LHHonfv3lFUVBT9+vWLFi1alLunLUEqlYqzzjorCgoKSr3vfffdl3ZldbVq1eLKK6+sqPYAtnh33XVXNGzYMOO2UaNGxUEHHRSPP/54qe8CXr9+fbz44otxyCGHxF/+8pdi2/bff//E/65t3LhxPP7445GTk1NsfO
XKlXHCCSekLQ+xKePGjUu7m7hTp05Zp63MplmzZtG4ceO08Y2tM/jmm28WmzVku+22i4MPPrhUxwXYWqRSqbjqqqvi2GOPTZvRqST7vvrqq3HEEUfErbfemrY9Pz8/Ro0aFfn5+RXULeWxqbt6f/nLX6adxyvLPffcE9WrV4/Zs2fHT37yk7TPFn5s/fr18b//+79x2WWXpW2rUaNGPPLII/47A/iRGpu7AQCg8uy5554xefLkuOaaa+K+++4rNj3SqFGjYtSoUbHvvvtGly5d4ogjjoi99947GjduHLVr147CwsL48ssvY+7cuTFt2rQYM2ZMvPHGGxmvCu/WrVsMGzasQteOvf766zNeeTxp0qRo2bJl/PrXv44TTjghmjdvHnl5efH555/HlClT4rHHHosJEyZExHdXAN9xxx0Z13lcv359xhC5QYMGaW96V6xYkTa11Pr169P2Xbt2bdZgOj8/v1R3K2Xz7rvvxlFHHRXDhw+Ptm3bbvL5a9asicGDB8dNN92Utu3CCy+MDh06lLsngK1F8+bN46mnnooTTzwx43q08+fPjzPPPDOuuOKK+NnPfhYdOnSIAw88MJo3bx75+flRq1atWLNmTaxcuTIWLlwYM2fOjEmTJsWLL76YcWr9Ro0axciRI6NWrVql7rU8557c3Ny0D0ELCwvTlkLItiZvpnNZw4YNM67t+70TTjghHnjggfj1r39d7G+Fjz76KA4//PC4//77o0ePHln3j/junHXnnXfGgAEDiq3Lu9NOO8Xjjz++0eNnc9JJJ8WwYcOKjb3wwgtx1113pd2NXVRUFDfffHOxsS5dulTI+RtgSzZu3LgYN25c7L333nH88cfH4YcfHvvtt19sv/32Ub9+/UilUvHVV1/F8uXLY+bMmTF16tQYNWpULFiwIGO93XffPV544YUSXTyT6XyXbbr9FStWpP3urlevXrHz7KpVq2L16tXFnlOa892PLxrK1F8mhYWFafVq164dtWvX3vD12rVr0y7kzbYUQUFBQVq9H7/W0mjTpk20bt06pk2blratevXqce6555apbll06NAh7rrrrrj00ktj5syZccQRR8Spp54avXr1itatW8fOO+8chYWF8emnn8a//vWvrFNE5+TkxF/+8pfo3LlzlfUOsNVIAQDbhGnTpqVOOOGEVERU2GO//fZLjRw5stJ67tWrV5l7q127dmrChAmpVCqV6tixY4n3mzp1alofu+22W7m/V2PHji3T92BjNY866qjUHXfckRozZkzqgw8+SH322WepBQsWpGbMmJF69tlnU5dffnlql112ybhvnz59UmvXri3HTwdg6/XPf/4z1aBBgwo9J/74seuuu6bee++9MvdYnnNPx44d0+oNHz68XK/n448/LlHfjzzySKpWrVoZaxx22GGp22+/PTVu3LgN562ZM2emRo0albryyitTTZo0Sdtnt912S82ePbvM38cpU6akqlevnla3bdu2qdGjR6fmz5+f+uSTT1KjR49O+3uhWrVqG/6WANiaffvtt6mnn3461adPn1T9+vUr7dxXo0aN1IUXXpj68ssvS9xbed9rDR8+vFi9s88+u1z1KrK//v37F6s1duzYCn2tpXX//fdnrNutW7dy1c2mf//+G/2b4p577sl4ji7Jo0GDBqlRo0ZVSt8ASeDOXwDYRrRq1SpefvnlmD59ejz44IPxzDPPlHj93B+qVatWdO3aNc4999w48cQTM67NW1GGDx8eK1asiH/+85+l2q9BgwbxzDPPxFFHHVVJnVWdgQMHxsMPPxwfffRR2raJEyfGxIkTS1WvVq1acc0110T//v3LdAcVQBJ07tw53nvvvbj44ovjueeeq9DaOTk50a9fv7jzzju3ySkIzz777Nh///3jnHPOiffff7/YtrfffnuTUzv+UI8ePeKBBx6Ipk2blrmfww47LK688sq44447io2/9dZbceKJJ2503xtvvDERf0sA5OXlRc+ePa
Nnz56xZs2aGDNmTLz66qvxxhtvxDvvvFNstoWy2GWXXaJPnz7xm9/8Jvbcc88K6pqKdsYZZ8RVV12Vdmd1aZdVqCi//e1vo127dnHxxRfHv//97xLtU61atejdu3cMHjw4mjVrVskdAmy9hL8AsI1p1apVPPDAAzF06NB46623YsKECfH222/HnDlzYsGCBbFq1ar49ttvo2bNmlGnTp3YaaedYs8994wDDzww2rdvH0cffXTUr1+/SnqtXbt2jBo1Km677ba488474+uvv97kPt26dYv77rsvMR86XHPNNXH11VfH2LFj48knn4yXXnopPv3001LXqV27dvziF7+IG2+8MfbZZ59K6BRg67LLLrvEyJEjY8qUKfHggw/GiBEjYuXKlWWu17hx4zjttNPioosuipYtW1Zgp1ufNm3axNSpU2PYsGFx5513ZryAKZtq1arFMcccs2Hq7YowaNCgyM/Pj1tvvTVtKtBM6tSpE4MGDYqLLrqoQo4PsCWpWbNmdOvWLbp16xYREatXr44pU6bEzJkzY86cOfHRRx/Fxx9/HF9++WWsWrVqwzTKubm5Ubt27WjcuHE0bdo09t5772jdunV06NAhWrVqVakXBVMx8vPzo1evXvHII49sGNt55503eTFUZTriiCM2fC4xcuTIeP3112PWrFnF/ibLz8+PAw88MI4//vg488wzE/NeH6AyVUulMizcBwCwhVmyZEk899xz8corr8SMGTNi6dKlsXLlyqhbt27sueee0aFDh+jbt2+0adNmc7da6WbMmBETJkyI999/P2bMmBGffvppFBQUREFBQaxfvz7q168f+fn5sdtuu8UhhxwSbdq0iZ/+9KdRr169zd06wBZr3bp18eabb8bEiRPjvffeizlz5sSiRYti+fLlsXr16li7dm3UrFkz8vLyonHjxrHzzjvHXnvtFQcffHB06NAhDjvsMDMqZDFp0qR4+eWXY/LkyTF79uxYvHhxfPvtt5GXlxfbb799NG7cOFq1ahWHH354dO/ePVq0aFEpfXzxxRfx6KOPxuuvvx7vvfdeLF++PFatWhW1a9eORo0aRevWraNTp05x1llnxfbbb18pPQDAtmLAgAFxyy23pI1//PHHsfvuu29038LCwvjmm2+iVq1axdZNBqBkhL8AAAAAAECFKU/4C0D5uCwZAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABKixuRsAAAAAAAC2TitWrIiioqJiY998803W59atW7fYWO3ataN27dqV1h/AtqZaKpVKbe4mAAAAAACArc/uu+8en3zySZn379+/fwwYMKDiGgLYxpn2GQAAAAAAACABhL8AAAAAAAAACWDaZwAAAAAAAIAEcOcvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAE
ACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEkD4CwAAAAAAAJAAwl8AAAAAAACABBD+AgAAAAAAACSA8BcAAAAAAAAgAYS/AAAAAAAAAAkg/AUAAAAAAABIAOEvAAAAAAAAQAIIfwEAAAAAAAASQPgLAAAAAAAAkADCXwAAAAAAAIAEEP4CAAAAAAAAJIDwFwAAAAAAACABhL8AAAAAAAAACSD8BQAAAAAAAEgA4S8AAAAAAABAAgh/AQAAAAAAABJA+AsAAAAAAACQAMJfAAAAAAAAgAQQ/gIAAAAAAAAkgPAXAAAAAAAAIAGEvwAAAAAAAAAJIPwFAAAAAAAASADhLwAAAAAAAEACCH8BAAAAAAAAEuD/A9SYrqTYZLngAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import h5py\n", + "import torch\n", + "import numpy as np\n", + "from matplotlib.ticker import FormatStrFormatter # 用于格式化 y 轴刻度标签\n", + "import matplotlib.lines as mlines # 用于创建自定义图例\n", + "\n", + "# 设置全局字体为 Times New Roman\n", + "plt.rcParams[\"font.family\"] = \"DejaVu Serif\"\n", + "# def lat_np(j, num_lat):\n", + "# return 90. - j * 180. / (num_lat - 1)\n", + "\n", + "def lat_np(j, num_lat):\n", + " return 10. + j * (42. - 10.) / (num_lat - 1)\n", + "\n", + "def latitude_weighting_factor(j, num_lat, s):\n", + " return num_lat * np.cos(np.pi / 180. * lat_np(j, num_lat)) / s # 使用更精确的 π 值\n", + "\n", + "def weighted_acc(pred, target):\n", + " \"\"\"\n", + " 计算纬向加权的 ACC,并返回适配绘图函数的值。\n", + " \"\"\"\n", + " num_lat = np.shape(pred)[0] # 纬度数量\n", + " lat_t = np.arange(0, num_lat)\n", + "\n", + " # 计算纬度权重\n", + " s = np.sum(np.cos(np.pi / 180. * lat_np(lat_t, num_lat)))\n", + " weight = np.reshape(latitude_weighting_factor(lat_t, num_lat, s), (num_lat, 1)) # 调整为 [lat, 1]\n", + "\n", + " # 计算加权的 ACC\n", + " numerator = np.nansum(weight * pred * target)\n", + " denominator = np.sqrt(\n", + " np.nansum(weight * pred ** 2) * np.nansum(weight * target ** 2)\n", + " )\n", + " # 防止分母为零\n", + " if denominator == 0:\n", + " return np.nan\n", + "\n", + " acc = numerator / denominator\n", + " return acc\n", + "\n", + "var_idex = {\n", + " \"U\": 0, \"V\": 1\n", + "}\n", + "\n", + "# 定义绘图的颜色\n", + "color = {\n", + " 0: '#E64B35CC',\n", + " 1: '#4DBBD5CC',\n", + " 2: '#3C5488CC',\n", + " 3: '#00A087CC',\n", + " 4: '#F39B7FCC',\n", + " 5: '#8491B4CC',\n", + " 6: '#91D1C2CC',\n", + "}\n", + "\n", + "model_index = {'Ours': 0, 'Unet': 1, 'Simvp': 2}\n", + "\n", + "\n", + "datasets = {\n", + " 'Ours': 
h5py.File('/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/Kuro_Triton_exp1_128_20250322_forecast_results/forecast_merged.h5'),\n", + " 'Unet': h5py.File('/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/Kuro_Unet_exp_128_20250324_forecast_results/forecast_merged.h5'),\n", + " 'Simvp': h5py.File('/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/Kuro_Simvp_exp_128_20250324_forecast_results/forecast_merged.h5'),\n", + "}\n", + "\n", + "\n", + "ics = 73\n", + "var_name = ['U', 'V'] \n", + "variables = [var_idex[name] for name in var_name]\n", + "titles = ['ACC-U', 'ACC-V']\n", + "model_names = ['Ours', 'Unet', 'Simvp']\n", + "time_steps_per_day = 1\n", + "\n", + "acc_per_model = {model: {var: {'mean': [], 'std': []} for var in variables} for model in model_names}\n", + "ds_climate_mean = np.load('/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/inference_results_open/climate_mean.npy')[:, :, :256:2, :256:2]\n", + "for model in model_names:\n", + " ds = datasets[model] # 获取对应模型的数据集\n", + " for var in variables:\n", + " for day in range(1, 121): # 从第1天到第10天\n", + " time = day * time_steps_per_day\n", + " acc_day_list = [] # 用于收集每个初始条件的 ACC\n", + "\n", + " for ic in range(ics):\n", + " # 获取预测和真实值的数据\n", + " climate_mean_idx = 5*ic+time\n", + " if climate_mean_idx < 364:\n", + " climate_mean = ds_climate_mean[5*ic+time, var, :, :]\n", + " else:\n", + " climate_mean = ds_climate_mean[5*ic+time-365, var, :, :]\n", + " \n", + " pre = torch.tensor(ds['prediction'][ic, time-1, var, :, :]).numpy()\n", + " tar = torch.tensor(ds['label'][ic, time-1, var, :, :]).numpy()\n", + " pre = pre - climate_mean\n", + " tar = tar - climate_mean\n", + " \n", + "\n", + " # 计算 ACC\n", + " acc = weighted_acc(pre, tar)\n", + " acc_day_list.append(acc)\n", + "\n", + " # 计算该天的平均值和标准差\n", + " acc_mean = np.mean(acc_day_list)\n", + " acc_std = np.std(acc_day_list)\n", + "\n", + " 
acc_per_model[model][var]['mean'].append(acc_mean)\n", + " acc_per_model[model][var]['std'].append(acc_std)\n", + "\n", + "days = np.arange(1, 121) \n", + "\n", + "fig, axes = plt.subplots(1, 2, figsize=(8, 4), dpi=300) \n", + "\n", + "plt.rcParams.update({'font.size': 15})\n", + "\n", + "for i, (var, title) in enumerate(zip(variables, titles)):\n", + " ax = axes[i] \n", + " for model in model_names:\n", + " idx = model_index[model]\n", + " mean = np.array(acc_per_model[model][var]['mean'])\n", + " std = np.array(acc_per_model[model][var]['std'])\n", + "\n", + " lw = 2.5 if model == 'Ours' else 1.5\n", + " ls = '-' if model == 'Ours' else '--'\n", + "\n", + " ax.plot(days, mean, label=model, color=color[idx], linewidth=lw, linestyle=ls)\n", + "\n", + " ax.fill_between(days, mean - std, mean + std, color=color[idx], alpha=0.2)\n", + "\n", + " ax.set_xlabel('Forecast Days', fontsize=15)\n", + " ax.set_ylabel('ACC', fontsize=15)\n", + " ax.set_title(title, fontsize=15)\n", + " ax.grid()\n", + "\n", + " ax.yaxis.set_major_formatter(FormatStrFormatter('%.3f'))\n", + "\n", + " ax.tick_params(axis='both', which='major', labelsize=12)\n", + "\n", + "plt.tight_layout()\n", + "plt.savefig('visual_acc.jpg', dpi=300, bbox_inches='tight')\n", + "plt.show()\n", + "\n", + "fig_legend = plt.figure(figsize=(8, 1), dpi=300)\n", + "legend_handles = []\n", + "for model in model_names:\n", + " idx = model_index[model]\n", + " lw = 2.5 if model == 'Ours' else 1.5\n", + " ls = '-' if model == 'Ours' else '--'\n", + " line = mlines.Line2D([], [], color=color[idx], linewidth=lw, linestyle=ls, label=model)\n", + " legend_handles.append(line)\n", + "\n", + "fig_legend.legend(handles=legend_handles, loc='center', ncol=len(model_names), fontsize=15, frameon=False)\n", + "plt.axis('off')\n", + "plt.show()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": 
"ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/Exp3_Kuroshio_forecasting/plt_triton/nmi_vis.ipynb b/Exp3_Kuroshio_forecasting/plt_triton/nmi_vis.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ea3cb81ba1c0e1ca6d81d8c21e5d37115f865d2a --- /dev/null +++ b/Exp3_Kuroshio_forecasting/plt_triton/nmi_vis.ipynb @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4b1d0f49ae0e5234edb7ffeacc89fbe37a5acfa9cd4f0e60292d63e993644bca +size 15197104 diff --git a/Exp3_Kuroshio_forecasting/train_Kuro_simvp.py b/Exp3_Kuroshio_forecasting/train_Kuro_simvp.py new file mode 100644 index 0000000000000000000000000000000000000000..a2b07b4ec0f2b1e3b6f0f073eee3a8200579f7ef --- /dev/null +++ b/Exp3_Kuroshio_forecasting/train_Kuro_simvp.py @@ -0,0 +1,185 @@ +import os +import random +import torch +import numpy as np +import torch.nn as nn +import torch.optim as optim +import torch.utils.data as data_utils +import torch.distributed as dist +import netCDF4 as nc +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from model_baseline.simvp import * +from torch.optim.lr_scheduler import CosineAnnealingLR +# from dataloader_api.dataloader_kuroshio_ruiqi import * +import torch.distributed as dist +import logging +from tqdm import tqdm +from torch.utils.data.distributed import DistributedSampler +from torch.optim.lr_scheduler import CosineAnnealingLR +# Setup logging +backbone = 'Kuro_Simvp_exp1_20250224' +logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/logs/{backbone}_training_log.log', + level=logging.INFO, + format='%(asctime)s %(message)s') + +# Set a specific seed +seed = 42 +def set_seed(seed): + random.seed(seed) + np.random.seed(seed) + 
torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False +set_seed(seed) + +# ========================== Distributed Training Setup ========================== +dist.init_process_group(backend='nccl') +local_rank = int(os.environ['LOCAL_RANK']) +torch.cuda.set_device(local_rank) +device = torch.device("cuda", local_rank) +num_gpus = torch.cuda.device_count() + +def reduce_mean(tensor, nprocs): + rt = tensor.clone() + dist.all_reduce(rt, op=dist.ReduceOp.SUM) + rt /= nprocs + return rt + +# ============================== Data Loading ============================== +from dataloader_api.dataloader_kuroshio_256 import * + +config = { + 'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc', + 'input_steps': 10, + 'output_steps': 10, + 'batch_size': 2, + 'val_batch_size': 2, + 'num_workers': 4, + 'seed': 42 +} + +train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config) + + +for sample_input, sample_target in train_loader: + print(sample_input.shape, sample_target.shape) + print(f"Input data range: [{sample_input.min():.2f}, {sample_input.max():.2f}]") + print(f"Existence of NaN values: {torch.isnan(sample_input).any().item()}") + print(f"Existence of Inf values: {torch.isinf(sample_input).any().item()}") + print("mean, std", data_mean, data_std) + break +# ============================== Model Setup ============================== +model = SimVP(shape_in=(10, 2, 256, 256), hid_S=64, hid_T=128, output_dim = 2) + +model = model.to(device) +model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=True) + +# ============================== Criterion and Optimizer ============================== +criterion = nn.MSELoss() +optimizer = optim.Adam(model.parameters(), lr=1e-2) + +num_epochs = 2000 +scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0) + +# 
============================== Training, Validation, and Testing Functions ============================== +def train(model, train_loader, criterion, optimizer, device): + model.train() + train_loss = 0.0 + for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + optimizer.zero_grad() + outputs = model(inputs) + loss = criterion(outputs, targets) + loss.backward() + optimizer.step() + train_loss += loss.item() * inputs.size(0) + return train_loss / len(train_loader.dataset) + +def validate(model, val_loader, criterion, device): + model.eval() + val_loss = 0.0 + with torch.no_grad(): + for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + loss = criterion(outputs, targets) + val_loss += loss.item() * inputs.size(0) + return val_loss / len(val_loader.dataset) + +def test(model, test_loader, criterion, device): + path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results' + model.eval() + test_loss = 0.0 + all_inputs = [] + all_targets = [] + all_outputs = [] + + with torch.no_grad(): + for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0): + inputs = inputs.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(inputs) + + # Collect results + all_inputs.append(inputs.cpu().numpy()) + all_targets.append(targets.cpu().numpy()) + all_outputs.append(outputs.cpu().numpy()) + + loss = criterion(outputs, targets) + test_loss += loss.item() * inputs.size(0) + + all_inputs = np.concatenate(all_inputs, axis=0) + all_targets = np.concatenate(all_targets, axis=0) + all_outputs = np.concatenate(all_outputs, axis=0) + + if local_rank == 0: + np.save(f'{path}/{backbone}_inputs.npy', all_inputs) + 
np.save(f'{path}/{backbone}_targets.npy', all_targets) + np.save(f'{path}/{backbone}_outputs.npy', all_outputs) + + return test_loss / len(test_loader.dataset) +# ============================== Main Training Loop ============================== +best_val_loss = float('inf') +best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth' + +if local_rank == 0 and os.path.exists(best_model_path): + try: + logging.info('Loading best model from checkpoint.') + checkpoint = torch.load(best_model_path, map_location=device) + model.load_state_dict(checkpoint) + except Exception as e: + logging.error(f'Error loading model checkpoint: {e}') + +for epoch in range(num_epochs): + if local_rank == 0: + logging.info(f'Epoch {epoch + 1}/{num_epochs}') + train_loss = train(model, train_loader, criterion, optimizer, device) + val_loss = validate(model, test_loader, criterion, device) + + scheduler.step() + + if local_rank == 0: + current_lr = optimizer.param_groups[0]['lr'] + logging.info(f'Current Learning Rate: {current_lr:.10f}') + + if val_loss < best_val_loss: + best_val_loss = val_loss + torch.save(model.state_dict(), best_model_path) + + logging.info(f'Train Loss: {train_loss * num_gpus:.7f}, Val Loss: {val_loss * num_gpus:.7f}') + +if local_rank == 0: + try: + model.load_state_dict(torch.load(best_model_path)) + test_loss = test(model, test_loader, criterion, device) + logging.info("Testing completed and best model saved.") + except Exception as e: + logging.error(f'Error loading model checkpoint during testing: {e}') + +dist.destroy_process_group() \ No newline at end of file diff --git a/Exp3_Kuroshio_forecasting/train_Kuro_triton.py b/Exp3_Kuroshio_forecasting/train_Kuro_triton.py new file mode 100644 index 0000000000000000000000000000000000000000..17bdcd5d57fc86c7da274a1a0d865a89d18a997c --- /dev/null +++ b/Exp3_Kuroshio_forecasting/train_Kuro_triton.py @@ -0,0 +1,191 @@ +import os +import random +import torch 
import os
import random
import logging

import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as data_utils
import torch.distributed as dist
import netCDF4 as nc
from tqdm import tqdm
from torch.utils.data.distributed import DistributedSampler
from torch.optim.lr_scheduler import CosineAnnealingLR

from model.Triton_model import *

# ============================== Logging ==============================
# Experiment tag: names the log file, the checkpoint, and the saved result arrays.
backbone = 'Kuro_Triton_exp1_20250224'
logging.basicConfig(filename=f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/logs/{backbone}_training_log.log',
                    level=logging.INFO,
                    format='%(asctime)s %(message)s')

# ============================== Reproducibility ==============================
seed = 42

def set_seed(seed):
    """Seed python/numpy/torch RNGs and force deterministic cuDNN kernels."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False

set_seed(seed)

# ========================== Distributed Training Setup ==========================
dist.init_process_group(backend='nccl')
local_rank = int(os.environ['LOCAL_RANK'])
torch.cuda.set_device(local_rank)
device = torch.device("cuda", local_rank)
num_gpus = torch.cuda.device_count()

def reduce_mean(tensor, nprocs):
    """All-reduce `tensor` across ranks and divide by `nprocs` (mean over processes)."""
    rt = tensor.clone()
    dist.all_reduce(rt, op=dist.ReduceOp.SUM)
    rt /= nprocs
    return rt

# ============================== Data Loading ==============================
from dataloader_api.dataloader_kuroshio_256 import *

config = {
    'data_path': '/jizhicfs/easyluwu/ocean_project/kuro/KURO.nc',
    'input_steps': 10,   # history frames fed to the model
    'output_steps': 10,  # frames the model must predict
    'batch_size': 2,
    'val_batch_size': 2,
    'num_workers': 4,
    'seed': 42
}

train_loader, val_loader, test_loader, data_mean, data_std = create_dataloaders(config)

# Sanity-check a single batch before training starts.
for sample_input, sample_target in train_loader:
    print(sample_input.shape, sample_target.shape)
    print(f"Input data range: [{sample_input.min():.2f}, {sample_input.max():.2f}]")
    print(f"Existence of NaN values: {torch.isnan(sample_input).any().item()}")
    print(f"Existence of Inf values: {torch.isinf(sample_input).any().item()}")
    print("mean, std", data_mean, data_std)
    break

# ============================== Model Setup ==============================
model = Triton(
    shape_in=(10, 2, 256, 256),
    spatial_hidden_dim=256,
    output_channels=2,
    temporal_hidden_dim=512,
    num_spatial_layers=4,
    num_temporal_layers=8)

model = model.to(device)
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[local_rank], find_unused_parameters=False)

# ============================== Criterion and Optimizer ==============================
criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters(), lr=1e-3)

num_epochs = 2000
scheduler = CosineAnnealingLR(optimizer, T_max=200, eta_min=0)

# ============================== Training, Validation, and Testing Functions ==============================
def train(model, train_loader, criterion, optimizer, device):
    """Run one optimisation epoch; return per-sample mean loss over this rank's shard."""
    model.train()
    train_loss = 0.0
    for inputs, targets in tqdm(train_loader, desc="Training", disable=local_rank != 0):
        inputs = inputs.to(device, non_blocking=True)
        targets = targets.to(device, non_blocking=True)
        optimizer.zero_grad()
        outputs = model(inputs)
        loss = criterion(outputs, targets)
        loss.backward()
        optimizer.step()
        train_loss += loss.item() * inputs.size(0)
    return train_loss / len(train_loader.dataset)

def validate(model, val_loader, criterion, device):
    """Evaluate without gradients; return per-sample mean loss over this rank's shard."""
    model.eval()
    val_loss = 0.0
    with torch.no_grad():
        for inputs, targets in tqdm(val_loader, desc="Validation", disable=local_rank != 0):
            inputs = inputs.to(device, non_blocking=True)
            targets = targets.to(device, non_blocking=True)
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            val_loss += loss.item() * inputs.size(0)
    return val_loss / len(val_loader.dataset)

def test(model, test_loader, criterion, device):
    """Evaluate on the test split and (rank 0 only) dump inputs/targets/outputs as .npy."""
    path = '/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/results'
    model.eval()
    test_loss = 0.0
    all_inputs = []
    all_targets = []
    all_outputs = []

    with torch.no_grad():
        for inputs, targets in tqdm(test_loader, desc="Testing", disable=local_rank != 0):
            inputs = inputs.to(device, non_blocking=True)
            targets = targets.to(device, non_blocking=True)
            outputs = model(inputs)

            # Collect results for the saved arrays.
            all_inputs.append(inputs.cpu().numpy())
            all_targets.append(targets.cpu().numpy())
            all_outputs.append(outputs.cpu().numpy())

            loss = criterion(outputs, targets)
            test_loss += loss.item() * inputs.size(0)

    all_inputs = np.concatenate(all_inputs, axis=0)
    all_targets = np.concatenate(all_targets, axis=0)
    all_outputs = np.concatenate(all_outputs, axis=0)

    if local_rank == 0:
        np.save(f'{path}/{backbone}_inputs.npy', all_inputs)
        np.save(f'{path}/{backbone}_targets.npy', all_targets)
        np.save(f'{path}/{backbone}_outputs.npy', all_outputs)

    return test_loss / len(test_loader.dataset)

# ============================== Main Training Loop ==============================
best_val_loss = float('inf')
best_model_path = f'/jizhicfs/easyluwu/ocean_project/NPJ_baselines/Exp_2_Kuroshio/checkpoints/{backbone}_best_model.pth'

# Resume from the best checkpoint if one exists.
# BUGFIX: the load must run on *every* rank, not only rank 0 — it happens after
# DDP wrapping, so there is no later parameter broadcast and a rank-0-only load
# would leave the replicas permanently out of sync.
if os.path.exists(best_model_path):
    try:
        logging.info('Loading best model from checkpoint.')
        checkpoint = torch.load(best_model_path, map_location=device)
        model.load_state_dict(checkpoint)
    except Exception as e:
        logging.error(f'Error loading model checkpoint: {e}')

for epoch in range(num_epochs):
    if local_rank == 0:
        logging.info(f'Epoch {epoch + 1}/{num_epochs}')
    train_loss = train(model, train_loader, criterion, optimizer, device)
    # BUGFIX: model selection must use the validation split; the original
    # evaluated on test_loader, leaking the test set into checkpoint selection.
    val_loss = validate(model, val_loader, criterion, device)

    scheduler.step()

    if local_rank == 0:
        current_lr = optimizer.param_groups[0]['lr']
        logging.info(f'Current Learning Rate: {current_lr:.10f}')

        # Checkpoint on rank 0 whenever this rank's validation loss improves.
        if val_loss < best_val_loss:
            best_val_loss = val_loss
            torch.save(model.state_dict(), best_model_path)

        logging.info(f'Train Loss: {train_loss * num_gpus:.7f}, Val Loss: {val_loss * num_gpus:.7f}')

if local_rank == 0:
    try:
        # map_location keeps the restore on this rank's device.
        model.load_state_dict(torch.load(best_model_path, map_location=device))
        test_loss = test(model, test_loader, criterion, device)
        logging.info(f'Test Loss: {test_loss * num_gpus:.7f}')
        logging.info("Testing completed and best model saved.")
    except Exception as e:
        logging.error(f'Error loading model checkpoint during testing: {e}')

dist.destroy_process_group()