Spaces:
Sleeping
Sleeping
Commit
·
aa68823
0
Parent(s):
Initial commit of AutoML project
Browse files- .gitignore +9 -0
- .gradio/certificate.pem +31 -0
- AI_SQL_Assistant.log +158 -0
- app.py +327 -0
- catboost_info/catboost_training.json +1004 -0
- catboost_info/learn/events.out.tfevents +0 -0
- catboost_info/learn_error.tsv +1001 -0
- catboost_info/time_left.tsv +1001 -0
- config.py +6 -0
- frontend/BGround.png +0 -0
- frontend/index.html +91 -0
- frontend/main.js +298 -0
- frontend/style.css +313 -0
- models/supervised.py +104 -0
- models/unsupervised.py +64 -0
- models/xgboost_model.py +37 -0
- rag/memory.py +104 -0
- rag/rag_query.py +72 -0
- requirements.txt +16 -0
- utils/data_cleaner.py +101 -0
- utils/export.py +12 -0
- utils/metrics.py +56 -0
- visuals/charts.py +579 -0
.gitignore
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Ignore Python cache
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.pyc
|
| 4 |
+
|
| 5 |
+
# Ignore virtual environment
|
| 6 |
+
venv/
|
| 7 |
+
|
| 8 |
+
# Ignore temporary data files
|
| 9 |
+
.cache/
|
.gradio/certificate.pem
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-----BEGIN CERTIFICATE-----
|
| 2 |
+
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
|
| 3 |
+
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
|
| 4 |
+
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
|
| 5 |
+
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
|
| 6 |
+
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
|
| 7 |
+
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
|
| 8 |
+
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
|
| 9 |
+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
|
| 10 |
+
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
|
| 11 |
+
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
|
| 12 |
+
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
|
| 13 |
+
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
|
| 14 |
+
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
|
| 15 |
+
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
|
| 16 |
+
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
|
| 17 |
+
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
|
| 18 |
+
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
|
| 19 |
+
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
|
| 20 |
+
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
|
| 21 |
+
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
|
| 22 |
+
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
|
| 23 |
+
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
|
| 24 |
+
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
|
| 25 |
+
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
|
| 26 |
+
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
|
| 27 |
+
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
|
| 28 |
+
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
|
| 29 |
+
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
|
| 30 |
+
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
|
| 31 |
+
-----END CERTIFICATE-----
|
AI_SQL_Assistant.log
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 2 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 3 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 4 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 5 |
+
INFO:httpx:HTTP Request: POST https://api.groq.com/openai/v1/chat/completions "HTTP/1.1 200 OK"
|
| 6 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 7 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 8 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 9 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 10 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 11 |
+
INFO:httpx:HTTP Request: POST https://api.groq.com/openai/v1/chat/completions "HTTP/1.1 200 OK"
|
| 12 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 13 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 14 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 15 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 16 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 17 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 18 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 19 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 20 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 21 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 22 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 23 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 24 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 25 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 26 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 27 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 28 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 29 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 30 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 31 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 32 |
+
INFO:httpx:HTTP Request: POST https://api.groq.com/openai/v1/chat/completions "HTTP/1.1 200 OK"
|
| 33 |
+
INFO:httpx:HTTP Request: POST https://api.groq.com/openai/v1/chat/completions "HTTP/1.1 200 OK"
|
| 34 |
+
INFO:httpx:HTTP Request: POST https://api.groq.com/openai/v1/chat/completions "HTTP/1.1 200 OK"
|
| 35 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 36 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 37 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 38 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 39 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 40 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 41 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 42 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 43 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 44 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 45 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 46 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 47 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 48 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 49 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 50 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 51 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 52 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 53 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 54 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 55 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 56 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 57 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 58 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 59 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 60 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 61 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 62 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 63 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 64 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 65 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 66 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 67 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 68 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 69 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 70 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 71 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 72 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 73 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 74 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 75 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 76 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 77 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 78 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 79 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 80 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 81 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 82 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 83 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 84 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 85 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 86 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 87 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 88 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 89 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 90 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 91 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 92 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 93 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 94 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 95 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 96 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 97 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 98 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 99 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 100 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 101 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 102 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 103 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 104 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 105 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 106 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 107 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 108 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 109 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 110 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 111 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 112 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 113 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 114 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 115 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 116 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 117 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 118 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 119 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 120 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 121 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 122 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 123 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 124 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 125 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 126 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 127 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 128 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 129 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 130 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 131 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 132 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 133 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 134 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 135 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 136 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 137 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 138 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 139 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 140 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 141 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 142 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 143 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 144 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 145 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 146 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 147 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 148 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 149 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 150 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 151 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/v3/tunnel-request "HTTP/1.1 200 OK"
|
| 152 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
| 153 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 154 |
+
INFO:matplotlib.category:Using categorical units to plot a list of strings that are all parsable as floats or dates. If these strings should be plotted as numbers, cast to the appropriate data type before plotting.
|
| 155 |
+
INFO:webbrowser:Groq client initialized successfully
|
| 156 |
+
INFO:httpx:HTTP Request: GET http://127.0.0.1:7860/gradio_api/startup-events "HTTP/1.1 200 OK"
|
| 157 |
+
INFO:httpx:HTTP Request: HEAD http://127.0.0.1:7860/ "HTTP/1.1 200 OK"
|
| 158 |
+
INFO:httpx:HTTP Request: GET https://api.gradio.app/pkg-version "HTTP/1.1 200 OK"
|
app.py
ADDED
|
@@ -0,0 +1,327 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import matplotlib
|
| 2 |
+
matplotlib.use('Agg')
|
| 3 |
+
from flask import Flask, request, jsonify, send_from_directory
|
| 4 |
+
import pandas as pd
|
| 5 |
+
from rag.memory import store_dataset, get_dataset, store_model_results, get_model_results
|
| 6 |
+
from rag.rag_query import query_dataset_with_groq
|
| 7 |
+
from models.supervised import train_model as train_supervised
|
| 8 |
+
from models.unsupervised import train_unsupervised
|
| 9 |
+
from visuals.charts import (
|
| 10 |
+
plot_histogram, plot_bar, plot_scatter, plot_box, plot_pie, plot_heatmap,
|
| 11 |
+
plot_confusion_matrix, plot_roc_curve, plot_feature_importance,
|
| 12 |
+
plot_elbow_curve, plot_cluster_plot, plot_dendrogram, plot_tsne
|
| 13 |
+
)
|
| 14 |
+
import os
|
| 15 |
+
import logging
|
| 16 |
+
import json
|
| 17 |
+
import re
|
| 18 |
+
import matplotlib.pyplot as plt
|
| 19 |
+
import io
|
| 20 |
+
import base64
|
| 21 |
+
|
| 22 |
+
# Configure logging
|
| 23 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 24 |
+
|
| 25 |
+
app = Flask(__name__, static_folder='frontend')
|
| 26 |
+
dataset_name = "active_dataset"
|
| 27 |
+
|
| 28 |
+
@app.route('/')
def index():
    """Serve the frontend's entry-point page from the static folder."""
    entry_page = 'index.html'
    return send_from_directory(app.static_folder, entry_page)
|
| 32 |
+
|
| 33 |
+
@app.route('/<path:path>')
def static_proxy(path):
    """Serve any other frontend asset (JS, CSS, images) by its relative path."""
    static_root = app.static_folder
    return send_from_directory(static_root, path)
|
| 37 |
+
|
| 38 |
+
@app.route('/api/upload', methods=['POST'])
def upload_csv():
    """Accept a CSV upload, cache it as the active dataset, and report its size.

    Returns 400 when the multipart field is missing or empty, 500 when the
    CSV cannot be parsed, otherwise a JSON success message with row/column
    counts.
    """
    # Guard: the multipart form must carry a 'file' part at all.
    if 'file' not in request.files:
        logging.warning("No file part in upload request.")
        return jsonify({"error": "No file part"}), 400

    uploaded = request.files['file']
    # Guard: browsers submit an empty filename when no file was chosen.
    if uploaded.filename == '':
        logging.warning("No selected file in upload request.")
        return jsonify({"error": "No selected file"}), 400

    try:
        frame = pd.read_csv(uploaded)
        store_dataset(dataset_name, frame)
        rows, cols = frame.shape
        summary = f"Uploaded {rows} rows and {cols} columns."
        logging.info(summary)
        return jsonify({"message": summary})
    except Exception as e:
        # Surface parse/storage failures to the client rather than crashing.
        logging.error(f"Error uploading file: {e}")
        return jsonify({"error": str(e)}), 500
|
| 56 |
+
|
| 57 |
+
@app.route('/api/columns', methods=['GET'])
def get_columns():
    """Report the column names of the active dataset (empty list when none is loaded)."""
    frame = get_dataset(dataset_name)
    if frame is None:
        # Not an error for the client — just means nothing has been uploaded yet.
        logging.info("No dataset loaded when requesting columns.")
        return jsonify({"columns": []})
    return jsonify({"columns": list(frame.columns)})
|
| 65 |
+
|
| 66 |
+
@app.route('/api/learning_type', methods=['GET'])
def get_learning_type():
    """Classify the active dataset as supervised or unsupervised using the LLM.

    Builds a prompt containing the dataset's column dtypes and a head() sample,
    asks the Groq-backed model for a strict JSON verdict, validates any
    suggested target column against the actual dataframe, and falls back to
    ("Unsupervised", None) whenever the response cannot be trusted.

    Returns:
        JSON {"learning_type": "Supervised"|"Unsupervised",
              "target_column": str|None}; HTTP 400 if no dataset is loaded.
    """
    df = get_dataset(dataset_name)
    if df is None:
        logging.warning("No dataset uploaded when requesting learning type.")
        return jsonify({"error": "No dataset uploaded yet."}), 400

    # BUG FIX: dtypes_str was interpolated into the prompt below but never
    # defined, so every call raised NameError. Build it from the dataframe.
    dtypes_str = df.dtypes.to_string()

    prompt = (
        "You are an expert data scientist. Your task is to analyze a dataset and determine its learning type (supervised or unsupervised). "
        "If it's a supervised learning problem, you MUST identify the single target column that the other columns would predict. "
        "A target column is typically a label, outcome, or value that is being predicted (e.g., 'price', 'churn', 'diagnosis', 'category', 'sales'). "
        "If no such clear target column exists, it's an unsupervised problem. "
        "Respond ONLY with a JSON object, and nothing else. Do NOT include any introductory/concluding remarks, explanations, or markdown outside the JSON. "
        "The JSON must strictly follow this format: "
        "{\"learning_type\": \"Supervised\", \"target_column\": \"your_target_column_name\"} "
        "OR "
        "{\"learning_type\": \"Unsupervised\", \"target_column\": null}. "
        "\n\n"
        f"COLUMNS AND DATA TYPES:\n{dtypes_str}\n\n"
        f"DATA SAMPLE:\n{df.head().to_string()}"
    )

    try:
        response_text = query_dataset_with_groq(dataset_name, prompt).strip()
        logging.info(f"Raw LLM response for learning type: {response_text}")

        # Preferred path: the model obeyed the "JSON only" instruction.
        try:
            data = json.loads(response_text)
            learning_type = data.get("learning_type", "Unsupervised")
            target_column = data.get("target_column")
            logging.info(f"Parsed LLM response - learning_type: {learning_type}, target_column: {target_column}")

            # A supervised verdict is only usable if the named target really
            # exists in the uploaded dataframe; otherwise treat as unsupervised.
            if learning_type == "Supervised":
                if target_column is None or target_column not in df.columns:
                    logging.warning(f"LLM suggested supervised learning but target column '{target_column}' is invalid or not found. Defaulting to Unsupervised.")
                    learning_type = "Unsupervised"
                    target_column = None
            else:
                # If LLM says unsupervised, ensure target_column is null
                target_column = None

        except json.JSONDecodeError:
            logging.error(f"LLM response is not a valid JSON: {response_text}. Attempting regex fallback.")
            # Fallback: Try to extract using regex if JSON parsing fails (less reliable)
            match = re.search(r'"learning_type"\s*:\s*"(Supervised|Unsupervised)"(?:,\s*"target_column"\s*:\s*"?([a-zA-Z0-9_]+)?"?)?', response_text)
            if match:
                learning_type = match.group(1)
                target_column = match.group(2) if match.group(2) else None
                logging.info(f"Regex fallback parsed - learning_type: {learning_type}, target_column: {target_column}")

                if learning_type == "Supervised" and (target_column is None or target_column not in df.columns):
                    logging.warning(f"Regex fallback: Invalid target column '{target_column}' for supervised. Defaulting to Unsupervised.")
                    learning_type = "Unsupervised"
                    target_column = None
                elif learning_type == "Unsupervised":
                    target_column = None
            else:
                logging.error("Could not parse LLM response for learning type using regex fallback. Defaulting to Unsupervised.")
                learning_type = "Unsupervised"
                target_column = None

        return jsonify({"learning_type": learning_type, "target_column": target_column})

    except Exception as e:
        logging.error(f"An unexpected error occurred while determining learning type: {str(e)}", exc_info=True)
        # Fallback to a default in case of any error during Groq call or initial processing
        return jsonify({"learning_type": "Unsupervised", "target_column": None})
|
| 136 |
+
|
| 137 |
+
@app.route('/api/train', methods=['POST'])
def train_model_api():
    """Train a supervised or unsupervised model on the active dataset.

    Expects a JSON body with 'model_name', 'target_col', and 'learning_type';
    returns metrics (supervised) or raw results (unsupervised) on success,
    and a 4xx/5xx JSON error otherwise.
    """
    payload = request.json
    model_name = payload.get('model_name')
    target_col = payload.get('target_col')
    learning_type = payload.get('learning_type')

    frame = get_dataset(dataset_name)
    if frame is None:
        logging.warning("No dataset uploaded when requesting model training.")
        return jsonify({"error": "No dataset uploaded yet."}), 400

    if learning_type == "Supervised":
        # Supervised training is meaningless without a concrete target column.
        if not target_col or target_col == 'None':
            logging.warning("No target column provided for supervised training.")
            return jsonify({"error": "Please select a target column for supervised learning."}), 400

        model, metrics, y_test, y_pred, y_pred_proba, X_test = train_supervised(
            frame, target_col, model_name
        )
        if not model:
            logging.error(f"Failed to train {model_name} for supervised learning. Reason: {metrics}")
            return jsonify({"error": f"Failed to train {model_name}. Reason: {metrics}"}), 500

        # Persist artifacts so evaluation plots can be generated later.
        store_model_results(dataset_name, model, y_test, y_pred, y_pred_proba, X_test)
        logging.info(f"{model_name} trained successfully for supervised learning.")
        return jsonify({"message": f"{model_name} trained successfully.", "metrics": metrics})

    # Unsupervised path: no target column, results returned directly.
    model, result = train_unsupervised(frame, model_name)
    if not model:
        logging.error(f"Failed to train {model_name} for unsupervised learning. Reason: {result}")
        return jsonify({"error": f"Failed to train {model_name}. Reason: {result}"}), 500

    logging.info(f"{model_name} trained successfully for unsupervised learning.")
    # numpy arrays are not JSON-serializable; convert when possible.
    return jsonify({
        "message": f"{model_name} trained successfully.",
        "result": result.tolist() if hasattr(result, 'tolist') else result,
    })
|
| 171 |
+
|
| 172 |
+
@app.route('/api/plot', methods=['POST'])
def generate_plot_api():
    """Generate a plot of the requested type and return it as a base64 PNG.

    Expects a JSON body with:
        plot_type (str): one of the supported plot names (keys of ``plot_functions``).
        col1, col2 (str, optional): column names for single/two-column plots.
        color_col (str, optional): colouring column, used by Scatter only.

    Returns:
        200 with ``{"image": <base64 PNG>}`` on success,
        400 for user-level errors (no data, unsupported plot, plot function error),
        500 for unexpected internal failures.
    """
    data = request.json
    plot_type = data.get('plot_type')
    col1 = data.get('col1')
    col2 = data.get('col2')

    df = get_dataset(dataset_name)
    if df is None:
        logging.warning("No dataset loaded when requesting plot generation.")
        return jsonify({"error": "No data loaded."}), 400

    # Dispatch table: plot name -> plotting function (each returns (fig, err)).
    plot_functions = {
        "Histogram": plot_histogram,
        "Bar": plot_bar,
        "Scatter": plot_scatter,
        "Box": plot_box,
        "Pie": plot_pie,
        "Heatmap": plot_heatmap,
        "Elbow Curve": plot_elbow_curve,
        "Cluster Plot": plot_cluster_plot,
        "Dendrogram": plot_dendrogram,
        "t-SNE": plot_tsne,
        "Confusion Matrix": plot_confusion_matrix,
        "ROC Curve": plot_roc_curve,
        "Feature Importance Plot": plot_feature_importance,
    }

    if plot_type not in plot_functions:
        logging.warning(f"Unsupported plot type requested: {plot_type}")
        return jsonify({"error": "Plot not supported."}), 400

    fig, err = None, None
    try:
        if plot_type == "Scatter":
            fig, err = plot_functions[plot_type](df, col1, col2, data.get('color_col'))
        elif plot_type == "Box":
            fig, err = plot_functions[plot_type](df, col1, col2)
        elif plot_type == "Heatmap":
            fig, err = plot_functions[plot_type](df)
        elif plot_type in ("Elbow Curve", "Cluster Plot", "Dendrogram", "t-SNE"):
            # All unsupervised plots operate on the cleaned numeric feature
            # matrix; the import was previously duplicated in every branch.
            from utils.data_cleaner import prepare_data
            X_prepared, _ = prepare_data(df)
            if plot_type == "Cluster Plot":
                from sklearn.cluster import KMeans
                if X_prepared.empty:
                    return jsonify({"error": "Data is empty after cleaning for Cluster Plot."}), 400
                # Default to 3 clusters, clamped to the number of rows available.
                n_clusters = min(3, len(X_prepared))
                if n_clusters == 0:
                    return jsonify({"error": "Not enough data points to form clusters."}), 400
                kmeans = KMeans(n_clusters=n_clusters, random_state=42, n_init=10)
                kmeans.fit(X_prepared)
                fig, err = plot_functions[plot_type](X_prepared, labels=kmeans.labels_)
            else:
                fig, err = plot_functions[plot_type](X_prepared)
        elif plot_type in ("Confusion Matrix", "ROC Curve", "Feature Importance Plot"):
            # Model-evaluation plots require results from a prior supervised run.
            model_results = get_model_results(dataset_name)
            if not model_results:
                logging.warning(f"No trained model found for {plot_type} plot.")
                return jsonify({"error": "No trained model found. Please train a supervised model first."}), 400

            model = model_results['model']
            y_test = model_results['y_test']
            y_pred = model_results['y_pred']
            y_pred_proba = model_results['y_pred_proba']
            X_test = model_results['X_test']

            if plot_type == "Confusion Matrix":
                # Class names derived from the unique values observed in y_test.
                class_names = [str(c) for c in sorted(pd.Series(y_test).unique())]
                fig, err = plot_functions[plot_type](y_test, y_pred, class_names)
            elif plot_type == "ROC Curve":
                if y_pred_proba is None:
                    logging.warning("ROC Curve requested but model does not provide probability predictions.")
                    return jsonify({"error": "ROC Curve requires probability predictions, which this model does not provide."}), 400
                fig, err = plot_functions[plot_type](y_test, y_pred_proba)
            else:  # Feature Importance Plot
                if not hasattr(model, 'feature_importances_'):
                    logging.warning("Feature Importance Plot requested but model does not have feature importances.")
                    return jsonify({"error": "Model does not have feature importances to plot."}), 400
                # Feature names come from the held-out design matrix's columns.
                fig, err = plot_functions[plot_type](model, X_test.columns.tolist())
        else:
            # Single-column plots: Histogram, Bar, Pie.
            fig, err = plot_functions[plot_type](df, col1)

        if err:
            logging.error(f"Plot generation error for {plot_type}: {err}")
            return jsonify({"error": err}), 400

        # Serialise the matplotlib figure to a base64-encoded PNG payload.
        buf = io.BytesIO()
        fig.savefig(buf, format='png', bbox_inches='tight')
        plt.close(fig)  # release the figure so memory does not leak across requests
        buf.seek(0)
        img_str = base64.b64encode(buf.read()).decode('utf-8')
        return jsonify({'image': img_str})
    except Exception as e:
        logging.error(f"An unexpected error occurred during plot generation for {plot_type}: {e}", exc_info=True)
        return jsonify({"error": f"An internal server error occurred: {str(e)}"}), 500
|
| 287 |
+
|
| 288 |
+
@app.route('/api/plot_options', methods=['GET'])
def plot_options():
    """Return the plot names available for the current dataset's learning type."""
    df = get_dataset(dataset_name)
    if df is None:
        logging.warning("No dataset uploaded when requesting plot options.")
        return jsonify({"error": "No dataset uploaded yet."}), 400

    # Reuse the dedicated endpoint to determine the learning type.
    type_payload = get_learning_type().get_json()
    current_type = type_payload.get('learning_type', 'Unsupervised')

    common = ["Histogram", "Bar", "Scatter", "Box", "Pie", "Heatmap"]
    if current_type == "Supervised":
        plots = common + ["Confusion Matrix", "ROC Curve", "Feature Importance Plot"]
    else:
        plots = common + ["Cluster Plot", "Elbow Curve", "Dendrogram", "t-SNE"]

    # Guarantee a scatter option whenever at least two numeric columns exist.
    numeric_count = len(df.select_dtypes(include=np.number).columns)
    if numeric_count >= 2 and "Scatter" not in plots:
        plots.insert(2, "Scatter")

    return jsonify({"plots": plots})
|
| 312 |
+
|
| 313 |
+
@app.route('/api/ask', methods=['POST'])
def ask_question_api():
    """Forward a user's natural-language question about the dataset to the AI."""
    payload = request.json
    question = payload.get('user_query')
    if not question:
        logging.warning("Empty user query received for AI assistant.")
        return jsonify({"error": "Please ask a question."}), 400

    return jsonify({"answer": query_dataset_with_groq(dataset_name, question)})
|
| 324 |
+
|
| 325 |
+
# Script entry point: start the development server on port 5001.
if __name__ == '__main__':
    logging.info("Starting Flask application...")
    # NOTE(review): debug=True enables the Werkzeug debugger and auto-reload —
    # unsafe for production; confirm the deployment overrides this setting.
    app.run(debug=True, port=5001)
|
catboost_info/catboost_training.json
ADDED
|
@@ -0,0 +1,1004 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"meta":{"test_sets":[],"test_metrics":[],"learn_metrics":[{"best_value":"Min","name":"Logloss"}],"launch_mode":"Train","parameters":"","iteration_count":1000,"learn_sets":["learn"],"name":"experiment"},
|
| 3 |
+
"iterations":[
|
| 4 |
+
{"learn":[0.6345975935],"iteration":0,"passed_time":0.1663500986,"remaining_time":166.1837485},
|
| 5 |
+
{"learn":[0.5821738159],"iteration":1,"passed_time":0.1771992163,"remaining_time":88.42240891},
|
| 6 |
+
{"learn":[0.536109203],"iteration":2,"passed_time":0.1885927051,"remaining_time":62.67564232},
|
| 7 |
+
{"learn":[0.4922448434],"iteration":3,"passed_time":0.2002379859,"remaining_time":49.85925849},
|
| 8 |
+
{"learn":[0.4506230198],"iteration":4,"passed_time":0.211385181,"remaining_time":42.06565103},
|
| 9 |
+
{"learn":[0.4165109727],"iteration":5,"passed_time":0.2229659417,"remaining_time":36.93802435},
|
| 10 |
+
{"learn":[0.3855723247],"iteration":6,"passed_time":0.2341421355,"remaining_time":33.21473436},
|
| 11 |
+
{"learn":[0.3589960512],"iteration":7,"passed_time":0.2453144979,"remaining_time":30.41899774},
|
| 12 |
+
{"learn":[0.329146501],"iteration":8,"passed_time":0.2556955964,"remaining_time":28.15492623},
|
| 13 |
+
{"learn":[0.3036517077],"iteration":9,"passed_time":0.2649330663,"remaining_time":26.22837357},
|
| 14 |
+
{"learn":[0.2793441382],"iteration":10,"passed_time":0.2732549281,"remaining_time":24.56810218},
|
| 15 |
+
{"learn":[0.257702187],"iteration":11,"passed_time":0.2812988546,"remaining_time":23.16027236},
|
| 16 |
+
{"learn":[0.2407448692],"iteration":12,"passed_time":0.2893249441,"remaining_time":21.96643999},
|
| 17 |
+
{"learn":[0.225226436],"iteration":13,"passed_time":0.2973872518,"remaining_time":20.9445593},
|
| 18 |
+
{"learn":[0.2095610351],"iteration":14,"passed_time":0.3053869407,"remaining_time":20.05374244},
|
| 19 |
+
{"learn":[0.1951945625],"iteration":15,"passed_time":0.3140369075,"remaining_time":19.31326981},
|
| 20 |
+
{"learn":[0.1846442912],"iteration":16,"passed_time":0.3224891323,"remaining_time":18.64745983},
|
| 21 |
+
{"learn":[0.1732121946],"iteration":17,"passed_time":0.3307273809,"remaining_time":18.043016},
|
| 22 |
+
{"learn":[0.1632697999],"iteration":18,"passed_time":0.3409342268,"remaining_time":17.60297245},
|
| 23 |
+
{"learn":[0.1537118642],"iteration":19,"passed_time":0.3497122001,"remaining_time":17.13589781},
|
| 24 |
+
{"learn":[0.1460680989],"iteration":20,"passed_time":0.3581062691,"remaining_time":16.69457321},
|
| 25 |
+
{"learn":[0.1393085269],"iteration":21,"passed_time":0.3662326631,"remaining_time":16.28070657},
|
| 26 |
+
{"learn":[0.1315902081],"iteration":22,"passed_time":0.3746211411,"remaining_time":15.91325456},
|
| 27 |
+
{"learn":[0.1265649653],"iteration":23,"passed_time":0.3832210241,"remaining_time":15.58432165},
|
| 28 |
+
{"learn":[0.1211223295],"iteration":24,"passed_time":0.3916200493,"remaining_time":15.27318192},
|
| 29 |
+
{"learn":[0.1175602502],"iteration":25,"passed_time":0.3997712422,"remaining_time":14.97604577},
|
| 30 |
+
{"learn":[0.1119215835],"iteration":26,"passed_time":0.4081216403,"remaining_time":14.70749467},
|
| 31 |
+
{"learn":[0.1077986165],"iteration":27,"passed_time":0.416597623,"remaining_time":14.46188891},
|
| 32 |
+
{"learn":[0.1029698384],"iteration":28,"passed_time":0.4250312612,"remaining_time":14.23121912},
|
| 33 |
+
{"learn":[0.0986395114],"iteration":29,"passed_time":0.4334259948,"remaining_time":14.01410716},
|
| 34 |
+
{"learn":[0.09615119676],"iteration":30,"passed_time":0.4415918122,"remaining_time":13.80330536},
|
| 35 |
+
{"learn":[0.09377886222],"iteration":31,"passed_time":0.4500360072,"remaining_time":13.61358922},
|
| 36 |
+
{"learn":[0.09163242394],"iteration":32,"passed_time":0.4582566078,"remaining_time":13.42830726},
|
| 37 |
+
{"learn":[0.08899570965],"iteration":33,"passed_time":0.4664002304,"remaining_time":13.2512536},
|
| 38 |
+
{"learn":[0.08655943737],"iteration":34,"passed_time":0.4747450102,"remaining_time":13.08939814},
|
| 39 |
+
{"learn":[0.08411792667],"iteration":35,"passed_time":0.4835791508,"remaining_time":12.94917504},
|
| 40 |
+
{"learn":[0.08245623879],"iteration":36,"passed_time":0.4924466791,"remaining_time":12.81692303},
|
| 41 |
+
{"learn":[0.08067619472],"iteration":37,"passed_time":0.5007063717,"remaining_time":12.67577709},
|
| 42 |
+
{"learn":[0.07827483014],"iteration":38,"passed_time":0.5090206182,"remaining_time":12.54279011},
|
| 43 |
+
{"learn":[0.07692957542],"iteration":39,"passed_time":0.5172461706,"remaining_time":12.41390809},
|
| 44 |
+
{"learn":[0.07555168248],"iteration":40,"passed_time":0.5254859501,"remaining_time":12.29124454},
|
| 45 |
+
{"learn":[0.07391568055],"iteration":41,"passed_time":0.5337677492,"remaining_time":12.17498818},
|
| 46 |
+
{"learn":[0.07177635588],"iteration":42,"passed_time":0.5423935348,"remaining_time":12.0714096},
|
| 47 |
+
{"learn":[0.07028396644],"iteration":43,"passed_time":0.550597241,"remaining_time":11.96297642},
|
| 48 |
+
{"learn":[0.06840369806],"iteration":44,"passed_time":0.55908336,"remaining_time":11.86499131},
|
| 49 |
+
{"learn":[0.06745874221],"iteration":45,"passed_time":0.567509604,"remaining_time":11.7696557},
|
| 50 |
+
{"learn":[0.06587893724],"iteration":46,"passed_time":0.5756937978,"remaining_time":11.67311041},
|
| 51 |
+
{"learn":[0.0646966767],"iteration":47,"passed_time":0.5837952251,"remaining_time":11.5786053},
|
| 52 |
+
{"learn":[0.06327847649],"iteration":48,"passed_time":0.5922069887,"remaining_time":11.49364992},
|
| 53 |
+
{"learn":[0.06244927114],"iteration":49,"passed_time":0.6005894145,"remaining_time":11.41119888},
|
| 54 |
+
{"learn":[0.06149193877],"iteration":50,"passed_time":0.6089033392,"remaining_time":11.33037782},
|
| 55 |
+
{"learn":[0.06045234473],"iteration":51,"passed_time":0.6171963906,"remaining_time":11.25196497},
|
| 56 |
+
{"learn":[0.05980879338],"iteration":52,"passed_time":0.625193463,"remaining_time":11.17090961},
|
| 57 |
+
{"learn":[0.05862077701],"iteration":53,"passed_time":0.6338217964,"remaining_time":11.10361888},
|
| 58 |
+
{"learn":[0.05796779561],"iteration":54,"passed_time":0.6423448126,"remaining_time":11.03665178},
|
| 59 |
+
{"learn":[0.05723820058],"iteration":55,"passed_time":0.6505543202,"remaining_time":10.96648711},
|
| 60 |
+
{"learn":[0.05668393745],"iteration":56,"passed_time":0.6586974211,"remaining_time":10.89739769},
|
| 61 |
+
{"learn":[0.05583438662],"iteration":57,"passed_time":0.6670647489,"remaining_time":10.83405161},
|
| 62 |
+
{"learn":[0.05545563798],"iteration":58,"passed_time":0.6752536853,"remaining_time":10.76972403},
|
| 63 |
+
{"learn":[0.05452632489],"iteration":59,"passed_time":0.6835812514,"remaining_time":10.7094396},
|
| 64 |
+
{"learn":[0.05396694639],"iteration":60,"passed_time":0.692114834,"remaining_time":10.65402999},
|
| 65 |
+
{"learn":[0.0532831949],"iteration":61,"passed_time":0.7006550391,"remaining_time":10.60023269},
|
| 66 |
+
{"learn":[0.05305155948],"iteration":62,"passed_time":0.7090113389,"remaining_time":10.5451369},
|
| 67 |
+
{"learn":[0.05212511105],"iteration":63,"passed_time":0.7171926356,"remaining_time":10.4889423},
|
| 68 |
+
{"learn":[0.05163175536],"iteration":64,"passed_time":0.7252384093,"remaining_time":10.43227558},
|
| 69 |
+
{"learn":[0.05108733309],"iteration":65,"passed_time":0.7340316332,"remaining_time":10.38765978},
|
| 70 |
+
{"learn":[0.05074313014],"iteration":66,"passed_time":0.7434996595,"remaining_time":10.35351018},
|
| 71 |
+
{"learn":[0.04995906245],"iteration":67,"passed_time":0.7517449406,"remaining_time":10.30332772},
|
| 72 |
+
{"learn":[0.0491270113],"iteration":68,"passed_time":0.7601741118,"remaining_time":10.256842},
|
| 73 |
+
{"learn":[0.04865425364],"iteration":69,"passed_time":0.7686734001,"remaining_time":10.21237517},
|
| 74 |
+
{"learn":[0.04784921475],"iteration":70,"passed_time":0.7772802362,"remaining_time":10.17032872},
|
| 75 |
+
{"learn":[0.04723317029],"iteration":71,"passed_time":0.786488982,"remaining_time":10.1369691},
|
| 76 |
+
{"learn":[0.04680454552],"iteration":72,"passed_time":0.7948144753,"remaining_time":10.09305505},
|
| 77 |
+
{"learn":[0.0461453941],"iteration":73,"passed_time":0.8034633907,"remaining_time":10.05415},
|
| 78 |
+
{"learn":[0.04523684268],"iteration":74,"passed_time":0.8121011726,"remaining_time":10.01591446},
|
| 79 |
+
{"learn":[0.04456790962],"iteration":75,"passed_time":0.8208038558,"remaining_time":9.979246879},
|
| 80 |
+
{"learn":[0.04429176872],"iteration":76,"passed_time":0.8293280085,"remaining_time":9.941165609},
|
| 81 |
+
{"learn":[0.04371691534],"iteration":77,"passed_time":0.8378567671,"remaining_time":9.903896657},
|
| 82 |
+
{"learn":[0.04331697618],"iteration":78,"passed_time":0.8466933537,"remaining_time":9.870944034},
|
| 83 |
+
{"learn":[0.04278334029],"iteration":79,"passed_time":0.8552559775,"remaining_time":9.835443741},
|
| 84 |
+
{"learn":[0.04215936635],"iteration":80,"passed_time":0.8644953272,"remaining_time":9.80828649},
|
| 85 |
+
{"learn":[0.04188383767],"iteration":81,"passed_time":0.872710254,"remaining_time":9.770097722},
|
| 86 |
+
{"learn":[0.04157795788],"iteration":82,"passed_time":0.8817325129,"remaining_time":9.741550775},
|
| 87 |
+
{"learn":[0.04135640998],"iteration":83,"passed_time":0.8910555889,"remaining_time":9.716749041},
|
| 88 |
+
{"learn":[0.04090694022],"iteration":84,"passed_time":0.8993673638,"remaining_time":9.681425152},
|
| 89 |
+
{"learn":[0.04072319587],"iteration":85,"passed_time":0.9079068189,"remaining_time":9.649149215},
|
| 90 |
+
{"learn":[0.04048617238],"iteration":86,"passed_time":0.9165633283,"remaining_time":9.618647341},
|
| 91 |
+
{"learn":[0.04004556143],"iteration":87,"passed_time":0.9254792899,"remaining_time":9.591330822},
|
| 92 |
+
{"learn":[0.03989673999],"iteration":88,"passed_time":0.9335730213,"remaining_time":9.556011487},
|
| 93 |
+
{"learn":[0.03953182104],"iteration":89,"passed_time":0.942555451,"remaining_time":9.530282893},
|
| 94 |
+
{"learn":[0.0394369881],"iteration":90,"passed_time":0.9507263462,"remaining_time":9.496815919},
|
| 95 |
+
{"learn":[0.03915172486],"iteration":91,"passed_time":0.9593610207,"remaining_time":9.468476161},
|
| 96 |
+
{"learn":[0.03901884894],"iteration":92,"passed_time":0.9674640637,"remaining_time":9.435375331},
|
| 97 |
+
{"learn":[0.03869289892],"iteration":93,"passed_time":0.9762159772,"remaining_time":9.409060376},
|
| 98 |
+
{"learn":[0.03815146211],"iteration":94,"passed_time":0.9847038399,"remaining_time":9.380599738},
|
| 99 |
+
{"learn":[0.03783431435],"iteration":95,"passed_time":0.9931292516,"remaining_time":9.351967119},
|
| 100 |
+
{"learn":[0.0375035486],"iteration":96,"passed_time":1.001284856,"remaining_time":9.321239437},
|
| 101 |
+
{"learn":[0.03729480873],"iteration":97,"passed_time":1.009845981,"remaining_time":9.294704842},
|
| 102 |
+
{"learn":[0.03685405537],"iteration":98,"passed_time":1.018402488,"remaining_time":9.268491335},
|
| 103 |
+
{"learn":[0.03640809608],"iteration":99,"passed_time":1.026545695,"remaining_time":9.238911251},
|
| 104 |
+
{"learn":[0.03575616916],"iteration":100,"passed_time":1.03745151,"remaining_time":9.234345619},
|
| 105 |
+
{"learn":[0.03569260107],"iteration":101,"passed_time":1.04583944,"remaining_time":9.207488401},
|
| 106 |
+
{"learn":[0.03523461797],"iteration":102,"passed_time":1.054145833,"remaining_time":9.180279728},
|
| 107 |
+
{"learn":[0.03514318762],"iteration":103,"passed_time":1.062416581,"remaining_time":9.15312747},
|
| 108 |
+
{"learn":[0.03468352676],"iteration":104,"passed_time":1.070521797,"remaining_time":9.124923889},
|
| 109 |
+
{"learn":[0.03434562089],"iteration":105,"passed_time":1.078673139,"remaining_time":9.097488548},
|
| 110 |
+
{"learn":[0.03403483312],"iteration":106,"passed_time":1.087550262,"remaining_time":9.076470882},
|
| 111 |
+
{"learn":[0.03364170803],"iteration":107,"passed_time":1.096254252,"remaining_time":9.054248085},
|
| 112 |
+
{"learn":[0.03354889364],"iteration":108,"passed_time":1.104565077,"remaining_time":9.029059479},
|
| 113 |
+
{"learn":[0.03340411922],"iteration":109,"passed_time":1.11304815,"remaining_time":9.005571399},
|
| 114 |
+
{"learn":[0.03310352241],"iteration":110,"passed_time":1.12265041,"remaining_time":8.991317247},
|
| 115 |
+
{"learn":[0.03281770859],"iteration":111,"passed_time":1.13122365,"remaining_time":8.968987514},
|
| 116 |
+
{"learn":[0.03218629616],"iteration":112,"passed_time":1.139834543,"remaining_time":8.947196814},
|
| 117 |
+
{"learn":[0.03186893819],"iteration":113,"passed_time":1.148193117,"remaining_time":8.92367633},
|
| 118 |
+
{"learn":[0.03157080811],"iteration":114,"passed_time":1.156461223,"remaining_time":8.899723327},
|
| 119 |
+
{"learn":[0.03135488728],"iteration":115,"passed_time":1.164920974,"remaining_time":8.877501218},
|
| 120 |
+
{"learn":[0.03126355296],"iteration":116,"passed_time":1.173370459,"remaining_time":8.855436879},
|
| 121 |
+
{"learn":[0.03111994846],"iteration":117,"passed_time":1.181690351,"remaining_time":8.832634658},
|
| 122 |
+
{"learn":[0.03081413272],"iteration":118,"passed_time":1.190004474,"remaining_time":8.810033123},
|
| 123 |
+
{"learn":[0.03041541944],"iteration":119,"passed_time":1.198656701,"remaining_time":8.790149138},
|
| 124 |
+
{"learn":[0.03007503256],"iteration":120,"passed_time":1.207534682,"remaining_time":8.772090789},
|
| 125 |
+
{"learn":[0.02976127995],"iteration":121,"passed_time":1.215599876,"remaining_time":8.748333535},
|
| 126 |
+
{"learn":[0.0296251658],"iteration":122,"passed_time":1.22400353,"remaining_time":8.727244679},
|
| 127 |
+
{"learn":[0.02939174213],"iteration":123,"passed_time":1.232348895,"remaining_time":8.705948646},
|
| 128 |
+
{"learn":[0.0293002632],"iteration":124,"passed_time":1.241032961,"remaining_time":8.687230728},
|
| 129 |
+
{"learn":[0.02921549833],"iteration":125,"passed_time":1.24956817,"remaining_time":8.667639528},
|
| 130 |
+
{"learn":[0.02886017345],"iteration":126,"passed_time":1.258629896,"remaining_time":8.651841726},
|
| 131 |
+
{"learn":[0.02860820514],"iteration":127,"passed_time":1.267132739,"remaining_time":8.632341783},
|
| 132 |
+
{"learn":[0.02834298725],"iteration":128,"passed_time":1.276079701,"remaining_time":8.616011005},
|
| 133 |
+
{"learn":[0.0278810478],"iteration":129,"passed_time":1.284362746,"remaining_time":8.595350684},
|
| 134 |
+
{"learn":[0.02769682564],"iteration":130,"passed_time":1.292845593,"remaining_time":8.576204735},
|
| 135 |
+
{"learn":[0.02747245776],"iteration":131,"passed_time":1.301079976,"remaining_time":8.555586507},
|
| 136 |
+
{"learn":[0.02734022169],"iteration":132,"passed_time":1.309548584,"remaining_time":8.536681368},
|
| 137 |
+
{"learn":[0.02710595116],"iteration":133,"passed_time":1.318357141,"remaining_time":8.520128984},
|
| 138 |
+
{"learn":[0.02685099955],"iteration":134,"passed_time":1.327307149,"remaining_time":8.504597658},
|
| 139 |
+
{"learn":[0.02654279781],"iteration":135,"passed_time":1.33607339,"remaining_time":8.487995657},
|
| 140 |
+
{"learn":[0.02633697314],"iteration":136,"passed_time":1.344489637,"remaining_time":8.469303337},
|
| 141 |
+
{"learn":[0.02607809897],"iteration":137,"passed_time":1.353067414,"remaining_time":8.451768919},
|
| 142 |
+
{"learn":[0.02586316258],"iteration":138,"passed_time":1.36163892,"remaining_time":8.434324535},
|
| 143 |
+
{"learn":[0.0256751753],"iteration":139,"passed_time":1.36978445,"remaining_time":8.41439019},
|
| 144 |
+
{"learn":[0.025665838],"iteration":140,"passed_time":1.378336611,"remaining_time":8.397100348},
|
| 145 |
+
{"learn":[0.02542802598],"iteration":141,"passed_time":1.386817619,"remaining_time":8.379503643},
|
| 146 |
+
{"learn":[0.02528695237],"iteration":142,"passed_time":1.395341131,"remaining_time":8.362289156},
|
| 147 |
+
{"learn":[0.0251071283],"iteration":143,"passed_time":1.403708052,"remaining_time":8.344264533},
|
| 148 |
+
{"learn":[0.02501060697],"iteration":144,"passed_time":1.411914093,"remaining_time":8.325424481},
|
| 149 |
+
{"learn":[0.02490475149],"iteration":145,"passed_time":1.420339531,"remaining_time":8.308013419},
|
| 150 |
+
{"learn":[0.02474863095],"iteration":146,"passed_time":1.428654766,"remaining_time":8.290085142},
|
| 151 |
+
{"learn":[0.02465164397],"iteration":147,"passed_time":1.436798419,"remaining_time":8.271299005},
|
| 152 |
+
{"learn":[0.02444501689],"iteration":148,"passed_time":1.44540644,"remaining_time":8.255307922},
|
| 153 |
+
{"learn":[0.02432844792],"iteration":149,"passed_time":1.453655793,"remaining_time":8.237382828},
|
| 154 |
+
{"learn":[0.02426986159],"iteration":150,"passed_time":1.461914967,"remaining_time":8.219641105},
|
| 155 |
+
{"learn":[0.02419508709],"iteration":151,"passed_time":1.470067101,"remaining_time":8.201426983},
|
| 156 |
+
{"learn":[0.02401890055],"iteration":152,"passed_time":1.47856705,"remaining_time":8.185269876},
|
| 157 |
+
{"learn":[0.02375445555],"iteration":153,"passed_time":1.486975807,"remaining_time":8.168711252},
|
| 158 |
+
{"learn":[0.02341849817],"iteration":154,"passed_time":1.495589604,"remaining_time":8.153375582},
|
| 159 |
+
{"learn":[0.02341033255],"iteration":155,"passed_time":1.503946026,"remaining_time":8.136733628},
|
| 160 |
+
{"learn":[0.02325700825],"iteration":156,"passed_time":1.512647519,"remaining_time":8.122050054},
|
| 161 |
+
{"learn":[0.02317235421],"iteration":157,"passed_time":1.522469473,"remaining_time":8.113413265},
|
| 162 |
+
{"learn":[0.02305525018],"iteration":158,"passed_time":1.530947056,"remaining_time":8.097650781},
|
| 163 |
+
{"learn":[0.02284465716],"iteration":159,"passed_time":1.539629438,"remaining_time":8.083054549},
|
| 164 |
+
{"learn":[0.02272297479],"iteration":160,"passed_time":1.547823292,"remaining_time":8.065985975},
|
| 165 |
+
{"learn":[0.02244912653],"iteration":161,"passed_time":1.556253428,"remaining_time":8.050249212},
|
| 166 |
+
{"learn":[0.02240931173],"iteration":162,"passed_time":1.564682575,"remaining_time":8.034597028},
|
| 167 |
+
{"learn":[0.02216938805],"iteration":163,"passed_time":1.573451979,"remaining_time":8.020767406},
|
| 168 |
+
{"learn":[0.02196063647],"iteration":164,"passed_time":1.581750209,"remaining_time":8.004614692},
|
| 169 |
+
{"learn":[0.02185233913],"iteration":165,"passed_time":1.58983697,"remaining_time":7.987494175},
|
| 170 |
+
{"learn":[0.02180497711],"iteration":166,"passed_time":1.598291764,"remaining_time":7.972317602},
|
| 171 |
+
{"learn":[0.02168866178],"iteration":167,"passed_time":1.6067172,"remaining_time":7.957075659},
|
| 172 |
+
{"learn":[0.02156183083],"iteration":168,"passed_time":1.615175603,"remaining_time":7.942076485},
|
| 173 |
+
{"learn":[0.021410091],"iteration":169,"passed_time":1.623832589,"remaining_time":7.928123819},
|
| 174 |
+
{"learn":[0.02134540498],"iteration":170,"passed_time":1.632443045,"remaining_time":7.914007512},
|
| 175 |
+
{"learn":[0.02128399529],"iteration":171,"passed_time":1.640629151,"remaining_time":7.897912424},
|
| 176 |
+
{"learn":[0.02120614953],"iteration":172,"passed_time":1.649285993,"remaining_time":7.884159051},
|
| 177 |
+
{"learn":[0.02107470954],"iteration":173,"passed_time":1.657993691,"remaining_time":7.870705685},
|
| 178 |
+
{"learn":[0.02094395728],"iteration":174,"passed_time":1.666399984,"remaining_time":7.855885637},
|
| 179 |
+
{"learn":[0.0209405784],"iteration":175,"passed_time":1.675169226,"remaining_time":7.842837742},
|
| 180 |
+
{"learn":[0.02088292865],"iteration":176,"passed_time":1.683480939,"remaining_time":7.827710809},
|
| 181 |
+
{"learn":[0.02073307079],"iteration":177,"passed_time":1.691931701,"remaining_time":7.813302573},
|
| 182 |
+
{"learn":[0.02064925028],"iteration":178,"passed_time":1.700156863,"remaining_time":7.797926172},
|
| 183 |
+
{"learn":[0.02043668523],"iteration":179,"passed_time":1.708603203,"remaining_time":7.783636813},
|
| 184 |
+
{"learn":[0.0202902692],"iteration":180,"passed_time":1.717285979,"remaining_time":7.770481862},
|
| 185 |
+
{"learn":[0.02020970094],"iteration":181,"passed_time":1.725333998,"remaining_time":7.754523133},
|
| 186 |
+
{"learn":[0.02013228071],"iteration":182,"passed_time":1.733823115,"remaining_time":7.740620137},
|
| 187 |
+
{"learn":[0.02008803333],"iteration":183,"passed_time":1.742098842,"remaining_time":7.725829647},
|
| 188 |
+
{"learn":[0.01996854812],"iteration":184,"passed_time":1.750216185,"remaining_time":7.710411841},
|
| 189 |
+
{"learn":[0.01993370199],"iteration":185,"passed_time":1.758565328,"remaining_time":7.696086975},
|
| 190 |
+
{"learn":[0.01985323879],"iteration":186,"passed_time":1.766882862,"remaining_time":7.681688595},
|
| 191 |
+
{"learn":[0.01982112827],"iteration":187,"passed_time":1.774642024,"remaining_time":7.664943211},
|
| 192 |
+
{"learn":[0.01975542876],"iteration":188,"passed_time":1.783024425,"remaining_time":7.650967242},
|
| 193 |
+
{"learn":[0.01964357296],"iteration":189,"passed_time":1.791342836,"remaining_time":7.636777354},
|
| 194 |
+
{"learn":[0.01957327402],"iteration":190,"passed_time":1.799358304,"remaining_time":7.621365803},
|
| 195 |
+
{"learn":[0.01948483903],"iteration":191,"passed_time":1.807492552,"remaining_time":7.606531158},
|
| 196 |
+
{"learn":[0.01933879333],"iteration":192,"passed_time":1.815650931,"remaining_time":7.591866847},
|
| 197 |
+
{"learn":[0.01923854039],"iteration":193,"passed_time":1.824242918,"remaining_time":7.579071093},
|
| 198 |
+
{"learn":[0.019190388],"iteration":194,"passed_time":1.832183386,"remaining_time":7.563628849},
|
| 199 |
+
{"learn":[0.0191418315],"iteration":195,"passed_time":1.840886117,"remaining_time":7.551389989},
|
| 200 |
+
{"learn":[0.01910594571],"iteration":196,"passed_time":1.848813577,"remaining_time":7.536026915},
|
| 201 |
+
{"learn":[0.01907350926],"iteration":197,"passed_time":1.857368683,"remaining_time":7.523281232},
|
| 202 |
+
{"learn":[0.01902672477],"iteration":198,"passed_time":1.865917629,"remaining_time":7.510552867},
|
| 203 |
+
{"learn":[0.01895616503],"iteration":199,"passed_time":1.874603519,"remaining_time":7.498414074},
|
| 204 |
+
{"learn":[0.01882914777],"iteration":200,"passed_time":1.883302631,"remaining_time":7.486362201},
|
| 205 |
+
{"learn":[0.01871791658],"iteration":201,"passed_time":1.891710649,"remaining_time":7.473193554},
|
| 206 |
+
{"learn":[0.01861451426],"iteration":202,"passed_time":1.900209704,"remaining_time":7.460429232},
|
| 207 |
+
{"learn":[0.01854089645],"iteration":203,"passed_time":1.908834198,"remaining_time":7.448196186},
|
| 208 |
+
{"learn":[0.01845122694],"iteration":204,"passed_time":1.918383717,"remaining_time":7.439585633},
|
| 209 |
+
{"learn":[0.01835347959],"iteration":205,"passed_time":1.927475847,"remaining_time":7.429203021},
|
| 210 |
+
{"learn":[0.01826695602],"iteration":206,"passed_time":1.935744482,"remaining_time":7.415678135},
|
| 211 |
+
{"learn":[0.01819670819],"iteration":207,"passed_time":1.944107227,"remaining_time":7.402562133},
|
| 212 |
+
{"learn":[0.01814156866],"iteration":208,"passed_time":1.952874323,"remaining_time":7.391021958},
|
| 213 |
+
{"learn":[0.018070904],"iteration":209,"passed_time":1.960788311,"remaining_time":7.376298885},
|
| 214 |
+
{"learn":[0.01795127031],"iteration":210,"passed_time":1.969447265,"remaining_time":7.36442603},
|
| 215 |
+
{"learn":[0.01784787433],"iteration":211,"passed_time":1.977618481,"remaining_time":7.350770579},
|
| 216 |
+
{"learn":[0.01777268738],"iteration":212,"passed_time":1.986003251,"remaining_time":7.337955675},
|
| 217 |
+
{"learn":[0.01767715377],"iteration":213,"passed_time":1.994283841,"remaining_time":7.324799529},
|
| 218 |
+
{"learn":[0.01764840114],"iteration":214,"passed_time":2.002325396,"remaining_time":7.31081598},
|
| 219 |
+
{"learn":[0.01754269125],"iteration":215,"passed_time":2.011571732,"remaining_time":7.301260361},
|
| 220 |
+
{"learn":[0.01745075062],"iteration":216,"passed_time":2.020162635,"remaining_time":7.289342596},
|
| 221 |
+
{"learn":[0.01735769817],"iteration":217,"passed_time":2.028048328,"remaining_time":7.274925655},
|
| 222 |
+
{"learn":[0.01733215984],"iteration":218,"passed_time":2.036360126,"remaining_time":7.262087937},
|
| 223 |
+
{"learn":[0.01722590523],"iteration":219,"passed_time":2.044623959,"remaining_time":7.249121309},
|
| 224 |
+
{"learn":[0.01716068204],"iteration":220,"passed_time":2.052771568,"remaining_time":7.235787564},
|
| 225 |
+
{"learn":[0.01708117818],"iteration":221,"passed_time":2.062446023,"remaining_time":7.227851379},
|
| 226 |
+
{"learn":[0.01696783973],"iteration":222,"passed_time":2.071202225,"remaining_time":7.21670013},
|
| 227 |
+
{"learn":[0.01695045758],"iteration":223,"passed_time":2.079287769,"remaining_time":7.203246913},
|
| 228 |
+
{"learn":[0.0167943347],"iteration":224,"passed_time":2.087371139,"remaining_time":7.189833925},
|
| 229 |
+
{"learn":[0.01671577198],"iteration":225,"passed_time":2.094972801,"remaining_time":7.174818354},
|
| 230 |
+
{"learn":[0.01665562814],"iteration":226,"passed_time":2.103072211,"remaining_time":7.161563081},
|
| 231 |
+
{"learn":[0.01652320493],"iteration":227,"passed_time":2.111888275,"remaining_time":7.150779597},
|
| 232 |
+
{"learn":[0.01648382118],"iteration":228,"passed_time":2.120501315,"remaining_time":7.139329755},
|
| 233 |
+
{"learn":[0.01635097002],"iteration":229,"passed_time":2.129012495,"remaining_time":7.127563572},
|
| 234 |
+
{"learn":[0.0161417807],"iteration":230,"passed_time":2.137679041,"remaining_time":7.116342782},
|
| 235 |
+
{"learn":[0.01605773708],"iteration":231,"passed_time":2.146416458,"remaining_time":7.105378619},
|
| 236 |
+
{"learn":[0.01596407594],"iteration":232,"passed_time":2.154607242,"remaining_time":7.092634141},
|
| 237 |
+
{"learn":[0.01594520768],"iteration":233,"passed_time":2.163204217,"remaining_time":7.081258247},
|
| 238 |
+
{"learn":[0.01591689707],"iteration":234,"passed_time":2.171863685,"remaining_time":7.070109442},
|
| 239 |
+
{"learn":[0.01588654619],"iteration":235,"passed_time":2.180475182,"remaining_time":7.058826437},
|
| 240 |
+
{"learn":[0.01581277018],"iteration":236,"passed_time":2.187789267,"remaining_time":7.043389074},
|
| 241 |
+
{"learn":[0.01572653481],"iteration":237,"passed_time":2.196228979,"remaining_time":7.031623875},
|
| 242 |
+
{"learn":[0.0156983478],"iteration":238,"passed_time":2.204641242,"remaining_time":7.019799101},
|
| 243 |
+
{"learn":[0.01560128375],"iteration":239,"passed_time":2.213847637,"remaining_time":7.010517518},
|
| 244 |
+
{"learn":[0.0155544796],"iteration":240,"passed_time":2.222117925,"remaining_time":6.998288404},
|
| 245 |
+
{"learn":[0.0155163642],"iteration":241,"passed_time":2.23065298,"remaining_time":6.986921317},
|
| 246 |
+
{"learn":[0.01543025639],"iteration":242,"passed_time":2.239361311,"remaining_time":6.976117336},
|
| 247 |
+
{"learn":[0.0153856539],"iteration":243,"passed_time":2.247360909,"remaining_time":6.963134621},
|
| 248 |
+
{"learn":[0.01534378503],"iteration":244,"passed_time":2.255645523,"remaining_time":6.951070898},
|
| 249 |
+
{"learn":[0.01528890953],"iteration":245,"passed_time":2.264345912,"remaining_time":6.940312267},
|
| 250 |
+
{"learn":[0.01525849806],"iteration":246,"passed_time":2.272941432,"remaining_time":6.929250601},
|
| 251 |
+
{"learn":[0.01523708916],"iteration":247,"passed_time":2.281293368,"remaining_time":6.917470213},
|
| 252 |
+
{"learn":[0.01509147349],"iteration":248,"passed_time":2.289908126,"remaining_time":6.906510051},
|
| 253 |
+
{"learn":[0.01496231303],"iteration":249,"passed_time":2.299569368,"remaining_time":6.898708105},
|
| 254 |
+
{"learn":[0.0149081613],"iteration":250,"passed_time":2.308233275,"remaining_time":6.887915231},
|
| 255 |
+
{"learn":[0.01484867909],"iteration":251,"passed_time":2.316701114,"remaining_time":6.876557275},
|
| 256 |
+
{"learn":[0.01479387844],"iteration":252,"passed_time":2.324959843,"remaining_time":6.864604753},
|
| 257 |
+
{"learn":[0.01472755735],"iteration":253,"passed_time":2.333361826,"remaining_time":6.853102055},
|
| 258 |
+
{"learn":[0.01465281837],"iteration":254,"passed_time":2.341828313,"remaining_time":6.841812131},
|
| 259 |
+
{"learn":[0.01459762906],"iteration":255,"passed_time":2.350077316,"remaining_time":6.829912199},
|
| 260 |
+
{"learn":[0.01453720265],"iteration":256,"passed_time":2.358175986,"remaining_time":6.817606059},
|
| 261 |
+
{"learn":[0.01442247791],"iteration":257,"passed_time":2.366400842,"remaining_time":6.805695445},
|
| 262 |
+
{"learn":[0.01435413075],"iteration":258,"passed_time":2.374569137,"remaining_time":6.793651469},
|
| 263 |
+
{"learn":[0.01427821476],"iteration":259,"passed_time":2.382680927,"remaining_time":6.781476485},
|
| 264 |
+
{"learn":[0.01424404426],"iteration":260,"passed_time":2.390996265,"remaining_time":6.769908965},
|
| 265 |
+
{"learn":[0.01421051387],"iteration":261,"passed_time":2.399213306,"remaining_time":6.758089388},
|
| 266 |
+
{"learn":[0.0141825945],"iteration":262,"passed_time":2.407524727,"remaining_time":6.746561688},
|
| 267 |
+
{"learn":[0.01413925248],"iteration":263,"passed_time":2.415839836,"remaining_time":6.735068634},
|
| 268 |
+
{"learn":[0.01405514175],"iteration":264,"passed_time":2.424053573,"remaining_time":6.7233184},
|
| 269 |
+
{"learn":[0.01394847452],"iteration":265,"passed_time":2.432546448,"remaining_time":6.712365012},
|
| 270 |
+
{"learn":[0.01393462023],"iteration":266,"passed_time":2.440792753,"remaining_time":6.700753137},
|
| 271 |
+
{"learn":[0.01386261472],"iteration":267,"passed_time":2.450119704,"remaining_time":6.692117999},
|
| 272 |
+
{"learn":[0.01383535245],"iteration":268,"passed_time":2.458200189,"remaining_time":6.680090476},
|
| 273 |
+
{"learn":[0.01374885708],"iteration":269,"passed_time":2.466313347,"remaining_time":6.668180532},
|
| 274 |
+
{"learn":[0.01366862251],"iteration":270,"passed_time":2.474359922,"remaining_time":6.656119496},
|
| 275 |
+
{"learn":[0.01354363456],"iteration":271,"passed_time":2.482595119,"remaining_time":6.644592818},
|
| 276 |
+
{"learn":[0.01347180334],"iteration":272,"passed_time":2.491036593,"remaining_time":6.633639572},
|
| 277 |
+
{"learn":[0.01344631734],"iteration":273,"passed_time":2.4990588,"remaining_time":6.621593754},
|
| 278 |
+
{"learn":[0.01335943218],"iteration":274,"passed_time":2.507387997,"remaining_time":6.610386537},
|
| 279 |
+
{"learn":[0.01334372505],"iteration":275,"passed_time":2.515332864,"remaining_time":6.598192006},
|
| 280 |
+
{"learn":[0.01332632611],"iteration":276,"passed_time":2.52336932,"remaining_time":6.586267214},
|
| 281 |
+
{"learn":[0.01329704442],"iteration":277,"passed_time":2.531585911,"remaining_time":6.574838228},
|
| 282 |
+
{"learn":[0.01328218517],"iteration":278,"passed_time":2.539923503,"remaining_time":6.563744967},
|
| 283 |
+
{"learn":[0.0132616189],"iteration":279,"passed_time":2.54831006,"remaining_time":6.552797297},
|
| 284 |
+
{"learn":[0.01323389187],"iteration":280,"passed_time":2.55672879,"remaining_time":6.541950178},
|
| 285 |
+
{"learn":[0.01321662738],"iteration":281,"passed_time":2.564663783,"remaining_time":6.529888639},
|
| 286 |
+
{"learn":[0.013088863],"iteration":282,"passed_time":2.573064847,"remaining_time":6.519037086},
|
| 287 |
+
{"learn":[0.01304560876],"iteration":283,"passed_time":2.580950775,"remaining_time":6.506904067},
|
| 288 |
+
{"learn":[0.01302215867],"iteration":284,"passed_time":2.589361161,"remaining_time":6.496116597},
|
| 289 |
+
{"learn":[0.01299059063],"iteration":285,"passed_time":2.597683477,"remaining_time":6.485125884},
|
| 290 |
+
{"learn":[0.01294543058],"iteration":286,"passed_time":2.605905075,"remaining_time":6.473903548},
|
| 291 |
+
{"learn":[0.0128675241],"iteration":287,"passed_time":2.614327282,"remaining_time":6.463198003},
|
| 292 |
+
{"learn":[0.01279021538],"iteration":288,"passed_time":2.622644439,"remaining_time":6.452249814},
|
| 293 |
+
{"learn":[0.01273897007],"iteration":289,"passed_time":2.630712186,"remaining_time":6.440709146},
|
| 294 |
+
{"learn":[0.01266642635],"iteration":290,"passed_time":2.639134259,"remaining_time":6.430055634},
|
| 295 |
+
{"learn":[0.01253702444],"iteration":291,"passed_time":2.647494558,"remaining_time":6.419267628},
|
| 296 |
+
{"learn":[0.01248791376],"iteration":292,"passed_time":2.656167331,"remaining_time":6.409250181},
|
| 297 |
+
{"learn":[0.01246083669],"iteration":293,"passed_time":2.664682414,"remaining_time":6.398863213},
|
| 298 |
+
{"learn":[0.01245533106],"iteration":294,"passed_time":2.672714607,"remaining_time":6.387334908},
|
| 299 |
+
{"learn":[0.01241379657],"iteration":295,"passed_time":2.680849916,"remaining_time":6.376075475},
|
| 300 |
+
{"learn":[0.01228910535],"iteration":296,"passed_time":2.689390166,"remaining_time":6.365795577},
|
| 301 |
+
{"learn":[0.01224158652],"iteration":297,"passed_time":2.698944811,"remaining_time":6.357916971},
|
| 302 |
+
{"learn":[0.01222041169],"iteration":298,"passed_time":2.70771451,"remaining_time":6.348186861},
|
| 303 |
+
{"learn":[0.01215861084],"iteration":299,"passed_time":2.716071763,"remaining_time":6.337500779},
|
| 304 |
+
{"learn":[0.01208227025],"iteration":300,"passed_time":2.724358637,"remaining_time":6.326666736},
|
| 305 |
+
{"learn":[0.01206734939],"iteration":301,"passed_time":2.732230159,"remaining_time":6.314889572},
|
| 306 |
+
{"learn":[0.01198636086],"iteration":302,"passed_time":2.740301915,"remaining_time":6.303598796},
|
| 307 |
+
{"learn":[0.01196627431],"iteration":303,"passed_time":2.748491205,"remaining_time":6.292598286},
|
| 308 |
+
{"learn":[0.01193734653],"iteration":304,"passed_time":2.756913695,"remaining_time":6.2821476},
|
| 309 |
+
{"learn":[0.01189021108],"iteration":305,"passed_time":2.765058281,"remaining_time":6.271079891},
|
| 310 |
+
{"learn":[0.01188992952],"iteration":306,"passed_time":2.769585387,"remaining_time":6.251865386},
|
| 311 |
+
{"learn":[0.01181813844],"iteration":307,"passed_time":2.777654155,"remaining_time":6.240703491},
|
| 312 |
+
{"learn":[0.01178334869],"iteration":308,"passed_time":2.785609725,"remaining_time":6.229308478},
|
| 313 |
+
{"learn":[0.01171860976],"iteration":309,"passed_time":2.794018984,"remaining_time":6.21894548},
|
| 314 |
+
{"learn":[0.01170155551],"iteration":310,"passed_time":2.80239498,"remaining_time":6.208521355},
|
| 315 |
+
{"learn":[0.01167457252],"iteration":311,"passed_time":2.811168814,"remaining_time":6.198987642},
|
| 316 |
+
{"learn":[0.01165058964],"iteration":312,"passed_time":2.819452275,"remaining_time":6.18838247},
|
| 317 |
+
{"learn":[0.01162250535],"iteration":313,"passed_time":2.827497758,"remaining_time":6.177272172},
|
| 318 |
+
{"learn":[0.01159511846],"iteration":314,"passed_time":2.836557929,"remaining_time":6.168387878},
|
| 319 |
+
{"learn":[0.01153381926],"iteration":315,"passed_time":2.84507701,"remaining_time":6.15833125},
|
| 320 |
+
{"learn":[0.0115255573],"iteration":316,"passed_time":2.853048894,"remaining_time":6.147105346},
|
| 321 |
+
{"learn":[0.01150211479],"iteration":317,"passed_time":2.86116447,"remaining_time":6.136208077},
|
| 322 |
+
{"learn":[0.01143771166],"iteration":318,"passed_time":2.869471133,"remaining_time":6.125736181},
|
| 323 |
+
{"learn":[0.0114218584],"iteration":319,"passed_time":2.877180867,"remaining_time":6.114009343},
|
| 324 |
+
{"learn":[0.0113450207],"iteration":320,"passed_time":2.885215654,"remaining_time":6.102995106},
|
| 325 |
+
{"learn":[0.01131809955],"iteration":321,"passed_time":2.893449803,"remaining_time":6.09241915},
|
| 326 |
+
{"learn":[0.01130333043],"iteration":322,"passed_time":2.901699758,"remaining_time":6.081890825},
|
| 327 |
+
{"learn":[0.01128106374],"iteration":323,"passed_time":2.91002435,"remaining_time":6.071532286},
|
| 328 |
+
{"learn":[0.01125953996],"iteration":324,"passed_time":2.918238883,"remaining_time":6.06095768},
|
| 329 |
+
{"learn":[0.01119687629],"iteration":325,"passed_time":2.927116219,"remaining_time":6.051767888},
|
| 330 |
+
{"learn":[0.01115300154],"iteration":326,"passed_time":2.935635023,"remaining_time":6.041842113},
|
| 331 |
+
{"learn":[0.01114158927],"iteration":327,"passed_time":2.943701185,"remaining_time":6.030997549},
|
| 332 |
+
{"learn":[0.01113205822],"iteration":328,"passed_time":2.952045863,"remaining_time":6.020737915},
|
| 333 |
+
{"learn":[0.01104892889],"iteration":329,"passed_time":2.960569992,"remaining_time":6.010854226},
|
| 334 |
+
{"learn":[0.0110357006],"iteration":330,"passed_time":2.969509319,"remaining_time":6.001817929},
|
| 335 |
+
{"learn":[0.01100847604],"iteration":331,"passed_time":2.980400479,"remaining_time":5.996709398},
|
| 336 |
+
{"learn":[0.01097898527],"iteration":332,"passed_time":2.988784637,"remaining_time":5.986544604},
|
| 337 |
+
{"learn":[0.01093170466],"iteration":333,"passed_time":2.997041701,"remaining_time":5.976137045},
|
| 338 |
+
{"learn":[0.01090073537],"iteration":334,"passed_time":3.005583583,"remaining_time":5.966307709},
|
| 339 |
+
{"learn":[0.01087907954],"iteration":335,"passed_time":3.013913031,"remaining_time":5.956066229},
|
| 340 |
+
{"learn":[0.01085420435],"iteration":336,"passed_time":3.022366227,"remaining_time":5.946079551},
|
| 341 |
+
{"learn":[0.01081370827],"iteration":337,"passed_time":3.030428372,"remaining_time":5.935336042},
|
| 342 |
+
{"learn":[0.01080466045],"iteration":338,"passed_time":3.038495345,"remaining_time":5.924617766},
|
| 343 |
+
{"learn":[0.01076322013],"iteration":339,"passed_time":3.046575752,"remaining_time":5.913941166},
|
| 344 |
+
{"learn":[0.01074414382],"iteration":340,"passed_time":3.054766088,"remaining_time":5.903492235},
|
| 345 |
+
{"learn":[0.01070159479],"iteration":341,"passed_time":3.062871964,"remaining_time":5.892894012},
|
| 346 |
+
{"learn":[0.01063228356],"iteration":342,"passed_time":3.071600699,"remaining_time":5.88350338},
|
| 347 |
+
{"learn":[0.01058941825],"iteration":343,"passed_time":3.080128021,"remaining_time":5.873732505},
|
| 348 |
+
{"learn":[0.01054232276],"iteration":344,"passed_time":3.090088639,"remaining_time":5.866690024},
|
| 349 |
+
{"learn":[0.01049204129],"iteration":345,"passed_time":3.0978571,"remaining_time":5.855487119},
|
| 350 |
+
{"learn":[0.01044611222],"iteration":346,"passed_time":3.105908311,"remaining_time":5.8448361},
|
| 351 |
+
{"learn":[0.01041678514],"iteration":347,"passed_time":3.114238303,"remaining_time":5.834722337},
|
| 352 |
+
{"learn":[0.01038512585],"iteration":348,"passed_time":3.12226552,"remaining_time":5.824054021},
|
| 353 |
+
{"learn":[0.01033542487],"iteration":349,"passed_time":3.130483872,"remaining_time":5.813755763},
|
| 354 |
+
{"learn":[0.01029840434],"iteration":350,"passed_time":3.138699212,"remaining_time":5.803463784},
|
| 355 |
+
{"learn":[0.01026454348],"iteration":351,"passed_time":3.146502626,"remaining_time":5.792425288},
|
| 356 |
+
{"learn":[0.01024568955],"iteration":352,"passed_time":3.155185975,"remaining_time":5.78301792},
|
| 357 |
+
{"learn":[0.01021468551],"iteration":353,"passed_time":3.163272358,"remaining_time":5.772525263},
|
| 358 |
+
{"learn":[0.01020861444],"iteration":354,"passed_time":3.171304925,"remaining_time":5.761948385},
|
| 359 |
+
{"learn":[0.01018232817],"iteration":355,"passed_time":3.179494122,"remaining_time":5.751669143},
|
| 360 |
+
{"learn":[0.01011292396],"iteration":356,"passed_time":3.18789674,"remaining_time":5.741786006},
|
| 361 |
+
{"learn":[0.01007270865],"iteration":357,"passed_time":3.196042495,"remaining_time":5.731450507},
|
| 362 |
+
{"learn":[0.0100286336],"iteration":358,"passed_time":3.204119514,"remaining_time":5.721004481},
|
| 363 |
+
{"learn":[0.009984063543],"iteration":359,"passed_time":3.212891194,"remaining_time":5.711806567},
|
| 364 |
+
{"learn":[0.00991697125],"iteration":360,"passed_time":3.221457291,"remaining_time":5.702247117},
|
| 365 |
+
{"learn":[0.009898808149],"iteration":361,"passed_time":3.231114696,"remaining_time":5.694616509},
|
| 366 |
+
{"learn":[0.009884166713],"iteration":362,"passed_time":3.239251276,"remaining_time":5.684305958},
|
| 367 |
+
{"learn":[0.0098288797],"iteration":363,"passed_time":3.247702079,"remaining_time":5.674556379},
|
| 368 |
+
{"learn":[0.009824673469],"iteration":364,"passed_time":3.255804105,"remaining_time":5.664207141},
|
| 369 |
+
{"learn":[0.009778547476],"iteration":365,"passed_time":3.264102321,"remaining_time":5.654210031},
|
| 370 |
+
{"learn":[0.0097341422],"iteration":366,"passed_time":3.272182831,"remaining_time":5.643846681},
|
| 371 |
+
{"learn":[0.009708824885],"iteration":367,"passed_time":3.280328158,"remaining_time":5.633607055},
|
| 372 |
+
{"learn":[0.009608369158],"iteration":368,"passed_time":3.288700634,"remaining_time":5.623767209},
|
| 373 |
+
{"learn":[0.009567940297],"iteration":369,"passed_time":3.296807703,"remaining_time":5.613483386},
|
| 374 |
+
{"learn":[0.009538137709],"iteration":370,"passed_time":3.305054736,"remaining_time":5.603448595},
|
| 375 |
+
{"learn":[0.00951365415],"iteration":371,"passed_time":3.313427511,"remaining_time":5.59363569},
|
| 376 |
+
{"learn":[0.009478970481],"iteration":372,"passed_time":3.321514567,"remaining_time":5.583350223},
|
| 377 |
+
{"learn":[0.009425252688],"iteration":373,"passed_time":3.329587222,"remaining_time":5.573052408},
|
| 378 |
+
{"learn":[0.009381720318],"iteration":374,"passed_time":3.337831155,"remaining_time":5.563051925},
|
| 379 |
+
{"learn":[0.009318977253],"iteration":375,"passed_time":3.346139916,"remaining_time":5.553168371},
|
| 380 |
+
{"learn":[0.009295126396],"iteration":376,"passed_time":3.354299838,"remaining_time":5.543047212},
|
| 381 |
+
{"learn":[0.009261297277],"iteration":377,"passed_time":3.362475872,"remaining_time":5.532962943},
|
| 382 |
+
{"learn":[0.009224172634],"iteration":378,"passed_time":3.370692678,"remaining_time":5.522955548},
|
| 383 |
+
{"learn":[0.009196197709],"iteration":379,"passed_time":3.378462023,"remaining_time":5.512227511},
|
| 384 |
+
{"learn":[0.00915397227],"iteration":380,"passed_time":3.386577807,"remaining_time":5.502077855},
|
| 385 |
+
{"learn":[0.009143010656],"iteration":381,"passed_time":3.394975442,"remaining_time":5.492394825},
|
| 386 |
+
{"learn":[0.009111064712],"iteration":382,"passed_time":3.403889451,"remaining_time":5.483550369},
|
| 387 |
+
{"learn":[0.0090753869],"iteration":383,"passed_time":3.412207683,"remaining_time":5.473749826},
|
| 388 |
+
{"learn":[0.009059020836],"iteration":384,"passed_time":3.420708803,"remaining_time":5.464249127},
|
| 389 |
+
{"learn":[0.009044292152],"iteration":385,"passed_time":3.428653918,"remaining_time":5.453869186},
|
| 390 |
+
{"learn":[0.009028571955],"iteration":386,"passed_time":3.436768635,"remaining_time":5.443770474},
|
| 391 |
+
{"learn":[0.009003510292],"iteration":387,"passed_time":3.445017289,"remaining_time":5.43389325},
|
| 392 |
+
{"learn":[0.008948575332],"iteration":388,"passed_time":3.453142886,"remaining_time":5.423831114},
|
| 393 |
+
{"learn":[0.008927785017],"iteration":389,"passed_time":3.461286437,"remaining_time":5.413806991},
|
| 394 |
+
{"learn":[0.008898643636],"iteration":390,"passed_time":3.469468629,"remaining_time":5.403852672},
|
| 395 |
+
{"learn":[0.008857753005],"iteration":391,"passed_time":3.478111976,"remaining_time":5.394622657},
|
| 396 |
+
{"learn":[0.008836855572],"iteration":392,"passed_time":3.487987794,"remaining_time":5.387299213},
|
| 397 |
+
{"learn":[0.008819993356],"iteration":393,"passed_time":3.496173253,"remaining_time":5.377362923},
|
| 398 |
+
{"learn":[0.008781864961],"iteration":394,"passed_time":3.504144447,"remaining_time":5.367107318},
|
| 399 |
+
{"learn":[0.008773163869],"iteration":395,"passed_time":3.512271482,"remaining_time":5.357100947},
|
| 400 |
+
{"learn":[0.008764997895],"iteration":396,"passed_time":3.520491162,"remaining_time":5.347244762},
|
| 401 |
+
{"learn":[0.008745352937],"iteration":397,"passed_time":3.52861718,"remaining_time":5.337255131},
|
| 402 |
+
{"learn":[0.008700106937],"iteration":398,"passed_time":3.536494653,"remaining_time":5.326900467},
|
| 403 |
+
{"learn":[0.008669267161],"iteration":399,"passed_time":3.544574133,"remaining_time":5.3168612},
|
| 404 |
+
{"learn":[0.008665167176],"iteration":400,"passed_time":3.552803262,"remaining_time":5.307055246},
|
| 405 |
+
{"learn":[0.00865106658],"iteration":401,"passed_time":3.56107187,"remaining_time":5.297315867},
|
| 406 |
+
{"learn":[0.008628502455],"iteration":402,"passed_time":3.568929604,"remaining_time":5.28697512},
|
| 407 |
+
{"learn":[0.008599820527],"iteration":403,"passed_time":3.577278567,"remaining_time":5.277371351},
|
| 408 |
+
{"learn":[0.008592827908],"iteration":404,"passed_time":3.58544405,"remaining_time":5.267504222},
|
| 409 |
+
{"learn":[0.008580059017],"iteration":405,"passed_time":3.593529927,"remaining_time":5.257529006},
|
| 410 |
+
{"learn":[0.008575735356],"iteration":406,"passed_time":3.60178173,"remaining_time":5.247804831},
|
| 411 |
+
{"learn":[0.008555173847],"iteration":407,"passed_time":3.609995479,"remaining_time":5.238032655},
|
| 412 |
+
{"learn":[0.008536121408],"iteration":408,"passed_time":3.618369237,"remaining_time":5.228499313},
|
| 413 |
+
{"learn":[0.008517698752],"iteration":409,"passed_time":3.626793287,"remaining_time":5.219043999},
|
| 414 |
+
{"learn":[0.008493650377],"iteration":410,"passed_time":3.636273781,"remaining_time":5.211107681},
|
| 415 |
+
{"learn":[0.008491332272],"iteration":411,"passed_time":3.644929558,"remaining_time":5.201986846},
|
| 416 |
+
{"learn":[0.008471728139],"iteration":412,"passed_time":3.653203341,"remaining_time":5.192325329},
|
| 417 |
+
{"learn":[0.008455698854],"iteration":413,"passed_time":3.661631991,"remaining_time":5.182889726},
|
| 418 |
+
{"learn":[0.008433111025],"iteration":414,"passed_time":3.670096432,"remaining_time":5.173509428},
|
| 419 |
+
{"learn":[0.008402435488],"iteration":415,"passed_time":3.678145075,"remaining_time":5.163549817},
|
| 420 |
+
{"learn":[0.008383857994],"iteration":416,"passed_time":3.686064994,"remaining_time":5.153419404},
|
| 421 |
+
{"learn":[0.008347193301],"iteration":417,"passed_time":3.693912744,"remaining_time":5.143199084},
|
| 422 |
+
{"learn":[0.008335345338],"iteration":418,"passed_time":3.70202317,"remaining_time":5.133354324},
|
| 423 |
+
{"learn":[0.008332796612],"iteration":419,"passed_time":3.710261615,"remaining_time":5.123694611},
|
| 424 |
+
{"learn":[0.008285398481],"iteration":420,"passed_time":3.718430572,"remaining_time":5.113946084},
|
| 425 |
+
{"learn":[0.008256300922],"iteration":421,"passed_time":3.726787161,"remaining_time":5.104462035},
|
| 426 |
+
{"learn":[0.008222580284],"iteration":422,"passed_time":3.734992537,"remaining_time":5.094777054},
|
| 427 |
+
{"learn":[0.00818066542],"iteration":423,"passed_time":3.743261202,"remaining_time":5.085185029},
|
| 428 |
+
{"learn":[0.008162043995],"iteration":424,"passed_time":3.751522193,"remaining_time":5.075588849},
|
| 429 |
+
{"learn":[0.008093292714],"iteration":425,"passed_time":3.759885596,"remaining_time":5.06613693},
|
| 430 |
+
{"learn":[0.008087739268],"iteration":426,"passed_time":3.768181659,"remaining_time":5.056599743},
|
| 431 |
+
{"learn":[0.008061960811],"iteration":427,"passed_time":3.776870867,"remaining_time":5.047593776},
|
| 432 |
+
{"learn":[0.008045936282],"iteration":428,"passed_time":3.785407613,"remaining_time":5.038386357},
|
| 433 |
+
{"learn":[0.008044581486],"iteration":429,"passed_time":3.793961109,"remaining_time":5.029204261},
|
| 434 |
+
{"learn":[0.008040640648],"iteration":430,"passed_time":3.802883653,"remaining_time":5.020512294},
|
| 435 |
+
{"learn":[0.008010030454],"iteration":431,"passed_time":3.81127725,"remaining_time":5.011123791},
|
| 436 |
+
{"learn":[0.007996326952],"iteration":432,"passed_time":3.820115036,"remaining_time":5.002321537},
|
| 437 |
+
{"learn":[0.007942410484],"iteration":433,"passed_time":3.828753592,"remaining_time":4.993259292},
|
| 438 |
+
{"learn":[0.007910942028],"iteration":434,"passed_time":3.837623364,"remaining_time":4.984499312},
|
| 439 |
+
{"learn":[0.007873121024],"iteration":435,"passed_time":3.846768027,"remaining_time":4.976094421},
|
| 440 |
+
{"learn":[0.007846428431],"iteration":436,"passed_time":3.855541294,"remaining_time":4.967207663},
|
| 441 |
+
{"learn":[0.007846387279],"iteration":437,"passed_time":3.863228092,"remaining_time":4.956927369},
|
| 442 |
+
{"learn":[0.007840208992],"iteration":438,"passed_time":3.872076585,"remaining_time":4.948143426},
|
| 443 |
+
{"learn":[0.007802075254],"iteration":439,"passed_time":3.881529541,"remaining_time":4.940128507},
|
| 444 |
+
{"learn":[0.007799556873],"iteration":440,"passed_time":3.890886734,"remaining_time":4.931985678},
|
| 445 |
+
{"learn":[0.007764761094],"iteration":441,"passed_time":3.899217917,"remaining_time":4.922542077},
|
| 446 |
+
{"learn":[0.00773336828],"iteration":442,"passed_time":3.907649359,"remaining_time":4.913229555},
|
| 447 |
+
{"learn":[0.007716015062],"iteration":443,"passed_time":3.915916892,"remaining_time":4.903715747},
|
| 448 |
+
{"learn":[0.007677880567],"iteration":444,"passed_time":3.924471922,"remaining_time":4.894566105},
|
| 449 |
+
{"learn":[0.00765945737],"iteration":445,"passed_time":3.933173744,"remaining_time":4.885601467},
|
| 450 |
+
{"learn":[0.007600449499],"iteration":446,"passed_time":3.941493324,"remaining_time":4.876165118},
|
| 451 |
+
{"learn":[0.007543751261],"iteration":447,"passed_time":3.95006508,"remaining_time":4.867044474},
|
| 452 |
+
{"learn":[0.00750578375],"iteration":448,"passed_time":3.961816996,"remaining_time":4.861828875},
|
| 453 |
+
{"learn":[0.007490615602],"iteration":449,"passed_time":3.970707485,"remaining_time":4.853086926},
|
| 454 |
+
{"learn":[0.007457151195],"iteration":450,"passed_time":3.979220131,"remaining_time":4.843884372},
|
| 455 |
+
{"learn":[0.007456453256],"iteration":451,"passed_time":3.987823428,"remaining_time":4.834794776},
|
| 456 |
+
{"learn":[0.007442275136],"iteration":452,"passed_time":3.996901028,"remaining_time":4.82628005},
|
| 457 |
+
{"learn":[0.007428831374],"iteration":453,"passed_time":4.005831479,"remaining_time":4.817585875},
|
| 458 |
+
{"learn":[0.007390666556],"iteration":454,"passed_time":4.015435379,"remaining_time":4.809697322},
|
| 459 |
+
{"learn":[0.007379457959],"iteration":455,"passed_time":4.025516147,"remaining_time":4.802370141},
|
| 460 |
+
{"learn":[0.007355067749],"iteration":456,"passed_time":4.034471472,"remaining_time":4.793693674},
|
| 461 |
+
{"learn":[0.007353381052],"iteration":457,"passed_time":4.042886979,"remaining_time":4.784377168},
|
| 462 |
+
{"learn":[0.007326337583],"iteration":458,"passed_time":4.051629353,"remaining_time":4.775449847},
|
| 463 |
+
{"learn":[0.007324318419],"iteration":459,"passed_time":4.060262571,"remaining_time":4.766395192},
|
| 464 |
+
{"learn":[0.007320714123],"iteration":460,"passed_time":4.068683303,"remaining_time":4.757093927},
|
| 465 |
+
{"learn":[0.007298096541],"iteration":461,"passed_time":4.077241342,"remaining_time":4.747956368},
|
| 466 |
+
{"learn":[0.007295718379],"iteration":462,"passed_time":4.085716346,"remaining_time":4.738725006},
|
| 467 |
+
{"learn":[0.007281984834],"iteration":463,"passed_time":4.094409987,"remaining_time":4.729749467},
|
| 468 |
+
{"learn":[0.0072357476],"iteration":464,"passed_time":4.103437587,"remaining_time":4.721159374},
|
| 469 |
+
{"learn":[0.007207076623],"iteration":465,"passed_time":4.112474867,"remaining_time":4.712578495},
|
| 470 |
+
{"learn":[0.007206992694],"iteration":466,"passed_time":4.120091293,"remaining_time":4.702374003},
|
| 471 |
+
{"learn":[0.007167258679],"iteration":467,"passed_time":4.128889574,"remaining_time":4.693524046},
|
| 472 |
+
{"learn":[0.007167251075],"iteration":468,"passed_time":4.13670421,"remaining_time":4.68356063},
|
| 473 |
+
{"learn":[0.007127704952],"iteration":469,"passed_time":4.145588244,"remaining_time":4.674812275},
|
| 474 |
+
{"learn":[0.007117703258],"iteration":470,"passed_time":4.154143105,"remaining_time":4.665693636},
|
| 475 |
+
{"learn":[0.007085232553],"iteration":471,"passed_time":4.162802977,"remaining_time":4.656694855},
|
| 476 |
+
{"learn":[0.007063322255],"iteration":472,"passed_time":4.171324656,"remaining_time":4.647543538},
|
| 477 |
+
{"learn":[0.007034470472],"iteration":473,"passed_time":4.180267534,"remaining_time":4.638862285},
|
| 478 |
+
{"learn":[0.007032188716],"iteration":474,"passed_time":4.188451225,"remaining_time":4.629340827},
|
| 479 |
+
{"learn":[0.007024564736],"iteration":475,"passed_time":4.197068155,"remaining_time":4.620301919},
|
| 480 |
+
{"learn":[0.007013930081],"iteration":476,"passed_time":4.205312716,"remaining_time":4.6108565},
|
| 481 |
+
{"learn":[0.007009407133],"iteration":477,"passed_time":4.213641814,"remaining_time":4.601508424},
|
| 482 |
+
{"learn":[0.006998254308],"iteration":478,"passed_time":4.22207429,"remaining_time":4.592277046},
|
| 483 |
+
{"learn":[0.006973868897],"iteration":479,"passed_time":4.230315431,"remaining_time":4.582841717},
|
| 484 |
+
{"learn":[0.006973866731],"iteration":480,"passed_time":4.237818003,"remaining_time":4.572614436},
|
| 485 |
+
{"learn":[0.006973845072],"iteration":481,"passed_time":4.24533981,"remaining_time":4.562419131},
|
| 486 |
+
{"learn":[0.006943321632],"iteration":482,"passed_time":4.253869084,"remaining_time":4.553313285},
|
| 487 |
+
{"learn":[0.006940065737],"iteration":483,"passed_time":4.261925867,"remaining_time":4.54370609},
|
| 488 |
+
{"learn":[0.006937864638],"iteration":484,"passed_time":4.27098865,"remaining_time":4.535173515},
|
| 489 |
+
{"learn":[0.006935555785],"iteration":485,"passed_time":4.280188053,"remaining_time":4.52678325},
|
| 490 |
+
{"learn":[0.00692491522],"iteration":486,"passed_time":4.288623828,"remaining_time":4.517585265},
|
| 491 |
+
{"learn":[0.006911291123],"iteration":487,"passed_time":4.296839674,"remaining_time":4.508159658},
|
| 492 |
+
{"learn":[0.006874462173],"iteration":488,"passed_time":4.305277413,"remaining_time":4.498970875},
|
| 493 |
+
{"learn":[0.006874460549],"iteration":489,"passed_time":4.312143941,"remaining_time":4.488149816},
|
| 494 |
+
{"learn":[0.006866285818],"iteration":490,"passed_time":4.320321702,"remaining_time":4.478704168},
|
| 495 |
+
{"learn":[0.006840612118],"iteration":491,"passed_time":4.328597927,"remaining_time":4.469365339},
|
| 496 |
+
{"learn":[0.00684042804],"iteration":492,"passed_time":4.33470221,"remaining_time":4.457797202},
|
| 497 |
+
{"learn":[0.006816577915],"iteration":493,"passed_time":4.34290428,"remaining_time":4.44839993},
|
| 498 |
+
{"learn":[0.006805090388],"iteration":494,"passed_time":4.351136301,"remaining_time":4.439038044},
|
| 499 |
+
{"learn":[0.0067836729],"iteration":495,"passed_time":4.35940438,"remaining_time":4.429717353},
|
| 500 |
+
{"learn":[0.006783647992],"iteration":496,"passed_time":4.36657044,"remaining_time":4.419285576},
|
| 501 |
+
{"learn":[0.006783647992],"iteration":497,"passed_time":4.372086848,"remaining_time":4.407204011},
|
| 502 |
+
{"learn":[0.006755373729],"iteration":498,"passed_time":4.38095083,"remaining_time":4.398509752},
|
| 503 |
+
{"learn":[0.006731995486],"iteration":499,"passed_time":4.389160394,"remaining_time":4.389160394},
|
| 504 |
+
{"learn":[0.006688334077],"iteration":500,"passed_time":4.397335695,"remaining_time":4.379781461},
|
| 505 |
+
{"learn":[0.006639195663],"iteration":501,"passed_time":4.406053615,"remaining_time":4.370945618},
|
| 506 |
+
{"learn":[0.006591016179],"iteration":502,"passed_time":4.415316788,"remaining_time":4.362648993},
|
| 507 |
+
{"learn":[0.006585869945],"iteration":503,"passed_time":4.423704901,"remaining_time":4.353487362},
|
| 508 |
+
{"learn":[0.006559010087],"iteration":504,"passed_time":4.432114154,"remaining_time":4.344349518},
|
| 509 |
+
{"learn":[0.006544999895],"iteration":505,"passed_time":4.440528435,"remaining_time":4.33521946},
|
| 510 |
+
{"learn":[0.006525506509],"iteration":506,"passed_time":4.449210127,"remaining_time":4.326352254},
|
| 511 |
+
{"learn":[0.006512860293],"iteration":507,"passed_time":4.554358648,"remaining_time":4.410914281},
|
| 512 |
+
{"learn":[0.006483240412],"iteration":508,"passed_time":4.562923808,"remaining_time":4.401563044},
|
| 513 |
+
{"learn":[0.006455247147],"iteration":509,"passed_time":4.570774246,"remaining_time":4.391528197},
|
| 514 |
+
{"learn":[0.006437132665],"iteration":510,"passed_time":4.579002714,"remaining_time":4.381863654},
|
| 515 |
+
{"learn":[0.006418526967],"iteration":511,"passed_time":4.587233286,"remaining_time":4.372206726},
|
| 516 |
+
{"learn":[0.006396143981],"iteration":512,"passed_time":4.595490768,"remaining_time":4.362580904},
|
| 517 |
+
{"learn":[0.006383333675],"iteration":513,"passed_time":4.603536844,"remaining_time":4.352760518},
|
| 518 |
+
{"learn":[0.006359021363],"iteration":514,"passed_time":4.61143397,"remaining_time":4.342806748},
|
| 519 |
+
{"learn":[0.006339552058],"iteration":515,"passed_time":4.619795957,"remaining_time":4.333296983},
|
| 520 |
+
{"learn":[0.00633029094],"iteration":516,"passed_time":4.628033607,"remaining_time":4.323675497},
|
| 521 |
+
{"learn":[0.006309557125],"iteration":517,"passed_time":4.636146984,"remaining_time":4.313943719},
|
| 522 |
+
{"learn":[0.00629316232],"iteration":518,"passed_time":4.644070332,"remaining_time":4.304042061},
|
| 523 |
+
{"learn":[0.006269392072],"iteration":519,"passed_time":4.652058723,"remaining_time":4.294208052},
|
| 524 |
+
{"learn":[0.006257486148],"iteration":520,"passed_time":4.660430905,"remaining_time":4.28473398},
|
| 525 |
+
{"learn":[0.00623798539],"iteration":521,"passed_time":4.669962908,"remaining_time":4.276326188},
|
| 526 |
+
{"learn":[0.006198282146],"iteration":522,"passed_time":4.678193742,"remaining_time":4.266727371},
|
| 527 |
+
{"learn":[0.006186719709],"iteration":523,"passed_time":4.686228253,"remaining_time":4.256955436},
|
| 528 |
+
{"learn":[0.006166191819],"iteration":524,"passed_time":4.694172256,"remaining_time":4.247108232},
|
| 529 |
+
{"learn":[0.006138351745],"iteration":525,"passed_time":4.702066096,"remaining_time":4.23722306},
|
| 530 |
+
{"learn":[0.006138353369],"iteration":526,"passed_time":4.709065635,"remaining_time":4.226542781},
|
| 531 |
+
{"learn":[0.006110262098],"iteration":527,"passed_time":4.717257458,"remaining_time":4.216942273},
|
| 532 |
+
{"learn":[0.00609903615],"iteration":528,"passed_time":4.725542924,"remaining_time":4.207430467},
|
| 533 |
+
{"learn":[0.00608738731],"iteration":529,"passed_time":4.733431135,"remaining_time":4.197571006},
|
| 534 |
+
{"learn":[0.006045059112],"iteration":530,"passed_time":4.741658688,"remaining_time":4.188018691},
|
| 535 |
+
{"learn":[0.006033755239],"iteration":531,"passed_time":4.749457539,"remaining_time":4.178094226},
|
| 536 |
+
{"learn":[0.006023305559],"iteration":532,"passed_time":4.757318375,"remaining_time":4.168232048},
|
| 537 |
+
{"learn":[0.006021433135],"iteration":533,"passed_time":4.765353061,"remaining_time":4.158529076},
|
| 538 |
+
{"learn":[0.006009082613],"iteration":534,"passed_time":4.773590603,"remaining_time":4.149008655},
|
| 539 |
+
{"learn":[0.005974244599],"iteration":535,"passed_time":4.781393627,"remaining_time":4.139116871},
|
| 540 |
+
{"learn":[0.005950949179],"iteration":536,"passed_time":4.789503048,"remaining_time":4.129497041},
|
| 541 |
+
{"learn":[0.005931164193],"iteration":537,"passed_time":4.79751166,"remaining_time":4.119796258},
|
| 542 |
+
{"learn":[0.005920806843],"iteration":538,"passed_time":4.806610256,"remaining_time":4.111034004},
|
| 543 |
+
{"learn":[0.005920806843],"iteration":539,"passed_time":4.813282648,"remaining_time":4.100203738},
|
| 544 |
+
{"learn":[0.005905041092],"iteration":540,"passed_time":4.821844473,"remaining_time":4.090991891},
|
| 545 |
+
{"learn":[0.005888681483],"iteration":541,"passed_time":4.830755919,"remaining_time":4.08207788},
|
| 546 |
+
{"learn":[0.005873286218],"iteration":542,"passed_time":4.838929868,"remaining_time":4.072543186},
|
| 547 |
+
{"learn":[0.005864314953],"iteration":543,"passed_time":4.847077318,"remaining_time":4.062991282},
|
| 548 |
+
{"learn":[0.005822944883],"iteration":544,"passed_time":4.855495066,"remaining_time":4.053670193},
|
| 549 |
+
{"learn":[0.00579759051],"iteration":545,"passed_time":4.86399284,"remaining_time":4.044418955},
|
| 550 |
+
{"learn":[0.005769865632],"iteration":546,"passed_time":4.871879407,"remaining_time":4.034664299},
|
| 551 |
+
{"learn":[0.005769851553],"iteration":547,"passed_time":4.878879259,"remaining_time":4.024185082},
|
| 552 |
+
{"learn":[0.005769838558],"iteration":548,"passed_time":4.88592772,"remaining_time":4.013758473},
|
| 553 |
+
{"learn":[0.005754848765],"iteration":549,"passed_time":4.894568749,"remaining_time":4.004647158},
|
| 554 |
+
{"learn":[0.005754842267],"iteration":550,"passed_time":4.901949434,"remaining_time":3.994510518},
|
| 555 |
+
{"learn":[0.005746696729],"iteration":551,"passed_time":4.910643524,"remaining_time":3.985449817},
|
| 556 |
+
{"learn":[0.005744734962],"iteration":552,"passed_time":4.92066275,"remaining_time":3.977461572},
|
| 557 |
+
{"learn":[0.005741601441],"iteration":553,"passed_time":4.928966052,"remaining_time":3.968084583},
|
| 558 |
+
{"learn":[0.005719971745],"iteration":554,"passed_time":4.93757711,"remaining_time":3.958958223},
|
| 559 |
+
{"learn":[0.005710648026],"iteration":555,"passed_time":4.945990081,"remaining_time":3.949675532},
|
| 560 |
+
{"learn":[0.005692326108],"iteration":556,"passed_time":4.954143761,"remaining_time":3.940189742},
|
| 561 |
+
{"learn":[0.005684342495],"iteration":557,"passed_time":4.962266749,"remaining_time":3.930684414},
|
| 562 |
+
{"learn":[0.005659451677],"iteration":558,"passed_time":4.970670278,"remaining_time":3.921405353},
|
| 563 |
+
{"learn":[0.005634417601],"iteration":559,"passed_time":4.979211981,"remaining_time":3.912237985},
|
| 564 |
+
{"learn":[0.005606237033],"iteration":560,"passed_time":4.987508969,"remaining_time":3.90288135},
|
| 565 |
+
{"learn":[0.005597040584],"iteration":561,"passed_time":4.995739601,"remaining_time":3.893476771},
|
| 566 |
+
{"learn":[0.00557240228],"iteration":562,"passed_time":5.004154589,"remaining_time":3.884219459},
|
| 567 |
+
{"learn":[0.005563605464],"iteration":563,"passed_time":5.012642671,"remaining_time":3.875021639},
|
| 568 |
+
{"learn":[0.005563605464],"iteration":564,"passed_time":5.018168536,"remaining_time":3.863545687},
|
| 569 |
+
{"learn":[0.005534976506],"iteration":565,"passed_time":5.026689531,"remaining_time":3.854387379},
|
| 570 |
+
{"learn":[0.005507760801],"iteration":566,"passed_time":5.035702607,"remaining_time":3.845607106},
|
| 571 |
+
{"learn":[0.005485963079],"iteration":567,"passed_time":5.045482783,"remaining_time":3.83740944},
|
| 572 |
+
{"learn":[0.005466836085],"iteration":568,"passed_time":5.053843209,"remaining_time":3.828130796},
|
| 573 |
+
{"learn":[0.005443505963],"iteration":569,"passed_time":5.06216613,"remaining_time":3.81882708},
|
| 574 |
+
{"learn":[0.005435063272],"iteration":570,"passed_time":5.070440944,"remaining_time":3.809490657},
|
| 575 |
+
{"learn":[0.005426874486],"iteration":571,"passed_time":5.078525417,"remaining_time":3.800015522},
|
| 576 |
+
{"learn":[0.00541900934],"iteration":572,"passed_time":5.086642367,"remaining_time":3.790569443},
|
| 577 |
+
{"learn":[0.005396359877],"iteration":573,"passed_time":5.095051497,"remaining_time":3.781344839},
|
| 578 |
+
{"learn":[0.005376238611],"iteration":574,"passed_time":5.103603655,"remaining_time":3.772228788},
|
| 579 |
+
{"learn":[0.00536865576],"iteration":575,"passed_time":5.111747712,"remaining_time":3.762814288},
|
| 580 |
+
{"learn":[0.005348265829],"iteration":576,"passed_time":5.120119223,"remaining_time":3.753570938},
|
| 581 |
+
{"learn":[0.005340543254],"iteration":577,"passed_time":5.128837568,"remaining_time":3.74458383},
|
| 582 |
+
{"learn":[0.005319017376],"iteration":578,"passed_time":5.137464087,"remaining_time":3.735530882},
|
| 583 |
+
{"learn":[0.005288491699],"iteration":579,"passed_time":5.145715675,"remaining_time":3.726207903},
|
| 584 |
+
{"learn":[0.005274006114],"iteration":580,"passed_time":5.154081438,"remaining_time":3.716970951},
|
| 585 |
+
{"learn":[0.005254930771],"iteration":581,"passed_time":5.162370613,"remaining_time":3.707681987},
|
| 586 |
+
{"learn":[0.005250145702],"iteration":582,"passed_time":5.170446078,"remaining_time":3.698243593},
|
| 587 |
+
{"learn":[0.005228178522],"iteration":583,"passed_time":5.179082456,"remaining_time":3.68920942},
|
| 588 |
+
{"learn":[0.005209792145],"iteration":584,"passed_time":5.188312284,"remaining_time":3.680597603},
|
| 589 |
+
{"learn":[0.005183617277],"iteration":585,"passed_time":5.196847207,"remaining_time":3.671492737},
|
| 590 |
+
{"learn":[0.005154915268],"iteration":586,"passed_time":5.205073663,"remaining_time":3.662172782},
|
| 591 |
+
{"learn":[0.005135169787],"iteration":587,"passed_time":5.213599231,"remaining_time":3.653066128},
|
| 592 |
+
{"learn":[0.005116151013],"iteration":588,"passed_time":5.221978249,"remaining_time":3.643859186},
|
| 593 |
+
{"learn":[0.005096986535],"iteration":589,"passed_time":5.230364348,"remaining_time":3.634659971},
|
| 594 |
+
{"learn":[0.005081596709],"iteration":590,"passed_time":5.238276661,"remaining_time":3.625135625},
|
| 595 |
+
{"learn":[0.005052898564],"iteration":591,"passed_time":5.246630623,"remaining_time":3.615921105},
|
| 596 |
+
{"learn":[0.005034060572],"iteration":592,"passed_time":5.254740082,"remaining_time":3.606541675},
|
| 597 |
+
{"learn":[0.005015975774],"iteration":593,"passed_time":5.263026858,"remaining_time":3.597287718},
|
| 598 |
+
{"learn":[0.004989973022],"iteration":594,"passed_time":5.271508105,"remaining_time":3.588169383},
|
| 599 |
+
{"learn":[0.004975079729],"iteration":595,"passed_time":5.279806507,"remaining_time":3.578929243},
|
| 600 |
+
{"learn":[0.004957126014],"iteration":596,"passed_time":5.288372758,"remaining_time":3.569873068},
|
| 601 |
+
{"learn":[0.004933304966],"iteration":597,"passed_time":5.296615434,"remaining_time":3.560601011},
|
| 602 |
+
{"learn":[0.004908607193],"iteration":598,"passed_time":5.30487702,"remaining_time":3.55134505},
|
| 603 |
+
{"learn":[0.004891804045],"iteration":599,"passed_time":5.313504085,"remaining_time":3.542336056},
|
| 604 |
+
{"learn":[0.004862332434],"iteration":600,"passed_time":5.321892079,"remaining_time":3.533169616},
|
| 605 |
+
{"learn":[0.004848515548],"iteration":601,"passed_time":5.330156225,"remaining_time":3.523923883},
|
| 606 |
+
{"learn":[0.004818839988],"iteration":602,"passed_time":5.338745884,"remaining_time":3.514895714},
|
| 607 |
+
{"learn":[0.004801272323],"iteration":603,"passed_time":5.34723692,"remaining_time":3.505804338},
|
| 608 |
+
{"learn":[0.004798084916],"iteration":604,"passed_time":5.35562612,"remaining_time":3.496648458},
|
| 609 |
+
{"learn":[0.00477162949],"iteration":605,"passed_time":5.363997287,"remaining_time":3.487483384},
|
| 610 |
+
{"learn":[0.004755774489],"iteration":606,"passed_time":5.372190337,"remaining_time":3.478205605},
|
| 611 |
+
{"learn":[0.004732855983],"iteration":607,"passed_time":5.380543839,"remaining_time":3.469034844},
|
| 612 |
+
{"learn":[0.004719117494],"iteration":608,"passed_time":5.388925072,"remaining_time":3.45988457},
|
| 613 |
+
{"learn":[0.004712359287],"iteration":609,"passed_time":5.397196335,"remaining_time":3.450666509},
|
| 614 |
+
{"learn":[0.004685021941],"iteration":610,"passed_time":5.405361359,"remaining_time":3.441383909},
|
| 615 |
+
{"learn":[0.00466990064],"iteration":611,"passed_time":5.413441165,"remaining_time":3.432050934},
|
| 616 |
+
{"learn":[0.0046464856],"iteration":612,"passed_time":5.421946244,"remaining_time":3.422990533},
|
| 617 |
+
{"learn":[0.004639946715],"iteration":613,"passed_time":5.430647226,"remaining_time":3.414055096},
|
| 618 |
+
{"learn":[0.004627783519],"iteration":614,"passed_time":5.440406419,"remaining_time":3.40578288},
|
| 619 |
+
{"learn":[0.004615681558],"iteration":615,"passed_time":5.449531131,"remaining_time":3.397110315},
|
| 620 |
+
{"learn":[0.004604460484],"iteration":616,"passed_time":5.457715811,"remaining_time":3.387852764},
|
| 621 |
+
{"learn":[0.004593699688],"iteration":617,"passed_time":5.465859636,"remaining_time":3.378573432},
|
| 622 |
+
{"learn":[0.0045883751],"iteration":618,"passed_time":5.474199784,"remaining_time":3.369418607},
|
| 623 |
+
{"learn":[0.004571890998],"iteration":619,"passed_time":5.482121227,"remaining_time":3.360009784},
|
| 624 |
+
{"learn":[0.004559601121],"iteration":620,"passed_time":5.490301539,"remaining_time":3.350763741},
|
| 625 |
+
{"learn":[0.004535919875],"iteration":621,"passed_time":5.498412841,"remaining_time":3.341479186},
|
| 626 |
+
{"learn":[0.004529983405],"iteration":622,"passed_time":5.506709464,"remaining_time":3.332310542},
|
| 627 |
+
{"learn":[0.004524350903],"iteration":623,"passed_time":5.514917948,"remaining_time":3.323091584},
|
| 628 |
+
{"learn":[0.004508322265],"iteration":624,"passed_time":5.523472026,"remaining_time":3.314083215},
|
| 629 |
+
{"learn":[0.004492304327],"iteration":625,"passed_time":5.531932451,"remaining_time":3.305020346},
|
| 630 |
+
{"learn":[0.004486782062],"iteration":626,"passed_time":5.540261948,"remaining_time":3.29588151},
|
| 631 |
+
{"learn":[0.004465177965],"iteration":627,"passed_time":5.548235863,"remaining_time":3.286534619},
|
| 632 |
+
{"learn":[0.00444066963],"iteration":628,"passed_time":5.556355577,"remaining_time":3.277278091},
|
| 633 |
+
{"learn":[0.004426744967],"iteration":629,"passed_time":5.564520559,"remaining_time":3.268051757},
|
| 634 |
+
{"learn":[0.004403929859],"iteration":630,"passed_time":5.572696039,"remaining_time":3.258834926},
|
| 635 |
+
{"learn":[0.004367450679],"iteration":631,"passed_time":5.581089848,"remaining_time":3.249748519},
|
| 636 |
+
{"learn":[0.00435923509],"iteration":632,"passed_time":5.589400073,"remaining_time":3.24061584},
|
| 637 |
+
{"learn":[0.004353080923],"iteration":633,"passed_time":5.59797437,"remaining_time":3.231638201},
|
| 638 |
+
{"learn":[0.00434186019],"iteration":634,"passed_time":5.605884913,"remaining_time":3.222280305},
|
| 639 |
+
{"learn":[0.004341854505],"iteration":635,"passed_time":5.612895374,"remaining_time":3.212411818},
|
| 640 |
+
{"learn":[0.004341862085],"iteration":636,"passed_time":5.620201814,"remaining_time":3.202720971},
|
| 641 |
+
{"learn":[0.004335830268],"iteration":637,"passed_time":5.628720795,"remaining_time":3.193725592},
|
| 642 |
+
{"learn":[0.004325640458],"iteration":638,"passed_time":5.63726134,"remaining_time":3.184743887},
|
| 643 |
+
{"learn":[0.004318267205],"iteration":639,"passed_time":5.645191936,"remaining_time":3.175420464},
|
| 644 |
+
{"learn":[0.00431314103],"iteration":640,"passed_time":5.653312696,"remaining_time":3.166207891},
|
| 645 |
+
{"learn":[0.004298439961],"iteration":641,"passed_time":5.661361128,"remaining_time":3.156958386},
|
| 646 |
+
{"learn":[0.004292708981],"iteration":642,"passed_time":5.669302775,"remaining_time":3.147653329},
|
| 647 |
+
{"learn":[0.004283102967],"iteration":643,"passed_time":5.677538116,"remaining_time":3.138514859},
|
| 648 |
+
{"learn":[0.004282367101],"iteration":644,"passed_time":5.68520453,"remaining_time":3.129066059},
|
| 649 |
+
{"learn":[0.004255209016],"iteration":645,"passed_time":5.693471797,"remaining_time":3.119952037},
|
| 650 |
+
{"learn":[0.004249618278],"iteration":646,"passed_time":5.70177203,"remaining_time":3.110858619},
|
| 651 |
+
{"learn":[0.004231335076],"iteration":647,"passed_time":5.709665528,"remaining_time":3.101546706},
|
| 652 |
+
{"learn":[0.004211062267],"iteration":648,"passed_time":5.718068199,"remaining_time":3.092514542},
|
| 653 |
+
{"learn":[0.004211053333],"iteration":649,"passed_time":5.725528648,"remaining_time":3.082976965},
|
| 654 |
+
{"learn":[0.004211046565],"iteration":650,"passed_time":5.732719546,"remaining_time":3.073301262},
|
| 655 |
+
{"learn":[0.004197778511],"iteration":651,"passed_time":5.741141501,"remaining_time":3.064290249},
|
| 656 |
+
{"learn":[0.004187183136],"iteration":652,"passed_time":5.749017203,"remaining_time":3.054990765},
|
| 657 |
+
{"learn":[0.004168454299],"iteration":653,"passed_time":5.757040298,"remaining_time":3.045773613},
|
| 658 |
+
{"learn":[0.004158197346],"iteration":654,"passed_time":5.765371264,"remaining_time":3.036722269},
|
| 659 |
+
{"learn":[0.004146825318],"iteration":655,"passed_time":5.773658874,"remaining_time":3.027650385},
|
| 660 |
+
{"learn":[0.004129546329],"iteration":656,"passed_time":5.781527338,"remaining_time":3.018362065},
|
| 661 |
+
{"learn":[0.004117483825],"iteration":657,"passed_time":5.789855051,"remaining_time":3.009316759},
|
| 662 |
+
{"learn":[0.004113762262],"iteration":658,"passed_time":5.798018371,"remaining_time":3.000188565},
|
| 663 |
+
{"learn":[0.004108743322],"iteration":659,"passed_time":5.806108877,"remaining_time":2.991025785},
|
| 664 |
+
{"learn":[0.00409816611],"iteration":660,"passed_time":5.814600601,"remaining_time":2.982072018},
|
| 665 |
+
{"learn":[0.00407443313],"iteration":661,"passed_time":5.822795161,"remaining_time":2.972967922},
|
| 666 |
+
{"learn":[0.00405420271],"iteration":662,"passed_time":5.831135409,"remaining_time":2.963940623},
|
| 667 |
+
{"learn":[0.004044180828],"iteration":663,"passed_time":5.839533579,"remaining_time":2.954944702},
|
| 668 |
+
{"learn":[0.0040299847],"iteration":664,"passed_time":5.849282276,"remaining_time":2.946630921},
|
| 669 |
+
{"learn":[0.004019793831],"iteration":665,"passed_time":5.857205818,"remaining_time":2.937397512},
|
| 670 |
+
{"learn":[0.004007991011],"iteration":666,"passed_time":5.865571032,"remaining_time":2.928388536},
|
| 671 |
+
{"learn":[0.003994558844],"iteration":667,"passed_time":5.873298371,"remaining_time":2.91906446},
|
| 672 |
+
{"learn":[0.003977400557],"iteration":668,"passed_time":5.881337885,"remaining_time":2.909899611},
|
| 673 |
+
{"learn":[0.003967748209],"iteration":669,"passed_time":5.88961393,"remaining_time":2.900854622},
|
| 674 |
+
{"learn":[0.00394917527],"iteration":670,"passed_time":5.898420276,"remaining_time":2.892071939},
|
| 675 |
+
{"learn":[0.0039457068],"iteration":671,"passed_time":5.906869423,"remaining_time":2.883114838},
|
| 676 |
+
{"learn":[0.00393461548],"iteration":672,"passed_time":5.914656274,"remaining_time":2.873837447},
|
| 677 |
+
{"learn":[0.003924946817],"iteration":673,"passed_time":5.922908699,"remaining_time":2.864789668},
|
| 678 |
+
{"learn":[0.003920642649],"iteration":674,"passed_time":5.930888701,"remaining_time":2.855613078},
|
| 679 |
+
{"learn":[0.003908077902],"iteration":675,"passed_time":5.939494294,"remaining_time":2.846739869},
|
| 680 |
+
{"learn":[0.003898624005],"iteration":676,"passed_time":5.947981576,"remaining_time":2.837811003},
|
| 681 |
+
{"learn":[0.003876262089],"iteration":677,"passed_time":5.956365651,"remaining_time":2.828834424},
|
| 682 |
+
{"learn":[0.003864436257],"iteration":678,"passed_time":5.963439326,"remaining_time":2.819240094},
|
| 683 |
+
{"learn":[0.003861716424],"iteration":679,"passed_time":5.970843743,"remaining_time":2.80980882},
|
| 684 |
+
{"learn":[0.003850099322],"iteration":680,"passed_time":5.979598288,"remaining_time":2.801015938},
|
| 685 |
+
{"learn":[0.003842953465],"iteration":681,"passed_time":5.98805625,"remaining_time":2.792084879},
|
| 686 |
+
{"learn":[0.003829765279],"iteration":682,"passed_time":5.996681863,"remaining_time":2.783233017},
|
| 687 |
+
{"learn":[0.003808679893],"iteration":683,"passed_time":6.005023366,"remaining_time":2.774250561},
|
| 688 |
+
{"learn":[0.003797601768],"iteration":684,"passed_time":6.013435132,"remaining_time":2.765302287},
|
| 689 |
+
{"learn":[0.003782490508],"iteration":685,"passed_time":6.021142822,"remaining_time":2.756033303},
|
| 690 |
+
{"learn":[0.003771832616],"iteration":686,"passed_time":6.02965255,"remaining_time":2.747134277},
|
| 691 |
+
{"learn":[0.00376294203],"iteration":687,"passed_time":6.038197912,"remaining_time":2.738252542},
|
| 692 |
+
{"learn":[0.003742079825],"iteration":688,"passed_time":6.046465658,"remaining_time":2.729246472},
|
| 693 |
+
{"learn":[0.003731586023],"iteration":689,"passed_time":6.05428499,"remaining_time":2.720041083},
|
| 694 |
+
{"learn":[0.003721365117],"iteration":690,"passed_time":6.063177086,"remaining_time":2.711319421},
|
| 695 |
+
{"learn":[0.003707134025],"iteration":691,"passed_time":6.071140833,"remaining_time":2.70218407},
|
| 696 |
+
{"learn":[0.003704911244],"iteration":692,"passed_time":6.078841799,"remaining_time":2.692935689},
|
| 697 |
+
{"learn":[0.00369646767],"iteration":693,"passed_time":6.087003639,"remaining_time":2.683894976},
|
| 698 |
+
{"learn":[0.003674965664],"iteration":694,"passed_time":6.095323901,"remaining_time":2.674926316},
|
| 699 |
+
{"learn":[0.003666434913],"iteration":695,"passed_time":6.103420214,"remaining_time":2.665861703},
|
| 700 |
+
{"learn":[0.003662391442],"iteration":696,"passed_time":6.111553759,"remaining_time":2.656816053},
|
| 701 |
+
{"learn":[0.003642016519],"iteration":697,"passed_time":6.11999994,"remaining_time":2.647908284},
|
| 702 |
+
{"learn":[0.003631173194],"iteration":698,"passed_time":6.128496529,"remaining_time":2.639023541},
|
| 703 |
+
{"learn":[0.003623114339],"iteration":699,"passed_time":6.136170389,"remaining_time":2.62978731},
|
| 704 |
+
{"learn":[0.003618134927],"iteration":700,"passed_time":6.144286187,"remaining_time":2.620744037},
|
| 705 |
+
{"learn":[0.003611635322],"iteration":701,"passed_time":6.15320862,"remaining_time":2.612045825},
|
| 706 |
+
{"learn":[0.003601396014],"iteration":702,"passed_time":6.1617848,"remaining_time":2.603200691},
|
| 707 |
+
{"learn":[0.003591910982],"iteration":703,"passed_time":6.170309376,"remaining_time":2.594334624},
|
| 708 |
+
{"learn":[0.003587096697],"iteration":704,"passed_time":6.178353511,"remaining_time":2.58526849},
|
| 709 |
+
{"learn":[0.003583766914],"iteration":705,"passed_time":6.186746307,"remaining_time":2.576350445},
|
| 710 |
+
{"learn":[0.003579130233],"iteration":706,"passed_time":6.195221776,"remaining_time":2.567468148},
|
| 711 |
+
{"learn":[0.003574625943],"iteration":707,"passed_time":6.20371868,"remaining_time":2.55859584},
|
| 712 |
+
{"learn":[0.003565632842],"iteration":708,"passed_time":6.212506075,"remaining_time":2.549843819},
|
| 713 |
+
{"learn":[0.003557394302],"iteration":709,"passed_time":6.222163631,"remaining_time":2.541447117},
|
| 714 |
+
{"learn":[0.003547782046],"iteration":710,"passed_time":6.231596744,"remaining_time":2.532955638},
|
| 715 |
+
{"learn":[0.003534162846],"iteration":711,"passed_time":6.239839004,"remaining_time":2.523979822},
|
| 716 |
+
{"learn":[0.003526256169],"iteration":712,"passed_time":6.247915682,"remaining_time":2.514939412},
|
| 717 |
+
{"learn":[0.003509989693],"iteration":713,"passed_time":6.256081886,"remaining_time":2.505937562},
|
| 718 |
+
{"learn":[0.003501361499],"iteration":714,"passed_time":6.264082307,"remaining_time":2.496871969},
|
| 719 |
+
{"learn":[0.003497592828],"iteration":715,"passed_time":6.272258442,"remaining_time":2.487879047},
|
| 720 |
+
{"learn":[0.003497068903],"iteration":716,"passed_time":6.280256307,"remaining_time":2.47881804},
|
| 721 |
+
{"learn":[0.003494003655],"iteration":717,"passed_time":6.288337818,"remaining_time":2.469792848},
|
| 722 |
+
{"learn":[0.003483512537],"iteration":718,"passed_time":6.296293778,"remaining_time":2.460721212},
|
| 723 |
+
{"learn":[0.003475120992],"iteration":719,"passed_time":6.304344471,"remaining_time":2.451689516},
|
| 724 |
+
{"learn":[0.003457106338],"iteration":720,"passed_time":6.31308118,"remaining_time":2.442926004},
|
| 725 |
+
{"learn":[0.003446459275],"iteration":721,"passed_time":6.322114352,"remaining_time":2.434276717},
|
| 726 |
+
{"learn":[0.0034265863],"iteration":722,"passed_time":6.330596376,"remaining_time":2.425415209},
|
| 727 |
+
{"learn":[0.003409362695],"iteration":723,"passed_time":6.339154183,"remaining_time":2.416583639},
|
| 728 |
+
{"learn":[0.003402224373],"iteration":724,"passed_time":6.348130798,"remaining_time":2.407911682},
|
| 729 |
+
{"learn":[0.003394745252],"iteration":725,"passed_time":6.356299424,"remaining_time":2.398933942},
|
| 730 |
+
{"learn":[0.003380096907],"iteration":726,"passed_time":6.364993167,"remaining_time":2.390155619},
|
| 731 |
+
{"learn":[0.00337582385],"iteration":727,"passed_time":6.373424511,"remaining_time":2.381279488},
|
| 732 |
+
{"learn":[0.003373330354],"iteration":728,"passed_time":6.382196908,"remaining_time":2.372531361},
|
| 733 |
+
{"learn":[0.003373329813],"iteration":729,"passed_time":6.389283985,"remaining_time":2.36315983},
|
| 734 |
+
{"learn":[0.00336313653],"iteration":730,"passed_time":6.397890211,"remaining_time":2.35435358},
|
| 735 |
+
{"learn":[0.00334907704],"iteration":731,"passed_time":6.406420803,"remaining_time":2.345520185},
|
| 736 |
+
{"learn":[0.003346926274],"iteration":732,"passed_time":6.414722037,"remaining_time":2.336604071},
|
| 737 |
+
{"learn":[0.003342835942],"iteration":733,"passed_time":6.423041718,"remaining_time":2.327696317},
|
| 738 |
+
{"learn":[0.003342828903],"iteration":734,"passed_time":6.430593435,"remaining_time":2.318513279},
|
| 739 |
+
{"learn":[0.003342829444],"iteration":735,"passed_time":6.438086533,"remaining_time":2.309313648},
|
| 740 |
+
{"learn":[0.003340960788],"iteration":736,"passed_time":6.446688671,"remaining_time":2.30051441},
|
| 741 |
+
{"learn":[0.003339254892],"iteration":737,"passed_time":6.455336401,"remaining_time":2.291731893},
|
| 742 |
+
{"learn":[0.003339254892],"iteration":738,"passed_time":6.462665653,"remaining_time":2.28248408},
|
| 743 |
+
{"learn":[0.003339254892],"iteration":739,"passed_time":6.469938575,"remaining_time":2.273221662},
|
| 744 |
+
{"learn":[0.003332448223],"iteration":740,"passed_time":6.478711975,"remaining_time":2.264489071},
|
| 745 |
+
{"learn":[0.003332451472],"iteration":741,"passed_time":6.485937791,"remaining_time":2.255218262},
|
| 746 |
+
{"learn":[0.003332430354],"iteration":742,"passed_time":6.493634479,"remaining_time":2.246115829},
|
| 747 |
+
{"learn":[0.003332430896],"iteration":743,"passed_time":6.500893887,"remaining_time":2.236866714},
|
| 748 |
+
{"learn":[0.003332439018],"iteration":744,"passed_time":6.508359957,"remaining_time":2.227693676},
|
| 749 |
+
{"learn":[0.003332435769],"iteration":745,"passed_time":6.515920123,"remaining_time":2.218557254},
|
| 750 |
+
{"learn":[0.003320449048],"iteration":746,"passed_time":6.523838286,"remaining_time":2.2095463},
|
| 751 |
+
{"learn":[0.003320449048],"iteration":747,"passed_time":6.531269656,"remaining_time":2.200374269},
|
| 752 |
+
{"learn":[0.003320449048],"iteration":748,"passed_time":6.539024646,"remaining_time":2.191315335},
|
| 753 |
+
{"learn":[0.003320449048],"iteration":749,"passed_time":6.546393214,"remaining_time":2.182131071},
|
| 754 |
+
{"learn":[0.003318643829],"iteration":750,"passed_time":6.555126669,"remaining_time":2.173404182},
|
| 755 |
+
{"learn":[0.003308927834],"iteration":751,"passed_time":6.563120794,"remaining_time":2.164433453},
|
| 756 |
+
{"learn":[0.003308927834],"iteration":752,"passed_time":6.570186718,"remaining_time":2.155160849},
|
| 757 |
+
{"learn":[0.003308927834],"iteration":753,"passed_time":6.577716721,"remaining_time":2.146045508},
|
| 758 |
+
{"learn":[0.003302428205],"iteration":754,"passed_time":6.585978415,"remaining_time":2.137171804},
|
| 759 |
+
{"learn":[0.003288610307],"iteration":755,"passed_time":6.594192365,"remaining_time":2.128284308},
|
| 760 |
+
{"learn":[0.003282053506],"iteration":756,"passed_time":6.602566879,"remaining_time":2.119450134},
|
| 761 |
+
{"learn":[0.003278692294],"iteration":757,"passed_time":6.61091943,"remaining_time":2.110610161},
|
| 762 |
+
{"learn":[0.003278689045],"iteration":758,"passed_time":6.619210876,"remaining_time":2.10175207},
|
| 763 |
+
{"learn":[0.003278702582],"iteration":759,"passed_time":6.62699301,"remaining_time":2.092734635},
|
| 764 |
+
{"learn":[0.003278692836],"iteration":760,"passed_time":6.634429197,"remaining_time":2.083611798},
|
| 765 |
+
{"learn":[0.003278690128],"iteration":761,"passed_time":6.641849983,"remaining_time":2.074488577},
|
| 766 |
+
{"learn":[0.003270898291],"iteration":762,"passed_time":6.650553905,"remaining_time":2.065768382},
|
| 767 |
+
{"learn":[0.003270898833],"iteration":763,"passed_time":6.657960759,"remaining_time":2.056647564},
|
| 768 |
+
{"learn":[0.003270891793],"iteration":764,"passed_time":6.664976689,"remaining_time":2.04741114},
|
| 769 |
+
{"learn":[0.00327007958],"iteration":765,"passed_time":6.672722027,"remaining_time":2.038403335},
|
| 770 |
+
{"learn":[0.003270078497],"iteration":766,"passed_time":6.680031369,"remaining_time":2.029266374},
|
| 771 |
+
{"learn":[0.003270069292],"iteration":767,"passed_time":6.687499963,"remaining_time":2.02018228},
|
| 772 |
+
{"learn":[0.003270076872],"iteration":768,"passed_time":6.694989489,"remaining_time":2.011108676},
|
| 773 |
+
{"learn":[0.003270073894],"iteration":769,"passed_time":6.70235833,"remaining_time":2.002003138},
|
| 774 |
+
{"learn":[0.003270066584],"iteration":770,"passed_time":6.709573451,"remaining_time":1.992856446},
|
| 775 |
+
{"learn":[0.003270069292],"iteration":771,"passed_time":6.716657672,"remaining_time":1.9836761},
|
| 776 |
+
{"learn":[0.003270070916],"iteration":772,"passed_time":6.723917715,"remaining_time":1.974552809},
|
| 777 |
+
{"learn":[0.00327007606],"iteration":773,"passed_time":6.731159305,"remaining_time":1.965428944},
|
| 778 |
+
{"learn":[0.003270059545],"iteration":774,"passed_time":6.738170378,"remaining_time":1.956243013},
|
| 779 |
+
{"learn":[0.003270064689],"iteration":775,"passed_time":6.745176506,"remaining_time":1.947061259},
|
| 780 |
+
{"learn":[0.003270057379],"iteration":776,"passed_time":6.752743776,"remaining_time":1.938046154},
|
| 781 |
+
{"learn":[0.003270061982],"iteration":777,"passed_time":6.760589003,"remaining_time":1.929114086},
|
| 782 |
+
{"learn":[0.003268352519],"iteration":778,"passed_time":6.769359238,"remaining_time":1.920447229},
|
| 783 |
+
{"learn":[0.003266124887],"iteration":779,"passed_time":6.777490596,"remaining_time":1.911599912},
|
| 784 |
+
{"learn":[0.003265681643],"iteration":780,"passed_time":6.785939988,"remaining_time":1.902843607},
|
| 785 |
+
{"learn":[0.003265685974],"iteration":781,"passed_time":6.793422709,"remaining_time":1.893818607},
|
| 786 |
+
{"learn":[0.003265681913],"iteration":782,"passed_time":6.800942947,"remaining_time":1.884807943},
|
| 787 |
+
{"learn":[0.003265690848],"iteration":783,"passed_time":6.808268339,"remaining_time":1.875747399},
|
| 788 |
+
{"learn":[0.003245993087],"iteration":784,"passed_time":6.816503588,"remaining_time":1.866940473},
|
| 789 |
+
{"learn":[0.003245995252],"iteration":785,"passed_time":6.823731263,"remaining_time":1.857860675},
|
| 790 |
+
{"learn":[0.003245996606],"iteration":786,"passed_time":6.831047447,"remaining_time":1.848809538},
|
| 791 |
+
{"learn":[0.003245993628],"iteration":787,"passed_time":6.838805212,"remaining_time":1.839881605},
|
| 792 |
+
{"learn":[0.003245995252],"iteration":788,"passed_time":6.846048877,"remaining_time":1.830819155},
|
| 793 |
+
{"learn":[0.00324599417],"iteration":789,"passed_time":6.853757058,"remaining_time":1.821884788},
|
| 794 |
+
{"learn":[0.003244925838],"iteration":790,"passed_time":6.864333981,"remaining_time":1.813711507},
|
| 795 |
+
{"learn":[0.003244920423],"iteration":791,"passed_time":6.871681537,"remaining_time":1.80468404},
|
| 796 |
+
{"learn":[0.003244909052],"iteration":792,"passed_time":6.879219361,"remaining_time":1.795710476},
|
| 797 |
+
{"learn":[0.003244913384],"iteration":793,"passed_time":6.886657987,"remaining_time":1.786714793},
|
| 798 |
+
{"learn":[0.003244916633],"iteration":794,"passed_time":6.89434021,"remaining_time":1.77778584},
|
| 799 |
+
{"learn":[0.00324491555],"iteration":795,"passed_time":6.90173437,"remaining_time":1.768786195},
|
| 800 |
+
{"learn":[0.003243304094],"iteration":796,"passed_time":6.910242236,"remaining_time":1.760074246},
|
| 801 |
+
{"learn":[0.00324330247],"iteration":797,"passed_time":6.917472963,"remaining_time":1.751039522},
|
| 802 |
+
{"learn":[0.003243305448],"iteration":798,"passed_time":6.924614504,"remaining_time":1.741986878},
|
| 803 |
+
{"learn":[0.003243300575],"iteration":799,"passed_time":6.931930755,"remaining_time":1.732982689},
|
| 804 |
+
{"learn":[0.003243296513],"iteration":800,"passed_time":6.939302713,"remaining_time":1.723996554},
|
| 805 |
+
{"learn":[0.003243300304],"iteration":801,"passed_time":6.947189273,"remaining_time":1.715141491},
|
| 806 |
+
{"learn":[0.003243296243],"iteration":802,"passed_time":6.954528681,"remaining_time":1.706154608},
|
| 807 |
+
{"learn":[0.003243300845],"iteration":803,"passed_time":6.961785209,"remaining_time":1.697151618},
|
| 808 |
+
{"learn":[0.003243294348],"iteration":804,"passed_time":6.969137592,"remaining_time":1.688176187},
|
| 809 |
+
{"learn":[0.003241708612],"iteration":805,"passed_time":6.977881216,"remaining_time":1.679539647},
|
| 810 |
+
{"learn":[0.003241715922],"iteration":806,"passed_time":6.985170477,"remaining_time":1.670555021},
|
| 811 |
+
{"learn":[0.003241715381],"iteration":807,"passed_time":6.992602715,"remaining_time":1.661608566},
|
| 812 |
+
{"learn":[0.003241699678],"iteration":808,"passed_time":6.999978377,"remaining_time":1.652652497},
|
| 813 |
+
{"learn":[0.003241697512],"iteration":809,"passed_time":7.008674789,"remaining_time":1.644010136},
|
| 814 |
+
{"learn":[0.00324169697],"iteration":810,"passed_time":7.016644503,"remaining_time":1.635198287},
|
| 815 |
+
{"learn":[0.003241689931],"iteration":811,"passed_time":7.023740771,"remaining_time":1.626186287},
|
| 816 |
+
{"learn":[0.003241693992],"iteration":812,"passed_time":7.031141512,"remaining_time":1.617249032},
|
| 817 |
+
{"learn":[0.003241697783],"iteration":813,"passed_time":7.038181731,"remaining_time":1.608233172},
|
| 818 |
+
{"learn":[0.003241695075],"iteration":814,"passed_time":7.045407072,"remaining_time":1.599264182},
|
| 819 |
+
{"learn":[0.003240128291],"iteration":815,"passed_time":7.053844438,"remaining_time":1.590572766},
|
| 820 |
+
{"learn":[0.003240132894],"iteration":816,"passed_time":7.060978984,"remaining_time":1.581590152},
|
| 821 |
+
{"learn":[0.003240129916],"iteration":817,"passed_time":7.068585201,"remaining_time":1.572717001},
|
| 822 |
+
{"learn":[0.003240132082],"iteration":818,"passed_time":7.076030269,"remaining_time":1.563811329},
|
| 823 |
+
{"learn":[0.003240132082],"iteration":819,"passed_time":7.083380776,"remaining_time":1.554888463},
|
| 824 |
+
{"learn":[0.003240134248],"iteration":820,"passed_time":7.090688517,"remaining_time":1.545960103},
|
| 825 |
+
{"learn":[0.003240133165],"iteration":821,"passed_time":7.098237037,"remaining_time":1.537087825},
|
| 826 |
+
{"learn":[0.003240126667],"iteration":822,"passed_time":7.105536051,"remaining_time":1.528165104},
|
| 827 |
+
{"learn":[0.003240133706],"iteration":823,"passed_time":7.112750275,"remaining_time":1.519228214},
|
| 828 |
+
{"learn":[0.003240123689],"iteration":824,"passed_time":7.12057425,"remaining_time":1.510424841},
|
| 829 |
+
{"learn":[0.003240128562],"iteration":825,"passed_time":7.128201783,"remaining_time":1.501582458},
|
| 830 |
+
{"learn":[0.003240124772],"iteration":826,"passed_time":7.135665836,"remaining_time":1.492708814},
|
| 831 |
+
{"learn":[0.003240120981],"iteration":827,"passed_time":7.142869649,"remaining_time":1.483784516},
|
| 832 |
+
{"learn":[0.003240114213],"iteration":828,"passed_time":7.150734886,"remaining_time":1.475000803},
|
| 833 |
+
{"learn":[0.003240108257],"iteration":829,"passed_time":7.158149913,"remaining_time":1.466127091},
|
| 834 |
+
{"learn":[0.003240106091],"iteration":830,"passed_time":7.165571262,"remaining_time":1.457258175},
|
| 835 |
+
{"learn":[0.003240102301],"iteration":831,"passed_time":7.173189365,"remaining_time":1.448432468},
|
| 836 |
+
{"learn":[0.003240096886],"iteration":832,"passed_time":7.180646065,"remaining_time":1.439577302},
|
| 837 |
+
{"learn":[0.003240092825],"iteration":833,"passed_time":7.188077628,"remaining_time":1.430720487},
|
| 838 |
+
{"learn":[0.003240096886],"iteration":834,"passed_time":7.195326365,"remaining_time":1.421830958},
|
| 839 |
+
{"learn":[0.003240088764],"iteration":835,"passed_time":7.202582316,"remaining_time":1.41294677},
|
| 840 |
+
{"learn":[0.00324009093],"iteration":836,"passed_time":7.210110968,"remaining_time":1.404119579},
|
| 841 |
+
{"learn":[0.003240087951],"iteration":837,"passed_time":7.217617458,"remaining_time":1.395291203},
|
| 842 |
+
{"learn":[0.003240082266],"iteration":838,"passed_time":7.224863107,"remaining_time":1.386415924},
|
| 843 |
+
{"learn":[0.003240089034],"iteration":839,"passed_time":7.232172291,"remaining_time":1.377556627},
|
| 844 |
+
{"learn":[0.003233733168],"iteration":840,"passed_time":7.240692552,"remaining_time":1.368929983},
|
| 845 |
+
{"learn":[0.003233732897],"iteration":841,"passed_time":7.247827867,"remaining_time":1.360043709},
|
| 846 |
+
{"learn":[0.003233729648],"iteration":842,"passed_time":7.255395659,"remaining_time":1.351242133},
|
| 847 |
+
{"learn":[0.003233732355],"iteration":843,"passed_time":7.262774108,"remaining_time":1.342408485},
|
| 848 |
+
{"learn":[0.003233733438],"iteration":844,"passed_time":7.270411337,"remaining_time":1.333625748},
|
| 849 |
+
{"learn":[0.003233717736],"iteration":845,"passed_time":7.278149408,"remaining_time":1.324864077},
|
| 850 |
+
{"learn":[0.003233726128],"iteration":846,"passed_time":7.285513077,"remaining_time":1.316037191},
|
| 851 |
+
{"learn":[0.003233727482],"iteration":847,"passed_time":7.292783743,"remaining_time":1.307197086},
|
| 852 |
+
{"learn":[0.003233727211],"iteration":848,"passed_time":7.300189382,"remaining_time":1.298384684},
|
| 853 |
+
{"learn":[0.003233735333],"iteration":849,"passed_time":7.307943114,"remaining_time":1.28963702},
|
| 854 |
+
{"learn":[0.003233713404],"iteration":850,"passed_time":7.315541235,"remaining_time":1.280864447},
|
| 855 |
+
{"learn":[0.003233723692],"iteration":851,"passed_time":7.323000806,"remaining_time":1.272070563},
|
| 856 |
+
{"learn":[0.003233721526],"iteration":852,"passed_time":7.330048661,"remaining_time":1.263208855},
|
| 857 |
+
{"learn":[0.003233716653],"iteration":853,"passed_time":7.338222819,"remaining_time":1.254543948},
|
| 858 |
+
{"learn":[0.003233721526],"iteration":854,"passed_time":7.345519597,"remaining_time":1.245731394},
|
| 859 |
+
{"learn":[0.003233720172],"iteration":855,"passed_time":7.35313089,"remaining_time":1.23697529},
|
| 860 |
+
{"learn":[0.003233313477],"iteration":856,"passed_time":7.362183747,"remaining_time":1.228462399},
|
| 861 |
+
{"learn":[0.003233306167],"iteration":857,"passed_time":7.370049165,"remaining_time":1.219751727},
|
| 862 |
+
{"learn":[0.003233305626],"iteration":858,"passed_time":7.377979045,"remaining_time":1.211053603},
|
| 863 |
+
{"learn":[0.003231793825],"iteration":859,"passed_time":7.386483877,"remaining_time":1.202450864},
|
| 864 |
+
{"learn":[0.003231798428],"iteration":860,"passed_time":7.393549339,"remaining_time":1.193615979},
|
| 865 |
+
{"learn":[0.003231792201],"iteration":861,"passed_time":7.401643684,"remaining_time":1.184949917},
|
| 866 |
+
{"learn":[0.003231796532],"iteration":862,"passed_time":7.410314323,"remaining_time":1.176376665},
|
| 867 |
+
{"learn":[0.003231793013],"iteration":863,"passed_time":7.417695006,"remaining_time":1.16760014},
|
| 868 |
+
{"learn":[0.00323179572],"iteration":864,"passed_time":7.425080883,"remaining_time":1.158827652},
|
| 869 |
+
{"learn":[0.00323179193],"iteration":865,"passed_time":7.432495525,"remaining_time":1.150062818},
|
| 870 |
+
{"learn":[0.00323178462],"iteration":866,"passed_time":7.439932772,"remaining_time":1.141304566},
|
| 871 |
+
{"learn":[0.003231789222],"iteration":867,"passed_time":7.447105628,"remaining_time":1.132509151},
|
| 872 |
+
{"learn":[0.003231788952],"iteration":868,"passed_time":7.454501528,"remaining_time":1.123751093},
|
| 873 |
+
{"learn":[0.003231792201],"iteration":869,"passed_time":7.461919664,"remaining_time":1.11499949},
|
| 874 |
+
{"learn":[0.003231785703],"iteration":870,"passed_time":7.469204669,"remaining_time":1.106231231},
|
| 875 |
+
{"learn":[0.003231790576],"iteration":871,"passed_time":7.476420328,"remaining_time":1.097456195},
|
| 876 |
+
{"learn":[0.003231787327],"iteration":872,"passed_time":7.48403836,"remaining_time":1.088743267},
|
| 877 |
+
{"learn":[0.003231786786],"iteration":873,"passed_time":7.491176963,"remaining_time":1.079963727},
|
| 878 |
+
{"learn":[0.003231786786],"iteration":874,"passed_time":7.498299756,"remaining_time":1.071185679},
|
| 879 |
+
{"learn":[0.003231789764],"iteration":875,"passed_time":7.505784629,"remaining_time":1.062462664},
|
| 880 |
+
{"learn":[0.003231780559],"iteration":876,"passed_time":7.511781943,"remaining_time":1.053533842},
|
| 881 |
+
{"learn":[0.003231778393],"iteration":877,"passed_time":7.51905655,"remaining_time":1.044789179},
|
| 882 |
+
{"learn":[0.003231786515],"iteration":878,"passed_time":7.527001263,"remaining_time":1.036140106},
|
| 883 |
+
{"learn":[0.003231786786],"iteration":879,"passed_time":7.53504872,"remaining_time":1.027506644},
|
| 884 |
+
{"learn":[0.003231776768],"iteration":880,"passed_time":7.542512193,"remaining_time":1.018795631},
|
| 885 |
+
{"learn":[0.003231775144],"iteration":881,"passed_time":7.550672051,"remaining_time":1.010180614},
|
| 886 |
+
{"learn":[0.003231772707],"iteration":882,"passed_time":7.558998204,"remaining_time":1.001588663},
|
| 887 |
+
{"learn":[0.003231768917],"iteration":883,"passed_time":7.567083278,"remaining_time":0.992965679},
|
| 888 |
+
{"learn":[0.003231770812],"iteration":884,"passed_time":7.574800715,"remaining_time":0.9842961381},
|
| 889 |
+
{"learn":[0.003231773249],"iteration":885,"passed_time":7.582521639,"remaining_time":0.975629195},
|
| 890 |
+
{"learn":[0.003231771354],"iteration":886,"passed_time":7.5901045,"remaining_time":0.9669467965},
|
| 891 |
+
{"learn":[0.003231769188],"iteration":887,"passed_time":7.59761891,"remaining_time":0.9582582409},
|
| 892 |
+
{"learn":[0.003231760253],"iteration":888,"passed_time":7.605378367,"remaining_time":0.9496029232},
|
| 893 |
+
{"learn":[0.003231764585],"iteration":889,"passed_time":7.612679637,"remaining_time":0.9408929888},
|
| 894 |
+
{"learn":[0.003231759441],"iteration":890,"passed_time":7.620143596,"remaining_time":0.932206119},
|
| 895 |
+
{"learn":[0.003231762961],"iteration":891,"passed_time":7.627859272,"remaining_time":0.923552468},
|
| 896 |
+
{"learn":[0.003231758088],"iteration":892,"passed_time":7.635403439,"remaining_time":0.9148803673},
|
| 897 |
+
{"learn":[0.003231761878],"iteration":893,"passed_time":7.642795968,"remaining_time":0.9061928105},
|
| 898 |
+
{"learn":[0.003231761336],"iteration":894,"passed_time":7.65063214,"remaining_time":0.8975601951},
|
| 899 |
+
{"learn":[0.003231746717],"iteration":895,"passed_time":7.658081935,"remaining_time":0.8888845103},
|
| 900 |
+
{"learn":[0.003231749153],"iteration":896,"passed_time":7.665801473,"remaining_time":0.8802425326},
|
| 901 |
+
{"learn":[0.003231749153],"iteration":897,"passed_time":7.673332835,"remaining_time":0.8715812352},
|
| 902 |
+
{"learn":[0.003231739136],"iteration":898,"passed_time":7.680788265,"remaining_time":0.8629139208},
|
| 903 |
+
{"learn":[0.003231749965],"iteration":899,"passed_time":7.688074816,"remaining_time":0.8542305351},
|
| 904 |
+
{"learn":[0.003231737241],"iteration":900,"passed_time":7.695738735,"remaining_time":0.8455917145},
|
| 905 |
+
{"learn":[0.00323174076],"iteration":901,"passed_time":7.703193127,"remaining_time":0.836932291},
|
| 906 |
+
{"learn":[0.003231737511],"iteration":902,"passed_time":7.710348633,"remaining_time":0.8282434301},
|
| 907 |
+
{"learn":[0.003231738053],"iteration":903,"passed_time":7.717576762,"remaining_time":0.8195656739},
|
| 908 |
+
{"learn":[0.00323172614],"iteration":904,"passed_time":7.724891445,"remaining_time":0.8109002069},
|
| 909 |
+
{"learn":[0.003231729389],"iteration":905,"passed_time":7.73205223,"remaining_time":0.8022217546},
|
| 910 |
+
{"learn":[0.003231732638],"iteration":906,"passed_time":7.739509841,"remaining_time":0.793577084},
|
| 911 |
+
{"learn":[0.003231738324],"iteration":907,"passed_time":7.746939301,"remaining_time":0.7849321758},
|
| 912 |
+
{"learn":[0.003231724516],"iteration":908,"passed_time":7.7541695,"remaining_time":0.7762699939},
|
| 913 |
+
{"learn":[0.003231717477],"iteration":909,"passed_time":7.761824452,"remaining_time":0.7676529678},
|
| 914 |
+
{"learn":[0.003231724245],"iteration":910,"passed_time":7.769069726,"remaining_time":0.7589980303},
|
| 915 |
+
{"learn":[0.003231731014],"iteration":911,"passed_time":7.778123571,"remaining_time":0.7505206955},
|
| 916 |
+
{"learn":[0.003231719101],"iteration":912,"passed_time":7.785969774,"remaining_time":0.7419270211},
|
| 917 |
+
{"learn":[0.003231733721],"iteration":913,"passed_time":7.793161673,"remaining_time":0.7332734178},
|
| 918 |
+
{"learn":[0.00323171856],"iteration":914,"passed_time":7.800345053,"remaining_time":0.7246222181},
|
| 919 |
+
{"learn":[0.003231721809],"iteration":915,"passed_time":7.80778356,"remaining_time":0.715997619},
|
| 920 |
+
{"learn":[0.003231724516],"iteration":916,"passed_time":7.814956569,"remaining_time":0.707351576},
|
| 921 |
+
{"learn":[0.003231723975],"iteration":917,"passed_time":7.822899101,"remaining_time":0.6987774796},
|
| 922 |
+
{"learn":[0.003231719101],"iteration":918,"passed_time":7.831357678,"remaining_time":0.6902502415},
|
| 923 |
+
{"learn":[0.003231709896],"iteration":919,"passed_time":7.839130136,"remaining_time":0.68166349},
|
| 924 |
+
{"learn":[0.003231712333],"iteration":920,"passed_time":7.846706174,"remaining_time":0.6730616588},
|
| 925 |
+
{"learn":[0.003231696901],"iteration":921,"passed_time":7.854323374,"remaining_time":0.6644655349},
|
| 926 |
+
{"learn":[0.003231695006],"iteration":922,"passed_time":7.861797643,"remaining_time":0.6558596083},
|
| 927 |
+
{"learn":[0.003231696359],"iteration":923,"passed_time":7.868984083,"remaining_time":0.647232457},
|
| 928 |
+
{"learn":[0.003231695818],"iteration":924,"passed_time":7.876442621,"remaining_time":0.6386304828},
|
| 929 |
+
{"learn":[0.003231688237],"iteration":925,"passed_time":7.883895372,"remaining_time":0.6300305157},
|
| 930 |
+
{"learn":[0.003231678491],"iteration":926,"passed_time":7.891480582,"remaining_time":0.6214434547},
|
| 931 |
+
{"learn":[0.003231678491],"iteration":927,"passed_time":7.898985887,"remaining_time":0.6128523533},
|
| 932 |
+
{"learn":[0.003231673347],"iteration":928,"passed_time":7.9062473,"remaining_time":0.6042449498},
|
| 933 |
+
{"learn":[0.003231663329],"iteration":929,"passed_time":7.91349953,"remaining_time":0.5956397496},
|
| 934 |
+
{"learn":[0.003231663871],"iteration":930,"passed_time":7.921215148,"remaining_time":0.5870717994},
|
| 935 |
+
{"learn":[0.003231662246],"iteration":931,"passed_time":7.928905532,"remaining_time":0.5785038371},
|
| 936 |
+
{"learn":[0.003231655207],"iteration":932,"passed_time":7.936680073,"remaining_time":0.5699437995},
|
| 937 |
+
{"learn":[0.003231653041],"iteration":933,"passed_time":7.944336053,"remaining_time":0.5613770658},
|
| 938 |
+
{"learn":[0.003231639504],"iteration":934,"passed_time":7.951774924,"remaining_time":0.5527971872},
|
| 939 |
+
{"learn":[0.003231641129],"iteration":935,"passed_time":7.959044156,"remaining_time":0.5442081474},
|
| 940 |
+
{"learn":[0.003231637609],"iteration":936,"passed_time":7.966225013,"remaining_time":0.5356159827},
|
| 941 |
+
{"learn":[0.003231630299],"iteration":937,"passed_time":7.97374045,"remaining_time":0.5270489423},
|
| 942 |
+
{"learn":[0.003231624072],"iteration":938,"passed_time":7.980902386,"remaining_time":0.5184611774},
|
| 943 |
+
{"learn":[0.003231626509],"iteration":939,"passed_time":7.988578517,"remaining_time":0.509909267},
|
| 944 |
+
{"learn":[0.003231612431],"iteration":940,"passed_time":7.995832042,"remaining_time":0.501332721},
|
| 945 |
+
{"learn":[0.003231617033],"iteration":941,"passed_time":8.003752278,"remaining_time":0.4928000341},
|
| 946 |
+
{"learn":[0.003231613243],"iteration":942,"passed_time":8.011598597,"remaining_time":0.4842641782},
|
| 947 |
+
{"learn":[0.003231606745],"iteration":943,"passed_time":8.018865704,"remaining_time":0.4756954231},
|
| 948 |
+
{"learn":[0.003231608911],"iteration":944,"passed_time":8.02621914,"remaining_time":0.4671344473},
|
| 949 |
+
{"learn":[0.003231597269],"iteration":945,"passed_time":8.033823356,"remaining_time":0.4585903396},
|
| 950 |
+
{"learn":[0.003231602143],"iteration":946,"passed_time":8.0411343,"remaining_time":0.4500318035},
|
| 951 |
+
{"learn":[0.003231585086],"iteration":947,"passed_time":8.048640653,"remaining_time":0.4414866181},
|
| 952 |
+
{"learn":[0.003231582649],"iteration":948,"passed_time":8.056163787,"remaining_time":0.4329445238},
|
| 953 |
+
{"learn":[0.003231578047],"iteration":949,"passed_time":8.063402416,"remaining_time":0.4243896009},
|
| 954 |
+
{"learn":[0.003231570466],"iteration":950,"passed_time":8.070840217,"remaining_time":0.4158477083},
|
| 955 |
+
{"learn":[0.003231567488],"iteration":951,"passed_time":8.078494439,"remaining_time":0.4073190473},
|
| 956 |
+
{"learn":[0.003231559637],"iteration":952,"passed_time":8.086157459,"remaining_time":0.3987926554},
|
| 957 |
+
{"learn":[0.003231560449],"iteration":953,"passed_time":8.093007554,"remaining_time":0.3902288757},
|
| 958 |
+
{"learn":[0.003231550702],"iteration":954,"passed_time":8.100611896,"remaining_time":0.3817042255},
|
| 959 |
+
{"learn":[0.003231549349],"iteration":955,"passed_time":8.107689539,"remaining_time":0.3731572591},
|
| 960 |
+
{"learn":[0.003231546641],"iteration":956,"passed_time":8.114909382,"remaining_time":0.3646197528},
|
| 961 |
+
{"learn":[0.003231541497],"iteration":957,"passed_time":8.12247493,"remaining_time":0.3561001535},
|
| 962 |
+
{"learn":[0.003231530126],"iteration":958,"passed_time":8.13039008,"remaining_time":0.3475974904},
|
| 963 |
+
{"learn":[0.003231537707],"iteration":959,"passed_time":8.138018496,"remaining_time":0.339084104},
|
| 964 |
+
{"learn":[0.003231531751],"iteration":960,"passed_time":8.145466729,"remaining_time":0.3305652471},
|
| 965 |
+
{"learn":[0.00323152796],"iteration":961,"passed_time":8.15295322,"remaining_time":0.3220501272},
|
| 966 |
+
{"learn":[0.003231507926],"iteration":962,"passed_time":8.160589472,"remaining_time":0.3135428977},
|
| 967 |
+
{"learn":[0.003231515236],"iteration":963,"passed_time":8.168243214,"remaining_time":0.3050381283},
|
| 968 |
+
{"learn":[0.003231514423],"iteration":964,"passed_time":8.175406231,"remaining_time":0.2965173244},
|
| 969 |
+
{"learn":[0.003231504948],"iteration":965,"passed_time":8.183970715,"remaining_time":0.2880486587},
|
| 970 |
+
{"learn":[0.003231500887],"iteration":966,"passed_time":8.192381887,"remaining_time":0.2795745628},
|
| 971 |
+
{"learn":[0.003231496825],"iteration":967,"passed_time":8.199857264,"remaining_time":0.2710696616},
|
| 972 |
+
{"learn":[0.003231499533],"iteration":968,"passed_time":8.206878812,"remaining_time":0.2625523665},
|
| 973 |
+
{"learn":[0.003231500074],"iteration":969,"passed_time":8.213953228,"remaining_time":0.2540397906},
|
| 974 |
+
{"learn":[0.003231491681],"iteration":970,"passed_time":8.221284982,"remaining_time":0.2455378625},
|
| 975 |
+
{"learn":[0.003231493306],"iteration":971,"passed_time":8.228803605,"remaining_time":0.2370437252},
|
| 976 |
+
{"learn":[0.00323149114],"iteration":972,"passed_time":8.236193363,"remaining_time":0.2285480173},
|
| 977 |
+
{"learn":[0.00323147652],"iteration":973,"passed_time":8.243571216,"remaining_time":0.2200542624},
|
| 978 |
+
{"learn":[0.003231471647],"iteration":974,"passed_time":8.251215048,"remaining_time":0.2115696166},
|
| 979 |
+
{"learn":[0.003231467315],"iteration":975,"passed_time":8.258563967,"remaining_time":0.2030794418},
|
| 980 |
+
{"learn":[0.003231464608],"iteration":976,"passed_time":8.266154743,"remaining_time":0.1945972969},
|
| 981 |
+
{"learn":[0.003231469481],"iteration":977,"passed_time":8.273761878,"remaining_time":0.1861173429},
|
| 982 |
+
{"learn":[0.0032314619],"iteration":978,"passed_time":8.281385208,"remaining_time":0.1776395193},
|
| 983 |
+
{"learn":[0.003231454861],"iteration":979,"passed_time":8.288991158,"remaining_time":0.1691630849},
|
| 984 |
+
{"learn":[0.003231447551],"iteration":980,"passed_time":8.296996716,"remaining_time":0.1606961647},
|
| 985 |
+
{"learn":[0.003231448363],"iteration":981,"passed_time":8.304912742,"remaining_time":0.1522285431},
|
| 986 |
+
{"learn":[0.003230462336],"iteration":982,"passed_time":8.313305506,"remaining_time":0.1437702885},
|
| 987 |
+
{"learn":[0.003230466668],"iteration":983,"passed_time":8.320883536,"remaining_time":0.1352989193},
|
| 988 |
+
{"learn":[0.003230465314],"iteration":984,"passed_time":8.328939664,"remaining_time":0.1268366446},
|
| 989 |
+
{"learn":[0.003230457733],"iteration":985,"passed_time":8.336506104,"remaining_time":0.1183682408},
|
| 990 |
+
{"learn":[0.003230459087],"iteration":986,"passed_time":8.343560896,"remaining_time":0.1098949257},
|
| 991 |
+
{"learn":[0.003230455838],"iteration":987,"passed_time":8.350729057,"remaining_time":0.101425859},
|
| 992 |
+
{"learn":[0.003230454755],"iteration":988,"passed_time":8.358047608,"remaining_time":0.09296109574},
|
| 993 |
+
{"learn":[0.003230447175],"iteration":989,"passed_time":8.365074395,"remaining_time":0.08449570096},
|
| 994 |
+
{"learn":[0.003230443926],"iteration":990,"passed_time":8.372479792,"remaining_time":0.07603664796},
|
| 995 |
+
{"learn":[0.003230445279],"iteration":991,"passed_time":8.379729729,"remaining_time":0.06757846556},
|
| 996 |
+
{"learn":[0.003230441218],"iteration":992,"passed_time":8.38699211,"remaining_time":0.0591228044},
|
| 997 |
+
{"learn":[0.003230440948],"iteration":993,"passed_time":8.394119021,"remaining_time":0.05066872648},
|
| 998 |
+
{"learn":[0.003230437969],"iteration":994,"passed_time":8.401356162,"remaining_time":0.04221787016},
|
| 999 |
+
{"learn":[0.003230440135],"iteration":995,"passed_time":8.408450779,"remaining_time":0.03376887863},
|
| 1000 |
+
{"learn":[0.003230437969],"iteration":996,"passed_time":8.415493234,"remaining_time":0.02532244704},
|
| 1001 |
+
{"learn":[0.003230440135],"iteration":997,"passed_time":8.422638912,"remaining_time":0.0168790359},
|
| 1002 |
+
{"learn":[0.003230436887],"iteration":998,"passed_time":8.429892239,"remaining_time":0.00843833057},
|
| 1003 |
+
{"learn":[0.003230431743],"iteration":999,"passed_time":8.437191207,"remaining_time":0}
|
| 1004 |
+
]}
|
catboost_info/learn/events.out.tfevents
ADDED
|
Binary file (54.9 kB). View file
|
|
|
catboost_info/learn_error.tsv
ADDED
|
@@ -0,0 +1,1001 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
iter Logloss
|
| 2 |
+
0 0.6345975935
|
| 3 |
+
1 0.5821738159
|
| 4 |
+
2 0.536109203
|
| 5 |
+
3 0.4922448434
|
| 6 |
+
4 0.4506230198
|
| 7 |
+
5 0.4165109727
|
| 8 |
+
6 0.3855723247
|
| 9 |
+
7 0.3589960512
|
| 10 |
+
8 0.329146501
|
| 11 |
+
9 0.3036517077
|
| 12 |
+
10 0.2793441382
|
| 13 |
+
11 0.257702187
|
| 14 |
+
12 0.2407448692
|
| 15 |
+
13 0.225226436
|
| 16 |
+
14 0.2095610351
|
| 17 |
+
15 0.1951945625
|
| 18 |
+
16 0.1846442912
|
| 19 |
+
17 0.1732121946
|
| 20 |
+
18 0.1632697999
|
| 21 |
+
19 0.1537118642
|
| 22 |
+
20 0.1460680989
|
| 23 |
+
21 0.1393085269
|
| 24 |
+
22 0.1315902081
|
| 25 |
+
23 0.1265649653
|
| 26 |
+
24 0.1211223295
|
| 27 |
+
25 0.1175602502
|
| 28 |
+
26 0.1119215835
|
| 29 |
+
27 0.1077986165
|
| 30 |
+
28 0.1029698384
|
| 31 |
+
29 0.0986395114
|
| 32 |
+
30 0.09615119676
|
| 33 |
+
31 0.09377886222
|
| 34 |
+
32 0.09163242394
|
| 35 |
+
33 0.08899570965
|
| 36 |
+
34 0.08655943737
|
| 37 |
+
35 0.08411792667
|
| 38 |
+
36 0.08245623879
|
| 39 |
+
37 0.08067619472
|
| 40 |
+
38 0.07827483014
|
| 41 |
+
39 0.07692957542
|
| 42 |
+
40 0.07555168248
|
| 43 |
+
41 0.07391568055
|
| 44 |
+
42 0.07177635588
|
| 45 |
+
43 0.07028396644
|
| 46 |
+
44 0.06840369806
|
| 47 |
+
45 0.06745874221
|
| 48 |
+
46 0.06587893724
|
| 49 |
+
47 0.0646966767
|
| 50 |
+
48 0.06327847649
|
| 51 |
+
49 0.06244927114
|
| 52 |
+
50 0.06149193877
|
| 53 |
+
51 0.06045234473
|
| 54 |
+
52 0.05980879338
|
| 55 |
+
53 0.05862077701
|
| 56 |
+
54 0.05796779561
|
| 57 |
+
55 0.05723820058
|
| 58 |
+
56 0.05668393745
|
| 59 |
+
57 0.05583438662
|
| 60 |
+
58 0.05545563798
|
| 61 |
+
59 0.05452632489
|
| 62 |
+
60 0.05396694639
|
| 63 |
+
61 0.0532831949
|
| 64 |
+
62 0.05305155948
|
| 65 |
+
63 0.05212511105
|
| 66 |
+
64 0.05163175536
|
| 67 |
+
65 0.05108733309
|
| 68 |
+
66 0.05074313014
|
| 69 |
+
67 0.04995906245
|
| 70 |
+
68 0.0491270113
|
| 71 |
+
69 0.04865425364
|
| 72 |
+
70 0.04784921475
|
| 73 |
+
71 0.04723317029
|
| 74 |
+
72 0.04680454552
|
| 75 |
+
73 0.0461453941
|
| 76 |
+
74 0.04523684268
|
| 77 |
+
75 0.04456790962
|
| 78 |
+
76 0.04429176872
|
| 79 |
+
77 0.04371691534
|
| 80 |
+
78 0.04331697618
|
| 81 |
+
79 0.04278334029
|
| 82 |
+
80 0.04215936635
|
| 83 |
+
81 0.04188383767
|
| 84 |
+
82 0.04157795788
|
| 85 |
+
83 0.04135640998
|
| 86 |
+
84 0.04090694022
|
| 87 |
+
85 0.04072319587
|
| 88 |
+
86 0.04048617238
|
| 89 |
+
87 0.04004556143
|
| 90 |
+
88 0.03989673999
|
| 91 |
+
89 0.03953182104
|
| 92 |
+
90 0.0394369881
|
| 93 |
+
91 0.03915172486
|
| 94 |
+
92 0.03901884894
|
| 95 |
+
93 0.03869289892
|
| 96 |
+
94 0.03815146211
|
| 97 |
+
95 0.03783431435
|
| 98 |
+
96 0.0375035486
|
| 99 |
+
97 0.03729480873
|
| 100 |
+
98 0.03685405537
|
| 101 |
+
99 0.03640809608
|
| 102 |
+
100 0.03575616916
|
| 103 |
+
101 0.03569260107
|
| 104 |
+
102 0.03523461797
|
| 105 |
+
103 0.03514318762
|
| 106 |
+
104 0.03468352676
|
| 107 |
+
105 0.03434562089
|
| 108 |
+
106 0.03403483312
|
| 109 |
+
107 0.03364170803
|
| 110 |
+
108 0.03354889364
|
| 111 |
+
109 0.03340411922
|
| 112 |
+
110 0.03310352241
|
| 113 |
+
111 0.03281770859
|
| 114 |
+
112 0.03218629616
|
| 115 |
+
113 0.03186893819
|
| 116 |
+
114 0.03157080811
|
| 117 |
+
115 0.03135488728
|
| 118 |
+
116 0.03126355296
|
| 119 |
+
117 0.03111994846
|
| 120 |
+
118 0.03081413272
|
| 121 |
+
119 0.03041541944
|
| 122 |
+
120 0.03007503256
|
| 123 |
+
121 0.02976127995
|
| 124 |
+
122 0.0296251658
|
| 125 |
+
123 0.02939174213
|
| 126 |
+
124 0.0293002632
|
| 127 |
+
125 0.02921549833
|
| 128 |
+
126 0.02886017345
|
| 129 |
+
127 0.02860820514
|
| 130 |
+
128 0.02834298725
|
| 131 |
+
129 0.0278810478
|
| 132 |
+
130 0.02769682564
|
| 133 |
+
131 0.02747245776
|
| 134 |
+
132 0.02734022169
|
| 135 |
+
133 0.02710595116
|
| 136 |
+
134 0.02685099955
|
| 137 |
+
135 0.02654279781
|
| 138 |
+
136 0.02633697314
|
| 139 |
+
137 0.02607809897
|
| 140 |
+
138 0.02586316258
|
| 141 |
+
139 0.0256751753
|
| 142 |
+
140 0.025665838
|
| 143 |
+
141 0.02542802598
|
| 144 |
+
142 0.02528695237
|
| 145 |
+
143 0.0251071283
|
| 146 |
+
144 0.02501060697
|
| 147 |
+
145 0.02490475149
|
| 148 |
+
146 0.02474863095
|
| 149 |
+
147 0.02465164397
|
| 150 |
+
148 0.02444501689
|
| 151 |
+
149 0.02432844792
|
| 152 |
+
150 0.02426986159
|
| 153 |
+
151 0.02419508709
|
| 154 |
+
152 0.02401890055
|
| 155 |
+
153 0.02375445555
|
| 156 |
+
154 0.02341849817
|
| 157 |
+
155 0.02341033255
|
| 158 |
+
156 0.02325700825
|
| 159 |
+
157 0.02317235421
|
| 160 |
+
158 0.02305525018
|
| 161 |
+
159 0.02284465716
|
| 162 |
+
160 0.02272297479
|
| 163 |
+
161 0.02244912653
|
| 164 |
+
162 0.02240931173
|
| 165 |
+
163 0.02216938805
|
| 166 |
+
164 0.02196063647
|
| 167 |
+
165 0.02185233913
|
| 168 |
+
166 0.02180497711
|
| 169 |
+
167 0.02168866178
|
| 170 |
+
168 0.02156183083
|
| 171 |
+
169 0.021410091
|
| 172 |
+
170 0.02134540498
|
| 173 |
+
171 0.02128399529
|
| 174 |
+
172 0.02120614953
|
| 175 |
+
173 0.02107470954
|
| 176 |
+
174 0.02094395728
|
| 177 |
+
175 0.0209405784
|
| 178 |
+
176 0.02088292865
|
| 179 |
+
177 0.02073307079
|
| 180 |
+
178 0.02064925028
|
| 181 |
+
179 0.02043668523
|
| 182 |
+
180 0.0202902692
|
| 183 |
+
181 0.02020970094
|
| 184 |
+
182 0.02013228071
|
| 185 |
+
183 0.02008803333
|
| 186 |
+
184 0.01996854812
|
| 187 |
+
185 0.01993370199
|
| 188 |
+
186 0.01985323879
|
| 189 |
+
187 0.01982112827
|
| 190 |
+
188 0.01975542876
|
| 191 |
+
189 0.01964357296
|
| 192 |
+
190 0.01957327402
|
| 193 |
+
191 0.01948483903
|
| 194 |
+
192 0.01933879333
|
| 195 |
+
193 0.01923854039
|
| 196 |
+
194 0.019190388
|
| 197 |
+
195 0.0191418315
|
| 198 |
+
196 0.01910594571
|
| 199 |
+
197 0.01907350926
|
| 200 |
+
198 0.01902672477
|
| 201 |
+
199 0.01895616503
|
| 202 |
+
200 0.01882914777
|
| 203 |
+
201 0.01871791658
|
| 204 |
+
202 0.01861451426
|
| 205 |
+
203 0.01854089645
|
| 206 |
+
204 0.01845122694
|
| 207 |
+
205 0.01835347959
|
| 208 |
+
206 0.01826695602
|
| 209 |
+
207 0.01819670819
|
| 210 |
+
208 0.01814156866
|
| 211 |
+
209 0.018070904
|
| 212 |
+
210 0.01795127031
|
| 213 |
+
211 0.01784787433
|
| 214 |
+
212 0.01777268738
|
| 215 |
+
213 0.01767715377
|
| 216 |
+
214 0.01764840114
|
| 217 |
+
215 0.01754269125
|
| 218 |
+
216 0.01745075062
|
| 219 |
+
217 0.01735769817
|
| 220 |
+
218 0.01733215984
|
| 221 |
+
219 0.01722590523
|
| 222 |
+
220 0.01716068204
|
| 223 |
+
221 0.01708117818
|
| 224 |
+
222 0.01696783973
|
| 225 |
+
223 0.01695045758
|
| 226 |
+
224 0.0167943347
|
| 227 |
+
225 0.01671577198
|
| 228 |
+
226 0.01665562814
|
| 229 |
+
227 0.01652320493
|
| 230 |
+
228 0.01648382118
|
| 231 |
+
229 0.01635097002
|
| 232 |
+
230 0.0161417807
|
| 233 |
+
231 0.01605773708
|
| 234 |
+
232 0.01596407594
|
| 235 |
+
233 0.01594520768
|
| 236 |
+
234 0.01591689707
|
| 237 |
+
235 0.01588654619
|
| 238 |
+
236 0.01581277018
|
| 239 |
+
237 0.01572653481
|
| 240 |
+
238 0.0156983478
|
| 241 |
+
239 0.01560128375
|
| 242 |
+
240 0.0155544796
|
| 243 |
+
241 0.0155163642
|
| 244 |
+
242 0.01543025639
|
| 245 |
+
243 0.0153856539
|
| 246 |
+
244 0.01534378503
|
| 247 |
+
245 0.01528890953
|
| 248 |
+
246 0.01525849806
|
| 249 |
+
247 0.01523708916
|
| 250 |
+
248 0.01509147349
|
| 251 |
+
249 0.01496231303
|
| 252 |
+
250 0.0149081613
|
| 253 |
+
251 0.01484867909
|
| 254 |
+
252 0.01479387844
|
| 255 |
+
253 0.01472755735
|
| 256 |
+
254 0.01465281837
|
| 257 |
+
255 0.01459762906
|
| 258 |
+
256 0.01453720265
|
| 259 |
+
257 0.01442247791
|
| 260 |
+
258 0.01435413075
|
| 261 |
+
259 0.01427821476
|
| 262 |
+
260 0.01424404426
|
| 263 |
+
261 0.01421051387
|
| 264 |
+
262 0.0141825945
|
| 265 |
+
263 0.01413925248
|
| 266 |
+
264 0.01405514175
|
| 267 |
+
265 0.01394847452
|
| 268 |
+
266 0.01393462023
|
| 269 |
+
267 0.01386261472
|
| 270 |
+
268 0.01383535245
|
| 271 |
+
269 0.01374885708
|
| 272 |
+
270 0.01366862251
|
| 273 |
+
271 0.01354363456
|
| 274 |
+
272 0.01347180334
|
| 275 |
+
273 0.01344631734
|
| 276 |
+
274 0.01335943218
|
| 277 |
+
275 0.01334372505
|
| 278 |
+
276 0.01332632611
|
| 279 |
+
277 0.01329704442
|
| 280 |
+
278 0.01328218517
|
| 281 |
+
279 0.0132616189
|
| 282 |
+
280 0.01323389187
|
| 283 |
+
281 0.01321662738
|
| 284 |
+
282 0.013088863
|
| 285 |
+
283 0.01304560876
|
| 286 |
+
284 0.01302215867
|
| 287 |
+
285 0.01299059063
|
| 288 |
+
286 0.01294543058
|
| 289 |
+
287 0.0128675241
|
| 290 |
+
288 0.01279021538
|
| 291 |
+
289 0.01273897007
|
| 292 |
+
290 0.01266642635
|
| 293 |
+
291 0.01253702444
|
| 294 |
+
292 0.01248791376
|
| 295 |
+
293 0.01246083669
|
| 296 |
+
294 0.01245533106
|
| 297 |
+
295 0.01241379657
|
| 298 |
+
296 0.01228910535
|
| 299 |
+
297 0.01224158652
|
| 300 |
+
298 0.01222041169
|
| 301 |
+
299 0.01215861084
|
| 302 |
+
300 0.01208227025
|
| 303 |
+
301 0.01206734939
|
| 304 |
+
302 0.01198636086
|
| 305 |
+
303 0.01196627431
|
| 306 |
+
304 0.01193734653
|
| 307 |
+
305 0.01189021108
|
| 308 |
+
306 0.01188992952
|
| 309 |
+
307 0.01181813844
|
| 310 |
+
308 0.01178334869
|
| 311 |
+
309 0.01171860976
|
| 312 |
+
310 0.01170155551
|
| 313 |
+
311 0.01167457252
|
| 314 |
+
312 0.01165058964
|
| 315 |
+
313 0.01162250535
|
| 316 |
+
314 0.01159511846
|
| 317 |
+
315 0.01153381926
|
| 318 |
+
316 0.0115255573
|
| 319 |
+
317 0.01150211479
|
| 320 |
+
318 0.01143771166
|
| 321 |
+
319 0.0114218584
|
| 322 |
+
320 0.0113450207
|
| 323 |
+
321 0.01131809955
|
| 324 |
+
322 0.01130333043
|
| 325 |
+
323 0.01128106374
|
| 326 |
+
324 0.01125953996
|
| 327 |
+
325 0.01119687629
|
| 328 |
+
326 0.01115300154
|
| 329 |
+
327 0.01114158927
|
| 330 |
+
328 0.01113205822
|
| 331 |
+
329 0.01104892889
|
| 332 |
+
330 0.0110357006
|
| 333 |
+
331 0.01100847604
|
| 334 |
+
332 0.01097898527
|
| 335 |
+
333 0.01093170466
|
| 336 |
+
334 0.01090073537
|
| 337 |
+
335 0.01087907954
|
| 338 |
+
336 0.01085420435
|
| 339 |
+
337 0.01081370827
|
| 340 |
+
338 0.01080466045
|
| 341 |
+
339 0.01076322013
|
| 342 |
+
340 0.01074414382
|
| 343 |
+
341 0.01070159479
|
| 344 |
+
342 0.01063228356
|
| 345 |
+
343 0.01058941825
|
| 346 |
+
344 0.01054232276
|
| 347 |
+
345 0.01049204129
|
| 348 |
+
346 0.01044611222
|
| 349 |
+
347 0.01041678514
|
| 350 |
+
348 0.01038512585
|
| 351 |
+
349 0.01033542487
|
| 352 |
+
350 0.01029840434
|
| 353 |
+
351 0.01026454348
|
| 354 |
+
352 0.01024568955
|
| 355 |
+
353 0.01021468551
|
| 356 |
+
354 0.01020861444
|
| 357 |
+
355 0.01018232817
|
| 358 |
+
356 0.01011292396
|
| 359 |
+
357 0.01007270865
|
| 360 |
+
358 0.0100286336
|
| 361 |
+
359 0.009984063543
|
| 362 |
+
360 0.00991697125
|
| 363 |
+
361 0.009898808149
|
| 364 |
+
362 0.009884166713
|
| 365 |
+
363 0.0098288797
|
| 366 |
+
364 0.009824673469
|
| 367 |
+
365 0.009778547476
|
| 368 |
+
366 0.0097341422
|
| 369 |
+
367 0.009708824885
|
| 370 |
+
368 0.009608369158
|
| 371 |
+
369 0.009567940297
|
| 372 |
+
370 0.009538137709
|
| 373 |
+
371 0.00951365415
|
| 374 |
+
372 0.009478970481
|
| 375 |
+
373 0.009425252688
|
| 376 |
+
374 0.009381720318
|
| 377 |
+
375 0.009318977253
|
| 378 |
+
376 0.009295126396
|
| 379 |
+
377 0.009261297277
|
| 380 |
+
378 0.009224172634
|
| 381 |
+
379 0.009196197709
|
| 382 |
+
380 0.00915397227
|
| 383 |
+
381 0.009143010656
|
| 384 |
+
382 0.009111064712
|
| 385 |
+
383 0.0090753869
|
| 386 |
+
384 0.009059020836
|
| 387 |
+
385 0.009044292152
|
| 388 |
+
386 0.009028571955
|
| 389 |
+
387 0.009003510292
|
| 390 |
+
388 0.008948575332
|
| 391 |
+
389 0.008927785017
|
| 392 |
+
390 0.008898643636
|
| 393 |
+
391 0.008857753005
|
| 394 |
+
392 0.008836855572
|
| 395 |
+
393 0.008819993356
|
| 396 |
+
394 0.008781864961
|
| 397 |
+
395 0.008773163869
|
| 398 |
+
396 0.008764997895
|
| 399 |
+
397 0.008745352937
|
| 400 |
+
398 0.008700106937
|
| 401 |
+
399 0.008669267161
|
| 402 |
+
400 0.008665167176
|
| 403 |
+
401 0.00865106658
|
| 404 |
+
402 0.008628502455
|
| 405 |
+
403 0.008599820527
|
| 406 |
+
404 0.008592827908
|
| 407 |
+
405 0.008580059017
|
| 408 |
+
406 0.008575735356
|
| 409 |
+
407 0.008555173847
|
| 410 |
+
408 0.008536121408
|
| 411 |
+
409 0.008517698752
|
| 412 |
+
410 0.008493650377
|
| 413 |
+
411 0.008491332272
|
| 414 |
+
412 0.008471728139
|
| 415 |
+
413 0.008455698854
|
| 416 |
+
414 0.008433111025
|
| 417 |
+
415 0.008402435488
|
| 418 |
+
416 0.008383857994
|
| 419 |
+
417 0.008347193301
|
| 420 |
+
418 0.008335345338
|
| 421 |
+
419 0.008332796612
|
| 422 |
+
420 0.008285398481
|
| 423 |
+
421 0.008256300922
|
| 424 |
+
422 0.008222580284
|
| 425 |
+
423 0.00818066542
|
| 426 |
+
424 0.008162043995
|
| 427 |
+
425 0.008093292714
|
| 428 |
+
426 0.008087739268
|
| 429 |
+
427 0.008061960811
|
| 430 |
+
428 0.008045936282
|
| 431 |
+
429 0.008044581486
|
| 432 |
+
430 0.008040640648
|
| 433 |
+
431 0.008010030454
|
| 434 |
+
432 0.007996326952
|
| 435 |
+
433 0.007942410484
|
| 436 |
+
434 0.007910942028
|
| 437 |
+
435 0.007873121024
|
| 438 |
+
436 0.007846428431
|
| 439 |
+
437 0.007846387279
|
| 440 |
+
438 0.007840208992
|
| 441 |
+
439 0.007802075254
|
| 442 |
+
440 0.007799556873
|
| 443 |
+
441 0.007764761094
|
| 444 |
+
442 0.00773336828
|
| 445 |
+
443 0.007716015062
|
| 446 |
+
444 0.007677880567
|
| 447 |
+
445 0.00765945737
|
| 448 |
+
446 0.007600449499
|
| 449 |
+
447 0.007543751261
|
| 450 |
+
448 0.00750578375
|
| 451 |
+
449 0.007490615602
|
| 452 |
+
450 0.007457151195
|
| 453 |
+
451 0.007456453256
|
| 454 |
+
452 0.007442275136
|
| 455 |
+
453 0.007428831374
|
| 456 |
+
454 0.007390666556
|
| 457 |
+
455 0.007379457959
|
| 458 |
+
456 0.007355067749
|
| 459 |
+
457 0.007353381052
|
| 460 |
+
458 0.007326337583
|
| 461 |
+
459 0.007324318419
|
| 462 |
+
460 0.007320714123
|
| 463 |
+
461 0.007298096541
|
| 464 |
+
462 0.007295718379
|
| 465 |
+
463 0.007281984834
|
| 466 |
+
464 0.0072357476
|
| 467 |
+
465 0.007207076623
|
| 468 |
+
466 0.007206992694
|
| 469 |
+
467 0.007167258679
|
| 470 |
+
468 0.007167251075
|
| 471 |
+
469 0.007127704952
|
| 472 |
+
470 0.007117703258
|
| 473 |
+
471 0.007085232553
|
| 474 |
+
472 0.007063322255
|
| 475 |
+
473 0.007034470472
|
| 476 |
+
474 0.007032188716
|
| 477 |
+
475 0.007024564736
|
| 478 |
+
476 0.007013930081
|
| 479 |
+
477 0.007009407133
|
| 480 |
+
478 0.006998254308
|
| 481 |
+
479 0.006973868897
|
| 482 |
+
480 0.006973866731
|
| 483 |
+
481 0.006973845072
|
| 484 |
+
482 0.006943321632
|
| 485 |
+
483 0.006940065737
|
| 486 |
+
484 0.006937864638
|
| 487 |
+
485 0.006935555785
|
| 488 |
+
486 0.00692491522
|
| 489 |
+
487 0.006911291123
|
| 490 |
+
488 0.006874462173
|
| 491 |
+
489 0.006874460549
|
| 492 |
+
490 0.006866285818
|
| 493 |
+
491 0.006840612118
|
| 494 |
+
492 0.00684042804
|
| 495 |
+
493 0.006816577915
|
| 496 |
+
494 0.006805090388
|
| 497 |
+
495 0.0067836729
|
| 498 |
+
496 0.006783647992
|
| 499 |
+
497 0.006783647992
|
| 500 |
+
498 0.006755373729
|
| 501 |
+
499 0.006731995486
|
| 502 |
+
500 0.006688334077
|
| 503 |
+
501 0.006639195663
|
| 504 |
+
502 0.006591016179
|
| 505 |
+
503 0.006585869945
|
| 506 |
+
504 0.006559010087
|
| 507 |
+
505 0.006544999895
|
| 508 |
+
506 0.006525506509
|
| 509 |
+
507 0.006512860293
|
| 510 |
+
508 0.006483240412
|
| 511 |
+
509 0.006455247147
|
| 512 |
+
510 0.006437132665
|
| 513 |
+
511 0.006418526967
|
| 514 |
+
512 0.006396143981
|
| 515 |
+
513 0.006383333675
|
| 516 |
+
514 0.006359021363
|
| 517 |
+
515 0.006339552058
|
| 518 |
+
516 0.00633029094
|
| 519 |
+
517 0.006309557125
|
| 520 |
+
518 0.00629316232
|
| 521 |
+
519 0.006269392072
|
| 522 |
+
520 0.006257486148
|
| 523 |
+
521 0.00623798539
|
| 524 |
+
522 0.006198282146
|
| 525 |
+
523 0.006186719709
|
| 526 |
+
524 0.006166191819
|
| 527 |
+
525 0.006138351745
|
| 528 |
+
526 0.006138353369
|
| 529 |
+
527 0.006110262098
|
| 530 |
+
528 0.00609903615
|
| 531 |
+
529 0.00608738731
|
| 532 |
+
530 0.006045059112
|
| 533 |
+
531 0.006033755239
|
| 534 |
+
532 0.006023305559
|
| 535 |
+
533 0.006021433135
|
| 536 |
+
534 0.006009082613
|
| 537 |
+
535 0.005974244599
|
| 538 |
+
536 0.005950949179
|
| 539 |
+
537 0.005931164193
|
| 540 |
+
538 0.005920806843
|
| 541 |
+
539 0.005920806843
|
| 542 |
+
540 0.005905041092
|
| 543 |
+
541 0.005888681483
|
| 544 |
+
542 0.005873286218
|
| 545 |
+
543 0.005864314953
|
| 546 |
+
544 0.005822944883
|
| 547 |
+
545 0.00579759051
|
| 548 |
+
546 0.005769865632
|
| 549 |
+
547 0.005769851553
|
| 550 |
+
548 0.005769838558
|
| 551 |
+
549 0.005754848765
|
| 552 |
+
550 0.005754842267
|
| 553 |
+
551 0.005746696729
|
| 554 |
+
552 0.005744734962
|
| 555 |
+
553 0.005741601441
|
| 556 |
+
554 0.005719971745
|
| 557 |
+
555 0.005710648026
|
| 558 |
+
556 0.005692326108
|
| 559 |
+
557 0.005684342495
|
| 560 |
+
558 0.005659451677
|
| 561 |
+
559 0.005634417601
|
| 562 |
+
560 0.005606237033
|
| 563 |
+
561 0.005597040584
|
| 564 |
+
562 0.00557240228
|
| 565 |
+
563 0.005563605464
|
| 566 |
+
564 0.005563605464
|
| 567 |
+
565 0.005534976506
|
| 568 |
+
566 0.005507760801
|
| 569 |
+
567 0.005485963079
|
| 570 |
+
568 0.005466836085
|
| 571 |
+
569 0.005443505963
|
| 572 |
+
570 0.005435063272
|
| 573 |
+
571 0.005426874486
|
| 574 |
+
572 0.00541900934
|
| 575 |
+
573 0.005396359877
|
| 576 |
+
574 0.005376238611
|
| 577 |
+
575 0.00536865576
|
| 578 |
+
576 0.005348265829
|
| 579 |
+
577 0.005340543254
|
| 580 |
+
578 0.005319017376
|
| 581 |
+
579 0.005288491699
|
| 582 |
+
580 0.005274006114
|
| 583 |
+
581 0.005254930771
|
| 584 |
+
582 0.005250145702
|
| 585 |
+
583 0.005228178522
|
| 586 |
+
584 0.005209792145
|
| 587 |
+
585 0.005183617277
|
| 588 |
+
586 0.005154915268
|
| 589 |
+
587 0.005135169787
|
| 590 |
+
588 0.005116151013
|
| 591 |
+
589 0.005096986535
|
| 592 |
+
590 0.005081596709
|
| 593 |
+
591 0.005052898564
|
| 594 |
+
592 0.005034060572
|
| 595 |
+
593 0.005015975774
|
| 596 |
+
594 0.004989973022
|
| 597 |
+
595 0.004975079729
|
| 598 |
+
596 0.004957126014
|
| 599 |
+
597 0.004933304966
|
| 600 |
+
598 0.004908607193
|
| 601 |
+
599 0.004891804045
|
| 602 |
+
600 0.004862332434
|
| 603 |
+
601 0.004848515548
|
| 604 |
+
602 0.004818839988
|
| 605 |
+
603 0.004801272323
|
| 606 |
+
604 0.004798084916
|
| 607 |
+
605 0.00477162949
|
| 608 |
+
606 0.004755774489
|
| 609 |
+
607 0.004732855983
|
| 610 |
+
608 0.004719117494
|
| 611 |
+
609 0.004712359287
|
| 612 |
+
610 0.004685021941
|
| 613 |
+
611 0.00466990064
|
| 614 |
+
612 0.0046464856
|
| 615 |
+
613 0.004639946715
|
| 616 |
+
614 0.004627783519
|
| 617 |
+
615 0.004615681558
|
| 618 |
+
616 0.004604460484
|
| 619 |
+
617 0.004593699688
|
| 620 |
+
618 0.0045883751
|
| 621 |
+
619 0.004571890998
|
| 622 |
+
620 0.004559601121
|
| 623 |
+
621 0.004535919875
|
| 624 |
+
622 0.004529983405
|
| 625 |
+
623 0.004524350903
|
| 626 |
+
624 0.004508322265
|
| 627 |
+
625 0.004492304327
|
| 628 |
+
626 0.004486782062
|
| 629 |
+
627 0.004465177965
|
| 630 |
+
628 0.00444066963
|
| 631 |
+
629 0.004426744967
|
| 632 |
+
630 0.004403929859
|
| 633 |
+
631 0.004367450679
|
| 634 |
+
632 0.00435923509
|
| 635 |
+
633 0.004353080923
|
| 636 |
+
634 0.00434186019
|
| 637 |
+
635 0.004341854505
|
| 638 |
+
636 0.004341862085
|
| 639 |
+
637 0.004335830268
|
| 640 |
+
638 0.004325640458
|
| 641 |
+
639 0.004318267205
|
| 642 |
+
640 0.00431314103
|
| 643 |
+
641 0.004298439961
|
| 644 |
+
642 0.004292708981
|
| 645 |
+
643 0.004283102967
|
| 646 |
+
644 0.004282367101
|
| 647 |
+
645 0.004255209016
|
| 648 |
+
646 0.004249618278
|
| 649 |
+
647 0.004231335076
|
| 650 |
+
648 0.004211062267
|
| 651 |
+
649 0.004211053333
|
| 652 |
+
650 0.004211046565
|
| 653 |
+
651 0.004197778511
|
| 654 |
+
652 0.004187183136
|
| 655 |
+
653 0.004168454299
|
| 656 |
+
654 0.004158197346
|
| 657 |
+
655 0.004146825318
|
| 658 |
+
656 0.004129546329
|
| 659 |
+
657 0.004117483825
|
| 660 |
+
658 0.004113762262
|
| 661 |
+
659 0.004108743322
|
| 662 |
+
660 0.00409816611
|
| 663 |
+
661 0.00407443313
|
| 664 |
+
662 0.00405420271
|
| 665 |
+
663 0.004044180828
|
| 666 |
+
664 0.0040299847
|
| 667 |
+
665 0.004019793831
|
| 668 |
+
666 0.004007991011
|
| 669 |
+
667 0.003994558844
|
| 670 |
+
668 0.003977400557
|
| 671 |
+
669 0.003967748209
|
| 672 |
+
670 0.00394917527
|
| 673 |
+
671 0.0039457068
|
| 674 |
+
672 0.00393461548
|
| 675 |
+
673 0.003924946817
|
| 676 |
+
674 0.003920642649
|
| 677 |
+
675 0.003908077902
|
| 678 |
+
676 0.003898624005
|
| 679 |
+
677 0.003876262089
|
| 680 |
+
678 0.003864436257
|
| 681 |
+
679 0.003861716424
|
| 682 |
+
680 0.003850099322
|
| 683 |
+
681 0.003842953465
|
| 684 |
+
682 0.003829765279
|
| 685 |
+
683 0.003808679893
|
| 686 |
+
684 0.003797601768
|
| 687 |
+
685 0.003782490508
|
| 688 |
+
686 0.003771832616
|
| 689 |
+
687 0.00376294203
|
| 690 |
+
688 0.003742079825
|
| 691 |
+
689 0.003731586023
|
| 692 |
+
690 0.003721365117
|
| 693 |
+
691 0.003707134025
|
| 694 |
+
692 0.003704911244
|
| 695 |
+
693 0.00369646767
|
| 696 |
+
694 0.003674965664
|
| 697 |
+
695 0.003666434913
|
| 698 |
+
696 0.003662391442
|
| 699 |
+
697 0.003642016519
|
| 700 |
+
698 0.003631173194
|
| 701 |
+
699 0.003623114339
|
| 702 |
+
700 0.003618134927
|
| 703 |
+
701 0.003611635322
|
| 704 |
+
702 0.003601396014
|
| 705 |
+
703 0.003591910982
|
| 706 |
+
704 0.003587096697
|
| 707 |
+
705 0.003583766914
|
| 708 |
+
706 0.003579130233
|
| 709 |
+
707 0.003574625943
|
| 710 |
+
708 0.003565632842
|
| 711 |
+
709 0.003557394302
|
| 712 |
+
710 0.003547782046
|
| 713 |
+
711 0.003534162846
|
| 714 |
+
712 0.003526256169
|
| 715 |
+
713 0.003509989693
|
| 716 |
+
714 0.003501361499
|
| 717 |
+
715 0.003497592828
|
| 718 |
+
716 0.003497068903
|
| 719 |
+
717 0.003494003655
|
| 720 |
+
718 0.003483512537
|
| 721 |
+
719 0.003475120992
|
| 722 |
+
720 0.003457106338
|
| 723 |
+
721 0.003446459275
|
| 724 |
+
722 0.0034265863
|
| 725 |
+
723 0.003409362695
|
| 726 |
+
724 0.003402224373
|
| 727 |
+
725 0.003394745252
|
| 728 |
+
726 0.003380096907
|
| 729 |
+
727 0.00337582385
|
| 730 |
+
728 0.003373330354
|
| 731 |
+
729 0.003373329813
|
| 732 |
+
730 0.00336313653
|
| 733 |
+
731 0.00334907704
|
| 734 |
+
732 0.003346926274
|
| 735 |
+
733 0.003342835942
|
| 736 |
+
734 0.003342828903
|
| 737 |
+
735 0.003342829444
|
| 738 |
+
736 0.003340960788
|
| 739 |
+
737 0.003339254892
|
| 740 |
+
738 0.003339254892
|
| 741 |
+
739 0.003339254892
|
| 742 |
+
740 0.003332448223
|
| 743 |
+
741 0.003332451472
|
| 744 |
+
742 0.003332430354
|
| 745 |
+
743 0.003332430896
|
| 746 |
+
744 0.003332439018
|
| 747 |
+
745 0.003332435769
|
| 748 |
+
746 0.003320449048
|
| 749 |
+
747 0.003320449048
|
| 750 |
+
748 0.003320449048
|
| 751 |
+
749 0.003320449048
|
| 752 |
+
750 0.003318643829
|
| 753 |
+
751 0.003308927834
|
| 754 |
+
752 0.003308927834
|
| 755 |
+
753 0.003308927834
|
| 756 |
+
754 0.003302428205
|
| 757 |
+
755 0.003288610307
|
| 758 |
+
756 0.003282053506
|
| 759 |
+
757 0.003278692294
|
| 760 |
+
758 0.003278689045
|
| 761 |
+
759 0.003278702582
|
| 762 |
+
760 0.003278692836
|
| 763 |
+
761 0.003278690128
|
| 764 |
+
762 0.003270898291
|
| 765 |
+
763 0.003270898833
|
| 766 |
+
764 0.003270891793
|
| 767 |
+
765 0.00327007958
|
| 768 |
+
766 0.003270078497
|
| 769 |
+
767 0.003270069292
|
| 770 |
+
768 0.003270076872
|
| 771 |
+
769 0.003270073894
|
| 772 |
+
770 0.003270066584
|
| 773 |
+
771 0.003270069292
|
| 774 |
+
772 0.003270070916
|
| 775 |
+
773 0.00327007606
|
| 776 |
+
774 0.003270059545
|
| 777 |
+
775 0.003270064689
|
| 778 |
+
776 0.003270057379
|
| 779 |
+
777 0.003270061982
|
| 780 |
+
778 0.003268352519
|
| 781 |
+
779 0.003266124887
|
| 782 |
+
780 0.003265681643
|
| 783 |
+
781 0.003265685974
|
| 784 |
+
782 0.003265681913
|
| 785 |
+
783 0.003265690848
|
| 786 |
+
784 0.003245993087
|
| 787 |
+
785 0.003245995252
|
| 788 |
+
786 0.003245996606
|
| 789 |
+
787 0.003245993628
|
| 790 |
+
788 0.003245995252
|
| 791 |
+
789 0.00324599417
|
| 792 |
+
790 0.003244925838
|
| 793 |
+
791 0.003244920423
|
| 794 |
+
792 0.003244909052
|
| 795 |
+
793 0.003244913384
|
| 796 |
+
794 0.003244916633
|
| 797 |
+
795 0.00324491555
|
| 798 |
+
796 0.003243304094
|
| 799 |
+
797 0.00324330247
|
| 800 |
+
798 0.003243305448
|
| 801 |
+
799 0.003243300575
|
| 802 |
+
800 0.003243296513
|
| 803 |
+
801 0.003243300304
|
| 804 |
+
802 0.003243296243
|
| 805 |
+
803 0.003243300845
|
| 806 |
+
804 0.003243294348
|
| 807 |
+
805 0.003241708612
|
| 808 |
+
806 0.003241715922
|
| 809 |
+
807 0.003241715381
|
| 810 |
+
808 0.003241699678
|
| 811 |
+
809 0.003241697512
|
| 812 |
+
810 0.00324169697
|
| 813 |
+
811 0.003241689931
|
| 814 |
+
812 0.003241693992
|
| 815 |
+
813 0.003241697783
|
| 816 |
+
814 0.003241695075
|
| 817 |
+
815 0.003240128291
|
| 818 |
+
816 0.003240132894
|
| 819 |
+
817 0.003240129916
|
| 820 |
+
818 0.003240132082
|
| 821 |
+
819 0.003240132082
|
| 822 |
+
820 0.003240134248
|
| 823 |
+
821 0.003240133165
|
| 824 |
+
822 0.003240126667
|
| 825 |
+
823 0.003240133706
|
| 826 |
+
824 0.003240123689
|
| 827 |
+
825 0.003240128562
|
| 828 |
+
826 0.003240124772
|
| 829 |
+
827 0.003240120981
|
| 830 |
+
828 0.003240114213
|
| 831 |
+
829 0.003240108257
|
| 832 |
+
830 0.003240106091
|
| 833 |
+
831 0.003240102301
|
| 834 |
+
832 0.003240096886
|
| 835 |
+
833 0.003240092825
|
| 836 |
+
834 0.003240096886
|
| 837 |
+
835 0.003240088764
|
| 838 |
+
836 0.00324009093
|
| 839 |
+
837 0.003240087951
|
| 840 |
+
838 0.003240082266
|
| 841 |
+
839 0.003240089034
|
| 842 |
+
840 0.003233733168
|
| 843 |
+
841 0.003233732897
|
| 844 |
+
842 0.003233729648
|
| 845 |
+
843 0.003233732355
|
| 846 |
+
844 0.003233733438
|
| 847 |
+
845 0.003233717736
|
| 848 |
+
846 0.003233726128
|
| 849 |
+
847 0.003233727482
|
| 850 |
+
848 0.003233727211
|
| 851 |
+
849 0.003233735333
|
| 852 |
+
850 0.003233713404
|
| 853 |
+
851 0.003233723692
|
| 854 |
+
852 0.003233721526
|
| 855 |
+
853 0.003233716653
|
| 856 |
+
854 0.003233721526
|
| 857 |
+
855 0.003233720172
|
| 858 |
+
856 0.003233313477
|
| 859 |
+
857 0.003233306167
|
| 860 |
+
858 0.003233305626
|
| 861 |
+
859 0.003231793825
|
| 862 |
+
860 0.003231798428
|
| 863 |
+
861 0.003231792201
|
| 864 |
+
862 0.003231796532
|
| 865 |
+
863 0.003231793013
|
| 866 |
+
864 0.00323179572
|
| 867 |
+
865 0.00323179193
|
| 868 |
+
866 0.00323178462
|
| 869 |
+
867 0.003231789222
|
| 870 |
+
868 0.003231788952
|
| 871 |
+
869 0.003231792201
|
| 872 |
+
870 0.003231785703
|
| 873 |
+
871 0.003231790576
|
| 874 |
+
872 0.003231787327
|
| 875 |
+
873 0.003231786786
|
| 876 |
+
874 0.003231786786
|
| 877 |
+
875 0.003231789764
|
| 878 |
+
876 0.003231780559
|
| 879 |
+
877 0.003231778393
|
| 880 |
+
878 0.003231786515
|
| 881 |
+
879 0.003231786786
|
| 882 |
+
880 0.003231776768
|
| 883 |
+
881 0.003231775144
|
| 884 |
+
882 0.003231772707
|
| 885 |
+
883 0.003231768917
|
| 886 |
+
884 0.003231770812
|
| 887 |
+
885 0.003231773249
|
| 888 |
+
886 0.003231771354
|
| 889 |
+
887 0.003231769188
|
| 890 |
+
888 0.003231760253
|
| 891 |
+
889 0.003231764585
|
| 892 |
+
890 0.003231759441
|
| 893 |
+
891 0.003231762961
|
| 894 |
+
892 0.003231758088
|
| 895 |
+
893 0.003231761878
|
| 896 |
+
894 0.003231761336
|
| 897 |
+
895 0.003231746717
|
| 898 |
+
896 0.003231749153
|
| 899 |
+
897 0.003231749153
|
| 900 |
+
898 0.003231739136
|
| 901 |
+
899 0.003231749965
|
| 902 |
+
900 0.003231737241
|
| 903 |
+
901 0.00323174076
|
| 904 |
+
902 0.003231737511
|
| 905 |
+
903 0.003231738053
|
| 906 |
+
904 0.00323172614
|
| 907 |
+
905 0.003231729389
|
| 908 |
+
906 0.003231732638
|
| 909 |
+
907 0.003231738324
|
| 910 |
+
908 0.003231724516
|
| 911 |
+
909 0.003231717477
|
| 912 |
+
910 0.003231724245
|
| 913 |
+
911 0.003231731014
|
| 914 |
+
912 0.003231719101
|
| 915 |
+
913 0.003231733721
|
| 916 |
+
914 0.00323171856
|
| 917 |
+
915 0.003231721809
|
| 918 |
+
916 0.003231724516
|
| 919 |
+
917 0.003231723975
|
| 920 |
+
918 0.003231719101
|
| 921 |
+
919 0.003231709896
|
| 922 |
+
920 0.003231712333
|
| 923 |
+
921 0.003231696901
|
| 924 |
+
922 0.003231695006
|
| 925 |
+
923 0.003231696359
|
| 926 |
+
924 0.003231695818
|
| 927 |
+
925 0.003231688237
|
| 928 |
+
926 0.003231678491
|
| 929 |
+
927 0.003231678491
|
| 930 |
+
928 0.003231673347
|
| 931 |
+
929 0.003231663329
|
| 932 |
+
930 0.003231663871
|
| 933 |
+
931 0.003231662246
|
| 934 |
+
932 0.003231655207
|
| 935 |
+
933 0.003231653041
|
| 936 |
+
934 0.003231639504
|
| 937 |
+
935 0.003231641129
|
| 938 |
+
936 0.003231637609
|
| 939 |
+
937 0.003231630299
|
| 940 |
+
938 0.003231624072
|
| 941 |
+
939 0.003231626509
|
| 942 |
+
940 0.003231612431
|
| 943 |
+
941 0.003231617033
|
| 944 |
+
942 0.003231613243
|
| 945 |
+
943 0.003231606745
|
| 946 |
+
944 0.003231608911
|
| 947 |
+
945 0.003231597269
|
| 948 |
+
946 0.003231602143
|
| 949 |
+
947 0.003231585086
|
| 950 |
+
948 0.003231582649
|
| 951 |
+
949 0.003231578047
|
| 952 |
+
950 0.003231570466
|
| 953 |
+
951 0.003231567488
|
| 954 |
+
952 0.003231559637
|
| 955 |
+
953 0.003231560449
|
| 956 |
+
954 0.003231550702
|
| 957 |
+
955 0.003231549349
|
| 958 |
+
956 0.003231546641
|
| 959 |
+
957 0.003231541497
|
| 960 |
+
958 0.003231530126
|
| 961 |
+
959 0.003231537707
|
| 962 |
+
960 0.003231531751
|
| 963 |
+
961 0.00323152796
|
| 964 |
+
962 0.003231507926
|
| 965 |
+
963 0.003231515236
|
| 966 |
+
964 0.003231514423
|
| 967 |
+
965 0.003231504948
|
| 968 |
+
966 0.003231500887
|
| 969 |
+
967 0.003231496825
|
| 970 |
+
968 0.003231499533
|
| 971 |
+
969 0.003231500074
|
| 972 |
+
970 0.003231491681
|
| 973 |
+
971 0.003231493306
|
| 974 |
+
972 0.00323149114
|
| 975 |
+
973 0.00323147652
|
| 976 |
+
974 0.003231471647
|
| 977 |
+
975 0.003231467315
|
| 978 |
+
976 0.003231464608
|
| 979 |
+
977 0.003231469481
|
| 980 |
+
978 0.0032314619
|
| 981 |
+
979 0.003231454861
|
| 982 |
+
980 0.003231447551
|
| 983 |
+
981 0.003231448363
|
| 984 |
+
982 0.003230462336
|
| 985 |
+
983 0.003230466668
|
| 986 |
+
984 0.003230465314
|
| 987 |
+
985 0.003230457733
|
| 988 |
+
986 0.003230459087
|
| 989 |
+
987 0.003230455838
|
| 990 |
+
988 0.003230454755
|
| 991 |
+
989 0.003230447175
|
| 992 |
+
990 0.003230443926
|
| 993 |
+
991 0.003230445279
|
| 994 |
+
992 0.003230441218
|
| 995 |
+
993 0.003230440948
|
| 996 |
+
994 0.003230437969
|
| 997 |
+
995 0.003230440135
|
| 998 |
+
996 0.003230437969
|
| 999 |
+
997 0.003230440135
|
| 1000 |
+
998 0.003230436887
|
| 1001 |
+
999 0.003230431743
|
catboost_info/time_left.tsv
ADDED
|
@@ -0,0 +1,1001 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
iter Passed Remaining
|
| 2 |
+
0 166 166183
|
| 3 |
+
1 177 88422
|
| 4 |
+
2 188 62675
|
| 5 |
+
3 200 49859
|
| 6 |
+
4 211 42065
|
| 7 |
+
5 222 36938
|
| 8 |
+
6 234 33214
|
| 9 |
+
7 245 30418
|
| 10 |
+
8 255 28154
|
| 11 |
+
9 264 26228
|
| 12 |
+
10 273 24568
|
| 13 |
+
11 281 23160
|
| 14 |
+
12 289 21966
|
| 15 |
+
13 297 20944
|
| 16 |
+
14 305 20053
|
| 17 |
+
15 314 19313
|
| 18 |
+
16 322 18647
|
| 19 |
+
17 330 18043
|
| 20 |
+
18 340 17602
|
| 21 |
+
19 349 17135
|
| 22 |
+
20 358 16694
|
| 23 |
+
21 366 16280
|
| 24 |
+
22 374 15913
|
| 25 |
+
23 383 15584
|
| 26 |
+
24 391 15273
|
| 27 |
+
25 399 14976
|
| 28 |
+
26 408 14707
|
| 29 |
+
27 416 14461
|
| 30 |
+
28 425 14231
|
| 31 |
+
29 433 14014
|
| 32 |
+
30 441 13803
|
| 33 |
+
31 450 13613
|
| 34 |
+
32 458 13428
|
| 35 |
+
33 466 13251
|
| 36 |
+
34 474 13089
|
| 37 |
+
35 483 12949
|
| 38 |
+
36 492 12816
|
| 39 |
+
37 500 12675
|
| 40 |
+
38 509 12542
|
| 41 |
+
39 517 12413
|
| 42 |
+
40 525 12291
|
| 43 |
+
41 533 12174
|
| 44 |
+
42 542 12071
|
| 45 |
+
43 550 11962
|
| 46 |
+
44 559 11864
|
| 47 |
+
45 567 11769
|
| 48 |
+
46 575 11673
|
| 49 |
+
47 583 11578
|
| 50 |
+
48 592 11493
|
| 51 |
+
49 600 11411
|
| 52 |
+
50 608 11330
|
| 53 |
+
51 617 11251
|
| 54 |
+
52 625 11170
|
| 55 |
+
53 633 11103
|
| 56 |
+
54 642 11036
|
| 57 |
+
55 650 10966
|
| 58 |
+
56 658 10897
|
| 59 |
+
57 667 10834
|
| 60 |
+
58 675 10769
|
| 61 |
+
59 683 10709
|
| 62 |
+
60 692 10654
|
| 63 |
+
61 700 10600
|
| 64 |
+
62 709 10545
|
| 65 |
+
63 717 10488
|
| 66 |
+
64 725 10432
|
| 67 |
+
65 734 10387
|
| 68 |
+
66 743 10353
|
| 69 |
+
67 751 10303
|
| 70 |
+
68 760 10256
|
| 71 |
+
69 768 10212
|
| 72 |
+
70 777 10170
|
| 73 |
+
71 786 10136
|
| 74 |
+
72 794 10093
|
| 75 |
+
73 803 10054
|
| 76 |
+
74 812 10015
|
| 77 |
+
75 820 9979
|
| 78 |
+
76 829 9941
|
| 79 |
+
77 837 9903
|
| 80 |
+
78 846 9870
|
| 81 |
+
79 855 9835
|
| 82 |
+
80 864 9808
|
| 83 |
+
81 872 9770
|
| 84 |
+
82 881 9741
|
| 85 |
+
83 891 9716
|
| 86 |
+
84 899 9681
|
| 87 |
+
85 907 9649
|
| 88 |
+
86 916 9618
|
| 89 |
+
87 925 9591
|
| 90 |
+
88 933 9556
|
| 91 |
+
89 942 9530
|
| 92 |
+
90 950 9496
|
| 93 |
+
91 959 9468
|
| 94 |
+
92 967 9435
|
| 95 |
+
93 976 9409
|
| 96 |
+
94 984 9380
|
| 97 |
+
95 993 9351
|
| 98 |
+
96 1001 9321
|
| 99 |
+
97 1009 9294
|
| 100 |
+
98 1018 9268
|
| 101 |
+
99 1026 9238
|
| 102 |
+
100 1037 9234
|
| 103 |
+
101 1045 9207
|
| 104 |
+
102 1054 9180
|
| 105 |
+
103 1062 9153
|
| 106 |
+
104 1070 9124
|
| 107 |
+
105 1078 9097
|
| 108 |
+
106 1087 9076
|
| 109 |
+
107 1096 9054
|
| 110 |
+
108 1104 9029
|
| 111 |
+
109 1113 9005
|
| 112 |
+
110 1122 8991
|
| 113 |
+
111 1131 8968
|
| 114 |
+
112 1139 8947
|
| 115 |
+
113 1148 8923
|
| 116 |
+
114 1156 8899
|
| 117 |
+
115 1164 8877
|
| 118 |
+
116 1173 8855
|
| 119 |
+
117 1181 8832
|
| 120 |
+
118 1190 8810
|
| 121 |
+
119 1198 8790
|
| 122 |
+
120 1207 8772
|
| 123 |
+
121 1215 8748
|
| 124 |
+
122 1224 8727
|
| 125 |
+
123 1232 8705
|
| 126 |
+
124 1241 8687
|
| 127 |
+
125 1249 8667
|
| 128 |
+
126 1258 8651
|
| 129 |
+
127 1267 8632
|
| 130 |
+
128 1276 8616
|
| 131 |
+
129 1284 8595
|
| 132 |
+
130 1292 8576
|
| 133 |
+
131 1301 8555
|
| 134 |
+
132 1309 8536
|
| 135 |
+
133 1318 8520
|
| 136 |
+
134 1327 8504
|
| 137 |
+
135 1336 8487
|
| 138 |
+
136 1344 8469
|
| 139 |
+
137 1353 8451
|
| 140 |
+
138 1361 8434
|
| 141 |
+
139 1369 8414
|
| 142 |
+
140 1378 8397
|
| 143 |
+
141 1386 8379
|
| 144 |
+
142 1395 8362
|
| 145 |
+
143 1403 8344
|
| 146 |
+
144 1411 8325
|
| 147 |
+
145 1420 8308
|
| 148 |
+
146 1428 8290
|
| 149 |
+
147 1436 8271
|
| 150 |
+
148 1445 8255
|
| 151 |
+
149 1453 8237
|
| 152 |
+
150 1461 8219
|
| 153 |
+
151 1470 8201
|
| 154 |
+
152 1478 8185
|
| 155 |
+
153 1486 8168
|
| 156 |
+
154 1495 8153
|
| 157 |
+
155 1503 8136
|
| 158 |
+
156 1512 8122
|
| 159 |
+
157 1522 8113
|
| 160 |
+
158 1530 8097
|
| 161 |
+
159 1539 8083
|
| 162 |
+
160 1547 8065
|
| 163 |
+
161 1556 8050
|
| 164 |
+
162 1564 8034
|
| 165 |
+
163 1573 8020
|
| 166 |
+
164 1581 8004
|
| 167 |
+
165 1589 7987
|
| 168 |
+
166 1598 7972
|
| 169 |
+
167 1606 7957
|
| 170 |
+
168 1615 7942
|
| 171 |
+
169 1623 7928
|
| 172 |
+
170 1632 7914
|
| 173 |
+
171 1640 7897
|
| 174 |
+
172 1649 7884
|
| 175 |
+
173 1657 7870
|
| 176 |
+
174 1666 7855
|
| 177 |
+
175 1675 7842
|
| 178 |
+
176 1683 7827
|
| 179 |
+
177 1691 7813
|
| 180 |
+
178 1700 7797
|
| 181 |
+
179 1708 7783
|
| 182 |
+
180 1717 7770
|
| 183 |
+
181 1725 7754
|
| 184 |
+
182 1733 7740
|
| 185 |
+
183 1742 7725
|
| 186 |
+
184 1750 7710
|
| 187 |
+
185 1758 7696
|
| 188 |
+
186 1766 7681
|
| 189 |
+
187 1774 7664
|
| 190 |
+
188 1783 7650
|
| 191 |
+
189 1791 7636
|
| 192 |
+
190 1799 7621
|
| 193 |
+
191 1807 7606
|
| 194 |
+
192 1815 7591
|
| 195 |
+
193 1824 7579
|
| 196 |
+
194 1832 7563
|
| 197 |
+
195 1840 7551
|
| 198 |
+
196 1848 7536
|
| 199 |
+
197 1857 7523
|
| 200 |
+
198 1865 7510
|
| 201 |
+
199 1874 7498
|
| 202 |
+
200 1883 7486
|
| 203 |
+
201 1891 7473
|
| 204 |
+
202 1900 7460
|
| 205 |
+
203 1908 7448
|
| 206 |
+
204 1918 7439
|
| 207 |
+
205 1927 7429
|
| 208 |
+
206 1935 7415
|
| 209 |
+
207 1944 7402
|
| 210 |
+
208 1952 7391
|
| 211 |
+
209 1960 7376
|
| 212 |
+
210 1969 7364
|
| 213 |
+
211 1977 7350
|
| 214 |
+
212 1986 7337
|
| 215 |
+
213 1994 7324
|
| 216 |
+
214 2002 7310
|
| 217 |
+
215 2011 7301
|
| 218 |
+
216 2020 7289
|
| 219 |
+
217 2028 7274
|
| 220 |
+
218 2036 7262
|
| 221 |
+
219 2044 7249
|
| 222 |
+
220 2052 7235
|
| 223 |
+
221 2062 7227
|
| 224 |
+
222 2071 7216
|
| 225 |
+
223 2079 7203
|
| 226 |
+
224 2087 7189
|
| 227 |
+
225 2094 7174
|
| 228 |
+
226 2103 7161
|
| 229 |
+
227 2111 7150
|
| 230 |
+
228 2120 7139
|
| 231 |
+
229 2129 7127
|
| 232 |
+
230 2137 7116
|
| 233 |
+
231 2146 7105
|
| 234 |
+
232 2154 7092
|
| 235 |
+
233 2163 7081
|
| 236 |
+
234 2171 7070
|
| 237 |
+
235 2180 7058
|
| 238 |
+
236 2187 7043
|
| 239 |
+
237 2196 7031
|
| 240 |
+
238 2204 7019
|
| 241 |
+
239 2213 7010
|
| 242 |
+
240 2222 6998
|
| 243 |
+
241 2230 6986
|
| 244 |
+
242 2239 6976
|
| 245 |
+
243 2247 6963
|
| 246 |
+
244 2255 6951
|
| 247 |
+
245 2264 6940
|
| 248 |
+
246 2272 6929
|
| 249 |
+
247 2281 6917
|
| 250 |
+
248 2289 6906
|
| 251 |
+
249 2299 6898
|
| 252 |
+
250 2308 6887
|
| 253 |
+
251 2316 6876
|
| 254 |
+
252 2324 6864
|
| 255 |
+
253 2333 6853
|
| 256 |
+
254 2341 6841
|
| 257 |
+
255 2350 6829
|
| 258 |
+
256 2358 6817
|
| 259 |
+
257 2366 6805
|
| 260 |
+
258 2374 6793
|
| 261 |
+
259 2382 6781
|
| 262 |
+
260 2390 6769
|
| 263 |
+
261 2399 6758
|
| 264 |
+
262 2407 6746
|
| 265 |
+
263 2415 6735
|
| 266 |
+
264 2424 6723
|
| 267 |
+
265 2432 6712
|
| 268 |
+
266 2440 6700
|
| 269 |
+
267 2450 6692
|
| 270 |
+
268 2458 6680
|
| 271 |
+
269 2466 6668
|
| 272 |
+
270 2474 6656
|
| 273 |
+
271 2482 6644
|
| 274 |
+
272 2491 6633
|
| 275 |
+
273 2499 6621
|
| 276 |
+
274 2507 6610
|
| 277 |
+
275 2515 6598
|
| 278 |
+
276 2523 6586
|
| 279 |
+
277 2531 6574
|
| 280 |
+
278 2539 6563
|
| 281 |
+
279 2548 6552
|
| 282 |
+
280 2556 6541
|
| 283 |
+
281 2564 6529
|
| 284 |
+
282 2573 6519
|
| 285 |
+
283 2580 6506
|
| 286 |
+
284 2589 6496
|
| 287 |
+
285 2597 6485
|
| 288 |
+
286 2605 6473
|
| 289 |
+
287 2614 6463
|
| 290 |
+
288 2622 6452
|
| 291 |
+
289 2630 6440
|
| 292 |
+
290 2639 6430
|
| 293 |
+
291 2647 6419
|
| 294 |
+
292 2656 6409
|
| 295 |
+
293 2664 6398
|
| 296 |
+
294 2672 6387
|
| 297 |
+
295 2680 6376
|
| 298 |
+
296 2689 6365
|
| 299 |
+
297 2698 6357
|
| 300 |
+
298 2707 6348
|
| 301 |
+
299 2716 6337
|
| 302 |
+
300 2724 6326
|
| 303 |
+
301 2732 6314
|
| 304 |
+
302 2740 6303
|
| 305 |
+
303 2748 6292
|
| 306 |
+
304 2756 6282
|
| 307 |
+
305 2765 6271
|
| 308 |
+
306 2769 6251
|
| 309 |
+
307 2777 6240
|
| 310 |
+
308 2785 6229
|
| 311 |
+
309 2794 6218
|
| 312 |
+
310 2802 6208
|
| 313 |
+
311 2811 6198
|
| 314 |
+
312 2819 6188
|
| 315 |
+
313 2827 6177
|
| 316 |
+
314 2836 6168
|
| 317 |
+
315 2845 6158
|
| 318 |
+
316 2853 6147
|
| 319 |
+
317 2861 6136
|
| 320 |
+
318 2869 6125
|
| 321 |
+
319 2877 6114
|
| 322 |
+
320 2885 6102
|
| 323 |
+
321 2893 6092
|
| 324 |
+
322 2901 6081
|
| 325 |
+
323 2910 6071
|
| 326 |
+
324 2918 6060
|
| 327 |
+
325 2927 6051
|
| 328 |
+
326 2935 6041
|
| 329 |
+
327 2943 6030
|
| 330 |
+
328 2952 6020
|
| 331 |
+
329 2960 6010
|
| 332 |
+
330 2969 6001
|
| 333 |
+
331 2980 5996
|
| 334 |
+
332 2988 5986
|
| 335 |
+
333 2997 5976
|
| 336 |
+
334 3005 5966
|
| 337 |
+
335 3013 5956
|
| 338 |
+
336 3022 5946
|
| 339 |
+
337 3030 5935
|
| 340 |
+
338 3038 5924
|
| 341 |
+
339 3046 5913
|
| 342 |
+
340 3054 5903
|
| 343 |
+
341 3062 5892
|
| 344 |
+
342 3071 5883
|
| 345 |
+
343 3080 5873
|
| 346 |
+
344 3090 5866
|
| 347 |
+
345 3097 5855
|
| 348 |
+
346 3105 5844
|
| 349 |
+
347 3114 5834
|
| 350 |
+
348 3122 5824
|
| 351 |
+
349 3130 5813
|
| 352 |
+
350 3138 5803
|
| 353 |
+
351 3146 5792
|
| 354 |
+
352 3155 5783
|
| 355 |
+
353 3163 5772
|
| 356 |
+
354 3171 5761
|
| 357 |
+
355 3179 5751
|
| 358 |
+
356 3187 5741
|
| 359 |
+
357 3196 5731
|
| 360 |
+
358 3204 5721
|
| 361 |
+
359 3212 5711
|
| 362 |
+
360 3221 5702
|
| 363 |
+
361 3231 5694
|
| 364 |
+
362 3239 5684
|
| 365 |
+
363 3247 5674
|
| 366 |
+
364 3255 5664
|
| 367 |
+
365 3264 5654
|
| 368 |
+
366 3272 5643
|
| 369 |
+
367 3280 5633
|
| 370 |
+
368 3288 5623
|
| 371 |
+
369 3296 5613
|
| 372 |
+
370 3305 5603
|
| 373 |
+
371 3313 5593
|
| 374 |
+
372 3321 5583
|
| 375 |
+
373 3329 5573
|
| 376 |
+
374 3337 5563
|
| 377 |
+
375 3346 5553
|
| 378 |
+
376 3354 5543
|
| 379 |
+
377 3362 5532
|
| 380 |
+
378 3370 5522
|
| 381 |
+
379 3378 5512
|
| 382 |
+
380 3386 5502
|
| 383 |
+
381 3394 5492
|
| 384 |
+
382 3403 5483
|
| 385 |
+
383 3412 5473
|
| 386 |
+
384 3420 5464
|
| 387 |
+
385 3428 5453
|
| 388 |
+
386 3436 5443
|
| 389 |
+
387 3445 5433
|
| 390 |
+
388 3453 5423
|
| 391 |
+
389 3461 5413
|
| 392 |
+
390 3469 5403
|
| 393 |
+
391 3478 5394
|
| 394 |
+
392 3487 5387
|
| 395 |
+
393 3496 5377
|
| 396 |
+
394 3504 5367
|
| 397 |
+
395 3512 5357
|
| 398 |
+
396 3520 5347
|
| 399 |
+
397 3528 5337
|
| 400 |
+
398 3536 5326
|
| 401 |
+
399 3544 5316
|
| 402 |
+
400 3552 5307
|
| 403 |
+
401 3561 5297
|
| 404 |
+
402 3568 5286
|
| 405 |
+
403 3577 5277
|
| 406 |
+
404 3585 5267
|
| 407 |
+
405 3593 5257
|
| 408 |
+
406 3601 5247
|
| 409 |
+
407 3609 5238
|
| 410 |
+
408 3618 5228
|
| 411 |
+
409 3626 5219
|
| 412 |
+
410 3636 5211
|
| 413 |
+
411 3644 5201
|
| 414 |
+
412 3653 5192
|
| 415 |
+
413 3661 5182
|
| 416 |
+
414 3670 5173
|
| 417 |
+
415 3678 5163
|
| 418 |
+
416 3686 5153
|
| 419 |
+
417 3693 5143
|
| 420 |
+
418 3702 5133
|
| 421 |
+
419 3710 5123
|
| 422 |
+
420 3718 5113
|
| 423 |
+
421 3726 5104
|
| 424 |
+
422 3734 5094
|
| 425 |
+
423 3743 5085
|
| 426 |
+
424 3751 5075
|
| 427 |
+
425 3759 5066
|
| 428 |
+
426 3768 5056
|
| 429 |
+
427 3776 5047
|
| 430 |
+
428 3785 5038
|
| 431 |
+
429 3793 5029
|
| 432 |
+
430 3802 5020
|
| 433 |
+
431 3811 5011
|
| 434 |
+
432 3820 5002
|
| 435 |
+
433 3828 4993
|
| 436 |
+
434 3837 4984
|
| 437 |
+
435 3846 4976
|
| 438 |
+
436 3855 4967
|
| 439 |
+
437 3863 4956
|
| 440 |
+
438 3872 4948
|
| 441 |
+
439 3881 4940
|
| 442 |
+
440 3890 4931
|
| 443 |
+
441 3899 4922
|
| 444 |
+
442 3907 4913
|
| 445 |
+
443 3915 4903
|
| 446 |
+
444 3924 4894
|
| 447 |
+
445 3933 4885
|
| 448 |
+
446 3941 4876
|
| 449 |
+
447 3950 4867
|
| 450 |
+
448 3961 4861
|
| 451 |
+
449 3970 4853
|
| 452 |
+
450 3979 4843
|
| 453 |
+
451 3987 4834
|
| 454 |
+
452 3996 4826
|
| 455 |
+
453 4005 4817
|
| 456 |
+
454 4015 4809
|
| 457 |
+
455 4025 4802
|
| 458 |
+
456 4034 4793
|
| 459 |
+
457 4042 4784
|
| 460 |
+
458 4051 4775
|
| 461 |
+
459 4060 4766
|
| 462 |
+
460 4068 4757
|
| 463 |
+
461 4077 4747
|
| 464 |
+
462 4085 4738
|
| 465 |
+
463 4094 4729
|
| 466 |
+
464 4103 4721
|
| 467 |
+
465 4112 4712
|
| 468 |
+
466 4120 4702
|
| 469 |
+
467 4128 4693
|
| 470 |
+
468 4136 4683
|
| 471 |
+
469 4145 4674
|
| 472 |
+
470 4154 4665
|
| 473 |
+
471 4162 4656
|
| 474 |
+
472 4171 4647
|
| 475 |
+
473 4180 4638
|
| 476 |
+
474 4188 4629
|
| 477 |
+
475 4197 4620
|
| 478 |
+
476 4205 4610
|
| 479 |
+
477 4213 4601
|
| 480 |
+
478 4222 4592
|
| 481 |
+
479 4230 4582
|
| 482 |
+
480 4237 4572
|
| 483 |
+
481 4245 4562
|
| 484 |
+
482 4253 4553
|
| 485 |
+
483 4261 4543
|
| 486 |
+
484 4270 4535
|
| 487 |
+
485 4280 4526
|
| 488 |
+
486 4288 4517
|
| 489 |
+
487 4296 4508
|
| 490 |
+
488 4305 4498
|
| 491 |
+
489 4312 4488
|
| 492 |
+
490 4320 4478
|
| 493 |
+
491 4328 4469
|
| 494 |
+
492 4334 4457
|
| 495 |
+
493 4342 4448
|
| 496 |
+
494 4351 4439
|
| 497 |
+
495 4359 4429
|
| 498 |
+
496 4366 4419
|
| 499 |
+
497 4372 4407
|
| 500 |
+
498 4380 4398
|
| 501 |
+
499 4389 4389
|
| 502 |
+
500 4397 4379
|
| 503 |
+
501 4406 4370
|
| 504 |
+
502 4415 4362
|
| 505 |
+
503 4423 4353
|
| 506 |
+
504 4432 4344
|
| 507 |
+
505 4440 4335
|
| 508 |
+
506 4449 4326
|
| 509 |
+
507 4554 4410
|
| 510 |
+
508 4562 4401
|
| 511 |
+
509 4570 4391
|
| 512 |
+
510 4579 4381
|
| 513 |
+
511 4587 4372
|
| 514 |
+
512 4595 4362
|
| 515 |
+
513 4603 4352
|
| 516 |
+
514 4611 4342
|
| 517 |
+
515 4619 4333
|
| 518 |
+
516 4628 4323
|
| 519 |
+
517 4636 4313
|
| 520 |
+
518 4644 4304
|
| 521 |
+
519 4652 4294
|
| 522 |
+
520 4660 4284
|
| 523 |
+
521 4669 4276
|
| 524 |
+
522 4678 4266
|
| 525 |
+
523 4686 4256
|
| 526 |
+
524 4694 4247
|
| 527 |
+
525 4702 4237
|
| 528 |
+
526 4709 4226
|
| 529 |
+
527 4717 4216
|
| 530 |
+
528 4725 4207
|
| 531 |
+
529 4733 4197
|
| 532 |
+
530 4741 4188
|
| 533 |
+
531 4749 4178
|
| 534 |
+
532 4757 4168
|
| 535 |
+
533 4765 4158
|
| 536 |
+
534 4773 4149
|
| 537 |
+
535 4781 4139
|
| 538 |
+
536 4789 4129
|
| 539 |
+
537 4797 4119
|
| 540 |
+
538 4806 4111
|
| 541 |
+
539 4813 4100
|
| 542 |
+
540 4821 4090
|
| 543 |
+
541 4830 4082
|
| 544 |
+
542 4838 4072
|
| 545 |
+
543 4847 4062
|
| 546 |
+
544 4855 4053
|
| 547 |
+
545 4863 4044
|
| 548 |
+
546 4871 4034
|
| 549 |
+
547 4878 4024
|
| 550 |
+
548 4885 4013
|
| 551 |
+
549 4894 4004
|
| 552 |
+
550 4901 3994
|
| 553 |
+
551 4910 3985
|
| 554 |
+
552 4920 3977
|
| 555 |
+
553 4928 3968
|
| 556 |
+
554 4937 3958
|
| 557 |
+
555 4945 3949
|
| 558 |
+
556 4954 3940
|
| 559 |
+
557 4962 3930
|
| 560 |
+
558 4970 3921
|
| 561 |
+
559 4979 3912
|
| 562 |
+
560 4987 3902
|
| 563 |
+
561 4995 3893
|
| 564 |
+
562 5004 3884
|
| 565 |
+
563 5012 3875
|
| 566 |
+
564 5018 3863
|
| 567 |
+
565 5026 3854
|
| 568 |
+
566 5035 3845
|
| 569 |
+
567 5045 3837
|
| 570 |
+
568 5053 3828
|
| 571 |
+
569 5062 3818
|
| 572 |
+
570 5070 3809
|
| 573 |
+
571 5078 3800
|
| 574 |
+
572 5086 3790
|
| 575 |
+
573 5095 3781
|
| 576 |
+
574 5103 3772
|
| 577 |
+
575 5111 3762
|
| 578 |
+
576 5120 3753
|
| 579 |
+
577 5128 3744
|
| 580 |
+
578 5137 3735
|
| 581 |
+
579 5145 3726
|
| 582 |
+
580 5154 3716
|
| 583 |
+
581 5162 3707
|
| 584 |
+
582 5170 3698
|
| 585 |
+
583 5179 3689
|
| 586 |
+
584 5188 3680
|
| 587 |
+
585 5196 3671
|
| 588 |
+
586 5205 3662
|
| 589 |
+
587 5213 3653
|
| 590 |
+
588 5221 3643
|
| 591 |
+
589 5230 3634
|
| 592 |
+
590 5238 3625
|
| 593 |
+
591 5246 3615
|
| 594 |
+
592 5254 3606
|
| 595 |
+
593 5263 3597
|
| 596 |
+
594 5271 3588
|
| 597 |
+
595 5279 3578
|
| 598 |
+
596 5288 3569
|
| 599 |
+
597 5296 3560
|
| 600 |
+
598 5304 3551
|
| 601 |
+
599 5313 3542
|
| 602 |
+
600 5321 3533
|
| 603 |
+
601 5330 3523
|
| 604 |
+
602 5338 3514
|
| 605 |
+
603 5347 3505
|
| 606 |
+
604 5355 3496
|
| 607 |
+
605 5363 3487
|
| 608 |
+
606 5372 3478
|
| 609 |
+
607 5380 3469
|
| 610 |
+
608 5388 3459
|
| 611 |
+
609 5397 3450
|
| 612 |
+
610 5405 3441
|
| 613 |
+
611 5413 3432
|
| 614 |
+
612 5421 3422
|
| 615 |
+
613 5430 3414
|
| 616 |
+
614 5440 3405
|
| 617 |
+
615 5449 3397
|
| 618 |
+
616 5457 3387
|
| 619 |
+
617 5465 3378
|
| 620 |
+
618 5474 3369
|
| 621 |
+
619 5482 3360
|
| 622 |
+
620 5490 3350
|
| 623 |
+
621 5498 3341
|
| 624 |
+
622 5506 3332
|
| 625 |
+
623 5514 3323
|
| 626 |
+
624 5523 3314
|
| 627 |
+
625 5531 3305
|
| 628 |
+
626 5540 3295
|
| 629 |
+
627 5548 3286
|
| 630 |
+
628 5556 3277
|
| 631 |
+
629 5564 3268
|
| 632 |
+
630 5572 3258
|
| 633 |
+
631 5581 3249
|
| 634 |
+
632 5589 3240
|
| 635 |
+
633 5597 3231
|
| 636 |
+
634 5605 3222
|
| 637 |
+
635 5612 3212
|
| 638 |
+
636 5620 3202
|
| 639 |
+
637 5628 3193
|
| 640 |
+
638 5637 3184
|
| 641 |
+
639 5645 3175
|
| 642 |
+
640 5653 3166
|
| 643 |
+
641 5661 3156
|
| 644 |
+
642 5669 3147
|
| 645 |
+
643 5677 3138
|
| 646 |
+
644 5685 3129
|
| 647 |
+
645 5693 3119
|
| 648 |
+
646 5701 3110
|
| 649 |
+
647 5709 3101
|
| 650 |
+
648 5718 3092
|
| 651 |
+
649 5725 3082
|
| 652 |
+
650 5732 3073
|
| 653 |
+
651 5741 3064
|
| 654 |
+
652 5749 3054
|
| 655 |
+
653 5757 3045
|
| 656 |
+
654 5765 3036
|
| 657 |
+
655 5773 3027
|
| 658 |
+
656 5781 3018
|
| 659 |
+
657 5789 3009
|
| 660 |
+
658 5798 3000
|
| 661 |
+
659 5806 2991
|
| 662 |
+
660 5814 2982
|
| 663 |
+
661 5822 2972
|
| 664 |
+
662 5831 2963
|
| 665 |
+
663 5839 2954
|
| 666 |
+
664 5849 2946
|
| 667 |
+
665 5857 2937
|
| 668 |
+
666 5865 2928
|
| 669 |
+
667 5873 2919
|
| 670 |
+
668 5881 2909
|
| 671 |
+
669 5889 2900
|
| 672 |
+
670 5898 2892
|
| 673 |
+
671 5906 2883
|
| 674 |
+
672 5914 2873
|
| 675 |
+
673 5922 2864
|
| 676 |
+
674 5930 2855
|
| 677 |
+
675 5939 2846
|
| 678 |
+
676 5947 2837
|
| 679 |
+
677 5956 2828
|
| 680 |
+
678 5963 2819
|
| 681 |
+
679 5970 2809
|
| 682 |
+
680 5979 2801
|
| 683 |
+
681 5988 2792
|
| 684 |
+
682 5996 2783
|
| 685 |
+
683 6005 2774
|
| 686 |
+
684 6013 2765
|
| 687 |
+
685 6021 2756
|
| 688 |
+
686 6029 2747
|
| 689 |
+
687 6038 2738
|
| 690 |
+
688 6046 2729
|
| 691 |
+
689 6054 2720
|
| 692 |
+
690 6063 2711
|
| 693 |
+
691 6071 2702
|
| 694 |
+
692 6078 2692
|
| 695 |
+
693 6087 2683
|
| 696 |
+
694 6095 2674
|
| 697 |
+
695 6103 2665
|
| 698 |
+
696 6111 2656
|
| 699 |
+
697 6119 2647
|
| 700 |
+
698 6128 2639
|
| 701 |
+
699 6136 2629
|
| 702 |
+
700 6144 2620
|
| 703 |
+
701 6153 2612
|
| 704 |
+
702 6161 2603
|
| 705 |
+
703 6170 2594
|
| 706 |
+
704 6178 2585
|
| 707 |
+
705 6186 2576
|
| 708 |
+
706 6195 2567
|
| 709 |
+
707 6203 2558
|
| 710 |
+
708 6212 2549
|
| 711 |
+
709 6222 2541
|
| 712 |
+
710 6231 2532
|
| 713 |
+
711 6239 2523
|
| 714 |
+
712 6247 2514
|
| 715 |
+
713 6256 2505
|
| 716 |
+
714 6264 2496
|
| 717 |
+
715 6272 2487
|
| 718 |
+
716 6280 2478
|
| 719 |
+
717 6288 2469
|
| 720 |
+
718 6296 2460
|
| 721 |
+
719 6304 2451
|
| 722 |
+
720 6313 2442
|
| 723 |
+
721 6322 2434
|
| 724 |
+
722 6330 2425
|
| 725 |
+
723 6339 2416
|
| 726 |
+
724 6348 2407
|
| 727 |
+
725 6356 2398
|
| 728 |
+
726 6364 2390
|
| 729 |
+
727 6373 2381
|
| 730 |
+
728 6382 2372
|
| 731 |
+
729 6389 2363
|
| 732 |
+
730 6397 2354
|
| 733 |
+
731 6406 2345
|
| 734 |
+
732 6414 2336
|
| 735 |
+
733 6423 2327
|
| 736 |
+
734 6430 2318
|
| 737 |
+
735 6438 2309
|
| 738 |
+
736 6446 2300
|
| 739 |
+
737 6455 2291
|
| 740 |
+
738 6462 2282
|
| 741 |
+
739 6469 2273
|
| 742 |
+
740 6478 2264
|
| 743 |
+
741 6485 2255
|
| 744 |
+
742 6493 2246
|
| 745 |
+
743 6500 2236
|
| 746 |
+
744 6508 2227
|
| 747 |
+
745 6515 2218
|
| 748 |
+
746 6523 2209
|
| 749 |
+
747 6531 2200
|
| 750 |
+
748 6539 2191
|
| 751 |
+
749 6546 2182
|
| 752 |
+
750 6555 2173
|
| 753 |
+
751 6563 2164
|
| 754 |
+
752 6570 2155
|
| 755 |
+
753 6577 2146
|
| 756 |
+
754 6585 2137
|
| 757 |
+
755 6594 2128
|
| 758 |
+
756 6602 2119
|
| 759 |
+
757 6610 2110
|
| 760 |
+
758 6619 2101
|
| 761 |
+
759 6626 2092
|
| 762 |
+
760 6634 2083
|
| 763 |
+
761 6641 2074
|
| 764 |
+
762 6650 2065
|
| 765 |
+
763 6657 2056
|
| 766 |
+
764 6664 2047
|
| 767 |
+
765 6672 2038
|
| 768 |
+
766 6680 2029
|
| 769 |
+
767 6687 2020
|
| 770 |
+
768 6694 2011
|
| 771 |
+
769 6702 2002
|
| 772 |
+
770 6709 1992
|
| 773 |
+
771 6716 1983
|
| 774 |
+
772 6723 1974
|
| 775 |
+
773 6731 1965
|
| 776 |
+
774 6738 1956
|
| 777 |
+
775 6745 1947
|
| 778 |
+
776 6752 1938
|
| 779 |
+
777 6760 1929
|
| 780 |
+
778 6769 1920
|
| 781 |
+
779 6777 1911
|
| 782 |
+
780 6785 1902
|
| 783 |
+
781 6793 1893
|
| 784 |
+
782 6800 1884
|
| 785 |
+
783 6808 1875
|
| 786 |
+
784 6816 1866
|
| 787 |
+
785 6823 1857
|
| 788 |
+
786 6831 1848
|
| 789 |
+
787 6838 1839
|
| 790 |
+
788 6846 1830
|
| 791 |
+
789 6853 1821
|
| 792 |
+
790 6864 1813
|
| 793 |
+
791 6871 1804
|
| 794 |
+
792 6879 1795
|
| 795 |
+
793 6886 1786
|
| 796 |
+
794 6894 1777
|
| 797 |
+
795 6901 1768
|
| 798 |
+
796 6910 1760
|
| 799 |
+
797 6917 1751
|
| 800 |
+
798 6924 1741
|
| 801 |
+
799 6931 1732
|
| 802 |
+
800 6939 1723
|
| 803 |
+
801 6947 1715
|
| 804 |
+
802 6954 1706
|
| 805 |
+
803 6961 1697
|
| 806 |
+
804 6969 1688
|
| 807 |
+
805 6977 1679
|
| 808 |
+
806 6985 1670
|
| 809 |
+
807 6992 1661
|
| 810 |
+
808 6999 1652
|
| 811 |
+
809 7008 1644
|
| 812 |
+
810 7016 1635
|
| 813 |
+
811 7023 1626
|
| 814 |
+
812 7031 1617
|
| 815 |
+
813 7038 1608
|
| 816 |
+
814 7045 1599
|
| 817 |
+
815 7053 1590
|
| 818 |
+
816 7060 1581
|
| 819 |
+
817 7068 1572
|
| 820 |
+
818 7076 1563
|
| 821 |
+
819 7083 1554
|
| 822 |
+
820 7090 1545
|
| 823 |
+
821 7098 1537
|
| 824 |
+
822 7105 1528
|
| 825 |
+
823 7112 1519
|
| 826 |
+
824 7120 1510
|
| 827 |
+
825 7128 1501
|
| 828 |
+
826 7135 1492
|
| 829 |
+
827 7142 1483
|
| 830 |
+
828 7150 1475
|
| 831 |
+
829 7158 1466
|
| 832 |
+
830 7165 1457
|
| 833 |
+
831 7173 1448
|
| 834 |
+
832 7180 1439
|
| 835 |
+
833 7188 1430
|
| 836 |
+
834 7195 1421
|
| 837 |
+
835 7202 1412
|
| 838 |
+
836 7210 1404
|
| 839 |
+
837 7217 1395
|
| 840 |
+
838 7224 1386
|
| 841 |
+
839 7232 1377
|
| 842 |
+
840 7240 1368
|
| 843 |
+
841 7247 1360
|
| 844 |
+
842 7255 1351
|
| 845 |
+
843 7262 1342
|
| 846 |
+
844 7270 1333
|
| 847 |
+
845 7278 1324
|
| 848 |
+
846 7285 1316
|
| 849 |
+
847 7292 1307
|
| 850 |
+
848 7300 1298
|
| 851 |
+
849 7307 1289
|
| 852 |
+
850 7315 1280
|
| 853 |
+
851 7323 1272
|
| 854 |
+
852 7330 1263
|
| 855 |
+
853 7338 1254
|
| 856 |
+
854 7345 1245
|
| 857 |
+
855 7353 1236
|
| 858 |
+
856 7362 1228
|
| 859 |
+
857 7370 1219
|
| 860 |
+
858 7377 1211
|
| 861 |
+
859 7386 1202
|
| 862 |
+
860 7393 1193
|
| 863 |
+
861 7401 1184
|
| 864 |
+
862 7410 1176
|
| 865 |
+
863 7417 1167
|
| 866 |
+
864 7425 1158
|
| 867 |
+
865 7432 1150
|
| 868 |
+
866 7439 1141
|
| 869 |
+
867 7447 1132
|
| 870 |
+
868 7454 1123
|
| 871 |
+
869 7461 1114
|
| 872 |
+
870 7469 1106
|
| 873 |
+
871 7476 1097
|
| 874 |
+
872 7484 1088
|
| 875 |
+
873 7491 1079
|
| 876 |
+
874 7498 1071
|
| 877 |
+
875 7505 1062
|
| 878 |
+
876 7511 1053
|
| 879 |
+
877 7519 1044
|
| 880 |
+
878 7527 1036
|
| 881 |
+
879 7535 1027
|
| 882 |
+
880 7542 1018
|
| 883 |
+
881 7550 1010
|
| 884 |
+
882 7558 1001
|
| 885 |
+
883 7567 992
|
| 886 |
+
884 7574 984
|
| 887 |
+
885 7582 975
|
| 888 |
+
886 7590 966
|
| 889 |
+
887 7597 958
|
| 890 |
+
888 7605 949
|
| 891 |
+
889 7612 940
|
| 892 |
+
890 7620 932
|
| 893 |
+
891 7627 923
|
| 894 |
+
892 7635 914
|
| 895 |
+
893 7642 906
|
| 896 |
+
894 7650 897
|
| 897 |
+
895 7658 888
|
| 898 |
+
896 7665 880
|
| 899 |
+
897 7673 871
|
| 900 |
+
898 7680 862
|
| 901 |
+
899 7688 854
|
| 902 |
+
900 7695 845
|
| 903 |
+
901 7703 836
|
| 904 |
+
902 7710 828
|
| 905 |
+
903 7717 819
|
| 906 |
+
904 7724 810
|
| 907 |
+
905 7732 802
|
| 908 |
+
906 7739 793
|
| 909 |
+
907 7746 784
|
| 910 |
+
908 7754 776
|
| 911 |
+
909 7761 767
|
| 912 |
+
910 7769 758
|
| 913 |
+
911 7778 750
|
| 914 |
+
912 7785 741
|
| 915 |
+
913 7793 733
|
| 916 |
+
914 7800 724
|
| 917 |
+
915 7807 715
|
| 918 |
+
916 7814 707
|
| 919 |
+
917 7822 698
|
| 920 |
+
918 7831 690
|
| 921 |
+
919 7839 681
|
| 922 |
+
920 7846 673
|
| 923 |
+
921 7854 664
|
| 924 |
+
922 7861 655
|
| 925 |
+
923 7868 647
|
| 926 |
+
924 7876 638
|
| 927 |
+
925 7883 630
|
| 928 |
+
926 7891 621
|
| 929 |
+
927 7898 612
|
| 930 |
+
928 7906 604
|
| 931 |
+
929 7913 595
|
| 932 |
+
930 7921 587
|
| 933 |
+
931 7928 578
|
| 934 |
+
932 7936 569
|
| 935 |
+
933 7944 561
|
| 936 |
+
934 7951 552
|
| 937 |
+
935 7959 544
|
| 938 |
+
936 7966 535
|
| 939 |
+
937 7973 527
|
| 940 |
+
938 7980 518
|
| 941 |
+
939 7988 509
|
| 942 |
+
940 7995 501
|
| 943 |
+
941 8003 492
|
| 944 |
+
942 8011 484
|
| 945 |
+
943 8018 475
|
| 946 |
+
944 8026 467
|
| 947 |
+
945 8033 458
|
| 948 |
+
946 8041 450
|
| 949 |
+
947 8048 441
|
| 950 |
+
948 8056 432
|
| 951 |
+
949 8063 424
|
| 952 |
+
950 8070 415
|
| 953 |
+
951 8078 407
|
| 954 |
+
952 8086 398
|
| 955 |
+
953 8093 390
|
| 956 |
+
954 8100 381
|
| 957 |
+
955 8107 373
|
| 958 |
+
956 8114 364
|
| 959 |
+
957 8122 356
|
| 960 |
+
958 8130 347
|
| 961 |
+
959 8138 339
|
| 962 |
+
960 8145 330
|
| 963 |
+
961 8152 322
|
| 964 |
+
962 8160 313
|
| 965 |
+
963 8168 305
|
| 966 |
+
964 8175 296
|
| 967 |
+
965 8183 288
|
| 968 |
+
966 8192 279
|
| 969 |
+
967 8199 271
|
| 970 |
+
968 8206 262
|
| 971 |
+
969 8213 254
|
| 972 |
+
970 8221 245
|
| 973 |
+
971 8228 237
|
| 974 |
+
972 8236 228
|
| 975 |
+
973 8243 220
|
| 976 |
+
974 8251 211
|
| 977 |
+
975 8258 203
|
| 978 |
+
976 8266 194
|
| 979 |
+
977 8273 186
|
| 980 |
+
978 8281 177
|
| 981 |
+
979 8288 169
|
| 982 |
+
980 8296 160
|
| 983 |
+
981 8304 152
|
| 984 |
+
982 8313 143
|
| 985 |
+
983 8320 135
|
| 986 |
+
984 8328 126
|
| 987 |
+
985 8336 118
|
| 988 |
+
986 8343 109
|
| 989 |
+
987 8350 101
|
| 990 |
+
988 8358 92
|
| 991 |
+
989 8365 84
|
| 992 |
+
990 8372 76
|
| 993 |
+
991 8379 67
|
| 994 |
+
992 8386 59
|
| 995 |
+
993 8394 50
|
| 996 |
+
994 8401 42
|
| 997 |
+
995 8408 33
|
| 998 |
+
996 8415 25
|
| 999 |
+
997 8422 16
|
| 1000 |
+
998 8429 8
|
| 1001 |
+
999 8437 0
|
config.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from dotenv import load_dotenv
|
| 3 |
+
|
| 4 |
+
load_dotenv()
|
| 5 |
+
|
| 6 |
+
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
|
frontend/BGround.png
ADDED
|
frontend/index.html
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>AutoML Companion</title>
    <!-- App stylesheet is served from the site root by the backend. -->
    <link rel="stylesheet" href="/style.css">
    <!-- Font Awesome icons used by the sidebar and page headings. -->
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
</head>
<body>
    <div class="app-layout">
        <!-- Collapsible navigation sidebar; toggle behavior lives in main.js. -->
        <aside class="sidebar">
            <div class="sidebar-header">
                <h2>AutoML</h2>
                <button id="sidebar-toggle"><i class="fas fa-angle-left"></i></button>
            </div>
            <!-- data-page on each link names the page <div> to activate. -->
            <nav class="sidebar-nav">
                <a href="#" class="nav-link active" data-page="load-dataset"><i class="fas fa-upload"></i><span class="nav-text">Load Dataset</span></a>
                <a href="#" class="nav-link" data-page="train"><i class="fas fa-cogs"></i><span class="nav-text">Model Trainer</span></a>
                <a href="#" class="nav-link" data-page="visualize"><i class="fas fa-chart-bar"></i><span class="nav-text">Visualize</span></a>
                <a href="#" class="nav-link" data-page="ask"><i class="fas fa-question-circle"></i><span class="nav-text">Ask AI</span></a>
            </nav>
        </aside>
        <main class="main-content">
            <div class="container">
                <!-- Page 1: CSV upload; lists the dataset's columns after upload. -->
                <div id="load-dataset" class="page active">
                    <div class="card">
                        <h2><i class="fas fa-upload"></i> Load Dataset</h2>
                        <p>Upload a CSV file to begin your analysis.</p>
                        <div class="file-upload-wrapper">
                            <input type="file" id="csv-upload" accept=".csv">
                            <label for="csv-upload">Choose a file</label>
                        </div>
                        <div id="upload-status"></div>
                        <label for="column-list">Available Columns:</label>
                        <select id="column-list" multiple></select>
                    </div>
                </div>
                <!-- Page 2: model training. learning-type is set by the backend
                     (disabled for the user); main.js fills the model list. -->
                <div id="train" class="page">
                    <div class="card">
                        <h2><i class="fas fa-cogs"></i> Model Trainer</h2>
                        <p>Choose a learning type and model to train on your data.</p>
                        <select id="learning-type" disabled>
                            <option value="Supervised">Supervised</option>
                            <option value="Unsupervised">Unsupervised</option>
                        </select>
                        <select id="model-dropdown"></select>
                        <select id="target-column-dropdown"></select>
                        <button id="train-model">Train Model</button>
                        <div id="train-output"></div>
                    </div>
                </div>
                <!-- Page 3: plotting. The color-by selector is shown only for
                     scatter plots (toggled in main.js). -->
                <div id="visualize" class="page">
                    <div class="card">
                        <h2><i class="fas fa-chart-bar"></i> Visualize</h2>
                        <p>Select a plot type and columns to generate a visualization.</p>
                        <select id="plot-type">
                            <option value="">Select Plot Type</option>
                        </select>
                        <label for="plot-col1">Column 1 (Numeric for Box Plot)</label>
                        <select id="plot-col1"></select>
                        <label for="plot-col2">Column 2 (Categorical for Box Plot)</label>
                        <select id="plot-col2"></select>
                        <div id="scatter-color-container" style="display: none;">
                            <label for="scatter-color">Color by (Optional)</label>
                            <select id="scatter-color"></select>
                        </div>
                        <button id="generate-plot">Generate Plot</button>
                        <!-- The backend returns the plot as a base64 PNG; main.js
                             sets it as the img src. -->
                        <div id="plot-output">
                            <div class="loader"></div>
                            <img id="plot-img" src="" alt="">
                        </div>
                        <div id="plot-error"></div>
                        <pre><code id="plot-code"></code></pre>
                    </div>
                </div>
                <!-- Page 4: free-form questions answered by the RAG backend. -->
                <div id="ask" class="page">
                    <div class="card">
                        <h2><i class="fas fa-question-circle"></i> Ask AI</h2>
                        <p>Ask a question about your dataset in plain English.</p>
                        <input type="text" id="ai-question" placeholder="e.g., 'What is the correlation between X and Y?'">
                        <button id="ask-ai">Ask</button>
                        <div id="ai-answer"></div>
                    </div>
                </div>
            </div>
        </main>
    </div>
    <script src="/main.js"></script>
</body>
</html>
frontend/main.js
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * AutoML Companion frontend controller.
 *
 * Wires the sidebar navigation and the four pages (upload, train,
 * visualize, ask) to the backend JSON API under /api/*. All transient
 * state lives in the DOM; the uploaded dataset is kept server-side.
 */
document.addEventListener('DOMContentLoaded', () => {
    const navLinks = document.querySelectorAll('.nav-link');
    const pages = document.querySelectorAll('.page');
    const sidebar = document.querySelector('.sidebar');
    const mainContent = document.querySelector('.main-content');
    const sidebarToggle = document.getElementById('sidebar-toggle');
    const loader = document.querySelector('.loader');

    // Restart the fade-in animation on the nav labels (used when the
    // sidebar re-expands).
    function animateNavText() {
        const navTexts = document.querySelectorAll('.nav-text');
        navTexts.forEach(text => {
            text.style.animation = 'none';
            text.offsetHeight; // Force a reflow so the animation restarts.
            text.style.animation = '';
        });
    }

    // Single-page navigation: activate the page named by the link's
    // data-page attribute and deactivate the rest.
    navLinks.forEach(link => {
        link.addEventListener('click', (e) => {
            e.preventDefault();
            const pageId = link.dataset.page;

            pages.forEach(page => {
                page.classList.remove('active');
            });

            navLinks.forEach(navLink => {
                navLink.classList.remove('active');
            });

            document.getElementById(pageId).classList.add('active');
            link.classList.add('active');
        });
    });

    sidebarToggle.addEventListener('click', () => {
        sidebar.classList.toggle('collapsed');
        mainContent.classList.toggle('collapsed');
        if (!sidebar.classList.contains('collapsed')) {
            animateNavText();
        }
    });

    /**
     * Convert a small Markdown subset (bold, numbered lists, bulleted
     * lists) in the AI's answer into HTML.
     *
     * Bug fix: the original wrapped the same <li> span twice — first in
     * <ol>, then again in <ul> — producing invalid nested lists for any
     * numbered answer. Each list wrapper is now applied only when items
     * of that kind are actually present, and at most once.
     */
    function formatAIResponse(text) {
        text = text.replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>');
        const hasOrdered = /^\d+\.\s+/m.test(text);
        text = text.replace(/^\d+\.\s+(.*)/gm, '<li>$1</li>');
        if (hasOrdered) {
            text = text.replace(/(<li>.*<\/li>)/s, '<ol>$1</ol>');
        }
        const hasUnordered = /^\*\s+/m.test(text);
        text = text.replace(/^\*\s+(.*)/gm, '<li>$1</li>');
        if (hasUnordered && !hasOrdered) {
            text = text.replace(/(<li>.*<\/li>)/s, '<ul>$1</ul>');
        }
        return text;
    }

    // Cached element references for the four pages.
    const csvUpload = document.getElementById('csv-upload');
    const uploadStatus = document.getElementById('upload-status');
    const columnList = document.getElementById('column-list');
    const plotType = document.getElementById('plot-type');
    const plotCol1 = document.getElementById('plot-col1');
    const plotCol2 = document.getElementById('plot-col2');
    const scatterColorContainer = document.getElementById('scatter-color-container');
    const scatterColor = document.getElementById('scatter-color');
    const generatePlot = document.getElementById('generate-plot');
    const plotImg = document.getElementById('plot-img');
    const plotError = document.getElementById('plot-error');
    const learningType = document.getElementById('learning-type');
    const modelDropdown = document.getElementById('model-dropdown');
    const targetColumnDropdown = document.getElementById('target-column-dropdown');
    const trainModel = document.getElementById('train-model');
    const trainOutput = document.getElementById('train-output');
    const aiQuestion = document.getElementById('ai-question');
    const askAi = document.getElementById('ask-ai');
    const aiAnswer = document.getElementById('ai-answer');

    // Upload the chosen CSV, then refresh every column-driven widget.
    csvUpload.addEventListener('change', async (event) => {
        const file = event.target.files[0];
        if (!file) return;

        uploadStatus.textContent = 'Uploading...';
        const formData = new FormData();
        formData.append('file', file);

        try {
            const response = await fetch('/api/upload', {
                method: 'POST',
                body: formData
            });
            const result = await response.json();
            uploadStatus.textContent = result.message || result.error;
            if (response.ok) {
                updateColumnSelectors();
                setLearningType();
                updatePlotOptions();
            }
        } catch (error) {
            uploadStatus.textContent = `Error: ${error.message}`;
        }
    });

    // Repopulate all column <select>s from /api/columns, each prefixed
    // with a "None" option.
    async function updateColumnSelectors() {
        try {
            const response = await fetch('/api/columns');
            const result = await response.json();
            const columns = result.columns || [];

            [columnList, plotCol1, plotCol2, targetColumnDropdown, scatterColor].forEach(selector => {
                selector.innerHTML = '';
                const defaultOption = document.createElement('option');
                defaultOption.value = 'None';
                defaultOption.textContent = 'None';
                selector.appendChild(defaultOption);

                columns.forEach(col => {
                    const option = document.createElement('option');
                    option.value = col;
                    option.textContent = col;
                    selector.appendChild(option);
                });
            });
        } catch (error) {
            console.error('Error updating column selectors:', error);
        }
    }

    // The color-by selector only applies to scatter plots.
    plotType.addEventListener('change', () => {
        if (plotType.value === 'Scatter') {
            scatterColorContainer.style.display = 'block';
        } else {
            scatterColorContainer.style.display = 'none';
        }
    });

    // Request a plot from the backend; it returns a base64 PNG.
    generatePlot.addEventListener('click', async () => {
        if (!plotType.value) {
            plotError.textContent = 'Please select a plot type.';
            return;
        }

        loader.style.display = 'block';
        plotImg.src = '';
        plotError.textContent = '';

        const body = {
            plot_type: plotType.value,
            col1: plotCol1.value,
            col2: plotCol2.value,
            color_col: scatterColor.value
        };

        try {
            const response = await fetch('/api/plot', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(body)
            });
            const result = await response.json();
            if (result.image) {
                plotImg.src = `data:image/png;base64,${result.image}`;
            } else {
                plotError.textContent = result.error;
            }
        } catch (error) {
            plotError.textContent = `Error: ${error.message}`;
        } finally {
            loader.style.display = 'none';
        }
    });

    // Render a metrics object as bolded "key: value" lines.
    function formatMetrics(metrics) {
        let formatted = '\n';
        for (const [key, value] of Object.entries(metrics)) {
            formatted += `<strong>${key}:</strong> ${JSON.stringify(value, null, 2)}\n`;
        }
        return formatted;
    }

    // The backend decides the learning type from the dataset; mirror it
    // in the (read-only) dropdown and preselect the suggested target.
    async function setLearningType() {
        try {
            const response = await fetch('/api/learning_type');
            const result = await response.json();
            if (result.learning_type) {
                learningType.disabled = false;
                learningType.value = result.learning_type;
                learningType.dispatchEvent(new Event('change'));
                learningType.disabled = true;
                if (result.learning_type === 'Supervised' && result.target_column) {
                    targetColumnDropdown.value = result.target_column;
                }
            }
        } catch (error) {
            console.error('Error setting learning type:', error);
        }
    }

    // Fill the plot-type dropdown from /api/plot_options.
    async function updatePlotOptions() {
        try {
            const response = await fetch('/api/plot_options');
            const result = await response.json();
            const plots = result.plots || [];

            plotType.innerHTML = '';
            const defaultOption = document.createElement('option');
            defaultOption.value = '';
            defaultOption.textContent = 'Select Plot Type';
            plotType.appendChild(defaultOption);

            plots.forEach(plotName => {
                const option = document.createElement('option');
                option.value = plotName;
                option.textContent = plotName;
                plotType.appendChild(option);
            });
        } catch (error) {
            console.error('Error updating plot options:', error);
        }
    }

    // Kick off training and render the returned metrics/result.
    trainModel.addEventListener('click', async () => {
        trainOutput.textContent = 'Training in progress...';
        loader.style.display = 'block';
        const body = {
            learning_type: learningType.value,
            model_name: modelDropdown.value,
            target_col: targetColumnDropdown.value
        };

        try {
            const response = await fetch('/api/train', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(body)
            });
            const result = await response.json();
            let output = result.message || result.error;
            if (result.metrics) {
                output += formatMetrics(result.metrics);
            }
            if (result.result) {
                output += `\n<strong>Result:</strong> ${JSON.stringify(result.result, null, 2)}`;
            }
            trainOutput.innerHTML = output;
        } catch (error) {
            trainOutput.textContent = `Error: ${error.message}`;
        } finally {
            loader.style.display = 'none';
        }
    });

    // Send the free-form question to the RAG backend.
    askAi.addEventListener('click', async () => {
        aiAnswer.textContent = 'Thinking...';
        loader.style.display = 'block';
        const body = {
            user_query: aiQuestion.value
        };

        try {
            const response = await fetch('/api/ask', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(body)
            });
            const result = await response.json();
            aiAnswer.innerHTML = formatAIResponse(result.answer || result.error);
        } catch (error) {
            aiAnswer.textContent = `Error: ${error.message}`;
        } finally {
            loader.style.display = 'none';
        }
    });

    // The model list depends on the learning type.
    learningType.addEventListener('change', () => {
        const supervisedModels = ["Logistic Regression", "Naive Bayes", "Decision Tree", "Random Forest", "SVM", "KNN", "XGBoost", "CatBoost", "Linear Regression"];
        const unsupervisedModels = ["KMeans", "DBSCAN", "PCA"];
        const models = learningType.value === 'Supervised' ? supervisedModels : unsupervisedModels;

        modelDropdown.innerHTML = '';
        models.forEach(model => {
            const option = document.createElement('option');
            option.value = model;
            option.textContent = model;
            modelDropdown.appendChild(option);
        });
    });

    learningType.dispatchEvent(new Event('change'));

    // Click-to-copy on the output boxes.
    // Bug fix: the original saved and restored textContent, which
    // discarded the HTML formatting (<strong>, lists) of trainOutput and
    // aiAnswer after the "Copied!" flash. Save/restore innerHTML instead;
    // the clipboard still receives the plain text.
    [trainOutput, aiAnswer, uploadStatus, plotError].forEach(el => {
        el.addEventListener('click', () => {
            const textToCopy = el.textContent;
            navigator.clipboard.writeText(textToCopy).then(() => {
                const originalHTML = el.innerHTML;
                el.textContent = 'Copied!';
                setTimeout(() => {
                    el.innerHTML = originalHTML;
                }, 1000);
            });
        });
    });
});
frontend/style.css
ADDED
|
@@ -0,0 +1,313 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/* AutoML Companion stylesheet: fixed collapsible sidebar on the left,
   glassmorphism cards over a full-bleed background image on the right. */

/* Design tokens shared across the sheet. */
:root {
    --primary-color: #6A5ACD;
    --secondary-color: #20B2AA;
    --sidebar-bg: #2c3e50;
    --text-color-dark: #34495e;
    --text-color-light: #ecf0f1;
    --hover-color: #7B68EE;
}

body {
    font-family: 'Poppins', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
    margin: 0;
    padding: 0;
    -webkit-font-smoothing: antialiased;
    -moz-osx-font-smoothing: grayscale;
}

/* Full-viewport two-column shell; the content column scrolls, not the page. */
.app-layout {
    display: flex;
    height: 100vh;
    overflow: hidden;
}

/* Fixed sidebar; width animates between expanded (260px) and
   collapsed (80px) when main.js toggles the .collapsed class. */
.sidebar {
    width: 260px;
    background-color: var(--sidebar-bg);
    height: 100%;
    position: fixed;
    top: 0;
    left: 0;
    padding: 1.5rem;
    transition: width 0.3s ease;
    z-index: 10;
    box-shadow: 2px 0 10px rgba(0,0,0,0.1);
}

.sidebar.collapsed {
    width: 80px;
}

.sidebar-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    margin-bottom: 2rem;
}

.sidebar-header h2 {
    margin: 0;
    font-weight: 700;
    font-size: 1.8rem;
    color: var(--text-color-light);
    transition: opacity 0.3s ease;
}

/* Hide the title when collapsed (opacity only, so it can fade back). */
.sidebar.collapsed .sidebar-header h2 {
    opacity: 0;
}

#sidebar-toggle {
    background: transparent;
    border: none;
    color: var(--text-color-light);
    font-size: 1.2rem;
    cursor: pointer;
    transition: transform 0.3s ease;
}

#sidebar-toggle:hover {
    color: var(--hover-color);
}

/* Flip the chevron to point outward when collapsed. */
.sidebar.collapsed #sidebar-toggle {
    transform: rotate(180deg);
}

.sidebar-nav {
    display: flex;
    flex-direction: column;
    gap: 1rem;
}

.sidebar-nav .nav-link {
    display: flex;
    align-items: center;
    gap: 1rem;
    padding: 0.8rem 1rem;
    border-radius: 8px;
    color: var(--text-color-light);
    font-weight: 500;
    text-decoration: none;
    transition: background-color 0.2s, transform 0.2s;
}

.sidebar-nav .nav-link:hover {
    background-color: rgba(255, 255, 255, 0.1);
    transform: translateX(5px);
}

/* Active page link, set by main.js on navigation. */
.sidebar-nav .nav-link.active {
    background-color: var(--primary-color);
    font-weight: 700;
    box-shadow: 0 4px 15px rgba(106, 90, 205, 0.3);
}

.sidebar-nav .nav-link .nav-text {
    opacity: 1;
    transition: opacity 0.3s ease;
}

/* Collapse the labels entirely so only icons remain. */
.sidebar.collapsed .nav-link .nav-text {
    opacity: 0;
    width: 0;
    overflow: hidden;
}

.sidebar.collapsed .nav-link {
    justify-content: center;
}

/* Content column; its left margin tracks the sidebar width. */
.main-content {
    margin-left: 260px;
    width: calc(100% - 260px);
    height: 100%;
    transition: margin-left 0.3s ease, width 0.3s ease;
    background-image: url('BGround.png');
    background-size: cover;
    background-position: center;
    position: relative;
    overflow-y: auto;
}

.main-content.collapsed {
    margin-left: 80px;
    width: calc(100% - 80px);
}

/* Centers the active page card both axes. */
.container {
    width: 100%;
    min-height: 100%;
    display: flex;
    align-items: center;
    justify-content: center;
    padding: 2.5rem;
    box-sizing: border-box;
}

/* Pages are hidden by default; main.js toggles .active. */
.page {
    display: none;
    width: 100%;
    max-width: 900px;
}

.page.active {
    display: block;
    animation: fadeIn 0.5s;
}

@keyframes fadeIn {
    from { opacity: 0; transform: translateY(10px); }
    to { opacity: 1; transform: translateY(0); }
}

/* Frosted-glass card over the background image. */
.card {
    background: rgba(0, 0, 0, 0.3);
    border-radius: 16px;
    box-shadow: 0 4px 30px rgba(0, 0, 0, 0.2);
    backdrop-filter: blur(10px);
    -webkit-backdrop-filter: blur(10px);
    border: 1px solid rgba(255, 255, 255, 0.2);
    padding: 2.5rem;
    color: var(--text-color-light);
    margin-bottom: 2rem;
}

.card h2 {
    color: var(--text-color-light);
    font-weight: 700;
    margin-bottom: 1.5rem;
    text-align: center;
}

.card p, .card label {
    color: var(--text-color-light);
    font-weight: 500;
    margin-bottom: 0.5rem;
    display: block;
}

/* Custom file picker: the real input is hidden; the label is styled
   as a dashed drop-zone. */
.file-upload-wrapper {
    position: relative;
    display: inline-block;
    width: 100%;
    margin-bottom: 1rem;
}

#csv-upload {
    display: none;
}

.file-upload-wrapper label {
    display: block;
    padding: 1rem;
    background: rgba(255, 255, 255, 0.2);
    border: 2px dashed rgba(255, 255, 255, 0.5);
    border-radius: 8px;
    text-align: center;
    cursor: pointer;
    transition: background-color 0.2s, border-color 0.2s;
}

.file-upload-wrapper label:hover {
    background: rgba(255, 255, 255, 0.3);
    border-color: var(--primary-color);
}

/* Shared look for all form controls on the cards. */
select, input[type="text"], button {
    width: 100%;
    padding: 1rem;
    margin-bottom: 1rem;
    border-radius: 8px;
    background: rgba(255, 255, 255, 0.1);
    border: 1px solid rgba(255, 255, 255, 0.2);
    color: var(--text-color-light);
    font-size: 1rem;
    box-sizing: border-box;
}

/* Dropdown options render on the native popup, so use dark-on-light. */
select option {
    background: #fff;
    color: var(--text-color-dark);
}

select:focus, input:focus {
    background: rgba(255, 255, 255, 0.2);
    box-shadow: 0 0 0 3px rgba(106, 90, 205, 0.3);
    outline: none;
}

button {
    background-color: var(--primary-color);
    cursor: pointer;
    transition: background-color 0.2s, transform 0.2s;
}

button:hover {
    background-color: var(--hover-color);
    transform: translateY(-3px);
    box-shadow: 0 6px 20px rgba(106, 90, 205, 0.4);
}

/* Container for the generated plot image and its loading spinner. */
#plot-output {
    background: rgba(0, 0, 0, 0.2);
    border: 1px solid rgba(255, 255, 255, 0.2);
    border-radius: 8px;
    padding: 1rem;
    margin-top: 1rem;
    min-height: 200px;
    display: flex;
    align-items: center;
    justify-content: center;
}

#plot-img {
    max-width: 100%;
    max-height: 100%;
    border-radius: 8px;
}

/* Output boxes; pointer cursor signals the click-to-copy behavior
   wired up in main.js. */
#train-output, #ai-answer, #upload-status, #plot-error {
    background-color: rgba(0, 0, 0, 0.2);
    border: 1px solid rgba(255, 255, 255, 0.2);
    color: var(--text-color-light);
    padding: 1rem;
    border-radius: 8px;
    margin-top: 1rem;
    min-height: 50px;
    white-space: pre-wrap;
    word-wrap: break-word;
    cursor: pointer;
}

/* Spinner; hidden by default, shown from main.js during requests. */
.loader {
    border: 4px solid #f3f3f3;
    border-top: 4px solid var(--primary-color);
    border-radius: 50%;
    width: 40px;
    height: 40px;
    animation: spin 1s linear infinite;
    display: none;
}

@keyframes spin {
    0% { transform: rotate(0deg); }
    100% { transform: rotate(360deg); }
}

/* Nav labels fade in on sidebar expansion (restarted from main.js). */
.nav-text {
    animation: fadeInText 0.5s forwards;
}

@keyframes fadeInText {
    from {
        opacity: 0;
    }
    to {
        opacity: 1;
    }
}

.sidebar.collapsed .nav-text {
    animation: none;
}
models/supervised.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sklearn.linear_model import LinearRegression, LogisticRegression
|
| 2 |
+
from sklearn.naive_bayes import GaussianNB
|
| 3 |
+
from sklearn.tree import DecisionTreeClassifier
|
| 4 |
+
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
|
| 5 |
+
from sklearn.svm import SVC, SVR
|
| 6 |
+
from sklearn.neighbors import KNeighborsClassifier
|
| 7 |
+
from sklearn.model_selection import train_test_split
|
| 8 |
+
from xgboost import XGBClassifier, XGBRegressor
|
| 9 |
+
from catboost import CatBoostClassifier
|
| 10 |
+
from utils.metrics import classification_metrics, regression_metrics
|
| 11 |
+
from utils.data_cleaner import prepare_data
|
| 12 |
+
import pandas as pd
|
| 13 |
+
import logging
|
| 14 |
+
|
| 15 |
+
# Configure logging for this module
|
| 16 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 17 |
+
|
| 18 |
+
def train_model(df, target_column, model_name):
    """Train a supervised model selected by name and evaluate it on a held-out split.

    Args:
        df (pd.DataFrame): Input data containing features and the target.
        target_column (str): Name of the target column in ``df``.
        model_name (str): One of "Logistic Regression", "Naive Bayes",
            "Decision Tree", "Random Forest", "SVM", "KNN", "XGBoost",
            "CatBoost", "Linear Regression".

    Returns:
        tuple: ``(model, metrics, y_test, y_pred, y_pred_proba, X_test)`` on
        success. On failure the second element is an error message string and
        the remaining elements are ``None``.
    """
    try:
        # Clean, encode and scale the data; prepare_data also infers the task
        # type (classification vs regression) from the target column.
        X, y, label_encoders, is_classification = prepare_data(df, target_column)

        # Hold out 20% for evaluation; fixed seed keeps splits reproducible.
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

        model = None
        # Initialize the selected model
        if model_name == "Logistic Regression":
            if not is_classification:
                return None, "Logistic Regression is for classification tasks.", None, None, None, None
            model = LogisticRegression(max_iter=1000)  # Increased max_iter for convergence
        elif model_name == "Naive Bayes":
            if not is_classification:
                return None, "Naive Bayes is for classification tasks.", None, None, None, None
            model = GaussianNB()
        elif model_name == "Decision Tree":
            # Bug fix: previously DecisionTreeClassifier was used even for
            # regression targets; pick the estimator matching the task.
            if is_classification:
                model = DecisionTreeClassifier(random_state=42)
            else:
                # Local import keeps the module-level import block unchanged.
                from sklearn.tree import DecisionTreeRegressor
                model = DecisionTreeRegressor(random_state=42)
        elif model_name == "Random Forest":
            if is_classification:
                model = RandomForestClassifier(random_state=42)
            else:
                model = RandomForestRegressor(random_state=42)
        elif model_name == "SVM":
            if is_classification:
                model = SVC(probability=True, random_state=42)  # probability=True for ROC curve
            else:
                model = SVR()
        elif model_name == "KNN":
            if not is_classification:
                return None, "KNN is for classification tasks.", None, None, None, None
            model = KNeighborsClassifier()
        elif model_name == "XGBoost":
            if is_classification:
                # Bug fix: use_label_encoder was removed in xgboost 2.x and
                # raises TypeError; it was a no-op in 1.6+ anyway.
                model = XGBClassifier(eval_metric='mlogloss', random_state=42)
            else:
                model = XGBRegressor(random_state=42)
        elif model_name == "CatBoost":
            if not is_classification:
                return None, "CatBoost is for classification tasks.", None, None, None, None
            model = CatBoostClassifier(verbose=0, random_state=42)  # verbose=0 to suppress output
        elif model_name == "Linear Regression":
            if is_classification:
                return None, "Linear Regression is for regression tasks.", None, None, None, None
            model = LinearRegression()
        else:
            return None, "Model not found.", None, None, None, None

        # Train the model and predict on the hold-out set.
        model.fit(X_train, y_train)
        y_pred = model.predict(X_test)
        y_pred_proba = None

        # Prediction probabilities are needed downstream for ROC curves.
        if is_classification and hasattr(model, 'predict_proba'):
            y_pred_proba = model.predict_proba(X_test)

        # Calculate evaluation metrics matching the task type.
        if is_classification:
            metrics = classification_metrics(y_test, y_pred)
        else:
            metrics = regression_metrics(y_test, y_pred)

        logging.info(f"Successfully trained {model_name} model.")
        return model, metrics, y_test, y_pred, y_pred_proba, X_test
    except Exception as e:
        logging.error(f"An error occurred during model training for {model_name}: {e}", exc_info=True)
        return None, f"An error occurred during model training: {e}", None, None, None, None
|
models/unsupervised.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sklearn.cluster import KMeans, DBSCAN
|
| 2 |
+
from sklearn.decomposition import PCA
|
| 3 |
+
from utils.data_cleaner import prepare_data
|
| 4 |
+
import pandas as pd
|
| 5 |
+
import logging
|
| 6 |
+
|
| 7 |
+
# Configure logging for this module
|
| 8 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 9 |
+
|
| 10 |
+
def train_unsupervised(df, model_name, n_clusters=3, eps=0.5, min_samples=5, n_components=2):
    """Trains an unsupervised machine learning model based on the specified model name.

    Args:
        df (pd.DataFrame): The input DataFrame for unsupervised learning.
        model_name (str): The name of the unsupervised model to train ("KMeans", "DBSCAN", "PCA").
        n_clusters (int, optional): Number of clusters for KMeans. Defaults to 3.
        eps (float, optional): Maximum neighborhood distance between two samples for DBSCAN. Defaults to 0.5.
        min_samples (int, optional): Neighborhood size for a DBSCAN core point. Defaults to 5.
        n_components (int, optional): Number of components to keep for PCA. Defaults to 2.

    Returns:
        tuple: A 2-tuple. On success: ``(fitted_model, result)`` where result is
        the cluster labels (KMeans/DBSCAN) or the transformed data (PCA).
        On failure: ``(None, error_message)`` — callers must check the first
        element before interpreting the second.
    """
    try:
        # Prepare data for unsupervised learning (cleaning, encoding, scaling).
        # With no target column, prepare_data returns (df_prepared, label_encoders).
        df_prepared, _ = prepare_data(df)

        if df_prepared.empty:
            logging.warning("Prepared DataFrame is empty for unsupervised training.")
            return None, "Prepared data is empty."

        model = None
        # Initialize the selected unsupervised model
        if model_name == "KMeans":
            # n_init=10 silences the sklearn default-change warning and runs 10 restarts.
            model = KMeans(n_clusters=n_clusters, random_state=42, n_init=10)
        elif model_name == "DBSCAN":
            model = DBSCAN(eps=eps, min_samples=min_samples)
        elif model_name == "PCA":
            model = PCA(n_components=n_components)
        else:
            logging.warning(f"Unsupervised model not supported: {model_name}")
            return None, "Model not supported."

        # Fit the model to the prepared data
        fitted_model = model.fit(df_prepared)

        result = None
        # Extract results based on the fitted attributes: clusterers expose
        # labels_, decomposition models expose components_.
        if hasattr(fitted_model, 'labels_'):
            result = fitted_model.labels_
            logging.info(f"KMeans/DBSCAN trained. Clusters/labels generated.")
        elif hasattr(fitted_model, 'components_'):
            result = fitted_model.transform(df_prepared)
            logging.info(f"PCA trained. Data transformed to {n_components} components.")
        else:
            # result stays None for models exposing neither attribute.
            logging.info(f"Unsupervised model {model_name} trained, but no specific labels or components found.")

        return fitted_model, result
    except Exception as e:
        logging.error(f"An error occurred during unsupervised model training for {model_name}: {e}", exc_info=True)
        return None, f"An error occurred during model training: {e}"
|
models/xgboost_model.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
from sklearn.model_selection import train_test_split
|
| 3 |
+
from sklearn.preprocessing import LabelEncoder
|
| 4 |
+
from xgboost import XGBClassifier, XGBRegressor
|
| 5 |
+
from utils.metrics import classification_metrics, regression_metrics
|
| 6 |
+
|
| 7 |
+
def encode_dataframe(df):
    """Label-encode every object-dtype column of a DataFrame.

    Bug fix: the original implementation wrote the encoded values back into
    the caller's DataFrame; this version works on a copy so the input is
    never mutated.

    Args:
        df (pd.DataFrame): Input frame, possibly containing string columns.

    Returns:
        tuple: ``(encoded_df, label_encoders)`` where ``label_encoders`` maps
        each encoded column name to its fitted ``LabelEncoder`` (needed for
        inverse transforms).
    """
    df = df.copy()  # do not mutate the caller's frame
    label_encoders = {}
    for col in df.select_dtypes(include='object'):
        le = LabelEncoder()
        df[col] = le.fit_transform(df[col])
        label_encoders[col] = le
    return df, label_encoders
|
| 14 |
+
|
| 15 |
+
def train_xgboost(df, target_column, task='classification'):
    """Train an XGBoost model on ``df`` and evaluate it on a 20% hold-out split.

    Args:
        df (pd.DataFrame): Input data containing features and the target.
        target_column (str): Name of the target column.
        task (str): ``'classification'`` (default); any other value selects
            regression.

    Returns:
        tuple: ``(model, metrics)`` — the fitted estimator and its metrics dict.
    """
    # Drop incomplete rows, then label-encode string columns.
    df = df.dropna()
    df, encoders = encode_dataframe(df)

    X = df.drop(columns=[target_column])
    y = df[target_column]

    # Bug fix: the split previously had no random_state, so results were not
    # reproducible; seed 42 matches models/supervised.py.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

    if task == 'classification':
        model = XGBClassifier()
    else:
        model = XGBRegressor()

    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)

    # Score with the metric set matching the task type.
    if task == 'classification':
        metrics = classification_metrics(y_test, y_pred)
    else:
        metrics = regression_metrics(y_test, y_pred)

    return model, metrics
|
rag/memory.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import os
|
| 3 |
+
import pickle
|
| 4 |
+
import logging
|
| 5 |
+
|
| 6 |
+
# Configure logging for this module
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

# Directory where datasets and model results are pickled between requests.
CACHE_DIR = ".cache"
# Bug fix: the original check-then-create pattern (os.path.exists followed by
# os.makedirs) can raise FileExistsError if two workers start concurrently;
# exist_ok=True makes creation idempotent.
os.makedirs(CACHE_DIR, exist_ok=True)

def get_dataset_path(name):
    """Return the cache pickle path for the dataset called ``name``."""
    return os.path.join(CACHE_DIR, f"{name}.pkl")

def get_model_results_path(name):
    """Return the cache pickle path for the model results called ``name``."""
    return os.path.join(CACHE_DIR, f"{name}_model_results.pkl")
|
| 22 |
+
|
| 23 |
+
def store_dataset(name, dataframe):
    """Persist a DataFrame in the cache directory under ``name``.

    Args:
        name (str): Identifier used to build the pickle filename.
        dataframe (pd.DataFrame): The data to persist.
    """
    target = get_dataset_path(name)
    try:
        dataframe.to_pickle(target)
        logging.info(f"Dataset '{name}' stored successfully at {target}")
    except Exception as e:
        # Storage failures are logged but not raised; callers treat the cache
        # as best-effort.
        logging.error(f"Error storing dataset '{name}' to {target}: {e}", exc_info=True)
|
| 36 |
+
|
| 37 |
+
def get_dataset(name):
    """Load a cached DataFrame previously saved via ``store_dataset``.

    Args:
        name (str): Identifier of the dataset.

    Returns:
        pd.DataFrame or None: The DataFrame, or ``None`` when the file is
        missing or unreadable.
    """
    source = get_dataset_path(name)
    if not os.path.exists(source):
        logging.info(f"Dataset '{name}' not found at {source}")
        return None
    try:
        frame = pd.read_pickle(source)
    except Exception as e:
        logging.error(f"Error loading dataset '{name}' from {source}: {e}", exc_info=True)
        return None
    logging.info(f"Dataset '{name}' loaded successfully from {source}")
    return frame
|
| 57 |
+
|
| 58 |
+
def store_model_results(name, model, y_test, y_pred, y_pred_proba, X_test):
    """Pickle a trained model together with its test split and predictions.

    Args:
        name (str): Identifier used to build the results filename.
        model: The trained model object.
        y_test (pd.Series): Actual target values of the test split.
        y_pred (np.array): Predictions for the test split.
        y_pred_proba (np.array, optional): Class probabilities (classification only).
        X_test (pd.DataFrame): Feature values of the test split.
    """
    destination = get_model_results_path(name)
    payload = {
        "model": model,
        "y_test": y_test,
        "y_pred": y_pred,
        "y_pred_proba": y_pred_proba,
        "X_test": X_test,
    }
    try:
        with open(destination, 'wb') as handle:
            pickle.dump(payload, handle)
        logging.info(f"Model results for '{name}' stored successfully at {destination}")
    except Exception as e:
        # Best-effort cache: failures are logged, never raised to callers.
        logging.error(f"Error storing model results for '{name}' to {destination}: {e}", exc_info=True)
|
| 83 |
+
|
| 84 |
+
def get_model_results(name):
    """Load a pickled model-results bundle saved via ``store_model_results``.

    Args:
        name (str): Identifier of the stored results.

    Returns:
        dict or None: ``{"model", "y_test", "y_pred", "y_pred_proba", "X_test"}``
        when the file exists and loads cleanly, otherwise ``None``.
    """
    source = get_model_results_path(name)
    if not os.path.exists(source):
        logging.info(f"Model results for '{name}' not found at {source}")
        return None
    try:
        with open(source, 'rb') as handle:
            bundle = pickle.load(handle)
    except Exception as e:
        logging.error(f"Error loading model results for '{name}' from {source}: {e}", exc_info=True)
        return None
    logging.info(f"Model results for '{name}' loaded successfully from {source}")
    return bundle
|
rag/rag_query.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module for querying the Groq API with dataset context."""
|
| 2 |
+
|
| 3 |
+
from groq import Groq, APIStatusError
|
| 4 |
+
from config import GROQ_API_KEY
|
| 5 |
+
from rag.memory import get_dataset
|
| 6 |
+
import pandas as pd
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
# Configure logging for this module
|
| 10 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 11 |
+
|
| 12 |
+
# Initialize Groq client with API key from config.
# NOTE(review): the client is created at import time — an invalid/missing key
# surfaces here rather than at first query; verify GROQ_API_KEY handling.
client = Groq(api_key=GROQ_API_KEY)

def query_dataset_with_groq(dataset_name, user_query):
    """Queries the Groq API with a user question, providing dataset context.

    Args:
        dataset_name (str): The name of the dataset to retrieve from memory.
        user_query (str): The user's question about the dataset.

    Returns:
        str: The AI's answer to the question, or an error message if the query fails.
    """
    logging.info(f"Attempting to query Groq with user question: {user_query}")
    df = get_dataset(dataset_name)
    if df is None:
        logging.error(f"Dataset '{dataset_name}' not found in memory for Groq query.")
        return "No dataset found with that name. Please upload a dataset first."

    # Prepare context for the LLM, including dataset overview, summary statistics, and a sample.
    # NOTE(review): for very wide datasets describe()/dtypes output can get large
    # and may exceed the model context window — confirm with production data.
    context = f"""
    You are an expert Data Analyst. You have been provided with a dataset.

    **Dataset Overview:**
    - **Shape:** {df.shape[0]} rows and {df.shape[1]} columns.
    - **Columns and Data Types:**\n{df.dtypes.to_string()}

    **Summary Statistics:**\n{df.describe(include='all').to_string()}

    **First 5 Rows:**\n{df.head(5).to_string(index=False)}

    **User Question:** {user_query}

    Answer the user's question clearly and accurately based *only* on the provided dataset information.
    """

    try:
        logging.info("Sending request to Groq API for chat completion.")
        response = client.chat.completions.create(
            model="llama3-70b-8192",  # Using a powerful model for better understanding
            messages=[
                {"role": "system", "content": "You are a helpful data science assistant. Provide concise and accurate answers."},
                {"role": "user", "content": context}
            ],
            temperature=0.1,  # Low temperature for factual and less creative responses
            max_tokens=1024,  # Limit response length
            top_p=1,
            stop=None,
        )
        ai_response_content = response.choices[0].message.content
        logging.info("Successfully received response from Groq API.")
        return ai_response_content
    except APIStatusError as e:
        # Groq returned an HTTP error status; 503 gets a friendlier message.
        logging.error(f"Groq API error occurred: Status Code {e.status_code}, Response: {e.response}", exc_info=True)
        if e.status_code == 503:
            return "The AI service is currently unavailable due to high demand or maintenance. Please try again later."
        else:
            return f"An error occurred with the AI service (Status: {e.status_code}). Please check the logs for more details."
    except Exception as e:
        logging.error(f"An unexpected error occurred while querying the AI: {e}", exc_info=True)
        return f"An unexpected error occurred while processing your request: {e}"
|
requirements.txt
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pandas
|
| 2 |
+
numpy
|
| 3 |
+
scikit-learn
|
| 4 |
+
matplotlib
|
| 5 |
+
seaborn
|
| 6 |
+
plotly
|
| 7 |
+
xgboost
|
| 8 |
+
catboost
|
| 9 |
+
langgraph
|
| 10 |
+
python-dotenv
|
| 11 |
+
faiss-cpu
|
| 12 |
+
tiktoken
|
| 13 |
+
groq
|
| 14 |
+
flask
|
| 15 |
+
scipy
|
| 16 |
+
gunicorn
|
utils/data_cleaner.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
from sklearn.preprocessing import LabelEncoder, StandardScaler
|
| 3 |
+
from sklearn.impute import SimpleImputer
|
| 4 |
+
import logging
|
| 5 |
+
|
| 6 |
+
# Configure logging for this module
|
| 7 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 8 |
+
|
| 9 |
+
def clean_data(df):
    """Impute missing values in the given DataFrame and return it.

    Numeric columns are filled with the column mean; object/category columns
    with the most frequent value. The frame is modified in place and also
    returned for chaining.

    Args:
        df (pd.DataFrame): The DataFrame to clean.

    Returns:
        pd.DataFrame: The same DataFrame with missing values imputed.
    """
    logging.info("Starting data cleaning process.")

    # Mean-impute every numeric column.
    numeric_columns = df.select_dtypes(include=['number']).columns
    if not numeric_columns.empty:
        logging.info(f"Imputing missing numerical values for columns: {list(numeric_columns)}")
        df[numeric_columns] = SimpleImputer(strategy='mean').fit_transform(df[numeric_columns])

    # Mode-impute every categorical column.
    category_columns = df.select_dtypes(include=['object', 'category']).columns
    if not category_columns.empty:
        logging.info(f"Imputing missing categorical values for columns: {list(category_columns)}")
        df[category_columns] = SimpleImputer(strategy='most_frequent').fit_transform(df[category_columns])

    logging.info("Data cleaning process completed.")
    return df
|
| 38 |
+
|
| 39 |
+
def prepare_data(df, target_column=None):
    """Prepares the DataFrame for machine learning by cleaning, encoding, and scaling.

    Args:
        df (pd.DataFrame): The input DataFrame.
        target_column (str, optional): The name of the target column. If provided,
                                       data is prepared for supervised learning (X, y split).
                                       Otherwise, for unsupervised learning (all features).

    Returns:
        tuple: If target_column is provided:
               (X (pd.DataFrame), y (pd.Series), label_encoders (dict), is_classification (bool))
               If target_column is None:
               (df_prepared (pd.DataFrame), label_encoders (dict))
    """
    logging.info(f"Starting data preparation process. Target column: {target_column}")
    df = clean_data(df.copy())  # Ensure we work on a copy to avoid modifying original df
    label_encoders = {}
    is_classification = False

    # Encode categorical features; the target column (if categorical) is
    # handled separately below so its encoder is stored under its own key.
    for col in df.select_dtypes(include=['object', 'category']).columns:
        if col != target_column:
            logging.info(f"Encoding categorical feature: {col}")
            le = LabelEncoder()
            df[col] = le.fit_transform(df[col])
            label_encoders[col] = le

    if target_column:
        # Supervised learning preparation
        logging.info(f"Preparing data for supervised learning with target: {target_column}")
        # Heuristic task detection: object dtype or <= 10 unique values means
        # classification. NOTE(review): a numeric regression target with few
        # distinct values would be mislabeled as classification — confirm the
        # threshold suits the expected datasets.
        if df[target_column].dtype == 'object' or df[target_column].nunique() <= 10:
            is_classification = True
            logging.info(f"Target column '{target_column}' identified as classification.")
            le = LabelEncoder()
            df[target_column] = le.fit_transform(df[target_column])
            label_encoders[target_column] = le
        else:
            logging.info(f"Target column '{target_column}' identified as regression.")

        X = df.drop(columns=[target_column])
        y = df[target_column]

        # Scale numerical features in X (the target y is never scaled).
        numerical_cols = X.select_dtypes(include=['number']).columns
        if not numerical_cols.empty:
            logging.info(f"Scaling numerical features in X: {list(numerical_cols)}")
            scaler = StandardScaler()
            X[numerical_cols] = scaler.fit_transform(X[numerical_cols])

        logging.info("Data preparation for supervised learning completed.")
        return X, y, label_encoders, is_classification
    else:
        # Unsupervised learning preparation: by this point all categorical
        # columns are label-encoded, so scaling covers every column.
        logging.info("Preparing data for unsupervised learning.")
        numerical_cols = df.select_dtypes(include=['number']).columns
        if not numerical_cols.empty:
            logging.info(f"Scaling numerical features for unsupervised learning: {list(numerical_cols)}")
            scaler = StandardScaler()
            df[numerical_cols] = scaler.fit_transform(df[numerical_cols])
        logging.info("Data preparation for unsupervised learning completed.")
        return df.copy(), label_encoders
|
utils/export.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pickle
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
def save_model(model, path='model.pkl'):
    """Serialize ``model`` to ``path`` with pickle.

    Args:
        model: Any picklable Python object (typically a fitted estimator).
        path (str): Destination file path. Defaults to 'model.pkl'.
    """
    with open(path, 'wb') as f:
        pickle.dump(model, f)
|
| 7 |
+
|
| 8 |
+
def load_model(path='model.pkl'):
    """Deserialize an object previously written by ``save_model``.

    SECURITY NOTE: ``pickle.load`` can execute arbitrary code from the file;
    only load files this application wrote itself, never untrusted uploads.

    Args:
        path (str): Pickle file path. Defaults to 'model.pkl'.

    Returns:
        The unpickled object, or ``None`` if ``path`` does not exist.
    """
    if os.path.exists(path):
        with open(path, 'rb') as f:
            return pickle.load(f)
    return None
|
utils/metrics.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Module for calculating and returning various machine learning evaluation metrics."""
|
| 2 |
+
|
| 3 |
+
from sklearn.metrics import accuracy_score, r2_score, confusion_matrix, mean_squared_error, precision_score, recall_score, f1_score
|
| 4 |
+
import numpy as np
|
| 5 |
+
import logging
|
| 6 |
+
|
| 7 |
+
# Configure logging for this module
|
| 8 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 9 |
+
|
| 10 |
+
def classification_metrics(y_true, y_pred):
    """Compute accuracy, weighted precision/recall/F1, and the confusion matrix.

    Args:
        y_true (array-like): Ground-truth labels.
        y_pred (array-like): Predicted labels.

    Returns:
        dict: Metric name -> value; on failure, a single "error" entry instead.
    """
    logging.info("Calculating classification metrics.")
    try:
        # zero_division=0 prevents warnings/NaNs when a class has no predictions.
        results = {
            "Accuracy": accuracy_score(y_true, y_pred),
            "Precision": precision_score(y_true, y_pred, average='weighted', zero_division=0),
            "Recall": recall_score(y_true, y_pred, average='weighted', zero_division=0),
            "F1 Score": f1_score(y_true, y_pred, average='weighted', zero_division=0),
            "Confusion Matrix": confusion_matrix(y_true, y_pred).tolist(),
        }
        logging.info("Classification metrics calculated successfully.")
        return results
    except Exception as e:
        logging.error(f"Error calculating classification metrics: {e}", exc_info=True)
        return {"error": f"Failed to calculate classification metrics: {e}"}
|
| 34 |
+
|
| 35 |
+
def regression_metrics(y_true, y_pred):
    """Calculates common regression metrics.

    Args:
        y_true (array-like): True target values.
        y_pred (array-like): Predicted target values.

    Returns:
        dict: R2 score, Mean Squared Error (MSE), and Root Mean Squared Error
        (RMSE); on failure, a single "error" entry instead.
    """
    logging.info("Calculating regression metrics.")
    try:
        # Compute MSE once and derive RMSE from it (previously the error was
        # computed twice).
        mse = mean_squared_error(y_true, y_pred)
        metrics = {
            "R2 Score": r2_score(y_true, y_pred),
            "MSE": mse,
            "RMSE": np.sqrt(mse)
        }
        logging.info("Regression metrics calculated successfully.")
        return metrics
    except Exception as e:
        logging.error(f"Error calculating regression metrics: {e}", exc_info=True)
        return {"error": f"Failed to calculate regression metrics: {e}"}
|
visuals/charts.py
ADDED
|
@@ -0,0 +1,579 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import matplotlib.pyplot as plt
|
| 2 |
+
import seaborn as sns
|
| 3 |
+
import numpy as np
|
| 4 |
+
from scipy import stats
|
| 5 |
+
import pandas as pd
|
| 6 |
+
from sklearn.metrics import confusion_matrix, roc_curve, auc
|
| 7 |
+
from sklearn.cluster import KMeans
|
| 8 |
+
from sklearn.decomposition import PCA
|
| 9 |
+
from sklearn.manifold import TSNE
|
| 10 |
+
from scipy.cluster.hierarchy import dendrogram, linkage
|
| 11 |
+
import logging
|
| 12 |
+
|
| 13 |
+
# Configure logging for this module
|
| 14 |
+
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
| 15 |
+
|
| 16 |
+
# Consistent theme settings for plots
|
| 17 |
+
FIG_SIZE = (10, 6)
|
| 18 |
+
TITLE_FONT_SIZE = 14
|
| 19 |
+
LABEL_FONT_SIZE = 12
|
| 20 |
+
LEGEND_FONT_SIZE = 10
|
| 21 |
+
PRIMARY_COLOR = "#4C72B0" # A nice blue
|
| 22 |
+
SECONDARY_COLOR = "#55A868" # A nice green
|
| 23 |
+
|
| 24 |
+
def plot_histogram(df, col):
    """Generates a histogram (with KDE overlay) for a given numeric column.

    The bin count is chosen with the Freedman-Diaconis rule computed over
    non-null values and clamped to [1, 100], and mean/median reference lines
    plus the skewness are annotated on the chart.

    Args:
        df (pd.DataFrame): The input DataFrame.
        col (str): The name of the numeric column to plot.

    Returns:
        tuple: A matplotlib Figure object and an error message (None if successful).
    """
    logging.info(f"Generating histogram for column: {col}")
    if col not in df.columns:
        logging.error(f"Column '{col}' not found for histogram.")
        return None, f"Column '{col}' not found."
    if not pd.api.types.is_numeric_dtype(df[col]):
        logging.error(f"Column '{col}' is not numeric for histogram.")
        return None, "Histogram is only for numeric columns."

    # Work on NaN-free values so the FD rule, mean/median and skewness are
    # all computed over the same well-defined sample.
    series = df[col].dropna()
    if series.empty:
        logging.error(f"Column '{col}' has no non-null values for histogram.")
        return None, f"Column '{col}' has no non-null values."

    plt.figure(figsize=FIG_SIZE)
    sns.set_style("whitegrid")

    # Freedman-Diaconis rule: bin_width = 2*IQR / n^(1/3). Clamp the result
    # to [1, 100] so a single extreme outlier cannot explode the bin count.
    try:
        iqr = series.quantile(0.75) - series.quantile(0.25)
        if iqr > 0:
            bin_width = 2 * iqr / (len(series) ** (1 / 3))
            bins = int((series.max() - series.min()) / bin_width) if bin_width > 0 else 25
            bins = max(1, min(bins, 100))
        else:
            bins = 25  # Default if IQR is zero
    except Exception as e:
        logging.warning(f"Could not calculate optimal bins for {col}: {e}. Using default 25 bins.")
        bins = 25

    ax = sns.histplot(series, kde=True, bins=bins, color=PRIMARY_COLOR, edgecolor='black', line_kws={'linewidth': 2, 'linestyle': '--'})

    # Mean/median reference lines let readers judge skew at a glance.
    mean_val = series.mean()
    median_val = series.median()
    ax.axvline(mean_val, color='red', linestyle='--', linewidth=2, label=f'Mean: {mean_val:.2f}')
    ax.axvline(median_val, color='green', linestyle='-', linewidth=2, label=f'Median: {median_val:.2f}')

    skewness = series.skew()
    plt.title(f'Distribution of {col} (Skewness: {skewness:.2f})', fontsize=TITLE_FONT_SIZE, weight='bold')
    plt.xlabel(col, fontsize=LABEL_FONT_SIZE)
    # Fixed label: sns.histplot defaults to stat='count', so the y-axis shows
    # counts, not a density.
    plt.ylabel('Count', fontsize=LABEL_FONT_SIZE)
    plt.legend(fontsize=LEGEND_FONT_SIZE)
    plt.tight_layout()
    logging.info(f"Histogram for {col} generated successfully.")
    return plt.gcf(), None
def plot_bar(df, col):
    """Generates a horizontal bar plot of value counts for a column.

    At most 15 bars are shown; when the column has more categories, the 14
    most frequent are kept and the remainder is collapsed into 'Other'.

    Args:
        df (pd.DataFrame): The input DataFrame.
        col (str): The name of the column to plot.

    Returns:
        tuple: A matplotlib Figure object and an error message (None if successful).
    """
    logging.info(f"Generating bar plot for column: {col}")
    if col not in df.columns:
        logging.error(f"Column '{col}' not found for bar plot.")
        return None, f"Column '{col}' not found."

    plt.figure(figsize=FIG_SIZE)
    sns.set_style("whitegrid")

    counts = df[col].value_counts()
    # Handle too many categories by showing top N and grouping others.
    if len(counts) > 15:
        logging.info(f"Column {col} has too many unique values ({len(counts)}). Showing top 14 and grouping others.")
        # value_counts() is sorted descending, so a positional slice splits
        # head and tail exactly once each; the previous nlargest/nsmallest
        # pair could double-count rows when counts were tied at the boundary.
        top_14 = counts.iloc[:14].copy()
        top_14['Other'] = counts.iloc[14:].sum()
        counts = top_14

    ax = sns.barplot(y=counts.index.astype(str), x=counts.values, palette="viridis", orient='h')

    # Add count labels to bars.
    for i, v in enumerate(counts.values):
        ax.text(v + 1, i, str(v), color='black', va='center', fontsize=10)

    plt.title(f'Frequency of {col}', fontsize=TITLE_FONT_SIZE, weight='bold')
    plt.xlabel('Count', fontsize=LABEL_FONT_SIZE)
    plt.ylabel(col, fontsize=LABEL_FONT_SIZE)
    plt.tight_layout()
    logging.info(f"Bar plot for {col} generated successfully.")
    return plt.gcf(), None
def plot_scatter(df, col1, col2, color_col=None):
    """Draws a 2D scatter chart of two numeric columns with an optional hue.

    A least-squares trend line (annotated with its R² value) is overlaid
    whenever at least two complete observations are available.

    Args:
        df (pd.DataFrame): Source data.
        col1 (str): Numeric column plotted on the x-axis.
        col2 (str): Numeric column plotted on the y-axis.
        color_col (str, optional): Column used to color the points.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info(f"Generating scatter plot for {col1} vs {col2}, colored by {color_col or 'None'}")

    # Validate inputs up front with guard clauses.
    if not (col1 in df.columns and col2 in df.columns):
        logging.error(f"One or both columns ({col1}, {col2}) not found for scatter plot.")
        return None, "One or both columns not found."
    both_numeric = pd.api.types.is_numeric_dtype(df[col1]) and pd.api.types.is_numeric_dtype(df[col2])
    if not both_numeric:
        logging.error(f"Columns {col1} or {col2} are not numeric for scatter plot.")
        return None, "Scatter plots are only available for numeric columns."
    use_hue = bool(color_col) and color_col != 'None'
    if use_hue and color_col not in df.columns:
        logging.error(f"Color column '{color_col}' not found for scatter plot.")
        return None, f"Color column '{color_col}' not found."

    try:
        plt.figure(figsize=FIG_SIZE)
        sns.set_style("whitegrid")
        hue = color_col if use_hue else None

        # Only rows with both coordinates present are plottable.
        complete = df.dropna(subset=[col1, col2])

        sns.scatterplot(data=complete, x=col1, y=col2, hue=hue, palette="coolwarm", s=50, alpha=0.6)

        # Overlay a least-squares fit when there is enough data to compute one.
        if len(complete) > 1:
            slope, intercept, r_val, _, _ = stats.linregress(complete[col1], complete[col2])
            xs = np.array([complete[col1].min(), complete[col1].max()])
            plt.plot(xs, slope * xs + intercept, color='red', linestyle='--',
                     label=f'Trend Line (R² = {r_val**2:.2f})')
            plt.legend(fontsize=LEGEND_FONT_SIZE)
        else:
            logging.warning("Not enough data points for linear regression trend line.")

        plt.title(f'{col1} vs. {col2}', fontsize=TITLE_FONT_SIZE, weight='bold')
        plt.xlabel(col1, fontsize=LABEL_FONT_SIZE)
        plt.ylabel(col2, fontsize=LABEL_FONT_SIZE)
        plt.tight_layout()
        logging.info(f"Scatter plot for {col1} vs {col2} generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"An error occurred during scatter plot generation: {e}", exc_info=True)
        return None, f"An error occurred during plot generation: {e}"
def plot_box(df, continuous_var, group_var):
    """Draws horizontal box plots of a numeric column split by a grouping column.

    Groups are ordered by descending median of the numeric variable so the
    most extreme groups are easiest to compare.

    Args:
        df (pd.DataFrame): Source data.
        continuous_var (str): Numeric column summarized by each box.
        group_var (str): Column whose categories define the groups.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info(f"Generating box plot for {continuous_var} by {group_var}")
    have_both = continuous_var in df.columns and group_var in df.columns
    if not have_both:
        logging.error(f"One or both columns ({continuous_var}, {group_var}) not found for box plot.")
        return None, "One or both columns not found."
    if not pd.api.types.is_numeric_dtype(df[continuous_var]):
        logging.error(f"Column '{continuous_var}' is not numeric for box plot.")
        return None, "Box plots require a numeric column for the x-axis."

    plt.figure(figsize=FIG_SIZE)
    sns.set_style("whitegrid")

    # Rank the groups by their median so the boxes appear in a meaningful order.
    medians = df.groupby(group_var)[continuous_var].median()
    order = medians.sort_values(ascending=False).index

    sns.boxplot(data=df, x=continuous_var, y=group_var, palette="Set2", order=order, orient='h')

    plt.title(f'{continuous_var} by {group_var}', fontsize=TITLE_FONT_SIZE, weight='bold')
    plt.xlabel(continuous_var, fontsize=LABEL_FONT_SIZE)
    plt.ylabel(group_var, fontsize=LABEL_FONT_SIZE)
    plt.tight_layout()
    logging.info(f"Box plot for {continuous_var} by {group_var} generated successfully.")
    return plt.gcf(), None
def plot_pie(df, col):
    """Generates a donut-style pie chart for a given categorical column.

    At most seven slices are shown; when the column has more categories, the
    six most frequent are kept and the remainder is collapsed into an 'Other'
    slice.

    Args:
        df (pd.DataFrame): The input DataFrame.
        col (str): The name of the categorical column to plot.

    Returns:
        tuple: A matplotlib Figure object and an error message (None if successful).
    """
    logging.info(f"Generating pie chart for column: {col}")
    if col not in df.columns:
        logging.error(f"Column '{col}' not found for pie chart.")
        return None, f"Column '{col}' not found."

    counts = df[col].value_counts()
    # Handle too many categories by showing top N and grouping others.
    if len(counts) > 7:
        logging.info(f"Column {col} has too many unique values ({len(counts)}). Showing top 6 and grouping others.")
        # value_counts() is sorted descending, so a positional slice splits
        # head and tail exactly once each; the previous nlargest/nsmallest
        # pair could double-count rows when counts were tied at the boundary.
        top_6 = counts.iloc[:6].copy()
        top_6['Other'] = counts.iloc[6:].sum()
        counts = top_6

    plt.figure(figsize=(8, 8))  # Pie charts often look better square

    explode = [0.03] * len(counts)  # Slightly separate slices for better visual
    colors = sns.color_palette('pastel')[0:len(counts)]

    plt.pie(counts, labels=counts.index, autopct='%1.1f%%', startangle=90, explode=explode, colors=colors, pctdistance=0.85)
    centre_circle = plt.Circle((0, 0), 0.70, fc='white')  # Donut chart effect
    fig = plt.gcf()
    fig.gca().add_artist(centre_circle)

    plt.title(f'Distribution of {col}', fontsize=TITLE_FONT_SIZE, weight='bold')
    plt.tight_layout()
    logging.info(f"Pie chart for {col} generated successfully.")
    return plt.gcf(), None
def plot_heatmap(df):
    """Draws a lower-triangle correlation heatmap over the numeric columns.

    Args:
        df (pd.DataFrame): Source data; non-numeric columns are ignored.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info("Generating correlation heatmap.")
    numeric_only = df.select_dtypes(include=np.number)
    if numeric_only.shape[1] < 2:
        logging.error("Not enough numeric columns for a heatmap.")
        return None, "Not enough numeric columns for a heatmap."

    plt.figure(figsize=(12, 10))
    corr_matrix = numeric_only.corr()

    # Mask the redundant upper triangle so each pair appears exactly once.
    upper_mask = np.triu(np.ones_like(corr_matrix, dtype=bool))

    sns.heatmap(corr_matrix, mask=upper_mask, annot=True, cmap='coolwarm', fmt=".2f",
                linewidths=.5, vmin=-1, vmax=1, annot_kws={"size": 8})
    plt.title('Correlation Heatmap', fontsize=TITLE_FONT_SIZE, weight='bold')
    plt.xticks(rotation=45, ha='right', fontsize=LABEL_FONT_SIZE)
    plt.yticks(rotation=0, fontsize=LABEL_FONT_SIZE)
    plt.tight_layout()
    logging.info("Correlation heatmap generated successfully.")
    return plt.gcf(), None
def plot_confusion_matrix(y_true, y_pred, class_names):
    """Draws an annotated confusion-matrix heatmap.

    Args:
        y_true (array-like): Ground-truth labels.
        y_pred (array-like): Model predictions.
        class_names (list): Axis tick labels, one per class.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info("Generating confusion matrix.")
    try:
        matrix = confusion_matrix(y_true, y_pred)
        plt.figure(figsize=(8, 6))
        sns.heatmap(matrix, annot=True, fmt='d', cmap='Blues',
                    xticklabels=class_names, yticklabels=class_names)
        plt.title('Confusion Matrix', fontsize=TITLE_FONT_SIZE, weight='bold')
        plt.ylabel('Actual', fontsize=LABEL_FONT_SIZE)
        plt.xlabel('Predicted', fontsize=LABEL_FONT_SIZE)
        plt.tight_layout()
        logging.info("Confusion matrix generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"Error generating confusion matrix: {e}", exc_info=True)
        return None, f"Error generating confusion matrix: {e}"
def plot_roc_curve(y_true, y_pred_proba, class_names=None):
    """Generates a Receiver Operating Characteristic (ROC) curve.

    Accepts binary scores as a 1-D array or an (n, 2) probability matrix; for
    more than two classes, the curve for the first (sorted) class vs. the rest
    is plotted.

    Args:
        y_true (array-like): True labels.
        y_pred_proba (array-like): Target scores / class probabilities.
        class_names (list, optional): Class names; kept for API compatibility,
            not used directly in the plot.

    Returns:
        tuple: A matplotlib Figure object and an error message (None if successful).
    """
    logging.info("Generating ROC curve.")
    try:
        # Accept plain lists as well as ndarrays: the .ndim/.shape checks
        # below previously failed for non-numpy inputs.
        y_true = np.asarray(y_true)
        y_pred_proba = np.asarray(y_pred_proba)

        # Handle multi-class or binary probability predictions.
        if y_pred_proba.ndim == 1:  # Binary classification, single probability array
            fpr, tpr, _ = roc_curve(y_true, y_pred_proba)
        elif y_pred_proba.shape[1] == 2:  # Binary classification, two probability columns
            fpr, tpr, _ = roc_curve(y_true, y_pred_proba[:, 1])  # Second column = positive class
        else:  # Multi-class: plot the first class vs. the rest
            logging.warning("Multi-class ROC curve requested. Plotting for first class vs. rest.")
            # Binarize y_true for the first (sorted) class.
            y_true_bin = (y_true == sorted(np.unique(y_true))[0]).astype(int)
            fpr, tpr, _ = roc_curve(y_true_bin, y_pred_proba[:, 0])

        roc_auc = auc(fpr, tpr)
        plt.figure(figsize=FIG_SIZE)
        sns.set_style("whitegrid")
        plt.plot(fpr, tpr, color='darkorange', lw=2, label=f'ROC curve (area = {roc_auc:.2f})')
        plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')  # chance diagonal
        plt.xlim([0.0, 1.0])
        plt.ylim([0.0, 1.05])
        plt.xlabel('False Positive Rate', fontsize=LABEL_FONT_SIZE)
        plt.ylabel('True Positive Rate', fontsize=LABEL_FONT_SIZE)
        plt.title('Receiver Operating Characteristic', fontsize=TITLE_FONT_SIZE, weight='bold')
        plt.legend(loc="lower right", fontsize=LEGEND_FONT_SIZE)
        plt.tight_layout()
        logging.info("ROC curve generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"Error generating ROC curve: {e}", exc_info=True)
        return None, f"Error generating ROC curve: {e}"
def plot_feature_importance(model, feature_names):
    """Draws a horizontal bar chart of a fitted model's feature importances.

    Only the 20 highest-ranked features are shown to keep the chart legible.

    Args:
        model: Fitted estimator exposing a 'feature_importances_' attribute.
        feature_names (list): Names aligned with the importance array.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info("Generating feature importance plot.")
    if not hasattr(model, 'feature_importances_'):
        logging.error("Model does not have feature importances attribute.")
        return None, "Model does not have feature importances."

    try:
        scores = model.feature_importances_
        ranking = np.argsort(scores)[::-1]  # highest importance first

        plt.figure(figsize=FIG_SIZE)
        sns.set_style("whitegrid")

        top_n = min(len(feature_names), 20)  # cap the bar count for readability
        top_idx = ranking[:top_n]

        plt.title("Feature Importances", fontsize=TITLE_FONT_SIZE, weight='bold')
        sns.barplot(x=scores[top_idx], y=[feature_names[i] for i in top_idx], palette="viridis")
        plt.xlabel("Relative Importance", fontsize=LABEL_FONT_SIZE)
        plt.ylabel("Feature Name", fontsize=LABEL_FONT_SIZE)
        plt.tight_layout()
        logging.info("Feature importance plot generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"Error generating feature importance plot: {e}", exc_info=True)
        return None, f"Error generating feature importance plot: {e}"
def plot_elbow_curve(X, max_k=10):
    """Plots KMeans inertia against k to support elbow-method cluster selection.

    Args:
        X (pd.DataFrame or np.array): Data to cluster; rows with NaNs are dropped.
        max_k (int, optional): Largest k to evaluate. Defaults to 10.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info(f"Generating elbow curve for max_k={max_k}")
    inertias = []
    if not isinstance(X, pd.DataFrame):
        X = pd.DataFrame(X)  # Ensure X is a DataFrame for .dropna()
    data = X.dropna()  # KMeans cannot handle missing values

    if data.empty:
        logging.error("Data is empty after cleaning for Elbow Curve.")
        return None, "Data is empty after cleaning for Elbow Curve."

    # k can never exceed the number of available samples.
    if max_k > len(data):
        logging.warning(f"max_k ({max_k}) is greater than number of samples ({len(data)}). Adjusting max_k.")
        max_k = len(data)

    if max_k < 1:
        return None, "max_k must be at least 1."

    try:
        for k in range(1, max_k + 1):
            model = KMeans(n_clusters=k, random_state=42, n_init=10)  # explicit n_init avoids a FutureWarning
            model.fit(data)
            inertias.append(model.inertia_)

        plt.figure(figsize=FIG_SIZE)
        sns.set_style("whitegrid")
        ks = range(1, max_k + 1)
        plt.plot(ks, inertias, marker='o', linestyle='-', color=PRIMARY_COLOR)
        plt.xlabel('Number of clusters (k)', fontsize=LABEL_FONT_SIZE)
        plt.ylabel('Inertia', fontsize=LABEL_FONT_SIZE)
        plt.title('Elbow Method For Optimal k', fontsize=TITLE_FONT_SIZE, weight='bold')
        plt.xticks(np.arange(1, max_k + 1, 1))  # integer ticks only
        plt.tight_layout()
        logging.info("Elbow curve generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"Error generating elbow curve: {e}", exc_info=True)
        return None, f"Error generating elbow curve: {e}"
def plot_cluster_plot(X, labels=None, title="Cluster Plot"):
    """Generates a 2D scatter plot of clusters, optionally after dimensionality reduction.

    Data with more than two features is projected onto two principal
    components first. Rows containing NaNs are dropped, and cluster labels
    are realigned to the surviving rows so point colors stay correct.

    Args:
        X (pd.DataFrame or np.array): The input data.
        labels (array-like, optional): Cluster labels for coloring points. If None, points are not colored.
        title (str, optional): Title of the plot. Defaults to "Cluster Plot".

    Returns:
        tuple: A matplotlib Figure object and an error message (None if successful).
    """
    logging.info(f"Generating cluster plot with title: {title}")
    if not isinstance(X, pd.DataFrame):
        X = pd.DataFrame(X)

    # Remember which rows survive the NaN drop so array labels can follow suit.
    keep_mask = X.notna().all(axis=1)
    X_cleaned = X[keep_mask]
    if X_cleaned.empty:
        logging.error("Data is empty after cleaning for Cluster Plot.")
        return None, "Data is empty after cleaning for Cluster Plot."

    plot_df = X_cleaned.copy()
    xlabel = 'Feature 1'
    ylabel = 'Feature 2'

    # Reduce dimensions to 2 if the data has more than 2 features.
    if X_cleaned.shape[1] > 2:
        try:
            logging.info("Applying PCA for dimensionality reduction to 2 components.")
            pca = PCA(n_components=2)
            X_reduced = pca.fit_transform(X_cleaned)
            plot_df = pd.DataFrame(X_reduced, columns=['PC1', 'PC2'])
            xlabel = 'Principal Component 1'
            ylabel = 'Principal Component 2'
        except Exception as e:
            logging.error(f"Could not reduce dimensions for cluster plot using PCA: {e}", exc_info=True)
            return None, f"Could not reduce dimensions for cluster plot: {e}"
    elif X_cleaned.shape[1] == 1:
        logging.error("Data must have at least 2 dimensions for a 2D cluster plot.")
        return None, "Data must have at least 2 dimensions for a 2D cluster plot."

    plt.figure(figsize=FIG_SIZE)
    sns.set_style("whitegrid")

    if labels is not None:
        # Align labels with the rows that survived cleaning. Previously an
        # array-typed `labels` was passed through unaligned, which raised a
        # length-mismatch error whenever dropna() removed rows.
        if isinstance(labels, pd.Series):
            labels_aligned = labels.loc[X_cleaned.index] if labels.index.equals(X.index) else labels
        else:
            labels_arr = np.asarray(labels)
            if len(labels_arr) == len(X):
                labels_aligned = labels_arr[keep_mask.to_numpy()]  # drop the same rows, by position
            else:
                labels_aligned = labels_arr  # assume caller already aligned
        sns.scatterplot(x=plot_df.iloc[:, 0], y=plot_df.iloc[:, 1], hue=labels_aligned, palette='viridis', s=50, alpha=0.7)
        plt.legend(title='Cluster', bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=LEGEND_FONT_SIZE)
    else:
        sns.scatterplot(x=plot_df.iloc[:, 0], y=plot_df.iloc[:, 1], s=50, alpha=0.7, color=PRIMARY_COLOR)

    plt.title(title, fontsize=TITLE_FONT_SIZE, weight='bold')
    plt.xlabel(xlabel, fontsize=LABEL_FONT_SIZE)
    plt.ylabel(ylabel, fontsize=LABEL_FONT_SIZE)
    plt.tight_layout()
    logging.info("Cluster plot generated successfully.")
    return plt.gcf(), None
def plot_dendrogram(X):
    """Draws a Ward-linkage dendrogram for hierarchical clustering.

    Rows with NaNs are dropped, and at most 1000 rows are used so the tree
    stays readable and affordable to compute.

    Args:
        X (pd.DataFrame or np.array): Data to cluster.

    Returns:
        tuple: (matplotlib Figure, None) on success, or (None, error message).
    """
    logging.info("Generating dendrogram.")
    if not isinstance(X, pd.DataFrame):
        X = pd.DataFrame(X)
    data = X.dropna()  # linkage cannot handle missing values

    if data.empty:
        logging.error("Data is empty after cleaning for Dendrogram.")
        return None, "Data is empty after cleaning for Dendrogram."

    # Cap the sample: a dendrogram over thousands of leaves is unreadable
    # and linkage is expensive.
    if data.shape[0] > 1000:
        logging.warning(f"Dendrogram data size ({data.shape[0]}) is large. Sampling 1000 points.")
        data = data.sample(n=1000, random_state=42)

    try:
        merge_tree = linkage(data, 'ward')  # Ward method minimizes variance within clusters
        plt.figure(figsize=(12, 8))
        dendrogram(merge_tree, orientation='top', distance_sort='descending',
                   show_leaf_counts=True, leaf_rotation=90, leaf_font_size=8)
        plt.title('Hierarchical Clustering Dendrogram', fontsize=TITLE_FONT_SIZE, weight='bold')
        plt.xlabel('Sample Index or Cluster Size', fontsize=LABEL_FONT_SIZE)
        plt.ylabel('Distance', fontsize=LABEL_FONT_SIZE)
        plt.tight_layout()
        logging.info("Dendrogram generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"Error generating dendrogram: {e}", exc_info=True)
        return None, f"Error generating dendrogram: {e}"
def plot_tsne(X, labels=None):
    """Generates a t-SNE plot for visualization of high-dimensional data.

    Rows with NaNs are dropped and at most 2000 rows are embedded (t-SNE is
    expensive); labels, when given, are kept positionally aligned with the
    surviving rows throughout.

    Args:
        X (pd.DataFrame or np.array): The input high-dimensional data.
        labels (array-like, optional): Labels for coloring points (e.g., cluster assignments).

    Returns:
        tuple: A matplotlib Figure object and an error message (None if successful).
    """
    logging.info("Generating t-SNE plot.")
    if not isinstance(X, pd.DataFrame):
        X = pd.DataFrame(X)

    # Re-index labels onto X's own index up front, so the NaN drop and the
    # sampling below keep point colors aligned with their rows. The previous
    # approach skipped alignment after dropna() and raised a KeyError for
    # non-default indexes in the sampling branch.
    if labels is not None:
        if isinstance(labels, pd.Series):
            if not labels.index.equals(X.index) and len(labels) == len(X):
                labels = pd.Series(labels.to_numpy(), index=X.index)
        else:
            label_arr = np.asarray(labels)
            if len(label_arr) == len(X):
                labels = pd.Series(label_arr, index=X.index)
            else:
                logging.warning("Labels length does not match data length; ignoring labels for t-SNE plot.")
                labels = None

    X_cleaned = X.dropna()  # Handle NaNs

    if X_cleaned.empty:
        logging.error("Data is empty after cleaning for t-SNE.")
        return None, "Data is empty after cleaning for t-SNE."

    # t-SNE can be computationally expensive on large datasets, so sample.
    if X_cleaned.shape[0] > 2000:
        logging.warning(f"t-SNE data size ({X_cleaned.shape[0]}) is large. Sampling 2000 points.")
        X_cleaned = X_cleaned.sample(n=2000, random_state=42)

    if labels is not None and isinstance(labels, pd.Series):
        try:
            labels = labels.loc[X_cleaned.index]  # follow the dropped/sampled rows
        except KeyError:
            logging.warning("Could not align labels with cleaned data; ignoring labels for t-SNE plot.")
            labels = None

    try:
        # Perplexity must be strictly less than the number of samples.
        perplexity_val = min(30, len(X_cleaned) - 1) if len(X_cleaned) > 1 else 1
        if perplexity_val < 1:
            return None, "Not enough samples for t-SNE (need at least 2)."

        tsne = TSNE(n_components=2, random_state=42, perplexity=perplexity_val)
        X_tsne = tsne.fit_transform(X_cleaned)

        plt.figure(figsize=FIG_SIZE)
        sns.set_style("whitegrid")

        if labels is not None:
            sns.scatterplot(x=X_tsne[:, 0], y=X_tsne[:, 1], hue=labels, palette='viridis', s=50, alpha=0.7)
            plt.legend(title='Cluster/Label', bbox_to_anchor=(1.05, 1), loc='upper left', fontsize=LEGEND_FONT_SIZE)
        else:
            sns.scatterplot(x=X_tsne[:, 0], y=X_tsne[:, 1], s=50, alpha=0.7, color=PRIMARY_COLOR)

        plt.title('t-SNE Plot', fontsize=TITLE_FONT_SIZE, weight='bold')
        plt.xlabel('t-SNE Component 1', fontsize=LABEL_FONT_SIZE)
        plt.ylabel('t-SNE Component 2', fontsize=LABEL_FONT_SIZE)
        plt.tight_layout()
        logging.info("t-SNE plot generated successfully.")
        return plt.gcf(), None
    except Exception as e:
        logging.error(f"Error generating t-SNE plot: {e}", exc_info=True)
        return None, f"Error generating t-SNE plot: {e}"